author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit    698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree      173a775858bd501c378080a10dca74132f05bc50 /src/tools
parent    Initial commit. (diff)
download  rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
          rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip

Adding upstream version 1.64.0+dfsg1. (upstream/1.64.0+dfsg1)

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools')
-rw-r--r--src/tools/build-manifest/Cargo.toml16
-rw-r--r--src/tools/build-manifest/README.md27
-rw-r--r--src/tools/build-manifest/src/checksum.rs97
-rw-r--r--src/tools/build-manifest/src/main.rs608
-rw-r--r--src/tools/build-manifest/src/manifest.rs182
-rw-r--r--src/tools/build-manifest/src/versions.rs200
-rw-r--r--src/tools/bump-stage0/Cargo.toml14
-rw-r--r--src/tools/bump-stage0/src/main.rs233
-rw-r--r--src/tools/cargotest/Cargo.toml8
-rw-r--r--src/tools/cargotest/main.rs214
-rwxr-xr-xsrc/tools/cherry-pick.sh34
-rw-r--r--src/tools/clippy/.cargo/config.toml13
-rw-r--r--src/tools/clippy/.editorconfig21
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/blank_issue.yml44
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/bug_report.yml57
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/config.yml5
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/false_negative.yml50
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/false_positive.yml68
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/ice.yml48
-rw-r--r--src/tools/clippy/.github/ISSUE_TEMPLATE/new_lint.yml71
-rw-r--r--src/tools/clippy/.github/PULL_REQUEST_TEMPLATE.md39
-rw-r--r--src/tools/clippy/.github/deploy.sh67
-rw-r--r--src/tools/clippy/.github/driver.sh39
-rw-r--r--src/tools/clippy/.github/workflows/clippy.yml76
-rw-r--r--src/tools/clippy/.github/workflows/clippy_bors.yml281
-rw-r--r--src/tools/clippy/.github/workflows/clippy_dev.yml70
-rw-r--r--src/tools/clippy/.github/workflows/deploy.yml64
-rw-r--r--src/tools/clippy/.github/workflows/remark.yml66
-rw-r--r--src/tools/clippy/.remarkrc13
-rw-r--r--src/tools/clippy/CHANGELOG.md4044
-rw-r--r--src/tools/clippy/CODE_OF_CONDUCT.md70
-rw-r--r--src/tools/clippy/CONTRIBUTING.md248
-rw-r--r--src/tools/clippy/COPYRIGHT7
-rw-r--r--src/tools/clippy/Cargo.toml67
-rw-r--r--src/tools/clippy/LICENSE-APACHE201
-rw-r--r--src/tools/clippy/LICENSE-MIT27
-rw-r--r--src/tools/clippy/README.md255
-rw-r--r--src/tools/clippy/book/README.md4
-rw-r--r--src/tools/clippy/book/book.toml28
-rw-r--r--src/tools/clippy/book/src/README.md34
-rw-r--r--src/tools/clippy/book/src/SUMMARY.md23
-rw-r--r--src/tools/clippy/book/src/configuration.md92
-rw-r--r--src/tools/clippy/book/src/continuous_integration/README.md18
-rw-r--r--src/tools/clippy/book/src/continuous_integration/github_actions.md21
-rw-r--r--src/tools/clippy/book/src/continuous_integration/travis.md20
-rw-r--r--src/tools/clippy/book/src/development/README.md43
-rw-r--r--src/tools/clippy/book/src/development/adding_lints.md739
-rw-r--r--src/tools/clippy/book/src/development/basics.md192
-rw-r--r--src/tools/clippy/book/src/development/common_tools_writing_lints.md279
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/README.md19
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/backport.md71
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/book.md42
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/changelog_update.md105
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/release.md142
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/sync.md123
-rw-r--r--src/tools/clippy/book/src/development/proposals/README.md11
-rw-r--r--src/tools/clippy/book/src/development/proposals/roadmap-2021.md235
-rw-r--r--src/tools/clippy/book/src/installation.md24
-rw-r--r--src/tools/clippy/book/src/lints.md105
-rw-r--r--src/tools/clippy/book/src/usage.md151
-rw-r--r--src/tools/clippy/build.rs19
-rw-r--r--src/tools/clippy/clippy.toml1
-rw-r--r--src/tools/clippy/clippy_dev/Cargo.toml21
-rw-r--r--src/tools/clippy/clippy_dev/src/bless.rs60
-rw-r--r--src/tools/clippy/clippy_dev/src/dogfood.rs33
-rw-r--r--src/tools/clippy/clippy_dev/src/fmt.rs226
-rw-r--r--src/tools/clippy/clippy_dev/src/lib.rs58
-rw-r--r--src/tools/clippy/clippy_dev/src/lint.rs55
-rw-r--r--src/tools/clippy/clippy_dev/src/main.rs314
-rw-r--r--src/tools/clippy/clippy_dev/src/new_lint.rs575
-rw-r--r--src/tools/clippy/clippy_dev/src/serve.rs65
-rw-r--r--src/tools/clippy/clippy_dev/src/setup/git_hook.rs85
-rw-r--r--src/tools/clippy/clippy_dev/src/setup/intellij.rs223
-rw-r--r--src/tools/clippy/clippy_dev/src/setup/mod.rs23
-rw-r--r--src/tools/clippy/clippy_dev/src/setup/vscode.rs104
-rw-r--r--src/tools/clippy/clippy_dev/src/update_lints.rs1277
-rw-r--r--src/tools/clippy/clippy_dummy/Cargo.toml16
-rw-r--r--src/tools/clippy/clippy_dummy/PUBLISH.md6
-rw-r--r--src/tools/clippy/clippy_dummy/build.rs42
-rw-r--r--src/tools/clippy/clippy_dummy/crates-readme.md9
-rw-r--r--src/tools/clippy/clippy_dummy/src/main.rs3
-rw-r--r--src/tools/clippy/clippy_lints/Cargo.toml38
-rw-r--r--src/tools/clippy/clippy_lints/README.md1
-rw-r--r--src/tools/clippy/clippy_lints/src/almost_complete_letter_range.rs100
-rw-r--r--src/tools/clippy/clippy_lints/src/approx_const.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/as_conversions.rs65
-rw-r--r--src/tools/clippy/clippy_lints/src/as_underscore.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/asm_syntax.rs131
-rw-r--r--src/tools/clippy/clippy_lints/src/assertions_on_constants.rs69
-rw-r--r--src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs101
-rw-r--r--src/tools/clippy/clippy_lints/src/async_yields_async.rs89
-rw-r--r--src/tools/clippy/clippy_lints/src/attrs.rs738
-rw-r--r--src/tools/clippy/clippy_lints/src/await_holding_invalid.rs289
-rw-r--r--src/tools/clippy/clippy_lints/src/blacklisted_name.rs77
-rw-r--r--src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs155
-rw-r--r--src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs107
-rw-r--r--src/tools/clippy/clippy_lints/src/booleans.rs512
-rw-r--r--src/tools/clippy/clippy_lints/src/borrow_as_ptr.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs121
-rw-r--r--src/tools/clippy/clippy_lints/src/bytecount.rs103
-rw-r--r--src/tools/clippy/clippy_lints/src/bytes_count_to_len.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/common_metadata.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/feature_name.rs92
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/mod.rs221
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs63
-rw-r--r--src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs27
-rw-r--r--src/tools/clippy/clippy_lints/src/case_sensitive_file_extension_comparisons.rs86
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_enum_constructor.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs112
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs169
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_precision_loss.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs96
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_ref_to_mut.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs69
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs143
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/mod.rs588
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs49
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs126
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/utils.rs75
-rw-r--r--src/tools/clippy/clippy_lints/src/checked_conversions.rs354
-rw-r--r--src/tools/clippy/clippy_lints/src/cognitive_complexity.rs167
-rw-r--r--src/tools/clippy/clippy_lints/src/collapsible_if.rs195
-rw-r--r--src/tools/clippy/clippy_lints/src/comparison_chain.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/copies.rs584
-rw-r--r--src/tools/clippy/clippy_lints/src/copy_iterator.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs125
-rw-r--r--src/tools/clippy/clippy_lints/src/create_dir.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/dbg_macro.rs101
-rw-r--r--src/tools/clippy/clippy_lints/src/default.rs307
-rw-r--r--src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs68
-rw-r--r--src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs245
-rw-r--r--src/tools/clippy/clippy_lints/src/default_union_representation.rs105
-rw-r--r--src/tools/clippy/clippy_lints/src/deprecated_lints.rs217
-rw-r--r--src/tools/clippy/clippy_lints/src/dereference.rs1148
-rw-r--r--src/tools/clippy/clippy_lints/src/derivable_impls.rs117
-rw-r--r--src/tools/clippy/clippy_lints/src/derive.rs528
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_methods.rs113
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs113
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_types.rs140
-rw-r--r--src/tools/clippy/clippy_lints/src/doc.rs849
-rw-r--r--src/tools/clippy/clippy_lints/src/doc_link_with_quotes.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/double_parens.rs75
-rw-r--r--src/tools/clippy/clippy_lints/src/drop_forget_ref.rs243
-rw-r--r--src/tools/clippy/clippy_lints/src/duplicate_mod.rs128
-rw-r--r--src/tools/clippy/clippy_lints/src/else_if_without_else.rs72
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_drop.rs65
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_enum.rs67
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/entry.rs658
-rw-r--r--src/tools/clippy/clippy_lints/src/enum_clike.rs81
-rw-r--r--src/tools/clippy/clippy_lints/src/enum_variants.rs306
-rw-r--r--src/tools/clippy/clippy_lints/src/equatable_if_let.rs103
-rw-r--r--src/tools/clippy/clippy_lints/src/escape.rs199
-rw-r--r--src/tools/clippy/clippy_lints/src/eta_reduction.rs235
-rw-r--r--src/tools/clippy/clippy_lints/src/excessive_bools.rs176
-rw-r--r--src/tools/clippy/clippy_lints/src/exhaustive_items.rs111
-rw-r--r--src/tools/clippy/clippy_lints/src/exit.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/explicit_write.rs142
-rw-r--r--src/tools/clippy/clippy_lints/src/fallible_impl_from.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/float_literal.rs181
-rw-r--r--src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs735
-rw-r--r--src/tools/clippy/clippy_lints/src/format.rs123
-rw-r--r--src/tools/clippy/clippy_lints/src/format_args.rs199
-rw-r--r--src/tools/clippy/clippy_lints/src/format_impl.rs253
-rw-r--r--src/tools/clippy/clippy_lints/src/format_push_string.rs83
-rw-r--r--src/tools/clippy/clippy_lints/src/formatting.rs341
-rw-r--r--src/tools/clippy/clippy_lints/src/from_over_into.rs81
-rw-r--r--src/tools/clippy/clippy_lints/src/from_str_radix_10.rs103
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/mod.rs276
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/must_use.rs259
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs122
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/result_unit_err.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs68
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/future_not_send.rs112
-rw-r--r--src/tools/clippy/clippy_lints/src/get_first.rs68
-rw-r--r--src/tools/clippy/clippy_lints/src/if_let_mutex.rs140
-rw-r--r--src/tools/clippy/clippy_lints/src/if_not_else.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs122
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_hasher.rs388
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_return.rs250
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs176
-rw-r--r--src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs136
-rw-r--r--src/tools/clippy/clippy_lints/src/index_refutable_slice.rs275
-rw-r--r--src/tools/clippy/clippy_lints/src/indexing_slicing.rs205
-rw-r--r--src/tools/clippy/clippy_lints/src/infinite_iter.rs260
-rw-r--r--src/tools/clippy/clippy_lints/src/inherent_impl.rs139
-rw-r--r--src/tools/clippy/clippy_lints/src/inherent_to_string.rs153
-rw-r--r--src/tools/clippy/clippy_lints/src/init_numbered_fields.rs81
-rw-r--r--src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/int_plus_one.rs171
-rw-r--r--src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs161
-rw-r--r--src/tools/clippy/clippy_lints/src/invalid_utf8_in_unchecked.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/items_after_statements.rs88
-rw-r--r--src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/large_const_arrays.rs86
-rw-r--r--src/tools/clippy/clippy_lints/src/large_enum_variant.rs200
-rw-r--r--src/tools/clippy/clippy_lints/src/large_include_file.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/large_stack_arrays.rs67
-rw-r--r--src/tools/clippy/clippy_lints/src/len_zero.rs495
-rw-r--r--src/tools/clippy/clippy_lints/src/let_if_seq.rs160
-rw-r--r--src/tools/clippy/clippy_lints/src/let_underscore.rs171
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.deprecated.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_all.rs352
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_cargo.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_complexity.rs105
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_correctness.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_internal.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_lints.rs597
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_nursery.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_pedantic.rs102
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_perf.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_restriction.rs88
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_style.rs127
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs35
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.rs985
-rw-r--r--src/tools/clippy/clippy_lints/src/lifetimes.rs620
-rw-r--r--src/tools/clippy/clippy_lints/src/literal_representation.rs534
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/empty_loop.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs93
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/explicit_into_iter_loop.rs29
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs75
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/for_loops_over_fallibles.rs65
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/iter_next_loop.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_find.rs158
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs85
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs461
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs56
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/mod.rs768
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs167
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/needless_collect.rs369
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs380
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/never_loop.rs218
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/same_item_push.rs195
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs101
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/utils.rs358
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs128
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_let_loop.rs96
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs362
-rw-r--r--src/tools/clippy/clippy_lints/src/macro_use.rs221
-rw-r--r--src/tools/clippy/clippy_lints/src/main_recursion.rs63
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_assert.rs71
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_async_fn.rs202
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_bits.rs146
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs221
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_ok_or.rs98
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs123
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_retain.rs228
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_strip.rs252
-rw-r--r--src/tools/clippy/clippy_lints/src/map_clone.rs167
-rw-r--r--src/tools/clippy/clippy_lints/src/map_err_ignore.rs154
-rw-r--r--src/tools/clippy/clippy_lints/src/map_unit_fn.rs272
-rw-r--r--src/tools/clippy/clippy_lints/src/match_result_ok.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs143
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_map.rs306
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs83
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs85
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_bool.rs75
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs171
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs61
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs414
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs216
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs125
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs196
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/mod.rs1134
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/needless_match.rs207
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs194
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs304
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs400
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/single_match.rs248
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/try_err.rs145
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/mem_forget.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/mem_replace.rs264
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs190
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bytes_nth.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_last_cmp.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_last_cmp_with_unwrap.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_next_cmp.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_next_cmp_with_unwrap.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs43
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/cloned_instead_of_copied.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/err_expect.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs173
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/expect_used.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map.rs197
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_next.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs83
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/get_last_with_len.rs55
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs58
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs67
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs23
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs56
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_count.rs48
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_nth.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_nth_zero.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs59
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs47
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs164
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs47
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_flatten.rs73
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_identity.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs79
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/mod.rs3052
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs47
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/ok_expect.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs120
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs122
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs139
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs175
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs68
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/search_is_some.rs156
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/single_char_add_str.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/single_char_insert_string.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/single_char_push_string.rs27
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/skip_while_next.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/str_splitn.rs390
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs48
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs95
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs104
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs431
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs40
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/useless_asref.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/utils.rs168
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/wrong_self_convention.rs154
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/zst_offset.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/minmax.rs122
-rw-r--r--src/tools/clippy/clippy_lints/src/misc.rs342
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/double_neg.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/literal_suffix.rs38
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/mixed_case_hex_literals.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/mod.rs416
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/unneeded_field_pattern.rs73
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/misc_early/zero_prefixed_literal.rs29
-rw-r--r--src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs122
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs174
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_doc.rs180
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs102
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_inline.rs172
-rw-r--r--src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs350
-rw-r--r--src/tools/clippy/clippy_lints/src/module_style.rs166
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_key.rs175
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_mut.rs114
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_mutex_lock.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_reference.rs95
-rw-r--r--src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs124
-rw-r--r--src/tools/clippy/clippy_lints/src/mutex_atomic.rs110
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs139
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_bool.rs385
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_continue.rs479
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_for_each.rs160
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_late_init.rs390
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs347
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_question_mark.rs143
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_update.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/neg_multiply.rs80
-rw-r--r--src/tools/clippy/clippy_lints/src/new_without_default.rs169
-rw-r--r--src/tools/clippy/clippy_lints/src/no_effect.rs277
-rw-r--r--src/tools/clippy/clippy_lints/src/non_copy_const.rs449
-rw-r--r--src/tools/clippy/clippy_lints/src/non_expressive_names.rs427
-rw-r--r--src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs106
-rw-r--r--src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs251
-rw-r--r--src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs282
-rw-r--r--src/tools/clippy/clippy_lints/src/octal_escapes.rs151
-rw-r--r--src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs660
-rw-r--r--src/tools/clippy/clippy_lints/src/open_options.rs202
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/absurd_extreme_comparisons.rs142
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/arithmetic.rs119
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs181
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/bit_mask.rs197
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/cmp_nan.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs147
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/double_comparison.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/duration_subsec.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/eq_op.rs45
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/erasing_op.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/float_cmp.rs139
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs71
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/identity_op.rs148
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/integer_division.rs27
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs84
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/mod.rs888
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs126
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/modulo_one.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/needless_bitwise_bool.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs128
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/op_ref.rs218
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs65
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/self_assignment.rs20
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/option_env_unwrap.rs56
-rw-r--r--src/tools/clippy/clippy_lints/src/option_if_let_else.rs186
-rw-r--r--src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs75
-rw-r--r--src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/panic_unimplemented.rs116
-rw-r--r--src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs57
-rw-r--r--src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs313
-rw-r--r--src/tools/clippy/clippy_lints/src/path_buf_push_overwrite.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs194
-rw-r--r--src/tools/clippy/clippy_lints/src/precedence.rs161
-rw-r--r--src/tools/clippy/clippy_lints/src/ptr.rs684
-rw-r--r--src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs153
-rw-r--r--src/tools/clippy/clippy_lints/src/pub_use.rs56
-rw-r--r--src/tools/clippy/clippy_lints/src/question_mark.rs231
-rw-r--r--src/tools/clippy/clippy_lints/src/ranges.rs598
-rw-r--r--src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs139
-rw-r--r--src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs142
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_clone.rs776
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_closure_call.rs157
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_else.rs138
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_field_names.rs86
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs94
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_slicing.rs169
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs117
-rw-r--r--src/tools/clippy/clippy_lints/src/ref_option_ref.rs71
-rw-r--r--src/tools/clippy/clippy_lints/src/reference.rs105
-rw-r--r--src/tools/clippy/clippy_lints/src/regex.rs208
-rw-r--r--src/tools/clippy/clippy_lints/src/renamed_lints.rs40
-rw-r--r--src/tools/clippy/clippy_lints/src/repeat_once.rs89
-rw-r--r--src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs134
-rw-r--r--src/tools/clippy/clippy_lints/src/returns.rs333
-rw-r--r--src/tools/clippy/clippy_lints/src/same_name_method.rs166
-rw-r--r--src/tools/clippy/clippy_lints/src/self_named_constructors.rs91
-rw-r--r--src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/serde_api.rs60
-rw-r--r--src/tools/clippy/clippy_lints/src/shadow.rs252
-rw-r--r--src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs63
-rw-r--r--src/tools/clippy/clippy_lints/src/single_component_path_imports.rs175
-rw-r--r--src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs154
-rw-r--r--src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs312
-rw-r--r--src/tools/clippy/clippy_lints/src/stable_sort_primitive.rs145
-rw-r--r--src/tools/clippy/clippy_lints/src/std_instead_of_core.rs148
-rw-r--r--src/tools/clippy/clippy_lints/src/strings.rs517
-rw-r--r--src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs88
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs693
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs116
-rw-r--r--src/tools/clippy/clippy_lints/src/swap.rs258
-rw-r--r--src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs80
-rw-r--r--src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs230
-rw-r--r--src/tools/clippy/clippy_lints/src/temporary_assignment.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/to_digit_is_some.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/trailing_empty_array.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/trait_bounds.rs376
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/crosspointer_transmute.rs37
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/mod.rs460
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs65
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_bool.rs42
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_char.rs46
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_float.rs48
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_num_to_bytes.rs49
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs84
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs89
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs372
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs72
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/utils.rs76
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/wrong_transmute.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/transmuting_null.rs89
-rw-r--r--src/tools/clippy/clippy_lints/src/types/borrowed_box.rs115
-rw-r--r--src/tools/clippy/clippy_lints/src/types/box_collection.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/types/linked_list.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/types/mod.rs574
-rw-r--r--src/tools/clippy/clippy_lints/src/types/option_option.rs28
-rw-r--r--src/tools/clippy/clippy_lints/src/types/rc_buffer.rs106
-rw-r--r--src/tools/clippy/clippy_lints/src/types/rc_mutex.rs30
-rw-r--r--src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs115
-rw-r--r--src/tools/clippy/clippy_lints/src/types/type_complexity.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/types/utils.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/types/vec_box.rs64
-rw-r--r--src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs358
-rw-r--r--src/tools/clippy/clippy_lints/src/unicode.rs142
-rw-r--r--src/tools/clippy/clippy_lints/src/uninit_vec.rs224
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_hash.rs78
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs184
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs165
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/mod.rs110
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs207
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/utils.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/unnamed_address.rs132
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs81
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs70
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_sort_by.rs258
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs177
-rw-r--r--src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs428
-rw-r--r--src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs79
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_async.rs86
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_io_amount.rs170
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_rounding.rs69
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_self.rs80
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_unit.rs148
-rw-r--r--src/tools/clippy/clippy_lints/src/unwrap.rs331
-rw-r--r--src/tools/clippy/clippy_lints/src/unwrap_in_result.rs133
-rw-r--r--src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs127
-rw-r--r--src/tools/clippy/clippy_lints/src/use_self.rs320
-rw-r--r--src/tools/clippy/clippy_lints/src/useless_conversion.rs189
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/author.rs741
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/conf.rs534
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/dump_hir.rs55
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints.rs1436
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs1169
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/mod.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/vec.rs164
-rw-r--r--src/tools/clippy/clippy_lints/src/vec_init_then_push.rs225
-rw-r--r--src/tools/clippy/clippy_lints/src/vec_resize_to_zero.rs64
-rw-r--r--src/tools/clippy/clippy_lints/src/verbose_file_reads.rs88
-rw-r--r--src/tools/clippy/clippy_lints/src/wildcard_imports.rs222
-rw-r--r--src/tools/clippy/clippy_lints/src/write.rs709
-rw-r--r--src/tools/clippy/clippy_lints/src/zero_div_zero.rs67
-rw-r--r--src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs94
-rw-r--r--src/tools/clippy/clippy_utils/Cargo.toml18
-rw-r--r--src/tools/clippy/clippy_utils/src/ast_utils.rs710
-rw-r--r--src/tools/clippy/clippy_utils/src/ast_utils/ident_iter.rs45
-rw-r--r--src/tools/clippy/clippy_utils/src/attrs.rs159
-rw-r--r--src/tools/clippy/clippy_utils/src/comparisons.rs36
-rw-r--r--src/tools/clippy/clippy_utils/src/consts.rs652
-rw-r--r--src/tools/clippy/clippy_utils/src/diagnostics.rs249
-rw-r--r--src/tools/clippy/clippy_utils/src/eager_or_lazy.rs234
-rw-r--r--src/tools/clippy/clippy_utils/src/higher.rs469
-rw-r--r--src/tools/clippy/clippy_utils/src/hir_utils.rs1031
-rw-r--r--src/tools/clippy/clippy_utils/src/lib.rs2304
-rw-r--r--src/tools/clippy/clippy_utils/src/macros.rs583
-rw-r--r--src/tools/clippy/clippy_utils/src/msrvs.rs39
-rw-r--r--src/tools/clippy/clippy_utils/src/numeric_literal.rs248
-rw-r--r--src/tools/clippy/clippy_utils/src/paths.rs196
-rw-r--r--src/tools/clippy/clippy_utils/src/ptr.rs57
-rw-r--r--src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs371
-rw-r--r--src/tools/clippy/clippy_utils/src/source.rs508
-rw-r--r--src/tools/clippy/clippy_utils/src/str_utils.rs325
-rw-r--r--src/tools/clippy/clippy_utils/src/sugg.rs1099
-rw-r--r--src/tools/clippy/clippy_utils/src/sym_helper.rs7
-rw-r--r--src/tools/clippy/clippy_utils/src/ty.rs829
-rw-r--r--src/tools/clippy/clippy_utils/src/usage.rs216
-rw-r--r--src/tools/clippy/clippy_utils/src/visitors.rs733
-rw-r--r--src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md69
-rw-r--r--src/tools/clippy/etc/relicense/contributors.txt232
-rw-r--r--src/tools/clippy/etc/relicense/relicense_comments.txt227
-rw-r--r--src/tools/clippy/lintcheck/Cargo.toml24
-rw-r--r--src/tools/clippy/lintcheck/README.md77
-rw-r--r--src/tools/clippy/lintcheck/lintcheck_crates.toml35
-rw-r--r--src/tools/clippy/lintcheck/src/config.rs124
-rw-r--r--src/tools/clippy/lintcheck/src/main.rs814
-rw-r--r--src/tools/clippy/lintcheck/test_sources.toml4
-rw-r--r--src/tools/clippy/rust-toolchain3
-rw-r--r--src/tools/clippy/rustc_tools_util/Cargo.toml15
l---------src/tools/clippy/rustc_tools_util/LICENSE-APACHE1
l---------src/tools/clippy/rustc_tools_util/LICENSE-MIT1
-rw-r--r--src/tools/clippy/rustc_tools_util/README.md62
-rw-r--r--src/tools/clippy/rustc_tools_util/src/lib.rs162
-rw-r--r--src/tools/clippy/rustfmt.toml7
-rw-r--r--src/tools/clippy/src/driver.rs353
-rw-r--r--src/tools/clippy/src/main.rs194
-rw-r--r--src/tools/clippy/tests/check-fmt.rs28
-rw-r--r--src/tools/clippy/tests/clippy.toml1
-rw-r--r--src/tools/clippy/tests/compile-test.rs509
-rw-r--r--src/tools/clippy/tests/dogfood.rs104
-rw-r--r--src/tools/clippy/tests/integration.rs89
-rw-r--r--src/tools/clippy/tests/lint_message_convention.rs116
-rw-r--r--src/tools/clippy/tests/missing-test-files.rs69
-rw-r--r--src/tools/clippy/tests/test_utils/mod.rs13
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/Cargo.toml6
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.stderr16
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/Cargo.toml6
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.stderr16
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/Cargo.toml6
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.stderr16
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/Cargo.toml12
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/Cargo.toml6
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/Cargo.toml6
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.stderr16
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.stderr14
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.stderr14
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/Cargo.toml8
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.stderr14
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.rs16
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.stderr14
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/Cargo.toml8
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/src/main.rs13
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.rs11
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.stderr4
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/Cargo.toml5
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/a.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/b.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/c.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/d.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/from_other_module.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.rs28
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.stderr53
-rw-r--r--src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/other_module/mod.rs2
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/fail/Cargo.toml21
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs7
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.stderr44
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/pass/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs7
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff.rs3
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff/most.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/mod.rs3
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.rs9
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.stderr19
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/bad/mod.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.rs7
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.stderr11
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_mod/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/bad/mod.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/main.rs10
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/foo.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/inner/mod.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/mod.rs2
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/good.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/main.rs7
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/src/main.rs3
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/.clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.rs3
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr2
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/Cargo.toml19
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.lock109
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.toml10
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.stderr6
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/Cargo.toml10
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs4
-rwxr-xr-xsrc/tools/clippy/tests/ui-cargo/update-all-references.sh3
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.stderr6
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/Cargo.toml9
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs4
-rw-r--r--src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs87
-rw-r--r--src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr68
-rw-r--r--src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed57
-rw-r--r--src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs67
-rw-r--r--src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr49
-rw-r--r--src/tools/clippy/tests/ui-internal/custom_ice_message.rs11
-rw-r--r--src/tools/clippy/tests/ui-internal/custom_ice_message.stderr13
-rw-r--r--src/tools/clippy/tests/ui-internal/default_deprecation_reason.rs30
-rw-r--r--src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr22
-rw-r--r--src/tools/clippy/tests/ui-internal/default_lint.rs28
-rw-r--r--src/tools/clippy/tests/ui-internal/default_lint.stderr21
-rw-r--r--src/tools/clippy/tests/ui-internal/if_chain_style.rs92
-rw-r--r--src/tools/clippy/tests/ui-internal/if_chain_style.stderr85
-rw-r--r--src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed37
-rw-r--r--src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs37
-rw-r--r--src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr33
-rw-r--r--src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed40
-rw-r--r--src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs38
-rw-r--r--src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr32
-rw-r--r--src/tools/clippy/tests/ui-internal/invalid_paths.rs27
-rw-r--r--src/tools/clippy/tests/ui-internal/invalid_paths.stderr22
-rw-r--r--src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs45
-rw-r--r--src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr21
-rw-r--r--src/tools/clippy/tests/ui-internal/match_type_on_diag_item.rs39
-rw-r--r--src/tools/clippy/tests/ui-internal/match_type_on_diag_item.stderr27
-rw-r--r--src/tools/clippy/tests/ui-internal/outer_expn_data.fixed29
-rw-r--r--src/tools/clippy/tests/ui-internal/outer_expn_data.rs29
-rw-r--r--src/tools/clippy/tests/ui-internal/outer_expn_data.stderr15
-rw-r--r--src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed21
-rw-r--r--src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs21
-rw-r--r--src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr39
-rw-r--r--src/tools/clippy/tests/ui-toml/arithmetic_allowed/arithmetic_allowed.rs24
-rw-r--r--src/tools/clippy/tests/ui-toml/arithmetic_allowed/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.rs41
-rw-r--r--src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.stderr25
-rw-r--r--src/tools/clippy/tests/ui-toml/await_holding_invalid_type/clippy.toml4
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml/clippy.toml2
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.rs1
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml_type/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.rs1
-rw-r--r--src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.rs10
-rw-r--r--src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.stderr16
-rw-r--r--src/tools/clippy/tests/ui-toml/blacklisted_names_append/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.rs10
-rw-r--r--src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.stderr10
-rw-r--r--src/tools/clippy/tests/ui-toml/blacklisted_names_replace/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/conf_deprecated_key/clippy.toml6
-rw-r--r--src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.rs1
-rw-r--r--src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/dbg_macro/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.rs39
-rw-r--r--src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.stderr102
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_append/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.rs12
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr14
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.rs12
-rw-r--r--src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.stderr36
-rw-r--r--src/tools/clippy/tests/ui-toml/expect_used/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/expect_used/expect_used.rs29
-rw-r--r--src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr19
-rw-r--r--src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.rs6
-rw-r--r--src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr11
-rw-r--r--src/tools/clippy/tests/ui-toml/functions_maxlines/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/functions_maxlines/test.rs60
-rw-r--r--src/tools/clippy/tests/ui-toml/functions_maxlines/test.stderr43
-rw-r--r--src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/clippy.toml3
-rw-r--r--src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/conf_no_false_negatives.rs1
-rw-r--r--src/tools/clippy/tests/ui-toml/invalid_min_rust_version/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.rs3
-rw-r--r--src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/large_include_file/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs16
-rw-r--r--src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr21
-rw-r--r--src/tools/clippy/tests/ui-toml/large_include_file/too_big.txt1
-rw-r--r--src/tools/clippy/tests/ui-toml/lint_decimal_readability/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.rs23
-rw-r--r--src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.stderr10
-rw-r--r--src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs23
-rw-r--r--src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr22
-rw-r--r--src/tools/clippy/tests/ui-toml/min_rust_version/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.rs98
-rw-r--r--src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr10
-rw-r--r--src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/clippy.toml10
-rw-r--r--src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.rs16
-rw-r--r--src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.stderr40
-rw-r--r--src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/auxiliary/proc_macro_derive.rs18
-rw-r--r--src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/clippy.toml6
-rw-r--r--src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.rs60
-rw-r--r--src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.stderr94
-rw-r--r--src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs43
-rw-r--r--src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.stderr91
-rw-r--r--src/tools/clippy/tests/ui-toml/struct_excessive_bools/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.rs9
-rw-r--r--src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr13
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_blacklist/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.rs20
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.stderr46
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_disallowed_methods/clippy.toml10
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.rs23
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.stderr54
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_disallowed_types/clippy.toml15
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.rs42
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.stderr132
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_trivially_copy/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs20
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr20
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_unknown_key/clippy.toml6
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.rs1
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr45
-rw-r--r--src/tools/clippy/tests/ui-toml/unwrap_used/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs73
-rw-r--r--src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr197
-rwxr-xr-xsrc/tools/clippy/tests/ui-toml/update-all-references.sh3
-rw-r--r--src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.rs44
-rw-r--r--src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.stderr82
-rw-r--r--src/tools/clippy/tests/ui-toml/vec_box_sized/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/vec_box_sized/test.rs15
-rw-r--r--src/tools/clippy/tests/ui-toml/vec_box_sized/test.stderr22
-rw-r--r--src/tools/clippy/tests/ui-toml/zero_single_char_names/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/zero_single_char_names/zero_single_char_names.rs3
-rw-r--r--src/tools/clippy/tests/ui/absurd-extreme-comparisons.rs61
-rw-r--r--src/tools/clippy/tests/ui/absurd-extreme-comparisons.stderr147
-rw-r--r--src/tools/clippy/tests/ui/allow_attributes_without_reason.rs14
-rw-r--r--src/tools/clippy/tests/ui/allow_attributes_without_reason.stderr23
-rw-r--r--src/tools/clippy/tests/ui/almost_complete_letter_range.fixed67
-rw-r--r--src/tools/clippy/tests/ui/almost_complete_letter_range.rs67
-rw-r--r--src/tools/clippy/tests/ui/almost_complete_letter_range.stderr100
-rw-r--r--src/tools/clippy/tests/ui/approx_const.rs64
-rw-r--r--src/tools/clippy/tests/ui/approx_const.stderr187
-rw-r--r--src/tools/clippy/tests/ui/arithmetic.fixed27
-rw-r--r--src/tools/clippy/tests/ui/arithmetic.rs27
-rw-r--r--src/tools/clippy/tests/ui/as_conversions.rs20
-rw-r--r--src/tools/clippy/tests/ui/as_conversions.stderr27
-rw-r--r--src/tools/clippy/tests/ui/as_underscore.fixed13
-rw-r--r--src/tools/clippy/tests/ui/as_underscore.rs13
-rw-r--r--src/tools/clippy/tests/ui/as_underscore.stderr20
-rw-r--r--src/tools/clippy/tests/ui/asm_syntax.rs34
-rw-r--r--src/tools/clippy/tests/ui/asm_syntax.stderr44
-rw-r--r--src/tools/clippy/tests/ui/assertions_on_constants.rs39
-rw-r--r--src/tools/clippy/tests/ui/assertions_on_constants.stderr75
-rw-r--r--src/tools/clippy/tests/ui/assertions_on_result_states.fixed69
-rw-r--r--src/tools/clippy/tests/ui/assertions_on_result_states.rs69
-rw-r--r--src/tools/clippy/tests/ui/assertions_on_result_states.stderr40
-rw-r--r--src/tools/clippy/tests/ui/assign_ops.fixed32
-rw-r--r--src/tools/clippy/tests/ui/assign_ops.rs32
-rw-r--r--src/tools/clippy/tests/ui/assign_ops.stderr70
-rw-r--r--src/tools/clippy/tests/ui/assign_ops2.rs55
-rw-r--r--src/tools/clippy/tests/ui/assign_ops2.stderr146
-rw-r--r--src/tools/clippy/tests/ui/async_yields_async.fixed78
-rw-r--r--src/tools/clippy/tests/ui/async_yields_async.rs78
-rw-r--r--src/tools/clippy/tests/ui/async_yields_async.stderr96
-rw-r--r--src/tools/clippy/tests/ui/attrs.rs45
-rw-r--r--src/tools/clippy/tests/ui/attrs.stderr24
-rw-r--r--src/tools/clippy/tests/ui/author.rs4
-rw-r--r--src/tools/clippy/tests/ui/author.stdout14
-rw-r--r--src/tools/clippy/tests/ui/author/blocks.rs24
-rw-r--r--src/tools/clippy/tests/ui/author/blocks.stdout64
-rw-r--r--src/tools/clippy/tests/ui/author/call.rs4
-rw-r--r--src/tools/clippy/tests/ui/author/call.stdout16
-rw-r--r--src/tools/clippy/tests/ui/author/if.rs17
-rw-r--r--src/tools/clippy/tests/ui/author/if.stdout50
-rw-r--r--src/tools/clippy/tests/ui/author/issue_3849.rs14
-rw-r--r--src/tools/clippy/tests/ui/author/issue_3849.stdout14
-rw-r--r--src/tools/clippy/tests/ui/author/loop.rs36
-rw-r--r--src/tools/clippy/tests/ui/author/loop.stdout113
-rw-r--r--src/tools/clippy/tests/ui/author/matches.rs13
-rw-r--r--src/tools/clippy/tests/ui/author/matches.stdout38
-rw-r--r--src/tools/clippy/tests/ui/author/repeat.rs5
-rw-r--r--src/tools/clippy/tests/ui/author/repeat.stdout12
-rw-r--r--src/tools/clippy/tests/ui/author/struct.rs40
-rw-r--r--src/tools/clippy/tests/ui/author/struct.stdout64
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs8
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs6
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/macro_rules.rs142
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs60
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/non-exhaustive-enum.rs8
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/option_helpers.rs64
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs101
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs88
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_suspicious_else_formatting.rs74
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_unsafe.rs18
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs32
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/test_macro.rs11
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/use_self_macro.rs15
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/wildcard_imports_helper.rs27
-rw-r--r--src/tools/clippy/tests/ui/await_holding_lock.rs192
-rw-r--r--src/tools/clippy/tests/ui/await_holding_lock.stderr208
-rw-r--r--src/tools/clippy/tests/ui/await_holding_refcell_ref.rs85
-rw-r--r--src/tools/clippy/tests/ui/await_holding_refcell_ref.stderr101
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map.fixed25
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map.rs25
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map.stderr26
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed62
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs62
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr91
-rw-r--r--src/tools/clippy/tests/ui/bit_masks.rs63
-rw-r--r--src/tools/clippy/tests/ui/bit_masks.stderr110
-rw-r--r--src/tools/clippy/tests/ui/blacklisted_name.rs57
-rw-r--r--src/tools/clippy/tests/ui/blacklisted_name.stderr88
-rw-r--r--src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.rs8
-rw-r--r--src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.stderr27
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed65
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_if_conditions.rs65
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr34
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs64
-rw-r--r--src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr24
-rw-r--r--src/tools/clippy/tests/ui/bool_assert_comparison.rs122
-rw-r--r--src/tools/clippy/tests/ui/bool_assert_comparison.stderr136
-rw-r--r--src/tools/clippy/tests/ui/bool_comparison.fixed167
-rw-r--r--src/tools/clippy/tests/ui/bool_comparison.rs167
-rw-r--r--src/tools/clippy/tests/ui/bool_comparison.stderr136
-rw-r--r--src/tools/clippy/tests/ui/borrow_as_ptr.fixed10
-rw-r--r--src/tools/clippy/tests/ui/borrow_as_ptr.rs10
-rw-r--r--src/tools/clippy/tests/ui/borrow_as_ptr.stderr16
-rw-r--r--src/tools/clippy/tests/ui/borrow_as_ptr_no_std.fixed22
-rw-r--r--src/tools/clippy/tests/ui/borrow_as_ptr_no_std.rs22
-rw-r--r--src/tools/clippy/tests/ui/borrow_as_ptr_no_std.stderr16
-rw-r--r--src/tools/clippy/tests/ui/borrow_box.rs115
-rw-r--r--src/tools/clippy/tests/ui/borrow_box.stderr68
-rw-r--r--src/tools/clippy/tests/ui/borrow_deref_ref.fixed59
-rw-r--r--src/tools/clippy/tests/ui/borrow_deref_ref.rs59
-rw-r--r--src/tools/clippy/tests/ui/borrow_deref_ref.stderr22
-rw-r--r--src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.rs10
-rw-r--r--src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr18
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs17
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.rs101
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.stderr75
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs104
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr115
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.rs202
-rw-r--r--src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.stderr123
-rw-r--r--src/tools/clippy/tests/ui/box_collection.rs56
-rw-r--r--src/tools/clippy/tests/ui/box_collection.stderr75
-rw-r--r--src/tools/clippy/tests/ui/boxed_local.rs209
-rw-r--r--src/tools/clippy/tests/ui/boxed_local.stderr28
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/false_positives.rs95
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.rs223
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.stderr143
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.rs114
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.stderr121
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.rs119
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.stderr155
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.rs155
-rw-r--r--src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.stderr101
-rw-r--r--src/tools/clippy/tests/ui/builtin_type_shadow.rs9
-rw-r--r--src/tools/clippy/tests/ui/builtin_type_shadow.stderr24
-rw-r--r--src/tools/clippy/tests/ui/bytecount.rs26
-rw-r--r--src/tools/clippy/tests/ui/bytecount.stderr26
-rw-r--r--src/tools/clippy/tests/ui/bytes_count_to_len.fixed34
-rw-r--r--src/tools/clippy/tests/ui/bytes_count_to_len.rs34
-rw-r--r--src/tools/clippy/tests/ui/bytes_count_to_len.stderr28
-rw-r--r--src/tools/clippy/tests/ui/bytes_nth.fixed11
-rw-r--r--src/tools/clippy/tests/ui/bytes_nth.rs11
-rw-r--r--src/tools/clippy/tests/ui/bytes_nth.stderr22
-rw-r--r--src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs44
-rw-r--r--src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr43
-rw-r--r--src/tools/clippy/tests/ui/cast.rs262
-rw-r--r--src/tools/clippy/tests/ui/cast.stderr210
-rw-r--r--src/tools/clippy/tests/ui/cast_abs_to_unsigned.fixed29
-rw-r--r--src/tools/clippy/tests/ui/cast_abs_to_unsigned.rs29
-rw-r--r--src/tools/clippy/tests/ui/cast_abs_to_unsigned.stderr100
-rw-r--r--src/tools/clippy/tests/ui/cast_alignment.rs51
-rw-r--r--src/tools/clippy/tests/ui/cast_alignment.stderr28
-rw-r--r--src/tools/clippy/tests/ui/cast_enum_constructor.rs17
-rw-r--r--src/tools/clippy/tests/ui/cast_enum_constructor.stderr16
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_bool.fixed42
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_bool.rs42
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_bool.stderr82
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_float.fixed45
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_float.rs45
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_float.stderr70
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_integer.fixed47
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_integer.rs47
-rw-r--r--src/tools/clippy/tests/ui/cast_lossless_integer.stderr118
-rw-r--r--src/tools/clippy/tests/ui/cast_ref_to_mut.rs31
-rw-r--r--src/tools/clippy/tests/ui/cast_ref_to_mut.stderr22
-rw-r--r--src/tools/clippy/tests/ui/cast_size.rs35
-rw-r--r--src/tools/clippy/tests/ui/cast_size.stderr116
-rw-r--r--src/tools/clippy/tests/ui/cast_size_32bit.rs35
-rw-r--r--src/tools/clippy/tests/ui/cast_size_32bit.stderr118
-rw-r--r--src/tools/clippy/tests/ui/cast_slice_different_sizes.rs82
-rw-r--r--src/tools/clippy/tests/ui/cast_slice_different_sizes.stderr121
-rw-r--r--src/tools/clippy/tests/ui/cfg_attr_rustfmt.fixed31
-rw-r--r--src/tools/clippy/tests/ui/cfg_attr_rustfmt.rs31
-rw-r--r--src/tools/clippy/tests/ui/cfg_attr_rustfmt.stderr16
-rw-r--r--src/tools/clippy/tests/ui/char_lit_as_u8.rs5
-rw-r--r--src/tools/clippy/tests/ui/char_lit_as_u8.stderr11
-rw-r--r--src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.fixed10
-rw-r--r--src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.rs10
-rw-r--r--src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.stderr35
-rw-r--r--src/tools/clippy/tests/ui/checked_conversions.fixed79
-rw-r--r--src/tools/clippy/tests/ui/checked_conversions.rs79
-rw-r--r--src/tools/clippy/tests/ui/checked_conversions.stderr100
-rw-r--r--src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.rs54
-rw-r--r--src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.stderr211
-rw-r--r--src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.rs15
-rw-r--r--src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.stderr31
-rw-r--r--src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.rs102
-rw-r--r--src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr167
-rw-r--r--src/tools/clippy/tests/ui/clone_on_copy.fixed74
-rw-r--r--src/tools/clippy/tests/ui/clone_on_copy.rs74
-rw-r--r--src/tools/clippy/tests/ui/clone_on_copy.stderr52
-rw-r--r--src/tools/clippy/tests/ui/clone_on_copy_impl.rs22
-rw-r--r--src/tools/clippy/tests/ui/cloned_instead_of_copied.fixed15
-rw-r--r--src/tools/clippy/tests/ui/cloned_instead_of_copied.rs15
-rw-r--r--src/tools/clippy/tests/ui/cloned_instead_of_copied.stderr34
-rw-r--r--src/tools/clippy/tests/ui/cmp_nan.rs34
-rw-r--r--src/tools/clippy/tests/ui/cmp_nan.stderr148
-rw-r--r--src/tools/clippy/tests/ui/cmp_null.rs17
-rw-r--r--src/tools/clippy/tests/ui/cmp_null.stderr16
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.fixed93
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.rs93
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.stderr46
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/comparison_flip.fixed29
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/comparison_flip.rs29
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/comparison_flip.stderr18
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed72
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs72
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr40
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/without_suggestion.rs75
-rw-r--r--src/tools/clippy/tests/ui/cmp_owned/without_suggestion.stderr22
-rw-r--r--src/tools/clippy/tests/ui/cognitive_complexity.rs395
-rw-r--r--src/tools/clippy/tests/ui/cognitive_complexity.stderr139
-rw-r--r--src/tools/clippy/tests/ui/cognitive_complexity_attr_used.rs15
-rw-r--r--src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr11
-rw-r--r--src/tools/clippy/tests/ui/collapsible_else_if.fixed84
-rw-r--r--src/tools/clippy/tests/ui/collapsible_else_if.rs100
-rw-r--r--src/tools/clippy/tests/ui/collapsible_else_if.stderr163
-rw-r--r--src/tools/clippy/tests/ui/collapsible_if.fixed148
-rw-r--r--src/tools/clippy/tests/ui/collapsible_if.rs164
-rw-r--r--src/tools/clippy/tests/ui/collapsible_if.stderr130
-rw-r--r--src/tools/clippy/tests/ui/collapsible_match.rs265
-rw-r--r--src/tools/clippy/tests/ui/collapsible_match.stderr179
-rw-r--r--src/tools/clippy/tests/ui/collapsible_match2.rs87
-rw-r--r--src/tools/clippy/tests/ui/collapsible_match2.stderr97
-rw-r--r--src/tools/clippy/tests/ui/comparison_chain.rs234
-rw-r--r--src/tools/clippy/tests/ui/comparison_chain.stderr97
-rw-r--r--src/tools/clippy/tests/ui/comparison_to_empty.fixed23
-rw-r--r--src/tools/clippy/tests/ui/comparison_to_empty.rs23
-rw-r--r--src/tools/clippy/tests/ui/comparison_to_empty.stderr28
-rw-r--r--src/tools/clippy/tests/ui/copy_iterator.rs21
-rw-r--r--src/tools/clippy/tests/ui/copy_iterator.stderr17
-rw-r--r--src/tools/clippy/tests/ui/crashes/associated-constant-ice.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/ice-4727-aux.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/ice-7272-aux.rs14
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/ice-7868-aux.rs3
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/ice-7934-aux.rs4
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/ice-8681-aux.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/proc_macro_crash.rs38
-rw-r--r--src/tools/clippy/tests/ui/crashes/auxiliary/use_self_macro.rs15
-rw-r--r--src/tools/clippy/tests/ui/crashes/cc_seme.rs27
-rw-r--r--src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-1588.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-1782.rs26
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-1969.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2499.rs26
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2594.rs20
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2727.rs7
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2760.rs23
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2774.rs27
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2774.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2862.rs16
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-2865.rs16
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3151.rs15
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3462.rs23
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-360.rs12
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-360.stderr25
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3717.rs10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3717.stderr22
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3741.rs10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3747.rs17
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3891.rs3
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3891.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3969.rs50
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-3969.stderr34
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4121.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4545.rs14
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4579.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4671.rs21
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4727.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4760.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4775.rs11
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-4968.rs21
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5207.rs5
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5223.rs15
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5238.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5389.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5497.rs11
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5497.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5579.rs17
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5835.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5835.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5872.rs5
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5872.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-5944.rs14
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6139.rs7
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6153.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6179.rs21
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6250.rs16
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6250.stderr30
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6251.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6251.stderr41
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6252.rs14
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6252.stderr36
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6254.rs16
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6254.stderr12
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6255.rs15
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6255.stderr13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6256.rs15
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6256.stderr14
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6332.rs11
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6539.rs16
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6792.rs20
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6793.rs23
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6840.rs31
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-700.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7012.rs17
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7126.rs14
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7169.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7169.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7231.rs9
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7272.rs12
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7340.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7410.rs32
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7423.rs13
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7868.rs7
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7868.stderr11
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7869.rs7
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7869.stderr15
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7934.rs7
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8250.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8250.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8386.rs3
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8681.rs10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8821.rs8
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8821.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8850.rs27
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8850.stderr45
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-9041.rs8
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-9041.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-9238.rs12
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-9242.rs8
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-96721.rs10
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-96721.stderr8
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice_exacte_size.rs19
-rw-r--r--src/tools/clippy/tests/ui/crashes/if_same_then_else.rs16
-rw-r--r--src/tools/clippy/tests/ui/crashes/implements-trait.rs5
-rw-r--r--src/tools/clippy/tests/ui/crashes/inherent_impl.rs26
-rw-r--r--src/tools/clippy/tests/ui/crashes/issue-825.rs25
-rw-r--r--src/tools/clippy/tests/ui/crashes/issues_loop_mut_cond.rs28
-rw-r--r--src/tools/clippy/tests/ui/crashes/match_same_arms_const.rs18
-rw-r--r--src/tools/clippy/tests/ui/crashes/mut_mut_macro.rs34
-rw-r--r--src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs7
-rw-r--r--src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.rs20
-rw-r--r--src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr14
-rw-r--r--src/tools/clippy/tests/ui/crashes/regressions.rs11
-rw-r--r--src/tools/clippy/tests/ui/crashes/returns.rs23
-rw-r--r--src/tools/clippy/tests/ui/crashes/shadow.rs6
-rw-r--r--src/tools/clippy/tests/ui/crashes/single-match-else.rs11
-rw-r--r--src/tools/clippy/tests/ui/crashes/third-party/clippy.toml3
-rw-r--r--src/tools/clippy/tests/ui/crashes/third-party/conf_allowlisted.rs1
-rw-r--r--src/tools/clippy/tests/ui/crashes/trivial_bounds.rs11
-rw-r--r--src/tools/clippy/tests/ui/crashes/used_underscore_binding_macro.rs16
-rw-r--r--src/tools/clippy/tests/ui/crate_in_macro_def.fixed56
-rw-r--r--src/tools/clippy/tests/ui/crate_in_macro_def.rs56
-rw-r--r--src/tools/clippy/tests/ui/crate_in_macro_def.stderr10
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.rs11
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.stderr11
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/no_std_main_recursion.rs33
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs14
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr12
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.rs6
-rw-r--r--src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr11
-rw-r--r--src/tools/clippy/tests/ui/create_dir.fixed17
-rw-r--r--src/tools/clippy/tests/ui/create_dir.rs17
-rw-r--r--src/tools/clippy/tests/ui/create_dir.stderr16
-rw-r--r--src/tools/clippy/tests/ui/dbg_macro.rs60
-rw-r--r--src/tools/clippy/tests/ui/dbg_macro.stderr146
-rw-r--r--src/tools/clippy/tests/ui/debug_assert_with_mut_call.rs133
-rw-r--r--src/tools/clippy/tests/ui/debug_assert_with_mut_call.stderr172
-rw-r--r--src/tools/clippy/tests/ui/decimal_literal_representation.fixed27
-rw-r--r--src/tools/clippy/tests/ui/decimal_literal_representation.rs27
-rw-r--r--src/tools/clippy/tests/ui/decimal_literal_representation.stderr46
-rw-r--r--src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.rs123
-rw-r--r--src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.stderr89
-rw-r--r--src/tools/clippy/tests/ui/declare_interior_mutable_const/others.rs55
-rw-r--r--src/tools/clippy/tests/ui/declare_interior_mutable_const/others.stderr50
-rw-r--r--src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.rs150
-rw-r--r--src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.stderr75
-rw-r--r--src/tools/clippy/tests/ui/def_id_nocore.rs31
-rw-r--r--src/tools/clippy/tests/ui/def_id_nocore.stderr11
-rw-r--r--src/tools/clippy/tests/ui/default_instead_of_iter_empty.fixed21
-rw-r--r--src/tools/clippy/tests/ui/default_instead_of_iter_empty.rs21
-rw-r--r--src/tools/clippy/tests/ui/default_instead_of_iter_empty.stderr22
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed177
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs177
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr147
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed182
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs182
-rw-r--r--src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr159
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.fixed99
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.rs99
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.stderr56
-rw-r--r--src/tools/clippy/tests/ui/default_union_representation.rs78
-rw-r--r--src/tools/clippy/tests/ui/default_union_representation.stderr48
-rw-r--r--src/tools/clippy/tests/ui/deprecated.rs22
-rw-r--r--src/tools/clippy/tests/ui/deprecated.stderr100
-rw-r--r--src/tools/clippy/tests/ui/deprecated_old.rs5
-rw-r--r--src/tools/clippy/tests/ui/deprecated_old.stderr22
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.fixed68
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.rs68
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.stderr74
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof_double_trigger.rs23
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr22
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof_macro.rs10
-rw-r--r--src/tools/clippy/tests/ui/deref_by_slicing.fixed30
-rw-r--r--src/tools/clippy/tests/ui/deref_by_slicing.rs30
-rw-r--r--src/tools/clippy/tests/ui/deref_by_slicing.stderr58
-rw-r--r--src/tools/clippy/tests/ui/derivable_impls.rs243
-rw-r--r--src/tools/clippy/tests/ui/derivable_impls.stderr89
-rw-r--r--src/tools/clippy/tests/ui/derive.rs89
-rw-r--r--src/tools/clippy/tests/ui/derive.stderr103
-rw-r--r--src/tools/clippy/tests/ui/derive_hash_xor_eq.rs56
-rw-r--r--src/tools/clippy/tests/ui/derive_hash_xor_eq.stderr59
-rw-r--r--src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs69
-rw-r--r--src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr63
-rw-r--r--src/tools/clippy/tests/ui/derive_partial_eq_without_eq.fixed126
-rw-r--r--src/tools/clippy/tests/ui/derive_partial_eq_without_eq.rs126
-rw-r--r--src/tools/clippy/tests/ui/derive_partial_eq_without_eq.stderr70
-rw-r--r--src/tools/clippy/tests/ui/disallowed_script_idents.rs10
-rw-r--r--src/tools/clippy/tests/ui/disallowed_script_idents.stderr20
-rw-r--r--src/tools/clippy/tests/ui/diverging_sub_expression.rs41
-rw-r--r--src/tools/clippy/tests/ui/diverging_sub_expression.stderr48
-rw-r--r--src/tools/clippy/tests/ui/doc/doc-fixable.fixed215
-rw-r--r--src/tools/clippy/tests/ui/doc/doc-fixable.rs215
-rw-r--r--src/tools/clippy/tests/ui/doc/doc-fixable.stderr333
-rw-r--r--src/tools/clippy/tests/ui/doc/issue_1832.rs9
-rw-r--r--src/tools/clippy/tests/ui/doc/issue_902.rs7
-rw-r--r--src/tools/clippy/tests/ui/doc/unbalanced_ticks.rs43
-rw-r--r--src/tools/clippy/tests/ui/doc/unbalanced_ticks.stderr79
-rw-r--r--src/tools/clippy/tests/ui/doc_errors.rs104
-rw-r--r--src/tools/clippy/tests/ui/doc_errors.stderr58
-rw-r--r--src/tools/clippy/tests/ui/doc_link_with_quotes.rs12
-rw-r--r--src/tools/clippy/tests/ui/doc_link_with_quotes.stderr10
-rw-r--r--src/tools/clippy/tests/ui/doc_unsafe.rs134
-rw-r--r--src/tools/clippy/tests/ui/doc_unsafe.stderr55
-rw-r--r--src/tools/clippy/tests/ui/double_comparison.fixed30
-rw-r--r--src/tools/clippy/tests/ui/double_comparison.rs30
-rw-r--r--src/tools/clippy/tests/ui/double_comparison.stderr52
-rw-r--r--src/tools/clippy/tests/ui/double_must_use.rs28
-rw-r--r--src/tools/clippy/tests/ui/double_must_use.stderr27
-rw-r--r--src/tools/clippy/tests/ui/double_neg.rs8
-rw-r--r--src/tools/clippy/tests/ui/double_neg.stderr10
-rw-r--r--src/tools/clippy/tests/ui/double_parens.rs56
-rw-r--r--src/tools/clippy/tests/ui/double_parens.stderr40
-rw-r--r--src/tools/clippy/tests/ui/drop_forget_copy.rs66
-rw-r--r--src/tools/clippy/tests/ui/drop_forget_copy.stderr76
-rw-r--r--src/tools/clippy/tests/ui/drop_non_drop.rs40
-rw-r--r--src/tools/clippy/tests/ui/drop_non_drop.stderr27
-rw-r--r--src/tools/clippy/tests/ui/drop_ref.rs74
-rw-r--r--src/tools/clippy/tests/ui/drop_ref.stderr111
-rw-r--r--src/tools/clippy/tests/ui/duplicate_underscore_argument.rs10
-rw-r--r--src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr10
-rw-r--r--src/tools/clippy/tests/ui/duration_subsec.fixed29
-rw-r--r--src/tools/clippy/tests/ui/duration_subsec.rs29
-rw-r--r--src/tools/clippy/tests/ui/duration_subsec.stderr34
-rw-r--r--src/tools/clippy/tests/ui/else_if_without_else.rs58
-rw-r--r--src/tools/clippy/tests/ui/else_if_without_else.stderr27
-rw-r--r--src/tools/clippy/tests/ui/empty_drop.fixed24
-rw-r--r--src/tools/clippy/tests/ui/empty_drop.rs30
-rw-r--r--src/tools/clippy/tests/ui/empty_drop.stderr22
-rw-r--r--src/tools/clippy/tests/ui/empty_enum.rs7
-rw-r--r--src/tools/clippy/tests/ui/empty_enum.stderr11
-rw-r--r--src/tools/clippy/tests/ui/empty_enum_without_never_type.rs7
-rw-r--r--src/tools/clippy/tests/ui/empty_line_after_outer_attribute.rs120
-rw-r--r--src/tools/clippy/tests/ui/empty_line_after_outer_attribute.stderr54
-rw-r--r--src/tools/clippy/tests/ui/empty_loop.rs51
-rw-r--r--src/tools/clippy/tests/ui/empty_loop.stderr27
-rw-r--r--src/tools/clippy/tests/ui/empty_loop_no_std.rs27
-rw-r--r--src/tools/clippy/tests/ui/empty_loop_no_std.stderr19
-rw-r--r--src/tools/clippy/tests/ui/empty_structs_with_brackets.fixed25
-rw-r--r--src/tools/clippy/tests/ui/empty_structs_with_brackets.rs25
-rw-r--r--src/tools/clippy/tests/ui/empty_structs_with_brackets.stderr19
-rw-r--r--src/tools/clippy/tests/ui/entry.fixed154
-rw-r--r--src/tools/clippy/tests/ui/entry.rs158
-rw-r--r--src/tools/clippy/tests/ui/entry.stderr217
-rw-r--r--src/tools/clippy/tests/ui/entry_btree.fixed18
-rw-r--r--src/tools/clippy/tests/ui/entry_btree.rs18
-rw-r--r--src/tools/clippy/tests/ui/entry_btree.stderr20
-rw-r--r--src/tools/clippy/tests/ui/entry_with_else.fixed73
-rw-r--r--src/tools/clippy/tests/ui/entry_with_else.rs60
-rw-r--r--src/tools/clippy/tests/ui/entry_with_else.stderr151
-rw-r--r--src/tools/clippy/tests/ui/enum_clike_unportable_variant.rs50
-rw-r--r--src/tools/clippy/tests/ui/enum_clike_unportable_variant.stderr58
-rw-r--r--src/tools/clippy/tests/ui/enum_glob_use.fixed30
-rw-r--r--src/tools/clippy/tests/ui/enum_glob_use.rs30
-rw-r--r--src/tools/clippy/tests/ui/enum_glob_use.stderr22
-rw-r--r--src/tools/clippy/tests/ui/enum_variants.rs182
-rw-r--r--src/tools/clippy/tests/ui/enum_variants.stderr149
-rw-r--r--src/tools/clippy/tests/ui/eprint_with_newline.rs49
-rw-r--r--src/tools/clippy/tests/ui/eprint_with_newline.stderr129
-rw-r--r--src/tools/clippy/tests/ui/eq_op.rs108
-rw-r--r--src/tools/clippy/tests/ui/eq_op.stderr172
-rw-r--r--src/tools/clippy/tests/ui/eq_op_macros.rs56
-rw-r--r--src/tools/clippy/tests/ui/eq_op_macros.stderr95
-rw-r--r--src/tools/clippy/tests/ui/equatable_if_let.fixed84
-rw-r--r--src/tools/clippy/tests/ui/equatable_if_let.rs84
-rw-r--r--src/tools/clippy/tests/ui/equatable_if_let.stderr70
-rw-r--r--src/tools/clippy/tests/ui/erasing_op.rs43
-rw-r--r--src/tools/clippy/tests/ui/erasing_op.stderr34
-rw-r--r--src/tools/clippy/tests/ui/err_expect.fixed14
-rw-r--r--src/tools/clippy/tests/ui/err_expect.rs14
-rw-r--r--src/tools/clippy/tests/ui/err_expect.stderr10
-rw-r--r--src/tools/clippy/tests/ui/eta.fixed305
-rw-r--r--src/tools/clippy/tests/ui/eta.rs305
-rw-r--r--src/tools/clippy/tests/ui/eta.stderr120
-rw-r--r--src/tools/clippy/tests/ui/excessive_precision.fixed69
-rw-r--r--src/tools/clippy/tests/ui/excessive_precision.rs69
-rw-r--r--src/tools/clippy/tests/ui/excessive_precision.stderr94
-rw-r--r--src/tools/clippy/tests/ui/exhaustive_items.fixed91
-rw-r--r--src/tools/clippy/tests/ui/exhaustive_items.rs88
-rw-r--r--src/tools/clippy/tests/ui/exhaustive_items.stderr61
-rw-r--r--src/tools/clippy/tests/ui/exit1.rs15
-rw-r--r--src/tools/clippy/tests/ui/exit1.stderr10
-rw-r--r--src/tools/clippy/tests/ui/exit2.rs13
-rw-r--r--src/tools/clippy/tests/ui/exit2.stderr10
-rw-r--r--src/tools/clippy/tests/ui/exit3.rs8
-rw-r--r--src/tools/clippy/tests/ui/expect.rs16
-rw-r--r--src/tools/clippy/tests/ui/expect.stderr19
-rw-r--r--src/tools/clippy/tests/ui/expect_fun_call.fixed104
-rw-r--r--src/tools/clippy/tests/ui/expect_fun_call.rs104
-rw-r--r--src/tools/clippy/tests/ui/expect_fun_call.stderr82
-rw-r--r--src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.rs142
-rw-r--r--src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.stderr40
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.fixed218
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.rs218
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.stderr202
-rw-r--r--src/tools/clippy/tests/ui/explicit_counter_loop.rs190
-rw-r--r--src/tools/clippy/tests/ui/explicit_counter_loop.stderr60
-rw-r--r--src/tools/clippy/tests/ui/explicit_deref_methods.fixed101
-rw-r--r--src/tools/clippy/tests/ui/explicit_deref_methods.rs101
-rw-r--r--src/tools/clippy/tests/ui/explicit_deref_methods.stderr76
-rw-r--r--src/tools/clippy/tests/ui/explicit_write.fixed63
-rw-r--r--src/tools/clippy/tests/ui/explicit_write.rs63
-rw-r--r--src/tools/clippy/tests/ui/explicit_write.stderr76
-rw-r--r--src/tools/clippy/tests/ui/extend_with_drain.fixed60
-rw-r--r--src/tools/clippy/tests/ui/extend_with_drain.rs60
-rw-r--r--src/tools/clippy/tests/ui/extend_with_drain.stderr28
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_lifetimes.rs129
-rw-r--r--src/tools/clippy/tests/ui/extra_unused_lifetimes.stderr40
-rw-r--r--src/tools/clippy/tests/ui/fallible_impl_from.rs76
-rw-r--r--src/tools/clippy/tests/ui/fallible_impl_from.stderr93
-rw-r--r--src/tools/clippy/tests/ui/field_reassign_with_default.rs249
-rw-r--r--src/tools/clippy/tests/ui/field_reassign_with_default.stderr135
-rw-r--r--src/tools/clippy/tests/ui/filetype_is_file.rs23
-rw-r--r--src/tools/clippy/tests/ui/filetype_is_file.stderr27
-rw-r--r--src/tools/clippy/tests/ui/filter_map_identity.fixed19
-rw-r--r--src/tools/clippy/tests/ui/filter_map_identity.rs19
-rw-r--r--src/tools/clippy/tests/ui/filter_map_identity.stderr28
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next.rs17
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next.stderr17
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next_fixable.fixed10
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next_fixable.rs10
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next_fixable.stderr10
-rw-r--r--src/tools/clippy/tests/ui/find_map.rs33
-rw-r--r--src/tools/clippy/tests/ui/flat_map_identity.fixed17
-rw-r--r--src/tools/clippy/tests/ui/flat_map_identity.rs17
-rw-r--r--src/tools/clippy/tests/ui/flat_map_identity.stderr22
-rw-r--r--src/tools/clippy/tests/ui/flat_map_option.fixed13
-rw-r--r--src/tools/clippy/tests/ui/flat_map_option.rs13
-rw-r--r--src/tools/clippy/tests/ui/flat_map_option.stderr16
-rw-r--r--src/tools/clippy/tests/ui/float_arithmetic.rs52
-rw-r--r--src/tools/clippy/tests/ui/float_arithmetic.stderr106
-rw-r--r--src/tools/clippy/tests/ui/float_cmp.rs115
-rw-r--r--src/tools/clippy/tests/ui/float_cmp.stderr51
-rw-r--r--src/tools/clippy/tests/ui/float_cmp_const.rs58
-rw-r--r--src/tools/clippy/tests/ui/float_cmp_const.stderr67
-rw-r--r--src/tools/clippy/tests/ui/float_equality_without_abs.rs31
-rw-r--r--src/tools/clippy/tests/ui/float_equality_without_abs.stderr92
-rw-r--r--src/tools/clippy/tests/ui/floating_point_abs.fixed84
-rw-r--r--src/tools/clippy/tests/ui/floating_point_abs.rs84
-rw-r--r--src/tools/clippy/tests/ui/floating_point_abs.stderr52
-rw-r--r--src/tools/clippy/tests/ui/floating_point_exp.fixed18
-rw-r--r--src/tools/clippy/tests/ui/floating_point_exp.rs18
-rw-r--r--src/tools/clippy/tests/ui/floating_point_exp.stderr28
-rw-r--r--src/tools/clippy/tests/ui/floating_point_hypot.fixed14
-rw-r--r--src/tools/clippy/tests/ui/floating_point_hypot.rs14
-rw-r--r--src/tools/clippy/tests/ui/floating_point_hypot.stderr22
-rw-r--r--src/tools/clippy/tests/ui/floating_point_log.fixed58
-rw-r--r--src/tools/clippy/tests/ui/floating_point_log.rs58
-rw-r--r--src/tools/clippy/tests/ui/floating_point_log.stderr174
-rw-r--r--src/tools/clippy/tests/ui/floating_point_logbase.fixed16
-rw-r--r--src/tools/clippy/tests/ui/floating_point_logbase.rs16
-rw-r--r--src/tools/clippy/tests/ui/floating_point_logbase.stderr28
-rw-r--r--src/tools/clippy/tests/ui/floating_point_mul_add.fixed37
-rw-r--r--src/tools/clippy/tests/ui/floating_point_mul_add.rs37
-rw-r--r--src/tools/clippy/tests/ui/floating_point_mul_add.stderr64
-rw-r--r--src/tools/clippy/tests/ui/floating_point_powf.fixed42
-rw-r--r--src/tools/clippy/tests/ui/floating_point_powf.rs42
-rw-r--r--src/tools/clippy/tests/ui/floating_point_powf.stderr150
-rw-r--r--src/tools/clippy/tests/ui/floating_point_powi.fixed20
-rw-r--r--src/tools/clippy/tests/ui/floating_point_powi.rs20
-rw-r--r--src/tools/clippy/tests/ui/floating_point_powi.stderr28
-rw-r--r--src/tools/clippy/tests/ui/floating_point_rad.fixed25
-rw-r--r--src/tools/clippy/tests/ui/floating_point_rad.rs25
-rw-r--r--src/tools/clippy/tests/ui/floating_point_rad.stderr40
-rw-r--r--src/tools/clippy/tests/ui/fn_address_comparisons.rs20
-rw-r--r--src/tools/clippy/tests/ui/fn_address_comparisons.stderr16
-rw-r--r--src/tools/clippy/tests/ui/fn_params_excessive_bools.rs45
-rw-r--r--src/tools/clippy/tests/ui/fn_params_excessive_bools.stderr53
-rw-r--r--src/tools/clippy/tests/ui/fn_to_numeric_cast.rs55
-rw-r--r--src/tools/clippy/tests/ui/fn_to_numeric_cast.stderr144
-rw-r--r--src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.rs55
-rw-r--r--src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.stderr144
-rw-r--r--src/tools/clippy/tests/ui/fn_to_numeric_cast_any.rs76
-rw-r--r--src/tools/clippy/tests/ui/fn_to_numeric_cast_any.stderr106
-rw-r--r--src/tools/clippy/tests/ui/for_kv_map.rs50
-rw-r--r--src/tools/clippy/tests/ui/for_kv_map.stderr58
-rw-r--r--src/tools/clippy/tests/ui/for_loop_fixable.fixed309
-rw-r--r--src/tools/clippy/tests/ui/for_loop_fixable.rs309
-rw-r--r--src/tools/clippy/tests/ui/for_loop_fixable.stderr96
-rw-r--r--src/tools/clippy/tests/ui/for_loop_unfixable.rs15
-rw-r--r--src/tools/clippy/tests/ui/for_loop_unfixable.stderr10
-rw-r--r--src/tools/clippy/tests/ui/for_loops_over_fallibles.rs72
-rw-r--r--src/tools/clippy/tests/ui/for_loops_over_fallibles.stderr95
-rw-r--r--src/tools/clippy/tests/ui/forget_non_drop.rs27
-rw-r--r--src/tools/clippy/tests/ui/forget_non_drop.stderr27
-rw-r--r--src/tools/clippy/tests/ui/forget_ref.rs50
-rw-r--r--src/tools/clippy/tests/ui/forget_ref.stderr111
-rw-r--r--src/tools/clippy/tests/ui/format.fixed94
-rw-r--r--src/tools/clippy/tests/ui/format.rs96
-rw-r--r--src/tools/clippy/tests/ui/format.stderr127
-rw-r--r--src/tools/clippy/tests/ui/format_args.fixed117
-rw-r--r--src/tools/clippy/tests/ui/format_args.rs117
-rw-r--r--src/tools/clippy/tests/ui/format_args.stderr130
-rw-r--r--src/tools/clippy/tests/ui/format_args_unfixable.rs61
-rw-r--r--src/tools/clippy/tests/ui/format_args_unfixable.stderr175
-rw-r--r--src/tools/clippy/tests/ui/format_push_string.rs7
-rw-r--r--src/tools/clippy/tests/ui/format_push_string.stderr19
-rw-r--r--src/tools/clippy/tests/ui/formatting.rs73
-rw-r--r--src/tools/clippy/tests/ui/formatting.stderr52
-rw-r--r--src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed61
-rw-r--r--src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs61
-rw-r--r--src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr94
-rw-r--r--src/tools/clippy/tests/ui/from_over_into.rs21
-rw-r--r--src/tools/clippy/tests/ui/from_over_into.stderr11
-rw-r--r--src/tools/clippy/tests/ui/from_str_radix_10.rs52
-rw-r--r--src/tools/clippy/tests/ui/from_str_radix_10.stderr52
-rw-r--r--src/tools/clippy/tests/ui/functions.rs112
-rw-r--r--src/tools/clippy/tests/ui/functions.stderr108
-rw-r--r--src/tools/clippy/tests/ui/functions_maxlines.rs163
-rw-r--r--src/tools/clippy/tests/ui/functions_maxlines.stderr16
-rw-r--r--src/tools/clippy/tests/ui/future_not_send.rs79
-rw-r--r--src/tools/clippy/tests/ui/future_not_send.stderr145
-rw-r--r--src/tools/clippy/tests/ui/get_first.fixed42
-rw-r--r--src/tools/clippy/tests/ui/get_first.rs42
-rw-r--r--src/tools/clippy/tests/ui/get_first.stderr22
-rw-r--r--src/tools/clippy/tests/ui/get_last_with_len.fixed49
-rw-r--r--src/tools/clippy/tests/ui/get_last_with_len.rs49
-rw-r--r--src/tools/clippy/tests/ui/get_last_with_len.stderr40
-rw-r--r--src/tools/clippy/tests/ui/get_unwrap.fixed67
-rw-r--r--src/tools/clippy/tests/ui/get_unwrap.rs67
-rw-r--r--src/tools/clippy/tests/ui/get_unwrap.stderr191
-rw-r--r--src/tools/clippy/tests/ui/identity_op.fixed119
-rw-r--r--src/tools/clippy/tests/ui/identity_op.rs119
-rw-r--r--src/tools/clippy/tests/ui/identity_op.stderr238
-rw-r--r--src/tools/clippy/tests/ui/if_let_mutex.rs42
-rw-r--r--src/tools/clippy/tests/ui/if_let_mutex.stderr29
-rw-r--r--src/tools/clippy/tests/ui/if_not_else.rs29
-rw-r--r--src/tools/clippy/tests/ui/if_not_else.stderr27
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else.rs217
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else.stderr112
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else2.rs160
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else2.stderr125
-rw-r--r--src/tools/clippy/tests/ui/if_then_some_else_none.rs115
-rw-r--r--src/tools/clippy/tests/ui/if_then_some_else_none.stderr61
-rw-r--r--src/tools/clippy/tests/ui/ifs_same_cond.rs46
-rw-r--r--src/tools/clippy/tests/ui/ifs_same_cond.stderr39
-rw-r--r--src/tools/clippy/tests/ui/impl.rs67
-rw-r--r--src/tools/clippy/tests/ui/impl.stderr63
-rw-r--r--src/tools/clippy/tests/ui/implicit_clone.fixed118
-rw-r--r--src/tools/clippy/tests/ui/implicit_clone.rs118
-rw-r--r--src/tools/clippy/tests/ui/implicit_clone.stderr76
-rw-r--r--src/tools/clippy/tests/ui/implicit_hasher.rs102
-rw-r--r--src/tools/clippy/tests/ui/implicit_hasher.stderr164
-rw-r--r--src/tools/clippy/tests/ui/implicit_return.fixed140
-rw-r--r--src/tools/clippy/tests/ui/implicit_return.rs140
-rw-r--r--src/tools/clippy/tests/ui/implicit_return.stderr109
-rw-r--r--src/tools/clippy/tests/ui/implicit_saturating_sub.fixed168
-rw-r--r--src/tools/clippy/tests/ui/implicit_saturating_sub.rs214
-rw-r--r--src/tools/clippy/tests/ui/implicit_saturating_sub.stderr188
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_digit_grouping.fixed47
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_digit_grouping.rs47
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_digit_grouping.stderr70
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed73
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs77
-rw-r--r--src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr20
-rw-r--r--src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs166
-rw-r--r--src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr158
-rw-r--r--src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs28
-rw-r--r--src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr22
-rw-r--r--src/tools/clippy/tests/ui/indexing_slicing_index.rs48
-rw-r--r--src/tools/clippy/tests/ui/indexing_slicing_index.stderr64
-rw-r--r--src/tools/clippy/tests/ui/indexing_slicing_slice.rs37
-rw-r--r--src/tools/clippy/tests/ui/indexing_slicing_slice.stderr125
-rw-r--r--src/tools/clippy/tests/ui/inefficient_to_string.fixed31
-rw-r--r--src/tools/clippy/tests/ui/inefficient_to_string.rs31
-rw-r--r--src/tools/clippy/tests/ui/inefficient_to_string.stderr55
-rw-r--r--src/tools/clippy/tests/ui/infallible_destructuring_match.fixed112
-rw-r--r--src/tools/clippy/tests/ui/infallible_destructuring_match.rs118
-rw-r--r--src/tools/clippy/tests/ui/infallible_destructuring_match.stderr28
-rw-r--r--src/tools/clippy/tests/ui/infinite_iter.rs68
-rw-r--r--src/tools/clippy/tests/ui/infinite_iter.stderr109
-rw-r--r--src/tools/clippy/tests/ui/infinite_loop.rs217
-rw-r--r--src/tools/clippy/tests/ui/infinite_loop.stderr95
-rw-r--r--src/tools/clippy/tests/ui/inherent_to_string.rs106
-rw-r--r--src/tools/clippy/tests/ui/inherent_to_string.stderr28
-rw-r--r--src/tools/clippy/tests/ui/inline_fn_without_body.fixed17
-rw-r--r--src/tools/clippy/tests/ui/inline_fn_without_body.rs20
-rw-r--r--src/tools/clippy/tests/ui/inline_fn_without_body.stderr28
-rw-r--r--src/tools/clippy/tests/ui/inspect_for_each.rs22
-rw-r--r--src/tools/clippy/tests/ui/inspect_for_each.stderr16
-rw-r--r--src/tools/clippy/tests/ui/int_plus_one.fixed17
-rw-r--r--src/tools/clippy/tests/ui/int_plus_one.rs17
-rw-r--r--src/tools/clippy/tests/ui/int_plus_one.stderr28
-rw-r--r--src/tools/clippy/tests/ui/integer_arithmetic.rs102
-rw-r--r--src/tools/clippy/tests/ui/integer_arithmetic.stderr169
-rw-r--r--src/tools/clippy/tests/ui/integer_division.rs9
-rw-r--r--src/tools/clippy/tests/ui/integer_division.stderr27
-rw-r--r--src/tools/clippy/tests/ui/into_iter_on_ref.fixed45
-rw-r--r--src/tools/clippy/tests/ui/into_iter_on_ref.rs45
-rw-r--r--src/tools/clippy/tests/ui/into_iter_on_ref.stderr166
-rw-r--r--src/tools/clippy/tests/ui/invalid_null_ptr_usage.fixed49
-rw-r--r--src/tools/clippy/tests/ui/invalid_null_ptr_usage.rs49
-rw-r--r--src/tools/clippy/tests/ui/invalid_null_ptr_usage.stderr154
-rw-r--r--src/tools/clippy/tests/ui/invalid_upcast_comparisons.rs85
-rw-r--r--src/tools/clippy/tests/ui/invalid_upcast_comparisons.stderr166
-rw-r--r--src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.rs20
-rw-r--r--src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.stderr22
-rw-r--r--src/tools/clippy/tests/ui/is_digit_ascii_radix.fixed18
-rw-r--r--src/tools/clippy/tests/ui/is_digit_ascii_radix.rs18
-rw-r--r--src/tools/clippy/tests/ui/is_digit_ascii_radix.stderr22
-rw-r--r--src/tools/clippy/tests/ui/issue-3145.rs3
-rw-r--r--src/tools/clippy/tests/ui/issue-3145.stderr8
-rw-r--r--src/tools/clippy/tests/ui/issue-7447.rs25
-rw-r--r--src/tools/clippy/tests/ui/issue-7447.stderr19
-rw-r--r--src/tools/clippy/tests/ui/issue_2356.fixed26
-rw-r--r--src/tools/clippy/tests/ui/issue_2356.rs26
-rw-r--r--src/tools/clippy/tests/ui/issue_2356.stderr14
-rw-r--r--src/tools/clippy/tests/ui/issue_4266.rs38
-rw-r--r--src/tools/clippy/tests/ui/issue_4266.stderr25
-rw-r--r--src/tools/clippy/tests/ui/item_after_statement.rs52
-rw-r--r--src/tools/clippy/tests/ui/item_after_statement.stderr33
-rw-r--r--src/tools/clippy/tests/ui/iter_cloned_collect.fixed29
-rw-r--r--src/tools/clippy/tests/ui/iter_cloned_collect.rs32
-rw-r--r--src/tools/clippy/tests/ui/iter_cloned_collect.stderr38
-rw-r--r--src/tools/clippy/tests/ui/iter_count.fixed87
-rw-r--r--src/tools/clippy/tests/ui/iter_count.rs87
-rw-r--r--src/tools/clippy/tests/ui/iter_count.stderr154
-rw-r--r--src/tools/clippy/tests/ui/iter_next_slice.fixed24
-rw-r--r--src/tools/clippy/tests/ui/iter_next_slice.rs24
-rw-r--r--src/tools/clippy/tests/ui/iter_next_slice.stderr28
-rw-r--r--src/tools/clippy/tests/ui/iter_not_returning_iterator.rs74
-rw-r--r--src/tools/clippy/tests/ui/iter_not_returning_iterator.stderr22
-rw-r--r--src/tools/clippy/tests/ui/iter_nth.rs56
-rw-r--r--src/tools/clippy/tests/ui/iter_nth.stderr59
-rw-r--r--src/tools/clippy/tests/ui/iter_nth_zero.fixed31
-rw-r--r--src/tools/clippy/tests/ui/iter_nth_zero.rs31
-rw-r--r--src/tools/clippy/tests/ui/iter_nth_zero.stderr22
-rw-r--r--src/tools/clippy/tests/ui/iter_overeager_cloned.fixed55
-rw-r--r--src/tools/clippy/tests/ui/iter_overeager_cloned.rs56
-rw-r--r--src/tools/clippy/tests/ui/iter_overeager_cloned.stderr70
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_next.fixed37
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_next.rs37
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_next.stderr46
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_next_unfixable.rs19
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_next_unfixable.stderr39
-rw-r--r--src/tools/clippy/tests/ui/iter_with_drain.fixed65
-rw-r--r--src/tools/clippy/tests/ui/iter_with_drain.rs65
-rw-r--r--src/tools/clippy/tests/ui/iter_with_drain.stderr40
-rw-r--r--src/tools/clippy/tests/ui/iterator_step_by_zero.rs28
-rw-r--r--src/tools/clippy/tests/ui/iterator_step_by_zero.stderr46
-rw-r--r--src/tools/clippy/tests/ui/large_const_arrays.fixed37
-rw-r--r--src/tools/clippy/tests/ui/large_const_arrays.rs37
-rw-r--r--src/tools/clippy/tests/ui/large_const_arrays.stderr76
-rw-r--r--src/tools/clippy/tests/ui/large_digit_groups.fixed31
-rw-r--r--src/tools/clippy/tests/ui/large_digit_groups.rs31
-rw-r--r--src/tools/clippy/tests/ui/large_digit_groups.stderr48
-rw-r--r--src/tools/clippy/tests/ui/large_enum_variant.rs135
-rw-r--r--src/tools/clippy/tests/ui/large_enum_variant.stderr197
-rw-r--r--src/tools/clippy/tests/ui/large_stack_arrays.rs30
-rw-r--r--src/tools/clippy/tests/ui/large_stack_arrays.stderr35
-rw-r--r--src/tools/clippy/tests/ui/large_types_passed_by_value.rs66
-rw-r--r--src/tools/clippy/tests/ui/large_types_passed_by_value.stderr52
-rw-r--r--src/tools/clippy/tests/ui/len_without_is_empty.rs285
-rw-r--r--src/tools/clippy/tests/ui/len_without_is_empty.stderr123
-rw-r--r--src/tools/clippy/tests/ui/len_zero.fixed143
-rw-r--r--src/tools/clippy/tests/ui/len_zero.rs143
-rw-r--r--src/tools/clippy/tests/ui/len_zero.stderr88
-rw-r--r--src/tools/clippy/tests/ui/len_zero_ranges.fixed17
-rw-r--r--src/tools/clippy/tests/ui/len_zero_ranges.rs17
-rw-r--r--src/tools/clippy/tests/ui/len_zero_ranges.stderr16
-rw-r--r--src/tools/clippy/tests/ui/let_and_return.rs169
-rw-r--r--src/tools/clippy/tests/ui/let_and_return.stderr45
-rw-r--r--src/tools/clippy/tests/ui/let_if_seq.rs122
-rw-r--r--src/tools/clippy/tests/ui/let_if_seq.stderr50
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_drop.rs28
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_drop.stderr27
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_lock.rs36
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_lock.stderr83
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_must_use.rs95
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_must_use.stderr99
-rw-r--r--src/tools/clippy/tests/ui/let_unit.fixed177
-rw-r--r--src/tools/clippy/tests/ui/let_unit.rs177
-rw-r--r--src/tools/clippy/tests/ui/let_unit.stderr102
-rw-r--r--src/tools/clippy/tests/ui/linkedlist.rs48
-rw-r--r--src/tools/clippy/tests/ui/linkedlist.stderr75
-rw-r--r--src/tools/clippy/tests/ui/literals.rs42
-rw-r--r--src/tools/clippy/tests/ui/literals.stderr139
-rw-r--r--src/tools/clippy/tests/ui/logic_bug.rs34
-rw-r--r--src/tools/clippy/tests/ui/logic_bug.stderr63
-rw-r--r--src/tools/clippy/tests/ui/lossy_float_literal.fixed35
-rw-r--r--src/tools/clippy/tests/ui/lossy_float_literal.rs35
-rw-r--r--src/tools/clippy/tests/ui/lossy_float_literal.stderr70
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.fixed48
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.rs48
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports.stderr28
-rw-r--r--src/tools/clippy/tests/ui/macro_use_imports_expect.rs51
-rw-r--r--src/tools/clippy/tests/ui/manual_assert.edition2018.fixed52
-rw-r--r--src/tools/clippy/tests/ui/manual_assert.edition2018.stderr68
-rw-r--r--src/tools/clippy/tests/ui/manual_assert.edition2021.fixed52
-rw-r--r--src/tools/clippy/tests/ui/manual_assert.edition2021.stderr68
-rw-r--r--src/tools/clippy/tests/ui/manual_assert.fixed45
-rw-r--r--src/tools/clippy/tests/ui/manual_assert.rs68
-rw-r--r--src/tools/clippy/tests/ui/manual_async_fn.fixed110
-rw-r--r--src/tools/clippy/tests/ui/manual_async_fn.rs130
-rw-r--r--src/tools/clippy/tests/ui/manual_async_fn.stderr165
-rw-r--r--src/tools/clippy/tests/ui/manual_bits.fixed59
-rw-r--r--src/tools/clippy/tests/ui/manual_bits.rs59
-rw-r--r--src/tools/clippy/tests/ui/manual_bits.stderr178
-rw-r--r--src/tools/clippy/tests/ui/manual_filter_map.fixed121
-rw-r--r--src/tools/clippy/tests/ui/manual_filter_map.rs134
-rw-r--r--src/tools/clippy/tests/ui/manual_filter_map.stderr194
-rw-r--r--src/tools/clippy/tests/ui/manual_find.rs22
-rw-r--r--src/tools/clippy/tests/ui/manual_find.stderr29
-rw-r--r--src/tools/clippy/tests/ui/manual_find_fixable.fixed182
-rw-r--r--src/tools/clippy/tests/ui/manual_find_fixable.rs242
-rw-r--r--src/tools/clippy/tests/ui/manual_find_fixable.stderr142
-rw-r--r--src/tools/clippy/tests/ui/manual_find_map.fixed124
-rw-r--r--src/tools/clippy/tests/ui/manual_find_map.rs137
-rw-r--r--src/tools/clippy/tests/ui/manual_find_map.stderr210
-rw-r--r--src/tools/clippy/tests/ui/manual_flatten.rs125
-rw-r--r--src/tools/clippy/tests/ui/manual_flatten.stderr199
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option.fixed157
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option.rs223
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option.stderr198
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option_2.fixed60
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option_2.rs75
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option_2.stderr73
-rw-r--r--src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.rs88
-rw-r--r--src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.stderr111
-rw-r--r--src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs136
-rw-r--r--src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr115
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs87
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr41
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_struct.rs74
-rw-r--r--src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr65
-rw-r--r--src/tools/clippy/tests/ui/manual_ok_or.fixed40
-rw-r--r--src/tools/clippy/tests/ui/manual_ok_or.rs44
-rw-r--r--src/tools/clippy/tests/ui/manual_ok_or.stderr41
-rw-r--r--src/tools/clippy/tests/ui/manual_rem_euclid.fixed55
-rw-r--r--src/tools/clippy/tests/ui/manual_rem_euclid.rs55
-rw-r--r--src/tools/clippy/tests/ui/manual_rem_euclid.stderr57
-rw-r--r--src/tools/clippy/tests/ui/manual_retain.fixed240
-rw-r--r--src/tools/clippy/tests/ui/manual_retain.rs246
-rw-r--r--src/tools/clippy/tests/ui/manual_retain.stderr124
-rw-r--r--src/tools/clippy/tests/ui/manual_saturating_arithmetic.fixed45
-rw-r--r--src/tools/clippy/tests/ui/manual_saturating_arithmetic.rs55
-rw-r--r--src/tools/clippy/tests/ui/manual_saturating_arithmetic.stderr163
-rw-r--r--src/tools/clippy/tests/ui/manual_split_once.fixed147
-rw-r--r--src/tools/clippy/tests/ui/manual_split_once.rs147
-rw-r--r--src/tools/clippy/tests/ui/manual_split_once.stderr213
-rw-r--r--src/tools/clippy/tests/ui/manual_str_repeat.fixed66
-rw-r--r--src/tools/clippy/tests/ui/manual_str_repeat.rs66
-rw-r--r--src/tools/clippy/tests/ui/manual_str_repeat.stderr64
-rw-r--r--src/tools/clippy/tests/ui/manual_strip.rs66
-rw-r--r--src/tools/clippy/tests/ui/manual_strip.stderr132
-rw-r--r--src/tools/clippy/tests/ui/manual_unwrap_or.fixed181
-rw-r--r--src/tools/clippy/tests/ui/manual_unwrap_or.rs223
-rw-r--r--src/tools/clippy/tests/ui/manual_unwrap_or.stderr155
-rw-r--r--src/tools/clippy/tests/ui/many_single_char_names.rs74
-rw-r--r--src/tools/clippy/tests/ui/many_single_char_names.stderr51
-rw-r--r--src/tools/clippy/tests/ui/map_clone.fixed63
-rw-r--r--src/tools/clippy/tests/ui/map_clone.rs63
-rw-r--r--src/tools/clippy/tests/ui/map_clone.stderr40
-rw-r--r--src/tools/clippy/tests/ui/map_collect_result_unit.fixed16
-rw-r--r--src/tools/clippy/tests/ui/map_collect_result_unit.rs16
-rw-r--r--src/tools/clippy/tests/ui/map_collect_result_unit.stderr16
-rw-r--r--src/tools/clippy/tests/ui/map_err.rs29
-rw-r--r--src/tools/clippy/tests/ui/map_err.stderr11
-rw-r--r--src/tools/clippy/tests/ui/map_flatten.rs55
-rw-r--r--src/tools/clippy/tests/ui/map_flatten.stderr100
-rw-r--r--src/tools/clippy/tests/ui/map_flatten_fixable.fixed65
-rw-r--r--src/tools/clippy/tests/ui/map_flatten_fixable.rs67
-rw-r--r--src/tools/clippy/tests/ui/map_flatten_fixable.stderr99
-rw-r--r--src/tools/clippy/tests/ui/map_identity.fixed25
-rw-r--r--src/tools/clippy/tests/ui/map_identity.rs27
-rw-r--r--src/tools/clippy/tests/ui/map_identity.stderr43
-rw-r--r--src/tools/clippy/tests/ui/map_unit_fn.rs11
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or.rs81
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or.stderr150
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or_fixable.fixed54
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or_fixable.rs58
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr22
-rw-r--r--src/tools/clippy/tests/ui/match_as_ref.fixed43
-rw-r--r--src/tools/clippy/tests/ui/match_as_ref.rs52
-rw-r--r--src/tools/clippy/tests/ui/match_as_ref.stderr33
-rw-r--r--src/tools/clippy/tests/ui/match_bool.rs63
-rw-r--r--src/tools/clippy/tests/ui/match_bool.stderr117
-rw-r--r--src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed170
-rw-r--r--src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs211
-rw-r--r--src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr137
-rw-r--r--src/tools/clippy/tests/ui/match_on_vec_items.rs152
-rw-r--r--src/tools/clippy/tests/ui/match_on_vec_items.stderr52
-rw-r--r--src/tools/clippy/tests/ui/match_overlapping_arm.rs135
-rw-r--r--src/tools/clippy/tests/ui/match_overlapping_arm.stderr99
-rw-r--r--src/tools/clippy/tests/ui/match_ref_pats.fixed118
-rw-r--r--src/tools/clippy/tests/ui/match_ref_pats.rs118
-rw-r--r--src/tools/clippy/tests/ui/match_ref_pats.stderr68
-rw-r--r--src/tools/clippy/tests/ui/match_result_ok.fixed63
-rw-r--r--src/tools/clippy/tests/ui/match_result_ok.rs63
-rw-r--r--src/tools/clippy/tests/ui/match_result_ok.stderr36
-rw-r--r--src/tools/clippy/tests/ui/match_same_arms.rs56
-rw-r--r--src/tools/clippy/tests/ui/match_same_arms.stderr121
-rw-r--r--src/tools/clippy/tests/ui/match_same_arms2.rs238
-rw-r--r--src/tools/clippy/tests/ui/match_same_arms2.stderr196
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding.fixed126
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding.rs142
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding.stderr200
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding2.fixed53
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding2.rs55
-rw-r--r--src/tools/clippy/tests/ui/match_single_binding2.stderr68
-rw-r--r--src/tools/clippy/tests/ui/match_str_case_mismatch.fixed186
-rw-r--r--src/tools/clippy/tests/ui/match_str_case_mismatch.rs186
-rw-r--r--src/tools/clippy/tests/ui/match_str_case_mismatch.stderr80
-rw-r--r--src/tools/clippy/tests/ui/match_wild_err_arm.edition2018.stderr35
-rw-r--r--src/tools/clippy/tests/ui/match_wild_err_arm.edition2021.stderr35
-rw-r--r--src/tools/clippy/tests/ui/match_wild_err_arm.rs68
-rw-r--r--src/tools/clippy/tests/ui/match_wildcard_for_single_variants.fixed134
-rw-r--r--src/tools/clippy/tests/ui/match_wildcard_for_single_variants.rs134
-rw-r--r--src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr52
-rw-r--r--src/tools/clippy/tests/ui/mem_forget.rs23
-rw-r--r--src/tools/clippy/tests/ui/mem_forget.stderr22
-rw-r--r--src/tools/clippy/tests/ui/mem_replace.fixed79
-rw-r--r--src/tools/clippy/tests/ui/mem_replace.rs79
-rw-r--r--src/tools/clippy/tests/ui/mem_replace.stderr120
-rw-r--r--src/tools/clippy/tests/ui/mem_replace_macro.rs21
-rw-r--r--src/tools/clippy/tests/ui/mem_replace_macro.stderr14
-rw-r--r--src/tools/clippy/tests/ui/methods.rs140
-rw-r--r--src/tools/clippy/tests/ui/methods.stderr24
-rw-r--r--src/tools/clippy/tests/ui/methods_fixable.fixed11
-rw-r--r--src/tools/clippy/tests/ui/methods_fixable.rs11
-rw-r--r--src/tools/clippy/tests/ui/methods_fixable.stderr10
-rw-r--r--src/tools/clippy/tests/ui/min_max.rs62
-rw-r--r--src/tools/clippy/tests/ui/min_max.stderr82
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_attr.rs228
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_attr.stderr37
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_invalid_attr.rs4
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_invalid_attr.stderr8
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.rs11
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.stderr38
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_no_patch.rs14
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_outer_attr.rs4
-rw-r--r--src/tools/clippy/tests/ui/min_rust_version_outer_attr.stderr8
-rw-r--r--src/tools/clippy/tests/ui/mismatched_target_os_non_unix.fixed27
-rw-r--r--src/tools/clippy/tests/ui/mismatched_target_os_non_unix.rs27
-rw-r--r--src/tools/clippy/tests/ui/mismatched_target_os_non_unix.stderr36
-rw-r--r--src/tools/clippy/tests/ui/mismatched_target_os_unix.fixed62
-rw-r--r--src/tools/clippy/tests/ui/mismatched_target_os_unix.rs62
-rw-r--r--src/tools/clippy/tests/ui/mismatched_target_os_unix.stderr183
-rw-r--r--src/tools/clippy/tests/ui/mismatching_type_param_order.rs64
-rw-r--r--src/tools/clippy/tests/ui/mismatching_type_param_order.stderr83
-rw-r--r--src/tools/clippy/tests/ui/missing-doc-crate-missing.rs3
-rw-r--r--src/tools/clippy/tests/ui/missing-doc-crate-missing.stderr12
-rw-r--r--src/tools/clippy/tests/ui/missing-doc-crate.rs4
-rw-r--r--src/tools/clippy/tests/ui/missing-doc-impl.rs92
-rw-r--r--src/tools/clippy/tests/ui/missing-doc-impl.stderr107
-rw-r--r--src/tools/clippy/tests/ui/missing-doc.rs102
-rw-r--r--src/tools/clippy/tests/ui/missing-doc.stderr159
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/auxiliary/helper.rs8
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs121
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs81
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr85
-rw-r--r--src/tools/clippy/tests/ui/missing_inline.rs66
-rw-r--r--src/tools/clippy/tests/ui/missing_inline.stderr40
-rw-r--r--src/tools/clippy/tests/ui/missing_inline_executable.rs5
-rw-r--r--src/tools/clippy/tests/ui/missing_inline_proc_macro.rs23
-rw-r--r--src/tools/clippy/tests/ui/missing_panics_doc.rs153
-rw-r--r--src/tools/clippy/tests/ui/missing_panics_doc.stderr108
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop.fixed28
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop.rs28
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop.stderr40
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop_no_std.fixed23
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop_no_std.rs23
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr10
-rw-r--r--src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed43
-rw-r--r--src/tools/clippy/tests/ui/mistyped_literal_suffix.rs43
-rw-r--r--src/tools/clippy/tests/ui/mistyped_literal_suffix.stderr100
-rw-r--r--src/tools/clippy/tests/ui/mixed_read_write_in_expression.rs112
-rw-r--r--src/tools/clippy/tests/ui/mixed_read_write_in_expression.stderr51
-rw-r--r--src/tools/clippy/tests/ui/module_inception.rs21
-rw-r--r--src/tools/clippy/tests/ui/module_inception.stderr20
-rw-r--r--src/tools/clippy/tests/ui/module_name_repetitions.rs18
-rw-r--r--src/tools/clippy/tests/ui/module_name_repetitions.stderr34
-rw-r--r--src/tools/clippy/tests/ui/modulo_arithmetic_float.rs29
-rw-r--r--src/tools/clippy/tests/ui/modulo_arithmetic_float.stderr83
-rw-r--r--src/tools/clippy/tests/ui/modulo_arithmetic_integral.rs83
-rw-r--r--src/tools/clippy/tests/ui/modulo_arithmetic_integral.stderr156
-rw-r--r--src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.rs42
-rw-r--r--src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.stderr156
-rw-r--r--src/tools/clippy/tests/ui/modulo_one.rs23
-rw-r--r--src/tools/clippy/tests/ui/modulo_one.stderr60
-rw-r--r--src/tools/clippy/tests/ui/must_use_candidates.fixed93
-rw-r--r--src/tools/clippy/tests/ui/must_use_candidates.rs93
-rw-r--r--src/tools/clippy/tests/ui/must_use_candidates.stderr34
-rw-r--r--src/tools/clippy/tests/ui/must_use_unit.fixed26
-rw-r--r--src/tools/clippy/tests/ui/must_use_unit.rs26
-rw-r--r--src/tools/clippy/tests/ui/must_use_unit.stderr28
-rw-r--r--src/tools/clippy/tests/ui/mut_from_ref.rs54
-rw-r--r--src/tools/clippy/tests/ui/mut_from_ref.stderr75
-rw-r--r--src/tools/clippy/tests/ui/mut_key.rs85
-rw-r--r--src/tools/clippy/tests/ui/mut_key.stderr106
-rw-r--r--src/tools/clippy/tests/ui/mut_mut.rs59
-rw-r--r--src/tools/clippy/tests/ui/mut_mut.stderr63
-rw-r--r--src/tools/clippy/tests/ui/mut_mutex_lock.fixed21
-rw-r--r--src/tools/clippy/tests/ui/mut_mutex_lock.rs21
-rw-r--r--src/tools/clippy/tests/ui/mut_mutex_lock.stderr10
-rw-r--r--src/tools/clippy/tests/ui/mut_range_bound.rs84
-rw-r--r--src/tools/clippy/tests/ui/mut_range_bound.stderr59
-rw-r--r--src/tools/clippy/tests/ui/mut_reference.rs43
-rw-r--r--src/tools/clippy/tests/ui/mut_reference.stderr22
-rw-r--r--src/tools/clippy/tests/ui/mutex_atomic.rs17
-rw-r--r--src/tools/clippy/tests/ui/mutex_atomic.stderr48
-rw-r--r--src/tools/clippy/tests/ui/needless_arbitrary_self_type.fixed69
-rw-r--r--src/tools/clippy/tests/ui/needless_arbitrary_self_type.rs69
-rw-r--r--src/tools/clippy/tests/ui/needless_arbitrary_self_type.stderr40
-rw-r--r--src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.rs46
-rw-r--r--src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr10
-rw-r--r--src/tools/clippy/tests/ui/needless_bitwise_bool.fixed40
-rw-r--r--src/tools/clippy/tests/ui/needless_bitwise_bool.rs40
-rw-r--r--src/tools/clippy/tests/ui/needless_bitwise_bool.stderr10
-rw-r--r--src/tools/clippy/tests/ui/needless_bool/fixable.fixed126
-rw-r--r--src/tools/clippy/tests/ui/needless_bool/fixable.rs186
-rw-r--r--src/tools/clippy/tests/ui/needless_bool/fixable.stderr193
-rw-r--r--src/tools/clippy/tests/ui/needless_bool/simple.rs47
-rw-r--r--src/tools/clippy/tests/ui/needless_bool/simple.stderr44
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow.fixed185
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow.rs185
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow.stderr136
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow_pat.rs150
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow_pat.stderr112
-rw-r--r--src/tools/clippy/tests/ui/needless_borrowed_ref.fixed45
-rw-r--r--src/tools/clippy/tests/ui/needless_borrowed_ref.rs45
-rw-r--r--src/tools/clippy/tests/ui/needless_borrowed_ref.stderr10
-rw-r--r--src/tools/clippy/tests/ui/needless_collect.fixed36
-rw-r--r--src/tools/clippy/tests/ui/needless_collect.rs36
-rw-r--r--src/tools/clippy/tests/ui/needless_collect.stderr70
-rw-r--r--src/tools/clippy/tests/ui/needless_collect_indirect.rs114
-rw-r--r--src/tools/clippy/tests/ui/needless_collect_indirect.stderr129
-rw-r--r--src/tools/clippy/tests/ui/needless_continue.rs144
-rw-r--r--src/tools/clippy/tests/ui/needless_continue.stderr131
-rw-r--r--src/tools/clippy/tests/ui/needless_doc_main.rs140
-rw-r--r--src/tools/clippy/tests/ui/needless_doc_main.stderr28
-rw-r--r--src/tools/clippy/tests/ui/needless_for_each_fixable.fixed118
-rw-r--r--src/tools/clippy/tests/ui/needless_for_each_fixable.rs118
-rw-r--r--src/tools/clippy/tests/ui/needless_for_each_fixable.stderr123
-rw-r--r--src/tools/clippy/tests/ui/needless_for_each_unfixable.rs14
-rw-r--r--src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr30
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.fixed273
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.rs273
-rw-r--r--src/tools/clippy/tests/ui/needless_late_init.stderr274
-rw-r--r--src/tools/clippy/tests/ui/needless_lifetimes.rs422
-rw-r--r--src/tools/clippy/tests/ui/needless_lifetimes.stderr190
-rw-r--r--src/tools/clippy/tests/ui/needless_match.fixed210
-rw-r--r--src/tools/clippy/tests/ui/needless_match.rs247
-rw-r--r--src/tools/clippy/tests/ui/needless_match.stderr113
-rw-r--r--src/tools/clippy/tests/ui/needless_option_as_deref.fixed55
-rw-r--r--src/tools/clippy/tests/ui/needless_option_as_deref.rs55
-rw-r--r--src/tools/clippy/tests/ui/needless_option_as_deref.stderr22
-rw-r--r--src/tools/clippy/tests/ui/needless_option_take.fixed15
-rw-r--r--src/tools/clippy/tests/ui/needless_option_take.rs15
-rw-r--r--src/tools/clippy/tests/ui/needless_option_take.stderr10
-rw-r--r--src/tools/clippy/tests/ui/needless_parens_on_range_literals.fixed14
-rw-r--r--src/tools/clippy/tests/ui/needless_parens_on_range_literals.rs14
-rw-r--r--src/tools/clippy/tests/ui/needless_parens_on_range_literals.stderr40
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_value.rs160
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_value.stderr178
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_value_proc_macro.rs21
-rw-r--r--src/tools/clippy/tests/ui/needless_question_mark.fixed140
-rw-r--r--src/tools/clippy/tests/ui/needless_question_mark.rs140
-rw-r--r--src/tools/clippy/tests/ui/needless_question_mark.stderr93
-rw-r--r--src/tools/clippy/tests/ui/needless_range_loop.rs95
-rw-r--r--src/tools/clippy/tests/ui/needless_range_loop.stderr157
-rw-r--r--src/tools/clippy/tests/ui/needless_range_loop2.rs109
-rw-r--r--src/tools/clippy/tests/ui/needless_range_loop2.stderr91
-rw-r--r--src/tools/clippy/tests/ui/needless_return.fixed240
-rw-r--r--src/tools/clippy/tests/ui/needless_return.rs240
-rw-r--r--src/tools/clippy/tests/ui/needless_return.stderr226
-rw-r--r--src/tools/clippy/tests/ui/needless_splitn.fixed47
-rw-r--r--src/tools/clippy/tests/ui/needless_splitn.rs47
-rw-r--r--src/tools/clippy/tests/ui/needless_splitn.stderr82
-rw-r--r--src/tools/clippy/tests/ui/needless_update.rs25
-rw-r--r--src/tools/clippy/tests/ui/needless_update.stderr10
-rw-r--r--src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.rs62
-rw-r--r--src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.stderr28
-rw-r--r--src/tools/clippy/tests/ui/neg_multiply.fixed48
-rw-r--r--src/tools/clippy/tests/ui/neg_multiply.rs48
-rw-r--r--src/tools/clippy/tests/ui/neg_multiply.stderr52
-rw-r--r--src/tools/clippy/tests/ui/never_loop.rs221
-rw-r--r--src/tools/clippy/tests/ui/never_loop.stderr105
-rw-r--r--src/tools/clippy/tests/ui/new_ret_no_self.rs352
-rw-r--r--src/tools/clippy/tests/ui/new_ret_no_self.stderr80
-rw-r--r--src/tools/clippy/tests/ui/new_without_default.rs228
-rw-r--r--src/tools/clippy/tests/ui/new_without_default.stderr124
-rw-r--r--src/tools/clippy/tests/ui/no_effect.rs143
-rw-r--r--src/tools/clippy/tests/ui/no_effect.stderr186
-rw-r--r--src/tools/clippy/tests/ui/no_effect_replace.rs51
-rw-r--r--src/tools/clippy/tests/ui/no_effect_replace.stderr52
-rw-r--r--src/tools/clippy/tests/ui/non_expressive_names.rs58
-rw-r--r--src/tools/clippy/tests/ui/non_expressive_names.stderr40
-rw-r--r--src/tools/clippy/tests/ui/non_octal_unix_permissions.fixed33
-rw-r--r--src/tools/clippy/tests/ui/non_octal_unix_permissions.rs33
-rw-r--r--src/tools/clippy/tests/ui/non_octal_unix_permissions.stderr28
-rw-r--r--src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs133
-rw-r--r--src/tools/clippy/tests/ui/non_send_fields_in_send_ty.stderr171
-rw-r--r--src/tools/clippy/tests/ui/nonminimal_bool.rs59
-rw-r--r--src/tools/clippy/tests/ui/nonminimal_bool.stderr111
-rw-r--r--src/tools/clippy/tests/ui/nonminimal_bool_methods.fixed111
-rw-r--r--src/tools/clippy/tests/ui/nonminimal_bool_methods.rs111
-rw-r--r--src/tools/clippy/tests/ui/nonminimal_bool_methods.stderr82
-rw-r--r--src/tools/clippy/tests/ui/numbered_fields.fixed39
-rw-r--r--src/tools/clippy/tests/ui/numbered_fields.rs47
-rw-r--r--src/tools/clippy/tests/ui/numbered_fields.stderr26
-rw-r--r--src/tools/clippy/tests/ui/obfuscated_if_else.fixed7
-rw-r--r--src/tools/clippy/tests/ui/obfuscated_if_else.rs7
-rw-r--r--src/tools/clippy/tests/ui/obfuscated_if_else.stderr10
-rw-r--r--src/tools/clippy/tests/ui/octal_escapes.rs20
-rw-r--r--src/tools/clippy/tests/ui/octal_escapes.stderr131
-rw-r--r--src/tools/clippy/tests/ui/ok_expect.rs27
-rw-r--r--src/tools/clippy/tests/ui/ok_expect.stderr43
-rw-r--r--src/tools/clippy/tests/ui/only_used_in_recursion.rs122
-rw-r--r--src/tools/clippy/tests/ui/only_used_in_recursion.stderr82
-rw-r--r--src/tools/clippy/tests/ui/op_ref.rs94
-rw-r--r--src/tools/clippy/tests/ui/op_ref.stderr38
-rw-r--r--src/tools/clippy/tests/ui/open_options.rs14
-rw-r--r--src/tools/clippy/tests/ui/open_options.stderr46
-rw-r--r--src/tools/clippy/tests/ui/option_as_ref_deref.fixed44
-rw-r--r--src/tools/clippy/tests/ui/option_as_ref_deref.rs47
-rw-r--r--src/tools/clippy/tests/ui/option_as_ref_deref.stderr110
-rw-r--r--src/tools/clippy/tests/ui/option_env_unwrap.rs24
-rw-r--r--src/tools/clippy/tests/ui/option_env_unwrap.stderr61
-rw-r--r--src/tools/clippy/tests/ui/option_filter_map.fixed25
-rw-r--r--src/tools/clippy/tests/ui/option_filter_map.rs27
-rw-r--r--src/tools/clippy/tests/ui/option_filter_map.stderr56
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.fixed182
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.rs211
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.stderr210
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_none.fixed26
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_none.rs28
-rw-r--r--src/tools/clippy/tests/ui/option_map_or_none.stderr53
-rw-r--r--src/tools/clippy/tests/ui/option_map_unit_fn_fixable.fixed88
-rw-r--r--src/tools/clippy/tests/ui/option_map_unit_fn_fixable.rs88
-rw-r--r--src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr156
-rw-r--r--src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.rs39
-rw-r--r--src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.stderr27
-rw-r--r--src/tools/clippy/tests/ui/option_option.rs89
-rw-r--r--src/tools/clippy/tests/ui/option_option.stderr80
-rw-r--r--src/tools/clippy/tests/ui/option_take_on_temporary.fixed15
-rw-r--r--src/tools/clippy/tests/ui/or_fun_call.fixed229
-rw-r--r--src/tools/clippy/tests/ui/or_fun_call.rs229
-rw-r--r--src/tools/clippy/tests/ui/or_fun_call.stderr136
-rw-r--r--src/tools/clippy/tests/ui/or_then_unwrap.fixed52
-rw-r--r--src/tools/clippy/tests/ui/or_then_unwrap.rs52
-rw-r--r--src/tools/clippy/tests/ui/or_then_unwrap.stderr22
-rw-r--r--src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.rs11
-rw-r--r--src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.stderr16
-rw-r--r--src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.rs22
-rw-r--r--src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.stderr40
-rw-r--r--src/tools/clippy/tests/ui/overflow_check_conditional.rs25
-rw-r--r--src/tools/clippy/tests/ui/overflow_check_conditional.stderr52
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn.rs70
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn.stderr99
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn_assertions.rs48
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr54
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn_debug_assertions.rs43
-rw-r--r--src/tools/clippy/tests/ui/panicking_macros.rs95
-rw-r--r--src/tools/clippy/tests/ui/panicking_macros.stderr106
-rw-r--r--src/tools/clippy/tests/ui/partialeq_ne_impl.rs26
-rw-r--r--src/tools/clippy/tests/ui/partialeq_ne_impl.stderr12
-rw-r--r--src/tools/clippy/tests/ui/path_buf_push_overwrite.fixed8
-rw-r--r--src/tools/clippy/tests/ui/path_buf_push_overwrite.rs8
-rw-r--r--src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr10
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs49
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.stderr19
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs24
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr27
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs45
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr67
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs57
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr83
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs146
-rw-r--r--src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr79
-rw-r--r--src/tools/clippy/tests/ui/patterns.fixed36
-rw-r--r--src/tools/clippy/tests/ui/patterns.rs36
-rw-r--r--src/tools/clippy/tests/ui/patterns.stderr22
-rw-r--r--src/tools/clippy/tests/ui/precedence.fixed61
-rw-r--r--src/tools/clippy/tests/ui/precedence.rs61
-rw-r--r--src/tools/clippy/tests/ui/precedence.stderr76
-rw-r--r--src/tools/clippy/tests/ui/print.rs35
-rw-r--r--src/tools/clippy/tests/ui/print.stderr54
-rw-r--r--src/tools/clippy/tests/ui/print_in_format_impl.rs58
-rw-r--r--src/tools/clippy/tests/ui/print_in_format_impl.stderr46
-rw-r--r--src/tools/clippy/tests/ui/print_literal.rs38
-rw-r--r--src/tools/clippy/tests/ui/print_literal.stderr135
-rw-r--r--src/tools/clippy/tests/ui/print_stderr.rs8
-rw-r--r--src/tools/clippy/tests/ui/print_stderr.stderr16
-rw-r--r--src/tools/clippy/tests/ui/print_stdout_build_script.rs12
-rw-r--r--src/tools/clippy/tests/ui/print_with_newline.rs52
-rw-r--r--src/tools/clippy/tests/ui/print_with_newline.stderr129
-rw-r--r--src/tools/clippy/tests/ui/println_empty_string.fixed18
-rw-r--r--src/tools/clippy/tests/ui/println_empty_string.rs18
-rw-r--r--src/tools/clippy/tests/ui/println_empty_string.stderr28
-rw-r--r--src/tools/clippy/tests/ui/proc_macro.rs26
-rw-r--r--src/tools/clippy/tests/ui/proc_macro.stderr11
-rw-r--r--src/tools/clippy/tests/ui/ptr_arg.rs209
-rw-r--r--src/tools/clippy/tests/ui/ptr_arg.stderr166
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.fixed65
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.rs65
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.stderr57
-rw-r--r--src/tools/clippy/tests/ui/ptr_eq.fixed38
-rw-r--r--src/tools/clippy/tests/ui/ptr_eq.rs38
-rw-r--r--src/tools/clippy/tests/ui/ptr_eq.stderr16
-rw-r--r--src/tools/clippy/tests/ui/ptr_offset_with_cast.fixed20
-rw-r--r--src/tools/clippy/tests/ui/ptr_offset_with_cast.rs20
-rw-r--r--src/tools/clippy/tests/ui/ptr_offset_with_cast.stderr16
-rw-r--r--src/tools/clippy/tests/ui/pub_use.rs14
-rw-r--r--src/tools/clippy/tests/ui/pub_use.stderr11
-rw-r--r--src/tools/clippy/tests/ui/question_mark.fixed210
-rw-r--r--src/tools/clippy/tests/ui/question_mark.rs246
-rw-r--r--src/tools/clippy/tests/ui/question_mark.stderr134
-rw-r--r--src/tools/clippy/tests/ui/range.rs16
-rw-r--r--src/tools/clippy/tests/ui/range.stderr10
-rw-r--r--src/tools/clippy/tests/ui/range_contains.fixed64
-rw-r--r--src/tools/clippy/tests/ui/range_contains.rs64
-rw-r--r--src/tools/clippy/tests/ui/range_contains.stderr124
-rw-r--r--src/tools/clippy/tests/ui/range_plus_minus_one.fixed42
-rw-r--r--src/tools/clippy/tests/ui/range_plus_minus_one.rs42
-rw-r--r--src/tools/clippy/tests/ui/range_plus_minus_one.stderr60
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer.fixed28
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer.rs28
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer.stderr52
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer_arc.fixed27
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer_arc.rs27
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer_arc.stderr52
-rw-r--r--src/tools/clippy/tests/ui/rc_buffer_redefined_string.rs12
-rw-r--r--src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.rs68
-rw-r--r--src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.stderr109
-rw-r--r--src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.rs69
-rw-r--r--src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.stderr109
-rw-r--r--src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.rs83
-rw-r--r--src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.stderr201
-rw-r--r--src/tools/clippy/tests/ui/rc_mutex.rs36
-rw-r--r--src/tools/clippy/tests/ui/rc_mutex.stderr35
-rw-r--r--src/tools/clippy/tests/ui/read_zero_byte_vec.rs87
-rw-r--r--src/tools/clippy/tests/ui/read_zero_byte_vec.stderr64
-rw-r--r--src/tools/clippy/tests/ui/recursive_format_impl.rs322
-rw-r--r--src/tools/clippy/tests/ui/recursive_format_impl.stderr82
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation.rs135
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation.stderr183
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed75
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation_fixable.rs75
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr99
-rw-r--r--src/tools/clippy/tests/ui/redundant_clone.fixed241
-rw-r--r--src/tools/clippy/tests/ui/redundant_clone.rs241
-rw-r--r--src/tools/clippy/tests/ui/redundant_clone.stderr183
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_early.rs20
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_early.stderr16
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed8
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs8
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr10
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_late.rs40
-rw-r--r--src/tools/clippy/tests/ui/redundant_closure_call_late.stderr22
-rw-r--r--src/tools/clippy/tests/ui/redundant_else.rs154
-rw-r--r--src/tools/clippy/tests/ui/redundant_else.stderr80
-rw-r--r--src/tools/clippy/tests/ui/redundant_field_names.fixed71
-rw-r--r--src/tools/clippy/tests/ui/redundant_field_names.rs71
-rw-r--r--src/tools/clippy/tests/ui/redundant_field_names.stderr46
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.fixed58
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.rs58
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr171
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed73
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs91
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr130
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed88
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs103
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr146
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed76
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs91
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr128
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed110
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs128
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr154
-rw-r--r--src/tools/clippy/tests/ui/redundant_pub_crate.fixed117
-rw-r--r--src/tools/clippy/tests/ui/redundant_pub_crate.rs117
-rw-r--r--src/tools/clippy/tests/ui/redundant_pub_crate.stderr132
-rw-r--r--src/tools/clippy/tests/ui/redundant_slicing.fixed46
-rw-r--r--src/tools/clippy/tests/ui/redundant_slicing.rs46
-rw-r--r--src/tools/clippy/tests/ui/redundant_slicing.stderr22
-rw-r--r--src/tools/clippy/tests/ui/redundant_static_lifetimes.fixed56
-rw-r--r--src/tools/clippy/tests/ui/redundant_static_lifetimes.rs56
-rw-r--r--src/tools/clippy/tests/ui/redundant_static_lifetimes.stderr100
-rw-r--r--src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.rs13
-rw-r--r--src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.stderr64
-rw-r--r--src/tools/clippy/tests/ui/ref_binding_to_reference.rs85
-rw-r--r--src/tools/clippy/tests/ui/ref_binding_to_reference.stderr88
-rw-r--r--src/tools/clippy/tests/ui/ref_option_ref.rs47
-rw-r--r--src/tools/clippy/tests/ui/ref_option_ref.stderr70
-rw-r--r--src/tools/clippy/tests/ui/regex.rs82
-rw-r--r--src/tools/clippy/tests/ui/regex.stderr171
-rw-r--r--src/tools/clippy/tests/ui/rename.fixed72
-rw-r--r--src/tools/clippy/tests/ui/rename.rs72
-rw-r--r--src/tools/clippy/tests/ui/rename.stderr214
-rw-r--r--src/tools/clippy/tests/ui/renamed_builtin_attr.fixed4
-rw-r--r--src/tools/clippy/tests/ui/renamed_builtin_attr.rs4
-rw-r--r--src/tools/clippy/tests/ui/renamed_builtin_attr.stderr8
-rw-r--r--src/tools/clippy/tests/ui/repeat_once.fixed16
-rw-r--r--src/tools/clippy/tests/ui/repeat_once.rs16
-rw-r--r--src/tools/clippy/tests/ui/repeat_once.stderr40
-rw-r--r--src/tools/clippy/tests/ui/repl_uninit.rs41
-rw-r--r--src/tools/clippy/tests/ui/repl_uninit.stderr30
-rw-r--r--src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.rs57
-rw-r--r--src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.stderr27
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.fixed19
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.rs19
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.stderr10
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_fixable.fixed82
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_fixable.rs82
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr148
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.rs46
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr58
-rw-r--r--src/tools/clippy/tests/ui/result_unit_error.rs56
-rw-r--r--src/tools/clippy/tests/ui/result_unit_error.stderr43
-rw-r--r--src/tools/clippy/tests/ui/return_self_not_must_use.rs58
-rw-r--r--src/tools/clippy/tests/ui/return_self_not_must_use.stderr31
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.fixed29
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.rs29
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.stderr47
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.fixed57
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.rs57
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.stderr69
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.rs11
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.stderr16
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.rs15
-rw-r--r--src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.stderr22
-rw-r--r--src/tools/clippy/tests/ui/same_functions_in_if_condition.rs109
-rw-r--r--src/tools/clippy/tests/ui/same_functions_in_if_condition.stderr75
-rw-r--r--src/tools/clippy/tests/ui/same_item_push.rs158
-rw-r--r--src/tools/clippy/tests/ui/same_item_push.stderr43
-rw-r--r--src/tools/clippy/tests/ui/same_name_method.rs127
-rw-r--r--src/tools/clippy/tests/ui/same_name_method.stderr64
-rw-r--r--src/tools/clippy/tests/ui/search_is_some.rs79
-rw-r--r--src/tools/clippy/tests/ui/search_is_some.stderr87
-rw-r--r--src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed216
-rw-r--r--src/tools/clippy/tests/ui/search_is_some_fixable_none.rs222
-rw-r--r--src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr285
-rw-r--r--src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed248
-rw-r--r--src/tools/clippy/tests/ui/search_is_some_fixable_some.rs251
-rw-r--r--src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr292
-rw-r--r--src/tools/clippy/tests/ui/self_assignment.rs67
-rw-r--r--src/tools/clippy/tests/ui/self_assignment.stderr70
-rw-r--r--src/tools/clippy/tests/ui/self_named_constructors.rs59
-rw-r--r--src/tools/clippy/tests/ui/self_named_constructors.stderr12
-rw-r--r--src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs122
-rw-r--r--src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr34
-rw-r--r--src/tools/clippy/tests/ui/serde.rs47
-rw-r--r--src/tools/clippy/tests/ui/serde.stderr15
-rw-r--r--src/tools/clippy/tests/ui/shadow.rs98
-rw-r--r--src/tools/clippy/tests/ui/shadow.stderr281
-rw-r--r--src/tools/clippy/tests/ui/short_circuit_statement.fixed18
-rw-r--r--src/tools/clippy/tests/ui/short_circuit_statement.rs18
-rw-r--r--src/tools/clippy/tests/ui/short_circuit_statement.stderr22
-rw-r--r--src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs84
-rw-r--r--src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs87
-rw-r--r--src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr143
-rw-r--r--src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs88
-rw-r--r--src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr153
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs630
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr497
-rw-r--r--src/tools/clippy/tests/ui/similar_names.rs121
-rw-r--r--src/tools/clippy/tests/ui/similar_names.stderr87
-rw-r--r--src/tools/clippy/tests/ui/single_char_add_str.fixed45
-rw-r--r--src/tools/clippy/tests/ui/single_char_add_str.rs45
-rw-r--r--src/tools/clippy/tests/ui/single_char_add_str.stderr94
-rw-r--r--src/tools/clippy/tests/ui/single_char_lifetime_names.rs44
-rw-r--r--src/tools/clippy/tests/ui/single_char_lifetime_names.stderr43
-rw-r--r--src/tools/clippy/tests/ui/single_char_pattern.fixed67
-rw-r--r--src/tools/clippy/tests/ui/single_char_pattern.rs67
-rw-r--r--src/tools/clippy/tests/ui/single_char_pattern.stderr238
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.fixed33
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.rs33
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.stderr16
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_macro.rs20
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs16
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr25
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs15
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs16
-rw-r--r--src/tools/clippy/tests/ui/single_element_loop.fixed36
-rw-r--r--src/tools/clippy/tests/ui/single_element_loop.rs30
-rw-r--r--src/tools/clippy/tests/ui/single_element_loop.stderr99
-rw-r--r--src/tools/clippy/tests/ui/single_match.rs245
-rw-r--r--src/tools/clippy/tests/ui/single_match.stderr159
-rw-r--r--src/tools/clippy/tests/ui/single_match_else.rs119
-rw-r--r--src/tools/clippy/tests/ui/single_match_else.stderr104
-rw-r--r--src/tools/clippy/tests/ui/size_of_in_element_count/expressions.rs37
-rw-r--r--src/tools/clippy/tests/ui/size_of_in_element_count/expressions.stderr35
-rw-r--r--src/tools/clippy/tests/ui/size_of_in_element_count/functions.rs46
-rw-r--r--src/tools/clippy/tests/ui/size_of_in_element_count/functions.stderr171
-rw-r--r--src/tools/clippy/tests/ui/skip_while_next.rs29
-rw-r--r--src/tools/clippy/tests/ui/skip_while_next.stderr23
-rw-r--r--src/tools/clippy/tests/ui/slow_vector_initialization.rs69
-rw-r--r--src/tools/clippy/tests/ui/slow_vector_initialization.stderr76
-rw-r--r--src/tools/clippy/tests/ui/stable_sort_primitive.fixed32
-rw-r--r--src/tools/clippy/tests/ui/stable_sort_primitive.rs32
-rw-r--r--src/tools/clippy/tests/ui/stable_sort_primitive.stderr59
-rw-r--r--src/tools/clippy/tests/ui/starts_ends_with.fixed54
-rw-r--r--src/tools/clippy/tests/ui/starts_ends_with.rs54
-rw-r--r--src/tools/clippy/tests/ui/starts_ends_with.stderr102
-rw-r--r--src/tools/clippy/tests/ui/std_instead_of_core.rs45
-rw-r--r--src/tools/clippy/tests/ui/std_instead_of_core.stderr93
-rw-r--r--src/tools/clippy/tests/ui/str_to_string.rs7
-rw-r--r--src/tools/clippy/tests/ui/str_to_string.stderr19
-rw-r--r--src/tools/clippy/tests/ui/string_add.rs26
-rw-r--r--src/tools/clippy/tests/ui/string_add.stderr30
-rw-r--r--src/tools/clippy/tests/ui/string_add_assign.fixed21
-rw-r--r--src/tools/clippy/tests/ui/string_add_assign.rs21
-rw-r--r--src/tools/clippy/tests/ui/string_add_assign.stderr24
-rw-r--r--src/tools/clippy/tests/ui/string_extend.fixed32
-rw-r--r--src/tools/clippy/tests/ui/string_extend.rs32
-rw-r--r--src/tools/clippy/tests/ui/string_extend.stderr22
-rw-r--r--src/tools/clippy/tests/ui/string_from_utf8_as_bytes.fixed6
-rw-r--r--src/tools/clippy/tests/ui/string_from_utf8_as_bytes.rs6
-rw-r--r--src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr10
-rw-r--r--src/tools/clippy/tests/ui/string_lit_as_bytes.fixed30
-rw-r--r--src/tools/clippy/tests/ui/string_lit_as_bytes.rs30
-rw-r--r--src/tools/clippy/tests/ui/string_lit_as_bytes.stderr40
-rw-r--r--src/tools/clippy/tests/ui/string_slice.rs10
-rw-r--r--src/tools/clippy/tests/ui/string_slice.stderr22
-rw-r--r--src/tools/clippy/tests/ui/string_to_string.rs7
-rw-r--r--src/tools/clippy/tests/ui/string_to_string.stderr11
-rw-r--r--src/tools/clippy/tests/ui/strlen_on_c_strings.fixed34
-rw-r--r--src/tools/clippy/tests/ui/strlen_on_c_strings.rs34
-rw-r--r--src/tools/clippy/tests/ui/strlen_on_c_strings.stderr46
-rw-r--r--src/tools/clippy/tests/ui/struct_excessive_bools.rs44
-rw-r--r--src/tools/clippy/tests/ui/struct_excessive_bools.stderr29
-rw-r--r--src/tools/clippy/tests/ui/suspicious_arithmetic_impl.rs170
-rw-r--r--src/tools/clippy/tests/ui/suspicious_arithmetic_impl.stderr60
-rw-r--r--src/tools/clippy/tests/ui/suspicious_else_formatting.rs115
-rw-r--r--src/tools/clippy/tests/ui/suspicious_else_formatting.stderr90
-rw-r--r--src/tools/clippy/tests/ui/suspicious_map.rs32
-rw-r--r--src/tools/clippy/tests/ui/suspicious_map.stderr19
-rw-r--r--src/tools/clippy/tests/ui/suspicious_operation_groupings.fixed209
-rw-r--r--src/tools/clippy/tests/ui/suspicious_operation_groupings.rs209
-rw-r--r--src/tools/clippy/tests/ui/suspicious_operation_groupings.stderr160
-rw-r--r--src/tools/clippy/tests/ui/suspicious_splitn.rs21
-rw-r--r--src/tools/clippy/tests/ui/suspicious_splitn.stderr75
-rw-r--r--src/tools/clippy/tests/ui/suspicious_unary_op_formatting.rs23
-rw-r--r--src/tools/clippy/tests/ui/suspicious_unary_op_formatting.stderr35
-rw-r--r--src/tools/clippy/tests/ui/swap.fixed157
-rw-r--r--src/tools/clippy/tests/ui/swap.rs181
-rw-r--r--src/tools/clippy/tests/ui/swap.stderr122
-rw-r--r--src/tools/clippy/tests/ui/swap_ptr_to_ref.fixed24
-rw-r--r--src/tools/clippy/tests/ui/swap_ptr_to_ref.rs24
-rw-r--r--src/tools/clippy/tests/ui/swap_ptr_to_ref.stderr28
-rw-r--r--src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.rs18
-rw-r--r--src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.stderr22
-rw-r--r--src/tools/clippy/tests/ui/tabs_in_doc_comments.fixed22
-rw-r--r--src/tools/clippy/tests/ui/tabs_in_doc_comments.rs22
-rw-r--r--src/tools/clippy/tests/ui/tabs_in_doc_comments.stderr52
-rw-r--r--src/tools/clippy/tests/ui/temporary_assignment.rs71
-rw-r--r--src/tools/clippy/tests/ui/temporary_assignment.stderr32
-rw-r--r--src/tools/clippy/tests/ui/to_digit_is_some.fixed11
-rw-r--r--src/tools/clippy/tests/ui/to_digit_is_some.rs11
-rw-r--r--src/tools/clippy/tests/ui/to_digit_is_some.stderr16
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg.fixed50
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg.rs50
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg.stderr45
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs33
-rw-r--r--src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr21
-rw-r--r--src/tools/clippy/tests/ui/trailing_empty_array.rs185
-rw-r--r--src/tools/clippy/tests/ui/trailing_empty_array.stderr120
-rw-r--r--src/tools/clippy/tests/ui/trailing_zeros.rs10
-rw-r--r--src/tools/clippy/tests/ui/trailing_zeros.stderr16
-rw-r--r--src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs212
-rw-r--r--src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr167
-rw-r--r--src/tools/clippy/tests/ui/transmute.rs162
-rw-r--r--src/tools/clippy/tests/ui/transmute.stderr244
-rw-r--r--src/tools/clippy/tests/ui/transmute_32bit.rs14
-rw-r--r--src/tools/clippy/tests/ui/transmute_32bit.stderr28
-rw-r--r--src/tools/clippy/tests/ui/transmute_64bit.rs10
-rw-r--r--src/tools/clippy/tests/ui/transmute_64bit.stderr16
-rw-r--r--src/tools/clippy/tests/ui/transmute_collection.rs50
-rw-r--r--src/tools/clippy/tests/ui/transmute_collection.stderr112
-rw-r--r--src/tools/clippy/tests/ui/transmute_float_to_int.rs25
-rw-r--r--src/tools/clippy/tests/ui/transmute_float_to_int.stderr40
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs63
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr40
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed78
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs78
-rw-r--r--src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr136
-rw-r--r--src/tools/clippy/tests/ui/transmute_undefined_repr.rs144
-rw-r--r--src/tools/clippy/tests/ui/transmute_undefined_repr.stderr80
-rw-r--r--src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed77
-rw-r--r--src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs77
-rw-r--r--src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr56
-rw-r--r--src/tools/clippy/tests/ui/transmuting_null.rs30
-rw-r--r--src/tools/clippy/tests/ui/transmuting_null.stderr22
-rw-r--r--src/tools/clippy/tests/ui/trim_split_whitespace.fixed91
-rw-r--r--src/tools/clippy/tests/ui/trim_split_whitespace.rs91
-rw-r--r--src/tools/clippy/tests/ui/trim_split_whitespace.stderr52
-rw-r--r--src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs168
-rw-r--r--src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr116
-rw-r--r--src/tools/clippy/tests/ui/try_err.fixed170
-rw-r--r--src/tools/clippy/tests/ui/try_err.rs170
-rw-r--r--src/tools/clippy/tests/ui/try_err.stderr84
-rw-r--r--src/tools/clippy/tests/ui/ty_fn_sig.rs14
-rw-r--r--src/tools/clippy/tests/ui/type_complexity.rs69
-rw-r--r--src/tools/clippy/tests/ui/type_complexity.stderr94
-rw-r--r--src/tools/clippy/tests/ui/type_repetition_in_bounds.rs97
-rw-r--r--src/tools/clippy/tests/ui/type_repetition_in_bounds.stderr39
-rw-r--r--src/tools/clippy/tests/ui/types.fixed15
-rw-r--r--src/tools/clippy/tests/ui/types.rs15
-rw-r--r--src/tools/clippy/tests/ui/types.stderr10
-rw-r--r--src/tools/clippy/tests/ui/undocumented_unsafe_blocks.rs493
-rw-r--r--src/tools/clippy/tests/ui/undocumented_unsafe_blocks.stderr267
-rw-r--r--src/tools/clippy/tests/ui/undropped_manually_drops.rs26
-rw-r--r--src/tools/clippy/tests/ui/undropped_manually_drops.stderr19
-rw-r--r--src/tools/clippy/tests/ui/unicode.fixed36
-rw-r--r--src/tools/clippy/tests/ui/unicode.rs36
-rw-r--r--src/tools/clippy/tests/ui/unicode.stderr50
-rw-r--r--src/tools/clippy/tests/ui/uninit.rs26
-rw-r--r--src/tools/clippy/tests/ui/uninit.stderr22
-rw-r--r--src/tools/clippy/tests/ui/uninit_vec.rs94
-rw-r--r--src/tools/clippy/tests/ui/uninit_vec.stderr105
-rw-r--r--src/tools/clippy/tests/ui/unit_arg.rs133
-rw-r--r--src/tools/clippy/tests/ui/unit_arg.stderr187
-rw-r--r--src/tools/clippy/tests/ui/unit_arg_empty_blocks.fixed30
-rw-r--r--src/tools/clippy/tests/ui/unit_arg_empty_blocks.rs27
-rw-r--r--src/tools/clippy/tests/ui/unit_arg_empty_blocks.stderr45
-rw-r--r--src/tools/clippy/tests/ui/unit_cmp.rs61
-rw-r--r--src/tools/clippy/tests/ui/unit_cmp.stderr74
-rw-r--r--src/tools/clippy/tests/ui/unit_hash.rs28
-rw-r--r--src/tools/clippy/tests/ui/unit_hash.stderr27
-rw-r--r--src/tools/clippy/tests/ui/unit_return_expecting_ord.rs36
-rw-r--r--src/tools/clippy/tests/ui/unit_return_expecting_ord.stderr39
-rw-r--r--src/tools/clippy/tests/ui/unknown_attribute.rs3
-rw-r--r--src/tools/clippy/tests/ui/unknown_attribute.stderr8
-rw-r--r--src/tools/clippy/tests/ui/unknown_clippy_lints.fixed18
-rw-r--r--src/tools/clippy/tests/ui/unknown_clippy_lints.rs18
-rw-r--r--src/tools/clippy/tests/ui/unknown_clippy_lints.stderr52
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast.fixed91
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast.rs91
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast.stderr154
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_clone.rs110
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_clone.stderr106
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_filter_map.rs150
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_filter_map.stderr38
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_find_map.rs23
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_find_map.stderr38
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_fold.fixed52
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_fold.rs52
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_fold.stderr40
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed142
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs142
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr35
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_join.fixed35
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_join.rs37
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_join.stderr20
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed132
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs132
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr283
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.rs22
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.stderr28
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.fixed79
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.rs83
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_operation.stderr128
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.fixed22
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.rs22
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.stderr16
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_self_imports.fixed10
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_self_imports.rs10
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_self_imports.stderr23
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_sort_by.fixed103
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_sort_by.rs103
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_sort_by.stderr76
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_to_owned.fixed331
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_to_owned.rs331
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_to_owned.stderr513
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_wraps.rs144
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_wraps.stderr156
-rw-r--r--src/tools/clippy/tests/ui/unneeded_field_pattern.rs22
-rw-r--r--src/tools/clippy/tests/ui/unneeded_field_pattern.stderr19
-rw-r--r--src/tools/clippy/tests/ui/unneeded_wildcard_pattern.fixed45
-rw-r--r--src/tools/clippy/tests/ui/unneeded_wildcard_pattern.rs45
-rw-r--r--src/tools/clippy/tests/ui/unneeded_wildcard_pattern.stderr92
-rw-r--r--src/tools/clippy/tests/ui/unnested_or_patterns.fixed35
-rw-r--r--src/tools/clippy/tests/ui/unnested_or_patterns.rs35
-rw-r--r--src/tools/clippy/tests/ui/unnested_or_patterns.stderr179
-rw-r--r--src/tools/clippy/tests/ui/unnested_or_patterns2.fixed17
-rw-r--r--src/tools/clippy/tests/ui/unnested_or_patterns2.rs17
-rw-r--r--src/tools/clippy/tests/ui/unnested_or_patterns2.stderr91
-rw-r--r--src/tools/clippy/tests/ui/unreadable_literal.fixed46
-rw-r--r--src/tools/clippy/tests/ui/unreadable_literal.rs46
-rw-r--r--src/tools/clippy/tests/ui/unreadable_literal.stderr72
-rw-r--r--src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs70
-rw-r--r--src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr39
-rw-r--r--src/tools/clippy/tests/ui/unsafe_removed_from_name.rs27
-rw-r--r--src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr22
-rw-r--r--src/tools/clippy/tests/ui/unseparated_prefix_literals.fixed42
-rw-r--r--src/tools/clippy/tests/ui/unseparated_prefix_literals.rs42
-rw-r--r--src/tools/clippy/tests/ui/unseparated_prefix_literals.stderr63
-rw-r--r--src/tools/clippy/tests/ui/unused_async.rs48
-rw-r--r--src/tools/clippy/tests/ui/unused_async.stderr23
-rw-r--r--src/tools/clippy/tests/ui/unused_io_amount.rs117
-rw-r--r--src/tools/clippy/tests/ui/unused_io_amount.stderr131
-rw-r--r--src/tools/clippy/tests/ui/unused_rounding.fixed9
-rw-r--r--src/tools/clippy/tests/ui/unused_rounding.rs9
-rw-r--r--src/tools/clippy/tests/ui/unused_rounding.stderr22
-rw-r--r--src/tools/clippy/tests/ui/unused_self.rs149
-rw-r--r--src/tools/clippy/tests/ui/unused_self.stderr75
-rw-r--r--src/tools/clippy/tests/ui/unused_unit.fixed89
-rw-r--r--src/tools/clippy/tests/ui/unused_unit.rs89
-rw-r--r--src/tools/clippy/tests/ui/unused_unit.stderr122
-rw-r--r--src/tools/clippy/tests/ui/unwrap.rs16
-rw-r--r--src/tools/clippy/tests/ui/unwrap.stderr19
-rw-r--r--src/tools/clippy/tests/ui/unwrap_in_result.rs44
-rw-r--r--src/tools/clippy/tests/ui/unwrap_in_result.stderr41
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or.rs9
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or.stderr16
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or_else_default.fixed74
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or_else_default.rs74
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or_else_default.stderr34
-rwxr-xr-xsrc/tools/clippy/tests/ui/update-all-references.sh3
-rw-r--r--src/tools/clippy/tests/ui/upper_case_acronyms.rs41
-rw-r--r--src/tools/clippy/tests/ui/upper_case_acronyms.stderr58
-rw-r--r--src/tools/clippy/tests/ui/use_self.fixed610
-rw-r--r--src/tools/clippy/tests/ui/use_self.rs610
-rw-r--r--src/tools/clippy/tests/ui/use_self.stderr250
-rw-r--r--src/tools/clippy/tests/ui/use_self_trait.fixed115
-rw-r--r--src/tools/clippy/tests/ui/use_self_trait.rs115
-rw-r--r--src/tools/clippy/tests/ui/use_self_trait.stderr88
-rw-r--r--src/tools/clippy/tests/ui/used_underscore_binding.rs124
-rw-r--r--src/tools/clippy/tests/ui/used_underscore_binding.stderr40
-rw-r--r--src/tools/clippy/tests/ui/useful_asref.rs13
-rw-r--r--src/tools/clippy/tests/ui/useless_asref.fixed136
-rw-r--r--src/tools/clippy/tests/ui/useless_asref.rs136
-rw-r--r--src/tools/clippy/tests/ui/useless_asref.stderr74
-rw-r--r--src/tools/clippy/tests/ui/useless_attribute.fixed75
-rw-r--r--src/tools/clippy/tests/ui/useless_attribute.rs75
-rw-r--r--src/tools/clippy/tests/ui/useless_attribute.stderr22
-rw-r--r--src/tools/clippy/tests/ui/useless_conversion.fixed92
-rw-r--r--src/tools/clippy/tests/ui/useless_conversion.rs92
-rw-r--r--src/tools/clippy/tests/ui/useless_conversion.stderr92
-rw-r--r--src/tools/clippy/tests/ui/useless_conversion_try.rs40
-rw-r--r--src/tools/clippy/tests/ui/useless_conversion_try.stderr79
-rw-r--r--src/tools/clippy/tests/ui/vec.fixed78
-rw-r--r--src/tools/clippy/tests/ui/vec.rs78
-rw-r--r--src/tools/clippy/tests/ui/vec.stderr70
-rw-r--r--src/tools/clippy/tests/ui/vec_box_sized.fixed54
-rw-r--r--src/tools/clippy/tests/ui/vec_box_sized.rs54
-rw-r--r--src/tools/clippy/tests/ui/vec_box_sized.stderr40
-rw-r--r--src/tools/clippy/tests/ui/vec_init_then_push.rs112
-rw-r--r--src/tools/clippy/tests/ui/vec_init_then_push.stderr73
-rw-r--r--src/tools/clippy/tests/ui/vec_resize_to_zero.rs15
-rw-r--r--src/tools/clippy/tests/ui/vec_resize_to_zero.stderr13
-rw-r--r--src/tools/clippy/tests/ui/verbose_file_reads.rs28
-rw-r--r--src/tools/clippy/tests/ui/verbose_file_reads.stderr19
-rw-r--r--src/tools/clippy/tests/ui/vtable_address_comparisons.rs44
-rw-r--r--src/tools/clippy/tests/ui/vtable_address_comparisons.stderr83
-rw-r--r--src/tools/clippy/tests/ui/while_let_loop.rs145
-rw-r--r--src/tools/clippy/tests/ui/while_let_loop.stderr63
-rw-r--r--src/tools/clippy/tests/ui/while_let_on_iterator.fixed453
-rw-r--r--src/tools/clippy/tests/ui/while_let_on_iterator.rs453
-rw-r--r--src/tools/clippy/tests/ui/while_let_on_iterator.stderr160
-rw-r--r--src/tools/clippy/tests/ui/wild_in_or_pats.rs36
-rw-r--r--src/tools/clippy/tests/ui/wild_in_or_pats.stderr35
-rw-r--r--src/tools/clippy/tests/ui/wildcard_enum_match_arm.fixed104
-rw-r--r--src/tools/clippy/tests/ui/wildcard_enum_match_arm.rs104
-rw-r--r--src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr44
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports.fixed245
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports.rs246
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports.stderr132
-rw-r--r--src/tools/clippy/tests/ui/write_literal.rs43
-rw-r--r--src/tools/clippy/tests/ui/write_literal.stderr135
-rw-r--r--src/tools/clippy/tests/ui/write_literal_2.rs27
-rw-r--r--src/tools/clippy/tests/ui/write_literal_2.stderr112
-rw-r--r--src/tools/clippy/tests/ui/write_with_newline.rs59
-rw-r--r--src/tools/clippy/tests/ui/write_with_newline.stderr133
-rw-r--r--src/tools/clippy/tests/ui/writeln_empty_string.fixed20
-rw-r--r--src/tools/clippy/tests/ui/writeln_empty_string.rs20
-rw-r--r--src/tools/clippy/tests/ui/writeln_empty_string.stderr16
-rw-r--r--src/tools/clippy/tests/ui/wrong_self_convention.rs206
-rw-r--r--src/tools/clippy/tests/ui/wrong_self_convention.stderr195
-rw-r--r--src/tools/clippy/tests/ui/wrong_self_convention2.rs116
-rw-r--r--src/tools/clippy/tests/ui/wrong_self_convention2.stderr19
-rw-r--r--src/tools/clippy/tests/ui/wrong_self_conventions_mut.rs29
-rw-r--r--src/tools/clippy/tests/ui/wrong_self_conventions_mut.stderr19
-rw-r--r--src/tools/clippy/tests/ui/zero_div_zero.rs13
-rw-r--r--src/tools/clippy/tests/ui/zero_div_zero.stderr35
-rw-r--r--src/tools/clippy/tests/ui/zero_offset.rs19
-rw-r--r--src/tools/clippy/tests/ui/zero_offset.stderr52
-rw-r--r--src/tools/clippy/tests/ui/zero_ptr.fixed14
-rw-r--r--src/tools/clippy/tests/ui/zero_ptr.rs14
-rw-r--r--src/tools/clippy/tests/ui/zero_ptr.stderr34
-rw-r--r--src/tools/clippy/tests/ui/zero_sized_btreemap_values.rs68
-rw-r--r--src/tools/clippy/tests/ui/zero_sized_btreemap_values.stderr107
-rw-r--r--src/tools/clippy/tests/ui/zero_sized_hashmap_values.rs68
-rw-r--r--src/tools/clippy/tests/ui/zero_sized_hashmap_values.stderr107
-rw-r--r--src/tools/clippy/tests/versioncheck.rs89
-rw-r--r--src/tools/clippy/tests/workspace.rs107
-rw-r--r--src/tools/clippy/tests/workspace_test/Cargo.toml7
-rw-r--r--src/tools/clippy/tests/workspace_test/build.rs7
-rw-r--r--src/tools/clippy/tests/workspace_test/path_dep/Cargo.toml3
-rw-r--r--src/tools/clippy/tests/workspace_test/path_dep/src/lib.rs6
-rw-r--r--src/tools/clippy/tests/workspace_test/src/main.rs3
-rw-r--r--src/tools/clippy/tests/workspace_test/subcrate/Cargo.toml6
-rw-r--r--src/tools/clippy/tests/workspace_test/subcrate/src/lib.rs1
-rw-r--r--src/tools/clippy/triagebot.toml12
-rwxr-xr-xsrc/tools/clippy/util/etc/pre-commit.sh22
-rw-r--r--src/tools/clippy/util/etc/vscode-tasks.json57
-rwxr-xr-xsrc/tools/clippy/util/fetch_prs_between.sh27
-rwxr-xr-xsrc/tools/clippy/util/versions.py44
-rw-r--r--src/tools/compiletest/Cargo.toml26
-rw-r--r--src/tools/compiletest/src/common.rs473
-rw-r--r--src/tools/compiletest/src/compute_diff.rs157
-rw-r--r--src/tools/compiletest/src/errors.rs179
-rw-r--r--src/tools/compiletest/src/header.rs1060
-rw-r--r--src/tools/compiletest/src/header/tests.rs283
-rw-r--r--src/tools/compiletest/src/json.rs321
-rw-r--r--src/tools/compiletest/src/main.rs1014
-rw-r--r--src/tools/compiletest/src/raise_fd_limit.rs54
-rw-r--r--src/tools/compiletest/src/read2.rs317
-rw-r--r--src/tools/compiletest/src/read2/tests.rs123
-rw-r--r--src/tools/compiletest/src/runtest.rs3948
-rw-r--r--src/tools/compiletest/src/runtest/debugger.rs122
-rw-r--r--src/tools/compiletest/src/runtest/tests.rs50
-rw-r--r--src/tools/compiletest/src/tests.rs78
-rw-r--r--src/tools/compiletest/src/util.rs259
-rw-r--r--src/tools/compiletest/src/util/tests.rs51
-rw-r--r--src/tools/error_index_generator/Cargo.toml14
-rw-r--r--src/tools/error_index_generator/build.rs31
-rw-r--r--src/tools/error_index_generator/main.rs298
-rw-r--r--src/tools/expand-yaml-anchors/Cargo.toml8
-rw-r--r--src/tools/expand-yaml-anchors/src/main.rs205
-rw-r--r--src/tools/html-checker/Cargo.toml12
-rw-r--r--src/tools/html-checker/main.rs125
-rw-r--r--src/tools/jsondocck/Cargo.toml13
-rw-r--r--src/tools/jsondocck/src/cache.rs77
-rw-r--r--src/tools/jsondocck/src/config.rs37
-rw-r--r--src/tools/jsondocck/src/error.rs28
-rw-r--r--src/tools/jsondocck/src/main.rs339
-rw-r--r--src/tools/linkchecker/Cargo.toml12
-rwxr-xr-xsrc/tools/linkchecker/linkcheck.sh116
-rw-r--r--src/tools/linkchecker/main.rs560
-rw-r--r--src/tools/linkchecker/tests/basic_broken/foo.html5
-rw-r--r--src/tools/linkchecker/tests/broken_fragment_local/foo.html5
-rw-r--r--src/tools/linkchecker/tests/broken_fragment_remote/bar.html4
-rw-r--r--src/tools/linkchecker/tests/broken_fragment_remote/inner/foo.html5
-rw-r--r--src/tools/linkchecker/tests/broken_redir/foo.html5
-rw-r--r--src/tools/linkchecker/tests/broken_redir/redir-bad.html11
-rw-r--r--src/tools/linkchecker/tests/checks.rs113
-rw-r--r--src/tools/linkchecker/tests/directory_link/foo.html5
-rw-r--r--src/tools/linkchecker/tests/directory_link/somedir/index.html4
-rw-r--r--src/tools/linkchecker/tests/redirect_loop/foo.html5
-rw-r--r--src/tools/linkchecker/tests/redirect_loop/redir-bad.html11
-rw-r--r--src/tools/linkchecker/tests/valid/inner/bar.html7
-rw-r--r--src/tools/linkchecker/tests/valid/inner/foo.html14
-rw-r--r--src/tools/linkchecker/tests/valid/inner/redir-bad.html12
-rw-r--r--src/tools/linkchecker/tests/valid/inner/redir-target.html5
-rw-r--r--src/tools/linkchecker/tests/valid/inner/redir.html11
-rw-r--r--src/tools/linkchecker/tests/valid/outer.html5
-rw-r--r--src/tools/lint-docs/Cargo.toml12
-rw-r--r--src/tools/lint-docs/src/groups.rs148
-rw-r--r--src/tools/lint-docs/src/lib.rs502
-rw-r--r--src/tools/lint-docs/src/main.rs84
-rw-r--r--src/tools/lld-wrapper/Cargo.toml5
-rw-r--r--src/tools/lld-wrapper/src/main.rs106
-rwxr-xr-xsrc/tools/publish_toolstate.py373
-rw-r--r--src/tools/rust-analyzer/.cargo/config.toml11
-rw-r--r--src/tools/rust-analyzer/.editorconfig19
-rw-r--r--src/tools/rust-analyzer/.git-blame-ignore-revs8
-rw-r--r--src/tools/rust-analyzer/.vscode/extensions.json9
-rw-r--r--src/tools/rust-analyzer/.vscode/launch.json131
-rw-r--r--src/tools/rust-analyzer/.vscode/tasks.json67
-rw-r--r--src/tools/rust-analyzer/Cargo.lock2101
-rw-r--r--src/tools/rust-analyzer/Cargo.toml33
-rw-r--r--src/tools/rust-analyzer/LICENSE-APACHE201
-rw-r--r--src/tools/rust-analyzer/LICENSE-MIT23
-rw-r--r--src/tools/rust-analyzer/PRIVACY.md1
-rw-r--r--src/tools/rust-analyzer/README.md49
-rw-r--r--src/tools/rust-analyzer/assets/logo-square.svg88
-rw-r--r--src/tools/rust-analyzer/assets/logo-wide.svg142
-rw-r--r--src/tools/rust-analyzer/bench_data/glorious_old_parser8562
-rw-r--r--src/tools/rust-analyzer/bench_data/numerous_macro_rules560
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml22
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/change.rs85
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/fixture.rs494
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/input.rs792
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/lib.rs131
-rw-r--r--src/tools/rust-analyzer/crates/cfg/Cargo.toml26
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs145
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/dnf.rs345
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/lib.rs202
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/tests.rs224
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/Cargo.toml22
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/src/lib.rs396
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml43
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/adt.rs365
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs1002
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs471
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs1023
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs571
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs127
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs397
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs654
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs158
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs207
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs579
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/db.rs243
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs116
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr.rs444
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/find_path.rs1134
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs522
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs1108
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/intern.rs227
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs464
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs961
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs773
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs754
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs360
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/keys.rs70
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs174
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs980
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs354
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs95
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs377
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs1632
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs138
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs154
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs911
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs200
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs130
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres.rs545
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs98
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs2202
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs137
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs161
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs448
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs81
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs933
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs338
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs237
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs1187
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs843
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs222
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs230
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs95
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs912
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/src.rs85
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/test_db.rs245
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/trace.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs486
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/visibility.rs242
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml34
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs181
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs130
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs249
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs669
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs509
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs266
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs382
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs256
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs1000
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs276
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs433
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs81
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/quote.rs284
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml44
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs145
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/builder.rs311
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs799
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs358
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs469
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs148
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs225
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs701
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs199
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs416
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs508
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs1094
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs56
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs811
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs104
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs1315
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs1088
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs82
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs673
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs1527
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs354
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs295
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs738
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/interner.rs432
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs525
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs1778
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs148
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs1186
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs62
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs150
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs578
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs755
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs75
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs176
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs1338
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs1792
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs485
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs991
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs1650
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs3072
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs3782
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tls.rs133
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs187
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/utils.rs408
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/walk.rs147
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml28
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs177
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs170
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs530
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/from_id.rs293
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/has_source.rs174
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs3639
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs1540
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs473
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs915
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs348
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs347
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs325
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs164
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs229
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs1340
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs1709
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs447
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs400
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs234
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs1292
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs216
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs575
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs395
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs268
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs351
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs556
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs497
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs574
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs840
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs188
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs2147
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs900
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs5333
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs1770
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs1076
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs360
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs1279
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs606
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs139
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs121
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs255
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs179
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs657
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs334
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs343
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs132
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs1328
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs316
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs342
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs227
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs310
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs1787
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs492
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs177
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs295
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs495
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs184
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs1194
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs954
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs838
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs144
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs338
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs144
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs570
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs822
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs122
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs130
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs997
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs337
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs151
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs183
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs221
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs507
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs548
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs1297
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs509
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs241
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs37
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs409
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs212
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs284
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs1250
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs999
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs100
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs438
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs307
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs150
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs243
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs588
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs82
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs98
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs237
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs257
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs719
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs1020
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs980
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs309
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests.rs558
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs2259
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs195
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils.rs703
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs661
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs775
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/Cargo.toml33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions.rs691
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs380
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs116
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs61
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs74
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs947
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs280
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs108
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs43
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs407
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs196
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs130
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs133
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs1160
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs237
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs341
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs354
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs185
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs616
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs311
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs369
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs189
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs246
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs120
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs41
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/config.rs41
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context.rs639
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs1293
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs413
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/item.rs637
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/lib.rs247
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs1910
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs671
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs191
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs270
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs193
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs57
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs77
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs96
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs214
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests.rs305
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs1016
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs672
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs1232
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs274
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs154
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs247
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs716
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs131
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs133
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs229
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs895
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs671
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs384
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs90
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml39
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs78
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs163
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/assists.rs137
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs545
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs185
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs7682
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs105
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs674
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs446
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs1084
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs295
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs151
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/label.rs48
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs246
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/line_index.rs300
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs287
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs540
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs785
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/source_change.rs99
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs429
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs308
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs136
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs460
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt533
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs284
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/traits.rs273
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs86
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml34
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs30
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs203
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs144
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs486
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs218
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs37
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs334
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs837
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs1012
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs101
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs283
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs131
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs573
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs336
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs90
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs76
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs156
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs62
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs148
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs260
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs145
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs73
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml26
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs29
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs58
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs35
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs358
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs803
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs99
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs406
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs242
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs308
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/search.rs289
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs1397
-rw-r--r--src/tools/rust-analyzer/crates/ide/Cargo.toml47
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/annotations.rs789
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs460
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs549
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs77
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs491
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/expand_macro.rs521
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/extend_selection.rs662
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/file_structure.rs579
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/fixture.rs87
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/fn_references.rs94
-rwxr-xr-xsrc/tools/rust-analyzer/crates/ide/src/folding_ranges.rs626
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs112
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs1634
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs344
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs296
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs1377
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs390
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs563
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs5053
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs2818
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/join_lines.rs1087
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs702
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/markup.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/matching_brace.rs78
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/moniker.rs342
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/move_item.rs890
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/navigation_target.rs623
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/parent_module.rs167
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/prime_caches.rs158
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs98
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs1636
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs2252
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs2163
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs71
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/signature_help.rs1334
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/ssr.rs255
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs321
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/status.rs164
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs449
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs50
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs690
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs97
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs279
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs81
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs128
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs340
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html62
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html58
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html66
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html50
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html190
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html47
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html233
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html62
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html58
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html55
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html96
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html51
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html50
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html58
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html56
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html164
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html126
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs1096
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs339
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing.rs1210
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs616
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_hir.rs26
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs16
-rw-r--r--src/tools/rust-analyzer/crates/limit/Cargo.toml11
-rw-r--r--src/tools/rust-analyzer/crates/limit/src/lib.rs69
-rw-r--r--src/tools/rust-analyzer/crates/mbe/Cargo.toml24
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs222
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs121
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs914
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs272
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs352
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/parser.rs261
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs844
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs99
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/token_map.rs113
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs160
-rw-r--r--src/tools/rust-analyzer/crates/parser/Cargo.toml19
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/event.rs133
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs342
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs53
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs625
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs643
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs131
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs242
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items.rs465
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs168
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs37
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs140
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs93
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/params.rs209
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs132
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs440
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/types.rs352
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/input.rs88
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lexed_str.rs300
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs181
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/output.rs77
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/parser.rs340
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/shortcuts.rs215
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs29
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs390
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests.rs166
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs107
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs123
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs312
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/token_set.rs42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast18
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast62
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast74
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast80
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast56
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast387
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast89
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast47
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast134
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast107
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast171
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast327
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast209
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast205
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast68
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast55
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast83
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast75
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast256
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast123
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast79
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast53
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast60
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast128
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast53
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast98
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast60
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast76
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast81
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast105
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast87
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast98
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast79
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast148
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast85
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast72
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast97
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast117
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast251
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs18
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast60
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast125
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast65
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast126
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast152
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast53
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast40
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast117
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast139
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast87
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast19
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast136
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast67
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast125
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast103
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast175
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast55
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast246
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast70
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast128
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast46
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast84
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast151
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast62
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast49
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast105
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast90
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast70
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast111
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast66
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast111
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast456
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast123
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast23
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast19
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast56
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast58
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast70
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast79
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast112
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast38
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast57
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast45
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast11
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast19
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast96
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast72
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast20
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast26
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast13
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast46
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast34
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast95
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast105
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast47
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs0
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast39
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast194
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast40
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs12
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast61
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast133
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast95
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast65
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast93
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast274
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast155
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs25
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast283
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs0
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast32
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast186
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast152
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs11
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast64
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast61
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast973
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast93
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast223
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast2339
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs154
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast93
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast22
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast127
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast110
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast77
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast230
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs24
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast29
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast323
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast201
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast92
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast548
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs21
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast81
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast37
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast126
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast50
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast65
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast59
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast97
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast100
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast56
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast27
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast177
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs15
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast198
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs7
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast134
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast166
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast17
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast61
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast222
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs16
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast413
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast238
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs18
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast204
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast59
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast72
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast352
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs14
-rw-r--r--src/tools/rust-analyzer/crates/paths/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/crates/paths/src/lib.rs299
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs181
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs154
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs328
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs107
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs151
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs19
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml36
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/build.rs25
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs104
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs143
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs485
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs24
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs70
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs429
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs305
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs352
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs1056
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs140
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs819
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs105
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs156
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs510
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs32
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs89
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs451
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs304
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs83
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs332
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs1106
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs139
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs834
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs105
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs156
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs529
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs32
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs89
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs493
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs304
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs84
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs339
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs1125
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs139
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs792
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs102
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs518
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs46
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs179
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs155
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs31
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs199
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs160
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs47
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/build.rs106
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs114
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs6
-rw-r--r--src/tools/rust-analyzer/crates/profile/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs44
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/hprof.rs326
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/lib.rs130
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/memory_usage.rs127
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/stop_watch.rs101
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/tree.rs84
-rw-r--r--src/tools/rust-analyzer/crates/project-model/Cargo.toml28
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs238
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs504
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs63
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs159
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs51
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/project_json.rs198
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs60
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/sysroot.rs232
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/tests.rs1820
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs1032
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs0
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json245
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json12
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json13
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml92
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/build.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs155
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs239
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs46
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs210
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs228
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs69
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs447
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs86
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs248
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs14
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs164
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs328
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs17
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs122
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs86
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs16
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs1985
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs135
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs109
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt301
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt229
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt184
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt212
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt212
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt212
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt184
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt388
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs1843
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs53
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs266
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs117
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs375
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs1892
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs196
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs80
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs68
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs549
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs407
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs823
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs157
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs65
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs44
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs705
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs301
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs52
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs1397
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs57
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs1099
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs80
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs406
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs75
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs473
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/src/lib.rs203
-rw-r--r--src/tools/rust-analyzer/crates/stdx/Cargo.toml24
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/lib.rs247
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/macros.rs47
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs39
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/panic_context.rs49
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/process.rs267
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml39
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml27
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs11
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs11
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram667
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/algo.rs660
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast.rs367
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs174
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs717
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs410
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs41
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs4806
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs196
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/make.rs901
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs875
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs122
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs472
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs136
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/fuzz.rs75
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/hacks.rs15
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/lib.rs358
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing.rs46
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs441
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ptr.rs104
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs44
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs75
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ted.rs206
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests.rs186
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs252
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs862
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/token_text.rs95
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/utils.rs43
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/validation.rs378
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/validation/block.rs24
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs199
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs106
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast127
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs15
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast105
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast30
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast96
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast29
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast196
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast22
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast216
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs14
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rsbin0 -> 8 bytes
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs7
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/Cargo.toml19
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs112
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs45
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/fixture.rs409
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/lib.rs500
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs669
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/Cargo.toml14
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/src/lib.rs264
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/Cargo.toml13
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/src/lib.rs69
-rw-r--r--src/tools/rust-analyzer/crates/tt/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/buffer.rs231
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/lib.rs322
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml20
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs234
-rw-r--r--src/tools/rust-analyzer/crates/vfs/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs49
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/file_set.rs218
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs42
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/lib.rs221
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/loader.rs215
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/path_interner.rs48
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs406
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs30
-rw-r--r--src/tools/rust-analyzer/docs/dev/README.md266
-rw-r--r--src/tools/rust-analyzer/docs/dev/architecture.md497
-rw-r--r--src/tools/rust-analyzer/docs/dev/debugging.md99
-rw-r--r--src/tools/rust-analyzer/docs/dev/guide.md573
-rw-r--r--src/tools/rust-analyzer/docs/dev/lsp-extensions.md761
-rw-r--r--src/tools/rust-analyzer/docs/dev/style.md1172
-rw-r--r--src/tools/rust-analyzer/docs/dev/syntax.md534
-rw-r--r--src/tools/rust-analyzer/docs/user/generated_config.adoc620
-rw-r--r--src/tools/rust-analyzer/docs/user/manual.adoc863
-rw-r--r--src/tools/rust-analyzer/lib/README.md2
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/src/lib.rs366
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/src/map.rs75
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs121
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/error.rs50
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/lib.rs232
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/msg.rs343
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs62
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/socket.rs46
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs71
-rw-r--r--src/tools/rust-analyzer/rustfmt.toml2
-rw-r--r--src/tools/rust-analyzer/triagebot.toml1
-rw-r--r--src/tools/rust-analyzer/xtask/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/xtask/src/dist.rs170
-rw-r--r--src/tools/rust-analyzer/xtask/src/flags.rs148
-rw-r--r--src/tools/rust-analyzer/xtask/src/install.rs142
-rw-r--r--src/tools/rust-analyzer/xtask/src/main.rs91
-rw-r--r--src/tools/rust-analyzer/xtask/src/metrics.rs200
-rw-r--r--src/tools/rust-analyzer/xtask/src/release.rs96
-rw-r--r--src/tools/rust-analyzer/xtask/src/release/changelog.rs171
-rw-r--r--src/tools/rust-demangler/Cargo.toml16
-rw-r--r--src/tools/rust-demangler/README.md36
-rw-r--r--src/tools/rust-demangler/src/lib.rs21
-rw-r--r--src/tools/rust-demangler/src/main.rs97
-rw-r--r--src/tools/rust-demangler/tests/lib.rs84
-rw-r--r--src/tools/rust-installer/.github/workflows/ci.yml23
-rw-r--r--src/tools/rust-installer/Cargo.toml28
-rw-r--r--src/tools/rust-installer/LICENSE-APACHE201
-rw-r--r--src/tools/rust-installer/LICENSE-MIT25
-rw-r--r--src/tools/rust-installer/README.md71
-rwxr-xr-xsrc/tools/rust-installer/combine-installers.sh24
-rwxr-xr-xsrc/tools/rust-installer/gen-install-script.sh24
-rwxr-xr-xsrc/tools/rust-installer/gen-installer.sh24
-rw-r--r--src/tools/rust-installer/install-template.sh987
-rwxr-xr-xsrc/tools/rust-installer/make-tarballs.sh24
-rw-r--r--src/tools/rust-installer/rust-installer-version1
-rw-r--r--src/tools/rust-installer/src/combiner.rs161
-rw-r--r--src/tools/rust-installer/src/compression.rs177
-rw-r--r--src/tools/rust-installer/src/generator.rs170
-rw-r--r--src/tools/rust-installer/src/lib.rs17
-rw-r--r--src/tools/rust-installer/src/main.rs27
-rw-r--r--src/tools/rust-installer/src/remove_dir_all.rs860
-rw-r--r--src/tools/rust-installer/src/scripter.rs68
-rw-r--r--src/tools/rust-installer/src/tarballer.rs143
-rw-r--r--src/tools/rust-installer/src/util.rs156
-rwxr-xr-xsrc/tools/rust-installer/test.sh1342
-rw-r--r--src/tools/rust-installer/test/image-docdir1/share/doc/rust/README1
-rw-r--r--src/tools/rust-installer/test/image-docdir1/share/doc/rust/rustdocs.txt1
-rw-r--r--src/tools/rust-installer/test/image-docdir2/share/doc/cargo/README1
-rw-r--r--src/tools/rust-installer/test/image-docdir2/share/doc/cargo/cargodocs.txt1
-rw-r--r--src/tools/rust-installer/test/image1/bin/bad-bin1
-rwxr-xr-xsrc/tools/rust-installer/test/image1/bin/program1
-rwxr-xr-xsrc/tools/rust-installer/test/image1/bin/program21
-rw-r--r--src/tools/rust-installer/test/image1/dir-to-install/foo0
-rw-r--r--src/tools/rust-installer/test/image1/dir-to-not-install/foo0
-rw-r--r--src/tools/rust-installer/test/image1/something-to-install0
-rw-r--r--src/tools/rust-installer/test/image1/something-to-not-install0
-rwxr-xr-xsrc/tools/rust-installer/test/image2/bin/oldprogram1
-rw-r--r--src/tools/rust-installer/test/image2/dir-to-install/bar0
-rw-r--r--src/tools/rust-installer/test/image2/something-to-install0
-rwxr-xr-xsrc/tools/rust-installer/test/image3/bin/cargo1
-rw-r--r--src/tools/rust-installer/test/image4/baz0
-rw-r--r--src/tools/rust-installer/test/image4/dir-to-install/qux/bar0
-rw-r--r--src/tools/rust-installer/test/image5/dir-to-install/foo0
-rw-r--r--src/tools/rust-installer/triagebot.toml1
-rw-r--r--src/tools/rustbook/Cargo.toml14
-rw-r--r--src/tools/rustbook/src/main.rs102
-rw-r--r--src/tools/rustc-workspace-hack/Cargo.toml100
-rw-r--r--src/tools/rustc-workspace-hack/README.md25
-rw-r--r--src/tools/rustc-workspace-hack/lib.rs1
-rw-r--r--src/tools/rustdoc-gui/tester.js278
-rw-r--r--src/tools/rustdoc-js/tester.js418
-rw-r--r--src/tools/rustdoc-themes/Cargo.toml8
-rw-r--r--src/tools/rustdoc-themes/main.rs49
-rw-r--r--src/tools/rustdoc/Cargo.toml17
-rw-r--r--src/tools/rustdoc/main.rs3
-rw-r--r--src/tools/rustfmt/.editorconfig26
-rw-r--r--src/tools/rustfmt/.github/workflows/integration.yml85
-rw-r--r--src/tools/rustfmt/.github/workflows/linux.yml39
-rw-r--r--src/tools/rustfmt/.github/workflows/mac.yml36
-rw-r--r--src/tools/rustfmt/.github/workflows/rustdoc_check.yml25
-rw-r--r--src/tools/rustfmt/.github/workflows/upload-assets.yml81
-rw-r--r--src/tools/rustfmt/.github/workflows/windows.yml62
-rw-r--r--src/tools/rustfmt/.travis.yml77
-rw-r--r--src/tools/rustfmt/CHANGELOG.md1335
-rw-r--r--src/tools/rustfmt/CODE_OF_CONDUCT.md40
-rw-r--r--src/tools/rustfmt/Cargo.lock744
-rw-r--r--src/tools/rustfmt/Cargo.toml71
-rw-r--r--src/tools/rustfmt/Configurations.md2865
-rw-r--r--src/tools/rustfmt/Contributing.md251
-rw-r--r--src/tools/rustfmt/Design.md184
-rw-r--r--src/tools/rustfmt/LICENSE-APACHE201
-rw-r--r--src/tools/rustfmt/LICENSE-MIT25
-rw-r--r--src/tools/rustfmt/Makefile.toml71
-rw-r--r--src/tools/rustfmt/Processes.md57
-rw-r--r--src/tools/rustfmt/README.md252
-rw-r--r--src/tools/rustfmt/atom.md31
-rwxr-xr-xsrc/tools/rustfmt/bootstrap.sh17
-rw-r--r--src/tools/rustfmt/build.rs55
-rwxr-xr-xsrc/tools/rustfmt/ci/build_and_test.bat14
-rwxr-xr-xsrc/tools/rustfmt/ci/build_and_test.sh18
-rwxr-xr-xsrc/tools/rustfmt/ci/integration.sh107
-rw-r--r--src/tools/rustfmt/config_proc_macro/Cargo.lock68
-rw-r--r--src/tools/rustfmt/config_proc_macro/Cargo.toml23
-rw-r--r--src/tools/rustfmt/config_proc_macro/src/attrs.rs57
-rw-r--r--src/tools/rustfmt/config_proc_macro/src/config_type.rs15
-rw-r--r--src/tools/rustfmt/config_proc_macro/src/item_enum.rs208
-rw-r--r--src/tools/rustfmt/config_proc_macro/src/item_struct.rs5
-rw-r--r--src/tools/rustfmt/config_proc_macro/src/lib.rs71
-rw-r--r--src/tools/rustfmt/config_proc_macro/src/utils.rs52
-rw-r--r--src/tools/rustfmt/config_proc_macro/tests/smoke.rs20
-rw-r--r--src/tools/rustfmt/intellij.md35
-rw-r--r--src/tools/rustfmt/rust-toolchain3
-rw-r--r--src/tools/rustfmt/rustfmt.toml3
-rw-r--r--src/tools/rustfmt/src/attr.rs541
-rw-r--r--src/tools/rustfmt/src/attr/doc_comment.rs83
-rw-r--r--src/tools/rustfmt/src/bin/main.rs710
-rw-r--r--src/tools/rustfmt/src/cargo-fmt/main.rs550
-rw-r--r--src/tools/rustfmt/src/cargo-fmt/test/message_format.rs80
-rw-r--r--src/tools/rustfmt/src/cargo-fmt/test/mod.rs141
-rw-r--r--src/tools/rustfmt/src/cargo-fmt/test/targets.rs134
-rw-r--r--src/tools/rustfmt/src/chains.rs888
-rw-r--r--src/tools/rustfmt/src/closures.rs448
-rw-r--r--src/tools/rustfmt/src/comment.rs2007
-rw-r--r--src/tools/rustfmt/src/config/config_type.rs426
-rw-r--r--src/tools/rustfmt/src/config/file_lines.rs440
-rw-r--r--src/tools/rustfmt/src/config/lists.rs92
-rw-r--r--src/tools/rustfmt/src/config/mod.rs924
-rw-r--r--src/tools/rustfmt/src/config/options.rs464
-rw-r--r--src/tools/rustfmt/src/coverage.rs15
-rw-r--r--src/tools/rustfmt/src/emitter.rs52
-rw-r--r--src/tools/rustfmt/src/emitter/checkstyle.rs150
-rw-r--r--src/tools/rustfmt/src/emitter/checkstyle/xml.rs52
-rw-r--r--src/tools/rustfmt/src/emitter/diff.rs137
-rw-r--r--src/tools/rustfmt/src/emitter/files.rs37
-rw-r--r--src/tools/rustfmt/src/emitter/files_with_backup.rs31
-rw-r--r--src/tools/rustfmt/src/emitter/json.rs346
-rw-r--r--src/tools/rustfmt/src/emitter/modified_lines.rs24
-rw-r--r--src/tools/rustfmt/src/emitter/stdout.rs32
-rw-r--r--src/tools/rustfmt/src/expr.rs2159
-rw-r--r--src/tools/rustfmt/src/format-diff/main.rs281
-rw-r--r--src/tools/rustfmt/src/format-diff/test/bindgen.diff67
-rw-r--r--src/tools/rustfmt/src/format_report_formatter.rs150
-rw-r--r--src/tools/rustfmt/src/formatting.rs632
-rw-r--r--src/tools/rustfmt/src/formatting/generated.rs7
-rw-r--r--src/tools/rustfmt/src/formatting/newline_style.rs250
-rw-r--r--src/tools/rustfmt/src/git-rustfmt/main.rs192
-rw-r--r--src/tools/rustfmt/src/ignore_path.rs52
-rw-r--r--src/tools/rustfmt/src/imports.rs1506
-rw-r--r--src/tools/rustfmt/src/items.rs3335
-rw-r--r--src/tools/rustfmt/src/lib.rs658
-rw-r--r--src/tools/rustfmt/src/lists.rs943
-rw-r--r--src/tools/rustfmt/src/macros.rs1412
-rw-r--r--src/tools/rustfmt/src/matches.rs602
-rw-r--r--src/tools/rustfmt/src/missed_spans.rs363
-rw-r--r--src/tools/rustfmt/src/modules.rs577
-rw-r--r--src/tools/rustfmt/src/modules/visitor.rs108
-rw-r--r--src/tools/rustfmt/src/overflow.rs785
-rw-r--r--src/tools/rustfmt/src/pairs.rs318
-rw-r--r--src/tools/rustfmt/src/parse/macros/asm.rs11
-rw-r--r--src/tools/rustfmt/src/parse/macros/cfg_if.rs89
-rw-r--r--src/tools/rustfmt/src/parse/macros/lazy_static.rs50
-rw-r--r--src/tools/rustfmt/src/parse/macros/mod.rs229
-rw-r--r--src/tools/rustfmt/src/parse/mod.rs3
-rw-r--r--src/tools/rustfmt/src/parse/parser.rs175
-rw-r--r--src/tools/rustfmt/src/parse/session.rs507
-rw-r--r--src/tools/rustfmt/src/patterns.rs535
-rw-r--r--src/tools/rustfmt/src/release_channel.rs16
-rw-r--r--src/tools/rustfmt/src/reorder.rs330
-rw-r--r--src/tools/rustfmt/src/rewrite.rs98
-rw-r--r--src/tools/rustfmt/src/rustfmt_diff.rs400
-rw-r--r--src/tools/rustfmt/src/shape.rs373
-rw-r--r--src/tools/rustfmt/src/skip.rs76
-rw-r--r--src/tools/rustfmt/src/source_file.rs105
-rw-r--r--src/tools/rustfmt/src/source_map.rs82
-rw-r--r--src/tools/rustfmt/src/spanned.rs199
-rw-r--r--src/tools/rustfmt/src/stmt.rs116
-rw-r--r--src/tools/rustfmt/src/string.rs725
-rw-r--r--src/tools/rustfmt/src/syntux.rs4
-rw-r--r--src/tools/rustfmt/src/test/configuration_snippet.rs322
-rw-r--r--src/tools/rustfmt/src/test/mod.rs1053
-rw-r--r--src/tools/rustfmt/src/test/mod_resolver.rs82
-rw-r--r--src/tools/rustfmt/src/test/parser.rs57
-rw-r--r--src/tools/rustfmt/src/types.rs1086
-rw-r--r--src/tools/rustfmt/src/utils.rs708
-rw-r--r--src/tools/rustfmt/src/vertical.rs302
-rw-r--r--src/tools/rustfmt/src/visitor.rs1010
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/main.rs98
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/Cargo.toml13
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/Cargo.toml10
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/src/lib.rs7
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/Cargo.toml7
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/src/lib.rs7
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/src/main.rs3
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/Cargo.toml8
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/src/main.rs1
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/Cargo.toml5
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/Cargo.toml6
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/Cargo.toml7
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/Cargo.toml4
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/src/lib.rs1
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/src/lib.rs1
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/src/main.rs1
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/Cargo.toml6
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/src/main.rs1
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/Cargo.toml4
-rw-r--r--src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/src/lib.rs1
-rw-r--r--src/tools/rustfmt/tests/config/disable_all_formatting.toml1
-rw-r--r--src/tools/rustfmt/tests/config/issue-1111.toml1
-rw-r--r--src/tools/rustfmt/tests/config/issue-2641.toml1
-rw-r--r--src/tools/rustfmt/tests/config/issue-3779.toml3
-rw-r--r--src/tools/rustfmt/tests/config/skip_children.toml1
-rw-r--r--src/tools/rustfmt/tests/config/small_tabs.toml10
-rw-r--r--src/tools/rustfmt/tests/coverage/source/comments.rs7
-rw-r--r--src/tools/rustfmt/tests/coverage/target/comments.rs7
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-4874/bar/baz.rs5
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-4874/foo.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-4874/foo/qux.rs5
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-4874/main.rs8
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5063/foo.rs2
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5063/foo/bar/baz.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5063/main.rs5
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a.rs0
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a/mod.rs0
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5167/src/lib.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/a.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib.rs3
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/b.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d.rs3
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/explanation.txt16
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/f.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/g/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/e.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/mod.rs3
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/explanation.txt16
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/module-not-found/bad_path_attribute/lib.rs3
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/a.rs2
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/lib.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/module-not-found/sibling_module/lib.rs2
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo.rs5
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo/bar/baz.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/main.rs9
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/one.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/Cargo.toml8
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/src/lib.rs7
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs8
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub1.rs6
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub2.rs6
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/sub4.rs0
-rw-r--r--src/tools/rustfmt/tests/parser/issue-4126/invalid.rs6
-rw-r--r--src/tools/rustfmt/tests/parser/issue-4126/lib.rs1
-rw-r--r--src/tools/rustfmt/tests/parser/issue_4418.rs1
-rw-r--r--src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs11
-rw-r--r--src/tools/rustfmt/tests/rustfmt/main.rs159
-rw-r--r--src/tools/rustfmt/tests/source/5131_crate.rs14
-rw-r--r--src/tools/rustfmt/tests/source/5131_module.rs33
-rw-r--r--src/tools/rustfmt/tests/source/5131_one.rs15
-rw-r--r--src/tools/rustfmt/tests/source/alignment_2633/block_style.rs8
-rw-r--r--src/tools/rustfmt/tests/source/alignment_2633/visual_style.rs9
-rw-r--r--src/tools/rustfmt/tests/source/array_comment.rs19
-rw-r--r--src/tools/rustfmt/tests/source/assignment.rs34
-rw-r--r--src/tools/rustfmt/tests/source/associated-types-bounds-wrapping.rs5
-rw-r--r--src/tools/rustfmt/tests/source/associated_type_bounds.rs13
-rw-r--r--src/tools/rustfmt/tests/source/async_block.rs51
-rw-r--r--src/tools/rustfmt/tests/source/async_fn.rs28
-rw-r--r--src/tools/rustfmt/tests/source/attrib.rs234
-rw-r--r--src/tools/rustfmt/tests/source/big-impl-block.rs123
-rw-r--r--src/tools/rustfmt/tests/source/big-impl-visual.rs106
-rw-r--r--src/tools/rustfmt/tests/source/binary-expr.rs10
-rw-r--r--src/tools/rustfmt/tests/source/binop-separator-back/bitwise.rs14
-rw-r--r--src/tools/rustfmt/tests/source/binop-separator-back/comp.rs23
-rw-r--r--src/tools/rustfmt/tests/source/binop-separator-back/logic.rs7
-rw-r--r--src/tools/rustfmt/tests/source/binop-separator-back/math.rs7
-rw-r--r--src/tools/rustfmt/tests/source/binop-separator-back/patterns.rs9
-rw-r--r--src/tools/rustfmt/tests/source/binop-separator-back/range.rs7
-rw-r--r--src/tools/rustfmt/tests/source/break-and-continue.rs23
-rw-r--r--src/tools/rustfmt/tests/source/catch.rs28
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/aarch64.rs106
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/arm.rs39
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips.rs29
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips64.rs29
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc.rs42
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc64.rs42
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/arch/x86.rs348
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/bit.rs9
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/cache.rs164
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/error_macros.rs150
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/mod.rs85
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/aarch64.rs79
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/aarch64.rs28
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/arm.rs27
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/auxvec.rs86
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/mod.rs22
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/powerpc.rs27
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/aarch64.rs157
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/arm.rs49
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/auxvec.rs307
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/cpuinfo.rs301
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mips.rs31
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mod.rs28
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/powerpc.rs41
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/other.rs9
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/detect/os/x86.rs375
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/lib.rs49
-rw-r--r--src/tools/rustfmt/tests/source/cfg_if/mod.rs5
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/bar.rs3
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir2/wasm32.rs6
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir3/wasm32.rs6
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/foo.rs4
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/mod.rs10
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/other.rs1
-rw-r--r--src/tools/rustfmt/tests/source/cfg_mod/wasm32.rs4
-rw-r--r--src/tools/rustfmt/tests/source/chains-visual.rs158
-rw-r--r--src/tools/rustfmt/tests/source/chains.rs266
-rw-r--r--src/tools/rustfmt/tests/source/chains_with_comment.rs121
-rw-r--r--src/tools/rustfmt/tests/source/closure-block-inside-macro.rs9
-rw-r--r--src/tools/rustfmt/tests/source/closure.rs223
-rw-r--r--src/tools/rustfmt/tests/source/comment.rs90
-rw-r--r--src/tools/rustfmt/tests/source/comment2.rs4
-rw-r--r--src/tools/rustfmt/tests/source/comment3.rs5
-rw-r--r--src/tools/rustfmt/tests/source/comment4.rs52
-rw-r--r--src/tools/rustfmt/tests/source/comment5.rs14
-rw-r--r--src/tools/rustfmt/tests/source/comment6.rs10
-rw-r--r--src/tools/rustfmt/tests/source/comment_crlf_newline.rs4
-rw-r--r--src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-not-normalized.rs129
-rw-r--r--src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-true.rs130
-rw-r--r--src/tools/rustfmt/tests/source/configs/blank_lines_lower_bound/1.rs13
-rw-r--r--src/tools/rustfmt/tests/source/configs/brace_style/fn_always_next_line.rs14
-rw-r--r--src/tools/rustfmt/tests/source/configs/brace_style/fn_prefer_same_line.rs14
-rw-r--r--src/tools/rustfmt/tests/source/configs/brace_style/fn_same_line_where.rs14
-rw-r--r--src/tools/rustfmt/tests/source/configs/brace_style/item_always_next_line.rs20
-rw-r--r--src/tools/rustfmt/tests/source/configs/brace_style/item_prefer_same_line.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/brace_style/item_same_line_where.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/chain_width/always.rs23
-rw-r--r--src/tools/rustfmt/tests/source/configs/chain_width/small.rs23
-rw-r--r--src/tools/rustfmt/tests/source/configs/chain_width/tiny.rs21
-rw-r--r--src/tools/rustfmt/tests/source/configs/comment_width/above.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/comment_width/below.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/comment_width/ignore.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/true.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/control_brace_style/always_next_line.rs10
-rw-r--r--src/tools/rustfmt/tests/source/configs/control_brace_style/always_same_line.rs10
-rw-r--r--src/tools/rustfmt/tests/source/configs/control_brace_style/closing_next_line.rs10
-rw-r--r--src/tools/rustfmt/tests/source/configs/disable_all_formatting/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100_greater_max_width.rs17
-rw-r--r--src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/50.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/empty_item_single_line/false.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/empty_item_single_line/true.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/enum_discrim_align_threshold/40.rs34
-rw-r--r--src/tools/rustfmt/tests/source/configs/error_on_line_overflow/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/fn_args_layout/compressed.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/fn_args_layout/tall.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/fn_args_layout/vertical.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/fn_single_line/false.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/fn_single_line/true.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/force_explicit_abi/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/force_explicit_abi/true.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/force_multiline_block/false.rs22
-rw-r--r--src/tools/rustfmt/tests/source/configs/force_multiline_block/true.rs18
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_generated_files/false.rs8
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_generated_files/true.rs8
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_macro_bodies/false.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_macro_bodies/true.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_macro_matchers/false.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_macro_matchers/true.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_strings/false.rs8
-rw-r--r--src/tools/rustfmt/tests/source/configs/format_strings/true.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/One-merge_imports.rs17
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/One-nested.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/One-no_reorder.rs16
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/One.rs15
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-merge_imports.rs17
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-nested.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-no_reorder.rs17
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-non_consecutive.rs27
-rw-r--r--src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate.rs15
-rw-r--r--src/tools/rustfmt/tests/source/configs/hard_tabs/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/hard_tabs/true.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/imports_layout/merge_mixed.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_args.rs26
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_array.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs133
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_chain.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_generic.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_struct_lit.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/one.rs9
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/two.rs9
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/block_where_pred.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/default.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/rfc_where.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_args.rs32
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_array.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_call.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_chain.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_generics.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_struct_lit.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_trailing_comma.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/indent_style/visual_where_pred.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_arm_blocks/false.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_arm_blocks/true.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/always.rs27
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/never.rs28
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/preserve.rs36
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/false.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/true.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/merge_derives/true.rs46
-rw-r--r--src/tools/rustfmt/tests/source/configs/normalize_comments/false.rs13
-rw-r--r--src/tools/rustfmt/tests/source/configs/normalize_comments/true.rs13
-rw-r--r--src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/false.rs13
-rw-r--r--src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/true.rs13
-rw-r--r--src/tools/rustfmt/tests/source/configs/remove_nested_parens/remove_nested_parens.rs5
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_impl_items/false.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_impl_items/true.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_imports/false.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_imports/true.rs19
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_modules/dolor/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_modules/false.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_modules/ipsum/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_modules/lorem/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_modules/sit/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/source/configs/reorder_modules/true.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/10.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/20.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/greater_than_max_width.rs12
-rw-r--r--src/tools/rustfmt/tests/source/configs/skip_children/foo/mod.rs3
-rw-r--r--src/tools/rustfmt/tests/source/configs/skip_children/true.rs4
-rw-r--r--src/tools/rustfmt/tests/source/configs/space_before_colon/true.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs34
-rw-r--r--src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs34
-rw-r--r--src/tools/rustfmt/tests/source/configs/struct_field_align_threshold/20.rs383
-rw-r--r--src/tools/rustfmt/tests/source/configs/struct_lit_single_line/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/tab_spaces/2.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/tab_spaces/4.rs11
-rw-r--r--src/tools/rustfmt/tests/source/configs/trailing_comma/always.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/trailing_comma/never.rs23
-rw-r--r--src/tools/rustfmt/tests/source/configs/trailing_comma/vertical.rs7
-rw-r--r--src/tools/rustfmt/tests/source/configs/type_punctuation_density/compressed.rs37
-rw-r--r--src/tools/rustfmt/tests/source/configs/type_punctuation_density/wide.rs37
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/false.rs19
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/true.rs19
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_small_heuristics/default.rs25
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_small_heuristics/max.rs25
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_small_heuristics/off.rs25
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs6
-rw-r--r--src/tools/rustfmt/tests/source/configs/where_single_line/true.rs26
-rw-r--r--src/tools/rustfmt/tests/source/configs/wrap_comments/false.rs8
-rw-r--r--src/tools/rustfmt/tests/source/configs/wrap_comments/true.rs15
-rw-r--r--src/tools/rustfmt/tests/source/const_generics.rs44
-rw-r--r--src/tools/rustfmt/tests/source/control-brace-style-always-next-line.rs44
-rw-r--r--src/tools/rustfmt/tests/source/control-brace-style-always-same-line.rs42
-rw-r--r--src/tools/rustfmt/tests/source/doc-attrib.rs118
-rw-r--r--src/tools/rustfmt/tests/source/doc-comment-with-example.rs12
-rw-r--r--src/tools/rustfmt/tests/source/doc.rs5
-rw-r--r--src/tools/rustfmt/tests/source/dyn_trait.rs20
-rw-r--r--src/tools/rustfmt/tests/source/else-if-brace-style-always-next-line.rs54
-rw-r--r--src/tools/rustfmt/tests/source/else-if-brace-style-always-same-line.rs52
-rw-r--r--src/tools/rustfmt/tests/source/else-if-brace-style-closing-next-line.rs54
-rw-r--r--src/tools/rustfmt/tests/source/empty-item-single-line-false.rs46
-rw-r--r--src/tools/rustfmt/tests/source/empty_file.rs0
-rw-r--r--src/tools/rustfmt/tests/source/enum.rs212
-rw-r--r--src/tools/rustfmt/tests/source/existential_type.rs23
-rw-r--r--src/tools/rustfmt/tests/source/expr-block.rs300
-rw-r--r--src/tools/rustfmt/tests/source/expr-overflow-delimited.rs155
-rw-r--r--src/tools/rustfmt/tests/source/expr.rs579
-rw-r--r--src/tools/rustfmt/tests/source/extern.rs92
-rw-r--r--src/tools/rustfmt/tests/source/extern_not_explicit.rs14
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-1.rs29
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-2.rs29
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-3.rs29
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-4.rs30
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-5.rs17
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-6.rs18
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-7.rs24
-rw-r--r--src/tools/rustfmt/tests/source/file-lines-item.rs21
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom-2.rs35
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom-3.rs31
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom-4.rs13
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom-6.rs40
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom-7.rs24
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom-8.rs48
-rw-r--r--src/tools/rustfmt/tests/source/fn-custom.rs13
-rw-r--r--src/tools/rustfmt/tests/source/fn-param-attributes.rs57
-rw-r--r--src/tools/rustfmt/tests/source/fn-simple.rs74
-rw-r--r--src/tools/rustfmt/tests/source/fn-single-line/version_one.rs80
-rw-r--r--src/tools/rustfmt/tests/source/fn-single-line/version_two.rs80
-rw-r--r--src/tools/rustfmt/tests/source/fn_args_indent-block.rs77
-rw-r--r--src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs33
-rw-r--r--src/tools/rustfmt/tests/source/hard-tabs.rs84
-rw-r--r--src/tools/rustfmt/tests/source/hello.rs6
-rw-r--r--src/tools/rustfmt/tests/source/hello2.rs8
-rw-r--r--src/tools/rustfmt/tests/source/hex_literal_lower.rs5
-rw-r--r--src/tools/rustfmt/tests/source/hex_literal_upper.rs5
-rw-r--r--src/tools/rustfmt/tests/source/if_while_or_patterns.rs27
-rw-r--r--src/tools/rustfmt/tests/source/immovable_generators.rs7
-rw-r--r--src/tools/rustfmt/tests/source/impls.rs178
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports-impl-only-use.rs4
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports-reorder-lines-and-items.rs7
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports-reorder-lines.rs32
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports-reorder.rs5
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports.rs107
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_block_indent.rs2
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_granularity_crate.rs65
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_granularity_default-with-dups.rs6
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs13
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups.rs11
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_granularity_item.rs34
-rw-r--r--src/tools/rustfmt/tests/source/imports/imports_granularity_module.rs47
-rw-r--r--src/tools/rustfmt/tests/source/imports_granularity_one.rs88
-rw-r--r--src/tools/rustfmt/tests/source/imports_raw_identifiers/version_One.rs5
-rw-r--r--src/tools/rustfmt/tests/source/imports_raw_identifiers/version_Two.rs5
-rw-r--r--src/tools/rustfmt/tests/source/invalid-rust-code-in-doc-comment.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-1021.rs22
-rw-r--r--src/tools/rustfmt/tests/source/issue-1049.rs18
-rw-r--r--src/tools/rustfmt/tests/source/issue-1111.rs1
-rw-r--r--src/tools/rustfmt/tests/source/issue-1120.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-1124.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-1127.rs23
-rw-r--r--src/tools/rustfmt/tests/source/issue-1158.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-1177.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-1192.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-1210/a.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-1210/b.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-1210/c.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-1210/d.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-1210/e.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-1211.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-1216.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-1239.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-1278.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-1350.rs16
-rw-r--r--src/tools/rustfmt/tests/source/issue-1366.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-1468.rs27
-rw-r--r--src/tools/rustfmt/tests/source/issue-1693.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-1800.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-1914.rs6
-rw-r--r--src/tools/rustfmt/tests/source/issue-2025.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-2111.rs26
-rw-r--r--src/tools/rustfmt/tests/source/issue-2164.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-2179/one.rs36
-rw-r--r--src/tools/rustfmt/tests/source/issue-2179/two.rs36
-rw-r--r--src/tools/rustfmt/tests/source/issue-2256.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-2342.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-2445.rs21
-rw-r--r--src/tools/rustfmt/tests/source/issue-2446.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-2479.rs2
-rw-r--r--src/tools/rustfmt/tests/source/issue-2482/a.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-2482/b.rs1
-rw-r--r--src/tools/rustfmt/tests/source/issue-2482/c.rs1
-rw-r--r--src/tools/rustfmt/tests/source/issue-2496.rs16
-rw-r--r--src/tools/rustfmt/tests/source/issue-2520.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-2523.rs18
-rw-r--r--src/tools/rustfmt/tests/source/issue-2582.rs1
-rw-r--r--src/tools/rustfmt/tests/source/issue-2641.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-2644.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-2728.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-2761.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-2781.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-2794.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-2835.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-2863.rs25
-rw-r--r--src/tools/rustfmt/tests/source/issue-2869.rs41
-rw-r--r--src/tools/rustfmt/tests/source/issue-2896.rs161
-rw-r--r--src/tools/rustfmt/tests/source/issue-2916.rs2
-rw-r--r--src/tools/rustfmt/tests/source/issue-2917/packed_simd.rs63
-rw-r--r--src/tools/rustfmt/tests/source/issue-2922.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-2927-2.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-2927.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-2930.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-2936.rs19
-rw-r--r--src/tools/rustfmt/tests/source/issue-2955.rs6
-rw-r--r--src/tools/rustfmt/tests/source/issue-2973.rs158
-rw-r--r--src/tools/rustfmt/tests/source/issue-2977/impl.rs44
-rw-r--r--src/tools/rustfmt/tests/source/issue-2977/trait.rs44
-rw-r--r--src/tools/rustfmt/tests/source/issue-2985.rs35
-rw-r--r--src/tools/rustfmt/tests/source/issue-2995.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-3029.rs94
-rw-r--r--src/tools/rustfmt/tests/source/issue-3038.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-3049.rs45
-rw-r--r--src/tools/rustfmt/tests/source/issue-3055/original.rs43
-rw-r--r--src/tools/rustfmt/tests/source/issue-3059.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-3066.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-3131.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3153.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-3158.rs74
-rw-r--r--src/tools/rustfmt/tests/source/issue-3194.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-3198.rs99
-rw-r--r--src/tools/rustfmt/tests/source/issue-3213/version_one.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-3213/version_two.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-3217.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3227/two.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-3234.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-3241.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-3253/bar.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-3253/foo.rs6
-rw-r--r--src/tools/rustfmt/tests/source/issue-3253/lib.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-3253/paths/bar_foo.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-3253/paths/excluded.rs6
-rw-r--r--src/tools/rustfmt/tests/source/issue-3253/paths/foo_bar.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-3265.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-3270/one.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3270/two.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3272/v1.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-3272/v2.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-3278/version_one.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3278/version_two.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3295/two.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-3302.rs43
-rw-r--r--src/tools/rustfmt/tests/source/issue-3343.rs47
-rw-r--r--src/tools/rustfmt/tests/source/issue-3423.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-3434/lib.rs57
-rw-r--r--src/tools/rustfmt/tests/source/issue-3434/no_entry.rs18
-rw-r--r--src/tools/rustfmt/tests/source/issue-3434/not_skip_macro.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3465.rs42
-rw-r--r--src/tools/rustfmt/tests/source/issue-3494/crlf.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3494/lf.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-3508.rs29
-rw-r--r--src/tools/rustfmt/tests/source/issue-3515.rs6
-rw-r--r--src/tools/rustfmt/tests/source/issue-3532.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-3585/extern_crate.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3585/reorder_imports_disabled.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3585/reorder_imports_enabled.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3585/use.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-3636.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-3639.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-3651.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-3665/lib.rs33
-rw-r--r--src/tools/rustfmt/tests/source/issue-3665/not_skip_attribute.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-3665/sub_mod.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-3672.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-3675.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-3701/one.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3701/two.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3709.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-3740.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-3750.rs16
-rw-r--r--src/tools/rustfmt/tests/source/issue-3751.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-3779/ice.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-3779/lib.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-3786.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-3787.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-3840/version-one_hard-tabs.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-3840/version-one_soft-tabs.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-3840/version-two_hard-tabs.rs16
-rw-r--r--src/tools/rustfmt/tests/source/issue-3840/version-two_soft-tabs.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue-4018.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-4036/one.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-4036/three.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-4036/two.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-4079.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-4120.rs85
-rw-r--r--src/tools/rustfmt/tests/source/issue-4243.rs21
-rw-r--r--src/tools/rustfmt/tests/source/issue-4244.rs16
-rw-r--r--src/tools/rustfmt/tests/source/issue-4245.rs26
-rw-r--r--src/tools/rustfmt/tests/source/issue-4312.rs22
-rw-r--r--src/tools/rustfmt/tests/source/issue-4382.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-4398.rs19
-rw-r--r--src/tools/rustfmt/tests/source/issue-4427.rs31
-rw-r--r--src/tools/rustfmt/tests/source/issue-447.rs39
-rw-r--r--src/tools/rustfmt/tests/source/issue-4530.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-4577.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-4603.rs47
-rw-r--r--src/tools/rustfmt/tests/source/issue-4615/minimum_example.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-4646.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-4656/format_me_please.rs2
-rw-r--r--src/tools/rustfmt/tests/source/issue-4656/lib.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue-4656/lib2.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue-4791/buggy.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-4791/trailing_comma.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-4816/lib.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct.rs35
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_long_field_names.rs43
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_many_fields.rs44
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/enum_struct_field.rs35
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/minimum_example.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/struct_with_long_field_names.rs21
-rw-r--r--src/tools/rustfmt/tests/source/issue-4926/struct_with_many_fields.rs21
-rw-r--r--src/tools/rustfmt/tests/source/issue-4984/minimum_example.rs2
-rw-r--r--src/tools/rustfmt/tests/source/issue-4984/multi_line_derive.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-4984/multiple_comments_within.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue-5011.rs12
-rw-r--r--src/tools/rustfmt/tests/source/issue-5023.rs22
-rw-r--r--src/tools/rustfmt/tests/source/issue-5030.rs22
-rw-r--r--src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_with_trailing_comma.rs24
-rw-r--r--src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_without_trailing_comma.rs24
-rw-r--r--src/tools/rustfmt/tests/source/issue-5042/single-line_comment_with_trailing_comma.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue-5042/single-line_comment_without_trailing_comma.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs33
-rw-r--r--src/tools/rustfmt/tests/source/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs19
-rw-r--r--src/tools/rustfmt/tests/source/issue-5088/very_long_comment_wrap_comments_true.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue-510.rs37
-rw-r--r--src/tools/rustfmt/tests/source/issue-5157/indented_itemized_markdown_blockquote.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-5157/nested_itemized_markdown_blockquote.rs10
-rw-r--r--src/tools/rustfmt/tests/source/issue-5157/support_itemized_markdown_blockquote.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_false.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_true.rs11
-rw-r--r--src/tools/rustfmt/tests/source/issue-5260.rs14
-rw-r--r--src/tools/rustfmt/tests/source/issue-5270/merge_derives_true.rs62
-rw-r--r--src/tools/rustfmt/tests/source/issue-539.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-683.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-811.rs19
-rw-r--r--src/tools/rustfmt/tests/source/issue-850.rs1
-rw-r--r--src/tools/rustfmt/tests/source/issue-855.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-913.rs20
-rw-r--r--src/tools/rustfmt/tests/source/issue-945.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue-977.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue_3839.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue_3844.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue_3853.rs52
-rw-r--r--src/tools/rustfmt/tests/source/issue_3868.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue_4032.rs4
-rw-r--r--src/tools/rustfmt/tests/source/issue_4057.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue_4086.rs2
-rw-r--r--src/tools/rustfmt/tests/source/issue_4257.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue_4322.rs3
-rw-r--r--src/tools/rustfmt/tests/source/issue_4374.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue_4475.rs27
-rw-r--r--src/tools/rustfmt/tests/source/issue_4528.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue_4579.rs15
-rw-r--r--src/tools/rustfmt/tests/source/issue_4584.rs19
-rw-r--r--src/tools/rustfmt/tests/source/issue_4636.rs13
-rw-r--r--src/tools/rustfmt/tests/source/issue_4675.rs8
-rw-r--r--src/tools/rustfmt/tests/source/issue_4823.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue_4854.rs113
-rw-r--r--src/tools/rustfmt/tests/source/issue_4911.rs6
-rw-r--r--src/tools/rustfmt/tests/source/issue_4943.rs9
-rw-r--r--src/tools/rustfmt/tests/source/issue_4954.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue_4963.rs5
-rw-r--r--src/tools/rustfmt/tests/source/issue_5027.rs7
-rw-r--r--src/tools/rustfmt/tests/source/issue_5086.rs2
-rw-r--r--src/tools/rustfmt/tests/source/item-brace-style-always-next-line.rs64
-rw-r--r--src/tools/rustfmt/tests/source/item-brace-style-prefer-same-line.rs29
-rw-r--r--src/tools/rustfmt/tests/source/item-brace-style-same-line-where.rs29
-rw-r--r--src/tools/rustfmt/tests/source/itemized-blocks/no_wrap.rs47
-rw-r--r--src/tools/rustfmt/tests/source/itemized-blocks/rewrite_fail.rs11
-rw-r--r--src/tools/rustfmt/tests/source/itemized-blocks/urls.rs22
-rw-r--r--src/tools/rustfmt/tests/source/itemized-blocks/wrap.rs55
-rw-r--r--src/tools/rustfmt/tests/source/label_break.rs28
-rw-r--r--src/tools/rustfmt/tests/source/large-block.rs5
-rw-r--r--src/tools/rustfmt/tests/source/large_vec.rs29
-rw-r--r--src/tools/rustfmt/tests/source/lazy_static.rs45
-rw-r--r--src/tools/rustfmt/tests/source/let_else.rs3
-rw-r--r--src/tools/rustfmt/tests/source/long-fn-1/version_one.rs21
-rw-r--r--src/tools/rustfmt/tests/source/long-fn-1/version_two.rs21
-rw-r--r--src/tools/rustfmt/tests/source/long-match-arms-brace-newline.rs15
-rw-r--r--src/tools/rustfmt/tests/source/long-use-statement-issue-3154.rs3
-rw-r--r--src/tools/rustfmt/tests/source/long_field_access.rs3
-rw-r--r--src/tools/rustfmt/tests/source/loop.rs29
-rw-r--r--src/tools/rustfmt/tests/source/macro_not_expr.rs7
-rw-r--r--src/tools/rustfmt/tests/source/macro_rules.rs301
-rw-r--r--src/tools/rustfmt/tests/source/macros.rs486
-rw-r--r--src/tools/rustfmt/tests/source/markdown-comment-with-options.rs17
-rw-r--r--src/tools/rustfmt/tests/source/markdown-comment.rs15
-rw-r--r--src/tools/rustfmt/tests/source/match-block-trailing-comma.rs22
-rw-r--r--src/tools/rustfmt/tests/source/match-flattening.rs21
-rw-r--r--src/tools/rustfmt/tests/source/match-nowrap-trailing-comma.rs15
-rw-r--r--src/tools/rustfmt/tests/source/match-nowrap.rs12
-rw-r--r--src/tools/rustfmt/tests/source/match.rs589
-rw-r--r--src/tools/rustfmt/tests/source/match_overflow_expr.rs53
-rw-r--r--src/tools/rustfmt/tests/source/max-line-length-in-chars.rs4
-rw-r--r--src/tools/rustfmt/tests/source/merge_imports_true_compat.rs4
-rw-r--r--src/tools/rustfmt/tests/source/mod-1.rs29
-rw-r--r--src/tools/rustfmt/tests/source/mod-2.rs4
-rw-r--r--src/tools/rustfmt/tests/source/mod_skip_child.rs2
-rw-r--r--src/tools/rustfmt/tests/source/multiple.rs134
-rw-r--r--src/tools/rustfmt/tests/source/negative-impl.rs7
-rw-r--r--src/tools/rustfmt/tests/source/nested-if-else.rs11
-rw-r--r--src/tools/rustfmt/tests/source/nested_skipped/mod.rs3
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/mod.rs13
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/mod2a.rs4
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/mod2b.rs3
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/mod2c.rs3
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/mymod1/mod3a.rs2
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/submod2/a.rs6
-rw-r--r--src/tools/rustfmt/tests/source/nestedmod/submod2/mod.rs5
-rw-r--r--src/tools/rustfmt/tests/source/no_arg_with_commnet.rs2
-rw-r--r--src/tools/rustfmt/tests/source/no_new_line_beginning.rs2
-rw-r--r--src/tools/rustfmt/tests/source/normalize_doc_attributes_should_not_imply_format_doc_comments.rs15
-rw-r--r--src/tools/rustfmt/tests/source/normalize_multiline_doc_attribute.rs12
-rw-r--r--src/tools/rustfmt/tests/source/one_line_if_v1.rs42
-rw-r--r--src/tools/rustfmt/tests/source/one_line_if_v2.rs42
-rw-r--r--src/tools/rustfmt/tests/source/other.rs5
-rw-r--r--src/tools/rustfmt/tests/source/paren.rs6
-rw-r--r--src/tools/rustfmt/tests/source/path_clarity/foo.rs2
-rw-r--r--src/tools/rustfmt/tests/source/path_clarity/foo/bar.rs3
-rw-r--r--src/tools/rustfmt/tests/source/paths.rs25
-rw-r--r--src/tools/rustfmt/tests/source/pattern-condense-wildcards.rs12
-rw-r--r--src/tools/rustfmt/tests/source/pattern.rs90
-rw-r--r--src/tools/rustfmt/tests/source/preserves_carriage_return_for_unix.rs2
-rw-r--r--src/tools/rustfmt/tests/source/preserves_carriage_return_for_windows.rs2
-rw-r--r--src/tools/rustfmt/tests/source/pub-restricted.rs51
-rw-r--r--src/tools/rustfmt/tests/source/remove_blank_lines.rs44
-rw-r--r--src/tools/rustfmt/tests/source/reorder-impl-items.rs15
-rw-r--r--src/tools/rustfmt/tests/source/single-line-if-else.rs49
-rw-r--r--src/tools/rustfmt/tests/source/single-line-macro/v1.rs10
-rw-r--r--src/tools/rustfmt/tests/source/single-line-macro/v2.rs10
-rw-r--r--src/tools/rustfmt/tests/source/soft-wrapping.rs15
-rw-r--r--src/tools/rustfmt/tests/source/space-not-before-newline.rs8
-rw-r--r--src/tools/rustfmt/tests/source/spaces-around-ranges.rs15
-rw-r--r--src/tools/rustfmt/tests/source/statements.rs43
-rw-r--r--src/tools/rustfmt/tests/source/static.rs23
-rw-r--r--src/tools/rustfmt/tests/source/string-lit-2.rs25
-rw-r--r--src/tools/rustfmt/tests/source/string-lit.rs61
-rw-r--r--src/tools/rustfmt/tests/source/string_punctuation.rs9
-rw-r--r--src/tools/rustfmt/tests/source/struct-field-attributes.rs52
-rw-r--r--src/tools/rustfmt/tests/source/struct_field_doc_comment.rs72
-rw-r--r--src/tools/rustfmt/tests/source/struct_lits.rs143
-rw-r--r--src/tools/rustfmt/tests/source/struct_lits_multiline.rs81
-rw-r--r--src/tools/rustfmt/tests/source/struct_lits_visual.rs46
-rw-r--r--src/tools/rustfmt/tests/source/struct_lits_visual_multiline.rs44
-rw-r--r--src/tools/rustfmt/tests/source/struct_tuple_visual.rs36
-rw-r--r--src/tools/rustfmt/tests/source/structs.rs298
-rw-r--r--src/tools/rustfmt/tests/source/trailing-comma-never.rs45
-rw-r--r--src/tools/rustfmt/tests/source/trailing_commas.rs47
-rw-r--r--src/tools/rustfmt/tests/source/trailing_comments/hard_tabs.rs21
-rw-r--r--src/tools/rustfmt/tests/source/trailing_comments/soft_tabs.rs21
-rw-r--r--src/tools/rustfmt/tests/source/trait.rs183
-rw-r--r--src/tools/rustfmt/tests/source/try-conversion.rs18
-rw-r--r--src/tools/rustfmt/tests/source/try_block.rs30
-rw-r--r--src/tools/rustfmt/tests/source/tuple.rs63
-rw-r--r--src/tools/rustfmt/tests/source/tuple_v2.rs5
-rw-r--r--src/tools/rustfmt/tests/source/type-ascription.rs10
-rw-r--r--src/tools/rustfmt/tests/source/type.rs168
-rw-r--r--src/tools/rustfmt/tests/source/type_alias.rs34
-rw-r--r--src/tools/rustfmt/tests/source/unicode.rs33
-rw-r--r--src/tools/rustfmt/tests/source/unions.rs195
-rw-r--r--src/tools/rustfmt/tests/source/unsafe-mod.rs7
-rw-r--r--src/tools/rustfmt/tests/source/visibility.rs8
-rw-r--r--src/tools/rustfmt/tests/source/visual-fn-type.rs10
-rw-r--r--src/tools/rustfmt/tests/source/where-clause-rfc.rs73
-rw-r--r--src/tools/rustfmt/tests/source/where-clause.rs58
-rw-r--r--src/tools/rustfmt/tests/source/width-heuristics.rs28
-rw-r--r--src/tools/rustfmt/tests/source/wrap_comments_should_not_imply_format_doc_comments.rs16
-rw-r--r--src/tools/rustfmt/tests/target/5131_crate.rs9
-rw-r--r--src/tools/rustfmt/tests/target/5131_module.rs32
-rw-r--r--src/tools/rustfmt/tests/target/5131_one.rs12
-rw-r--r--src/tools/rustfmt/tests/target/alignment_2633/block_style.rs10
-rw-r--r--src/tools/rustfmt/tests/target/alignment_2633/horizontal_tactic.rs13
-rw-r--r--src/tools/rustfmt/tests/target/alignment_2633/visual_style.rs9
-rw-r--r--src/tools/rustfmt/tests/target/array_comment.rs18
-rw-r--r--src/tools/rustfmt/tests/target/assignment.rs39
-rw-r--r--src/tools/rustfmt/tests/target/associated-items.rs3
-rw-r--r--src/tools/rustfmt/tests/target/associated-types-bounds-wrapping.rs6
-rw-r--r--src/tools/rustfmt/tests/target/associated_type_bounds.rs13
-rw-r--r--src/tools/rustfmt/tests/target/associated_type_defaults.rs4
-rw-r--r--src/tools/rustfmt/tests/target/async_block.rs35
-rw-r--r--src/tools/rustfmt/tests/target/async_closure.rs22
-rw-r--r--src/tools/rustfmt/tests/target/async_fn.rs24
-rw-r--r--src/tools/rustfmt/tests/target/attrib-block-expr.rs58
-rw-r--r--src/tools/rustfmt/tests/target/attrib-extern-crate.rs17
-rw-r--r--src/tools/rustfmt/tests/target/attrib.rs271
-rw-r--r--src/tools/rustfmt/tests/target/big-impl-block.rs82
-rw-r--r--src/tools/rustfmt/tests/target/big-impl-visual.rs65
-rw-r--r--src/tools/rustfmt/tests/target/binary-expr.rs16
-rw-r--r--src/tools/rustfmt/tests/target/binop-separator-back/bitwise.rs18
-rw-r--r--src/tools/rustfmt/tests/target/binop-separator-back/comp.rs33
-rw-r--r--src/tools/rustfmt/tests/target/binop-separator-back/logic.rs10
-rw-r--r--src/tools/rustfmt/tests/target/binop-separator-back/math.rs23
-rw-r--r--src/tools/rustfmt/tests/target/binop-separator-back/patterns.rs11
-rw-r--r--src/tools/rustfmt/tests/target/binop-separator-back/range.rs9
-rw-r--r--src/tools/rustfmt/tests/target/break-and-continue.rs23
-rw-r--r--src/tools/rustfmt/tests/target/catch.rs22
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/aarch64.rs98
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/arm.rs47
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips.rs30
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips64.rs30
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc.rs42
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc64.rs42
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/arch/x86.rs333
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/bit.rs9
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/cache.rs164
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/error_macros.rs150
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/mod.rs85
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/aarch64.rs88
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/aarch64.rs28
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/arm.rs27
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/auxvec.rs94
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/mod.rs22
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/powerpc.rs27
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/aarch64.rs160
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/arm.rs52
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/auxvec.rs304
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/cpuinfo.rs300
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mips.rs31
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mod.rs28
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/powerpc.rs41
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/other.rs9
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/detect/os/x86.rs367
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/lib.rs49
-rw-r--r--src/tools/rustfmt/tests/target/cfg_if/mod.rs5
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/bar.rs3
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir2/wasm32.rs3
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir3/wasm32.rs3
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/foo.rs3
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/mod.rs10
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/other.rs3
-rw-r--r--src/tools/rustfmt/tests/target/cfg_mod/wasm32.rs3
-rw-r--r--src/tools/rustfmt/tests/target/chains-visual.rs158
-rw-r--r--src/tools/rustfmt/tests/target/chains.rs306
-rw-r--r--src/tools/rustfmt/tests/target/chains_with_comment.rs137
-rw-r--r--src/tools/rustfmt/tests/target/closure-block-inside-macro.rs9
-rw-r--r--src/tools/rustfmt/tests/target/closure.rs256
-rw-r--r--src/tools/rustfmt/tests/target/comment-inside-const.rs9
-rw-r--r--src/tools/rustfmt/tests/target/comment-not-disappear.rs38
-rw-r--r--src/tools/rustfmt/tests/target/comment.rs93
-rw-r--r--src/tools/rustfmt/tests/target/comment2.rs5
-rw-r--r--src/tools/rustfmt/tests/target/comment3.rs6
-rw-r--r--src/tools/rustfmt/tests/target/comment4.rs51
-rw-r--r--src/tools/rustfmt/tests/target/comment5.rs16
-rw-r--r--src/tools/rustfmt/tests/target/comment6.rs14
-rw-r--r--src/tools/rustfmt/tests/target/comment_crlf_newline.rs4
-rw-r--r--src/tools/rustfmt/tests/target/comments-fn.rs38
-rw-r--r--src/tools/rustfmt/tests/target/comments-in-lists/format-doc-comments.rs94
-rw-r--r--src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-false.rs83
-rw-r--r--src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-not-normalized.rs142
-rw-r--r--src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-true.rs143
-rw-r--r--src/tools/rustfmt/tests/target/configs/blank_lines_lower_bound/1.rs16
-rw-r--r--src/tools/rustfmt/tests/target/configs/brace_style/fn_always_next_line.rs19
-rw-r--r--src/tools/rustfmt/tests/target/configs/brace_style/fn_prefer_same_line.rs16
-rw-r--r--src/tools/rustfmt/tests/target/configs/brace_style/fn_same_line_where.rs17
-rw-r--r--src/tools/rustfmt/tests/target/configs/brace_style/item_always_next_line.rs25
-rw-r--r--src/tools/rustfmt/tests/target/configs/brace_style/item_prefer_same_line.rs18
-rw-r--r--src/tools/rustfmt/tests/target/configs/brace_style/item_same_line_where.rs19
-rw-r--r--src/tools/rustfmt/tests/target/configs/chain_width/always.rs29
-rw-r--r--src/tools/rustfmt/tests/target/configs/chain_width/small.rs32
-rw-r--r--src/tools/rustfmt/tests/target/configs/chain_width/tiny.rs26
-rw-r--r--src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs134
-rw-r--r--src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs122
-rw-r--r--src/tools/rustfmt/tests/target/configs/comment_width/above.rs8
-rw-r--r--src/tools/rustfmt/tests/target/configs/comment_width/below.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/comment_width/ignore.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/control_brace_style/always_next_line.rs18
-rw-r--r--src/tools/rustfmt/tests/target/configs/control_brace_style/always_same_line.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/control_brace_style/closing_next_line.rs15
-rw-r--r--src/tools/rustfmt/tests/target/configs/disable_all_formatting/false.rs10
-rw-r--r--src/tools/rustfmt/tests/target/configs/disable_all_formatting/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100.rs16
-rw-r--r--src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100_greater_max_width.rs29
-rw-r--r--src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/50.rs22
-rw-r--r--src/tools/rustfmt/tests/target/configs/empty_item_single_line/false.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/empty_item_single_line/true.rs10
-rw-r--r--src/tools/rustfmt/tests/target/configs/enum_discrim_align_threshold/40.rs34
-rw-r--r--src/tools/rustfmt/tests/target/configs/error_on_line_overflow/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/error_on_unformatted/false.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/fn_args_layout/compressed.rs22
-rw-r--r--src/tools/rustfmt/tests/target/configs/fn_args_layout/tall.rs32
-rw-r--r--src/tools/rustfmt/tests/target/configs/fn_args_layout/vertical.rs42
-rw-r--r--src/tools/rustfmt/tests/target/configs/fn_single_line/false.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/fn_single_line/true.rs9
-rw-r--r--src/tools/rustfmt/tests/target/configs/force_explicit_abi/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/force_explicit_abi/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/force_multiline_block/false.rs20
-rw-r--r--src/tools/rustfmt/tests/target/configs/force_multiline_block/true.rs22
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_generated_files/false.rs8
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_generated_files/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_macro_bodies/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_macro_bodies/true.rs10
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_macro_matchers/false.rs10
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_macro_matchers/true.rs10
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_strings/false.rs8
-rw-r--r--src/tools/rustfmt/tests/target/configs/format_strings/true.rs9
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/One-merge_imports.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/One-nested.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/One-no_reorder.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/One.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-merge_imports.rs16
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-nested.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-no_reorder.rs15
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-non_consecutive.rs18
-rw-r--r--src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/hard_tabs/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/hard_tabs/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/imports_indent/block.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/imports_layout/horizontal_vertical.rs18
-rw-r--r--src/tools/rustfmt/tests/target/configs/imports_layout/merge_mixed.rs5
-rw-r--r--src/tools/rustfmt/tests/target/configs/imports_layout/mixed.rs9
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_args.rs47
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_array.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs151
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_chain.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_generic.rs22
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_struct_lit.rs9
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_tab_spaces_call.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/one.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/two.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/block_where_pred.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/default.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/rfc_control.rs39
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/rfc_where.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_args.rs40
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_array.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_call.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_chain.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_generics.rs20
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_struct_lit.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_trailing_comma.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/indent_style/visual_where_pred.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_arm_blocks/false.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_arm_blocks/true.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/always.rs27
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/never.rs27
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/preserve.rs35
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/false.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/true.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/merge_derives/true.rs40
-rw-r--r--src/tools/rustfmt/tests/target/configs/normalize_comments/false.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/normalize_comments/true.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/false.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/true.rs13
-rw-r--r--src/tools/rustfmt/tests/target/configs/remove_nested_parens/remove_nested_parens.rs5
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_impl_items/false.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_impl_items/true.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_imports/false.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_imports/true.rs19
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_modules/dolor/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_modules/false.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_modules/ipsum/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_modules/lorem/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_modules/sit/mod.rs1
-rw-r--r--src/tools/rustfmt/tests/target/configs/reorder_modules/true.rs7
-rw-r--r--src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/10.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/20.rs8
-rw-r--r--src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/greater_than_max_width.rs12
-rw-r--r--src/tools/rustfmt/tests/target/configs/skip_children/foo/mod.rs3
-rw-r--r--src/tools/rustfmt/tests/target/configs/skip_children/true.rs4
-rw-r--r--src/tools/rustfmt/tests/target/configs/space_before_colon/true.rs11
-rw-r--r--src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs34
-rw-r--r--src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs34
-rw-r--r--src/tools/rustfmt/tests/target/configs/struct_field_align_threshold/20.rs471
-rw-r--r--src/tools/rustfmt/tests/target/configs/struct_lit_single_line/false.rs9
-rw-r--r--src/tools/rustfmt/tests/target/configs/tab_spaces/2.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/tab_spaces/4.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/trailing_comma/always.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/trailing_comma/never.rs35
-rw-r--r--src/tools/rustfmt/tests/target/configs/trailing_comma/vertical.rs14
-rw-r--r--src/tools/rustfmt/tests/target/configs/trailing_semicolon/false.rs27
-rw-r--r--src/tools/rustfmt/tests/target/configs/trailing_semicolon/true.rs27
-rw-r--r--src/tools/rustfmt/tests/target/configs/type_punctuation_density/compressed.rs41
-rw-r--r--src/tools/rustfmt/tests/target/configs/type_punctuation_density/wide.rs41
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/false.rs15
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/true.rs15
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_small_heuristics/default.rs26
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_small_heuristics/max.rs15
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_small_heuristics/off.rs25
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/use_try_shorthand/true.rs6
-rw-r--r--src/tools/rustfmt/tests/target/configs/where_single_line/true-with-brace-style.rs22
-rw-r--r--src/tools/rustfmt/tests/target/configs/where_single_line/true.rs30
-rw-r--r--src/tools/rustfmt/tests/target/configs/wrap_comments/false.rs8
-rw-r--r--src/tools/rustfmt/tests/target/configs/wrap_comments/true.rs20
-rw-r--r--src/tools/rustfmt/tests/target/const_generics.rs37
-rw-r--r--src/tools/rustfmt/tests/target/control-brace-style-always-next-line.rs50
-rw-r--r--src/tools/rustfmt/tests/target/control-brace-style-always-same-line.rs40
-rw-r--r--src/tools/rustfmt/tests/target/doc-attrib.rs131
-rw-r--r--src/tools/rustfmt/tests/target/doc-comment-with-example.rs11
-rw-r--r--src/tools/rustfmt/tests/target/doc-of-generic-item.rs14
-rw-r--r--src/tools/rustfmt/tests/target/doc.rs5
-rw-r--r--src/tools/rustfmt/tests/target/dyn_trait.rs27
-rw-r--r--src/tools/rustfmt/tests/target/else-if-brace-style-always-next-line.rs53
-rw-r--r--src/tools/rustfmt/tests/target/else-if-brace-style-always-same-line.rs43
-rw-r--r--src/tools/rustfmt/tests/target/else-if-brace-style-closing-next-line.rs49
-rw-r--r--src/tools/rustfmt/tests/target/empty-item-single-line-false.rs41
-rw-r--r--src/tools/rustfmt/tests/target/empty-tuple-no-conversion-to-unit-struct.rs12
-rw-r--r--src/tools/rustfmt/tests/target/empty_file.rs1
-rw-r--r--src/tools/rustfmt/tests/target/enum.rs289
-rw-r--r--src/tools/rustfmt/tests/target/existential_type.rs23
-rw-r--r--src/tools/rustfmt/tests/target/expr-block.rs305
-rw-r--r--src/tools/rustfmt/tests/target/expr-overflow-delimited.rs120
-rw-r--r--src/tools/rustfmt/tests/target/expr.rs671
-rw-r--r--src/tools/rustfmt/tests/target/extern.rs97
-rw-r--r--src/tools/rustfmt/tests/target/extern_not_explicit.rs18
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-1.rs30
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-2.rs24
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-3.rs25
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-4.rs30
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-5.rs17
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-6.rs18
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-7.rs21
-rw-r--r--src/tools/rustfmt/tests/target/file-lines-item.rs21
-rw-r--r--src/tools/rustfmt/tests/target/fn-args-with-last-line-comment.rs24
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom-2.rs77
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom-3.rs71
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom-4.rs26
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom-6.rs71
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom-7.rs36
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom-8.rs77
-rw-r--r--src/tools/rustfmt/tests/target/fn-custom.rs19
-rw-r--r--src/tools/rustfmt/tests/target/fn-param-attributes.rs64
-rw-r--r--src/tools/rustfmt/tests/target/fn-simple.rs120
-rw-r--r--src/tools/rustfmt/tests/target/fn-single-line/version_one.rs71
-rw-r--r--src/tools/rustfmt/tests/target/fn-single-line/version_two.rs67
-rw-r--r--src/tools/rustfmt/tests/target/fn-ty.rs14
-rw-r--r--src/tools/rustfmt/tests/target/fn.rs120
-rw-r--r--src/tools/rustfmt/tests/target/fn_args_indent-block.rs143
-rw-r--r--src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs39
-rw-r--r--src/tools/rustfmt/tests/target/fn_once.rs8
-rw-r--r--src/tools/rustfmt/tests/target/format_strings/issue-202.rs25
-rw-r--r--src/tools/rustfmt/tests/target/format_strings/issue-2833.rs15
-rw-r--r--src/tools/rustfmt/tests/target/format_strings/issue-3263.rs26
-rw-r--r--src/tools/rustfmt/tests/target/format_strings/issue-687.rs10
-rw-r--r--src/tools/rustfmt/tests/target/format_strings/issue564.rs7
-rw-r--r--src/tools/rustfmt/tests/target/hard-tabs.rs98
-rw-r--r--src/tools/rustfmt/tests/target/hello.rs8
-rw-r--r--src/tools/rustfmt/tests/target/hex_literal_lower.rs5
-rw-r--r--src/tools/rustfmt/tests/target/hex_literal_preserve.rs5
-rw-r--r--src/tools/rustfmt/tests/target/hex_literal_upper.rs5
-rw-r--r--src/tools/rustfmt/tests/target/if_while_or_patterns.rs38
-rw-r--r--src/tools/rustfmt/tests/target/immovable_generators.rs7
-rw-r--r--src/tools/rustfmt/tests/target/impl.rs43
-rw-r--r--src/tools/rustfmt/tests/target/impls.rs252
-rw-r--r--src/tools/rustfmt/tests/target/imports/import-fencepost-length.rs7
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports-impl-only-use.rs4
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports-reorder-lines-and-items.rs7
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports-reorder-lines.rs31
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports-reorder.rs5
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports.rs129
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_2021_edition.rs3
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_block_indent.rs4
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_granularity_crate.rs59
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_granularity_default-with-dups.rs6
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs7
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups.rs5
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_granularity_item.rs45
-rw-r--r--src/tools/rustfmt/tests/target/imports/imports_granularity_module.rs55
-rw-r--r--src/tools/rustfmt/tests/target/imports_granularity_one.rs109
-rw-r--r--src/tools/rustfmt/tests/target/imports_raw_identifiers/version_One.rs5
-rw-r--r--src/tools/rustfmt/tests/target/imports_raw_identifiers/version_Two.rs5
-rw-r--r--src/tools/rustfmt/tests/target/indented-impl.rs13
-rw-r--r--src/tools/rustfmt/tests/target/inner-module-path/b.rs1
-rw-r--r--src/tools/rustfmt/tests/target/inner-module-path/c/d.rs1
-rw-r--r--src/tools/rustfmt/tests/target/inner-module-path/lib.rs8
-rw-r--r--src/tools/rustfmt/tests/target/invalid-rust-code-in-doc-comment.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue-1021.rs22
-rw-r--r--src/tools/rustfmt/tests/target/issue-1049.rs29
-rw-r--r--src/tools/rustfmt/tests/target/issue-1055.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-1096.rs71
-rw-r--r--src/tools/rustfmt/tests/target/issue-1111.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-1113.rs33
-rw-r--r--src/tools/rustfmt/tests/target/issue-1120.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-1124.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue-1127.rs25
-rw-r--r--src/tools/rustfmt/tests/target/issue-1158.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-1177.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-1192.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-1210/a.rs16
-rw-r--r--src/tools/rustfmt/tests/target/issue-1210/b.rs16
-rw-r--r--src/tools/rustfmt/tests/target/issue-1210/c.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-1210/d.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-1210/e.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-1211.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-1214.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-1216.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-1239.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-1247.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-1255.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-1278.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-1350.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-1366.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-1397.rs25
-rw-r--r--src/tools/rustfmt/tests/target/issue-1468.rs29
-rw-r--r--src/tools/rustfmt/tests/target/issue-1598.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-1624.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-1681.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue-1693.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-1703.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-1800.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-1802.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-1824.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-1914.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-2025.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-2103.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-2111.rs26
-rw-r--r--src/tools/rustfmt/tests/target/issue-2123.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-2164.rs135
-rw-r--r--src/tools/rustfmt/tests/target/issue-2179/one.rs37
-rw-r--r--src/tools/rustfmt/tests/target/issue-2179/two.rs40
-rw-r--r--src/tools/rustfmt/tests/target/issue-2197.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-2256.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-2324.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-2329.rs30
-rw-r--r--src/tools/rustfmt/tests/target/issue-2342.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-2346.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-2401.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-2445.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue-2446.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-2479.rs12
-rw-r--r--src/tools/rustfmt/tests/target/issue-2482/a.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-2482/b.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-2482/c.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-2496.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-2520.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-2523.rs20
-rw-r--r--src/tools/rustfmt/tests/target/issue-2526.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-2551.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-2554.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-2582.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-2641.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-2644.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo/bar.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/lib.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-2728.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-2759.rs65
-rw-r--r--src/tools/rustfmt/tests/target/issue-2761.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-2781.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-2794.rs12
-rw-r--r--src/tools/rustfmt/tests/target/issue-2810.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-2835.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-2863.rs54
-rw-r--r--src/tools/rustfmt/tests/target/issue-2869.rs41
-rw-r--r--src/tools/rustfmt/tests/target/issue-2896.rs165
-rw-r--r--src/tools/rustfmt/tests/target/issue-2916.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue-2917/minimal.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-2917/packed_simd.rs63
-rw-r--r--src/tools/rustfmt/tests/target/issue-2922.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-2927-2.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-2927.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-2930.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-2936.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue-2941.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-2955.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-2973.rs158
-rw-r--r--src/tools/rustfmt/tests/target/issue-2976.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-2977/block.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-2977/impl.rs44
-rw-r--r--src/tools/rustfmt/tests/target/issue-2977/item.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-2977/trait.rs44
-rw-r--r--src/tools/rustfmt/tests/target/issue-2985.rs36
-rw-r--r--src/tools/rustfmt/tests/target/issue-2995.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-3029.rs94
-rw-r--r--src/tools/rustfmt/tests/target/issue-3032.rs36
-rw-r--r--src/tools/rustfmt/tests/target/issue-3038.rs29
-rw-r--r--src/tools/rustfmt/tests/target/issue-3043.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3049.rs45
-rw-r--r--src/tools/rustfmt/tests/target/issue-3055/backtick.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-3055/empty-code-block.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue-3055/original.rs42
-rw-r--r--src/tools/rustfmt/tests/target/issue-3059.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3066.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-3105.rs48
-rw-r--r--src/tools/rustfmt/tests/target/issue-3118.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-3124.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-3131.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3132.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-3153.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-3158.rs74
-rw-r--r--src/tools/rustfmt/tests/target/issue-3182.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-3184.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3194.rs52
-rw-r--r--src/tools/rustfmt/tests/target/issue-3198.rs67
-rw-r--r--src/tools/rustfmt/tests/target/issue-3213/version_one.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-3213/version_two.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-3217.rs24
-rw-r--r--src/tools/rustfmt/tests/target/issue-3224.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-3227/one.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-3227/two.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-3234.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-3241.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-3253/bar.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue-3253/foo.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-3253/lib.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-3253/paths/bar_foo.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-3253/paths/excluded.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-3253/paths/foo_bar.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3265.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-3270/one.rs12
-rw-r--r--src/tools/rustfmt/tests/target/issue-3270/two.rs12
-rw-r--r--src/tools/rustfmt/tests/target/issue-3270/wrap.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-3272/v1.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-3272/v2.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-3278/version_one.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3278/version_two.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3295/two.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-3302.rs43
-rw-r--r--src/tools/rustfmt/tests/target/issue-3304.rs42
-rw-r--r--src/tools/rustfmt/tests/target/issue-3314.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3343.rs44
-rw-r--r--src/tools/rustfmt/tests/target/issue-3423.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3434/lib.rs57
-rw-r--r--src/tools/rustfmt/tests/target/issue-3434/no_entry.rs19
-rw-r--r--src/tools/rustfmt/tests/target/issue-3434/not_skip_macro.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3442.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-3465.rs42
-rw-r--r--src/tools/rustfmt/tests/target/issue-3494/crlf.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3494/lf.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3499.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-3508.rs22
-rw-r--r--src/tools/rustfmt/tests/target/issue-3515.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-3532.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-3539.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3554.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-3567.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-3568.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-3585/extern_crate.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-3585/reorder_imports_disabled.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3585/reorder_imports_enabled.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3585/use.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3595.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-3601.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-3614/version_one.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-3614/version_two.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3636.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3639.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3645.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-3651.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-3665/lib.rs31
-rw-r--r--src/tools/rustfmt/tests/target/issue-3665/not_skip_attribute.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-3665/sub_mod.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-3672.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-3675.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-3701/one.rs12
-rw-r--r--src/tools/rustfmt/tests/target/issue-3701/two.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-3709.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-3711.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-3717.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-3718.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-3740.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3741.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-3750.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-3751.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-3759.rs27
-rw-r--r--src/tools/rustfmt/tests/target/issue-3779/ice.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-3779/lib.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-3786.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-3787.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-3815.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-3840/version-one_hard-tabs.rs25
-rw-r--r--src/tools/rustfmt/tests/target/issue-3840/version-one_soft-tabs.rs23
-rw-r--r--src/tools/rustfmt/tests/target/issue-3840/version-two_hard-tabs.rs26
-rw-r--r--src/tools/rustfmt/tests/target/issue-3840/version-two_soft-tabs.rs25
-rw-r--r--src/tools/rustfmt/tests/target/issue-3845.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-3882.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-3974.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-4018.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-4020.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-4029.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-4036/one.rs12
-rw-r--r--src/tools/rustfmt/tests/target/issue-4036/three.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-4036/two.rs16
-rw-r--r--src/tools/rustfmt/tests/target/issue-4068.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-4079.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-4115.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-4120.rs85
-rw-r--r--src/tools/rustfmt/tests/target/issue-4152.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue-4159.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue-4243.rs28
-rw-r--r--src/tools/rustfmt/tests/target/issue-4244.rs20
-rw-r--r--src/tools/rustfmt/tests/target/issue-4245.rs34
-rw-r--r--src/tools/rustfmt/tests/target/issue-4310.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-4312.rs22
-rw-r--r--src/tools/rustfmt/tests/target/issue-4313.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-4382.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-4398.rs19
-rw-r--r--src/tools/rustfmt/tests/target/issue-4427.rs30
-rw-r--r--src/tools/rustfmt/tests/target/issue-447.rs40
-rw-r--r--src/tools/rustfmt/tests/target/issue-4530.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-4577.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-4603.rs47
-rw-r--r--src/tools/rustfmt/tests/target/issue-4615/minimum_example.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-4646.rs20
-rw-r--r--src/tools/rustfmt/tests/target/issue-4656/format_me_please.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-4656/lib.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-4656/lib2.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-4791/buggy.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-4791/issue_4928.rs70
-rw-r--r--src/tools/rustfmt/tests/target/issue-4791/no_trailing_comma.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-4791/trailing_comma.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-4816/lib.rs35
-rw-r--r--src/tools/rustfmt/tests/target/issue-4908-2.rs20
-rw-r--r--src/tools/rustfmt/tests/target/issue-4908.rs34
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct.rs38
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_long_field_names.rs44
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_many_fields.rs54
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/enum_struct_field.rs41
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/minimum_example.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/struct_with_long_field_names.rs24
-rw-r--r--src/tools/rustfmt/tests/target/issue-4926/struct_with_many_fields.rs34
-rw-r--r--src/tools/rustfmt/tests/target/issue-4984/minimum_example.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue-4984/multi_line_derive.rs26
-rw-r--r--src/tools/rustfmt/tests/target/issue-4984/multiple_comments_within.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-4984/should_not_change.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-5005/minimum_example.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-5009/1_minimum_example.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue-5009/2_many_in_connectors_in_pattern.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-5009/3_nested_for_loop_with_connector_in_pattern.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-5009/4_nested_for_loop_with_if_elseif_else.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-5009/5_nested_for_loop_with_connector_in_if_elseif_else.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue-5009/6_deeply_nested_for_loop_with_connector_in_pattern.rs32
-rw-r--r--src/tools/rustfmt/tests/target/issue-5011.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-5012/trailing_comma_always.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-5012/trailing_comma_never.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-5023.rs23
-rw-r--r--src/tools/rustfmt/tests/target/issue-5030.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue-5033/minimum_example.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue-5033/nested_modules.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_with_trailing_comma.rs24
-rw-r--r--src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_without_trailing_comma.rs24
-rw-r--r--src/tools/rustfmt/tests/target/issue-5042/single-line_comment_with_trailing_comma.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-5042/single-line_comment_without_trailing_comma.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_always_struct_lit_width_0.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_never_struct_lit_width_0.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_always.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_never.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_always.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_never.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_false.rs33
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs49
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_false.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_true.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_false.rs37
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_true.rs37
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_false.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_true.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_false.rs19
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs27
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_false.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_true.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_false.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_true.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue-5095.rs27
-rw-r--r--src/tools/rustfmt/tests/target/issue-510.rs41
-rw-r--r--src/tools/rustfmt/tests/target/issue-5125/attributes_in_formal_fuction_parameter.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-5125/long_parameter_in_different_positions.rs24
-rw-r--r--src/tools/rustfmt/tests/target/issue-5125/minimum_example.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-5125/with_leading_and_inline_comments.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-5151/minimum_example.rs16
-rw-r--r--src/tools/rustfmt/tests/target/issue-5157/indented_itemized_markdown_blockquote.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-5157/nested_itemized_markdown_blockquote.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue-5157/support_itemized_markdown_blockquote.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_false.rs11
-rw-r--r--src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_true.rs14
-rw-r--r--src/tools/rustfmt/tests/target/issue-5260.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue-5270/merge_derives_false.rs62
-rw-r--r--src/tools/rustfmt/tests/target/issue-5270/merge_derives_true.rs60
-rw-r--r--src/tools/rustfmt/tests/target/issue-539.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-64.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue-683.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue-691.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-770.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue-811.rs19
-rw-r--r--src/tools/rustfmt/tests/target/issue-831.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue-850.rs1
-rw-r--r--src/tools/rustfmt/tests/target/issue-855.rs27
-rw-r--r--src/tools/rustfmt/tests/target/issue-913.rs22
-rw-r--r--src/tools/rustfmt/tests/target/issue-945.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue-977.rs16
-rw-r--r--src/tools/rustfmt/tests/target/issue_3839.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue_3844.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue_3853.rs47
-rw-r--r--src/tools/rustfmt/tests/target/issue_3854.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue_3868.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue_3934.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue_3937.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue_4031.rs21
-rw-r--r--src/tools/rustfmt/tests/target/issue_4032.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue_4049.rs26
-rw-r--r--src/tools/rustfmt/tests/target/issue_4057.rs15
-rw-r--r--src/tools/rustfmt/tests/target/issue_4086.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue_4110.rs55
-rw-r--r--src/tools/rustfmt/tests/target/issue_4257.rs18
-rw-r--r--src/tools/rustfmt/tests/target/issue_4322.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue_4374.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue_4467.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue_4475.rs29
-rw-r--r--src/tools/rustfmt/tests/target/issue_4522.rs6
-rw-r--r--src/tools/rustfmt/tests/target/issue_4528.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue_4545.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue_4573.rs245
-rw-r--r--src/tools/rustfmt/tests/target/issue_4579.rs16
-rw-r--r--src/tools/rustfmt/tests/target/issue_4584.rs32
-rw-r--r--src/tools/rustfmt/tests/target/issue_4636.rs13
-rw-r--r--src/tools/rustfmt/tests/target/issue_4675.rs8
-rw-r--r--src/tools/rustfmt/tests/target/issue_4823.rs5
-rw-r--r--src/tools/rustfmt/tests/target/issue_4850.rs4
-rw-r--r--src/tools/rustfmt/tests/target/issue_4854.rs115
-rw-r--r--src/tools/rustfmt/tests/target/issue_4868.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue_4911.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue_4936.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue_4943.rs10
-rw-r--r--src/tools/rustfmt/tests/target/issue_4954.rs7
-rw-r--r--src/tools/rustfmt/tests/target/issue_4963.rs9
-rw-r--r--src/tools/rustfmt/tests/target/issue_5027.rs17
-rw-r--r--src/tools/rustfmt/tests/target/issue_5086.rs2
-rw-r--r--src/tools/rustfmt/tests/target/issue_5273.rs3
-rw-r--r--src/tools/rustfmt/tests/target/issue_5399.rs48
-rw-r--r--src/tools/rustfmt/tests/target/item-brace-style-always-next-line.rs71
-rw-r--r--src/tools/rustfmt/tests/target/item-brace-style-prefer-same-line.rs35
-rw-r--r--src/tools/rustfmt/tests/target/item-brace-style-same-line-where.rs37
-rw-r--r--src/tools/rustfmt/tests/target/itemized-blocks/no_wrap.rs47
-rw-r--r--src/tools/rustfmt/tests/target/itemized-blocks/rewrite_fail.rs14
-rw-r--r--src/tools/rustfmt/tests/target/itemized-blocks/urls.rs25
-rw-r--r--src/tools/rustfmt/tests/target/itemized-blocks/wrap.rs89
-rw-r--r--src/tools/rustfmt/tests/target/label_break.rs28
-rw-r--r--src/tools/rustfmt/tests/target/large-block.rs5
-rw-r--r--src/tools/rustfmt/tests/target/large_vec.rs42
-rw-r--r--src/tools/rustfmt/tests/target/lazy_static.rs49
-rw-r--r--src/tools/rustfmt/tests/target/let_else.rs3
-rw-r--r--src/tools/rustfmt/tests/target/long-fn-1/version_one.rs29
-rw-r--r--src/tools/rustfmt/tests/target/long-fn-1/version_two.rs29
-rw-r--r--src/tools/rustfmt/tests/target/long-match-arms-brace-newline.rs15
-rw-r--r--src/tools/rustfmt/tests/target/long-use-statement-issue-3154.rs3
-rw-r--r--src/tools/rustfmt/tests/target/long_field_access.rs4
-rw-r--r--src/tools/rustfmt/tests/target/loop.rs34
-rw-r--r--src/tools/rustfmt/tests/target/macro_not_expr.rs7
-rw-r--r--src/tools/rustfmt/tests/target/macro_rules.rs360
-rw-r--r--src/tools/rustfmt/tests/target/macro_rules_semi.rs22
-rw-r--r--src/tools/rustfmt/tests/target/macros.rs1062
-rw-r--r--src/tools/rustfmt/tests/target/markdown-comment-with-options.rs17
-rw-r--r--src/tools/rustfmt/tests/target/markdown-comment.rs15
-rw-r--r--src/tools/rustfmt/tests/target/match-block-trailing-comma.rs26
-rw-r--r--src/tools/rustfmt/tests/target/match-flattening.rs23
-rw-r--r--src/tools/rustfmt/tests/target/match-nowrap-trailing-comma.rs17
-rw-r--r--src/tools/rustfmt/tests/target/match-nowrap.rs13
-rw-r--r--src/tools/rustfmt/tests/target/match.rs629
-rw-r--r--src/tools/rustfmt/tests/target/match_overflow_expr.rs50
-rw-r--r--src/tools/rustfmt/tests/target/max-line-length-in-chars.rs4
-rw-r--r--src/tools/rustfmt/tests/target/merge_imports_true_compat.rs3
-rw-r--r--src/tools/rustfmt/tests/target/mod-1.rs37
-rw-r--r--src/tools/rustfmt/tests/target/mod-2.rs5
-rw-r--r--src/tools/rustfmt/tests/target/mod_skip_child.rs2
-rw-r--r--src/tools/rustfmt/tests/target/mulit-file.rs10
-rw-r--r--src/tools/rustfmt/tests/target/multiline_string_in_macro_def.rs14
-rw-r--r--src/tools/rustfmt/tests/target/multiple.rs180
-rw-r--r--src/tools/rustfmt/tests/target/negative-impl.rs14
-rw-r--r--src/tools/rustfmt/tests/target/nested-if-else.rs11
-rw-r--r--src/tools/rustfmt/tests/target/nested-visual-block.rs60
-rw-r--r--src/tools/rustfmt/tests/target/nested_skipped/mod.rs3
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/mod.rs12
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/mod2a.rs4
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/mod2b.rs2
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/mod2c.rs3
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/mymod1/mod3a.rs2
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/submod2/a.rs6
-rw-r--r--src/tools/rustfmt/tests/target/nestedmod/submod2/mod.rs5
-rw-r--r--src/tools/rustfmt/tests/target/no_arg_with_commnet.rs1
-rw-r--r--src/tools/rustfmt/tests/target/no_new_line_beginning.rs1
-rw-r--r--src/tools/rustfmt/tests/target/normalize_doc_attributes_should_not_imply_format_doc_comments.rs15
-rw-r--r--src/tools/rustfmt/tests/target/normalize_multiline_doc_attribute.rs12
-rw-r--r--src/tools/rustfmt/tests/target/obsolete_in_place.rs9
-rw-r--r--src/tools/rustfmt/tests/target/one_line_if_v1.rs46
-rw-r--r--src/tools/rustfmt/tests/target/one_line_if_v2.rs38
-rw-r--r--src/tools/rustfmt/tests/target/other.rs5
-rw-r--r--src/tools/rustfmt/tests/target/paren.rs6
-rw-r--r--src/tools/rustfmt/tests/target/path_clarity/foo.rs2
-rw-r--r--src/tools/rustfmt/tests/target/path_clarity/foo/bar.rs3
-rw-r--r--src/tools/rustfmt/tests/target/paths.rs28
-rw-r--r--src/tools/rustfmt/tests/target/pattern-condense-wildcards.rs12
-rw-r--r--src/tools/rustfmt/tests/target/pattern.rs98
-rw-r--r--src/tools/rustfmt/tests/target/preserves_carriage_return_for_unix.rs2
-rw-r--r--src/tools/rustfmt/tests/target/preserves_carriage_return_for_windows.rs2
-rw-r--r--src/tools/rustfmt/tests/target/pub-restricted.rs51
-rw-r--r--src/tools/rustfmt/tests/target/raw_identifiers.rs66
-rw-r--r--src/tools/rustfmt/tests/target/remove_blank_lines.rs28
-rw-r--r--src/tools/rustfmt/tests/target/reorder-impl-items.rs15
-rw-r--r--src/tools/rustfmt/tests/target/should_not_format_string_when_format_strings_is_not_set.rs16
-rw-r--r--src/tools/rustfmt/tests/target/single-line-if-else.rs58
-rw-r--r--src/tools/rustfmt/tests/target/single-line-macro/v1.rs10
-rw-r--r--src/tools/rustfmt/tests/target/single-line-macro/v2.rs14
-rw-r--r--src/tools/rustfmt/tests/target/skip.rs87
-rw-r--r--src/tools/rustfmt/tests/target/skip/foo.rs5
-rw-r--r--src/tools/rustfmt/tests/target/skip/main.rs5
-rw-r--r--src/tools/rustfmt/tests/target/skip/preserve_trailing_comment.rs7
-rw-r--r--src/tools/rustfmt/tests/target/skip_mod.rs3
-rw-r--r--src/tools/rustfmt/tests/target/soft-wrapping.rs15
-rw-r--r--src/tools/rustfmt/tests/target/space-not-before-newline.rs8
-rw-r--r--src/tools/rustfmt/tests/target/spaces-around-ranges.rs15
-rw-r--r--src/tools/rustfmt/tests/target/statements.rs42
-rw-r--r--src/tools/rustfmt/tests/target/static.rs27
-rw-r--r--src/tools/rustfmt/tests/target/string-lit-2.rs25
-rw-r--r--src/tools/rustfmt/tests/target/string-lit-custom.rs20
-rw-r--r--src/tools/rustfmt/tests/target/string-lit.rs63
-rw-r--r--src/tools/rustfmt/tests/target/string_punctuation.rs24
-rw-r--r--src/tools/rustfmt/tests/target/struct-field-attributes.rs62
-rw-r--r--src/tools/rustfmt/tests/target/struct_field_doc_comment.rs69
-rw-r--r--src/tools/rustfmt/tests/target/struct_lits.rs190
-rw-r--r--src/tools/rustfmt/tests/target/struct_lits_multiline.rs117
-rw-r--r--src/tools/rustfmt/tests/target/struct_lits_visual.rs49
-rw-r--r--src/tools/rustfmt/tests/target/struct_lits_visual_multiline.rs49
-rw-r--r--src/tools/rustfmt/tests/target/struct_tuple_visual.rs36
-rw-r--r--src/tools/rustfmt/tests/target/structs.rs358
-rw-r--r--src/tools/rustfmt/tests/target/trailing-comma-never.rs35
-rw-r--r--src/tools/rustfmt/tests/target/trailing_commas.rs78
-rw-r--r--src/tools/rustfmt/tests/target/trailing_comments/hard_tabs.rs30
-rw-r--r--src/tools/rustfmt/tests/target/trailing_comments/soft_tabs.rs30
-rw-r--r--src/tools/rustfmt/tests/target/trait.rs220
-rw-r--r--src/tools/rustfmt/tests/target/try-conversion.rs28
-rw-r--r--src/tools/rustfmt/tests/target/try_block.rs29
-rw-r--r--src/tools/rustfmt/tests/target/tuple.rs100
-rw-r--r--src/tools/rustfmt/tests/target/tuple_v2.rs5
-rw-r--r--src/tools/rustfmt/tests/target/type-ascription.rs12
-rw-r--r--src/tools/rustfmt/tests/target/type.rs175
-rw-r--r--src/tools/rustfmt/tests/target/type_alias.rs76
-rw-r--r--src/tools/rustfmt/tests/target/unicode.rs30
-rw-r--r--src/tools/rustfmt/tests/target/unindent_if_else_cond_comment.rs27
-rw-r--r--src/tools/rustfmt/tests/target/unions.rs198
-rw-r--r--src/tools/rustfmt/tests/target/unsafe-mod.rs7
-rw-r--r--src/tools/rustfmt/tests/target/visibility.rs8
-rw-r--r--src/tools/rustfmt/tests/target/visual-fn-type.rs9
-rw-r--r--src/tools/rustfmt/tests/target/where-clause-rfc.rs156
-rw-r--r--src/tools/rustfmt/tests/target/where-clause.rs107
-rw-r--r--src/tools/rustfmt/tests/target/width-heuristics.rs24
-rw-r--r--src/tools/rustfmt/tests/target/wrap_comments_should_not_imply_format_doc_comments.rs16
-rw-r--r--src/tools/rustfmt/tests/writemode/source/fn-single-line.rs80
-rw-r--r--src/tools/rustfmt/tests/writemode/source/json.rs80
-rw-r--r--src/tools/rustfmt/tests/writemode/source/modified.rs14
-rw-r--r--src/tools/rustfmt/tests/writemode/source/stdin.rs6
-rw-r--r--src/tools/rustfmt/tests/writemode/target/checkstyle.xml2
-rw-r--r--src/tools/rustfmt/tests/writemode/target/modified.txt5
-rw-r--r--src/tools/rustfmt/tests/writemode/target/output.json1
-rw-r--r--src/tools/rustfmt/tests/writemode/target/stdin.json1
-rw-r--r--src/tools/rustfmt/tests/writemode/target/stdin.xml2
-rw-r--r--src/tools/rustfmt/triagebot.toml1
-rw-r--r--src/tools/tidy/Cargo.toml16
-rw-r--r--src/tools/tidy/src/bins.rs151
-rw-r--r--src/tools/tidy/src/debug_artifacts.rs23
-rw-r--r--src/tools/tidy/src/deps.rs621
-rw-r--r--src/tools/tidy/src/edition.rs31
-rw-r--r--src/tools/tidy/src/error_codes_check.rs319
-rw-r--r--src/tools/tidy/src/errors.rs76
-rw-r--r--src/tools/tidy/src/extdeps.rs33
-rw-r--r--src/tools/tidy/src/features.rs550
-rw-r--r--src/tools/tidy/src/features/tests.rs9
-rw-r--r--src/tools/tidy/src/features/version.rs48
-rw-r--r--src/tools/tidy/src/features/version/tests.rs38
-rw-r--r--src/tools/tidy/src/lib.rs110
-rw-r--r--src/tools/tidy/src/main.rs112
-rw-r--r--src/tools/tidy/src/pal.rs209
-rw-r--r--src/tools/tidy/src/primitive_docs.rs17
-rw-r--r--src/tools/tidy/src/style.rs423
-rw-r--r--src/tools/tidy/src/target_specific_tests.rs96
-rw-r--r--src/tools/tidy/src/ui_tests.rs82
-rw-r--r--src/tools/tidy/src/unit_tests.rs66
-rw-r--r--src/tools/tidy/src/unstable_book.rs132
-rw-r--r--src/tools/tier-check/Cargo.toml7
-rw-r--r--src/tools/tier-check/src/main.rs50
-rw-r--r--src/tools/unicode-table-generator/Cargo.toml9
-rw-r--r--src/tools/unicode-table-generator/src/case_mapping.rs70
-rw-r--r--src/tools/unicode-table-generator/src/main.rs439
-rw-r--r--src/tools/unicode-table-generator/src/range_search.rs93
-rw-r--r--src/tools/unicode-table-generator/src/raw_emitter.rs394
-rw-r--r--src/tools/unicode-table-generator/src/skiplist.rs98
-rw-r--r--src/tools/unicode-table-generator/src/unicode_download.rs46
-rw-r--r--src/tools/unstable-book-gen/Cargo.toml12
-rw-r--r--src/tools/unstable-book-gen/src/SUMMARY.md8
-rw-r--r--src/tools/unstable-book-gen/src/main.rs130
-rw-r--r--src/tools/unstable-book-gen/src/stub-issue.md7
-rw-r--r--src/tools/unstable-book-gen/src/stub-no-issue.md5
-rw-r--r--src/tools/x/Cargo.lock5
-rw-r--r--src/tools/x/Cargo.toml6
-rw-r--r--src/tools/x/README.md10
-rw-r--r--src/tools/x/src/main.rs96
5903 files changed, 677135 insertions, 0 deletions
diff --git a/src/tools/build-manifest/Cargo.toml b/src/tools/build-manifest/Cargo.toml
new file mode 100644
index 000000000..c022d3aa0
--- /dev/null
+++ b/src/tools/build-manifest/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "build-manifest"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+toml = "0.5"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+anyhow = "1.0.32"
+flate2 = "1.0.16"
+tar = "0.4.29"
+sha2 = "0.10.1"
+rayon = "1.5.1"
+hex = "0.4.2"
+num_cpus = "1.13.0"
diff --git a/src/tools/build-manifest/README.md b/src/tools/build-manifest/README.md
new file mode 100644
index 000000000..44c96f31d
--- /dev/null
+++ b/src/tools/build-manifest/README.md
@@ -0,0 +1,27 @@
+# build-manifest
+
+This tool generates the manifests uploaded to static.rust-lang.org and used by
+rustup. It is invoked by the bootstrap tool.
+
+## Testing changes locally
+
+To test changes locally you need to have a valid dist directory available. If
+you don't want to build the whole compiler, you can easily create one from the
+nightly artifacts with:
+
+```
+#!/bin/bash
+for cmpn in rust rustc rust-std rust-docs cargo; do
+ wget https://static.rust-lang.org/dist/${cmpn}-nightly-x86_64-unknown-linux-gnu.tar.gz
+done
+```
+
+Then, you can generate the manifest and all the packages from `path/to/dist` to
+`path/to/output` with:
+
+```
+$ cargo +nightly run path/to/dist path/to/output 1970-01-01 http://example.com CHANNEL
+```
+
+Remember to replace `CHANNEL` with the channel the dist artifacts were built
+for (for example `nightly`).
diff --git a/src/tools/build-manifest/src/checksum.rs b/src/tools/build-manifest/src/checksum.rs
new file mode 100644
index 000000000..c019c7a2f
--- /dev/null
+++ b/src/tools/build-manifest/src/checksum.rs
@@ -0,0 +1,97 @@
+use crate::manifest::{FileHash, Manifest};
+use rayon::prelude::*;
+use sha2::{Digest, Sha256};
+use std::collections::{HashMap, HashSet};
+use std::error::Error;
+use std::fs::File;
+use std::io::BufReader;
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+use std::time::Instant;
+
+pub(crate) struct Checksums {
+ cache_path: Option<PathBuf>,
+ collected: Mutex<HashMap<PathBuf, String>>,
+}
+
+impl Checksums {
+ pub(crate) fn new() -> Result<Self, Box<dyn Error>> {
+ let cache_path = std::env::var_os("BUILD_MANIFEST_CHECKSUM_CACHE").map(PathBuf::from);
+
+ let mut collected = HashMap::new();
+ if let Some(path) = &cache_path {
+ if path.is_file() {
+ collected = serde_json::from_slice(&std::fs::read(path)?)?;
+ }
+ }
+
+ Ok(Checksums { cache_path, collected: Mutex::new(collected) })
+ }
+
+ pub(crate) fn store_cache(&self) -> Result<(), Box<dyn Error>> {
+ if let Some(path) = &self.cache_path {
+ std::fs::write(path, &serde_json::to_vec(&self.collected)?)?;
+ }
+ Ok(())
+ }
+
+ pub(crate) fn fill_missing_checksums(&mut self, manifest: &mut Manifest) {
+ let need_checksums = self.find_missing_checksums(manifest);
+ if !need_checksums.is_empty() {
+ self.collect_checksums(&need_checksums);
+ }
+ self.replace_checksums(manifest);
+ }
+
+ fn find_missing_checksums(&mut self, manifest: &mut Manifest) -> HashSet<PathBuf> {
+ let collected = self.collected.lock().unwrap();
+ let mut need_checksums = HashSet::new();
+ crate::manifest::visit_file_hashes(manifest, |file_hash| {
+ if let FileHash::Missing(path) = file_hash {
+ let path = std::fs::canonicalize(path).unwrap();
+ if !collected.contains_key(&path) {
+ need_checksums.insert(path);
+ }
+ }
+ });
+ need_checksums
+ }
+
+ fn replace_checksums(&mut self, manifest: &mut Manifest) {
+ let collected = self.collected.lock().unwrap();
+ crate::manifest::visit_file_hashes(manifest, |file_hash| {
+ if let FileHash::Missing(path) = file_hash {
+ let path = std::fs::canonicalize(path).unwrap();
+ match collected.get(&path) {
+ Some(hash) => *file_hash = FileHash::Present(hash.clone()),
+ None => panic!("missing hash for file {}", path.display()),
+ }
+ }
+ });
+ }
+
+ fn collect_checksums(&mut self, files: &HashSet<PathBuf>) {
+ let collection_start = Instant::now();
+ println!(
+ "collecting hashes for {} tarballs across {} threads",
+ files.len(),
+ rayon::current_num_threads().min(files.len()),
+ );
+
+ files.par_iter().for_each(|path| match hash(path) {
+ Ok(hash) => {
+ self.collected.lock().unwrap().insert(path.clone(), hash);
+ }
+ Err(err) => eprintln!("error while fetching the hash for {}: {}", path.display(), err),
+ });
+
+ println!("collected {} hashes in {:.2?}", files.len(), collection_start.elapsed());
+ }
+}
+
+fn hash(path: &Path) -> Result<String, Box<dyn Error>> {
+ let mut file = BufReader::new(File::open(path)?);
+ let mut sha256 = Sha256::default();
+ std::io::copy(&mut file, &mut sha256)?;
+ Ok(hex::encode(sha256.finalize()))
+}
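
The checksum cache above boils down to a simple pattern: read a JSON map of canonical path to hex digest from the file named by `BUILD_MANIFEST_CHECKSUM_CACHE`, hash only the files missing from that map (in parallel, via rayon), and write the map back out. The following is a minimal standalone sketch of that pattern using the crates already listed in `Cargo.toml`; the function names are illustrative and it is not part of the patch itself:

```
use rayon::prelude::*;
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::error::Error;
use std::fs::File;
use std::io::BufReader;
use std::path::{Path, PathBuf};
use std::sync::Mutex;

// Stream one file through SHA-256 and return the hex digest.
fn sha256_hex(path: &Path) -> Result<String, Box<dyn Error>> {
    let mut file = BufReader::new(File::open(path)?);
    let mut hasher = Sha256::default();
    std::io::copy(&mut file, &mut hasher)?;
    Ok(hex::encode(hasher.finalize()))
}

// Hash every file that is missing from the cache, in parallel, then persist the cache.
fn update_cache(cache_path: &Path, files: &[PathBuf]) -> Result<(), Box<dyn Error>> {
    let mut cache: HashMap<PathBuf, String> = if cache_path.is_file() {
        serde_json::from_slice(&std::fs::read(cache_path)?)?
    } else {
        HashMap::new()
    };

    // Only hash files we have never seen before.
    let missing: Vec<_> = files.iter().filter(|f| !cache.contains_key(*f)).collect();

    // Each rayon worker records its result in a Mutex-protected map.
    let collected = Mutex::new(HashMap::new());
    missing.par_iter().for_each(|path| {
        if let Ok(hash) = sha256_hex(path.as_path()) {
            collected.lock().unwrap().insert((*path).clone(), hash);
        }
    });

    cache.extend(collected.into_inner().unwrap());
    std::fs::write(cache_path, serde_json::to_vec(&cache)?)?;
    Ok(())
}
```

Streaming each tarball through `std::io::copy`, as the real `hash` function above also does, keeps memory use flat even for multi-gigabyte artifacts.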
diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs
new file mode 100644
index 000000000..efe3f2b61
--- /dev/null
+++ b/src/tools/build-manifest/src/main.rs
@@ -0,0 +1,608 @@
+//! Build a dist manifest, hash and sign everything.
+//! This gets called by `promote-release`
+//! (https://github.com/rust-lang/rust-central-station/tree/master/promote-release)
+//! via `x.py dist hash-and-sign`; the cmdline arguments are set up
+//! by rustbuild (in `src/bootstrap/dist.rs`).
+
+mod checksum;
+mod manifest;
+mod versions;
+
+use crate::checksum::Checksums;
+use crate::manifest::{Component, Manifest, Package, Rename, Target};
+use crate::versions::{PkgType, Versions};
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::env;
+use std::fs::{self, File};
+use std::path::{Path, PathBuf};
+
+static HOSTS: &[&str] = &[
+ "aarch64-apple-darwin",
+ "aarch64-pc-windows-msvc",
+ "aarch64-unknown-linux-gnu",
+ "aarch64-unknown-linux-musl",
+ "arm-unknown-linux-gnueabi",
+ "arm-unknown-linux-gnueabihf",
+ "armv7-unknown-linux-gnueabihf",
+ "i686-apple-darwin",
+ "i686-pc-windows-gnu",
+ "i686-pc-windows-msvc",
+ "i686-unknown-linux-gnu",
+ "mips-unknown-linux-gnu",
+ "mips64-unknown-linux-gnuabi64",
+ "mips64el-unknown-linux-gnuabi64",
+ "mipsel-unknown-linux-gnu",
+ "mipsisa32r6-unknown-linux-gnu",
+ "mipsisa32r6el-unknown-linux-gnu",
+ "mipsisa64r6-unknown-linux-gnuabi64",
+ "mipsisa64r6el-unknown-linux-gnuabi64",
+ "powerpc-unknown-linux-gnu",
+ "powerpc64-unknown-linux-gnu",
+ "powerpc64le-unknown-linux-gnu",
+ "riscv64gc-unknown-linux-gnu",
+ "s390x-unknown-linux-gnu",
+ "x86_64-apple-darwin",
+ "x86_64-pc-windows-gnu",
+ "x86_64-pc-windows-msvc",
+ "x86_64-unknown-freebsd",
+ "x86_64-unknown-illumos",
+ "x86_64-unknown-linux-gnu",
+ "x86_64-unknown-linux-musl",
+ "x86_64-unknown-netbsd",
+];
+
+static TARGETS: &[&str] = &[
+ "aarch64-apple-darwin",
+ "aarch64-apple-ios",
+ "aarch64-apple-ios-sim",
+ "aarch64-fuchsia",
+ "aarch64-linux-android",
+ "aarch64-pc-windows-msvc",
+ "aarch64-unknown-hermit",
+ "aarch64-unknown-linux-gnu",
+ "aarch64-unknown-linux-musl",
+ "aarch64-unknown-none",
+ "aarch64-unknown-none-softfloat",
+ "aarch64-unknown-redox",
+ "arm-linux-androideabi",
+ "arm-unknown-linux-gnueabi",
+ "arm-unknown-linux-gnueabihf",
+ "arm-unknown-linux-musleabi",
+ "arm-unknown-linux-musleabihf",
+ "armv5te-unknown-linux-gnueabi",
+ "armv5te-unknown-linux-musleabi",
+ "armv7-apple-ios",
+ "armv7-linux-androideabi",
+ "thumbv7neon-linux-androideabi",
+ "armv7-unknown-linux-gnueabi",
+ "armv7-unknown-linux-gnueabihf",
+ "armv7a-none-eabi",
+ "thumbv7neon-unknown-linux-gnueabihf",
+ "armv7-unknown-linux-musleabi",
+ "armv7-unknown-linux-musleabihf",
+ "armebv7r-none-eabi",
+ "armebv7r-none-eabihf",
+ "armv7r-none-eabi",
+ "armv7r-none-eabihf",
+ "armv7s-apple-ios",
+ "asmjs-unknown-emscripten",
+ "bpfeb-unknown-none",
+ "bpfel-unknown-none",
+ "i386-apple-ios",
+ "i586-pc-windows-msvc",
+ "i586-unknown-linux-gnu",
+ "i586-unknown-linux-musl",
+ "i686-apple-darwin",
+ "i686-linux-android",
+ "i686-pc-windows-gnu",
+ "i686-pc-windows-msvc",
+ "i686-unknown-freebsd",
+ "i686-unknown-linux-gnu",
+ "i686-unknown-linux-musl",
+ "m68k-unknown-linux-gnu",
+ "mips-unknown-linux-gnu",
+ "mips-unknown-linux-musl",
+ "mips64-unknown-linux-gnuabi64",
+ "mips64-unknown-linux-muslabi64",
+ "mips64el-unknown-linux-gnuabi64",
+ "mips64el-unknown-linux-muslabi64",
+ "mipsisa32r6-unknown-linux-gnu",
+ "mipsisa32r6el-unknown-linux-gnu",
+ "mipsisa64r6-unknown-linux-gnuabi64",
+ "mipsisa64r6el-unknown-linux-gnuabi64",
+ "mipsel-unknown-linux-gnu",
+ "mipsel-unknown-linux-musl",
+ "nvptx64-nvidia-cuda",
+ "powerpc-unknown-linux-gnu",
+ "powerpc64-unknown-linux-gnu",
+ "powerpc64le-unknown-linux-gnu",
+ "riscv32i-unknown-none-elf",
+ "riscv32im-unknown-none-elf",
+ "riscv32imc-unknown-none-elf",
+ "riscv32imac-unknown-none-elf",
+ "riscv32gc-unknown-linux-gnu",
+ "riscv64imac-unknown-none-elf",
+ "riscv64gc-unknown-none-elf",
+ "riscv64gc-unknown-linux-gnu",
+ "s390x-unknown-linux-gnu",
+ "sparc64-unknown-linux-gnu",
+ "sparcv9-sun-solaris",
+ "thumbv6m-none-eabi",
+ "thumbv7em-none-eabi",
+ "thumbv7em-none-eabihf",
+ "thumbv7m-none-eabi",
+ "thumbv8m.base-none-eabi",
+ "thumbv8m.main-none-eabi",
+ "thumbv8m.main-none-eabihf",
+ "wasm32-unknown-emscripten",
+ "wasm32-unknown-unknown",
+ "wasm32-wasi",
+ "x86_64-apple-darwin",
+ "x86_64-apple-ios",
+ "x86_64-fortanix-unknown-sgx",
+ "x86_64-fuchsia",
+ "x86_64-linux-android",
+ "x86_64-pc-windows-gnu",
+ "x86_64-pc-windows-msvc",
+ "x86_64-sun-solaris",
+ "x86_64-pc-solaris",
+ "x86_64-unknown-freebsd",
+ "x86_64-unknown-illumos",
+ "x86_64-unknown-linux-gnu",
+ "x86_64-unknown-linux-gnux32",
+ "x86_64-unknown-linux-musl",
+ "x86_64-unknown-netbsd",
+ "x86_64-unknown-none",
+ "x86_64-unknown-redox",
+ "x86_64-unknown-hermit",
+];
+
+/// This allows the manifest to contain rust-docs for hosts that don't build
+/// docs.
+///
+/// Each entry is a `(host_partial, host_instead)` tuple. If a host does not
+/// have the rust-docs component available and its name contains `host_partial`,
+/// the docs from `host_instead` are used instead.
+///
+/// The order here matters: more specific entries should come first.
+static DOCS_FALLBACK: &[(&str, &str)] = &[
+ ("-apple-", "x86_64-apple-darwin"),
+ ("aarch64", "aarch64-unknown-linux-gnu"),
+ ("arm-", "aarch64-unknown-linux-gnu"),
+ ("", "x86_64-unknown-linux-gnu"),
+];
+
+static MSI_INSTALLERS: &[&str] = &[
+ "aarch64-pc-windows-msvc",
+ "i686-pc-windows-gnu",
+ "i686-pc-windows-msvc",
+ "x86_64-pc-windows-gnu",
+ "x86_64-pc-windows-msvc",
+];
+
+static PKG_INSTALLERS: &[&str] = &["x86_64-apple-darwin", "aarch64-apple-darwin"];
+
+static MINGW: &[&str] = &["i686-pc-windows-gnu", "x86_64-pc-windows-gnu"];
+
+static NIGHTLY_ONLY_COMPONENTS: &[&str] = &["miri-preview"];
+
+macro_rules! t {
+ ($e:expr) => {
+ match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed with {}", stringify!($e), e),
+ }
+ };
+}
+
+struct Builder {
+ versions: Versions,
+ checksums: Checksums,
+ shipped_files: HashSet<String>,
+
+ input: PathBuf,
+ output: PathBuf,
+ s3_address: String,
+ date: String,
+}
+
+fn main() {
+ let num_threads = if let Some(num) = env::var_os("BUILD_MANIFEST_NUM_THREADS") {
+ num.to_str().unwrap().parse().expect("invalid number for BUILD_MANIFEST_NUM_THREADS")
+ } else {
+ num_cpus::get()
+ };
+ rayon::ThreadPoolBuilder::new()
+ .num_threads(num_threads)
+ .build_global()
+ .expect("failed to initialize Rayon");
+
+ let mut args = env::args().skip(1);
+ let input = PathBuf::from(args.next().unwrap());
+ let output = PathBuf::from(args.next().unwrap());
+ let date = args.next().unwrap();
+ let s3_address = args.next().unwrap();
+ let channel = args.next().unwrap();
+
+ Builder {
+ versions: Versions::new(&channel, &input).unwrap(),
+ checksums: t!(Checksums::new()),
+ shipped_files: HashSet::new(),
+
+ input,
+ output,
+ s3_address,
+ date,
+ }
+ .build();
+}
+
+impl Builder {
+ fn build(&mut self) {
+ self.check_toolstate();
+ let manifest = self.build_manifest();
+
+ let channel = self.versions.channel().to_string();
+ self.write_channel_files(&channel, &manifest);
+ if channel == "stable" {
+ // channel-rust-1.XX.YY.toml
+ let rust_version = self.versions.rustc_version().to_string();
+ self.write_channel_files(&rust_version, &manifest);
+
+ // channel-rust-1.XX.toml
+ let major_minor = rust_version.split('.').take(2).collect::<Vec<_>>().join(".");
+ self.write_channel_files(&major_minor, &manifest);
+ }
+
+ if let Some(path) = std::env::var_os("BUILD_MANIFEST_SHIPPED_FILES_PATH") {
+ self.write_shipped_files(&Path::new(&path));
+ }
+
+ t!(self.checksums.store_cache());
+ }
+
+    /// If a tool fails its tests on *either* Linux or Windows, don't ship it for
+    /// *any* target, because tools like Miri can "cross-run" programs for other
+    /// targets (for example, run a program for `x86_64-pc-windows-msvc` on
+    /// `x86_64-unknown-linux-gnu`).
+ /// Right now, we do this only for Miri.
+ fn check_toolstate(&mut self) {
+ for file in &["toolstates-linux.json", "toolstates-windows.json"] {
+ let toolstates: Option<HashMap<String, String>> = File::open(self.input.join(file))
+ .ok()
+ .and_then(|f| serde_json::from_reader(&f).ok());
+ let toolstates = toolstates.unwrap_or_else(|| {
+ println!("WARNING: `{}` missing/malformed; assuming all tools failed", file);
+ HashMap::default() // Use empty map if anything went wrong.
+ });
+ // Mark some tools as missing based on toolstate.
+ if toolstates.get("miri").map(|s| &*s as &str) != Some("test-pass") {
+ println!("Miri tests are not passing, removing component");
+ self.versions.disable_version(&PkgType::Miri);
+ break;
+ }
+ }
+ }
+
+ fn build_manifest(&mut self) -> Manifest {
+ let mut manifest = Manifest {
+ manifest_version: "2".to_string(),
+ date: self.date.to_string(),
+ pkg: BTreeMap::new(),
+ artifacts: BTreeMap::new(),
+ renames: BTreeMap::new(),
+ profiles: BTreeMap::new(),
+ };
+ self.add_packages_to(&mut manifest);
+ self.add_artifacts_to(&mut manifest);
+ self.add_profiles_to(&mut manifest);
+ self.add_renames_to(&mut manifest);
+ manifest.pkg.insert("rust".to_string(), self.rust_package(&manifest));
+
+ self.checksums.fill_missing_checksums(&mut manifest);
+
+ manifest
+ }
+
+ fn add_packages_to(&mut self, manifest: &mut Manifest) {
+ macro_rules! package {
+ ($name:expr, $targets:expr) => {
+ self.package($name, &mut manifest.pkg, $targets, &[])
+ };
+ }
+ package!("rustc", HOSTS);
+ package!("rustc-dev", HOSTS);
+ package!("reproducible-artifacts", HOSTS);
+ package!("rustc-docs", HOSTS);
+ package!("cargo", HOSTS);
+ package!("rust-mingw", MINGW);
+ package!("rust-std", TARGETS);
+ self.package("rust-docs", &mut manifest.pkg, HOSTS, DOCS_FALLBACK);
+ package!("rust-src", &["*"]);
+ package!("rls-preview", HOSTS);
+ package!("rust-analyzer-preview", HOSTS);
+ package!("clippy-preview", HOSTS);
+ package!("miri-preview", HOSTS);
+ package!("rustfmt-preview", HOSTS);
+ package!("rust-analysis", TARGETS);
+ package!("llvm-tools-preview", TARGETS);
+ }
+
+ fn add_artifacts_to(&mut self, manifest: &mut Manifest) {
+ manifest.add_artifact("source-code", |artifact| {
+ let tarball = self.versions.tarball_name(&PkgType::Rustc, "src").unwrap();
+ artifact.add_tarball(self, "*", &tarball);
+ });
+
+ manifest.add_artifact("installer-msi", |artifact| {
+ for target in MSI_INSTALLERS {
+ let msi = self.versions.archive_name(&PkgType::Rust, target, "msi").unwrap();
+ artifact.add_file(self, target, &msi);
+ }
+ });
+
+ manifest.add_artifact("installer-pkg", |artifact| {
+ for target in PKG_INSTALLERS {
+ let pkg = self.versions.archive_name(&PkgType::Rust, target, "pkg").unwrap();
+ artifact.add_file(self, target, &pkg);
+ }
+ });
+ }
+
+ fn add_profiles_to(&mut self, manifest: &mut Manifest) {
+ let mut profile = |name, pkgs| self.profile(name, &mut manifest.profiles, pkgs);
+ profile("minimal", &["rustc", "cargo", "rust-std", "rust-mingw"]);
+ profile(
+ "default",
+ &[
+ "rustc",
+ "cargo",
+ "rust-std",
+ "rust-mingw",
+ "rust-docs",
+ "rustfmt-preview",
+ "clippy-preview",
+ ],
+ );
+ profile(
+ "complete",
+ &[
+ "rustc",
+ "cargo",
+ "rust-std",
+ "rust-mingw",
+ "rust-docs",
+ "rustfmt-preview",
+ "clippy-preview",
+ "rls-preview",
+ "rust-analyzer-preview",
+ "rust-src",
+ "llvm-tools-preview",
+ "rust-analysis",
+ "miri-preview",
+ ],
+ );
+
+        // The compiler libraries are not stable for end users, and they're also huge, so we only
+        // ship `rustc-dev` to nightly users, and only in the "complete" profile. It's still
+        // possible for users to install the additional component manually, if needed.
+ if self.versions.channel() == "nightly" {
+ self.extend_profile("complete", &mut manifest.profiles, &["rustc-dev"]);
+ // Do not include the rustc-docs component for now, as it causes
+ // conflicts with the rust-docs component when installed. See
+ // #75833.
+ // self.extend_profile("complete", &mut manifest.profiles, &["rustc-docs"]);
+ }
+ }
+
+ fn add_renames_to(&self, manifest: &mut Manifest) {
+ let mut rename = |from: &str, to: &str| {
+ manifest.renames.insert(from.to_owned(), Rename { to: to.to_owned() })
+ };
+ rename("rls", "rls-preview");
+ rename("rustfmt", "rustfmt-preview");
+ rename("clippy", "clippy-preview");
+ rename("miri", "miri-preview");
+ rename("rust-analyzer", "rust-analyzer-preview");
+ }
+
+ fn rust_package(&mut self, manifest: &Manifest) -> Package {
+ let version_info = self.versions.version(&PkgType::Rust).expect("missing Rust tarball");
+ let mut pkg = Package {
+ version: version_info.version.expect("missing Rust version"),
+ git_commit_hash: version_info.git_commit,
+ target: BTreeMap::new(),
+ };
+ for host in HOSTS {
+ if let Some(target) = self.target_host_combination(host, &manifest) {
+ pkg.target.insert(host.to_string(), target);
+ } else {
+ pkg.target.insert(host.to_string(), Target::unavailable());
+ continue;
+ }
+ }
+ pkg
+ }
+
+ fn target_host_combination(&mut self, host: &str, manifest: &Manifest) -> Option<Target> {
+ let filename = self.versions.tarball_name(&PkgType::Rust, host).unwrap();
+
+ let mut target = Target::from_compressed_tar(self, &filename);
+ if !target.available {
+ return None;
+ }
+
+ let mut components = Vec::new();
+ let mut extensions = Vec::new();
+
+ let host_component = |pkg| Component::from_str(pkg, host);
+
+ // rustc/rust-std/cargo/docs are all required,
+ // and so is rust-mingw if it's available for the target.
+ components.extend(vec![
+ host_component("rustc"),
+ host_component("rust-std"),
+ host_component("cargo"),
+ host_component("rust-docs"),
+ ]);
+ if host.contains("pc-windows-gnu") {
+ components.push(host_component("rust-mingw"));
+ }
+
+ // Tools are always present in the manifest,
+ // but might be marked as unavailable if they weren't built.
+ extensions.extend(vec![
+ host_component("clippy-preview"),
+ host_component("miri-preview"),
+ host_component("rls-preview"),
+ host_component("rust-analyzer-preview"),
+ host_component("rustfmt-preview"),
+ host_component("llvm-tools-preview"),
+ host_component("rust-analysis"),
+ ]);
+
+ extensions.extend(
+ TARGETS
+ .iter()
+ .filter(|&&target| target != host)
+ .map(|target| Component::from_str("rust-std", target)),
+ );
+ extensions.extend(HOSTS.iter().map(|target| Component::from_str("rustc-dev", target)));
+ extensions.extend(HOSTS.iter().map(|target| Component::from_str("rustc-docs", target)));
+ extensions.push(Component::from_str("rust-src", "*"));
+
+        // If a component/extension doesn't actually exist for this particular
+        // host/target combination, drop it entirely from our lists.
+ let has_component = |c: &Component| {
+ if c.target == "*" {
+ return true;
+ }
+ let pkg = match manifest.pkg.get(&c.pkg) {
+ Some(p) => p,
+ None => return false,
+ };
+ pkg.target.get(&c.target).is_some()
+ };
+ extensions.retain(&has_component);
+ components.retain(&has_component);
+
+ target.components = Some(components);
+ target.extensions = Some(extensions);
+ Some(target)
+ }
+
+ fn profile(
+ &mut self,
+ profile_name: &str,
+ dst: &mut BTreeMap<String, Vec<String>>,
+ pkgs: &[&str],
+ ) {
+ dst.insert(profile_name.to_owned(), pkgs.iter().map(|s| (*s).to_owned()).collect());
+ }
+
+ fn extend_profile(
+ &mut self,
+ profile_name: &str,
+ dst: &mut BTreeMap<String, Vec<String>>,
+ pkgs: &[&str],
+ ) {
+ dst.get_mut(profile_name)
+ .expect("existing profile")
+ .extend(pkgs.iter().map(|s| (*s).to_owned()));
+ }
+
+ fn package(
+ &mut self,
+ pkgname: &str,
+ dst: &mut BTreeMap<String, Package>,
+ targets: &[&str],
+ fallback: &[(&str, &str)],
+ ) {
+ let version_info = self
+ .versions
+ .version(&PkgType::from_component(pkgname))
+ .expect("failed to load package version");
+ let mut is_present = version_info.present;
+
+ // Never ship nightly-only components for other trains.
+ if self.versions.channel() != "nightly" && NIGHTLY_ONLY_COMPONENTS.contains(&pkgname) {
+ is_present = false; // Pretend the component is entirely missing.
+ }
+
+ macro_rules! tarball_name {
+ ($target_name:expr) => {
+ self.versions.tarball_name(&PkgType::from_component(pkgname), $target_name).unwrap()
+ };
+ }
+ let mut target_from_compressed_tar = |target_name| {
+ let target = Target::from_compressed_tar(self, &tarball_name!(target_name));
+ if target.available {
+ return target;
+ }
+ for (substr, fallback_target) in fallback {
+ if target_name.contains(substr) {
+ let t = Target::from_compressed_tar(self, &tarball_name!(fallback_target));
+ // Fallbacks must always be available.
+ assert!(t.available);
+ return t;
+ }
+ }
+ Target::unavailable()
+ };
+
+ let targets = targets
+ .iter()
+ .map(|name| {
+ let target = if is_present {
+ target_from_compressed_tar(name)
+ } else {
+                    // If the component is not present for this build, add it anyway but mark it
+                    // as unavailable; this way rustup won't allow upgrades without --force.
+ Target::unavailable()
+ };
+ (name.to_string(), target)
+ })
+ .collect();
+
+ dst.insert(
+ pkgname.to_string(),
+ Package {
+ version: version_info.version.unwrap_or_default(),
+ git_commit_hash: version_info.git_commit,
+ target: targets,
+ },
+ );
+ }
+
+ fn url(&self, path: &Path) -> String {
+ let file_name = path.file_name().unwrap().to_str().unwrap();
+ format!("{}/{}/{}", self.s3_address, self.date, file_name)
+ }
+
+ fn write_channel_files(&mut self, channel_name: &str, manifest: &Manifest) {
+ self.write(&toml::to_string(&manifest).unwrap(), channel_name, ".toml");
+ self.write(&manifest.date, channel_name, "-date.txt");
+ self.write(
+ manifest.pkg["rust"].git_commit_hash.as_ref().unwrap(),
+ channel_name,
+ "-git-commit-hash.txt",
+ );
+ }
+
+ fn write(&mut self, contents: &str, channel_name: &str, suffix: &str) {
+ let name = format!("channel-rust-{}{}", channel_name, suffix);
+ self.shipped_files.insert(name.clone());
+
+ let dst = self.output.join(name);
+ t!(fs::write(&dst, contents));
+ }
+
+ fn write_shipped_files(&self, path: &Path) {
+ let mut files = self.shipped_files.iter().map(|s| s.as_str()).collect::<Vec<_>>();
+ files.sort();
+ let content = format!("{}\n", files.join("\n"));
+
+ t!(std::fs::write(path, content.as_bytes()));
+ }
+}
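
One detail of `main.rs` worth illustrating is the `DOCS_FALLBACK` table: it is only consulted when a host has no rust-docs tarball of its own, it is scanned in order, and the final `("", ...)` entry matches every host name, which is why more specific entries must come first. Below is a standalone sketch of that first-match lookup; it is not part of the patch, and the real code resolves the fallback through `Target::from_compressed_tar` rather than a helper like this:

```
/// Illustration only: mirrors how `package("rust-docs", ..., DOCS_FALLBACK)`
/// walks the fallback table in order and takes the first matching entry.
fn docs_fallback_for(host: &str) -> &'static str {
    const DOCS_FALLBACK: &[(&str, &str)] = &[
        ("-apple-", "x86_64-apple-darwin"),
        ("aarch64", "aarch64-unknown-linux-gnu"),
        ("arm-", "aarch64-unknown-linux-gnu"),
        ("", "x86_64-unknown-linux-gnu"), // catch-all: "" is contained in every name
    ];
    DOCS_FALLBACK
        .iter()
        .find(|(substr, _)| host.contains(*substr))
        .map(|(_, instead)| *instead)
        .unwrap() // the catch-all entry guarantees a match
}

fn main() {
    assert_eq!(docs_fallback_for("aarch64-apple-ios"), "x86_64-apple-darwin");
    assert_eq!(docs_fallback_for("aarch64-linux-android"), "aarch64-unknown-linux-gnu");
    assert_eq!(docs_fallback_for("riscv64gc-unknown-linux-gnu"), "x86_64-unknown-linux-gnu");
}
```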
diff --git a/src/tools/build-manifest/src/manifest.rs b/src/tools/build-manifest/src/manifest.rs
new file mode 100644
index 000000000..547c270d8
--- /dev/null
+++ b/src/tools/build-manifest/src/manifest.rs
@@ -0,0 +1,182 @@
+use crate::Builder;
+use serde::{Serialize, Serializer};
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+
+#[derive(Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub(crate) struct Manifest {
+ pub(crate) manifest_version: String,
+ pub(crate) date: String,
+ pub(crate) pkg: BTreeMap<String, Package>,
+ pub(crate) artifacts: BTreeMap<String, Artifact>,
+ pub(crate) renames: BTreeMap<String, Rename>,
+ pub(crate) profiles: BTreeMap<String, Vec<String>>,
+}
+
+impl Manifest {
+ pub(crate) fn add_artifact(&mut self, name: &str, f: impl FnOnce(&mut Artifact)) {
+ let mut artifact = Artifact { target: BTreeMap::new() };
+ f(&mut artifact);
+ self.artifacts.insert(name.to_string(), artifact);
+ }
+}
+
+#[derive(Serialize)]
+pub(crate) struct Package {
+ pub(crate) version: String,
+ pub(crate) git_commit_hash: Option<String>,
+ pub(crate) target: BTreeMap<String, Target>,
+}
+
+#[derive(Serialize)]
+pub(crate) struct Rename {
+ pub(crate) to: String,
+}
+
+#[derive(Serialize)]
+pub(crate) struct Artifact {
+ pub(crate) target: BTreeMap<String, Vec<ArtifactFile>>,
+}
+
+impl Artifact {
+ pub(crate) fn add_file(&mut self, builder: &mut Builder, target: &str, path: &str) {
+ if let Some(path) = record_shipped_file(builder, builder.input.join(path)) {
+ self.target.entry(target.into()).or_insert_with(Vec::new).push(ArtifactFile {
+ url: builder.url(&path),
+ hash_sha256: FileHash::Missing(path),
+ });
+ }
+ }
+
+ pub(crate) fn add_tarball(&mut self, builder: &mut Builder, target: &str, base_path: &str) {
+ let files = self.target.entry(target.into()).or_insert_with(Vec::new);
+ let base_path = builder.input.join(base_path);
+ for compression in &["gz", "xz"] {
+ if let Some(tarball) = tarball_variant(builder, &base_path, compression) {
+ files.push(ArtifactFile {
+ url: builder.url(&tarball),
+ hash_sha256: FileHash::Missing(tarball),
+ });
+ }
+ }
+ }
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub(crate) struct ArtifactFile {
+ pub(crate) url: String,
+ pub(crate) hash_sha256: FileHash,
+}
+
+#[derive(Serialize, Default)]
+pub(crate) struct Target {
+ pub(crate) available: bool,
+ pub(crate) url: Option<String>,
+ pub(crate) hash: Option<FileHash>,
+ pub(crate) xz_url: Option<String>,
+ pub(crate) xz_hash: Option<FileHash>,
+ pub(crate) components: Option<Vec<Component>>,
+ pub(crate) extensions: Option<Vec<Component>>,
+}
+
+impl Target {
+ pub(crate) fn from_compressed_tar(builder: &mut Builder, base_path: &str) -> Self {
+ let base_path = builder.input.join(base_path);
+ let gz = tarball_variant(builder, &base_path, "gz");
+ let xz = tarball_variant(builder, &base_path, "xz");
+
+ if gz.is_none() {
+ return Self::unavailable();
+ }
+
+ Self {
+ available: true,
+ components: None,
+ extensions: None,
+ // .gz
+ url: gz.as_ref().map(|path| builder.url(path)),
+ hash: gz.map(FileHash::Missing),
+ // .xz
+ xz_url: xz.as_ref().map(|path| builder.url(path)),
+ xz_hash: xz.map(FileHash::Missing),
+ }
+ }
+
+ pub(crate) fn unavailable() -> Self {
+ Self::default()
+ }
+}
+
+#[derive(Serialize)]
+pub(crate) struct Component {
+ pub(crate) pkg: String,
+ pub(crate) target: String,
+}
+
+impl Component {
+ pub(crate) fn from_str(pkg: &str, target: &str) -> Self {
+ Self { pkg: pkg.to_string(), target: target.to_string() }
+ }
+}
+
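+// A file hash starts out as `Missing(path)` while the manifest is assembled
+// and is expected to be swapped for `Present(sha256)` before serialization
+// (presumably by the checksum-computation step that walks the manifest via
+// `visit_file_hashes` below); serializing a still-missing hash is treated as
+// a bug and fails with an error.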
+#[allow(unused)]
+pub(crate) enum FileHash {
+ Missing(PathBuf),
+ Present(String),
+}
+
+impl Serialize for FileHash {
+ fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
+ match self {
+ FileHash::Missing(path) => Err(serde::ser::Error::custom(format!(
+ "can't serialize a missing hash for file {}",
+ path.display()
+ ))),
+ FileHash::Present(inner) => inner.serialize(serializer),
+ }
+ }
+}
+
+fn tarball_variant(builder: &mut Builder, base: &Path, ext: &str) -> Option<PathBuf> {
+ let mut path = base.to_path_buf();
+ path.set_extension(ext);
+ record_shipped_file(builder, path)
+}
+
+fn record_shipped_file(builder: &mut Builder, path: PathBuf) -> Option<PathBuf> {
+ if path.is_file() {
+ builder.shipped_files.insert(
+ path.file_name()
+ .expect("missing filename")
+ .to_str()
+ .expect("non-utf-8 filename")
+ .to_string(),
+ );
+ Some(path)
+ } else {
+ None
+ }
+}
+
+pub(crate) fn visit_file_hashes(manifest: &mut Manifest, mut f: impl FnMut(&mut FileHash)) {
+ for pkg in manifest.pkg.values_mut() {
+ for target in pkg.target.values_mut() {
+ if let Some(hash) = &mut target.hash {
+ f(hash);
+ }
+ if let Some(hash) = &mut target.xz_hash {
+ f(hash);
+ }
+ }
+ }
+
+ for artifact in manifest.artifacts.values_mut() {
+ for target in artifact.target.values_mut() {
+ for file in target {
+ f(&mut file.hash_sha256);
+ }
+ }
+ }
+}
diff --git a/src/tools/build-manifest/src/versions.rs b/src/tools/build-manifest/src/versions.rs
new file mode 100644
index 000000000..95c2297de
--- /dev/null
+++ b/src/tools/build-manifest/src/versions.rs
@@ -0,0 +1,200 @@
+use anyhow::Error;
+use flate2::read::GzDecoder;
+use std::collections::HashMap;
+use std::fs::File;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+use tar::Archive;
+
+const DEFAULT_TARGET: &str = "x86_64-unknown-linux-gnu";
+
+#[derive(Debug, Hash, Eq, PartialEq, Clone)]
+pub(crate) enum PkgType {
+ Rust,
+ RustSrc,
+ Rustc,
+ Cargo,
+ Rls,
+ RustAnalyzer,
+ Clippy,
+ Rustfmt,
+ LlvmTools,
+ Miri,
+ Other(String),
+}
+
+impl PkgType {
+ pub(crate) fn from_component(component: &str) -> Self {
+ match component {
+ "rust" => PkgType::Rust,
+ "rust-src" => PkgType::RustSrc,
+ "rustc" => PkgType::Rustc,
+ "cargo" => PkgType::Cargo,
+ "rls" | "rls-preview" => PkgType::Rls,
+ "rust-analyzer" | "rust-analyzer-preview" => PkgType::RustAnalyzer,
+ "clippy" | "clippy-preview" => PkgType::Clippy,
+ "rustfmt" | "rustfmt-preview" => PkgType::Rustfmt,
+ "llvm-tools" | "llvm-tools-preview" => PkgType::LlvmTools,
+ "miri" | "miri-preview" => PkgType::Miri,
+ other => PkgType::Other(other.into()),
+ }
+ }
+
+ /// First part of the tarball name.
+ fn tarball_component_name(&self) -> &str {
+ match self {
+ PkgType::Rust => "rust",
+ PkgType::RustSrc => "rust-src",
+ PkgType::Rustc => "rustc",
+ PkgType::Cargo => "cargo",
+ PkgType::Rls => "rls",
+ PkgType::RustAnalyzer => "rust-analyzer",
+ PkgType::Clippy => "clippy",
+ PkgType::Rustfmt => "rustfmt",
+ PkgType::LlvmTools => "llvm-tools",
+ PkgType::Miri => "miri",
+ PkgType::Other(component) => component,
+ }
+ }
+
+ /// Whether this package has the same version as Rust itself, or has its own `version` and
+ /// `git-commit-hash` files inside the tarball.
+ fn should_use_rust_version(&self) -> bool {
+ match self {
+ PkgType::Cargo => false,
+ PkgType::Rls => false,
+ PkgType::RustAnalyzer => false,
+ PkgType::Clippy => false,
+ PkgType::Rustfmt => false,
+ PkgType::LlvmTools => false,
+ PkgType::Miri => false,
+
+ PkgType::Rust => true,
+ PkgType::RustSrc => true,
+ PkgType::Rustc => true,
+ PkgType::Other(_) => true,
+ }
+ }
+
+ /// Whether this package is target-independent or not.
+ fn target_independent(&self) -> bool {
+ *self == PkgType::RustSrc
+ }
+}
+
+#[derive(Debug, Default, Clone)]
+pub(crate) struct VersionInfo {
+ pub(crate) version: Option<String>,
+ pub(crate) git_commit: Option<String>,
+ pub(crate) present: bool,
+}
+
+pub(crate) struct Versions {
+ channel: String,
+ dist_path: PathBuf,
+ versions: HashMap<PkgType, VersionInfo>,
+}
+
+impl Versions {
+ pub(crate) fn new(channel: &str, dist_path: &Path) -> Result<Self, Error> {
+ Ok(Self { channel: channel.into(), dist_path: dist_path.into(), versions: HashMap::new() })
+ }
+
+ pub(crate) fn channel(&self) -> &str {
+ &self.channel
+ }
+
+ pub(crate) fn version(&mut self, mut package: &PkgType) -> Result<VersionInfo, Error> {
+ if package.should_use_rust_version() {
+ package = &PkgType::Rust;
+ }
+
+ match self.versions.get(package) {
+ Some(version) => Ok(version.clone()),
+ None => {
+ let version_info = self.load_version_from_tarball(package)?;
+ self.versions.insert(package.clone(), version_info.clone());
+ Ok(version_info)
+ }
+ }
+ }
+
+ fn load_version_from_tarball(&mut self, package: &PkgType) -> Result<VersionInfo, Error> {
+ let tarball_name = self.tarball_name(package, DEFAULT_TARGET)?;
+ let tarball = self.dist_path.join(tarball_name);
+
+ let file = match File::open(&tarball) {
+ Ok(file) => file,
+ Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+ // Missing tarballs do not return an error, but return empty data.
+ return Ok(VersionInfo::default());
+ }
+ Err(err) => return Err(err.into()),
+ };
+ let mut tar = Archive::new(GzDecoder::new(file));
+
+ let mut version = None;
+ let mut git_commit = None;
+ for entry in tar.entries()? {
+ let mut entry = entry?;
+
+ let dest;
+ match entry.path()?.components().nth(1).and_then(|c| c.as_os_str().to_str()) {
+ Some("version") => dest = &mut version,
+ Some("git-commit-hash") => dest = &mut git_commit,
+ _ => continue,
+ }
+ let mut buf = String::new();
+ entry.read_to_string(&mut buf)?;
+ *dest = Some(buf);
+
+ // Short circuit to avoid reading the whole tar file if not necessary.
+ if version.is_some() && git_commit.is_some() {
+ break;
+ }
+ }
+
+ Ok(VersionInfo { version, git_commit, present: true })
+ }
+
+ pub(crate) fn disable_version(&mut self, package: &PkgType) {
+ match self.versions.get_mut(package) {
+ Some(version) => {
+ *version = VersionInfo::default();
+ }
+ None => {
+ self.versions.insert(package.clone(), VersionInfo::default());
+ }
+ }
+ }
+
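+ /// Name of the archive for the given package and target, e.g. (illustrative)
+ /// `rust-nightly-x86_64-unknown-linux-gnu.tar.xz` on the nightly channel;
+ /// target-independent packages such as `rust-src` omit the target triple.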
+ pub(crate) fn archive_name(
+ &self,
+ package: &PkgType,
+ target: &str,
+ extension: &str,
+ ) -> Result<String, Error> {
+ let component_name = package.tarball_component_name();
+ let version = match self.channel.as_str() {
+ "stable" => self.rustc_version().into(),
+ "beta" => "beta".into(),
+ "nightly" => "nightly".into(),
+ _ => format!("{}-dev", self.rustc_version()),
+ };
+
+ if package.target_independent() {
+ Ok(format!("{}-{}.{}", component_name, version, extension))
+ } else {
+ Ok(format!("{}-{}-{}.{}", component_name, version, target, extension))
+ }
+ }
+
+ pub(crate) fn tarball_name(&self, package: &PkgType, target: &str) -> Result<String, Error> {
+ self.archive_name(package, target, "tar.gz")
+ }
+
+ pub(crate) fn rustc_version(&self) -> &str {
+ const RUSTC_VERSION: &str = include_str!("../../../version");
+ RUSTC_VERSION.trim()
+ }
+}
diff --git a/src/tools/bump-stage0/Cargo.toml b/src/tools/bump-stage0/Cargo.toml
new file mode 100644
index 000000000..758b1b139
--- /dev/null
+++ b/src/tools/bump-stage0/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "bump-stage0"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.34"
+curl = "0.4.38"
+indexmap = { version = "1.9.1", features = ["serde"] }
+serde = { version = "1.0.125", features = ["derive"] }
+serde_json = { version = "1.0.59", features = ["preserve_order"] }
+toml = "0.5.7"
diff --git a/src/tools/bump-stage0/src/main.rs b/src/tools/bump-stage0/src/main.rs
new file mode 100644
index 000000000..aa346daf7
--- /dev/null
+++ b/src/tools/bump-stage0/src/main.rs
@@ -0,0 +1,233 @@
+use anyhow::Error;
+use curl::easy::Easy;
+use indexmap::IndexMap;
+use std::collections::HashMap;
+use std::convert::TryInto;
+
+const PATH: &str = "src/stage0.json";
+const COMPILER_COMPONENTS: &[&str] = &["rustc", "rust-std", "cargo"];
+const RUSTFMT_COMPONENTS: &[&str] = &["rustfmt-preview"];
+
+struct Tool {
+ config: Config,
+ comments: Vec<String>,
+
+ channel: Channel,
+ version: [u16; 3],
+ checksums: IndexMap<String, String>,
+}
+
+impl Tool {
+ fn new() -> Result<Self, Error> {
+ let channel = match std::fs::read_to_string("src/ci/channel")?.trim() {
+ "stable" => Channel::Stable,
+ "beta" => Channel::Beta,
+ "nightly" => Channel::Nightly,
+ other => anyhow::bail!("unsupported channel: {}", other),
+ };
+
+ // Split "1.42.0" into [1, 42, 0]
+ let version = std::fs::read_to_string("src/version")?
+ .trim()
+ .split('.')
+ .map(|val| val.parse())
+ .collect::<Result<Vec<_>, _>>()?
+ .try_into()
+ .map_err(|_| anyhow::anyhow!("failed to parse version"))?;
+
+ let existing: Stage0 = serde_json::from_slice(&std::fs::read(PATH)?)?;
+
+ Ok(Self {
+ channel,
+ version,
+ config: existing.config,
+ comments: existing.comments,
+ checksums: IndexMap::new(),
+ })
+ }
+
+ fn update_json(mut self) -> Result<(), Error> {
+ std::fs::write(
+ PATH,
+ format!(
+ "{}\n",
+ serde_json::to_string_pretty(&Stage0 {
+ compiler: self.detect_compiler()?,
+ rustfmt: self.detect_rustfmt()?,
+ checksums_sha256: {
+ // Keys are sorted here instead of beforehand because values in this map
+ // are added while filling the other struct fields just above this block.
+ self.checksums.sort_keys();
+ self.checksums
+ },
+ config: self.config,
+ comments: self.comments,
+ })?
+ ),
+ )?;
+ Ok(())
+ }
+
+ // Currently Rust always bootstraps from the previous stable release, and in our train model
+ // this means that the master branch bootstraps from beta, beta bootstraps from current stable,
+ // and stable bootstraps from the previous stable release.
+ //
+ // On the master branch the compiler version is configured to `beta` whereas if you're looking
+ // at the beta or stable channel you'll likely see `1.x.0` as the version, with the previous
+ // release's version number.
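+ //
+ // For example (illustrative): with `src/version` containing 1.64.0, the
+ // stable and beta channels fetch the `1.63` manifest below, while nightly
+ // fetches the `beta` manifest.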
+ fn detect_compiler(&mut self) -> Result<Stage0Toolchain, Error> {
+ let channel = match self.channel {
+ Channel::Stable | Channel::Beta => {
+ // The 1.XX manifest points to the latest point release of that minor release.
+ format!("{}.{}", self.version[0], self.version[1] - 1)
+ }
+ Channel::Nightly => "beta".to_string(),
+ };
+
+ let manifest = fetch_manifest(&self.config, &channel)?;
+ self.collect_checksums(&manifest, COMPILER_COMPONENTS)?;
+ Ok(Stage0Toolchain {
+ date: manifest.date,
+ version: if self.channel == Channel::Nightly {
+ "beta".to_string()
+ } else {
+ // The version field is like "1.42.0 (abcdef1234 1970-01-01)"
+ manifest.pkg["rust"]
+ .version
+ .split_once(' ')
+ .expect("invalid version field")
+ .0
+ .to_string()
+ },
+ })
+ }
+
+ /// We use a nightly rustfmt to format the source because it solves some bootstrapping issues
+ /// with use of new syntax in this repo. For the beta/stable channels rustfmt is not provided,
+ /// as we don't want to depend on rustfmt from nightly there.
+ fn detect_rustfmt(&mut self) -> Result<Option<Stage0Toolchain>, Error> {
+ if self.channel != Channel::Nightly {
+ return Ok(None);
+ }
+
+ let manifest = fetch_manifest(&self.config, "nightly")?;
+ self.collect_checksums(&manifest, RUSTFMT_COMPONENTS)?;
+ Ok(Some(Stage0Toolchain { date: manifest.date, version: "nightly".into() }))
+ }
+
+ fn collect_checksums(&mut self, manifest: &Manifest, components: &[&str]) -> Result<(), Error> {
+ let prefix = format!("{}/", self.config.dist_server);
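+ // For example (illustrative, assuming the default dist server), a url like
+ // "https://static.rust-lang.org/dist/2022-08-11/cargo-beta-x86_64-unknown-linux-gnu.tar.gz"
+ // is recorded keyed by "dist/2022-08-11/cargo-beta-x86_64-unknown-linux-gnu.tar.gz".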
+ for component in components {
+ let pkg = manifest
+ .pkg
+ .get(*component)
+ .ok_or_else(|| anyhow::anyhow!("missing component from manifest: {}", component))?;
+ for target in pkg.target.values() {
+ for pair in &[(&target.url, &target.hash), (&target.xz_url, &target.xz_hash)] {
+ if let (Some(url), Some(sha256)) = pair {
+ let url = url
+ .strip_prefix(&prefix)
+ .ok_or_else(|| {
+ anyhow::anyhow!("url doesn't start with dist server base: {}", url)
+ })?
+ .to_string();
+ self.checksums.insert(url, sha256.clone());
+ }
+ }
+ }
+ }
+ Ok(())
+ }
+}
+
+fn main() -> Result<(), Error> {
+ let tool = Tool::new()?;
+ tool.update_json()?;
+ Ok(())
+}
+
+fn fetch_manifest(config: &Config, channel: &str) -> Result<Manifest, Error> {
+ Ok(toml::from_slice(&http_get(&format!(
+ "{}/dist/channel-rust-{}.toml",
+ config.dist_server, channel
+ ))?)?)
+}
+
+fn http_get(url: &str) -> Result<Vec<u8>, Error> {
+ let mut data = Vec::new();
+ let mut handle = Easy::new();
+ handle.fail_on_error(true)?;
+ handle.url(url)?;
+ {
+ let mut transfer = handle.transfer();
+ transfer.write_function(|new_data| {
+ data.extend_from_slice(new_data);
+ Ok(new_data.len())
+ })?;
+ transfer.perform()?;
+ }
+ Ok(data)
+}
+
+#[derive(Debug, PartialEq, Eq)]
+enum Channel {
+ Stable,
+ Beta,
+ Nightly,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct Stage0 {
+ config: Config,
+ // Comments are explicitly below the config; do not move them above.
+ //
+ // Downstream forks of the compiler codebase can change the configuration values defined above,
+ // but doing so would risk merge conflicts whenever they import new changes that include a
+ // bootstrap compiler bump.
+ //
+ // To lessen the pain, a big block of comments is placed between the configuration and the
+ // auto-generated parts of the file, preventing git diffs of the config from including parts of the
+ // auto-generated content and vice versa. This should prevent merge conflicts.
+ #[serde(rename = "__comments")]
+ comments: Vec<String>,
+ compiler: Stage0Toolchain,
+ rustfmt: Option<Stage0Toolchain>,
+ checksums_sha256: IndexMap<String, String>,
+}
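+
+// Illustrative shape of the generated `src/stage0.json` (values assumed, not
+// taken from a real run):
+//
+// {
+//   "config": { "dist_server": "https://static.rust-lang.org", ... },
+//   "__comments": ["..."],
+//   "compiler": { "date": "2022-08-11", "version": "beta" },
+//   "rustfmt": { "date": "2022-08-11", "version": "nightly" },
+//   "checksums_sha256": { "dist/2022-08-11/cargo-beta-<target>.tar.gz": "<sha256>", ... }
+// }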
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct Config {
+ dist_server: String,
+ // There are other fields in the configuration, which will be read by src/bootstrap or other
+ // tools consuming stage0.json. To avoid the need to update bump-stage0 every time a new field
+ // is added, we collect all the fields in an untyped Value and serialize them back with the
+ // same order and structure they were deserialized in.
+ #[serde(flatten)]
+ other: serde_json::Value,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct Stage0Toolchain {
+ date: String,
+ version: String,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct Manifest {
+ date: String,
+ pkg: HashMap<String, ManifestPackage>,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct ManifestPackage {
+ version: String,
+ target: HashMap<String, ManifestTargetPackage>,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct ManifestTargetPackage {
+ url: Option<String>,
+ hash: Option<String>,
+ xz_url: Option<String>,
+ xz_hash: Option<String>,
+}
diff --git a/src/tools/cargotest/Cargo.toml b/src/tools/cargotest/Cargo.toml
new file mode 100644
index 000000000..bfd894e2b
--- /dev/null
+++ b/src/tools/cargotest/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "cargotest2"
+version = "0.1.0"
+edition = "2021"
+
+[[bin]]
+name = "cargotest"
+path = "main.rs"
diff --git a/src/tools/cargotest/main.rs b/src/tools/cargotest/main.rs
new file mode 100644
index 000000000..95fe98a68
--- /dev/null
+++ b/src/tools/cargotest/main.rs
@@ -0,0 +1,214 @@
+use std::env;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+struct Test {
+ repo: &'static str,
+ name: &'static str,
+ sha: &'static str,
+ lock: Option<&'static str>,
+ packages: &'static [&'static str],
+ features: Option<&'static [&'static str]>,
+ manifest_path: Option<&'static str>,
+ /// `filters` are passed to libtest (i.e., after a `--` in the `cargo test` invocation).
+ filters: &'static [&'static str],
+}
+
+const TEST_REPOS: &[Test] = &[
+ Test {
+ name: "iron",
+ repo: "https://github.com/iron/iron",
+ sha: "cf056ea5e8052c1feea6141e40ab0306715a2c33",
+ lock: None,
+ packages: &[],
+ features: None,
+ manifest_path: None,
+ filters: &[],
+ },
+ Test {
+ name: "ripgrep",
+ repo: "https://github.com/BurntSushi/ripgrep",
+ sha: "ced5b92aa93eb47e892bd2fd26ab454008721730",
+ lock: None,
+ packages: &[],
+ features: None,
+ manifest_path: None,
+ filters: &[],
+ },
+ Test {
+ name: "tokei",
+ repo: "https://github.com/XAMPPRocky/tokei",
+ sha: "fdf3f8cb279a7aeac0696c87e5d8b0cd946e4f9e",
+ lock: None,
+ packages: &[],
+ features: None,
+ manifest_path: None,
+ filters: &[],
+ },
+ Test {
+ name: "xsv",
+ repo: "https://github.com/BurntSushi/xsv",
+ sha: "3de6c04269a7d315f7e9864b9013451cd9580a08",
+ lock: None,
+ packages: &[],
+ features: None,
+ manifest_path: None,
+ // Many tests here use quickcheck and some of them can fail randomly, so only run deterministic tests.
+ filters: &[
+ "test_flatten::",
+ "test_fmt::",
+ "test_headers::",
+ "test_index::",
+ "test_join::",
+ "test_partition::",
+ "test_search::",
+ "test_select::",
+ "test_slice::",
+ "test_split::",
+ "test_stats::",
+ "test_table::",
+ ],
+ },
+ Test {
+ name: "servo",
+ repo: "https://github.com/servo/servo",
+ sha: "caac107ae8145ef2fd20365e2b8fadaf09c2eb3b",
+ lock: None,
+ // Only test Stylo a.k.a. Quantum CSS, the parts of Servo going into Firefox.
+ // This takes much less time to build than all of Servo and supports stable Rust.
+ packages: &["selectors"],
+ features: None,
+ manifest_path: None,
+ filters: &[],
+ },
+ Test {
+ name: "diesel",
+ repo: "https://github.com/diesel-rs/diesel",
+ sha: "91493fe47175076f330ce5fc518f0196c0476f56",
+ lock: None,
+ packages: &[],
+ // Test the embedded sqlite variant of diesel
+ // This does not require any dependency to be present;
+ // sqlite will be compiled as part of the build process.
+ features: Some(&["sqlite", "libsqlite3-sys/bundled"]),
+ // We are only interested in testing diesel itself
+ // not any other crate present in the diesel workspace
+ // (This is required to set the feature flags above)
+ manifest_path: Some("diesel/Cargo.toml"),
+ filters: &[],
+ },
+];
+
+fn main() {
+ let args = env::args().collect::<Vec<_>>();
+ let cargo = &args[1];
+ let out_dir = Path::new(&args[2]);
+ let cargo = &Path::new(cargo);
+
+ for test in TEST_REPOS.iter().rev() {
+ if args[3..].is_empty() || args[3..].iter().any(|s| s.contains(test.name)) {
+ test_repo(cargo, out_dir, test);
+ }
+ }
+}
+
+fn test_repo(cargo: &Path, out_dir: &Path, test: &Test) {
+ println!("testing {}", test.repo);
+ let dir = clone_repo(test, out_dir);
+ if let Some(lockfile) = test.lock {
+ fs::write(&dir.join("Cargo.lock"), lockfile).unwrap();
+ }
+ if !run_cargo_test(cargo, &dir, test.packages, test.features, test.manifest_path, test.filters)
+ {
+ panic!("tests failed for {}", test.repo);
+ }
+}
+
+fn clone_repo(test: &Test, out_dir: &Path) -> PathBuf {
+ let out_dir = out_dir.join(test.name);
+
+ if !out_dir.join(".git").is_dir() {
+ let status = Command::new("git").arg("init").arg(&out_dir).status().unwrap();
+ assert!(status.success());
+ }
+
+ // Try progressively deeper fetch depths to find the commit
+ let mut found = false;
+ for depth in &[0, 1, 10, 100, 1000, 100000] {
+ if *depth > 0 {
+ let status = Command::new("git")
+ .arg("fetch")
+ .arg(test.repo)
+ .arg("master")
+ .arg(&format!("--depth={}", depth))
+ .current_dir(&out_dir)
+ .status()
+ .unwrap();
+ assert!(status.success());
+ }
+
+ let status = Command::new("git")
+ .arg("reset")
+ .arg(test.sha)
+ .arg("--hard")
+ .current_dir(&out_dir)
+ .status()
+ .unwrap();
+
+ if status.success() {
+ found = true;
+ break;
+ }
+ }
+
+ if !found {
+ panic!("unable to find commit {}", test.sha)
+ }
+ let status =
+ Command::new("git").arg("clean").arg("-fdx").current_dir(&out_dir).status().unwrap();
+ assert!(status.success());
+
+ out_dir
+}
+
+fn run_cargo_test(
+ cargo_path: &Path,
+ crate_path: &Path,
+ packages: &[&str],
+ features: Option<&[&str]>,
+ manifest_path: Option<&str>,
+ filters: &[&str],
+) -> bool {
+ let mut command = Command::new(cargo_path);
+ command.arg("test");
+
+ if let Some(path) = manifest_path {
+ command.arg(format!("--manifest-path={}", path));
+ }
+
+ if let Some(features) = features {
+ command.arg("--no-default-features");
+ for feature in features {
+ command.arg(format!("--features={}", feature));
+ }
+ }
+
+ for name in packages {
+ command.arg("-p").arg(name);
+ }
+
+ command.arg("--");
+ command.args(filters);
+
+ let status = command
+ // Disable rust-lang/cargo's cross-compile tests
+ .env("CFG_DISABLE_CROSS_TESTS", "1")
+ // Relax #![deny(warnings)] in some crates
+ .env("RUSTFLAGS", "--cap-lints warn")
+ .current_dir(crate_path)
+ .status()
+ .unwrap();
+
+ status.success()
+}
diff --git a/src/tools/cherry-pick.sh b/src/tools/cherry-pick.sh
new file mode 100755
index 000000000..90539a963
--- /dev/null
+++ b/src/tools/cherry-pick.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+set -euo pipefail
+IFS=$'\n\t'
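+
+# Usage (assumed): ./cherry-pick.sh <sha> [<sha> ...]
+# Each given commit is cherry-picked onto the current branch and its commit
+# message is amended with a "(backport-of: <full sha>)" note.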
+
+print_error() {
+ echo "Error: \`$1\` is not a valid commit. To debug, run:"
+ echo
+ echo " git rev-parse --verify $1"
+ echo
+}
+
+full_sha() {
+ git rev-parse \
+ --verify \
+ --quiet \
+ "$1^{object}" || print_error $1
+}
+
+commit_message_with_backport_note() {
+ message=$(git log --format=%B -n 1 $1)
+ echo $message | awk "NR==1{print; print \"\n(backport-of: $1)\"} NR!=1"
+}
+
+cherry_pick_commit() {
+ sha=$(full_sha $1)
+ git cherry-pick $sha > /dev/null
+ git commit \
+ --amend \
+ --file <(commit_message_with_backport_note $sha)
+}
+
+for arg ; do
+ cherry_pick_commit $arg
+done
diff --git a/src/tools/clippy/.cargo/config.toml b/src/tools/clippy/.cargo/config.toml
new file mode 100644
index 000000000..f3dd9275a
--- /dev/null
+++ b/src/tools/clippy/.cargo/config.toml
@@ -0,0 +1,13 @@
+[alias]
+uitest = "test --test compile-test"
+dev = "run --package clippy_dev --bin clippy_dev --manifest-path clippy_dev/Cargo.toml --"
+lintcheck = "run --package lintcheck --bin lintcheck --manifest-path lintcheck/Cargo.toml -- "
+collect-metadata = "test --test dogfood --features internal -- run_metadata_collection_lint --ignored"
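+# For example (illustrative): `cargo uitest` runs the UI test suite and
+# `cargo dev fmt` formats the repository through clippy_dev.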
+
+[build]
+# -Zbinary-dep-depinfo allows us to track which rlib files to use for compiling UI tests
+rustflags = ["-Zunstable-options", "-Zbinary-dep-depinfo"]
+target-dir = "target"
+
+[unstable]
+binary-dep-depinfo = true
diff --git a/src/tools/clippy/.editorconfig b/src/tools/clippy/.editorconfig
new file mode 100644
index 000000000..ec6e107d5
--- /dev/null
+++ b/src/tools/clippy/.editorconfig
@@ -0,0 +1,21 @@
+# EditorConfig helps developers define and maintain consistent
+# coding styles between different editors and IDEs
+# editorconfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+indent_style = space
+indent_size = 4
+
+[*.md]
+# double whitespace at end of line
+# denotes a line break in Markdown
+trim_trailing_whitespace = false
+
+[*.yml]
+indent_size = 2
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/blank_issue.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/blank_issue.yml
new file mode 100644
index 000000000..89884bfc8
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/blank_issue.yml
@@ -0,0 +1,44 @@
+name: Blank Issue
+description: Create a blank issue.
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for filing an issue!
+ - type: textarea
+ id: problem
+ attributes:
+ label: Description
+ description: >
+ Please provide a description of the issue, along with any information
+ you feel relevant to replicate it.
+ validations:
+ required: true
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: "Rust version (`rustc -Vv`)"
+ placeholder: |
+ rustc 1.46.0-nightly (f455e46ea 2020-06-20)
+ binary: rustc
+ commit-hash: f455e46eae1a227d735091091144601b467e1565
+ commit-date: 2020-06-20
+ host: x86_64-unknown-linux-gnu
+ release: 1.46.0-nightly
+ LLVM version: 10.0
+ render: text
+ - type: textarea
+ id: labels
+ attributes:
+ label: Additional Labels
+ description: >
+ Additional labels can be added to this issue by including the following
+ command:
+ placeholder: |
+ @rustbot label +<label>
+
+ Common labels for this issue type are:
+ * C-an-interesting-project
+ * C-enhancement
+ * C-question
+ * C-tracking-issue
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/bug_report.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..b6f70a7f1
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,57 @@
+name: Bug Report
+description: Create a bug report for Clippy
+labels: ["C-bug"]
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for filing a bug report! 🐛
+ - type: textarea
+ id: problem
+ attributes:
+ label: Summary
+ description: >
+ Please provide a short summary of the bug, along with any information
+ you feel relevant to replicate the bug.
+ validations:
+ required: true
+ - type: textarea
+ id: reproducer
+ attributes:
+ label: Reproducer
+ description: Please provide the code and steps to reproduce the bug
+ value: |
+ I tried this code:
+
+ ```rust
+ <code>
+ ```
+
+ I expected to see this happen:
+
+ Instead, this happened:
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: "Rust version (`rustc -Vv`)"
+ placeholder: |
+ rustc 1.46.0-nightly (f455e46ea 2020-06-20)
+ binary: rustc
+ commit-hash: f455e46eae1a227d735091091144601b467e1565
+ commit-date: 2020-06-20
+ host: x86_64-unknown-linux-gnu
+ release: 1.46.0-nightly
+ LLVM version: 10.0
+ render: text
+ - type: textarea
+ id: labels
+ attributes:
+ label: Additional Labels
+ description: >
+ Additional labels can be added to this issue by including the following
+ command:
+ placeholder: |
+ @rustbot label +<label>
+
+ Common labels for this issue type are:
+ * `I-suggestion-causes-error`
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/config.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..bd7dc0ac9
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: true
+contact_links:
+ - name: Rust Programming Language Forum
+ url: https://users.rust-lang.org
+ about: Please ask and answer questions about Rust here.
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/false_negative.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/false_negative.yml
new file mode 100644
index 000000000..25e436d30
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/false_negative.yml
@@ -0,0 +1,50 @@
+name: Bug Report (False Negative)
+description: Create a bug report about missing warnings from a lint
+labels: ["C-bug", "I-false-negative"]
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for filing a bug report! 🐛
+ - type: textarea
+ id: problem
+ attributes:
+ label: Summary
+ description: >
+ Please provide a short summary of the bug, along with any information
+ you feel relevant to replicate the bug.
+ validations:
+ required: true
+ - type: input
+ id: lint-name
+ attributes:
+ label: Lint Name
+ description: Please provide the lint name.
+ - type: textarea
+ id: reproducer
+ attributes:
+ label: Reproducer
+ description: Please provide the code and steps to reproduce the bug
+ value: |
+ I tried this code:
+
+ ```rust
+ <code>
+ ```
+
+ I expected to see this happen:
+
+ Instead, this happened:
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: "Rust version (`rustc -Vv`)"
+ placeholder: |
+ rustc 1.46.0-nightly (f455e46ea 2020-06-20)
+ binary: rustc
+ commit-hash: f455e46eae1a227d735091091144601b467e1565
+ commit-date: 2020-06-20
+ host: x86_64-unknown-linux-gnu
+ release: 1.46.0-nightly
+ LLVM version: 10.0
+ render: text
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/false_positive.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/false_positive.yml
new file mode 100644
index 000000000..561b65c93
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/false_positive.yml
@@ -0,0 +1,68 @@
+name: Bug Report (False Positive)
+description: Create a bug report about a wrongly emitted lint warning
+labels: ["C-bug", "I-false-positive"]
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for filing a bug report! 🐛
+ - type: textarea
+ id: problem
+ attributes:
+ label: Summary
+ description: >
+ Please provide a short summary of the bug, along with any information
+ you feel relevant to replicate the bug.
+ validations:
+ required: true
+ - type: input
+ id: lint-name
+ attributes:
+ label: Lint Name
+ description: Please provide the lint name.
+ - type: textarea
+ id: reproducer
+ attributes:
+ label: Reproducer
+ description: >
+ Please provide the code and steps to reproduce the bug together with the
+ output from Clippy.
+ value: |
+ I tried this code:
+
+ ```rust
+ <code>
+ ```
+
+ I saw this happen:
+
+ ```
+ <output>
+ ```
+
+ I expected to see this happen:
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: "Rust version (`rustc -Vv`)"
+ placeholder: |
+ rustc 1.46.0-nightly (f455e46ea 2020-06-20)
+ binary: rustc
+ commit-hash: f455e46eae1a227d735091091144601b467e1565
+ commit-date: 2020-06-20
+ host: x86_64-unknown-linux-gnu
+ release: 1.46.0-nightly
+ LLVM version: 10.0
+ render: text
+ - type: textarea
+ id: labels
+ attributes:
+ label: Additional Labels
+ description: >
+ Additional labels can be added to this issue by including the following
+ command:
+ placeholder: |
+ @rustbot label +<label>
+
+ Common labels for this issue type are:
+ * `I-suggestion-causes-error`
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/ice.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/ice.yml
new file mode 100644
index 000000000..81bd9c5e0
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/ice.yml
@@ -0,0 +1,48 @@
+name: Internal Compiler Error
+description: Create a report for an internal compiler error (ICE) in Clippy.
+labels: ["C-bug", "I-ICE"]
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for finding an Internal Compiler Error! 🧊
+ - type: textarea
+ id: problem
+ attributes:
+ label: Summary
+ description: |
+ If possible, try to provide a minimal verifiable example. You can read ["Rust Bug Minimization Patterns"][mve] for how to create smaller examples. Otherwise, provide the crate where the ICE occurred.
+
+ [mve]: http://blog.pnkfx.org/blog/2019/11/18/rust-bug-minimization-patterns/
+ validations:
+ required: true
+ - type: textarea
+ id: version
+ attributes:
+ label: Version
+ description: "Rust version (`rustc -Vv`)"
+ placeholder: |
+ rustc 1.46.0-nightly (f455e46ea 2020-06-20)
+ binary: rustc
+ commit-hash: f455e46eae1a227d735091091144601b467e1565
+ commit-date: 2020-06-20
+ host: x86_64-unknown-linux-gnu
+ release: 1.46.0-nightly
+ LLVM version: 10.0
+ render: text
+ - type: textarea
+ id: error
+ attributes:
+ label: Error output
+ description: >
+ Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in
+ your environment. E.g. `RUST_BACKTRACE=1 cargo clippy`.
+ value: |
+ <details><summary>Backtrace</summary>
+ <p>
+
+ ```
+ <backtrace>
+ ```
+
+ </p>
+ </details>
diff --git a/src/tools/clippy/.github/ISSUE_TEMPLATE/new_lint.yml b/src/tools/clippy/.github/ISSUE_TEMPLATE/new_lint.yml
new file mode 100644
index 000000000..0b43d8d70
--- /dev/null
+++ b/src/tools/clippy/.github/ISSUE_TEMPLATE/new_lint.yml
@@ -0,0 +1,71 @@
+name: New lint suggestion
+description: Suggest a new Clippy lint.
+labels: ["A-lint"]
+body:
+ - type: markdown
+ attributes:
+ value: Thank you for your lint idea!
+ - type: textarea
+ id: what
+ attributes:
+ label: What it does
+ description: What does this lint do?
+ validations:
+ required: true
+ - type: input
+ id: lint-name
+ attributes:
+ label: Lint Name
+ description: Please provide the lint name.
+ - type: dropdown
+ id: category
+ attributes:
+ label: Category
+ description: >
+ What category should this lint go into? If you're unsure you can select
+ multiple categories. You can find a category description in the
+ `README`.
+ multiple: true
+ options:
+ - correctness
+ - suspicious
+ - style
+ - complexity
+ - perf
+ - pedantic
+ - restriction
+ - cargo
+ - type: textarea
+ id: advantage
+ attributes:
+ label: Advantage
+ description: >
+ What is the advantage of the recommended code over the original code?
+ placeholder: |
+ - Remove bounds check inserted by ...
+ - Remove the need to duplicate/store ...
+ - Remove typo ...
+ - type: textarea
+ id: drawbacks
+ attributes:
+ label: Drawbacks
+ description: What might be possible drawbacks of such a lint?
+ - type: textarea
+ id: example
+ attributes:
+ label: Example
+ description: >
+ Include a short example showing when the lint should trigger together
+ with the improved code.
+ value: |
+ ```rust
+ <code>
+ ```
+
+ Could be written as:
+
+ ```rust
+ <code>
+ ```
+ validations:
+ required: true
diff --git a/src/tools/clippy/.github/PULL_REQUEST_TEMPLATE.md b/src/tools/clippy/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..9e49f6089
--- /dev/null
+++ b/src/tools/clippy/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,39 @@
+Thank you for making Clippy better!
+
+We're collecting our changelog from pull request descriptions.
+If your PR only includes internal changes, you can just write
+`changelog: none`. Otherwise, please write a short comment
+explaining your change.
+
+It's also helpful for us if the lint name is wrapped in backticks (`` ` ` ``)
+and enclosed in square brackets (`[]`), for example:
+```
+changelog: [`lint_name`]: your change
+```
+
+If your PR fixes an issue, you can add `fixes #issue_number` into this
+PR description. This way the issue will be automatically closed when
+your PR is merged.
+
+If you added a new lint, here's a checklist for things that will be
+checked during review or continuous integration.
+
+- \[ ] Followed [lint naming conventions][lint_naming]
+- \[ ] Added passing UI tests (including committed `.stderr` file)
+- \[ ] `cargo test` passes locally
+- \[ ] Executed `cargo dev update_lints`
+- \[ ] Added lint documentation
+- \[ ] Run `cargo dev fmt`
+
+[lint_naming]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
+
+Note that you can skip the above if you are just opening a WIP PR in
+order to get feedback.
+
+Delete this line and everything above before opening your PR.
+
+---
+
+*Please write a short comment explaining your change (or "none" for internal only changes)*
+
+changelog:
diff --git a/src/tools/clippy/.github/deploy.sh b/src/tools/clippy/.github/deploy.sh
new file mode 100644
index 000000000..5a59f94ec
--- /dev/null
+++ b/src/tools/clippy/.github/deploy.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+
+set -ex
+
+echo "Removing the current docs for master"
+rm -rf out/master/ || exit 0
+
+echo "Making the docs for master"
+mkdir out/master/
+cp util/gh-pages/index.html out/master
+cp util/gh-pages/script.js out/master
+cp util/gh-pages/lints.json out/master
+
+if [[ -n $TAG_NAME ]]; then
+ echo "Save the doc for the current tag ($TAG_NAME) and point stable/ to it"
+ cp -Tr out/master "out/$TAG_NAME"
+ rm -f out/stable
+ ln -s "$TAG_NAME" out/stable
+fi
+
+if [[ $BETA = "true" ]]; then
+ echo "Update documentation for the beta release"
+ cp -r out/master/* out/beta
+fi
+
+# Generate the version index that is shown as the root index page
+cp util/gh-pages/versions.html out/index.html
+
+echo "Making the versions.json file"
+python3 ./util/versions.py out
+
+# Now let's go have some fun with the cloned repo
+cd out
+git config user.name "GHA CI"
+git config user.email "gha@ci.invalid"
+
+if [[ -n $TAG_NAME ]]; then
+ # track files, so that the following check works
+ git add --intent-to-add "$TAG_NAME"
+ if git diff --exit-code --quiet -- $TAG_NAME/; then
+ echo "No changes to the output on this push; exiting."
+ exit 0
+ fi
+ # Add the new dir
+ git add "$TAG_NAME"
+ # Update the symlink
+ git add stable
+ # Update versions file
+ git add versions.json
+ git commit -m "Add documentation for ${TAG_NAME} release: ${SHA}"
+elif [[ $BETA = "true" ]]; then
+ if git diff --exit-code --quiet -- beta/; then
+ echo "No changes to the output on this push; exiting."
+ exit 0
+ fi
+ git add beta
+ git commit -m "Automatic deploy to GitHub Pages (beta): ${SHA}"
+else
+ if git diff --exit-code --quiet; then
+ echo "No changes to the output on this push; exiting."
+ exit 0
+ fi
+ git add .
+ git commit -m "Automatic deploy to GitHub Pages: ${SHA}"
+fi
+
+git push "$SSH_REPO" "$TARGET_BRANCH"
diff --git a/src/tools/clippy/.github/driver.sh b/src/tools/clippy/.github/driver.sh
new file mode 100644
index 000000000..6ff189fc8
--- /dev/null
+++ b/src/tools/clippy/.github/driver.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+set -ex
+
+# Check sysroot handling
+sysroot=$(./target/debug/clippy-driver --print sysroot)
+test "$sysroot" = "$(rustc --print sysroot)"
+
+if [[ ${OS} == "Windows" ]]; then
+ desired_sysroot=C:/tmp
+else
+ desired_sysroot=/tmp
+fi
+sysroot=$(./target/debug/clippy-driver --sysroot $desired_sysroot --print sysroot)
+test "$sysroot" = $desired_sysroot
+
+sysroot=$(SYSROOT=$desired_sysroot ./target/debug/clippy-driver --print sysroot)
+test "$sysroot" = $desired_sysroot
+
+# Make sure this isn't set - clippy-driver should cope without it
+unset CARGO_MANIFEST_DIR
+
+# Run a lint and make sure it produces the expected output. It's also expected to exit with code 1
+# FIXME: How to match the clippy invocation in compile-test.rs?
+./target/debug/clippy-driver -Dwarnings -Aunused -Zui-testing --emit metadata --crate-type bin tests/ui/double_neg.rs 2>double_neg.stderr && exit 1
+sed -e "s,tests/ui,\$DIR," -e "/= help/d" double_neg.stderr >normalized.stderr
+diff -u normalized.stderr tests/ui/double_neg.stderr
+
+# make sure "clippy-driver --rustc --arg" and "rustc --arg" behave the same
+SYSROOT=$(rustc --print sysroot)
+diff -u <(LD_LIBRARY_PATH=${SYSROOT}/lib ./target/debug/clippy-driver --rustc --version --verbose) <(rustc --version --verbose)
+
+echo "fn main() {}" >target/driver_test.rs
+# we can't run 2 rustcs on the same file at the same time
+CLIPPY=$(LD_LIBRARY_PATH=${SYSROOT}/lib ./target/debug/clippy-driver ./target/driver_test.rs --rustc)
+RUSTC=$(rustc ./target/driver_test.rs)
+diff -u <($CLIPPY) <($RUSTC)
+
+# TODO: CLIPPY_CONF_DIR / CARGO_MANIFEST_DIR
diff --git a/src/tools/clippy/.github/workflows/clippy.yml b/src/tools/clippy/.github/workflows/clippy.yml
new file mode 100644
index 000000000..0e27cc927
--- /dev/null
+++ b/src/tools/clippy/.github/workflows/clippy.yml
@@ -0,0 +1,76 @@
+name: Clippy Test
+
+on:
+ push:
+ # Ignore bors branches, since they are covered by `clippy_bors.yml`
+ branches-ignore:
+ - auto
+ - try
+ # Don't run Clippy tests when only text files were modified
+ paths-ignore:
+ - 'COPYRIGHT'
+ - 'LICENSE-*'
+ - '**.md'
+ - '**.txt'
+ pull_request:
+ # Don't run Clippy tests when only text files were modified
+ paths-ignore:
+ - 'COPYRIGHT'
+ - 'LICENSE-*'
+ - '**.md'
+ - '**.txt'
+
+env:
+ RUST_BACKTRACE: 1
+ CARGO_TARGET_DIR: '${{ github.workspace }}/target'
+ NO_FMT_TEST: 1
+
+jobs:
+ base:
+ # NOTE: If you modify this job, make sure you copy the changes to clippy_bors.yml
+ runs-on: ubuntu-latest
+
+ steps:
+ # Setup
+ - uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
+ with:
+ github_token: "${{ secrets.github_token }}"
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Install toolchain
+ run: rustup show active-toolchain
+
+ # Run
+ - name: Set LD_LIBRARY_PATH (Linux)
+ run: |
+ SYSROOT=$(rustc --print sysroot)
+ echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV
+
+ - name: Build
+ run: cargo build --features deny-warnings,internal
+
+ - name: Test
+ run: cargo test --features deny-warnings,internal
+
+ - name: Test clippy_lints
+ run: cargo test --features deny-warnings,internal
+ working-directory: clippy_lints
+
+ - name: Test clippy_utils
+ run: cargo test --features deny-warnings,internal
+ working-directory: clippy_utils
+
+ - name: Test rustc_tools_util
+ run: cargo test --features deny-warnings
+ working-directory: rustc_tools_util
+
+ - name: Test clippy_dev
+ run: cargo test --features deny-warnings
+ working-directory: clippy_dev
+
+ - name: Test clippy-driver
+ run: bash .github/driver.sh
+ env:
+ OS: ${{ runner.os }}
diff --git a/src/tools/clippy/.github/workflows/clippy_bors.yml b/src/tools/clippy/.github/workflows/clippy_bors.yml
new file mode 100644
index 000000000..97453303c
--- /dev/null
+++ b/src/tools/clippy/.github/workflows/clippy_bors.yml
@@ -0,0 +1,281 @@
+name: Clippy Test (bors)
+
+on:
+ push:
+ branches:
+ - auto
+ - try
+
+env:
+ RUST_BACKTRACE: 1
+ CARGO_TARGET_DIR: '${{ github.workspace }}/target'
+ NO_FMT_TEST: 1
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ changelog:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
+ with:
+ github_token: "${{ secrets.github_token }}"
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+ with:
+ ref: ${{ github.ref }}
+
+ # Run
+ - name: Check Changelog
+ run: |
+ MESSAGE=$(git log --format=%B -n 1)
+ PR=$(echo "$MESSAGE" | grep -o "#[0-9]*" | head -1 | sed -e 's/^#//')
+ body=$(curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -s "https://api.github.com/repos/rust-lang/rust-clippy/pulls/$PR" | \
+ python -c "import sys, json; print(json.load(sys.stdin)['body'])")
+ output=$(grep "^changelog:\s*\S" <<< "$body" | sed "s/changelog:\s*//g") || {
+ echo "ERROR: PR body must contain 'changelog: ...'"
+ exit 1
+ }
+ if [[ "$output" = "none" ]]; then
+ echo "WARNING: changelog is 'none'"
+ else
+ echo "changelog: $output"
+ fi
+ env:
+ PYTHONIOENCODING: 'utf-8'
+ base:
+ needs: changelog
+ strategy:
+ matrix:
+ os: [ubuntu-latest, windows-latest, macos-latest]
+ host: [x86_64-unknown-linux-gnu, i686-unknown-linux-gnu, x86_64-apple-darwin, x86_64-pc-windows-msvc]
+ exclude:
+ - os: ubuntu-latest
+ host: x86_64-apple-darwin
+ - os: ubuntu-latest
+ host: x86_64-pc-windows-msvc
+ - os: macos-latest
+ host: x86_64-unknown-linux-gnu
+ - os: macos-latest
+ host: i686-unknown-linux-gnu
+ - os: macos-latest
+ host: x86_64-pc-windows-msvc
+ - os: windows-latest
+ host: x86_64-unknown-linux-gnu
+ - os: windows-latest
+ host: i686-unknown-linux-gnu
+ - os: windows-latest
+ host: x86_64-apple-darwin
+
+ runs-on: ${{ matrix.os }}
+
+ # NOTE: If you modify this job, make sure you copy the changes to clippy.yml
+ steps:
+ # Setup
+ - uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
+ with:
+ github_token: "${{ secrets.github_token }}"
+
+ - name: Install dependencies (Linux-i686)
+ run: |
+ sudo dpkg --add-architecture i386
+ sudo apt-get update
+ sudo apt-get install gcc-multilib libssl-dev:i386 libgit2-dev:i386
+ if: matrix.host == 'i686-unknown-linux-gnu'
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Install toolchain
+ run: rustup show active-toolchain
+
+ # Run
+ - name: Set LD_LIBRARY_PATH (Linux)
+ if: runner.os == 'Linux'
+ run: |
+ SYSROOT=$(rustc --print sysroot)
+ echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV
+ - name: Link rustc dylib (MacOS)
+ if: runner.os == 'macOS'
+ run: |
+ SYSROOT=$(rustc --print sysroot)
+ sudo mkdir -p /usr/local/lib
+ sudo find "${SYSROOT}/lib" -maxdepth 1 -name '*dylib' -exec ln -s {} /usr/local/lib \;
+ - name: Set PATH (Windows)
+ if: runner.os == 'Windows'
+ run: |
+ SYSROOT=$(rustc --print sysroot)
+ echo "$SYSROOT/bin" >> $GITHUB_PATH
+
+ - name: Build
+ run: cargo build --features deny-warnings,internal
+
+ - name: Test
+ if: runner.os == 'Linux'
+ run: cargo test --features deny-warnings,internal
+
+ - name: Test
+ if: runner.os != 'Linux'
+ run: cargo test --features deny-warnings,internal -- --skip dogfood
+
+ - name: Test clippy_lints
+ run: cargo test --features deny-warnings,internal
+ working-directory: clippy_lints
+
+ - name: Test clippy_utils
+ run: cargo test --features deny-warnings,internal
+ working-directory: clippy_utils
+
+ - name: Test rustc_tools_util
+ run: cargo test --features deny-warnings
+ working-directory: rustc_tools_util
+
+ - name: Test clippy_dev
+ run: cargo test --features deny-warnings
+ working-directory: clippy_dev
+
+ - name: Test clippy-driver
+ run: bash .github/driver.sh
+ env:
+ OS: ${{ runner.os }}
+
+ metadata_collection:
+ needs: changelog
+ runs-on: ubuntu-latest
+
+ steps:
+ # Setup
+ - uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
+ with:
+ github_token: "${{ secrets.github_token }}"
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Install toolchain
+ run: rustup show active-toolchain
+
+ - name: Test metadata collection
+ run: cargo collect-metadata
+
+ integration_build:
+ needs: changelog
+ runs-on: ubuntu-latest
+
+ steps:
+ # Setup
+ - uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
+ with:
+ github_token: "${{ secrets.github_token }}"
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Install toolchain
+ run: rustup show active-toolchain
+
+ # Run
+ - name: Build Integration Test
+ run: cargo test --test integration --features integration --no-run
+
+ # Upload
+ - name: Extract Binaries
+ run: |
+ DIR=$CARGO_TARGET_DIR/debug
+ rm $DIR/deps/integration-*.d
+ mv $DIR/deps/integration-* $DIR/integration
+ find $DIR ! -executable -o -type d ! -path $DIR | xargs rm -rf
+ rm -rf $CARGO_TARGET_DIR/release
+
+ - name: Upload Binaries
+ uses: actions/upload-artifact@v1
+ with:
+ name: target
+ path: target
+
+ integration:
+ needs: integration_build
+ strategy:
+ fail-fast: false
+ max-parallel: 6
+ matrix:
+ integration:
+ - 'rust-lang/cargo'
+ # FIXME: re-enable once fmt_macros is renamed in RLS
+ # - 'rust-lang/rls'
+ - 'rust-lang/chalk'
+ - 'rust-lang/rustfmt'
+ - 'Marwes/combine'
+ - 'Geal/nom'
+ - 'rust-lang/stdarch'
+ - 'serde-rs/serde'
+ # FIXME: chrono currently cannot be compiled with `--all-targets`
+ # - 'chronotope/chrono'
+ - 'hyperium/hyper'
+ - 'rust-random/rand'
+ - 'rust-lang/futures-rs'
+ - 'rust-itertools/itertools'
+ - 'rust-lang-nursery/failure'
+ - 'rust-lang/log'
+
+ runs-on: ubuntu-latest
+
+ steps:
+ # Setup
+ - uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
+ with:
+ github_token: "${{ secrets.github_token }}"
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Install toolchain
+ run: rustup show active-toolchain
+
+ # Download
+ - name: Download target dir
+ uses: actions/download-artifact@v1
+ with:
+ name: target
+ path: target
+
+ - name: Make Binaries Executable
+ run: chmod +x $CARGO_TARGET_DIR/debug/*
+
+ # Run
+ - name: Test ${{ matrix.integration }}
+ run: |
+ RUSTUP_TOOLCHAIN="$(rustup show active-toolchain | grep -o -E "nightly-[0-9]{4}-[0-9]{2}-[0-9]{2}")" \
+ $CARGO_TARGET_DIR/debug/integration
+ env:
+ INTEGRATION: ${{ matrix.integration }}
+
+ # These jobs don't actually test anything; they're only used to tell
+ # bors that the build completed, as there is no practical way to detect
+ # when a workflow is successful by listening to webhooks only.
+ #
+ # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
+
+ end-success:
+ name: bors test finished
+ if: github.event.pusher.name == 'bors' && success()
+ runs-on: ubuntu-latest
+ needs: [changelog, base, metadata_collection, integration_build, integration]
+
+ steps:
+ - name: Mark the job as successful
+ run: exit 0
+
+ end-failure:
+ name: bors test finished
+ if: github.event.pusher.name == 'bors' && (failure() || cancelled())
+ runs-on: ubuntu-latest
+ needs: [changelog, base, metadata_collection, integration_build, integration]
+
+ steps:
+ - name: Mark the job as a failure
+ run: exit 1
diff --git a/src/tools/clippy/.github/workflows/clippy_dev.yml b/src/tools/clippy/.github/workflows/clippy_dev.yml
new file mode 100644
index 000000000..22051093c
--- /dev/null
+++ b/src/tools/clippy/.github/workflows/clippy_dev.yml
@@ -0,0 +1,70 @@
+name: Clippy Dev Test
+
+on:
+ push:
+ branches:
+ - auto
+ - try
+ pull_request:
+ # Only run on paths that get checked by the clippy_dev tool
+ paths:
+ - 'CHANGELOG.md'
+ - 'README.md'
+ - '**.stderr'
+ - '**.rs'
+
+env:
+ RUST_BACKTRACE: 1
+
+jobs:
+ clippy_dev:
+ runs-on: ubuntu-latest
+
+ steps:
+ # Setup
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ # Run
+ - name: Build
+ run: cargo build --features deny-warnings
+ working-directory: clippy_dev
+
+ - name: Test update_lints
+ run: cargo dev update_lints --check
+
+ - name: Test fmt
+ run: cargo dev fmt --check
+
+ - name: Test cargo dev new lint
+ run: |
+ cargo dev new_lint --name new_early_pass --pass early
+ cargo dev new_lint --name new_late_pass --pass late
+ cargo check
+ git reset --hard HEAD
+
+ # These jobs don't actually test anything; they're only used to tell
+ # bors that the build completed, as there is no practical way to detect
+ # when a workflow is successful by listening to webhooks only.
+ #
+ # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
+
+ end-success:
+ name: bors dev test finished
+ if: github.event.pusher.name == 'bors' && success()
+ runs-on: ubuntu-latest
+ needs: [clippy_dev]
+
+ steps:
+ - name: Mark the job as successful
+ run: exit 0
+
+ end-failure:
+ name: bors dev test finished
+ if: github.event.pusher.name == 'bors' && (failure() || cancelled())
+ runs-on: ubuntu-latest
+ needs: [clippy_dev]
+
+ steps:
+ - name: Mark the job as a failure
+ run: exit 1
diff --git a/src/tools/clippy/.github/workflows/deploy.yml b/src/tools/clippy/.github/workflows/deploy.yml
new file mode 100644
index 000000000..71d71d103
--- /dev/null
+++ b/src/tools/clippy/.github/workflows/deploy.yml
@@ -0,0 +1,64 @@
+name: Deploy
+
+on:
+ push:
+ branches:
+ - master
+ - beta
+ tags:
+ - rust-1.**
+
+env:
+ TARGET_BRANCH: 'gh-pages'
+ SHA: '${{ github.sha }}'
+ SSH_REPO: 'git@github.com:${{ github.repository }}.git'
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: github.repository == 'rust-lang/rust-clippy'
+
+ steps:
+ # Setup
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+ with:
+ ref: ${{ env.TARGET_BRANCH }}
+ path: 'out'
+
+ # Run
+ - name: Set tag name
+ if: startswith(github.ref, 'refs/tags/')
+ run: |
+ TAG=$(basename ${{ github.ref }})
+ echo "TAG_NAME=$TAG" >> $GITHUB_ENV
+ - name: Set beta to true
+ if: github.ref == 'refs/heads/beta'
+ run: echo "BETA=true" >> $GITHUB_ENV
+
+ # We need to check out all files that (transitively) depend on the
+ # structure of the gh-pages branch, so that we're able to change that
+ # structure without breaking the deployment.
+ - name: Use deploy files from master branch
+ run: |
+ git fetch --no-tags --prune --depth=1 origin master
+ git checkout origin/master -- .github/deploy.sh util/versions.py util/gh-pages/versions.html
+
+ # Generate lockfile for caching to avoid build problems with cached deps
+ - name: cargo generate-lockfile
+ run: cargo generate-lockfile
+
+ - name: Cache
+ uses: Swatinem/rust-cache@v1.3.0
+
+ - name: cargo collect-metadata
+ run: cargo collect-metadata
+
+ - name: Deploy
+ run: |
+ eval "$(ssh-agent -s)"
+ ssh-add - <<< "${{ secrets.DEPLOY_KEY }}"
+ bash .github/deploy.sh
diff --git a/src/tools/clippy/.github/workflows/remark.yml b/src/tools/clippy/.github/workflows/remark.yml
new file mode 100644
index 000000000..81ef072bb
--- /dev/null
+++ b/src/tools/clippy/.github/workflows/remark.yml
@@ -0,0 +1,66 @@
+name: Remark
+
+on:
+ push:
+ branches:
+ - auto
+ - try
+ pull_request:
+ paths:
+ - '**.md'
+
+jobs:
+ remark:
+ runs-on: ubuntu-latest
+
+ steps:
+ # Setup
+ - name: Checkout
+ uses: actions/checkout@v3.0.2
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v1.4.4
+ with:
+ node-version: '14.x'
+
+ - name: Install remark
+ run: npm install remark-cli remark-lint remark-lint-maximum-line-length remark-preset-lint-recommended remark-gfm
+
+ - name: Install mdbook
+ run: |
+ mkdir mdbook
+ curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.18/mdbook-v0.4.18-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
+ echo `pwd`/mdbook >> $GITHUB_PATH
+
+ # Run
+ - name: Check *.md files
+ run: git ls-files -z '*.md' | xargs -0 -n 1 -I {} ./node_modules/.bin/remark {} -u lint -f > /dev/null
+
+ - name: Build mdbook
+ run: mdbook build book
+
+ # These jobs don't actually test anything; they're only used to tell
+ # bors that the build completed, as there is no practical way to detect
+ # when a workflow is successful by listening to webhooks only.
+ #
+ # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
+
+ end-success:
+ name: bors remark test finished
+ if: github.event.pusher.name == 'bors' && success()
+ runs-on: ubuntu-latest
+ needs: [remark]
+
+ steps:
+ - name: Mark the job as successful
+ run: exit 0
+
+ end-failure:
+ name: bors remark test finished
+ if: github.event.pusher.name == 'bors' && (failure() || cancelled())
+ runs-on: ubuntu-latest
+ needs: [remark]
+
+ steps:
+ - name: Mark the job as a failure
+ run: exit 1
diff --git a/src/tools/clippy/.remarkrc b/src/tools/clippy/.remarkrc
new file mode 100644
index 000000000..04b82b8cc
--- /dev/null
+++ b/src/tools/clippy/.remarkrc
@@ -0,0 +1,13 @@
+{
+ "plugins": [
+ "remark-preset-lint-recommended",
+ "remark-gfm",
+ ["remark-lint-list-item-indent", false],
+ ["remark-lint-no-literal-urls", false],
+ ["remark-lint-no-shortcut-reference-link", false],
+ ["remark-lint-maximum-line-length", 120]
+ ],
+ "settings": {
+ "commonmark": true
+ }
+}
diff --git a/src/tools/clippy/CHANGELOG.md b/src/tools/clippy/CHANGELOG.md
new file mode 100644
index 000000000..2278a8dc1
--- /dev/null
+++ b/src/tools/clippy/CHANGELOG.md
@@ -0,0 +1,4044 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+See [Changelog Update](book/src/development/infrastructure/changelog_update.md) if you want to update this
+document.
+
+## Unreleased / In Rust Nightly
+
+[7c21f91b...master](https://github.com/rust-lang/rust-clippy/compare/7c21f91b...master)
+
+## Rust 1.62
+
+Current stable, released 2022-06-30
+
+[d0cf3481...7c21f91b](https://github.com/rust-lang/rust-clippy/compare/d0cf3481...7c21f91b)
+
+### New Lints
+
+* [`large_include_file`]
+ [#8727](https://github.com/rust-lang/rust-clippy/pull/8727)
+* [`cast_abs_to_unsigned`]
+ [#8635](https://github.com/rust-lang/rust-clippy/pull/8635)
+* [`err_expect`]
+ [#8606](https://github.com/rust-lang/rust-clippy/pull/8606)
+* [`unnecessary_owned_empty_strings`]
+ [#8660](https://github.com/rust-lang/rust-clippy/pull/8660)
+* [`empty_structs_with_brackets`]
+ [#8594](https://github.com/rust-lang/rust-clippy/pull/8594)
+* [`crate_in_macro_def`]
+ [#8576](https://github.com/rust-lang/rust-clippy/pull/8576)
+* [`needless_option_take`]
+ [#8665](https://github.com/rust-lang/rust-clippy/pull/8665)
+* [`bytes_count_to_len`]
+ [#8711](https://github.com/rust-lang/rust-clippy/pull/8711)
+* [`is_digit_ascii_radix`]
+ [#8624](https://github.com/rust-lang/rust-clippy/pull/8624)
+* [`await_holding_invalid_type`]
+ [#8707](https://github.com/rust-lang/rust-clippy/pull/8707)
+* [`trim_split_whitespace`]
+ [#8575](https://github.com/rust-lang/rust-clippy/pull/8575)
+* [`pub_use`]
+ [#8670](https://github.com/rust-lang/rust-clippy/pull/8670)
+* [`format_push_string`]
+ [#8626](https://github.com/rust-lang/rust-clippy/pull/8626)
+* [`empty_drop`]
+ [#8571](https://github.com/rust-lang/rust-clippy/pull/8571)
+* [`drop_non_drop`]
+ [#8630](https://github.com/rust-lang/rust-clippy/pull/8630)
+* [`forget_non_drop`]
+ [#8630](https://github.com/rust-lang/rust-clippy/pull/8630)
+
+### Moves and Deprecations
+
+* Move [`only_used_in_recursion`] to `nursery` (now allow-by-default)
+ [#8783](https://github.com/rust-lang/rust-clippy/pull/8783)
+* Move [`stable_sort_primitive`] to `pedantic` (now allow-by-default)
+ [#8716](https://github.com/rust-lang/rust-clippy/pull/8716)
+
+### Enhancements
+
+* Remove overlap between [`manual_split_once`] and [`needless_splitn`]
+ [#8631](https://github.com/rust-lang/rust-clippy/pull/8631)
+* [`map_identity`]: Now checks for needless `map_err`
+ [#8487](https://github.com/rust-lang/rust-clippy/pull/8487)
+* [`extra_unused_lifetimes`]: Now checks for impl lifetimes
+ [#8737](https://github.com/rust-lang/rust-clippy/pull/8737)
+* [`cast_possible_truncation`]: Now catches more cases with larger shift or divide operations
+ [#8687](https://github.com/rust-lang/rust-clippy/pull/8687)
+* [`identity_op`]: Now checks for modulo expressions
+ [#8519](https://github.com/rust-lang/rust-clippy/pull/8519)
+* [`panic`]: No longer lints in constant contexts
+ [#8592](https://github.com/rust-lang/rust-clippy/pull/8592)
+* [`manual_split_once`]: Now lints manual iteration of `splitn` (see the sketch after this list)
+ [#8717](https://github.com/rust-lang/rust-clippy/pull/8717)
+* [`self_named_module_files`], [`mod_module_files`]: Now handle relative module paths
+ [#8611](https://github.com/rust-lang/rust-clippy/pull/8611)
+* [`unsound_collection_transmute`]: Now has better size and alignment checks
+ [#8648](https://github.com/rust-lang/rust-clippy/pull/8648)
+* [`unnested_or_patterns`]: Ignores cases where the suggestion would be longer
+ [#8619](https://github.com/rust-lang/rust-clippy/pull/8619)
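+
+As an illustrative sketch (an editorial addition, not part of the original release
+notes; the helper name `split_key_value` is made up), the `manual_split_once`
+entry above targets manual iteration of a two-element `splitn` such as:
+
+```rust
+fn split_key_value(line: &str) -> Option<(&str, &str)> {
+    // Manual iteration of `splitn(2, ..)`, which the lint flags:
+    let mut parts = line.splitn(2, '=');
+    let key = parts.next()?;
+    let value = parts.next()?;
+    Some((key, value))
+
+    // The lint suggests the equivalent `split_once` form instead:
+    // line.split_once('=')
+}
+```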
+
+### False Positive Fixes
+
+* [`rest_pat_in_fully_bound_structs`]: Now ignores structs marked with `#[non_exhaustive]`
+ [#8690](https://github.com/rust-lang/rust-clippy/pull/8690)
+* [`needless_late_init`]: No longer lints `if let` statements, `let mut` bindings, or instances that
+  change the drop order significantly
+ [#8617](https://github.com/rust-lang/rust-clippy/pull/8617)
+* [`unnecessary_cast`]: No longer lints casts to aliased or non-primitive types
+ [#8596](https://github.com/rust-lang/rust-clippy/pull/8596)
+* [`init_numbered_fields`]: No longer lints type aliases
+ [#8780](https://github.com/rust-lang/rust-clippy/pull/8780)
+* [`needless_option_as_deref`]: No longer lints for `as_deref_mut` on `Option` values that can't be moved
+ [#8646](https://github.com/rust-lang/rust-clippy/pull/8646)
+* [`mistyped_literal_suffixes`]: Now ignores float literals without an exponent
+ [#8742](https://github.com/rust-lang/rust-clippy/pull/8742)
+* [`undocumented_unsafe_blocks`]: Now ignores unsafe blocks from proc-macros and works better for sub-expressions
+ [#8450](https://github.com/rust-lang/rust-clippy/pull/8450)
+* [`same_functions_in_if_condition`]: Now allows different constants, even if they have the same value
+ [#8673](https://github.com/rust-lang/rust-clippy/pull/8673)
+* [`needless_match`]: Now checks for more complex types and ignores type coercion
+ [#8549](https://github.com/rust-lang/rust-clippy/pull/8549)
+* [`assertions_on_constants`]: Now ignores constants from `cfg!` macros
+ [#8614](https://github.com/rust-lang/rust-clippy/pull/8614)
+* [`indexing_slicing`]: Fix false positives with constant indices in
+ [#8588](https://github.com/rust-lang/rust-clippy/pull/8588)
+* [`iter_with_drain`]: Now ignores iterator references
+ [#8668](https://github.com/rust-lang/rust-clippy/pull/8668)
+* [`useless_attribute`]: Now allows [`redundant_pub_crate`] on `use` items
+ [#8743](https://github.com/rust-lang/rust-clippy/pull/8743)
+* [`cast_ptr_alignment`]: Now ignores expressions used for unaligned reads and writes
+ [#8632](https://github.com/rust-lang/rust-clippy/pull/8632)
+* [`wrong_self_convention`]: Now allows `&mut self` and no self as arguments for `is_*` methods
+ [#8738](https://github.com/rust-lang/rust-clippy/pull/8738)
+* [`mut_from_ref`]: Only lint in unsafe code
+ [#8647](https://github.com/rust-lang/rust-clippy/pull/8647)
+* [`redundant_pub_crate`]: Now allows macro exports
+ [#8736](https://github.com/rust-lang/rust-clippy/pull/8736)
+* [`needless_match`]: Ignores cases where the else block expression is different
+ [#8700](https://github.com/rust-lang/rust-clippy/pull/8700)
+* [`transmute_int_to_char`]: Now allows transmutations in `const` code
+ [#8610](https://github.com/rust-lang/rust-clippy/pull/8610)
+* [`manual_non_exhaustive`]: Ignores cases where the enum value is used
+ [#8645](https://github.com/rust-lang/rust-clippy/pull/8645)
+* [`redundant_closure`]: Now ignores coerced closures
+ [#8431](https://github.com/rust-lang/rust-clippy/pull/8431)
+* [`identity_op`]: Is now ignored in cases where extra brackets would be needed
+ [#8730](https://github.com/rust-lang/rust-clippy/pull/8730)
+* [`let_unit_value`]: Now ignores cases which are used for type inference
+ [#8563](https://github.com/rust-lang/rust-clippy/pull/8563)
+
+### Suggestion Fixes/Improvements
+
+* [`manual_split_once`]: Fixed incorrect suggestions for single result accesses
+ [#8631](https://github.com/rust-lang/rust-clippy/pull/8631)
+* [`bytes_nth`]: Fix typos in the diagnostic message
+ [#8403](https://github.com/rust-lang/rust-clippy/pull/8403)
+* [`mistyped_literal_suffixes`]: Now suggests the correct integer types
+ [#8742](https://github.com/rust-lang/rust-clippy/pull/8742)
+* [`unnecessary_to_owned`]: Fixed suggestion based on the configured msrv
+ [#8692](https://github.com/rust-lang/rust-clippy/pull/8692)
+* [`single_element_loop`]: Improve lint for Edition 2021 arrays
+ [#8616](https://github.com/rust-lang/rust-clippy/pull/8616)
+* [`manual_bits`]: Now includes a cast for proper type conversion, when needed
+ [#8677](https://github.com/rust-lang/rust-clippy/pull/8677)
+* [`option_map_unit_fn`], [`result_map_unit_fn`]: Fix some incorrect suggestions
+ [#8584](https://github.com/rust-lang/rust-clippy/pull/8584)
+* [`collapsible_else_if`]: Add whitespace in suggestion
+ [#8729](https://github.com/rust-lang/rust-clippy/pull/8729)
+* [`transmute_bytes_to_str`]: Now suggests `from_utf8_unchecked` in `const` contexts
+ [#8612](https://github.com/rust-lang/rust-clippy/pull/8612)
+* [`map_clone`]: Improve message and suggestion based on the msrv
+ [#8688](https://github.com/rust-lang/rust-clippy/pull/8688)
+* [`needless_late_init`]: Now shows the `let` statement where it was first initialized
+ [#8779](https://github.com/rust-lang/rust-clippy/pull/8779)
+
+### ICE Fixes
+
+* [`only_used_in_recursion`]
+ [#8691](https://github.com/rust-lang/rust-clippy/pull/8691)
+* [`cast_slice_different_sizes`]
+ [#8720](https://github.com/rust-lang/rust-clippy/pull/8720)
+* [`iter_overeager_cloned`]
+ [#8602](https://github.com/rust-lang/rust-clippy/pull/8602)
+* [`undocumented_unsafe_blocks`]
+ [#8686](https://github.com/rust-lang/rust-clippy/pull/8686)
+
+## Rust 1.61
+
+Released 2022-05-19
+
+[57b3c4b...d0cf3481](https://github.com/rust-lang/rust-clippy/compare/57b3c4b...d0cf3481)
+
+### New Lints
+
+* [`only_used_in_recursion`]
+ [#8422](https://github.com/rust-lang/rust-clippy/pull/8422)
+* [`cast_enum_truncation`]
+ [#8381](https://github.com/rust-lang/rust-clippy/pull/8381)
+* [`missing_spin_loop`]
+ [#8174](https://github.com/rust-lang/rust-clippy/pull/8174)
+* [`deref_by_slicing`]
+ [#8218](https://github.com/rust-lang/rust-clippy/pull/8218)
+* [`needless_match`]
+ [#8471](https://github.com/rust-lang/rust-clippy/pull/8471)
+* [`allow_attributes_without_reason`] (Requires `#![feature(lint_reasons)]`)
+ [#8504](https://github.com/rust-lang/rust-clippy/pull/8504)
+* [`print_in_format_impl`]
+ [#8253](https://github.com/rust-lang/rust-clippy/pull/8253)
+* [`unnecessary_find_map`]
+ [#8489](https://github.com/rust-lang/rust-clippy/pull/8489)
+* [`or_then_unwrap`]
+ [#8561](https://github.com/rust-lang/rust-clippy/pull/8561)
+* [`unnecessary_join`]
+ [#8579](https://github.com/rust-lang/rust-clippy/pull/8579)
+* [`iter_with_drain`]
+ [#8483](https://github.com/rust-lang/rust-clippy/pull/8483)
+* [`cast_enum_constructor`]
+ [#8562](https://github.com/rust-lang/rust-clippy/pull/8562)
+* [`cast_slice_different_sizes`]
+ [#8445](https://github.com/rust-lang/rust-clippy/pull/8445)
+
+### Moves and Deprecations
+
+* Moved [`transmute_undefined_repr`] to `nursery` (now allow-by-default)
+ [#8432](https://github.com/rust-lang/rust-clippy/pull/8432)
+* Moved [`try_err`] to `restriction`
+ [#8544](https://github.com/rust-lang/rust-clippy/pull/8544)
+* Move [`iter_with_drain`] to `nursery`
+ [#8541](https://github.com/rust-lang/rust-clippy/pull/8541)
+* Renamed `to_string_in_display` to [`recursive_format_impl`]
+ [#8188](https://github.com/rust-lang/rust-clippy/pull/8188)
+
+### Enhancements
+
+* [`dbg_macro`]: The lint level can now be set with crate attributes and works inside macros
+ [#8411](https://github.com/rust-lang/rust-clippy/pull/8411)
+* [`ptr_as_ptr`]: Now works inside macros
+ [#8442](https://github.com/rust-lang/rust-clippy/pull/8442)
+* [`use_self`]: Now works for variants in match expressions
+ [#8456](https://github.com/rust-lang/rust-clippy/pull/8456)
+* [`await_holding_lock`]: Now lints for `parking_lot::{Mutex, RwLock}`
+ [#8419](https://github.com/rust-lang/rust-clippy/pull/8419)
+* [`recursive_format_impl`]: Now checks for format calls on `self`
+ [#8188](https://github.com/rust-lang/rust-clippy/pull/8188)
+
+### False Positive Fixes
+
+* [`new_without_default`]: No longer lints for `new()` methods with `#[doc(hidden)]`
+ [#8472](https://github.com/rust-lang/rust-clippy/pull/8472)
+* [`transmute_undefined_repr`]: No longer lints for single-field structs with `#[repr(C)]`,
+  generic parameters, wide pointers, unions, or tuples, and now allows several forms of type erasure
+ [#8425](https://github.com/rust-lang/rust-clippy/pull/8425)
+ [#8553](https://github.com/rust-lang/rust-clippy/pull/8553)
+ [#8440](https://github.com/rust-lang/rust-clippy/pull/8440)
+ [#8547](https://github.com/rust-lang/rust-clippy/pull/8547)
+* [`match_single_binding`], [`match_same_arms`], [`match_as_ref`], [`match_bool`]: No longer
+ lint `match` expressions with `cfg`ed arms
+ [#8443](https://github.com/rust-lang/rust-clippy/pull/8443)
+* [`single_component_path_imports`]: No longer lints on macros
+ [#8537](https://github.com/rust-lang/rust-clippy/pull/8537)
+* [`ptr_arg`]: Allow `&mut` arguments for `Cow<_>`
+ [#8552](https://github.com/rust-lang/rust-clippy/pull/8552)
+* [`needless_borrow`]: No longer lints for method calls
+ [#8441](https://github.com/rust-lang/rust-clippy/pull/8441)
+* [`match_same_arms`]: Now ensures that interposing arm patterns don't overlap
+ [#8232](https://github.com/rust-lang/rust-clippy/pull/8232)
+* [`default_trait_access`]: Now allows `Default::default` in update expressions
+ [#8433](https://github.com/rust-lang/rust-clippy/pull/8433)
+
+### Suggestion Fixes/Improvements
+
+* [`redundant_slicing`]: Fixed suggestion for method calls
+ [#8218](https://github.com/rust-lang/rust-clippy/pull/8218)
+* [`map_flatten`]: Long suggestions will now be split up into two help messages
+ [#8520](https://github.com/rust-lang/rust-clippy/pull/8520)
+* [`unnecessary_lazy_evaluations`]: Now shows suggestions for longer code snippets
+ [#8543](https://github.com/rust-lang/rust-clippy/pull/8543)
+* [`unnecessary_sort_by`]: Now suggests `Reverse` including the path
+ [#8462](https://github.com/rust-lang/rust-clippy/pull/8462)
+* [`search_is_some`]: More suggestions are now `MachineApplicable`
+ [#8536](https://github.com/rust-lang/rust-clippy/pull/8536)
+
+### Documentation Improvements
+
+* [`new_without_default`]: Document `pub` requirement for the struct and fields
+ [#8429](https://github.com/rust-lang/rust-clippy/pull/8429)
+
+## Rust 1.60
+
+Released 2022-04-07
+
+[0eff589...57b3c4b](https://github.com/rust-lang/rust-clippy/compare/0eff589...57b3c4b)
+
+### New Lints
+
+* [`single_char_lifetime_names`]
+ [#8236](https://github.com/rust-lang/rust-clippy/pull/8236)
+* [`iter_overeager_cloned`]
+ [#8203](https://github.com/rust-lang/rust-clippy/pull/8203)
+* [`transmute_undefined_repr`]
+ [#8398](https://github.com/rust-lang/rust-clippy/pull/8398)
+* [`default_union_representation`]
+ [#8289](https://github.com/rust-lang/rust-clippy/pull/8289)
+* [`manual_bits`]
+ [#8213](https://github.com/rust-lang/rust-clippy/pull/8213)
+* [`borrow_as_ptr`]
+ [#8210](https://github.com/rust-lang/rust-clippy/pull/8210)
+
+### Moves and Deprecations
+
+* Moved [`disallowed_methods`] and [`disallowed_types`] to `style` (now warn-by-default)
+ [#8261](https://github.com/rust-lang/rust-clippy/pull/8261)
+* Rename `ref_in_deref` to [`needless_borrow`]
+ [#8217](https://github.com/rust-lang/rust-clippy/pull/8217)
+* Moved [`mutex_atomic`] to `nursery` (now allow-by-default)
+ [#8260](https://github.com/rust-lang/rust-clippy/pull/8260)
+
+### Enhancements
+
+* [`ptr_arg`]: Now takes the argument usage into account and lints for mutable references
+ [#8271](https://github.com/rust-lang/rust-clippy/pull/8271)
+* [`unused_io_amount`]: Now supports async read and write traits
+ [#8179](https://github.com/rust-lang/rust-clippy/pull/8179)
+* [`while_let_on_iterator`]: Improved detection to catch more cases
+ [#8221](https://github.com/rust-lang/rust-clippy/pull/8221)
+* [`trait_duplication_in_bounds`]: Now covers trait functions with `Self` bounds
+ [#8252](https://github.com/rust-lang/rust-clippy/pull/8252)
+* [`unwrap_used`]: Now works for `.get(i).unwrap()` and `.get_mut(i).unwrap()`
+ [#8372](https://github.com/rust-lang/rust-clippy/pull/8372)
+* [`map_clone`]: The suggestion takes `msrv` into account
+ [#8280](https://github.com/rust-lang/rust-clippy/pull/8280)
+* [`manual_bits`] and [`borrow_as_ptr`]: Now track the `clippy::msrv` attribute
+ [#8280](https://github.com/rust-lang/rust-clippy/pull/8280)
+* [`disallowed_methods`]: Now works for methods on primitive types
+ [#8112](https://github.com/rust-lang/rust-clippy/pull/8112)
+* [`not_unsafe_ptr_arg_deref`]: Now works for type aliases
+ [#8273](https://github.com/rust-lang/rust-clippy/pull/8273)
+* [`needless_question_mark`]: Now works for async functions
+ [#8311](https://github.com/rust-lang/rust-clippy/pull/8311)
+* [`iter_not_returning_iterator`]: Now handles type projections
+ [#8228](https://github.com/rust-lang/rust-clippy/pull/8228)
+* [`wrong_self_convention`]: Now detects wrong `self` references in more cases
+ [#8208](https://github.com/rust-lang/rust-clippy/pull/8208)
+* [`single_match`]: Now works for `match` statements with tuples
+ [#8322](https://github.com/rust-lang/rust-clippy/pull/8322)
+
+### False Positive Fixes
+
+* [`erasing_op`]: No longer triggers if the output type changes
+ [#8204](https://github.com/rust-lang/rust-clippy/pull/8204)
+* [`if_same_then_else`]: No longer triggers for `if let` statements
+ [#8297](https://github.com/rust-lang/rust-clippy/pull/8297)
+* [`manual_memcpy`]: No longer lints on `VecDeque`
+ [#8226](https://github.com/rust-lang/rust-clippy/pull/8226)
+* [`trait_duplication_in_bounds`]: Now takes path segments into account
+ [#8315](https://github.com/rust-lang/rust-clippy/pull/8315)
+* [`deref_addrof`]: No longer lints when the dereference or borrow occurs in a different context
+ [#8268](https://github.com/rust-lang/rust-clippy/pull/8268)
+* [`type_repetition_in_bounds`]: Now checks for full equality to prevent false positives
+ [#8224](https://github.com/rust-lang/rust-clippy/pull/8224)
+* [`ptr_arg`]: No longer lints for mutable references in traits
+ [#8369](https://github.com/rust-lang/rust-clippy/pull/8369)
+* [`implicit_clone`]: No longer lints for double references
+ [#8231](https://github.com/rust-lang/rust-clippy/pull/8231)
+* [`needless_lifetimes`]: No longer lints lifetimes for explicit `self` types
+ [#8278](https://github.com/rust-lang/rust-clippy/pull/8278)
+* [`op_ref`]: No longer lints in `BinOp` impl if that can cause recursion
+ [#8298](https://github.com/rust-lang/rust-clippy/pull/8298)
+* [`enum_variant_names`]: No longer triggers for empty variant names
+ [#8329](https://github.com/rust-lang/rust-clippy/pull/8329)
+* [`redundant_closure`]: No longer lints for `Arc<T>` or `Rc<T>`
+ [#8193](https://github.com/rust-lang/rust-clippy/pull/8193)
+* [`iter_not_returning_iterator`]: No longer lints on trait implementations; it now lints on trait definitions instead
+ [#8228](https://github.com/rust-lang/rust-clippy/pull/8228)
+* [`single_match`]: No longer lints on exhaustive enum patterns without a wildcard
+ [#8322](https://github.com/rust-lang/rust-clippy/pull/8322)
+* [`manual_swap`]: No longer lints on cases that involve automatic dereferences
+ [#8220](https://github.com/rust-lang/rust-clippy/pull/8220)
+* [`useless_format`]: Now works for implicit named arguments
+ [#8295](https://github.com/rust-lang/rust-clippy/pull/8295)
+
+### Suggestion Fixes/Improvements
+
+* [`needless_borrow`]: Prevent mutable borrows being moved and suggest removing the borrow on method calls
+ [#8217](https://github.com/rust-lang/rust-clippy/pull/8217)
+* [`chars_next_cmp`]: Correctly escapes the suggestion
+ [#8376](https://github.com/rust-lang/rust-clippy/pull/8376)
+* [`explicit_write`]: Add suggestions for `write!`s with format arguments
+ [#8365](https://github.com/rust-lang/rust-clippy/pull/8365)
+* [`manual_memcpy`]: Suggests `copy_from_slice` when applicable
+ [#8226](https://github.com/rust-lang/rust-clippy/pull/8226)
+* [`or_fun_call`]: Improved suggestion display for long arguments
+ [#8292](https://github.com/rust-lang/rust-clippy/pull/8292)
+* [`unnecessary_cast`]: Now correctly includes the sign
+ [#8350](https://github.com/rust-lang/rust-clippy/pull/8350)
+* [`cmp_owned`]: No longer flips the comparison order
+ [#8299](https://github.com/rust-lang/rust-clippy/pull/8299)
+* [`explicit_counter_loop`]: Now correctly suggests `iter()` on references
+ [#8382](https://github.com/rust-lang/rust-clippy/pull/8382)
+
+### ICE Fixes
+
+* [`manual_split_once`]
+ [#8250](https://github.com/rust-lang/rust-clippy/pull/8250)
+
+### Documentation Improvements
+
+* [`map_flatten`]: Add documentation for the `Option` type
+ [#8354](https://github.com/rust-lang/rust-clippy/pull/8354)
+* Document that Clippy's driver might use a different code generation than rustc
+ [#8037](https://github.com/rust-lang/rust-clippy/pull/8037)
+* Clippy's lint list will now automatically focus the search box
+ [#8343](https://github.com/rust-lang/rust-clippy/pull/8343)
+
+### Others
+
+* Clippy now warns if multiple Clippy config files exist
+ [#8326](https://github.com/rust-lang/rust-clippy/pull/8326)
+
+## Rust 1.59
+
+Released 2022-02-24
+
+[e181011...0eff589](https://github.com/rust-lang/rust-clippy/compare/e181011...0eff589)
+
+### New Lints
+
+* [`index_refutable_slice`]
+ [#7643](https://github.com/rust-lang/rust-clippy/pull/7643)
+* [`needless_splitn`]
+ [#7896](https://github.com/rust-lang/rust-clippy/pull/7896)
+* [`unnecessary_to_owned`]
+ [#7978](https://github.com/rust-lang/rust-clippy/pull/7978)
+* [`needless_late_init`]
+ [#7995](https://github.com/rust-lang/rust-clippy/pull/7995)
+* [`octal_escapes`] [#8007](https://github.com/rust-lang/rust-clippy/pull/8007)
+* [`return_self_not_must_use`]
+ [#8071](https://github.com/rust-lang/rust-clippy/pull/8071)
+* [`init_numbered_fields`]
+ [#8170](https://github.com/rust-lang/rust-clippy/pull/8170)
+
+### Moves and Deprecations
+
+* Move `if_then_panic` to `pedantic` and rename to [`manual_assert`] (now
+ allow-by-default) [#7810](https://github.com/rust-lang/rust-clippy/pull/7810)
+* Rename `disallow_type` to [`disallowed_types`] and `disallowed_method` to
+ [`disallowed_methods`]
+ [#7984](https://github.com/rust-lang/rust-clippy/pull/7984)
+* Move [`map_flatten`] to `complexity` (now warn-by-default)
+ [#8054](https://github.com/rust-lang/rust-clippy/pull/8054)
+
+### Enhancements
+
+* [`match_overlapping_arm`]: Fix false negative where overlapping ranges that came
+  after included ranges weren't linted anymore
+ [#7909](https://github.com/rust-lang/rust-clippy/pull/7909)
+* [`deprecated_cfg_attr`]: Now takes the specified MSRV into account
+ [#7944](https://github.com/rust-lang/rust-clippy/pull/7944)
+* [`cast_lossless`]: Now also lints for `bool` to integer casts
+ [#7948](https://github.com/rust-lang/rust-clippy/pull/7948)
+* [`let_underscore_lock`]: Also emit lints for the `parking_lot` crate
+ [#7957](https://github.com/rust-lang/rust-clippy/pull/7957)
+* [`needless_borrow`]
+ [#7977](https://github.com/rust-lang/rust-clippy/pull/7977)
+ * Lint when a borrow is auto-dereffed more than once
+ * Lint in the trailing expression of a block for a match arm
+* [`strlen_on_c_strings`]
+  [#8001](https://github.com/rust-lang/rust-clippy/pull/8001)
+ * Lint when used without a fully-qualified path
+ * Suggest removing the surrounding unsafe block when possible
+* [`non_ascii_literal`]: Now also lints on `char`s, not just `string`s
+ [#8034](https://github.com/rust-lang/rust-clippy/pull/8034)
+* [`single_char_pattern`]: Now also lints on `split_inclusive`, `split_once`,
+ `rsplit_once`, `replace`, and `replacen`
+ [#8077](https://github.com/rust-lang/rust-clippy/pull/8077)
+* [`unwrap_or_else_default`]: Now also lints on `std` constructors like
+  `Vec::new`, `HashSet::new`, and `HashMap::new` (see the sketch after this list)
+ [#8163](https://github.com/rust-lang/rust-clippy/pull/8163)
+* [`shadow_reuse`]: Now also lints on shadowed `if let` bindings, instead of
+ [`shadow_unrelated`]
+ [#8165](https://github.com/rust-lang/rust-clippy/pull/8165)
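+
+To illustrate the `unwrap_or_else_default` entry above (an editorial sketch, not
+part of the original notes; the function name `take_or_default` is made up), the
+lint now also covers `std` constructors passed to `unwrap_or_else`:
+
+```rust
+fn take_or_default(values: Option<Vec<u32>>) -> Vec<u32> {
+    // Flagged: `Vec::new` is just the `Default` implementation for `Vec`.
+    let _flagged = values.clone().unwrap_or_else(Vec::new);
+
+    // Suggested replacement:
+    values.unwrap_or_default()
+}
+```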
+
+### False Positive Fixes
+
+* [`or_fun_call`], [`unnecessary_lazy_evaluations`]: Improve heuristics, so that
+ cheap functions (e.g. calling `.len()` on a `Vec`) won't get linted anymore
+ [#7639](https://github.com/rust-lang/rust-clippy/pull/7639)
+* [`manual_split_once`]: No longer suggests code changing the original behavior
+ [#7896](https://github.com/rust-lang/rust-clippy/pull/7896)
+* Don't show [`no_effect`] or [`unnecessary_operation`] warnings for unit structs
+  implementing `FnOnce`
+ [#7898](https://github.com/rust-lang/rust-clippy/pull/7898)
+* [`semicolon_if_nothing_returned`]: Fixed a bug where the lint wrongly
+ triggered on `let-else` statements
+ [#7955](https://github.com/rust-lang/rust-clippy/pull/7955)
+* [`if_then_some_else_none`]: No longer lints if there is an early return
+ [#7980](https://github.com/rust-lang/rust-clippy/pull/7980)
+* [`needless_collect`]: No longer suggests removal of `collect` when removal
+ would create code requiring mutably borrowing a value multiple times
+ [#7982](https://github.com/rust-lang/rust-clippy/pull/7982)
+* [`shadow_same`]: Fix false positive for `async` function's params
+ [#7997](https://github.com/rust-lang/rust-clippy/pull/7997)
+* [`suboptimal_flops`]: No longer triggers in constant functions
+ [#8009](https://github.com/rust-lang/rust-clippy/pull/8009)
+* [`type_complexity`]: No longer lints on associated types in traits
+ [#8030](https://github.com/rust-lang/rust-clippy/pull/8030)
+* [`question_mark`]: No longer lints if returned object is not local
+ [#8080](https://github.com/rust-lang/rust-clippy/pull/8080)
+* [`option_if_let_else`]: No longer lints on complex sub-patterns
+ [#8086](https://github.com/rust-lang/rust-clippy/pull/8086)
+* [`blocks_in_if_conditions`]: No longer lints on empty closures
+ [#8100](https://github.com/rust-lang/rust-clippy/pull/8100)
+* [`enum_variant_names`]: No longer lints when the first prefix is only a substring
+ of a camel-case word
+ [#8127](https://github.com/rust-lang/rust-clippy/pull/8127)
+* [`identity_op`]: Only lint on integral operands
+ [#8183](https://github.com/rust-lang/rust-clippy/pull/8183)
+
+### Suggestion Fixes/Improvements
+
+* [`search_is_some`]: Fix suggestion for `any()` not taking item by reference
+ [#7463](https://github.com/rust-lang/rust-clippy/pull/7463)
+* [`almost_swapped`]: Now detects if there is a `no_std` or `no_core` attribute
+ and adapts the suggestion accordingly
+ [#7877](https://github.com/rust-lang/rust-clippy/pull/7877)
+* [`redundant_pattern_matching`]: Fix suggestion for deref expressions
+ [#7949](https://github.com/rust-lang/rust-clippy/pull/7949)
+* [`explicit_counter_loop`]: Now also produces a suggestion for non-`usize`
+ types [#7950](https://github.com/rust-lang/rust-clippy/pull/7950)
+* [`manual_map`]: Fix suggestion when used with unsafe functions and blocks
+ [#7968](https://github.com/rust-lang/rust-clippy/pull/7968)
+* [`option_map_or_none`]: Suggest `map` over `and_then` when possible
+ [#7971](https://github.com/rust-lang/rust-clippy/pull/7971)
+* [`option_if_let_else`]: No longer expands macros in the suggestion
+ [#7974](https://github.com/rust-lang/rust-clippy/pull/7974)
+* [`iter_cloned_collect`]: Suggest `copied` over `cloned` when possible
+ [#8006](https://github.com/rust-lang/rust-clippy/pull/8006)
+* [`doc_markdown`]: No longer uses inline hints to improve readability of
+ suggestion [#8011](https://github.com/rust-lang/rust-clippy/pull/8011)
+* [`needless_question_mark`]: Now better explains the suggestion
+ [#8028](https://github.com/rust-lang/rust-clippy/pull/8028)
+* [`single_char_pattern`]: Escape backslash `\` in suggestion
+ [#8067](https://github.com/rust-lang/rust-clippy/pull/8067)
+* [`needless_bool`]: Suggest `a != b` over `!(a == b)`
+ [#8117](https://github.com/rust-lang/rust-clippy/pull/8117)
+* [`iter_skip_next`]: Suggests adding a `mut` if it is necessary in order to
+  apply this lint's suggestion
+ [#8133](https://github.com/rust-lang/rust-clippy/pull/8133)
+* [`neg_multiply`]: Now produces a suggestion
+ [#8144](https://github.com/rust-lang/rust-clippy/pull/8144)
+* [`needless_return`]: Now suggests the unit type `()` over an empty block `{}`
+ in match arms [#8185](https://github.com/rust-lang/rust-clippy/pull/8185)
+* [`suboptimal_flops`]: Now gives a syntactically correct suggestion for
+ `to_radians` and `to_degrees`
+ [#8187](https://github.com/rust-lang/rust-clippy/pull/8187)
+
+### ICE Fixes
+
+* [`undocumented_unsafe_blocks`]
+ [#7945](https://github.com/rust-lang/rust-clippy/pull/7945)
+ [#7988](https://github.com/rust-lang/rust-clippy/pull/7988)
+* [`unnecessary_cast`]
+ [#8167](https://github.com/rust-lang/rust-clippy/pull/8167)
+
+### Documentation Improvements
+
+* [`print_stdout`], [`print_stderr`], [`dbg_macro`]: Document how the lint level
+ can be changed crate-wide
+ [#8040](https://github.com/rust-lang/rust-clippy/pull/8040)
+* Added a note to the `README` that config changes don't apply to already
+ compiled code [#8175](https://github.com/rust-lang/rust-clippy/pull/8175)
+
+### Others
+
+* [Clippy's lint
+ list](https://rust-lang.github.io/rust-clippy/master/index.html) now displays
+ the version a lint was added. :tada:
+ [#7813](https://github.com/rust-lang/rust-clippy/pull/7813)
+* New and improved issue templates
+ [#8032](https://github.com/rust-lang/rust-clippy/pull/8032)
+* _Dev:_ Add `cargo dev lint` command, to run your modified Clippy version on a
+ file [#7917](https://github.com/rust-lang/rust-clippy/pull/7917)
+
+## Rust 1.58
+
+Released 2022-01-13
+
+[00e31fa...e181011](https://github.com/rust-lang/rust-clippy/compare/00e31fa...e181011)
+
+### Rust 1.58.1
+
+* Move [`non_send_fields_in_send_ty`] to `nursery` (now allow-by-default)
+ [#8075](https://github.com/rust-lang/rust-clippy/pull/8075)
+* [`useless_format`]: Handle implicit named arguments
+ [#8295](https://github.com/rust-lang/rust-clippy/pull/8295)
+
+### New lints
+
+* [`transmute_num_to_bytes`]
+ [#7805](https://github.com/rust-lang/rust-clippy/pull/7805)
+* [`match_str_case_mismatch`]
+ [#7806](https://github.com/rust-lang/rust-clippy/pull/7806)
+* [`format_in_format_args`], [`to_string_in_format_args`]
+ [#7743](https://github.com/rust-lang/rust-clippy/pull/7743)
+* [`uninit_vec`]
+ [#7682](https://github.com/rust-lang/rust-clippy/pull/7682)
+* [`fn_to_numeric_cast_any`]
+ [#7705](https://github.com/rust-lang/rust-clippy/pull/7705)
+* [`undocumented_unsafe_blocks`]
+ [#7748](https://github.com/rust-lang/rust-clippy/pull/7748)
+* [`trailing_empty_array`]
+ [#7838](https://github.com/rust-lang/rust-clippy/pull/7838)
+* [`string_slice`]
+ [#7878](https://github.com/rust-lang/rust-clippy/pull/7878)
+
+### Moves or deprecations of lints
+
+* Move [`non_send_fields_in_send_ty`] to `suspicious`
+ [#7874](https://github.com/rust-lang/rust-clippy/pull/7874)
+* Move [`non_ascii_literal`] to `restriction`
+ [#7907](https://github.com/rust-lang/rust-clippy/pull/7907)
+
+### Changes that expand what code existing lints cover
+
+* [`question_mark`] now covers `Result`
+ [#7840](https://github.com/rust-lang/rust-clippy/pull/7840)
+* Make [`useless_format`] recognize bare `format!("")`
+ [#7801](https://github.com/rust-lang/rust-clippy/pull/7801)
+* Lint on underscored variables with no side effects in [`no_effect`]
+ [#7775](https://github.com/rust-lang/rust-clippy/pull/7775)
+* Expand [`match_ref_pats`] to check for multiple reference patterns
+ [#7800](https://github.com/rust-lang/rust-clippy/pull/7800)
+
+### False positive fixes
+
+* Fix false positive of [`implicit_saturating_sub`] with `else` clause
+ [#7832](https://github.com/rust-lang/rust-clippy/pull/7832)
+* Fix [`question_mark`] when there is a call in the conditional predicate
+ [#7860](https://github.com/rust-lang/rust-clippy/pull/7860)
+* [`mut_mut`] no longer lints when type is defined in external macros
+ [#7795](https://github.com/rust-lang/rust-clippy/pull/7795)
+* Avoid [`eq_op`] in test functions
+ [#7811](https://github.com/rust-lang/rust-clippy/pull/7811)
+* [`cast_possible_truncation`] no longer lints when the cast comes from a `signum`
+ method call [#7850](https://github.com/rust-lang/rust-clippy/pull/7850)
+* [`match_str_case_mismatch`] no longer lints on uncased characters
+ [#7865](https://github.com/rust-lang/rust-clippy/pull/7865)
+* [`ptr_arg`] no longer lints references to type aliases
+ [#7890](https://github.com/rust-lang/rust-clippy/pull/7890)
+* [`missing_safety_doc`] now also accepts "implementation safety" headers
+ [#7856](https://github.com/rust-lang/rust-clippy/pull/7856)
+* [`missing_safety_doc`] no longer lints if any parent has `#[doc(hidden)]`
+ attribute [#7849](https://github.com/rust-lang/rust-clippy/pull/7849)
+* [`if_not_else`] now ignores else-if statements
+ [#7895](https://github.com/rust-lang/rust-clippy/pull/7895)
+* Avoid linting [`cast_possible_truncation`] on bit-reducing operations
+ [#7819](https://github.com/rust-lang/rust-clippy/pull/7819)
+* Avoid linting [`field_reassign_with_default`] when `Drop` and `Copy` are
+ involved [#7794](https://github.com/rust-lang/rust-clippy/pull/7794)
+* [`unnecessary_sort_by`] now checks if the argument implements the `Ord` trait
+ [#7824](https://github.com/rust-lang/rust-clippy/pull/7824)
+* Fix false positive in [`match_overlapping_arm`]
+ [#7847](https://github.com/rust-lang/rust-clippy/pull/7847)
+* Prevent [`needless_lifetimes`] false positive in `async` function definition
+ [#7901](https://github.com/rust-lang/rust-clippy/pull/7901)
+
+### Suggestion fixes/improvements
+
+* Keep an initial `::` when [`doc_markdown`] suggests using ticks
+ [#7916](https://github.com/rust-lang/rust-clippy/pull/7916)
+* Add a machine applicable suggestion for the [`doc_markdown`] missing backticks
+ lint [#7904](https://github.com/rust-lang/rust-clippy/pull/7904)
+* [`equatable_if_let`] no longer expands macros in the suggestion
+ [#7788](https://github.com/rust-lang/rust-clippy/pull/7788)
+* Make [`shadow_reuse`] suggestion less verbose
+ [#7782](https://github.com/rust-lang/rust-clippy/pull/7782)
+
+### ICE fixes
+
+* Fix ICE in [`enum_variant_names`]
+ [#7873](https://github.com/rust-lang/rust-clippy/pull/7873)
+* Fix ICE in [`undocumented_unsafe_blocks`]
+ [#7891](https://github.com/rust-lang/rust-clippy/pull/7891)
+
+### Documentation improvements
+
+* Fixed naive doc formatting for `#[must_use]` lints ([`must_use_unit`],
+ [`double_must_use`], [`must_use_candidate`], [`let_underscore_must_use`])
+ [#7827](https://github.com/rust-lang/rust-clippy/pull/7827)
+* Fix typo in example for [`match_result_ok`]
+ [#7815](https://github.com/rust-lang/rust-clippy/pull/7815)
+
+### Others
+
+* Allow giving reasons for [`disallowed_types`]
+ [#7791](https://github.com/rust-lang/rust-clippy/pull/7791)
+* Fix [`manual_assert`] and [`match_wild_err_arm`] for `#![no_std]` and Rust
+ 2021. [#7851](https://github.com/rust-lang/rust-clippy/pull/7851)
+* Fix regression in [`semicolon_if_nothing_returned`] on macros containing while
+ loops [#7789](https://github.com/rust-lang/rust-clippy/pull/7789)
+* Added a new configuration `literal-suffix-style` to enforce a certain style for
+  writing literal suffixes, used by [`unseparated_literal_suffix`]
+ [#7726](https://github.com/rust-lang/rust-clippy/pull/7726)
+
+## Rust 1.57
+
+Released 2021-12-02
+
+[7bfc26e...00e31fa](https://github.com/rust-lang/rust-clippy/compare/7bfc26e...00e31fa)
+
+### New Lints
+
+* [`negative_feature_names`]
+ [#7539](https://github.com/rust-lang/rust-clippy/pull/7539)
+* [`redundant_feature_names`]
+ [#7539](https://github.com/rust-lang/rust-clippy/pull/7539)
+* [`mod_module_files`]
+ [#7543](https://github.com/rust-lang/rust-clippy/pull/7543)
+* [`self_named_module_files`]
+ [#7543](https://github.com/rust-lang/rust-clippy/pull/7543)
+* [`manual_split_once`]
+ [#7565](https://github.com/rust-lang/rust-clippy/pull/7565)
+* [`derivable_impls`]
+ [#7570](https://github.com/rust-lang/rust-clippy/pull/7570)
+* [`needless_option_as_deref`]
+ [#7596](https://github.com/rust-lang/rust-clippy/pull/7596)
+* [`iter_not_returning_iterator`]
+ [#7610](https://github.com/rust-lang/rust-clippy/pull/7610)
+* [`same_name_method`]
+ [#7653](https://github.com/rust-lang/rust-clippy/pull/7653)
+* [`manual_assert`] [#7669](https://github.com/rust-lang/rust-clippy/pull/7669)
+* [`non_send_fields_in_send_ty`]
+ [#7709](https://github.com/rust-lang/rust-clippy/pull/7709)
+* [`equatable_if_let`]
+ [#7762](https://github.com/rust-lang/rust-clippy/pull/7762)
+
+### Moves and Deprecations
+
+* Move [`shadow_unrelated`] to `restriction`
+ [#7338](https://github.com/rust-lang/rust-clippy/pull/7338)
+* Move [`option_if_let_else`] to `nursery`
+ [#7568](https://github.com/rust-lang/rust-clippy/pull/7568)
+* Move [`branches_sharing_code`] to `nursery`
+ [#7595](https://github.com/rust-lang/rust-clippy/pull/7595)
+* Rename `if_let_some_result` to [`match_result_ok`] which now also handles
+ `while let` cases [#7608](https://github.com/rust-lang/rust-clippy/pull/7608)
+* Move [`many_single_char_names`] to `pedantic`
+ [#7671](https://github.com/rust-lang/rust-clippy/pull/7671)
+* Move [`float_cmp`] to `pedantic`
+ [#7692](https://github.com/rust-lang/rust-clippy/pull/7692)
+* Rename `box_vec` to [`box_collection`] and lint on more general cases
+ [#7693](https://github.com/rust-lang/rust-clippy/pull/7693)
+* Uplift `invalid_atomic_ordering` to rustc
+ [rust-lang/rust#84039](https://github.com/rust-lang/rust/pull/84039)
+
+### Enhancements
+
+* Rewrite the `shadow*` lints, so that they find a lot more shadows and are not
+ limited to certain patterns
+ [#7338](https://github.com/rust-lang/rust-clippy/pull/7338)
+* The `avoid-breaking-exported-api` configuration now also works for
+ [`box_collection`], [`redundant_allocation`], [`rc_buffer`], [`vec_box`],
+ [`option_option`], [`linkedlist`], [`rc_mutex`]
+ [#7560](https://github.com/rust-lang/rust-clippy/pull/7560)
+* [`unnecessary_unwrap`]: Now also checks for `expect`s
+ [#7584](https://github.com/rust-lang/rust-clippy/pull/7584)
+* [`disallowed_methods`]: Allow adding a reason that will be displayed with the
+ lint message
+ [#7621](https://github.com/rust-lang/rust-clippy/pull/7621)
+* [`approx_constant`]: Now checks the MSRV for `LOG10_2` and `LOG2_10`
+ [#7629](https://github.com/rust-lang/rust-clippy/pull/7629)
+* [`approx_constant`]: Add `TAU`
+ [#7642](https://github.com/rust-lang/rust-clippy/pull/7642)
+* [`needless_borrow`]: Now also lints on needless mutable borrows
+ [#7657](https://github.com/rust-lang/rust-clippy/pull/7657)
+* [`missing_safety_doc`]: Now also lints on unsafe traits
+ [#7734](https://github.com/rust-lang/rust-clippy/pull/7734)
+
+### False Positive Fixes
+
+* [`manual_map`]: No longer lints when the option is borrowed in the match and
+ also consumed in the arm
+ [#7531](https://github.com/rust-lang/rust-clippy/pull/7531)
+* [`filter_next`]: No longer lints if `filter` method is not the
+ `Iterator::filter` method
+ [#7562](https://github.com/rust-lang/rust-clippy/pull/7562)
+* [`manual_flatten`]: No longer lints if expression is used after `if let`
+ [#7566](https://github.com/rust-lang/rust-clippy/pull/7566)
+* [`option_if_let_else`]: Multiple fixes
+ [#7573](https://github.com/rust-lang/rust-clippy/pull/7573)
+ * `break` and `continue` statements local to the would-be closure are
+ allowed
+ * Don't lint in const contexts
+ * Don't lint when yield expressions are used
+ * Don't lint when the captures made by the would-be closure conflict with
+ the other branch
+ * Don't lint when a field of a local is used when the type could be
+ potentially moved from
+ * In some cases, don't lint when scrutinee expression conflicts with the
+ captures of the would-be closure
+* [`redundant_allocation`]: No longer lints on `Box<Box<dyn T>>` which replaces
+ wide pointers with thin pointers
+ [#7592](https://github.com/rust-lang/rust-clippy/pull/7592)
+* [`bool_assert_comparison`]: No longer lints on types that do not implement the
+ `Not` trait with `Output = bool`
+ [#7605](https://github.com/rust-lang/rust-clippy/pull/7605)
+* [`mut_range_bound`]: No longer lints on range bound mutations that are
+ immediately followed by a `break;`
+ [#7607](https://github.com/rust-lang/rust-clippy/pull/7607)
+* [`mutable_key_type`]: Improve accuracy and document remaining false positives
+ and false negatives
+ [#7640](https://github.com/rust-lang/rust-clippy/pull/7640)
+* [`redundant_closure`]: Rewrite the lint to fix various false positives and
+ false negatives [#7661](https://github.com/rust-lang/rust-clippy/pull/7661)
+* [`large_enum_variant`]: No longer wrongly identifies the second largest
+ variant [#7677](https://github.com/rust-lang/rust-clippy/pull/7677)
+* [`needless_return`]: No longer lints on let-else expressions
+ [#7685](https://github.com/rust-lang/rust-clippy/pull/7685)
+* [`suspicious_else_formatting`]: No longer lints in proc-macros
+ [#7707](https://github.com/rust-lang/rust-clippy/pull/7707)
+* [`excessive_precision`]: No longer lints in some cases when the float was
+  already written in the shortest form
+ [#7722](https://github.com/rust-lang/rust-clippy/pull/7722)
+* [`doc_markdown`]: No longer lints on intra-doc links
+ [#7772](https://github.com/rust-lang/rust-clippy/pull/7772)
+
+### Suggestion Fixes/Improvements
+
+* [`unnecessary_operation`]: Recommend using an `assert!` instead of using a
+ function call in an indexing operation
+ [#7453](https://github.com/rust-lang/rust-clippy/pull/7453)
+* [`manual_split_once`]: Produce semantically equivalent suggestion when
+ `rsplitn` is used [#7663](https://github.com/rust-lang/rust-clippy/pull/7663)
+* [`while_let_on_iterator`]: Produce correct suggestion when using `&mut`
+ [#7690](https://github.com/rust-lang/rust-clippy/pull/7690)
+* [`manual_assert`]: Now better handles complex conditions
+ [#7741](https://github.com/rust-lang/rust-clippy/pull/7741)
+* Correctly handle signs in exponents in numeric literals lints
+ [#7747](https://github.com/rust-lang/rust-clippy/pull/7747)
+* [`suspicious_map`]: Now also suggests using `inspect` as an alternative
+ [#7770](https://github.com/rust-lang/rust-clippy/pull/7770)
+* Drop exponent from suggestion if it is 0 in numeric literals lints
+ [#7774](https://github.com/rust-lang/rust-clippy/pull/7774)
+
+### ICE Fixes
+
+* [`implicit_hasher`]
+ [#7761](https://github.com/rust-lang/rust-clippy/pull/7761)
+
+### Others
+
+* Clippy now uses the 2021
+ [Edition!](https://www.youtube.com/watch?v=q0aNduqb2Ro)
+ [#7664](https://github.com/rust-lang/rust-clippy/pull/7664)
+
+## Rust 1.56
+
+Released 2021-10-21
+
+[74d1561...7bfc26e](https://github.com/rust-lang/rust-clippy/compare/74d1561...7bfc26e)
+
+### New Lints
+
+* [`unwrap_or_else_default`]
+ [#7516](https://github.com/rust-lang/rust-clippy/pull/7516)
+
+### Enhancements
+
+* [`needless_continue`]: Now also lints in `loop { continue; }` case
+ [#7477](https://github.com/rust-lang/rust-clippy/pull/7477)
+* [`disallowed_types`]: Primitive types can now also be disallowed
+ [#7488](https://github.com/rust-lang/rust-clippy/pull/7488)
+* [`manual_swap`]: Now also lints on xor swaps (see the sketch after this list)
+ [#7506](https://github.com/rust-lang/rust-clippy/pull/7506)
+* [`map_flatten`]: Now also lints on the `Result` type
+ [#7522](https://github.com/rust-lang/rust-clippy/pull/7522)
+* [`no_effect`]: Now also lints on inclusive ranges
+ [#7556](https://github.com/rust-lang/rust-clippy/pull/7556)
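+
+As a rough illustration of the `manual_swap` entry above (an editorial sketch,
+not taken from the release notes; the function name `swap_in_place` is made up),
+the lint now also recognizes the hand-written XOR-swap idiom:
+
+```rust
+fn swap_in_place(a: &mut u32, b: &mut u32) {
+    // Flagged: XOR swap written out by hand.
+    *a ^= *b;
+    *b ^= *a;
+    *a ^= *b;
+
+    // Suggested replacement:
+    // std::mem::swap(a, b);
+}
+```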
+
+### False Positive Fixes
+
+* [`nonstandard_macro_braces`]: No longer lints on similarly named nested macros
+ [#7478](https://github.com/rust-lang/rust-clippy/pull/7478)
+* [`too_many_lines`]: No longer lints in closures to avoid duplicated diagnostics
+ [#7534](https://github.com/rust-lang/rust-clippy/pull/7534)
+* [`similar_names`]: No longer complains about `iter` and `item` being too
+ similar [#7546](https://github.com/rust-lang/rust-clippy/pull/7546)
+
+### Suggestion Fixes/Improvements
+
+* [`similar_names`]: No longer suggests inserting or adding an underscore as a fix
+ [#7221](https://github.com/rust-lang/rust-clippy/pull/7221)
+* [`new_without_default`]: No longer shows the fully qualified type path when
+ suggesting adding a `Default` implementation
+ [#7493](https://github.com/rust-lang/rust-clippy/pull/7493)
+* [`while_let_on_iterator`]: Now suggests re-borrowing mutable references
+ [#7520](https://github.com/rust-lang/rust-clippy/pull/7520)
+* [`extend_with_drain`]: Improve code suggestion for mutable and immutable
+ references [#7533](https://github.com/rust-lang/rust-clippy/pull/7533)
+* [`trivially_copy_pass_by_ref`]: Now properly handles `Self` type
+ [#7535](https://github.com/rust-lang/rust-clippy/pull/7535)
+* [`never_loop`]: Now suggests using `if let` instead of a `for` loop when
+ applicable [#7541](https://github.com/rust-lang/rust-clippy/pull/7541)
+
+### Documentation Improvements
+
+* Clippy now uses a lint to generate its lint documentation. [Lints all the way
+ down](https://en.wikipedia.org/wiki/Turtles_all_the_way_down).
+ [#7502](https://github.com/rust-lang/rust-clippy/pull/7502)
+* Reworked Clippy's website:
+ [#7172](https://github.com/rust-lang/rust-clippy/issues/7172)
+ [#7279](https://github.com/rust-lang/rust-clippy/pull/7279)
+ * Added applicability information about lints
+ * Added a link to jump into the implementation
+ * Improved loading times
+ * Adapted some styling
+* `cargo clippy --help` now also explains the `--fix` and `--no-deps` flags
+ [#7492](https://github.com/rust-lang/rust-clippy/pull/7492)
+* [`unnested_or_patterns`]: Removed `or_patterns` feature gate in the code
+ example [#7507](https://github.com/rust-lang/rust-clippy/pull/7507)
+
+## Rust 1.55
+
+Released 2021-09-09
+
+[3ae8faf...74d1561](https://github.com/rust-lang/rust-clippy/compare/3ae8faf...74d1561)
+
+### Important Changes
+
+* Stabilized `cargo clippy --fix` :tada:
+ [#7405](https://github.com/rust-lang/rust-clippy/pull/7405)
+
+### New Lints
+
+* [`rc_mutex`]
+ [#7316](https://github.com/rust-lang/rust-clippy/pull/7316)
+* [`nonstandard_macro_braces`]
+ [#7299](https://github.com/rust-lang/rust-clippy/pull/7299)
+* [`strlen_on_c_strings`]
+ [#7243](https://github.com/rust-lang/rust-clippy/pull/7243)
+* [`self_named_constructors`]
+ [#7403](https://github.com/rust-lang/rust-clippy/pull/7403)
+* [`disallowed_script_idents`]
+ [#7400](https://github.com/rust-lang/rust-clippy/pull/7400)
+* [`disallowed_types`]
+ [#7315](https://github.com/rust-lang/rust-clippy/pull/7315)
+* [`missing_enforced_import_renames`]
+ [#7300](https://github.com/rust-lang/rust-clippy/pull/7300)
+* [`extend_with_drain`]
+ [#7270](https://github.com/rust-lang/rust-clippy/pull/7270)
+
+### Moves and Deprecations
+
+* Moved [`from_iter_instead_of_collect`] to `pedantic`
+ [#7375](https://github.com/rust-lang/rust-clippy/pull/7375)
+* Added `suspicious` as a new lint group for *code that is most likely wrong or useless*
+ [#7350](https://github.com/rust-lang/rust-clippy/pull/7350)
+ * Moved [`blanket_clippy_restriction_lints`] to `suspicious`
+ * Moved [`empty_loop`] to `suspicious`
+ * Moved [`eval_order_dependence`] to `suspicious`
+ * Moved [`float_equality_without_abs`] to `suspicious`
+ * Moved [`for_loops_over_fallibles`] to `suspicious`
+ * Moved [`misrefactored_assign_op`] to `suspicious`
+ * Moved [`mut_range_bound`] to `suspicious`
+ * Moved [`mutable_key_type`] to `suspicious`
+ * Moved [`suspicious_arithmetic_impl`] to `suspicious`
+ * Moved [`suspicious_assignment_formatting`] to `suspicious`
+ * Moved [`suspicious_else_formatting`] to `suspicious`
+ * Moved [`suspicious_map`] to `suspicious`
+ * Moved [`suspicious_op_assign_impl`] to `suspicious`
+ * Moved [`suspicious_unary_op_formatting`] to `suspicious`
+
+### Enhancements
+
+* [`while_let_on_iterator`]: Now suggests `&mut iter` inside closures
+ [#7262](https://github.com/rust-lang/rust-clippy/pull/7262)
+* [`doc_markdown`]:
+ * Now detects unbalanced ticks
+ [#7357](https://github.com/rust-lang/rust-clippy/pull/7357)
+ * Add `FreeBSD` to the default configuration as an allowed identifier
+ [#7334](https://github.com/rust-lang/rust-clippy/pull/7334)
+* [`wildcard_enum_match_arm`], [`match_wildcard_for_single_variants`]: Now allows wildcards for enums with unstable
+ or hidden variants
+ [#7407](https://github.com/rust-lang/rust-clippy/pull/7407)
+* [`redundant_allocation`]: Now additionally supports the `Arc<>` type
+ [#7308](https://github.com/rust-lang/rust-clippy/pull/7308)
+* [`blacklisted_name`]: Now allows blacklisted names in test code
+ [#7379](https://github.com/rust-lang/rust-clippy/pull/7379)
+* [`redundant_closure`]: Suggests `&mut` for `FnMut`
+ [#7437](https://github.com/rust-lang/rust-clippy/pull/7437)
+* [`disallowed_methods`], [`disallowed_types`]: The configuration values `disallowed-method` and `disallowed-type`
+ no longer require fully qualified paths
+ [#7345](https://github.com/rust-lang/rust-clippy/pull/7345)
+* [`zst_offset`]: Fixed lint invocation after it was accidentally suppressed
+ [#7396](https://github.com/rust-lang/rust-clippy/pull/7396)
+
+### False Positive Fixes
+
+* [`default_numeric_fallback`]: No longer lints on float literals as function arguments
+ [#7446](https://github.com/rust-lang/rust-clippy/pull/7446)
+* [`use_self`]: No longer lints on type parameters
+ [#7288](https://github.com/rust-lang/rust-clippy/pull/7288)
+* [`unimplemented`]: Now ignores the `assert` and `debug_assert` macros
+ [#7439](https://github.com/rust-lang/rust-clippy/pull/7439)
+* [`branches_sharing_code`]: Now always checks for block expressions
+ [#7462](https://github.com/rust-lang/rust-clippy/pull/7462)
+* [`field_reassign_with_default`]: No longer triggers in macros
+ [#7160](https://github.com/rust-lang/rust-clippy/pull/7160)
+* [`redundant_clone`]: No longer lints on required clones for borrowed data
+ [#7346](https://github.com/rust-lang/rust-clippy/pull/7346)
+* [`default_numeric_fallback`]: No longer triggers in external macros
+ [#7325](https://github.com/rust-lang/rust-clippy/pull/7325)
+* [`needless_bool`]: No longer lints in macros
+ [#7442](https://github.com/rust-lang/rust-clippy/pull/7442)
+* [`useless_format`]: No longer triggers when additional text is being appended
+ [#7442](https://github.com/rust-lang/rust-clippy/pull/7442)
+* [`assertions_on_constants`]: `cfg!(...)` is no longer considered to be a constant
+ [#7319](https://github.com/rust-lang/rust-clippy/pull/7319)
+
+### Suggestion Fixes/Improvements
+
+* [`needless_collect`]: Now shows correct lint messages for shadowed values
+ [#7289](https://github.com/rust-lang/rust-clippy/pull/7289)
+* [`wrong_pub_self_convention`]: The deprecation message now suggests the correct configuration value
+ [#7382](https://github.com/rust-lang/rust-clippy/pull/7382)
+* [`semicolon_if_nothing_returned`]: Allow missing semicolon in blocks with only one expression
+ [#7326](https://github.com/rust-lang/rust-clippy/pull/7326)
+
+### ICE Fixes
+
+* [`zero_sized_map_values`]
+ [#7470](https://github.com/rust-lang/rust-clippy/pull/7470)
+* [`redundant_pattern_matching`]
+ [#7471](https://github.com/rust-lang/rust-clippy/pull/7471)
+* [`modulo_one`]
+ [#7473](https://github.com/rust-lang/rust-clippy/pull/7473)
+* [`use_self`]
+ [#7428](https://github.com/rust-lang/rust-clippy/pull/7428)
+
+## Rust 1.54
+
+Released 2021-07-29
+
+[7c7683c...3ae8faf](https://github.com/rust-lang/rust-clippy/compare/7c7683c...3ae8faf)
+
+### New Lints
+
+- [`ref_binding_to_reference`]
+ [#7105](https://github.com/rust-lang/rust-clippy/pull/7105)
+- [`needless_bitwise_bool`]
+ [#7133](https://github.com/rust-lang/rust-clippy/pull/7133)
+- [`unused_async`] [#7225](https://github.com/rust-lang/rust-clippy/pull/7225)
+- [`manual_str_repeat`]
+ [#7265](https://github.com/rust-lang/rust-clippy/pull/7265)
+- [`suspicious_splitn`]
+ [#7292](https://github.com/rust-lang/rust-clippy/pull/7292)
+
+### Moves and Deprecations
+
+- Deprecate `pub_enum_variant_names` and `wrong_pub_self_convention` in favor of
+ the new `avoid-breaking-exported-api` config option (see
+ [Enhancements](#1-54-enhancements))
+ [#7187](https://github.com/rust-lang/rust-clippy/pull/7187)
+- Move [`inconsistent_struct_constructor`] to `pedantic`
+ [#7193](https://github.com/rust-lang/rust-clippy/pull/7193)
+- Move [`needless_borrow`] to `style` (now warn-by-default)
+ [#7254](https://github.com/rust-lang/rust-clippy/pull/7254)
+- Move [`suspicious_operation_groupings`] to `nursery`
+ [#7266](https://github.com/rust-lang/rust-clippy/pull/7266)
+- Move [`semicolon_if_nothing_returned`] to `pedantic`
+ [#7268](https://github.com/rust-lang/rust-clippy/pull/7268)
+
+### Enhancements <a name="1-54-enhancements"></a>
+
+- [`while_let_on_iterator`]: Now also lints in nested loops
+ [#6966](https://github.com/rust-lang/rust-clippy/pull/6966)
+- [`single_char_pattern`]: Now also lints on `strip_prefix` and `strip_suffix`
+ [#7156](https://github.com/rust-lang/rust-clippy/pull/7156)
+- [`needless_collect`]: Now also lints on assignments with type annotations
+ [#7163](https://github.com/rust-lang/rust-clippy/pull/7163)
+- [`if_then_some_else_none`]: Now works with the MSRV config
+ [#7177](https://github.com/rust-lang/rust-clippy/pull/7177)
+- Add `avoid-breaking-exported-api` config option for the lints
+ [`enum_variant_names`], [`large_types_passed_by_value`],
+ [`trivially_copy_pass_by_ref`], [`unnecessary_wraps`],
+  [`upper_case_acronyms`], and [`wrong_self_convention`]. We recommend setting
+ this configuration option to `false` before a major release (1.0/2.0/...) to
+ clean up the API [#7187](https://github.com/rust-lang/rust-clippy/pull/7187)
+- [`needless_collect`]: Now lints on even more data structures
+ [#7188](https://github.com/rust-lang/rust-clippy/pull/7188)
+- [`missing_docs_in_private_items`]: No longer sees `#[<name> = "<value>"]`-like
+ attributes as sufficient documentation
+ [#7281](https://github.com/rust-lang/rust-clippy/pull/7281)
+- [`needless_collect`], [`short_circuit_statement`], [`unnecessary_operation`]:
+ Now work as expected when used with `allow`
+ [#7282](https://github.com/rust-lang/rust-clippy/pull/7282)
+
+### False Positive Fixes
+
+- [`implicit_return`]: Now takes all diverging functions into account to avoid
+ false positives [#6951](https://github.com/rust-lang/rust-clippy/pull/6951)
+- [`while_let_on_iterator`]: No longer lints when the iterator is a struct field
+ and the struct is used in the loop
+ [#6966](https://github.com/rust-lang/rust-clippy/pull/6966)
+- [`multiple_inherent_impl`]: No longer lints with generic arguments
+ [#7089](https://github.com/rust-lang/rust-clippy/pull/7089)
+- [`comparison_chain`]: No longer lints in a `const` context
+ [#7118](https://github.com/rust-lang/rust-clippy/pull/7118)
+- [`while_immutable_condition`]: Fix false positive where mutation in the loop
+ variable wasn't picked up
+ [#7144](https://github.com/rust-lang/rust-clippy/pull/7144)
+- [`default_trait_access`]: No longer lints in macros
+ [#7150](https://github.com/rust-lang/rust-clippy/pull/7150)
+- [`needless_question_mark`]: No longer lints when the inner value is implicitly
+ dereferenced [#7165](https://github.com/rust-lang/rust-clippy/pull/7165)
+- [`unused_unit`]: No longer lints when multiple macro contexts are involved
+ [#7167](https://github.com/rust-lang/rust-clippy/pull/7167)
+- [`eval_order_dependence`]: Fix false positive in async context
+ [#7174](https://github.com/rust-lang/rust-clippy/pull/7174)
+- [`unnecessary_filter_map`]: No longer lints if the `filter_map` changes the
+ type [#7175](https://github.com/rust-lang/rust-clippy/pull/7175)
+- [`wrong_self_convention`]: No longer lints in trait implementations of
+ non-`Copy` types [#7182](https://github.com/rust-lang/rust-clippy/pull/7182)
+- [`suboptimal_flops`]: No longer lints on `powi(2)`
+ [#7201](https://github.com/rust-lang/rust-clippy/pull/7201)
+- [`wrong_self_convention`]: No longer lints if there is no implicit `self`
+ [#7215](https://github.com/rust-lang/rust-clippy/pull/7215)
+- [`option_if_let_else`]: No longer lints on `else if let` pattern
+ [#7216](https://github.com/rust-lang/rust-clippy/pull/7216)
+- [`use_self`], [`useless_conversion`]: Fix false positives when generic
+ arguments are involved
+ [#7223](https://github.com/rust-lang/rust-clippy/pull/7223)
+- [`manual_unwrap_or`]: Fix false positive with deref coercion
+ [#7233](https://github.com/rust-lang/rust-clippy/pull/7233)
+- [`similar_names`]: No longer lints on `wparam`/`lparam`
+ [#7255](https://github.com/rust-lang/rust-clippy/pull/7255)
+- [`redundant_closure`]: No longer lints on using the `vec![]` macro in a
+ closure [#7263](https://github.com/rust-lang/rust-clippy/pull/7263)
+
+### Suggestion Fixes/Improvements
+
+- [`implicit_return`]
+ [#6951](https://github.com/rust-lang/rust-clippy/pull/6951)
+ - Fix suggestion for async functions
+ - Improve suggestion with macros
+ - Suggest to change `break` to `return` when appropriate
+- [`while_let_on_iterator`]: Now suggests `&mut iter` when necessary
+ [#6966](https://github.com/rust-lang/rust-clippy/pull/6966)
+- [`match_single_binding`]: Improve suggestion when match scrutinee has side
+ effects [#7095](https://github.com/rust-lang/rust-clippy/pull/7095)
+- [`needless_borrow`]: Now suggests also changing usage sites as needed
+ [#7105](https://github.com/rust-lang/rust-clippy/pull/7105)
+- [`write_with_newline`]: Improve suggestion when only `\n` is written to the
+ buffer [#7183](https://github.com/rust-lang/rust-clippy/pull/7183)
+- [`from_iter_instead_of_collect`]: The suggestion is now auto-applicable also
+ when a `<_ as Trait>::_` is involved
+ [#7264](https://github.com/rust-lang/rust-clippy/pull/7264)
+- [`not_unsafe_ptr_arg_deref`]: Improved error message
+ [#7294](https://github.com/rust-lang/rust-clippy/pull/7294)
+
+### ICE Fixes
+
+- Fix ICE when running Clippy on `libstd`
+ [#7140](https://github.com/rust-lang/rust-clippy/pull/7140)
+- [`implicit_return`]
+ [#7242](https://github.com/rust-lang/rust-clippy/pull/7242)
+
+## Rust 1.53
+
+Released 2021-06-17
+
+[6ed6f1e...7c7683c](https://github.com/rust-lang/rust-clippy/compare/6ed6f1e...7c7683c)
+
+### New Lints
+
+* [`option_filter_map`]
+ [#6342](https://github.com/rust-lang/rust-clippy/pull/6342)
+* [`branches_sharing_code`]
+ [#6463](https://github.com/rust-lang/rust-clippy/pull/6463)
+* [`needless_for_each`]
+ [#6706](https://github.com/rust-lang/rust-clippy/pull/6706)
+* [`if_then_some_else_none`]
+ [#6859](https://github.com/rust-lang/rust-clippy/pull/6859)
+* [`non_octal_unix_permissions`]
+ [#7001](https://github.com/rust-lang/rust-clippy/pull/7001)
+* [`unnecessary_self_imports`]
+ [#7072](https://github.com/rust-lang/rust-clippy/pull/7072)
+* [`bool_assert_comparison`]
+ [#7083](https://github.com/rust-lang/rust-clippy/pull/7083)
+* [`cloned_instead_of_copied`]
+ [#7098](https://github.com/rust-lang/rust-clippy/pull/7098)
+* [`flat_map_option`]
+ [#7101](https://github.com/rust-lang/rust-clippy/pull/7101)
+
+### Moves and Deprecations
+
+* Deprecate [`filter_map`] lint
+ [#7059](https://github.com/rust-lang/rust-clippy/pull/7059)
+* Move [`transmute_ptr_to_ptr`] to `pedantic`
+ [#7102](https://github.com/rust-lang/rust-clippy/pull/7102)
+
+### Enhancements
+
+* [`mem_replace_with_default`]: Also lint on common std constructors
+ [#6820](https://github.com/rust-lang/rust-clippy/pull/6820)
+* [`wrong_self_convention`]: Also lint on `to_*_mut` methods
+ [#6828](https://github.com/rust-lang/rust-clippy/pull/6828)
+* [`wildcard_enum_match_arm`], [`match_wildcard_for_single_variants`]:
+ [#6863](https://github.com/rust-lang/rust-clippy/pull/6863)
+ * Attempt to find a common path prefix in suggestion
+ * Don't lint on `Option` and `Result`
+ * Consider `Self` prefix
+* [`explicit_deref_methods`]: Also lint on chained `deref` calls
+ [#6865](https://github.com/rust-lang/rust-clippy/pull/6865)
+* [`or_fun_call`]: Also lint on `unsafe` blocks
+ [#6928](https://github.com/rust-lang/rust-clippy/pull/6928)
+* [`vec_box`], [`linkedlist`], [`option_option`]: Also lint in `const` and
+ `static` items [#6938](https://github.com/rust-lang/rust-clippy/pull/6938)
+* [`search_is_some`]: Also check for `is_none`
+ [#6942](https://github.com/rust-lang/rust-clippy/pull/6942)
+* [`string_lit_as_bytes`]: Also lint on `into_bytes`
+ [#6959](https://github.com/rust-lang/rust-clippy/pull/6959)
+* [`len_without_is_empty`]: Also lint if function signatures of `len` and
+ `is_empty` don't match
+ [#6980](https://github.com/rust-lang/rust-clippy/pull/6980)
+* [`redundant_pattern_matching`]: Also lint if the pattern is a `&` pattern
+ [#6991](https://github.com/rust-lang/rust-clippy/pull/6991)
+* [`clone_on_copy`]: Also lint on chained method calls taking `self` by value
+ [#7000](https://github.com/rust-lang/rust-clippy/pull/7000)
+* [`missing_panics_doc`]: Also lint on `assert_eq!` and `assert_ne!`
+ [#7029](https://github.com/rust-lang/rust-clippy/pull/7029)
+* [`needless_return`]: Also lint in `async` functions
+ [#7067](https://github.com/rust-lang/rust-clippy/pull/7067)
+* [`unused_io_amount`]: Also lint on expressions like `_.read().ok()?`
+ [#7100](https://github.com/rust-lang/rust-clippy/pull/7100)
+* [`iter_cloned_collect`]: Also lint on large arrays, since const-generics are
+ now stable [#7138](https://github.com/rust-lang/rust-clippy/pull/7138)
+
+### False Positive Fixes
+
+* [`upper_case_acronyms`]: No longer lints on public items
+ [#6805](https://github.com/rust-lang/rust-clippy/pull/6805)
+* [`suspicious_map`]: No longer lints when side effects may occur inside the
+ `map` call [#6831](https://github.com/rust-lang/rust-clippy/pull/6831)
+* [`manual_map`], [`manual_unwrap_or`]: No longer lints in `const` functions
+ [#6917](https://github.com/rust-lang/rust-clippy/pull/6917)
+* [`wrong_self_convention`]: Now respects `Copy` types
+ [#6924](https://github.com/rust-lang/rust-clippy/pull/6924)
+* [`needless_question_mark`]: No longer lints if the `?` and the `Some(..)` come
+ from different macro contexts [#6935](https://github.com/rust-lang/rust-clippy/pull/6935)
+* [`map_entry`]: Better detect if the entry API can be used
+ [#6937](https://github.com/rust-lang/rust-clippy/pull/6937)
+* [`or_fun_call`]: No longer lints on some `len` function calls
+ [#6950](https://github.com/rust-lang/rust-clippy/pull/6950)
+* [`new_ret_no_self`]: No longer lints when `Self` is returned with different
+ generic arguments [#6952](https://github.com/rust-lang/rust-clippy/pull/6952)
+* [`upper_case_acronyms`]: No longer lints on public items
+ [#6981](https://github.com/rust-lang/rust-clippy/pull/6981)
+* [`explicit_into_iter_loop`]: Only lint when `into_iter` is an implementation
+ of `IntoIterator` [#6982](https://github.com/rust-lang/rust-clippy/pull/6982)
+* [`expl_impl_clone_on_copy`]: Take generic constraints into account before
+ suggesting to use `derive` instead
+ [#6993](https://github.com/rust-lang/rust-clippy/pull/6993)
+* [`missing_panics_doc`]: No longer lints when only debug-assertions are used
+ [#6996](https://github.com/rust-lang/rust-clippy/pull/6996)
+* [`clone_on_copy`]: Only lint when using the `Clone` trait
+ [#7000](https://github.com/rust-lang/rust-clippy/pull/7000)
+* [`wrong_self_convention`]: No longer lints inside a trait implementation
+ [#7002](https://github.com/rust-lang/rust-clippy/pull/7002)
+* [`redundant_clone`]: No longer lints when the cloned value is modified while
+ the clone is in use
+ [#7011](https://github.com/rust-lang/rust-clippy/pull/7011)
+* [`same_item_push`]: No longer lints if the `Vec` is used in the loop body
+ [#7018](https://github.com/rust-lang/rust-clippy/pull/7018)
+* [`cargo_common_metadata`]: Remove author requirement
+ [#7026](https://github.com/rust-lang/rust-clippy/pull/7026)
+* [`panic_in_result_fn`]: No longer lints on `debug_assert` family
+ [#7060](https://github.com/rust-lang/rust-clippy/pull/7060)
+* [`panic`]: No longer wrongfully lints on `debug_assert` with message
+ [#7063](https://github.com/rust-lang/rust-clippy/pull/7063)
+* [`wrong_self_convention`]: No longer lints in trait implementations where no
+ `self` is involved [#7064](https://github.com/rust-lang/rust-clippy/pull/7064)
+* [`missing_const_for_fn`]: No longer lints when unstable `const` function is
+ involved [#7076](https://github.com/rust-lang/rust-clippy/pull/7076)
+* [`suspicious_else_formatting`]: Allow Allman style braces
+ [#7087](https://github.com/rust-lang/rust-clippy/pull/7087)
+* [`inconsistent_struct_constructor`]: No longer lints in macros
+ [#7097](https://github.com/rust-lang/rust-clippy/pull/7097)
+* [`single_component_path_imports`]: No longer lints on macro re-exports
+ [#7120](https://github.com/rust-lang/rust-clippy/pull/7120)
+
+### Suggestion Fixes/Improvements
+
+* [`redundant_pattern_matching`]: Add a note when applying this lint would
+ change the drop order
+ [#6568](https://github.com/rust-lang/rust-clippy/pull/6568)
+* [`write_literal`], [`print_literal`]: Add auto-applicable suggestion
+ [#6821](https://github.com/rust-lang/rust-clippy/pull/6821)
+* [`manual_map`]: Fix suggestion for complex `if let ... else` chains
+ [#6856](https://github.com/rust-lang/rust-clippy/pull/6856)
+* [`inconsistent_struct_constructor`]: Make lint description and message clearer
+ [#6892](https://github.com/rust-lang/rust-clippy/pull/6892)
+* [`map_entry`]: Now suggests `or_insert`, `insert_with` or `match _.entry(_)`
+  as appropriate (see the sketch after this list)
+  [#6937](https://github.com/rust-lang/rust-clippy/pull/6937)
+* [`manual_flatten`]: Suggest to insert `copied` if necessary
+ [#6962](https://github.com/rust-lang/rust-clippy/pull/6962)
+* [`redundant_slicing`]: Fix suggestion when a re-borrow might be required or
+ when the value is from a macro call
+ [#6975](https://github.com/rust-lang/rust-clippy/pull/6975)
+* [`match_wildcard_for_single_variants`]: Fix suggestion for hidden variant
+ [#6988](https://github.com/rust-lang/rust-clippy/pull/6988)
+* [`clone_on_copy`]: Correct suggestion when the cloned value is a macro call
+ [#7000](https://github.com/rust-lang/rust-clippy/pull/7000)
+* [`manual_map`]: Fix suggestion at the end of an if chain
+ [#7004](https://github.com/rust-lang/rust-clippy/pull/7004)
+* Fix needless parenthesis output in multiple lint suggestions
+ [#7013](https://github.com/rust-lang/rust-clippy/pull/7013)
+* [`needless_collect`]: Better explanation in the lint message
+ [#7020](https://github.com/rust-lang/rust-clippy/pull/7020)
+* [`useless_vec`]: Now considers mutability
+ [#7036](https://github.com/rust-lang/rust-clippy/pull/7036)
+* [`useless_format`]: Wrap the content in braces if necessary
+ [#7092](https://github.com/rust-lang/rust-clippy/pull/7092)
+* [`single_match`]: Don't suggest an equality check for types which don't
+ implement `PartialEq`
+ [#7093](https://github.com/rust-lang/rust-clippy/pull/7093)
+* [`from_over_into`]: Mention type in help message
+ [#7099](https://github.com/rust-lang/rust-clippy/pull/7099)
+* [`manual_unwrap_or`]: Fix invalid code suggestion due to a macro call
+ [#7136](https://github.com/rust-lang/rust-clippy/pull/7136)
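+
+As a rough sketch of the `map_entry` rewrite mentioned above (function names are
+made up for illustration; the lint's actual suggestion depends on the code it
+sees), the `contains_key`-then-`insert` pattern becomes a single entry-API call:
+
+```rust
+use std::collections::HashMap;
+
+// Pattern that `map_entry` lints: `contains_key` followed by `insert`
+// looks the key up twice.
+pub fn before(map: &mut HashMap<String, u32>, key: String) {
+    if !map.contains_key(&key) {
+        map.insert(key, 0);
+    }
+}
+
+// Rough shape of the suggested fix: the entry API does a single lookup.
+pub fn after(map: &mut HashMap<String, u32>, key: String) {
+    map.entry(key).or_insert(0);
+}
+```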
+
+### ICE Fixes
+
+* [`macro_use_imports`]
+ [#7022](https://github.com/rust-lang/rust-clippy/pull/7022)
+* [`missing_panics_doc`]
+ [#7034](https://github.com/rust-lang/rust-clippy/pull/7034)
+* [`tabs_in_doc_comments`]
+ [#7039](https://github.com/rust-lang/rust-clippy/pull/7039)
+* [`missing_const_for_fn`]
+ [#7128](https://github.com/rust-lang/rust-clippy/pull/7128)
+
+### Others
+
+* [Clippy's lint
+ list](https://rust-lang.github.io/rust-clippy/master/index.html) now supports
+ themes [#7030](https://github.com/rust-lang/rust-clippy/pull/7030)
+* Lints that were uplifted to `rustc` now mention the new `rustc` name in the
+ deprecation warning
+ [#7056](https://github.com/rust-lang/rust-clippy/pull/7056)
+
+## Rust 1.52
+
+Released 2021-05-06
+
+[3e41797...6ed6f1e](https://github.com/rust-lang/rust-clippy/compare/3e41797...6ed6f1e)
+
+### New Lints
+
+* [`from_str_radix_10`]
+ [#6717](https://github.com/rust-lang/rust-clippy/pull/6717)
+* [`implicit_clone`]
+ [#6730](https://github.com/rust-lang/rust-clippy/pull/6730)
+* [`semicolon_if_nothing_returned`]
+ [#6681](https://github.com/rust-lang/rust-clippy/pull/6681)
+* [`manual_flatten`]
+ [#6646](https://github.com/rust-lang/rust-clippy/pull/6646)
+* [`inconsistent_struct_constructor`]
+ [#6769](https://github.com/rust-lang/rust-clippy/pull/6769)
+* [`iter_count`]
+ [#6791](https://github.com/rust-lang/rust-clippy/pull/6791)
+* [`default_numeric_fallback`]
+ [#6662](https://github.com/rust-lang/rust-clippy/pull/6662)
+* [`bytes_nth`]
+ [#6695](https://github.com/rust-lang/rust-clippy/pull/6695)
+* [`filter_map_identity`]
+ [#6685](https://github.com/rust-lang/rust-clippy/pull/6685)
+* [`manual_map`]
+ [#6573](https://github.com/rust-lang/rust-clippy/pull/6573)
+
+### Moves and Deprecations
+
+* Moved [`upper_case_acronyms`] to `pedantic`
+ [#6775](https://github.com/rust-lang/rust-clippy/pull/6775)
+* Moved [`manual_map`] to `nursery`
+ [#6796](https://github.com/rust-lang/rust-clippy/pull/6796)
+* Moved [`unnecessary_wraps`] to `pedantic`
+ [#6765](https://github.com/rust-lang/rust-clippy/pull/6765)
+* Moved [`trivial_regex`] to `nursery`
+ [#6696](https://github.com/rust-lang/rust-clippy/pull/6696)
+* Moved [`naive_bytecount`] to `pedantic`
+ [#6825](https://github.com/rust-lang/rust-clippy/pull/6825)
+* Moved [`upper_case_acronyms`] to `style`
+ [#6788](https://github.com/rust-lang/rust-clippy/pull/6788)
+* Moved [`manual_map`] to `style`
+ [#6801](https://github.com/rust-lang/rust-clippy/pull/6801)
+
+### Enhancements
+
+* [`disallowed_methods`]: Now supports functions in addition to methods
+ [#6674](https://github.com/rust-lang/rust-clippy/pull/6674)
+* [`upper_case_acronyms`]: Added a new configuration `upper-case-acronyms-aggressive` to
+  trigger the lint whenever two or more uppercase characters appear next to each other
+  [#6788](https://github.com/rust-lang/rust-clippy/pull/6788)
+* [`collapsible_match`]: Now supports block comparison with different value names
+ [#6754](https://github.com/rust-lang/rust-clippy/pull/6754)
+* [`unnecessary_wraps`]: Will now suggest removing an unnecessarily wrapped unit return type, like `Option<()>`
+  (see the example after this list)
+  [#6665](https://github.com/rust-lang/rust-clippy/pull/6665)
+* Improved value usage detection in closures
+ [#6698](https://github.com/rust-lang/rust-clippy/pull/6698)
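+
+A minimal sketch of the `unnecessary_wraps` case described above (function names
+are made up for illustration): a function that can only ever return `Some(())`
+is asked to drop the wrapper entirely.
+
+```rust
+// Flagged by `unnecessary_wraps`: the `Option` wrapper carries no information,
+// since the function never returns `None`.
+pub fn run_job(job: &str) -> Option<()> {
+    println!("running {}", job);
+    Some(())
+}
+
+// Rough shape of the suggested fix: return unit instead of `Option<()>`.
+pub fn run_job_fixed(job: &str) {
+    println!("running {}", job);
+}
+```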
+
+### False Positive Fixes
+
+* [`use_self`]: No longer lints in macros
+ [#6833](https://github.com/rust-lang/rust-clippy/pull/6833)
+* [`use_self`]: Fixed multiple false positives for: generics, associated types and derive implementations
+ [#6179](https://github.com/rust-lang/rust-clippy/pull/6179)
+* [`missing_inline_in_public_items`]: No longer lints for procedural macros
+ [#6814](https://github.com/rust-lang/rust-clippy/pull/6814)
+* [`inherent_to_string`]: No longer lints on functions with function generics
+ [#6771](https://github.com/rust-lang/rust-clippy/pull/6771)
+* [`doc_markdown`]: Add `OpenDNS` to the default configuration as an allowed identifier
+ [#6783](https://github.com/rust-lang/rust-clippy/pull/6783)
+* [`missing_panics_doc`]: No longer lints on [`unreachable!`](https://doc.rust-lang.org/std/macro.unreachable.html)
+ [#6700](https://github.com/rust-lang/rust-clippy/pull/6700)
+* [`collapsible_if`]: No longer lints on if statements with attributes
+ [#6701](https://github.com/rust-lang/rust-clippy/pull/6701)
+* [`match_same_arms`]: Only considers empty blocks as equal if the tokens contained are the same
+ [#6843](https://github.com/rust-lang/rust-clippy/pull/6843)
+* [`redundant_closure`]: Now ignores macros
+ [#6871](https://github.com/rust-lang/rust-clippy/pull/6871)
+* [`manual_map`]: Fixed false positives when control flow statements like `return`, `break` etc. are used
+ [#6801](https://github.com/rust-lang/rust-clippy/pull/6801)
+* [`vec_init_then_push`]: Fixed false positives for loops and if statements
+ [#6697](https://github.com/rust-lang/rust-clippy/pull/6697)
+* [`len_without_is_empty`]: Will now consider multiple impl blocks and `#[allow]` on
+ the `len` method as well as the type definition.
+ [#6853](https://github.com/rust-lang/rust-clippy/pull/6853)
+* [`let_underscore_drop`]: Only lints on types which implement `Drop`
+ [#6682](https://github.com/rust-lang/rust-clippy/pull/6682)
+* [`unit_arg`]: No longer lints on unit arguments when they come from a path expression.
+ [#6601](https://github.com/rust-lang/rust-clippy/pull/6601)
+* [`cargo_common_metadata`]: No longer lints if
+ [`publish = false`](https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field)
+ is defined in the manifest
+ [#6650](https://github.com/rust-lang/rust-clippy/pull/6650)
+
+### Suggestion Fixes/Improvements
+
+* [`collapsible_match`]: Fixed lint message capitalization
+ [#6766](https://github.com/rust-lang/rust-clippy/pull/6766)
+* [`or_fun_call`]: Improved suggestions for `or_insert(vec![])`
+ [#6790](https://github.com/rust-lang/rust-clippy/pull/6790)
+* [`manual_map`]: No longer expands macros in the suggestions
+ [#6801](https://github.com/rust-lang/rust-clippy/pull/6801)
+* Aligned Clippy's lint messages with the rustc dev guide
+ [#6787](https://github.com/rust-lang/rust-clippy/pull/6787)
+
+### ICE Fixes
+
+* [`zero_sized_map_values`]
+ [#6866](https://github.com/rust-lang/rust-clippy/pull/6866)
+
+### Documentation Improvements
+
+* [`useless_format`]: Improved the documentation example
+ [#6854](https://github.com/rust-lang/rust-clippy/pull/6854)
+* Clippy's [`README.md`]: Includes a new subsection on running Clippy as a rustc wrapper
+ [#6782](https://github.com/rust-lang/rust-clippy/pull/6782)
+
+### Others
+* Running `cargo clippy` after `cargo check` now works as expected
+  (`cargo clippy` and `cargo check` no longer share the same build cache)
+ [#6687](https://github.com/rust-lang/rust-clippy/pull/6687)
+* Cargo now re-runs Clippy if arguments after `--` provided to `cargo clippy` are changed.
+ [#6834](https://github.com/rust-lang/rust-clippy/pull/6834)
+* Extracted Clippy's `utils` module into the new `clippy_utils` crate
+ [#6756](https://github.com/rust-lang/rust-clippy/pull/6756)
+* Clippy lintcheck tool improvements
+ [#6800](https://github.com/rust-lang/rust-clippy/pull/6800)
+ [#6735](https://github.com/rust-lang/rust-clippy/pull/6735)
+ [#6764](https://github.com/rust-lang/rust-clippy/pull/6764)
+ [#6708](https://github.com/rust-lang/rust-clippy/pull/6708)
+ [#6780](https://github.com/rust-lang/rust-clippy/pull/6780)
+ [#6686](https://github.com/rust-lang/rust-clippy/pull/6686)
+
+## Rust 1.51
+
+Released 2021-03-25
+
+[4911ab1...3e41797](https://github.com/rust-lang/rust-clippy/compare/4911ab1...3e41797)
+
+### New Lints
+
+* [`upper_case_acronyms`]
+ [#6475](https://github.com/rust-lang/rust-clippy/pull/6475)
+* [`from_over_into`] [#6476](https://github.com/rust-lang/rust-clippy/pull/6476)
+* [`case_sensitive_file_extension_comparisons`]
+ [#6500](https://github.com/rust-lang/rust-clippy/pull/6500)
+* [`needless_question_mark`]
+ [#6507](https://github.com/rust-lang/rust-clippy/pull/6507)
+* [`missing_panics_doc`]
+ [#6523](https://github.com/rust-lang/rust-clippy/pull/6523)
+* [`redundant_slicing`]
+ [#6528](https://github.com/rust-lang/rust-clippy/pull/6528)
+* [`vec_init_then_push`]
+ [#6538](https://github.com/rust-lang/rust-clippy/pull/6538)
+* [`ptr_as_ptr`] [#6542](https://github.com/rust-lang/rust-clippy/pull/6542)
+* [`collapsible_else_if`] (split out from `collapsible_if`)
+ [#6544](https://github.com/rust-lang/rust-clippy/pull/6544)
+* [`inspect_for_each`] [#6577](https://github.com/rust-lang/rust-clippy/pull/6577)
+* [`manual_filter_map`]
+ [#6591](https://github.com/rust-lang/rust-clippy/pull/6591)
+* [`exhaustive_enums`]
+ [#6617](https://github.com/rust-lang/rust-clippy/pull/6617)
+* [`exhaustive_structs`]
+ [#6617](https://github.com/rust-lang/rust-clippy/pull/6617)
+
+### Moves and Deprecations
+
+* Replace [`find_map`] with [`manual_find_map`]
+ [#6591](https://github.com/rust-lang/rust-clippy/pull/6591)
+* `unknown_clippy_lints` is now integrated into the `unknown_lints` rustc lint
+ [#6653](https://github.com/rust-lang/rust-clippy/pull/6653)
+
+### Enhancements
+
+* [`ptr_arg`] Now also suggests using `&Path` instead of `&PathBuf` (see the
+  example after this list)
+  [#6506](https://github.com/rust-lang/rust-clippy/pull/6506)
+* [`cast_ptr_alignment`] Also lint when the `pointer::cast` method is used
+ [#6557](https://github.com/rust-lang/rust-clippy/pull/6557)
+* [`collapsible_match`] Now also deals with `&` and `*` operators in the `match`
+ scrutinee [#6619](https://github.com/rust-lang/rust-clippy/pull/6619)
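+
+A small sketch of the `&PathBuf` case `ptr_arg` now covers (illustrative
+function names): a read-only `&PathBuf` parameter can usually be loosened to
+`&Path`, which every `&PathBuf` dereferences to.
+
+```rust
+use std::path::{Path, PathBuf};
+
+// Flagged by `ptr_arg`: the function only reads from the path, so requiring
+// an owned `PathBuf` behind the reference is unnecessarily restrictive.
+pub fn print_before(path: &PathBuf) {
+    println!("{}", path.display());
+}
+
+// Rough shape of the suggested fix: accept `&Path` instead.
+pub fn print_after(path: &Path) {
+    println!("{}", path.display());
+}
+```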
+
+### False Positive Fixes
+
+* [`similar_names`] Ignore underscore prefixed names
+ [#6403](https://github.com/rust-lang/rust-clippy/pull/6403)
+* [`print_literal`] and [`write_literal`] No longer lint numeric literals
+ [#6408](https://github.com/rust-lang/rust-clippy/pull/6408)
+* [`large_enum_variant`] No longer lints in external macros
+ [#6485](https://github.com/rust-lang/rust-clippy/pull/6485)
+* [`empty_enum`] Only lint if `never_type` feature is enabled
+ [#6513](https://github.com/rust-lang/rust-clippy/pull/6513)
+* [`field_reassign_with_default`] No longer lints in macros
+ [#6553](https://github.com/rust-lang/rust-clippy/pull/6553)
+* [`size_of_in_element_count`] No longer lints when dividing by element size
+ [#6578](https://github.com/rust-lang/rust-clippy/pull/6578)
+* [`needless_return`] No longer lints in macros
+ [#6586](https://github.com/rust-lang/rust-clippy/pull/6586)
+* [`match_overlapping_arm`] No longer lints when the first arm is completely
+  included in the second arm [#6603](https://github.com/rust-lang/rust-clippy/pull/6603)
+* [`doc_markdown`] Add `WebGL` to the default configuration as an allowed
+ identifier [#6605](https://github.com/rust-lang/rust-clippy/pull/6605)
+
+### Suggestion Fixes/Improvements
+
+* [`field_reassign_with_default`] Don't expand macro in lint suggestion
+ [#6531](https://github.com/rust-lang/rust-clippy/pull/6531)
+* [`match_like_matches_macro`] Strip references in suggestion
+ [#6532](https://github.com/rust-lang/rust-clippy/pull/6532)
+* [`single_match`] Suggest `if` over `if let` when possible (see the example
+  after this list)
+  [#6574](https://github.com/rust-lang/rust-clippy/pull/6574)
+* `ref_in_deref` Use parentheses correctly in suggestion
+ [#6609](https://github.com/rust-lang/rust-clippy/pull/6609)
+* [`stable_sort_primitive`] Clarify error message
+ [#6611](https://github.com/rust-lang/rust-clippy/pull/6611)
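+
+A minimal sketch of the improved `single_match` suggestion (the enum is made up
+for illustration; the lint only offers the equality form when it applies to the
+matched type):
+
+```rust
+#[derive(PartialEq)]
+pub enum Fruit {
+    Apple,
+    Banana,
+}
+
+pub fn eat(fruit: Fruit) {
+    // Flagged by `single_match`: only one arm does anything.
+    match fruit {
+        Fruit::Apple => println!("crunchy"),
+        _ => (),
+    }
+
+    // Rough shape of the suggestion: a plain `if` with an equality check,
+    // rather than `if let`, when the pattern binds nothing.
+    if fruit == Fruit::Apple {
+        println!("crunchy");
+    }
+}
+```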
+
+### ICE Fixes
+
+* [`zero_sized_map_values`]
+ [#6582](https://github.com/rust-lang/rust-clippy/pull/6582)
+
+### Documentation Improvements
+
+* Improve search performance on the Clippy website and make it possible to
+ directly search for lints on the GitHub issue tracker
+ [#6483](https://github.com/rust-lang/rust-clippy/pull/6483)
+* Clean up `README.md` by removing outdated paragraph
+ [#6488](https://github.com/rust-lang/rust-clippy/pull/6488)
+* [`await_holding_refcell_ref`] and [`await_holding_lock`]
+ [#6585](https://github.com/rust-lang/rust-clippy/pull/6585)
+* [`as_conversions`] [#6608](https://github.com/rust-lang/rust-clippy/pull/6608)
+
+### Others
+
+* Clippy now has a [Roadmap] for 2021. If you'd like to get involved in a bigger
+ project, take a look at the [Roadmap project page]. All issues listed there
+ are actively mentored
+ [#6462](https://github.com/rust-lang/rust-clippy/pull/6462)
+* The Clippy version number now corresponds to the Rust version number
+ [#6526](https://github.com/rust-lang/rust-clippy/pull/6526)
+* Fix an oversight which caused Clippy to lint dependencies in environments where
+  `CLIPPY_TESTS=true` happened to be set
+ [#6575](https://github.com/rust-lang/rust-clippy/pull/6575)
+* Add `cargo dev-lintcheck` tool to the Clippy Dev Tool
+ [#6469](https://github.com/rust-lang/rust-clippy/pull/6469)
+
+[Roadmap]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/proposals/roadmap-2021.md
+[Roadmap project page]: https://github.com/rust-lang/rust-clippy/projects/3
+
+## Rust 1.50
+
+Released 2021-02-11
+
+[b20d4c1...4bd77a1](https://github.com/rust-lang/rust-clippy/compare/b20d4c1...4bd77a1)
+
+### New Lints
+
+* [`suspicious_operation_groupings`] [#6086](https://github.com/rust-lang/rust-clippy/pull/6086)
+* [`size_of_in_element_count`] [#6394](https://github.com/rust-lang/rust-clippy/pull/6394)
+* [`unnecessary_wraps`] [#6070](https://github.com/rust-lang/rust-clippy/pull/6070)
+* [`let_underscore_drop`] [#6305](https://github.com/rust-lang/rust-clippy/pull/6305)
+* [`collapsible_match`] [#6402](https://github.com/rust-lang/rust-clippy/pull/6402)
+* [`redundant_else`] [#6330](https://github.com/rust-lang/rust-clippy/pull/6330)
+* [`zero_sized_map_values`] [#6218](https://github.com/rust-lang/rust-clippy/pull/6218)
+* [`print_stderr`] [#6367](https://github.com/rust-lang/rust-clippy/pull/6367)
+* [`string_from_utf8_as_bytes`] [#6134](https://github.com/rust-lang/rust-clippy/pull/6134)
+
+### Moves and Deprecations
+
+* Previously deprecated [`str_to_string`] and [`string_to_string`] have been un-deprecated
+ as `restriction` lints [#6333](https://github.com/rust-lang/rust-clippy/pull/6333)
+* Deprecate `panic_params` lint. This is now available in rustc as `non_fmt_panics`
+ [#6351](https://github.com/rust-lang/rust-clippy/pull/6351)
+* Move [`map_err_ignore`] to `restriction`
+ [#6416](https://github.com/rust-lang/rust-clippy/pull/6416)
+* Move [`await_holding_refcell_ref`] to `pedantic`
+ [#6354](https://github.com/rust-lang/rust-clippy/pull/6354)
+* Move [`await_holding_lock`] to `pedantic`
+ [#6354](https://github.com/rust-lang/rust-clippy/pull/6354)
+
+### Enhancements
+
+* Add the `unreadable-literal-lint-fractions` configuration to disable
+ the `unreadable_literal` lint for fractions
+ [#6421](https://github.com/rust-lang/rust-clippy/pull/6421)
+* [`clone_on_copy`]: Now shows the type in the lint message
+ [#6443](https://github.com/rust-lang/rust-clippy/pull/6443)
+* [`redundant_pattern_matching`]: Now also lints on `std::task::Poll`
+ [#6339](https://github.com/rust-lang/rust-clippy/pull/6339)
+* [`redundant_pattern_matching`]: Additionally also lints on `std::net::IpAddr`
+ [#6377](https://github.com/rust-lang/rust-clippy/pull/6377)
+* [`search_is_some`]: Now suggests `contains` instead of `find(foo).is_some()`
+  (see the example after this list)
+  [#6119](https://github.com/rust-lang/rust-clippy/pull/6119)
+* [`clone_double_ref`]: Now prints the reference type in the lint message
+ [#6442](https://github.com/rust-lang/rust-clippy/pull/6442)
+* [`modulo_one`]: Now also lints on -1.
+ [#6360](https://github.com/rust-lang/rust-clippy/pull/6360)
+* [`empty_loop`]: Now lints no_std crates, too
+ [#6205](https://github.com/rust-lang/rust-clippy/pull/6205)
+* [`or_fun_call`]: Now also lints when indexing `HashMap` or `BTreeMap`
+ [#6267](https://github.com/rust-lang/rust-clippy/pull/6267)
+* [`wrong_self_convention`]: Now also lints in trait definitions
+ [#6316](https://github.com/rust-lang/rust-clippy/pull/6316)
+* [`needless_borrow`]: Print the type in the lint message
+ [#6449](https://github.com/rust-lang/rust-clippy/pull/6449)
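+
+Roughly, the new `contains` suggestion from `search_is_some` looks like this
+(illustrative function names):
+
+```rust
+// Flagged by `search_is_some`: `find(..).is_some()` on a string search.
+pub fn mentions_clippy(line: &str) -> bool {
+    line.find("clippy").is_some()
+}
+
+// Rough shape of the suggested fix: `contains` states the intent directly.
+pub fn mentions_clippy_fixed(line: &str) -> bool {
+    line.contains("clippy")
+}
+```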
+
+[msrv_readme]: https://github.com/rust-lang/rust-clippy#specifying-the-minimum-supported-rust-version
+
+### False Positive Fixes
+
+* [`manual_range_contains`]: No longer lints in `const fn`
+ [#6382](https://github.com/rust-lang/rust-clippy/pull/6382)
+* [`unnecessary_lazy_evaluations`]: No longer lints if closure argument is used
+ [#6370](https://github.com/rust-lang/rust-clippy/pull/6370)
+* [`match_single_binding`]: Now ignores cases with `#[cfg()]` macros
+ [#6435](https://github.com/rust-lang/rust-clippy/pull/6435)
+* [`match_like_matches_macro`]: No longer lints on arms with attributes
+ [#6290](https://github.com/rust-lang/rust-clippy/pull/6290)
+* [`map_clone`]: No longer lints with deref and clone
+ [#6269](https://github.com/rust-lang/rust-clippy/pull/6269)
+* [`map_clone`]: No longer lints in the case of &mut
+ [#6301](https://github.com/rust-lang/rust-clippy/pull/6301)
+* [`needless_update`]: Now ignores `non_exhaustive` structs
+ [#6464](https://github.com/rust-lang/rust-clippy/pull/6464)
+* [`needless_collect`]: No longer lints when a collect is needed multiple times
+ [#6313](https://github.com/rust-lang/rust-clippy/pull/6313)
+* [`unnecessary_cast`]: No longer lints cfg-dependent types
+ [#6369](https://github.com/rust-lang/rust-clippy/pull/6369)
+* [`declare_interior_mutable_const`] and [`borrow_interior_mutable_const`]:
+ Both now ignore enums with frozen variants
+ [#6110](https://github.com/rust-lang/rust-clippy/pull/6110)
+* [`field_reassign_with_default`]: No longer lints for private fields
+ [#6537](https://github.com/rust-lang/rust-clippy/pull/6537)
+
+
+### Suggestion Fixes/Improvements
+
+* [`vec_box`]: Provide correct type scope suggestion
+ [#6271](https://github.com/rust-lang/rust-clippy/pull/6271)
+* [`manual_range_contains`]: Give correct suggestion when using floats
+ [#6320](https://github.com/rust-lang/rust-clippy/pull/6320)
+* [`unnecessary_lazy_evaluations`]: Don't always mark suggestion as MachineApplicable
+ [#6272](https://github.com/rust-lang/rust-clippy/pull/6272)
+* [`manual_async_fn`]: Improve suggestion formatting
+ [#6294](https://github.com/rust-lang/rust-clippy/pull/6294)
+* [`unnecessary_cast`]: Fix incorrectly formatted float literal suggestion
+ [#6362](https://github.com/rust-lang/rust-clippy/pull/6362)
+
+### ICE Fixes
+
+* Fix a crash in [`from_iter_instead_of_collect`]
+ [#6304](https://github.com/rust-lang/rust-clippy/pull/6304)
+* Fix a silent crash when parsing doc comments in [`needless_doctest_main`]
+ [#6458](https://github.com/rust-lang/rust-clippy/pull/6458)
+
+### Documentation Improvements
+
+* The lint website search has been improved ([#6477](https://github.com/rust-lang/rust-clippy/pull/6477)):
+ * Searching for lints with dashes and spaces is possible now. For example
+ `missing-errors-doc` and `missing errors doc` are now valid aliases for lint names
+ * Improved fuzzy search in lint descriptions
+* Various README improvements
+ [#6287](https://github.com/rust-lang/rust-clippy/pull/6287)
+* Add known problems to [`comparison_chain`] documentation
+ [#6390](https://github.com/rust-lang/rust-clippy/pull/6390)
+* Fix example used in [`cargo_common_metadata`]
+ [#6293](https://github.com/rust-lang/rust-clippy/pull/6293)
+* Improve [`map_clone`] documentation
+ [#6340](https://github.com/rust-lang/rust-clippy/pull/6340)
+
+### Others
+
+* You can now tell Clippy about the MSRV your project supports. Please refer to
+ the specific README section to learn more about MSRV support [here][msrv_readme]
+ [#6201](https://github.com/rust-lang/rust-clippy/pull/6201)
+* Add `--no-deps` option to avoid running on path dependencies in workspaces
+ [#6188](https://github.com/rust-lang/rust-clippy/pull/6188)
+
+## Rust 1.49
+
+Released 2020-12-31
+
+[e636b88...b20d4c1](https://github.com/rust-lang/rust-clippy/compare/e636b88...b20d4c1)
+
+### New Lints
+
+* [`field_reassign_with_default`] [#5911](https://github.com/rust-lang/rust-clippy/pull/5911)
+* [`await_holding_refcell_ref`] [#6029](https://github.com/rust-lang/rust-clippy/pull/6029)
+* [`disallowed_methods`] [#6081](https://github.com/rust-lang/rust-clippy/pull/6081)
+* [`inline_asm_x86_att_syntax`] [#6092](https://github.com/rust-lang/rust-clippy/pull/6092)
+* [`inline_asm_x86_intel_syntax`] [#6092](https://github.com/rust-lang/rust-clippy/pull/6092)
+* [`from_iter_instead_of_collect`] [#6101](https://github.com/rust-lang/rust-clippy/pull/6101)
+* [`mut_mutex_lock`] [#6103](https://github.com/rust-lang/rust-clippy/pull/6103)
+* [`single_element_loop`] [#6109](https://github.com/rust-lang/rust-clippy/pull/6109)
+* [`manual_unwrap_or`] [#6123](https://github.com/rust-lang/rust-clippy/pull/6123)
+* [`large_types_passed_by_value`] [#6135](https://github.com/rust-lang/rust-clippy/pull/6135)
+* [`result_unit_err`] [#6157](https://github.com/rust-lang/rust-clippy/pull/6157)
+* [`ref_option_ref`] [#6165](https://github.com/rust-lang/rust-clippy/pull/6165)
+* [`manual_range_contains`] [#6177](https://github.com/rust-lang/rust-clippy/pull/6177)
+* [`unusual_byte_groupings`] [#6183](https://github.com/rust-lang/rust-clippy/pull/6183)
+* [`comparison_to_empty`] [#6226](https://github.com/rust-lang/rust-clippy/pull/6226)
+* [`map_collect_result_unit`] [#6227](https://github.com/rust-lang/rust-clippy/pull/6227)
+* [`manual_ok_or`] [#6233](https://github.com/rust-lang/rust-clippy/pull/6233)
+
+### Moves and Deprecations
+
+* Rename `single_char_push_str` to [`single_char_add_str`]
+ [#6037](https://github.com/rust-lang/rust-clippy/pull/6037)
+* Rename `zero_width_space` to [`invisible_characters`]
+ [#6105](https://github.com/rust-lang/rust-clippy/pull/6105)
+* Deprecate `drop_bounds` (uplifted)
+ [#6111](https://github.com/rust-lang/rust-clippy/pull/6111)
+* Move [`string_lit_as_bytes`] to `nursery`
+ [#6117](https://github.com/rust-lang/rust-clippy/pull/6117)
+* Move [`rc_buffer`] to `restriction`
+ [#6128](https://github.com/rust-lang/rust-clippy/pull/6128)
+
+### Enhancements
+
+* [`manual_memcpy`]: Also lint when there are loop counters (and produce a
+ reliable suggestion)
+ [#5727](https://github.com/rust-lang/rust-clippy/pull/5727)
+* [`single_char_add_str`]: Also lint on `String::insert_str`
+ [#6037](https://github.com/rust-lang/rust-clippy/pull/6037)
+* [`invisible_characters`]: Also lint the characters `\u{AD}` and `\u{2060}`
+ [#6105](https://github.com/rust-lang/rust-clippy/pull/6105)
+* [`eq_op`]: Also lint on the `assert_*!` macro family
+ [#6167](https://github.com/rust-lang/rust-clippy/pull/6167)
+* [`items_after_statements`]: Also lint in local macro expansions
+ [#6176](https://github.com/rust-lang/rust-clippy/pull/6176)
+* [`unnecessary_cast`]: Also lint casts on integer and float literals
+ [#6187](https://github.com/rust-lang/rust-clippy/pull/6187)
+* [`manual_unwrap_or`]: Also lint `Result::unwrap_or`
+ [#6190](https://github.com/rust-lang/rust-clippy/pull/6190)
+* [`match_like_matches_macro`]: Also lint when `match` has more than two arms
+  (see the example after this list)
+  [#6216](https://github.com/rust-lang/rust-clippy/pull/6216)
+* [`integer_arithmetic`]: Better handle `/` and `%` operators
+ [#6229](https://github.com/rust-lang/rust-clippy/pull/6229)
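+
+A minimal sketch of the extended `match_like_matches_macro` case (the enum is
+made up for illustration): a `match` in which every arm just returns a bool can
+be folded into a single `matches!` call, even with more than two arms.
+
+```rust
+pub enum State {
+    Starting,
+    Running,
+    Stopping,
+    Stopped,
+}
+
+// Flagged now that the lint also handles matches with more than two arms.
+pub fn is_active(state: &State) -> bool {
+    match state {
+        State::Starting => true,
+        State::Running => true,
+        _ => false,
+    }
+}
+
+// Rough shape of the suggested fix: combine the `true` arms with `|`.
+pub fn is_active_fixed(state: &State) -> bool {
+    matches!(state, State::Starting | State::Running)
+}
+```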
+
+### False Positive Fixes
+
+* [`needless_lifetimes`]: Bail out if the function has a `where` clause with the
+ lifetime [#5978](https://github.com/rust-lang/rust-clippy/pull/5978)
+* [`explicit_counter_loop`]: No longer lints when the loop counter is used after
+  it is incremented [#6076](https://github.com/rust-lang/rust-clippy/pull/6076)
+* [`or_fun_call`]: Revert changes addressing the handling of `const fn`
+ [#6077](https://github.com/rust-lang/rust-clippy/pull/6077)
+* [`needless_range_loop`]: No longer lints when the iterable is used in the
+  range [#6102](https://github.com/rust-lang/rust-clippy/pull/6102)
+* [`inconsistent_digit_grouping`]: Fix bug when using floating point exponent
+ [#6104](https://github.com/rust-lang/rust-clippy/pull/6104)
+* [`mistyped_literal_suffixes`]: No longer lints on the fractional part of a
+ float (e.g. `713.32_64`)
+ [#6114](https://github.com/rust-lang/rust-clippy/pull/6114)
+* [`invalid_regex`]: No longer lint on unicode characters within `bytes::Regex`
+ [#6132](https://github.com/rust-lang/rust-clippy/pull/6132)
+* [`boxed_local`]: No longer lints on `extern fn` arguments
+ [#6133](https://github.com/rust-lang/rust-clippy/pull/6133)
+* [`needless_lifetimes`]: Fix a regression where the lifetime is used in the
+  `where` clause [#6198](https://github.com/rust-lang/rust-clippy/pull/6198)
+
+### Suggestion Fixes/Improvements
+
+* [`unnecessary_sort_by`]: Avoid dereferencing the suggested closure parameter
+ [#6078](https://github.com/rust-lang/rust-clippy/pull/6078)
+* [`needless_arbitrary_self_type`]: Correctly handle expanded code
+ [#6093](https://github.com/rust-lang/rust-clippy/pull/6093)
+* [`useless_format`]: Preserve raw strings in suggestion
+ [#6151](https://github.com/rust-lang/rust-clippy/pull/6151)
+* [`empty_loop`]: Suggest alternatives
+ [#6162](https://github.com/rust-lang/rust-clippy/pull/6162)
+* [`borrowed_box`]: Correctly add parentheses in suggestion
+ [#6200](https://github.com/rust-lang/rust-clippy/pull/6200)
+* [`unused_unit`]: Improve suggestion formatting
+ [#6247](https://github.com/rust-lang/rust-clippy/pull/6247)
+
+### Documentation Improvements
+
+* Some doc improvements:
+ * [`rc_buffer`] [#6090](https://github.com/rust-lang/rust-clippy/pull/6090)
+ * [`empty_loop`] [#6162](https://github.com/rust-lang/rust-clippy/pull/6162)
+* [`doc_markdown`]: Document problematic link text style
+ [#6107](https://github.com/rust-lang/rust-clippy/pull/6107)
+
+## Rust 1.48
+
+Released 2020-11-19
+
+[09bd400...e636b88](https://github.com/rust-lang/rust-clippy/compare/09bd400...e636b88)
+
+### New lints
+
+* [`self_assignment`] [#5894](https://github.com/rust-lang/rust-clippy/pull/5894)
+* [`unnecessary_lazy_evaluations`] [#5720](https://github.com/rust-lang/rust-clippy/pull/5720)
+* [`manual_strip`] [#6038](https://github.com/rust-lang/rust-clippy/pull/6038)
+* [`map_err_ignore`] [#5998](https://github.com/rust-lang/rust-clippy/pull/5998)
+* [`rc_buffer`] [#6044](https://github.com/rust-lang/rust-clippy/pull/6044)
+* `to_string_in_display` [#5831](https://github.com/rust-lang/rust-clippy/pull/5831)
+* `single_char_push_str` [#5881](https://github.com/rust-lang/rust-clippy/pull/5881)
+
+### Moves and Deprecations
+
+* Downgrade [`verbose_bit_mask`] to pedantic
+ [#6036](https://github.com/rust-lang/rust-clippy/pull/6036)
+
+### Enhancements
+
+* Extend [`precedence`] to handle chains of methods combined with unary negation
+ [#5928](https://github.com/rust-lang/rust-clippy/pull/5928)
+* [`useless_vec`]: add a configuration value for the maximum allowed size on the stack
+ [#5907](https://github.com/rust-lang/rust-clippy/pull/5907)
+* [`suspicious_arithmetic_impl`]: extend to implementations of `BitAnd`, `BitOr`, `BitXor`, `Rem`, `Shl`, and `Shr`
+ [#5884](https://github.com/rust-lang/rust-clippy/pull/5884)
+* `invalid_atomic_ordering`: detect misuse of `compare_exchange`, `compare_exchange_weak`, and `fetch_update`
+ [#6025](https://github.com/rust-lang/rust-clippy/pull/6025)
+* Avoid [`redundant_pattern_matching`] triggering in macros
+ [#6069](https://github.com/rust-lang/rust-clippy/pull/6069)
+* [`option_if_let_else`]: distinguish pure from impure `else` expressions
+ [#5937](https://github.com/rust-lang/rust-clippy/pull/5937)
+* [`needless_doctest_main`]: parse doctests instead of using textual search
+ [#5912](https://github.com/rust-lang/rust-clippy/pull/5912)
+* [`wildcard_imports`]: allow `prelude` to appear in any segment of an import
+ [#5929](https://github.com/rust-lang/rust-clippy/pull/5929)
+* Re-enable [`len_zero`] for ranges now that `range_is_empty` is stable
+ [#5961](https://github.com/rust-lang/rust-clippy/pull/5961)
+* [`option_as_ref_deref`]: catch fully-qualified calls to `Deref::deref` and `DerefMut::deref_mut`
+ [#5933](https://github.com/rust-lang/rust-clippy/pull/5933)
+
+### False Positive Fixes
+
+* [`useless_attribute`]: permit allowing [`wildcard_imports`] and [`enum_glob_use`]
+ [#5994](https://github.com/rust-lang/rust-clippy/pull/5994)
+* [`transmute_ptr_to_ptr`]: avoid suggesting dereferencing raw pointers in const contexts
+ [#5999](https://github.com/rust-lang/rust-clippy/pull/5999)
+* [`redundant_closure_call`]: take into account usages of the closure in nested functions and closures
+ [#5920](https://github.com/rust-lang/rust-clippy/pull/5920)
+* Fix false positive in [`borrow_interior_mutable_const`] when referencing a field behind a pointer
+ [#5949](https://github.com/rust-lang/rust-clippy/pull/5949)
+* [`doc_markdown`]: allow using "GraphQL" without backticks
+ [#5996](https://github.com/rust-lang/rust-clippy/pull/5996)
+* `to_string_in_display`: avoid linting when calling `to_string()` on anything that is not `self`
+ [#5971](https://github.com/rust-lang/rust-clippy/pull/5971)
+* [`indexing_slicing`] and [`out_of_bounds_indexing`] treat references to arrays as arrays
+ [#6034](https://github.com/rust-lang/rust-clippy/pull/6034)
+* [`should_implement_trait`]: ignore methods with lifetime parameters
+ [#5725](https://github.com/rust-lang/rust-clippy/pull/5725)
+* [`needless_return`]: avoid linting if a temporary borrows a local variable
+ [#5903](https://github.com/rust-lang/rust-clippy/pull/5903)
+* Restrict [`unnecessary_sort_by`] to non-reference, Copy types
+ [#6006](https://github.com/rust-lang/rust-clippy/pull/6006)
+* Avoid suggesting `from_bits`/`to_bits` in const contexts in [`transmute_int_to_float`]
+ [#5919](https://github.com/rust-lang/rust-clippy/pull/5919)
+* [`declare_interior_mutable_const`] and [`borrow_interior_mutable_const`]: improve detection of interior mutable types
+ [#6046](https://github.com/rust-lang/rust-clippy/pull/6046)
+
+### Suggestion Fixes/Improvements
+
+* [`let_and_return`]: add a cast to the suggestion when the return expression has adjustments
+ [#5946](https://github.com/rust-lang/rust-clippy/pull/5946)
+* [`useless_conversion`]: show the type in the error message
+ [#6035](https://github.com/rust-lang/rust-clippy/pull/6035)
+* [`unnecessary_mut_passed`]: discriminate between functions and methods in the error message
+ [#5892](https://github.com/rust-lang/rust-clippy/pull/5892)
+* [`float_cmp`] and [`float_cmp_const`]: change wording to make margin of error less ambiguous
+ [#6043](https://github.com/rust-lang/rust-clippy/pull/6043)
+* [`default_trait_access`]: do not use unnecessary type parameters in the suggestion
+ [#5993](https://github.com/rust-lang/rust-clippy/pull/5993)
+* [`collapsible_if`]: don't use expanded code in the suggestion
+ [#5992](https://github.com/rust-lang/rust-clippy/pull/5992)
+* Do not suggest empty format strings in [`print_with_newline`] and [`write_with_newline`]
+ [#6042](https://github.com/rust-lang/rust-clippy/pull/6042)
+* [`unit_arg`]: improve the readability of the suggestion
+ [#5931](https://github.com/rust-lang/rust-clippy/pull/5931)
+* [`stable_sort_primitive`]: print the type that is being sorted in the lint message
+ [#5935](https://github.com/rust-lang/rust-clippy/pull/5935)
+* Show line count and max lines in [`too_many_lines`] lint message
+ [#6009](https://github.com/rust-lang/rust-clippy/pull/6009)
+* Keep parentheses in the suggestion of [`useless_conversion`] where applicable
+ [#5900](https://github.com/rust-lang/rust-clippy/pull/5900)
+* [`option_map_unit_fn`] and [`result_map_unit_fn`]: print the unit type `()` explicitly
+ [#6024](https://github.com/rust-lang/rust-clippy/pull/6024)
+* [`redundant_allocation`]: suggest replacing `Rc<Box<T>>` with `Rc<T>`
+ [#5899](https://github.com/rust-lang/rust-clippy/pull/5899)
+* Make lint messages adhere to rustc dev guide conventions
+ [#5893](https://github.com/rust-lang/rust-clippy/pull/5893)
+
+### ICE Fixes
+
+* Fix ICE in [`repeat_once`]
+ [#5948](https://github.com/rust-lang/rust-clippy/pull/5948)
+
+### Documentation Improvements
+
+* [`mutable_key_type`]: explain potential for false positives when the interior mutable type is not accessed in the `Hash` implementation
+ [#6019](https://github.com/rust-lang/rust-clippy/pull/6019)
+* [`unnecessary_mut_passed`]: fix typo
+ [#5913](https://github.com/rust-lang/rust-clippy/pull/5913)
+* Add example of false positive to [`ptr_arg`] docs.
+ [#5885](https://github.com/rust-lang/rust-clippy/pull/5885)
+* [`box_vec`](https://rust-lang.github.io/rust-clippy/master/index.html#box_collection), [`vec_box`] and [`borrowed_box`]: add link to the documentation of `Box`
+ [#6023](https://github.com/rust-lang/rust-clippy/pull/6023)
+
+## Rust 1.47
+
+Released 2020-10-08
+
+[c2c07fa...09bd400](https://github.com/rust-lang/rust-clippy/compare/c2c07fa...09bd400)
+
+### New lints
+
+* [`derive_ord_xor_partial_ord`] [#5848](https://github.com/rust-lang/rust-clippy/pull/5848)
+* [`trait_duplication_in_bounds`] [#5852](https://github.com/rust-lang/rust-clippy/pull/5852)
+* [`map_identity`] [#5694](https://github.com/rust-lang/rust-clippy/pull/5694)
+* [`unit_return_expecting_ord`] [#5737](https://github.com/rust-lang/rust-clippy/pull/5737)
+* [`pattern_type_mismatch`] [#4841](https://github.com/rust-lang/rust-clippy/pull/4841)
+* [`repeat_once`] [#5773](https://github.com/rust-lang/rust-clippy/pull/5773)
+* [`same_item_push`] [#5825](https://github.com/rust-lang/rust-clippy/pull/5825)
+* [`needless_arbitrary_self_type`] [#5869](https://github.com/rust-lang/rust-clippy/pull/5869)
+* [`match_like_matches_macro`] [#5769](https://github.com/rust-lang/rust-clippy/pull/5769)
+* [`stable_sort_primitive`] [#5809](https://github.com/rust-lang/rust-clippy/pull/5809)
+* [`blanket_clippy_restriction_lints`] [#5750](https://github.com/rust-lang/rust-clippy/pull/5750)
+* [`option_if_let_else`] [#5301](https://github.com/rust-lang/rust-clippy/pull/5301)
+
+### Moves and Deprecations
+
+* Deprecate [`regex_macro`] lint
+ [#5760](https://github.com/rust-lang/rust-clippy/pull/5760)
+* Move [`range_minus_one`] to `pedantic`
+ [#5752](https://github.com/rust-lang/rust-clippy/pull/5752)
+
+### Enhancements
+
+* Improve [`needless_collect`] by catching `collect` calls followed by `iter` or `into_iter` calls
+ [#5837](https://github.com/rust-lang/rust-clippy/pull/5837)
+* [`panic`], [`todo`], [`unimplemented`] and [`unreachable`] now detect calls with formatting
+ [#5811](https://github.com/rust-lang/rust-clippy/pull/5811)
+* Detect more cases of [`suboptimal_flops`] and [`imprecise_flops`]
+ [#5443](https://github.com/rust-lang/rust-clippy/pull/5443)
+* Handle asymmetrical implementations of `PartialEq` in [`cmp_owned`]
+ [#5701](https://github.com/rust-lang/rust-clippy/pull/5701)
+* Make it possible to allow [`unsafe_derive_deserialize`]
+ [#5870](https://github.com/rust-lang/rust-clippy/pull/5870)
+* Catch `ord.min(a).max(b)` where a < b in [`min_max`]
+ [#5871](https://github.com/rust-lang/rust-clippy/pull/5871)
+* Make [`clone_on_copy`] suggestion machine applicable
+ [#5745](https://github.com/rust-lang/rust-clippy/pull/5745)
+* Enable [`len_zero`] on ranges now that `is_empty` is stable on them
+ [#5961](https://github.com/rust-lang/rust-clippy/pull/5961)
+
+### False Positive Fixes
+
+* Avoid triggering [`or_fun_call`] with const fns that take no arguments
+ [#5889](https://github.com/rust-lang/rust-clippy/pull/5889)
+* Fix [`redundant_closure_call`] false positive for closures that have multiple calls
+ [#5800](https://github.com/rust-lang/rust-clippy/pull/5800)
+* Don't lint cases involving `ManuallyDrop` in [`redundant_clone`]
+ [#5824](https://github.com/rust-lang/rust-clippy/pull/5824)
+* Treat a single expression the same as a single statement in the 2nd arm of a match in [`single_match_else`]
+ [#5771](https://github.com/rust-lang/rust-clippy/pull/5771)
+* Don't trigger [`unnested_or_patterns`] if the feature `or_patterns` is not enabled
+ [#5758](https://github.com/rust-lang/rust-clippy/pull/5758)
+* Avoid linting if key borrows in [`unnecessary_sort_by`]
+ [#5756](https://github.com/rust-lang/rust-clippy/pull/5756)
+* Consider `Try` impl for `Poll` when generating suggestions in [`try_err`]
+ [#5857](https://github.com/rust-lang/rust-clippy/pull/5857)
+* Take input lifetimes into account in `manual_async_fn`
+ [#5859](https://github.com/rust-lang/rust-clippy/pull/5859)
+* Fix multiple false positives in [`type_repetition_in_bounds`] and add a configuration option
+ [#5761](https://github.com/rust-lang/rust-clippy/pull/5761)
+* Limit the [`suspicious_arithmetic_impl`] lint to one binary operation
+ [#5820](https://github.com/rust-lang/rust-clippy/pull/5820)
+
+### Suggestion Fixes/Improvements
+
+* Improve readability of [`shadow_unrelated`] suggestion by truncating the RHS snippet
+ [#5788](https://github.com/rust-lang/rust-clippy/pull/5788)
+* Suggest `filter_map` instead of `flat_map` when mapping to `Option` in [`map_flatten`]
+ [#5846](https://github.com/rust-lang/rust-clippy/pull/5846)
+* Ensure suggestion is shown correctly for long method call chains in [`iter_nth_zero`]
+ [#5793](https://github.com/rust-lang/rust-clippy/pull/5793)
+* Drop borrow operator in suggestions of [`redundant_pattern_matching`]
+ [#5815](https://github.com/rust-lang/rust-clippy/pull/5815)
+* Add suggestion for [`iter_skip_next`]
+ [#5843](https://github.com/rust-lang/rust-clippy/pull/5843)
+* Improve [`collapsible_if`] fix suggestion
+ [#5732](https://github.com/rust-lang/rust-clippy/pull/5732)
+
+### ICE Fixes
+
+* Fix ICE caused by [`needless_collect`]
+ [#5877](https://github.com/rust-lang/rust-clippy/pull/5877)
+* Fix ICE caused by [`unnested_or_patterns`]
+ [#5784](https://github.com/rust-lang/rust-clippy/pull/5784)
+
+### Documentation Improvements
+
+* Fix grammar of [`await_holding_lock`] documentation
+ [#5748](https://github.com/rust-lang/rust-clippy/pull/5748)
+
+### Others
+
+* Make lints adhere to the rustc dev guide
+ [#5888](https://github.com/rust-lang/rust-clippy/pull/5888)
+
+## Rust 1.46
+
+Released 2020-08-27
+
+[7ea7cd1...c2c07fa](https://github.com/rust-lang/rust-clippy/compare/7ea7cd1...c2c07fa)
+
+### New lints
+
+* [`unnested_or_patterns`] [#5378](https://github.com/rust-lang/rust-clippy/pull/5378)
+* [`iter_next_slice`] [#5597](https://github.com/rust-lang/rust-clippy/pull/5597)
+* [`unnecessary_sort_by`] [#5623](https://github.com/rust-lang/rust-clippy/pull/5623)
+* [`vec_resize_to_zero`] [#5637](https://github.com/rust-lang/rust-clippy/pull/5637)
+
+### Moves and Deprecations
+
+* Move [`cast_ptr_alignment`] to pedantic [#5667](https://github.com/rust-lang/rust-clippy/pull/5667)
+
+### Enhancements
+
+* Improve [`mem_replace_with_uninit`] lint [#5695](https://github.com/rust-lang/rust-clippy/pull/5695)
+
+### False Positive Fixes
+
+* [`len_zero`]: Avoid linting ranges when the `range_is_empty` feature is not enabled
+ [#5656](https://github.com/rust-lang/rust-clippy/pull/5656)
+* [`let_and_return`]: Don't lint if a temporary borrow is involved
+ [#5680](https://github.com/rust-lang/rust-clippy/pull/5680)
+* [`reversed_empty_ranges`]: Avoid linting `N..N` in for loop arguments
+  [#5692](https://github.com/rust-lang/rust-clippy/pull/5692)
+* [`if_same_then_else`]: Don't assume multiplication is always commutative
+ [#5702](https://github.com/rust-lang/rust-clippy/pull/5702)
+* [`blacklisted_name`]: Remove `bar` from the default configuration
+ [#5712](https://github.com/rust-lang/rust-clippy/pull/5712)
+* [`redundant_pattern_matching`]: Avoid suggesting non-`const fn` calls in const contexts
+ [#5724](https://github.com/rust-lang/rust-clippy/pull/5724)
+
+### Suggestion Fixes/Improvements
+
+* Fix suggestion of [`unit_arg`] lint, so that it suggests semantically equivalent code
+ [#4455](https://github.com/rust-lang/rust-clippy/pull/4455)
+* Add auto applicable suggestion to [`macro_use_imports`]
+ [#5279](https://github.com/rust-lang/rust-clippy/pull/5279)
+
+### ICE Fixes
+
+* Fix ICE in the `consts` module of Clippy [#5709](https://github.com/rust-lang/rust-clippy/pull/5709)
+
+### Documentation Improvements
+
+* Improve code examples across multiple lints [#5664](https://github.com/rust-lang/rust-clippy/pull/5664)
+
+### Others
+
+* Introduce a `--rustc` flag to `clippy-driver`, which turns `clippy-driver`
+ into `rustc` and passes all the given arguments to `rustc`. This is especially
+ useful for tools that need the `rustc` version Clippy was compiled with,
+ instead of the Clippy version. E.g. `clippy-driver --rustc --version` will
+ print the output of `rustc --version`.
+ [#5178](https://github.com/rust-lang/rust-clippy/pull/5178)
+* New issue templates now make it easier to complain if Clippy is too annoying
+ or not annoying enough! [#5735](https://github.com/rust-lang/rust-clippy/pull/5735)
+
+## Rust 1.45
+
+Released 2020-07-16
+
+[891e1a8...7ea7cd1](https://github.com/rust-lang/rust-clippy/compare/891e1a8...7ea7cd1)
+
+### New lints
+
+* [`match_wildcard_for_single_variants`] [#5582](https://github.com/rust-lang/rust-clippy/pull/5582)
+* [`unsafe_derive_deserialize`] [#5493](https://github.com/rust-lang/rust-clippy/pull/5493)
+* [`if_let_mutex`] [#5332](https://github.com/rust-lang/rust-clippy/pull/5332)
+* [`mismatched_target_os`] [#5506](https://github.com/rust-lang/rust-clippy/pull/5506)
+* [`await_holding_lock`] [#5439](https://github.com/rust-lang/rust-clippy/pull/5439)
+* [`match_on_vec_items`] [#5522](https://github.com/rust-lang/rust-clippy/pull/5522)
+* [`manual_async_fn`] [#5576](https://github.com/rust-lang/rust-clippy/pull/5576)
+* [`reversed_empty_ranges`] [#5583](https://github.com/rust-lang/rust-clippy/pull/5583)
+* [`manual_non_exhaustive`] [#5550](https://github.com/rust-lang/rust-clippy/pull/5550)
+
+### Moves and Deprecations
+
+* Downgrade [`match_bool`] to pedantic [#5408](https://github.com/rust-lang/rust-clippy/pull/5408)
+* Downgrade [`match_wild_err_arm`] to pedantic and update help messages. [#5622](https://github.com/rust-lang/rust-clippy/pull/5622)
+* Downgrade [`useless_let_if_seq`] to nursery. [#5599](https://github.com/rust-lang/rust-clippy/pull/5599)
+* Generalize `option_and_then_some` and rename to [`bind_instead_of_map`]. [#5529](https://github.com/rust-lang/rust-clippy/pull/5529)
+* Rename `identity_conversion` to [`useless_conversion`]. [#5568](https://github.com/rust-lang/rust-clippy/pull/5568)
+* Merge `block_in_if_condition_expr` and `block_in_if_condition_stmt` into [`blocks_in_if_conditions`].
+[#5563](https://github.com/rust-lang/rust-clippy/pull/5563)
+* Merge `option_map_unwrap_or`, `option_map_unwrap_or_else` and `result_map_unwrap_or_else` into [`map_unwrap_or`].
+[#5563](https://github.com/rust-lang/rust-clippy/pull/5563)
+* Merge `option_unwrap_used` and `result_unwrap_used` into [`unwrap_used`].
+[#5563](https://github.com/rust-lang/rust-clippy/pull/5563)
+* Merge `option_expect_used` and `result_expect_used` into [`expect_used`].
+[#5563](https://github.com/rust-lang/rust-clippy/pull/5563)
+* Merge `for_loop_over_option` and `for_loop_over_result` into [`for_loops_over_fallibles`].
+[#5563](https://github.com/rust-lang/rust-clippy/pull/5563)
+
+### Enhancements
+
+* Avoid running cargo lints when they are not enabled, to improve performance. [#5505](https://github.com/rust-lang/rust-clippy/pull/5505)
+* Extend [`useless_conversion`] with `TryFrom` and `TryInto`. [#5631](https://github.com/rust-lang/rust-clippy/pull/5631)
+* Lint also in type parameters and where clauses in [`unused_unit`]. [#5592](https://github.com/rust-lang/rust-clippy/pull/5592)
+* Do not suggest deriving `Default` in [`new_without_default`]. [#5616](https://github.com/rust-lang/rust-clippy/pull/5616)
+
+### False Positive Fixes
+
+* [`while_let_on_iterator`] [#5525](https://github.com/rust-lang/rust-clippy/pull/5525)
+* [`empty_line_after_outer_attr`] [#5609](https://github.com/rust-lang/rust-clippy/pull/5609)
+* [`unnecessary_unwrap`] [#5558](https://github.com/rust-lang/rust-clippy/pull/5558)
+* [`comparison_chain`] [#5596](https://github.com/rust-lang/rust-clippy/pull/5596)
+* Don't trigger [`used_underscore_binding`] in await desugaring. [#5535](https://github.com/rust-lang/rust-clippy/pull/5535)
+* Don't trigger [`borrowed_box`] on mutable references. [#5491](https://github.com/rust-lang/rust-clippy/pull/5491)
+* Allow `1 << 0` in [`identity_op`]. [#5602](https://github.com/rust-lang/rust-clippy/pull/5602)
+* Allow `use super::*;` glob imports in [`wildcard_imports`]. [#5564](https://github.com/rust-lang/rust-clippy/pull/5564)
+* Whitelist more words in [`doc_markdown`]. [#5611](https://github.com/rust-lang/rust-clippy/pull/5611)
+* Skip dev and build deps in [`multiple_crate_versions`]. [#5636](https://github.com/rust-lang/rust-clippy/pull/5636)
+* Honor `allow` attribute on arguments in [`ptr_arg`]. [#5647](https://github.com/rust-lang/rust-clippy/pull/5647)
+* Honor lint level attributes for [`redundant_field_names`], [`just_underscores_and_digits`], [`many_single_char_names`]
+and [`similar_names`]. [#5651](https://github.com/rust-lang/rust-clippy/pull/5651)
+* Ignore calls to `len` in [`or_fun_call`]. [#4429](https://github.com/rust-lang/rust-clippy/pull/4429)
+
+### Suggestion Improvements
+
+* Simplify suggestions in [`manual_memcpy`]. [#5536](https://github.com/rust-lang/rust-clippy/pull/5536)
+* Fix suggestion in [`redundant_pattern_matching`] for macros. [#5511](https://github.com/rust-lang/rust-clippy/pull/5511)
+* Avoid suggesting `copied()` for mutable references in [`map_clone`]. [#5530](https://github.com/rust-lang/rust-clippy/pull/5530)
+* Improve help message for [`clone_double_ref`]. [#5547](https://github.com/rust-lang/rust-clippy/pull/5547)
+
+### ICE Fixes
+
+* Fix ICE caused in unwrap module. [#5590](https://github.com/rust-lang/rust-clippy/pull/5590)
+* Fix ICE on rustc test issue-69020-assoc-const-arith-overflow.rs [#5499](https://github.com/rust-lang/rust-clippy/pull/5499)
+
+### Documentation
+
+* Clarify the documentation of [`unnecessary_mut_passed`]. [#5639](https://github.com/rust-lang/rust-clippy/pull/5639)
+* Extend example for [`unneeded_field_pattern`]. [#5541](https://github.com/rust-lang/rust-clippy/pull/5541)
+
+## Rust 1.44
+
+Released 2020-06-04
+
+[204bb9b...891e1a8](https://github.com/rust-lang/rust-clippy/compare/204bb9b...891e1a8)
+
+### New lints
+
+* [`explicit_deref_methods`] [#5226](https://github.com/rust-lang/rust-clippy/pull/5226)
+* [`implicit_saturating_sub`] [#5427](https://github.com/rust-lang/rust-clippy/pull/5427)
+* [`macro_use_imports`] [#5230](https://github.com/rust-lang/rust-clippy/pull/5230)
+* [`verbose_file_reads`] [#5272](https://github.com/rust-lang/rust-clippy/pull/5272)
+* [`future_not_send`] [#5423](https://github.com/rust-lang/rust-clippy/pull/5423)
+* [`redundant_pub_crate`] [#5319](https://github.com/rust-lang/rust-clippy/pull/5319)
+* [`large_const_arrays`] [#5248](https://github.com/rust-lang/rust-clippy/pull/5248)
+* [`result_map_or_into_option`] [#5415](https://github.com/rust-lang/rust-clippy/pull/5415)
+* [`redundant_allocation`] [#5349](https://github.com/rust-lang/rust-clippy/pull/5349)
+* [`fn_address_comparisons`] [#5294](https://github.com/rust-lang/rust-clippy/pull/5294)
+* [`vtable_address_comparisons`] [#5294](https://github.com/rust-lang/rust-clippy/pull/5294)
+
+
+### Moves and Deprecations
+
+* Deprecate [`replace_consts`] lint [#5380](https://github.com/rust-lang/rust-clippy/pull/5380)
+* Move [`cognitive_complexity`] to nursery [#5428](https://github.com/rust-lang/rust-clippy/pull/5428)
+* Move [`useless_transmute`] to nursery [#5364](https://github.com/rust-lang/rust-clippy/pull/5364)
+* Downgrade [`inefficient_to_string`] to pedantic [#5412](https://github.com/rust-lang/rust-clippy/pull/5412)
+* Downgrade [`option_option`] to pedantic [#5401](https://github.com/rust-lang/rust-clippy/pull/5401)
+* Downgrade [`unreadable_literal`] to pedantic [#5419](https://github.com/rust-lang/rust-clippy/pull/5419)
+* Downgrade [`let_unit_value`] to pedantic [#5409](https://github.com/rust-lang/rust-clippy/pull/5409)
+* Downgrade [`trivially_copy_pass_by_ref`] to pedantic [#5410](https://github.com/rust-lang/rust-clippy/pull/5410)
+* Downgrade [`implicit_hasher`] to pedantic [#5411](https://github.com/rust-lang/rust-clippy/pull/5411)
+
+### Enhancements
+
+* On _nightly_ you can now use `cargo clippy --fix -Z unstable-options` to
+ auto-fix lints that support this [#5363](https://github.com/rust-lang/rust-clippy/pull/5363)
+* Make [`redundant_clone`] also trigger on cases where the cloned value is not
+ consumed. [#5304](https://github.com/rust-lang/rust-clippy/pull/5304)
+* Expand [`integer_arithmetic`] to also disallow bit-shifting [#5430](https://github.com/rust-lang/rust-clippy/pull/5430)
+* [`option_as_ref_deref`] now detects more deref cases [#5425](https://github.com/rust-lang/rust-clippy/pull/5425)
+* [`large_enum_variant`] now reports the sizes of the largest and second-largest variants [#5466](https://github.com/rust-lang/rust-clippy/pull/5466)
+* [`bool_comparison`] now also checks for inequality comparisons that can be
+ written more concisely [#5365](https://github.com/rust-lang/rust-clippy/pull/5365)
+* Expand [`clone_on_copy`] to work in method call arguments as well [#5441](https://github.com/rust-lang/rust-clippy/pull/5441)
+* [`redundant_pattern_matching`] now also handles `while let` (see the example
+  after this list) [#5483](https://github.com/rust-lang/rust-clippy/pull/5483)
+* [`integer_arithmetic`] now also lints references to integers [#5329](https://github.com/rust-lang/rust-clippy/pull/5329)
+* Expand [`float_cmp_const`] to also work on arrays [#5345](https://github.com/rust-lang/rust-clippy/pull/5345)
+* Trigger [`map_flatten`] when map is called on an `Option` [#5473](https://github.com/rust-lang/rust-clippy/pull/5473)
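+
+A small sketch of the `while let` case now covered by
+`redundant_pattern_matching` (illustrative function names):
+
+```rust
+use std::sync::mpsc::Receiver;
+
+// Flagged: the pattern only checks whether `recv` succeeded.
+pub fn drain_before(rx: &Receiver<u32>) {
+    while let Ok(_) = rx.recv() {}
+}
+
+// Rough shape of the suggested fix.
+pub fn drain_after(rx: &Receiver<u32>) {
+    while rx.recv().is_ok() {}
+}
+```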
+
+### False Positive Fixes
+
+* [`many_single_char_names`] [#5468](https://github.com/rust-lang/rust-clippy/pull/5468)
+* [`should_implement_trait`] [#5437](https://github.com/rust-lang/rust-clippy/pull/5437)
+* [`unused_self`] [#5387](https://github.com/rust-lang/rust-clippy/pull/5387)
+* [`redundant_clone`] [#5453](https://github.com/rust-lang/rust-clippy/pull/5453)
+* [`precedence`] [#5445](https://github.com/rust-lang/rust-clippy/pull/5445)
+* [`suspicious_op_assign_impl`] [#5424](https://github.com/rust-lang/rust-clippy/pull/5424)
+* [`needless_lifetimes`] [#5293](https://github.com/rust-lang/rust-clippy/pull/5293)
+* [`redundant_pattern`] [#5287](https://github.com/rust-lang/rust-clippy/pull/5287)
+* [`inconsistent_digit_grouping`] [#5451](https://github.com/rust-lang/rust-clippy/pull/5451)
+
+
+### Suggestion Improvements
+
+* Improved [`question_mark`] lint suggestion so that it doesn't add redundant `as_ref()` [#5481](https://github.com/rust-lang/rust-clippy/pull/5481)
+* Improve the suggested placeholder in [`option_map_unit_fn`] [#5292](https://github.com/rust-lang/rust-clippy/pull/5292)
+* Improve suggestion for [`match_single_binding`] when triggered inside a closure [#5350](https://github.com/rust-lang/rust-clippy/pull/5350)
+
+### ICE Fixes
+
+* Handle the unstable `trivial_bounds` feature [#5296](https://github.com/rust-lang/rust-clippy/pull/5296)
+* `shadow_*` lints [#5297](https://github.com/rust-lang/rust-clippy/pull/5297)
+
+### Documentation
+
+* Fix documentation generation for configurable lints [#5353](https://github.com/rust-lang/rust-clippy/pull/5353)
+* Update documentation for [`new_ret_no_self`] [#5448](https://github.com/rust-lang/rust-clippy/pull/5448)
+* The documentation for [`option_option`] now suggests using a tri-state enum [#5403](https://github.com/rust-lang/rust-clippy/pull/5403)
+* Fix bit mask example in [`verbose_bit_mask`] documentation [#5454](https://github.com/rust-lang/rust-clippy/pull/5454)
+* [`wildcard_imports`] documentation now mentions that `use ...::prelude::*` is
+ not linted [#5312](https://github.com/rust-lang/rust-clippy/pull/5312)
+
+## Rust 1.43
+
+Released 2020-04-23
+
+[4ee1206...204bb9b](https://github.com/rust-lang/rust-clippy/compare/4ee1206...204bb9b)
+
+### New lints
+
+* [`imprecise_flops`] [#4897](https://github.com/rust-lang/rust-clippy/pull/4897)
+* [`suboptimal_flops`] [#4897](https://github.com/rust-lang/rust-clippy/pull/4897)
+* [`wildcard_imports`] [#5029](https://github.com/rust-lang/rust-clippy/pull/5029)
+* [`single_component_path_imports`] [#5058](https://github.com/rust-lang/rust-clippy/pull/5058)
+* [`match_single_binding`] [#5061](https://github.com/rust-lang/rust-clippy/pull/5061)
+* [`let_underscore_lock`] [#5101](https://github.com/rust-lang/rust-clippy/pull/5101)
+* [`struct_excessive_bools`] [#5125](https://github.com/rust-lang/rust-clippy/pull/5125)
+* [`fn_params_excessive_bools`] [#5125](https://github.com/rust-lang/rust-clippy/pull/5125)
+* [`option_env_unwrap`] [#5148](https://github.com/rust-lang/rust-clippy/pull/5148)
+* [`lossy_float_literal`] [#5202](https://github.com/rust-lang/rust-clippy/pull/5202)
+* [`rest_pat_in_fully_bound_structs`] [#5258](https://github.com/rust-lang/rust-clippy/pull/5258)
+
+### Moves and Deprecations
+
+* Move [`unneeded_field_pattern`] to pedantic group [#5200](https://github.com/rust-lang/rust-clippy/pull/5200)
+
+### Enhancements
+
+* Make [`missing_errors_doc`] lint also trigger on `async` functions
+ [#5181](https://github.com/rust-lang/rust-clippy/pull/5181)
+* Add more constants to [`approx_constant`] [#5193](https://github.com/rust-lang/rust-clippy/pull/5193)
+* Extend [`question_mark`] lint [#5266](https://github.com/rust-lang/rust-clippy/pull/5266)
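+
+As a minimal sketch of the basic pattern `question_mark` targets (the exact
+cases added by the PR above are not spelled out here, and the names below are
+illustrative only):
+
+```rust
+fn double_first(values: &[i32]) -> Option<i32> {
+    let first = values.first().copied();
+    // An explicit `is_none()` check followed by `return None` is what this
+    // lint flags; Clippy suggests the `?` operator instead, e.g.
+    // `let first = values.first().copied()?;`.
+    if first.is_none() {
+        return None;
+    }
+    Some(first.unwrap() * 2)
+}
+
+fn main() {
+    assert_eq!(double_first(&[3, 4]), Some(6));
+    assert_eq!(double_first(&[]), None);
+}
+```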
+
+### False Positive Fixes
+
+* [`use_debug`] [#5047](https://github.com/rust-lang/rust-clippy/pull/5047)
+* [`unnecessary_unwrap`] [#5132](https://github.com/rust-lang/rust-clippy/pull/5132)
+* [`zero_prefixed_literal`] [#5170](https://github.com/rust-lang/rust-clippy/pull/5170)
+* [`missing_const_for_fn`] [#5216](https://github.com/rust-lang/rust-clippy/pull/5216)
+
+### Suggestion Improvements
+
+* Improve suggestion when blocks of code are suggested [#5134](https://github.com/rust-lang/rust-clippy/pull/5134)
+
+### ICE Fixes
+
+* `misc_early` lints [#5129](https://github.com/rust-lang/rust-clippy/pull/5129)
+* [`missing_errors_doc`] [#5213](https://github.com/rust-lang/rust-clippy/pull/5213)
+* Fix ICE when evaluating `usize`s [#5256](https://github.com/rust-lang/rust-clippy/pull/5256)
+
+### Documentation
+
+* Improve documentation of [`iter_nth_zero`]
+* Add documentation pages for stable releases [#5171](https://github.com/rust-lang/rust-clippy/pull/5171)
+
+### Others
+
+* Clippy now completely runs on GitHub Actions [#5190](https://github.com/rust-lang/rust-clippy/pull/5190)
+
+
+## Rust 1.42
+
+Released 2020-03-12
+
+[69f99e7...4ee1206](https://github.com/rust-lang/rust-clippy/compare/69f99e7...4ee1206)
+
+### New lints
+
+* [`filetype_is_file`] [#4543](https://github.com/rust-lang/rust-clippy/pull/4543)
+* [`let_underscore_must_use`] [#4823](https://github.com/rust-lang/rust-clippy/pull/4823)
+* [`modulo_arithmetic`] [#4867](https://github.com/rust-lang/rust-clippy/pull/4867)
+* [`mem_replace_with_default`] [#4881](https://github.com/rust-lang/rust-clippy/pull/4881)
+* [`mutable_key_type`] [#4885](https://github.com/rust-lang/rust-clippy/pull/4885)
+* [`option_as_ref_deref`] [#4945](https://github.com/rust-lang/rust-clippy/pull/4945)
+* [`wildcard_in_or_patterns`] [#4960](https://github.com/rust-lang/rust-clippy/pull/4960)
+* [`iter_nth_zero`] [#4966](https://github.com/rust-lang/rust-clippy/pull/4966)
+* `invalid_atomic_ordering` [#4999](https://github.com/rust-lang/rust-clippy/pull/4999)
+* [`skip_while_next`] [#5067](https://github.com/rust-lang/rust-clippy/pull/5067)
+
+### Moves and Deprecations
+
+* Move [`transmute_float_to_int`] from nursery to complexity group
+ [#5015](https://github.com/rust-lang/rust-clippy/pull/5015)
+* Move [`range_plus_one`] to pedantic group [#5057](https://github.com/rust-lang/rust-clippy/pull/5057)
+* Move [`debug_assert_with_mut_call`] to nursery group [#5106](https://github.com/rust-lang/rust-clippy/pull/5106)
+* Deprecate `unused_label` [#4930](https://github.com/rust-lang/rust-clippy/pull/4930)
+
+### Enhancements
+
+* Lint vectored IO in [`unused_io_amount`] [#5027](https://github.com/rust-lang/rust-clippy/pull/5027)
+* Make [`vec_box`] configurable by adding a size threshold [#5081](https://github.com/rust-lang/rust-clippy/pull/5081)
+* Also lint constants in [`cmp_nan`] [#4910](https://github.com/rust-lang/rust-clippy/pull/4910)
+* Fix false negative in [`expect_fun_call`] [#4915](https://github.com/rust-lang/rust-clippy/pull/4915)
+* Fix false negative in [`redundant_clone`] [#5017](https://github.com/rust-lang/rust-clippy/pull/5017)
+
+### False Positive Fixes
+
+* [`map_clone`] [#4937](https://github.com/rust-lang/rust-clippy/pull/4937)
+* [`replace_consts`] [#4977](https://github.com/rust-lang/rust-clippy/pull/4977)
+* [`let_and_return`] [#5008](https://github.com/rust-lang/rust-clippy/pull/5008)
+* [`eq_op`] [#5079](https://github.com/rust-lang/rust-clippy/pull/5079)
+* [`possible_missing_comma`] [#5083](https://github.com/rust-lang/rust-clippy/pull/5083)
+* [`debug_assert_with_mut_call`] [#5106](https://github.com/rust-lang/rust-clippy/pull/5106)
+* Don't trigger [`let_underscore_must_use`] in external macros
+ [#5082](https://github.com/rust-lang/rust-clippy/pull/5082)
+* Don't trigger [`empty_loop`] in `no_std` crates [#5086](https://github.com/rust-lang/rust-clippy/pull/5086)
+
+### Suggestion Improvements
+
+* `option_map_unwrap_or` [#4634](https://github.com/rust-lang/rust-clippy/pull/4634)
+* [`wildcard_enum_match_arm`] [#4934](https://github.com/rust-lang/rust-clippy/pull/4934)
+* [`cognitive_complexity`] [#4935](https://github.com/rust-lang/rust-clippy/pull/4935)
+* [`decimal_literal_representation`] [#4956](https://github.com/rust-lang/rust-clippy/pull/4956)
+* `unknown_clippy_lints` [#4963](https://github.com/rust-lang/rust-clippy/pull/4963)
+* [`explicit_into_iter_loop`] [#4978](https://github.com/rust-lang/rust-clippy/pull/4978)
+* [`useless_attribute`] [#5022](https://github.com/rust-lang/rust-clippy/pull/5022)
+* `if_let_some_result` [#5032](https://github.com/rust-lang/rust-clippy/pull/5032)
+
+### ICE Fixes
+
+* [`unsound_collection_transmute`] [#4975](https://github.com/rust-lang/rust-clippy/pull/4975)
+
+### Documentation
+
+* Improve documentation of [`empty_enum`], [`replace_consts`], [`redundant_clone`], and [`iterator_step_by_zero`]
+
+
+## Rust 1.41
+
+Released 2020-01-30
+
+[c8e3cfb...69f99e7](https://github.com/rust-lang/rust-clippy/compare/c8e3cfb...69f99e7)
+
+* New Lints:
+ * [`exit`] [#4697](https://github.com/rust-lang/rust-clippy/pull/4697)
+ * [`to_digit_is_some`] [#4801](https://github.com/rust-lang/rust-clippy/pull/4801)
+ * [`tabs_in_doc_comments`] [#4806](https://github.com/rust-lang/rust-clippy/pull/4806)
+ * [`large_stack_arrays`] [#4807](https://github.com/rust-lang/rust-clippy/pull/4807)
+ * [`same_functions_in_if_condition`] [#4814](https://github.com/rust-lang/rust-clippy/pull/4814)
+ * [`zst_offset`] [#4816](https://github.com/rust-lang/rust-clippy/pull/4816)
+ * [`as_conversions`] [#4821](https://github.com/rust-lang/rust-clippy/pull/4821)
+ * [`missing_errors_doc`] [#4884](https://github.com/rust-lang/rust-clippy/pull/4884)
+ * [`transmute_float_to_int`] [#4889](https://github.com/rust-lang/rust-clippy/pull/4889)
+* Remove plugin interface, see
+ [Inside Rust Blog](https://blog.rust-lang.org/inside-rust/2019/11/04/Clippy-removes-plugin-interface.html) for
+ details [#4714](https://github.com/rust-lang/rust-clippy/pull/4714)
+* Move [`use_self`] to nursery group [#4863](https://github.com/rust-lang/rust-clippy/pull/4863)
+* Deprecate `into_iter_on_array` [#4788](https://github.com/rust-lang/rust-clippy/pull/4788)
+* Expand [`string_lit_as_bytes`] to also trigger when the literal has escapes
+ [#4808](https://github.com/rust-lang/rust-clippy/pull/4808)
+* Fix false positive in `comparison_chain` [#4842](https://github.com/rust-lang/rust-clippy/pull/4842)
+* Fix false positive in `while_immutable_condition` [#4730](https://github.com/rust-lang/rust-clippy/pull/4730)
+* Fix false positive in `explicit_counter_loop` [#4803](https://github.com/rust-lang/rust-clippy/pull/4803)
+* Fix false positive in `must_use_candidate` [#4794](https://github.com/rust-lang/rust-clippy/pull/4794)
+* Fix false positive in `print_with_newline` and `write_with_newline`
+ [#4769](https://github.com/rust-lang/rust-clippy/pull/4769)
+* Fix false positive in `derive_hash_xor_eq` [#4766](https://github.com/rust-lang/rust-clippy/pull/4766)
+* Fix false positive in `missing_inline_in_public_items` [#4870](https://github.com/rust-lang/rust-clippy/pull/4870)
+* Fix false positive in `string_add` [#4880](https://github.com/rust-lang/rust-clippy/pull/4880)
+* Fix false positive in `float_arithmetic` [#4851](https://github.com/rust-lang/rust-clippy/pull/4851)
+* Fix false positive in `cast_sign_loss` [#4883](https://github.com/rust-lang/rust-clippy/pull/4883)
+* Fix false positive in `manual_swap` [#4877](https://github.com/rust-lang/rust-clippy/pull/4877)
+* Fix ICEs occurring while checking some block expressions [#4772](https://github.com/rust-lang/rust-clippy/pull/4772)
+* Fix ICE in `use_self` [#4776](https://github.com/rust-lang/rust-clippy/pull/4776)
+* Fix ICEs related to `const_generics` [#4780](https://github.com/rust-lang/rust-clippy/pull/4780)
+* Display help when running `clippy-driver` without arguments, instead of ICEing
+ [#4810](https://github.com/rust-lang/rust-clippy/pull/4810)
+* Clippy has its own ICE message now [#4588](https://github.com/rust-lang/rust-clippy/pull/4588)
+* Show deprecated lints in the documentation again [#4757](https://github.com/rust-lang/rust-clippy/pull/4757)
+* Improve Documentation by adding positive examples to some lints
+ [#4832](https://github.com/rust-lang/rust-clippy/pull/4832)
+
+## Rust 1.40
+
+Released 2019-12-19
+
+[4e7e71b...c8e3cfb](https://github.com/rust-lang/rust-clippy/compare/4e7e71b...c8e3cfb)
+
+* New Lints:
+ * [`unneeded_wildcard_pattern`] [#4537](https://github.com/rust-lang/rust-clippy/pull/4537)
+ * [`needless_doctest_main`] [#4603](https://github.com/rust-lang/rust-clippy/pull/4603)
+ * [`suspicious_unary_op_formatting`] [#4615](https://github.com/rust-lang/rust-clippy/pull/4615)
+ * [`debug_assert_with_mut_call`] [#4680](https://github.com/rust-lang/rust-clippy/pull/4680)
+ * [`unused_self`] [#4619](https://github.com/rust-lang/rust-clippy/pull/4619)
+ * [`inefficient_to_string`] [#4683](https://github.com/rust-lang/rust-clippy/pull/4683)
+ * [`must_use_unit`] [#4560](https://github.com/rust-lang/rust-clippy/pull/4560)
+ * [`must_use_candidate`] [#4560](https://github.com/rust-lang/rust-clippy/pull/4560)
+ * [`double_must_use`] [#4560](https://github.com/rust-lang/rust-clippy/pull/4560)
+ * [`comparison_chain`] [#4569](https://github.com/rust-lang/rust-clippy/pull/4569)
+ * [`unsound_collection_transmute`] [#4592](https://github.com/rust-lang/rust-clippy/pull/4592)
+ * [`panic`] [#4657](https://github.com/rust-lang/rust-clippy/pull/4657)
+ * [`unreachable`] [#4657](https://github.com/rust-lang/rust-clippy/pull/4657)
+ * [`todo`] [#4657](https://github.com/rust-lang/rust-clippy/pull/4657)
+ * `option_expect_used` [#4657](https://github.com/rust-lang/rust-clippy/pull/4657)
+ * `result_expect_used` [#4657](https://github.com/rust-lang/rust-clippy/pull/4657)
+* Move `redundant_clone` to perf group [#4509](https://github.com/rust-lang/rust-clippy/pull/4509)
+* Move `manual_mul_add` to nursery group [#4736](https://github.com/rust-lang/rust-clippy/pull/4736)
+* Expand `unit_cmp` to also work with `assert_eq!`, `debug_assert_eq!`, `assert_ne!` and `debug_assert_ne!` [#4613](https://github.com/rust-lang/rust-clippy/pull/4613)
+* Expand `integer_arithmetic` to also detect mutating arithmetic like `+=` [#4585](https://github.com/rust-lang/rust-clippy/pull/4585)
+* Fix false positive in `nonminimal_bool` [#4568](https://github.com/rust-lang/rust-clippy/pull/4568)
+* Fix false positive in `missing_safety_doc` [#4611](https://github.com/rust-lang/rust-clippy/pull/4611)
+* Fix false positive in `cast_sign_loss` [#4614](https://github.com/rust-lang/rust-clippy/pull/4614)
+* Fix false positive in `redundant_clone` [#4509](https://github.com/rust-lang/rust-clippy/pull/4509)
+* Fix false positive in `try_err` [#4721](https://github.com/rust-lang/rust-clippy/pull/4721)
+* Fix false positive in `toplevel_ref_arg` [#4570](https://github.com/rust-lang/rust-clippy/pull/4570)
+* Fix false positive in `multiple_inherent_impl` [#4593](https://github.com/rust-lang/rust-clippy/pull/4593)
+* Improve more suggestions and tests in preparation for the unstable `cargo fix --clippy` [#4575](https://github.com/rust-lang/rust-clippy/pull/4575)
+* Improve suggestion for `zero_ptr` [#4599](https://github.com/rust-lang/rust-clippy/pull/4599)
+* Improve suggestion for `explicit_counter_loop` [#4691](https://github.com/rust-lang/rust-clippy/pull/4691)
+* Improve suggestion for `mul_add` [#4602](https://github.com/rust-lang/rust-clippy/pull/4602)
+* Improve suggestion for `assertions_on_constants` [#4635](https://github.com/rust-lang/rust-clippy/pull/4635)
+* Fix ICE in `use_self` [#4671](https://github.com/rust-lang/rust-clippy/pull/4671)
+* Fix ICE when encountering const casts [#4590](https://github.com/rust-lang/rust-clippy/pull/4590)
+
+## Rust 1.39
+
+Released 2019-11-07
+
+[3aea860...4e7e71b](https://github.com/rust-lang/rust-clippy/compare/3aea860...4e7e71b)
+
+* New Lints:
+ * [`uninit_assumed_init`] [#4479](https://github.com/rust-lang/rust-clippy/pull/4479)
+ * [`flat_map_identity`] [#4231](https://github.com/rust-lang/rust-clippy/pull/4231)
+ * [`missing_safety_doc`] [#4535](https://github.com/rust-lang/rust-clippy/pull/4535)
+ * [`mem_replace_with_uninit`] [#4511](https://github.com/rust-lang/rust-clippy/pull/4511)
+ * [`suspicious_map`] [#4394](https://github.com/rust-lang/rust-clippy/pull/4394)
+ * `option_and_then_some` [#4386](https://github.com/rust-lang/rust-clippy/pull/4386)
+ * [`manual_saturating_arithmetic`] [#4498](https://github.com/rust-lang/rust-clippy/pull/4498)
+* Deprecate `unused_collect` lint. This is fully covered by rustc's `#[must_use]` on `collect` [#4348](https://github.com/rust-lang/rust-clippy/pull/4348)
+* Move `type_repetition_in_bounds` to pedantic group [#4403](https://github.com/rust-lang/rust-clippy/pull/4403)
+* Move `cast_lossless` to pedantic group [#4539](https://github.com/rust-lang/rust-clippy/pull/4539)
+* `temporary_cstring_as_ptr` now catches more cases [#4425](https://github.com/rust-lang/rust-clippy/pull/4425)
+* `use_self` now works in constructors, too [#4525](https://github.com/rust-lang/rust-clippy/pull/4525)
+* `cargo_common_metadata` now checks for license files [#4518](https://github.com/rust-lang/rust-clippy/pull/4518)
+* `cognitive_complexity` now includes the measured complexity in the warning message [#4469](https://github.com/rust-lang/rust-clippy/pull/4469)
+* Fix false positives in `block_in_if_*` lints [#4458](https://github.com/rust-lang/rust-clippy/pull/4458)
+* Fix false positive in `cast_lossless` [#4473](https://github.com/rust-lang/rust-clippy/pull/4473)
+* Fix false positive in `clone_on_copy` [#4411](https://github.com/rust-lang/rust-clippy/pull/4411)
+* Fix false positive in `deref_addrof` [#4487](https://github.com/rust-lang/rust-clippy/pull/4487)
+* Fix false positive in `too_many_lines` [#4490](https://github.com/rust-lang/rust-clippy/pull/4490)
+* Fix false positive in `new_ret_no_self` [#4365](https://github.com/rust-lang/rust-clippy/pull/4365)
+* Fix false positive in `manual_swap` [#4478](https://github.com/rust-lang/rust-clippy/pull/4478)
+* Fix false positive in `missing_const_for_fn` [#4450](https://github.com/rust-lang/rust-clippy/pull/4450)
+* Fix false positive in `extra_unused_lifetimes` [#4477](https://github.com/rust-lang/rust-clippy/pull/4477)
+* Fix false positive in `inherent_to_string` [#4460](https://github.com/rust-lang/rust-clippy/pull/4460)
+* Fix false positive in `map_entry` [#4495](https://github.com/rust-lang/rust-clippy/pull/4495)
+* Fix false positive in `unused_unit` [#4445](https://github.com/rust-lang/rust-clippy/pull/4445)
+* Fix false positive in `redundant_pattern` [#4489](https://github.com/rust-lang/rust-clippy/pull/4489)
+* Fix false positive in `wrong_self_convention` [#4369](https://github.com/rust-lang/rust-clippy/pull/4369)
+* Improve various suggestions and tests in preparation for the unstable `cargo fix --clippy` [#4558](https://github.com/rust-lang/rust-clippy/pull/4558)
+* Improve suggestions for `redundant_pattern_matching` [#4352](https://github.com/rust-lang/rust-clippy/pull/4352)
+* Improve suggestions for `explicit_write` [#4544](https://github.com/rust-lang/rust-clippy/pull/4544)
+* Improve suggestion for `or_fun_call` [#4522](https://github.com/rust-lang/rust-clippy/pull/4522)
+* Improve suggestion for `match_as_ref` [#4446](https://github.com/rust-lang/rust-clippy/pull/4446)
+* Improve suggestion for `unnecessary_fold_span` [#4382](https://github.com/rust-lang/rust-clippy/pull/4382)
+* Add suggestions for `unseparated_literal_suffix` [#4401](https://github.com/rust-lang/rust-clippy/pull/4401)
+* Add suggestions for `char_lit_as_u8` [#4418](https://github.com/rust-lang/rust-clippy/pull/4418)
+
+## Rust 1.38
+
+Released 2019-09-26
+
+[e3cb40e...3aea860](https://github.com/rust-lang/rust-clippy/compare/e3cb40e...3aea860)
+
+* New Lints:
+ * [`main_recursion`] [#4203](https://github.com/rust-lang/rust-clippy/pull/4203)
+ * [`inherent_to_string`] [#4259](https://github.com/rust-lang/rust-clippy/pull/4259)
+ * [`inherent_to_string_shadow_display`] [#4259](https://github.com/rust-lang/rust-clippy/pull/4259)
+ * [`type_repetition_in_bounds`] [#3766](https://github.com/rust-lang/rust-clippy/pull/3766)
+ * [`try_err`] [#4222](https://github.com/rust-lang/rust-clippy/pull/4222)
+* Move `{unnecessary,panicking}_unwrap` out of nursery [#4307](https://github.com/rust-lang/rust-clippy/pull/4307)
+* Extend the `use_self` lint to suggest uses of `Self::Variant` [#4308](https://github.com/rust-lang/rust-clippy/pull/4308)
+* Improve suggestion for needless return [#4262](https://github.com/rust-lang/rust-clippy/pull/4262)
+* Add auto-fixable suggestion for `let_unit` [#4337](https://github.com/rust-lang/rust-clippy/pull/4337)
+* Fix false positive in `pub_enum_variant_names` and `enum_variant_names` [#4345](https://github.com/rust-lang/rust-clippy/pull/4345)
+* Fix false positive in `cast_ptr_alignment` [#4257](https://github.com/rust-lang/rust-clippy/pull/4257)
+* Fix false positive in `string_lit_as_bytes` [#4233](https://github.com/rust-lang/rust-clippy/pull/4233)
+* Fix false positive in `needless_lifetimes` [#4266](https://github.com/rust-lang/rust-clippy/pull/4266)
+* Fix false positive in `float_cmp` [#4275](https://github.com/rust-lang/rust-clippy/pull/4275)
+* Fix false positives in `needless_return` [#4274](https://github.com/rust-lang/rust-clippy/pull/4274)
+* Fix false negative in `match_same_arms` [#4246](https://github.com/rust-lang/rust-clippy/pull/4246)
+* Fix incorrect suggestion for `needless_bool` [#4335](https://github.com/rust-lang/rust-clippy/pull/4335)
+* Improve suggestion for `cast_ptr_alignment` [#4257](https://github.com/rust-lang/rust-clippy/pull/4257)
+* Improve suggestion for `single_char_literal` [#4361](https://github.com/rust-lang/rust-clippy/pull/4361)
+* Improve suggestion for `len_zero` [#4314](https://github.com/rust-lang/rust-clippy/pull/4314)
+* Fix ICE in `implicit_hasher` [#4268](https://github.com/rust-lang/rust-clippy/pull/4268)
+* Fix allow bug in `trivially_copy_pass_by_ref` [#4250](https://github.com/rust-lang/rust-clippy/pull/4250)
+
+## Rust 1.37
+
+Released 2019-08-15
+
+[082cfa7...e3cb40e](https://github.com/rust-lang/rust-clippy/compare/082cfa7...e3cb40e)
+
+* New Lints:
+ * [`checked_conversions`] [#4088](https://github.com/rust-lang/rust-clippy/pull/4088)
+ * [`get_last_with_len`] [#3832](https://github.com/rust-lang/rust-clippy/pull/3832)
+ * [`integer_division`] [#4195](https://github.com/rust-lang/rust-clippy/pull/4195)
+* Renamed Lint: `const_static_lifetime` is now called [`redundant_static_lifetimes`].
+ The lint now covers statics in addition to consts [#4162](https://github.com/rust-lang/rust-clippy/pull/4162)
+* [`match_same_arms`] now warns for all identical arms, instead of only the first one [#4102](https://github.com/rust-lang/rust-clippy/pull/4102)
+* [`needless_return`] now works with void functions [#4220](https://github.com/rust-lang/rust-clippy/pull/4220)
+* Fix false positive in [`redundant_closure`] [#4190](https://github.com/rust-lang/rust-clippy/pull/4190)
+* Fix false positive in [`useless_attribute`] [#4107](https://github.com/rust-lang/rust-clippy/pull/4107)
+* Fix incorrect suggestion for [`float_cmp`] [#4214](https://github.com/rust-lang/rust-clippy/pull/4214)
+* Add suggestions for [`print_with_newline`] and [`write_with_newline`] [#4136](https://github.com/rust-lang/rust-clippy/pull/4136)
+* Improve suggestions for `option_map_unwrap_or_else` and `result_map_unwrap_or_else` [#4164](https://github.com/rust-lang/rust-clippy/pull/4164)
+* Improve suggestions for [`non_ascii_literal`] [#4119](https://github.com/rust-lang/rust-clippy/pull/4119)
+* Improve diagnostics for [`let_and_return`] [#4137](https://github.com/rust-lang/rust-clippy/pull/4137)
+* Improve diagnostics for [`trivially_copy_pass_by_ref`] [#4071](https://github.com/rust-lang/rust-clippy/pull/4071)
+* Add macro check for [`unreadable_literal`] [#4099](https://github.com/rust-lang/rust-clippy/pull/4099)
+
+## Rust 1.36
+
+Released 2019-07-04
+
+[eb9f9b1...082cfa7](https://github.com/rust-lang/rust-clippy/compare/eb9f9b1...082cfa7)
+
+* New lints: [`find_map`], [`filter_map_next`] [#4039](https://github.com/rust-lang/rust-clippy/pull/4039)
+* New lint: [`path_buf_push_overwrite`] [#3954](https://github.com/rust-lang/rust-clippy/pull/3954)
+* Move `path_buf_push_overwrite` to the nursery [#4013](https://github.com/rust-lang/rust-clippy/pull/4013)
+* Split [`redundant_closure`] into [`redundant_closure`] and [`redundant_closure_for_method_calls`] [#4110](https://github.com/rust-lang/rust-clippy/pull/4101)
+* Allow allowing of [`toplevel_ref_arg`] lint [#4007](https://github.com/rust-lang/rust-clippy/pull/4007)
+* Fix false negative in [`or_fun_call`] pertaining to nested constructors [#4084](https://github.com/rust-lang/rust-clippy/pull/4084)
+* Fix false positive in [`or_fun_call`] pertaining to enum variant constructors [#4018](https://github.com/rust-lang/rust-clippy/pull/4018)
+* Fix false positive in [`useless_let_if_seq`] pertaining to interior mutability [#4035](https://github.com/rust-lang/rust-clippy/pull/4035)
+* Fix false positive in [`redundant_closure`] pertaining to non-function types [#4008](https://github.com/rust-lang/rust-clippy/pull/4008)
+* Fix false positive in [`let_and_return`] pertaining to attributes on `let`s [#4024](https://github.com/rust-lang/rust-clippy/pull/4024)
+* Fix false positive in [`module_name_repetitions`] lint pertaining to attributes [#4006](https://github.com/rust-lang/rust-clippy/pull/4006)
+* Fix false positive on [`assertions_on_constants`] pertaining to `debug_assert!` [#3989](https://github.com/rust-lang/rust-clippy/pull/3989)
+* Improve suggestion in [`map_clone`] to suggest `.copied()` where applicable [#3970](https://github.com/rust-lang/rust-clippy/pull/3970) [#4043](https://github.com/rust-lang/rust-clippy/pull/4043)
+* Improve suggestion for [`search_is_some`] [#4049](https://github.com/rust-lang/rust-clippy/pull/4049)
+* Improve suggestion applicability for [`naive_bytecount`] [#3984](https://github.com/rust-lang/rust-clippy/pull/3984)
+* Improve suggestion applicability for [`while_let_loop`] [#3975](https://github.com/rust-lang/rust-clippy/pull/3975)
+* Improve diagnostics for [`too_many_arguments`] [#4053](https://github.com/rust-lang/rust-clippy/pull/4053)
+* Improve diagnostics for [`cast_lossless`] [#4021](https://github.com/rust-lang/rust-clippy/pull/4021)
+* Deal with macro checks in desugarings better [#4082](https://github.com/rust-lang/rust-clippy/pull/4082)
+* Add macro check for [`unnecessary_cast`] [#4026](https://github.com/rust-lang/rust-clippy/pull/4026)
+* Remove the "Known problems" section from [`approx_constant`]'s documentation [#4027](https://github.com/rust-lang/rust-clippy/pull/4027)
+* Fix ICE in [`suspicious_else_formatting`] [#3960](https://github.com/rust-lang/rust-clippy/pull/3960)
+* Fix ICE in [`decimal_literal_representation`] [#3931](https://github.com/rust-lang/rust-clippy/pull/3931)
+
+
+## Rust 1.35
+
+Released 2019-05-20
+
+[1fac380..37f5c1e](https://github.com/rust-lang/rust-clippy/compare/1fac380...37f5c1e)
+
+* New lint: `drop_bounds` to detect `T: Drop` bounds
+* Split [`redundant_closure`] into [`redundant_closure`] and [`redundant_closure_for_method_calls`] [#4110](https://github.com/rust-lang/rust-clippy/pull/4101)
+* Rename `cyclomatic_complexity` to [`cognitive_complexity`], start work on making lint more practical for Rust code
+* Move [`get_unwrap`] to the restriction category
+* Improve suggestions for [`iter_cloned_collect`]
+* Improve suggestions for [`cast_lossless`] to suggest suffixed literals
+* Fix false positives in [`print_with_newline`] and [`write_with_newline`] pertaining to raw strings
+* Fix false positive in [`needless_range_loop`] pertaining to structs without a `.iter()`
+* Fix false positive in [`bool_comparison`] pertaining to non-bool types
+* Fix false positive in [`redundant_closure`] pertaining to differences in borrows
+* Fix false positive in `option_map_unwrap_or` on non-copy types
+* Fix false positives in [`missing_const_for_fn`] pertaining to macros and trait method impls
+* Fix false positive in [`needless_pass_by_value`] pertaining to procedural macros
+* Fix false positive in [`needless_continue`] pertaining to loop labels
+* Fix false positive for [`boxed_local`] pertaining to arguments moved into closures
+* Fix false positive for [`use_self`] in nested functions
+* Fix suggestion for [`expect_fun_call`] [#3846](https://github.com/rust-lang/rust-clippy/pull/3846)
+* Fix suggestion for [`explicit_counter_loop`] to deal with parenthesizing range variables
+* Fix suggestion for [`single_char_pattern`] to correctly escape single quotes
+* Avoid triggering [`redundant_closure`] in macros
+* ICE fixes: [#3805](https://github.com/rust-lang/rust-clippy/pull/3805), [#3772](https://github.com/rust-lang/rust-clippy/pull/3772), [#3741](https://github.com/rust-lang/rust-clippy/pull/3741)
+
+## Rust 1.34
+
+Released 2019-04-10
+
+[1b89724...1fac380](https://github.com/rust-lang/rust-clippy/compare/1b89724...1fac380)
+
+* New lint: [`assertions_on_constants`] to detect for example `assert!(true)`
+* New lint: [`dbg_macro`] to detect uses of the `dbg!` macro
+* New lint: [`missing_const_for_fn`] that can suggest functions to be made `const`
+* New lint: [`too_many_lines`] to detect functions with excessive LOC. It can be
+ configured using the `too-many-lines-threshold` configuration.
+* New lint: [`wildcard_enum_match_arm`] to check for wildcard enum matches using `_`
+* Expand `redundant_closure` to also work for methods (not only functions)
+* Fix ICEs in `vec_box`, `needless_pass_by_value` and `implicit_hasher`
+* Fix false positive in `cast_sign_loss`
+* Fix false positive in `integer_arithmetic`
+* Fix false positive in `unit_arg`
+* Fix false positives in `implicit_return`
+* Add suggestion to `explicit_write`
+* Improve suggestions for `question_mark` lint
+* Fix incorrect suggestion for `cast_lossless`
+* Fix incorrect suggestion for `expect_fun_call`
+* Fix incorrect suggestion for `needless_bool`
+* Fix incorrect suggestion for `needless_range_loop`
+* Fix incorrect suggestion for `use_self`
+* Fix incorrect suggestion for `while_let_on_iterator`
+* Clippy is now slightly easier to invoke in non-cargo contexts. See
+ [#3665][pull3665] for more details.
+* We now have [improved documentation][adding_lints] on how to add new lints
+
+## Rust 1.33
+
+Released 2019-02-26
+
+[b2601be...1b89724](https://github.com/rust-lang/rust-clippy/compare/b2601be...1b89724)
+
+* New lints: [`implicit_return`], [`vec_box`], [`cast_ref_to_mut`]
+* The `rust-clippy` repository is now part of the `rust-lang` org.
+* Rename `stutter` to `module_name_repetitions`
+* Merge `new_without_default_derive` into `new_without_default` lint
+* Move `large_digit_groups` from `style` group to `pedantic`
+* Expand `bool_comparison` to check for `<`, `<=`, `>`, `>=`, and `!=`
+ comparisons against booleans
+* Expand `no_effect` to detect writes to constants such as `A_CONST.field = 2`
+* Expand `redundant_clone` to work on struct fields
+* Expand `suspicious_else_formatting` to detect `if .. {..} {..}`
+* Expand `use_self` to work on tuple structs and also in local macros
+* Fix ICE in `result_map_unit_fn` and `option_map_unit_fn`
+* Fix false positives in `implicit_return`
+* Fix false positives in `use_self`
+* Fix false negative in `clone_on_copy`
+* Fix false positive in `doc_markdown`
+* Fix false positive in `empty_loop`
+* Fix false positive in `if_same_then_else`
+* Fix false positive in `infinite_iter`
+* Fix false positive in `question_mark`
+* Fix false positive in `useless_asref`
+* Fix false positive in `wildcard_dependencies`
+* Fix false positive in `write_with_newline`
+* Add suggestion to `explicit_write`
+* Improve suggestions for `question_mark` lint
+* Fix incorrect suggestion for `get_unwrap`
+
+## Rust 1.32
+
+Released 2019-01-17
+
+[2e26fdc2...b2601be](https://github.com/rust-lang/rust-clippy/compare/2e26fdc2...b2601be)
+
+* New lints: [`slow_vector_initialization`], `mem_discriminant_non_enum`,
+ [`redundant_clone`], [`wildcard_dependencies`],
+ [`into_iter_on_ref`], `into_iter_on_array`, [`deprecated_cfg_attr`],
+ [`cargo_common_metadata`]
+* Add support for `u128` and `i128` to integer related lints
+* Add float support to `mistyped_literal_suffixes`
+* Fix false positives in `use_self`
+* Fix false positives in `missing_comma`
+* Fix false positives in `new_ret_no_self`
+* Fix false positives in `possible_missing_comma`
+* Fix false positive in `integer_arithmetic` in constant items
+* Fix false positive in `needless_borrow`
+* Fix false positive in `out_of_bounds_indexing`
+* Fix false positive in `new_without_default_derive`
+* Fix false positive in `string_lit_as_bytes`
+* Fix false negative in `out_of_bounds_indexing`
+* Fix false negative in `use_self`. It will now also check existential types
+* Fix incorrect suggestion for `redundant_closure_call`
+* Fix various suggestions that contained expanded macros
+* Fix `bool_comparison` triggering 3 times on the same code
+* Expand `trivially_copy_pass_by_ref` to work on trait methods
+* Improve suggestion for `needless_range_loop`
+* Move `needless_pass_by_value` from `pedantic` group to `style`
+
+## Rust 1.31
+
+Released 2018-12-06
+
+[125907ad..2e26fdc2](https://github.com/rust-lang/rust-clippy/compare/125907ad..2e26fdc2)
+
+* Clippy has been relicensed under a dual MIT / Apache license.
+ See [#3093](https://github.com/rust-lang/rust-clippy/issues/3093) for more
+ information.
+* With Rust 1.31, Clippy is no longer available via crates.io. The recommended
+ installation method is via `rustup component add clippy`.
+* New lints: [`redundant_pattern_matching`], [`unnecessary_filter_map`],
+ [`unused_unit`], [`map_flatten`], [`mem_replace_option_with_none`]
+* Fix ICE in `if_let_redundant_pattern_matching`
+* Fix ICE in `needless_pass_by_value` when encountering a generic function
+ argument with a lifetime parameter
+* Fix ICE in `needless_range_loop`
+* Fix ICE in `single_char_pattern` when encountering a constant value
+* Fix false positive in `assign_op_pattern`
+* Fix false positive in `boxed_local` on trait implementations
+* Fix false positive in `cmp_owned`
+* Fix false positive in `collapsible_if` when conditionals have comments
+* Fix false positive in `double_parens`
+* Fix false positive in `excessive_precision`
+* Fix false positive in `explicit_counter_loop`
+* Fix false positive in `fn_to_numeric_cast_with_truncation`
+* Fix false positive in `map_clone`
+* Fix false positive in `new_ret_no_self`
+* Fix false positive in `new_without_default` when `new` is unsafe
+* Fix false positive in `type_complexity` when using extern types
+* Fix false positive in `useless_format`
+* Fix false positive in `wrong_self_convention`
+* Fix incorrect suggestion for `excessive_precision`
+* Fix incorrect suggestion for `expect_fun_call`
+* Fix incorrect suggestion for `get_unwrap`
+* Fix incorrect suggestion for `useless_format`
+* `fn_to_numeric_cast_with_truncation` lint can be disabled again
+* Improve suggestions for `manual_memcpy`
+* Improve help message for `needless_lifetimes`
+
+## Rust 1.30
+
+Released 2018-10-25
+
+[14207503...125907ad](https://github.com/rust-lang/rust-clippy/compare/14207503...125907ad)
+
+* Deprecate `assign_ops` lint
+* New lints: [`mistyped_literal_suffixes`], [`ptr_offset_with_cast`],
+ [`needless_collect`], [`copy_iterator`]
+* `cargo clippy -V` now includes the Clippy commit hash of the Rust
+ Clippy component
+* Fix ICE in `implicit_hasher`
+* Fix ICE when encountering `println!("{}" a);`
+* Fix ICE when encountering a macro call in match statements
+* Fix false positive in `default_trait_access`
+* Fix false positive in `trivially_copy_pass_by_ref`
+* Fix false positive in `similar_names`
+* Fix false positive in `redundant_field_name`
+* Fix false positive in `expect_fun_call`
+* Fix false negative in `identity_conversion`
+* Fix false negative in `explicit_counter_loop`
+* Fix `range_plus_one` suggestion and false negative
+* `print_with_newline` / `write_with_newline`: don't warn about strings with several `\n`s in them
+* Fix `useless_attribute` to also whitelist `unused_extern_crates`
+* Fix incorrect suggestion for `single_char_pattern`
+* Improve suggestion for `identity_conversion` lint
+* Move `explicit_iter_loop` and `explicit_into_iter_loop` from `style` group to `pedantic`
+* Move `range_plus_one` and `range_minus_one` from `nursery` group to `complexity`
+* Move `shadow_unrelated` from `restriction` group to `pedantic`
+* Move `indexing_slicing` from `pedantic` group to `restriction`
+
+## Rust 1.29
+
+Released 2018-09-13
+
+[v0.0.212...14207503](https://github.com/rust-lang/rust-clippy/compare/v0.0.212...14207503)
+
+* :tada: :tada: **Rust 1.29 is the first stable Rust that includes a bundled Clippy** :tada:
+ :tada:
+  You can now run `rustup component add clippy-preview` and then
+  `cargo clippy` to run Clippy. This should put an end to the continuous
+  nightly upgrades for Clippy users.
+* Clippy now follows the Rust versioning scheme instead of its own
+* Fix ICE when encountering a `while let (..) = x.iter()` construct
+* Fix false positives in `use_self`
+* Fix false positive in `trivially_copy_pass_by_ref`
+* Fix false positive in `useless_attribute` lint
+* Fix false positive in `print_literal`
+* Fix `use_self` regressions
+* Improve lint message for `neg_cmp_op_on_partial_ord`
+* Improve suggestion highlight for `single_char_pattern`
+* Improve suggestions for various print/write macro lints
+* Improve website header
+
+## 0.0.212 (2018-07-10)
+* Rustup to *rustc 1.29.0-nightly (e06c87544 2018-07-06)*
+
+## 0.0.211
+* Rustup to *rustc 1.28.0-nightly (e3bf634e0 2018-06-28)*
+
+## 0.0.210
+* Rustup to *rustc 1.28.0-nightly (01cc982e9 2018-06-24)*
+
+## 0.0.209
+* Rustup to *rustc 1.28.0-nightly (523097979 2018-06-18)*
+
+## 0.0.208
+* Rustup to *rustc 1.28.0-nightly (86a8f1a63 2018-06-17)*
+
+## 0.0.207
+* Rustup to *rustc 1.28.0-nightly (2a0062974 2018-06-09)*
+
+## 0.0.206
+* Rustup to *rustc 1.28.0-nightly (5bf68db6e 2018-05-28)*
+
+## 0.0.205
+* Rustup to *rustc 1.28.0-nightly (990d8aa74 2018-05-25)*
+* Rename `unused_lifetimes` to `extra_unused_lifetimes` because of naming conflict with new rustc lint
+
+## 0.0.204
+* Rustup to *rustc 1.28.0-nightly (71e87be38 2018-05-22)*
+
+## 0.0.203
+* Rustup to *rustc 1.28.0-nightly (a3085756e 2018-05-19)*
+* Clippy attributes are now of the form `clippy::cyclomatic_complexity` instead of `clippy(cyclomatic_complexity)`
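+
+A minimal example of the new scoped form (the lint name is simply the one from
+the entry above; the function is made up for illustration):
+
+```rust
+// Lint attributes now go through the `clippy::` tool namespace instead of the
+// older `clippy(...)` form.
+#[allow(clippy::cyclomatic_complexity)]
+fn deeply_branched(n: u32) -> u32 {
+    // Illustrative body only.
+    if n > 10 { n - 10 } else { n + 10 }
+}
+
+fn main() {
+    assert_eq!(deeply_branched(3), 13);
+}
+```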
+
+## 0.0.202
+* Rustup to *rustc 1.28.0-nightly (952f344cd 2018-05-18)*
+
+## 0.0.201
+* Rustup to *rustc 1.27.0-nightly (2f2a11dfc 2018-05-16)*
+
+## 0.0.200
+* Rustup to *rustc 1.27.0-nightly (9fae15374 2018-05-13)*
+
+## 0.0.199
+* Rustup to *rustc 1.27.0-nightly (ff2ac35db 2018-05-12)*
+
+## 0.0.198
+* Rustup to *rustc 1.27.0-nightly (acd3871ba 2018-05-10)*
+
+## 0.0.197
+* Rustup to *rustc 1.27.0-nightly (428ea5f6b 2018-05-06)*
+
+## 0.0.196
+* Rustup to *rustc 1.27.0-nightly (e82261dfb 2018-05-03)*
+
+## 0.0.195
+* Rustup to *rustc 1.27.0-nightly (ac3c2288f 2018-04-18)*
+
+## 0.0.194
+* Rustup to *rustc 1.27.0-nightly (bd40cbbe1 2018-04-14)*
+* New lints: [`cast_ptr_alignment`], [`transmute_ptr_to_ptr`], [`write_literal`], [`write_with_newline`], [`writeln_empty_string`]
+
+## 0.0.193
+* Rustup to *rustc 1.27.0-nightly (eeea94c11 2018-04-06)*
+
+## 0.0.192
+* Rustup to *rustc 1.27.0-nightly (fb44b4c0e 2018-04-04)*
+* New lint: [`print_literal`]
+
+## 0.0.191
+* Rustup to *rustc 1.26.0-nightly (ae544ee1c 2018-03-29)*
+* Lint audit; categorize lints as style, correctness, complexity, pedantic, nursery, restriction.
+
+## 0.0.190
+* Fix a bunch of intermittent cargo bugs
+
+## 0.0.189
+* Rustup to *rustc 1.26.0-nightly (5508b2714 2018-03-18)*
+
+## 0.0.188
+* Rustup to *rustc 1.26.0-nightly (392645394 2018-03-15)*
+* New lint: [`while_immutable_condition`]
+
+## 0.0.187
+* Rustup to *rustc 1.26.0-nightly (322d7f7b9 2018-02-25)*
+* New lints: [`redundant_field_names`], [`suspicious_arithmetic_impl`], [`suspicious_op_assign_impl`]
+
+## 0.0.186
+* Rustup to *rustc 1.25.0-nightly (0c6091fbd 2018-02-04)*
+* Various false positive fixes
+
+## 0.0.185
+* Rustup to *rustc 1.25.0-nightly (56733bc9f 2018-02-01)*
+* New lint: [`question_mark`]
+
+## 0.0.184
+* Rustup to *rustc 1.25.0-nightly (90eb44a58 2018-01-29)*
+* New lints: [`double_comparisons`], [`empty_line_after_outer_attr`]
+
+## 0.0.183
+* Rustup to *rustc 1.25.0-nightly (21882aad7 2018-01-28)*
+* New lint: [`misaligned_transmute`]
+
+## 0.0.182
+* Rustup to *rustc 1.25.0-nightly (a0dcecff9 2018-01-24)*
+* New lint: [`decimal_literal_representation`]
+
+## 0.0.181
+* Rustup to *rustc 1.25.0-nightly (97520ccb1 2018-01-21)*
+* New lints: [`else_if_without_else`], [`option_option`], [`unit_arg`], [`unnecessary_fold`]
+* Removed `unit_expr`
+* Various false positive fixes for [`needless_pass_by_value`]
+
+## 0.0.180
+* Rustup to *rustc 1.25.0-nightly (3f92e8d89 2018-01-14)*
+
+## 0.0.179
+* Rustup to *rustc 1.25.0-nightly (61452e506 2018-01-09)*
+
+## 0.0.178
+* Rustup to *rustc 1.25.0-nightly (ee220daca 2018-01-07)*
+
+## 0.0.177
+* Rustup to *rustc 1.24.0-nightly (250b49205 2017-12-21)*
+* New lint: [`match_as_ref`]
+
+## 0.0.176
+* Rustup to *rustc 1.24.0-nightly (0077d128d 2017-12-14)*
+
+## 0.0.175
+* Rustup to *rustc 1.24.0-nightly (bb42071f6 2017-12-01)*
+
+## 0.0.174
+* Rustup to *rustc 1.23.0-nightly (63739ab7b 2017-11-21)*
+
+## 0.0.173
+* Rustup to *rustc 1.23.0-nightly (33374fa9d 2017-11-20)*
+
+## 0.0.172
+* Rustup to *rustc 1.23.0-nightly (d0f8e2913 2017-11-16)*
+
+## 0.0.171
+* Rustup to *rustc 1.23.0-nightly (ff0f5de3b 2017-11-14)*
+
+## 0.0.170
+* Rustup to *rustc 1.23.0-nightly (d6b06c63a 2017-11-09)*
+
+## 0.0.169
+* Rustup to *rustc 1.23.0-nightly (3b82e4c74 2017-11-05)*
+* New lints: [`just_underscores_and_digits`], `result_map_unwrap_or_else`, [`transmute_bytes_to_str`]
+
+## 0.0.168
+* Rustup to *rustc 1.23.0-nightly (f0fe716db 2017-10-30)*
+
+## 0.0.167
+* Rustup to *rustc 1.23.0-nightly (90ef3372e 2017-10-29)*
+* New lints: `const_static_lifetime`, [`erasing_op`], [`fallible_impl_from`], [`println_empty_string`], [`useless_asref`]
+
+## 0.0.166
+* Rustup to *rustc 1.22.0-nightly (b7960878b 2017-10-18)*
+* New lints: [`explicit_write`], `identity_conversion`, [`implicit_hasher`], `invalid_ref`, [`option_map_or_none`],
+ [`range_minus_one`], [`range_plus_one`], [`transmute_int_to_bool`], [`transmute_int_to_char`],
+ [`transmute_int_to_float`]
+
+## 0.0.165
+* Update to *rustc 1.22.0-nightly (0e6f4cf51 2017-09-27)*
+* New lint: [`mut_range_bound`]
+
+## 0.0.164
+* Update to *rustc 1.22.0-nightly (6c476ce46 2017-09-25)*
+* New lint: [`int_plus_one`]
+
+## 0.0.163
+* Update to *rustc 1.22.0-nightly (14039a42a 2017-09-22)*
+
+## 0.0.162
+* Update to *rustc 1.22.0-nightly (0701b37d9 2017-09-18)*
+* New lint: [`chars_last_cmp`]
+* Improved suggestions for [`needless_borrow`], [`ptr_arg`],
+
+## 0.0.161
+* Update to *rustc 1.22.0-nightly (539f2083d 2017-09-13)*
+
+## 0.0.160
+* Update to *rustc 1.22.0-nightly (dd08c3070 2017-09-12)*
+
+## 0.0.159
+* Update to *rustc 1.22.0-nightly (eba374fb2 2017-09-11)*
+* New lint: [`clone_on_ref_ptr`]
+
+## 0.0.158
+* New lint: [`manual_memcpy`]
+* [`cast_lossless`] no longer has redundant parentheses in its suggestions
+* Update to *rustc 1.22.0-nightly (dead08cb3 2017-09-08)*
+
+## 0.0.157 - 2017-09-04
+* Update to *rustc 1.22.0-nightly (981ce7d8d 2017-09-03)*
+* New lint: `unit_expr`
+
+## 0.0.156 - 2017-09-03
+* Update to *rustc 1.22.0-nightly (744dd6c1d 2017-09-02)*
+
+## 0.0.155
+* Update to *rustc 1.21.0-nightly (c11f689d2 2017-08-29)*
+* New lints: [`infinite_iter`], [`maybe_infinite_iter`], [`cast_lossless`]
+
+## 0.0.154
+* Update to *rustc 1.21.0-nightly (2c0558f63 2017-08-24)*
+* Fix [`use_self`] triggering inside derives
+* Add support for linting an entire workspace with `cargo clippy --all`
+* New lint: [`naive_bytecount`]
+
+## 0.0.153
+* Update to *rustc 1.21.0-nightly (8c303ed87 2017-08-20)*
+* New lint: [`use_self`]
+
+## 0.0.152
+* Update to *rustc 1.21.0-nightly (df511d554 2017-08-14)*
+
+## 0.0.151
+* Update to *rustc 1.21.0-nightly (13d94d5fa 2017-08-10)*
+
+## 0.0.150
+* Update to *rustc 1.21.0-nightly (215e0b10e 2017-08-08)*
+
+## 0.0.148
+* Update to *rustc 1.21.0-nightly (37c7d0ebb 2017-07-31)*
+* New lints: [`unreadable_literal`], [`inconsistent_digit_grouping`], [`large_digit_groups`]
+
+## 0.0.147
+* Update to *rustc 1.21.0-nightly (aac223f4f 2017-07-30)*
+
+## 0.0.146
+* Update to *rustc 1.21.0-nightly (52a330969 2017-07-27)*
+* Fixes false positives in `inline_always`
+* Fixes false negatives in `panic_params`
+
+## 0.0.145
+* Update to *rustc 1.20.0-nightly (afe145d22 2017-07-23)*
+
+## 0.0.144
+* Update to *rustc 1.20.0-nightly (086eaa78e 2017-07-15)*
+
+## 0.0.143
+* Update to *rustc 1.20.0-nightly (d84693b93 2017-07-09)*
+* Fix `cargo clippy` crashing on `dylib` projects
+* Fix false positives around `nested_while_let` and `never_loop`
+
+## 0.0.142
+* Update to *rustc 1.20.0-nightly (067971139 2017-07-02)*
+
+## 0.0.141
+* Rewrite of the `doc_markdown` lint.
+* Deprecated [`range_step_by_zero`]
+* New lint: [`iterator_step_by_zero`]
+* New lint: [`needless_borrowed_reference`]
+* Update to *rustc 1.20.0-nightly (69c65d296 2017-06-28)*
+
+## 0.0.140 - 2017-06-16
+* Update to *rustc 1.19.0-nightly (258ae6dd9 2017-06-15)*
+
+## 0.0.139 — 2017-06-10
+* Update to *rustc 1.19.0-nightly (4bf5c99af 2017-06-10)*
+* Fix bugs with for loop desugaring
+* Check for [`AsRef`]/[`AsMut`] arguments in [`wrong_self_convention`]
+
+## 0.0.138 — 2017-06-05
+* Update to *rustc 1.19.0-nightly (0418fa9d3 2017-06-04)*
+
+## 0.0.137 — 2017-06-05
+* Update to *rustc 1.19.0-nightly (6684d176c 2017-06-03)*
+
+## 0.0.136 — 2017-05-26
+* Update to *rustc 1.19.0-nightly (557967766 2017-05-26)*
+
+## 0.0.135 — 2017-05-24
+* Update to *rustc 1.19.0-nightly (5b13bff52 2017-05-23)*
+
+## 0.0.134 — 2017-05-19
+* Update to *rustc 1.19.0-nightly (0ed1ec9f9 2017-05-18)*
+
+## 0.0.133 — 2017-05-14
+* Update to *rustc 1.19.0-nightly (826d8f385 2017-05-13)*
+
+## 0.0.132 — 2017-05-05
+* Fix various bugs and some ICEs
+
+## 0.0.131 — 2017-05-04
+* Update to *rustc 1.19.0-nightly (2d4ed8e0c 2017-05-03)*
+
+## 0.0.130 — 2017-05-03
+* Update to *rustc 1.19.0-nightly (6a5fc9eec 2017-05-02)*
+
+## 0.0.129 — 2017-05-01
+* Update to *rustc 1.19.0-nightly (06fb4d256 2017-04-30)*
+
+## 0.0.128 — 2017-04-28
+* Update to *rustc 1.18.0-nightly (94e884b63 2017-04-27)*
+
+## 0.0.127 — 2017-04-27
+* Update to *rustc 1.18.0-nightly (036983201 2017-04-26)*
+* New lint: [`needless_continue`]
+
+## 0.0.126 — 2017-04-24
+* Update to *rustc 1.18.0-nightly (2bd4b5c6d 2017-04-23)*
+
+## 0.0.125 — 2017-04-19
+* Update to *rustc 1.18.0-nightly (9f2abadca 2017-04-18)*
+
+## 0.0.124 — 2017-04-16
+* Update to *rustc 1.18.0-nightly (d5cf1cb64 2017-04-15)*
+
+## 0.0.123 — 2017-04-07
+* Fix various false positives
+
+## 0.0.122 — 2017-04-07
+* Rustup to *rustc 1.18.0-nightly (91ae22a01 2017-04-05)*
+* New lint: [`op_ref`]
+
+## 0.0.121 — 2017-03-21
+* Rustup to *rustc 1.17.0-nightly (134c4a0f0 2017-03-20)*
+
+## 0.0.120 — 2017-03-17
+* Rustup to *rustc 1.17.0-nightly (0aeb9c129 2017-03-15)*
+
+## 0.0.119 — 2017-03-13
+* Rustup to *rustc 1.17.0-nightly (824c9ebbd 2017-03-12)*
+
+## 0.0.118 — 2017-03-05
+* Rustup to *rustc 1.17.0-nightly (b1e31766d 2017-03-03)*
+
+## 0.0.117 — 2017-03-01
+* Rustup to *rustc 1.17.0-nightly (be760566c 2017-02-28)*
+
+## 0.0.116 — 2017-02-28
+* Fix `cargo clippy` on 64 bit windows systems
+
+## 0.0.115 — 2017-02-27
+* Rustup to *rustc 1.17.0-nightly (60a0edc6c 2017-02-26)*
+* New lints: [`zero_ptr`], [`never_loop`], [`mut_from_ref`]
+
+## 0.0.114 — 2017-02-08
+* Rustup to *rustc 1.17.0-nightly (c49d10207 2017-02-07)*
+* Tests are now UI tests (testing the exact output of rustc)
+
+## 0.0.113 — 2017-02-04
+* Rustup to *rustc 1.16.0-nightly (eedaa94e3 2017-02-02)*
+* New lint: [`large_enum_variant`]
+* `explicit_into_iter_loop` provides suggestions
+
+## 0.0.112 — 2017-01-27
+* Rustup to *rustc 1.16.0-nightly (df8debf6d 2017-01-25)*
+
+## 0.0.111 — 2017-01-21
+* Rustup to *rustc 1.16.0-nightly (a52da95ce 2017-01-20)*
+
+## 0.0.110 — 2017-01-20
+* Add badges and categories to `Cargo.toml`
+
+## 0.0.109 — 2017-01-19
+* Update to *rustc 1.16.0-nightly (c07a6ae77 2017-01-17)*
+
+## 0.0.108 — 2017-01-12
+* Update to *rustc 1.16.0-nightly (2782e8f8f 2017-01-12)*
+
+## 0.0.107 — 2017-01-11
+* Update regex dependency
+* Fix FP when matching `&&mut` by `&ref`
+* Reintroduce `for (_, x) in &mut hash_map` -> `for x in hash_map.values_mut()`
+* New lints: [`unused_io_amount`], [`forget_ref`], [`short_circuit_statement`]
+
+## 0.0.106 — 2017-01-04
+* Fix FP introduced by rustup in [`wrong_self_convention`]
+
+## 0.0.105 — 2017-01-04
+* Update to *rustc 1.16.0-nightly (468227129 2017-01-03)*
+* New lints: [`deref_addrof`], [`double_parens`], [`pub_enum_variant_names`]
+* Fix suggestion in [`new_without_default`]
+* FP fix in [`absurd_extreme_comparisons`]
+
+## 0.0.104 — 2016-12-15
+* Update to *rustc 1.15.0-nightly (8f02c429a 2016-12-15)*
+
+## 0.0.103 — 2016-11-25
+* Update to *rustc 1.15.0-nightly (d5814b03e 2016-11-23)*
+
+## 0.0.102 — 2016-11-24
+* Update to *rustc 1.15.0-nightly (3bf2be9ce 2016-11-22)*
+
+## 0.0.101 — 2016-11-23
+* Update to *rustc 1.15.0-nightly (7b3eeea22 2016-11-21)*
+* New lint: [`string_extend_chars`]
+
+## 0.0.100 — 2016-11-20
+* Update to *rustc 1.15.0-nightly (ac635aa95 2016-11-18)*
+
+## 0.0.99 — 2016-11-18
+* Update to *rustc 1.15.0-nightly (0ed951993 2016-11-14)*
+* New lint: [`get_unwrap`]
+
+## 0.0.98 — 2016-11-08
+* Fixes an issue due to a change in how cargo handles `--sysroot`, which broke `cargo clippy`
+
+## 0.0.97 — 2016-11-03
+* For convenience, `cargo clippy` defines a `cargo-clippy` feature. This was
+ previously added for a short time under the name `clippy` but removed for
+ compatibility.
+* `cargo clippy --help` is more helping (and less helpful :smile:)
+* Rustup to *rustc 1.14.0-nightly (5665bdf3e 2016-11-02)*
+* New lints: [`if_let_redundant_pattern_matching`], [`partialeq_ne_impl`]
+
+## 0.0.96 — 2016-10-22
+* Rustup to *rustc 1.14.0-nightly (f09420685 2016-10-20)*
+* New lint: [`iter_skip_next`]
+
+## 0.0.95 — 2016-10-06
+* Rustup to *rustc 1.14.0-nightly (3210fd5c2 2016-10-05)*
+
+## 0.0.94 — 2016-10-04
+* Fixes bustage on Windows due to forbidden directory name
+
+## 0.0.93 — 2016-10-03
+* Rustup to *rustc 1.14.0-nightly (144af3e97 2016-10-02)*
+* `option_map_unwrap_or` and `option_map_unwrap_or_else` are now
+ allowed by default.
+* New lint: [`explicit_into_iter_loop`]
+
+## 0.0.92 — 2016-09-30
+* Rustup to *rustc 1.14.0-nightly (289f3a4ca 2016-09-29)*
+
+## 0.0.91 — 2016-09-28
+* Rustup to *rustc 1.13.0-nightly (d0623cf7b 2016-09-26)*
+
+## 0.0.90 — 2016-09-09
+* Rustup to *rustc 1.13.0-nightly (f1f40f850 2016-09-09)*
+
+## 0.0.89 — 2016-09-06
+* Rustup to *rustc 1.13.0-nightly (cbe4de78e 2016-09-05)*
+
+## 0.0.88 — 2016-09-04
+* Rustup to *rustc 1.13.0-nightly (70598e04f 2016-09-03)*
+* The following lints are not new but were only usable through the `clippy`
+ lint groups: [`filter_next`], `for_loop_over_option`,
+ `for_loop_over_result` and [`match_overlapping_arm`]. You should now be
+ able to `#[allow/deny]` them individually and they are available directly
+ through `cargo clippy`.
+
+## 0.0.87 — 2016-08-31
+* Rustup to *rustc 1.13.0-nightly (eac41469d 2016-08-30)*
+* New lint: [`builtin_type_shadow`]
+* Fix FP in [`zero_prefixed_literal`] on `0b`/`0o` literals
+
+## 0.0.86 — 2016-08-28
+* Rustup to *rustc 1.13.0-nightly (a23064af5 2016-08-27)*
+* New lints: [`missing_docs_in_private_items`], [`zero_prefixed_literal`]
+
+## 0.0.85 — 2016-08-19
+* Fix ICE with [`useless_attribute`]
+* [`useless_attribute`] ignores `unused_imports` on `use` statements
+
+## 0.0.84 — 2016-08-18
+* Rustup to *rustc 1.13.0-nightly (aef6971ca 2016-08-17)*
+
+## 0.0.83 — 2016-08-17
+* Rustup to *rustc 1.12.0-nightly (1bf5fa326 2016-08-16)*
+* New lints: [`print_with_newline`], [`useless_attribute`]
+
+## 0.0.82 — 2016-08-17
+* Rustup to *rustc 1.12.0-nightly (197be89f3 2016-08-15)*
+* New lint: [`module_inception`]
+
+## 0.0.81 — 2016-08-14
+* Rustup to *rustc 1.12.0-nightly (1deb02ea6 2016-08-12)*
+* New lints: [`eval_order_dependence`], [`mixed_case_hex_literals`], [`unseparated_literal_suffix`]
+* False positive fix in [`too_many_arguments`]
+* Addition of functionality to [`needless_borrow`]
+* Suggestions for [`clone_on_copy`]
+* Bug fix in [`wrong_self_convention`]
+* Doc improvements
+
+## 0.0.80 — 2016-07-31
+* Rustup to *rustc 1.12.0-nightly (1225e122f 2016-07-30)*
+* New lints: [`misrefactored_assign_op`], [`serde_api_misuse`]
+
+## 0.0.79 — 2016-07-10
+* Rustup to *rustc 1.12.0-nightly (f93aaf84c 2016-07-09)*
+* Major suggestions refactoring
+
+## 0.0.78 — 2016-07-02
+* Rustup to *rustc 1.11.0-nightly (01411937f 2016-07-01)*
+* New lints: [`wrong_transmute`], [`double_neg`], [`filter_map`]
+* For compatibility, `cargo clippy` no longer defines the `clippy` feature
+  introduced in 0.0.76
+* [`collapsible_if`] now considers `if let`
+
+## 0.0.77 — 2016-06-21
+* Rustup to *rustc 1.11.0-nightly (5522e678b 2016-06-20)*
+* New lints: `stutter` and [`iter_nth`]
+
+## 0.0.76 — 2016-06-10
+* Rustup to *rustc 1.11.0-nightly (7d2f75a95 2016-06-09)*
+* `cargo clippy` now automatically defines the `clippy` feature
+* New lint: [`not_unsafe_ptr_arg_deref`]
+
+## 0.0.75 — 2016-06-08
+* Rustup to *rustc 1.11.0-nightly (763f9234b 2016-06-06)*
+
+## 0.0.74 — 2016-06-07
+* Fix bug with `cargo-clippy` JSON parsing
+* Add the `CLIPPY_DISABLE_DOCS_LINKS` environment variable to deactivate the
+ “for further information visit *lint-link*” message.
+
+## 0.0.73 — 2016-06-05
+* Fix false positives in [`useless_let_if_seq`]
+
+## 0.0.72 — 2016-06-04
+* Fix false positives in [`useless_let_if_seq`]
+
+## 0.0.71 — 2016-05-31
+* Rustup to *rustc 1.11.0-nightly (a967611d8 2016-05-30)*
+* New lint: [`useless_let_if_seq`]
+
+## 0.0.70 — 2016-05-28
+* Rustup to *rustc 1.10.0-nightly (7bddce693 2016-05-27)*
+* [`invalid_regex`] and [`trivial_regex`] can now warn on `RegexSet::new`,
+ `RegexBuilder::new` and byte regexes
+
+## 0.0.69 — 2016-05-20
+* Rustup to *rustc 1.10.0-nightly (476fe6eef 2016-05-21)*
+* [`used_underscore_binding`] has been made `Allow` temporarily
+
+## 0.0.68 — 2016-05-17
+* Rustup to *rustc 1.10.0-nightly (cd6a40017 2016-05-16)*
+* New lint: [`unnecessary_operation`]
+
+## 0.0.67 — 2016-05-12
+* Rustup to *rustc 1.10.0-nightly (22ac88f1a 2016-05-11)*
+
+## 0.0.66 — 2016-05-11
+* New `cargo clippy` subcommand
+* New lints: [`assign_op_pattern`], [`assign_ops`], [`needless_borrow`]
+
+## 0.0.65 — 2016-05-08
+* Rustup to *rustc 1.10.0-nightly (62e2b2fb7 2016-05-06)*
+* New lints: [`float_arithmetic`], [`integer_arithmetic`]
+
+## 0.0.64 — 2016-04-26
+* Rustup to *rustc 1.10.0-nightly (645dd013a 2016-04-24)*
+* New lints: `temporary_cstring_as_ptr`, [`unsafe_removed_from_name`], and [`mem_forget`]
+
+## 0.0.63 — 2016-04-08
+* Rustup to *rustc 1.9.0-nightly (7979dd608 2016-04-07)*
+
+## 0.0.62 — 2016-04-07
+* Rustup to *rustc 1.9.0-nightly (bf5da36f1 2016-04-06)*
+
+## 0.0.61 — 2016-04-03
+* Rustup to *rustc 1.9.0-nightly (5ab11d72c 2016-04-02)*
+* New lint: [`invalid_upcast_comparisons`]
+
+## 0.0.60 — 2016-04-01
+* Rustup to *rustc 1.9.0-nightly (e1195c24b 2016-03-31)*
+
+## 0.0.59 — 2016-03-31
+* Rustup to *rustc 1.9.0-nightly (30a3849f2 2016-03-30)*
+* New lints: [`logic_bug`], [`nonminimal_bool`]
+* Fixed: [`match_same_arms`] now ignores arms with guards
+* Improved: [`useless_vec`] now warns on `for … in vec![…]`
+
+## 0.0.58 — 2016-03-27
+* Rustup to *rustc 1.9.0-nightly (d5a91e695 2016-03-26)*
+* New lint: [`doc_markdown`]
+
+## 0.0.57 — 2016-03-27
+* Update to *rustc 1.9.0-nightly (a1e29daf1 2016-03-25)*
+* Deprecated lints: [`str_to_string`], [`string_to_string`], [`unstable_as_slice`], [`unstable_as_mut_slice`]
+* New lint: [`crosspointer_transmute`]
+
+## 0.0.56 — 2016-03-23
+* Update to *rustc 1.9.0-nightly (0dcc413e4 2016-03-22)*
+* New lints: [`many_single_char_names`] and [`similar_names`]
+
+## 0.0.55 — 2016-03-21
+* Update to *rustc 1.9.0-nightly (02310fd31 2016-03-19)*
+
+## 0.0.54 — 2016-03-16
+* Update to *rustc 1.9.0-nightly (c66d2380a 2016-03-15)*
+
+## 0.0.53 — 2016-03-15
+* Add a [configuration file]
+
+## ~~0.0.52~~
+
+## 0.0.51 — 2016-03-13
+* Add `str` to types considered by [`len_zero`]
+* New lint: [`indexing_slicing`]
+
+## 0.0.50 — 2016-03-11
+* Update to *rustc 1.9.0-nightly (c9629d61c 2016-03-10)*
+
+## 0.0.49 — 2016-03-09
+* Update to *rustc 1.9.0-nightly (eabfc160f 2016-03-08)*
+* New lints: [`overflow_check_conditional`], `unused_label`, [`new_without_default`]
+
+## 0.0.48 — 2016-03-07
+* Fixed: ICE in [`needless_range_loop`] with globals
+
+## 0.0.47 — 2016-03-07
+* Update to *rustc 1.9.0-nightly (998a6720b 2016-03-07)*
+* New lint: [`redundant_closure_call`]
+
+[`AsMut`]: https://doc.rust-lang.org/std/convert/trait.AsMut.html
+[`AsRef`]: https://doc.rust-lang.org/std/convert/trait.AsRef.html
+[configuration file]: ./rust-clippy#configuration
+[pull3665]: https://github.com/rust-lang/rust-clippy/pull/3665
+[adding_lints]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/adding_lints.md
+[`README.md`]: https://github.com/rust-lang/rust-clippy/blob/master/README.md
+
+<!-- lint disable no-unused-definitions -->
+<!-- begin autogenerated links to lint list -->
+[`absurd_extreme_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons
+[`alloc_instead_of_core`]: https://rust-lang.github.io/rust-clippy/master/index.html#alloc_instead_of_core
+[`allow_attributes_without_reason`]: https://rust-lang.github.io/rust-clippy/master/index.html#allow_attributes_without_reason
+[`almost_complete_letter_range`]: https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_letter_range
+[`almost_swapped`]: https://rust-lang.github.io/rust-clippy/master/index.html#almost_swapped
+[`approx_constant`]: https://rust-lang.github.io/rust-clippy/master/index.html#approx_constant
+[`arithmetic`]: https://rust-lang.github.io/rust-clippy/master/index.html#arithmetic
+[`as_conversions`]: https://rust-lang.github.io/rust-clippy/master/index.html#as_conversions
+[`as_underscore`]: https://rust-lang.github.io/rust-clippy/master/index.html#as_underscore
+[`assertions_on_constants`]: https://rust-lang.github.io/rust-clippy/master/index.html#assertions_on_constants
+[`assertions_on_result_states`]: https://rust-lang.github.io/rust-clippy/master/index.html#assertions_on_result_states
+[`assign_op_pattern`]: https://rust-lang.github.io/rust-clippy/master/index.html#assign_op_pattern
+[`assign_ops`]: https://rust-lang.github.io/rust-clippy/master/index.html#assign_ops
+[`async_yields_async`]: https://rust-lang.github.io/rust-clippy/master/index.html#async_yields_async
+[`await_holding_invalid_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#await_holding_invalid_type
+[`await_holding_lock`]: https://rust-lang.github.io/rust-clippy/master/index.html#await_holding_lock
+[`await_holding_refcell_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#await_holding_refcell_ref
+[`bad_bit_mask`]: https://rust-lang.github.io/rust-clippy/master/index.html#bad_bit_mask
+[`bind_instead_of_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#bind_instead_of_map
+[`blacklisted_name`]: https://rust-lang.github.io/rust-clippy/master/index.html#blacklisted_name
+[`blanket_clippy_restriction_lints`]: https://rust-lang.github.io/rust-clippy/master/index.html#blanket_clippy_restriction_lints
+[`block_in_if_condition_expr`]: https://rust-lang.github.io/rust-clippy/master/index.html#block_in_if_condition_expr
+[`block_in_if_condition_stmt`]: https://rust-lang.github.io/rust-clippy/master/index.html#block_in_if_condition_stmt
+[`blocks_in_if_conditions`]: https://rust-lang.github.io/rust-clippy/master/index.html#blocks_in_if_conditions
+[`bool_assert_comparison`]: https://rust-lang.github.io/rust-clippy/master/index.html#bool_assert_comparison
+[`bool_comparison`]: https://rust-lang.github.io/rust-clippy/master/index.html#bool_comparison
+[`borrow_as_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#borrow_as_ptr
+[`borrow_deref_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#borrow_deref_ref
+[`borrow_interior_mutable_const`]: https://rust-lang.github.io/rust-clippy/master/index.html#borrow_interior_mutable_const
+[`borrowed_box`]: https://rust-lang.github.io/rust-clippy/master/index.html#borrowed_box
+[`box_collection`]: https://rust-lang.github.io/rust-clippy/master/index.html#box_collection
+[`box_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#box_vec
+[`boxed_local`]: https://rust-lang.github.io/rust-clippy/master/index.html#boxed_local
+[`branches_sharing_code`]: https://rust-lang.github.io/rust-clippy/master/index.html#branches_sharing_code
+[`builtin_type_shadow`]: https://rust-lang.github.io/rust-clippy/master/index.html#builtin_type_shadow
+[`bytes_count_to_len`]: https://rust-lang.github.io/rust-clippy/master/index.html#bytes_count_to_len
+[`bytes_nth`]: https://rust-lang.github.io/rust-clippy/master/index.html#bytes_nth
+[`cargo_common_metadata`]: https://rust-lang.github.io/rust-clippy/master/index.html#cargo_common_metadata
+[`case_sensitive_file_extension_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#case_sensitive_file_extension_comparisons
+[`cast_abs_to_unsigned`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_abs_to_unsigned
+[`cast_enum_constructor`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_enum_constructor
+[`cast_enum_truncation`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_enum_truncation
+[`cast_lossless`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_lossless
+[`cast_possible_truncation`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_possible_truncation
+[`cast_possible_wrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_possible_wrap
+[`cast_precision_loss`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_precision_loss
+[`cast_ptr_alignment`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_ptr_alignment
+[`cast_ref_to_mut`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_ref_to_mut
+[`cast_sign_loss`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_sign_loss
+[`cast_slice_different_sizes`]: https://rust-lang.github.io/rust-clippy/master/index.html#cast_slice_different_sizes
+[`char_lit_as_u8`]: https://rust-lang.github.io/rust-clippy/master/index.html#char_lit_as_u8
+[`chars_last_cmp`]: https://rust-lang.github.io/rust-clippy/master/index.html#chars_last_cmp
+[`chars_next_cmp`]: https://rust-lang.github.io/rust-clippy/master/index.html#chars_next_cmp
+[`checked_conversions`]: https://rust-lang.github.io/rust-clippy/master/index.html#checked_conversions
+[`clone_double_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#clone_double_ref
+[`clone_on_copy`]: https://rust-lang.github.io/rust-clippy/master/index.html#clone_on_copy
+[`clone_on_ref_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#clone_on_ref_ptr
+[`cloned_instead_of_copied`]: https://rust-lang.github.io/rust-clippy/master/index.html#cloned_instead_of_copied
+[`cmp_nan`]: https://rust-lang.github.io/rust-clippy/master/index.html#cmp_nan
+[`cmp_null`]: https://rust-lang.github.io/rust-clippy/master/index.html#cmp_null
+[`cmp_owned`]: https://rust-lang.github.io/rust-clippy/master/index.html#cmp_owned
+[`cognitive_complexity`]: https://rust-lang.github.io/rust-clippy/master/index.html#cognitive_complexity
+[`collapsible_else_if`]: https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_else_if
+[`collapsible_if`]: https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
+[`collapsible_match`]: https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_match
+[`comparison_chain`]: https://rust-lang.github.io/rust-clippy/master/index.html#comparison_chain
+[`comparison_to_empty`]: https://rust-lang.github.io/rust-clippy/master/index.html#comparison_to_empty
+[`const_static_lifetime`]: https://rust-lang.github.io/rust-clippy/master/index.html#const_static_lifetime
+[`copy_iterator`]: https://rust-lang.github.io/rust-clippy/master/index.html#copy_iterator
+[`crate_in_macro_def`]: https://rust-lang.github.io/rust-clippy/master/index.html#crate_in_macro_def
+[`create_dir`]: https://rust-lang.github.io/rust-clippy/master/index.html#create_dir
+[`crosspointer_transmute`]: https://rust-lang.github.io/rust-clippy/master/index.html#crosspointer_transmute
+[`cyclomatic_complexity`]: https://rust-lang.github.io/rust-clippy/master/index.html#cyclomatic_complexity
+[`dbg_macro`]: https://rust-lang.github.io/rust-clippy/master/index.html#dbg_macro
+[`debug_assert_with_mut_call`]: https://rust-lang.github.io/rust-clippy/master/index.html#debug_assert_with_mut_call
+[`decimal_literal_representation`]: https://rust-lang.github.io/rust-clippy/master/index.html#decimal_literal_representation
+[`declare_interior_mutable_const`]: https://rust-lang.github.io/rust-clippy/master/index.html#declare_interior_mutable_const
+[`default_instead_of_iter_empty`]: https://rust-lang.github.io/rust-clippy/master/index.html#default_instead_of_iter_empty
+[`default_numeric_fallback`]: https://rust-lang.github.io/rust-clippy/master/index.html#default_numeric_fallback
+[`default_trait_access`]: https://rust-lang.github.io/rust-clippy/master/index.html#default_trait_access
+[`default_union_representation`]: https://rust-lang.github.io/rust-clippy/master/index.html#default_union_representation
+[`deprecated_cfg_attr`]: https://rust-lang.github.io/rust-clippy/master/index.html#deprecated_cfg_attr
+[`deprecated_semver`]: https://rust-lang.github.io/rust-clippy/master/index.html#deprecated_semver
+[`deref_addrof`]: https://rust-lang.github.io/rust-clippy/master/index.html#deref_addrof
+[`deref_by_slicing`]: https://rust-lang.github.io/rust-clippy/master/index.html#deref_by_slicing
+[`derivable_impls`]: https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls
+[`derive_hash_xor_eq`]: https://rust-lang.github.io/rust-clippy/master/index.html#derive_hash_xor_eq
+[`derive_ord_xor_partial_ord`]: https://rust-lang.github.io/rust-clippy/master/index.html#derive_ord_xor_partial_ord
+[`derive_partial_eq_without_eq`]: https://rust-lang.github.io/rust-clippy/master/index.html#derive_partial_eq_without_eq
+[`disallowed_method`]: https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_method
+[`disallowed_methods`]: https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_methods
+[`disallowed_script_idents`]: https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_script_idents
+[`disallowed_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_type
+[`disallowed_types`]: https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_types
+[`diverging_sub_expression`]: https://rust-lang.github.io/rust-clippy/master/index.html#diverging_sub_expression
+[`doc_link_with_quotes`]: https://rust-lang.github.io/rust-clippy/master/index.html#doc_link_with_quotes
+[`doc_markdown`]: https://rust-lang.github.io/rust-clippy/master/index.html#doc_markdown
+[`double_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_comparisons
+[`double_must_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_must_use
+[`double_neg`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_neg
+[`double_parens`]: https://rust-lang.github.io/rust-clippy/master/index.html#double_parens
+[`drop_bounds`]: https://rust-lang.github.io/rust-clippy/master/index.html#drop_bounds
+[`drop_copy`]: https://rust-lang.github.io/rust-clippy/master/index.html#drop_copy
+[`drop_non_drop`]: https://rust-lang.github.io/rust-clippy/master/index.html#drop_non_drop
+[`drop_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#drop_ref
+[`duplicate_mod`]: https://rust-lang.github.io/rust-clippy/master/index.html#duplicate_mod
+[`duplicate_underscore_argument`]: https://rust-lang.github.io/rust-clippy/master/index.html#duplicate_underscore_argument
+[`duration_subsec`]: https://rust-lang.github.io/rust-clippy/master/index.html#duration_subsec
+[`else_if_without_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#else_if_without_else
+[`empty_drop`]: https://rust-lang.github.io/rust-clippy/master/index.html#empty_drop
+[`empty_enum`]: https://rust-lang.github.io/rust-clippy/master/index.html#empty_enum
+[`empty_line_after_outer_attr`]: https://rust-lang.github.io/rust-clippy/master/index.html#empty_line_after_outer_attr
+[`empty_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#empty_loop
+[`empty_structs_with_brackets`]: https://rust-lang.github.io/rust-clippy/master/index.html#empty_structs_with_brackets
+[`enum_clike_unportable_variant`]: https://rust-lang.github.io/rust-clippy/master/index.html#enum_clike_unportable_variant
+[`enum_glob_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#enum_glob_use
+[`enum_variant_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#enum_variant_names
+[`eq_op`]: https://rust-lang.github.io/rust-clippy/master/index.html#eq_op
+[`equatable_if_let`]: https://rust-lang.github.io/rust-clippy/master/index.html#equatable_if_let
+[`erasing_op`]: https://rust-lang.github.io/rust-clippy/master/index.html#erasing_op
+[`err_expect`]: https://rust-lang.github.io/rust-clippy/master/index.html#err_expect
+[`eval_order_dependence`]: https://rust-lang.github.io/rust-clippy/master/index.html#eval_order_dependence
+[`excessive_precision`]: https://rust-lang.github.io/rust-clippy/master/index.html#excessive_precision
+[`exhaustive_enums`]: https://rust-lang.github.io/rust-clippy/master/index.html#exhaustive_enums
+[`exhaustive_structs`]: https://rust-lang.github.io/rust-clippy/master/index.html#exhaustive_structs
+[`exit`]: https://rust-lang.github.io/rust-clippy/master/index.html#exit
+[`expect_fun_call`]: https://rust-lang.github.io/rust-clippy/master/index.html#expect_fun_call
+[`expect_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#expect_used
+[`expl_impl_clone_on_copy`]: https://rust-lang.github.io/rust-clippy/master/index.html#expl_impl_clone_on_copy
+[`explicit_auto_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#explicit_auto_deref
+[`explicit_counter_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#explicit_counter_loop
+[`explicit_deref_methods`]: https://rust-lang.github.io/rust-clippy/master/index.html#explicit_deref_methods
+[`explicit_into_iter_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#explicit_into_iter_loop
+[`explicit_iter_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#explicit_iter_loop
+[`explicit_write`]: https://rust-lang.github.io/rust-clippy/master/index.html#explicit_write
+[`extend_from_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#extend_from_slice
+[`extend_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#extend_with_drain
+[`extra_unused_lifetimes`]: https://rust-lang.github.io/rust-clippy/master/index.html#extra_unused_lifetimes
+[`fallible_impl_from`]: https://rust-lang.github.io/rust-clippy/master/index.html#fallible_impl_from
+[`field_reassign_with_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#field_reassign_with_default
+[`filetype_is_file`]: https://rust-lang.github.io/rust-clippy/master/index.html#filetype_is_file
+[`filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map
+[`filter_map_identity`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map_identity
+[`filter_map_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map_next
+[`filter_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_next
+[`find_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#find_map
+[`flat_map_identity`]: https://rust-lang.github.io/rust-clippy/master/index.html#flat_map_identity
+[`flat_map_option`]: https://rust-lang.github.io/rust-clippy/master/index.html#flat_map_option
+[`float_arithmetic`]: https://rust-lang.github.io/rust-clippy/master/index.html#float_arithmetic
+[`float_cmp`]: https://rust-lang.github.io/rust-clippy/master/index.html#float_cmp
+[`float_cmp_const`]: https://rust-lang.github.io/rust-clippy/master/index.html#float_cmp_const
+[`float_equality_without_abs`]: https://rust-lang.github.io/rust-clippy/master/index.html#float_equality_without_abs
+[`fn_address_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#fn_address_comparisons
+[`fn_params_excessive_bools`]: https://rust-lang.github.io/rust-clippy/master/index.html#fn_params_excessive_bools
+[`fn_to_numeric_cast`]: https://rust-lang.github.io/rust-clippy/master/index.html#fn_to_numeric_cast
+[`fn_to_numeric_cast_any`]: https://rust-lang.github.io/rust-clippy/master/index.html#fn_to_numeric_cast_any
+[`fn_to_numeric_cast_with_truncation`]: https://rust-lang.github.io/rust-clippy/master/index.html#fn_to_numeric_cast_with_truncation
+[`for_kv_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#for_kv_map
+[`for_loop_over_option`]: https://rust-lang.github.io/rust-clippy/master/index.html#for_loop_over_option
+[`for_loop_over_result`]: https://rust-lang.github.io/rust-clippy/master/index.html#for_loop_over_result
+[`for_loops_over_fallibles`]: https://rust-lang.github.io/rust-clippy/master/index.html#for_loops_over_fallibles
+[`forget_copy`]: https://rust-lang.github.io/rust-clippy/master/index.html#forget_copy
+[`forget_non_drop`]: https://rust-lang.github.io/rust-clippy/master/index.html#forget_non_drop
+[`forget_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#forget_ref
+[`format_in_format_args`]: https://rust-lang.github.io/rust-clippy/master/index.html#format_in_format_args
+[`format_push_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#format_push_string
+[`from_iter_instead_of_collect`]: https://rust-lang.github.io/rust-clippy/master/index.html#from_iter_instead_of_collect
+[`from_over_into`]: https://rust-lang.github.io/rust-clippy/master/index.html#from_over_into
+[`from_str_radix_10`]: https://rust-lang.github.io/rust-clippy/master/index.html#from_str_radix_10
+[`future_not_send`]: https://rust-lang.github.io/rust-clippy/master/index.html#future_not_send
+[`get_first`]: https://rust-lang.github.io/rust-clippy/master/index.html#get_first
+[`get_last_with_len`]: https://rust-lang.github.io/rust-clippy/master/index.html#get_last_with_len
+[`get_unwrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#get_unwrap
+[`identity_conversion`]: https://rust-lang.github.io/rust-clippy/master/index.html#identity_conversion
+[`identity_op`]: https://rust-lang.github.io/rust-clippy/master/index.html#identity_op
+[`if_let_mutex`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_let_mutex
+[`if_let_redundant_pattern_matching`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_let_redundant_pattern_matching
+[`if_let_some_result`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_let_some_result
+[`if_not_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_not_else
+[`if_same_then_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_same_then_else
+[`if_then_some_else_none`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_then_some_else_none
+[`ifs_same_cond`]: https://rust-lang.github.io/rust-clippy/master/index.html#ifs_same_cond
+[`implicit_clone`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_clone
+[`implicit_hasher`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_hasher
+[`implicit_return`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_return
+[`implicit_saturating_sub`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_saturating_sub
+[`imprecise_flops`]: https://rust-lang.github.io/rust-clippy/master/index.html#imprecise_flops
+[`inconsistent_digit_grouping`]: https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_digit_grouping
+[`inconsistent_struct_constructor`]: https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_struct_constructor
+[`index_refutable_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice
+[`indexing_slicing`]: https://rust-lang.github.io/rust-clippy/master/index.html#indexing_slicing
+[`ineffective_bit_mask`]: https://rust-lang.github.io/rust-clippy/master/index.html#ineffective_bit_mask
+[`inefficient_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#inefficient_to_string
+[`infallible_destructuring_match`]: https://rust-lang.github.io/rust-clippy/master/index.html#infallible_destructuring_match
+[`infinite_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#infinite_iter
+[`inherent_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#inherent_to_string
+[`inherent_to_string_shadow_display`]: https://rust-lang.github.io/rust-clippy/master/index.html#inherent_to_string_shadow_display
+[`init_numbered_fields`]: https://rust-lang.github.io/rust-clippy/master/index.html#init_numbered_fields
+[`inline_always`]: https://rust-lang.github.io/rust-clippy/master/index.html#inline_always
+[`inline_asm_x86_att_syntax`]: https://rust-lang.github.io/rust-clippy/master/index.html#inline_asm_x86_att_syntax
+[`inline_asm_x86_intel_syntax`]: https://rust-lang.github.io/rust-clippy/master/index.html#inline_asm_x86_intel_syntax
+[`inline_fn_without_body`]: https://rust-lang.github.io/rust-clippy/master/index.html#inline_fn_without_body
+[`inspect_for_each`]: https://rust-lang.github.io/rust-clippy/master/index.html#inspect_for_each
+[`int_plus_one`]: https://rust-lang.github.io/rust-clippy/master/index.html#int_plus_one
+[`integer_arithmetic`]: https://rust-lang.github.io/rust-clippy/master/index.html#integer_arithmetic
+[`integer_division`]: https://rust-lang.github.io/rust-clippy/master/index.html#integer_division
+[`into_iter_on_array`]: https://rust-lang.github.io/rust-clippy/master/index.html#into_iter_on_array
+[`into_iter_on_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#into_iter_on_ref
+[`invalid_atomic_ordering`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_atomic_ordering
+[`invalid_null_ptr_usage`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_null_ptr_usage
+[`invalid_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_ref
+[`invalid_regex`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_regex
+[`invalid_upcast_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_upcast_comparisons
+[`invalid_utf8_in_unchecked`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_utf8_in_unchecked
+[`invisible_characters`]: https://rust-lang.github.io/rust-clippy/master/index.html#invisible_characters
+[`is_digit_ascii_radix`]: https://rust-lang.github.io/rust-clippy/master/index.html#is_digit_ascii_radix
+[`items_after_statements`]: https://rust-lang.github.io/rust-clippy/master/index.html#items_after_statements
+[`iter_cloned_collect`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_cloned_collect
+[`iter_count`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_count
+[`iter_next_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_next_loop
+[`iter_next_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_next_slice
+[`iter_not_returning_iterator`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_not_returning_iterator
+[`iter_nth`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_nth
+[`iter_nth_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_nth_zero
+[`iter_overeager_cloned`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_overeager_cloned
+[`iter_skip_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_next
+[`iter_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_with_drain
+[`iterator_step_by_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iterator_step_by_zero
+[`just_underscores_and_digits`]: https://rust-lang.github.io/rust-clippy/master/index.html#just_underscores_and_digits
+[`large_const_arrays`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_const_arrays
+[`large_digit_groups`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_digit_groups
+[`large_enum_variant`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_enum_variant
+[`large_include_file`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_include_file
+[`large_stack_arrays`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_stack_arrays
+[`large_types_passed_by_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_types_passed_by_value
+[`len_without_is_empty`]: https://rust-lang.github.io/rust-clippy/master/index.html#len_without_is_empty
+[`len_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#len_zero
+[`let_and_return`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return
+[`let_underscore_drop`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_underscore_drop
+[`let_underscore_lock`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_underscore_lock
+[`let_underscore_must_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_underscore_must_use
+[`let_unit_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#let_unit_value
+[`linkedlist`]: https://rust-lang.github.io/rust-clippy/master/index.html#linkedlist
+[`logic_bug`]: https://rust-lang.github.io/rust-clippy/master/index.html#logic_bug
+[`lossy_float_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#lossy_float_literal
+[`macro_use_imports`]: https://rust-lang.github.io/rust-clippy/master/index.html#macro_use_imports
+[`main_recursion`]: https://rust-lang.github.io/rust-clippy/master/index.html#main_recursion
+[`manual_assert`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_assert
+[`manual_async_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_async_fn
+[`manual_bits`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_bits
+[`manual_filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_filter_map
+[`manual_find`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_find
+[`manual_find_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_find_map
+[`manual_flatten`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_flatten
+[`manual_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_map
+[`manual_memcpy`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_memcpy
+[`manual_non_exhaustive`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_non_exhaustive
+[`manual_ok_or`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_ok_or
+[`manual_range_contains`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_range_contains
+[`manual_rem_euclid`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_rem_euclid
+[`manual_retain`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_retain
+[`manual_saturating_arithmetic`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_saturating_arithmetic
+[`manual_split_once`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_split_once
+[`manual_str_repeat`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_str_repeat
+[`manual_strip`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_strip
+[`manual_swap`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_swap
+[`manual_unwrap_or`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_unwrap_or
+[`many_single_char_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#many_single_char_names
+[`map_clone`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_clone
+[`map_collect_result_unit`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_collect_result_unit
+[`map_entry`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_entry
+[`map_err_ignore`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_err_ignore
+[`map_flatten`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_flatten
+[`map_identity`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_identity
+[`map_unwrap_or`]: https://rust-lang.github.io/rust-clippy/master/index.html#map_unwrap_or
+[`match_as_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_as_ref
+[`match_bool`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_bool
+[`match_like_matches_macro`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_like_matches_macro
+[`match_on_vec_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_on_vec_items
+[`match_overlapping_arm`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_overlapping_arm
+[`match_ref_pats`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_ref_pats
+[`match_result_ok`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_result_ok
+[`match_same_arms`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_same_arms
+[`match_single_binding`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_single_binding
+[`match_str_case_mismatch`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_str_case_mismatch
+[`match_wild_err_arm`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_wild_err_arm
+[`match_wildcard_for_single_variants`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_wildcard_for_single_variants
+[`maybe_infinite_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#maybe_infinite_iter
+[`mem_discriminant_non_enum`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_discriminant_non_enum
+[`mem_forget`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_forget
+[`mem_replace_option_with_none`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_replace_option_with_none
+[`mem_replace_with_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_replace_with_default
+[`mem_replace_with_uninit`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_replace_with_uninit
+[`min_max`]: https://rust-lang.github.io/rust-clippy/master/index.html#min_max
+[`misaligned_transmute`]: https://rust-lang.github.io/rust-clippy/master/index.html#misaligned_transmute
+[`mismatched_target_os`]: https://rust-lang.github.io/rust-clippy/master/index.html#mismatched_target_os
+[`mismatching_type_param_order`]: https://rust-lang.github.io/rust-clippy/master/index.html#mismatching_type_param_order
+[`misrefactored_assign_op`]: https://rust-lang.github.io/rust-clippy/master/index.html#misrefactored_assign_op
+[`missing_const_for_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_const_for_fn
+[`missing_docs_in_private_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_docs_in_private_items
+[`missing_enforced_import_renames`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_enforced_import_renames
+[`missing_errors_doc`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_errors_doc
+[`missing_inline_in_public_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_inline_in_public_items
+[`missing_panics_doc`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_panics_doc
+[`missing_safety_doc`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_safety_doc
+[`missing_spin_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#missing_spin_loop
+[`mistyped_literal_suffixes`]: https://rust-lang.github.io/rust-clippy/master/index.html#mistyped_literal_suffixes
+[`mixed_case_hex_literals`]: https://rust-lang.github.io/rust-clippy/master/index.html#mixed_case_hex_literals
+[`mixed_read_write_in_expression`]: https://rust-lang.github.io/rust-clippy/master/index.html#mixed_read_write_in_expression
+[`mod_module_files`]: https://rust-lang.github.io/rust-clippy/master/index.html#mod_module_files
+[`module_inception`]: https://rust-lang.github.io/rust-clippy/master/index.html#module_inception
+[`module_name_repetitions`]: https://rust-lang.github.io/rust-clippy/master/index.html#module_name_repetitions
+[`modulo_arithmetic`]: https://rust-lang.github.io/rust-clippy/master/index.html#modulo_arithmetic
+[`modulo_one`]: https://rust-lang.github.io/rust-clippy/master/index.html#modulo_one
+[`multiple_crate_versions`]: https://rust-lang.github.io/rust-clippy/master/index.html#multiple_crate_versions
+[`multiple_inherent_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#multiple_inherent_impl
+[`must_use_candidate`]: https://rust-lang.github.io/rust-clippy/master/index.html#must_use_candidate
+[`must_use_unit`]: https://rust-lang.github.io/rust-clippy/master/index.html#must_use_unit
+[`mut_from_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#mut_from_ref
+[`mut_mut`]: https://rust-lang.github.io/rust-clippy/master/index.html#mut_mut
+[`mut_mutex_lock`]: https://rust-lang.github.io/rust-clippy/master/index.html#mut_mutex_lock
+[`mut_range_bound`]: https://rust-lang.github.io/rust-clippy/master/index.html#mut_range_bound
+[`mutable_key_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#mutable_key_type
+[`mutex_atomic`]: https://rust-lang.github.io/rust-clippy/master/index.html#mutex_atomic
+[`mutex_integer`]: https://rust-lang.github.io/rust-clippy/master/index.html#mutex_integer
+[`naive_bytecount`]: https://rust-lang.github.io/rust-clippy/master/index.html#naive_bytecount
+[`needless_arbitrary_self_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_arbitrary_self_type
+[`needless_bitwise_bool`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_bitwise_bool
+[`needless_bool`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_bool
+[`needless_borrow`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow
+[`needless_borrowed_reference`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrowed_reference
+[`needless_collect`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_collect
+[`needless_continue`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_continue
+[`needless_doctest_main`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_doctest_main
+[`needless_for_each`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_for_each
+[`needless_late_init`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_late_init
+[`needless_lifetimes`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_lifetimes
+[`needless_match`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_match
+[`needless_option_as_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_option_as_deref
+[`needless_option_take`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_option_take
+[`needless_parens_on_range_literals`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_parens_on_range_literals
+[`needless_pass_by_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pass_by_value
+[`needless_question_mark`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_question_mark
+[`needless_range_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_range_loop
+[`needless_return`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_return
+[`needless_splitn`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_splitn
+[`needless_update`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_update
+[`neg_cmp_op_on_partial_ord`]: https://rust-lang.github.io/rust-clippy/master/index.html#neg_cmp_op_on_partial_ord
+[`neg_multiply`]: https://rust-lang.github.io/rust-clippy/master/index.html#neg_multiply
+[`negative_feature_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#negative_feature_names
+[`never_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#never_loop
+[`new_ret_no_self`]: https://rust-lang.github.io/rust-clippy/master/index.html#new_ret_no_self
+[`new_without_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#new_without_default
+[`new_without_default_derive`]: https://rust-lang.github.io/rust-clippy/master/index.html#new_without_default_derive
+[`no_effect`]: https://rust-lang.github.io/rust-clippy/master/index.html#no_effect
+[`no_effect_replace`]: https://rust-lang.github.io/rust-clippy/master/index.html#no_effect_replace
+[`no_effect_underscore_binding`]: https://rust-lang.github.io/rust-clippy/master/index.html#no_effect_underscore_binding
+[`non_ascii_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#non_ascii_literal
+[`non_octal_unix_permissions`]: https://rust-lang.github.io/rust-clippy/master/index.html#non_octal_unix_permissions
+[`non_send_fields_in_send_ty`]: https://rust-lang.github.io/rust-clippy/master/index.html#non_send_fields_in_send_ty
+[`nonminimal_bool`]: https://rust-lang.github.io/rust-clippy/master/index.html#nonminimal_bool
+[`nonsensical_open_options`]: https://rust-lang.github.io/rust-clippy/master/index.html#nonsensical_open_options
+[`nonstandard_macro_braces`]: https://rust-lang.github.io/rust-clippy/master/index.html#nonstandard_macro_braces
+[`not_unsafe_ptr_arg_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#not_unsafe_ptr_arg_deref
+[`obfuscated_if_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#obfuscated_if_else
+[`octal_escapes`]: https://rust-lang.github.io/rust-clippy/master/index.html#octal_escapes
+[`ok_expect`]: https://rust-lang.github.io/rust-clippy/master/index.html#ok_expect
+[`only_used_in_recursion`]: https://rust-lang.github.io/rust-clippy/master/index.html#only_used_in_recursion
+[`op_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#op_ref
+[`option_and_then_some`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_and_then_some
+[`option_as_ref_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_as_ref_deref
+[`option_env_unwrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_env_unwrap
+[`option_expect_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_expect_used
+[`option_filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_filter_map
+[`option_if_let_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_if_let_else
+[`option_map_or_none`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_or_none
+[`option_map_unit_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_unit_fn
+[`option_map_unwrap_or`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_unwrap_or
+[`option_map_unwrap_or_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_map_unwrap_or_else
+[`option_option`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_option
+[`option_unwrap_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#option_unwrap_used
+[`or_fun_call`]: https://rust-lang.github.io/rust-clippy/master/index.html#or_fun_call
+[`or_then_unwrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#or_then_unwrap
+[`out_of_bounds_indexing`]: https://rust-lang.github.io/rust-clippy/master/index.html#out_of_bounds_indexing
+[`overflow_check_conditional`]: https://rust-lang.github.io/rust-clippy/master/index.html#overflow_check_conditional
+[`panic`]: https://rust-lang.github.io/rust-clippy/master/index.html#panic
+[`panic_in_result_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#panic_in_result_fn
+[`panic_params`]: https://rust-lang.github.io/rust-clippy/master/index.html#panic_params
+[`panicking_unwrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#panicking_unwrap
+[`partialeq_ne_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#partialeq_ne_impl
+[`path_buf_push_overwrite`]: https://rust-lang.github.io/rust-clippy/master/index.html#path_buf_push_overwrite
+[`pattern_type_mismatch`]: https://rust-lang.github.io/rust-clippy/master/index.html#pattern_type_mismatch
+[`possible_missing_comma`]: https://rust-lang.github.io/rust-clippy/master/index.html#possible_missing_comma
+[`precedence`]: https://rust-lang.github.io/rust-clippy/master/index.html#precedence
+[`print_in_format_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#print_in_format_impl
+[`print_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#print_literal
+[`print_stderr`]: https://rust-lang.github.io/rust-clippy/master/index.html#print_stderr
+[`print_stdout`]: https://rust-lang.github.io/rust-clippy/master/index.html#print_stdout
+[`print_with_newline`]: https://rust-lang.github.io/rust-clippy/master/index.html#print_with_newline
+[`println_empty_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#println_empty_string
+[`ptr_arg`]: https://rust-lang.github.io/rust-clippy/master/index.html#ptr_arg
+[`ptr_as_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#ptr_as_ptr
+[`ptr_eq`]: https://rust-lang.github.io/rust-clippy/master/index.html#ptr_eq
+[`ptr_offset_with_cast`]: https://rust-lang.github.io/rust-clippy/master/index.html#ptr_offset_with_cast
+[`pub_enum_variant_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#pub_enum_variant_names
+[`pub_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#pub_use
+[`question_mark`]: https://rust-lang.github.io/rust-clippy/master/index.html#question_mark
+[`range_minus_one`]: https://rust-lang.github.io/rust-clippy/master/index.html#range_minus_one
+[`range_plus_one`]: https://rust-lang.github.io/rust-clippy/master/index.html#range_plus_one
+[`range_step_by_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#range_step_by_zero
+[`range_zip_with_len`]: https://rust-lang.github.io/rust-clippy/master/index.html#range_zip_with_len
+[`rc_buffer`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_buffer
+[`rc_clone_in_vec_init`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_clone_in_vec_init
+[`rc_mutex`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_mutex
+[`read_zero_byte_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#read_zero_byte_vec
+[`recursive_format_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#recursive_format_impl
+[`redundant_allocation`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_allocation
+[`redundant_clone`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_clone
+[`redundant_closure`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure
+[`redundant_closure_call`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_call
+[`redundant_closure_for_method_calls`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
+[`redundant_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_else
+[`redundant_feature_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_feature_names
+[`redundant_field_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_field_names
+[`redundant_pattern`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_pattern
+[`redundant_pattern_matching`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_pattern_matching
+[`redundant_pub_crate`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_pub_crate
+[`redundant_slicing`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_slicing
+[`redundant_static_lifetimes`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_static_lifetimes
+[`ref_binding_to_reference`]: https://rust-lang.github.io/rust-clippy/master/index.html#ref_binding_to_reference
+[`ref_in_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#ref_in_deref
+[`ref_option_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#ref_option_ref
+[`regex_macro`]: https://rust-lang.github.io/rust-clippy/master/index.html#regex_macro
+[`repeat_once`]: https://rust-lang.github.io/rust-clippy/master/index.html#repeat_once
+[`replace_consts`]: https://rust-lang.github.io/rust-clippy/master/index.html#replace_consts
+[`rest_pat_in_fully_bound_structs`]: https://rust-lang.github.io/rust-clippy/master/index.html#rest_pat_in_fully_bound_structs
+[`result_expect_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_expect_used
+[`result_map_or_into_option`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_map_or_into_option
+[`result_map_unit_fn`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_map_unit_fn
+[`result_map_unwrap_or_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_map_unwrap_or_else
+[`result_unit_err`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_unit_err
+[`result_unwrap_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#result_unwrap_used
+[`return_self_not_must_use`]: https://rust-lang.github.io/rust-clippy/master/index.html#return_self_not_must_use
+[`reversed_empty_ranges`]: https://rust-lang.github.io/rust-clippy/master/index.html#reversed_empty_ranges
+[`same_functions_in_if_condition`]: https://rust-lang.github.io/rust-clippy/master/index.html#same_functions_in_if_condition
+[`same_item_push`]: https://rust-lang.github.io/rust-clippy/master/index.html#same_item_push
+[`same_name_method`]: https://rust-lang.github.io/rust-clippy/master/index.html#same_name_method
+[`search_is_some`]: https://rust-lang.github.io/rust-clippy/master/index.html#search_is_some
+[`self_assignment`]: https://rust-lang.github.io/rust-clippy/master/index.html#self_assignment
+[`self_named_constructors`]: https://rust-lang.github.io/rust-clippy/master/index.html#self_named_constructors
+[`self_named_module_files`]: https://rust-lang.github.io/rust-clippy/master/index.html#self_named_module_files
+[`semicolon_if_nothing_returned`]: https://rust-lang.github.io/rust-clippy/master/index.html#semicolon_if_nothing_returned
+[`separated_literal_suffix`]: https://rust-lang.github.io/rust-clippy/master/index.html#separated_literal_suffix
+[`serde_api_misuse`]: https://rust-lang.github.io/rust-clippy/master/index.html#serde_api_misuse
+[`shadow_reuse`]: https://rust-lang.github.io/rust-clippy/master/index.html#shadow_reuse
+[`shadow_same`]: https://rust-lang.github.io/rust-clippy/master/index.html#shadow_same
+[`shadow_unrelated`]: https://rust-lang.github.io/rust-clippy/master/index.html#shadow_unrelated
+[`short_circuit_statement`]: https://rust-lang.github.io/rust-clippy/master/index.html#short_circuit_statement
+[`should_assert_eq`]: https://rust-lang.github.io/rust-clippy/master/index.html#should_assert_eq
+[`should_implement_trait`]: https://rust-lang.github.io/rust-clippy/master/index.html#should_implement_trait
+[`significant_drop_in_scrutinee`]: https://rust-lang.github.io/rust-clippy/master/index.html#significant_drop_in_scrutinee
+[`similar_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#similar_names
+[`single_char_add_str`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_char_add_str
+[`single_char_lifetime_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_char_lifetime_names
+[`single_char_pattern`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_char_pattern
+[`single_char_push_str`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_char_push_str
+[`single_component_path_imports`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_component_path_imports
+[`single_element_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_element_loop
+[`single_match`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_match
+[`single_match_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#single_match_else
+[`size_of_in_element_count`]: https://rust-lang.github.io/rust-clippy/master/index.html#size_of_in_element_count
+[`skip_while_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#skip_while_next
+[`slow_vector_initialization`]: https://rust-lang.github.io/rust-clippy/master/index.html#slow_vector_initialization
+[`stable_sort_primitive`]: https://rust-lang.github.io/rust-clippy/master/index.html#stable_sort_primitive
+[`std_instead_of_alloc`]: https://rust-lang.github.io/rust-clippy/master/index.html#std_instead_of_alloc
+[`std_instead_of_core`]: https://rust-lang.github.io/rust-clippy/master/index.html#std_instead_of_core
+[`str_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#str_to_string
+[`string_add`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_add
+[`string_add_assign`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_add_assign
+[`string_extend_chars`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_extend_chars
+[`string_from_utf8_as_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_from_utf8_as_bytes
+[`string_lit_as_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_lit_as_bytes
+[`string_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_slice
+[`string_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_to_string
+[`strlen_on_c_strings`]: https://rust-lang.github.io/rust-clippy/master/index.html#strlen_on_c_strings
+[`struct_excessive_bools`]: https://rust-lang.github.io/rust-clippy/master/index.html#struct_excessive_bools
+[`stutter`]: https://rust-lang.github.io/rust-clippy/master/index.html#stutter
+[`suboptimal_flops`]: https://rust-lang.github.io/rust-clippy/master/index.html#suboptimal_flops
+[`suspicious_arithmetic_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_arithmetic_impl
+[`suspicious_assignment_formatting`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_assignment_formatting
+[`suspicious_else_formatting`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_else_formatting
+[`suspicious_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_map
+[`suspicious_op_assign_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_op_assign_impl
+[`suspicious_operation_groupings`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_operation_groupings
+[`suspicious_splitn`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_splitn
+[`suspicious_unary_op_formatting`]: https://rust-lang.github.io/rust-clippy/master/index.html#suspicious_unary_op_formatting
+[`swap_ptr_to_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#swap_ptr_to_ref
+[`tabs_in_doc_comments`]: https://rust-lang.github.io/rust-clippy/master/index.html#tabs_in_doc_comments
+[`temporary_assignment`]: https://rust-lang.github.io/rust-clippy/master/index.html#temporary_assignment
+[`temporary_cstring_as_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#temporary_cstring_as_ptr
+[`to_digit_is_some`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_digit_is_some
+[`to_string_in_display`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_string_in_display
+[`to_string_in_format_args`]: https://rust-lang.github.io/rust-clippy/master/index.html#to_string_in_format_args
+[`todo`]: https://rust-lang.github.io/rust-clippy/master/index.html#todo
+[`too_many_arguments`]: https://rust-lang.github.io/rust-clippy/master/index.html#too_many_arguments
+[`too_many_lines`]: https://rust-lang.github.io/rust-clippy/master/index.html#too_many_lines
+[`toplevel_ref_arg`]: https://rust-lang.github.io/rust-clippy/master/index.html#toplevel_ref_arg
+[`trailing_empty_array`]: https://rust-lang.github.io/rust-clippy/master/index.html#trailing_empty_array
+[`trait_duplication_in_bounds`]: https://rust-lang.github.io/rust-clippy/master/index.html#trait_duplication_in_bounds
+[`transmute_bytes_to_str`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_bytes_to_str
+[`transmute_float_to_int`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_float_to_int
+[`transmute_int_to_bool`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_int_to_bool
+[`transmute_int_to_char`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_int_to_char
+[`transmute_int_to_float`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_int_to_float
+[`transmute_num_to_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_num_to_bytes
+[`transmute_ptr_to_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_ptr_to_ptr
+[`transmute_ptr_to_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_ptr_to_ref
+[`transmute_undefined_repr`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmute_undefined_repr
+[`transmutes_expressible_as_ptr_casts`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmutes_expressible_as_ptr_casts
+[`transmuting_null`]: https://rust-lang.github.io/rust-clippy/master/index.html#transmuting_null
+[`trim_split_whitespace`]: https://rust-lang.github.io/rust-clippy/master/index.html#trim_split_whitespace
+[`trivial_regex`]: https://rust-lang.github.io/rust-clippy/master/index.html#trivial_regex
+[`trivially_copy_pass_by_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref
+[`try_err`]: https://rust-lang.github.io/rust-clippy/master/index.html#try_err
+[`type_complexity`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_complexity
+[`type_repetition_in_bounds`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_repetition_in_bounds
+[`undocumented_unsafe_blocks`]: https://rust-lang.github.io/rust-clippy/master/index.html#undocumented_unsafe_blocks
+[`undropped_manually_drops`]: https://rust-lang.github.io/rust-clippy/master/index.html#undropped_manually_drops
+[`unicode_not_nfc`]: https://rust-lang.github.io/rust-clippy/master/index.html#unicode_not_nfc
+[`unimplemented`]: https://rust-lang.github.io/rust-clippy/master/index.html#unimplemented
+[`uninit_assumed_init`]: https://rust-lang.github.io/rust-clippy/master/index.html#uninit_assumed_init
+[`uninit_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#uninit_vec
+[`unit_arg`]: https://rust-lang.github.io/rust-clippy/master/index.html#unit_arg
+[`unit_cmp`]: https://rust-lang.github.io/rust-clippy/master/index.html#unit_cmp
+[`unit_hash`]: https://rust-lang.github.io/rust-clippy/master/index.html#unit_hash
+[`unit_return_expecting_ord`]: https://rust-lang.github.io/rust-clippy/master/index.html#unit_return_expecting_ord
+[`unknown_clippy_lints`]: https://rust-lang.github.io/rust-clippy/master/index.html#unknown_clippy_lints
+[`unnecessary_cast`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_cast
+[`unnecessary_filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_filter_map
+[`unnecessary_find_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_find_map
+[`unnecessary_fold`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_fold
+[`unnecessary_join`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_join
+[`unnecessary_lazy_evaluations`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_lazy_evaluations
+[`unnecessary_mut_passed`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_mut_passed
+[`unnecessary_operation`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_operation
+[`unnecessary_owned_empty_strings`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_owned_empty_strings
+[`unnecessary_self_imports`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_self_imports
+[`unnecessary_sort_by`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_sort_by
+[`unnecessary_to_owned`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_to_owned
+[`unnecessary_unwrap`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_unwrap
+[`unnecessary_wraps`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_wraps
+[`unneeded_field_pattern`]: https://rust-lang.github.io/rust-clippy/master/index.html#unneeded_field_pattern
+[`unneeded_wildcard_pattern`]: https://rust-lang.github.io/rust-clippy/master/index.html#unneeded_wildcard_pattern
+[`unnested_or_patterns`]: https://rust-lang.github.io/rust-clippy/master/index.html#unnested_or_patterns
+[`unreachable`]: https://rust-lang.github.io/rust-clippy/master/index.html#unreachable
+[`unreadable_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#unreadable_literal
+[`unsafe_derive_deserialize`]: https://rust-lang.github.io/rust-clippy/master/index.html#unsafe_derive_deserialize
+[`unsafe_removed_from_name`]: https://rust-lang.github.io/rust-clippy/master/index.html#unsafe_removed_from_name
+[`unsafe_vector_initialization`]: https://rust-lang.github.io/rust-clippy/master/index.html#unsafe_vector_initialization
+[`unseparated_literal_suffix`]: https://rust-lang.github.io/rust-clippy/master/index.html#unseparated_literal_suffix
+[`unsound_collection_transmute`]: https://rust-lang.github.io/rust-clippy/master/index.html#unsound_collection_transmute
+[`unstable_as_mut_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#unstable_as_mut_slice
+[`unstable_as_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#unstable_as_slice
+[`unused_async`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_async
+[`unused_collect`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_collect
+[`unused_io_amount`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_io_amount
+[`unused_label`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_label
+[`unused_rounding`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_rounding
+[`unused_self`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_self
+[`unused_unit`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_unit
+[`unusual_byte_groupings`]: https://rust-lang.github.io/rust-clippy/master/index.html#unusual_byte_groupings
+[`unwrap_in_result`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_in_result
+[`unwrap_or_else_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_or_else_default
+[`unwrap_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_used
+[`upper_case_acronyms`]: https://rust-lang.github.io/rust-clippy/master/index.html#upper_case_acronyms
+[`use_debug`]: https://rust-lang.github.io/rust-clippy/master/index.html#use_debug
+[`use_self`]: https://rust-lang.github.io/rust-clippy/master/index.html#use_self
+[`used_underscore_binding`]: https://rust-lang.github.io/rust-clippy/master/index.html#used_underscore_binding
+[`useless_asref`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_asref
+[`useless_attribute`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_attribute
+[`useless_conversion`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_conversion
+[`useless_format`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_format
+[`useless_let_if_seq`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_let_if_seq
+[`useless_transmute`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_transmute
+[`useless_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#useless_vec
+[`vec_box`]: https://rust-lang.github.io/rust-clippy/master/index.html#vec_box
+[`vec_init_then_push`]: https://rust-lang.github.io/rust-clippy/master/index.html#vec_init_then_push
+[`vec_resize_to_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#vec_resize_to_zero
+[`verbose_bit_mask`]: https://rust-lang.github.io/rust-clippy/master/index.html#verbose_bit_mask
+[`verbose_file_reads`]: https://rust-lang.github.io/rust-clippy/master/index.html#verbose_file_reads
+[`vtable_address_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#vtable_address_comparisons
+[`while_immutable_condition`]: https://rust-lang.github.io/rust-clippy/master/index.html#while_immutable_condition
+[`while_let_loop`]: https://rust-lang.github.io/rust-clippy/master/index.html#while_let_loop
+[`while_let_on_iterator`]: https://rust-lang.github.io/rust-clippy/master/index.html#while_let_on_iterator
+[`wildcard_dependencies`]: https://rust-lang.github.io/rust-clippy/master/index.html#wildcard_dependencies
+[`wildcard_enum_match_arm`]: https://rust-lang.github.io/rust-clippy/master/index.html#wildcard_enum_match_arm
+[`wildcard_imports`]: https://rust-lang.github.io/rust-clippy/master/index.html#wildcard_imports
+[`wildcard_in_or_patterns`]: https://rust-lang.github.io/rust-clippy/master/index.html#wildcard_in_or_patterns
+[`write_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#write_literal
+[`write_with_newline`]: https://rust-lang.github.io/rust-clippy/master/index.html#write_with_newline
+[`writeln_empty_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#writeln_empty_string
+[`wrong_pub_self_convention`]: https://rust-lang.github.io/rust-clippy/master/index.html#wrong_pub_self_convention
+[`wrong_self_convention`]: https://rust-lang.github.io/rust-clippy/master/index.html#wrong_self_convention
+[`wrong_transmute`]: https://rust-lang.github.io/rust-clippy/master/index.html#wrong_transmute
+[`zero_divided_by_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#zero_divided_by_zero
+[`zero_prefixed_literal`]: https://rust-lang.github.io/rust-clippy/master/index.html#zero_prefixed_literal
+[`zero_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#zero_ptr
+[`zero_sized_map_values`]: https://rust-lang.github.io/rust-clippy/master/index.html#zero_sized_map_values
+[`zero_width_space`]: https://rust-lang.github.io/rust-clippy/master/index.html#zero_width_space
+[`zst_offset`]: https://rust-lang.github.io/rust-clippy/master/index.html#zst_offset
+<!-- end autogenerated links to lint list -->
diff --git a/src/tools/clippy/CODE_OF_CONDUCT.md b/src/tools/clippy/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..dec13e44a
--- /dev/null
+++ b/src/tools/clippy/CODE_OF_CONDUCT.md
@@ -0,0 +1,70 @@
+# The Rust Code of Conduct
+
+A version of this document [can be found online](https://www.rust-lang.org/conduct.html).
+
+## Conduct
+
+**Contact**: [rust-mods@rust-lang.org](mailto:rust-mods@rust-lang.org)
+
+* We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience,
+ gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+ religion, nationality, or other similar characteristic.
+* On IRC, please avoid using overtly sexual nicknames or other nicknames that might detract from a friendly, safe and
+ welcoming environment for all.
+* Please be kind and courteous. There's no need to be mean or rude.
+* Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and
+ numerous costs. There is seldom a right answer.
+* Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and
+ see how it works.
+* We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We
+ interpret the term "harassment" as including the definition in the <a href="http://citizencodeofconduct.org/">Citizen
+ Code of Conduct</a>; if you have any lack of clarity about what might be included in that concept, please read their
+ definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
+* Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or
+ made uncomfortable by a community member, please contact one of the channel ops or any of the [Rust moderation
+ team][mod_team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a
+ safe place for you and we've got your back.
+* Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
+
+## Moderation
+
+
+These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation,
+please contact the [Rust moderation team][mod_team].
+
+1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks,
+ are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
+2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
+3. Moderators will first respond to such remarks with a warning.
+4. If the warning is unheeded, the user will be "kicked," i.e., kicked out of the communication channel to cool off.
+5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
+6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended
+ party a genuine apology.
+7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a
+ different moderator, **in private**. Complaints about bans in-channel are not allowed.
+8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate
+ situation, they should expect less leeway than others.
+
+In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically
+unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly
+if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can
+drive people away from the community entirely.
+
+And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was
+they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good
+there was something you could've communicated better — remember that it's your responsibility to make your fellow
+Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about
+cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their
+trust.
+
+The enforcement policies listed above apply to all official Rust venues; including official IRC channels (#rust,
+#rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo);
+GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org
+(users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Rust Code of Conduct, please contact the
+maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider
+explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
+
+*Adapted from the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the
+[Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
+
+[mod_team]: https://www.rust-lang.org/team.html#Moderation-team
diff --git a/src/tools/clippy/CONTRIBUTING.md b/src/tools/clippy/CONTRIBUTING.md
new file mode 100644
index 000000000..6e15133d2
--- /dev/null
+++ b/src/tools/clippy/CONTRIBUTING.md
@@ -0,0 +1,248 @@
+# Contributing to Clippy
+
+Hello fellow Rustacean! Great to see your interest in compiler internals and lints!
+
+**First**: if you're unsure or afraid of _anything_, just ask or submit the issue or pull request anyway. You won't be
+yelled at for giving it your best effort. The worst that can happen is that you'll be politely asked to change
+something. We appreciate any sort of contributions, and don't want a wall of rules to get in the way of that.
+
+Clippy welcomes contributions from everyone. There are many ways to contribute to Clippy and the following document
+explains how you can contribute and how to get started. If you have any questions about contributing or need help with
+anything, feel free to ask questions on issues or visit the `#clippy` stream on [Zulip].
+
+All contributors are expected to follow the [Rust Code of Conduct].
+
+- [Contributing to Clippy](#contributing-to-clippy)
+ - [The Clippy book](#the-clippy-book)
+ - [High level approach](#high-level-approach)
+ - [Finding something to fix/improve](#finding-something-to-fiximprove)
+ - [Getting code-completion for rustc internals to work](#getting-code-completion-for-rustc-internals-to-work)
+ - [IntelliJ Rust](#intellij-rust)
+ - [Rust Analyzer](#rust-analyzer)
+ - [How Clippy works](#how-clippy-works)
+ - [Issue and PR triage](#issue-and-pr-triage)
+ - [Bors and Homu](#bors-and-homu)
+ - [Contributions](#contributions)
+
+[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/clippy
+[Rust Code of Conduct]: https://www.rust-lang.org/policies/code-of-conduct
+
+## The Clippy book
+
+If you're new to Clippy and don't know where to start, the [Clippy book] includes
+a developer guide and is a good place to start your journey.
+
+<!-- FIXME: Link to the deployed book, once it is deployed through CI -->
+[Clippy book]: book/src
+
+## High level approach
+
+1. Find something to fix/improve
+2. Change code (likely some file in `clippy_lints/src/`)
+3. Follow the instructions in the [Basics docs](book/src/development/basics.md)
+ to get set up
+4. Run `cargo test` in the root directory and wiggle code until it passes
+5. Open a PR (this can also be done after step 2 if you run into problems)
+
+## Finding something to fix/improve
+
+All issues on Clippy are mentored; if you want help, simply ask someone from the
+Clippy team directly by mentioning them in the issue or over on [Zulip]. All
+currently active team members can be found
+[here](https://github.com/rust-lang/highfive/blob/master/highfive/configs/rust-lang/rust-clippy.json#L3).
+
+Some issues are easier than others. The [`good-first-issue`] label can be used to find the easy
+issues. You can use `@rustbot claim` to assign the issue to yourself.
+
+There are also some abandoned PRs, marked with [`S-inactive-closed`].
+Pretty often these PRs are nearly completed and just need some extra steps
+(formatting, addressing review comments, ...) to be merged. If you want to
+complete such a PR, please leave a comment in the PR and open a new one based
+on it.
+
+Issues marked [`T-AST`] involve simple matching of the syntax tree structure,
+and are generally easier than [`T-middle`] issues, which involve types
+and resolved paths.
+
+[`T-AST`] issues will generally need you to match against a predefined syntax structure.
+To figure out how this syntax structure is encoded in the AST, it is recommended to run
+`rustc -Z unpretty=ast-tree` on an example of the structure and compare with the [nodes in the AST docs].
+Usually the lint will end up being a nested series of matches and ifs, [like so][deep-nesting].
+But we can make it nest-less by using [let chains], [like this][nest-less].
+
+[`E-medium`] issues are generally pretty easy too, though it's recommended you work on a [`good-first-issue`]
+first. Sometimes they are only somewhat involved code-wise, but not difficult per se.
+Note that [`E-medium`] issues may require some knowledge of Clippy internals or some
+debugging to find the actual problem behind the issue.
+
+[`T-middle`] issues can be more involved and require verifying types. The [`ty`] module contains a
+lot of methods that are useful, though one of the most useful would be `expr_ty` (gives the type of
+an AST expression). `match_def_path()` in Clippy's `utils` module can also be useful.
+
+[`good-first-issue`]: https://github.com/rust-lang/rust-clippy/labels/good-first-issue
+[`S-inactive-closed`]: https://github.com/rust-lang/rust-clippy/pulls?q=is%3Aclosed+label%3AS-inactive-closed
+[`T-AST`]: https://github.com/rust-lang/rust-clippy/labels/T-AST
+[`T-middle`]: https://github.com/rust-lang/rust-clippy/labels/T-middle
+[`E-medium`]: https://github.com/rust-lang/rust-clippy/labels/E-medium
+[`ty`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty
+[nodes in the AST docs]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast/ast/
+[deep-nesting]: https://github.com/rust-lang/rust-clippy/blob/5e4f0922911536f80d9591180fa604229ac13939/clippy_lints/src/mem_forget.rs#L31-L45
+[let chains]: https://github.com/rust-lang/rust/pull/94927
+[nest-less]: https://github.com/rust-lang/rust-clippy/blob/5e4f0922911536f80d9591180fa604229ac13939/clippy_lints/src/bit_mask.rs#L133-L159
+
+## Getting code-completion for rustc internals to work
+
+### IntelliJ Rust
+Unfortunately, [`IntelliJ Rust`][IntelliJ_rust_homepage] does not (yet?) understand how Clippy uses compiler internals
+via `extern crate`, and it also needs to be able to read the source files of the rustc compiler, which are not
+available via a `rustup` component at the time of writing.
+To work around this, you need to have a copy of the [rustc-repo][rustc_repo] available which can be obtained via
+`git clone https://github.com/rust-lang/rust/`.
+Then you can run a `cargo dev` command to automatically make Clippy use the rustc repo via path dependencies,
+which `IntelliJ Rust` will be able to understand.
+Run `cargo dev setup intellij --repo-path <repo-path>` where `<repo-path>` is a path to the rustc repo
+you just cloned.
+The command will add path-dependencies pointing towards rustc-crates inside the rustc repo to
+Clippy's `Cargo.toml`s and should allow `IntelliJ Rust` to understand most of the types that Clippy uses.
+Just make sure to remove the dependencies again before finally making a pull request!
+
+[rustc_repo]: https://github.com/rust-lang/rust/
+[IntelliJ_rust_homepage]: https://intellij-rust.github.io/
+
+### Rust Analyzer
+As of [#6869][6869], [`rust-analyzer`][ra_homepage] can understand that Clippy uses compiler internals
+via `extern crate` when `package.metadata.rust-analyzer.rustc_private` is set to `true` in Clippy's `Cargo.toml`.
+You will require a `nightly` toolchain with the `rustc-dev` component installed.
+Make sure that in the `rust-analyzer` configuration, you set
+```json
+{ "rust-analyzer.rustc.source": "discover" }
+```
+and
+```json
+{ "rust-analyzer.updates.channel": "nightly" }
+```
+You should now be able to see information on things like `Expr` or `EarlyContext` when you hover over them, as well as
+a lot more type hints.
+This will work with `rust-analyzer 2021-03-15` shipped in nightly `1.52.0-nightly (107896c32 2021-03-15)` or later.
+
+[ra_homepage]: https://rust-analyzer.github.io/
+[6869]: https://github.com/rust-lang/rust-clippy/pull/6869
+
+## How Clippy works
+
+[`clippy_lints/src/lib.rs`][lint_crate_entry] imports all the different lint modules and registers them in the [`LintStore`].
+For example, the [`else_if_without_else`][else_if_without_else] lint is registered like this:
+
+```rust
+// ./clippy_lints/src/lib.rs
+
+// ...
+pub mod else_if_without_else;
+// ...
+
+pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: &Conf) {
+ // ...
+ store.register_early_pass(|| box else_if_without_else::ElseIfWithoutElse);
+ // ...
+
+ store.register_group(true, "clippy::restriction", Some("clippy_restriction"), vec![
+ // ...
+ LintId::of(&else_if_without_else::ELSE_IF_WITHOUT_ELSE),
+ // ...
+ ]);
+}
+```
+
+The [`rustc_lint::LintStore`][`LintStore`] provides two methods to register lints:
+[register_early_pass][reg_early_pass] and [register_late_pass][reg_late_pass]. Both take an object
+that implements an [`EarlyLintPass`][early_lint_pass] or [`LateLintPass`][late_lint_pass] respectively. This is done in
+every single lint. It's worth noting that the majority of `clippy_lints/src/lib.rs` is autogenerated by `cargo dev
+update_lints`. When you are writing your own lint, you can use that script to save you some time.
+
+```rust
+// ./clippy_lints/src/else_if_without_else.rs
+
+use rustc_lint::{EarlyLintPass, EarlyContext};
+
+// ...
+
+pub struct ElseIfWithoutElse;
+
+// ...
+
+impl EarlyLintPass for ElseIfWithoutElse {
+ // ... the functions needed, to make the lint work
+}
+```
+
+The difference between `EarlyLintPass` and `LateLintPass` is that the methods of the `EarlyLintPass` trait only provide
+AST information. The methods of the `LateLintPass` trait are executed after type checking and contain type information
+via the `LateContext` parameter.
+
+That's why the `else_if_without_else` example uses the `register_early_pass` function: the
+[actual lint logic][else_if_without_else] does not depend on any type information.
+
+[lint_crate_entry]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/lib.rs
+[else_if_without_else]: https://github.com/rust-lang/rust-clippy/blob/4253aa7137cb7378acc96133c787e49a345c2b3c/clippy_lints/src/else_if_without_else.rs
+[`LintStore`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html
+[reg_early_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html#method.register_early_pass
+[reg_late_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html#method.register_late_pass
+[early_lint_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.EarlyLintPass.html
+[late_lint_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.LateLintPass.html
+
+## Issue and PR triage
+
+Clippy follows the [Rust triage procedure][triage] for issues and pull
+requests.
+
+However, we are a smaller project and all contributors are currently volunteers.
+Between writing new lints, fixing issues, reviewing pull requests, and
+responding to issues, there may not always be enough time to stay on top of it
+all.
+
+Our highest priority is fixing [crashes][l-crash] and [bugs][l-bug], for example
+an ICE in a popular crate that many other crates depend on. We don't
+want Clippy to crash on your code and we want it to be as reliable as the
+suggestions from Rust compiler errors.
+
+We have prioritization labels and a sync-blocker label, which are described below.
+- [P-low][p-low]: Requires attention (fix/response/evaluation) by a team member but isn't urgent.
+- [P-medium][p-medium]: Should be addressed by a team member before the next sync.
+- [P-high][p-high]: Should be immediately addressed and will require an out-of-cycle sync or a backport.
+- [L-sync-blocker][l-sync-blocker]: An issue that "blocks" a sync.
+It should be addressed before the sync,
+e.g. by removing a lint again, so it doesn't hit beta/stable.
+
+## Bors and Homu
+
+We use a bot powered by [Homu][homu] to help automate testing and landing of pull
+requests in Clippy. The bot's username is @bors.
+
+You can find the Clippy bors queue [here][homu_queue].
+
+If you have @bors permissions, you can find an overview of the available
+commands [here][homu_instructions].
+
+[triage]: https://forge.rust-lang.org/release/triage-procedure.html
+[l-crash]: https://github.com/rust-lang/rust-clippy/labels/L-crash
+[l-bug]: https://github.com/rust-lang/rust-clippy/labels/L-bug
+[p-low]: https://github.com/rust-lang/rust-clippy/labels/P-low
+[p-medium]: https://github.com/rust-lang/rust-clippy/labels/P-medium
+[p-high]: https://github.com/rust-lang/rust-clippy/labels/P-high
+[l-sync-blocker]: https://github.com/rust-lang/rust-clippy/labels/L-sync-blocker
+[homu]: https://github.com/rust-lang/homu
+[homu_instructions]: https://bors.rust-lang.org/
+[homu_queue]: https://bors.rust-lang.org/queue/clippy
+
+## Contributions
+
+Contributions to Clippy should be made in the form of GitHub pull requests. Each pull request will
+be reviewed by a core contributor (someone with permission to land patches) and either landed in the
+main tree or given feedback for changes that would be required.
+
+All code in this repository is under the [Apache-2.0] or the [MIT] license.
+
+<!-- adapted from https://github.com/servo/servo/blob/master/CONTRIBUTING.md -->
+
+[Apache-2.0]: https://www.apache.org/licenses/LICENSE-2.0
+[MIT]: https://opensource.org/licenses/MIT
diff --git a/src/tools/clippy/COPYRIGHT b/src/tools/clippy/COPYRIGHT
new file mode 100644
index 000000000..a6be75b5e
--- /dev/null
+++ b/src/tools/clippy/COPYRIGHT
@@ -0,0 +1,7 @@
+Copyright 2014-2022 The Rust Project Developers
+
+Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+<LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+option. All files in the project carrying such notice may not be
+copied, modified, or distributed except according to those terms.
diff --git a/src/tools/clippy/Cargo.toml b/src/tools/clippy/Cargo.toml
new file mode 100644
index 000000000..1c875c3ad
--- /dev/null
+++ b/src/tools/clippy/Cargo.toml
@@ -0,0 +1,67 @@
+[package]
+name = "clippy"
+version = "0.1.64"
+description = "A bunch of helpful lints to avoid common pitfalls in Rust"
+repository = "https://github.com/rust-lang/rust-clippy"
+readme = "README.md"
+license = "MIT OR Apache-2.0"
+keywords = ["clippy", "lint", "plugin"]
+categories = ["development-tools", "development-tools::cargo-plugins"]
+build = "build.rs"
+edition = "2021"
+publish = false
+
+[[bin]]
+name = "cargo-clippy"
+test = false
+path = "src/main.rs"
+
+[[bin]]
+name = "clippy-driver"
+path = "src/driver.rs"
+
+[dependencies]
+clippy_lints = { path = "clippy_lints" }
+semver = "1.0"
+rustc_tools_util = { path = "rustc_tools_util" }
+tempfile = { version = "3.2", optional = true }
+termize = "0.1"
+
+[dev-dependencies]
+compiletest_rs = { version = "0.8", features = ["tmp"] }
+tester = "0.9"
+regex = "1.5"
+toml = "0.5"
+walkdir = "2.3"
+# This is used by the `collect-metadata` alias.
+filetime = "0.2"
+
+# A noop dependency that changes in the Rust repository; it's a bit of a hack.
+# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust`
+# for more information.
+rustc-workspace-hack = "1.0"
+
+# UI test dependencies
+clippy_utils = { path = "clippy_utils" }
+derive-new = "0.5"
+if_chain = "1.0"
+itertools = "0.10.1"
+quote = "1.0"
+serde = { version = "1.0.125", features = ["derive"] }
+syn = { version = "1.0", features = ["full"] }
+futures = "0.3"
+parking_lot = "0.12"
+tokio = { version = "1", features = ["io-util"] }
+rustc-semver = "1.1"
+
+[build-dependencies]
+rustc_tools_util = { version = "0.2", path = "rustc_tools_util" }
+
+[features]
+deny-warnings = ["clippy_lints/deny-warnings"]
+integration = ["tempfile"]
+internal = ["clippy_lints/internal", "tempfile"]
+
+[package.metadata.rust-analyzer]
+# This package uses #![feature(rustc_private)]
+rustc_private = true
diff --git a/src/tools/clippy/LICENSE-APACHE b/src/tools/clippy/LICENSE-APACHE
new file mode 100644
index 000000000..0d62c3727
--- /dev/null
+++ b/src/tools/clippy/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2014-2022 The Rust Project Developers
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/tools/clippy/LICENSE-MIT b/src/tools/clippy/LICENSE-MIT
new file mode 100644
index 000000000..b724b24aa
--- /dev/null
+++ b/src/tools/clippy/LICENSE-MIT
@@ -0,0 +1,27 @@
+MIT License
+
+Copyright (c) 2014-2022 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/clippy/README.md b/src/tools/clippy/README.md
new file mode 100644
index 000000000..2c3defeaa
--- /dev/null
+++ b/src/tools/clippy/README.md
@@ -0,0 +1,255 @@
+# Clippy
+
+[![Clippy Test](https://github.com/rust-lang/rust-clippy/workflows/Clippy%20Test/badge.svg?branch=auto&event=push)](https://github.com/rust-lang/rust-clippy/actions?query=workflow%3A%22Clippy+Test%22+event%3Apush+branch%3Aauto)
+[![License: MIT OR Apache-2.0](https://img.shields.io/crates/l/clippy.svg)](#license)
+
+A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code.
+
+[There are over 550 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
+
+Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html).
+You can choose how much Clippy is supposed to ~~annoy~~ help you by changing the lint level by category.
+
+| Category | Description | Default level |
+| --------------------- | ----------------------------------------------------------------------------------- | ------------- |
+| `clippy::all` | all lints that are on by default (correctness, suspicious, style, complexity, perf) | **warn/deny** |
+| `clippy::correctness` | code that is outright wrong or useless | **deny** |
+| `clippy::suspicious` | code that is most likely wrong or useless | **warn** |
+| `clippy::style` | code that should be written in a more idiomatic way | **warn** |
+| `clippy::complexity` | code that does something simple but in a complex way | **warn** |
+| `clippy::perf` | code that can be written to run faster | **warn** |
+| `clippy::pedantic` | lints which are rather strict or have occasional false positives | allow |
+| `clippy::nursery` | new lints that are still under development | allow |
+| `clippy::cargo` | lints for the cargo manifest | allow |
+
+More to come; please [file an issue](https://github.com/rust-lang/rust-clippy/issues) if you have ideas!
+
+The [lint list](https://rust-lang.github.io/rust-clippy/master/index.html) also contains "restriction lints", which are
+for things which are usually not considered "bad", but may be useful to turn on in specific cases. These should be used
+very selectively, if at all.
+
+Table of contents:
+
+* [Usage instructions](#usage)
+* [Configuration](#configuration)
+* [Contributing](#contributing)
+* [License](#license)
+
+## Usage
+
+Below are instructions on how to use Clippy as a cargo subcommand,
+in projects that do not use cargo, or in Travis CI.
+
+### As a cargo subcommand (`cargo clippy`)
+
+One way to use Clippy is by installing Clippy through rustup as a cargo
+subcommand.
+
+#### Step 1: Install Rustup
+
+You can install [Rustup](https://rustup.rs/) on supported platforms. This will help
+you install Clippy and its dependencies.
+
+If you already have Rustup installed, update to ensure you have the latest
+Rustup and compiler:
+
+```terminal
+rustup update
+```
+
+#### Step 2: Install Clippy
+
+Once you have rustup and the latest stable release (at least Rust 1.29) installed, run the following command:
+
+```terminal
+rustup component add clippy
+```
+If it says that it can't find the `clippy` component, please run `rustup self update`.
+
+#### Step 3: Run Clippy
+
+Now you can run Clippy by invoking the following command:
+
+```terminal
+cargo clippy
+```
+
+#### Automatically applying Clippy suggestions
+
+Clippy can automatically apply some lint suggestions, just like the compiler.
+
+```terminal
+cargo clippy --fix
+```
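+
+For illustration only (this snippet is not from the Clippy documentation): a machine-applicable lint such as
+`clippy::needless_return` rewrites the flagged code in place, roughly like the before/after sketch below.
+
+```rust
+// Before `cargo clippy --fix` (illustrative): `clippy::needless_return` flags this.
+fn double_before(x: i32) -> i32 {
+    return x * 2;
+}
+
+// After the automatic fix is applied, the redundant `return` is gone.
+fn double_after(x: i32) -> i32 {
+    x * 2
+}
+
+fn main() {
+    assert_eq!(double_before(2), double_after(2));
+}
+```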
+
+#### Workspaces
+
+All the usual workspace options should work with Clippy. For example, the following command
+will run Clippy on the `example` crate:
+
+```terminal
+cargo clippy -p example
+```
+
+As with `cargo check`, this includes dependencies that are members of the workspace, like path dependencies.
+If you want to run Clippy **only** on the given crate, use the `--no-deps` option like this:
+
+```terminal
+cargo clippy -p example -- --no-deps
+```
+
+### Using `clippy-driver`
+
+Clippy can also be used in projects that do not use cargo. To do so, run `clippy-driver`
+with the same arguments you use for `rustc`. For example:
+
+```terminal
+clippy-driver --edition 2018 -Cpanic=abort foo.rs
+```
+
+Note that `clippy-driver` is designed for running Clippy only and should not be used as a general
+replacement for `rustc`. `clippy-driver` may produce artifacts that are not optimized as expected,
+for example.
+
+### Travis CI
+
+You can add Clippy to Travis CI in the same way you use it locally:
+
+```yml
+language: rust
+rust:
+ - stable
+ - beta
+before_script:
+ - rustup component add clippy
+script:
+ - cargo clippy
+ # if you want the build job to fail when encountering warnings, use
+ - cargo clippy -- -D warnings
+ # in order to also check tests and non-default crate features, use
+ - cargo clippy --all-targets --all-features -- -D warnings
+ - cargo test
+ # etc.
+```
+
+Note that adding `-D warnings` will cause your build to fail if **any** warnings are found in your code.
+That includes warnings found by rustc (e.g. `dead_code`, etc.). If you want to avoid this and only cause
+an error for Clippy warnings, use `#![deny(clippy::all)]` in your code or `-D clippy::all` on the command
+line. (You can swap `clippy::all` with the specific lint category you are targeting.)
+
+## Configuration
+
+Some lints can be configured in a TOML file named `clippy.toml` or `.clippy.toml`. It contains a basic `variable =
+value` mapping, e.g.
+
+```toml
+avoid-breaking-exported-api = false
+blacklisted-names = ["toto", "tata", "titi"]
+cognitive-complexity-threshold = 30
+```
+
+See the [list of lints](https://rust-lang.github.io/rust-clippy/master/index.html) for more information about which
+lints can be configured and the meaning of the variables.
+
+Note that configuration changes will not apply for code that has already been compiled and cached under `./target/`;
+for example, adding a new string to `doc-valid-idents` may still result in Clippy flagging that string. To be sure that
+any configuration changes are applied, you may want to run `cargo clean` and re-compile your crate from scratch.
+
+To deactivate the “for further information visit *lint-link*” message you can
+define the `CLIPPY_DISABLE_DOCS_LINKS` environment variable.
+
+### Allowing/denying lints
+
+You can add options to your code to `allow`/`warn`/`deny` Clippy lints:
+
+* the whole set of `Warn` lints using the `clippy` lint group (`#![deny(clippy::all)]`).
+ Note that `rustc` has additional [lint groups](https://doc.rust-lang.org/rustc/lints/groups.html).
+
+* all lints using both the `clippy` and `clippy::pedantic` lint groups (`#![deny(clippy::all)]`,
+ `#![deny(clippy::pedantic)]`). Note that `clippy::pedantic` contains some very aggressive
+ lints prone to false positives.
+
+* only some lints (`#![deny(clippy::single_match, clippy::box_vec)]`, etc.)
+
+* `allow`/`warn`/`deny` can be limited to a single function or module using `#[allow(...)]`, etc.
+
+Note: `allow` means to suppress the lint for your code. With `warn` the lint
+will only emit a warning, while with `deny` the lint will emit an error when it
+triggers for your code. An error causes Clippy to exit with an error code, so it
+is useful in scripts like CI/CD.
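+
+For instance, here is a minimal sketch (illustrative, reusing the `clippy::single_match` lint mentioned above)
+that combines a crate-wide `deny` with a function-level `allow`:
+
+```rust
+// Deny every default Clippy lint crate-wide...
+#![deny(clippy::all)]
+
+// ...but allow one specific lint for this function only.
+#[allow(clippy::single_match)]
+fn report(status: Option<&str>) {
+    // `clippy::single_match` would normally suggest `if let` here,
+    // but the attribute above suppresses it for this function.
+    match status {
+        Some(s) => println!("status: {}", s),
+        None => {}
+    }
+}
+
+fn main() {
+    report(Some("ok"));
+    report(None);
+}
+```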
+
+If you do not want to include your lint levels in your code, you can globally
+enable/disable lints by passing extra flags to Clippy during the run:
+
+To allow `lint_name`, run
+
+```terminal
+cargo clippy -- -A clippy::lint_name
+```
+
+And to warn on `lint_name`, run
+
+```terminal
+cargo clippy -- -W clippy::lint_name
+```
+
+This also works with lint groups. For example, you
+can run Clippy with warnings for all lints enabled:
+```terminal
+cargo clippy -- -W clippy::pedantic
+```
+
+If you care only about a single lint, you can allow all others and then explicitly warn on
+the lint(s) you are interested in:
+```terminal
+cargo clippy -- -A clippy::all -W clippy::useless_format -W clippy::...
+```
+
+### Specifying the minimum supported Rust version
+
+Projects that intend to support old versions of Rust can disable lints pertaining to newer features by
+specifying the minimum supported Rust version (MSRV) in the clippy configuration file.
+
+```toml
+msrv = "1.30.0"
+```
+
+Alternatively, the [`rust-version` field](https://doc.rust-lang.org/cargo/reference/manifest.html#the-rust-version-field)
+in the `Cargo.toml` can be used.
+
+```toml
+# Cargo.toml
+rust-version = "1.30"
+```
+
+The MSRV can also be specified as an inner attribute, like below.
+
+```rust
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.30.0"]
+
+fn main() {
+ ...
+}
+```
+
+You can also omit the patch version when specifying the MSRV, so `msrv = 1.30`
+is equivalent to `msrv = 1.30.0`.
+
+Note: `custom_inner_attributes` is an unstable feature, so it has to be enabled explicitly.
+
+Lints that recognize this configuration option can be found [here](https://rust-lang.github.io/rust-clippy/master/index.html#msrv).
+
+## Contributing
+
+If you want to contribute to Clippy, you can find more information in [CONTRIBUTING.md](https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md).
+
+## License
+
+Copyright 2014-2022 The Rust Project Developers
+
+Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)> or the MIT license
+<LICENSE-MIT or [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)>, at your
+option. Files in the project may not be
+copied, modified, or distributed except according to those terms.
diff --git a/src/tools/clippy/book/README.md b/src/tools/clippy/book/README.md
new file mode 100644
index 000000000..6d67f80ff
--- /dev/null
+++ b/src/tools/clippy/book/README.md
@@ -0,0 +1,4 @@
+# Clippy Book
+
+This is the source for the Clippy Book. See the
+[book](src/development/infrastructure/book.md) for more information.
diff --git a/src/tools/clippy/book/book.toml b/src/tools/clippy/book/book.toml
new file mode 100644
index 000000000..93b6641f7
--- /dev/null
+++ b/src/tools/clippy/book/book.toml
@@ -0,0 +1,28 @@
+[book]
+authors = ["The Rust Clippy Developers"]
+language = "en"
+multilingual = false
+src = "src"
+title = "Clippy Documentation"
+
+[rust]
+edition = "2018"
+
+[output.html]
+edit-url-template = "https://github.com/rust-lang/rust-clippy/edit/master/book/{path}"
+git-repository-url = "https://github.com/rust-lang/rust-clippy/tree/master/book"
+mathjax-support = true
+site-url = "/rust-clippy/"
+
+[output.html.playground]
+editable = true
+line-numbers = true
+
+[output.html.search]
+boost-hierarchy = 2
+boost-paragraph = 1
+boost-title = 2
+expand = true
+heading-split-level = 2
+limit-results = 20
+use-boolean-and = true
diff --git a/src/tools/clippy/book/src/README.md b/src/tools/clippy/book/src/README.md
new file mode 100644
index 000000000..6248d588a
--- /dev/null
+++ b/src/tools/clippy/book/src/README.md
@@ -0,0 +1,34 @@
+# Clippy
+
+[![Clippy Test](https://github.com/rust-lang/rust-clippy/workflows/Clippy%20Test/badge.svg?branch=auto&event=push)](https://github.com/rust-lang/rust-clippy/actions?query=workflow%3A%22Clippy+Test%22+event%3Apush+branch%3Aauto)
+[![License: MIT OR Apache-2.0](https://img.shields.io/crates/l/clippy.svg)](https://github.com/rust-lang/rust-clippy#license)
+
+A collection of lints to catch common mistakes and improve your
+[Rust](https://github.com/rust-lang/rust) code.
+
+[There are over 550 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
+
+Lints are divided into categories, each with a default [lint
+level](https://doc.rust-lang.org/rustc/lints/levels.html). You can choose how
+much Clippy is supposed to ~~annoy~~ help you by changing the lint level by
+category.
+
+| Category | Description | Default level |
+| --------------------- | ----------------------------------------------------------------------------------- | ------------- |
+| `clippy::all` | all lints that are on by default (correctness, suspicious, style, complexity, perf) | **warn/deny** |
+| `clippy::correctness` | code that is outright wrong or useless | **deny** |
+| `clippy::suspicious` | code that is most likely wrong or useless | **warn** |
+| `clippy::complexity` | code that does something simple but in a complex way | **warn** |
+| `clippy::perf` | code that can be written to run faster | **warn** |
+| `clippy::style` | code that should be written in a more idiomatic way | **warn** |
+| `clippy::pedantic` | lints which are rather strict or might have false positives | allow |
+| `clippy::nursery` | new lints that are still under development | allow |
+| `clippy::cargo`        | lints for the cargo manifest                                                          | allow         |
+
+More to come; please [file an
+issue](https://github.com/rust-lang/rust-clippy/issues) if you have ideas!
+
+The [lint list](https://rust-lang.github.io/rust-clippy/master/index.html) also
+contains "restriction lints", which are for things which are usually not
+considered "bad", but may be useful to turn on in specific cases. These should
+be used very selectively, if at all.
diff --git a/src/tools/clippy/book/src/SUMMARY.md b/src/tools/clippy/book/src/SUMMARY.md
new file mode 100644
index 000000000..0b945faf9
--- /dev/null
+++ b/src/tools/clippy/book/src/SUMMARY.md
@@ -0,0 +1,23 @@
+# Summary
+
+[Introduction](README.md)
+
+- [Installation](installation.md)
+- [Usage](usage.md)
+- [Configuration](configuration.md)
+- [Clippy's Lints](lints.md)
+- [Continuous Integration](continuous_integration/README.md)
+ - [GitHub Actions](continuous_integration/github_actions.md)
+ - [Travis CI](continuous_integration/travis.md)
+- [Development](development/README.md)
+ - [Basics](development/basics.md)
+ - [Adding Lints](development/adding_lints.md)
+ - [Common Tools](development/common_tools_writing_lints.md)
+ - [Infrastructure](development/infrastructure/README.md)
+ - [Syncing changes between Clippy and rust-lang/rust](development/infrastructure/sync.md)
+ - [Backporting Changes](development/infrastructure/backport.md)
+ - [Updating the Changelog](development/infrastructure/changelog_update.md)
+ - [Release a New Version](development/infrastructure/release.md)
+ - [The Clippy Book](development/infrastructure/book.md)
+ - [Proposals](development/proposals/README.md)
+ - [Roadmap 2021](development/proposals/roadmap-2021.md)
diff --git a/src/tools/clippy/book/src/configuration.md b/src/tools/clippy/book/src/configuration.md
new file mode 100644
index 000000000..6e295ac31
--- /dev/null
+++ b/src/tools/clippy/book/src/configuration.md
@@ -0,0 +1,92 @@
+# Configuring Clippy
+
+> **Note:** The configuration file is unstable and may be deprecated in the future.
+
+Some lints can be configured in a TOML file named `clippy.toml` or `.clippy.toml`. It contains a
+basic `variable = value` mapping, e.g.
+
+```toml
+avoid-breaking-exported-api = false
+blacklisted-names = ["toto", "tata", "titi"]
+cognitive-complexity-threshold = 30
+```
+
+See the [list of lints](https://rust-lang.github.io/rust-clippy/master/index.html) for more information about which
+lints can be configured and the meaning of the variables.
+
+To deactivate the "for further information visit *lint-link*" message you can define the `CLIPPY_DISABLE_DOCS_LINKS`
+environment variable.
+
+### Allowing/denying lints
+
+You can add options to your code to `allow`/`warn`/`deny` Clippy lints:
+
+* the whole set of `Warn` lints using the `clippy` lint group (`#![deny(clippy::all)]`)
+
+* all lints using both the `clippy` and `clippy::pedantic` lint groups (`#![deny(clippy::all)]`,
+ `#![deny(clippy::pedantic)]`). Note that `clippy::pedantic` contains some very aggressive lints prone to false
+ positives.
+
+* only some lints (`#![deny(clippy::single_match, clippy::box_vec)]`, etc.)
+
+* `allow`/`warn`/`deny` can be limited to a single function or module using `#[allow(...)]`, etc.
+
+Note: `allow` means to suppress the lint for your code. With `warn` the lint will only emit a warning, while with `deny`
+the lint will emit an error when it triggers for your code. An error causes Clippy to exit with an error code, so it is
+useful in scripts like CI/CD.
+
+If you do not want to include your lint levels in your code, you can globally enable/disable lints by passing extra
+flags to Clippy during the run:
+
+To allow `lint_name`, run
+
+```terminal
+cargo clippy -- -A clippy::lint_name
+```
+
+And to warn on `lint_name`, run
+
+```terminal
+cargo clippy -- -W clippy::lint_name
+```
+
+This also works with lint groups. For example, you can run Clippy with warnings for all lints enabled:
+
+```terminal
+cargo clippy -- -W clippy::pedantic
+```
+
+If you care only about a single lint, you can allow all others and then explicitly warn on the lint(s) you are
+interested in:
+
+```terminal
+cargo clippy -- -A clippy::all -W clippy::useless_format -W clippy::...
+```
+
+### Specifying the minimum supported Rust version
+
+Projects that intend to support old versions of Rust can disable lints pertaining to newer features by specifying the
+minimum supported Rust version (MSRV) in the clippy configuration file.
+
+```toml
+msrv = "1.30.0"
+```
+
+The MSRV can also be specified as an inner attribute, like below.
+
+```rust
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.30.0"]
+
+fn main() {
+ ...
+}
+```
+
+You can also omit the patch version when specifying the MSRV, so `msrv = 1.30`
+is equivalent to `msrv = 1.30.0`.
+
+Note: `custom_inner_attributes` is an unstable feature so it has to be enabled explicitly.
+
+Lints that recognize this configuration option can be
+found [here](https://rust-lang.github.io/rust-clippy/master/index.html#msrv).
diff --git a/src/tools/clippy/book/src/continuous_integration/README.md b/src/tools/clippy/book/src/continuous_integration/README.md
new file mode 100644
index 000000000..e5c3673bd
--- /dev/null
+++ b/src/tools/clippy/book/src/continuous_integration/README.md
@@ -0,0 +1,18 @@
+# Continuous Integration
+
+It is recommended to run Clippy on CI with `-Dwarnings`, so that Clippy lints
+prevent CI from passing. To turn warnings into errors for all `cargo` commands,
+not just `cargo clippy`, you can set the env var `RUSTFLAGS="-Dwarnings"`.
+
+For maximum compatibility, we recommend using Clippy from the same toolchain
+that you use for compiling your crate. For example, if your crate is compiled
+with the `stable` toolchain, you should also use `stable` Clippy.
+
+> _Note:_ New Clippy lints are first added to the `nightly` toolchain. If you
+> want to help with improving Clippy and have CI resources left, please consider
+> adding a `nightly` Clippy check to your CI and report problems like false
+> positives back to us. With that we can fix bugs early, before they can get to
+> stable.
+
+This chapter will give an overview on how to use Clippy on different popular CI
+providers.
diff --git a/src/tools/clippy/book/src/continuous_integration/github_actions.md b/src/tools/clippy/book/src/continuous_integration/github_actions.md
new file mode 100644
index 000000000..339287a7d
--- /dev/null
+++ b/src/tools/clippy/book/src/continuous_integration/github_actions.md
@@ -0,0 +1,21 @@
+# GitHub Actions
+
+GitHub hosted runners using the latest stable version of Rust have Clippy pre-installed.
+It is as simple as running `cargo clippy` to run lints against the codebase.
+
+```yml
+on: push
+name: Clippy check
+
+# Make sure CI fails on all warnings, including Clippy lints
+env:
+ RUSTFLAGS: "-Dwarnings"
+
+jobs:
+ clippy_check:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Run Clippy
+ run: cargo clippy --all-targets --all-features
+```
diff --git a/src/tools/clippy/book/src/continuous_integration/travis.md b/src/tools/clippy/book/src/continuous_integration/travis.md
new file mode 100644
index 000000000..85b9ed53d
--- /dev/null
+++ b/src/tools/clippy/book/src/continuous_integration/travis.md
@@ -0,0 +1,20 @@
+# Travis CI
+
+You can add Clippy to Travis CI in the same way you use it locally:
+
+```yml
+language: rust
+rust:
+ - stable
+ - beta
+before_script:
+ - rustup component add clippy
+script:
+ - cargo clippy
+ # if you want the build job to fail when encountering warnings, use
+ - cargo clippy -- -D warnings
+ # in order to also check tests and non-default crate features, use
+ - cargo clippy --all-targets --all-features -- -D warnings
+ - cargo test
+ # etc.
+```
diff --git a/src/tools/clippy/book/src/development/README.md b/src/tools/clippy/book/src/development/README.md
new file mode 100644
index 000000000..5cf7201cf
--- /dev/null
+++ b/src/tools/clippy/book/src/development/README.md
@@ -0,0 +1,43 @@
+# Clippy Development
+
+Hello fellow Rustacean! If you made it here, you're probably interested in
+making Clippy better by contributing to it. In that case, welcome to the
+project!
+
+> _Note:_ If you're just interested in using Clippy, there's nothing to see from
+> this point onward and you should return to one of the earlier chapters.
+
+## Getting started
+
+If this is your first time contributing to Clippy, you should first read the
+[Basics docs](basics.md). This will explain the basics of how to get the source
+code and how to compile and test it.
+
+## Writing code
+
+If you have done the basic setup, it's time to start hacking.
+
+The [Adding lints](adding_lints.md) chapter is a walk through on how to add a
+new lint to Clippy. This is also interesting if you just want to fix a lint,
+because it also covers how to test lints and gives an overview of the bigger
+picture.
+
+If you want to add a new lint or change existing ones apart from bugfixing, it's
+also a good idea to give the [stability guarantees][rfc_stability] and
+[lint categories][rfc_lint_cats] sections of the [Clippy 1.0 RFC][clippy_rfc] a
+quick read. The lint categories are also described [earlier in this
+book](../lints.md).
+
+> _Note:_ Some higher level things about contributing to Clippy are still
+> covered in the [`CONTRIBUTING.md`] document. Some of those will be moved to
+> the book over time, like:
+> - Finding something to fix
+> - IDE setup
+> - High level overview on how Clippy works
+> - Triage procedure
+> - Bors and Homu
+
+[clippy_rfc]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md
+[rfc_stability]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md#stability-guarantees
+[rfc_lint_cats]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md#lint-audit-and-categories
+[`CONTRIBUTING.md`]: https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md
diff --git a/src/tools/clippy/book/src/development/adding_lints.md b/src/tools/clippy/book/src/development/adding_lints.md
new file mode 100644
index 000000000..da781eb97
--- /dev/null
+++ b/src/tools/clippy/book/src/development/adding_lints.md
@@ -0,0 +1,739 @@
+# Adding a new lint
+
+You are probably here because you want to add a new lint to Clippy. If this is
+the first time you're contributing to Clippy, this document guides you through
+creating an example lint from scratch.
+
+To get started, we will create a lint that detects functions called `foo`,
+because that's clearly a non-descriptive name.
+
+- [Adding a new lint](#adding-a-new-lint)
+ - [Setup](#setup)
+ - [Getting Started](#getting-started)
+ - [Defining Our Lint](#defining-our-lint)
+ - [Standalone](#standalone)
+ - [Specific Type](#specific-type)
+ - [Tests Location](#tests-location)
+ - [Testing](#testing)
+ - [Cargo lints](#cargo-lints)
+ - [Rustfix tests](#rustfix-tests)
+ - [Testing manually](#testing-manually)
+ - [Lint declaration](#lint-declaration)
+ - [Lint registration](#lint-registration)
+ - [Lint passes](#lint-passes)
+ - [Emitting a lint](#emitting-a-lint)
+ - [Adding the lint logic](#adding-the-lint-logic)
+ - [Specifying the lint's minimum supported Rust version (MSRV)](#specifying-the-lints-minimum-supported-rust-version-msrv)
+ - [Author lint](#author-lint)
+ - [Print HIR lint](#print-hir-lint)
+ - [Documentation](#documentation)
+ - [Running rustfmt](#running-rustfmt)
+ - [Debugging](#debugging)
+ - [PR Checklist](#pr-checklist)
+ - [Adding configuration to a lint](#adding-configuration-to-a-lint)
+ - [Cheat Sheet](#cheat-sheet)
+
+## Setup
+
+See the [Basics](basics.md#get-the-code) documentation.
+
+## Getting Started
+
+There is a bit of boilerplate code that needs to be set up when creating a new
+lint. Fortunately, you can use the Clippy dev tools to handle this for you. We
+are naming our new lint `foo_functions` (lints are generally written in snake
+case), and we don't need type information, so it will have an early pass type
+(more on this later). If you're unsure if the name you chose fits the lint,
+take a look at our [lint naming guidelines][lint_naming].
+
+## Defining Our Lint
+To get started, there are two ways to define our lint.
+
+### Standalone
+Command: `cargo dev new_lint --name=foo_functions --pass=early --category=pedantic`
+(category will default to nursery if not provided)
+
+This command will create a new file: `clippy_lints/src/foo_functions.rs`, as well
+as [register the lint](#lint-registration).
+
+### Specific Type
+Command: `cargo dev new_lint --name=foo_functions --type=functions --category=pedantic`
+
+This command will create a new file: `clippy_lints/src/{type}/foo_functions.rs`.
+
+Notice how this command has a `--type` flag instead of `--pass`. Unlike a standalone
+definition, this lint won't be registered in the traditional sense. Instead, you will
+call your lint from within the type's lint pass, found in `clippy_lints/src/{type}/mod.rs`.
+
+A "type" is just the name of a directory in `clippy_lints/src`, like `functions` in
+the example command. These are groupings of lints with common behaviors, so if your
+lint falls into one, it would be best to add it to that type.
+
+### Tests Location
+Both commands will create a file: `tests/ui/foo_functions.rs`. For cargo lints,
+two project hierarchies (fail/pass) will be created by default under `tests/ui-cargo`.
+
+Next, we'll open up these files and add our lint!
+
+## Testing
+
+Let's write some tests first that we can execute while we iterate on our lint.
+
+Clippy uses UI tests for testing. UI tests check that the output of Clippy is
+exactly as expected. Each test is just a plain Rust file that contains the code
+we want to check. The output of Clippy is compared against a `.stderr` file.
+Note that you don't have to create this file yourself, we'll get to generating
+the `.stderr` files further down.
+
+We start by opening the test file created at `tests/ui/foo_functions.rs`.
+
+Update the file with some examples to get started:
+
+```rust
+#![warn(clippy::foo_functions)]
+
+// Impl methods
+struct A;
+impl A {
+ pub fn fo(&self) {}
+ pub fn foo(&self) {}
+ pub fn food(&self) {}
+}
+
+// Default trait methods
+trait B {
+ fn fo(&self) {}
+ fn foo(&self) {}
+ fn food(&self) {}
+}
+
+// Plain functions
+fn fo() {}
+fn foo() {}
+fn food() {}
+
+fn main() {
+ // We also don't want to lint method calls
+ foo();
+ let a = A;
+ a.foo();
+}
+```
+
+Now we can run the test with `TESTNAME=foo_functions cargo uitest`, though
+currently this test is meaningless.
+
+While we are working on implementing our lint, we can keep running the UI test.
+That allows us to check if the output is turning into what we want.
+
+Once we are satisfied with the output, we need to run `cargo dev bless` to
+update the `.stderr` file for our lint. Please note that we should run
+`TESTNAME=foo_functions cargo uitest` every time before running `cargo dev
+bless`; afterwards, running `TESTNAME=foo_functions cargo uitest` should pass. When we
+commit our lint, we need to commit the generated `.stderr` files, too. In
+general, you should only commit files changed by `cargo dev bless` for the
+specific lint you are creating/editing. Note that if the generated files are
+empty, they should be removed.
+
+> _Note:_ you can run multiple test files by specifying a comma separated list:
+> `TESTNAME=foo_functions,test2,test3`.
+
+### Cargo lints
+
+For cargo lints, the process of testing differs in that we are interested in the
+`Cargo.toml` manifest file. We also need a minimal crate associated with that
+manifest.
+
+If our new lint is named e.g. `foo_categories`, after running `cargo dev
+new_lint` we will find by default two new crates, each with its manifest file:
+
+* `tests/ui-cargo/foo_categories/fail/Cargo.toml`: this file should cause the
+ new lint to raise an error.
+* `tests/ui-cargo/foo_categories/pass/Cargo.toml`: this file should not trigger
+ the lint.
+
+If you need more cases, you can copy one of those crates (under
+`foo_categories`) and rename it.
+
+The process of generating the `.stderr` file is the same, and prepending the
+`TESTNAME` variable to `cargo uitest` works too.
+
+## Rustfix tests
+
+If the lint you are working on is making use of structured suggestions, the test
+file should include a `// run-rustfix` comment at the top. This will
+additionally run [rustfix] for that test. Rustfix will apply the suggestions
+from the lint to the code of the test file and compare that to the contents of a
+`.fixed` file.
+
+Use `cargo dev bless` to automatically generate the `.fixed` file after running
+the tests.
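+
+As a rough sketch, using the example lint from this chapter and assuming it emitted
+machine-applicable suggestions, the top of such a test file might look like this:
+
+```rust
+// run-rustfix
+#![warn(clippy::foo_functions)]
+
+// Code that triggers the lint goes here. After `cargo dev bless`, the result of
+// applying the suggestions is stored in `foo_functions.fixed` and compared on
+// each test run.
+fn main() {}
+```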
+
+[rustfix]: https://github.com/rust-lang/rustfix
+
+## Testing manually
+
+Manually testing against an example file can be useful if you have added some
+`println!`s and the test suite output becomes unreadable. To try Clippy with
+your local modifications, run
+
+```bash
+cargo dev lint input.rs
+```
+
+from the working copy root. With tests in place, let's have a look at
+implementing our lint now.
+
+## Lint declaration
+
+Let's start by opening the new file created in the `clippy_lints` crate at
+`clippy_lints/src/foo_functions.rs`. That's the crate where all the lint code
+is. This file has already imported some initial things we will need:
+
+```rust
+use rustc_lint::{EarlyLintPass, EarlyContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_ast::ast::*;
+```
+
+The next step is to update the lint declaration. Lints are declared using the
+[`declare_clippy_lint!`][declare_clippy_lint] macro, and we just need to update
+the auto-generated lint declaration to have a real description, something like
+this:
+
+```rust
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// ### Why is this bad?
+ ///
+ /// ### Example
+ /// ```rust
+ /// // example code
+ /// ```
+ #[clippy::version = "1.29.0"]
+ pub FOO_FUNCTIONS,
+ pedantic,
+ "function named `foo`, which is not a descriptive name"
+}
+```
+
+* The section of lines prefixed with `///` constitutes the lint documentation
+ section. This is the default documentation style and will be displayed [like
+ this][example_lint_page]. To render and open this documentation locally in a
+ browser, run `cargo dev serve`.
+* The `#[clippy::version]` attribute will be rendered as part of the lint
+ documentation. The value should be set to the current Rust version that the
+  lint is developed in. It can be retrieved by running `rustc -vV` in the
+  rust-clippy directory. The version is listed under *release*. (Use the version
+  without the `-nightly` suffix.)
+* `FOO_FUNCTIONS` is the name of our lint. Be sure to follow the [lint naming
+ guidelines][lint_naming] here when naming your lint. In short, the name should
+ state the thing that is being checked for and read well when used with
+ `allow`/`warn`/`deny`.
+* `pedantic` sets the lint level to `Allow`. The exact mapping can be found
+  [here][category_level_mapping].
+* The last part should be a text that explains what exactly is wrong with the
+  code.
+
+The rest of this file contains an empty implementation for our lint pass, which
+in this case is `EarlyLintPass` and should look like this:
+
+```rust
+// clippy_lints/src/foo_functions.rs
+
+// .. imports and lint declaration ..
+
+declare_lint_pass!(FooFunctions => [FOO_FUNCTIONS]);
+
+impl EarlyLintPass for FooFunctions {}
+```
+
+[declare_clippy_lint]: https://github.com/rust-lang/rust-clippy/blob/557f6848bd5b7183f55c1e1522a326e9e1df6030/clippy_lints/src/lib.rs#L60
+[example_lint_page]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure
+[lint_naming]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
+[category_level_mapping]: https://github.com/rust-lang/rust-clippy/blob/557f6848bd5b7183f55c1e1522a326e9e1df6030/clippy_lints/src/lib.rs#L110
+
+## Lint registration
+
+When using `cargo dev new_lint`, the lint is automatically registered and
+nothing more has to be done.
+
+When declaring a new lint by hand and `cargo dev update_lints` is used, the lint
+pass may have to be registered manually in the `register_plugins` function in
+`clippy_lints/src/lib.rs`:
+
+```rust
+store.register_early_pass(|| Box::new(foo_functions::FooFunctions));
+```
+
+As one may expect, there is a corresponding `register_late_pass` method
+available as well. Without a call to one of `register_early_pass` or
+`register_late_pass`, the lint pass in question will not be run.
+
+One reason that `cargo dev update_lints` does not automate this step is that
+multiple lints can use the same lint pass, so registering the lint pass may
+already be done when adding a new lint. Another reason that this step is not
+automated is that the order that the passes are registered determines the order
+the passes actually run, which in turn affects the order that any emitted lints
+are output in.
+
+## Lint passes
+
+Writing a lint that only checks for the name of a function means that we only
+have to deal with the AST and don't have to deal with the type system at all.
+This is good, because it makes writing this particular lint less complicated.
+
+We have to make this decision with every new Clippy lint. It boils down to using
+either [`EarlyLintPass`][early_lint_pass] or [`LateLintPass`][late_lint_pass].
+
+In short, the `LateLintPass` has access to type information while the
+`EarlyLintPass` doesn't. If you don't need access to type information, use the
+`EarlyLintPass`. The `EarlyLintPass` is also faster. However, linting speed
+hasn't really been a concern with Clippy so far.
+
+Since we don't need type information for checking the function name, we used
+`--pass=early` when running the new lint automation and all the imports were
+added accordingly.
+
+[early_lint_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.EarlyLintPass.html
+[late_lint_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.LateLintPass.html
+
+## Emitting a lint
+
+With UI tests and the lint declaration in place, we can start working on the
+implementation of the lint logic.
+
+Let's start by implementing the `EarlyLintPass` for our `FooFunctions`:
+
+```rust
+impl EarlyLintPass for FooFunctions {
+ fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, span: Span, _: NodeId) {
+ // TODO: Emit lint here
+ }
+}
+```
+
+We implement the [`check_fn`][check_fn] method from the
+[`EarlyLintPass`][early_lint_pass] trait. This gives us access to various
+information about the function that is currently being checked. More on that in
+the next section. Let's worry about the details later and emit our lint for
+*every* function definition first.
+
+Depending on how complex we want our lint message to be, we can choose from a
+variety of lint emission functions. They can all be found in
+[`clippy_utils/src/diagnostics.rs`][diagnostics].
+
+`span_lint_and_help` seems most appropriate in this case. It allows us to
+provide an extra help message and we can't really suggest a better name
+automatically. This is how it looks:
+
+```rust
+impl EarlyLintPass for FooFunctions {
+ fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, span: Span, _: NodeId) {
+ span_lint_and_help(
+ cx,
+ FOO_FUNCTIONS,
+ span,
+ "function named `foo`",
+ None,
+ "consider using a more meaningful name"
+ );
+ }
+}
+```
+
+Running our UI test should now produce output that contains the lint message.
+
+According to [the rustc-dev-guide], the text should be matter of fact and avoid
+capitalization and periods, unless multiple sentences are needed. When code or
+an identifier must appear in a message or label, it should be surrounded with
+single grave accents \`.
+
+[check_fn]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.EarlyLintPass.html#method.check_fn
+[diagnostics]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_utils/src/diagnostics.rs
+[the rustc-dev-guide]: https://rustc-dev-guide.rust-lang.org/diagnostics.html
+
+## Adding the lint logic
+
+Writing the logic for your lint will most likely be different from our example,
+so this section is kept rather short.
+
+Using the [`check_fn`][check_fn] method gives us access to [`FnKind`][fn_kind]
+that has the [`FnKind::Fn`] variant. It provides access to the name of the
+function/method via an [`Ident`][ident].
+
+With that we can expand our `check_fn` method to:
+
+```rust
+impl EarlyLintPass for FooFunctions {
+ fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, span: Span, _: NodeId) {
+ if is_foo_fn(fn_kind) {
+ span_lint_and_help(
+ cx,
+ FOO_FUNCTIONS,
+ span,
+ "function named `foo`",
+ None,
+ "consider using a more meaningful name"
+ );
+ }
+ }
+}
+```
+
+We separate the lint conditional from the lint emissions because it makes the
+code a bit easier to read. In some cases this separation also makes it possible
+to write some unit tests (as opposed to only UI tests) for the separate function.
+
+In our example, `is_foo_fn` looks like:
+
+```rust
+// use statements, impl EarlyLintPass, check_fn, ..
+
+fn is_foo_fn(fn_kind: FnKind<'_>) -> bool {
+ match fn_kind {
+ FnKind::Fn(_, ident, ..) => {
+ // check if `fn` name is `foo`
+ ident.name.as_str() == "foo"
+ }
+ // ignore closures
+ FnKind::Closure(..) => false
+ }
+}
+```
+
+Now we should also run the full test suite with `cargo test`. At this point
+running `cargo test` should produce the expected output. Remember to run `cargo
+dev bless` to update the `.stderr` file.
+
+`cargo test` (as opposed to `cargo uitest`) will also ensure that our lint
+implementation is not violating any Clippy lints itself.
+
+That should be it for the lint implementation. Running `cargo test` should now
+pass.
+
+[fn_kind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast/visit/enum.FnKind.html
+[`FnKind::Fn`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast/visit/enum.FnKind.html#variant.Fn
+[ident]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_span/symbol/struct.Ident.html
+
+## Specifying the lint's minimum supported Rust version (MSRV)
+
+Sometimes a lint makes suggestions that require a certain version of Rust. For
+example, the `manual_strip` lint suggests using `str::strip_prefix` and
+`str::strip_suffix` which is only available after Rust 1.45. In such cases, you
+need to ensure that the MSRV configured for the project is >= the MSRV of the
+required Rust feature. If multiple features are required, use the one with the
+highest MSRV, since the suggestion is only valid once all of them are available.
+
+First, add an MSRV alias for the required feature in [`clippy_utils::msrvs`].
+This can be accessed later as `msrvs::STR_STRIP_PREFIX`, for example.
+
+```rust
+msrv_aliases! {
+ ..
+ 1,45,0 { STR_STRIP_PREFIX }
+}
+```
+
+In order to access the project-configured MSRV, you need to have an `msrv` field
+in the LintPass struct, and a constructor to initialize the field. The `msrv`
+value is passed to the constructor in `clippy_lints/lib.rs`.
+
+```rust
+pub struct ManualStrip {
+ msrv: Option<RustcVersion>,
+}
+
+impl ManualStrip {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+```
+
+The project's MSRV can then be matched against the feature MSRV in the LintPass
+using the `meets_msrv` utility function.
+
+``` rust
+if !meets_msrv(self.msrv, msrvs::STR_STRIP_PREFIX) {
+ return;
+}
+```
+
+The project's MSRV can also be specified as an inner attribute, which overrides
+the value from `clippy.toml`. This can be accounted for using the
+`extract_msrv_attr!(LintContext)` macro and passing
+`LateContext`/`EarlyContext`.
+
+```rust
+impl<'tcx> LateLintPass<'tcx> for ManualStrip {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ ...
+ }
+ extract_msrv_attr!(LateContext);
+}
+```
+
+Once the `msrv` is added to the lint, a relevant test case should be added to
+`tests/ui/min_rust_version_attr.rs` which verifies that the lint isn't emitted
+if the project's MSRV is lower.
+
+As a last step, the lint should be added to the lint documentation. This is done
+in `clippy_lints/src/utils/conf.rs`:
+
+```rust
+define_Conf! {
+ /// Lint: LIST, OF, LINTS, <THE_NEWLY_ADDED_LINT>. The minimum rust version that the project supports
+ (msrv: Option<String> = None),
+ ...
+}
+```
+
+[`clippy_utils::msrvs`]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/msrvs/index.html
+
+## Author lint
+
+If you have trouble implementing your lint, there is also the internal `author`
+lint to generate Clippy code that detects the offending pattern. It does not
+work for all of the Rust syntax, but can give a good starting point.
+
+The quickest way to use it is the [Rust playground:
+play.rust-lang.org][author_example]. Put the code you want to lint into the
+editor and add the `#[clippy::author]` attribute above the item. Then run Clippy
+via `Tools -> Clippy` and you should see the generated code in the output below.
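+
+As a small sketch, this is roughly what you would paste into the playground (the
+function body is just an arbitrary example):
+
+```rust
+#[clippy::author]
+fn foo() {
+    if let Some(x) = Some(5) {
+        let _ = x;
+    }
+}
+
+fn main() {}
+```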
+
+[Here][author_example] is an example on the playground.
+
+If the command was executed successfully, you can copy the code over to where
+you are implementing your lint.
+
+[author_example]: https://play.rust-lang.org/?version=nightly&mode=debug&edition=2018&gist=9a12cb60e5c6ad4e3003ac6d5e63cf55
+
+## Print HIR lint
+
+To implement a lint, it's helpful to first understand the internal
+representation that rustc uses. Clippy has the `#[clippy::dump]` attribute that
+prints the [_High-Level Intermediate Representation (HIR)_] of the item,
+statement, or expression that the attribute is attached to. To attach the
+attribute to expressions you often need to enable
+`#![feature(stmt_expr_attributes)]`.
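+
+As a minimal sketch, attaching it to a statement could look like this (the code
+under the attribute is arbitrary):
+
+```rust
+fn main() {
+    #[clippy::dump]
+    let x = if true { 1 } else { 2 };
+    let _ = x;
+}
+```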
+
+[Here][print_hir_example] you can find an example, just select _Tools_ and run
+_Clippy_.
+
+[_High-Level Intermediate Representation (HIR)_]: https://rustc-dev-guide.rust-lang.org/hir.html
+[print_hir_example]: https://play.rust-lang.org/?version=nightly&mode=debug&edition=2021&gist=daf14db3a7f39ca467cd1b86c34b9afb
+
+## Documentation
+
+The final thing before submitting our PR is to add some documentation to our
+lint declaration.
+
+Please document your lint with a doc comment akin to the following:
+
+```rust
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for ... (describe what the lint matches).
+ ///
+ /// ### Why is this bad?
+ /// Supply the reason for linting the code.
+ ///
+ /// ### Example
+ ///
+ /// ```rust,ignore
+ /// // A short example of code that triggers the lint
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// // A short example of improved code that doesn't trigger the lint
+ /// ```
+ #[clippy::version = "1.29.0"]
+ pub FOO_FUNCTIONS,
+ pedantic,
+ "function named `foo`, which is not a descriptive name"
+}
+```
+
+Once your lint is merged, this documentation will show up in the [lint
+list][lint_list].
+
+[lint_list]: https://rust-lang.github.io/rust-clippy/master/index.html
+
+## Running rustfmt
+
+[Rustfmt] is a tool for formatting Rust code according to style guidelines. Your
+code has to be formatted by `rustfmt` before a PR can be merged. Clippy uses
+nightly `rustfmt` in the CI.
+
+It can be installed via `rustup`:
+
+```bash
+rustup component add rustfmt --toolchain=nightly
+```
+
+Use `cargo dev fmt` to format the whole codebase. Make sure that `rustfmt` is
+installed for the nightly toolchain.
+
+[Rustfmt]: https://github.com/rust-lang/rustfmt
+
+## Debugging
+
+If you want to debug parts of your lint implementation, you can use the [`dbg!`]
+macro anywhere in your code. Running the tests should then include the debug
+output in the `stdout` part.
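+
+For example, a temporary `dbg!` call could be dropped into the `check_fn`
+implementation from above (purely illustrative; remove it again before opening a
+PR):
+
+```rust
+impl EarlyLintPass for FooFunctions {
+    fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, span: Span, _: NodeId) {
+        // `Span` implements `Debug`, so this shows up in the `stdout` section
+        // of the test output
+        dbg!(span);
+        // .. lint logic ..
+    }
+}
+```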
+
+[`dbg!`]: https://doc.rust-lang.org/std/macro.dbg.html
+
+## PR Checklist
+
+Before submitting your PR make sure you followed all of the basic requirements:
+
+<!-- Sync this with `.github/PULL_REQUEST_TEMPLATE` -->
+
+- \[ ] Followed [lint naming conventions][lint_naming]
+- \[ ] Added passing UI tests (including committed `.stderr` file)
+- \[ ] `cargo test` passes locally
+- \[ ] Executed `cargo dev update_lints`
+- \[ ] Added lint documentation
+- \[ ] Run `cargo dev fmt`
+
+## Adding configuration to a lint
+
+Clippy supports the configuration of lint values using a `clippy.toml` file in
+the workspace directory. Adding a configuration to a lint can be useful for
+thresholds or to constrain some behavior that can be seen as a false positive
+for some users. Adding a configuration is done in the following steps:
+
+1. Adding a new configuration entry to [`clippy_lints::utils::conf`] like this:
+
+ ```rust
+ /// Lint: LINT_NAME.
+ ///
+ /// <The configuration field doc comment>
+ (configuration_ident: Type = DefaultValue),
+ ```
+
+ The doc comment is automatically added to the documentation of the listed
+ lints. The default value will be formatted using the `Debug` implementation
+ of the type.
+2. Adding the configuration value to the lint impl struct:
+ 1. This first requires the definition of a lint impl struct. Lint impl
+ structs are usually generated with the `declare_lint_pass!` macro. This
+ struct needs to be defined manually to add some kind of metadata to it:
+ ```rust
+ // Generated struct definition
+ declare_lint_pass!(StructName => [
+ LINT_NAME
+ ]);
+
+ // New manual definition struct
+ #[derive(Copy, Clone)]
+ pub struct StructName {}
+
+ impl_lint_pass!(StructName => [
+ LINT_NAME
+ ]);
+ ```
+
+ 2. Next add the configuration value and a corresponding creation method like
+ this:
+ ```rust
+ #[derive(Copy, Clone)]
+ pub struct StructName {
+ configuration_ident: Type,
+ }
+
+ // ...
+
+ impl StructName {
+ pub fn new(configuration_ident: Type) -> Self {
+ Self {
+ configuration_ident,
+ }
+ }
+ }
+ ```
+3. Passing the configuration value to the lint impl struct:
+
+ First find the struct construction in the [`clippy_lints` lib file]. The
+ configuration value is now cloned or copied into a local value that is then
+ passed to the impl struct like this:
+
+ ```rust
+ // Default generated registration:
+ store.register_*_pass(|| box module::StructName);
+
+ // New registration with configuration value
+ let configuration_ident = conf.configuration_ident.clone();
+ store.register_*_pass(move || box module::StructName::new(configuration_ident));
+ ```
+
+   Congratulations, the work is almost done. The configuration value can now be
+ accessed in the linting code via `self.configuration_ident`.
+
+4. Adding tests:
+ 1. The default configured value can be tested like any normal lint in
+ [`tests/ui`].
+ 2. The configuration itself will be tested separately in [`tests/ui-toml`].
+ Simply add a new subfolder with a fitting name. This folder contains a
+ `clippy.toml` file with the configuration value and a rust file that
+ should be linted by Clippy. The test can otherwise be written as usual.
+
+[`clippy_lints::utils::conf`]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/utils/conf.rs
+[`clippy_lints` lib file]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/lib.rs
+[`tests/ui`]: https://github.com/rust-lang/rust-clippy/blob/master/tests/ui
+[`tests/ui-toml`]: https://github.com/rust-lang/rust-clippy/blob/master/tests/ui-toml
+
+## Cheat Sheet
+
+Here are some pointers to things you are likely going to need for every lint:
+
+* [Clippy utils][utils] - Various helper functions. Maybe the function you need
+ is already in here ([`is_type_diagnostic_item`], [`implements_trait`],
+  [`snippet`], etc.)
+* [Clippy diagnostics][diagnostics]
+* [Let chains][let-chains]
+* [`from_expansion`][from_expansion] and
+ [`in_external_macro`][in_external_macro]
+* [`Span`][span]
+* [`Applicability`][applicability]
+* [Common tools for writing lints](common_tools_writing_lints.md) helps with
+ common operations
+* [The rustc-dev-guide][rustc-dev-guide] explains a lot of internal compiler
+ concepts
+* [The nightly rustc docs][nightly_docs] which has been linked to throughout
+ this guide
+
+For `EarlyLintPass` lints:
+
+* [`EarlyLintPass`][early_lint_pass]
+* [`rustc_ast::ast`][ast]
+
+For `LateLintPass` lints:
+
+* [`LateLintPass`][late_lint_pass]
+* [`Ty::TyKind`][ty]
+
+While most of Clippy's lint utils are documented, most of rustc's internals lack
+documentation currently. This is unfortunate, but in most cases you can probably
+get away with copying things from existing similar lints. If you are stuck,
+don't hesitate to ask on [Zulip] or in the issue/PR.
+
+[utils]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/index.html
+[`is_type_diagnostic_item`]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/ty/fn.is_type_diagnostic_item.html
+[`implements_trait`]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/ty/fn.implements_trait.html
+[`snippet`]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/source/fn.snippet.html
+[let-chains]: https://github.com/rust-lang/rust/pull/94927
+[from_expansion]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_span/struct.Span.html#method.from_expansion
+[in_external_macro]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/lint/fn.in_external_macro.html
+[span]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_span/struct.Span.html
+[applicability]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_errors/enum.Applicability.html
+[rustc-dev-guide]: https://rustc-dev-guide.rust-lang.org/
+[nightly_docs]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/
+[ast]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast/ast/index.html
+[ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/sty/index.html
+[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/clippy
diff --git a/src/tools/clippy/book/src/development/basics.md b/src/tools/clippy/book/src/development/basics.md
new file mode 100644
index 000000000..44ba6e327
--- /dev/null
+++ b/src/tools/clippy/book/src/development/basics.md
@@ -0,0 +1,192 @@
+# Basics for hacking on Clippy
+
+This document explains the basics for hacking on Clippy. Among other things, this
+includes how to build and test Clippy. For a more in-depth description of the
+codebase, take a look at [Adding Lints] or [Common Tools].
+
+[Adding Lints]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/adding_lints.md
+[Common Tools]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/common_tools_writing_lints.md
+
+- [Basics for hacking on Clippy](#basics-for-hacking-on-clippy)
+ - [Get the Code](#get-the-code)
+ - [Building and Testing](#building-and-testing)
+ - [`cargo dev`](#cargo-dev)
+ - [lintcheck](#lintcheck)
+ - [PR](#pr)
+ - [Common Abbreviations](#common-abbreviations)
+ - [Install from source](#install-from-source)
+
+## Get the Code
+
+First, make sure you have checked out the latest version of Clippy. If this is
+your first time working on Clippy, create a fork of the repository and clone it
+afterwards with the following command:
+
+```bash
+git clone git@github.com:<your-username>/rust-clippy
+```
+
+If you've already cloned Clippy in the past, update it to the latest version:
+
+```bash
+# If the upstream remote has not been added yet
+git remote add upstream https://github.com/rust-lang/rust-clippy
+# upstream has to be the remote of the rust-lang/rust-clippy repo
+git fetch upstream
+# make sure that you are on the master branch
+git checkout master
+# rebase your master branch on the upstream master
+git rebase upstream/master
+# push to the master branch of your fork
+git push
+```
+
+## Building and Testing
+
+You can build and test Clippy like every other Rust project:
+
+```bash
+cargo build # builds Clippy
+cargo test # tests Clippy
+```
+
+Since Clippy's test suite is pretty big, there are some commands that only run a
+subset of Clippy's tests:
+
+```bash
+# only run UI tests
+cargo uitest
+# only run UI tests starting with `test_`
+TESTNAME="test_" cargo uitest
+# only run dogfood tests
+cargo dev dogfood
+```
+
+If the output of a [UI test] differs from the expected output, you can update
+the reference file with:
+
+```bash
+cargo dev bless
+```
+
+This is necessary, for example, if you fix a typo in the error message of a lint
+or if you modify a test file to add a test case.
+
+> _Note:_ This command may update more files than you intended. In that case
+> only commit the files you wanted to update.
+
+[UI test]: https://rustc-dev-guide.rust-lang.org/tests/adding.html#guide-to-the-ui-tests
+
+## `cargo dev`
+
+Clippy has some dev tools to make working on Clippy more convenient. These tools
+can be accessed through the `cargo dev` command. Available tools are listed
+below. To get more information about these commands, just call them with
+`--help`.
+
+```bash
+# formats the whole Clippy codebase and all tests
+cargo dev fmt
+# register or update lint names/groups/...
+cargo dev update_lints
+# create a new lint and register it
+cargo dev new_lint
+# deprecate a lint and attempt to remove code relating to it
+cargo dev deprecate
+# automatically formatting all code before each commit
+cargo dev setup git-hook
+# (experimental) Setup Clippy to work with IntelliJ-Rust
+cargo dev setup intellij
+# runs the `dogfood` tests
+cargo dev dogfood
+```
+
+More information about the `intellij` command usage and the reasons for it can be found
+[here](https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md#intellij-rust).
+
+## lintcheck
+
+`cargo lintcheck` will build and run Clippy on a fixed set of crates and
+generate a log of the results. You can `git diff` the updated log against its
+previous version and see what impact your lint made on a small set of crates.
+If you add a new lint, please audit the resulting warnings and make sure there
+are no false positives and that the suggestions are valid.
+
+Refer to the tool's [README] for more details.
+
+[README]: https://github.com/rust-lang/rust-clippy/blob/master/lintcheck/README.md
+
+## PR
+
+We follow a rustc no merge-commit policy. See
+<https://rustc-dev-guide.rust-lang.org/contributing.html#opening-a-pr>.
+
+## Common Abbreviations
+
+| Abbreviation | Meaning |
+| ------------ | -------------------------------------- |
+| UB | Undefined Behavior |
+| FP | False Positive |
+| FN | False Negative |
+| ICE | Internal Compiler Error |
+| AST | Abstract Syntax Tree |
+| MIR | Mid-Level Intermediate Representation |
+| HIR | High-Level Intermediate Representation |
+| TCX | Type context |
+
+This is a concise list of abbreviations that can come up during Clippy
+development. An extensive general list can be found in the [rustc-dev-guide
+glossary][glossary]. Always feel free to ask if an abbreviation or meaning is
+unclear to you.
+
+## Install from source
+
+If you are hacking on Clippy and want to install it from source, do the
+following:
+
+First, take note of the toolchain
+[override](https://rust-lang.github.io/rustup/overrides.html) in
+`/rust-toolchain`. We will use this override to install Clippy into the right
+toolchain.
+
+> Tip: You can view the active toolchain for the current directory with `rustup
+> show active-toolchain`.
+
+From the Clippy project root, run the following command to build the Clippy
+binaries and copy them into the toolchain directory. This will override the
+currently installed Clippy component.
+
+```terminal
+cargo build --release --bin cargo-clippy --bin clippy-driver -Zunstable-options --out-dir "$(rustc --print=sysroot)/bin"
+```
+
+Now you may run `cargo clippy` in any project, using the toolchain where you
+just installed Clippy.
+
+```terminal
+cd my-project
+cargo +nightly-2021-07-01 clippy
+```
+
+...or `clippy-driver`
+
+```terminal
+clippy-driver +nightly-2021-07-01 <filename>
+```
+
+If you need to restore the default Clippy installation, run the following (from
+the Clippy project root).
+
+```terminal
+rustup component remove clippy
+rustup component add clippy
+```
+
+> **DO NOT** install using `cargo install --path . --force` since this will
+> overwrite rustup
+> [proxies](https://rust-lang.github.io/rustup/concepts/proxies.html). That is,
+> `~/.cargo/bin/cargo-clippy` and `~/.cargo/bin/clippy-driver` should be hard or
+> soft links to `~/.cargo/bin/rustup`. You can repair these by running `rustup
+> update`.
+
+[glossary]: https://rustc-dev-guide.rust-lang.org/appendix/glossary.html
diff --git a/src/tools/clippy/book/src/development/common_tools_writing_lints.md b/src/tools/clippy/book/src/development/common_tools_writing_lints.md
new file mode 100644
index 000000000..15e00c7d7
--- /dev/null
+++ b/src/tools/clippy/book/src/development/common_tools_writing_lints.md
@@ -0,0 +1,279 @@
+# Common tools for writing lints
+
+You may need the following tips to perform common operations when writing lints.
+
+- [Common tools for writing lints](#common-tools-for-writing-lints)
+ - [Retrieving the type of an expression](#retrieving-the-type-of-an-expression)
+ - [Checking if an expr is calling a specific method](#checking-if-an-expr-is-calling-a-specific-method)
+ - [Checking for a specific type](#checking-for-a-specific-type)
+ - [Checking if a type implements a specific trait](#checking-if-a-type-implements-a-specific-trait)
+ - [Checking if a type defines a specific method](#checking-if-a-type-defines-a-specific-method)
+ - [Dealing with macros](#dealing-with-macros-and-expansions)
+
+Useful Rustc dev guide links:
+- [Stages of compilation](https://rustc-dev-guide.rust-lang.org/compiler-src.html#the-main-stages-of-compilation)
+- [Diagnostic items](https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-items.html)
+- [Type checking](https://rustc-dev-guide.rust-lang.org/type-checking.html)
+- [Ty module](https://rustc-dev-guide.rust-lang.org/ty.html)
+
+## Retrieving the type of an expression
+
+Sometimes you may want to retrieve the type `Ty` of an expression `Expr`, for
+example to answer following questions:
+
+- which type does this expression correspond to (using its [`TyKind`][TyKind])?
+- is it a sized type?
+- is it a primitive type?
+- does it implement a trait?
+
+This operation is performed using the [`expr_ty()`][expr_ty] method from the
+[`TypeckResults`][TypeckResults] struct, that gives you access to the underlying
+structure [`Ty`][Ty].
+
+Example of use:
+```rust
+impl LateLintPass<'_> for MyStructLint {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ // Get type of `expr`
+ let ty = cx.typeck_results().expr_ty(expr);
+ // Match its kind to enter its type
+        match ty.kind() {
+ ty::Adt(adt_def, _) if adt_def.is_struct() => println!("Our `expr` is a struct!"),
+ _ => ()
+ }
+ }
+}
+```
+
+Similarly in [`TypeckResults`][TypeckResults] methods, you have the
+[`pat_ty()`][pat_ty] method to retrieve a type from a pattern.
+
+Two notable items here:
+- `cx` is the lint context [`LateContext`][LateContext]. The two most useful
+ data structures in this context are `tcx` and the `TypeckResults` returned by
+ `LateContext::typeck_results`, allowing us to jump to type definitions and
+ other compilation stages such as HIR.
+- `typeck_results`'s return value is [`TypeckResults`][TypeckResults] and is
+  created by the type checking step. It includes useful information such as the
+  types of expressions, ways to resolve methods, and so on.
+
+## Checking if an expr is calling a specific method
+
+Starting with an `expr`, you can check whether it is calling a specific method
+`some_method`:
+
+```rust
+impl<'tcx> LateLintPass<'tcx> for MyStructLint {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ // Check our expr is calling a method
+ if let hir::ExprKind::MethodCall(path, _, [_self_arg, ..]) = &expr.kind
+ // Check the name of this method is `some_method`
+ && path.ident.name == sym!(some_method)
+ // Optionally, check the type of the self argument.
+ // - See "Checking for a specific type"
+ {
+ // ...
+ }
+ }
+}
+```
+
+## Checking for a specific type
+
+There are three ways to check if an expression type is a specific type we want
+to check for. All of these methods only check for the base type; generic
+arguments have to be checked separately.
+
+```rust
+use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
+use clippy_utils::{paths, match_def_path};
+use rustc_span::symbol::sym;
+use rustc_hir::LangItem;
+
+impl LateLintPass<'_> for MyStructLint {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ // Getting the expression type
+ let ty = cx.typeck_results().expr_ty(expr);
+
+ // 1. Using diagnostic items
+ // The last argument is the diagnostic item to check for
+ if is_type_diagnostic_item(cx, ty, sym::Option) {
+ // The type is an `Option`
+ }
+
+ // 2. Using lang items
+ if is_type_lang_item(cx, ty, LangItem::RangeFull) {
+ // The type is a full range like `.drain(..)`
+ }
+
+ // 3. Using the type path
+ // This method should be avoided if possible
+ if match_def_path(cx, def_id, &paths::RESULT) {
+ // The type is a `core::result::Result`
+ }
+ }
+}
+```
+
+Prefer using diagnostic items and lang items where possible.
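+
+As noted above, generic arguments have to be checked separately. A rough sketch of
+also inspecting the first generic argument (assuming `ty` from the example above is
+an ADT with at least one type parameter, e.g. `Vec<T>`):
+
+```rust
+if let ty::Adt(_, substs) = ty.kind() {
+    // look at the `T` in e.g. `Vec<T>`
+    let first_arg = substs.type_at(0);
+    if is_type_diagnostic_item(cx, first_arg, sym::String) {
+        // `ty` is something like `Vec<String>`
+    }
+}
+```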
+
+## Checking if a type implements a specific trait
+
+There are three ways to do this, depending on whether the target trait has a
+diagnostic item, lang item, or neither.
+
+```rust
+use clippy_utils::{implements_trait, is_trait_method, match_trait_method, paths};
+use rustc_span::symbol::sym;
+
+impl LateLintPass<'_> for MyStructLint {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ // 1. Using diagnostic items with the expression
+ // we use `is_trait_method` function from Clippy's utils
+ if is_trait_method(cx, expr, sym::Iterator) {
+ // method call in `expr` belongs to `Iterator` trait
+ }
+
+ // 2. Using lang items with the expression type
+ let ty = cx.typeck_results().expr_ty(expr);
+ if cx.tcx.lang_items()
+ // we are looking for the `DefId` of `Drop` trait in lang items
+ .drop_trait()
+ // then we use it with our type `ty` by calling `implements_trait` from Clippy's utils
+ .map_or(false, |id| implements_trait(cx, ty, id, &[])) {
+ // `expr` implements `Drop` trait
+ }
+
+ // 3. Using the type path with the expression
+ // we use `match_trait_method` function from Clippy's utils
+ // (This method should be avoided if possible)
+ if match_trait_method(cx, expr, &paths::INTO) {
+ // `expr` implements `Into` trait
+ }
+ }
+}
+```
+
+> Prefer using diagnostic and lang items, if the target trait has one.
+
+We access lang items through the type context `tcx`. `tcx` is of type
+[`TyCtxt`][TyCtxt] and is defined in the `rustc_middle` crate. A list of defined
+paths for Clippy can be found in [paths.rs][paths].
+
+## Checking if a type defines a specific method
+
+To check if our type defines a method called `some_method`:
+
+```rust
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::return_ty;
+
+impl<'tcx> LateLintPass<'tcx> for MyTypeImpl {
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx ImplItem<'_>) {
+ // Check if item is a method/function
+ if let ImplItemKind::Fn(ref signature, _) = impl_item.kind
+ // Check the method is named `some_method`
+ && impl_item.ident.name == sym!(some_method)
+ // We can also check it has a parameter `self`
+ && signature.decl.implicit_self.has_implicit_self()
+ // We can go further and even check if its return type is `String`
+ && is_type_diagnostic_item(cx, return_ty(cx, impl_item.hir_id), sym!(string_type))
+ {
+ // ...
+ }
+ }
+}
+```
+
+## Dealing with macros and expansions
+
+Keep in mind that macros are already expanded and desugaring is already applied
+to the code representation that you are working with in Clippy. This
+unfortunately causes a lot of false positives because macro expansions are
+"invisible" unless you actively check for them. Generally speaking, code with
+macro expansions should just be ignored by Clippy because that code can be
+dynamic in ways that are difficult or impossible to see. Use the following
+functions to deal with macros:
+
+- `span.from_expansion()`: detects if a span is from macro expansion or
+ desugaring. Checking this is a common first step in a lint.
+
+ ```rust
+ if expr.span.from_expansion() {
+ // just forget it
+ return;
+ }
+ ```
+
+- `span.ctxt()`: the span's context represents whether it is from expansion, and
+  if so, which macro call expanded it. It is sometimes useful to check if the
+  contexts of two spans are equal.
+
+ ```rust
+ // expands to `1 + 0`, but don't lint
+ 1 + mac!()
+ ```
+ ```rust
+ if left.span.ctxt() != right.span.ctxt() {
+ // the coder most likely cannot modify this expression
+ return;
+ }
+ ```
+ > Note: Code that is not from expansion is in the "root" context. So any spans
+ > where `from_expansion` returns `true` can be assumed to have the same
+ > context. And so just using `span.from_expansion()` is often good enough.
+
+
+- `in_external_macro(span)`: detects if the given span is from a macro defined in
+  a foreign crate. If you want the lint to work with macro-generated code, this
+  is the next line of defense to avoid linting macros that are not defined in the
+  current crate. It doesn't make sense to lint code that the coder can't change.
+
+  For example, you may want to use it to avoid linting in macros from other
+  crates:
+
+ ```rust
+ #[macro_use]
+ extern crate a_crate_with_macros;
+
+ // `foo` is defined in `a_crate_with_macros`
+ foo!("bar");
+
+ // if we lint the `match` of `foo` call and test its span
+ assert_eq!(in_external_macro(cx.sess(), match_span), true);
+ ```
+
+- `span.ctxt()`: the span's context represents whether it is from expansion, and
+  if so, what expanded it.
+
+  One thing the span's context is useful for is checking if two spans are in the
+  same context. For example, in `a == b`, `a` and `b` have the same context. In a
+ `macro_rules!` with `a == $b`, `$b` is expanded to some expression with a
+ different context from `a`.
+
+ ```rust
+ macro_rules! m {
+ ($a:expr, $b:expr) => {
+ if $a.is_some() {
+ $b;
+ }
+ }
+ }
+
+ let x: Option<u32> = Some(42);
+ m!(x, x.unwrap());
+
+ // These spans are not from the same context
+ // x.is_some() is from inside the macro
+ // x.unwrap() is from outside the macro
+  assert_ne!(x_is_some_span.ctxt(), x_unwrap_span.ctxt());
+ ```
+
+[Ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.Ty.html
+[TyKind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/enum.TyKind.html
+[TypeckResults]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckResults.html
+[expr_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckResults.html#method.expr_ty
+[LateContext]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LateContext.html
+[TyCtxt]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html
+[pat_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TypeckResults.html#method.pat_ty
+[paths]: https://doc.rust-lang.org/nightly/nightly-rustc/clippy_utils/paths/index.html
diff --git a/src/tools/clippy/book/src/development/infrastructure/README.md b/src/tools/clippy/book/src/development/infrastructure/README.md
new file mode 100644
index 000000000..3b2a25399
--- /dev/null
+++ b/src/tools/clippy/book/src/development/infrastructure/README.md
@@ -0,0 +1,19 @@
+# Infrastructure
+
+In order to deploy Clippy over `rustup`, some infrastructure is necessary. This
+chapter describes the different parts of the Clippy infrastructure that need to
+be maintained to make this possible.
+
+The most important part is the sync between the `rust-lang/rust` repository and
+the Clippy repository that takes place every two weeks. This process is
+described in the [Syncing changes between Clippy and `rust-lang/rust`](sync.md)
+section.
+
+A new Clippy release is done together with every Rust release, so every six
+weeks. The release process is described in the [Release a new Clippy
+Version](release.md) section. During a release cycle a changelog entry for the
+next release has to be written. The format of that and how to do that is
+documented in the [Changelog Update](changelog_update.md) section.
+
+> _Note:_ The Clippy CI should also be described in this chapter, but for now is
+> left as a TODO.
diff --git a/src/tools/clippy/book/src/development/infrastructure/backport.md b/src/tools/clippy/book/src/development/infrastructure/backport.md
new file mode 100644
index 000000000..15f3d1f08
--- /dev/null
+++ b/src/tools/clippy/book/src/development/infrastructure/backport.md
@@ -0,0 +1,71 @@
+# Backport Changes
+
+Sometimes it is necessary to backport changes to the beta release of Clippy.
+Backports in Clippy are rare and should be approved by the Clippy team. For
+example, a backport is done if a crucial ICE was fixed or a lint is broken to the
+point that it has to be disabled before landing on stable.
+
+Backports are done to the `beta` branch of Clippy. Backports to stable Clippy
+releases basically don't exist, since this would require a Rust point release,
+which is almost never justifiable for a Clippy fix.
+
+
+## Backport the changes
+
+Backports are done on the beta branch of the Clippy repository.
+
+```bash
+# Assuming the current directory corresponds to the Clippy repository
+$ git checkout beta
+$ git checkout -b backport
+$ git cherry-pick <SHA> # `<SHA>` is the commit hash of the commit(s), that should be backported
+$ git push origin backport
+```
+
+Now you should test that the backport passes all the tests in the Rust
+repository. You can do this with:
+
+```bash
+# Assuming the current directory corresponds to the Rust repository
+$ git checkout beta
+$ git subtree pull -p src/tools/clippy https://github.com/<your-github-name>/rust-clippy backport
+$ ./x.py test src/tools/clippy
+```
+
+Should the test fail, you can fix Clippy directly in the Rust repository. This
+has to be first applied to the Clippy beta branch and then again synced to the
+Rust repository, though. The easiest way to do this is:
+
+```bash
+# In the Rust repository
+$ git diff --patch --relative=src/tools/clippy > clippy.patch
+# In the Clippy repository
+$ git apply /path/to/clippy.patch
+$ git add -u
+$ git commit -m "Fix rustup fallout"
+$ git push origin backport
+```
+
+After this, you can open a PR to the `beta` branch of the Clippy repository.
+
+
+## Update Clippy in the Rust Repository
+
+This step must be done, **after** the PR of the previous step was merged.
+
+After the backport landed in the Clippy repository, the branch has to be synced
+back to the beta branch of the Rust repository.
+
+```bash
+# Assuming the current directory corresponds to the Rust repository
+$ git checkout beta
+$ git checkout -b clippy_backport
+$ git subtree pull -p src/tools/clippy https://github.com/rust-lang/rust-clippy beta
+$ git push origin clippy_backport
+```
+
+Make sure to test the backport in the Rust repository before opening a PR. This
+is done with `./x.py test src/tools/clippy`. If that passes all tests, open a PR
+to the `beta` branch of the Rust repository. In this PR you should tag the
+Clippy team member that agreed to the backport, or the `@rust-lang/clippy` team.
+Make sure to add `[beta]` to the title of the PR.
diff --git a/src/tools/clippy/book/src/development/infrastructure/book.md b/src/tools/clippy/book/src/development/infrastructure/book.md
new file mode 100644
index 000000000..a48742191
--- /dev/null
+++ b/src/tools/clippy/book/src/development/infrastructure/book.md
@@ -0,0 +1,42 @@
+# The Clippy Book
+
+This document explains how to make additions and changes to the Clippy book, the
+guide to Clippy that you're reading right now. The Clippy book is formatted with
+[Markdown](https://www.markdownguide.org) and generated by
+[mdbook](https://github.com/rust-lang/mdBook).
+
+- [Get mdbook](#get-mdbook)
+- [Make changes](#make-changes)
+
+## Get mdbook
+
+While not strictly necessary since the book source is simply Markdown text
+files, having mdbook locally will allow you to build, test and serve the book
+locally to view changes before you commit them to the repository. You likely
+already have `cargo` installed, so the easiest option is to simply:
+
+```shell
+cargo install mdbook
+```
+
+See the mdbook [installation](https://github.com/rust-lang/mdBook#installation)
+instructions for other options.
+
+## Make changes
+
+The book's
+[src](https://github.com/rust-lang/rust-clippy/tree/master/book/src)
+directory contains all of the markdown files used to generate the book. If you
+want to see your changes in real time, you can use the mdbook `serve` command to
+run a web server locally that will automatically update changes as they are
+made. From the top level of your `rust-clippy` directory:
+
+```shell
+mdbook serve book --open
+```
+
+Then navigate to `http://localhost:3000` to see the generated book. While the
+server is running, changes you make will automatically be updated.
+
+For more information, see the mdbook
+[guide](https://rust-lang.github.io/mdBook/).
diff --git a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
new file mode 100644
index 000000000..80a47affe
--- /dev/null
+++ b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
@@ -0,0 +1,105 @@
+# Changelog Update
+
+If you want to help with updating the [changelog], you're in the right place.
+
+## When to update
+
+Typos and other small fixes/additions are _always_ welcome.
+
+Special care needs to be taken when it comes to updating the changelog for a new
+Rust release. For that purpose, the changelog is ideally updated during the week
+before an upcoming stable release. You can find the release dates on the [Rust
+Forge][forge].
+
+Most of the time we only need to update the changelog for minor Rust releases.
+It's been very rare that Clippy changes were included in a patch release.
+
+## Changelog update walkthrough
+
+### 1. Finding the relevant Clippy commits
+
+Each Rust release ships with its own version of Clippy. The Clippy subtree can
+be found in the `tools` directory of the Rust repository.
+
+Depending on the current time and what exactly you want to update, the following
+bullet points might be helpful:
+
+* When writing the release notes for the **upcoming stable release** you need to
+ check out the Clippy commit of the current Rust `beta` branch.
+ [Link][rust_beta_tools]
+* When writing the release notes for the **upcoming beta release**, you need to
+ check out the Clippy commit of the current Rust `master`.
+ [Link][rust_master_tools]
+* When writing the (forgotten) release notes for a **past stable release**, you
+ need to check out the Rust release tag of the stable release.
+ [Link][rust_stable_tools]
+
+Usually you want to write the changelog of the **upcoming stable release**. Make
+sure, though, that `beta` has already been branched in the Rust repository.
+
+To find the commit hash, issue the following command when in a `rust-lang/rust`
+checkout:
+
+```
+git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g"
+```
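+
+For the range in the next step you need two such hashes: the commit of the
+previous stable release and the commit of the release you are writing the
+changelog for. A hypothetical way to collect both, assuming `upstream` is the
+`rust-lang/rust` remote and `1.XX.0` is the previous stable release tag (the
+first hash should already match `commit1` in the current changelog):
+
+```
+git checkout 1.XX.0        # Clippy commit of the previous stable release
+git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g"
+git checkout upstream/beta # Clippy commit of the upcoming stable release
+git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g"
+```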
+
+### 2. Fetching the PRs between those commits
+
+Once you've got the correct commit range, run
+
+```
+util/fetch_prs_between.sh commit1 commit2 > changes.txt
+```
+
+and open that file in your editor of choice.
+
+When updating the changelog it's also a good idea to make sure that `commit1` is
+already correct in the current changelog.
+
+### 3. Authoring the final changelog
+
+The above script should have dumped all the relevant PRs to the file you
+specified. It should have filtered out most of the irrelevant PRs already, but
+it's a good idea to do a manual cleanup pass where you look for more irrelevant
+PRs. If you're not sure about some PRs, just leave them in for the review and
+ask for feedback.
+
+With the PRs filtered, you can start to take each PR and move the `changelog: `
+content to `CHANGELOG.md`. Adapt the wording as you see fit but try to keep it
+somewhat coherent.
+
+The order should roughly be:
+
+1. New lints
+2. Moves or deprecations of lints
+3. Changes that expand what code existing lints cover
+4. False positive fixes
+5. Suggestion fixes/improvements
+6. ICE fixes
+7. Documentation improvements
+8. Others
+
+As section headers, we use:
+
+```
+### New Lints
+### Moves and Deprecations
+### Enhancements
+### False Positive Fixes
+### Suggestion Fixes/Improvements
+### ICE Fixes
+### Documentation Improvements
+### Others
+```
+
+Please also be sure to update the Beta/Unreleased sections at the top with the
+relevant commit ranges.
+
+If you have the time, it would be appreciated if you double-check that the
+`#[clippy::version]` attributes of the added lints contain the correct version.
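+
+A quick, hypothetical way to scan for those attributes, assuming you run it from
+the root of a `rust-clippy` checkout and that the new lints live under
+`clippy_lints/src`:
+
+```
+# Lists all version attributes; check that the new lints carry the new version
+grep -rn "clippy::version" clippy_lints/src/
+```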
+
+[changelog]: https://github.com/rust-lang/rust-clippy/blob/master/CHANGELOG.md
+[forge]: https://forge.rust-lang.org/
+[rust_master_tools]: https://github.com/rust-lang/rust/tree/master/src/tools/clippy
+[rust_beta_tools]: https://github.com/rust-lang/rust/tree/beta/src/tools/clippy
+[rust_stable_tools]: https://github.com/rust-lang/rust/releases
diff --git a/src/tools/clippy/book/src/development/infrastructure/release.md b/src/tools/clippy/book/src/development/infrastructure/release.md
new file mode 100644
index 000000000..057228180
--- /dev/null
+++ b/src/tools/clippy/book/src/development/infrastructure/release.md
@@ -0,0 +1,142 @@
+# Release a new Clippy Version
+
+> _NOTE:_ This document is probably only relevant to you if you're a member of
+> the Clippy team.
+
+Clippy is released together with stable Rust releases. The dates for these
+releases can be found at the [Rust Forge]. This document explains the necessary
+steps to create a Clippy release.
+
+1. [Remerge the `beta` branch](#remerge-the-beta-branch)
+2. [Update the `beta` branch](#update-the-beta-branch)
+3. [Find the Clippy commit](#find-the-clippy-commit)
+4. [Tag the stable commit](#tag-the-stable-commit)
+5. [Update `CHANGELOG.md`](#update-changelogmd)
+
+> _NOTE:_ This document is for stable Rust releases, not for point releases. For
+> point releases, steps 1 and 2 should be enough.
+
+[Rust Forge]: https://forge.rust-lang.org/
+
+## Remerge the `beta` branch
+
+This step is only necessary if something was backported to the beta Rust
+release since the last release. The remerge then makes sure that the Clippy
+commit that was used by the now stable Rust release persists in the tree of the
+Clippy repository.
+
+To find out if this step is necessary run
+
+```bash
+# Assumes that the local master branch of rust-lang/rust-clippy is up-to-date
+$ git fetch upstream
+$ git branch master --contains upstream/beta
+```
+
+If this command outputs `master`, this step is **not** necessary.
+
+```bash
+# Assuming `HEAD` is the current `master` branch of rust-lang/rust-clippy
+$ git checkout -b backport_remerge
+$ git merge upstream/beta
+$ git diff # This diff has to be empty, otherwise something with the remerge failed
+$ git push origin backport_remerge # This can be pushed to your fork
+```
+
+After this, open a PR to the master branch. In this PR, the commit hash of the
+`HEAD` of the `beta` branch must exist. In addition to that, no files should be
+changed by this PR.
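+
+A quick, hypothetical sanity check for both conditions, assuming `upstream` is
+the `rust-lang/rust-clippy` remote and `backport_remerge` is checked out:
+
+```bash
+# The `HEAD` of `beta` must be reachable from the remerge branch
+$ git merge-base --is-ancestor upstream/beta backport_remerge && echo "beta HEAD included"
+# The remerge must not change any files compared to `master`
+$ git diff upstream/master --stat
+```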
+
+## Update the `beta` branch
+
+This step must be done **after** the PR of the previous step was merged.
+
+First, the Clippy commit of the `beta` branch of the Rust repository has to be
+determined.
+
+```bash
+# Assuming the current directory corresponds to the Rust repository
+$ git fetch upstream
+$ git checkout upstream/beta
+$ BETA_SHA=$(git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g")
+```
+
+After finding the Clippy commit, the `beta` branch in the Clippy repository can
+be updated.
+
+```bash
+# Assuming the current directory corresponds to the Clippy repository
+$ git checkout beta
+$ git reset --hard $BETA_SHA
+$ git push upstream beta
+```
+
+## Find the Clippy commit
+
+The first step is to tag the Clippy commit that is included in the stable Rust
+release. This commit can be found in the Rust repository.
+
+```bash
+# Assuming the current directory corresponds to the Rust repository
+$ git fetch upstream # `upstream` is the `rust-lang/rust` remote
+$ git checkout 1.XX.0 # XX should be exchanged with the corresponding version
+$ SHA=$(git log --oneline -- src/tools/clippy/ | grep -o "Merge commit '[a-f0-9]*' into .*" | head -1 | sed -e "s/Merge commit '\([a-f0-9]*\)' into .*/\1/g")
+```
+
+## Tag the stable commit
+
+After finding the Clippy commit, it can be tagged with the release number.
+
+```bash
+# Assuming the current directory corresponds to the Clippy repository
+$ git checkout $SHA
+$ git tag rust-1.XX.0 # XX should be exchanged with the corresponding version
+$ git push upstream rust-1.XX.0 # `upstream` is the `rust-lang/rust-clippy` remote
+```
+
+After this, the release should be available on the Clippy [release page].
+
+[release page]: https://github.com/rust-lang/rust-clippy/releases
+
+## Update the `stable` branch
+
+At this step you should have already checked out the commit of the `rust-1.XX.0`
+tag. Updating the stable branch from here is as easy as:
+
+```bash
+# Assuming the current directory corresponds to the Clippy repository and the
+# commit of the just created rust-1.XX.0 tag is checked out.
+$ git push upstream rust-1.XX.0:stable # `upstream` is the `rust-lang/rust-clippy` remote
+```
+
+> _NOTE:_ Usually there are no stable backports for Clippy, so this update
+> should be possible without force pushing or anything like this. If a stable
+> backport did happen, make sure to re-merge those changes just as with the
+> `beta` branch.
+
+## Update `CHANGELOG.md`
+
+For this see the document on [how to update the changelog].
+
+If you don't have time to do a complete changelog update right away, just update
+the following parts:
+
+- Remove the `(beta)` from the new stable version:
+
+ ```markdown
+ ## Rust 1.XX (beta) -> ## Rust 1.XX
+ ```
+
+- Update the release date line of the new stable version:
+
+ ```markdown
+ Current beta, release 20YY-MM-DD -> Current stable, released 20YY-MM-DD
+ ```
+
+- Update the release date line of the previous stable version:
+
+ ```markdown
+ Current stable, released 20YY-MM-DD -> Released 20YY-MM-DD
+ ```
+
+[how to update the changelog]: changelog_update.md
diff --git a/src/tools/clippy/book/src/development/infrastructure/sync.md b/src/tools/clippy/book/src/development/infrastructure/sync.md
new file mode 100644
index 000000000..5a0f7409a
--- /dev/null
+++ b/src/tools/clippy/book/src/development/infrastructure/sync.md
@@ -0,0 +1,123 @@
+# Syncing changes between Clippy and [`rust-lang/rust`]
+
+Clippy currently gets built with a pinned nightly version.
+
+In the `rust-lang/rust` repository, where rustc resides, there's a copy of
+Clippy that compiler hackers modify from time to time to adapt to changes in the
+unstable API of the compiler.
+
+We need to sync these changes back to this repository periodically, and the
+changes made to this repository in the meantime also need to be synced to the
+`rust-lang/rust` repository.
+
+To avoid flooding the `rust-lang/rust` PR queue, this two-way sync process is
+done on a bi-weekly basis if there are no urgent changes. This is done starting
+on the day of the Rust stable release and then every other week. That way we
+guarantee that we keep this repo up to date with the latest compiler API, and
+every feature in Clippy is available for 2 weeks in nightly, before it can get
+to beta. For reference, the first sync following this cadence was performed on
+2020-08-27.
+
+This process is described in detail in the following sections. For general
+information about `subtree`s in the Rust repository see [Rust's
+`CONTRIBUTING.md`][subtree].
+
+## Patching git-subtree to work with big repos
+
+Currently, there's a bug in `git-subtree` that prevents it from working properly
+with the [`rust-lang/rust`] repo. There's an open PR to fix that, but it's
+stale. Before continuing with the following steps, we need to manually apply
+that fix to our local copy of `git-subtree`.
+
+You can get the patched version of `git-subtree` from [here][gitgitgadget-pr].
+Put this file under `/usr/lib/git-core` (making a backup of the previous file)
+and make sure it has the proper permissions:
+
+```bash
+sudo cp --backup /path/to/patched/git-subtree.sh /usr/lib/git-core/git-subtree
+sudo chmod --reference=/usr/lib/git-core/git-subtree~ /usr/lib/git-core/git-subtree
+sudo chown --reference=/usr/lib/git-core/git-subtree~ /usr/lib/git-core/git-subtree
+```
+
+> _Note:_ The first time running `git subtree push` a cache has to be built.
+> This involves going through the complete Clippy history once. For this you
+> have to increase the stack limit though, which you can do with `ulimit -s
+> 60000`. Make sure to run the `ulimit` command from the same session in which
+> you call `git subtree`.
+
+> _Note:_ If you are a Debian user, `dash` is the shell used by default for
+> scripts instead of `sh`. This shell has a hardcoded recursion limit set to
+> 1000. In order to make this process work, you need to force the script to run
+> `bash` instead. You can do this by editing the first line of the `git-subtree`
+> script and changing `sh` to `bash`.
+
+## Defining remotes
+
+You may want to define remotes, so you don't have to type out the remote
+addresses on every sync. You can do this with the following commands (these
+commands still have to be run inside the `rust` directory):
+
+```bash
+# Set clippy-upstream remote for pulls
+$ git remote add clippy-upstream https://github.com/rust-lang/rust-clippy
+# Make sure to not push to the upstream repo
+$ git remote set-url --push clippy-upstream DISABLED
+# Set a local remote
+$ git remote add clippy-local /path/to/rust-clippy
+```
+
+> Note: The following sections assume that you have set those remotes with the
+> above remote names.
+
+## Performing the sync from [`rust-lang/rust`] to Clippy
+
+Here is a TL;DR version of the sync process (all of the following commands have
+to be run inside the `rust` directory):
+
+1. Clone the [`rust-lang/rust`] repository or make sure it is up to date.
+2. Checkout the commit from the latest available nightly. You can get it using
+ `rustup check`.
+3. Sync the changes to the rust-copy of Clippy to your Clippy fork:
+ ```bash
+ # Make sure to change `your-github-name` to your github name in the following command. Also be
+ # sure to either use a net-new branch, e.g. `sync-from-rust`, or delete the branch beforehand
+ # because changes cannot be fast forwarded and you have to run this command again.
+ git subtree push -P src/tools/clippy clippy-local sync-from-rust
+ ```
+
+ > _Note:_ Most of the time you have to create a merge commit in the
+ > `rust-clippy` repo (this has to be done in the Clippy repo, not in the
+ > rust-copy of Clippy):
+ ```bash
+ git fetch upstream # assuming upstream is the rust-lang/rust remote
+ git checkout sync-from-rust
+ git merge upstream/master --no-ff
+ ```
+ > Note: This is one of the few instances where a merge commit is allowed in
+ > a PR.
+4. Bump the nightly version in the Clippy repository by changing the date in the
+ rust-toolchain file to the current date and committing it with the message:
+ ```bash
+ git commit -m "Bump nightly version -> YYYY-MM-DD"
+ ```
+5. Open a PR to `rust-lang/rust-clippy` and wait for it to get merged (to
+ accelerate the process ping the `@rust-lang/clippy` team in your PR and/or
+ ask them in the [Zulip] stream.)
+
+[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/clippy
+
+## Performing the sync from Clippy to [`rust-lang/rust`]
+
+All of the following commands have to be run inside the `rust` directory.
+
+1. Make sure you have checked out the latest `master` of `rust-lang/rust`.
+2. Sync the `rust-lang/rust-clippy` master to the rust-copy of Clippy:
+ ```bash
+ git checkout -b sync-from-clippy
+ git subtree pull -P src/tools/clippy clippy-upstream master
+ ```
+3. Open a PR to [`rust-lang/rust`]
+
+[gitgitgadget-pr]: https://github.com/gitgitgadget/git/pull/493
+[subtree]: https://rustc-dev-guide.rust-lang.org/contributing.html#external-dependencies-subtree
+[`rust-lang/rust`]: https://github.com/rust-lang/rust
diff --git a/src/tools/clippy/book/src/development/proposals/README.md b/src/tools/clippy/book/src/development/proposals/README.md
new file mode 100644
index 000000000..78fe34ebf
--- /dev/null
+++ b/src/tools/clippy/book/src/development/proposals/README.md
@@ -0,0 +1,11 @@
+# Proposals
+
+This chapter is about accepted proposals for changes that should be worked on in
+or around Clippy in the long run.
+
+Besides adding more and more lints and improving the lints that Clippy already
+has, Clippy is also interested in making the experience of its users, developers,
+and maintainers better over time. Projects that address bigger-picture things
+like this usually take more time, and it is useful to have a proposal for them
+first. This is the place where such proposals are collected, so that we can
+refer to them when working on them.
diff --git a/src/tools/clippy/book/src/development/proposals/roadmap-2021.md b/src/tools/clippy/book/src/development/proposals/roadmap-2021.md
new file mode 100644
index 000000000..fe8b080f5
--- /dev/null
+++ b/src/tools/clippy/book/src/development/proposals/roadmap-2021.md
@@ -0,0 +1,235 @@
+# Roadmap 2021
+
+# Summary
+
+This Roadmap lays out the plans for Clippy in 2021:
+
+- Improving usability and reliability
+- Improving experience of contributors and maintainers
+- Developing and specifying processes
+
+Members of the Clippy team will be assigned tasks from one or more of these
+topics. The team member is then responsible for completing the assigned tasks. This
+can either be done by implementing them or by providing mentorship to interested
+contributors.
+
+# Motivation
+
+With the ongoing growth of the Rust language and of the whole ecosystem, Clippy
+also gets more and more users and contributors. This is good for the project,
+but also brings challenges along. Some of these challenges are:
+
+- More issues about reliability or usability are popping up
+- Traffic is hard to handle for a small team
+- Bigger projects don't get completed due to the lack of processes and/or time
+ of the team members
+
+Additionally, according to the [Rust Roadmap 2021], clear processes should be
+defined by every team and unified across teams. This Roadmap is the first step
+towards this.
+
+[Rust Roadmap 2021]: https://github.com/rust-lang/rfcs/pull/3037
+
+# Explanation
+
+This section will explain the things that should be done in 2021. It is
+important to note that this document focuses on the "What?", not the "How?".
+The latter will be addressed in follow-up tracking issues, with an assigned team
+member.
+
+The following is split up into two major sections. The first section covers the
+user facing plans, the second section the internal plans.
+
+## User Facing
+
+Clippy should be as pleasant to use and configure as possible. This section
+covers plans that should be implemented to improve the situation of Clippy in
+this regard.
+
+### Usability
+
+In the following, plans to improve the usability are covered.
+
+#### No Output After `cargo check`
+
+Currently when `cargo clippy` is run after `cargo check`, it does not produce
+any output. This is especially problematic since `rust-analyzer` is on the rise
+and it uses `cargo check` for checking code. A fix is already implemented, but
+it still has to be pushed over the finish line. This also includes the
+stabilization of the `cargo clippy --fix` command or the support of multi-span
+suggestions in `rustfix`.
+
+- [#4612](https://github.com/rust-lang/rust-clippy/issues/4612)
+
+#### `lints.toml` Configuration
+
+This is something that comes up every now and then: a reusable configuration
+file, where lint levels can be defined. Discussions about this often lead to
+nothing specific or to "we need an RFC for this". And this is exactly what needs
+to be done. Get together with the cargo team and write an RFC and implement such
+a configuration file somehow and somewhere.
+
+- [#3164](https://github.com/rust-lang/rust-clippy/issues/3164)
+- [cargo#5034](https://github.com/rust-lang/cargo/issues/5034)
+- [IRLO](https://internals.rust-lang.org/t/proposal-cargo-lint-configuration/9135/8)
+
+#### Lint Groups
+
+There are more and more issues about managing lints in Clippy popping up. Lints
+are hard to implement with a guarantee of no/few false positives (FPs). One way
+to address this might be to introduce more lint groups to give users the ability
+to better manage lints, or improve the process of classifying lints, so that
+disabling lints due to FPs becomes rare. It is important to note that Clippy
+lints are less conservative than `rustc` lints, which won't change in the
+future.
+
+- [#5537](https://github.com/rust-lang/rust-clippy/issues/5537)
+- [#6366](https://github.com/rust-lang/rust-clippy/issues/6366)
+
+### Reliability
+
+In the following, plans to improve the reliability are covered.
+
+#### False Positive Rate
+
+In the worst case, new lints are only available in nightly for 2 weeks, before
+hitting beta and ultimately stable. This and the fact that fewer people use
+nightly Rust nowadays makes it more probable that a lint with many FPs hits
+stable. This leads to annoyed users who will disable these new lints in the best
+case, and to more annoyed users who will stop using Clippy in the worst.
+A process should be developed and implemented to prevent this from happening.
+
+- [#6429](https://github.com/rust-lang/rust-clippy/issues/6429)
+
+## Internal
+
+(The end of) 2020 has shown that Clippy has to think about the available
+resources, especially regarding management and maintenance of the project. This
+section addresses issues affecting team members and contributors.
+
+### Management
+
+In 2020, Clippy reached over 1000 open issues, with regularly between 25 and 35
+open PRs. This is simultaneously a win and a loss. More issues and PRs mean more
+people are interested in Clippy and in contributing to it. On the other hand, it
+means more work for team members and longer review wait times for contributors.
+The following describes plans for how to improve the situation for both team
+members and contributors.
+
+#### Clear Expectations for Team Members
+
+According to the [Rust Roadmap 2021], a document specifying what it means to be
+a member of the team should be produced. This should not put more pressure on
+the team members, but rather help them and interested folks to know what the
+expectations are. With this it should also be easier to recruit new team members
+and may encourage people to get in touch, if they're interested to join.
+
+#### Scaling up the Team
+
+More people means less work for each individual. Together with the document
+about expectations for team members, a document defining the process of how to
+join the team should be produced. This can also increase the stability of the
+team, in case of current members dropping out (temporarily). There can also be
+different roles in the team, like people triaging vs. people reviewing.
+
+#### Regular Meetings
+
+Other teams have regular meetings. Clippy is big enough that it might be worth
+doing them as well. Especially if more people join the team, this can be
+important for sync-ups. Besides the asynchronous communication that works well
+for working on separate lints, a meeting adds a synchronous alternative at a
+known time. This is especially helpful if there are bigger things that need to
+be discussed (like the projects in this roadmap). For starters, bi-weekly
+meetings before Rust syncs might make sense.
+
+#### Triaging
+
+To get a handle on the influx of open issues, a process for triaging issues and
+PRs should be developed. Officially, Clippy follows the Rust triage process, but
+currently no one enforces it. This can be improved by sharing triage teams
+across projects or by implementing dashboards / tools which simplify triaging.
+
+### Development
+
+Improving the developer and contributor experience is something the Clippy team
+works on regularly. However, some things might need special attention and
+planning. These topics are listed in the following.
+
+#### Process for New and Existing Lints
+
+As already mentioned above, classifying new lints gets quite hard, because the
+probability of a buggy lint getting into stable is quite high. A process for
+classifying lints should be implemented. In addition, a test system should be
+developed to find out which lints are currently problematic in real-world code,
+so that they can be fixed or disabled.
+
+- [#6429 (comment)](https://github.com/rust-lang/rust-clippy/issues/6429#issuecomment-741056379)
+- [#6429 (comment)](https://github.com/rust-lang/rust-clippy/issues/6429#issuecomment-741153345)
+
+#### Processes
+
+Related to the point before, a process for suggesting and discussing major
+changes should be implemented. It's also not clearly defined when a lint should
+be enabled or disabled by default. This can also be improved by the test system
+mentioned above.
+
+#### Dev-Tools
+
+There's already `cargo dev` which makes Clippy development easier and more
+pleasant. This can still be expanded, so that it covers more areas of the
+development process.
+
+- [#5394](https://github.com/rust-lang/rust-clippy/issues/5394)
+
+#### Contributor Guide
+
+Similar to a Clippy Book, which describes how to use Clippy, a book about how to
+contribute to Clippy might be helpful for new and existing contributors. There's
+already the `doc` directory in the Clippy repo; this can be turned into an
+`mdbook`.
+
+#### `rustc` integration
+
+Recently Clippy was integrated with `git subtree` into the `rust-lang/rust`
+repository. This made syncing between the two repositories easier. A
+`#[non_exhaustive]` list of things that still can be improved is:
+
+1. Use the same `rustfmt` version and configuration as `rustc`.
+2. Make `cargo dev` work in the Rust repo, just as it works in the Clippy repo.
+ E.g. `cargo dev bless` or `cargo dev update_lints`. And even add more things
+ to it that might be useful for the Rust repo, e.g. `cargo dev deprecate`.
+3. Easier sync process. The `subtree` situation is not ideal.
+
+## Prioritization
+
+The most pressing issues for users of Clippy are of course the user facing
+issues. So there should be a priority on those issues, but without losing track
+of the internal issues listed in this document.
+
+Getting the FP rate of warn/deny-by-default lints under control should have the
+highest priority. Other user facing issues should also get a high priority, but
+shouldn't be in the way of addressing internal issues.
+
+To better manage the upcoming projects, the basic internal processes, like
+meetings, tracking issues and documentation, should be established as soon as
+possible. They might even be necessary to properly manage the projects,
+regarding the user facing issues.
+
+# Prior Art
+
+## Rust Roadmap
+
+Rust's roadmap process was established by [RFC 1728] in 2016. Since then, a
+roadmap has been published every year, defining the bigger plans for the coming
+years. This year's roadmap can be found [here][Rust Roadmap 2021].
+
+[RFC 1728]: https://rust-lang.github.io/rfcs/1728-north-star.html
+
+# Drawbacks
+
+## Big Roadmap
+
+This roadmap is pretty big and not all items listed in this document might be
+addressed during 2021. Because this is the first roadmap for Clippy, having open
+tasks at the end of 2021 is fine, but they should be revisited in the 2022
+roadmap.
diff --git a/src/tools/clippy/book/src/installation.md b/src/tools/clippy/book/src/installation.md
new file mode 100644
index 000000000..b2a28d0be
--- /dev/null
+++ b/src/tools/clippy/book/src/installation.md
@@ -0,0 +1,24 @@
+# Installation
+
+If you're using `rustup` to install and manage your Rust toolchains, Clippy is
+usually **already installed**. In that case you can skip this chapter and go to
+the [Usage] chapter.
+
+> Note: If you used the `minimal` profile when installing a Rust toolchain,
+> Clippy is not automatically installed.
+
+## Using Rustup
+
+If Clippy was not installed for a toolchain, it can be installed with
+
+```
+$ rustup component add clippy [--toolchain=<name>]
+```
+
+## From Source
+
+Take a look at the [Basics] chapter in the Clippy developer guide to find step
+by step instructions on how to build and install Clippy from source.
+
+[Basics]: development/basics.md#install-from-source
+[Usage]: usage.md
diff --git a/src/tools/clippy/book/src/lints.md b/src/tools/clippy/book/src/lints.md
new file mode 100644
index 000000000..35e30960b
--- /dev/null
+++ b/src/tools/clippy/book/src/lints.md
@@ -0,0 +1,105 @@
+# Clippy's Lints
+
+Clippy offers a bunch of additional lints to help its users write more correct
+and idiomatic Rust code. A full list of all lints, which can be filtered by
+category, lint level, or keywords, can be found in the [Clippy lint
+documentation].
+
+This chapter will give an overview of the different lint categories, which kinds
+of lints they offer, and recommended actions when you see a lint from one of
+these categories. For examples, see the [Clippy lint documentation] and filter
+by category.
+
+The different lint groups were defined in the [Clippy 1.0 RFC].
+
+## Correctness
+
+The `clippy::correctness` group is the only lint group in Clippy whose lints are
+deny-by-default and abort the compilation when triggered. This is for good
+reason: If you see a `correctness` lint, it means that your code is outright
+wrong or useless and you should try to fix it.
+
+Lints in this category are carefully picked and should be free of false
+positives. So just `#[allow]`ing those lints is not recommended.
+
+## Suspicious
+
+The `clippy::suspicious` group is similar to the correctness lints in that it
+contains lints that trigger on code that is really _sus_ and should be fixed. As
+opposed to correctness lints, it might be possible that the linted code is
+intentionally written like it is.
+
+It is still recommended to fix code that is linted by lints out of this group
+instead of `#[allow]`ing the lint. In case you have intentionally written code
+that offends the lint, you should specifically and locally `#[allow]` the lint
+and give a reason why the code is correct as written.
+
+## Complexity
+
+The `clippy::complexity` group offers lints that give you suggestions on how to
+simplify your code. It mostly focuses on code that can be written in a shorter
+and more readable way, while preserving the semantics.
+
+If you see a complexity lint, it usually means that you can remove or replace
+some code, and it is recommended to do so. However, if you need the more
+complex code for some expressiveness reason, it is recommended to allow
+complexity lints on a case-by-case basis.
+
+## Perf
+
+The `clippy::perf` group gives you suggestions on how you can increase the
+performance of your code. Those lints are mostly about code that the compiler
+can't trivially optimize, but has to be written in a slightly different way to
+make the optimizer's job easier.
+
+Perf lints are usually easy to apply and it is recommended to do so.
+
+## Style
+
+The `clippy::style` group is mostly about writing idiomatic code. Because style
+is subjective, this lint group is the most opinionated warn-by-default group in
+Clippy.
+
+If you see a style lint, applying the suggestion usually makes your code more
+readable and idiomatic. But because we know that this is opinionated, feel free
+to sprinkle `#[allow]`s for style lints in your code or `#![allow]` a style lint
+on your whole crate if you disagree with the suggested style completely.
+
+## Pedantic
+
+The `clippy::pedantic` group makes Clippy even more _pedantic_. You can enable
+the whole group with `#![warn(clippy::pedantic)]` in the `lib.rs`/`main.rs` of
+your crate. This lint group is for Clippy power users that want an in depth
+check of their code.
+
+> _Note:_ Instead of enabling the whole group (like Clippy itself does), you may
+> want to cherry-pick lints out of the pedantic group.
+
+If you enable this group, expect to also use `#[allow]` attributes generously
+throughout your code. Lints in this group are designed to be pedantic and false
+positives sometimes are intentional in order to prevent false negatives.
+
+## Restriction
+
+The `clippy::restriction` group contains lints that will _restrict_ you from
+using certain parts of the Rust language. It is **not** recommended to enable
+the whole group, but rather cherry-pick lints that are useful for your code base
+and your use case.
+
+> _Note:_ Clippy will produce a warning if it finds a
+> `#![warn(clippy::restriction)]` attribute in your code!
+
+Lints from this group will restrict you in some way. If you enable a restriction
+lint for your crate it is recommended to also fix code that this lint triggers
+on. However, those lints are really strict by design and you might want to
+`#[allow]` them in some special cases, with a comment justifying that.
+
+## Cargo
+
+The `clippy::cargo` group gives you suggestions on how to improve your
+`Cargo.toml` file. This might be especially interesting if you want to publish
+your crate and are not sure if you have all useful information in your
+`Cargo.toml`.
+
+[Clippy lint documentation]: https://rust-lang.github.io/rust-clippy/
+[Clippy 1.0 RFC]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md#lint-audit-and-categories
diff --git a/src/tools/clippy/book/src/usage.md b/src/tools/clippy/book/src/usage.md
new file mode 100644
index 000000000..61a90445d
--- /dev/null
+++ b/src/tools/clippy/book/src/usage.md
@@ -0,0 +1,151 @@
+# Usage
+
+This chapter describes how to use Clippy to get the most out of it. Clippy can
+be used as a `cargo` subcommand or, like `rustc`, directly with the
+`clippy-driver` binary.
+
+> _Note:_ This chapter assumes that you have Clippy installed already. If you're
+> not sure, take a look at the [Installation] chapter.
+
+## Cargo subcommand
+
+The easiest and most common way to run Clippy is through `cargo`. To do that,
+just run
+
+```bash
+cargo clippy
+```
+
+### Lint configuration
+
+The above command will run the default set of lints, which are included in the
+lint group `clippy::all`. You might want to use even more lints or you might not
+agree with every Clippy lint, and for that there are ways to configure lint
+levels.
+
+> _Note:_ Clippy is meant to be used with a generous sprinkling of
+> `#[allow(..)]`s throughout your code. So if you disagree with a lint, don't
+> feel bad disabling it for parts of your code or the whole project.
+
+#### Command line
+
+You can configure lint levels on the command line by adding
+`-A/W/D clippy::lint_name` like this:
+
+```bash
+cargo clippy -- -Aclippy::style -Wclippy::double_neg -Dclippy::perf
+```
+
+For [CI], all warnings can be elevated to errors, which will in turn fail
+the build and cause Clippy to exit with a code other than `0`.
+
+```
+cargo clippy -- -Dwarnings
+```
+
+> _Note:_ Adding `-D warnings` will cause your build to fail if **any** warnings
+> are found in your code. That includes warnings found by rustc (e.g.
+> `dead_code`, etc.).
+
+For more information on configuring lint levels, see the [rustc documentation].
+
+[rustc documentation]: https://doc.rust-lang.org/rustc/lints/levels.html#configuring-warning-levels
+
+#### Even more lints
+
+Clippy has lint groups which are allow-by-default. This means that you will
+have to enable the lints in those groups manually.
+
+For a full list of all lints with their description and examples, please refer
+to [Clippy's lint list]. The two most important allow-by-default groups are
+described below:
+
+[Clippy's lint list]: https://rust-lang.github.io/rust-clippy/master/index.html
+
+##### `clippy::pedantic`
+
+The first group is the `pedantic` group. This group contains really opinionated
+lints that may have some intentional false positives in order to prevent false
+negatives. So while this group is ready to be used in production, you can expect
+to sprinkle multiple `#[allow(..)]`s in your code. If you find any false
+positives, you're still welcome to report them to us for future improvements.
+
+> FYI: Clippy uses the whole group to lint itself.
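+
+If you first want to see what the group would report on your crate, you can,
+for example, enable it for a single run from the command line instead of adding
+the attribute:
+
+```bash
+cargo clippy -- -W clippy::pedantic
+```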
+
+##### `clippy::restriction`
+
+The second group is the `restriction` group. This group contains lints that
+"restrict" the language in some way. For example the `clippy::unwrap` lint from
+this group won't allow you to use `.unwrap()` in your code. You may want to look
+through the lints in this group and enable the ones that fit your need.
+
+> _Note:_ You shouldn't enable the whole lint group, but cherry-pick lints from
+> this group. Some lints in this group will even contradict other Clippy lints!
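+
+Cherry-picked restriction lints can be enabled the same way as any other lint,
+for example on the command line (here with the unwrap lint mentioned above):
+
+```bash
+cargo clippy -- -W clippy::unwrap_used
+```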
+
+#### Too many lints
+
+The most opinionated warn-by-default group of Clippy is the `clippy::style`
+group. Some people prefer to disable this group completely and then cherry-pick
+some lints they like from this group. The same is of course possible with every
+other of Clippy's lint groups.
+
+> _Note:_ We try to keep the warn-by-default groups free from false positives
+> (FP). If you find that a lint wrongly triggers, please report it in an issue
+> (if there isn't an issue for that FP already)
+
+#### Source Code
+
+You can configure lint levels in source code the same way you can configure
+`rustc` lints:
+
+```rust
+#![allow(clippy::style)]
+
+#[warn(clippy::double_neg)]
+fn main() {
+ let x = 1;
+ let y = --x;
+ // ^^ warning: double negation
+}
+```
+
+### Automatically applying Clippy suggestions
+
+Clippy can automatically apply some lint suggestions, just like the compiler.
+
+```terminal
+cargo clippy --fix
+```
+
+### Workspaces
+
+All the usual workspace options should work with Clippy. For example the
+following command will run Clippy on the `example` crate in your workspace:
+
+```terminal
+cargo clippy -p example
+```
+
+As with `cargo check`, this includes dependencies that are members of the
+workspace, like path dependencies. If you want to run Clippy **only** on the
+given crate, use the `--no-deps` option like this:
+
+```terminal
+cargo clippy -p example -- --no-deps
+```
+
+## Using Clippy without `cargo`: `clippy-driver`
+
+Clippy can also be used in projects that do not use cargo. To do so, run
+`clippy-driver` with the same arguments you use for `rustc`. For example:
+
+```terminal
+clippy-driver --edition 2018 -Cpanic=abort foo.rs
+```
+
+> _Note:_ `clippy-driver` is designed for running Clippy and should not be used
+> as a general replacement for `rustc`. `clippy-driver` may produce artifacts
+> that are not optimized as expected, for example.
+
+[Installation]: installation.md
+[CI]: continuous_integration/index.md
diff --git a/src/tools/clippy/build.rs b/src/tools/clippy/build.rs
new file mode 100644
index 000000000..b5484bec3
--- /dev/null
+++ b/src/tools/clippy/build.rs
@@ -0,0 +1,19 @@
+fn main() {
+ // Forward the profile to the main compilation
+ println!("cargo:rustc-env=PROFILE={}", std::env::var("PROFILE").unwrap());
+ // Don't rebuild even if nothing changed
+ println!("cargo:rerun-if-changed=build.rs");
+ // forward git repo hashes we build at
+ println!(
+ "cargo:rustc-env=GIT_HASH={}",
+ rustc_tools_util::get_commit_hash().unwrap_or_default()
+ );
+ println!(
+ "cargo:rustc-env=COMMIT_DATE={}",
+ rustc_tools_util::get_commit_date().unwrap_or_default()
+ );
+ println!(
+ "cargo:rustc-env=RUSTC_RELEASE_CHANNEL={}",
+ rustc_tools_util::get_channel()
+ );
+}
diff --git a/src/tools/clippy/clippy.toml b/src/tools/clippy/clippy.toml
new file mode 100644
index 000000000..cda8d17ee
--- /dev/null
+++ b/src/tools/clippy/clippy.toml
@@ -0,0 +1 @@
+avoid-breaking-exported-api = false
diff --git a/src/tools/clippy/clippy_dev/Cargo.toml b/src/tools/clippy/clippy_dev/Cargo.toml
new file mode 100644
index 000000000..2ac3b4fe2
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "clippy_dev"
+version = "0.0.1"
+edition = "2021"
+
+[dependencies]
+aho-corasick = "0.7"
+clap = "3.2"
+indoc = "1.0"
+itertools = "0.10.1"
+opener = "0.5"
+shell-escape = "0.1"
+tempfile = "3.2"
+walkdir = "2.3"
+
+[features]
+deny-warnings = []
+
+[package.metadata.rust-analyzer]
+# This package uses #[feature(rustc_private)]
+rustc_private = true
diff --git a/src/tools/clippy/clippy_dev/src/bless.rs b/src/tools/clippy/clippy_dev/src/bless.rs
new file mode 100644
index 000000000..f5c51b947
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/bless.rs
@@ -0,0 +1,60 @@
+//! `bless` updates the reference files in the repo with changed output files
+//! from the last test run.
+
+use crate::cargo_clippy_path;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::sync::LazyLock;
+use walkdir::{DirEntry, WalkDir};
+
+static CLIPPY_BUILD_TIME: LazyLock<Option<std::time::SystemTime>> =
+ LazyLock::new(|| cargo_clippy_path().metadata().ok()?.modified().ok());
+
+/// # Panics
+///
+/// Panics if the path to a test file is broken
+pub fn bless(ignore_timestamp: bool) {
+ let extensions = ["stdout", "stderr", "fixed"].map(OsStr::new);
+
+ WalkDir::new(build_dir())
+ .into_iter()
+ .map(Result::unwrap)
+ .filter(|entry| entry.path().extension().map_or(false, |ext| extensions.contains(&ext)))
+ .for_each(|entry| update_reference_file(&entry, ignore_timestamp));
+}
+
+fn update_reference_file(test_output_entry: &DirEntry, ignore_timestamp: bool) {
+ let test_output_path = test_output_entry.path();
+
+ let reference_file_name = test_output_entry.file_name().to_str().unwrap().replace(".stage-id", "");
+ let reference_file_path = Path::new("tests")
+ .join(test_output_path.strip_prefix(build_dir()).unwrap())
+ .with_file_name(reference_file_name);
+
+ // If the test output was not updated since the last clippy build, it may be outdated
+ if !ignore_timestamp && !updated_since_clippy_build(test_output_entry).unwrap_or(true) {
+ return;
+ }
+
+ let test_output_file = fs::read(&test_output_path).expect("Unable to read test output file");
+ let reference_file = fs::read(&reference_file_path).unwrap_or_default();
+
+ if test_output_file != reference_file {
+ // If a test run caused an output file to change, update the reference file
+ println!("updating {}", reference_file_path.display());
+ fs::copy(test_output_path, &reference_file_path).expect("Could not update reference file");
+ }
+}
+
+fn updated_since_clippy_build(entry: &DirEntry) -> Option<bool> {
+ let clippy_build_time = (*CLIPPY_BUILD_TIME)?;
+ let modified = entry.metadata().ok()?.modified().ok()?;
+ Some(modified >= clippy_build_time)
+}
+
+fn build_dir() -> PathBuf {
+ let mut path = std::env::current_exe().unwrap();
+ path.set_file_name("test");
+ path
+}
diff --git a/src/tools/clippy/clippy_dev/src/dogfood.rs b/src/tools/clippy/clippy_dev/src/dogfood.rs
new file mode 100644
index 000000000..b69e9f649
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/dogfood.rs
@@ -0,0 +1,33 @@
+use crate::clippy_project_root;
+use std::process::Command;
+
+/// # Panics
+///
+/// Panics if unable to run the dogfood test
+pub fn dogfood(fix: bool, allow_dirty: bool, allow_staged: bool) {
+ let mut cmd = Command::new("cargo");
+
+ cmd.current_dir(clippy_project_root())
+ .args(["test", "--test", "dogfood"])
+ .args(["--features", "internal"])
+ .args(["--", "dogfood_clippy"]);
+
+ let mut dogfood_args = Vec::new();
+ if fix {
+ dogfood_args.push("--fix");
+ }
+
+ if allow_dirty {
+ dogfood_args.push("--allow-dirty");
+ }
+
+ if allow_staged {
+ dogfood_args.push("--allow-staged");
+ }
+
+ cmd.env("__CLIPPY_DOGFOOD_ARGS", dogfood_args.join(" "));
+
+ let output = cmd.output().expect("failed to run command");
+
+ println!("{}", String::from_utf8_lossy(&output.stdout));
+}
diff --git a/src/tools/clippy/clippy_dev/src/fmt.rs b/src/tools/clippy/clippy_dev/src/fmt.rs
new file mode 100644
index 000000000..3b27f061e
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/fmt.rs
@@ -0,0 +1,226 @@
+use crate::clippy_project_root;
+use itertools::Itertools;
+use shell_escape::escape;
+use std::ffi::{OsStr, OsString};
+use std::path::Path;
+use std::process::{self, Command, Stdio};
+use std::{fs, io};
+use walkdir::WalkDir;
+
+#[derive(Debug)]
+pub enum CliError {
+ CommandFailed(String, String),
+ IoError(io::Error),
+ RustfmtNotInstalled,
+ WalkDirError(walkdir::Error),
+ IntellijSetupActive,
+}
+
+impl From<io::Error> for CliError {
+ fn from(error: io::Error) -> Self {
+ Self::IoError(error)
+ }
+}
+
+impl From<walkdir::Error> for CliError {
+ fn from(error: walkdir::Error) -> Self {
+ Self::WalkDirError(error)
+ }
+}
+
+struct FmtContext {
+ check: bool,
+ verbose: bool,
+ rustfmt_path: String,
+}
+
+// the "main" function of cargo dev fmt
+pub fn run(check: bool, verbose: bool) {
+ fn try_run(context: &FmtContext) -> Result<bool, CliError> {
+ let mut success = true;
+
+ let project_root = clippy_project_root();
+
+ // if we added a local rustc repo as path dependency to clippy for rust analyzer, we do NOT want to
+ // format because rustfmt would also format the entire rustc repo as it is a local
+ // dependency
+ if fs::read_to_string(project_root.join("Cargo.toml"))
+ .expect("Failed to read clippy Cargo.toml")
+ .contains(&"[target.'cfg(NOT_A_PLATFORM)'.dependencies]")
+ {
+ return Err(CliError::IntellijSetupActive);
+ }
+
+ rustfmt_test(context)?;
+
+ success &= cargo_fmt(context, project_root.as_path())?;
+ success &= cargo_fmt(context, &project_root.join("clippy_dev"))?;
+ success &= cargo_fmt(context, &project_root.join("rustc_tools_util"))?;
+ success &= cargo_fmt(context, &project_root.join("lintcheck"))?;
+
+ let chunks = WalkDir::new(project_root.join("tests"))
+ .into_iter()
+ .filter_map(|entry| {
+ let entry = entry.expect("failed to find tests");
+ let path = entry.path();
+
+ if path.extension() != Some("rs".as_ref()) || entry.file_name() == "ice-3891.rs" {
+ None
+ } else {
+ Some(entry.into_path().into_os_string())
+ }
+ })
+ .chunks(250);
+
+ for chunk in &chunks {
+ success &= rustfmt(context, chunk)?;
+ }
+
+ Ok(success)
+ }
+
+ fn output_err(err: CliError) {
+ match err {
+ CliError::CommandFailed(command, stderr) => {
+ eprintln!("error: A command failed! `{}`\nstderr: {}", command, stderr);
+ },
+ CliError::IoError(err) => {
+ eprintln!("error: {}", err);
+ },
+ CliError::RustfmtNotInstalled => {
+ eprintln!("error: rustfmt nightly is not installed.");
+ },
+ CliError::WalkDirError(err) => {
+ eprintln!("error: {}", err);
+ },
+ CliError::IntellijSetupActive => {
+ eprintln!(
+ "error: a local rustc repo is enabled as path dependency via `cargo dev setup intellij`.
+Not formatting because that would format the local repo as well!
+Please revert the changes to Cargo.tomls with `cargo dev remove intellij`."
+ );
+ },
+ }
+ }
+
+ let output = Command::new("rustup")
+ .args(["which", "rustfmt"])
+ .stderr(Stdio::inherit())
+ .output()
+ .expect("error running `rustup which rustfmt`");
+ if !output.status.success() {
+ eprintln!("`rustup which rustfmt` did not execute successfully");
+ process::exit(1);
+ }
+ let mut rustfmt_path = String::from_utf8(output.stdout).expect("invalid rustfmt path");
+ rustfmt_path.truncate(rustfmt_path.trim_end().len());
+
+ let context = FmtContext {
+ check,
+ verbose,
+ rustfmt_path,
+ };
+ let result = try_run(&context);
+ let code = match result {
+ Ok(true) => 0,
+ Ok(false) => {
+ eprintln!();
+ eprintln!("Formatting check failed.");
+ eprintln!("Run `cargo dev fmt` to update formatting.");
+ 1
+ },
+ Err(err) => {
+ output_err(err);
+ 1
+ },
+ };
+ process::exit(code);
+}
+
+fn format_command(program: impl AsRef<OsStr>, dir: impl AsRef<Path>, args: &[impl AsRef<OsStr>]) -> String {
+ let arg_display: Vec<_> = args.iter().map(|a| escape(a.as_ref().to_string_lossy())).collect();
+
+ format!(
+ "cd {} && {} {}",
+ escape(dir.as_ref().to_string_lossy()),
+ escape(program.as_ref().to_string_lossy()),
+ arg_display.join(" ")
+ )
+}
+
+fn exec(
+ context: &FmtContext,
+ program: impl AsRef<OsStr>,
+ dir: impl AsRef<Path>,
+ args: &[impl AsRef<OsStr>],
+) -> Result<bool, CliError> {
+ if context.verbose {
+ println!("{}", format_command(&program, &dir, args));
+ }
+
+ let output = Command::new(&program)
+ .env("RUSTFMT", &context.rustfmt_path)
+ .current_dir(&dir)
+ .args(args.iter())
+ .output()
+ .unwrap();
+ let success = output.status.success();
+
+ if !context.check && !success {
+ let stderr = std::str::from_utf8(&output.stderr).unwrap_or("");
+ return Err(CliError::CommandFailed(
+ format_command(&program, &dir, args),
+ String::from(stderr),
+ ));
+ }
+
+ Ok(success)
+}
+
+fn cargo_fmt(context: &FmtContext, path: &Path) -> Result<bool, CliError> {
+ let mut args = vec!["fmt", "--all"];
+ if context.check {
+ args.push("--check");
+ }
+ let success = exec(context, "cargo", path, &args)?;
+
+ Ok(success)
+}
+
+fn rustfmt_test(context: &FmtContext) -> Result<(), CliError> {
+ let program = "rustfmt";
+ let dir = std::env::current_dir()?;
+ let args = &["--version"];
+
+ if context.verbose {
+ println!("{}", format_command(&program, &dir, args));
+ }
+
+ let output = Command::new(&program).current_dir(&dir).args(args.iter()).output()?;
+
+ if output.status.success() {
+ Ok(())
+ } else if std::str::from_utf8(&output.stderr)
+ .unwrap_or("")
+ .starts_with("error: 'rustfmt' is not installed")
+ {
+ Err(CliError::RustfmtNotInstalled)
+ } else {
+ Err(CliError::CommandFailed(
+ format_command(&program, &dir, args),
+ std::str::from_utf8(&output.stderr).unwrap_or("").to_string(),
+ ))
+ }
+}
+
+fn rustfmt(context: &FmtContext, paths: impl Iterator<Item = OsString>) -> Result<bool, CliError> {
+ let mut args = Vec::new();
+ if context.check {
+ args.push(OsString::from("--check"));
+ }
+ args.extend(paths);
+
+ let success = exec(context, &context.rustfmt_path, std::env::current_dir()?, &args)?;
+
+ Ok(success)
+}
diff --git a/src/tools/clippy/clippy_dev/src/lib.rs b/src/tools/clippy/clippy_dev/src/lib.rs
new file mode 100644
index 000000000..82574a8e6
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/lib.rs
@@ -0,0 +1,58 @@
+#![feature(let_chains)]
+#![feature(let_else)]
+#![feature(once_cell)]
+#![feature(rustc_private)]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+// warn on lints, that are included in `rust-lang/rust`s bootstrap
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+extern crate rustc_lexer;
+
+use std::path::PathBuf;
+
+pub mod bless;
+pub mod dogfood;
+pub mod fmt;
+pub mod lint;
+pub mod new_lint;
+pub mod serve;
+pub mod setup;
+pub mod update_lints;
+
+#[cfg(not(windows))]
+static CARGO_CLIPPY_EXE: &str = "cargo-clippy";
+#[cfg(windows)]
+static CARGO_CLIPPY_EXE: &str = "cargo-clippy.exe";
+
+/// Returns the path to the `cargo-clippy` binary
+#[must_use]
+pub fn cargo_clippy_path() -> PathBuf {
+ let mut path = std::env::current_exe().expect("failed to get current executable name");
+ path.set_file_name(CARGO_CLIPPY_EXE);
+ path
+}
+
+/// Returns the path to the Clippy project directory
+///
+/// # Panics
+///
+/// Panics if the current directory could not be retrieved, there was an error reading any of the
+/// Cargo.toml files, or no ancestor directory is the clippy root directory
+#[must_use]
+pub fn clippy_project_root() -> PathBuf {
+ let current_dir = std::env::current_dir().unwrap();
+ for path in current_dir.ancestors() {
+ let result = std::fs::read_to_string(path.join("Cargo.toml"));
+ if let Err(err) = &result {
+ if err.kind() == std::io::ErrorKind::NotFound {
+ continue;
+ }
+ }
+
+ let content = result.unwrap();
+ if content.contains("[package]\nname = \"clippy\"") {
+ return path.to_path_buf();
+ }
+ }
+ panic!("error: Can't determine root of project. Please run inside a Clippy working dir.");
+}
diff --git a/src/tools/clippy/clippy_dev/src/lint.rs b/src/tools/clippy/clippy_dev/src/lint.rs
new file mode 100644
index 000000000..71005449b
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/lint.rs
@@ -0,0 +1,55 @@
+use crate::cargo_clippy_path;
+use std::process::{self, Command, ExitStatus};
+use std::{fs, io};
+
+fn exit_if_err(status: io::Result<ExitStatus>) {
+ match status.expect("failed to run command").code() {
+ Some(0) => {},
+ Some(n) => process::exit(n),
+ None => {
+ eprintln!("Killed by signal");
+ process::exit(1);
+ },
+ }
+}
+
+pub fn run<'a>(path: &str, args: impl Iterator<Item = &'a String>) {
+ let is_file = match fs::metadata(path) {
+ Ok(metadata) => metadata.is_file(),
+ Err(e) => {
+ eprintln!("Failed to read {path}: {e:?}");
+ process::exit(1);
+ },
+ };
+
+ if is_file {
+ exit_if_err(
+ Command::new("cargo")
+ .args(["run", "--bin", "clippy-driver", "--"])
+ .args(["-L", "./target/debug"])
+ .args(["-Z", "no-codegen"])
+ .args(["--edition", "2021"])
+ .arg(path)
+ .args(args)
+ .status(),
+ );
+ } else {
+ exit_if_err(Command::new("cargo").arg("build").status());
+
+ // Run in a tempdir as changes to clippy do not retrigger linting
+ let target = tempfile::Builder::new()
+ .prefix("clippy")
+ .tempdir()
+ .expect("failed to create tempdir");
+
+ let status = Command::new(cargo_clippy_path())
+ .arg("clippy")
+ .args(args)
+ .current_dir(path)
+ .env("CARGO_TARGET_DIR", target.as_ref())
+ .status();
+
+ target.close().expect("failed to remove tempdir");
+ exit_if_err(status);
+ }
+}
diff --git a/src/tools/clippy/clippy_dev/src/main.rs b/src/tools/clippy/clippy_dev/src/main.rs
new file mode 100644
index 000000000..a417d3dd8
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/main.rs
@@ -0,0 +1,314 @@
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+// warn on lints, that are included in `rust-lang/rust`s bootstrap
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use clap::{Arg, ArgAction, ArgMatches, Command, PossibleValue};
+use clippy_dev::{bless, dogfood, fmt, lint, new_lint, serve, setup, update_lints};
+use indoc::indoc;
+
+fn main() {
+ let matches = get_clap_config();
+
+ match matches.subcommand() {
+ Some(("bless", matches)) => {
+ bless::bless(matches.contains_id("ignore-timestamp"));
+ },
+ Some(("dogfood", matches)) => {
+ dogfood::dogfood(
+ matches.contains_id("fix"),
+ matches.contains_id("allow-dirty"),
+ matches.contains_id("allow-staged"),
+ );
+ },
+ Some(("fmt", matches)) => {
+ fmt::run(matches.contains_id("check"), matches.contains_id("verbose"));
+ },
+ Some(("update_lints", matches)) => {
+ if matches.contains_id("print-only") {
+ update_lints::print_lints();
+ } else if matches.contains_id("check") {
+ update_lints::update(update_lints::UpdateMode::Check);
+ } else {
+ update_lints::update(update_lints::UpdateMode::Change);
+ }
+ },
+ Some(("new_lint", matches)) => {
+ match new_lint::create(
+ matches.get_one::<String>("pass"),
+ matches.get_one::<String>("name"),
+ matches.get_one::<String>("category").map(String::as_str),
+ matches.get_one::<String>("type").map(String::as_str),
+ matches.contains_id("msrv"),
+ ) {
+ Ok(_) => update_lints::update(update_lints::UpdateMode::Change),
+ Err(e) => eprintln!("Unable to create lint: {}", e),
+ }
+ },
+ Some(("setup", sub_command)) => match sub_command.subcommand() {
+ Some(("intellij", matches)) => {
+ if matches.contains_id("remove") {
+ setup::intellij::remove_rustc_src();
+ } else {
+ setup::intellij::setup_rustc_src(
+ matches
+ .get_one::<String>("rustc-repo-path")
+ .expect("this field is mandatory and therefore always valid"),
+ );
+ }
+ },
+ Some(("git-hook", matches)) => {
+ if matches.contains_id("remove") {
+ setup::git_hook::remove_hook();
+ } else {
+ setup::git_hook::install_hook(matches.contains_id("force-override"));
+ }
+ },
+ Some(("vscode-tasks", matches)) => {
+ if matches.contains_id("remove") {
+ setup::vscode::remove_tasks();
+ } else {
+ setup::vscode::install_tasks(matches.contains_id("force-override"));
+ }
+ },
+ _ => {},
+ },
+ Some(("remove", sub_command)) => match sub_command.subcommand() {
+ Some(("git-hook", _)) => setup::git_hook::remove_hook(),
+ Some(("intellij", _)) => setup::intellij::remove_rustc_src(),
+ Some(("vscode-tasks", _)) => setup::vscode::remove_tasks(),
+ _ => {},
+ },
+ Some(("serve", matches)) => {
+ let port = *matches.get_one::<u16>("port").unwrap();
+ let lint = matches.get_one::<String>("lint");
+ serve::run(port, lint);
+ },
+ Some(("lint", matches)) => {
+ let path = matches.get_one::<String>("path").unwrap();
+ let args = matches.get_many::<String>("args").into_iter().flatten();
+ lint::run(path, args);
+ },
+ Some(("rename_lint", matches)) => {
+ let old_name = matches.get_one::<String>("old_name").unwrap();
+ let new_name = matches.get_one::<String>("new_name").unwrap_or(old_name);
+ let uplift = matches.contains_id("uplift");
+ update_lints::rename(old_name, new_name, uplift);
+ },
+ Some(("deprecate", matches)) => {
+ let name = matches.get_one::<String>("name").unwrap();
+ let reason = matches.get_one("reason");
+ update_lints::deprecate(name, reason);
+ },
+ _ => {},
+ }
+}
+
+fn get_clap_config() -> ArgMatches {
+ Command::new("Clippy developer tooling")
+ .arg_required_else_help(true)
+ .subcommands([
+ Command::new("bless").about("bless the test output changes").arg(
+ Arg::new("ignore-timestamp")
+ .long("ignore-timestamp")
+ .help("Include files updated before clippy was built"),
+ ),
+ Command::new("dogfood").about("Runs the dogfood test").args([
+ Arg::new("fix").long("fix").help("Apply the suggestions when possible"),
+ Arg::new("allow-dirty")
+ .long("allow-dirty")
+ .help("Fix code even if the working directory has changes")
+ .requires("fix"),
+ Arg::new("allow-staged")
+ .long("allow-staged")
+ .help("Fix code even if the working directory has staged changes")
+ .requires("fix"),
+ ]),
+ Command::new("fmt")
+ .about("Run rustfmt on all projects and tests")
+ .args([
+ Arg::new("check").long("check").help("Use the rustfmt --check option"),
+ Arg::new("verbose").short('v').long("verbose").help("Echo commands run"),
+ ]),
+ Command::new("update_lints")
+ .about("Updates lint registration and information from the source code")
+ .long_about(
+ "Makes sure that:\n \
+ * the lint count in README.md is correct\n \
+ * the changelog contains markdown link references at the bottom\n \
+ * all lint groups include the correct lints\n \
+ * lint modules in `clippy_lints/*` are visible in `src/lib.rs` via `pub mod`\n \
+ * all lints are registered in the lint store",
+ )
+ .args([
+ Arg::new("print-only").long("print-only").help(
+ "Print a table of lints to STDOUT. \
+ This does not include deprecated and internal lints. \
+ (Does not modify any files)",
+ ),
+ Arg::new("check")
+ .long("check")
+ .help("Checks that `cargo dev update_lints` has been run. Used on CI."),
+ ]),
+ Command::new("new_lint")
+ .about("Create new lint and run `cargo dev update_lints`")
+ .args([
+ Arg::new("pass")
+ .short('p')
+ .long("pass")
+ .help("Specify whether the lint runs during the early or late pass")
+ .takes_value(true)
+ .value_parser([PossibleValue::new("early"), PossibleValue::new("late")])
+ .conflicts_with("type")
+ .required_unless_present("type"),
+ Arg::new("name")
+ .short('n')
+ .long("name")
+ .help("Name of the new lint in snake case, ex: fn_too_long")
+ .takes_value(true)
+ .required(true),
+ Arg::new("category")
+ .short('c')
+ .long("category")
+ .help("What category the lint belongs to")
+ .default_value("nursery")
+ .value_parser([
+ PossibleValue::new("style"),
+ PossibleValue::new("correctness"),
+ PossibleValue::new("suspicious"),
+ PossibleValue::new("complexity"),
+ PossibleValue::new("perf"),
+ PossibleValue::new("pedantic"),
+ PossibleValue::new("restriction"),
+ PossibleValue::new("cargo"),
+ PossibleValue::new("nursery"),
+ PossibleValue::new("internal"),
+ PossibleValue::new("internal_warn"),
+ ])
+ .takes_value(true),
+ Arg::new("type")
+ .long("type")
+ .help("What directory the lint belongs in")
+ .takes_value(true)
+ .required(false),
+ Arg::new("msrv").long("msrv").help("Add MSRV config code to the lint"),
+ ]),
+ Command::new("setup")
+ .about("Support for setting up your personal development environment")
+ .arg_required_else_help(true)
+ .subcommands([
+ Command::new("intellij")
+ .about("Alter dependencies so Intellij Rust can find rustc internals")
+ .args([
+ Arg::new("remove")
+ .long("remove")
+ .help("Remove the dependencies added with 'cargo dev setup intellij'")
+ .required(false),
+ Arg::new("rustc-repo-path")
+ .long("repo-path")
+ .short('r')
+ .help("The path to a rustc repo that will be used for setting the dependencies")
+ .takes_value(true)
+ .value_name("path")
+ .conflicts_with("remove")
+ .required(true),
+ ]),
+ Command::new("git-hook")
+ .about("Add a pre-commit git hook that formats your code to make it look pretty")
+ .args([
+ Arg::new("remove")
+ .long("remove")
+ .help("Remove the pre-commit hook added with 'cargo dev setup git-hook'")
+ .required(false),
+ Arg::new("force-override")
+ .long("force-override")
+ .short('f')
+ .help("Forces the override of an existing git pre-commit hook")
+ .required(false),
+ ]),
+ Command::new("vscode-tasks")
+ .about("Add several tasks to vscode for formatting, validation and testing")
+ .args([
+ Arg::new("remove")
+ .long("remove")
+ .help("Remove the tasks added with 'cargo dev setup vscode-tasks'")
+ .required(false),
+ Arg::new("force-override")
+ .long("force-override")
+ .short('f')
+ .help("Forces the override of existing vscode tasks")
+ .required(false),
+ ]),
+ ]),
+ Command::new("remove")
+ .about("Support for undoing changes done by the setup command")
+ .arg_required_else_help(true)
+ .subcommands([
+ Command::new("git-hook").about("Remove any existing pre-commit git hook"),
+ Command::new("vscode-tasks").about("Remove any existing vscode tasks"),
+ Command::new("intellij").about("Removes rustc source paths added via `cargo dev setup intellij`"),
+ ]),
+ Command::new("serve")
+ .about("Launch a local 'ALL the Clippy Lints' website in a browser")
+ .args([
+ Arg::new("port")
+ .long("port")
+ .short('p')
+ .help("Local port for the http server")
+ .default_value("8000")
+ .value_parser(clap::value_parser!(u16)),
+ Arg::new("lint").help("Which lint's page to load initially (optional)"),
+ ]),
+ Command::new("lint")
+ .about("Manually run clippy on a file or package")
+ .after_help(indoc! {"
+ EXAMPLES
+ Lint a single file:
+ cargo dev lint tests/ui/attrs.rs
+
+ Lint a package directory:
+ cargo dev lint tests/ui-cargo/wildcard_dependencies/fail
+ cargo dev lint ~/my-project
+
+ Run rustfix:
+ cargo dev lint ~/my-project -- --fix
+
+ Set lint levels:
+ cargo dev lint file.rs -- -W clippy::pedantic
+ cargo dev lint ~/my-project -- -- -W clippy::pedantic
+ "})
+ .args([
+ Arg::new("path")
+ .required(true)
+ .help("The path to a file or package directory to lint"),
+ Arg::new("args")
+ .action(ArgAction::Append)
+ .help("Pass extra arguments to cargo/clippy-driver"),
+ ]),
+ Command::new("rename_lint").about("Renames the given lint").args([
+ Arg::new("old_name")
+ .index(1)
+ .required(true)
+ .help("The name of the lint to rename"),
+ Arg::new("new_name")
+ .index(2)
+ .required_unless_present("uplift")
+ .help("The new name of the lint"),
+ Arg::new("uplift")
+ .long("uplift")
+ .help("This lint will be uplifted into rustc"),
+ ]),
+ Command::new("deprecate").about("Deprecates the given lint").args([
+ Arg::new("name")
+ .index(1)
+ .required(true)
+ .help("The name of the lint to deprecate"),
+ Arg::new("reason")
+ .long("reason")
+ .short('r')
+ .required(false)
+ .takes_value(true)
+ .help("The reason for deprecation"),
+ ]),
+ ])
+ .get_matches()
+}
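As a rough illustration of the clap wiring above (a sketch only; the invocation and port value are hypothetical, and it assumes the same clap 3.x API used by this file), the `serve` subcommand's `--port` argument is converted by `value_parser!(u16)` before `main` reads it back with `get_one::<u16>`:

    use clap::{Arg, Command};

    fn main() {
        let matches = Command::new("demo")
            .arg(
                Arg::new("port")
                    .long("port")
                    .default_value("8000")
                    .value_parser(clap::value_parser!(u16)),
            )
            .get_matches_from(["demo", "--port", "8080"]);
        // The value parser has already converted the argument, so unwrapping is safe here.
        let port = *matches.get_one::<u16>("port").unwrap();
        assert_eq!(port, 8080);
    }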
diff --git a/src/tools/clippy/clippy_dev/src/new_lint.rs b/src/tools/clippy/clippy_dev/src/new_lint.rs
new file mode 100644
index 000000000..03d2ef3d1
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/new_lint.rs
@@ -0,0 +1,575 @@
+use crate::clippy_project_root;
+use indoc::{indoc, writedoc};
+use std::fmt::Write as _;
+use std::fs::{self, OpenOptions};
+use std::io::prelude::*;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+
+struct LintData<'a> {
+ pass: &'a str,
+ name: &'a str,
+ category: &'a str,
+ ty: Option<&'a str>,
+ project_root: PathBuf,
+}
+
+trait Context {
+ fn context<C: AsRef<str>>(self, text: C) -> Self;
+}
+
+impl<T> Context for io::Result<T> {
+ fn context<C: AsRef<str>>(self, text: C) -> Self {
+ match self {
+ Ok(t) => Ok(t),
+ Err(e) => {
+ let message = format!("{}: {}", text.as_ref(), e);
+ Err(io::Error::new(ErrorKind::Other, message))
+ },
+ }
+ }
+}
+
+/// Creates the files required to implement and test a new lint and runs `update_lints`.
+///
+/// # Errors
+///
+/// This function errors out if the files couldn't be created or written to.
+pub fn create(
+ pass: Option<&String>,
+ lint_name: Option<&String>,
+ category: Option<&str>,
+ mut ty: Option<&str>,
+ msrv: bool,
+) -> io::Result<()> {
+ if category == Some("cargo") && ty.is_none() {
+ // `cargo` is a special category, these lints should always be in `clippy_lints/src/cargo`
+ ty = Some("cargo");
+ }
+
+ let lint = LintData {
+ pass: pass.map_or("", String::as_str),
+ name: lint_name.expect("`name` argument is validated by clap"),
+ category: category.expect("`category` argument is validated by clap"),
+ ty,
+ project_root: clippy_project_root(),
+ };
+
+ create_lint(&lint, msrv).context("Unable to create lint implementation")?;
+ create_test(&lint).context("Unable to create a test for the new lint")?;
+
+ if lint.ty.is_none() {
+ add_lint(&lint, msrv).context("Unable to add lint to clippy_lints/src/lib.rs")?;
+ }
+
+ Ok(())
+}
+
+fn create_lint(lint: &LintData<'_>, enable_msrv: bool) -> io::Result<()> {
+ if let Some(ty) = lint.ty {
+ create_lint_for_ty(lint, enable_msrv, ty)
+ } else {
+ let lint_contents = get_lint_file_contents(lint, enable_msrv);
+ let lint_path = format!("clippy_lints/src/{}.rs", lint.name);
+ write_file(lint.project_root.join(&lint_path), lint_contents.as_bytes())?;
+ println!("Generated lint file: `{}`", lint_path);
+
+ Ok(())
+ }
+}
+
+fn create_test(lint: &LintData<'_>) -> io::Result<()> {
+ fn create_project_layout<P: Into<PathBuf>>(lint_name: &str, location: P, case: &str, hint: &str) -> io::Result<()> {
+ let mut path = location.into().join(case);
+ fs::create_dir(&path)?;
+ write_file(path.join("Cargo.toml"), get_manifest_contents(lint_name, hint))?;
+
+ path.push("src");
+ fs::create_dir(&path)?;
+ let header = format!("// compile-flags: --crate-name={}", lint_name);
+ write_file(path.join("main.rs"), get_test_file_contents(lint_name, Some(&header)))?;
+
+ Ok(())
+ }
+
+ if lint.category == "cargo" {
+ let relative_test_dir = format!("tests/ui-cargo/{}", lint.name);
+ let test_dir = lint.project_root.join(&relative_test_dir);
+ fs::create_dir(&test_dir)?;
+
+ create_project_layout(lint.name, &test_dir, "fail", "Content that triggers the lint goes here")?;
+ create_project_layout(lint.name, &test_dir, "pass", "This file should not trigger the lint")?;
+
+ println!("Generated test directories: `{relative_test_dir}/pass`, `{relative_test_dir}/fail`");
+ } else {
+ let test_path = format!("tests/ui/{}.rs", lint.name);
+ let test_contents = get_test_file_contents(lint.name, None);
+ write_file(lint.project_root.join(&test_path), test_contents)?;
+
+ println!("Generated test file: `{}`", test_path);
+ }
+
+ Ok(())
+}
+
+fn add_lint(lint: &LintData<'_>, enable_msrv: bool) -> io::Result<()> {
+ let path = "clippy_lints/src/lib.rs";
+ let mut lib_rs = fs::read_to_string(path).context("reading")?;
+
+ let comment_start = lib_rs.find("// add lints here,").expect("Couldn't find comment");
+
+ let new_lint = if enable_msrv {
+ format!(
+ "store.register_{lint_pass}_pass(move || Box::new({module_name}::{camel_name}::new(msrv)));\n ",
+ lint_pass = lint.pass,
+ module_name = lint.name,
+ camel_name = to_camel_case(lint.name),
+ )
+ } else {
+ format!(
+ "store.register_{lint_pass}_pass(|| Box::new({module_name}::{camel_name}));\n ",
+ lint_pass = lint.pass,
+ module_name = lint.name,
+ camel_name = to_camel_case(lint.name),
+ )
+ };
+
+ lib_rs.insert_str(comment_start, &new_lint);
+
+ fs::write(path, lib_rs).context("writing")
+}
+
+fn write_file<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> io::Result<()> {
+ fn inner(path: &Path, contents: &[u8]) -> io::Result<()> {
+ OpenOptions::new()
+ .write(true)
+ .create_new(true)
+ .open(path)?
+ .write_all(contents)
+ }
+
+ inner(path.as_ref(), contents.as_ref()).context(format!("writing to file: {}", path.as_ref().display()))
+}
+
+fn to_camel_case(name: &str) -> String {
+ name.split('_')
+ .map(|s| {
+ if s.is_empty() {
+ String::from("")
+ } else {
+ [&s[0..1].to_uppercase(), &s[1..]].concat()
+ }
+ })
+ .collect()
+}
+
+pub(crate) fn get_stabilization_version() -> String {
+ fn parse_manifest(contents: &str) -> Option<String> {
+ let version = contents
+ .lines()
+ .filter_map(|l| l.split_once('='))
+ .find_map(|(k, v)| (k.trim() == "version").then(|| v.trim()))?;
+ let Some(("0", version)) = version.get(1..version.len() - 1)?.split_once('.') else {
+ return None;
+ };
+ let (minor, patch) = version.split_once('.')?;
+ Some(format!(
+ "{}.{}.0",
+ minor.parse::<u32>().ok()?,
+ patch.parse::<u32>().ok()?
+ ))
+ }
+ let contents = fs::read_to_string("Cargo.toml").expect("Unable to read `Cargo.toml`");
+ parse_manifest(&contents).expect("Unable to find package version in `Cargo.toml`")
+}
+
+fn get_test_file_contents(lint_name: &str, header_commands: Option<&str>) -> String {
+ let mut contents = format!(
+ indoc! {"
+ #![warn(clippy::{})]
+
+ fn main() {{
+ // test code goes here
+ }}
+ "},
+ lint_name
+ );
+
+ if let Some(header) = header_commands {
+ contents = format!("{}\n{}", header, contents);
+ }
+
+ contents
+}
+
+fn get_manifest_contents(lint_name: &str, hint: &str) -> String {
+ format!(
+ indoc! {r#"
+ # {}
+
+ [package]
+ name = "{}"
+ version = "0.1.0"
+ publish = false
+
+ [workspace]
+ "#},
+ hint, lint_name
+ )
+}
+
+fn get_lint_file_contents(lint: &LintData<'_>, enable_msrv: bool) -> String {
+ let mut result = String::new();
+
+ let (pass_type, pass_lifetimes, pass_import, context_import) = match lint.pass {
+ "early" => ("EarlyLintPass", "", "use rustc_ast::ast::*;", "EarlyContext"),
+ "late" => ("LateLintPass", "<'_>", "use rustc_hir::*;", "LateContext"),
+ _ => {
+ unreachable!("`pass_type` should only ever be `early` or `late`!");
+ },
+ };
+
+ let lint_name = lint.name;
+ let category = lint.category;
+ let name_camel = to_camel_case(lint.name);
+ let name_upper = lint_name.to_uppercase();
+
+ result.push_str(&if enable_msrv {
+ format!(
+ indoc! {"
+ use clippy_utils::msrvs;
+ {pass_import}
+ use rustc_lint::{{{context_import}, {pass_type}, LintContext}};
+ use rustc_semver::RustcVersion;
+ use rustc_session::{{declare_tool_lint, impl_lint_pass}};
+
+ "},
+ pass_type = pass_type,
+ pass_import = pass_import,
+ context_import = context_import,
+ )
+ } else {
+ format!(
+ indoc! {"
+ {pass_import}
+ use rustc_lint::{{{context_import}, {pass_type}}};
+ use rustc_session::{{declare_lint_pass, declare_tool_lint}};
+
+ "},
+ pass_import = pass_import,
+ pass_type = pass_type,
+ context_import = context_import
+ )
+ });
+
+ let _ = write!(result, "{}", get_lint_declaration(&name_upper, category));
+
+ result.push_str(&if enable_msrv {
+ format!(
+ indoc! {"
+ pub struct {name_camel} {{
+ msrv: Option<RustcVersion>,
+ }}
+
+ impl {name_camel} {{
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {{
+ Self {{ msrv }}
+ }}
+ }}
+
+ impl_lint_pass!({name_camel} => [{name_upper}]);
+
+ impl {pass_type}{pass_lifetimes} for {name_camel} {{
+ extract_msrv_attr!({context_import});
+ }}
+
+ // TODO: Add MSRV level to `clippy_utils/src/msrvs.rs` if needed.
+ // TODO: Add MSRV test to `tests/ui/min_rust_version_attr.rs`.
+ // TODO: Update msrv config comment in `clippy_lints/src/utils/conf.rs`
+ "},
+ pass_type = pass_type,
+ pass_lifetimes = pass_lifetimes,
+ name_upper = name_upper,
+ name_camel = name_camel,
+ context_import = context_import,
+ )
+ } else {
+ format!(
+ indoc! {"
+ declare_lint_pass!({name_camel} => [{name_upper}]);
+
+ impl {pass_type}{pass_lifetimes} for {name_camel} {{}}
+ "},
+ pass_type = pass_type,
+ pass_lifetimes = pass_lifetimes,
+ name_upper = name_upper,
+ name_camel = name_camel,
+ )
+ });
+
+ result
+}
+
+fn get_lint_declaration(name_upper: &str, category: &str) -> String {
+ format!(
+ indoc! {r#"
+ declare_clippy_lint! {{
+ /// ### What it does
+ ///
+ /// ### Why is this bad?
+ ///
+ /// ### Example
+ /// ```rust
+ /// // example code where clippy issues a warning
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// // example code which does not raise clippy warning
+ /// ```
+ #[clippy::version = "{version}"]
+ pub {name_upper},
+ {category},
+ "default lint description"
+ }}
+ "#},
+ version = get_stabilization_version(),
+ name_upper = name_upper,
+ category = category,
+ )
+}
+
+fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::Result<()> {
+ match ty {
+ "cargo" => assert_eq!(
+ lint.category, "cargo",
+ "Lints of type `cargo` must have the `cargo` category"
+ ),
+ _ if lint.category == "cargo" => panic!("Lints of category `cargo` must have the `cargo` type"),
+ _ => {},
+ }
+
+ let ty_dir = lint.project_root.join(format!("clippy_lints/src/{}", ty));
+ assert!(
+ ty_dir.exists() && ty_dir.is_dir(),
+ "Directory `{}` does not exist!",
+ ty_dir.display()
+ );
+
+ let lint_file_path = ty_dir.join(format!("{}.rs", lint.name));
+ assert!(
+ !lint_file_path.exists(),
+ "File `{}` already exists",
+ lint_file_path.display()
+ );
+
+ let mod_file_path = ty_dir.join("mod.rs");
+ let context_import = setup_mod_file(&mod_file_path, lint)?;
+
+ let name_upper = lint.name.to_uppercase();
+ let mut lint_file_contents = String::new();
+
+ if enable_msrv {
+ let _ = writedoc!(
+ lint_file_contents,
+ r#"
+ use clippy_utils::{{meets_msrv, msrvs}};
+ use rustc_lint::{{{context_import}, LintContext}};
+ use rustc_semver::RustcVersion;
+
+ use super::{name_upper};
+
+ // TODO: Adjust the parameters as necessary
+ pub(super) fn check(cx: &{context_import}, msrv: Option<RustcVersion>) {{
+ if !meets_msrv(msrv, todo!("Add a new entry in `clippy_utils/src/msrvs`")) {{
+ return;
+ }}
+ todo!();
+ }}
+ "#,
+ context_import = context_import,
+ name_upper = name_upper,
+ );
+ } else {
+ let _ = writedoc!(
+ lint_file_contents,
+ r#"
+ use rustc_lint::{{{context_import}, LintContext}};
+
+ use super::{name_upper};
+
+ // TODO: Adjust the parameters as necessary
+ pub(super) fn check(cx: &{context_import}) {{
+ todo!();
+ }}
+ "#,
+ context_import = context_import,
+ name_upper = name_upper,
+ );
+ }
+
+ write_file(lint_file_path.as_path(), lint_file_contents)?;
+ println!("Generated lint file: `clippy_lints/src/{}/{}.rs`", ty, lint.name);
+ println!(
+ "Be sure to add a call to `{}::check` in `clippy_lints/src/{}/mod.rs`!",
+ lint.name, ty
+ );
+
+ Ok(())
+}
+
+#[allow(clippy::too_many_lines)]
+fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str> {
+ use super::update_lints::{match_tokens, LintDeclSearchResult};
+ use rustc_lexer::TokenKind;
+
+ let lint_name_upper = lint.name.to_uppercase();
+
+ let mut file_contents = fs::read_to_string(path)?;
+ assert!(
+ !file_contents.contains(&lint_name_upper),
+ "Lint `{}` already defined in `{}`",
+ lint.name,
+ path.display()
+ );
+
+ let mut offset = 0usize;
+ let mut last_decl_curly_offset = None;
+ let mut lint_context = None;
+
+ let mut iter = rustc_lexer::tokenize(&file_contents).map(|t| {
+ let range = offset..offset + t.len;
+ offset = range.end;
+
+ LintDeclSearchResult {
+ token_kind: t.kind,
+ content: &file_contents[range.clone()],
+ range,
+ }
+ });
+
+ // Find both the last lint declaration (declare_clippy_lint!) and the lint pass impl
+ while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token_kind == TokenKind::Ident) {
+ let mut iter = iter
+ .by_ref()
+ .filter(|t| !matches!(t.token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. }));
+
+ match content {
+ "declare_clippy_lint" => {
+ // matches `!{`
+ match_tokens!(iter, Bang OpenBrace);
+ if let Some(LintDeclSearchResult { range, .. }) =
+ iter.find(|result| result.token_kind == TokenKind::CloseBrace)
+ {
+ last_decl_curly_offset = Some(range.end);
+ }
+ },
+ "impl" => {
+ let mut token = iter.next();
+ match token {
+ // matches <'foo>
+ Some(LintDeclSearchResult {
+ token_kind: TokenKind::Lt,
+ ..
+ }) => {
+ match_tokens!(iter, Lifetime { .. } Gt);
+ token = iter.next();
+ },
+ None => break,
+ _ => {},
+ }
+
+ if let Some(LintDeclSearchResult {
+ token_kind: TokenKind::Ident,
+ content,
+ ..
+ }) = token
+ {
+ // Get the appropriate lint context struct
+ lint_context = match content {
+ "LateLintPass" => Some("LateContext"),
+ "EarlyLintPass" => Some("EarlyContext"),
+ _ => continue,
+ };
+ }
+ },
+ _ => {},
+ }
+ }
+
+ drop(iter);
+
+ let last_decl_curly_offset =
+ last_decl_curly_offset.unwrap_or_else(|| panic!("No lint declarations found in `{}`", path.display()));
+ let lint_context =
+ lint_context.unwrap_or_else(|| panic!("No lint pass implementation found in `{}`", path.display()));
+
+ // Add the lint declaration to `mod.rs`
+ file_contents.replace_range(
+ // Remove the trailing newline, which should always be present
+ last_decl_curly_offset..=last_decl_curly_offset,
+ &format!("\n\n{}", get_lint_declaration(&lint_name_upper, lint.category)),
+ );
+
+ // Add the lint to `impl_lint_pass`/`declare_lint_pass`
+ let impl_lint_pass_start = file_contents.find("impl_lint_pass!").unwrap_or_else(|| {
+ file_contents
+ .find("declare_lint_pass!")
+ .unwrap_or_else(|| panic!("failed to find `impl_lint_pass`/`declare_lint_pass`"))
+ });
+
+ let mut arr_start = file_contents[impl_lint_pass_start..].find('[').unwrap_or_else(|| {
+ panic!("malformed `impl_lint_pass`/`declare_lint_pass`");
+ });
+
+ arr_start += impl_lint_pass_start;
+
+ let mut arr_end = file_contents[arr_start..]
+ .find(']')
+ .expect("failed to find `impl_lint_pass` terminator");
+
+ arr_end += arr_start;
+
+ let mut arr_content = file_contents[arr_start + 1..arr_end].to_string();
+ arr_content.retain(|c| !c.is_whitespace());
+
+ let mut new_arr_content = String::new();
+ for ident in arr_content
+ .split(',')
+ .chain(std::iter::once(&*lint_name_upper))
+ .filter(|s| !s.is_empty())
+ {
+ let _ = write!(new_arr_content, "\n {},", ident);
+ }
+ new_arr_content.push('\n');
+
+ file_contents.replace_range(arr_start + 1..arr_end, &new_arr_content);
+
+ // Just add the mod declaration at the top, it'll be fixed by rustfmt
+ file_contents.insert_str(0, &format!("mod {};\n", &lint.name));
+
+ let mut file = OpenOptions::new()
+ .write(true)
+ .truncate(true)
+ .open(path)
+ .context(format!("trying to open: `{}`", path.display()))?;
+ file.write_all(file_contents.as_bytes())
+ .context(format!("writing to file: `{}`", path.display()))?;
+
+ Ok(lint_context)
+}
+
+#[test]
+fn test_camel_case() {
+ let s = "a_lint";
+ let s2 = to_camel_case(s);
+ assert_eq!(s2, "ALint");
+
+ let name = "a_really_long_new_lint";
+ let name2 = to_camel_case(name);
+ assert_eq!(name2, "AReallyLongNewLint");
+
+ let name3 = "lint__name";
+ let name4 = to_camel_case(name3);
+ assert_eq!(name4, "LintName");
+}
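For reference, the `parse_manifest` helper above turns a `version = "0.1.64"` line from Clippy's `Cargo.toml` into the stabilization version `1.64.0`. A minimal standalone sketch of the same string handling (the input value here is written out by hand):

    fn main() {
        let value = "\"0.1.64\"";                   // the raw value after `version =`
        let unquoted = &value[1..value.len() - 1];  // strip the surrounding quotes -> 0.1.64
        let (zero, rest) = unquoted.split_once('.').unwrap();
        assert_eq!(zero, "0");                      // Clippy's own version always starts with `0.`
        let (minor, patch) = rest.split_once('.').unwrap();
        assert_eq!(format!("{}.{}.0", minor, patch), "1.64.0");
    }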
diff --git a/src/tools/clippy/clippy_dev/src/serve.rs b/src/tools/clippy/clippy_dev/src/serve.rs
new file mode 100644
index 000000000..f15f24da9
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/serve.rs
@@ -0,0 +1,65 @@
+use std::ffi::OsStr;
+use std::num::ParseIntError;
+use std::path::Path;
+use std::process::Command;
+use std::thread;
+use std::time::{Duration, SystemTime};
+
+/// # Panics
+///
+/// Panics if the python commands could not be spawned
+pub fn run(port: u16, lint: Option<&String>) -> ! {
+ let mut url = Some(match lint {
+ None => format!("http://localhost:{}", port),
+ Some(lint) => format!("http://localhost:{}/#{}", port, lint),
+ });
+
+ loop {
+ if mtime("util/gh-pages/lints.json") < mtime("clippy_lints/src") {
+ Command::new("cargo")
+ .arg("collect-metadata")
+ .spawn()
+ .unwrap()
+ .wait()
+ .unwrap();
+ }
+ if let Some(url) = url.take() {
+ thread::spawn(move || {
+ Command::new("python3")
+ .arg("-m")
+ .arg("http.server")
+ .arg(port.to_string())
+ .current_dir("util/gh-pages")
+ .spawn()
+ .unwrap();
+ // Give some time for python to start
+ thread::sleep(Duration::from_millis(500));
+ // Launch browser after first export.py has completed and http.server is up
+ let _result = opener::open(url);
+ });
+ }
+ thread::sleep(Duration::from_millis(1000));
+ }
+}
+
+fn mtime(path: impl AsRef<Path>) -> SystemTime {
+ let path = path.as_ref();
+ if path.is_dir() {
+ path.read_dir()
+ .into_iter()
+ .flatten()
+ .flatten()
+ .map(|entry| mtime(&entry.path()))
+ .max()
+ .unwrap_or(SystemTime::UNIX_EPOCH)
+ } else {
+ path.metadata()
+ .and_then(|metadata| metadata.modified())
+ .unwrap_or(SystemTime::UNIX_EPOCH)
+ }
+}
+
+#[allow(clippy::missing_errors_doc)]
+pub fn validate_port(arg: &OsStr) -> Result<(), ParseIntError> {
+ arg.to_string_lossy().parse::<u16>().map(|_| ())
+}
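The `validate_port` helper above only checks that the argument parses as a `u16`; a standalone sketch of that behaviour (the inputs are made up):

    use std::ffi::OsStr;
    use std::num::ParseIntError;

    fn validate_port(arg: &OsStr) -> Result<(), ParseIntError> {
        arg.to_string_lossy().parse::<u16>().map(|_| ())
    }

    fn main() {
        assert!(validate_port(OsStr::new("8000")).is_ok());
        assert!(validate_port(OsStr::new("70000")).is_err()); // larger than u16::MAX
    }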
diff --git a/src/tools/clippy/clippy_dev/src/setup/git_hook.rs b/src/tools/clippy/clippy_dev/src/setup/git_hook.rs
new file mode 100644
index 000000000..3fbb77d59
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/setup/git_hook.rs
@@ -0,0 +1,85 @@
+use std::fs;
+use std::path::Path;
+
+use super::verify_inside_clippy_dir;
+
+/// Rust's setup uses `git rev-parse --git-common-dir` to get the root directory of the repo.
+/// I've decided against this for the sake of simplicity and to make sure that it doesn't install
+/// the hook if `clippy_dev` is used inside the rust tree. The hook also references this tool
+/// for formatting and should therefore only be used in a normal clone of clippy.
+const REPO_GIT_DIR: &str = ".git";
+const HOOK_SOURCE_FILE: &str = "util/etc/pre-commit.sh";
+const HOOK_TARGET_FILE: &str = ".git/hooks/pre-commit";
+
+pub fn install_hook(force_override: bool) {
+ if !check_precondition(force_override) {
+ return;
+ }
+
+ // So, a little bit of a funny story: Git on unix requires the pre-commit file
+ // to have the `execute` permission set. The Rust functions for modifying
+ // these flags don't seem to work when executed with normal user permissions.
+ //
+ // However, there is a little hack that is also used by Rust itself in its
+ // setup script. Git preserves the `execute` flag when syncing files. This means
+ // that we can check in a file with execution permissions and then sync it to create
+ // a file with the flag set. We then copy this file here. The copy also
+ // carries over the `execute` permission.
+ match fs::copy(HOOK_SOURCE_FILE, HOOK_TARGET_FILE) {
+ Ok(_) => {
+ println!("info: the hook can be removed with `cargo dev remove git-hook`");
+ println!("git hook successfully installed");
+ },
+ Err(err) => eprintln!(
+ "error: unable to copy `{}` to `{}` ({})",
+ HOOK_SOURCE_FILE, HOOK_TARGET_FILE, err
+ ),
+ }
+}
+
+fn check_precondition(force_override: bool) -> bool {
+ if !verify_inside_clippy_dir() {
+ return false;
+ }
+
+ // Make sure that we can find the git repository
+ let git_path = Path::new(REPO_GIT_DIR);
+ if !git_path.exists() || !git_path.is_dir() {
+ eprintln!("error: clippy_dev was unable to find the `.git` directory");
+ return false;
+ }
+
+ // Make sure that we don't override an existing hook by accident
+ let path = Path::new(HOOK_TARGET_FILE);
+ if path.exists() {
+ if force_override {
+ return delete_git_hook_file(path);
+ }
+
+ eprintln!("error: there is already a pre-commit hook installed");
+ println!("info: use the `--force-override` flag to override the existing hook");
+ return false;
+ }
+
+ true
+}
+
+pub fn remove_hook() {
+ let path = Path::new(HOOK_TARGET_FILE);
+ if path.exists() {
+ if delete_git_hook_file(path) {
+ println!("git hook successfully removed");
+ }
+ } else {
+ println!("no pre-commit hook was found");
+ }
+}
+
+fn delete_git_hook_file(path: &Path) -> bool {
+ if let Err(err) = fs::remove_file(path) {
+ eprintln!("error: unable to delete existing pre-commit git hook ({})", err);
+ false
+ } else {
+ true
+ }
+}
diff --git a/src/tools/clippy/clippy_dev/src/setup/intellij.rs b/src/tools/clippy/clippy_dev/src/setup/intellij.rs
new file mode 100644
index 000000000..bf741e6d1
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/setup/intellij.rs
@@ -0,0 +1,223 @@
+use std::fs;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+// This module takes an absolute path to a rustc repo and alters the dependencies to point towards
+// the respective rustc subcrates instead of using extern crate xyz.
+// This allows IntelliJ to analyze rustc internals and show proper information inside Clippy
+// code. See https://github.com/rust-lang/rust-clippy/issues/5514 for details
+
+const RUSTC_PATH_SECTION: &str = "[target.'cfg(NOT_A_PLATFORM)'.dependencies]";
+const DEPENDENCIES_SECTION: &str = "[dependencies]";
+
+const CLIPPY_PROJECTS: &[ClippyProjectInfo] = &[
+ ClippyProjectInfo::new("root", "Cargo.toml", "src/driver.rs"),
+ ClippyProjectInfo::new("clippy_lints", "clippy_lints/Cargo.toml", "clippy_lints/src/lib.rs"),
+ ClippyProjectInfo::new("clippy_utils", "clippy_utils/Cargo.toml", "clippy_utils/src/lib.rs"),
+];
+
+/// Stores the Clippy project information needed to later inject the rustc dependencies into each project.
+struct ClippyProjectInfo {
+ /// Only used to display information to the user
+ name: &'static str,
+ cargo_file: &'static str,
+ lib_rs_file: &'static str,
+}
+
+impl ClippyProjectInfo {
+ const fn new(name: &'static str, cargo_file: &'static str, lib_rs_file: &'static str) -> Self {
+ Self {
+ name,
+ cargo_file,
+ lib_rs_file,
+ }
+ }
+}
+
+pub fn setup_rustc_src(rustc_path: &str) {
+ let rustc_source_dir = match check_and_get_rustc_dir(rustc_path) {
+ Ok(path) => path,
+ Err(_) => return,
+ };
+
+ for project in CLIPPY_PROJECTS {
+ if inject_deps_into_project(&rustc_source_dir, project).is_err() {
+ return;
+ }
+ }
+
+ println!("info: the source paths can be removed again with `cargo dev remove intellij`");
+}
+
+fn check_and_get_rustc_dir(rustc_path: &str) -> Result<PathBuf, ()> {
+ let mut path = PathBuf::from(rustc_path);
+
+ if path.is_relative() {
+ match path.canonicalize() {
+ Ok(absolute_path) => {
+ println!("info: the rustc path was resolved to: `{}`", absolute_path.display());
+ path = absolute_path;
+ },
+ Err(err) => {
+ eprintln!("error: unable to get the absolute path of rustc ({})", err);
+ return Err(());
+ },
+ };
+ }
+
+ let path = path.join("compiler");
+ println!("info: looking for compiler sources at: {}", path.display());
+
+ if !path.exists() {
+ eprintln!("error: the given path does not exist");
+ return Err(());
+ }
+
+ if !path.is_dir() {
+ eprintln!("error: the given path is not a directory");
+ return Err(());
+ }
+
+ Ok(path)
+}
+
+fn inject_deps_into_project(rustc_source_dir: &Path, project: &ClippyProjectInfo) -> Result<(), ()> {
+ let cargo_content = read_project_file(project.cargo_file)?;
+ let lib_content = read_project_file(project.lib_rs_file)?;
+
+ if inject_deps_into_manifest(rustc_source_dir, project.cargo_file, &cargo_content, &lib_content).is_err() {
+ eprintln!(
+ "error: unable to inject dependencies into {} with the Cargo file {}",
+ project.name, project.cargo_file
+ );
+ Err(())
+ } else {
+ Ok(())
+ }
+}
+
+/// `clippy_dev` expects to be executed in the root directory of Clippy. This function
+/// loads the given file or returns an error. Having it in this extra function ensures
+/// that the error message looks nice.
+fn read_project_file(file_path: &str) -> Result<String, ()> {
+ let path = Path::new(file_path);
+ if !path.exists() {
+ eprintln!("error: unable to find the file `{}`", file_path);
+ return Err(());
+ }
+
+ match fs::read_to_string(path) {
+ Ok(content) => Ok(content),
+ Err(err) => {
+ eprintln!("error: the file `{}` could not be read ({})", file_path, err);
+ Err(())
+ },
+ }
+}
+
+fn inject_deps_into_manifest(
+ rustc_source_dir: &Path,
+ manifest_path: &str,
+ cargo_toml: &str,
+ lib_rs: &str,
+) -> std::io::Result<()> {
+ // do not inject deps if we have already done so
+ if cargo_toml.contains(RUSTC_PATH_SECTION) {
+ eprintln!(
+ "warn: dependencies are already setup inside {}, skipping file",
+ manifest_path
+ );
+ return Ok(());
+ }
+
+ let extern_crates = lib_rs
+ .lines()
+ // only take dependencies starting with `rustc_`
+ .filter(|line| line.starts_with("extern crate rustc_"))
+ // we have something like "extern crate foo;", we only care about the "foo"
+ // extern crate rustc_middle;
+ // ^^^^^^^^^^^^
+ .map(|s| &s[13..(s.len() - 1)]);
+
+ let new_deps = extern_crates.map(|dep| {
+ // format the dependencies that are going to be put inside the Cargo.toml
+ format!(
+ "{dep} = {{ path = \"{source_path}/{dep}\" }}\n",
+ dep = dep,
+ source_path = rustc_source_dir.display()
+ )
+ });
+
+ // format a new [dependencies]-block with the new deps we need to inject
+ let mut all_deps = String::from("[target.'cfg(NOT_A_PLATFORM)'.dependencies]\n");
+ new_deps.for_each(|dep_line| {
+ all_deps.push_str(&dep_line);
+ });
+ all_deps.push_str("\n[dependencies]\n");
+
+ // replace "[dependencies]" with
+ // [dependencies]
+ // dep1 = { path = ... }
+ // dep2 = { path = ... }
+ // etc
+ let new_manifest = cargo_toml.replacen("[dependencies]\n", &all_deps, 1);
+
+ // println!("{}", new_manifest);
+ let mut file = File::create(manifest_path)?;
+ file.write_all(new_manifest.as_bytes())?;
+
+ println!("info: successfully setup dependencies inside {}", manifest_path);
+
+ Ok(())
+}
+
+pub fn remove_rustc_src() {
+ for project in CLIPPY_PROJECTS {
+ remove_rustc_src_from_project(project);
+ }
+}
+
+fn remove_rustc_src_from_project(project: &ClippyProjectInfo) -> bool {
+ let mut cargo_content = if let Ok(content) = read_project_file(project.cargo_file) {
+ content
+ } else {
+ return false;
+ };
+ let section_start = if let Some(section_start) = cargo_content.find(RUSTC_PATH_SECTION) {
+ section_start
+ } else {
+ println!(
+ "info: dependencies could not be found in `{}` for {}, skipping file",
+ project.cargo_file, project.name
+ );
+ return true;
+ };
+
+ let end_point = if let Some(end_point) = cargo_content.find(DEPENDENCIES_SECTION) {
+ end_point
+ } else {
+ eprintln!(
+ "error: the end of the rustc dependencies section could not be found in `{}`",
+ project.cargo_file
+ );
+ return false;
+ };
+
+ cargo_content.replace_range(section_start..end_point, "");
+
+ match File::create(project.cargo_file) {
+ Ok(mut file) => {
+ file.write_all(cargo_content.as_bytes()).unwrap();
+ println!("info: successfully removed dependencies inside {}", project.cargo_file);
+ true
+ },
+ Err(err) => {
+ eprintln!(
+ "error: unable to open file `{}` to remove rustc dependencies for {} ({})",
+ project.cargo_file, project.name, err
+ );
+ false
+ },
+ }
+}
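To make the rewriting in `inject_deps_into_manifest` above concrete: each `extern crate rustc_*;` line in the project's `lib.rs` becomes a path dependency pointing into the rustc checkout. A small standalone sketch (the checkout path is hypothetical, the slicing matches the code above):

    fn main() {
        let line = "extern crate rustc_middle;";
        let dep = &line[13..line.len() - 1]; // "rustc_middle"
        let rustc_source_dir = "/home/user/rust/compiler";
        let injected = format!("{} = {{ path = \"{}/{}\" }}\n", dep, rustc_source_dir, dep);
        assert_eq!(
            injected,
            "rustc_middle = { path = \"/home/user/rust/compiler/rustc_middle\" }\n"
        );
    }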
diff --git a/src/tools/clippy/clippy_dev/src/setup/mod.rs b/src/tools/clippy/clippy_dev/src/setup/mod.rs
new file mode 100644
index 000000000..f691ae4fa
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/setup/mod.rs
@@ -0,0 +1,23 @@
+pub mod git_hook;
+pub mod intellij;
+pub mod vscode;
+
+use std::path::Path;
+
+const CLIPPY_DEV_DIR: &str = "clippy_dev";
+
+/// This function verifies that the tool is being executed in the clippy directory.
+/// This is useful to ensure that setups only modify Clippy's resources. The verification
+/// is done by checking that `clippy_dev` is a subdirectory of the current directory.
+///
+/// It will print an error message and return `false` if the directory could not be
+/// verified.
+fn verify_inside_clippy_dir() -> bool {
+ let path = Path::new(CLIPPY_DEV_DIR);
+ if path.exists() && path.is_dir() {
+ true
+ } else {
+ eprintln!("error: unable to verify that the working directory is clippy's directory");
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_dev/src/setup/vscode.rs b/src/tools/clippy/clippy_dev/src/setup/vscode.rs
new file mode 100644
index 000000000..d59001b2c
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/setup/vscode.rs
@@ -0,0 +1,104 @@
+use std::fs;
+use std::path::Path;
+
+use super::verify_inside_clippy_dir;
+
+const VSCODE_DIR: &str = ".vscode";
+const TASK_SOURCE_FILE: &str = "util/etc/vscode-tasks.json";
+const TASK_TARGET_FILE: &str = ".vscode/tasks.json";
+
+pub fn install_tasks(force_override: bool) {
+ if !check_install_precondition(force_override) {
+ return;
+ }
+
+ match fs::copy(TASK_SOURCE_FILE, TASK_TARGET_FILE) {
+ Ok(_) => {
+ println!("info: the task file can be removed with `cargo dev remove vscode-tasks`");
+ println!("vscode tasks successfully installed");
+ },
+ Err(err) => eprintln!(
+ "error: unable to copy `{}` to `{}` ({})",
+ TASK_SOURCE_FILE, TASK_TARGET_FILE, err
+ ),
+ }
+}
+
+fn check_install_precondition(force_override: bool) -> bool {
+ if !verify_inside_clippy_dir() {
+ return false;
+ }
+
+ let vs_dir_path = Path::new(VSCODE_DIR);
+ if vs_dir_path.exists() {
+ // verify the target will be valid
+ if !vs_dir_path.is_dir() {
+ eprintln!("error: the `.vscode` path exists but seems to be a file");
+ return false;
+ }
+
+ // make sure that we don't override any existing tasks by accident
+ let path = Path::new(TASK_TARGET_FILE);
+ if path.exists() {
+ if force_override {
+ return delete_vs_task_file(path);
+ }
+
+ eprintln!(
+ "error: there is already a `task.json` file inside the `{}` directory",
+ VSCODE_DIR
+ );
+ println!("info: use the `--force-override` flag to override the existing `task.json` file");
+ return false;
+ }
+ } else {
+ match fs::create_dir(vs_dir_path) {
+ Ok(_) => {
+ println!("info: created `{}` directory for clippy", VSCODE_DIR);
+ },
+ Err(err) => {
+ eprintln!(
+ "error: the task target directory `{}` could not be created ({})",
+ VSCODE_DIR, err
+ );
+ },
+ }
+ }
+
+ true
+}
+
+pub fn remove_tasks() {
+ let path = Path::new(TASK_TARGET_FILE);
+ if path.exists() {
+ if delete_vs_task_file(path) {
+ try_delete_vs_directory_if_empty();
+ println!("vscode tasks successfully removed");
+ }
+ } else {
+ println!("no vscode tasks were found");
+ }
+}
+
+fn delete_vs_task_file(path: &Path) -> bool {
+ if let Err(err) = fs::remove_file(path) {
+ eprintln!("error: unable to delete the existing `tasks.json` file ({})", err);
+ return false;
+ }
+
+ true
+}
+
+/// This function will try to delete the `.vscode` directory if it's empty.
+/// It may fail silently.
+fn try_delete_vs_directory_if_empty() {
+ let path = Path::new(VSCODE_DIR);
+ if path.read_dir().map_or(false, |mut iter| iter.next().is_none()) {
+ // The directory is empty. We just try to delete it but allow a silent
+ // failure, as an empty `.vscode` directory is still valid
+ let _silence_result = fs::remove_dir(path);
+ } else {
+ // The directory is not empty or could not be read. Either way don't take
+ // any further actions
+ }
+}
diff --git a/src/tools/clippy/clippy_dev/src/update_lints.rs b/src/tools/clippy/clippy_dev/src/update_lints.rs
new file mode 100644
index 000000000..aed38bc28
--- /dev/null
+++ b/src/tools/clippy/clippy_dev/src/update_lints.rs
@@ -0,0 +1,1277 @@
+use crate::clippy_project_root;
+use aho_corasick::AhoCorasickBuilder;
+use indoc::writedoc;
+use itertools::Itertools;
+use rustc_lexer::{tokenize, unescape, LiteralKind, TokenKind};
+use std::collections::{HashMap, HashSet};
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::fs::{self, OpenOptions};
+use std::io::{self, Read, Seek, SeekFrom, Write as _};
+use std::ops::Range;
+use std::path::{Path, PathBuf};
+use walkdir::{DirEntry, WalkDir};
+
+const GENERATED_FILE_COMMENT: &str = "// This file was generated by `cargo dev update_lints`.\n\
+ // Use that command to update this file and do not edit by hand.\n\
+ // Manual edits will be overwritten.\n\n";
+
+const DOCS_LINK: &str = "https://rust-lang.github.io/rust-clippy/master/index.html";
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum UpdateMode {
+ Check,
+ Change,
+}
+
+/// Runs the `update_lints` command.
+///
+/// This updates various generated values from the lint source code.
+///
+/// `update_mode` indicates if the files should be updated or if updates should be checked for.
+///
+/// # Panics
+///
+/// Panics if a file path could not be read from or written to
+pub fn update(update_mode: UpdateMode) {
+ let (lints, deprecated_lints, renamed_lints) = gather_all();
+ generate_lint_files(update_mode, &lints, &deprecated_lints, &renamed_lints);
+}
+
+fn generate_lint_files(
+ update_mode: UpdateMode,
+ lints: &[Lint],
+ deprecated_lints: &[DeprecatedLint],
+ renamed_lints: &[RenamedLint],
+) {
+ let internal_lints = Lint::internal_lints(lints);
+ let usable_lints = Lint::usable_lints(lints);
+ let mut sorted_usable_lints = usable_lints.clone();
+ sorted_usable_lints.sort_by_key(|lint| lint.name.clone());
+
+ replace_region_in_file(
+ update_mode,
+ Path::new("README.md"),
+ "[There are over ",
+ " lints included in this crate!]",
+ |res| {
+ write!(res, "{}", round_to_fifty(usable_lints.len())).unwrap();
+ },
+ );
+
+ replace_region_in_file(
+ update_mode,
+ Path::new("book/src/README.md"),
+ "[There are over ",
+ " lints included in this crate!]",
+ |res| {
+ write!(res, "{}", round_to_fifty(usable_lints.len())).unwrap();
+ },
+ );
+
+ replace_region_in_file(
+ update_mode,
+ Path::new("CHANGELOG.md"),
+ "<!-- begin autogenerated links to lint list -->\n",
+ "<!-- end autogenerated links to lint list -->",
+ |res| {
+ for lint in usable_lints
+ .iter()
+ .map(|l| &*l.name)
+ .chain(deprecated_lints.iter().map(|l| &*l.name))
+ .chain(
+ renamed_lints
+ .iter()
+ .map(|l| l.old_name.strip_prefix("clippy::").unwrap_or(&l.old_name)),
+ )
+ .sorted()
+ {
+ writeln!(res, "[`{}`]: {}#{}", lint, DOCS_LINK, lint).unwrap();
+ }
+ },
+ );
+
+ // This has to be in lib.rs, otherwise rustfmt doesn't work
+ replace_region_in_file(
+ update_mode,
+ Path::new("clippy_lints/src/lib.rs"),
+ "// begin lints modules, do not remove this comment, it’s used in `update_lints`\n",
+ "// end lints modules, do not remove this comment, it’s used in `update_lints`",
+ |res| {
+ for lint_mod in usable_lints.iter().map(|l| &l.module).unique().sorted() {
+ writeln!(res, "mod {};", lint_mod).unwrap();
+ }
+ },
+ );
+
+ process_file(
+ "clippy_lints/src/lib.register_lints.rs",
+ update_mode,
+ &gen_register_lint_list(internal_lints.iter(), usable_lints.iter()),
+ );
+ process_file(
+ "clippy_lints/src/lib.deprecated.rs",
+ update_mode,
+ &gen_deprecated(deprecated_lints),
+ );
+
+ let all_group_lints = usable_lints.iter().filter(|l| {
+ matches!(
+ &*l.group,
+ "correctness" | "suspicious" | "style" | "complexity" | "perf"
+ )
+ });
+ let content = gen_lint_group_list("all", all_group_lints);
+ process_file("clippy_lints/src/lib.register_all.rs", update_mode, &content);
+
+ for (lint_group, lints) in Lint::by_lint_group(usable_lints.into_iter().chain(internal_lints)) {
+ let content = gen_lint_group_list(&lint_group, lints.iter());
+ process_file(
+ &format!("clippy_lints/src/lib.register_{}.rs", lint_group),
+ update_mode,
+ &content,
+ );
+ }
+
+ let content = gen_deprecated_lints_test(deprecated_lints);
+ process_file("tests/ui/deprecated.rs", update_mode, &content);
+
+ let content = gen_renamed_lints_test(renamed_lints);
+ process_file("tests/ui/rename.rs", update_mode, &content);
+}
+
+pub fn print_lints() {
+ let (lint_list, _, _) = gather_all();
+ let usable_lints = Lint::usable_lints(&lint_list);
+ let usable_lint_count = usable_lints.len();
+ let grouped_by_lint_group = Lint::by_lint_group(usable_lints.into_iter());
+
+ for (lint_group, mut lints) in grouped_by_lint_group {
+ println!("\n## {}", lint_group);
+
+ lints.sort_by_key(|l| l.name.clone());
+
+ for lint in lints {
+ println!("* [{}]({}#{}) ({})", lint.name, DOCS_LINK, lint.name, lint.desc);
+ }
+ }
+
+ println!("there are {} lints", usable_lint_count);
+}
+
+/// Runs the `rename_lint` command.
+///
+/// This does the following:
+/// * Adds an entry to `renamed_lints.rs`.
+/// * Renames all lint attributes to the new name (e.g. `#[allow(clippy::lint_name)]`).
+/// * Renames the lint struct to the new name.
+/// * Renames the module containing the lint struct to the new name if it shares a name with the
+/// lint.
+///
+/// # Panics
+/// Panics for the following conditions:
+/// * If a file path could not be read from or written to
+/// * If either lint name has a prefix
+/// * If `old_name` doesn't name an existing lint.
+/// * If `old_name` names a deprecated or renamed lint.
+#[allow(clippy::too_many_lines)]
+pub fn rename(old_name: &str, new_name: &str, uplift: bool) {
+ if let Some((prefix, _)) = old_name.split_once("::") {
+ panic!("`{}` should not contain the `{}` prefix", old_name, prefix);
+ }
+ if let Some((prefix, _)) = new_name.split_once("::") {
+ panic!("`{}` should not contain the `{}` prefix", new_name, prefix);
+ }
+
+ let (mut lints, deprecated_lints, mut renamed_lints) = gather_all();
+ let mut old_lint_index = None;
+ let mut found_new_name = false;
+ for (i, lint) in lints.iter().enumerate() {
+ if lint.name == old_name {
+ old_lint_index = Some(i);
+ } else if lint.name == new_name {
+ found_new_name = true;
+ }
+ }
+ let old_lint_index = old_lint_index.unwrap_or_else(|| panic!("could not find lint `{}`", old_name));
+
+ let lint = RenamedLint {
+ old_name: format!("clippy::{}", old_name),
+ new_name: if uplift {
+ new_name.into()
+ } else {
+ format!("clippy::{}", new_name)
+ },
+ };
+
+ // Renamed lints and deprecated lints shouldn't have been found in the lint list, but check just in
+ // case.
+ assert!(
+ !renamed_lints.iter().any(|l| lint.old_name == l.old_name),
+ "`{}` has already been renamed",
+ old_name
+ );
+ assert!(
+ !deprecated_lints.iter().any(|l| lint.old_name == l.name),
+ "`{}` has already been deprecated",
+ old_name
+ );
+
+ // Update all lint level attributes. (`clippy::lint_name`)
+ for file in WalkDir::new(clippy_project_root())
+ .into_iter()
+ .map(Result::unwrap)
+ .filter(|f| {
+ let name = f.path().file_name();
+ let ext = f.path().extension();
+ (ext == Some(OsStr::new("rs")) || ext == Some(OsStr::new("fixed")))
+ && name != Some(OsStr::new("rename.rs"))
+ && name != Some(OsStr::new("renamed_lints.rs"))
+ })
+ {
+ rewrite_file(file.path(), |s| {
+ replace_ident_like(s, &[(&lint.old_name, &lint.new_name)])
+ });
+ }
+
+ renamed_lints.push(lint);
+ renamed_lints.sort_by(|lhs, rhs| {
+ lhs.new_name
+ .starts_with("clippy::")
+ .cmp(&rhs.new_name.starts_with("clippy::"))
+ .reverse()
+ .then_with(|| lhs.old_name.cmp(&rhs.old_name))
+ });
+
+ write_file(
+ Path::new("clippy_lints/src/renamed_lints.rs"),
+ &gen_renamed_lints_list(&renamed_lints),
+ );
+
+ if uplift {
+ write_file(Path::new("tests/ui/rename.rs"), &gen_renamed_lints_test(&renamed_lints));
+ println!(
+ "`{}` has be uplifted. All the code inside `clippy_lints` related to it needs to be removed manually.",
+ old_name
+ );
+ } else if found_new_name {
+ write_file(Path::new("tests/ui/rename.rs"), &gen_renamed_lints_test(&renamed_lints));
+ println!(
+ "`{}` is already defined. The old linting code inside `clippy_lints` needs to be updated/removed manually.",
+ new_name
+ );
+ } else {
+ // Rename the lint struct and source files sharing a name with the lint.
+ let lint = &mut lints[old_lint_index];
+ let old_name_upper = old_name.to_uppercase();
+ let new_name_upper = new_name.to_uppercase();
+ lint.name = new_name.into();
+
+ // Rename test files. Only rename `.stderr` and `.fixed` files if the new test name doesn't exist.
+ if try_rename_file(
+ Path::new(&format!("tests/ui/{}.rs", old_name)),
+ Path::new(&format!("tests/ui/{}.rs", new_name)),
+ ) {
+ try_rename_file(
+ Path::new(&format!("tests/ui/{}.stderr", old_name)),
+ Path::new(&format!("tests/ui/{}.stderr", new_name)),
+ );
+ try_rename_file(
+ Path::new(&format!("tests/ui/{}.fixed", old_name)),
+ Path::new(&format!("tests/ui/{}.fixed", new_name)),
+ );
+ }
+
+ // Try to rename the file containing the lint if the file name matches the lint's name.
+ let replacements;
+ let replacements = if lint.module == old_name
+ && try_rename_file(
+ Path::new(&format!("clippy_lints/src/{}.rs", old_name)),
+ Path::new(&format!("clippy_lints/src/{}.rs", new_name)),
+ ) {
+ // Edit the module name in the lint list. Note there could be multiple lints.
+ for lint in lints.iter_mut().filter(|l| l.module == old_name) {
+ lint.module = new_name.into();
+ }
+ replacements = [(&*old_name_upper, &*new_name_upper), (old_name, new_name)];
+ replacements.as_slice()
+ } else if !lint.module.contains("::")
+ // Catch cases like `methods/lint_name.rs` where the lint is stored in `methods/mod.rs`
+ && try_rename_file(
+ Path::new(&format!("clippy_lints/src/{}/{}.rs", lint.module, old_name)),
+ Path::new(&format!("clippy_lints/src/{}/{}.rs", lint.module, new_name)),
+ )
+ {
+ // Edit the module name in the lint list. Note there could be multiple lints, or none.
+ let renamed_mod = format!("{}::{}", lint.module, old_name);
+ for lint in lints.iter_mut().filter(|l| l.module == renamed_mod) {
+ lint.module = format!("{}::{}", lint.module, new_name);
+ }
+ replacements = [(&*old_name_upper, &*new_name_upper), (old_name, new_name)];
+ replacements.as_slice()
+ } else {
+ replacements = [(&*old_name_upper, &*new_name_upper), ("", "")];
+ &replacements[0..1]
+ };
+
+ // Don't change `clippy_utils/src/renamed_lints.rs` here as it would try to edit the lint being
+ // renamed.
+ for (_, file) in clippy_lints_src_files().filter(|(rel_path, _)| rel_path != OsStr::new("renamed_lints.rs")) {
+ rewrite_file(file.path(), |s| replace_ident_like(s, replacements));
+ }
+
+ generate_lint_files(UpdateMode::Change, &lints, &deprecated_lints, &renamed_lints);
+ println!("{} has been successfully renamed", old_name);
+ }
+
+ println!("note: `cargo uitest` still needs to be run to update the test results");
+}
+
+const DEFAULT_DEPRECATION_REASON: &str = "default deprecation note";
+/// Runs the `deprecate` command
+///
+/// This does the following:
+/// * Adds an entry to `deprecated_lints.rs`.
+/// * Removes the lint declaration (and the entire file if applicable)
+///
+/// # Panics
+///
+/// If a file path could not be read from or written to
+pub fn deprecate(name: &str, reason: Option<&String>) {
+ fn finish(
+ (lints, mut deprecated_lints, renamed_lints): (Vec<Lint>, Vec<DeprecatedLint>, Vec<RenamedLint>),
+ name: &str,
+ reason: &str,
+ ) {
+ deprecated_lints.push(DeprecatedLint {
+ name: name.to_string(),
+ reason: reason.to_string(),
+ declaration_range: Range::default(),
+ });
+
+ generate_lint_files(UpdateMode::Change, &lints, &deprecated_lints, &renamed_lints);
+ println!("info: `{}` has successfully been deprecated", name);
+
+ if reason == DEFAULT_DEPRECATION_REASON {
+ println!("note: the deprecation reason must be updated in `clippy_lints/src/deprecated_lints.rs`");
+ }
+ println!("note: you must run `cargo uitest` to update the test results");
+ }
+
+ let reason = reason.map_or(DEFAULT_DEPRECATION_REASON, String::as_str);
+ let name_lower = name.to_lowercase();
+ let name_upper = name.to_uppercase();
+
+ let (mut lints, deprecated_lints, renamed_lints) = gather_all();
+ let Some(lint) = lints.iter().find(|l| l.name == name_lower) else { eprintln!("error: failed to find lint `{}`", name); return; };
+
+ let mod_path = {
+ let mut mod_path = PathBuf::from(format!("clippy_lints/src/{}", lint.module));
+ if mod_path.is_dir() {
+ mod_path = mod_path.join("mod");
+ }
+
+ mod_path.set_extension("rs");
+ mod_path
+ };
+
+ let deprecated_lints_path = &*clippy_project_root().join("clippy_lints/src/deprecated_lints.rs");
+
+ if remove_lint_declaration(&name_lower, &mod_path, &mut lints).unwrap_or(false) {
+ declare_deprecated(&name_upper, deprecated_lints_path, reason).unwrap();
+ finish((lints, deprecated_lints, renamed_lints), name, reason);
+ return;
+ }
+
+ eprintln!("error: lint not found");
+}
+
+fn remove_lint_declaration(name: &str, path: &Path, lints: &mut Vec<Lint>) -> io::Result<bool> {
+ fn remove_lint(name: &str, lints: &mut Vec<Lint>) {
+ lints.iter().position(|l| l.name == name).map(|pos| lints.remove(pos));
+ }
+
+ fn remove_test_assets(name: &str) {
+ let test_file_stem = format!("tests/ui/{}", name);
+ let path = Path::new(&test_file_stem);
+
+ // Some lints have their own directories, delete them
+ if path.is_dir() {
+ fs::remove_dir_all(path).ok();
+ return;
+ }
+
+ // Remove all related test files
+ fs::remove_file(path.with_extension("rs")).ok();
+ fs::remove_file(path.with_extension("stderr")).ok();
+ fs::remove_file(path.with_extension("fixed")).ok();
+ }
+
+ fn remove_impl_lint_pass(lint_name_upper: &str, content: &mut String) {
+ let impl_lint_pass_start = content.find("impl_lint_pass!").unwrap_or_else(|| {
+ content
+ .find("declare_lint_pass!")
+ .unwrap_or_else(|| panic!("failed to find `impl_lint_pass`"))
+ });
+ let mut impl_lint_pass_end = content[impl_lint_pass_start..]
+ .find(']')
+ .expect("failed to find `impl_lint_pass` terminator");
+
+ impl_lint_pass_end += impl_lint_pass_start;
+ if let Some(lint_name_pos) = content[impl_lint_pass_start..impl_lint_pass_end].find(&lint_name_upper) {
+ let mut lint_name_end = impl_lint_pass_start + (lint_name_pos + lint_name_upper.len());
+ for c in content[lint_name_end..impl_lint_pass_end].chars() {
+ // Remove trailing whitespace
+ if c == ',' || c.is_whitespace() {
+ lint_name_end += 1;
+ } else {
+ break;
+ }
+ }
+
+ content.replace_range(impl_lint_pass_start + lint_name_pos..lint_name_end, "");
+ }
+ }
+
+ if path.exists() {
+ if let Some(lint) = lints.iter().find(|l| l.name == name) {
+ if lint.module == name {
+ // The lint name is the same as the file, we can just delete the entire file
+ fs::remove_file(path)?;
+ } else {
+ // We can't delete the entire file, just remove the declaration
+
+ if let Some(Some("mod.rs")) = path.file_name().map(OsStr::to_str) {
+ // Remove clippy_lints/src/some_mod/some_lint.rs
+ let mut lint_mod_path = path.to_path_buf();
+ lint_mod_path.set_file_name(name);
+ lint_mod_path.set_extension("rs");
+
+ fs::remove_file(lint_mod_path).ok();
+ }
+
+ let mut content =
+ fs::read_to_string(&path).unwrap_or_else(|_| panic!("failed to read `{}`", path.to_string_lossy()));
+
+ eprintln!(
+ "warn: you will have to manually remove any code related to `{}` from `{}`",
+ name,
+ path.display()
+ );
+
+ assert!(
+ content[lint.declaration_range.clone()].contains(&name.to_uppercase()),
+ "error: `{}` does not contain lint `{}`'s declaration",
+ path.display(),
+ lint.name
+ );
+
+ // Remove lint declaration (declare_clippy_lint!)
+ content.replace_range(lint.declaration_range.clone(), "");
+
+ // Remove the module declaration (mod xyz;)
+ let mod_decl = format!("\nmod {};", name);
+ content = content.replacen(&mod_decl, "", 1);
+
+ remove_impl_lint_pass(&lint.name.to_uppercase(), &mut content);
+ fs::write(path, content).unwrap_or_else(|_| panic!("failed to write to `{}`", path.to_string_lossy()));
+ }
+
+ remove_test_assets(name);
+ remove_lint(name, lints);
+ return Ok(true);
+ }
+ }
+
+ Ok(false)
+}
+
+fn declare_deprecated(name: &str, path: &Path, reason: &str) -> io::Result<()> {
+ let mut file = OpenOptions::new().write(true).open(path)?;
+
+ file.seek(SeekFrom::End(0))?;
+
+ let version = crate::new_lint::get_stabilization_version();
+ let deprecation_reason = if reason == DEFAULT_DEPRECATION_REASON {
+ "TODO"
+ } else {
+ reason
+ };
+
+ writedoc!(
+ file,
+ "
+
+ declare_deprecated_lint! {{
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// {}
+ #[clippy::version = \"{}\"]
+ pub {},
+ \"{}\"
+ }}
+
+ ",
+ deprecation_reason,
+ version,
+ name,
+ reason,
+ )
+}
+
+/// Replace substrings if they aren't bordered by identifier characters. Returns `None` if there
+/// were no replacements.
+fn replace_ident_like(contents: &str, replacements: &[(&str, &str)]) -> Option<String> {
+ fn is_ident_char(c: u8) -> bool {
+ matches!(c, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_')
+ }
+
+ let searcher = AhoCorasickBuilder::new()
+ .dfa(true)
+ .match_kind(aho_corasick::MatchKind::LeftmostLongest)
+ .build_with_size::<u16, _, _>(replacements.iter().map(|&(x, _)| x.as_bytes()))
+ .unwrap();
+
+ let mut result = String::with_capacity(contents.len() + 1024);
+ let mut pos = 0;
+ let mut edited = false;
+ for m in searcher.find_iter(contents) {
+ let (old, new) = replacements[m.pattern()];
+ result.push_str(&contents[pos..m.start()]);
+ result.push_str(
+ if !is_ident_char(contents.as_bytes().get(m.start().wrapping_sub(1)).copied().unwrap_or(0))
+ && !is_ident_char(contents.as_bytes().get(m.end()).copied().unwrap_or(0))
+ {
+ edited = true;
+ new
+ } else {
+ old
+ },
+ );
+ pos = m.end();
+ }
+ result.push_str(&contents[pos..]);
+ edited.then_some(result)
+}
+
+fn round_to_fifty(count: usize) -> usize {
+ count / 50 * 50
+}
+
+fn process_file(path: impl AsRef<Path>, update_mode: UpdateMode, content: &str) {
+ if update_mode == UpdateMode::Check {
+ let old_content =
+ fs::read_to_string(&path).unwrap_or_else(|e| panic!("Cannot read from {}: {}", path.as_ref().display(), e));
+ if content != old_content {
+ exit_with_failure();
+ }
+ } else {
+ fs::write(&path, content.as_bytes())
+ .unwrap_or_else(|e| panic!("Cannot write to {}: {}", path.as_ref().display(), e));
+ }
+}
+
+fn exit_with_failure() {
+ println!(
+ "Not all lints defined properly. \
+ Please run `cargo dev update_lints` to make sure all lints are defined properly."
+ );
+ std::process::exit(1);
+}
+
+/// Lint data parsed from the Clippy source code.
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct Lint {
+ name: String,
+ group: String,
+ desc: String,
+ module: String,
+ declaration_range: Range<usize>,
+}
+
+impl Lint {
+ #[must_use]
+ fn new(name: &str, group: &str, desc: &str, module: &str, declaration_range: Range<usize>) -> Self {
+ Self {
+ name: name.to_lowercase(),
+ group: group.into(),
+ desc: remove_line_splices(desc),
+ module: module.into(),
+ declaration_range,
+ }
+ }
+
+ /// Returns all non-deprecated lints and non-internal lints
+ #[must_use]
+ fn usable_lints(lints: &[Self]) -> Vec<Self> {
+ lints
+ .iter()
+ .filter(|l| !l.group.starts_with("internal"))
+ .cloned()
+ .collect()
+ }
+
+ /// Returns all internal lints (not `internal_warn` lints)
+ #[must_use]
+ fn internal_lints(lints: &[Self]) -> Vec<Self> {
+ lints.iter().filter(|l| l.group == "internal").cloned().collect()
+ }
+
+ /// Returns the lints in a `HashMap`, grouped by the different lint groups
+ #[must_use]
+ fn by_lint_group(lints: impl Iterator<Item = Self>) -> HashMap<String, Vec<Self>> {
+ lints.map(|lint| (lint.group.to_string(), lint)).into_group_map()
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct DeprecatedLint {
+ name: String,
+ reason: String,
+ declaration_range: Range<usize>,
+}
+impl DeprecatedLint {
+ fn new(name: &str, reason: &str, declaration_range: Range<usize>) -> Self {
+ Self {
+ name: name.to_lowercase(),
+ reason: remove_line_splices(reason),
+ declaration_range,
+ }
+ }
+}
+
+struct RenamedLint {
+ old_name: String,
+ new_name: String,
+}
+impl RenamedLint {
+ fn new(old_name: &str, new_name: &str) -> Self {
+ Self {
+ old_name: remove_line_splices(old_name),
+ new_name: remove_line_splices(new_name),
+ }
+ }
+}
+
+/// Generates the code for registering a group
+fn gen_lint_group_list<'a>(group_name: &str, lints: impl Iterator<Item = &'a Lint>) -> String {
+ let mut details: Vec<_> = lints.map(|l| (&l.module, l.name.to_uppercase())).collect();
+ details.sort_unstable();
+
+ let mut output = GENERATED_FILE_COMMENT.to_string();
+
+ let _ = writeln!(
+ output,
+ "store.register_group(true, \"clippy::{0}\", Some(\"clippy_{0}\"), vec![",
+ group_name
+ );
+ for (module, name) in details {
+ let _ = writeln!(output, " LintId::of({}::{}),", module, name);
+ }
+ output.push_str("])\n");
+
+ output
+}
+
+/// Generates the `register_removed` code
+#[must_use]
+fn gen_deprecated(lints: &[DeprecatedLint]) -> String {
+ let mut output = GENERATED_FILE_COMMENT.to_string();
+ output.push_str("{\n");
+ for lint in lints {
+ let _ = write!(
+ output,
+ concat!(
+ " store.register_removed(\n",
+ " \"clippy::{}\",\n",
+ " \"{}\",\n",
+ " );\n"
+ ),
+ lint.name, lint.reason,
+ );
+ }
+ output.push_str("}\n");
+
+ output
+}
+
+/// Generates the code for registering lints
+#[must_use]
+fn gen_register_lint_list<'a>(
+ internal_lints: impl Iterator<Item = &'a Lint>,
+ usable_lints: impl Iterator<Item = &'a Lint>,
+) -> String {
+ let mut details: Vec<_> = internal_lints
+ .map(|l| (false, &l.module, l.name.to_uppercase()))
+ .chain(usable_lints.map(|l| (true, &l.module, l.name.to_uppercase())))
+ .collect();
+ details.sort_unstable();
+
+ let mut output = GENERATED_FILE_COMMENT.to_string();
+ output.push_str("store.register_lints(&[\n");
+
+ for (is_public, module_name, lint_name) in details {
+ if !is_public {
+ output.push_str(" #[cfg(feature = \"internal\")]\n");
+ }
+ let _ = writeln!(output, " {}::{},", module_name, lint_name);
+ }
+ output.push_str("])\n");
+
+ output
+}
+
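+/// Generates a test file that enables (`#![warn]`s) every deprecated lint, so the deprecation
+/// messages get emitted.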
+fn gen_deprecated_lints_test(lints: &[DeprecatedLint]) -> String {
+ let mut res: String = GENERATED_FILE_COMMENT.into();
+ for lint in lints {
+ writeln!(res, "#![warn(clippy::{})]", lint.name).unwrap();
+ }
+ res.push_str("\nfn main() {}\n");
+ res
+}
+
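+/// Generates a `run-rustfix` test file that `#![allow]`s each new lint name once and
+/// `#![warn]`s each old lint name once, deduplicating repeated names.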
+fn gen_renamed_lints_test(lints: &[RenamedLint]) -> String {
+ let mut seen_lints = HashSet::new();
+ let mut res: String = GENERATED_FILE_COMMENT.into();
+ res.push_str("// run-rustfix\n\n");
+ for lint in lints {
+ if seen_lints.insert(&lint.new_name) {
+ writeln!(res, "#![allow({})]", lint.new_name).unwrap();
+ }
+ }
+ seen_lints.clear();
+ for lint in lints {
+ if seen_lints.insert(&lint.old_name) {
+ writeln!(res, "#![warn({})]", lint.old_name).unwrap();
+ }
+ }
+ res.push_str("\nfn main() {}\n");
+ res
+}
+
+fn gen_renamed_lints_list(lints: &[RenamedLint]) -> String {
+ const HEADER: &str = "\
+ // This file is managed by `cargo dev rename_lint`. Prefer using that when possible.\n\n\
+ #[rustfmt::skip]\n\
+ pub static RENAMED_LINTS: &[(&str, &str)] = &[\n";
+
+ let mut res = String::from(HEADER);
+ for lint in lints {
+ writeln!(res, " (\"{}\", \"{}\"),", lint.old_name, lint.new_name).unwrap();
+ }
+ res.push_str("];\n");
+ res
+}
+
+/// Gathers all lints defined in `clippy_lints/src`
+fn gather_all() -> (Vec<Lint>, Vec<DeprecatedLint>, Vec<RenamedLint>) {
+ let mut lints = Vec::with_capacity(1000);
+ let mut deprecated_lints = Vec::with_capacity(50);
+ let mut renamed_lints = Vec::with_capacity(50);
+
+ for (rel_path, file) in clippy_lints_src_files() {
+ let path = file.path();
+ let contents =
+ fs::read_to_string(path).unwrap_or_else(|e| panic!("Cannot read from `{}`: {}", path.display(), e));
+ let module = rel_path
+ .components()
+ .map(|c| c.as_os_str().to_str().unwrap())
+ .collect::<Vec<_>>()
+ .join("::");
+
+ // If the lints are stored in mod.rs, we get the module name from
+ // the containing directory:
+ let module = if let Some(module) = module.strip_suffix("::mod.rs") {
+ module
+ } else {
+ module.strip_suffix(".rs").unwrap_or(&module)
+ };
+
+ match module {
+ "deprecated_lints" => parse_deprecated_contents(&contents, &mut deprecated_lints),
+ "renamed_lints" => parse_renamed_contents(&contents, &mut renamed_lints),
+ _ => parse_contents(&contents, module, &mut lints),
+ }
+ }
+ (lints, deprecated_lints, renamed_lints)
+}
+
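+/// Iterates over all `.rs` files under `clippy_lints/src`, yielding each file's path relative to
+/// that directory together with its `walkdir` entry.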
+fn clippy_lints_src_files() -> impl Iterator<Item = (PathBuf, DirEntry)> {
+ let root_path = clippy_project_root().join("clippy_lints/src");
+ let iter = WalkDir::new(&root_path).into_iter();
+ iter.map(Result::unwrap)
+ .filter(|f| f.path().extension() == Some(OsStr::new("rs")))
+ .map(move |f| (f.path().strip_prefix(&root_path).unwrap().to_path_buf(), f))
+}
+
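+/// Matches a fixed sequence of tokens from `$iter`, `continue`-ing the enclosing loop as soon as
+/// one of them does not match. Tokens written as `Kind(name)` additionally capture the token's
+/// text, and the macro evaluates to the tuple of all such captures. For example,
+/// `match_tokens!(iter, Ident(name) Comma)` consumes an identifier followed by a comma and
+/// captures the identifier's text.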
+macro_rules! match_tokens {
+ ($iter:ident, $($token:ident $({$($fields:tt)*})? $(($capture:ident))?)*) => {
+ {
+ $($(let $capture =)? if let Some(LintDeclSearchResult {
+ token_kind: TokenKind::$token $({$($fields)*})?,
+ content: _x,
+ ..
+ }) = $iter.next() {
+ _x
+ } else {
+ continue;
+ };)*
+ #[allow(clippy::unused_unit)]
+ { ($($($capture,)?)*) }
+ }
+ }
+}
+
+pub(crate) use match_tokens;
+
+pub(crate) struct LintDeclSearchResult<'a> {
+ pub token_kind: TokenKind,
+ pub content: &'a str,
+ pub range: Range<usize>,
+}
+
+/// Parse a source file looking for `declare_clippy_lint` macro invocations.
+fn parse_contents(contents: &str, module: &str, lints: &mut Vec<Lint>) {
+ let mut offset = 0usize;
+ let mut iter = tokenize(contents).map(|t| {
+ let range = offset..offset + t.len;
+ offset = range.end;
+
+ LintDeclSearchResult {
+ token_kind: t.kind,
+ content: &contents[range.clone()],
+ range,
+ }
+ });
+
+ while let Some(LintDeclSearchResult { range, .. }) = iter.find(
+ |LintDeclSearchResult {
+ token_kind, content, ..
+ }| token_kind == &TokenKind::Ident && *content == "declare_clippy_lint",
+ ) {
+ let start = range.start;
+
+ let mut iter = iter
+ .by_ref()
+ .filter(|t| !matches!(t.token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. }));
+ // matches `!{`
+ match_tokens!(iter, Bang OpenBrace);
+ match iter.next() {
+ // #[clippy::version = "version"] pub
+ Some(LintDeclSearchResult {
+ token_kind: TokenKind::Pound,
+ ..
+ }) => {
+ match_tokens!(iter, OpenBracket Ident Colon Colon Ident Eq Literal{..} CloseBracket Ident);
+ },
+ // pub
+ Some(LintDeclSearchResult {
+ token_kind: TokenKind::Ident,
+ ..
+ }) => (),
+ _ => continue,
+ }
+
+ let (name, group, desc) = match_tokens!(
+ iter,
+ // LINT_NAME
+ Ident(name) Comma
+ // group,
+ Ident(group) Comma
+ // "description"
+ Literal{..}(desc)
+ );
+
+ if let Some(LintDeclSearchResult {
+ token_kind: TokenKind::CloseBrace,
+ range,
+ ..
+ }) = iter.next()
+ {
+ lints.push(Lint::new(name, group, desc, module, start..range.end));
+ }
+ }
+}
+
+/// Parse a source file looking for `declare_deprecated_lint` macro invocations.
+fn parse_deprecated_contents(contents: &str, lints: &mut Vec<DeprecatedLint>) {
+ let mut offset = 0usize;
+ let mut iter = tokenize(contents).map(|t| {
+ let range = offset..offset + t.len;
+ offset = range.end;
+
+ LintDeclSearchResult {
+ token_kind: t.kind,
+ content: &contents[range.clone()],
+ range,
+ }
+ });
+
+ while let Some(LintDeclSearchResult { range, .. }) = iter.find(
+ |LintDeclSearchResult {
+ token_kind, content, ..
+ }| token_kind == &TokenKind::Ident && *content == "declare_deprecated_lint",
+ ) {
+ let start = range.start;
+
+ let mut iter = iter.by_ref().filter(|LintDeclSearchResult { ref token_kind, .. }| {
+ !matches!(token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. })
+ });
+ let (name, reason) = match_tokens!(
+ iter,
+ // !{
+ Bang OpenBrace
+ // #[clippy::version = "version"]
+ Pound OpenBracket Ident Colon Colon Ident Eq Literal{..} CloseBracket
+ // pub LINT_NAME,
+ Ident Ident(name) Comma
+ // "description"
+ Literal{kind: LiteralKind::Str{..},..}(reason)
+ );
+
+ if let Some(LintDeclSearchResult {
+ token_kind: TokenKind::CloseBrace,
+ range,
+ ..
+ }) = iter.next()
+ {
+ lints.push(DeprecatedLint::new(name, reason, start..range.end));
+ }
+ }
+}
+
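+/// Parse a source file looking for entries of the form `("old_name", "new_name"),`, one per
+/// line, as found in `renamed_lints.rs`.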
+fn parse_renamed_contents(contents: &str, lints: &mut Vec<RenamedLint>) {
+ for line in contents.lines() {
+ let mut offset = 0usize;
+ let mut iter = tokenize(line).map(|t| {
+ let range = offset..offset + t.len;
+ offset = range.end;
+
+ LintDeclSearchResult {
+ token_kind: t.kind,
+ content: &line[range.clone()],
+ range,
+ }
+ });
+
+ let (old_name, new_name) = match_tokens!(
+ iter,
+ // ("old_name",
+ Whitespace OpenParen Literal{kind: LiteralKind::Str{..},..}(old_name) Comma
+ // "new_name"),
+ Whitespace Literal{kind: LiteralKind::Str{..},..}(new_name) CloseParen Comma
+ );
+ lints.push(RenamedLint::new(old_name, new_name));
+ }
+}
+
+/// Removes the line splices and surrounding quotes from a string literal
+fn remove_line_splices(s: &str) -> String {
+ let s = s
+ .strip_prefix('r')
+ .unwrap_or(s)
+ .trim_matches('#')
+ .strip_prefix('"')
+ .and_then(|s| s.strip_suffix('"'))
+ .unwrap_or_else(|| panic!("expected quoted string, found `{}`", s));
+ let mut res = String::with_capacity(s.len());
+ unescape::unescape_literal(s, unescape::Mode::Str, &mut |range, _| res.push_str(&s[range]));
+ res
+}
+
+/// Replaces a region in a file delimited by two lines matching regexes.
+///
+/// `path` is the relative path to the file on which you want to perform the replacement.
+///
+/// See `replace_region_in_text` for documentation of the other options.
+///
+/// # Panics
+///
+/// Panics if the file could not be read or written
+fn replace_region_in_file(
+ update_mode: UpdateMode,
+ path: &Path,
+ start: &str,
+ end: &str,
+ write_replacement: impl FnMut(&mut String),
+) {
+ let contents = fs::read_to_string(path).unwrap_or_else(|e| panic!("Cannot read from `{}`: {}", path.display(), e));
+ let new_contents = match replace_region_in_text(&contents, start, end, write_replacement) {
+ Ok(x) => x,
+ Err(delim) => panic!("Couldn't find `{}` in file `{}`", delim, path.display()),
+ };
+
+ match update_mode {
+ UpdateMode::Check if contents != new_contents => exit_with_failure(),
+ UpdateMode::Check => (),
+ UpdateMode::Change => {
+ if let Err(e) = fs::write(path, new_contents.as_bytes()) {
+ panic!("Cannot write to `{}`: {}", path.display(), e);
+ }
+ },
+ }
+}
+
+/// Replaces a region in a text delimited by two strings. Returns the new text if both delimiters
+/// were found, or the missing delimiter if not.
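+///
+/// For example, `replace_region_in_text("a <b>old</b> c", "<b>", "</b>", |s| s.push_str("new"))`
+/// produces `Ok("a <b>new</b> c".to_string())`, while a missing `"</b>"` would produce
+/// `Err("</b>")`.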
+fn replace_region_in_text<'a>(
+ text: &str,
+ start: &'a str,
+ end: &'a str,
+ mut write_replacement: impl FnMut(&mut String),
+) -> Result<String, &'a str> {
+ let (text_start, rest) = text.split_once(start).ok_or(start)?;
+ let (_, text_end) = rest.split_once(end).ok_or(end)?;
+
+ let mut res = String::with_capacity(text.len() + 4096);
+ res.push_str(text_start);
+ res.push_str(start);
+ write_replacement(&mut res);
+ res.push_str(end);
+ res.push_str(text_end);
+
+ Ok(res)
+}
+
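+/// Attempts to rename `old_name` to `new_name`, first creating `new_name` with `create_new` so
+/// that an existing file is never overwritten. Returns `true` on success, `false` if the target
+/// already exists or the source file is missing, and panics on any other I/O error.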
+fn try_rename_file(old_name: &Path, new_name: &Path) -> bool {
+ match fs::OpenOptions::new().create_new(true).write(true).open(new_name) {
+ Ok(file) => drop(file),
+ Err(e) if matches!(e.kind(), io::ErrorKind::AlreadyExists | io::ErrorKind::NotFound) => return false,
+ Err(e) => panic_file(e, new_name, "create"),
+ };
+ match fs::rename(old_name, new_name) {
+ Ok(()) => true,
+ Err(e) => {
+ drop(fs::remove_file(new_name));
+ if e.kind() == io::ErrorKind::NotFound {
+ false
+ } else {
+ panic_file(e, old_name, "rename");
+ }
+ },
+ }
+}
+
+#[allow(clippy::needless_pass_by_value)]
+fn panic_file(error: io::Error, name: &Path, action: &str) -> ! {
+ panic!("failed to {} file `{}`: {}", action, name.display(), error)
+}
+
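+/// Reads the file at `path`, applies `f` to its contents, and, if `f` returns `Some(new)`, writes
+/// the new contents back in place, truncating the file to the new length.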
+fn rewrite_file(path: &Path, f: impl FnOnce(&str) -> Option<String>) {
+ let mut file = fs::OpenOptions::new()
+ .write(true)
+ .read(true)
+ .open(path)
+ .unwrap_or_else(|e| panic_file(e, path, "open"));
+ let mut buf = String::new();
+ file.read_to_string(&mut buf)
+ .unwrap_or_else(|e| panic_file(e, path, "read"));
+ if let Some(new_contents) = f(&buf) {
+ file.rewind().unwrap_or_else(|e| panic_file(e, path, "write"));
+ file.write_all(new_contents.as_bytes())
+ .unwrap_or_else(|e| panic_file(e, path, "write"));
+ file.set_len(new_contents.len() as u64)
+ .unwrap_or_else(|e| panic_file(e, path, "write"));
+ }
+}
+
+fn write_file(path: &Path, contents: &str) {
+ fs::write(path, contents).unwrap_or_else(|e| panic_file(e, path, "write"));
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_parse_contents() {
+ static CONTENTS: &str = r#"
+ declare_clippy_lint! {
+ #[clippy::version = "Hello Clippy!"]
+ pub PTR_ARG,
+ style,
+ "really long \
+ text"
+ }
+
+ declare_clippy_lint!{
+ #[clippy::version = "Test version"]
+ pub DOC_MARKDOWN,
+ pedantic,
+ "single line"
+ }
+ "#;
+ let mut result = Vec::new();
+ parse_contents(CONTENTS, "module_name", &mut result);
+ for r in &mut result {
+ r.declaration_range = Range::default();
+ }
+
+ let expected = vec![
+ Lint::new(
+ "ptr_arg",
+ "style",
+ "\"really long text\"",
+ "module_name",
+ Range::default(),
+ ),
+ Lint::new(
+ "doc_markdown",
+ "pedantic",
+ "\"single line\"",
+ "module_name",
+ Range::default(),
+ ),
+ ];
+ assert_eq!(expected, result);
+ }
+
+ #[test]
+ fn test_parse_deprecated_contents() {
+ static DEPRECATED_CONTENTS: &str = r#"
+ /// some doc comment
+ declare_deprecated_lint! {
+ #[clippy::version = "I'm a version"]
+ pub SHOULD_ASSERT_EQ,
+ "`assert!()` will be more flexible with RFC 2011"
+ }
+ "#;
+
+ let mut result = Vec::new();
+ parse_deprecated_contents(DEPRECATED_CONTENTS, &mut result);
+ for r in &mut result {
+ r.declaration_range = Range::default();
+ }
+
+ let expected = vec![DeprecatedLint::new(
+ "should_assert_eq",
+ "\"`assert!()` will be more flexible with RFC 2011\"",
+ Range::default(),
+ )];
+ assert_eq!(expected, result);
+ }
+
+ #[test]
+ fn test_usable_lints() {
+ let lints = vec![
+ Lint::new(
+ "should_assert_eq2",
+ "Not Deprecated",
+ "\"abc\"",
+ "module_name",
+ Range::default(),
+ ),
+ Lint::new(
+ "should_assert_eq2",
+ "internal",
+ "\"abc\"",
+ "module_name",
+ Range::default(),
+ ),
+ Lint::new(
+ "should_assert_eq2",
+ "internal_style",
+ "\"abc\"",
+ "module_name",
+ Range::default(),
+ ),
+ ];
+ let expected = vec![Lint::new(
+ "should_assert_eq2",
+ "Not Deprecated",
+ "\"abc\"",
+ "module_name",
+ Range::default(),
+ )];
+ assert_eq!(expected, Lint::usable_lints(&lints));
+ }
+
+ #[test]
+ fn test_by_lint_group() {
+ let lints = vec![
+ Lint::new("should_assert_eq", "group1", "\"abc\"", "module_name", Range::default()),
+ Lint::new(
+ "should_assert_eq2",
+ "group2",
+ "\"abc\"",
+ "module_name",
+ Range::default(),
+ ),
+ Lint::new("incorrect_match", "group1", "\"abc\"", "module_name", Range::default()),
+ ];
+ let mut expected: HashMap<String, Vec<Lint>> = HashMap::new();
+ expected.insert(
+ "group1".to_string(),
+ vec![
+ Lint::new("should_assert_eq", "group1", "\"abc\"", "module_name", Range::default()),
+ Lint::new("incorrect_match", "group1", "\"abc\"", "module_name", Range::default()),
+ ],
+ );
+ expected.insert(
+ "group2".to_string(),
+ vec![Lint::new(
+ "should_assert_eq2",
+ "group2",
+ "\"abc\"",
+ "module_name",
+ Range::default(),
+ )],
+ );
+ assert_eq!(expected, Lint::by_lint_group(lints.into_iter()));
+ }
+
+ #[test]
+ fn test_gen_deprecated() {
+ let lints = vec![
+ DeprecatedLint::new(
+ "should_assert_eq",
+ "\"has been superseded by should_assert_eq2\"",
+ Range::default(),
+ ),
+ DeprecatedLint::new("another_deprecated", "\"will be removed\"", Range::default()),
+ ];
+
+ let expected = GENERATED_FILE_COMMENT.to_string()
+ + &[
+ "{",
+ " store.register_removed(",
+ " \"clippy::should_assert_eq\",",
+ " \"has been superseded by should_assert_eq2\",",
+ " );",
+ " store.register_removed(",
+ " \"clippy::another_deprecated\",",
+ " \"will be removed\",",
+ " );",
+ "}",
+ ]
+ .join("\n")
+ + "\n";
+
+ assert_eq!(expected, gen_deprecated(&lints));
+ }
+
+ #[test]
+ fn test_gen_lint_group_list() {
+ let lints = vec![
+ Lint::new("abc", "group1", "\"abc\"", "module_name", Range::default()),
+ Lint::new("should_assert_eq", "group1", "\"abc\"", "module_name", Range::default()),
+ Lint::new("internal", "internal_style", "\"abc\"", "module_name", Range::default()),
+ ];
+ let expected = GENERATED_FILE_COMMENT.to_string()
+ + &[
+ "store.register_group(true, \"clippy::group1\", Some(\"clippy_group1\"), vec![",
+ " LintId::of(module_name::ABC),",
+ " LintId::of(module_name::INTERNAL),",
+ " LintId::of(module_name::SHOULD_ASSERT_EQ),",
+ "])",
+ ]
+ .join("\n")
+ + "\n";
+
+ let result = gen_lint_group_list("group1", lints.iter());
+
+ assert_eq!(expected, result);
+ }
+}
diff --git a/src/tools/clippy/clippy_dummy/Cargo.toml b/src/tools/clippy/clippy_dummy/Cargo.toml
new file mode 100644
index 000000000..c206a1eb0
--- /dev/null
+++ b/src/tools/clippy/clippy_dummy/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "clippy_dummy" # rename to clippy before publishing
+version = "0.0.303"
+edition = "2018"
+readme = "crates-readme.md"
+description = "A bunch of helpful lints to avoid common pitfalls in Rust."
+build = 'build.rs'
+
+repository = "https://github.com/rust-lang/rust-clippy"
+
+license = "MIT OR Apache-2.0"
+keywords = ["clippy", "lint", "plugin"]
+categories = ["development-tools", "development-tools::cargo-plugins"]
+
+[build-dependencies]
+term = "0.7"
diff --git a/src/tools/clippy/clippy_dummy/PUBLISH.md b/src/tools/clippy/clippy_dummy/PUBLISH.md
new file mode 100644
index 000000000..8e420ec95
--- /dev/null
+++ b/src/tools/clippy/clippy_dummy/PUBLISH.md
@@ -0,0 +1,6 @@
+This is a dummy crate to publish to crates.io. It primarily exists to ensure
+that folks trying to install clippy from crates.io get redirected to the
+`rustup` technique.
+
+Before publishing, be sure to rename `clippy_dummy` to `clippy` in `Cargo.toml`;
+it has a different name to avoid workspace issues.
diff --git a/src/tools/clippy/clippy_dummy/build.rs b/src/tools/clippy/clippy_dummy/build.rs
new file mode 100644
index 000000000..21af4f824
--- /dev/null
+++ b/src/tools/clippy/clippy_dummy/build.rs
@@ -0,0 +1,42 @@
+use term::color::{GREEN, RED, WHITE};
+use term::{Attr, Error, Result};
+
+fn main() {
+ if foo().is_err() {
+ eprintln!(
+ "error: Clippy is no longer available via crates.io\n\n\
+ help: please run `rustup component add clippy` instead"
+ );
+ }
+ std::process::exit(1);
+}
+
+fn foo() -> Result<()> {
+ let mut t = term::stderr().ok_or(Error::NotSupported)?;
+
+ t.attr(Attr::Bold)?;
+ t.fg(RED)?;
+ write!(t, "\nerror: ")?;
+
+ t.reset()?;
+ t.fg(WHITE)?;
+ writeln!(t, "Clippy is no longer available via crates.io\n")?;
+
+ t.attr(Attr::Bold)?;
+ t.fg(GREEN)?;
+ write!(t, "help: ")?;
+
+ t.reset()?;
+ t.fg(WHITE)?;
+ write!(t, "please run `")?;
+
+ t.attr(Attr::Bold)?;
+ write!(t, "rustup component add clippy")?;
+
+ t.reset()?;
+ t.fg(WHITE)?;
+ writeln!(t, "` instead")?;
+
+ t.reset()?;
+ Ok(())
+}
diff --git a/src/tools/clippy/clippy_dummy/crates-readme.md b/src/tools/clippy/clippy_dummy/crates-readme.md
new file mode 100644
index 000000000..0decae8b9
--- /dev/null
+++ b/src/tools/clippy/clippy_dummy/crates-readme.md
@@ -0,0 +1,9 @@
+Installing clippy via crates.io is deprecated. Please use the following:
+
+```terminal
+rustup component add clippy
+```
+
+on Rust 1.29 or later. You may need to run `rustup self update` if it complains about a missing clippy binary.
+
+See [the homepage](https://github.com/rust-lang/rust-clippy/#clippy) for more information.
diff --git a/src/tools/clippy/clippy_dummy/src/main.rs b/src/tools/clippy/clippy_dummy/src/main.rs
new file mode 100644
index 000000000..a118834f1
--- /dev/null
+++ b/src/tools/clippy/clippy_dummy/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ panic!("This shouldn't even compile")
+}
diff --git a/src/tools/clippy/clippy_lints/Cargo.toml b/src/tools/clippy/clippy_lints/Cargo.toml
new file mode 100644
index 000000000..79a56dc40
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/Cargo.toml
@@ -0,0 +1,38 @@
+[package]
+name = "clippy_lints"
+version = "0.1.64"
+description = "A bunch of helpful lints to avoid common pitfalls in Rust"
+repository = "https://github.com/rust-lang/rust-clippy"
+readme = "README.md"
+license = "MIT OR Apache-2.0"
+keywords = ["clippy", "lint", "plugin"]
+edition = "2021"
+
+[dependencies]
+cargo_metadata = "0.14"
+clippy_utils = { path = "../clippy_utils" }
+if_chain = "1.0"
+itertools = "0.10.1"
+pulldown-cmark = { version = "0.9", default-features = false }
+quine-mc_cluskey = "0.2"
+regex-syntax = "0.6"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = { version = "1.0", optional = true }
+tempfile = { version = "3.2", optional = true }
+toml = "0.5"
+unicode-normalization = "0.1"
+unicode-script = { version = "0.5", default-features = false }
+semver = "1.0"
+rustc-semver = "1.1"
+# NOTE: cargo requires serde feat in its url dep
+# see <https://github.com/rust-lang/rust/pull/63587#issuecomment-522343864>
+url = { version = "2.2", features = ["serde"] }
+
+[features]
+deny-warnings = ["clippy_utils/deny-warnings"]
+# build clippy with internal lints enabled, off by default
+internal = ["clippy_utils/internal", "serde_json", "tempfile"]
+
+[package.metadata.rust-analyzer]
+# This crate uses #[feature(rustc_private)]
+rustc_private = true
diff --git a/src/tools/clippy/clippy_lints/README.md b/src/tools/clippy/clippy_lints/README.md
new file mode 100644
index 000000000..513583b7e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/README.md
@@ -0,0 +1 @@
+This crate contains Clippy lints. For the main crate, check [GitHub](https://github.com/rust-lang/rust-clippy).
diff --git a/src/tools/clippy/clippy_lints/src/almost_complete_letter_range.rs b/src/tools/clippy/clippy_lints/src/almost_complete_letter_range.rs
new file mode 100644
index 000000000..59a7c5354
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/almost_complete_letter_range.rs
@@ -0,0 +1,100 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::{trim_span, walk_span_to_context};
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_ast::ast::{Expr, ExprKind, LitKind, Pat, PatKind, RangeEnd, RangeLimits};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for ranges which almost include the entire range of letters from 'a' to 'z', but
+ /// don't because they're half-open ranges.
+ ///
+ /// ### Why is this bad?
+ /// This (`'a'..'z'`) is almost certainly a typo meant to include all letters.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = 'a'..'z';
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let _ = 'a'..='z';
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub ALMOST_COMPLETE_LETTER_RANGE,
+ suspicious,
+ "almost complete letter range"
+}
+impl_lint_pass!(AlmostCompleteLetterRange => [ALMOST_COMPLETE_LETTER_RANGE]);
+
+pub struct AlmostCompleteLetterRange {
+ msrv: Option<RustcVersion>,
+}
+impl AlmostCompleteLetterRange {
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+impl EarlyLintPass for AlmostCompleteLetterRange {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &Expr) {
+ if let ExprKind::Range(Some(start), Some(end), RangeLimits::HalfOpen) = &e.kind {
+ let ctxt = e.span.ctxt();
+ let sugg = if let Some(start) = walk_span_to_context(start.span, ctxt)
+ && let Some(end) = walk_span_to_context(end.span, ctxt)
+ && meets_msrv(self.msrv, msrvs::RANGE_INCLUSIVE)
+ {
+ Some((trim_span(cx.sess().source_map(), start.between(end)), "..="))
+ } else {
+ None
+ };
+ check_range(cx, e.span, start, end, sugg);
+ }
+ }
+
+ fn check_pat(&mut self, cx: &EarlyContext<'_>, p: &Pat) {
+ if let PatKind::Range(Some(start), Some(end), kind) = &p.kind
+ && matches!(kind.node, RangeEnd::Excluded)
+ {
+ let sugg = if meets_msrv(self.msrv, msrvs::RANGE_INCLUSIVE) {
+ "..="
+ } else {
+ "..."
+ };
+ check_range(cx, p.span, start, end, Some((kind.span, sugg)));
+ }
+ }
+
+ extract_msrv_attr!(EarlyContext);
+}
+
+fn check_range(cx: &EarlyContext<'_>, span: Span, start: &Expr, end: &Expr, sugg: Option<(Span, &str)>) {
+ if let ExprKind::Lit(start_lit) = &start.peel_parens().kind
+ && let ExprKind::Lit(end_lit) = &end.peel_parens().kind
+ && matches!(
+ (&start_lit.kind, &end_lit.kind),
+ (LitKind::Byte(b'a') | LitKind::Char('a'), LitKind::Byte(b'z') | LitKind::Char('z'))
+ | (LitKind::Byte(b'A') | LitKind::Char('A'), LitKind::Byte(b'Z') | LitKind::Char('Z'))
+ )
+ {
+ span_lint_and_then(
+ cx,
+ ALMOST_COMPLETE_LETTER_RANGE,
+ span,
+ "almost complete ascii letter range",
+ |diag| {
+ if let Some((span, sugg)) = sugg {
+ diag.span_suggestion(
+ span,
+ "use an inclusive range",
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/approx_const.rs b/src/tools/clippy/clippy_lints/src/approx_const.rs
new file mode 100644
index 000000000..159f3b0cd
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/approx_const.rs
@@ -0,0 +1,132 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_ast::ast::{FloatTy, LitFloatType, LitKind};
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol;
+use std::f64::consts as f64;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for floating point literals that approximate
+ /// constants which are defined in
+ /// [`std::f32::consts`](https://doc.rust-lang.org/stable/std/f32/consts/#constants)
+ /// or
+ /// [`std::f64::consts`](https://doc.rust-lang.org/stable/std/f64/consts/#constants),
+ /// respectively, suggesting to use the predefined constant.
+ ///
+ /// ### Why is this bad?
+ /// Usually, the definition in the standard library is more
+ /// precise than what people come up with. If you find that your definition is
+ /// actually more precise, please [file a Rust
+ /// issue](https://github.com/rust-lang/rust/issues).
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 3.14;
+ /// let y = 1_f64 / x;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = std::f32::consts::PI;
+ /// let y = std::f64::consts::FRAC_1_PI;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub APPROX_CONSTANT,
+ correctness,
+ "the approximate of a known float constant (in `std::fXX::consts`)"
+}
+
+// Tuples are of the form (constant, name, min_digits, msrv)
+const KNOWN_CONSTS: [(f64, &str, usize, Option<RustcVersion>); 19] = [
+ (f64::E, "E", 4, None),
+ (f64::FRAC_1_PI, "FRAC_1_PI", 4, None),
+ (f64::FRAC_1_SQRT_2, "FRAC_1_SQRT_2", 5, None),
+ (f64::FRAC_2_PI, "FRAC_2_PI", 5, None),
+ (f64::FRAC_2_SQRT_PI, "FRAC_2_SQRT_PI", 5, None),
+ (f64::FRAC_PI_2, "FRAC_PI_2", 5, None),
+ (f64::FRAC_PI_3, "FRAC_PI_3", 5, None),
+ (f64::FRAC_PI_4, "FRAC_PI_4", 5, None),
+ (f64::FRAC_PI_6, "FRAC_PI_6", 5, None),
+ (f64::FRAC_PI_8, "FRAC_PI_8", 5, None),
+ (f64::LN_2, "LN_2", 5, None),
+ (f64::LN_10, "LN_10", 5, None),
+ (f64::LOG2_10, "LOG2_10", 5, Some(msrvs::LOG2_10)),
+ (f64::LOG2_E, "LOG2_E", 5, None),
+ (f64::LOG10_2, "LOG10_2", 5, Some(msrvs::LOG10_2)),
+ (f64::LOG10_E, "LOG10_E", 5, None),
+ (f64::PI, "PI", 3, None),
+ (f64::SQRT_2, "SQRT_2", 5, None),
+ (f64::TAU, "TAU", 3, Some(msrvs::TAU)),
+];
+
+pub struct ApproxConstant {
+ msrv: Option<RustcVersion>,
+}
+
+impl ApproxConstant {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+
+ fn check_lit(&self, cx: &LateContext<'_>, lit: &LitKind, e: &Expr<'_>) {
+ match *lit {
+ LitKind::Float(s, LitFloatType::Suffixed(fty)) => match fty {
+ FloatTy::F32 => self.check_known_consts(cx, e, s, "f32"),
+ FloatTy::F64 => self.check_known_consts(cx, e, s, "f64"),
+ },
+ LitKind::Float(s, LitFloatType::Unsuffixed) => self.check_known_consts(cx, e, s, "f{32, 64}"),
+ _ => (),
+ }
+ }
+
+ fn check_known_consts(&self, cx: &LateContext<'_>, e: &Expr<'_>, s: symbol::Symbol, module: &str) {
+ let s = s.as_str();
+ if s.parse::<f64>().is_ok() {
+ for &(constant, name, min_digits, msrv) in &KNOWN_CONSTS {
+ if is_approx_const(constant, s, min_digits) && msrv.map_or(true, |msrv| meets_msrv(self.msrv, msrv)) {
+ span_lint_and_help(
+ cx,
+ APPROX_CONSTANT,
+ e.span,
+ &format!("approximate value of `{}::consts::{}` found", module, &name),
+ None,
+ "consider using the constant directly",
+ );
+ return;
+ }
+ }
+ }
+ }
+}
+
+impl_lint_pass!(ApproxConstant => [APPROX_CONSTANT]);
+
+impl<'tcx> LateLintPass<'tcx> for ApproxConstant {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let ExprKind::Lit(lit) = &e.kind {
+ self.check_lit(cx, &lit.node, e);
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+/// Returns `false` if the number of significant figures in `value` is
+/// less than `min_digits`; otherwise, returns `true` if `value` is equal
+/// to `constant`, rounded to the number of digits present in `value`.
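+///
+/// For example, `is_approx_const(std::f64::consts::PI, "3.14", 3)` is `true` because `3.14` is a
+/// truncation of `3.141592653589793`, whereas `"3.15"` yields `false` since it neither truncates
+/// nor rounds to the constant.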
+#[must_use]
+fn is_approx_const(constant: f64, value: &str, min_digits: usize) -> bool {
+ if value.len() <= min_digits {
+ false
+ } else if constant.to_string().starts_with(value) {
+ // The value is a truncated constant
+ true
+ } else {
+ let round_const = format!("{:.*}", value.len() - 2, constant);
+ value == round_const
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/as_conversions.rs b/src/tools/clippy/clippy_lints/src/as_conversions.rs
new file mode 100644
index 000000000..c7a76e5f9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/as_conversions.rs
@@ -0,0 +1,65 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `as` conversions.
+ ///
+ /// Note that this lint is specialized in linting *every single* use of `as`
+ /// regardless of whether good alternatives exist or not.
+ /// If you want more precise lints for `as`, please consider using these separate lints:
+ /// `unnecessary_cast`, `cast_lossless/cast_possible_truncation/cast_possible_wrap/cast_precision_loss/cast_sign_loss`,
+ /// `fn_to_numeric_cast(_with_truncation)`, `char_lit_as_u8`, `ref_to_mut` and `ptr_as_ptr`.
+ /// There is a good explanation of why this lint should work in this way and how it is useful
+ /// [in this issue](https://github.com/rust-lang/rust-clippy/issues/5122).
+ ///
+ /// ### Why is this bad?
+ /// `as` conversions will perform many kinds of
+ /// conversions, including silently lossy conversions and dangerous coercions.
+ /// There are cases when it makes sense to use `as`, so the lint is
+ /// Allow by default.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let a: u32;
+ /// ...
+ /// f(a as u16);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// f(a.try_into()?);
+ ///
+ /// // or
+ ///
+ /// f(a.try_into().expect("Unexpected u16 overflow in f"));
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub AS_CONVERSIONS,
+ restriction,
+ "using a potentially dangerous silent `as` conversion"
+}
+
+declare_lint_pass!(AsConversions => [AS_CONVERSIONS]);
+
+impl EarlyLintPass for AsConversions {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Cast(_, _) = expr.kind {
+ span_lint_and_help(
+ cx,
+ AS_CONVERSIONS,
+ expr.span,
+ "using a potentially dangerous silent `as` conversion",
+ None,
+ "consider using a safe wrapper for this conversion",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/as_underscore.rs b/src/tools/clippy/clippy_lints/src/as_underscore.rs
new file mode 100644
index 000000000..0bdef9d0a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/as_underscore.rs
@@ -0,0 +1,74 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, TyKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `as _` conversions with an inferred type.
+ ///
+ /// ### Why is this bad?
+ /// The conversion might include a lossy conversion or a dangerous cast that might go
+ /// undetected due to the type being inferred.
+ ///
+ /// The lint is allowed by default as using `_` is less wordy than always specifying the type.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(n: usize) {}
+ /// let n: u16 = 256;
+ /// foo(n as _);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn foo(n: usize) {}
+ /// let n: u16 = 256;
+ /// foo(n as usize);
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub AS_UNDERSCORE,
+ restriction,
+ "detects `as _` conversion"
+}
+declare_lint_pass!(AsUnderscore => [AS_UNDERSCORE]);
+
+impl<'tcx> LateLintPass<'tcx> for AsUnderscore {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Cast(_, ty) = expr.kind && let TyKind::Infer = ty.kind {
+
+ let ty_resolved = cx.typeck_results().expr_ty(expr);
+ if let ty::Error(_) = ty_resolved.kind() {
+ span_lint_and_help(
+ cx,
+ AS_UNDERSCORE,
+ expr.span,
+ "using `as _` conversion",
+ None,
+ "consider giving the type explicitly",
+ );
+ } else {
+ span_lint_and_then(
+ cx,
+ AS_UNDERSCORE,
+ expr.span,
+ "using `as _` conversion",
+ |diag| {
+ diag.span_suggestion(
+ ty.span,
+ "consider giving the type explicitly",
+ ty_resolved,
+ Applicability::MachineApplicable,
+ );
+ }
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/asm_syntax.rs b/src/tools/clippy/clippy_lints/src/asm_syntax.rs
new file mode 100644
index 000000000..f419781db
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/asm_syntax.rs
@@ -0,0 +1,131 @@
+use std::fmt;
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{Expr, ExprKind, InlineAsmOptions};
+use rustc_lint::{EarlyContext, EarlyLintPass, Lint};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+enum AsmStyle {
+ Intel,
+ Att,
+}
+
+impl fmt::Display for AsmStyle {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ AsmStyle::Intel => f.write_str("Intel"),
+ AsmStyle::Att => f.write_str("AT&T"),
+ }
+ }
+}
+
+impl std::ops::Not for AsmStyle {
+ type Output = AsmStyle;
+
+ fn not(self) -> AsmStyle {
+ match self {
+ AsmStyle::Intel => AsmStyle::Att,
+ AsmStyle::Att => AsmStyle::Intel,
+ }
+ }
+}
+
+fn check_expr_asm_syntax(lint: &'static Lint, cx: &EarlyContext<'_>, expr: &Expr, check_for: AsmStyle) {
+ if let ExprKind::InlineAsm(ref inline_asm) = expr.kind {
+ let style = if inline_asm.options.contains(InlineAsmOptions::ATT_SYNTAX) {
+ AsmStyle::Att
+ } else {
+ AsmStyle::Intel
+ };
+
+ if style == check_for {
+ span_lint_and_help(
+ cx,
+ lint,
+ expr.span,
+ &format!("{} x86 assembly syntax used", style),
+ None,
+ &format!("use {} x86 assembly syntax", !style),
+ );
+ }
+ }
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of Intel x86 assembly syntax.
+ ///
+ /// ### Why is this bad?
+ /// The lint has been enabled to indicate a preference
+ /// for AT&T x86 assembly syntax.
+ ///
+ /// ### Example
+ ///
+ /// ```rust,no_run
+ /// # #![feature(asm)]
+ /// # unsafe { let ptr = "".as_ptr();
+ /// # use std::arch::asm;
+ /// asm!("lea {}, [{}]", lateout(reg) _, in(reg) ptr);
+ /// # }
+ /// ```
+ /// Use instead:
+ /// ```rust,no_run
+ /// # #![feature(asm)]
+ /// # unsafe { let ptr = "".as_ptr();
+ /// # use std::arch::asm;
+ /// asm!("lea ({}), {}", in(reg) ptr, lateout(reg) _, options(att_syntax));
+ /// # }
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub INLINE_ASM_X86_INTEL_SYNTAX,
+ restriction,
+ "prefer AT&T x86 assembly syntax"
+}
+
+declare_lint_pass!(InlineAsmX86IntelSyntax => [INLINE_ASM_X86_INTEL_SYNTAX]);
+
+impl EarlyLintPass for InlineAsmX86IntelSyntax {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ check_expr_asm_syntax(Self::get_lints()[0], cx, expr, AsmStyle::Intel);
+ }
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of AT&T x86 assembly syntax.
+ ///
+ /// ### Why is this bad?
+ /// The lint has been enabled to indicate a preference
+ /// for Intel x86 assembly syntax.
+ ///
+ /// ### Example
+ ///
+ /// ```rust,no_run
+ /// # #![feature(asm)]
+ /// # unsafe { let ptr = "".as_ptr();
+ /// # use std::arch::asm;
+ /// asm!("lea ({}), {}", in(reg) ptr, lateout(reg) _, options(att_syntax));
+ /// # }
+ /// ```
+ /// Use instead:
+ /// ```rust,no_run
+ /// # #![feature(asm)]
+ /// # unsafe { let ptr = "".as_ptr();
+ /// # use std::arch::asm;
+ /// asm!("lea {}, [{}]", lateout(reg) _, in(reg) ptr);
+ /// # }
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub INLINE_ASM_X86_ATT_SYNTAX,
+ restriction,
+ "prefer Intel x86 assembly syntax"
+}
+
+declare_lint_pass!(InlineAsmX86AttSyntax => [INLINE_ASM_X86_ATT_SYNTAX]);
+
+impl EarlyLintPass for InlineAsmX86AttSyntax {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ check_expr_asm_syntax(Self::get_lints()[0], cx, expr, AsmStyle::Att);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
new file mode 100644
index 000000000..2705ffffd
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
@@ -0,0 +1,69 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::macros::{find_assert_args, root_macro_call_first_node, PanicExpn};
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `assert!(true)` and `assert!(false)` calls.
+ ///
+ /// ### Why is this bad?
+ /// The assertion will be optimized out by the compiler, or should probably be replaced by a
+ /// `panic!()` or `unreachable!()`.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// assert!(false)
+ /// assert!(true)
+ /// const B: bool = false;
+ /// assert!(B)
+ /// ```
+ #[clippy::version = "1.34.0"]
+ pub ASSERTIONS_ON_CONSTANTS,
+ style,
+ "`assert!(true)` / `assert!(false)` will be optimized out by the compiler, and should probably be replaced by a `panic!()` or `unreachable!()`"
+}
+
+declare_lint_pass!(AssertionsOnConstants => [ASSERTIONS_ON_CONSTANTS]);
+
+impl<'tcx> LateLintPass<'tcx> for AssertionsOnConstants {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, e) else { return };
+ let is_debug = match cx.tcx.get_diagnostic_name(macro_call.def_id) {
+ Some(sym::debug_assert_macro) => true,
+ Some(sym::assert_macro) => false,
+ _ => return,
+ };
+ let Some((condition, panic_expn)) = find_assert_args(cx, e, macro_call.expn) else { return };
+ let Some((Constant::Bool(val), _)) = constant(cx, cx.typeck_results(), condition) else { return };
+ if val {
+ span_lint_and_help(
+ cx,
+ ASSERTIONS_ON_CONSTANTS,
+ macro_call.span,
+ &format!(
+ "`{}!(true)` will be optimized out by the compiler",
+ cx.tcx.item_name(macro_call.def_id)
+ ),
+ None,
+ "remove it",
+ );
+ } else if !is_debug {
+ let (assert_arg, panic_arg) = match panic_expn {
+ PanicExpn::Empty => ("", ""),
+ _ => (", ..", ".."),
+ };
+ span_lint_and_help(
+ cx,
+ ASSERTIONS_ON_CONSTANTS,
+ macro_call.span,
+ &format!("`assert!(false{})` should probably be replaced", assert_arg),
+ None,
+ &format!("use `panic!({})` or `unreachable!({0})`", panic_arg),
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs b/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
new file mode 100644
index 000000000..4caab6230
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
@@ -0,0 +1,101 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::{find_assert_args, root_macro_call_first_node, PanicExpn};
+use clippy_utils::path_res;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::ty::{implements_trait, is_copy, is_type_diagnostic_item};
+use clippy_utils::usage::local_used_after_expr;
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `assert!(r.is_ok())` or `assert!(r.is_err())` calls.
+ ///
+ /// ### Why is this bad?
+ /// An assertion failure cannot output a useful message for the error.
+ ///
+ /// ### Known problems
+ /// The suggested replacement decreases the readability of code and log output.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// # let r = Ok::<_, ()>(());
+ /// assert!(r.is_ok());
+ /// # let r = Err::<_, ()>(());
+ /// assert!(r.is_err());
+ /// ```
+ #[clippy::version = "1.64.0"]
+ pub ASSERTIONS_ON_RESULT_STATES,
+ restriction,
+ "`assert!(r.is_ok())`/`assert!(r.is_err())` gives worse error message than directly calling `r.unwrap()`/`r.unwrap_err()`"
+}
+
+declare_lint_pass!(AssertionsOnResultStates => [ASSERTIONS_ON_RESULT_STATES]);
+
+impl<'tcx> LateLintPass<'tcx> for AssertionsOnResultStates {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let Some(macro_call) = root_macro_call_first_node(cx, e)
+ && matches!(cx.tcx.get_diagnostic_name(macro_call.def_id), Some(sym::assert_macro))
+ && let Some((condition, panic_expn)) = find_assert_args(cx, e, macro_call.expn)
+ && matches!(panic_expn, PanicExpn::Empty)
+ && let ExprKind::MethodCall(method_segment, [recv], _) = condition.kind
+ && let result_type_with_refs = cx.typeck_results().expr_ty(recv)
+ && let result_type = result_type_with_refs.peel_refs()
+ && is_type_diagnostic_item(cx, result_type, sym::Result)
+ && let ty::Adt(_, substs) = result_type.kind()
+ {
+ if !is_copy(cx, result_type) {
+ if result_type_with_refs != result_type {
+ return;
+ } else if let Res::Local(binding_id) = path_res(cx, recv)
+ && local_used_after_expr(cx, binding_id, recv) {
+ return;
+ }
+ }
+ let mut app = Applicability::MachineApplicable;
+ match method_segment.ident.as_str() {
+ "is_ok" if has_debug_impl(cx, substs.type_at(1)) => {
+ span_lint_and_sugg(
+ cx,
+ ASSERTIONS_ON_RESULT_STATES,
+ macro_call.span,
+ "called `assert!` with `Result::is_ok`",
+ "replace with",
+ format!(
+ "{}.unwrap()",
+ snippet_with_context(cx, recv.span, condition.span.ctxt(), "..", &mut app).0
+ ),
+ app,
+ );
+ }
+ "is_err" if has_debug_impl(cx, substs.type_at(0)) => {
+ span_lint_and_sugg(
+ cx,
+ ASSERTIONS_ON_RESULT_STATES,
+ macro_call.span,
+ "called `assert!` with `Result::is_err`",
+ "replace with",
+ format!(
+ "{}.unwrap_err()",
+ snippet_with_context(cx, recv.span, condition.span.ctxt(), "..", &mut app).0
+ ),
+ app,
+ );
+ }
+ _ => (),
+ };
+ }
+ }
+}
+
+/// This checks whether a given type is known to implement Debug.
+fn has_debug_impl<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ cx.tcx
+ .get_diagnostic_item(sym::Debug)
+ .map_or(false, |debug| implements_trait(cx, ty, debug, &[]))
+}
diff --git a/src/tools/clippy/clippy_lints/src/async_yields_async.rs b/src/tools/clippy/clippy_lints/src/async_yields_async.rs
new file mode 100644
index 000000000..27c2896e1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/async_yields_async.rs
@@ -0,0 +1,89 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::implements_trait;
+use rustc_errors::Applicability;
+use rustc_hir::{AsyncGeneratorKind, Body, BodyId, ExprKind, GeneratorKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for async blocks that yield values of types
+ /// that can themselves be awaited.
+ ///
+ /// ### Why is this bad?
+ /// An await is likely missing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// async fn foo() {}
+ ///
+ /// fn bar() {
+ /// let x = async {
+ /// foo()
+ /// };
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// async fn foo() {}
+ ///
+ /// fn bar() {
+ /// let x = async {
+ /// foo().await
+ /// };
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub ASYNC_YIELDS_ASYNC,
+ correctness,
+ "async blocks that return a type that can be awaited"
+}
+
+declare_lint_pass!(AsyncYieldsAsync => [ASYNC_YIELDS_ASYNC]);
+
+impl<'tcx> LateLintPass<'tcx> for AsyncYieldsAsync {
+ fn check_body(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
+ use AsyncGeneratorKind::{Block, Closure};
+ // For functions with explicitly defined types, don't warn.
+ // XXXkhuey maybe we should?
+ if let Some(GeneratorKind::Async(Block | Closure)) = body.generator_kind {
+ if let Some(future_trait_def_id) = cx.tcx.lang_items().future_trait() {
+ let body_id = BodyId {
+ hir_id: body.value.hir_id,
+ };
+ let typeck_results = cx.tcx.typeck_body(body_id);
+ let expr_ty = typeck_results.expr_ty(&body.value);
+
+ if implements_trait(cx, expr_ty, future_trait_def_id, &[]) {
+ let return_expr_span = match &body.value.kind {
+ // XXXkhuey there has to be a better way.
+ ExprKind::Block(block, _) => block.expr.map(|e| e.span),
+ ExprKind::Path(QPath::Resolved(_, path)) => Some(path.span),
+ _ => None,
+ };
+ if let Some(return_expr_span) = return_expr_span {
+ span_lint_hir_and_then(
+ cx,
+ ASYNC_YIELDS_ASYNC,
+ body.value.hir_id,
+ return_expr_span,
+ "an async construct yields a type which is itself awaitable",
+ |db| {
+ db.span_label(body.value.span, "outer async construct");
+ db.span_label(return_expr_span, "awaitable value not awaited");
+ db.span_suggestion(
+ return_expr_span,
+ "consider awaiting this value",
+ format!("{}.await", snippet(cx, return_expr_span, "..")),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/attrs.rs b/src/tools/clippy/clippy_lints/src/attrs.rs
new file mode 100644
index 000000000..4bcbeacf9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/attrs.rs
@@ -0,0 +1,738 @@
+//! checks for attributes
+
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::macros::{is_panic, macro_backtrace};
+use clippy_utils::msrvs;
+use clippy_utils::source::{first_line_of_span, is_present_in_source, snippet_opt, without_block_comments};
+use clippy_utils::{extract_msrv_attr, meets_msrv};
+use if_chain::if_chain;
+use rustc_ast::{AttrKind, AttrStyle, Attribute, Lit, LitKind, MetaItemKind, NestedMetaItem};
+use rustc_errors::Applicability;
+use rustc_hir::{
+ Block, Expr, ExprKind, ImplItem, ImplItemKind, Item, ItemKind, StmtKind, TraitFn, TraitItem, TraitItemKind,
+};
+use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+use rustc_span::symbol::Symbol;
+use semver::Version;
+
+static UNIX_SYSTEMS: &[&str] = &[
+ "android",
+ "dragonfly",
+ "emscripten",
+ "freebsd",
+ "fuchsia",
+ "haiku",
+ "illumos",
+ "ios",
+ "l4re",
+ "linux",
+ "macos",
+ "netbsd",
+ "openbsd",
+ "redox",
+ "solaris",
+ "vxworks",
+];
+
+// NOTE: windows is excluded from the list because it's also a valid target family.
+static NON_UNIX_SYSTEMS: &[&str] = &["hermit", "none", "wasi"];
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for items annotated with `#[inline(always)]`,
+ /// unless the annotated function is empty or simply panics.
+ ///
+ /// ### Why is this bad?
+ /// While there are valid uses of this annotation (and once
+ /// you know when to use it, by all means `allow` this lint), it's a common
+ /// newbie-mistake to pepper one's code with it.
+ ///
+ /// As a rule of thumb, before slapping `#[inline(always)]` on a function,
+ /// measure if that additional function call really affects your runtime profile
+ /// sufficiently to make up for the increase in compile time.
+ ///
+ /// ### Known problems
+ /// False positives, big time. This lint is meant to be
+ /// deactivated by everyone doing serious performance work. This means having
+ /// done the measurement.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// #[inline(always)]
+ /// fn not_quite_hot_code(..) { ... }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INLINE_ALWAYS,
+ pedantic,
+ "use of `#[inline(always)]`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `extern crate` and `use` items annotated with
+ /// lint attributes.
+ ///
+ /// This lint permits lint attributes for lints emitted on the items themselves.
+ /// For `use` items these lints are:
+ /// * deprecated
+ /// * unreachable_pub
+ /// * unused_imports
+ /// * clippy::enum_glob_use
+ /// * clippy::macro_use_imports
+ /// * clippy::wildcard_imports
+ ///
+ /// For `extern crate` items these lints are:
+ /// * `unused_imports` on items with `#[macro_use]`
+ ///
+ /// ### Why is this bad?
+ /// Lint attributes have no effect on crate imports. Most
+ /// likely a `!` was forgotten.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// #[deny(dead_code)]
+ /// extern crate foo;
+ /// #[forbid(dead_code)]
+ /// use foo::bar;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// #[allow(unused_imports)]
+ /// use foo::baz;
+ /// #[allow(unused_imports)]
+ /// #[macro_use]
+ /// extern crate baz;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USELESS_ATTRIBUTE,
+ correctness,
+ "use of lint attributes on `extern crate` items"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `#[deprecated]` annotations with a `since`
+ /// field that is not a valid semantic version.
+ ///
+ /// ### Why is this bad?
+ /// For checking the version of the deprecation, it must be
+ /// a valid semver. Failing that, the contained information is useless.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[deprecated(since = "forever")]
+ /// fn something_else() { /* ... */ }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DEPRECATED_SEMVER,
+ correctness,
+ "use of `#[deprecated(since = \"x\")]` where x is not semver"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for empty lines after outer attributes
+ ///
+ /// ### Why is this bad?
+ /// Most likely the attribute was meant to be an inner attribute using a '!'.
+ /// If it was meant to be an outer attribute, then the following item
+ /// should not be separated by empty lines.
+ ///
+ /// ### Known problems
+ /// Can cause false positives.
+ ///
+ /// From the clippy side it's difficult to detect empty lines between an attribute and the
+ /// following item because empty lines and comments are not part of the AST. The parsing
+ /// currently works for basic cases but is not perfect.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[allow(dead_code)]
+ ///
+ /// fn not_quite_good_code() { }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// // Good (as inner attribute)
+ /// #![allow(dead_code)]
+ ///
+ /// fn this_is_fine() { }
+ ///
+ /// // or
+ ///
+ /// // Good (as outer attribute)
+ /// #[allow(dead_code)]
+ /// fn this_is_fine_too() { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EMPTY_LINE_AFTER_OUTER_ATTR,
+ nursery,
+ "empty line after outer attribute"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `warn`/`deny`/`forbid` attributes targeting the whole clippy::restriction category.
+ ///
+ /// ### Why is this bad?
+ /// Restriction lints are sometimes at odds with other lints or even go against idiomatic Rust.
+ /// These lints should only be enabled on a lint-by-lint basis and with careful consideration.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #![deny(clippy::restriction)]
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// #![deny(clippy::as_conversions)]
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub BLANKET_CLIPPY_RESTRICTION_LINTS,
+ suspicious,
+ "enabling the complete restriction group"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
+ /// with `#[rustfmt::skip]`.
+ ///
+ /// ### Why is this bad?
+ /// Since tool_attributes ([rust-lang/rust#44690](https://github.com/rust-lang/rust/issues/44690))
+ /// are stable now, they should be used instead of the old `cfg_attr(rustfmt)` attributes.
+ ///
+ /// ### Known problems
+ /// This lint doesn't detect crate level inner attributes, because they get
+ /// processed before the PreExpansionPass lints get executed. See
+ /// [#3123](https://github.com/rust-lang/rust-clippy/pull/3123#issuecomment-422321765)
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[cfg_attr(rustfmt, rustfmt_skip)]
+ /// fn main() { }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// #[rustfmt::skip]
+ /// fn main() { }
+ /// ```
+ #[clippy::version = "1.32.0"]
+ pub DEPRECATED_CFG_ATTR,
+ complexity,
+ "usage of `cfg_attr(rustfmt)` instead of tool attributes"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for cfg attributes having operating systems used in target family position.
+ ///
+ /// ### Why is this bad?
+ /// The configuration option will not be recognised and the related item will not be included
+ /// by the conditional compilation engine.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[cfg(linux)]
+ /// fn conditional() { }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # mod hidden {
+ /// #[cfg(target_os = "linux")]
+ /// fn conditional() { }
+ /// # }
+ ///
+ /// // or
+ ///
+ /// #[cfg(unix)]
+ /// fn conditional() { }
+ /// ```
+ /// Check the [Rust Reference](https://doc.rust-lang.org/reference/conditional-compilation.html#target_os) for more details.
+ #[clippy::version = "1.45.0"]
+ pub MISMATCHED_TARGET_OS,
+ correctness,
+ "usage of `cfg(operating_system)` instead of `cfg(target_os = \"operating_system\")`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for attributes that allow lints without a reason.
+ ///
+ /// (This requires the `lint_reasons` feature)
+ ///
+ /// ### Why is this bad?
+ /// Allowing a lint should always have a reason. This reason should be documented to
+ /// ensure that others understand the reasoning.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #![feature(lint_reasons)]
+ ///
+ /// #![allow(clippy::some_lint)]
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// #![feature(lint_reasons)]
+ ///
+ /// #![allow(clippy::some_lint, reason = "False positive rust-lang/rust-clippy#1002020")]
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub ALLOW_ATTRIBUTES_WITHOUT_REASON,
+ restriction,
+ "ensures that all `allow` and `expect` attributes have a reason"
+}
+
+declare_lint_pass!(Attributes => [
+ ALLOW_ATTRIBUTES_WITHOUT_REASON,
+ INLINE_ALWAYS,
+ DEPRECATED_SEMVER,
+ USELESS_ATTRIBUTE,
+ BLANKET_CLIPPY_RESTRICTION_LINTS,
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Attributes {
+ fn check_attribute(&mut self, cx: &LateContext<'tcx>, attr: &'tcx Attribute) {
+ if let Some(items) = &attr.meta_item_list() {
+ if let Some(ident) = attr.ident() {
+ if is_lint_level(ident.name) {
+ check_clippy_lint_names(cx, ident.name, items);
+ }
+ if matches!(ident.name, sym::allow | sym::expect) {
+ check_lint_reason(cx, ident.name, items, attr);
+ }
+ if items.is_empty() || !attr.has_name(sym::deprecated) {
+ return;
+ }
+ for item in items {
+ if_chain! {
+ if let NestedMetaItem::MetaItem(mi) = &item;
+ if let MetaItemKind::NameValue(lit) = &mi.kind;
+ if mi.has_name(sym::since);
+ then {
+ check_semver(cx, item.span(), lit);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ if is_relevant_item(cx, item) {
+ check_attrs(cx, item.span, item.ident.name, attrs);
+ }
+ match item.kind {
+ ItemKind::ExternCrate(..) | ItemKind::Use(..) => {
+ let skip_unused_imports = attrs.iter().any(|attr| attr.has_name(sym::macro_use));
+
+ for attr in attrs {
+ if in_external_macro(cx.sess(), attr.span) {
+ return;
+ }
+ if let Some(lint_list) = &attr.meta_item_list() {
+ if attr.ident().map_or(false, |ident| is_lint_level(ident.name)) {
+ for lint in lint_list {
+ match item.kind {
+ ItemKind::Use(..) => {
+ if is_word(lint, sym::unused_imports)
+ || is_word(lint, sym::deprecated)
+ || is_word(lint, sym!(unreachable_pub))
+ || is_word(lint, sym!(unused))
+ || extract_clippy_lint(lint).map_or(false, |s| {
+ matches!(
+ s.as_str(),
+ "wildcard_imports"
+ | "enum_glob_use"
+ | "redundant_pub_crate"
+ | "macro_use_imports",
+ )
+ })
+ {
+ return;
+ }
+ },
+ ItemKind::ExternCrate(..) => {
+ if is_word(lint, sym::unused_imports) && skip_unused_imports {
+ return;
+ }
+ if is_word(lint, sym!(unused_extern_crates)) {
+ return;
+ }
+ },
+ _ => {},
+ }
+ }
+ let line_span = first_line_of_span(cx, attr.span);
+
+ if let Some(mut sugg) = snippet_opt(cx, line_span) {
+ if sugg.contains("#[") {
+ span_lint_and_then(
+ cx,
+ USELESS_ATTRIBUTE,
+ line_span,
+ "useless lint attribute",
+ |diag| {
+ sugg = sugg.replacen("#[", "#![", 1);
+ diag.span_suggestion(
+ line_span,
+ "if you just forgot a `!`, use",
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+ }
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
+ if is_relevant_impl(cx, item) {
+ check_attrs(cx, item.span, item.ident.name, cx.tcx.hir().attrs(item.hir_id()));
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
+ if is_relevant_trait(cx, item) {
+ check_attrs(cx, item.span, item.ident.name, cx.tcx.hir().attrs(item.hir_id()));
+ }
+ }
+}
+
+/// Returns the lint name if it is a Clippy lint.
+fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<Symbol> {
+ if_chain! {
+ if let Some(meta_item) = lint.meta_item();
+ if meta_item.path.segments.len() > 1;
+ if let tool_name = meta_item.path.segments[0].ident;
+ if tool_name.name == sym::clippy;
+ then {
+ let lint_name = meta_item.path.segments.last().unwrap().ident.name;
+ return Some(lint_name);
+ }
+ }
+ None
+}
+
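+/// Lints blanket attributes such as `#![deny(clippy::restriction)]` that enable the whole
+/// `restriction` group instead of individual restriction lints.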
+fn check_clippy_lint_names(cx: &LateContext<'_>, name: Symbol, items: &[NestedMetaItem]) {
+ for lint in items {
+ if let Some(lint_name) = extract_clippy_lint(lint) {
+ if lint_name.as_str() == "restriction" && name != sym::allow {
+ span_lint_and_help(
+ cx,
+ BLANKET_CLIPPY_RESTRICTION_LINTS,
+ lint.span(),
+ "restriction lints are not meant to be all enabled",
+ None,
+ "try enabling only the lints you really need",
+ );
+ }
+ }
+ }
+}
+
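+/// With the `lint_reasons` feature enabled, lints `allow`/`expect` attributes whose last
+/// item is not a `reason = ".."` entry.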
+fn check_lint_reason(cx: &LateContext<'_>, name: Symbol, items: &[NestedMetaItem], attr: &'_ Attribute) {
+ // Check for the feature
+ if !cx.tcx.sess.features_untracked().lint_reasons {
+ return;
+ }
+
+ // Check if the reason is present
+ if let Some(item) = items.last().and_then(NestedMetaItem::meta_item)
+ && let MetaItemKind::NameValue(_) = &item.kind
+ && item.path == sym::reason
+ {
+ return;
+ }
+
+ span_lint_and_help(
+ cx,
+ ALLOW_ATTRIBUTES_WITHOUT_REASON,
+ attr.span,
+ &format!("`{}` attribute without specifying a reason", name.as_str()),
+ None,
+ "try adding a reason at the end with `, reason = \"..\"`",
+ );
+}
+
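+// The `is_relevant_*` helpers decide whether `INLINE_ALWAYS` applies: a body consisting only
+// of a panic/`unreachable!()` call or a bare `return`/`break` is not considered relevant.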
+fn is_relevant_item(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
+ if let ItemKind::Fn(_, _, eid) = item.kind {
+ is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value)
+ } else {
+ true
+ }
+}
+
+fn is_relevant_impl(cx: &LateContext<'_>, item: &ImplItem<'_>) -> bool {
+ match item.kind {
+ ImplItemKind::Fn(_, eid) => is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value),
+ _ => false,
+ }
+}
+
+fn is_relevant_trait(cx: &LateContext<'_>, item: &TraitItem<'_>) -> bool {
+ match item.kind {
+ TraitItemKind::Fn(_, TraitFn::Required(_)) => true,
+ TraitItemKind::Fn(_, TraitFn::Provided(eid)) => {
+ is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value)
+ },
+ _ => false,
+ }
+}
+
+fn is_relevant_block(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_>, block: &Block<'_>) -> bool {
+ block.stmts.first().map_or(
+ block
+ .expr
+ .as_ref()
+ .map_or(false, |e| is_relevant_expr(cx, typeck_results, e)),
+ |stmt| match &stmt.kind {
+ StmtKind::Local(_) => true,
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => is_relevant_expr(cx, typeck_results, expr),
+ StmtKind::Item(_) => false,
+ },
+ )
+}
+
+fn is_relevant_expr(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_>, expr: &Expr<'_>) -> bool {
+ if macro_backtrace(expr.span).last().map_or(false, |macro_call| {
+ is_panic(cx, macro_call.def_id) || cx.tcx.item_name(macro_call.def_id) == sym::unreachable
+ }) {
+ return false;
+ }
+ match &expr.kind {
+ ExprKind::Block(block, _) => is_relevant_block(cx, typeck_results, block),
+ ExprKind::Ret(Some(e)) => is_relevant_expr(cx, typeck_results, e),
+ ExprKind::Ret(None) | ExprKind::Break(_, None) => false,
+ _ => true,
+ }
+}
+
+fn check_attrs(cx: &LateContext<'_>, span: Span, name: Symbol, attrs: &[Attribute]) {
+ if span.from_expansion() {
+ return;
+ }
+
+ for attr in attrs {
+ if let Some(values) = attr.meta_item_list() {
+ if values.len() != 1 || !attr.has_name(sym::inline) {
+ continue;
+ }
+ if is_word(&values[0], sym::always) {
+ span_lint(
+ cx,
+ INLINE_ALWAYS,
+ attr.span,
+ &format!(
+ "you have declared `#[inline(always)]` on `{}`. This is usually a bad idea",
+ name
+ ),
+ );
+ }
+ }
+ }
+}
+
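+/// Lints `#[deprecated(since = "..")]` values that are not valid semver strings.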
+fn check_semver(cx: &LateContext<'_>, span: Span, lit: &Lit) {
+ if let LitKind::Str(is, _) = lit.kind {
+ if Version::parse(is.as_str()).is_ok() {
+ return;
+ }
+ }
+ span_lint(
+ cx,
+ DEPRECATED_SEMVER,
+ span,
+ "the since field must contain a semver-compliant version",
+ );
+}
+
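+/// Returns `true` if the nested meta item is a bare word with the given name,
+/// e.g. `always` in `#[inline(always)]`.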
+fn is_word(nmi: &NestedMetaItem, expected: Symbol) -> bool {
+ if let NestedMetaItem::MetaItem(mi) = &nmi {
+ mi.is_word() && mi.has_name(expected)
+ } else {
+ false
+ }
+}
+
+pub struct EarlyAttributes {
+ pub msrv: Option<RustcVersion>,
+}
+
+impl_lint_pass!(EarlyAttributes => [
+ DEPRECATED_CFG_ATTR,
+ MISMATCHED_TARGET_OS,
+ EMPTY_LINE_AFTER_OUTER_ATTR,
+]);
+
+impl EarlyLintPass for EarlyAttributes {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &rustc_ast::Item) {
+ check_empty_line_after_outer_attr(cx, item);
+ }
+
+ fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &Attribute) {
+ check_deprecated_cfg_attr(cx, attr, self.msrv);
+ check_mismatched_target_os(cx, attr);
+ }
+
+ extract_msrv_attr!(EarlyContext);
+}
+
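+/// Lints empty lines between an outer attribute and the item it applies to, which usually
+/// means the `!` of an intended inner attribute was forgotten.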
+fn check_empty_line_after_outer_attr(cx: &EarlyContext<'_>, item: &rustc_ast::Item) {
+ let mut iter = item.attrs.iter().peekable();
+ while let Some(attr) = iter.next() {
+ if matches!(attr.kind, AttrKind::Normal(..))
+ && attr.style == AttrStyle::Outer
+ && is_present_in_source(cx, attr.span)
+ {
+ let begin_of_attr_to_item = Span::new(attr.span.lo(), item.span.lo(), item.span.ctxt(), item.span.parent());
+ let end_of_attr_to_next_attr_or_item = Span::new(
+ attr.span.hi(),
+ iter.peek().map_or(item.span.lo(), |next_attr| next_attr.span.lo()),
+ item.span.ctxt(),
+ item.span.parent(),
+ );
+
+ if let Some(snippet) = snippet_opt(cx, end_of_attr_to_next_attr_or_item) {
+ let lines = snippet.split('\n').collect::<Vec<_>>();
+ let lines = without_block_comments(lines);
+
+ if lines.iter().filter(|l| l.trim().is_empty()).count() > 2 {
+ span_lint(
+ cx,
+ EMPTY_LINE_AFTER_OUTER_ATTR,
+ begin_of_attr_to_item,
+ "found an empty line after an outer attribute. \
+ Perhaps you forgot to add a `!` to make it an inner attribute?",
+ );
+ }
+ }
+ }
+ }
+}
+
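+// Both `#[cfg_attr(rustfmt, rustfmt_skip)]` and `#[cfg_attr(rustfmt, rustfmt::skip)]` are
+// suggested to be replaced with `#[rustfmt::skip]`; only outer attributes are linted.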
+fn check_deprecated_cfg_attr(cx: &EarlyContext<'_>, attr: &Attribute, msrv: Option<RustcVersion>) {
+ if_chain! {
+ if meets_msrv(msrv, msrvs::TOOL_ATTRIBUTES);
+ // check cfg_attr
+ if attr.has_name(sym::cfg_attr);
+ if let Some(items) = attr.meta_item_list();
+ if items.len() == 2;
+ // check for `rustfmt`
+ if let Some(feature_item) = items[0].meta_item();
+ if feature_item.has_name(sym::rustfmt);
+ // check for `rustfmt_skip` and `rustfmt::skip`
+ if let Some(skip_item) = &items[1].meta_item();
+ if skip_item.has_name(sym!(rustfmt_skip))
+ || skip_item
+ .path
+ .segments
+ .last()
+ .expect("empty path in attribute")
+ .ident
+ .name
+ == sym::skip;
+ // Only lint outer attributes, because custom inner attributes are unstable
+ // Tracking issue: https://github.com/rust-lang/rust/issues/54726
+ if attr.style == AttrStyle::Outer;
+ then {
+ span_lint_and_sugg(
+ cx,
+ DEPRECATED_CFG_ATTR,
+ attr.span,
+ "`cfg_attr` is deprecated for rustfmt and got replaced by tool attributes",
+ "use",
+ "#[rustfmt::skip]".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
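+// e.g. `#[cfg(linux)]` gets the suggestion `target_os = "linux"`, plus a "did you mean `unix`?"
+// help the first time a unix-like system is encountered.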
+fn check_mismatched_target_os(cx: &EarlyContext<'_>, attr: &Attribute) {
+ fn find_os(name: &str) -> Option<&'static str> {
+ UNIX_SYSTEMS
+ .iter()
+ .chain(NON_UNIX_SYSTEMS.iter())
+ .find(|&&os| os == name)
+ .copied()
+ }
+
+ fn is_unix(name: &str) -> bool {
+ UNIX_SYSTEMS.iter().any(|&os| os == name)
+ }
+
+ fn find_mismatched_target_os(items: &[NestedMetaItem]) -> Vec<(&str, Span)> {
+ let mut mismatched = Vec::new();
+
+ for item in items {
+ if let NestedMetaItem::MetaItem(meta) = item {
+ match &meta.kind {
+ MetaItemKind::List(list) => {
+ mismatched.extend(find_mismatched_target_os(list));
+ },
+ MetaItemKind::Word => {
+ if_chain! {
+ if let Some(ident) = meta.ident();
+ if let Some(os) = find_os(ident.name.as_str());
+ then {
+ mismatched.push((os, ident.span));
+ }
+ }
+ },
+ MetaItemKind::NameValue(..) => {},
+ }
+ }
+ }
+
+ mismatched
+ }
+
+ if_chain! {
+ if attr.has_name(sym::cfg);
+ if let Some(list) = attr.meta_item_list();
+ let mismatched = find_mismatched_target_os(&list);
+ if !mismatched.is_empty();
+ then {
+ let mess = "operating system used in target family position";
+
+ span_lint_and_then(cx, MISMATCHED_TARGET_OS, attr.span, mess, |diag| {
+ // Avoid showing the unix suggestion multiple times in case
+ // we have more than one mismatch for unix-like systems
+ let mut unix_suggested = false;
+
+ for (os, span) in mismatched {
+ let sugg = format!("target_os = \"{}\"", os);
+ diag.span_suggestion(span, "try", sugg, Applicability::MaybeIncorrect);
+
+ if !unix_suggested && is_unix(os) {
+ diag.help("did you mean `unix`?");
+ unix_suggested = true;
+ }
+ }
+ });
+ }
+ }
+}
+
+fn is_lint_level(symbol: Symbol) -> bool {
+ matches!(symbol, sym::allow | sym::expect | sym::warn | sym::deny | sym::forbid)
+}
diff --git a/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs b/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs
new file mode 100644
index 000000000..1761360fb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs
@@ -0,0 +1,289 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{match_def_path, paths};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{def::Res, AsyncGeneratorKind, Body, BodyId, GeneratorKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::GeneratorInteriorTypeCause;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
+
+use crate::utils::conf::DisallowedType;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to await while holding a non-async-aware MutexGuard.
+ ///
+ /// ### Why is this bad?
+ /// The Mutex types found in std::sync and parking_lot
+ /// are not designed to operate in an async context across await points.
+ ///
+ /// There are two potential solutions. One is to use an async-aware Mutex
+ /// type. Many asynchronous foundation crates provide such a Mutex type. The
+ /// other solution is to ensure the mutex is unlocked before calling await,
+ /// either by introducing a scope or an explicit call to Drop::drop.
+ ///
+ /// ### Known problems
+ /// Will report false positives for explicitly dropped guards
+ /// ([#6446](https://github.com/rust-lang/rust-clippy/issues/6446)). A workaround for this is
+ /// to wrap the `.lock()` call in a block instead of explicitly dropping the guard.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::sync::Mutex;
+ /// # async fn baz() {}
+ /// async fn foo(x: &Mutex<u32>) {
+ /// let mut guard = x.lock().unwrap();
+ /// *guard += 1;
+ /// baz().await;
+ /// }
+ ///
+ /// async fn bar(x: &Mutex<u32>) {
+ /// let mut guard = x.lock().unwrap();
+ /// *guard += 1;
+ /// drop(guard); // explicit drop
+ /// baz().await;
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::sync::Mutex;
+ /// # async fn baz() {}
+ /// async fn foo(x: &Mutex<u32>) {
+ /// {
+ /// let mut guard = x.lock().unwrap();
+ /// *guard += 1;
+ /// }
+ /// baz().await;
+ /// }
+ ///
+ /// async fn bar(x: &Mutex<u32>) {
+ /// {
+ /// let mut guard = x.lock().unwrap();
+ /// *guard += 1;
+ /// } // guard dropped here at end of scope
+ /// baz().await;
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub AWAIT_HOLDING_LOCK,
+ suspicious,
+ "inside an async function, holding a `MutexGuard` while calling `await`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to await while holding a `RefCell` `Ref` or `RefMut`.
+ ///
+ /// ### Why is this bad?
+ /// `RefCell` refs only check for exclusive mutable access
+ /// at runtime. Holding onto a `RefCell` ref across an `await` suspension point
+ /// risks panics from a mutable ref shared while other refs are outstanding.
+ ///
+ /// ### Known problems
+ /// Will report false positives for explicitly dropped refs
+ /// ([#6353](https://github.com/rust-lang/rust-clippy/issues/6353)). A workaround for this is
+ /// to wrap the `.borrow[_mut]()` call in a block instead of explicitly dropping the ref.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::cell::RefCell;
+ /// # async fn baz() {}
+ /// async fn foo(x: &RefCell<u32>) {
+ /// let mut y = x.borrow_mut();
+ /// *y += 1;
+ /// baz().await;
+ /// }
+ ///
+ /// async fn bar(x: &RefCell<u32>) {
+ /// let mut y = x.borrow_mut();
+ /// *y += 1;
+ /// drop(y); // explicit drop
+ /// baz().await;
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::cell::RefCell;
+ /// # async fn baz() {}
+ /// async fn foo(x: &RefCell<u32>) {
+ /// {
+ /// let mut y = x.borrow_mut();
+ /// *y += 1;
+ /// }
+ /// baz().await;
+ /// }
+ ///
+ /// async fn bar(x: &RefCell<u32>) {
+ /// {
+ /// let mut y = x.borrow_mut();
+ /// *y += 1;
+ /// } // y dropped here at end of scope
+ /// baz().await;
+ /// }
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub AWAIT_HOLDING_REFCELL_REF,
+ suspicious,
+ "inside an async function, holding a `RefCell` ref while calling `await`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Allows users to configure types which should not be held across `await`
+ /// suspension points.
+ ///
+ /// ### Why is this bad?
+ /// Some types are perfectly safe to use concurrently from a memory-access
+ /// perspective but still cause bugs at runtime if they are held across an
+ /// `await` suspension point.
+ ///
+ /// ### Example
+ ///
+ /// ```toml
+ /// await-holding-invalid-types = [
+ /// # You can specify a type name
+ /// "CustomLockType",
+ /// # You can (optionally) specify a reason
+ /// { path = "OtherCustomLockType", reason = "Relies on a thread local" }
+ /// ]
+ /// ```
+ ///
+ /// ```rust
+ /// # async fn baz() {}
+ /// struct CustomLockType;
+ /// struct OtherCustomLockType;
+ /// async fn foo() {
+ /// let _x = CustomLockType;
+ /// let _y = OtherCustomLockType;
+ /// baz().await; // Lint violation
+ /// }
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub AWAIT_HOLDING_INVALID_TYPE,
+ suspicious,
+ "holding a type across an await point which is not allowed to be held as per the configuration"
+}
+
+impl_lint_pass!(AwaitHolding => [AWAIT_HOLDING_LOCK, AWAIT_HOLDING_REFCELL_REF, AWAIT_HOLDING_INVALID_TYPE]);
+
+#[derive(Debug)]
+pub struct AwaitHolding {
+ conf_invalid_types: Vec<DisallowedType>,
+ def_ids: FxHashMap<DefId, DisallowedType>,
+}
+
+impl AwaitHolding {
+ pub(crate) fn new(conf_invalid_types: Vec<DisallowedType>) -> Self {
+ Self {
+ conf_invalid_types,
+ def_ids: FxHashMap::default(),
+ }
+ }
+}
+
+impl LateLintPass<'_> for AwaitHolding {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ for conf in &self.conf_invalid_types {
+ let path = match conf {
+ DisallowedType::Simple(path) | DisallowedType::WithReason { path, .. } => path,
+ };
+ let segs: Vec<_> = path.split("::").collect();
+ if let Res::Def(_, id) = clippy_utils::def_path_res(cx, &segs) {
+ self.def_ids.insert(id, conf.clone());
+ }
+ }
+ }
+
+ fn check_body(&mut self, cx: &LateContext<'_>, body: &'_ Body<'_>) {
+ use AsyncGeneratorKind::{Block, Closure, Fn};
+ if let Some(GeneratorKind::Async(Block | Closure | Fn)) = body.generator_kind {
+ let body_id = BodyId {
+ hir_id: body.value.hir_id,
+ };
+ let typeck_results = cx.tcx.typeck_body(body_id);
+ self.check_interior_types(
+ cx,
+ typeck_results.generator_interior_types.as_ref().skip_binder(),
+ body.value.span,
+ );
+ }
+ }
+}
+
+impl AwaitHolding {
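+ /// Inspects the types held across the generator's `await` points and emits the matching
+ /// lint for mutex/rwlock guards, `RefCell` refs, and user-configured disallowed types.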
+ fn check_interior_types(&self, cx: &LateContext<'_>, ty_causes: &[GeneratorInteriorTypeCause<'_>], span: Span) {
+ for ty_cause in ty_causes {
+ if let rustc_middle::ty::Adt(adt, _) = ty_cause.ty.kind() {
+ if is_mutex_guard(cx, adt.did()) {
+ span_lint_and_then(
+ cx,
+ AWAIT_HOLDING_LOCK,
+ ty_cause.span,
+ "this `MutexGuard` is held across an `await` point",
+ |diag| {
+ diag.help(
+ "consider using an async-aware `Mutex` type or ensuring the \
+ `MutexGuard` is dropped before calling await",
+ );
+ diag.span_note(
+ ty_cause.scope_span.unwrap_or(span),
+ "these are all the `await` points this lock is held through",
+ );
+ },
+ );
+ } else if is_refcell_ref(cx, adt.did()) {
+ span_lint_and_then(
+ cx,
+ AWAIT_HOLDING_REFCELL_REF,
+ ty_cause.span,
+ "this `RefCell` reference is held across an `await` point",
+ |diag| {
+ diag.help("ensure the reference is dropped before calling `await`");
+ diag.span_note(
+ ty_cause.scope_span.unwrap_or(span),
+ "these are all the `await` points this reference is held through",
+ );
+ },
+ );
+ } else if let Some(disallowed) = self.def_ids.get(&adt.did()) {
+ emit_invalid_type(cx, ty_cause.span, disallowed);
+ }
+ }
+ }
+ }
+}
+
+fn emit_invalid_type(cx: &LateContext<'_>, span: Span, disallowed: &DisallowedType) {
+ let (type_name, reason) = match disallowed {
+ DisallowedType::Simple(path) => (path, &None),
+ DisallowedType::WithReason { path, reason } => (path, reason),
+ };
+
+ span_lint_and_then(
+ cx,
+ AWAIT_HOLDING_INVALID_TYPE,
+ span,
+ &format!("`{type_name}` may not be held across an `await` point per `clippy.toml`",),
+ |diag| {
+ if let Some(reason) = reason {
+ diag.note(reason.clone());
+ }
+ },
+ );
+}
+
+fn is_mutex_guard(cx: &LateContext<'_>, def_id: DefId) -> bool {
+ match_def_path(cx, def_id, &paths::MUTEX_GUARD)
+ || match_def_path(cx, def_id, &paths::RWLOCK_READ_GUARD)
+ || match_def_path(cx, def_id, &paths::RWLOCK_WRITE_GUARD)
+ || match_def_path(cx, def_id, &paths::PARKING_LOT_MUTEX_GUARD)
+ || match_def_path(cx, def_id, &paths::PARKING_LOT_RWLOCK_READ_GUARD)
+ || match_def_path(cx, def_id, &paths::PARKING_LOT_RWLOCK_WRITE_GUARD)
+}
+
+fn is_refcell_ref(cx: &LateContext<'_>, def_id: DefId) -> bool {
+ match_def_path(cx, def_id, &paths::REFCELL_REF) || match_def_path(cx, def_id, &paths::REFCELL_REFMUT)
+}
diff --git a/src/tools/clippy/clippy_lints/src/blacklisted_name.rs b/src/tools/clippy/clippy_lints/src/blacklisted_name.rs
new file mode 100644
index 000000000..1600fb25d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/blacklisted_name.rs
@@ -0,0 +1,77 @@
+use clippy_utils::{diagnostics::span_lint, is_test_module_or_function};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::{Item, Pat, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of blacklisted names for variables, such
+ /// as `foo`.
+ ///
+ /// ### Why is this bad?
+ /// These names are usually placeholder names and should be
+ /// avoided.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let foo = 3.14;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BLACKLISTED_NAME,
+ style,
+ "usage of a blacklisted/placeholder name"
+}
+
+#[derive(Clone, Debug)]
+pub struct BlacklistedName {
+ blacklist: FxHashSet<String>,
+ test_modules_deep: u32,
+}
+
+impl BlacklistedName {
+ pub fn new(blacklist: FxHashSet<String>) -> Self {
+ Self {
+ blacklist,
+ test_modules_deep: 0,
+ }
+ }
+
+ fn in_test_module(&self) -> bool {
+ self.test_modules_deep != 0
+ }
+}
+
+impl_lint_pass!(BlacklistedName => [BLACKLISTED_NAME]);
+
+impl<'tcx> LateLintPass<'tcx> for BlacklistedName {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if is_test_module_or_function(cx.tcx, item) {
+ self.test_modules_deep = self.test_modules_deep.saturating_add(1);
+ }
+ }
+
+ fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
+ // Check whether we are inside a test module or function.
+ if self.in_test_module() {
+ return;
+ }
+
+ if let PatKind::Binding(.., ident, _) = pat.kind {
+ if self.blacklist.contains(&ident.name.to_string()) {
+ span_lint(
+ cx,
+ BLACKLISTED_NAME,
+ ident.span,
+ &format!("use of a blacklisted/placeholder name `{}`", ident.name),
+ );
+ }
+ }
+ }
+
+ fn check_item_post(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if is_test_module_or_function(cx.tcx, item) {
+ self.test_modules_deep = self.test_modules_deep.saturating_sub(1);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
new file mode 100644
index 000000000..ad206b5fb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
@@ -0,0 +1,155 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::get_parent_expr;
+use clippy_utils::higher;
+use clippy_utils::source::snippet_block_with_applicability;
+use clippy_utils::ty::implements_trait;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{BlockCheckMode, Closure, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `if` conditions that use blocks containing an
+ /// expression, statements or conditions that use closures with blocks.
+ ///
+ /// ### Why is this bad?
+ /// Style: using blocks in the condition makes the code hard to read.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// # fn somefunc() -> bool { true };
+ /// if { true } { /* ... */ }
+ ///
+ /// if { let x = somefunc(); x } { /* ... */ }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn somefunc() -> bool { true };
+ /// if true { /* ... */ }
+ ///
+ /// let res = { let x = somefunc(); x };
+ /// if res { /* ... */ }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub BLOCKS_IN_IF_CONDITIONS,
+ style,
+ "useless or complex blocks that can be eliminated in conditions"
+}
+
+declare_lint_pass!(BlocksInIfConditions => [BLOCKS_IN_IF_CONDITIONS]);
+
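+/// Visitor that looks for closures with non-empty block bodies inside the condition,
+/// skipping closures passed to `Iterator` methods (see #1141).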
+struct ExVisitor<'a, 'tcx> {
+ found_block: Option<&'tcx Expr<'tcx>>,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for ExVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
+ if let ExprKind::Closure(&Closure { body, .. }) = expr.kind {
+ // do not lint if the closure is called using an iterator (see #1141)
+ if_chain! {
+ if let Some(parent) = get_parent_expr(self.cx, expr);
+ if let ExprKind::MethodCall(_, [self_arg, ..], _) = &parent.kind;
+ let caller = self.cx.typeck_results().expr_ty(self_arg);
+ if let Some(iter_id) = self.cx.tcx.get_diagnostic_item(sym::Iterator);
+ if implements_trait(self.cx, caller, iter_id, &[]);
+ then {
+ return;
+ }
+ }
+
+ let body = self.cx.tcx.hir().body(body);
+ let ex = &body.value;
+ if let ExprKind::Block(block, _) = ex.kind {
+ if !body.value.span.from_expansion() && !block.stmts.is_empty() {
+ self.found_block = Some(ex);
+ return;
+ }
+ }
+ }
+ walk_expr(self, expr);
+ }
+}
+
+const BRACED_EXPR_MESSAGE: &str = "omit braces around single expression condition";
+const COMPLEX_BLOCK_MESSAGE: &str = "in an `if` condition, avoid complex blocks or closures with blocks; \
+ instead, move the block or closure higher and bind it with a `let`";
+
+impl<'tcx> LateLintPass<'tcx> for BlocksInIfConditions {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+ if let Some(higher::If { cond, .. }) = higher::If::hir(expr) {
+ if let ExprKind::Block(block, _) = &cond.kind {
+ if block.rules == BlockCheckMode::DefaultBlock {
+ if block.stmts.is_empty() {
+ if let Some(ex) = &block.expr {
+ // don't dig into the expression here, just suggest that they remove
+ // the block
+ if expr.span.from_expansion() || ex.span.from_expansion() {
+ return;
+ }
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BLOCKS_IN_IF_CONDITIONS,
+ cond.span,
+ BRACED_EXPR_MESSAGE,
+ "try",
+ format!(
+ "{}",
+ snippet_block_with_applicability(
+ cx,
+ ex.span,
+ "..",
+ Some(expr.span),
+ &mut applicability
+ )
+ ),
+ applicability,
+ );
+ }
+ } else {
+ let span = block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span);
+ if span.from_expansion() || expr.span.from_expansion() {
+ return;
+ }
+ // move block higher
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BLOCKS_IN_IF_CONDITIONS,
+ expr.span.with_hi(cond.span.hi()),
+ COMPLEX_BLOCK_MESSAGE,
+ "try",
+ format!(
+ "let res = {}; if res",
+ snippet_block_with_applicability(
+ cx,
+ block.span,
+ "..",
+ Some(expr.span),
+ &mut applicability
+ ),
+ ),
+ applicability,
+ );
+ }
+ }
+ } else {
+ let mut visitor = ExVisitor { found_block: None, cx };
+ walk_expr(&mut visitor, cond);
+ if let Some(block) = visitor.found_block {
+ span_lint(cx, BLOCKS_IN_IF_CONDITIONS, block.span, COMPLEX_BLOCK_MESSAGE);
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
new file mode 100644
index 000000000..95abe8aa5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
@@ -0,0 +1,107 @@
+use clippy_utils::macros::{find_assert_eq_args, root_macro_call_first_node};
+use clippy_utils::{diagnostics::span_lint_and_sugg, ty::implements_trait};
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, Lit};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::Ident;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns about boolean comparisons in assert-like macros.
+ ///
+ /// ### Why is this bad?
+ /// It is shorter to use the equivalent.
+ ///
+ /// ### Example
+ /// ```rust
+ /// assert_eq!("a".is_empty(), false);
+ /// assert_ne!("a".is_empty(), true);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// assert!(!"a".is_empty());
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub BOOL_ASSERT_COMPARISON,
+ style,
+ "Using a boolean as comparison value in an assert_* macro when there is no need"
+}
+
+declare_lint_pass!(BoolAssertComparison => [BOOL_ASSERT_COMPARISON]);
+
+fn is_bool_lit(e: &Expr<'_>) -> bool {
+ matches!(
+ e.kind,
+ ExprKind::Lit(Lit {
+ node: LitKind::Bool(_),
+ ..
+ })
+ ) && !e.span.from_expansion()
+}
+
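+/// Returns `true` if the expression's type implements `Not` with `Output = bool`; the
+/// `assert!`-style rewrite is only suggested when both operands satisfy this.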
+fn is_impl_not_trait_with_bool_out(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ let ty = cx.typeck_results().expr_ty(e);
+
+ cx.tcx
+ .lang_items()
+ .not_trait()
+ .filter(|trait_id| implements_trait(cx, ty, *trait_id, &[]))
+ .and_then(|trait_id| {
+ cx.tcx.associated_items(trait_id).find_by_name_and_kind(
+ cx.tcx,
+ Ident::from_str("Output"),
+ ty::AssocKind::Type,
+ trait_id,
+ )
+ })
+ .map_or(false, |assoc_item| {
+ let proj = cx.tcx.mk_projection(assoc_item.def_id, cx.tcx.mk_substs_trait(ty, &[]));
+ let nty = cx.tcx.normalize_erasing_regions(cx.param_env, proj);
+
+ nty.is_bool()
+ })
+}
+
+impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let macro_name = cx.tcx.item_name(macro_call.def_id);
+ if !matches!(
+ macro_name.as_str(),
+ "assert_eq" | "debug_assert_eq" | "assert_ne" | "debug_assert_ne"
+ ) {
+ return;
+ }
+ let Some ((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return };
+ if !(is_bool_lit(a) ^ is_bool_lit(b)) {
+ // If there are two boolean arguments, we definitely don't understand
+ // what's going on, so better leave things as is...
+ //
+ // Or there is simply no boolean and then we can leave things as is!
+ return;
+ }
+
+ if !is_impl_not_trait_with_bool_out(cx, a) || !is_impl_not_trait_with_bool_out(cx, b) {
+ // At this point the expression which is not a boolean
+ // literal does not implement Not trait with a bool output,
+ // so we cannot suggest to rewrite our code
+ return;
+ }
+
+ let macro_name = macro_name.as_str();
+ let non_eq_mac = &macro_name[..macro_name.len() - 3];
+ span_lint_and_sugg(
+ cx,
+ BOOL_ASSERT_COMPARISON,
+ macro_call.span,
+ &format!("used `{}!` with a literal bool", macro_name),
+ "replace it with",
+ format!("{}!(..)", non_eq_mac),
+ Applicability::MaybeIncorrect,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/booleans.rs b/src/tools/clippy/clippy_lints/src/booleans.rs
new file mode 100644
index 000000000..526ee2f89
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/booleans.rs
@@ -0,0 +1,512 @@
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_hir_and_then};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use clippy_utils::{eq_expr_value, get_trait_def_id, paths};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
+use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for boolean expressions that can be written more
+ /// concisely.
+ ///
+ /// ### Why is this bad?
+ /// Readability of boolean expressions suffers from
+ /// unnecessary duplication.
+ ///
+ /// ### Known problems
+ /// Ignores short circuiting behavior of `||` and
+ /// `&&`. Ignores `|`, `&` and `^`.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// if a && true {}
+ /// if !(a == b) {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// if a {}
+ /// if a != b {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NONMINIMAL_BOOL,
+ complexity,
+ "boolean expressions that can be written more concisely"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for boolean expressions that contain terminals that
+ /// can be eliminated.
+ ///
+ /// ### Why is this bad?
+ /// This is most likely a logic bug.
+ ///
+ /// ### Known problems
+ /// Ignores short circuiting behavior.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// // The `b` is unnecessary, the expression is equivalent to `if a`.
+ /// if a && b || a { ... }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// if a {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LOGIC_BUG,
+ correctness,
+ "boolean expressions that contain terminals which can be eliminated"
+}
+
+// For each pair, both orders are considered.
+const METHODS_WITH_NEGATION: [(&str, &str); 2] = [("is_some", "is_none"), ("is_err", "is_ok")];
+
+declare_lint_pass!(NonminimalBool => [NONMINIMAL_BOOL, LOGIC_BUG]);
+
+impl<'tcx> LateLintPass<'tcx> for NonminimalBool {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ _: FnKind<'tcx>,
+ _: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ _: Span,
+ _: HirId,
+ ) {
+ NonminimalBoolVisitor { cx }.visit_body(body);
+ }
+}
+
+struct NonminimalBoolVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+}
+
+use quine_mc_cluskey::Bool;
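+/// Translates a HIR boolean expression into a `quine_mc_cluskey::Bool`, collecting the
+/// non-boolean sub-expressions as numbered terminals so they can be substituted back into
+/// the simplified suggestion.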
+struct Hir2Qmm<'a, 'tcx, 'v> {
+ terminals: Vec<&'v Expr<'v>>,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx, 'v> Hir2Qmm<'a, 'tcx, 'v> {
+ fn extract(&mut self, op: BinOpKind, a: &[&'v Expr<'_>], mut v: Vec<Bool>) -> Result<Vec<Bool>, String> {
+ for a in a {
+ if let ExprKind::Binary(binop, lhs, rhs) = &a.kind {
+ if binop.node == op {
+ v = self.extract(op, &[lhs, rhs], v)?;
+ continue;
+ }
+ }
+ v.push(self.run(a)?);
+ }
+ Ok(v)
+ }
+
+ fn run(&mut self, e: &'v Expr<'_>) -> Result<Bool, String> {
+ fn negate(bin_op_kind: BinOpKind) -> Option<BinOpKind> {
+ match bin_op_kind {
+ BinOpKind::Eq => Some(BinOpKind::Ne),
+ BinOpKind::Ne => Some(BinOpKind::Eq),
+ BinOpKind::Gt => Some(BinOpKind::Le),
+ BinOpKind::Ge => Some(BinOpKind::Lt),
+ BinOpKind::Lt => Some(BinOpKind::Ge),
+ BinOpKind::Le => Some(BinOpKind::Gt),
+ _ => None,
+ }
+ }
+
+ // prevent folding of `cfg!` macros and the like
+ if !e.span.from_expansion() {
+ match &e.kind {
+ ExprKind::Unary(UnOp::Not, inner) => return Ok(Bool::Not(Box::new(self.run(inner)?))),
+ ExprKind::Binary(binop, lhs, rhs) => match &binop.node {
+ BinOpKind::Or => {
+ return Ok(Bool::Or(self.extract(BinOpKind::Or, &[lhs, rhs], Vec::new())?));
+ },
+ BinOpKind::And => {
+ return Ok(Bool::And(self.extract(BinOpKind::And, &[lhs, rhs], Vec::new())?));
+ },
+ _ => (),
+ },
+ ExprKind::Lit(lit) => match lit.node {
+ LitKind::Bool(true) => return Ok(Bool::True),
+ LitKind::Bool(false) => return Ok(Bool::False),
+ _ => (),
+ },
+ _ => (),
+ }
+ }
+ for (n, expr) in self.terminals.iter().enumerate() {
+ if eq_expr_value(self.cx, e, expr) {
+ #[expect(clippy::cast_possible_truncation)]
+ return Ok(Bool::Term(n as u8));
+ }
+
+ if_chain! {
+ if let ExprKind::Binary(e_binop, e_lhs, e_rhs) = &e.kind;
+ if implements_ord(self.cx, e_lhs);
+ if let ExprKind::Binary(expr_binop, expr_lhs, expr_rhs) = &expr.kind;
+ if negate(e_binop.node) == Some(expr_binop.node);
+ if eq_expr_value(self.cx, e_lhs, expr_lhs);
+ if eq_expr_value(self.cx, e_rhs, expr_rhs);
+ then {
+ #[expect(clippy::cast_possible_truncation)]
+ return Ok(Bool::Not(Box::new(Bool::Term(n as u8))));
+ }
+ }
+ }
+ let n = self.terminals.len();
+ self.terminals.push(e);
+ if n < 32 {
+ #[expect(clippy::cast_possible_truncation)]
+ Ok(Bool::Term(n as u8))
+ } else {
+ Err("too many literals".to_owned())
+ }
+ }
+}
+
+struct SuggestContext<'a, 'tcx, 'v> {
+ terminals: &'v [&'v Expr<'v>],
+ cx: &'a LateContext<'tcx>,
+ output: String,
+}
+
+impl<'a, 'tcx, 'v> SuggestContext<'a, 'tcx, 'v> {
+ fn recurse(&mut self, suggestion: &Bool) -> Option<()> {
+ use quine_mc_cluskey::Bool::{And, False, Not, Or, Term, True};
+ match suggestion {
+ True => {
+ self.output.push_str("true");
+ },
+ False => {
+ self.output.push_str("false");
+ },
+ Not(inner) => match **inner {
+ And(_) | Or(_) => {
+ self.output.push('!');
+ self.output.push('(');
+ self.recurse(inner);
+ self.output.push(')');
+ },
+ Term(n) => {
+ let terminal = self.terminals[n as usize];
+ if let Some(str) = simplify_not(self.cx, terminal) {
+ self.output.push_str(&str);
+ } else {
+ self.output.push('!');
+ let snip = snippet_opt(self.cx, terminal.span)?;
+ self.output.push_str(&snip);
+ }
+ },
+ True | False | Not(_) => {
+ self.output.push('!');
+ self.recurse(inner)?;
+ },
+ },
+ And(v) => {
+ for (index, inner) in v.iter().enumerate() {
+ if index > 0 {
+ self.output.push_str(" && ");
+ }
+ if let Or(_) = *inner {
+ self.output.push('(');
+ self.recurse(inner);
+ self.output.push(')');
+ } else {
+ self.recurse(inner);
+ }
+ }
+ },
+ Or(v) => {
+ for (index, inner) in v.iter().rev().enumerate() {
+ if index > 0 {
+ self.output.push_str(" || ");
+ }
+ self.recurse(inner);
+ }
+ },
+ &Term(n) => {
+ let snip = snippet_opt(self.cx, self.terminals[n as usize].span)?;
+ self.output.push_str(&snip);
+ },
+ }
+ Some(())
+ }
+}
+
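+/// Renders the negation of a simple expression, e.g. `a == b` becomes `a != b` (only for
+/// `Ord` types) and `x.is_some()` becomes `x.is_none()` for `Option`/`Result` receivers.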
+fn simplify_not(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<String> {
+ match &expr.kind {
+ ExprKind::Binary(binop, lhs, rhs) => {
+ if !implements_ord(cx, lhs) {
+ return None;
+ }
+
+ match binop.node {
+ BinOpKind::Eq => Some(" != "),
+ BinOpKind::Ne => Some(" == "),
+ BinOpKind::Lt => Some(" >= "),
+ BinOpKind::Gt => Some(" <= "),
+ BinOpKind::Le => Some(" > "),
+ BinOpKind::Ge => Some(" < "),
+ _ => None,
+ }
+ .and_then(|op| {
+ Some(format!(
+ "{}{}{}",
+ snippet_opt(cx, lhs.span)?,
+ op,
+ snippet_opt(cx, rhs.span)?
+ ))
+ })
+ },
+ ExprKind::MethodCall(path, args, _) if args.len() == 1 => {
+ let type_of_receiver = cx.typeck_results().expr_ty(&args[0]);
+ if !is_type_diagnostic_item(cx, type_of_receiver, sym::Option)
+ && !is_type_diagnostic_item(cx, type_of_receiver, sym::Result)
+ {
+ return None;
+ }
+ METHODS_WITH_NEGATION
+ .iter()
+ .copied()
+ .flat_map(|(a, b)| vec![(a, b), (b, a)])
+ .find(|&(a, _)| {
+ let path: &str = path.ident.name.as_str();
+ a == path
+ })
+ .and_then(|(_, neg_method)| Some(format!("{}.{}()", snippet_opt(cx, args[0].span)?, neg_method)))
+ },
+ _ => None,
+ }
+}
+
+fn suggest(cx: &LateContext<'_>, suggestion: &Bool, terminals: &[&Expr<'_>]) -> String {
+ let mut suggest_context = SuggestContext {
+ terminals,
+ cx,
+ output: String::new(),
+ };
+ suggest_context.recurse(suggestion);
+ suggest_context.output
+}
+
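+/// Negates a `Bool` term without fully expanding it, pushing the negation inwards via
+/// De Morgan's laws (`!(a && b)` -> `!a || !b`, and vice versa).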
+fn simple_negate(b: Bool) -> Bool {
+ use quine_mc_cluskey::Bool::{And, False, Not, Or, Term, True};
+ match b {
+ True => False,
+ False => True,
+ t @ Term(_) => Not(Box::new(t)),
+ And(mut v) => {
+ for el in &mut v {
+ *el = simple_negate(::std::mem::replace(el, True));
+ }
+ Or(v)
+ },
+ Or(mut v) => {
+ for el in &mut v {
+ *el = simple_negate(::std::mem::replace(el, True));
+ }
+ And(v)
+ },
+ Not(inner) => *inner,
+ }
+}
+
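+/// Complexity statistics for a boolean expression: how often each terminal occurs, plus the
+/// number of negations and operators. Used to decide whether a simplification is an improvement.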
+#[derive(Default)]
+struct Stats {
+ terminals: [usize; 32],
+ negations: usize,
+ ops: usize,
+}
+
+fn terminal_stats(b: &Bool) -> Stats {
+ fn recurse(b: &Bool, stats: &mut Stats) {
+ match b {
+ True | False => stats.ops += 1,
+ Not(inner) => {
+ match **inner {
+ And(_) | Or(_) => stats.ops += 1, // brackets are also operations
+ _ => stats.negations += 1,
+ }
+ recurse(inner, stats);
+ },
+ And(v) | Or(v) => {
+ stats.ops += v.len() - 1;
+ for inner in v {
+ recurse(inner, stats);
+ }
+ },
+ &Term(n) => stats.terminals[n as usize] += 1,
+ }
+ }
+ use quine_mc_cluskey::Bool::{And, False, Not, Or, Term, True};
+ let mut stats = Stats::default();
+ recurse(b, &mut stats);
+ stats
+}
+
+impl<'a, 'tcx> NonminimalBoolVisitor<'a, 'tcx> {
+ fn bool_expr(&self, e: &'tcx Expr<'_>) {
+ let mut h2q = Hir2Qmm {
+ terminals: Vec::new(),
+ cx: self.cx,
+ };
+ if let Ok(expr) = h2q.run(e) {
+ if h2q.terminals.len() > 8 {
+ // QMC has exponentially slow behavior as the number of terminals increases
+ // 8 is reasonable, it takes approximately 0.2 seconds.
+ // See #825
+ return;
+ }
+
+ let stats = terminal_stats(&expr);
+ let mut simplified = expr.simplify();
+ for simple in Bool::Not(Box::new(expr)).simplify() {
+ match simple {
+ Bool::Not(_) | Bool::True | Bool::False => {},
+ _ => simplified.push(Bool::Not(Box::new(simple.clone()))),
+ }
+ let simple_negated = simple_negate(simple);
+ if simplified.iter().any(|s| *s == simple_negated) {
+ continue;
+ }
+ simplified.push(simple_negated);
+ }
+ let mut improvements = Vec::with_capacity(simplified.len());
+ 'simplified: for suggestion in &simplified {
+ let simplified_stats = terminal_stats(suggestion);
+ let mut improvement = false;
+ for i in 0..32 {
+ // ignore any "simplifications" that end up requiring a terminal more often
+ // than in the original expression
+ if stats.terminals[i] < simplified_stats.terminals[i] {
+ continue 'simplified;
+ }
+ if stats.terminals[i] != 0 && simplified_stats.terminals[i] == 0 {
+ span_lint_hir_and_then(
+ self.cx,
+ LOGIC_BUG,
+ e.hir_id,
+ e.span,
+ "this boolean expression contains a logic bug",
+ |diag| {
+ diag.span_help(
+ h2q.terminals[i].span,
+ "this expression can be optimized out by applying boolean operations to the \
+ outer expression",
+ );
+ diag.span_suggestion(
+ e.span,
+ "it would look like the following",
+ suggest(self.cx, suggestion, &h2q.terminals),
+ // nonminimal_bool can produce minimal but
+ // not human readable expressions (#3141)
+ Applicability::Unspecified,
+ );
+ },
+ );
+ // don't also lint `NONMINIMAL_BOOL`
+ return;
+ }
+ // if the number of occurrences of a terminal decreases or any of the stats
+ // decreases while none increases
+ improvement |= (stats.terminals[i] > simplified_stats.terminals[i])
+ || (stats.negations > simplified_stats.negations && stats.ops == simplified_stats.ops)
+ || (stats.ops > simplified_stats.ops && stats.negations == simplified_stats.negations);
+ }
+ if improvement {
+ improvements.push(suggestion);
+ }
+ }
+ let nonminimal_bool_lint = |suggestions: Vec<_>| {
+ span_lint_hir_and_then(
+ self.cx,
+ NONMINIMAL_BOOL,
+ e.hir_id,
+ e.span,
+ "this boolean expression can be simplified",
+ |diag| {
+ diag.span_suggestions(
+ e.span,
+ "try",
+ suggestions.into_iter(),
+ // nonminimal_bool can produce minimal but
+ // not human readable expressions (#3141)
+ Applicability::Unspecified,
+ );
+ },
+ );
+ };
+ if improvements.is_empty() {
+ let mut visitor = NotSimplificationVisitor { cx: self.cx };
+ visitor.visit_expr(e);
+ } else {
+ nonminimal_bool_lint(
+ improvements
+ .into_iter()
+ .map(|suggestion| suggest(self.cx, suggestion, &h2q.terminals))
+ .collect(),
+ );
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for NonminimalBoolVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if !e.span.from_expansion() {
+ match &e.kind {
+ ExprKind::Binary(binop, _, _) if binop.node == BinOpKind::Or || binop.node == BinOpKind::And => {
+ self.bool_expr(e);
+ },
+ ExprKind::Unary(UnOp::Not, inner) => {
+ if self.cx.typeck_results().node_types()[inner.hir_id].is_bool() {
+ self.bool_expr(e);
+ }
+ },
+ _ => {},
+ }
+ }
+ walk_expr(self, e);
+ }
+}
+
+fn implements_ord<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
+ let ty = cx.typeck_results().expr_ty(expr);
+ get_trait_def_id(cx, &paths::ORD).map_or(false, |id| implements_trait(cx, ty, id, &[]))
+}
+
+struct NotSimplificationVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for NotSimplificationVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Unary(UnOp::Not, inner) = &expr.kind {
+ if let Some(suggestion) = simplify_not(self.cx, inner) {
+ span_lint_and_sugg(
+ self.cx,
+ NONMINIMAL_BOOL,
+ expr.span,
+ "this boolean expression can be simplified",
+ "try",
+ suggestion,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
+ walk_expr(self, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/borrow_as_ptr.rs b/src/tools/clippy/clippy_lints/src/borrow_as_ptr.rs
new file mode 100644
index 000000000..0993adbae
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/borrow_as_ptr.rs
@@ -0,0 +1,99 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_no_std_crate;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::{meets_msrv, msrvs};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the usage of `&expr as *const T` or
+ /// `&mut expr as *mut T`, and suggest using `ptr::addr_of` or
+ /// `ptr::addr_of_mut` instead.
+ ///
+ /// ### Why is this bad?
+ /// This would improve readability and avoid creating a reference
+ /// that points to an uninitialized value or unaligned place.
+ /// Read the `ptr::addr_of` docs for more information.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let val = 1;
+ /// let p = &val as *const i32;
+ ///
+ /// let mut val_mut = 1;
+ /// let p_mut = &mut val_mut as *mut i32;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let val = 1;
+ /// let p = std::ptr::addr_of!(val);
+ ///
+ /// let mut val_mut = 1;
+ /// let p_mut = std::ptr::addr_of_mut!(val_mut);
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub BORROW_AS_PTR,
+ pedantic,
+ "borrowing just to cast to a raw pointer"
+}
+
+impl_lint_pass!(BorrowAsPtr => [BORROW_AS_PTR]);
+
+pub struct BorrowAsPtr {
+ msrv: Option<RustcVersion>,
+}
+
+impl BorrowAsPtr {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for BorrowAsPtr {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if !meets_msrv(self.msrv, msrvs::BORROW_AS_PTR) {
+ return;
+ }
+
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Cast(left_expr, ty) = &expr.kind;
+ if let TyKind::Ptr(_) = ty.kind;
+ if let ExprKind::AddrOf(BorrowKind::Ref, mutability, e) = &left_expr.kind;
+
+ then {
+ let core_or_std = if is_no_std_crate(cx) { "core" } else { "std" };
+ let macro_name = match mutability {
+ Mutability::Not => "addr_of",
+ Mutability::Mut => "addr_of_mut",
+ };
+
+ span_lint_and_sugg(
+ cx,
+ BORROW_AS_PTR,
+ expr.span,
+ "borrow as raw pointer",
+ "try",
+ format!(
+ "{}::ptr::{}!({})",
+ core_or_std,
+ macro_name,
+ snippet_opt(cx, e.span).unwrap()
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
new file mode 100644
index 000000000..937765b66
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
@@ -0,0 +1,121 @@
+use crate::reference::DEREF_ADDROF;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{get_parent_expr, is_lint_allowed};
+use rustc_errors::Applicability;
+use rustc_hir::{ExprKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::mir::Mutability;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `&*(&T)`.
+ ///
+ /// ### Why is this bad?
+ /// Dereferencing and then borrowing a reference value has no effect in most cases.
+ ///
+ /// ### Known problems
+ /// False negative on such code:
+ /// ```
+ /// let x = &12;
+ /// let addr_x = &x as *const _ as usize;
+ /// let addr_y = &&*x as *const _ as usize; // the assert passes and the lint triggers here,
+ /// // but applying the suggestion would make the assert fail.
+ /// assert_ne!(addr_x, addr_y);
+ /// ```
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(_x: &str) {}
+ ///
+ /// let s = &String::new();
+ ///
+ /// let a: &String = &* s;
+ /// foo(&*s);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn foo(_x: &str) {}
+ /// # let s = &String::new();
+ /// let a: &String = s;
+ /// foo(&**s);
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub BORROW_DEREF_REF,
+ complexity,
+ "deref on an immutable reference returns the same type as itself"
+}
+
+declare_lint_pass!(BorrowDerefRef => [BORROW_DEREF_REF]);
+
+impl LateLintPass<'_> for BorrowDerefRef {
+ fn check_expr(&mut self, cx: &LateContext<'_>, e: &rustc_hir::Expr<'_>) {
+ if_chain! {
+ if !e.span.from_expansion();
+ if let ExprKind::AddrOf(_, Mutability::Not, addrof_target) = e.kind;
+ if !addrof_target.span.from_expansion();
+ if let ExprKind::Unary(UnOp::Deref, deref_target) = addrof_target.kind;
+ if !deref_target.span.from_expansion();
+ if !matches!(deref_target.kind, ExprKind::Unary(UnOp::Deref, ..) );
+ let ref_ty = cx.typeck_results().expr_ty(deref_target);
+ if let ty::Ref(_, inner_ty, Mutability::Not) = ref_ty.kind();
+ then{
+
+ if let Some(parent_expr) = get_parent_expr(cx, e){
+ if matches!(parent_expr.kind, ExprKind::Unary(UnOp::Deref, ..)) &&
+ !is_lint_allowed(cx, DEREF_ADDROF, parent_expr.hir_id) {
+ return;
+ }
+
+ // modification to `&mut &*x` is different from `&mut x`
+ if matches!(deref_target.kind, ExprKind::Path(..)
+ | ExprKind::Field(..)
+ | ExprKind::Index(..)
+ | ExprKind::Unary(UnOp::Deref, ..))
+ && matches!(parent_expr.kind, ExprKind::AddrOf(_, Mutability::Mut, _)) {
+ return;
+ }
+ }
+
+ span_lint_and_then(
+ cx,
+ BORROW_DEREF_REF,
+ e.span,
+ "deref on an immutable reference",
+ |diag| {
+ diag.span_suggestion(
+ e.span,
+ "if you would like to reborrow, try removing `&*`",
+ snippet_opt(cx, deref_target.span).unwrap(),
+ Applicability::MachineApplicable
+ );
+
+ // has deref trait -> give 2 help
+ // doesn't have deref trait -> give 1 help
+ if let Some(deref_trait_id) = cx.tcx.lang_items().deref_trait(){
+ if !implements_trait(cx, *inner_ty, deref_trait_id, &[]) {
+ return;
+ }
+ }
+
+ diag.span_suggestion(
+ e.span,
+ "if you would like to deref, try using `&**`",
+ format!(
+ "&**{}",
+ &snippet_opt(cx, deref_target.span).unwrap(),
+ ),
+ Applicability::MaybeIncorrect
+ );
+
+ }
+ );
+
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/bytecount.rs b/src/tools/clippy/clippy_lints/src/bytecount.rs
new file mode 100644
index 000000000..326ce3408
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/bytecount.rs
@@ -0,0 +1,103 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::match_type;
+use clippy_utils::visitors::is_local_used;
+use clippy_utils::{path_to_local_id, paths, peel_blocks, peel_ref_operators, strip_pat_refs};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Closure, Expr, ExprKind, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, UintTy};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for naive byte counts
+ ///
+ /// ### Why is this bad?
+ /// The [`bytecount`](https://crates.io/crates/bytecount)
+ /// crate has methods to count your bytes faster, especially for large slices.
+ ///
+ /// ### Known problems
+ /// If you have predominantly small slices, the
+ /// `bytecount::count(..)` method may actually be slower. However, if you can
+ /// ensure that fewer than 2³²-1 matches arise, the `naive_count_32(..)` function can be
+ /// faster in those cases.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let vec = vec![1_u8];
+ /// let count = vec.iter().filter(|x| **x == 0u8).count();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// # let vec = vec![1_u8];
+ /// let count = bytecount::count(&vec, 0u8);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NAIVE_BYTECOUNT,
+ pedantic,
+ "use of naive `<slice>.filter(|&x| x == y).count()` to count byte values"
+}
+
+declare_lint_pass!(ByteCount => [NAIVE_BYTECOUNT]);
+
+impl<'tcx> LateLintPass<'tcx> for ByteCount {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::MethodCall(count, [count_recv], _) = expr.kind;
+ if count.ident.name == sym::count;
+ if let ExprKind::MethodCall(filter, [filter_recv, filter_arg], _) = count_recv.kind;
+ if filter.ident.name == sym!(filter);
+ if let ExprKind::Closure(&Closure { body, .. }) = filter_arg.kind;
+ let body = cx.tcx.hir().body(body);
+ if let [param] = body.params;
+ if let PatKind::Binding(_, arg_id, _, _) = strip_pat_refs(param.pat).kind;
+ if let ExprKind::Binary(ref op, l, r) = body.value.kind;
+ if op.node == BinOpKind::Eq;
+ if match_type(cx,
+ cx.typeck_results().expr_ty(filter_recv).peel_refs(),
+ &paths::SLICE_ITER);
+ let operand_is_arg = |expr| {
+ let expr = peel_ref_operators(cx, peel_blocks(expr));
+ path_to_local_id(expr, arg_id)
+ };
+ let needle = if operand_is_arg(l) {
+ r
+ } else if operand_is_arg(r) {
+ l
+ } else {
+ return;
+ };
+ if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(needle).peel_refs().kind();
+ if !is_local_used(cx, needle, arg_id);
+ then {
+ let haystack = if let ExprKind::MethodCall(path, args, _) =
+ filter_recv.kind {
+ let p = path.ident.name;
+ if (p == sym::iter || p == sym!(iter_mut)) && args.len() == 1 {
+ &args[0]
+ } else {
+ filter_recv
+ }
+ } else {
+ filter_recv
+ };
+ let mut applicability = Applicability::MaybeIncorrect;
+ span_lint_and_sugg(
+ cx,
+ NAIVE_BYTECOUNT,
+ expr.span,
+ "you appear to be counting bytes the naive way",
+ "consider using the bytecount crate",
+ format!("bytecount::count({}, {})",
+ snippet_with_applicability(cx, haystack.span, "..", &mut applicability),
+ snippet_with_applicability(cx, needle.span, "..", &mut applicability)),
+ applicability,
+ );
+ }
+ };
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/bytes_count_to_len.rs b/src/tools/clippy/clippy_lints/src/bytes_count_to_len.rs
new file mode 100644
index 000000000..d70dbf5b2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/bytes_count_to_len.rs
@@ -0,0 +1,70 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It checks for `str::bytes().count()` and suggests replacing it with
+ /// `str::len()`.
+ ///
+ /// ### Why is this bad?
+ /// `str::bytes().count()` is longer and may not be as performant as using
+ /// `str::len()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// "hello".bytes().count();
+ /// String::from("hello").bytes().count();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// "hello".len();
+ /// String::from("hello").len();
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub BYTES_COUNT_TO_LEN,
+ complexity,
+ "Using `bytes().count()` when `len()` performs the same functionality"
+}
+
+declare_lint_pass!(BytesCountToLen => [BYTES_COUNT_TO_LEN]);
+
+impl<'tcx> LateLintPass<'tcx> for BytesCountToLen {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if let hir::ExprKind::MethodCall(_, expr_args, _) = &expr.kind;
+ if let Some(expr_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if match_def_path(cx, expr_def_id, &paths::ITER_COUNT);
+
+ if let [bytes_expr] = &**expr_args;
+ if let hir::ExprKind::MethodCall(_, bytes_args, _) = &bytes_expr.kind;
+ if let Some(bytes_def_id) = cx.typeck_results().type_dependent_def_id(bytes_expr.hir_id);
+ if match_def_path(cx, bytes_def_id, &paths::STR_BYTES);
+
+ if let [str_expr] = &**bytes_args;
+ let ty = cx.typeck_results().expr_ty(str_expr).peel_refs();
+
+ if is_type_diagnostic_item(cx, ty, sym::String) || ty.kind() == &ty::Str;
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BYTES_COUNT_TO_LEN,
+ expr.span,
+ "using long and hard to read `.bytes().count()`",
+ "consider calling `.len()` instead",
+ format!("{}.len()", snippet_with_applicability(cx, str_expr.span, "..", &mut applicability)),
+ applicability
+ );
+ }
+ };
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/cargo/common_metadata.rs b/src/tools/clippy/clippy_lints/src/cargo/common_metadata.rs
new file mode 100644
index 000000000..e0442dda4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/cargo/common_metadata.rs
@@ -0,0 +1,54 @@
+//! lint on missing cargo common metadata
+
+use cargo_metadata::Metadata;
+use clippy_utils::diagnostics::span_lint;
+use rustc_lint::LateContext;
+use rustc_span::source_map::DUMMY_SP;
+
+use super::CARGO_COMMON_METADATA;
+
+pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata, ignore_publish: bool) {
+ for package in &metadata.packages {
+ // only run the lint if publish is `None` (`publish = true` or skipped entirely)
+ // or if the vector isn't empty (`publish = ["something"]`)
+ if package.publish.as_ref().filter(|publish| publish.is_empty()).is_none() || ignore_publish {
+ if is_empty_str(&package.description) {
+ missing_warning(cx, package, "package.description");
+ }
+
+ if is_empty_str(&package.license) && is_empty_str(&package.license_file) {
+ missing_warning(cx, package, "either package.license or package.license_file");
+ }
+
+ if is_empty_str(&package.repository) {
+ missing_warning(cx, package, "package.repository");
+ }
+
+ if is_empty_str(&package.readme) {
+ missing_warning(cx, package, "package.readme");
+ }
+
+ if is_empty_vec(&package.keywords) {
+ missing_warning(cx, package, "package.keywords");
+ }
+
+ if is_empty_vec(&package.categories) {
+ missing_warning(cx, package, "package.categories");
+ }
+ }
+ }
+}
+
+fn missing_warning(cx: &LateContext<'_>, package: &cargo_metadata::Package, field: &str) {
+ let message = format!("package `{}` is missing `{}` metadata", package.name, field);
+ span_lint(cx, CARGO_COMMON_METADATA, DUMMY_SP, &message);
+}
+
+fn is_empty_str<T: AsRef<std::ffi::OsStr>>(value: &Option<T>) -> bool {
+ value.as_ref().map_or(true, |s| s.as_ref().is_empty())
+}
+
+fn is_empty_vec(value: &[String]) -> bool {
+ // This works because `all` returns `true` for an empty iterator
+ value.iter().all(String::is_empty)
+}
diff --git a/src/tools/clippy/clippy_lints/src/cargo/feature_name.rs b/src/tools/clippy/clippy_lints/src/cargo/feature_name.rs
new file mode 100644
index 000000000..79a469a42
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/cargo/feature_name.rs
@@ -0,0 +1,92 @@
+use cargo_metadata::Metadata;
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_lint::LateContext;
+use rustc_span::source_map::DUMMY_SP;
+
+use super::{NEGATIVE_FEATURE_NAMES, REDUNDANT_FEATURE_NAMES};
+
+static PREFIXES: [&str; 8] = ["no-", "no_", "not-", "not_", "use-", "use_", "with-", "with_"];
+static SUFFIXES: [&str; 2] = ["-support", "_support"];
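+// Both arrays must stay sorted: PREFIXES bytewise and SUFFIXES by reversed bytes.
+// The `partition_point` searches in `check` below rely on this ordering, and
+// `test_prefixes_sorted` at the bottom of the file enforces it.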
+
+pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) {
+ for package in &metadata.packages {
+ let mut features: Vec<&String> = package.features.keys().collect();
+ features.sort();
+ for feature in features {
+ let prefix_opt = {
+ let i = PREFIXES.partition_point(|prefix| prefix < &feature.as_str());
+ if i > 0 && feature.starts_with(PREFIXES[i - 1]) {
+ Some(PREFIXES[i - 1])
+ } else {
+ None
+ }
+ };
+ if let Some(prefix) = prefix_opt {
+ lint(cx, feature, prefix, true);
+ }
+
+ let suffix_opt: Option<&str> = {
+ let i = SUFFIXES.partition_point(|suffix| {
+ suffix.bytes().rev().cmp(feature.bytes().rev()) == std::cmp::Ordering::Less
+ });
+ if i > 0 && feature.ends_with(SUFFIXES[i - 1]) {
+ Some(SUFFIXES[i - 1])
+ } else {
+ None
+ }
+ };
+ if let Some(suffix) = suffix_opt {
+ lint(cx, feature, suffix, false);
+ }
+ }
+ }
+}
+
+fn is_negative_prefix(s: &str) -> bool {
+ s.starts_with("no")
+}
+
+fn lint(cx: &LateContext<'_>, feature: &str, substring: &str, is_prefix: bool) {
+ let is_negative = is_prefix && is_negative_prefix(substring);
+ span_lint_and_help(
+ cx,
+ if is_negative {
+ NEGATIVE_FEATURE_NAMES
+ } else {
+ REDUNDANT_FEATURE_NAMES
+ },
+ DUMMY_SP,
+ &format!(
+ "the \"{}\" {} in the feature name \"{}\" is {}",
+ substring,
+ if is_prefix { "prefix" } else { "suffix" },
+ feature,
+ if is_negative { "negative" } else { "redundant" }
+ ),
+ None,
+ &format!(
+ "consider renaming the feature to \"{}\"{}",
+ if is_prefix {
+ feature.strip_prefix(substring)
+ } else {
+ feature.strip_suffix(substring)
+ }
+ .unwrap(),
+ if is_negative {
+ ", but make sure the feature adds functionality"
+ } else {
+ ""
+ }
+ ),
+ );
+}
+
+#[test]
+fn test_prefixes_sorted() {
+ let mut sorted_prefixes = PREFIXES;
+ sorted_prefixes.sort_unstable();
+ assert_eq!(PREFIXES, sorted_prefixes);
+ let mut sorted_suffixes = SUFFIXES;
+ sorted_suffixes.sort_by(|a, b| a.bytes().rev().cmp(b.bytes().rev()));
+ assert_eq!(SUFFIXES, sorted_suffixes);
+}
diff --git a/src/tools/clippy/clippy_lints/src/cargo/mod.rs b/src/tools/clippy/clippy_lints/src/cargo/mod.rs
new file mode 100644
index 000000000..9f45db86a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/cargo/mod.rs
@@ -0,0 +1,221 @@
+mod common_metadata;
+mod feature_name;
+mod multiple_crate_versions;
+mod wildcard_dependencies;
+
+use cargo_metadata::MetadataCommand;
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_lint_allowed;
+use rustc_hir::hir_id::CRATE_HIR_ID;
+use rustc_lint::{LateContext, LateLintPass, Lint};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::DUMMY_SP;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks to see if all common metadata is defined in
+ /// `Cargo.toml`. See: <https://rust-lang-nursery.github.io/api-guidelines/documentation.html#cargotoml-includes-all-common-metadata-c-metadata>
+ ///
+ /// ### Why is this bad?
+ /// It will be more difficult for users to discover the
+ /// purpose of the crate, and key information related to it.
+ ///
+ /// ### Example
+ /// ```toml
+ /// # This `Cargo.toml` is missing a description field:
+ /// [package]
+ /// name = "clippy"
+ /// version = "0.0.212"
+ /// repository = "https://github.com/rust-lang/rust-clippy"
+ /// readme = "README.md"
+ /// license = "MIT OR Apache-2.0"
+ /// keywords = ["clippy", "lint", "plugin"]
+ /// categories = ["development-tools", "development-tools::cargo-plugins"]
+ /// ```
+ ///
+ /// Should include a description field like:
+ ///
+ /// ```toml
+ /// # This `Cargo.toml` includes all common metadata
+ /// [package]
+ /// name = "clippy"
+ /// version = "0.0.212"
+ /// description = "A bunch of helpful lints to avoid common pitfalls in Rust"
+ /// repository = "https://github.com/rust-lang/rust-clippy"
+ /// readme = "README.md"
+ /// license = "MIT OR Apache-2.0"
+ /// keywords = ["clippy", "lint", "plugin"]
+ /// categories = ["development-tools", "development-tools::cargo-plugins"]
+ /// ```
+ #[clippy::version = "1.32.0"]
+ pub CARGO_COMMON_METADATA,
+ cargo,
+ "common metadata is defined in `Cargo.toml`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for feature names with prefix `use-`, `with-` or suffix `-support`
+ ///
+ /// ### Why is this bad?
+ /// These prefixes and suffixes have no significant meaning.
+ ///
+ /// ### Example
+ /// ```toml
+ /// # The `Cargo.toml` with feature name redundancy
+ /// [features]
+ /// default = ["use-abc", "with-def", "ghi-support"]
+ /// use-abc = [] # redundant
+ /// with-def = [] # redundant
+ /// ghi-support = [] # redundant
+ /// ```
+ ///
+ /// Use instead:
+ /// ```toml
+ /// [features]
+ /// default = ["abc", "def", "ghi"]
+ /// abc = []
+ /// def = []
+ /// ghi = []
+ /// ```
+ ///
+ #[clippy::version = "1.57.0"]
+ pub REDUNDANT_FEATURE_NAMES,
+ cargo,
+ "usage of a redundant feature name"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for negative feature names with prefix `no-` or `not-`
+ ///
+ /// ### Why is this bad?
+ /// Features are supposed to be additive, and negatively-named features violate it.
+ ///
+ /// ### Example
+ /// ```toml
+ /// # The `Cargo.toml` with negative feature names
+ /// [features]
+ /// default = []
+ /// no-abc = []
+ /// not-def = []
+ ///
+ /// ```
+ /// Use instead:
+ /// ```toml
+ /// [features]
+ /// default = ["abc", "def"]
+ /// abc = []
+ /// def = []
+ ///
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub NEGATIVE_FEATURE_NAMES,
+ cargo,
+ "usage of a negative feature name"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks to see if multiple versions of a crate are being
+ /// used.
+ ///
+ /// ### Why is this bad?
+ /// This bloats the size of targets, and can lead to
+ /// confusing error messages when structs or traits are used interchangeably
+ /// between different versions of a crate.
+ ///
+ /// ### Known problems
+ /// Because this can be caused purely by the dependencies
+ /// themselves, it's not always possible to fix this issue.
+ ///
+ /// ### Example
+ /// ```toml
+ /// # This will pull in both winapi v0.3.x and v0.2.x, triggering a warning.
+ /// [dependencies]
+ /// ctrlc = "=3.1.0"
+ /// ansi_term = "=0.11.0"
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MULTIPLE_CRATE_VERSIONS,
+ cargo,
+ "multiple versions of the same crate being used"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for wildcard dependencies in the `Cargo.toml`.
+ ///
+ /// ### Why is this bad?
+ /// [As the edition guide says](https://rust-lang-nursery.github.io/edition-guide/rust-2018/cargo-and-crates-io/crates-io-disallows-wildcard-dependencies.html),
+ /// it is highly unlikely that you work with any possible version of your dependency,
+ /// and wildcard dependencies would cause unnecessary breakage in the ecosystem.
+ ///
+ /// ### Example
+ /// ```toml
+ /// [dependencies]
+ /// regex = "*"
+ /// ```
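+ ///
+ /// Use instead (any explicit version requirement; the version below is only an example):
+ /// ```toml
+ /// [dependencies]
+ /// regex = "1.5"
+ /// ```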
+ #[clippy::version = "1.32.0"]
+ pub WILDCARD_DEPENDENCIES,
+ cargo,
+ "wildcard dependencies being used"
+}
+
+pub struct Cargo {
+ pub ignore_publish: bool,
+}
+
+impl_lint_pass!(Cargo => [
+ CARGO_COMMON_METADATA,
+ REDUNDANT_FEATURE_NAMES,
+ NEGATIVE_FEATURE_NAMES,
+ MULTIPLE_CRATE_VERSIONS,
+ WILDCARD_DEPENDENCIES
+]);
+
+impl LateLintPass<'_> for Cargo {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ static NO_DEPS_LINTS: &[&Lint] = &[
+ CARGO_COMMON_METADATA,
+ REDUNDANT_FEATURE_NAMES,
+ NEGATIVE_FEATURE_NAMES,
+ WILDCARD_DEPENDENCIES,
+ ];
+ static WITH_DEPS_LINTS: &[&Lint] = &[MULTIPLE_CRATE_VERSIONS];
+
+ if !NO_DEPS_LINTS
+ .iter()
+ .all(|&lint| is_lint_allowed(cx, lint, CRATE_HIR_ID))
+ {
+ match MetadataCommand::new().no_deps().exec() {
+ Ok(metadata) => {
+ common_metadata::check(cx, &metadata, self.ignore_publish);
+ feature_name::check(cx, &metadata);
+ wildcard_dependencies::check(cx, &metadata);
+ },
+ Err(e) => {
+ for lint in NO_DEPS_LINTS {
+ span_lint(cx, lint, DUMMY_SP, &format!("could not read cargo metadata: {}", e));
+ }
+ },
+ }
+ }
+
+ if !WITH_DEPS_LINTS
+ .iter()
+ .all(|&lint| is_lint_allowed(cx, lint, CRATE_HIR_ID))
+ {
+ match MetadataCommand::new().exec() {
+ Ok(metadata) => {
+ multiple_crate_versions::check(cx, &metadata);
+ },
+ Err(e) => {
+ for lint in WITH_DEPS_LINTS {
+ span_lint(cx, lint, DUMMY_SP, &format!("could not read cargo metadata: {}", e));
+ }
+ },
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs b/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs
new file mode 100644
index 000000000..76fd0819a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/cargo/multiple_crate_versions.rs
@@ -0,0 +1,63 @@
+//! lint on multiple versions of a crate being used
+
+use cargo_metadata::{DependencyKind, Metadata, Node, Package, PackageId};
+use clippy_utils::diagnostics::span_lint;
+use if_chain::if_chain;
+use itertools::Itertools;
+use rustc_hir::def_id::LOCAL_CRATE;
+use rustc_lint::LateContext;
+use rustc_span::source_map::DUMMY_SP;
+
+use super::MULTIPLE_CRATE_VERSIONS;
+
+pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) {
+ let local_name = cx.tcx.crate_name(LOCAL_CRATE);
+ let mut packages = metadata.packages.clone();
+ packages.sort_by(|a, b| a.name.cmp(&b.name));
+
+ if_chain! {
+ if let Some(resolve) = &metadata.resolve;
+ if let Some(local_id) = packages
+ .iter()
+ .find_map(|p| if p.name == local_name.as_str() { Some(&p.id) } else { None });
+ then {
+ for (name, group) in &packages.iter().group_by(|p| p.name.clone()) {
+ let group: Vec<&Package> = group.collect();
+
+ if group.len() <= 1 {
+ continue;
+ }
+
+ if group.iter().all(|p| is_normal_dep(&resolve.nodes, local_id, &p.id)) {
+ let mut versions: Vec<_> = group.into_iter().map(|p| &p.version).collect();
+ versions.sort();
+ let versions = versions.iter().join(", ");
+
+ span_lint(
+ cx,
+ MULTIPLE_CRATE_VERSIONS,
+ DUMMY_SP,
+ &format!("multiple versions for dependency `{}`: {}", name, versions),
+ );
+ }
+ }
+ }
+ }
+}
+
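+/// Returns `true` if `dep_id` is reachable from the local crate purely through
+/// `DependencyKind::Normal` edges, i.e. it is not pulled in only as a dev- or build-dependency.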
+fn is_normal_dep(nodes: &[Node], local_id: &PackageId, dep_id: &PackageId) -> bool {
+ fn depends_on(node: &Node, dep_id: &PackageId) -> bool {
+ node.deps.iter().any(|dep| {
+ dep.pkg == *dep_id
+ && dep
+ .dep_kinds
+ .iter()
+ .any(|info| matches!(info.kind, DependencyKind::Normal))
+ })
+ }
+
+ nodes
+ .iter()
+ .filter(|node| depends_on(node, dep_id))
+ .any(|node| node.id == *local_id || is_normal_dep(nodes, local_id, &node.id))
+}
diff --git a/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs b/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs
new file mode 100644
index 000000000..7fa6acbf5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/cargo/wildcard_dependencies.rs
@@ -0,0 +1,27 @@
+use cargo_metadata::Metadata;
+use clippy_utils::diagnostics::span_lint;
+use if_chain::if_chain;
+use rustc_lint::LateContext;
+use rustc_span::source_map::DUMMY_SP;
+
+use super::WILDCARD_DEPENDENCIES;
+
+pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) {
+ for dep in &metadata.packages[0].dependencies {
+ // VersionReq::any() does not work
+ if_chain! {
+ if let Ok(wildcard_ver) = semver::VersionReq::parse("*");
+ if let Some(ref source) = dep.source;
+ if !source.starts_with("git");
+ if dep.req == wildcard_ver;
+ then {
+ span_lint(
+ cx,
+ WILDCARD_DEPENDENCIES,
+ DUMMY_SP,
+ &format!("wildcard dependency for `{}`", dep.name),
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/case_sensitive_file_extension_comparisons.rs
new file mode 100644
index 000000000..7eff71d50
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/case_sensitive_file_extension_comparisons.rs
@@ -0,0 +1,86 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_hir::{Expr, ExprKind, PathSegment};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{source_map::Spanned, symbol::sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `ends_with` with possible file extensions
+ /// and suggests to use a case-insensitive approach instead.
+ ///
+ /// ### Why is this bad?
+ /// `ends_with` is case-sensitive and may not detect files with a valid extension.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn is_rust_file(filename: &str) -> bool {
+ /// filename.ends_with(".rs")
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn is_rust_file(filename: &str) -> bool {
+ /// let filename = std::path::Path::new(filename);
+ /// filename.extension()
+ /// .map(|ext| ext.eq_ignore_ascii_case("rs"))
+ /// .unwrap_or(false)
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
+ pedantic,
+ "Checks for calls to ends_with with case-sensitive file extensions"
+}
+
+declare_lint_pass!(CaseSensitiveFileExtensionComparisons => [CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS]);
+
+fn check_case_sensitive_file_extension_comparison(ctx: &LateContext<'_>, expr: &Expr<'_>) -> Option<Span> {
+ if_chain! {
+ if let ExprKind::MethodCall(PathSegment { ident, .. }, [obj, extension, ..], span) = expr.kind;
+ if ident.as_str() == "ends_with";
+ if let ExprKind::Lit(Spanned { node: LitKind::Str(ext_literal, ..), ..}) = extension.kind;
+ if (2..=6).contains(&ext_literal.as_str().len());
+ if ext_literal.as_str().starts_with('.');
+ if ext_literal.as_str().chars().skip(1).all(|c| c.is_uppercase() || c.is_ascii_digit())
+ || ext_literal.as_str().chars().skip(1).all(|c| c.is_lowercase() || c.is_ascii_digit());
+ then {
+ let mut ty = ctx.typeck_results().expr_ty(obj);
+ ty = match ty.kind() {
+ ty::Ref(_, ty, ..) => *ty,
+ _ => ty
+ };
+
+ match ty.kind() {
+ ty::Str => {
+ return Some(span);
+ },
+ ty::Adt(def, _) => {
+ if ctx.tcx.is_diagnostic_item(sym::String, def.did()) {
+ return Some(span);
+ }
+ },
+ _ => { return None; }
+ }
+ }
+ }
+ None
+}
+
+impl<'tcx> LateLintPass<'tcx> for CaseSensitiveFileExtensionComparisons {
+ fn check_expr(&mut self, ctx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if let Some(span) = check_case_sensitive_file_extension_comparison(ctx, expr) {
+ span_lint_and_help(
+ ctx,
+ CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
+ span,
+ "case-sensitive file extension comparison",
+ None,
+ "consider using a case-insensitive comparison instead",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs b/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs
new file mode 100644
index 000000000..64ea326b7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs
@@ -0,0 +1,44 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_semver::RustcVersion;
+
+use super::CAST_ABS_TO_UNSIGNED;
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ cast_expr: &Expr<'_>,
+ cast_from: Ty<'_>,
+ cast_to: Ty<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if meets_msrv(msrv, msrvs::UNSIGNED_ABS)
+ && let ty::Int(from) = cast_from.kind()
+ && let ty::Uint(to) = cast_to.kind()
+ && let ExprKind::MethodCall(method_path, args, _) = cast_expr.kind
+ && method_path.ident.name.as_str() == "abs"
+ {
+ let span = if from.bit_width() == to.bit_width() {
+ expr.span
+ } else {
+ // if the result of `.unsigned_abs` would be a different type, keep the cast
+ // e.g. `i64 -> usize`, `i16 -> u8`
+ cast_expr.span
+ };
+
+ span_lint_and_sugg(
+ cx,
+ CAST_ABS_TO_UNSIGNED,
+ span,
+ &format!("casting the result of `{cast_from}::abs()` to {cast_to}"),
+ "replace with",
+ format!("{}.unsigned_abs()", Sugg::hir(cx, &args[0], "..")),
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_enum_constructor.rs b/src/tools/clippy/clippy_lints/src/casts/cast_enum_constructor.rs
new file mode 100644
index 000000000..1973692e1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_enum_constructor.rs
@@ -0,0 +1,21 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+use super::CAST_ENUM_CONSTRUCTOR;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>) {
+ if matches!(cast_from.kind(), ty::FnDef(..))
+ && let ExprKind::Path(path) = &cast_expr.kind
+ && let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Fn), _) = cx.qpath_res(path, cast_expr.hir_id)
+ {
+ span_lint(
+ cx,
+ CAST_ENUM_CONSTRUCTOR,
+ expr.span,
+ "cast of an enum tuple constructor to an integer",
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs b/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs
new file mode 100644
index 000000000..938458e30
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs
@@ -0,0 +1,112 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_isize_or_usize;
+use clippy_utils::{in_constant, meets_msrv, msrvs};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, FloatTy, Ty};
+use rustc_semver::RustcVersion;
+
+use super::{utils, CAST_LOSSLESS};
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ cast_op: &Expr<'_>,
+ cast_from: Ty<'_>,
+ cast_to: Ty<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if !should_lint(cx, expr, cast_from, cast_to, msrv) {
+ return;
+ }
+
+ // The suggestion is to use a function call, so if the original expression
+ // has parens on the outside, they are no longer needed.
+ let mut applicability = Applicability::MachineApplicable;
+ let opt = snippet_opt(cx, cast_op.span);
+ let sugg = opt.as_ref().map_or_else(
+ || {
+ applicability = Applicability::HasPlaceholders;
+ ".."
+ },
+ |snip| {
+ if should_strip_parens(cast_op, snip) {
+ &snip[1..snip.len() - 1]
+ } else {
+ snip.as_str()
+ }
+ },
+ );
+
+ let message = if cast_from.is_bool() {
+ format!(
+ "casting `{0:}` to `{1:}` is more cleanly stated with `{1:}::from(_)`",
+ cast_from, cast_to
+ )
+ } else {
+ format!(
+ "casting `{}` to `{}` may become silently lossy if you later change the type",
+ cast_from, cast_to
+ )
+ };
+
+ span_lint_and_sugg(
+ cx,
+ CAST_LOSSLESS,
+ expr.span,
+ &message,
+ "try",
+ format!("{}::from({})", cast_to, sugg),
+ applicability,
+ );
+}
+
+fn should_lint(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ cast_from: Ty<'_>,
+ cast_to: Ty<'_>,
+ msrv: Option<RustcVersion>,
+) -> bool {
+ // Do not suggest using From in consts/statics until it is valid to do so (see #2267).
+ if in_constant(cx, expr.hir_id) {
+ return false;
+ }
+
+ match (cast_from.is_integral(), cast_to.is_integral()) {
+ (true, true) => {
+ let cast_signed_to_unsigned = cast_from.is_signed() && !cast_to.is_signed();
+ let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
+ let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
+ !is_isize_or_usize(cast_from)
+ && !is_isize_or_usize(cast_to)
+ && from_nbits < to_nbits
+ && !cast_signed_to_unsigned
+ },
+
+ (true, false) => {
+ let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
+ let to_nbits = if let ty::Float(FloatTy::F32) = cast_to.kind() {
+ 32
+ } else {
+ 64
+ };
+ !is_isize_or_usize(cast_from) && from_nbits < to_nbits
+ },
+ (false, true) if matches!(cast_from.kind(), ty::Bool) && meets_msrv(msrv, msrvs::FROM_BOOL) => true,
+ (_, _) => {
+ matches!(cast_from.kind(), ty::Float(FloatTy::F32)) && matches!(cast_to.kind(), ty::Float(FloatTy::F64))
+ },
+ }
+}
+
+fn should_strip_parens(cast_expr: &Expr<'_>, snip: &str) -> bool {
+ if let ExprKind::Binary(_, _, _) = cast_expr.kind {
+ if snip.starts_with('(') && snip.ends_with(')') {
+ return true;
+ }
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs
new file mode 100644
index 000000000..64f87c80f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs
@@ -0,0 +1,169 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::expr_or_init;
+use clippy_utils::ty::{get_discriminant_value, is_isize_or_usize};
+use rustc_ast::ast;
+use rustc_attr::IntType;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, FloatTy, Ty};
+
+use super::{utils, CAST_ENUM_TRUNCATION, CAST_POSSIBLE_TRUNCATION};
+
+fn constant_int(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u128> {
+ if let Some((Constant::Int(c), _)) = constant(cx, cx.typeck_results(), expr) {
+ Some(c)
+ } else {
+ None
+ }
+}
+
+fn get_constant_bits(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u64> {
+ constant_int(cx, expr).map(|c| u64::from(128 - c.leading_zeros()))
+}
+
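+// For example, with `x: u64`, `(x & 0xFF) as u8` goes through the `BitAnd` arm and the tracked
+// width shrinks to 8 bits, while `(x >> 56) as u8` goes through the `Shr` arm (64 - 56 = 8 bits);
+// in both cases `cast_possible_truncation` stays silent.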
+fn apply_reductions(cx: &LateContext<'_>, nbits: u64, expr: &Expr<'_>, signed: bool) -> u64 {
+ match expr_or_init(cx, expr).kind {
+ ExprKind::Cast(inner, _) => apply_reductions(cx, nbits, inner, signed),
+ ExprKind::Block(block, _) => block.expr.map_or(nbits, |e| apply_reductions(cx, nbits, e, signed)),
+ ExprKind::Binary(op, left, right) => match op.node {
+ BinOpKind::Div => {
+ apply_reductions(cx, nbits, left, signed).saturating_sub(if signed {
+ // let's be conservative here
+ 0
+ } else {
+ // by dividing by 1, we remove 0 bits, etc.
+ get_constant_bits(cx, right).map_or(0, |b| b.saturating_sub(1))
+ })
+ },
+ BinOpKind::Rem | BinOpKind::BitAnd => get_constant_bits(cx, right)
+ .unwrap_or(u64::max_value())
+ .min(apply_reductions(cx, nbits, left, signed)),
+ BinOpKind::Shr => apply_reductions(cx, nbits, left, signed)
+ .saturating_sub(constant_int(cx, right).map_or(0, |s| u64::try_from(s).expect("shift too high"))),
+ _ => nbits,
+ },
+ ExprKind::MethodCall(method, [left, right], _) => {
+ if signed {
+ return nbits;
+ }
+ let max_bits = if method.ident.as_str() == "min" {
+ get_constant_bits(cx, right)
+ } else {
+ None
+ };
+ apply_reductions(cx, nbits, left, signed).min(max_bits.unwrap_or(u64::max_value()))
+ },
+ ExprKind::MethodCall(method, [_, lo, hi], _) => {
+ if method.ident.as_str() == "clamp" {
+ //FIXME: make this a diagnostic item
+ if let (Some(lo_bits), Some(hi_bits)) = (get_constant_bits(cx, lo), get_constant_bits(cx, hi)) {
+ return lo_bits.max(hi_bits);
+ }
+ }
+ nbits
+ },
+ ExprKind::MethodCall(method, [_value], _) => {
+ if method.ident.name.as_str() == "signum" {
+ 0 // do not lint if cast comes from a `signum` function
+ } else {
+ nbits
+ }
+ },
+ _ => nbits,
+ }
+}
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ let msg = match (cast_from.kind(), cast_to.is_integral()) {
+ (ty::Int(_) | ty::Uint(_), true) => {
+ let from_nbits = apply_reductions(
+ cx,
+ utils::int_ty_to_nbits(cast_from, cx.tcx),
+ cast_expr,
+ cast_from.is_signed(),
+ );
+ let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
+
+ let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
+ (true, true) | (false, false) => (to_nbits < from_nbits, ""),
+ (true, false) => (
+ to_nbits <= 32,
+ if to_nbits == 32 {
+ " on targets with 64-bit wide pointers"
+ } else {
+ ""
+ },
+ ),
+ (false, true) => (from_nbits == 64, " on targets with 32-bit wide pointers"),
+ };
+
+ if !should_lint {
+ return;
+ }
+
+ format!(
+ "casting `{}` to `{}` may truncate the value{}",
+ cast_from, cast_to, suffix,
+ )
+ },
+
+ (ty::Adt(def, _), true) if def.is_enum() => {
+ let (from_nbits, variant) = if let ExprKind::Path(p) = &cast_expr.kind
+ && let Res::Def(DefKind::Ctor(..), id) = cx.qpath_res(p, cast_expr.hir_id)
+ {
+ let i = def.variant_index_with_ctor_id(id);
+ let variant = def.variant(i);
+ let nbits = utils::enum_value_nbits(get_discriminant_value(cx.tcx, *def, i));
+ (nbits, Some(variant))
+ } else {
+ (utils::enum_ty_to_nbits(*def, cx.tcx), None)
+ };
+ let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
+
+ let cast_from_ptr_size = def.repr().int.map_or(true, |ty| {
+ matches!(
+ ty,
+ IntType::SignedInt(ast::IntTy::Isize) | IntType::UnsignedInt(ast::UintTy::Usize)
+ )
+ });
+ let suffix = match (cast_from_ptr_size, is_isize_or_usize(cast_to)) {
+ (false, false) if from_nbits > to_nbits => "",
+ (true, false) if from_nbits > to_nbits => "",
+ (false, true) if from_nbits > 64 => "",
+ (false, true) if from_nbits > 32 => " on targets with 32-bit wide pointers",
+ _ => return,
+ };
+
+ if let Some(variant) = variant {
+ span_lint(
+ cx,
+ CAST_ENUM_TRUNCATION,
+ expr.span,
+ &format!(
+ "casting `{}::{}` to `{}` will truncate the value{}",
+ cast_from, variant.name, cast_to, suffix,
+ ),
+ );
+ return;
+ }
+ format!(
+ "casting `{}` to `{}` may truncate the value{}",
+ cast_from, cast_to, suffix,
+ )
+ },
+
+ (ty::Float(_), true) => {
+ format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to)
+ },
+
+ (ty::Float(FloatTy::F64), false) if matches!(cast_to.kind(), &ty::Float(FloatTy::F32)) => {
+ "casting `f64` to `f32` may truncate the value".to_string()
+ },
+
+ _ => return,
+ };
+
+ span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg);
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs b/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs
new file mode 100644
index 000000000..2c5c1d7cb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_possible_wrap.rs
@@ -0,0 +1,44 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_isize_or_usize;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::Ty;
+
+use super::{utils, CAST_POSSIBLE_WRAP};
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ if !(cast_from.is_integral() && cast_to.is_integral()) {
+ return;
+ }
+
+ let arch_64_suffix = " on targets with 64-bit wide pointers";
+ let arch_32_suffix = " on targets with 32-bit wide pointers";
+ let cast_unsigned_to_signed = !cast_from.is_signed() && cast_to.is_signed();
+ let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
+ let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
+
+ let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
+ (true, true) | (false, false) => (to_nbits == from_nbits && cast_unsigned_to_signed, ""),
+ (true, false) => (to_nbits <= 32 && cast_unsigned_to_signed, arch_32_suffix),
+ (false, true) => (
+ cast_unsigned_to_signed,
+ if from_nbits == 64 {
+ arch_64_suffix
+ } else {
+ arch_32_suffix
+ },
+ ),
+ };
+
+ if should_lint {
+ span_lint(
+ cx,
+ CAST_POSSIBLE_WRAP,
+ expr.span,
+ &format!(
+ "casting `{}` to `{}` may wrap around the value{}",
+ cast_from, cast_to, suffix,
+ ),
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_precision_loss.rs b/src/tools/clippy/clippy_lints/src/casts/cast_precision_loss.rs
new file mode 100644
index 000000000..334e1646c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_precision_loss.rs
@@ -0,0 +1,51 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_isize_or_usize;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, FloatTy, Ty};
+
+use super::{utils, CAST_PRECISION_LOSS};
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ if !cast_from.is_integral() || cast_to.is_integral() {
+ return;
+ }
+
+ let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
+ let to_nbits = if cast_to.kind() == &ty::Float(FloatTy::F32) {
+ 32
+ } else {
+ 64
+ };
+
+ if !(is_isize_or_usize(cast_from) || from_nbits >= to_nbits) {
+ return;
+ }
+
+ let cast_to_f64 = to_nbits == 64;
+ let mantissa_nbits = if cast_to_f64 { 52 } else { 23 };
+ let arch_dependent = is_isize_or_usize(cast_from) && cast_to_f64;
+ let arch_dependent_str = "on targets with 64-bit wide pointers ";
+ let from_nbits_str = if arch_dependent {
+ "64".to_owned()
+ } else if is_isize_or_usize(cast_from) {
+ "32 or 64".to_owned()
+ } else {
+ utils::int_ty_to_nbits(cast_from, cx.tcx).to_string()
+ };
+
+ span_lint(
+ cx,
+ CAST_PRECISION_LOSS,
+ expr.span,
+ &format!(
+ "casting `{0}` to `{1}` causes a loss of precision {2}(`{0}` is {3} bits wide, \
+ but `{1}`'s mantissa is only {4} bits wide)",
+ cast_from,
+ if cast_to_f64 { "f64" } else { "f32" },
+ if arch_dependent { arch_dependent_str } else { "" },
+ from_nbits_str,
+ mantissa_nbits
+ ),
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs
new file mode 100644
index 000000000..d476a1a76
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs
@@ -0,0 +1,96 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_c_void;
+use clippy_utils::{get_parent_expr, is_hir_ty_cfg_dependant, match_any_def_paths, paths};
+use rustc_hir::{Expr, ExprKind, GenericArg};
+use rustc_lint::LateContext;
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, Ty};
+
+use super::CAST_PTR_ALIGNMENT;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if let ExprKind::Cast(cast_expr, cast_to) = expr.kind {
+ if is_hir_ty_cfg_dependant(cx, cast_to) {
+ return;
+ }
+ let (cast_from, cast_to) = (
+ cx.typeck_results().expr_ty(cast_expr),
+ cx.typeck_results().expr_ty(expr),
+ );
+ lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
+ } else if let ExprKind::MethodCall(method_path, [self_arg, ..], _) = &expr.kind {
+ if method_path.ident.name == sym!(cast)
+ && let Some(generic_args) = method_path.args
+ && let [GenericArg::Type(cast_to)] = generic_args.args
+ // There probably is no obvious reason to do this, just to be consistent with `as` cases.
+ && !is_hir_ty_cfg_dependant(cx, cast_to)
+ {
+ let (cast_from, cast_to) =
+ (cx.typeck_results().expr_ty(self_arg), cx.typeck_results().expr_ty(expr));
+ lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
+ }
+ }
+}
+
+fn lint_cast_ptr_alignment<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>, cast_from: Ty<'tcx>, cast_to: Ty<'tcx>) {
+ if let ty::RawPtr(from_ptr_ty) = &cast_from.kind()
+ && let ty::RawPtr(to_ptr_ty) = &cast_to.kind()
+ && let Ok(from_layout) = cx.layout_of(from_ptr_ty.ty)
+ && let Ok(to_layout) = cx.layout_of(to_ptr_ty.ty)
+ && from_layout.align.abi < to_layout.align.abi
+ // with c_void, we inherently need to trust the user
+ && !is_c_void(cx, from_ptr_ty.ty)
+ // when casting from a ZST, we don't know enough to properly lint
+ && !from_layout.is_zst()
+ && !is_used_as_unaligned(cx, expr)
+ {
+ span_lint(
+ cx,
+ CAST_PTR_ALIGNMENT,
+ expr.span,
+ &format!(
+ "casting from `{}` to a more-strictly-aligned pointer (`{}`) ({} < {} bytes)",
+ cast_from,
+ cast_to,
+ from_layout.align.abi.bytes(),
+ to_layout.align.abi.bytes(),
+ ),
+ );
+ }
+}
+
+fn is_used_as_unaligned(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ let Some(parent) = get_parent_expr(cx, e) else {
+ return false;
+ };
+ match parent.kind {
+ ExprKind::MethodCall(name, [self_arg, ..], _) if self_arg.hir_id == e.hir_id => {
+ if matches!(name.ident.as_str(), "read_unaligned" | "write_unaligned")
+ && let Some(def_id) = cx.typeck_results().type_dependent_def_id(parent.hir_id)
+ && let Some(def_id) = cx.tcx.impl_of_method(def_id)
+ && cx.tcx.type_of(def_id).is_unsafe_ptr()
+ {
+ true
+ } else {
+ false
+ }
+ },
+ ExprKind::Call(func, [arg, ..]) if arg.hir_id == e.hir_id => {
+ static PATHS: &[&[&str]] = &[
+ paths::PTR_READ_UNALIGNED.as_slice(),
+ paths::PTR_WRITE_UNALIGNED.as_slice(),
+ paths::PTR_UNALIGNED_VOLATILE_LOAD.as_slice(),
+ paths::PTR_UNALIGNED_VOLATILE_STORE.as_slice(),
+ ];
+ if let ExprKind::Path(path) = &func.kind
+ && let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id()
+ && match_any_def_paths(cx, def_id, PATHS).is_some()
+ {
+ true
+ } else {
+ false
+ }
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_ref_to_mut.rs b/src/tools/clippy/clippy_lints/src/casts/cast_ref_to_mut.rs
new file mode 100644
index 000000000..15f2f81f4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_ref_to_mut.rs
@@ -0,0 +1,26 @@
+use clippy_utils::diagnostics::span_lint;
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind, MutTy, Mutability, TyKind, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::CAST_REF_TO_MUT;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Unary(UnOp::Deref, e) = &expr.kind;
+ if let ExprKind::Cast(e, t) = &e.kind;
+ if let TyKind::Ptr(MutTy { mutbl: Mutability::Mut, .. }) = t.kind;
+ if let ExprKind::Cast(e, t) = &e.kind;
+ if let TyKind::Ptr(MutTy { mutbl: Mutability::Not, .. }) = t.kind;
+ if let ty::Ref(..) = cx.typeck_results().node_type(e.hir_id).kind();
+ then {
+ span_lint(
+ cx,
+ CAST_REF_TO_MUT,
+ expr.span,
+ "casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs b/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs
new file mode 100644
index 000000000..75f70b77e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_sign_loss.rs
@@ -0,0 +1,69 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{method_chain_args, sext};
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+use super::CAST_SIGN_LOSS;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ if should_lint(cx, cast_op, cast_from, cast_to) {
+ span_lint(
+ cx,
+ CAST_SIGN_LOSS,
+ expr.span,
+ &format!(
+ "casting `{}` to `{}` may lose the sign of the value",
+ cast_from, cast_to
+ ),
+ );
+ }
+}
+
+fn should_lint(cx: &LateContext<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) -> bool {
+ match (cast_from.is_integral(), cast_to.is_integral()) {
+ (true, true) => {
+ if !cast_from.is_signed() || cast_to.is_signed() {
+ return false;
+ }
+
+ // Don't lint for positive constants.
+ let const_val = constant(cx, cx.typeck_results(), cast_op);
+ if_chain! {
+ if let Some((Constant::Int(n), _)) = const_val;
+ if let ty::Int(ity) = *cast_from.kind();
+ if sext(cx.tcx, n, ity) >= 0;
+ then {
+ return false;
+ }
+ }
+
+ // Don't lint for the result of methods that always return non-negative values.
+ if let ExprKind::MethodCall(path, _, _) = cast_op.kind {
+ let mut method_name = path.ident.name.as_str();
+ let allowed_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];
+
+ if_chain! {
+ if method_name == "unwrap";
+ if let Some(arglist) = method_chain_args(cast_op, &["unwrap"]);
+ if let ExprKind::MethodCall(inner_path, _, _) = &arglist[0][0].kind;
+ then {
+ method_name = inner_path.ident.name.as_str();
+ }
+ }
+
+ if allowed_methods.iter().any(|&name| method_name == name) {
+ return false;
+ }
+ }
+
+ true
+ },
+
+ (false, true) => !cast_to.is_signed(),
+
+ (_, _) => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
new file mode 100644
index 000000000..027c660ce
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
@@ -0,0 +1,143 @@
+use clippy_utils::{diagnostics::span_lint_and_then, meets_msrv, msrvs, source};
+use if_chain::if_chain;
+use rustc_ast::Mutability;
+use rustc_hir::{Expr, ExprKind, Node};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, layout::LayoutOf, Ty, TypeAndMut};
+use rustc_semver::RustcVersion;
+
+use super::CAST_SLICE_DIFFERENT_SIZES;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, msrv: Option<RustcVersion>) {
+ // suggestion is invalid if `ptr::slice_from_raw_parts` does not exist
+ if !meets_msrv(msrv, msrvs::PTR_SLICE_RAW_PARTS) {
+ return;
+ }
+
+ // if this cast is the child of another cast expression then don't emit something for it, the full
+ // chain will be analyzed
+ if is_child_of_cast(cx, expr) {
+ return;
+ }
+
+ if let Some(CastChainInfo {
+ left_cast,
+ start_ty,
+ end_ty,
+ }) = expr_cast_chain_tys(cx, expr)
+ {
+ if let (Ok(from_layout), Ok(to_layout)) = (cx.layout_of(start_ty.ty), cx.layout_of(end_ty.ty)) {
+ let from_size = from_layout.size.bytes();
+ let to_size = to_layout.size.bytes();
+ if from_size != to_size && from_size != 0 && to_size != 0 {
+ span_lint_and_then(
+ cx,
+ CAST_SLICE_DIFFERENT_SIZES,
+ expr.span,
+ &format!(
+ "casting between raw pointers to `[{}]` (element size {}) and `[{}]` (element size {}) does not adjust the count",
+ start_ty.ty, from_size, end_ty.ty, to_size,
+ ),
+ |diag| {
+ let ptr_snippet = source::snippet(cx, left_cast.span, "..");
+
+ let (mutbl_fn_str, mutbl_ptr_str) = match end_ty.mutbl {
+ Mutability::Mut => ("_mut", "mut"),
+ Mutability::Not => ("", "const"),
+ };
+ let sugg = format!(
+ "core::ptr::slice_from_raw_parts{mutbl_fn_str}({ptr_snippet} as *{mutbl_ptr_str} {}, ..)",
+ // get just the ty from the TypeAndMut so that the printed type isn't something like `mut
+ // T`, extract just the `T`
+ end_ty.ty
+ );
+
+ diag.span_suggestion(
+ expr.span,
+ &format!("replace with `ptr::slice_from_raw_parts{mutbl_fn_str}`"),
+ sugg,
+ rustc_errors::Applicability::HasPlaceholders,
+ );
+ },
+ );
+ }
+ }
+ }
+}
+
+fn is_child_of_cast(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let map = cx.tcx.hir();
+ if_chain! {
+ if let Some(parent_id) = map.find_parent_node(expr.hir_id);
+ if let Some(parent) = map.find(parent_id);
+ then {
+ let expr = match parent {
+ Node::Block(block) => {
+ if let Some(parent_expr) = block.expr {
+ parent_expr
+ } else {
+ return false;
+ }
+ },
+ Node::Expr(expr) => expr,
+ _ => return false,
+ };
+
+ matches!(expr.kind, ExprKind::Cast(..))
+ } else {
+ false
+ }
+ }
+}
+
+/// Returns the type T of the pointed to *const [T] or *mut [T] and the mutability of the slice if
+/// the type is one of those slices
+fn get_raw_slice_ty_mut(ty: Ty<'_>) -> Option<TypeAndMut<'_>> {
+ match ty.kind() {
+ ty::RawPtr(TypeAndMut { ty: slice_ty, mutbl }) => match slice_ty.kind() {
+ ty::Slice(ty) => Some(TypeAndMut { ty: *ty, mutbl: *mutbl }),
+ _ => None,
+ },
+ _ => None,
+ }
+}
+
+struct CastChainInfo<'tcx> {
+ /// The left most part of the cast chain, or in other words, the first cast in the chain
+ /// Used for diagnostics
+ left_cast: &'tcx Expr<'tcx>,
+ /// The starting type of the cast chain
+ start_ty: TypeAndMut<'tcx>,
+ /// The final type of the cast chain
+ end_ty: TypeAndMut<'tcx>,
+}
+
+/// Returns a `CastChainInfo` with the left-most cast in the chain and the original ptr T and final
+/// ptr U if the expression is composed of casts.
+/// Returns None if the expr is not a Cast
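+/// For example, with `p: *const [i32]`, `p as *const [i16] as *const [u8]` yields
+/// `left_cast` = `p`, `start_ty` = `i32` and `end_ty` = `u8`.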
+fn expr_cast_chain_tys<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) -> Option<CastChainInfo<'tcx>> {
+ if let ExprKind::Cast(cast_expr, _cast_to_hir_ty) = expr.peel_blocks().kind {
+ let cast_to = cx.typeck_results().expr_ty(expr);
+ let to_slice_ty = get_raw_slice_ty_mut(cast_to)?;
+
+ // If the expression that makes up the source of this cast is itself a cast, recursively
+ // call `expr_cast_chain_tys` and update the end type with the final target type.
+ // Otherwise, this cast is not immediately nested, just construct the info for this cast
+ if let Some(prev_info) = expr_cast_chain_tys(cx, cast_expr) {
+ Some(CastChainInfo {
+ end_ty: to_slice_ty,
+ ..prev_info
+ })
+ } else {
+ let cast_from = cx.typeck_results().expr_ty(cast_expr);
+ let from_slice_ty = get_raw_slice_ty_mut(cast_from)?;
+ Some(CastChainInfo {
+ left_cast: cast_expr,
+ start_ty: from_slice_ty,
+ end_ty: to_slice_ty,
+ })
+ }
+ } else {
+ None
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs b/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs
new file mode 100644
index 000000000..7cc406018
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/char_lit_as_u8.rs
@@ -0,0 +1,41 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, UintTy};
+
+use super::CHAR_LIT_AS_U8;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Cast(e, _) = &expr.kind;
+ if let ExprKind::Lit(l) = &e.kind;
+ if let LitKind::Char(c) = l.node;
+ if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(expr).kind();
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
+
+ span_lint_and_then(
+ cx,
+ CHAR_LIT_AS_U8,
+ expr.span,
+ "casting a character literal to `u8` truncates",
+ |diag| {
+ diag.note("`char` is four bytes wide, but `u8` is a single byte");
+
+ if c.is_ascii() {
+ diag.span_suggestion(
+ expr.span,
+ "use a byte literal instead",
+ format!("b{}", snippet),
+ applicability,
+ );
+ }
+ });
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs
new file mode 100644
index 000000000..35350d8a2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs
@@ -0,0 +1,37 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty, UintTy};
+
+use super::{utils, FN_TO_NUMERIC_CAST};
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ // We only want to check casts to `ty::Uint` or `ty::Int`
+ match cast_to.kind() {
+ ty::Uint(_) | ty::Int(..) => { /* continue on */ },
+ _ => return,
+ }
+
+ match cast_from.kind() {
+ ty::FnDef(..) | ty::FnPtr(_) => {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability);
+ let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
+
+ if (to_nbits >= cx.tcx.data_layout.pointer_size.bits()) && (*cast_to.kind() != ty::Uint(UintTy::Usize)) {
+ span_lint_and_sugg(
+ cx,
+ FN_TO_NUMERIC_CAST,
+ expr.span,
+ &format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
+ "try",
+ format!("{} as usize", from_snippet),
+ applicability,
+ );
+ }
+ },
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs
new file mode 100644
index 000000000..03621887a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs
@@ -0,0 +1,34 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+use super::FN_TO_NUMERIC_CAST_ANY;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ // We allow casts from any function type to any function type.
+ match cast_to.kind() {
+ ty::FnDef(..) | ty::FnPtr(..) => return,
+ _ => { /* continue to checks */ },
+ }
+
+ match cast_from.kind() {
+ ty::FnDef(..) | ty::FnPtr(_) => {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let from_snippet = snippet_with_applicability(cx, cast_expr.span, "..", &mut applicability);
+
+ span_lint_and_sugg(
+ cx,
+ FN_TO_NUMERIC_CAST_ANY,
+ expr.span,
+ &format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
+ "did you mean to invoke the function?",
+ format!("{}() as {}", from_snippet, cast_to),
+ applicability,
+ );
+ },
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs
new file mode 100644
index 000000000..6287f479b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs
@@ -0,0 +1,39 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+use super::{utils, FN_TO_NUMERIC_CAST_WITH_TRUNCATION};
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ // We only want to check casts to `ty::Uint` or `ty::Int`
+ match cast_to.kind() {
+ ty::Uint(_) | ty::Int(..) => { /* continue on */ },
+ _ => return,
+ }
+ match cast_from.kind() {
+ ty::FnDef(..) | ty::FnPtr(_) => {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability);
+
+ let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
+ if to_nbits < cx.tcx.data_layout.pointer_size.bits() {
+ span_lint_and_sugg(
+ cx,
+ FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
+ expr.span,
+ &format!(
+ "casting function pointer `{}` to `{}`, which truncates the value",
+ from_snippet, cast_to
+ ),
+ "try",
+ format!("{} as usize", from_snippet),
+ applicability,
+ );
+ }
+ },
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/mod.rs b/src/tools/clippy/clippy_lints/src/casts/mod.rs
new file mode 100644
index 000000000..af3798a0c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/mod.rs
@@ -0,0 +1,588 @@
+mod cast_abs_to_unsigned;
+mod cast_enum_constructor;
+mod cast_lossless;
+mod cast_possible_truncation;
+mod cast_possible_wrap;
+mod cast_precision_loss;
+mod cast_ptr_alignment;
+mod cast_ref_to_mut;
+mod cast_sign_loss;
+mod cast_slice_different_sizes;
+mod char_lit_as_u8;
+mod fn_to_numeric_cast;
+mod fn_to_numeric_cast_any;
+mod fn_to_numeric_cast_with_truncation;
+mod ptr_as_ptr;
+mod unnecessary_cast;
+mod utils;
+
+use clippy_utils::is_hir_ty_cfg_dependant;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts from any numerical type to a float type where
+ /// the receiving type cannot store all values from the original type without
+ /// rounding errors. This possible rounding is to be expected, so this lint is
+ /// `Allow` by default.
+ ///
+ /// Basically, this warns on casting any integer with 32 or more bits to `f32`
+ /// or any 64-bit integer to `f64`.
+ ///
+ /// ### Why is this bad?
+ /// It's not bad at all. But in some applications it can be
+ /// helpful to know where precision loss can take place. This lint can help find
+ /// those places in the code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = u64::MAX;
+ /// x as f64;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CAST_PRECISION_LOSS,
+ pedantic,
+ "casts that cause loss of precision, e.g., `x as f32` where `x: u64`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts from a signed to an unsigned numerical
+ /// type. In this case, negative values wrap around to large positive values,
+ /// which can be quite surprising in practice. However, as the cast works as
+ /// defined, this lint is `Allow` by default.
+ ///
+ /// ### Why is this bad?
+ /// Possibly surprising results. You can activate this lint
+ /// as a one-time check to see where numerical wrapping can arise.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let y: i8 = -1;
+ /// y as u128; // will return 340282366920938463463374607431768211455
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CAST_SIGN_LOSS,
+ pedantic,
+ "casts from signed types to unsigned types, e.g., `x as u32` where `x: i32`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts between numerical types that may
+ /// truncate large values. This is expected behavior, so this lint is `Allow` by
+ /// default.
+ ///
+ /// ### Why is this bad?
+ /// In some problem domains, it is good practice to avoid
+ /// truncation. This lint can be activated to help assess where additional
+ /// checks could be beneficial.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn as_u8(x: u64) -> u8 {
+ /// x as u8
+ /// }
+ /// ```
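+ ///
+ /// If truncation is not acceptable, one possible alternative (purely illustrative) is a
+ /// fallible conversion:
+ /// ```rust
+ /// fn as_u8(x: u64) -> Result<u8, std::num::TryFromIntError> {
+ ///     u8::try_from(x)
+ /// }
+ /// ```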
+ #[clippy::version = "pre 1.29.0"]
+ pub CAST_POSSIBLE_TRUNCATION,
+ pedantic,
+ "casts that may cause truncation of the value, e.g., `x as u8` where `x: u32`, or `x as i32` where `x: f32`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts from an unsigned type to a signed type of
+ /// the same size. Performing such a cast is a 'no-op' for the compiler,
+ /// i.e., nothing is changed at the bit level, and the binary representation of
+ /// the value is reinterpreted. This can cause wrapping if the value is too big
+ /// for the target signed type. However, the cast works as defined, so this lint
+ /// is `Allow` by default.
+ ///
+ /// ### Why is this bad?
+ /// While such a cast is not bad in itself, the results can
+ /// be surprising when this is not the intended behavior, as demonstrated by the
+ /// example below.
+ ///
+ /// ### Example
+ /// ```rust
+ /// u32::MAX as i32; // will yield a value of `-1`
+ /// ```
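+ ///
+ /// If wrapping is not intended, a fallible conversion (illustrative only) rejects such values instead:
+ /// ```rust
+ /// assert!(i32::try_from(u32::MAX).is_err());
+ /// ```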
+ #[clippy::version = "pre 1.29.0"]
+ pub CAST_POSSIBLE_WRAP,
+ pedantic,
+ "casts that may cause wrapping around the value, e.g., `x as i32` where `x: u32` and `x > i32::MAX`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts between numerical types that may
+ /// be replaced by safe conversion functions.
+ ///
+ /// ### Why is this bad?
+ /// Rust's `as` keyword will perform many kinds of
+ /// conversions, including silently lossy conversions. Conversion functions such
+ /// as `i32::from` will only perform lossless conversions. Using the conversion
+ /// functions prevents conversions from turning into silent lossy conversions if
+ /// the types of the input expressions ever change, and makes it easier for
+ /// people reading the code to know that the conversion is lossless.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn as_u64(x: u8) -> u64 {
+ /// x as u64
+ /// }
+ /// ```
+ ///
+ /// Using `::from` would look like this:
+ ///
+ /// ```rust
+ /// fn as_u64(x: u8) -> u64 {
+ /// u64::from(x)
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CAST_LOSSLESS,
+ pedantic,
+ "casts using `as` that are known to be lossless, e.g., `x as u64` where `x: u8`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts to the same type, casts of int literals to integer types
+ /// and casts of float literals to float types.
+ ///
+ /// ### Why is this bad?
+ /// It's just unnecessary.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = 2i32 as i32;
+ /// let _ = 0.5 as f32;
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust
+ /// let _ = 2_i32;
+ /// let _ = 0.5_f32;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNNECESSARY_CAST,
+ complexity,
+ "cast to the same type, e.g., `x as i32` where `x: i32`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts, using `as` or `pointer::cast`,
+ /// from a less-strictly-aligned pointer to a more-strictly-aligned pointer
+ ///
+ /// ### Why is this bad?
+ /// Dereferencing the resulting pointer may be undefined
+ /// behavior.
+ ///
+ /// ### Known problems
+ /// Using `std::ptr::read_unaligned` and `std::ptr::write_unaligned` or similar
+ /// on the resulting pointer is fine. The lint is also over-zealous: casts with manual alignment checks
+ /// or cast chains like `u64` -> `u8` -> `u16` can be fine. Miri is able to do a more in-depth analysis.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = (&1u8 as *const u8) as *const u16;
+ /// let _ = (&mut 1u8 as *mut u8) as *mut u16;
+ ///
+ /// (&1u8 as *const u8).cast::<u16>();
+ /// (&mut 1u8 as *mut u8).cast::<u16>();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CAST_PTR_ALIGNMENT,
+ pedantic,
+ "cast from a pointer to a more-strictly-aligned pointer"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts of function pointers to something other than usize
+ ///
+ /// ### Why is this bad?
+ /// Casting a function pointer to anything other than usize/isize is not portable across
+ /// architectures, because you end up losing bits if the target type is too small or end up with a
+ /// bunch of extra bits that waste space and add more instructions to the final binary than
+ /// strictly necessary for the problem.
+ ///
+ /// Casting to isize also doesn't make sense since there are no signed addresses.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn fun() -> i32 { 1 }
+ /// let _ = fun as i64;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn fun() -> i32 { 1 }
+ /// let _ = fun as usize;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FN_TO_NUMERIC_CAST,
+ style,
+ "casting a function pointer to a numeric type other than usize"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts of a function pointer to a numeric type not wide enough to
+ /// store the address.
+ ///
+ /// ### Why is this bad?
+ /// Such a cast discards some bits of the function's address. If this is intended, it would be more
+ /// clearly expressed by casting to usize first, then casting the usize to the intended type (with
+ /// a comment) to perform the truncation.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn fn1() -> i16 {
+ /// 1
+ /// };
+ /// let _ = fn1 as i32;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// // Cast to usize first, then comment with the reason for the truncation
+ /// fn fn1() -> i16 {
+ /// 1
+ /// };
+ /// let fn_ptr = fn1 as usize;
+ /// let fn_ptr_truncated = fn_ptr as i32;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
+ style,
+ "casting a function pointer to a numeric type not wide enough to store the address"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts of a function pointer to any integer type.
+ ///
+ /// ### Why is this bad?
+ /// Casting a function pointer to an integer can have surprising results and can occur
+ /// accidentally if parentheses are omitted from a function call. If you aren't doing anything
+ /// low-level with function pointers then you can opt-out of casting functions to integers in
+ /// order to avoid mistakes. Alternatively, you can use this lint to audit all uses of function
+ /// pointer casts in your code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // fn1 is cast as `usize`
+ /// fn fn1() -> u16 {
+ /// 1
+ /// };
+ /// let _ = fn1 as usize;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// // maybe you intended to call the function?
+ /// fn fn2() -> u16 {
+ /// 1
+ /// };
+ /// let _ = fn2() as usize;
+ ///
+ /// // or
+ ///
+ /// // maybe you intended to cast it to a function type?
+ /// fn fn3() -> u16 {
+ /// 1
+ /// }
+ /// let _ = fn3 as fn() -> u16;
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub FN_TO_NUMERIC_CAST_ANY,
+ restriction,
+ "casting a function pointer to any integer type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts of `&T` to `&mut T` anywhere in the code.
+ ///
+ /// ### Why is this bad?
+ /// It’s basically guaranteed to be undefined behavior.
+ /// `UnsafeCell` is the only way to obtain aliasable data that is considered
+ /// mutable.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// fn x(r: &i32) {
+ /// unsafe {
+ /// *(r as *const _ as *mut _) += 1;
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Instead consider using interior mutability types.
+ ///
+ /// ```rust
+ /// use std::cell::UnsafeCell;
+ ///
+ /// fn x(r: &UnsafeCell<i32>) {
+ /// unsafe {
+ /// *r.get() += 1;
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.33.0"]
+ pub CAST_REF_TO_MUT,
+ correctness,
+ "a cast of reference to a mutable pointer"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expressions where a character literal is cast
+ /// to `u8` and suggests using a byte literal instead.
+ ///
+ /// ### Why is this bad?
+ /// In general, casting values to smaller types is
+ /// error-prone and should be avoided where possible. In the particular case of
+ /// converting a character literal to u8, it is easy to avoid by just using a
+ /// byte literal instead. As an added bonus, `b'a'` is even slightly shorter
+ /// than `'a' as u8`.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// 'x' as u8
+ /// ```
+ ///
+ /// A better version, using the byte literal:
+ ///
+ /// ```rust,ignore
+ /// b'x'
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CHAR_LIT_AS_U8,
+ complexity,
+ "casting a character literal to `u8` truncates"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `as` casts between raw pointers without changing their mutability,
+ /// namely `*const T` to `*const U` and `*mut T` to `*mut U`.
+ ///
+ /// ### Why is this bad?
+ /// Though `as` casts between raw pointers are not terrible, `pointer::cast` is safer because
+ /// it cannot accidentally change the pointer's mutability nor cast the pointer to other types like `usize`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let ptr: *const u32 = &42_u32;
+ /// let mut_ptr: *mut u32 = &mut 42_u32;
+ /// let _ = ptr as *const i32;
+ /// let _ = mut_ptr as *mut i32;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let ptr: *const u32 = &42_u32;
+ /// let mut_ptr: *mut u32 = &mut 42_u32;
+ /// let _ = ptr.cast::<i32>();
+ /// let _ = mut_ptr.cast::<i32>();
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub PTR_AS_PTR,
+ pedantic,
+ "casting using `as` from and to raw pointers that doesn't change its mutability, where `pointer::cast` could take the place of `as`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts from an enum type to an integral type which will definitely truncate the
+ /// value.
+ ///
+ /// ### Why is this bad?
+ /// The resulting integral value will not match the value of the variant it came from.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum E { X = 256 };
+ /// let _ = E::X as u8;
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub CAST_ENUM_TRUNCATION,
+ suspicious,
+ "casts from an enum type to an integral type which will truncate the value"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `as` casts between raw pointers to slices with differently sized elements.
+ ///
+ /// ### Why is this bad?
+ /// The produced raw pointer to a slice does not update its length metadata. The produced
+ /// pointer will point to a different number of bytes than the original pointer because the
+ /// length metadata of a raw slice pointer is in elements rather than bytes.
+ /// Producing a slice reference from the raw pointer will either create a slice with
+ /// less data (which can be surprising) or create a slice with more data and cause Undefined Behavior.
+ ///
+ /// ### Example
+ /// Missing data:
+ /// ```rust
+ /// let a = [1_i32, 2, 3, 4];
+ /// let p = &a as *const [i32] as *const [u8];
+ /// unsafe {
+ /// println!("{:?}", &*p);
+ /// }
+ /// ```
+ /// Undefined Behavior (note: also potential alignment issues):
+ /// ```rust
+ /// let a = [1_u8, 2, 3, 4];
+ /// let p = &a as *const [u8] as *const [u32];
+ /// unsafe {
+ /// println!("{:?}", &*p);
+ /// }
+ /// ```
+ /// Instead use `ptr::slice_from_raw_parts` to construct a slice from a data pointer and the correct length
+ /// ```rust
+ /// let a = [1_i32, 2, 3, 4];
+ /// let old_ptr = &a as *const [i32];
+ /// // The data pointer is cast to a pointer to the target `u8` not `[u8]`
+ /// // The length comes from the known length of 4 i32s times the 4 bytes per i32
+ /// let new_ptr = core::ptr::slice_from_raw_parts(old_ptr as *const u8, 16);
+ /// unsafe {
+ /// println!("{:?}", &*new_ptr);
+ /// }
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub CAST_SLICE_DIFFERENT_SIZES,
+ correctness,
+ "casting using `as` between raw pointers to slices of types with different sizes"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for casts from an enum tuple constructor to an integer.
+ ///
+ /// ### Why is this bad?
+ /// The cast is easily confused with casting a C-like enum value to an integer.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum E { X(i32) };
+ /// let _ = E::X as usize;
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub CAST_ENUM_CONSTRUCTOR,
+ suspicious,
+ "casts from an enum tuple constructor to an integer"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for uses of the `abs()` method that cast the result to unsigned.
+ ///
+ /// ### Why is this bad?
+ /// The `unsigned_abs()` method avoids panic when called on the MIN value.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: i32 = -42;
+ /// let y: u32 = x.abs() as u32;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x: i32 = -42;
+ /// let y: u32 = x.unsigned_abs();
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub CAST_ABS_TO_UNSIGNED,
+ suspicious,
+ "casting the result of `abs()` to an unsigned integer can panic"
+}
+
+pub struct Casts {
+ msrv: Option<RustcVersion>,
+}
+
+impl Casts {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(Casts => [
+ CAST_PRECISION_LOSS,
+ CAST_SIGN_LOSS,
+ CAST_POSSIBLE_TRUNCATION,
+ CAST_POSSIBLE_WRAP,
+ CAST_LOSSLESS,
+ CAST_REF_TO_MUT,
+ CAST_PTR_ALIGNMENT,
+ CAST_SLICE_DIFFERENT_SIZES,
+ UNNECESSARY_CAST,
+ FN_TO_NUMERIC_CAST_ANY,
+ FN_TO_NUMERIC_CAST,
+ FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
+ CHAR_LIT_AS_U8,
+ PTR_AS_PTR,
+ CAST_ENUM_TRUNCATION,
+ CAST_ENUM_CONSTRUCTOR,
+ CAST_ABS_TO_UNSIGNED
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Casts {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if !in_external_macro(cx.sess(), expr.span) {
+ ptr_as_ptr::check(cx, expr, self.msrv);
+ }
+
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ if let ExprKind::Cast(cast_expr, cast_to) = expr.kind {
+ if is_hir_ty_cfg_dependant(cx, cast_to) {
+ return;
+ }
+ let (cast_from, cast_to) = (
+ cx.typeck_results().expr_ty(cast_expr),
+ cx.typeck_results().expr_ty(expr),
+ );
+
+ if unnecessary_cast::check(cx, expr, cast_expr, cast_from, cast_to) {
+ return;
+ }
+
+ fn_to_numeric_cast_any::check(cx, expr, cast_expr, cast_from, cast_to);
+ fn_to_numeric_cast::check(cx, expr, cast_expr, cast_from, cast_to);
+ fn_to_numeric_cast_with_truncation::check(cx, expr, cast_expr, cast_from, cast_to);
+
+ if cast_to.is_numeric() && !in_external_macro(cx.sess(), expr.span) {
+ cast_possible_truncation::check(cx, expr, cast_expr, cast_from, cast_to);
+ if cast_from.is_numeric() {
+ cast_possible_wrap::check(cx, expr, cast_from, cast_to);
+ cast_precision_loss::check(cx, expr, cast_from, cast_to);
+ cast_sign_loss::check(cx, expr, cast_expr, cast_from, cast_to);
+ cast_abs_to_unsigned::check(cx, expr, cast_expr, cast_from, cast_to, self.msrv);
+ }
+ cast_lossless::check(cx, expr, cast_expr, cast_from, cast_to, self.msrv);
+ cast_enum_constructor::check(cx, expr, cast_expr, cast_from);
+ }
+ }
+
+ cast_ref_to_mut::check(cx, expr);
+ cast_ptr_alignment::check(cx, expr);
+ char_lit_as_u8::check(cx, expr);
+ ptr_as_ptr::check(cx, expr, self.msrv);
+ cast_slice_different_sizes::check(cx, expr, self.msrv);
+ }
+
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
new file mode 100644
index 000000000..46d45d096
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
@@ -0,0 +1,49 @@
+use std::borrow::Cow;
+
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{meets_msrv, msrvs};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, Mutability, TyKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, TypeAndMut};
+use rustc_semver::RustcVersion;
+
+use super::PTR_AS_PTR;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: Option<RustcVersion>) {
+ if !meets_msrv(msrv, msrvs::POINTER_CAST) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Cast(cast_expr, cast_to_hir_ty) = expr.kind;
+ let (cast_from, cast_to) = (cx.typeck_results().expr_ty(cast_expr), cx.typeck_results().expr_ty(expr));
+ if let ty::RawPtr(TypeAndMut { mutbl: from_mutbl, .. }) = cast_from.kind();
+ if let ty::RawPtr(TypeAndMut { ty: to_pointee_ty, mutbl: to_mutbl }) = cast_to.kind();
+ if matches!((from_mutbl, to_mutbl),
+ (Mutability::Not, Mutability::Not) | (Mutability::Mut, Mutability::Mut));
+ // The `U` in `pointer::cast` has to be `Sized`
+ // as explained here: https://github.com/rust-lang/rust/issues/60602.
+ if to_pointee_ty.is_sized(cx.tcx.at(expr.span), cx.param_env);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut applicability);
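+ // Omit the turbofish when the pointee type is inferred (`_`) in the source; otherwise spell it out.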
+ let turbofish = match &cast_to_hir_ty.kind {
+ TyKind::Infer => Cow::Borrowed(""),
+ TyKind::Ptr(mut_ty) if matches!(mut_ty.ty.kind, TyKind::Infer) => Cow::Borrowed(""),
+ _ => Cow::Owned(format!("::<{}>", to_pointee_ty)),
+ };
+ span_lint_and_sugg(
+ cx,
+ PTR_AS_PTR,
+ expr.span,
+ "`as` casting between raw pointers without changing its mutability",
+ "try `pointer::cast`, a safer alternative",
+ format!("{}.cast{}()", cast_expr_sugg.maybe_par(), turbofish),
+ applicability,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
new file mode 100644
index 000000000..fff7da8e3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
@@ -0,0 +1,126 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::numeric_literal::NumericLiteral;
+use clippy_utils::source::snippet_opt;
+use if_chain::if_chain;
+use rustc_ast::{LitFloatType, LitIntType, LitKind};
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{Expr, ExprKind, Lit, QPath, TyKind, UnOp};
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{self, FloatTy, InferTy, Ty};
+
+use super::UNNECESSARY_CAST;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &Expr<'tcx>,
+ cast_expr: &Expr<'tcx>,
+ cast_from: Ty<'tcx>,
+ cast_to: Ty<'tcx>,
+) -> bool {
+ // skip non-primitive type cast
+ if_chain! {
+ if let ExprKind::Cast(_, cast_to) = expr.kind;
+ if let TyKind::Path(QPath::Resolved(_, path)) = &cast_to.kind;
+ if let Res::PrimTy(_) = path.res;
+ then {}
+ else {
+ return false
+ }
+ }
+
+ if let Some(lit) = get_numeric_literal(cast_expr) {
+ let literal_str = snippet_opt(cx, cast_expr.span).unwrap_or_default();
+
+ if_chain! {
+ if let LitKind::Int(n, _) = lit.node;
+ if let Some(src) = snippet_opt(cx, cast_expr.span);
+ if cast_to.is_floating_point();
+ if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node);
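+ // `128 - leading_zeros()` is the number of significant bits in the literal's value; if it fits
+ // into the target float's mantissa, the value is exactly representable and the cast is unnecessary.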
+ let from_nbits = 128 - n.leading_zeros();
+ let to_nbits = fp_ty_mantissa_nbits(cast_to);
+ if from_nbits != 0 && to_nbits != 0 && from_nbits <= to_nbits && num_lit.is_decimal();
+ then {
+ lint_unnecessary_cast(cx, expr, num_lit.integer, cast_from, cast_to);
+ return true
+ }
+ }
+
+ match lit.node {
+ LitKind::Int(_, LitIntType::Unsuffixed) if cast_to.is_integral() => {
+ lint_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
+ },
+ LitKind::Float(_, LitFloatType::Unsuffixed) if cast_to.is_floating_point() => {
+ lint_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
+ },
+ LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed) => {},
+ LitKind::Int(_, LitIntType::Signed(_) | LitIntType::Unsigned(_))
+ | LitKind::Float(_, LitFloatType::Suffixed(_))
+ if cast_from.kind() == cast_to.kind() =>
+ {
+ if let Some(src) = snippet_opt(cx, cast_expr.span) {
+ if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node) {
+ lint_unnecessary_cast(cx, expr, num_lit.integer, cast_from, cast_to);
+ }
+ }
+ },
+ _ => {
+ if cast_from.kind() == cast_to.kind() && !in_external_macro(cx.sess(), expr.span) {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_CAST,
+ expr.span,
+ &format!(
+ "casting to the same type is unnecessary (`{}` -> `{}`)",
+ cast_from, cast_to
+ ),
+ "try",
+ literal_str,
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ },
+ }
+ }
+
+ false
+}
+
+fn lint_unnecessary_cast(cx: &LateContext<'_>, expr: &Expr<'_>, literal_str: &str, cast_from: Ty<'_>, cast_to: Ty<'_>) {
+ let literal_kind_name = if cast_from.is_integral() { "integer" } else { "float" };
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_CAST,
+ expr.span,
+ &format!("casting {} literal to `{}` is unnecessary", literal_kind_name, cast_to),
+ "try",
+ format!("{}_{}", literal_str.trim_end_matches('.'), cast_to),
+ Applicability::MachineApplicable,
+ );
+}
+
+fn get_numeric_literal<'e>(expr: &'e Expr<'e>) -> Option<&'e Lit> {
+ match expr.kind {
+ ExprKind::Lit(ref lit) => Some(lit),
+ ExprKind::Unary(UnOp::Neg, e) => {
+ if let ExprKind::Lit(ref lit) = e.kind {
+ Some(lit)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
+
+/// Returns the mantissa width in bits of a floating-point type.
+/// Returns 0 if the type is not a floating-point type.
+fn fp_ty_mantissa_nbits(typ: Ty<'_>) -> u32 {
+ match typ.kind() {
+ ty::Float(FloatTy::F32) => 23,
+ ty::Float(FloatTy::F64) | ty::Infer(InferTy::FloatVar(_)) => 52,
+ _ => 0,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/casts/utils.rs b/src/tools/clippy/clippy_lints/src/casts/utils.rs
new file mode 100644
index 000000000..5a4f20f09
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/casts/utils.rs
@@ -0,0 +1,75 @@
+use clippy_utils::ty::{read_explicit_enum_value, EnumValue};
+use rustc_middle::ty::{self, AdtDef, IntTy, Ty, TyCtxt, UintTy, VariantDiscr};
+
+/// Returns the size in bits of an integral type.
+/// Will return 0 if the type is not an int or uint variant
+pub(super) fn int_ty_to_nbits(typ: Ty<'_>, tcx: TyCtxt<'_>) -> u64 {
+ match typ.kind() {
+ ty::Int(i) => match i {
+ IntTy::Isize => tcx.data_layout.pointer_size.bits(),
+ IntTy::I8 => 8,
+ IntTy::I16 => 16,
+ IntTy::I32 => 32,
+ IntTy::I64 => 64,
+ IntTy::I128 => 128,
+ },
+ ty::Uint(i) => match i {
+ UintTy::Usize => tcx.data_layout.pointer_size.bits(),
+ UintTy::U8 => 8,
+ UintTy::U16 => 16,
+ UintTy::U32 => 32,
+ UintTy::U64 => 64,
+ UintTy::U128 => 128,
+ },
+ _ => 0,
+ }
+}
+
+pub(super) fn enum_value_nbits(value: EnumValue) -> u64 {
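+ // Minimal number of bits needed to represent the discriminant: its significant bits, plus one
+ // sign bit for negative values (two's-complement width).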
+ match value {
+ EnumValue::Unsigned(x) => 128 - x.leading_zeros(),
+ EnumValue::Signed(x) if x < 0 => 128 - (-(x + 1)).leading_zeros() + 1,
+ EnumValue::Signed(x) => 128 - x.leading_zeros(),
+ }
+ .into()
+}
+
+pub(super) fn enum_ty_to_nbits(adt: AdtDef<'_>, tcx: TyCtxt<'_>) -> u64 {
+ let mut explicit = 0i128;
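+ // Fold over the variants to find the smallest and largest discriminant values; relative
+ // discriminants are resolved against the most recent explicit one.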
+ let (start, end) = adt
+ .variants()
+ .iter()
+ .fold((0, i128::MIN), |(start, end), variant| match variant.discr {
+ VariantDiscr::Relative(x) => match explicit.checked_add(i128::from(x)) {
+ Some(x) => (start, end.max(x)),
+ None => (i128::MIN, end),
+ },
+ VariantDiscr::Explicit(id) => match read_explicit_enum_value(tcx, id) {
+ Some(EnumValue::Signed(x)) => {
+ explicit = x;
+ (start.min(x), end.max(x))
+ },
+ Some(EnumValue::Unsigned(x)) => match i128::try_from(x) {
+ Ok(x) => {
+ explicit = x;
+ (start, end.max(x))
+ },
+ Err(_) => (i128::MIN, end),
+ },
+ None => (start, end),
+ },
+ });
+
+ if start > end {
+ // No variants.
+ 0
+ } else {
+ let neg_bits = if start < 0 {
+ 128 - (-(start + 1)).leading_zeros() + 1
+ } else {
+ 0
+ };
+ let pos_bits = if end > 0 { 128 - end.leading_zeros() } else { 0 };
+ neg_bits.max(pos_bits).into()
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/checked_conversions.rs b/src/tools/clippy/clippy_lints/src/checked_conversions.rs
new file mode 100644
index 000000000..17fc81951
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/checked_conversions.rs
@@ -0,0 +1,354 @@
+//! lint on manually implemented checked conversions that could be transformed into `try_from`
+
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{in_constant, meets_msrv, msrvs, SpanlessEq};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOp, BinOpKind, Expr, ExprKind, QPath, TyKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for explicit bounds checking when casting.
+ ///
+ /// ### Why is this bad?
+ /// Reduces the readability of statements & is error-prone.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let foo: u32 = 5;
+ /// foo <= i32::MAX as u32;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let foo = 1;
+ /// # #[allow(unused)]
+ /// i32::try_from(foo).is_ok();
+ /// ```
+ #[clippy::version = "1.37.0"]
+ pub CHECKED_CONVERSIONS,
+ pedantic,
+ "`try_from` could replace manual bounds checking when casting"
+}
+
+pub struct CheckedConversions {
+ msrv: Option<RustcVersion>,
+}
+
+impl CheckedConversions {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(CheckedConversions => [CHECKED_CONVERSIONS]);
+
+impl<'tcx> LateLintPass<'tcx> for CheckedConversions {
+ fn check_expr(&mut self, cx: &LateContext<'_>, item: &Expr<'_>) {
+ if !meets_msrv(self.msrv, msrvs::TRY_FROM) {
+ return;
+ }
+
+ let result = if_chain! {
+ if !in_constant(cx, item.hir_id);
+ if !in_external_macro(cx.sess(), item.span);
+ if let ExprKind::Binary(op, left, right) = &item.kind;
+
+ then {
+ match op.node {
+ BinOpKind::Ge | BinOpKind::Le => single_check(item),
+ BinOpKind::And => double_check(cx, left, right),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ };
+
+ if let Some(cv) = result {
+ if let Some(to_type) = cv.to_type {
+ let mut applicability = Applicability::MachineApplicable;
+ let snippet = snippet_with_applicability(cx, cv.expr_to_cast.span, "_", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ CHECKED_CONVERSIONS,
+ item.span,
+ "checked cast can be simplified",
+ "try",
+ format!("{}::try_from({}).is_ok()", to_type, snippet),
+ applicability,
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+/// Searches for a single upper-bound check; a lone check is only sufficient when converting from an unsigned type
+/// todo: check for case signed -> larger unsigned == only x >= 0
+fn single_check<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
+ check_upper_bound(expr).filter(|cv| cv.cvt == ConversionType::FromUnsigned)
+}
+
+/// Searches for a combination of upper & lower bound checks
+fn double_check<'a>(cx: &LateContext<'_>, left: &'a Expr<'_>, right: &'a Expr<'_>) -> Option<Conversion<'a>> {
+ let upper_lower = |l, r| {
+ let upper = check_upper_bound(l);
+ let lower = check_lower_bound(r);
+
+ upper.zip(lower).and_then(|(l, r)| l.combine(r, cx))
+ };
+
+ upper_lower(left, right).or_else(|| upper_lower(right, left))
+}
+
+/// Contains the result of a tried conversion check
+#[derive(Clone, Debug)]
+struct Conversion<'a> {
+ cvt: ConversionType,
+ expr_to_cast: &'a Expr<'a>,
+ to_type: Option<&'a str>,
+}
+
+/// The kind of conversion that is checked
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum ConversionType {
+ SignedToUnsigned,
+ SignedToSigned,
+ FromUnsigned,
+}
+
+impl<'a> Conversion<'a> {
+ /// Combine multiple conversions if they are compatible
+ pub fn combine(self, other: Self, cx: &LateContext<'_>) -> Option<Conversion<'a>> {
+ if self.is_compatible(&other, cx) {
+ // Prefer a Conversion that contains a type-constraint
+ Some(if self.to_type.is_some() { self } else { other })
+ } else {
+ None
+ }
+ }
+
+ /// Checks if two conversions are compatible:
+ /// same type of conversion, same 'castee' and same 'to type'
+ pub fn is_compatible(&self, other: &Self, cx: &LateContext<'_>) -> bool {
+ (self.cvt == other.cvt)
+ && (SpanlessEq::new(cx).eq_expr(self.expr_to_cast, other.expr_to_cast))
+ && (self.has_compatible_to_type(other))
+ }
+
+ /// Checks if the to-type is the same (if there is a type constraint)
+ fn has_compatible_to_type(&self, other: &Self) -> bool {
+ match (self.to_type, other.to_type) {
+ (Some(l), Some(r)) => l == r,
+ _ => true,
+ }
+ }
+
+ /// Try to construct a new conversion if the conversion type is valid
+ fn try_new(expr_to_cast: &'a Expr<'_>, from_type: &str, to_type: &'a str) -> Option<Conversion<'a>> {
+ ConversionType::try_new(from_type, to_type).map(|cvt| Conversion {
+ cvt,
+ expr_to_cast,
+ to_type: Some(to_type),
+ })
+ }
+
+ /// Construct a new conversion without type constraint
+ fn new_any(expr_to_cast: &'a Expr<'_>) -> Conversion<'a> {
+ Conversion {
+ cvt: ConversionType::SignedToUnsigned,
+ expr_to_cast,
+ to_type: None,
+ }
+ }
+}
+
+impl ConversionType {
+ /// Creates a conversion type if the type is allowed & conversion is valid
+ #[must_use]
+ fn try_new(from: &str, to: &str) -> Option<Self> {
+ if UINTS.contains(&from) {
+ Some(Self::FromUnsigned)
+ } else if SINTS.contains(&from) {
+ if UINTS.contains(&to) {
+ Some(Self::SignedToUnsigned)
+ } else if SINTS.contains(&to) {
+ Some(Self::SignedToSigned)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ }
+}
+
+/// Check for `expr <= (to_type::MAX as from_type)`
+fn check_upper_bound<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
+ if_chain! {
+ if let ExprKind::Binary(ref op, left, right) = &expr.kind;
+ if let Some((candidate, check)) = normalize_le_ge(op, left, right);
+ if let Some((from, to)) = get_types_from_cast(check, INTS, "max_value", "MAX");
+
+ then {
+ Conversion::try_new(candidate, from, to)
+ } else {
+ None
+ }
+ }
+}
+
+/// Check for `expr >= 0|(to_type::MIN as from_type)`
+fn check_lower_bound<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
+ fn check_function<'a>(candidate: &'a Expr<'a>, check: &'a Expr<'a>) -> Option<Conversion<'a>> {
+ (check_lower_bound_zero(candidate, check)).or_else(|| (check_lower_bound_min(candidate, check)))
+ }
+
+ // First of all, we need a binary expression containing the expression & the cast
+ if let ExprKind::Binary(ref op, left, right) = &expr.kind {
+ normalize_le_ge(op, right, left).and_then(|(l, r)| check_function(l, r))
+ } else {
+ None
+ }
+}
+
+/// Check for `expr >= 0`
+fn check_lower_bound_zero<'a>(candidate: &'a Expr<'_>, check: &'a Expr<'_>) -> Option<Conversion<'a>> {
+ if_chain! {
+ if let ExprKind::Lit(ref lit) = &check.kind;
+ if let LitKind::Int(0, _) = &lit.node;
+
+ then {
+ Some(Conversion::new_any(candidate))
+ } else {
+ None
+ }
+ }
+}
+
+/// Check for `expr >= (to_type::MIN as from_type)`
+fn check_lower_bound_min<'a>(candidate: &'a Expr<'_>, check: &'a Expr<'_>) -> Option<Conversion<'a>> {
+ if let Some((from, to)) = get_types_from_cast(check, SINTS, "min_value", "MIN") {
+ Conversion::try_new(candidate, from, to)
+ } else {
+ None
+ }
+}
+
+/// Tries to extract the from- and to-type from a cast expression
+fn get_types_from_cast<'a>(
+ expr: &'a Expr<'_>,
+ types: &'a [&str],
+ func: &'a str,
+ assoc_const: &'a str,
+) -> Option<(&'a str, &'a str)> {
+ // `to_type::max_value() as from_type`
+ // or `to_type::MAX as from_type`
+ let call_from_cast: Option<(&Expr<'_>, &str)> = if_chain! {
+ // to_type::max_value(), from_type
+ if let ExprKind::Cast(limit, from_type) = &expr.kind;
+ if let TyKind::Path(ref from_type_path) = &from_type.kind;
+ if let Some(from_sym) = int_ty_to_sym(from_type_path);
+
+ then {
+ Some((limit, from_sym))
+ } else {
+ None
+ }
+ };
+
+ // `from_type::from(to_type::max_value())`
+ let limit_from: Option<(&Expr<'_>, &str)> = call_from_cast.or_else(|| {
+ if_chain! {
+ // `from_type::from, to_type::max_value()`
+ if let ExprKind::Call(from_func, args) = &expr.kind;
+ // `to_type::max_value()`
+ if args.len() == 1;
+ if let limit = &args[0];
+ // `from_type::from`
+ if let ExprKind::Path(ref path) = &from_func.kind;
+ if let Some(from_sym) = get_implementing_type(path, INTS, "from");
+
+ then {
+ Some((limit, from_sym))
+ } else {
+ None
+ }
+ }
+ });
+
+ if let Some((limit, from_type)) = limit_from {
+ match limit.kind {
+ // `from_type::from(_)`
+ ExprKind::Call(path, _) => {
+ if let ExprKind::Path(ref path) = path.kind {
+ // `to_type`
+ if let Some(to_type) = get_implementing_type(path, types, func) {
+ return Some((from_type, to_type));
+ }
+ }
+ },
+ // `to_type::MAX`
+ ExprKind::Path(ref path) => {
+ if let Some(to_type) = get_implementing_type(path, types, assoc_const) {
+ return Some((from_type, to_type));
+ }
+ },
+ _ => {},
+ }
+ };
+ None
+}
+
+/// Gets the type which implements the called function
+fn get_implementing_type<'a>(path: &QPath<'_>, candidates: &'a [&str], function: &str) -> Option<&'a str> {
+ if_chain! {
+ if let QPath::TypeRelative(ty, path) = &path;
+ if path.ident.name.as_str() == function;
+ if let TyKind::Path(QPath::Resolved(None, tp)) = &ty.kind;
+ if let [int] = tp.segments;
+ then {
+ let name = int.ident.name.as_str();
+ candidates.iter().find(|c| &name == *c).copied()
+ } else {
+ None
+ }
+ }
+}
+
+/// Gets the type as a string, if it is a supported integer
+fn int_ty_to_sym<'tcx>(path: &QPath<'_>) -> Option<&'tcx str> {
+ if_chain! {
+ if let QPath::Resolved(_, path) = *path;
+ if let [ty] = path.segments;
+ then {
+ let name = ty.ident.name.as_str();
+ INTS.iter().find(|c| &name == *c).copied()
+ } else {
+ None
+ }
+ }
+}
+
+/// Will return the expressions as if they were expr1 <= expr2
+fn normalize_le_ge<'a>(op: &BinOp, left: &'a Expr<'a>, right: &'a Expr<'a>) -> Option<(&'a Expr<'a>, &'a Expr<'a>)> {
+ match op.node {
+ BinOpKind::Le => Some((left, right)),
+ BinOpKind::Ge => Some((right, left)),
+ _ => None,
+ }
+}
+
+// Constants
+const UINTS: &[&str] = &["u8", "u16", "u32", "u64", "usize"];
+const SINTS: &[&str] = &["i8", "i16", "i32", "i64", "isize"];
+const INTS: &[&str] = &["u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "isize"];
diff --git a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
new file mode 100644
index 000000000..33c44f8b2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs
@@ -0,0 +1,167 @@
+//! calculate cognitive complexity and warn about overly complex functions
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::LimitStack;
+use rustc_ast::ast::Attribute;
+use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
+use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+use rustc_span::{sym, BytePos};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for methods with high cognitive complexity.
+ ///
+ /// ### Why is this bad?
+ /// Methods of high cognitive complexity tend to be hard to
+ /// both read and maintain. Also LLVM will tend to optimize small methods better.
+ ///
+ /// ### Known problems
+ /// Sometimes it's hard to find a way to reduce the
+ /// complexity.
+ ///
+ /// ### Example
+ /// You'll see it when you get the warning.
+ #[clippy::version = "1.35.0"]
+ pub COGNITIVE_COMPLEXITY,
+ nursery,
+ "functions that should be split up into multiple functions"
+}
+
+pub struct CognitiveComplexity {
+ limit: LimitStack,
+}
+
+impl CognitiveComplexity {
+ #[must_use]
+ pub fn new(limit: u64) -> Self {
+ Self {
+ limit: LimitStack::new(limit),
+ }
+ }
+}
+
+impl_lint_pass!(CognitiveComplexity => [COGNITIVE_COMPLEXITY]);
+
+impl CognitiveComplexity {
+ #[expect(clippy::cast_possible_truncation)]
+ fn check<'tcx>(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ body_span: Span,
+ ) {
+ if body_span.from_expansion() {
+ return;
+ }
+
+ let expr = &body.value;
+
+ let mut helper = CcHelper { cc: 1, returns: 0 };
+ helper.visit_expr(expr);
+ let CcHelper { cc, returns } = helper;
+ let ret_ty = cx.typeck_results().node_type(expr.hir_id);
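+ // `return`s are discounted from the complexity: all of them when the function returns a
+ // `Result`, otherwise only half.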
+ let ret_adjust = if is_type_diagnostic_item(cx, ret_ty, sym::Result) {
+ returns
+ } else {
+ #[expect(clippy::integer_division)]
+ (returns / 2)
+ };
+
+ let mut rust_cc = cc;
+ // prevent degenerate cases where unreachable code contains `return` statements
+ if rust_cc >= ret_adjust {
+ rust_cc -= ret_adjust;
+ }
+
+ if rust_cc > self.limit.limit() {
+ let fn_span = match kind {
+ FnKind::ItemFn(ident, _, _) | FnKind::Method(ident, _) => ident.span,
+ FnKind::Closure => {
+ let header_span = body_span.with_hi(decl.output.span().lo());
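+ // Locate the closure's `|...|` parameter list so the lint can point at the closure header.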
+ let pos = snippet_opt(cx, header_span).and_then(|snip| {
+ let low_offset = snip.find('|')?;
+ let high_offset = 1 + snip.get(low_offset + 1..)?.find('|')?;
+ let low = header_span.lo() + BytePos(low_offset as u32);
+ let high = low + BytePos(high_offset as u32 + 1);
+
+ Some((low, high))
+ });
+
+ if let Some((low, high)) = pos {
+ Span::new(low, high, header_span.ctxt(), header_span.parent())
+ } else {
+ return;
+ }
+ },
+ };
+
+ span_lint_and_help(
+ cx,
+ COGNITIVE_COMPLEXITY,
+ fn_span,
+ &format!(
+ "the function has a cognitive complexity of ({}/{})",
+ rust_cc,
+ self.limit.limit()
+ ),
+ None,
+ "you could split it up into multiple smaller functions",
+ );
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for CognitiveComplexity {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ let def_id = cx.tcx.hir().local_def_id(hir_id);
+ if !cx.tcx.has_attr(def_id.to_def_id(), sym::test) {
+ self.check(cx, kind, decl, body, span);
+ }
+ }
+
+ fn enter_lint_attrs(&mut self, cx: &LateContext<'tcx>, attrs: &'tcx [Attribute]) {
+ self.limit.push_attrs(cx.sess(), attrs, "cognitive_complexity");
+ }
+ fn exit_lint_attrs(&mut self, cx: &LateContext<'tcx>, attrs: &'tcx [Attribute]) {
+ self.limit.pop_attrs(cx.sess(), attrs, "cognitive_complexity");
+ }
+}
+
+struct CcHelper {
+ cc: u64,
+ returns: u64,
+}
+
+impl<'tcx> Visitor<'tcx> for CcHelper {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ walk_expr(self, e);
+ match e.kind {
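+ // `if` adds one, a `match` adds one if it has more than one arm plus one per guarded arm,
+ // and `return`s are counted separately to be discounted later.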
+ ExprKind::If(_, _, _) => {
+ self.cc += 1;
+ },
+ ExprKind::Match(_, arms, _) => {
+ if arms.len() > 1 {
+ self.cc += 1;
+ }
+ self.cc += arms.iter().filter(|arm| arm.guard.is_some()).count() as u64;
+ },
+ ExprKind::Ret(_) => self.returns += 1,
+ _ => {},
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/collapsible_if.rs b/src/tools/clippy/clippy_lints/src/collapsible_if.rs
new file mode 100644
index 000000000..90430b71a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/collapsible_if.rs
@@ -0,0 +1,195 @@
+//! Checks for if expressions that contain only an if expression.
+//!
+//! For example, the lint would catch:
+//!
+//! ```rust,ignore
+//! if x {
+//! if y {
+//! println!("Hello world");
+//! }
+//! }
+//! ```
+//!
+//! This lint is **warn** by default
+
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::{snippet, snippet_block, snippet_block_with_applicability};
+use clippy_utils::sugg::Sugg;
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for nested `if` statements which can be collapsed
+ /// by `&&`-combining their conditions.
+ ///
+ /// ### Why is this bad?
+ /// Each `if`-statement adds one level of nesting, which
+ /// makes code look more complex than it really is.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let (x, y) = (true, true);
+ /// if x {
+ /// if y {
+ /// // …
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let (x, y) = (true, true);
+ /// if x && y {
+ /// // …
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub COLLAPSIBLE_IF,
+ style,
+ "nested `if`s that can be collapsed (e.g., `if x { if y { ... } }`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for collapsible `else { if ... }` expressions
+ /// that can be collapsed to `else if ...`.
+ ///
+ /// ### Why is this bad?
+ /// Each `if`-statement adds one level of nesting, which
+ /// makes code look more complex than it really is.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ ///
+ /// if x {
+ /// …
+ /// } else {
+ /// if y {
+ /// …
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Should be written:
+ ///
+ /// ```rust,ignore
+ /// if x {
+ /// …
+ /// } else if y {
+ /// …
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub COLLAPSIBLE_ELSE_IF,
+ style,
+ "nested `else`-`if` expressions that can be collapsed (e.g., `else { if x { ... } }`)"
+}
+
+declare_lint_pass!(CollapsibleIf => [COLLAPSIBLE_IF, COLLAPSIBLE_ELSE_IF]);
+
+impl EarlyLintPass for CollapsibleIf {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
+ if !expr.span.from_expansion() {
+ check_if(cx, expr);
+ }
+ }
+}
+
+fn check_if(cx: &EarlyContext<'_>, expr: &ast::Expr) {
+ if let ast::ExprKind::If(check, then, else_) = &expr.kind {
+ if let Some(else_) = else_ {
+ check_collapsible_maybe_if_let(cx, then.span, else_);
+ } else if let ast::ExprKind::Let(..) = check.kind {
+ // Prevent triggering on `if let a = b { if c { .. } }`.
+ } else {
+ check_collapsible_no_if_let(cx, expr, check, then);
+ }
+ }
+}
+
+fn block_starts_with_comment(cx: &EarlyContext<'_>, expr: &ast::Block) -> bool {
+ // We trim all opening braces and whitespaces and then check if the next string is a comment.
+ let trimmed_block_text = snippet_block(cx, expr.span, "..", None)
+ .trim_start_matches(|c: char| c.is_whitespace() || c == '{')
+ .to_owned();
+ trimmed_block_text.starts_with("//") || trimmed_block_text.starts_with("/*")
+}
+
+fn check_collapsible_maybe_if_let(cx: &EarlyContext<'_>, then_span: Span, else_: &ast::Expr) {
+ if_chain! {
+ if let ast::ExprKind::Block(ref block, _) = else_.kind;
+ if !block_starts_with_comment(cx, block);
+ if let Some(else_) = expr_block(block);
+ if else_.attrs.is_empty();
+ if !else_.span.from_expansion();
+ if let ast::ExprKind::If(..) = else_.kind;
+ then {
+ // Prevent "elseif"
+ // Check that the "else" is followed by whitespace
+ let up_to_else = then_span.between(block.span);
+ let requires_space = if let Some(c) = snippet(cx, up_to_else, "..").chars().last() { !c.is_whitespace() } else { false };
+
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ COLLAPSIBLE_ELSE_IF,
+ block.span,
+ "this `else { if .. }` block can be collapsed",
+ "collapse nested if block",
+ format!(
+ "{}{}",
+ if requires_space { " " } else { "" },
+ snippet_block_with_applicability(cx, else_.span, "..", Some(block.span), &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+}
+
+fn check_collapsible_no_if_let(cx: &EarlyContext<'_>, expr: &ast::Expr, check: &ast::Expr, then: &ast::Block) {
+ if_chain! {
+ if !block_starts_with_comment(cx, then);
+ if let Some(inner) = expr_block(then);
+ if inner.attrs.is_empty();
+ if let ast::ExprKind::If(ref check_inner, ref content, None) = inner.kind;
+ // Prevent triggering on `if c { if let a = b { .. } }`.
+ if !matches!(check_inner.kind, ast::ExprKind::Let(..));
+ if expr.span.ctxt() == inner.span.ctxt();
+ then {
+ span_lint_and_then(cx, COLLAPSIBLE_IF, expr.span, "this `if` statement can be collapsed", |diag| {
+ let lhs = Sugg::ast(cx, check, "..");
+ let rhs = Sugg::ast(cx, check_inner, "..");
+ diag.span_suggestion(
+ expr.span,
+ "collapse nested if block",
+ format!(
+ "if {} {}",
+ lhs.and(&rhs),
+ snippet_block(cx, content.span, "..", Some(expr.span)),
+ ),
+ Applicability::MachineApplicable, // snippet
+ );
+ });
+ }
+ }
+}
+
+/// If the block contains only one expression, return it.
+fn expr_block(block: &ast::Block) -> Option<&ast::Expr> {
+ let mut it = block.stmts.iter();
+
+ if let (Some(stmt), None) = (it.next(), it.next()) {
+ match stmt.kind {
+ ast::StmtKind::Expr(ref expr) | ast::StmtKind::Semi(ref expr) => Some(expr),
+ _ => None,
+ }
+ } else {
+ None
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/comparison_chain.rs b/src/tools/clippy/clippy_lints/src/comparison_chain.rs
new file mode 100644
index 000000000..a05b41eb3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/comparison_chain.rs
@@ -0,0 +1,132 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{get_trait_def_id, if_sequence, in_constant, is_else_clause, paths, SpanlessEq};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks comparison chains written with `if` that can be
+ /// rewritten with `match` and `cmp`.
+ ///
+ /// ### Why is this bad?
+ /// `if` is not guaranteed to be exhaustive and conditionals can get
+ /// repetitive.
+ ///
+ /// ### Known problems
+ /// The match statement may be slower due to the compiler
+ /// not inlining the call to cmp. See issue [#5354](https://github.com/rust-lang/rust-clippy/issues/5354)
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// # fn a() {}
+ /// # fn b() {}
+ /// # fn c() {}
+ /// fn f(x: u8, y: u8) {
+ /// if x > y {
+ /// a()
+ /// } else if x < y {
+ /// b()
+ /// } else {
+ /// c()
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// use std::cmp::Ordering;
+ /// # fn a() {}
+ /// # fn b() {}
+ /// # fn c() {}
+ /// fn f(x: u8, y: u8) {
+ /// match x.cmp(&y) {
+ /// Ordering::Greater => a(),
+ /// Ordering::Less => b(),
+ /// Ordering::Equal => c()
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub COMPARISON_CHAIN,
+ style,
+ "`if`s that can be rewritten with `match` and `cmp`"
+}
+
+declare_lint_pass!(ComparisonChain => [COMPARISON_CHAIN]);
+
+impl<'tcx> LateLintPass<'tcx> for ComparisonChain {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ // We only care about the top-most `if` in the chain
+ if is_else_clause(cx.tcx, expr) {
+ return;
+ }
+
+ if in_constant(cx, expr.hir_id) {
+ return;
+ }
+
+ // Check that there exists at least one explicit else condition
+ let (conds, _) = if_sequence(expr);
+ if conds.len() < 2 {
+ return;
+ }
+
+ for cond in conds.windows(2) {
+ if let (&ExprKind::Binary(ref kind1, lhs1, rhs1), &ExprKind::Binary(ref kind2, lhs2, rhs2)) =
+ (&cond[0].kind, &cond[1].kind)
+ {
+ if !kind_is_cmp(kind1.node) || !kind_is_cmp(kind2.node) {
+ return;
+ }
+
+ // Check that both sets of operands are equal
+ let mut spanless_eq = SpanlessEq::new(cx);
+ let same_fixed_operands = spanless_eq.eq_expr(lhs1, lhs2) && spanless_eq.eq_expr(rhs1, rhs2);
+ let same_transposed_operands = spanless_eq.eq_expr(lhs1, rhs2) && spanless_eq.eq_expr(rhs1, lhs2);
+
+ if !same_fixed_operands && !same_transposed_operands {
+ return;
+ }
+
+ // Check that if the operation is the same, either it's not `==` or the operands are transposed
+ if kind1.node == kind2.node {
+ if kind1.node == BinOpKind::Eq {
+ return;
+ }
+ if !same_transposed_operands {
+ return;
+ }
+ }
+
+ // Check that the type being compared implements `core::cmp::Ord`
+ let ty = cx.typeck_results().expr_ty(lhs1);
+ let is_ord = get_trait_def_id(cx, &paths::ORD).map_or(false, |id| implements_trait(cx, ty, id, &[]));
+
+ if !is_ord {
+ return;
+ }
+ } else {
+ // We only care about comparison chains
+ return;
+ }
+ }
+ span_lint_and_help(
+ cx,
+ COMPARISON_CHAIN,
+ expr.span,
+ "`if` chain can be rewritten with `match`",
+ None,
+ "consider rewriting the `if` chain to use `cmp` and `match`",
+ );
+ }
+}
+
+fn kind_is_cmp(kind: BinOpKind) -> bool {
+ matches!(kind, BinOpKind::Lt | BinOpKind::Gt | BinOpKind::Eq)
+}
diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs
new file mode 100644
index 000000000..0e3d93175
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/copies.rs
@@ -0,0 +1,584 @@
+use clippy_utils::diagnostics::{span_lint_and_note, span_lint_and_then};
+use clippy_utils::source::{first_line_of_span, indent_of, reindent_multiline, snippet, snippet_opt};
+use clippy_utils::ty::needs_ordered_drop;
+use clippy_utils::visitors::for_each_expr;
+use clippy_utils::{
+ capture_local_usage, eq_expr_value, get_enclosing_block, hash_expr, hash_stmt, if_sequence, is_else_clause,
+ is_lint_allowed, path_to_local, search_same, ContainsName, HirEqInterExpr, SpanlessEq,
+};
+use core::iter;
+use core::ops::ControlFlow;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit;
+use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::hygiene::walk_chain;
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, Span, Symbol};
+use std::borrow::Cow;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for consecutive `if`s with the same condition.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a copy & paste error.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// if a == b {
+ /// …
+ /// } else if a == b {
+ /// …
+ /// }
+ /// ```
+ ///
+ /// Note that this lint ignores all conditions with a function call as it could
+ /// have side effects:
+ ///
+ /// ```ignore
+ /// if foo() {
+ /// …
+ /// } else if foo() { // not linted
+ /// …
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub IFS_SAME_COND,
+ correctness,
+ "consecutive `if`s with the same condition"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for consecutive `if`s with the same function call.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a copy & paste error.
+ /// Despite the fact that function can have side effects and `if` works as
+ /// intended, such an approach is implicit and can be considered a "code smell".
+ ///
+ /// ### Example
+ /// ```ignore
+ /// if foo() == bar {
+ /// …
+ /// } else if foo() == bar {
+ /// …
+ /// }
+ /// ```
+ ///
+ /// This probably should be:
+ /// ```ignore
+ /// if foo() == bar {
+ /// …
+ /// } else if foo() == baz {
+ /// …
+ /// }
+ /// ```
+ ///
+ /// or, if the original code was not a typo and the called function mutates state,
+ /// consider moving the mutation out of the `if` condition to avoid similarity to
+ /// a copy & paste error:
+ ///
+ /// ```ignore
+ /// let first = foo();
+ /// if first == bar {
+ /// …
+ /// } else {
+ /// let second = foo();
+ /// if second == bar {
+ /// …
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub SAME_FUNCTIONS_IN_IF_CONDITION,
+ pedantic,
+ "consecutive `if`s with the same function call"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `if/else` with the same body as the *then* part
+ /// and the *else* part.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a copy & paste error.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let foo = if … {
+ /// 42
+ /// } else {
+ /// 42
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub IF_SAME_THEN_ELSE,
+ correctness,
+ "`if` with the same `then` and `else` blocks"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks if the `if` and `else` block contain shared code that can be
+ /// moved out of the blocks.
+ ///
+ /// ### Why is this bad?
+ /// Duplicate code is less maintainable.
+ ///
+ /// ### Known problems
+ /// * The lint doesn't check if the moved expressions modify values that are being used in
+ /// the if condition. The suggestion can in that case modify the behavior of the program.
+ /// See [rust-clippy#7452](https://github.com/rust-lang/rust-clippy/issues/7452)
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let foo = if … {
+ /// println!("Hello World");
+ /// 13
+ /// } else {
+ /// println!("Hello World");
+ /// 42
+ /// };
+ /// ```
+ ///
+ /// Use instead:
+ /// ```ignore
+ /// println!("Hello World");
+ /// let foo = if … {
+ /// 13
+ /// } else {
+ /// 42
+ /// };
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub BRANCHES_SHARING_CODE,
+ nursery,
+ "`if` statement with shared code in all blocks"
+}
+
+declare_lint_pass!(CopyAndPaste => [
+ IFS_SAME_COND,
+ SAME_FUNCTIONS_IN_IF_CONDITION,
+ IF_SAME_THEN_ELSE,
+ BRANCHES_SHARING_CODE
+]);
+
+impl<'tcx> LateLintPass<'tcx> for CopyAndPaste {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if !expr.span.from_expansion() && matches!(expr.kind, ExprKind::If(..)) && !is_else_clause(cx.tcx, expr) {
+ let (conds, blocks) = if_sequence(expr);
+ lint_same_cond(cx, &conds);
+ lint_same_fns_in_if_cond(cx, &conds);
+ let all_same =
+ !is_lint_allowed(cx, IF_SAME_THEN_ELSE, expr.hir_id) && lint_if_same_then_else(cx, &conds, &blocks);
+ if !all_same && conds.len() != blocks.len() {
+ lint_branches_sharing_code(cx, &conds, &blocks, expr);
+ }
+ }
+ }
+}
+
+/// Checks if the given condition expression contains a `let` expression (a let chain).
+fn contains_let(e: &Expr<'_>) -> bool {
+ match e.kind {
+ ExprKind::Let(..) => true,
+ ExprKind::Binary(op, lhs, rhs) if op.node == BinOpKind::And => {
+ matches!(lhs.kind, ExprKind::Let(..)) || contains_let(rhs)
+ },
+ _ => false,
+ }
+}
+
+fn lint_if_same_then_else(cx: &LateContext<'_>, conds: &[&Expr<'_>], blocks: &[&Block<'_>]) -> bool {
+ let mut eq = SpanlessEq::new(cx);
+ blocks
+ .array_windows::<2>()
+ .enumerate()
+ .fold(true, |all_eq, (i, &[lhs, rhs])| {
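+ // Only lint when neither adjacent condition contains a `let` binding.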
+ if eq.eq_block(lhs, rhs) && !contains_let(conds[i]) && conds.get(i + 1).map_or(true, |e| !contains_let(e)) {
+ span_lint_and_note(
+ cx,
+ IF_SAME_THEN_ELSE,
+ lhs.span,
+ "this `if` has identical blocks",
+ Some(rhs.span),
+ "same as this",
+ );
+ all_eq
+ } else {
+ false
+ }
+ })
+}
+
+fn lint_branches_sharing_code<'tcx>(
+ cx: &LateContext<'tcx>,
+ conds: &[&'tcx Expr<'_>],
+ blocks: &[&'tcx Block<'_>],
+ expr: &'tcx Expr<'_>,
+) {
+ // We only lint ifs with multiple blocks
+ let &[first_block, ref blocks @ ..] = blocks else {
+ return;
+ };
+ let &[.., last_block] = blocks else {
+ return;
+ };
+
+ let res = scan_block_for_eq(cx, conds, first_block, blocks);
+ let sm = cx.tcx.sess.source_map();
+ let start_suggestion = res.start_span(first_block, sm).map(|span| {
+ let first_line_span = first_line_of_span(cx, expr.span);
+ let replace_span = first_line_span.with_hi(span.hi());
+ let cond_span = first_line_span.until(first_block.span);
+ let cond_snippet = reindent_multiline(snippet(cx, cond_span, "_"), false, None);
+ let cond_indent = indent_of(cx, cond_span);
+ let moved_snippet = reindent_multiline(snippet(cx, span, "_"), true, None);
+ let suggestion = moved_snippet.to_string() + "\n" + &cond_snippet + "{";
+ let suggestion = reindent_multiline(Cow::Borrowed(&suggestion), true, cond_indent);
+ (replace_span, suggestion.to_string())
+ });
+ let end_suggestion = res.end_span(last_block, sm).map(|span| {
+ let moved_snipped = reindent_multiline(snippet(cx, span, "_"), true, None);
+ let indent = indent_of(cx, expr.span.shrink_to_hi());
+ let suggestion = "}\n".to_string() + &moved_snipped;
+ let suggestion = reindent_multiline(Cow::Borrowed(&suggestion), true, indent);
+
+ let span = span.with_hi(last_block.span.hi());
+ // Improve formatting if the inner block has indentation (i.e. normal Rust formatting)
+ let test_span = Span::new(span.lo() - BytePos(4), span.lo(), span.ctxt(), span.parent());
+ let span = if snippet_opt(cx, test_span).map_or(false, |snip| snip == " ") {
+ span.with_lo(test_span.lo())
+ } else {
+ span
+ };
+ (span, suggestion.to_string())
+ });
+
+ let (span, msg, end_span) = match (&start_suggestion, &end_suggestion) {
+ (&Some((span, _)), &Some((end_span, _))) => (
+ span,
+ "all if blocks contain the same code at both the start and the end",
+ Some(end_span),
+ ),
+ (&Some((span, _)), None) => (span, "all if blocks contain the same code at the start", None),
+ (None, &Some((span, _))) => (span, "all if blocks contain the same code at the end", None),
+ (None, None) => return,
+ };
+ span_lint_and_then(cx, BRANCHES_SHARING_CODE, span, msg, |diag| {
+ if let Some(span) = end_span {
+ diag.span_note(span, "this code is shared at the end");
+ }
+ if let Some((span, sugg)) = start_suggestion {
+ diag.span_suggestion(
+ span,
+ "consider moving these statements before the if",
+ sugg,
+ Applicability::Unspecified,
+ );
+ }
+ if let Some((span, sugg)) = end_suggestion {
+ diag.span_suggestion(
+ span,
+ "consider moving these statements after the if",
+ sugg,
+ Applicability::Unspecified,
+ );
+ if !cx.typeck_results().expr_ty(expr).is_unit() {
+ diag.note("the end suggestion probably needs some adjustments to use the expression result correctly");
+ }
+ }
+ if check_for_warn_of_moved_symbol(cx, &res.moved_locals, expr) {
+ diag.warn("some moved values might need to be renamed to avoid wrong references");
+ }
+ });
+}
+
+struct BlockEq {
+ /// The end of the range of equal stmts at the start.
+ start_end_eq: usize,
+ /// The start of the range of equal stmts at the end.
+ end_begin_eq: Option<usize>,
+ /// The name and id of every local which can be moved at the beginning and the end.
+ moved_locals: Vec<(HirId, Symbol)>,
+}
+impl BlockEq {
+ fn start_span(&self, b: &Block<'_>, sm: &SourceMap) -> Option<Span> {
+ match &b.stmts[..self.start_end_eq] {
+ [first, .., last] => Some(sm.stmt_span(first.span, b.span).to(sm.stmt_span(last.span, b.span))),
+ [s] => Some(sm.stmt_span(s.span, b.span)),
+ [] => None,
+ }
+ }
+
+ fn end_span(&self, b: &Block<'_>, sm: &SourceMap) -> Option<Span> {
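+ // Build a span covering the trailing equal statements plus the block's tail expression, if any.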
+ match (&b.stmts[b.stmts.len() - self.end_begin_eq?..], b.expr) {
+ ([first, .., last], None) => Some(sm.stmt_span(first.span, b.span).to(sm.stmt_span(last.span, b.span))),
+ ([first, ..], Some(last)) => Some(sm.stmt_span(first.span, b.span).to(sm.stmt_span(last.span, b.span))),
+ ([s], None) => Some(sm.stmt_span(s.span, b.span)),
+ ([], Some(e)) => Some(walk_chain(e.span, b.span.ctxt())),
+ ([], None) => None,
+ }
+ }
+}
+
+/// If the statement is a local, checks if the bound names match the expected list of names.
+fn eq_binding_names(s: &Stmt<'_>, names: &[(HirId, Symbol)]) -> bool {
+ if let StmtKind::Local(l) = s.kind {
+ let mut i = 0usize;
+ let mut res = true;
+ l.pat.each_binding_or_first(&mut |_, _, _, name| {
+ if names.get(i).map_or(false, |&(_, n)| n == name.name) {
+ i += 1;
+ } else {
+ res = false;
+ }
+ });
+ res && i == names.len()
+ } else {
+ false
+ }
+}
+
+/// Checks if the statement modifies or moves any of the given locals.
+fn modifies_any_local<'tcx>(cx: &LateContext<'tcx>, s: &'tcx Stmt<'_>, locals: &HirIdSet) -> bool {
+ for_each_expr(s, |e| {
+ if let Some(id) = path_to_local(e)
+ && locals.contains(&id)
+ && !capture_local_usage(cx, e).is_imm_ref()
+ {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
+ })
+ .is_some()
+}
+
+/// Checks if the given statement should be considered equal to the statement in the same position
+/// for each block.
+fn eq_stmts(
+ stmt: &Stmt<'_>,
+ blocks: &[&Block<'_>],
+ get_stmt: impl for<'a> Fn(&'a Block<'a>) -> Option<&'a Stmt<'a>>,
+ eq: &mut HirEqInterExpr<'_, '_, '_>,
+ moved_bindings: &mut Vec<(HirId, Symbol)>,
+) -> bool {
+ (if let StmtKind::Local(l) = stmt.kind {
+ let old_count = moved_bindings.len();
+ l.pat.each_binding_or_first(&mut |_, id, _, name| {
+ moved_bindings.push((id, name.name));
+ });
+ let new_bindings = &moved_bindings[old_count..];
+ blocks
+ .iter()
+ .all(|b| get_stmt(b).map_or(false, |s| eq_binding_names(s, new_bindings)))
+ } else {
+ true
+ }) && blocks
+ .iter()
+ .all(|b| get_stmt(b).map_or(false, |s| eq.eq_stmt(s, stmt)))
+}
+
+#[expect(clippy::too_many_lines)]
+fn scan_block_for_eq<'tcx>(
+ cx: &LateContext<'tcx>,
+ conds: &[&'tcx Expr<'_>],
+ block: &'tcx Block<'_>,
+ blocks: &[&'tcx Block<'_>],
+) -> BlockEq {
+ let mut eq = SpanlessEq::new(cx);
+ let mut eq = eq.inter_expr();
+ let mut moved_locals = Vec::new();
+
+ let mut cond_locals = HirIdSet::default();
+ for &cond in conds {
+ let _: Option<!> = for_each_expr(cond, |e| {
+ if let Some(id) = path_to_local(e) {
+ cond_locals.insert(id);
+ }
+ ControlFlow::Continue(())
+ });
+ }
+
+ let mut local_needs_ordered_drop = false;
+ let start_end_eq = block
+ .stmts
+ .iter()
+ .enumerate()
+ .find(|&(i, stmt)| {
+ if let StmtKind::Local(l) = stmt.kind
+ && needs_ordered_drop(cx, cx.typeck_results().node_type(l.hir_id))
+ {
+ local_needs_ordered_drop = true;
+ return true;
+ }
+ modifies_any_local(cx, stmt, &cond_locals)
+ || !eq_stmts(stmt, blocks, |b| b.stmts.get(i), &mut eq, &mut moved_locals)
+ })
+ .map_or(block.stmts.len(), |(i, _)| i);
+
+ if local_needs_ordered_drop {
+ return BlockEq {
+ start_end_eq,
+ end_begin_eq: None,
+ moved_locals,
+ };
+ }
+
+ // Walk backwards through the final expression/statements so long as their hashes are equal. Note
+ // `SpanlessHash` treats all local references as equal allowing locals declared earlier in the block
+ // to match those in other blocks. e.g. If each block ends with the following the hash value will be
+ // the same even though each `x` binding will have a different `HirId`:
+ // let x = foo();
+ // x + 50
+ let expr_hash_eq = if let Some(e) = block.expr {
+ let hash = hash_expr(cx, e);
+ blocks
+ .iter()
+ .all(|b| b.expr.map_or(false, |e| hash_expr(cx, e) == hash))
+ } else {
+ blocks.iter().all(|b| b.expr.is_none())
+ };
+ if !expr_hash_eq {
+ return BlockEq {
+ start_end_eq,
+ end_begin_eq: None,
+ moved_locals,
+ };
+ }
+ let end_search_start = block.stmts[start_end_eq..]
+ .iter()
+ .rev()
+ .enumerate()
+ .find(|&(offset, stmt)| {
+ let hash = hash_stmt(cx, stmt);
+ blocks.iter().any(|b| {
+ b.stmts
+ // the bounds check will catch the underflow
+ .get(b.stmts.len().wrapping_sub(offset + 1))
+ .map_or(true, |s| hash != hash_stmt(cx, s))
+ })
+ })
+ .map_or(block.stmts.len() - start_end_eq, |(i, _)| i);
+
+ let moved_locals_at_start = moved_locals.len();
+ let mut i = end_search_start;
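+ // Walk the trailing statements, pairing each with its offset from the end of the block
+ // (offsets count down from `end_search_start`).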
+ let end_begin_eq = block.stmts[block.stmts.len() - end_search_start..]
+ .iter()
+ .zip(iter::repeat_with(move || {
+ let x = i;
+ i -= 1;
+ x
+ }))
+ .fold(end_search_start, |init, (stmt, offset)| {
+ if eq_stmts(
+ stmt,
+ blocks,
+ |b| b.stmts.get(b.stmts.len() - offset),
+ &mut eq,
+ &mut moved_locals,
+ ) {
+ init
+ } else {
+ // Clear out all locals seen at the end so far. None of them can be moved.
+ let stmts = &blocks[0].stmts;
+ for stmt in &stmts[stmts.len() - init..=stmts.len() - offset] {
+ if let StmtKind::Local(l) = stmt.kind {
+ l.pat.each_binding_or_first(&mut |_, id, _, _| {
+ eq.locals.remove(&id);
+ });
+ }
+ }
+ moved_locals.truncate(moved_locals_at_start);
+ offset - 1
+ }
+ });
+ if let Some(e) = block.expr {
+ for block in blocks {
+ if block.expr.map_or(false, |expr| !eq.eq_expr(expr, e)) {
+ moved_locals.truncate(moved_locals_at_start);
+ return BlockEq {
+ start_end_eq,
+ end_begin_eq: None,
+ moved_locals,
+ };
+ }
+ }
+ }
+
+ BlockEq {
+ start_end_eq,
+ end_begin_eq: Some(end_begin_eq),
+ moved_locals,
+ }
+}
+
+fn check_for_warn_of_moved_symbol(cx: &LateContext<'_>, symbols: &[(HirId, Symbol)], if_expr: &Expr<'_>) -> bool {
+ get_enclosing_block(cx, if_expr.hir_id).map_or(false, |block| {
+ let ignore_span = block.span.shrink_to_lo().to(if_expr.span);
+
+ symbols
+ .iter()
+ .filter(|&&(_, name)| !name.as_str().starts_with('_'))
+ .any(|&(_, name)| {
+ let mut walker = ContainsName { name, result: false };
+
+ // Scan block
+ block
+ .stmts
+ .iter()
+ .filter(|stmt| !ignore_span.overlaps(stmt.span))
+ .for_each(|stmt| intravisit::walk_stmt(&mut walker, stmt));
+
+ if let Some(expr) = block.expr {
+ intravisit::walk_expr(&mut walker, expr);
+ }
+
+ walker.result
+ })
+ })
+}
+
+/// Implementation of `IFS_SAME_COND`.
+fn lint_same_cond(cx: &LateContext<'_>, conds: &[&Expr<'_>]) {
+ for (i, j) in search_same(conds, |e| hash_expr(cx, e), |lhs, rhs| eq_expr_value(cx, lhs, rhs)) {
+ span_lint_and_note(
+ cx,
+ IFS_SAME_COND,
+ j.span,
+ "this `if` has the same condition as a previous `if`",
+ Some(i.span),
+ "same as this",
+ );
+ }
+}
+
+/// Implementation of `SAME_FUNCTIONS_IN_IF_CONDITION`.
+fn lint_same_fns_in_if_cond(cx: &LateContext<'_>, conds: &[&Expr<'_>]) {
+ let eq: &dyn Fn(&&Expr<'_>, &&Expr<'_>) -> bool = &|&lhs, &rhs| -> bool {
+ // Do not lint if any expr originates from a macro
+ if lhs.span.from_expansion() || rhs.span.from_expansion() {
+ return false;
+ }
+ // Do not emit the warning if `IFS_SAME_COND` already produced it.
+ if eq_expr_value(cx, lhs, rhs) {
+ return false;
+ }
+ SpanlessEq::new(cx).eq_expr(lhs, rhs)
+ };
+
+ for (i, j) in search_same(conds, |e| hash_expr(cx, e), eq) {
+ span_lint_and_note(
+ cx,
+ SAME_FUNCTIONS_IN_IF_CONDITION,
+ j.span,
+ "this `if` has the same function call as a previous `if`",
+ Some(i.span),
+ "same as this",
+ );
+ }
+}
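A quick, hedged illustration of what the two helpers above would flag (the function names `pick` and `check` are made up for this sketch): `ifs_same_cond` fires on a repeated side-effect-free condition, while `same_functions_in_if_condition` fires on structurally identical calls, whose results are deliberately not assumed equal.

```rust
fn pick(x: i32) -> bool {
    x > 0
}

fn check(a: i32) {
    // `ifs_same_cond`: the second branch repeats an identical, side-effect-free condition.
    if a == 1 {
        println!("first");
    } else if a == 1 {
        println!("never reached");
    }

    // `same_functions_in_if_condition`: the same call appears twice; because a call may have
    // side effects it is skipped by `ifs_same_cond`, so this lint notes it instead.
    if pick(a) {
        println!("positive");
    } else if pick(a) {
        println!("also never reached");
    }
}

fn main() {
    check(1);
}
```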
diff --git a/src/tools/clippy/clippy_lints/src/copy_iterator.rs b/src/tools/clippy/clippy_lints/src/copy_iterator.rs
new file mode 100644
index 000000000..026683f60
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/copy_iterator.rs
@@ -0,0 +1,62 @@
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::ty::is_copy;
+use rustc_hir::{Impl, Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+use if_chain::if_chain;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for types that implement `Copy` as well as
+ /// `Iterator`.
+ ///
+ /// ### Why is this bad?
+ /// Implicit copies can be confusing when working with
+ /// iterator combinators.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// #[derive(Copy, Clone)]
+ /// struct Countdown(u8);
+ ///
+ /// impl Iterator for Countdown {
+ /// // ...
+ /// }
+ ///
+ /// let a: Vec<_> = my_iterator.take(1).collect();
+ /// let b: Vec<_> = my_iterator.collect();
+ /// ```
+ #[clippy::version = "1.30.0"]
+ pub COPY_ITERATOR,
+ pedantic,
+ "implementing `Iterator` on a `Copy` type"
+}
+
+declare_lint_pass!(CopyIterator => [COPY_ITERATOR]);
+
+impl<'tcx> LateLintPass<'tcx> for CopyIterator {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if_chain! {
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ ..
+ }) = item.kind;
+ let ty = cx.tcx.type_of(item.def_id);
+ if is_copy(cx, ty);
+ if let Some(trait_id) = trait_ref.trait_def_id();
+ if cx.tcx.is_diagnostic_item(sym::Iterator, trait_id);
+ then {
+ span_lint_and_note(
+ cx,
+ COPY_ITERATOR,
+ item.span,
+ "you are implementing `Iterator` on a `Copy` type",
+ None,
+ "consider implementing `IntoIterator` instead",
+ );
+ }
+ }
+ }
+}
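To make the "implicit copies can be confusing" point concrete, here is a hedged sketch that fills in the elided `Countdown` implementation from the doc example (the exact collected values are only what this toy implementation produces): because the iterator is `Copy`, each adapter call consumes a fresh copy instead of advancing the original.

```rust
#[derive(Copy, Clone)]
struct Countdown(u8);

impl Iterator for Countdown {
    type Item = u8;

    fn next(&mut self) -> Option<u8> {
        self.0 = self.0.checked_sub(1)?;
        Some(self.0)
    }
}

fn main() {
    let it = Countdown(3);
    // `take(1)` consumes a *copy* of `it`, so `it` itself is never advanced.
    let a: Vec<_> = it.take(1).collect();
    let b: Vec<_> = it.collect();
    assert_eq!(a, vec![2]);
    assert_eq!(b, vec![2, 1, 0]); // not the "remaining" items one might expect
}
```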
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
new file mode 100644
index 000000000..454ec2338
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -0,0 +1,125 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast::ast::{AttrKind, Attribute, Item, ItemKind};
+use rustc_ast::token::{Token, TokenKind};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{symbol::sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `crate` as opposed to `$crate` in a macro definition.
+ ///
+ /// ### Why is this bad?
+ /// `crate` refers to the macro call's crate, whereas `$crate` refers to the macro definition's
+ /// crate. Rarely is the former intended. See:
+ /// https://doc.rust-lang.org/reference/macros-by-example.html#hygiene
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[macro_export]
+ /// macro_rules! print_message {
+ /// () => {
+ /// println!("{}", crate::MESSAGE);
+ /// };
+ /// }
+ /// pub const MESSAGE: &str = "Hello!";
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[macro_export]
+ /// macro_rules! print_message {
+ /// () => {
+ /// println!("{}", $crate::MESSAGE);
+ /// };
+ /// }
+ /// pub const MESSAGE: &str = "Hello!";
+ /// ```
+ ///
+ /// Note that if the use of `crate` is intentional, an `allow` attribute can be applied to the
+ /// macro definition, e.g.:
+ /// ```rust,ignore
+ /// #[allow(clippy::crate_in_macro_def)]
+ /// macro_rules! ok { ... crate::foo ... }
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub CRATE_IN_MACRO_DEF,
+ suspicious,
+ "using `crate` in a macro definition"
+}
+declare_lint_pass!(CrateInMacroDef => [CRATE_IN_MACRO_DEF]);
+
+impl EarlyLintPass for CrateInMacroDef {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if_chain! {
+ if item.attrs.iter().any(is_macro_export);
+ if let ItemKind::MacroDef(macro_def) = &item.kind;
+ let tts = macro_def.body.inner_tokens();
+ if let Some(span) = contains_unhygienic_crate_reference(&tts);
+ then {
+ span_lint_and_sugg(
+ cx,
+ CRATE_IN_MACRO_DEF,
+ span,
+ "`crate` references the macro call's crate",
+ "to reference the macro definition's crate, use",
+ String::from("$crate"),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
+
+fn is_macro_export(attr: &Attribute) -> bool {
+ if_chain! {
+ if let AttrKind::Normal(attr_item, _) = &attr.kind;
+ if let [segment] = attr_item.path.segments.as_slice();
+ then {
+ segment.ident.name == sym::macro_export
+ } else {
+ false
+ }
+ }
+}
+
+fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
+ let mut prev_is_dollar = false;
+ let mut cursor = tts.trees();
+ while let Some(curr) = cursor.next() {
+ if_chain! {
+ if !prev_is_dollar;
+ if let Some(span) = is_crate_keyword(curr);
+ if let Some(next) = cursor.look_ahead(0);
+ if is_token(next, &TokenKind::ModSep);
+ then {
+ return Some(span);
+ }
+ }
+ if let TokenTree::Delimited(_, _, tts) = &curr {
+ let span = contains_unhygienic_crate_reference(tts);
+ if span.is_some() {
+ return span;
+ }
+ }
+ prev_is_dollar = is_token(curr, &TokenKind::Dollar);
+ }
+ None
+}
+
+fn is_crate_keyword(tt: &TokenTree) -> Option<Span> {
+ if_chain! {
+ if let TokenTree::Token(Token { kind: TokenKind::Ident(symbol, _), span }, _) = tt;
+ if symbol.as_str() == "crate";
+ then { Some(*span) } else { None }
+ }
+}
+
+fn is_token(tt: &TokenTree, kind: &TokenKind) -> bool {
+ if let TokenTree::Token(Token { kind: other, .. }, _) = tt {
+ kind == other
+ } else {
+ false
+ }
+}
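A short sketch of what the token scan above does and does not report (the `VALUE` constant and macro names are hypothetical): a bare `crate::` path inside an exported macro body is flagged, while `$crate::` is skipped thanks to the `prev_is_dollar` check.

```rust
pub const VALUE: u32 = 1;

// Flagged: `crate::VALUE` resolves in whichever crate *calls* the macro.
#[macro_export]
macro_rules! flagged {
    () => {
        let _ = crate::VALUE;
    };
}

// Not flagged: `$crate::VALUE` always resolves in the defining crate.
#[macro_export]
macro_rules! not_flagged {
    () => {
        let _ = $crate::VALUE;
    };
}
```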
diff --git a/src/tools/clippy/clippy_lints/src/create_dir.rs b/src/tools/clippy/clippy_lints/src/create_dir.rs
new file mode 100644
index 000000000..18d34370a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/create_dir.rs
@@ -0,0 +1,54 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks usage of `std::fs::create_dir` and suggests using `std::fs::create_dir_all` instead.
+ ///
+ /// ### Why is this bad?
+ /// Sometimes `std::fs::create_dir` is mistakenly chosen over `std::fs::create_dir_all`.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// std::fs::create_dir("foo");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// std::fs::create_dir_all("foo");
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub CREATE_DIR,
+ restriction,
+ "calling `std::fs::create_dir` instead of `std::fs::create_dir_all`"
+}
+
+declare_lint_pass!(CreateDir => [CREATE_DIR]);
+
+impl LateLintPass<'_> for CreateDir {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(func, args) = expr.kind;
+ if let ExprKind::Path(ref path) = func.kind;
+ if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id();
+ if match_def_path(cx, def_id, &paths::STD_FS_CREATE_DIR);
+ then {
+ span_lint_and_sugg(
+ cx,
+ CREATE_DIR,
+ expr.span,
+ "calling `std::fs::create_dir` where there may be a better way",
+ "consider calling `std::fs::create_dir_all` instead",
+ format!("create_dir_all({})", snippet(cx, args[0].span, "..")),
+ Applicability::MaybeIncorrect,
+ )
+ }
+ }
+ }
+}
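For context on why `create_dir_all` is usually the safer call, a small hedged demo (it writes under the system temp directory; the `clippy_create_dir_demo` folder name is made up): `create_dir` errors when a parent is missing or the directory already exists, while `create_dir_all` handles both.

```rust
use std::fs;

fn main() -> std::io::Result<()> {
    let base = std::env::temp_dir().join("clippy_create_dir_demo");

    // fs::create_dir(base.join("a/b"))?; // would fail: parent `a` does not exist yet
    fs::create_dir_all(base.join("a/b"))?; // creates `a` and then `a/b`
    fs::create_dir_all(base.join("a/b"))?; // fine again: an existing directory is not an error

    fs::remove_dir_all(&base) // clean up
}
```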
diff --git a/src/tools/clippy/clippy_lints/src/dbg_macro.rs b/src/tools/clippy/clippy_lints/src/dbg_macro.rs
new file mode 100644
index 000000000..fe9f4f9ae
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/dbg_macro.rs
@@ -0,0 +1,101 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::root_macro_call_first_node;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{is_in_cfg_test, is_in_test_function};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of the `dbg!()` macro.
+ ///
+ /// ### Why is this bad?
+ /// The `dbg!` macro is intended as a debugging tool. It
+ /// should not be in version control.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// dbg!(true)
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// true
+ /// ```
+ #[clippy::version = "1.34.0"]
+ pub DBG_MACRO,
+ restriction,
+ "`dbg!` macro is intended as a debugging tool"
+}
+
+#[derive(Copy, Clone)]
+pub struct DbgMacro {
+ allow_dbg_in_tests: bool,
+}
+
+impl_lint_pass!(DbgMacro => [DBG_MACRO]);
+
+impl DbgMacro {
+ pub fn new(allow_dbg_in_tests: bool) -> Self {
+ DbgMacro { allow_dbg_in_tests }
+ }
+}
+
+impl LateLintPass<'_> for DbgMacro {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ if cx.tcx.is_diagnostic_item(sym::dbg_macro, macro_call.def_id) {
+ // allows `dbg!` in test code if allow-dbg-in-test is set to true in clippy.toml
+ if self.allow_dbg_in_tests
+ && (is_in_test_function(cx.tcx, expr.hir_id) || is_in_cfg_test(cx.tcx, expr.hir_id))
+ {
+ return;
+ }
+ let mut applicability = Applicability::MachineApplicable;
+ let suggestion = match expr.peel_drop_temps().kind {
+ // dbg!()
+ ExprKind::Block(_, _) => String::new(),
+ // dbg!(1)
+ ExprKind::Match(val, ..) => {
+ snippet_with_applicability(cx, val.span.source_callsite(), "..", &mut applicability).to_string()
+ },
+ // dbg!(2, 3)
+ ExprKind::Tup(
+ [
+ Expr {
+ kind: ExprKind::Match(first, ..),
+ ..
+ },
+ ..,
+ Expr {
+ kind: ExprKind::Match(last, ..),
+ ..
+ },
+ ],
+ ) => {
+ let snippet = snippet_with_applicability(
+ cx,
+ first.span.source_callsite().to(last.span.source_callsite()),
+ "..",
+ &mut applicability,
+ );
+ format!("({snippet})")
+ },
+ _ => return,
+ };
+
+ span_lint_and_sugg(
+ cx,
+ DBG_MACRO,
+ macro_call.span,
+ "`dbg!` macro is intended as a debugging tool",
+ "ensure to avoid having uses of it in version control",
+ suggestion,
+ applicability,
+ );
+ }
+ }
+}
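The `match` on `expr.peel_drop_temps().kind` above maps each `dbg!` shape to a replacement; a hedged sketch of the three shapes it handles and the text the suggestion should keep:

```rust
fn main() {
    // `dbg!(expr)` expands to a `match`, so the suggestion keeps just `41 + 1`.
    let a = dbg!(41 + 1);

    // `dbg!(x, y)` expands to a tuple of matches; the suggestion keeps `(1, 2)`.
    let pair = dbg!(1, 2);

    // Bare `dbg!()` expands to a block; the suggestion is the empty string, removing the call.
    dbg!();

    let _ = (a, pair);
}
```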
diff --git a/src/tools/clippy/clippy_lints/src/default.rs b/src/tools/clippy/clippy_lints/src/default.rs
new file mode 100644
index 000000000..d99a1aa29
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/default.rs
@@ -0,0 +1,307 @@
+use clippy_utils::diagnostics::{span_lint_and_note, span_lint_and_sugg};
+use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::ty::{has_drop, is_copy};
+use clippy_utils::{any_parent_is_automatically_derived, contains_name, get_parent_expr, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{Block, Expr, ExprKind, PatKind, QPath, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::{Ident, Symbol};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for literal calls to `Default::default()`.
+ ///
+ /// ### Why is this bad?
+ /// It's easier for the reader if the name of the type is used, rather than the
+ /// generic `Default`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let s: String = Default::default();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let s = String::default();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DEFAULT_TRAIT_ACCESS,
+ pedantic,
+ "checks for literal calls to `Default::default()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for immediate reassignment of fields initialized
+ /// with `Default::default()`.
+ ///
+ /// ### Why is this bad?
+ /// It's more idiomatic to use the [functional update syntax](https://doc.rust-lang.org/reference/expressions/struct-expr.html#functional-update-syntax).
+ ///
+ /// ### Known problems
+ /// Assignments to patterns that are of tuple type are not linted.
+ ///
+ /// ### Example
+ /// ```
+ /// # #[derive(Default)]
+ /// # struct A { i: i32 }
+ /// let mut a: A = Default::default();
+ /// a.i = 42;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```
+ /// # #[derive(Default)]
+ /// # struct A { i: i32 }
+ /// let a = A {
+ /// i: 42,
+ /// .. Default::default()
+ /// };
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub FIELD_REASSIGN_WITH_DEFAULT,
+ style,
+ "binding initialized with Default should have its fields set in the initializer"
+}
+
+#[derive(Default)]
+pub struct Default {
+ // Spans linted by `field_reassign_with_default`.
+ reassigned_linted: FxHashSet<Span>,
+}
+
+impl_lint_pass!(Default => [DEFAULT_TRAIT_ACCESS, FIELD_REASSIGN_WITH_DEFAULT]);
+
+impl<'tcx> LateLintPass<'tcx> for Default {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if !expr.span.from_expansion();
+ // Avoid cases already linted by `field_reassign_with_default`
+ if !self.reassigned_linted.contains(&expr.span);
+ if let ExprKind::Call(path, ..) = expr.kind;
+ if !any_parent_is_automatically_derived(cx.tcx, expr.hir_id);
+ if let ExprKind::Path(ref qpath) = path.kind;
+ if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
+ if match_def_path(cx, def_id, &paths::DEFAULT_TRAIT_METHOD);
+ if !is_update_syntax_base(cx, expr);
+ // Detect and ignore <Foo as Default>::default() because these calls do explicitly name the type.
+ if let QPath::Resolved(None, _path) = qpath;
+ let expr_ty = cx.typeck_results().expr_ty(expr);
+ if let ty::Adt(def, ..) = expr_ty.kind();
+ then {
+ // TODO: Work out a way to put "whatever the imported way of referencing
+ // this type in this file" rather than a fully-qualified type.
+ let replacement = format!("{}::default()", cx.tcx.def_path_str(def.did()));
+ span_lint_and_sugg(
+ cx,
+ DEFAULT_TRAIT_ACCESS,
+ expr.span,
+ &format!("calling `{}` is more clear than this expression", replacement),
+ "try",
+ replacement,
+ Applicability::Unspecified, // First resolve the TODO above
+ );
+ }
+ }
+ }
+
+ #[expect(clippy::too_many_lines)]
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &Block<'tcx>) {
+ // start from the `let mut _ = _::default();` and look at all the following
+ // statements, see if they re-assign the fields of the binding
+ let stmts_head = match block.stmts {
+ // Skip the last statement since there cannot possibly be any following statements that re-assign fields.
+ [head @ .., _] if !head.is_empty() => head,
+ _ => return,
+ };
+ for (stmt_idx, stmt) in stmts_head.iter().enumerate() {
+ // find all binding statements like `let mut _ = T::default()` where `T::default()` is the
+ // `default` method of the `Default` trait, and store statement index in current block being
+ // checked and the name of the bound variable
+ let (local, variant, binding_name, binding_type, span) = if_chain! {
+ // only take `let ...` statements
+ if let StmtKind::Local(local) = stmt.kind;
+ if let Some(expr) = local.init;
+ if !any_parent_is_automatically_derived(cx.tcx, expr.hir_id);
+ if !expr.span.from_expansion();
+ // only take bindings to identifiers
+ if let PatKind::Binding(_, binding_id, ident, _) = local.pat.kind;
+ // only when assigning `... = Default::default()`
+ if is_expr_default(expr, cx);
+ let binding_type = cx.typeck_results().node_type(binding_id);
+ if let Some(adt) = binding_type.ty_adt_def();
+ if adt.is_struct();
+ let variant = adt.non_enum_variant();
+ if adt.did().is_local() || !variant.is_field_list_non_exhaustive();
+ let module_did = cx.tcx.parent_module(stmt.hir_id).to_def_id();
+ if variant
+ .fields
+ .iter()
+ .all(|field| field.vis.is_accessible_from(module_did, cx.tcx));
+ let all_fields_are_copy = variant
+ .fields
+ .iter()
+ .all(|field| {
+ is_copy(cx, cx.tcx.type_of(field.did))
+ });
+ if !has_drop(cx, binding_type) || all_fields_are_copy;
+ then {
+ (local, variant, ident.name, binding_type, expr.span)
+ } else {
+ continue;
+ }
+ };
+
+ // find all "later statement"'s where the fields of the binding set as
+ // Default::default() get reassigned, unless the reassignment refers to the original binding
+ let mut first_assign = None;
+ let mut assigned_fields = Vec::new();
+ let mut cancel_lint = false;
+ for consecutive_statement in &block.stmts[stmt_idx + 1..] {
+ // find out if and which field was set by this `consecutive_statement`
+ if let Some((field_ident, assign_rhs)) = field_reassigned_by_stmt(consecutive_statement, binding_name) {
+ // interrupt and cancel lint if assign_rhs references the original binding
+ if contains_name(binding_name, assign_rhs) {
+ cancel_lint = true;
+ break;
+ }
+
+ // if the field was previously assigned, replace the assignment, otherwise insert the assignment
+ if let Some(prev) = assigned_fields
+ .iter_mut()
+ .find(|(field_name, _)| field_name == &field_ident.name)
+ {
+ *prev = (field_ident.name, assign_rhs);
+ } else {
+ assigned_fields.push((field_ident.name, assign_rhs));
+ }
+
+ // also set first instance of error for help message
+ if first_assign.is_none() {
+ first_assign = Some(consecutive_statement);
+ }
+ }
+ // interrupt if no field was assigned, since we only want to look at consecutive statements
+ else {
+ break;
+ }
+ }
+
+ // if there are incorrectly assigned fields, do a span_lint_and_note to suggest
+ // construction using `Ty { fields, ..Default::default() }`
+ if !assigned_fields.is_empty() && !cancel_lint {
+ // if all fields of the struct are not assigned, add `.. Default::default()` to the suggestion.
+ let ext_with_default = !variant
+ .fields
+ .iter()
+ .all(|field| assigned_fields.iter().any(|(a, _)| a == &field.name));
+
+ let field_list = assigned_fields
+ .into_iter()
+ .map(|(field, rhs)| {
+ // extract and store the assigned value for help message
+ let value_snippet = snippet_with_macro_callsite(cx, rhs.span, "..");
+ format!("{}: {}", field, value_snippet)
+ })
+ .collect::<Vec<String>>()
+ .join(", ");
+
+ // give correct suggestion if generics are involved (see #6944)
+ let binding_type = if_chain! {
+ if let ty::Adt(adt_def, substs) = binding_type.kind();
+ if !substs.is_empty();
+ then {
+ let adt_def_ty_name = cx.tcx.item_name(adt_def.did());
+ let generic_args = substs.iter().collect::<Vec<_>>();
+ let tys_str = generic_args
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ .join(", ");
+ format!("{}::<{}>", adt_def_ty_name, &tys_str)
+ } else {
+ binding_type.to_string()
+ }
+ };
+
+ let sugg = if ext_with_default {
+ if field_list.is_empty() {
+ format!("{}::default()", binding_type)
+ } else {
+ format!("{} {{ {}, ..Default::default() }}", binding_type, field_list)
+ }
+ } else {
+ format!("{} {{ {} }}", binding_type, field_list)
+ };
+
+ // span lint once per statement that binds default
+ span_lint_and_note(
+ cx,
+ FIELD_REASSIGN_WITH_DEFAULT,
+ first_assign.unwrap().span,
+ "field assignment outside of initializer for an instance created with Default::default()",
+ Some(local.span),
+ &format!(
+ "consider initializing the variable with `{}` and removing relevant reassignments",
+ sugg
+ ),
+ );
+ self.reassigned_linted.insert(span);
+ }
+ }
+ }
+}
+
+/// Checks if the given expression is the `default` method belonging to the `Default` trait.
+fn is_expr_default<'tcx>(expr: &'tcx Expr<'tcx>, cx: &LateContext<'tcx>) -> bool {
+ if_chain! {
+ if let ExprKind::Call(fn_expr, _) = &expr.kind;
+ if let ExprKind::Path(qpath) = &fn_expr.kind;
+ if let Res::Def(_, def_id) = cx.qpath_res(qpath, fn_expr.hir_id);
+ then {
+ // right hand side of assignment is `Default::default`
+ match_def_path(cx, def_id, &paths::DEFAULT_TRAIT_METHOD)
+ } else {
+ false
+ }
+ }
+}
+
+/// Returns the reassigned field and the assigning expression (right-hand side of assign).
+fn field_reassigned_by_stmt<'tcx>(this: &Stmt<'tcx>, binding_name: Symbol) -> Option<(Ident, &'tcx Expr<'tcx>)> {
+ if_chain! {
+ // only take assignments
+ if let StmtKind::Semi(later_expr) = this.kind;
+ if let ExprKind::Assign(assign_lhs, assign_rhs, _) = later_expr.kind;
+ // only take assignments to fields where the left-hand side field is a field of
+ // the same binding as the previous statement
+ if let ExprKind::Field(binding, field_ident) = assign_lhs.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path)) = binding.kind;
+ if let Some(second_binding_name) = path.segments.last();
+ if second_binding_name.ident.name == binding_name;
+ then {
+ Some((field_ident, assign_rhs))
+ } else {
+ None
+ }
+ }
+}
+
+/// Returns whether `expr` is the update syntax base: `Foo { a: 1, .. base }`
+fn is_update_syntax_base<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
+ if_chain! {
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let ExprKind::Struct(_, _, Some(base)) = parent.kind;
+ then {
+ base.hir_id == expr.hir_id
+ } else {
+ false
+ }
+ }
+}
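One detail worth illustrating is the generics branch above (the #6944 fix), where the suggestion spells out the concrete type parameters; a hedged sketch with a made-up `Wrapper` type, and the suggestion text only approximated:

```rust
#[derive(Default)]
struct Wrapper<T> {
    value: T,
    count: u32,
}

fn main() {
    // `field_reassign_with_default` would note this pair of statements and suggest
    // something like `let w = Wrapper::<String> { count: 2, ..Default::default() };`,
    // naming the concrete type parameter rather than the bare `Wrapper`.
    let mut w: Wrapper<String> = Default::default();
    w.count = 2;

    println!("{} {}", w.value, w.count);
}
```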
diff --git a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
new file mode 100644
index 000000000..3c996d3d2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
@@ -0,0 +1,68 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::last_path_segment;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{match_def_path, paths};
+use rustc_errors::Applicability;
+use rustc_hir::{def, Expr, ExprKind, GenericArg, QPath, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It checks for `std::iter::Empty::default()` and suggests replacing it with
+ /// `std::iter::empty()`.
+ ///
+ /// ### Why is this bad?
+ /// `std::iter::empty()` is the more idiomatic way.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = std::iter::Empty::<usize>::default();
+ /// let iter: std::iter::Empty<usize> = std::iter::Empty::default();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let _ = std::iter::empty::<usize>();
+ /// let iter: std::iter::Empty<usize> = std::iter::empty();
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub DEFAULT_INSTEAD_OF_ITER_EMPTY,
+ style,
+ "check `std::iter::Empty::default()` and replace with `std::iter::empty()`"
+}
+declare_lint_pass!(DefaultIterEmpty => [DEFAULT_INSTEAD_OF_ITER_EMPTY]);
+
+impl<'tcx> LateLintPass<'tcx> for DefaultIterEmpty {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Call(iter_expr, []) = &expr.kind
+ && let ExprKind::Path(QPath::TypeRelative(ty, _)) = &iter_expr.kind
+ && let TyKind::Path(ty_path) = &ty.kind
+ && let QPath::Resolved(None, path) = ty_path
+ && let def::Res::Def(_, def_id) = &path.res
+ && match_def_path(cx, *def_id, &paths::ITER_EMPTY)
+ {
+ let mut applicability = Applicability::MachineApplicable;
+ let sugg = make_sugg(cx, ty_path, &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ DEFAULT_INSTEAD_OF_ITER_EMPTY,
+ expr.span,
+ "`std::iter::empty()` is the more idiomatic way",
+ "try",
+ sugg,
+ applicability,
+ );
+ }
+ }
+}
+
+fn make_sugg(cx: &LateContext<'_>, ty_path: &rustc_hir::QPath<'_>, applicability: &mut Applicability) -> String {
+ if let Some(last) = last_path_segment(ty_path).args
+ && let Some(iter_ty) = last.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+ {
+ format!("std::iter::empty::<{}>()", snippet_with_applicability(cx, iter_ty.span, "..", applicability))
+ } else {
+ "std::iter::empty()".to_owned()
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
new file mode 100644
index 000000000..fb418a325
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
@@ -0,0 +1,245 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::numeric_literal;
+use clippy_utils::source::snippet_opt;
+use if_chain::if_chain;
+use rustc_ast::ast::{LitFloatType, LitIntType, LitKind};
+use rustc_errors::Applicability;
+use rustc_hir::{
+ intravisit::{walk_expr, walk_stmt, Visitor},
+ Body, Expr, ExprKind, HirId, Lit, Stmt, StmtKind,
+};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::{
+ lint::in_external_macro,
+ ty::{self, FloatTy, IntTy, PolyFnSig, Ty},
+};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::iter;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
+ /// inference.
+ ///
+ /// Default numeric fallback means that if numeric types have not yet been bound to concrete
+ /// types at the end of type inference, then integer type is bound to `i32`, and similarly
+ /// floating type is bound to `f64`.
+ ///
+ /// See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-int-fallback.md) for more information about the fallback.
+ ///
+ /// ### Why is this bad?
+ /// For those who are very careful about types, default numeric fallback
+ /// can be a pitfall that causes unexpected runtime behavior.
+ ///
+ /// ### Known problems
+ /// This lint can only be allowed at the function level or above.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let i = 10;
+ /// let f = 1.23;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let i = 10i32;
+ /// let f = 1.23f64;
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub DEFAULT_NUMERIC_FALLBACK,
+ restriction,
+ "usage of unconstrained numeric literals which may cause default numeric fallback."
+}
+
+declare_lint_pass!(DefaultNumericFallback => [DEFAULT_NUMERIC_FALLBACK]);
+
+impl<'tcx> LateLintPass<'tcx> for DefaultNumericFallback {
+ fn check_body(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
+ let mut visitor = NumericFallbackVisitor::new(cx);
+ visitor.visit_body(body);
+ }
+}
+
+struct NumericFallbackVisitor<'a, 'tcx> {
+ /// Stack managing the type bounds of expressions. The top element holds the bound for the current expression.
+ ty_bounds: Vec<TyBound<'tcx>>,
+
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> NumericFallbackVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ ty_bounds: vec![TyBound::Nothing],
+ cx,
+ }
+ }
+
+ /// Check whether a passed literal has the potential to cause default numeric fallback.
+ fn check_lit(&self, lit: &Lit, lit_ty: Ty<'tcx>, emit_hir_id: HirId) {
+ if_chain! {
+ if !in_external_macro(self.cx.sess(), lit.span);
+ if let Some(ty_bound) = self.ty_bounds.last();
+ if matches!(lit.node,
+ LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed));
+ if !ty_bound.is_numeric();
+ then {
+ let (suffix, is_float) = match lit_ty.kind() {
+ ty::Int(IntTy::I32) => ("i32", false),
+ ty::Float(FloatTy::F64) => ("f64", true),
+ // Default numeric fallback never results in other types.
+ _ => return,
+ };
+
+ let src = if let Some(src) = snippet_opt(self.cx, lit.span) {
+ src
+ } else {
+ match lit.node {
+ LitKind::Int(src, _) => format!("{}", src),
+ LitKind::Float(src, _) => format!("{}", src),
+ _ => return,
+ }
+ };
+ let sugg = numeric_literal::format(&src, Some(suffix), is_float);
+ span_lint_hir_and_then(
+ self.cx,
+ DEFAULT_NUMERIC_FALLBACK,
+ emit_hir_id,
+ lit.span,
+ "default numeric fallback might occur",
+ |diag| {
+ diag.span_suggestion(lit.span, "consider adding suffix", sugg, Applicability::MaybeIncorrect);
+ }
+ );
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ match &expr.kind {
+ ExprKind::Call(func, args) => {
+ if let Some(fn_sig) = fn_sig_opt(self.cx, func.hir_id) {
+ for (expr, bound) in iter::zip(*args, fn_sig.skip_binder().inputs()) {
+ // Push found arg type, then visit arg.
+ self.ty_bounds.push(TyBound::Ty(*bound));
+ self.visit_expr(expr);
+ self.ty_bounds.pop();
+ }
+ return;
+ }
+ },
+
+ ExprKind::MethodCall(_, args, _) => {
+ if let Some(def_id) = self.cx.typeck_results().type_dependent_def_id(expr.hir_id) {
+ let fn_sig = self.cx.tcx.fn_sig(def_id).skip_binder();
+ for (expr, bound) in iter::zip(*args, fn_sig.inputs()) {
+ self.ty_bounds.push(TyBound::Ty(*bound));
+ self.visit_expr(expr);
+ self.ty_bounds.pop();
+ }
+ return;
+ }
+ },
+
+ ExprKind::Struct(_, fields, base) => {
+ let ty = self.cx.typeck_results().expr_ty(expr);
+ if_chain! {
+ if let Some(adt_def) = ty.ty_adt_def();
+ if adt_def.is_struct();
+ if let Some(variant) = adt_def.variants().iter().next();
+ then {
+ let fields_def = &variant.fields;
+
+ // Push field type then visit each field expr.
+ for field in fields.iter() {
+ let bound =
+ fields_def
+ .iter()
+ .find_map(|f_def| {
+ if f_def.ident(self.cx.tcx) == field.ident
+ { Some(self.cx.tcx.type_of(f_def.did)) }
+ else { None }
+ });
+ self.ty_bounds.push(bound.into());
+ self.visit_expr(field.expr);
+ self.ty_bounds.pop();
+ }
+
+ // Visit base with no bound.
+ if let Some(base) = base {
+ self.ty_bounds.push(TyBound::Nothing);
+ self.visit_expr(base);
+ self.ty_bounds.pop();
+ }
+ return;
+ }
+ }
+ },
+
+ ExprKind::Lit(lit) => {
+ let ty = self.cx.typeck_results().expr_ty(expr);
+ self.check_lit(lit, ty, expr.hir_id);
+ return;
+ },
+
+ _ => {},
+ }
+
+ walk_expr(self, expr);
+ }
+
+ fn visit_stmt(&mut self, stmt: &'tcx Stmt<'_>) {
+ match stmt.kind {
+ StmtKind::Local(local) => {
+ if local.ty.is_some() {
+ self.ty_bounds.push(TyBound::Any);
+ } else {
+ self.ty_bounds.push(TyBound::Nothing);
+ }
+ },
+
+ _ => self.ty_bounds.push(TyBound::Nothing),
+ }
+
+ walk_stmt(self, stmt);
+ self.ty_bounds.pop();
+ }
+}
+
+fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<PolyFnSig<'tcx>> {
+ let node_ty = cx.typeck_results().node_type_opt(hir_id)?;
+ // We can't use `Ty::fn_sig` because it automatically performs substitutions; this may result in false negatives.
+ match node_ty.kind() {
+ ty::FnDef(def_id, _) => Some(cx.tcx.fn_sig(*def_id)),
+ ty::FnPtr(fn_sig) => Some(*fn_sig),
+ _ => None,
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+enum TyBound<'tcx> {
+ Any,
+ Ty(Ty<'tcx>),
+ Nothing,
+}
+
+impl<'tcx> TyBound<'tcx> {
+ fn is_numeric(self) -> bool {
+ match self {
+ TyBound::Any => true,
+ TyBound::Ty(t) => t.is_numeric(),
+ TyBound::Nothing => false,
+ }
+ }
+}
+
+impl<'tcx> From<Option<Ty<'tcx>>> for TyBound<'tcx> {
+ fn from(v: Option<Ty<'tcx>>) -> Self {
+ match v {
+ Some(t) => TyBound::Ty(t),
+ None => TyBound::Nothing,
+ }
+ }
+}
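To see the fallback the visitor above is hunting for, a minimal sketch (the asserted sizes are simply the standard `i32`/`f64` widths): with no suffix or annotation, the unconstrained literals fall back to `i32` and `f64`.

```rust
fn main() {
    // Unconstrained literals: the lint suggests `10i32` and `1.23f64` so the
    // chosen widths are explicit rather than a silent fallback.
    let i = 10;
    let f = 1.23;

    assert_eq!(std::mem::size_of_val(&i), 4); // i32 by fallback
    assert_eq!(std::mem::size_of_val(&f), 8); // f64 by fallback
}
```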
diff --git a/src/tools/clippy/clippy_lints/src/default_union_representation.rs b/src/tools/clippy/clippy_lints/src/default_union_representation.rs
new file mode 100644
index 000000000..d559ad423
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/default_union_representation.rs
@@ -0,0 +1,105 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{self as hir, HirId, Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+use rustc_typeck::hir_ty_to_ty;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Displays a warning when a union is declared with the default representation (without a `#[repr(C)]` attribute).
+ ///
+ /// ### Why is this bad?
+ /// Unions in Rust have unspecified layout by default, despite many people thinking that they
+ /// lay out each field at the start of the union (like C does). That is, there are no guarantees
+ /// about the offset of the fields for unions with multiple non-ZST fields without an explicitly
+ /// specified layout. These cases may lead to undefined behavior in unsafe blocks.
+ ///
+ /// ### Example
+ /// ```rust
+ /// union Foo {
+ /// a: i32,
+ /// b: u32,
+ /// }
+ ///
+ /// fn main() {
+ /// let _x: u32 = unsafe {
+ /// Foo { a: 0_i32 }.b // Undefined behavior: `b` is allowed to be padding
+ /// };
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[repr(C)]
+ /// union Foo {
+ /// a: i32,
+ /// b: u32,
+ /// }
+ ///
+ /// fn main() {
+ /// let _x: u32 = unsafe {
+ /// Foo { a: 0_i32 }.b // Now defined behavior, this is just an i32 -> u32 transmute
+ /// };
+ /// }
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub DEFAULT_UNION_REPRESENTATION,
+ restriction,
+ "unions without a `#[repr(C)]` attribute"
+}
+declare_lint_pass!(DefaultUnionRepresentation => [DEFAULT_UNION_REPRESENTATION]);
+
+impl<'tcx> LateLintPass<'tcx> for DefaultUnionRepresentation {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if is_union_with_two_non_zst_fields(cx, item) && !has_c_repr_attr(cx, item.hir_id()) {
+ span_lint_and_help(
+ cx,
+ DEFAULT_UNION_REPRESENTATION,
+ item.span,
+ "this union has the default representation",
+ None,
+ &format!(
+ "consider annotating `{}` with `#[repr(C)]` to explicitly specify memory layout",
+ cx.tcx.def_path_str(item.def_id.to_def_id())
+ ),
+ );
+ }
+ }
+}
+
+/// Returns true if the given item is a union with at least two non-ZST fields.
+fn is_union_with_two_non_zst_fields(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
+ if let ItemKind::Union(data, _) = &item.kind {
+ data.fields().iter().filter(|f| !is_zst(cx, f.ty)).count() >= 2
+ } else {
+ false
+ }
+}
+
+fn is_zst(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) -> bool {
+ if hir_ty.span.from_expansion() {
+ return false;
+ }
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ if let Ok(layout) = cx.layout_of(ty) {
+ layout.is_zst()
+ } else {
+ false
+ }
+}
+
+fn has_c_repr_attr(cx: &LateContext<'_>, hir_id: HirId) -> bool {
+ cx.tcx.hir().attrs(hir_id).iter().any(|attr| {
+ if attr.has_name(sym::repr) {
+ if let Some(items) = attr.meta_item_list() {
+ for item in items {
+ if item.is_word() && matches!(item.name_or_empty(), sym::C) {
+ return true;
+ }
+ }
+ }
+ }
+ false
+ })
+}
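The `is_union_with_two_non_zst_fields` filter above means the lint only fires where layout genuinely matters; a hedged sketch of that boundary (union names are hypothetical, and each field is only read after being written to stay well-defined):

```rust
// Not flagged: only one non-zero-sized field, so there is nothing ambiguous to lay out.
union SingleNonZst {
    marker: (), // zero-sized, ignored by the check
    value: u32,
}

// Flagged: two non-ZST fields and no `#[repr(C)]`, so their offsets are unspecified.
union TwoNonZst {
    a: i32,
    b: u32,
}

fn main() {
    let x = SingleNonZst { value: 1 };
    let y = TwoNonZst { a: -1 };
    unsafe {
        // Only the fields that were written are read back here.
        println!("{} {}", x.value, y.a);
    }
}
```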
diff --git a/src/tools/clippy/clippy_lints/src/deprecated_lints.rs b/src/tools/clippy/clippy_lints/src/deprecated_lints.rs
new file mode 100644
index 000000000..9aa5af319
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/deprecated_lints.rs
@@ -0,0 +1,217 @@
+// NOTE: Entries should be created with `cargo dev deprecate`
+
+/// This struct fakes the `Lint` declaration that is usually created by `declare_lint!`. This
+/// enables the simple extraction of the metadata without changing the current deprecation
+/// declaration.
+pub struct ClippyDeprecatedLint {
+ #[allow(dead_code)]
+ pub desc: &'static str,
+}
+
+#[macro_export]
+macro_rules! declare_deprecated_lint {
+ { $(#[$attr:meta])* pub $name: ident, $reason: literal} => {
+ $(#[$attr])*
+ #[allow(dead_code)]
+ pub static $name: ClippyDeprecatedLint = ClippyDeprecatedLint {
+ desc: $reason
+ };
+ }
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This used to check for `assert!(a == b)` and recommend
+ /// replacement with `assert_eq!(a, b)`, but this is no longer needed after RFC 2011.
+ #[clippy::version = "pre 1.29.0"]
+ pub SHOULD_ASSERT_EQ,
+ "`assert!()` will be more flexible with RFC 2011"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This used to check for `Vec::extend`, which was slower than
+ /// `Vec::extend_from_slice`. Thanks to specialization, this is no longer true.
+ #[clippy::version = "pre 1.29.0"]
+ pub EXTEND_FROM_SLICE,
+ "`.extend_from_slice(_)` is a faster way to extend a Vec by a slice"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// `Range::step_by(0)` used to be linted since it's
+ /// an infinite iterator, which is better expressed by `iter::repeat`,
+ /// but that method has been removed in favor of `Iterator::step_by`, which panics
+ /// if given a zero.
+ #[clippy::version = "pre 1.29.0"]
+ pub RANGE_STEP_BY_ZERO,
+ "`iterator.step_by(0)` panics nowadays"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This used to check for `Vec::as_slice`, which was unstable with good
+ /// stable alternatives. `Vec::as_slice` has now been stabilized.
+ #[clippy::version = "pre 1.29.0"]
+ pub UNSTABLE_AS_SLICE,
+ "`Vec::as_slice` has been stabilized in 1.7"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This used to check for `Vec::as_mut_slice`, which was unstable with good
+ /// stable alternatives. `Vec::as_mut_slice` has now been stabilized.
+ #[clippy::version = "pre 1.29.0"]
+ pub UNSTABLE_AS_MUT_SLICE,
+ "`Vec::as_mut_slice` has been stabilized in 1.7"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint should never have applied to non-pointer types, as transmuting
+ /// between non-pointer types of differing alignment is well-defined behavior (it's semantically
+ /// equivalent to a memcpy). This lint has thus been refactored into two separate lints:
+ /// cast_ptr_alignment and transmute_ptr_to_ptr.
+ #[clippy::version = "pre 1.29.0"]
+ pub MISALIGNED_TRANSMUTE,
+ "this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint is too subjective, not having a good reason for being in clippy.
+ /// Additionally, compound assignment operators may be overloaded separately from their non-assigning
+ /// counterparts, so this lint may suggest a change in behavior or the code may not compile.
+ #[clippy::version = "1.30.0"]
+ pub ASSIGN_OPS,
+ "using compound assignment operators (e.g., `+=`) is harmless"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// The original rule only linted `if let`. After it was
+ /// extended to also cover `match`, the `if let` name no longer fit,
+ /// so this lint was deprecated.
+ #[clippy::version = "pre 1.29.0"]
+ pub IF_LET_REDUNDANT_PATTERN_MATCHING,
+ "this lint has been changed to redundant_pattern_matching"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint used to suggest replacing `let mut vec =
+ /// Vec::with_capacity(n); vec.set_len(n);` with `let vec = vec![0; n];`. The
+ /// replacement has very different performance characteristics so the lint is
+ /// deprecated.
+ #[clippy::version = "pre 1.29.0"]
+ pub UNSAFE_VECTOR_INITIALIZATION,
+ "the replacement suggested by this lint had substantially different behavior"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint has been superseded by #[must_use] in rustc.
+ #[clippy::version = "1.39.0"]
+ pub UNUSED_COLLECT,
+ "`collect` has been marked as #[must_use] in rustc and that covers all cases of this lint"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// Associated-constants are now preferred.
+ #[clippy::version = "1.44.0"]
+ pub REPLACE_CONSTS,
+ "associated-constants `MIN`/`MAX` of integers are preferred to `{min,max}_value()` and module constants"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// The regex! macro does not exist anymore.
+ #[clippy::version = "1.47.0"]
+ pub REGEX_MACRO,
+ "the regex! macro has been removed from the regex crate in 2018"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint has been replaced by `manual_find_map`, a
+ /// more specific lint.
+ #[clippy::version = "1.51.0"]
+ pub FIND_MAP,
+ "this lint has been replaced by `manual_find_map`, a more specific lint"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint has been replaced by `manual_filter_map`, a
+ /// more specific lint.
+ #[clippy::version = "1.53.0"]
+ pub FILTER_MAP,
+ "this lint has been replaced by `manual_filter_map`, a more specific lint"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// The `avoid_breaking_exported_api` config option was added, which
+ /// enables the `enum_variant_names` lint for public items.
+ #[clippy::version = "1.54.0"]
+ pub PUB_ENUM_VARIANT_NAMES,
+ "set the `avoid-breaking-exported-api` config option to `false` to enable the `enum_variant_names` lint for public items"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// The `avoid_breaking_exported_api` config option was added, which
+ /// enables the `wrong_self_conversion` lint for public items.
+ #[clippy::version = "1.54.0"]
+ pub WRONG_PUB_SELF_CONVENTION,
+ "set the `avoid-breaking-exported-api` config option to `false` to enable the `wrong_self_convention` lint for public items"
+}
diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs
new file mode 100644
index 000000000..514661589
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/dereference.rs
@@ -0,0 +1,1148 @@
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_hir_and_then};
+use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
+use clippy_utils::sugg::has_enclosing_paren;
+use clippy_utils::ty::{expr_sig, peel_mid_ty_refs, variant_of_res};
+use clippy_utils::{get_parent_expr, is_lint_allowed, path_to_local, walk_to_expr_usage};
+use rustc_ast::util::parser::{PREC_POSTFIX, PREC_PREFIX};
+use rustc_data_structures::fx::FxIndexMap;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_ty, Visitor};
+use rustc_hir::{
+ self as hir, BindingAnnotation, Body, BodyId, BorrowKind, Closure, Expr, ExprKind, FnRetTy, GenericArg, HirId,
+ ImplItem, ImplItemKind, Item, ItemKind, Local, MatchSource, Mutability, Node, Pat, PatKind, Path, QPath, TraitItem,
+ TraitItemKind, TyKind, UnOp,
+};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
+use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable, TypeckResults};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{symbol::sym, Span, Symbol};
+use rustc_trait_selection::infer::InferCtxtExt;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for explicit `deref()` or `deref_mut()` method calls.
+ ///
+ /// ### Why is this bad?
+ /// Dereferencing by `&*x` or `&mut *x` is clearer and more concise,
+ /// when not part of a method chain.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::ops::Deref;
+ /// let a: &mut String = &mut String::from("foo");
+ /// let b: &str = a.deref();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let a: &mut String = &mut String::from("foo");
+ /// let b = &*a;
+ /// ```
+ ///
+ /// This lint excludes:
+ /// ```rust,ignore
+ /// let _ = d.unwrap().deref();
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub EXPLICIT_DEREF_METHODS,
+ pedantic,
+ "Explicit use of deref or deref_mut method while not in a method chain."
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for address of operations (`&`) that are going to
+ /// be dereferenced immediately by the compiler.
+ ///
+ /// ### Why is this bad?
+ /// The borrow is redundant: the compiler immediately dereferences it again,
+ /// so the expression can be passed directly and borrowed by the receiver as needed.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn fun(_a: &i32) {}
+ ///
+ /// let x: &i32 = &&&&&&5;
+ /// fun(&x);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn fun(_a: &i32) {}
+ /// let x: &i32 = &5;
+ /// fun(x);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_BORROW,
+ style,
+ "taking a reference that is going to be automatically dereferenced"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `ref` bindings which create a reference to a reference.
+ ///
+ /// ### Why is this bad?
+ /// The address-of operator at the use site is clearer about the need for a reference.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = Some("");
+ /// if let Some(ref x) = x {
+ /// // use `x` here
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x = Some("");
+ /// if let Some(x) = x {
+ /// // use `&x` here
+ /// }
+ /// ```
+ #[clippy::version = "1.54.0"]
+ pub REF_BINDING_TO_REFERENCE,
+ pedantic,
+ "`ref` binding to a reference"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for dereferencing expressions which would be covered by auto-deref.
+ ///
+ /// ### Why is this bad?
+ /// This unnecessarily complicates the code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = String::new();
+ /// let y: &str = &*x;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = String::new();
+ /// let y: &str = &x;
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub EXPLICIT_AUTO_DEREF,
+ nursery,
+ "dereferencing when the compiler would automatically dereference"
+}
+
+impl_lint_pass!(Dereferencing => [
+ EXPLICIT_DEREF_METHODS,
+ NEEDLESS_BORROW,
+ REF_BINDING_TO_REFERENCE,
+ EXPLICIT_AUTO_DEREF,
+]);
+
+#[derive(Default)]
+pub struct Dereferencing {
+ state: Option<(State, StateData)>,
+
+ // While parsing a `deref` method call in ufcs form, the path to the function is itself an
+ // expression. This is to store the id of that expression so it can be skipped when
+ // `check_expr` is called for it.
+ skip_expr: Option<HirId>,
+
+ /// The body the first local was found in. Used to emit lints when the traversal of the body has
+ /// been finished. Note we can't lint at the end of every body as they can be nested within each
+ /// other.
+ current_body: Option<BodyId>,
+ /// The list of locals currently being checked by the lint.
+ /// If the value is `None`, then the binding has been seen as a ref pattern, but is not linted.
+ /// This is needed for or-patterns where one of the branches can be linted, but another cannot
+ /// be.
+ ///
+ /// e.g. `m!(x) | Foo::Bar(ref x)`
+ ref_locals: FxIndexMap<HirId, Option<RefPat>>,
+}
+
+struct StateData {
+ /// Span of the top level expression
+ span: Span,
+ hir_id: HirId,
+ position: Position,
+}
+
+struct DerefedBorrow {
+ count: usize,
+ msg: &'static str,
+}
+
+enum State {
+ // Any number of deref method calls.
+ DerefMethod {
+ // The number of calls in a sequence which changed the referenced type
+ ty_changed_count: usize,
+ is_final_ufcs: bool,
+ /// The required mutability
+ target_mut: Mutability,
+ },
+ DerefedBorrow(DerefedBorrow),
+ ExplicitDeref {
+ // Span and id of the top-level deref expression if the parent expression is a borrow.
+ deref_span_id: Option<(Span, HirId)>,
+ },
+ ExplicitDerefField {
+ name: Symbol,
+ },
+ Reborrow {
+ deref_span: Span,
+ deref_hir_id: HirId,
+ },
+ Borrow,
+}
+
+// A reference operation considered by this lint pass
+enum RefOp {
+ Method(Mutability),
+ Deref,
+ AddrOf,
+}
+
+struct RefPat {
+ /// Whether every usage of the binding is dereferenced.
+ always_deref: bool,
+ /// The spans of all the ref bindings for this local.
+ spans: Vec<Span>,
+ /// The applicability of this suggestion.
+ app: Applicability,
+ /// All the replacements which need to be made.
+ replacements: Vec<(Span, String)>,
+ /// The [`HirId`] that the lint should be emitted at.
+ hir_id: HirId,
+}
+
+impl<'tcx> LateLintPass<'tcx> for Dereferencing {
+ #[expect(clippy::too_many_lines)]
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // Skip path expressions from deref calls. e.g. `Deref::deref(e)`
+ if Some(expr.hir_id) == self.skip_expr.take() {
+ return;
+ }
+
+ if let Some(local) = path_to_local(expr) {
+ self.check_local_usage(cx, expr, local);
+ }
+
+ // Stop processing sub expressions when a macro call is seen
+ if expr.span.from_expansion() {
+ if let Some((state, data)) = self.state.take() {
+ report(cx, expr, state, data);
+ }
+ return;
+ }
+
+ let typeck = cx.typeck_results();
+ let (kind, sub_expr) = if let Some(x) = try_parse_ref_op(cx.tcx, typeck, expr) {
+ x
+ } else {
+ // The whole chain of reference operations has been seen
+ if let Some((state, data)) = self.state.take() {
+ report(cx, expr, state, data);
+ }
+ return;
+ };
+
+ match (self.state.take(), kind) {
+ (None, kind) => {
+ let expr_ty = typeck.expr_ty(expr);
+ let (position, adjustments) = walk_parents(cx, expr);
+
+ match kind {
+ RefOp::Deref => {
+ if let Position::FieldAccess(name) = position
+ && !ty_contains_field(typeck.expr_ty(sub_expr), name)
+ {
+ self.state = Some((
+ State::ExplicitDerefField { name },
+ StateData { span: expr.span, hir_id: expr.hir_id, position },
+ ));
+ } else if position.is_deref_stable() {
+ self.state = Some((
+ State::ExplicitDeref { deref_span_id: None },
+ StateData { span: expr.span, hir_id: expr.hir_id, position },
+ ));
+ }
+ }
+ RefOp::Method(target_mut)
+ if !is_lint_allowed(cx, EXPLICIT_DEREF_METHODS, expr.hir_id)
+ && position.lint_explicit_deref() =>
+ {
+ self.state = Some((
+ State::DerefMethod {
+ ty_changed_count: if deref_method_same_type(expr_ty, typeck.expr_ty(sub_expr)) {
+ 0
+ } else {
+ 1
+ },
+ is_final_ufcs: matches!(expr.kind, ExprKind::Call(..)),
+ target_mut,
+ },
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ position
+ },
+ ));
+ },
+ RefOp::AddrOf => {
+ // Find the number of times the borrow is auto-derefed.
+ let mut iter = adjustments.iter();
+ let mut deref_count = 0usize;
+ let next_adjust = loop {
+ match iter.next() {
+ Some(adjust) => {
+ if !matches!(adjust.kind, Adjust::Deref(_)) {
+ break Some(adjust);
+ } else if !adjust.target.is_ref() {
+ deref_count += 1;
+ break iter.next();
+ }
+ deref_count += 1;
+ },
+ None => break None,
+ };
+ };
+
+ // Determine the required number of references before any can be removed. In all cases the
+ // reference made by the current expression will be removed. After that there are four cases to
+ // handle.
+ //
+ // 1. Auto-borrow will trigger in the current position, so no further references are required.
+ // 2. Auto-deref ends at a reference, or the underlying type, so one extra needs to be left to
+ // handle the automatically inserted re-borrow.
+ // 3. Auto-deref hits a user-defined `Deref` impl, so at least one reference needs to exist to
+ // start auto-deref.
+ // 4. If the chain of non-user-defined derefs ends with a mutable re-borrow, and re-borrow
+ // adjustments will not be inserted automatically, then leave one further reference to avoid
+ // moving a mutable borrow.
+ // e.g.
+ // fn foo<T>(x: &mut Option<&mut T>, y: &mut T) {
+ // let x = match x {
+ // // Removing the borrow will cause `x` to be moved
+ // Some(x) => &mut *x,
+ // None => y
+ // };
+ // }
+ let deref_msg =
+ "this expression creates a reference which is immediately dereferenced by the compiler";
+ let borrow_msg = "this expression borrows a value the compiler would automatically borrow";
+
+ let (required_refs, msg) = if position.can_auto_borrow() {
+ (1, if deref_count == 1 { borrow_msg } else { deref_msg })
+ } else if let Some(&Adjust::Borrow(AutoBorrow::Ref(_, mutability))) =
+ next_adjust.map(|a| &a.kind)
+ {
+ if matches!(mutability, AutoBorrowMutability::Mut { .. }) && !position.is_reborrow_stable()
+ {
+ (3, deref_msg)
+ } else {
+ (2, deref_msg)
+ }
+ } else {
+ (2, deref_msg)
+ };
+
+ if deref_count >= required_refs {
+ self.state = Some((
+ State::DerefedBorrow(DerefedBorrow {
+ // One of the required refs is for the current borrow expression, the remaining ones
+ // can't be removed without breaking the code. See earlier comment.
+ count: deref_count - required_refs,
+ msg,
+ }),
+ StateData { span: expr.span, hir_id: expr.hir_id, position },
+ ));
+ } else if position.is_deref_stable() {
+ self.state = Some((
+ State::Borrow,
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ position
+ },
+ ));
+ }
+ },
+ RefOp::Method(..) => (),
+ }
+ },
+ (
+ Some((
+ State::DerefMethod {
+ target_mut,
+ ty_changed_count,
+ ..
+ },
+ data,
+ )),
+ RefOp::Method(_),
+ ) => {
+ self.state = Some((
+ State::DerefMethod {
+ ty_changed_count: if deref_method_same_type(typeck.expr_ty(expr), typeck.expr_ty(sub_expr)) {
+ ty_changed_count
+ } else {
+ ty_changed_count + 1
+ },
+ is_final_ufcs: matches!(expr.kind, ExprKind::Call(..)),
+ target_mut,
+ },
+ data,
+ ));
+ },
+ (Some((State::DerefedBorrow(state), data)), RefOp::AddrOf) if state.count != 0 => {
+ self.state = Some((
+ State::DerefedBorrow(DerefedBorrow {
+ count: state.count - 1,
+ ..state
+ }),
+ data,
+ ));
+ },
+ (Some((State::DerefedBorrow(state), data)), RefOp::AddrOf) => {
+ let position = data.position;
+ report(cx, expr, State::DerefedBorrow(state), data);
+ if position.is_deref_stable() {
+ self.state = Some((
+ State::Borrow,
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ position,
+ },
+ ));
+ }
+ },
+ (Some((State::DerefedBorrow(state), data)), RefOp::Deref) => {
+ let position = data.position;
+ report(cx, expr, State::DerefedBorrow(state), data);
+ if let Position::FieldAccess(name) = position
+ && !ty_contains_field(typeck.expr_ty(sub_expr), name)
+ {
+ self.state = Some((
+ State::ExplicitDerefField { name },
+ StateData { span: expr.span, hir_id: expr.hir_id, position },
+ ));
+ } else if position.is_deref_stable() {
+ self.state = Some((
+ State::ExplicitDeref { deref_span_id: None },
+ StateData { span: expr.span, hir_id: expr.hir_id, position },
+ ));
+ }
+ },
+
+ (Some((State::Borrow, data)), RefOp::Deref) => {
+ if typeck.expr_ty(sub_expr).is_ref() {
+ self.state = Some((
+ State::Reborrow {
+ deref_span: expr.span,
+ deref_hir_id: expr.hir_id,
+ },
+ data,
+ ));
+ } else {
+ self.state = Some((
+ State::ExplicitDeref {
+ deref_span_id: Some((expr.span, expr.hir_id)),
+ },
+ data,
+ ));
+ }
+ },
+ (
+ Some((
+ State::Reborrow {
+ deref_span,
+ deref_hir_id,
+ },
+ data,
+ )),
+ RefOp::Deref,
+ ) => {
+ self.state = Some((
+ State::ExplicitDeref {
+ deref_span_id: Some((deref_span, deref_hir_id)),
+ },
+ data,
+ ));
+ },
+ (state @ Some((State::ExplicitDeref { .. }, _)), RefOp::Deref) => {
+ self.state = state;
+ },
+ (Some((State::ExplicitDerefField { name }, data)), RefOp::Deref)
+ if !ty_contains_field(typeck.expr_ty(sub_expr), name) =>
+ {
+ self.state = Some((State::ExplicitDerefField { name }, data));
+ },
+
+ (Some((state, data)), _) => report(cx, expr, state, data),
+ }
+ }
+
+ fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
+ if let PatKind::Binding(BindingAnnotation::Ref, id, name, _) = pat.kind {
+ if let Some(opt_prev_pat) = self.ref_locals.get_mut(&id) {
+ // This binding id has been seen before. Add this pattern to the list of changes.
+ if let Some(prev_pat) = opt_prev_pat {
+ if pat.span.from_expansion() {
+ // Doesn't match the context of the previous pattern. Can't lint here.
+ *opt_prev_pat = None;
+ } else {
+ prev_pat.spans.push(pat.span);
+ prev_pat.replacements.push((
+ pat.span,
+ snippet_with_context(cx, name.span, pat.span.ctxt(), "..", &mut prev_pat.app)
+ .0
+ .into(),
+ ));
+ }
+ }
+ return;
+ }
+
+ if_chain! {
+ if !pat.span.from_expansion();
+ if let ty::Ref(_, tam, _) = *cx.typeck_results().pat_ty(pat).kind();
+ // only lint immutable refs, because borrowed `&mut T` cannot be moved out
+ if let ty::Ref(_, _, Mutability::Not) = *tam.kind();
+ then {
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_context(cx, name.span, pat.span.ctxt(), "..", &mut app).0;
+ self.current_body = self.current_body.or(cx.enclosing_body);
+ self.ref_locals.insert(
+ id,
+ Some(RefPat {
+ always_deref: true,
+ spans: vec![pat.span],
+ app,
+ replacements: vec![(pat.span, snip.into())],
+ hir_id: pat.hir_id
+ }),
+ );
+ }
+ }
+ }
+ }
+
+ fn check_body_post(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
+ if Some(body.id()) == self.current_body {
+ for pat in self.ref_locals.drain(..).filter_map(|(_, x)| x) {
+ let replacements = pat.replacements;
+ let app = pat.app;
+ let lint = if pat.always_deref {
+ NEEDLESS_BORROW
+ } else {
+ REF_BINDING_TO_REFERENCE
+ };
+ span_lint_hir_and_then(
+ cx,
+ lint,
+ pat.hir_id,
+ pat.spans,
+ "this pattern creates a reference to a reference",
+ |diag| {
+ diag.multipart_suggestion("try this", replacements, app);
+ },
+ );
+ }
+ self.current_body = None;
+ }
+ }
+}
+
+fn try_parse_ref_op<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ typeck: &'tcx TypeckResults<'_>,
+ expr: &'tcx Expr<'_>,
+) -> Option<(RefOp, &'tcx Expr<'tcx>)> {
+ let (def_id, arg) = match expr.kind {
+ ExprKind::MethodCall(_, [arg], _) => (typeck.type_dependent_def_id(expr.hir_id)?, arg),
+ ExprKind::Call(
+ Expr {
+ kind: ExprKind::Path(path),
+ hir_id,
+ ..
+ },
+ [arg],
+ ) => (typeck.qpath_res(path, *hir_id).opt_def_id()?, arg),
+ ExprKind::Unary(UnOp::Deref, sub_expr) if !typeck.expr_ty(sub_expr).is_unsafe_ptr() => {
+ return Some((RefOp::Deref, sub_expr));
+ },
+ ExprKind::AddrOf(BorrowKind::Ref, _, sub_expr) => return Some((RefOp::AddrOf, sub_expr)),
+ _ => return None,
+ };
+ if tcx.is_diagnostic_item(sym::deref_method, def_id) {
+ Some((RefOp::Method(Mutability::Not), arg))
+ } else if tcx.trait_of_item(def_id)? == tcx.lang_items().deref_mut_trait()? {
+ Some((RefOp::Method(Mutability::Mut), arg))
+ } else {
+ None
+ }
+}
+
+// Checks whether the result type of a deref method call is the same as the argument type, i.e.
+// only the mutability of the reference may differ, not the pointed-to type.
+fn deref_method_same_type<'tcx>(result_ty: Ty<'tcx>, arg_ty: Ty<'tcx>) -> bool {
+ match (result_ty.kind(), arg_ty.kind()) {
+ (ty::Ref(_, result_ty, _), ty::Ref(_, arg_ty, _)) => result_ty == arg_ty,
+
+ // The result type for a deref method is always a reference
+ // Not matching the previous pattern means the argument type is not a reference
+ // This means that the type did change
+ _ => false,
+ }
+}
+
+/// The position of an expression relative to its parent.
+#[derive(Clone, Copy)]
+enum Position {
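+    /// The receiver of a method call.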
+ MethodReceiver,
+ /// The method is defined on a reference type. e.g. `impl Foo for &T`
+ MethodReceiverRefImpl,
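+    /// The callee of a function call.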
+ Callee,
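+    /// The object of a field access, storing the accessed field's name.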
+ FieldAccess(Symbol),
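+    /// The operand of a postfix expression: the `?` operator, `.await`, or indexing.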
+ Postfix,
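+    /// The operand of an explicit `*` dereference.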
+ Deref,
+    /// Any other location which will trigger auto-deref to a specific type.
+ DerefStable(i8),
+ /// Any other location which will trigger auto-reborrowing.
+ ReborrowStable(i8),
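+    /// Any other position, storing the parent expression's precedence.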
+ Other(i8),
+}
+impl Position {
+ fn is_deref_stable(self) -> bool {
+ matches!(self, Self::DerefStable(_))
+ }
+
+ fn is_reborrow_stable(self) -> bool {
+ matches!(self, Self::DerefStable(_) | Self::ReborrowStable(_))
+ }
+
+ fn can_auto_borrow(self) -> bool {
+ matches!(self, Self::MethodReceiver | Self::FieldAccess(_) | Self::Callee)
+ }
+
+ fn lint_explicit_deref(self) -> bool {
+ matches!(self, Self::Other(_) | Self::DerefStable(_) | Self::ReborrowStable(_))
+ }
+
+ fn precedence(self) -> i8 {
+ match self {
+ Self::MethodReceiver
+ | Self::MethodReceiverRefImpl
+ | Self::Callee
+ | Self::FieldAccess(_)
+ | Self::Postfix => PREC_POSTFIX,
+ Self::Deref => PREC_PREFIX,
+ Self::DerefStable(p) | Self::ReborrowStable(p) | Self::Other(p) => p,
+ }
+ }
+}
+
+/// Walks up the parent expressions attempting to determine both how stable the auto-deref result
+/// is, and which adjustments will be applied to it. Note this will not consider auto-borrow
+/// locations as those follow different rules.
+#[allow(clippy::too_many_lines)]
+fn walk_parents<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> (Position, &'tcx [Adjustment<'tcx>]) {
+ let mut adjustments = [].as_slice();
+ let mut precedence = 0i8;
+ let ctxt = e.span.ctxt();
+ let position = walk_to_expr_usage(cx, e, &mut |parent, child_id| {
+        // `LocalTableInContext` returns the wrong lifetime, so use `expr_adjustments` instead.
+ if adjustments.is_empty() && let Node::Expr(e) = cx.tcx.hir().get(child_id) {
+ adjustments = cx.typeck_results().expr_adjustments(e);
+ }
+ match parent {
+ Node::Local(Local { ty: Some(ty), span, .. }) if span.ctxt() == ctxt => {
+ Some(binding_ty_auto_deref_stability(ty, precedence))
+ },
+ Node::Item(&Item {
+ kind: ItemKind::Static(..) | ItemKind::Const(..),
+ def_id,
+ span,
+ ..
+ })
+ | Node::TraitItem(&TraitItem {
+ kind: TraitItemKind::Const(..),
+ def_id,
+ span,
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Const(..),
+ def_id,
+ span,
+ ..
+ }) if span.ctxt() == ctxt => {
+ let ty = cx.tcx.type_of(def_id);
+ Some(if ty.is_ref() {
+ Position::DerefStable(precedence)
+ } else {
+ Position::Other(precedence)
+ })
+ },
+
+ Node::Item(&Item {
+ kind: ItemKind::Fn(..),
+ def_id,
+ span,
+ ..
+ })
+ | Node::TraitItem(&TraitItem {
+ kind: TraitItemKind::Fn(..),
+ def_id,
+ span,
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Fn(..),
+ def_id,
+ span,
+ ..
+ }) if span.ctxt() == ctxt => {
+ let output = cx.tcx.fn_sig(def_id.to_def_id()).skip_binder().output();
+ Some(if !output.is_ref() {
+ Position::Other(precedence)
+ } else if output.has_placeholders() || output.has_opaque_types() {
+ Position::ReborrowStable(precedence)
+ } else {
+ Position::DerefStable(precedence)
+ })
+ },
+
+ Node::Expr(parent) if parent.span.ctxt() == ctxt => match parent.kind {
+ ExprKind::Ret(_) => {
+ let owner_id = cx.tcx.hir().body_owner(cx.enclosing_body.unwrap());
+ Some(
+ if let Node::Expr(Expr {
+ kind: ExprKind::Closure(&Closure { fn_decl, .. }),
+ ..
+ }) = cx.tcx.hir().get(owner_id)
+ {
+ match fn_decl.output {
+ FnRetTy::Return(ty) => binding_ty_auto_deref_stability(ty, precedence),
+ FnRetTy::DefaultReturn(_) => Position::Other(precedence),
+ }
+ } else {
+ let output = cx
+ .tcx
+ .fn_sig(cx.tcx.hir().local_def_id(owner_id))
+ .skip_binder()
+ .output();
+ if !output.is_ref() {
+ Position::Other(precedence)
+ } else if output.has_placeholders() || output.has_opaque_types() {
+ Position::ReborrowStable(precedence)
+ } else {
+ Position::DerefStable(precedence)
+ }
+ },
+ )
+ },
+ ExprKind::Call(func, _) if func.hir_id == child_id => {
+ (child_id == e.hir_id).then_some(Position::Callee)
+ },
+ ExprKind::Call(func, args) => args
+ .iter()
+ .position(|arg| arg.hir_id == child_id)
+ .zip(expr_sig(cx, func))
+ .and_then(|(i, sig)| sig.input_with_hir(i))
+ .map(|(hir_ty, ty)| match hir_ty {
+ // Type inference for closures can depend on how they're called. Only go by the explicit
+ // types here.
+ Some(ty) => binding_ty_auto_deref_stability(ty, precedence),
+ None => param_auto_deref_stability(ty.skip_binder(), precedence),
+ }),
+ ExprKind::MethodCall(_, args, _) => {
+ let id = cx.typeck_results().type_dependent_def_id(parent.hir_id).unwrap();
+ args.iter().position(|arg| arg.hir_id == child_id).map(|i| {
+ if i == 0 {
+ // Check for calls to trait methods where the trait is implemented on a reference.
+ // Two cases need to be handled:
+ // * `self` methods on `&T` will never have auto-borrow
+ // * `&self` methods on `&T` can have auto-borrow, but `&self` methods on `T` will take
+ // priority.
+ if e.hir_id != child_id {
+ Position::ReborrowStable(precedence)
+ } else if let Some(trait_id) = cx.tcx.trait_of_item(id)
+ && let arg_ty = cx.tcx.erase_regions(cx.typeck_results().expr_ty_adjusted(e))
+ && let ty::Ref(_, sub_ty, _) = *arg_ty.kind()
+ && let subs = match cx
+ .typeck_results()
+ .node_substs_opt(parent.hir_id)
+ .and_then(|subs| subs.get(1..))
+ {
+ Some(subs) => cx.tcx.mk_substs(subs.iter().copied()),
+ None => cx.tcx.mk_substs([].iter()),
+ } && let impl_ty = if cx.tcx.fn_sig(id).skip_binder().inputs()[0].is_ref() {
+ // Trait methods taking `&self`
+ sub_ty
+ } else {
+ // Trait methods taking `self`
+ arg_ty
+ } && impl_ty.is_ref()
+ && cx.tcx.infer_ctxt().enter(|infcx|
+ infcx
+ .type_implements_trait(trait_id, impl_ty, subs, cx.param_env)
+ .must_apply_modulo_regions()
+ )
+ {
+ Position::MethodReceiverRefImpl
+ } else {
+ Position::MethodReceiver
+ }
+ } else {
+ param_auto_deref_stability(cx.tcx.fn_sig(id).skip_binder().inputs()[i], precedence)
+ }
+ })
+ },
+ ExprKind::Struct(path, fields, _) => {
+ let variant = variant_of_res(cx, cx.qpath_res(path, parent.hir_id));
+ fields
+ .iter()
+ .find(|f| f.expr.hir_id == child_id)
+ .zip(variant)
+ .and_then(|(field, variant)| variant.fields.iter().find(|f| f.name == field.ident.name))
+ .map(|field| param_auto_deref_stability(cx.tcx.type_of(field.did), precedence))
+ },
+ ExprKind::Field(child, name) if child.hir_id == e.hir_id => Some(Position::FieldAccess(name.name)),
+ ExprKind::Unary(UnOp::Deref, child) if child.hir_id == e.hir_id => Some(Position::Deref),
+ ExprKind::Match(child, _, MatchSource::TryDesugar | MatchSource::AwaitDesugar)
+ | ExprKind::Index(child, _)
+ if child.hir_id == e.hir_id =>
+ {
+ Some(Position::Postfix)
+ },
+ _ if child_id == e.hir_id => {
+ precedence = parent.precedence().order();
+ None
+ },
+ _ => None,
+ },
+ _ => None,
+ }
+ })
+ .unwrap_or(Position::Other(precedence));
+ (position, adjustments)
+}
+
+// Checks the stability of auto-deref when assigned to a binding with the given explicit type.
+//
+// e.g.
+// let x = Box::new(Box::new(0u32));
+// let y1: &Box<_> = x.deref();
+// let y2: &Box<_> = &x;
+//
+// Here `y1` and `y2` would resolve to different types, so the type `&Box<_>` is not stable when
+// switching to auto-dereferencing.
+fn binding_ty_auto_deref_stability(ty: &hir::Ty<'_>, precedence: i8) -> Position {
+ let TyKind::Rptr(_, ty) = &ty.kind else {
+ return Position::Other(precedence);
+ };
+ let mut ty = ty;
+
+ loop {
+ break match ty.ty.kind {
+ TyKind::Rptr(_, ref ref_ty) => {
+ ty = ref_ty;
+ continue;
+ },
+ TyKind::Path(
+ QPath::TypeRelative(_, path)
+ | QPath::Resolved(
+ _,
+ Path {
+ segments: [.., path], ..
+ },
+ ),
+ ) => {
+ if let Some(args) = path.args
+ && args.args.iter().any(|arg| match arg {
+ GenericArg::Infer(_) => true,
+ GenericArg::Type(ty) => ty_contains_infer(ty),
+ _ => false,
+ })
+ {
+ Position::ReborrowStable(precedence)
+ } else {
+ Position::DerefStable(precedence)
+ }
+ },
+ TyKind::Slice(_)
+ | TyKind::Array(..)
+ | TyKind::BareFn(_)
+ | TyKind::Never
+ | TyKind::Tup(_)
+ | TyKind::Ptr(_)
+ | TyKind::TraitObject(..)
+ | TyKind::Path(_) => Position::DerefStable(precedence),
+ TyKind::OpaqueDef(..)
+ | TyKind::Infer
+ | TyKind::Typeof(..)
+ | TyKind::Err => Position::ReborrowStable(precedence),
+ };
+ }
+}
+
+// Checks whether a type is inferred at some point.
+// e.g. `_`, `Box<_>`, `[_]`
+fn ty_contains_infer(ty: &hir::Ty<'_>) -> bool {
+ struct V(bool);
+ impl Visitor<'_> for V {
+ fn visit_ty(&mut self, ty: &hir::Ty<'_>) {
+ if self.0
+ || matches!(
+ ty.kind,
+ TyKind::OpaqueDef(..) | TyKind::Infer | TyKind::Typeof(_) | TyKind::Err
+ )
+ {
+ self.0 = true;
+ } else {
+ walk_ty(self, ty);
+ }
+ }
+
+ fn visit_generic_arg(&mut self, arg: &GenericArg<'_>) {
+ if self.0 || matches!(arg, GenericArg::Infer(_)) {
+ self.0 = true;
+ } else if let GenericArg::Type(ty) = arg {
+ self.visit_ty(ty);
+ }
+ }
+ }
+ let mut v = V(false);
+ v.visit_ty(ty);
+ v.0
+}
+
+// Checks whether a type is stable when switching to auto-dereferencing.
+fn param_auto_deref_stability(ty: Ty<'_>, precedence: i8) -> Position {
+ let ty::Ref(_, mut ty, _) = *ty.kind() else {
+ return Position::Other(precedence);
+ };
+
+ loop {
+ break match *ty.kind() {
+ ty::Ref(_, ref_ty, _) => {
+ ty = ref_ty;
+ continue;
+ },
+ ty::Infer(_)
+ | ty::Error(_)
+ | ty::Param(_)
+ | ty::Bound(..)
+ | ty::Opaque(..)
+ | ty::Placeholder(_)
+ | ty::Dynamic(..) => Position::ReborrowStable(precedence),
+ ty::Adt(..) if ty.has_placeholders() || ty.has_param_types_or_consts() => {
+ Position::ReborrowStable(precedence)
+ },
+ ty::Adt(..)
+ | ty::Bool
+ | ty::Char
+ | ty::Int(_)
+ | ty::Uint(_)
+ | ty::Float(_)
+ | ty::Foreign(_)
+ | ty::Str
+ | ty::Array(..)
+ | ty::Slice(..)
+ | ty::RawPtr(..)
+ | ty::FnDef(..)
+ | ty::FnPtr(_)
+ | ty::Closure(..)
+ | ty::Generator(..)
+ | ty::GeneratorWitness(..)
+ | ty::Never
+ | ty::Tuple(_)
+ | ty::Projection(_) => Position::DerefStable(precedence),
+ };
+ }
+}
+
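+// Checks whether `ty` is a struct with a field named `name`.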
+fn ty_contains_field(ty: Ty<'_>, name: Symbol) -> bool {
+ if let ty::Adt(adt, _) = *ty.kind() {
+ adt.is_struct() && adt.all_fields().any(|f| f.name == name)
+ } else {
+ false
+ }
+}
+
+#[expect(clippy::needless_pass_by_value, clippy::too_many_lines)]
+fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data: StateData) {
+ match state {
+ State::DerefMethod {
+ ty_changed_count,
+ is_final_ufcs,
+ target_mut,
+ } => {
+ let mut app = Applicability::MachineApplicable;
+ let (expr_str, expr_is_macro_call) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
+ let ty = cx.typeck_results().expr_ty(expr);
+ let (_, ref_count) = peel_mid_ty_refs(ty);
+ let deref_str = if ty_changed_count >= ref_count && ref_count != 0 {
+                // A deref call changing `&T` -> `&U` requires two deref operators the first time
+                // it occurs: one to remove the reference, and a second to invoke the `Deref` impl.
+ "*".repeat(ty_changed_count + 1)
+ } else {
+ "*".repeat(ty_changed_count)
+ };
+ let addr_of_str = if ty_changed_count < ref_count {
+ // Check if a reborrow from &mut T -> &T is required.
+ if target_mut == Mutability::Not && matches!(ty.kind(), ty::Ref(_, _, Mutability::Mut)) {
+ "&*"
+ } else {
+ ""
+ }
+ } else if target_mut == Mutability::Mut {
+ "&mut "
+ } else {
+ "&"
+ };
+
+ let expr_str = if !expr_is_macro_call && is_final_ufcs && expr.precedence().order() < PREC_PREFIX {
+ format!("({})", expr_str)
+ } else {
+ expr_str.into_owned()
+ };
+
+ span_lint_and_sugg(
+ cx,
+ EXPLICIT_DEREF_METHODS,
+ data.span,
+ match target_mut {
+ Mutability::Not => "explicit `deref` method call",
+ Mutability::Mut => "explicit `deref_mut` method call",
+ },
+ "try this",
+ format!("{}{}{}", addr_of_str, deref_str, expr_str),
+ app,
+ );
+ },
+ State::DerefedBorrow(state) => {
+ let mut app = Applicability::MachineApplicable;
+ let (snip, snip_is_macro) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
+ span_lint_hir_and_then(cx, NEEDLESS_BORROW, data.hir_id, data.span, state.msg, |diag| {
+ let calls_field = matches!(expr.kind, ExprKind::Field(..)) && matches!(data.position, Position::Callee);
+ let sugg = if !snip_is_macro
+ && !has_enclosing_paren(&snip)
+ && (expr.precedence().order() < data.position.precedence() || calls_field)
+ {
+ format!("({})", snip)
+ } else {
+ snip.into()
+ };
+ diag.span_suggestion(data.span, "change this to", sugg, app);
+ });
+ },
+ State::ExplicitDeref { deref_span_id } => {
+ let (span, hir_id, precedence) = if let Some((span, hir_id)) = deref_span_id
+ && !cx.typeck_results().expr_ty(expr).is_ref()
+ {
+ (span, hir_id, PREC_PREFIX)
+ } else {
+ (data.span, data.hir_id, data.position.precedence())
+ };
+ span_lint_hir_and_then(
+ cx,
+ EXPLICIT_AUTO_DEREF,
+ hir_id,
+ span,
+ "deref which would be done by auto-deref",
+ |diag| {
+ let mut app = Applicability::MachineApplicable;
+ let (snip, snip_is_macro) = snippet_with_context(cx, expr.span, span.ctxt(), "..", &mut app);
+ let sugg =
+ if !snip_is_macro && expr.precedence().order() < precedence && !has_enclosing_paren(&snip) {
+ format!("({})", snip)
+ } else {
+ snip.into()
+ };
+ diag.span_suggestion(span, "try this", sugg, app);
+ },
+ );
+ },
+ State::ExplicitDerefField { .. } => {
+ span_lint_hir_and_then(
+ cx,
+ EXPLICIT_AUTO_DEREF,
+ data.hir_id,
+ data.span,
+ "deref which would be done by auto-deref",
+ |diag| {
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app).0;
+ diag.span_suggestion(data.span, "try this", snip.into_owned(), app);
+ },
+ );
+ },
+ State::Borrow | State::Reborrow { .. } => (),
+ }
+}
+
+impl Dereferencing {
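+    /// Records how a `ref` binding tracked in `ref_locals` is used: collects the replacements needed
+    /// to remove the extra reference, or drops the binding from the map when no clean fix is possible.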
+ fn check_local_usage<'tcx>(&mut self, cx: &LateContext<'tcx>, e: &Expr<'tcx>, local: HirId) {
+ if let Some(outer_pat) = self.ref_locals.get_mut(&local) {
+ if let Some(pat) = outer_pat {
+ // Check for auto-deref
+ if !matches!(
+ cx.typeck_results().expr_adjustments(e),
+ [
+ Adjustment {
+ kind: Adjust::Deref(_),
+ ..
+ },
+ Adjustment {
+ kind: Adjust::Deref(_),
+ ..
+ },
+ ..
+ ]
+ ) {
+ match get_parent_expr(cx, e) {
+ // Field accesses are the same no matter the number of references.
+ Some(Expr {
+ kind: ExprKind::Field(..),
+ ..
+ }) => (),
+ Some(&Expr {
+ span,
+ kind: ExprKind::Unary(UnOp::Deref, _),
+ ..
+ }) if !span.from_expansion() => {
+ // Remove explicit deref.
+ let snip = snippet_with_context(cx, e.span, span.ctxt(), "..", &mut pat.app).0;
+ pat.replacements.push((span, snip.into()));
+ },
+ Some(parent) if !parent.span.from_expansion() => {
+ // Double reference might be needed at this point.
+ if parent.precedence().order() == PREC_POSTFIX {
+ // Parentheses would be needed here, don't lint.
+ *outer_pat = None;
+ } else {
+ pat.always_deref = false;
+ let snip = snippet_with_context(cx, e.span, parent.span.ctxt(), "..", &mut pat.app).0;
+ pat.replacements.push((e.span, format!("&{}", snip)));
+ }
+ },
+ _ if !e.span.from_expansion() => {
+ // Double reference might be needed at this point.
+ pat.always_deref = false;
+ let snip = snippet_with_applicability(cx, e.span, "..", &mut pat.app);
+ pat.replacements.push((e.span, format!("&{}", snip)));
+ },
+ // Edge case for macros. The span of the identifier will usually match the context of the
+ // binding, but not if the identifier was created in a macro. e.g. `concat_idents` and proc
+ // macros
+ _ => *outer_pat = None,
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/derivable_impls.rs b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
new file mode 100644
index 000000000..4d7f4076d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
@@ -0,0 +1,117 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{is_default_equivalent, peel_blocks};
+use rustc_hir::{
+ def::{DefKind, Res},
+ Body, Expr, ExprKind, GenericArg, Impl, ImplItemKind, Item, ItemKind, Node, PathSegment, QPath, TyKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects manual `std::default::Default` implementations that are identical to a derived implementation.
+ ///
+ /// ### Why is this bad?
+ /// It is less concise.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo {
+ /// bar: bool
+ /// }
+ ///
+ /// impl Default for Foo {
+ /// fn default() -> Self {
+ /// Self {
+ /// bar: false
+ /// }
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// #[derive(Default)]
+ /// struct Foo {
+ /// bar: bool
+ /// }
+ /// ```
+ ///
+ /// ### Known problems
+ /// Derive macros [sometimes use incorrect bounds](https://github.com/rust-lang/rust/issues/26925)
+    /// in generic types, and the user-defined `impl` may be more generalized or
+    /// specialized than what derive will produce. This lint can't detect whether the manual
+    /// `impl` has exactly equal bounds, and is therefore disabled for types with
+    /// generic parameters.
+ #[clippy::version = "1.57.0"]
+ pub DERIVABLE_IMPLS,
+ complexity,
+ "manual implementation of the `Default` trait which is equal to a derive"
+}
+
+declare_lint_pass!(DerivableImpls => [DERIVABLE_IMPLS]);
+
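+/// Returns `true` if the expression is a path resolving to `Self` or to a struct/variant constructor.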
+fn is_path_self(e: &Expr<'_>) -> bool {
+ if let ExprKind::Path(QPath::Resolved(_, p)) = e.kind {
+ matches!(p.res, Res::SelfCtor(..) | Res::Def(DefKind::Ctor(..), _))
+ } else {
+ false
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if_chain! {
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ items: [child],
+ self_ty,
+ ..
+ }) = item.kind;
+ if !cx.tcx.has_attr(item.def_id.to_def_id(), sym::automatically_derived);
+ if !item.span.from_expansion();
+ if let Some(def_id) = trait_ref.trait_def_id();
+ if cx.tcx.is_diagnostic_item(sym::Default, def_id);
+ if let impl_item_hir = child.id.hir_id();
+ if let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir);
+ if let ImplItemKind::Fn(_, b) = &impl_item.kind;
+ if let Body { value: func_expr, .. } = cx.tcx.hir().body(*b);
+ if let Some(adt_def) = cx.tcx.type_of(item.def_id).ty_adt_def();
+ if let attrs = cx.tcx.hir().attrs(item.hir_id());
+ if !attrs.iter().any(|attr| attr.doc_str().is_some());
+ if let child_attrs = cx.tcx.hir().attrs(impl_item_hir);
+ if !child_attrs.iter().any(|attr| attr.doc_str().is_some());
+ if adt_def.is_struct();
+ then {
+ if let TyKind::Path(QPath::Resolved(_, p)) = self_ty.kind {
+ if let Some(PathSegment { args: Some(a), .. }) = p.segments.last() {
+ for arg in a.args {
+ if !matches!(arg, GenericArg::Lifetime(_)) {
+ return;
+ }
+ }
+ }
+ }
+ let should_emit = match peel_blocks(func_expr).kind {
+ ExprKind::Tup(fields) => fields.iter().all(|e| is_default_equivalent(cx, e)),
+ ExprKind::Call(callee, args)
+ if is_path_self(callee) => args.iter().all(|e| is_default_equivalent(cx, e)),
+ ExprKind::Struct(_, fields, _) => fields.iter().all(|ef| is_default_equivalent(cx, ef.expr)),
+ _ => false,
+ };
+ if should_emit {
+ let path_string = cx.tcx.def_path_str(adt_def.did());
+ span_lint_and_help(
+ cx,
+ DERIVABLE_IMPLS,
+ item.span,
+ "this `impl` can be derived",
+ None,
+ &format!("try annotating `{}` with `#[derive(Default)]`", path_string),
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs
new file mode 100644
index 000000000..a982990e4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/derive.rs
@@ -0,0 +1,528 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::paths;
+use clippy_utils::ty::{implements_trait, implements_trait_with_env, is_copy};
+use clippy_utils::{is_lint_allowed, match_def_path};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::def_id::DefId;
+use rustc_hir::intravisit::{walk_expr, walk_fn, walk_item, FnKind, Visitor};
+use rustc_hir::{
+ self as hir, BlockCheckMode, BodyId, Constness, Expr, ExprKind, FnDecl, HirId, Impl, Item, ItemKind, UnsafeSource,
+ Unsafety,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::traits::Reveal;
+use rustc_middle::ty::{
+ self, Binder, BoundConstness, GenericParamDefKind, ImplPolarity, ParamEnv, PredicateKind, TraitPredicate, TraitRef,
+ Ty, TyCtxt, Visibility,
+};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for deriving `Hash` but implementing `PartialEq`
+ /// explicitly or vice versa.
+ ///
+ /// ### Why is this bad?
+ /// The implementation of these traits must agree (for
+ /// example for use with `HashMap`) so it’s probably a bad idea to use a
+ /// default-generated `Hash` implementation with an explicitly defined
+ /// `PartialEq`. In particular, the following must hold for any type:
+ ///
+ /// ```text
+ /// k1 == k2 ⇒ hash(k1) == hash(k2)
+ /// ```
+ ///
+ /// ### Example
+ /// ```ignore
+ /// #[derive(Hash)]
+ /// struct Foo;
+ ///
+ /// impl PartialEq for Foo {
+ /// ...
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DERIVE_HASH_XOR_EQ,
+ correctness,
+ "deriving `Hash` but implementing `PartialEq` explicitly"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for deriving `Ord` but implementing `PartialOrd`
+ /// explicitly or vice versa.
+ ///
+ /// ### Why is this bad?
+ /// The implementation of these traits must agree (for
+ /// example for use with `sort`) so it’s probably a bad idea to use a
+ /// default-generated `Ord` implementation with an explicitly defined
+ /// `PartialOrd`. In particular, the following must hold for any type
+ /// implementing `Ord`:
+ ///
+ /// ```text
+ /// k1.cmp(&k2) == k1.partial_cmp(&k2).unwrap()
+ /// ```
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// #[derive(Ord, PartialEq, Eq)]
+ /// struct Foo;
+ ///
+ /// impl PartialOrd for Foo {
+ /// ...
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// #[derive(PartialEq, Eq)]
+ /// struct Foo;
+ ///
+ /// impl PartialOrd for Foo {
+ /// fn partial_cmp(&self, other: &Foo) -> Option<Ordering> {
+ /// Some(self.cmp(other))
+ /// }
+ /// }
+ ///
+ /// impl Ord for Foo {
+ /// ...
+ /// }
+ /// ```
+ /// or, if you don't need a custom ordering:
+ /// ```rust,ignore
+ /// #[derive(Ord, PartialOrd, PartialEq, Eq)]
+ /// struct Foo;
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub DERIVE_ORD_XOR_PARTIAL_ORD,
+ correctness,
+ "deriving `Ord` but implementing `PartialOrd` explicitly"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for explicit `Clone` implementations for `Copy`
+ /// types.
+ ///
+ /// ### Why is this bad?
+ /// To avoid surprising behavior, these traits should
+ /// agree and the behavior of `Copy` cannot be overridden. In almost all
+ /// situations a `Copy` type should have a `Clone` implementation that does
+ /// nothing more than copy the object, which is what `#[derive(Copy, Clone)]`
+ /// gets you.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// #[derive(Copy)]
+ /// struct Foo;
+ ///
+ /// impl Clone for Foo {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXPL_IMPL_CLONE_ON_COPY,
+ pedantic,
+ "implementing `Clone` explicitly on `Copy` types"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for deriving `serde::Deserialize` on a type that
+ /// has methods using `unsafe`.
+ ///
+ /// ### Why is this bad?
+ /// Deriving `serde::Deserialize` will create a constructor
+    /// that may violate invariants held by another constructor.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use serde::Deserialize;
+ ///
+ /// #[derive(Deserialize)]
+ /// pub struct Foo {
+ /// // ..
+ /// }
+ ///
+ /// impl Foo {
+ /// pub fn new() -> Self {
+ /// // setup here ..
+ /// }
+ ///
+ /// pub unsafe fn parts() -> (&str, &str) {
+ /// // assumes invariants hold
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub UNSAFE_DERIVE_DESERIALIZE,
+ pedantic,
+ "deriving `serde::Deserialize` on a type that has methods using `unsafe`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for types that derive `PartialEq` and could implement `Eq`.
+ ///
+ /// ### Why is this bad?
+ /// If a type `T` derives `PartialEq` and all of its members implement `Eq`,
+ /// then `T` can always implement `Eq`. Implementing `Eq` allows `T` to be used
+ /// in APIs that require `Eq` types. It also allows structs containing `T` to derive
+ /// `Eq` themselves.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[derive(PartialEq)]
+ /// struct Foo {
+ /// i_am_eq: i32,
+ /// i_am_eq_too: Vec<String>,
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[derive(PartialEq, Eq)]
+ /// struct Foo {
+ /// i_am_eq: i32,
+ /// i_am_eq_too: Vec<String>,
+ /// }
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub DERIVE_PARTIAL_EQ_WITHOUT_EQ,
+ style,
+ "deriving `PartialEq` on a type that can implement `Eq`, without implementing `Eq`"
+}
+
+declare_lint_pass!(Derive => [
+ EXPL_IMPL_CLONE_ON_COPY,
+ DERIVE_HASH_XOR_EQ,
+ DERIVE_ORD_XOR_PARTIAL_ORD,
+ UNSAFE_DERIVE_DESERIALIZE,
+ DERIVE_PARTIAL_EQ_WITHOUT_EQ
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Derive {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ ..
+ }) = item.kind
+ {
+ let ty = cx.tcx.type_of(item.def_id);
+ let is_automatically_derived = cx.tcx.has_attr(item.def_id.to_def_id(), sym::automatically_derived);
+
+ check_hash_peq(cx, item.span, trait_ref, ty, is_automatically_derived);
+ check_ord_partial_ord(cx, item.span, trait_ref, ty, is_automatically_derived);
+
+ if is_automatically_derived {
+ check_unsafe_derive_deserialize(cx, item, trait_ref, ty);
+ check_partial_eq_without_eq(cx, item.span, trait_ref, ty);
+ } else {
+ check_copy_clone(cx, item, trait_ref, ty);
+ }
+ }
+ }
+}
+
+/// Implementation of the `DERIVE_HASH_XOR_EQ` lint.
+fn check_hash_peq<'tcx>(
+ cx: &LateContext<'tcx>,
+ span: Span,
+ trait_ref: &hir::TraitRef<'_>,
+ ty: Ty<'tcx>,
+ hash_is_automatically_derived: bool,
+) {
+ if_chain! {
+ if let Some(peq_trait_def_id) = cx.tcx.lang_items().eq_trait();
+ if let Some(def_id) = trait_ref.trait_def_id();
+ if cx.tcx.is_diagnostic_item(sym::Hash, def_id);
+ then {
+ // Look for the PartialEq implementations for `ty`
+ cx.tcx.for_each_relevant_impl(peq_trait_def_id, ty, |impl_id| {
+ let peq_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived);
+
+ if peq_is_automatically_derived == hash_is_automatically_derived {
+ return;
+ }
+
+ let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
+
+ // Only care about `impl PartialEq<Foo> for Foo`
+                // For `impl PartialEq<B> for A`, `input_types` is `[A, B]`
+ if trait_ref.substs.type_at(1) == ty {
+ let mess = if peq_is_automatically_derived {
+ "you are implementing `Hash` explicitly but have derived `PartialEq`"
+ } else {
+ "you are deriving `Hash` but have implemented `PartialEq` explicitly"
+ };
+
+ span_lint_and_then(
+ cx,
+ DERIVE_HASH_XOR_EQ,
+ span,
+ mess,
+ |diag| {
+ if let Some(local_def_id) = impl_id.as_local() {
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
+ diag.span_note(
+ cx.tcx.hir().span(hir_id),
+ "`PartialEq` implemented here"
+ );
+ }
+ }
+ );
+ }
+ });
+ }
+ }
+}
+
+/// Implementation of the `DERIVE_ORD_XOR_PARTIAL_ORD` lint.
+fn check_ord_partial_ord<'tcx>(
+ cx: &LateContext<'tcx>,
+ span: Span,
+ trait_ref: &hir::TraitRef<'_>,
+ ty: Ty<'tcx>,
+ ord_is_automatically_derived: bool,
+) {
+ if_chain! {
+ if let Some(ord_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Ord);
+ if let Some(partial_ord_trait_def_id) = cx.tcx.lang_items().partial_ord_trait();
+ if let Some(def_id) = &trait_ref.trait_def_id();
+ if *def_id == ord_trait_def_id;
+ then {
+ // Look for the PartialOrd implementations for `ty`
+ cx.tcx.for_each_relevant_impl(partial_ord_trait_def_id, ty, |impl_id| {
+ let partial_ord_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived);
+
+ if partial_ord_is_automatically_derived == ord_is_automatically_derived {
+ return;
+ }
+
+ let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
+
+ // Only care about `impl PartialOrd<Foo> for Foo`
+                // For `impl PartialOrd<B> for A`, `input_types` is `[A, B]`
+ if trait_ref.substs.type_at(1) == ty {
+ let mess = if partial_ord_is_automatically_derived {
+ "you are implementing `Ord` explicitly but have derived `PartialOrd`"
+ } else {
+ "you are deriving `Ord` but have implemented `PartialOrd` explicitly"
+ };
+
+ span_lint_and_then(
+ cx,
+ DERIVE_ORD_XOR_PARTIAL_ORD,
+ span,
+ mess,
+ |diag| {
+ if let Some(local_def_id) = impl_id.as_local() {
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
+ diag.span_note(
+ cx.tcx.hir().span(hir_id),
+ "`PartialOrd` implemented here"
+ );
+ }
+ }
+ );
+ }
+ });
+ }
+ }
+}
+
+/// Implementation of the `EXPL_IMPL_CLONE_ON_COPY` lint.
+fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &hir::TraitRef<'_>, ty: Ty<'tcx>) {
+ let clone_id = match cx.tcx.lang_items().clone_trait() {
+ Some(id) if trait_ref.trait_def_id() == Some(id) => id,
+ _ => return,
+ };
+ let copy_id = match cx.tcx.lang_items().copy_trait() {
+ Some(id) => id,
+ None => return,
+ };
+ let (ty_adt, ty_subs) = match *ty.kind() {
+        // Unions can't derive `Clone`.
+ ty::Adt(adt, subs) if !adt.is_union() => (adt, subs),
+ _ => return,
+ };
+ // If the current self type doesn't implement Copy (due to generic constraints), search to see if
+ // there's a Copy impl for any instance of the adt.
+ if !is_copy(cx, ty) {
+ if ty_subs.non_erasable_generics().next().is_some() {
+ let has_copy_impl = cx.tcx.all_local_trait_impls(()).get(&copy_id).map_or(false, |impls| {
+ impls
+ .iter()
+ .any(|&id| matches!(cx.tcx.type_of(id).kind(), ty::Adt(adt, _) if ty_adt.did() == adt.did()))
+ });
+ if !has_copy_impl {
+ return;
+ }
+ } else {
+ return;
+ }
+ }
+ // Derive constrains all generic types to requiring Clone. Check if any type is not constrained for
+ // this impl.
+ if ty_subs.types().any(|ty| !implements_trait(cx, ty, clone_id, &[])) {
+ return;
+ }
+
+ span_lint_and_note(
+ cx,
+ EXPL_IMPL_CLONE_ON_COPY,
+ item.span,
+ "you are implementing `Clone` explicitly on a `Copy` type",
+ Some(item.span),
+ "consider deriving `Clone` or removing `Copy`",
+ );
+}
+
+/// Implementation of the `UNSAFE_DERIVE_DESERIALIZE` lint.
+fn check_unsafe_derive_deserialize<'tcx>(
+ cx: &LateContext<'tcx>,
+ item: &Item<'_>,
+ trait_ref: &hir::TraitRef<'_>,
+ ty: Ty<'tcx>,
+) {
+ fn has_unsafe<'tcx>(cx: &LateContext<'tcx>, item: &'tcx Item<'_>) -> bool {
+ let mut visitor = UnsafeVisitor { cx, has_unsafe: false };
+ walk_item(&mut visitor, item);
+ visitor.has_unsafe
+ }
+
+ if_chain! {
+ if let Some(trait_def_id) = trait_ref.trait_def_id();
+ if match_def_path(cx, trait_def_id, &paths::SERDE_DESERIALIZE);
+ if let ty::Adt(def, _) = ty.kind();
+ if let Some(local_def_id) = def.did().as_local();
+ let adt_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
+ if !is_lint_allowed(cx, UNSAFE_DERIVE_DESERIALIZE, adt_hir_id);
+ if cx.tcx.inherent_impls(def.did())
+ .iter()
+ .map(|imp_did| cx.tcx.hir().expect_item(imp_did.expect_local()))
+ .any(|imp| has_unsafe(cx, imp));
+ then {
+ span_lint_and_help(
+ cx,
+ UNSAFE_DERIVE_DESERIALIZE,
+ item.span,
+ "you are deriving `serde::Deserialize` on a type that has methods using `unsafe`",
+ None,
+ "consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html"
+ );
+ }
+ }
+}
+
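+/// Visitor used by `check_unsafe_derive_deserialize` to detect `unsafe` functions and blocks within an item.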
+struct UnsafeVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ has_unsafe: bool,
+}
+
+impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn visit_fn(&mut self, kind: FnKind<'tcx>, decl: &'tcx FnDecl<'_>, body_id: BodyId, span: Span, id: HirId) {
+ if self.has_unsafe {
+ return;
+ }
+
+ if_chain! {
+ if let Some(header) = kind.header();
+ if header.unsafety == Unsafety::Unsafe;
+ then {
+ self.has_unsafe = true;
+ }
+ }
+
+ walk_fn(self, kind, decl, body_id, span, id);
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if self.has_unsafe {
+ return;
+ }
+
+ if let ExprKind::Block(block, _) = expr.kind {
+ if block.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) {
+ self.has_unsafe = true;
+ }
+ }
+
+ walk_expr(self, expr);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+/// Implementation of the `DERIVE_PARTIAL_EQ_WITHOUT_EQ` lint.
+fn check_partial_eq_without_eq<'tcx>(cx: &LateContext<'tcx>, span: Span, trait_ref: &hir::TraitRef<'_>, ty: Ty<'tcx>) {
+ if_chain! {
+ if let ty::Adt(adt, substs) = ty.kind();
+ if cx.tcx.visibility(adt.did()) == Visibility::Public;
+ if let Some(eq_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Eq);
+ if let Some(def_id) = trait_ref.trait_def_id();
+ if cx.tcx.is_diagnostic_item(sym::PartialEq, def_id);
+ let param_env = param_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id);
+ if !implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, &[]);
+ // If all of our fields implement `Eq`, we can implement `Eq` too
+ if adt
+ .all_fields()
+ .map(|f| f.ty(cx.tcx, substs))
+ .all(|ty| implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, &[]));
+ then {
+ span_lint_and_sugg(
+ cx,
+ DERIVE_PARTIAL_EQ_WITHOUT_EQ,
+ span.ctxt().outer_expn_data().call_site,
+ "you are deriving `PartialEq` and can implement `Eq`",
+ "consider deriving `Eq` as well",
+ "PartialEq, Eq".to_string(),
+ Applicability::MachineApplicable,
+ )
+ }
+ }
+}
+
+/// Creates the `ParamEnv` used for the given type's derived `Eq` impl.
+fn param_env_for_derived_eq(tcx: TyCtxt<'_>, did: DefId, eq_trait_id: DefId) -> ParamEnv<'_> {
+ // Initial map from generic index to param def.
+ // Vec<(param_def, needs_eq)>
+ let mut params = tcx
+ .generics_of(did)
+ .params
+ .iter()
+ .map(|p| (p, matches!(p.kind, GenericParamDefKind::Type { .. })))
+ .collect::<Vec<_>>();
+
+ let ty_predicates = tcx.predicates_of(did).predicates;
+ for (p, _) in ty_predicates {
+ if let PredicateKind::Trait(p) = p.kind().skip_binder()
+ && p.trait_ref.def_id == eq_trait_id
+ && let ty::Param(self_ty) = p.trait_ref.self_ty().kind()
+ && p.constness == BoundConstness::NotConst
+ {
+            // Mark type params that already have an `Eq` bound so a duplicate bound isn't added below.
+ params[self_ty.index as usize].1 = false;
+ }
+ }
+
+ ParamEnv::new(
+ tcx.mk_predicates(ty_predicates.iter().map(|&(p, _)| p).chain(
+ params.iter().filter(|&&(_, needs_eq)| needs_eq).map(|&(param, _)| {
+ tcx.mk_predicate(Binder::dummy(PredicateKind::Trait(TraitPredicate {
+ trait_ref: TraitRef::new(eq_trait_id, tcx.mk_substs([tcx.mk_param_from_def(param)].into_iter())),
+ constness: BoundConstness::NotConst,
+ polarity: ImplPolarity::Positive,
+ })))
+ }),
+ )),
+ Reveal::UserFacing,
+ Constness::NotConst,
+ )
+}
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
new file mode 100644
index 000000000..53973ab79
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
@@ -0,0 +1,113 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{fn_def_id, get_parent_expr, path_def_id};
+
+use rustc_hir::{def::Res, def_id::DefIdMap, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+use crate::utils::conf;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Denies the configured methods and functions in clippy.toml
+ ///
+ /// Note: Even though this lint is warn-by-default, it will only trigger if
+ /// methods are defined in the clippy.toml file.
+ ///
+ /// ### Why is this bad?
+ /// Some methods are undesirable in certain contexts, and it's beneficial to
+ /// lint for them as needed.
+ ///
+ /// ### Example
+ /// An example clippy.toml configuration:
+ /// ```toml
+ /// # clippy.toml
+ /// disallowed-methods = [
+ /// # Can use a string as the path of the disallowed method.
+ /// "std::boxed::Box::new",
+ /// # Can also use an inline table with a `path` key.
+ /// { path = "std::time::Instant::now" },
+ /// # When using an inline table, can add a `reason` for why the method
+ /// # is disallowed.
+ /// { path = "std::vec::Vec::leak", reason = "no leaking memory" },
+ /// ]
+ /// ```
+ ///
+ /// ```rust,ignore
+ /// // Example code where clippy issues a warning
+ /// let xs = vec![1, 2, 3, 4];
+ /// xs.leak(); // Vec::leak is disallowed in the config.
+ /// // The diagnostic contains the message "no leaking memory".
+ ///
+ /// let _now = Instant::now(); // Instant::now is disallowed in the config.
+ ///
+ /// let _box = Box::new(3); // Box::new is disallowed in the config.
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// // Example code which does not raise clippy warning
+ /// let mut xs = Vec::new(); // Vec::new is _not_ disallowed in the config.
+ /// xs.push(123); // Vec::push is _not_ disallowed in the config.
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub DISALLOWED_METHODS,
+ style,
+ "use of a disallowed method call"
+}
+
+#[derive(Clone, Debug)]
+pub struct DisallowedMethods {
+ conf_disallowed: Vec<conf::DisallowedMethod>,
+ disallowed: DefIdMap<usize>,
+}
+
+impl DisallowedMethods {
+ pub fn new(conf_disallowed: Vec<conf::DisallowedMethod>) -> Self {
+ Self {
+ conf_disallowed,
+ disallowed: DefIdMap::default(),
+ }
+ }
+}
+
+impl_lint_pass!(DisallowedMethods => [DISALLOWED_METHODS]);
+
+impl<'tcx> LateLintPass<'tcx> for DisallowedMethods {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ for (index, conf) in self.conf_disallowed.iter().enumerate() {
+ let segs: Vec<_> = conf.path().split("::").collect();
+ if let Res::Def(_, id) = clippy_utils::def_path_res(cx, &segs) {
+ self.disallowed.insert(id, index);
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let uncalled_path = if let Some(parent) = get_parent_expr(cx, expr)
+ && let ExprKind::Call(receiver, _) = parent.kind
+ && receiver.hir_id == expr.hir_id
+ {
+ None
+ } else {
+ path_def_id(cx, expr)
+ };
+ let def_id = match uncalled_path.or_else(|| fn_def_id(cx, expr)) {
+ Some(def_id) => def_id,
+ None => return,
+ };
+ let conf = match self.disallowed.get(&def_id) {
+ Some(&index) => &self.conf_disallowed[index],
+ None => return,
+ };
+ let msg = format!("use of a disallowed method `{}`", conf.path());
+ span_lint_and_then(cx, DISALLOWED_METHODS, expr.span, &msg, |diag| {
+ if let conf::DisallowedMethod::WithReason {
+ reason: Some(reason), ..
+ } = conf
+ {
+ diag.note(&format!("{} (from clippy.toml)", reason));
+ }
+ });
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
new file mode 100644
index 000000000..0c27c3f92
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs
@@ -0,0 +1,113 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use unicode_script::{Script, UnicodeScript};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of unicode scripts other than those explicitly allowed
+ /// by the lint config.
+ ///
+ /// This lint doesn't take into account non-text scripts such as `Unknown` and `Linear_A`.
+ /// It also ignores the `Common` script type.
+ /// While configuring, be sure to use official script name [aliases] from
+ /// [the list of supported scripts][supported_scripts].
+ ///
+ /// See also: [`non_ascii_idents`].
+ ///
+ /// [aliases]: http://www.unicode.org/reports/tr24/tr24-31.html#Script_Value_Aliases
+ /// [supported_scripts]: https://www.unicode.org/iso15924/iso15924-codes.html
+ ///
+ /// ### Why is this bad?
+    /// It may not be desirable to have many different scripts for
+ /// identifiers in the codebase.
+ ///
+ /// Note that if you only want to allow plain English, you might want to use
+    /// the built-in [`non_ascii_idents`] lint instead.
+ ///
+ /// [`non_ascii_idents`]: https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html#non-ascii-idents
+ ///
+ /// ### Example
+ /// ```rust
+ /// // Assuming that `clippy.toml` contains the following line:
+ /// // allowed-locales = ["Latin", "Cyrillic"]
+ /// let counter = 10; // OK, latin is allowed.
+ /// let счётчик = 10; // OK, cyrillic is allowed.
+ /// let zähler = 10; // OK, it's still latin.
+    /// let カウンタ = 10; // Will trigger the lint.
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub DISALLOWED_SCRIPT_IDENTS,
+ restriction,
+ "usage of non-allowed Unicode scripts"
+}
+
+#[derive(Clone, Debug)]
+pub struct DisallowedScriptIdents {
+ whitelist: FxHashSet<Script>,
+}
+
+impl DisallowedScriptIdents {
+ pub fn new(whitelist: &[String]) -> Self {
+ let whitelist = whitelist
+ .iter()
+ .map(String::as_str)
+ .filter_map(Script::from_full_name)
+ .collect();
+ Self { whitelist }
+ }
+}
+
+impl_lint_pass!(DisallowedScriptIdents => [DISALLOWED_SCRIPT_IDENTS]);
+
+impl EarlyLintPass for DisallowedScriptIdents {
+ fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) {
+ // Implementation is heavily inspired by the implementation of [`non_ascii_idents`] lint:
+ // https://github.com/rust-lang/rust/blob/master/compiler/rustc_lint/src/non_ascii_idents.rs
+
+ let check_disallowed_script_idents = cx.builder.lint_level(DISALLOWED_SCRIPT_IDENTS).0 != Level::Allow;
+ if !check_disallowed_script_idents {
+ return;
+ }
+
+ let symbols = cx.sess().parse_sess.symbol_gallery.symbols.lock();
+ // Sort by `Span` so that error messages make sense with respect to the
+ // order of identifier locations in the code.
+ let mut symbols: Vec<_> = symbols.iter().collect();
+ symbols.sort_unstable_by_key(|k| k.1);
+
+ for (symbol, &span) in &symbols {
+ // Note: `symbol.as_str()` is an expensive operation, thus should not be called
+ // more than once for a single symbol.
+ let symbol_str = symbol.as_str();
+ if symbol_str.is_ascii() {
+ continue;
+ }
+
+ for c in symbol_str.chars() {
+ // We want to iterate through all the scripts associated with this character
+                // and check whether each of them is in the whitelist.
+ let forbidden_script = c
+ .script_extension()
+ .iter()
+ .find(|script| !self.whitelist.contains(script));
+ if let Some(script) = forbidden_script {
+ span_lint(
+ cx,
+ DISALLOWED_SCRIPT_IDENTS,
+ span,
+ &format!(
+ "identifier `{}` has a Unicode script that is not allowed by configuration: {}",
+ symbol_str,
+ script.full_name()
+ ),
+ );
+                    // We don't want to emit the warning multiple times for a single identifier.
+ break;
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_types.rs b/src/tools/clippy/clippy_lints/src/disallowed_types.rs
new file mode 100644
index 000000000..14f89edce
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/disallowed_types.rs
@@ -0,0 +1,140 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::{
+ def::Res, def_id::DefId, Item, ItemKind, PolyTraitRef, PrimTy, TraitBoundModifier, Ty, TyKind, UseKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
+
+use crate::utils::conf;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Denies the configured types in clippy.toml.
+ ///
+ /// Note: Even though this lint is warn-by-default, it will only trigger if
+ /// types are defined in the clippy.toml file.
+ ///
+ /// ### Why is this bad?
+ /// Some types are undesirable in certain contexts.
+ ///
+ /// ### Example:
+ /// An example clippy.toml configuration:
+ /// ```toml
+ /// # clippy.toml
+ /// disallowed-types = [
+ /// # Can use a string as the path of the disallowed type.
+ /// "std::collections::BTreeMap",
+ /// # Can also use an inline table with a `path` key.
+ /// { path = "std::net::TcpListener" },
+ /// # When using an inline table, can add a `reason` for why the type
+ /// # is disallowed.
+ /// { path = "std::net::Ipv4Addr", reason = "no IPv4 allowed" },
+ /// ]
+ /// ```
+ ///
+ /// ```rust,ignore
+ /// use std::collections::BTreeMap;
+ /// // or its use
+ /// let x = std::collections::BTreeMap::new();
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// // A similar type that is allowed by the config
+ /// use std::collections::HashMap;
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub DISALLOWED_TYPES,
+ style,
+ "use of disallowed types"
+}
+#[derive(Clone, Debug)]
+pub struct DisallowedTypes {
+ conf_disallowed: Vec<conf::DisallowedType>,
+ def_ids: FxHashMap<DefId, Option<String>>,
+ prim_tys: FxHashMap<PrimTy, Option<String>>,
+}
+
+impl DisallowedTypes {
+ pub fn new(conf_disallowed: Vec<conf::DisallowedType>) -> Self {
+ Self {
+ conf_disallowed,
+ def_ids: FxHashMap::default(),
+ prim_tys: FxHashMap::default(),
+ }
+ }
+
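+    /// Emits the lint if the resolved definition or primitive type is in the disallowed set.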
+ fn check_res_emit(&self, cx: &LateContext<'_>, res: &Res, span: Span) {
+ match res {
+ Res::Def(_, did) => {
+ if let Some(reason) = self.def_ids.get(did) {
+ emit(cx, &cx.tcx.def_path_str(*did), span, reason.as_deref());
+ }
+ },
+ Res::PrimTy(prim) => {
+ if let Some(reason) = self.prim_tys.get(prim) {
+ emit(cx, prim.name_str(), span, reason.as_deref());
+ }
+ },
+ _ => {},
+ }
+ }
+}
+
+impl_lint_pass!(DisallowedTypes => [DISALLOWED_TYPES]);
+
+impl<'tcx> LateLintPass<'tcx> for DisallowedTypes {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ for conf in &self.conf_disallowed {
+ let (path, reason) = match conf {
+ conf::DisallowedType::Simple(path) => (path, None),
+ conf::DisallowedType::WithReason { path, reason } => (
+ path,
+ reason.as_ref().map(|reason| format!("{} (from clippy.toml)", reason)),
+ ),
+ };
+ let segs: Vec<_> = path.split("::").collect();
+ match clippy_utils::def_path_res(cx, &segs) {
+ Res::Def(_, id) => {
+ self.def_ids.insert(id, reason);
+ },
+ Res::PrimTy(ty) => {
+ self.prim_tys.insert(ty, reason);
+ },
+ _ => {},
+ }
+ }
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if let ItemKind::Use(path, UseKind::Single) = &item.kind {
+ self.check_res_emit(cx, &path.res, item.span);
+ }
+ }
+
+ fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx Ty<'tcx>) {
+ if let TyKind::Path(path) = &ty.kind {
+ self.check_res_emit(cx, &cx.qpath_res(path, ty.hir_id), ty.span);
+ }
+ }
+
+ fn check_poly_trait_ref(&mut self, cx: &LateContext<'tcx>, poly: &'tcx PolyTraitRef<'tcx>, _: TraitBoundModifier) {
+ self.check_res_emit(cx, &poly.trait_ref.path.res, poly.trait_ref.path.span);
+ }
+}
+
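+/// Emits the `DISALLOWED_TYPES` lint for `name` at `span`, attaching the configured reason as a note if present.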
+fn emit(cx: &LateContext<'_>, name: &str, span: Span, reason: Option<&str>) {
+ span_lint_and_then(
+ cx,
+ DISALLOWED_TYPES,
+ span,
+ &format!("`{}` is not allowed according to config", name),
+ |diag| {
+ if let Some(reason) = reason {
+ diag.note(reason);
+ }
+ },
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/doc.rs b/src/tools/clippy/clippy_lints/src/doc.rs
new file mode 100644
index 000000000..da111e737
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc.rs
@@ -0,0 +1,849 @@
+use clippy_utils::attrs::is_doc_hidden;
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_note, span_lint_and_then};
+use clippy_utils::macros::{is_panic, root_macro_call_first_node};
+use clippy_utils::source::{first_line_of_span, snippet_with_applicability};
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use clippy_utils::{is_entrypoint_fn, method_chain_args, return_ty};
+use if_chain::if_chain;
+use itertools::Itertools;
+use rustc_ast::ast::{Async, AttrKind, Attribute, Fn, FnRetTy, ItemKind};
+use rustc_ast::token::CommentKind;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::emitter::EmitterWriter;
+use rustc_errors::{Applicability, Handler, MultiSpan, SuggestionStyle};
+use rustc_hir as hir;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::{AnonConst, Expr};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_parse::maybe_new_parser_from_source_str;
+use rustc_parse::parser::ForceCollect;
+use rustc_session::parse::ParseSess;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::LocalDefId;
+use rustc_span::edition::Edition;
+use rustc_span::source_map::{BytePos, FilePathMapping, SourceMap, Span};
+use rustc_span::{sym, FileName, Pos};
+use std::io;
+use std::ops::Range;
+use std::thread;
+use url::Url;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the presence of `_`, `::` or camel-case words
+ /// outside ticks in documentation.
+ ///
+ /// ### Why is this bad?
+    /// *Rustdoc* supports markdown formatting; `_`, `::` and
+    /// camel-case probably indicate some code which should be included between
+    /// ticks. `_` can also be used for emphasis in markdown, so this lint tries to
+    /// account for that.
+ ///
+ /// ### Known problems
+ /// Lots of bad docs won’t be fixed, what the lint checks
+ /// for is limited, and there are still false positives. HTML elements and their
+ /// content are not linted.
+ ///
+ /// In addition, when writing documentation comments, including `[]` brackets
+    /// inside link text would trip the parser. Therefore, documenting a link with
+ /// `[`SmallVec<[T; INLINE_CAPACITY]>`]` and then [`SmallVec<[T; INLINE_CAPACITY]>`]: SmallVec
+ /// would fail.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// /// Do something with the foo_bar parameter. See also
+ /// /// that::other::module::foo.
+ /// // ^ `foo_bar` and `that::other::module::foo` should be ticked.
+ /// fn doit(foo_bar: usize) {}
+ /// ```
+ ///
+ /// ```rust
+ /// // Link text with `[]` brackets should be written as following:
+ /// /// Consume the array and return the inner
+ /// /// [`SmallVec<[T; INLINE_CAPACITY]>`][SmallVec].
+ /// /// [SmallVec]: SmallVec
+ /// fn main() {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DOC_MARKDOWN,
+ pedantic,
+ "presence of `_`, `::` or camel-case outside backticks in documentation"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the doc comments of publicly visible
+ /// unsafe functions and warns if there is no `# Safety` section.
+ ///
+ /// ### Why is this bad?
+ /// Unsafe functions should document their safety
+ /// preconditions, so that users can be sure they are using them safely.
+ ///
+ /// ### Examples
+ /// ```rust
+ ///# type Universe = ();
+ /// /// This function should really be documented
+ /// pub unsafe fn start_apocalypse(u: &mut Universe) {
+ /// unimplemented!();
+ /// }
+ /// ```
+ ///
+ /// At least write a line about safety:
+ ///
+ /// ```rust
+ ///# type Universe = ();
+ /// /// # Safety
+ /// ///
+ /// /// This function should not be called before the horsemen are ready.
+ /// pub unsafe fn start_apocalypse(u: &mut Universe) {
+ /// unimplemented!();
+ /// }
+ /// ```
+ #[clippy::version = "1.39.0"]
+ pub MISSING_SAFETY_DOC,
+ style,
+ "`pub unsafe fn` without `# Safety` docs"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks the doc comments of publicly visible functions that
+ /// return a `Result` type and warns if there is no `# Errors` section.
+ ///
+ /// ### Why is this bad?
+ /// Documenting the type of errors that can be returned from a
+ /// function can help callers write code to handle the errors appropriately.
+ ///
+ /// ### Examples
+ /// Since the following function returns a `Result` it has an `# Errors` section in
+ /// its doc comment:
+ ///
+ /// ```rust
+ ///# use std::io;
+ /// /// # Errors
+ /// ///
+ /// /// Will return `Err` if `filename` does not exist or the user does not have
+ /// /// permission to read it.
+ /// pub fn read(filename: String) -> io::Result<String> {
+ /// unimplemented!();
+ /// }
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub MISSING_ERRORS_DOC,
+ pedantic,
+ "`pub fn` returns `Result` without `# Errors` in doc comment"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks the doc comments of publicly visible functions that
+ /// may panic and warns if there is no `# Panics` section.
+ ///
+ /// ### Why is this bad?
+ /// Documenting the scenarios in which panicking occurs
+ /// can help callers who do not want to panic to avoid those situations.
+ ///
+ /// ### Examples
+ /// Since the following function may panic it has a `# Panics` section in
+ /// its doc comment:
+ ///
+ /// ```rust
+ /// /// # Panics
+ /// ///
+ /// /// Will panic if y is 0
+ /// pub fn divide_by(x: i32, y: i32) -> i32 {
+ /// if y == 0 {
+ /// panic!("Cannot divide by 0")
+ /// } else {
+ /// x / y
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub MISSING_PANICS_DOC,
+ pedantic,
+ "`pub fn` may panic without `# Panics` in doc comment"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `fn main() { .. }` in doctests
+ ///
+ /// ### Why is this bad?
+ /// The test can be shorter (and likely more readable)
+ /// if the `fn main()` is left implicit.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// /// An example of a doctest with a `main()` function
+ /// ///
+ /// /// # Examples
+ /// ///
+ /// /// ```
+ /// /// fn main() {
+    /// ///     // this need not be in an `fn`
+ /// /// }
+ /// /// ```
+ /// fn needless_main() {
+ /// unimplemented!();
+ /// }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub NEEDLESS_DOCTEST_MAIN,
+ style,
+ "presence of `fn main() {` in code examples"
+}
+
+#[expect(clippy::module_name_repetitions)]
+#[derive(Clone)]
+pub struct DocMarkdown {
+ valid_idents: FxHashSet<String>,
+ in_trait_impl: bool,
+}
+
+impl DocMarkdown {
+ pub fn new(valid_idents: FxHashSet<String>) -> Self {
+ Self {
+ valid_idents,
+ in_trait_impl: false,
+ }
+ }
+}
+
+impl_lint_pass!(DocMarkdown =>
+ [DOC_MARKDOWN, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, NEEDLESS_DOCTEST_MAIN]
+);
+
+impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
+ fn check_crate(&mut self, cx: &LateContext<'tcx>) {
+ let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID);
+ check_attrs(cx, &self.valid_idents, attrs);
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let headers = check_attrs(cx, &self.valid_idents, attrs);
+ match item.kind {
+ hir::ItemKind::Fn(ref sig, _, body_id) => {
+ if !(is_entrypoint_fn(cx, item.def_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) {
+ let body = cx.tcx.hir().body(body_id);
+ let mut fpu = FindPanicUnwrap {
+ cx,
+ typeck_results: cx.tcx.typeck(item.def_id),
+ panic_span: None,
+ };
+ fpu.visit_expr(&body.value);
+ lint_for_missing_headers(cx, item.def_id, item.span, sig, headers, Some(body_id), fpu.panic_span);
+ }
+ },
+ hir::ItemKind::Impl(impl_) => {
+ self.in_trait_impl = impl_.of_trait.is_some();
+ },
+ hir::ItemKind::Trait(_, unsafety, ..) => {
+ if !headers.safety && unsafety == hir::Unsafety::Unsafe {
+ span_lint(
+ cx,
+ MISSING_SAFETY_DOC,
+ item.span,
+ "docs for unsafe trait missing `# Safety` section",
+ );
+ }
+ },
+ _ => (),
+ }
+ }
+
+ fn check_item_post(&mut self, _cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ if let hir::ItemKind::Impl { .. } = item.kind {
+ self.in_trait_impl = false;
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let headers = check_attrs(cx, &self.valid_idents, attrs);
+ if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
+ if !in_external_macro(cx.tcx.sess, item.span) {
+ lint_for_missing_headers(cx, item.def_id, item.span, sig, headers, None, None);
+ }
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let headers = check_attrs(cx, &self.valid_idents, attrs);
+ if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) {
+ return;
+ }
+ if let hir::ImplItemKind::Fn(ref sig, body_id) = item.kind {
+ let body = cx.tcx.hir().body(body_id);
+ let mut fpu = FindPanicUnwrap {
+ cx,
+ typeck_results: cx.tcx.typeck(item.def_id),
+ panic_span: None,
+ };
+ fpu.visit_expr(&body.value);
+ lint_for_missing_headers(cx, item.def_id, item.span, sig, headers, Some(body_id), fpu.panic_span);
+ }
+ }
+}
+
+fn lint_for_missing_headers<'tcx>(
+ cx: &LateContext<'tcx>,
+ def_id: LocalDefId,
+ span: impl Into<MultiSpan> + Copy,
+ sig: &hir::FnSig<'_>,
+ headers: DocHeaders,
+ body_id: Option<hir::BodyId>,
+ panic_span: Option<Span>,
+) {
+ if !cx.access_levels.is_exported(def_id) {
+ return; // Private functions do not require doc comments
+ }
+
+ // do not lint if any parent has `#[doc(hidden)]` attribute (#7347)
+ if cx
+ .tcx
+ .hir()
+ .parent_iter(cx.tcx.hir().local_def_id_to_hir_id(def_id))
+ .any(|(id, _node)| is_doc_hidden(cx.tcx.hir().attrs(id)))
+ {
+ return;
+ }
+
+ if !headers.safety && sig.header.unsafety == hir::Unsafety::Unsafe {
+ span_lint(
+ cx,
+ MISSING_SAFETY_DOC,
+ span,
+ "unsafe function's docs miss `# Safety` section",
+ );
+ }
+ if !headers.panics && panic_span.is_some() {
+ span_lint_and_note(
+ cx,
+ MISSING_PANICS_DOC,
+ span,
+ "docs for function which may panic missing `# Panics` section",
+ panic_span,
+ "first possible panic found here",
+ );
+ }
+ if !headers.errors {
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) {
+ span_lint(
+ cx,
+ MISSING_ERRORS_DOC,
+ span,
+ "docs for function returning `Result` missing `# Errors` section",
+ );
+ } else {
+ if_chain! {
+ if let Some(body_id) = body_id;
+ if let Some(future) = cx.tcx.lang_items().future_trait();
+ let typeck = cx.tcx.typeck_body(body_id);
+ let body = cx.tcx.hir().body(body_id);
+ let ret_ty = typeck.expr_ty(&body.value);
+ if implements_trait(cx, ret_ty, future, &[]);
+ if let ty::Opaque(_, subs) = ret_ty.kind();
+ if let Some(gen) = subs.types().next();
+ if let ty::Generator(_, subs, _) = gen.kind();
+ if is_type_diagnostic_item(cx, subs.as_generator().return_ty(), sym::Result);
+ then {
+ span_lint(
+ cx,
+ MISSING_ERRORS_DOC,
+ span,
+ "docs for function returning `Result` missing `# Errors` section",
+ );
+ }
+ }
+ }
+ }
+}
+
+/// Cleanup documentation decoration.
+///
+/// We can't use `rustc_ast::attr::AttributeMethods::with_desugared_doc` or
+/// `rustc_ast::parse::lexer::comments::strip_doc_comment_decoration` because we
+ /// need to keep track of the spans, but this function is inspired by the latter.
+#[expect(clippy::cast_possible_truncation)]
+#[must_use]
+pub fn strip_doc_comment_decoration(doc: &str, comment_kind: CommentKind, span: Span) -> (String, Vec<(usize, Span)>) {
+ // one-line comments lose their prefix
+ if comment_kind == CommentKind::Line {
+ let mut doc = doc.to_owned();
+ doc.push('\n');
+ let len = doc.len();
+ // +3 skips the opening delimiter
+ return (doc, vec![(len, span.with_lo(span.lo() + BytePos(3)))]);
+ }
+
+ let mut sizes = vec![];
+ let mut contains_initial_stars = false;
+ for line in doc.lines() {
+ let offset = line.as_ptr() as usize - doc.as_ptr() as usize;
+ debug_assert_eq!(offset as u32 as usize, offset);
+ contains_initial_stars |= line.trim_start().starts_with('*');
+ // +1 adds the newline, +3 skips the opening delimiter
+ sizes.push((line.len() + 1, span.with_lo(span.lo() + BytePos(3 + offset as u32))));
+ }
+ if !contains_initial_stars {
+ return (doc.to_string(), sizes);
+ }
+ // remove the initial '*'s if any
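+ // e.g. a block-comment line ` * some text` becomes `   some text`; the `*` is
+ // replaced by a space so each line keeps its byte length and the recorded spans stay valid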
+ let mut no_stars = String::with_capacity(doc.len());
+ for line in doc.lines() {
+ let mut chars = line.chars();
+ for c in &mut chars {
+ if c.is_whitespace() {
+ no_stars.push(c);
+ } else {
+ no_stars.push(if c == '*' { ' ' } else { c });
+ break;
+ }
+ }
+ no_stars.push_str(chars.as_str());
+ no_stars.push('\n');
+ }
+
+ (no_stars, sizes)
+}
+
+#[derive(Copy, Clone)]
+struct DocHeaders {
+ safety: bool,
+ errors: bool,
+ panics: bool,
+}
+
+fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &'a [Attribute]) -> DocHeaders {
+ use pulldown_cmark::{BrokenLink, CowStr, Options};
+ /// We don't want the parser to choke on intra-doc links. Since we don't
+ /// actually care about rendering them, just pretend that all broken links
+ /// point to a fake address.
+ #[expect(clippy::unnecessary_wraps)] // we're following a type signature
+ fn fake_broken_link_callback<'a>(_: BrokenLink<'_>) -> Option<(CowStr<'a>, CowStr<'a>)> {
+ Some(("fake".into(), "fake".into()))
+ }
+
+ let mut doc = String::new();
+ let mut spans = vec![];
+
+ for attr in attrs {
+ if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
+ let (comment, current_spans) = strip_doc_comment_decoration(comment.as_str(), comment_kind, attr.span);
+ spans.extend_from_slice(&current_spans);
+ doc.push_str(&comment);
+ } else if attr.has_name(sym::doc) {
+ // ignore mix of sugared and non-sugared doc
+ // don't trigger the safety or errors check
+ return DocHeaders {
+ safety: true,
+ errors: true,
+ panics: true,
+ };
+ }
+ }
+
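+ // Turn the per-fragment lengths collected above into cumulative start offsets
+ // into `doc`, e.g. lengths [5, 7, 3] become offsets [0, 5, 12]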
+ let mut current = 0;
+ for &mut (ref mut offset, _) in &mut spans {
+ let offset_copy = *offset;
+ *offset = current;
+ current += offset_copy;
+ }
+
+ if doc.is_empty() {
+ return DocHeaders {
+ safety: false,
+ errors: false,
+ panics: false,
+ };
+ }
+
+ let mut cb = fake_broken_link_callback;
+
+ let parser =
+ pulldown_cmark::Parser::new_with_broken_link_callback(&doc, Options::empty(), Some(&mut cb)).into_offset_iter();
+ // Iterate over all `Events` and combine consecutive events into one
+ let events = parser.coalesce(|previous, current| {
+ use pulldown_cmark::Event::Text;
+
+ let previous_range = previous.1;
+ let current_range = current.1;
+
+ match (previous.0, current.0) {
+ (Text(previous), Text(current)) => {
+ let mut previous = previous.to_string();
+ previous.push_str(&current);
+ Ok((Text(previous.into()), previous_range))
+ },
+ (previous, current) => Err(((previous, previous_range), (current, current_range))),
+ }
+ });
+ check_doc(cx, valid_idents, events, &spans)
+}
+
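+/// Code-fence attributes that mark a fenced block as Rust code for doctest checking.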
+const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail"];
+
+fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize>)>>(
+ cx: &LateContext<'_>,
+ valid_idents: &FxHashSet<String>,
+ events: Events,
+ spans: &[(usize, Span)],
+) -> DocHeaders {
+ // true if a safety header was found
+ use pulldown_cmark::Event::{
+ Code, End, FootnoteReference, HardBreak, Html, Rule, SoftBreak, Start, TaskListMarker, Text,
+ };
+ use pulldown_cmark::Tag::{CodeBlock, Heading, Item, Link, Paragraph};
+ use pulldown_cmark::{CodeBlockKind, CowStr};
+
+ let mut headers = DocHeaders {
+ safety: false,
+ errors: false,
+ panics: false,
+ };
+ let mut in_code = false;
+ let mut in_link = None;
+ let mut in_heading = false;
+ let mut is_rust = false;
+ let mut edition = None;
+ let mut ticks_unbalanced = false;
+ let mut text_to_check: Vec<(CowStr<'_>, Span)> = Vec::new();
+ let mut paragraph_span = spans.get(0).expect("function isn't called if doc comment is empty").1;
+ for (event, range) in events {
+ match event {
+ Start(CodeBlock(ref kind)) => {
+ in_code = true;
+ if let CodeBlockKind::Fenced(lang) = kind {
+ for item in lang.split(',') {
+ if item == "ignore" {
+ is_rust = false;
+ break;
+ }
+ if let Some(stripped) = item.strip_prefix("edition") {
+ is_rust = true;
+ edition = stripped.parse::<Edition>().ok();
+ } else if item.is_empty() || RUST_CODE.contains(&item) {
+ is_rust = true;
+ }
+ }
+ }
+ },
+ End(CodeBlock(_)) => {
+ in_code = false;
+ is_rust = false;
+ },
+ Start(Link(_, url, _)) => in_link = Some(url),
+ End(Link(..)) => in_link = None,
+ Start(Heading(_, _, _) | Paragraph | Item) => {
+ if let Start(Heading(_, _, _)) = event {
+ in_heading = true;
+ }
+ ticks_unbalanced = false;
+ let (_, span) = get_current_span(spans, range.start);
+ paragraph_span = first_line_of_span(cx, span);
+ },
+ End(Heading(_, _, _) | Paragraph | Item) => {
+ if let End(Heading(_, _, _)) = event {
+ in_heading = false;
+ }
+ if ticks_unbalanced {
+ span_lint_and_help(
+ cx,
+ DOC_MARKDOWN,
+ paragraph_span,
+ "backticks are unbalanced",
+ None,
+ "a backtick may be missing a pair",
+ );
+ } else {
+ for (text, span) in text_to_check {
+ check_text(cx, valid_idents, &text, span);
+ }
+ }
+ text_to_check = Vec::new();
+ },
+ Start(_tag) | End(_tag) => (), // We don't care about other tags
+ Html(_html) => (), // HTML is weird, just ignore it
+ SoftBreak | HardBreak | TaskListMarker(_) | Code(_) | Rule => (),
+ FootnoteReference(text) | Text(text) => {
+ let (begin, span) = get_current_span(spans, range.start);
+ paragraph_span = paragraph_span.with_hi(span.hi());
+ ticks_unbalanced |= text.contains('`') && !in_code;
+ if Some(&text) == in_link.as_ref() || ticks_unbalanced {
+ // Probably a link of the form `<http://example.com>`,
+ // which is represented as a link to "http://example.com" with
+ // the text "http://example.com" by pulldown-cmark
+ continue;
+ }
+ let trimmed_text = text.trim();
+ headers.safety |= in_heading && trimmed_text == "Safety";
+ headers.safety |= in_heading && trimmed_text == "Implementation safety";
+ headers.safety |= in_heading && trimmed_text == "Implementation Safety";
+ headers.errors |= in_heading && trimmed_text == "Errors";
+ headers.panics |= in_heading && trimmed_text == "Panics";
+ if in_code {
+ if is_rust {
+ let edition = edition.unwrap_or_else(|| cx.tcx.sess.edition());
+ check_code(cx, &text, edition, span);
+ }
+ } else {
+ // Adjust for the beginning of the current `Event`
+ let span = span.with_lo(span.lo() + BytePos::from_usize(range.start - begin));
+ text_to_check.push((text, span));
+ }
+ },
+ }
+ }
+ headers
+}
+
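+/// Finds the doc fragment containing the byte offset `idx` and returns its
+/// `(start_offset, span)` pair.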
+fn get_current_span(spans: &[(usize, Span)], idx: usize) -> (usize, Span) {
+ let index = match spans.binary_search_by(|c| c.0.cmp(&idx)) {
+ Ok(o) => o,
+ Err(e) => e - 1,
+ };
+ spans[index]
+}
+
+fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, span: Span) {
+ fn has_needless_main(code: String, edition: Edition) -> bool {
+ rustc_driver::catch_fatal_errors(|| {
+ rustc_span::create_session_globals_then(edition, || {
+ let filename = FileName::anon_source_code(&code);
+
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let emitter = EmitterWriter::new(
+ Box::new(io::sink()),
+ None,
+ None,
+ fallback_bundle,
+ false,
+ false,
+ false,
+ None,
+ false,
+ );
+ let handler = Handler::with_emitter(false, None, Box::new(emitter));
+ let sess = ParseSess::with_span_handler(handler, sm);
+
+ let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
+ Ok(p) => p,
+ Err(errs) => {
+ drop(errs);
+ return false;
+ },
+ };
+
+ let mut relevant_main_found = false;
+ loop {
+ match parser.parse_item(ForceCollect::No) {
+ Ok(Some(item)) => match &item.kind {
+ ItemKind::Fn(box Fn {
+ sig, body: Some(block), ..
+ }) if item.ident.name == sym::main => {
+ let is_async = matches!(sig.header.asyncness, Async::Yes { .. });
+ let returns_nothing = match &sig.decl.output {
+ FnRetTy::Default(..) => true,
+ FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
+ FnRetTy::Ty(_) => false,
+ };
+
+ if returns_nothing && !is_async && !block.stmts.is_empty() {
+ // This main function should be linted, but only if there are no other functions
+ relevant_main_found = true;
+ } else {
+ // This main function should not be linted, we're done
+ return false;
+ }
+ },
+ // Tests with one of these items are ignored
+ ItemKind::Static(..)
+ | ItemKind::Const(..)
+ | ItemKind::ExternCrate(..)
+ | ItemKind::ForeignMod(..)
+ // Another function was found; this case is ignored
+ | ItemKind::Fn(..) => return false,
+ _ => {},
+ },
+ Ok(None) => break,
+ Err(e) => {
+ e.cancel();
+ return false;
+ },
+ }
+ }
+
+ relevant_main_found
+ })
+ })
+ .ok()
+ .unwrap_or_default()
+ }
+
+ // Because of the global session, we need to create a new session in a different thread with
+ // the edition we need.
+ let text = text.to_owned();
+ if thread::spawn(move || has_needless_main(text, edition))
+ .join()
+ .expect("thread::spawn failed")
+ {
+ span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
+ }
+}
+
+fn check_text(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, text: &str, span: Span) {
+ for word in text.split(|c: char| c.is_whitespace() || c == '\'') {
+ // Trim punctuation as in `some comment (see foo::bar).`
+ // ^^
+ // Or even as in `_foo bar_` which is emphasized. Also preserve `::` as a prefix/suffix.
+ let mut word = word.trim_matches(|c: char| !c.is_alphanumeric() && c != ':');
+
+ // Remove leading or trailing single `:` which may be part of a sentence.
+ if word.starts_with(':') && !word.starts_with("::") {
+ word = word.trim_start_matches(':');
+ }
+ if word.ends_with(':') && !word.ends_with("::") {
+ word = word.trim_end_matches(':');
+ }
+
+ if valid_idents.contains(word) || word.chars().all(|c| c == ':') {
+ continue;
+ }
+
+ // Adjust for the current word
+ let offset = word.as_ptr() as usize - text.as_ptr() as usize;
+ let span = Span::new(
+ span.lo() + BytePos::from_usize(offset),
+ span.lo() + BytePos::from_usize(offset + word.len()),
+ span.ctxt(),
+ span.parent(),
+ );
+
+ check_word(cx, word, span);
+ }
+}
+
+fn check_word(cx: &LateContext<'_>, word: &str, span: Span) {
+ /// Checks if a string is camel-case, i.e., contains at least two uppercase
+ /// letters (`Clippy` is ok) and one lower-case letter (`NASA` is ok).
+ /// Plurals are also excluded (`IDs` is ok).
+ fn is_camel_case(s: &str) -> bool {
+ if s.starts_with(|c: char| c.is_ascii_digit()) {
+ return false;
+ }
+
+ let s = s.strip_suffix('s').unwrap_or(s);
+
+ s.chars().all(char::is_alphanumeric)
+ && s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1
+ && s.chars().filter(|&c| c.is_lowercase()).take(1).count() > 0
+ }
+
+ fn has_underscore(s: &str) -> bool {
+ s != "_" && !s.contains("\\_") && s.contains('_')
+ }
+
+ fn has_hyphen(s: &str) -> bool {
+ s != "-" && s.contains('-')
+ }
+
+ if let Ok(url) = Url::parse(word) {
+ // try to get around the fact that `foo::bar` parses as a valid URL
+ if !url.cannot_be_a_base() {
+ span_lint(
+ cx,
+ DOC_MARKDOWN,
+ span,
+ "you should put bare URLs between `<`/`>` or make a proper Markdown link",
+ );
+
+ return;
+ }
+ }
+
+ // We assume that mixed-case words are not meant to be put inside backticks. (Issue #2343)
+ if has_underscore(word) && has_hyphen(word) {
+ return;
+ }
+
+ if has_underscore(word) || word.contains("::") || is_camel_case(word) {
+ let mut applicability = Applicability::MachineApplicable;
+
+ span_lint_and_then(
+ cx,
+ DOC_MARKDOWN,
+ span,
+ "item in documentation is missing backticks",
+ |diag| {
+ let snippet = snippet_with_applicability(cx, span, "..", &mut applicability);
+ diag.span_suggestion_with_style(
+ span,
+ "try",
+ format!("`{}`", snippet),
+ applicability,
+ // always show the suggestion in a separate line, since the
+ // inline presentation adds another pair of backticks
+ SuggestionStyle::ShowAlways,
+ );
+ },
+ );
+ }
+}
+
+struct FindPanicUnwrap<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ panic_span: Option<Span>,
+ typeck_results: &'tcx ty::TypeckResults<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if self.panic_span.is_some() {
+ return;
+ }
+
+ if let Some(macro_call) = root_macro_call_first_node(self.cx, expr) {
+ if is_panic(self.cx, macro_call.def_id)
+ || matches!(
+ self.cx.tcx.item_name(macro_call.def_id).as_str(),
+ "assert" | "assert_eq" | "assert_ne" | "todo"
+ )
+ {
+ self.panic_span = Some(macro_call.span);
+ }
+ }
+
+ // check for `unwrap`
+ if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
+ let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+ if is_type_diagnostic_item(self.cx, receiver_ty, sym::Option)
+ || is_type_diagnostic_item(self.cx, receiver_ty, sym::Result)
+ {
+ self.panic_span = Some(expr.span);
+ }
+ }
+
+ // and check sub-expressions
+ intravisit::walk_expr(self, expr);
+ }
+
+ // Panics in const blocks will cause compilation to fail.
+ fn visit_anon_const(&mut self, _: &'tcx AnonConst) {}
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/doc_link_with_quotes.rs b/src/tools/clippy/clippy_lints/src/doc_link_with_quotes.rs
new file mode 100644
index 000000000..cb07f57e8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/doc_link_with_quotes.rs
@@ -0,0 +1,60 @@
+use clippy_utils::diagnostics::span_lint;
+use itertools::Itertools;
+use rustc_ast::{AttrKind, Attribute};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects the syntax `['foo']` in documentation comments (notice quotes instead of backticks)
+ /// outside of code blocks.
+ ///
+ /// ### Why is this bad?
+ /// It is likely a typo when defining an intra-doc link.
+ ///
+ /// ### Example
+ /// ```rust
+ /// /// See also: ['foo']
+ /// fn bar() {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// /// See also: [`foo`]
+ /// fn bar() {}
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub DOC_LINK_WITH_QUOTES,
+ pedantic,
+ "possible typo for an intra-doc link"
+}
+declare_lint_pass!(DocLinkWithQuotes => [DOC_LINK_WITH_QUOTES]);
+
+impl EarlyLintPass for DocLinkWithQuotes {
+ fn check_attribute(&mut self, ctx: &EarlyContext<'_>, attr: &Attribute) {
+ if let AttrKind::DocComment(_, symbol) = attr.kind {
+ if contains_quote_link(symbol.as_str()) {
+ span_lint(
+ ctx,
+ DOC_LINK_WITH_QUOTES,
+ attr.span,
+ "possible intra-doc link using quotes instead of backticks",
+ );
+ }
+ }
+ }
+}
+
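+/// Returns `true` if the text contains a quote-style link such as `['foo']`
+/// outside of backticks; anything between backticks is ignored.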
+fn contains_quote_link(s: &str) -> bool {
+ let mut in_backticks = false;
+ let mut found_opening = false;
+
+ for c in s.chars().tuple_windows::<(char, char)>() {
+ match c {
+ ('`', _) => in_backticks = !in_backticks,
+ ('[', '\'') if !in_backticks => found_opening = true,
+ ('\'', ']') if !in_backticks && found_opening => return true,
+ _ => {},
+ }
+ }
+
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/double_parens.rs b/src/tools/clippy/clippy_lints/src/double_parens.rs
new file mode 100644
index 000000000..a33ef5ce6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/double_parens.rs
@@ -0,0 +1,75 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary double parentheses.
+ ///
+ /// ### Why is this bad?
+ /// This makes code harder to read and might indicate a
+ /// mistake.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn simple_double_parens() -> i32 {
+ /// ((0))
+ /// }
+ ///
+ /// # fn foo(bar: usize) {}
+ /// foo((0));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn simple_no_parens() -> i32 {
+ /// 0
+ /// }
+ ///
+ /// # fn foo(bar: usize) {}
+ /// foo(0);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DOUBLE_PARENS,
+ complexity,
+ "Warn on unnecessary double parentheses"
+}
+
+declare_lint_pass!(DoubleParens => [DOUBLE_PARENS]);
+
+impl EarlyLintPass for DoubleParens {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ let msg: &str = "consider removing unnecessary double parentheses";
+
+ match expr.kind {
+ ExprKind::Paren(ref in_paren) => match in_paren.kind {
+ ExprKind::Paren(_) | ExprKind::Tup(_) => {
+ span_lint(cx, DOUBLE_PARENS, expr.span, msg);
+ },
+ _ => {},
+ },
+ ExprKind::Call(_, ref params) => {
+ if params.len() == 1 {
+ let param = &params[0];
+ if let ExprKind::Paren(_) = param.kind {
+ span_lint(cx, DOUBLE_PARENS, param.span, msg);
+ }
+ }
+ },
+ ExprKind::MethodCall(_, ref params, _) => {
+ if params.len() == 2 {
+ let param = &params[1];
+ if let ExprKind::Paren(_) = param.kind {
+ span_lint(cx, DOUBLE_PARENS, param.span, msg);
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
new file mode 100644
index 000000000..b35f0b8ca
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
@@ -0,0 +1,243 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note};
+use clippy_utils::is_must_use_func_call;
+use clippy_utils::ty::{is_copy, is_must_use_ty, is_type_lang_item};
+use rustc_hir::{Expr, ExprKind, LangItem};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `std::mem::drop` with a reference
+ /// instead of an owned value.
+ ///
+ /// ### Why is this bad?
+ /// Calling `drop` on a reference will only drop the
+ /// reference itself, which is a no-op. It will not call the `drop` method (from
+ /// the `Drop` trait implementation) on the underlying referenced value, which
+ /// is likely what was intended.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let mut lock_guard = mutex.lock();
+ /// std::mem::drop(&lock_guard) // Should have been drop(lock_guard), mutex
+ /// // still locked
+ /// operation_that_requires_mutex_to_be_unlocked();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DROP_REF,
+ correctness,
+ "calls to `std::mem::drop` with a reference instead of an owned value"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `std::mem::forget` with a reference
+ /// instead of an owned value.
+ ///
+ /// ### Why is this bad?
+ /// Calling `forget` on a reference will only forget the
+ /// reference itself, which is a no-op. It will not forget the underlying
+ /// referenced value, which is likely what was intended.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = Box::new(1);
+ /// std::mem::forget(&x) // Should have been forget(x), x will still be dropped
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FORGET_REF,
+ correctness,
+ "calls to `std::mem::forget` with a reference instead of an owned value"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `std::mem::drop` with a value
+ /// that derives the Copy trait
+ ///
+ /// ### Why is this bad?
+ /// Calling `std::mem::drop` [does nothing for types that
+ /// implement Copy](https://doc.rust-lang.org/std/mem/fn.drop.html), since the
+ /// value will be copied and moved into the function on invocation.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: i32 = 42; // i32 implements Copy
+ /// std::mem::drop(x) // A copy of x is passed to the function, leaving the
+ /// // original unaffected
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DROP_COPY,
+ correctness,
+ "calls to `std::mem::drop` with a value that implements Copy"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `std::mem::forget` with a value that
+ /// derives the Copy trait
+ ///
+ /// ### Why is this bad?
+ /// Calling `std::mem::forget` [does nothing for types that
+ /// implement Copy](https://doc.rust-lang.org/std/mem/fn.drop.html) since the
+ /// value will be copied and moved into the function on invocation.
+ ///
+ /// An alternative, but also valid, explanation is that Copy types do not
+ /// implement the Drop trait, which means they have no destructors. Without a
+ /// destructor, there is nothing for `std::mem::forget` to ignore.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: i32 = 42; // i32 implements Copy
+ /// std::mem::forget(x) // A copy of x is passed to the function, leaving the
+ /// // original unaffected
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FORGET_COPY,
+ correctness,
+ "calls to `std::mem::forget` with a value that implements Copy"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `std::mem::drop` with a value that does not implement `Drop`.
+ ///
+ /// ### Why is this bad?
+ /// Calling `std::mem::drop` is no different than dropping such a type. A different value may
+ /// have been intended.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo;
+ /// let x = Foo;
+ /// std::mem::drop(x);
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub DROP_NON_DROP,
+ suspicious,
+ "call to `std::mem::drop` with a value which does not implement `Drop`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `std::mem::forget` with a value that does not implement `Drop`.
+ ///
+ /// ### Why is this bad?
+ /// Calling `std::mem::forget` is no different than dropping such a type. A different value may
+ /// have been intended.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo;
+ /// let x = Foo;
+ /// std::mem::forget(x);
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub FORGET_NON_DROP,
+ suspicious,
+ "call to `std::mem::forget` with a value which does not implement `Drop`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Prevents the safe `std::mem::drop` function from being called on `std::mem::ManuallyDrop`.
+ ///
+ /// ### Why is this bad?
+ /// The safe `drop` function does not drop the inner value of a `ManuallyDrop`.
+ ///
+ /// ### Known problems
+ /// Does not catch cases if the user binds `std::mem::drop`
+ /// to a different name and calls it that way.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct S;
+ /// drop(std::mem::ManuallyDrop::new(S));
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct S;
+ /// unsafe {
+ /// std::mem::ManuallyDrop::drop(&mut std::mem::ManuallyDrop::new(S));
+ /// }
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub UNDROPPED_MANUALLY_DROPS,
+ correctness,
+ "use of safe `std::mem::drop` function to drop a std::mem::ManuallyDrop, which will not drop the inner value"
+}
+
+const DROP_REF_SUMMARY: &str = "calls to `std::mem::drop` with a reference instead of an owned value. \
+ Dropping a reference does nothing";
+const FORGET_REF_SUMMARY: &str = "calls to `std::mem::forget` with a reference instead of an owned value. \
+ Forgetting a reference does nothing";
+const DROP_COPY_SUMMARY: &str = "calls to `std::mem::drop` with a value that implements `Copy`. \
+ Dropping a copy leaves the original intact";
+const FORGET_COPY_SUMMARY: &str = "calls to `std::mem::forget` with a value that implements `Copy`. \
+ Forgetting a copy leaves the original intact";
+const DROP_NON_DROP_SUMMARY: &str = "call to `std::mem::drop` with a value that does not implement `Drop`. \
+ Dropping such a type only extends its contained lifetimes";
+const FORGET_NON_DROP_SUMMARY: &str = "call to `std::mem::forget` with a value that does not implement `Drop`. \
+ Forgetting such a type is the same as dropping it";
+
+declare_lint_pass!(DropForgetRef => [
+ DROP_REF,
+ FORGET_REF,
+ DROP_COPY,
+ FORGET_COPY,
+ DROP_NON_DROP,
+ FORGET_NON_DROP,
+ UNDROPPED_MANUALLY_DROPS
+]);
+
+impl<'tcx> LateLintPass<'tcx> for DropForgetRef {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Call(path, [arg]) = expr.kind
+ && let ExprKind::Path(ref qpath) = path.kind
+ && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
+ && let Some(fn_name) = cx.tcx.get_diagnostic_name(def_id)
+ {
+ let arg_ty = cx.typeck_results().expr_ty(arg);
+ let (lint, msg) = match fn_name {
+ sym::mem_drop if arg_ty.is_ref() => (DROP_REF, DROP_REF_SUMMARY),
+ sym::mem_forget if arg_ty.is_ref() => (FORGET_REF, FORGET_REF_SUMMARY),
+ sym::mem_drop if is_copy(cx, arg_ty) => (DROP_COPY, DROP_COPY_SUMMARY),
+ sym::mem_forget if is_copy(cx, arg_ty) => (FORGET_COPY, FORGET_COPY_SUMMARY),
+ sym::mem_drop if is_type_lang_item(cx, arg_ty, LangItem::ManuallyDrop) => {
+ span_lint_and_help(
+ cx,
+ UNDROPPED_MANUALLY_DROPS,
+ expr.span,
+ "the inner value of this ManuallyDrop will not be dropped",
+ None,
+ "to drop a `ManuallyDrop<T>`, use std::mem::ManuallyDrop::drop",
+ );
+ return;
+ }
+ sym::mem_drop
+ if !(arg_ty.needs_drop(cx.tcx, cx.param_env)
+ || is_must_use_func_call(cx, arg)
+ || is_must_use_ty(cx, arg_ty)) =>
+ {
+ (DROP_NON_DROP, DROP_NON_DROP_SUMMARY)
+ },
+ sym::mem_forget if !arg_ty.needs_drop(cx.tcx, cx.param_env) => {
+ (FORGET_NON_DROP, FORGET_NON_DROP_SUMMARY)
+ },
+ _ => return,
+ };
+ span_lint_and_note(
+ cx,
+ lint,
+ expr.span,
+ msg,
+ Some(arg.span),
+ &format!("argument has type `{}`", arg_ty),
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/duplicate_mod.rs b/src/tools/clippy/clippy_lints/src/duplicate_mod.rs
new file mode 100644
index 000000000..e1eb3b632
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/duplicate_mod.rs
@@ -0,0 +1,128 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{Crate, Inline, Item, ItemKind, ModKind};
+use rustc_errors::MultiSpan;
+use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{FileName, Span};
+use std::collections::BTreeMap;
+use std::path::PathBuf;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for files that are included as modules multiple times.
+ ///
+ /// ### Why is this bad?
+ /// Loading a file as a module more than once causes it to be compiled
+ /// multiple times, taking longer and putting duplicate content into the
+ /// module tree.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// // lib.rs
+ /// mod a;
+ /// mod b;
+ /// ```
+ /// ```rust,ignore
+ /// // a.rs
+ /// #[path = "./b.rs"]
+ /// mod b;
+ /// ```
+ ///
+ /// Use instead:
+ ///
+ /// ```rust,ignore
+ /// // lib.rs
+ /// mod a;
+ /// mod b;
+ /// ```
+ /// ```rust,ignore
+ /// // a.rs
+ /// use crate::b;
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub DUPLICATE_MOD,
+ suspicious,
+ "file loaded as module multiple times"
+}
+
+#[derive(PartialOrd, Ord, PartialEq, Eq)]
+struct Modules {
+ local_path: PathBuf,
+ spans: Vec<Span>,
+ lint_levels: Vec<Level>,
+}
+
+#[derive(Default)]
+pub struct DuplicateMod {
+ /// map from the canonicalized path to `Modules`, `BTreeMap` to make the
+ /// order deterministic for tests
+ modules: BTreeMap<PathBuf, Modules>,
+}
+
+impl_lint_pass!(DuplicateMod => [DUPLICATE_MOD]);
+
+impl EarlyLintPass for DuplicateMod {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
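+ // Record every out-of-line `mod` item under the canonical path of the file it
+ // loads, so `check_crate_post` can spot files that are loaded more than once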
+ if let ItemKind::Mod(_, ModKind::Loaded(_, Inline::No, mod_spans)) = &item.kind
+ && let FileName::Real(real) = cx.sess().source_map().span_to_filename(mod_spans.inner_span)
+ && let Some(local_path) = real.into_local_path()
+ && let Ok(absolute_path) = local_path.canonicalize()
+ {
+ let modules = self.modules.entry(absolute_path).or_insert(Modules {
+ local_path,
+ spans: Vec::new(),
+ lint_levels: Vec::new(),
+ });
+ modules.spans.push(item.span_with_attributes());
+ modules.lint_levels.push(cx.get_lint_level(DUPLICATE_MOD));
+ }
+ }
+
+ fn check_crate_post(&mut self, cx: &EarlyContext<'_>, _: &Crate) {
+ for Modules {
+ local_path,
+ spans,
+ lint_levels,
+ } in self.modules.values()
+ {
+ if spans.len() < 2 {
+ continue;
+ }
+
+ // At this point the lint would be emitted
+ assert_eq!(spans.len(), lint_levels.len());
+ let spans: Vec<_> = spans
+ .iter()
+ .zip(lint_levels)
+ .filter_map(|(span, lvl)| {
+ if let Some(id) = lvl.get_expectation_id() {
+ cx.fulfill_expectation(id);
+ }
+
+ (!matches!(lvl, Level::Allow | Level::Expect(_))).then_some(*span)
+ })
+ .collect();
+
+ if spans.len() < 2 {
+ continue;
+ }
+
+ let mut multi_span = MultiSpan::from_spans(spans.clone());
+ let (&first, duplicates) = spans.split_first().unwrap();
+
+ multi_span.push_span_label(first, "first loaded here");
+ for &duplicate in duplicates {
+ multi_span.push_span_label(duplicate, "loaded again here");
+ }
+
+ span_lint_and_help(
+ cx,
+ DUPLICATE_MOD,
+ multi_span,
+ &format!("file is loaded as a module multiple times: `{}`", local_path.display()),
+ None,
+ "replace all but one `mod` item with `use` items",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/else_if_without_else.rs b/src/tools/clippy/clippy_lints/src/else_if_without_else.rs
new file mode 100644
index 000000000..bf4488570
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/else_if_without_else.rs
@@ -0,0 +1,72 @@
+//! Lint on if expressions with an else if, but without a final else branch.
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of if expressions with an `else if` branch,
+ /// but without a final `else` branch.
+ ///
+ /// ### Why is this bad?
+ /// Some coding guidelines require this (e.g., MISRA-C:2004 Rule 14.10).
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn a() {}
+ /// # fn b() {}
+ /// # let x: i32 = 1;
+ /// if x.is_positive() {
+ /// a();
+ /// } else if x.is_negative() {
+ /// b();
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ ///
+ /// ```rust
+ /// # fn a() {}
+ /// # fn b() {}
+ /// # let x: i32 = 1;
+ /// if x.is_positive() {
+ /// a();
+ /// } else if x.is_negative() {
+ /// b();
+ /// } else {
+ /// // We don't care about zero.
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ELSE_IF_WITHOUT_ELSE,
+ restriction,
+ "`if` expression with an `else if`, but without a final `else` branch"
+}
+
+declare_lint_pass!(ElseIfWithoutElse => [ELSE_IF_WITHOUT_ELSE]);
+
+impl EarlyLintPass for ElseIfWithoutElse {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, mut item: &Expr) {
+ if in_external_macro(cx.sess(), item.span) {
+ return;
+ }
+
+ while let ExprKind::If(_, _, Some(ref els)) = item.kind {
+ if let ExprKind::If(_, _, None) = els.kind {
+ span_lint_and_help(
+ cx,
+ ELSE_IF_WITHOUT_ELSE,
+ els.span,
+ "`if` expression with an `else if`, but without a final `else`",
+ None,
+ "add an `else` block here",
+ );
+ }
+
+ item = els;
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/empty_drop.rs b/src/tools/clippy/clippy_lints/src/empty_drop.rs
new file mode 100644
index 000000000..ec063c0f7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/empty_drop.rs
@@ -0,0 +1,65 @@
+use clippy_utils::{diagnostics::span_lint_and_sugg, peel_blocks};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Body, ExprKind, Impl, ImplItemKind, Item, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for empty `Drop` implementations.
+ ///
+ /// ### Why is this bad?
+ /// Empty `Drop` implementations have no effect when dropping an instance of the type. They are
+ /// most likely useless. However, an empty `Drop` implementation prevents a type from being
+ /// destructured, which might be the intention behind adding the implementation as a marker.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct S;
+ ///
+ /// impl Drop for S {
+ /// fn drop(&mut self) {}
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct S;
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub EMPTY_DROP,
+ restriction,
+ "empty `Drop` implementations"
+}
+declare_lint_pass!(EmptyDrop => [EMPTY_DROP]);
+
+impl LateLintPass<'_> for EmptyDrop {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if_chain! {
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ items: [child],
+ ..
+ }) = item.kind;
+ if trait_ref.trait_def_id() == cx.tcx.lang_items().drop_trait();
+ if let impl_item_hir = child.id.hir_id();
+ if let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir);
+ if let ImplItemKind::Fn(_, b) = &impl_item.kind;
+ if let Body { value: func_expr, .. } = cx.tcx.hir().body(*b);
+ let func_expr = peel_blocks(func_expr);
+ if let ExprKind::Block(block, _) = func_expr.kind;
+ if block.stmts.is_empty() && block.expr.is_none();
+ then {
+ span_lint_and_sugg(
+ cx,
+ EMPTY_DROP,
+ item.span,
+ "empty drop implementation",
+ "try removing this impl",
+ String::new(),
+ Applicability::MaybeIncorrect
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/empty_enum.rs b/src/tools/clippy/clippy_lints/src/empty_enum.rs
new file mode 100644
index 000000000..bbebc0244
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/empty_enum.rs
@@ -0,0 +1,67 @@
+//! lint when there is an enum with no variants
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `enum`s with no variants.
+ ///
+ /// As of this writing, the `never_type` is still a
+ /// nightly-only experimental API. Therefore, this lint is only triggered
+ /// if the `never_type` is enabled.
+ ///
+ /// ### Why is this bad?
+ /// If you want to introduce a type which
+ /// can't be instantiated, you should use `!` (the primitive type "never"),
+ /// or a wrapper around it, because `!` has more extensive
+ /// compiler support (type inference, etc...) and wrappers
+ /// around it are the conventional way to define an uninhabited type.
+ /// For further information visit [never type documentation](https://doc.rust-lang.org/std/primitive.never.html)
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum Test {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// #![feature(never_type)]
+ ///
+ /// struct Test(!);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EMPTY_ENUM,
+ pedantic,
+ "enum with no variants"
+}
+
+declare_lint_pass!(EmptyEnum => [EMPTY_ENUM]);
+
+impl<'tcx> LateLintPass<'tcx> for EmptyEnum {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ // Only suggest the `never_type` if the feature is enabled
+ if !cx.tcx.features().never_type {
+ return;
+ }
+
+ if let ItemKind::Enum(..) = item.kind {
+ let ty = cx.tcx.type_of(item.def_id);
+ let adt = ty.ty_adt_def().expect("already checked whether this is an enum");
+ if adt.variants().is_empty() {
+ span_lint_and_help(
+ cx,
+ EMPTY_ENUM,
+ item.span,
+ "enum with no variants",
+ None,
+ "consider using the uninhabited type `!` (never type) or a wrapper \
+ around it to introduce a type which can't be instantiated",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
new file mode 100644
index 000000000..08bf80a42
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
@@ -0,0 +1,99 @@
+use clippy_utils::{diagnostics::span_lint_and_then, source::snippet_opt};
+use rustc_ast::ast::{Item, ItemKind, VariantData};
+use rustc_errors::Applicability;
+use rustc_lexer::TokenKind;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Finds structs without fields (a so-called "empty struct") that are declared with brackets.
+ ///
+ /// ### Why is this bad?
+ /// Empty brackets after a struct declaration can be omitted.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Cookie {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct Cookie;
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub EMPTY_STRUCTS_WITH_BRACKETS,
+ restriction,
+ "finds struct declarations with empty brackets"
+}
+declare_lint_pass!(EmptyStructsWithBrackets => [EMPTY_STRUCTS_WITH_BRACKETS]);
+
+impl EarlyLintPass for EmptyStructsWithBrackets {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ let span_after_ident = item.span.with_lo(item.ident.span.hi());
+
+ if let ItemKind::Struct(var_data, _) = &item.kind
+ && has_brackets(var_data)
+ && has_no_fields(cx, var_data, span_after_ident) {
+ span_lint_and_then(
+ cx,
+ EMPTY_STRUCTS_WITH_BRACKETS,
+ span_after_ident,
+ "found empty brackets on struct declaration",
+ |diagnostic| {
+ diagnostic.span_suggestion_hidden(
+ span_after_ident,
+ "remove the brackets",
+ ";",
+ Applicability::MachineApplicable);
+ },
+ );
+ }
+ }
+}
+
+fn has_no_ident_token(braces_span_str: &str) -> bool {
+ !rustc_lexer::tokenize(braces_span_str).any(|t| t.kind == TokenKind::Ident)
+}
+
+fn has_brackets(var_data: &VariantData) -> bool {
+ !matches!(var_data, VariantData::Unit(_))
+}
+
+fn has_no_fields(cx: &EarlyContext<'_>, var_data: &VariantData, braces_span: Span) -> bool {
+ if !var_data.fields().is_empty() {
+ return false;
+ }
+
+ // there might still be field declarations hidden from the AST
+ // (conditionally compiled code using #[cfg(..)])
+
+ let Some(braces_span_str) = snippet_opt(cx, braces_span) else {
+ return false;
+ };
+
+ has_no_ident_token(braces_span_str.as_ref())
+}
+
+#[cfg(test)]
+mod unit_test {
+ use super::*;
+
+ #[test]
+ fn test_has_no_ident_token() {
+ let input = "{ field: u8 }";
+ assert!(!has_no_ident_token(input));
+
+ let input = "(u8, String);";
+ assert!(!has_no_ident_token(input));
+
+ let input = " {
+ // test = 5
+ }
+ ";
+ assert!(has_no_ident_token(input));
+
+ let input = " ();";
+ assert!(has_no_ident_token(input));
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/entry.rs b/src/tools/clippy/clippy_lints/src/entry.rs
new file mode 100644
index 000000000..4e3ae4c96
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/entry.rs
@@ -0,0 +1,658 @@
+use clippy_utils::higher;
+use clippy_utils::{
+ can_move_expr_to_closure_no_visit,
+ diagnostics::span_lint_and_sugg,
+ is_expr_final_block_expr, is_expr_used_or_unified, match_def_path, paths, peel_hir_expr_while,
+ source::{reindent_multiline, snippet_indent, snippet_with_applicability, snippet_with_context},
+ SpanlessEq,
+};
+use core::fmt::Write;
+use rustc_errors::Applicability;
+use rustc_hir::{
+ hir_id::HirIdSet,
+ intravisit::{walk_expr, Visitor},
+ Block, Expr, ExprKind, Guard, HirId, Let, Pat, Stmt, StmtKind, UnOp,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{Span, SyntaxContext, DUMMY_SP};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for uses of `contains_key` + `insert` on `HashMap`
+ /// or `BTreeMap`.
+ ///
+ /// ### Why is this bad?
+ /// Using `entry` is more efficient.
+ ///
+ /// ### Known problems
+ /// The suggestion may have type inference errors in some cases. e.g.
+ /// ```rust
+ /// let mut map = std::collections::HashMap::new();
+ /// let _ = if !map.contains_key(&0) {
+ /// map.insert(0, 0)
+ /// } else {
+ /// None
+ /// };
+ /// ```
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::collections::HashMap;
+ /// # let mut map = HashMap::new();
+ /// # let k = 1;
+ /// # let v = 1;
+ /// if !map.contains_key(&k) {
+ /// map.insert(k, v);
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::collections::HashMap;
+ /// # let mut map = HashMap::new();
+ /// # let k = 1;
+ /// # let v = 1;
+ /// map.entry(k).or_insert(v);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MAP_ENTRY,
+ perf,
+ "use of `contains_key` followed by `insert` on a `HashMap` or `BTreeMap`"
+}
+
+declare_lint_pass!(HashMapPass => [MAP_ENTRY]);
+
+impl<'tcx> LateLintPass<'tcx> for HashMapPass {
+ #[expect(clippy::too_many_lines)]
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let (cond_expr, then_expr, else_expr) = match higher::If::hir(expr) {
+ Some(higher::If { cond, then, r#else }) => (cond, then, r#else),
+ _ => return,
+ };
+
+ let (map_ty, contains_expr) = match try_parse_contains(cx, cond_expr) {
+ Some(x) => x,
+ None => return,
+ };
+
+ let then_search = match find_insert_calls(cx, &contains_expr, then_expr) {
+ Some(x) => x,
+ None => return,
+ };
+
+ let mut app = Applicability::MachineApplicable;
+ let map_str = snippet_with_context(cx, contains_expr.map.span, contains_expr.call_ctxt, "..", &mut app).0;
+ let key_str = snippet_with_context(cx, contains_expr.key.span, contains_expr.call_ctxt, "..", &mut app).0;
+ let sugg = if let Some(else_expr) = else_expr {
+ let else_search = match find_insert_calls(cx, &contains_expr, else_expr) {
+ Some(search) => search,
+ None => return,
+ };
+
+ if then_search.edits.is_empty() && else_search.edits.is_empty() {
+ // No insertions
+ return;
+ } else if then_search.edits.is_empty() || else_search.edits.is_empty() {
+ // if .. { insert } else { .. } or if .. { .. } else { insert }
+ let ((then_str, entry_kind), else_str) = match (else_search.edits.is_empty(), contains_expr.negated) {
+ (true, true) => (
+ then_search.snippet_vacant(cx, then_expr.span, &mut app),
+ snippet_with_applicability(cx, else_expr.span, "{ .. }", &mut app),
+ ),
+ (true, false) => (
+ then_search.snippet_occupied(cx, then_expr.span, &mut app),
+ snippet_with_applicability(cx, else_expr.span, "{ .. }", &mut app),
+ ),
+ (false, true) => (
+ else_search.snippet_occupied(cx, else_expr.span, &mut app),
+ snippet_with_applicability(cx, then_expr.span, "{ .. }", &mut app),
+ ),
+ (false, false) => (
+ else_search.snippet_vacant(cx, else_expr.span, &mut app),
+ snippet_with_applicability(cx, then_expr.span, "{ .. }", &mut app),
+ ),
+ };
+ format!(
+ "if let {}::{} = {}.entry({}) {} else {}",
+ map_ty.entry_path(),
+ entry_kind,
+ map_str,
+ key_str,
+ then_str,
+ else_str,
+ )
+ } else {
+ // if .. { insert } else { insert }
+ let ((then_str, then_entry), (else_str, else_entry)) = if contains_expr.negated {
+ (
+ then_search.snippet_vacant(cx, then_expr.span, &mut app),
+ else_search.snippet_occupied(cx, else_expr.span, &mut app),
+ )
+ } else {
+ (
+ then_search.snippet_occupied(cx, then_expr.span, &mut app),
+ else_search.snippet_vacant(cx, else_expr.span, &mut app),
+ )
+ };
+ let indent_str = snippet_indent(cx, expr.span);
+ let indent_str = indent_str.as_deref().unwrap_or("");
+ format!(
+ "match {}.entry({}) {{\n{indent} {entry}::{} => {}\n\
+ {indent} {entry}::{} => {}\n{indent}}}",
+ map_str,
+ key_str,
+ then_entry,
+ reindent_multiline(then_str.into(), true, Some(4 + indent_str.len())),
+ else_entry,
+ reindent_multiline(else_str.into(), true, Some(4 + indent_str.len())),
+ entry = map_ty.entry_path(),
+ indent = indent_str,
+ )
+ }
+ } else {
+ if then_search.edits.is_empty() {
+ // no insertions
+ return;
+ }
+
+ // if .. { insert }
+ if !then_search.allow_insert_closure {
+ let (body_str, entry_kind) = if contains_expr.negated {
+ then_search.snippet_vacant(cx, then_expr.span, &mut app)
+ } else {
+ then_search.snippet_occupied(cx, then_expr.span, &mut app)
+ };
+ format!(
+ "if let {}::{} = {}.entry({}) {}",
+ map_ty.entry_path(),
+ entry_kind,
+ map_str,
+ key_str,
+ body_str,
+ )
+ } else if let Some(insertion) = then_search.as_single_insertion() {
+ let value_str = snippet_with_context(cx, insertion.value.span, then_expr.span.ctxt(), "..", &mut app).0;
+ if contains_expr.negated {
+ if insertion.value.can_have_side_effects() {
+ format!("{}.entry({}).or_insert_with(|| {});", map_str, key_str, value_str)
+ } else {
+ format!("{}.entry({}).or_insert({});", map_str, key_str, value_str)
+ }
+ } else {
+ // TODO: suggest using `if let Some(v) = map.get_mut(k) { .. }` here.
+ // This would need to be a different lint.
+ return;
+ }
+ } else {
+ let block_str = then_search.snippet_closure(cx, then_expr.span, &mut app);
+ if contains_expr.negated {
+ format!("{}.entry({}).or_insert_with(|| {});", map_str, key_str, block_str)
+ } else {
+ // TODO: suggest using `if let Some(v) = map.get_mut(k) { .. }` here.
+ // This would need to be a different lint.
+ return;
+ }
+ }
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MAP_ENTRY,
+ expr.span,
+ &format!("usage of `contains_key` followed by `insert` on a `{}`", map_ty.name()),
+ "try this",
+ sugg,
+ app,
+ );
+ }
+}
+
+#[derive(Clone, Copy)]
+enum MapType {
+ Hash,
+ BTree,
+}
+impl MapType {
+ fn name(self) -> &'static str {
+ match self {
+ Self::Hash => "HashMap",
+ Self::BTree => "BTreeMap",
+ }
+ }
+ fn entry_path(self) -> &'static str {
+ match self {
+ Self::Hash => "std::collections::hash_map::Entry",
+ Self::BTree => "std::collections::btree_map::Entry",
+ }
+ }
+}
+
+struct ContainsExpr<'tcx> {
+ negated: bool,
+ map: &'tcx Expr<'tcx>,
+ key: &'tcx Expr<'tcx>,
+ call_ctxt: SyntaxContext,
+}
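+/// Parses a (possibly negated) `map.contains_key(&key)` call on a `HashMap` or
+/// `BTreeMap`, recording whether it was negated and which map and key
+/// expressions were used.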
+fn try_parse_contains<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'_>) -> Option<(MapType, ContainsExpr<'tcx>)> {
+ let mut negated = false;
+ let expr = peel_hir_expr_while(expr, |e| match e.kind {
+ ExprKind::Unary(UnOp::Not, e) => {
+ negated = !negated;
+ Some(e)
+ },
+ _ => None,
+ });
+ match expr.kind {
+ ExprKind::MethodCall(
+ _,
+ [
+ map,
+ Expr {
+ kind: ExprKind::AddrOf(_, _, key),
+ span: key_span,
+ ..
+ },
+ ],
+ _,
+ ) if key_span.ctxt() == expr.span.ctxt() => {
+ let id = cx.typeck_results().type_dependent_def_id(expr.hir_id)?;
+ let expr = ContainsExpr {
+ negated,
+ map,
+ key,
+ call_ctxt: expr.span.ctxt(),
+ };
+ if match_def_path(cx, id, &paths::BTREEMAP_CONTAINS_KEY) {
+ Some((MapType::BTree, expr))
+ } else if match_def_path(cx, id, &paths::HASHMAP_CONTAINS_KEY) {
+ Some((MapType::Hash, expr))
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
+
+struct InsertExpr<'tcx> {
+ map: &'tcx Expr<'tcx>,
+ key: &'tcx Expr<'tcx>,
+ value: &'tcx Expr<'tcx>,
+}
+fn try_parse_insert<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<InsertExpr<'tcx>> {
+ if let ExprKind::MethodCall(_, [map, key, value], _) = expr.kind {
+ let id = cx.typeck_results().type_dependent_def_id(expr.hir_id)?;
+ if match_def_path(cx, id, &paths::BTREEMAP_INSERT) || match_def_path(cx, id, &paths::HASHMAP_INSERT) {
+ Some(InsertExpr { map, key, value })
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+}
+
+/// An edit that will need to be made to move the expression to use the entry api
+#[derive(Clone, Copy)]
+enum Edit<'tcx> {
+ /// A semicolon that needs to be removed. Used to create a closure for `insert_with`.
+ RemoveSemi(Span),
+ /// An insertion into the map.
+ Insertion(Insertion<'tcx>),
+}
+impl<'tcx> Edit<'tcx> {
+ fn as_insertion(self) -> Option<Insertion<'tcx>> {
+ if let Self::Insertion(i) = self { Some(i) } else { None }
+ }
+}
+#[derive(Clone, Copy)]
+struct Insertion<'tcx> {
+ call: &'tcx Expr<'tcx>,
+ value: &'tcx Expr<'tcx>,
+}
+
+/// This visitor needs to do multiple things:
+/// * Find all usages of the map. An insertion can only be made before any other usages of the map.
+/// * Determine if there's an insertion using the same key. There's no need for the entry api
+/// otherwise.
+/// * Determine if the final statement executed is an insertion. This is needed to use
+/// `or_insert_with`.
+/// * Determine if there's any sub-expression that can't be placed in a closure.
+/// * Determine if there's only a single insert statement. `or_insert` can be used in this case.
+#[expect(clippy::struct_excessive_bools)]
+struct InsertSearcher<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ /// The map expression used in the contains call.
+ map: &'tcx Expr<'tcx>,
+ /// The key expression used in the contains call.
+ key: &'tcx Expr<'tcx>,
+ /// The context of the top level block. All insert calls must be in the same context.
+ ctxt: SyntaxContext,
+ /// Whether this expression can be safely moved into a closure.
+ allow_insert_closure: bool,
+ /// Whether this expression can use the entry api.
+ can_use_entry: bool,
+ /// Whether this expression is the final expression in this code path. This may be a statement.
+ in_tail_pos: bool,
+ /// Whether this expression is a single insert; a slightly better suggestion can be made in this case.
+ is_single_insert: bool,
+ /// If the visitor has seen the map being used.
+ is_map_used: bool,
+ /// The locations where changes need to be made for the suggestion.
+ edits: Vec<Edit<'tcx>>,
+ /// A stack of loops the visitor is currently in.
+ loops: Vec<HirId>,
+ /// Local variables created in the expression. These don't need to be captured.
+ locals: HirIdSet,
+}
+impl<'tcx> InsertSearcher<'_, 'tcx> {
+ /// Visit the expression as a branch in control flow. Multiple insert calls can be used, but
+ /// only if they are on separate code paths. This will return whether the map was used in the
+ /// given expression.
+ fn visit_cond_arm(&mut self, e: &'tcx Expr<'_>) -> bool {
+ let is_map_used = self.is_map_used;
+ let in_tail_pos = self.in_tail_pos;
+ self.visit_expr(e);
+ let res = self.is_map_used;
+ self.is_map_used = is_map_used;
+ self.in_tail_pos = in_tail_pos;
+ res
+ }
+
+ /// Visits an expression which is not itself in a tail position, but other sibling expressions
+ /// may be. e.g. if conditions
+ fn visit_non_tail_expr(&mut self, e: &'tcx Expr<'_>) {
+ let in_tail_pos = self.in_tail_pos;
+ self.in_tail_pos = false;
+ self.visit_expr(e);
+ self.in_tail_pos = in_tail_pos;
+ }
+}
+impl<'tcx> Visitor<'tcx> for InsertSearcher<'_, 'tcx> {
+ fn visit_stmt(&mut self, stmt: &'tcx Stmt<'_>) {
+ match stmt.kind {
+ StmtKind::Semi(e) => {
+ self.visit_expr(e);
+
+ if self.in_tail_pos && self.allow_insert_closure {
+ // The spans are used to slice the top level expression into multiple parts. This requires that
+ // they all come from the same part of the source code.
+ if stmt.span.ctxt() == self.ctxt && e.span.ctxt() == self.ctxt {
+ self.edits
+ .push(Edit::RemoveSemi(stmt.span.trim_start(e.span).unwrap_or(DUMMY_SP)));
+ } else {
+ self.allow_insert_closure = false;
+ }
+ }
+ },
+ StmtKind::Expr(e) => self.visit_expr(e),
+ StmtKind::Local(l) => {
+ self.visit_pat(l.pat);
+ if let Some(e) = l.init {
+ self.allow_insert_closure &= !self.in_tail_pos;
+ self.in_tail_pos = false;
+ self.is_single_insert = false;
+ self.visit_expr(e);
+ }
+ },
+ StmtKind::Item(_) => {
+ self.allow_insert_closure &= !self.in_tail_pos;
+ self.is_single_insert = false;
+ },
+ }
+ }
+
+ fn visit_block(&mut self, block: &'tcx Block<'_>) {
+ // If the block is in a tail position, then the last expression (possibly a statement) is in the
+ // tail position. The rest, however, are not.
+ match (block.stmts, block.expr) {
+ ([], None) => {
+ self.allow_insert_closure &= !self.in_tail_pos;
+ },
+ ([], Some(expr)) => self.visit_expr(expr),
+ (stmts, Some(expr)) => {
+ let in_tail_pos = self.in_tail_pos;
+ self.in_tail_pos = false;
+ for stmt in stmts {
+ self.visit_stmt(stmt);
+ }
+ self.in_tail_pos = in_tail_pos;
+ self.visit_expr(expr);
+ },
+ ([stmts @ .., stmt], None) => {
+ let in_tail_pos = self.in_tail_pos;
+ self.in_tail_pos = false;
+ for stmt in stmts {
+ self.visit_stmt(stmt);
+ }
+ self.in_tail_pos = in_tail_pos;
+ self.visit_stmt(stmt);
+ },
+ }
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if !self.can_use_entry {
+ return;
+ }
+
+ match try_parse_insert(self.cx, expr) {
+ Some(insert_expr) if SpanlessEq::new(self.cx).eq_expr(self.map, insert_expr.map) => {
+ // Multiple inserts, inserts with a different key, and inserts from a macro can't use the entry api.
+ if self.is_map_used
+ || !SpanlessEq::new(self.cx).eq_expr(self.key, insert_expr.key)
+ || expr.span.ctxt() != self.ctxt
+ {
+ self.can_use_entry = false;
+ return;
+ }
+
+ self.edits.push(Edit::Insertion(Insertion {
+ call: expr,
+ value: insert_expr.value,
+ }));
+ self.is_map_used = true;
+ self.allow_insert_closure &= self.in_tail_pos;
+
+ // The value doesn't affect whether there is only a single insert expression.
+ let is_single_insert = self.is_single_insert;
+ self.visit_non_tail_expr(insert_expr.value);
+ self.is_single_insert = is_single_insert;
+ },
+ _ if SpanlessEq::new(self.cx).eq_expr(self.map, expr) => {
+ self.is_map_used = true;
+ },
+ _ => match expr.kind {
+ ExprKind::If(cond_expr, then_expr, Some(else_expr)) => {
+ self.is_single_insert = false;
+ self.visit_non_tail_expr(cond_expr);
+ // Each branch may contain its own insert expression.
+ let mut is_map_used = self.visit_cond_arm(then_expr);
+ is_map_used |= self.visit_cond_arm(else_expr);
+ self.is_map_used = is_map_used;
+ },
+ ExprKind::Match(scrutinee_expr, arms, _) => {
+ self.is_single_insert = false;
+ self.visit_non_tail_expr(scrutinee_expr);
+ // Each branch may contain its own insert expression.
+ let mut is_map_used = self.is_map_used;
+ for arm in arms {
+ self.visit_pat(arm.pat);
+ if let Some(Guard::If(guard) | Guard::IfLet(&Let { init: guard, .. })) = arm.guard {
+ self.visit_non_tail_expr(guard);
+ }
+ is_map_used |= self.visit_cond_arm(arm.body);
+ }
+ self.is_map_used = is_map_used;
+ },
+ ExprKind::Loop(block, ..) => {
+ self.loops.push(expr.hir_id);
+ self.is_single_insert = false;
+ self.allow_insert_closure &= !self.in_tail_pos;
+ // Don't allow insertions inside of a loop.
+ let edit_len = self.edits.len();
+ self.visit_block(block);
+ if self.edits.len() != edit_len {
+ self.can_use_entry = false;
+ }
+ self.loops.pop();
+ },
+ ExprKind::Block(block, _) => self.visit_block(block),
+ ExprKind::InlineAsm(_) => {
+ self.can_use_entry = false;
+ },
+ _ => {
+ self.allow_insert_closure &= !self.in_tail_pos;
+ self.allow_insert_closure &=
+ can_move_expr_to_closure_no_visit(self.cx, expr, &self.loops, &self.locals);
+ // Sub expressions are no longer in the tail position.
+ self.is_single_insert = false;
+ self.in_tail_pos = false;
+ walk_expr(self, expr);
+ },
+ },
+ }
+ }
+
+ fn visit_pat(&mut self, p: &'tcx Pat<'tcx>) {
+ p.each_binding_or_first(&mut |_, id, _, _| {
+ self.locals.insert(id);
+ });
+ }
+}
+
+struct InsertSearchResults<'tcx> {
+ edits: Vec<Edit<'tcx>>,
+ allow_insert_closure: bool,
+ is_single_insert: bool,
+}
+impl<'tcx> InsertSearchResults<'tcx> {
+ fn as_single_insertion(&self) -> Option<Insertion<'tcx>> {
+ self.is_single_insert.then(|| self.edits[0].as_insertion().unwrap())
+ }
+
+ fn snippet(
+ &self,
+ cx: &LateContext<'_>,
+ mut span: Span,
+ app: &mut Applicability,
+ write_wrapped: impl Fn(&mut String, Insertion<'_>, SyntaxContext, &mut Applicability),
+ ) -> String {
+ let ctxt = span.ctxt();
+ let mut res = String::new();
+ for insertion in self.edits.iter().filter_map(|e| e.as_insertion()) {
+ res.push_str(&snippet_with_applicability(
+ cx,
+ span.until(insertion.call.span),
+ "..",
+ app,
+ ));
+ if is_expr_used_or_unified(cx.tcx, insertion.call) {
+ write_wrapped(&mut res, insertion, ctxt, app);
+ } else {
+ let _ = write!(
+ res,
+ "e.insert({})",
+ snippet_with_context(cx, insertion.value.span, ctxt, "..", app).0
+ );
+ }
+ span = span.trim_start(insertion.call.span).unwrap_or(DUMMY_SP);
+ }
+ res.push_str(&snippet_with_applicability(cx, span, "..", app));
+ res
+ }
+
+ fn snippet_occupied(&self, cx: &LateContext<'_>, span: Span, app: &mut Applicability) -> (String, &'static str) {
+ (
+ self.snippet(cx, span, app, |res, insertion, ctxt, app| {
+ // Insertion into a map would return `Some(&mut value)`, but the entry returns `&mut value`
+ let _ = write!(
+ res,
+ "Some(e.insert({}))",
+ snippet_with_context(cx, insertion.value.span, ctxt, "..", app).0
+ );
+ }),
+ "Occupied(mut e)",
+ )
+ }
+
+ fn snippet_vacant(&self, cx: &LateContext<'_>, span: Span, app: &mut Applicability) -> (String, &'static str) {
+ (
+ self.snippet(cx, span, app, |res, insertion, ctxt, app| {
+ // Insertion into a map would return `None`, but the entry returns a mutable reference.
+ let _ = if is_expr_final_block_expr(cx.tcx, insertion.call) {
+ write!(
+ res,
+ "e.insert({});\n{}None",
+ snippet_with_context(cx, insertion.value.span, ctxt, "..", app).0,
+ snippet_indent(cx, insertion.call.span).as_deref().unwrap_or(""),
+ )
+ } else {
+ write!(
+ res,
+ "{{ e.insert({}); None }}",
+ snippet_with_context(cx, insertion.value.span, ctxt, "..", app).0,
+ )
+ };
+ }),
+ "Vacant(e)",
+ )
+ }
+
+ fn snippet_closure(&self, cx: &LateContext<'_>, mut span: Span, app: &mut Applicability) -> String {
+ let ctxt = span.ctxt();
+ let mut res = String::new();
+ for edit in &self.edits {
+ match *edit {
+ Edit::Insertion(insertion) => {
+ // Cut out the value from `map.insert(key, value)`
+ res.push_str(&snippet_with_applicability(
+ cx,
+ span.until(insertion.call.span),
+ "..",
+ app,
+ ));
+ res.push_str(&snippet_with_context(cx, insertion.value.span, ctxt, "..", app).0);
+ span = span.trim_start(insertion.call.span).unwrap_or(DUMMY_SP);
+ },
+ Edit::RemoveSemi(semi_span) => {
+ // Cut out the semicolon. This allows the value to be returned from the closure.
+ res.push_str(&snippet_with_applicability(cx, span.until(semi_span), "..", app));
+ span = span.trim_start(semi_span).unwrap_or(DUMMY_SP);
+ },
+ }
+ }
+ res.push_str(&snippet_with_applicability(cx, span, "..", app));
+ res
+ }
+}
+
+fn find_insert_calls<'tcx>(
+ cx: &LateContext<'tcx>,
+ contains_expr: &ContainsExpr<'tcx>,
+ expr: &'tcx Expr<'_>,
+) -> Option<InsertSearchResults<'tcx>> {
+ let mut s = InsertSearcher {
+ cx,
+ map: contains_expr.map,
+ key: contains_expr.key,
+ ctxt: expr.span.ctxt(),
+ edits: Vec::new(),
+ is_map_used: false,
+ allow_insert_closure: true,
+ can_use_entry: true,
+ in_tail_pos: true,
+ is_single_insert: true,
+ loops: Vec::new(),
+ locals: HirIdSet::default(),
+ };
+ s.visit_expr(expr);
+ let allow_insert_closure = s.allow_insert_closure;
+ let is_single_insert = s.is_single_insert;
+ let edits = s.edits;
+ s.can_use_entry.then_some(InsertSearchResults {
+ edits,
+ allow_insert_closure,
+ is_single_insert,
+ })
+}
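For context, the searcher above gathers the edits needed to replace a `contains_key` check that guards an `insert` with a single `entry` call. A minimal sketch of that rewrite on a plain `HashMap` (illustrative only; the `before`/`after` helpers are hypothetical and not part of this diff):

```rust
use std::collections::HashMap;

// The shape the lint looks for: a `contains_key` check guarding an `insert`.
fn before(counts: &mut HashMap<String, u32>, word: String) {
    if !counts.contains_key(&word) {
        counts.insert(word, 0);
    }
}

// The suggested rewrite: a single lookup through the entry API.
fn after(counts: &mut HashMap<String, u32>, word: String) {
    counts.entry(word).or_insert(0);
}

fn main() {
    let mut counts = HashMap::new();
    before(&mut counts, "apple".to_string());
    after(&mut counts, "pear".to_string());
    assert_eq!(counts.len(), 2);
}
```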
diff --git a/src/tools/clippy/clippy_lints/src/enum_clike.rs b/src/tools/clippy/clippy_lints/src/enum_clike.rs
new file mode 100644
index 000000000..da6788882
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/enum_clike.rs
@@ -0,0 +1,81 @@
+//! lint on C-like enums that are `repr(isize/usize)` and have values that
+//! don't fit into an `i32`
+
+use clippy_utils::consts::{miri_to_const, Constant};
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::util::IntTypeExt;
+use rustc_middle::ty::{self, IntTy, UintTy};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for C-like enumerations that are
+ /// `repr(isize/usize)` and have values that don't fit into an `i32`.
+ ///
+ /// ### Why is this bad?
+ /// This will truncate the variant value on 32 bit
+ /// architectures, but works fine on 64 bit.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #[cfg(target_pointer_width = "64")]
+ /// #[repr(usize)]
+ /// enum NonPortable {
+ /// X = 0x1_0000_0000,
+ /// Y = 0,
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ENUM_CLIKE_UNPORTABLE_VARIANT,
+ correctness,
+ "C-like enums that are `repr(isize/usize)` and have values that don't fit into an `i32`"
+}
+
+declare_lint_pass!(UnportableVariant => [ENUM_CLIKE_UNPORTABLE_VARIANT]);
+
+impl<'tcx> LateLintPass<'tcx> for UnportableVariant {
+ #[expect(clippy::cast_possible_wrap)]
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if cx.tcx.data_layout.pointer_size.bits() != 64 {
+ return;
+ }
+ if let ItemKind::Enum(def, _) = &item.kind {
+ for var in def.variants {
+ if let Some(anon_const) = &var.disr_expr {
+ let def_id = cx.tcx.hir().body_owner_def_id(anon_const.body);
+ let mut ty = cx.tcx.type_of(def_id.to_def_id());
+ let constant = cx
+ .tcx
+ .const_eval_poly(def_id.to_def_id())
+ .ok()
+ .map(|val| rustc_middle::mir::ConstantKind::from_value(val, ty));
+ if let Some(Constant::Int(val)) = constant.and_then(|c| miri_to_const(cx.tcx, c)) {
+ if let ty::Adt(adt, _) = ty.kind() {
+ if adt.is_enum() {
+ ty = adt.repr().discr_type().to_ty(cx.tcx);
+ }
+ }
+ match ty.kind() {
+ ty::Int(IntTy::Isize) => {
+ let val = ((val as i128) << 64) >> 64;
+ if i32::try_from(val).is_ok() {
+ continue;
+ }
+ },
+ ty::Uint(UintTy::Usize) if val > u128::from(u32::MAX) => {},
+ _ => continue,
+ }
+ span_lint(
+ cx,
+ ENUM_CLIKE_UNPORTABLE_VARIANT,
+ var.span,
+ "C-like enum variant discriminant is not portable to 32-bit targets",
+ );
+ };
+ }
+ }
+ }
+ }
+}
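A note on the shift pair `((val as i128) << 64) >> 64` used above: it keeps the low 64 bits of the evaluated discriminant and sign-extends bit 63, so `isize` discriminants can be range-checked as signed values. A self-contained sketch of that arithmetic (assumes nothing beyond std):

```rust
use std::convert::TryFrom;

// Same trick as in the lint: keep the low 64 bits and sign-extend bit 63.
fn sign_extend_64(val: u128) -> i128 {
    ((val as i128) << 64) >> 64
}

fn main() {
    // A `-1` `isize` discriminant is stored as 0xFFFF_FFFF_FFFF_FFFF;
    // sign-extension recovers -1, which fits in an i32, so it is portable.
    assert_eq!(sign_extend_64(u64::MAX as u128), -1);
    // 0x1_0000_0000 stays positive and does not fit in an i32, so it would be linted.
    assert!(i32::try_from(sign_extend_64(0x1_0000_0000)).is_err());
}
```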
diff --git a/src/tools/clippy/clippy_lints/src/enum_variants.rs b/src/tools/clippy/clippy_lints/src/enum_variants.rs
new file mode 100644
index 000000000..cd36f9fcd
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/enum_variants.rs
@@ -0,0 +1,306 @@
+//! lint on enum variants that are prefixed or suffixed by the same characters
+
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::source::is_present_in_source;
+use clippy_utils::str_utils::{camel_case_split, count_match_end, count_match_start};
+use rustc_hir::{EnumDef, Item, ItemKind, Variant};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::Symbol;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects enumeration variants that are prefixed or suffixed
+ /// by the same characters.
+ ///
+ /// ### Why is this bad?
+ /// Enumeration variant names should specify their variant,
+ /// not repeat the enumeration name.
+ ///
+ /// ### Limitations
+ /// Characters with no casing will be considered when comparing prefixes/suffixes.
+ /// This applies to numbers and to non-ASCII characters without casing,
+ /// e.g. `Foo1` and `Foo2` are considered to have different prefixes
+ /// (the prefixes are `Foo1` and `Foo2` respectively), as are `Bar螃` and `Bar蟹`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum Cake {
+ /// BlackForestCake,
+ /// HummingbirdCake,
+ /// BattenbergCake,
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// enum Cake {
+ /// BlackForest,
+ /// Hummingbird,
+ /// Battenberg,
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ENUM_VARIANT_NAMES,
+ style,
+ "enums where all variants share a prefix/postfix"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects type names that are prefixed or suffixed by the
+ /// containing module's name.
+ ///
+ /// ### Why is this bad?
+ /// It requires the user to type the module name twice.
+ ///
+ /// ### Example
+ /// ```rust
+ /// mod cake {
+ /// struct BlackForestCake;
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// mod cake {
+ /// struct BlackForest;
+ /// }
+ /// ```
+ #[clippy::version = "1.33.0"]
+ pub MODULE_NAME_REPETITIONS,
+ pedantic,
+ "type names prefixed/postfixed with their containing module's name"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for modules that have the same name as their
+ /// parent module
+ ///
+ /// ### Why is this bad?
+ /// A typical beginner mistake is to have `mod foo;` and again
+ /// `mod foo { .. }` in `foo.rs`. The expectation is that items inside
+ /// the inner `mod foo { .. }` are then available through `foo::x`,
+ /// but they are only available through `foo::foo::x`.
+ /// If this is done on purpose, it would be better to choose a more
+ /// representative module name.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// // lib.rs
+ /// mod foo;
+ /// // foo.rs
+ /// mod foo {
+ /// ...
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MODULE_INCEPTION,
+ style,
+ "modules that have the same name as their parent module"
+}
+
+pub struct EnumVariantNames {
+ modules: Vec<(Symbol, String)>,
+ threshold: u64,
+ avoid_breaking_exported_api: bool,
+}
+
+impl EnumVariantNames {
+ #[must_use]
+ pub fn new(threshold: u64, avoid_breaking_exported_api: bool) -> Self {
+ Self {
+ modules: Vec::new(),
+ threshold,
+ avoid_breaking_exported_api,
+ }
+ }
+}
+
+impl_lint_pass!(EnumVariantNames => [
+ ENUM_VARIANT_NAMES,
+ MODULE_NAME_REPETITIONS,
+ MODULE_INCEPTION
+]);
+
+fn check_enum_start(cx: &LateContext<'_>, item_name: &str, variant: &Variant<'_>) {
+ let name = variant.ident.name.as_str();
+ let item_name_chars = item_name.chars().count();
+
+ if count_match_start(item_name, name).char_count == item_name_chars
+ && name.chars().nth(item_name_chars).map_or(false, |c| !c.is_lowercase())
+ && name.chars().nth(item_name_chars + 1).map_or(false, |c| !c.is_numeric())
+ {
+ span_lint(
+ cx,
+ ENUM_VARIANT_NAMES,
+ variant.span,
+ "variant name starts with the enum's name",
+ );
+ }
+}
+
+fn check_enum_end(cx: &LateContext<'_>, item_name: &str, variant: &Variant<'_>) {
+ let name = variant.ident.name.as_str();
+ let item_name_chars = item_name.chars().count();
+
+ if count_match_end(item_name, name).char_count == item_name_chars {
+ span_lint(
+ cx,
+ ENUM_VARIANT_NAMES,
+ variant.span,
+ "variant name ends with the enum's name",
+ );
+ }
+}
+
+fn check_variant(cx: &LateContext<'_>, threshold: u64, def: &EnumDef<'_>, item_name: &str, span: Span) {
+ if (def.variants.len() as u64) < threshold {
+ return;
+ }
+
+ let first = &def.variants[0].ident.name.as_str();
+ let mut pre = camel_case_split(first);
+ let mut post = pre.clone();
+ post.reverse();
+ for var in def.variants {
+ check_enum_start(cx, item_name, var);
+ check_enum_end(cx, item_name, var);
+ let name = var.ident.name.as_str();
+
+ let variant_split = camel_case_split(name);
+ if variant_split.len() == 1 {
+ return;
+ }
+
+ pre = pre
+ .iter()
+ .zip(variant_split.iter())
+ .take_while(|(a, b)| a == b)
+ .map(|e| *e.0)
+ .collect();
+ post = post
+ .iter()
+ .zip(variant_split.iter().rev())
+ .take_while(|(a, b)| a == b)
+ .map(|e| *e.0)
+ .collect();
+ }
+ let (what, value) = match (have_no_extra_prefix(&pre), post.is_empty()) {
+ (true, true) => return,
+ (false, _) => ("pre", pre.join("")),
+ (true, false) => {
+ post.reverse();
+ ("post", post.join(""))
+ },
+ };
+ span_lint_and_help(
+ cx,
+ ENUM_VARIANT_NAMES,
+ span,
+ &format!("all variants have the same {}fix: `{}`", what, value),
+ None,
+ &format!(
+ "remove the {}fixes and use full paths to \
+ the variants instead of glob imports",
+ what
+ ),
+ );
+}
+
+#[must_use]
+fn have_no_extra_prefix(prefixes: &[&str]) -> bool {
+ prefixes.iter().all(|p| p == &"" || p == &"_")
+}
+
+#[must_use]
+fn to_camel_case(item_name: &str) -> String {
+ let mut s = String::new();
+ let mut up = true;
+ for c in item_name.chars() {
+ if c.is_uppercase() {
+ // we only turn snake case text into CamelCase
+ return item_name.to_string();
+ }
+ if c == '_' {
+ up = true;
+ continue;
+ }
+ if up {
+ up = false;
+ s.extend(c.to_uppercase());
+ } else {
+ s.push(c);
+ }
+ }
+ s
+}
+
+impl LateLintPass<'_> for EnumVariantNames {
+ fn check_item_post(&mut self, _cx: &LateContext<'_>, _item: &Item<'_>) {
+ let last = self.modules.pop();
+ assert!(last.is_some());
+ }
+
+ #[expect(clippy::similar_names)]
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ let item_name = item.ident.name.as_str();
+ let item_camel = to_camel_case(item_name);
+ if !item.span.from_expansion() && is_present_in_source(cx, item.span) {
+ if let Some(&(ref mod_name, ref mod_camel)) = self.modules.last() {
+ // constants don't have surrounding modules
+ if !mod_camel.is_empty() {
+ if mod_name == &item.ident.name {
+ if let ItemKind::Mod(..) = item.kind {
+ span_lint(
+ cx,
+ MODULE_INCEPTION,
+ item.span,
+ "module has the same name as its containing module",
+ );
+ }
+ }
+ // The `module_name_repetitions` lint should only trigger if the item has the module in its
+ // name. Having the same name is accepted.
+ if cx.tcx.visibility(item.def_id).is_public() && item_camel.len() > mod_camel.len() {
+ let matching = count_match_start(mod_camel, &item_camel);
+ let rmatching = count_match_end(mod_camel, &item_camel);
+ let nchars = mod_camel.chars().count();
+
+ let is_word_beginning = |c: char| c == '_' || c.is_uppercase() || c.is_numeric();
+
+ if matching.char_count == nchars {
+ match item_camel.chars().nth(nchars) {
+ Some(c) if is_word_beginning(c) => span_lint(
+ cx,
+ MODULE_NAME_REPETITIONS,
+ item.span,
+ "item name starts with its containing module's name",
+ ),
+ _ => (),
+ }
+ }
+ if rmatching.char_count == nchars {
+ span_lint(
+ cx,
+ MODULE_NAME_REPETITIONS,
+ item.span,
+ "item name ends with its containing module's name",
+ );
+ }
+ }
+ }
+ }
+ }
+ if let ItemKind::Enum(ref def, _) = item.kind {
+ if !(self.avoid_breaking_exported_api && cx.access_levels.is_exported(item.def_id)) {
+ check_variant(cx, self.threshold, def, item_name, item.span);
+ }
+ }
+ self.modules.push((item.ident.name, item_camel));
+ }
+}
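For reference, the `to_camel_case` helper above only converts snake_case names; anything already containing an uppercase letter is returned untouched. A standalone copy with a small usage check (illustrative only):

```rust
// Copy of the helper above: snake_case is converted, names that already
// contain an uppercase letter are returned unchanged.
fn to_camel_case(item_name: &str) -> String {
    let mut s = String::new();
    let mut up = true;
    for c in item_name.chars() {
        if c.is_uppercase() {
            return item_name.to_string();
        }
        if c == '_' {
            up = true;
            continue;
        }
        if up {
            up = false;
            s.extend(c.to_uppercase());
        } else {
            s.push(c);
        }
    }
    s
}

fn main() {
    assert_eq!(to_camel_case("black_forest"), "BlackForest");
    assert_eq!(to_camel_case("BlackForest"), "BlackForest");
}
```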
diff --git a/src/tools/clippy/clippy_lints/src/equatable_if_let.rs b/src/tools/clippy/clippy_lints/src/equatable_if_let.rs
new file mode 100644
index 000000000..fdfb821ac
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/equatable_if_let.rs
@@ -0,0 +1,103 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::ty::implements_trait;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, Pat, PatKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::Ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for pattern matching that can be expressed using equality.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// * It reads better and has less cognitive load because equality won't cause binding.
+ /// * It is a [Yoda condition](https://en.wikipedia.org/wiki/Yoda_conditions). Yoda conditions are widely
+ /// criticized for increasing the cognitive load of reading the code.
+ /// * Equality is a simple bool expression and can be merged with `&&` and `||`,
+ /// allowing `if` blocks to be reused.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// if let Some(2) = x {
+ /// do_thing();
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// if x == Some(2) {
+ /// do_thing();
+ /// }
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub EQUATABLE_IF_LET,
+ nursery,
+ "using pattern matching instead of equality"
+}
+
+declare_lint_pass!(PatternEquality => [EQUATABLE_IF_LET]);
+
+/// detects if pattern matches just one thing
+fn unary_pattern(pat: &Pat<'_>) -> bool {
+ fn array_rec(pats: &[Pat<'_>]) -> bool {
+ pats.iter().all(unary_pattern)
+ }
+ match &pat.kind {
+ PatKind::Slice(_, _, _) | PatKind::Range(_, _, _) | PatKind::Binding(..) | PatKind::Wild | PatKind::Or(_) => {
+ false
+ },
+ PatKind::Struct(_, a, etc) => !etc && a.iter().all(|x| unary_pattern(x.pat)),
+ PatKind::Tuple(a, etc) | PatKind::TupleStruct(_, a, etc) => !etc.is_some() && array_rec(a),
+ PatKind::Ref(x, _) | PatKind::Box(x) => unary_pattern(x),
+ PatKind::Path(_) | PatKind::Lit(_) => true,
+ }
+}
+
+fn is_structural_partial_eq<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, other: Ty<'tcx>) -> bool {
+ if let Some(def_id) = cx.tcx.lang_items().eq_trait() {
+ implements_trait(cx, ty, def_id, &[other.into()])
+ } else {
+ false
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for PatternEquality {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if_chain! {
+ if !in_external_macro(cx.sess(), expr.span);
+ if let ExprKind::Let(let_expr) = expr.kind;
+ if unary_pattern(let_expr.pat);
+ let exp_ty = cx.typeck_results().expr_ty(let_expr.init);
+ let pat_ty = cx.typeck_results().pat_ty(let_expr.pat);
+ if is_structural_partial_eq(cx, exp_ty, pat_ty);
+ then {
+
+ let mut applicability = Applicability::MachineApplicable;
+ let pat_str = match let_expr.pat.kind {
+ PatKind::Struct(..) => format!(
+ "({})",
+ snippet_with_context(cx, let_expr.pat.span, expr.span.ctxt(), "..", &mut applicability).0,
+ ),
+ _ => snippet_with_context(cx, let_expr.pat.span, expr.span.ctxt(), "..", &mut applicability).0.to_string(),
+ };
+ span_lint_and_sugg(
+ cx,
+ EQUATABLE_IF_LET,
+ expr.span,
+ "this pattern matching can be expressed using equality",
+ "try",
+ format!(
+ "{} == {}",
+ snippet_with_context(cx, let_expr.init.span, expr.span.ctxt(), "..", &mut applicability).0,
+ pat_str,
+ ),
+ applicability,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs
new file mode 100644
index 000000000..1ac7bfba0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/escape.rs
@@ -0,0 +1,199 @@
+use clippy_utils::diagnostics::span_lint_hir;
+use clippy_utils::ty::contains_ty;
+use rustc_hir::intravisit;
+use rustc_hir::{self, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::mir::FakeReadCause;
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, TraitRef, Ty};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::kw;
+use rustc_target::spec::abi::Abi;
+use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
+
+#[derive(Copy, Clone)]
+pub struct BoxedLocal {
+ pub too_large_for_stack: u64,
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `Box<T>` where an unboxed `T` would
+ /// work fine.
+ ///
+ /// ### Why is this bad?
+ /// This is an unnecessary allocation, and bad for
+ /// performance. It is only necessary to allocate if you wish to move the box
+ /// into something.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn foo(bar: usize) {}
+ /// let x = Box::new(1);
+ /// foo(*x);
+ /// println!("{}", *x);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn foo(bar: usize) {}
+ /// let x = 1;
+ /// foo(x);
+ /// println!("{}", x);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BOXED_LOCAL,
+ perf,
+ "using `Box<T>` where unnecessary"
+}
+
+fn is_non_trait_box(ty: Ty<'_>) -> bool {
+ ty.is_box() && !ty.boxed_ty().is_trait()
+}
+
+struct EscapeDelegate<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ set: HirIdSet,
+ trait_self_ty: Option<Ty<'tcx>>,
+ too_large_for_stack: u64,
+}
+
+impl_lint_pass!(BoxedLocal => [BOXED_LOCAL]);
+
+impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ fn_kind: intravisit::FnKind<'tcx>,
+ _: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ _: Span,
+ hir_id: HirId,
+ ) {
+ if let Some(header) = fn_kind.header() {
+ if header.abi != Abi::Rust {
+ return;
+ }
+ }
+
+ let parent_id = cx.tcx.hir().get_parent_item(hir_id);
+ let parent_node = cx.tcx.hir().find_by_def_id(parent_id);
+
+ let mut trait_self_ty = None;
+ if let Some(Node::Item(item)) = parent_node {
+ // If the method is an impl for a trait, don't warn.
+ if let ItemKind::Impl(Impl { of_trait: Some(_), .. }) = item.kind {
+ return;
+ }
+
+ // find `self` ty for this trait if relevant
+ if let ItemKind::Trait(_, _, _, _, items) = item.kind {
+ for trait_item in items {
+ if trait_item.id.hir_id() == hir_id {
+ // be sure we have `self` parameter in this function
+ if trait_item.kind == (AssocItemKind::Fn { has_self: true }) {
+ trait_self_ty = Some(
+ TraitRef::identity(cx.tcx, trait_item.id.def_id.to_def_id())
+ .self_ty()
+ .skip_binder(),
+ );
+ }
+ }
+ }
+ }
+ }
+
+ let mut v = EscapeDelegate {
+ cx,
+ set: HirIdSet::default(),
+ trait_self_ty,
+ too_large_for_stack: self.too_large_for_stack,
+ };
+
+ let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ ExprUseVisitor::new(&mut v, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body);
+ });
+
+ for node in v.set {
+ span_lint_hir(
+ cx,
+ BOXED_LOCAL,
+ node,
+ cx.tcx.hir().span(node),
+ "local variable doesn't need to be boxed here",
+ );
+ }
+ }
+}
+
+// TODO: Replace with Map::is_argument(..) when it's fixed
+fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
+ match map.find(id) {
+ Some(Node::Pat(Pat {
+ kind: PatKind::Binding(..),
+ ..
+ })) => (),
+ _ => return false,
+ }
+
+ matches!(map.find(map.get_parent_node(id)), Some(Node::Param(_)))
+}
+
+impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
+ fn consume(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
+ if cmt.place.projections.is_empty() {
+ if let PlaceBase::Local(lid) = cmt.place.base {
+ self.set.remove(&lid);
+ }
+ }
+ }
+
+ fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) {
+ if cmt.place.projections.is_empty() {
+ if let PlaceBase::Local(lid) = cmt.place.base {
+ self.set.remove(&lid);
+ }
+ }
+ }
+
+ fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
+ if cmt.place.projections.is_empty() {
+ let map = &self.cx.tcx.hir();
+ if is_argument(*map, cmt.hir_id) {
+ // Skip closure arguments
+ let parent_id = map.get_parent_node(cmt.hir_id);
+ if let Some(Node::Expr(..)) = map.find(map.get_parent_node(parent_id)) {
+ return;
+ }
+
+ // skip if there is a `self` parameter binding to a type
+ // that contains `Self` (i.e.: `self: Box<Self>`), see #4804
+ if let Some(trait_self_ty) = self.trait_self_ty {
+ if map.name(cmt.hir_id) == kw::SelfLower && contains_ty(cmt.place.ty(), trait_self_ty) {
+ return;
+ }
+ }
+
+ if is_non_trait_box(cmt.place.ty()) && !self.is_large_box(cmt.place.ty()) {
+ self.set.insert(cmt.hir_id);
+ }
+ }
+ }
+ }
+
+ fn fake_read(&mut self, _: &rustc_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {}
+}
+
+impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
+ fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
+ // Large types need to be boxed to avoid stack overflows.
+ if ty.is_box() {
+ self.cx.layout_of(ty.boxed_ty()).map_or(0, |l| l.size.bytes()) > self.too_large_for_stack
+ } else {
+ false
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs
new file mode 100644
index 000000000..4f9ff97f1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs
@@ -0,0 +1,235 @@
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::higher::VecArgs;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::usage::local_used_after_expr;
+use clippy_utils::{higher, is_adjusted, path_to_local, path_to_local_id};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Closure, Expr, ExprKind, Param, PatKind, Unsafety};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
+use rustc_middle::ty::binding::BindingMode;
+use rustc_middle::ty::subst::Subst;
+use rustc_middle::ty::{self, ClosureKind, Ty, TypeVisitable};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for closures which just call another function where
+ /// the function can be called directly. `unsafe` functions or calls where types
+ /// get adjusted are ignored.
+ ///
+ /// ### Why is this bad?
+ /// Needlessly creating a closure adds code for no benefit
+ /// and gives the optimizer more work.
+ ///
+ /// ### Known problems
+ /// If creating the closure inside the closure has a side-
+ /// effect then moving the closure creation out will change when that side-
+ /// effect runs.
+ /// See [#1439](https://github.com/rust-lang/rust-clippy/issues/1439) for more details.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// xs.map(|x| foo(x))
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// // where `foo(_)` is a plain function that takes the exact argument type of `x`.
+ /// xs.map(foo)
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub REDUNDANT_CLOSURE,
+ style,
+ "redundant closures, i.e., `|a| foo(a)` (which can be written as just `foo`)"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for closures which only invoke a method on the closure
+ /// argument and can be replaced by referencing the method directly.
+ ///
+ /// ### Why is this bad?
+ /// It's unnecessary to create the closure.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// Some('a').map(|s| s.to_uppercase());
+ /// ```
+ /// may be rewritten as
+ /// ```rust,ignore
+ /// Some('a').map(char::to_uppercase);
+ /// ```
+ #[clippy::version = "1.35.0"]
+ pub REDUNDANT_CLOSURE_FOR_METHOD_CALLS,
+ pedantic,
+ "redundant closures for method calls"
+}
+
+declare_lint_pass!(EtaReduction => [REDUNDANT_CLOSURE, REDUNDANT_CLOSURE_FOR_METHOD_CALLS]);
+
+impl<'tcx> LateLintPass<'tcx> for EtaReduction {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+ let body = match expr.kind {
+ ExprKind::Closure(&Closure { body, .. }) => cx.tcx.hir().body(body),
+ _ => return,
+ };
+ if body.value.span.from_expansion() {
+ if body.params.is_empty() {
+ if let Some(VecArgs::Vec(&[])) = higher::VecArgs::hir(cx, &body.value) {
+ // replace `|| vec![]` with `Vec::new`
+ span_lint_and_sugg(
+ cx,
+ REDUNDANT_CLOSURE,
+ expr.span,
+ "redundant closure",
+ "replace the closure with `Vec::new`",
+ "std::vec::Vec::new".into(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ // skip `foo(|| macro!())`
+ return;
+ }
+
+ let closure_ty = cx.typeck_results().expr_ty(expr);
+
+ if_chain!(
+ if !is_adjusted(cx, &body.value);
+ if let ExprKind::Call(callee, args) = body.value.kind;
+ if let ExprKind::Path(_) = callee.kind;
+ if check_inputs(cx, body.params, args);
+ let callee_ty = cx.typeck_results().expr_ty_adjusted(callee);
+ let call_ty = cx.typeck_results().type_dependent_def_id(body.value.hir_id)
+ .map_or(callee_ty, |id| cx.tcx.type_of(id));
+ if check_sig(cx, closure_ty, call_ty);
+ let substs = cx.typeck_results().node_substs(callee.hir_id);
+ // This fixes some false positives that I don't entirely understand
+ if substs.is_empty() || !cx.typeck_results().expr_ty(expr).has_late_bound_regions();
+ // A type param function ref like `T::f` is not 'static, however
+ // it is if cast like `T::f as fn()`. This seems like a rustc bug.
+ if !substs.types().any(|t| matches!(t.kind(), ty::Param(_)));
+ let callee_ty_unadjusted = cx.typeck_results().expr_ty(callee).peel_refs();
+ if !is_type_diagnostic_item(cx, callee_ty_unadjusted, sym::Arc);
+ if !is_type_diagnostic_item(cx, callee_ty_unadjusted, sym::Rc);
+ then {
+ span_lint_and_then(cx, REDUNDANT_CLOSURE, expr.span, "redundant closure", |diag| {
+ if let Some(mut snippet) = snippet_opt(cx, callee.span) {
+ if_chain! {
+ if let ty::Closure(_, substs) = callee_ty.peel_refs().kind();
+ if substs.as_closure().kind() == ClosureKind::FnMut;
+ if path_to_local(callee).map_or(false, |l| local_used_after_expr(cx, l, expr));
+
+ then {
+ // Mutable closure is used after current expr; we cannot consume it.
+ snippet = format!("&mut {}", snippet);
+ }
+ }
+ diag.span_suggestion(
+ expr.span,
+ "replace the closure with the function itself",
+ snippet,
+ Applicability::MachineApplicable,
+ );
+ }
+ });
+ }
+ );
+
+ if_chain!(
+ if !is_adjusted(cx, &body.value);
+ if let ExprKind::MethodCall(path, args, _) = body.value.kind;
+ if check_inputs(cx, body.params, args);
+ let method_def_id = cx.typeck_results().type_dependent_def_id(body.value.hir_id).unwrap();
+ let substs = cx.typeck_results().node_substs(body.value.hir_id);
+ let call_ty = cx.tcx.bound_type_of(method_def_id).subst(cx.tcx, substs);
+ if check_sig(cx, closure_ty, call_ty);
+ then {
+ span_lint_and_then(cx, REDUNDANT_CLOSURE_FOR_METHOD_CALLS, expr.span, "redundant closure", |diag| {
+ let name = get_ufcs_type_name(cx, method_def_id);
+ diag.span_suggestion(
+ expr.span,
+ "replace the closure with the method itself",
+ format!("{}::{}", name, path.ident.name),
+ Applicability::MachineApplicable,
+ );
+ })
+ }
+ );
+ }
+}
+
+fn check_inputs(cx: &LateContext<'_>, params: &[Param<'_>], call_args: &[Expr<'_>]) -> bool {
+ if params.len() != call_args.len() {
+ return false;
+ }
+ let binding_modes = cx.typeck_results().pat_binding_modes();
+ std::iter::zip(params, call_args).all(|(param, arg)| {
+ match param.pat.kind {
+ PatKind::Binding(_, id, ..) if path_to_local_id(arg, id) => {},
+ _ => return false,
+ }
+ // checks that parameters are not bound as `ref` or `ref mut`
+ if let Some(BindingMode::BindByReference(_)) = binding_modes.get(param.pat.hir_id) {
+ return false;
+ }
+
+ match *cx.typeck_results().expr_adjustments(arg) {
+ [] => true,
+ [
+ Adjustment {
+ kind: Adjust::Deref(None),
+ ..
+ },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(_, mu2)),
+ ..
+ },
+ ] => {
+ // re-borrow with the same mutability is allowed
+ let ty = cx.typeck_results().expr_ty(arg);
+ matches!(*ty.kind(), ty::Ref(.., mu1) if mu1 == mu2.into())
+ },
+ _ => false,
+ }
+ })
+}
+
+fn check_sig<'tcx>(cx: &LateContext<'tcx>, closure_ty: Ty<'tcx>, call_ty: Ty<'tcx>) -> bool {
+ let call_sig = call_ty.fn_sig(cx.tcx);
+ if call_sig.unsafety() == Unsafety::Unsafe {
+ return false;
+ }
+ if !closure_ty.has_late_bound_regions() {
+ return true;
+ }
+ let substs = match closure_ty.kind() {
+ ty::Closure(_, substs) => substs,
+ _ => return false,
+ };
+ let closure_sig = cx.tcx.signature_unclosure(substs.as_closure().sig(), Unsafety::Normal);
+ cx.tcx.erase_late_bound_regions(closure_sig) == cx.tcx.erase_late_bound_regions(call_sig)
+}
+
+fn get_ufcs_type_name(cx: &LateContext<'_>, method_def_id: DefId) -> String {
+ let assoc_item = cx.tcx.associated_item(method_def_id);
+ let def_id = assoc_item.container_id(cx.tcx);
+ match assoc_item.container {
+ ty::TraitContainer => cx.tcx.def_path_str(def_id),
+ ty::ImplContainer => {
+ let ty = cx.tcx.type_of(def_id);
+ match ty.kind() {
+ ty::Adt(adt, _) => cx.tcx.def_path_str(adt.did()),
+ _ => ty.to_string(),
+ }
+ },
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/excessive_bools.rs b/src/tools/clippy/clippy_lints/src/excessive_bools.rs
new file mode 100644
index 000000000..453471c8c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/excessive_bools.rs
@@ -0,0 +1,176 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{AssocItemKind, Extern, Fn, FnSig, Impl, Item, ItemKind, Trait, Ty, TyKind};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for excessive
+ /// use of bools in structs.
+ ///
+ /// ### Why is this bad?
+ /// Excessive bools in a struct
+ /// are often a sign that the struct is used as a state machine,
+ /// which is much better implemented as an enum.
+ /// If it's not the case, excessive bools usually benefit
+ /// from refactoring into two-variant enums for better
+ /// readability and API.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct S {
+ /// is_pending: bool,
+ /// is_processing: bool,
+ /// is_finished: bool,
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// enum S {
+ /// Pending,
+ /// Processing,
+ /// Finished,
+ /// }
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub STRUCT_EXCESSIVE_BOOLS,
+ pedantic,
+ "using too many bools in a struct"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for excessive use of
+ /// bools in function definitions.
+ ///
+ /// ### Why is this bad?
+ /// Calls to such functions
+ /// are confusing and error prone, because it's
+ /// hard to remember argument order and you have
+ /// no type system support to back you up. Using
+ /// two-variant enums instead of bools often makes
+ /// the API easier to use.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// fn f(is_round: bool, is_hot: bool) { ... }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// enum Shape {
+ /// Round,
+ /// Spiky,
+ /// }
+ ///
+ /// enum Temperature {
+ /// Hot,
+ /// IceCold,
+ /// }
+ ///
+ /// fn f(shape: Shape, temperature: Temperature) { ... }
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub FN_PARAMS_EXCESSIVE_BOOLS,
+ pedantic,
+ "using too many bools in function parameters"
+}
+
+pub struct ExcessiveBools {
+ max_struct_bools: u64,
+ max_fn_params_bools: u64,
+}
+
+impl ExcessiveBools {
+ #[must_use]
+ pub fn new(max_struct_bools: u64, max_fn_params_bools: u64) -> Self {
+ Self {
+ max_struct_bools,
+ max_fn_params_bools,
+ }
+ }
+
+ fn check_fn_sig(&self, cx: &EarlyContext<'_>, fn_sig: &FnSig, span: Span) {
+ match fn_sig.header.ext {
+ Extern::Implicit(_) | Extern::Explicit(_, _) => return,
+ Extern::None => (),
+ }
+
+ let fn_sig_bools = fn_sig
+ .decl
+ .inputs
+ .iter()
+ .filter(|param| is_bool_ty(&param.ty))
+ .count()
+ .try_into()
+ .unwrap();
+ if self.max_fn_params_bools < fn_sig_bools {
+ span_lint_and_help(
+ cx,
+ FN_PARAMS_EXCESSIVE_BOOLS,
+ span,
+ &format!("more than {} bools in function parameters", self.max_fn_params_bools),
+ None,
+ "consider refactoring bools into two-variant enums",
+ );
+ }
+ }
+}
+
+impl_lint_pass!(ExcessiveBools => [STRUCT_EXCESSIVE_BOOLS, FN_PARAMS_EXCESSIVE_BOOLS]);
+
+fn is_bool_ty(ty: &Ty) -> bool {
+ if let TyKind::Path(None, path) = &ty.kind {
+ if let [name] = path.segments.as_slice() {
+ return name.ident.name == sym::bool;
+ }
+ }
+ false
+}
+
+impl EarlyLintPass for ExcessiveBools {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if item.span.from_expansion() {
+ return;
+ }
+ match &item.kind {
+ ItemKind::Struct(variant_data, _) => {
+ if item.attrs.iter().any(|attr| attr.has_name(sym::repr)) {
+ return;
+ }
+
+ let struct_bools = variant_data
+ .fields()
+ .iter()
+ .filter(|field| is_bool_ty(&field.ty))
+ .count()
+ .try_into()
+ .unwrap();
+ if self.max_struct_bools < struct_bools {
+ span_lint_and_help(
+ cx,
+ STRUCT_EXCESSIVE_BOOLS,
+ item.span,
+ &format!("more than {} bools in a struct", self.max_struct_bools),
+ None,
+ "consider using a state machine or refactoring bools into two-variant enums",
+ );
+ }
+ },
+ ItemKind::Impl(box Impl {
+ of_trait: None, items, ..
+ })
+ | ItemKind::Trait(box Trait { items, .. }) => {
+ for item in items {
+ if let AssocItemKind::Fn(box Fn { sig, .. }) = &item.kind {
+ self.check_fn_sig(cx, sig, item.span);
+ }
+ }
+ },
+ ItemKind::Fn(box Fn { sig, .. }) => self.check_fn_sig(cx, sig, item.span),
+ _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/exhaustive_items.rs b/src/tools/clippy/clippy_lints/src/exhaustive_items.rs
new file mode 100644
index 000000000..173d41b4b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/exhaustive_items.rs
@@ -0,0 +1,111 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::indent_of;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns on any exported `enum`s that are not tagged `#[non_exhaustive]`
+ ///
+ /// ### Why is this bad?
+ /// Exhaustive enums are typically fine, but a project which does
+ /// not wish to make a stability commitment around exported enums may wish to
+ /// disable them by default.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum Foo {
+ /// Bar,
+ /// Baz
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[non_exhaustive]
+ /// enum Foo {
+ /// Bar,
+ /// Baz
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub EXHAUSTIVE_ENUMS,
+ restriction,
+ "detects exported enums that have not been marked #[non_exhaustive]"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns on any exported `struct`s that are not tagged `#[non_exhaustive]`
+ ///
+ /// ### Why is this bad?
+ /// Exhaustive structs are typically fine, but a project which does
+ /// not wish to make a stability commitment around exported structs may wish to
+ /// disable them by default.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo {
+ /// bar: u8,
+ /// baz: String,
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[non_exhaustive]
+ /// struct Foo {
+ /// bar: u8,
+ /// baz: String,
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub EXHAUSTIVE_STRUCTS,
+ restriction,
+ "detects exported structs that have not been marked #[non_exhaustive]"
+}
+
+declare_lint_pass!(ExhaustiveItems => [EXHAUSTIVE_ENUMS, EXHAUSTIVE_STRUCTS]);
+
+impl LateLintPass<'_> for ExhaustiveItems {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if_chain! {
+ if let ItemKind::Enum(..) | ItemKind::Struct(..) = item.kind;
+ if cx.access_levels.is_exported(item.def_id);
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ if !attrs.iter().any(|a| a.has_name(sym::non_exhaustive));
+ then {
+ let (lint, msg) = if let ItemKind::Struct(ref v, ..) = item.kind {
+ if v.fields().iter().any(|f| {
+ let def_id = cx.tcx.hir().local_def_id(f.hir_id);
+ !cx.tcx.visibility(def_id).is_public()
+ }) {
+ // skip structs with private fields
+ return;
+ }
+ (EXHAUSTIVE_STRUCTS, "exported structs should not be exhaustive")
+ } else {
+ (EXHAUSTIVE_ENUMS, "exported enums should not be exhaustive")
+ };
+ let suggestion_span = item.span.shrink_to_lo();
+ let indent = " ".repeat(indent_of(cx, item.span).unwrap_or(0));
+ span_lint_and_then(
+ cx,
+ lint,
+ item.span,
+ msg,
+ |diag| {
+ let sugg = format!("#[non_exhaustive]\n{}", indent);
+ diag.span_suggestion(suggestion_span,
+ "try adding #[non_exhaustive]",
+ sugg,
+ Applicability::MaybeIncorrect);
+ }
+ );
+
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/exit.rs b/src/tools/clippy/clippy_lints/src/exit.rs
new file mode 100644
index 000000000..cbf52d193
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/exit.rs
@@ -0,0 +1,46 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{is_entrypoint_fn, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind, Item, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// `exit()` terminates the program and doesn't provide a
+ /// stack trace.
+ ///
+ /// ### Why is this bad?
+ /// Ideally a program is terminated by finishing
+ /// the main function.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// std::process::exit(0)
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub EXIT,
+ restriction,
+ "`std::process::exit` is called, terminating the program"
+}
+
+declare_lint_pass!(Exit => [EXIT]);
+
+impl<'tcx> LateLintPass<'tcx> for Exit {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(path_expr, _args) = e.kind;
+ if let ExprKind::Path(ref path) = path_expr.kind;
+ if let Some(def_id) = cx.qpath_res(path, path_expr.hir_id).opt_def_id();
+ if match_def_path(cx, def_id, &paths::EXIT);
+ let parent = cx.tcx.hir().get_parent_item(e.hir_id);
+ if let Some(Node::Item(Item{kind: ItemKind::Fn(..), ..})) = cx.tcx.hir().find_by_def_id(parent);
+ // If the next item up is a function we check if it is an entry point
+ // and only then emit a linter warning
+ if !is_entrypoint_fn(cx, parent.to_def_id());
+ then {
+ span_lint(cx, EXIT, e.span, "usage of `process::exit`");
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/explicit_write.rs b/src/tools/clippy/clippy_lints/src/explicit_write.rs
new file mode 100644
index 000000000..5bf4313b4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/explicit_write.rs
@@ -0,0 +1,142 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::FormatArgsExpn;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{is_expn_of, match_function_call, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{BindingAnnotation, Block, BlockCheckMode, Expr, ExprKind, Node, PatKind, QPath, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `write!()` / `writeln!()` which can be
+ /// replaced with `(e)print!()` / `(e)println!()`
+ ///
+ /// ### Why is this bad?
+ /// Using `(e)println!()` is clearer and more concise
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::io::Write;
+ /// # let bar = "furchtbar";
+ /// writeln!(&mut std::io::stderr(), "foo: {:?}", bar).unwrap();
+ /// writeln!(&mut std::io::stdout(), "foo: {:?}", bar).unwrap();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::io::Write;
+ /// # let bar = "furchtbar";
+ /// eprintln!("foo: {:?}", bar);
+ /// println!("foo: {:?}", bar);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXPLICIT_WRITE,
+ complexity,
+ "using the `write!()` family of functions instead of the `print!()` family of functions, when using the latter would work"
+}
+
+declare_lint_pass!(ExplicitWrite => [EXPLICIT_WRITE]);
+
+impl<'tcx> LateLintPass<'tcx> for ExplicitWrite {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ // match call to unwrap
+ if let ExprKind::MethodCall(unwrap_fun, [write_call], _) = expr.kind;
+ if unwrap_fun.ident.name == sym::unwrap;
+ // match call to write_fmt
+ if let ExprKind::MethodCall(write_fun, [write_recv, write_arg], _) = look_in_block(cx, &write_call.kind);
+ if write_fun.ident.name == sym!(write_fmt);
+ // match calls to std::io::stdout() / std::io::stderr()
+ if let Some(dest_name) = if match_function_call(cx, write_recv, &paths::STDOUT).is_some() {
+ Some("stdout")
+ } else if match_function_call(cx, write_recv, &paths::STDERR).is_some() {
+ Some("stderr")
+ } else {
+ None
+ };
+ if let Some(format_args) = FormatArgsExpn::parse(cx, write_arg);
+ then {
+ let calling_macro =
+ // ordering is important here, since `writeln!` uses `write!` internally
+ if is_expn_of(write_call.span, "writeln").is_some() {
+ Some("writeln")
+ } else if is_expn_of(write_call.span, "write").is_some() {
+ Some("write")
+ } else {
+ None
+ };
+ let prefix = if dest_name == "stderr" {
+ "e"
+ } else {
+ ""
+ };
+
+ // We need to remove the last trailing newline from the string because the
+ // underlying `fmt::write` function doesn't know whether `println!` or `print!` was
+ // used.
+ let (used, sugg_mac) = if let Some(macro_name) = calling_macro {
+ (
+ format!("{}!({}(), ...)", macro_name, dest_name),
+ macro_name.replace("write", "print"),
+ )
+ } else {
+ (
+ format!("{}().write_fmt(...)", dest_name),
+ "print".into(),
+ )
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ let inputs_snippet = snippet_with_applicability(
+ cx,
+ format_args.inputs_span(),
+ "..",
+ &mut applicability,
+ );
+ span_lint_and_sugg(
+ cx,
+ EXPLICIT_WRITE,
+ expr.span,
+ &format!("use of `{}.unwrap()`", used),
+ "try this",
+ format!("{}{}!({})", prefix, sugg_mac, inputs_snippet),
+ applicability,
+ )
+ }
+ }
+ }
+}
+
+/// If `kind` is a block that looks like `{ let result = $expr; result }` then
+/// returns $expr. Otherwise returns `kind`.
+fn look_in_block<'tcx, 'hir>(cx: &LateContext<'tcx>, kind: &'tcx ExprKind<'hir>) -> &'tcx ExprKind<'hir> {
+ if_chain! {
+ if let ExprKind::Block(block, _label @ None) = kind;
+ if let Block {
+ stmts: [Stmt { kind: StmtKind::Local(local), .. }],
+ expr: Some(expr_end_of_block),
+ rules: BlockCheckMode::DefaultBlock,
+ ..
+ } = block;
+
+ // Find id of the local that expr_end_of_block resolves to
+ if let ExprKind::Path(QPath::Resolved(None, expr_path)) = expr_end_of_block.kind;
+ if let Res::Local(expr_res) = expr_path.res;
+ if let Some(Node::Pat(res_pat)) = cx.tcx.hir().find(expr_res);
+
+ // Find id of the local we found in the block
+ if let PatKind::Binding(BindingAnnotation::Unannotated, local_hir_id, _ident, None) = local.pat.kind;
+
+ // If those two are the same hir id
+ if res_pat.hir_id == local_hir_id;
+
+ if let Some(init) = local.init;
+ then {
+ return &init.kind;
+ }
+ }
+ kind
+}
diff --git a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs
new file mode 100644
index 000000000..b88e53aec
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs
@@ -0,0 +1,132 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::macros::{is_panic, root_macro_call_first_node};
+use clippy_utils::method_chain_args;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for impls of `From<..>` that contain `panic!()` or `unwrap()`
+ ///
+ /// ### Why is this bad?
+ /// `TryFrom` should be used if there's a possibility of failure.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo(i32);
+ ///
+ /// impl From<String> for Foo {
+ /// fn from(s: String) -> Self {
+ /// Foo(s.parse().unwrap())
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// struct Foo(i32);
+ ///
+ /// impl TryFrom<String> for Foo {
+ /// type Error = ();
+ /// fn try_from(s: String) -> Result<Self, Self::Error> {
+ /// if let Ok(parsed) = s.parse() {
+ /// Ok(Foo(parsed))
+ /// } else {
+ /// Err(())
+ /// }
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FALLIBLE_IMPL_FROM,
+ nursery,
+ "Warn on impls of `From<..>` that contain `panic!()` or `unwrap()`"
+}
+
+declare_lint_pass!(FallibleImplFrom => [FALLIBLE_IMPL_FROM]);
+
+impl<'tcx> LateLintPass<'tcx> for FallibleImplFrom {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ // check for `impl From<???> for ..`
+ if_chain! {
+ if let hir::ItemKind::Impl(impl_) = &item.kind;
+ if let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(item.def_id);
+ if cx.tcx.is_diagnostic_item(sym::From, impl_trait_ref.def_id);
+ then {
+ lint_impl_body(cx, item.span, impl_.items);
+ }
+ }
+ }
+}
+
+fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, impl_items: &[hir::ImplItemRef]) {
+ use rustc_hir::intravisit::{self, Visitor};
+ use rustc_hir::{Expr, ImplItemKind};
+
+ struct FindPanicUnwrap<'a, 'tcx> {
+ lcx: &'a LateContext<'tcx>,
+ typeck_results: &'tcx ty::TypeckResults<'tcx>,
+ result: Vec<Span>,
+ }
+
+ impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if let Some(macro_call) = root_macro_call_first_node(self.lcx, expr) {
+ if is_panic(self.lcx, macro_call.def_id) {
+ self.result.push(expr.span);
+ }
+ }
+
+ // check for `unwrap`
+ if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
+ let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+ if is_type_diagnostic_item(self.lcx, receiver_ty, sym::Option)
+ || is_type_diagnostic_item(self.lcx, receiver_ty, sym::Result)
+ {
+ self.result.push(expr.span);
+ }
+ }
+
+ // and check sub-expressions
+ intravisit::walk_expr(self, expr);
+ }
+ }
+
+ for impl_item in impl_items {
+ if_chain! {
+ if impl_item.ident.name == sym::from;
+ if let ImplItemKind::Fn(_, body_id) =
+ cx.tcx.hir().impl_item(impl_item.id).kind;
+ then {
+ // check the body for `begin_panic` or `unwrap`
+ let body = cx.tcx.hir().body(body_id);
+ let mut fpu = FindPanicUnwrap {
+ lcx: cx,
+ typeck_results: cx.tcx.typeck(impl_item.id.def_id),
+ result: Vec::new(),
+ };
+ fpu.visit_expr(&body.value);
+
+ // if we've found one, lint
+ if !fpu.result.is_empty() {
+ span_lint_and_then(
+ cx,
+ FALLIBLE_IMPL_FROM,
+ impl_span,
+ "consider implementing `TryFrom` instead",
+ move |diag| {
+ diag.help(
+ "`From` is intended for infallible conversions only. \
+ Use `TryFrom` if there's a possibility for the conversion to fail");
+ diag.span_note(fpu.result, "potential failure(s)");
+ });
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/float_literal.rs b/src/tools/clippy/clippy_lints/src/float_literal.rs
new file mode 100644
index 000000000..f2e079809
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/float_literal.rs
@@ -0,0 +1,181 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::numeric_literal;
+use if_chain::if_chain;
+use rustc_ast::ast::{self, LitFloatType, LitKind};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, FloatTy};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::fmt;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for float literals with a precision greater
+ /// than that supported by the underlying type.
+ ///
+ /// ### Why is this bad?
+ /// Rust will truncate the literal silently.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let v: f32 = 0.123_456_789_9;
+ /// println!("{}", v); // 0.123_456_789
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let v: f64 = 0.123_456_789_9;
+ /// println!("{}", v); // 0.123_456_789_9
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXCESSIVE_PRECISION,
+ style,
+ "excessive precision for float literal"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for whole number float literals that
+ /// cannot be represented as the underlying type without loss.
+ ///
+ /// ### Why is this bad?
+ /// Rust will silently lose precision during
+ /// conversion to a float.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _: f32 = 16_777_217.0; // 16_777_216.0
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let _: f32 = 16_777_216.0;
+ /// let _: f64 = 16_777_217.0;
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub LOSSY_FLOAT_LITERAL,
+ restriction,
+ "lossy whole number float literals"
+}
+
+declare_lint_pass!(FloatLiteral => [EXCESSIVE_PRECISION, LOSSY_FLOAT_LITERAL]);
+
+impl<'tcx> LateLintPass<'tcx> for FloatLiteral {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if_chain! {
+ if let ty::Float(fty) = *ty.kind();
+ if let hir::ExprKind::Lit(ref lit) = expr.kind;
+ if let LitKind::Float(sym, lit_float_ty) = lit.node;
+ then {
+ let sym_str = sym.as_str();
+ let formatter = FloatFormat::new(sym_str);
+ // Try to bail out if the float is for sure fine.
+ // If it's within 2 decimal digits of being out of precision, we
+ // check if the parsed representation is the same as the string,
+ // since we'll need the truncated string anyway.
+ let digits = count_digits(sym_str);
+ let max = max_digits(fty);
+ let type_suffix = match lit_float_ty {
+ LitFloatType::Suffixed(ast::FloatTy::F32) => Some("f32"),
+ LitFloatType::Suffixed(ast::FloatTy::F64) => Some("f64"),
+ LitFloatType::Unsuffixed => None
+ };
+ let (is_whole, mut float_str) = match fty {
+ FloatTy::F32 => {
+ let value = sym_str.parse::<f32>().unwrap();
+
+ (value.fract() == 0.0, formatter.format(value))
+ },
+ FloatTy::F64 => {
+ let value = sym_str.parse::<f64>().unwrap();
+
+ (value.fract() == 0.0, formatter.format(value))
+ },
+ };
+
+ if is_whole && !sym_str.contains(|c| c == 'e' || c == 'E') {
+ // Normalize the literal by stripping the fractional portion
+ if sym_str.split('.').next().unwrap() != float_str {
+ // If the type suffix is missing, the suggestion would be
+ // incorrectly interpreted as an integer, so add a `.0`
+ // suffix to prevent that.
+ if type_suffix.is_none() {
+ float_str.push_str(".0");
+ }
+
+ span_lint_and_sugg(
+ cx,
+ LOSSY_FLOAT_LITERAL,
+ expr.span,
+ "literal cannot be represented as the underlying type without loss of precision",
+ "consider changing the type or replacing it with",
+ numeric_literal::format(&float_str, type_suffix, true),
+ Applicability::MachineApplicable,
+ );
+ }
+ } else if digits > max as usize && float_str.len() < sym_str.len() {
+ span_lint_and_sugg(
+ cx,
+ EXCESSIVE_PRECISION,
+ expr.span,
+ "float has excessive precision",
+ "consider changing the type or truncating it to",
+ numeric_literal::format(&float_str, type_suffix, true),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+}
+
+#[must_use]
+fn max_digits(fty: FloatTy) -> u32 {
+ match fty {
+ FloatTy::F32 => f32::DIGITS,
+ FloatTy::F64 => f64::DIGITS,
+ }
+}
+
+/// Counts the digits excluding leading zeros
+#[must_use]
+fn count_digits(s: &str) -> usize {
+ // Note that s does not contain the f32/64 suffix, and underscores have been stripped
+ s.chars()
+ .filter(|c| *c != '-' && *c != '.')
+ .take_while(|c| *c != 'e' && *c != 'E')
+ .fold(0, |count, c| {
+ // leading zeros
+ if c == '0' && count == 0 { count } else { count + 1 }
+ })
+}
+
+enum FloatFormat {
+ LowerExp,
+ UpperExp,
+ Normal,
+}
+impl FloatFormat {
+ #[must_use]
+ fn new(s: &str) -> Self {
+ s.chars()
+ .find_map(|x| match x {
+ 'e' => Some(Self::LowerExp),
+ 'E' => Some(Self::UpperExp),
+ _ => None,
+ })
+ .unwrap_or(Self::Normal)
+ }
+ fn format<T>(&self, f: T) -> String
+ where
+ T: fmt::UpperExp + fmt::LowerExp + fmt::Display,
+ {
+ match self {
+ Self::LowerExp => format!("{:e}", f),
+ Self::UpperExp => format!("{:E}", f),
+ Self::Normal => format!("{}", f),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
new file mode 100644
index 000000000..df9b41d2c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
@@ -0,0 +1,735 @@
+use clippy_utils::consts::{
+ constant, constant_simple, Constant,
+ Constant::{Int, F32, F64},
+};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher;
+use clippy_utils::{eq_expr_value, get_parent_expr, in_constant, numeric_literal, peel_blocks, sugg};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, PathSegment, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+
+use rustc_ast::ast;
+use std::f32::consts as f32_consts;
+use std::f64::consts as f64_consts;
+use sugg::Sugg;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Looks for floating-point expressions that
+ /// can be expressed using built-in methods to improve accuracy
+ /// at the cost of performance.
+ ///
+ /// ### Why is this bad?
+ /// Negatively impacts accuracy.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = 3f32;
+ /// let _ = a.powf(1.0 / 3.0);
+ /// let _ = (1.0 + a).ln();
+ /// let _ = a.exp() - 1.0;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let a = 3f32;
+ /// let _ = a.cbrt();
+ /// let _ = a.ln_1p();
+ /// let _ = a.exp_m1();
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub IMPRECISE_FLOPS,
+ nursery,
+ "usage of imprecise floating point operations"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Looks for floating-point expressions that
+ /// can be expressed using built-in methods to improve both
+ /// accuracy and performance.
+ ///
+ /// ### Why is this bad?
+ /// Negatively impacts accuracy and performance.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::f32::consts::E;
+ ///
+ /// let a = 3f32;
+ /// let _ = (2f32).powf(a);
+ /// let _ = E.powf(a);
+ /// let _ = a.powf(1.0 / 2.0);
+ /// let _ = a.log(2.0);
+ /// let _ = a.log(10.0);
+ /// let _ = a.log(E);
+ /// let _ = a.powf(2.0);
+ /// let _ = a * 2.0 + 4.0;
+ /// let _ = if a < 0.0 {
+ /// -a
+ /// } else {
+ /// a
+ /// };
+ /// let _ = if a < 0.0 {
+ /// a
+ /// } else {
+ /// -a
+ /// };
+ /// ```
+ ///
+ /// is better expressed as
+ ///
+ /// ```rust
+ /// use std::f32::consts::E;
+ ///
+ /// let a = 3f32;
+ /// let _ = a.exp2();
+ /// let _ = a.exp();
+ /// let _ = a.sqrt();
+ /// let _ = a.log2();
+ /// let _ = a.log10();
+ /// let _ = a.ln();
+ /// let _ = a.powi(2);
+ /// let _ = a.mul_add(2.0, 4.0);
+ /// let _ = a.abs();
+ /// let _ = -a.abs();
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub SUBOPTIMAL_FLOPS,
+ nursery,
+ "usage of sub-optimal floating point operations"
+}
+
+declare_lint_pass!(FloatingPointArithmetic => [
+ IMPRECISE_FLOPS,
+ SUBOPTIMAL_FLOPS
+]);
+
+// Returns the specialized log method for a given base if base is constant
+// and is one of 2, 10 and e
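+// (used by `check_log_base`, e.g. to replace `x.log(2.0)` with `x.log2()`)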
+fn get_specialized_log_method(cx: &LateContext<'_>, base: &Expr<'_>) -> Option<&'static str> {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), base) {
+ if F32(2.0) == value || F64(2.0) == value {
+ return Some("log2");
+ } else if F32(10.0) == value || F64(10.0) == value {
+ return Some("log10");
+ } else if F32(f32_consts::E) == value || F64(f64_consts::E) == value {
+ return Some("ln");
+ }
+ }
+
+ None
+}
+
+// Adds type suffixes and parentheses to method receivers if necessary
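+// (e.g. an unsuffixed receiver literal `2.` of type `f64` becomes `2.0f64`)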
+fn prepare_receiver_sugg<'a>(cx: &LateContext<'_>, mut expr: &'a Expr<'a>) -> Sugg<'a> {
+ let mut suggestion = Sugg::hir(cx, expr, "..");
+
+ if let ExprKind::Unary(UnOp::Neg, inner_expr) = &expr.kind {
+ expr = inner_expr;
+ }
+
+ if_chain! {
+ // if the expression is a float literal and it is unsuffixed then
+ // add a suffix so the suggestion is valid and unambiguous
+ if let ty::Float(float_ty) = cx.typeck_results().expr_ty(expr).kind();
+ if let ExprKind::Lit(lit) = &expr.kind;
+ if let ast::LitKind::Float(sym, ast::LitFloatType::Unsuffixed) = lit.node;
+ then {
+ let op = format!(
+ "{}{}{}",
+ suggestion,
+ // Check for float literals with no digits following the decimal
+ // separator, such as `2.`, and add a trailing zero
+ if sym.as_str().ends_with('.') {
+ "0"
+ } else {
+ ""
+ },
+ float_ty.name_str()
+ ).into();
+
+ suggestion = match suggestion {
+ Sugg::MaybeParen(_) => Sugg::MaybeParen(op),
+ _ => Sugg::NonParen(op)
+ };
+ }
+ }
+
+ suggestion.maybe_par()
+}
+
+fn check_log_base(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
+ if let Some(method) = get_specialized_log_method(cx, &args[1]) {
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "logarithm for bases 2, 10 and e can be computed more accurately",
+ "consider using",
+ format!("{}.{}()", Sugg::hir(cx, &args[0], ".."), method),
+ Applicability::MachineApplicable,
+ );
+ }
+}
+
+// TODO: Lint expressions of the form `(x + y).ln()` where y > 1 and
+// suggest usage of `(x + (y - 1)).ln_1p()` instead
+fn check_ln1p(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ lhs,
+ rhs,
+ ) = &args[0].kind
+ {
+ let recv = match (
+ constant(cx, cx.typeck_results(), lhs),
+ constant(cx, cx.typeck_results(), rhs),
+ ) {
+ (Some((value, _)), _) if F32(1.0) == value || F64(1.0) == value => rhs,
+ (_, Some((value, _))) if F32(1.0) == value || F64(1.0) == value => lhs,
+ _ => return,
+ };
+
+ span_lint_and_sugg(
+ cx,
+ IMPRECISE_FLOPS,
+ expr.span,
+ "ln(1 + x) can be computed more accurately",
+ "consider using",
+ format!("{}.ln_1p()", prepare_receiver_sugg(cx, recv)),
+ Applicability::MachineApplicable,
+ );
+ }
+}
+
+// Returns an integer if the float constant is a whole number and it can be
+// converted to an integer without loss of precision. For now we only check
+// ranges [-16777215, 16777216) for type f32 as whole number floats outside
+// this range are lossy and ambiguous.
+#[expect(clippy::cast_possible_truncation)]
+fn get_integer_from_float_constant(value: &Constant) -> Option<i32> {
+ match value {
+ F32(num) if num.fract() == 0.0 => {
+ if (-16_777_215.0..16_777_216.0).contains(num) {
+ Some(num.round() as i32)
+ } else {
+ None
+ }
+ },
+ F64(num) if num.fract() == 0.0 => {
+ if (-2_147_483_648.0..2_147_483_648.0).contains(num) {
+ Some(num.round() as i32)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
+
+fn check_powf(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
+ // Check receiver
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), &args[0]) {
+ let method = if F32(f32_consts::E) == value || F64(f64_consts::E) == value {
+ "exp"
+ } else if F32(2.0) == value || F64(2.0) == value {
+ "exp2"
+ } else {
+ return;
+ };
+
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "exponent for bases 2 and e can be computed more accurately",
+ "consider using",
+ format!("{}.{}()", prepare_receiver_sugg(cx, &args[1]), method),
+ Applicability::MachineApplicable,
+ );
+ }
+
+ // Check argument
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), &args[1]) {
+ let (lint, help, suggestion) = if F32(1.0 / 2.0) == value || F64(1.0 / 2.0) == value {
+ (
+ SUBOPTIMAL_FLOPS,
+ "square-root of a number can be computed more efficiently and accurately",
+ format!("{}.sqrt()", Sugg::hir(cx, &args[0], "..")),
+ )
+ } else if F32(1.0 / 3.0) == value || F64(1.0 / 3.0) == value {
+ (
+ IMPRECISE_FLOPS,
+ "cube-root of a number can be computed more accurately",
+ format!("{}.cbrt()", Sugg::hir(cx, &args[0], "..")),
+ )
+ } else if let Some(exponent) = get_integer_from_float_constant(&value) {
+ (
+ SUBOPTIMAL_FLOPS,
+ "exponentiation with integer powers can be computed more efficiently",
+ format!(
+ "{}.powi({})",
+ Sugg::hir(cx, &args[0], ".."),
+ numeric_literal::format(&exponent.to_string(), None, false)
+ ),
+ )
+ } else {
+ return;
+ };
+
+ span_lint_and_sugg(
+ cx,
+ lint,
+ expr.span,
+ help,
+ "consider using",
+ suggestion,
+ Applicability::MachineApplicable,
+ );
+ }
+}
+
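+// Suggests `x.mul_add(x, y)` for `x.powi(2) + y`, unless the expression is
+// part of a hypot pattern like `(x.powi(2) + y.powi(2)).sqrt()`, which is
+// handled by `check_hypot` instead.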
+fn check_powi(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), &args[1]) {
+ if value == Int(2) {
+ if let Some(parent) = get_parent_expr(cx, expr) {
+ if let Some(grandparent) = get_parent_expr(cx, parent) {
+ if let ExprKind::MethodCall(PathSegment { ident: method_name, .. }, args, _) = grandparent.kind {
+ if method_name.as_str() == "sqrt" && detect_hypot(cx, args).is_some() {
+ return;
+ }
+ }
+ }
+
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ lhs,
+ rhs,
+ ) = parent.kind
+ {
+ let other_addend = if lhs.hir_id == expr.hir_id { rhs } else { lhs };
+
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ parent.span,
+ "multiply and add expressions can be calculated more efficiently and accurately",
+ "consider using",
+ format!(
+ "{}.mul_add({}, {})",
+ Sugg::hir(cx, &args[0], ".."),
+ Sugg::hir(cx, &args[0], ".."),
+ Sugg::hir(cx, other_addend, ".."),
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+}
+
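+// Returns a `x.hypot(y)` suggestion if `args[0]` has the form `x * x + y * y`
+// or `x.powi(2) + y.powi(2)`.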
+fn detect_hypot(cx: &LateContext<'_>, args: &[Expr<'_>]) -> Option<String> {
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ add_lhs,
+ add_rhs,
+ ) = args[0].kind
+ {
+ // check if the expression is of the form x * x + y * y
+ if_chain! {
+ if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, lmul_lhs, lmul_rhs) = add_lhs.kind;
+ if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, rmul_lhs, rmul_rhs) = add_rhs.kind;
+ if eq_expr_value(cx, lmul_lhs, lmul_rhs);
+ if eq_expr_value(cx, rmul_lhs, rmul_rhs);
+ then {
+ return Some(format!("{}.hypot({})", Sugg::hir(cx, lmul_lhs, "..").maybe_par(), Sugg::hir(cx, rmul_lhs, "..")));
+ }
+ }
+
+ // check if the expression is of the form x.powi(2) + y.powi(2)
+ if_chain! {
+ if let ExprKind::MethodCall(
+ PathSegment { ident: lmethod_name, .. },
+ [largs_0, largs_1, ..],
+ _
+ ) = &add_lhs.kind;
+ if let ExprKind::MethodCall(
+ PathSegment { ident: rmethod_name, .. },
+ [rargs_0, rargs_1, ..],
+ _
+ ) = &add_rhs.kind;
+ if lmethod_name.as_str() == "powi" && rmethod_name.as_str() == "powi";
+ if let Some((lvalue, _)) = constant(cx, cx.typeck_results(), largs_1);
+ if let Some((rvalue, _)) = constant(cx, cx.typeck_results(), rargs_1);
+ if Int(2) == lvalue && Int(2) == rvalue;
+ then {
+ return Some(format!("{}.hypot({})", Sugg::hir(cx, largs_0, "..").maybe_par(), Sugg::hir(cx, rargs_0, "..")));
+ }
+ }
+ }
+
+ None
+}
+
+fn check_hypot(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
+ if let Some(message) = detect_hypot(cx, args) {
+ span_lint_and_sugg(
+ cx,
+ IMPRECISE_FLOPS,
+ expr.span,
+ "hypotenuse can be computed more accurately",
+ "consider using",
+ message,
+ Applicability::MachineApplicable,
+ );
+ }
+}
+
+// TODO: Lint expressions of the form `x.exp() - y` where y > 1
+// and suggest usage of `x.exp_m1() - (y - 1)` instead
+fn check_expm1(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Binary(Spanned { node: BinOpKind::Sub, .. }, lhs, rhs) = expr.kind;
+ if cx.typeck_results().expr_ty(lhs).is_floating_point();
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), rhs);
+ if F32(1.0) == value || F64(1.0) == value;
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &lhs.kind;
+ if cx.typeck_results().expr_ty(self_arg).is_floating_point();
+ if path.ident.name.as_str() == "exp";
+ then {
+ span_lint_and_sugg(
+ cx,
+ IMPRECISE_FLOPS,
+ expr.span,
+ "(e.pow(x) - 1) can be computed more accurately",
+ "consider using",
+ format!(
+ "{}.exp_m1()",
+ Sugg::hir(cx, self_arg, "..")
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
+fn is_float_mul_expr<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<(&'a Expr<'a>, &'a Expr<'a>)> {
+ if_chain! {
+ if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, lhs, rhs) = &expr.kind;
+ if cx.typeck_results().expr_ty(lhs).is_floating_point();
+ if cx.typeck_results().expr_ty(rhs).is_floating_point();
+ then {
+ return Some((lhs, rhs));
+ }
+ }
+
+ None
+}
+
+// TODO: Fix rust-lang/rust-clippy#4735
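+// Suggests `a.mul_add(b, c)` for expressions of the form `a * b + c` or
+// `c + a * b`, unless the addition is the receiver of a `sqrt` call that
+// forms a hypot pattern.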
+fn check_mul_add(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ lhs,
+ rhs,
+ ) = &expr.kind
+ {
+ if let Some(parent) = get_parent_expr(cx, expr) {
+ if let ExprKind::MethodCall(PathSegment { ident: method_name, .. }, args, _) = parent.kind {
+ if method_name.as_str() == "sqrt" && detect_hypot(cx, args).is_some() {
+ return;
+ }
+ }
+ }
+
+ let (recv, arg1, arg2) = if let Some((inner_lhs, inner_rhs)) = is_float_mul_expr(cx, lhs) {
+ (inner_lhs, inner_rhs, rhs)
+ } else if let Some((inner_lhs, inner_rhs)) = is_float_mul_expr(cx, rhs) {
+ (inner_lhs, inner_rhs, lhs)
+ } else {
+ return;
+ };
+
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "multiply and add expressions can be calculated more efficiently and accurately",
+ "consider using",
+ format!(
+ "{}.mul_add({}, {})",
+ prepare_receiver_sugg(cx, recv),
+ Sugg::hir(cx, arg1, ".."),
+ Sugg::hir(cx, arg2, ".."),
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+}
+
+/// Returns true iff `expr` is an expression which tests whether `test`
+/// is positive or non-negative.
+/// Used by `check_custom_abs` below.
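+/// (e.g. `x > 0.0` or `0.0 <= x` when `test` is `x`)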
+fn is_testing_positive(cx: &LateContext<'_>, expr: &Expr<'_>, test: &Expr<'_>) -> bool {
+ if let ExprKind::Binary(Spanned { node: op, .. }, left, right) = expr.kind {
+ match op {
+ BinOpKind::Gt | BinOpKind::Ge => is_zero(cx, right) && eq_expr_value(cx, left, test),
+ BinOpKind::Lt | BinOpKind::Le => is_zero(cx, left) && eq_expr_value(cx, right, test),
+ _ => false,
+ }
+ } else {
+ false
+ }
+}
+
+/// See [`is_testing_positive`]
+fn is_testing_negative(cx: &LateContext<'_>, expr: &Expr<'_>, test: &Expr<'_>) -> bool {
+ if let ExprKind::Binary(Spanned { node: op, .. }, left, right) = expr.kind {
+ match op {
+ BinOpKind::Gt | BinOpKind::Ge => is_zero(cx, left) && eq_expr_value(cx, right, test),
+ BinOpKind::Lt | BinOpKind::Le => is_zero(cx, right) && eq_expr_value(cx, left, test),
+ _ => false,
+ }
+ } else {
+ false
+ }
+}
+
+/// Returns true iff `expr` evaluates to a zero constant (integer or float)
+fn is_zero(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ match constant_simple(cx, cx.typeck_results(), expr) {
+ Some(Constant::Int(i)) => i == 0,
+ Some(Constant::F32(f)) => f == 0.0,
+ Some(Constant::F64(f)) => f == 0.0,
+ _ => false,
+ }
+}
+
+/// If the two expressions are negations of each other, returns a tuple:
+/// the first element is true iff `expr1` is the positive expression, and
+/// the second element is the positive one of the two expressions.
+/// If the two expressions are not negations of each other, returns `None`.
+fn are_negated<'a>(cx: &LateContext<'_>, expr1: &'a Expr<'a>, expr2: &'a Expr<'a>) -> Option<(bool, &'a Expr<'a>)> {
+ if let ExprKind::Unary(UnOp::Neg, expr1_negated) = &expr1.kind {
+ if eq_expr_value(cx, expr1_negated, expr2) {
+ return Some((false, expr2));
+ }
+ }
+ if let ExprKind::Unary(UnOp::Neg, expr2_negated) = &expr2.kind {
+ if eq_expr_value(cx, expr1, expr2_negated) {
+ return Some((true, expr1));
+ }
+ }
+ None
+}
+
+fn check_custom_abs(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let Some(higher::If { cond, then, r#else: Some(r#else) }) = higher::If::hir(expr);
+ let if_body_expr = peel_blocks(then);
+ let else_body_expr = peel_blocks(r#else);
+ if let Some((if_expr_positive, body)) = are_negated(cx, if_body_expr, else_body_expr);
+ then {
+ let positive_abs_sugg = (
+ "manual implementation of `abs` method",
+ format!("{}.abs()", Sugg::hir(cx, body, "..")),
+ );
+ let negative_abs_sugg = (
+ "manual implementation of negation of `abs` method",
+ format!("-{}.abs()", Sugg::hir(cx, body, "..")),
+ );
+ let sugg = if is_testing_positive(cx, cond, body) {
+ if if_expr_positive {
+ positive_abs_sugg
+ } else {
+ negative_abs_sugg
+ }
+ } else if is_testing_negative(cx, cond, body) {
+ if if_expr_positive {
+ negative_abs_sugg
+ } else {
+ positive_abs_sugg
+ }
+ } else {
+ return;
+ };
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ sugg.0,
+ "try",
+ sugg.1,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
+fn are_same_base_logs(cx: &LateContext<'_>, expr_a: &Expr<'_>, expr_b: &Expr<'_>) -> bool {
+ if_chain! {
+ if let ExprKind::MethodCall(PathSegment { ident: method_name_a, .. }, args_a, _) = expr_a.kind;
+ if let ExprKind::MethodCall(PathSegment { ident: method_name_b, .. }, args_b, _) = expr_b.kind;
+ then {
+ return method_name_a.as_str() == method_name_b.as_str() &&
+ args_a.len() == args_b.len() &&
+ (
+ ["ln", "log2", "log10"].contains(&method_name_a.as_str()) ||
+ method_name_a.as_str() == "log" && args_a.len() == 2 && eq_expr_value(cx, &args_a[1], &args_b[1])
+ );
+ }
+ }
+
+ false
+}
+
+fn check_log_division(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ // check if the expression is of the form x.logN() / y.logN()
+ if_chain! {
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Div, ..
+ },
+ lhs,
+ rhs,
+ ) = &expr.kind;
+ if are_same_base_logs(cx, lhs, rhs);
+ if let ExprKind::MethodCall(_, [largs_self, ..], _) = &lhs.kind;
+ if let ExprKind::MethodCall(_, [rargs_self, ..], _) = &rhs.kind;
+ then {
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "log base can be expressed more clearly",
+ "consider using",
+ format!("{}.log({})", Sugg::hir(cx, largs_self, ".."), Sugg::hir(cx, rargs_self, ".."),),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
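+// Suggests `x.to_degrees()` for `x * 180.0 / PI` and `x.to_radians()` for
+// `x * PI / 180.0`.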
+fn check_radians(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Div, ..
+ },
+ div_lhs,
+ div_rhs,
+ ) = &expr.kind;
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Mul, ..
+ },
+ mul_lhs,
+ mul_rhs,
+ ) = &div_lhs.kind;
+ if let Some((rvalue, _)) = constant(cx, cx.typeck_results(), div_rhs);
+ if let Some((lvalue, _)) = constant(cx, cx.typeck_results(), mul_rhs);
+ then {
+ // TODO: also check for constant values near PI/180 or 180/PI
+ if (F32(f32_consts::PI) == rvalue || F64(f64_consts::PI) == rvalue) &&
+ (F32(180_f32) == lvalue || F64(180_f64) == lvalue)
+ {
+ let mut proposal = format!("{}.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
+ if_chain! {
+ if let ExprKind::Lit(ref literal) = mul_lhs.kind;
+ if let ast::LitKind::Float(ref value, float_type) = literal.node;
+ if float_type == ast::LitFloatType::Unsuffixed;
+ then {
+ if value.as_str().ends_with('.') {
+ proposal = format!("{}0_f64.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
+ } else {
+ proposal = format!("{}_f64.to_degrees()", Sugg::hir(cx, mul_lhs, ".."));
+ }
+ }
+ }
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "conversion to degrees can be done more accurately",
+ "consider using",
+ proposal,
+ Applicability::MachineApplicable,
+ );
+ } else if
+ (F32(180_f32) == rvalue || F64(180_f64) == rvalue) &&
+ (F32(f32_consts::PI) == lvalue || F64(f64_consts::PI) == lvalue)
+ {
+ let mut proposal = format!("{}.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
+ if_chain! {
+ if let ExprKind::Lit(ref literal) = mul_lhs.kind;
+ if let ast::LitKind::Float(ref value, float_type) = literal.node;
+ if float_type == ast::LitFloatType::Unsuffixed;
+ then {
+ if value.as_str().ends_with('.') {
+ proposal = format!("{}0_f64.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
+ } else {
+ proposal = format!("{}_f64.to_radians()", Sugg::hir(cx, mul_lhs, ".."));
+ }
+ }
+ }
+ span_lint_and_sugg(
+ cx,
+ SUBOPTIMAL_FLOPS,
+ expr.span,
+ "conversion to radians can be done more accurately",
+ "consider using",
+ proposal,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for FloatingPointArithmetic {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // None of these operations are currently const.
+ if in_constant(cx, expr.hir_id) {
+ return;
+ }
+
+ if let ExprKind::MethodCall(path, args, _) = &expr.kind {
+ let recv_ty = cx.typeck_results().expr_ty(&args[0]);
+
+ if recv_ty.is_floating_point() {
+ match path.ident.name.as_str() {
+ "ln" => check_ln1p(cx, expr, args),
+ "log" => check_log_base(cx, expr, args),
+ "powf" => check_powf(cx, expr, args),
+ "powi" => check_powi(cx, expr, args),
+ "sqrt" => check_hypot(cx, expr, args),
+ _ => {},
+ }
+ }
+ } else {
+ check_expm1(cx, expr);
+ check_mul_add(cx, expr);
+ check_custom_abs(cx, expr);
+ check_log_division(cx, expr);
+ check_radians(cx, expr);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/format.rs b/src/tools/clippy/clippy_lints/src/format.rs
new file mode 100644
index 000000000..925a8cb8d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/format.rs
@@ -0,0 +1,123 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::{root_macro_call_first_node, FormatArgsExpn};
+use clippy_utils::source::{snippet_opt, snippet_with_applicability};
+use clippy_utils::sugg::Sugg;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::kw;
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of `format!("string literal with no
+ /// argument")` and `format!("{}", foo)` where `foo` is a string.
+ ///
+ /// ### Why is this bad?
+ /// There is no point in doing that. `format!("foo")` can
+ /// be replaced by `"foo".to_owned()` if you really need a `String`. The even
+ /// worse `&format!("foo")` is often encountered in the wild. `format!("{}",
+ /// foo)` can be replaced by `foo.clone()` if `foo: String` or `foo.to_owned()`
+ /// if `foo: &str`.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// let foo = "foo";
+ /// format!("{}", foo);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let foo = "foo";
+ /// foo.to_owned();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USELESS_FORMAT,
+ complexity,
+ "useless use of `format!`"
+}
+
+declare_lint_pass!(UselessFormat => [USELESS_FORMAT]);
+
+impl<'tcx> LateLintPass<'tcx> for UselessFormat {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let (format_args, call_site) = if_chain! {
+ if let Some(macro_call) = root_macro_call_first_node(cx, expr);
+ if cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id);
+ if let Some(format_args) = FormatArgsExpn::find_nested(cx, expr, macro_call.expn);
+ then {
+ (format_args, macro_call.span)
+ } else {
+ return
+ }
+ };
+
+ let mut applicability = Applicability::MachineApplicable;
+ if format_args.value_args.is_empty() {
+ match *format_args.format_string_parts {
+ [] => span_useless_format_empty(cx, call_site, "String::new()".to_owned(), applicability),
+ [_] => {
+ if let Some(s_src) = snippet_opt(cx, format_args.format_string_span) {
+ // Simulate macro expansion, converting {{ and }} to { and }.
+ let s_expand = s_src.replace("{{", "{").replace("}}", "}");
+ let sugg = format!("{}.to_string()", s_expand);
+ span_useless_format(cx, call_site, sugg, applicability);
+ }
+ },
+ [..] => {},
+ }
+ } else if let [value] = *format_args.value_args {
+ if_chain! {
+ if format_args.format_string_parts == [kw::Empty];
+ if match cx.typeck_results().expr_ty(value).peel_refs().kind() {
+ ty::Adt(adt, _) => cx.tcx.is_diagnostic_item(sym::String, adt.did()),
+ ty::Str => true,
+ _ => false,
+ };
+ if let Some(args) = format_args.args();
+ if args.iter().all(|arg| arg.format_trait == sym::Display && !arg.has_string_formatting());
+ then {
+ let is_new_string = match value.kind {
+ ExprKind::Binary(..) => true,
+ ExprKind::MethodCall(path, ..) => path.ident.name == sym::to_string,
+ _ => false,
+ };
+ let sugg = if is_new_string {
+ snippet_with_applicability(cx, value.span, "..", &mut applicability).into_owned()
+ } else {
+ let sugg = Sugg::hir_with_applicability(cx, value, "<arg>", &mut applicability);
+ format!("{}.to_string()", sugg.maybe_par())
+ };
+ span_useless_format(cx, call_site, sugg, applicability);
+ }
+ }
+ };
+ }
+}
+
+fn span_useless_format_empty(cx: &LateContext<'_>, span: Span, sugg: String, applicability: Applicability) {
+ span_lint_and_sugg(
+ cx,
+ USELESS_FORMAT,
+ span,
+ "useless use of `format!`",
+ "consider using `String::new()`",
+ sugg,
+ applicability,
+ );
+}
+
+fn span_useless_format(cx: &LateContext<'_>, span: Span, sugg: String, applicability: Applicability) {
+ span_lint_and_sugg(
+ cx,
+ USELESS_FORMAT,
+ span,
+ "useless use of `format!`",
+ "consider using `.to_string()`",
+ sugg,
+ applicability,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs
new file mode 100644
index 000000000..1e6feaac2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/format_args.rs
@@ -0,0 +1,199 @@
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::is_diag_trait_item;
+use clippy_utils::macros::{is_format_macro, FormatArgsArg, FormatArgsExpn};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::implements_trait;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::adjustment::{Adjust, Adjustment};
+use rustc_middle::ty::Ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, ExpnData, ExpnKind, Span, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects `format!` within the arguments of another macro that does
+ /// formatting such as `format!` itself, `write!` or `println!`. Suggests
+ /// inlining the `format!` call.
+ ///
+ /// ### Why is this bad?
+ /// The recommended code is shorter and avoids a temporary allocation.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::panic::Location;
+ /// println!("error: {}", format!("something failed at {}", Location::caller()));
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::panic::Location;
+ /// println!("error: something failed at {}", Location::caller());
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub FORMAT_IN_FORMAT_ARGS,
+ perf,
+ "`format!` used in a macro that does formatting"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for [`ToString::to_string`](https://doc.rust-lang.org/std/string/trait.ToString.html#tymethod.to_string)
+ /// applied to a type that implements [`Display`](https://doc.rust-lang.org/std/fmt/trait.Display.html)
+ /// in a macro that does formatting.
+ ///
+ /// ### Why is this bad?
+ /// Since the type implements `Display`, the use of `to_string` is
+ /// unnecessary.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::panic::Location;
+ /// println!("error: something failed at {}", Location::caller().to_string());
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::panic::Location;
+ /// println!("error: something failed at {}", Location::caller());
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub TO_STRING_IN_FORMAT_ARGS,
+ perf,
+ "`to_string` applied to a type that implements `Display` in format args"
+}
+
+declare_lint_pass!(FormatArgs => [FORMAT_IN_FORMAT_ARGS, TO_STRING_IN_FORMAT_ARGS]);
+
+impl<'tcx> LateLintPass<'tcx> for FormatArgs {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if_chain! {
+ if let Some(format_args) = FormatArgsExpn::parse(cx, expr);
+ let expr_expn_data = expr.span.ctxt().outer_expn_data();
+ let outermost_expn_data = outermost_expn_data(expr_expn_data);
+ if let Some(macro_def_id) = outermost_expn_data.macro_def_id;
+ if is_format_macro(cx, macro_def_id);
+ if let ExpnKind::Macro(_, name) = outermost_expn_data.kind;
+ if let Some(args) = format_args.args();
+ then {
+ for (i, arg) in args.iter().enumerate() {
+ if arg.format_trait != sym::Display {
+ continue;
+ }
+ if arg.has_string_formatting() {
+ continue;
+ }
+ if is_aliased(&args, i) {
+ continue;
+ }
+ check_format_in_format_args(cx, outermost_expn_data.call_site, name, arg.value);
+ check_to_string_in_format_args(cx, name, arg.value);
+ }
+ }
+ }
+ }
+}
+
+fn outermost_expn_data(expn_data: ExpnData) -> ExpnData {
+ if expn_data.call_site.from_expansion() {
+ outermost_expn_data(expn_data.call_site.ctxt().outer_expn_data())
+ } else {
+ expn_data
+ }
+}
+
+fn check_format_in_format_args(cx: &LateContext<'_>, call_site: Span, name: Symbol, arg: &Expr<'_>) {
+ let expn_data = arg.span.ctxt().outer_expn_data();
+ if expn_data.call_site.from_expansion() {
+ return;
+ }
+ let Some(mac_id) = expn_data.macro_def_id else { return };
+ if !cx.tcx.is_diagnostic_item(sym::format_macro, mac_id) {
+ return;
+ }
+ span_lint_and_then(
+ cx,
+ FORMAT_IN_FORMAT_ARGS,
+ call_site,
+ &format!("`format!` in `{}!` args", name),
+ |diag| {
+ diag.help(&format!(
+ "combine the `format!(..)` arguments with the outer `{}!(..)` call",
+ name
+ ));
+ diag.help("or consider changing `format!` to `format_args!`");
+ },
+ );
+}
+
+fn check_to_string_in_format_args(cx: &LateContext<'_>, name: Symbol, value: &Expr<'_>) {
+ if_chain! {
+ if !value.span.from_expansion();
+ if let ExprKind::MethodCall(_, [receiver], _) = value.kind;
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(value.hir_id);
+ if is_diag_trait_item(cx, method_def_id, sym::ToString);
+ let receiver_ty = cx.typeck_results().expr_ty(receiver);
+ if let Some(display_trait_id) = cx.tcx.get_diagnostic_item(sym::Display);
+ if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
+ then {
+ let (n_needed_derefs, target) = count_needed_derefs(
+ receiver_ty,
+ cx.typeck_results().expr_adjustments(receiver).iter(),
+ );
+ if implements_trait(cx, target, display_trait_id, &[]) {
+ if n_needed_derefs == 0 {
+ span_lint_and_sugg(
+ cx,
+ TO_STRING_IN_FORMAT_ARGS,
+ value.span.with_lo(receiver.span.hi()),
+ &format!("`to_string` applied to a type that implements `Display` in `{}!` args", name),
+ "remove this",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint_and_sugg(
+ cx,
+ TO_STRING_IN_FORMAT_ARGS,
+ value.span,
+ &format!("`to_string` applied to a type that implements `Display` in `{}!` args", name),
+ "use this",
+ format!("{:*>width$}{}", "", receiver_snippet, width = n_needed_derefs),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+}
+
+// Returns true if `args[i]` "refers to" or "is referred to by" another argument.
+fn is_aliased(args: &[FormatArgsArg<'_>], i: usize) -> bool {
+ let value = args[i].value;
+ args.iter()
+ .enumerate()
+ .any(|(j, arg)| i != j && std::ptr::eq(value, arg.value))
+}
+
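+// Walks the deref adjustments of a receiver and returns how many derefs must
+// be written explicitly (up to and including the last overloaded `Deref`),
+// together with the type reached after all consecutive `Deref` adjustments.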
+fn count_needed_derefs<'tcx, I>(mut ty: Ty<'tcx>, mut iter: I) -> (usize, Ty<'tcx>)
+where
+ I: Iterator<Item = &'tcx Adjustment<'tcx>>,
+{
+ let mut n_total = 0;
+ let mut n_needed = 0;
+ loop {
+ if let Some(Adjustment {
+ kind: Adjust::Deref(overloaded_deref),
+ target,
+ }) = iter.next()
+ {
+ n_total += 1;
+ if overloaded_deref.is_some() {
+ n_needed = n_total;
+ }
+ ty = *target;
+ } else {
+ return (n_needed, ty);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/format_impl.rs b/src/tools/clippy/clippy_lints/src/format_impl.rs
new file mode 100644
index 000000000..04b5be6c8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/format_impl.rs
@@ -0,0 +1,253 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::macros::{is_format_macro, root_macro_call_first_node, FormatArgsArg, FormatArgsExpn};
+use clippy_utils::{get_parent_as_impl, is_diag_trait_item, path_to_local, peel_ref_operators};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, Impl, ImplItem, ImplItemKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{sym, symbol::kw, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for format trait implementations (e.g. `Display`) with a recursive call to itself
+ /// which uses `self` as a parameter.
+ /// This is typically done indirectly with the `write!` macro or with `to_string()`.
+ ///
+ /// ### Why is this bad?
+ /// This will lead to infinite recursion and a stack overflow.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// use std::fmt;
+ ///
+ /// struct Structure(i32);
+ /// impl fmt::Display for Structure {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "{}", self.to_string())
+ /// }
+ /// }
+ ///
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::fmt;
+ ///
+ /// struct Structure(i32);
+ /// impl fmt::Display for Structure {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "{}", self.0)
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub RECURSIVE_FORMAT_IMPL,
+ correctness,
+ "Format trait method called while implementing the same Format trait"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `println`, `print`, `eprintln` or `eprint` in an
+ /// implementation of a formatting trait.
+ ///
+ /// ### Why is this bad?
+ /// Using a print macro is likely unintentional since formatting traits
+ /// should write to the `Formatter`, not stdout/stderr.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::fmt::{Display, Error, Formatter};
+ ///
+ /// struct S;
+ /// impl Display for S {
+ /// fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
+ /// println!("S");
+ ///
+ /// Ok(())
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::fmt::{Display, Error, Formatter};
+ ///
+ /// struct S;
+ /// impl Display for S {
+ /// fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
+ /// writeln!(f, "S");
+ ///
+ /// Ok(())
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub PRINT_IN_FORMAT_IMPL,
+ suspicious,
+ "use of a print macro in a formatting trait impl"
+}
+
+#[derive(Clone, Copy)]
+struct FormatTrait {
+ /// e.g. `sym::Display`
+ name: Symbol,
+ /// `f` in `fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {}`
+ formatter_name: Option<Symbol>,
+}
+
+#[derive(Default)]
+pub struct FormatImpl {
+ // Whether we are inside a `Display` or `Debug` trait impl - `None` for neither
+ format_trait_impl: Option<FormatTrait>,
+}
+
+impl FormatImpl {
+ pub fn new() -> Self {
+ Self {
+ format_trait_impl: None,
+ }
+ }
+}
+
+impl_lint_pass!(FormatImpl => [RECURSIVE_FORMAT_IMPL, PRINT_IN_FORMAT_IMPL]);
+
+impl<'tcx> LateLintPass<'tcx> for FormatImpl {
+ fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &ImplItem<'_>) {
+ self.format_trait_impl = is_format_trait_impl(cx, impl_item);
+ }
+
+ fn check_impl_item_post(&mut self, cx: &LateContext<'_>, impl_item: &ImplItem<'_>) {
+ // Assume that `Debug` and `Display` impls are not nested within each other
+ if is_format_trait_impl(cx, impl_item).is_some() {
+ self.format_trait_impl = None;
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let Some(format_trait_impl) = self.format_trait_impl else { return };
+
+ if format_trait_impl.name == sym::Display {
+ check_to_string_in_display(cx, expr);
+ }
+
+ check_self_in_format_args(cx, expr, format_trait_impl);
+ check_print_in_format_impl(cx, expr, format_trait_impl);
+ }
+}
+
+fn check_to_string_in_display(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ // Get the hir_id of the object we are calling the method on
+ if let ExprKind::MethodCall(path, [ref self_arg, ..], _) = expr.kind;
+ // Is the method `to_string()`?
+ if path.ident.name == sym::to_string;
+ // Is the method a part of the ToString trait? (i.e. not to_string() implemented
+ // separately)
+ if let Some(expr_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if is_diag_trait_item(cx, expr_def_id, sym::ToString);
+ // Is the method called on `self`?
+ if let ExprKind::Path(QPath::Resolved(_, path)) = self_arg.kind;
+ if let [segment] = path.segments;
+ if segment.ident.name == kw::SelfLower;
+ then {
+ span_lint(
+ cx,
+ RECURSIVE_FORMAT_IMPL,
+ expr.span,
+ "using `self.to_string` in `fmt::Display` implementation will cause infinite recursion",
+ );
+ }
+ }
+}
+
+fn check_self_in_format_args<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, impl_trait: FormatTrait) {
+ // Check each arg in format calls - do we ever format `self` with the
+ // trait being implemented (directly or via deref)?
+ if_chain! {
+ if let Some(outer_macro) = root_macro_call_first_node(cx, expr);
+ if let macro_def_id = outer_macro.def_id;
+ if let Some(format_args) = FormatArgsExpn::find_nested(cx, expr, outer_macro.expn);
+ if is_format_macro(cx, macro_def_id);
+ if let Some(args) = format_args.args();
+ then {
+ for arg in args {
+ if arg.format_trait != impl_trait.name {
+ continue;
+ }
+ check_format_arg_self(cx, expr, &arg, impl_trait);
+ }
+ }
+ }
+}
+
+fn check_format_arg_self(cx: &LateContext<'_>, expr: &Expr<'_>, arg: &FormatArgsArg<'_>, impl_trait: FormatTrait) {
+ // Handle multiple dereferencing of references, e.g. `&&self`, as well as
+ // `*self`, which is equivalent to `self` here because the argument to
+ // `fmt` is itself a reference: `&self`
+ let reference = peel_ref_operators(cx, arg.value);
+ let map = cx.tcx.hir();
+ // Is the reference self?
+ if path_to_local(reference).map(|x| map.name(x)) == Some(kw::SelfLower) {
+ let FormatTrait { name, .. } = impl_trait;
+ span_lint(
+ cx,
+ RECURSIVE_FORMAT_IMPL,
+ expr.span,
+ &format!("using `self` as `{name}` in `impl {name}` will cause infinite recursion"),
+ );
+ }
+}
+
+fn check_print_in_format_impl(cx: &LateContext<'_>, expr: &Expr<'_>, impl_trait: FormatTrait) {
+ if_chain! {
+ if let Some(macro_call) = root_macro_call_first_node(cx, expr);
+ if let Some(name) = cx.tcx.get_diagnostic_name(macro_call.def_id);
+ then {
+ let replacement = match name {
+ sym::print_macro | sym::eprint_macro => "write",
+ sym::println_macro | sym::eprintln_macro => "writeln",
+ _ => return,
+ };
+
+ let name = name.as_str().strip_suffix("_macro").unwrap();
+
+ span_lint_and_sugg(
+ cx,
+ PRINT_IN_FORMAT_IMPL,
+ macro_call.span,
+ &format!("use of `{}!` in `{}` impl", name, impl_trait.name),
+ "replace with",
+ if let Some(formatter_name) = impl_trait.formatter_name {
+ format!("{}!({}, ..)", replacement, formatter_name)
+ } else {
+ format!("{}!(..)", replacement)
+ },
+ Applicability::HasPlaceholders,
+ );
+ }
+ }
+}
+
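+// If `impl_item` is the `fmt` method of a `Debug` or `Display` impl, returns
+// the trait name and the name of the `Formatter` parameter (if any).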
+fn is_format_trait_impl(cx: &LateContext<'_>, impl_item: &ImplItem<'_>) -> Option<FormatTrait> {
+ if_chain! {
+ if impl_item.ident.name == sym::fmt;
+ if let ImplItemKind::Fn(_, body_id) = impl_item.kind;
+ if let Some(Impl { of_trait: Some(trait_ref),..}) = get_parent_as_impl(cx.tcx, impl_item.hir_id());
+ if let Some(did) = trait_ref.trait_def_id();
+ if let Some(name) = cx.tcx.get_diagnostic_name(did);
+ if matches!(name, sym::Debug | sym::Display);
+ then {
+ let body = cx.tcx.hir().body(body_id);
+ let formatter_name = body.params.get(1)
+ .and_then(|param| param.pat.simple_ident())
+ .map(|ident| ident.name);
+
+ Some(FormatTrait {
+ name,
+ formatter_name,
+ })
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/format_push_string.rs b/src/tools/clippy/clippy_lints/src/format_push_string.rs
new file mode 100644
index 000000000..ebf5ab086
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/format_push_string.rs
@@ -0,0 +1,83 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{match_def_path, paths, peel_hir_expr_refs};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects cases where the result of a `format!` call is
+ /// appended to an existing `String`.
+ ///
+ /// ### Why is this bad?
+ /// Introduces an extra, avoidable heap allocation.
+ ///
+ /// ### Known problems
+ /// `format!` returns a `String` but `write!` returns a `Result`.
+ /// Thus you are forced to ignore the `Err` variant to achieve the same API.
+ ///
+ /// While using `write!` in the suggested way should never fail, this isn't necessarily clear to the programmer.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut s = String::new();
+ /// s += &format!("0x{:X}", 1024);
+ /// s.push_str(&format!("0x{:X}", 1024));
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::fmt::Write as _; // import without risk of name clashing
+ ///
+ /// let mut s = String::new();
+ /// let _ = write!(s, "0x{:X}", 1024);
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub FORMAT_PUSH_STRING,
+ restriction,
+ "`format!(..)` appended to existing `String`"
+}
+declare_lint_pass!(FormatPushString => [FORMAT_PUSH_STRING]);
+
+fn is_string(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(e).peel_refs(), sym::String)
+}
+fn is_format(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ if let Some(macro_def_id) = e.span.ctxt().outer_expn_data().macro_def_id {
+ cx.tcx.get_diagnostic_name(macro_def_id) == Some(sym::format_macro)
+ } else {
+ false
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for FormatPushString {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let arg = match expr.kind {
+ ExprKind::MethodCall(_, [_, arg], _) => {
+ if let Some(fn_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) &&
+ match_def_path(cx, fn_def_id, &paths::PUSH_STR) {
+ arg
+ } else {
+ return;
+ }
+ }
+ ExprKind::AssignOp(op, left, arg)
+ if op.node == BinOpKind::Add && is_string(cx, left) => {
+ arg
+ },
+ _ => return,
+ };
+ let (arg, _) = peel_hir_expr_refs(arg);
+ if is_format(cx, arg) {
+ span_lint_and_help(
+ cx,
+ FORMAT_PUSH_STRING,
+ expr.span,
+ "`format!(..)` appended to existing `String`",
+ None,
+ "consider using `write!` to avoid the extra allocation",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/formatting.rs b/src/tools/clippy/clippy_lints/src/formatting.rs
new file mode 100644
index 000000000..db0166da5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/formatting.rs
@@ -0,0 +1,341 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note};
+use clippy_utils::source::snippet_opt;
+use if_chain::if_chain;
+use rustc_ast::ast::{BinOpKind, Block, Expr, ExprKind, StmtKind, UnOp};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of the non-existent `=*`, `=!` and `=-`
+ /// operators.
+ ///
+ /// ### Why is this bad?
+ /// This is either a typo of `*=`, `!=` or `-=` or
+ /// confusing.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// a =- 42; // confusing, should it be `a -= 42` or `a = -42`?
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SUSPICIOUS_ASSIGNMENT_FORMATTING,
+ suspicious,
+ "suspicious formatting of `*=`, `-=` or `!=`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks the formatting of a unary operator on the right hand side
+ /// of a binary operator. It lints if there is no space between the binary and unary operators,
+ /// but there is a space between the unary and its operand.
+ ///
+ /// ### Why is this bad?
+ /// This is either a typo in the binary operator or confusing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let foo = true;
+ /// # let bar = false;
+ /// // &&! looks like a different operator
+ /// if foo &&! bar {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let foo = true;
+ /// # let bar = false;
+ /// if foo && !bar {}
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub SUSPICIOUS_UNARY_OP_FORMATTING,
+ suspicious,
+ "suspicious formatting of unary `-` or `!` on the RHS of a BinOp"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for formatting of `else`. It lints if the `else`
+ /// is followed immediately by a newline or the `else` seems to be missing.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a refactoring remnant; even if the
+ /// code is correct, it might look confusing.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// if foo {
+ /// } { // looks like an `else` is missing here
+ /// }
+ ///
+ /// if foo {
+ /// } if bar { // looks like an `else` is missing here
+ /// }
+ ///
+ /// if foo {
+ /// } else
+ ///
+ /// { // this is the `else` block of the previous `if`, but should it be?
+ /// }
+ ///
+ /// if foo {
+ /// } else
+ ///
+ /// if bar { // this is the `else` block of the previous `if`, but should it be?
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SUSPICIOUS_ELSE_FORMATTING,
+ suspicious,
+ "suspicious formatting of `else`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a possibly missing comma in an array. It lints if
+ /// an array element is a binary operator expression and it spans two lines.
+ ///
+ /// ### Why is this bad?
+ /// This could lead to unexpected results.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let a = &[
+ /// -1, -2, -3 // <= no comma here
+ /// -4, -5, -6
+ /// ];
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub POSSIBLE_MISSING_COMMA,
+ correctness,
+ "possible missing comma in array"
+}
+
+declare_lint_pass!(Formatting => [
+ SUSPICIOUS_ASSIGNMENT_FORMATTING,
+ SUSPICIOUS_UNARY_OP_FORMATTING,
+ SUSPICIOUS_ELSE_FORMATTING,
+ POSSIBLE_MISSING_COMMA
+]);
+
+impl EarlyLintPass for Formatting {
+ fn check_block(&mut self, cx: &EarlyContext<'_>, block: &Block) {
+ for w in block.stmts.windows(2) {
+ if let (StmtKind::Expr(first), StmtKind::Expr(second) | StmtKind::Semi(second)) = (&w[0].kind, &w[1].kind) {
+ check_missing_else(cx, first, second);
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ check_assign(cx, expr);
+ check_unop(cx, expr);
+ check_else(cx, expr);
+ check_array(cx, expr);
+ }
+}
+
+/// Implementation of the `SUSPICIOUS_ASSIGNMENT_FORMATTING` lint.
+fn check_assign(cx: &EarlyContext<'_>, expr: &Expr) {
+ if let ExprKind::Assign(ref lhs, ref rhs, _) = expr.kind {
+ if !lhs.span.from_expansion() && !rhs.span.from_expansion() {
+ let eq_span = lhs.span.between(rhs.span);
+ if let ExprKind::Unary(op, ref sub_rhs) = rhs.kind {
+ if let Some(eq_snippet) = snippet_opt(cx, eq_span) {
+ let op = UnOp::to_string(op);
+ let eqop_span = lhs.span.between(sub_rhs.span);
+ if eq_snippet.ends_with('=') {
+ span_lint_and_note(
+ cx,
+ SUSPICIOUS_ASSIGNMENT_FORMATTING,
+ eqop_span,
+ &format!(
+ "this looks like you are trying to use `.. {op}= ..`, but you \
+ really are doing `.. = ({op} ..)`",
+ op = op
+ ),
+ None,
+ &format!("to remove this lint, use either `{op}=` or `= {op}`", op = op),
+ );
+ }
+ }
+ }
+ }
+ }
+}
+
+/// Implementation of the `SUSPICIOUS_UNARY_OP_FORMATTING` lint.
+fn check_unop(cx: &EarlyContext<'_>, expr: &Expr) {
+ if_chain! {
+ if let ExprKind::Binary(ref binop, ref lhs, ref rhs) = expr.kind;
+ if !lhs.span.from_expansion() && !rhs.span.from_expansion();
+ // span between BinOp LHS and RHS
+ let binop_span = lhs.span.between(rhs.span);
+ // if RHS is an UnOp
+ if let ExprKind::Unary(op, ref un_rhs) = rhs.kind;
+ // from UnOp operator to UnOp operand
+ let unop_operand_span = rhs.span.until(un_rhs.span);
+ if let Some(binop_snippet) = snippet_opt(cx, binop_span);
+ if let Some(unop_operand_snippet) = snippet_opt(cx, unop_operand_span);
+ let binop_str = BinOpKind::to_string(&binop.node);
+ // no space after BinOp operator and space after UnOp operator
+ if binop_snippet.ends_with(binop_str) && unop_operand_snippet.ends_with(' ');
+ then {
+ let unop_str = UnOp::to_string(op);
+ let eqop_span = lhs.span.between(un_rhs.span);
+ span_lint_and_help(
+ cx,
+ SUSPICIOUS_UNARY_OP_FORMATTING,
+ eqop_span,
+ &format!(
+ "by not having a space between `{binop}` and `{unop}` it looks like \
+ `{binop}{unop}` is a single operator",
+ binop = binop_str,
+ unop = unop_str
+ ),
+ None,
+ &format!(
+ "put a space between `{binop}` and `{unop}` and remove the space after `{unop}`",
+ binop = binop_str,
+ unop = unop_str
+ ),
+ );
+ }
+ }
+}
+
+/// Implementation of the `SUSPICIOUS_ELSE_FORMATTING` lint for weird `else`.
+fn check_else(cx: &EarlyContext<'_>, expr: &Expr) {
+ if_chain! {
+ if let ExprKind::If(_, then, Some(else_)) = &expr.kind;
+ if is_block(else_) || is_if(else_);
+ if !then.span.from_expansion() && !else_.span.from_expansion();
+ if !in_external_macro(cx.sess(), expr.span);
+
+ // workaround for rust-lang/rust#43081
+ if expr.span.lo().0 != 0 && expr.span.hi().0 != 0;
+
+ // this will be a span from the closing ‘}’ of the “then” block (excluding) to
+ // the “if” of the “else if” block (excluding)
+ let else_span = then.span.between(else_.span);
+
+ // the snippet should look like " else \n " with maybe comments anywhere
+ // it’s bad when there is a ‘\n’ after the “else”
+ if let Some(else_snippet) = snippet_opt(cx, else_span);
+ if let Some((pre_else, post_else)) = else_snippet.split_once("else");
+ if let Some((_, post_else_post_eol)) = post_else.split_once('\n');
+
+ then {
+ // Allow Allman style braces `} \n else \n {`
+ if_chain! {
+ if is_block(else_);
+ if let Some((_, pre_else_post_eol)) = pre_else.split_once('\n');
+ // Exactly one eol before and after the else
+ if !pre_else_post_eol.contains('\n');
+ if !post_else_post_eol.contains('\n');
+ then {
+ return;
+ }
+ }
+
+ let else_desc = if is_if(else_) { "if" } else { "{..}" };
+ span_lint_and_note(
+ cx,
+ SUSPICIOUS_ELSE_FORMATTING,
+ else_span,
+ &format!("this is an `else {}` but the formatting might hide it", else_desc),
+ None,
+ &format!(
+ "to remove this lint, remove the `else` or remove the new line between \
+ `else` and `{}`",
+ else_desc,
+ ),
+ );
+ }
+ }
+}
+
+#[must_use]
+fn has_unary_equivalent(bin_op: BinOpKind) -> bool {
+ // &, *, -
+ bin_op == BinOpKind::And || bin_op == BinOpKind::Mul || bin_op == BinOpKind::Sub
+}
+
+fn indentation(cx: &EarlyContext<'_>, span: Span) -> usize {
+ cx.sess().source_map().lookup_char_pos(span.lo()).col.0
+}
+
+/// Implementation of the `POSSIBLE_MISSING_COMMA` lint for array
+fn check_array(cx: &EarlyContext<'_>, expr: &Expr) {
+ if let ExprKind::Array(ref array) = expr.kind {
+ for element in array {
+ if_chain! {
+ if let ExprKind::Binary(ref op, ref lhs, _) = element.kind;
+ if has_unary_equivalent(op.node) && lhs.span.ctxt() == op.span.ctxt();
+ let space_span = lhs.span.between(op.span);
+ if let Some(space_snippet) = snippet_opt(cx, space_span);
+ let lint_span = lhs.span.with_lo(lhs.span.hi());
+ if space_snippet.contains('\n');
+ if indentation(cx, op.span) <= indentation(cx, lhs.span);
+ then {
+ span_lint_and_note(
+ cx,
+ POSSIBLE_MISSING_COMMA,
+ lint_span,
+ "possibly missing a comma here",
+ None,
+ "to remove this lint, add a comma or write the expr in a single line",
+ );
+ }
+ }
+ }
+ }
+}
+
+fn check_missing_else(cx: &EarlyContext<'_>, first: &Expr, second: &Expr) {
+ if_chain! {
+ if !first.span.from_expansion() && !second.span.from_expansion();
+ if let ExprKind::If(cond_expr, ..) = &first.kind;
+ if is_block(second) || is_if(second);
+
+ // Proc-macros can give weird spans. Make sure this is actually an `if`.
+ if let Some(if_snip) = snippet_opt(cx, first.span.until(cond_expr.span));
+ if if_snip.starts_with("if");
+
+ // If there is a line break between the two expressions, don't lint.
+ // If there is a non-whitespace character, this span came from a proc-macro.
+ let else_span = first.span.between(second.span);
+ if let Some(else_snippet) = snippet_opt(cx, else_span);
+ if !else_snippet.chars().any(|c| c == '\n' || !c.is_whitespace());
+ then {
+ let (looks_like, next_thing) = if is_if(second) {
+ ("an `else if`", "the second `if`")
+ } else {
+ ("an `else {..}`", "the next block")
+ };
+
+ span_lint_and_note(
+ cx,
+ SUSPICIOUS_ELSE_FORMATTING,
+ else_span,
+ &format!("this looks like {} but the `else` is missing", looks_like),
+ None,
+ &format!(
+ "to remove this lint, add the missing `else` or add a new line before {}",
+ next_thing,
+ ),
+ );
+ }
+ }
+}
+
+fn is_block(expr: &Expr) -> bool {
+ matches!(expr.kind, ExprKind::Block(..))
+}
+
+/// Check if the expression is an `if` or `if let`
+fn is_if(expr: &Expr) -> bool {
+ matches!(expr.kind, ExprKind::If(..))
+}
diff --git a/src/tools/clippy/clippy_lints/src/from_over_into.rs b/src/tools/clippy/clippy_lints/src/from_over_into.rs
new file mode 100644
index 000000000..5d25c1d06
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/from_over_into.rs
@@ -0,0 +1,81 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{meets_msrv, msrvs};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Searches for implementations of the `Into<..>` trait and suggests implementing `From<..>` instead.
+ ///
+ /// ### Why is this bad?
+ /// According to the std docs, implementing `From<..>` is preferred since it gives you `Into<..>` for free, where the reverse isn't true.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct StringWrapper(String);
+ ///
+ /// impl Into<StringWrapper> for String {
+ /// fn into(self) -> StringWrapper {
+ /// StringWrapper(self)
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct StringWrapper(String);
+ ///
+ /// impl From<String> for StringWrapper {
+ /// fn from(s: String) -> StringWrapper {
+ /// StringWrapper(s)
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub FROM_OVER_INTO,
+ style,
+ "Warns on implementations of `Into<..>` to use `From<..>`"
+}
+
+pub struct FromOverInto {
+ msrv: Option<RustcVersion>,
+}
+
+impl FromOverInto {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ FromOverInto { msrv }
+ }
+}
+
+impl_lint_pass!(FromOverInto => [FROM_OVER_INTO]);
+
+impl<'tcx> LateLintPass<'tcx> for FromOverInto {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ if !meets_msrv(self.msrv, msrvs::RE_REBALANCING_COHERENCE) {
+ return;
+ }
+
+ if_chain! {
+ if let hir::ItemKind::Impl{ .. } = &item.kind;
+ if let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(item.def_id);
+ if cx.tcx.is_diagnostic_item(sym::Into, impl_trait_ref.def_id);
+
+ then {
+ span_lint_and_help(
+ cx,
+ FROM_OVER_INTO,
+ cx.tcx.sess.source_map().guess_head_span(item.span),
+ "an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true",
+ None,
+ &format!("consider to implement `From<{}>` instead", impl_trait_ref.self_ty()),
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs b/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs
new file mode 100644
index 000000000..57b075132
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs
@@ -0,0 +1,103 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{def, Expr, ExprKind, PrimTy, QPath, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::Ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Checks for function invocations of the form `primitive::from_str_radix(s, 10)`
+ ///
+ /// ### Why is this bad?
+ ///
+ /// This specific common use case can be rewritten as `s.parse::<primitive>()`
+ /// (and in most cases, the turbofish can be removed), which reduces code length
+ /// and complexity.
+ ///
+ /// ### Known problems
+ ///
+ /// This lint may suggest using (&<expression>).parse() instead of <expression>.parse() directly
+ /// in some cases, which is correct but adds unnecessary complexity to the code.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let input: &str = get_input();
+ /// let num = u16::from_str_radix(input, 10)?;
+ /// ```
+ /// Use instead:
+ /// ```ignore
+ /// let input: &str = get_input();
+ /// let num: u16 = input.parse()?;
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub FROM_STR_RADIX_10,
+ style,
+ "from_str_radix with radix 10"
+}
+
+declare_lint_pass!(FromStrRadix10 => [FROM_STR_RADIX_10]);
+
+impl<'tcx> LateLintPass<'tcx> for FromStrRadix10 {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, exp: &Expr<'tcx>) {
+ if_chain! {
+ if let ExprKind::Call(maybe_path, arguments) = &exp.kind;
+ if let ExprKind::Path(QPath::TypeRelative(ty, pathseg)) = &maybe_path.kind;
+
+ // check if the first part of the path is some integer primitive
+ if let TyKind::Path(ty_qpath) = &ty.kind;
+ let ty_res = cx.qpath_res(ty_qpath, ty.hir_id);
+ if let def::Res::PrimTy(prim_ty) = ty_res;
+ if matches!(prim_ty, PrimTy::Int(_) | PrimTy::Uint(_));
+
+ // check if the second part of the path indeed calls the associated
+ // function `from_str_radix`
+ if pathseg.ident.name.as_str() == "from_str_radix";
+
+ // check if the second argument is a primitive `10`
+ if arguments.len() == 2;
+ if let ExprKind::Lit(lit) = &arguments[1].kind;
+ if let rustc_ast::ast::LitKind::Int(10, _) = lit.node;
+
+ then {
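+ // if the argument is a borrow of a `String` or `str`, drop the `&`: `.parse()` can be called on the place expression directly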
+ let expr = if let ExprKind::AddrOf(_, _, expr) = &arguments[0].kind {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if is_ty_stringish(cx, ty) {
+ expr
+ } else {
+ &arguments[0]
+ }
+ } else {
+ &arguments[0]
+ };
+
+ let sugg = Sugg::hir_with_applicability(
+ cx,
+ expr,
+ "<string>",
+ &mut Applicability::MachineApplicable
+ ).maybe_par();
+
+ span_lint_and_sugg(
+ cx,
+ FROM_STR_RADIX_10,
+ exp.span,
+ "this call to `from_str_radix` can be replaced with a call to `str::parse`",
+ "try",
+ format!("{}.parse::<{}>()", sugg, prim_ty.name_str()),
+ Applicability::MaybeIncorrect
+ );
+ }
+ }
+ }
+}
+
+/// Checks if a Ty is `String` or `&str`
+fn is_ty_stringish(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
+ is_type_diagnostic_item(cx, ty, sym::String) || is_type_diagnostic_item(cx, ty, sym::str)
+}
diff --git a/src/tools/clippy/clippy_lints/src/functions/mod.rs b/src/tools/clippy/clippy_lints/src/functions/mod.rs
new file mode 100644
index 000000000..73261fb8a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/functions/mod.rs
@@ -0,0 +1,276 @@
+mod must_use;
+mod not_unsafe_ptr_arg_deref;
+mod result_unit_err;
+mod too_many_arguments;
+mod too_many_lines;
+
+use rustc_hir as hir;
+use rustc_hir::intravisit;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions with too many parameters.
+ ///
+ /// ### Why is this bad?
+ /// Functions with lots of parameters are considered bad
+ /// style and reduce readability (“what does the 5th parameter mean?”). Consider
+ /// grouping some parameters into a new type.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct Color;
+ /// fn foo(x: u32, y: u32, name: &str, c: Color, w: f32, h: f32, a: f32, b: f32) {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TOO_MANY_ARGUMENTS,
+ complexity,
+ "functions with too many arguments"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions with a large amount of lines.
+ ///
+ /// ### Why is this bad?
+ /// Functions with a lot of lines are harder to understand
+ /// due to having to look at a larger amount of code to understand what the
+ /// function is doing. Consider splitting the body of the function into
+ /// multiple functions.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn im_too_long() {
+ /// println!("");
+ /// // ... 100 more LoC
+ /// println!("");
+ /// }
+ /// ```
+ #[clippy::version = "1.34.0"]
+ pub TOO_MANY_LINES,
+ pedantic,
+ "functions with too many lines"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for public functions that dereference raw pointer
+ /// arguments but are not marked `unsafe`.
+ ///
+ /// ### Why is this bad?
+ /// The function should probably be marked `unsafe`, since
+ /// for an arbitrary raw pointer, there is no way of telling for sure if it is
+ /// valid.
+ ///
+ /// ### Known problems
+ /// * It does not check functions recursively, so if the pointer is passed to a
+ /// private non-`unsafe` function which does the dereferencing, the lint won't
+ /// trigger.
+ /// * It only checks for arguments whose types are raw pointers, not raw pointers
+ /// obtained from an argument in some other way (`fn foo(bar: &[*const u8])` or
+ /// `some_argument.get_raw_ptr()`).
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// pub fn foo(x: *const u8) {
+ /// println!("{}", unsafe { *x });
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// pub unsafe fn foo(x: *const u8) {
+ /// println!("{}", unsafe { *x });
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NOT_UNSAFE_PTR_ARG_DEREF,
+ correctness,
+ "public functions dereferencing raw pointer arguments but not marked `unsafe`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a `#[must_use]` attribute on
+ /// unit-returning functions and methods.
+ ///
+ /// ### Why is this bad?
+ /// Unit values are useless. The attribute is likely
+ /// a remnant of a refactoring that removed the return type.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// #[must_use]
+ /// fn useless() { }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub MUST_USE_UNIT,
+ style,
+ "`#[must_use]` attribute on a unit-returning function / method"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a `#[must_use]` attribute without
+ /// further information on functions and methods that return a type already
+ /// marked as `#[must_use]`.
+ ///
+ /// ### Why is this bad?
+ /// The attribute isn't needed. Not using the result
+ /// will already be reported. Alternatively, one can add some text to the
+ /// attribute to improve the lint message.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// #[must_use]
+ /// fn double_must_use() -> Result<(), ()> {
+ /// unimplemented!();
+ /// }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub DOUBLE_MUST_USE,
+ style,
+ "`#[must_use]` attribute on a `#[must_use]`-returning function / method"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for public functions that have no
+ /// `#[must_use]` attribute, but return something not already marked
+ /// `#[must_use]`, have no mutable arguments, and mutate no statics.
+ ///
+ /// ### Why is this bad?
+ /// Not bad at all, this lint just shows places where
+ /// you could add the attribute.
+ ///
+ /// ### Known problems
+ /// The lint only checks the arguments for mutable
+ /// types without checking whether they are actually changed. On the other hand,
+ /// it also ignores a broad range of potentially interesting side effects,
+ /// because we cannot decide whether the programmer intends the function to
+ /// be called for the side effect or the result. Expect many false
+ /// positives. At least we don't lint if the result type is unit or already
+ /// `#[must_use]`.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// // this could be annotated with `#[must_use]`.
+ /// fn id<T>(t: T) -> T { t }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub MUST_USE_CANDIDATE,
+ pedantic,
+ "function or method that could take a `#[must_use]` attribute"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for public functions that return a `Result`
+ /// with an `Err` type of `()`. It suggests using a custom type that
+ /// implements `std::error::Error`.
+ ///
+ /// ### Why is this bad?
+ /// Unit does not implement `Error` and carries no
+ /// further information about what went wrong.
+ ///
+ /// ### Known problems
+ /// Of course, this lint assumes that `Result` is used
+ /// for a fallible operation (which is, after all, the intended use). However,
+ /// code may opt to (mis)use it as a basic two-variant enum. In that case,
+ /// the suggestion is misguided, and the code should use a custom enum
+ /// instead.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// pub fn read_u8() -> Result<u8, ()> { Err(()) }
+ /// ```
+ /// should become
+ /// ```rust,should_panic
+ /// use std::fmt;
+ ///
+ /// #[derive(Debug)]
+ /// pub struct EndOfStream;
+ ///
+ /// impl fmt::Display for EndOfStream {
+ /// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ /// write!(f, "End of Stream")
+ /// }
+ /// }
+ ///
+ /// impl std::error::Error for EndOfStream { }
+ ///
+ /// pub fn read_u8() -> Result<u8, EndOfStream> { Err(EndOfStream) }
+ /// # fn main() {
+ /// #     read_u8().unwrap();
+ /// # }
+ /// ```
+ ///
+ /// Note that there are crates that simplify creating the error type, e.g.
+ /// [`thiserror`](https://docs.rs/thiserror).
+ #[clippy::version = "1.49.0"]
+ pub RESULT_UNIT_ERR,
+ style,
+ "public function returning `Result` with an `Err` type of `()`"
+}
+
+#[derive(Copy, Clone)]
+pub struct Functions {
+ too_many_arguments_threshold: u64,
+ too_many_lines_threshold: u64,
+}
+
+impl Functions {
+ pub fn new(too_many_arguments_threshold: u64, too_many_lines_threshold: u64) -> Self {
+ Self {
+ too_many_arguments_threshold,
+ too_many_lines_threshold,
+ }
+ }
+}
+
+impl_lint_pass!(Functions => [
+ TOO_MANY_ARGUMENTS,
+ TOO_MANY_LINES,
+ NOT_UNSAFE_PTR_ARG_DEREF,
+ MUST_USE_UNIT,
+ DOUBLE_MUST_USE,
+ MUST_USE_CANDIDATE,
+ RESULT_UNIT_ERR,
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Functions {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: intravisit::FnKind<'tcx>,
+ decl: &'tcx hir::FnDecl<'_>,
+ body: &'tcx hir::Body<'_>,
+ span: Span,
+ hir_id: hir::HirId,
+ ) {
+ too_many_arguments::check_fn(cx, kind, decl, span, hir_id, self.too_many_arguments_threshold);
+ too_many_lines::check_fn(cx, kind, span, body, self.too_many_lines_threshold);
+ not_unsafe_ptr_arg_deref::check_fn(cx, kind, decl, body, hir_id);
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ must_use::check_item(cx, item);
+ result_unit_err::check_item(cx, item);
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
+ must_use::check_impl_item(cx, item);
+ result_unit_err::check_impl_item(cx, item);
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
+ too_many_arguments::check_trait_item(cx, item, self.too_many_arguments_threshold);
+ not_unsafe_ptr_arg_deref::check_trait_item(cx, item);
+ must_use::check_trait_item(cx, item);
+ result_unit_err::check_trait_item(cx, item);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs
new file mode 100644
index 000000000..6672a6cb0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs
@@ -0,0 +1,259 @@
+use rustc_ast::ast::Attribute;
+use rustc_errors::Applicability;
+use rustc_hir::def_id::{DefIdSet, LocalDefId};
+use rustc_hir::{self as hir, def::Res, intravisit, QPath};
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::{
+ lint::in_external_macro,
+ ty::{self, Ty},
+};
+use rustc_span::{sym, Span};
+
+use clippy_utils::attrs::is_proc_macro;
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_must_use_ty;
+use clippy_utils::{match_def_path, return_ty, trait_ref_of_method};
+
+use super::{DOUBLE_MUST_USE, MUST_USE_CANDIDATE, MUST_USE_UNIT};
+
+pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let attr = cx.tcx.get_attr(item.def_id.to_def_id(), sym::must_use);
+ if let hir::ItemKind::Fn(ref sig, _generics, ref body_id) = item.kind {
+ let is_public = cx.access_levels.is_exported(item.def_id);
+ let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
+ if let Some(attr) = attr {
+ check_needless_must_use(cx, sig.decl, item.hir_id(), item.span, fn_header_span, attr);
+ } else if is_public && !is_proc_macro(cx.sess(), attrs) && !attrs.iter().any(|a| a.has_name(sym::no_mangle)) {
+ check_must_use_candidate(
+ cx,
+ sig.decl,
+ cx.tcx.hir().body(*body_id),
+ item.span,
+ item.def_id,
+ item.span.with_hi(sig.decl.output.span().hi()),
+ "this function could have a `#[must_use]` attribute",
+ );
+ }
+ }
+}
+
+pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
+ if let hir::ImplItemKind::Fn(ref sig, ref body_id) = item.kind {
+ let is_public = cx.access_levels.is_exported(item.def_id);
+ let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let attr = cx.tcx.get_attr(item.def_id.to_def_id(), sym::must_use);
+ if let Some(attr) = attr {
+ check_needless_must_use(cx, sig.decl, item.hir_id(), item.span, fn_header_span, attr);
+ } else if is_public && !is_proc_macro(cx.sess(), attrs) && trait_ref_of_method(cx, item.def_id).is_none() {
+ check_must_use_candidate(
+ cx,
+ sig.decl,
+ cx.tcx.hir().body(*body_id),
+ item.span,
+ item.def_id,
+ item.span.with_hi(sig.decl.output.span().hi()),
+ "this method could have a `#[must_use]` attribute",
+ );
+ }
+ }
+}
+
+pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
+ if let hir::TraitItemKind::Fn(ref sig, ref eid) = item.kind {
+ let is_public = cx.access_levels.is_exported(item.def_id);
+ let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
+
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let attr = cx.tcx.get_attr(item.def_id.to_def_id(), sym::must_use);
+ if let Some(attr) = attr {
+ check_needless_must_use(cx, sig.decl, item.hir_id(), item.span, fn_header_span, attr);
+ } else if let hir::TraitFn::Provided(eid) = *eid {
+ let body = cx.tcx.hir().body(eid);
+ if attr.is_none() && is_public && !is_proc_macro(cx.sess(), attrs) {
+ check_must_use_candidate(
+ cx,
+ sig.decl,
+ body,
+ item.span,
+ item.def_id,
+ item.span.with_hi(sig.decl.output.span().hi()),
+ "this method could have a `#[must_use]` attribute",
+ );
+ }
+ }
+ }
+}
+
+fn check_needless_must_use(
+ cx: &LateContext<'_>,
+ decl: &hir::FnDecl<'_>,
+ item_id: hir::HirId,
+ item_span: Span,
+ fn_header_span: Span,
+ attr: &Attribute,
+) {
+ if in_external_macro(cx.sess(), item_span) {
+ return;
+ }
+ if returns_unit(decl) {
+ span_lint_and_then(
+ cx,
+ MUST_USE_UNIT,
+ fn_header_span,
+ "this unit-returning function has a `#[must_use]` attribute",
+ |diag| {
+ diag.span_suggestion(attr.span, "remove the attribute", "", Applicability::MachineApplicable);
+ },
+ );
+ } else if attr.value_str().is_none() && is_must_use_ty(cx, return_ty(cx, item_id)) {
+ span_lint_and_help(
+ cx,
+ DOUBLE_MUST_USE,
+ fn_header_span,
+ "this function has an empty `#[must_use]` attribute, but returns a type already marked as `#[must_use]`",
+ None,
+ "either add some descriptive text or remove the attribute",
+ );
+ }
+}
+
+fn check_must_use_candidate<'tcx>(
+ cx: &LateContext<'tcx>,
+ decl: &'tcx hir::FnDecl<'_>,
+ body: &'tcx hir::Body<'_>,
+ item_span: Span,
+ item_id: LocalDefId,
+ fn_span: Span,
+ msg: &str,
+) {
+ if has_mutable_arg(cx, body)
+ || mutates_static(cx, body)
+ || in_external_macro(cx.sess(), item_span)
+ || returns_unit(decl)
+ || !cx.access_levels.is_exported(item_id)
+ || is_must_use_ty(cx, return_ty(cx, cx.tcx.hir().local_def_id_to_hir_id(item_id)))
+ {
+ return;
+ }
+ span_lint_and_then(cx, MUST_USE_CANDIDATE, fn_span, msg, |diag| {
+ if let Some(snippet) = snippet_opt(cx, fn_span) {
+ diag.span_suggestion(
+ fn_span,
+ "add the attribute",
+ format!("#[must_use] {}", snippet),
+ Applicability::MachineApplicable,
+ );
+ }
+ });
+}
+
+fn returns_unit(decl: &hir::FnDecl<'_>) -> bool {
+ match decl.output {
+ hir::FnRetTy::DefaultReturn(_) => true,
+ hir::FnRetTy::Return(ty) => match ty.kind {
+ hir::TyKind::Tup(tys) => tys.is_empty(),
+ hir::TyKind::Never => true,
+ _ => false,
+ },
+ }
+}
+
+fn has_mutable_arg(cx: &LateContext<'_>, body: &hir::Body<'_>) -> bool {
+ let mut tys = DefIdSet::default();
+ body.params.iter().any(|param| is_mutable_pat(cx, param.pat, &mut tys))
+}
+
+fn is_mutable_pat(cx: &LateContext<'_>, pat: &hir::Pat<'_>, tys: &mut DefIdSet) -> bool {
+ if let hir::PatKind::Wild = pat.kind {
+ return false; // ignore `_` patterns
+ }
+ if cx.tcx.has_typeck_results(pat.hir_id.owner.to_def_id()) {
+ is_mutable_ty(cx, cx.tcx.typeck(pat.hir_id.owner).pat_ty(pat), pat.span, tys)
+ } else {
+ false
+ }
+}
+
+static KNOWN_WRAPPER_TYS: &[&[&str]] = &[&["alloc", "rc", "Rc"], &["std", "sync", "Arc"]];
+
+fn is_mutable_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, span: Span, tys: &mut DefIdSet) -> bool {
+ match *ty.kind() {
+ // primitive types are never mutable
+ ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str => false,
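+ // an ADT counts as mutable if it is not `Freeze` (and is seen here for the first time),
+ // or if it is a known wrapper (`Rc`/`Arc`) around a mutable type argument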
+ ty::Adt(adt, substs) => {
+ tys.insert(adt.did()) && !ty.is_freeze(cx.tcx.at(span), cx.param_env)
+ || KNOWN_WRAPPER_TYS.iter().any(|path| match_def_path(cx, adt.did(), path))
+ && substs.types().any(|ty| is_mutable_ty(cx, ty, span, tys))
+ },
+ ty::Tuple(substs) => substs.iter().any(|ty| is_mutable_ty(cx, ty, span, tys)),
+ ty::Array(ty, _) | ty::Slice(ty) => is_mutable_ty(cx, ty, span, tys),
+ ty::RawPtr(ty::TypeAndMut { ty, mutbl }) | ty::Ref(_, ty, mutbl) => {
+ mutbl == hir::Mutability::Mut || is_mutable_ty(cx, ty, span, tys)
+ },
+ // calling something constitutes a side effect, so return true on all callables;
+ // also, calls that never return (`!`) need not have their result used, so return true for them, too
+ _ => true,
+ }
+}
+
+struct StaticMutVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ mutates_static: bool,
+}
+
+impl<'a, 'tcx> intravisit::Visitor<'tcx> for StaticMutVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
+ use hir::ExprKind::{AddrOf, Assign, AssignOp, Call, MethodCall};
+
+ if self.mutates_static {
+ return;
+ }
+ match expr.kind {
+ Call(_, args) | MethodCall(_, args, _) => {
+ let mut tys = DefIdSet::default();
+ for arg in args {
+ if self.cx.tcx.has_typeck_results(arg.hir_id.owner.to_def_id())
+ && is_mutable_ty(
+ self.cx,
+ self.cx.tcx.typeck(arg.hir_id.owner).expr_ty(arg),
+ arg.span,
+ &mut tys,
+ )
+ && is_mutated_static(arg)
+ {
+ self.mutates_static = true;
+ return;
+ }
+ tys.clear();
+ }
+ },
+ Assign(target, ..) | AssignOp(_, target, _) | AddrOf(_, hir::Mutability::Mut, target) => {
+ self.mutates_static |= is_mutated_static(target);
+ },
+ _ => {},
+ }
+ }
+}
+
+fn is_mutated_static(e: &hir::Expr<'_>) -> bool {
+ use hir::ExprKind::{Field, Index, Path};
+
+ match e.kind {
+ Path(QPath::Resolved(_, path)) => !matches!(path.res, Res::Local(_)),
+ Path(_) => true,
+ Field(inner, _) | Index(inner, _) => is_mutated_static(inner),
+ _ => false,
+ }
+}
+
+fn mutates_static<'tcx>(cx: &LateContext<'tcx>, body: &'tcx hir::Body<'_>) -> bool {
+ let mut v = StaticMutVisitor {
+ cx,
+ mutates_static: false,
+ };
+ intravisit::walk_expr(&mut v, &body.value);
+ v.mutates_static
+}
diff --git a/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs b/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs
new file mode 100644
index 000000000..565a1c871
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs
@@ -0,0 +1,122 @@
+use rustc_hir::{self as hir, intravisit, HirIdSet};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::def_id::LocalDefId;
+
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::type_is_unsafe_function;
+use clippy_utils::{iter_input_pats, path_to_local};
+
+use super::NOT_UNSAFE_PTR_ARG_DEREF;
+
+pub(super) fn check_fn<'tcx>(
+ cx: &LateContext<'tcx>,
+ kind: intravisit::FnKind<'tcx>,
+ decl: &'tcx hir::FnDecl<'tcx>,
+ body: &'tcx hir::Body<'tcx>,
+ hir_id: hir::HirId,
+) {
+ let unsafety = match kind {
+ intravisit::FnKind::ItemFn(_, _, hir::FnHeader { unsafety, .. }) => unsafety,
+ intravisit::FnKind::Method(_, sig) => sig.header.unsafety,
+ intravisit::FnKind::Closure => return,
+ };
+
+ check_raw_ptr(cx, unsafety, decl, body, cx.tcx.hir().local_def_id(hir_id));
+}
+
+pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
+ if let hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(eid)) = item.kind {
+ let body = cx.tcx.hir().body(eid);
+ check_raw_ptr(cx, sig.header.unsafety, sig.decl, body, item.def_id);
+ }
+}
+
+fn check_raw_ptr<'tcx>(
+ cx: &LateContext<'tcx>,
+ unsafety: hir::Unsafety,
+ decl: &'tcx hir::FnDecl<'tcx>,
+ body: &'tcx hir::Body<'tcx>,
+ def_id: LocalDefId,
+) {
+ let expr = &body.value;
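+ // only public non-`unsafe` functions are linted: collect their raw-pointer parameters
+ // and walk the body for dereferences or unsafe calls involving them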
+ if unsafety == hir::Unsafety::Normal && cx.access_levels.is_exported(def_id) {
+ let raw_ptrs = iter_input_pats(decl, body)
+ .filter_map(|arg| raw_ptr_arg(cx, arg))
+ .collect::<HirIdSet>();
+
+ if !raw_ptrs.is_empty() {
+ let typeck_results = cx.tcx.typeck_body(body.id());
+ let mut v = DerefVisitor {
+ cx,
+ ptrs: raw_ptrs,
+ typeck_results,
+ };
+
+ intravisit::walk_expr(&mut v, expr);
+ }
+ }
+}
+
+fn raw_ptr_arg(cx: &LateContext<'_>, arg: &hir::Param<'_>) -> Option<hir::HirId> {
+ if let (&hir::PatKind::Binding(_, id, _, _), Some(&ty::RawPtr(_))) = (
+ &arg.pat.kind,
+ cx.maybe_typeck_results()
+ .map(|typeck_results| typeck_results.pat_ty(arg.pat).kind()),
+ ) {
+ Some(id)
+ } else {
+ None
+ }
+}
+
+struct DerefVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ ptrs: HirIdSet,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
+}
+
+impl<'a, 'tcx> intravisit::Visitor<'tcx> for DerefVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
+ match expr.kind {
+ hir::ExprKind::Call(f, args) => {
+ let ty = self.typeck_results.expr_ty(f);
+
+ if type_is_unsafe_function(self.cx, ty) {
+ for arg in args {
+ self.check_arg(arg);
+ }
+ }
+ },
+ hir::ExprKind::MethodCall(_, args, _) => {
+ let def_id = self.typeck_results.type_dependent_def_id(expr.hir_id).unwrap();
+ let base_type = self.cx.tcx.type_of(def_id);
+
+ if type_is_unsafe_function(self.cx, base_type) {
+ for arg in args {
+ self.check_arg(arg);
+ }
+ }
+ },
+ hir::ExprKind::Unary(hir::UnOp::Deref, ptr) => self.check_arg(ptr),
+ _ => (),
+ }
+
+ intravisit::walk_expr(self, expr);
+ }
+}
+
+impl<'a, 'tcx> DerefVisitor<'a, 'tcx> {
+ fn check_arg(&self, ptr: &hir::Expr<'_>) {
+ if let Some(id) = path_to_local(ptr) {
+ if self.ptrs.contains(&id) {
+ span_lint(
+ self.cx,
+ NOT_UNSAFE_PTR_ARG_DEREF,
+ ptr.span,
+ "this public function might dereference a raw pointer but is not marked `unsafe`",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/functions/result_unit_err.rs b/src/tools/clippy/clippy_lints/src/functions/result_unit_err.rs
new file mode 100644
index 000000000..2e63a1f92
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/functions/result_unit_err.rs
@@ -0,0 +1,66 @@
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_span::{sym, Span};
+use rustc_typeck::hir_ty_to_ty;
+
+use if_chain::if_chain;
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::trait_ref_of_method;
+use clippy_utils::ty::is_type_diagnostic_item;
+
+use super::RESULT_UNIT_ERR;
+
+pub(super) fn check_item(cx: &LateContext<'_>, item: &hir::Item<'_>) {
+ if let hir::ItemKind::Fn(ref sig, _generics, _) = item.kind {
+ let is_public = cx.access_levels.is_exported(item.def_id);
+ let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
+ if is_public {
+ check_result_unit_err(cx, sig.decl, item.span, fn_header_span);
+ }
+ }
+}
+
+pub(super) fn check_impl_item(cx: &LateContext<'_>, item: &hir::ImplItem<'_>) {
+ if let hir::ImplItemKind::Fn(ref sig, _) = item.kind {
+ let is_public = cx.access_levels.is_exported(item.def_id);
+ let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
+ if is_public && trait_ref_of_method(cx, item.def_id).is_none() {
+ check_result_unit_err(cx, sig.decl, item.span, fn_header_span);
+ }
+ }
+}
+
+pub(super) fn check_trait_item(cx: &LateContext<'_>, item: &hir::TraitItem<'_>) {
+ if let hir::TraitItemKind::Fn(ref sig, _) = item.kind {
+ let is_public = cx.access_levels.is_exported(item.def_id);
+ let fn_header_span = item.span.with_hi(sig.decl.output.span().hi());
+ if is_public {
+ check_result_unit_err(cx, sig.decl, item.span, fn_header_span);
+ }
+ }
+}
+
+fn check_result_unit_err(cx: &LateContext<'_>, decl: &hir::FnDecl<'_>, item_span: Span, fn_header_span: Span) {
+ if_chain! {
+ if !in_external_macro(cx.sess(), item_span);
+ if let hir::FnRetTy::Return(ty) = decl.output;
+ let ty = hir_ty_to_ty(cx.tcx, ty);
+ if is_type_diagnostic_item(cx, ty, sym::Result);
+ if let ty::Adt(_, substs) = ty.kind();
+ let err_ty = substs.type_at(1);
+ if err_ty.is_unit();
+ then {
+ span_lint_and_help(
+ cx,
+ RESULT_UNIT_ERR,
+ fn_header_span,
+ "this returns a `Result<_, ()>`",
+ None,
+ "use a custom `Error` type instead",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs b/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs
new file mode 100644
index 000000000..5c8d8b8e7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs
@@ -0,0 +1,68 @@
+use rustc_hir::{self as hir, intravisit};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+use rustc_target::spec::abi::Abi;
+
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_trait_impl_item;
+
+use super::TOO_MANY_ARGUMENTS;
+
+pub(super) fn check_fn(
+ cx: &LateContext<'_>,
+ kind: intravisit::FnKind<'_>,
+ decl: &hir::FnDecl<'_>,
+ span: Span,
+ hir_id: hir::HirId,
+ too_many_arguments_threshold: u64,
+) {
+ // don't warn for implementations, it's not their fault
+ if !is_trait_impl_item(cx, hir_id) {
+ // don't lint extern functions decls, it's not their fault either
+ match kind {
+ intravisit::FnKind::Method(
+ _,
+ &hir::FnSig {
+ header: hir::FnHeader { abi: Abi::Rust, .. },
+ ..
+ },
+ )
+ | intravisit::FnKind::ItemFn(_, _, hir::FnHeader { abi: Abi::Rust, .. }) => check_arg_number(
+ cx,
+ decl,
+ span.with_hi(decl.output.span().hi()),
+ too_many_arguments_threshold,
+ ),
+ _ => {},
+ }
+ }
+}
+
+pub(super) fn check_trait_item(cx: &LateContext<'_>, item: &hir::TraitItem<'_>, too_many_arguments_threshold: u64) {
+ if let hir::TraitItemKind::Fn(ref sig, _) = item.kind {
+ // don't lint extern functions decls, it's not their fault
+ if sig.header.abi == Abi::Rust {
+ check_arg_number(
+ cx,
+ sig.decl,
+ item.span.with_hi(sig.decl.output.span().hi()),
+ too_many_arguments_threshold,
+ );
+ }
+ }
+}
+
+fn check_arg_number(cx: &LateContext<'_>, decl: &hir::FnDecl<'_>, fn_span: Span, too_many_arguments_threshold: u64) {
+ let args = decl.inputs.len() as u64;
+ if args > too_many_arguments_threshold {
+ span_lint(
+ cx,
+ TOO_MANY_ARGUMENTS,
+ fn_span,
+ &format!(
+ "this function has too many arguments ({}/{})",
+ args, too_many_arguments_threshold
+ ),
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs b/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs
new file mode 100644
index 000000000..54bdea7ea
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs
@@ -0,0 +1,87 @@
+use rustc_hir as hir;
+use rustc_hir::intravisit::FnKind;
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_span::Span;
+
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::source::snippet_opt;
+
+use super::TOO_MANY_LINES;
+
+pub(super) fn check_fn(
+ cx: &LateContext<'_>,
+ kind: FnKind<'_>,
+ span: Span,
+ body: &hir::Body<'_>,
+ too_many_lines_threshold: u64,
+) {
+ // Closures must be contained in a parent body, which will be checked for `too_many_lines`.
+ // Don't check closures for `too_many_lines` to avoid duplicated lints.
+ if matches!(kind, FnKind::Closure) || in_external_macro(cx.sess(), span) {
+ return;
+ }
+
+ let code_snippet = match snippet_opt(cx, body.value.span) {
+ Some(s) => s,
+ _ => return,
+ };
+ let mut line_count: u64 = 0;
+ let mut in_comment = false;
+ let mut code_in_line;
+
+ let function_lines = if matches!(body.value.kind, hir::ExprKind::Block(..))
+ && code_snippet.as_bytes().first().copied() == Some(b'{')
+ && code_snippet.as_bytes().last().copied() == Some(b'}')
+ {
+ // Removing the braces from the enclosing block
+ &code_snippet[1..code_snippet.len() - 1]
+ } else {
+ &code_snippet
+ }
+ .trim() // Remove leading and trailing blank lines
+ .lines();
+
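+ // count only lines containing actual code, skipping blank lines and `//` / `/* .. */` comment-only lines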
+ for mut line in function_lines {
+ code_in_line = false;
+ loop {
+ line = line.trim_start();
+ if line.is_empty() {
+ break;
+ }
+ if in_comment {
+ if let Some(i) = line.find("*/") {
+ line = &line[i + 2..];
+ in_comment = false;
+ continue;
+ }
+ } else {
+ let multi_idx = line.find("/*").unwrap_or(line.len());
+ let single_idx = line.find("//").unwrap_or(line.len());
+ code_in_line |= multi_idx > 0 && single_idx > 0;
+ // Implies multi_idx is below line.len()
+ if multi_idx < single_idx {
+ line = &line[multi_idx + 2..];
+ in_comment = true;
+ continue;
+ }
+ }
+ break;
+ }
+ if code_in_line {
+ line_count += 1;
+ }
+ }
+
+ if line_count > too_many_lines_threshold {
+ span_lint(
+ cx,
+ TOO_MANY_LINES,
+ span,
+ &format!(
+ "this function has too many lines ({}/{})",
+ line_count, too_many_lines_threshold
+ ),
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/future_not_send.rs b/src/tools/clippy/clippy_lints/src/future_not_send.rs
new file mode 100644
index 000000000..5c46d6c7d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/future_not_send.rs
@@ -0,0 +1,112 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::return_ty;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, FnDecl, HirId};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::subst::Subst;
+use rustc_middle::ty::{EarlyBinder, Opaque, PredicateKind::Trait};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+use rustc_trait_selection::traits::error_reporting::suggestions::InferCtxtExt;
+use rustc_trait_selection::traits::{self, FulfillmentError, TraitEngine};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint requires Future implementations returned from
+ /// functions and methods to implement the `Send` marker trait. It is mostly
+ /// used by library authors (public and internal) that target an audience where
+ /// multithreaded executors are likely to be used for running these Futures.
+ ///
+ /// ### Why is this bad?
+ /// A Future implementation captures some state that it
+ /// needs to eventually produce its final value. When targeting a multithreaded
+ /// executor (which is the norm on non-embedded devices) this means that this
+ /// state may need to be transported to other threads, in other words the
+ /// whole Future needs to implement the `Send` marker trait. If it does not,
+ /// then the resulting Future cannot be submitted to a thread pool in the
+ /// end user’s code.
+ ///
+ /// Especially for generic functions it can be confusing to leave the
+ /// discovery of this problem to the end user: the reported error location
+ /// will be far from its cause, and in many cases cannot even be fixed without
+ /// modifying the library where the offending Future implementation is
+ /// produced.
+ ///
+ /// ### Example
+ /// ```rust
+ /// async fn not_send(bytes: std::rc::Rc<[u8]>) {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// async fn is_send(bytes: std::sync::Arc<[u8]>) {}
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub FUTURE_NOT_SEND,
+ nursery,
+ "public Futures must be Send"
+}
+
+declare_lint_pass!(FutureNotSend => [FUTURE_NOT_SEND]);
+
+impl<'tcx> LateLintPass<'tcx> for FutureNotSend {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'tcx>,
+ _: &'tcx Body<'tcx>,
+ _: Span,
+ hir_id: HirId,
+ ) {
+ if let FnKind::Closure = kind {
+ return;
+ }
+ let ret_ty = return_ty(cx, hir_id);
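+ // `async fn`s (and functions returning `impl Future`) have an opaque return type;
+ // inspect its bounds to see whether it is a `Future` at all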
+ if let Opaque(id, subst) = *ret_ty.kind() {
+ let preds = cx.tcx.explicit_item_bounds(id);
+ let mut is_future = false;
+ for &(p, _span) in preds {
+ let p = EarlyBinder(p).subst(cx.tcx, subst);
+ if let Some(trait_pred) = p.to_opt_poly_trait_pred() {
+ if Some(trait_pred.skip_binder().trait_ref.def_id) == cx.tcx.lang_items().future_trait() {
+ is_future = true;
+ break;
+ }
+ }
+ }
+ if is_future {
+ let send_trait = cx.tcx.get_diagnostic_item(sym::Send).unwrap();
+ let span = decl.output.span();
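+ // register a `ret_ty: Send` obligation and collect any fulfillment errors it produces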
+ let send_errors = cx.tcx.infer_ctxt().enter(|infcx| {
+ let cause = traits::ObligationCause::misc(span, hir_id);
+ let mut fulfillment_cx = traits::FulfillmentContext::new();
+ fulfillment_cx.register_bound(&infcx, cx.param_env, ret_ty, send_trait, cause);
+ fulfillment_cx.select_all_or_error(&infcx)
+ });
+ if !send_errors.is_empty() {
+ span_lint_and_then(
+ cx,
+ FUTURE_NOT_SEND,
+ span,
+ "future cannot be sent between threads safely",
+ |db| {
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ for FulfillmentError { obligation, .. } in send_errors {
+ infcx.maybe_note_obligation_cause_for_async_await(db, &obligation);
+ if let Trait(trait_pred) = obligation.predicate.kind().skip_binder() {
+ db.note(&format!(
+ "`{}` doesn't implement `{}`",
+ trait_pred.self_ty(),
+ trait_pred.trait_ref.print_only_trait_path(),
+ ));
+ }
+ }
+ });
+ },
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/get_first.rs b/src/tools/clippy/clippy_lints/src/get_first.rs
new file mode 100644
index 000000000..529f7baba
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/get_first.rs
@@ -0,0 +1,68 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{is_slice_of_primitives, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for using `x.get(0)` instead of
+ /// `x.first()`.
+ ///
+ /// ### Why is this bad?
+ /// Using `x.first()` is easier to read and has the same
+ /// result.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = vec![2, 3, 5];
+ /// let first_element = x.get(0);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x = vec![2, 3, 5];
+ /// let first_element = x.first();
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub GET_FIRST,
+ style,
+ "Using `x.get(0)` when `x.first()` is simpler"
+}
+declare_lint_pass!(GetFirst => [GET_FIRST]);
+
+impl<'tcx> LateLintPass<'tcx> for GetFirst {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if let hir::ExprKind::MethodCall(_, [struct_calling_on, method_arg], _) = &expr.kind;
+ if let Some(expr_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if match_def_path(cx, expr_def_id, &paths::SLICE_GET);
+
+ if let Some(_) = is_slice_of_primitives(cx, struct_calling_on);
+ if let hir::ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = method_arg.kind;
+
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let slice_name = snippet_with_applicability(
+ cx,
+ struct_calling_on.span, "..",
+ &mut applicability,
+ );
+ span_lint_and_sugg(
+ cx,
+ GET_FIRST,
+ expr.span,
+ &format!("accessing first element with `{0}.get(0)`", slice_name),
+ "try",
+ format!("{}.first()", slice_name),
+ applicability,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
new file mode 100644
index 000000000..e95017007
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
@@ -0,0 +1,140 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::higher;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::SpanlessEq;
+use if_chain::if_chain;
+use rustc_hir::intravisit::{self as visit, Visitor};
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `Mutex::lock` calls in an `if let` expression
+ /// with lock calls in any of the else blocks.
+ ///
+ /// ### Why is this bad?
+ /// The `Mutex` lock acquired in the `if let` scrutinee remains held for the whole
+ /// `if let ... else` block, so calling `lock` again in the `else` branch deadlocks.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// if let Ok(thing) = mutex.lock() {
+ /// do_thing();
+ /// } else {
+ /// mutex.lock();
+ /// }
+ /// ```
+ /// Should be written
+ /// ```rust,ignore
+ /// let locked = mutex.lock();
+ /// if let Ok(thing) = locked {
+ /// do_thing(thing);
+ /// } else {
+ /// use_locked(locked);
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub IF_LET_MUTEX,
+ correctness,
+ "locking a `Mutex` in an `if let` block can cause deadlocks"
+}
+
+declare_lint_pass!(IfLetMutex => [IF_LET_MUTEX]);
+
+impl<'tcx> LateLintPass<'tcx> for IfLetMutex {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ let mut arm_visit = ArmVisitor {
+ mutex_lock_called: false,
+ found_mutex: None,
+ cx,
+ };
+ let mut op_visit = OppVisitor {
+ mutex_lock_called: false,
+ found_mutex: None,
+ cx,
+ };
+ if let Some(higher::IfLet {
+ let_expr,
+ if_then,
+ if_else: Some(if_else),
+ ..
+ }) = higher::IfLet::hir(cx, expr)
+ {
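+ // visit the scrutinee first; only if it calls `Mutex::lock` do both branches
+ // need to be checked for another lock on the same mutex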
+ op_visit.visit_expr(let_expr);
+ if op_visit.mutex_lock_called {
+ arm_visit.visit_expr(if_then);
+ arm_visit.visit_expr(if_else);
+
+ if arm_visit.mutex_lock_called && arm_visit.same_mutex(cx, op_visit.found_mutex.unwrap()) {
+ span_lint_and_help(
+ cx,
+ IF_LET_MUTEX,
+ expr.span,
+ "calling `Mutex::lock` inside the scope of another `Mutex::lock` causes a deadlock",
+ None,
+ "move the lock call outside of the `if let ...` expression",
+ );
+ }
+ }
+ }
+ }
+}
+
+/// Checks if `Mutex::lock` is called in the `if let` expr.
+pub struct OppVisitor<'a, 'tcx> {
+ mutex_lock_called: bool,
+ found_mutex: Option<&'tcx Expr<'tcx>>,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'tcx> Visitor<'tcx> for OppVisitor<'_, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if let Some(mutex) = is_mutex_lock_call(self.cx, expr) {
+ self.found_mutex = Some(mutex);
+ self.mutex_lock_called = true;
+ return;
+ }
+ visit::walk_expr(self, expr);
+ }
+}
+
+/// Checks if `Mutex::lock` is called in any of the branches.
+pub struct ArmVisitor<'a, 'tcx> {
+ mutex_lock_called: bool,
+ found_mutex: Option<&'tcx Expr<'tcx>>,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'tcx> Visitor<'tcx> for ArmVisitor<'_, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
+ if let Some(mutex) = is_mutex_lock_call(self.cx, expr) {
+ self.found_mutex = Some(mutex);
+ self.mutex_lock_called = true;
+ return;
+ }
+ visit::walk_expr(self, expr);
+ }
+}
+
+impl<'tcx, 'l> ArmVisitor<'tcx, 'l> {
+ fn same_mutex(&self, cx: &LateContext<'_>, op_mutex: &Expr<'_>) -> bool {
+ self.found_mutex
+ .map_or(false, |arm_mutex| SpanlessEq::new(cx).eq_expr(op_mutex, arm_mutex))
+ }
+}
+
+fn is_mutex_lock_call<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if_chain! {
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &expr.kind;
+ if path.ident.as_str() == "lock";
+ let ty = cx.typeck_results().expr_ty(self_arg);
+ if is_type_diagnostic_item(cx, ty, sym::Mutex);
+ then {
+ Some(self_arg)
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/if_not_else.rs b/src/tools/clippy/clippy_lints/src/if_not_else.rs
new file mode 100644
index 000000000..3d59b7833
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/if_not_else.rs
@@ -0,0 +1,90 @@
+//! lint on if branches that could be swapped so no `!` operation is necessary
+//! on the condition
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_else_clause;
+use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `!` or `!=` in an if condition with an
+ /// else branch.
+ ///
+ /// ### Why is this bad?
+ /// Negations reduce the readability of statements.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let v: Vec<usize> = vec![];
+ /// # fn a() {}
+ /// # fn b() {}
+ /// if !v.is_empty() {
+ /// a()
+ /// } else {
+ /// b()
+ /// }
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```rust
+ /// # let v: Vec<usize> = vec![];
+ /// # fn a() {}
+ /// # fn b() {}
+ /// if v.is_empty() {
+ /// b()
+ /// } else {
+ /// a()
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub IF_NOT_ELSE,
+ pedantic,
+ "`if` branches that could be swapped so no negation operation is necessary on the condition"
+}
+
+declare_lint_pass!(IfNotElse => [IF_NOT_ELSE]);
+
+impl LateLintPass<'_> for IfNotElse {
+ fn check_expr(&mut self, cx: &LateContext<'_>, item: &Expr<'_>) {
+ // While loops will be desugared to ExprKind::If. This will cause the lint to fire.
+ // To fix this, return early if this span comes from a macro or desugaring.
+ if item.span.from_expansion() {
+ return;
+ }
+ if let ExprKind::If(cond, _, Some(els)) = item.kind {
+ if let ExprKind::Block(..) = els.kind {
+ // Disable firing the lint in "else if" expressions.
+ if is_else_clause(cx.tcx, item) {
+ return;
+ }
+
+ match cond.peel_drop_temps().kind {
+ ExprKind::Unary(UnOp::Not, _) => {
+ span_lint_and_help(
+ cx,
+ IF_NOT_ELSE,
+ item.span,
+ "unnecessary boolean `not` operation",
+ None,
+ "remove the `!` and swap the blocks of the `if`/`else`",
+ );
+ },
+ ExprKind::Binary(ref kind, _, _) if kind.node == BinOpKind::Ne => {
+ span_lint_and_help(
+ cx,
+ IF_NOT_ELSE,
+ item.span,
+ "unnecessary `!=` operation",
+ None,
+ "change to `==` and swap the blocks of the `if`/`else`",
+ );
+ },
+ _ => (),
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
new file mode 100644
index 000000000..b8d227855
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
@@ -0,0 +1,122 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::{contains_return, higher, is_else_clause, is_lang_ctor, meets_msrv, msrvs, peel_blocks};
+use if_chain::if_chain;
+use rustc_hir::LangItem::{OptionNone, OptionSome};
+use rustc_hir::{Expr, ExprKind, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for if-else that could be written using `bool::then`.
+ ///
+ /// ### Why is this bad?
+ /// The `if`/`else` is more verbose than necessary. Using `bool::then` results in fewer lines of code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let v = vec![0];
+ /// let a = if v.is_empty() {
+ /// println!("true!");
+ /// Some(42)
+ /// } else {
+ /// None
+ /// };
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```rust
+ /// # let v = vec![0];
+ /// let a = v.is_empty().then(|| {
+ /// println!("true!");
+ /// 42
+ /// });
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub IF_THEN_SOME_ELSE_NONE,
+ restriction,
+ "Finds if-else that could be written using `bool::then`"
+}
+
+pub struct IfThenSomeElseNone {
+ msrv: Option<RustcVersion>,
+}
+
+impl IfThenSomeElseNone {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(IfThenSomeElseNone => [IF_THEN_SOME_ELSE_NONE]);
+
+impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'tcx Expr<'_>) {
+ if !meets_msrv(self.msrv, msrvs::BOOL_THEN) {
+ return;
+ }
+
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ // We only care about the top-most `if` in the chain
+ if is_else_clause(cx.tcx, expr) {
+ return;
+ }
+
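+ // match `if cond { .. Some(expr) } else { None }` where the `then` block contains no early returns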
+ if_chain! {
+ if let Some(higher::If { cond, then, r#else: Some(els) }) = higher::If::hir(expr);
+ if let ExprKind::Block(then_block, _) = then.kind;
+ if let Some(then_expr) = then_block.expr;
+ if let ExprKind::Call(then_call, [then_arg]) = then_expr.kind;
+ if let ExprKind::Path(ref then_call_qpath) = then_call.kind;
+ if is_lang_ctor(cx, then_call_qpath, OptionSome);
+ if let ExprKind::Path(ref qpath) = peel_blocks(els).kind;
+ if is_lang_ctor(cx, qpath, OptionNone);
+ if !stmts_contains_early_return(then_block.stmts);
+ then {
+ let cond_snip = snippet_with_macro_callsite(cx, cond.span, "[condition]");
+ let cond_snip = if matches!(cond.kind, ExprKind::Unary(_, _) | ExprKind::Binary(_, _, _)) {
+ format!("({})", cond_snip)
+ } else {
+ cond_snip.into_owned()
+ };
+ let arg_snip = snippet_with_macro_callsite(cx, then_arg.span, "");
+ let closure_body = if then_block.stmts.is_empty() {
+ arg_snip.into_owned()
+ } else {
+ format!("{{ /* snippet */ {} }}", arg_snip)
+ };
+ let help = format!(
+ "consider using `bool::then` like: `{}.then(|| {})`",
+ cond_snip,
+ closure_body,
+ );
+ span_lint_and_help(
+ cx,
+ IF_THEN_SOME_ELSE_NONE,
+ expr.span,
+ "this could be simplified with `bool::then`",
+ None,
+ &help,
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+fn stmts_contains_early_return(stmts: &[Stmt<'_>]) -> bool {
+ stmts.iter().any(|stmt| {
+ let Stmt { kind: StmtKind::Semi(e), .. } = stmt else { return false };
+
+ contains_return(e)
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/implicit_hasher.rs b/src/tools/clippy/clippy_lints/src/implicit_hasher.rs
new file mode 100644
index 000000000..4f9680f60
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/implicit_hasher.rs
@@ -0,0 +1,388 @@
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+
+use rustc_errors::Diagnostic;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{walk_body, walk_expr, walk_inf, walk_ty, Visitor};
+use rustc_hir::{Body, Expr, ExprKind, GenericArg, Item, ItemKind, QPath, TyKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{Ty, TypeckResults};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::sym;
+use rustc_typeck::hir_ty_to_ty;
+
+use if_chain::if_chain;
+
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::source::{snippet, snippet_opt};
+use clippy_utils::ty::is_type_diagnostic_item;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for public `impl` or `fn` missing generalization
+ /// over different hashers and implicitly defaulting to the default hashing
+ /// algorithm (`SipHash`).
+ ///
+ /// ### Why is this bad?
+ /// A `HashMap` or `HashSet` with a custom hasher cannot be
+ /// used with such code.
+ ///
+ /// ### Known problems
+ /// Suggestions for replacing constructors can contain
+ /// false positives. Also, applying the suggestions can require modifying other
+ /// pieces of code, possibly including external crates.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::collections::HashMap;
+ /// # use std::hash::{Hash, BuildHasher};
+ /// # trait Serialize {};
+ /// impl<K: Hash + Eq, V> Serialize for HashMap<K, V> { }
+ ///
+ /// pub fn foo(map: &mut HashMap<i32, i32>) { }
+ /// ```
+ /// could be rewritten as
+ /// ```rust
+ /// # use std::collections::HashMap;
+ /// # use std::hash::{Hash, BuildHasher};
+ /// # trait Serialize {};
+ /// impl<K: Hash + Eq, V, S: BuildHasher> Serialize for HashMap<K, V, S> { }
+ ///
+ /// pub fn foo<S: BuildHasher>(map: &mut HashMap<i32, i32, S>) { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub IMPLICIT_HASHER,
+ pedantic,
+ "missing generalization over different hashers"
+}
+
+declare_lint_pass!(ImplicitHasher => [IMPLICIT_HASHER]);
+
+impl<'tcx> LateLintPass<'tcx> for ImplicitHasher {
+ #[expect(clippy::cast_possible_truncation, clippy::too_many_lines)]
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ use rustc_span::BytePos;
+
+ fn suggestion<'tcx>(
+ cx: &LateContext<'tcx>,
+ diag: &mut Diagnostic,
+ generics_span: Span,
+ generics_suggestion_span: Span,
+ target: &ImplicitHasherType<'_>,
+ vis: ImplicitHasherConstructorVisitor<'_, '_, '_>,
+ ) {
+ let generics_snip = snippet(cx, generics_span, "");
+ // trim `<` `>`
+ let generics_snip = if generics_snip.is_empty() {
+ ""
+ } else {
+ &generics_snip[1..generics_snip.len() - 1]
+ };
+
+ multispan_sugg(
+ diag,
+ "consider adding a type parameter",
+ vec![
+ (
+ generics_suggestion_span,
+ format!(
+ "<{}{}S: ::std::hash::BuildHasher{}>",
+ generics_snip,
+ if generics_snip.is_empty() { "" } else { ", " },
+ if vis.suggestions.is_empty() {
+ ""
+ } else {
+ // request users to add `Default` bound so that generic constructors can be used
+ " + Default"
+ },
+ ),
+ ),
+ (
+ target.span(),
+ format!("{}<{}, S>", target.type_name(), target.type_arguments(),),
+ ),
+ ],
+ );
+
+ if !vis.suggestions.is_empty() {
+ multispan_sugg(diag, "...and use generic constructor", vis.suggestions);
+ }
+ }
+
+ if !cx.access_levels.is_exported(item.def_id) {
+ return;
+ }
+
+ match item.kind {
+ ItemKind::Impl(impl_) => {
+ let mut vis = ImplicitHasherTypeVisitor::new(cx);
+ vis.visit_ty(impl_.self_ty);
+
+ for target in &vis.found {
+ if item.span.ctxt() != target.span().ctxt() {
+ return;
+ }
+
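+ // if the impl has no explicit generics, synthesize a span right after the `impl` keyword
+ // to hold the new type parameter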
+ let generics_suggestion_span = impl_.generics.span.substitute_dummy({
+ let pos = snippet_opt(cx, item.span.until(target.span()))
+ .and_then(|snip| Some(item.span.lo() + BytePos(snip.find("impl")? as u32 + 4)));
+ if let Some(pos) = pos {
+ Span::new(pos, pos, item.span.ctxt(), item.span.parent())
+ } else {
+ return;
+ }
+ });
+
+ let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
+ for item in impl_.items.iter().map(|item| cx.tcx.hir().impl_item(item.id)) {
+ ctr_vis.visit_impl_item(item);
+ }
+
+ span_lint_and_then(
+ cx,
+ IMPLICIT_HASHER,
+ target.span(),
+ &format!(
+ "impl for `{}` should be generalized over different hashers",
+ target.type_name()
+ ),
+ move |diag| {
+ suggestion(cx, diag, impl_.generics.span, generics_suggestion_span, target, ctr_vis);
+ },
+ );
+ }
+ },
+ ItemKind::Fn(ref sig, generics, body_id) => {
+ let body = cx.tcx.hir().body(body_id);
+
+ for ty in sig.decl.inputs {
+ let mut vis = ImplicitHasherTypeVisitor::new(cx);
+ vis.visit_ty(ty);
+
+ for target in &vis.found {
+ if in_external_macro(cx.sess(), generics.span) {
+ continue;
+ }
+ let generics_suggestion_span = generics.span.substitute_dummy({
+ let pos = snippet_opt(
+ cx,
+ Span::new(
+ item.span.lo(),
+ body.params[0].pat.span.lo(),
+ item.span.ctxt(),
+ item.span.parent(),
+ ),
+ )
+ .and_then(|snip| {
+ let i = snip.find("fn")?;
+ Some(item.span.lo() + BytePos((i + snip[i..].find('(')?) as u32))
+ })
+ .expect("failed to create span for type parameters");
+ Span::new(pos, pos, item.span.ctxt(), item.span.parent())
+ });
+
+ let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
+ ctr_vis.visit_body(body);
+
+ span_lint_and_then(
+ cx,
+ IMPLICIT_HASHER,
+ target.span(),
+ &format!(
+ "parameter of type `{}` should be generalized over different hashers",
+ target.type_name()
+ ),
+ move |diag| {
+ suggestion(cx, diag, generics.span, generics_suggestion_span, target, ctr_vis);
+ },
+ );
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+}
+
+enum ImplicitHasherType<'tcx> {
+ HashMap(Span, Ty<'tcx>, Cow<'static, str>, Cow<'static, str>),
+ HashSet(Span, Ty<'tcx>, Cow<'static, str>),
+}
+
+impl<'tcx> ImplicitHasherType<'tcx> {
+ /// Checks that `ty` is a target type without a `BuildHasher`.
+ fn new(cx: &LateContext<'tcx>, hir_ty: &hir::Ty<'_>) -> Option<Self> {
+ if let TyKind::Path(QPath::Resolved(None, path)) = hir_ty.kind {
+ let params: Vec<_> = path
+ .segments
+ .last()
+ .as_ref()?
+ .args
+ .as_ref()?
+ .args
+ .iter()
+ .filter_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+ .collect();
+ let params_len = params.len();
+
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+
+ if is_type_diagnostic_item(cx, ty, sym::HashMap) && params_len == 2 {
+ Some(ImplicitHasherType::HashMap(
+ hir_ty.span,
+ ty,
+ snippet(cx, params[0].span, "K"),
+ snippet(cx, params[1].span, "V"),
+ ))
+ } else if is_type_diagnostic_item(cx, ty, sym::HashSet) && params_len == 1 {
+ Some(ImplicitHasherType::HashSet(
+ hir_ty.span,
+ ty,
+ snippet(cx, params[0].span, "T"),
+ ))
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ }
+
+ fn type_name(&self) -> &'static str {
+ match *self {
+ ImplicitHasherType::HashMap(..) => "HashMap",
+ ImplicitHasherType::HashSet(..) => "HashSet",
+ }
+ }
+
+ fn type_arguments(&self) -> String {
+ match *self {
+ ImplicitHasherType::HashMap(.., ref k, ref v) => format!("{}, {}", k, v),
+ ImplicitHasherType::HashSet(.., ref t) => format!("{}", t),
+ }
+ }
+
+ fn ty(&self) -> Ty<'tcx> {
+ match *self {
+ ImplicitHasherType::HashMap(_, ty, ..) | ImplicitHasherType::HashSet(_, ty, ..) => ty,
+ }
+ }
+
+ fn span(&self) -> Span {
+ match *self {
+ ImplicitHasherType::HashMap(span, ..) | ImplicitHasherType::HashSet(span, ..) => span,
+ }
+ }
+}
+
+struct ImplicitHasherTypeVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ found: Vec<ImplicitHasherType<'tcx>>,
+}
+
+impl<'a, 'tcx> ImplicitHasherTypeVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self { cx, found: vec![] }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for ImplicitHasherTypeVisitor<'a, 'tcx> {
+ fn visit_ty(&mut self, t: &'tcx hir::Ty<'_>) {
+ if let Some(target) = ImplicitHasherType::new(self.cx, t) {
+ self.found.push(target);
+ }
+
+ walk_ty(self, t);
+ }
+
+ fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
+ if let Some(target) = ImplicitHasherType::new(self.cx, &inf.to_ty()) {
+ self.found.push(target);
+ }
+
+ walk_inf(self, inf);
+ }
+}
+
+/// Looks for default-hasher-dependent constructors like `HashMap::new`.
+struct ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ maybe_typeck_results: Option<&'tcx TypeckResults<'tcx>>,
+ target: &'b ImplicitHasherType<'tcx>,
+ suggestions: BTreeMap<Span, String>,
+}
+
+impl<'a, 'b, 'tcx> ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>, target: &'b ImplicitHasherType<'tcx>) -> Self {
+ Self {
+ cx,
+ maybe_typeck_results: cx.maybe_typeck_results(),
+ target,
+ suggestions: BTreeMap::new(),
+ }
+ }
+}
+
+impl<'a, 'b, 'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_body(&mut self, body: &'tcx Body<'_>) {
+ let old_maybe_typeck_results = self.maybe_typeck_results.replace(self.cx.tcx.typeck_body(body.id()));
+ walk_body(self, body);
+ self.maybe_typeck_results = old_maybe_typeck_results;
+ }
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(fun, args) = e.kind;
+ if let ExprKind::Path(QPath::TypeRelative(ty, method)) = fun.kind;
+ if let TyKind::Path(QPath::Resolved(None, ty_path)) = ty.kind;
+ if let Some(ty_did) = ty_path.res.opt_def_id();
+ then {
+ if self.target.ty() != self.maybe_typeck_results.unwrap().expr_ty(e) {
+ return;
+ }
+
+ if self.cx.tcx.is_diagnostic_item(sym::HashMap, ty_did) {
+ if method.ident.name == sym::new {
+ self.suggestions
+ .insert(e.span, "HashMap::default()".to_string());
+ } else if method.ident.name == sym!(with_capacity) {
+ self.suggestions.insert(
+ e.span,
+ format!(
+ "HashMap::with_capacity_and_hasher({}, Default::default())",
+ snippet(self.cx, args[0].span, "capacity"),
+ ),
+ );
+ }
+ } else if self.cx.tcx.is_diagnostic_item(sym::HashSet, ty_did) {
+ if method.ident.name == sym::new {
+ self.suggestions
+ .insert(e.span, "HashSet::default()".to_string());
+ } else if method.ident.name == sym!(with_capacity) {
+ self.suggestions.insert(
+ e.span,
+ format!(
+ "HashSet::with_capacity_and_hasher({}, Default::default())",
+ snippet(self.cx, args[0].span, "capacity"),
+ ),
+ );
+ }
+ }
+ }
+ }
+
+ walk_expr(self, e);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/implicit_return.rs b/src/tools/clippy/clippy_lints/src/implicit_return.rs
new file mode 100644
index 000000000..a6610ade3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/implicit_return.rs
@@ -0,0 +1,250 @@
+use clippy_utils::{
+ diagnostics::span_lint_hir_and_then,
+ get_async_fn_body, is_async_fn,
+ source::{snippet_with_applicability, snippet_with_context, walk_span_to_context},
+ visitors::expr_visitor_no_bodies,
+};
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{FnKind, Visitor};
+use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, FnRetTy, HirId};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{Span, SyntaxContext};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for missing return statements at the end of a block.
+ ///
+ /// ### Why is this bad?
+ /// Omitting the `return` keyword is idiomatic Rust, but programmers coming from
+ /// other languages might prefer the explicitness of `return`. It is also easy to miss
+ /// the final returning expression, because the only difference from a statement is a
+ /// missing `;`. Especially in bigger code with multiple return paths, a `return`
+ /// keyword makes it easier to find the corresponding statements.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(x: usize) -> usize {
+ /// x
+ /// }
+ /// ```
+ /// add return
+ /// ```rust
+ /// fn foo(x: usize) -> usize {
+ /// return x;
+ /// }
+ /// ```
+ #[clippy::version = "1.33.0"]
+ pub IMPLICIT_RETURN,
+ restriction,
+ "use a return statement like `return expr` instead of an expression"
+}
+
+declare_lint_pass!(ImplicitReturn => [IMPLICIT_RETURN]);
+
+fn lint_return(cx: &LateContext<'_>, emission_place: HirId, span: Span) {
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_applicability(cx, span, "..", &mut app);
+ span_lint_hir_and_then(
+ cx,
+ IMPLICIT_RETURN,
+ emission_place,
+ span,
+ "missing `return` statement",
+ |diag| {
+ diag.span_suggestion(span, "add `return` as shown", format!("return {}", snip), app);
+ },
+ );
+}
+
+fn lint_break(cx: &LateContext<'_>, emission_place: HirId, break_span: Span, expr_span: Span) {
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_context(cx, expr_span, break_span.ctxt(), "..", &mut app).0;
+ span_lint_hir_and_then(
+ cx,
+ IMPLICIT_RETURN,
+ emission_place,
+ break_span,
+ "missing `return` statement",
+ |diag| {
+ diag.span_suggestion(
+ break_span,
+ "change `break` to `return` as shown",
+ format!("return {}", snip),
+ app,
+ );
+ },
+ );
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+enum LintLocation {
+ /// The lint was applied to a parent expression.
+ Parent,
+ /// The lint was applied to this expression, a child, or not applied.
+ Inner,
+}
+impl LintLocation {
+ fn still_parent(self, b: bool) -> Self {
+ if b { self } else { Self::Inner }
+ }
+
+ fn is_parent(self) -> bool {
+ self == Self::Parent
+ }
+}
+
+// Gets the call site if the span is in a child context. Otherwise returns `None`.
+fn get_call_site(span: Span, ctxt: SyntaxContext) -> Option<Span> {
+ (span.ctxt() != ctxt).then(|| walk_span_to_context(span, ctxt).unwrap_or(span))
+}
+
+fn lint_implicit_returns(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ // The context of the function body.
+ ctxt: SyntaxContext,
+ // The span of the call site if the expression comes from a macro expansion.
+ call_site_span: Option<Span>,
+) -> LintLocation {
+ match expr.kind {
+ ExprKind::Block(
+ Block {
+ expr: Some(block_expr), ..
+ },
+ _,
+ ) => lint_implicit_returns(
+ cx,
+ block_expr,
+ ctxt,
+ call_site_span.or_else(|| get_call_site(block_expr.span, ctxt)),
+ )
+ .still_parent(call_site_span.is_some()),
+
+ ExprKind::If(_, then_expr, Some(else_expr)) => {
+ // Both `then_expr` and `else_expr` are required to be blocks in the same context as the `if`. Don't
+ // bother checking.
+ let res = lint_implicit_returns(cx, then_expr, ctxt, call_site_span).still_parent(call_site_span.is_some());
+ if res.is_parent() {
+ // The return was added as a parent of this if expression.
+ return res;
+ }
+ lint_implicit_returns(cx, else_expr, ctxt, call_site_span).still_parent(call_site_span.is_some())
+ },
+
+ ExprKind::Match(_, arms, _) => {
+ for arm in arms {
+ let res = lint_implicit_returns(
+ cx,
+ arm.body,
+ ctxt,
+ call_site_span.or_else(|| get_call_site(arm.body.span, ctxt)),
+ )
+ .still_parent(call_site_span.is_some());
+ if res.is_parent() {
+ // The return was added as a parent of this match expression.
+ return res;
+ }
+ }
+ LintLocation::Inner
+ },
+
+ ExprKind::Loop(block, ..) => {
+ let mut add_return = false;
+ expr_visitor_no_bodies(|e| {
+ if let ExprKind::Break(dest, sub_expr) = e.kind {
+ if dest.target_id.ok() == Some(expr.hir_id) {
+ if call_site_span.is_none() && e.span.ctxt() == ctxt {
+ // At this point sub_expr can be `None` in async functions which either diverge, or return
+ // the unit type.
+ if let Some(sub_expr) = sub_expr {
+ lint_break(cx, e.hir_id, e.span, sub_expr.span);
+ }
+ } else {
+ // the break expression is from a macro call, add a return to the loop
+ add_return = true;
+ }
+ }
+ }
+ true
+ })
+ .visit_block(block);
+ if add_return {
+ #[expect(clippy::option_if_let_else)]
+ if let Some(span) = call_site_span {
+ lint_return(cx, expr.hir_id, span);
+ LintLocation::Parent
+ } else {
+ lint_return(cx, expr.hir_id, expr.span);
+ LintLocation::Inner
+ }
+ } else {
+ LintLocation::Inner
+ }
+ },
+
+ // If expressions without an else clause, and blocks without a final expression can only be the final expression
+ // if they are divergent, or return the unit type.
+ ExprKind::If(_, _, None) | ExprKind::Block(Block { expr: None, .. }, _) | ExprKind::Ret(_) => {
+ LintLocation::Inner
+ },
+
+ // Any divergent expression doesn't need a return statement.
+ ExprKind::MethodCall(..)
+ | ExprKind::Call(..)
+ | ExprKind::Binary(..)
+ | ExprKind::Unary(..)
+ | ExprKind::Index(..)
+ if cx.typeck_results().expr_ty(expr).is_never() =>
+ {
+ LintLocation::Inner
+ },
+
+ _ =>
+ {
+ #[expect(clippy::option_if_let_else)]
+ if let Some(span) = call_site_span {
+ lint_return(cx, expr.hir_id, span);
+ LintLocation::Parent
+ } else {
+ lint_return(cx, expr.hir_id, expr.span);
+ LintLocation::Inner
+ }
+ },
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for ImplicitReturn {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ _: HirId,
+ ) {
+ if (!matches!(kind, FnKind::Closure) && matches!(decl.output, FnRetTy::DefaultReturn(_)))
+ || span.ctxt() != body.value.span.ctxt()
+ || in_external_macro(cx.sess(), span)
+ {
+ return;
+ }
+
+ let res_ty = cx.typeck_results().expr_ty(&body.value);
+ if res_ty.is_unit() || res_ty.is_never() {
+ return;
+ }
+
+ let expr = if is_async_fn(kind) {
+ match get_async_fn_body(cx.tcx, body) {
+ Some(e) => e,
+ None => return,
+ }
+ } else {
+ &body.value
+ };
+ lint_implicit_returns(cx, expr, expr.span.ctxt(), None);
+ }
+}
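
A minimal, self-contained sketch (hypothetical function names) of what `IMPLICIT_RETURN` reports and what its machine-applicable suggestion produces:

```rust
// Flagged: the function ends in a bare trailing expression.
fn add_one(x: usize) -> usize {
    x + 1
}

// After the suggestion "add `return` as shown" has been applied.
fn add_one_explicit(x: usize) -> usize {
    return x + 1;
}

fn main() {
    assert_eq!(add_one(1), add_one_explicit(1));
}
```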
diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
new file mode 100644
index 000000000..46654bc61
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
@@ -0,0 +1,176 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::{higher, peel_blocks_with_stmt, SpanlessEq};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for implicit saturating subtraction.
+ ///
+ /// ### Why is this bad?
+ /// Simplicity and readability. Instead we can easily use the built-in `saturating_sub` method.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let end: u32 = 10;
+ /// # let start: u32 = 5;
+ /// let mut i: u32 = end - start;
+ ///
+ /// if i != 0 {
+ /// i -= 1;
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let end: u32 = 10;
+ /// # let start: u32 = 5;
+ /// let mut i: u32 = end - start;
+ ///
+ /// i = i.saturating_sub(1);
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub IMPLICIT_SATURATING_SUB,
+ pedantic,
+ "Perform saturating subtraction instead of implicitly checking lower bound of data type"
+}
+
+declare_lint_pass!(ImplicitSaturatingSub => [IMPLICIT_SATURATING_SUB]);
+
+impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingSub {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+ if_chain! {
+ if let Some(higher::If { cond, then, r#else: None }) = higher::If::hir(expr);
+
+ // Check if the conditional expression is a binary operation
+ if let ExprKind::Binary(ref cond_op, cond_left, cond_right) = cond.kind;
+
+ // Ensure that the binary operator is >, !=, or <
+ if BinOpKind::Ne == cond_op.node || BinOpKind::Gt == cond_op.node || BinOpKind::Lt == cond_op.node;
+
+ // Check if assign operation is done
+ if let Some(target) = subtracts_one(cx, then);
+
+ // Extracting out the variable name
+ if let ExprKind::Path(QPath::Resolved(_, ares_path)) = target.kind;
+
+ then {
+ // Handle symmetric conditions in the if statement
+ let (cond_var, cond_num_val) = if SpanlessEq::new(cx).eq_expr(cond_left, target) {
+ if BinOpKind::Gt == cond_op.node || BinOpKind::Ne == cond_op.node {
+ (cond_left, cond_right)
+ } else {
+ return;
+ }
+ } else if SpanlessEq::new(cx).eq_expr(cond_right, target) {
+ if BinOpKind::Lt == cond_op.node || BinOpKind::Ne == cond_op.node {
+ (cond_right, cond_left)
+ } else {
+ return;
+ }
+ } else {
+ return;
+ };
+
+ // Check if the variable in the condition statement is an integer
+ if !cx.typeck_results().expr_ty(cond_var).is_integral() {
+ return;
+ }
+
+ // Get the variable name
+ let var_name = ares_path.segments[0].ident.name.as_str();
+ match cond_num_val.kind {
+ ExprKind::Lit(ref cond_lit) => {
+ // Check if the constant is zero
+ if let LitKind::Int(0, _) = cond_lit.node {
+ if cx.typeck_results().expr_ty(cond_left).is_signed() {
+ } else {
+ print_lint_and_sugg(cx, var_name, expr);
+ };
+ }
+ },
+ ExprKind::Path(QPath::TypeRelative(_, name)) => {
+ if_chain! {
+ if name.ident.as_str() == "MIN";
+ if let Some(const_id) = cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id);
+ if let Some(impl_id) = cx.tcx.impl_of_method(const_id);
+ if let None = cx.tcx.impl_trait_ref(impl_id); // An inherent impl
+ if cx.tcx.type_of(impl_id).is_integral();
+ then {
+ print_lint_and_sugg(cx, var_name, expr)
+ }
+ }
+ },
+ ExprKind::Call(func, []) => {
+ if_chain! {
+ if let ExprKind::Path(QPath::TypeRelative(_, name)) = func.kind;
+ if name.ident.as_str() == "min_value";
+ if let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id);
+ if let Some(impl_id) = cx.tcx.impl_of_method(func_id);
+ if let None = cx.tcx.impl_trait_ref(impl_id); // An inherent impl
+ if cx.tcx.type_of(impl_id).is_integral();
+ then {
+ print_lint_and_sugg(cx, var_name, expr)
+ }
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+ }
+}
+
+fn subtracts_one<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<&'a Expr<'a>> {
+ match peel_blocks_with_stmt(expr).kind {
+ ExprKind::AssignOp(ref op1, target, value) => {
+ if_chain! {
+ if BinOpKind::Sub == op1.node;
+ // Check if literal being subtracted is one
+ if let ExprKind::Lit(ref lit1) = value.kind;
+ if let LitKind::Int(1, _) = lit1.node;
+ then {
+ Some(target)
+ } else {
+ None
+ }
+ }
+ },
+ ExprKind::Assign(target, value, _) => {
+ if_chain! {
+ if let ExprKind::Binary(ref op1, left1, right1) = value.kind;
+ if BinOpKind::Sub == op1.node;
+
+ if SpanlessEq::new(cx).eq_expr(left1, target);
+
+ if let ExprKind::Lit(ref lit1) = right1.kind;
+ if let LitKind::Int(1, _) = lit1.node;
+ then {
+ Some(target)
+ } else {
+ None
+ }
+ }
+ },
+ _ => None,
+ }
+}
+
+fn print_lint_and_sugg(cx: &LateContext<'_>, var_name: &str, expr: &Expr<'_>) {
+ span_lint_and_sugg(
+ cx,
+ IMPLICIT_SATURATING_SUB,
+ expr.span,
+ "implicitly performing saturating subtraction",
+ "try",
+ format!("{} = {}.saturating_sub({});", var_name, var_name, '1'),
+ Applicability::MachineApplicable,
+ );
+}
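
A minimal before/after sketch (hypothetical function names) of the guarded decrement this lint recognizes and the `saturating_sub` form it suggests:

```rust
// Flagged: an implicit saturating subtraction guarded by a zero check.
fn countdown(mut i: u32) -> u32 {
    if i != 0 {
        i -= 1;
    }
    i
}

// The whole `if` is replaced by the suggestion `i = i.saturating_sub(1);`.
fn countdown_saturating(mut i: u32) -> u32 {
    i = i.saturating_sub(1);
    i
}

fn main() {
    assert_eq!(countdown(0), countdown_saturating(0)); // both stay at 0
    assert_eq!(countdown(5), countdown_saturating(5)); // both give 4
}
```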
diff --git a/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs b/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs
new file mode 100644
index 000000000..14b22d2b5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs
@@ -0,0 +1,136 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use if_chain::if_chain;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::Symbol;
+use std::fmt::Write as _;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for struct constructors where all fields are shorthand and
+ /// the order of the field init shorthand in the constructor is inconsistent
+ /// with the order in the struct definition.
+ ///
+ /// ### Why is this bad?
+ /// Since the order of fields in a constructor doesn't affect the
+ /// resulting instance, as the example below indicates,
+ ///
+ /// ```rust
+ /// #[derive(Debug, PartialEq, Eq)]
+ /// struct Foo {
+ /// x: i32,
+ /// y: i32,
+ /// }
+ /// let x = 1;
+ /// let y = 2;
+ ///
+ /// // This assertion never fails:
+ /// assert_eq!(Foo { x, y }, Foo { y, x });
+ /// ```
+ ///
+ /// inconsistent order can be confusing and decreases readability and consistency.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo {
+ /// x: i32,
+ /// y: i32,
+ /// }
+ /// let x = 1;
+ /// let y = 2;
+ ///
+ /// Foo { y, x };
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # struct Foo {
+ /// # x: i32,
+ /// # y: i32,
+ /// # }
+ /// # let x = 1;
+ /// # let y = 2;
+ /// Foo { x, y };
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub INCONSISTENT_STRUCT_CONSTRUCTOR,
+ pedantic,
+ "the order of the field init shorthand is inconsistent with the order in the struct definition"
+}
+
+declare_lint_pass!(InconsistentStructConstructor => [INCONSISTENT_STRUCT_CONSTRUCTOR]);
+
+impl<'tcx> LateLintPass<'tcx> for InconsistentStructConstructor {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if !expr.span.from_expansion();
+ if let ExprKind::Struct(qpath, fields, base) = expr.kind;
+ let ty = cx.typeck_results().expr_ty(expr);
+ if let Some(adt_def) = ty.ty_adt_def();
+ if adt_def.is_struct();
+ if let Some(variant) = adt_def.variants().iter().next();
+ if fields.iter().all(|f| f.is_shorthand);
+ then {
+ let mut def_order_map = FxHashMap::default();
+ for (idx, field) in variant.fields.iter().enumerate() {
+ def_order_map.insert(field.name, idx);
+ }
+
+ if is_consistent_order(fields, &def_order_map) {
+ return;
+ }
+
+ let mut ordered_fields: Vec<_> = fields.iter().map(|f| f.ident.name).collect();
+ ordered_fields.sort_unstable_by_key(|id| def_order_map[id]);
+
+ let mut fields_snippet = String::new();
+ let (last_ident, idents) = ordered_fields.split_last().unwrap();
+ for ident in idents {
+ let _ = write!(fields_snippet, "{}, ", ident);
+ }
+ fields_snippet.push_str(&last_ident.to_string());
+
+ let base_snippet = if let Some(base) = base {
+ format!(", ..{}", snippet(cx, base.span, ".."))
+ } else {
+ String::new()
+ };
+
+ let sugg = format!("{} {{ {}{} }}",
+ snippet(cx, qpath.span(), ".."),
+ fields_snippet,
+ base_snippet,
+ );
+
+ span_lint_and_sugg(
+ cx,
+ INCONSISTENT_STRUCT_CONSTRUCTOR,
+ expr.span,
+ "struct constructor field order is inconsistent with struct definition field order",
+ "try",
+ sugg,
+ Applicability::MachineApplicable,
+ )
+ }
+ }
+ }
+}
+
+// Check whether the order of the fields in the constructor is consistent with the order in the
+// definition.
+fn is_consistent_order<'tcx>(fields: &'tcx [hir::ExprField<'tcx>], def_order_map: &FxHashMap<Symbol, usize>) -> bool {
+ let mut cur_idx = usize::MIN;
+ for f in fields {
+ let next_idx = def_order_map[&f.ident.name];
+ if cur_idx > next_idx {
+ return false;
+ }
+ cur_idx = next_idx;
+ }
+
+ true
+}
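
The `is_consistent_order` check above amounts to requiring that the definition indices of the shorthand fields never decrease; a stand-alone sketch of the same idea (hypothetical helper, not part of the patch):

```rust
// Shorthand fields are consistent exactly when their definition indices are non-decreasing.
fn is_non_decreasing(indices: &[usize]) -> bool {
    indices.windows(2).all(|w| w[0] <= w[1])
}

fn main() {
    assert!(is_non_decreasing(&[0, 1, 2])); // `Foo { x, y, z }` in definition order
    assert!(!is_non_decreasing(&[1, 0]));   // `Foo { y, x }` would be linted
}
```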
diff --git a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
new file mode 100644
index 000000000..d0c6495e3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
@@ -0,0 +1,275 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher::IfLet;
+use clippy_utils::ty::is_copy;
+use clippy_utils::{is_expn_of, is_lint_allowed, meets_msrv, msrvs, path_to_local};
+use if_chain::if_chain;
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{symbol::Ident, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// The lint checks for slice bindings in patterns that are only used to
+ /// access individual slice values.
+ ///
+ /// ### Why is this bad?
+ /// Accessing slice values using indices can lead to panics. Using refutable
+ /// patterns avoids these. Binding the individual values also improves
+ /// readability, as they can be given names.
+ ///
+ /// ### Limitations
+ /// This lint currently only checks for immutable access inside `if let`
+ /// patterns.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ ///
+ /// if let Some(slice) = slice {
+ /// println!("{}", slice[0]);
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ ///
+ /// if let Some(&[first, ..]) = slice {
+ /// println!("{}", first);
+ /// }
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub INDEX_REFUTABLE_SLICE,
+ nursery,
+ "avoid indexing on slices which could be destructed"
+}
+
+#[derive(Copy, Clone)]
+pub struct IndexRefutableSlice {
+ max_suggested_slice: u64,
+ msrv: Option<RustcVersion>,
+}
+
+impl IndexRefutableSlice {
+ pub fn new(max_suggested_slice_pattern_length: u64, msrv: Option<RustcVersion>) -> Self {
+ Self {
+ max_suggested_slice: max_suggested_slice_pattern_length,
+ msrv,
+ }
+ }
+}
+
+impl_lint_pass!(IndexRefutableSlice => [INDEX_REFUTABLE_SLICE]);
+
+impl<'tcx> LateLintPass<'tcx> for IndexRefutableSlice {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if !expr.span.from_expansion() || is_expn_of(expr.span, "if_chain").is_some();
+ if let Some(IfLet {let_pat, if_then, ..}) = IfLet::hir(cx, expr);
+ if !is_lint_allowed(cx, INDEX_REFUTABLE_SLICE, expr.hir_id);
+ if meets_msrv(self.msrv, msrvs::SLICE_PATTERNS);
+
+ let found_slices = find_slice_values(cx, let_pat);
+ if !found_slices.is_empty();
+ let filtered_slices = filter_lintable_slices(cx, found_slices, self.max_suggested_slice, if_then);
+ if !filtered_slices.is_empty();
+ then {
+ for slice in filtered_slices.values() {
+ lint_slice(cx, slice);
+ }
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+fn find_slice_values(cx: &LateContext<'_>, pat: &hir::Pat<'_>) -> FxIndexMap<hir::HirId, SliceLintInformation> {
+ let mut removed_pat: FxHashSet<hir::HirId> = FxHashSet::default();
+ let mut slices: FxIndexMap<hir::HirId, SliceLintInformation> = FxIndexMap::default();
+ pat.walk_always(|pat| {
+ if let hir::PatKind::Binding(binding, value_hir_id, ident, sub_pat) = pat.kind {
+ // We'll just ignore mut and ref mut for simplicity's sake right now
+ if let hir::BindingAnnotation::Mutable | hir::BindingAnnotation::RefMut = binding {
+ return;
+ }
+
+ // This block catches bindings with sub patterns. It would be hard to build a correct suggestion
+ // for them and it's likely that the user knows what they are doing in such a case.
+ if removed_pat.contains(&value_hir_id) {
+ return;
+ }
+ if sub_pat.is_some() {
+ removed_pat.insert(value_hir_id);
+ slices.remove(&value_hir_id);
+ return;
+ }
+
+ let bound_ty = cx.typeck_results().node_type(pat.hir_id);
+ if let ty::Slice(inner_ty) | ty::Array(inner_ty, _) = bound_ty.peel_refs().kind() {
+ // The values need to use the `ref` keyword if they can't be copied.
+ // This will need to be adjusted if the lint wants to support mutable access in the future
+ let src_is_ref = bound_ty.is_ref() && binding != hir::BindingAnnotation::Ref;
+ let needs_ref = !(src_is_ref || is_copy(cx, *inner_ty));
+
+ let slice_info = slices
+ .entry(value_hir_id)
+ .or_insert_with(|| SliceLintInformation::new(ident, needs_ref));
+ slice_info.pattern_spans.push(pat.span);
+ }
+ }
+ });
+
+ slices
+}
+
+fn lint_slice(cx: &LateContext<'_>, slice: &SliceLintInformation) {
+ let used_indices = slice
+ .index_use
+ .iter()
+ .map(|(index, _)| *index)
+ .collect::<FxHashSet<_>>();
+
+ let value_name = |index| format!("{}_{}", slice.ident.name, index);
+
+ if let Some(max_index) = used_indices.iter().max() {
+ let opt_ref = if slice.needs_ref { "ref " } else { "" };
+ let pat_sugg_idents = (0..=*max_index)
+ .map(|index| {
+ if used_indices.contains(&index) {
+ format!("{}{}", opt_ref, value_name(index))
+ } else {
+ "_".to_string()
+ }
+ })
+ .collect::<Vec<_>>();
+ let pat_sugg = format!("[{}, ..]", pat_sugg_idents.join(", "));
+
+ span_lint_and_then(
+ cx,
+ INDEX_REFUTABLE_SLICE,
+ slice.ident.span,
+ "this binding can be a slice pattern to avoid indexing",
+ |diag| {
+ diag.multipart_suggestion(
+ "try using a slice pattern here",
+ slice
+ .pattern_spans
+ .iter()
+ .map(|span| (*span, pat_sugg.clone()))
+ .collect(),
+ Applicability::MaybeIncorrect,
+ );
+
+ diag.multipart_suggestion(
+ "and replace the index expressions here",
+ slice
+ .index_use
+ .iter()
+ .map(|(index, span)| (*span, value_name(*index)))
+ .collect(),
+ Applicability::MaybeIncorrect,
+ );
+
+ // The lint message doesn't contain a warning about the removed index expression,
+ // since `filter_lintable_slices` will only return slices where all access indices
+ // are known at compile time. Therefore, they can be removed without side effects.
+ },
+ );
+ }
+}
+
+#[derive(Debug)]
+struct SliceLintInformation {
+ ident: Ident,
+ needs_ref: bool,
+ pattern_spans: Vec<Span>,
+ index_use: Vec<(u64, Span)>,
+}
+
+impl SliceLintInformation {
+ fn new(ident: Ident, needs_ref: bool) -> Self {
+ Self {
+ ident,
+ needs_ref,
+ pattern_spans: Vec::new(),
+ index_use: Vec::new(),
+ }
+ }
+}
+
+fn filter_lintable_slices<'a, 'tcx>(
+ cx: &'a LateContext<'tcx>,
+ slice_lint_info: FxIndexMap<hir::HirId, SliceLintInformation>,
+ max_suggested_slice: u64,
+ scope: &'tcx hir::Expr<'tcx>,
+) -> FxIndexMap<hir::HirId, SliceLintInformation> {
+ let mut visitor = SliceIndexLintingVisitor {
+ cx,
+ slice_lint_info,
+ max_suggested_slice,
+ };
+
+ intravisit::walk_expr(&mut visitor, scope);
+
+ visitor.slice_lint_info
+}
+
+struct SliceIndexLintingVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ slice_lint_info: FxIndexMap<hir::HirId, SliceLintInformation>,
+ max_suggested_slice: u64,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for SliceIndexLintingVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
+ if let Some(local_id) = path_to_local(expr) {
+ let Self {
+ cx,
+ ref mut slice_lint_info,
+ max_suggested_slice,
+ } = *self;
+
+ if_chain! {
+ // Check if this is even a local we're interested in
+ if let Some(use_info) = slice_lint_info.get_mut(&local_id);
+
+ let map = cx.tcx.hir();
+
+ // Checking for slice indexing
+ let parent_id = map.get_parent_node(expr.hir_id);
+ if let Some(hir::Node::Expr(parent_expr)) = map.find(parent_id);
+ if let hir::ExprKind::Index(_, index_expr) = parent_expr.kind;
+ if let Some((Constant::Int(index_value), _)) = constant(cx, cx.typeck_results(), index_expr);
+ if let Ok(index_value) = index_value.try_into();
+ if index_value < max_suggested_slice;
+
+ // Make sure that this slice index is read only
+ let maybe_addrof_id = map.get_parent_node(parent_id);
+ if let Some(hir::Node::Expr(maybe_addrof_expr)) = map.find(maybe_addrof_id);
+ if let hir::ExprKind::AddrOf(_kind, hir::Mutability::Not, _inner_expr) = maybe_addrof_expr.kind;
+ then {
+ use_info.index_use.push((index_value, map.span(parent_expr.hir_id)));
+ return;
+ }
+ }
+
+ // The slice was used for something other than indexing
+ self.slice_lint_info.remove(&local_id);
+ }
+ intravisit::walk_expr(self, expr);
+ }
+}
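
A minimal sketch (hypothetical function names) of code this lint flags and the shape it takes after both multipart suggestions, including the generated `<ident>_<index>` value names:

```rust
// Flagged: `slice` is bound but only ever used with the constant index 0.
fn first_or_zero(slice: Option<&[u32]>) -> u32 {
    if let Some(slice) = slice {
        slice[0]
    } else {
        0
    }
}

// After the suggestions: the binding becomes a slice pattern and the index
// expression becomes the generated name `slice_0`.
fn first_or_zero_pattern(slice: Option<&[u32]>) -> u32 {
    if let Some(&[slice_0, ..]) = slice {
        slice_0
    } else {
        0
    }
}

fn main() {
    assert_eq!(first_or_zero(Some(&[7, 8])), first_or_zero_pattern(Some(&[7, 8])));
}
```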
diff --git a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
new file mode 100644
index 000000000..4a375752e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
@@ -0,0 +1,205 @@
+//! lint on indexing and slicing operations
+
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::higher;
+use rustc_ast::ast::RangeLimits;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for out of bounds array indexing with a constant
+ /// index.
+ ///
+ /// ### Why is this bad?
+ /// This will always panic at runtime.
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// # #![allow(const_err)]
+ /// let x = [1, 2, 3, 4];
+ ///
+ /// x[9];
+ /// &x[2..9];
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = [1, 2, 3, 4];
+ /// // Index within bounds
+ ///
+ /// x[0];
+ /// x[3];
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OUT_OF_BOUNDS_INDEXING,
+ correctness,
+ "out of bounds constant indexing"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of indexing or slicing. Arrays are special cases: this lint
+ /// does not report on arrays when it can tell that the slicing operation is in bounds, and it does not
+ /// lint on constant `usize` indexing on arrays because that is handled by rustc's `const_err` lint.
+ ///
+ /// ### Why is this bad?
+ /// Indexing and slicing can panic at runtime and there are
+ /// safe alternatives.
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// // Vector
+ /// let x = vec![0; 5];
+ ///
+ /// x[2];
+ /// &x[2..100];
+ ///
+ /// // Array
+ /// let y = [0, 1, 2, 3];
+ ///
+ /// &y[10..100];
+ /// &y[10..];
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # #![allow(unused)]
+ ///
+ /// # let x = vec![0; 5];
+ /// # let y = [0, 1, 2, 3];
+ /// x.get(2);
+ /// x.get(2..100);
+ ///
+ /// y.get(10);
+ /// y.get(10..100);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INDEXING_SLICING,
+ restriction,
+ "indexing/slicing usage"
+}
+
+declare_lint_pass!(IndexingSlicing => [INDEXING_SLICING, OUT_OF_BOUNDS_INDEXING]);
+
+impl<'tcx> LateLintPass<'tcx> for IndexingSlicing {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if cx.tcx.hir().is_inside_const_context(expr.hir_id) {
+ return;
+ }
+
+ if let ExprKind::Index(array, index) = &expr.kind {
+ let ty = cx.typeck_results().expr_ty(array).peel_refs();
+ if let Some(range) = higher::Range::hir(index) {
+ // Ranged indexes, i.e., &x[n..m], &x[n..], &x[..n] and &x[..]
+ if let ty::Array(_, s) = ty.kind() {
+ let size: u128 = if let Some(size) = s.try_eval_usize(cx.tcx, cx.param_env) {
+ size.into()
+ } else {
+ return;
+ };
+
+ let const_range = to_const_range(cx, range, size);
+
+ if let (Some(start), _) = const_range {
+ if start > size {
+ span_lint(
+ cx,
+ OUT_OF_BOUNDS_INDEXING,
+ range.start.map_or(expr.span, |start| start.span),
+ "range is out of bounds",
+ );
+ return;
+ }
+ }
+
+ if let (_, Some(end)) = const_range {
+ if end > size {
+ span_lint(
+ cx,
+ OUT_OF_BOUNDS_INDEXING,
+ range.end.map_or(expr.span, |end| end.span),
+ "range is out of bounds",
+ );
+ return;
+ }
+ }
+
+ if let (Some(_), Some(_)) = const_range {
+ // early return because both start and end are constants
+ // and we have proven above that they are in bounds
+ return;
+ }
+ }
+
+ let help_msg = match (range.start, range.end) {
+ (None, Some(_)) => "consider using `.get(..n)` or `.get_mut(..n)` instead",
+ (Some(_), None) => "consider using `.get(n..)` or `.get_mut(n..)` instead",
+ (Some(_), Some(_)) => "consider using `.get(n..m)` or `.get_mut(n..m)` instead",
+ (None, None) => return, // [..] is ok.
+ };
+
+ span_lint_and_help(cx, INDEXING_SLICING, expr.span, "slicing may panic", None, help_msg);
+ } else {
+ // Catchall non-range index, i.e., [n] or [n << m]
+ if let ty::Array(..) = ty.kind() {
+ // Index is a const block.
+ if let ExprKind::ConstBlock(..) = index.kind {
+ return;
+ }
+ // Index is a constant uint.
+ if let Some(..) = constant(cx, cx.typeck_results(), index) {
+ // Let rustc's `const_err` lint handle constant `usize` indexing on arrays.
+ return;
+ }
+ }
+
+ span_lint_and_help(
+ cx,
+ INDEXING_SLICING,
+ expr.span,
+ "indexing may panic",
+ None,
+ "consider using `.get(n)` or `.get_mut(n)` instead",
+ );
+ }
+ }
+ }
+}
+
+/// Returns a tuple of options with the start and end (exclusive) values of
+/// the range. If the start or end is not constant, None is returned.
+fn to_const_range<'tcx>(
+ cx: &LateContext<'tcx>,
+ range: higher::Range<'_>,
+ array_size: u128,
+) -> (Option<u128>, Option<u128>) {
+ let s = range
+ .start
+ .map(|expr| constant(cx, cx.typeck_results(), expr).map(|(c, _)| c));
+ let start = match s {
+ Some(Some(Constant::Int(x))) => Some(x),
+ Some(_) => None,
+ None => Some(0),
+ };
+
+ let e = range
+ .end
+ .map(|expr| constant(cx, cx.typeck_results(), expr).map(|(c, _)| c));
+ let end = match e {
+ Some(Some(Constant::Int(x))) => {
+ if range.limits == RangeLimits::Closed {
+ Some(x + 1)
+ } else {
+ Some(x)
+ }
+ },
+ Some(_) => None,
+ None => Some(array_size),
+ };
+
+ (start, end)
+}
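
To make the bound handling concrete, a stand-alone sketch (hypothetical helper, not part of the patch) of the same arithmetic `to_const_range` performs for fully-constant bounds:

```rust
// Exclusive bounds for a constant range, mirroring `to_const_range`:
// a missing start means 0, a missing end means the array size, and a
// closed range bumps the end by one before the bounds check.
fn const_bounds(start: Option<u128>, end: Option<u128>, inclusive: bool, size: u128) -> (u128, u128) {
    let start = start.unwrap_or(0);
    let end = match end {
        Some(e) if inclusive => e + 1,
        Some(e) => e,
        None => size,
    };
    (start, end)
}

fn main() {
    assert_eq!(const_bounds(Some(2), Some(4), true, 4), (2, 5)); // `&x[2..=4]` on `[u8; 4]` is out of bounds
    assert_eq!(const_bounds(None, None, false, 4), (0, 4));      // `&x[..]` is always fine
}
```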
diff --git a/src/tools/clippy/clippy_lints/src/infinite_iter.rs b/src/tools/clippy/clippy_lints/src/infinite_iter.rs
new file mode 100644
index 000000000..01c7eef4e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/infinite_iter.rs
@@ -0,0 +1,260 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use clippy_utils::{higher, match_def_path, path_def_id, paths};
+use rustc_hir::{BorrowKind, Closure, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::{sym, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for iteration that is guaranteed to be infinite.
+ ///
+ /// ### Why is this bad?
+ /// While there may be places where this is acceptable
+ /// (e.g., in event streams), in most cases this is simply an error.
+ ///
+ /// ### Example
+ /// ```no_run
+ /// use std::iter;
+ ///
+ /// iter::repeat(1_u8).collect::<Vec<_>>();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INFINITE_ITER,
+ correctness,
+ "infinite iteration"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for iteration that may be infinite.
+ ///
+ /// ### Why is this bad?
+ /// While there may be places where this is acceptable
+ /// (e.g., in event streams), in most cases this is simply an error.
+ ///
+ /// ### Known problems
+ /// The code may have a condition to stop iteration, but
+ /// this lint is not clever enough to analyze it.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let infinite_iter = 0..;
+ /// # #[allow(unused)]
+ /// [0..].iter().zip(infinite_iter.take_while(|x| *x > 5));
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MAYBE_INFINITE_ITER,
+ pedantic,
+ "possible infinite iteration"
+}
+
+declare_lint_pass!(InfiniteIter => [INFINITE_ITER, MAYBE_INFINITE_ITER]);
+
+impl<'tcx> LateLintPass<'tcx> for InfiniteIter {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let (lint, msg) = match complete_infinite_iter(cx, expr) {
+ Infinite => (INFINITE_ITER, "infinite iteration detected"),
+ MaybeInfinite => (MAYBE_INFINITE_ITER, "possible infinite iteration detected"),
+ Finite => {
+ return;
+ },
+ };
+ span_lint(cx, lint, expr.span, msg);
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum Finiteness {
+ Infinite,
+ MaybeInfinite,
+ Finite,
+}
+
+use self::Finiteness::{Finite, Infinite, MaybeInfinite};
+
+impl Finiteness {
+ #[must_use]
+ fn and(self, b: Self) -> Self {
+ match (self, b) {
+ (Finite, _) | (_, Finite) => Finite,
+ (MaybeInfinite, _) | (_, MaybeInfinite) => MaybeInfinite,
+ _ => Infinite,
+ }
+ }
+
+ #[must_use]
+ fn or(self, b: Self) -> Self {
+ match (self, b) {
+ (Infinite, _) | (_, Infinite) => Infinite,
+ (MaybeInfinite, _) | (_, MaybeInfinite) => MaybeInfinite,
+ _ => Finite,
+ }
+ }
+}
+
+impl From<bool> for Finiteness {
+ #[must_use]
+ fn from(b: bool) -> Self {
+ if b { Infinite } else { Finite }
+ }
+}
+
+/// This tells us what to look for to know if the iterator returned by
+/// this method is infinite
+#[derive(Copy, Clone)]
+enum Heuristic {
+ /// infinite no matter what
+ Always,
+ /// infinite if the first argument is
+ First,
+ /// infinite if any of the supplied arguments is
+ Any,
+ /// infinite if all of the supplied arguments are
+ All,
+}
+
+use self::Heuristic::{All, Always, Any, First};
+
+/// a slice of (method name, number of args, heuristic, bounds) tuples
+/// that will be used to determine whether the method in question
+/// returns an infinite or possibly infinite iterator. The finiteness
+ /// is an upper bound; some methods, such as `take_while`, can at worst
+ /// return a possibly infinite iterator.
+const HEURISTICS: [(&str, usize, Heuristic, Finiteness); 19] = [
+ ("zip", 2, All, Infinite),
+ ("chain", 2, Any, Infinite),
+ ("cycle", 1, Always, Infinite),
+ ("map", 2, First, Infinite),
+ ("by_ref", 1, First, Infinite),
+ ("cloned", 1, First, Infinite),
+ ("rev", 1, First, Infinite),
+ ("inspect", 1, First, Infinite),
+ ("enumerate", 1, First, Infinite),
+ ("peekable", 2, First, Infinite),
+ ("fuse", 1, First, Infinite),
+ ("skip", 2, First, Infinite),
+ ("skip_while", 1, First, Infinite),
+ ("filter", 2, First, Infinite),
+ ("filter_map", 2, First, Infinite),
+ ("flat_map", 2, First, Infinite),
+ ("unzip", 1, First, Infinite),
+ ("take_while", 2, First, MaybeInfinite),
+ ("scan", 3, First, MaybeInfinite),
+];
+
+fn is_infinite(cx: &LateContext<'_>, expr: &Expr<'_>) -> Finiteness {
+ match expr.kind {
+ ExprKind::MethodCall(method, args, _) => {
+ for &(name, len, heuristic, cap) in &HEURISTICS {
+ if method.ident.name.as_str() == name && args.len() == len {
+ return (match heuristic {
+ Always => Infinite,
+ First => is_infinite(cx, &args[0]),
+ Any => is_infinite(cx, &args[0]).or(is_infinite(cx, &args[1])),
+ All => is_infinite(cx, &args[0]).and(is_infinite(cx, &args[1])),
+ })
+ .and(cap);
+ }
+ }
+ if method.ident.name == sym!(flat_map) && args.len() == 2 {
+ if let ExprKind::Closure(&Closure { body, .. }) = args[1].kind {
+ let body = cx.tcx.hir().body(body);
+ return is_infinite(cx, &body.value);
+ }
+ }
+ Finite
+ },
+ ExprKind::Block(block, _) => block.expr.as_ref().map_or(Finite, |e| is_infinite(cx, e)),
+ ExprKind::Box(e) | ExprKind::AddrOf(BorrowKind::Ref, _, e) => is_infinite(cx, e),
+ ExprKind::Call(path, _) => path_def_id(cx, path)
+ .map_or(false, |id| match_def_path(cx, id, &paths::ITER_REPEAT))
+ .into(),
+ ExprKind::Struct(..) => higher::Range::hir(expr).map_or(false, |r| r.end.is_none()).into(),
+ _ => Finite,
+ }
+}
+
+/// the names and argument lengths of methods that *may* exhaust their
+/// iterators
+const POSSIBLY_COMPLETING_METHODS: [(&str, usize); 6] = [
+ ("find", 2),
+ ("rfind", 2),
+ ("position", 2),
+ ("rposition", 2),
+ ("any", 2),
+ ("all", 2),
+];
+
+/// the names and argument lengths of methods that *always* exhaust
+/// their iterators
+const COMPLETING_METHODS: [(&str, usize); 12] = [
+ ("count", 1),
+ ("fold", 3),
+ ("for_each", 2),
+ ("partition", 2),
+ ("max", 1),
+ ("max_by", 2),
+ ("max_by_key", 2),
+ ("min", 1),
+ ("min_by", 2),
+ ("min_by_key", 2),
+ ("sum", 1),
+ ("product", 1),
+];
+
+/// the paths of types that are known to be infinitely allocating
+const INFINITE_COLLECTORS: &[Symbol] = &[
+ sym::BinaryHeap,
+ sym::BTreeMap,
+ sym::BTreeSet,
+ sym::HashMap,
+ sym::HashSet,
+ sym::LinkedList,
+ sym::Vec,
+ sym::VecDeque,
+];
+
+fn complete_infinite_iter(cx: &LateContext<'_>, expr: &Expr<'_>) -> Finiteness {
+ match expr.kind {
+ ExprKind::MethodCall(method, args, _) => {
+ for &(name, len) in &COMPLETING_METHODS {
+ if method.ident.name.as_str() == name && args.len() == len {
+ return is_infinite(cx, &args[0]);
+ }
+ }
+ for &(name, len) in &POSSIBLY_COMPLETING_METHODS {
+ if method.ident.name.as_str() == name && args.len() == len {
+ return MaybeInfinite.and(is_infinite(cx, &args[0]));
+ }
+ }
+ if method.ident.name == sym!(last) && args.len() == 1 {
+ let not_double_ended = cx
+ .tcx
+ .get_diagnostic_item(sym::DoubleEndedIterator)
+ .map_or(false, |id| {
+ !implements_trait(cx, cx.typeck_results().expr_ty(&args[0]), id, &[])
+ });
+ if not_double_ended {
+ return is_infinite(cx, &args[0]);
+ }
+ } else if method.ident.name == sym!(collect) {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if INFINITE_COLLECTORS
+ .iter()
+ .any(|diag_item| is_type_diagnostic_item(cx, ty, *diag_item))
+ {
+ return is_infinite(cx, &args[0]);
+ }
+ }
+ },
+ ExprKind::Binary(op, l, r) => {
+ if op.node.is_comparison() {
+ return is_infinite(cx, l).and(is_infinite(cx, r)).and(MaybeInfinite);
+ }
+ }, // TODO: ExprKind::Loop + Match
+ _ => (),
+ }
+ Finite
+}
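
A small, runnable sketch of how the two lint levels differ in practice (the truly infinite line is left commented out so the example terminates):

```rust
fn main() {
    // INFINITE_ITER: `repeat` is classified `Always` infinite and `collect`
    // into a `Vec` always exhausts its iterator.
    // std::iter::repeat(1_u8).collect::<Vec<_>>();

    // MAYBE_INFINITE_ITER only: `take_while` caps the heuristic at `MaybeInfinite`,
    // and here the iteration does in fact terminate.
    let bounded: Vec<u32> = (0..).take_while(|x| *x < 5).collect();
    assert_eq!(bounded, vec![0, 1, 2, 3, 4]);
}
```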
diff --git a/src/tools/clippy/clippy_lints/src/inherent_impl.rs b/src/tools/clippy/clippy_lints/src/inherent_impl.rs
new file mode 100644
index 000000000..c5abcc462
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/inherent_impl.rs
@@ -0,0 +1,139 @@
+//! lint on inherent implementations
+
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::is_lint_allowed;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::{def_id::LocalDefId, Item, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+use std::collections::hash_map::Entry;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for multiple inherent implementations of a struct
+ ///
+ /// ### Why is this bad?
+ /// Splitting the implementation of a type makes the code harder to navigate.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct X;
+ /// impl X {
+ /// fn one() {}
+ /// }
+ /// impl X {
+ /// fn other() {}
+ /// }
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```rust
+ /// struct X;
+ /// impl X {
+ /// fn one() {}
+ /// fn other() {}
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MULTIPLE_INHERENT_IMPL,
+ restriction,
+ "Multiple inherent impl that could be grouped"
+}
+
+declare_lint_pass!(MultipleInherentImpl => [MULTIPLE_INHERENT_IMPL]);
+
+impl<'tcx> LateLintPass<'tcx> for MultipleInherentImpl {
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ // Map from a type to its first impl block. Needed to distinguish generic arguments.
+ // e.g. `Foo<Bar>` and `Foo<Baz>`
+ let mut type_map = FxHashMap::default();
+ // List of spans to lint. (lint_span, first_span)
+ let mut lint_spans = Vec::new();
+
+ for (_, impl_ids) in cx
+ .tcx
+ .crate_inherent_impls(())
+ .inherent_impls
+ .iter()
+ .filter(|(&id, impls)| {
+ impls.len() > 1
+ // Check for `#[allow]` on the type definition
+ && !is_lint_allowed(
+ cx,
+ MULTIPLE_INHERENT_IMPL,
+ cx.tcx.hir().local_def_id_to_hir_id(id),
+ )
+ })
+ {
+ for impl_id in impl_ids.iter().map(|id| id.expect_local()) {
+ match type_map.entry(cx.tcx.type_of(impl_id)) {
+ Entry::Vacant(e) => {
+ // Store the id for the first impl block of this type. The span is retrieved lazily.
+ e.insert(IdOrSpan::Id(impl_id));
+ },
+ Entry::Occupied(mut e) => {
+ if let Some(span) = get_impl_span(cx, impl_id) {
+ let first_span = match *e.get() {
+ IdOrSpan::Span(s) => s,
+ IdOrSpan::Id(id) => {
+ if let Some(s) = get_impl_span(cx, id) {
+ // Remember the span of the first block.
+ *e.get_mut() = IdOrSpan::Span(s);
+ s
+ } else {
+ // The first impl block isn't considered by the lint. Replace it with the
+ // current one.
+ *e.get_mut() = IdOrSpan::Span(span);
+ continue;
+ }
+ },
+ };
+ lint_spans.push((span, first_span));
+ }
+ },
+ }
+ }
+
+ // Switching to the next type definition, no need to keep the current entries around.
+ type_map.clear();
+ }
+
+ // `TyCtxt::crate_inherent_impls` doesn't have a defined order. Sort the lint output first.
+ lint_spans.sort_by_key(|x| x.0.lo());
+ for (span, first_span) in lint_spans {
+ span_lint_and_note(
+ cx,
+ MULTIPLE_INHERENT_IMPL,
+ span,
+ "multiple implementations of this structure",
+ Some(first_span),
+ "first implementation here",
+ );
+ }
+ }
+}
+
+/// Gets the span for the given impl block, if it's being considered by the lint.
+fn get_impl_span(cx: &LateContext<'_>, id: LocalDefId) -> Option<Span> {
+ let id = cx.tcx.hir().local_def_id_to_hir_id(id);
+ if let Node::Item(&Item {
+ kind: ItemKind::Impl(impl_item),
+ span,
+ ..
+ }) = cx.tcx.hir().get(id)
+ {
+ (!span.from_expansion()
+ && impl_item.generics.params.is_empty()
+ && !is_lint_allowed(cx, MULTIPLE_INHERENT_IMPL, id))
+ .then_some(span)
+ } else {
+ None
+ }
+}
+
+enum IdOrSpan {
+ Id(LocalDefId),
+ Span(Span),
+}
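
A minimal sketch of what is and isn't reported, matching the generic-argument note in the pass above (type names are illustrative):

```rust
// Linted: two inherent impl blocks for the same type.
struct X;
impl X {
    fn one() {}
}
impl X {
    fn other() {}
}

// Not linted: the blocks target different generic arguments of `Foo`, which is
// why the pass keys its map on the impl's `type_of` rather than on the def id alone.
struct Foo<T>(T);
struct Bar;
struct Baz;
impl Foo<Bar> {
    fn a() {}
}
impl Foo<Baz> {
    fn b() {}
}

fn main() {
    X::one();
    X::other();
    Foo::<Bar>::a();
    Foo::<Baz>::b();
}
```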
diff --git a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
new file mode 100644
index 000000000..17d867aac
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
@@ -0,0 +1,153 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use clippy_utils::{get_trait_def_id, paths, return_ty, trait_ref_of_method};
+use if_chain::if_chain;
+use rustc_hir::{GenericParamKind, ImplItem, ImplItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the definition of inherent methods with a signature of `to_string(&self) -> String`.
+ ///
+ /// ### Why is this bad?
+ /// This method is also implicitly defined if a type implements the `Display` trait. As the functionality of `Display` is much more versatile, it should be preferred.
+ ///
+ /// ### Example
+ /// ```rust
+ /// pub struct A;
+ ///
+ /// impl A {
+ /// pub fn to_string(&self) -> String {
+ /// "I am A".to_string()
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// use std::fmt;
+ ///
+ /// pub struct A;
+ ///
+ /// impl fmt::Display for A {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "I am A")
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.38.0"]
+ pub INHERENT_TO_STRING,
+ style,
+ "type implements inherent method `to_string()`, but should instead implement the `Display` trait"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the definition of inherent methods with a signature of `to_string(&self) -> String` and if the type implementing this method also implements the `Display` trait.
+ ///
+ /// ### Why is this bad?
+ /// This method is also implicitly defined if a type implements the `Display` trait. The less versatile inherent method will then shadow the implementation introduced by `Display`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::fmt;
+ ///
+ /// pub struct A;
+ ///
+ /// impl A {
+ /// pub fn to_string(&self) -> String {
+ /// "I am A".to_string()
+ /// }
+ /// }
+ ///
+ /// impl fmt::Display for A {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "I am A, too")
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// use std::fmt;
+ ///
+ /// pub struct A;
+ ///
+ /// impl fmt::Display for A {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "I am A")
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.38.0"]
+ pub INHERENT_TO_STRING_SHADOW_DISPLAY,
+ correctness,
+ "type implements inherent method `to_string()`, which gets shadowed by the implementation of the `Display` trait"
+}
+
+declare_lint_pass!(InherentToString => [INHERENT_TO_STRING, INHERENT_TO_STRING_SHADOW_DISPLAY]);
+
+impl<'tcx> LateLintPass<'tcx> for InherentToString {
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx ImplItem<'_>) {
+ if impl_item.span.from_expansion() {
+ return;
+ }
+
+ if_chain! {
+ // Check if item is a method, called to_string and has a parameter 'self'
+ if let ImplItemKind::Fn(ref signature, _) = impl_item.kind;
+ if impl_item.ident.name == sym::to_string;
+ let decl = &signature.decl;
+ if decl.implicit_self.has_implicit_self();
+ if decl.inputs.len() == 1;
+ if impl_item.generics.params.iter().all(|p| matches!(p.kind, GenericParamKind::Lifetime { .. }));
+
+ // Check if return type is String
+ if is_type_diagnostic_item(cx, return_ty(cx, impl_item.hir_id()), sym::String);
+
+ // Filters out instances of `to_string` that are required by a trait
+ if trait_ref_of_method(cx, impl_item.def_id).is_none();
+
+ then {
+ show_lint(cx, impl_item);
+ }
+ }
+ }
+}
+
+fn show_lint(cx: &LateContext<'_>, item: &ImplItem<'_>) {
+ let display_trait_id = get_trait_def_id(cx, &paths::DISPLAY_TRAIT).expect("Failed to get trait ID of `Display`!");
+
+ // Get the real type of 'self'
+ let self_type = cx.tcx.fn_sig(item.def_id).input(0);
+ let self_type = self_type.skip_binder().peel_refs();
+
+ // Emit either a warning or an error
+ if implements_trait(cx, self_type, display_trait_id, &[]) {
+ span_lint_and_help(
+ cx,
+ INHERENT_TO_STRING_SHADOW_DISPLAY,
+ item.span,
+ &format!(
+ "type `{}` implements inherent method `to_string(&self) -> String` which shadows the implementation of `Display`",
+ self_type
+ ),
+ None,
+ &format!("remove the inherent method from type `{}`", self_type),
+ );
+ } else {
+ span_lint_and_help(
+ cx,
+ INHERENT_TO_STRING,
+ item.span,
+ &format!(
+ "implementation of inherent method `to_string(&self) -> String` for type `{}`",
+ self_type
+ ),
+ None,
+ &format!("implement trait `Display` for type `{}` instead", self_type),
+ );
+ }
+}
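
A short sketch of the `Display`-based alternative both lints point to; the blanket `impl<T: Display> ToString for T` then supplies `to_string` without any inherent method:

```rust
use std::fmt;

struct A;

impl fmt::Display for A {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "I am A")
    }
}

fn main() {
    // `to_string` comes from the blanket `ToString` impl, not from `impl A`.
    assert_eq!(A.to_string(), "I am A");
}
```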
diff --git a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
new file mode 100644
index 000000000..7e1548531
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
@@ -0,0 +1,81 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::borrow::Cow;
+use std::cmp::Reverse;
+use std::collections::BinaryHeap;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for tuple structs initialized with field syntax.
+ /// It will however not lint if a base initializer is present.
+ /// The lint will also ignore code in macros.
+ ///
+ /// ### Why is this bad?
+ /// This may be confusing to the uninitiated and adds no
+ /// benefit compared to tuple initializers.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct TupleStruct(u8, u16);
+ ///
+ /// let _ = TupleStruct {
+ /// 0: 1,
+ /// 1: 23,
+ /// };
+ ///
+ /// // should be written as
+ /// let base = TupleStruct(1, 23);
+ ///
+ /// // This is OK however
+ /// let _ = TupleStruct { 0: 42, ..base };
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub INIT_NUMBERED_FIELDS,
+ style,
+ "numbered fields in tuple struct initializer"
+}
+
+declare_lint_pass!(NumberedFields => [INIT_NUMBERED_FIELDS]);
+
+impl<'tcx> LateLintPass<'tcx> for NumberedFields {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let ExprKind::Struct(path, fields, None) = e.kind {
+ if !fields.is_empty()
+ && !e.span.from_expansion()
+ && fields
+ .iter()
+ .all(|f| f.ident.as_str().as_bytes().iter().all(u8::is_ascii_digit))
+ && !matches!(cx.qpath_res(path, e.hir_id), Res::Def(DefKind::TyAlias, ..))
+ {
+ let expr_spans = fields
+ .iter()
+ .map(|f| (Reverse(f.ident.as_str().parse::<usize>().unwrap()), f.expr.span))
+ .collect::<BinaryHeap<_>>();
+ let mut appl = Applicability::MachineApplicable;
+ let snippet = format!(
+ "{}({})",
+ snippet_with_applicability(cx, path.span(), "..", &mut appl),
+ expr_spans
+ .into_iter_sorted()
+ .map(|(_, span)| snippet_with_applicability(cx, span, "..", &mut appl))
+ .intersperse(Cow::Borrowed(", "))
+ .collect::<String>()
+ );
+ span_lint_and_sugg(
+ cx,
+ INIT_NUMBERED_FIELDS,
+ e.span,
+ "used a field initializer for a tuple struct",
+ "try this instead",
+ snippet,
+ appl,
+ );
+ }
+ }
+ }
+}
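
A stable-Rust sketch (hypothetical; the pass itself drains the heap with the unstable `into_iter_sorted`) of the `Reverse`-keyed heap trick used above to emit the field expressions in definition order:

```rust
use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    // Keying a max-heap with `Reverse(field_index)` makes it yield the
    // expression snippets smallest-index first.
    let mut heap: BinaryHeap<(Reverse<usize>, &str)> =
        [(Reverse(1), "23"), (Reverse(0), "1")].into_iter().collect();

    let mut args = Vec::new();
    while let Some((Reverse(_idx), expr)) = heap.pop() {
        args.push(expr);
    }
    // `TupleStruct { 1: 23, 0: 1 }` becomes `TupleStruct(1, 23)`.
    assert_eq!(args.join(", "), "1, 23");
}
```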
diff --git a/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs b/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs
new file mode 100644
index 000000000..dd7177e01
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs
@@ -0,0 +1,60 @@
+//! checks for `#[inline]` on trait methods without bodies
+
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg::DiagnosticExt;
+use rustc_ast::ast::Attribute;
+use rustc_errors::Applicability;
+use rustc_hir::{TraitFn, TraitItem, TraitItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `#[inline]` on trait methods without bodies
+ ///
+ /// ### Why is this bad?
+ /// Only implementations of trait methods may be inlined.
+ /// The inline attribute is ignored for trait methods without bodies.
+ ///
+ /// ### Example
+ /// ```rust
+ /// trait Animal {
+ /// #[inline]
+ /// fn name(&self) -> &'static str;
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INLINE_FN_WITHOUT_BODY,
+ correctness,
+ "use of `#[inline]` on trait methods without bodies"
+}
+
+declare_lint_pass!(InlineFnWithoutBody => [INLINE_FN_WITHOUT_BODY]);
+
+impl<'tcx> LateLintPass<'tcx> for InlineFnWithoutBody {
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
+ if let TraitItemKind::Fn(_, TraitFn::Required(_)) = item.kind {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ check_attrs(cx, item.ident.name, attrs);
+ }
+ }
+}
+
+fn check_attrs(cx: &LateContext<'_>, name: Symbol, attrs: &[Attribute]) {
+ for attr in attrs {
+ if !attr.has_name(sym::inline) {
+ continue;
+ }
+
+ span_lint_and_then(
+ cx,
+ INLINE_FN_WITHOUT_BODY,
+ attr.span,
+ &format!("use of `#[inline]` on trait method `{}` which has no body", name),
+ |diag| {
+ diag.suggest_remove_item(cx, attr.span, "remove", Applicability::MachineApplicable);
+ },
+ );
+ }
+}
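
A minimal sketch of the flagged placement versus the one where `#[inline]` can actually take effect (trait and type names are illustrative):

```rust
trait Animal {
    // Flagged: `#[inline]` here would be ignored, since there is no body to inline.
    fn name(&self) -> &'static str;
}

struct Dog;

impl Animal for Dog {
    #[inline] // the attribute is meaningful on the implementation, which has a body
    fn name(&self) -> &'static str {
        "dog"
    }
}

fn main() {
    assert_eq!(Dog.name(), "dog");
}
```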
diff --git a/src/tools/clippy/clippy_lints/src/int_plus_one.rs b/src/tools/clippy/clippy_lints/src/int_plus_one.rs
new file mode 100644
index 000000000..9a944def3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/int_plus_one.rs
@@ -0,0 +1,171 @@
+//! lint on blocks unnecessarily using >= with a + 1 or - 1
+
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
+use rustc_ast::ast::{BinOpKind, Expr, ExprKind, Lit, LitKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `x >= y + 1` or `x - 1 >= y` (and `<=`) in a block
+ ///
+ /// ### Why is this bad?
+ /// Readability -- better to use `> y` instead of `>= y + 1`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// # let y = 1;
+ /// if x >= y + 1 {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = 1;
+ /// # let y = 1;
+ /// if x > y {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INT_PLUS_ONE,
+ complexity,
+ "instead of using `x >= y + 1`, use `x > y`"
+}
+
+declare_lint_pass!(IntPlusOne => [INT_PLUS_ONE]);
+
+// cases:
+// BinOpKind::Ge
+// x >= y + 1
+// x - 1 >= y
+//
+// BinOpKind::Le
+// x + 1 <= y
+// x <= y - 1
+
+#[derive(Copy, Clone)]
+enum Side {
+ Lhs,
+ Rhs,
+}
+
+impl IntPlusOne {
+ #[expect(clippy::cast_sign_loss)]
+ fn check_lit(lit: &Lit, target_value: i128) -> bool {
+ if let LitKind::Int(value, ..) = lit.kind {
+ return value == (target_value as u128);
+ }
+ false
+ }
+
+ fn check_binop(cx: &EarlyContext<'_>, binop: BinOpKind, lhs: &Expr, rhs: &Expr) -> Option<String> {
+ match (binop, &lhs.kind, &rhs.kind) {
+ // case where `x - 1 >= ...` or `-1 + x >= ...`
+ (BinOpKind::Ge, &ExprKind::Binary(ref lhskind, ref lhslhs, ref lhsrhs), _) => {
+ match (lhskind.node, &lhslhs.kind, &lhsrhs.kind) {
+ // `-1 + x`
+ (BinOpKind::Add, &ExprKind::Lit(ref lit), _) if Self::check_lit(lit, -1) => {
+ Self::generate_recommendation(cx, binop, lhsrhs, rhs, Side::Lhs)
+ },
+ // `x - 1`
+ (BinOpKind::Sub, _, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
+ Self::generate_recommendation(cx, binop, lhslhs, rhs, Side::Lhs)
+ },
+ _ => None,
+ }
+ },
+ // case where `... >= y + 1` or `... >= 1 + y`
+ (BinOpKind::Ge, _, &ExprKind::Binary(ref rhskind, ref rhslhs, ref rhsrhs))
+ if rhskind.node == BinOpKind::Add =>
+ {
+ match (&rhslhs.kind, &rhsrhs.kind) {
+ // `y + 1` and `1 + y`
+ (&ExprKind::Lit(ref lit), _) if Self::check_lit(lit, 1) => {
+ Self::generate_recommendation(cx, binop, rhsrhs, lhs, Side::Rhs)
+ },
+ (_, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
+ Self::generate_recommendation(cx, binop, rhslhs, lhs, Side::Rhs)
+ },
+ _ => None,
+ }
+ },
+ // case where `x + 1 <= ...` or `1 + x <= ...`
+ (BinOpKind::Le, &ExprKind::Binary(ref lhskind, ref lhslhs, ref lhsrhs), _)
+ if lhskind.node == BinOpKind::Add =>
+ {
+ match (&lhslhs.kind, &lhsrhs.kind) {
+ // `1 + x` and `x + 1`
+ (&ExprKind::Lit(ref lit), _) if Self::check_lit(lit, 1) => {
+ Self::generate_recommendation(cx, binop, lhsrhs, rhs, Side::Lhs)
+ },
+ (_, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
+ Self::generate_recommendation(cx, binop, lhslhs, rhs, Side::Lhs)
+ },
+ _ => None,
+ }
+ },
+ // case where `... >= y - 1` or `... >= -1 + y`
+ (BinOpKind::Le, _, &ExprKind::Binary(ref rhskind, ref rhslhs, ref rhsrhs)) => {
+ match (rhskind.node, &rhslhs.kind, &rhsrhs.kind) {
+ // `-1 + y`
+ (BinOpKind::Add, &ExprKind::Lit(ref lit), _) if Self::check_lit(lit, -1) => {
+ Self::generate_recommendation(cx, binop, rhsrhs, lhs, Side::Rhs)
+ },
+ // `y - 1`
+ (BinOpKind::Sub, _, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
+ Self::generate_recommendation(cx, binop, rhslhs, lhs, Side::Rhs)
+ },
+ _ => None,
+ }
+ },
+ _ => None,
+ }
+ }
+
+ fn generate_recommendation(
+ cx: &EarlyContext<'_>,
+ binop: BinOpKind,
+ node: &Expr,
+ other_side: &Expr,
+ side: Side,
+ ) -> Option<String> {
+ let binop_string = match binop {
+ BinOpKind::Ge => ">",
+ BinOpKind::Le => "<",
+ _ => return None,
+ };
+ if let Some(snippet) = snippet_opt(cx, node.span) {
+ if let Some(other_side_snippet) = snippet_opt(cx, other_side.span) {
+ let rec = match side {
+ Side::Lhs => Some(format!("{} {} {}", snippet, binop_string, other_side_snippet)),
+ Side::Rhs => Some(format!("{} {} {}", other_side_snippet, binop_string, snippet)),
+ };
+ return rec;
+ }
+ }
+ None
+ }
+
+ fn emit_warning(cx: &EarlyContext<'_>, block: &Expr, recommendation: String) {
+ span_lint_and_sugg(
+ cx,
+ INT_PLUS_ONE,
+ block.span,
+ "unnecessary `>= y + 1` or `x - 1 >=`",
+ "change it to",
+ recommendation,
+ Applicability::MachineApplicable, // snippet
+ );
+ }
+}
+
+impl EarlyLintPass for IntPlusOne {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, item: &Expr) {
+ if let ExprKind::Binary(ref kind, ref lhs, ref rhs) = item.kind {
+ if let Some(rec) = Self::check_binop(cx, kind.node, lhs, rhs) {
+ Self::emit_warning(cx, item, rec);
+ }
+ }
+ }
+}
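
A compact sketch of the four shapes handled in `check_binop` and the strict comparisons they are rewritten to (variable names are illustrative):

```rust
fn main() {
    let (x, y) = (1, 1);

    // The four flagged shapes and their suggested replacements:
    let _ = x >= y + 1; // -> x > y
    let _ = x - 1 >= y; // -> x > y
    let _ = x + 1 <= y; // -> x < y
    let _ = x <= y - 1; // -> x < y
}
```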
diff --git a/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs b/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs
new file mode 100644
index 000000000..36e03e50a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs
@@ -0,0 +1,161 @@
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, IntTy, UintTy};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+use clippy_utils::comparisons;
+use clippy_utils::comparisons::Rel;
+use clippy_utils::consts::{constant_full_int, FullInt};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::source::snippet;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparisons where the relation is always either
+ /// true or false, but where one side has been upcast so that the comparison seems
+ /// necessary. Only integer types are checked.
+ ///
+ /// ### Why is this bad?
+ /// An expression like `let x : u8 = ...; (x as u32) > 300`
+ /// will mistakenly imply that it is possible for `x` to be outside the range of
+ /// `u8`.
+ ///
+ /// ### Known problems
+ /// https://github.com/rust-lang/rust-clippy/issues/886
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: u8 = 1;
+ /// (x as u32) > 300;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INVALID_UPCAST_COMPARISONS,
+ pedantic,
+ "a comparison involving an upcast which is always true or false"
+}
+
+declare_lint_pass!(InvalidUpcastComparisons => [INVALID_UPCAST_COMPARISONS]);
+
+fn numeric_cast_precast_bounds<'a>(cx: &LateContext<'_>, expr: &'a Expr<'_>) -> Option<(FullInt, FullInt)> {
+ if let ExprKind::Cast(cast_exp, _) = expr.kind {
+ let pre_cast_ty = cx.typeck_results().expr_ty(cast_exp);
+ let cast_ty = cx.typeck_results().expr_ty(expr);
+        // if it's a cast from i32 to u32, wrapping will invalidate all these checks
+ if cx.layout_of(pre_cast_ty).ok().map(|l| l.size) == cx.layout_of(cast_ty).ok().map(|l| l.size) {
+ return None;
+ }
+ match pre_cast_ty.kind() {
+ ty::Int(int_ty) => Some(match int_ty {
+ IntTy::I8 => (FullInt::S(i128::from(i8::MIN)), FullInt::S(i128::from(i8::MAX))),
+ IntTy::I16 => (FullInt::S(i128::from(i16::MIN)), FullInt::S(i128::from(i16::MAX))),
+ IntTy::I32 => (FullInt::S(i128::from(i32::MIN)), FullInt::S(i128::from(i32::MAX))),
+ IntTy::I64 => (FullInt::S(i128::from(i64::MIN)), FullInt::S(i128::from(i64::MAX))),
+ IntTy::I128 => (FullInt::S(i128::MIN), FullInt::S(i128::MAX)),
+ IntTy::Isize => (FullInt::S(isize::MIN as i128), FullInt::S(isize::MAX as i128)),
+ }),
+ ty::Uint(uint_ty) => Some(match uint_ty {
+ UintTy::U8 => (FullInt::U(u128::from(u8::MIN)), FullInt::U(u128::from(u8::MAX))),
+ UintTy::U16 => (FullInt::U(u128::from(u16::MIN)), FullInt::U(u128::from(u16::MAX))),
+ UintTy::U32 => (FullInt::U(u128::from(u32::MIN)), FullInt::U(u128::from(u32::MAX))),
+ UintTy::U64 => (FullInt::U(u128::from(u64::MIN)), FullInt::U(u128::from(u64::MAX))),
+ UintTy::U128 => (FullInt::U(u128::MIN), FullInt::U(u128::MAX)),
+ UintTy::Usize => (FullInt::U(usize::MIN as u128), FullInt::U(usize::MAX as u128)),
+ }),
+ _ => None,
+ }
+ } else {
+ None
+ }
+}
+
+fn err_upcast_comparison(cx: &LateContext<'_>, span: Span, expr: &Expr<'_>, always: bool) {
+ if let ExprKind::Cast(cast_val, _) = expr.kind {
+ span_lint(
+ cx,
+ INVALID_UPCAST_COMPARISONS,
+ span,
+ &format!(
+ "because of the numeric bounds on `{}` prior to casting, this expression is always {}",
+ snippet(cx, cast_val.span, "the expression"),
+ if always { "true" } else { "false" },
+ ),
+ );
+ }
+}
+
+fn upcast_comparison_bounds_err<'tcx>(
+ cx: &LateContext<'tcx>,
+ span: Span,
+ rel: comparisons::Rel,
+ lhs_bounds: Option<(FullInt, FullInt)>,
+ lhs: &'tcx Expr<'_>,
+ rhs: &'tcx Expr<'_>,
+    invert: bool, // when true, `rel` relates the operands in reverse order, so the bound checks are mirrored
+) {
+ if let Some((lb, ub)) = lhs_bounds {
+ if let Some(norm_rhs_val) = constant_full_int(cx, cx.typeck_results(), rhs) {
+ if rel == Rel::Eq || rel == Rel::Ne {
+ if norm_rhs_val < lb || norm_rhs_val > ub {
+ err_upcast_comparison(cx, span, lhs, rel == Rel::Ne);
+ }
+ } else if match rel {
+ Rel::Lt => {
+ if invert {
+ norm_rhs_val < lb
+ } else {
+ ub < norm_rhs_val
+ }
+ },
+ Rel::Le => {
+ if invert {
+ norm_rhs_val <= lb
+ } else {
+ ub <= norm_rhs_val
+ }
+ },
+ Rel::Eq | Rel::Ne => unreachable!(),
+ } {
+ err_upcast_comparison(cx, span, lhs, true);
+ } else if match rel {
+ Rel::Lt => {
+ if invert {
+ norm_rhs_val >= ub
+ } else {
+ lb >= norm_rhs_val
+ }
+ },
+ Rel::Le => {
+ if invert {
+ norm_rhs_val > ub
+ } else {
+ lb > norm_rhs_val
+ }
+ },
+ Rel::Eq | Rel::Ne => unreachable!(),
+ } {
+ err_upcast_comparison(cx, span, lhs, false);
+ }
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for InvalidUpcastComparisons {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Binary(ref cmp, lhs, rhs) = expr.kind {
+ let normalized = comparisons::normalize_comparison(cmp.node, lhs, rhs);
+ let (rel, normalized_lhs, normalized_rhs) = if let Some(val) = normalized {
+ val
+ } else {
+ return;
+ };
+
+ let lhs_bounds = numeric_cast_precast_bounds(cx, normalized_lhs);
+ let rhs_bounds = numeric_cast_precast_bounds(cx, normalized_rhs);
+
+ upcast_comparison_bounds_err(cx, expr.span, rel, lhs_bounds, normalized_lhs, normalized_rhs, false);
+ upcast_comparison_bounds_err(cx, expr.span, rel, rhs_bounds, normalized_rhs, normalized_lhs, true);
+ }
+ }
+}
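To make the bounds logic above concrete, here is the situation from the lint's own documentation: `x` is a `u8`, so after the upcast its value still lies in `0..=255` and the outcome of the comparison is fixed (a small illustrative sketch, not part of the test suite):

```rust
fn main() {
    let x: u8 = 1;
    // `x as u32` is at most 255, so this comparison is always false and is flagged.
    let _always_false = (x as u32) > 300;
    // Equality against a value outside the pre-cast range: `==` is always false,
    // `!=` is always true; both are reported.
    let _always_true = (x as u32) != 300;
}
```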
diff --git a/src/tools/clippy/clippy_lints/src/invalid_utf8_in_unchecked.rs b/src/tools/clippy/clippy_lints/src/invalid_utf8_in_unchecked.rs
new file mode 100644
index 000000000..e0a607f9a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/invalid_utf8_in_unchecked.rs
@@ -0,0 +1,74 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{match_function_call, paths};
+use rustc_ast::{BorrowKind, LitKind};
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `std::str::from_utf8_unchecked` with an invalid UTF-8 literal
+ ///
+ /// ### Why is this bad?
+ /// Creating such a `str` would result in undefined behavior
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #[allow(unused)]
+ /// unsafe {
+ /// std::str::from_utf8_unchecked(b"cl\x82ippy");
+ /// }
+ /// ```
+ #[clippy::version = "1.64.0"]
+ pub INVALID_UTF8_IN_UNCHECKED,
+ correctness,
+    "using a non UTF-8 literal in `std::str::from_utf8_unchecked`"
+}
+declare_lint_pass!(InvalidUtf8InUnchecked => [INVALID_UTF8_IN_UNCHECKED]);
+
+impl<'tcx> LateLintPass<'tcx> for InvalidUtf8InUnchecked {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if let Some([arg]) = match_function_call(cx, expr, &paths::STR_FROM_UTF8_UNCHECKED) {
+ match &arg.kind {
+ ExprKind::Lit(Spanned { node: lit, .. }) => {
+ if let LitKind::ByteStr(bytes) = &lit
+ && std::str::from_utf8(bytes).is_err()
+ {
+ lint(cx, expr.span);
+ }
+ },
+ ExprKind::AddrOf(BorrowKind::Ref, _, Expr { kind: ExprKind::Array(args), .. }) => {
+ let elements = args.iter().map(|e|{
+ match &e.kind {
+ ExprKind::Lit(Spanned { node: lit, .. }) => match lit {
+ LitKind::Byte(b) => Some(*b),
+ #[allow(clippy::cast_possible_truncation)]
+ LitKind::Int(b, _) => Some(*b as u8),
+ _ => None
+ }
+ _ => None
+ }
+ }).collect::<Option<Vec<_>>>();
+
+ if let Some(elements) = elements
+ && std::str::from_utf8(&elements).is_err()
+ {
+ lint(cx, expr.span);
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+}
+
+fn lint(cx: &LateContext<'_>, span: Span) {
+ span_lint(
+ cx,
+ INVALID_UTF8_IN_UNCHECKED,
+ span,
+ "non UTF-8 literal in `std::str::from_utf8_unchecked`",
+ );
+}
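Both argument shapes handled above are reported: a byte-string literal and a borrowed array of byte or integer literals. A sketch of what gets flagged (shown only to illustrate the lint's target; actually producing a non-UTF-8 `&str` this way is undefined behavior, which is exactly why the lint sits in the `correctness` group):

```rust
fn main() {
    unsafe {
        // Byte-string literal containing the invalid byte 0x82: flagged.
        let _ = std::str::from_utf8_unchecked(b"cl\x82ippy");
        // Borrowed array of byte literals that is not valid UTF-8: also flagged.
        let _ = std::str::from_utf8_unchecked(&[0xC0, 0x00]);
    }
}
```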
diff --git a/src/tools/clippy/clippy_lints/src/items_after_statements.rs b/src/tools/clippy/clippy_lints/src/items_after_statements.rs
new file mode 100644
index 000000000..46d439b44
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/items_after_statements.rs
@@ -0,0 +1,88 @@
+//! lint when items are used after statements
+
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast::{Block, ItemKind, StmtKind};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for items declared after some statement in a block.
+ ///
+ /// ### Why is this bad?
+ /// Items live for the entire scope they are declared
+ /// in. But statements are processed in order. This might cause confusion as
+ /// it's hard to figure out which item is meant in a statement.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo() {
+ /// println!("cake");
+ /// }
+ ///
+ /// fn main() {
+ /// foo(); // prints "foo"
+ /// fn foo() {
+ /// println!("foo");
+ /// }
+ /// foo(); // prints "foo"
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn foo() {
+ /// println!("cake");
+ /// }
+ ///
+ /// fn main() {
+ /// fn foo() {
+ /// println!("foo");
+ /// }
+ /// foo(); // prints "foo"
+ /// foo(); // prints "foo"
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ITEMS_AFTER_STATEMENTS,
+ pedantic,
+ "blocks where an item comes after a statement"
+}
+
+declare_lint_pass!(ItemsAfterStatements => [ITEMS_AFTER_STATEMENTS]);
+
+impl EarlyLintPass for ItemsAfterStatements {
+ fn check_block(&mut self, cx: &EarlyContext<'_>, item: &Block) {
+ if in_external_macro(cx.sess(), item.span) {
+ return;
+ }
+
+ // skip initial items and trailing semicolons
+ let stmts = item
+ .stmts
+ .iter()
+ .map(|stmt| &stmt.kind)
+ .skip_while(|s| matches!(**s, StmtKind::Item(..) | StmtKind::Empty));
+
+ // lint on all further items
+ for stmt in stmts {
+ if let StmtKind::Item(ref it) = *stmt {
+ if in_external_macro(cx.sess(), it.span) {
+ return;
+ }
+ if let ItemKind::MacroDef(..) = it.kind {
+ // do not lint `macro_rules`, but continue processing further statements
+ continue;
+ }
+ span_lint(
+ cx,
+ ITEMS_AFTER_STATEMENTS,
+ it.span,
+ "adding items after statements is confusing, since items exist from the \
+ start of the scope",
+ );
+ }
+ }
+ }
+}
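One behaviour of the loop above that the lint documentation does not spell out: `macro_rules!` definitions after a statement are deliberately skipped (`ItemKind::MacroDef`), while other items are still reported. A small sketch:

```rust
fn main() {
    println!("start");
    // Not linted: macro definitions after a statement are skipped.
    macro_rules! twice {
        ($e:expr) => {
            $e + $e
        };
    }
    // Linted: an ordinary item (here a function) after a statement.
    fn helper() -> i32 {
        twice!(1)
    }
    println!("{}", helper());
}
```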
diff --git a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
new file mode 100644
index 000000000..b56d87c53
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
@@ -0,0 +1,90 @@
+use clippy_utils::{diagnostics::span_lint, get_parent_node, ty::implements_trait};
+use rustc_hir::{def_id::LocalDefId, FnSig, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects methods named `iter` or `iter_mut` that do not have a return type that implements `Iterator`.
+ ///
+ /// ### Why is this bad?
+ /// Methods named `iter` or `iter_mut` conventionally return an `Iterator`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // `String` does not implement `Iterator`
+ /// struct Data {}
+ /// impl Data {
+ /// fn iter(&self) -> String {
+ /// todo!()
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::str::Chars;
+ /// struct Data {}
+ /// impl Data {
+ /// fn iter(&self) -> Chars<'static> {
+ /// todo!()
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub ITER_NOT_RETURNING_ITERATOR,
+ pedantic,
+ "methods named `iter` or `iter_mut` that do not return an `Iterator`"
+}
+
+declare_lint_pass!(IterNotReturningIterator => [ITER_NOT_RETURNING_ITERATOR]);
+
+impl<'tcx> LateLintPass<'tcx> for IterNotReturningIterator {
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
+ let name = item.ident.name.as_str();
+ if matches!(name, "iter" | "iter_mut") {
+ if let TraitItemKind::Fn(fn_sig, _) = &item.kind {
+ check_sig(cx, name, fn_sig, item.def_id);
+ }
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'tcx>) {
+ let name = item.ident.name.as_str();
+ if matches!(name, "iter" | "iter_mut")
+ && !matches!(
+ get_parent_node(cx.tcx, item.hir_id()),
+ Some(Node::Item(Item { kind: ItemKind::Impl(i), .. })) if i.of_trait.is_some()
+ )
+ {
+ if let ImplItemKind::Fn(fn_sig, _) = &item.kind {
+ check_sig(cx, name, fn_sig, item.def_id);
+ }
+ }
+ }
+}
+
+fn check_sig(cx: &LateContext<'_>, name: &str, sig: &FnSig<'_>, fn_id: LocalDefId) {
+ if sig.decl.implicit_self.has_implicit_self() {
+ let ret_ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(fn_id).output());
+ let ret_ty = cx
+ .tcx
+ .try_normalize_erasing_regions(cx.param_env, ret_ty)
+ .unwrap_or(ret_ty);
+ if cx
+ .tcx
+ .get_diagnostic_item(sym::Iterator)
+ .map_or(false, |iter_id| !implements_trait(cx, ret_ty, iter_id, &[]))
+ {
+ span_lint(
+ cx,
+ ITER_NOT_RETURNING_ITERATOR,
+ sig.span,
+ &format!(
+ "this method is named `{}` but its return type does not implement `Iterator`",
+ name
+ ),
+ );
+ }
+ }
+}
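Note that `check_sig` above only runs for methods with a `self` receiver, so an associated function that merely shares the name is ignored. A sketch with a made-up `Data` type:

```rust
struct Data;

impl Data {
    // Has a `self` receiver and does not return an `Iterator`: reported.
    fn iter(&self) -> String {
        String::from("not an iterator")
    }

    // No `self` receiver, so `check_sig` bails out early: not reported.
    fn iter_mut() -> String {
        String::from("also not an iterator")
    }
}

fn main() {
    println!("{} {}", Data.iter(), Data::iter_mut());
}
```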
diff --git a/src/tools/clippy/clippy_lints/src/large_const_arrays.rs b/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
new file mode 100644
index 000000000..984c5cd4e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
@@ -0,0 +1,86 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, ConstKind};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{BytePos, Pos, Span};
+use rustc_typeck::hir_ty_to_ty;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for large `const` arrays that should
+ /// be defined as `static` instead.
+ ///
+ /// ### Why is this bad?
+ /// Performance: const variables are inlined upon use.
+    /// Static items result in only one instance and have a fixed location in memory.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// pub const a = [0u32; 1_000_000];
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// pub static a = [0u32; 1_000_000];
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub LARGE_CONST_ARRAYS,
+ perf,
+ "large non-scalar const array may cause performance overhead"
+}
+
+pub struct LargeConstArrays {
+ maximum_allowed_size: u64,
+}
+
+impl LargeConstArrays {
+ #[must_use]
+ pub fn new(maximum_allowed_size: u64) -> Self {
+ Self { maximum_allowed_size }
+ }
+}
+
+impl_lint_pass!(LargeConstArrays => [LARGE_CONST_ARRAYS]);
+
+impl<'tcx> LateLintPass<'tcx> for LargeConstArrays {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if_chain! {
+ if !item.span.from_expansion();
+ if let ItemKind::Const(hir_ty, _) = &item.kind;
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ if let ty::Array(element_type, cst) = ty.kind();
+ if let ConstKind::Value(ty::ValTree::Leaf(element_count)) = cst.kind();
+ if let Ok(element_count) = element_count.try_to_machine_usize(cx.tcx);
+ if let Ok(element_size) = cx.layout_of(*element_type).map(|l| l.size.bytes());
+ if self.maximum_allowed_size < element_count * element_size;
+
+ then {
+ let hi_pos = item.ident.span.lo() - BytePos::from_usize(1);
+ let sugg_span = Span::new(
+ hi_pos - BytePos::from_usize("const".len()),
+ hi_pos,
+ item.span.ctxt(),
+ item.span.parent(),
+ );
+ span_lint_and_then(
+ cx,
+ LARGE_CONST_ARRAYS,
+ item.span,
+ "large array defined as const",
+ |diag| {
+ diag.span_suggestion(
+ sugg_span,
+ "make this a static item",
+ "static",
+ Applicability::MachineApplicable,
+ );
+ }
+ );
+ }
+ }
+ }
+}
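The suggestion built above replaces only the `const` keyword span, so the machine-applicable fix is a one-word change once `element_count * element_size` exceeds the configured `maximum_allowed_size`. A sketch, reusing the scale of the lint's doc example (the size is chosen only to sit well above a typical threshold):

```rust
// Flagged when 1_000_000 * 4 bytes exceeds the configured maximum:
pub const BIG_TABLE: [u32; 1_000_000] = [0; 1_000_000];

// After applying the suggestion: only `const` changes to `static`.
pub static BIG_TABLE_FIXED: [u32; 1_000_000] = [0; 1_000_000];

fn main() {
    println!("{}", BIG_TABLE[0] + BIG_TABLE_FIXED[0]);
}
```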
diff --git a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
new file mode 100644
index 000000000..c58df126d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
@@ -0,0 +1,200 @@
+//! lint when there is a large size difference between variants on an enum
+
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{diagnostics::span_lint_and_then, ty::is_copy};
+use rustc_errors::Applicability;
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{Adt, Ty};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for large size differences between variants on
+ /// `enum`s.
+ ///
+ /// ### Why is this bad?
+ /// Enum size is bounded by the largest variant. Having a
+ /// large variant can penalize the memory layout of that enum.
+ ///
+ /// ### Known problems
+ /// This lint obviously cannot take the distribution of
+ /// variants in your running program into account. It is possible that the
+ /// smaller variants make up less than 1% of all instances, in which case
+ /// the overhead is negligible and the boxing is counter-productive. Always
+ /// measure the change this lint suggests.
+ ///
+ /// For types that implement `Copy`, the suggestion to `Box` a variant's
+ /// data would require removing the trait impl. The types can of course
+ /// still be `Clone`, but that is worse ergonomically. Depending on the
+ /// use case it may be possible to store the large data in an auxiliary
+ /// structure (e.g. Arena or ECS).
+ ///
+ /// The lint will ignore generic types if the layout depends on the
+ /// generics, even if the size difference will be large anyway.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum Test {
+ /// A(i32),
+ /// B([i32; 8000]),
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// // Possibly better
+ /// enum Test2 {
+ /// A(i32),
+ /// B(Box<[i32; 8000]>),
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LARGE_ENUM_VARIANT,
+ perf,
+ "large size difference between variants on an enum"
+}
+
+#[derive(Copy, Clone)]
+pub struct LargeEnumVariant {
+ maximum_size_difference_allowed: u64,
+}
+
+impl LargeEnumVariant {
+ #[must_use]
+ pub fn new(maximum_size_difference_allowed: u64) -> Self {
+ Self {
+ maximum_size_difference_allowed,
+ }
+ }
+}
+
+struct FieldInfo {
+ ind: usize,
+ size: u64,
+}
+
+struct VariantInfo {
+ ind: usize,
+ size: u64,
+ fields_size: Vec<FieldInfo>,
+}
+
+impl_lint_pass!(LargeEnumVariant => [LARGE_ENUM_VARIANT]);
+
+impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &Item<'tcx>) {
+ if in_external_macro(cx.tcx.sess, item.span) {
+ return;
+ }
+ if let ItemKind::Enum(ref def, _) = item.kind {
+ let ty = cx.tcx.type_of(item.def_id);
+ let adt = ty.ty_adt_def().expect("already checked whether this is an enum");
+ if adt.variants().len() <= 1 {
+ return;
+ }
+ let mut variants_size: Vec<VariantInfo> = Vec::new();
+ for (i, variant) in adt.variants().iter().enumerate() {
+ let mut fields_size = Vec::new();
+ for (i, f) in variant.fields.iter().enumerate() {
+ let ty = cx.tcx.type_of(f.did);
+ // don't lint variants which have a field of generic type.
+ match cx.layout_of(ty) {
+ Ok(l) => {
+ let fsize = l.size.bytes();
+ fields_size.push(FieldInfo { ind: i, size: fsize });
+ },
+ Err(_) => {
+ return;
+ },
+ }
+ }
+ let size: u64 = fields_size.iter().map(|info| info.size).sum();
+
+ variants_size.push(VariantInfo {
+ ind: i,
+ size,
+ fields_size,
+ });
+ }
+
+ variants_size.sort_by(|a, b| (b.size.cmp(&a.size)));
+
+ let mut difference = variants_size[0].size - variants_size[1].size;
+ if difference > self.maximum_size_difference_allowed {
+ let help_text = "consider boxing the large fields to reduce the total size of the enum";
+ span_lint_and_then(
+ cx,
+ LARGE_ENUM_VARIANT,
+ def.variants[variants_size[0].ind].span,
+ "large size difference between variants",
+ |diag| {
+ diag.span_label(
+ def.variants[variants_size[0].ind].span,
+ &format!("this variant is {} bytes", variants_size[0].size),
+ );
+ diag.span_note(
+ def.variants[variants_size[1].ind].span,
+ &format!("and the second-largest variant is {} bytes:", variants_size[1].size),
+ );
+
+ let fields = def.variants[variants_size[0].ind].data.fields();
+ variants_size[0].fields_size.sort_by(|a, b| (a.size.cmp(&b.size)));
+ let mut applicability = Applicability::MaybeIncorrect;
+ if is_copy(cx, ty) || maybe_copy(cx, ty) {
+ diag.span_note(
+ item.ident.span,
+ "boxing a variant would require the type no longer be `Copy`",
+ );
+ } else {
+ let sugg: Vec<(Span, String)> = variants_size[0]
+ .fields_size
+ .iter()
+ .rev()
+ .map_while(|val| {
+ if difference > self.maximum_size_difference_allowed {
+ difference = difference.saturating_sub(val.size);
+ Some((
+ fields[val.ind].ty.span,
+ format!(
+ "Box<{}>",
+ snippet_with_applicability(
+ cx,
+ fields[val.ind].ty.span,
+ "..",
+ &mut applicability
+ )
+ .into_owned()
+ ),
+ ))
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ if !sugg.is_empty() {
+ diag.multipart_suggestion(help_text, sugg, Applicability::MaybeIncorrect);
+ return;
+ }
+ }
+ diag.span_help(def.variants[variants_size[0].ind].span, help_text);
+ },
+ );
+ }
+ }
+ }
+}
+
+fn maybe_copy<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ if let Adt(_def, substs) = ty.kind()
+ && substs.types().next().is_some()
+ && let Some(copy_trait) = cx.tcx.lang_items().copy_trait()
+ {
+ return cx.tcx.non_blanket_impls_for_ty(copy_trait, ty).next().is_some();
+ }
+ false
+}
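Putting the pieces together: fields of the largest variant are boxed, biggest first, until the size difference drops under `maximum_size_difference_allowed`, unless the enum looks `Copy`, in which case only a note is emitted. The before/after shape from the lint's own example:

```rust
// Before: the `B` payload makes every `Test` value roughly 32 KB.
#[allow(dead_code)]
enum Test {
    A(i32),
    B([i32; 8000]),
}

// After the multipart suggestion: the large payload is boxed.
#[allow(dead_code)]
enum Test2 {
    A(i32),
    B(Box<[i32; 8000]>),
}

fn main() {
    println!(
        "{} vs {} bytes",
        std::mem::size_of::<Test>(),
        std::mem::size_of::<Test2>()
    );
}
```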
diff --git a/src/tools/clippy/clippy_lints/src/large_include_file.rs b/src/tools/clippy/clippy_lints/src/large_include_file.rs
new file mode 100644
index 000000000..84dd61a1e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/large_include_file.rs
@@ -0,0 +1,87 @@
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::is_lint_allowed;
+use clippy_utils::macros::root_macro_call_first_node;
+use rustc_ast::LitKind;
+use rustc_hir::Expr;
+use rustc_hir::ExprKind;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the inclusion of large files via `include_bytes!()`
+ /// and `include_str!()`
+ ///
+ /// ### Why is this bad?
+ /// Including large files can increase the size of the binary
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let included_str = include_str!("very_large_file.txt");
+ /// let included_bytes = include_bytes!("very_large_file.txt");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// use std::fs;
+ ///
+ /// // You can load the file at runtime
+ /// let string = fs::read_to_string("very_large_file.txt")?;
+ /// let bytes = fs::read("very_large_file.txt")?;
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub LARGE_INCLUDE_FILE,
+ restriction,
+ "including a large file"
+}
+
+pub struct LargeIncludeFile {
+ max_file_size: u64,
+}
+
+impl LargeIncludeFile {
+ #[must_use]
+ pub fn new(max_file_size: u64) -> Self {
+ Self { max_file_size }
+ }
+}
+
+impl_lint_pass!(LargeIncludeFile => [LARGE_INCLUDE_FILE]);
+
+impl LateLintPass<'_> for LargeIncludeFile {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) {
+ if_chain! {
+ if let Some(macro_call) = root_macro_call_first_node(cx, expr);
+ if !is_lint_allowed(cx, LARGE_INCLUDE_FILE, expr.hir_id);
+ if cx.tcx.is_diagnostic_item(sym::include_bytes_macro, macro_call.def_id)
+ || cx.tcx.is_diagnostic_item(sym::include_str_macro, macro_call.def_id);
+ if let ExprKind::Lit(lit) = &expr.kind;
+ then {
+ let len = match &lit.node {
+ // include_bytes
+ LitKind::ByteStr(bstr) => bstr.len(),
+ // include_str
+ LitKind::Str(sym, _) => sym.as_str().len(),
+ _ => return,
+ };
+
+ if len as u64 <= self.max_file_size {
+ return;
+ }
+
+ span_lint_and_note(
+ cx,
+ LARGE_INCLUDE_FILE,
+ expr.span,
+ "attempted to include a large file",
+ None,
+ &format!(
+ "the configuration allows a maximum size of {} bytes",
+ self.max_file_size
+ ),
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs b/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs
new file mode 100644
index 000000000..0acbd81ae
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/large_stack_arrays.rs
@@ -0,0 +1,67 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet;
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, ConstKind};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for local arrays that may be too large.
+ ///
+ /// ### Why is this bad?
+ /// Large local arrays may cause stack overflow.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let a = [0u32; 1_000_000];
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub LARGE_STACK_ARRAYS,
+ pedantic,
+ "allocating large arrays on stack may cause stack overflow"
+}
+
+pub struct LargeStackArrays {
+ maximum_allowed_size: u64,
+}
+
+impl LargeStackArrays {
+ #[must_use]
+ pub fn new(maximum_allowed_size: u64) -> Self {
+ Self { maximum_allowed_size }
+ }
+}
+
+impl_lint_pass!(LargeStackArrays => [LARGE_STACK_ARRAYS]);
+
+impl<'tcx> LateLintPass<'tcx> for LargeStackArrays {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Repeat(_, _) = expr.kind;
+ if let ty::Array(element_type, cst) = cx.typeck_results().expr_ty(expr).kind();
+ if let ConstKind::Value(ty::ValTree::Leaf(element_count)) = cst.kind();
+ if let Ok(element_count) = element_count.try_to_machine_usize(cx.tcx);
+ if let Ok(element_size) = cx.layout_of(*element_type).map(|l| l.size.bytes());
+ if self.maximum_allowed_size < element_count * element_size;
+ then {
+ span_lint_and_help(
+ cx,
+ LARGE_STACK_ARRAYS,
+ expr.span,
+ &format!(
+ "allocating a local array larger than {} bytes",
+ self.maximum_allowed_size
+ ),
+ None,
+ &format!(
+ "consider allocating on the heap with `vec!{}.into_boxed_slice()`",
+ snippet(cx, expr.span, "[...]")
+ ),
+ );
+ }
+ }
+ }
+}
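The help message above points at `vec![...].into_boxed_slice()`; a minimal before/after sketch (the flagged line is left commented out because a 4 MB stack allocation may genuinely overflow the stack when run):

```rust
fn main() {
    // Flagged: roughly 4 MB placed on the stack.
    // let on_stack = [0u32; 1_000_000];

    // Suggested alternative: allocate on the heap instead.
    let on_heap = vec![0u32; 1_000_000].into_boxed_slice();
    println!("{}", on_heap.len());
}
```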
diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs
new file mode 100644
index 000000000..246f5aad8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/len_zero.rs
@@ -0,0 +1,495 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::def_id::DefIdSet;
+use rustc_hir::{
+ def_id::DefId, AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, ImplItem, ImplItemKind, ImplicitSelfKind, Item,
+ ItemKind, Mutability, Node, TraitItemRef, TyKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, AssocKind, FnSig, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{
+ source_map::{Span, Spanned, Symbol},
+ symbol::sym,
+};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for getting the length of something via `.len()`
+ /// just to compare to zero, and suggests using `.is_empty()` where applicable.
+ ///
+ /// ### Why is this bad?
+ /// Some structures can answer `.is_empty()` much faster
+ /// than calculating their length. So it is good to get into the habit of using
+ /// `.is_empty()`, and having it is cheap.
+ /// Besides, it makes the intent clearer than a manual comparison in some contexts.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// if x.len() == 0 {
+ /// ..
+ /// }
+ /// if y.len() != 0 {
+ /// ..
+ /// }
+ /// ```
+ /// instead use
+ /// ```ignore
+ /// if x.is_empty() {
+ /// ..
+ /// }
+ /// if !y.is_empty() {
+ /// ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LEN_ZERO,
+ style,
+ "checking `.len() == 0` or `.len() > 0` (or similar) when `.is_empty()` could be used instead"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for items that implement `.len()` but not
+ /// `.is_empty()`.
+ ///
+ /// ### Why is this bad?
+ /// It is good custom to have both methods, because for
+ /// some data structures, asking about the length will be a costly operation,
+ /// whereas `.is_empty()` can usually answer in constant time. Also it used to
+ /// lead to false positives on the [`len_zero`](#len_zero) lint – currently that
+ /// lint will ignore such entities.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// impl X {
+ /// pub fn len(&self) -> usize {
+ /// ..
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LEN_WITHOUT_IS_EMPTY,
+ style,
+ "traits or impls with a public `len` method but no corresponding `is_empty` method"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparing to an empty slice such as `""` or `[]`,
+ /// and suggests using `.is_empty()` where applicable.
+ ///
+ /// ### Why is this bad?
+ /// Some structures can answer `.is_empty()` much faster
+ /// than checking for equality. So it is good to get into the habit of using
+ /// `.is_empty()`, and having it is cheap.
+ /// Besides, it makes the intent clearer than a manual comparison in some contexts.
+ ///
+ /// ### Example
+ ///
+ /// ```ignore
+ /// if s == "" {
+ /// ..
+ /// }
+ ///
+ /// if arr == [] {
+ /// ..
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```ignore
+ /// if s.is_empty() {
+ /// ..
+ /// }
+ ///
+ /// if arr.is_empty() {
+ /// ..
+ /// }
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub COMPARISON_TO_EMPTY,
+ style,
+ "checking `x == \"\"` or `x == []` (or similar) when `.is_empty()` could be used instead"
+}
+
+declare_lint_pass!(LenZero => [LEN_ZERO, LEN_WITHOUT_IS_EMPTY, COMPARISON_TO_EMPTY]);
+
+impl<'tcx> LateLintPass<'tcx> for LenZero {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if item.span.from_expansion() {
+ return;
+ }
+
+ if let ItemKind::Trait(_, _, _, _, trait_items) = item.kind {
+ check_trait_items(cx, item, trait_items);
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
+ if_chain! {
+ if item.ident.name == sym::len;
+ if let ImplItemKind::Fn(sig, _) = &item.kind;
+ if sig.decl.implicit_self.has_implicit_self();
+ if cx.access_levels.is_exported(item.def_id);
+ if matches!(sig.decl.output, FnRetTy::Return(_));
+ if let Some(imp) = get_parent_as_impl(cx.tcx, item.hir_id());
+ if imp.of_trait.is_none();
+ if let TyKind::Path(ty_path) = &imp.self_ty.kind;
+ if let Some(ty_id) = cx.qpath_res(ty_path, imp.self_ty.hir_id).opt_def_id();
+ if let Some(local_id) = ty_id.as_local();
+ let ty_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
+ if !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id);
+ if let Some(output) = parse_len_output(cx, cx.tcx.fn_sig(item.def_id).skip_binder());
+ then {
+ let (name, kind) = match cx.tcx.hir().find(ty_hir_id) {
+ Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"),
+ Some(Node::Item(x)) => match x.kind {
+ ItemKind::Struct(..) => (x.ident.name, "struct"),
+ ItemKind::Enum(..) => (x.ident.name, "enum"),
+ ItemKind::Union(..) => (x.ident.name, "union"),
+ _ => (x.ident.name, "type"),
+ }
+ _ => return,
+ };
+ check_for_is_empty(cx, sig.span, sig.decl.implicit_self, output, ty_id, name, kind)
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ if let ExprKind::Binary(Spanned { node: cmp, .. }, left, right) = expr.kind {
+ match cmp {
+ BinOpKind::Eq => {
+ check_cmp(cx, expr.span, left, right, "", 0); // len == 0
+ check_cmp(cx, expr.span, right, left, "", 0); // 0 == len
+ },
+ BinOpKind::Ne => {
+ check_cmp(cx, expr.span, left, right, "!", 0); // len != 0
+ check_cmp(cx, expr.span, right, left, "!", 0); // 0 != len
+ },
+ BinOpKind::Gt => {
+ check_cmp(cx, expr.span, left, right, "!", 0); // len > 0
+ check_cmp(cx, expr.span, right, left, "", 1); // 1 > len
+ },
+ BinOpKind::Lt => {
+ check_cmp(cx, expr.span, left, right, "", 1); // len < 1
+ check_cmp(cx, expr.span, right, left, "!", 0); // 0 < len
+ },
+ BinOpKind::Ge => check_cmp(cx, expr.span, left, right, "!", 1), // len >= 1
+ BinOpKind::Le => check_cmp(cx, expr.span, right, left, "!", 1), // 1 <= len
+ _ => (),
+ }
+ }
+ }
+}
+
+fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, trait_items: &[TraitItemRef]) {
+ fn is_named_self(cx: &LateContext<'_>, item: &TraitItemRef, name: Symbol) -> bool {
+ item.ident.name == name
+ && if let AssocItemKind::Fn { has_self } = item.kind {
+ has_self && { cx.tcx.fn_sig(item.id.def_id).inputs().skip_binder().len() == 1 }
+ } else {
+ false
+ }
+ }
+
+ // fill the set with current and super traits
+ fn fill_trait_set(traitt: DefId, set: &mut DefIdSet, cx: &LateContext<'_>) {
+ if set.insert(traitt) {
+ for supertrait in rustc_trait_selection::traits::supertrait_def_ids(cx.tcx, traitt) {
+ fill_trait_set(supertrait, set, cx);
+ }
+ }
+ }
+
+ if cx.access_levels.is_exported(visited_trait.def_id) && trait_items.iter().any(|i| is_named_self(cx, i, sym::len))
+ {
+ let mut current_and_super_traits = DefIdSet::default();
+ fill_trait_set(visited_trait.def_id.to_def_id(), &mut current_and_super_traits, cx);
+ let is_empty = sym!(is_empty);
+
+ let is_empty_method_found = current_and_super_traits
+ .iter()
+ .flat_map(|&i| cx.tcx.associated_items(i).filter_by_name_unhygienic(is_empty))
+ .any(|i| {
+ i.kind == ty::AssocKind::Fn
+ && i.fn_has_self_parameter
+ && cx.tcx.fn_sig(i.def_id).inputs().skip_binder().len() == 1
+ });
+
+ if !is_empty_method_found {
+ span_lint(
+ cx,
+ LEN_WITHOUT_IS_EMPTY,
+ visited_trait.span,
+ &format!(
+ "trait `{}` has a `len` method but no (possibly inherited) `is_empty` method",
+ visited_trait.ident.name
+ ),
+ );
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+enum LenOutput<'tcx> {
+ Integral,
+ Option(DefId),
+ Result(DefId, Ty<'tcx>),
+}
+fn parse_len_output<'tcx>(cx: &LateContext<'_>, sig: FnSig<'tcx>) -> Option<LenOutput<'tcx>> {
+ match *sig.output().kind() {
+ ty::Int(_) | ty::Uint(_) => Some(LenOutput::Integral),
+ ty::Adt(adt, subs) if cx.tcx.is_diagnostic_item(sym::Option, adt.did()) => {
+ subs.type_at(0).is_integral().then(|| LenOutput::Option(adt.did()))
+ },
+ ty::Adt(adt, subs) if cx.tcx.is_diagnostic_item(sym::Result, adt.did()) => subs
+ .type_at(0)
+ .is_integral()
+ .then(|| LenOutput::Result(adt.did(), subs.type_at(1))),
+ _ => None,
+ }
+}
+
+impl<'tcx> LenOutput<'tcx> {
+ fn matches_is_empty_output(self, ty: Ty<'tcx>) -> bool {
+ match (self, ty.kind()) {
+ (_, &ty::Bool) => true,
+ (Self::Option(id), &ty::Adt(adt, subs)) if id == adt.did() => subs.type_at(0).is_bool(),
+ (Self::Result(id, err_ty), &ty::Adt(adt, subs)) if id == adt.did() => {
+ subs.type_at(0).is_bool() && subs.type_at(1) == err_ty
+ },
+ _ => false,
+ }
+ }
+
+ fn expected_sig(self, self_kind: ImplicitSelfKind) -> String {
+ let self_ref = match self_kind {
+ ImplicitSelfKind::ImmRef => "&",
+ ImplicitSelfKind::MutRef => "&mut ",
+ _ => "",
+ };
+ match self {
+ Self::Integral => format!("expected signature: `({}self) -> bool`", self_ref),
+ Self::Option(_) => format!(
+                "expected signature: `({}self) -> bool` or `({}self) -> Option<bool>`",
+ self_ref, self_ref
+ ),
+ Self::Result(..) => format!(
+                "expected signature: `({}self) -> bool` or `({}self) -> Result<bool>`",
+ self_ref, self_ref
+ ),
+ }
+ }
+}
+
+/// Checks if the given signature matches the expectations for `is_empty`
+fn check_is_empty_sig<'tcx>(sig: FnSig<'tcx>, self_kind: ImplicitSelfKind, len_output: LenOutput<'tcx>) -> bool {
+ match &**sig.inputs_and_output {
+ [arg, res] if len_output.matches_is_empty_output(*res) => {
+ matches!(
+ (arg.kind(), self_kind),
+ (ty::Ref(_, _, Mutability::Not), ImplicitSelfKind::ImmRef)
+ | (ty::Ref(_, _, Mutability::Mut), ImplicitSelfKind::MutRef)
+ ) || (!arg.is_ref() && matches!(self_kind, ImplicitSelfKind::Imm | ImplicitSelfKind::Mut))
+ },
+ _ => false,
+ }
+}
+
+/// Checks if the given type has an `is_empty` method with the appropriate signature.
+fn check_for_is_empty<'tcx>(
+ cx: &LateContext<'tcx>,
+ span: Span,
+ self_kind: ImplicitSelfKind,
+ output: LenOutput<'tcx>,
+ impl_ty: DefId,
+ item_name: Symbol,
+ item_kind: &str,
+) {
+ let is_empty = Symbol::intern("is_empty");
+ let is_empty = cx
+ .tcx
+ .inherent_impls(impl_ty)
+ .iter()
+ .flat_map(|&id| cx.tcx.associated_items(id).filter_by_name_unhygienic(is_empty))
+ .find(|item| item.kind == AssocKind::Fn);
+
+ let (msg, is_empty_span, self_kind) = match is_empty {
+ None => (
+ format!(
+ "{} `{}` has a public `len` method, but no `is_empty` method",
+ item_kind,
+ item_name.as_str(),
+ ),
+ None,
+ None,
+ ),
+ Some(is_empty) if !cx.access_levels.is_exported(is_empty.def_id.expect_local()) => (
+ format!(
+ "{} `{}` has a public `len` method, but a private `is_empty` method",
+ item_kind,
+ item_name.as_str(),
+ ),
+ Some(cx.tcx.def_span(is_empty.def_id)),
+ None,
+ ),
+ Some(is_empty)
+ if !(is_empty.fn_has_self_parameter
+ && check_is_empty_sig(cx.tcx.fn_sig(is_empty.def_id).skip_binder(), self_kind, output)) =>
+ {
+ (
+ format!(
+ "{} `{}` has a public `len` method, but the `is_empty` method has an unexpected signature",
+ item_kind,
+ item_name.as_str(),
+ ),
+ Some(cx.tcx.def_span(is_empty.def_id)),
+ Some(self_kind),
+ )
+ },
+ Some(_) => return,
+ };
+
+ span_lint_and_then(cx, LEN_WITHOUT_IS_EMPTY, span, &msg, |db| {
+ if let Some(span) = is_empty_span {
+ db.span_note(span, "`is_empty` defined here");
+ }
+ if let Some(self_kind) = self_kind {
+ db.note(&output.expected_sig(self_kind));
+ }
+ });
+}
+
+fn check_cmp(cx: &LateContext<'_>, span: Span, method: &Expr<'_>, lit: &Expr<'_>, op: &str, compare_to: u32) {
+ if let (&ExprKind::MethodCall(method_path, args, _), &ExprKind::Lit(ref lit)) = (&method.kind, &lit.kind) {
+ // check if we are in an is_empty() method
+ if let Some(name) = get_item_name(cx, method) {
+ if name.as_str() == "is_empty" {
+ return;
+ }
+ }
+
+ check_len(cx, span, method_path.ident.name, args, &lit.node, op, compare_to);
+ } else {
+ check_empty_expr(cx, span, method, lit, op);
+ }
+}
+
+fn check_len(
+ cx: &LateContext<'_>,
+ span: Span,
+ method_name: Symbol,
+ args: &[Expr<'_>],
+ lit: &LitKind,
+ op: &str,
+ compare_to: u32,
+) {
+ if let LitKind::Int(lit, _) = *lit {
+ // check if length is compared to the specified number
+ if lit != u128::from(compare_to) {
+ return;
+ }
+
+ if method_name == sym::len && args.len() == 1 && has_is_empty(cx, &args[0]) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ LEN_ZERO,
+ span,
+ &format!("length comparison to {}", if compare_to == 0 { "zero" } else { "one" }),
+ &format!("using `{}is_empty` is clearer and more explicit", op),
+ format!(
+ "{}{}.is_empty()",
+ op,
+ snippet_with_applicability(cx, args[0].span, "_", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+}
+
+fn check_empty_expr(cx: &LateContext<'_>, span: Span, lit1: &Expr<'_>, lit2: &Expr<'_>, op: &str) {
+ if (is_empty_array(lit2) || is_empty_string(lit2)) && has_is_empty(cx, lit1) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ COMPARISON_TO_EMPTY,
+ span,
+ "comparison to empty slice",
+ &format!("using `{}is_empty` is clearer and more explicit", op),
+ format!(
+ "{}{}.is_empty()",
+ op,
+ snippet_with_applicability(cx, lit1.span, "_", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+}
+
+fn is_empty_string(expr: &Expr<'_>) -> bool {
+ if let ExprKind::Lit(ref lit) = expr.kind {
+ if let LitKind::Str(lit, _) = lit.node {
+ let lit = lit.as_str();
+ return lit.is_empty();
+ }
+ }
+ false
+}
+
+fn is_empty_array(expr: &Expr<'_>) -> bool {
+ if let ExprKind::Array(arr) = expr.kind {
+ return arr.is_empty();
+ }
+ false
+}
+
+/// Checks if this type has an `is_empty` method.
+fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+    /// Gets an `AssocItem` and returns true if it matches `is_empty(self)`.
+ fn is_is_empty(cx: &LateContext<'_>, item: &ty::AssocItem) -> bool {
+ if item.kind == ty::AssocKind::Fn {
+ let sig = cx.tcx.fn_sig(item.def_id);
+ let ty = sig.skip_binder();
+ ty.inputs().len() == 1
+ } else {
+ false
+ }
+ }
+
+ /// Checks the inherent impl's items for an `is_empty(self)` method.
+ fn has_is_empty_impl(cx: &LateContext<'_>, id: DefId) -> bool {
+ let is_empty = sym!(is_empty);
+ cx.tcx.inherent_impls(id).iter().any(|imp| {
+ cx.tcx
+ .associated_items(*imp)
+ .filter_by_name_unhygienic(is_empty)
+ .any(|item| is_is_empty(cx, item))
+ })
+ }
+
+ let ty = &cx.typeck_results().expr_ty(expr).peel_refs();
+ match ty.kind() {
+ ty::Dynamic(tt, ..) => tt.principal().map_or(false, |principal| {
+ let is_empty = sym!(is_empty);
+ cx.tcx
+ .associated_items(principal.def_id())
+ .filter_by_name_unhygienic(is_empty)
+ .any(|item| is_is_empty(cx, item))
+ }),
+ ty::Projection(ref proj) => has_is_empty_impl(cx, proj.item_def_id),
+ ty::Adt(id, _) => has_is_empty_impl(cx, id.did()),
+ ty::Array(..) | ty::Slice(..) | ty::Str => true,
+ _ => false,
+ }
+}
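Reading `check_expr` and `check_cmp` together, the comparison-to-suggestion mapping for LEN_ZERO and COMPARISON_TO_EMPTY looks like this (a sketch; `v` and `s` are just illustrative bindings):

```rust
fn main() {
    let v = vec![1, 2, 3];
    // LEN_ZERO: each comparison gets a machine-applicable suggestion.
    let _ = v.len() == 0; // -> v.is_empty()
    let _ = v.len() != 0; // -> !v.is_empty()
    let _ = v.len() > 0; // -> !v.is_empty()
    let _ = v.len() < 1; // -> v.is_empty()

    // COMPARISON_TO_EMPTY: comparing against an empty literal.
    let s = String::new();
    let _ = s == ""; // -> s.is_empty()
}
```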
diff --git a/src/tools/clippy/clippy_lints/src/let_if_seq.rs b/src/tools/clippy/clippy_lints/src/let_if_seq.rs
new file mode 100644
index 000000000..56bbbbbc8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/let_if_seq.rs
@@ -0,0 +1,160 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::{path_to_local_id, visitors::is_local_used};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::BindingAnnotation;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for variable declarations immediately followed by a
+    /// conditional assignment.
+ ///
+ /// ### Why is this bad?
+ /// This is not idiomatic Rust.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let foo;
+ ///
+ /// if bar() {
+ /// foo = 42;
+ /// } else {
+ /// foo = 0;
+ /// }
+ ///
+ /// let mut baz = None;
+ ///
+ /// if bar() {
+ /// baz = Some(42);
+ /// }
+ /// ```
+ ///
+ /// should be written
+ ///
+ /// ```rust,ignore
+ /// let foo = if bar() {
+ /// 42
+ /// } else {
+ /// 0
+ /// };
+ ///
+ /// let baz = if bar() {
+ /// Some(42)
+ /// } else {
+ /// None
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USELESS_LET_IF_SEQ,
+ nursery,
+ "unidiomatic `let mut` declaration followed by initialization in `if`"
+}
+
+declare_lint_pass!(LetIfSeq => [USELESS_LET_IF_SEQ]);
+
+impl<'tcx> LateLintPass<'tcx> for LetIfSeq {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
+ let mut it = block.stmts.iter().peekable();
+ while let Some(stmt) = it.next() {
+ if_chain! {
+ if let Some(expr) = it.peek();
+ if let hir::StmtKind::Local(local) = stmt.kind;
+ if let hir::PatKind::Binding(mode, canonical_id, ident, None) = local.pat.kind;
+ if let hir::StmtKind::Expr(if_) = expr.kind;
+ if let hir::ExprKind::If(hir::Expr { kind: hir::ExprKind::DropTemps(cond), ..}, then, else_) = if_.kind;
+ if !is_local_used(cx, *cond, canonical_id);
+ if let hir::ExprKind::Block(then, _) = then.kind;
+ if let Some(value) = check_assign(cx, canonical_id, then);
+ if !is_local_used(cx, value, canonical_id);
+ then {
+ let span = stmt.span.to(if_.span);
+
+ let has_interior_mutability = !cx.typeck_results().node_type(canonical_id).is_freeze(
+ cx.tcx.at(span),
+ cx.param_env,
+ );
+ if has_interior_mutability { return; }
+
+ let (default_multi_stmts, default) = if let Some(else_) = else_ {
+ if let hir::ExprKind::Block(else_, _) = else_.kind {
+ if let Some(default) = check_assign(cx, canonical_id, else_) {
+ (else_.stmts.len() > 1, default)
+ } else if let Some(default) = local.init {
+ (true, default)
+ } else {
+ continue;
+ }
+ } else {
+ continue;
+ }
+ } else if let Some(default) = local.init {
+ (false, default)
+ } else {
+ continue;
+ };
+
+ let mutability = match mode {
+ BindingAnnotation::RefMut | BindingAnnotation::Mutable => "<mut> ",
+ _ => "",
+ };
+
+ // FIXME: this should not suggest `mut` if we can detect that the variable is not
+                // used mutably after the `if`
+
+ let sug = format!(
+ "let {mut}{name} = if {cond} {{{then} {value} }} else {{{else} {default} }};",
+ mut=mutability,
+ name=ident.name,
+ cond=snippet(cx, cond.span, "_"),
+ then=if then.stmts.len() > 1 { " ..;" } else { "" },
+ else=if default_multi_stmts { " ..;" } else { "" },
+ value=snippet(cx, value.span, "<value>"),
+ default=snippet(cx, default.span, "<default>"),
+ );
+ span_lint_and_then(cx,
+ USELESS_LET_IF_SEQ,
+ span,
+ "`if _ { .. } else { .. }` is an expression",
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "it is more idiomatic to write",
+ sug,
+ Applicability::HasPlaceholders,
+ );
+ if !mutability.is_empty() {
+ diag.note("you might not need `mut` at all");
+ }
+ });
+ }
+ }
+ }
+ }
+}
+
+fn check_assign<'tcx>(
+ cx: &LateContext<'tcx>,
+ decl: hir::HirId,
+ block: &'tcx hir::Block<'_>,
+) -> Option<&'tcx hir::Expr<'tcx>> {
+ if_chain! {
+ if block.expr.is_none();
+ if let Some(expr) = block.stmts.iter().last();
+ if let hir::StmtKind::Semi(expr) = expr.kind;
+ if let hir::ExprKind::Assign(var, value, _) = expr.kind;
+ if path_to_local_id(var, decl);
+ then {
+ if block.stmts.iter().take(block.stmts.len()-1).any(|stmt| is_local_used(cx, stmt, decl)) {
+ None
+ } else {
+ Some(value)
+ }
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/let_underscore.rs b/src/tools/clippy/clippy_lints/src/let_underscore.rs
new file mode 100644
index 000000000..176787497
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/let_underscore.rs
@@ -0,0 +1,171 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::{is_must_use_ty, match_type};
+use clippy_utils::{is_must_use_func_call, paths};
+use if_chain::if_chain;
+use rustc_hir::{Local, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::subst::GenericArgKind;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `let _ = <expr>` where expr is `#[must_use]`
+ ///
+ /// ### Why is this bad?
+ /// It's better to explicitly handle the value of a `#[must_use]`
+ /// expr
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn f() -> Result<u32, u32> {
+ /// Ok(0)
+ /// }
+ ///
+ /// let _ = f();
+ /// // is_ok() is marked #[must_use]
+ /// let _ = f().is_ok();
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub LET_UNDERSCORE_MUST_USE,
+ restriction,
+ "non-binding let on a `#[must_use]` expression"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `let _ = sync_lock`.
+ /// This supports `mutex` and `rwlock` in `std::sync` and `parking_lot`.
+ ///
+ /// ### Why is this bad?
+ /// This statement immediately drops the lock instead of
+ /// extending its lifetime to the end of the scope, which is often not intended.
+ /// To extend lock lifetime to the end of the scope, use an underscore-prefixed
+ /// name instead (i.e. _lock). If you want to explicitly drop the lock,
+ /// `std::mem::drop` conveys your intention better and is less error-prone.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let _ = mutex.lock();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// let _lock = mutex.lock();
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub LET_UNDERSCORE_LOCK,
+ correctness,
+ "non-binding let on a synchronization lock"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `let _ = <expr>`
+ /// where expr has a type that implements `Drop`
+ ///
+ /// ### Why is this bad?
+ /// This statement immediately drops the initializer
+ /// expression instead of extending its lifetime to the end of the scope, which
+ /// is often not intended. To extend the expression's lifetime to the end of the
+ /// scope, use an underscore-prefixed name instead (i.e. _var). If you want to
+ /// explicitly drop the expression, `std::mem::drop` conveys your intention
+ /// better and is less error-prone.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct DroppableItem;
+ /// {
+ /// let _ = DroppableItem;
+ /// // ^ dropped here
+ /// /* more code */
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # struct DroppableItem;
+ /// {
+ /// let _droppable = DroppableItem;
+ /// /* more code */
+ /// // dropped at end of scope
+ /// }
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub LET_UNDERSCORE_DROP,
+ pedantic,
+ "non-binding let on a type that implements `Drop`"
+}
+
+declare_lint_pass!(LetUnderscore => [LET_UNDERSCORE_MUST_USE, LET_UNDERSCORE_LOCK, LET_UNDERSCORE_DROP]);
+
+const SYNC_GUARD_PATHS: [&[&str]; 6] = [
+ &paths::MUTEX_GUARD,
+ &paths::RWLOCK_READ_GUARD,
+ &paths::RWLOCK_WRITE_GUARD,
+ &paths::PARKING_LOT_MUTEX_GUARD,
+ &paths::PARKING_LOT_RWLOCK_READ_GUARD,
+ &paths::PARKING_LOT_RWLOCK_WRITE_GUARD,
+];
+
+impl<'tcx> LateLintPass<'tcx> for LetUnderscore {
+ fn check_local(&mut self, cx: &LateContext<'_>, local: &Local<'_>) {
+ if in_external_macro(cx.tcx.sess, local.span) {
+ return;
+ }
+
+ if_chain! {
+ if let PatKind::Wild = local.pat.kind;
+ if let Some(init) = local.init;
+ then {
+ let init_ty = cx.typeck_results().expr_ty(init);
+ let contains_sync_guard = init_ty.walk().any(|inner| match inner.unpack() {
+ GenericArgKind::Type(inner_ty) => {
+ SYNC_GUARD_PATHS.iter().any(|path| match_type(cx, inner_ty, path))
+ },
+
+ GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => false,
+ });
+ if contains_sync_guard {
+ span_lint_and_help(
+ cx,
+ LET_UNDERSCORE_LOCK,
+ local.span,
+ "non-binding let on a synchronization lock",
+ None,
+ "consider using an underscore-prefixed named \
+ binding or dropping explicitly with `std::mem::drop`"
+ );
+ } else if init_ty.needs_drop(cx.tcx, cx.param_env) {
+ span_lint_and_help(
+ cx,
+ LET_UNDERSCORE_DROP,
+ local.span,
+ "non-binding `let` on a type that implements `Drop`",
+ None,
+ "consider using an underscore-prefixed named \
+ binding or dropping explicitly with `std::mem::drop`"
+ );
+ } else if is_must_use_ty(cx, cx.typeck_results().expr_ty(init)) {
+ span_lint_and_help(
+ cx,
+ LET_UNDERSCORE_MUST_USE,
+ local.span,
+ "non-binding let on an expression with `#[must_use]` type",
+ None,
+ "consider explicitly using expression value"
+ );
+ } else if is_must_use_func_call(cx, init) {
+ span_lint_and_help(
+ cx,
+ LET_UNDERSCORE_MUST_USE,
+ local.span,
+ "non-binding let on a result of a `#[must_use]` function",
+ None,
+ "consider explicitly using function result"
+ );
+ }
+ }
+ }
+ }
+}
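Because the branches in `check_local` above are chained with `else if`, each `let _ =` statement triggers at most one of the three lints, and a lock guard is reported as LET_UNDERSCORE_LOCK even though it would also qualify for the drop lint. A sketch covering all three cases (the `answer` function is made up for illustration):

```rust
use std::sync::Mutex;

#[must_use]
fn answer() -> u32 {
    42
}

fn main() {
    let mutex = Mutex::new(0);
    // LET_UNDERSCORE_LOCK: the guard is dropped immediately.
    let _ = mutex.lock();
    // LET_UNDERSCORE_DROP: `String` needs drop (it owns a heap allocation).
    let _ = String::from("dropped right away");
    // LET_UNDERSCORE_MUST_USE: result of a `#[must_use]` function.
    let _ = answer();
}
```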
diff --git a/src/tools/clippy/clippy_lints/src/lib.deprecated.rs b/src/tools/clippy/clippy_lints/src/lib.deprecated.rs
new file mode 100644
index 000000000..80bde1b11
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.deprecated.rs
@@ -0,0 +1,70 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+{
+ store.register_removed(
+ "clippy::should_assert_eq",
+ "`assert!()` will be more flexible with RFC 2011",
+ );
+ store.register_removed(
+ "clippy::extend_from_slice",
+ "`.extend_from_slice(_)` is a faster way to extend a Vec by a slice",
+ );
+ store.register_removed(
+ "clippy::range_step_by_zero",
+ "`iterator.step_by(0)` panics nowadays",
+ );
+ store.register_removed(
+ "clippy::unstable_as_slice",
+ "`Vec::as_slice` has been stabilized in 1.7",
+ );
+ store.register_removed(
+ "clippy::unstable_as_mut_slice",
+ "`Vec::as_mut_slice` has been stabilized in 1.7",
+ );
+ store.register_removed(
+ "clippy::misaligned_transmute",
+ "this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr",
+ );
+ store.register_removed(
+ "clippy::assign_ops",
+ "using compound assignment operators (e.g., `+=`) is harmless",
+ );
+ store.register_removed(
+ "clippy::if_let_redundant_pattern_matching",
+ "this lint has been changed to redundant_pattern_matching",
+ );
+ store.register_removed(
+ "clippy::unsafe_vector_initialization",
+ "the replacement suggested by this lint had substantially different behavior",
+ );
+ store.register_removed(
+ "clippy::unused_collect",
+ "`collect` has been marked as #[must_use] in rustc and that covers all cases of this lint",
+ );
+ store.register_removed(
+ "clippy::replace_consts",
+ "associated-constants `MIN`/`MAX` of integers are preferred to `{min,max}_value()` and module constants",
+ );
+ store.register_removed(
+ "clippy::regex_macro",
+ "the regex! macro has been removed from the regex crate in 2018",
+ );
+ store.register_removed(
+ "clippy::find_map",
+ "this lint has been replaced by `manual_find_map`, a more specific lint",
+ );
+ store.register_removed(
+ "clippy::filter_map",
+ "this lint has been replaced by `manual_filter_map`, a more specific lint",
+ );
+ store.register_removed(
+ "clippy::pub_enum_variant_names",
+ "set the `avoid-breaking-exported-api` config option to `false` to enable the `enum_variant_names` lint for public items",
+ );
+ store.register_removed(
+ "clippy::wrong_pub_self_convention",
+ "set the `avoid-breaking-exported-api` config option to `false` to enable the `wrong_self_convention` lint for public items",
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_all.rs b/src/tools/clippy/clippy_lints/src/lib.register_all.rs
new file mode 100644
index 000000000..763dd2a40
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_all.rs
@@ -0,0 +1,352 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::all", Some("clippy_all"), vec![
+ LintId::of(almost_complete_letter_range::ALMOST_COMPLETE_LETTER_RANGE),
+ LintId::of(approx_const::APPROX_CONSTANT),
+ LintId::of(assertions_on_constants::ASSERTIONS_ON_CONSTANTS),
+ LintId::of(async_yields_async::ASYNC_YIELDS_ASYNC),
+ LintId::of(attrs::BLANKET_CLIPPY_RESTRICTION_LINTS),
+ LintId::of(attrs::DEPRECATED_CFG_ATTR),
+ LintId::of(attrs::DEPRECATED_SEMVER),
+ LintId::of(attrs::MISMATCHED_TARGET_OS),
+ LintId::of(attrs::USELESS_ATTRIBUTE),
+ LintId::of(await_holding_invalid::AWAIT_HOLDING_INVALID_TYPE),
+ LintId::of(await_holding_invalid::AWAIT_HOLDING_LOCK),
+ LintId::of(await_holding_invalid::AWAIT_HOLDING_REFCELL_REF),
+ LintId::of(blacklisted_name::BLACKLISTED_NAME),
+ LintId::of(blocks_in_if_conditions::BLOCKS_IN_IF_CONDITIONS),
+ LintId::of(bool_assert_comparison::BOOL_ASSERT_COMPARISON),
+ LintId::of(booleans::LOGIC_BUG),
+ LintId::of(booleans::NONMINIMAL_BOOL),
+ LintId::of(borrow_deref_ref::BORROW_DEREF_REF),
+ LintId::of(bytes_count_to_len::BYTES_COUNT_TO_LEN),
+ LintId::of(casts::CAST_ABS_TO_UNSIGNED),
+ LintId::of(casts::CAST_ENUM_CONSTRUCTOR),
+ LintId::of(casts::CAST_ENUM_TRUNCATION),
+ LintId::of(casts::CAST_REF_TO_MUT),
+ LintId::of(casts::CAST_SLICE_DIFFERENT_SIZES),
+ LintId::of(casts::CHAR_LIT_AS_U8),
+ LintId::of(casts::FN_TO_NUMERIC_CAST),
+ LintId::of(casts::FN_TO_NUMERIC_CAST_WITH_TRUNCATION),
+ LintId::of(casts::UNNECESSARY_CAST),
+ LintId::of(collapsible_if::COLLAPSIBLE_ELSE_IF),
+ LintId::of(collapsible_if::COLLAPSIBLE_IF),
+ LintId::of(comparison_chain::COMPARISON_CHAIN),
+ LintId::of(copies::IFS_SAME_COND),
+ LintId::of(copies::IF_SAME_THEN_ELSE),
+ LintId::of(crate_in_macro_def::CRATE_IN_MACRO_DEF),
+ LintId::of(default::FIELD_REASSIGN_WITH_DEFAULT),
+ LintId::of(default_instead_of_iter_empty::DEFAULT_INSTEAD_OF_ITER_EMPTY),
+ LintId::of(dereference::NEEDLESS_BORROW),
+ LintId::of(derivable_impls::DERIVABLE_IMPLS),
+ LintId::of(derive::DERIVE_HASH_XOR_EQ),
+ LintId::of(derive::DERIVE_ORD_XOR_PARTIAL_ORD),
+ LintId::of(derive::DERIVE_PARTIAL_EQ_WITHOUT_EQ),
+ LintId::of(disallowed_methods::DISALLOWED_METHODS),
+ LintId::of(disallowed_types::DISALLOWED_TYPES),
+ LintId::of(doc::MISSING_SAFETY_DOC),
+ LintId::of(doc::NEEDLESS_DOCTEST_MAIN),
+ LintId::of(double_parens::DOUBLE_PARENS),
+ LintId::of(drop_forget_ref::DROP_COPY),
+ LintId::of(drop_forget_ref::DROP_NON_DROP),
+ LintId::of(drop_forget_ref::DROP_REF),
+ LintId::of(drop_forget_ref::FORGET_COPY),
+ LintId::of(drop_forget_ref::FORGET_NON_DROP),
+ LintId::of(drop_forget_ref::FORGET_REF),
+ LintId::of(drop_forget_ref::UNDROPPED_MANUALLY_DROPS),
+ LintId::of(duplicate_mod::DUPLICATE_MOD),
+ LintId::of(entry::MAP_ENTRY),
+ LintId::of(enum_clike::ENUM_CLIKE_UNPORTABLE_VARIANT),
+ LintId::of(enum_variants::ENUM_VARIANT_NAMES),
+ LintId::of(enum_variants::MODULE_INCEPTION),
+ LintId::of(escape::BOXED_LOCAL),
+ LintId::of(eta_reduction::REDUNDANT_CLOSURE),
+ LintId::of(explicit_write::EXPLICIT_WRITE),
+ LintId::of(float_literal::EXCESSIVE_PRECISION),
+ LintId::of(format::USELESS_FORMAT),
+ LintId::of(format_args::FORMAT_IN_FORMAT_ARGS),
+ LintId::of(format_args::TO_STRING_IN_FORMAT_ARGS),
+ LintId::of(format_impl::PRINT_IN_FORMAT_IMPL),
+ LintId::of(format_impl::RECURSIVE_FORMAT_IMPL),
+ LintId::of(formatting::POSSIBLE_MISSING_COMMA),
+ LintId::of(formatting::SUSPICIOUS_ASSIGNMENT_FORMATTING),
+ LintId::of(formatting::SUSPICIOUS_ELSE_FORMATTING),
+ LintId::of(formatting::SUSPICIOUS_UNARY_OP_FORMATTING),
+ LintId::of(from_over_into::FROM_OVER_INTO),
+ LintId::of(from_str_radix_10::FROM_STR_RADIX_10),
+ LintId::of(functions::DOUBLE_MUST_USE),
+ LintId::of(functions::MUST_USE_UNIT),
+ LintId::of(functions::NOT_UNSAFE_PTR_ARG_DEREF),
+ LintId::of(functions::RESULT_UNIT_ERR),
+ LintId::of(functions::TOO_MANY_ARGUMENTS),
+ LintId::of(get_first::GET_FIRST),
+ LintId::of(if_let_mutex::IF_LET_MUTEX),
+ LintId::of(indexing_slicing::OUT_OF_BOUNDS_INDEXING),
+ LintId::of(infinite_iter::INFINITE_ITER),
+ LintId::of(inherent_to_string::INHERENT_TO_STRING),
+ LintId::of(inherent_to_string::INHERENT_TO_STRING_SHADOW_DISPLAY),
+ LintId::of(init_numbered_fields::INIT_NUMBERED_FIELDS),
+ LintId::of(inline_fn_without_body::INLINE_FN_WITHOUT_BODY),
+ LintId::of(int_plus_one::INT_PLUS_ONE),
+ LintId::of(invalid_utf8_in_unchecked::INVALID_UTF8_IN_UNCHECKED),
+ LintId::of(large_const_arrays::LARGE_CONST_ARRAYS),
+ LintId::of(large_enum_variant::LARGE_ENUM_VARIANT),
+ LintId::of(len_zero::COMPARISON_TO_EMPTY),
+ LintId::of(len_zero::LEN_WITHOUT_IS_EMPTY),
+ LintId::of(len_zero::LEN_ZERO),
+ LintId::of(let_underscore::LET_UNDERSCORE_LOCK),
+ LintId::of(lifetimes::EXTRA_UNUSED_LIFETIMES),
+ LintId::of(lifetimes::NEEDLESS_LIFETIMES),
+ LintId::of(literal_representation::INCONSISTENT_DIGIT_GROUPING),
+ LintId::of(literal_representation::MISTYPED_LITERAL_SUFFIXES),
+ LintId::of(literal_representation::UNUSUAL_BYTE_GROUPINGS),
+ LintId::of(loops::EMPTY_LOOP),
+ LintId::of(loops::EXPLICIT_COUNTER_LOOP),
+ LintId::of(loops::FOR_KV_MAP),
+ LintId::of(loops::FOR_LOOPS_OVER_FALLIBLES),
+ LintId::of(loops::ITER_NEXT_LOOP),
+ LintId::of(loops::MANUAL_FIND),
+ LintId::of(loops::MANUAL_FLATTEN),
+ LintId::of(loops::MANUAL_MEMCPY),
+ LintId::of(loops::MISSING_SPIN_LOOP),
+ LintId::of(loops::MUT_RANGE_BOUND),
+ LintId::of(loops::NEEDLESS_COLLECT),
+ LintId::of(loops::NEEDLESS_RANGE_LOOP),
+ LintId::of(loops::NEVER_LOOP),
+ LintId::of(loops::SAME_ITEM_PUSH),
+ LintId::of(loops::SINGLE_ELEMENT_LOOP),
+ LintId::of(loops::WHILE_IMMUTABLE_CONDITION),
+ LintId::of(loops::WHILE_LET_LOOP),
+ LintId::of(loops::WHILE_LET_ON_ITERATOR),
+ LintId::of(main_recursion::MAIN_RECURSION),
+ LintId::of(manual_async_fn::MANUAL_ASYNC_FN),
+ LintId::of(manual_bits::MANUAL_BITS),
+ LintId::of(manual_non_exhaustive::MANUAL_NON_EXHAUSTIVE),
+ LintId::of(manual_rem_euclid::MANUAL_REM_EUCLID),
+ LintId::of(manual_retain::MANUAL_RETAIN),
+ LintId::of(manual_strip::MANUAL_STRIP),
+ LintId::of(map_clone::MAP_CLONE),
+ LintId::of(map_unit_fn::OPTION_MAP_UNIT_FN),
+ LintId::of(map_unit_fn::RESULT_MAP_UNIT_FN),
+ LintId::of(match_result_ok::MATCH_RESULT_OK),
+ LintId::of(matches::COLLAPSIBLE_MATCH),
+ LintId::of(matches::INFALLIBLE_DESTRUCTURING_MATCH),
+ LintId::of(matches::MANUAL_MAP),
+ LintId::of(matches::MANUAL_UNWRAP_OR),
+ LintId::of(matches::MATCH_AS_REF),
+ LintId::of(matches::MATCH_LIKE_MATCHES_MACRO),
+ LintId::of(matches::MATCH_OVERLAPPING_ARM),
+ LintId::of(matches::MATCH_REF_PATS),
+ LintId::of(matches::MATCH_SINGLE_BINDING),
+ LintId::of(matches::MATCH_STR_CASE_MISMATCH),
+ LintId::of(matches::NEEDLESS_MATCH),
+ LintId::of(matches::REDUNDANT_PATTERN_MATCHING),
+ LintId::of(matches::SINGLE_MATCH),
+ LintId::of(matches::WILDCARD_IN_OR_PATTERNS),
+ LintId::of(mem_replace::MEM_REPLACE_OPTION_WITH_NONE),
+ LintId::of(mem_replace::MEM_REPLACE_WITH_DEFAULT),
+ LintId::of(mem_replace::MEM_REPLACE_WITH_UNINIT),
+ LintId::of(methods::BIND_INSTEAD_OF_MAP),
+ LintId::of(methods::BYTES_NTH),
+ LintId::of(methods::CHARS_LAST_CMP),
+ LintId::of(methods::CHARS_NEXT_CMP),
+ LintId::of(methods::CLONE_DOUBLE_REF),
+ LintId::of(methods::CLONE_ON_COPY),
+ LintId::of(methods::ERR_EXPECT),
+ LintId::of(methods::EXPECT_FUN_CALL),
+ LintId::of(methods::EXTEND_WITH_DRAIN),
+ LintId::of(methods::FILTER_MAP_IDENTITY),
+ LintId::of(methods::FILTER_NEXT),
+ LintId::of(methods::FLAT_MAP_IDENTITY),
+ LintId::of(methods::GET_LAST_WITH_LEN),
+ LintId::of(methods::INSPECT_FOR_EACH),
+ LintId::of(methods::INTO_ITER_ON_REF),
+ LintId::of(methods::IS_DIGIT_ASCII_RADIX),
+ LintId::of(methods::ITERATOR_STEP_BY_ZERO),
+ LintId::of(methods::ITER_CLONED_COLLECT),
+ LintId::of(methods::ITER_COUNT),
+ LintId::of(methods::ITER_NEXT_SLICE),
+ LintId::of(methods::ITER_NTH),
+ LintId::of(methods::ITER_NTH_ZERO),
+ LintId::of(methods::ITER_OVEREAGER_CLONED),
+ LintId::of(methods::ITER_SKIP_NEXT),
+ LintId::of(methods::MANUAL_FILTER_MAP),
+ LintId::of(methods::MANUAL_FIND_MAP),
+ LintId::of(methods::MANUAL_SATURATING_ARITHMETIC),
+ LintId::of(methods::MANUAL_SPLIT_ONCE),
+ LintId::of(methods::MANUAL_STR_REPEAT),
+ LintId::of(methods::MAP_COLLECT_RESULT_UNIT),
+ LintId::of(methods::MAP_FLATTEN),
+ LintId::of(methods::MAP_IDENTITY),
+ LintId::of(methods::NEEDLESS_OPTION_AS_DEREF),
+ LintId::of(methods::NEEDLESS_OPTION_TAKE),
+ LintId::of(methods::NEEDLESS_SPLITN),
+ LintId::of(methods::NEW_RET_NO_SELF),
+ LintId::of(methods::NO_EFFECT_REPLACE),
+ LintId::of(methods::OBFUSCATED_IF_ELSE),
+ LintId::of(methods::OK_EXPECT),
+ LintId::of(methods::OPTION_AS_REF_DEREF),
+ LintId::of(methods::OPTION_FILTER_MAP),
+ LintId::of(methods::OPTION_MAP_OR_NONE),
+ LintId::of(methods::OR_FUN_CALL),
+ LintId::of(methods::OR_THEN_UNWRAP),
+ LintId::of(methods::RESULT_MAP_OR_INTO_OPTION),
+ LintId::of(methods::SEARCH_IS_SOME),
+ LintId::of(methods::SHOULD_IMPLEMENT_TRAIT),
+ LintId::of(methods::SINGLE_CHAR_ADD_STR),
+ LintId::of(methods::SINGLE_CHAR_PATTERN),
+ LintId::of(methods::SKIP_WHILE_NEXT),
+ LintId::of(methods::STRING_EXTEND_CHARS),
+ LintId::of(methods::SUSPICIOUS_MAP),
+ LintId::of(methods::SUSPICIOUS_SPLITN),
+ LintId::of(methods::UNINIT_ASSUMED_INIT),
+ LintId::of(methods::UNNECESSARY_FILTER_MAP),
+ LintId::of(methods::UNNECESSARY_FIND_MAP),
+ LintId::of(methods::UNNECESSARY_FOLD),
+ LintId::of(methods::UNNECESSARY_LAZY_EVALUATIONS),
+ LintId::of(methods::UNNECESSARY_TO_OWNED),
+ LintId::of(methods::UNWRAP_OR_ELSE_DEFAULT),
+ LintId::of(methods::USELESS_ASREF),
+ LintId::of(methods::WRONG_SELF_CONVENTION),
+ LintId::of(methods::ZST_OFFSET),
+ LintId::of(minmax::MIN_MAX),
+ LintId::of(misc::SHORT_CIRCUIT_STATEMENT),
+ LintId::of(misc::TOPLEVEL_REF_ARG),
+ LintId::of(misc::ZERO_PTR),
+ LintId::of(misc_early::BUILTIN_TYPE_SHADOW),
+ LintId::of(misc_early::DOUBLE_NEG),
+ LintId::of(misc_early::DUPLICATE_UNDERSCORE_ARGUMENT),
+ LintId::of(misc_early::MIXED_CASE_HEX_LITERALS),
+ LintId::of(misc_early::REDUNDANT_PATTERN),
+ LintId::of(misc_early::UNNEEDED_WILDCARD_PATTERN),
+ LintId::of(misc_early::ZERO_PREFIXED_LITERAL),
+ LintId::of(mixed_read_write_in_expression::DIVERGING_SUB_EXPRESSION),
+ LintId::of(mut_key::MUTABLE_KEY_TYPE),
+ LintId::of(mut_mutex_lock::MUT_MUTEX_LOCK),
+ LintId::of(mut_reference::UNNECESSARY_MUT_PASSED),
+ LintId::of(needless_arbitrary_self_type::NEEDLESS_ARBITRARY_SELF_TYPE),
+ LintId::of(needless_bool::BOOL_COMPARISON),
+ LintId::of(needless_bool::NEEDLESS_BOOL),
+ LintId::of(needless_borrowed_ref::NEEDLESS_BORROWED_REFERENCE),
+ LintId::of(needless_late_init::NEEDLESS_LATE_INIT),
+ LintId::of(needless_parens_on_range_literals::NEEDLESS_PARENS_ON_RANGE_LITERALS),
+ LintId::of(needless_question_mark::NEEDLESS_QUESTION_MARK),
+ LintId::of(needless_update::NEEDLESS_UPDATE),
+ LintId::of(neg_cmp_op_on_partial_ord::NEG_CMP_OP_ON_PARTIAL_ORD),
+ LintId::of(neg_multiply::NEG_MULTIPLY),
+ LintId::of(new_without_default::NEW_WITHOUT_DEFAULT),
+ LintId::of(no_effect::NO_EFFECT),
+ LintId::of(no_effect::UNNECESSARY_OPERATION),
+ LintId::of(non_copy_const::BORROW_INTERIOR_MUTABLE_CONST),
+ LintId::of(non_copy_const::DECLARE_INTERIOR_MUTABLE_CONST),
+ LintId::of(non_expressive_names::JUST_UNDERSCORES_AND_DIGITS),
+ LintId::of(non_octal_unix_permissions::NON_OCTAL_UNIX_PERMISSIONS),
+ LintId::of(octal_escapes::OCTAL_ESCAPES),
+ LintId::of(open_options::NONSENSICAL_OPEN_OPTIONS),
+ LintId::of(operators::ABSURD_EXTREME_COMPARISONS),
+ LintId::of(operators::ASSIGN_OP_PATTERN),
+ LintId::of(operators::BAD_BIT_MASK),
+ LintId::of(operators::CMP_NAN),
+ LintId::of(operators::CMP_OWNED),
+ LintId::of(operators::DOUBLE_COMPARISONS),
+ LintId::of(operators::DURATION_SUBSEC),
+ LintId::of(operators::EQ_OP),
+ LintId::of(operators::ERASING_OP),
+ LintId::of(operators::FLOAT_EQUALITY_WITHOUT_ABS),
+ LintId::of(operators::IDENTITY_OP),
+ LintId::of(operators::INEFFECTIVE_BIT_MASK),
+ LintId::of(operators::MISREFACTORED_ASSIGN_OP),
+ LintId::of(operators::MODULO_ONE),
+ LintId::of(operators::OP_REF),
+ LintId::of(operators::PTR_EQ),
+ LintId::of(operators::SELF_ASSIGNMENT),
+ LintId::of(option_env_unwrap::OPTION_ENV_UNWRAP),
+ LintId::of(overflow_check_conditional::OVERFLOW_CHECK_CONDITIONAL),
+ LintId::of(partialeq_ne_impl::PARTIALEQ_NE_IMPL),
+ LintId::of(precedence::PRECEDENCE),
+ LintId::of(ptr::CMP_NULL),
+ LintId::of(ptr::INVALID_NULL_PTR_USAGE),
+ LintId::of(ptr::MUT_FROM_REF),
+ LintId::of(ptr::PTR_ARG),
+ LintId::of(ptr_offset_with_cast::PTR_OFFSET_WITH_CAST),
+ LintId::of(question_mark::QUESTION_MARK),
+ LintId::of(ranges::MANUAL_RANGE_CONTAINS),
+ LintId::of(ranges::RANGE_ZIP_WITH_LEN),
+ LintId::of(ranges::REVERSED_EMPTY_RANGES),
+ LintId::of(rc_clone_in_vec_init::RC_CLONE_IN_VEC_INIT),
+ LintId::of(read_zero_byte_vec::READ_ZERO_BYTE_VEC),
+ LintId::of(redundant_clone::REDUNDANT_CLONE),
+ LintId::of(redundant_closure_call::REDUNDANT_CLOSURE_CALL),
+ LintId::of(redundant_field_names::REDUNDANT_FIELD_NAMES),
+ LintId::of(redundant_slicing::REDUNDANT_SLICING),
+ LintId::of(redundant_static_lifetimes::REDUNDANT_STATIC_LIFETIMES),
+ LintId::of(reference::DEREF_ADDROF),
+ LintId::of(regex::INVALID_REGEX),
+ LintId::of(repeat_once::REPEAT_ONCE),
+ LintId::of(returns::LET_AND_RETURN),
+ LintId::of(returns::NEEDLESS_RETURN),
+ LintId::of(self_named_constructors::SELF_NAMED_CONSTRUCTORS),
+ LintId::of(serde_api::SERDE_API_MISUSE),
+ LintId::of(single_component_path_imports::SINGLE_COMPONENT_PATH_IMPORTS),
+ LintId::of(size_of_in_element_count::SIZE_OF_IN_ELEMENT_COUNT),
+ LintId::of(slow_vector_initialization::SLOW_VECTOR_INITIALIZATION),
+ LintId::of(strings::STRING_FROM_UTF8_AS_BYTES),
+ LintId::of(strings::TRIM_SPLIT_WHITESPACE),
+ LintId::of(strlen_on_c_strings::STRLEN_ON_C_STRINGS),
+ LintId::of(suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL),
+ LintId::of(suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL),
+ LintId::of(swap::ALMOST_SWAPPED),
+ LintId::of(swap::MANUAL_SWAP),
+ LintId::of(swap_ptr_to_ref::SWAP_PTR_TO_REF),
+ LintId::of(tabs_in_doc_comments::TABS_IN_DOC_COMMENTS),
+ LintId::of(temporary_assignment::TEMPORARY_ASSIGNMENT),
+ LintId::of(to_digit_is_some::TO_DIGIT_IS_SOME),
+ LintId::of(transmute::CROSSPOINTER_TRANSMUTE),
+ LintId::of(transmute::TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS),
+ LintId::of(transmute::TRANSMUTE_BYTES_TO_STR),
+ LintId::of(transmute::TRANSMUTE_FLOAT_TO_INT),
+ LintId::of(transmute::TRANSMUTE_INT_TO_BOOL),
+ LintId::of(transmute::TRANSMUTE_INT_TO_CHAR),
+ LintId::of(transmute::TRANSMUTE_INT_TO_FLOAT),
+ LintId::of(transmute::TRANSMUTE_NUM_TO_BYTES),
+ LintId::of(transmute::TRANSMUTE_PTR_TO_REF),
+ LintId::of(transmute::UNSOUND_COLLECTION_TRANSMUTE),
+ LintId::of(transmute::USELESS_TRANSMUTE),
+ LintId::of(transmute::WRONG_TRANSMUTE),
+ LintId::of(transmuting_null::TRANSMUTING_NULL),
+ LintId::of(types::BORROWED_BOX),
+ LintId::of(types::BOX_COLLECTION),
+ LintId::of(types::REDUNDANT_ALLOCATION),
+ LintId::of(types::TYPE_COMPLEXITY),
+ LintId::of(types::VEC_BOX),
+ LintId::of(unicode::INVISIBLE_CHARACTERS),
+ LintId::of(uninit_vec::UNINIT_VEC),
+ LintId::of(unit_hash::UNIT_HASH),
+ LintId::of(unit_return_expecting_ord::UNIT_RETURN_EXPECTING_ORD),
+ LintId::of(unit_types::LET_UNIT_VALUE),
+ LintId::of(unit_types::UNIT_ARG),
+ LintId::of(unit_types::UNIT_CMP),
+ LintId::of(unnamed_address::FN_ADDRESS_COMPARISONS),
+ LintId::of(unnamed_address::VTABLE_ADDRESS_COMPARISONS),
+ LintId::of(unnecessary_owned_empty_strings::UNNECESSARY_OWNED_EMPTY_STRINGS),
+ LintId::of(unnecessary_sort_by::UNNECESSARY_SORT_BY),
+ LintId::of(unsafe_removed_from_name::UNSAFE_REMOVED_FROM_NAME),
+ LintId::of(unused_io_amount::UNUSED_IO_AMOUNT),
+ LintId::of(unused_unit::UNUSED_UNIT),
+ LintId::of(unwrap::PANICKING_UNWRAP),
+ LintId::of(unwrap::UNNECESSARY_UNWRAP),
+ LintId::of(upper_case_acronyms::UPPER_CASE_ACRONYMS),
+ LintId::of(useless_conversion::USELESS_CONVERSION),
+ LintId::of(vec::USELESS_VEC),
+ LintId::of(vec_init_then_push::VEC_INIT_THEN_PUSH),
+ LintId::of(vec_resize_to_zero::VEC_RESIZE_TO_ZERO),
+ LintId::of(write::PRINTLN_EMPTY_STRING),
+ LintId::of(write::PRINT_LITERAL),
+ LintId::of(write::PRINT_WITH_NEWLINE),
+ LintId::of(write::WRITELN_EMPTY_STRING),
+ LintId::of(write::WRITE_LITERAL),
+ LintId::of(write::WRITE_WITH_NEWLINE),
+ LintId::of(zero_div_zero::ZERO_DIVIDED_BY_ZERO),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_cargo.rs b/src/tools/clippy/clippy_lints/src/lib.register_cargo.rs
new file mode 100644
index 000000000..c890523fe
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_cargo.rs
@@ -0,0 +1,11 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::cargo", Some("clippy_cargo"), vec![
+ LintId::of(cargo::CARGO_COMMON_METADATA),
+ LintId::of(cargo::MULTIPLE_CRATE_VERSIONS),
+ LintId::of(cargo::NEGATIVE_FEATURE_NAMES),
+ LintId::of(cargo::REDUNDANT_FEATURE_NAMES),
+ LintId::of(cargo::WILDCARD_DEPENDENCIES),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_complexity.rs b/src/tools/clippy/clippy_lints/src/lib.register_complexity.rs
new file mode 100644
index 000000000..ed5446f58
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_complexity.rs
@@ -0,0 +1,105 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::complexity", Some("clippy_complexity"), vec![
+ LintId::of(attrs::DEPRECATED_CFG_ATTR),
+ LintId::of(booleans::NONMINIMAL_BOOL),
+ LintId::of(borrow_deref_ref::BORROW_DEREF_REF),
+ LintId::of(bytes_count_to_len::BYTES_COUNT_TO_LEN),
+ LintId::of(casts::CHAR_LIT_AS_U8),
+ LintId::of(casts::UNNECESSARY_CAST),
+ LintId::of(derivable_impls::DERIVABLE_IMPLS),
+ LintId::of(double_parens::DOUBLE_PARENS),
+ LintId::of(explicit_write::EXPLICIT_WRITE),
+ LintId::of(format::USELESS_FORMAT),
+ LintId::of(functions::TOO_MANY_ARGUMENTS),
+ LintId::of(int_plus_one::INT_PLUS_ONE),
+ LintId::of(lifetimes::EXTRA_UNUSED_LIFETIMES),
+ LintId::of(lifetimes::NEEDLESS_LIFETIMES),
+ LintId::of(loops::EXPLICIT_COUNTER_LOOP),
+ LintId::of(loops::MANUAL_FIND),
+ LintId::of(loops::MANUAL_FLATTEN),
+ LintId::of(loops::SINGLE_ELEMENT_LOOP),
+ LintId::of(loops::WHILE_LET_LOOP),
+ LintId::of(manual_rem_euclid::MANUAL_REM_EUCLID),
+ LintId::of(manual_strip::MANUAL_STRIP),
+ LintId::of(map_unit_fn::OPTION_MAP_UNIT_FN),
+ LintId::of(map_unit_fn::RESULT_MAP_UNIT_FN),
+ LintId::of(matches::MANUAL_UNWRAP_OR),
+ LintId::of(matches::MATCH_AS_REF),
+ LintId::of(matches::MATCH_SINGLE_BINDING),
+ LintId::of(matches::NEEDLESS_MATCH),
+ LintId::of(matches::WILDCARD_IN_OR_PATTERNS),
+ LintId::of(methods::BIND_INSTEAD_OF_MAP),
+ LintId::of(methods::CLONE_ON_COPY),
+ LintId::of(methods::FILTER_MAP_IDENTITY),
+ LintId::of(methods::FILTER_NEXT),
+ LintId::of(methods::FLAT_MAP_IDENTITY),
+ LintId::of(methods::GET_LAST_WITH_LEN),
+ LintId::of(methods::INSPECT_FOR_EACH),
+ LintId::of(methods::ITER_COUNT),
+ LintId::of(methods::MANUAL_FILTER_MAP),
+ LintId::of(methods::MANUAL_FIND_MAP),
+ LintId::of(methods::MANUAL_SPLIT_ONCE),
+ LintId::of(methods::MAP_FLATTEN),
+ LintId::of(methods::MAP_IDENTITY),
+ LintId::of(methods::NEEDLESS_OPTION_AS_DEREF),
+ LintId::of(methods::NEEDLESS_OPTION_TAKE),
+ LintId::of(methods::NEEDLESS_SPLITN),
+ LintId::of(methods::OPTION_AS_REF_DEREF),
+ LintId::of(methods::OPTION_FILTER_MAP),
+ LintId::of(methods::OR_THEN_UNWRAP),
+ LintId::of(methods::SEARCH_IS_SOME),
+ LintId::of(methods::SKIP_WHILE_NEXT),
+ LintId::of(methods::UNNECESSARY_FILTER_MAP),
+ LintId::of(methods::UNNECESSARY_FIND_MAP),
+ LintId::of(methods::USELESS_ASREF),
+ LintId::of(misc::SHORT_CIRCUIT_STATEMENT),
+ LintId::of(misc_early::UNNEEDED_WILDCARD_PATTERN),
+ LintId::of(misc_early::ZERO_PREFIXED_LITERAL),
+ LintId::of(mixed_read_write_in_expression::DIVERGING_SUB_EXPRESSION),
+ LintId::of(needless_arbitrary_self_type::NEEDLESS_ARBITRARY_SELF_TYPE),
+ LintId::of(needless_bool::BOOL_COMPARISON),
+ LintId::of(needless_bool::NEEDLESS_BOOL),
+ LintId::of(needless_borrowed_ref::NEEDLESS_BORROWED_REFERENCE),
+ LintId::of(needless_question_mark::NEEDLESS_QUESTION_MARK),
+ LintId::of(needless_update::NEEDLESS_UPDATE),
+ LintId::of(neg_cmp_op_on_partial_ord::NEG_CMP_OP_ON_PARTIAL_ORD),
+ LintId::of(no_effect::NO_EFFECT),
+ LintId::of(no_effect::UNNECESSARY_OPERATION),
+ LintId::of(operators::DOUBLE_COMPARISONS),
+ LintId::of(operators::DURATION_SUBSEC),
+ LintId::of(operators::IDENTITY_OP),
+ LintId::of(overflow_check_conditional::OVERFLOW_CHECK_CONDITIONAL),
+ LintId::of(partialeq_ne_impl::PARTIALEQ_NE_IMPL),
+ LintId::of(precedence::PRECEDENCE),
+ LintId::of(ptr_offset_with_cast::PTR_OFFSET_WITH_CAST),
+ LintId::of(ranges::RANGE_ZIP_WITH_LEN),
+ LintId::of(redundant_closure_call::REDUNDANT_CLOSURE_CALL),
+ LintId::of(redundant_slicing::REDUNDANT_SLICING),
+ LintId::of(reference::DEREF_ADDROF),
+ LintId::of(repeat_once::REPEAT_ONCE),
+ LintId::of(strings::STRING_FROM_UTF8_AS_BYTES),
+ LintId::of(strlen_on_c_strings::STRLEN_ON_C_STRINGS),
+ LintId::of(swap::MANUAL_SWAP),
+ LintId::of(temporary_assignment::TEMPORARY_ASSIGNMENT),
+ LintId::of(transmute::CROSSPOINTER_TRANSMUTE),
+ LintId::of(transmute::TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS),
+ LintId::of(transmute::TRANSMUTE_BYTES_TO_STR),
+ LintId::of(transmute::TRANSMUTE_FLOAT_TO_INT),
+ LintId::of(transmute::TRANSMUTE_INT_TO_BOOL),
+ LintId::of(transmute::TRANSMUTE_INT_TO_CHAR),
+ LintId::of(transmute::TRANSMUTE_INT_TO_FLOAT),
+ LintId::of(transmute::TRANSMUTE_NUM_TO_BYTES),
+ LintId::of(transmute::TRANSMUTE_PTR_TO_REF),
+ LintId::of(transmute::USELESS_TRANSMUTE),
+ LintId::of(types::BORROWED_BOX),
+ LintId::of(types::TYPE_COMPLEXITY),
+ LintId::of(types::VEC_BOX),
+ LintId::of(unit_types::UNIT_ARG),
+ LintId::of(unnecessary_sort_by::UNNECESSARY_SORT_BY),
+ LintId::of(unwrap::UNNECESSARY_UNWRAP),
+ LintId::of(useless_conversion::USELESS_CONVERSION),
+ LintId::of(zero_div_zero::ZERO_DIVIDED_BY_ZERO),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_correctness.rs b/src/tools/clippy/clippy_lints/src/lib.register_correctness.rs
new file mode 100644
index 000000000..9975859c5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_correctness.rs
@@ -0,0 +1,78 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::correctness", Some("clippy_correctness"), vec![
+ LintId::of(approx_const::APPROX_CONSTANT),
+ LintId::of(async_yields_async::ASYNC_YIELDS_ASYNC),
+ LintId::of(attrs::DEPRECATED_SEMVER),
+ LintId::of(attrs::MISMATCHED_TARGET_OS),
+ LintId::of(attrs::USELESS_ATTRIBUTE),
+ LintId::of(booleans::LOGIC_BUG),
+ LintId::of(casts::CAST_REF_TO_MUT),
+ LintId::of(casts::CAST_SLICE_DIFFERENT_SIZES),
+ LintId::of(copies::IFS_SAME_COND),
+ LintId::of(copies::IF_SAME_THEN_ELSE),
+ LintId::of(derive::DERIVE_HASH_XOR_EQ),
+ LintId::of(derive::DERIVE_ORD_XOR_PARTIAL_ORD),
+ LintId::of(drop_forget_ref::DROP_COPY),
+ LintId::of(drop_forget_ref::DROP_REF),
+ LintId::of(drop_forget_ref::FORGET_COPY),
+ LintId::of(drop_forget_ref::FORGET_REF),
+ LintId::of(drop_forget_ref::UNDROPPED_MANUALLY_DROPS),
+ LintId::of(enum_clike::ENUM_CLIKE_UNPORTABLE_VARIANT),
+ LintId::of(format_impl::RECURSIVE_FORMAT_IMPL),
+ LintId::of(formatting::POSSIBLE_MISSING_COMMA),
+ LintId::of(functions::NOT_UNSAFE_PTR_ARG_DEREF),
+ LintId::of(if_let_mutex::IF_LET_MUTEX),
+ LintId::of(indexing_slicing::OUT_OF_BOUNDS_INDEXING),
+ LintId::of(infinite_iter::INFINITE_ITER),
+ LintId::of(inherent_to_string::INHERENT_TO_STRING_SHADOW_DISPLAY),
+ LintId::of(inline_fn_without_body::INLINE_FN_WITHOUT_BODY),
+ LintId::of(invalid_utf8_in_unchecked::INVALID_UTF8_IN_UNCHECKED),
+ LintId::of(let_underscore::LET_UNDERSCORE_LOCK),
+ LintId::of(literal_representation::MISTYPED_LITERAL_SUFFIXES),
+ LintId::of(loops::ITER_NEXT_LOOP),
+ LintId::of(loops::NEVER_LOOP),
+ LintId::of(loops::WHILE_IMMUTABLE_CONDITION),
+ LintId::of(matches::MATCH_STR_CASE_MISMATCH),
+ LintId::of(mem_replace::MEM_REPLACE_WITH_UNINIT),
+ LintId::of(methods::CLONE_DOUBLE_REF),
+ LintId::of(methods::ITERATOR_STEP_BY_ZERO),
+ LintId::of(methods::SUSPICIOUS_SPLITN),
+ LintId::of(methods::UNINIT_ASSUMED_INIT),
+ LintId::of(methods::ZST_OFFSET),
+ LintId::of(minmax::MIN_MAX),
+ LintId::of(non_octal_unix_permissions::NON_OCTAL_UNIX_PERMISSIONS),
+ LintId::of(open_options::NONSENSICAL_OPEN_OPTIONS),
+ LintId::of(operators::ABSURD_EXTREME_COMPARISONS),
+ LintId::of(operators::BAD_BIT_MASK),
+ LintId::of(operators::CMP_NAN),
+ LintId::of(operators::EQ_OP),
+ LintId::of(operators::ERASING_OP),
+ LintId::of(operators::INEFFECTIVE_BIT_MASK),
+ LintId::of(operators::MODULO_ONE),
+ LintId::of(operators::SELF_ASSIGNMENT),
+ LintId::of(option_env_unwrap::OPTION_ENV_UNWRAP),
+ LintId::of(ptr::INVALID_NULL_PTR_USAGE),
+ LintId::of(ptr::MUT_FROM_REF),
+ LintId::of(ranges::REVERSED_EMPTY_RANGES),
+ LintId::of(read_zero_byte_vec::READ_ZERO_BYTE_VEC),
+ LintId::of(regex::INVALID_REGEX),
+ LintId::of(serde_api::SERDE_API_MISUSE),
+ LintId::of(size_of_in_element_count::SIZE_OF_IN_ELEMENT_COUNT),
+ LintId::of(swap::ALMOST_SWAPPED),
+ LintId::of(transmute::UNSOUND_COLLECTION_TRANSMUTE),
+ LintId::of(transmute::WRONG_TRANSMUTE),
+ LintId::of(transmuting_null::TRANSMUTING_NULL),
+ LintId::of(unicode::INVISIBLE_CHARACTERS),
+ LintId::of(uninit_vec::UNINIT_VEC),
+ LintId::of(unit_hash::UNIT_HASH),
+ LintId::of(unit_return_expecting_ord::UNIT_RETURN_EXPECTING_ORD),
+ LintId::of(unit_types::UNIT_CMP),
+ LintId::of(unnamed_address::FN_ADDRESS_COMPARISONS),
+ LintId::of(unnamed_address::VTABLE_ADDRESS_COMPARISONS),
+ LintId::of(unused_io_amount::UNUSED_IO_AMOUNT),
+ LintId::of(unwrap::PANICKING_UNWRAP),
+ LintId::of(vec_resize_to_zero::VEC_RESIZE_TO_ZERO),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_internal.rs b/src/tools/clippy/clippy_lints/src/lib.register_internal.rs
new file mode 100644
index 000000000..be63646a1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_internal.rs
@@ -0,0 +1,22 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::internal", Some("clippy_internal"), vec![
+ LintId::of(utils::internal_lints::CLIPPY_LINTS_INTERNAL),
+ LintId::of(utils::internal_lints::COLLAPSIBLE_SPAN_LINT_CALLS),
+ LintId::of(utils::internal_lints::COMPILER_LINT_FUNCTIONS),
+ LintId::of(utils::internal_lints::DEFAULT_DEPRECATION_REASON),
+ LintId::of(utils::internal_lints::DEFAULT_LINT),
+ LintId::of(utils::internal_lints::IF_CHAIN_STYLE),
+ LintId::of(utils::internal_lints::INTERNING_DEFINED_SYMBOL),
+ LintId::of(utils::internal_lints::INVALID_CLIPPY_VERSION_ATTRIBUTE),
+ LintId::of(utils::internal_lints::INVALID_PATHS),
+ LintId::of(utils::internal_lints::LINT_WITHOUT_LINT_PASS),
+ LintId::of(utils::internal_lints::MATCH_TYPE_ON_DIAGNOSTIC_ITEM),
+ LintId::of(utils::internal_lints::MISSING_CLIPPY_VERSION_ATTRIBUTE),
+ LintId::of(utils::internal_lints::MISSING_MSRV_ATTR_IMPL),
+ LintId::of(utils::internal_lints::OUTER_EXPN_EXPN_DATA),
+ LintId::of(utils::internal_lints::PRODUCE_ICE),
+ LintId::of(utils::internal_lints::UNNECESSARY_SYMBOL_STR),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_lints.rs b/src/tools/clippy/clippy_lints/src/lib.register_lints.rs
new file mode 100644
index 000000000..99bde35cf
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_lints.rs
@@ -0,0 +1,597 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_lints(&[
+ #[cfg(feature = "internal")]
+ utils::internal_lints::CLIPPY_LINTS_INTERNAL,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::COLLAPSIBLE_SPAN_LINT_CALLS,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::COMPILER_LINT_FUNCTIONS,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::DEFAULT_DEPRECATION_REASON,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::DEFAULT_LINT,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::IF_CHAIN_STYLE,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::INTERNING_DEFINED_SYMBOL,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::INVALID_CLIPPY_VERSION_ATTRIBUTE,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::INVALID_PATHS,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::LINT_WITHOUT_LINT_PASS,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::MATCH_TYPE_ON_DIAGNOSTIC_ITEM,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::MISSING_CLIPPY_VERSION_ATTRIBUTE,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::MISSING_MSRV_ATTR_IMPL,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::OUTER_EXPN_EXPN_DATA,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::PRODUCE_ICE,
+ #[cfg(feature = "internal")]
+ utils::internal_lints::UNNECESSARY_SYMBOL_STR,
+ almost_complete_letter_range::ALMOST_COMPLETE_LETTER_RANGE,
+ approx_const::APPROX_CONSTANT,
+ as_conversions::AS_CONVERSIONS,
+ as_underscore::AS_UNDERSCORE,
+ asm_syntax::INLINE_ASM_X86_ATT_SYNTAX,
+ asm_syntax::INLINE_ASM_X86_INTEL_SYNTAX,
+ assertions_on_constants::ASSERTIONS_ON_CONSTANTS,
+ assertions_on_result_states::ASSERTIONS_ON_RESULT_STATES,
+ async_yields_async::ASYNC_YIELDS_ASYNC,
+ attrs::ALLOW_ATTRIBUTES_WITHOUT_REASON,
+ attrs::BLANKET_CLIPPY_RESTRICTION_LINTS,
+ attrs::DEPRECATED_CFG_ATTR,
+ attrs::DEPRECATED_SEMVER,
+ attrs::EMPTY_LINE_AFTER_OUTER_ATTR,
+ attrs::INLINE_ALWAYS,
+ attrs::MISMATCHED_TARGET_OS,
+ attrs::USELESS_ATTRIBUTE,
+ await_holding_invalid::AWAIT_HOLDING_INVALID_TYPE,
+ await_holding_invalid::AWAIT_HOLDING_LOCK,
+ await_holding_invalid::AWAIT_HOLDING_REFCELL_REF,
+ blacklisted_name::BLACKLISTED_NAME,
+ blocks_in_if_conditions::BLOCKS_IN_IF_CONDITIONS,
+ bool_assert_comparison::BOOL_ASSERT_COMPARISON,
+ booleans::LOGIC_BUG,
+ booleans::NONMINIMAL_BOOL,
+ borrow_as_ptr::BORROW_AS_PTR,
+ borrow_deref_ref::BORROW_DEREF_REF,
+ bytecount::NAIVE_BYTECOUNT,
+ bytes_count_to_len::BYTES_COUNT_TO_LEN,
+ cargo::CARGO_COMMON_METADATA,
+ cargo::MULTIPLE_CRATE_VERSIONS,
+ cargo::NEGATIVE_FEATURE_NAMES,
+ cargo::REDUNDANT_FEATURE_NAMES,
+ cargo::WILDCARD_DEPENDENCIES,
+ case_sensitive_file_extension_comparisons::CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
+ casts::CAST_ABS_TO_UNSIGNED,
+ casts::CAST_ENUM_CONSTRUCTOR,
+ casts::CAST_ENUM_TRUNCATION,
+ casts::CAST_LOSSLESS,
+ casts::CAST_POSSIBLE_TRUNCATION,
+ casts::CAST_POSSIBLE_WRAP,
+ casts::CAST_PRECISION_LOSS,
+ casts::CAST_PTR_ALIGNMENT,
+ casts::CAST_REF_TO_MUT,
+ casts::CAST_SIGN_LOSS,
+ casts::CAST_SLICE_DIFFERENT_SIZES,
+ casts::CHAR_LIT_AS_U8,
+ casts::FN_TO_NUMERIC_CAST,
+ casts::FN_TO_NUMERIC_CAST_ANY,
+ casts::FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
+ casts::PTR_AS_PTR,
+ casts::UNNECESSARY_CAST,
+ checked_conversions::CHECKED_CONVERSIONS,
+ cognitive_complexity::COGNITIVE_COMPLEXITY,
+ collapsible_if::COLLAPSIBLE_ELSE_IF,
+ collapsible_if::COLLAPSIBLE_IF,
+ comparison_chain::COMPARISON_CHAIN,
+ copies::BRANCHES_SHARING_CODE,
+ copies::IFS_SAME_COND,
+ copies::IF_SAME_THEN_ELSE,
+ copies::SAME_FUNCTIONS_IN_IF_CONDITION,
+ copy_iterator::COPY_ITERATOR,
+ crate_in_macro_def::CRATE_IN_MACRO_DEF,
+ create_dir::CREATE_DIR,
+ dbg_macro::DBG_MACRO,
+ default::DEFAULT_TRAIT_ACCESS,
+ default::FIELD_REASSIGN_WITH_DEFAULT,
+ default_instead_of_iter_empty::DEFAULT_INSTEAD_OF_ITER_EMPTY,
+ default_numeric_fallback::DEFAULT_NUMERIC_FALLBACK,
+ default_union_representation::DEFAULT_UNION_REPRESENTATION,
+ dereference::EXPLICIT_AUTO_DEREF,
+ dereference::EXPLICIT_DEREF_METHODS,
+ dereference::NEEDLESS_BORROW,
+ dereference::REF_BINDING_TO_REFERENCE,
+ derivable_impls::DERIVABLE_IMPLS,
+ derive::DERIVE_HASH_XOR_EQ,
+ derive::DERIVE_ORD_XOR_PARTIAL_ORD,
+ derive::DERIVE_PARTIAL_EQ_WITHOUT_EQ,
+ derive::EXPL_IMPL_CLONE_ON_COPY,
+ derive::UNSAFE_DERIVE_DESERIALIZE,
+ disallowed_methods::DISALLOWED_METHODS,
+ disallowed_script_idents::DISALLOWED_SCRIPT_IDENTS,
+ disallowed_types::DISALLOWED_TYPES,
+ doc::DOC_MARKDOWN,
+ doc::MISSING_ERRORS_DOC,
+ doc::MISSING_PANICS_DOC,
+ doc::MISSING_SAFETY_DOC,
+ doc::NEEDLESS_DOCTEST_MAIN,
+ doc_link_with_quotes::DOC_LINK_WITH_QUOTES,
+ double_parens::DOUBLE_PARENS,
+ drop_forget_ref::DROP_COPY,
+ drop_forget_ref::DROP_NON_DROP,
+ drop_forget_ref::DROP_REF,
+ drop_forget_ref::FORGET_COPY,
+ drop_forget_ref::FORGET_NON_DROP,
+ drop_forget_ref::FORGET_REF,
+ drop_forget_ref::UNDROPPED_MANUALLY_DROPS,
+ duplicate_mod::DUPLICATE_MOD,
+ else_if_without_else::ELSE_IF_WITHOUT_ELSE,
+ empty_drop::EMPTY_DROP,
+ empty_enum::EMPTY_ENUM,
+ empty_structs_with_brackets::EMPTY_STRUCTS_WITH_BRACKETS,
+ entry::MAP_ENTRY,
+ enum_clike::ENUM_CLIKE_UNPORTABLE_VARIANT,
+ enum_variants::ENUM_VARIANT_NAMES,
+ enum_variants::MODULE_INCEPTION,
+ enum_variants::MODULE_NAME_REPETITIONS,
+ equatable_if_let::EQUATABLE_IF_LET,
+ escape::BOXED_LOCAL,
+ eta_reduction::REDUNDANT_CLOSURE,
+ eta_reduction::REDUNDANT_CLOSURE_FOR_METHOD_CALLS,
+ excessive_bools::FN_PARAMS_EXCESSIVE_BOOLS,
+ excessive_bools::STRUCT_EXCESSIVE_BOOLS,
+ exhaustive_items::EXHAUSTIVE_ENUMS,
+ exhaustive_items::EXHAUSTIVE_STRUCTS,
+ exit::EXIT,
+ explicit_write::EXPLICIT_WRITE,
+ fallible_impl_from::FALLIBLE_IMPL_FROM,
+ float_literal::EXCESSIVE_PRECISION,
+ float_literal::LOSSY_FLOAT_LITERAL,
+ floating_point_arithmetic::IMPRECISE_FLOPS,
+ floating_point_arithmetic::SUBOPTIMAL_FLOPS,
+ format::USELESS_FORMAT,
+ format_args::FORMAT_IN_FORMAT_ARGS,
+ format_args::TO_STRING_IN_FORMAT_ARGS,
+ format_impl::PRINT_IN_FORMAT_IMPL,
+ format_impl::RECURSIVE_FORMAT_IMPL,
+ format_push_string::FORMAT_PUSH_STRING,
+ formatting::POSSIBLE_MISSING_COMMA,
+ formatting::SUSPICIOUS_ASSIGNMENT_FORMATTING,
+ formatting::SUSPICIOUS_ELSE_FORMATTING,
+ formatting::SUSPICIOUS_UNARY_OP_FORMATTING,
+ from_over_into::FROM_OVER_INTO,
+ from_str_radix_10::FROM_STR_RADIX_10,
+ functions::DOUBLE_MUST_USE,
+ functions::MUST_USE_CANDIDATE,
+ functions::MUST_USE_UNIT,
+ functions::NOT_UNSAFE_PTR_ARG_DEREF,
+ functions::RESULT_UNIT_ERR,
+ functions::TOO_MANY_ARGUMENTS,
+ functions::TOO_MANY_LINES,
+ future_not_send::FUTURE_NOT_SEND,
+ get_first::GET_FIRST,
+ if_let_mutex::IF_LET_MUTEX,
+ if_not_else::IF_NOT_ELSE,
+ if_then_some_else_none::IF_THEN_SOME_ELSE_NONE,
+ implicit_hasher::IMPLICIT_HASHER,
+ implicit_return::IMPLICIT_RETURN,
+ implicit_saturating_sub::IMPLICIT_SATURATING_SUB,
+ inconsistent_struct_constructor::INCONSISTENT_STRUCT_CONSTRUCTOR,
+ index_refutable_slice::INDEX_REFUTABLE_SLICE,
+ indexing_slicing::INDEXING_SLICING,
+ indexing_slicing::OUT_OF_BOUNDS_INDEXING,
+ infinite_iter::INFINITE_ITER,
+ infinite_iter::MAYBE_INFINITE_ITER,
+ inherent_impl::MULTIPLE_INHERENT_IMPL,
+ inherent_to_string::INHERENT_TO_STRING,
+ inherent_to_string::INHERENT_TO_STRING_SHADOW_DISPLAY,
+ init_numbered_fields::INIT_NUMBERED_FIELDS,
+ inline_fn_without_body::INLINE_FN_WITHOUT_BODY,
+ int_plus_one::INT_PLUS_ONE,
+ invalid_upcast_comparisons::INVALID_UPCAST_COMPARISONS,
+ invalid_utf8_in_unchecked::INVALID_UTF8_IN_UNCHECKED,
+ items_after_statements::ITEMS_AFTER_STATEMENTS,
+ iter_not_returning_iterator::ITER_NOT_RETURNING_ITERATOR,
+ large_const_arrays::LARGE_CONST_ARRAYS,
+ large_enum_variant::LARGE_ENUM_VARIANT,
+ large_include_file::LARGE_INCLUDE_FILE,
+ large_stack_arrays::LARGE_STACK_ARRAYS,
+ len_zero::COMPARISON_TO_EMPTY,
+ len_zero::LEN_WITHOUT_IS_EMPTY,
+ len_zero::LEN_ZERO,
+ let_if_seq::USELESS_LET_IF_SEQ,
+ let_underscore::LET_UNDERSCORE_DROP,
+ let_underscore::LET_UNDERSCORE_LOCK,
+ let_underscore::LET_UNDERSCORE_MUST_USE,
+ lifetimes::EXTRA_UNUSED_LIFETIMES,
+ lifetimes::NEEDLESS_LIFETIMES,
+ literal_representation::DECIMAL_LITERAL_REPRESENTATION,
+ literal_representation::INCONSISTENT_DIGIT_GROUPING,
+ literal_representation::LARGE_DIGIT_GROUPS,
+ literal_representation::MISTYPED_LITERAL_SUFFIXES,
+ literal_representation::UNREADABLE_LITERAL,
+ literal_representation::UNUSUAL_BYTE_GROUPINGS,
+ loops::EMPTY_LOOP,
+ loops::EXPLICIT_COUNTER_LOOP,
+ loops::EXPLICIT_INTO_ITER_LOOP,
+ loops::EXPLICIT_ITER_LOOP,
+ loops::FOR_KV_MAP,
+ loops::FOR_LOOPS_OVER_FALLIBLES,
+ loops::ITER_NEXT_LOOP,
+ loops::MANUAL_FIND,
+ loops::MANUAL_FLATTEN,
+ loops::MANUAL_MEMCPY,
+ loops::MISSING_SPIN_LOOP,
+ loops::MUT_RANGE_BOUND,
+ loops::NEEDLESS_COLLECT,
+ loops::NEEDLESS_RANGE_LOOP,
+ loops::NEVER_LOOP,
+ loops::SAME_ITEM_PUSH,
+ loops::SINGLE_ELEMENT_LOOP,
+ loops::WHILE_IMMUTABLE_CONDITION,
+ loops::WHILE_LET_LOOP,
+ loops::WHILE_LET_ON_ITERATOR,
+ macro_use::MACRO_USE_IMPORTS,
+ main_recursion::MAIN_RECURSION,
+ manual_assert::MANUAL_ASSERT,
+ manual_async_fn::MANUAL_ASYNC_FN,
+ manual_bits::MANUAL_BITS,
+ manual_non_exhaustive::MANUAL_NON_EXHAUSTIVE,
+ manual_ok_or::MANUAL_OK_OR,
+ manual_rem_euclid::MANUAL_REM_EUCLID,
+ manual_retain::MANUAL_RETAIN,
+ manual_strip::MANUAL_STRIP,
+ map_clone::MAP_CLONE,
+ map_err_ignore::MAP_ERR_IGNORE,
+ map_unit_fn::OPTION_MAP_UNIT_FN,
+ map_unit_fn::RESULT_MAP_UNIT_FN,
+ match_result_ok::MATCH_RESULT_OK,
+ matches::COLLAPSIBLE_MATCH,
+ matches::INFALLIBLE_DESTRUCTURING_MATCH,
+ matches::MANUAL_MAP,
+ matches::MANUAL_UNWRAP_OR,
+ matches::MATCH_AS_REF,
+ matches::MATCH_BOOL,
+ matches::MATCH_LIKE_MATCHES_MACRO,
+ matches::MATCH_ON_VEC_ITEMS,
+ matches::MATCH_OVERLAPPING_ARM,
+ matches::MATCH_REF_PATS,
+ matches::MATCH_SAME_ARMS,
+ matches::MATCH_SINGLE_BINDING,
+ matches::MATCH_STR_CASE_MISMATCH,
+ matches::MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
+ matches::MATCH_WILD_ERR_ARM,
+ matches::NEEDLESS_MATCH,
+ matches::REDUNDANT_PATTERN_MATCHING,
+ matches::REST_PAT_IN_FULLY_BOUND_STRUCTS,
+ matches::SIGNIFICANT_DROP_IN_SCRUTINEE,
+ matches::SINGLE_MATCH,
+ matches::SINGLE_MATCH_ELSE,
+ matches::TRY_ERR,
+ matches::WILDCARD_ENUM_MATCH_ARM,
+ matches::WILDCARD_IN_OR_PATTERNS,
+ mem_forget::MEM_FORGET,
+ mem_replace::MEM_REPLACE_OPTION_WITH_NONE,
+ mem_replace::MEM_REPLACE_WITH_DEFAULT,
+ mem_replace::MEM_REPLACE_WITH_UNINIT,
+ methods::BIND_INSTEAD_OF_MAP,
+ methods::BYTES_NTH,
+ methods::CHARS_LAST_CMP,
+ methods::CHARS_NEXT_CMP,
+ methods::CLONED_INSTEAD_OF_COPIED,
+ methods::CLONE_DOUBLE_REF,
+ methods::CLONE_ON_COPY,
+ methods::CLONE_ON_REF_PTR,
+ methods::ERR_EXPECT,
+ methods::EXPECT_FUN_CALL,
+ methods::EXPECT_USED,
+ methods::EXTEND_WITH_DRAIN,
+ methods::FILETYPE_IS_FILE,
+ methods::FILTER_MAP_IDENTITY,
+ methods::FILTER_MAP_NEXT,
+ methods::FILTER_NEXT,
+ methods::FLAT_MAP_IDENTITY,
+ methods::FLAT_MAP_OPTION,
+ methods::FROM_ITER_INSTEAD_OF_COLLECT,
+ methods::GET_LAST_WITH_LEN,
+ methods::GET_UNWRAP,
+ methods::IMPLICIT_CLONE,
+ methods::INEFFICIENT_TO_STRING,
+ methods::INSPECT_FOR_EACH,
+ methods::INTO_ITER_ON_REF,
+ methods::IS_DIGIT_ASCII_RADIX,
+ methods::ITERATOR_STEP_BY_ZERO,
+ methods::ITER_CLONED_COLLECT,
+ methods::ITER_COUNT,
+ methods::ITER_NEXT_SLICE,
+ methods::ITER_NTH,
+ methods::ITER_NTH_ZERO,
+ methods::ITER_OVEREAGER_CLONED,
+ methods::ITER_SKIP_NEXT,
+ methods::ITER_WITH_DRAIN,
+ methods::MANUAL_FILTER_MAP,
+ methods::MANUAL_FIND_MAP,
+ methods::MANUAL_SATURATING_ARITHMETIC,
+ methods::MANUAL_SPLIT_ONCE,
+ methods::MANUAL_STR_REPEAT,
+ methods::MAP_COLLECT_RESULT_UNIT,
+ methods::MAP_FLATTEN,
+ methods::MAP_IDENTITY,
+ methods::MAP_UNWRAP_OR,
+ methods::NEEDLESS_OPTION_AS_DEREF,
+ methods::NEEDLESS_OPTION_TAKE,
+ methods::NEEDLESS_SPLITN,
+ methods::NEW_RET_NO_SELF,
+ methods::NO_EFFECT_REPLACE,
+ methods::OBFUSCATED_IF_ELSE,
+ methods::OK_EXPECT,
+ methods::OPTION_AS_REF_DEREF,
+ methods::OPTION_FILTER_MAP,
+ methods::OPTION_MAP_OR_NONE,
+ methods::OR_FUN_CALL,
+ methods::OR_THEN_UNWRAP,
+ methods::RESULT_MAP_OR_INTO_OPTION,
+ methods::SEARCH_IS_SOME,
+ methods::SHOULD_IMPLEMENT_TRAIT,
+ methods::SINGLE_CHAR_ADD_STR,
+ methods::SINGLE_CHAR_PATTERN,
+ methods::SKIP_WHILE_NEXT,
+ methods::STRING_EXTEND_CHARS,
+ methods::SUSPICIOUS_MAP,
+ methods::SUSPICIOUS_SPLITN,
+ methods::UNINIT_ASSUMED_INIT,
+ methods::UNNECESSARY_FILTER_MAP,
+ methods::UNNECESSARY_FIND_MAP,
+ methods::UNNECESSARY_FOLD,
+ methods::UNNECESSARY_JOIN,
+ methods::UNNECESSARY_LAZY_EVALUATIONS,
+ methods::UNNECESSARY_TO_OWNED,
+ methods::UNWRAP_OR_ELSE_DEFAULT,
+ methods::UNWRAP_USED,
+ methods::USELESS_ASREF,
+ methods::WRONG_SELF_CONVENTION,
+ methods::ZST_OFFSET,
+ minmax::MIN_MAX,
+ misc::SHORT_CIRCUIT_STATEMENT,
+ misc::TOPLEVEL_REF_ARG,
+ misc::USED_UNDERSCORE_BINDING,
+ misc::ZERO_PTR,
+ misc_early::BUILTIN_TYPE_SHADOW,
+ misc_early::DOUBLE_NEG,
+ misc_early::DUPLICATE_UNDERSCORE_ARGUMENT,
+ misc_early::MIXED_CASE_HEX_LITERALS,
+ misc_early::REDUNDANT_PATTERN,
+ misc_early::SEPARATED_LITERAL_SUFFIX,
+ misc_early::UNNEEDED_FIELD_PATTERN,
+ misc_early::UNNEEDED_WILDCARD_PATTERN,
+ misc_early::UNSEPARATED_LITERAL_SUFFIX,
+ misc_early::ZERO_PREFIXED_LITERAL,
+ mismatching_type_param_order::MISMATCHING_TYPE_PARAM_ORDER,
+ missing_const_for_fn::MISSING_CONST_FOR_FN,
+ missing_doc::MISSING_DOCS_IN_PRIVATE_ITEMS,
+ missing_enforced_import_rename::MISSING_ENFORCED_IMPORT_RENAMES,
+ missing_inline::MISSING_INLINE_IN_PUBLIC_ITEMS,
+ mixed_read_write_in_expression::DIVERGING_SUB_EXPRESSION,
+ mixed_read_write_in_expression::MIXED_READ_WRITE_IN_EXPRESSION,
+ module_style::MOD_MODULE_FILES,
+ module_style::SELF_NAMED_MODULE_FILES,
+ mut_key::MUTABLE_KEY_TYPE,
+ mut_mut::MUT_MUT,
+ mut_mutex_lock::MUT_MUTEX_LOCK,
+ mut_reference::UNNECESSARY_MUT_PASSED,
+ mutable_debug_assertion::DEBUG_ASSERT_WITH_MUT_CALL,
+ mutex_atomic::MUTEX_ATOMIC,
+ mutex_atomic::MUTEX_INTEGER,
+ needless_arbitrary_self_type::NEEDLESS_ARBITRARY_SELF_TYPE,
+ needless_bool::BOOL_COMPARISON,
+ needless_bool::NEEDLESS_BOOL,
+ needless_borrowed_ref::NEEDLESS_BORROWED_REFERENCE,
+ needless_continue::NEEDLESS_CONTINUE,
+ needless_for_each::NEEDLESS_FOR_EACH,
+ needless_late_init::NEEDLESS_LATE_INIT,
+ needless_parens_on_range_literals::NEEDLESS_PARENS_ON_RANGE_LITERALS,
+ needless_pass_by_value::NEEDLESS_PASS_BY_VALUE,
+ needless_question_mark::NEEDLESS_QUESTION_MARK,
+ needless_update::NEEDLESS_UPDATE,
+ neg_cmp_op_on_partial_ord::NEG_CMP_OP_ON_PARTIAL_ORD,
+ neg_multiply::NEG_MULTIPLY,
+ new_without_default::NEW_WITHOUT_DEFAULT,
+ no_effect::NO_EFFECT,
+ no_effect::NO_EFFECT_UNDERSCORE_BINDING,
+ no_effect::UNNECESSARY_OPERATION,
+ non_copy_const::BORROW_INTERIOR_MUTABLE_CONST,
+ non_copy_const::DECLARE_INTERIOR_MUTABLE_CONST,
+ non_expressive_names::JUST_UNDERSCORES_AND_DIGITS,
+ non_expressive_names::MANY_SINGLE_CHAR_NAMES,
+ non_expressive_names::SIMILAR_NAMES,
+ non_octal_unix_permissions::NON_OCTAL_UNIX_PERMISSIONS,
+ non_send_fields_in_send_ty::NON_SEND_FIELDS_IN_SEND_TY,
+ nonstandard_macro_braces::NONSTANDARD_MACRO_BRACES,
+ octal_escapes::OCTAL_ESCAPES,
+ only_used_in_recursion::ONLY_USED_IN_RECURSION,
+ open_options::NONSENSICAL_OPEN_OPTIONS,
+ operators::ABSURD_EXTREME_COMPARISONS,
+ operators::ARITHMETIC,
+ operators::ASSIGN_OP_PATTERN,
+ operators::BAD_BIT_MASK,
+ operators::CMP_NAN,
+ operators::CMP_OWNED,
+ operators::DOUBLE_COMPARISONS,
+ operators::DURATION_SUBSEC,
+ operators::EQ_OP,
+ operators::ERASING_OP,
+ operators::FLOAT_ARITHMETIC,
+ operators::FLOAT_CMP,
+ operators::FLOAT_CMP_CONST,
+ operators::FLOAT_EQUALITY_WITHOUT_ABS,
+ operators::IDENTITY_OP,
+ operators::INEFFECTIVE_BIT_MASK,
+ operators::INTEGER_ARITHMETIC,
+ operators::INTEGER_DIVISION,
+ operators::MISREFACTORED_ASSIGN_OP,
+ operators::MODULO_ARITHMETIC,
+ operators::MODULO_ONE,
+ operators::NEEDLESS_BITWISE_BOOL,
+ operators::OP_REF,
+ operators::PTR_EQ,
+ operators::SELF_ASSIGNMENT,
+ operators::VERBOSE_BIT_MASK,
+ option_env_unwrap::OPTION_ENV_UNWRAP,
+ option_if_let_else::OPTION_IF_LET_ELSE,
+ overflow_check_conditional::OVERFLOW_CHECK_CONDITIONAL,
+ panic_in_result_fn::PANIC_IN_RESULT_FN,
+ panic_unimplemented::PANIC,
+ panic_unimplemented::TODO,
+ panic_unimplemented::UNIMPLEMENTED,
+ panic_unimplemented::UNREACHABLE,
+ partialeq_ne_impl::PARTIALEQ_NE_IMPL,
+ pass_by_ref_or_value::LARGE_TYPES_PASSED_BY_VALUE,
+ pass_by_ref_or_value::TRIVIALLY_COPY_PASS_BY_REF,
+ path_buf_push_overwrite::PATH_BUF_PUSH_OVERWRITE,
+ pattern_type_mismatch::PATTERN_TYPE_MISMATCH,
+ precedence::PRECEDENCE,
+ ptr::CMP_NULL,
+ ptr::INVALID_NULL_PTR_USAGE,
+ ptr::MUT_FROM_REF,
+ ptr::PTR_ARG,
+ ptr_offset_with_cast::PTR_OFFSET_WITH_CAST,
+ pub_use::PUB_USE,
+ question_mark::QUESTION_MARK,
+ ranges::MANUAL_RANGE_CONTAINS,
+ ranges::RANGE_MINUS_ONE,
+ ranges::RANGE_PLUS_ONE,
+ ranges::RANGE_ZIP_WITH_LEN,
+ ranges::REVERSED_EMPTY_RANGES,
+ rc_clone_in_vec_init::RC_CLONE_IN_VEC_INIT,
+ read_zero_byte_vec::READ_ZERO_BYTE_VEC,
+ redundant_clone::REDUNDANT_CLONE,
+ redundant_closure_call::REDUNDANT_CLOSURE_CALL,
+ redundant_else::REDUNDANT_ELSE,
+ redundant_field_names::REDUNDANT_FIELD_NAMES,
+ redundant_pub_crate::REDUNDANT_PUB_CRATE,
+ redundant_slicing::DEREF_BY_SLICING,
+ redundant_slicing::REDUNDANT_SLICING,
+ redundant_static_lifetimes::REDUNDANT_STATIC_LIFETIMES,
+ ref_option_ref::REF_OPTION_REF,
+ reference::DEREF_ADDROF,
+ regex::INVALID_REGEX,
+ regex::TRIVIAL_REGEX,
+ repeat_once::REPEAT_ONCE,
+ return_self_not_must_use::RETURN_SELF_NOT_MUST_USE,
+ returns::LET_AND_RETURN,
+ returns::NEEDLESS_RETURN,
+ same_name_method::SAME_NAME_METHOD,
+ self_named_constructors::SELF_NAMED_CONSTRUCTORS,
+ semicolon_if_nothing_returned::SEMICOLON_IF_NOTHING_RETURNED,
+ serde_api::SERDE_API_MISUSE,
+ shadow::SHADOW_REUSE,
+ shadow::SHADOW_SAME,
+ shadow::SHADOW_UNRELATED,
+ single_char_lifetime_names::SINGLE_CHAR_LIFETIME_NAMES,
+ single_component_path_imports::SINGLE_COMPONENT_PATH_IMPORTS,
+ size_of_in_element_count::SIZE_OF_IN_ELEMENT_COUNT,
+ slow_vector_initialization::SLOW_VECTOR_INITIALIZATION,
+ stable_sort_primitive::STABLE_SORT_PRIMITIVE,
+ std_instead_of_core::ALLOC_INSTEAD_OF_CORE,
+ std_instead_of_core::STD_INSTEAD_OF_ALLOC,
+ std_instead_of_core::STD_INSTEAD_OF_CORE,
+ strings::STRING_ADD,
+ strings::STRING_ADD_ASSIGN,
+ strings::STRING_FROM_UTF8_AS_BYTES,
+ strings::STRING_LIT_AS_BYTES,
+ strings::STRING_SLICE,
+ strings::STRING_TO_STRING,
+ strings::STR_TO_STRING,
+ strings::TRIM_SPLIT_WHITESPACE,
+ strlen_on_c_strings::STRLEN_ON_C_STRINGS,
+ suspicious_operation_groupings::SUSPICIOUS_OPERATION_GROUPINGS,
+ suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL,
+ suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL,
+ swap::ALMOST_SWAPPED,
+ swap::MANUAL_SWAP,
+ swap_ptr_to_ref::SWAP_PTR_TO_REF,
+ tabs_in_doc_comments::TABS_IN_DOC_COMMENTS,
+ temporary_assignment::TEMPORARY_ASSIGNMENT,
+ to_digit_is_some::TO_DIGIT_IS_SOME,
+ trailing_empty_array::TRAILING_EMPTY_ARRAY,
+ trait_bounds::TRAIT_DUPLICATION_IN_BOUNDS,
+ trait_bounds::TYPE_REPETITION_IN_BOUNDS,
+ transmute::CROSSPOINTER_TRANSMUTE,
+ transmute::TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS,
+ transmute::TRANSMUTE_BYTES_TO_STR,
+ transmute::TRANSMUTE_FLOAT_TO_INT,
+ transmute::TRANSMUTE_INT_TO_BOOL,
+ transmute::TRANSMUTE_INT_TO_CHAR,
+ transmute::TRANSMUTE_INT_TO_FLOAT,
+ transmute::TRANSMUTE_NUM_TO_BYTES,
+ transmute::TRANSMUTE_PTR_TO_PTR,
+ transmute::TRANSMUTE_PTR_TO_REF,
+ transmute::TRANSMUTE_UNDEFINED_REPR,
+ transmute::UNSOUND_COLLECTION_TRANSMUTE,
+ transmute::USELESS_TRANSMUTE,
+ transmute::WRONG_TRANSMUTE,
+ transmuting_null::TRANSMUTING_NULL,
+ types::BORROWED_BOX,
+ types::BOX_COLLECTION,
+ types::LINKEDLIST,
+ types::OPTION_OPTION,
+ types::RC_BUFFER,
+ types::RC_MUTEX,
+ types::REDUNDANT_ALLOCATION,
+ types::TYPE_COMPLEXITY,
+ types::VEC_BOX,
+ undocumented_unsafe_blocks::UNDOCUMENTED_UNSAFE_BLOCKS,
+ unicode::INVISIBLE_CHARACTERS,
+ unicode::NON_ASCII_LITERAL,
+ unicode::UNICODE_NOT_NFC,
+ uninit_vec::UNINIT_VEC,
+ unit_hash::UNIT_HASH,
+ unit_return_expecting_ord::UNIT_RETURN_EXPECTING_ORD,
+ unit_types::LET_UNIT_VALUE,
+ unit_types::UNIT_ARG,
+ unit_types::UNIT_CMP,
+ unnamed_address::FN_ADDRESS_COMPARISONS,
+ unnamed_address::VTABLE_ADDRESS_COMPARISONS,
+ unnecessary_owned_empty_strings::UNNECESSARY_OWNED_EMPTY_STRINGS,
+ unnecessary_self_imports::UNNECESSARY_SELF_IMPORTS,
+ unnecessary_sort_by::UNNECESSARY_SORT_BY,
+ unnecessary_wraps::UNNECESSARY_WRAPS,
+ unnested_or_patterns::UNNESTED_OR_PATTERNS,
+ unsafe_removed_from_name::UNSAFE_REMOVED_FROM_NAME,
+ unused_async::UNUSED_ASYNC,
+ unused_io_amount::UNUSED_IO_AMOUNT,
+ unused_rounding::UNUSED_ROUNDING,
+ unused_self::UNUSED_SELF,
+ unused_unit::UNUSED_UNIT,
+ unwrap::PANICKING_UNWRAP,
+ unwrap::UNNECESSARY_UNWRAP,
+ unwrap_in_result::UNWRAP_IN_RESULT,
+ upper_case_acronyms::UPPER_CASE_ACRONYMS,
+ use_self::USE_SELF,
+ useless_conversion::USELESS_CONVERSION,
+ vec::USELESS_VEC,
+ vec_init_then_push::VEC_INIT_THEN_PUSH,
+ vec_resize_to_zero::VEC_RESIZE_TO_ZERO,
+ verbose_file_reads::VERBOSE_FILE_READS,
+ wildcard_imports::ENUM_GLOB_USE,
+ wildcard_imports::WILDCARD_IMPORTS,
+ write::PRINTLN_EMPTY_STRING,
+ write::PRINT_LITERAL,
+ write::PRINT_STDERR,
+ write::PRINT_STDOUT,
+ write::PRINT_WITH_NEWLINE,
+ write::USE_DEBUG,
+ write::WRITELN_EMPTY_STRING,
+ write::WRITE_LITERAL,
+ write::WRITE_WITH_NEWLINE,
+ zero_div_zero::ZERO_DIVIDED_BY_ZERO,
+ zero_sized_map_values::ZERO_SIZED_MAP_VALUES,
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_nursery.rs b/src/tools/clippy/clippy_lints/src/lib.register_nursery.rs
new file mode 100644
index 000000000..973191eb1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_nursery.rs
@@ -0,0 +1,36 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::nursery", Some("clippy_nursery"), vec![
+ LintId::of(attrs::EMPTY_LINE_AFTER_OUTER_ATTR),
+ LintId::of(cognitive_complexity::COGNITIVE_COMPLEXITY),
+ LintId::of(copies::BRANCHES_SHARING_CODE),
+ LintId::of(dereference::EXPLICIT_AUTO_DEREF),
+ LintId::of(equatable_if_let::EQUATABLE_IF_LET),
+ LintId::of(fallible_impl_from::FALLIBLE_IMPL_FROM),
+ LintId::of(floating_point_arithmetic::IMPRECISE_FLOPS),
+ LintId::of(floating_point_arithmetic::SUBOPTIMAL_FLOPS),
+ LintId::of(future_not_send::FUTURE_NOT_SEND),
+ LintId::of(index_refutable_slice::INDEX_REFUTABLE_SLICE),
+ LintId::of(let_if_seq::USELESS_LET_IF_SEQ),
+ LintId::of(matches::SIGNIFICANT_DROP_IN_SCRUTINEE),
+ LintId::of(methods::ITER_WITH_DRAIN),
+ LintId::of(missing_const_for_fn::MISSING_CONST_FOR_FN),
+ LintId::of(mutable_debug_assertion::DEBUG_ASSERT_WITH_MUT_CALL),
+ LintId::of(mutex_atomic::MUTEX_ATOMIC),
+ LintId::of(mutex_atomic::MUTEX_INTEGER),
+ LintId::of(non_send_fields_in_send_ty::NON_SEND_FIELDS_IN_SEND_TY),
+ LintId::of(nonstandard_macro_braces::NONSTANDARD_MACRO_BRACES),
+ LintId::of(only_used_in_recursion::ONLY_USED_IN_RECURSION),
+ LintId::of(option_if_let_else::OPTION_IF_LET_ELSE),
+ LintId::of(path_buf_push_overwrite::PATH_BUF_PUSH_OVERWRITE),
+ LintId::of(redundant_pub_crate::REDUNDANT_PUB_CRATE),
+ LintId::of(regex::TRIVIAL_REGEX),
+ LintId::of(strings::STRING_LIT_AS_BYTES),
+ LintId::of(suspicious_operation_groupings::SUSPICIOUS_OPERATION_GROUPINGS),
+ LintId::of(trailing_empty_array::TRAILING_EMPTY_ARRAY),
+ LintId::of(transmute::TRANSMUTE_UNDEFINED_REPR),
+ LintId::of(unused_rounding::UNUSED_ROUNDING),
+ LintId::of(use_self::USE_SELF),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_pedantic.rs b/src/tools/clippy/clippy_lints/src/lib.register_pedantic.rs
new file mode 100644
index 000000000..a1b546658
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_pedantic.rs
@@ -0,0 +1,102 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::pedantic", Some("clippy_pedantic"), vec![
+ LintId::of(attrs::INLINE_ALWAYS),
+ LintId::of(borrow_as_ptr::BORROW_AS_PTR),
+ LintId::of(bytecount::NAIVE_BYTECOUNT),
+ LintId::of(case_sensitive_file_extension_comparisons::CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS),
+ LintId::of(casts::CAST_LOSSLESS),
+ LintId::of(casts::CAST_POSSIBLE_TRUNCATION),
+ LintId::of(casts::CAST_POSSIBLE_WRAP),
+ LintId::of(casts::CAST_PRECISION_LOSS),
+ LintId::of(casts::CAST_PTR_ALIGNMENT),
+ LintId::of(casts::CAST_SIGN_LOSS),
+ LintId::of(casts::PTR_AS_PTR),
+ LintId::of(checked_conversions::CHECKED_CONVERSIONS),
+ LintId::of(copies::SAME_FUNCTIONS_IN_IF_CONDITION),
+ LintId::of(copy_iterator::COPY_ITERATOR),
+ LintId::of(default::DEFAULT_TRAIT_ACCESS),
+ LintId::of(dereference::EXPLICIT_DEREF_METHODS),
+ LintId::of(dereference::REF_BINDING_TO_REFERENCE),
+ LintId::of(derive::EXPL_IMPL_CLONE_ON_COPY),
+ LintId::of(derive::UNSAFE_DERIVE_DESERIALIZE),
+ LintId::of(doc::DOC_MARKDOWN),
+ LintId::of(doc::MISSING_ERRORS_DOC),
+ LintId::of(doc::MISSING_PANICS_DOC),
+ LintId::of(doc_link_with_quotes::DOC_LINK_WITH_QUOTES),
+ LintId::of(empty_enum::EMPTY_ENUM),
+ LintId::of(enum_variants::MODULE_NAME_REPETITIONS),
+ LintId::of(eta_reduction::REDUNDANT_CLOSURE_FOR_METHOD_CALLS),
+ LintId::of(excessive_bools::FN_PARAMS_EXCESSIVE_BOOLS),
+ LintId::of(excessive_bools::STRUCT_EXCESSIVE_BOOLS),
+ LintId::of(functions::MUST_USE_CANDIDATE),
+ LintId::of(functions::TOO_MANY_LINES),
+ LintId::of(if_not_else::IF_NOT_ELSE),
+ LintId::of(implicit_hasher::IMPLICIT_HASHER),
+ LintId::of(implicit_saturating_sub::IMPLICIT_SATURATING_SUB),
+ LintId::of(inconsistent_struct_constructor::INCONSISTENT_STRUCT_CONSTRUCTOR),
+ LintId::of(infinite_iter::MAYBE_INFINITE_ITER),
+ LintId::of(invalid_upcast_comparisons::INVALID_UPCAST_COMPARISONS),
+ LintId::of(items_after_statements::ITEMS_AFTER_STATEMENTS),
+ LintId::of(iter_not_returning_iterator::ITER_NOT_RETURNING_ITERATOR),
+ LintId::of(large_stack_arrays::LARGE_STACK_ARRAYS),
+ LintId::of(let_underscore::LET_UNDERSCORE_DROP),
+ LintId::of(literal_representation::LARGE_DIGIT_GROUPS),
+ LintId::of(literal_representation::UNREADABLE_LITERAL),
+ LintId::of(loops::EXPLICIT_INTO_ITER_LOOP),
+ LintId::of(loops::EXPLICIT_ITER_LOOP),
+ LintId::of(macro_use::MACRO_USE_IMPORTS),
+ LintId::of(manual_assert::MANUAL_ASSERT),
+ LintId::of(manual_ok_or::MANUAL_OK_OR),
+ LintId::of(matches::MATCH_BOOL),
+ LintId::of(matches::MATCH_ON_VEC_ITEMS),
+ LintId::of(matches::MATCH_SAME_ARMS),
+ LintId::of(matches::MATCH_WILDCARD_FOR_SINGLE_VARIANTS),
+ LintId::of(matches::MATCH_WILD_ERR_ARM),
+ LintId::of(matches::SINGLE_MATCH_ELSE),
+ LintId::of(methods::CLONED_INSTEAD_OF_COPIED),
+ LintId::of(methods::FILTER_MAP_NEXT),
+ LintId::of(methods::FLAT_MAP_OPTION),
+ LintId::of(methods::FROM_ITER_INSTEAD_OF_COLLECT),
+ LintId::of(methods::IMPLICIT_CLONE),
+ LintId::of(methods::INEFFICIENT_TO_STRING),
+ LintId::of(methods::MAP_UNWRAP_OR),
+ LintId::of(methods::UNNECESSARY_JOIN),
+ LintId::of(misc::USED_UNDERSCORE_BINDING),
+ LintId::of(mismatching_type_param_order::MISMATCHING_TYPE_PARAM_ORDER),
+ LintId::of(mut_mut::MUT_MUT),
+ LintId::of(needless_continue::NEEDLESS_CONTINUE),
+ LintId::of(needless_for_each::NEEDLESS_FOR_EACH),
+ LintId::of(needless_pass_by_value::NEEDLESS_PASS_BY_VALUE),
+ LintId::of(no_effect::NO_EFFECT_UNDERSCORE_BINDING),
+ LintId::of(non_expressive_names::MANY_SINGLE_CHAR_NAMES),
+ LintId::of(non_expressive_names::SIMILAR_NAMES),
+ LintId::of(operators::FLOAT_CMP),
+ LintId::of(operators::NEEDLESS_BITWISE_BOOL),
+ LintId::of(operators::VERBOSE_BIT_MASK),
+ LintId::of(pass_by_ref_or_value::LARGE_TYPES_PASSED_BY_VALUE),
+ LintId::of(pass_by_ref_or_value::TRIVIALLY_COPY_PASS_BY_REF),
+ LintId::of(ranges::RANGE_MINUS_ONE),
+ LintId::of(ranges::RANGE_PLUS_ONE),
+ LintId::of(redundant_else::REDUNDANT_ELSE),
+ LintId::of(ref_option_ref::REF_OPTION_REF),
+ LintId::of(return_self_not_must_use::RETURN_SELF_NOT_MUST_USE),
+ LintId::of(semicolon_if_nothing_returned::SEMICOLON_IF_NOTHING_RETURNED),
+ LintId::of(stable_sort_primitive::STABLE_SORT_PRIMITIVE),
+ LintId::of(strings::STRING_ADD_ASSIGN),
+ LintId::of(trait_bounds::TRAIT_DUPLICATION_IN_BOUNDS),
+ LintId::of(trait_bounds::TYPE_REPETITION_IN_BOUNDS),
+ LintId::of(transmute::TRANSMUTE_PTR_TO_PTR),
+ LintId::of(types::LINKEDLIST),
+ LintId::of(types::OPTION_OPTION),
+ LintId::of(unicode::UNICODE_NOT_NFC),
+ LintId::of(unnecessary_wraps::UNNECESSARY_WRAPS),
+ LintId::of(unnested_or_patterns::UNNESTED_OR_PATTERNS),
+ LintId::of(unused_async::UNUSED_ASYNC),
+ LintId::of(unused_self::UNUSED_SELF),
+ LintId::of(wildcard_imports::ENUM_GLOB_USE),
+ LintId::of(wildcard_imports::WILDCARD_IMPORTS),
+ LintId::of(zero_sized_map_values::ZERO_SIZED_MAP_VALUES),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_perf.rs b/src/tools/clippy/clippy_lints/src/lib.register_perf.rs
new file mode 100644
index 000000000..e1b90acb9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_perf.rs
@@ -0,0 +1,31 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::perf", Some("clippy_perf"), vec![
+ LintId::of(entry::MAP_ENTRY),
+ LintId::of(escape::BOXED_LOCAL),
+ LintId::of(format_args::FORMAT_IN_FORMAT_ARGS),
+ LintId::of(format_args::TO_STRING_IN_FORMAT_ARGS),
+ LintId::of(large_const_arrays::LARGE_CONST_ARRAYS),
+ LintId::of(large_enum_variant::LARGE_ENUM_VARIANT),
+ LintId::of(loops::MANUAL_MEMCPY),
+ LintId::of(loops::MISSING_SPIN_LOOP),
+ LintId::of(loops::NEEDLESS_COLLECT),
+ LintId::of(manual_retain::MANUAL_RETAIN),
+ LintId::of(methods::EXPECT_FUN_CALL),
+ LintId::of(methods::EXTEND_WITH_DRAIN),
+ LintId::of(methods::ITER_NTH),
+ LintId::of(methods::ITER_OVEREAGER_CLONED),
+ LintId::of(methods::MANUAL_STR_REPEAT),
+ LintId::of(methods::OR_FUN_CALL),
+ LintId::of(methods::SINGLE_CHAR_PATTERN),
+ LintId::of(methods::UNNECESSARY_TO_OWNED),
+ LintId::of(operators::CMP_OWNED),
+ LintId::of(redundant_clone::REDUNDANT_CLONE),
+ LintId::of(slow_vector_initialization::SLOW_VECTOR_INITIALIZATION),
+ LintId::of(types::BOX_COLLECTION),
+ LintId::of(types::REDUNDANT_ALLOCATION),
+ LintId::of(vec::USELESS_VEC),
+ LintId::of(vec_init_then_push::VEC_INIT_THEN_PUSH),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_restriction.rs b/src/tools/clippy/clippy_lints/src/lib.register_restriction.rs
new file mode 100644
index 000000000..a7339ef27
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_restriction.rs
@@ -0,0 +1,88 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::restriction", Some("clippy_restriction"), vec![
+ LintId::of(as_conversions::AS_CONVERSIONS),
+ LintId::of(as_underscore::AS_UNDERSCORE),
+ LintId::of(asm_syntax::INLINE_ASM_X86_ATT_SYNTAX),
+ LintId::of(asm_syntax::INLINE_ASM_X86_INTEL_SYNTAX),
+ LintId::of(assertions_on_result_states::ASSERTIONS_ON_RESULT_STATES),
+ LintId::of(attrs::ALLOW_ATTRIBUTES_WITHOUT_REASON),
+ LintId::of(casts::FN_TO_NUMERIC_CAST_ANY),
+ LintId::of(create_dir::CREATE_DIR),
+ LintId::of(dbg_macro::DBG_MACRO),
+ LintId::of(default_numeric_fallback::DEFAULT_NUMERIC_FALLBACK),
+ LintId::of(default_union_representation::DEFAULT_UNION_REPRESENTATION),
+ LintId::of(disallowed_script_idents::DISALLOWED_SCRIPT_IDENTS),
+ LintId::of(else_if_without_else::ELSE_IF_WITHOUT_ELSE),
+ LintId::of(empty_drop::EMPTY_DROP),
+ LintId::of(empty_structs_with_brackets::EMPTY_STRUCTS_WITH_BRACKETS),
+ LintId::of(exhaustive_items::EXHAUSTIVE_ENUMS),
+ LintId::of(exhaustive_items::EXHAUSTIVE_STRUCTS),
+ LintId::of(exit::EXIT),
+ LintId::of(float_literal::LOSSY_FLOAT_LITERAL),
+ LintId::of(format_push_string::FORMAT_PUSH_STRING),
+ LintId::of(if_then_some_else_none::IF_THEN_SOME_ELSE_NONE),
+ LintId::of(implicit_return::IMPLICIT_RETURN),
+ LintId::of(indexing_slicing::INDEXING_SLICING),
+ LintId::of(inherent_impl::MULTIPLE_INHERENT_IMPL),
+ LintId::of(large_include_file::LARGE_INCLUDE_FILE),
+ LintId::of(let_underscore::LET_UNDERSCORE_MUST_USE),
+ LintId::of(literal_representation::DECIMAL_LITERAL_REPRESENTATION),
+ LintId::of(map_err_ignore::MAP_ERR_IGNORE),
+ LintId::of(matches::REST_PAT_IN_FULLY_BOUND_STRUCTS),
+ LintId::of(matches::TRY_ERR),
+ LintId::of(matches::WILDCARD_ENUM_MATCH_ARM),
+ LintId::of(mem_forget::MEM_FORGET),
+ LintId::of(methods::CLONE_ON_REF_PTR),
+ LintId::of(methods::EXPECT_USED),
+ LintId::of(methods::FILETYPE_IS_FILE),
+ LintId::of(methods::GET_UNWRAP),
+ LintId::of(methods::UNWRAP_USED),
+ LintId::of(misc_early::SEPARATED_LITERAL_SUFFIX),
+ LintId::of(misc_early::UNNEEDED_FIELD_PATTERN),
+ LintId::of(misc_early::UNSEPARATED_LITERAL_SUFFIX),
+ LintId::of(missing_doc::MISSING_DOCS_IN_PRIVATE_ITEMS),
+ LintId::of(missing_enforced_import_rename::MISSING_ENFORCED_IMPORT_RENAMES),
+ LintId::of(missing_inline::MISSING_INLINE_IN_PUBLIC_ITEMS),
+ LintId::of(mixed_read_write_in_expression::MIXED_READ_WRITE_IN_EXPRESSION),
+ LintId::of(module_style::MOD_MODULE_FILES),
+ LintId::of(module_style::SELF_NAMED_MODULE_FILES),
+ LintId::of(operators::ARITHMETIC),
+ LintId::of(operators::FLOAT_ARITHMETIC),
+ LintId::of(operators::FLOAT_CMP_CONST),
+ LintId::of(operators::INTEGER_ARITHMETIC),
+ LintId::of(operators::INTEGER_DIVISION),
+ LintId::of(operators::MODULO_ARITHMETIC),
+ LintId::of(panic_in_result_fn::PANIC_IN_RESULT_FN),
+ LintId::of(panic_unimplemented::PANIC),
+ LintId::of(panic_unimplemented::TODO),
+ LintId::of(panic_unimplemented::UNIMPLEMENTED),
+ LintId::of(panic_unimplemented::UNREACHABLE),
+ LintId::of(pattern_type_mismatch::PATTERN_TYPE_MISMATCH),
+ LintId::of(pub_use::PUB_USE),
+ LintId::of(redundant_slicing::DEREF_BY_SLICING),
+ LintId::of(same_name_method::SAME_NAME_METHOD),
+ LintId::of(shadow::SHADOW_REUSE),
+ LintId::of(shadow::SHADOW_SAME),
+ LintId::of(shadow::SHADOW_UNRELATED),
+ LintId::of(single_char_lifetime_names::SINGLE_CHAR_LIFETIME_NAMES),
+ LintId::of(std_instead_of_core::ALLOC_INSTEAD_OF_CORE),
+ LintId::of(std_instead_of_core::STD_INSTEAD_OF_ALLOC),
+ LintId::of(std_instead_of_core::STD_INSTEAD_OF_CORE),
+ LintId::of(strings::STRING_ADD),
+ LintId::of(strings::STRING_SLICE),
+ LintId::of(strings::STRING_TO_STRING),
+ LintId::of(strings::STR_TO_STRING),
+ LintId::of(types::RC_BUFFER),
+ LintId::of(types::RC_MUTEX),
+ LintId::of(undocumented_unsafe_blocks::UNDOCUMENTED_UNSAFE_BLOCKS),
+ LintId::of(unicode::NON_ASCII_LITERAL),
+ LintId::of(unnecessary_self_imports::UNNECESSARY_SELF_IMPORTS),
+ LintId::of(unwrap_in_result::UNWRAP_IN_RESULT),
+ LintId::of(verbose_file_reads::VERBOSE_FILE_READS),
+ LintId::of(write::PRINT_STDERR),
+ LintId::of(write::PRINT_STDOUT),
+ LintId::of(write::USE_DEBUG),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_style.rs b/src/tools/clippy/clippy_lints/src/lib.register_style.rs
new file mode 100644
index 000000000..e95bab1d0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_style.rs
@@ -0,0 +1,127 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::style", Some("clippy_style"), vec![
+ LintId::of(assertions_on_constants::ASSERTIONS_ON_CONSTANTS),
+ LintId::of(blacklisted_name::BLACKLISTED_NAME),
+ LintId::of(blocks_in_if_conditions::BLOCKS_IN_IF_CONDITIONS),
+ LintId::of(bool_assert_comparison::BOOL_ASSERT_COMPARISON),
+ LintId::of(casts::FN_TO_NUMERIC_CAST),
+ LintId::of(casts::FN_TO_NUMERIC_CAST_WITH_TRUNCATION),
+ LintId::of(collapsible_if::COLLAPSIBLE_ELSE_IF),
+ LintId::of(collapsible_if::COLLAPSIBLE_IF),
+ LintId::of(comparison_chain::COMPARISON_CHAIN),
+ LintId::of(default::FIELD_REASSIGN_WITH_DEFAULT),
+ LintId::of(default_instead_of_iter_empty::DEFAULT_INSTEAD_OF_ITER_EMPTY),
+ LintId::of(dereference::NEEDLESS_BORROW),
+ LintId::of(derive::DERIVE_PARTIAL_EQ_WITHOUT_EQ),
+ LintId::of(disallowed_methods::DISALLOWED_METHODS),
+ LintId::of(disallowed_types::DISALLOWED_TYPES),
+ LintId::of(doc::MISSING_SAFETY_DOC),
+ LintId::of(doc::NEEDLESS_DOCTEST_MAIN),
+ LintId::of(enum_variants::ENUM_VARIANT_NAMES),
+ LintId::of(enum_variants::MODULE_INCEPTION),
+ LintId::of(eta_reduction::REDUNDANT_CLOSURE),
+ LintId::of(float_literal::EXCESSIVE_PRECISION),
+ LintId::of(from_over_into::FROM_OVER_INTO),
+ LintId::of(from_str_radix_10::FROM_STR_RADIX_10),
+ LintId::of(functions::DOUBLE_MUST_USE),
+ LintId::of(functions::MUST_USE_UNIT),
+ LintId::of(functions::RESULT_UNIT_ERR),
+ LintId::of(get_first::GET_FIRST),
+ LintId::of(inherent_to_string::INHERENT_TO_STRING),
+ LintId::of(init_numbered_fields::INIT_NUMBERED_FIELDS),
+ LintId::of(len_zero::COMPARISON_TO_EMPTY),
+ LintId::of(len_zero::LEN_WITHOUT_IS_EMPTY),
+ LintId::of(len_zero::LEN_ZERO),
+ LintId::of(literal_representation::INCONSISTENT_DIGIT_GROUPING),
+ LintId::of(literal_representation::UNUSUAL_BYTE_GROUPINGS),
+ LintId::of(loops::FOR_KV_MAP),
+ LintId::of(loops::NEEDLESS_RANGE_LOOP),
+ LintId::of(loops::SAME_ITEM_PUSH),
+ LintId::of(loops::WHILE_LET_ON_ITERATOR),
+ LintId::of(main_recursion::MAIN_RECURSION),
+ LintId::of(manual_async_fn::MANUAL_ASYNC_FN),
+ LintId::of(manual_bits::MANUAL_BITS),
+ LintId::of(manual_non_exhaustive::MANUAL_NON_EXHAUSTIVE),
+ LintId::of(map_clone::MAP_CLONE),
+ LintId::of(match_result_ok::MATCH_RESULT_OK),
+ LintId::of(matches::COLLAPSIBLE_MATCH),
+ LintId::of(matches::INFALLIBLE_DESTRUCTURING_MATCH),
+ LintId::of(matches::MANUAL_MAP),
+ LintId::of(matches::MATCH_LIKE_MATCHES_MACRO),
+ LintId::of(matches::MATCH_OVERLAPPING_ARM),
+ LintId::of(matches::MATCH_REF_PATS),
+ LintId::of(matches::REDUNDANT_PATTERN_MATCHING),
+ LintId::of(matches::SINGLE_MATCH),
+ LintId::of(mem_replace::MEM_REPLACE_OPTION_WITH_NONE),
+ LintId::of(mem_replace::MEM_REPLACE_WITH_DEFAULT),
+ LintId::of(methods::BYTES_NTH),
+ LintId::of(methods::CHARS_LAST_CMP),
+ LintId::of(methods::CHARS_NEXT_CMP),
+ LintId::of(methods::ERR_EXPECT),
+ LintId::of(methods::INTO_ITER_ON_REF),
+ LintId::of(methods::IS_DIGIT_ASCII_RADIX),
+ LintId::of(methods::ITER_CLONED_COLLECT),
+ LintId::of(methods::ITER_NEXT_SLICE),
+ LintId::of(methods::ITER_NTH_ZERO),
+ LintId::of(methods::ITER_SKIP_NEXT),
+ LintId::of(methods::MANUAL_SATURATING_ARITHMETIC),
+ LintId::of(methods::MAP_COLLECT_RESULT_UNIT),
+ LintId::of(methods::NEW_RET_NO_SELF),
+ LintId::of(methods::OBFUSCATED_IF_ELSE),
+ LintId::of(methods::OK_EXPECT),
+ LintId::of(methods::OPTION_MAP_OR_NONE),
+ LintId::of(methods::RESULT_MAP_OR_INTO_OPTION),
+ LintId::of(methods::SHOULD_IMPLEMENT_TRAIT),
+ LintId::of(methods::SINGLE_CHAR_ADD_STR),
+ LintId::of(methods::STRING_EXTEND_CHARS),
+ LintId::of(methods::UNNECESSARY_FOLD),
+ LintId::of(methods::UNNECESSARY_LAZY_EVALUATIONS),
+ LintId::of(methods::UNWRAP_OR_ELSE_DEFAULT),
+ LintId::of(methods::WRONG_SELF_CONVENTION),
+ LintId::of(misc::TOPLEVEL_REF_ARG),
+ LintId::of(misc::ZERO_PTR),
+ LintId::of(misc_early::BUILTIN_TYPE_SHADOW),
+ LintId::of(misc_early::DOUBLE_NEG),
+ LintId::of(misc_early::DUPLICATE_UNDERSCORE_ARGUMENT),
+ LintId::of(misc_early::MIXED_CASE_HEX_LITERALS),
+ LintId::of(misc_early::REDUNDANT_PATTERN),
+ LintId::of(mut_mutex_lock::MUT_MUTEX_LOCK),
+ LintId::of(mut_reference::UNNECESSARY_MUT_PASSED),
+ LintId::of(needless_late_init::NEEDLESS_LATE_INIT),
+ LintId::of(needless_parens_on_range_literals::NEEDLESS_PARENS_ON_RANGE_LITERALS),
+ LintId::of(neg_multiply::NEG_MULTIPLY),
+ LintId::of(new_without_default::NEW_WITHOUT_DEFAULT),
+ LintId::of(non_copy_const::BORROW_INTERIOR_MUTABLE_CONST),
+ LintId::of(non_copy_const::DECLARE_INTERIOR_MUTABLE_CONST),
+ LintId::of(non_expressive_names::JUST_UNDERSCORES_AND_DIGITS),
+ LintId::of(operators::ASSIGN_OP_PATTERN),
+ LintId::of(operators::OP_REF),
+ LintId::of(operators::PTR_EQ),
+ LintId::of(ptr::CMP_NULL),
+ LintId::of(ptr::PTR_ARG),
+ LintId::of(question_mark::QUESTION_MARK),
+ LintId::of(ranges::MANUAL_RANGE_CONTAINS),
+ LintId::of(redundant_field_names::REDUNDANT_FIELD_NAMES),
+ LintId::of(redundant_static_lifetimes::REDUNDANT_STATIC_LIFETIMES),
+ LintId::of(returns::LET_AND_RETURN),
+ LintId::of(returns::NEEDLESS_RETURN),
+ LintId::of(self_named_constructors::SELF_NAMED_CONSTRUCTORS),
+ LintId::of(single_component_path_imports::SINGLE_COMPONENT_PATH_IMPORTS),
+ LintId::of(strings::TRIM_SPLIT_WHITESPACE),
+ LintId::of(tabs_in_doc_comments::TABS_IN_DOC_COMMENTS),
+ LintId::of(to_digit_is_some::TO_DIGIT_IS_SOME),
+ LintId::of(unit_types::LET_UNIT_VALUE),
+ LintId::of(unnecessary_owned_empty_strings::UNNECESSARY_OWNED_EMPTY_STRINGS),
+ LintId::of(unsafe_removed_from_name::UNSAFE_REMOVED_FROM_NAME),
+ LintId::of(unused_unit::UNUSED_UNIT),
+ LintId::of(upper_case_acronyms::UPPER_CASE_ACRONYMS),
+ LintId::of(write::PRINTLN_EMPTY_STRING),
+ LintId::of(write::PRINT_LITERAL),
+ LintId::of(write::PRINT_WITH_NEWLINE),
+ LintId::of(write::WRITELN_EMPTY_STRING),
+ LintId::of(write::WRITE_LITERAL),
+ LintId::of(write::WRITE_WITH_NEWLINE),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs b/src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs
new file mode 100644
index 000000000..964992bd9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs
@@ -0,0 +1,35 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+store.register_group(true, "clippy::suspicious", Some("clippy_suspicious"), vec![
+ LintId::of(almost_complete_letter_range::ALMOST_COMPLETE_LETTER_RANGE),
+ LintId::of(attrs::BLANKET_CLIPPY_RESTRICTION_LINTS),
+ LintId::of(await_holding_invalid::AWAIT_HOLDING_INVALID_TYPE),
+ LintId::of(await_holding_invalid::AWAIT_HOLDING_LOCK),
+ LintId::of(await_holding_invalid::AWAIT_HOLDING_REFCELL_REF),
+ LintId::of(casts::CAST_ABS_TO_UNSIGNED),
+ LintId::of(casts::CAST_ENUM_CONSTRUCTOR),
+ LintId::of(casts::CAST_ENUM_TRUNCATION),
+ LintId::of(crate_in_macro_def::CRATE_IN_MACRO_DEF),
+ LintId::of(drop_forget_ref::DROP_NON_DROP),
+ LintId::of(drop_forget_ref::FORGET_NON_DROP),
+ LintId::of(duplicate_mod::DUPLICATE_MOD),
+ LintId::of(format_impl::PRINT_IN_FORMAT_IMPL),
+ LintId::of(formatting::SUSPICIOUS_ASSIGNMENT_FORMATTING),
+ LintId::of(formatting::SUSPICIOUS_ELSE_FORMATTING),
+ LintId::of(formatting::SUSPICIOUS_UNARY_OP_FORMATTING),
+ LintId::of(loops::EMPTY_LOOP),
+ LintId::of(loops::FOR_LOOPS_OVER_FALLIBLES),
+ LintId::of(loops::MUT_RANGE_BOUND),
+ LintId::of(methods::NO_EFFECT_REPLACE),
+ LintId::of(methods::SUSPICIOUS_MAP),
+ LintId::of(mut_key::MUTABLE_KEY_TYPE),
+ LintId::of(octal_escapes::OCTAL_ESCAPES),
+ LintId::of(operators::FLOAT_EQUALITY_WITHOUT_ABS),
+ LintId::of(operators::MISREFACTORED_ASSIGN_OP),
+ LintId::of(rc_clone_in_vec_init::RC_CLONE_IN_VEC_INIT),
+ LintId::of(suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL),
+ LintId::of(suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL),
+ LintId::of(swap_ptr_to_ref::SWAP_PTR_TO_REF),
+])
diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs
new file mode 100644
index 000000000..5a3111632
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lib.rs
@@ -0,0 +1,985 @@
+#![feature(array_windows)]
+#![feature(binary_heap_into_iter_sorted)]
+#![feature(box_patterns)]
+#![feature(control_flow_enum)]
+#![feature(drain_filter)]
+#![feature(iter_intersperse)]
+#![feature(let_chains)]
+#![feature(let_else)]
+#![feature(lint_reasons)]
+#![feature(never_type)]
+#![feature(once_cell)]
+#![feature(rustc_private)]
+#![feature(stmt_expr_attributes)]
+#![recursion_limit = "512"]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![allow(clippy::missing_docs_in_private_items, clippy::must_use_candidate)]
+#![warn(trivial_casts, trivial_numeric_casts)]
+// warn on lints that are included in `rust-lang/rust`'s bootstrap
+#![warn(rust_2018_idioms, unused_lifetimes)]
+// warn on rustc internal lints
+#![warn(rustc::internal)]
+// Disable this rustc lint for now, as it was also done in rustc
+#![allow(rustc::potential_query_instability)]
+
+// FIXME: switch to something more ergonomic here, once available.
+// (Currently there is no way to opt into sysroot crates without `extern crate`.)
+extern crate rustc_arena;
+extern crate rustc_ast;
+extern crate rustc_ast_pretty;
+extern crate rustc_attr;
+extern crate rustc_data_structures;
+extern crate rustc_driver;
+extern crate rustc_errors;
+extern crate rustc_hir;
+extern crate rustc_hir_pretty;
+extern crate rustc_index;
+extern crate rustc_infer;
+extern crate rustc_lexer;
+extern crate rustc_lint;
+extern crate rustc_middle;
+extern crate rustc_mir_dataflow;
+extern crate rustc_parse;
+extern crate rustc_parse_format;
+extern crate rustc_session;
+extern crate rustc_span;
+extern crate rustc_target;
+extern crate rustc_trait_selection;
+extern crate rustc_typeck;
+
+#[macro_use]
+extern crate clippy_utils;
+
+use clippy_utils::parse_msrv;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_lint::LintId;
+use rustc_semver::RustcVersion;
+use rustc_session::Session;
+
+/// Macro used to declare a Clippy lint.
+///
+/// Every lint declaration consists of 4 parts:
+///
+/// 1. The documentation, which is used for the website
+/// 2. The `LINT_NAME`. See [lint naming][lint_naming] on lint naming conventions.
+/// 3. The `lint_level`, which is a mapping from *one* of our lint groups to `Allow`, `Warn` or
+/// `Deny`. The lint level here has nothing to do with what lint groups the lint is a part of.
+/// 4. The `description` that contains a short explanation on what's wrong with code where the
+/// lint is triggered.
+///
+/// Currently the categories `style`, `correctness`, `suspicious`, `complexity` and `perf` are
+/// enabled by default. As said in the README.md of this repository, if the lint level mapping
+/// changes, please update README.md.
+///
+/// # Example
+///
+/// ```
+/// #![feature(rustc_private)]
+/// extern crate rustc_session;
+/// use rustc_session::declare_tool_lint;
+/// use clippy_lints::declare_clippy_lint;
+///
+/// declare_clippy_lint! {
+/// /// ### What it does
+/// /// Checks for ... (describe what the lint matches).
+/// ///
+/// /// ### Why is this bad?
+/// /// Supply the reason for linting the code.
+/// ///
+/// /// ### Example
+/// /// ```rust
+/// /// Insert a short example of code that triggers the lint
+/// /// ```
+/// ///
+/// /// Use instead:
+/// /// ```rust
+/// /// Insert a short example of improved code that doesn't trigger the lint
+/// /// ```
+/// pub LINT_NAME,
+/// pedantic,
+/// "description"
+/// }
+/// ```
+/// [lint_naming]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
+#[macro_export]
+macro_rules! declare_clippy_lint {
+ { $(#[$attr:meta])* pub $name:tt, style, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, correctness, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Deny, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, suspicious, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, complexity, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, perf, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, pedantic, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, restriction, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, cargo, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, nursery, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, internal, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
+ }
+ };
+ { $(#[$attr:meta])* pub $name:tt, internal_warn, $description:tt } => {
+ declare_tool_lint! {
+ $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
+ }
+ };
+}
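+// Illustrative note: the arms above are what map each lint group to its default level
+// (`correctness` denies, `style`/`suspicious`/`complexity`/`perf` warn, everything else
+// is allow-by-default). As a hypothetical sketch (the lint name below is made up), a
+// declaration such as
+//
+//     declare_clippy_lint! {
+//         /// ### What it does
+//         /// Example only.
+//         pub EXAMPLE_PERF_LINT,
+//         perf,
+//         "an example description"
+//     }
+//
+// would expand to `declare_tool_lint!` with level `Warn`, while the same declaration in
+// the `pedantic` or `restriction` group would default to `Allow`.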
+
+#[cfg(feature = "internal")]
+pub mod deprecated_lints;
+#[cfg_attr(feature = "internal", allow(clippy::missing_clippy_version_attribute))]
+mod utils;
+
+mod renamed_lints;
+
+// begin lints modules, do not remove this comment, it's used in `update_lints`
+mod almost_complete_letter_range;
+mod approx_const;
+mod as_conversions;
+mod as_underscore;
+mod asm_syntax;
+mod assertions_on_constants;
+mod assertions_on_result_states;
+mod async_yields_async;
+mod attrs;
+mod await_holding_invalid;
+mod blacklisted_name;
+mod blocks_in_if_conditions;
+mod bool_assert_comparison;
+mod booleans;
+mod borrow_as_ptr;
+mod borrow_deref_ref;
+mod bytecount;
+mod bytes_count_to_len;
+mod cargo;
+mod case_sensitive_file_extension_comparisons;
+mod casts;
+mod checked_conversions;
+mod cognitive_complexity;
+mod collapsible_if;
+mod comparison_chain;
+mod copies;
+mod copy_iterator;
+mod crate_in_macro_def;
+mod create_dir;
+mod dbg_macro;
+mod default;
+mod default_instead_of_iter_empty;
+mod default_numeric_fallback;
+mod default_union_representation;
+mod dereference;
+mod derivable_impls;
+mod derive;
+mod disallowed_methods;
+mod disallowed_script_idents;
+mod disallowed_types;
+mod doc;
+mod doc_link_with_quotes;
+mod double_parens;
+mod drop_forget_ref;
+mod duplicate_mod;
+mod else_if_without_else;
+mod empty_drop;
+mod empty_enum;
+mod empty_structs_with_brackets;
+mod entry;
+mod enum_clike;
+mod enum_variants;
+mod equatable_if_let;
+mod escape;
+mod eta_reduction;
+mod excessive_bools;
+mod exhaustive_items;
+mod exit;
+mod explicit_write;
+mod fallible_impl_from;
+mod float_literal;
+mod floating_point_arithmetic;
+mod format;
+mod format_args;
+mod format_impl;
+mod format_push_string;
+mod formatting;
+mod from_over_into;
+mod from_str_radix_10;
+mod functions;
+mod future_not_send;
+mod get_first;
+mod if_let_mutex;
+mod if_not_else;
+mod if_then_some_else_none;
+mod implicit_hasher;
+mod implicit_return;
+mod implicit_saturating_sub;
+mod inconsistent_struct_constructor;
+mod index_refutable_slice;
+mod indexing_slicing;
+mod infinite_iter;
+mod inherent_impl;
+mod inherent_to_string;
+mod init_numbered_fields;
+mod inline_fn_without_body;
+mod int_plus_one;
+mod invalid_upcast_comparisons;
+mod invalid_utf8_in_unchecked;
+mod items_after_statements;
+mod iter_not_returning_iterator;
+mod large_const_arrays;
+mod large_enum_variant;
+mod large_include_file;
+mod large_stack_arrays;
+mod len_zero;
+mod let_if_seq;
+mod let_underscore;
+mod lifetimes;
+mod literal_representation;
+mod loops;
+mod macro_use;
+mod main_recursion;
+mod manual_assert;
+mod manual_async_fn;
+mod manual_bits;
+mod manual_non_exhaustive;
+mod manual_ok_or;
+mod manual_rem_euclid;
+mod manual_retain;
+mod manual_strip;
+mod map_clone;
+mod map_err_ignore;
+mod map_unit_fn;
+mod match_result_ok;
+mod matches;
+mod mem_forget;
+mod mem_replace;
+mod methods;
+mod minmax;
+mod misc;
+mod misc_early;
+mod mismatching_type_param_order;
+mod missing_const_for_fn;
+mod missing_doc;
+mod missing_enforced_import_rename;
+mod missing_inline;
+mod mixed_read_write_in_expression;
+mod module_style;
+mod mut_key;
+mod mut_mut;
+mod mut_mutex_lock;
+mod mut_reference;
+mod mutable_debug_assertion;
+mod mutex_atomic;
+mod needless_arbitrary_self_type;
+mod needless_bool;
+mod needless_borrowed_ref;
+mod needless_continue;
+mod needless_for_each;
+mod needless_late_init;
+mod needless_parens_on_range_literals;
+mod needless_pass_by_value;
+mod needless_question_mark;
+mod needless_update;
+mod neg_cmp_op_on_partial_ord;
+mod neg_multiply;
+mod new_without_default;
+mod no_effect;
+mod non_copy_const;
+mod non_expressive_names;
+mod non_octal_unix_permissions;
+mod non_send_fields_in_send_ty;
+mod nonstandard_macro_braces;
+mod octal_escapes;
+mod only_used_in_recursion;
+mod open_options;
+mod operators;
+mod option_env_unwrap;
+mod option_if_let_else;
+mod overflow_check_conditional;
+mod panic_in_result_fn;
+mod panic_unimplemented;
+mod partialeq_ne_impl;
+mod pass_by_ref_or_value;
+mod path_buf_push_overwrite;
+mod pattern_type_mismatch;
+mod precedence;
+mod ptr;
+mod ptr_offset_with_cast;
+mod pub_use;
+mod question_mark;
+mod ranges;
+mod rc_clone_in_vec_init;
+mod read_zero_byte_vec;
+mod redundant_clone;
+mod redundant_closure_call;
+mod redundant_else;
+mod redundant_field_names;
+mod redundant_pub_crate;
+mod redundant_slicing;
+mod redundant_static_lifetimes;
+mod ref_option_ref;
+mod reference;
+mod regex;
+mod repeat_once;
+mod return_self_not_must_use;
+mod returns;
+mod same_name_method;
+mod self_named_constructors;
+mod semicolon_if_nothing_returned;
+mod serde_api;
+mod shadow;
+mod single_char_lifetime_names;
+mod single_component_path_imports;
+mod size_of_in_element_count;
+mod slow_vector_initialization;
+mod stable_sort_primitive;
+mod std_instead_of_core;
+mod strings;
+mod strlen_on_c_strings;
+mod suspicious_operation_groupings;
+mod suspicious_trait_impl;
+mod swap;
+mod swap_ptr_to_ref;
+mod tabs_in_doc_comments;
+mod temporary_assignment;
+mod to_digit_is_some;
+mod trailing_empty_array;
+mod trait_bounds;
+mod transmute;
+mod transmuting_null;
+mod types;
+mod undocumented_unsafe_blocks;
+mod unicode;
+mod uninit_vec;
+mod unit_hash;
+mod unit_return_expecting_ord;
+mod unit_types;
+mod unnamed_address;
+mod unnecessary_owned_empty_strings;
+mod unnecessary_self_imports;
+mod unnecessary_sort_by;
+mod unnecessary_wraps;
+mod unnested_or_patterns;
+mod unsafe_removed_from_name;
+mod unused_async;
+mod unused_io_amount;
+mod unused_rounding;
+mod unused_self;
+mod unused_unit;
+mod unwrap;
+mod unwrap_in_result;
+mod upper_case_acronyms;
+mod use_self;
+mod useless_conversion;
+mod vec;
+mod vec_init_then_push;
+mod vec_resize_to_zero;
+mod verbose_file_reads;
+mod wildcard_imports;
+mod write;
+mod zero_div_zero;
+mod zero_sized_map_values;
+// end lints modules, do not remove this comment, it's used in `update_lints`
+
+pub use crate::utils::conf::Conf;
+use crate::utils::conf::{format_error, TryConf};
+
+/// Register all pre expansion lints
+///
+/// Pre-expansion lints run before any macro expansion has happened.
+///
+/// Note that due to the architecture of the compiler, currently `cfg_attr` attributes on crate
+/// level (i.e. `#![cfg_attr(...)]`) will still be expanded even when using a pre-expansion pass.
+///
+/// Used in `./src/driver.rs`.
+pub fn register_pre_expansion_lints(store: &mut rustc_lint::LintStore, sess: &Session, conf: &Conf) {
+ // NOTE: Do not add any more pre-expansion passes. These should be removed eventually.
+
+ let msrv = conf.msrv.as_ref().and_then(|s| {
+ parse_msrv(s, None, None).or_else(|| {
+ sess.err(&format!(
+ "error reading Clippy's configuration file. `{}` is not a valid Rust version",
+ s
+ ));
+ None
+ })
+ });
+
+ store.register_pre_expansion_pass(|| Box::new(write::Write::default()));
+ store.register_pre_expansion_pass(move || Box::new(attrs::EarlyAttributes { msrv }));
+}
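+// Illustration of the caveat documented above (hypothetical crate attribute, not upstream
+// code): even though `write::Write` and `attrs::EarlyAttributes` are pre-expansion passes,
+// a crate-level attribute such as
+//
+//     #![cfg_attr(feature = "nightly", feature(never_type))]
+//
+// is still expanded, so these passes do not observe the original `cfg_attr` form.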
+
+fn read_msrv(conf: &Conf, sess: &Session) -> Option<RustcVersion> {
+ let cargo_msrv = std::env::var("CARGO_PKG_RUST_VERSION")
+ .ok()
+ .and_then(|v| parse_msrv(&v, None, None));
+ let clippy_msrv = conf.msrv.as_ref().and_then(|s| {
+ parse_msrv(s, None, None).or_else(|| {
+ sess.err(&format!(
+ "error reading Clippy's configuration file. `{}` is not a valid Rust version",
+ s
+ ));
+ None
+ })
+ });
+
+ if let Some(cargo_msrv) = cargo_msrv {
+ if let Some(clippy_msrv) = clippy_msrv {
+ // if both files have an msrv, let's compare them and emit a warning if they differ
+ if clippy_msrv != cargo_msrv {
+ sess.warn(&format!(
+ "the MSRV in `clippy.toml` and `Cargo.toml` differ; using `{}` from `clippy.toml`",
+ clippy_msrv
+ ));
+ }
+
+ Some(clippy_msrv)
+ } else {
+ Some(cargo_msrv)
+ }
+ } else {
+ clippy_msrv
+ }
+}
+
+#[doc(hidden)]
+pub fn read_conf(sess: &Session) -> Conf {
+ let file_name = match utils::conf::lookup_conf_file() {
+ Ok(Some(path)) => path,
+ Ok(None) => return Conf::default(),
+ Err(error) => {
+ sess.struct_err(&format!("error finding Clippy's configuration file: {}", error))
+ .emit();
+ return Conf::default();
+ },
+ };
+
+ let TryConf { conf, errors } = utils::conf::read(&file_name);
+ // all conf errors are non-fatal, we just use the default conf in case of error
+ for error in errors {
+ sess.err(&format!(
+ "error reading Clippy's configuration file `{}`: {}",
+ file_name.display(),
+ format_error(error)
+ ));
+ }
+
+ conf
+}
+
+/// Register all lints and lint groups with the rustc plugin registry
+///
+/// Used in `./src/driver.rs`.
+#[expect(clippy::too_many_lines)]
+pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: &Conf) {
+ register_removed_non_tool_lints(store);
+
+ include!("lib.deprecated.rs");
+
+ include!("lib.register_lints.rs");
+ include!("lib.register_restriction.rs");
+ include!("lib.register_pedantic.rs");
+
+ #[cfg(feature = "internal")]
+ include!("lib.register_internal.rs");
+
+ include!("lib.register_all.rs");
+ include!("lib.register_style.rs");
+ include!("lib.register_complexity.rs");
+ include!("lib.register_correctness.rs");
+ include!("lib.register_suspicious.rs");
+ include!("lib.register_perf.rs");
+ include!("lib.register_cargo.rs");
+ include!("lib.register_nursery.rs");
+
+ #[cfg(feature = "internal")]
+ {
+ if std::env::var("ENABLE_METADATA_COLLECTION").eq(&Ok("1".to_string())) {
+ store.register_late_pass(|| Box::new(utils::internal_lints::metadata_collector::MetadataCollector::new()));
+ return;
+ }
+ }
+
+ // all the internal lints
+ #[cfg(feature = "internal")]
+ {
+ store.register_early_pass(|| Box::new(utils::internal_lints::ClippyLintsInternal));
+ store.register_early_pass(|| Box::new(utils::internal_lints::ProduceIce));
+ store.register_late_pass(|| Box::new(utils::internal_lints::CollapsibleCalls));
+ store.register_late_pass(|| Box::new(utils::internal_lints::CompilerLintFunctions::new()));
+ store.register_late_pass(|| Box::new(utils::internal_lints::IfChainStyle));
+ store.register_late_pass(|| Box::new(utils::internal_lints::InvalidPaths));
+ store.register_late_pass(|| Box::new(utils::internal_lints::InterningDefinedSymbol::default()));
+ store.register_late_pass(|| Box::new(utils::internal_lints::LintWithoutLintPass::default()));
+ store.register_late_pass(|| Box::new(utils::internal_lints::MatchTypeOnDiagItem));
+ store.register_late_pass(|| Box::new(utils::internal_lints::OuterExpnDataPass));
+ store.register_late_pass(|| Box::new(utils::internal_lints::MsrvAttrImpl));
+ }
+
+ let arithmetic_allowed = conf.arithmetic_allowed.clone();
+ store.register_late_pass(move || Box::new(operators::arithmetic::Arithmetic::new(arithmetic_allowed.clone())));
+ store.register_late_pass(|| Box::new(utils::dump_hir::DumpHir));
+ store.register_late_pass(|| Box::new(utils::author::Author));
+ let await_holding_invalid_types = conf.await_holding_invalid_types.clone();
+ store.register_late_pass(move || {
+ Box::new(await_holding_invalid::AwaitHolding::new(
+ await_holding_invalid_types.clone(),
+ ))
+ });
+ store.register_late_pass(|| Box::new(serde_api::SerdeApi));
+ let vec_box_size_threshold = conf.vec_box_size_threshold;
+ let type_complexity_threshold = conf.type_complexity_threshold;
+ let avoid_breaking_exported_api = conf.avoid_breaking_exported_api;
+ store.register_late_pass(move || {
+ Box::new(types::Types::new(
+ vec_box_size_threshold,
+ type_complexity_threshold,
+ avoid_breaking_exported_api,
+ ))
+ });
+ store.register_late_pass(|| Box::new(booleans::NonminimalBool));
+ store.register_late_pass(|| Box::new(enum_clike::UnportableVariant));
+ store.register_late_pass(|| Box::new(float_literal::FloatLiteral));
+ store.register_late_pass(|| Box::new(ptr::Ptr));
+ store.register_late_pass(|| Box::new(needless_bool::NeedlessBool));
+ store.register_late_pass(|| Box::new(needless_bool::BoolComparison));
+ store.register_late_pass(|| Box::new(needless_for_each::NeedlessForEach));
+ store.register_late_pass(|| Box::new(misc::MiscLints));
+ store.register_late_pass(|| Box::new(eta_reduction::EtaReduction));
+ store.register_late_pass(|| Box::new(mut_mut::MutMut));
+ store.register_late_pass(|| Box::new(mut_reference::UnnecessaryMutPassed));
+ store.register_late_pass(|| Box::new(len_zero::LenZero));
+ store.register_late_pass(|| Box::new(attrs::Attributes));
+ store.register_late_pass(|| Box::new(blocks_in_if_conditions::BlocksInIfConditions));
+ store.register_late_pass(|| Box::new(unicode::Unicode));
+ store.register_late_pass(|| Box::new(uninit_vec::UninitVec));
+ store.register_late_pass(|| Box::new(unit_hash::UnitHash));
+ store.register_late_pass(|| Box::new(unit_return_expecting_ord::UnitReturnExpectingOrd));
+ store.register_late_pass(|| Box::new(strings::StringAdd));
+ store.register_late_pass(|| Box::new(implicit_return::ImplicitReturn));
+ store.register_late_pass(|| Box::new(implicit_saturating_sub::ImplicitSaturatingSub));
+ store.register_late_pass(|| Box::new(default_numeric_fallback::DefaultNumericFallback));
+ store.register_late_pass(|| Box::new(inconsistent_struct_constructor::InconsistentStructConstructor));
+ store.register_late_pass(|| Box::new(non_octal_unix_permissions::NonOctalUnixPermissions));
+ store.register_early_pass(|| Box::new(unnecessary_self_imports::UnnecessarySelfImports));
+
+ let msrv = read_msrv(conf, sess);
+ let avoid_breaking_exported_api = conf.avoid_breaking_exported_api;
+ let allow_expect_in_tests = conf.allow_expect_in_tests;
+ let allow_unwrap_in_tests = conf.allow_unwrap_in_tests;
+ store.register_late_pass(move || Box::new(approx_const::ApproxConstant::new(msrv)));
+ store.register_late_pass(move || {
+ Box::new(methods::Methods::new(
+ avoid_breaking_exported_api,
+ msrv,
+ allow_expect_in_tests,
+ allow_unwrap_in_tests,
+ ))
+ });
+ store.register_late_pass(move || Box::new(matches::Matches::new(msrv)));
+ store.register_early_pass(move || Box::new(manual_non_exhaustive::ManualNonExhaustiveStruct::new(msrv)));
+ store.register_late_pass(move || Box::new(manual_non_exhaustive::ManualNonExhaustiveEnum::new(msrv)));
+ store.register_late_pass(move || Box::new(manual_strip::ManualStrip::new(msrv)));
+ store.register_early_pass(move || Box::new(redundant_static_lifetimes::RedundantStaticLifetimes::new(msrv)));
+ store.register_early_pass(move || Box::new(redundant_field_names::RedundantFieldNames::new(msrv)));
+ store.register_late_pass(move || Box::new(checked_conversions::CheckedConversions::new(msrv)));
+ store.register_late_pass(move || Box::new(mem_replace::MemReplace::new(msrv)));
+ store.register_late_pass(move || Box::new(ranges::Ranges::new(msrv)));
+ store.register_late_pass(move || Box::new(from_over_into::FromOverInto::new(msrv)));
+ store.register_late_pass(move || Box::new(use_self::UseSelf::new(msrv)));
+ store.register_late_pass(move || Box::new(missing_const_for_fn::MissingConstForFn::new(msrv)));
+ store.register_late_pass(move || Box::new(needless_question_mark::NeedlessQuestionMark));
+ store.register_late_pass(move || Box::new(casts::Casts::new(msrv)));
+ store.register_early_pass(move || Box::new(unnested_or_patterns::UnnestedOrPatterns::new(msrv)));
+ store.register_late_pass(move || Box::new(map_clone::MapClone::new(msrv)));
+
+ store.register_late_pass(|| Box::new(size_of_in_element_count::SizeOfInElementCount));
+ store.register_late_pass(|| Box::new(same_name_method::SameNameMethod));
+ let max_suggested_slice_pattern_length = conf.max_suggested_slice_pattern_length;
+ store.register_late_pass(move || {
+ Box::new(index_refutable_slice::IndexRefutableSlice::new(
+ max_suggested_slice_pattern_length,
+ msrv,
+ ))
+ });
+ store.register_late_pass(|| Box::new(map_err_ignore::MapErrIgnore));
+ store.register_late_pass(|| Box::new(shadow::Shadow::default()));
+ store.register_late_pass(|| Box::new(unit_types::UnitTypes));
+ store.register_late_pass(|| Box::new(loops::Loops));
+ store.register_late_pass(|| Box::new(main_recursion::MainRecursion::default()));
+ store.register_late_pass(|| Box::new(lifetimes::Lifetimes));
+ store.register_late_pass(|| Box::new(entry::HashMapPass));
+ store.register_late_pass(|| Box::new(minmax::MinMaxPass));
+ store.register_late_pass(|| Box::new(open_options::OpenOptions));
+ store.register_late_pass(|| Box::new(zero_div_zero::ZeroDiv));
+ store.register_late_pass(|| Box::new(mutex_atomic::Mutex));
+ store.register_late_pass(|| Box::new(needless_update::NeedlessUpdate));
+ store.register_late_pass(|| Box::new(needless_borrowed_ref::NeedlessBorrowedRef));
+ store.register_late_pass(|| Box::new(borrow_deref_ref::BorrowDerefRef));
+ store.register_late_pass(|| Box::new(no_effect::NoEffect));
+ store.register_late_pass(|| Box::new(temporary_assignment::TemporaryAssignment));
+ store.register_late_pass(move || Box::new(transmute::Transmute::new(msrv)));
+ let cognitive_complexity_threshold = conf.cognitive_complexity_threshold;
+ store.register_late_pass(move || {
+ Box::new(cognitive_complexity::CognitiveComplexity::new(
+ cognitive_complexity_threshold,
+ ))
+ });
+ let too_large_for_stack = conf.too_large_for_stack;
+ store.register_late_pass(move || Box::new(escape::BoxedLocal { too_large_for_stack }));
+ store.register_late_pass(move || Box::new(vec::UselessVec { too_large_for_stack }));
+ store.register_late_pass(|| Box::new(panic_unimplemented::PanicUnimplemented));
+ store.register_late_pass(|| Box::new(strings::StringLitAsBytes));
+ store.register_late_pass(|| Box::new(derive::Derive));
+ store.register_late_pass(|| Box::new(derivable_impls::DerivableImpls));
+ store.register_late_pass(|| Box::new(drop_forget_ref::DropForgetRef));
+ store.register_late_pass(|| Box::new(empty_enum::EmptyEnum));
+ store.register_late_pass(|| Box::new(invalid_upcast_comparisons::InvalidUpcastComparisons));
+ store.register_late_pass(|| Box::new(regex::Regex));
+ store.register_late_pass(|| Box::new(copies::CopyAndPaste));
+ store.register_late_pass(|| Box::new(copy_iterator::CopyIterator));
+ store.register_late_pass(|| Box::new(format::UselessFormat));
+ store.register_late_pass(|| Box::new(swap::Swap));
+ store.register_late_pass(|| Box::new(overflow_check_conditional::OverflowCheckConditional));
+ store.register_late_pass(|| Box::new(new_without_default::NewWithoutDefault::default()));
+ let blacklisted_names = conf.blacklisted_names.iter().cloned().collect::<FxHashSet<_>>();
+ store.register_late_pass(move || Box::new(blacklisted_name::BlacklistedName::new(blacklisted_names.clone())));
+ let too_many_arguments_threshold = conf.too_many_arguments_threshold;
+ let too_many_lines_threshold = conf.too_many_lines_threshold;
+ store.register_late_pass(move || {
+ Box::new(functions::Functions::new(
+ too_many_arguments_threshold,
+ too_many_lines_threshold,
+ ))
+ });
+ let doc_valid_idents = conf.doc_valid_idents.iter().cloned().collect::<FxHashSet<_>>();
+ store.register_late_pass(move || Box::new(doc::DocMarkdown::new(doc_valid_idents.clone())));
+ store.register_late_pass(|| Box::new(neg_multiply::NegMultiply));
+ store.register_late_pass(|| Box::new(mem_forget::MemForget));
+ store.register_late_pass(|| Box::new(let_if_seq::LetIfSeq));
+ store.register_late_pass(|| Box::new(mixed_read_write_in_expression::EvalOrderDependence));
+ store.register_late_pass(|| Box::new(missing_doc::MissingDoc::new()));
+ store.register_late_pass(|| Box::new(missing_inline::MissingInline));
+ store.register_late_pass(move || Box::new(exhaustive_items::ExhaustiveItems));
+ store.register_late_pass(|| Box::new(match_result_ok::MatchResultOk));
+ store.register_late_pass(|| Box::new(partialeq_ne_impl::PartialEqNeImpl));
+ store.register_late_pass(|| Box::new(unused_io_amount::UnusedIoAmount));
+ let enum_variant_size_threshold = conf.enum_variant_size_threshold;
+ store.register_late_pass(move || Box::new(large_enum_variant::LargeEnumVariant::new(enum_variant_size_threshold)));
+ store.register_late_pass(|| Box::new(explicit_write::ExplicitWrite));
+ store.register_late_pass(|| Box::new(needless_pass_by_value::NeedlessPassByValue));
+ let pass_by_ref_or_value = pass_by_ref_or_value::PassByRefOrValue::new(
+ conf.trivial_copy_size_limit,
+ conf.pass_by_value_size_limit,
+ conf.avoid_breaking_exported_api,
+ &sess.target,
+ );
+ store.register_late_pass(move || Box::new(pass_by_ref_or_value));
+ store.register_late_pass(|| Box::new(ref_option_ref::RefOptionRef));
+ store.register_late_pass(|| Box::new(bytecount::ByteCount));
+ store.register_late_pass(|| Box::new(infinite_iter::InfiniteIter));
+ store.register_late_pass(|| Box::new(inline_fn_without_body::InlineFnWithoutBody));
+ store.register_late_pass(|| Box::new(useless_conversion::UselessConversion::default()));
+ store.register_late_pass(|| Box::new(implicit_hasher::ImplicitHasher));
+ store.register_late_pass(|| Box::new(fallible_impl_from::FallibleImplFrom));
+ store.register_late_pass(|| Box::new(question_mark::QuestionMark));
+ store.register_early_pass(|| Box::new(suspicious_operation_groupings::SuspiciousOperationGroupings));
+ store.register_late_pass(|| Box::new(suspicious_trait_impl::SuspiciousImpl));
+ store.register_late_pass(|| Box::new(map_unit_fn::MapUnit));
+ store.register_late_pass(|| Box::new(inherent_impl::MultipleInherentImpl));
+ store.register_late_pass(|| Box::new(neg_cmp_op_on_partial_ord::NoNegCompOpForPartialOrd));
+ store.register_late_pass(|| Box::new(unwrap::Unwrap));
+ store.register_late_pass(|| Box::new(indexing_slicing::IndexingSlicing));
+ store.register_late_pass(|| Box::new(non_copy_const::NonCopyConst));
+ store.register_late_pass(|| Box::new(ptr_offset_with_cast::PtrOffsetWithCast));
+ store.register_late_pass(|| Box::new(redundant_clone::RedundantClone));
+ store.register_late_pass(|| Box::new(slow_vector_initialization::SlowVectorInit));
+ store.register_late_pass(|| Box::new(unnecessary_sort_by::UnnecessarySortBy));
+ store.register_late_pass(move || Box::new(unnecessary_wraps::UnnecessaryWraps::new(avoid_breaking_exported_api)));
+ store.register_late_pass(|| Box::new(assertions_on_constants::AssertionsOnConstants));
+ store.register_late_pass(|| Box::new(assertions_on_result_states::AssertionsOnResultStates));
+ store.register_late_pass(|| Box::new(transmuting_null::TransmutingNull));
+ store.register_late_pass(|| Box::new(path_buf_push_overwrite::PathBufPushOverwrite));
+ store.register_late_pass(|| Box::new(inherent_to_string::InherentToString));
+ let max_trait_bounds = conf.max_trait_bounds;
+ store.register_late_pass(move || Box::new(trait_bounds::TraitBounds::new(max_trait_bounds)));
+ store.register_late_pass(|| Box::new(comparison_chain::ComparisonChain));
+ store.register_late_pass(|| Box::new(mut_key::MutableKeyType));
+ store.register_early_pass(|| Box::new(reference::DerefAddrOf));
+ store.register_early_pass(|| Box::new(double_parens::DoubleParens));
+ store.register_late_pass(|| Box::new(format_impl::FormatImpl::new()));
+ store.register_early_pass(|| Box::new(unsafe_removed_from_name::UnsafeNameRemoval));
+ store.register_early_pass(|| Box::new(else_if_without_else::ElseIfWithoutElse));
+ store.register_early_pass(|| Box::new(int_plus_one::IntPlusOne));
+ store.register_early_pass(|| Box::new(formatting::Formatting));
+ store.register_early_pass(|| Box::new(misc_early::MiscEarlyLints));
+ store.register_early_pass(|| Box::new(redundant_closure_call::RedundantClosureCall));
+ store.register_late_pass(|| Box::new(redundant_closure_call::RedundantClosureCall));
+ store.register_early_pass(|| Box::new(unused_unit::UnusedUnit));
+ store.register_late_pass(|| Box::new(returns::Return));
+ store.register_early_pass(|| Box::new(collapsible_if::CollapsibleIf));
+ store.register_early_pass(|| Box::new(items_after_statements::ItemsAfterStatements));
+ store.register_early_pass(|| Box::new(precedence::Precedence));
+ store.register_late_pass(|| Box::new(needless_parens_on_range_literals::NeedlessParensOnRangeLiterals));
+ store.register_early_pass(|| Box::new(needless_continue::NeedlessContinue));
+ store.register_early_pass(|| Box::new(redundant_else::RedundantElse));
+ store.register_late_pass(|| Box::new(create_dir::CreateDir));
+ store.register_early_pass(|| Box::new(needless_arbitrary_self_type::NeedlessArbitrarySelfType));
+ let literal_representation_lint_fraction_readability = conf.unreadable_literal_lint_fractions;
+ store.register_early_pass(move || {
+ Box::new(literal_representation::LiteralDigitGrouping::new(
+ literal_representation_lint_fraction_readability,
+ ))
+ });
+ let literal_representation_threshold = conf.literal_representation_threshold;
+ store.register_early_pass(move || {
+ Box::new(literal_representation::DecimalLiteralRepresentation::new(
+ literal_representation_threshold,
+ ))
+ });
+ let enum_variant_name_threshold = conf.enum_variant_name_threshold;
+ store.register_late_pass(move || {
+ Box::new(enum_variants::EnumVariantNames::new(
+ enum_variant_name_threshold,
+ avoid_breaking_exported_api,
+ ))
+ });
+ store.register_early_pass(|| Box::new(tabs_in_doc_comments::TabsInDocComments));
+ let upper_case_acronyms_aggressive = conf.upper_case_acronyms_aggressive;
+ store.register_late_pass(move || {
+ Box::new(upper_case_acronyms::UpperCaseAcronyms::new(
+ avoid_breaking_exported_api,
+ upper_case_acronyms_aggressive,
+ ))
+ });
+ store.register_late_pass(|| Box::new(default::Default::default()));
+ store.register_late_pass(move || Box::new(unused_self::UnusedSelf::new(avoid_breaking_exported_api)));
+ store.register_late_pass(|| Box::new(mutable_debug_assertion::DebugAssertWithMutCall));
+ store.register_late_pass(|| Box::new(exit::Exit));
+ store.register_late_pass(|| Box::new(to_digit_is_some::ToDigitIsSome));
+ let array_size_threshold = conf.array_size_threshold;
+ store.register_late_pass(move || Box::new(large_stack_arrays::LargeStackArrays::new(array_size_threshold)));
+ store.register_late_pass(move || Box::new(large_const_arrays::LargeConstArrays::new(array_size_threshold)));
+ store.register_late_pass(|| Box::new(floating_point_arithmetic::FloatingPointArithmetic));
+ store.register_early_pass(|| Box::new(as_conversions::AsConversions));
+ store.register_late_pass(|| Box::new(let_underscore::LetUnderscore));
+ store.register_early_pass(|| Box::new(single_component_path_imports::SingleComponentPathImports));
+ let max_fn_params_bools = conf.max_fn_params_bools;
+ let max_struct_bools = conf.max_struct_bools;
+ store.register_early_pass(move || {
+ Box::new(excessive_bools::ExcessiveBools::new(
+ max_struct_bools,
+ max_fn_params_bools,
+ ))
+ });
+ store.register_early_pass(|| Box::new(option_env_unwrap::OptionEnvUnwrap));
+ let warn_on_all_wildcard_imports = conf.warn_on_all_wildcard_imports;
+ store.register_late_pass(move || Box::new(wildcard_imports::WildcardImports::new(warn_on_all_wildcard_imports)));
+ store.register_late_pass(|| Box::new(verbose_file_reads::VerboseFileReads));
+ store.register_late_pass(|| Box::new(redundant_pub_crate::RedundantPubCrate::default()));
+ store.register_late_pass(|| Box::new(unnamed_address::UnnamedAddress));
+ store.register_late_pass(|| Box::new(dereference::Dereferencing::default()));
+ store.register_late_pass(|| Box::new(option_if_let_else::OptionIfLetElse));
+ store.register_late_pass(|| Box::new(future_not_send::FutureNotSend));
+ store.register_late_pass(|| Box::new(if_let_mutex::IfLetMutex));
+ store.register_late_pass(|| Box::new(if_not_else::IfNotElse));
+ store.register_late_pass(|| Box::new(equatable_if_let::PatternEquality));
+ store.register_late_pass(|| Box::new(mut_mutex_lock::MutMutexLock));
+ store.register_late_pass(|| Box::new(manual_async_fn::ManualAsyncFn));
+ store.register_late_pass(|| Box::new(vec_resize_to_zero::VecResizeToZero));
+ store.register_late_pass(|| Box::new(panic_in_result_fn::PanicInResultFn));
+ let single_char_binding_names_threshold = conf.single_char_binding_names_threshold;
+ store.register_early_pass(move || {
+ Box::new(non_expressive_names::NonExpressiveNames {
+ single_char_binding_names_threshold,
+ })
+ });
+ let macro_matcher = conf.standard_macro_braces.iter().cloned().collect::<FxHashSet<_>>();
+ store.register_early_pass(move || Box::new(nonstandard_macro_braces::MacroBraces::new(&macro_matcher)));
+ store.register_late_pass(|| Box::new(macro_use::MacroUseImports::default()));
+ store.register_late_pass(|| Box::new(pattern_type_mismatch::PatternTypeMismatch));
+ store.register_late_pass(|| Box::new(stable_sort_primitive::StableSortPrimitive));
+ store.register_late_pass(|| Box::new(repeat_once::RepeatOnce));
+ store.register_late_pass(|| Box::new(unwrap_in_result::UnwrapInResult));
+ store.register_late_pass(|| Box::new(manual_ok_or::ManualOkOr));
+ store.register_late_pass(|| Box::new(semicolon_if_nothing_returned::SemicolonIfNothingReturned));
+ store.register_late_pass(|| Box::new(async_yields_async::AsyncYieldsAsync));
+ let disallowed_methods = conf.disallowed_methods.clone();
+ store.register_late_pass(move || Box::new(disallowed_methods::DisallowedMethods::new(disallowed_methods.clone())));
+ store.register_early_pass(|| Box::new(asm_syntax::InlineAsmX86AttSyntax));
+ store.register_early_pass(|| Box::new(asm_syntax::InlineAsmX86IntelSyntax));
+ store.register_late_pass(|| Box::new(empty_drop::EmptyDrop));
+ store.register_late_pass(|| Box::new(strings::StrToString));
+ store.register_late_pass(|| Box::new(strings::StringToString));
+ store.register_late_pass(|| Box::new(zero_sized_map_values::ZeroSizedMapValues));
+ store.register_late_pass(|| Box::new(vec_init_then_push::VecInitThenPush::default()));
+ store.register_late_pass(|| {
+ Box::new(case_sensitive_file_extension_comparisons::CaseSensitiveFileExtensionComparisons)
+ });
+ store.register_late_pass(|| Box::new(redundant_slicing::RedundantSlicing));
+ store.register_late_pass(|| Box::new(from_str_radix_10::FromStrRadix10));
+ store.register_late_pass(move || Box::new(if_then_some_else_none::IfThenSomeElseNone::new(msrv)));
+ store.register_late_pass(|| Box::new(bool_assert_comparison::BoolAssertComparison));
+ store.register_early_pass(move || Box::new(module_style::ModStyle));
+ store.register_late_pass(|| Box::new(unused_async::UnusedAsync));
+ let disallowed_types = conf.disallowed_types.clone();
+ store.register_late_pass(move || Box::new(disallowed_types::DisallowedTypes::new(disallowed_types.clone())));
+ let import_renames = conf.enforced_import_renames.clone();
+ store.register_late_pass(move || {
+ Box::new(missing_enforced_import_rename::ImportRename::new(
+ import_renames.clone(),
+ ))
+ });
+ let scripts = conf.allowed_scripts.clone();
+ store.register_early_pass(move || Box::new(disallowed_script_idents::DisallowedScriptIdents::new(&scripts)));
+ store.register_late_pass(|| Box::new(strlen_on_c_strings::StrlenOnCStrings));
+ store.register_late_pass(move || Box::new(self_named_constructors::SelfNamedConstructors));
+ store.register_late_pass(move || Box::new(iter_not_returning_iterator::IterNotReturningIterator));
+ store.register_late_pass(move || Box::new(manual_assert::ManualAssert));
+ let enable_raw_pointer_heuristic_for_send = conf.enable_raw_pointer_heuristic_for_send;
+ store.register_late_pass(move || {
+ Box::new(non_send_fields_in_send_ty::NonSendFieldInSendTy::new(
+ enable_raw_pointer_heuristic_for_send,
+ ))
+ });
+ store.register_late_pass(move || Box::new(undocumented_unsafe_blocks::UndocumentedUnsafeBlocks));
+ store.register_late_pass(move || Box::new(format_args::FormatArgs));
+ store.register_late_pass(|| Box::new(trailing_empty_array::TrailingEmptyArray));
+ store.register_early_pass(|| Box::new(octal_escapes::OctalEscapes));
+ store.register_late_pass(|| Box::new(needless_late_init::NeedlessLateInit));
+ store.register_late_pass(|| Box::new(return_self_not_must_use::ReturnSelfNotMustUse));
+ store.register_late_pass(|| Box::new(init_numbered_fields::NumberedFields));
+ store.register_early_pass(|| Box::new(single_char_lifetime_names::SingleCharLifetimeNames));
+ store.register_late_pass(move || Box::new(borrow_as_ptr::BorrowAsPtr::new(msrv)));
+ store.register_late_pass(move || Box::new(manual_bits::ManualBits::new(msrv)));
+ store.register_late_pass(|| Box::new(default_union_representation::DefaultUnionRepresentation));
+ store.register_early_pass(|| Box::new(doc_link_with_quotes::DocLinkWithQuotes));
+ store.register_late_pass(|| Box::new(only_used_in_recursion::OnlyUsedInRecursion));
+ let allow_dbg_in_tests = conf.allow_dbg_in_tests;
+ store.register_late_pass(move || Box::new(dbg_macro::DbgMacro::new(allow_dbg_in_tests)));
+ let cargo_ignore_publish = conf.cargo_ignore_publish;
+ store.register_late_pass(move || {
+ Box::new(cargo::Cargo {
+ ignore_publish: cargo_ignore_publish,
+ })
+ });
+ store.register_early_pass(|| Box::new(crate_in_macro_def::CrateInMacroDef));
+ store.register_early_pass(|| Box::new(empty_structs_with_brackets::EmptyStructsWithBrackets));
+ store.register_late_pass(|| Box::new(unnecessary_owned_empty_strings::UnnecessaryOwnedEmptyStrings));
+ store.register_early_pass(|| Box::new(pub_use::PubUse));
+ store.register_late_pass(|| Box::new(format_push_string::FormatPushString));
+ store.register_late_pass(|| Box::new(bytes_count_to_len::BytesCountToLen));
+ let max_include_file_size = conf.max_include_file_size;
+ store.register_late_pass(move || Box::new(large_include_file::LargeIncludeFile::new(max_include_file_size)));
+ store.register_late_pass(|| Box::new(strings::TrimSplitWhitespace));
+ store.register_late_pass(|| Box::new(rc_clone_in_vec_init::RcCloneInVecInit));
+ store.register_early_pass(|| Box::new(duplicate_mod::DuplicateMod::default()));
+ store.register_late_pass(|| Box::new(get_first::GetFirst));
+ store.register_early_pass(|| Box::new(unused_rounding::UnusedRounding));
+ store.register_early_pass(move || Box::new(almost_complete_letter_range::AlmostCompleteLetterRange::new(msrv)));
+ store.register_late_pass(|| Box::new(swap_ptr_to_ref::SwapPtrToRef));
+ store.register_late_pass(|| Box::new(mismatching_type_param_order::TypeParamMismatch));
+ store.register_late_pass(|| Box::new(as_underscore::AsUnderscore));
+ store.register_late_pass(|| Box::new(read_zero_byte_vec::ReadZeroByteVec));
+ store.register_late_pass(|| Box::new(default_instead_of_iter_empty::DefaultIterEmpty));
+ store.register_late_pass(move || Box::new(manual_rem_euclid::ManualRemEuclid::new(msrv)));
+ store.register_late_pass(move || Box::new(manual_retain::ManualRetain::new(msrv)));
+ let verbose_bit_mask_threshold = conf.verbose_bit_mask_threshold;
+ store.register_late_pass(move || Box::new(operators::Operators::new(verbose_bit_mask_threshold)));
+ store.register_late_pass(|| Box::new(invalid_utf8_in_unchecked::InvalidUtf8InUnchecked));
+ store.register_late_pass(|| Box::new(std_instead_of_core::StdReexports::default()));
+ // add lints here, do not remove this comment, it's used in `new_lint`
+}
+
+#[rustfmt::skip]
+fn register_removed_non_tool_lints(store: &mut rustc_lint::LintStore) {
+ store.register_removed(
+ "should_assert_eq",
+ "`assert!()` will be more flexible with RFC 2011",
+ );
+ store.register_removed(
+ "extend_from_slice",
+ "`.extend_from_slice(_)` is a faster way to extend a Vec by a slice",
+ );
+ store.register_removed(
+ "range_step_by_zero",
+ "`iterator.step_by(0)` panics nowadays",
+ );
+ store.register_removed(
+ "unstable_as_slice",
+ "`Vec::as_slice` has been stabilized in 1.7",
+ );
+ store.register_removed(
+ "unstable_as_mut_slice",
+ "`Vec::as_mut_slice` has been stabilized in 1.7",
+ );
+ store.register_removed(
+ "misaligned_transmute",
+ "this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr",
+ );
+ store.register_removed(
+ "assign_ops",
+ "using compound assignment operators (e.g., `+=`) is harmless",
+ );
+ store.register_removed(
+ "if_let_redundant_pattern_matching",
+ "this lint has been changed to redundant_pattern_matching",
+ );
+ store.register_removed(
+ "unsafe_vector_initialization",
+ "the replacement suggested by this lint had substantially different behavior",
+ );
+ store.register_removed(
+ "reverse_range_loop",
+ "this lint is now included in reversed_empty_ranges",
+ );
+}
+
+/// Register renamed lints.
+///
+/// Used in `./src/driver.rs`.
+pub fn register_renamed(ls: &mut rustc_lint::LintStore) {
+ for (old_name, new_name) in renamed_lints::RENAMED_LINTS {
+ ls.register_renamed(old_name, new_name);
+ }
+}
+
+// only exists to let the dogfood integration test work.
+// Don't run clippy as an executable directly.
+#[allow(dead_code)]
+fn main() {
+ panic!("Please use the cargo-clippy executable");
+}
diff --git a/src/tools/clippy/clippy_lints/src/lifetimes.rs b/src/tools/clippy/clippy_lints/src/lifetimes.rs
new file mode 100644
index 000000000..573a7c016
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/lifetimes.rs
@@ -0,0 +1,620 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::trait_ref_of_method;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::intravisit::nested_filter::{self as hir_nested_filter, NestedFilter};
+use rustc_hir::intravisit::{
+ walk_fn_decl, walk_generic_param, walk_generics, walk_impl_item_ref, walk_item, walk_param_bound,
+ walk_poly_trait_ref, walk_trait_ref, walk_ty, Visitor,
+};
+use rustc_hir::FnRetTy::Return;
+use rustc_hir::{
+ BareFnTy, BodyId, FnDecl, GenericArg, GenericBound, GenericParam, GenericParamKind, Generics, Impl, ImplItem,
+ ImplItemKind, Item, ItemKind, LangItem, Lifetime, LifetimeName, ParamName, PolyTraitRef, PredicateOrigin,
+ TraitBoundModifier, TraitFn, TraitItem, TraitItemKind, Ty, TyKind, WherePredicate,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter as middle_nested_filter;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::def_id::LocalDefId;
+use rustc_span::source_map::Span;
+use rustc_span::symbol::{kw, Ident, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for lifetime annotations which can be removed by
+ /// relying on lifetime elision.
+ ///
+ /// ### Why is this bad?
+ /// The additional lifetimes make the code look more
+ /// complicated, while there is nothing out of the ordinary going on. Removing
+ /// them leads to more readable code.
+ ///
+ /// ### Known problems
+ /// - We bail out if the function has a `where` clause where lifetimes
+ /// are mentioned due to potential false positives.
+ /// - Lifetime bounds such as `impl Foo + 'a` and `T: 'a` must be elided with the
+ /// placeholder notation `'_` because the fully elided notation leaves the type bound to `'static`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // Unnecessary lifetime annotations
+ /// fn in_and_out<'a>(x: &'a u8, y: u8) -> &'a u8 {
+ /// x
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn elided(x: &u8, y: u8) -> &u8 {
+ /// x
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_LIFETIMES,
+ complexity,
+ "using explicit lifetimes for references in function arguments when elision rules \
+ would allow omitting them"
+}
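+// Illustrative sketch of the `'_` placeholder mentioned in "Known problems" above
+// (hypothetical function, not taken from this lint's tests):
+//
+//     fn chars<'a>(s: &'a str) -> impl Iterator<Item = char> + 'a { s.chars() }
+//
+// can only be reduced to
+//
+//     fn chars(s: &str) -> impl Iterator<Item = char> + '_ { s.chars() }
+//
+// because dropping the lifetime bound entirely would tie the returned type to `'static`.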
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for lifetimes in generics that are never used
+ /// anywhere else.
+ ///
+ /// ### Why is this bad?
+ /// The additional lifetimes make the code look more
+ /// complicated, while there is nothing out of the ordinary going on. Removing
+ /// them leads to more readable code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // unnecessary lifetimes
+ /// fn unused_lifetime<'a>(x: u8) {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn no_lifetime(x: u8) {
+ /// // ...
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXTRA_UNUSED_LIFETIMES,
+ complexity,
+ "unused lifetimes in function definitions"
+}
+
+declare_lint_pass!(Lifetimes => [NEEDLESS_LIFETIMES, EXTRA_UNUSED_LIFETIMES]);
+
+impl<'tcx> LateLintPass<'tcx> for Lifetimes {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if let ItemKind::Fn(ref sig, generics, id) = item.kind {
+ check_fn_inner(cx, sig.decl, Some(id), None, generics, item.span, true);
+ } else if let ItemKind::Impl(impl_) = item.kind {
+ if !item.span.from_expansion() {
+ report_extra_impl_lifetimes(cx, impl_);
+ }
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
+ if let ImplItemKind::Fn(ref sig, id) = item.kind {
+ let report_extra_lifetimes = trait_ref_of_method(cx, item.def_id).is_none();
+ check_fn_inner(
+ cx,
+ sig.decl,
+ Some(id),
+ None,
+ item.generics,
+ item.span,
+ report_extra_lifetimes,
+ );
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
+ if let TraitItemKind::Fn(ref sig, ref body) = item.kind {
+ let (body, trait_sig) = match *body {
+ TraitFn::Required(sig) => (None, Some(sig)),
+ TraitFn::Provided(id) => (Some(id), None),
+ };
+ check_fn_inner(cx, sig.decl, body, trait_sig, item.generics, item.span, true);
+ }
+ }
+}
+
+/// The lifetime of a &-reference.
+#[derive(PartialEq, Eq, Hash, Debug, Clone)]
+enum RefLt {
+ Unnamed,
+ Static,
+ Named(LocalDefId),
+}
+
+fn check_fn_inner<'tcx>(
+ cx: &LateContext<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: Option<BodyId>,
+ trait_sig: Option<&[Ident]>,
+ generics: &'tcx Generics<'_>,
+ span: Span,
+ report_extra_lifetimes: bool,
+) {
+ if span.from_expansion() || has_where_lifetimes(cx, generics) {
+ return;
+ }
+
+ let types = generics
+ .params
+ .iter()
+ .filter(|param| matches!(param.kind, GenericParamKind::Type { .. }));
+ for typ in types {
+ for pred in generics.bounds_for_param(cx.tcx.hir().local_def_id(typ.hir_id)) {
+ if pred.origin == PredicateOrigin::WhereClause {
+ // has_where_lifetimes checked that this predicate contains no lifetime.
+ continue;
+ }
+
+ for bound in pred.bounds {
+ let mut visitor = RefVisitor::new(cx);
+ walk_param_bound(&mut visitor, bound);
+ if visitor.lts.iter().any(|lt| matches!(lt, RefLt::Named(_))) {
+ return;
+ }
+ if let GenericBound::Trait(ref trait_ref, _) = *bound {
+ let params = &trait_ref
+ .trait_ref
+ .path
+ .segments
+ .last()
+ .expect("a path must have at least one segment")
+ .args;
+ if let Some(params) = *params {
+ let lifetimes = params.args.iter().filter_map(|arg| match arg {
+ GenericArg::Lifetime(lt) => Some(lt),
+ _ => None,
+ });
+ for bound in lifetimes {
+ if bound.name != LifetimeName::Static && !bound.is_elided() {
+ return;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ if could_use_elision(cx, decl, body, trait_sig, generics.params) {
+ span_lint(
+ cx,
+ NEEDLESS_LIFETIMES,
+ span.with_hi(decl.output.span().hi()),
+ "explicit lifetimes given in parameter types where they could be elided \
+ (or replaced with `'_` if needed by type declaration)",
+ );
+ }
+ if report_extra_lifetimes {
+ self::report_extra_lifetimes(cx, decl, generics);
+ }
+}
+
+// elision doesn't work for explicit self types, see rust-lang/rust#69064
+fn explicit_self_type<'tcx>(cx: &LateContext<'tcx>, func: &FnDecl<'tcx>, ident: Option<Ident>) -> bool {
+ if_chain! {
+ if let Some(ident) = ident;
+ if ident.name == kw::SelfLower;
+ if !func.implicit_self.has_implicit_self();
+
+ if let Some(self_ty) = func.inputs.first();
+ then {
+ let mut visitor = RefVisitor::new(cx);
+ visitor.visit_ty(self_ty);
+
+ !visitor.all_lts().is_empty()
+ } else {
+ false
+ }
+ }
+}
+
+fn could_use_elision<'tcx>(
+ cx: &LateContext<'tcx>,
+ func: &'tcx FnDecl<'_>,
+ body: Option<BodyId>,
+ trait_sig: Option<&[Ident]>,
+ named_generics: &'tcx [GenericParam<'_>],
+) -> bool {
+ // There are two scenarios where elision works:
+ // * no output references, all input references have different LT
+ // * output references, exactly one input reference with same LT
+ // All lifetimes must be unnamed, 'static or defined without bounds on the
+ // level of the current item.
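+ // For example (illustrative only):
+ // * `fn f(x: &u8, y: &u8)` has no output references, so both inputs may be elided
+ // * `fn f<'a>(x: &'a u8) -> &'a u8` has one input reference shared with the output,
+ //   so it can be written as `fn f(x: &u8) -> &u8`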
+
+ // check named LTs
+ let allowed_lts = allowed_lts_from(cx.tcx, named_generics);
+
+ // these will collect all the lifetimes for references in arg/return types
+ let mut input_visitor = RefVisitor::new(cx);
+ let mut output_visitor = RefVisitor::new(cx);
+
+ // extract lifetimes in input argument types
+ for arg in func.inputs {
+ input_visitor.visit_ty(arg);
+ }
+ // extract lifetimes in output type
+ if let Return(ty) = func.output {
+ output_visitor.visit_ty(ty);
+ }
+ for lt in named_generics {
+ input_visitor.visit_generic_param(lt);
+ }
+
+ if input_visitor.abort() || output_visitor.abort() {
+ return false;
+ }
+
+ let input_lts = input_visitor.lts;
+ let output_lts = output_visitor.lts;
+
+ if let Some(trait_sig) = trait_sig {
+ if explicit_self_type(cx, func, trait_sig.first().copied()) {
+ return false;
+ }
+ }
+
+ if let Some(body_id) = body {
+ let body = cx.tcx.hir().body(body_id);
+
+ let first_ident = body.params.first().and_then(|param| param.pat.simple_ident());
+ if explicit_self_type(cx, func, first_ident) {
+ return false;
+ }
+
+ let mut checker = BodyLifetimeChecker {
+ lifetimes_used_in_body: false,
+ };
+ checker.visit_expr(&body.value);
+ if checker.lifetimes_used_in_body {
+ return false;
+ }
+ }
+
+ // check for lifetimes from higher scopes
+ for lt in input_lts.iter().chain(output_lts.iter()) {
+ if !allowed_lts.contains(lt) {
+ return false;
+ }
+ }
+
+ // check for higher-ranked trait bounds
+ if !input_visitor.nested_elision_site_lts.is_empty() || !output_visitor.nested_elision_site_lts.is_empty() {
+ let allowed_lts: FxHashSet<_> = allowed_lts
+ .iter()
+ .filter_map(|lt| match lt {
+ RefLt::Named(def_id) => Some(cx.tcx.item_name(def_id.to_def_id())),
+ _ => None,
+ })
+ .collect();
+ for lt in input_visitor.nested_elision_site_lts {
+ if let RefLt::Named(def_id) = lt {
+ if allowed_lts.contains(&cx.tcx.item_name(def_id.to_def_id())) {
+ return false;
+ }
+ }
+ }
+ for lt in output_visitor.nested_elision_site_lts {
+ if let RefLt::Named(def_id) = lt {
+ if allowed_lts.contains(&cx.tcx.item_name(def_id.to_def_id())) {
+ return false;
+ }
+ }
+ }
+ }
+
+ // no input lifetimes? easy case!
+ if input_lts.is_empty() {
+ false
+ } else if output_lts.is_empty() {
+ // no output lifetimes, check distinctness of input lifetimes
+
+ // only unnamed and static, ok
+ let unnamed_and_static = input_lts.iter().all(|lt| *lt == RefLt::Unnamed || *lt == RefLt::Static);
+ if unnamed_and_static {
+ return false;
+ }
+ // we have no output reference, so we only need all distinct lifetimes
+ input_lts.len() == unique_lifetimes(&input_lts)
+ } else {
+ // we have output references, so we need one input reference,
+ // and all output lifetimes must be the same
+ if unique_lifetimes(&output_lts) > 1 {
+ return false;
+ }
+ if input_lts.len() == 1 {
+ match (&input_lts[0], &output_lts[0]) {
+ (&RefLt::Named(n1), &RefLt::Named(n2)) if n1 == n2 => true,
+ (&RefLt::Named(_), &RefLt::Unnamed) => true,
+ _ => false, /* already elided, different named lifetimes
+ * or something static going on */
+ }
+ } else {
+ false
+ }
+ }
+}
+
+fn allowed_lts_from(tcx: TyCtxt<'_>, named_generics: &[GenericParam<'_>]) -> FxHashSet<RefLt> {
+ let mut allowed_lts = FxHashSet::default();
+ for par in named_generics.iter() {
+ if let GenericParamKind::Lifetime { .. } = par.kind {
+ allowed_lts.insert(RefLt::Named(tcx.hir().local_def_id(par.hir_id)));
+ }
+ }
+ allowed_lts.insert(RefLt::Unnamed);
+ allowed_lts.insert(RefLt::Static);
+ allowed_lts
+}
+
+/// Number of unique lifetimes in the given slice.
+#[must_use]
+fn unique_lifetimes(lts: &[RefLt]) -> usize {
+ lts.iter().collect::<FxHashSet<_>>().len()
+}
+
+const CLOSURE_TRAIT_BOUNDS: [LangItem; 3] = [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce];
+
+/// A visitor usable for `rustc_hir::intravisit::walk_ty()`.
+struct RefVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ lts: Vec<RefLt>,
+ nested_elision_site_lts: Vec<RefLt>,
+ unelided_trait_object_lifetime: bool,
+}
+
+impl<'a, 'tcx> RefVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ lts: Vec::new(),
+ nested_elision_site_lts: Vec::new(),
+ unelided_trait_object_lifetime: false,
+ }
+ }
+
+ fn record(&mut self, lifetime: &Option<Lifetime>) {
+ if let Some(ref lt) = *lifetime {
+ if lt.name == LifetimeName::Static {
+ self.lts.push(RefLt::Static);
+ } else if let LifetimeName::Param(_, ParamName::Fresh) = lt.name {
+ // Fresh lifetimes generated should be ignored.
+ self.lts.push(RefLt::Unnamed);
+ } else if lt.is_elided() {
+ self.lts.push(RefLt::Unnamed);
+ } else if let LifetimeName::Param(def_id, _) = lt.name {
+ self.lts.push(RefLt::Named(def_id));
+ } else {
+ self.lts.push(RefLt::Unnamed);
+ }
+ } else {
+ self.lts.push(RefLt::Unnamed);
+ }
+ }
+
+ fn all_lts(&self) -> Vec<RefLt> {
+ self.lts
+ .iter()
+ .chain(self.nested_elision_site_lts.iter())
+ .cloned()
+ .collect::<Vec<_>>()
+ }
+
+ fn abort(&self) -> bool {
+ self.unelided_trait_object_lifetime
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for RefVisitor<'a, 'tcx> {
+ // for lifetimes as parameters of generics
+ fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
+ self.record(&Some(*lifetime));
+ }
+
+ fn visit_poly_trait_ref(&mut self, poly_tref: &'tcx PolyTraitRef<'tcx>, tbm: TraitBoundModifier) {
+ let trait_ref = &poly_tref.trait_ref;
+ if CLOSURE_TRAIT_BOUNDS.iter().any(|&item| {
+ self.cx
+ .tcx
+ .lang_items()
+ .require(item)
+ .map_or(false, |id| Some(id) == trait_ref.trait_def_id())
+ }) {
+ let mut sub_visitor = RefVisitor::new(self.cx);
+ sub_visitor.visit_trait_ref(trait_ref);
+ self.nested_elision_site_lts.append(&mut sub_visitor.all_lts());
+ } else {
+ walk_poly_trait_ref(self, poly_tref, tbm);
+ }
+ }
+
+ fn visit_ty(&mut self, ty: &'tcx Ty<'_>) {
+ match ty.kind {
+ TyKind::OpaqueDef(item, bounds) => {
+ let map = self.cx.tcx.hir();
+ let item = map.item(item);
+ let len = self.lts.len();
+ walk_item(self, item);
+ self.lts.truncate(len);
+ self.lts.extend(bounds.iter().filter_map(|bound| match bound {
+ GenericArg::Lifetime(l) => Some(if let LifetimeName::Param(def_id, _) = l.name {
+ RefLt::Named(def_id)
+ } else {
+ RefLt::Unnamed
+ }),
+ _ => None,
+ }));
+ },
+ TyKind::BareFn(&BareFnTy { decl, .. }) => {
+ let mut sub_visitor = RefVisitor::new(self.cx);
+ sub_visitor.visit_fn_decl(decl);
+ self.nested_elision_site_lts.append(&mut sub_visitor.all_lts());
+ },
+ TyKind::TraitObject(bounds, ref lt, _) => {
+ if !lt.is_elided() {
+ self.unelided_trait_object_lifetime = true;
+ }
+ for bound in bounds {
+ self.visit_poly_trait_ref(bound, TraitBoundModifier::None);
+ }
+ },
+ _ => walk_ty(self, ty),
+ }
+ }
+}
+
+/// Are any lifetimes mentioned in the `where` clause? If so, we don't try to
+/// reason about elision.
+fn has_where_lifetimes<'tcx>(cx: &LateContext<'tcx>, generics: &'tcx Generics<'_>) -> bool {
+ for predicate in generics.predicates {
+ match *predicate {
+ WherePredicate::RegionPredicate(..) => return true,
+ WherePredicate::BoundPredicate(ref pred) => {
+ // a predicate like F: Trait or F: for<'a> Trait<'a>
+ let mut visitor = RefVisitor::new(cx);
+ // walk the type F, it may not contain LT refs
+ walk_ty(&mut visitor, pred.bounded_ty);
+ if !visitor.all_lts().is_empty() {
+ return true;
+ }
+ // if the bounds define new lifetimes, they are fine to occur
+ let allowed_lts = allowed_lts_from(cx.tcx, pred.bound_generic_params);
+ // now walk the bounds
+ for bound in pred.bounds.iter() {
+ walk_param_bound(&mut visitor, bound);
+ }
+ // and check that all lifetimes are allowed
+ if visitor.all_lts().iter().any(|it| !allowed_lts.contains(it)) {
+ return true;
+ }
+ },
+ WherePredicate::EqPredicate(ref pred) => {
+ let mut visitor = RefVisitor::new(cx);
+ walk_ty(&mut visitor, pred.lhs_ty);
+ walk_ty(&mut visitor, pred.rhs_ty);
+ if !visitor.lts.is_empty() {
+ return true;
+ }
+ },
+ }
+ }
+ false
+}
+
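+/// Visitor that starts from a map of declared lifetime names and removes every name it
+/// sees used; whatever is left in `map` afterwards is reported as unused by the callers.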
+struct LifetimeChecker<'cx, 'tcx, F> {
+ cx: &'cx LateContext<'tcx>,
+ map: FxHashMap<Symbol, Span>,
+ phantom: std::marker::PhantomData<F>,
+}
+
+impl<'cx, 'tcx, F> LifetimeChecker<'cx, 'tcx, F> {
+ fn new(cx: &'cx LateContext<'tcx>, map: FxHashMap<Symbol, Span>) -> LifetimeChecker<'cx, 'tcx, F> {
+ Self {
+ cx,
+ map,
+ phantom: std::marker::PhantomData,
+ }
+ }
+}
+
+impl<'cx, 'tcx, F> Visitor<'tcx> for LifetimeChecker<'cx, 'tcx, F>
+where
+ F: NestedFilter<'tcx>,
+{
+ type Map = rustc_middle::hir::map::Map<'tcx>;
+ type NestedFilter = F;
+
+ // for lifetimes as parameters of generics
+ fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
+ self.map.remove(&lifetime.name.ident().name);
+ }
+
+ fn visit_generic_param(&mut self, param: &'tcx GenericParam<'_>) {
+ // don't actually visit `<'a>` or `<'a: 'b>`
+ // we've already visited the `'a` declarations and
+ // don't want to spuriously remove them
+ // `'b` in `'a: 'b` is useless unless used elsewhere in
+ // a non-lifetime bound
+ if let GenericParamKind::Type { .. } = param.kind {
+ walk_generic_param(self, param);
+ }
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+fn report_extra_lifetimes<'tcx>(cx: &LateContext<'tcx>, func: &'tcx FnDecl<'_>, generics: &'tcx Generics<'_>) {
+ let hs = generics
+ .params
+ .iter()
+ .filter_map(|par| match par.kind {
+ GenericParamKind::Lifetime { .. } => Some((par.name.ident().name, par.span)),
+ _ => None,
+ })
+ .collect();
+ let mut checker = LifetimeChecker::<hir_nested_filter::None>::new(cx, hs);
+
+ walk_generics(&mut checker, generics);
+ walk_fn_decl(&mut checker, func);
+
+ for &v in checker.map.values() {
+ span_lint(
+ cx,
+ EXTRA_UNUSED_LIFETIMES,
+ v,
+ "this lifetime isn't used in the function definition",
+ );
+ }
+}
+
+fn report_extra_impl_lifetimes<'tcx>(cx: &LateContext<'tcx>, impl_: &'tcx Impl<'_>) {
+ let hs = impl_
+ .generics
+ .params
+ .iter()
+ .filter_map(|par| match par.kind {
+ GenericParamKind::Lifetime { .. } => Some((par.name.ident().name, par.span)),
+ _ => None,
+ })
+ .collect();
+ let mut checker = LifetimeChecker::<middle_nested_filter::All>::new(cx, hs);
+
+ walk_generics(&mut checker, impl_.generics);
+ if let Some(ref trait_ref) = impl_.of_trait {
+ walk_trait_ref(&mut checker, trait_ref);
+ }
+ walk_ty(&mut checker, impl_.self_ty);
+ for item in impl_.items {
+ walk_impl_item_ref(&mut checker, item);
+ }
+
+ for &v in checker.map.values() {
+ span_lint(cx, EXTRA_UNUSED_LIFETIMES, v, "this lifetime isn't used in the impl");
+ }
+}
+
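+/// Visitor that records whether a body mentions any lifetime other than `'_` or `'static`.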
+struct BodyLifetimeChecker {
+ lifetimes_used_in_body: bool,
+}
+
+impl<'tcx> Visitor<'tcx> for BodyLifetimeChecker {
+ // for lifetimes as parameters of generics
+ fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
+ if lifetime.name.ident().name != kw::UnderscoreLifetime && lifetime.name.ident().name != kw::StaticLifetime {
+ self.lifetimes_used_in_body = true;
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/literal_representation.rs b/src/tools/clippy/clippy_lints/src/literal_representation.rs
new file mode 100644
index 000000000..fb2104861
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/literal_representation.rs
@@ -0,0 +1,534 @@
+//! Lints concerned with the grouping of digits with underscores in integral or
+//! floating-point literal expressions.
+
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::numeric_literal::{NumericLiteral, Radix};
+use clippy_utils::source::snippet_opt;
+use if_chain::if_chain;
+use rustc_ast::ast::{Expr, ExprKind, Lit, LitKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use std::iter;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if a long integral or floating-point constant does
+ /// not contain underscores.
+ ///
+ /// ### Why is this bad?
+ /// Reading long numbers is difficult without separators.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let _: u64 =
+ /// 61864918973511
+ /// # ;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let _: u64 =
+ /// 61_864_918_973_511
+ /// # ;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNREADABLE_LITERAL,
+ pedantic,
+ "long literal without underscores"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns for a mistyped suffix in literals.
+ ///
+ /// ### Why is this bad?
+ /// This is most probably a typo.
+ ///
+ /// ### Known problems
+ /// - Does not match on integers too large to fit in the corresponding unsigned type
+ /// - Does not match on `_127` since that is a valid grouping for decimal and octal numbers
+ ///
+ /// ### Example
+ /// ```ignore
+ /// `2_32` => `2_i32`
+ /// `250_8` => `250_u8`
+ /// ```
+ #[clippy::version = "1.30.0"]
+ pub MISTYPED_LITERAL_SUFFIXES,
+ correctness,
+ "mistyped literal suffix"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if an integral or floating-point constant is
+ /// grouped inconsistently with underscores.
+ ///
+ /// ### Why is this bad?
+ /// Readers may incorrectly interpret inconsistently
+ /// grouped digits.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let _: u64 =
+ /// 618_64_9189_73_511
+ /// # ;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let _: u64 =
+ /// 61_864_918_973_511
+ /// # ;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INCONSISTENT_DIGIT_GROUPING,
+ style,
+ "integer literals with digits grouped inconsistently"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if hexadecimal or binary literals are not grouped
+ /// by nibble or byte.
+ ///
+ /// ### Why is this bad?
+ /// Negatively impacts readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: u32 = 0xFFF_FFF;
+ /// let y: u8 = 0b01_011_101;
+ /// ```
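+ ///
+ /// Use instead (grouped by four, for example):
+ /// ```rust
+ /// let x: u32 = 0x00FF_FFFF;
+ /// let y: u8 = 0b0101_1101;
+ /// ```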
+ #[clippy::version = "1.49.0"]
+ pub UNUSUAL_BYTE_GROUPINGS,
+ style,
+ "binary or hex literals that aren't grouped by four"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if the digits of an integral or floating-point
+ /// constant are grouped into groups that
+ /// are too large.
+ ///
+ /// ### Why is this bad?
+ /// Negatively impacts readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: u64 = 6186491_8973511;
+ /// ```
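+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x: u64 = 61_864_918_973_511;
+ /// ```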
+ #[clippy::version = "pre 1.29.0"]
+ pub LARGE_DIGIT_GROUPS,
+ pedantic,
+ "grouping digits into groups that are too large"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if there is a better representation for a numeric literal.
+ ///
+ /// ### Why is this bad?
+ /// Especially for big powers of 2, a hexadecimal representation is more
+ /// readable than a decimal one.
+ ///
+ /// ### Example
+ /// ```text
+ /// `255` => `0xFF`
+ /// `65_535` => `0xFFFF`
+ /// `4_042_322_160` => `0xF0F0_F0F0`
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DECIMAL_LITERAL_REPRESENTATION,
+ restriction,
+ "using decimal representation when hexadecimal would be better"
+}
+
+enum WarningType {
+ UnreadableLiteral,
+ InconsistentDigitGrouping,
+ LargeDigitGroups,
+ DecimalRepresentation,
+ MistypedLiteralSuffix,
+ UnusualByteGroupings,
+}
+
+impl WarningType {
+ fn display(&self, suggested_format: String, cx: &EarlyContext<'_>, span: rustc_span::Span) {
+ match self {
+ Self::MistypedLiteralSuffix => span_lint_and_sugg(
+ cx,
+ MISTYPED_LITERAL_SUFFIXES,
+ span,
+ "mistyped literal suffix",
+ "did you mean to write",
+ suggested_format,
+ Applicability::MaybeIncorrect,
+ ),
+ Self::UnreadableLiteral => span_lint_and_sugg(
+ cx,
+ UNREADABLE_LITERAL,
+ span,
+ "long literal lacking separators",
+ "consider",
+ suggested_format,
+ Applicability::MachineApplicable,
+ ),
+ Self::LargeDigitGroups => span_lint_and_sugg(
+ cx,
+ LARGE_DIGIT_GROUPS,
+ span,
+ "digit groups should be smaller",
+ "consider",
+ suggested_format,
+ Applicability::MachineApplicable,
+ ),
+ Self::InconsistentDigitGrouping => span_lint_and_sugg(
+ cx,
+ INCONSISTENT_DIGIT_GROUPING,
+ span,
+ "digits grouped inconsistently by underscores",
+ "consider",
+ suggested_format,
+ Applicability::MachineApplicable,
+ ),
+ Self::DecimalRepresentation => span_lint_and_sugg(
+ cx,
+ DECIMAL_LITERAL_REPRESENTATION,
+ span,
+ "integer literal has a better hexadecimal representation",
+ "consider",
+ suggested_format,
+ Applicability::MachineApplicable,
+ ),
+ Self::UnusualByteGroupings => span_lint_and_sugg(
+ cx,
+ UNUSUAL_BYTE_GROUPINGS,
+ span,
+ "digits of hex or binary literal not grouped by four",
+ "consider",
+ suggested_format,
+ Applicability::MachineApplicable,
+ ),
+ };
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct LiteralDigitGrouping {
+ lint_fraction_readability: bool,
+}
+
+impl_lint_pass!(LiteralDigitGrouping => [
+ UNREADABLE_LITERAL,
+ INCONSISTENT_DIGIT_GROUPING,
+ LARGE_DIGIT_GROUPS,
+ MISTYPED_LITERAL_SUFFIXES,
+ UNUSUAL_BYTE_GROUPINGS,
+]);
+
+impl EarlyLintPass for LiteralDigitGrouping {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Lit(ref lit) = expr.kind {
+ self.check_lit(cx, lit);
+ }
+ }
+}
+
+// Length of each UUID hyphenated group in hex digits.
+const UUID_GROUP_LENS: [usize; 5] = [8, 4, 4, 4, 12];
+
+impl LiteralDigitGrouping {
+ pub fn new(lint_fraction_readability: bool) -> Self {
+ Self {
+ lint_fraction_readability,
+ }
+ }
+
+ fn check_lit(self, cx: &EarlyContext<'_>, lit: &Lit) {
+ if_chain! {
+ if let Some(src) = snippet_opt(cx, lit.span);
+ if let Some(mut num_lit) = NumericLiteral::from_lit(&src, lit);
+ then {
+ if !Self::check_for_mistyped_suffix(cx, lit.span, &mut num_lit) {
+ return;
+ }
+
+ if Self::is_literal_uuid_formatted(&mut num_lit) {
+ return;
+ }
+
+ let result = (|| {
+
+ let integral_group_size = Self::get_group_size(num_lit.integer.split('_'), num_lit.radix, true)?;
+ if let Some(fraction) = num_lit.fraction {
+ let fractional_group_size = Self::get_group_size(
+ fraction.rsplit('_'),
+ num_lit.radix,
+ self.lint_fraction_readability)?;
+
+ let consistent = Self::parts_consistent(integral_group_size,
+ fractional_group_size,
+ num_lit.integer.len(),
+ fraction.len());
+ if !consistent {
+ return Err(WarningType::InconsistentDigitGrouping);
+ };
+ }
+
+ Ok(())
+ })();
+
+
+ if let Err(warning_type) = result {
+ let should_warn = match warning_type {
+ | WarningType::UnreadableLiteral
+ | WarningType::InconsistentDigitGrouping
+ | WarningType::UnusualByteGroupings
+ | WarningType::LargeDigitGroups => {
+ !lit.span.from_expansion()
+ }
+ WarningType::DecimalRepresentation | WarningType::MistypedLiteralSuffix => {
+ true
+ }
+ };
+ if should_warn {
+ warning_type.display(num_lit.format(), cx, lit.span);
+ }
+ }
+ }
+ }
+ }
+
+ // Returns `false` if the check fails
+ fn check_for_mistyped_suffix(
+ cx: &EarlyContext<'_>,
+ span: rustc_span::Span,
+ num_lit: &mut NumericLiteral<'_>,
+ ) -> bool {
+ if num_lit.suffix.is_some() {
+ return true;
+ }
+
+ let (part, mistyped_suffixes, is_float) = if let Some((_, exponent)) = &mut num_lit.exponent {
+ (exponent, &["32", "64"][..], true)
+ } else if num_lit.fraction.is_some() {
+ return true;
+ } else {
+ (&mut num_lit.integer, &["8", "16", "32", "64"][..], false)
+ };
+
+ let mut split = part.rsplit('_');
+ let last_group = split.next().expect("At least one group");
+ if split.next().is_some() && mistyped_suffixes.contains(&last_group) {
+ let main_part = &part[..part.len() - last_group.len()];
+ let missing_char;
+ if is_float {
+ missing_char = 'f';
+ } else {
+ let radix = match num_lit.radix {
+ Radix::Binary => 2,
+ Radix::Octal => 8,
+ Radix::Decimal => 10,
+ Radix::Hexadecimal => 16,
+ };
+ if let Ok(int) = u64::from_str_radix(&main_part.replace('_', ""), radix) {
+ missing_char = match (last_group, int) {
+ ("8", i) if i8::try_from(i).is_ok() => 'i',
+ ("16", i) if i16::try_from(i).is_ok() => 'i',
+ ("32", i) if i32::try_from(i).is_ok() => 'i',
+ ("64", i) if i64::try_from(i).is_ok() => 'i',
+ ("8", u) if u8::try_from(u).is_ok() => 'u',
+ ("16", u) if u16::try_from(u).is_ok() => 'u',
+ ("32", u) if u32::try_from(u).is_ok() => 'u',
+ ("64", _) => 'u',
+ _ => {
+ return true;
+ },
+ }
+ } else {
+ return true;
+ }
+ }
+ *part = main_part;
+ let mut sugg = num_lit.format();
+ sugg.push('_');
+ sugg.push(missing_char);
+ sugg.push_str(last_group);
+ WarningType::MistypedLiteralSuffix.display(sugg, cx, span);
+ false
+ } else {
+ true
+ }
+ }
+
+ /// Checks whether the numeric literal matches the formatting of a UUID.
+ ///
+ /// Returns `true` if the radix is hexadecimal, and the groups match the
+ /// UUID format of 8-4-4-4-12.
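+ ///
+ /// For example (illustrative), `0x12345678_1234_4321_8765_123456789012u128` matches
+ /// this shape and is therefore not linted for its grouping.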
+ fn is_literal_uuid_formatted(num_lit: &mut NumericLiteral<'_>) -> bool {
+ if num_lit.radix != Radix::Hexadecimal {
+ return false;
+ }
+
+ // UUIDs should not have a fraction
+ if num_lit.fraction.is_some() {
+ return false;
+ }
+
+ let group_sizes: Vec<usize> = num_lit.integer.split('_').map(str::len).collect();
+ if UUID_GROUP_LENS.len() == group_sizes.len() {
+ iter::zip(&UUID_GROUP_LENS, &group_sizes).all(|(&a, &b)| a == b)
+ } else {
+ false
+ }
+ }
+
+ /// Given the sizes of the digit groups of both integral and fractional
+ /// parts, and the length
+ /// of both parts, determine if the digits have been grouped consistently.
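+ ///
+ /// For example (illustrative), `1_234.567` is consistent: the integral part is grouped in
+ /// threes and the short fractional part needs no separator.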
+ #[must_use]
+ fn parts_consistent(
+ int_group_size: Option<usize>,
+ frac_group_size: Option<usize>,
+ int_size: usize,
+ frac_size: usize,
+ ) -> bool {
+ match (int_group_size, frac_group_size) {
+ // No groups on either side of decimal point - trivially consistent.
+ (None, None) => true,
+ // Integral part has grouped digits, fractional part does not.
+ (Some(int_group_size), None) => frac_size <= int_group_size,
+ // Fractional part has grouped digits, integral part does not.
+ (None, Some(frac_group_size)) => int_size <= frac_group_size,
+ // Both parts have grouped digits. Groups should be the same size.
+ (Some(int_group_size), Some(frac_group_size)) => int_group_size == frac_group_size,
+ }
+ }
+
+ /// Returns the size of the digit groups (or None if ungrouped) if successful,
+ /// otherwise returns a `WarningType` for linting.
+ fn get_group_size<'a>(
+ groups: impl Iterator<Item = &'a str>,
+ radix: Radix,
+ lint_unreadable: bool,
+ ) -> Result<Option<usize>, WarningType> {
+ let mut groups = groups.map(str::len);
+
+ let first = groups.next().expect("At least one group");
+
+ if (radix == Radix::Binary || radix == Radix::Hexadecimal) && groups.any(|i| i != 4 && i != 2) {
+ return Err(WarningType::UnusualByteGroupings);
+ }
+
+ if let Some(second) = groups.next() {
+ if !groups.all(|x| x == second) || first > second {
+ Err(WarningType::InconsistentDigitGrouping)
+ } else if second > 4 {
+ Err(WarningType::LargeDigitGroups)
+ } else {
+ Ok(Some(second))
+ }
+ } else if first > 5 && lint_unreadable {
+ Err(WarningType::UnreadableLiteral)
+ } else {
+ Ok(None)
+ }
+ }
+}
+
+#[expect(clippy::module_name_repetitions)]
+#[derive(Copy, Clone)]
+pub struct DecimalLiteralRepresentation {
+ threshold: u64,
+}
+
+impl_lint_pass!(DecimalLiteralRepresentation => [DECIMAL_LITERAL_REPRESENTATION]);
+
+impl EarlyLintPass for DecimalLiteralRepresentation {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Lit(ref lit) = expr.kind {
+ self.check_lit(cx, lit);
+ }
+ }
+}
+
+impl DecimalLiteralRepresentation {
+ #[must_use]
+ pub fn new(threshold: u64) -> Self {
+ Self { threshold }
+ }
+ fn check_lit(self, cx: &EarlyContext<'_>, lit: &Lit) {
+ // Lint integral literals.
+ if_chain! {
+ if let LitKind::Int(val, _) = lit.kind;
+ if let Some(src) = snippet_opt(cx, lit.span);
+ if let Some(num_lit) = NumericLiteral::from_lit(&src, lit);
+ if num_lit.radix == Radix::Decimal;
+ if val >= u128::from(self.threshold);
+ then {
+ let hex = format!("{:#X}", val);
+ let num_lit = NumericLiteral::new(&hex, num_lit.suffix, false);
+ let _ = Self::do_lint(num_lit.integer).map_err(|warning_type| {
+ warning_type.display(num_lit.format(), cx, lit.span);
+ });
+ }
+ }
+ }
+
+ fn do_lint(digits: &str) -> Result<(), WarningType> {
+ if digits.len() == 1 {
+ // Lint for 1 digit literals, if someone really sets the threshold that low
+ if digits == "1"
+ || digits == "2"
+ || digits == "4"
+ || digits == "8"
+ || digits == "3"
+ || digits == "7"
+ || digits == "F"
+ {
+ return Err(WarningType::DecimalRepresentation);
+ }
+ } else if digits.len() < 4 {
+ // Lint for Literals with a hex-representation of 2 or 3 digits
+ let f = &digits[0..1]; // first digit
+ let s = &digits[1..]; // all digits except the first
+
+ // Powers of 2
+ if ((f.eq("1") || f.eq("2") || f.eq("4") || f.eq("8")) && s.chars().all(|c| c == '0'))
+ // Powers of 2 minus 1
+ || ((f.eq("1") || f.eq("3") || f.eq("7") || f.eq("F")) && s.chars().all(|c| c == 'F'))
+ {
+ return Err(WarningType::DecimalRepresentation);
+ }
+ } else {
+ // Lint for Literals with a hex-representation of 4 digits or more
+ let f = &digits[0..1]; // first digit
+ let m = &digits[1..digits.len() - 1]; // middle digits, except last
+ let s = &digits[1..]; // all digits except the first
+
+ // Powers of 2 with a margin of +15/-16
+ if ((f.eq("1") || f.eq("2") || f.eq("4") || f.eq("8")) && m.chars().all(|c| c == '0'))
+ || ((f.eq("1") || f.eq("3") || f.eq("7") || f.eq("F")) && m.chars().all(|c| c == 'F'))
+ // Lint for representations with only 0s and Fs, while allowing 7 as the first
+ // digit
+ || ((f.eq("7") || f.eq("F")) && s.chars().all(|c| c == '0' || c == 'F'))
+ {
+ return Err(WarningType::DecimalRepresentation);
+ }
+ }
+
+ Ok(())
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/empty_loop.rs b/src/tools/clippy/clippy_lints/src/loops/empty_loop.rs
new file mode 100644
index 000000000..823cf0f43
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/empty_loop.rs
@@ -0,0 +1,18 @@
+use super::EMPTY_LOOP;
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{is_in_panic_handler, is_no_std_crate};
+
+use rustc_hir::{Block, Expr};
+use rustc_lint::LateContext;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, loop_block: &Block<'_>) {
+ if loop_block.stmts.is_empty() && loop_block.expr.is_none() && !is_in_panic_handler(cx, expr) {
+ let msg = "empty `loop {}` wastes CPU cycles";
+ let help = if is_no_std_crate(cx) {
+ "you should either use `panic!()` or add a call pausing or sleeping the thread to the loop body"
+ } else {
+ "you should either use `panic!()` or add `std::thread::sleep(..);` to the loop body"
+ };
+ span_lint_and_help(cx, EMPTY_LOOP, expr.span, msg, None, help);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs
new file mode 100644
index 000000000..8e3ab26a9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/explicit_counter_loop.rs
@@ -0,0 +1,93 @@
+use super::{make_iterator_snippet, IncrementVisitor, InitializeVisitor, EXPLICIT_COUNTER_LOOP};
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{get_enclosing_block, is_integer_const};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_block, walk_expr};
+use rustc_hir::{Expr, Pat};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty, UintTy};
+
+// To trigger the EXPLICIT_COUNTER_LOOP lint, a variable must be
+// incremented exactly once in the loop body, and initialized to zero
+// at the start of the loop.
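+// For example (illustrative):
+//     let mut count = 0;
+//     for item in &items { /* use `item` */ count += 1; }
+// is reported with the suggestion
+//     for (count, item) in items.iter().enumerate() { /* use `item` */ }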
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+) {
+ // Look for variables that are incremented once per loop iteration.
+ let mut increment_visitor = IncrementVisitor::new(cx);
+ walk_expr(&mut increment_visitor, body);
+
+ // For each candidate, check the parent block to see if
+ // it's initialized to zero at the start of the loop.
+ if let Some(block) = get_enclosing_block(cx, expr.hir_id) {
+ for id in increment_visitor.into_results() {
+ let mut initialize_visitor = InitializeVisitor::new(cx, expr, id);
+ walk_block(&mut initialize_visitor, block);
+
+ if_chain! {
+ if let Some((name, ty, initializer)) = initialize_visitor.get_result();
+ if is_integer_const(cx, initializer, 0);
+ then {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let span = expr.span.with_hi(arg.span.hi());
+
+ let int_name = match ty.map(Ty::kind) {
+ // usize or inferred
+ Some(ty::Uint(UintTy::Usize)) | None => {
+ span_lint_and_sugg(
+ cx,
+ EXPLICIT_COUNTER_LOOP,
+ span,
+ &format!("the variable `{}` is used as a loop counter", name),
+ "consider using",
+ format!(
+ "for ({}, {}) in {}.enumerate()",
+ name,
+ snippet_with_applicability(cx, pat.span, "item", &mut applicability),
+ make_iterator_snippet(cx, arg, &mut applicability),
+ ),
+ applicability,
+ );
+ return;
+ }
+ Some(ty::Int(int_ty)) => int_ty.name_str(),
+ Some(ty::Uint(uint_ty)) => uint_ty.name_str(),
+ _ => return,
+ };
+
+ span_lint_and_then(
+ cx,
+ EXPLICIT_COUNTER_LOOP,
+ span,
+ &format!("the variable `{}` is used as a loop counter", name),
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "consider using",
+ format!(
+ "for ({}, {}) in (0_{}..).zip({})",
+ name,
+ snippet_with_applicability(cx, pat.span, "item", &mut applicability),
+ int_name,
+ make_iterator_snippet(cx, arg, &mut applicability),
+ ),
+ applicability,
+ );
+
+ diag.note(&format!(
+ "`{}` is of type `{}`, making it ineligible for `Iterator::enumerate`",
+ name, int_name
+ ));
+ },
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_into_iter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_into_iter_loop.rs
new file mode 100644
index 000000000..175e2b382
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/explicit_into_iter_loop.rs
@@ -0,0 +1,29 @@
+use super::EXPLICIT_INTO_ITER_LOOP;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
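+/// Reports e.g. `for x in y.into_iter()` and suggests the more concise `for x in y`,
+/// provided the receiver type needs no adjustment (illustrative summary).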
+pub(super) fn check(cx: &LateContext<'_>, self_arg: &Expr<'_>, call_expr: &Expr<'_>) {
+ let self_ty = cx.typeck_results().expr_ty(self_arg);
+ let self_ty_adjusted = cx.typeck_results().expr_ty_adjusted(self_arg);
+ if !(self_ty == self_ty_adjusted && is_trait_method(cx, call_expr, sym::IntoIterator)) {
+ return;
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+ let object = snippet_with_applicability(cx, self_arg.span, "_", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ EXPLICIT_INTO_ITER_LOOP,
+ call_expr.span,
+ "it is more concise to loop over containers instead of using explicit \
+ iteration methods",
+ "to write this more concisely, try",
+ object.to_string(),
+ applicability,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
new file mode 100644
index 000000000..5f5beccd0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
@@ -0,0 +1,75 @@
+use super::EXPLICIT_ITER_LOOP;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, Mutability};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::sym;
+
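+/// Reports e.g. `for x in vec.iter()` or `vec.iter_mut()` and suggests `for x in &vec` or
+/// `for x in &mut vec` for types known to be iterable by reference (illustrative summary).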
+pub(super) fn check(cx: &LateContext<'_>, self_arg: &Expr<'_>, arg: &Expr<'_>, method_name: &str) {
+ let should_lint = match method_name {
+ "iter" | "iter_mut" => is_ref_iterable_type(cx, self_arg),
+ "into_iter" if is_trait_method(cx, arg, sym::IntoIterator) => {
+ let receiver_ty = cx.typeck_results().expr_ty(self_arg);
+ let receiver_ty_adjusted = cx.typeck_results().expr_ty_adjusted(self_arg);
+ let ref_receiver_ty = cx.tcx.mk_ref(
+ cx.tcx.lifetimes.re_erased,
+ ty::TypeAndMut {
+ ty: receiver_ty,
+ mutbl: Mutability::Not,
+ },
+ );
+ receiver_ty_adjusted == ref_receiver_ty
+ },
+ _ => false,
+ };
+
+ if !should_lint {
+ return;
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+ let object = snippet_with_applicability(cx, self_arg.span, "_", &mut applicability);
+ let muta = if method_name == "iter_mut" { "mut " } else { "" };
+ span_lint_and_sugg(
+ cx,
+ EXPLICIT_ITER_LOOP,
+ arg.span,
+ "it is more concise to loop over references to containers instead of using explicit \
+ iteration methods",
+ "to write this more concisely, try",
+ format!("&{}{}", muta, object),
+ applicability,
+ );
+}
+
+/// Returns `true` if the type of expr is one that provides `IntoIterator` impls
+/// for `&T` and `&mut T`, such as `Vec`.
+#[rustfmt::skip]
+fn is_ref_iterable_type(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ // no walk_ptrs_ty: calling iter() on a reference can make sense because it
+ // will allow further borrows afterwards
+ let ty = cx.typeck_results().expr_ty(e);
+ is_iterable_array(ty, cx) ||
+ is_type_diagnostic_item(cx, ty, sym::Vec) ||
+ is_type_diagnostic_item(cx, ty, sym::LinkedList) ||
+ is_type_diagnostic_item(cx, ty, sym::HashMap) ||
+ is_type_diagnostic_item(cx, ty, sym::HashSet) ||
+ is_type_diagnostic_item(cx, ty, sym::VecDeque) ||
+ is_type_diagnostic_item(cx, ty, sym::BinaryHeap) ||
+ is_type_diagnostic_item(cx, ty, sym::BTreeMap) ||
+ is_type_diagnostic_item(cx, ty, sym::BTreeSet)
+}
+
+fn is_iterable_array<'tcx>(ty: Ty<'tcx>, cx: &LateContext<'tcx>) -> bool {
+ // IntoIterator is currently only implemented for array sizes <= 32 in rustc
+ match ty.kind() {
+ ty::Array(_, n) => n
+ .try_eval_usize(cx.tcx, cx.param_env)
+ .map_or(false, |val| (0..=32).contains(&val)),
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs b/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs
new file mode 100644
index 000000000..bee0e1d76
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs
@@ -0,0 +1,66 @@
+use super::FOR_KV_MAP;
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::source::snippet;
+use clippy_utils::sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::visitors::is_local_used;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Pat, PatKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::sym;
+
+/// Checks for the `FOR_KV_MAP` lint.
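+///
+/// For example (illustrative), `for (k, _) in &map { .. }` is reported with the suggestion
+/// to use `for k in map.keys() { .. }` instead.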
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>, arg: &'tcx Expr<'_>, body: &'tcx Expr<'_>) {
+ let pat_span = pat.span;
+
+ if let PatKind::Tuple(pat, _) = pat.kind {
+ if pat.len() == 2 {
+ let arg_span = arg.span;
+ let (new_pat_span, kind, ty, mutbl) = match *cx.typeck_results().expr_ty(arg).kind() {
+ ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) {
+ (key, _) if pat_is_wild(cx, key, body) => (pat[1].span, "value", ty, mutbl),
+ (_, value) if pat_is_wild(cx, value, body) => (pat[0].span, "key", ty, Mutability::Not),
+ _ => return,
+ },
+ _ => return,
+ };
+ let mutbl = match mutbl {
+ Mutability::Not => "",
+ Mutability::Mut => "_mut",
+ };
+ let arg = match arg.kind {
+ ExprKind::AddrOf(BorrowKind::Ref, _, expr) => expr,
+ _ => arg,
+ };
+
+ if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) {
+ span_lint_and_then(
+ cx,
+ FOR_KV_MAP,
+ arg_span,
+ &format!("you seem to want to iterate on a map's {}s", kind),
+ |diag| {
+ let map = sugg::Sugg::hir(cx, arg, "map");
+ multispan_sugg(
+ diag,
+ "use the corresponding method",
+ vec![
+ (pat_span, snippet(cx, new_pat_span, kind).into_owned()),
+ (arg_span, format!("{}.{}s{}()", map.maybe_par(), kind, mutbl)),
+ ],
+ );
+ },
+ );
+ }
+ }
+ }
+}
+
+/// Returns `true` if the pattern is a `PatWild` or an ident prefixed with `_`.
+fn pat_is_wild<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx PatKind<'_>, body: &'tcx Expr<'_>) -> bool {
+ match *pat {
+ PatKind::Wild => true,
+ PatKind::Binding(_, id, ident, None) if ident.as_str().starts_with('_') => !is_local_used(cx, body, id),
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/for_loops_over_fallibles.rs b/src/tools/clippy/clippy_lints/src/loops/for_loops_over_fallibles.rs
new file mode 100644
index 000000000..77de90fd7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/for_loops_over_fallibles.rs
@@ -0,0 +1,65 @@
+use super::FOR_LOOPS_OVER_FALLIBLES;
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_hir::{Expr, Pat};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+/// Checks for `for` loops over `Option`s and `Result`s.
+pub(super) fn check(cx: &LateContext<'_>, pat: &Pat<'_>, arg: &Expr<'_>, method_name: Option<&str>) {
+ let ty = cx.typeck_results().expr_ty(arg);
+ if is_type_diagnostic_item(cx, ty, sym::Option) {
+ let help_string = if let Some(method_name) = method_name {
+ format!(
+ "consider replacing `for {0} in {1}.{method_name}()` with `if let Some({0}) = {1}`",
+ snippet(cx, pat.span, "_"),
+ snippet(cx, arg.span, "_")
+ )
+ } else {
+ format!(
+ "consider replacing `for {0} in {1}` with `if let Some({0}) = {1}`",
+ snippet(cx, pat.span, "_"),
+ snippet(cx, arg.span, "_")
+ )
+ };
+ span_lint_and_help(
+ cx,
+ FOR_LOOPS_OVER_FALLIBLES,
+ arg.span,
+ &format!(
+ "for loop over `{0}`, which is an `Option`. This is more readably written as an \
+ `if let` statement",
+ snippet(cx, arg.span, "_")
+ ),
+ None,
+ &help_string,
+ );
+ } else if is_type_diagnostic_item(cx, ty, sym::Result) {
+ let help_string = if let Some(method_name) = method_name {
+ format!(
+ "consider replacing `for {0} in {1}.{method_name}()` with `if let Ok({0}) = {1}`",
+ snippet(cx, pat.span, "_"),
+ snippet(cx, arg.span, "_")
+ )
+ } else {
+ format!(
+ "consider replacing `for {0} in {1}` with `if let Ok({0}) = {1}`",
+ snippet(cx, pat.span, "_"),
+ snippet(cx, arg.span, "_")
+ )
+ };
+ span_lint_and_help(
+ cx,
+ FOR_LOOPS_OVER_FALLIBLES,
+ arg.span,
+ &format!(
+ "for loop over `{0}`, which is a `Result`. This is more readably written as an \
+ `if let` statement",
+ snippet(cx, arg.span, "_")
+ ),
+ None,
+ &help_string,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/iter_next_loop.rs b/src/tools/clippy/clippy_lints/src/loops/iter_next_loop.rs
new file mode 100644
index 000000000..e640c62eb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/iter_next_loop.rs
@@ -0,0 +1,21 @@
+use super::ITER_NEXT_LOOP;
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_trait_method;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
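+/// Reports e.g. `for x in iter.next()`: iterating over the returned `Option` compiles, but
+/// is almost certainly not what was intended.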
+pub(super) fn check(cx: &LateContext<'_>, arg: &Expr<'_>) -> bool {
+ if is_trait_method(cx, arg, sym::Iterator) {
+ span_lint(
+ cx,
+ ITER_NEXT_LOOP,
+ arg.span,
+ "you are iterating over `Iterator::next()` which is an Option; this will compile but is \
+ probably not what you want",
+ );
+ true
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
new file mode 100644
index 000000000..215c83a7e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
@@ -0,0 +1,158 @@
+use super::utils::make_iterator_snippet;
+use super::MANUAL_FIND;
+use clippy_utils::{
+ diagnostics::span_lint_and_then, higher, is_lang_ctor, path_res, peel_blocks_with_stmt,
+ source::snippet_with_applicability, ty::implements_trait,
+};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{
+ def::Res, lang_items::LangItem, BindingAnnotation, Block, Expr, ExprKind, HirId, Node, Pat, PatKind, Stmt, StmtKind,
+};
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ span: Span,
+ expr: &'tcx Expr<'_>,
+) {
+ let inner_expr = peel_blocks_with_stmt(body);
+ // Check for the specific case that the result is returned and optimize suggestion for that (more
+ // cases can be added later)
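+ // i.e. (illustrative):
+ //     for x in iter {
+ //         if cond(x) { return Some(x); }
+ //     }
+ //     None
+ // which can usually be replaced by `iter.find(|&x| cond(x))`.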
+ if_chain! {
+ if let Some(higher::If { cond, then, r#else: None, }) = higher::If::hir(inner_expr);
+ if let Some(binding_id) = get_binding(pat);
+ if let ExprKind::Block(block, _) = then.kind;
+ if let [stmt] = block.stmts;
+ if let StmtKind::Semi(semi) = stmt.kind;
+ if let ExprKind::Ret(Some(ret_value)) = semi.kind;
+ if let ExprKind::Call(Expr { kind: ExprKind::Path(ctor), .. }, [inner_ret]) = ret_value.kind;
+ if is_lang_ctor(cx, ctor, LangItem::OptionSome);
+ if path_res(cx, inner_ret) == Res::Local(binding_id);
+ if let Some((last_stmt, last_ret)) = last_stmt_and_ret(cx, expr);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let mut snippet = make_iterator_snippet(cx, arg, &mut applicability);
+ // Checks if `pat` is a single reference to a binding (`&x`)
+ let is_ref_to_binding =
+ matches!(pat.kind, PatKind::Ref(inner, _) if matches!(inner.kind, PatKind::Binding(..)));
+ // If `pat` is not a binding or a reference to a binding (`x` or `&x`)
+ // we need to map it to the binding returned by the function (i.e. `.map(|(x, _)| x)`)
+ if !(matches!(pat.kind, PatKind::Binding(..)) || is_ref_to_binding) {
+ snippet.push_str(
+ &format!(
+ ".map(|{}| {})",
+ snippet_with_applicability(cx, pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
+ )[..],
+ );
+ }
+ let ty = cx.typeck_results().expr_ty(inner_ret);
+ if cx.tcx.lang_items().copy_trait().map_or(false, |id| implements_trait(cx, ty, id, &[])) {
+ snippet.push_str(
+ &format!(
+ ".find(|{}{}| {})",
+ "&".repeat(1 + usize::from(is_ref_to_binding)),
+ snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
+ snippet_with_applicability(cx, cond.span, "..", &mut applicability),
+ )[..],
+ );
+ if is_ref_to_binding {
+ snippet.push_str(".copied()");
+ }
+ } else {
+ applicability = Applicability::MaybeIncorrect;
+ snippet.push_str(
+ &format!(
+ ".find(|{}| {})",
+ snippet_with_applicability(cx, inner_ret.span, "..", &mut applicability),
+ snippet_with_applicability(cx, cond.span, "..", &mut applicability),
+ )[..],
+ );
+ }
+ // Extends to `last_stmt` to include semicolon in case of `return None;`
+ let lint_span = span.to(last_stmt.span).to(last_ret.span);
+ span_lint_and_then(
+ cx,
+ MANUAL_FIND,
+ lint_span,
+ "manual implementation of `Iterator::find`",
+ |diag| {
+ if applicability == Applicability::MaybeIncorrect {
+ diag.note("you may need to dereference some variables");
+ }
+ diag.span_suggestion(
+ lint_span,
+ "replace with an iterator",
+ snippet,
+ applicability,
+ );
+ },
+ );
+ }
+ }
+}
+
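+/// Returns the `HirId` of the binding in `pat`, provided `pat` contains exactly one binding
+/// and it is unannotated (no `ref` or `mut`).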
+fn get_binding(pat: &Pat<'_>) -> Option<HirId> {
+ let mut hir_id = None;
+ let mut count = 0;
+ pat.each_binding(|annotation, id, _, _| {
+ count += 1;
+ if count > 1 {
+ hir_id = None;
+ return;
+ }
+ if let BindingAnnotation::Unannotated = annotation {
+ hir_id = Some(id);
+ }
+ });
+ hir_id
+}
+
+// Returns the last statement and the last return if the function fits the format expected by the lint
+fn last_stmt_and_ret<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+) -> Option<(&'tcx Stmt<'tcx>, &'tcx Expr<'tcx>)> {
+ // Returns last non-return statement and the last return
+ fn extract<'tcx>(block: &Block<'tcx>) -> Option<(&'tcx Stmt<'tcx>, &'tcx Expr<'tcx>)> {
+ if let [.., last_stmt] = block.stmts {
+ if let Some(ret) = block.expr {
+ return Some((last_stmt, ret));
+ }
+ if_chain! {
+ if let [.., snd_last, _] = block.stmts;
+ if let StmtKind::Semi(last_expr) = last_stmt.kind;
+ if let ExprKind::Ret(Some(ret)) = last_expr.kind;
+ then {
+ return Some((snd_last, ret));
+ }
+ }
+ }
+ None
+ }
+ let mut parent_iter = cx.tcx.hir().parent_iter(expr.hir_id);
+ if_chain! {
+ // This should be the loop
+ if let Some((node_hir, Node::Stmt(..))) = parent_iter.next();
+ // This should be the function body
+ if let Some((_, Node::Block(block))) = parent_iter.next();
+ if let Some((last_stmt, last_ret)) = extract(block);
+ if last_stmt.hir_id == node_hir;
+ if let ExprKind::Path(path) = &last_ret.kind;
+ if is_lang_ctor(cx, path, LangItem::OptionNone);
+ if let Some((_, Node::Expr(_block))) = parent_iter.next();
+ // This includes the function header
+ if let Some((_, func)) = parent_iter.next();
+ if func.fn_kind().is_some();
+ then {
+ Some((block.stmts.last().unwrap(), last_ret))
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
new file mode 100644
index 000000000..1d6ddf4b9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
@@ -0,0 +1,85 @@
+use super::utils::make_iterator_snippet;
+use super::MANUAL_FLATTEN;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher;
+use clippy_utils::visitors::is_local_used;
+use clippy_utils::{is_lang_ctor, path_to_local_id, peel_blocks_with_stmt};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{OptionSome, ResultOk};
+use rustc_hir::{Expr, Pat, PatKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Span;
+
+/// Check for unnecessary `if let` usage in a for loop where only the `Some` or `Ok` variant of the
+/// iterator element is used.
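+///
+/// For example (illustrative), `for opt in vec { if let Some(x) = opt { .. } }` is reported
+/// with the suggestion to iterate over `vec.into_iter().flatten()` instead.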
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ span: Span,
+) {
+ let inner_expr = peel_blocks_with_stmt(body);
+ if_chain! {
+ if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: None })
+ = higher::IfLet::hir(cx, inner_expr);
+ // Ensure match_expr in `if let` statement is the same as the pat from the for-loop
+ if let PatKind::Binding(_, pat_hir_id, _, _) = pat.kind;
+ if path_to_local_id(let_expr, pat_hir_id);
+ // Ensure the `if let` statement is for the `Some` variant of `Option` or the `Ok` variant of `Result`
+ if let PatKind::TupleStruct(ref qpath, _, _) = let_pat.kind;
+ let some_ctor = is_lang_ctor(cx, qpath, OptionSome);
+ let ok_ctor = is_lang_ctor(cx, qpath, ResultOk);
+ if some_ctor || ok_ctor;
+ // Ensure expr in `if let` is not used afterwards
+ if !is_local_used(cx, if_then, pat_hir_id);
+ then {
+ let if_let_type = if some_ctor { "Some" } else { "Ok" };
+ // Prepare the error message
+ let msg = format!("unnecessary `if let` since only the `{}` variant of the iterator element is used", if_let_type);
+
+ // Prepare the help message
+ let mut applicability = Applicability::MaybeIncorrect;
+ let arg_snippet = make_iterator_snippet(cx, arg, &mut applicability);
+ let copied = match cx.typeck_results().expr_ty(let_expr).kind() {
+ ty::Ref(_, inner, _) => match inner.kind() {
+ ty::Ref(..) => ".copied()",
+ _ => ""
+ }
+ _ => ""
+ };
+
+ let sugg = format!("{arg_snippet}{copied}.flatten()");
+
+ // If suggestion is not a one-liner, it won't be shown inline within the error message. In that case,
+ // it will be shown in the extra `help` message at the end, which is why the first `help_msg` needs
+ // to refer to the correct relative position of the suggestion.
+ let help_msg = if sugg.contains('\n') {
+ "remove the `if let` statement in the for loop and then..."
+ } else {
+ "...and remove the `if let` statement in the for loop"
+ };
+
+ span_lint_and_then(
+ cx,
+ MANUAL_FLATTEN,
+ span,
+ &msg,
+ |diag| {
+ diag.span_suggestion(
+ arg.span,
+ "try",
+ sugg,
+ applicability,
+ );
+ diag.span_help(
+ inner_expr.span,
+ help_msg,
+ );
+ }
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
new file mode 100644
index 000000000..b31015d19
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
@@ -0,0 +1,461 @@
+use super::{IncrementVisitor, InitializeVisitor, MANUAL_MEMCPY};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::is_copy;
+use clippy_utils::{get_enclosing_block, higher, path_to_local, sugg};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::walk_block;
+use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, Pat, PatKind, StmtKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::sym;
+use std::fmt::Display;
+use std::iter::Iterator;
+
+/// Checks for `for` loops that sequentially copy items from one slice-like
+/// object to another.
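+///
+/// For example (illustrative), `for i in 0..src.len() { dst[i] = src[i]; }` is reported with
+/// the suggestion `dst[..src.len()].copy_from_slice(&src[..]);` (or `clone_from_slice` for
+/// non-`Copy` element types).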
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+) -> bool {
+ if let Some(higher::Range {
+ start: Some(start),
+ end: Some(end),
+ limits,
+ }) = higher::Range::hir(arg)
+ {
+ // the var must be a single name
+ if let PatKind::Binding(_, canonical_id, _, _) = pat.kind {
+ let mut starts = vec![Start {
+ id: canonical_id,
+ kind: StartKind::Range,
+ }];
+
+ // This is one of the few ways to return different iterators
+ // derived from: https://stackoverflow.com/questions/29760668/conditionally-iterate-over-one-of-several-possible-iterators/52064434#52064434
+ let mut iter_a = None;
+ let mut iter_b = None;
+
+ if let ExprKind::Block(block, _) = body.kind {
+ if let Some(loop_counters) = get_loop_counters(cx, block, expr) {
+ starts.extend(loop_counters);
+ }
+ iter_a = Some(get_assignments(block, &starts));
+ } else {
+ iter_b = Some(get_assignment(body));
+ }
+
+ let assignments = iter_a.into_iter().flatten().chain(iter_b.into_iter());
+
+ let big_sugg = assignments
+ // The only statements in the for loops can be indexed assignments from
+ // indexed retrievals (except increments of loop counters).
+ .map(|o| {
+ o.and_then(|(lhs, rhs)| {
+ let rhs = fetch_cloned_expr(rhs);
+ if_chain! {
+ if let ExprKind::Index(base_left, idx_left) = lhs.kind;
+ if let ExprKind::Index(base_right, idx_right) = rhs.kind;
+ if let Some(ty) = get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_left));
+ if get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_right)).is_some();
+ if let Some((start_left, offset_left)) = get_details_from_idx(cx, idx_left, &starts);
+ if let Some((start_right, offset_right)) = get_details_from_idx(cx, idx_right, &starts);
+
+ // Source and destination must be different
+ if path_to_local(base_left) != path_to_local(base_right);
+ then {
+ Some((ty, IndexExpr { base: base_left, idx: start_left, idx_offset: offset_left },
+ IndexExpr { base: base_right, idx: start_right, idx_offset: offset_right }))
+ } else {
+ None
+ }
+ }
+ })
+ })
+ .map(|o| o.map(|(ty, dst, src)| build_manual_memcpy_suggestion(cx, start, end, limits, ty, &dst, &src)))
+ .collect::<Option<Vec<_>>>()
+ .filter(|v| !v.is_empty())
+ .map(|v| v.join("\n "));
+
+ if let Some(big_sugg) = big_sugg {
+ span_lint_and_sugg(
+ cx,
+ MANUAL_MEMCPY,
+ expr.span,
+ "it looks like you're manually copying between slices",
+ "try replacing the loop by",
+ big_sugg,
+ Applicability::Unspecified,
+ );
+ return true;
+ }
+ }
+ }
+ false
+}
+
+fn build_manual_memcpy_suggestion<'tcx>(
+ cx: &LateContext<'tcx>,
+ start: &Expr<'_>,
+ end: &Expr<'_>,
+ limits: ast::RangeLimits,
+ elem_ty: Ty<'tcx>,
+ dst: &IndexExpr<'_>,
+ src: &IndexExpr<'_>,
+) -> String {
+ fn print_offset(offset: MinifyingSugg<'static>) -> MinifyingSugg<'static> {
+ if offset.to_string() == "0" {
+ sugg::EMPTY.into()
+ } else {
+ offset
+ }
+ }
+
+ let print_limit = |end: &Expr<'_>, end_str: &str, base: &Expr<'_>, sugg: MinifyingSugg<'static>| {
+ if_chain! {
+ if let ExprKind::MethodCall(method, len_args, _) = end.kind;
+ if method.ident.name == sym::len;
+ if len_args.len() == 1;
+ if let Some(arg) = len_args.get(0);
+ if path_to_local(arg) == path_to_local(base);
+ then {
+ if sugg.to_string() == end_str {
+ sugg::EMPTY.into()
+ } else {
+ sugg
+ }
+ } else {
+ match limits {
+ ast::RangeLimits::Closed => {
+ sugg + &sugg::ONE.into()
+ },
+ ast::RangeLimits::HalfOpen => sugg,
+ }
+ }
+ }
+ };
+
+ let start_str = Sugg::hir(cx, start, "").into();
+ let end_str: MinifyingSugg<'_> = Sugg::hir(cx, end, "").into();
+
+ let print_offset_and_limit = |idx_expr: &IndexExpr<'_>| match idx_expr.idx {
+ StartKind::Range => (
+ print_offset(apply_offset(&start_str, &idx_expr.idx_offset)).into_sugg(),
+ print_limit(
+ end,
+ end_str.to_string().as_str(),
+ idx_expr.base,
+ apply_offset(&end_str, &idx_expr.idx_offset),
+ )
+ .into_sugg(),
+ ),
+ StartKind::Counter { initializer } => {
+ let counter_start = Sugg::hir(cx, initializer, "").into();
+ (
+ print_offset(apply_offset(&counter_start, &idx_expr.idx_offset)).into_sugg(),
+ print_limit(
+ end,
+ end_str.to_string().as_str(),
+ idx_expr.base,
+ apply_offset(&end_str, &idx_expr.idx_offset) + &counter_start - &start_str,
+ )
+ .into_sugg(),
+ )
+ },
+ };
+
+ let (dst_offset, dst_limit) = print_offset_and_limit(dst);
+ let (src_offset, src_limit) = print_offset_and_limit(src);
+
+ let dst_base_str = snippet(cx, dst.base.span, "???");
+ let src_base_str = snippet(cx, src.base.span, "???");
+
+ let dst = if dst_offset == sugg::EMPTY && dst_limit == sugg::EMPTY {
+ dst_base_str
+ } else {
+ format!(
+ "{}[{}..{}]",
+ dst_base_str,
+ dst_offset.maybe_par(),
+ dst_limit.maybe_par()
+ )
+ .into()
+ };
+
+ let method_str = if is_copy(cx, elem_ty) {
+ "copy_from_slice"
+ } else {
+ "clone_from_slice"
+ };
+
+ format!(
+ "{}.{}(&{}[{}..{}]);",
+ dst,
+ method_str,
+ src_base_str,
+ src_offset.maybe_par(),
+ src_limit.maybe_par()
+ )
+}
+
+/// A wrapper around `Sugg`. Beyond what `Sugg` does, this drops unnecessary `0`s
+/// and replaces a variable subtracted from itself with `0`.
+/// It exists for the convenience of the overloaded operators; plain functions could do the same.
+#[derive(Clone)]
+struct MinifyingSugg<'a>(Sugg<'a>);
+
+impl<'a> Display for MinifyingSugg<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl<'a> MinifyingSugg<'a> {
+ fn into_sugg(self) -> Sugg<'a> {
+ self.0
+ }
+}
+
+impl<'a> From<Sugg<'a>> for MinifyingSugg<'a> {
+ fn from(sugg: Sugg<'a>) -> Self {
+ Self(sugg)
+ }
+}
+
+impl std::ops::Add for &MinifyingSugg<'static> {
+ type Output = MinifyingSugg<'static>;
+ fn add(self, rhs: &MinifyingSugg<'static>) -> MinifyingSugg<'static> {
+ match (self.to_string().as_str(), rhs.to_string().as_str()) {
+ ("0", _) => rhs.clone(),
+ (_, "0") => self.clone(),
+ (_, _) => (&self.0 + &rhs.0).into(),
+ }
+ }
+}
+
+impl std::ops::Sub for &MinifyingSugg<'static> {
+ type Output = MinifyingSugg<'static>;
+ fn sub(self, rhs: &MinifyingSugg<'static>) -> MinifyingSugg<'static> {
+ match (self.to_string().as_str(), rhs.to_string().as_str()) {
+ (_, "0") => self.clone(),
+ ("0", _) => (-rhs.0.clone()).into(),
+ (x, y) if x == y => sugg::ZERO.into(),
+ (_, _) => (&self.0 - &rhs.0).into(),
+ }
+ }
+}
+
+impl std::ops::Add<&MinifyingSugg<'static>> for MinifyingSugg<'static> {
+ type Output = MinifyingSugg<'static>;
+ fn add(self, rhs: &MinifyingSugg<'static>) -> MinifyingSugg<'static> {
+ match (self.to_string().as_str(), rhs.to_string().as_str()) {
+ ("0", _) => rhs.clone(),
+ (_, "0") => self,
+ (_, _) => (self.0 + &rhs.0).into(),
+ }
+ }
+}
+
+impl std::ops::Sub<&MinifyingSugg<'static>> for MinifyingSugg<'static> {
+ type Output = MinifyingSugg<'static>;
+ fn sub(self, rhs: &MinifyingSugg<'static>) -> MinifyingSugg<'static> {
+ match (self.to_string().as_str(), rhs.to_string().as_str()) {
+ (_, "0") => self,
+ ("0", _) => (-rhs.0.clone()).into(),
+ (x, y) if x == y => sugg::ZERO.into(),
+ (_, _) => (self.0 - &rhs.0).into(),
+ }
+ }
+}
+
+/// A wrapper around `MinifyingSugg` that carries an operator, currying-style, so that the
+/// suggested code becomes shorter (e.g. `foo + -bar` becomes `foo - bar`).
+struct Offset {
+ value: MinifyingSugg<'static>,
+ sign: OffsetSign,
+}
+
+#[derive(Clone, Copy)]
+enum OffsetSign {
+ Positive,
+ Negative,
+}
+
+impl Offset {
+ fn negative(value: Sugg<'static>) -> Self {
+ Self {
+ value: value.into(),
+ sign: OffsetSign::Negative,
+ }
+ }
+
+ fn positive(value: Sugg<'static>) -> Self {
+ Self {
+ value: value.into(),
+ sign: OffsetSign::Positive,
+ }
+ }
+
+ fn empty() -> Self {
+ Self::positive(sugg::ZERO)
+ }
+}
+
+fn apply_offset(lhs: &MinifyingSugg<'static>, rhs: &Offset) -> MinifyingSugg<'static> {
+ match rhs.sign {
+ OffsetSign::Positive => lhs + &rhs.value,
+ OffsetSign::Negative => lhs - &rhs.value,
+ }
+}
+
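+/// How an index expression starts: either with the `for` loop's own range variable, or with a
+/// separate counter variable whose initializer is recorded (see `get_loop_counters`).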
+#[derive(Debug, Clone, Copy)]
+enum StartKind<'hir> {
+ Range,
+ Counter { initializer: &'hir Expr<'hir> },
+}
+
+struct IndexExpr<'hir> {
+ base: &'hir Expr<'hir>,
+ idx: StartKind<'hir>,
+ idx_offset: Offset,
+}
+
+struct Start<'hir> {
+ id: HirId,
+ kind: StartKind<'hir>,
+}
+
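+/// If `ty` is a `Vec`, a slice, an array, or a reference to one of those, returns its element type.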
+fn get_slice_like_element_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+ match ty.kind() {
+ ty::Adt(adt, subs) if cx.tcx.is_diagnostic_item(sym::Vec, adt.did()) => Some(subs.type_at(0)),
+ ty::Ref(_, subty, _) => get_slice_like_element_ty(cx, *subty),
+ ty::Slice(ty) | ty::Array(ty, _) => Some(*ty),
+ _ => None,
+ }
+}
+
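+/// Strips a trailing `.clone()` call, returning its receiver; otherwise returns the expression unchanged.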
+fn fetch_cloned_expr<'tcx>(expr: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
+ if_chain! {
+ if let ExprKind::MethodCall(method, args, _) = expr.kind;
+ if method.ident.name == sym::clone;
+ if args.len() == 1;
+ if let Some(arg) = args.get(0);
+ then { arg } else { expr }
+ }
+}
+
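+/// Extracts the `StartKind` and its `Offset` from an index expression of the form
+/// `start`, `start + offset`, `offset + start`, or `start - offset`.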
+fn get_details_from_idx<'tcx>(
+ cx: &LateContext<'tcx>,
+ idx: &Expr<'_>,
+ starts: &[Start<'tcx>],
+) -> Option<(StartKind<'tcx>, Offset)> {
+ fn get_start<'tcx>(e: &Expr<'_>, starts: &[Start<'tcx>]) -> Option<StartKind<'tcx>> {
+ let id = path_to_local(e)?;
+ starts.iter().find(|start| start.id == id).map(|start| start.kind)
+ }
+
+ fn get_offset<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'_>, starts: &[Start<'tcx>]) -> Option<Sugg<'static>> {
+ match &e.kind {
+ ExprKind::Lit(l) => match l.node {
+ ast::LitKind::Int(x, _ty) => Some(Sugg::NonParen(x.to_string().into())),
+ _ => None,
+ },
+ ExprKind::Path(..) if get_start(e, starts).is_none() => Some(Sugg::hir(cx, e, "???")),
+ _ => None,
+ }
+ }
+
+ match idx.kind {
+ ExprKind::Binary(op, lhs, rhs) => match op.node {
+ BinOpKind::Add => {
+ let offset_opt = get_start(lhs, starts)
+ .and_then(|s| get_offset(cx, rhs, starts).map(|o| (s, o)))
+ .or_else(|| get_start(rhs, starts).and_then(|s| get_offset(cx, lhs, starts).map(|o| (s, o))));
+
+ offset_opt.map(|(s, o)| (s, Offset::positive(o)))
+ },
+ BinOpKind::Sub => {
+ get_start(lhs, starts).and_then(|s| get_offset(cx, rhs, starts).map(|o| (s, Offset::negative(o))))
+ },
+ _ => None,
+ },
+ ExprKind::Path(..) => get_start(idx, starts).map(|s| (s, Offset::empty())),
+ _ => None,
+ }
+}
+
+fn get_assignment<'tcx>(e: &'tcx Expr<'tcx>) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)> {
+ if let ExprKind::Assign(lhs, rhs, _) = e.kind {
+ Some((lhs, rhs))
+ } else {
+ None
+ }
+}
+
+/// Gets the assignments from the given block.
+/// The returned iterator yields `None` for any expression that is not an assignment, while
+/// filtering out the increments of the given whitelisted loop counters, because its job is to
+/// make sure there is nothing other than assignments and those increments.
+fn get_assignments<'a, 'tcx>(
+ Block { stmts, expr, .. }: &'tcx Block<'tcx>,
+ loop_counters: &'a [Start<'tcx>],
+) -> impl Iterator<Item = Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)>> + 'a {
+ // As the `filter` and `map` below do different things, I think putting them together
+ // would just increase complexity. (cc #3188 and #4193)
+ stmts
+ .iter()
+ .filter_map(move |stmt| match stmt.kind {
+ StmtKind::Local(..) | StmtKind::Item(..) => None,
+ StmtKind::Expr(e) | StmtKind::Semi(e) => Some(e),
+ })
+ .chain((*expr).into_iter())
+ .filter(move |e| {
+ if let ExprKind::AssignOp(_, place, _) = e.kind {
+ path_to_local(place).map_or(false, |id| {
+ !loop_counters
+ .iter()
+ // skip the first item which should be `StartKind::Range`
+ // this makes it possible to use the slice with `StartKind::Range` in the same iterator loop.
+ .skip(1)
+ .any(|counter| counter.id == id)
+ })
+ } else {
+ true
+ }
+ })
+ .map(get_assignment)
+}
+
+fn get_loop_counters<'a, 'tcx>(
+ cx: &'a LateContext<'tcx>,
+ body: &'tcx Block<'tcx>,
+ expr: &'tcx Expr<'_>,
+) -> Option<impl Iterator<Item = Start<'tcx>> + 'a> {
+ // Look for variables that are incremented once per loop iteration.
+ let mut increment_visitor = IncrementVisitor::new(cx);
+ walk_block(&mut increment_visitor, body);
+
+ // For each candidate, check the parent block to see if
+ // it's initialized to zero at the start of the loop.
+ get_enclosing_block(cx, expr.hir_id).and_then(|block| {
+ increment_visitor
+ .into_results()
+ .filter_map(move |var_id| {
+ let mut initialize_visitor = InitializeVisitor::new(cx, expr, var_id);
+ walk_block(&mut initialize_visitor, block);
+
+ initialize_visitor.get_result().map(|(_, _, initializer)| Start {
+ id: var_id,
+ kind: StartKind::Counter { initializer },
+ })
+ })
+ .into()
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
new file mode 100644
index 000000000..0696afa39
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
@@ -0,0 +1,56 @@
+use super::MISSING_SPIN_LOOP;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_no_std_crate;
+use rustc_errors::Applicability;
+use rustc_hir::{Block, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::sym;
+
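+/// Recursively strips blocks, unary operators, and binary operators from the loop condition to
+/// reach the underlying atomic method call, e.g. in `!b.load(Ordering::Acquire)` this returns the
+/// `load` call.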
+fn unpack_cond<'tcx>(cond: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
+ match &cond.kind {
+ ExprKind::Block(
+ Block {
+ stmts: [],
+ expr: Some(e),
+ ..
+ },
+ _,
+ )
+ | ExprKind::Unary(_, e) => unpack_cond(e),
+ ExprKind::Binary(_, l, r) => {
+ let l = unpack_cond(l);
+ if let ExprKind::MethodCall(..) = l.kind {
+ l
+ } else {
+ unpack_cond(r)
+ }
+ },
+ _ => cond,
+ }
+}
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, body: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Block(Block { stmts: [], expr: None, ..}, _) = body.kind;
+ if let ExprKind::MethodCall(method, [callee, ..], _) = unpack_cond(cond).kind;
+ if [sym::load, sym::compare_exchange, sym::compare_exchange_weak].contains(&method.ident.name);
+ if let ty::Adt(def, _substs) = cx.typeck_results().expr_ty(callee).kind();
+ if cx.tcx.is_diagnostic_item(sym::AtomicBool, def.did());
+ then {
+ span_lint_and_sugg(
+ cx,
+ MISSING_SPIN_LOOP,
+ body.span,
+ "busy-waiting loop should at least have a spin loop hint",
+ "try this",
+ (if is_no_std_crate(cx) {
+ "{ core::hint::spin_loop() }"
+ } else {
+ "{ std::hint::spin_loop() }"
+ }).into(),
+ Applicability::MachineApplicable
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/mod.rs b/src/tools/clippy/clippy_lints/src/loops/mod.rs
new file mode 100644
index 000000000..ed270bd49
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/mod.rs
@@ -0,0 +1,768 @@
+mod empty_loop;
+mod explicit_counter_loop;
+mod explicit_into_iter_loop;
+mod explicit_iter_loop;
+mod for_kv_map;
+mod for_loops_over_fallibles;
+mod iter_next_loop;
+mod manual_find;
+mod manual_flatten;
+mod manual_memcpy;
+mod missing_spin_loop;
+mod mut_range_bound;
+mod needless_collect;
+mod needless_range_loop;
+mod never_loop;
+mod same_item_push;
+mod single_element_loop;
+mod utils;
+mod while_immutable_condition;
+mod while_let_loop;
+mod while_let_on_iterator;
+
+use clippy_utils::higher;
+use rustc_hir::{Expr, ExprKind, LoopSource, Pat};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use utils::{make_iterator_snippet, IncrementVisitor, InitializeVisitor};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for for-loops that manually copy items between
+ /// slices that could be optimized by having a memcpy.
+ ///
+ /// ### Why is this bad?
+ /// It is not as fast as a memcpy.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let src = vec![1];
+ /// # let mut dst = vec![0; 65];
+ /// for i in 0..src.len() {
+ /// dst[i + 64] = src[i];
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let src = vec![1];
+ /// # let mut dst = vec![0; 65];
+ /// dst[64..(src.len() + 64)].clone_from_slice(&src[..]);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MANUAL_MEMCPY,
+ perf,
+ "manually copying items between slices"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for looping over the range of `0..len` of some
+ /// collection just to get the values by index.
+ ///
+ /// ### Why is this bad?
+ /// Just iterating the collection itself makes the intent
+ /// more clear and is probably faster.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let vec = vec!['a', 'b', 'c'];
+ /// for i in 0..vec.len() {
+ /// println!("{}", vec[i]);
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let vec = vec!['a', 'b', 'c'];
+ /// for i in vec {
+ /// println!("{}", i);
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_RANGE_LOOP,
+ style,
+ "for-looping over a range of indices where an iterator over items would do"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for loops on `x.iter()` where `&x` will do, and
+ /// suggests the latter.
+ ///
+ /// ### Why is this bad?
+ /// Readability.
+ ///
+ /// ### Known problems
+ /// False negatives. We currently only warn on some known
+ /// types.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // with `y` a `Vec` or slice:
+ /// # let y = vec![1];
+ /// for x in y.iter() {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let y = vec![1];
+ /// for x in &y {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXPLICIT_ITER_LOOP,
+ pedantic,
+ "for-looping over `_.iter()` or `_.iter_mut()` when `&_` or `&mut _` would do"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for loops on `y.into_iter()` where `y` will do, and
+ /// suggests the latter.
+ ///
+ /// ### Why is this bad?
+ /// Readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let y = vec![1];
+ /// // with `y` a `Vec` or slice:
+ /// for x in y.into_iter() {
+ /// // ..
+ /// }
+ /// ```
+ /// can be rewritten to
+ /// ```rust
+ /// # let y = vec![1];
+ /// for x in y {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXPLICIT_INTO_ITER_LOOP,
+ pedantic,
+ "for-looping over `_.into_iter()` when `_` would do"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for loops on `x.next()`.
+ ///
+ /// ### Why is this bad?
+ /// `next()` returns `Some(value)` if there was a
+ /// value, or `None` otherwise. The insidious thing is that `Option<_>`
+ /// implements `IntoIterator`, so that possibly one value will be iterated,
+ /// leading to some hard-to-find bugs. No one will want to write such code
+ /// [except to win an Underhanded Rust
+ /// Contest](https://www.reddit.com/r/rust/comments/3hb0wm/underhanded_rust_contest/cu5yuhr).
+ ///
+ /// ### Example
+ /// ```ignore
+ /// for x in y.next() {
+ /// ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ITER_NEXT_LOOP,
+ correctness,
+ "for-looping over `_.next()` which is probably not intended"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `for` loops over `Option` or `Result` values.
+ ///
+ /// ### Why is this bad?
+ /// Readability. This is more clearly expressed as an `if
+ /// let`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let opt = Some(1);
+ /// # let res: Result<i32, std::io::Error> = Ok(1);
+ /// for x in opt {
+ /// // ..
+ /// }
+ ///
+ /// for x in &res {
+ /// // ..
+ /// }
+ ///
+ /// for x in res.iter() {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let opt = Some(1);
+ /// # let res: Result<i32, std::io::Error> = Ok(1);
+ /// if let Some(x) = opt {
+ /// // ..
+ /// }
+ ///
+ /// if let Ok(x) = res {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub FOR_LOOPS_OVER_FALLIBLES,
+ suspicious,
+ "for-looping over an `Option` or a `Result`, which is more clearly expressed as an `if let`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects `loop + match` combinations that are easier
+ /// written as a `while let` loop.
+ ///
+ /// ### Why is this bad?
+ /// The `while let` loop is usually shorter and more
+ /// readable.
+ ///
+ /// ### Known problems
+ /// Sometimes the wrong binding is displayed ([#383](https://github.com/rust-lang/rust-clippy/issues/383)).
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// # let y = Some(1);
+ /// loop {
+ /// let x = match y {
+ /// Some(x) => x,
+ /// None => break,
+ /// };
+ /// // .. do something with x
+ /// }
+ /// // is easier written as
+ /// while let Some(x) = y {
+ /// // .. do something with x
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WHILE_LET_LOOP,
+ complexity,
+ "`loop { if let { ... } else break }`, which can be written as a `while let` loop"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions collecting an iterator when collect
+ /// is not needed.
+ ///
+ /// ### Why is this bad?
+ /// `collect` causes the allocation of a new data structure,
+ /// when this allocation may not be needed.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let iterator = vec![1].into_iter();
+ /// let len = iterator.clone().collect::<Vec<_>>().len();
+ /// // should be
+ /// let len = iterator.count();
+ /// ```
+ #[clippy::version = "1.30.0"]
+ pub NEEDLESS_COLLECT,
+ perf,
+ "collecting an iterator when collect is not needed"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks `for` loops over slices with an explicit counter
+ /// and suggests the use of `.enumerate()`.
+ ///
+ /// ### Why is this bad?
+ /// Using `.enumerate()` makes the intent more clear,
+ /// declutters the code and may be faster in some instances.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let v = vec![1];
+ /// # fn bar(bar: usize, baz: usize) {}
+ /// let mut i = 0;
+ /// for item in &v {
+ /// bar(i, *item);
+ /// i += 1;
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let v = vec![1];
+ /// # fn bar(bar: usize, baz: usize) {}
+ /// for (i, item) in v.iter().enumerate() { bar(i, *item); }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXPLICIT_COUNTER_LOOP,
+ complexity,
+ "for-looping with an explicit counter when `_.enumerate()` would do"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for empty `loop` expressions.
+ ///
+ /// ### Why is this bad?
+ /// These busy loops burn CPU cycles without doing
+ /// anything. It is _almost always_ a better idea to `panic!` than to have
+ /// a busy loop.
+ ///
+ /// If panicking isn't possible, think of the environment and either:
+ /// - block on something
+ /// - sleep the thread for some microseconds
+ /// - yield or pause the thread
+ ///
+ /// For `std` targets, this can be done with
+ /// [`std::thread::sleep`](https://doc.rust-lang.org/std/thread/fn.sleep.html)
+ /// or [`std::thread::yield_now`](https://doc.rust-lang.org/std/thread/fn.yield_now.html).
+ ///
+ /// For `no_std` targets, doing this is more complicated, especially because
+ /// `#[panic_handler]`s can't panic. To stop/pause the thread, you will
+ /// probably need to invoke some target-specific intrinsic. Examples include:
+ /// - [`x86_64::instructions::hlt`](https://docs.rs/x86_64/0.12.2/x86_64/instructions/fn.hlt.html)
+ /// - [`cortex_m::asm::wfi`](https://docs.rs/cortex-m/0.6.3/cortex_m/asm/fn.wfi.html)
+ ///
+ /// ### Example
+ /// ```no_run
+ /// loop {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EMPTY_LOOP,
+ suspicious,
+ "empty `loop {}`, which should block or sleep"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `while let` expressions on iterators.
+ ///
+ /// ### Why is this bad?
+ /// Readability. A simple `for` loop is shorter and conveys
+ /// the intent better.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// while let Some(val) = iter.next() {
+ /// ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```ignore
+ /// for val in &mut iter {
+ /// ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WHILE_LET_ON_ITERATOR,
+ style,
+ "using a `while let` loop instead of a for loop on an iterator"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for iterating a map (`HashMap` or `BTreeMap`) and
+ /// ignoring either the keys or values.
+ ///
+ /// ### Why is this bad?
+ /// Readability. The `keys` and `values` methods can be used to express
+ /// more clearly that the values or keys, respectively, are not needed.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// for (k, _) in &map {
+ /// ..
+ /// }
+ /// ```
+ ///
+ /// could be replaced by
+ ///
+ /// ```ignore
+ /// for k in map.keys() {
+ /// ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FOR_KV_MAP,
+ style,
+ "looping on a map using `iter` when `keys` or `values` would do"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for loops that will always `break`, `return` or
+ /// `continue` an outer loop.
+ ///
+ /// ### Why is this bad?
+ /// This loop never loops; all it does is obfuscate the
+ /// code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// loop {
+ /// ..;
+ /// break;
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEVER_LOOP,
+ correctness,
+ "any loop that will always `break` or `return`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for loops which have a range bound that is a mutable variable
+ ///
+ /// ### Why is this bad?
+ /// One might think that modifying the mutable variable changes the loop bounds, but the range is only evaluated once.
+ ///
+ /// ### Known problems
+ /// False positive when mutation is followed by a `break`, but the `break` is not immediately
+ /// after the mutation:
+ ///
+ /// ```rust
+ /// let mut x = 5;
+ /// for _ in 0..x {
+ /// x += 1; // x is a range bound that is mutated
+ /// ..; // some other expression
+ /// break; // leaves the loop, so mutation is not an issue
+ /// }
+ /// ```
+ ///
+ /// False positive on nested loops ([#6072](https://github.com/rust-lang/rust-clippy/issues/6072))
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut foo = 42;
+ /// for i in 0..foo {
+ /// foo -= 1;
+ /// println!("{}", i); // prints numbers from 0 to 42, not 0 to 21
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MUT_RANGE_BOUND,
+ suspicious,
+ "for loop over a range where one of the bounds is a mutable variable"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks whether variables used within a `while` loop condition
+ /// can be (and are) mutated in the body.
+ ///
+ /// ### Why is this bad?
+ /// If the condition is unchanged, entering the body of the loop
+ /// will lead to an infinite loop.
+ ///
+ /// ### Known problems
+ /// If the `while`-loop is in a closure, the check for mutation of the
+ /// condition variables in the body can cause false negatives. For example, when only `Upvar` `a` is
+ /// in the condition and only `Upvar` `b` gets mutated in the body, the lint will not trigger.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let i = 0;
+ /// while i > 10 {
+ /// println!("let me loop forever!");
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WHILE_IMMUTABLE_CONDITION,
+ correctness,
+ "variables used within while expression are not mutated in the body"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks whether a for loop is being used to push a constant
+ /// value into a Vec.
+ ///
+ /// ### Why is this bad?
+ /// This kind of operation can be expressed more succinctly with
+ /// `vec![item; SIZE]` or `vec.resize(NEW_SIZE, item)`, and using these alternatives may also
+ /// have better performance.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let item1 = 2;
+ /// let item2 = 3;
+ /// let mut vec: Vec<u8> = Vec::new();
+ /// for _ in 0..20 {
+ /// vec.push(item1);
+ /// }
+ /// for _ in 0..30 {
+ /// vec.push(item2);
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let item1 = 2;
+ /// let item2 = 3;
+ /// let mut vec: Vec<u8> = vec![item1; 20];
+ /// vec.resize(20 + 30, item2);
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub SAME_ITEM_PUSH,
+ style,
+ "the same item is pushed inside of a for loop"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks whether a for loop has a single element.
+ ///
+ /// ### Why is this bad?
+ /// There is no reason to have a loop of a
+ /// single element.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let item1 = 2;
+ /// for item in &[item1] {
+ /// println!("{}", item);
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let item1 = 2;
+ /// let item = &item1;
+ /// println!("{}", item);
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub SINGLE_ELEMENT_LOOP,
+ complexity,
+ "there is no reason to have a single element loop"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary `if let` usage in a `for` loop
+ /// where only the `Some` or `Ok` variant of the iterator element is used.
+ ///
+ /// ### Why is this bad?
+ /// It is verbose and can be simplified
+ /// by first calling the `flatten` method on the `Iterator`.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// let x = vec![Some(1), Some(2), Some(3)];
+ /// for n in x {
+ /// if let Some(n) = n {
+ /// println!("{}", n);
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = vec![Some(1), Some(2), Some(3)];
+ /// for n in x.into_iter().flatten() {
+ /// println!("{}", n);
+ /// }
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub MANUAL_FLATTEN,
+ complexity,
+ "for loops over `Option`s or `Result`s with a single expression can be simplified"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for empty spin loops
+ ///
+ /// ### Why is this bad?
+ /// The loop body should have something like `thread::park()` or at least
+ /// `std::hint::spin_loop()` to avoid needlessly burning cycles and to conserve
+ /// energy. Even better, use an actual lock if possible.
+ ///
+ /// ### Known problems
+ /// This lint doesn't currently trigger on `while let` or
+ /// `loop { match .. { .. } }` loops, which would be considered idiomatic in
+ /// combination with e.g. `AtomicBool::compare_exchange_weak`.
+ ///
+ /// ### Example
+ ///
+ /// ```ignore
+ /// use core::sync::atomic::{AtomicBool, Ordering};
+ /// let b = AtomicBool::new(true);
+ /// // give a ref to `b` to another thread, wait for it to become false
+ /// while b.load(Ordering::Acquire) {};
+ /// ```
+ /// Use instead:
+ /// ```rust,no_run
+ ///# use core::sync::atomic::{AtomicBool, Ordering};
+ ///# let b = AtomicBool::new(true);
+ /// while b.load(Ordering::Acquire) {
+ /// std::hint::spin_loop()
+ /// }
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub MISSING_SPIN_LOOP,
+ perf,
+ "An empty busy waiting loop"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual implementations of `Iterator::find`
+ ///
+ /// ### Why is this bad?
+ /// It doesn't affect performance, but using `find` is shorter and easier to read.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// fn example(arr: Vec<i32>) -> Option<i32> {
+ /// for el in arr {
+ /// if el == 1 {
+ /// return Some(el);
+ /// }
+ /// }
+ /// None
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn example(arr: Vec<i32>) -> Option<i32> {
+ /// arr.into_iter().find(|&el| el == 1)
+ /// }
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub MANUAL_FIND,
+ complexity,
+ "manual implementation of `Iterator::find`"
+}
+
+declare_lint_pass!(Loops => [
+ MANUAL_MEMCPY,
+ MANUAL_FLATTEN,
+ NEEDLESS_RANGE_LOOP,
+ EXPLICIT_ITER_LOOP,
+ EXPLICIT_INTO_ITER_LOOP,
+ ITER_NEXT_LOOP,
+ FOR_LOOPS_OVER_FALLIBLES,
+ WHILE_LET_LOOP,
+ NEEDLESS_COLLECT,
+ EXPLICIT_COUNTER_LOOP,
+ EMPTY_LOOP,
+ WHILE_LET_ON_ITERATOR,
+ FOR_KV_MAP,
+ NEVER_LOOP,
+ MUT_RANGE_BOUND,
+ WHILE_IMMUTABLE_CONDITION,
+ SAME_ITEM_PUSH,
+ SINGLE_ELEMENT_LOOP,
+ MISSING_SPIN_LOOP,
+ MANUAL_FIND,
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Loops {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let for_loop = higher::ForLoop::hir(expr);
+ if let Some(higher::ForLoop {
+ pat,
+ arg,
+ body,
+ loop_id,
+ span,
+ }) = for_loop
+ {
+ // we don't want to check expanded macros
+ // this check is not at the top of the function
+ // since higher::for_loop expressions are marked as expansions
+ if body.span.from_expansion() {
+ return;
+ }
+ check_for_loop(cx, pat, arg, body, expr, span);
+ if let ExprKind::Block(block, _) = body.kind {
+ never_loop::check(cx, block, loop_id, span, for_loop.as_ref());
+ }
+ }
+
+ // we don't want to check expanded macros
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ // check for never_loop
+ if let ExprKind::Loop(block, ..) = expr.kind {
+ never_loop::check(cx, block, expr.hir_id, expr.span, None);
+ }
+
+ // check for `loop { if let {} else break }` that could be `while let`
+ // (also matches an explicit "match" instead of "if let")
+ // (even if the "match" or "if let" is used for declaration)
+ if let ExprKind::Loop(block, _, LoopSource::Loop, _) = expr.kind {
+ // also check for empty `loop {}` statements, skipping those in #[panic_handler]
+ empty_loop::check(cx, expr, block);
+ while_let_loop::check(cx, expr, block);
+ }
+
+ while_let_on_iterator::check(cx, expr);
+
+ if let Some(higher::While { condition, body }) = higher::While::hir(expr) {
+ while_immutable_condition::check(cx, condition, body);
+ missing_spin_loop::check(cx, condition, body);
+ }
+
+ needless_collect::check(expr, cx);
+ }
+}
+
+fn check_for_loop<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+ span: Span,
+) {
+ let is_manual_memcpy_triggered = manual_memcpy::check(cx, pat, arg, body, expr);
+ if !is_manual_memcpy_triggered {
+ needless_range_loop::check(cx, pat, arg, body, expr);
+ explicit_counter_loop::check(cx, pat, arg, body, expr);
+ }
+ check_for_loop_arg(cx, pat, arg);
+ for_kv_map::check(cx, pat, arg, body);
+ mut_range_bound::check(cx, arg, body);
+ single_element_loop::check(cx, pat, arg, body, expr);
+ same_item_push::check(cx, pat, arg, body, expr);
+ manual_flatten::check(cx, pat, arg, body, span);
+ manual_find::check(cx, pat, arg, body, span, expr);
+}
+
+fn check_for_loop_arg(cx: &LateContext<'_>, pat: &Pat<'_>, arg: &Expr<'_>) {
+ let mut next_loop_linted = false; // whether or not ITER_NEXT_LOOP lint was used
+
+ if let ExprKind::MethodCall(method, [self_arg], _) = arg.kind {
+ let method_name = method.ident.as_str();
+ // check for looping over x.iter() or x.iter_mut(), could use &x or &mut x
+ match method_name {
+ "iter" | "iter_mut" => {
+ explicit_iter_loop::check(cx, self_arg, arg, method_name);
+ for_loops_over_fallibles::check(cx, pat, self_arg, Some(method_name));
+ },
+ "into_iter" => {
+ explicit_iter_loop::check(cx, self_arg, arg, method_name);
+ explicit_into_iter_loop::check(cx, self_arg, arg);
+ for_loops_over_fallibles::check(cx, pat, self_arg, Some(method_name));
+ },
+ "next" => {
+ next_loop_linted = iter_next_loop::check(cx, arg);
+ },
+ _ => {},
+ }
+ }
+
+ if !next_loop_linted {
+ for_loops_over_fallibles::check(cx, pat, arg, None);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
new file mode 100644
index 000000000..aedf3810b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
@@ -0,0 +1,167 @@
+use super::MUT_RANGE_BOUND;
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::{get_enclosing_block, higher, path_to_local};
+use if_chain::if_chain;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, Node, PatKind};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::LateContext;
+use rustc_middle::{mir::FakeReadCause, ty};
+use rustc_span::source_map::Span;
+use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
+
+pub(super) fn check(cx: &LateContext<'_>, arg: &Expr<'_>, body: &Expr<'_>) {
+ if_chain! {
+ if let Some(higher::Range {
+ start: Some(start),
+ end: Some(end),
+ ..
+ }) = higher::Range::hir(arg);
+ let (mut_id_start, mut_id_end) = (check_for_mutability(cx, start), check_for_mutability(cx, end));
+ if mut_id_start.is_some() || mut_id_end.is_some();
+ then {
+ let (span_low, span_high) = check_for_mutation(cx, body, mut_id_start, mut_id_end);
+ mut_warn_with_span(cx, span_low);
+ mut_warn_with_span(cx, span_high);
+ }
+ }
+}
+
+fn mut_warn_with_span(cx: &LateContext<'_>, span: Option<Span>) {
+ if let Some(sp) = span {
+ span_lint_and_note(
+ cx,
+ MUT_RANGE_BOUND,
+ sp,
+ "attempt to mutate range bound within loop",
+ None,
+ "the range of the loop is unchanged",
+ );
+ }
+}
+
+fn check_for_mutability(cx: &LateContext<'_>, bound: &Expr<'_>) -> Option<HirId> {
+ if_chain! {
+ if let Some(hir_id) = path_to_local(bound);
+ if let Node::Pat(pat) = cx.tcx.hir().get(hir_id);
+ if let PatKind::Binding(BindingAnnotation::Mutable, ..) = pat.kind;
+ then {
+ return Some(hir_id);
+ }
+ }
+ None
+}
+
+fn check_for_mutation<'tcx>(
+ cx: &LateContext<'tcx>,
+ body: &Expr<'_>,
+ bound_id_start: Option<HirId>,
+ bound_id_end: Option<HirId>,
+) -> (Option<Span>, Option<Span>) {
+ let mut delegate = MutatePairDelegate {
+ cx,
+ hir_id_low: bound_id_start,
+ hir_id_high: bound_id_end,
+ span_low: None,
+ span_high: None,
+ };
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ ExprUseVisitor::new(
+ &mut delegate,
+ &infcx,
+ body.hir_id.owner,
+ cx.param_env,
+ cx.typeck_results(),
+ )
+ .walk_expr(body);
+ });
+
+ delegate.mutation_span()
+}
+
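+/// `ExprUseVisitor` delegate that records the spans at which the two range bounds are mutably
+/// borrowed or mutated inside the loop body.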
+struct MutatePairDelegate<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ hir_id_low: Option<HirId>,
+ hir_id_high: Option<HirId>,
+ span_low: Option<Span>,
+ span_high: Option<Span>,
+}
+
+impl<'tcx> Delegate<'tcx> for MutatePairDelegate<'_, 'tcx> {
+ fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
+
+ fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, diag_expr_id: HirId, bk: ty::BorrowKind) {
+ if bk == ty::BorrowKind::MutBorrow {
+ if let PlaceBase::Local(id) = cmt.place.base {
+ if Some(id) == self.hir_id_low && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) {
+ self.span_low = Some(self.cx.tcx.hir().span(diag_expr_id));
+ }
+ if Some(id) == self.hir_id_high && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) {
+ self.span_high = Some(self.cx.tcx.hir().span(diag_expr_id));
+ }
+ }
+ }
+ }
+
+ fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, diag_expr_id: HirId) {
+ if let PlaceBase::Local(id) = cmt.place.base {
+ if Some(id) == self.hir_id_low && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) {
+ self.span_low = Some(self.cx.tcx.hir().span(diag_expr_id));
+ }
+ if Some(id) == self.hir_id_high && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) {
+ self.span_high = Some(self.cx.tcx.hir().span(diag_expr_id));
+ }
+ }
+ }
+
+ fn fake_read(&mut self, _: &rustc_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {}
+}
+
+impl MutatePairDelegate<'_, '_> {
+ fn mutation_span(&self) -> (Option<Span>, Option<Span>) {
+ (self.span_low, self.span_high)
+ }
+}
+
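+/// Checks whether a `break` immediately follows the given expression in its enclosing block;
+/// if so, the mutation cannot affect a later iteration.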
+struct BreakAfterExprVisitor {
+ hir_id: HirId,
+ past_expr: bool,
+ past_candidate: bool,
+ break_after_expr: bool,
+}
+
+impl BreakAfterExprVisitor {
+ pub fn is_found(cx: &LateContext<'_>, hir_id: HirId) -> bool {
+ let mut visitor = BreakAfterExprVisitor {
+ hir_id,
+ past_expr: false,
+ past_candidate: false,
+ break_after_expr: false,
+ };
+
+ get_enclosing_block(cx, hir_id).map_or(false, |block| {
+ visitor.visit_block(block);
+ visitor.break_after_expr
+ })
+ }
+}
+
+impl<'tcx> intravisit::Visitor<'tcx> for BreakAfterExprVisitor {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
+ if self.past_candidate {
+ return;
+ }
+
+ if expr.hir_id == self.hir_id {
+ self.past_expr = true;
+ } else if self.past_expr {
+ if matches!(&expr.kind, ExprKind::Break(..)) {
+ self.break_after_expr = true;
+ }
+
+ self.past_candidate = true;
+ } else {
+ intravisit::walk_expr(self, expr);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_collect.rs b/src/tools/clippy/clippy_lints/src/loops/needless_collect.rs
new file mode 100644
index 000000000..ddaffc751
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/needless_collect.rs
@@ -0,0 +1,369 @@
+use super::NEEDLESS_COLLECT;
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_hir_and_then};
+use clippy_utils::source::{snippet, snippet_with_applicability};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{can_move_expr_to_closure, is_trait_method, path_to_local, path_to_local_id, CaptureKind};
+use if_chain::if_chain;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_errors::{Applicability, MultiSpan};
+use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
+use rustc_hir::{Block, Expr, ExprKind, HirId, HirIdSet, Local, Mutability, Node, PatKind, Stmt, StmtKind};
+use rustc_lint::LateContext;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::subst::GenericArgKind;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::sym;
+use rustc_span::Span;
+
+const NEEDLESS_COLLECT_MSG: &str = "avoid using `collect()` when not needed";
+
+pub(super) fn check<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) {
+ check_needless_collect_direct_usage(expr, cx);
+ check_needless_collect_indirect_usage(expr, cx);
+}
+fn check_needless_collect_direct_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) {
+ if_chain! {
+ if let ExprKind::MethodCall(method, args, _) = expr.kind;
+ if let ExprKind::MethodCall(chain_method, _, _) = args[0].kind;
+ if chain_method.ident.name == sym!(collect) && is_trait_method(cx, &args[0], sym::Iterator);
+ then {
+ let ty = cx.typeck_results().expr_ty(&args[0]);
+ let mut applicability = Applicability::MaybeIncorrect;
+ let is_empty_sugg = "next().is_none()".to_string();
+ let method_name = method.ident.name.as_str();
+ let sugg = if is_type_diagnostic_item(cx, ty, sym::Vec) ||
+ is_type_diagnostic_item(cx, ty, sym::VecDeque) ||
+ is_type_diagnostic_item(cx, ty, sym::LinkedList) ||
+ is_type_diagnostic_item(cx, ty, sym::BinaryHeap) {
+ match method_name {
+ "len" => "count()".to_string(),
+ "is_empty" => is_empty_sugg,
+ "contains" => {
+ let contains_arg = snippet_with_applicability(cx, args[1].span, "??", &mut applicability);
+ let (arg, pred) = contains_arg
+ .strip_prefix('&')
+ .map_or(("&x", &*contains_arg), |s| ("x", s));
+ format!("any(|{}| x == {})", arg, pred)
+ }
+ _ => return,
+ }
+ }
+ else if is_type_diagnostic_item(cx, ty, sym::BTreeMap) ||
+ is_type_diagnostic_item(cx, ty, sym::HashMap) {
+ match method_name {
+ "is_empty" => is_empty_sugg,
+ _ => return,
+ }
+ }
+ else {
+ return;
+ };
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_COLLECT,
+ chain_method.ident.span.with_hi(expr.span.hi()),
+ NEEDLESS_COLLECT_MSG,
+ "replace with",
+ sugg,
+ applicability,
+ );
+ }
+ }
+}
+
+fn check_needless_collect_indirect_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) {
+ if let ExprKind::Block(block, _) = expr.kind {
+ for stmt in block.stmts {
+ if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let PatKind::Binding(_, id, ..) = local.pat.kind;
+ if let Some(init_expr) = local.init;
+ if let ExprKind::MethodCall(method_name, &[ref iter_source], ..) = init_expr.kind;
+ if method_name.ident.name == sym!(collect) && is_trait_method(cx, init_expr, sym::Iterator);
+ let ty = cx.typeck_results().expr_ty(init_expr);
+ if is_type_diagnostic_item(cx, ty, sym::Vec) ||
+ is_type_diagnostic_item(cx, ty, sym::VecDeque) ||
+ is_type_diagnostic_item(cx, ty, sym::BinaryHeap) ||
+ is_type_diagnostic_item(cx, ty, sym::LinkedList);
+ let iter_ty = cx.typeck_results().expr_ty(iter_source);
+ if let Some(iter_calls) = detect_iter_and_into_iters(block, id, cx, get_captured_ids(cx, iter_ty));
+ if let [iter_call] = &*iter_calls;
+ then {
+ let mut used_count_visitor = UsedCountVisitor {
+ cx,
+ id,
+ count: 0,
+ };
+ walk_block(&mut used_count_visitor, block);
+ if used_count_visitor.count > 1 {
+ return;
+ }
+
+ // Suggest replacing iter_call with iter_replacement, and removing stmt
+ let mut span = MultiSpan::from_span(method_name.ident.span);
+ span.push_span_label(iter_call.span, "the iterator could be used here instead");
+ span_lint_hir_and_then(
+ cx,
+ super::NEEDLESS_COLLECT,
+ init_expr.hir_id,
+ span,
+ NEEDLESS_COLLECT_MSG,
+ |diag| {
+ let iter_replacement = format!("{}{}", Sugg::hir(cx, iter_source, ".."), iter_call.get_iter_method(cx));
+ diag.multipart_suggestion(
+ iter_call.get_suggestion_text(),
+ vec![
+ (stmt.span, String::new()),
+ (iter_call.span, iter_replacement)
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+ }
+ }
+}
+
+struct IterFunction {
+ func: IterFunctionKind,
+ span: Span,
+}
+impl IterFunction {
+ fn get_iter_method(&self, cx: &LateContext<'_>) -> String {
+ match &self.func {
+ IterFunctionKind::IntoIter => String::new(),
+ IterFunctionKind::Len => String::from(".count()"),
+ IterFunctionKind::IsEmpty => String::from(".next().is_none()"),
+ IterFunctionKind::Contains(span) => {
+ let s = snippet(cx, *span, "..");
+ if let Some(stripped) = s.strip_prefix('&') {
+ format!(".any(|x| x == {})", stripped)
+ } else {
+ format!(".any(|x| x == *{})", s)
+ }
+ },
+ }
+ }
+ fn get_suggestion_text(&self) -> &'static str {
+ match &self.func {
+ IterFunctionKind::IntoIter => {
+ "use the original Iterator instead of collecting it and then producing a new one"
+ },
+ IterFunctionKind::Len => {
+ "take the original Iterator's count instead of collecting it and finding the length"
+ },
+ IterFunctionKind::IsEmpty => {
+ "check if the original Iterator has anything instead of collecting it and seeing if it's empty"
+ },
+ IterFunctionKind::Contains(_) => {
+ "check if the original Iterator contains an element instead of collecting then checking"
+ },
+ }
+ }
+}
+enum IterFunctionKind {
+ IntoIter,
+ Len,
+ IsEmpty,
+ Contains(Span),
+}
+
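+/// Visitor that records how the collected container (`target`) is used afterwards: calls to
+/// `into_iter`, `len`, `is_empty`, or `contains` are candidates for replacement, while any other
+/// use sets `seen_other` and aborts the lint.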
+struct IterFunctionVisitor<'a, 'tcx> {
+ illegal_mutable_capture_ids: HirIdSet,
+ current_mutably_captured_ids: HirIdSet,
+ cx: &'a LateContext<'tcx>,
+ uses: Vec<Option<IterFunction>>,
+ hir_id_uses_map: FxHashMap<HirId, usize>,
+ current_statement_hir_id: Option<HirId>,
+ seen_other: bool,
+ target: HirId,
+}
+impl<'tcx> Visitor<'tcx> for IterFunctionVisitor<'_, 'tcx> {
+ fn visit_block(&mut self, block: &'tcx Block<'tcx>) {
+ for (expr, hir_id) in block.stmts.iter().filter_map(get_expr_and_hir_id_from_stmt) {
+ self.visit_block_expr(expr, hir_id);
+ }
+ if let Some(expr) = block.expr {
+ self.visit_block_expr(expr, None);
+ }
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
+ // Check function calls on our collection
+ if let ExprKind::MethodCall(method_name, [recv, args @ ..], _) = &expr.kind {
+ if method_name.ident.name == sym!(collect) && is_trait_method(self.cx, expr, sym::Iterator) {
+ self.current_mutably_captured_ids = get_captured_ids(self.cx, self.cx.typeck_results().expr_ty(recv));
+ self.visit_expr(recv);
+ return;
+ }
+
+ if path_to_local_id(recv, self.target) {
+ if self
+ .illegal_mutable_capture_ids
+ .intersection(&self.current_mutably_captured_ids)
+ .next()
+ .is_none()
+ {
+ if let Some(hir_id) = self.current_statement_hir_id {
+ self.hir_id_uses_map.insert(hir_id, self.uses.len());
+ }
+ match method_name.ident.name.as_str() {
+ "into_iter" => self.uses.push(Some(IterFunction {
+ func: IterFunctionKind::IntoIter,
+ span: expr.span,
+ })),
+ "len" => self.uses.push(Some(IterFunction {
+ func: IterFunctionKind::Len,
+ span: expr.span,
+ })),
+ "is_empty" => self.uses.push(Some(IterFunction {
+ func: IterFunctionKind::IsEmpty,
+ span: expr.span,
+ })),
+ "contains" => self.uses.push(Some(IterFunction {
+ func: IterFunctionKind::Contains(args[0].span),
+ span: expr.span,
+ })),
+ _ => {
+ self.seen_other = true;
+ if let Some(hir_id) = self.current_statement_hir_id {
+ self.hir_id_uses_map.remove(&hir_id);
+ }
+ },
+ }
+ }
+ return;
+ }
+
+ if let Some(hir_id) = path_to_local(recv) {
+ if let Some(index) = self.hir_id_uses_map.remove(&hir_id) {
+ if self
+ .illegal_mutable_capture_ids
+ .intersection(&self.current_mutably_captured_ids)
+ .next()
+ .is_none()
+ {
+ if let Some(hir_id) = self.current_statement_hir_id {
+ self.hir_id_uses_map.insert(hir_id, index);
+ }
+ } else {
+ self.uses[index] = None;
+ }
+ }
+ }
+ }
+ // Check if the collection is used for anything else
+ if path_to_local_id(expr, self.target) {
+ self.seen_other = true;
+ } else {
+ walk_expr(self, expr);
+ }
+ }
+}
+
+impl<'tcx> IterFunctionVisitor<'_, 'tcx> {
+ fn visit_block_expr(&mut self, expr: &'tcx Expr<'tcx>, hir_id: Option<HirId>) {
+ self.current_statement_hir_id = hir_id;
+ self.current_mutably_captured_ids = get_captured_ids(self.cx, self.cx.typeck_results().expr_ty(expr));
+ self.visit_expr(expr);
+ }
+}
+
+fn get_expr_and_hir_id_from_stmt<'v>(stmt: &'v Stmt<'v>) -> Option<(&'v Expr<'v>, Option<HirId>)> {
+ match stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => Some((expr, None)),
+ StmtKind::Item(..) => None,
+ StmtKind::Local(Local { init, pat, .. }) => {
+ if let PatKind::Binding(_, hir_id, ..) = pat.kind {
+ init.map(|init_expr| (init_expr, Some(hir_id)))
+ } else {
+ init.map(|init_expr| (init_expr, None))
+ }
+ },
+ }
+}
+
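+/// Counts how many times the local with the given `id` is referenced.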
+struct UsedCountVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ id: HirId,
+ count: usize,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for UsedCountVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if path_to_local_id(expr, self.id) {
+ self.count += 1;
+ } else {
+ walk_expr(self, expr);
+ }
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+/// Detect the occurrences of calls to `iter` or `into_iter` for the
+/// given identifier
+fn detect_iter_and_into_iters<'tcx: 'a, 'a>(
+ block: &'tcx Block<'tcx>,
+ id: HirId,
+ cx: &'a LateContext<'tcx>,
+ captured_ids: HirIdSet,
+) -> Option<Vec<IterFunction>> {
+ let mut visitor = IterFunctionVisitor {
+ uses: Vec::new(),
+ target: id,
+ seen_other: false,
+ cx,
+ current_mutably_captured_ids: HirIdSet::default(),
+ illegal_mutable_capture_ids: captured_ids,
+ hir_id_uses_map: FxHashMap::default(),
+ current_statement_hir_id: None,
+ };
+ visitor.visit_block(block);
+ if visitor.seen_other {
+ None
+ } else {
+ Some(visitor.uses.into_iter().flatten().collect())
+ }
+}
+
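+/// Walks the generic arguments of `ty` and collects the `HirId`s of locals that are mutably
+/// captured by any closure found there.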
+fn get_captured_ids(cx: &LateContext<'_>, ty: Ty<'_>) -> HirIdSet {
+ fn get_captured_ids_recursive(cx: &LateContext<'_>, ty: Ty<'_>, set: &mut HirIdSet) {
+ match ty.kind() {
+ ty::Adt(_, generics) => {
+ for generic in *generics {
+ if let GenericArgKind::Type(ty) = generic.unpack() {
+ get_captured_ids_recursive(cx, ty, set);
+ }
+ }
+ },
+ ty::Closure(def_id, _) => {
+ let closure_hir_node = cx.tcx.hir().get_if_local(*def_id).unwrap();
+ if let Node::Expr(closure_expr) = closure_hir_node {
+ can_move_expr_to_closure(cx, closure_expr)
+ .unwrap()
+ .into_iter()
+ .for_each(|(hir_id, capture_kind)| {
+ if matches!(capture_kind, CaptureKind::Ref(Mutability::Mut)) {
+ set.insert(hir_id);
+ }
+ });
+ }
+ },
+ _ => (),
+ }
+ }
+
+ let mut set = HirIdSet::default();
+
+ get_captured_ids_recursive(cx, ty, &mut set);
+
+ set
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
new file mode 100644
index 000000000..a7ef562b2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
@@ -0,0 +1,380 @@
+use super::NEEDLESS_RANGE_LOOP;
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::source::snippet;
+use clippy_utils::ty::has_iter_method;
+use clippy_utils::visitors::is_local_used;
+use clippy_utils::{contains_name, higher, is_integer_const, match_trait_method, paths, sugg, SpanlessEq};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{BinOpKind, BorrowKind, Closure, Expr, ExprKind, HirId, Mutability, Pat, PatKind, QPath};
+use rustc_lint::LateContext;
+use rustc_middle::middle::region;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::{sym, Symbol};
+use std::iter::{self, Iterator};
+use std::mem;
+
+/// Checks for looping over a range and then indexing a sequence with it.
+/// The iteratee must be a range literal.
+#[expect(clippy::too_many_lines)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+) {
+ if let Some(higher::Range {
+ start: Some(start),
+ ref end,
+ limits,
+ }) = higher::Range::hir(arg)
+ {
+ // the var must be a single name
+ if let PatKind::Binding(_, canonical_id, ident, _) = pat.kind {
+ let mut visitor = VarVisitor {
+ cx,
+ var: canonical_id,
+ indexed_mut: FxHashSet::default(),
+ indexed_indirectly: FxHashMap::default(),
+ indexed_directly: FxHashMap::default(),
+ referenced: FxHashSet::default(),
+ nonindex: false,
+ prefer_mutable: false,
+ };
+ walk_expr(&mut visitor, body);
+
+ // linting condition: we only indexed one variable, and indexed it directly
+ if visitor.indexed_indirectly.is_empty() && visitor.indexed_directly.len() == 1 {
+ let (indexed, (indexed_extent, indexed_ty)) = visitor
+ .indexed_directly
+ .into_iter()
+ .next()
+ .expect("already checked that we have exactly 1 element");
+
+ // ensure that the indexed variable was declared before the loop, see #601
+ if let Some(indexed_extent) = indexed_extent {
+ let parent_def_id = cx.tcx.hir().get_parent_item(expr.hir_id);
+ let region_scope_tree = cx.tcx.region_scope_tree(parent_def_id);
+ let pat_extent = region_scope_tree.var_scope(pat.hir_id.local_id).unwrap();
+ if region_scope_tree.is_subscope_of(indexed_extent, pat_extent) {
+ return;
+ }
+ }
+
+ // don't lint if the container that is indexed does not have .iter() method
+ let has_iter = has_iter_method(cx, indexed_ty);
+ if has_iter.is_none() {
+ return;
+ }
+
+ // don't lint if the container that is indexed into is also used without
+ // indexing
+ if visitor.referenced.contains(&indexed) {
+ return;
+ }
+
+ let starts_at_zero = is_integer_const(cx, start, 0);
+
+ let skip = if starts_at_zero {
+ String::new()
+ } else if visitor.indexed_mut.contains(&indexed) && contains_name(indexed, start) {
+ return;
+ } else {
+ format!(".skip({})", snippet(cx, start.span, ".."))
+ };
+
+ let mut end_is_start_plus_val = false;
+
+ let take = if let Some(end) = *end {
+ let mut take_expr = end;
+
+ if let ExprKind::Binary(ref op, left, right) = end.kind {
+ if op.node == BinOpKind::Add {
+ let start_equal_left = SpanlessEq::new(cx).eq_expr(start, left);
+ let start_equal_right = SpanlessEq::new(cx).eq_expr(start, right);
+
+ if start_equal_left {
+ take_expr = right;
+ } else if start_equal_right {
+ take_expr = left;
+ }
+
+ end_is_start_plus_val = start_equal_left | start_equal_right;
+ }
+ }
+
+ if is_len_call(end, indexed) || is_end_eq_array_len(cx, end, limits, indexed_ty) {
+ String::new()
+ } else if visitor.indexed_mut.contains(&indexed) && contains_name(indexed, take_expr) {
+ return;
+ } else {
+ match limits {
+ ast::RangeLimits::Closed => {
+ let take_expr = sugg::Sugg::hir(cx, take_expr, "<count>");
+ format!(".take({})", take_expr + sugg::ONE)
+ },
+ ast::RangeLimits::HalfOpen => {
+ format!(".take({})", snippet(cx, take_expr.span, ".."))
+ },
+ }
+ }
+ } else {
+ String::new()
+ };
+
+ let (ref_mut, method) = if visitor.indexed_mut.contains(&indexed) {
+ ("mut ", "iter_mut")
+ } else {
+ ("", "iter")
+ };
+
+ let take_is_empty = take.is_empty();
+ let mut method_1 = take;
+ let mut method_2 = skip;
+
+ if end_is_start_plus_val {
+ mem::swap(&mut method_1, &mut method_2);
+ }
+
+ if visitor.nonindex {
+ span_lint_and_then(
+ cx,
+ NEEDLESS_RANGE_LOOP,
+ arg.span,
+ &format!("the loop variable `{}` is used to index `{}`", ident.name, indexed),
+ |diag| {
+ multispan_sugg(
+ diag,
+ "consider using an iterator",
+ vec![
+ (pat.span, format!("({}, <item>)", ident.name)),
+ (
+ arg.span,
+ format!("{}.{}().enumerate(){}{}", indexed, method, method_1, method_2),
+ ),
+ ],
+ );
+ },
+ );
+ } else {
+ let repl = if starts_at_zero && take_is_empty {
+ format!("&{}{}", ref_mut, indexed)
+ } else {
+ format!("{}.{}(){}{}", indexed, method, method_1, method_2)
+ };
+
+ span_lint_and_then(
+ cx,
+ NEEDLESS_RANGE_LOOP,
+ arg.span,
+ &format!("the loop variable `{}` is only used to index `{}`", ident.name, indexed),
+ |diag| {
+ multispan_sugg(
+ diag,
+ "consider using an iterator",
+ vec![(pat.span, "<item>".to_string()), (arg.span, repl)],
+ );
+ },
+ );
+ }
+ }
+ }
+ }
+}
+
+fn is_len_call(expr: &Expr<'_>, var: Symbol) -> bool {
+ if_chain! {
+ if let ExprKind::MethodCall(method, len_args, _) = expr.kind;
+ if len_args.len() == 1;
+ if method.ident.name == sym::len;
+ if let ExprKind::Path(QPath::Resolved(_, path)) = len_args[0].kind;
+ if path.segments.len() == 1;
+ if path.segments[0].ident.name == var;
+ then {
+ return true;
+ }
+ }
+
+ false
+}
+
+fn is_end_eq_array_len<'tcx>(
+ cx: &LateContext<'tcx>,
+ end: &Expr<'_>,
+ limits: ast::RangeLimits,
+ indexed_ty: Ty<'tcx>,
+) -> bool {
+ if_chain! {
+ if let ExprKind::Lit(ref lit) = end.kind;
+ if let ast::LitKind::Int(end_int, _) = lit.node;
+ if let ty::Array(_, arr_len_const) = indexed_ty.kind();
+ if let Some(arr_len) = arr_len_const.try_eval_usize(cx.tcx, cx.param_env);
+ then {
+ return match limits {
+ ast::RangeLimits::Closed => end_int + 1 >= arr_len.into(),
+ ast::RangeLimits::HalfOpen => end_int >= arr_len.into(),
+ };
+ }
+ }
+
+ false
+}
+
+struct VarVisitor<'a, 'tcx> {
+ /// context reference
+ cx: &'a LateContext<'tcx>,
+ /// var name to look for as index
+ var: HirId,
+ /// indexed variables that are used mutably
+ indexed_mut: FxHashSet<Symbol>,
+ /// indirectly indexed variables (`v[(i + 4) % N]`); the extent is `None` for globals
+ indexed_indirectly: FxHashMap<Symbol, Option<region::Scope>>,
+ /// subset of `indexed` of vars that are indexed directly: `v[i]`
+ /// this will not contain cases like `v[calc_index(i)]` or `v[(i + 4) % N]`
+ indexed_directly: FxHashMap<Symbol, (Option<region::Scope>, Ty<'tcx>)>,
+ /// Any names that are used outside an index operation.
+ /// Used to detect things like `&mut vec` used together with `vec[i]`
+ referenced: FxHashSet<Symbol>,
+ /// has the loop variable been used in expressions other than the index of
+ /// an index op?
+ nonindex: bool,
+ /// Whether we are inside the `$` in `&mut $`, `$ = foo`, or `$.bar`, where `bar`
+ /// takes `&mut self`
+ prefer_mutable: bool,
+}
+
+impl<'a, 'tcx> VarVisitor<'a, 'tcx> {
+ fn check(&mut self, idx: &'tcx Expr<'_>, seqexpr: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) -> bool {
+ if_chain! {
+ // the indexed container is referenced by a name
+ if let ExprKind::Path(ref seqpath) = seqexpr.kind;
+ if let QPath::Resolved(None, seqvar) = *seqpath;
+ if seqvar.segments.len() == 1;
+ if is_local_used(self.cx, idx, self.var);
+ then {
+ if self.prefer_mutable {
+ self.indexed_mut.insert(seqvar.segments[0].ident.name);
+ }
+ let index_used_directly = matches!(idx.kind, ExprKind::Path(_));
+ let res = self.cx.qpath_res(seqpath, seqexpr.hir_id);
+ match res {
+ Res::Local(hir_id) => {
+ let parent_def_id = self.cx.tcx.hir().get_parent_item(expr.hir_id);
+ let extent = self.cx
+ .tcx
+ .region_scope_tree(parent_def_id)
+ .var_scope(hir_id.local_id)
+ .unwrap();
+ if index_used_directly {
+ self.indexed_directly.insert(
+ seqvar.segments[0].ident.name,
+ (Some(extent), self.cx.typeck_results().node_type(seqexpr.hir_id)),
+ );
+ } else {
+ self.indexed_indirectly.insert(seqvar.segments[0].ident.name, Some(extent));
+ }
+ return false; // no need to walk further *on the variable*
+ }
+ Res::Def(DefKind::Static (_)| DefKind::Const, ..) => {
+ if index_used_directly {
+ self.indexed_directly.insert(
+ seqvar.segments[0].ident.name,
+ (None, self.cx.typeck_results().node_type(seqexpr.hir_id)),
+ );
+ } else {
+ self.indexed_indirectly.insert(seqvar.segments[0].ident.name, None);
+ }
+ return false; // no need to walk further *on the variable*
+ }
+ _ => (),
+ }
+ }
+ }
+ true
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for VarVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ // a range index op
+ if let ExprKind::MethodCall(meth, [args_0, args_1, ..], _) = &expr.kind;
+ if (meth.ident.name == sym::index && match_trait_method(self.cx, expr, &paths::INDEX))
+ || (meth.ident.name == sym::index_mut && match_trait_method(self.cx, expr, &paths::INDEX_MUT));
+ if !self.check(args_1, args_0, expr);
+ then { return }
+ }
+
+ if_chain! {
+ // an index op
+ if let ExprKind::Index(seqexpr, idx) = expr.kind;
+ if !self.check(idx, seqexpr, expr);
+ then { return }
+ }
+
+ if_chain! {
+ // directly using a variable
+ if let ExprKind::Path(QPath::Resolved(None, path)) = expr.kind;
+ if let Res::Local(local_id) = path.res;
+ then {
+ if local_id == self.var {
+ self.nonindex = true;
+ } else {
+ // not the correct variable, but still a variable
+ self.referenced.insert(path.segments[0].ident.name);
+ }
+ }
+ }
+
+ let old = self.prefer_mutable;
+ match expr.kind {
+ ExprKind::AssignOp(_, lhs, rhs) | ExprKind::Assign(lhs, rhs, _) => {
+ self.prefer_mutable = true;
+ self.visit_expr(lhs);
+ self.prefer_mutable = false;
+ self.visit_expr(rhs);
+ },
+ ExprKind::AddrOf(BorrowKind::Ref, mutbl, expr) => {
+ if mutbl == Mutability::Mut {
+ self.prefer_mutable = true;
+ }
+ self.visit_expr(expr);
+ },
+ ExprKind::Call(f, args) => {
+ self.visit_expr(f);
+ for expr in args {
+ let ty = self.cx.typeck_results().expr_ty_adjusted(expr);
+ self.prefer_mutable = false;
+ if let ty::Ref(_, _, mutbl) = *ty.kind() {
+ if mutbl == Mutability::Mut {
+ self.prefer_mutable = true;
+ }
+ }
+ self.visit_expr(expr);
+ }
+ },
+ ExprKind::MethodCall(_, args, _) => {
+ let def_id = self.cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
+ for (ty, expr) in iter::zip(self.cx.tcx.fn_sig(def_id).inputs().skip_binder(), args) {
+ self.prefer_mutable = false;
+ if let ty::Ref(_, _, mutbl) = *ty.kind() {
+ if mutbl == Mutability::Mut {
+ self.prefer_mutable = true;
+ }
+ }
+ self.visit_expr(expr);
+ }
+ },
+ ExprKind::Closure(&Closure { body, .. }) => {
+ let body = self.cx.tcx.hir().body(body);
+ self.visit_expr(&body.value);
+ },
+ _ => walk_expr(self, expr),
+ }
+ self.prefer_mutable = old;
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
new file mode 100644
index 000000000..32de20f65
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
@@ -0,0 +1,218 @@
+use super::utils::make_iterator_snippet;
+use super::NEVER_LOOP;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher::ForLoop;
+use clippy_utils::source::snippet;
+use rustc_errors::Applicability;
+use rustc_hir::{Block, Expr, ExprKind, HirId, InlineAsmOperand, Pat, Stmt, StmtKind};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+use std::iter::{once, Iterator};
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ block: &Block<'_>,
+ loop_id: HirId,
+ span: Span,
+ for_loop: Option<&ForLoop<'_>>,
+) {
+ match never_loop_block(block, loop_id) {
+ NeverLoopResult::AlwaysBreak => {
+ span_lint_and_then(cx, NEVER_LOOP, span, "this loop never actually loops", |diag| {
+ if let Some(ForLoop {
+ arg: iterator,
+ pat,
+ span: for_span,
+ ..
+ }) = for_loop
+ {
+                    // Suggests using an `if let` instead. This is `Unspecified` because the
+                    // loop likely contains `break` statements, which would be invalid in an
+                    // `if let`.
+ diag.span_suggestion_verbose(
+ for_span.with_hi(iterator.span.hi()),
+ "if you need the first element of the iterator, try writing",
+ for_to_if_let_sugg(cx, iterator, pat),
+ Applicability::Unspecified,
+ );
+ }
+ });
+ },
+ NeverLoopResult::MayContinueMainLoop | NeverLoopResult::Otherwise => (),
+ }
+}
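+
+// A minimal illustrative sketch (not part of the check itself; `v` is a made-up binding):
+// a `for` loop whose body always breaks,
+//
+//     let v = vec![1, 2, 3];
+//     for x in v.iter() {
+//         println!("{x}");
+//         break;
+//     }
+//
+// is reported, and the suggestion above rewrites the loop header to
+// `if let Some(x) = v.iter().next()`; any remaining `break` in the body has to be
+// removed by hand, which is why the applicability stays `Unspecified`.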
+
+enum NeverLoopResult {
+    // A break/return always gets triggered, but not necessarily for the main loop.
+ AlwaysBreak,
+ // A continue may occur for the main loop.
+ MayContinueMainLoop,
+ Otherwise,
+}
+
+#[must_use]
+fn absorb_break(arg: &NeverLoopResult) -> NeverLoopResult {
+ match *arg {
+ NeverLoopResult::AlwaysBreak | NeverLoopResult::Otherwise => NeverLoopResult::Otherwise,
+ NeverLoopResult::MayContinueMainLoop => NeverLoopResult::MayContinueMainLoop,
+ }
+}
+
+// Combine two results for parts that are called in order.
+#[must_use]
+fn combine_seq(first: NeverLoopResult, second: NeverLoopResult) -> NeverLoopResult {
+ match first {
+ NeverLoopResult::AlwaysBreak | NeverLoopResult::MayContinueMainLoop => first,
+ NeverLoopResult::Otherwise => second,
+ }
+}
+
+// Combine two results where both parts are called but not necessarily in order.
+#[must_use]
+fn combine_both(left: NeverLoopResult, right: NeverLoopResult) -> NeverLoopResult {
+ match (left, right) {
+ (NeverLoopResult::MayContinueMainLoop, _) | (_, NeverLoopResult::MayContinueMainLoop) => {
+ NeverLoopResult::MayContinueMainLoop
+ },
+ (NeverLoopResult::AlwaysBreak, _) | (_, NeverLoopResult::AlwaysBreak) => NeverLoopResult::AlwaysBreak,
+ (NeverLoopResult::Otherwise, NeverLoopResult::Otherwise) => NeverLoopResult::Otherwise,
+ }
+}
+
+// Combine two results where only one of the parts may have been executed.
+#[must_use]
+fn combine_branches(b1: NeverLoopResult, b2: NeverLoopResult) -> NeverLoopResult {
+ match (b1, b2) {
+ (NeverLoopResult::AlwaysBreak, NeverLoopResult::AlwaysBreak) => NeverLoopResult::AlwaysBreak,
+ (NeverLoopResult::MayContinueMainLoop, _) | (_, NeverLoopResult::MayContinueMainLoop) => {
+ NeverLoopResult::MayContinueMainLoop
+ },
+ (NeverLoopResult::Otherwise, _) | (_, NeverLoopResult::Otherwise) => NeverLoopResult::Otherwise,
+ }
+}
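+
+// A small worked example of how these combinators compose (illustrative only): for a
+// body such as `if cond { break; } else { continue; }` inside the main loop,
+// `never_loop_expr` evaluates
+//
+//     combine_seq(Otherwise, combine_branches(AlwaysBreak, MayContinueMainLoop))
+//         == MayContinueMainLoop
+//
+// (`cond` contributes `Otherwise`, the `break` is `AlwaysBreak`, and the `continue`
+// targets the main loop), so `check` does not report the loop. If both branches broke,
+// the result would be `AlwaysBreak` and the loop would be linted.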
+
+fn never_loop_block(block: &Block<'_>, main_loop_id: HirId) -> NeverLoopResult {
+ let mut iter = block.stmts.iter().filter_map(stmt_to_expr).chain(block.expr);
+ never_loop_expr_seq(&mut iter, main_loop_id)
+}
+
+fn never_loop_expr_seq<'a, T: Iterator<Item = &'a Expr<'a>>>(es: &mut T, main_loop_id: HirId) -> NeverLoopResult {
+ es.map(|e| never_loop_expr(e, main_loop_id))
+ .fold(NeverLoopResult::Otherwise, combine_seq)
+}
+
+fn stmt_to_expr<'tcx>(stmt: &Stmt<'tcx>) -> Option<&'tcx Expr<'tcx>> {
+ match stmt.kind {
+ StmtKind::Semi(e, ..) | StmtKind::Expr(e, ..) => Some(e),
+ StmtKind::Local(local) => local.init,
+ StmtKind::Item(..) => None,
+ }
+}
+
+fn never_loop_expr(expr: &Expr<'_>, main_loop_id: HirId) -> NeverLoopResult {
+ match expr.kind {
+ ExprKind::Box(e)
+ | ExprKind::Unary(_, e)
+ | ExprKind::Cast(e, _)
+ | ExprKind::Type(e, _)
+ | ExprKind::Field(e, _)
+ | ExprKind::AddrOf(_, _, e)
+ | ExprKind::Repeat(e, _)
+ | ExprKind::DropTemps(e) => never_loop_expr(e, main_loop_id),
+ ExprKind::Let(let_expr) => never_loop_expr(let_expr.init, main_loop_id),
+ ExprKind::Array(es) | ExprKind::MethodCall(_, es, _) | ExprKind::Tup(es) => {
+ never_loop_expr_all(&mut es.iter(), main_loop_id)
+ },
+ ExprKind::Struct(_, fields, base) => {
+ let fields = never_loop_expr_all(&mut fields.iter().map(|f| f.expr), main_loop_id);
+ if let Some(base) = base {
+ combine_both(fields, never_loop_expr(base, main_loop_id))
+ } else {
+ fields
+ }
+ },
+ ExprKind::Call(e, es) => never_loop_expr_all(&mut once(e).chain(es.iter()), main_loop_id),
+ ExprKind::Binary(_, e1, e2)
+ | ExprKind::Assign(e1, e2, _)
+ | ExprKind::AssignOp(_, e1, e2)
+ | ExprKind::Index(e1, e2) => never_loop_expr_all(&mut [e1, e2].iter().copied(), main_loop_id),
+ ExprKind::Loop(b, _, _, _) => {
+            // Breaks can come from the inner loop, so remove them.
+ absorb_break(&never_loop_block(b, main_loop_id))
+ },
+ ExprKind::If(e, e2, e3) => {
+ let e1 = never_loop_expr(e, main_loop_id);
+ let e2 = never_loop_expr(e2, main_loop_id);
+ let e3 = e3
+ .as_ref()
+ .map_or(NeverLoopResult::Otherwise, |e| never_loop_expr(e, main_loop_id));
+ combine_seq(e1, combine_branches(e2, e3))
+ },
+ ExprKind::Match(e, arms, _) => {
+ let e = never_loop_expr(e, main_loop_id);
+ if arms.is_empty() {
+ e
+ } else {
+ let arms = never_loop_expr_branch(&mut arms.iter().map(|a| a.body), main_loop_id);
+ combine_seq(e, arms)
+ }
+ },
+ ExprKind::Block(b, _) => never_loop_block(b, main_loop_id),
+ ExprKind::Continue(d) => {
+ let id = d
+ .target_id
+ .expect("target ID can only be missing in the presence of compilation errors");
+ if id == main_loop_id {
+ NeverLoopResult::MayContinueMainLoop
+ } else {
+ NeverLoopResult::AlwaysBreak
+ }
+ },
+ ExprKind::Break(_, e) | ExprKind::Ret(e) => e.as_ref().map_or(NeverLoopResult::AlwaysBreak, |e| {
+ combine_seq(never_loop_expr(e, main_loop_id), NeverLoopResult::AlwaysBreak)
+ }),
+ ExprKind::InlineAsm(asm) => asm
+ .operands
+ .iter()
+ .map(|(o, _)| match o {
+ InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => {
+ never_loop_expr(expr, main_loop_id)
+ },
+ InlineAsmOperand::Out { expr, .. } => never_loop_expr_all(&mut expr.iter(), main_loop_id),
+ InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
+ never_loop_expr_all(&mut once(in_expr).chain(out_expr.iter()), main_loop_id)
+ },
+ InlineAsmOperand::Const { .. }
+ | InlineAsmOperand::SymFn { .. }
+ | InlineAsmOperand::SymStatic { .. } => NeverLoopResult::Otherwise,
+ })
+ .fold(NeverLoopResult::Otherwise, combine_both),
+ ExprKind::Yield(_, _)
+ | ExprKind::Closure { .. }
+ | ExprKind::Path(_)
+ | ExprKind::ConstBlock(_)
+ | ExprKind::Lit(_)
+ | ExprKind::Err => NeverLoopResult::Otherwise,
+ }
+}
+
+fn never_loop_expr_all<'a, T: Iterator<Item = &'a Expr<'a>>>(es: &mut T, main_loop_id: HirId) -> NeverLoopResult {
+ es.map(|e| never_loop_expr(e, main_loop_id))
+ .fold(NeverLoopResult::Otherwise, combine_both)
+}
+
+fn never_loop_expr_branch<'a, T: Iterator<Item = &'a Expr<'a>>>(e: &mut T, main_loop_id: HirId) -> NeverLoopResult {
+ e.map(|e| never_loop_expr(e, main_loop_id))
+ .fold(NeverLoopResult::AlwaysBreak, combine_branches)
+}
+
+fn for_to_if_let_sugg(cx: &LateContext<'_>, iterator: &Expr<'_>, pat: &Pat<'_>) -> String {
+ let pat_snippet = snippet(cx, pat.span, "_");
+ let iter_snippet = make_iterator_snippet(cx, iterator, &mut Applicability::Unspecified);
+
+ format!(
+ "if let Some({pat}) = {iter}.next()",
+ pat = pat_snippet,
+ iter = iter_snippet
+ )
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
new file mode 100644
index 000000000..1439f1f4c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs
@@ -0,0 +1,195 @@
+use super::SAME_ITEM_PUSH;
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::path_to_local;
+use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use if_chain::if_chain;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, Node, Pat, PatKind, Stmt, StmtKind};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+use std::iter::Iterator;
+
+/// Detects a for loop that pushes the same item into a Vec
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ _: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ _: &'tcx Expr<'_>,
+) {
+ fn emit_lint(cx: &LateContext<'_>, vec: &Expr<'_>, pushed_item: &Expr<'_>) {
+ let vec_str = snippet_with_macro_callsite(cx, vec.span, "");
+ let item_str = snippet_with_macro_callsite(cx, pushed_item.span, "");
+
+ span_lint_and_help(
+ cx,
+ SAME_ITEM_PUSH,
+ vec.span,
+ "it looks like the same item is being pushed into this Vec",
+ None,
+ &format!(
+ "try using vec![{};SIZE] or {}.resize(NEW_SIZE, {})",
+ item_str, vec_str, item_str
+ ),
+ );
+ }
+
+ if !matches!(pat.kind, PatKind::Wild) {
+ return;
+ }
+
+ // Determine whether it is safe to lint the body
+ let mut same_item_push_visitor = SameItemPushVisitor::new(cx);
+ walk_expr(&mut same_item_push_visitor, body);
+ if_chain! {
+ if same_item_push_visitor.should_lint();
+ if let Some((vec, pushed_item)) = same_item_push_visitor.vec_push;
+ let vec_ty = cx.typeck_results().expr_ty(vec);
+ let ty = vec_ty.walk().nth(1).unwrap().expect_ty();
+ if cx
+ .tcx
+ .lang_items()
+ .clone_trait()
+ .map_or(false, |id| implements_trait(cx, ty, id, &[]));
+ then {
+ // Make sure that the push does not involve possibly mutating values
+ match pushed_item.kind {
+ ExprKind::Path(ref qpath) => {
+ match cx.qpath_res(qpath, pushed_item.hir_id) {
+ // immutable bindings that are initialized with literal or constant
+ Res::Local(hir_id) => {
+ let node = cx.tcx.hir().get(hir_id);
+ if_chain! {
+ if let Node::Pat(pat) = node;
+ if let PatKind::Binding(bind_ann, ..) = pat.kind;
+ if !matches!(bind_ann, BindingAnnotation::RefMut | BindingAnnotation::Mutable);
+ let parent_node = cx.tcx.hir().get_parent_node(hir_id);
+ if let Some(Node::Local(parent_let_expr)) = cx.tcx.hir().find(parent_node);
+ if let Some(init) = parent_let_expr.init;
+ then {
+ match init.kind {
+ // immutable bindings that are initialized with literal
+ ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item),
+ // immutable bindings that are initialized with constant
+ ExprKind::Path(ref path) => {
+ if let Res::Def(DefKind::Const, ..) = cx.qpath_res(path, init.hir_id) {
+ emit_lint(cx, vec, pushed_item);
+ }
+ }
+ _ => {},
+ }
+ }
+ }
+ },
+ // constant
+ Res::Def(DefKind::Const, ..) => emit_lint(cx, vec, pushed_item),
+ _ => {},
+ }
+ },
+ ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item),
+ _ => {},
+ }
+ }
+ }
+}
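+
+// Illustrative sketch (not part of the check; `item`/`vec` are made-up bindings) of the
+// shape this lint reports:
+//
+//     let item = 2;
+//     let mut vec: Vec<u8> = Vec::new();
+//     for _ in 0..20 {
+//         vec.push(item);
+//     }
+//
+// The help message then suggests `vec![item; SIZE]` or `vec.resize(NEW_SIZE, item)`.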
+
+// Scans the body of the for loop and determines whether the lint should be emitted
+struct SameItemPushVisitor<'a, 'tcx> {
+ non_deterministic_expr: bool,
+ multiple_pushes: bool,
+ // this field holds the last vec push operation visited, which should be the only push seen
+ vec_push: Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)>,
+ cx: &'a LateContext<'tcx>,
+ used_locals: FxHashSet<HirId>,
+}
+
+impl<'a, 'tcx> SameItemPushVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ non_deterministic_expr: false,
+ multiple_pushes: false,
+ vec_push: None,
+ cx,
+ used_locals: FxHashSet::default(),
+ }
+ }
+
+ fn should_lint(&self) -> bool {
+ if_chain! {
+ if !self.non_deterministic_expr;
+ if !self.multiple_pushes;
+ if let Some((vec, _)) = self.vec_push;
+ if let Some(hir_id) = path_to_local(vec);
+ then {
+ !self.used_locals.contains(&hir_id)
+ } else {
+ false
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for SameItemPushVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ match &expr.kind {
+ // Non-determinism may occur ... don't give a lint
+ ExprKind::Loop(..) | ExprKind::Match(..) | ExprKind::If(..) => self.non_deterministic_expr = true,
+ ExprKind::Block(block, _) => self.visit_block(block),
+ _ => {
+ if let Some(hir_id) = path_to_local(expr) {
+ self.used_locals.insert(hir_id);
+ }
+ walk_expr(self, expr);
+ },
+ }
+ }
+
+ fn visit_block(&mut self, b: &'tcx Block<'_>) {
+ for stmt in b.stmts.iter() {
+ self.visit_stmt(stmt);
+ }
+ }
+
+ fn visit_stmt(&mut self, s: &'tcx Stmt<'_>) {
+ let vec_push_option = get_vec_push(self.cx, s);
+ if vec_push_option.is_none() {
+ // Current statement is not a push so visit inside
+ match &s.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => self.visit_expr(expr),
+ _ => {},
+ }
+ } else {
+            // Current statement is a push ... check whether another
+ // push had been previously done
+ if self.vec_push.is_none() {
+ self.vec_push = vec_push_option;
+ } else {
+ // There are multiple pushes ... don't lint
+ self.multiple_pushes = true;
+ }
+ }
+ }
+}
+
+// Given some statement, determine if that statement is a push on a Vec. If it is, return
+// the Vec being pushed into and the item being pushed
+fn get_vec_push<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)> {
+ if_chain! {
+ // Extract method being called
+ if let StmtKind::Semi(semi_stmt) = &stmt.kind;
+ if let ExprKind::MethodCall(path, args, _) = &semi_stmt.kind;
+ // Figure out the parameters for the method call
+ if let Some(self_expr) = args.get(0);
+ if let Some(pushed_item) = args.get(1);
+ // Check that the method being called is push() on a Vec
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr), sym::Vec);
+ if path.ident.name.as_str() == "push";
+ then {
+ return Some((self_expr, pushed_item))
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs b/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
new file mode 100644
index 000000000..a0bd7ad0a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
@@ -0,0 +1,101 @@
+use super::SINGLE_ELEMENT_LOOP;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{indent_of, snippet_with_applicability};
+use if_chain::if_chain;
+use rustc_ast::util::parser::PREC_PREFIX;
+use rustc_ast::Mutability;
+use rustc_errors::Applicability;
+use rustc_hir::{is_range_literal, BorrowKind, Expr, ExprKind, Pat};
+use rustc_lint::LateContext;
+use rustc_span::edition::Edition;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ arg: &'tcx Expr<'_>,
+ body: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+) {
+ let (arg_expression, prefix) = match arg.kind {
+ ExprKind::AddrOf(
+ BorrowKind::Ref,
+ Mutability::Not,
+ Expr {
+ kind: ExprKind::Array([arg]),
+ ..
+ },
+ ) => (arg, "&"),
+ ExprKind::AddrOf(
+ BorrowKind::Ref,
+ Mutability::Mut,
+ Expr {
+ kind: ExprKind::Array([arg]),
+ ..
+ },
+ ) => (arg, "&mut "),
+ ExprKind::MethodCall(
+ method,
+ [
+ Expr {
+ kind: ExprKind::Array([arg]),
+ ..
+ },
+ ],
+ _,
+ ) if method.ident.name == rustc_span::sym::iter => (arg, "&"),
+ ExprKind::MethodCall(
+ method,
+ [
+ Expr {
+ kind: ExprKind::Array([arg]),
+ ..
+ },
+ ],
+ _,
+ ) if method.ident.name.as_str() == "iter_mut" => (arg, "&mut "),
+ ExprKind::MethodCall(
+ method,
+ [
+ Expr {
+ kind: ExprKind::Array([arg]),
+ ..
+ },
+ ],
+ _,
+ ) if method.ident.name == rustc_span::sym::into_iter => (arg, ""),
+        // Only check for arrays in edition 2021 or later, as this case will trigger a compiler error otherwise.
+ ExprKind::Array([arg]) if cx.tcx.sess.edition() >= Edition::Edition2021 => (arg, ""),
+ _ => return,
+ };
+ if_chain! {
+ if let ExprKind::Block(block, _) = body.kind;
+ if !block.stmts.is_empty();
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let pat_snip = snippet_with_applicability(cx, pat.span, "..", &mut applicability);
+ let mut arg_snip = snippet_with_applicability(cx, arg_expression.span, "..", &mut applicability);
+ let mut block_str = snippet_with_applicability(cx, block.span, "..", &mut applicability).into_owned();
+ block_str.remove(0);
+ block_str.pop();
+ let indent = " ".repeat(indent_of(cx, block.stmts[0].span).unwrap_or(0));
+
+ // Reference iterator from `&(mut) []` or `[].iter(_mut)()`.
+ if !prefix.is_empty() && (
+ // Precedence of internal expression is less than or equal to precedence of `&expr`.
+ arg_expression.precedence().order() <= PREC_PREFIX || is_range_literal(arg_expression)
+ ) {
+ arg_snip = format!("({arg_snip})").into();
+ }
+
+ span_lint_and_sugg(
+ cx,
+ SINGLE_ELEMENT_LOOP,
+ expr.span,
+ "for loop over a single element",
+ "try",
+ format!("{{\n{indent}let {pat_snip} = {prefix}{arg_snip};{block_str}}}"),
+ applicability,
+ )
+ }
+ }
+}
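+
+// Illustrative sketch (not part of the check): `for item in &[1] { println!("{}", item); }`
+// is rewritten into a plain block,
+//
+//     {
+//         let item = &1;
+//         println!("{}", item);
+//     }
+//
+// keeping the loop body while dropping the needless single-element iteration.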
diff --git a/src/tools/clippy/clippy_lints/src/loops/utils.rs b/src/tools/clippy/clippy_lints/src/loops/utils.rs
new file mode 100644
index 000000000..4801a84eb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/utils.rs
@@ -0,0 +1,358 @@
+use clippy_utils::ty::{has_iter_method, implements_trait};
+use clippy_utils::{get_parent_expr, is_integer_const, path_to_local, path_to_local_id, sugg};
+use if_chain::if_chain;
+use rustc_ast::ast::{LitIntType, LitKind};
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, walk_local, walk_pat, walk_stmt, Visitor};
+use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, HirId, HirIdMap, Local, Mutability, Pat, PatKind, Stmt};
+use rustc_lint::LateContext;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::source_map::Spanned;
+use rustc_span::symbol::{sym, Symbol};
+use rustc_typeck::hir_ty_to_ty;
+use std::iter::Iterator;
+
+#[derive(Debug, PartialEq, Eq)]
+enum IncrementVisitorVarState {
+ Initial, // Not examined yet
+ IncrOnce, // Incremented exactly once, may be a loop counter
+ DontWarn,
+}
+
+/// Scan a for loop for variables that are incremented exactly once and not used after that.
+pub(super) struct IncrementVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>, // context reference
+ states: HirIdMap<IncrementVisitorVarState>, // incremented variables
+ depth: u32, // depth of conditional expressions
+ done: bool,
+}
+
+impl<'a, 'tcx> IncrementVisitor<'a, 'tcx> {
+ pub(super) fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ states: HirIdMap::default(),
+ depth: 0,
+ done: false,
+ }
+ }
+
+ pub(super) fn into_results(self) -> impl Iterator<Item = HirId> {
+ self.states.into_iter().filter_map(|(id, state)| {
+ if state == IncrementVisitorVarState::IncrOnce {
+ Some(id)
+ } else {
+ None
+ }
+ })
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for IncrementVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if self.done {
+ return;
+ }
+
+ // If node is a variable
+ if let Some(def_id) = path_to_local(expr) {
+ if let Some(parent) = get_parent_expr(self.cx, expr) {
+ let state = self.states.entry(def_id).or_insert(IncrementVisitorVarState::Initial);
+ if *state == IncrementVisitorVarState::IncrOnce {
+ *state = IncrementVisitorVarState::DontWarn;
+ return;
+ }
+
+ match parent.kind {
+ ExprKind::AssignOp(op, lhs, rhs) => {
+ if lhs.hir_id == expr.hir_id {
+ *state = if op.node == BinOpKind::Add
+ && is_integer_const(self.cx, rhs, 1)
+ && *state == IncrementVisitorVarState::Initial
+ && self.depth == 0
+ {
+ IncrementVisitorVarState::IncrOnce
+ } else {
+ // Assigned some other value or assigned multiple times
+ IncrementVisitorVarState::DontWarn
+ };
+ }
+ },
+ ExprKind::Assign(lhs, _, _) if lhs.hir_id == expr.hir_id => {
+ *state = IncrementVisitorVarState::DontWarn;
+ },
+ ExprKind::AddrOf(BorrowKind::Ref, mutability, _) if mutability == Mutability::Mut => {
+ *state = IncrementVisitorVarState::DontWarn;
+ },
+ _ => (),
+ }
+ }
+
+ walk_expr(self, expr);
+ } else if is_loop(expr) || is_conditional(expr) {
+ self.depth += 1;
+ walk_expr(self, expr);
+ self.depth -= 1;
+ } else if let ExprKind::Continue(_) = expr.kind {
+ self.done = true;
+ } else {
+ walk_expr(self, expr);
+ }
+ }
+}
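+
+// Illustrative sketch (not part of the visitor; bindings are made up) of the counter
+// pattern this visitor is meant to recognise, i.e. a variable incremented exactly once
+// per iteration and never otherwise reassigned or mutably borrowed:
+//
+//     let values = [1, 2, 3];
+//     let mut count = 0;
+//     for x in &values {
+//         // ... use `x` ...
+//         count += 1;
+//     }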
+
+enum InitializeVisitorState<'hir> {
+ Initial, // Not examined yet
+ Declared(Symbol, Option<Ty<'hir>>), // Declared but not (yet) initialized
+ Initialized {
+ name: Symbol,
+ ty: Option<Ty<'hir>>,
+ initializer: &'hir Expr<'hir>,
+ },
+ DontWarn,
+}
+
+/// Checks whether a variable is initialized at the start of a loop, and is neither
+/// modified nor used after the loop.
+pub(super) struct InitializeVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>, // context reference
+ end_expr: &'tcx Expr<'tcx>, // the for loop. Stop scanning here.
+ var_id: HirId,
+ state: InitializeVisitorState<'tcx>,
+ depth: u32, // depth of conditional expressions
+ past_loop: bool,
+}
+
+impl<'a, 'tcx> InitializeVisitor<'a, 'tcx> {
+ pub(super) fn new(cx: &'a LateContext<'tcx>, end_expr: &'tcx Expr<'tcx>, var_id: HirId) -> Self {
+ Self {
+ cx,
+ end_expr,
+ var_id,
+ state: InitializeVisitorState::Initial,
+ depth: 0,
+ past_loop: false,
+ }
+ }
+
+ pub(super) fn get_result(&self) -> Option<(Symbol, Option<Ty<'tcx>>, &'tcx Expr<'tcx>)> {
+ if let InitializeVisitorState::Initialized { name, ty, initializer } = self.state {
+ Some((name, ty, initializer))
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for InitializeVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_local(&mut self, l: &'tcx Local<'_>) {
+ // Look for declarations of the variable
+ if_chain! {
+ if l.pat.hir_id == self.var_id;
+ if let PatKind::Binding(.., ident, _) = l.pat.kind;
+ then {
+ let ty = l.ty.map(|ty| hir_ty_to_ty(self.cx.tcx, ty));
+
+ self.state = l.init.map_or(InitializeVisitorState::Declared(ident.name, ty), |init| {
+ InitializeVisitorState::Initialized {
+ initializer: init,
+ ty,
+ name: ident.name,
+ }
+ })
+ }
+ }
+
+ walk_local(self, l);
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if matches!(self.state, InitializeVisitorState::DontWarn) {
+ return;
+ }
+ if expr.hir_id == self.end_expr.hir_id {
+ self.past_loop = true;
+ return;
+ }
+ // No need to visit expressions before the variable is
+ // declared
+ if matches!(self.state, InitializeVisitorState::Initial) {
+ return;
+ }
+
+ // If node is the desired variable, see how it's used
+ if path_to_local_id(expr, self.var_id) {
+ if self.past_loop {
+ self.state = InitializeVisitorState::DontWarn;
+ return;
+ }
+
+ if let Some(parent) = get_parent_expr(self.cx, expr) {
+ match parent.kind {
+ ExprKind::AssignOp(_, lhs, _) if lhs.hir_id == expr.hir_id => {
+ self.state = InitializeVisitorState::DontWarn;
+ },
+ ExprKind::Assign(lhs, rhs, _) if lhs.hir_id == expr.hir_id => {
+ self.state = if self.depth == 0 {
+ match self.state {
+ InitializeVisitorState::Declared(name, mut ty) => {
+ if ty.is_none() {
+ if let ExprKind::Lit(Spanned {
+ node: LitKind::Int(_, LitIntType::Unsuffixed),
+ ..
+ }) = rhs.kind
+ {
+ ty = None;
+ } else {
+ ty = self.cx.typeck_results().expr_ty_opt(rhs);
+ }
+ }
+
+ InitializeVisitorState::Initialized {
+ initializer: rhs,
+ ty,
+ name,
+ }
+ },
+ InitializeVisitorState::Initialized { ty, name, .. } => {
+ InitializeVisitorState::Initialized {
+ initializer: rhs,
+ ty,
+ name,
+ }
+ },
+ _ => InitializeVisitorState::DontWarn,
+ }
+ } else {
+ InitializeVisitorState::DontWarn
+ }
+ },
+ ExprKind::AddrOf(BorrowKind::Ref, mutability, _) if mutability == Mutability::Mut => {
+ self.state = InitializeVisitorState::DontWarn;
+ },
+ _ => (),
+ }
+ }
+
+ walk_expr(self, expr);
+ } else if !self.past_loop && is_loop(expr) {
+ self.state = InitializeVisitorState::DontWarn;
+ } else if is_conditional(expr) {
+ self.depth += 1;
+ walk_expr(self, expr);
+ self.depth -= 1;
+ } else {
+ walk_expr(self, expr);
+ }
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+fn is_loop(expr: &Expr<'_>) -> bool {
+ matches!(expr.kind, ExprKind::Loop(..))
+}
+
+fn is_conditional(expr: &Expr<'_>) -> bool {
+ matches!(expr.kind, ExprKind::If(..) | ExprKind::Match(..))
+}
+
+#[derive(PartialEq, Eq)]
+pub(super) enum Nesting {
+ Unknown, // no nesting detected yet
+ RuledOut, // the iterator is initialized or assigned within scope
+ LookFurther, // no nesting detected, no further walk required
+}
+
+use self::Nesting::{LookFurther, RuledOut, Unknown};
+
+pub(super) struct LoopNestVisitor {
+ pub(super) hir_id: HirId,
+ pub(super) iterator: HirId,
+ pub(super) nesting: Nesting,
+}
+
+impl<'tcx> Visitor<'tcx> for LoopNestVisitor {
+ fn visit_stmt(&mut self, stmt: &'tcx Stmt<'_>) {
+ if stmt.hir_id == self.hir_id {
+ self.nesting = LookFurther;
+ } else if self.nesting == Unknown {
+ walk_stmt(self, stmt);
+ }
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if self.nesting != Unknown {
+ return;
+ }
+ if expr.hir_id == self.hir_id {
+ self.nesting = LookFurther;
+ return;
+ }
+ match expr.kind {
+ ExprKind::Assign(path, _, _) | ExprKind::AssignOp(_, path, _) => {
+ if path_to_local_id(path, self.iterator) {
+ self.nesting = RuledOut;
+ }
+ },
+ _ => walk_expr(self, expr),
+ }
+ }
+
+ fn visit_pat(&mut self, pat: &'tcx Pat<'_>) {
+ if self.nesting != Unknown {
+ return;
+ }
+ if let PatKind::Binding(_, id, ..) = pat.kind {
+ if id == self.iterator {
+ self.nesting = RuledOut;
+ return;
+ }
+ }
+ walk_pat(self, pat);
+ }
+}
+
+/// If `arg` was the argument to a `for` loop, return the "cleanest" way of writing the
+/// actual `Iterator` that the loop uses.
+pub(super) fn make_iterator_snippet(cx: &LateContext<'_>, arg: &Expr<'_>, applic_ref: &mut Applicability) -> String {
+ let impls_iterator = cx.tcx.get_diagnostic_item(sym::Iterator).map_or(false, |id| {
+ implements_trait(cx, cx.typeck_results().expr_ty(arg), id, &[])
+ });
+ if impls_iterator {
+ format!(
+ "{}",
+ sugg::Sugg::hir_with_applicability(cx, arg, "_", applic_ref).maybe_par()
+ )
+ } else {
+ // (&x).into_iter() ==> x.iter()
+ // (&mut x).into_iter() ==> x.iter_mut()
+ let arg_ty = cx.typeck_results().expr_ty_adjusted(arg);
+ match &arg_ty.kind() {
+ ty::Ref(_, inner_ty, mutbl) if has_iter_method(cx, *inner_ty).is_some() => {
+ let method_name = match mutbl {
+ Mutability::Mut => "iter_mut",
+ Mutability::Not => "iter",
+ };
+ let caller = match &arg.kind {
+ ExprKind::AddrOf(BorrowKind::Ref, _, arg_inner) => arg_inner,
+ _ => arg,
+ };
+ format!(
+ "{}.{}()",
+ sugg::Sugg::hir_with_applicability(cx, caller, "_", applic_ref).maybe_par(),
+ method_name,
+ )
+ },
+ _ => format!(
+ "{}.into_iter()",
+ sugg::Sugg::hir_with_applicability(cx, arg, "_", applic_ref).maybe_par()
+ ),
+ }
+ }
+}
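+
+// Illustrative examples (not exhaustive; `vs` is a made-up binding): for a loop written
+// as `for x in &mut vs { .. }` over a `Vec`, the returned snippet is `vs.iter_mut()`;
+// for an argument that already implements `Iterator`, such as the range `0..10`, the
+// expression is returned essentially as-is (parenthesised by `maybe_par` where needed).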
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
new file mode 100644
index 000000000..a63422d2a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
@@ -0,0 +1,128 @@
+use super::WHILE_IMMUTABLE_CONDITION;
+use clippy_utils::consts::constant;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::usage::mutated_variables;
+use if_chain::if_chain;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefIdMap;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::HirIdSet;
+use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_lint::LateContext;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) {
+ if constant(cx, cx.typeck_results(), cond).is_some() {
+ // A pure constant condition (e.g., `while false`) is not linted.
+ return;
+ }
+
+ let mut var_visitor = VarCollectorVisitor {
+ cx,
+ ids: HirIdSet::default(),
+ def_ids: DefIdMap::default(),
+ skip: false,
+ };
+ var_visitor.visit_expr(cond);
+ if var_visitor.skip {
+ return;
+ }
+ let used_in_condition = &var_visitor.ids;
+ let mutated_in_body = mutated_variables(expr, cx);
+ let mutated_in_condition = mutated_variables(cond, cx);
+ let no_cond_variable_mutated =
+ if let (Some(used_mutably_body), Some(used_mutably_cond)) = (mutated_in_body, mutated_in_condition) {
+ used_in_condition.is_disjoint(&used_mutably_body) && used_in_condition.is_disjoint(&used_mutably_cond)
+ } else {
+ return;
+ };
+ let mutable_static_in_cond = var_visitor.def_ids.iter().any(|(_, v)| *v);
+
+ let mut has_break_or_return_visitor = HasBreakOrReturnVisitor {
+ has_break_or_return: false,
+ };
+ has_break_or_return_visitor.visit_expr(expr);
+ let has_break_or_return = has_break_or_return_visitor.has_break_or_return;
+
+ if no_cond_variable_mutated && !mutable_static_in_cond {
+ span_lint_and_then(
+ cx,
+ WHILE_IMMUTABLE_CONDITION,
+ cond.span,
+ "variables in the condition are not mutated in the loop body",
+ |diag| {
+                diag.note("this may lead to an infinite or never-running loop");
+
+ if has_break_or_return {
+ diag.note("this loop contains `return`s or `break`s");
+ diag.help("rewrite it as `if cond { loop { } }`");
+ }
+ },
+ );
+ }
+}
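+
+// Illustrative sketch (not part of the check; `i` is a made-up binding): a condition
+// whose variables are never mutated in the loop body,
+//
+//     let i = 0;
+//     while i > 10 {
+//         println!("let me loop forever!");
+//     }
+//
+// `i` is read in the condition but never mutated, so the lint fires with the notes
+// added above.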
+
+struct HasBreakOrReturnVisitor {
+ has_break_or_return: bool,
+}
+
+impl<'tcx> Visitor<'tcx> for HasBreakOrReturnVisitor {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if self.has_break_or_return {
+ return;
+ }
+
+ match expr.kind {
+ ExprKind::Ret(_) | ExprKind::Break(_, _) => {
+ self.has_break_or_return = true;
+ return;
+ },
+ _ => {},
+ }
+
+ walk_expr(self, expr);
+ }
+}
+
+/// Collects the set of variables used in an expression.
+/// Stops the analysis if a function call is found.
+/// Note: in some cases, such as `self`, there is no mutable annotation,
+/// so all variable definition IDs are collected.
+struct VarCollectorVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ ids: HirIdSet,
+ def_ids: DefIdMap<bool>,
+ skip: bool,
+}
+
+impl<'a, 'tcx> VarCollectorVisitor<'a, 'tcx> {
+ fn insert_def_id(&mut self, ex: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Path(ref qpath) = ex.kind;
+ if let QPath::Resolved(None, _) = *qpath;
+ then {
+ match self.cx.qpath_res(qpath, ex.hir_id) {
+ Res::Local(hir_id) => {
+ self.ids.insert(hir_id);
+ },
+ Res::Def(DefKind::Static(_), def_id) => {
+ let mutable = self.cx.tcx.is_mutable_static(def_id);
+ self.def_ids.insert(def_id, mutable);
+ },
+ _ => {},
+ }
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for VarCollectorVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, ex: &'tcx Expr<'_>) {
+ match ex.kind {
+ ExprKind::Path(_) => self.insert_def_id(ex),
+ // If there is any function/method call… we just stop analysis
+ ExprKind::Call(..) | ExprKind::MethodCall(..) => self.skip = true,
+
+ _ => walk_expr(self, ex),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_let_loop.rs b/src/tools/clippy/clippy_lints/src/loops/while_let_loop.rs
new file mode 100644
index 000000000..ca617859d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/while_let_loop.rs
@@ -0,0 +1,96 @@
+use super::WHILE_LET_LOOP;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::needs_ordered_drop;
+use clippy_utils::visitors::any_temporaries_need_ordered_drop;
+use rustc_errors::Applicability;
+use rustc_hir::{Block, Expr, ExprKind, Local, MatchSource, Pat, StmtKind};
+use rustc_lint::LateContext;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, loop_block: &'tcx Block<'_>) {
+ let (init, has_trailing_exprs) = match (loop_block.stmts, loop_block.expr) {
+ ([stmt, stmts @ ..], expr) => {
+ if let StmtKind::Local(&Local { init: Some(e), els: None, .. }) | StmtKind::Semi(e) | StmtKind::Expr(e) = stmt.kind {
+ (e, !stmts.is_empty() || expr.is_some())
+ } else {
+ return;
+ }
+ },
+ ([], Some(e)) => (e, false),
+ _ => return,
+ };
+
+ if let Some(if_let) = higher::IfLet::hir(cx, init)
+ && let Some(else_expr) = if_let.if_else
+ && is_simple_break_expr(else_expr)
+ {
+ could_be_while_let(cx, expr, if_let.let_pat, if_let.let_expr, has_trailing_exprs);
+ } else if let ExprKind::Match(scrutinee, [arm1, arm2], MatchSource::Normal) = init.kind
+ && arm1.guard.is_none()
+ && arm2.guard.is_none()
+ && is_simple_break_expr(arm2.body)
+ {
+ could_be_while_let(cx, expr, arm1.pat, scrutinee, has_trailing_exprs);
+ }
+}
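+
+// Illustrative sketch (not part of the check; `y` is a made-up binding) of the loop
+// shape detected here:
+//
+//     let y = Some(1);
+//     loop {
+//         let x = match y {
+//             Some(x) => x,
+//             None => break,
+//         };
+//         // .. use `x` ..
+//     }
+//
+// which the suggestion rewrites as `while let Some(x) = y { .. }`.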
+
+/// Returns `true` if `expr` contains a single break expression without a label or sub-expression.
+fn is_simple_break_expr(e: &Expr<'_>) -> bool {
+ matches!(peel_blocks(e).kind, ExprKind::Break(dest, None) if dest.label.is_none())
+}
+
+/// Removes any blocks containing only a single expression.
+fn peel_blocks<'tcx>(e: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
+ if let ExprKind::Block(b, _) = e.kind {
+ match (b.stmts, b.expr) {
+ ([s], None) => {
+ if let StmtKind::Expr(e) | StmtKind::Semi(e) = s.kind {
+ peel_blocks(e)
+ } else {
+ e
+ }
+ },
+ ([], Some(e)) => peel_blocks(e),
+ _ => e,
+ }
+ } else {
+ e
+ }
+}
+
+fn could_be_while_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ let_pat: &'tcx Pat<'_>,
+ let_expr: &'tcx Expr<'_>,
+ has_trailing_exprs: bool,
+) {
+ if has_trailing_exprs
+ && (needs_ordered_drop(cx, cx.typeck_results().expr_ty(let_expr))
+ || any_temporaries_need_ordered_drop(cx, let_expr))
+ {
+ // Switching to a `while let` loop will extend the lifetime of some values.
+ return;
+ }
+
+ // NOTE: we used to build a body here instead of using
+ // ellipsis, this was removed because:
+ // 1) it was ugly with big bodies;
+ // 2) it was not indented properly;
+ // 3) it wasn’t very smart (see #675).
+ let mut applicability = Applicability::HasPlaceholders;
+ span_lint_and_sugg(
+ cx,
+ WHILE_LET_LOOP,
+ expr.span,
+ "this loop could be written as a `while let` loop",
+ "try",
+ format!(
+ "while let {} = {} {{ .. }}",
+ snippet_with_applicability(cx, let_pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, let_expr.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
new file mode 100644
index 000000000..e9e215e66
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
@@ -0,0 +1,362 @@
+use super::WHILE_LET_ON_ITERATOR;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{
+ get_enclosing_loop_or_multi_call_closure, is_refutable, is_trait_method, match_def_path, paths,
+ visitors::is_res_used,
+};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{def::Res, Closure, Expr, ExprKind, HirId, Local, Mutability, PatKind, QPath, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::hir::nested_filter::OnlyBodies;
+use rustc_middle::ty::adjustment::Adjust;
+use rustc_span::{symbol::sym, Symbol};
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let (scrutinee_expr, iter_expr_struct, iter_expr, some_pat, loop_expr) = if_chain! {
+ if let Some(higher::WhileLet { if_then, let_pat, let_expr }) = higher::WhileLet::hir(expr);
+ // check for `Some(..)` pattern
+ if let PatKind::TupleStruct(QPath::Resolved(None, pat_path), some_pat, _) = let_pat.kind;
+ if let Res::Def(_, pat_did) = pat_path.res;
+ if match_def_path(cx, pat_did, &paths::OPTION_SOME);
+ // check for call to `Iterator::next`
+ if let ExprKind::MethodCall(method_name, [iter_expr], _) = let_expr.kind;
+ if method_name.ident.name == sym::next;
+ if is_trait_method(cx, let_expr, sym::Iterator);
+ if let Some(iter_expr_struct) = try_parse_iter_expr(cx, iter_expr);
+ // get the loop containing the match expression
+ if !uses_iter(cx, &iter_expr_struct, if_then);
+ then {
+ (let_expr, iter_expr_struct, iter_expr, some_pat, expr)
+ } else {
+ return;
+ }
+ };
+
+ let mut applicability = Applicability::MachineApplicable;
+ let loop_var = if let Some(some_pat) = some_pat.first() {
+ if is_refutable(cx, some_pat) {
+ // Refutable patterns don't work with for loops.
+ return;
+ }
+ snippet_with_applicability(cx, some_pat.span, "..", &mut applicability)
+ } else {
+ "_".into()
+ };
+
+    // If the iterator is a field or the iterator is accessed after the loop is complete, it needs to be
+    // borrowed mutably. TODO: If the struct can be partially moved from and the struct isn't used
+    // afterwards, a mutable borrow of a field isn't necessary.
+ let by_ref = if cx.typeck_results().expr_ty(iter_expr).ref_mutability() == Some(Mutability::Mut)
+ || !iter_expr_struct.can_move
+ || !iter_expr_struct.fields.is_empty()
+ || needs_mutable_borrow(cx, &iter_expr_struct, loop_expr)
+ {
+ ".by_ref()"
+ } else {
+ ""
+ };
+
+ let iterator = snippet_with_applicability(cx, iter_expr.span, "_", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ WHILE_LET_ON_ITERATOR,
+ expr.span.with_hi(scrutinee_expr.span.hi()),
+ "this loop could be written as a `for` loop",
+ "try",
+ format!("for {} in {}{}", loop_var, iterator, by_ref),
+ applicability,
+ );
+}
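+
+// Illustrative sketch (not part of the check; `iter` is a made-up binding):
+//
+//     let mut iter = vec![1, 2, 3].into_iter();
+//     while let Some(x) = iter.next() {
+//         println!("{x}");
+//     }
+//
+// is rewritten as `for x in iter { .. }`, or `for x in iter.by_ref() { .. }` when the
+// iterator is a field or is still used after the loop (see `by_ref` above).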
+
+#[derive(Debug)]
+struct IterExpr {
+ /// The fields used, in order of child to parent.
+ fields: Vec<Symbol>,
+ /// The path being used.
+ path: Res,
+ /// Whether or not the iterator can be moved.
+ can_move: bool,
+}
+
+/// Parses any expression to find out which field of which variable is used. Will return `None` if
+/// the expression might have side effects.
+fn try_parse_iter_expr(cx: &LateContext<'_>, mut e: &Expr<'_>) -> Option<IterExpr> {
+ let mut fields = Vec::new();
+ let mut can_move = true;
+ loop {
+ if cx
+ .typeck_results()
+ .expr_adjustments(e)
+ .iter()
+ .any(|a| matches!(a.kind, Adjust::Deref(Some(..))))
+ {
+ // Custom deref impls need to borrow the whole value as it's captured by reference
+ can_move = false;
+ fields.clear();
+ }
+ match e.kind {
+ ExprKind::Path(ref path) => {
+ break Some(IterExpr {
+ fields,
+ path: cx.qpath_res(path, e.hir_id),
+ can_move,
+ });
+ },
+ ExprKind::Field(base, name) => {
+ fields.push(name.name);
+ e = base;
+ },
+ // Dereferencing a pointer has no side effects and doesn't affect which field is being used.
+ ExprKind::Unary(UnOp::Deref, base) if cx.typeck_results().expr_ty(base).is_ref() => e = base,
+
+ // Shouldn't have side effects, but there's no way to trace which field is used. So forget which fields have
+ // already been seen.
+ ExprKind::Index(base, idx) if !idx.can_have_side_effects() => {
+ can_move = false;
+ fields.clear();
+ e = base;
+ },
+ ExprKind::Unary(UnOp::Deref, base) => {
+ can_move = false;
+ fields.clear();
+ e = base;
+ },
+
+ // No effect and doesn't affect which field is being used.
+ ExprKind::DropTemps(base) | ExprKind::AddrOf(_, _, base) | ExprKind::Type(base, _) => e = base,
+ _ => break None,
+ }
+ }
+}
+
+fn is_expr_same_field(cx: &LateContext<'_>, mut e: &Expr<'_>, mut fields: &[Symbol], path_res: Res) -> bool {
+ loop {
+ match (&e.kind, fields) {
+ (&ExprKind::Field(base, name), [head_field, tail_fields @ ..]) if name.name == *head_field => {
+ e = base;
+ fields = tail_fields;
+ },
+ (ExprKind::Path(path), []) => {
+ break cx.qpath_res(path, e.hir_id) == path_res;
+ },
+ (&(ExprKind::DropTemps(base) | ExprKind::AddrOf(_, _, base) | ExprKind::Type(base, _)), _) => e = base,
+ _ => break false,
+ }
+ }
+}
+
+/// Checks if the given expression is the same field as, is a child of, or is the parent of the
+/// given field. Used to check if the expression can be used while the given field is borrowed
+/// mutably. e.g. if checking for `x.y`, then `x.y`, `x.y.z`, and `x` will all return true, but
+/// `x.z` and `y` will return false.
+fn is_expr_same_child_or_parent_field(cx: &LateContext<'_>, expr: &Expr<'_>, fields: &[Symbol], path_res: Res) -> bool {
+ match expr.kind {
+ ExprKind::Field(base, name) => {
+ if let Some((head_field, tail_fields)) = fields.split_first() {
+ if name.name == *head_field && is_expr_same_field(cx, base, tail_fields, path_res) {
+ return true;
+ }
+ // Check if the expression is a parent field
+ let mut fields_iter = tail_fields.iter();
+ while let Some(field) = fields_iter.next() {
+ if *field == name.name && is_expr_same_field(cx, base, fields_iter.as_slice(), path_res) {
+ return true;
+ }
+ }
+ }
+
+ // Check if the expression is a child field.
+ let mut e = base;
+ loop {
+ match e.kind {
+ ExprKind::Field(..) if is_expr_same_field(cx, e, fields, path_res) => break true,
+ ExprKind::Field(base, _) | ExprKind::DropTemps(base) | ExprKind::Type(base, _) => e = base,
+ ExprKind::Path(ref path) if fields.is_empty() => {
+ break cx.qpath_res(path, e.hir_id) == path_res;
+ },
+ _ => break false,
+ }
+ }
+ },
+ // If the path matches, this is either an exact match, or the expression is a parent of the field.
+ ExprKind::Path(ref path) => cx.qpath_res(path, expr.hir_id) == path_res,
+ ExprKind::DropTemps(base) | ExprKind::Type(base, _) | ExprKind::AddrOf(_, _, base) => {
+ is_expr_same_child_or_parent_field(cx, base, fields, path_res)
+ },
+ _ => false,
+ }
+}
+
+/// Strips off all field and path expressions. This will return true if a field or path has been
+/// skipped. Used to skip them after failing to check for equality.
+fn skip_fields_and_path<'tcx>(expr: &'tcx Expr<'_>) -> (Option<&'tcx Expr<'tcx>>, bool) {
+ let mut e = expr;
+ let e = loop {
+ match e.kind {
+ ExprKind::Field(base, _) | ExprKind::DropTemps(base) | ExprKind::Type(base, _) => e = base,
+ ExprKind::Path(_) => return (None, true),
+ _ => break e,
+ }
+ };
+ (Some(e), e.hir_id != expr.hir_id)
+}
+
+/// Checks if the given expression uses the iterator.
+fn uses_iter<'tcx>(cx: &LateContext<'tcx>, iter_expr: &IterExpr, container: &'tcx Expr<'_>) -> bool {
+ struct V<'a, 'b, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ iter_expr: &'b IterExpr,
+ uses_iter: bool,
+ }
+ impl<'tcx> Visitor<'tcx> for V<'_, '_, 'tcx> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if self.uses_iter {
+ // return
+ } else if is_expr_same_child_or_parent_field(self.cx, e, &self.iter_expr.fields, self.iter_expr.path) {
+ self.uses_iter = true;
+ } else if let (e, true) = skip_fields_and_path(e) {
+ if let Some(e) = e {
+ self.visit_expr(e);
+ }
+ } else if let ExprKind::Closure(&Closure { body: id, .. }) = e.kind {
+ if is_res_used(self.cx, self.iter_expr.path, id) {
+ self.uses_iter = true;
+ }
+ } else {
+ walk_expr(self, e);
+ }
+ }
+ }
+
+ let mut v = V {
+ cx,
+ iter_expr,
+ uses_iter: false,
+ };
+ v.visit_expr(container);
+ v.uses_iter
+}
+
+#[expect(clippy::too_many_lines)]
+fn needs_mutable_borrow(cx: &LateContext<'_>, iter_expr: &IterExpr, loop_expr: &Expr<'_>) -> bool {
+ struct AfterLoopVisitor<'a, 'b, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ iter_expr: &'b IterExpr,
+ loop_id: HirId,
+ after_loop: bool,
+ used_iter: bool,
+ }
+ impl<'tcx> Visitor<'tcx> for AfterLoopVisitor<'_, '_, 'tcx> {
+ type NestedFilter = OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if self.used_iter {
+ return;
+ }
+ if self.after_loop {
+ if is_expr_same_child_or_parent_field(self.cx, e, &self.iter_expr.fields, self.iter_expr.path) {
+ self.used_iter = true;
+ } else if let (e, true) = skip_fields_and_path(e) {
+ if let Some(e) = e {
+ self.visit_expr(e);
+ }
+ } else if let ExprKind::Closure(&Closure { body: id, .. }) = e.kind {
+ self.used_iter = is_res_used(self.cx, self.iter_expr.path, id);
+ } else {
+ walk_expr(self, e);
+ }
+ } else if self.loop_id == e.hir_id {
+ self.after_loop = true;
+ } else {
+ walk_expr(self, e);
+ }
+ }
+ }
+
+ struct NestedLoopVisitor<'a, 'b, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ iter_expr: &'b IterExpr,
+ local_id: HirId,
+ loop_id: HirId,
+ after_loop: bool,
+ found_local: bool,
+ used_after: bool,
+ }
+ impl<'a, 'b, 'tcx> Visitor<'tcx> for NestedLoopVisitor<'a, 'b, 'tcx> {
+ type NestedFilter = OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_local(&mut self, l: &'tcx Local<'_>) {
+ if !self.after_loop {
+ l.pat.each_binding_or_first(&mut |_, id, _, _| {
+ if id == self.local_id {
+ self.found_local = true;
+ }
+ });
+ }
+ if let Some(e) = l.init {
+ self.visit_expr(e);
+ }
+ }
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if self.used_after {
+ return;
+ }
+ if self.after_loop {
+ if is_expr_same_child_or_parent_field(self.cx, e, &self.iter_expr.fields, self.iter_expr.path) {
+ self.used_after = true;
+ } else if let (e, true) = skip_fields_and_path(e) {
+ if let Some(e) = e {
+ self.visit_expr(e);
+ }
+ } else if let ExprKind::Closure(&Closure { body: id, .. }) = e.kind {
+ self.used_after = is_res_used(self.cx, self.iter_expr.path, id);
+ } else {
+ walk_expr(self, e);
+ }
+ } else if e.hir_id == self.loop_id {
+ self.after_loop = true;
+ } else {
+ walk_expr(self, e);
+ }
+ }
+ }
+
+ if let Some(e) = get_enclosing_loop_or_multi_call_closure(cx, loop_expr) {
+ let local_id = match iter_expr.path {
+ Res::Local(id) => id,
+ _ => return true,
+ };
+ let mut v = NestedLoopVisitor {
+ cx,
+ iter_expr,
+ local_id,
+ loop_id: loop_expr.hir_id,
+ after_loop: false,
+ found_local: false,
+ used_after: false,
+ };
+ v.visit_expr(e);
+ v.used_after || !v.found_local
+ } else {
+ let mut v = AfterLoopVisitor {
+ cx,
+ iter_expr,
+ loop_id: loop_expr.hir_id,
+ after_loop: false,
+ used_iter: false,
+ };
+ v.visit_expr(&cx.tcx.hir().body(cx.enclosing_body.unwrap()).value);
+ v.used_iter
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs
new file mode 100644
index 000000000..d573a1b4f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/macro_use.rs
@@ -0,0 +1,221 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::source::snippet;
+use hir::def::{DefKind, Res};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{edition::Edition, sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `#[macro_use] use...`.
+ ///
+ /// ### Why is this bad?
+    /// Since the Rust 2018 edition you can import
+    /// macros directly, which is considered idiomatic.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// #[macro_use]
+ /// use some_macro;
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub MACRO_USE_IMPORTS,
+ pedantic,
+ "#[macro_use] is no longer needed"
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct PathAndSpan {
+ path: String,
+ span: Span,
+}
+
+/// `MacroRefData` includes the name of the macro.
+#[derive(Debug, Clone)]
+pub struct MacroRefData {
+ name: String,
+}
+
+impl MacroRefData {
+ pub fn new(name: String) -> Self {
+ Self { name }
+ }
+}
+
+#[derive(Default)]
+#[expect(clippy::module_name_repetitions)]
+pub struct MacroUseImports {
+ /// the actual import path used and the span of the attribute above it. The value is
+    /// the location where the lint should be emitted.
+ imports: Vec<(String, Span, hir::HirId)>,
+ /// the span of the macro reference, kept to ensure only one reference is used per macro call.
+ collected: FxHashSet<Span>,
+ mac_refs: Vec<MacroRefData>,
+}
+
+impl_lint_pass!(MacroUseImports => [MACRO_USE_IMPORTS]);
+
+impl MacroUseImports {
+ fn push_unique_macro(&mut self, cx: &LateContext<'_>, span: Span) {
+ let call_site = span.source_callsite();
+ let name = snippet(cx, cx.sess().source_map().span_until_char(call_site, '!'), "_");
+ if span.source_callee().is_some() && !self.collected.contains(&call_site) {
+ let name = if name.contains("::") {
+ name.split("::").last().unwrap().to_string()
+ } else {
+ name.to_string()
+ };
+
+ self.mac_refs.push(MacroRefData::new(name));
+ self.collected.insert(call_site);
+ }
+ }
+
+ fn push_unique_macro_pat_ty(&mut self, cx: &LateContext<'_>, span: Span) {
+ let call_site = span.source_callsite();
+ let name = snippet(cx, cx.sess().source_map().span_until_char(call_site, '!'), "_");
+ if span.source_callee().is_some() && !self.collected.contains(&call_site) {
+ self.mac_refs.push(MacroRefData::new(name.to_string()));
+ self.collected.insert(call_site);
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for MacroUseImports {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
+ if_chain! {
+ if cx.sess().opts.edition >= Edition::Edition2018;
+ if let hir::ItemKind::Use(path, _kind) = &item.kind;
+ let hir_id = item.hir_id();
+ let attrs = cx.tcx.hir().attrs(hir_id);
+ if let Some(mac_attr) = attrs.iter().find(|attr| attr.has_name(sym::macro_use));
+ if let Res::Def(DefKind::Mod, id) = path.res;
+ if !id.is_local();
+ then {
+ for kid in cx.tcx.module_children(id).iter() {
+ if let Res::Def(DefKind::Macro(_mac_type), mac_id) = kid.res {
+ let span = mac_attr.span;
+ let def_path = cx.tcx.def_path_str(mac_id);
+ self.imports.push((def_path, span, hir_id));
+ }
+ }
+ } else {
+ if item.span.from_expansion() {
+ self.push_unique_macro_pat_ty(cx, item.span);
+ }
+ }
+ }
+ }
+ fn check_attribute(&mut self, cx: &LateContext<'_>, attr: &ast::Attribute) {
+ if attr.span.from_expansion() {
+ self.push_unique_macro(cx, attr.span);
+ }
+ }
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
+ if expr.span.from_expansion() {
+ self.push_unique_macro(cx, expr.span);
+ }
+ }
+ fn check_stmt(&mut self, cx: &LateContext<'_>, stmt: &hir::Stmt<'_>) {
+ if stmt.span.from_expansion() {
+ self.push_unique_macro(cx, stmt.span);
+ }
+ }
+ fn check_pat(&mut self, cx: &LateContext<'_>, pat: &hir::Pat<'_>) {
+ if pat.span.from_expansion() {
+ self.push_unique_macro_pat_ty(cx, pat.span);
+ }
+ }
+ fn check_ty(&mut self, cx: &LateContext<'_>, ty: &hir::Ty<'_>) {
+ if ty.span.from_expansion() {
+ self.push_unique_macro_pat_ty(cx, ty.span);
+ }
+ }
+ fn check_crate_post(&mut self, cx: &LateContext<'_>) {
+ let mut used = FxHashMap::default();
+ let mut check_dup = vec![];
+ for (import, span, hir_id) in &self.imports {
+ let found_idx = self.mac_refs.iter().position(|mac| import.ends_with(&mac.name));
+
+ if let Some(idx) = found_idx {
+ self.mac_refs.remove(idx);
+ let seg = import.split("::").collect::<Vec<_>>();
+
+ match seg.as_slice() {
+ // an empty path is impossible
+ // a path should always consist of 2 or more segments
+ [] | [_] => return,
+ [root, item] => {
+ if !check_dup.contains(&(*item).to_string()) {
+ used.entry(((*root).to_string(), span, hir_id))
+ .or_insert_with(Vec::new)
+ .push((*item).to_string());
+ check_dup.push((*item).to_string());
+ }
+ },
+ [root, rest @ ..] => {
+ if rest.iter().all(|item| !check_dup.contains(&(*item).to_string())) {
+ let filtered = rest
+ .iter()
+ .filter_map(|item| {
+ if check_dup.contains(&(*item).to_string()) {
+ None
+ } else {
+ Some((*item).to_string())
+ }
+ })
+ .collect::<Vec<_>>();
+ used.entry(((*root).to_string(), span, hir_id))
+ .or_insert_with(Vec::new)
+ .push(filtered.join("::"));
+ check_dup.extend(filtered);
+ } else {
+ let rest = rest.to_vec();
+ used.entry(((*root).to_string(), span, hir_id))
+ .or_insert_with(Vec::new)
+ .push(rest.join("::"));
+ check_dup.extend(rest.iter().map(ToString::to_string));
+ }
+ },
+ }
+ }
+ }
+
+ let mut suggestions = vec![];
+ for ((root, span, hir_id), path) in used {
+ if path.len() == 1 {
+ suggestions.push((span, format!("{}::{}", root, path[0]), hir_id));
+ } else {
+ suggestions.push((span, format!("{}::{{{}}}", root, path.join(", ")), hir_id));
+ }
+ }
+
+        // If mac_refs is not empty, we have encountered an import we could not handle
+ // such as `std::prelude::v1::foo` or some other macro that expands to an import.
+ if self.mac_refs.is_empty() {
+ for (span, import, hir_id) in suggestions {
+ let help = format!("use {};", import);
+ span_lint_hir_and_then(
+ cx,
+ MACRO_USE_IMPORTS,
+ *hir_id,
+ *span,
+ "`macro_use` attributes are no longer needed in the Rust 2018 edition",
+ |diag| {
+ diag.span_suggestion(
+ *span,
+ "remove the attribute and import the macro directly, try",
+ help,
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+ }
+}
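+
+// Illustrative sketch (not part of the pass; `some_lib`, `foo` and `bar` are made-up
+// names): if the crate references the macros `some_lib::foo` and `some_lib::bar`
+// through a `#[macro_use]` import, the grouping above produces the single suggestion
+// `use some_lib::{foo, bar};` to replace the attribute.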
diff --git a/src/tools/clippy/clippy_lints/src/main_recursion.rs b/src/tools/clippy/clippy_lints/src/main_recursion.rs
new file mode 100644
index 000000000..20333c150
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/main_recursion.rs
@@ -0,0 +1,63 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet;
+use clippy_utils::{is_entrypoint_fn, is_no_std_crate};
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for recursion using the entrypoint.
+ ///
+ /// ### Why is this bad?
+    /// Apart from special setups (which we could detect via attributes like `#![no_std]`),
+    /// recursing into `main()` is an unintuitive anti-pattern we should be able to detect.
+ ///
+ /// ### Example
+ /// ```no_run
+ /// fn main() {
+ /// main();
+ /// }
+ /// ```
+ #[clippy::version = "1.38.0"]
+ pub MAIN_RECURSION,
+ style,
+ "recursion using the entrypoint"
+}
+
+#[derive(Default)]
+pub struct MainRecursion {
+ has_no_std_attr: bool,
+}
+
+impl_lint_pass!(MainRecursion => [MAIN_RECURSION]);
+
+impl LateLintPass<'_> for MainRecursion {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ self.has_no_std_attr = is_no_std_crate(cx);
+ }
+
+ fn check_expr_post(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if self.has_no_std_attr {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Call(func, _) = &expr.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path)) = &func.kind;
+ if let Some(def_id) = path.res.opt_def_id();
+ if is_entrypoint_fn(cx, def_id);
+ then {
+ span_lint_and_help(
+ cx,
+ MAIN_RECURSION,
+ func.span,
+ &format!("recursing into entrypoint `{}`", snippet(cx, func.span, "main")),
+ None,
+ "consider using another function for this recursion"
+ )
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_assert.rs b/src/tools/clippy/clippy_lints/src/manual_assert.rs
new file mode 100644
index 000000000..26b53ab5d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_assert.rs
@@ -0,0 +1,71 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::{root_macro_call, FormatArgsExpn};
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{peel_blocks_with_stmt, sugg};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects `if`-then-`panic!` that can be replaced with `assert!`.
+ ///
+ /// ### Why is this bad?
+ /// `assert!` is simpler than `if`-then-`panic!`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let sad_people: Vec<&str> = vec![];
+ /// if !sad_people.is_empty() {
+ /// panic!("there are sad people: {:?}", sad_people);
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let sad_people: Vec<&str> = vec![];
+ /// assert!(sad_people.is_empty(), "there are sad people: {:?}", sad_people);
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub MANUAL_ASSERT,
+ pedantic,
+ "`panic!` and only a `panic!` in `if`-then statement"
+}
+
+declare_lint_pass!(ManualAssert => [MANUAL_ASSERT]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualAssert {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
+ if_chain! {
+ if let ExprKind::If(cond, then, None) = expr.kind;
+ if !matches!(cond.kind, ExprKind::Let(_));
+ if !expr.span.from_expansion();
+ let then = peel_blocks_with_stmt(then);
+ if let Some(macro_call) = root_macro_call(then.span);
+ if cx.tcx.item_name(macro_call.def_id) == sym::panic;
+ if !cx.tcx.sess.source_map().is_multiline(cond.span);
+ if let Some(format_args) = FormatArgsExpn::find_nested(cx, then, macro_call.expn);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let format_args_snip = snippet_with_applicability(cx, format_args.inputs_span(), "..", &mut applicability);
+ let cond = cond.peel_drop_temps();
+ let (cond, not) = match cond.kind {
+ ExprKind::Unary(UnOp::Not, e) => (e, ""),
+ _ => (cond, "!"),
+ };
+ let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_par();
+ let sugg = format!("assert!({not}{cond_sugg}, {format_args_snip});");
+ span_lint_and_sugg(
+ cx,
+ MANUAL_ASSERT,
+ expr.span,
+ "only a `panic!` in `if`-then statement",
+ "try",
+ sugg,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_async_fn.rs b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
new file mode 100644
index 000000000..a0ca7e6ff
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_async_fn.rs
@@ -0,0 +1,202 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::match_function_call;
+use clippy_utils::paths::FUTURE_FROM_GENERATOR;
+use clippy_utils::source::{position_before_rarrow, snippet_block, snippet_opt};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{
+ AsyncGeneratorKind, Block, Body, Closure, Expr, ExprKind, FnDecl, FnRetTy, GeneratorKind, GenericArg, GenericBound,
+ HirId, IsAsync, ItemKind, LifetimeName, Term, TraitRef, Ty, TyKind, TypeBindingKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It checks for manual implementations of `async` functions.
+ ///
+ /// ### Why is this bad?
+ /// It's more idiomatic to use the dedicated syntax.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::future::Future;
+ ///
+ /// fn foo() -> impl Future<Output = i32> { async { 42 } }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// async fn foo() -> i32 { 42 }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub MANUAL_ASYNC_FN,
+ style,
+ "manual implementations of `async` functions can be simplified using the dedicated syntax"
+}
+
+declare_lint_pass!(ManualAsyncFn => [MANUAL_ASYNC_FN]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ _: HirId,
+ ) {
+ if_chain! {
+ if let Some(header) = kind.header();
+ if header.asyncness == IsAsync::NotAsync;
+ // Check that this function returns `impl Future`
+ if let FnRetTy::Return(ret_ty) = decl.output;
+ if let Some((trait_ref, output_lifetimes)) = future_trait_ref(cx, ret_ty);
+ if let Some(output) = future_output_ty(trait_ref);
+ if captures_all_lifetimes(decl.inputs, &output_lifetimes);
+ // Check that the body of the function consists of one async block
+ if let ExprKind::Block(block, _) = body.value.kind;
+ if block.stmts.is_empty();
+ if let Some(closure_body) = desugared_async_block(cx, block);
+ then {
+ let header_span = span.with_hi(ret_ty.span.hi());
+
+ span_lint_and_then(
+ cx,
+ MANUAL_ASYNC_FN,
+ header_span,
+ "this function can be simplified using the `async fn` syntax",
+ |diag| {
+ if_chain! {
+ if let Some(header_snip) = snippet_opt(cx, header_span);
+ if let Some(ret_pos) = position_before_rarrow(&header_snip);
+ if let Some((ret_sugg, ret_snip)) = suggested_ret(cx, output);
+ then {
+ let help = format!("make the function `async` and {}", ret_sugg);
+ diag.span_suggestion(
+ header_span,
+ &help,
+ format!("async {}{}", &header_snip[..ret_pos], ret_snip),
+ Applicability::MachineApplicable
+ );
+
+ let body_snip = snippet_block(cx, closure_body.value.span, "..", Some(block.span));
+ diag.span_suggestion(
+ block.span,
+ "move the body of the async block to the enclosing function",
+ body_snip,
+ Applicability::MachineApplicable
+ );
+ }
+ }
+ },
+ );
+ }
+ }
+ }
+}
+
+fn future_trait_ref<'tcx>(
+ cx: &LateContext<'tcx>,
+ ty: &'tcx Ty<'tcx>,
+) -> Option<(&'tcx TraitRef<'tcx>, Vec<LifetimeName>)> {
+ if_chain! {
+ if let TyKind::OpaqueDef(item_id, bounds) = ty.kind;
+ let item = cx.tcx.hir().item(item_id);
+ if let ItemKind::OpaqueTy(opaque) = &item.kind;
+ if let Some(trait_ref) = opaque.bounds.iter().find_map(|bound| {
+ if let GenericBound::Trait(poly, _) = bound {
+ Some(&poly.trait_ref)
+ } else {
+ None
+ }
+ });
+ if trait_ref.trait_def_id() == cx.tcx.lang_items().future_trait();
+ then {
+ let output_lifetimes = bounds
+ .iter()
+ .filter_map(|bound| {
+ if let GenericArg::Lifetime(lt) = bound {
+ Some(lt.name)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ return Some((trait_ref, output_lifetimes));
+ }
+ }
+
+ None
+}
+
+fn future_output_ty<'tcx>(trait_ref: &'tcx TraitRef<'tcx>) -> Option<&'tcx Ty<'tcx>> {
+ if_chain! {
+ if let Some(segment) = trait_ref.path.segments.last();
+ if let Some(args) = segment.args;
+ if args.bindings.len() == 1;
+ let binding = &args.bindings[0];
+ if binding.ident.name == sym::Output;
+ if let TypeBindingKind::Equality{term: Term::Ty(output)} = binding.kind;
+ then {
+ return Some(output)
+ }
+ }
+
+ None
+}
+
+fn captures_all_lifetimes(inputs: &[Ty<'_>], output_lifetimes: &[LifetimeName]) -> bool {
+ let input_lifetimes: Vec<LifetimeName> = inputs
+ .iter()
+ .filter_map(|ty| {
+ if let TyKind::Rptr(lt, _) = ty.kind {
+ Some(lt.name)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ // The lint should trigger in one of these cases:
+ // - There are no input lifetimes
+ // - There's only one output lifetime bound using `+ '_`
+ // - All input lifetimes are explicitly bound to the output
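+ // (illustrative: `fn f(x: &u8) -> impl Future<Output = u8> + '_` falls under the
+ // second case, since the single output lifetime is the inferred `'_`)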
+ input_lifetimes.is_empty()
+ || (output_lifetimes.len() == 1 && matches!(output_lifetimes[0], LifetimeName::Infer))
+ || input_lifetimes
+ .iter()
+ .all(|in_lt| output_lifetimes.iter().any(|out_lt| in_lt == out_lt))
+}
+
+fn desugared_async_block<'tcx>(cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) -> Option<&'tcx Body<'tcx>> {
+ if_chain! {
+ if let Some(block_expr) = block.expr;
+ if let Some(args) = match_function_call(cx, block_expr, &FUTURE_FROM_GENERATOR);
+ if args.len() == 1;
+ if let Expr{kind: ExprKind::Closure(&Closure { body, .. }), ..} = args[0];
+ let closure_body = cx.tcx.hir().body(body);
+ if closure_body.generator_kind == Some(GeneratorKind::Async(AsyncGeneratorKind::Block));
+ then {
+ return Some(closure_body);
+ }
+ }
+
+ None
+}
+
+fn suggested_ret(cx: &LateContext<'_>, output: &Ty<'_>) -> Option<(&'static str, String)> {
+ match output.kind {
+ TyKind::Tup(tys) if tys.is_empty() => {
+ let sugg = "remove the return type";
+ Some((sugg, "".into()))
+ },
+ _ => {
+ let sugg = "return the output of the future directly";
+ snippet_opt(cx, output.span).map(|snip| (sugg, format!(" -> {}", snip)))
+ },
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_bits.rs b/src/tools/clippy/clippy_lints/src/manual_bits.rs
new file mode 100644
index 000000000..60bbcde4f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_bits.rs
@@ -0,0 +1,146 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{get_parent_expr, meets_msrv, msrvs};
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, GenericArg, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, Ty};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for uses of `std::mem::size_of::<T>() * 8` when
+ /// `T::BITS` is available.
+ ///
+ /// ### Why is this bad?
+ /// Can be written as the shorter `T::BITS`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// std::mem::size_of::<usize>() * 8;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// usize::BITS as usize;
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub MANUAL_BITS,
+ style,
+ "manual implementation of `size_of::<T>() * 8` can be simplified with `T::BITS`"
+}
+
+#[derive(Clone)]
+pub struct ManualBits {
+ msrv: Option<RustcVersion>,
+}
+
+impl ManualBits {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(ManualBits => [MANUAL_BITS]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualBits {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if !meets_msrv(self.msrv, msrvs::MANUAL_BITS) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Binary(bin_op, left_expr, right_expr) = expr.kind;
+ if let BinOpKind::Mul = &bin_op.node;
+ if let Some((real_ty, resolved_ty, other_expr)) = get_one_size_of_ty(cx, left_expr, right_expr);
+ if matches!(resolved_ty.kind(), ty::Int(_) | ty::Uint(_));
+ if let ExprKind::Lit(lit) = &other_expr.kind;
+ if let LitKind::Int(8, _) = lit.node;
+ then {
+ let mut app = Applicability::MachineApplicable;
+ let ty_snip = snippet_with_applicability(cx, real_ty.span, "..", &mut app);
+ let sugg = create_sugg(cx, expr, format!("{ty_snip}::BITS"));
+
+ span_lint_and_sugg(
+ cx,
+ MANUAL_BITS,
+ expr.span,
+ "usage of `mem::size_of::<T>()` to obtain the size of `T` in bits",
+ "consider using",
+ sugg,
+ app,
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+fn get_one_size_of_ty<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr1: &'tcx Expr<'_>,
+ expr2: &'tcx Expr<'_>,
+) -> Option<(&'tcx rustc_hir::Ty<'tcx>, Ty<'tcx>, &'tcx Expr<'tcx>)> {
+ match (get_size_of_ty(cx, expr1), get_size_of_ty(cx, expr2)) {
+ (Some((real_ty, resolved_ty)), None) => Some((real_ty, resolved_ty, expr2)),
+ (None, Some((real_ty, resolved_ty))) => Some((real_ty, resolved_ty, expr1)),
+ _ => None,
+ }
+}
+
+fn get_size_of_ty<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<(&'tcx rustc_hir::Ty<'tcx>, Ty<'tcx>)> {
+ if_chain! {
+ if let ExprKind::Call(count_func, _func_args) = expr.kind;
+ if let ExprKind::Path(ref count_func_qpath) = count_func.kind;
+
+ if let QPath::Resolved(_, count_func_path) = count_func_qpath;
+ if let Some(segment_zero) = count_func_path.segments.get(0);
+ if let Some(args) = segment_zero.args;
+ if let Some(GenericArg::Type(real_ty)) = args.args.get(0);
+
+ if let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id();
+ if cx.tcx.is_diagnostic_item(sym::mem_size_of, def_id);
+ then {
+ cx.typeck_results().node_substs(count_func.hir_id).types().next().map(|resolved_ty| (real_ty, resolved_ty))
+ } else {
+ None
+ }
+ }
+}
+
+fn create_sugg(cx: &LateContext<'_>, expr: &Expr<'_>, base_sugg: String) -> String {
+ if let Some(parent_expr) = get_parent_expr(cx, expr) {
+ if is_ty_conversion(parent_expr) {
+ return base_sugg;
+ }
+
+ // These expressions have precedence over casts, so the suggestion
+ // needs to be wrapped in parentheses
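+ // (illustrative: `(u32::BITS as usize).pow(2)` needs the parentheses, since the
+ // method call would otherwise bind tighter than the `as` cast)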
+ match parent_expr.kind {
+ ExprKind::Unary(..) | ExprKind::AddrOf(..) | ExprKind::MethodCall(..) => {
+ return format!("({base_sugg} as usize)");
+ },
+ _ => {},
+ }
+ }
+
+ format!("{base_sugg} as usize")
+}
+
+fn is_ty_conversion(expr: &Expr<'_>) -> bool {
+ if let ExprKind::Cast(..) = expr.kind {
+ true
+ } else if let ExprKind::MethodCall(path, [_], _) = expr.kind
+ && path.ident.name == rustc_span::sym::try_into
+ {
+ // This is only called for `usize` which implements `TryInto`. Therefore,
+ // we don't have to check here if `self` implements the `TryInto` trait.
+ true
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
new file mode 100644
index 000000000..2b04475c7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs
@@ -0,0 +1,221 @@
+use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::{is_doc_hidden, meets_msrv, msrvs};
+use rustc_ast::ast::{self, VisibilityKind};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
+use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
+use rustc_hir::{self as hir, Expr, ExprKind, QPath};
+use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
+use rustc_middle::ty::DefIdTree;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::{DefId, LocalDefId};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual implementations of the non-exhaustive pattern.
+ ///
+ /// ### Why is this bad?
+ /// Using the #[non_exhaustive] attribute better expresses the intent
+ /// and allows possible optimizations when applied to enums.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct S {
+ /// pub a: i32,
+ /// pub b: i32,
+ /// _c: (),
+ /// }
+ ///
+ /// enum E {
+ /// A,
+ /// B,
+ /// #[doc(hidden)]
+ /// _C,
+ /// }
+ ///
+ /// struct T(pub i32, pub i32, ());
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[non_exhaustive]
+ /// struct S {
+ /// pub a: i32,
+ /// pub b: i32,
+ /// }
+ ///
+ /// #[non_exhaustive]
+ /// enum E {
+ /// A,
+ /// B,
+ /// }
+ ///
+ /// #[non_exhaustive]
+ /// struct T(pub i32, pub i32);
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub MANUAL_NON_EXHAUSTIVE,
+ style,
+ "manual implementations of the non-exhaustive pattern can be simplified using #[non_exhaustive]"
+}
+
+#[expect(clippy::module_name_repetitions)]
+pub struct ManualNonExhaustiveStruct {
+ msrv: Option<RustcVersion>,
+}
+
+impl ManualNonExhaustiveStruct {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(ManualNonExhaustiveStruct => [MANUAL_NON_EXHAUSTIVE]);
+
+#[expect(clippy::module_name_repetitions)]
+pub struct ManualNonExhaustiveEnum {
+ msrv: Option<RustcVersion>,
+ constructed_enum_variants: FxHashSet<(DefId, DefId)>,
+ potential_enums: Vec<(LocalDefId, LocalDefId, Span, Span)>,
+}
+
+impl ManualNonExhaustiveEnum {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self {
+ msrv,
+ constructed_enum_variants: FxHashSet::default(),
+ potential_enums: Vec::new(),
+ }
+ }
+}
+
+impl_lint_pass!(ManualNonExhaustiveEnum => [MANUAL_NON_EXHAUSTIVE]);
+
+impl EarlyLintPass for ManualNonExhaustiveStruct {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) {
+ if !meets_msrv(self.msrv, msrvs::NON_EXHAUSTIVE) {
+ return;
+ }
+
+ if let ast::ItemKind::Struct(variant_data, _) = &item.kind {
+ let (fields, delimiter) = match variant_data {
+ ast::VariantData::Struct(fields, _) => (&**fields, '{'),
+ ast::VariantData::Tuple(fields, _) => (&**fields, '('),
+ ast::VariantData::Unit(_) => return,
+ };
+ if fields.len() <= 1 {
+ return;
+ }
+ let mut iter = fields.iter().filter_map(|f| match f.vis.kind {
+ VisibilityKind::Public => None,
+ VisibilityKind::Inherited => Some(Ok(f)),
+ VisibilityKind::Restricted { .. } => Some(Err(())),
+ });
+ if let Some(Ok(field)) = iter.next()
+ && iter.next().is_none()
+ && field.ty.kind.is_unit()
+ && field.ident.map_or(true, |name| name.as_str().starts_with('_'))
+ {
+ span_lint_and_then(
+ cx,
+ MANUAL_NON_EXHAUSTIVE,
+ item.span,
+ "this seems like a manual implementation of the non-exhaustive pattern",
+ |diag| {
+ if !item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive))
+ && let header_span = cx.sess().source_map().span_until_char(item.span, delimiter)
+ && let Some(snippet) = snippet_opt(cx, header_span)
+ {
+ diag.span_suggestion(
+ header_span,
+ "add the attribute",
+ format!("#[non_exhaustive] {}", snippet),
+ Applicability::Unspecified,
+ );
+ }
+ diag.span_help(field.span, "remove this field");
+ }
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(EarlyContext);
+}
+
+impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ if !meets_msrv(self.msrv, msrvs::NON_EXHAUSTIVE) {
+ return;
+ }
+
+ if let hir::ItemKind::Enum(def, _) = &item.kind
+ && def.variants.len() > 1
+ {
+ let mut iter = def.variants.iter().filter_map(|v| {
+ let id = cx.tcx.hir().local_def_id(v.id);
+ (matches!(v.data, hir::VariantData::Unit(_))
+ && v.ident.as_str().starts_with('_')
+ && is_doc_hidden(cx.tcx.hir().attrs(v.id)))
+ .then_some((id, v.span))
+ });
+ if let Some((id, span)) = iter.next()
+ && iter.next().is_none()
+ {
+ self.potential_enums.push((item.def_id, id, item.span, span));
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let ExprKind::Path(QPath::Resolved(None, p)) = &e.kind
+ && let [.., name] = p.segments
+ && let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), id) = p.res
+ && name.ident.as_str().starts_with('_')
+ {
+ let variant_id = cx.tcx.parent(id);
+ let enum_id = cx.tcx.parent(variant_id);
+
+ self.constructed_enum_variants.insert((enum_id, variant_id));
+ }
+ }
+
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ for &(enum_id, _, enum_span, variant_span) in
+ self.potential_enums.iter().filter(|&&(enum_id, variant_id, _, _)| {
+ !self
+ .constructed_enum_variants
+ .contains(&(enum_id.to_def_id(), variant_id.to_def_id()))
+ })
+ {
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(enum_id);
+ span_lint_hir_and_then(
+ cx,
+ MANUAL_NON_EXHAUSTIVE,
+ hir_id,
+ enum_span,
+ "this seems like a manual implementation of the non-exhaustive pattern",
+ |diag| {
+ if !cx.tcx.adt_def(enum_id).is_variant_list_non_exhaustive()
+ && let header_span = cx.sess().source_map().span_until_char(enum_span, '{')
+ && let Some(snippet) = snippet_opt(cx, header_span)
+ {
+ diag.span_suggestion(
+ header_span,
+ "add the attribute",
+ format!("#[non_exhaustive] {}", snippet),
+ Applicability::Unspecified,
+ );
+ }
+ diag.span_help(variant_span, "remove this variant");
+ },
+ );
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_ok_or.rs b/src/tools/clippy/clippy_lints/src/manual_ok_or.rs
new file mode 100644
index 000000000..9abf2507b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_ok_or.rs
@@ -0,0 +1,98 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_lang_ctor, path_to_local_id};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{ResultErr, ResultOk};
+use rustc_hir::{Closure, Expr, ExprKind, PatKind};
+use rustc_lint::LintContext;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Finds patterns that reimplement `Option::ok_or`.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// Concise code helps focus on behavior instead of boilerplate.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// let foo: Option<i32> = None;
+ /// foo.map_or(Err("error"), |v| Ok(v));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let foo: Option<i32> = None;
+ /// foo.ok_or("error");
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub MANUAL_OK_OR,
+ pedantic,
+ "finds patterns that can be encoded more concisely with `Option::ok_or`"
+}
+
+declare_lint_pass!(ManualOkOr => [MANUAL_OK_OR]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualOkOr {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'tcx>) {
+ if in_external_macro(cx.sess(), scrutinee.span) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::MethodCall(method_segment, args, _) = scrutinee.kind;
+ if method_segment.ident.name == sym!(map_or);
+ if args.len() == 3;
+ let method_receiver = &args[0];
+ let ty = cx.typeck_results().expr_ty(method_receiver);
+ if is_type_diagnostic_item(cx, ty, sym::Option);
+ let or_expr = &args[1];
+ if is_ok_wrapping(cx, &args[2]);
+ if let ExprKind::Call(Expr { kind: ExprKind::Path(err_path), .. }, &[ref err_arg]) = or_expr.kind;
+ if is_lang_ctor(cx, err_path, ResultErr);
+ if let Some(method_receiver_snippet) = snippet_opt(cx, method_receiver.span);
+ if let Some(err_arg_snippet) = snippet_opt(cx, err_arg.span);
+ if let Some(indent) = indent_of(cx, scrutinee.span);
+ then {
+ let reindented_err_arg_snippet =
+ reindent_multiline(err_arg_snippet.into(), true, Some(indent + 4));
+ span_lint_and_sugg(
+ cx,
+ MANUAL_OK_OR,
+ scrutinee.span,
+ "this pattern reimplements `Option::ok_or`",
+ "replace with",
+ format!(
+ "{}.ok_or({})",
+ method_receiver_snippet,
+ reindented_err_arg_snippet
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
+
+fn is_ok_wrapping(cx: &LateContext<'_>, map_expr: &Expr<'_>) -> bool {
+ if let ExprKind::Path(ref qpath) = map_expr.kind {
+ if is_lang_ctor(cx, qpath, ResultOk) {
+ return true;
+ }
+ }
+ if_chain! {
+ if let ExprKind::Closure(&Closure { body, .. }) = map_expr.kind;
+ let body = cx.tcx.hir().body(body);
+ if let PatKind::Binding(_, param_id, ..) = body.params[0].pat.kind;
+ if let ExprKind::Call(Expr { kind: ExprKind::Path(ok_path), .. }, &[ref ok_arg]) = body.value.kind;
+ if is_lang_ctor(cx, ok_path, ResultOk);
+ then { path_to_local_id(ok_arg, param_id) } else { false }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
new file mode 100644
index 000000000..95cc6bdbd
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
@@ -0,0 +1,123 @@
+use clippy_utils::consts::{constant_full_int, FullInt};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{in_constant, meets_msrv, msrvs, path_to_local};
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, Node, TyKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for an expression like `((x % 4) + 4) % 4` which is a common manual reimplementation
+ /// of `x.rem_euclid(4)`.
+ ///
+ /// ### Why is this bad?
+ /// It's simpler and more readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: i32 = 24;
+ /// let rem = ((x % 4) + 4) % 4;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x: i32 = 24;
+ /// let rem = x.rem_euclid(4);
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub MANUAL_REM_EUCLID,
+ complexity,
+ "manually reimplementing `rem_euclid`"
+}
+
+pub struct ManualRemEuclid {
+ msrv: Option<RustcVersion>,
+}
+
+impl ManualRemEuclid {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(ManualRemEuclid => [MANUAL_REM_EUCLID]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualRemEuclid {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if !meets_msrv(self.msrv, msrvs::REM_EUCLID) {
+ return;
+ }
+
+ if in_constant(cx, expr.hir_id) && !meets_msrv(self.msrv, msrvs::REM_EUCLID_CONST) {
+ return;
+ }
+
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Binary(op1, expr1, right) = expr.kind
+ && op1.node == BinOpKind::Rem
+ && let Some(const1) = check_for_unsigned_int_constant(cx, right)
+ && let ExprKind::Binary(op2, left, right) = expr1.kind
+ && op2.node == BinOpKind::Add
+ && let Some((const2, expr2)) = check_for_either_unsigned_int_constant(cx, left, right)
+ && let ExprKind::Binary(op3, expr3, right) = expr2.kind
+ && op3.node == BinOpKind::Rem
+ && let Some(const3) = check_for_unsigned_int_constant(cx, right)
+ // Also ensures the const is nonzero since zero can't be a divisor
+ && const1 == const2 && const2 == const3
+ && let Some(hir_id) = path_to_local(expr3)
+ && let Some(Node::Pat(_)) = cx.tcx.hir().find(hir_id) {
+ // Apply only to params or locals with annotated types
+ match cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
+ Some(Node::Param(..)) => (),
+ Some(Node::Local(local)) => {
+ let Some(ty) = local.ty else { return };
+ if matches!(ty.kind, TyKind::Infer) {
+ return;
+ }
+ }
+ _ => return,
+ };
+
+ let mut app = Applicability::MachineApplicable;
+ let rem_of = snippet_with_applicability(cx, expr3.span, "_", &mut app);
+ span_lint_and_sugg(
+ cx,
+ MANUAL_REM_EUCLID,
+ expr.span,
+ "manual `rem_euclid` implementation",
+ "consider using",
+ format!("{rem_of}.rem_euclid({const1})"),
+ app,
+ );
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+// Checks if either the left or right expressions can be an unsigned int constant and returns that
+// constant along with the other expression unchanged if so
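+// (illustrative: for `4 + (x % 4)`, this returns `Some((4, x % 4))`)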
+fn check_for_either_unsigned_int_constant<'a>(
+ cx: &'a LateContext<'_>,
+ left: &'a Expr<'_>,
+ right: &'a Expr<'_>,
+) -> Option<(u128, &'a Expr<'a>)> {
+ check_for_unsigned_int_constant(cx, left)
+ .map(|int_const| (int_const, right))
+ .or_else(|| check_for_unsigned_int_constant(cx, right).map(|int_const| (int_const, left)))
+}
+
+fn check_for_unsigned_int_constant<'a>(cx: &'a LateContext<'_>, expr: &'a Expr<'_>) -> Option<u128> {
+ let Some(int_const) = constant_full_int(cx, cx.typeck_results(), expr) else { return None };
+ match int_const {
+ FullInt::S(s) => s.try_into().ok(),
+ FullInt::U(u) => Some(u),
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_retain.rs b/src/tools/clippy/clippy_lints/src/manual_retain.rs
new file mode 100644
index 000000000..42d2577cc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_retain.rs
@@ -0,0 +1,228 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{get_parent_expr, match_def_path, paths, SpanlessEq};
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::def_id::DefId;
+use rustc_hir::ExprKind::Assign;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::sym;
+
+const ACCEPTABLE_METHODS: [&[&str]; 4] = [
+ &paths::HASHSET_ITER,
+ &paths::BTREESET_ITER,
+ &paths::SLICE_INTO,
+ &paths::VEC_DEQUE_ITER,
+];
+const ACCEPTABLE_TYPES: [(rustc_span::Symbol, Option<RustcVersion>); 6] = [
+ (sym::BTreeSet, Some(msrvs::BTREE_SET_RETAIN)),
+ (sym::BTreeMap, Some(msrvs::BTREE_MAP_RETAIN)),
+ (sym::HashSet, Some(msrvs::HASH_SET_RETAIN)),
+ (sym::HashMap, Some(msrvs::HASH_MAP_RETAIN)),
+ (sym::Vec, None),
+ (sym::VecDeque, None),
+];
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for code that can be replaced by `.retain()`.
+ /// ### Why is this bad?
+ /// `.retain()` is simpler and avoids needless allocation.
+ /// ### Example
+ /// ```rust
+ /// let mut vec = vec![0, 1, 2];
+ /// vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect();
+ /// vec = vec.into_iter().filter(|x| x % 2 == 0).collect();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let mut vec = vec![0, 1, 2];
+ /// vec.retain(|x| x % 2 == 0);
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub MANUAL_RETAIN,
+ perf,
+ "`retain()` is simpler and the same functionalitys"
+}
+
+pub struct ManualRetain {
+ msrv: Option<RustcVersion>,
+}
+
+impl ManualRetain {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(ManualRetain => [MANUAL_RETAIN]);
+
+impl<'tcx> LateLintPass<'tcx> for ManualRetain {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if let Some(parent_expr) = get_parent_expr(cx, expr)
+ && let Assign(left_expr, collect_expr, _) = &parent_expr.kind
+ && let hir::ExprKind::MethodCall(seg, _, _) = &collect_expr.kind
+ && seg.args.is_none()
+ && let hir::ExprKind::MethodCall(_, [target_expr], _) = &collect_expr.kind
+ && let Some(collect_def_id) = cx.typeck_results().type_dependent_def_id(collect_expr.hir_id)
+ && match_def_path(cx, collect_def_id, &paths::CORE_ITER_COLLECT) {
+ check_into_iter(cx, parent_expr, left_expr, target_expr, self.msrv);
+ check_iter(cx, parent_expr, left_expr, target_expr, self.msrv);
+ check_to_owned(cx, parent_expr, left_expr, target_expr, self.msrv);
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+fn check_into_iter(
+ cx: &LateContext<'_>,
+ parent_expr: &hir::Expr<'_>,
+ left_expr: &hir::Expr<'_>,
+ target_expr: &hir::Expr<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if let hir::ExprKind::MethodCall(_, [into_iter_expr, _], _) = &target_expr.kind
+ && let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
+ && match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
+ && let hir::ExprKind::MethodCall(_, [struct_expr], _) = &into_iter_expr.kind
+ && let Some(into_iter_def_id) = cx.typeck_results().type_dependent_def_id(into_iter_expr.hir_id)
+ && match_def_path(cx, into_iter_def_id, &paths::CORE_ITER_INTO_ITER)
+ && match_acceptable_type(cx, left_expr, msrv)
+ && SpanlessEq::new(cx).eq_expr(left_expr, struct_expr) {
+ suggest(cx, parent_expr, left_expr, target_expr);
+ }
+}
+
+fn check_iter(
+ cx: &LateContext<'_>,
+ parent_expr: &hir::Expr<'_>,
+ left_expr: &hir::Expr<'_>,
+ target_expr: &hir::Expr<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if let hir::ExprKind::MethodCall(_, [filter_expr], _) = &target_expr.kind
+ && let Some(copied_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
+ && (match_def_path(cx, copied_def_id, &paths::CORE_ITER_COPIED)
+ || match_def_path(cx, copied_def_id, &paths::CORE_ITER_CLONED))
+ && let hir::ExprKind::MethodCall(_, [iter_expr, _], _) = &filter_expr.kind
+ && let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(filter_expr.hir_id)
+ && match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
+ && let hir::ExprKind::MethodCall(_, [struct_expr], _) = &iter_expr.kind
+ && let Some(iter_expr_def_id) = cx.typeck_results().type_dependent_def_id(iter_expr.hir_id)
+ && match_acceptable_def_path(cx, iter_expr_def_id)
+ && match_acceptable_type(cx, left_expr, msrv)
+ && SpanlessEq::new(cx).eq_expr(left_expr, struct_expr) {
+ suggest(cx, parent_expr, left_expr, filter_expr);
+ }
+}
+
+fn check_to_owned(
+ cx: &LateContext<'_>,
+ parent_expr: &hir::Expr<'_>,
+ left_expr: &hir::Expr<'_>,
+ target_expr: &hir::Expr<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if meets_msrv(msrv, msrvs::STRING_RETAIN)
+ && let hir::ExprKind::MethodCall(_, [filter_expr], _) = &target_expr.kind
+ && let Some(to_owned_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
+ && match_def_path(cx, to_owned_def_id, &paths::TO_OWNED_METHOD)
+ && let hir::ExprKind::MethodCall(_, [chars_expr, _], _) = &filter_expr.kind
+ && let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(filter_expr.hir_id)
+ && match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
+ && let hir::ExprKind::MethodCall(_, [str_expr], _) = &chars_expr.kind
+ && let Some(chars_expr_def_id) = cx.typeck_results().type_dependent_def_id(chars_expr.hir_id)
+ && match_def_path(cx, chars_expr_def_id, &paths::STR_CHARS)
+ && let ty = cx.typeck_results().expr_ty(str_expr).peel_refs()
+ && is_type_diagnostic_item(cx, ty, sym::String)
+ && SpanlessEq::new(cx).eq_expr(left_expr, str_expr) {
+ suggest(cx, parent_expr, left_expr, filter_expr);
+ }
+}
+
+fn suggest(cx: &LateContext<'_>, parent_expr: &hir::Expr<'_>, left_expr: &hir::Expr<'_>, filter_expr: &hir::Expr<'_>) {
+ if let hir::ExprKind::MethodCall(_, [_, closure], _) = filter_expr.kind
+ && let hir::ExprKind::Closure(&hir::Closure { body, ..}) = closure.kind
+ && let filter_body = cx.tcx.hir().body(body)
+ && let [filter_params] = filter_body.params
+ && let Some(sugg) = match filter_params.pat.kind {
+ hir::PatKind::Binding(_, _, filter_param_ident, None) => {
+ Some(format!("{}.retain(|{}| {})", snippet(cx, left_expr.span, ".."), filter_param_ident, snippet(cx, filter_body.value.span, "..")))
+ },
+ hir::PatKind::Tuple([key_pat, value_pat], _) => {
+ make_sugg(cx, key_pat, value_pat, left_expr, filter_body)
+ },
+ hir::PatKind::Ref(pat, _) => {
+ match pat.kind {
+ hir::PatKind::Binding(_, _, filter_param_ident, None) => {
+ Some(format!("{}.retain(|{}| {})", snippet(cx, left_expr.span, ".."), filter_param_ident, snippet(cx, filter_body.value.span, "..")))
+ },
+ _ => None
+ }
+ },
+ _ => None
+ } {
+ span_lint_and_sugg(
+ cx,
+ MANUAL_RETAIN,
+ parent_expr.span,
+ "this expression can be written more simply using `.retain()`",
+ "consider calling `.retain()` instead",
+ sugg,
+ Applicability::MachineApplicable
+ );
+ }
+}
+
+fn make_sugg(
+ cx: &LateContext<'_>,
+ key_pat: &rustc_hir::Pat<'_>,
+ value_pat: &rustc_hir::Pat<'_>,
+ left_expr: &hir::Expr<'_>,
+ filter_body: &hir::Body<'_>,
+) -> Option<String> {
+ match (&key_pat.kind, &value_pat.kind) {
+ (hir::PatKind::Binding(_, _, key_param_ident, None), hir::PatKind::Binding(_, _, value_param_ident, None)) => {
+ Some(format!(
+ "{}.retain(|{}, &mut {}| {})",
+ snippet(cx, left_expr.span, ".."),
+ key_param_ident,
+ value_param_ident,
+ snippet(cx, filter_body.value.span, "..")
+ ))
+ },
+ (hir::PatKind::Binding(_, _, key_param_ident, None), hir::PatKind::Wild) => Some(format!(
+ "{}.retain(|{}, _| {})",
+ snippet(cx, left_expr.span, ".."),
+ key_param_ident,
+ snippet(cx, filter_body.value.span, "..")
+ )),
+ (hir::PatKind::Wild, hir::PatKind::Binding(_, _, value_param_ident, None)) => Some(format!(
+ "{}.retain(|_, &mut {}| {})",
+ snippet(cx, left_expr.span, ".."),
+ value_param_ident,
+ snippet(cx, filter_body.value.span, "..")
+ )),
+ _ => None,
+ }
+}
+
+fn match_acceptable_def_path(cx: &LateContext<'_>, collect_def_id: DefId) -> bool {
+ ACCEPTABLE_METHODS
+ .iter()
+ .any(|&method| match_def_path(cx, collect_def_id, method))
+}
+
+fn match_acceptable_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>, msrv: Option<RustcVersion>) -> bool {
+ let expr_ty = cx.typeck_results().expr_ty(expr).peel_refs();
+ ACCEPTABLE_TYPES.iter().any(|(ty, acceptable_msrv)| {
+ is_type_diagnostic_item(cx, expr_ty, *ty)
+ && acceptable_msrv.map_or(true, |acceptable_msrv| meets_msrv(msrv, acceptable_msrv))
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/manual_strip.rs b/src/tools/clippy/clippy_lints/src/manual_strip.rs
new file mode 100644
index 000000000..dfb3efc4e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_strip.rs
@@ -0,0 +1,252 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::source::snippet;
+use clippy_utils::usage::mutated_variables;
+use clippy_utils::{eq_expr_value, higher, match_def_path, meets_msrv, msrvs, paths};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_hir::def::Res;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::BinOpKind;
+use rustc_hir::{BorrowKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Spanned;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Suggests using `strip_{prefix,suffix}` over `str::{starts,ends}_with` and slicing using
+ /// the pattern's length.
+ ///
+ /// ### Why is this bad?
+ /// Using `str::strip_{prefix,suffix}` is safer and may have better performance as there is no
+ /// slicing which may panic and the compiler does not need to insert this panic code. It is
+ /// also sometimes more readable as it removes the need for duplicating or storing the pattern
+ /// used by `str::{starts,ends}_with` and in the slicing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let s = "hello, world!";
+ /// if s.starts_with("hello, ") {
+ /// assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let s = "hello, world!";
+ /// if let Some(end) = s.strip_prefix("hello, ") {
+ /// assert_eq!(end.to_uppercase(), "WORLD!");
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub MANUAL_STRIP,
+ complexity,
+ "suggests using `strip_{prefix,suffix}` over `str::{starts,ends}_with` and slicing"
+}
+
+pub struct ManualStrip {
+ msrv: Option<RustcVersion>,
+}
+
+impl ManualStrip {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(ManualStrip => [MANUAL_STRIP]);
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum StripKind {
+ Prefix,
+ Suffix,
+}
+
+impl<'tcx> LateLintPass<'tcx> for ManualStrip {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if !meets_msrv(self.msrv, msrvs::STR_STRIP_PREFIX) {
+ return;
+ }
+
+ if_chain! {
+ if let Some(higher::If { cond, then, .. }) = higher::If::hir(expr);
+ if let ExprKind::MethodCall(_, [target_arg, pattern], _) = cond.kind;
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(cond.hir_id);
+ if let ExprKind::Path(target_path) = &target_arg.kind;
+ then {
+ let strip_kind = if match_def_path(cx, method_def_id, &paths::STR_STARTS_WITH) {
+ StripKind::Prefix
+ } else if match_def_path(cx, method_def_id, &paths::STR_ENDS_WITH) {
+ StripKind::Suffix
+ } else {
+ return;
+ };
+ let target_res = cx.qpath_res(target_path, target_arg.hir_id);
+ if target_res == Res::Err {
+ return;
+ };
+
+ if_chain! {
+ if let Res::Local(hir_id) = target_res;
+ if let Some(used_mutably) = mutated_variables(then, cx);
+ if used_mutably.contains(&hir_id);
+ then {
+ return;
+ }
+ }
+
+ let strippings = find_stripping(cx, strip_kind, target_res, pattern, then);
+ if !strippings.is_empty() {
+
+ let kind_word = match strip_kind {
+ StripKind::Prefix => "prefix",
+ StripKind::Suffix => "suffix",
+ };
+
+ let test_span = expr.span.until(then.span);
+ span_lint_and_then(cx, MANUAL_STRIP, strippings[0], &format!("stripping a {} manually", kind_word), |diag| {
+ diag.span_note(test_span, &format!("the {} was tested here", kind_word));
+ multispan_sugg(
+ diag,
+ &format!("try using the `strip_{}` method", kind_word),
+ vec![(test_span,
+ format!("if let Some(<stripped>) = {}.strip_{}({}) ",
+ snippet(cx, target_arg.span, ".."),
+ kind_word,
+ snippet(cx, pattern.span, "..")))]
+ .into_iter().chain(strippings.into_iter().map(|span| (span, "<stripped>".into()))),
+ );
+ });
+ }
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+// Returns `Some(arg)` if `expr` matches `arg.len()` and `None` otherwise.
+fn len_arg<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if_chain! {
+ if let ExprKind::MethodCall(_, [arg], _) = expr.kind;
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if match_def_path(cx, method_def_id, &paths::STR_LEN);
+ then {
+ Some(arg)
+ } else {
+ None
+ }
+ }
+}
+
+// Returns the length of the `expr` if it's a constant string or char.
+fn constant_length(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u128> {
+ let (value, _) = constant(cx, cx.typeck_results(), expr)?;
+ match value {
+ Constant::Str(value) => Some(value.len() as u128),
+ Constant::Char(value) => Some(value.len_utf8() as u128),
+ _ => None,
+ }
+}
+
+// Tests if `expr` equals the length of the pattern.
+fn eq_pattern_length<'tcx>(cx: &LateContext<'tcx>, pattern: &Expr<'_>, expr: &'tcx Expr<'_>) -> bool {
+ if let ExprKind::Lit(Spanned {
+ node: LitKind::Int(n, _),
+ ..
+ }) = expr.kind
+ {
+ constant_length(cx, pattern).map_or(false, |length| length == n)
+ } else {
+ len_arg(cx, expr).map_or(false, |arg| eq_expr_value(cx, pattern, arg))
+ }
+}
+
+// Tests if `expr` is a `&str`.
+fn is_ref_str(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ match cx.typeck_results().expr_ty_adjusted(expr).kind() {
+ ty::Ref(_, ty, _) => ty.is_str(),
+ _ => false,
+ }
+}
+
+// Removes the outer `AddrOf` expression if needed.
+fn peel_ref<'a>(expr: &'a Expr<'_>) -> &'a Expr<'a> {
+ if let ExprKind::AddrOf(BorrowKind::Ref, _, unref) = &expr.kind {
+ unref
+ } else {
+ expr
+ }
+}
+
+// Find expressions where `target` is stripped using the length of `pattern`.
+// We'll suggest replacing these expressions with the result of the `strip_{prefix,suffix}`
+// method.
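+// (illustrative: with `s.starts_with("hello, ")` tested in the condition, an index
+// expression like `&s["hello, ".len()..]` in the body is collected here)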
+fn find_stripping<'tcx>(
+ cx: &LateContext<'tcx>,
+ strip_kind: StripKind,
+ target: Res,
+ pattern: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+) -> Vec<Span> {
+ struct StrippingFinder<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ strip_kind: StripKind,
+ target: Res,
+ pattern: &'tcx Expr<'tcx>,
+ results: Vec<Span>,
+ }
+
+ impl<'a, 'tcx> Visitor<'tcx> for StrippingFinder<'a, 'tcx> {
+ fn visit_expr(&mut self, ex: &'tcx Expr<'_>) {
+ if_chain! {
+ if is_ref_str(self.cx, ex);
+ let unref = peel_ref(ex);
+ if let ExprKind::Index(indexed, index) = &unref.kind;
+ if let Some(higher::Range { start, end, .. }) = higher::Range::hir(index);
+ if let ExprKind::Path(path) = &indexed.kind;
+ if self.cx.qpath_res(path, ex.hir_id) == self.target;
+ then {
+ match (self.strip_kind, start, end) {
+ (StripKind::Prefix, Some(start), None) => {
+ if eq_pattern_length(self.cx, self.pattern, start) {
+ self.results.push(ex.span);
+ return;
+ }
+ },
+ (StripKind::Suffix, None, Some(end)) => {
+ if_chain! {
+ if let ExprKind::Binary(Spanned { node: BinOpKind::Sub, .. }, left, right) = end.kind;
+ if let Some(left_arg) = len_arg(self.cx, left);
+ if let ExprKind::Path(left_path) = &left_arg.kind;
+ if self.cx.qpath_res(left_path, left_arg.hir_id) == self.target;
+ if eq_pattern_length(self.cx, self.pattern, right);
+ then {
+ self.results.push(ex.span);
+ return;
+ }
+ }
+ },
+ _ => {}
+ }
+ }
+ }
+
+ walk_expr(self, ex);
+ }
+ }
+
+ let mut finder = StrippingFinder {
+ cx,
+ strip_kind,
+ target,
+ pattern,
+ results: vec![],
+ };
+ walk_expr(&mut finder, expr);
+ finder.results
+}
diff --git a/src/tools/clippy/clippy_lints/src/map_clone.rs b/src/tools/clippy/clippy_lints/src/map_clone.rs
new file mode 100644
index 000000000..95c312f1f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/map_clone.rs
@@ -0,0 +1,167 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::{is_copy, is_type_diagnostic_item};
+use clippy_utils::{is_trait_method, meets_msrv, msrvs, peel_blocks};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::mir::Mutability;
+use rustc_middle::ty;
+use rustc_middle::ty::adjustment::Adjust;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::Ident;
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `map(|x| x.clone())` or
+ /// dereferencing closures for `Copy` types, on `Iterator` or `Option`,
+ /// and suggests `cloned()` or `copied()` instead
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = vec![42, 43];
+ /// let y = x.iter();
+ /// let z = y.map(|i| *i);
+ /// ```
+ ///
+ /// The correct use would be:
+ ///
+ /// ```rust
+ /// let x = vec![42, 43];
+ /// let y = x.iter();
+ /// let z = y.cloned();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MAP_CLONE,
+ style,
+ "using `iterator.map(|x| x.clone())`, or dereferencing closures for `Copy` types"
+}
+
+pub struct MapClone {
+ msrv: Option<RustcVersion>,
+}
+
+impl_lint_pass!(MapClone => [MAP_CLONE]);
+
+impl MapClone {
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for MapClone {
+ fn check_expr(&mut self, cx: &LateContext<'_>, e: &hir::Expr<'_>) {
+ if e.span.from_expansion() {
+ return;
+ }
+
+ if_chain! {
+ if let hir::ExprKind::MethodCall(method, args, _) = e.kind;
+ if args.len() == 2;
+ if method.ident.name == sym::map;
+ let ty = cx.typeck_results().expr_ty(&args[0]);
+ if is_type_diagnostic_item(cx, ty, sym::Option) || is_trait_method(cx, e, sym::Iterator);
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = args[1].kind;
+ then {
+ let closure_body = cx.tcx.hir().body(body);
+ let closure_expr = peel_blocks(&closure_body.value);
+ match closure_body.params[0].pat.kind {
+ hir::PatKind::Ref(inner, hir::Mutability::Not) => if let hir::PatKind::Binding(
+ hir::BindingAnnotation::Unannotated, .., name, None
+ ) = inner.kind {
+ if ident_eq(name, closure_expr) {
+ self.lint_explicit_closure(cx, e.span, args[0].span, true);
+ }
+ },
+ hir::PatKind::Binding(hir::BindingAnnotation::Unannotated, .., name, None) => {
+ match closure_expr.kind {
+ hir::ExprKind::Unary(hir::UnOp::Deref, inner) => {
+ if ident_eq(name, inner) {
+ if let ty::Ref(.., Mutability::Not) = cx.typeck_results().expr_ty(inner).kind() {
+ self.lint_explicit_closure(cx, e.span, args[0].span, true);
+ }
+ }
+ },
+ hir::ExprKind::MethodCall(method, [obj], _) => if_chain! {
+ if ident_eq(name, obj) && method.ident.name == sym::clone;
+ if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id);
+ if let Some(trait_id) = cx.tcx.trait_of_item(fn_id);
+ if cx.tcx.lang_items().clone_trait().map_or(false, |id| id == trait_id);
+ // no autoderefs
+ if !cx.typeck_results().expr_adjustments(obj).iter()
+ .any(|a| matches!(a.kind, Adjust::Deref(Some(..))));
+ then {
+ let obj_ty = cx.typeck_results().expr_ty(obj);
+ if let ty::Ref(_, ty, mutability) = obj_ty.kind() {
+ if matches!(mutability, Mutability::Not) {
+ let copy = is_copy(cx, *ty);
+ self.lint_explicit_closure(cx, e.span, args[0].span, copy);
+ }
+ } else {
+ lint_needless_cloning(cx, e.span, args[0].span);
+ }
+ }
+ },
+ _ => {},
+ }
+ },
+ _ => {},
+ }
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+fn ident_eq(name: Ident, path: &hir::Expr<'_>) -> bool {
+ if let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = path.kind {
+ path.segments.len() == 1 && path.segments[0].ident == name
+ } else {
+ false
+ }
+}
+
+fn lint_needless_cloning(cx: &LateContext<'_>, root: Span, receiver: Span) {
+ span_lint_and_sugg(
+ cx,
+ MAP_CLONE,
+ root.trim_start(receiver).unwrap(),
+ "you are needlessly cloning iterator elements",
+ "remove the `map` call",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+}
+
+impl MapClone {
+ fn lint_explicit_closure(&self, cx: &LateContext<'_>, replace: Span, root: Span, is_copy: bool) {
+ let mut applicability = Applicability::MachineApplicable;
+
+ let (message, sugg_method) = if is_copy && meets_msrv(self.msrv, msrvs::ITERATOR_COPIED) {
+ ("you are using an explicit closure for copying elements", "copied")
+ } else {
+ ("you are using an explicit closure for cloning elements", "cloned")
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MAP_CLONE,
+ replace,
+ message,
+ &format!("consider calling the dedicated `{}` method", sugg_method),
+ format!(
+ "{}.{}()",
+ snippet_with_applicability(cx, root, "..", &mut applicability),
+ sugg_method,
+ ),
+ applicability,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/map_err_ignore.rs b/src/tools/clippy/clippy_lints/src/map_err_ignore.rs
new file mode 100644
index 000000000..21d0e19eb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/map_err_ignore.rs
@@ -0,0 +1,154 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{CaptureBy, Closure, Expr, ExprKind, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for instances of `map_err(|_| Some::Enum)`
+ ///
+ /// ### Why is this bad?
+ /// This `map_err` throws away the original error rather than allowing the enum to contain and report the cause of the error
+ ///
+ /// ### Example
+ /// Before:
+ /// ```rust
+ /// use std::fmt;
+ ///
+ /// #[derive(Debug)]
+ /// enum Error {
+ /// Indivisible,
+ /// Remainder(u8),
+ /// }
+ ///
+ /// impl fmt::Display for Error {
+ /// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ /// match self {
+ /// Error::Indivisible => write!(f, "could not divide input by three"),
+ /// Error::Remainder(remainder) => write!(
+ /// f,
+ /// "input is not divisible by three, remainder = {}",
+ /// remainder
+ /// ),
+ /// }
+ /// }
+ /// }
+ ///
+ /// impl std::error::Error for Error {}
+ ///
+ /// fn divisible_by_3(input: &str) -> Result<(), Error> {
+ /// input
+ /// .parse::<i32>()
+ /// .map_err(|_| Error::Indivisible)
+ /// .map(|v| v % 3)
+ /// .and_then(|remainder| {
+ /// if remainder == 0 {
+ /// Ok(())
+ /// } else {
+ /// Err(Error::Remainder(remainder as u8))
+ /// }
+ /// })
+ /// }
+ /// ```
+ ///
+ /// After:
+ /// ```rust
+ /// use std::{fmt, num::ParseIntError};
+ ///
+ /// #[derive(Debug)]
+ /// enum Error {
+ /// Indivisible(ParseIntError),
+ /// Remainder(u8),
+ /// }
+ ///
+ /// impl fmt::Display for Error {
+ /// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ /// match self {
+ /// Error::Indivisible(_) => write!(f, "could not divide input by three"),
+ /// Error::Remainder(remainder) => write!(
+ /// f,
+ /// "input is not divisible by three, remainder = {}",
+ /// remainder
+ /// ),
+ /// }
+ /// }
+ /// }
+ ///
+ /// impl std::error::Error for Error {
+ /// fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ /// match self {
+ /// Error::Indivisible(source) => Some(source),
+ /// _ => None,
+ /// }
+ /// }
+ /// }
+ ///
+ /// fn divisible_by_3(input: &str) -> Result<(), Error> {
+ /// input
+ /// .parse::<i32>()
+ /// .map_err(Error::Indivisible)
+ /// .map(|v| v % 3)
+ /// .and_then(|remainder| {
+ /// if remainder == 0 {
+ /// Ok(())
+ /// } else {
+ /// Err(Error::Remainder(remainder as u8))
+ /// }
+ /// })
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub MAP_ERR_IGNORE,
+ restriction,
+ "`map_err` should not ignore the original error"
+}
+
+declare_lint_pass!(MapErrIgnore => [MAP_ERR_IGNORE]);
+
+impl<'tcx> LateLintPass<'tcx> for MapErrIgnore {
+ // do not try to lint if this is from a macro or desugaring
+ fn check_expr(&mut self, cx: &LateContext<'_>, e: &Expr<'_>) {
+ if e.span.from_expansion() {
+ return;
+ }
+
+ // check if this is a method call (e.g. x.foo())
+ if let ExprKind::MethodCall(method, args, _) = e.kind {
+ // only work if the method name is `map_err` and there are only 2 arguments
+ // (the receiver and the closure, e.g. `x.map_err(|_| Enum::Variant)`)
+ if method.ident.as_str() == "map_err" && args.len() == 2 {
+ // make sure the first argument is a closure, and grab the CaptureRef, BodyId, and fn_decl_span
+ // fields
+ if let ExprKind::Closure(&Closure {
+ capture_clause,
+ body,
+ fn_decl_span,
+ ..
+ }) = args[1].kind
+ {
+ // check if this is by Reference (meaning there's no move statement)
+ if capture_clause == CaptureBy::Ref {
+ // Get the closure body to check the parameters and values
+ let closure_body = cx.tcx.hir().body(body);
+ // make sure there's only one parameter (`|_|`)
+ if closure_body.params.len() == 1 {
+ // make sure that parameter is the wild token (`_`)
+ if let PatKind::Wild = closure_body.params[0].pat.kind {
+ // span the area of the closure capture and warn that the
+ // original error will be thrown away
+ span_lint_and_help(
+ cx,
+ MAP_ERR_IGNORE,
+ fn_decl_span,
+ "`map_err(|_|...` wildcard pattern discards the original error",
+ None,
+ "consider storing the original error as a source in the new error, or silence this warning using an ignored identifier (`.map_err(|_foo| ...`)",
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
new file mode 100644
index 000000000..af9d948af
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
@@ -0,0 +1,272 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_context};
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{iter_input_pats, method_chain_args};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `option.map(f)` where f is a function
+ /// or closure that returns the unit type `()`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more clearly with
+ /// an if let statement
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn do_stuff() -> Option<String> { Some(String::new()) }
+ /// # fn log_err_msg(foo: String) -> Option<String> { Some(foo) }
+ /// # fn format_msg(foo: String) -> String { String::new() }
+ /// let x: Option<String> = do_stuff();
+ /// x.map(log_err_msg);
+ /// # let x: Option<String> = do_stuff();
+ /// x.map(|msg| log_err_msg(format_msg(msg)));
+ /// ```
+ ///
+ /// The correct use would be:
+ ///
+ /// ```rust
+ /// # fn do_stuff() -> Option<String> { Some(String::new()) }
+ /// # fn log_err_msg(foo: String) -> Option<String> { Some(foo) }
+ /// # fn format_msg(foo: String) -> String { String::new() }
+ /// let x: Option<String> = do_stuff();
+ /// if let Some(msg) = x {
+ /// log_err_msg(msg);
+ /// }
+ ///
+ /// # let x: Option<String> = do_stuff();
+ /// if let Some(msg) = x {
+ /// log_err_msg(format_msg(msg));
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OPTION_MAP_UNIT_FN,
+ complexity,
+ "using `option.map(f)`, where `f` is a function or closure that returns `()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `result.map(f)` where f is a function
+ /// or closure that returns the unit type `()`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more clearly with
+ /// an if let statement
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn do_stuff() -> Result<String, String> { Ok(String::new()) }
+ /// # fn log_err_msg(foo: String) -> Result<String, String> { Ok(foo) }
+ /// # fn format_msg(foo: String) -> String { String::new() }
+ /// let x: Result<String, String> = do_stuff();
+ /// x.map(log_err_msg);
+ /// # let x: Result<String, String> = do_stuff();
+ /// x.map(|msg| log_err_msg(format_msg(msg)));
+ /// ```
+ ///
+ /// The correct use would be:
+ ///
+ /// ```rust
+ /// # fn do_stuff() -> Result<String, String> { Ok(String::new()) }
+ /// # fn log_err_msg(foo: String) -> Result<String, String> { Ok(foo) }
+ /// # fn format_msg(foo: String) -> String { String::new() }
+ /// let x: Result<String, String> = do_stuff();
+ /// if let Ok(msg) = x {
+ /// log_err_msg(msg);
+ /// };
+ /// # let x: Result<String, String> = do_stuff();
+ /// if let Ok(msg) = x {
+ /// log_err_msg(format_msg(msg));
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub RESULT_MAP_UNIT_FN,
+ complexity,
+ "using `result.map(f)`, where `f` is a function or closure that returns `()`"
+}
+
+declare_lint_pass!(MapUnit => [OPTION_MAP_UNIT_FN, RESULT_MAP_UNIT_FN]);
+
+fn is_unit_type(ty: Ty<'_>) -> bool {
+ match ty.kind() {
+ ty::Tuple(slice) => slice.is_empty(),
+ ty::Never => true,
+ _ => false,
+ }
+}
+
+fn is_unit_function(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
+ let ty = cx.typeck_results().expr_ty(expr);
+
+ if let ty::FnDef(id, _) = *ty.kind() {
+ if let Some(fn_type) = cx.tcx.fn_sig(id).no_bound_vars() {
+ return is_unit_type(fn_type.output());
+ }
+ }
+ false
+}
+
+fn is_unit_expression(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
+ is_unit_type(cx.typeck_results().expr_ty(expr))
+}
+
+/// The expression inside a closure may or may not have surrounding braces and
+/// semicolons, which causes problems when generating a suggestion. Given an
+/// expression that evaluates to '()' or '!', recursively remove useless braces
+/// and semicolons until it is suitable for including in the suggestion template
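+///
+/// For example (illustrative): a closure body written as `{ { foo(x); } }` is
+/// reduced to the span of `foo(x);`, so only that statement lands in the suggestion.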
+fn reduce_unit_expression<'a>(cx: &LateContext<'_>, expr: &'a hir::Expr<'_>) -> Option<Span> {
+ if !is_unit_expression(cx, expr) {
+ return None;
+ }
+
+ match expr.kind {
+ hir::ExprKind::Call(_, _) | hir::ExprKind::MethodCall(..) => {
+ // Calls can't be reduced any more
+ Some(expr.span)
+ },
+ hir::ExprKind::Block(block, _) => {
+ match (block.stmts, block.expr.as_ref()) {
+ (&[], Some(inner_expr)) => {
+ // If block only contains an expression,
+ // reduce `{ X }` to `X`
+ reduce_unit_expression(cx, inner_expr)
+ },
+ (&[ref inner_stmt], None) => {
+ // If block only contains statements,
+ // reduce `{ X; }` to `X` or `X;`
+ match inner_stmt.kind {
+ hir::StmtKind::Local(local) => Some(local.span),
+ hir::StmtKind::Expr(e) => Some(e.span),
+ hir::StmtKind::Semi(..) => Some(inner_stmt.span),
+ hir::StmtKind::Item(..) => None,
+ }
+ },
+ _ => {
+ // For closures that contain multiple statements
+ // it's difficult to get a correct suggestion span
+ // for all cases (multi-line closures specifically)
+ //
+ // We do not attempt to build a suggestion for those right now.
+ None
+ },
+ }
+ },
+ _ => None,
+ }
+}
+
+fn unit_closure<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+) -> Option<(&'tcx hir::Param<'tcx>, &'tcx hir::Expr<'tcx>)> {
+ if_chain! {
+ if let hir::ExprKind::Closure(&hir::Closure { fn_decl, body, .. }) = expr.kind;
+ let body = cx.tcx.hir().body(body);
+ let body_expr = &body.value;
+ if fn_decl.inputs.len() == 1;
+ if is_unit_expression(cx, body_expr);
+ if let Some(binding) = iter_input_pats(fn_decl, body).next();
+ then {
+ return Some((binding, body_expr));
+ }
+ }
+ None
+}
+
+/// Builds a name for the let binding variable (`var_arg`)
+///
+/// `x.field` => `x_field`
+/// `y` => `_y`
+///
+/// Anything else will return `a`.
+fn let_binding_name(cx: &LateContext<'_>, var_arg: &hir::Expr<'_>) -> String {
+ match &var_arg.kind {
+ hir::ExprKind::Field(_, _) => snippet(cx, var_arg.span, "_").replace('.', "_"),
+ hir::ExprKind::Path(_) => format!("_{}", snippet(cx, var_arg.span, "")),
+ _ => "a".to_string(),
+ }
+}
+
+#[must_use]
+fn suggestion_msg(function_type: &str, map_type: &str) -> String {
+ format!(
+ "called `map(f)` on an `{0}` value where `f` is a {1} that returns the unit type `()`",
+ map_type, function_type
+ )
+}
+
+fn lint_map_unit_fn(cx: &LateContext<'_>, stmt: &hir::Stmt<'_>, expr: &hir::Expr<'_>, map_args: &[hir::Expr<'_>]) {
+ let var_arg = &map_args[0];
+
+ let (map_type, variant, lint) = if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(var_arg), sym::Option) {
+ ("Option", "Some", OPTION_MAP_UNIT_FN)
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(var_arg), sym::Result) {
+ ("Result", "Ok", RESULT_MAP_UNIT_FN)
+ } else {
+ return;
+ };
+ let fn_arg = &map_args[1];
+
+ if is_unit_function(cx, fn_arg) {
+ let mut applicability = Applicability::MachineApplicable;
+ let msg = suggestion_msg("function", map_type);
+ let suggestion = format!(
+ "if let {0}({binding}) = {1} {{ {2}({binding}) }}",
+ variant,
+ snippet_with_applicability(cx, var_arg.span, "_", &mut applicability),
+ snippet_with_applicability(cx, fn_arg.span, "_", &mut applicability),
+ binding = let_binding_name(cx, var_arg)
+ );
+
+ span_lint_and_then(cx, lint, expr.span, &msg, |diag| {
+ diag.span_suggestion(stmt.span, "try this", suggestion, applicability);
+ });
+ } else if let Some((binding, closure_expr)) = unit_closure(cx, fn_arg) {
+ let msg = suggestion_msg("closure", map_type);
+
+ span_lint_and_then(cx, lint, expr.span, &msg, |diag| {
+ if let Some(reduced_expr_span) = reduce_unit_expression(cx, closure_expr) {
+ let mut applicability = Applicability::MachineApplicable;
+ let suggestion = format!(
+ "if let {0}({1}) = {2} {{ {3} }}",
+ variant,
+ snippet_with_applicability(cx, binding.pat.span, "_", &mut applicability),
+ snippet_with_applicability(cx, var_arg.span, "_", &mut applicability),
+ snippet_with_context(cx, reduced_expr_span, var_arg.span.ctxt(), "_", &mut applicability).0,
+ );
+ diag.span_suggestion(stmt.span, "try this", suggestion, applicability);
+ } else {
+ let suggestion = format!(
+ "if let {0}({1}) = {2} {{ ... }}",
+ variant,
+ snippet(cx, binding.pat.span, "_"),
+ snippet(cx, var_arg.span, "_"),
+ );
+ diag.span_suggestion(stmt.span, "try this", suggestion, Applicability::HasPlaceholders);
+ }
+ });
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for MapUnit {
+ fn check_stmt(&mut self, cx: &LateContext<'_>, stmt: &hir::Stmt<'_>) {
+ if stmt.span.from_expansion() {
+ return;
+ }
+
+ if let hir::StmtKind::Semi(expr) = stmt.kind {
+ if let Some(arglists) = method_chain_args(expr, &["map"]) {
+ lint_map_unit_fn(cx, stmt, expr, arglists[0]);
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/match_result_ok.rs b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
new file mode 100644
index 000000000..3349b85f1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
@@ -0,0 +1,90 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher;
+use clippy_utils::method_chain_args;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, PatKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary `ok()` in `while let`.
+ ///
+ /// ### Why is this bad?
+/// Calling `ok()` in `while let` is unnecessary; instead, match
+/// directly on `Ok(pat)`.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// while let Some(value) = iter.next().ok() {
+ /// vec.push(value)
+ /// }
+ ///
+ /// if let Some(value) = iter.next().ok() {
+ /// vec.push(value)
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```ignore
+ /// while let Ok(value) = iter.next() {
+ /// vec.push(value)
+ /// }
+ ///
+ /// if let Ok(value) = iter.next() {
+ /// vec.push(value)
+ /// }
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub MATCH_RESULT_OK,
+ style,
+ "usage of `ok()` in `let Some(pat)` statements is unnecessary, match on `Ok(pat)` instead"
+}
+
+declare_lint_pass!(MatchResultOk => [MATCH_RESULT_OK]);
+
+impl<'tcx> LateLintPass<'tcx> for MatchResultOk {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let (let_pat, let_expr, ifwhile) =
+ if let Some(higher::IfLet { let_pat, let_expr, .. }) = higher::IfLet::hir(cx, expr) {
+ (let_pat, let_expr, "if")
+ } else if let Some(higher::WhileLet { let_pat, let_expr, .. }) = higher::WhileLet::hir(expr) {
+ (let_pat, let_expr, "while")
+ } else {
+ return;
+ };
+
+ if_chain! {
+ if let ExprKind::MethodCall(ok_path, [ref result_types_0, ..], _) = let_expr.kind; // check that `let_expr` is a method call, e.g. `result.ok()`
+ if let PatKind::TupleStruct(QPath::Resolved(_, x), y, _) = let_pat.kind; // destructure the pattern, e.g. `Some(value)`
+ if method_chain_args(let_expr, &["ok"]).is_some(); // check that the method being called is `ok()`
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(result_types_0), sym::Result);
+ if rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_path(x, false)) == "Some";
+
+ then {
+
+ let mut applicability = Applicability::MachineApplicable;
+ let some_expr_string = snippet_with_applicability(cx, y[0].span, "", &mut applicability);
+ let trimmed_ok = snippet_with_applicability(cx, let_expr.span.until(ok_path.ident.span), "", &mut applicability);
+ let sugg = format!(
+ "{} let Ok({}) = {}",
+ ifwhile,
+ some_expr_string,
+ trimmed_ok.trim().trim_end_matches('.'),
+ );
+ span_lint_and_sugg(
+ cx,
+ MATCH_RESULT_OK,
+ expr.span.with_hi(let_expr.span.hi()),
+ "matching on `Some` with `ok()` is redundant",
+ &format!("consider matching on `Ok({})` and removing the call to `ok` instead", some_expr_string),
+ sugg,
+ applicability,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs b/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs
new file mode 100644
index 000000000..07021f1bc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs
@@ -0,0 +1,143 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher::IfLetOrMatch;
+use clippy_utils::visitors::is_local_used;
+use clippy_utils::{is_lang_ctor, is_unit_expr, path_to_local, peel_blocks_with_stmt, peel_ref_operators, SpanlessEq};
+use if_chain::if_chain;
+use rustc_errors::MultiSpan;
+use rustc_hir::LangItem::OptionNone;
+use rustc_hir::{Arm, Expr, Guard, HirId, Let, Pat, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+
+use super::COLLAPSIBLE_MATCH;
+
+pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
+ if let Some(els_arm) = arms.iter().rfind(|arm| arm_is_wild_like(cx, arm)) {
+ for arm in arms {
+ check_arm(cx, true, arm.pat, arm.body, arm.guard.as_ref(), Some(els_arm.body));
+ }
+ }
+}
+
+pub(super) fn check_if_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ body: &'tcx Expr<'_>,
+ else_expr: Option<&'tcx Expr<'_>>,
+) {
+ check_arm(cx, false, pat, body, None, else_expr);
+}
+
+fn check_arm<'tcx>(
+ cx: &LateContext<'tcx>,
+ outer_is_match: bool,
+ outer_pat: &'tcx Pat<'tcx>,
+ outer_then_body: &'tcx Expr<'tcx>,
+ outer_guard: Option<&'tcx Guard<'tcx>>,
+ outer_else_body: Option<&'tcx Expr<'tcx>>,
+) {
+ let inner_expr = peel_blocks_with_stmt(outer_then_body);
+ if_chain! {
+ if let Some(inner) = IfLetOrMatch::parse(cx, inner_expr);
+ if let Some((inner_scrutinee, inner_then_pat, inner_else_body)) = match inner {
+ IfLetOrMatch::IfLet(scrutinee, pat, _, els) => Some((scrutinee, pat, els)),
+ IfLetOrMatch::Match(scrutinee, arms, ..) => if_chain! {
+ // if there are more than two arms, collapsing would be non-trivial
+ if arms.len() == 2 && arms.iter().all(|a| a.guard.is_none());
+ // one of the arms must be "wild-like"
+ if let Some(wild_idx) = arms.iter().rposition(|a| arm_is_wild_like(cx, a));
+ then {
+ let (then, els) = (&arms[1 - wild_idx], &arms[wild_idx]);
+ Some((scrutinee, then.pat, Some(els.body)))
+ } else {
+ None
+ }
+ },
+ };
+ if outer_pat.span.ctxt() == inner_scrutinee.span.ctxt();
+ // match expression must be a local binding
+ // match <local> { .. }
+ if let Some(binding_id) = path_to_local(peel_ref_operators(cx, inner_scrutinee));
+ if !pat_contains_or(inner_then_pat);
+ // the binding must come from the pattern of the containing match arm
+ // ..<local>.. => match <local> { .. }
+ if let Some(binding_span) = find_pat_binding(outer_pat, binding_id);
+ // the "else" branches must be equal
+ if match (outer_else_body, inner_else_body) {
+ (None, None) => true,
+ (None, Some(e)) | (Some(e), None) => is_unit_expr(e),
+ (Some(a), Some(b)) => SpanlessEq::new(cx).eq_expr(a, b),
+ };
+ // the binding must not be used in the if guard
+ if outer_guard.map_or(
+ true,
+ |(Guard::If(e) | Guard::IfLet(Let { init: e, .. }))| !is_local_used(cx, *e, binding_id)
+ );
+ // ...or anywhere in the inner expression
+ if match inner {
+ IfLetOrMatch::IfLet(_, _, body, els) => {
+ !is_local_used(cx, body, binding_id) && els.map_or(true, |e| !is_local_used(cx, e, binding_id))
+ },
+ IfLetOrMatch::Match(_, arms, ..) => !arms.iter().any(|arm| is_local_used(cx, arm, binding_id)),
+ };
+ then {
+ let msg = format!(
+ "this `{}` can be collapsed into the outer `{}`",
+ if matches!(inner, IfLetOrMatch::Match(..)) { "match" } else { "if let" },
+ if outer_is_match { "match" } else { "if let" },
+ );
+ span_lint_and_then(
+ cx,
+ COLLAPSIBLE_MATCH,
+ inner_expr.span,
+ &msg,
+ |diag| {
+ let mut help_span = MultiSpan::from_spans(vec![binding_span, inner_then_pat.span]);
+ help_span.push_span_label(binding_span, "replace this binding");
+ help_span.push_span_label(inner_then_pat.span, "with this pattern");
+ diag.span_help(help_span, "the outer pattern can be modified to include the inner pattern");
+ },
+ );
+ }
+ }
+}
+
+/// A "wild-like" arm has a wild (`_`) or `None` pattern and no guard. Such arms can be "collapsed"
+/// into a single wild arm without any significant loss in semantics or readability.
+fn arm_is_wild_like(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
+ if arm.guard.is_some() {
+ return false;
+ }
+ match arm.pat.kind {
+ PatKind::Binding(..) | PatKind::Wild => true,
+ PatKind::Path(ref qpath) => is_lang_ctor(cx, qpath, OptionNone),
+ _ => false,
+ }
+}
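+
+// Rough sketch of arms treated as "wild-like" (placeholder bodies):
+//
+//     _ => todo!(),          // wild pattern
+//     other => todo!(),      // bare binding
+//     None => todo!(),       // `Option::None` path
+//     _ if cond => todo!(),  // not wild-like: the arm has a guard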
+
+fn find_pat_binding(pat: &Pat<'_>, hir_id: HirId) -> Option<Span> {
+ let mut span = None;
+ pat.walk_short(|p| match &p.kind {
+ // ignore OR patterns
+ PatKind::Or(_) => false,
+ PatKind::Binding(_bm, _, _ident, _) => {
+ let found = p.hir_id == hir_id;
+ if found {
+ span = Some(p.span);
+ }
+ !found
+ },
+ _ => true,
+ });
+ span
+}
+
+fn pat_contains_or(pat: &Pat<'_>) -> bool {
+ let mut result = false;
+ pat.walk(|p| {
+ let is_or = matches!(p.kind, PatKind::Or(_));
+ result |= is_or;
+ !is_or
+ });
+ result
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
new file mode 100644
index 000000000..2472acb6f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
@@ -0,0 +1,44 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{path_to_local_id, peel_blocks, strip_pat_refs};
+use rustc_errors::Applicability;
+use rustc_hir::{ExprKind, Local, MatchSource, PatKind, QPath};
+use rustc_lint::LateContext;
+
+use super::INFALLIBLE_DESTRUCTURING_MATCH;
+
+pub(crate) fn check(cx: &LateContext<'_>, local: &Local<'_>) -> bool {
+ if_chain! {
+ if !local.span.from_expansion();
+ if let Some(expr) = local.init;
+ if let ExprKind::Match(target, arms, MatchSource::Normal) = expr.kind;
+ if arms.len() == 1 && arms[0].guard.is_none();
+ if let PatKind::TupleStruct(
+ QPath::Resolved(None, variant_name), args, _) = arms[0].pat.kind;
+ if args.len() == 1;
+ if let PatKind::Binding(_, arg, ..) = strip_pat_refs(&args[0]).kind;
+ let body = peel_blocks(arms[0].body);
+ if path_to_local_id(body, arg);
+
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ INFALLIBLE_DESTRUCTURING_MATCH,
+ local.span,
+ "you seem to be trying to use `match` to destructure a single infallible pattern. \
+ Consider using `let`",
+ "try this",
+ format!(
+ "let {}({}) = {};",
+ snippet_with_applicability(cx, variant_name.span, "..", &mut applicability),
+ snippet_with_applicability(cx, local.pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, target.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+ return true;
+ }
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_map.rs b/src/tools/clippy/clippy_lints/src/matches/manual_map.rs
new file mode 100644
index 000000000..8f98b43b9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_map.rs
@@ -0,0 +1,306 @@
+use crate::{map_unit_fn::OPTION_MAP_UNIT_FN, matches::MATCH_AS_REF};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
+use clippy_utils::ty::{is_type_diagnostic_item, peel_mid_ty_refs_is_mutable, type_is_unsafe_function};
+use clippy_utils::{
+ can_move_expr_to_closure, is_else_clause, is_lang_ctor, is_lint_allowed, path_to_local_id, peel_blocks,
+ peel_hir_expr_refs, peel_hir_expr_while, CaptureKind,
+};
+use rustc_ast::util::parser::PREC_POSTFIX;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{OptionNone, OptionSome};
+use rustc_hir::{
+ def::Res, Arm, BindingAnnotation, Block, BlockCheckMode, Expr, ExprKind, HirId, Mutability, Pat, PatKind, Path,
+ QPath, UnsafeSource,
+};
+use rustc_lint::LateContext;
+use rustc_span::{sym, SyntaxContext};
+
+use super::MANUAL_MAP;
+
+pub(super) fn check_match<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ scrutinee: &'tcx Expr<'_>,
+ arms: &'tcx [Arm<'_>],
+) {
+ if let [arm1, arm2] = arms
+ && arm1.guard.is_none()
+ && arm2.guard.is_none()
+ {
+ check(cx, expr, scrutinee, arm1.pat, arm1.body, Some(arm2.pat), arm2.body);
+ }
+}
+
+pub(super) fn check_if_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ let_pat: &'tcx Pat<'_>,
+ let_expr: &'tcx Expr<'_>,
+ then_expr: &'tcx Expr<'_>,
+ else_expr: &'tcx Expr<'_>,
+) {
+ check(cx, expr, let_expr, let_pat, then_expr, None, else_expr);
+}
+
+#[expect(clippy::too_many_lines)]
+fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ scrutinee: &'tcx Expr<'_>,
+ then_pat: &'tcx Pat<'_>,
+ then_body: &'tcx Expr<'_>,
+ else_pat: Option<&'tcx Pat<'_>>,
+ else_body: &'tcx Expr<'_>,
+) {
+ let (scrutinee_ty, ty_ref_count, ty_mutability) =
+ peel_mid_ty_refs_is_mutable(cx.typeck_results().expr_ty(scrutinee));
+ if !(is_type_diagnostic_item(cx, scrutinee_ty, sym::Option)
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Option))
+ {
+ return;
+ }
+
+ let expr_ctxt = expr.span.ctxt();
+ let (some_expr, some_pat, pat_ref_count, is_wild_none) = match (
+ try_parse_pattern(cx, then_pat, expr_ctxt),
+ else_pat.map_or(Some(OptionPat::Wild), |p| try_parse_pattern(cx, p, expr_ctxt)),
+ ) {
+ (Some(OptionPat::Wild), Some(OptionPat::Some { pattern, ref_count })) if is_none_expr(cx, then_body) => {
+ (else_body, pattern, ref_count, true)
+ },
+ (Some(OptionPat::None), Some(OptionPat::Some { pattern, ref_count })) if is_none_expr(cx, then_body) => {
+ (else_body, pattern, ref_count, false)
+ },
+ (Some(OptionPat::Some { pattern, ref_count }), Some(OptionPat::Wild)) if is_none_expr(cx, else_body) => {
+ (then_body, pattern, ref_count, true)
+ },
+ (Some(OptionPat::Some { pattern, ref_count }), Some(OptionPat::None)) if is_none_expr(cx, else_body) => {
+ (then_body, pattern, ref_count, false)
+ },
+ _ => return,
+ };
+
+ // Top-level or-patterns aren't allowed in closures.
+ if matches!(some_pat.kind, PatKind::Or(_)) {
+ return;
+ }
+
+ let some_expr = match get_some_expr(cx, some_expr, false, expr_ctxt) {
+ Some(expr) => expr,
+ None => return,
+ };
+
+ // These two lints will go back and forth with each other.
+ if cx.typeck_results().expr_ty(some_expr.expr) == cx.tcx.types.unit
+ && !is_lint_allowed(cx, OPTION_MAP_UNIT_FN, expr.hir_id)
+ {
+ return;
+ }
+
+ // `map` won't perform any adjustments.
+ if !cx.typeck_results().expr_adjustments(some_expr.expr).is_empty() {
+ return;
+ }
+
+ // Determine which binding mode to use.
+ let explicit_ref = some_pat.contains_explicit_ref_binding();
+ let binding_ref = explicit_ref.or_else(|| (ty_ref_count != pat_ref_count).then_some(ty_mutability));
+
+ let as_ref_str = match binding_ref {
+ Some(Mutability::Mut) => ".as_mut()",
+ Some(Mutability::Not) => ".as_ref()",
+ None => "",
+ };
+
+ match can_move_expr_to_closure(cx, some_expr.expr) {
+ Some(captures) => {
+ // Check whether the captures needed by the closure conflict with borrows made in the scrutinee.
+ // TODO: check all the references made in the scrutinee expression. This will require interacting
+ // with the borrow checker. Currently only `<local>[.<field>]*` is checked for.
+ if let Some(binding_ref_mutability) = binding_ref {
+ let e = peel_hir_expr_while(scrutinee, |e| match e.kind {
+ ExprKind::Field(e, _) | ExprKind::AddrOf(_, _, e) => Some(e),
+ _ => None,
+ });
+ if let ExprKind::Path(QPath::Resolved(None, Path { res: Res::Local(l), .. })) = e.kind {
+ match captures.get(l) {
+ Some(CaptureKind::Value | CaptureKind::Ref(Mutability::Mut)) => return,
+ Some(CaptureKind::Ref(Mutability::Not)) if binding_ref_mutability == Mutability::Mut => {
+ return;
+ },
+ Some(CaptureKind::Ref(Mutability::Not)) | None => (),
+ }
+ }
+ }
+ },
+ None => return,
+ };
+
+ let mut app = Applicability::MachineApplicable;
+
+ // Remove address-of expressions from the scrutinee. Either `as_ref` will be called, or
+ // it's being passed by value.
+ let scrutinee = peel_hir_expr_refs(scrutinee).0;
+ let (scrutinee_str, _) = snippet_with_context(cx, scrutinee.span, expr_ctxt, "..", &mut app);
+ let scrutinee_str = if scrutinee.span.ctxt() == expr.span.ctxt() && scrutinee.precedence().order() < PREC_POSTFIX {
+ format!("({})", scrutinee_str)
+ } else {
+ scrutinee_str.into()
+ };
+
+ let body_str = if let PatKind::Binding(annotation, id, some_binding, None) = some_pat.kind {
+ if_chain! {
+ if !some_expr.needs_unsafe_block;
+ if let Some(func) = can_pass_as_func(cx, id, some_expr.expr);
+ if func.span.ctxt() == some_expr.expr.span.ctxt();
+ then {
+ snippet_with_applicability(cx, func.span, "..", &mut app).into_owned()
+ } else {
+ if path_to_local_id(some_expr.expr, id)
+ && !is_lint_allowed(cx, MATCH_AS_REF, expr.hir_id)
+ && binding_ref.is_some()
+ {
+ return;
+ }
+
+ // `ref` and `ref mut` annotations were handled earlier.
+ let annotation = if matches!(annotation, BindingAnnotation::Mutable) {
+ "mut "
+ } else {
+ ""
+ };
+ let expr_snip = snippet_with_context(cx, some_expr.expr.span, expr_ctxt, "..", &mut app).0;
+ if some_expr.needs_unsafe_block {
+ format!("|{}{}| unsafe {{ {} }}", annotation, some_binding, expr_snip)
+ } else {
+ format!("|{}{}| {}", annotation, some_binding, expr_snip)
+ }
+ }
+ }
+ } else if !is_wild_none && explicit_ref.is_none() {
+ // TODO: handle explicit reference annotations.
+ let pat_snip = snippet_with_context(cx, some_pat.span, expr_ctxt, "..", &mut app).0;
+ let expr_snip = snippet_with_context(cx, some_expr.expr.span, expr_ctxt, "..", &mut app).0;
+ if some_expr.needs_unsafe_block {
+ format!("|{}| unsafe {{ {} }}", pat_snip, expr_snip)
+ } else {
+ format!("|{}| {}", pat_snip, expr_snip)
+ }
+ } else {
+ // Refutable bindings and mixed reference annotations can't be handled by `map`.
+ return;
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MANUAL_MAP,
+ expr.span,
+ "manual implementation of `Option::map`",
+ "try this",
+ if else_pat.is_none() && is_else_clause(cx.tcx, expr) {
+ format!("{{ {}{}.map({}) }}", scrutinee_str, as_ref_str, body_str)
+ } else {
+ format!("{}{}.map({})", scrutinee_str, as_ref_str, body_str)
+ },
+ app,
+ );
+}
+
+// Checks whether the expression could be passed as a function, or whether a closure is needed.
+// Returns the function to be passed to `map` if it exists.
+fn can_pass_as_func<'tcx>(cx: &LateContext<'tcx>, binding: HirId, expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ match expr.kind {
+ ExprKind::Call(func, [arg])
+ if path_to_local_id(arg, binding)
+ && cx.typeck_results().expr_adjustments(arg).is_empty()
+ && !type_is_unsafe_function(cx, cx.typeck_results().expr_ty(func).peel_refs()) =>
+ {
+ Some(func)
+ },
+ _ => None,
+ }
+}
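+
+// Rough sketch of the case this enables, with `f` standing in for any safe
+// function that takes the binding by value without implicit adjustments:
+//
+//     match opt {
+//         Some(x) => Some(f(x)),   // body is a plain call, so `opt.map(f)` is suggested
+//         None => None,
+//     }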
+
+enum OptionPat<'a> {
+ Wild,
+ None,
+ Some {
+ // The pattern contained in the `Some` tuple.
+ pattern: &'a Pat<'a>,
+ // The number of references before the `Some` tuple.
+ // e.g. `&&Some(_)` has a ref count of 2.
+ ref_count: usize,
+ },
+}
+
+struct SomeExpr<'tcx> {
+ expr: &'tcx Expr<'tcx>,
+ needs_unsafe_block: bool,
+}
+
+// Try to parse into a recognized `Option` pattern.
+// i.e. `_`, `None`, `Some(..)`, or a reference to any of those.
+fn try_parse_pattern<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>, ctxt: SyntaxContext) -> Option<OptionPat<'tcx>> {
+ fn f<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &'tcx Pat<'_>,
+ ref_count: usize,
+ ctxt: SyntaxContext,
+ ) -> Option<OptionPat<'tcx>> {
+ match pat.kind {
+ PatKind::Wild => Some(OptionPat::Wild),
+ PatKind::Ref(pat, _) => f(cx, pat, ref_count + 1, ctxt),
+ PatKind::Path(ref qpath) if is_lang_ctor(cx, qpath, OptionNone) => Some(OptionPat::None),
+ PatKind::TupleStruct(ref qpath, [pattern], _)
+ if is_lang_ctor(cx, qpath, OptionSome) && pat.span.ctxt() == ctxt =>
+ {
+ Some(OptionPat::Some { pattern, ref_count })
+ },
+ _ => None,
+ }
+ }
+ f(cx, pat, 0, ctxt)
+}
+
+// Checks for an expression wrapped by the `Some` constructor. Returns the contained expression.
+fn get_some_expr<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ needs_unsafe_block: bool,
+ ctxt: SyntaxContext,
+) -> Option<SomeExpr<'tcx>> {
+ // TODO: Allow more complex expressions.
+ match expr.kind {
+ ExprKind::Call(
+ Expr {
+ kind: ExprKind::Path(ref qpath),
+ ..
+ },
+ [arg],
+ ) if ctxt == expr.span.ctxt() && is_lang_ctor(cx, qpath, OptionSome) => Some(SomeExpr {
+ expr: arg,
+ needs_unsafe_block,
+ }),
+ ExprKind::Block(
+ Block {
+ stmts: [],
+ expr: Some(expr),
+ rules,
+ ..
+ },
+ _,
+ ) => get_some_expr(
+ cx,
+ expr,
+ needs_unsafe_block || *rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided),
+ ctxt,
+ ),
+ _ => None,
+ }
+}
+
+// Checks for the `None` value.
+fn is_none_expr(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ matches!(peel_blocks(expr).kind, ExprKind::Path(ref qpath) if is_lang_ctor(cx, qpath, OptionNone))
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
new file mode 100644
index 000000000..e1111c80f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs
@@ -0,0 +1,83 @@
+use clippy_utils::consts::constant_simple;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::usage::contains_return_break_continue_macro;
+use clippy_utils::{is_lang_ctor, path_to_local_id, sugg};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk};
+use rustc_hir::{Arm, Expr, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::MANUAL_UNWRAP_OR;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, scrutinee: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) {
+ let ty = cx.typeck_results().expr_ty(scrutinee);
+ if_chain! {
+ if let Some(ty_name) = if is_type_diagnostic_item(cx, ty, sym::Option) {
+ Some("Option")
+ } else if is_type_diagnostic_item(cx, ty, sym::Result) {
+ Some("Result")
+ } else {
+ None
+ };
+ if let Some(or_arm) = applicable_or_arm(cx, arms);
+ if let Some(or_body_snippet) = snippet_opt(cx, or_arm.body.span);
+ if let Some(indent) = indent_of(cx, expr.span);
+ if constant_simple(cx, cx.typeck_results(), or_arm.body).is_some();
+ then {
+ let reindented_or_body =
+ reindent_multiline(or_body_snippet.into(), true, Some(indent));
+
+ let suggestion = if scrutinee.span.from_expansion() {
+ // we don't want parentheses around macro, e.g. `(some_macro!()).unwrap_or(0)`
+ sugg::Sugg::hir_with_macro_callsite(cx, scrutinee, "..")
+ }
+ else {
+ sugg::Sugg::hir(cx, scrutinee, "..").maybe_par()
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MANUAL_UNWRAP_OR, expr.span,
+ &format!("this pattern reimplements `{}::unwrap_or`", ty_name),
+ "replace with",
+ format!(
+ "{}.unwrap_or({})",
+ suggestion,
+ reindented_or_body,
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
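+
+// Rough sketch of the shape this rewrites, with placeholder values:
+//
+//     match opt {                // `opt: Option<i32>`
+//         Some(v) => v,
+//         None => 0,             // the "or" arm: a simple constant
+//     }
+//
+// which becomes `opt.unwrap_or(0)`; the same applies to `Result` with an `Err(_)` arm.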
+
+fn applicable_or_arm<'a>(cx: &LateContext<'_>, arms: &'a [Arm<'a>]) -> Option<&'a Arm<'a>> {
+ if_chain! {
+ if arms.len() == 2;
+ if arms.iter().all(|arm| arm.guard.is_none());
+ if let Some((idx, or_arm)) = arms.iter().enumerate().find(|(_, arm)| {
+ match arm.pat.kind {
+ PatKind::Path(ref qpath) => is_lang_ctor(cx, qpath, OptionNone),
+ PatKind::TupleStruct(ref qpath, [pat], _) =>
+ matches!(pat.kind, PatKind::Wild) && is_lang_ctor(cx, qpath, ResultErr),
+ _ => false,
+ }
+ });
+ let unwrap_arm = &arms[1 - idx];
+ if let PatKind::TupleStruct(ref qpath, [unwrap_pat], _) = unwrap_arm.pat.kind;
+ if is_lang_ctor(cx, qpath, OptionSome) || is_lang_ctor(cx, qpath, ResultOk);
+ if let PatKind::Binding(_, binding_hir_id, ..) = unwrap_pat.kind;
+ if path_to_local_id(unwrap_arm.body, binding_hir_id);
+ if cx.typeck_results().expr_adjustments(unwrap_arm.body).is_empty();
+ if !contains_return_break_continue_macro(or_arm.body);
+ then {
+ Some(or_arm)
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
new file mode 100644
index 000000000..d914eba01
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
@@ -0,0 +1,85 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{is_lang_ctor, peel_blocks};
+use rustc_errors::Applicability;
+use rustc_hir::{Arm, BindingAnnotation, Expr, ExprKind, LangItem, PatKind, QPath};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::MATCH_AS_REF;
+
+pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
+ if arms.len() == 2 && arms[0].guard.is_none() && arms[1].guard.is_none() {
+ let arm_ref: Option<BindingAnnotation> = if is_none_arm(cx, &arms[0]) {
+ is_ref_some_arm(cx, &arms[1])
+ } else if is_none_arm(cx, &arms[1]) {
+ is_ref_some_arm(cx, &arms[0])
+ } else {
+ None
+ };
+ if let Some(rb) = arm_ref {
+ let suggestion = if rb == BindingAnnotation::Ref {
+ "as_ref"
+ } else {
+ "as_mut"
+ };
+
+ let output_ty = cx.typeck_results().expr_ty(expr);
+ let input_ty = cx.typeck_results().expr_ty(ex);
+
+ let cast = if_chain! {
+ if let ty::Adt(_, substs) = input_ty.kind();
+ let input_ty = substs.type_at(0);
+ if let ty::Adt(_, substs) = output_ty.kind();
+ let output_ty = substs.type_at(0);
+ if let ty::Ref(_, output_ty, _) = *output_ty.kind();
+ if input_ty != output_ty;
+ then {
+ ".map(|x| x as _)"
+ } else {
+ ""
+ }
+ };
+
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ MATCH_AS_REF,
+ expr.span,
+ &format!("use `{}()` instead", suggestion),
+ "try this",
+ format!(
+ "{}.{}(){}",
+ snippet_with_applicability(cx, ex.span, "_", &mut applicability),
+ suggestion,
+ cast,
+ ),
+ applicability,
+ );
+ }
+ }
+}
+
+// Checks if arm has the form `None => None`
+fn is_none_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
+ matches!(arm.pat.kind, PatKind::Path(ref qpath) if is_lang_ctor(cx, qpath, LangItem::OptionNone))
+}
+
+// Checks if arm has the form `Some(ref v) => Some(v)` (checks for `ref` and `ref mut`)
+fn is_ref_some_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> Option<BindingAnnotation> {
+ if_chain! {
+ if let PatKind::TupleStruct(ref qpath, [first_pat, ..], _) = arm.pat.kind;
+ if is_lang_ctor(cx, qpath, LangItem::OptionSome);
+ if let PatKind::Binding(rb, .., ident, _) = first_pat.kind;
+ if rb == BindingAnnotation::Ref || rb == BindingAnnotation::RefMut;
+ if let ExprKind::Call(e, args) = peel_blocks(arm.body).kind;
+ if let ExprKind::Path(ref some_path) = e.kind;
+ if is_lang_ctor(cx, some_path, LangItem::OptionSome) && args.len() == 1;
+ if let ExprKind::Path(QPath::Resolved(_, path2)) = args[0].kind;
+ if path2.segments.len() == 1 && ident.name == path2.segments[0].ident.name;
+ then {
+ return Some(rb)
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_bool.rs b/src/tools/clippy/clippy_lints/src/matches/match_bool.rs
new file mode 100644
index 000000000..1c216e135
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_bool.rs
@@ -0,0 +1,75 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::is_unit_expr;
+use clippy_utils::source::{expr_block, snippet};
+use clippy_utils::sugg::Sugg;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Arm, Expr, ExprKind, PatKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::MATCH_BOOL;
+
+pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
+ // Type of expression is `bool`.
+ if *cx.typeck_results().expr_ty(ex).kind() == ty::Bool {
+ span_lint_and_then(
+ cx,
+ MATCH_BOOL,
+ expr.span,
+ "you seem to be trying to match on a boolean expression",
+ move |diag| {
+ if arms.len() == 2 {
+ // no guards
+ let exprs = if let PatKind::Lit(arm_bool) = arms[0].pat.kind {
+ if let ExprKind::Lit(ref lit) = arm_bool.kind {
+ match lit.node {
+ LitKind::Bool(true) => Some((arms[0].body, arms[1].body)),
+ LitKind::Bool(false) => Some((arms[1].body, arms[0].body)),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+
+ if let Some((true_expr, false_expr)) = exprs {
+ let sugg = match (is_unit_expr(true_expr), is_unit_expr(false_expr)) {
+ (false, false) => Some(format!(
+ "if {} {} else {}",
+ snippet(cx, ex.span, "b"),
+ expr_block(cx, true_expr, None, "..", Some(expr.span)),
+ expr_block(cx, false_expr, None, "..", Some(expr.span))
+ )),
+ (false, true) => Some(format!(
+ "if {} {}",
+ snippet(cx, ex.span, "b"),
+ expr_block(cx, true_expr, None, "..", Some(expr.span))
+ )),
+ (true, false) => {
+ let test = Sugg::hir(cx, ex, "..");
+ Some(format!(
+ "if {} {}",
+ !test,
+ expr_block(cx, false_expr, None, "..", Some(expr.span))
+ ))
+ },
+ (true, true) => None,
+ };
+
+ if let Some(sugg) = sugg {
+ diag.span_suggestion(
+ expr.span,
+ "consider using an `if`/`else` expression",
+ sugg,
+ Applicability::HasPlaceholders,
+ );
+ }
+ }
+ }
+ },
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
new file mode 100644
index 000000000..0da4833f1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
@@ -0,0 +1,171 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_wild;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_ast::{Attribute, LitKind};
+use rustc_errors::Applicability;
+use rustc_hir::{Arm, BorrowKind, Expr, ExprKind, Guard, Pat};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Spanned;
+
+use super::MATCH_LIKE_MATCHES_MACRO;
+
+/// Lint a `match` or `if let .. { .. } else { .. }` expr that could be replaced by `matches!`
+pub(crate) fn check_if_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ let_pat: &'tcx Pat<'_>,
+ let_expr: &'tcx Expr<'_>,
+ then_expr: &'tcx Expr<'_>,
+ else_expr: &'tcx Expr<'_>,
+) {
+ find_matches_sugg(
+ cx,
+ let_expr,
+ IntoIterator::into_iter([
+ (&[][..], Some(let_pat), then_expr, None),
+ (&[][..], None, else_expr, None),
+ ]),
+ expr,
+ true,
+ );
+}
+
+pub(super) fn check_match<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ scrutinee: &'tcx Expr<'_>,
+ arms: &'tcx [Arm<'tcx>],
+) -> bool {
+ find_matches_sugg(
+ cx,
+ scrutinee,
+ arms.iter().map(|arm| {
+ (
+ cx.tcx.hir().attrs(arm.hir_id),
+ Some(arm.pat),
+ arm.body,
+ arm.guard.as_ref(),
+ )
+ }),
+ e,
+ false,
+ )
+}
+
+/// Lint a `match` or `if let` for replacement by `matches!`
+fn find_matches_sugg<'a, 'b, I>(
+ cx: &LateContext<'_>,
+ ex: &Expr<'_>,
+ mut iter: I,
+ expr: &Expr<'_>,
+ is_if_let: bool,
+) -> bool
+where
+ 'b: 'a,
+ I: Clone
+ + DoubleEndedIterator
+ + ExactSizeIterator
+ + Iterator<
+ Item = (
+ &'a [Attribute],
+ Option<&'a Pat<'b>>,
+ &'a Expr<'b>,
+ Option<&'a Guard<'b>>,
+ ),
+ >,
+{
+ if_chain! {
+ if iter.len() >= 2;
+ if cx.typeck_results().expr_ty(expr).is_bool();
+ if let Some((_, last_pat_opt, last_expr, _)) = iter.next_back();
+ let iter_without_last = iter.clone();
+ if let Some((first_attrs, _, first_expr, first_guard)) = iter.next();
+ if let Some(b0) = find_bool_lit(&first_expr.kind);
+ if let Some(b1) = find_bool_lit(&last_expr.kind);
+ if b0 != b1;
+ if first_guard.is_none() || iter.len() == 0;
+ if first_attrs.is_empty();
+ if iter
+ .all(|arm| {
+ find_bool_lit(&arm.2.kind).map_or(false, |b| b == b0) && arm.3.is_none() && arm.0.is_empty()
+ });
+ then {
+ if let Some(last_pat) = last_pat_opt {
+ if !is_wild(last_pat) {
+ return false;
+ }
+ }
+
+ // The suggestion may be incorrect, because some arms can have `cfg` attributes
+ // that evaluate to `false`, so those arms will already have been stripped out.
+ let mut applicability = Applicability::MaybeIncorrect;
+ let pat = {
+ use itertools::Itertools as _;
+ iter_without_last
+ .filter_map(|arm| {
+ let pat_span = arm.1?.span;
+ Some(snippet_with_applicability(cx, pat_span, "..", &mut applicability))
+ })
+ .join(" | ")
+ };
+ let pat_and_guard = if let Some(Guard::If(g)) = first_guard {
+ format!("{} if {}", pat, snippet_with_applicability(cx, g.span, "..", &mut applicability))
+ } else {
+ pat
+ };
+
+ // strip potential borrows (#6503), but only if the type is a reference
+ let mut ex_new = ex;
+ if let ExprKind::AddrOf(BorrowKind::Ref, .., ex_inner) = ex.kind {
+ if let ty::Ref(..) = cx.typeck_results().expr_ty(ex_inner).kind() {
+ ex_new = ex_inner;
+ }
+ };
+ span_lint_and_sugg(
+ cx,
+ MATCH_LIKE_MATCHES_MACRO,
+ expr.span,
+ &format!("{} expression looks like `matches!` macro", if is_if_let { "if let .. else" } else { "match" }),
+ "try this",
+ format!(
+ "{}matches!({}, {})",
+ if b0 { "" } else { "!" },
+ snippet_with_applicability(cx, ex_new.span, "..", &mut applicability),
+ pat_and_guard,
+ ),
+ applicability,
+ );
+ true
+ } else {
+ false
+ }
+ }
+}
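+
+// Rough sketch of an input this rewrites, with placeholder names:
+//
+//     let is_some = match opt {
+//         Some(_) => true,
+//         _ => false,
+//     };
+//
+// which becomes `let is_some = matches!(opt, Some(_));`.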
+
+/// Extract a `bool` or `{ bool }`
+fn find_bool_lit(ex: &ExprKind<'_>) -> Option<bool> {
+ match ex {
+ ExprKind::Lit(Spanned {
+ node: LitKind::Bool(b), ..
+ }) => Some(*b),
+ ExprKind::Block(
+ rustc_hir::Block {
+ stmts: &[],
+ expr: Some(exp),
+ ..
+ },
+ _,
+ ) => {
+ if let ExprKind::Lit(Spanned {
+ node: LitKind::Bool(b), ..
+ }) = exp.kind
+ {
+ Some(b)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
new file mode 100644
index 000000000..2917f85c4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
@@ -0,0 +1,61 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, LangItem};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::MATCH_ON_VEC_ITEMS;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) {
+ if_chain! {
+ if let Some(idx_expr) = is_vec_indexing(cx, scrutinee);
+ if let ExprKind::Index(vec, idx) = idx_expr.kind;
+
+ then {
+ // FIXME: could be improved to suggest surrounding every pattern with Some(_),
+ // but only when `or_patterns` are stabilized.
+ span_lint_and_sugg(
+ cx,
+ MATCH_ON_VEC_ITEMS,
+ scrutinee.span,
+ "indexing into a vector may panic",
+ "try this",
+ format!(
+ "{}.get({})",
+ snippet(cx, vec.span, ".."),
+ snippet(cx, idx.span, "..")
+ ),
+ Applicability::MaybeIncorrect
+ );
+ }
+ }
+}
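+
+// Rough sketch, with placeholder names: `match v[idx] { .. }` on a `Vec` is flagged and
+// `v.get(idx)` is suggested, so an out-of-bounds index yields `None` instead of a panic
+// (the arms then need wrapping in `Some(..)`, hence `Applicability::MaybeIncorrect`).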
+
+fn is_vec_indexing<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
+ if_chain! {
+ if let ExprKind::Index(array, index) = expr.kind;
+ if is_vector(cx, array);
+ if !is_full_range(cx, index);
+
+ then {
+ return Some(expr);
+ }
+ }
+
+ None
+}
+
+fn is_vector(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let ty = cx.typeck_results().expr_ty(expr);
+ let ty = ty.peel_refs();
+ is_type_diagnostic_item(cx, ty, sym::Vec)
+}
+
+fn is_full_range(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let ty = cx.typeck_results().expr_ty(expr);
+ let ty = ty.peel_refs();
+ is_type_lang_item(cx, ty, LangItem::RangeFull)
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs b/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs
new file mode 100644
index 000000000..80f964ba1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_ref_pats.rs
@@ -0,0 +1,66 @@
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::source::snippet;
+use clippy_utils::sugg::Sugg;
+use core::iter::once;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Pat, PatKind};
+use rustc_lint::LateContext;
+
+use super::MATCH_REF_PATS;
+
+pub(crate) fn check<'a, 'b, I>(cx: &LateContext<'_>, ex: &Expr<'_>, pats: I, expr: &Expr<'_>)
+where
+ 'b: 'a,
+ I: Clone + Iterator<Item = &'a Pat<'b>>,
+{
+ if !has_multiple_ref_pats(pats.clone()) {
+ return;
+ }
+
+ let (first_sugg, msg, title);
+ let span = ex.span.source_callsite();
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner) = ex.kind {
+ first_sugg = once((span, Sugg::hir_with_macro_callsite(cx, inner, "..").to_string()));
+ msg = "try";
+ title = "you don't need to add `&` to both the expression and the patterns";
+ } else {
+ first_sugg = once((span, Sugg::hir_with_macro_callsite(cx, ex, "..").deref().to_string()));
+ msg = "instead of prefixing all patterns with `&`, you can dereference the expression";
+ title = "you don't need to add `&` to all patterns";
+ }
+
+ let remaining_suggs = pats.filter_map(|pat| {
+ if let PatKind::Ref(refp, _) = pat.kind {
+ Some((pat.span, snippet(cx, refp.span, "..").to_string()))
+ } else {
+ None
+ }
+ });
+
+ span_lint_and_then(cx, MATCH_REF_PATS, expr.span, title, |diag| {
+ if !expr.span.from_expansion() {
+ multispan_sugg(diag, msg, first_sugg.chain(remaining_suggs));
+ }
+ });
+}
+
+fn has_multiple_ref_pats<'a, 'b, I>(pats: I) -> bool
+where
+ 'b: 'a,
+ I: Iterator<Item = &'a Pat<'b>>,
+{
+ let mut ref_count = 0;
+ for opt in pats.map(|pat| match pat.kind {
+ PatKind::Ref(..) => Some(true), // &-patterns
+ PatKind::Wild => Some(false), // an "anything" wildcard is also fine
+ _ => None, // any other pattern is not fine
+ }) {
+ if let Some(inner) = opt {
+ if inner {
+ ref_count += 1;
+ }
+ } else {
+ return false;
+ }
+ }
+ ref_count > 1
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
new file mode 100644
index 000000000..582782f24
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
@@ -0,0 +1,414 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::{path_to_local, search_same, SpanlessEq, SpanlessHash};
+use core::cmp::Ordering;
+use core::iter;
+use core::slice;
+use rustc_arena::DroplessArena;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Arm, Expr, ExprKind, HirId, HirIdMap, HirIdSet, Pat, PatKind, RangeEnd};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::Symbol;
+use std::collections::hash_map::Entry;
+
+use super::MATCH_SAME_ARMS;
+
+#[expect(clippy::too_many_lines)]
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
+ let hash = |&(_, arm): &(usize, &Arm<'_>)| -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_expr(arm.body);
+ h.finish()
+ };
+
+ let arena = DroplessArena::default();
+ let normalized_pats: Vec<_> = arms
+ .iter()
+ .map(|a| NormalizedPat::from_pat(cx, &arena, a.pat))
+ .collect();
+
+ // The furthest forwards a pattern can move without semantic changes
+ let forwards_blocking_idxs: Vec<_> = normalized_pats
+ .iter()
+ .enumerate()
+ .map(|(i, pat)| {
+ normalized_pats[i + 1..]
+ .iter()
+ .enumerate()
+ .find_map(|(j, other)| pat.has_overlapping_values(other).then_some(i + 1 + j))
+ .unwrap_or(normalized_pats.len())
+ })
+ .collect();
+
+ // The furthest backwards a pattern can move without semantic changes
+ let backwards_blocking_idxs: Vec<_> = normalized_pats
+ .iter()
+ .enumerate()
+ .map(|(i, pat)| {
+ normalized_pats[..i]
+ .iter()
+ .enumerate()
+ .rev()
+ .zip(forwards_blocking_idxs[..i].iter().copied().rev())
+ .skip_while(|&(_, forward_block)| forward_block > i)
+ .find_map(|((j, other), forward_block)| {
+ (forward_block == i || pat.has_overlapping_values(other)).then_some(j)
+ })
+ .unwrap_or(0)
+ })
+ .collect();
+
+ let eq = |&(lindex, lhs): &(usize, &Arm<'_>), &(rindex, rhs): &(usize, &Arm<'_>)| -> bool {
+ let min_index = usize::min(lindex, rindex);
+ let max_index = usize::max(lindex, rindex);
+
+ let mut local_map: HirIdMap<HirId> = HirIdMap::default();
+ let eq_fallback = |a: &Expr<'_>, b: &Expr<'_>| {
+ if_chain! {
+ if let Some(a_id) = path_to_local(a);
+ if let Some(b_id) = path_to_local(b);
+ let entry = match local_map.entry(a_id) {
+ Entry::Vacant(entry) => entry,
+ // check if using the same bindings as before
+ Entry::Occupied(entry) => return *entry.get() == b_id,
+ };
+ // the names technically don't have to match; this makes the lint more conservative
+ if cx.tcx.hir().name(a_id) == cx.tcx.hir().name(b_id);
+ if cx.typeck_results().expr_ty(a) == cx.typeck_results().expr_ty(b);
+ if pat_contains_local(lhs.pat, a_id);
+ if pat_contains_local(rhs.pat, b_id);
+ then {
+ entry.insert(b_id);
+ true
+ } else {
+ false
+ }
+ }
+ };
+ // Arms with a guard are ignored, as those can't always be merged together.
+ // If both arms overlap with an arm in between, then these can't be merged either.
+ !(backwards_blocking_idxs[max_index] > min_index && forwards_blocking_idxs[min_index] < max_index)
+ && lhs.guard.is_none()
+ && rhs.guard.is_none()
+ && SpanlessEq::new(cx)
+ .expr_fallback(eq_fallback)
+ .eq_expr(lhs.body, rhs.body)
+ // these checks could be removed to allow unused bindings
+ && bindings_eq(lhs.pat, local_map.keys().copied().collect())
+ && bindings_eq(rhs.pat, local_map.values().copied().collect())
+ };
+
+ let indexed_arms: Vec<(usize, &Arm<'_>)> = arms.iter().enumerate().collect();
+ for (&(i, arm1), &(j, arm2)) in search_same(&indexed_arms, hash, eq) {
+ if matches!(arm2.pat.kind, PatKind::Wild) {
+ span_lint_and_then(
+ cx,
+ MATCH_SAME_ARMS,
+ arm1.span,
+ "this match arm has an identical body to the `_` wildcard arm",
+ |diag| {
+ diag.span_suggestion(arm1.span, "try removing the arm", "", Applicability::MaybeIncorrect)
+ .help("or try changing either arm body")
+ .span_note(arm2.span, "`_` wildcard arm here");
+ },
+ );
+ } else {
+ let back_block = backwards_blocking_idxs[j];
+ let (keep_arm, move_arm) = if back_block < i || (back_block == 0 && forwards_blocking_idxs[i] <= j) {
+ (arm1, arm2)
+ } else {
+ (arm2, arm1)
+ };
+
+ span_lint_and_then(
+ cx,
+ MATCH_SAME_ARMS,
+ keep_arm.span,
+ "this match arm has an identical body to another arm",
+ |diag| {
+ let move_pat_snip = snippet(cx, move_arm.pat.span, "<pat2>");
+ let keep_pat_snip = snippet(cx, keep_arm.pat.span, "<pat1>");
+
+ diag.span_suggestion(
+ keep_arm.pat.span,
+ "try merging the arm patterns",
+ format!("{} | {}", keep_pat_snip, move_pat_snip),
+ Applicability::MaybeIncorrect,
+ )
+ .help("or try changing either arm body")
+ .span_note(move_arm.span, "other arm here");
+ },
+ );
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+enum NormalizedPat<'a> {
+ Wild,
+ Struct(Option<DefId>, &'a [(Symbol, Self)]),
+ Tuple(Option<DefId>, &'a [Self]),
+ Or(&'a [Self]),
+ Path(Option<DefId>),
+ LitStr(Symbol),
+ LitBytes(&'a [u8]),
+ LitInt(u128),
+ LitBool(bool),
+ Range(PatRange),
+ /// A slice pattern. If the second value is `None`, then this matches an exact size. Otherwise
+ /// the first value contains everything before the `..` wildcard pattern, and the second value
+ /// contains everything afterwards. Note that either side, or both sides, may contain zero
+ /// patterns.
+ Slice(&'a [Self], Option<&'a [Self]>),
+}
+
+#[derive(Clone, Copy)]
+struct PatRange {
+ start: u128,
+ end: u128,
+ bounds: RangeEnd,
+}
+impl PatRange {
+ fn contains(&self, x: u128) -> bool {
+ x >= self.start
+ && match self.bounds {
+ RangeEnd::Included => x <= self.end,
+ RangeEnd::Excluded => x < self.end,
+ }
+ }
+
+ fn overlaps(&self, other: &Self) -> bool {
+ // Note: Empty ranges are impossible, so this is correct even though it would return true if an
+ // empty exclusive range were to reside within an inclusive range.
+ (match self.bounds {
+ RangeEnd::Included => self.end >= other.start,
+ RangeEnd::Excluded => self.end > other.start,
+ } && match other.bounds {
+ RangeEnd::Included => self.start <= other.end,
+ RangeEnd::Excluded => self.start < other.end,
+ })
+ }
+}
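+
+// Overlap sketches: `1..=5` and `5..=9` overlap because both contain 5, while
+// `1..5` and `5..=9` do not, since the exclusive upper bound stops short of 5.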
+
+/// Iterates over the pairs of fields with matching names.
+fn iter_matching_struct_fields<'a>(
+ left: &'a [(Symbol, NormalizedPat<'a>)],
+ right: &'a [(Symbol, NormalizedPat<'a>)],
+) -> impl Iterator<Item = (&'a NormalizedPat<'a>, &'a NormalizedPat<'a>)> + 'a {
+ struct Iter<'a>(
+ slice::Iter<'a, (Symbol, NormalizedPat<'a>)>,
+ slice::Iter<'a, (Symbol, NormalizedPat<'a>)>,
+ );
+ impl<'a> Iterator for Iter<'a> {
+ type Item = (&'a NormalizedPat<'a>, &'a NormalizedPat<'a>);
+ fn next(&mut self) -> Option<Self::Item> {
+ // Note: all the fields in each slice are sorted by symbol value.
+ let mut left = self.0.next()?;
+ let mut right = self.1.next()?;
+ loop {
+ match left.0.cmp(&right.0) {
+ Ordering::Equal => return Some((&left.1, &right.1)),
+ Ordering::Less => left = self.0.next()?,
+ Ordering::Greater => right = self.1.next()?,
+ }
+ }
+ }
+ }
+ Iter(left.iter(), right.iter())
+}
+
+#[expect(clippy::similar_names)]
+impl<'a> NormalizedPat<'a> {
+ #[expect(clippy::too_many_lines)]
+ fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self {
+ match pat.kind {
+ PatKind::Wild | PatKind::Binding(.., None) => Self::Wild,
+ PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) | PatKind::Ref(pat, _) => {
+ Self::from_pat(cx, arena, pat)
+ },
+ PatKind::Struct(ref path, fields, _) => {
+ let fields =
+ arena.alloc_from_iter(fields.iter().map(|f| (f.ident.name, Self::from_pat(cx, arena, f.pat))));
+ fields.sort_by_key(|&(name, _)| name);
+ Self::Struct(cx.qpath_res(path, pat.hir_id).opt_def_id(), fields)
+ },
+ PatKind::TupleStruct(ref path, pats, wild_idx) => {
+ let adt = match cx.typeck_results().pat_ty(pat).ty_adt_def() {
+ Some(x) => x,
+ None => return Self::Wild,
+ };
+ let (var_id, variant) = if adt.is_enum() {
+ match cx.qpath_res(path, pat.hir_id).opt_def_id() {
+ Some(x) => (Some(x), adt.variant_with_ctor_id(x)),
+ None => return Self::Wild,
+ }
+ } else {
+ (None, adt.non_enum_variant())
+ };
+ let (front, back) = match wild_idx {
+ Some(i) => pats.split_at(i),
+ None => (pats, [].as_slice()),
+ };
+ let pats = arena.alloc_from_iter(
+ front
+ .iter()
+ .map(|pat| Self::from_pat(cx, arena, pat))
+ .chain(iter::repeat_with(|| Self::Wild).take(variant.fields.len() - pats.len()))
+ .chain(back.iter().map(|pat| Self::from_pat(cx, arena, pat))),
+ );
+ Self::Tuple(var_id, pats)
+ },
+ PatKind::Or(pats) => Self::Or(arena.alloc_from_iter(pats.iter().map(|pat| Self::from_pat(cx, arena, pat)))),
+ PatKind::Path(ref path) => Self::Path(cx.qpath_res(path, pat.hir_id).opt_def_id()),
+ PatKind::Tuple(pats, wild_idx) => {
+ let field_count = match cx.typeck_results().pat_ty(pat).kind() {
+ ty::Tuple(subs) => subs.len(),
+ _ => return Self::Wild,
+ };
+ let (front, back) = match wild_idx {
+ Some(i) => pats.split_at(i),
+ None => (pats, [].as_slice()),
+ };
+ let pats = arena.alloc_from_iter(
+ front
+ .iter()
+ .map(|pat| Self::from_pat(cx, arena, pat))
+ .chain(iter::repeat_with(|| Self::Wild).take(field_count - pats.len()))
+ .chain(back.iter().map(|pat| Self::from_pat(cx, arena, pat))),
+ );
+ Self::Tuple(None, pats)
+ },
+ PatKind::Lit(e) => match &e.kind {
+ // TODO: Handle negative integers. They're currently treated as a wild match.
+ ExprKind::Lit(lit) => match lit.node {
+ LitKind::Str(sym, _) => Self::LitStr(sym),
+ LitKind::ByteStr(ref bytes) => Self::LitBytes(bytes),
+ LitKind::Byte(val) => Self::LitInt(val.into()),
+ LitKind::Char(val) => Self::LitInt(val.into()),
+ LitKind::Int(val, _) => Self::LitInt(val),
+ LitKind::Bool(val) => Self::LitBool(val),
+ LitKind::Float(..) | LitKind::Err(_) => Self::Wild,
+ },
+ _ => Self::Wild,
+ },
+ PatKind::Range(start, end, bounds) => {
+ // TODO: Handle negative integers. They're currently treated as a wild match.
+ let start = match start {
+ None => 0,
+ Some(e) => match &e.kind {
+ ExprKind::Lit(lit) => match lit.node {
+ LitKind::Int(val, _) => val,
+ LitKind::Char(val) => val.into(),
+ LitKind::Byte(val) => val.into(),
+ _ => return Self::Wild,
+ },
+ _ => return Self::Wild,
+ },
+ };
+ let (end, bounds) = match end {
+ None => (u128::MAX, RangeEnd::Included),
+ Some(e) => match &e.kind {
+ ExprKind::Lit(lit) => match lit.node {
+ LitKind::Int(val, _) => (val, bounds),
+ LitKind::Char(val) => (val.into(), bounds),
+ LitKind::Byte(val) => (val.into(), bounds),
+ _ => return Self::Wild,
+ },
+ _ => return Self::Wild,
+ },
+ };
+ Self::Range(PatRange { start, end, bounds })
+ },
+ PatKind::Slice(front, wild_pat, back) => Self::Slice(
+ arena.alloc_from_iter(front.iter().map(|pat| Self::from_pat(cx, arena, pat))),
+ wild_pat.map(|_| &*arena.alloc_from_iter(back.iter().map(|pat| Self::from_pat(cx, arena, pat)))),
+ ),
+ }
+ }
+
+ /// Checks if two patterns overlap in the values they can match assuming they are for the same
+ /// type.
+ fn has_overlapping_values(&self, other: &Self) -> bool {
+ match (*self, *other) {
+ (Self::Wild, _) | (_, Self::Wild) => true,
+ (Self::Or(pats), ref other) | (ref other, Self::Or(pats)) => {
+ pats.iter().any(|pat| pat.has_overlapping_values(other))
+ },
+ (Self::Struct(lpath, lfields), Self::Struct(rpath, rfields)) => {
+ if lpath != rpath {
+ return false;
+ }
+ iter_matching_struct_fields(lfields, rfields).all(|(lpat, rpat)| lpat.has_overlapping_values(rpat))
+ },
+ (Self::Tuple(lpath, lpats), Self::Tuple(rpath, rpats)) => {
+ if lpath != rpath {
+ return false;
+ }
+ lpats
+ .iter()
+ .zip(rpats.iter())
+ .all(|(lpat, rpat)| lpat.has_overlapping_values(rpat))
+ },
+ (Self::Path(x), Self::Path(y)) => x == y,
+ (Self::LitStr(x), Self::LitStr(y)) => x == y,
+ (Self::LitBytes(x), Self::LitBytes(y)) => x == y,
+ (Self::LitInt(x), Self::LitInt(y)) => x == y,
+ (Self::LitBool(x), Self::LitBool(y)) => x == y,
+ (Self::Range(ref x), Self::Range(ref y)) => x.overlaps(y),
+ (Self::Range(ref range), Self::LitInt(x)) | (Self::LitInt(x), Self::Range(ref range)) => range.contains(x),
+ (Self::Slice(lpats, None), Self::Slice(rpats, None)) => {
+ lpats.len() == rpats.len() && lpats.iter().zip(rpats.iter()).all(|(x, y)| x.has_overlapping_values(y))
+ },
+ (Self::Slice(pats, None), Self::Slice(front, Some(back)))
+ | (Self::Slice(front, Some(back)), Self::Slice(pats, None)) => {
+ // Here `pats` matches an exact size. If the combined lengths of `front` and `back` are greater,
+ // then the minimum length required by the variable-length pattern exceeds the length of `pats`.
+ if pats.len() < front.len() + back.len() {
+ return false;
+ }
+ pats[..front.len()]
+ .iter()
+ .zip(front.iter())
+ .chain(pats[pats.len() - back.len()..].iter().zip(back.iter()))
+ .all(|(x, y)| x.has_overlapping_values(y))
+ },
+ (Self::Slice(lfront, Some(lback)), Self::Slice(rfront, Some(rback))) => lfront
+ .iter()
+ .zip(rfront.iter())
+ .chain(lback.iter().rev().zip(rback.iter().rev()))
+ .all(|(x, y)| x.has_overlapping_values(y)),
+
+ // Enums can mix unit variants with tuple/struct variants. These can never overlap.
+ (Self::Path(_), Self::Tuple(..) | Self::Struct(..))
+ | (Self::Tuple(..) | Self::Struct(..), Self::Path(_)) => false,
+
+ // Tuples can be matched like a struct.
+ (Self::Tuple(x, _), Self::Struct(y, _)) | (Self::Struct(x, _), Self::Tuple(y, _)) => {
+ // TODO: check fields here.
+ x == y
+ },
+
+ // TODO: Lit* with Path, Range with Path, LitBytes with Slice
+ _ => true,
+ }
+ }
+}
+
+fn pat_contains_local(pat: &Pat<'_>, id: HirId) -> bool {
+ let mut result = false;
+ pat.walk_short(|p| {
+ result |= matches!(p.kind, PatKind::Binding(_, binding_id, ..) if binding_id == id);
+ !result
+ });
+ result
+}
+
+/// Returns true if all the bindings in the `Pat` are in `ids` and vice versa
+fn bindings_eq(pat: &Pat<'_>, mut ids: HirIdSet) -> bool {
+ let mut result = true;
+ pat.each_binding_or_first(&mut |_, id, _, _| result &= ids.remove(&id));
+ result && ids.is_empty()
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs b/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs
new file mode 100644
index 000000000..5ae4a65ac
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs
@@ -0,0 +1,216 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::HirNode;
+use clippy_utils::source::{indent_of, snippet, snippet_block, snippet_with_applicability};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{get_parent_expr, is_refutable, peel_blocks};
+use rustc_errors::Applicability;
+use rustc_hir::{Arm, Expr, ExprKind, Node, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+
+use super::MATCH_SINGLE_BINDING;
+
+enum AssignmentExpr {
+ Assign { span: Span, match_span: Span },
+ Local { span: Span, pat_span: Span },
+}
+
+#[expect(clippy::too_many_lines)]
+pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], expr: &Expr<'a>) {
+ if expr.span.from_expansion() || arms.len() != 1 || is_refutable(cx, arms[0].pat) {
+ return;
+ }
+
+ let matched_vars = ex.span;
+ let bind_names = arms[0].pat.span;
+ let match_body = peel_blocks(arms[0].body);
+ let mut snippet_body = if match_body.span.from_expansion() {
+ Sugg::hir_with_macro_callsite(cx, match_body, "..").to_string()
+ } else {
+ snippet_block(cx, match_body.span, "..", Some(expr.span)).to_string()
+ };
+
+ // Do we need to add ';' to the suggestion?
+ match match_body.kind {
+ ExprKind::Block(block, _) => {
+ // macro + expr_ty(body) == ()
+ if block.span.from_expansion() && cx.typeck_results().expr_ty(match_body).is_unit() {
+ snippet_body.push(';');
+ }
+ },
+ _ => {
+ // expr_ty(body) == ()
+ if cx.typeck_results().expr_ty(match_body).is_unit() {
+ snippet_body.push(';');
+ }
+ },
+ }
+
+ let mut applicability = Applicability::MaybeIncorrect;
+ match arms[0].pat.kind {
+ PatKind::Binding(..) | PatKind::Tuple(_, _) | PatKind::Struct(..) => {
+ let (target_span, sugg) = match opt_parent_assign_span(cx, ex) {
+ Some(AssignmentExpr::Assign { span, match_span }) => {
+ let sugg = sugg_with_curlies(
+ cx,
+ (ex, expr),
+ (bind_names, matched_vars),
+ &snippet_body,
+ &mut applicability,
+ Some(span),
+ );
+
+ span_lint_and_sugg(
+ cx,
+ MATCH_SINGLE_BINDING,
+ span.to(match_span),
+ "this assignment could be simplified",
+ "consider removing the `match` expression",
+ sugg,
+ applicability,
+ );
+
+ return;
+ },
+ Some(AssignmentExpr::Local { span, pat_span }) => (
+ span,
+ format!(
+ "let {} = {};\n{}let {} = {};",
+ snippet_with_applicability(cx, bind_names, "..", &mut applicability),
+ snippet_with_applicability(cx, matched_vars, "..", &mut applicability),
+ " ".repeat(indent_of(cx, expr.span).unwrap_or(0)),
+ snippet_with_applicability(cx, pat_span, "..", &mut applicability),
+ snippet_body
+ ),
+ ),
+ None => {
+ let sugg = sugg_with_curlies(
+ cx,
+ (ex, expr),
+ (bind_names, matched_vars),
+ &snippet_body,
+ &mut applicability,
+ None,
+ );
+ (expr.span, sugg)
+ },
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MATCH_SINGLE_BINDING,
+ target_span,
+ "this match could be written as a `let` statement",
+ "consider using a `let` statement",
+ sugg,
+ applicability,
+ );
+ },
+ PatKind::Wild => {
+ if ex.can_have_side_effects() {
+ let indent = " ".repeat(indent_of(cx, expr.span).unwrap_or(0));
+ let sugg = format!(
+ "{};\n{}{}",
+ snippet_with_applicability(cx, ex.span, "..", &mut applicability),
+ indent,
+ snippet_body
+ );
+
+ span_lint_and_sugg(
+ cx,
+ MATCH_SINGLE_BINDING,
+ expr.span,
+ "this match could be replaced by its scrutinee and body",
+ "consider using the scrutinee and body instead",
+ sugg,
+ applicability,
+ );
+ } else {
+ span_lint_and_sugg(
+ cx,
+ MATCH_SINGLE_BINDING,
+ expr.span,
+ "this match could be replaced by its body itself",
+ "consider using the match body instead",
+ snippet_body,
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ _ => (),
+ }
+}
+
+/// Returns the parent `let`/assignment spans if the `ex` match expression is in a local (`let`) or assign expression
+fn opt_parent_assign_span<'a>(cx: &LateContext<'a>, ex: &Expr<'a>) -> Option<AssignmentExpr> {
+ let map = &cx.tcx.hir();
+
+ if let Some(Node::Expr(parent_arm_expr)) = map.find(map.get_parent_node(ex.hir_id)) {
+ return match map.find(map.get_parent_node(parent_arm_expr.hir_id)) {
+ Some(Node::Local(parent_let_expr)) => Some(AssignmentExpr::Local {
+ span: parent_let_expr.span,
+ pat_span: parent_let_expr.pat.span(),
+ }),
+ Some(Node::Expr(Expr {
+ kind: ExprKind::Assign(parent_assign_expr, match_expr, _),
+ ..
+ })) => Some(AssignmentExpr::Assign {
+ span: parent_assign_expr.span,
+ match_span: match_expr.span,
+ }),
+ _ => None,
+ };
+ }
+
+ None
+}
+
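+// Builds the `let <pattern> = <scrutinee>;` replacement, wrapping it in curly braces
+// when the match is the direct body of a closure or of another match arm.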
+fn sugg_with_curlies<'a>(
+ cx: &LateContext<'a>,
+ (ex, match_expr): (&Expr<'a>, &Expr<'a>),
+ (bind_names, matched_vars): (Span, Span),
+ snippet_body: &str,
+ applicability: &mut Applicability,
+ assignment: Option<Span>,
+) -> String {
+ let mut indent = " ".repeat(indent_of(cx, ex.span).unwrap_or(0));
+
+ let (mut cbrace_start, mut cbrace_end) = (String::new(), String::new());
+ if let Some(parent_expr) = get_parent_expr(cx, match_expr) {
+ if let ExprKind::Closure { .. } = parent_expr.kind {
+ cbrace_end = format!("\n{}}}", indent);
+ // Fix body indent due to the closure
+ indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0));
+ cbrace_start = format!("{{\n{}", indent);
+ }
+ }
+
+ // If the parent is already an arm, and the body is another match statement,
+ // we need curly braces around suggestion
+ let parent_node_id = cx.tcx.hir().get_parent_node(match_expr.hir_id);
+ if let Node::Arm(arm) = &cx.tcx.hir().get(parent_node_id) {
+ if let ExprKind::Match(..) = arm.body.kind {
+ cbrace_end = format!("\n{}}}", indent);
+ // Fix body indent due to the match
+ indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0));
+ cbrace_start = format!("{{\n{}", indent);
+ }
+ }
+
+ let assignment_str = assignment.map_or_else(String::new, |span| {
+ let mut s = snippet(cx, span, "..").to_string();
+ s.push_str(" = ");
+ s
+ });
+
+ format!(
+ "{}let {} = {};\n{}{}{}{}",
+ cbrace_start,
+ snippet_with_applicability(cx, bind_names, "..", applicability),
+ snippet_with_applicability(cx, matched_vars, "..", applicability),
+ indent,
+ assignment_str,
+ snippet_body,
+ cbrace_end
+ )
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs b/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs
new file mode 100644
index 000000000..fa3b8d1fc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs
@@ -0,0 +1,125 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{Arm, Expr, ExprKind, PatKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::symbol::Symbol;
+use rustc_span::{sym, Span};
+
+use super::MATCH_STR_CASE_MISMATCH;
+
+#[derive(Debug)]
+enum CaseMethod {
+ LowerCase,
+ AsciiLowerCase,
+ UpperCase,
+ AsciiUppercase,
+}
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) {
+ if_chain! {
+ if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty(scrutinee).kind();
+ if let ty::Str = ty.kind();
+ then {
+ let mut visitor = MatchExprVisitor {
+ cx,
+ case_method: None,
+ };
+
+ visitor.visit_expr(scrutinee);
+
+ if let Some(case_method) = visitor.case_method {
+ if let Some((bad_case_span, bad_case_sym)) = verify_case(&case_method, arms) {
+ lint(cx, &case_method, bad_case_span, bad_case_sym.as_str());
+ }
+ }
+ }
+ }
+}
+
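+// Walks the scrutinee looking for a case-altering method call (e.g. `to_lowercase`)
+// on a `str` or `String` receiver.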
+struct MatchExprVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ case_method: Option<CaseMethod>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for MatchExprVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, ex: &'tcx Expr<'_>) {
+ match ex.kind {
+ ExprKind::MethodCall(segment, [receiver], _) if self.case_altered(segment.ident.as_str(), receiver) => {},
+ _ => walk_expr(self, ex),
+ }
+ }
+}
+
+impl<'a, 'tcx> MatchExprVisitor<'a, 'tcx> {
+ fn case_altered(&mut self, segment_ident: &str, receiver: &Expr<'_>) -> bool {
+ if let Some(case_method) = get_case_method(segment_ident) {
+ let ty = self.cx.typeck_results().expr_ty(receiver).peel_refs();
+
+ if is_type_diagnostic_item(self.cx, ty, sym::String) || ty.kind() == &ty::Str {
+ self.case_method = Some(case_method);
+ return true;
+ }
+ }
+
+ false
+ }
+}
+
+fn get_case_method(segment_ident_str: &str) -> Option<CaseMethod> {
+ match segment_ident_str {
+ "to_lowercase" => Some(CaseMethod::LowerCase),
+ "to_ascii_lowercase" => Some(CaseMethod::AsciiLowerCase),
+ "to_uppercase" => Some(CaseMethod::UpperCase),
+ "to_ascii_uppercase" => Some(CaseMethod::AsciiUppercase),
+ _ => None,
+ }
+}
+
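+// Checks every arm's string literal against the recorded case method and returns the
+// span and symbol of the first literal whose case does not match.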
+fn verify_case<'a>(case_method: &'a CaseMethod, arms: &'a [Arm<'_>]) -> Option<(Span, Symbol)> {
+ let case_check = match case_method {
+ CaseMethod::LowerCase => |input: &str| -> bool { input.chars().all(|c| c.to_lowercase().next() == Some(c)) },
+ CaseMethod::AsciiLowerCase => |input: &str| -> bool { !input.chars().any(|c| c.is_ascii_uppercase()) },
+ CaseMethod::UpperCase => |input: &str| -> bool { input.chars().all(|c| c.to_uppercase().next() == Some(c)) },
+ CaseMethod::AsciiUppercase => |input: &str| -> bool { !input.chars().any(|c| c.is_ascii_lowercase()) },
+ };
+
+ for arm in arms {
+ if_chain! {
+ if let PatKind::Lit(Expr {
+ kind: ExprKind::Lit(lit),
+ ..
+ }) = arm.pat.kind;
+ if let LitKind::Str(symbol, _) = lit.node;
+ let input = symbol.as_str();
+ if !case_check(input);
+ then {
+ return Some((lit.span, symbol));
+ }
+ }
+ }
+
+ None
+}
+
+fn lint(cx: &LateContext<'_>, case_method: &CaseMethod, bad_case_span: Span, bad_case_str: &str) {
+ let (method_str, suggestion) = match case_method {
+ CaseMethod::LowerCase => ("to_lowercase", bad_case_str.to_lowercase()),
+ CaseMethod::AsciiLowerCase => ("to_ascii_lowercase", bad_case_str.to_ascii_lowercase()),
+ CaseMethod::UpperCase => ("to_uppercase", bad_case_str.to_uppercase()),
+ CaseMethod::AsciiUppercase => ("to_ascii_uppercase", bad_case_str.to_ascii_uppercase()),
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MATCH_STR_CASE_MISMATCH,
+ bad_case_span,
+ "this `match` arm has a differing case than its expression",
+ &format!("consider changing the case of this arm to respect `{}`", method_str),
+ format!("\"{}\"", suggestion),
+ Applicability::MachineApplicable,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs
new file mode 100644
index 000000000..6f8d766ae
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs
@@ -0,0 +1,196 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_refutable, peel_hir_pat_refs, recurse_or_patterns};
+use rustc_errors::Applicability;
+use rustc_hir::def::{CtorKind, DefKind, Res};
+use rustc_hir::{Arm, Expr, PatKind, PathSegment, QPath, Ty, TyKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, VariantDef};
+use rustc_span::sym;
+
+use super::{MATCH_WILDCARD_FOR_SINGLE_VARIANTS, WILDCARD_ENUM_MATCH_ARM};
+
+#[expect(clippy::too_many_lines)]
+pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) {
+ let ty = cx.typeck_results().expr_ty(ex).peel_refs();
+ let adt_def = match ty.kind() {
+ ty::Adt(adt_def, _)
+ if adt_def.is_enum()
+ && !(is_type_diagnostic_item(cx, ty, sym::Option) || is_type_diagnostic_item(cx, ty, sym::Result)) =>
+ {
+ adt_def
+ },
+ _ => return,
+ };
+
+ // First pass - check for violation, but don't do much book-keeping because this is hopefully
+ // the uncommon case, and the book-keeping is slightly expensive.
+ let mut wildcard_span = None;
+ let mut wildcard_ident = None;
+ let mut has_non_wild = false;
+ for arm in arms {
+ match peel_hir_pat_refs(arm.pat).0.kind {
+ PatKind::Wild => wildcard_span = Some(arm.pat.span),
+ PatKind::Binding(_, _, ident, None) => {
+ wildcard_span = Some(arm.pat.span);
+ wildcard_ident = Some(ident);
+ },
+ _ => has_non_wild = true,
+ }
+ }
+ let wildcard_span = match wildcard_span {
+ Some(x) if has_non_wild => x,
+ _ => return,
+ };
+
+ // Accumulate the variants which should be put in place of the wildcard because they're not
+ // already covered.
+ let has_hidden = adt_def.variants().iter().any(|x| is_hidden(cx, x));
+ let mut missing_variants: Vec<_> = adt_def.variants().iter().filter(|x| !is_hidden(cx, x)).collect();
+
+ let mut path_prefix = CommonPrefixSearcher::None;
+ for arm in arms {
+ // Guards mean that this case probably isn't exhaustively covered. Technically
+ // this is incorrect, as we should really check whether each variant is exhaustively
+ // covered by the set of guards that cover it, but that's really hard to do.
+ recurse_or_patterns(arm.pat, |pat| {
+ let path = match &peel_hir_pat_refs(pat).0.kind {
+ PatKind::Path(path) => {
+ let id = match cx.qpath_res(path, pat.hir_id) {
+ Res::Def(
+ DefKind::Const | DefKind::ConstParam | DefKind::AnonConst | DefKind::InlineConst,
+ _,
+ ) => return,
+ Res::Def(_, id) => id,
+ _ => return,
+ };
+ if arm.guard.is_none() {
+ missing_variants.retain(|e| e.ctor_def_id != Some(id));
+ }
+ path
+ },
+ PatKind::TupleStruct(path, patterns, ..) => {
+ if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() {
+ if arm.guard.is_none() && patterns.iter().all(|p| !is_refutable(cx, p)) {
+ missing_variants.retain(|e| e.ctor_def_id != Some(id));
+ }
+ }
+ path
+ },
+ PatKind::Struct(path, patterns, ..) => {
+ if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() {
+ if arm.guard.is_none() && patterns.iter().all(|p| !is_refutable(cx, p.pat)) {
+ missing_variants.retain(|e| e.def_id != id);
+ }
+ }
+ path
+ },
+ _ => return,
+ };
+ match path {
+ QPath::Resolved(_, path) => path_prefix.with_path(path.segments),
+ QPath::TypeRelative(
+ Ty {
+ kind: TyKind::Path(QPath::Resolved(_, path)),
+ ..
+ },
+ _,
+ ) => path_prefix.with_prefix(path.segments),
+ _ => (),
+ }
+ });
+ }
+
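+ // Build the suggested pattern for a missing variant: an optional `ident @` prefix,
+ // the shared path prefix (or the enum's full path), the variant name, and a field
+ // placeholder matching its constructor kind.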
+ let format_suggestion = |variant: &VariantDef| {
+ format!(
+ "{}{}{}{}",
+ if let Some(ident) = wildcard_ident {
+ format!("{} @ ", ident.name)
+ } else {
+ String::new()
+ },
+ if let CommonPrefixSearcher::Path(path_prefix) = path_prefix {
+ let mut s = String::new();
+ for seg in path_prefix {
+ s.push_str(seg.ident.as_str());
+ s.push_str("::");
+ }
+ s
+ } else {
+ let mut s = cx.tcx.def_path_str(adt_def.did());
+ s.push_str("::");
+ s
+ },
+ variant.name,
+ match variant.ctor_kind {
+ CtorKind::Fn if variant.fields.len() == 1 => "(_)",
+ CtorKind::Fn => "(..)",
+ CtorKind::Const => "",
+ CtorKind::Fictive => "{ .. }",
+ }
+ )
+ };
+
+ match missing_variants.as_slice() {
+ [] => (),
+ [x] if !adt_def.is_variant_list_non_exhaustive() && !has_hidden => span_lint_and_sugg(
+ cx,
+ MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
+ wildcard_span,
+ "wildcard matches only a single variant and will also match any future added variants",
+ "try this",
+ format_suggestion(x),
+ Applicability::MaybeIncorrect,
+ ),
+ variants => {
+ let mut suggestions: Vec<_> = variants.iter().copied().map(format_suggestion).collect();
+ let message = if adt_def.is_variant_list_non_exhaustive() || has_hidden {
+ suggestions.push("_".into());
+ "wildcard matches known variants and will also match future added variants"
+ } else {
+ "wildcard match will also match any future added variants"
+ };
+
+ span_lint_and_sugg(
+ cx,
+ WILDCARD_ENUM_MATCH_ARM,
+ wildcard_span,
+ message,
+ "try this",
+ suggestions.join(" | "),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ };
+}
+
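+// Tracks the path prefix shared by the arm patterns: `None` until a path is seen,
+// `Path` while every pattern agrees on the prefix, and `Mixed` once they diverge.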
+enum CommonPrefixSearcher<'a> {
+ None,
+ Path(&'a [PathSegment<'a>]),
+ Mixed,
+}
+impl<'a> CommonPrefixSearcher<'a> {
+ fn with_path(&mut self, path: &'a [PathSegment<'a>]) {
+ match path {
+ [path @ .., _] => self.with_prefix(path),
+ [] => (),
+ }
+ }
+
+ fn with_prefix(&mut self, path: &'a [PathSegment<'a>]) {
+ match self {
+ Self::None => *self = Self::Path(path),
+ Self::Path(self_path)
+ if path
+ .iter()
+ .map(|p| p.ident.name)
+ .eq(self_path.iter().map(|p| p.ident.name)) => {},
+ Self::Path(_) => *self = Self::Mixed,
+ Self::Mixed => (),
+ }
+ }
+}
+
+fn is_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool {
+ cx.tcx.is_doc_hidden(variant_def.def_id) || cx.tcx.has_attr(variant_def.def_id, sym::unstable)
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs
new file mode 100644
index 000000000..bc16f17b6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs
@@ -0,0 +1,51 @@
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::macros::{is_panic, root_macro_call};
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::visitors::is_local_used;
+use clippy_utils::{is_wild, peel_blocks_with_stmt};
+use rustc_hir::{Arm, Expr, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::symbol::{kw, sym};
+
+use super::MATCH_WILD_ERR_ARM;
+
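+// Lints `Err(_)` (or unused `Err(_e)`) arms on a `Result` match whose body is just a
+// `panic!`-family macro call.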
+pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &Expr<'tcx>, arms: &[Arm<'tcx>]) {
+ let ex_ty = cx.typeck_results().expr_ty(ex).peel_refs();
+ if is_type_diagnostic_item(cx, ex_ty, sym::Result) {
+ for arm in arms {
+ if let PatKind::TupleStruct(ref path, inner, _) = arm.pat.kind {
+ let path_str = rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_qpath(path, false));
+ if path_str == "Err" {
+ let mut matching_wild = inner.iter().any(is_wild);
+ let mut ident_bind_name = kw::Underscore;
+ if !matching_wild {
+ // Looking for unused bindings (i.e.: `_e`)
+ for pat in inner.iter() {
+ if let PatKind::Binding(_, id, ident, None) = pat.kind {
+ if ident.as_str().starts_with('_') && !is_local_used(cx, arm.body, id) {
+ ident_bind_name = ident.name;
+ matching_wild = true;
+ }
+ }
+ }
+ }
+ if_chain! {
+ if matching_wild;
+ if let Some(macro_call) = root_macro_call(peel_blocks_with_stmt(arm.body).span);
+ if is_panic(cx, macro_call.def_id);
+ then {
+ // `Err(_)` or `Err(_e)` arm with `panic!` found
+ span_lint_and_note(cx,
+ MATCH_WILD_ERR_ARM,
+ arm.pat.span,
+ &format!("`Err({})` matches all errors", ident_bind_name),
+ None,
+ "match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable",
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs
new file mode 100644
index 000000000..eba230e5a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs
@@ -0,0 +1,1134 @@
+mod collapsible_match;
+mod infallible_destructuring_match;
+mod manual_map;
+mod manual_unwrap_or;
+mod match_as_ref;
+mod match_bool;
+mod match_like_matches;
+mod match_on_vec_items;
+mod match_ref_pats;
+mod match_same_arms;
+mod match_single_binding;
+mod match_str_case_mismatch;
+mod match_wild_enum;
+mod match_wild_err_arm;
+mod needless_match;
+mod overlapping_arms;
+mod redundant_pattern_match;
+mod rest_pat_in_fully_bound_struct;
+mod significant_drop_in_scrutinee;
+mod single_match;
+mod try_err;
+mod wild_in_or_pats;
+
+use clippy_utils::source::{snippet_opt, span_starts_with, walk_span_to_context};
+use clippy_utils::{higher, in_constant, meets_msrv, msrvs};
+use rustc_hir::{Arm, Expr, ExprKind, Local, MatchSource, Pat};
+use rustc_lexer::{tokenize, TokenKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{Span, SpanData, SyntaxContext};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for matches with a single arm where an `if let`
+ /// will usually suffice.
+ ///
+ /// ### Why is this bad?
+ /// Just readability – `if let` nests less than a `match`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn bar(stool: &str) {}
+ /// # let x = Some("abc");
+ /// match x {
+ /// Some(ref foo) => bar(foo),
+ /// _ => (),
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn bar(stool: &str) {}
+ /// # let x = Some("abc");
+ /// if let Some(ref foo) = x {
+ /// bar(foo);
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SINGLE_MATCH,
+ style,
+ "a `match` statement with a single nontrivial arm (i.e., where the other arm is `_ => {}`) instead of `if let`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for matches with two arms where an `if let else` will
+ /// usually suffice.
+ ///
+ /// ### Why is this bad?
+ /// Just readability – `if let` nests less than a `match`.
+ ///
+ /// ### Known problems
+ /// Personal style preferences may differ.
+ ///
+ /// ### Example
+ /// Using `match`:
+ ///
+ /// ```rust
+ /// # fn bar(foo: &usize) {}
+ /// # let other_ref: usize = 1;
+ /// # let x: Option<&usize> = Some(&1);
+ /// match x {
+ /// Some(ref foo) => bar(foo),
+ /// _ => bar(&other_ref),
+ /// }
+ /// ```
+ ///
+ /// Using `if let` with `else`:
+ ///
+ /// ```rust
+ /// # fn bar(foo: &usize) {}
+ /// # let other_ref: usize = 1;
+ /// # let x: Option<&usize> = Some(&1);
+ /// if let Some(ref foo) = x {
+ /// bar(foo);
+ /// } else {
+ /// bar(&other_ref);
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SINGLE_MATCH_ELSE,
+ pedantic,
+ "a `match` statement with two arms where the second arm's pattern is a placeholder instead of a specific match pattern"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for matches where all arms match a reference,
+ /// suggesting to remove the reference and deref the matched expression
+ /// instead. It also checks for `if let &foo = bar` blocks.
+ ///
+ /// ### Why is this bad?
+ /// It just makes the code less readable. That reference
+ /// destructuring adds nothing to the code.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// match x {
+ /// &A(ref y) => foo(y),
+ /// &B => bar(),
+ /// _ => frob(&x),
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// match *x {
+ /// A(ref y) => foo(y),
+ /// B => bar(),
+ /// _ => frob(x),
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MATCH_REF_PATS,
+ style,
+ "a `match` or `if let` with all arms prefixed with `&` instead of deref-ing the match expression"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for matches where the match expression is a `bool`. It
+ /// suggests replacing the expression with an `if...else` block.
+ ///
+ /// ### Why is this bad?
+ /// It makes the code less readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn foo() {}
+ /// # fn bar() {}
+ /// let condition: bool = true;
+ /// match condition {
+ /// true => foo(),
+ /// false => bar(),
+ /// }
+ /// ```
+ /// Use if/else instead:
+ /// ```rust
+ /// # fn foo() {}
+ /// # fn bar() {}
+ /// let condition: bool = true;
+ /// if condition {
+ /// foo();
+ /// } else {
+ /// bar();
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MATCH_BOOL,
+ pedantic,
+ "a `match` on a boolean expression instead of an `if..else` block"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for overlapping match arms.
+ ///
+ /// ### Why is this bad?
+ /// It is likely to be an error and, if not, makes the code
+ /// less obvious.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 5;
+ /// match x {
+ /// 1..=10 => println!("1 ... 10"),
+ /// 5..=15 => println!("5 ... 15"),
+ /// _ => (),
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MATCH_OVERLAPPING_ARM,
+ style,
+ "a `match` with overlapping arms"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for arms that match all errors with `Err(_)`
+ /// and take drastic actions like `panic!`.
+ ///
+ /// ### Why is this bad?
+ /// It is generally a bad practice, similar to
+ /// catching all exceptions in Java with `catch(Exception)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: Result<i32, &str> = Ok(3);
+ /// match x {
+ /// Ok(_) => println!("ok"),
+ /// Err(_) => panic!("err"),
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MATCH_WILD_ERR_ARM,
+ pedantic,
+ "a `match` with `Err(_)` arm and take drastic actions"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a `match` that is used to add a reference to an
+ /// `Option` value.
+ ///
+ /// ### Why is this bad?
+ /// Using `as_ref()` or `as_mut()` instead is shorter.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: Option<()> = None;
+ ///
+ /// let r: Option<&()> = match x {
+ /// None => None,
+ /// Some(ref v) => Some(v),
+ /// };
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x: Option<()> = None;
+ ///
+ /// let r: Option<&()> = x.as_ref();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MATCH_AS_REF,
+ complexity,
+ "a `match` on an Option value instead of using `as_ref()` or `as_mut`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for wildcard enum matches using `_`.
+ ///
+ /// ### Why is this bad?
+ /// New enum variants added by library updates can be missed.
+ ///
+ /// ### Known problems
+ /// Suggested replacements may be incorrect if guards exhaustively cover some
+ /// variants, and may not use the correct path to the enum if it's not present in the current scope.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # enum Foo { A(usize), B(usize) }
+ /// # let x = Foo::B(1);
+ /// match x {
+ /// Foo::A(_) => {},
+ /// _ => {},
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # enum Foo { A(usize), B(usize) }
+ /// # let x = Foo::B(1);
+ /// match x {
+ /// Foo::A(_) => {},
+ /// Foo::B(_) => {},
+ /// }
+ /// ```
+ #[clippy::version = "1.34.0"]
+ pub WILDCARD_ENUM_MATCH_ARM,
+ restriction,
+ "a wildcard enum match arm using `_`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for wildcard enum matches for a single variant.
+ ///
+ /// ### Why is this bad?
+ /// New enum variants added by library updates can be missed.
+ ///
+ /// ### Known problems
+ /// Suggested replacements may not use the correct path to the enum
+ /// if it's not present in the current scope.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # enum Foo { A, B, C }
+ /// # let x = Foo::B;
+ /// match x {
+ /// Foo::A => {},
+ /// Foo::B => {},
+ /// _ => {},
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # enum Foo { A, B, C }
+ /// # let x = Foo::B;
+ /// match x {
+ /// Foo::A => {},
+ /// Foo::B => {},
+ /// Foo::C => {},
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
+ pedantic,
+ "a wildcard enum match for a single variant"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a wildcard pattern used with other patterns in the same match arm.
+ ///
+ /// ### Why is this bad?
+ /// The wildcard pattern already covers any other pattern, as it matches anyway.
+ /// It makes the code less readable, especially when spotting wildcard pattern use in a match arm.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let s = "foo";
+ /// match s {
+ /// "a" => {},
+ /// "bar" | _ => {},
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let s = "foo";
+ /// match s {
+ /// "a" => {},
+ /// _ => {},
+ /// }
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub WILDCARD_IN_OR_PATTERNS,
+ complexity,
+ "a wildcard pattern used with others patterns in same match arm"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for matches being used to destructure a single-variant enum
+ /// or tuple struct where a `let` will suffice.
+ ///
+ /// ### Why is this bad?
+ /// Just readability – `let` doesn't nest, whereas a `match` does.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum Wrapper {
+ /// Data(i32),
+ /// }
+ ///
+ /// let wrapper = Wrapper::Data(42);
+ ///
+ /// let data = match wrapper {
+ /// Wrapper::Data(i) => i,
+ /// };
+ /// ```
+ ///
+ /// The correct use would be:
+ /// ```rust
+ /// enum Wrapper {
+ /// Data(i32),
+ /// }
+ ///
+ /// let wrapper = Wrapper::Data(42);
+ /// let Wrapper::Data(data) = wrapper;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INFALLIBLE_DESTRUCTURING_MATCH,
+ style,
+ "a `match` statement with a single infallible arm instead of a `let`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a useless `match` that binds to only one value.
+ ///
+ /// ### Why is this bad?
+ /// Readability and needless complexity.
+ ///
+ /// ### Known problems
+ /// Suggested replacements may be incorrect when the `match`
+ /// is actually binding a temporary value, leading to a 'dropped while borrowed' error.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let a = 1;
+ /// # let b = 2;
+ /// match (a, b) {
+ /// (c, d) => {
+ /// // useless match
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let a = 1;
+ /// # let b = 2;
+ /// let (c, d) = (a, b);
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub MATCH_SINGLE_BINDING,
+ complexity,
+ "a match with a single binding instead of using `let` statement"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary '..' pattern binding on struct when all fields are explicitly matched.
+ ///
+ /// ### Why is this bad?
+ /// Correctness and readability. It's like having a wildcard pattern after
+ /// matching all enum variants explicitly.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct A { a: i32 }
+ /// let a = A { a: 5 };
+ ///
+ /// match a {
+ /// A { a: 5, .. } => {},
+ /// _ => {},
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # struct A { a: i32 }
+ /// # let a = A { a: 5 };
+ /// match a {
+ /// A { a: 5 } => {},
+ /// _ => {},
+ /// }
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub REST_PAT_IN_FULLY_BOUND_STRUCTS,
+ restriction,
+ "a match on a struct that binds all fields but still uses the wildcard pattern"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Lint for redundant pattern matching over `Result`, `Option`,
+ /// `std::task::Poll` or `std::net::IpAddr`
+ ///
+ /// ### Why is this bad?
+ /// It's more concise and clear to just use the proper
+ /// utility function
+ ///
+ /// ### Known problems
+ /// This will change the drop order for the matched type. Both `if let` and
+ /// `while let` will drop the value at the end of the block, both `if` and `while` will drop the
+ /// value before entering the block. For most types this change will not matter, but for a few
+ /// types this will not be an acceptable change (e.g. locks). See the
+ /// [reference](https://doc.rust-lang.org/reference/destructors.html#drop-scopes) for more about
+ /// drop order.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::task::Poll;
+ /// # use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
+ /// if let Ok(_) = Ok::<i32, i32>(42) {}
+ /// if let Err(_) = Err::<i32, i32>(42) {}
+ /// if let None = None::<()> {}
+ /// if let Some(_) = Some(42) {}
+ /// if let Poll::Pending = Poll::Pending::<()> {}
+ /// if let Poll::Ready(_) = Poll::Ready(42) {}
+ /// if let IpAddr::V4(_) = IpAddr::V4(Ipv4Addr::LOCALHOST) {}
+ /// if let IpAddr::V6(_) = IpAddr::V6(Ipv6Addr::LOCALHOST) {}
+ /// match Ok::<i32, i32>(42) {
+ /// Ok(_) => true,
+ /// Err(_) => false,
+ /// };
+ /// ```
+ ///
+ /// The more idiomatic use would be:
+ ///
+ /// ```rust
+ /// # use std::task::Poll;
+ /// # use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
+ /// if Ok::<i32, i32>(42).is_ok() {}
+ /// if Err::<i32, i32>(42).is_err() {}
+ /// if None::<()>.is_none() {}
+ /// if Some(42).is_some() {}
+ /// if Poll::Pending::<()>.is_pending() {}
+ /// if Poll::Ready(42).is_ready() {}
+ /// if IpAddr::V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+ /// if IpAddr::V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+ /// Ok::<i32, i32>(42).is_ok();
+ /// ```
+ #[clippy::version = "1.31.0"]
+ pub REDUNDANT_PATTERN_MATCHING,
+ style,
+ "use the proper utility function avoiding an `if let`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `match` or `if let` expressions producing a
+ /// `bool` that could be written using `matches!`
+ ///
+ /// ### Why is this bad?
+ /// Readability and needless complexity.
+ ///
+ /// ### Known problems
+ /// This lint falsely triggers if there are arms with
+ /// `cfg` attributes that remove an arm evaluating to `false`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = Some(5);
+ ///
+ /// let a = match x {
+ /// Some(0) => true,
+ /// _ => false,
+ /// };
+ ///
+ /// let a = if let Some(0) = x {
+ /// true
+ /// } else {
+ /// false
+ /// };
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x = Some(5);
+ /// let a = matches!(x, Some(0));
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub MATCH_LIKE_MATCHES_MACRO,
+ style,
+ "a match that could be written with the matches! macro"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `match` with identical arm bodies.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a copy & paste error. If arm bodies
+ /// are the same on purpose, you can factor them
+ /// [using `|`](https://doc.rust-lang.org/book/patterns.html#multiple-patterns).
+ ///
+ /// ### Known problems
+ /// False positive possible with order dependent `match`
+ /// (see issue
+ /// [#860](https://github.com/rust-lang/rust-clippy/issues/860)).
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// match foo {
+ /// Bar => bar(),
+ /// Quz => quz(),
+ /// Baz => bar(), // <= oops
+ /// }
+ /// ```
+ ///
+ /// This should probably be
+ /// ```rust,ignore
+ /// match foo {
+ /// Bar => bar(),
+ /// Quz => quz(),
+ /// Baz => baz(), // <= fixed
+ /// }
+ /// ```
+ ///
+ /// or if the original code was not a typo:
+ /// ```rust,ignore
+ /// match foo {
+ /// Bar | Baz => bar(), // <= shows the intent better
+ /// Quz => quz(),
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MATCH_SAME_ARMS,
+ pedantic,
+ "`match` with identical arm bodies"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary `match` or match-like `if let` returns for `Option` and `Result`
+ /// when function signatures are the same.
+ ///
+ /// ### Why is this bad?
+ /// This `match` block does nothing and might not be what the coder intended.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// fn foo() -> Result<(), i32> {
+ /// match result {
+ /// Ok(val) => Ok(val),
+ /// Err(err) => Err(err),
+ /// }
+ /// }
+ ///
+ /// fn bar() -> Option<i32> {
+ /// if let Some(val) = option {
+ /// Some(val)
+ /// } else {
+ /// None
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Could be replaced as
+ ///
+ /// ```rust,ignore
+ /// fn foo() -> Result<(), i32> {
+ /// result
+ /// }
+ ///
+ /// fn bar() -> Option<i32> {
+ /// option
+ /// }
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub NEEDLESS_MATCH,
+ complexity,
+ "`match` or match-like `if let` that are unnecessary"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Finds nested `match` or `if let` expressions where the patterns may be "collapsed" together
+ /// without adding any branches.
+ ///
+ /// Note that this lint is not intended to find _all_ cases where nested match patterns can be merged, but only
+ /// cases where merging would most likely make the code more readable.
+ ///
+ /// ### Why is this bad?
+ /// It is unnecessarily verbose and complex.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn func(opt: Option<Result<u64, String>>) {
+ /// let n = match opt {
+ /// Some(n) => match n {
+ /// Ok(n) => n,
+ /// _ => return,
+ /// }
+ /// None => return,
+ /// };
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn func(opt: Option<Result<u64, String>>) {
+ /// let n = match opt {
+ /// Some(Ok(n)) => n,
+ /// _ => return,
+ /// };
+ /// }
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub COLLAPSIBLE_MATCH,
+ style,
+ "Nested `match` or `if let` expressions where the patterns may be \"collapsed\" together."
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`.
+ ///
+ /// ### Why is this bad?
+ /// Concise code helps focus on behavior instead of boilerplate.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let foo: Option<i32> = None;
+ /// match foo {
+ /// Some(v) => v,
+ /// None => 1,
+ /// };
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let foo: Option<i32> = None;
+ /// foo.unwrap_or(1);
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub MANUAL_UNWRAP_OR,
+ complexity,
+ "finds patterns that can be encoded more concisely with `Option::unwrap_or` or `Result::unwrap_or`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `match vec[idx]` or `match vec[n..m]`.
+ ///
+ /// ### Why is this bad?
+ /// This can panic at runtime.
+ ///
+ /// ### Example
+ /// ```rust, no_run
+ /// let arr = vec![0, 1, 2, 3];
+ /// let idx = 1;
+ ///
+ /// match arr[idx] {
+ /// 0 => println!("{}", 0),
+ /// 1 => println!("{}", 3),
+ /// _ => {},
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust, no_run
+ /// let arr = vec![0, 1, 2, 3];
+ /// let idx = 1;
+ ///
+ /// match arr.get(idx) {
+ /// Some(0) => println!("{}", 0),
+ /// Some(1) => println!("{}", 3),
+ /// _ => {},
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub MATCH_ON_VEC_ITEMS,
+ pedantic,
+ "matching on vector elements can panic"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `match` expressions modifying the case of a string with non-compliant arms
+ ///
+ /// ### Why is this bad?
+ /// The arm is unreachable, which is likely a mistake.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let text = "Foo";
+ /// match &*text.to_ascii_lowercase() {
+ /// "foo" => {},
+ /// "Bar" => {},
+ /// _ => {},
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let text = "Foo";
+ /// match &*text.to_ascii_lowercase() {
+ /// "foo" => {},
+ /// "bar" => {},
+ /// _ => {},
+ /// }
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub MATCH_STR_CASE_MISMATCH,
+ correctness,
+ "creation of a case altering match expression with non-compliant arms"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Check for temporaries returned from function calls in a match scrutinee that have the
+ /// `clippy::has_significant_drop` attribute.
+ ///
+ /// ### Why is this bad?
+ /// The `clippy::has_significant_drop` attribute can be added to types whose Drop impls have
+ /// an important side-effect, such as unlocking a mutex, making it important for users to be
+ /// able to accurately understand their lifetimes. When a temporary is returned in a function
+ /// call in a match scrutinee, its lifetime lasts until the end of the match block, which may
+ /// be surprising.
+ ///
+ /// For `Mutex`es this can lead to a deadlock. This happens when the match scrutinee uses a
+ /// function call that returns a `MutexGuard` and then tries to lock again in one of the match
+ /// arms. In that case the `MutexGuard` in the scrutinee will not be dropped until the end of
+ /// the match block and thus will not unlock.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// # use std::sync::Mutex;
+ /// # struct State {}
+ /// # impl State {
+ /// # fn foo(&self) -> bool {
+ /// # true
+ /// # }
+ /// # fn bar(&self) {}
+ /// # }
+ /// let mutex = Mutex::new(State {});
+ ///
+ /// match mutex.lock().unwrap().foo() {
+ /// true => {
+ /// mutex.lock().unwrap().bar(); // Deadlock!
+ /// }
+ /// false => {}
+ /// };
+ ///
+ /// println!("All done!");
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::sync::Mutex;
+ /// # struct State {}
+ /// # impl State {
+ /// # fn foo(&self) -> bool {
+ /// # true
+ /// # }
+ /// # fn bar(&self) {}
+ /// # }
+ /// let mutex = Mutex::new(State {});
+ ///
+ /// let is_foo = mutex.lock().unwrap().foo();
+ /// match is_foo {
+ /// true => {
+ /// mutex.lock().unwrap().bar();
+ /// }
+ /// false => {}
+ /// };
+ ///
+ /// println!("All done!");
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub SIGNIFICANT_DROP_IN_SCRUTINEE,
+ nursery,
+ "warns when a temporary of a type with a drop with a significant side-effect might have a surprising lifetime"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `Err(x)?`.
+ ///
+ /// ### Why is this bad?
+ /// The `?` operator is designed to allow calls that
+ /// can fail to be easily chained. For example, `foo()?.bar()` or
+ /// `foo(bar()?)`. Because `Err(x)?` can't be used that way (it will
+ /// always return), it is more clear to write `return Err(x)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(fail: bool) -> Result<i32, String> {
+ /// if fail {
+ /// Err("failed")?;
+ /// }
+ /// Ok(0)
+ /// }
+ /// ```
+ /// Could be written:
+ ///
+ /// ```rust
+ /// fn foo(fail: bool) -> Result<i32, String> {
+ /// if fail {
+ /// return Err("failed".into());
+ /// }
+ /// Ok(0)
+ /// }
+ /// ```
+ #[clippy::version = "1.38.0"]
+ pub TRY_ERR,
+ restriction,
+ "return errors explicitly rather than hiding them behind a `?`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `match` which could be implemented using `map`
+ ///
+ /// ### Why is this bad?
+ /// Using the `map` method is clearer and more concise.
+ ///
+ /// ### Example
+ /// ```rust
+ /// match Some(0) {
+ /// Some(x) => Some(x + 1),
+ /// None => None,
+ /// };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// Some(0).map(|x| x + 1);
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub MANUAL_MAP,
+ style,
+ "reimplementation of `map`"
+}
+
+#[derive(Default)]
+pub struct Matches {
+ msrv: Option<RustcVersion>,
+ infallible_destructuring_match_linted: bool,
+}
+
+impl Matches {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self {
+ msrv,
+ ..Matches::default()
+ }
+ }
+}
+
+impl_lint_pass!(Matches => [
+ SINGLE_MATCH,
+ MATCH_REF_PATS,
+ MATCH_BOOL,
+ SINGLE_MATCH_ELSE,
+ MATCH_OVERLAPPING_ARM,
+ MATCH_WILD_ERR_ARM,
+ MATCH_AS_REF,
+ WILDCARD_ENUM_MATCH_ARM,
+ MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
+ WILDCARD_IN_OR_PATTERNS,
+ MATCH_SINGLE_BINDING,
+ INFALLIBLE_DESTRUCTURING_MATCH,
+ REST_PAT_IN_FULLY_BOUND_STRUCTS,
+ REDUNDANT_PATTERN_MATCHING,
+ MATCH_LIKE_MATCHES_MACRO,
+ MATCH_SAME_ARMS,
+ NEEDLESS_MATCH,
+ COLLAPSIBLE_MATCH,
+ MANUAL_UNWRAP_OR,
+ MATCH_ON_VEC_ITEMS,
+ MATCH_STR_CASE_MISMATCH,
+ SIGNIFICANT_DROP_IN_SCRUTINEE,
+ TRY_ERR,
+ MANUAL_MAP,
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Matches {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+ let from_expansion = expr.span.from_expansion();
+
+ if let ExprKind::Match(ex, arms, source) = expr.kind {
+ if source == MatchSource::Normal && !span_starts_with(cx, expr.span, "match") {
+ return;
+ }
+ if matches!(source, MatchSource::Normal | MatchSource::ForLoopDesugar) {
+ significant_drop_in_scrutinee::check(cx, expr, ex, arms, source);
+ }
+
+ collapsible_match::check_match(cx, arms);
+ if !from_expansion {
+ // These don't depend on a relationship between multiple arms
+ match_wild_err_arm::check(cx, ex, arms);
+ wild_in_or_pats::check(cx, arms);
+ }
+
+ if source == MatchSource::TryDesugar {
+ try_err::check(cx, expr, ex);
+ }
+
+ if !from_expansion && !contains_cfg_arm(cx, expr, ex, arms) {
+ if source == MatchSource::Normal {
+ if !(meets_msrv(self.msrv, msrvs::MATCHES_MACRO)
+ && match_like_matches::check_match(cx, expr, ex, arms))
+ {
+ match_same_arms::check(cx, arms);
+ }
+
+ redundant_pattern_match::check_match(cx, expr, ex, arms);
+ single_match::check(cx, ex, arms, expr);
+ match_bool::check(cx, ex, arms, expr);
+ overlapping_arms::check(cx, ex, arms);
+ match_wild_enum::check(cx, ex, arms);
+ match_as_ref::check(cx, ex, arms, expr);
+ needless_match::check_match(cx, ex, arms, expr);
+ match_on_vec_items::check(cx, ex);
+ match_str_case_mismatch::check(cx, ex, arms);
+
+ if !in_constant(cx, expr.hir_id) {
+ manual_unwrap_or::check(cx, expr, ex, arms);
+ manual_map::check_match(cx, expr, ex, arms);
+ }
+
+ if self.infallible_destructuring_match_linted {
+ self.infallible_destructuring_match_linted = false;
+ } else {
+ match_single_binding::check(cx, ex, arms, expr);
+ }
+ }
+ match_ref_pats::check(cx, ex, arms.iter().map(|el| el.pat), expr);
+ }
+ } else if let Some(if_let) = higher::IfLet::hir(cx, expr) {
+ collapsible_match::check_if_let(cx, if_let.let_pat, if_let.if_then, if_let.if_else);
+ if !from_expansion {
+ if let Some(else_expr) = if_let.if_else {
+ if meets_msrv(self.msrv, msrvs::MATCHES_MACRO) {
+ match_like_matches::check_if_let(
+ cx,
+ expr,
+ if_let.let_pat,
+ if_let.let_expr,
+ if_let.if_then,
+ else_expr,
+ );
+ }
+ if !in_constant(cx, expr.hir_id) {
+ manual_map::check_if_let(cx, expr, if_let.let_pat, if_let.let_expr, if_let.if_then, else_expr);
+ }
+ }
+ redundant_pattern_match::check_if_let(
+ cx,
+ expr,
+ if_let.let_pat,
+ if_let.let_expr,
+ if_let.if_else.is_some(),
+ );
+ needless_match::check_if_let(cx, expr, &if_let);
+ }
+ } else if !from_expansion {
+ redundant_pattern_match::check(cx, expr);
+ }
+ }
+
+ fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'_>) {
+ self.infallible_destructuring_match_linted |=
+ local.els.is_none() && infallible_destructuring_match::check(cx, local);
+ }
+
+ fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
+ rest_pat_in_fully_bound_struct::check(cx, pat);
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+/// Checks if there are any arms with a `#[cfg(..)]` attribute.
+fn contains_cfg_arm(cx: &LateContext<'_>, e: &Expr<'_>, scrutinee: &Expr<'_>, arms: &[Arm<'_>]) -> bool {
+ let Some(scrutinee_span) = walk_span_to_context(scrutinee.span, SyntaxContext::root()) else {
+ // Shouldn't happen, but treat this as though a `cfg` attribute were found
+ return true;
+ };
+
+ let start = scrutinee_span.hi();
+ let mut arm_spans = arms.iter().map(|arm| {
+ let data = arm.span.data();
+ (data.ctxt == SyntaxContext::root()).then_some((data.lo, data.hi))
+ });
+ let end = e.span.hi();
+
+ // Walk through all the non-code space before each match arm. The space trailing the final arm is
+ // handled after the `try_fold` e.g.
+ //
+ // match foo {
+ // _________^- everything between the scrutinee and arm1
+ //| arm1 => (),
+ //|---^___________^ everything before arm2
+ //| #[cfg(feature = "enabled")]
+ //| arm2 => some_code(),
+ //|---^____________________^ everything before arm3
+ //| // some comment about arm3
+ //| arm3 => some_code(),
+ //|---^____________________^ everything after arm3
+ //| #[cfg(feature = "disabled")]
+ //| arm4 = some_code(),
+ //|};
+ //|^
+ let found = arm_spans.try_fold(start, |start, range| {
+ let Some((end, next_start)) = range else {
+ // Shouldn't happen as macros can't expand to match arms, but treat this as though a `cfg` attribute were
+ // found.
+ return Err(());
+ };
+ let span = SpanData {
+ lo: start,
+ hi: end,
+ ctxt: SyntaxContext::root(),
+ parent: None,
+ }
+ .span();
+ (!span_contains_cfg(cx, span)).then_some(next_start).ok_or(())
+ });
+ match found {
+ Ok(start) => {
+ let span = SpanData {
+ lo: start,
+ hi: end,
+ ctxt: SyntaxContext::root(),
+ parent: None,
+ }
+ .span();
+ span_contains_cfg(cx, span)
+ },
+ Err(()) => true,
+ }
+}
+
+/// Checks if the given span contains a `#[cfg(..)]` attribute
+fn span_contains_cfg(cx: &LateContext<'_>, s: Span) -> bool {
+ let Some(snip) = snippet_opt(cx, s) else {
+ // Assume true. This would require either an invalid span, or one which crosses file boundaries.
+ return true;
+ };
+ let mut pos = 0usize;
+ let mut iter = tokenize(&snip).map(|t| {
+ let start = pos;
+ pos += t.len as usize;
+ (t.kind, start..pos)
+ });
+
+ // Search for the token sequence [`#`, `[`, `cfg`]
+ while iter.any(|(t, _)| matches!(t, TokenKind::Pound)) {
+ let mut iter = iter.by_ref().skip_while(|(t, _)| {
+ matches!(
+ t,
+ TokenKind::Whitespace | TokenKind::LineComment { .. } | TokenKind::BlockComment { .. }
+ )
+ });
+ if matches!(iter.next(), Some((TokenKind::OpenBracket, _)))
+ && matches!(iter.next(), Some((TokenKind::Ident, range)) if &snip[range.clone()] == "cfg")
+ {
+ return true;
+ }
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/needless_match.rs b/src/tools/clippy/clippy_lints/src/matches/needless_match.rs
new file mode 100644
index 000000000..fa19cddd3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/needless_match.rs
@@ -0,0 +1,207 @@
+use super::NEEDLESS_MATCH;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::{is_type_diagnostic_item, same_type_and_consts};
+use clippy_utils::{
+ eq_expr_value, get_parent_expr_for_hir, get_parent_node, higher, is_else_clause, is_lang_ctor, over,
+ peel_blocks_with_stmt,
+};
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::OptionNone;
+use rustc_hir::{Arm, BindingAnnotation, Expr, ExprKind, FnRetTy, Node, Pat, PatKind, Path, QPath};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+use rustc_typeck::hir_ty_to_ty;
+
+pub(crate) fn check_match(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
+ if arms.len() > 1 && expr_ty_matches_p_ty(cx, ex, expr) && check_all_arms(cx, ex, arms) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_MATCH,
+ expr.span,
+ "this match expression is unnecessary",
+ "replace it with",
+ snippet_with_applicability(cx, ex.span, "..", &mut applicability).to_string(),
+ applicability,
+ );
+ }
+}
+
+/// Checks for a no-op `if let` expression that amounts to an unnecessary `match`
+///
+/// ```rust,ignore
+/// if let Some(a) = option {
+/// Some(a)
+/// } else {
+/// None
+/// }
+/// ```
+/// OR
+/// ```rust,ignore
+/// if let SomeEnum::A = some_enum {
+/// SomeEnum::A
+/// } else if let SomeEnum::B = some_enum {
+/// SomeEnum::B
+/// } else {
+/// some_enum
+/// }
+/// ```
+pub(crate) fn check_if_let<'tcx>(cx: &LateContext<'tcx>, ex: &Expr<'_>, if_let: &higher::IfLet<'tcx>) {
+ if !is_else_clause(cx.tcx, ex) && expr_ty_matches_p_ty(cx, if_let.let_expr, ex) && check_if_let_inner(cx, if_let) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_MATCH,
+ ex.span,
+ "this if-let expression is unnecessary",
+ "replace it with",
+ snippet_with_applicability(cx, if_let.let_expr.span, "..", &mut applicability).to_string(),
+ applicability,
+ );
+ }
+}
+
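+// A match is a no-op when every arm's body just rebuilds its own pattern as an
+// expression, and any wildcard arm returns the scrutinee unchanged.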
+fn check_all_arms(cx: &LateContext<'_>, match_expr: &Expr<'_>, arms: &[Arm<'_>]) -> bool {
+ for arm in arms {
+ let arm_expr = peel_blocks_with_stmt(arm.body);
+ if let PatKind::Wild = arm.pat.kind {
+ return eq_expr_value(cx, match_expr, strip_return(arm_expr));
+ } else if !pat_same_as_expr(arm.pat, arm_expr) {
+ return false;
+ }
+ }
+
+ true
+}
+
+fn check_if_let_inner(cx: &LateContext<'_>, if_let: &higher::IfLet<'_>) -> bool {
+ if let Some(if_else) = if_let.if_else {
+ if !pat_same_as_expr(if_let.let_pat, peel_blocks_with_stmt(if_let.if_then)) {
+ return false;
+ }
+
+ // Recursively check each `else if let` branch.
+ if let Some(ref nested_if_let) = higher::IfLet::hir(cx, if_else) {
+ return check_if_let_inner(cx, nested_if_let);
+ }
+
+ if matches!(if_else.kind, ExprKind::Block(..)) {
+ let else_expr = peel_blocks_with_stmt(if_else);
+ if matches!(else_expr.kind, ExprKind::Block(..)) {
+ return false;
+ }
+ let ret = strip_return(else_expr);
+ let let_expr_ty = cx.typeck_results().expr_ty(if_let.let_expr);
+ if is_type_diagnostic_item(cx, let_expr_ty, sym::Option) {
+ if let ExprKind::Path(ref qpath) = ret.kind {
+ return is_lang_ctor(cx, qpath, OptionNone) || eq_expr_value(cx, if_let.let_expr, ret);
+ }
+ return false;
+ }
+ return eq_expr_value(cx, if_let.let_expr, ret);
+ }
+ }
+
+ false
+}
+
+/// Strip `return` keyword if the expression type is `ExprKind::Ret`.
+fn strip_return<'hir>(expr: &'hir Expr<'hir>) -> &'hir Expr<'hir> {
+ if let ExprKind::Ret(Some(ret)) = expr.kind {
+ ret
+ } else {
+ expr
+ }
+}
+
+/// Manually check for coercion casting by checking if the type of the match operand or let expr
+/// differs from that of the assigned local variable or the function return type.
+fn expr_ty_matches_p_ty(cx: &LateContext<'_>, expr: &Expr<'_>, p_expr: &Expr<'_>) -> bool {
+ if let Some(p_node) = get_parent_node(cx.tcx, p_expr.hir_id) {
+ match p_node {
+ // Compare match_expr ty with local in `let local = match match_expr {..}`
+ Node::Local(local) => {
+ let results = cx.typeck_results();
+ return same_type_and_consts(results.node_type(local.hir_id), results.expr_ty(expr));
+ },
+ // compare match_expr ty with RetTy in `fn foo() -> RetTy`
+ Node::Item(..) => {
+ if let Some(fn_decl) = p_node.fn_decl() {
+ if let FnRetTy::Return(ret_ty) = fn_decl.output {
+ return same_type_and_consts(hir_ty_to_ty(cx.tcx, ret_ty), cx.typeck_results().expr_ty(expr));
+ }
+ }
+ },
+ // check the parent expr for this whole block `{ match match_expr {..} }`
+ Node::Block(block) => {
+ if let Some(block_parent_expr) = get_parent_expr_for_hir(cx, block.hir_id) {
+ return expr_ty_matches_p_ty(cx, expr, block_parent_expr);
+ }
+ },
+ // recursively call on `if xxx {..}` etc.
+ Node::Expr(p_expr) => {
+ return expr_ty_matches_p_ty(cx, expr, p_expr);
+ },
+ _ => {},
+ }
+ }
+ false
+}
+
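+// Returns true when the pattern and the (return-stripped) expression spell out the
+// same thing: matching tuple-struct constructors, plain bindings, paths, or literals.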
+fn pat_same_as_expr(pat: &Pat<'_>, expr: &Expr<'_>) -> bool {
+ let expr = strip_return(expr);
+ match (&pat.kind, &expr.kind) {
+ // Example: `Some(val) => Some(val)`
+ (PatKind::TupleStruct(QPath::Resolved(_, path), tuple_params, _), ExprKind::Call(call_expr, call_params)) => {
+ if let ExprKind::Path(QPath::Resolved(_, call_path)) = call_expr.kind {
+ return over(path.segments, call_path.segments, |pat_seg, call_seg| {
+ pat_seg.ident.name == call_seg.ident.name
+ }) && same_non_ref_symbols(tuple_params, call_params);
+ }
+ },
+ // Example: `val => val`
+ (
+ PatKind::Binding(annot, _, pat_ident, _),
+ ExprKind::Path(QPath::Resolved(
+ _,
+ Path {
+ segments: [first_seg, ..],
+ ..
+ },
+ )),
+ ) => {
+ return !matches!(annot, BindingAnnotation::Ref | BindingAnnotation::RefMut)
+ && pat_ident.name == first_seg.ident.name;
+ },
+ // Example: `Custom::TypeA => Custom::TypeB`, or `None => None`
+ (PatKind::Path(QPath::Resolved(_, p_path)), ExprKind::Path(QPath::Resolved(_, e_path))) => {
+ return over(p_path.segments, e_path.segments, |p_seg, e_seg| {
+ p_seg.ident.name == e_seg.ident.name
+ });
+ },
+ // Example: `5 => 5`
+ (PatKind::Lit(pat_lit_expr), ExprKind::Lit(expr_spanned)) => {
+ if let ExprKind::Lit(pat_spanned) = &pat_lit_expr.kind {
+ return pat_spanned.node == expr_spanned.node;
+ }
+ },
+ _ => {},
+ }
+
+ false
+}
+
+fn same_non_ref_symbols(pats: &[Pat<'_>], exprs: &[Expr<'_>]) -> bool {
+ if pats.len() != exprs.len() {
+ return false;
+ }
+
+ for i in 0..pats.len() {
+ if !pat_same_as_expr(&pats[i], &exprs[i]) {
+ return false;
+ }
+ }
+
+ true
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs b/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs
new file mode 100644
index 000000000..ae69ca8a3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs
@@ -0,0 +1,194 @@
+use clippy_utils::consts::{constant, constant_full_int, miri_to_const, FullInt};
+use clippy_utils::diagnostics::span_lint_and_note;
+use core::cmp::Ordering;
+use rustc_hir::{Arm, Expr, PatKind, RangeEnd};
+use rustc_lint::LateContext;
+use rustc_middle::mir;
+use rustc_middle::ty::Ty;
+use rustc_span::Span;
+
+use super::MATCH_OVERLAPPING_ARM;
+
+pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) {
+ if arms.len() >= 2 && cx.typeck_results().expr_ty(ex).is_integral() {
+ let ranges = all_ranges(cx, arms, cx.typeck_results().expr_ty(ex));
+ if !ranges.is_empty() {
+ if let Some((start, end)) = overlapping(&ranges) {
+ span_lint_and_note(
+ cx,
+ MATCH_OVERLAPPING_ARM,
+ start.span,
+ "some ranges overlap",
+ Some(end.span),
+ "overlaps with this",
+ );
+ }
+ }
+ }
+}
+
+/// Gets the ranges for each range pattern arm. Applies `ty` bounds for open ranges.
+fn all_ranges<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>], ty: Ty<'tcx>) -> Vec<SpannedRange<FullInt>> {
+ arms.iter()
+ .filter_map(|arm| {
+ if let Arm { pat, guard: None, .. } = *arm {
+ if let PatKind::Range(ref lhs, ref rhs, range_end) = pat.kind {
+ let lhs_const = match lhs {
+ Some(lhs) => constant(cx, cx.typeck_results(), lhs)?.0,
+ None => {
+ let min_val_const = ty.numeric_min_val(cx.tcx)?;
+ let min_constant = mir::ConstantKind::from_value(
+ cx.tcx.valtree_to_const_val((ty, min_val_const.to_valtree())),
+ ty,
+ );
+ miri_to_const(cx.tcx, min_constant)?
+ },
+ };
+ let rhs_const = match rhs {
+ Some(rhs) => constant(cx, cx.typeck_results(), rhs)?.0,
+ None => {
+ let max_val_const = ty.numeric_max_val(cx.tcx)?;
+ let max_constant = mir::ConstantKind::from_value(
+ cx.tcx.valtree_to_const_val((ty, max_val_const.to_valtree())),
+ ty,
+ );
+ miri_to_const(cx.tcx, max_constant)?
+ },
+ };
+ let lhs_val = lhs_const.int_value(cx, ty)?;
+ let rhs_val = rhs_const.int_value(cx, ty)?;
+ let rhs_bound = match range_end {
+ RangeEnd::Included => EndBound::Included(rhs_val),
+ RangeEnd::Excluded => EndBound::Excluded(rhs_val),
+ };
+ return Some(SpannedRange {
+ span: pat.span,
+ node: (lhs_val, rhs_bound),
+ });
+ }
+
+ if let PatKind::Lit(value) = pat.kind {
+ let value = constant_full_int(cx, cx.typeck_results(), value)?;
+ return Some(SpannedRange {
+ span: pat.span,
+ node: (value, EndBound::Included(value)),
+ });
+ }
+ }
+ None
+ })
+ .collect()
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum EndBound<T> {
+ Included(T),
+ Excluded(T),
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct SpannedRange<T> {
+ pub span: Span,
+ pub node: (T, EndBound<T>),
+}
+
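+// Sorts all range endpoints and sweeps through them; if a range ends while another
+// range that started after it is still open, the two overlap.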
+fn overlapping<T>(ranges: &[SpannedRange<T>]) -> Option<(&SpannedRange<T>, &SpannedRange<T>)>
+where
+ T: Copy + Ord,
+{
+ #[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
+ enum BoundKind {
+ EndExcluded,
+ Start,
+ EndIncluded,
+ }
+
+ #[derive(Copy, Clone, Debug, Eq, PartialEq)]
+ struct RangeBound<'a, T>(T, BoundKind, &'a SpannedRange<T>);
+
+ impl<'a, T: Copy + Ord> PartialOrd for RangeBound<'a, T> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+ }
+
+ impl<'a, T: Copy + Ord> Ord for RangeBound<'a, T> {
+ fn cmp(&self, RangeBound(other_value, other_kind, _): &Self) -> Ordering {
+ let RangeBound(self_value, self_kind, _) = *self;
+ (self_value, self_kind).cmp(&(*other_value, *other_kind))
+ }
+ }
+
+ let mut values = Vec::with_capacity(2 * ranges.len());
+
+ for r @ SpannedRange { node: (start, end), .. } in ranges {
+ values.push(RangeBound(*start, BoundKind::Start, r));
+ values.push(match end {
+ EndBound::Excluded(val) => RangeBound(*val, BoundKind::EndExcluded, r),
+ EndBound::Included(val) => RangeBound(*val, BoundKind::EndIncluded, r),
+ });
+ }
+
+ values.sort();
+
+ let mut started = vec![];
+
+ for RangeBound(_, kind, range) in values {
+ match kind {
+ BoundKind::Start => started.push(range),
+ BoundKind::EndExcluded | BoundKind::EndIncluded => {
+ let mut overlap = None;
+
+ while let Some(last_started) = started.pop() {
+ if last_started == range {
+ break;
+ }
+ overlap = Some(last_started);
+ }
+
+ if let Some(first_overlapping) = overlap {
+ return Some((range, first_overlapping));
+ }
+ },
+ }
+ }
+
+ None
+}
+
+#[test]
+fn test_overlapping() {
+ use rustc_span::source_map::DUMMY_SP;
+
+ let sp = |s, e| SpannedRange {
+ span: DUMMY_SP,
+ node: (s, e),
+ };
+
+ assert_eq!(None, overlapping::<u8>(&[]));
+ assert_eq!(None, overlapping(&[sp(1, EndBound::Included(4))]));
+ assert_eq!(
+ None,
+ overlapping(&[sp(1, EndBound::Included(4)), sp(5, EndBound::Included(6))])
+ );
+ assert_eq!(
+ None,
+ overlapping(&[
+ sp(1, EndBound::Included(4)),
+ sp(5, EndBound::Included(6)),
+ sp(10, EndBound::Included(11))
+ ],)
+ );
+ assert_eq!(
+ Some((&sp(1, EndBound::Included(4)), &sp(3, EndBound::Included(6)))),
+ overlapping(&[sp(1, EndBound::Included(4)), sp(3, EndBound::Included(6))])
+ );
+ assert_eq!(
+ Some((&sp(5, EndBound::Included(6)), &sp(6, EndBound::Included(11)))),
+ overlapping(&[
+ sp(1, EndBound::Included(4)),
+ sp(5, EndBound::Included(6)),
+ sp(6, EndBound::Included(11))
+ ],)
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
new file mode 100644
index 000000000..8499e050a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
@@ -0,0 +1,304 @@
+use super::REDUNDANT_PATTERN_MATCHING;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::needs_ordered_drop;
+use clippy_utils::visitors::any_temporaries_need_ordered_drop;
+use clippy_utils::{higher, is_lang_ctor, is_trait_method, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{OptionNone, PollPending};
+use rustc_hir::{Arm, Expr, ExprKind, Node, Pat, PatKind, QPath, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, subst::GenericArgKind, DefIdTree, Ty};
+use rustc_span::sym;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let Some(higher::WhileLet { let_pat, let_expr, .. }) = higher::WhileLet::hir(expr) {
+ find_sugg_for_if_let(cx, expr, let_pat, let_expr, "while", false);
+ }
+}
+
+pub(super) fn check_if_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ pat: &'tcx Pat<'_>,
+ scrutinee: &'tcx Expr<'_>,
+ has_else: bool,
+) {
+ find_sugg_for_if_let(cx, expr, pat, scrutinee, "if", has_else);
+}
+
+// Extract the generic argument at `index` out of a type
+fn try_get_generic_ty(ty: Ty<'_>, index: usize) -> Option<Ty<'_>> {
+ if_chain! {
+ if let ty::Adt(_, subs) = ty.kind();
+ if let Some(sub) = subs.get(index);
+ if let GenericArgKind::Type(sub_ty) = sub.unpack();
+ then {
+ Some(sub_ty)
+ } else {
+ None
+ }
+ }
+}
+
+fn find_sugg_for_if_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ let_pat: &Pat<'_>,
+ let_expr: &'tcx Expr<'_>,
+ keyword: &'static str,
+ has_else: bool,
+) {
+ // also look inside refs
+ // if we have &None for example, peel it so we can detect "if let None = x"
+ let check_pat = match let_pat.kind {
+ PatKind::Ref(inner, _mutability) => inner,
+ _ => let_pat,
+ };
+ let op_ty = cx.typeck_results().expr_ty(let_expr);
+ // Determine which function should be used, and the type contained by the corresponding
+ // variant.
+ let (good_method, inner_ty) = match check_pat.kind {
+ PatKind::TupleStruct(ref qpath, [sub_pat], _) => {
+ if let PatKind::Wild = sub_pat.kind {
+ let res = cx.typeck_results().qpath_res(qpath, check_pat.hir_id);
+ let Some(id) = res.opt_def_id().map(|ctor_id| cx.tcx.parent(ctor_id)) else { return };
+ let lang_items = cx.tcx.lang_items();
+ if Some(id) == lang_items.result_ok_variant() {
+ ("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty))
+ } else if Some(id) == lang_items.result_err_variant() {
+ ("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty))
+ } else if Some(id) == lang_items.option_some_variant() {
+ ("is_some()", op_ty)
+ } else if Some(id) == lang_items.poll_ready_variant() {
+ ("is_ready()", op_ty)
+ } else if match_def_path(cx, id, &paths::IPADDR_V4) {
+ ("is_ipv4()", op_ty)
+ } else if match_def_path(cx, id, &paths::IPADDR_V6) {
+ ("is_ipv6()", op_ty)
+ } else {
+ return;
+ }
+ } else {
+ return;
+ }
+ },
+ PatKind::Path(ref path) => {
+ let method = if is_lang_ctor(cx, path, OptionNone) {
+ "is_none()"
+ } else if is_lang_ctor(cx, path, PollPending) {
+ "is_pending()"
+ } else {
+ return;
+ };
+ // `None` and `Pending` don't have an inner type.
+ (method, cx.tcx.types.unit)
+ },
+ _ => return,
+ };
+
+ // If this is the last expression in a block or there is an else clause then the whole
+ // type needs to be considered, not just the inner type of the branch being matched on.
+ // Note the last expression in a block is dropped after all local bindings.
+ let check_ty = if has_else
+ || (keyword == "if" && matches!(cx.tcx.hir().parent_iter(expr.hir_id).next(), Some((_, Node::Block(..)))))
+ {
+ op_ty
+ } else {
+ inner_ty
+ };
+
+ // All temporaries created in the scrutinee expression are dropped at the same time as the
+ // scrutinee would be, so they have to be considered as well.
+ // e.g. in `if let Some(x) = foo.lock().unwrap().baz.as_ref() { .. }` the lock will be held
+ // for the duration of the `if` body.
+ let needs_drop = needs_ordered_drop(cx, check_ty) || any_temporaries_need_ordered_drop(cx, let_expr);
+
+ // check that `while_let_on_iterator` lint does not trigger
+ if_chain! {
+ if keyword == "while";
+ if let ExprKind::MethodCall(method_path, _, _) = let_expr.kind;
+ if method_path.ident.name == sym::next;
+ if is_trait_method(cx, let_expr, sym::Iterator);
+ then {
+ return;
+ }
+ }
+
+ let result_expr = match &let_expr.kind {
+ ExprKind::AddrOf(_, _, borrowed) => borrowed,
+ ExprKind::Unary(UnOp::Deref, deref) => deref,
+ _ => let_expr,
+ };
+
+ span_lint_and_then(
+ cx,
+ REDUNDANT_PATTERN_MATCHING,
+ let_pat.span,
+ &format!("redundant pattern matching, consider using `{}`", good_method),
+ |diag| {
+ // if/while let ... = ... { ... }
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ let expr_span = expr.span;
+
+ // if/while let ... = ... { ... }
+ // ^^^
+ let op_span = result_expr.span.source_callsite();
+
+ // if/while let ... = ... { ... }
+ // ^^^^^^^^^^^^^^^^^^^
+ let span = expr_span.until(op_span.shrink_to_hi());
+
+ let app = if needs_drop {
+ Applicability::MaybeIncorrect
+ } else {
+ Applicability::MachineApplicable
+ };
+
+ let sugg = Sugg::hir_with_macro_callsite(cx, result_expr, "_")
+ .maybe_par()
+ .to_string();
+
+ diag.span_suggestion(span, "try this", format!("{} {}.{}", keyword, sugg, good_method), app);
+
+ if needs_drop {
+ diag.note("this will change drop order of the result, as well as all temporaries");
+ diag.note("add `#[allow(clippy::redundant_pattern_matching)]` if this is important");
+ }
+ },
+ );
+}
+
+pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op: &Expr<'_>, arms: &[Arm<'_>]) {
+ if arms.len() == 2 {
+ let node_pair = (&arms[0].pat.kind, &arms[1].pat.kind);
+
+ let found_good_method = match node_pair {
+ (
+ PatKind::TupleStruct(ref path_left, patterns_left, _),
+ PatKind::TupleStruct(ref path_right, patterns_right, _),
+ ) if patterns_left.len() == 1 && patterns_right.len() == 1 => {
+ if let (PatKind::Wild, PatKind::Wild) = (&patterns_left[0].kind, &patterns_right[0].kind) {
+ find_good_method_for_match(
+ cx,
+ arms,
+ path_left,
+ path_right,
+ &paths::RESULT_OK,
+ &paths::RESULT_ERR,
+ "is_ok()",
+ "is_err()",
+ )
+ .or_else(|| {
+ find_good_method_for_match(
+ cx,
+ arms,
+ path_left,
+ path_right,
+ &paths::IPADDR_V4,
+ &paths::IPADDR_V6,
+ "is_ipv4()",
+ "is_ipv6()",
+ )
+ })
+ } else {
+ None
+ }
+ },
+ (PatKind::TupleStruct(ref path_left, patterns, _), PatKind::Path(ref path_right))
+ | (PatKind::Path(ref path_left), PatKind::TupleStruct(ref path_right, patterns, _))
+ if patterns.len() == 1 =>
+ {
+ if let PatKind::Wild = patterns[0].kind {
+ find_good_method_for_match(
+ cx,
+ arms,
+ path_left,
+ path_right,
+ &paths::OPTION_SOME,
+ &paths::OPTION_NONE,
+ "is_some()",
+ "is_none()",
+ )
+ .or_else(|| {
+ find_good_method_for_match(
+ cx,
+ arms,
+ path_left,
+ path_right,
+ &paths::POLL_READY,
+ &paths::POLL_PENDING,
+ "is_ready()",
+ "is_pending()",
+ )
+ })
+ } else {
+ None
+ }
+ },
+ _ => None,
+ };
+
+ if let Some(good_method) = found_good_method {
+ let span = expr.span.to(op.span);
+ let result_expr = match &op.kind {
+ ExprKind::AddrOf(_, _, borrowed) => borrowed,
+ _ => op,
+ };
+ span_lint_and_then(
+ cx,
+ REDUNDANT_PATTERN_MATCHING,
+ expr.span,
+ &format!("redundant pattern matching, consider using `{}`", good_method),
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "try this",
+ format!("{}.{}", snippet(cx, result_expr.span, "_"), good_method),
+ Applicability::MaybeIncorrect, // snippet
+ );
+ },
+ );
+ }
+ }
+}
+
+#[expect(clippy::too_many_arguments)]
+fn find_good_method_for_match<'a>(
+ cx: &LateContext<'_>,
+ arms: &[Arm<'_>],
+ path_left: &QPath<'_>,
+ path_right: &QPath<'_>,
+ expected_left: &[&str],
+ expected_right: &[&str],
+ should_be_left: &'a str,
+ should_be_right: &'a str,
+) -> Option<&'a str> {
+ let left_id = cx
+ .typeck_results()
+ .qpath_res(path_left, arms[0].pat.hir_id)
+ .opt_def_id()?;
+ let right_id = cx
+ .typeck_results()
+ .qpath_res(path_right, arms[1].pat.hir_id)
+ .opt_def_id()?;
+ let body_node_pair = if match_def_path(cx, left_id, expected_left) && match_def_path(cx, right_id, expected_right) {
+ (&arms[0].body.kind, &arms[1].body.kind)
+ } else if match_def_path(cx, right_id, expected_left) && match_def_path(cx, right_id, expected_right) {
+ (&arms[1].body.kind, &arms[0].body.kind)
+ } else {
+ return None;
+ };
+
+ match body_node_pair {
+ (ExprKind::Lit(ref lit_left), ExprKind::Lit(ref lit_right)) => match (&lit_left.node, &lit_right.node) {
+ (LitKind::Bool(true), LitKind::Bool(false)) => Some(should_be_left),
+ (LitKind::Bool(false), LitKind::Bool(true)) => Some(should_be_right),
+ _ => None,
+ },
+ _ => None,
+ }
+}
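As a rough illustration of the rewrites driven by `find_sugg_for_if_let` and `check_match` above (variable names and values are made up; the replacements follow the `good_method` strings in the code):

    let opt: Option<u32> = Some(1);
    if let Some(_) = opt { /* ... */ }     // suggested: if opt.is_some() { /* ... */ }

    let res: Result<u32, ()> = Ok(1);
    let ok = match res {
        Ok(_) => true,
        Err(_) => false,
    };                                     // suggested: let ok = res.is_ok();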
diff --git a/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs b/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
new file mode 100644
index 000000000..0aadb482a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
@@ -0,0 +1,30 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{Pat, PatKind, QPath};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::REST_PAT_IN_FULLY_BOUND_STRUCTS;
+
+pub(crate) fn check(cx: &LateContext<'_>, pat: &Pat<'_>) {
+ if_chain! {
+ if !pat.span.from_expansion();
+ if let PatKind::Struct(QPath::Resolved(_, path), fields, true) = pat.kind;
+ if let Some(def_id) = path.res.opt_def_id();
+ let ty = cx.tcx.type_of(def_id);
+ if let ty::Adt(def, _) = ty.kind();
+ if def.is_struct() || def.is_union();
+ if fields.len() == def.non_enum_variant().fields.len();
+ if !def.non_enum_variant().is_field_list_non_exhaustive();
+
+ then {
+ span_lint_and_help(
+ cx,
+ REST_PAT_IN_FULLY_BOUND_STRUCTS,
+ pat.span,
+ "unnecessary use of `..` pattern in struct binding. All fields were already bound",
+ None,
+ "consider removing `..` from this binding",
+ );
+ }
+ }
+}
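A small sketch of the pattern this check fires on: every field is already bound, so the trailing `..` adds nothing (struct and field names are illustrative).

    struct Point { x: i32, y: i32 }

    fn describe(p: Point) {
        match p {
            Point { x, y, .. } => println!("{x}, {y}"),   // `..` is unnecessary: all fields are bound
        }
    }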
diff --git a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
new file mode 100644
index 000000000..b0b15b3f5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
@@ -0,0 +1,400 @@
+use crate::FxHashSet;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::{indent_of, snippet};
+use clippy_utils::{get_attr, is_lint_allowed};
+use rustc_errors::{Applicability, Diagnostic};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{Arm, Expr, ExprKind, MatchSource};
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::ty::subst::GenericArgKind;
+use rustc_middle::ty::{Ty, TypeAndMut};
+use rustc_span::Span;
+
+use super::SIGNIFICANT_DROP_IN_SCRUTINEE;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ scrutinee: &'tcx Expr<'_>,
+ arms: &'tcx [Arm<'_>],
+ source: MatchSource,
+) {
+ if is_lint_allowed(cx, SIGNIFICANT_DROP_IN_SCRUTINEE, expr.hir_id) {
+ return;
+ }
+
+ if let Some((suggestions, message)) = has_significant_drop_in_scrutinee(cx, scrutinee, source) {
+ for found in suggestions {
+ span_lint_and_then(cx, SIGNIFICANT_DROP_IN_SCRUTINEE, found.found_span, message, |diag| {
+ set_diagnostic(diag, cx, expr, found);
+ let s = Span::new(expr.span.hi(), expr.span.hi(), expr.span.ctxt(), None);
+ diag.span_label(s, "temporary lives until here");
+ for span in has_significant_drop_in_arms(cx, arms) {
+ diag.span_label(span, "another value with significant `Drop` created here");
+ }
+ diag.note("this might lead to deadlocks or other unexpected behavior");
+ });
+ }
+ }
+}
+
+fn set_diagnostic<'tcx>(diag: &mut Diagnostic, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, found: FoundSigDrop) {
+ if found.lint_suggestion == LintSuggestion::MoveAndClone {
+ // If our suggestion is to move and clone, then we want to leave it to the user to
+ // decide how to address this lint, since it may be that cloning is inappropriate.
+ // Therefore, we won't emit a suggestion.
+ return;
+ }
+
+ let original = snippet(cx, found.found_span, "..");
+ let trailing_indent = " ".repeat(indent_of(cx, found.found_span).unwrap_or(0));
+
+ let replacement = if found.lint_suggestion == LintSuggestion::MoveAndDerefToCopy {
+ format!("let value = *{};\n{}", original, trailing_indent)
+ } else if found.is_unit_return_val {
+ // If the return value of the expression to be moved is unit, then we don't need to
+ // capture the result in a temporary -- we can just replace it completely with `()`.
+ format!("{};\n{}", original, trailing_indent)
+ } else {
+ format!("let value = {};\n{}", original, trailing_indent)
+ };
+
+ let suggestion_message = if found.lint_suggestion == LintSuggestion::MoveOnly {
+ "try moving the temporary above the match"
+ } else {
+ "try moving the temporary above the match and create a copy"
+ };
+
+ let scrutinee_replacement = if found.is_unit_return_val {
+ "()".to_owned()
+ } else {
+ "value".to_owned()
+ };
+
+ diag.multipart_suggestion(
+ suggestion_message,
+ vec![
+ (expr.span.shrink_to_lo(), replacement),
+ (found.found_span, scrutinee_replacement),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+}
+
+/// If the expression is an `ExprKind::Match`, check if the scrutinee has a significant drop that
+/// may have a surprising lifetime.
+fn has_significant_drop_in_scrutinee<'tcx, 'a>(
+ cx: &'a LateContext<'tcx>,
+ scrutinee: &'tcx Expr<'tcx>,
+ source: MatchSource,
+) -> Option<(Vec<FoundSigDrop>, &'static str)> {
+ let mut helper = SigDropHelper::new(cx);
+ let scrutinee = match (source, &scrutinee.kind) {
+ (MatchSource::ForLoopDesugar, ExprKind::Call(_, [e])) => e,
+ _ => scrutinee,
+ };
+ helper.find_sig_drop(scrutinee).map(|drops| {
+ let message = if source == MatchSource::Normal {
+ "temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression"
+ } else {
+ "temporary with significant `Drop` in `for` loop condition will live until the end of the `for` expression"
+ };
+ (drops, message)
+ })
+}
+
+struct SigDropChecker<'a, 'tcx> {
+ seen_types: FxHashSet<Ty<'tcx>>,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> SigDropChecker<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> SigDropChecker<'a, 'tcx> {
+ SigDropChecker {
+ seen_types: FxHashSet::default(),
+ cx,
+ }
+ }
+
+ fn get_type(&self, ex: &'tcx Expr<'_>) -> Ty<'tcx> {
+ self.cx.typeck_results().expr_ty(ex)
+ }
+
+ fn has_seen_type(&mut self, ty: Ty<'tcx>) -> bool {
+ !self.seen_types.insert(ty)
+ }
+
+ fn has_sig_drop_attr(&mut self, cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ if let Some(adt) = ty.ty_adt_def() {
+ if get_attr(cx.sess(), cx.tcx.get_attrs_unchecked(adt.did()), "has_significant_drop").count() > 0 {
+ return true;
+ }
+ }
+
+ match ty.kind() {
+ rustc_middle::ty::Adt(a, b) => {
+ for f in a.all_fields() {
+ let ty = f.ty(cx.tcx, b);
+ if !self.has_seen_type(ty) && self.has_sig_drop_attr(cx, ty) {
+ return true;
+ }
+ }
+
+ for generic_arg in b.iter() {
+ if let GenericArgKind::Type(ty) = generic_arg.unpack() {
+ if self.has_sig_drop_attr(cx, ty) {
+ return true;
+ }
+ }
+ }
+ false
+ },
+ rustc_middle::ty::Array(ty, _)
+ | rustc_middle::ty::RawPtr(TypeAndMut { ty, .. })
+ | rustc_middle::ty::Ref(_, ty, _)
+ | rustc_middle::ty::Slice(ty) => self.has_sig_drop_attr(cx, *ty),
+ _ => false,
+ }
+ }
+}
+
+struct SigDropHelper<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ is_chain_end: bool,
+ has_significant_drop: bool,
+ current_sig_drop: Option<FoundSigDrop>,
+ sig_drop_spans: Option<Vec<FoundSigDrop>>,
+ special_handling_for_binary_op: bool,
+ sig_drop_checker: SigDropChecker<'a, 'tcx>,
+}
+
+#[expect(clippy::enum_variant_names)]
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum LintSuggestion {
+ MoveOnly,
+ MoveAndDerefToCopy,
+ MoveAndClone,
+}
+
+#[derive(Clone, Copy)]
+struct FoundSigDrop {
+ found_span: Span,
+ is_unit_return_val: bool,
+ lint_suggestion: LintSuggestion,
+}
+
+impl<'a, 'tcx> SigDropHelper<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> SigDropHelper<'a, 'tcx> {
+ SigDropHelper {
+ cx,
+ is_chain_end: true,
+ has_significant_drop: false,
+ current_sig_drop: None,
+ sig_drop_spans: None,
+ special_handling_for_binary_op: false,
+ sig_drop_checker: SigDropChecker::new(cx),
+ }
+ }
+
+ fn find_sig_drop(&mut self, match_expr: &'tcx Expr<'_>) -> Option<Vec<FoundSigDrop>> {
+ self.visit_expr(match_expr);
+
+ // If sig drop spans is empty but we found a significant drop, it means that we didn't find
+ // a type that was trivially copyable as we moved up the chain after finding a significant
+ // drop, so move the entire scrutinee.
+ if self.has_significant_drop && self.sig_drop_spans.is_none() {
+ self.try_setting_current_suggestion(match_expr, true);
+ self.move_current_suggestion();
+ }
+
+ self.sig_drop_spans.take()
+ }
+
+ fn replace_current_sig_drop(
+ &mut self,
+ found_span: Span,
+ is_unit_return_val: bool,
+ lint_suggestion: LintSuggestion,
+ ) {
+ self.current_sig_drop.replace(FoundSigDrop {
+ found_span,
+ is_unit_return_val,
+ lint_suggestion,
+ });
+ }
+
+ /// This will try to set the current suggestion (so it can be moved into the suggestions vec
+ /// later). If `allow_move_and_clone` is false, the suggestion *won't* be set -- this gives us
+ /// an opportunity to look for another type in the chain that will be trivially copyable.
+ /// However, if we are at the end of the chain, we want to accept whatever is there. (The
+ /// suggestion won't actually be output, but the diagnostic message will be output, so the user
+ /// can determine the best way to handle the lint.)
+ fn try_setting_current_suggestion(&mut self, expr: &'tcx Expr<'_>, allow_move_and_clone: bool) {
+ if self.current_sig_drop.is_some() {
+ return;
+ }
+ let ty = self.sig_drop_checker.get_type(expr);
+ if ty.is_ref() {
+ // We checked that the type was ref, so builtin_deref will return Some TypeAndMut,
+ // but let's avoid any chance of an ICE
+ if let Some(TypeAndMut { ty, .. }) = ty.builtin_deref(true) {
+ if ty.is_trivially_pure_clone_copy() {
+ self.replace_current_sig_drop(expr.span, false, LintSuggestion::MoveAndDerefToCopy);
+ } else if allow_move_and_clone {
+ self.replace_current_sig_drop(expr.span, false, LintSuggestion::MoveAndClone);
+ }
+ }
+ } else if ty.is_trivially_pure_clone_copy() {
+ self.replace_current_sig_drop(expr.span, false, LintSuggestion::MoveOnly);
+ } else if allow_move_and_clone {
+ self.replace_current_sig_drop(expr.span, false, LintSuggestion::MoveAndClone);
+ }
+ }
+
+ fn move_current_suggestion(&mut self) {
+ if let Some(current) = self.current_sig_drop.take() {
+ self.sig_drop_spans.get_or_insert_with(Vec::new).push(current);
+ }
+ }
+
+ fn visit_exprs_for_binary_ops(
+ &mut self,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+ is_unit_return_val: bool,
+ span: Span,
+ ) {
+ self.special_handling_for_binary_op = true;
+ self.visit_expr(left);
+ self.visit_expr(right);
+
+ // If either side had a significant drop, suggest moving the entire scrutinee to avoid
+ // unnecessary copies and to simplify cases where both sides have significant drops.
+ if self.has_significant_drop {
+ self.replace_current_sig_drop(span, is_unit_return_val, LintSuggestion::MoveOnly);
+ }
+
+ self.special_handling_for_binary_op = false;
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for SigDropHelper<'a, 'tcx> {
+ fn visit_expr(&mut self, ex: &'tcx Expr<'_>) {
+ if !self.is_chain_end
+ && self
+ .sig_drop_checker
+ .has_sig_drop_attr(self.cx, self.sig_drop_checker.get_type(ex))
+ {
+ self.has_significant_drop = true;
+ return;
+ }
+ self.is_chain_end = false;
+
+ match ex.kind {
+ ExprKind::MethodCall(_, [ref expr, ..], _) => {
+ self.visit_expr(expr);
+ }
+ ExprKind::Binary(_, left, right) => {
+ self.visit_exprs_for_binary_ops(left, right, false, ex.span);
+ }
+ ExprKind::Assign(left, right, _) | ExprKind::AssignOp(_, left, right) => {
+ self.visit_exprs_for_binary_ops(left, right, true, ex.span);
+ }
+ ExprKind::Tup(exprs) => {
+ for expr in exprs {
+ self.visit_expr(expr);
+ if self.has_significant_drop {
+ // We may not have set current_sig_drop if all the suggestions were
+ // MoveAndClone, so add this tuple item's full expression in that case.
+ if self.current_sig_drop.is_none() {
+ self.try_setting_current_suggestion(expr, true);
+ }
+
+ // Now we are guaranteed to have something, so add it to the final vec.
+ self.move_current_suggestion();
+ }
+ // Reset `has_significant_drop` after each tuple expression so we can look for
+ // additional cases.
+ self.has_significant_drop = false;
+ }
+ if self.sig_drop_spans.is_some() {
+ self.has_significant_drop = true;
+ }
+ }
+ ExprKind::Box(..) |
+ ExprKind::Array(..) |
+ ExprKind::Call(..) |
+ ExprKind::Unary(..) |
+ ExprKind::If(..) |
+ ExprKind::Match(..) |
+ ExprKind::Field(..) |
+ ExprKind::Index(..) |
+ ExprKind::Ret(..) |
+ ExprKind::Repeat(..) |
+ ExprKind::Yield(..) |
+ ExprKind::MethodCall(..) => walk_expr(self, ex),
+ ExprKind::AddrOf(_, _, _) |
+ ExprKind::Block(_, _) |
+ ExprKind::Break(_, _) |
+ ExprKind::Cast(_, _) |
+ // Don't want to check the closure itself, only invocation, which is covered by MethodCall
+ ExprKind::Closure { .. } |
+ ExprKind::ConstBlock(_) |
+ ExprKind::Continue(_) |
+ ExprKind::DropTemps(_) |
+ ExprKind::Err |
+ ExprKind::InlineAsm(_) |
+ ExprKind::Let(_) |
+ ExprKind::Lit(_) |
+ ExprKind::Loop(_, _, _, _) |
+ ExprKind::Path(_) |
+ ExprKind::Struct(_, _, _) |
+ ExprKind::Type(_, _) => {
+ return;
+ }
+ }
+
+ // Once a significant temporary has been found, we need to go back up at least 1 level to
+ // find the span to extract for replacement, so the temporary gets dropped. However, for
+ // binary ops, we want to move the whole scrutinee so we avoid unnecessary copies and to
+ // simplify cases where both sides have significant drops.
+ if self.has_significant_drop && !self.special_handling_for_binary_op {
+ self.try_setting_current_suggestion(ex, false);
+ }
+ }
+}
+
+struct ArmSigDropHelper<'a, 'tcx> {
+ sig_drop_checker: SigDropChecker<'a, 'tcx>,
+ found_sig_drop_spans: FxHashSet<Span>,
+}
+
+impl<'a, 'tcx> ArmSigDropHelper<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> ArmSigDropHelper<'a, 'tcx> {
+ ArmSigDropHelper {
+ sig_drop_checker: SigDropChecker::new(cx),
+ found_sig_drop_spans: FxHashSet::<Span>::default(),
+ }
+ }
+}
+
+fn has_significant_drop_in_arms<'tcx, 'a>(cx: &'a LateContext<'tcx>, arms: &'tcx [Arm<'_>]) -> FxHashSet<Span> {
+ let mut helper = ArmSigDropHelper::new(cx);
+ for arm in arms {
+ helper.visit_expr(arm.body);
+ }
+ helper.found_sig_drop_spans
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for ArmSigDropHelper<'a, 'tcx> {
+ fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
+ if self
+ .sig_drop_checker
+ .has_sig_drop_attr(self.sig_drop_checker.cx, self.sig_drop_checker.get_type(ex))
+ {
+ self.found_sig_drop_spans.insert(ex.span);
+ return;
+ }
+ walk_expr(self, ex);
+ }
+}
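Roughly, the situation this machinery detects: a temporary with a significant `Drop` (for example a `MutexGuard`) created while evaluating the scrutinee stays alive until the end of the whole `match`. A minimal sketch with illustrative names:

    use std::sync::Mutex;

    fn first_or_zero(m: &Mutex<Vec<i32>>) -> i32 {
        match m.lock().unwrap().first().copied() {   // the lock guard lives until the end of the match
            Some(v) => v,
            None => 0,
        }
    }
    // suggested shape (MoveOnly case):
    //     let value = m.lock().unwrap().first().copied();
    //     match value { ... }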
diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
new file mode 100644
index 000000000..92091a0c3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
@@ -0,0 +1,248 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{expr_block, snippet};
+use clippy_utils::ty::{implements_trait, match_type, peel_mid_ty_refs};
+use clippy_utils::{
+ is_lint_allowed, is_unit_expr, is_wild, paths, peel_blocks, peel_hir_pat_refs, peel_n_hir_expr_refs,
+};
+use core::cmp::max;
+use rustc_errors::Applicability;
+use rustc_hir::{Arm, BindingAnnotation, Block, Expr, ExprKind, Pat, PatKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+use super::{MATCH_BOOL, SINGLE_MATCH, SINGLE_MATCH_ELSE};
+
+#[rustfmt::skip]
+pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
+ if arms.len() == 2 && arms[0].guard.is_none() && arms[1].guard.is_none() {
+ if expr.span.from_expansion() {
+ // Don't lint match expressions present in
+ // macro_rules! block
+ return;
+ }
+ if let PatKind::Or(..) = arms[0].pat.kind {
+ // don't lint for or patterns for now, this makes
+ // the lint noisy in unnecessary situations
+ return;
+ }
+ let els = arms[1].body;
+ let els = if is_unit_expr(peel_blocks(els)) {
+ None
+ } else if let ExprKind::Block(Block { stmts, expr: block_expr, .. }, _) = els.kind {
+ if stmts.len() == 1 && block_expr.is_none() || stmts.is_empty() && block_expr.is_some() {
+ // single statement/expr "else" block, don't lint
+ return;
+ }
+ // block with 2+ statements or 1 expr and 1+ statement
+ Some(els)
+ } else {
+ // not a block, don't lint
+ return;
+ };
+
+ let ty = cx.typeck_results().expr_ty(ex);
+ if *ty.kind() != ty::Bool || is_lint_allowed(cx, MATCH_BOOL, ex.hir_id) {
+ check_single_pattern(cx, ex, arms, expr, els);
+ check_opt_like(cx, ex, arms, expr, ty, els);
+ }
+ }
+}
+
+fn check_single_pattern(
+ cx: &LateContext<'_>,
+ ex: &Expr<'_>,
+ arms: &[Arm<'_>],
+ expr: &Expr<'_>,
+ els: Option<&Expr<'_>>,
+) {
+ if is_wild(arms[1].pat) {
+ report_single_pattern(cx, ex, arms, expr, els);
+ }
+}
+
+fn report_single_pattern(
+ cx: &LateContext<'_>,
+ ex: &Expr<'_>,
+ arms: &[Arm<'_>],
+ expr: &Expr<'_>,
+ els: Option<&Expr<'_>>,
+) {
+ let lint = if els.is_some() { SINGLE_MATCH_ELSE } else { SINGLE_MATCH };
+ let els_str = els.map_or(String::new(), |els| {
+ format!(" else {}", expr_block(cx, els, None, "..", Some(expr.span)))
+ });
+
+ let (pat, pat_ref_count) = peel_hir_pat_refs(arms[0].pat);
+ let (msg, sugg) = if_chain! {
+ if let PatKind::Path(_) | PatKind::Lit(_) = pat.kind;
+ let (ty, ty_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(ex));
+ if let Some(spe_trait_id) = cx.tcx.lang_items().structural_peq_trait();
+ if let Some(pe_trait_id) = cx.tcx.lang_items().eq_trait();
+ if ty.is_integral() || ty.is_char() || ty.is_str()
+ || (implements_trait(cx, ty, spe_trait_id, &[])
+ && implements_trait(cx, ty, pe_trait_id, &[ty.into()]));
+ then {
+ // scrutinee derives PartialEq and the pattern is a constant.
+ let pat_ref_count = match pat.kind {
+ // string literals are already a reference.
+ PatKind::Lit(Expr { kind: ExprKind::Lit(lit), .. }) if lit.node.is_str() => pat_ref_count + 1,
+ _ => pat_ref_count,
+ };
+ // References are only implicitly added to the pattern, so no overflow here.
+ // e.g. will work: match &Some(_) { Some(_) => () }
+ // will not: match Some(_) { &Some(_) => () }
+ let ref_count_diff = ty_ref_count - pat_ref_count;
+
+ // Try to remove address of expressions first.
+ let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff);
+ let ref_count_diff = ref_count_diff - removed;
+
+ let msg = "you seem to be trying to use `match` for an equality check. Consider using `if`";
+ let sugg = format!(
+ "if {} == {}{} {}{}",
+ snippet(cx, ex.span, ".."),
+ // PartialEq for different reference counts may not exist.
+ "&".repeat(ref_count_diff),
+ snippet(cx, arms[0].pat.span, ".."),
+ expr_block(cx, arms[0].body, None, "..", Some(expr.span)),
+ els_str,
+ );
+ (msg, sugg)
+ } else {
+ let msg = "you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`";
+ let sugg = format!(
+ "if let {} = {} {}{}",
+ snippet(cx, arms[0].pat.span, ".."),
+ snippet(cx, ex.span, ".."),
+ expr_block(cx, arms[0].body, None, "..", Some(expr.span)),
+ els_str,
+ );
+ (msg, sugg)
+ }
+ };
+
+ span_lint_and_sugg(
+ cx,
+ lint,
+ expr.span,
+ msg,
+ "try this",
+ sugg,
+ Applicability::HasPlaceholders,
+ );
+}
+
+fn check_opt_like<'a>(
+ cx: &LateContext<'a>,
+ ex: &Expr<'_>,
+ arms: &[Arm<'_>],
+ expr: &Expr<'_>,
+ ty: Ty<'a>,
+ els: Option<&Expr<'_>>,
+) {
+ // We don't want to lint if the second arm contains an enum which could
+ // have more variants in the future.
+ if form_exhaustive_matches(cx, ty, arms[0].pat, arms[1].pat) {
+ report_single_pattern(cx, ex, arms, expr, els);
+ }
+}
+
+/// Returns `true` if all of the types in the pattern are enums which we know
+/// won't be expanded in the future
+fn pat_in_candidate_enum<'a>(cx: &LateContext<'a>, ty: Ty<'a>, pat: &Pat<'_>) -> bool {
+ let mut paths_and_types = Vec::new();
+ collect_pat_paths(&mut paths_and_types, cx, pat, ty);
+ paths_and_types.iter().all(|ty| in_candidate_enum(cx, *ty))
+}
+
+/// Returns `true` if the given type is an enum we know won't be expanded in the future
+fn in_candidate_enum<'a>(cx: &LateContext<'a>, ty: Ty<'_>) -> bool {
+ // list of candidate `Enum`s we know will never get any more members
+ let candidates = [&paths::COW, &paths::OPTION, &paths::RESULT];
+
+ for candidate_ty in candidates {
+ if match_type(cx, ty, candidate_ty) {
+ return true;
+ }
+ }
+ false
+}
+
+/// Collects types from the given pattern
+fn collect_pat_paths<'a>(acc: &mut Vec<Ty<'a>>, cx: &LateContext<'a>, pat: &Pat<'_>, ty: Ty<'a>) {
+ match pat.kind {
+ PatKind::Tuple(inner, _) => inner.iter().for_each(|p| {
+ let p_ty = cx.typeck_results().pat_ty(p);
+ collect_pat_paths(acc, cx, p, p_ty);
+ }),
+ PatKind::TupleStruct(..) | PatKind::Binding(BindingAnnotation::Unannotated, .., None) | PatKind::Path(_) => {
+ acc.push(ty);
+ },
+ _ => {},
+ }
+}
+
+/// Returns true if the given arm of pattern matching contains wildcard patterns.
+fn contains_only_wilds(pat: &Pat<'_>) -> bool {
+ match pat.kind {
+ PatKind::Wild => true,
+ PatKind::Tuple(inner, _) | PatKind::TupleStruct(_, inner, ..) => inner.iter().all(contains_only_wilds),
+ _ => false,
+ }
+}
+
+/// Returns true if the given patterns forms only exhaustive matches that don't contain enum
+/// patterns without a wildcard.
+fn form_exhaustive_matches<'a>(cx: &LateContext<'a>, ty: Ty<'a>, left: &Pat<'_>, right: &Pat<'_>) -> bool {
+ match (&left.kind, &right.kind) {
+ (PatKind::Wild, _) | (_, PatKind::Wild) => true,
+ (PatKind::Tuple(left_in, left_pos), PatKind::Tuple(right_in, right_pos)) => {
+ // We don't actually know the position and the presence of the `..` (dotdot) operator
+ // in the arms, so we need to evaluate the correct offsets here in order to iterate in
+ // both arms at the same time.
+ let len = max(
+ left_in.len() + {
+ if left_pos.is_some() { 1 } else { 0 }
+ },
+ right_in.len() + {
+ if right_pos.is_some() { 1 } else { 0 }
+ },
+ );
+ let mut left_pos = left_pos.unwrap_or(usize::MAX);
+ let mut right_pos = right_pos.unwrap_or(usize::MAX);
+ let mut left_dot_space = 0;
+ let mut right_dot_space = 0;
+ for i in 0..len {
+ let mut found_dotdot = false;
+ if i == left_pos {
+ left_dot_space += 1;
+ if left_dot_space < len - left_in.len() {
+ left_pos += 1;
+ }
+ found_dotdot = true;
+ }
+ if i == right_pos {
+ right_dot_space += 1;
+ if right_dot_space < len - right_in.len() {
+ right_pos += 1;
+ }
+ found_dotdot = true;
+ }
+ if found_dotdot {
+ continue;
+ }
+ if !contains_only_wilds(&left_in[i - left_dot_space])
+ && !contains_only_wilds(&right_in[i - right_dot_space])
+ {
+ return false;
+ }
+ }
+ true
+ },
+ (PatKind::TupleStruct(..), PatKind::Path(_)) => pat_in_candidate_enum(cx, ty, right),
+ (PatKind::TupleStruct(..), PatKind::TupleStruct(_, inner, _)) => {
+ pat_in_candidate_enum(cx, ty, right) && inner.iter().all(contains_only_wilds)
+ },
+ _ => false,
+ }
+}
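For reference, the shape `check_single_pattern` rewrites: a two-arm `match` whose second arm is a wildcard with an empty body (names and values are illustrative).

    let opt: Option<i32> = Some(5);
    match opt {
        Some(v) => println!("{v}"),
        _ => (),
    }
    // suggested: if let Some(v) = opt { println!("{v}") }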
diff --git a/src/tools/clippy/clippy_lints/src/matches/try_err.rs b/src/tools/clippy/clippy_lints/src/matches/try_err.rs
new file mode 100644
index 000000000..0491a0679
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/try_err.rs
@@ -0,0 +1,145 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{get_parent_expr, is_lang_ctor, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::ResultErr;
+use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::{hygiene, sym};
+
+use super::TRY_ERR;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, scrutinee: &'tcx Expr<'_>) {
+ // Looks for a structure like this:
+ // match ::std::ops::Try::into_result(Err(5)) {
+ // ::std::result::Result::Err(err) =>
+ // #[allow(unreachable_code)]
+ // return ::std::ops::Try::from_error(::std::convert::From::from(err)),
+ // ::std::result::Result::Ok(val) =>
+ // #[allow(unreachable_code)]
+ // val,
+ // };
+ if_chain! {
+ if let ExprKind::Call(match_fun, try_args) = scrutinee.kind;
+ if let ExprKind::Path(ref match_fun_path) = match_fun.kind;
+ if matches!(match_fun_path, QPath::LangItem(LangItem::TryTraitBranch, ..));
+ if let Some(try_arg) = try_args.get(0);
+ if let ExprKind::Call(err_fun, err_args) = try_arg.kind;
+ if let Some(err_arg) = err_args.get(0);
+ if let ExprKind::Path(ref err_fun_path) = err_fun.kind;
+ if is_lang_ctor(cx, err_fun_path, ResultErr);
+ if let Some(return_ty) = find_return_type(cx, &expr.kind);
+ then {
+ let prefix;
+ let suffix;
+ let err_ty;
+
+ if let Some(ty) = result_error_type(cx, return_ty) {
+ prefix = "Err(";
+ suffix = ")";
+ err_ty = ty;
+ } else if let Some(ty) = poll_result_error_type(cx, return_ty) {
+ prefix = "Poll::Ready(Err(";
+ suffix = "))";
+ err_ty = ty;
+ } else if let Some(ty) = poll_option_result_error_type(cx, return_ty) {
+ prefix = "Poll::Ready(Some(Err(";
+ suffix = ")))";
+ err_ty = ty;
+ } else {
+ return;
+ };
+
+ let expr_err_ty = cx.typeck_results().expr_ty(err_arg);
+ let span = hygiene::walk_chain(err_arg.span, try_arg.span.ctxt());
+ let mut applicability = Applicability::MachineApplicable;
+ let origin_snippet = snippet_with_applicability(cx, span, "_", &mut applicability);
+ let ret_prefix = if get_parent_expr(cx, expr).map_or(false, |e| matches!(e.kind, ExprKind::Ret(_))) {
+ "" // already returns
+ } else {
+ "return "
+ };
+ let suggestion = if err_ty == expr_err_ty {
+ format!("{}{}{}{}", ret_prefix, prefix, origin_snippet, suffix)
+ } else {
+ format!("{}{}{}.into(){}", ret_prefix, prefix, origin_snippet, suffix)
+ };
+
+ span_lint_and_sugg(
+ cx,
+ TRY_ERR,
+ expr.span,
+ "returning an `Err(_)` with the `?` operator",
+ "try this",
+ suggestion,
+ applicability,
+ );
+ }
+ }
+}
+
+/// Finds function return type by examining return expressions in match arms.
+fn find_return_type<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx ExprKind<'_>) -> Option<Ty<'tcx>> {
+ if let ExprKind::Match(_, arms, MatchSource::TryDesugar) = expr {
+ for arm in arms.iter() {
+ if let ExprKind::Ret(Some(ret)) = arm.body.kind {
+ return Some(cx.typeck_results().expr_ty(ret));
+ }
+ }
+ }
+ None
+}
+
+/// Extracts the error type from Result<T, E>.
+fn result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+ if_chain! {
+ if let ty::Adt(_, subst) = ty.kind();
+ if is_type_diagnostic_item(cx, ty, sym::Result);
+ then {
+ Some(subst.type_at(1))
+ } else {
+ None
+ }
+ }
+}
+
+/// Extracts the error type from Poll<Result<T, E>>.
+fn poll_result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+ if_chain! {
+ if let ty::Adt(def, subst) = ty.kind();
+ if match_def_path(cx, def.did(), &paths::POLL);
+ let ready_ty = subst.type_at(0);
+
+ if let ty::Adt(ready_def, ready_subst) = ready_ty.kind();
+ if cx.tcx.is_diagnostic_item(sym::Result, ready_def.did());
+ then {
+ Some(ready_subst.type_at(1))
+ } else {
+ None
+ }
+ }
+}
+
+/// Extracts the error type from Poll<Option<Result<T, E>>>.
+fn poll_option_result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+ if_chain! {
+ if let ty::Adt(def, subst) = ty.kind();
+ if match_def_path(cx, def.did(), &paths::POLL);
+ let ready_ty = subst.type_at(0);
+
+ if let ty::Adt(ready_def, ready_subst) = ready_ty.kind();
+ if cx.tcx.is_diagnostic_item(sym::Option, ready_def.did());
+ let some_ty = ready_subst.type_at(0);
+
+ if let ty::Adt(some_def, some_subst) = some_ty.kind();
+ if cx.tcx.is_diagnostic_item(sym::Result, some_def.did());
+ then {
+ Some(some_subst.type_at(1))
+ } else {
+ None
+ }
+ }
+}
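A minimal sketch of the `?`-on-`Err` shape this check rewrites (function name, types, and message are illustrative):

    fn first_byte(data: &[u8]) -> Result<u8, String> {
        let b = match data.first() {
            Some(&b) => b,
            None => Err(String::from("empty input"))?,   // lint: returning an `Err(_)` with the `?` operator
        };
        Ok(b)
    }
    // suggested: None => return Err(String::from("empty input")),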
diff --git a/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs b/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs
new file mode 100644
index 000000000..459513e65
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs
@@ -0,0 +1,24 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_wild;
+use rustc_hir::{Arm, PatKind};
+use rustc_lint::LateContext;
+
+use super::WILDCARD_IN_OR_PATTERNS;
+
+pub(crate) fn check(cx: &LateContext<'_>, arms: &[Arm<'_>]) {
+ for arm in arms {
+ if let PatKind::Or(fields) = arm.pat.kind {
+ // look for an or-pattern with more than one alternative where at least one is a wild pattern
+ if fields.len() > 1 && fields.iter().any(is_wild) {
+ span_lint_and_help(
+ cx,
+ WILDCARD_IN_OR_PATTERNS,
+ arm.pat.span,
+ "wildcard pattern covers any other pattern as it will match anyway",
+ None,
+ "consider handling `_` separately",
+ );
+ }
+ }
+ }
+}
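Sketch of an or-pattern this check flags: the wildcard alternative already covers everything the other alternatives would match (values are illustrative).

    let s = "b";
    match s {
        "a" | _ => println!("matched"),   // `_` makes the `"a"` alternative redundant
    }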
diff --git a/src/tools/clippy/clippy_lints/src/mem_forget.rs b/src/tools/clippy/clippy_lints/src/mem_forget.rs
new file mode 100644
index 000000000..d6c235b5a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mem_forget.rs
@@ -0,0 +1,46 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `std::mem::forget(t)` where `t` is
+ /// `Drop`.
+ ///
+ /// ### Why is this bad?
+ /// `std::mem::forget(t)` prevents `t` from running its
+ /// destructor, possibly causing leaks.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::mem;
+ /// # use std::rc::Rc;
+ /// mem::forget(Rc::new(55))
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MEM_FORGET,
+ restriction,
+ "`mem::forget` usage on `Drop` types, likely to cause memory leaks"
+}
+
+declare_lint_pass!(MemForget => [MEM_FORGET]);
+
+impl<'tcx> LateLintPass<'tcx> for MemForget {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let ExprKind::Call(path_expr, [ref first_arg, ..]) = e.kind {
+ if let ExprKind::Path(ref qpath) = path_expr.kind {
+ if let Some(def_id) = cx.qpath_res(qpath, path_expr.hir_id).opt_def_id() {
+ if cx.tcx.is_diagnostic_item(sym::mem_forget, def_id) {
+ let forgot_ty = cx.typeck_results().expr_ty(first_arg);
+
+ if forgot_ty.ty_adt_def().map_or(false, |def| def.has_dtor(cx.tcx)) {
+ span_lint(cx, MEM_FORGET, e.span, "usage of `mem::forget` on `Drop` type");
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mem_replace.rs b/src/tools/clippy/clippy_lints/src/mem_replace.rs
new file mode 100644
index 000000000..41073d40f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mem_replace.rs
@@ -0,0 +1,264 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::{snippet, snippet_with_applicability};
+use clippy_utils::ty::is_non_aggregate_primitive_type;
+use clippy_utils::{is_default_equivalent, is_lang_ctor, meets_msrv, msrvs};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::OptionNone;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `mem::replace()` on an `Option` with
+ /// `None`.
+ ///
+ /// ### Why is this bad?
+ /// `Option` already has the method `take()` for
+ /// taking its current value (Some(..) or None) and replacing it with
+ /// `None`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::mem;
+ ///
+ /// let mut an_option = Some(0);
+ /// let replaced = mem::replace(&mut an_option, None);
+ /// ```
+ /// Is better expressed with:
+ /// ```rust
+ /// let mut an_option = Some(0);
+ /// let taken = an_option.take();
+ /// ```
+ #[clippy::version = "1.31.0"]
+ pub MEM_REPLACE_OPTION_WITH_NONE,
+ style,
+ "replacing an `Option` with `None` instead of `take()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `mem::replace(&mut _, mem::uninitialized())`
+ /// and `mem::replace(&mut _, mem::zeroed())`.
+ ///
+ /// ### Why is this bad?
+ /// This will lead to undefined behavior even if the
+ } else if match_def_path(cx, right_id, expected_left) && match_def_path(cx, left_id, expected_right) {
+ /// observed in the case of a panic.
+ ///
+ /// ### Example
+ /// ```
+ /// use std::mem;
+ ///# fn may_panic(v: Vec<i32>) -> Vec<i32> { v }
+ ///
+ /// #[allow(deprecated, invalid_value)]
+ /// fn myfunc (v: &mut Vec<i32>) {
+ /// let taken_v = unsafe { mem::replace(v, mem::uninitialized()) };
+ /// let new_v = may_panic(taken_v); // undefined behavior on panic
+ /// mem::forget(mem::replace(v, new_v));
+ /// }
+ /// ```
+ ///
+ /// The [take_mut](https://docs.rs/take_mut) crate offers a sound solution,
+ /// at the cost of either lazily creating a replacement value or aborting
+ /// on panic, to ensure that the uninitialized value cannot be observed.
+ #[clippy::version = "1.39.0"]
+ pub MEM_REPLACE_WITH_UNINIT,
+ correctness,
+ "`mem::replace(&mut _, mem::uninitialized())` or `mem::replace(&mut _, mem::zeroed())`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `std::mem::replace` on a value of type
+ /// `T` with `T::default()`.
+ ///
+ /// ### Why is this bad?
+ /// The `std::mem` module already has the method `take` to
+ /// take the current value and replace it with the default value of that type.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut text = String::from("foo");
+ /// let replaced = std::mem::replace(&mut text, String::default());
+ /// ```
+ /// Is better expressed with:
+ /// ```rust
+ /// let mut text = String::from("foo");
+ /// let taken = std::mem::take(&mut text);
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub MEM_REPLACE_WITH_DEFAULT,
+ style,
+ "replacing a value of type `T` with `T::default()` instead of using `std::mem::take`"
+}
+
+impl_lint_pass!(MemReplace =>
+ [MEM_REPLACE_OPTION_WITH_NONE, MEM_REPLACE_WITH_UNINIT, MEM_REPLACE_WITH_DEFAULT]);
+
+fn check_replace_option_with_none(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
+ if let ExprKind::Path(ref replacement_qpath) = src.kind {
+ // Check that second argument is `Option::None`
+ if is_lang_ctor(cx, replacement_qpath, OptionNone) {
+ // Since this is a late pass (already type-checked),
+ // and we already know that the second argument is an
+ // `Option`, we do not need to check the first
+ // argument's type. All that's left is to get
+ // replacee's path.
+ let replaced_path = match dest.kind {
+ ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, replaced) => {
+ if let ExprKind::Path(QPath::Resolved(None, replaced_path)) = replaced.kind {
+ replaced_path
+ } else {
+ return;
+ }
+ },
+ ExprKind::Path(QPath::Resolved(None, replaced_path)) => replaced_path,
+ _ => return,
+ };
+
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ MEM_REPLACE_OPTION_WITH_NONE,
+ expr_span,
+ "replacing an `Option` with `None`",
+ "consider `Option::take()` instead",
+ format!(
+ "{}.take()",
+ snippet_with_applicability(cx, replaced_path.span, "", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+}
+
+fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
+ if_chain! {
+ // check if replacement is mem::MaybeUninit::uninit().assume_init()
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(src.hir_id);
+ if cx.tcx.is_diagnostic_item(sym::assume_init, method_def_id);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ MEM_REPLACE_WITH_UNINIT,
+ expr_span,
+ "replacing with `mem::MaybeUninit::uninit().assume_init()`",
+ "consider using",
+ format!(
+ "std::ptr::read({})",
+ snippet_with_applicability(cx, dest.span, "", &mut applicability)
+ ),
+ applicability,
+ );
+ return;
+ }
+ }
+
+ if_chain! {
+ if let ExprKind::Call(repl_func, repl_args) = src.kind;
+ if repl_args.is_empty();
+ if let ExprKind::Path(ref repl_func_qpath) = repl_func.kind;
+ if let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id();
+ then {
+ if cx.tcx.is_diagnostic_item(sym::mem_uninitialized, repl_def_id) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ MEM_REPLACE_WITH_UNINIT,
+ expr_span,
+ "replacing with `mem::uninitialized()`",
+ "consider using",
+ format!(
+ "std::ptr::read({})",
+ snippet_with_applicability(cx, dest.span, "", &mut applicability)
+ ),
+ applicability,
+ );
+ } else if cx.tcx.is_diagnostic_item(sym::mem_zeroed, repl_def_id) &&
+ !cx.typeck_results().expr_ty(src).is_primitive() {
+ span_lint_and_help(
+ cx,
+ MEM_REPLACE_WITH_UNINIT,
+ expr_span,
+ "replacing with `mem::zeroed()`",
+ None,
+ "consider using a default value or the `take_mut` crate instead",
+ );
+ }
+ }
+ }
+}
+
+fn check_replace_with_default(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
+ // disable lint for primitives
+ let expr_type = cx.typeck_results().expr_ty_adjusted(src);
+ if is_non_aggregate_primitive_type(expr_type) {
+ return;
+ }
+ // disable lint for Option since it is covered in another lint
+ if let ExprKind::Path(q) = &src.kind {
+ if is_lang_ctor(cx, q, OptionNone) {
+ return;
+ }
+ }
+ if is_default_equivalent(cx, src) && !in_external_macro(cx.tcx.sess, expr_span) {
+ span_lint_and_then(
+ cx,
+ MEM_REPLACE_WITH_DEFAULT,
+ expr_span,
+ "replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`",
+ |diag| {
+ if !expr_span.from_expansion() {
+ let suggestion = format!("std::mem::take({})", snippet(cx, dest.span, ""));
+
+ diag.span_suggestion(
+ expr_span,
+ "consider using",
+ suggestion,
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ );
+ }
+}
+
+pub struct MemReplace {
+ msrv: Option<RustcVersion>,
+}
+
+impl MemReplace {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for MemReplace {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ // Check that `expr` is a call to `mem::replace()`
+ if let ExprKind::Call(func, func_args) = expr.kind;
+ if let ExprKind::Path(ref func_qpath) = func.kind;
+ if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
+ if cx.tcx.is_diagnostic_item(sym::mem_replace, def_id);
+ if let [dest, src] = func_args;
+ then {
+ check_replace_option_with_none(cx, src, dest, expr.span);
+ check_replace_with_uninit(cx, src, dest, expr.span);
+ if meets_msrv(self.msrv, msrvs::MEM_TAKE) {
+ check_replace_with_default(cx, src, dest, expr.span);
+ }
+ }
+ }
+ }
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
new file mode 100644
index 000000000..2f117e4dc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
@@ -0,0 +1,190 @@
+use super::{contains_return, BIND_INSTEAD_OF_MAP};
+use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::{snippet, snippet_with_macro_callsite};
+use clippy_utils::{peel_blocks, visitors::find_all_ret_expressions};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
+use rustc_hir::{LangItem, QPath};
+use rustc_lint::LateContext;
+use rustc_middle::ty::DefIdTree;
+use rustc_span::Span;
+
+pub(crate) struct OptionAndThenSome;
+
+impl BindInsteadOfMap for OptionAndThenSome {
+ const VARIANT_LANG_ITEM: LangItem = LangItem::OptionSome;
+ const BAD_METHOD_NAME: &'static str = "and_then";
+ const GOOD_METHOD_NAME: &'static str = "map";
+}
+
+pub(crate) struct ResultAndThenOk;
+
+impl BindInsteadOfMap for ResultAndThenOk {
+ const VARIANT_LANG_ITEM: LangItem = LangItem::ResultOk;
+ const BAD_METHOD_NAME: &'static str = "and_then";
+ const GOOD_METHOD_NAME: &'static str = "map";
+}
+
+pub(crate) struct ResultOrElseErrInfo;
+
+impl BindInsteadOfMap for ResultOrElseErrInfo {
+ const VARIANT_LANG_ITEM: LangItem = LangItem::ResultErr;
+ const BAD_METHOD_NAME: &'static str = "or_else";
+ const GOOD_METHOD_NAME: &'static str = "map_err";
+}
+
+pub(crate) trait BindInsteadOfMap {
+ const VARIANT_LANG_ITEM: LangItem;
+ const BAD_METHOD_NAME: &'static str;
+ const GOOD_METHOD_NAME: &'static str;
+
+ fn no_op_msg(cx: &LateContext<'_>) -> Option<String> {
+ let variant_id = cx.tcx.lang_items().require(Self::VARIANT_LANG_ITEM).ok()?;
+ let item_id = cx.tcx.parent(variant_id);
+ Some(format!(
+ "using `{}.{}({})`, which is a no-op",
+ cx.tcx.item_name(item_id),
+ Self::BAD_METHOD_NAME,
+ cx.tcx.item_name(variant_id),
+ ))
+ }
+
+ fn lint_msg(cx: &LateContext<'_>) -> Option<String> {
+ let variant_id = cx.tcx.lang_items().require(Self::VARIANT_LANG_ITEM).ok()?;
+ let item_id = cx.tcx.parent(variant_id);
+ Some(format!(
+ "using `{}.{}(|x| {}(y))`, which is more succinctly expressed as `{}(|x| y)`",
+ cx.tcx.item_name(item_id),
+ Self::BAD_METHOD_NAME,
+ cx.tcx.item_name(variant_id),
+ Self::GOOD_METHOD_NAME
+ ))
+ }
+
+ fn lint_closure_autofixable(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ recv: &hir::Expr<'_>,
+ closure_expr: &hir::Expr<'_>,
+ closure_args_span: Span,
+ ) -> bool {
+ if_chain! {
+ if let hir::ExprKind::Call(some_expr, [inner_expr]) = closure_expr.kind;
+ if let hir::ExprKind::Path(QPath::Resolved(_, path)) = some_expr.kind;
+ if Self::is_variant(cx, path.res);
+ if !contains_return(inner_expr);
+ if let Some(msg) = Self::lint_msg(cx);
+ then {
+ let some_inner_snip = if inner_expr.span.from_expansion() {
+ snippet_with_macro_callsite(cx, inner_expr.span, "_")
+ } else {
+ snippet(cx, inner_expr.span, "_")
+ };
+
+ let closure_args_snip = snippet(cx, closure_args_span, "..");
+ let option_snip = snippet(cx, recv.span, "..");
+ let note = format!("{}.{}({} {})", option_snip, Self::GOOD_METHOD_NAME, closure_args_snip, some_inner_snip);
+ span_lint_and_sugg(
+ cx,
+ BIND_INSTEAD_OF_MAP,
+ expr.span,
+ &msg,
+ "try this",
+ note,
+ Applicability::MachineApplicable,
+ );
+ true
+ } else {
+ false
+ }
+ }
+ }
+
+ fn lint_closure(cx: &LateContext<'_>, expr: &hir::Expr<'_>, closure_expr: &hir::Expr<'_>) -> bool {
+ let mut suggs = Vec::new();
+ let can_sugg: bool = find_all_ret_expressions(cx, closure_expr, |ret_expr| {
+ if_chain! {
+ if !ret_expr.span.from_expansion();
+ if let hir::ExprKind::Call(func_path, [arg]) = ret_expr.kind;
+ if let hir::ExprKind::Path(QPath::Resolved(_, path)) = func_path.kind;
+ if Self::is_variant(cx, path.res);
+ if !contains_return(arg);
+ then {
+ suggs.push((ret_expr.span, arg.span.source_callsite()));
+ true
+ } else {
+ false
+ }
+ }
+ });
+ let (span, msg) = if_chain! {
+ if can_sugg;
+ if let hir::ExprKind::MethodCall(segment, ..) = expr.kind;
+ if let Some(msg) = Self::lint_msg(cx);
+ then { (segment.ident.span, msg) } else { return false; }
+ };
+ span_lint_and_then(cx, BIND_INSTEAD_OF_MAP, expr.span, &msg, |diag| {
+ multispan_sugg_with_applicability(
+ diag,
+ "try this",
+ Applicability::MachineApplicable,
+ std::iter::once((span, Self::GOOD_METHOD_NAME.into())).chain(
+ suggs
+ .into_iter()
+ .map(|(span1, span2)| (span1, snippet(cx, span2, "_").into())),
+ ),
+ );
+ });
+ true
+ }
+
+ /// Lint use of `_.and_then(|x| Some(y))` for `Option`s
+ fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, arg: &hir::Expr<'_>) -> bool {
+ if_chain! {
+ if let Some(adt) = cx.typeck_results().expr_ty(recv).ty_adt_def();
+ if let Ok(vid) = cx.tcx.lang_items().require(Self::VARIANT_LANG_ITEM);
+ if adt.did() == cx.tcx.parent(vid);
+ then {} else { return false; }
+ }
+
+ match arg.kind {
+ hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) => {
+ let closure_body = cx.tcx.hir().body(body);
+ let closure_expr = peel_blocks(&closure_body.value);
+
+ if Self::lint_closure_autofixable(cx, expr, recv, closure_expr, fn_decl_span) {
+ true
+ } else {
+ Self::lint_closure(cx, expr, closure_expr)
+ }
+ },
+ // `_.and_then(Some)` case, which is a no-op.
+ hir::ExprKind::Path(QPath::Resolved(_, path)) if Self::is_variant(cx, path.res) => {
+ if let Some(msg) = Self::no_op_msg(cx) {
+ span_lint_and_sugg(
+ cx,
+ BIND_INSTEAD_OF_MAP,
+ expr.span,
+ &msg,
+ "use the expression directly",
+ snippet(cx, recv.span, "..").into(),
+ Applicability::MachineApplicable,
+ );
+ }
+ true
+ },
+ _ => false,
+ }
+ }
+
+ fn is_variant(cx: &LateContext<'_>, res: Res) -> bool {
+ if let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Fn), id) = res {
+ if let Ok(variant_id) = cx.tcx.lang_items().require(Self::VARIANT_LANG_ITEM) {
+ return cx.tcx.parent(id) == variant_id;
+ }
+ }
+ false
+ }
+}
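In rough terms, the rewrites these three implementations drive (values are illustrative):

    let a = Some(1).and_then(|x| Some(x + 1));                  // suggested: Some(1).map(|x| x + 1)
    let b: Result<i32, i32> = Ok(1).and_then(|x| Ok(x + 1));    // suggested: ....map(|x| x + 1)
    let c: Result<i32, i32> = Err(1).or_else(|e| Err(e + 1));   // suggested: ....map_err(|e| e + 1)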
diff --git a/src/tools/clippy/clippy_lints/src/methods/bytes_nth.rs b/src/tools/clippy/clippy_lints/src/methods/bytes_nth.rs
new file mode 100644
index 000000000..44857d61f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/bytes_nth.rs
@@ -0,0 +1,34 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::BYTES_NTH;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>, recv: &'tcx Expr<'tcx>, n_arg: &'tcx Expr<'tcx>) {
+ let ty = cx.typeck_results().expr_ty(recv).peel_refs();
+ let caller_type = if ty.is_str() {
+ "str"
+ } else if is_type_diagnostic_item(cx, ty, sym::String) {
+ "String"
+ } else {
+ return;
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ BYTES_NTH,
+ expr.span,
+ &format!("called `.bytes().nth()` on a `{}`", caller_type),
+ "try",
+ format!(
+ "{}.as_bytes().get({})",
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability),
+ snippet_with_applicability(cx, n_arg.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+}
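Sketch of the call shape replaced here (the string is illustrative; note the replacement yields `Option<&u8>` rather than `Option<u8>`):

    let s = String::from("hello");
    let _ = s.bytes().nth(3);    // suggested: s.as_bytes().get(3)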
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
new file mode 100644
index 000000000..f7b79f083
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
@@ -0,0 +1,51 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{method_chain_args, path_def_id};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_lint::Lint;
+use rustc_middle::ty::{self, DefIdTree};
+
+/// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints.
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ info: &crate::methods::BinaryExprInfo<'_>,
+ chain_methods: &[&str],
+ lint: &'static Lint,
+ suggest: &str,
+) -> bool {
+ if_chain! {
+ if let Some(args) = method_chain_args(info.chain, chain_methods);
+ if let hir::ExprKind::Call(fun, [arg_char]) = info.other.kind;
+ if let Some(id) = path_def_id(cx, fun).map(|ctor_id| cx.tcx.parent(ctor_id));
+ if Some(id) == cx.tcx.lang_items().option_some_variant();
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let self_ty = cx.typeck_results().expr_ty_adjusted(&args[0][0]).peel_refs();
+
+ if *self_ty.kind() != ty::Str {
+ return false;
+ }
+
+ span_lint_and_sugg(
+ cx,
+ lint,
+ info.expr.span,
+ &format!("you should use the `{}` method", suggest),
+ "like this",
+ format!("{}{}.{}({})",
+ if info.eq { "" } else { "!" },
+ snippet_with_applicability(cx, args[0][0].span, "..", &mut applicability),
+ suggest,
+ snippet_with_applicability(cx, arg_char.span, "..", &mut applicability)),
+ applicability,
+ );
+
+ return true;
+ }
+ }
+
+ false
+}
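Roughly, the comparison shape this wrapper rewrites into the `starts_with`/`ends_with` suggestions (string and characters are illustrative):

    let s = "hello";
    let _ = s.chars().next() == Some('h');   // suggested: s.starts_with('h')
    let _ = s.chars().last() == Some('o');   // suggested: s.ends_with('o')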
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
new file mode 100644
index 000000000..a7c0e4392
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
@@ -0,0 +1,44 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::method_chain_args;
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_lint::Lint;
+
+/// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints with `unwrap()`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ info: &crate::methods::BinaryExprInfo<'_>,
+ chain_methods: &[&str],
+ lint: &'static Lint,
+ suggest: &str,
+) -> bool {
+ if_chain! {
+ if let Some(args) = method_chain_args(info.chain, chain_methods);
+ if let hir::ExprKind::Lit(ref lit) = info.other.kind;
+ if let ast::LitKind::Char(c) = lit.node;
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ lint,
+ info.expr.span,
+ &format!("you should use the `{}` method", suggest),
+ "like this",
+ format!("{}{}.{}('{}')",
+ if info.eq { "" } else { "!" },
+ snippet_with_applicability(cx, args[0][0].span, "..", &mut applicability),
+ suggest,
+ c.escape_default()),
+ applicability,
+ );
+
+ true
+ } else {
+ false
+ }
+ }
+}
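The `unwrap()` flavour of the same pattern, sketched with illustrative names:

fn chars_cmp_with_unwrap_example(s: &str) {
    // flagged: unwrapping before comparing against a char literal (panics on an empty string)
    let _flagged = s.chars().next().unwrap() == 'h';
    // suggested replacement
    let _suggested = s.starts_with('h');
}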
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_last_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_last_cmp.rs
new file mode 100644
index 000000000..07bbc5ca1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_last_cmp.rs
@@ -0,0 +1,13 @@
+use crate::methods::chars_cmp;
+use rustc_lint::LateContext;
+
+use super::CHARS_LAST_CMP;
+
+/// Checks for the `CHARS_LAST_CMP` lint.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, info: &crate::methods::BinaryExprInfo<'_>) -> bool {
+ if chars_cmp::check(cx, info, &["chars", "last"], CHARS_LAST_CMP, "ends_with") {
+ true
+ } else {
+ chars_cmp::check(cx, info, &["chars", "next_back"], CHARS_LAST_CMP, "ends_with")
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_last_cmp_with_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/chars_last_cmp_with_unwrap.rs
new file mode 100644
index 000000000..c29ee0ec8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_last_cmp_with_unwrap.rs
@@ -0,0 +1,13 @@
+use crate::methods::chars_cmp_with_unwrap;
+use rustc_lint::LateContext;
+
+use super::CHARS_LAST_CMP;
+
+/// Checks for the `CHARS_LAST_CMP` lint with `unwrap()`.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, info: &crate::methods::BinaryExprInfo<'_>) -> bool {
+ if chars_cmp_with_unwrap::check(cx, info, &["chars", "last", "unwrap"], CHARS_LAST_CMP, "ends_with") {
+ true
+ } else {
+ chars_cmp_with_unwrap::check(cx, info, &["chars", "next_back", "unwrap"], CHARS_LAST_CMP, "ends_with")
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_next_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_next_cmp.rs
new file mode 100644
index 000000000..a6701d883
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_next_cmp.rs
@@ -0,0 +1,8 @@
+use rustc_lint::LateContext;
+
+use super::CHARS_NEXT_CMP;
+
+/// Checks for the `CHARS_NEXT_CMP` lint.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, info: &crate::methods::BinaryExprInfo<'_>) -> bool {
+ crate::methods::chars_cmp::check(cx, info, &["chars", "next"], CHARS_NEXT_CMP, "starts_with")
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_next_cmp_with_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/chars_next_cmp_with_unwrap.rs
new file mode 100644
index 000000000..28ede28e9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_next_cmp_with_unwrap.rs
@@ -0,0 +1,8 @@
+use rustc_lint::LateContext;
+
+use super::CHARS_NEXT_CMP;
+
+/// Checks for the `CHARS_NEXT_CMP` lint with `unwrap()`.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, info: &crate::methods::BinaryExprInfo<'_>) -> bool {
+ crate::methods::chars_cmp_with_unwrap::check(cx, info, &["chars", "next", "unwrap"], CHARS_NEXT_CMP, "starts_with")
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
new file mode 100644
index 000000000..0b38a0720
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
@@ -0,0 +1,132 @@
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::get_parent_node;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::sugg;
+use clippy_utils::ty::is_copy;
+use rustc_errors::Applicability;
+use rustc_hir::{BindingAnnotation, Expr, ExprKind, MatchSource, Node, PatKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, adjustment::Adjust};
+use rustc_span::symbol::{sym, Symbol};
+
+use super::CLONE_DOUBLE_REF;
+use super::CLONE_ON_COPY;
+
+/// Checks for the `CLONE_ON_COPY` lint.
+#[allow(clippy::too_many_lines)]
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: Symbol, args: &[Expr<'_>]) {
+ let arg = match args {
+ [arg] if method_name == sym::clone => arg,
+ _ => return,
+ };
+ if cx
+ .typeck_results()
+ .type_dependent_def_id(expr.hir_id)
+ .and_then(|id| cx.tcx.trait_of_item(id))
+ .zip(cx.tcx.lang_items().clone_trait())
+ .map_or(true, |(x, y)| x != y)
+ {
+ return;
+ }
+ let arg_adjustments = cx.typeck_results().expr_adjustments(arg);
+ let arg_ty = arg_adjustments
+ .last()
+ .map_or_else(|| cx.typeck_results().expr_ty(arg), |a| a.target);
+
+ let ty = cx.typeck_results().expr_ty(expr);
+ if let ty::Ref(_, inner, _) = arg_ty.kind() {
+ if let ty::Ref(_, innermost, _) = inner.kind() {
+ span_lint_and_then(
+ cx,
+ CLONE_DOUBLE_REF,
+ expr.span,
+ &format!(
+ "using `clone` on a double-reference; \
+ this will copy the reference of type `{}` instead of cloning the inner type",
+ ty
+ ),
+ |diag| {
+ if let Some(snip) = sugg::Sugg::hir_opt(cx, arg) {
+ let mut ty = innermost;
+ let mut n = 0;
+ while let ty::Ref(_, inner, _) = ty.kind() {
+ ty = inner;
+ n += 1;
+ }
+ let refs = "&".repeat(n + 1);
+ let derefs = "*".repeat(n);
+ let explicit = format!("<{}{}>::clone({})", refs, ty, snip);
+ diag.span_suggestion(
+ expr.span,
+ "try dereferencing it",
+ format!("{}({}{}).clone()", refs, derefs, snip.deref()),
+ Applicability::MaybeIncorrect,
+ );
+ diag.span_suggestion(
+ expr.span,
+ "or try being explicit if you are sure, that you want to clone a reference",
+ explicit,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ },
+ );
+ return; // don't report clone_on_copy
+ }
+ }
+
+ if is_copy(cx, ty) {
+ let parent_is_suffix_expr = match get_parent_node(cx.tcx, expr.hir_id) {
+ Some(Node::Expr(parent)) => match parent.kind {
+ // &*x is a nop, &x.clone() is not
+ ExprKind::AddrOf(..) => return,
+ // (*x).func() is useless, x.clone().func() can work in case func borrows self
+ ExprKind::MethodCall(_, [self_arg, ..], _)
+ if expr.hir_id == self_arg.hir_id && ty != cx.typeck_results().expr_ty_adjusted(expr) =>
+ {
+ return;
+ },
+ ExprKind::MethodCall(_, [self_arg, ..], _) if expr.hir_id == self_arg.hir_id => true,
+ ExprKind::Match(_, _, MatchSource::TryDesugar | MatchSource::AwaitDesugar)
+ | ExprKind::Field(..)
+ | ExprKind::Index(..) => true,
+ _ => false,
+ },
+ // local binding capturing a reference
+ Some(Node::Local(l))
+ if matches!(
+ l.pat.kind,
+ PatKind::Binding(BindingAnnotation::Ref | BindingAnnotation::RefMut, ..)
+ ) =>
+ {
+ return;
+ },
+ _ => false,
+ };
+
+ let mut app = Applicability::MachineApplicable;
+ let snip = snippet_with_context(cx, arg.span, expr.span.ctxt(), "_", &mut app).0;
+
+ let deref_count = arg_adjustments
+ .iter()
+ .take_while(|adj| matches!(adj.kind, Adjust::Deref(_)))
+ .count();
+ let (help, sugg) = if deref_count == 0 {
+ ("try removing the `clone` call", snip.into())
+ } else if parent_is_suffix_expr {
+ ("try dereferencing it", format!("({}{})", "*".repeat(deref_count), snip))
+ } else {
+ ("try dereferencing it", format!("{}{}", "*".repeat(deref_count), snip))
+ };
+
+ span_lint_and_sugg(
+ cx,
+ CLONE_ON_COPY,
+ expr.span,
+ &format!("using `clone` on type `{}` which implements the `Copy` trait", ty),
+ help,
+ sugg,
+ app,
+ );
+ }
+}
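A minimal sketch (not part of the patch) of the two cases handled above, `CLONE_ON_COPY` and `CLONE_DOUBLE_REF`:

fn clone_on_copy_example() {
    let x = 5_i32;
    // flagged by CLONE_ON_COPY: `i32` is `Copy`, so `clone` is redundant
    let _y = x.clone();
    // suggested: just use the value (a plain copy)
    let _z = x;

    let n = 1_u32;
    let double_ref: &&u32 = &&n;
    // flagged by CLONE_DOUBLE_REF: this copies the `&u32`, not the inner `u32`
    let _r: &u32 = double_ref.clone();
}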
diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
new file mode 100644
index 000000000..6417bc813
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
@@ -0,0 +1,43 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::paths;
+use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::ty::{is_type_diagnostic_item, match_type};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::symbol::{sym, Symbol};
+
+use super::CLONE_ON_REF_PTR;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, method_name: Symbol, args: &[hir::Expr<'_>]) {
+ if !(args.len() == 1 && method_name == sym::clone) {
+ return;
+ }
+ let arg = &args[0];
+ let obj_ty = cx.typeck_results().expr_ty(arg).peel_refs();
+
+ if let ty::Adt(_, subst) = obj_ty.kind() {
+ let caller_type = if is_type_diagnostic_item(cx, obj_ty, sym::Rc) {
+ "Rc"
+ } else if is_type_diagnostic_item(cx, obj_ty, sym::Arc) {
+ "Arc"
+ } else if match_type(cx, obj_ty, &paths::WEAK_RC) || match_type(cx, obj_ty, &paths::WEAK_ARC) {
+ "Weak"
+ } else {
+ return;
+ };
+
+ let snippet = snippet_with_macro_callsite(cx, arg.span, "..");
+
+ span_lint_and_sugg(
+ cx,
+ CLONE_ON_REF_PTR,
+ expr.span,
+ "using `.clone()` on a ref-counted pointer",
+ "try this",
+ format!("{}::<{}>::clone(&{})", caller_type, subst.type_at(0), snippet),
+ Applicability::Unspecified, // Sometimes unnecessary ::<_> after Rc/Arc/Weak
+ );
+ }
+}
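For reference, a sketch of the flagged pattern (the lint itself emits a fully qualified form such as `Rc::<String>::clone(&rc)`; names here are illustrative):

use std::rc::Rc;

fn clone_on_ref_ptr_example(rc: &Rc<String>) {
    // flagged: `.clone()` on a ref-counted pointer reads like a deep clone
    let _a = rc.clone();
    // suggested shape: make the pointer clone explicit
    let _b = Rc::clone(rc);
}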
diff --git a/src/tools/clippy/clippy_lints/src/methods/cloned_instead_of_copied.rs b/src/tools/clippy/clippy_lints/src/methods/cloned_instead_of_copied.rs
new file mode 100644
index 000000000..e9aeab2d5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/cloned_instead_of_copied.rs
@@ -0,0 +1,45 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::{get_iterator_item_ty, is_copy};
+use clippy_utils::{is_trait_method, meets_msrv, msrvs};
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_span::{sym, Span};
+
+use super::CLONED_INSTEAD_OF_COPIED;
+
+pub fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span: Span, msrv: Option<RustcVersion>) {
+ let recv_ty = cx.typeck_results().expr_ty_adjusted(recv);
+ let inner_ty = match recv_ty.kind() {
+ // `Option<T>` -> `T`
+ ty::Adt(adt, subst)
+ if cx.tcx.is_diagnostic_item(sym::Option, adt.did()) && meets_msrv(msrv, msrvs::OPTION_COPIED) =>
+ {
+ subst.type_at(0)
+ },
+ _ if is_trait_method(cx, expr, sym::Iterator) && meets_msrv(msrv, msrvs::ITERATOR_COPIED) => {
+ match get_iterator_item_ty(cx, recv_ty) {
+ // <T as Iterator>::Item
+ Some(ty) => ty,
+ _ => return,
+ }
+ },
+ _ => return,
+ };
+ match inner_ty.kind() {
+ // &T where T: Copy
+ ty::Ref(_, ty, _) if is_copy(cx, *ty) => {},
+ _ => return,
+ };
+ span_lint_and_sugg(
+ cx,
+ CLONED_INSTEAD_OF_COPIED,
+ span,
+ "used `cloned` where `copied` could be used instead",
+ "try",
+ "copied".into(),
+ Applicability::MachineApplicable,
+ );
+}
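A minimal sketch of the iterator case (the `Option<&T>` case is analogous); names are illustrative:

fn cloned_instead_of_copied_example(v: &[u32]) {
    // flagged: items are `&u32` and `u32: Copy`, so `copied` is the clearer choice
    let _a: Vec<u32> = v.iter().cloned().collect();
    // suggested
    let _b: Vec<u32> = v.iter().copied().collect();
}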
diff --git a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
new file mode 100644
index 000000000..570a1b873
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
@@ -0,0 +1,60 @@
+use super::ERR_EXPECT;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{meets_msrv, msrvs, ty::is_type_diagnostic_item};
+use rustc_errors::Applicability;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_middle::ty::Ty;
+use rustc_semver::RustcVersion;
+use rustc_span::{sym, Span};
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ _expr: &rustc_hir::Expr<'_>,
+ recv: &rustc_hir::Expr<'_>,
+ msrv: Option<RustcVersion>,
+ expect_span: Span,
+ err_span: Span,
+) {
+ if_chain! {
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+ // Test the version to make sure the lint can be shown (`expect_err` was
+ // introduced in Rust 1.17.0: https://github.com/rust-lang/rust/pull/38982)
+ if meets_msrv(msrv, msrvs::EXPECT_ERR);
+
+ // Grabs the `Result<T, E>` type
+ let result_type = cx.typeck_results().expr_ty(recv);
+ // Tests if the `T` type in a `Result<T, E>` could be extracted
+ if let Some(data_type) = get_data_type(cx, result_type);
+ // Tests if the `T` type in a `Result<T, E>` implements `Debug`
+ if has_debug_impl(data_type, cx);
+
+ then {
+ span_lint_and_sugg(
+ cx,
+ ERR_EXPECT,
+ err_span.to(expect_span),
+ "called `.err().expect()` on a `Result` value",
+ "try",
+ "expect_err".to_string(),
+ Applicability::MachineApplicable
+ );
+ }
+ };
+}
+
+/// Given a `Result<T, E>` type, return its data (`T`).
+fn get_data_type<'a>(cx: &LateContext<'_>, ty: Ty<'a>) -> Option<Ty<'a>> {
+ match ty.kind() {
+ ty::Adt(_, substs) if is_type_diagnostic_item(cx, ty, sym::Result) => substs.types().next(),
+ _ => None,
+ }
+}
+
+/// Given a type, verify whether the `Debug` trait has been implemented for it
+fn has_debug_impl<'tcx>(ty: Ty<'tcx>, cx: &LateContext<'tcx>) -> bool {
+ cx.tcx
+ .get_diagnostic_item(sym::Debug)
+ .map_or(false, |debug| implements_trait(cx, ty, debug, &[]))
+}
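Roughly the pattern this check targets, assuming the `Ok` type implements `Debug` (illustrative sketch):

fn err_expect_example(res: Result<u32, String>) -> String {
    // flagged: `.err().expect(..)` on a `Result` whose `Ok` type (`u32`) implements `Debug`;
    // the suggestion rewrites this to `res.expect_err("boom")`
    res.err().expect("boom")
}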
diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
new file mode 100644
index 000000000..6f2307d8f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
@@ -0,0 +1,173 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::{root_macro_call_first_node, FormatArgsExpn};
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Span;
+use rustc_span::symbol::sym;
+use std::borrow::Cow;
+
+use super::EXPECT_FUN_CALL;
+
+/// Checks for the `EXPECT_FUN_CALL` lint.
+#[allow(clippy::too_many_lines)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+ method_span: Span,
+ name: &str,
+ args: &'tcx [hir::Expr<'tcx>],
+) {
+ // Strip `&`, `as_ref()` and `as_str()` off `arg` until we're left with either a `String` or
+ // `&str`
+ fn get_arg_root<'a>(cx: &LateContext<'_>, arg: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> {
+ let mut arg_root = arg;
+ loop {
+ arg_root = match &arg_root.kind {
+ hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, expr) => expr,
+ hir::ExprKind::MethodCall(method_name, call_args, _) => {
+ if call_args.len() == 1
+ && (method_name.ident.name == sym::as_str || method_name.ident.name == sym::as_ref)
+ && {
+ let arg_type = cx.typeck_results().expr_ty(&call_args[0]);
+ let base_type = arg_type.peel_refs();
+ *base_type.kind() == ty::Str || is_type_diagnostic_item(cx, base_type, sym::String)
+ }
+ {
+ &call_args[0]
+ } else {
+ break;
+ }
+ },
+ _ => break,
+ };
+ }
+ arg_root
+ }
+
+ // Only `&'static str` or `String` can be used directly in the `panic!`. Other types should be
+ // converted to string.
+ fn requires_to_string(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool {
+ let arg_ty = cx.typeck_results().expr_ty(arg);
+ if is_type_diagnostic_item(cx, arg_ty, sym::String) {
+ return false;
+ }
+ if let ty::Ref(_, ty, ..) = arg_ty.kind() {
+ if *ty.kind() == ty::Str && can_be_static_str(cx, arg) {
+ return false;
+ }
+ };
+ true
+ }
+
+ // Check if an expression could have type `&'static str`, knowing that it
+ // has type `&str` for some lifetime.
+ fn can_be_static_str(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool {
+ match arg.kind {
+ hir::ExprKind::Lit(_) => true,
+ hir::ExprKind::Call(fun, _) => {
+ if let hir::ExprKind::Path(ref p) = fun.kind {
+ match cx.qpath_res(p, fun.hir_id) {
+ hir::def::Res::Def(hir::def::DefKind::Fn | hir::def::DefKind::AssocFn, def_id) => matches!(
+ cx.tcx.fn_sig(def_id).output().skip_binder().kind(),
+ ty::Ref(re, ..) if re.is_static(),
+ ),
+ _ => false,
+ }
+ } else {
+ false
+ }
+ },
+ hir::ExprKind::MethodCall(..) => {
+ cx.typeck_results()
+ .type_dependent_def_id(arg.hir_id)
+ .map_or(false, |method_id| {
+ matches!(
+ cx.tcx.fn_sig(method_id).output().skip_binder().kind(),
+ ty::Ref(re, ..) if re.is_static()
+ )
+ })
+ },
+ hir::ExprKind::Path(ref p) => matches!(
+ cx.qpath_res(p, arg.hir_id),
+ hir::def::Res::Def(hir::def::DefKind::Const | hir::def::DefKind::Static(_), _)
+ ),
+ _ => false,
+ }
+ }
+
+ fn is_call(node: &hir::ExprKind<'_>) -> bool {
+ match node {
+ hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, expr) => {
+ is_call(&expr.kind)
+ },
+ hir::ExprKind::Call(..)
+ | hir::ExprKind::MethodCall(..)
+ // These variants are debatable or require further examination
+ | hir::ExprKind::If(..)
+ | hir::ExprKind::Match(..)
+ | hir::ExprKind::Block{ .. } => true,
+ _ => false,
+ }
+ }
+
+ if args.len() != 2 || name != "expect" || !is_call(&args[1].kind) {
+ return;
+ }
+
+ let receiver_type = cx.typeck_results().expr_ty_adjusted(&args[0]);
+ let closure_args = if is_type_diagnostic_item(cx, receiver_type, sym::Option) {
+ "||"
+ } else if is_type_diagnostic_item(cx, receiver_type, sym::Result) {
+ "|_|"
+ } else {
+ return;
+ };
+
+ let arg_root = get_arg_root(cx, &args[1]);
+
+ let span_replace_word = method_span.with_hi(expr.span.hi());
+
+ let mut applicability = Applicability::MachineApplicable;
+
+ // Special handling for `format!` as arg_root
+ if let Some(macro_call) = root_macro_call_first_node(cx, arg_root) {
+ if !cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) {
+ return;
+ }
+ let Some(format_args) = FormatArgsExpn::find_nested(cx, arg_root, macro_call.expn) else { return };
+ let span = format_args.inputs_span();
+ let sugg = snippet_with_applicability(cx, span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ EXPECT_FUN_CALL,
+ span_replace_word,
+ &format!("use of `{}` followed by a function call", name),
+ "try this",
+ format!("unwrap_or_else({} panic!({}))", closure_args, sugg),
+ applicability,
+ );
+ return;
+ }
+
+ let mut arg_root_snippet: Cow<'_, _> = snippet_with_applicability(cx, arg_root.span, "..", &mut applicability);
+ if requires_to_string(cx, arg_root) {
+ arg_root_snippet.to_mut().push_str(".to_string()");
+ }
+
+ span_lint_and_sugg(
+ cx,
+ EXPECT_FUN_CALL,
+ span_replace_word,
+ &format!("use of `{}` followed by a function call", name),
+ "try this",
+ format!(
+ "unwrap_or_else({} {{ panic!(\"{{}}\", {}) }})",
+ closure_args, arg_root_snippet
+ ),
+ applicability,
+ );
+}
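A short sketch of the eager-formatting pattern flagged above and the lazy rewrite it suggests (names are illustrative):

fn expect_fun_call_example(opt: Option<u32>, name: &str) {
    // flagged: the `format!` argument is built even when the value is `Some`
    let _a = opt.expect(&format!("no value for {}", name));
    // suggested: defer the formatting until the panic actually happens
    let _b = opt.unwrap_or_else(|| panic!("no value for {}", name));
}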
diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_used.rs b/src/tools/clippy/clippy_lints/src/methods/expect_used.rs
new file mode 100644
index 000000000..fbc3348f1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/expect_used.rs
@@ -0,0 +1,36 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_in_test_function;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::EXPECT_USED;
+
+/// lint use of `expect()` for `Option`s and `Result`s
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, allow_expect_in_tests: bool) {
+ let obj_ty = cx.typeck_results().expr_ty(recv).peel_refs();
+
+ let mess = if is_type_diagnostic_item(cx, obj_ty, sym::Option) {
+ Some((EXPECT_USED, "an Option", "None"))
+ } else if is_type_diagnostic_item(cx, obj_ty, sym::Result) {
+ Some((EXPECT_USED, "a Result", "Err"))
+ } else {
+ None
+ };
+
+ if allow_expect_in_tests && is_in_test_function(cx.tcx, expr.hir_id) {
+ return;
+ }
+
+ if let Some((lint, kind, none_value)) = mess {
+ span_lint_and_help(
+ cx,
+ lint,
+ expr.span,
+ &format!("used `expect()` on `{}` value", kind,),
+ None,
+ &format!("if this value is an `{}`, it will panic", none_value,),
+ );
+ }
+}
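A minimal example of code this restriction lint reports (sketch, illustrative name):

fn expect_used_example(opt: Option<u32>) -> u32 {
    // flagged: if `opt` is `None`, this panics
    opt.expect("value must be configured")
}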
diff --git a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
new file mode 100644
index 000000000..a15fe6094
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
@@ -0,0 +1,45 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, LangItem};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::EXTEND_WITH_DRAIN;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) {
+ let ty = cx.typeck_results().expr_ty(recv).peel_refs();
+ if_chain! {
+ if is_type_diagnostic_item(cx, ty, sym::Vec);
+ // check source object
+ if let ExprKind::MethodCall(src_method, [drain_vec, drain_arg], _) = &arg.kind;
+ if src_method.ident.as_str() == "drain";
+ let src_ty = cx.typeck_results().expr_ty(drain_vec);
+ // check whether the source is already a mutable reference, to decide if `&mut ` is needed in the suggestion
+ let immutable = src_ty.is_mutable_ptr();
+ let src_ty = src_ty.peel_refs();
+ if is_type_diagnostic_item(cx, src_ty, sym::Vec);
+ // check that the drain range is the full range (`..`)
+ if let src_ty_range = cx.typeck_results().expr_ty(drain_arg).peel_refs();
+ if is_type_lang_item(cx, src_ty_range, LangItem::RangeFull);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ EXTEND_WITH_DRAIN,
+ expr.span,
+ "use of `extend` instead of `append` for adding the full range of a second vector",
+ "try this",
+ format!(
+ "{}.append({}{})",
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability),
+ if immutable { "" } else { "&mut " },
+ snippet_with_applicability(cx, drain_vec.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+}
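For orientation, the drain-into-extend pattern flagged above, sketched with illustrative names:

fn extend_with_drain_example(dst: &mut Vec<u32>, src: &mut Vec<u32>) {
    // flagged: draining the full range of one `Vec` into another
    dst.extend(src.drain(..));
    // suggested: `append` moves the elements and empties `src` in one call
    dst.append(src);
}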
diff --git a/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs b/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs
new file mode 100644
index 000000000..7b2967feb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/filetype_is_file.rs
@@ -0,0 +1,41 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::match_type;
+use clippy_utils::{get_parent_expr, paths};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+
+use super::FILETYPE_IS_FILE;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
+ let ty = cx.typeck_results().expr_ty(recv);
+
+ if !match_type(cx, ty, &paths::FILE_TYPE) {
+ return;
+ }
+
+ let span: Span;
+ let verb: &str;
+ let lint_unary: &str;
+ let help_unary: &str;
+ if_chain! {
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let hir::ExprKind::Unary(op, _) = parent.kind;
+ if op == hir::UnOp::Not;
+ then {
+ lint_unary = "!";
+ verb = "denies";
+ help_unary = "";
+ span = parent.span;
+ } else {
+ lint_unary = "";
+ verb = "covers";
+ help_unary = "!";
+ span = expr.span;
+ }
+ }
+ let lint_msg = format!("`{}FileType::is_file()` only {} regular files", lint_unary, verb);
+ let help_msg = format!("use `{}FileType::is_dir()` instead", help_unary);
+ span_lint_and_help(cx, FILETYPE_IS_FILE, span, &lint_msg, None, &help_msg);
+}
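A sketch of the negated case handled above (illustrative name, not part of the patch):

fn filetype_is_file_example(ft: std::fs::FileType) -> bool {
    // flagged: `!is_file()` only "denies" regular files, so symlinks etc. still pass;
    // the help suggests `ft.is_dir()` when "is a directory" is what is actually meant
    !ft.is_file()
}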
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
new file mode 100644
index 000000000..692e22a7c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
@@ -0,0 +1,197 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{indent_of, reindent_multiline, snippet};
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_trait_method, path_to_local_id, peel_blocks, SpanlessEq};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::def::Res;
+use rustc_hir::{Closure, Expr, ExprKind, PatKind, PathSegment, QPath, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty::adjustment::Adjust;
+use rustc_span::source_map::Span;
+use rustc_span::symbol::{sym, Symbol};
+use std::borrow::Cow;
+
+use super::MANUAL_FILTER_MAP;
+use super::MANUAL_FIND_MAP;
+use super::OPTION_FILTER_MAP;
+
+fn is_method<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, method_name: Symbol) -> bool {
+ match &expr.kind {
+ hir::ExprKind::Path(QPath::TypeRelative(_, mname)) => mname.ident.name == method_name,
+ hir::ExprKind::Path(QPath::Resolved(_, segments)) => {
+ segments.segments.last().unwrap().ident.name == method_name
+ },
+ hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
+ let body = cx.tcx.hir().body(body);
+ let closure_expr = peel_blocks(&body.value);
+ let arg_id = body.params[0].pat.hir_id;
+ match closure_expr.kind {
+ hir::ExprKind::MethodCall(hir::PathSegment { ident, .. }, args, _) => {
+ if_chain! {
+ if ident.name == method_name;
+ if let hir::ExprKind::Path(path) = &args[0].kind;
+ if let Res::Local(ref local) = cx.qpath_res(path, args[0].hir_id);
+ then {
+ return arg_id == *local
+ }
+ }
+ false
+ },
+ _ => false,
+ }
+ },
+ _ => false,
+ }
+}
+
+fn is_option_filter_map<'tcx>(cx: &LateContext<'tcx>, filter_arg: &hir::Expr<'_>, map_arg: &hir::Expr<'_>) -> bool {
+ is_method(cx, map_arg, sym::unwrap) && is_method(cx, filter_arg, sym!(is_some))
+}
+
+/// is `filter(|x| x.is_some()).map(|x| x.unwrap())`
+fn is_filter_some_map_unwrap(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ filter_recv: &hir::Expr<'_>,
+ filter_arg: &hir::Expr<'_>,
+ map_arg: &hir::Expr<'_>,
+) -> bool {
+ let iterator = is_trait_method(cx, expr, sym::Iterator);
+ let option = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(filter_recv), sym::Option);
+
+ (iterator || option) && is_option_filter_map(cx, filter_arg, map_arg)
+}
+
+/// lint use of `filter().map()` or `find().map()` for `Iterators`
+#[allow(clippy::too_many_arguments)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+ filter_recv: &hir::Expr<'_>,
+ filter_arg: &hir::Expr<'_>,
+ filter_span: Span,
+ map_recv: &hir::Expr<'_>,
+ map_arg: &hir::Expr<'_>,
+ map_span: Span,
+ is_find: bool,
+) {
+ if is_filter_some_map_unwrap(cx, expr, filter_recv, filter_arg, map_arg) {
+ span_lint_and_sugg(
+ cx,
+ OPTION_FILTER_MAP,
+ filter_span.with_hi(expr.span.hi()),
+ "`filter` for `Some` followed by `unwrap`",
+ "consider using `flatten` instead",
+ reindent_multiline(Cow::Borrowed("flatten()"), true, indent_of(cx, map_span)).into_owned(),
+ Applicability::MachineApplicable,
+ );
+
+ return;
+ }
+
+ if_chain! {
+ if is_trait_method(cx, map_recv, sym::Iterator);
+
+ // filter(|x| ...is_some())...
+ if let ExprKind::Closure(&Closure { body: filter_body_id, .. }) = filter_arg.kind;
+ let filter_body = cx.tcx.hir().body(filter_body_id);
+ if let [filter_param] = filter_body.params;
+ // optional ref pattern: `filter(|&x| ..)`
+ let (filter_pat, is_filter_param_ref) = if let PatKind::Ref(ref_pat, _) = filter_param.pat.kind {
+ (ref_pat, true)
+ } else {
+ (filter_param.pat, false)
+ };
+ // closure ends with is_some() or is_ok()
+ if let PatKind::Binding(_, filter_param_id, _, None) = filter_pat.kind;
+ if let ExprKind::MethodCall(path, [filter_arg], _) = filter_body.value.kind;
+ if let Some(opt_ty) = cx.typeck_results().expr_ty(filter_arg).peel_refs().ty_adt_def();
+ if let Some(is_result) = if cx.tcx.is_diagnostic_item(sym::Option, opt_ty.did()) {
+ Some(false)
+ } else if cx.tcx.is_diagnostic_item(sym::Result, opt_ty.did()) {
+ Some(true)
+ } else {
+ None
+ };
+ if path.ident.name.as_str() == if is_result { "is_ok" } else { "is_some" };
+
+ // ...map(|x| ...unwrap())
+ if let ExprKind::Closure(&Closure { body: map_body_id, .. }) = map_arg.kind;
+ let map_body = cx.tcx.hir().body(map_body_id);
+ if let [map_param] = map_body.params;
+ if let PatKind::Binding(_, map_param_id, map_param_ident, None) = map_param.pat.kind;
+ // closure ends with expect() or unwrap()
+ if let ExprKind::MethodCall(seg, [map_arg, ..], _) = map_body.value.kind;
+ if matches!(seg.ident.name, sym::expect | sym::unwrap | sym::unwrap_or);
+
+ // .filter(..).map(|y| f(y).copied().unwrap())
+ // ~~~~
+ let map_arg_peeled = match map_arg.kind {
+ ExprKind::MethodCall(method, [original_arg], _) if acceptable_methods(method) => {
+ original_arg
+ },
+ _ => map_arg,
+ };
+
+ // .filter(|x| x.is_some()).map(|y| y[.acceptable_method()].unwrap())
+ let simple_equal = path_to_local_id(filter_arg, filter_param_id)
+ && path_to_local_id(map_arg_peeled, map_param_id);
+
+ let eq_fallback = |a: &Expr<'_>, b: &Expr<'_>| {
+ // in `filter(|x| ..)`, replace `*x` with `x`
+ let a_path = if_chain! {
+ if !is_filter_param_ref;
+ if let ExprKind::Unary(UnOp::Deref, expr_path) = a.kind;
+ then { expr_path } else { a }
+ };
+ // let the filter closure arg and the map closure arg be equal
+ path_to_local_id(a_path, filter_param_id)
+ && path_to_local_id(b, map_param_id)
+ && cx.typeck_results().expr_ty_adjusted(a) == cx.typeck_results().expr_ty_adjusted(b)
+ };
+
+ if simple_equal || SpanlessEq::new(cx).expr_fallback(eq_fallback).eq_expr(filter_arg, map_arg_peeled);
+ then {
+ let span = filter_span.with_hi(expr.span.hi());
+ let (filter_name, lint) = if is_find {
+ ("find", MANUAL_FIND_MAP)
+ } else {
+ ("filter", MANUAL_FILTER_MAP)
+ };
+ let msg = format!("`{filter_name}(..).map(..)` can be simplified as `{filter_name}_map(..)`");
+ let (to_opt, deref) = if is_result {
+ (".ok()", String::new())
+ } else {
+ let derefs = cx.typeck_results()
+ .expr_adjustments(map_arg)
+ .iter()
+ .filter(|adj| matches!(adj.kind, Adjust::Deref(_)))
+ .count();
+
+ ("", "*".repeat(derefs))
+ };
+ let sugg = format!(
+ "{filter_name}_map(|{map_param_ident}| {deref}{}{to_opt})",
+ snippet(cx, map_arg.span, ".."),
+ );
+ span_lint_and_sugg(cx, lint, span, &msg, "try", sugg, Applicability::MachineApplicable);
+ }
+ }
+}
+
+fn acceptable_methods(method: &PathSegment<'_>) -> bool {
+ let methods: [Symbol; 8] = [
+ sym::clone,
+ sym::as_ref,
+ sym!(copied),
+ sym!(cloned),
+ sym!(as_deref),
+ sym!(as_mut),
+ sym!(as_deref_mut),
+ sym!(to_owned),
+ ];
+
+ methods.contains(&method.ident.name)
+}
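Two sketches of the patterns handled above, one per lint (illustrative names, not part of the patch):

fn filter_map_examples(v: Vec<Option<u32>>, words: Vec<String>) {
    // flagged by OPTION_FILTER_MAP; the suggestion is simply `.flatten()`
    let _a: Vec<u32> = v.into_iter().filter(|o| o.is_some()).map(|o| o.unwrap()).collect();

    // flagged by MANUAL_FILTER_MAP: the tested and unwrapped expressions match,
    // so the suggestion is `.filter_map(|s| s.parse::<u32>().ok())`
    let _b: Vec<u32> = words
        .iter()
        .filter(|s| s.parse::<u32>().is_ok())
        .map(|s| s.parse::<u32>().unwrap())
        .collect();
}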
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs
new file mode 100644
index 000000000..d1b5e945d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs
@@ -0,0 +1,22 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::{is_expr_identity_function, is_trait_method};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::{source_map::Span, sym};
+
+use super::FILTER_MAP_IDENTITY;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, filter_map_arg: &hir::Expr<'_>, filter_map_span: Span) {
+ if is_trait_method(cx, expr, sym::Iterator) && is_expr_identity_function(cx, filter_map_arg) {
+ span_lint_and_sugg(
+ cx,
+ FILTER_MAP_IDENTITY,
+ filter_map_span.with_hi(expr.span.hi()),
+ "use of `filter_map` with an identity function",
+ "try",
+ "flatten()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+}
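A minimal sketch of the identity case flagged here (illustrative names):

fn filter_map_identity_example(v: Vec<Option<u32>>) {
    // flagged: `filter_map` with an identity closure
    let _a: Vec<u32> = v.iter().copied().filter_map(|x| x).collect();
    // suggested
    let _b: Vec<u32> = v.into_iter().flatten().collect();
}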
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs
new file mode 100644
index 000000000..38ec4d8e3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs
@@ -0,0 +1,42 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::source::snippet;
+use clippy_utils::{is_trait_method, meets_msrv, msrvs};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_semver::RustcVersion;
+use rustc_span::sym;
+
+use super::FILTER_MAP_NEXT;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ arg: &'tcx hir::Expr<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if is_trait_method(cx, expr, sym::Iterator) {
+ if !meets_msrv(msrv, msrvs::ITERATOR_FIND_MAP) {
+ return;
+ }
+
+ let msg = "called `filter_map(..).next()` on an `Iterator`. This is more succinctly expressed by calling \
+ `.find_map(..)` instead";
+ let filter_snippet = snippet(cx, arg.span, "..");
+ if filter_snippet.lines().count() <= 1 {
+ let iter_snippet = snippet(cx, recv.span, "..");
+ span_lint_and_sugg(
+ cx,
+ FILTER_MAP_NEXT,
+ expr.span,
+ msg,
+ "try this",
+ format!("{}.find_map({})", iter_snippet, filter_snippet),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint(cx, FILTER_MAP_NEXT, expr.span, msg);
+ }
+ }
+}
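Roughly the rewrite this check proposes, guarded by the `find_map` MSRV check above (sketch, illustrative names):

fn filter_map_next_example(words: &[&str]) {
    // flagged: `filter_map(..).next()` on an `Iterator`
    let _a: Option<u32> = words.iter().filter_map(|s| s.parse::<u32>().ok()).next();
    // suggested
    let _b: Option<u32> = words.iter().find_map(|s| s.parse::<u32>().ok());
}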
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
new file mode 100644
index 000000000..bcf8d93b6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
@@ -0,0 +1,42 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::source::snippet;
+use clippy_utils::ty::implements_trait;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::FILTER_NEXT;
+
+/// lint use of `filter().next()` for `Iterators`
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ filter_arg: &'tcx hir::Expr<'_>,
+) {
+ // lint if caller of `.filter().next()` is an Iterator
+ let recv_impls_iterator = cx.tcx.get_diagnostic_item(sym::Iterator).map_or(false, |id| {
+ implements_trait(cx, cx.typeck_results().expr_ty(recv), id, &[])
+ });
+ if recv_impls_iterator {
+ let msg = "called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling \
+ `.find(..)` instead";
+ let filter_snippet = snippet(cx, filter_arg.span, "..");
+ if filter_snippet.lines().count() <= 1 {
+ let iter_snippet = snippet(cx, recv.span, "..");
+ // add note if not multi-line
+ span_lint_and_sugg(
+ cx,
+ FILTER_NEXT,
+ expr.span,
+ msg,
+ "try this",
+ format!("{}.find({})", iter_snippet, filter_snippet),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint(cx, FILTER_NEXT, expr.span, msg);
+ }
+ }
+}
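The analogous `filter(..).next()` pattern, sketched with illustrative names:

fn filter_next_example(v: &[u32]) {
    // flagged: `filter(..).next()` on an `Iterator`
    let _a = v.iter().filter(|&&x| x > 10).next();
    // suggested
    let _b = v.iter().find(|&&x| x > 10);
}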
diff --git a/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs
new file mode 100644
index 000000000..6f911d79d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs
@@ -0,0 +1,28 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::{is_expr_identity_function, is_trait_method};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::{source_map::Span, sym};
+
+use super::FLAT_MAP_IDENTITY;
+
+/// lint use of `flat_map` for `Iterators` where `flatten` would be sufficient
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ flat_map_arg: &'tcx hir::Expr<'_>,
+ flat_map_span: Span,
+) {
+ if is_trait_method(cx, expr, sym::Iterator) && is_expr_identity_function(cx, flat_map_arg) {
+ span_lint_and_sugg(
+ cx,
+ FLAT_MAP_IDENTITY,
+ flat_map_span.with_hi(expr.span.hi()),
+ "use of `flat_map` with an identity function",
+ "try",
+ "flatten()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+}
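A minimal sketch of the identity `flat_map` case (illustrative names):

fn flat_map_identity_example(nested: Vec<Vec<u32>>) {
    // flagged: `flat_map` with an identity closure
    let _a: Vec<u32> = nested.iter().cloned().flat_map(|inner| inner).collect();
    // suggested
    let _b: Vec<u32> = nested.into_iter().flatten().collect();
}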
diff --git a/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs b/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs
new file mode 100644
index 000000000..615bde941
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs
@@ -0,0 +1,34 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::{source_map::Span, sym};
+
+use super::FLAT_MAP_OPTION;
+use clippy_utils::ty::is_type_diagnostic_item;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, arg: &'tcx hir::Expr<'_>, span: Span) {
+ if !is_trait_method(cx, expr, sym::Iterator) {
+ return;
+ }
+ let arg_ty = cx.typeck_results().expr_ty_adjusted(arg);
+ let sig = match arg_ty.kind() {
+ ty::Closure(_, substs) => substs.as_closure().sig(),
+ _ if arg_ty.is_fn() => arg_ty.fn_sig(cx.tcx),
+ _ => return,
+ };
+ if !is_type_diagnostic_item(cx, sig.output().skip_binder(), sym::Option) {
+ return;
+ }
+ span_lint_and_sugg(
+ cx,
+ FLAT_MAP_OPTION,
+ span,
+ "used `flat_map` where `filter_map` could be used instead",
+ "try",
+ "filter_map".into(),
+ Applicability::MachineApplicable,
+ );
+}
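A sketch of the `Option`-returning closure case this check rewrites (illustrative names):

fn flat_map_option_example(words: &[&str]) {
    // flagged: the `flat_map` closure returns an `Option`
    let _a: Vec<u32> = words.iter().flat_map(|s| s.parse::<u32>().ok()).collect();
    // suggested
    let _b: Vec<u32> = words.iter().filter_map(|s| s.parse::<u32>().ok()).collect();
}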
diff --git a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs
new file mode 100644
index 000000000..6436e28a6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs
@@ -0,0 +1,83 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{is_expr_path_def_path, paths, sugg};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty::Ty;
+use rustc_span::sym;
+
+use super::FROM_ITER_INSTEAD_OF_COLLECT;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>], func: &hir::Expr<'_>) {
+ if_chain! {
+ if is_expr_path_def_path(cx, func, &paths::FROM_ITERATOR_METHOD);
+ let ty = cx.typeck_results().expr_ty(expr);
+ let arg_ty = cx.typeck_results().expr_ty(&args[0]);
+ if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
+
+ if implements_trait(cx, arg_ty, iter_id, &[]);
+ then {
+ // `expr` implements `FromIterator` trait
+ let iter_expr = sugg::Sugg::hir(cx, &args[0], "..").maybe_par();
+ let turbofish = extract_turbofish(cx, expr, ty);
+ let sugg = format!("{}.collect::<{}>()", iter_expr, turbofish);
+ span_lint_and_sugg(
+ cx,
+ FROM_ITER_INSTEAD_OF_COLLECT,
+ expr.span,
+ "usage of `FromIterator::from_iter`",
+ "use `.collect()` instead of `::from_iter()`",
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+}
+
+fn extract_turbofish(cx: &LateContext<'_>, expr: &hir::Expr<'_>, ty: Ty<'_>) -> String {
+ fn strip_angle_brackets(s: &str) -> Option<&str> {
+ s.strip_prefix('<')?.strip_suffix('>')
+ }
+
+ let call_site = expr.span.source_callsite();
+ if_chain! {
+ if let Some(snippet) = snippet_opt(cx, call_site);
+ let snippet_split = snippet.split("::").collect::<Vec<_>>();
+ if let Some((_, elements)) = snippet_split.split_last();
+
+ then {
+ if_chain! {
+ if let [type_specifier, _] = snippet_split.as_slice();
+ if let Some(type_specifier) = strip_angle_brackets(type_specifier);
+ if let Some((type_specifier, ..)) = type_specifier.split_once(" as ");
+ then {
+ type_specifier.to_string()
+ } else {
+ // is there a type specifier? (i.e.: like `<u32>` in `collections::BTreeSet::<u32>::`)
+ if let Some(type_specifier) = snippet_split.iter().find(|e| strip_angle_brackets(e).is_some()) {
+ // remove the type specifier from the path elements
+ let without_ts = elements.iter().filter_map(|e| {
+ if e == type_specifier { None } else { Some((*e).to_string()) }
+ }).collect::<Vec<_>>();
+ // join and add the type specifier at the end (i.e.: `collections::BTreeSet<u32>`)
+ format!("{}{}", without_ts.join("::"), type_specifier)
+ } else {
+ // type is not explicitly specified so wildcards are needed
+ // i.e.: 2 wildcards in `std::collections::BTreeMap<&i32, &char>`
+ let ty_str = ty.to_string();
+ let start = ty_str.find('<').unwrap_or(0);
+ let end = ty_str.find('>').unwrap_or(ty_str.len());
+ let nb_wildcard = ty_str[start..end].split(',').count();
+ let wildcards = format!("_{}", ", _".repeat(nb_wildcard - 1));
+ format!("{}<{}>", elements.join("::"), wildcards)
+ }
+ }
+ }
+ } else {
+ ty.to_string()
+ }
+ }
+}
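Roughly the rewrite performed here, sketched with illustrative names (the explicit `FromIterator` import matters on editions before 2021, where the trait is not in the prelude):

use std::iter::FromIterator; // needed to call `Vec::from_iter` on pre-2021 editions

fn from_iter_example() {
    let doubled = (0..5).map(|x| x * 2);
    // flagged: calling `from_iter` through the trait
    let _a = Vec::from_iter(doubled.clone());
    // suggested: `.collect()` with a turbofish built by `extract_turbofish`
    let _b = doubled.collect::<Vec<i32>>();
}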
diff --git a/src/tools/clippy/clippy_lints/src/methods/get_last_with_len.rs b/src/tools/clippy/clippy_lints/src/methods/get_last_with_len.rs
new file mode 100644
index 000000000..23368238e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/get_last_with_len.rs
@@ -0,0 +1,55 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::SpanlessEq;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Spanned;
+use rustc_span::sym;
+
+use super::GET_LAST_WITH_LEN;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) {
+ // Argument to "get" is a subtraction
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Sub, ..
+ },
+ lhs,
+ rhs,
+ ) = arg.kind
+
+ // LHS of subtraction is "x.len()"
+ && let ExprKind::MethodCall(lhs_path, [lhs_recv], _) = &lhs.kind
+ && lhs_path.ident.name == sym::len
+
+ // RHS of subtraction is 1
+ && let ExprKind::Lit(rhs_lit) = &rhs.kind
+ && let LitKind::Int(1, ..) = rhs_lit.node
+
+ // check that recv == lhs_recv `recv.get(lhs_recv.len() - 1)`
+ && SpanlessEq::new(cx).eq_expr(recv, lhs_recv)
+ && !recv.can_have_side_effects()
+ {
+ let method = match cx.typeck_results().expr_ty_adjusted(recv).peel_refs().kind() {
+ ty::Adt(def, _) if cx.tcx.is_diagnostic_item(sym::VecDeque, def.did()) => "back",
+ ty::Slice(_) => "last",
+ _ => return,
+ };
+
+ let mut applicability = Applicability::MachineApplicable;
+ let recv_snippet = snippet_with_applicability(cx, recv.span, "_", &mut applicability);
+
+ span_lint_and_sugg(
+ cx,
+ GET_LAST_WITH_LEN,
+ expr.span,
+ &format!("accessing last element with `{recv_snippet}.get({recv_snippet}.len() - 1)`"),
+ "try",
+ format!("{recv_snippet}.{method}()"),
+ applicability,
+ );
+ }
+}
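A minimal sketch of the manual last-element access flagged above (illustrative name; assumes a slice, for `VecDeque` the suggestion is `.back()`):

fn get_last_with_len_example(v: &[u32]) {
    // flagged: manual "last element" access (also underflows on an empty slice)
    let _a = v.get(v.len() - 1);
    // suggested
    let _b = v.last();
}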
diff --git a/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs
new file mode 100644
index 000000000..18e08d6ee
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs
@@ -0,0 +1,87 @@
+use super::utils::derefs_to_slice;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::get_parent_expr;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::GET_UNWRAP;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'tcx>,
+ get_arg: &'tcx hir::Expr<'_>,
+ is_mut: bool,
+) {
+ // Note: we don't want to lint `get_mut().unwrap` for `HashMap` or `BTreeMap`,
+ // because they do not implement `IndexMut`
+ let mut applicability = Applicability::MachineApplicable;
+ let expr_ty = cx.typeck_results().expr_ty(recv);
+ let get_args_str = snippet_with_applicability(cx, get_arg.span, "..", &mut applicability);
+ let mut needs_ref;
+ let caller_type = if derefs_to_slice(cx, recv, expr_ty).is_some() {
+ needs_ref = get_args_str.parse::<usize>().is_ok();
+ "slice"
+ } else if is_type_diagnostic_item(cx, expr_ty, sym::Vec) {
+ needs_ref = get_args_str.parse::<usize>().is_ok();
+ "Vec"
+ } else if is_type_diagnostic_item(cx, expr_ty, sym::VecDeque) {
+ needs_ref = get_args_str.parse::<usize>().is_ok();
+ "VecDeque"
+ } else if !is_mut && is_type_diagnostic_item(cx, expr_ty, sym::HashMap) {
+ needs_ref = true;
+ "HashMap"
+ } else if !is_mut && is_type_diagnostic_item(cx, expr_ty, sym::BTreeMap) {
+ needs_ref = true;
+ "BTreeMap"
+ } else {
+ return; // caller is not a type that we want to lint
+ };
+
+ let mut span = expr.span;
+
+ // Handle the case where the result is immediately dereferenced
+ // by not requiring ref and pulling the dereference into the
+ // suggestion.
+ if_chain! {
+ if needs_ref;
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let hir::ExprKind::Unary(hir::UnOp::Deref, _) = parent.kind;
+ then {
+ needs_ref = false;
+ span = parent.span;
+ }
+ }
+
+ let mut_str = if is_mut { "_mut" } else { "" };
+ let borrow_str = if !needs_ref {
+ ""
+ } else if is_mut {
+ "&mut "
+ } else {
+ "&"
+ };
+
+ span_lint_and_sugg(
+ cx,
+ GET_UNWRAP,
+ span,
+ &format!(
+ "called `.get{0}().unwrap()` on a {1}. Using `[]` is more clear and more concise",
+ mut_str, caller_type
+ ),
+ "try this",
+ format!(
+ "{}{}[{}]",
+ borrow_str,
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability),
+ get_args_str
+ ),
+ applicability,
+ );
+}
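For orientation, the indexing rewrite this check proposes (sketch, illustrative names):

fn get_unwrap_example(v: &mut Vec<u32>) {
    // flagged: `.get(..).unwrap()` on a `Vec`
    let _a = *v.get(0).unwrap();
    // suggested: indexing is clearer and also panics when out of bounds
    let _b = v[0];
    // the `_mut` variant is handled too: `*v.get_mut(0).unwrap() = 1;` becomes:
    v[0] = 1;
}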
diff --git a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
new file mode 100644
index 000000000..9651a52be
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
@@ -0,0 +1,58 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::ty::peel_mid_ty_refs;
+use clippy_utils::{is_diag_item_method, is_diag_trait_item};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::IMPLICIT_CLONE;
+
+pub fn check(cx: &LateContext<'_>, method_name: &str, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
+ if_chain! {
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if is_clone_like(cx, method_name, method_def_id);
+ let return_type = cx.typeck_results().expr_ty(expr);
+ let input_type = cx.typeck_results().expr_ty(recv);
+ let (input_type, ref_count) = peel_mid_ty_refs(input_type);
+ if let Some(ty_name) = input_type.ty_adt_def().map(|adt_def| cx.tcx.item_name(adt_def.did()));
+ if return_type == input_type;
+ then {
+ let mut app = Applicability::MachineApplicable;
+ let recv_snip = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut app).0;
+ span_lint_and_sugg(
+ cx,
+ IMPLICIT_CLONE,
+ expr.span,
+ &format!("implicitly cloning a `{}` by calling `{}` on its dereferenced type", ty_name, method_name),
+ "consider using",
+ if ref_count > 1 {
+ format!("({}{}).clone()", "*".repeat(ref_count - 1), recv_snip)
+ } else {
+ format!("{}.clone()", recv_snip)
+ },
+ app,
+ );
+ }
+ }
+}
+
+/// Returns true if the named method can be used to clone the receiver.
+/// Note that `to_string` is not flagged by `implicit_clone`. So other lints that call
+/// `is_clone_like` and that do flag `to_string` must handle it separately. See, e.g.,
+/// `is_to_owned_like` in `unnecessary_to_owned.rs`.
+pub fn is_clone_like(cx: &LateContext<'_>, method_name: &str, method_def_id: hir::def_id::DefId) -> bool {
+ match method_name {
+ "to_os_string" => is_diag_item_method(cx, method_def_id, sym::OsStr),
+ "to_owned" => is_diag_trait_item(cx, method_def_id, sym::ToOwned),
+ "to_path_buf" => is_diag_item_method(cx, method_def_id, sym::Path),
+ "to_vec" => cx
+ .tcx
+ .impl_of_method(method_def_id)
+ .filter(|&impl_did| cx.tcx.type_of(impl_did).is_slice() && cx.tcx.impl_trait_ref(impl_did).is_none())
+ .is_some(),
+ _ => false,
+ }
+}
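Two sketches of implicit clones this check reports (illustrative names, not part of the patch):

fn implicit_clone_example(s: String, p: std::path::PathBuf) {
    // flagged: `to_owned` on an already-owned `String`; suggestion is `s.clone()`
    let _a = s.to_owned();
    // flagged: `to_path_buf` on a `PathBuf`; suggestion is `p.clone()`
    let _b = p.to_path_buf();
}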
diff --git a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
new file mode 100644
index 000000000..f52170df6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
@@ -0,0 +1,67 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::{is_type_diagnostic_item, walk_ptrs_ty_depth};
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::{sym, Symbol};
+
+use super::INEFFICIENT_TO_STRING;
+
+/// Checks for the `INEFFICIENT_TO_STRING` lint
+pub fn check<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, method_name: Symbol, args: &[hir::Expr<'_>]) {
+ if_chain! {
+ if args.len() == 1 && method_name == sym::to_string;
+ if let Some(to_string_meth_did) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if match_def_path(cx, to_string_meth_did, &paths::TO_STRING_METHOD);
+ if let Some(substs) = cx.typeck_results().node_substs_opt(expr.hir_id);
+ let arg_ty = cx.typeck_results().expr_ty_adjusted(&args[0]);
+ let self_ty = substs.type_at(0);
+ let (deref_self_ty, deref_count) = walk_ptrs_ty_depth(self_ty);
+ if deref_count >= 1;
+ if specializes_tostring(cx, deref_self_ty);
+ then {
+ span_lint_and_then(
+ cx,
+ INEFFICIENT_TO_STRING,
+ expr.span,
+ &format!("calling `to_string` on `{}`", arg_ty),
+ |diag| {
+ diag.help(&format!(
+ "`{}` implements `ToString` through a slower blanket impl, but `{}` has a fast specialization of `ToString`",
+ self_ty, deref_self_ty
+ ));
+ let mut applicability = Applicability::MachineApplicable;
+ let arg_snippet = snippet_with_applicability(cx, args[0].span, "..", &mut applicability);
+ diag.span_suggestion(
+ expr.span,
+ "try dereferencing the receiver",
+ format!("({}{}).to_string()", "*".repeat(deref_count), arg_snippet),
+ applicability,
+ );
+ },
+ );
+ }
+ }
+}
+
+/// Returns whether `ty` specializes `ToString`.
+/// Currently, these are `str`, `String`, and `Cow<'_, str>`.
+fn specializes_tostring(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
+ if let ty::Str = ty.kind() {
+ return true;
+ }
+
+ if is_type_diagnostic_item(cx, ty, sym::String) {
+ return true;
+ }
+
+ if let ty::Adt(adt, substs) = ty.kind() {
+ match_def_path(cx, adt.did(), &paths::COW) && substs.type_at(1).is_str()
+ } else {
+ false
+ }
+}
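A minimal sketch of the slow blanket-impl case flagged here (illustrative name):

fn inefficient_to_string_example(s: &&str) {
    // flagged: `to_string` on `&&str` goes through the generic `Display` blanket impl
    let _a = s.to_string();
    // suggested: dereference so the fast `str` specialization is used
    let _b = (*s).to_string();
}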
diff --git a/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs b/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs
new file mode 100644
index 000000000..7fd3ef1a6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs
@@ -0,0 +1,23 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_trait_method;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::{source_map::Span, sym};
+
+use super::INSPECT_FOR_EACH;
+
+/// lint use of `inspect().for_each()` for `Iterators`
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, inspect_span: Span) {
+ if is_trait_method(cx, expr, sym::Iterator) {
+ let msg = "called `inspect(..).for_each(..)` on an `Iterator`";
+ let hint = "move the code from `inspect(..)` to `for_each(..)` and remove the `inspect(..)`";
+ span_lint_and_help(
+ cx,
+ INSPECT_FOR_EACH,
+ inspect_span.with_hi(expr.span.hi()),
+ msg,
+ None,
+ hint,
+ );
+ }
+}
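Roughly the pattern and the manual rewrite the help text describes (sketch, illustrative names):

fn inspect_for_each_example(v: &[u32]) {
    // flagged: `inspect(..).for_each(..)` on an `Iterator`
    v.iter().inspect(|x| println!("about to use {x}")).for_each(|x| println!("using {x}"));
    // suggested: move the inspection code into the `for_each` closure
    v.iter().for_each(|x| {
        println!("about to use {x}");
        println!("using {x}");
    });
}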
diff --git a/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs b/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs
new file mode 100644
index 000000000..da13b4ba3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/into_iter_on_ref.rs
@@ -0,0 +1,56 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use clippy_utils::ty::has_iter_method;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::{sym, Symbol};
+
+use super::INTO_ITER_ON_REF;
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ method_span: Span,
+ method_name: Symbol,
+ args: &[hir::Expr<'_>],
+) {
+ let self_ty = cx.typeck_results().expr_ty_adjusted(&args[0]);
+ if_chain! {
+ if let ty::Ref(..) = self_ty.kind();
+ if method_name == sym::into_iter;
+ if is_trait_method(cx, expr, sym::IntoIterator);
+ if let Some((kind, method_name)) = ty_has_iter_method(cx, self_ty);
+ then {
+ span_lint_and_sugg(
+ cx,
+ INTO_ITER_ON_REF,
+ method_span,
+ &format!(
+ "this `.into_iter()` call is equivalent to `.{}()` and will not consume the `{}`",
+ method_name, kind,
+ ),
+ "call directly",
+ method_name.to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
+
+fn ty_has_iter_method(cx: &LateContext<'_>, self_ref_ty: Ty<'_>) -> Option<(Symbol, &'static str)> {
+ has_iter_method(cx, self_ref_ty).map(|ty_name| {
+ let mutbl = match self_ref_ty.kind() {
+ ty::Ref(_, _, mutbl) => mutbl,
+ _ => unreachable!(),
+ };
+ let method_name = match mutbl {
+ hir::Mutability::Not => "iter",
+ hir::Mutability::Mut => "iter_mut",
+ };
+ (ty_name, method_name)
+ })
+}
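A minimal sketch of the reference case flagged above (illustrative names):

fn into_iter_on_ref_example(v: &Vec<u32>) {
    // flagged: `into_iter` on a reference only iterates by reference anyway
    let _a: Vec<&u32> = v.into_iter().collect();
    // suggested: call `iter()` (or `iter_mut()` for `&mut` receivers) directly
    let _b: Vec<&u32> = v.iter().collect();
}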
diff --git a/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs b/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs
new file mode 100644
index 000000000..aa176dcc8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs
@@ -0,0 +1,50 @@
+//! Lint for `c.is_digit(10)`
+
+use super::IS_DIGIT_ASCII_RADIX;
+use clippy_utils::{
+ consts::constant_full_int, consts::FullInt, diagnostics::span_lint_and_sugg, meets_msrv, msrvs,
+ source::snippet_with_applicability,
+};
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_semver::RustcVersion;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ self_arg: &'tcx Expr<'_>,
+ radix: &'tcx Expr<'_>,
+ msrv: Option<RustcVersion>,
+) {
+ if !meets_msrv(msrv, msrvs::IS_ASCII_DIGIT) {
+ return;
+ }
+
+ if !cx.typeck_results().expr_ty_adjusted(self_arg).peel_refs().is_char() {
+ return;
+ }
+
+ if let Some(radix_val) = constant_full_int(cx, cx.typeck_results(), radix) {
+ let (num, replacement) = match radix_val {
+ FullInt::S(10) | FullInt::U(10) => (10, "is_ascii_digit"),
+ FullInt::S(16) | FullInt::U(16) => (16, "is_ascii_hexdigit"),
+ _ => return,
+ };
+ let mut applicability = Applicability::MachineApplicable;
+
+ span_lint_and_sugg(
+ cx,
+ IS_DIGIT_ASCII_RADIX,
+ expr.span,
+ &format!("use of `char::is_digit` with literal radix of {}", num),
+ "try",
+ format!(
+ "{}.{}()",
+ snippet_with_applicability(cx, self_arg.span, "..", &mut applicability),
+ replacement
+ ),
+ applicability,
+ );
+ }
+}
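The literal-radix pattern and its rewrite, sketched with illustrative names (guarded by the MSRV check above):

fn is_digit_ascii_radix_example(c: char) {
    // flagged: literal radix 10 / 16
    let _a = c.is_digit(10);
    let _b = c.is_digit(16);
    // suggested
    let _c = c.is_ascii_digit();
    let _d = c.is_ascii_hexdigit();
}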
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs b/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs
new file mode 100644
index 000000000..30d56113c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs
@@ -0,0 +1,31 @@
+use crate::methods::utils::derefs_to_slice;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::ITER_CLONED_COLLECT;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, method_name: &str, expr: &hir::Expr<'_>, recv: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec);
+ if let Some(slice) = derefs_to_slice(cx, recv, cx.typeck_results().expr_ty(recv));
+ if let Some(to_replace) = expr.span.trim_start(slice.span.source_callsite());
+
+ then {
+ span_lint_and_sugg(
+ cx,
+ ITER_CLONED_COLLECT,
+ to_replace,
+ &format!("called `iter().{}().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and \
+ more readable", method_name),
+ "try",
+ ".to_vec()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
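A minimal sketch of the slice-to-`Vec` pattern flagged here (illustrative name):

fn iter_cloned_collect_example(v: &[u32]) {
    // flagged: `.iter().cloned().collect()` into a `Vec` from a slice
    let _a: Vec<u32> = v.iter().cloned().collect();
    // suggested
    let _b: Vec<u32> = v.to_vec();
}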
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_count.rs b/src/tools/clippy/clippy_lints/src/methods/iter_count.rs
new file mode 100644
index 000000000..052be3d8e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_count.rs
@@ -0,0 +1,48 @@
+use super::utils::derefs_to_slice;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::ITER_COUNT;
+
+pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>, recv: &'tcx Expr<'tcx>, iter_method: &str) {
+ let ty = cx.typeck_results().expr_ty(recv);
+ let caller_type = if derefs_to_slice(cx, recv, ty).is_some() {
+ "slice"
+ } else if is_type_diagnostic_item(cx, ty, sym::Vec) {
+ "Vec"
+ } else if is_type_diagnostic_item(cx, ty, sym::VecDeque) {
+ "VecDeque"
+ } else if is_type_diagnostic_item(cx, ty, sym::HashSet) {
+ "HashSet"
+ } else if is_type_diagnostic_item(cx, ty, sym::HashMap) {
+ "HashMap"
+ } else if is_type_diagnostic_item(cx, ty, sym::BTreeMap) {
+ "BTreeMap"
+ } else if is_type_diagnostic_item(cx, ty, sym::BTreeSet) {
+ "BTreeSet"
+ } else if is_type_diagnostic_item(cx, ty, sym::LinkedList) {
+ "LinkedList"
+ } else if is_type_diagnostic_item(cx, ty, sym::BinaryHeap) {
+ "BinaryHeap"
+ } else {
+ return;
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ ITER_COUNT,
+ expr.span,
+ &format!("called `.{}().count()` on a `{}`", iter_method, caller_type),
+ "try",
+ format!(
+ "{}.len()",
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+}
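For orientation, the length-counting pattern this check rewrites (sketch, illustrative name):

fn iter_count_example(v: &[u32]) {
    // flagged: `.iter().count()` on a container that already knows its length
    let _a = v.iter().count();
    // suggested
    let _b = v.len();
}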
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs b/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
new file mode 100644
index 000000000..b8d1dabe0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
@@ -0,0 +1,74 @@
+use super::utils::derefs_to_slice;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{get_parent_expr, higher};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::symbol::sym;
+
+use super::ITER_NEXT_SLICE;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, caller_expr: &'tcx hir::Expr<'_>) {
+ // Skip lint if the `iter().next()` expression is a for loop argument,
+ // since it is already covered by `loops::ITER_NEXT_LOOP`
+ let mut parent_expr_opt = get_parent_expr(cx, expr);
+ while let Some(parent_expr) = parent_expr_opt {
+ if higher::ForLoop::hir(parent_expr).is_some() {
+ return;
+ }
+ parent_expr_opt = get_parent_expr(cx, parent_expr);
+ }
+
+ if derefs_to_slice(cx, caller_expr, cx.typeck_results().expr_ty(caller_expr)).is_some() {
+ // caller is a Slice
+ if_chain! {
+ if let hir::ExprKind::Index(caller_var, index_expr) = &caller_expr.kind;
+ if let Some(higher::Range { start: Some(start_expr), end: None, limits: ast::RangeLimits::HalfOpen })
+ = higher::Range::hir(index_expr);
+ if let hir::ExprKind::Lit(ref start_lit) = &start_expr.kind;
+ if let ast::LitKind::Int(start_idx, _) = start_lit.node;
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let suggest = if start_idx == 0 {
+ format!("{}.first()", snippet_with_applicability(cx, caller_var.span, "..", &mut applicability))
+ } else {
+ format!("{}.get({})", snippet_with_applicability(cx, caller_var.span, "..", &mut applicability), start_idx)
+ };
+ span_lint_and_sugg(
+ cx,
+ ITER_NEXT_SLICE,
+ expr.span,
+ "using `.iter().next()` on a Slice without end index",
+ "try calling",
+ suggest,
+ applicability,
+ );
+ }
+ }
+ } else if is_vec_or_array(cx, caller_expr) {
+ // caller is a Vec or an Array
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ ITER_NEXT_SLICE,
+ expr.span,
+ "using `.iter().next()` on an array",
+ "try calling",
+ format!(
+ "{}.first()",
+ snippet_with_applicability(cx, caller_expr.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+}
+
+fn is_vec_or_array<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) -> bool {
+ is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec)
+ || matches!(&cx.typeck_results().expr_ty(expr).peel_refs().kind(), ty::Array(_, _))
+}
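+
+// Editorial sketch (not part of upstream Clippy): the two shapes this check handles.
+//
+//     let s = [1, 2, 3];
+//     s[2..].iter().next(); // slice indexed by `start..` -> suggests `s.get(2)`
+//     s[0..].iter().next(); // `start == 0`               -> suggests `s.first()`
+//
+//     let v = vec![1, 2, 3];
+//     v.iter().next();      // `Vec` or array receiver    -> suggests `v.first()`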
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_nth.rs b/src/tools/clippy/clippy_lints/src/methods/iter_nth.rs
new file mode 100644
index 000000000..80ca4c942
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_nth.rs
@@ -0,0 +1,39 @@
+use super::utils::derefs_to_slice;
+use crate::methods::iter_nth_zero;
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::ITER_NTH;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+ iter_recv: &'tcx hir::Expr<'tcx>,
+ nth_recv: &hir::Expr<'_>,
+ nth_arg: &hir::Expr<'_>,
+ is_mut: bool,
+) {
+ let mut_str = if is_mut { "_mut" } else { "" };
+ let caller_type = if derefs_to_slice(cx, iter_recv, cx.typeck_results().expr_ty(iter_recv)).is_some() {
+ "slice"
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(iter_recv), sym::Vec) {
+ "Vec"
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(iter_recv), sym::VecDeque) {
+ "VecDeque"
+ } else {
+ iter_nth_zero::check(cx, expr, nth_recv, nth_arg);
+ return; // caller is not a type that we want to lint
+ };
+
+ span_lint_and_help(
+ cx,
+ ITER_NTH,
+ expr.span,
+ &format!("called `.iter{0}().nth()` on a {1}", mut_str, caller_type),
+ None,
+ &format!("calling `.get{}()` is both faster and more readable", mut_str),
+ );
+}
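+
+// Editorial sketch (not part of upstream Clippy): this check only warns (no autofix), e.g.
+//
+//     let v = vec![1, 2, 3];
+//     v.iter().nth(2);     // lint: called `.iter().nth()` on a Vec; help: use `.get()`
+//     v.iter_mut().nth(2); // with `is_mut`, the help suggests `.get_mut()` instead
+//
+// Receivers that are not slices, `Vec`, or `VecDeque` fall through to the
+// `iter_nth_zero` check instead.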
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_nth_zero.rs b/src/tools/clippy/clippy_lints/src/methods/iter_nth_zero.rs
new file mode 100644
index 000000000..68d906c3e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_nth_zero.rs
@@ -0,0 +1,30 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::ITER_NTH_ZERO;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, arg: &hir::Expr<'_>) {
+ if_chain! {
+ if is_trait_method(cx, expr, sym::Iterator);
+ if let Some((Constant::Int(0), _)) = constant(cx, cx.typeck_results(), arg);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ ITER_NTH_ZERO,
+ expr.span,
+ "called `.nth(0)` on a `std::iter::Iterator`, when `.next()` is equivalent",
+ "try calling `.next()` instead of `.nth(0)`",
+ format!("{}.next()", snippet_with_applicability(cx, recv.span, "..", &mut applicability)),
+ applicability,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs
new file mode 100644
index 000000000..06a39c599
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs
@@ -0,0 +1,59 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::{get_associated_type, implements_trait, is_copy};
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::sym;
+
+use super::ITER_OVEREAGER_CLONED;
+use crate::redundant_clone::REDUNDANT_CLONE;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ cloned_call: &'tcx Expr<'_>,
+ cloned_recv: &'tcx Expr<'_>,
+ is_count: bool,
+ needs_into_iter: bool,
+) {
+ let typeck = cx.typeck_results();
+ if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator)
+ && let Some(method_id) = typeck.type_dependent_def_id(expr.hir_id)
+ && cx.tcx.trait_of_item(method_id) == Some(iter_id)
+ && let Some(method_id) = typeck.type_dependent_def_id(cloned_call.hir_id)
+ && cx.tcx.trait_of_item(method_id) == Some(iter_id)
+ && let cloned_recv_ty = typeck.expr_ty_adjusted(cloned_recv)
+ && let Some(iter_assoc_ty) = get_associated_type(cx, cloned_recv_ty, iter_id, "Item")
+ && matches!(*iter_assoc_ty.kind(), ty::Ref(_, ty, _) if !is_copy(cx, ty))
+ {
+ if needs_into_iter
+ && let Some(into_iter_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator)
+ && !implements_trait(cx, iter_assoc_ty, into_iter_id, &[])
+ {
+ return;
+ }
+
+ let (lint, msg, trailing_clone) = if is_count {
+ (REDUNDANT_CLONE, "unneeded cloning of iterator items", "")
+ } else {
+ (ITER_OVEREAGER_CLONED, "unnecessarily eager cloning of iterator items", ".cloned()")
+ };
+
+ span_lint_and_then(
+ cx,
+ lint,
+ expr.span,
+ msg,
+ |diag| {
+ let method_span = expr.span.with_lo(cloned_call.span.hi());
+ if let Some(mut snip) = snippet_opt(cx, method_span) {
+ snip.push_str(trailing_clone);
+ let replace_span = expr.span.with_lo(cloned_recv.span.hi());
+ diag.span_suggestion(replace_span, "try this", snip, Applicability::MachineApplicable);
+ }
+ }
+ );
+ }
+}
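+
+// Editorial sketch (not part of upstream Clippy) of the span surgery above, assuming
+// `it.cloned().take(10)` with `expr` = the whole chain, `cloned_call` = `it.cloned()`,
+// and `cloned_recv` = `it`:
+//
+//   * `method_span` covers `.take(10)`; appending `trailing_clone` gives ".take(10).cloned()"
+//   * `replace_span` covers `.cloned().take(10)`, so the suggestion reads `it.take(10).cloned()`
+//   * with `is_count` (e.g. `.cloned().count()`), `trailing_clone` is empty and
+//     `REDUNDANT_CLONE` is emitted instead, suggesting `it.count()`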
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
new file mode 100644
index 000000000..43e9451f7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
@@ -0,0 +1,46 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::is_trait_method;
+use clippy_utils::path_to_local;
+use clippy_utils::source::snippet;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::{BindingAnnotation, Node, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::ITER_SKIP_NEXT;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, arg: &hir::Expr<'_>) {
+ // lint if caller of skip is an Iterator
+ if is_trait_method(cx, expr, sym::Iterator) {
+ let mut application = Applicability::MachineApplicable;
+ span_lint_and_then(
+ cx,
+ ITER_SKIP_NEXT,
+ expr.span.trim_start(recv.span).unwrap(),
+ "called `skip(..).next()` on an iterator",
+ |diag| {
+ if_chain! {
+ if let Some(id) = path_to_local(recv);
+ if let Node::Pat(pat) = cx.tcx.hir().get(id);
+ if let PatKind::Binding(ann, _, _, _) = pat.kind;
+ if ann != BindingAnnotation::Mutable;
+ then {
+ application = Applicability::Unspecified;
+ diag.span_help(
+ pat.span,
+ &format!("for this change `{}` has to be mutable", snippet(cx, pat.span, "..")),
+ );
+ }
+ }
+
+ diag.span_suggestion(
+ expr.span.trim_start(recv.span).unwrap(),
+ "use `nth` instead",
+ format!(".nth({})", snippet(cx, arg.span, "..")),
+ application,
+ );
+ },
+ );
+ }
+}
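+
+// Editorial sketch (not part of upstream Clippy): the rewrite this check suggests.
+//
+//     let it = [1, 2, 3].iter();
+//     it.skip(2).next(); // lint: called `skip(..).next()` on an iterator
+//     it.nth(2);         // suggested replacement; `it` must now be `mut`,
+//                        // which is what the extra `span_help` above points out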
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
new file mode 100644
index 000000000..152072e09
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
@@ -0,0 +1,47 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher::Range;
+use clippy_utils::is_integer_const;
+use rustc_ast::ast::RangeLimits;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+use rustc_span::Span;
+
+use super::ITER_WITH_DRAIN;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span: Span, arg: &Expr<'_>) {
+ if !matches!(recv.kind, ExprKind::Field(..))
+ && let Some(adt) = cx.typeck_results().expr_ty(recv).ty_adt_def()
+ && let Some(ty_name) = cx.tcx.get_diagnostic_name(adt.did())
+ && matches!(ty_name, sym::Vec | sym::VecDeque)
+ && let Some(range) = Range::hir(arg)
+ && is_full_range(cx, recv, range)
+ {
+ span_lint_and_sugg(
+ cx,
+ ITER_WITH_DRAIN,
+ span.with_hi(expr.span.hi()),
+ &format!("`drain(..)` used on a `{}`", ty_name),
+ "try this",
+ "into_iter()".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ };
+}
+
+fn is_full_range(cx: &LateContext<'_>, container: &Expr<'_>, range: Range<'_>) -> bool {
+ range.start.map_or(true, |e| is_integer_const(cx, e, 0))
+ && range.end.map_or(true, |e| {
+ if range.limits == RangeLimits::HalfOpen
+ && let ExprKind::Path(QPath::Resolved(None, container_path)) = container.kind
+ && let ExprKind::MethodCall(name, [self_arg], _) = e.kind
+ && name.ident.name == sym::len
+ && let ExprKind::Path(QPath::Resolved(None, path)) = self_arg.kind
+ {
+ container_path.res == path.res
+ } else {
+ false
+ }
+ })
+}
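+
+// Editorial sketch (not part of upstream Clippy): the pattern this check targets.
+//
+//     let mut v = vec![1, 2, 3];
+//     let sum: i32 = v.drain(..).sum();     // lint: `drain(..)` used on a `Vec`
+//     let sum: i32 = v.into_iter().sum();   // suggested replacement (MaybeIncorrect,
+//                                           // since `into_iter` consumes `v`)
+//
+// `is_full_range` also accepts the explicit form `v.drain(0..v.len())`.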
diff --git a/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs b/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs
new file mode 100644
index 000000000..64c09214a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs
@@ -0,0 +1,21 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_trait_method;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::ITERATOR_STEP_BY_ZERO;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, arg: &'tcx hir::Expr<'_>) {
+ if is_trait_method(cx, expr, sym::Iterator) {
+ if let Some((Constant::Int(0), _)) = constant(cx, cx.typeck_results(), arg) {
+ span_lint(
+ cx,
+ ITERATOR_STEP_BY_ZERO,
+ expr.span,
+ "`Iterator::step_by(0)` will panic at runtime",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
new file mode 100644
index 000000000..0fe510bea
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
@@ -0,0 +1,164 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{match_def_path, path_def_id};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty::layout::LayoutOf;
+
+pub fn check(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ arith_lhs: &hir::Expr<'_>,
+ arith_rhs: &hir::Expr<'_>,
+ unwrap_arg: &hir::Expr<'_>,
+ arith: &str,
+) {
+ let ty = cx.typeck_results().expr_ty(arith_lhs);
+ if !ty.is_integral() {
+ return;
+ }
+
+ let mm = if let Some(mm) = is_min_or_max(cx, unwrap_arg) {
+ mm
+ } else {
+ return;
+ };
+
+ if ty.is_signed() {
+ use self::{
+ MinMax::{Max, Min},
+ Sign::{Neg, Pos},
+ };
+
+ let sign = if let Some(sign) = lit_sign(arith_rhs) {
+ sign
+ } else {
+ return;
+ };
+
+ match (arith, sign, mm) {
+ ("add", Pos, Max) | ("add", Neg, Min) | ("sub", Neg, Max) | ("sub", Pos, Min) => (),
+ // "mul" is omitted because lhs can be negative.
+ _ => return,
+ }
+ } else {
+ match (mm, arith) {
+ (MinMax::Max, "add" | "mul") | (MinMax::Min, "sub") => (),
+ _ => return,
+ }
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ super::MANUAL_SATURATING_ARITHMETIC,
+ expr.span,
+ "manual saturating arithmetic",
+ &format!("try using `saturating_{}`", arith),
+ format!(
+ "{}.saturating_{}({})",
+ snippet_with_applicability(cx, arith_lhs.span, "..", &mut applicability),
+ arith,
+ snippet_with_applicability(cx, arith_rhs.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+}
+
+#[derive(PartialEq, Eq)]
+enum MinMax {
+ Min,
+ Max,
+}
+
+fn is_min_or_max<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>) -> Option<MinMax> {
+ // `T::max_value()` `T::min_value()` inherent methods
+ if_chain! {
+ if let hir::ExprKind::Call(func, args) = &expr.kind;
+ if args.is_empty();
+ if let hir::ExprKind::Path(hir::QPath::TypeRelative(_, segment)) = &func.kind;
+ then {
+ match segment.ident.as_str() {
+ "max_value" => return Some(MinMax::Max),
+ "min_value" => return Some(MinMax::Min),
+ _ => {}
+ }
+ }
+ }
+
+ let ty = cx.typeck_results().expr_ty(expr);
+ let ty_str = ty.to_string();
+
+ // `std::T::MAX` `std::T::MIN` constants
+ if let Some(id) = path_def_id(cx, expr) {
+ if match_def_path(cx, id, &["core", &ty_str, "MAX"]) {
+ return Some(MinMax::Max);
+ }
+
+ if match_def_path(cx, id, &["core", &ty_str, "MIN"]) {
+ return Some(MinMax::Min);
+ }
+ }
+
+ // Literals
+ let bits = cx.layout_of(ty).unwrap().size.bits();
+ let (minval, maxval): (u128, u128) = if ty.is_signed() {
+ let minval = 1 << (bits - 1);
+ let mut maxval = !(1 << (bits - 1));
+ if bits != 128 {
+ maxval &= (1 << bits) - 1;
+ }
+ (minval, maxval)
+ } else {
+ (0, if bits == 128 { !0 } else { (1 << bits) - 1 })
+ };
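+ // Editorial worked example (not part of upstream Clippy): for an 8-bit signed type,
+ // `bits == 8`, so `minval == 1 << 7 == 0x80` (the bit pattern of `i8::MIN`) and
+ // `maxval == !(1 << 7) & 0xff == 0x7f == 127` (`i8::MAX`); for `u8` the pair is `(0, 255)`.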
+
+ let check_lit = |expr: &hir::Expr<'_>, check_min: bool| {
+ if let hir::ExprKind::Lit(lit) = &expr.kind {
+ if let ast::LitKind::Int(value, _) = lit.node {
+ if value == maxval {
+ return Some(MinMax::Max);
+ }
+
+ if check_min && value == minval {
+ return Some(MinMax::Min);
+ }
+ }
+ }
+
+ None
+ };
+
+ if let r @ Some(_) = check_lit(expr, !ty.is_signed()) {
+ return r;
+ }
+
+ if ty.is_signed() {
+ if let hir::ExprKind::Unary(hir::UnOp::Neg, val) = &expr.kind {
+ return check_lit(val, true);
+ }
+ }
+
+ None
+}
+
+#[derive(PartialEq, Eq)]
+enum Sign {
+ Pos,
+ Neg,
+}
+
+fn lit_sign(expr: &hir::Expr<'_>) -> Option<Sign> {
+ if let hir::ExprKind::Unary(hir::UnOp::Neg, inner) = &expr.kind {
+ if let hir::ExprKind::Lit(..) = &inner.kind {
+ return Some(Sign::Neg);
+ }
+ } else if let hir::ExprKind::Lit(..) = &expr.kind {
+ return Some(Sign::Pos);
+ }
+
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
new file mode 100644
index 000000000..46d2fc493
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
@@ -0,0 +1,99 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item, match_type};
+use clippy_utils::{is_expr_path_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, LangItem};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::sym;
+use std::borrow::Cow;
+
+use super::MANUAL_STR_REPEAT;
+
+enum RepeatKind {
+ String,
+ Char(char),
+}
+
+fn get_ty_param(ty: Ty<'_>) -> Option<Ty<'_>> {
+ if let ty::Adt(_, subs) = ty.kind() {
+ subs.types().next()
+ } else {
+ None
+ }
+}
+
+fn parse_repeat_arg(cx: &LateContext<'_>, e: &Expr<'_>) -> Option<RepeatKind> {
+ if let ExprKind::Lit(lit) = &e.kind {
+ match lit.node {
+ LitKind::Str(..) => Some(RepeatKind::String),
+ LitKind::Char(c) => Some(RepeatKind::Char(c)),
+ _ => None,
+ }
+ } else {
+ let ty = cx.typeck_results().expr_ty(e);
+ if is_type_diagnostic_item(cx, ty, sym::String)
+ || (is_type_lang_item(cx, ty, LangItem::OwnedBox) && get_ty_param(ty).map_or(false, Ty::is_str))
+ || (match_type(cx, ty, &paths::COW) && get_ty_param(ty).map_or(false, Ty::is_str))
+ {
+ Some(RepeatKind::String)
+ } else {
+ let ty = ty.peel_refs();
+ (ty.is_str() || is_type_diagnostic_item(cx, ty, sym::String)).then_some(RepeatKind::String)
+ }
+ }
+}
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ collect_expr: &Expr<'_>,
+ take_expr: &Expr<'_>,
+ take_self_arg: &Expr<'_>,
+ take_arg: &Expr<'_>,
+) {
+ if_chain! {
+ if let ExprKind::Call(repeat_fn, [repeat_arg]) = take_self_arg.kind;
+ if is_expr_path_def_path(cx, repeat_fn, &paths::ITER_REPEAT);
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(collect_expr), sym::String);
+ if let Some(collect_id) = cx.typeck_results().type_dependent_def_id(collect_expr.hir_id);
+ if let Some(take_id) = cx.typeck_results().type_dependent_def_id(take_expr.hir_id);
+ if let Some(iter_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
+ if cx.tcx.trait_of_item(collect_id) == Some(iter_trait_id);
+ if cx.tcx.trait_of_item(take_id) == Some(iter_trait_id);
+ if let Some(repeat_kind) = parse_repeat_arg(cx, repeat_arg);
+ let ctxt = collect_expr.span.ctxt();
+ if ctxt == take_expr.span.ctxt();
+ if ctxt == take_self_arg.span.ctxt();
+ then {
+ let mut app = Applicability::MachineApplicable;
+ let count_snip = snippet_with_context(cx, take_arg.span, ctxt, "..", &mut app).0;
+
+ let val_str = match repeat_kind {
+ RepeatKind::Char(_) if repeat_arg.span.ctxt() != ctxt => return,
+ RepeatKind::Char('\'') => r#""'""#.into(),
+ RepeatKind::Char('"') => r#""\"""#.into(),
+ RepeatKind::Char(_) =>
+ match snippet_with_applicability(cx, repeat_arg.span, "..", &mut app) {
+ Cow::Owned(s) => Cow::Owned(format!("\"{}\"", &s[1..s.len() - 1])),
+ s @ Cow::Borrowed(_) => s,
+ },
+ RepeatKind::String =>
+ Sugg::hir_with_context(cx, repeat_arg, ctxt, "..", &mut app).maybe_par().to_string().into(),
+ };
+
+ span_lint_and_sugg(
+ cx,
+ MANUAL_STR_REPEAT,
+ collect_expr.span,
+ "manual implementation of `str::repeat` using iterators",
+ "try this",
+ format!("{}.repeat({})", val_str, count_snip),
+ app
+ )
+ }
+ }
+}
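+
+// Editorial sketch (not part of upstream Clippy): the pattern this check rewrites.
+//
+//     let x: String = std::iter::repeat("hi").take(5).collect(); // lint fires
+//     let x = "hi".repeat(5);                                    // suggested replacement
+//
+// A char argument such as `std::iter::repeat('x')` is turned into a string literal
+// first, so the suggestion becomes `"x".repeat(5)`.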
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
new file mode 100644
index 000000000..d420f144e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
@@ -0,0 +1,47 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::symbol::sym;
+
+use super::MAP_COLLECT_RESULT_UNIT;
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ iter: &hir::Expr<'_>,
+ map_fn: &hir::Expr<'_>,
+ collect_recv: &hir::Expr<'_>,
+) {
+ if_chain! {
+ // called on Iterator
+ if is_trait_method(cx, collect_recv, sym::Iterator);
+ // return of collect `Result<(),_>`
+ let collect_ret_ty = cx.typeck_results().expr_ty(expr);
+ if is_type_diagnostic_item(cx, collect_ret_ty, sym::Result);
+ if let ty::Adt(_, substs) = collect_ret_ty.kind();
+ if let Some(result_t) = substs.types().next();
+ if result_t.is_unit();
+ // get parts for snippet
+ then {
+ span_lint_and_sugg(
+ cx,
+ MAP_COLLECT_RESULT_UNIT,
+ expr.span,
+ "`.map().collect()` can be replaced with `.try_for_each()`",
+ "try this",
+ format!(
+ "{}.try_for_each({})",
+ snippet(cx, iter.span, ".."),
+ snippet(cx, map_fn.span, "..")
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
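+
+// Editorial sketch (not part of upstream Clippy), using a hypothetical fallible
+// `checked` function returning `Result<(), E>`:
+//
+//     let r: Result<(), E> = (0..3).map(checked).collect();  // lint fires
+//     let r: Result<(), E> = (0..3).try_for_each(checked);   // suggested replacement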
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
new file mode 100644
index 000000000..13853dec9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
@@ -0,0 +1,73 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_trait_method;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::{symbol::sym, Span};
+
+use super::MAP_FLATTEN;
+
+/// lint use of `map().flatten()` on `Iterator`, `Option`, and `Result`
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, map_arg: &Expr<'_>, map_span: Span) {
+ if let Some((caller_ty_name, method_to_use)) = try_get_caller_ty_name_and_method_name(cx, expr, recv, map_arg) {
+ let mut applicability = Applicability::MachineApplicable;
+
+ let closure_snippet = snippet_with_applicability(cx, map_arg.span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ MAP_FLATTEN,
+ expr.span.with_lo(map_span.lo()),
+ &format!("called `map(..).flatten()` on `{}`", caller_ty_name),
+ &format!(
+ "try replacing `map` with `{}` and remove the `.flatten()`",
+ method_to_use
+ ),
+ format!("{}({})", method_to_use, closure_snippet),
+ applicability,
+ );
+ }
+}
+
+fn try_get_caller_ty_name_and_method_name(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ caller_expr: &Expr<'_>,
+ map_arg: &Expr<'_>,
+) -> Option<(&'static str, &'static str)> {
+ if is_trait_method(cx, expr, sym::Iterator) {
+ if is_map_to_option(cx, map_arg) {
+ // `(...).map(...)` has type `impl Iterator<Item=Option<...>>
+ Some(("Iterator", "filter_map"))
+ } else {
+ // `(...).map(...)` has type `impl Iterator<Item=impl Iterator<...>>
+ Some(("Iterator", "flat_map"))
+ }
+ } else {
+ if let ty::Adt(adt, _) = cx.typeck_results().expr_ty(caller_expr).kind() {
+ if cx.tcx.is_diagnostic_item(sym::Option, adt.did()) {
+ return Some(("Option", "and_then"));
+ } else if cx.tcx.is_diagnostic_item(sym::Result, adt.did()) {
+ return Some(("Result", "and_then"));
+ }
+ }
+ None
+ }
+}
+
+fn is_map_to_option(cx: &LateContext<'_>, map_arg: &Expr<'_>) -> bool {
+ let map_closure_ty = cx.typeck_results().expr_ty(map_arg);
+ match map_closure_ty.kind() {
+ ty::Closure(_, _) | ty::FnDef(_, _) | ty::FnPtr(_) => {
+ let map_closure_sig = match map_closure_ty.kind() {
+ ty::Closure(_, substs) => substs.as_closure().sig(),
+ _ => map_closure_ty.fn_sig(cx.tcx),
+ };
+ let map_closure_return_ty = cx.tcx.erase_late_bound_regions(map_closure_sig.output());
+ is_type_diagnostic_item(cx, map_closure_return_ty, sym::Option)
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
new file mode 100644
index 000000000..862a9578e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
@@ -0,0 +1,39 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_expr_identity_function, is_trait_method};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::{source_map::Span, sym};
+
+use super::MAP_IDENTITY;
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ caller: &hir::Expr<'_>,
+ map_arg: &hir::Expr<'_>,
+ name: &str,
+ _map_span: Span,
+) {
+ let caller_ty = cx.typeck_results().expr_ty(caller);
+
+ if_chain! {
+ if is_trait_method(cx, expr, sym::Iterator)
+ || is_type_diagnostic_item(cx, caller_ty, sym::Result)
+ || is_type_diagnostic_item(cx, caller_ty, sym::Option);
+ if is_expr_identity_function(cx, map_arg);
+ if let Some(sugg_span) = expr.span.trim_start(caller.span);
+ then {
+ span_lint_and_sugg(
+ cx,
+ MAP_IDENTITY,
+ sugg_span,
+ "unnecessary map of the identity function",
+ &format!("remove the call to `{}`", name),
+ String::new(),
+ Applicability::MachineApplicable,
+ )
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
new file mode 100644
index 000000000..4a8e7ce4d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
@@ -0,0 +1,79 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::usage::mutated_variables;
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_semver::RustcVersion;
+use rustc_span::symbol::sym;
+
+use super::MAP_UNWRAP_OR;
+
+/// lint use of `map().unwrap_or_else()` for `Option`s and `Result`s
+/// Returns true if the lint was triggered
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ map_arg: &'tcx hir::Expr<'_>,
+ unwrap_arg: &'tcx hir::Expr<'_>,
+ msrv: Option<RustcVersion>,
+) -> bool {
+ // lint if the caller of `map()` is an `Option`
+ let is_option = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Option);
+ let is_result = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+
+ if is_result && !meets_msrv(msrv, msrvs::RESULT_MAP_OR_ELSE) {
+ return false;
+ }
+
+ if is_option || is_result {
+ // Don't make a suggestion that may fail to compile due to mutably borrowing
+ // the same variable twice.
+ let map_mutated_vars = mutated_variables(recv, cx);
+ let unwrap_mutated_vars = mutated_variables(unwrap_arg, cx);
+ if let (Some(map_mutated_vars), Some(unwrap_mutated_vars)) = (map_mutated_vars, unwrap_mutated_vars) {
+ if map_mutated_vars.intersection(&unwrap_mutated_vars).next().is_some() {
+ return false;
+ }
+ } else {
+ return false;
+ }
+
+ // lint message
+ let msg = if is_option {
+ "called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling \
+ `map_or_else(<g>, <f>)` instead"
+ } else {
+ "called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling \
+ `.map_or_else(<g>, <f>)` instead"
+ };
+ // get snippets for args to map() and unwrap_or_else()
+ let map_snippet = snippet(cx, map_arg.span, "..");
+ let unwrap_snippet = snippet(cx, unwrap_arg.span, "..");
+ // lint, with note if neither arg is > 1 line and both map() and
+ // unwrap_or_else() have the same span
+ let multiline = map_snippet.lines().count() > 1 || unwrap_snippet.lines().count() > 1;
+ let same_span = map_arg.span.ctxt() == unwrap_arg.span.ctxt();
+ if same_span && !multiline {
+ let var_snippet = snippet(cx, recv.span, "..");
+ span_lint_and_sugg(
+ cx,
+ MAP_UNWRAP_OR,
+ expr.span,
+ msg,
+ "try this",
+ format!("{}.map_or_else({}, {})", var_snippet, unwrap_snippet, map_snippet),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ } else if same_span && multiline {
+ span_lint(cx, MAP_UNWRAP_OR, expr.span, msg);
+ return true;
+ }
+ }
+
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs
new file mode 100644
index 000000000..202fbc1f7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs
@@ -0,0 +1,3052 @@
+mod bind_instead_of_map;
+mod bytes_nth;
+mod chars_cmp;
+mod chars_cmp_with_unwrap;
+mod chars_last_cmp;
+mod chars_last_cmp_with_unwrap;
+mod chars_next_cmp;
+mod chars_next_cmp_with_unwrap;
+mod clone_on_copy;
+mod clone_on_ref_ptr;
+mod cloned_instead_of_copied;
+mod err_expect;
+mod expect_fun_call;
+mod expect_used;
+mod extend_with_drain;
+mod filetype_is_file;
+mod filter_map;
+mod filter_map_identity;
+mod filter_map_next;
+mod filter_next;
+mod flat_map_identity;
+mod flat_map_option;
+mod from_iter_instead_of_collect;
+mod get_last_with_len;
+mod get_unwrap;
+mod implicit_clone;
+mod inefficient_to_string;
+mod inspect_for_each;
+mod into_iter_on_ref;
+mod is_digit_ascii_radix;
+mod iter_cloned_collect;
+mod iter_count;
+mod iter_next_slice;
+mod iter_nth;
+mod iter_nth_zero;
+mod iter_overeager_cloned;
+mod iter_skip_next;
+mod iter_with_drain;
+mod iterator_step_by_zero;
+mod manual_saturating_arithmetic;
+mod manual_str_repeat;
+mod map_collect_result_unit;
+mod map_flatten;
+mod map_identity;
+mod map_unwrap_or;
+mod needless_option_as_deref;
+mod needless_option_take;
+mod no_effect_replace;
+mod obfuscated_if_else;
+mod ok_expect;
+mod option_as_ref_deref;
+mod option_map_or_none;
+mod option_map_unwrap_or;
+mod or_fun_call;
+mod or_then_unwrap;
+mod search_is_some;
+mod single_char_add_str;
+mod single_char_insert_string;
+mod single_char_pattern;
+mod single_char_push_string;
+mod skip_while_next;
+mod str_splitn;
+mod string_extend_chars;
+mod suspicious_map;
+mod suspicious_splitn;
+mod uninit_assumed_init;
+mod unnecessary_filter_map;
+mod unnecessary_fold;
+mod unnecessary_iter_cloned;
+mod unnecessary_join;
+mod unnecessary_lazy_eval;
+mod unnecessary_to_owned;
+mod unwrap_or_else_default;
+mod unwrap_used;
+mod useless_asref;
+mod utils;
+mod wrong_self_convention;
+mod zst_offset;
+
+use bind_instead_of_map::BindInsteadOfMap;
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::ty::{contains_adt_constructor, contains_ty, implements_trait, is_copy, is_type_diagnostic_item};
+use clippy_utils::{contains_return, get_trait_def_id, iter_input_pats, meets_msrv, msrvs, paths, return_ty};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_hir::def::Res;
+use rustc_hir::{Expr, ExprKind, PrimTy, QPath, TraitItem, TraitItemKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{self, TraitRef, Ty};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{sym, Span};
+use rustc_typeck::hir_ty_to_ty;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `cloned()` on an `Iterator` or `Option` where
+ /// `copied()` could be used instead.
+ ///
+ /// ### Why is this bad?
+ /// `copied()` is better because it guarantees that the type being cloned
+ /// implements `Copy`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// [1, 2, 3].iter().cloned();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// [1, 2, 3].iter().copied();
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub CLONED_INSTEAD_OF_COPIED,
+ pedantic,
+ "used `cloned` where `copied` could be used instead"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.cloned().<func>()` where call to `.cloned()` can be postponed.
+ ///
+ /// ### Why is this bad?
+ /// It's often inefficient to clone all elements of an iterator, when eventually, only some
+ /// of them will be consumed.
+ ///
+ /// ### Known problems
+ /// The suggestion removes the side effect of cloning items in the iterator;
+ /// code that relies on that side effect could fail.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// # let vec = vec!["string".to_string()];
+ /// vec.iter().cloned().take(10);
+ /// vec.iter().cloned().last();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let vec = vec!["string".to_string()];
+ /// vec.iter().take(10).cloned();
+ /// vec.iter().last().cloned();
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub ITER_OVEREAGER_CLONED,
+ perf,
+ "using `cloned()` early with `Iterator::iter()` can lead to some performance inefficiencies"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `Iterator::flat_map()` where `filter_map()` could be
+ /// used instead.
+ ///
+ /// ### Why is this bad?
+ /// When applicable, `filter_map()` is clearer since it shows that
+ /// `Option` is used to produce 0 or 1 items.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let nums: Vec<i32> = ["1", "2", "whee!"].iter().flat_map(|x| x.parse().ok()).collect();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let nums: Vec<i32> = ["1", "2", "whee!"].iter().filter_map(|x| x.parse().ok()).collect();
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub FLAT_MAP_OPTION,
+ pedantic,
+ "used `flat_map` where `filter_map` could be used instead"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `.unwrap()` calls on `Option`s and on `Result`s.
+ ///
+ /// ### Why is this bad?
+ /// It is better to handle the `None` or `Err` case,
+ /// or at least call `.expect(_)` with a more helpful message. Still, for a lot of
+ /// quick-and-dirty code, `unwrap` is a good choice, which is why this lint is
+ /// `Allow` by default.
+ ///
+ /// `result.unwrap()` will let the thread panic on `Err` values.
+ /// Normally, you want to implement more sophisticated error handling,
+ /// and propagate errors upwards with the `?` operator.
+ ///
+ /// Even if you want to panic on errors, not all `Error`s implement good
+ /// messages on display. Therefore, it may be beneficial to look at the places
+ /// where they may get displayed. Activate this lint to do just that.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// # let option = Some(1);
+ /// # let result: Result<usize, ()> = Ok(1);
+ /// option.unwrap();
+ /// result.unwrap();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let option = Some(1);
+ /// # let result: Result<usize, ()> = Ok(1);
+ /// option.expect("more helpful message");
+ /// result.expect("more helpful message");
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub UNWRAP_USED,
+ restriction,
+ "using `.unwrap()` on `Result` or `Option`, which should at least get a better message using `expect()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `.expect()` calls on `Option`s and `Result`s.
+ ///
+ /// ### Why is this bad?
+ /// Usually it is better to handle the `None` or `Err` case.
+ /// Still, for a lot of quick-and-dirty code, `expect` is a good choice, which is why
+ /// this lint is `Allow` by default.
+ ///
+ /// `result.expect()` will let the thread panic on `Err`
+ /// values. Normally, you want to implement more sophisticated error handling,
+ /// and propagate errors upwards with the `?` operator.
+ ///
+ /// ### Examples
+ /// ```rust,ignore
+ /// # let option = Some(1);
+ /// # let result: Result<usize, ()> = Ok(1);
+ /// option.expect("one");
+ /// result.expect("one");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// # let option = Some(1);
+ /// # let result: Result<usize, ()> = Ok(1);
+ /// option?;
+ ///
+ /// // or
+ ///
+ /// result?;
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub EXPECT_USED,
+ restriction,
+ "using `.expect()` on `Result` or `Option`, which might be better handled"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for methods that should live in a trait
+ /// implementation of a `std` trait (see [llogiq's blog
+ /// post](http://llogiq.github.io/2015/07/30/traits.html) for further
+ /// information) instead of an inherent implementation.
+ ///
+ /// ### Why is this bad?
+ /// Implementing the traits improves ergonomics for users of
+ /// the code, often with very little cost. Also, people seeing a `mul(...)`
+ /// method may expect `*` to work equally, so you should have a good reason
+ /// to disappoint them.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct X;
+ /// impl X {
+ /// fn add(&self, other: &X) -> X {
+ /// // ..
+ /// # X
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SHOULD_IMPLEMENT_TRAIT,
+ style,
+ "defining a method that should be implementing a std trait"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for methods with certain name prefixes that don't match
+ /// how `self` is taken. The actual rules are:
+ ///
+ /// |Prefix |Postfix |`self` taken | `self` type |
+ /// |-------|------------|-------------------------------|--------------|
+ /// |`as_` | none |`&self` or `&mut self` | any |
+ /// |`from_`| none | none | any |
+ /// |`into_`| none |`self` | any |
+ /// |`is_` | none |`&mut self` or `&self` or none | any |
+ /// |`to_` | `_mut` |`&mut self` | any |
+ /// |`to_` | not `_mut` |`self` | `Copy` |
+ /// |`to_` | not `_mut` |`&self` | not `Copy` |
+ ///
+ /// Note: Clippy doesn't trigger methods with the `to_` prefix in:
+ /// - Trait definitions:
+ ///   Clippy cannot tell if a type that implements a trait is `Copy` or not.
+ /// - Trait implementations, when `&self` is taken:
+ ///   the method signature is controlled by the trait, and often `&self` is required for all types that
+ ///   implement the trait (see e.g. the `std::string::ToString` trait).
+ ///
+ /// Clippy allows `Pin<&Self>` and `Pin<&mut Self>` if `&self` and `&mut self` is required.
+ ///
+ /// Please find more info here:
+ /// https://rust-lang.github.io/api-guidelines/naming.html#ad-hoc-conversions-follow-as_-to_-into_-conventions-c-conv
+ ///
+ /// ### Why is this bad?
+ /// Consistency breeds readability. If you follow the
+ /// conventions, your users won't be surprised that they, e.g., need to supply a
+ /// mutable reference to an `as_..` function.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct X;
+ /// impl X {
+ /// fn as_str(self) -> &'static str {
+ /// // ..
+ /// # ""
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WRONG_SELF_CONVENTION,
+ style,
+ "defining a method named with an established prefix (like \"into_\") that takes `self` with the wrong convention"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `ok().expect(..)`.
+ ///
+ /// ### Why is this bad?
+ /// Because you usually call `expect()` on the `Result`
+ /// directly to get a better error message.
+ ///
+ /// ### Known problems
+ /// The error type needs to implement `Debug`
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = Ok::<_, ()>(());
+ /// x.ok().expect("why did I do this again?");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = Ok::<_, ()>(());
+ /// x.expect("why did I do this again?");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OK_EXPECT,
+ style,
+ "using `ok().expect()`, which gives worse error messages than calling `expect` directly on the Result"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `.err().expect()` calls on the `Result` type.
+ ///
+ /// ### Why is this bad?
+ /// `.expect_err()` can be called directly to avoid the extra type conversion from `err()`.
+ ///
+ /// ### Example
+ /// ```should_panic
+ /// let x: Result<u32, &str> = Ok(10);
+ /// x.err().expect("Testing err().expect()");
+ /// ```
+ /// Use instead:
+ /// ```should_panic
+ /// let x: Result<u32, &str> = Ok(10);
+ /// x.expect_err("Testing expect_err");
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub ERR_EXPECT,
+ style,
+ r#"using `.err().expect("")` when `.expect_err("")` can be used"#
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `_.unwrap_or_else(Default::default)` on `Option` and
+ /// `Result` values.
+ ///
+ /// ### Why is this bad?
+ /// Readability, these can be written as `_.unwrap_or_default()`, which is
+ /// simpler and more concise.
+ ///
+ /// ### Examples
+ /// ```rust
+ /// # let x = Some(1);
+ /// x.unwrap_or_else(Default::default);
+ /// x.unwrap_or_else(u32::default);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = Some(1);
+ /// x.unwrap_or_default();
+ /// ```
+ #[clippy::version = "1.56.0"]
+ pub UNWRAP_OR_ELSE_DEFAULT,
+ style,
+ "using `.unwrap_or_else(Default::default)`, which is more succinctly expressed as `.unwrap_or_default()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `option.map(_).unwrap_or(_)` or `option.map(_).unwrap_or_else(_)` or
+ /// `result.map(_).unwrap_or_else(_)`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, these can be written more concisely (resp.) as
+ /// `option.map_or(_, _)`, `option.map_or_else(_, _)` and `result.map_or_else(_, _)`.
+ ///
+ /// ### Known problems
+ /// The order of the arguments is not in execution order
+ ///
+ /// ### Examples
+ /// ```rust
+ /// # let option = Some(1);
+ /// # let result: Result<usize, ()> = Ok(1);
+ /// # fn some_function(foo: ()) -> usize { 1 }
+ /// option.map(|a| a + 1).unwrap_or(0);
+ /// result.map(|a| a + 1).unwrap_or_else(some_function);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let option = Some(1);
+ /// # let result: Result<usize, ()> = Ok(1);
+ /// # fn some_function(foo: ()) -> usize { 1 }
+ /// option.map_or(0, |a| a + 1);
+ /// result.map_or_else(some_function, |a| a + 1);
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub MAP_UNWRAP_OR,
+ pedantic,
+ "using `.map(f).unwrap_or(a)` or `.map(f).unwrap_or_else(func)`, which are more succinctly expressed as `map_or(a, f)` or `map_or_else(a, f)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.map_or(None, _)`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.and_then(_)`.
+ ///
+ /// ### Known problems
+ /// The order of the arguments is not in execution order.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let opt = Some(1);
+ /// opt.map_or(None, |a| Some(a + 1));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let opt = Some(1);
+ /// opt.and_then(|a| Some(a + 1));
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OPTION_MAP_OR_NONE,
+ style,
+ "using `Option.map_or(None, f)`, which is more succinctly expressed as `and_then(f)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.map_or(None, Some)`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.ok()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let r: Result<u32, &str> = Ok(1);
+ /// assert_eq!(Some(1), r.map_or(None, Some));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let r: Result<u32, &str> = Ok(1);
+ /// assert_eq!(Some(1), r.ok());
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub RESULT_MAP_OR_INTO_OPTION,
+ style,
+ "using `Result.map_or(None, Some)`, which is more succinctly expressed as `ok()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.and_then(|x| Some(y))`, `_.and_then(|x| Ok(y))` or
+ /// `_.or_else(|x| Err(y))`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.map(|x| y)` or `_.map_err(|x| y)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn opt() -> Option<&'static str> { Some("42") }
+ /// # fn res() -> Result<&'static str, &'static str> { Ok("42") }
+ /// let _ = opt().and_then(|s| Some(s.len()));
+ /// let _ = res().and_then(|s| if s.len() == 42 { Ok(10) } else { Ok(20) });
+ /// let _ = res().or_else(|s| if s.len() == 42 { Err(10) } else { Err(20) });
+ /// ```
+ ///
+ /// The correct use would be:
+ ///
+ /// ```rust
+ /// # fn opt() -> Option<&'static str> { Some("42") }
+ /// # fn res() -> Result<&'static str, &'static str> { Ok("42") }
+ /// let _ = opt().map(|s| s.len());
+ /// let _ = res().map(|s| if s.len() == 42 { 10 } else { 20 });
+ /// let _ = res().map_err(|s| if s.len() == 42 { 10 } else { 20 });
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub BIND_INSTEAD_OF_MAP,
+ complexity,
+ "using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.filter(_).next()`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.find(_)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let vec = vec![1];
+ /// vec.iter().filter(|x| **x == 0).next();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let vec = vec![1];
+ /// vec.iter().find(|x| **x == 0);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FILTER_NEXT,
+ complexity,
+ "using `filter(p).next()`, which is more succinctly expressed as `.find(p)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.skip_while(condition).next()`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.find(!condition)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let vec = vec![1];
+ /// vec.iter().skip_while(|x| **x == 0).next();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let vec = vec![1];
+ /// vec.iter().find(|x| **x != 0);
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub SKIP_WHILE_NEXT,
+ complexity,
+ "using `skip_while(p).next()`, which is more succinctly expressed as `.find(!p)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.map(_).flatten(_)` on `Iterator` and `Option`
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.flat_map(_)` for `Iterator` or `_.and_then(_)` for `Option`
+ ///
+ /// ### Example
+ /// ```rust
+ /// let vec = vec![vec![1]];
+ /// let opt = Some(5);
+ ///
+ /// vec.iter().map(|x| x.iter()).flatten();
+ /// opt.map(|x| Some(x * 2)).flatten();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let vec = vec![vec![1]];
+ /// # let opt = Some(5);
+ /// vec.iter().flat_map(|x| x.iter());
+ /// opt.and_then(|x| Some(x * 2));
+ /// ```
+ #[clippy::version = "1.31.0"]
+ pub MAP_FLATTEN,
+ complexity,
+ "using combinations of `flatten` and `map` which can usually be written as a single method call"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.filter(_).map(_)` that can be written more simply
+ /// as `filter_map(_)`.
+ ///
+ /// ### Why is this bad?
+ /// Redundant code in the `filter` and `map` operations is poor style and
+ /// less performant.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #![allow(unused)]
+ /// (0_i32..10)
+ /// .filter(|n| n.checked_add(1).is_some())
+ /// .map(|n| n.checked_add(1).unwrap());
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # #[allow(unused)]
+ /// (0_i32..10).filter_map(|n| n.checked_add(1));
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub MANUAL_FILTER_MAP,
+ complexity,
+ "using `_.filter(_).map(_)` in a way that can be written more simply as `filter_map(_)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.find(_).map(_)` that can be written more simply
+ /// as `find_map(_)`.
+ ///
+ /// ### Why is this bad?
+ /// Redundant code in the `find` and `map` operations is poor style and
+ /// less performant.
+ ///
+ /// ### Example
+ /// ```rust
+ /// (0_i32..10)
+ /// .find(|n| n.checked_add(1).is_some())
+ /// .map(|n| n.checked_add(1).unwrap());
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// (0_i32..10).find_map(|n| n.checked_add(1));
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub MANUAL_FIND_MAP,
+ complexity,
+ "using `_.find(_).map(_)` in a way that can be written more simply as `find_map(_)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.filter_map(_).next()`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.find_map(_)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// (0..3).filter_map(|x| if x == 2 { Some(x) } else { None }).next();
+ /// ```
+ /// Can be written as
+ ///
+ /// ```rust
+ /// (0..3).find_map(|x| if x == 2 { Some(x) } else { None });
+ /// ```
+ #[clippy::version = "1.36.0"]
+ pub FILTER_MAP_NEXT,
+ pedantic,
+ "using combination of `filter_map` and `next` which can usually be written as a single method call"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `flat_map(|x| x)`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely by using `flatten`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let iter = vec![vec![0]].into_iter();
+ /// iter.flat_map(|x| x);
+ /// ```
+ /// Can be written as
+ /// ```rust
+ /// # let iter = vec![vec![0]].into_iter();
+ /// iter.flatten();
+ /// ```
+ #[clippy::version = "1.39.0"]
+ pub FLAT_MAP_IDENTITY,
+ complexity,
+ "call to `flat_map` where `flatten` is sufficient"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for an iterator or string search (such as `find()`,
+ /// `position()`, or `rposition()`) followed by a call to `is_some()` or `is_none()`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as:
+ /// * `_.any(_)`, or `_.contains(_)` for `is_some()`,
+ /// * `!_.any(_)`, or `!_.contains(_)` for `is_none()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #![allow(unused)]
+ /// let vec = vec![1];
+ /// vec.iter().find(|x| **x == 0).is_some();
+ ///
+ /// "hello world".find("world").is_none();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let vec = vec![1];
+ /// vec.iter().any(|x| *x == 0);
+ ///
+ /// # #[allow(unused)]
+ /// !"hello world".contains("world");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SEARCH_IS_SOME,
+ complexity,
+ "using an iterator or string search followed by `is_some()` or `is_none()`, which is more succinctly expressed as a call to `any()` or `contains()` (with negation in case of `is_none()`)"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.chars().next()` on a `str` to check
+ /// if it starts with a given char.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.starts_with(_)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let name = "foo";
+ /// if name.chars().next() == Some('_') {};
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let name = "foo";
+ /// if name.starts_with('_') {};
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CHARS_NEXT_CMP,
+ style,
+ "using `.chars().next()` to check if a string starts with a char"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `.or(foo(..))`, `.unwrap_or(foo(..))`,
+ /// etc., and suggests to use `or_else`, `unwrap_or_else`, etc., or
+ /// `unwrap_or_default` instead.
+ ///
+ /// ### Why is this bad?
+ /// The function will always be called and potentially
+ /// allocate an object acting as the default.
+ ///
+ /// ### Known problems
+ /// If the function has side-effects, not calling it will
+ /// change the semantics of the program, but you shouldn't rely on that anyway.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let foo = Some(String::new());
+ /// foo.unwrap_or(String::new());
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let foo = Some(String::new());
+ /// foo.unwrap_or_else(String::new);
+ ///
+ /// // or
+ ///
+ /// # let foo = Some(String::new());
+ /// foo.unwrap_or_default();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OR_FUN_CALL,
+ perf,
+ "using any `*or` method with a function call, which suggests `*or_else`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `.or(…).unwrap()` calls to Options and Results.
+ ///
+ /// ### Why is this bad?
+ /// You should use `.unwrap_or(…)` instead for clarity.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let fallback = "fallback";
+ /// // Result
+ /// # type Error = &'static str;
+ /// # let result: Result<&str, Error> = Err("error");
+ /// let value = result.or::<Error>(Ok(fallback)).unwrap();
+ ///
+ /// // Option
+ /// # let option: Option<&str> = None;
+ /// let value = option.or(Some(fallback)).unwrap();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let fallback = "fallback";
+ /// // Result
+ /// # let result: Result<&str, &str> = Err("error");
+ /// let value = result.unwrap_or(fallback);
+ ///
+ /// // Option
+ /// # let option: Option<&str> = None;
+ /// let value = option.unwrap_or(fallback);
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub OR_THEN_UNWRAP,
+ complexity,
+ "checks for `.or(…).unwrap()` calls to Options and Results."
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `.expect(&format!(...))`, `.expect(foo(..))`,
+ /// etc., and suggests to use `unwrap_or_else` instead
+ ///
+ /// ### Why is this bad?
+ /// The function will always be called.
+ ///
+ /// ### Known problems
+ /// If the function has side-effects, not calling it will
+ /// change the semantics of the program, but you shouldn't rely on that anyway.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let foo = Some(String::new());
+ /// # let err_code = "418";
+ /// # let err_msg = "I'm a teapot";
+ /// foo.expect(&format!("Err {}: {}", err_code, err_msg));
+ ///
+ /// // or
+ ///
+ /// # let foo = Some(String::new());
+ /// foo.expect(format!("Err {}: {}", err_code, err_msg).as_str());
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let foo = Some(String::new());
+ /// # let err_code = "418";
+ /// # let err_msg = "I'm a teapot";
+ /// foo.unwrap_or_else(|| panic!("Err {}: {}", err_code, err_msg));
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EXPECT_FUN_CALL,
+ perf,
+ "using any `expect` method with a function call"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.clone()` on a `Copy` type.
+ ///
+ /// ### Why is this bad?
+ /// The only reason `Copy` types implement `Clone` is for
+ /// generics, not for using the `clone` method on a concrete type.
+ ///
+ /// ### Example
+ /// ```rust
+ /// 42u64.clone();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CLONE_ON_COPY,
+ complexity,
+ "using `clone` on a `Copy` type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.clone()` on a ref-counted pointer,
+ /// (`Rc`, `Arc`, `rc::Weak`, or `sync::Weak`), and suggests calling Clone via unified
+ /// function syntax instead (e.g., `Rc::clone(foo)`).
+ ///
+ /// ### Why is this bad?
+ /// Calling `.clone()` on an `Rc`, `Arc`, or `Weak`
+ /// can obscure the fact that only the pointer is being cloned, not the underlying
+ /// data.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// let x = Rc::new(1);
+ ///
+ /// x.clone();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// # let x = Rc::new(1);
+ /// Rc::clone(&x);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CLONE_ON_REF_PTR,
+ restriction,
+ "using 'clone' on a ref-counted pointer"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.clone()` on an `&&T`.
+ ///
+ /// ### Why is this bad?
+ /// Cloning an `&&T` copies the inner `&T`, instead of
+ /// cloning the underlying `T`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn main() {
+ /// let x = vec![1];
+ /// let y = &&x;
+ /// let z = y.clone();
+ /// println!("{:p} {:p}", *y, z); // prints out the same pointer
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CLONE_DOUBLE_REF,
+ correctness,
+ "using `clone` on `&&T`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.to_string()` on an `&&T` where
+ /// `T` implements `ToString` directly (like `&&str` or `&&String`).
+ ///
+ /// ### Why is this bad?
+ /// This bypasses the specialized implementation of
+ /// `ToString` and instead goes through the more expensive string formatting
+ /// facilities.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // Generic implementation for `T: Display` is used (slow)
+ /// ["foo", "bar"].iter().map(|s| s.to_string());
+ ///
+ /// // OK, the specialized impl is used
+ /// ["foo", "bar"].iter().map(|&s| s.to_string());
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub INEFFICIENT_TO_STRING,
+ pedantic,
+ "using `to_string` on `&&T` where `T: ToString`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `new` not returning a type that contains `Self`.
+ ///
+ /// ### Why is this bad?
+ /// As a convention, `new` methods are used to make a new
+ /// instance of a type.
+ ///
+ /// ### Example
+ /// In an impl block:
+ /// ```rust
+ /// # struct Foo;
+ /// # struct NotAFoo;
+ /// impl Foo {
+ /// fn new() -> NotAFoo {
+ /// # NotAFoo
+ /// }
+ /// }
+ /// ```
+ ///
+ /// ```rust
+ /// # struct Foo;
+ /// struct Bar(Foo);
+ /// impl Foo {
+ /// // Bad. The type name must contain `Self`
+ /// fn new() -> Bar {
+ /// # Bar(Foo)
+ /// }
+ /// }
+ /// ```
+ ///
+ /// ```rust
+ /// # struct Foo;
+ /// # struct FooError;
+ /// impl Foo {
+ /// // Good. Return type contains `Self`
+ /// fn new() -> Result<Foo, FooError> {
+ /// # Ok(Foo)
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Or in a trait definition:
+ /// ```rust
+ /// pub trait Trait {
+ /// // Bad. The return type does not contain `Self`
+ /// fn new();
+ /// }
+ /// ```
+ ///
+ /// ```rust
+ /// pub trait Trait {
+ /// // Good. Return type contains `Self`
+ /// fn new() -> Self;
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEW_RET_NO_SELF,
+ style,
+ "not returning type containing `Self` in a `new` method"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for string methods that receive a single-character
+ /// `str` as an argument, e.g., `_.split("x")`.
+ ///
+ /// ### Why is this bad?
+ /// Performing these methods using a `char` is faster than
+ /// using a `str`.
+ ///
+ /// ### Known problems
+ /// Does not catch multi-byte unicode characters.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// _.split("x");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// _.split('x');
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SINGLE_CHAR_PATTERN,
+ perf,
+ "using a single-character str where a char could be used, e.g., `_.split(\"x\")`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calling `.step_by(0)` on iterators which panics.
+ ///
+ /// ### Why is this bad?
+ /// This very much looks like an oversight. Use `panic!()` instead if you
+ /// actually intend to panic.
+ ///
+ /// ### Example
+ /// ```rust,should_panic
+ /// for x in (0..100).step_by(0) {
+ /// //..
+ /// }
+ /// ```
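+ ///
+ /// Use instead (illustrative; assuming a non-zero step, e.g. 1, was intended):
+ /// ```rust
+ /// for x in (0..100).step_by(1) {
+ ///     //..
+ /// }
+ /// ```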
+ #[clippy::version = "pre 1.29.0"]
+ pub ITERATOR_STEP_BY_ZERO,
+ correctness,
+ "using `Iterator::step_by(0)`, which will panic at runtime"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for indirect collection of populated `Option`s, i.e. iterator
+ /// chains of the form `_.filter(Option::is_some).map(Option::unwrap)`.
+ ///
+ /// ### Why is this bad?
+ /// `Option` is like a collection of 0-1 things, so `flatten`
+ /// automatically does this without suspicious-looking `unwrap` calls.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = std::iter::empty::<Option<i32>>().filter(Option::is_some).map(Option::unwrap);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let _ = std::iter::empty::<Option<i32>>().flatten();
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub OPTION_FILTER_MAP,
+ complexity,
+ "filtering `Option` for `Some` then force-unwrapping, which can be one type-safe operation"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of `iter.nth(0)`.
+ ///
+ /// ### Why is this bad?
+ /// `iter.next()` is equivalent to
+ /// `iter.nth(0)`, as they both consume the next element,
+ /// but `.next()` is more readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::collections::HashSet;
+ /// # let mut s = HashSet::new();
+ /// # s.insert(1);
+ /// let x = s.iter().nth(0);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::collections::HashSet;
+ /// # let mut s = HashSet::new();
+ /// # s.insert(1);
+ /// let x = s.iter().next();
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub ITER_NTH_ZERO,
+ style,
+ "replace `iter.nth(0)` with `iter.next()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `.iter().nth()` (and the related
+ /// `.iter_mut().nth()`) on standard library types with *O*(1) element access.
+ ///
+ /// ### Why is this bad?
+ /// `.get()` and `.get_mut()` are more efficient and more
+ /// readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let some_vec = vec![0, 1, 2, 3];
+ /// let bad_vec = some_vec.iter().nth(3);
+ /// let bad_slice = &some_vec[..].iter().nth(3);
+ /// ```
+ /// The correct use would be:
+ /// ```rust
+ /// let some_vec = vec![0, 1, 2, 3];
+ /// let bad_vec = some_vec.get(3);
+ /// let bad_slice = &some_vec[..].get(3);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ITER_NTH,
+ perf,
+ "using `.iter().nth()` on a standard library type with O(1) element access"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `.skip(x).next()` on iterators.
+ ///
+ /// ### Why is this bad?
+ /// `.nth(x)` is cleaner
+ ///
+ /// ### Example
+ /// ```rust
+ /// let some_vec = vec![0, 1, 2, 3];
+ /// let bad_vec = some_vec.iter().skip(3).next();
+ /// let bad_slice = &some_vec[..].iter().skip(3).next();
+ /// ```
+ /// The correct use would be:
+ /// ```rust
+ /// let some_vec = vec![0, 1, 2, 3];
+ /// let bad_vec = some_vec.iter().nth(3);
+ /// let bad_slice = &some_vec[..].iter().nth(3);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ITER_SKIP_NEXT,
+ style,
+ "using `.skip(x).next()` on an iterator"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `.drain(..)` on `Vec` and `VecDeque` for iteration.
+ ///
+ /// ### Why is this bad?
+ /// `.into_iter()` is simpler with better performance.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::collections::HashSet;
+ /// let mut foo = vec![0, 1, 2, 3];
+ /// let bar: HashSet<usize> = foo.drain(..).collect();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::collections::HashSet;
+ /// let foo = vec![0, 1, 2, 3];
+ /// let bar: HashSet<usize> = foo.into_iter().collect();
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub ITER_WITH_DRAIN,
+ nursery,
+ "replace `.drain(..)` with `.into_iter()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for using `x.get(x.len() - 1)` instead of
+ /// `x.last()`.
+ ///
+ /// ### Why is this bad?
+ /// Using `x.last()` is easier to read and has the same
+ /// result.
+ ///
+ /// Note that using `x[x.len() - 1]` is semantically different from
+ /// `x.last()`. Indexing into the array will panic on out-of-bounds
+ /// accesses, while `x.get()` and `x.last()` will return `None`.
+ ///
+ /// There is another lint (get_unwrap) that covers the case of using
+ /// `x.get(index).unwrap()` instead of `x[index]`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = vec![2, 3, 5];
+ /// let last_element = x.get(x.len() - 1);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x = vec![2, 3, 5];
+ /// let last_element = x.last();
+ /// ```
+ #[clippy::version = "1.37.0"]
+ pub GET_LAST_WITH_LEN,
+ complexity,
+ "Using `x.get(x.len() - 1)` when `x.last()` is correct and simpler"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `.get().unwrap()` (or
+ /// `.get_mut().unwrap()`) on a standard library type which implements `Index`.
+ ///
+ /// ### Why is this bad?
+ /// Using the `Index` trait (`[]`) is clearer and more
+ /// concise.
+ ///
+ /// ### Known problems
+ /// Not a replacement for error handling: Using either
+ /// `.unwrap()` or the Index trait (`[]`) carries the risk of causing a `panic`
+ /// if the value being accessed is `None`. If the use of `.get().unwrap()` is a
+ /// temporary placeholder for dealing with the `Option` type, then this does
+ /// not mitigate the need for error handling. If there is a chance that `.get()`
+ /// will be `None` in your program, then it is advisable that the `None` case
+ /// is handled in a future refactor instead of using `.unwrap()` or the Index
+ /// trait.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut some_vec = vec![0, 1, 2, 3];
+ /// let last = some_vec.get(3).unwrap();
+ /// *some_vec.get_mut(0).unwrap() = 1;
+ /// ```
+ /// The correct use would be:
+ /// ```rust
+ /// let mut some_vec = vec![0, 1, 2, 3];
+ /// let last = some_vec[3];
+ /// some_vec[0] = 1;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub GET_UNWRAP,
+ restriction,
+ "using `.get().unwrap()` or `.get_mut().unwrap()` when using `[]` would work instead"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for occurrences where one vector gets extended with another vector's drained contents instead of using `append`.
+ ///
+ /// ### Why is this bad?
+ /// Using `append` instead of `extend` is more concise and faster.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut a = vec![1, 2, 3];
+ /// let mut b = vec![4, 5, 6];
+ ///
+ /// a.extend(b.drain(..));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let mut a = vec![1, 2, 3];
+ /// let mut b = vec![4, 5, 6];
+ ///
+ /// a.append(&mut b);
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub EXTEND_WITH_DRAIN,
+ perf,
+ "using vec.append(&mut vec) to move the full range of a vector to another"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of `.extend(s.chars())` where `s` is a
+ /// `&str` or `String`.
+ ///
+ /// ### Why is this bad?
+ /// `.push_str(s)` is clearer
+ ///
+ /// ### Example
+ /// ```rust
+ /// let abc = "abc";
+ /// let def = String::from("def");
+ /// let mut s = String::new();
+ /// s.extend(abc.chars());
+ /// s.extend(def.chars());
+ /// ```
+ /// The correct use would be:
+ /// ```rust
+ /// let abc = "abc";
+ /// let def = String::from("def");
+ /// let mut s = String::new();
+ /// s.push_str(abc);
+ /// s.push_str(&def);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub STRING_EXTEND_CHARS,
+ style,
+ "using `x.extend(s.chars())` where s is a `&str` or `String`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of `.cloned().collect()` on a slice to
+ /// create a `Vec`.
+ ///
+ /// ### Why is this bad?
+ /// `.to_vec()` is clearer
+ ///
+ /// ### Example
+ /// ```rust
+ /// let s = [1, 2, 3, 4, 5];
+ /// let s2: Vec<isize> = s[..].iter().cloned().collect();
+ /// ```
+ /// The better use would be:
+ /// ```rust
+ /// let s = [1, 2, 3, 4, 5];
+ /// let s2: Vec<isize> = s.to_vec();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ITER_CLONED_COLLECT,
+ style,
+ "using `.cloned().collect()` on slice to create a `Vec`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.chars().last()` or
+ /// `_.chars().next_back()` on a `str` to check if it ends with a given char.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.ends_with(_)`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let name = "_";
+ /// name.chars().last() == Some('_') || name.chars().next_back() == Some('-');
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let name = "_";
+ /// name.ends_with('_') || name.ends_with('-');
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CHARS_LAST_CMP,
+ style,
+ "using `.chars().last()` or `.chars().next_back()` to check if a string ends with a char"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.as_ref()` or `.as_mut()` where the
+ /// types before and after the call are the same.
+ ///
+ /// ### Why is this bad?
+ /// The call is unnecessary.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn do_stuff(x: &[i32]) {}
+ /// let x: &[i32] = &[1, 2, 3, 4, 5];
+ /// do_stuff(x.as_ref());
+ /// ```
+ /// The correct use would be:
+ /// ```rust
+ /// # fn do_stuff(x: &[i32]) {}
+ /// let x: &[i32] = &[1, 2, 3, 4, 5];
+ /// do_stuff(x);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USELESS_ASREF,
+ complexity,
+ "using `as_ref` where the types before and after the call are the same"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for using `fold` when a more succinct alternative exists.
+ /// Specifically, this checks for `fold`s which could be replaced by `any`, `all`,
+ /// `sum` or `product`.
+ ///
+ /// ### Why is this bad?
+ /// Readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #[allow(unused)]
+ /// (0..3).fold(false, |acc, x| acc || x > 2);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// (0..3).any(|x| x > 2);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNNECESSARY_FOLD,
+ style,
+ "using `fold` when a more succinct alternative exists"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `filter_map` calls that could be replaced by `filter` or `map`.
+ /// More specifically it checks if the closure provided is only performing one of the
+ /// filter or map operations and suggests the appropriate option.
+ ///
+ /// ### Why is this bad?
+ /// Complexity. The intent is also clearer if only a single
+ /// operation is being performed.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = (0..3).filter_map(|x| if x > 2 { Some(x) } else { None });
+ ///
+ /// // As there is no transformation of the argument this could be written as:
+ /// let _ = (0..3).filter(|&x| x > 2);
+ /// ```
+ ///
+ /// ```rust
+ /// let _ = (0..4).filter_map(|x| Some(x + 1));
+ ///
+ /// // As there is no conditional check on the argument this could be written as:
+ /// let _ = (0..4).map(|x| x + 1);
+ /// ```
+ #[clippy::version = "1.31.0"]
+ pub UNNECESSARY_FILTER_MAP,
+ complexity,
+ "using `filter_map` when a more succinct alternative exists"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `find_map` calls that could be replaced by `find` or `map`. More
+ /// specifically it checks if the closure provided is only performing one of the
+ /// find or map operations and suggests the appropriate option.
+ ///
+ /// ### Why is this bad?
+ /// Complexity. The intent is also clearer if only a single
+ /// operation is being performed.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = (0..3).find_map(|x| if x > 2 { Some(x) } else { None });
+ ///
+ /// // As there is no transformation of the argument this could be written as:
+ /// let _ = (0..3).find(|&x| x > 2);
+ /// ```
+ ///
+ /// ```rust
+ /// let _ = (0..4).find_map(|x| Some(x + 1));
+ ///
+ /// // As there is no conditional check on the argument this could be written as:
+ /// let _ = (0..4).map(|x| x + 1).next();
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub UNNECESSARY_FIND_MAP,
+ complexity,
+ "using `find_map` when a more succinct alternative exists"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `into_iter` calls on references which should be replaced by `iter`
+ /// or `iter_mut`.
+ ///
+ /// ### Why is this bad?
+ /// Readability. Calling `into_iter` on a reference will not move out its
+ /// content into the resulting iterator, which is confusing. It is better to just call `iter` or
+ /// `iter_mut` directly.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let vec = vec![3, 4, 5];
+ /// (&vec).into_iter();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let vec = vec![3, 4, 5];
+ /// (&vec).iter();
+ /// ```
+ #[clippy::version = "1.32.0"]
+ pub INTO_ITER_ON_REF,
+ style,
+ "using `.into_iter()` on a reference"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `map` followed by a `count`.
+ ///
+ /// ### Why is this bad?
+ /// It looks suspicious. Maybe `map` was confused with `filter`.
+ /// If the `map` call is intentional, this should be rewritten
+ /// using `inspect`. Or, if you intend to drive the iterator to
+ /// completion, you can just use `for_each` instead.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _ = (0..3).map(|x| x + 2).count();
+ /// ```
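+ ///
+ /// If only the count is needed, the `map` call can simply be dropped
+ /// (illustrative):
+ /// ```rust
+ /// let _ = (0..3).count();
+ /// ```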
+ #[clippy::version = "1.39.0"]
+ pub SUSPICIOUS_MAP,
+ suspicious,
+ "suspicious usage of map"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `MaybeUninit::uninit().assume_init()`.
+ ///
+ /// ### Why is this bad?
+ /// For most types, this is undefined behavior.
+ ///
+ /// ### Known problems
+ /// For now, we accept empty tuples and tuples / arrays
+ /// of `MaybeUninit`. There may be other types that allow uninitialized
+ /// data, but those are not yet rigorously defined.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // Beware the UB
+ /// use std::mem::MaybeUninit;
+ ///
+ /// let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
+ /// ```
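+ ///
+ /// A sound alternative is to write a value before calling `assume_init`
+ /// (an illustrative sketch):
+ ///
+ /// ```rust
+ /// use std::mem::MaybeUninit;
+ ///
+ /// let mut x = MaybeUninit::<usize>::uninit();
+ /// x.write(5);
+ /// let _: usize = unsafe { x.assume_init() };
+ /// ```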
+ ///
+ /// Note that the following is OK:
+ ///
+ /// ```rust
+ /// use std::mem::MaybeUninit;
+ ///
+ /// let _: [MaybeUninit<bool>; 5] = unsafe {
+ /// MaybeUninit::uninit().assume_init()
+ /// };
+ /// ```
+ #[clippy::version = "1.39.0"]
+ pub UNINIT_ASSUMED_INIT,
+ correctness,
+ "`MaybeUninit::uninit().assume_init()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`.
+ ///
+ /// ### Why is this bad?
+ /// These can be written simply with `saturating_add/sub` methods.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let y: u32 = 0;
+ /// # let x: u32 = 100;
+ /// let add = x.checked_add(y).unwrap_or(u32::MAX);
+ /// let sub = x.checked_sub(y).unwrap_or(u32::MIN);
+ /// ```
+ ///
+ /// can be written using dedicated methods for saturating addition/subtraction as:
+ ///
+ /// ```rust
+ /// # let y: u32 = 0;
+ /// # let x: u32 = 100;
+ /// let add = x.saturating_add(y);
+ /// let sub = x.saturating_sub(y);
+ /// ```
+ #[clippy::version = "1.39.0"]
+ pub MANUAL_SATURATING_ARITHMETIC,
+ style,
+ "`.checked_add/sub(x).unwrap_or(MAX/MIN)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `offset(_)`, `wrapping_add(_)`, `wrapping_sub(_)`, etc. on raw
+ /// pointers to zero-sized types.
+ ///
+ /// ### Why is this bad?
+ /// This is a no-op, and likely unintended.
+ ///
+ /// ### Example
+ /// ```rust
+ /// unsafe { (&() as *const ()).offset(1) };
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub ZST_OFFSET,
+ correctness,
+ "Check for offset calculations on raw pointers to zero-sized types"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `FileType::is_file()`.
+ ///
+ /// ### Why is this bad?
+ /// When people test a file type with `FileType::is_file`,
+ /// they are testing whether a path is something they can get bytes from. But
+ /// `is_file` doesn't cover special file types on Unix-like systems, and doesn't cover
+ /// symlinks on Windows. Using `!FileType::is_dir()` is a better way to express that intent.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # || {
+ /// let metadata = std::fs::metadata("foo.txt")?;
+ /// let filetype = metadata.file_type();
+ ///
+ /// if filetype.is_file() {
+ /// // read file
+ /// }
+ /// # Ok::<_, std::io::Error>(())
+ /// # };
+ /// ```
+ ///
+ /// should be written as:
+ ///
+ /// ```rust
+ /// # || {
+ /// let metadata = std::fs::metadata("foo.txt")?;
+ /// let filetype = metadata.file_type();
+ ///
+ /// if !filetype.is_dir() {
+ /// // read file
+ /// }
+ /// # Ok::<_, std::io::Error>(())
+ /// # };
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub FILETYPE_IS_FILE,
+ restriction,
+ "`FileType::is_file` is not recommended to test for readable file type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.as_ref().map(Deref::deref)` or its aliases (such as `String::as_str`).
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely as
+ /// `_.as_deref()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let opt = Some("".to_string());
+ /// opt.as_ref().map(String::as_str)
+ /// # ;
+ /// ```
+ /// Can be written as
+ /// ```rust
+ /// # let opt = Some("".to_string());
+ /// opt.as_deref()
+ /// # ;
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub OPTION_AS_REF_DEREF,
+ complexity,
+ "using `as_ref().map(Deref::deref)`, which is more succinctly expressed as `as_deref()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `iter().next()` on a slice or an array.
+ ///
+ /// ### Why is this bad?
+ /// These can be shortened into `.get()`
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let a = [1, 2, 3];
+ /// # let b = vec![1, 2, 3];
+ /// a[2..].iter().next();
+ /// b.iter().next();
+ /// ```
+ /// should be written as:
+ /// ```rust
+ /// # let a = [1, 2, 3];
+ /// # let b = vec![1, 2, 3];
+ /// a.get(2);
+ /// b.get(0);
+ /// ```
+ #[clippy::version = "1.46.0"]
+ pub ITER_NEXT_SLICE,
+ style,
+ "using `.iter().next()` on a sliced array, which can be shortened to just `.get()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns when using `push_str`/`insert_str` with a single-character string literal
+ /// where `push`/`insert` with a `char` would work fine.
+ ///
+ /// ### Why is this bad?
+ /// It's less clear that we are pushing a single character.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let mut string = String::new();
+ /// string.insert_str(0, "R");
+ /// string.push_str("R");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let mut string = String::new();
+ /// string.insert(0, 'R');
+ /// string.push('R');
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub SINGLE_CHAR_ADD_STR,
+ style,
+ "`push_str()` or `insert_str()` used with a single-character string literal as parameter"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// As the counterpart to `or_fun_call`, this lint looks for unnecessary
+ /// lazily evaluated closures on `Option` and `Result`.
+ ///
+ /// This lint suggests changing the following functions, when eager evaluation results in
+ /// simpler code:
+ /// - `unwrap_or_else` to `unwrap_or`
+ /// - `and_then` to `and`
+ /// - `or_else` to `or`
+ /// - `get_or_insert_with` to `get_or_insert`
+ /// - `ok_or_else` to `ok_or`
+ ///
+ /// ### Why is this bad?
+ /// Using eager evaluation is shorter and simpler in some cases.
+ ///
+ /// ### Known problems
+ /// It is possible, but not recommended, for `Deref` and `Index` to have
+ /// side effects. Eagerly evaluating them can change the semantics of the program.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // example code where clippy issues a warning
+ /// let opt: Option<u32> = None;
+ ///
+ /// opt.unwrap_or_else(|| 42);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let opt: Option<u32> = None;
+ ///
+ /// opt.unwrap_or(42);
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub UNNECESSARY_LAZY_EVALUATIONS,
+ style,
+ "using unnecessary lazy evaluation, which can be replaced with simpler eager evaluation"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_.map(_).collect::<Result<(), _>>()`.
+ ///
+ /// ### Why is this bad?
+ /// Using `try_for_each` instead is more readable and idiomatic.
+ ///
+ /// ### Example
+ /// ```rust
+ /// (0..3).map(|t| Err(t)).collect::<Result<(), _>>();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// (0..3).try_for_each(|t| Err(t));
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub MAP_COLLECT_RESULT_UNIT,
+ style,
+ "using `.map(_).collect::<Result<(),_>()`, which can be replaced with `try_for_each`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `from_iter()` function calls on types that implement the `FromIterator`
+ /// trait.
+ ///
+ /// ### Why is this bad?
+ /// It is recommended style to use `collect`. See
+ /// [FromIterator documentation](https://doc.rust-lang.org/std/iter/trait.FromIterator.html)
+ ///
+ /// ### Example
+ /// ```rust
+ /// let five_fives = std::iter::repeat(5).take(5);
+ ///
+ /// let v = Vec::from_iter(five_fives);
+ ///
+ /// assert_eq!(v, vec![5, 5, 5, 5, 5]);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let five_fives = std::iter::repeat(5).take(5);
+ ///
+ /// let v: Vec<i32> = five_fives.collect();
+ ///
+ /// assert_eq!(v, vec![5, 5, 5, 5, 5]);
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub FROM_ITER_INSTEAD_OF_COLLECT,
+ pedantic,
+ "use `.collect()` instead of `::from_iter()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `inspect().for_each()`.
+ ///
+ /// ### Why is this bad?
+ /// It is the same as performing the computation
+ /// inside `inspect` at the beginning of the closure in `for_each`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// [1,2,3,4,5].iter()
+ /// .inspect(|&x| println!("inspect the number: {}", x))
+ /// .for_each(|&x| {
+ /// assert!(x >= 0);
+ /// });
+ /// ```
+ /// Can be written as
+ /// ```rust
+ /// [1,2,3,4,5].iter()
+ /// .for_each(|&x| {
+ /// println!("inspect the number: {}", x);
+ /// assert!(x >= 0);
+ /// });
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub INSPECT_FOR_EACH,
+ complexity,
+ "using `.inspect().for_each()`, which can be replaced with `.for_each()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `filter_map(|x| x)`.
+ ///
+ /// ### Why is this bad?
+ /// Readability, this can be written more concisely by using `flatten`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let iter = vec![Some(1)].into_iter();
+ /// iter.filter_map(|x| x);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let iter = vec![Some(1)].into_iter();
+ /// iter.flatten();
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub FILTER_MAP_IDENTITY,
+ complexity,
+ "call to `filter_map` where `flatten` is sufficient"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for instances of `map(f)` where `f` is the identity function.
+ ///
+ /// ### Why is this bad?
+ /// It can be written more concisely without the call to `map`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = [1, 2, 3];
+ /// let y: Vec<_> = x.iter().map(|x| x).map(|x| 2*x).collect();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = [1, 2, 3];
+ /// let y: Vec<_> = x.iter().map(|x| 2*x).collect();
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub MAP_IDENTITY,
+ complexity,
+ "using iterator.map(|x| x)"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of `.bytes().nth()`.
+ ///
+ /// ### Why is this bad?
+ /// `.as_bytes().get()` is more efficient and more
+ /// readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #[allow(unused)]
+ /// "Hello".bytes().nth(3);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # #[allow(unused)]
+ /// "Hello".as_bytes().get(3);
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub BYTES_NTH,
+ style,
+ "replace `.bytes().nth()` with `.as_bytes().get()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the usage of `_.to_owned()`, `vec.to_vec()`, or similar when calling `_.clone()` would be clearer.
+ ///
+ /// ### Why is this bad?
+ /// These methods do the same thing as `_.clone()` but may be confusing as
+ /// to why we are calling `to_vec` on something that is already a `Vec` or calling `to_owned` on something that is already owned.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = vec![1, 2, 3];
+ /// let b = a.to_vec();
+ /// let c = a.to_owned();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let a = vec![1, 2, 3];
+ /// let b = a.clone();
+ /// let c = a.clone();
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub IMPLICIT_CLONE,
+ pedantic,
+ "implicitly cloning a value by invoking a function on its dereferenced type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of `.iter().count()`.
+ ///
+ /// ### Why is this bad?
+ /// `.len()` is more efficient and more
+ /// readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # #![allow(unused)]
+ /// let some_vec = vec![0, 1, 2, 3];
+ ///
+ /// some_vec.iter().count();
+ /// &some_vec[..].iter().count();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let some_vec = vec![0, 1, 2, 3];
+ ///
+ /// some_vec.len();
+ /// &some_vec[..].len();
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub ITER_COUNT,
+ complexity,
+ "replace `.iter().count()` with `.len()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to [`splitn`]
+ /// (https://doc.rust-lang.org/std/primitive.str.html#method.splitn) and
+ /// related functions with either zero or one splits.
+ ///
+ /// ### Why is this bad?
+ /// These calls don't actually split the value and are
+ /// likely to be intended as a different number.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let s = "";
+ /// for x in s.splitn(1, ":") {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let s = "";
+ /// for x in s.splitn(2, ":") {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "1.54.0"]
+ pub SUSPICIOUS_SPLITN,
+ correctness,
+ "checks for `.splitn(0, ..)` and `.splitn(1, ..)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual implementations of `str::repeat`
+ ///
+ /// ### Why is this bad?
+ /// Such manual implementations are both harder to read and less performant.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: String = std::iter::repeat('x').take(10).collect();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x: String = "x".repeat(10);
+ /// ```
+ #[clippy::version = "1.54.0"]
+ pub MANUAL_STR_REPEAT,
+ perf,
+ "manual implementation of `str::repeat`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `str::splitn(2, _)`
+ ///
+ /// ### Why is this bad?
+ /// `split_once` is both clearer in intent and slightly more efficient.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let s = "key=value=add";
+ /// let (key, value) = s.splitn(2, '=').next_tuple()?;
+ /// let value = s.splitn(2, '=').nth(1)?;
+ ///
+ /// let mut parts = s.splitn(2, '=');
+ /// let key = parts.next()?;
+ /// let value = parts.next()?;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// let s = "key=value=add";
+ /// let (key, value) = s.split_once('=')?;
+ /// let value = s.split_once('=')?.1;
+ ///
+ /// let (key, value) = s.split_once('=')?;
+ /// ```
+ ///
+ /// ### Limitations
+ /// The multiple statement variant currently only detects `iter.next()?`/`iter.next().unwrap()`
+ /// in two separate `let` statements that immediately follow the `splitn()`
+ #[clippy::version = "1.57.0"]
+ pub MANUAL_SPLIT_ONCE,
+ complexity,
+ "replace `.splitn(2, pat)` with `.split_once(pat)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same.
+ ///
+ /// ### Why is this bad?
+ /// The function `split` is simpler and there is no performance difference in these cases, considering
+ /// that both functions return a lazy iterator.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let str = "key=value=add";
+ /// let _ = str.splitn(3, '=').next().unwrap();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let str = "key=value=add";
+ /// let _ = str.split('=').next().unwrap();
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub NEEDLESS_SPLITN,
+ complexity,
+ "usages of `str::splitn` that can be replaced with `str::split`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary calls to [`ToOwned::to_owned`](https://doc.rust-lang.org/std/borrow/trait.ToOwned.html#tymethod.to_owned)
+ /// and other `to_owned`-like functions.
+ ///
+ /// ### Why is this bad?
+ /// The unnecessary calls result in useless allocations.
+ ///
+ /// ### Known problems
+ /// `unnecessary_to_owned` can falsely trigger if `IntoIterator::into_iter` is applied to an
+ /// owned copy of a resource and the resource is later used mutably. See
+ /// [#8148](https://github.com/rust-lang/rust-clippy/issues/8148).
+ ///
+ /// ### Example
+ /// ```rust
+ /// let path = std::path::Path::new("x");
+ /// foo(&path.to_string_lossy().to_string());
+ /// fn foo(s: &str) {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let path = std::path::Path::new("x");
+ /// foo(&path.to_string_lossy());
+ /// fn foo(s: &str) {}
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub UNNECESSARY_TO_OWNED,
+ perf,
+ "unnecessary calls to `to_owned`-like functions"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `.collect::<Vec<String>>().join("")` on iterators.
+ ///
+ /// ### Why is this bad?
+ /// `.collect::<String>()` is more concise and might be more performant
+ ///
+ /// ### Example
+ /// ```rust
+ /// let vector = vec!["hello", "world"];
+ /// let output = vector.iter().map(|item| item.to_uppercase()).collect::<Vec<String>>().join("");
+ /// println!("{}", output);
+ /// ```
+ /// The correct use would be:
+ /// ```rust
+ /// let vector = vec!["hello", "world"];
+ /// let output = vector.iter().map(|item| item.to_uppercase()).collect::<String>();
+ /// println!("{}", output);
+ /// ```
+ /// ### Known problems
+ /// While `.collect::<String>()` is sometimes more performant, there are cases where
+ /// using `.collect::<String>()` over `.collect::<Vec<String>>().join("")`
+ /// will prevent loop unrolling and will result in a negative performance impact.
+ ///
+ /// Additionally, differences have been observed between aarch64 and x86_64 assembly output,
+ /// with aarch64 tending to produce faster assembly in more cases when using `.collect::<String>()`.
+ #[clippy::version = "1.61.0"]
+ pub UNNECESSARY_JOIN,
+ pedantic,
+ "using `.collect::<Vec<String>>().join(\"\")` on an iterator"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for no-op uses of `Option::{as_deref, as_deref_mut}`;
+ /// for example, calling `as_deref()` on an `Option<&T>` returns the same type.
+ ///
+ /// ### Why is this bad?
+ /// The call is redundant; removing it improves readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = Some(&1);
+ /// let b = a.as_deref(); // goes from Option<&i32> to Option<&i32>
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let a = Some(&1);
+ /// let b = a;
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub NEEDLESS_OPTION_AS_DEREF,
+ complexity,
+ "no-op use of `deref` or `deref_mut` method to `Option`."
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Finds usages of [`char::is_digit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_digit) that
+ /// can be replaced with [`is_ascii_digit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_ascii_digit) or
+ /// [`is_ascii_hexdigit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_ascii_hexdigit).
+ ///
+ /// ### Why is this bad?
+ /// `is_digit(..)` is slower and requires specifying the radix.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let c: char = '6';
+ /// c.is_digit(10);
+ /// c.is_digit(16);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let c: char = '6';
+ /// c.is_ascii_digit();
+ /// c.is_ascii_hexdigit();
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub IS_DIGIT_ASCII_RADIX,
+ style,
+ "use of `char::is_digit(..)` with literal radix of 10 or 16"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calling `take` function after `as_ref`.
+ ///
+ /// ### Why is this bad?
+ /// Redundant code. `take` writes `None` to its argument.
+ /// In this case the modification is useless as it's a temporary that cannot be read from afterwards.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = Some(3);
+ /// x.as_ref().take();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = Some(3);
+ /// x.as_ref();
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub NEEDLESS_OPTION_TAKE,
+ complexity,
+ "using `.as_ref().take()` on a temporary value"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `replace` calls which have no effect.
+ ///
+ /// ### Why is this bad?
+ /// It's either a mistake or confusing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// "1234".replace("12", "12");
+ /// "1234".replacen("12", "12", 1);
+ /// ```
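+ ///
+ /// If a replacement was actually intended, the two arguments should differ
+ /// (illustrative):
+ /// ```rust
+ /// let _ = "1234".replace("12", "21");
+ /// ```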
+ #[clippy::version = "1.62.0"]
+ pub NO_EFFECT_REPLACE,
+ suspicious,
+ "replace with no effect"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `.then_some(..).unwrap_or(..)`
+ ///
+ /// ### Why is this bad?
+ /// This can be written more clearly with `if .. else ..`
+ ///
+ /// ### Limitations
+ /// This lint currently only looks for usages of
+ /// `.then_some(..).unwrap_or(..)`, but will be expanded
+ /// to account for similar patterns.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = true;
+ /// x.then_some("a").unwrap_or("b");
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = true;
+ /// if x { "a" } else { "b" };
+ /// ```
+ #[clippy::version = "1.64.0"]
+ pub OBFUSCATED_IF_ELSE,
+ style,
+ "use of `.then_some(..).unwrap_or(..)` can be written \
+ more clearly with `if .. else ..`"
+}
+
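+/// The lint pass backing the method lints declared above; its fields hold the
+/// user-facing configuration this pass depends on (presumably populated from
+/// Clippy's configuration and the crate's MSRV).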
+pub struct Methods {
+ avoid_breaking_exported_api: bool,
+ msrv: Option<RustcVersion>,
+ allow_expect_in_tests: bool,
+ allow_unwrap_in_tests: bool,
+}
+
+impl Methods {
+ #[must_use]
+ pub fn new(
+ avoid_breaking_exported_api: bool,
+ msrv: Option<RustcVersion>,
+ allow_expect_in_tests: bool,
+ allow_unwrap_in_tests: bool,
+ ) -> Self {
+ Self {
+ avoid_breaking_exported_api,
+ msrv,
+ allow_expect_in_tests,
+ allow_unwrap_in_tests,
+ }
+ }
+}
+
+impl_lint_pass!(Methods => [
+ UNWRAP_USED,
+ EXPECT_USED,
+ SHOULD_IMPLEMENT_TRAIT,
+ WRONG_SELF_CONVENTION,
+ OK_EXPECT,
+ UNWRAP_OR_ELSE_DEFAULT,
+ MAP_UNWRAP_OR,
+ RESULT_MAP_OR_INTO_OPTION,
+ OPTION_MAP_OR_NONE,
+ BIND_INSTEAD_OF_MAP,
+ OR_FUN_CALL,
+ OR_THEN_UNWRAP,
+ EXPECT_FUN_CALL,
+ CHARS_NEXT_CMP,
+ CHARS_LAST_CMP,
+ CLONE_ON_COPY,
+ CLONE_ON_REF_PTR,
+ CLONE_DOUBLE_REF,
+ ITER_OVEREAGER_CLONED,
+ CLONED_INSTEAD_OF_COPIED,
+ FLAT_MAP_OPTION,
+ INEFFICIENT_TO_STRING,
+ NEW_RET_NO_SELF,
+ SINGLE_CHAR_PATTERN,
+ SINGLE_CHAR_ADD_STR,
+ SEARCH_IS_SOME,
+ FILTER_NEXT,
+ SKIP_WHILE_NEXT,
+ FILTER_MAP_IDENTITY,
+ MAP_IDENTITY,
+ MANUAL_FILTER_MAP,
+ MANUAL_FIND_MAP,
+ OPTION_FILTER_MAP,
+ FILTER_MAP_NEXT,
+ FLAT_MAP_IDENTITY,
+ MAP_FLATTEN,
+ ITERATOR_STEP_BY_ZERO,
+ ITER_NEXT_SLICE,
+ ITER_COUNT,
+ ITER_NTH,
+ ITER_NTH_ZERO,
+ BYTES_NTH,
+ ITER_SKIP_NEXT,
+ GET_UNWRAP,
+ GET_LAST_WITH_LEN,
+ STRING_EXTEND_CHARS,
+ ITER_CLONED_COLLECT,
+ ITER_WITH_DRAIN,
+ USELESS_ASREF,
+ UNNECESSARY_FOLD,
+ UNNECESSARY_FILTER_MAP,
+ UNNECESSARY_FIND_MAP,
+ INTO_ITER_ON_REF,
+ SUSPICIOUS_MAP,
+ UNINIT_ASSUMED_INIT,
+ MANUAL_SATURATING_ARITHMETIC,
+ ZST_OFFSET,
+ FILETYPE_IS_FILE,
+ OPTION_AS_REF_DEREF,
+ UNNECESSARY_LAZY_EVALUATIONS,
+ MAP_COLLECT_RESULT_UNIT,
+ FROM_ITER_INSTEAD_OF_COLLECT,
+ INSPECT_FOR_EACH,
+ IMPLICIT_CLONE,
+ SUSPICIOUS_SPLITN,
+ MANUAL_STR_REPEAT,
+ EXTEND_WITH_DRAIN,
+ MANUAL_SPLIT_ONCE,
+ NEEDLESS_SPLITN,
+ UNNECESSARY_TO_OWNED,
+ UNNECESSARY_JOIN,
+ ERR_EXPECT,
+ NEEDLESS_OPTION_AS_DEREF,
+ IS_DIGIT_ASCII_RADIX,
+ NEEDLESS_OPTION_TAKE,
+ NO_EFFECT_REPLACE,
+ OBFUSCATED_IF_ELSE,
+]);
+
+/// Extracts a method call name, args, and `Span` of the method name.
+fn method_call<'tcx>(recv: &'tcx hir::Expr<'tcx>) -> Option<(&'tcx str, &'tcx [hir::Expr<'tcx>], Span)> {
+ if let ExprKind::MethodCall(path, args, _) = recv.kind {
+ if !args.iter().any(|e| e.span.from_expansion()) {
+ let name = path.ident.name.as_str();
+ return Some((name, args, path.ident.span));
+ }
+ }
+ None
+}
+
+impl<'tcx> LateLintPass<'tcx> for Methods {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ self.check_methods(cx, expr);
+
+ match expr.kind {
+ hir::ExprKind::Call(func, args) => {
+ from_iter_instead_of_collect::check(cx, expr, args, func);
+ },
+ hir::ExprKind::MethodCall(method_call, args, _) => {
+ let method_span = method_call.ident.span;
+ or_fun_call::check(cx, expr, method_span, method_call.ident.as_str(), args);
+ expect_fun_call::check(cx, expr, method_span, method_call.ident.as_str(), args);
+ clone_on_copy::check(cx, expr, method_call.ident.name, args);
+ clone_on_ref_ptr::check(cx, expr, method_call.ident.name, args);
+ inefficient_to_string::check(cx, expr, method_call.ident.name, args);
+ single_char_add_str::check(cx, expr, args);
+ into_iter_on_ref::check(cx, expr, method_span, method_call.ident.name, args);
+ single_char_pattern::check(cx, expr, method_call.ident.name, args);
+ unnecessary_to_owned::check(cx, expr, method_call.ident.name, args, self.msrv);
+ },
+ hir::ExprKind::Binary(op, lhs, rhs) if op.node == hir::BinOpKind::Eq || op.node == hir::BinOpKind::Ne => {
+ let mut info = BinaryExprInfo {
+ expr,
+ chain: lhs,
+ other: rhs,
+ eq: op.node == hir::BinOpKind::Eq,
+ };
+ lint_binary_expr_with_method_call(cx, &mut info);
+ },
+ _ => (),
+ }
+ }
+
+ #[allow(clippy::too_many_lines)]
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx hir::ImplItem<'_>) {
+ if in_external_macro(cx.sess(), impl_item.span) {
+ return;
+ }
+ let name = impl_item.ident.name.as_str();
+ let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id());
+ let item = cx.tcx.hir().expect_item(parent);
+ let self_ty = cx.tcx.type_of(item.def_id);
+
+ let implements_trait = matches!(item.kind, hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }));
+ if_chain! {
+ if let hir::ImplItemKind::Fn(ref sig, id) = impl_item.kind;
+ if let Some(first_arg) = iter_input_pats(sig.decl, cx.tcx.hir().body(id)).next();
+
+ let method_sig = cx.tcx.fn_sig(impl_item.def_id);
+ let method_sig = cx.tcx.erase_late_bound_regions(method_sig);
+
+ let first_arg_ty = method_sig.inputs().iter().next();
+
+ // check conventions w.r.t. conversion method names and predicates
+ if let Some(first_arg_ty) = first_arg_ty;
+
+ then {
+ // if this impl block implements a trait, lint in trait definition instead
+ if !implements_trait && cx.access_levels.is_exported(impl_item.def_id) {
+ // check missing trait implementations
+ for method_config in &TRAIT_METHODS {
+ if name == method_config.method_name &&
+ sig.decl.inputs.len() == method_config.param_count &&
+ method_config.output_type.matches(&sig.decl.output) &&
+ method_config.self_kind.matches(cx, self_ty, *first_arg_ty) &&
+ fn_header_equals(method_config.fn_header, sig.header) &&
+ method_config.lifetime_param_cond(impl_item)
+ {
+ span_lint_and_help(
+ cx,
+ SHOULD_IMPLEMENT_TRAIT,
+ impl_item.span,
+ &format!(
+ "method `{}` can be confused for the standard trait method `{}::{}`",
+ method_config.method_name,
+ method_config.trait_name,
+ method_config.method_name
+ ),
+ None,
+ &format!(
+ "consider implementing the trait `{}` or choosing a less ambiguous method name",
+ method_config.trait_name
+ )
+ );
+ }
+ }
+ }
+
+ if sig.decl.implicit_self.has_implicit_self()
+ && !(self.avoid_breaking_exported_api
+ && cx.access_levels.is_exported(impl_item.def_id))
+ {
+ wrong_self_convention::check(
+ cx,
+ name,
+ self_ty,
+ *first_arg_ty,
+ first_arg.pat.span,
+ implements_trait,
+ false
+ );
+ }
+ }
+ }
+
+ // if this impl block implements a trait, lint in trait definition instead
+ if implements_trait {
+ return;
+ }
+
+ if let hir::ImplItemKind::Fn(_, _) = impl_item.kind {
+ let ret_ty = return_ty(cx, impl_item.hir_id());
+
+ // walk the return type and check for Self (this does not check associated types)
+ if let Some(self_adt) = self_ty.ty_adt_def() {
+ if contains_adt_constructor(ret_ty, self_adt) {
+ return;
+ }
+ } else if contains_ty(ret_ty, self_ty) {
+ return;
+ }
+
+ // if return type is impl trait, check the associated types
+ if let ty::Opaque(def_id, _) = *ret_ty.kind() {
+ // one of the associated types must be Self
+ for &(predicate, _span) in cx.tcx.explicit_item_bounds(def_id) {
+ if let ty::PredicateKind::Projection(projection_predicate) = predicate.kind().skip_binder() {
+ let assoc_ty = match projection_predicate.term {
+ ty::Term::Ty(ty) => ty,
+ ty::Term::Const(_c) => continue,
+ };
+ // walk the associated type and check for Self
+ if let Some(self_adt) = self_ty.ty_adt_def() {
+ if contains_adt_constructor(assoc_ty, self_adt) {
+ return;
+ }
+ } else if contains_ty(assoc_ty, self_ty) {
+ return;
+ }
+ }
+ }
+ }
+
+ if name == "new" && ret_ty != self_ty {
+ span_lint(
+ cx,
+ NEW_RET_NO_SELF,
+ impl_item.span,
+ "methods called `new` usually return `Self`",
+ );
+ }
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
+ if in_external_macro(cx.tcx.sess, item.span) {
+ return;
+ }
+
+ if_chain! {
+ if let TraitItemKind::Fn(ref sig, _) = item.kind;
+ if sig.decl.implicit_self.has_implicit_self();
+ if let Some(first_arg_ty) = sig.decl.inputs.iter().next();
+
+ then {
+ let first_arg_span = first_arg_ty.span;
+ let first_arg_ty = hir_ty_to_ty(cx.tcx, first_arg_ty);
+ let self_ty = TraitRef::identity(cx.tcx, item.def_id.to_def_id()).self_ty().skip_binder();
+ wrong_self_convention::check(
+ cx,
+ item.ident.name.as_str(),
+ self_ty,
+ first_arg_ty,
+ first_arg_span,
+ false,
+ true
+ );
+ }
+ }
+
+ if_chain! {
+ if item.ident.name == sym::new;
+ if let TraitItemKind::Fn(_, _) = item.kind;
+ let ret_ty = return_ty(cx, item.hir_id());
+ let self_ty = TraitRef::identity(cx.tcx, item.def_id.to_def_id()).self_ty().skip_binder();
+ if !contains_ty(ret_ty, self_ty);
+
+ then {
+ span_lint(
+ cx,
+ NEW_RET_NO_SELF,
+ item.span,
+ "methods called `new` usually return `Self`",
+ );
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+impl Methods {
+ #[allow(clippy::too_many_lines)]
+ fn check_methods<'tcx>(&self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let Some((name, [recv, args @ ..], span)) = method_call(expr) {
+ match (name, args) {
+ ("add" | "offset" | "sub" | "wrapping_offset" | "wrapping_add" | "wrapping_sub", [_arg]) => {
+ zst_offset::check(cx, expr, recv);
+ },
+ ("and_then", [arg]) => {
+ let biom_option_linted = bind_instead_of_map::OptionAndThenSome::check(cx, expr, recv, arg);
+ let biom_result_linted = bind_instead_of_map::ResultAndThenOk::check(cx, expr, recv, arg);
+ if !biom_option_linted && !biom_result_linted {
+ unnecessary_lazy_eval::check(cx, expr, recv, arg, "and");
+ }
+ },
+ ("as_deref" | "as_deref_mut", []) => {
+ needless_option_as_deref::check(cx, expr, recv, name);
+ },
+ ("as_mut", []) => useless_asref::check(cx, expr, "as_mut", recv),
+ ("as_ref", []) => useless_asref::check(cx, expr, "as_ref", recv),
+ ("assume_init", []) => uninit_assumed_init::check(cx, expr, recv),
+ ("cloned", []) => cloned_instead_of_copied::check(cx, expr, recv, span, self.msrv),
+ ("collect", []) => match method_call(recv) {
+ Some((name @ ("cloned" | "copied"), [recv2], _)) => {
+ iter_cloned_collect::check(cx, name, expr, recv2);
+ },
+ Some(("map", [m_recv, m_arg], _)) => {
+ map_collect_result_unit::check(cx, expr, m_recv, m_arg, recv);
+ },
+ Some(("take", [take_self_arg, take_arg], _)) => {
+ if meets_msrv(self.msrv, msrvs::STR_REPEAT) {
+ manual_str_repeat::check(cx, expr, recv, take_self_arg, take_arg);
+ }
+ },
+ _ => {},
+ },
+ ("count", []) => match method_call(recv) {
+ Some(("cloned", [recv2], _)) => iter_overeager_cloned::check(cx, expr, recv, recv2, true, false),
+ Some((name2 @ ("into_iter" | "iter" | "iter_mut"), [recv2], _)) => {
+ iter_count::check(cx, expr, recv2, name2);
+ },
+ Some(("map", [_, arg], _)) => suspicious_map::check(cx, expr, recv, arg),
+ _ => {},
+ },
+ ("drain", [arg]) => {
+ iter_with_drain::check(cx, expr, recv, span, arg);
+ },
+ ("expect", [_]) => match method_call(recv) {
+ Some(("ok", [recv], _)) => ok_expect::check(cx, expr, recv),
+ Some(("err", [recv], err_span)) => err_expect::check(cx, expr, recv, self.msrv, span, err_span),
+ _ => expect_used::check(cx, expr, recv, self.allow_expect_in_tests),
+ },
+ ("extend", [arg]) => {
+ string_extend_chars::check(cx, expr, recv, arg);
+ extend_with_drain::check(cx, expr, recv, arg);
+ },
+ ("filter_map", [arg]) => {
+ unnecessary_filter_map::check(cx, expr, arg, name);
+ filter_map_identity::check(cx, expr, arg, span);
+ },
+ ("find_map", [arg]) => {
+ unnecessary_filter_map::check(cx, expr, arg, name);
+ },
+ ("flat_map", [arg]) => {
+ flat_map_identity::check(cx, expr, arg, span);
+ flat_map_option::check(cx, expr, arg, span);
+ },
+ ("flatten", []) => match method_call(recv) {
+ Some(("map", [recv, map_arg], map_span)) => map_flatten::check(cx, expr, recv, map_arg, map_span),
+ Some(("cloned", [recv2], _)) => iter_overeager_cloned::check(cx, expr, recv, recv2, false, true),
+ _ => {},
+ },
+ ("fold", [init, acc]) => unnecessary_fold::check(cx, expr, init, acc, span),
+ ("for_each", [_]) => {
+ if let Some(("inspect", [_, _], span2)) = method_call(recv) {
+ inspect_for_each::check(cx, expr, span2);
+ }
+ },
+ ("get", [arg]) => get_last_with_len::check(cx, expr, recv, arg),
+ ("get_or_insert_with", [arg]) => unnecessary_lazy_eval::check(cx, expr, recv, arg, "get_or_insert"),
+ ("is_file", []) => filetype_is_file::check(cx, expr, recv),
+ ("is_digit", [radix]) => is_digit_ascii_radix::check(cx, expr, recv, radix, self.msrv),
+ ("is_none", []) => check_is_some_is_none(cx, expr, recv, false),
+ ("is_some", []) => check_is_some_is_none(cx, expr, recv, true),
+ ("join", [join_arg]) => {
+ if let Some(("collect", _, span)) = method_call(recv) {
+ unnecessary_join::check(cx, expr, recv, join_arg, span);
+ }
+ },
+ ("last", []) | ("skip", [_]) => {
+ if let Some((name2, [recv2, args2 @ ..], _span2)) = method_call(recv) {
+ if let ("cloned", []) = (name2, args2) {
+ iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
+ }
+ }
+ },
+ (name @ ("map" | "map_err"), [m_arg]) => {
+ if let Some((name, [recv2, args @ ..], span2)) = method_call(recv) {
+ match (name, args) {
+ ("as_mut", []) => option_as_ref_deref::check(cx, expr, recv2, m_arg, true, self.msrv),
+ ("as_ref", []) => option_as_ref_deref::check(cx, expr, recv2, m_arg, false, self.msrv),
+ ("filter", [f_arg]) => {
+ filter_map::check(cx, expr, recv2, f_arg, span2, recv, m_arg, span, false);
+ },
+ ("find", [f_arg]) => {
+ filter_map::check(cx, expr, recv2, f_arg, span2, recv, m_arg, span, true);
+ },
+ _ => {},
+ }
+ }
+ map_identity::check(cx, expr, recv, m_arg, name, span);
+ },
+ ("map_or", [def, map]) => option_map_or_none::check(cx, expr, recv, def, map),
+ ("next", []) => {
+ if let Some((name2, [recv2, args2 @ ..], _)) = method_call(recv) {
+ match (name2, args2) {
+ ("cloned", []) => iter_overeager_cloned::check(cx, expr, recv, recv2, false, false),
+ ("filter", [arg]) => filter_next::check(cx, expr, recv2, arg),
+ ("filter_map", [arg]) => filter_map_next::check(cx, expr, recv2, arg, self.msrv),
+ ("iter", []) => iter_next_slice::check(cx, expr, recv2),
+ ("skip", [arg]) => iter_skip_next::check(cx, expr, recv2, arg),
+ ("skip_while", [_]) => skip_while_next::check(cx, expr),
+ _ => {},
+ }
+ }
+ },
+ ("nth", [n_arg]) => match method_call(recv) {
+ Some(("bytes", [recv2], _)) => bytes_nth::check(cx, expr, recv2, n_arg),
+ Some(("cloned", [recv2], _)) => iter_overeager_cloned::check(cx, expr, recv, recv2, false, false),
+ Some(("iter", [recv2], _)) => iter_nth::check(cx, expr, recv2, recv, n_arg, false),
+ Some(("iter_mut", [recv2], _)) => iter_nth::check(cx, expr, recv2, recv, n_arg, true),
+ _ => iter_nth_zero::check(cx, expr, recv, n_arg),
+ },
+ ("ok_or_else", [arg]) => unnecessary_lazy_eval::check(cx, expr, recv, arg, "ok_or"),
+ ("or_else", [arg]) => {
+ if !bind_instead_of_map::ResultOrElseErrInfo::check(cx, expr, recv, arg) {
+ unnecessary_lazy_eval::check(cx, expr, recv, arg, "or");
+ }
+ },
+ ("splitn" | "rsplitn", [count_arg, pat_arg]) => {
+ if let Some((Constant::Int(count), _)) = constant(cx, cx.typeck_results(), count_arg) {
+ suspicious_splitn::check(cx, name, expr, recv, count);
+ str_splitn::check(cx, name, expr, recv, pat_arg, count, self.msrv);
+ }
+ },
+ ("splitn_mut" | "rsplitn_mut", [count_arg, _]) => {
+ if let Some((Constant::Int(count), _)) = constant(cx, cx.typeck_results(), count_arg) {
+ suspicious_splitn::check(cx, name, expr, recv, count);
+ }
+ },
+ ("step_by", [arg]) => iterator_step_by_zero::check(cx, expr, arg),
+ ("take", [_arg]) => {
+ if let Some((name2, [recv2, args2 @ ..], _span2)) = method_call(recv) {
+ if let ("cloned", []) = (name2, args2) {
+ iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
+ }
+ }
+ },
+ ("take", []) => needless_option_take::check(cx, expr, recv),
+ ("then", [arg]) => {
+ if !meets_msrv(self.msrv, msrvs::BOOL_THEN_SOME) {
+ return;
+ }
+ unnecessary_lazy_eval::check(cx, expr, recv, arg, "then_some");
+ },
+ ("to_os_string" | "to_owned" | "to_path_buf" | "to_vec", []) => {
+ implicit_clone::check(cx, name, expr, recv);
+ },
+ ("unwrap", []) => {
+ match method_call(recv) {
+ Some(("get", [recv, get_arg], _)) => {
+ get_unwrap::check(cx, expr, recv, get_arg, false);
+ },
+ Some(("get_mut", [recv, get_arg], _)) => {
+ get_unwrap::check(cx, expr, recv, get_arg, true);
+ },
+ Some(("or", [recv, or_arg], or_span)) => {
+ or_then_unwrap::check(cx, expr, recv, or_arg, or_span);
+ },
+ _ => {},
+ }
+ unwrap_used::check(cx, expr, recv, self.allow_unwrap_in_tests);
+ },
+ ("unwrap_or", [u_arg]) => match method_call(recv) {
+ Some((arith @ ("checked_add" | "checked_sub" | "checked_mul"), [lhs, rhs], _)) => {
+ manual_saturating_arithmetic::check(cx, expr, lhs, rhs, u_arg, &arith["checked_".len()..]);
+ },
+ Some(("map", [m_recv, m_arg], span)) => {
+ option_map_unwrap_or::check(cx, expr, m_recv, m_arg, recv, u_arg, span);
+ },
+ Some(("then_some", [t_recv, t_arg], _)) => {
+ obfuscated_if_else::check(cx, expr, t_recv, t_arg, u_arg);
+ },
+ _ => {},
+ },
+ ("unwrap_or_else", [u_arg]) => match method_call(recv) {
+ Some(("map", [recv, map_arg], _))
+ if map_unwrap_or::check(cx, expr, recv, map_arg, u_arg, self.msrv) => {},
+ _ => {
+ unwrap_or_else_default::check(cx, expr, recv, u_arg);
+ unnecessary_lazy_eval::check(cx, expr, recv, u_arg, "unwrap_or");
+ },
+ },
+ ("replace" | "replacen", [arg1, arg2] | [arg1, arg2, _]) => {
+ no_effect_replace::check(cx, expr, arg1, arg2);
+ },
+ _ => {},
+ }
+ }
+ }
+}
+
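+/// Shared helper for `is_some`/`is_none` receivers: when the receiver is a
+/// `find`/`position`/`rposition` call, defer to the `search_is_some` check.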
+fn check_is_some_is_none(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, is_some: bool) {
+ if let Some((name @ ("find" | "position" | "rposition"), [f_recv, arg], span)) = method_call(recv) {
+ search_is_some::check(cx, expr, name, is_some, f_recv, arg, recv, span);
+ }
+}
+
+/// Used for `lint_binary_expr_with_method_call`.
+#[derive(Copy, Clone)]
+struct BinaryExprInfo<'a> {
+ expr: &'a hir::Expr<'a>,
+ chain: &'a hir::Expr<'a>,
+ other: &'a hir::Expr<'a>,
+ eq: bool,
+}
+
+/// Checks for the `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints.
+fn lint_binary_expr_with_method_call(cx: &LateContext<'_>, info: &mut BinaryExprInfo<'_>) {
+ macro_rules! lint_with_both_lhs_and_rhs {
+ ($func:expr, $cx:expr, $info:ident) => {
+ if !$func($cx, $info) {
+ ::std::mem::swap(&mut $info.chain, &mut $info.other);
+ if $func($cx, $info) {
+ return;
+ }
+ }
+ };
+ }
+
+ lint_with_both_lhs_and_rhs!(chars_next_cmp::check, cx, info);
+ lint_with_both_lhs_and_rhs!(chars_last_cmp::check, cx, info);
+ lint_with_both_lhs_and_rhs!(chars_next_cmp_with_unwrap::check, cx, info);
+ lint_with_both_lhs_and_rhs!(chars_last_cmp_with_unwrap::check, cx, info);
+}
+
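+// The function header expected of the trait-method cases below: a plain safe,
+// non-const, non-async function with the Rust ABI.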
+const FN_HEADER: hir::FnHeader = hir::FnHeader {
+ unsafety: hir::Unsafety::Normal,
+ constness: hir::Constness::NotConst,
+ asyncness: hir::IsAsync::NotAsync,
+ abi: rustc_target::spec::abi::Abi::Rust,
+};
+
+struct ShouldImplTraitCase {
+ trait_name: &'static str,
+ method_name: &'static str,
+ param_count: usize,
+ fn_header: hir::FnHeader,
+ // implicit self kind expected (none, self, &self, ...)
+ self_kind: SelfKind,
+ // checks against the output type
+ output_type: OutType,
+ // certain methods with explicit lifetimes can't implement the equivalent trait method
+ lint_explicit_lifetime: bool,
+}
+impl ShouldImplTraitCase {
+ const fn new(
+ trait_name: &'static str,
+ method_name: &'static str,
+ param_count: usize,
+ fn_header: hir::FnHeader,
+ self_kind: SelfKind,
+ output_type: OutType,
+ lint_explicit_lifetime: bool,
+ ) -> ShouldImplTraitCase {
+ ShouldImplTraitCase {
+ trait_name,
+ method_name,
+ param_count,
+ fn_header,
+ self_kind,
+ output_type,
+ lint_explicit_lifetime,
+ }
+ }
+
+ fn lifetime_param_cond(&self, impl_item: &hir::ImplItem<'_>) -> bool {
+ self.lint_explicit_lifetime
+ || !impl_item.generics.params.iter().any(|p| {
+ matches!(
+ p.kind,
+ hir::GenericParamKind::Lifetime {
+ kind: hir::LifetimeParamKind::Explicit
+ }
+ )
+ })
+ }
+}
+
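+// Methods whose name and shape match a standard library trait method; used by
+// the `SHOULD_IMPLEMENT_TRAIT` check in `check_impl_item` above.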
+#[rustfmt::skip]
+const TRAIT_METHODS: [ShouldImplTraitCase; 30] = [
+ ShouldImplTraitCase::new("std::ops::Add", "add", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::convert::AsMut", "as_mut", 1, FN_HEADER, SelfKind::RefMut, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::convert::AsRef", "as_ref", 1, FN_HEADER, SelfKind::Ref, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::ops::BitAnd", "bitand", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::BitOr", "bitor", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::BitXor", "bitxor", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::borrow::Borrow", "borrow", 1, FN_HEADER, SelfKind::Ref, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::borrow::BorrowMut", "borrow_mut", 1, FN_HEADER, SelfKind::RefMut, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::clone::Clone", "clone", 1, FN_HEADER, SelfKind::Ref, OutType::Any, true),
+ ShouldImplTraitCase::new("std::cmp::Ord", "cmp", 2, FN_HEADER, SelfKind::Ref, OutType::Any, true),
+ // FIXME: default doesn't work
+ ShouldImplTraitCase::new("std::default::Default", "default", 0, FN_HEADER, SelfKind::No, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Deref", "deref", 1, FN_HEADER, SelfKind::Ref, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::ops::DerefMut", "deref_mut", 1, FN_HEADER, SelfKind::RefMut, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::ops::Div", "div", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Drop", "drop", 1, FN_HEADER, SelfKind::RefMut, OutType::Unit, true),
+ ShouldImplTraitCase::new("std::cmp::PartialEq", "eq", 2, FN_HEADER, SelfKind::Ref, OutType::Bool, true),
+ ShouldImplTraitCase::new("std::iter::FromIterator", "from_iter", 1, FN_HEADER, SelfKind::No, OutType::Any, true),
+ ShouldImplTraitCase::new("std::str::FromStr", "from_str", 1, FN_HEADER, SelfKind::No, OutType::Any, true),
+ ShouldImplTraitCase::new("std::hash::Hash", "hash", 2, FN_HEADER, SelfKind::Ref, OutType::Unit, true),
+ ShouldImplTraitCase::new("std::ops::Index", "index", 2, FN_HEADER, SelfKind::Ref, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::ops::IndexMut", "index_mut", 2, FN_HEADER, SelfKind::RefMut, OutType::Ref, true),
+ ShouldImplTraitCase::new("std::iter::IntoIterator", "into_iter", 1, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Mul", "mul", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Neg", "neg", 1, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::iter::Iterator", "next", 1, FN_HEADER, SelfKind::RefMut, OutType::Any, false),
+ ShouldImplTraitCase::new("std::ops::Not", "not", 1, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Rem", "rem", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Shl", "shl", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Shr", "shr", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+ ShouldImplTraitCase::new("std::ops::Sub", "sub", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
+];
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+enum SelfKind {
+ Value,
+ Ref,
+ RefMut,
+ No,
+}
+
+impl SelfKind {
+ fn matches<'a>(self, cx: &LateContext<'a>, parent_ty: Ty<'a>, ty: Ty<'a>) -> bool {
+ fn matches_value<'a>(cx: &LateContext<'a>, parent_ty: Ty<'a>, ty: Ty<'a>) -> bool {
+ if ty == parent_ty {
+ true
+ } else if ty.is_box() {
+ ty.boxed_ty() == parent_ty
+ } else if is_type_diagnostic_item(cx, ty, sym::Rc) || is_type_diagnostic_item(cx, ty, sym::Arc) {
+ if let ty::Adt(_, substs) = ty.kind() {
+ substs.types().next().map_or(false, |t| t == parent_ty)
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+ }
+
+ fn matches_ref<'a>(cx: &LateContext<'a>, mutability: hir::Mutability, parent_ty: Ty<'a>, ty: Ty<'a>) -> bool {
+ if let ty::Ref(_, t, m) = *ty.kind() {
+ return m == mutability && t == parent_ty;
+ }
+
+ let trait_path = match mutability {
+ hir::Mutability::Not => &paths::ASREF_TRAIT,
+ hir::Mutability::Mut => &paths::ASMUT_TRAIT,
+ };
+
+ let trait_def_id = match get_trait_def_id(cx, trait_path) {
+ Some(did) => did,
+ None => return false,
+ };
+ implements_trait(cx, ty, trait_def_id, &[parent_ty.into()])
+ }
+
+ fn matches_none<'a>(cx: &LateContext<'a>, parent_ty: Ty<'a>, ty: Ty<'a>) -> bool {
+ !matches_value(cx, parent_ty, ty)
+ && !matches_ref(cx, hir::Mutability::Not, parent_ty, ty)
+ && !matches_ref(cx, hir::Mutability::Mut, parent_ty, ty)
+ }
+
+ match self {
+ Self::Value => matches_value(cx, parent_ty, ty),
+ Self::Ref => matches_ref(cx, hir::Mutability::Not, parent_ty, ty) || ty == parent_ty && is_copy(cx, ty),
+ Self::RefMut => matches_ref(cx, hir::Mutability::Mut, parent_ty, ty),
+ Self::No => matches_none(cx, parent_ty, ty),
+ }
+ }
+
+ #[must_use]
+ fn description(self) -> &'static str {
+ match self {
+ Self::Value => "`self` by value",
+ Self::Ref => "`self` by reference",
+ Self::RefMut => "`self` by mutable reference",
+ Self::No => "no `self`",
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+enum OutType {
+ Unit,
+ Bool,
+ Any,
+ Ref,
+}
+
+impl OutType {
+ fn matches(self, ty: &hir::FnRetTy<'_>) -> bool {
+ let is_unit = |ty: &hir::Ty<'_>| matches!(ty.kind, hir::TyKind::Tup(&[]));
+ match (self, ty) {
+ (Self::Unit, &hir::FnRetTy::DefaultReturn(_)) => true,
+ (Self::Unit, &hir::FnRetTy::Return(ty)) if is_unit(ty) => true,
+ (Self::Bool, &hir::FnRetTy::Return(ty)) if is_bool(ty) => true,
+ (Self::Any, &hir::FnRetTy::Return(ty)) if !is_unit(ty) => true,
+ (Self::Ref, &hir::FnRetTy::Return(ty)) => matches!(ty.kind, hir::TyKind::Rptr(_, _)),
+ _ => false,
+ }
+ }
+}
+
+fn is_bool(ty: &hir::Ty<'_>) -> bool {
+ if let hir::TyKind::Path(QPath::Resolved(_, path)) = ty.kind {
+ matches!(path.res, Res::PrimTy(PrimTy::Bool))
+ } else {
+ false
+ }
+}
+
+fn fn_header_equals(expected: hir::FnHeader, actual: hir::FnHeader) -> bool {
+ expected.constness == actual.constness
+ && expected.unsafety == actual.unsafety
+ && expected.asyncness == actual.asyncness
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs b/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs
new file mode 100644
index 000000000..7030baf19
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs
@@ -0,0 +1,37 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::path_res;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::usage::local_used_after_expr;
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::NEEDLESS_OPTION_AS_DEREF;
+
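+/// Lints `as_deref`/`as_deref_mut` calls that return the same type as their
+/// receiver, e.g. `opt.as_deref()` where `opt: Option<&str>` is already `Option<&str>`.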
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, name: &str) {
+ let typeck = cx.typeck_results();
+ let outer_ty = typeck.expr_ty(expr);
+
+ if is_type_diagnostic_item(cx, outer_ty, sym::Option) && outer_ty == typeck.expr_ty(recv) {
+ if name == "as_deref_mut" && recv.is_syntactic_place_expr() {
+ let Res::Local(binding_id) = path_res(cx, recv) else { return };
+
+ if local_used_after_expr(cx, binding_id, recv) {
+ return;
+ }
+ }
+
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_OPTION_AS_DEREF,
+ expr.span,
+ "derefed type is same as origin",
+ "try this",
+ snippet_opt(cx, recv.span).unwrap(),
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs b/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs
new file mode 100644
index 000000000..829c118d2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs
@@ -0,0 +1,41 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::match_def_path;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::NEEDLESS_OPTION_TAKE;
+
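+/// Lints `take()` called on a temporary `Option` produced by `Option::as_ref`,
+/// e.g. `opt.as_ref().take()`, where the `take()` has no effect and can be removed.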
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) {
+    // Lint only if the receiver is a temporary (non-place) `Option` obtained through `Option::as_ref`
+ if !recv.is_syntactic_place_expr() && is_expr_option(cx, recv) && has_expr_as_ref_path(cx, recv) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_OPTION_TAKE,
+ expr.span,
+ "called `Option::take()` on a temporary value",
+ "try",
+        snippet_with_applicability(cx, recv.span, "..", &mut applicability).to_string(),
+ applicability,
+ );
+ }
+}
+
+fn is_expr_option(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let expr_type = cx.typeck_results().expr_ty(expr);
+ is_type_diagnostic_item(cx, expr_type, sym::Option)
+}
+
+fn has_expr_as_ref_path(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if let Some(ref_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) {
+ return match_def_path(cx, ref_id, &["core", "option", "Option", "as_ref"]);
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs b/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs
new file mode 100644
index 000000000..a76341855
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/no_effect_replace.rs
@@ -0,0 +1,47 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::SpanlessEq;
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_hir::ExprKind;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::NO_EFFECT_REPLACE;
+
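+/// Lints `replace` calls on `str`/`String` whose two arguments are identical,
+/// e.g. `"abc".replace("a", "a")`, which leave the text unchanged.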
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx rustc_hir::Expr<'_>,
+ arg1: &'tcx rustc_hir::Expr<'_>,
+ arg2: &'tcx rustc_hir::Expr<'_>,
+) {
+ let ty = cx.typeck_results().expr_ty(expr).peel_refs();
+ if !(ty.is_str() || is_type_diagnostic_item(cx, ty, sym::String)) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Lit(spanned) = &arg1.kind;
+ if let Some(param1) = lit_string_value(&spanned.node);
+
+ if let ExprKind::Lit(spanned) = &arg2.kind;
+ if let LitKind::Str(param2, _) = &spanned.node;
+ if param1 == param2.as_str();
+
+ then {
+ span_lint(cx, NO_EFFECT_REPLACE, expr.span, "replacing text with itself");
+ }
+ }
+
+ if SpanlessEq::new(cx).eq_expr(arg1, arg2) {
+ span_lint(cx, NO_EFFECT_REPLACE, expr.span, "replacing text with itself");
+ }
+}
+
+fn lit_string_value(node: &LitKind) -> Option<String> {
+ match node {
+ LitKind::Char(value) => Some(value.to_string()),
+ LitKind::Str(value, _) => Some(value.as_str().to_owned()),
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs
new file mode 100644
index 000000000..4d7427b26
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs
@@ -0,0 +1,42 @@
+use super::OBFUSCATED_IF_ELSE;
+use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_with_applicability};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ then_recv: &'tcx hir::Expr<'_>,
+ then_arg: &'tcx hir::Expr<'_>,
+ unwrap_arg: &'tcx hir::Expr<'_>,
+) {
+ // something.then_some(blah).unwrap_or(blah)
+ // ^^^^^^^^^-then_recv ^^^^-then_arg ^^^^- unwrap_arg
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- expr
+
+ let recv_ty = cx.typeck_results().expr_ty(then_recv);
+
+ if recv_ty.is_bool() {
+ let mut applicability = Applicability::MachineApplicable;
+ let sugg = format!(
+ "if {} {{ {} }} else {{ {} }}",
+ snippet_with_applicability(cx, then_recv.span, "..", &mut applicability),
+ snippet_with_applicability(cx, then_arg.span, "..", &mut applicability),
+ snippet_with_applicability(cx, unwrap_arg.span, "..", &mut applicability)
+ );
+
+ span_lint_and_sugg(
+ cx,
+ OBFUSCATED_IF_ELSE,
+ expr.span,
+ "use of `.then_some(..).unwrap_or(..)` can be written \
+ more clearly with `if .. else ..`",
+ "try",
+ sugg,
+ applicability,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs b/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
new file mode 100644
index 000000000..d64a9f320
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
@@ -0,0 +1,46 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::sym;
+
+use super::OK_EXPECT;
+
+/// lint use of `ok().expect()` for `Result`s
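+///
+/// e.g. `res.ok().expect("oops")` can be written directly as `res.expect("oops")`.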
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
+ if_chain! {
+ // lint if the caller of `ok()` is a `Result`
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+ let result_type = cx.typeck_results().expr_ty(recv);
+ if let Some(error_type) = get_error_type(cx, result_type);
+ if has_debug_impl(error_type, cx);
+
+ then {
+ span_lint_and_help(
+ cx,
+ OK_EXPECT,
+ expr.span,
+ "called `ok().expect()` on a `Result` value",
+ None,
+ "you can call `expect()` directly on the `Result`",
+ );
+ }
+ }
+}
+
+/// Given a `Result<T, E>` type, return its error type (`E`).
+fn get_error_type<'a>(cx: &LateContext<'_>, ty: Ty<'a>) -> Option<Ty<'a>> {
+ match ty.kind() {
+ ty::Adt(_, substs) if is_type_diagnostic_item(cx, ty, sym::Result) => substs.types().nth(1),
+ _ => None,
+ }
+}
+
+/// This checks whether a given type is known to implement Debug.
+fn has_debug_impl<'tcx>(ty: Ty<'tcx>, cx: &LateContext<'tcx>) -> bool {
+ cx.tcx
+ .get_diagnostic_item(sym::Debug)
+ .map_or(false, |debug| implements_trait(cx, ty, debug, &[]))
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs b/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs
new file mode 100644
index 000000000..20cad0f18
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/option_as_ref_deref.rs
@@ -0,0 +1,120 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{match_def_path, meets_msrv, msrvs, path_to_local_id, paths, peel_blocks};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_span::sym;
+
+use super::OPTION_AS_REF_DEREF;
+
+/// lint use of `_.as_ref().map(Deref::deref)` for `Option`s
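+///
+/// e.g. `opt.as_ref().map(String::as_str)` can be written as `opt.as_deref()`.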
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+ as_ref_recv: &hir::Expr<'_>,
+ map_arg: &hir::Expr<'_>,
+ is_mut: bool,
+ msrv: Option<RustcVersion>,
+) {
+ if !meets_msrv(msrv, msrvs::OPTION_AS_DEREF) {
+ return;
+ }
+
+ let same_mutability = |m| (is_mut && m == &hir::Mutability::Mut) || (!is_mut && m == &hir::Mutability::Not);
+
+ let option_ty = cx.typeck_results().expr_ty(as_ref_recv);
+ if !is_type_diagnostic_item(cx, option_ty, sym::Option) {
+ return;
+ }
+
+ let deref_aliases: [&[&str]; 9] = [
+ &paths::DEREF_TRAIT_METHOD,
+ &paths::DEREF_MUT_TRAIT_METHOD,
+ &paths::CSTRING_AS_C_STR,
+ &paths::OS_STRING_AS_OS_STR,
+ &paths::PATH_BUF_AS_PATH,
+ &paths::STRING_AS_STR,
+ &paths::STRING_AS_MUT_STR,
+ &paths::VEC_AS_SLICE,
+ &paths::VEC_AS_MUT_SLICE,
+ ];
+
+ let is_deref = match map_arg.kind {
+ hir::ExprKind::Path(ref expr_qpath) => cx
+ .qpath_res(expr_qpath, map_arg.hir_id)
+ .opt_def_id()
+ .map_or(false, |fun_def_id| {
+ deref_aliases.iter().any(|path| match_def_path(cx, fun_def_id, path))
+ }),
+ hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
+ let closure_body = cx.tcx.hir().body(body);
+ let closure_expr = peel_blocks(&closure_body.value);
+
+ match &closure_expr.kind {
+ hir::ExprKind::MethodCall(_, args, _) => {
+ if_chain! {
+ if args.len() == 1;
+ if path_to_local_id(&args[0], closure_body.params[0].pat.hir_id);
+ let adj = cx
+ .typeck_results()
+ .expr_adjustments(&args[0])
+ .iter()
+ .map(|x| &x.kind)
+ .collect::<Box<[_]>>();
+ if let [ty::adjustment::Adjust::Deref(None), ty::adjustment::Adjust::Borrow(_)] = *adj;
+ then {
+ let method_did = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id).unwrap();
+ deref_aliases.iter().any(|path| match_def_path(cx, method_did, path))
+ } else {
+ false
+ }
+ }
+ },
+ hir::ExprKind::AddrOf(hir::BorrowKind::Ref, m, inner) if same_mutability(m) => {
+ if_chain! {
+ if let hir::ExprKind::Unary(hir::UnOp::Deref, inner1) = inner.kind;
+ if let hir::ExprKind::Unary(hir::UnOp::Deref, inner2) = inner1.kind;
+ then {
+ path_to_local_id(inner2, closure_body.params[0].pat.hir_id)
+ } else {
+ false
+ }
+ }
+ },
+ _ => false,
+ }
+ },
+ _ => false,
+ };
+
+ if is_deref {
+ let current_method = if is_mut {
+ format!(".as_mut().map({})", snippet(cx, map_arg.span, ".."))
+ } else {
+ format!(".as_ref().map({})", snippet(cx, map_arg.span, ".."))
+ };
+ let method_hint = if is_mut { "as_deref_mut" } else { "as_deref" };
+ let hint = format!("{}.{}()", snippet(cx, as_ref_recv.span, ".."), method_hint);
+ let suggestion = format!("try using {} instead", method_hint);
+
+ let msg = format!(
+ "called `{0}` on an Option value. This can be done more directly \
+ by calling `{1}` instead",
+ current_method, hint
+ );
+ span_lint_and_sugg(
+ cx,
+ OPTION_AS_REF_DEREF,
+ expr.span,
+ &msg,
+ &suggestion,
+ hint,
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
new file mode 100644
index 000000000..5a39b82b0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
@@ -0,0 +1,122 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_lang_ctor, path_def_id};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::LangItem::{OptionNone, OptionSome};
+use rustc_lint::LateContext;
+use rustc_middle::ty::DefIdTree;
+use rustc_span::symbol::sym;
+
+use super::OPTION_MAP_OR_NONE;
+use super::RESULT_MAP_OR_INTO_OPTION;
+
+// The expression inside a closure may or may not have surrounding braces,
+// which causes problems when generating a suggestion.
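+// e.g. it reduces the body of `|x| { Some(x) }` down to the inner `Some(x)` call.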
+fn reduce_unit_expression<'a>(expr: &'a hir::Expr<'_>) -> Option<(&'a hir::Expr<'a>, &'a [hir::Expr<'a>])> {
+ match expr.kind {
+ hir::ExprKind::Call(func, arg_char) => Some((func, arg_char)),
+ hir::ExprKind::Block(block, _) => {
+ match (block.stmts, block.expr) {
+ (&[], Some(inner_expr)) => {
+ // If block only contains an expression,
+ // reduce `|x| { x + 1 }` to `|x| x + 1`
+ reduce_unit_expression(inner_expr)
+ },
+ _ => None,
+ }
+ },
+ _ => None,
+ }
+}
+
+/// lint use of `_.map_or(None, _)` for `Option`s and `Result`s
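+///
+/// e.g. `opt.map_or(None, |x| Some(x + 1))` can be `opt.map(|x| x + 1)`, and on a
+/// `Result`, `res.map_or(None, Some)` can simply be `res.ok()`.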
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ def_arg: &'tcx hir::Expr<'_>,
+ map_arg: &'tcx hir::Expr<'_>,
+) {
+ let is_option = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Option);
+ let is_result = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+
+ // There are two variants of this `map_or` lint:
+ // (1) using `map_or` as an adapter from `Result<T,E>` to `Option<T>`
+ // (2) using `map_or` as a combinator instead of `and_then`
+ //
+ // (For this lint) we don't care if any other type calls `map_or`
+ if !is_option && !is_result {
+ return;
+ }
+
+ let default_arg_is_none = if let hir::ExprKind::Path(ref qpath) = def_arg.kind {
+ is_lang_ctor(cx, qpath, OptionNone)
+ } else {
+ return;
+ };
+
+ if !default_arg_is_none {
+ // nothing to lint!
+ return;
+ }
+
+ let f_arg_is_some = if let hir::ExprKind::Path(ref qpath) = map_arg.kind {
+ is_lang_ctor(cx, qpath, OptionSome)
+ } else {
+ false
+ };
+
+ if is_option {
+ let self_snippet = snippet(cx, recv.span, "..");
+ if_chain! {
+ if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) = map_arg.kind;
+ let arg_snippet = snippet(cx, fn_decl_span, "..");
+ let body = cx.tcx.hir().body(body);
+ if let Some((func, [arg_char])) = reduce_unit_expression(&body.value);
+ if let Some(id) = path_def_id(cx, func).map(|ctor_id| cx.tcx.parent(ctor_id));
+ if Some(id) == cx.tcx.lang_items().option_some_variant();
+ then {
+ let func_snippet = snippet(cx, arg_char.span, "..");
+ let msg = "called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling \
+ `map(..)` instead";
+ return span_lint_and_sugg(
+ cx,
+ OPTION_MAP_OR_NONE,
+ expr.span,
+ msg,
+ "try using `map` instead",
+                    format!("{0}.map({1} {2})", self_snippet, arg_snippet, func_snippet),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
+ let func_snippet = snippet(cx, map_arg.span, "..");
+ let msg = "called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling \
+ `and_then(..)` instead";
+ span_lint_and_sugg(
+ cx,
+ OPTION_MAP_OR_NONE,
+ expr.span,
+ msg,
+ "try using `and_then` instead",
+ format!("{0}.and_then({1})", self_snippet, func_snippet),
+ Applicability::MachineApplicable,
+ );
+ } else if f_arg_is_some {
+ let msg = "called `map_or(None, Some)` on a `Result` value. This can be done more directly by calling \
+ `ok()` instead";
+ let self_snippet = snippet(cx, recv.span, "..");
+ span_lint_and_sugg(
+ cx,
+ RESULT_MAP_OR_INTO_OPTION,
+ expr.span,
+ msg,
+ "try using `ok` instead",
+ format!("{0}.ok()", self_snippet),
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
new file mode 100644
index 000000000..6c641af59
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
@@ -0,0 +1,139 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_copy;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_path, Visitor};
+use rustc_hir::{self, HirId, Path};
+use rustc_lint::LateContext;
+use rustc_middle::hir::nested_filter;
+use rustc_span::source_map::Span;
+use rustc_span::{sym, Symbol};
+
+use super::MAP_UNWRAP_OR;
+
+/// lint use of `map().unwrap_or()` for `Option`s
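+///
+/// e.g. `opt.map(f).unwrap_or(a)` can be written as `opt.map_or(a, f)`, or as
+/// `opt.and_then(f)` when the `unwrap_or` argument is `None`.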
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &rustc_hir::Expr<'_>,
+ recv: &rustc_hir::Expr<'_>,
+ map_arg: &'tcx rustc_hir::Expr<'_>,
+ unwrap_recv: &rustc_hir::Expr<'_>,
+ unwrap_arg: &'tcx rustc_hir::Expr<'_>,
+ map_span: Span,
+) {
+ // lint if the caller of `map()` is an `Option`
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Option) {
+ if !is_copy(cx, cx.typeck_results().expr_ty(unwrap_arg)) {
+            // Do not lint if the `map` argument uses identifiers that are also
+            // used in the `unwrap_or` argument
+
+ let mut unwrap_visitor = UnwrapVisitor {
+ cx,
+ identifiers: FxHashSet::default(),
+ };
+ unwrap_visitor.visit_expr(unwrap_arg);
+
+ let mut map_expr_visitor = MapExprVisitor {
+ cx,
+ identifiers: unwrap_visitor.identifiers,
+ found_identifier: false,
+ };
+ map_expr_visitor.visit_expr(map_arg);
+
+ if map_expr_visitor.found_identifier {
+ return;
+ }
+ }
+
+ if unwrap_arg.span.ctxt() != map_span.ctxt() {
+ return;
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+ // get snippet for unwrap_or()
+ let unwrap_snippet = snippet_with_applicability(cx, unwrap_arg.span, "..", &mut applicability);
+ // lint message
+        // comparing the snippet from source to the raw text ("None") below is safe
+        // because we have already checked the type.
+ let arg = if unwrap_snippet == "None" { "None" } else { "<a>" };
+ let unwrap_snippet_none = unwrap_snippet == "None";
+ let suggest = if unwrap_snippet_none {
+ "and_then(<f>)"
+ } else {
+ "map_or(<a>, <f>)"
+ };
+ let msg = &format!(
+ "called `map(<f>).unwrap_or({})` on an `Option` value. \
+ This can be done more directly by calling `{}` instead",
+ arg, suggest
+ );
+
+ span_lint_and_then(cx, MAP_UNWRAP_OR, expr.span, msg, |diag| {
+ let map_arg_span = map_arg.span;
+
+ let mut suggestion = vec![
+ (
+ map_span,
+ String::from(if unwrap_snippet_none { "and_then" } else { "map_or" }),
+ ),
+ (expr.span.with_lo(unwrap_recv.span.hi()), String::from("")),
+ ];
+
+ if !unwrap_snippet_none {
+ suggestion.push((map_arg_span.with_hi(map_arg_span.lo()), format!("{}, ", unwrap_snippet)));
+ }
+
+ diag.multipart_suggestion(&format!("use `{}` instead", suggest), suggestion, applicability);
+ });
+ }
+}
+
+struct UnwrapVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ identifiers: FxHashSet<Symbol>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for UnwrapVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn visit_path(&mut self, path: &'tcx Path<'_>, _id: HirId) {
+ self.identifiers.insert(ident(path));
+ walk_path(self, path);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+struct MapExprVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ identifiers: FxHashSet<Symbol>,
+ found_identifier: bool,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for MapExprVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn visit_path(&mut self, path: &'tcx Path<'_>, _id: HirId) {
+ if self.identifiers.contains(&ident(path)) {
+ self.found_identifier = true;
+ return;
+ }
+ walk_path(self, path);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+fn ident(path: &Path<'_>) -> Symbol {
+ path.segments
+ .last()
+ .expect("segments should be composed of at least 1 element")
+ .ident
+ .name
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
new file mode 100644
index 000000000..6af134019
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
@@ -0,0 +1,175 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::eager_or_lazy::switch_to_lazy_eval;
+use clippy_utils::source::{snippet, snippet_with_macro_callsite};
+use clippy_utils::ty::{implements_trait, match_type};
+use clippy_utils::{contains_return, is_trait_item, last_path_segment, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+use rustc_span::symbol::{kw, sym};
+use std::borrow::Cow;
+
+use super::OR_FUN_CALL;
+
+/// Checks for the `OR_FUN_CALL` lint.
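+///
+/// e.g. `opt.unwrap_or(expensive())` is linted in favour of the lazily evaluated
+/// `opt.unwrap_or_else(|| expensive())`, and `opt.unwrap_or(Default::default())`
+/// in favour of `opt.unwrap_or_default()`.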
+#[allow(clippy::too_many_lines)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &hir::Expr<'_>,
+ method_span: Span,
+ name: &str,
+ args: &'tcx [hir::Expr<'_>],
+) {
+ /// Checks for `unwrap_or(T::new())` or `unwrap_or(T::default())`.
+ #[allow(clippy::too_many_arguments)]
+ fn check_unwrap_or_default(
+ cx: &LateContext<'_>,
+ name: &str,
+ fun: &hir::Expr<'_>,
+ arg: &hir::Expr<'_>,
+ or_has_args: bool,
+ span: Span,
+ method_span: Span,
+ ) -> bool {
+ let is_default_default = || is_trait_item(cx, fun, sym::Default);
+
+ let implements_default = |arg, default_trait_id| {
+ let arg_ty = cx.typeck_results().expr_ty(arg);
+ implements_trait(cx, arg_ty, default_trait_id, &[])
+ };
+
+ if_chain! {
+ if !or_has_args;
+ if name == "unwrap_or";
+ if let hir::ExprKind::Path(ref qpath) = fun.kind;
+ if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default);
+ let path = last_path_segment(qpath).ident.name;
+ // needs to target Default::default in particular or be *::new and have a Default impl
+ // available
+ if (matches!(path, kw::Default) && is_default_default())
+ || (matches!(path, sym::new) && implements_default(arg, default_trait_id));
+
+ then {
+ span_lint_and_sugg(
+ cx,
+ OR_FUN_CALL,
+ method_span.with_hi(span.hi()),
+ &format!("use of `{}` followed by a call to `{}`", name, path),
+ "try this",
+ "unwrap_or_default()".to_string(),
+ Applicability::MachineApplicable,
+ );
+
+ true
+ } else {
+ false
+ }
+ }
+ }
+
+ /// Checks for `*or(foo())`.
+ #[allow(clippy::too_many_arguments)]
+ fn check_general_case<'tcx>(
+ cx: &LateContext<'tcx>,
+ name: &str,
+ method_span: Span,
+ self_expr: &hir::Expr<'_>,
+ arg: &'tcx hir::Expr<'_>,
+ span: Span,
+ // None if lambda is required
+ fun_span: Option<Span>,
+ ) {
+        // (path, fn_has_arguments, methods, suffix)
+        static KNOWN_TYPES: [(&[&str], bool, &[&str], &str); 4] = [
+ (&paths::BTREEMAP_ENTRY, false, &["or_insert"], "with"),
+ (&paths::HASHMAP_ENTRY, false, &["or_insert"], "with"),
+ (&paths::OPTION, false, &["map_or", "ok_or", "or", "unwrap_or"], "else"),
+ (&paths::RESULT, true, &["or", "unwrap_or"], "else"),
+ ];
+
+ if_chain! {
+            if KNOWN_TYPES.iter().any(|k| k.2.contains(&name));
+
+ if switch_to_lazy_eval(cx, arg);
+ if !contains_return(arg);
+
+ let self_ty = cx.typeck_results().expr_ty(self_expr);
+
+            if let Some(&(_, fn_has_arguments, poss, suffix)) =
+                KNOWN_TYPES.iter().find(|&&i| match_type(cx, self_ty, i.0));
+
+ if poss.contains(&name);
+
+ then {
+                let macro_expanded_snippet;
+ let sugg: Cow<'_, str> = {
+ let (snippet_span, use_lambda) = match (fn_has_arguments, fun_span) {
+ (false, Some(fun_span)) => (fun_span, false),
+ _ => (arg.span, true),
+ };
+ let snippet = {
+ let not_macro_argument_snippet = snippet_with_macro_callsite(cx, snippet_span, "..");
+ if not_macro_argument_snippet == "vec![]" {
+                        macro_expanded_snippet = snippet(cx, snippet_span, "..");
+                        match macro_expanded_snippet.strip_prefix("$crate::vec::") {
+                            Some(stripped) => Cow::from(stripped),
+                            None => macro_expanded_snippet
+ }
+ }
+ else {
+ not_macro_argument_snippet
+ }
+ };
+
+ if use_lambda {
+ let l_arg = if fn_has_arguments { "_" } else { "" };
+ format!("|{}| {}", l_arg, snippet).into()
+ } else {
+ snippet
+ }
+ };
+ let span_replace_word = method_span.with_hi(span.hi());
+ span_lint_and_sugg(
+ cx,
+ OR_FUN_CALL,
+ span_replace_word,
+ &format!("use of `{}` followed by a function call", name),
+ "try this",
+ format!("{}_{}({})", name, suffix, sugg),
+ Applicability::HasPlaceholders,
+ );
+ }
+ }
+ }
+
+ if let [self_arg, arg] = args {
+ let inner_arg = if let hir::ExprKind::Block(
+ hir::Block {
+ stmts: [],
+ expr: Some(expr),
+ ..
+ },
+ _,
+ ) = arg.kind
+ {
+ expr
+ } else {
+ arg
+ };
+ match inner_arg.kind {
+ hir::ExprKind::Call(fun, or_args) => {
+ let or_has_args = !or_args.is_empty();
+ if !check_unwrap_or_default(cx, name, fun, arg, or_has_args, expr.span, method_span) {
+ let fun_span = if or_has_args { None } else { Some(fun.span) };
+ check_general_case(cx, name, method_span, self_arg, arg, expr.span, fun_span);
+ }
+ },
+ hir::ExprKind::Index(..) | hir::ExprKind::MethodCall(..) => {
+ check_general_case(cx, name, method_span, self_arg, arg, expr.span, None);
+ },
+ _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs
new file mode 100644
index 000000000..be5768c35
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs
@@ -0,0 +1,68 @@
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{diagnostics::span_lint_and_sugg, is_lang_ctor};
+use rustc_errors::Applicability;
+use rustc_hir::{lang_items::LangItem, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::{sym, Span};
+
+use super::OR_THEN_UNWRAP;
+
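+/// Lints `.or(Some(…)).unwrap()` and `.or(Ok(…)).unwrap()` chains, e.g.
+/// `opt.or(Some(0)).unwrap()` can be written as `opt.unwrap_or(0)`.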
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ unwrap_expr: &Expr<'_>,
+ recv: &'tcx Expr<'tcx>,
+ or_arg: &'tcx Expr<'_>,
+ or_span: Span,
+) {
+ let ty = cx.typeck_results().expr_ty(recv); // get type of x (we later check if it's Option or Result)
+ let title;
+ let or_arg_content: Span;
+
+ if is_type_diagnostic_item(cx, ty, sym::Option) {
+ title = "found `.or(Some(…)).unwrap()`";
+ if let Some(content) = get_content_if_ctor_matches(cx, or_arg, LangItem::OptionSome) {
+ or_arg_content = content;
+ } else {
+ return;
+ }
+ } else if is_type_diagnostic_item(cx, ty, sym::Result) {
+ title = "found `.or(Ok(…)).unwrap()`";
+ if let Some(content) = get_content_if_ctor_matches(cx, or_arg, LangItem::ResultOk) {
+ or_arg_content = content;
+ } else {
+ return;
+ }
+ } else {
+ // Someone has implemented a struct with .or(...).unwrap() chaining,
+ // but it's not an Option or a Result, so bail
+ return;
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+ let suggestion = format!(
+ "unwrap_or({})",
+ snippet_with_applicability(cx, or_arg_content, "..", &mut applicability)
+ );
+
+ span_lint_and_sugg(
+ cx,
+ OR_THEN_UNWRAP,
+ unwrap_expr.span.with_lo(or_span.lo()),
+ title,
+ "try this",
+ suggestion,
+ applicability,
+ );
+}
+
+fn get_content_if_ctor_matches(cx: &LateContext<'_>, expr: &Expr<'_>, item: LangItem) -> Option<Span> {
+ if let ExprKind::Call(some_expr, [arg]) = expr.kind
+ && let ExprKind::Path(qpath) = &some_expr.kind
+ && is_lang_ctor(cx, qpath, item)
+ {
+ Some(arg.span)
+ } else {
+ None
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs b/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs
new file mode 100644
index 000000000..7572ba3fe
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/search_is_some.rs
@@ -0,0 +1,156 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
+use clippy_utils::source::{snippet, snippet_with_applicability};
+use clippy_utils::sugg::deref_closure_args;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_trait_method, strip_pat_refs};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::PatKind;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Span;
+use rustc_span::symbol::sym;
+
+use super::SEARCH_IS_SOME;
+
+/// lint searching an Iterator followed by `is_some()`
+/// or calling `find()` on a string followed by `is_some()` or `is_none()`
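+///
+/// e.g. `iter.find(|&x| x > 1).is_some()` can be `iter.any(|x| x > 1)`, and
+/// `"abc".find("bc").is_some()` can be `"abc".contains("bc")`.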
+#[allow(clippy::too_many_arguments, clippy::too_many_lines)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'_>,
+ expr: &'tcx hir::Expr<'_>,
+ search_method: &str,
+ is_some: bool,
+ search_recv: &hir::Expr<'_>,
+ search_arg: &'tcx hir::Expr<'_>,
+ is_some_recv: &hir::Expr<'_>,
+ method_span: Span,
+) {
+ let option_check_method = if is_some { "is_some" } else { "is_none" };
+ // lint if caller of search is an Iterator
+ if is_trait_method(cx, is_some_recv, sym::Iterator) {
+ let msg = format!(
+ "called `{}()` after searching an `Iterator` with `{}`",
+ option_check_method, search_method
+ );
+ let search_snippet = snippet(cx, search_arg.span, "..");
+ if search_snippet.lines().count() <= 1 {
+ // suggest `any(|x| ..)` instead of `any(|&x| ..)` for `find(|&x| ..).is_some()`
+ // suggest `any(|..| *..)` instead of `any(|..| **..)` for `find(|..| **..).is_some()`
+ let mut applicability = Applicability::MachineApplicable;
+ let any_search_snippet = if_chain! {
+ if search_method == "find";
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = search_arg.kind;
+ let closure_body = cx.tcx.hir().body(body);
+ if let Some(closure_arg) = closure_body.params.get(0);
+ then {
+ if let hir::PatKind::Ref(..) = closure_arg.pat.kind {
+ Some(search_snippet.replacen('&', "", 1))
+ } else if let PatKind::Binding(..) = strip_pat_refs(closure_arg.pat).kind {
+ // `find()` provides a reference to the item, but `any` does not,
+ // so we should fix item usages for suggestion
+ if let Some(closure_sugg) = deref_closure_args(cx, search_arg) {
+ applicability = closure_sugg.applicability;
+ Some(closure_sugg.suggestion)
+ } else {
+ Some(search_snippet.to_string())
+ }
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ };
+ // add note if not multi-line
+ if is_some {
+ span_lint_and_sugg(
+ cx,
+ SEARCH_IS_SOME,
+ method_span.with_hi(expr.span.hi()),
+ &msg,
+ "use `any()` instead",
+ format!(
+ "any({})",
+ any_search_snippet.as_ref().map_or(&*search_snippet, String::as_str)
+ ),
+ applicability,
+ );
+ } else {
+ let iter = snippet(cx, search_recv.span, "..");
+ span_lint_and_sugg(
+ cx,
+ SEARCH_IS_SOME,
+ expr.span,
+ &msg,
+ "use `!_.any()` instead",
+ format!(
+ "!{}.any({})",
+ iter,
+ any_search_snippet.as_ref().map_or(&*search_snippet, String::as_str)
+ ),
+ applicability,
+ );
+ }
+ } else {
+ let hint = format!(
+ "this is more succinctly expressed by calling `any()`{}",
+ if option_check_method == "is_none" {
+ " with negation"
+ } else {
+ ""
+ }
+ );
+ span_lint_and_help(cx, SEARCH_IS_SOME, expr.span, &msg, None, &hint);
+ }
+ }
+ // lint if `find()` is called by `String` or `&str`
+ else if search_method == "find" {
+ let is_string_or_str_slice = |e| {
+ let self_ty = cx.typeck_results().expr_ty(e).peel_refs();
+ if is_type_diagnostic_item(cx, self_ty, sym::String) {
+ true
+ } else {
+ *self_ty.kind() == ty::Str
+ }
+ };
+ if_chain! {
+ if is_string_or_str_slice(search_recv);
+ if is_string_or_str_slice(search_arg);
+ then {
+ let msg = format!("called `{}()` after calling `find()` on a string", option_check_method);
+ match option_check_method {
+ "is_some" => {
+ let mut applicability = Applicability::MachineApplicable;
+ let find_arg = snippet_with_applicability(cx, search_arg.span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ SEARCH_IS_SOME,
+ method_span.with_hi(expr.span.hi()),
+ &msg,
+ "use `contains()` instead",
+ format!("contains({})", find_arg),
+ applicability,
+ );
+ },
+ "is_none" => {
+ let string = snippet(cx, search_recv.span, "..");
+ let mut applicability = Applicability::MachineApplicable;
+ let find_arg = snippet_with_applicability(cx, search_arg.span, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ SEARCH_IS_SOME,
+ expr.span,
+ &msg,
+ "use `!_.contains()` instead",
+ format!("!{}.contains({})", string, find_arg),
+ applicability,
+ );
+ },
+ _ => (),
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/single_char_add_str.rs b/src/tools/clippy/clippy_lints/src/methods/single_char_add_str.rs
new file mode 100644
index 000000000..9a5fabcf7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/single_char_add_str.rs
@@ -0,0 +1,14 @@
+use crate::methods::{single_char_insert_string, single_char_push_string};
+use clippy_utils::{match_def_path, paths};
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
+ if let Some(fn_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) {
+ if match_def_path(cx, fn_def_id, &paths::PUSH_STR) {
+ single_char_push_string::check(cx, expr, args);
+ } else if match_def_path(cx, fn_def_id, &paths::INSERT_STR) {
+ single_char_insert_string::check(cx, expr, args);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/single_char_insert_string.rs b/src/tools/clippy/clippy_lints/src/methods/single_char_insert_string.rs
new file mode 100644
index 000000000..6cdc954c0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/single_char_insert_string.rs
@@ -0,0 +1,28 @@
+use super::utils::get_hint_if_single_char_arg;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+use super::SINGLE_CHAR_ADD_STR;
+
+/// lint for length-1 `str`s as argument for `insert_str`
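+///
+/// e.g. `s.insert_str(0, "x")` can be written as `s.insert(0, 'x')`.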
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
+ let mut applicability = Applicability::MachineApplicable;
+ if let Some(extension_string) = get_hint_if_single_char_arg(cx, &args[2], &mut applicability) {
+ let base_string_snippet =
+ snippet_with_applicability(cx, args[0].span.source_callsite(), "_", &mut applicability);
+ let pos_arg = snippet_with_applicability(cx, args[1].span, "..", &mut applicability);
+ let sugg = format!("{}.insert({}, {})", base_string_snippet, pos_arg, extension_string);
+ span_lint_and_sugg(
+ cx,
+ SINGLE_CHAR_ADD_STR,
+ expr.span,
+ "calling `insert_str()` using a single-character string literal",
+ "consider using `insert` with a character literal",
+ sugg,
+ applicability,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs b/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs
new file mode 100644
index 000000000..bf9006c69
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/single_char_pattern.rs
@@ -0,0 +1,62 @@
+use super::utils::get_hint_if_single_char_arg;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::symbol::Symbol;
+
+use super::SINGLE_CHAR_PATTERN;
+
+const PATTERN_METHODS: [(&str, usize); 24] = [
+ ("contains", 1),
+ ("starts_with", 1),
+ ("ends_with", 1),
+ ("find", 1),
+ ("rfind", 1),
+ ("split", 1),
+ ("split_inclusive", 1),
+ ("rsplit", 1),
+ ("split_terminator", 1),
+ ("rsplit_terminator", 1),
+ ("splitn", 2),
+ ("rsplitn", 2),
+ ("split_once", 1),
+ ("rsplit_once", 1),
+ ("matches", 1),
+ ("rmatches", 1),
+ ("match_indices", 1),
+ ("rmatch_indices", 1),
+ ("strip_prefix", 1),
+ ("strip_suffix", 1),
+ ("trim_start_matches", 1),
+ ("trim_end_matches", 1),
+ ("replace", 1),
+ ("replacen", 1),
+];
+
+/// lint for length-1 `str`s for methods in `PATTERN_METHODS`
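+///
+/// e.g. `s.split("x")` can use a `char` pattern instead: `s.split('x')`.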
+pub(super) fn check(cx: &LateContext<'_>, _expr: &hir::Expr<'_>, method_name: Symbol, args: &[hir::Expr<'_>]) {
+ for &(method, pos) in &PATTERN_METHODS {
+ if_chain! {
+ if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty_adjusted(&args[0]).kind();
+ if *ty.kind() == ty::Str;
+ if method_name.as_str() == method && args.len() > pos;
+ let arg = &args[pos];
+ let mut applicability = Applicability::MachineApplicable;
+ if let Some(hint) = get_hint_if_single_char_arg(cx, arg, &mut applicability);
+ then {
+ span_lint_and_sugg(
+ cx,
+ SINGLE_CHAR_PATTERN,
+ arg.span,
+ "single-character string constant used as pattern",
+ "try using a `char` instead",
+ hint,
+ applicability,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/single_char_push_string.rs b/src/tools/clippy/clippy_lints/src/methods/single_char_push_string.rs
new file mode 100644
index 000000000..0237d39cb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/single_char_push_string.rs
@@ -0,0 +1,27 @@
+use super::utils::get_hint_if_single_char_arg;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+use super::SINGLE_CHAR_ADD_STR;
+
+/// lint for length-1 `str`s as argument for `push_str`
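+///
+/// e.g. `s.push_str("x")` can be written as `s.push('x')`.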
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
+ let mut applicability = Applicability::MachineApplicable;
+ if let Some(extension_string) = get_hint_if_single_char_arg(cx, &args[1], &mut applicability) {
+ let base_string_snippet =
+ snippet_with_applicability(cx, args[0].span.source_callsite(), "..", &mut applicability);
+ let sugg = format!("{}.push({})", base_string_snippet, extension_string);
+ span_lint_and_sugg(
+ cx,
+ SINGLE_CHAR_ADD_STR,
+ expr.span,
+ "calling `push_str()` using a single-character string literal",
+ "consider using `push` with a character literal",
+ sugg,
+ applicability,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/skip_while_next.rs b/src/tools/clippy/clippy_lints/src/methods/skip_while_next.rs
new file mode 100644
index 000000000..9f0b6c34e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/skip_while_next.rs
@@ -0,0 +1,22 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_trait_method;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::SKIP_WHILE_NEXT;
+
+/// lint use of `skip_while().next()` for `Iterators`
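+///
+/// e.g. `iter.skip_while(|x| p(x)).next()` can be written as `iter.find(|x| !p(x))`.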
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ // lint if caller of `.skip_while().next()` is an Iterator
+ if is_trait_method(cx, expr, sym::Iterator) {
+ span_lint_and_help(
+ cx,
+ SKIP_WHILE_NEXT,
+ expr.span,
+ "called `skip_while(<p>).next()` on an `Iterator`",
+ None,
+ "this is more succinctly expressed by calling `.find(!<p>)` instead",
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
new file mode 100644
index 000000000..4ac738272
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
@@ -0,0 +1,390 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::usage::local_used_after_expr;
+use clippy_utils::visitors::expr_visitor;
+use clippy_utils::{is_diag_item_method, match_def_path, meets_msrv, msrvs, path_to_local_id, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::Visitor;
+use rustc_hir::{
+ BindingAnnotation, Expr, ExprKind, HirId, LangItem, Local, MatchSource, Node, Pat, PatKind, QPath, Stmt, StmtKind,
+};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_span::{sym, Span, Symbol, SyntaxContext};
+
+use super::{MANUAL_SPLIT_ONCE, NEEDLESS_SPLITN};
+
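+/// Lints `splitn`/`rsplitn` calls that are either a manual `split_once`
+/// (e.g. `s.splitn(2, '=').nth(1)`) or use a larger count than the number of
+/// parts actually consumed (a needless `splitn`).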
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ method_name: &str,
+ expr: &Expr<'_>,
+ self_arg: &Expr<'_>,
+ pat_arg: &Expr<'_>,
+ count: u128,
+ msrv: Option<RustcVersion>,
+) {
+ if count < 2 || !cx.typeck_results().expr_ty_adjusted(self_arg).peel_refs().is_str() {
+ return;
+ }
+
+ let needless = |usage_kind| match usage_kind {
+ IterUsageKind::Nth(n) => count > n + 1,
+ IterUsageKind::NextTuple => count > 2,
+ };
+ let manual = count == 2 && meets_msrv(msrv, msrvs::STR_SPLIT_ONCE);
+
+ match parse_iter_usage(cx, expr.span.ctxt(), cx.tcx.hir().parent_iter(expr.hir_id)) {
+ Some(usage) if needless(usage.kind) => lint_needless(cx, method_name, expr, self_arg, pat_arg),
+ Some(usage) if manual => check_manual_split_once(cx, method_name, expr, self_arg, pat_arg, &usage),
+ None if manual => {
+ check_manual_split_once_indirect(cx, method_name, expr, self_arg, pat_arg);
+ },
+ _ => {},
+ }
+}
+
+fn lint_needless(cx: &LateContext<'_>, method_name: &str, expr: &Expr<'_>, self_arg: &Expr<'_>, pat_arg: &Expr<'_>) {
+ let mut app = Applicability::MachineApplicable;
+ let r = if method_name == "splitn" { "" } else { "r" };
+
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_SPLITN,
+ expr.span,
+ &format!("unnecessary use of `{r}splitn`"),
+ "try this",
+ format!(
+ "{}.{r}split({})",
+ snippet_with_context(cx, self_arg.span, expr.span.ctxt(), "..", &mut app).0,
+ snippet_with_context(cx, pat_arg.span, expr.span.ctxt(), "..", &mut app).0,
+ ),
+ app,
+ );
+}
+
+fn check_manual_split_once(
+ cx: &LateContext<'_>,
+ method_name: &str,
+ expr: &Expr<'_>,
+ self_arg: &Expr<'_>,
+ pat_arg: &Expr<'_>,
+ usage: &IterUsage,
+) {
+ let ctxt = expr.span.ctxt();
+ let (msg, reverse) = if method_name == "splitn" {
+ ("manual implementation of `split_once`", false)
+ } else {
+ ("manual implementation of `rsplit_once`", true)
+ };
+
+ let mut app = Applicability::MachineApplicable;
+ let self_snip = snippet_with_context(cx, self_arg.span, ctxt, "..", &mut app).0;
+ let pat_snip = snippet_with_context(cx, pat_arg.span, ctxt, "..", &mut app).0;
+
+ let sugg = match usage.kind {
+ IterUsageKind::NextTuple => {
+ if reverse {
+ format!("{self_snip}.rsplit_once({pat_snip}).map(|(x, y)| (y, x))")
+ } else {
+ format!("{self_snip}.split_once({pat_snip})")
+ }
+ },
+ IterUsageKind::Nth(1) => {
+ let (r, field) = if reverse { ("r", 0) } else { ("", 1) };
+
+ match usage.unwrap_kind {
+ Some(UnwrapKind::Unwrap) => {
+ format!("{self_snip}.{r}split_once({pat_snip}).unwrap().{field}")
+ },
+ Some(UnwrapKind::QuestionMark) => {
+ format!("{self_snip}.{r}split_once({pat_snip})?.{field}")
+ },
+ None => {
+ format!("{self_snip}.{r}split_once({pat_snip}).map(|x| x.{field})")
+ },
+ }
+ },
+ IterUsageKind::Nth(_) => return,
+ };
+
+ span_lint_and_sugg(cx, MANUAL_SPLIT_ONCE, usage.span, msg, "try this", sugg, app);
+}
+
+/// checks for
+///
+/// ```
+/// let mut iter = "a.b.c".splitn(2, '.');
+/// let a = iter.next();
+/// let b = iter.next();
+/// ```
+fn check_manual_split_once_indirect(
+ cx: &LateContext<'_>,
+ method_name: &str,
+ expr: &Expr<'_>,
+ self_arg: &Expr<'_>,
+ pat_arg: &Expr<'_>,
+) -> Option<()> {
+ let ctxt = expr.span.ctxt();
+ let mut parents = cx.tcx.hir().parent_iter(expr.hir_id);
+ if let (_, Node::Local(local)) = parents.next()?
+ && let PatKind::Binding(BindingAnnotation::Mutable, iter_binding_id, iter_ident, None) = local.pat.kind
+ && let (iter_stmt_id, Node::Stmt(_)) = parents.next()?
+ && let (_, Node::Block(enclosing_block)) = parents.next()?
+
+ && let mut stmts = enclosing_block
+ .stmts
+ .iter()
+ .skip_while(|stmt| stmt.hir_id != iter_stmt_id)
+ .skip(1)
+
+ && let first = indirect_usage(cx, stmts.next()?, iter_binding_id, ctxt)?
+ && let second = indirect_usage(cx, stmts.next()?, iter_binding_id, ctxt)?
+ && first.unwrap_kind == second.unwrap_kind
+ && first.name != second.name
+ && !local_used_after_expr(cx, iter_binding_id, second.init_expr)
+ {
+ let (r, lhs, rhs) = if method_name == "splitn" {
+ ("", first.name, second.name)
+ } else {
+ ("r", second.name, first.name)
+ };
+ let msg = format!("manual implementation of `{r}split_once`");
+
+ let mut app = Applicability::MachineApplicable;
+ let self_snip = snippet_with_context(cx, self_arg.span, ctxt, "..", &mut app).0;
+ let pat_snip = snippet_with_context(cx, pat_arg.span, ctxt, "..", &mut app).0;
+
+ span_lint_and_then(cx, MANUAL_SPLIT_ONCE, local.span, &msg, |diag| {
+ diag.span_label(first.span, "first usage here");
+ diag.span_label(second.span, "second usage here");
+
+ let unwrap = match first.unwrap_kind {
+ UnwrapKind::Unwrap => ".unwrap()",
+ UnwrapKind::QuestionMark => "?",
+ };
+ diag.span_suggestion_verbose(
+ local.span,
+ &format!("try `{r}split_once`"),
+ format!("let ({lhs}, {rhs}) = {self_snip}.{r}split_once({pat_snip}){unwrap};"),
+ app,
+ );
+
+ let remove_msg = format!("remove the `{iter_ident}` usages");
+ diag.span_suggestion(
+ first.span,
+ &remove_msg,
+ "",
+ app,
+ );
+ diag.span_suggestion(
+ second.span,
+ &remove_msg,
+ "",
+ app,
+ );
+ });
+ }
+
+ Some(())
+}
+
+#[derive(Debug)]
+struct IndirectUsage<'a> {
+ name: Symbol,
+ span: Span,
+ init_expr: &'a Expr<'a>,
+ unwrap_kind: UnwrapKind,
+}
+
+/// returns `Some(IndirectUsage)` for e.g.
+///
+/// ```ignore
+/// let name = binding.next()?;
+/// let name = binding.next().unwrap();
+/// ```
+fn indirect_usage<'tcx>(
+ cx: &LateContext<'tcx>,
+ stmt: &Stmt<'tcx>,
+ binding: HirId,
+ ctxt: SyntaxContext,
+) -> Option<IndirectUsage<'tcx>> {
+ if let StmtKind::Local(Local {
+ pat:
+ Pat {
+ kind: PatKind::Binding(BindingAnnotation::Unannotated, _, ident, None),
+ ..
+ },
+ init: Some(init_expr),
+ hir_id: local_hir_id,
+ ..
+ }) = stmt.kind
+ {
+ let mut path_to_binding = None;
+ expr_visitor(cx, |expr| {
+ if path_to_local_id(expr, binding) {
+ path_to_binding = Some(expr);
+ }
+
+ path_to_binding.is_none()
+ })
+ .visit_expr(init_expr);
+
+ let mut parents = cx.tcx.hir().parent_iter(path_to_binding?.hir_id);
+ let iter_usage = parse_iter_usage(cx, ctxt, &mut parents)?;
+
+ let (parent_id, _) = parents.find(|(_, node)| {
+ !matches!(
+ node,
+ Node::Expr(Expr {
+ kind: ExprKind::Match(.., MatchSource::TryDesugar),
+ ..
+ })
+ )
+ })?;
+
+ if let IterUsage {
+ kind: IterUsageKind::Nth(0),
+ unwrap_kind: Some(unwrap_kind),
+ ..
+ } = iter_usage
+ {
+ if parent_id == *local_hir_id {
+ return Some(IndirectUsage {
+ name: ident.name,
+ span: stmt.span,
+ init_expr,
+ unwrap_kind,
+ });
+ }
+ }
+ }
+
+ None
+}
+
+#[derive(Debug, Clone, Copy)]
+enum IterUsageKind {
+ Nth(u128),
+ NextTuple,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+enum UnwrapKind {
+ Unwrap,
+ QuestionMark,
+}
+
+#[derive(Debug)]
+struct IterUsage {
+ kind: IterUsageKind,
+ unwrap_kind: Option<UnwrapKind>,
+ span: Span,
+}
+
+#[allow(clippy::too_many_lines)]
+fn parse_iter_usage<'tcx>(
+ cx: &LateContext<'tcx>,
+ ctxt: SyntaxContext,
+ mut iter: impl Iterator<Item = (HirId, Node<'tcx>)>,
+) -> Option<IterUsage> {
+ let (kind, span) = match iter.next() {
+ Some((_, Node::Expr(e))) if e.span.ctxt() == ctxt => {
+ let (name, args) = if let ExprKind::MethodCall(name, [_, args @ ..], _) = e.kind {
+ (name, args)
+ } else {
+ return None;
+ };
+ let did = cx.typeck_results().type_dependent_def_id(e.hir_id)?;
+ let iter_id = cx.tcx.get_diagnostic_item(sym::Iterator)?;
+
+ match (name.ident.as_str(), args) {
+ ("next", []) if cx.tcx.trait_of_item(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span),
+ ("next_tuple", []) => {
+ return if_chain! {
+ if match_def_path(cx, did, &paths::ITERTOOLS_NEXT_TUPLE);
+ if let ty::Adt(adt_def, subs) = cx.typeck_results().expr_ty(e).kind();
+ if cx.tcx.is_diagnostic_item(sym::Option, adt_def.did());
+ if let ty::Tuple(subs) = subs.type_at(0).kind();
+ if subs.len() == 2;
+ then {
+ Some(IterUsage {
+ kind: IterUsageKind::NextTuple,
+ span: e.span,
+ unwrap_kind: None
+ })
+ } else {
+ None
+ }
+ };
+ },
+ ("nth" | "skip", [idx_expr]) if cx.tcx.trait_of_item(did) == Some(iter_id) => {
+ if let Some((Constant::Int(idx), _)) = constant(cx, cx.typeck_results(), idx_expr) {
+ let span = if name.ident.as_str() == "nth" {
+ e.span
+ } else {
+ if_chain! {
+ if let Some((_, Node::Expr(next_expr))) = iter.next();
+ if let ExprKind::MethodCall(next_name, [_], _) = next_expr.kind;
+ if next_name.ident.name == sym::next;
+ if next_expr.span.ctxt() == ctxt;
+ if let Some(next_id) = cx.typeck_results().type_dependent_def_id(next_expr.hir_id);
+ if cx.tcx.trait_of_item(next_id) == Some(iter_id);
+ then {
+ next_expr.span
+ } else {
+ return None;
+ }
+ }
+ };
+ (IterUsageKind::Nth(idx), span)
+ } else {
+ return None;
+ }
+ },
+ _ => return None,
+ }
+ },
+ _ => return None,
+ };
+
+ let (unwrap_kind, span) = if let Some((_, Node::Expr(e))) = iter.next() {
+ match e.kind {
+ ExprKind::Call(
+ Expr {
+ kind: ExprKind::Path(QPath::LangItem(LangItem::TryTraitBranch, ..)),
+ ..
+ },
+ _,
+ ) => {
+ let parent_span = e.span.parent_callsite().unwrap();
+ if parent_span.ctxt() == ctxt {
+ (Some(UnwrapKind::QuestionMark), parent_span)
+ } else {
+ (None, span)
+ }
+ },
+ _ if e.span.ctxt() != ctxt => (None, span),
+ ExprKind::MethodCall(name, [_], _)
+ if name.ident.name == sym::unwrap
+ && cx
+ .typeck_results()
+ .type_dependent_def_id(e.hir_id)
+ .map_or(false, |id| is_diag_item_method(cx, id, sym::Option)) =>
+ {
+ (Some(UnwrapKind::Unwrap), e.span)
+ },
+ _ => (None, span),
+ }
+ } else {
+ (None, span)
+ };
+
+ Some(IterUsage {
+ kind,
+ unwrap_kind,
+ span,
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs b/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs
new file mode 100644
index 000000000..d06658f2a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs
@@ -0,0 +1,45 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::method_chain_args;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::symbol::sym;
+
+use super::STRING_EXTEND_CHARS;
+
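+/// Lints `String::extend` called with a `chars()` iterator, e.g.
+/// `s.extend(other.chars())`, which can be written as `s.push_str(&other)`.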
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, arg: &hir::Expr<'_>) {
+ let obj_ty = cx.typeck_results().expr_ty(recv).peel_refs();
+ if !is_type_diagnostic_item(cx, obj_ty, sym::String) {
+ return;
+ }
+ if let Some(arglists) = method_chain_args(arg, &["chars"]) {
+ let target = &arglists[0][0];
+ let self_ty = cx.typeck_results().expr_ty(target).peel_refs();
+ let ref_str = if *self_ty.kind() == ty::Str {
+ ""
+ } else if is_type_diagnostic_item(cx, self_ty, sym::String) {
+ "&"
+ } else {
+ return;
+ };
+
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ STRING_EXTEND_CHARS,
+ expr.span,
+ "calling `.extend(_.chars())`",
+ "try this",
+ format!(
+ "{}.push_str({}{})",
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability),
+ ref_str,
+ snippet_with_applicability(cx, target.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs
new file mode 100644
index 000000000..9c3375bf3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs
@@ -0,0 +1,36 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::usage::mutated_variables;
+use clippy_utils::{expr_or_init, is_trait_method};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::SUSPICIOUS_MAP;
+
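+/// Lints `map(..).count()` where the `map` closure returns a non-unit value and
+/// mutates nothing, e.g. `iter.map(|x| x * 2).count()`, since the mapping cannot
+/// affect the count.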
+pub fn check<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, count_recv: &hir::Expr<'_>, map_arg: &hir::Expr<'_>) {
+ if_chain! {
+ if is_trait_method(cx, count_recv, sym::Iterator);
+ let closure = expr_or_init(cx, map_arg);
+ if let Some(def_id) = cx.tcx.hir().opt_local_def_id(closure.hir_id);
+ if let Some(body_id) = cx.tcx.hir().maybe_body_owned_by(def_id);
+ let closure_body = cx.tcx.hir().body(body_id);
+ if !cx.typeck_results().expr_ty(&closure_body.value).is_unit();
+ then {
+ if let Some(map_mutated_vars) = mutated_variables(&closure_body.value, cx) {
+ // A variable is used mutably inside of the closure. Suppress the lint.
+ if !map_mutated_vars.is_empty() {
+ return;
+ }
+ }
+ span_lint_and_help(
+ cx,
+ SUSPICIOUS_MAP,
+ expr.span,
+ "this call to `map()` won't have an effect on the call to `count()`",
+ None,
+ "make sure you did not confuse `map` with `filter`, `for_each` or `inspect`",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
new file mode 100644
index 000000000..55567d862
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
@@ -0,0 +1,48 @@
+use clippy_utils::diagnostics::span_lint_and_note;
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::source_map::Spanned;
+
+use super::SUSPICIOUS_SPLITN;
+
+pub(super) fn check(cx: &LateContext<'_>, method_name: &str, expr: &Expr<'_>, self_arg: &Expr<'_>, count: u128) {
+ if_chain! {
+ if count <= 1;
+ if let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if let Some(impl_id) = cx.tcx.impl_of_method(call_id);
+ if cx.tcx.impl_trait_ref(impl_id).is_none();
+ let self_ty = cx.tcx.type_of(impl_id);
+ if self_ty.is_slice() || self_ty.is_str();
+ then {
+ // Ignore empty slice and string literals when used with a literal count.
+ if matches!(self_arg.kind, ExprKind::Array([]))
+ || matches!(self_arg.kind, ExprKind::Lit(Spanned { node: LitKind::Str(s, _), .. }) if s.is_empty())
+ {
+ return;
+ }
+
+ let (msg, note_msg) = if count == 0 {
+ (format!("`{}` called with `0` splits", method_name),
+ "the resulting iterator will always return `None`")
+ } else {
+ (format!("`{}` called with `1` split", method_name),
+ if self_ty.is_slice() {
+ "the resulting iterator will always return the entire slice followed by `None`"
+ } else {
+ "the resulting iterator will always return the entire string followed by `None`"
+ })
+ };
+
+ span_lint_and_note(
+ cx,
+ SUSPICIOUS_SPLITN,
+ expr.span,
+ &msg,
+ None,
+ note_msg,
+ );
+ }
+ }
+}
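+
+// Minimal sketches of the calls this check flags (illustrative only):
+//
+//     "a,b".splitn(0, ',');   // `splitn` called with `0` splits: always yields `None`
+//     "a,b".splitn(1, ',');   // `splitn` called with `1` split: yields the whole string, then `None`
+//
+// Empty string and slice literals are skipped by the early return above.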
diff --git a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
new file mode 100644
index 000000000..77d21f1d3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
@@ -0,0 +1,26 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{is_expr_diagnostic_item, ty::is_uninit_value_valid_for_ty};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::UNINIT_ASSUMED_INIT;
+
+/// lint for `MaybeUninit::uninit().assume_init()` when an uninitialized value is not valid for the resulting type
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
+ if_chain! {
+ if let hir::ExprKind::Call(callee, args) = recv.kind;
+ if args.is_empty();
+ if is_expr_diagnostic_item(cx, callee, sym::maybe_uninit_uninit);
+ if !is_uninit_value_valid_for_ty(cx, cx.typeck_results().expr_ty_adjusted(expr));
+ then {
+ span_lint(
+ cx,
+ UNINIT_ASSUMED_INIT,
+ expr.span,
+ "this call for this type may be undefined behavior"
+ );
+ }
+ }
+}
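+
+// A minimal sketch of the call this check flags (illustrative only):
+//
+//     use std::mem::MaybeUninit;
+//     let v: Vec<u8> = unsafe { MaybeUninit::uninit().assume_init() };   // flagged: may be UB
+//
+// Types for which an uninitialized value is considered valid are not linted.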
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs
new file mode 100644
index 000000000..bafa6fc58
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs
@@ -0,0 +1,132 @@
+use super::utils::clone_or_copy_needed;
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_copy;
+use clippy_utils::usage::mutated_variables;
+use clippy_utils::{is_lang_ctor, is_trait_method, path_to_local_id};
+use rustc_hir as hir;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::LangItem::{OptionNone, OptionSome};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::sym;
+
+use super::UNNECESSARY_FILTER_MAP;
+use super::UNNECESSARY_FIND_MAP;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, arg: &hir::Expr<'_>, name: &str) {
+ if !is_trait_method(cx, expr, sym::Iterator) {
+ return;
+ }
+
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = arg.kind {
+ let body = cx.tcx.hir().body(body);
+ let arg_id = body.params[0].pat.hir_id;
+ let mutates_arg =
+ mutated_variables(&body.value, cx).map_or(true, |used_mutably| used_mutably.contains(&arg_id));
+ let (clone_or_copy_needed, _) = clone_or_copy_needed(cx, body.params[0].pat, &body.value);
+
+ let (mut found_mapping, mut found_filtering) = check_expression(cx, arg_id, &body.value);
+
+ let mut return_visitor = ReturnVisitor::new(cx, arg_id);
+ return_visitor.visit_expr(&body.value);
+ found_mapping |= return_visitor.found_mapping;
+ found_filtering |= return_visitor.found_filtering;
+
+ let in_ty = cx.typeck_results().node_type(body.params[0].hir_id);
+ let sugg = if !found_filtering {
+ if name == "filter_map" { "map" } else { "map(..).next()" }
+ } else if !found_mapping && !mutates_arg && (!clone_or_copy_needed || is_copy(cx, in_ty)) {
+ match cx.typeck_results().expr_ty(&body.value).kind() {
+ ty::Adt(adt, subst)
+ if cx.tcx.is_diagnostic_item(sym::Option, adt.did()) && in_ty == subst.type_at(0) =>
+ {
+ if name == "filter_map" { "filter" } else { "find" }
+ },
+ _ => return,
+ }
+ } else {
+ return;
+ };
+ span_lint(
+ cx,
+ if name == "filter_map" {
+ UNNECESSARY_FILTER_MAP
+ } else {
+ UNNECESSARY_FIND_MAP
+ },
+ expr.span,
+ &format!("this `.{}` can be written more simply using `.{}`", name, sugg),
+ );
+ }
+}
+
+// returns (found_mapping, found_filtering)
+fn check_expression<'tcx>(cx: &LateContext<'tcx>, arg_id: hir::HirId, expr: &'tcx hir::Expr<'_>) -> (bool, bool) {
+ match &expr.kind {
+ hir::ExprKind::Call(func, args) => {
+ if let hir::ExprKind::Path(ref path) = func.kind {
+ if is_lang_ctor(cx, path, OptionSome) {
+ if path_to_local_id(&args[0], arg_id) {
+ return (false, false);
+ }
+ return (true, false);
+ }
+ }
+ (true, true)
+ },
+ hir::ExprKind::Block(block, _) => block
+ .expr
+ .as_ref()
+ .map_or((false, false), |expr| check_expression(cx, arg_id, expr)),
+ hir::ExprKind::Match(_, arms, _) => {
+ let mut found_mapping = false;
+ let mut found_filtering = false;
+ for arm in *arms {
+ let (m, f) = check_expression(cx, arg_id, arm.body);
+ found_mapping |= m;
+ found_filtering |= f;
+ }
+ (found_mapping, found_filtering)
+ },
+ // There must be an else_arm or there will be a type error
+ hir::ExprKind::If(_, if_arm, Some(else_arm)) => {
+ let if_check = check_expression(cx, arg_id, if_arm);
+ let else_check = check_expression(cx, arg_id, else_arm);
+ (if_check.0 | else_check.0, if_check.1 | else_check.1)
+ },
+ hir::ExprKind::Path(path) if is_lang_ctor(cx, path, OptionNone) => (false, true),
+ _ => (true, true),
+ }
+}
+
+struct ReturnVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ arg_id: hir::HirId,
+ // Found a non-None return that isn't Some(input)
+ found_mapping: bool,
+ // Found a return that isn't Some
+ found_filtering: bool,
+}
+
+impl<'a, 'tcx> ReturnVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>, arg_id: hir::HirId) -> ReturnVisitor<'a, 'tcx> {
+ ReturnVisitor {
+ cx,
+ arg_id,
+ found_mapping: false,
+ found_filtering: false,
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for ReturnVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
+ if let hir::ExprKind::Ret(Some(expr)) = &expr.kind {
+ let (found_mapping, found_filtering) = check_expression(self.cx, self.arg_id, expr);
+ self.found_mapping |= found_mapping;
+ self.found_filtering |= found_filtering;
+ } else {
+ walk_expr(self, expr);
+ }
+ }
+}
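+
+// Minimal sketches of the closures this check flags (illustrative only; `iter` is a
+// hypothetical iterator):
+//
+//     iter.filter_map(|x| Some(x + 1));                          // never filters: suggest `.map`
+//     iter.filter_map(|x| if x > 0 { Some(x) } else { None });   // never maps: suggest `.filter`
+//     iter.find_map(|x| Some(x + 1));                            // suggest `.map(..).next()`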
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
new file mode 100644
index 000000000..c3531d4d0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
@@ -0,0 +1,95 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{is_trait_method, path_to_local_id, peel_blocks, strip_pat_refs};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::PatKind;
+use rustc_lint::LateContext;
+use rustc_span::{source_map::Span, sym};
+
+use super::UNNECESSARY_FOLD;
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ init: &hir::Expr<'_>,
+ acc: &hir::Expr<'_>,
+ fold_span: Span,
+) {
+ fn check_fold_with_op(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ acc: &hir::Expr<'_>,
+ fold_span: Span,
+ op: hir::BinOpKind,
+ replacement_method_name: &str,
+ replacement_has_args: bool,
+ ) {
+ if_chain! {
+ // Extract the body of the closure passed to fold
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = acc.kind;
+ let closure_body = cx.tcx.hir().body(body);
+ let closure_expr = peel_blocks(&closure_body.value);
+
+ // Check if the closure body is of the form `acc <op> some_expr(x)`
+ if let hir::ExprKind::Binary(ref bin_op, left_expr, right_expr) = closure_expr.kind;
+ if bin_op.node == op;
+
+ // Extract the names of the two arguments to the closure
+ if let [param_a, param_b] = closure_body.params;
+ if let PatKind::Binding(_, first_arg_id, ..) = strip_pat_refs(param_a.pat).kind;
+ if let PatKind::Binding(_, second_arg_id, second_arg_ident, _) = strip_pat_refs(param_b.pat).kind;
+
+ if path_to_local_id(left_expr, first_arg_id);
+ if replacement_has_args || path_to_local_id(right_expr, second_arg_id);
+
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let sugg = if replacement_has_args {
+ format!(
+ "{replacement}(|{s}| {r})",
+ replacement = replacement_method_name,
+ s = second_arg_ident,
+ r = snippet_with_applicability(cx, right_expr.span, "EXPR", &mut applicability),
+ )
+ } else {
+ format!(
+ "{replacement}()",
+ replacement = replacement_method_name,
+ )
+ };
+
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_FOLD,
+ fold_span.with_hi(expr.span.hi()),
+ // TODO #2371 don't suggest e.g., .any(|x| f(x)) if we can suggest .any(f)
+ "this `.fold` can be written more succinctly using another method",
+ "try",
+ sugg,
+ applicability,
+ );
+ }
+ }
+ }
+
+ // Check that this is a call to Iterator::fold rather than just some function called fold
+ if !is_trait_method(cx, expr, sym::Iterator) {
+ return;
+ }
+
+ // Check if the first argument to .fold is a suitable literal
+ if let hir::ExprKind::Lit(ref lit) = init.kind {
+ match lit.node {
+ ast::LitKind::Bool(false) => check_fold_with_op(cx, expr, acc, fold_span, hir::BinOpKind::Or, "any", true),
+ ast::LitKind::Bool(true) => check_fold_with_op(cx, expr, acc, fold_span, hir::BinOpKind::And, "all", true),
+ ast::LitKind::Int(0, _) => check_fold_with_op(cx, expr, acc, fold_span, hir::BinOpKind::Add, "sum", false),
+ ast::LitKind::Int(1, _) => {
+ check_fold_with_op(cx, expr, acc, fold_span, hir::BinOpKind::Mul, "product", false);
+ },
+ _ => (),
+ }
+ }
+}
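+
+// Minimal sketches of the folds this check rewrites (illustrative only; `iter` and `f` are
+// hypothetical):
+//
+//     iter.fold(false, |acc, x| acc || f(x));   // suggest `iter.any(|x| f(x))`
+//     iter.fold(true, |acc, x| acc && f(x));    // suggest `iter.all(|x| f(x))`
+//     iter.fold(0, |acc, x| acc + x);           // suggest `iter.sum()`
+//     iter.fold(1, |acc, x| acc * x);           // suggest `iter.product()`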
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
new file mode 100644
index 000000000..19037093e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
@@ -0,0 +1,104 @@
+use super::utils::clone_or_copy_needed;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher::ForLoop;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::{get_associated_type, get_iterator_item_ty, implements_trait};
+use clippy_utils::{fn_def_id, get_parent_expr};
+use rustc_errors::Applicability;
+use rustc_hir::{def_id::DefId, Expr, ExprKind, LangItem};
+use rustc_lint::LateContext;
+use rustc_span::{sym, Symbol};
+
+use super::UNNECESSARY_TO_OWNED;
+
+pub fn check(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: Symbol, receiver: &Expr<'_>) -> bool {
+ if_chain! {
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let Some(callee_def_id) = fn_def_id(cx, parent);
+ if is_into_iter(cx, callee_def_id);
+ then {
+ check_for_loop_iter(cx, parent, method_name, receiver, false)
+ } else {
+ false
+ }
+ }
+}
+
+/// Checks whether `expr` is an iterator in a `for` loop and, if so, determines whether the
+/// iterated-over items could be iterated over by reference. The reason why `check` above does not
+/// include this code directly is so that it can be called from
+/// `unnecessary_to_owned::check_into_iter_call_arg`.
+pub fn check_for_loop_iter(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ method_name: Symbol,
+ receiver: &Expr<'_>,
+ cloned_before_iter: bool,
+) -> bool {
+ if_chain! {
+ if let Some(grandparent) = get_parent_expr(cx, expr).and_then(|parent| get_parent_expr(cx, parent));
+ if let Some(ForLoop { pat, body, .. }) = ForLoop::hir(grandparent);
+ let (clone_or_copy_needed, addr_of_exprs) = clone_or_copy_needed(cx, pat, body);
+ if !clone_or_copy_needed;
+ if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
+ then {
+ let snippet = if_chain! {
+ if let ExprKind::MethodCall(maybe_iter_method_name, [collection], _) = receiver.kind;
+ if maybe_iter_method_name.ident.name == sym::iter;
+
+ if let Some(iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
+ let receiver_ty = cx.typeck_results().expr_ty(receiver);
+ if implements_trait(cx, receiver_ty, iterator_trait_id, &[]);
+ if let Some(iter_item_ty) = get_iterator_item_ty(cx, receiver_ty);
+
+ if let Some(into_iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator);
+ let collection_ty = cx.typeck_results().expr_ty(collection);
+ if implements_trait(cx, collection_ty, into_iterator_trait_id, &[]);
+ if let Some(into_iter_item_ty) = get_associated_type(cx, collection_ty, into_iterator_trait_id, "Item");
+
+ if iter_item_ty == into_iter_item_ty;
+ if let Some(collection_snippet) = snippet_opt(cx, collection.span);
+ then {
+ collection_snippet
+ } else {
+ receiver_snippet
+ }
+ };
+ span_lint_and_then(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ expr.span,
+ &format!("unnecessary use of `{}`", method_name),
+ |diag| {
+ // If `check_into_iter_call_arg` called `check_for_loop_iter` because a call to
+ // a `to_owned`-like function was removed, then the next suggestion may be
+ // incorrect. This is because the iterator that results from the call's removal
+ // could hold a reference to a resource that is used mutably. See
+ // https://github.com/rust-lang/rust-clippy/issues/8148.
+ let applicability = if cloned_before_iter {
+ Applicability::MaybeIncorrect
+ } else {
+ Applicability::MachineApplicable
+ };
+ diag.span_suggestion(expr.span, "use", snippet, applicability);
+ for addr_of_expr in addr_of_exprs {
+ match addr_of_expr.kind {
+ ExprKind::AddrOf(_, _, referent) => {
+ let span = addr_of_expr.span.with_hi(referent.span.lo());
+ diag.span_suggestion(span, "remove this `&`", "", applicability);
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ );
+ return true;
+ }
+ }
+ false
+}
+
+/// Returns true if the named method is `IntoIterator::into_iter`.
+pub fn is_into_iter(cx: &LateContext<'_>, callee_def_id: DefId) -> bool {
+ cx.tcx.lang_items().require(LangItem::IntoIterIntoIter) == Ok(callee_def_id)
+}
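+
+// A minimal sketch of the loop this check targets (illustrative only; `v` is a hypothetical
+// `Vec` and `x` is only used by reference in the body):
+//
+//     for x in v.iter().cloned() { /* ... */ }   // flagged as an unnecessary use of `cloned`
+//     for x in v.iter() { /* ... */ }            // suggested, with any `&x` borrows in the body removed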
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
new file mode 100644
index 000000000..973b8a7e6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
@@ -0,0 +1,41 @@
+use clippy_utils::{diagnostics::span_lint_and_sugg, ty::is_type_diagnostic_item};
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{Ref, Slice};
+use rustc_span::{sym, Span};
+
+use super::UNNECESSARY_JOIN;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ join_self_arg: &'tcx Expr<'tcx>,
+ join_arg: &'tcx Expr<'tcx>,
+ span: Span,
+) {
+ let applicability = Applicability::MachineApplicable;
+ let collect_output_adjusted_type = cx.typeck_results().expr_ty_adjusted(join_self_arg);
+ if_chain! {
+ // the turbofish for collect is ::<Vec<String>>
+ if let Ref(_, ref_type, _) = collect_output_adjusted_type.kind();
+ if let Slice(slice) = ref_type.kind();
+ if is_type_diagnostic_item(cx, *slice, sym::String);
+ // the argument for join is ""
+ if let ExprKind::Lit(spanned) = &join_arg.kind;
+ if let LitKind::Str(symbol, _) = spanned.node;
+ if symbol.is_empty();
+ then {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_JOIN,
+ span.with_hi(expr.span.hi()),
+ r#"called `.collect<Vec<String>>().join("")` on an iterator"#,
+ "try using",
+ "collect::<String>()".to_owned(),
+ applicability,
+ );
+ }
+ }
+}
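+
+// A minimal sketch of the call chain this check flags (illustrative only; `iter` yields `String`s):
+//
+//     let s = iter.collect::<Vec<String>>().join("");   // flagged
+//     let s = iter.collect::<String>();                 // suggested replacement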
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs
new file mode 100644
index 000000000..1876c7fb9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs
@@ -0,0 +1,70 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{eager_or_lazy, usage};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::UNNECESSARY_LAZY_EVALUATIONS;
+
+/// lint use of `<fn>_else(simple closure)` for `Option`s and `Result`s that can be
+/// replaced with `<fn>(return value of simple closure)`
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ arg: &'tcx hir::Expr<'_>,
+ simplify_using: &str,
+) {
+ let is_option = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Option);
+ let is_result = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result);
+ let is_bool = cx.typeck_results().expr_ty(recv).is_bool();
+
+ if is_option || is_result || is_bool {
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = arg.kind {
+ let body = cx.tcx.hir().body(body);
+ let body_expr = &body.value;
+
+ if usage::BindingUsageFinder::are_params_used(cx, body) {
+ return;
+ }
+
+ if eager_or_lazy::switch_to_eager_eval(cx, body_expr) {
+ let msg = if is_option {
+ "unnecessary closure used to substitute value for `Option::None`"
+ } else if is_result {
+ "unnecessary closure used to substitute value for `Result::Err`"
+ } else {
+ "unnecessary closure used with `bool::then`"
+ };
+ let applicability = if body
+ .params
+ .iter()
+ // bindings are checked to be unused above
+ .all(|param| matches!(param.pat.kind, hir::PatKind::Binding(..) | hir::PatKind::Wild))
+ {
+ Applicability::MachineApplicable
+ } else {
+ // replacing the lambda may break type inference
+ Applicability::MaybeIncorrect
+ };
+
+ // This is a duplicate of what's happening in clippy_lints::methods::method_call,
+            // which isn't ideal. We want to get the method call span,
+ // but prefer to avoid changing the signature of the function itself.
+ if let hir::ExprKind::MethodCall(_, _, span) = expr.kind {
+ span_lint_and_then(cx, UNNECESSARY_LAZY_EVALUATIONS, expr.span, msg, |diag| {
+ diag.span_suggestion(
+ span,
+ &format!("use `{}(..)` instead", simplify_using),
+ format!("{}({})", simplify_using, snippet(cx, body_expr.span, "..")),
+ applicability,
+ );
+ });
+ }
+ }
+ }
+ }
+}
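+
+// A minimal sketch of the lazy call this check simplifies (illustrative only; `opt` is a
+// hypothetical `Option`):
+//
+//     let _ = opt.unwrap_or_else(|| 42);   // flagged: suggest the eager `opt.unwrap_or(42)`
+//
+// Closures that use their parameters, or whose bodies are not cheap to evaluate eagerly,
+// are left alone.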
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
new file mode 100644
index 000000000..b3276f139
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
@@ -0,0 +1,431 @@
+use super::implicit_clone::is_clone_like;
+use super::unnecessary_iter_cloned::{self, is_into_iter};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::{
+ contains_ty, get_associated_type, get_iterator_item_ty, implements_trait, is_copy, peel_mid_ty_refs,
+};
+use clippy_utils::{meets_msrv, msrvs};
+
+use clippy_utils::{fn_def_id, get_parent_expr, is_diag_item_method, is_diag_trait_item};
+use rustc_errors::Applicability;
+use rustc_hir::{def_id::DefId, BorrowKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::mir::Mutability;
+use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref};
+use rustc_middle::ty::subst::{GenericArg, GenericArgKind, SubstsRef};
+use rustc_middle::ty::{self, PredicateKind, ProjectionPredicate, TraitPredicate, Ty};
+use rustc_semver::RustcVersion;
+use rustc_span::{sym, Symbol};
+use std::cmp::max;
+
+use super::UNNECESSARY_TO_OWNED;
+
+pub fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ method_name: Symbol,
+ args: &'tcx [Expr<'tcx>],
+ msrv: Option<RustcVersion>,
+) {
+ if_chain! {
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if let [receiver] = args;
+ then {
+ if is_cloned_or_copied(cx, method_name, method_def_id) {
+ unnecessary_iter_cloned::check(cx, expr, method_name, receiver);
+ } else if is_to_owned_like(cx, method_name, method_def_id) {
+ // At this point, we know the call is of a `to_owned`-like function. The functions
+ // `check_addr_of_expr` and `check_call_arg` determine whether the call is unnecessary
+ // based on its context, that is, whether it is a referent in an `AddrOf` expression, an
+ // argument in a `into_iter` call, or an argument in the call of some other function.
+ if check_addr_of_expr(cx, expr, method_name, method_def_id, receiver) {
+ return;
+ }
+ if check_into_iter_call_arg(cx, expr, method_name, receiver, msrv) {
+ return;
+ }
+ check_other_call_arg(cx, expr, method_name, receiver);
+ }
+ }
+ }
+}
+
+/// Checks whether `expr` is a referent in an `AddrOf` expression and, if so, determines whether its
+/// call of a `to_owned`-like function is unnecessary.
+#[allow(clippy::too_many_lines)]
+fn check_addr_of_expr(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ method_name: Symbol,
+ method_def_id: DefId,
+ receiver: &Expr<'_>,
+) -> bool {
+ if_chain! {
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, _) = parent.kind;
+ let adjustments = cx.typeck_results().expr_adjustments(parent).iter().collect::<Vec<_>>();
+ if let
+ // For matching uses of `Cow::from`
+ [
+ Adjustment {
+ kind: Adjust::Deref(None),
+ target: referent_ty,
+ },
+ Adjustment {
+ kind: Adjust::Borrow(_),
+ target: target_ty,
+ },
+ ]
+ // For matching uses of arrays
+ | [
+ Adjustment {
+ kind: Adjust::Deref(None),
+ target: referent_ty,
+ },
+ Adjustment {
+ kind: Adjust::Borrow(_),
+ ..
+ },
+ Adjustment {
+ kind: Adjust::Pointer(_),
+ target: target_ty,
+ },
+ ]
+ // For matching everything else
+ | [
+ Adjustment {
+ kind: Adjust::Deref(None),
+ target: referent_ty,
+ },
+ Adjustment {
+ kind: Adjust::Deref(Some(OverloadedDeref { .. })),
+ ..
+ },
+ Adjustment {
+ kind: Adjust::Borrow(_),
+ target: target_ty,
+ },
+ ] = adjustments[..];
+ let receiver_ty = cx.typeck_results().expr_ty(receiver);
+ let (target_ty, n_target_refs) = peel_mid_ty_refs(*target_ty);
+ let (receiver_ty, n_receiver_refs) = peel_mid_ty_refs(receiver_ty);
+ // Only flag cases satisfying at least one of the following three conditions:
+ // * the referent and receiver types are distinct
+ // * the referent/receiver type is a copyable array
+ // * the method is `Cow::into_owned`
+ // This restriction is to ensure there is no overlap between `redundant_clone` and this
+ // lint. It also avoids the following false positive:
+ // https://github.com/rust-lang/rust-clippy/issues/8759
+ // Arrays are a bit of a corner case. Non-copyable arrays are handled by
+ // `redundant_clone`, but copyable arrays are not.
+ if *referent_ty != receiver_ty
+ || (matches!(referent_ty.kind(), ty::Array(..)) && is_copy(cx, *referent_ty))
+ || is_cow_into_owned(cx, method_name, method_def_id);
+ if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
+ then {
+ if receiver_ty == target_ty && n_target_refs >= n_receiver_refs {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{}`", method_name),
+ "use",
+ format!(
+ "{:&>width$}{}",
+ "",
+ receiver_snippet,
+ width = n_target_refs - n_receiver_refs
+ ),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ if_chain! {
+ if let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref);
+ if implements_trait(cx, receiver_ty, deref_trait_id, &[]);
+ if get_associated_type(cx, receiver_ty, deref_trait_id, "Target") == Some(target_ty);
+ then {
+ if n_receiver_refs > 0 {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{}`", method_name),
+ "use",
+ receiver_snippet,
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ expr.span.with_lo(receiver.span.hi()),
+ &format!("unnecessary use of `{}`", method_name),
+ "remove this",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ return true;
+ }
+ }
+ if_chain! {
+ if let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef);
+ if implements_trait(cx, receiver_ty, as_ref_trait_id, &[GenericArg::from(target_ty)]);
+ then {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{}`", method_name),
+ "use",
+ format!("{}.as_ref()", receiver_snippet),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ }
+ }
+ }
+ false
+}
+
+/// Checks whether `expr` is an argument in an `into_iter` call and, if so, determines whether its
+/// call of a `to_owned`-like function is unnecessary.
+fn check_into_iter_call_arg(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ method_name: Symbol,
+ receiver: &Expr<'_>,
+ msrv: Option<RustcVersion>,
+) -> bool {
+ if_chain! {
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let Some(callee_def_id) = fn_def_id(cx, parent);
+ if is_into_iter(cx, callee_def_id);
+ if let Some(iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator);
+ let parent_ty = cx.typeck_results().expr_ty(parent);
+ if implements_trait(cx, parent_ty, iterator_trait_id, &[]);
+ if let Some(item_ty) = get_iterator_item_ty(cx, parent_ty);
+ if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
+ then {
+ if unnecessary_iter_cloned::check_for_loop_iter(cx, parent, method_name, receiver, true) {
+ return true;
+ }
+ let cloned_or_copied = if is_copy(cx, item_ty) && meets_msrv(msrv, msrvs::ITERATOR_COPIED) {
+ "copied"
+ } else {
+ "cloned"
+ };
+ // The next suggestion may be incorrect because the removal of the `to_owned`-like
+ // function could cause the iterator to hold a reference to a resource that is used
+ // mutably. See https://github.com/rust-lang/rust-clippy/issues/8148.
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ parent.span,
+ &format!("unnecessary use of `{}`", method_name),
+ "use",
+ format!("{}.iter().{}()", receiver_snippet, cloned_or_copied),
+ Applicability::MaybeIncorrect,
+ );
+ return true;
+ }
+ }
+ false
+}
+
+/// Checks whether `expr` is an argument in a function call and, if so, determines whether its call
+/// of a `to_owned`-like function is unnecessary.
+fn check_other_call_arg<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ method_name: Symbol,
+ receiver: &'tcx Expr<'tcx>,
+) -> bool {
+ if_chain! {
+ if let Some((maybe_call, maybe_arg)) = skip_addr_of_ancestors(cx, expr);
+ if let Some((callee_def_id, call_substs, call_args)) = get_callee_substs_and_args(cx, maybe_call);
+ let fn_sig = cx.tcx.fn_sig(callee_def_id).skip_binder();
+ if let Some(i) = call_args.iter().position(|arg| arg.hir_id == maybe_arg.hir_id);
+ if let Some(input) = fn_sig.inputs().get(i);
+ let (input, n_refs) = peel_mid_ty_refs(*input);
+ if let (trait_predicates, projection_predicates) = get_input_traits_and_projections(cx, callee_def_id, input);
+ if let Some(sized_def_id) = cx.tcx.lang_items().sized_trait();
+ if let [trait_predicate] = trait_predicates
+ .iter()
+ .filter(|trait_predicate| trait_predicate.def_id() != sized_def_id)
+ .collect::<Vec<_>>()[..];
+ if let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref);
+ if let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef);
+ let receiver_ty = cx.typeck_results().expr_ty(receiver);
+ // If the callee has type parameters, they could appear in `projection_predicate.ty` or the
+ // types of `trait_predicate.trait_ref.substs`.
+ if if trait_predicate.def_id() == deref_trait_id {
+ if let [projection_predicate] = projection_predicates[..] {
+ let normalized_ty =
+ cx.tcx
+ .subst_and_normalize_erasing_regions(call_substs, cx.param_env, projection_predicate.term);
+ implements_trait(cx, receiver_ty, deref_trait_id, &[])
+ && get_associated_type(cx, receiver_ty, deref_trait_id, "Target")
+ .map_or(false, |ty| ty::Term::Ty(ty) == normalized_ty)
+ } else {
+ false
+ }
+ } else if trait_predicate.def_id() == as_ref_trait_id {
+ let composed_substs = compose_substs(
+ cx,
+ &trait_predicate.trait_ref.substs.iter().skip(1).collect::<Vec<_>>()[..],
+ call_substs,
+ );
+ implements_trait(cx, receiver_ty, as_ref_trait_id, &composed_substs)
+ } else {
+ false
+ };
+ // We can't add an `&` when the trait is `Deref` because `Target = &T` won't match
+ // `Target = T`.
+ if n_refs > 0 || is_copy(cx, receiver_ty) || trait_predicate.def_id() != deref_trait_id;
+ let n_refs = max(n_refs, if is_copy(cx, receiver_ty) { 0 } else { 1 });
+ // If the trait is `AsRef` and the input type variable `T` occurs in the output type, then
+ // `T` must not be instantiated with a reference
+ // (https://github.com/rust-lang/rust-clippy/issues/8507).
+ if (n_refs == 0 && !receiver_ty.is_ref())
+ || trait_predicate.def_id() != as_ref_trait_id
+ || !contains_ty(fn_sig.output(), input);
+ if let Some(receiver_snippet) = snippet_opt(cx, receiver.span);
+ then {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_TO_OWNED,
+ maybe_arg.span,
+ &format!("unnecessary use of `{}`", method_name),
+ "use",
+ format!("{:&>width$}{}", "", receiver_snippet, width = n_refs),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ }
+ false
+}
+
+/// Walks an expression's ancestors until it finds a non-`AddrOf` expression. Returns the first such
+/// expression found (if any) along with the immediately prior expression.
+fn skip_addr_of_ancestors<'tcx>(
+ cx: &LateContext<'tcx>,
+ mut expr: &'tcx Expr<'tcx>,
+) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>)> {
+ while let Some(parent) = get_parent_expr(cx, expr) {
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, _) = parent.kind {
+ expr = parent;
+ } else {
+ return Some((parent, expr));
+ }
+ }
+ None
+}
+
+/// Checks whether an expression is a function or method call and, if so, returns its `DefId`,
+/// `Substs`, and arguments.
+fn get_callee_substs_and_args<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+) -> Option<(DefId, SubstsRef<'tcx>, &'tcx [Expr<'tcx>])> {
+ if_chain! {
+ if let ExprKind::Call(callee, args) = expr.kind;
+ let callee_ty = cx.typeck_results().expr_ty(callee);
+ if let ty::FnDef(callee_def_id, _) = callee_ty.kind();
+ then {
+ let substs = cx.typeck_results().node_substs(callee.hir_id);
+ return Some((*callee_def_id, substs, args));
+ }
+ }
+ if_chain! {
+ if let ExprKind::MethodCall(_, args, _) = expr.kind;
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ then {
+ let substs = cx.typeck_results().node_substs(expr.hir_id);
+ return Some((method_def_id, substs, args));
+ }
+ }
+ None
+}
+
+/// Returns the `TraitPredicate`s and `ProjectionPredicate`s for a function's input type.
+fn get_input_traits_and_projections<'tcx>(
+ cx: &LateContext<'tcx>,
+ callee_def_id: DefId,
+ input: Ty<'tcx>,
+) -> (Vec<TraitPredicate<'tcx>>, Vec<ProjectionPredicate<'tcx>>) {
+ let mut trait_predicates = Vec::new();
+ let mut projection_predicates = Vec::new();
+ for (predicate, _) in cx.tcx.predicates_of(callee_def_id).predicates.iter() {
+ // `substs` should have 1 + n elements. The first is the type on the left hand side of an
+ // `as`. The remaining n are trait parameters.
+ let is_input_substs = |substs: SubstsRef<'tcx>| {
+ if_chain! {
+ if let Some(arg) = substs.iter().next();
+ if let GenericArgKind::Type(arg_ty) = arg.unpack();
+ if arg_ty == input;
+ then { true } else { false }
+ }
+ };
+ match predicate.kind().skip_binder() {
+ PredicateKind::Trait(trait_predicate) => {
+ if is_input_substs(trait_predicate.trait_ref.substs) {
+ trait_predicates.push(trait_predicate);
+ }
+ },
+ PredicateKind::Projection(projection_predicate) => {
+ if is_input_substs(projection_predicate.projection_ty.substs) {
+ projection_predicates.push(projection_predicate);
+ }
+ },
+ _ => {},
+ }
+ }
+ (trait_predicates, projection_predicates)
+}
+
+/// Composes two substitutions by applying the latter to the types of the former.
+fn compose_substs<'tcx>(
+ cx: &LateContext<'tcx>,
+ left: &[GenericArg<'tcx>],
+ right: SubstsRef<'tcx>,
+) -> Vec<GenericArg<'tcx>> {
+ left.iter()
+ .map(|arg| {
+ if let GenericArgKind::Type(arg_ty) = arg.unpack() {
+ let normalized_ty = cx.tcx.subst_and_normalize_erasing_regions(right, cx.param_env, arg_ty);
+ GenericArg::from(normalized_ty)
+ } else {
+ *arg
+ }
+ })
+ .collect()
+}
+
+/// Returns true if the named method is `Iterator::cloned` or `Iterator::copied`.
+fn is_cloned_or_copied(cx: &LateContext<'_>, method_name: Symbol, method_def_id: DefId) -> bool {
+ (method_name.as_str() == "cloned" || method_name.as_str() == "copied")
+ && is_diag_trait_item(cx, method_def_id, sym::Iterator)
+}
+
+/// Returns true if the named method can be used to convert the receiver to its "owned"
+/// representation.
+fn is_to_owned_like(cx: &LateContext<'_>, method_name: Symbol, method_def_id: DefId) -> bool {
+ is_clone_like(cx, method_name.as_str(), method_def_id)
+ || is_cow_into_owned(cx, method_name, method_def_id)
+ || is_to_string(cx, method_name, method_def_id)
+}
+
+/// Returns true if the named method is `Cow::into_owned`.
+fn is_cow_into_owned(cx: &LateContext<'_>, method_name: Symbol, method_def_id: DefId) -> bool {
+ method_name.as_str() == "into_owned" && is_diag_item_method(cx, method_def_id, sym::Cow)
+}
+
+/// Returns true if the named method is `ToString::to_string`.
+fn is_to_string(cx: &LateContext<'_>, method_name: Symbol, method_def_id: DefId) -> bool {
+ method_name == sym::to_string && is_diag_trait_item(cx, method_def_id, sym::ToString)
+}
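+
+// A minimal sketch of one pattern this check flags (illustrative only; `print_str` and
+// `name` are hypothetical):
+//
+//     fn print_str(s: &str) { println!("{}", s); }
+//     let name: &str = "clippy";
+//     print_str(&name.to_string());   // flagged: suggest `print_str(name)`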
diff --git a/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs b/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs
new file mode 100644
index 000000000..f3af281d6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs
@@ -0,0 +1,46 @@
+//! Lint for `some_result_or_option.unwrap_or_else(Default::default)`
+
+use super::UNWRAP_OR_ELSE_DEFAULT;
+use clippy_utils::{
+ diagnostics::span_lint_and_sugg, is_default_equivalent_call, source::snippet_with_applicability,
+ ty::is_type_diagnostic_item,
+};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ recv: &'tcx hir::Expr<'_>,
+ u_arg: &'tcx hir::Expr<'_>,
+) {
+ // something.unwrap_or_else(Default::default)
+ // ^^^^^^^^^- recv ^^^^^^^^^^^^^^^^- u_arg
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- expr
+ let recv_ty = cx.typeck_results().expr_ty(recv);
+ let is_option = is_type_diagnostic_item(cx, recv_ty, sym::Option);
+ let is_result = is_type_diagnostic_item(cx, recv_ty, sym::Result);
+
+ if_chain! {
+ if is_option || is_result;
+ if is_default_equivalent_call(cx, u_arg);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+
+ span_lint_and_sugg(
+ cx,
+ UNWRAP_OR_ELSE_DEFAULT,
+ expr.span,
+ "use of `.unwrap_or_else(..)` to construct default value",
+ "try",
+ format!(
+ "{}.unwrap_or_default()",
+ snippet_with_applicability(cx, recv.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+}
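+
+// A minimal sketch of the call this check rewrites (illustrative only; `opt` is a
+// hypothetical `Option<String>`):
+//
+//     let _ = opt.unwrap_or_else(Default::default);   // flagged: suggest `opt.unwrap_or_default()`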
diff --git a/src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs b/src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs
new file mode 100644
index 000000000..5c7610149
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs
@@ -0,0 +1,40 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_in_test_function;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::UNWRAP_USED;
+
+/// lint use of `unwrap()` for `Option`s and `Result`s
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>, allow_unwrap_in_tests: bool) {
+ let obj_ty = cx.typeck_results().expr_ty(recv).peel_refs();
+
+ let mess = if is_type_diagnostic_item(cx, obj_ty, sym::Option) {
+ Some((UNWRAP_USED, "an Option", "None"))
+ } else if is_type_diagnostic_item(cx, obj_ty, sym::Result) {
+ Some((UNWRAP_USED, "a Result", "Err"))
+ } else {
+ None
+ };
+
+ if allow_unwrap_in_tests && is_in_test_function(cx.tcx, expr.hir_id) {
+ return;
+ }
+
+ if let Some((lint, kind, none_value)) = mess {
+ span_lint_and_help(
+ cx,
+ lint,
+ expr.span,
+ &format!("used `unwrap()` on `{}` value", kind,),
+ None,
+ &format!(
+ "if you don't want to handle the `{}` case gracefully, consider \
+ using `expect()` to provide a better panic message",
+ none_value,
+ ),
+ );
+ }
+}
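+
+// A minimal sketch of what this check flags (illustrative only):
+//
+//     let opt: Option<u32> = Some(1);
+//     opt.unwrap();                           // flagged
+//     opt.expect("the value should be set");  // suggested alternative with a better panic message
+//
+// With `allow_unwrap_in_tests` enabled, calls inside test functions are skipped.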
diff --git a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
new file mode 100644
index 000000000..ca5d33ee8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
@@ -0,0 +1,45 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::walk_ptrs_ty_depth;
+use clippy_utils::{get_parent_expr, match_trait_method, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+use super::USELESS_ASREF;
+
+/// Checks for the `USELESS_ASREF` lint.
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: &str, recvr: &hir::Expr<'_>) {
+ // when we get here, we've already checked that the call name is "as_ref" or "as_mut"
+ // check if the call is to the actual `AsRef` or `AsMut` trait
+ if match_trait_method(cx, expr, &paths::ASREF_TRAIT) || match_trait_method(cx, expr, &paths::ASMUT_TRAIT) {
+ // check if the type after `as_ref` or `as_mut` is the same as before
+ let rcv_ty = cx.typeck_results().expr_ty(recvr);
+ let res_ty = cx.typeck_results().expr_ty(expr);
+ let (base_res_ty, res_depth) = walk_ptrs_ty_depth(res_ty);
+ let (base_rcv_ty, rcv_depth) = walk_ptrs_ty_depth(rcv_ty);
+ if base_rcv_ty == base_res_ty && rcv_depth >= res_depth {
+ // allow the `as_ref` or `as_mut` if it is followed by another method call
+ if_chain! {
+ if let Some(parent) = get_parent_expr(cx, expr);
+ if let hir::ExprKind::MethodCall(segment, ..) = parent.kind;
+ if segment.ident.span != expr.span;
+ then {
+ return;
+ }
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ USELESS_ASREF,
+ expr.span,
+ &format!("this call to `{}` does nothing", call_name),
+ "try this",
+ snippet_with_applicability(cx, recvr.span, "..", &mut applicability).to_string(),
+ applicability,
+ );
+ }
+ }
+}
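+
+// A minimal sketch of the call this check flags (illustrative only; `takes_str` is hypothetical):
+//
+//     fn takes_str(s: &str) -> usize { s.len() }
+//     let s = "hello";
+//     let _ = takes_str(s.as_ref());   // flagged: this `as_ref` does nothing, suggest `takes_str(s)`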
diff --git a/src/tools/clippy/clippy_lints/src/methods/utils.rs b/src/tools/clippy/clippy_lints/src/methods/utils.rs
new file mode 100644
index 000000000..3015531e8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/utils.rs
@@ -0,0 +1,168 @@
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{get_parent_expr, path_to_local_id, usage};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{BorrowKind, Expr, ExprKind, HirId, Mutability, Pat};
+use rustc_lint::LateContext;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::sym;
+
+pub(super) fn derefs_to_slice<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'tcx>,
+ ty: Ty<'tcx>,
+) -> Option<&'tcx hir::Expr<'tcx>> {
+ fn may_slice<'a>(cx: &LateContext<'a>, ty: Ty<'a>) -> bool {
+ match ty.kind() {
+ ty::Slice(_) => true,
+ ty::Adt(def, _) if def.is_box() => may_slice(cx, ty.boxed_ty()),
+ ty::Adt(..) => is_type_diagnostic_item(cx, ty, sym::Vec),
+ ty::Array(_, size) => size.try_eval_usize(cx.tcx, cx.param_env).is_some(),
+ ty::Ref(_, inner, _) => may_slice(cx, *inner),
+ _ => false,
+ }
+ }
+
+ if let hir::ExprKind::MethodCall(path, [self_arg, ..], _) = &expr.kind {
+ if path.ident.name == sym::iter && may_slice(cx, cx.typeck_results().expr_ty(self_arg)) {
+ Some(self_arg)
+ } else {
+ None
+ }
+ } else {
+ match ty.kind() {
+ ty::Slice(_) => Some(expr),
+ ty::Adt(def, _) if def.is_box() && may_slice(cx, ty.boxed_ty()) => Some(expr),
+ ty::Ref(_, inner, _) => {
+ if may_slice(cx, *inner) {
+ Some(expr)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+ }
+}
+
+pub(super) fn get_hint_if_single_char_arg(
+ cx: &LateContext<'_>,
+ arg: &hir::Expr<'_>,
+ applicability: &mut Applicability,
+) -> Option<String> {
+ if_chain! {
+ if let hir::ExprKind::Lit(lit) = &arg.kind;
+ if let ast::LitKind::Str(r, style) = lit.node;
+ let string = r.as_str();
+ if string.chars().count() == 1;
+ then {
+ let snip = snippet_with_applicability(cx, arg.span, string, applicability);
+ let ch = if let ast::StrStyle::Raw(nhash) = style {
+ let nhash = nhash as usize;
+ // for raw string: r##"a"##
+ &snip[(nhash + 2)..(snip.len() - 1 - nhash)]
+ } else {
+ // for regular string: "a"
+ &snip[1..(snip.len() - 1)]
+ };
+
+ let hint = format!("'{}'", match ch {
+ "'" => "\\'" ,
+ r"\" => "\\\\",
+ _ => ch,
+ });
+
+ Some(hint)
+ } else {
+ None
+ }
+ }
+}
+
+/// The core logic of `check_for_loop_iter` in `unnecessary_iter_cloned.rs`, this function wraps a
+/// use of `CloneOrCopyVisitor`.
+pub(super) fn clone_or_copy_needed<'tcx>(
+ cx: &LateContext<'tcx>,
+ pat: &Pat<'tcx>,
+ body: &'tcx Expr<'tcx>,
+) -> (bool, Vec<&'tcx Expr<'tcx>>) {
+ let mut visitor = CloneOrCopyVisitor {
+ cx,
+ binding_hir_ids: pat_bindings(pat),
+ clone_or_copy_needed: false,
+ addr_of_exprs: Vec::new(),
+ };
+ visitor.visit_expr(body);
+ (visitor.clone_or_copy_needed, visitor.addr_of_exprs)
+}
+
+/// Returns a vector of all `HirId`s bound by the pattern.
+fn pat_bindings(pat: &Pat<'_>) -> Vec<HirId> {
+ let mut collector = usage::ParamBindingIdCollector {
+ binding_hir_ids: Vec::new(),
+ };
+ collector.visit_pat(pat);
+ collector.binding_hir_ids
+}
+
+/// `clone_or_copy_needed` will be false when `CloneOrCopyVisitor` is done visiting if the only
+/// operations performed on `binding_hir_ids` are:
+/// * to take non-mutable references to them
+/// * to use them as non-mutable `&self` in method calls
+/// If any of `binding_hir_ids` is used in any other way, then `clone_or_copy_needed` will be true
+/// when `CloneOrCopyVisitor` is done visiting.
+struct CloneOrCopyVisitor<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ binding_hir_ids: Vec<HirId>,
+ clone_or_copy_needed: bool,
+ addr_of_exprs: Vec<&'tcx Expr<'tcx>>,
+}
+
+impl<'cx, 'tcx> Visitor<'tcx> for CloneOrCopyVisitor<'cx, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
+ walk_expr(self, expr);
+ if self.is_binding(expr) {
+ if let Some(parent) = get_parent_expr(self.cx, expr) {
+ match parent.kind {
+ ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, _) => {
+ self.addr_of_exprs.push(parent);
+ return;
+ },
+ ExprKind::MethodCall(_, args, _) => {
+ if_chain! {
+ if args.iter().skip(1).all(|arg| !self.is_binding(arg));
+ if let Some(method_def_id) = self.cx.typeck_results().type_dependent_def_id(parent.hir_id);
+ let method_ty = self.cx.tcx.type_of(method_def_id);
+ let self_ty = method_ty.fn_sig(self.cx.tcx).input(0).skip_binder();
+ if matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Not));
+ then {
+ return;
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+ self.clone_or_copy_needed = true;
+ }
+ }
+}
+
+impl<'cx, 'tcx> CloneOrCopyVisitor<'cx, 'tcx> {
+ fn is_binding(&self, expr: &Expr<'tcx>) -> bool {
+ self.binding_hir_ids
+ .iter()
+ .any(|hir_id| path_to_local_id(expr, *hir_id))
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/methods/wrong_self_convention.rs b/src/tools/clippy/clippy_lints/src/methods/wrong_self_convention.rs
new file mode 100644
index 000000000..4b368d3ff
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/wrong_self_convention.rs
@@ -0,0 +1,154 @@
+use crate::methods::SelfKind;
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::is_copy;
+use rustc_lint::LateContext;
+use rustc_middle::ty::Ty;
+use rustc_span::source_map::Span;
+use std::fmt;
+
+use super::WRONG_SELF_CONVENTION;
+
+#[rustfmt::skip]
+const CONVENTIONS: [(&[Convention], &[SelfKind]); 9] = [
+ (&[Convention::Eq("new")], &[SelfKind::No]),
+ (&[Convention::StartsWith("as_")], &[SelfKind::Ref, SelfKind::RefMut]),
+ (&[Convention::StartsWith("from_")], &[SelfKind::No]),
+ (&[Convention::StartsWith("into_")], &[SelfKind::Value]),
+ (&[Convention::StartsWith("is_")], &[SelfKind::RefMut, SelfKind::Ref, SelfKind::No]),
+ (&[Convention::Eq("to_mut")], &[SelfKind::RefMut]),
+ (&[Convention::StartsWith("to_"), Convention::EndsWith("_mut")], &[SelfKind::RefMut]),
+
+ // Conversion using `to_` can use borrowed (non-Copy types) or owned (Copy types).
+ // Source: https://rust-lang.github.io/api-guidelines/naming.html#ad-hoc-conversions-follow-as_-to_-into_-conventions-c-conv
+ (&[Convention::StartsWith("to_"), Convention::NotEndsWith("_mut"), Convention::IsSelfTypeCopy(false),
+ Convention::IsTraitItem(false), Convention::ImplementsTrait(false)], &[SelfKind::Ref]),
+ (&[Convention::StartsWith("to_"), Convention::NotEndsWith("_mut"), Convention::IsSelfTypeCopy(true),
+ Convention::IsTraitItem(false), Convention::ImplementsTrait(false)], &[SelfKind::Value]),
+];
+
+enum Convention {
+ Eq(&'static str),
+ StartsWith(&'static str),
+ EndsWith(&'static str),
+ NotEndsWith(&'static str),
+ IsSelfTypeCopy(bool),
+ ImplementsTrait(bool),
+ IsTraitItem(bool),
+}
+
+impl Convention {
+ #[must_use]
+ fn check<'tcx>(
+ &self,
+ cx: &LateContext<'tcx>,
+ self_ty: Ty<'tcx>,
+ other: &str,
+ implements_trait: bool,
+ is_trait_item: bool,
+ ) -> bool {
+ match *self {
+ Self::Eq(this) => this == other,
+ Self::StartsWith(this) => other.starts_with(this) && this != other,
+ Self::EndsWith(this) => other.ends_with(this) && this != other,
+ Self::NotEndsWith(this) => !Self::EndsWith(this).check(cx, self_ty, other, implements_trait, is_trait_item),
+ Self::IsSelfTypeCopy(is_true) => is_true == is_copy(cx, self_ty),
+ Self::ImplementsTrait(is_true) => is_true == implements_trait,
+ Self::IsTraitItem(is_true) => is_true == is_trait_item,
+ }
+ }
+}
+
+impl fmt::Display for Convention {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ match *self {
+ Self::Eq(this) => format!("`{}`", this).fmt(f),
+ Self::StartsWith(this) => format!("`{}*`", this).fmt(f),
+ Self::EndsWith(this) => format!("`*{}`", this).fmt(f),
+ Self::NotEndsWith(this) => format!("`~{}`", this).fmt(f),
+ Self::IsSelfTypeCopy(is_true) => {
+ format!("`self` type is{} `Copy`", if is_true { "" } else { " not" }).fmt(f)
+ },
+ Self::ImplementsTrait(is_true) => {
+ let (negation, s_suffix) = if is_true { ("", "s") } else { (" does not", "") };
+ format!("method{} implement{} a trait", negation, s_suffix).fmt(f)
+ },
+ Self::IsTraitItem(is_true) => {
+ let suffix = if is_true { " is" } else { " is not" };
+ format!("method{} a trait item", suffix).fmt(f)
+ },
+ }
+ }
+}
+
+#[allow(clippy::too_many_arguments)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ item_name: &str,
+ self_ty: Ty<'tcx>,
+ first_arg_ty: Ty<'tcx>,
+ first_arg_span: Span,
+ implements_trait: bool,
+ is_trait_item: bool,
+) {
+ if let Some((conventions, self_kinds)) = &CONVENTIONS.iter().find(|(convs, _)| {
+ convs
+ .iter()
+ .all(|conv| conv.check(cx, self_ty, item_name, implements_trait, is_trait_item))
+ }) {
+ // don't lint if it implements a trait but not willing to check `Copy` types conventions (see #7032)
+ if implements_trait
+ && !conventions
+ .iter()
+ .any(|conv| matches!(conv, Convention::IsSelfTypeCopy(_)))
+ {
+ return;
+ }
+ if !self_kinds.iter().any(|k| k.matches(cx, self_ty, first_arg_ty)) {
+ let suggestion = {
+ if conventions.len() > 1 {
+ // Don't mention `NotEndsWith` when there is also `StartsWith` convention present
+ let cut_ends_with_conv = conventions.iter().any(|conv| matches!(conv, Convention::StartsWith(_)))
+ && conventions
+ .iter()
+ .any(|conv| matches!(conv, Convention::NotEndsWith(_)));
+
+ let s = conventions
+ .iter()
+ .filter_map(|conv| {
+ if (cut_ends_with_conv && matches!(conv, Convention::NotEndsWith(_)))
+ || matches!(conv, Convention::ImplementsTrait(_))
+ || matches!(conv, Convention::IsTraitItem(_))
+ {
+ None
+ } else {
+ Some(conv.to_string())
+ }
+ })
+ .collect::<Vec<_>>()
+ .join(" and ");
+
+ format!("methods with the following characteristics: ({})", &s)
+ } else {
+ format!("methods called {}", &conventions[0])
+ }
+ };
+
+ span_lint_and_help(
+ cx,
+ WRONG_SELF_CONVENTION,
+ first_arg_span,
+ &format!(
+ "{} usually take {}",
+ suggestion,
+ &self_kinds
+ .iter()
+ .map(|k| k.description())
+ .collect::<Vec<_>>()
+ .join(" or ")
+ ),
+ None,
+ "consider choosing a less ambiguous name",
+ );
+ }
+ }
+}
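+
+// A minimal sketch of a method this check flags (illustrative only; `S` is hypothetical):
+//
+//     struct S;
+//     impl S {
+//         fn into_inner(&self) -> S { S }   // flagged: methods called `into_*` usually take self by value
+//     }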
diff --git a/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs b/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs
new file mode 100644
index 000000000..e9f268da6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/zst_offset.rs
@@ -0,0 +1,18 @@
+use clippy_utils::diagnostics::span_lint;
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::ZST_OFFSET;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) {
+ if_chain! {
+ if let ty::RawPtr(ty::TypeAndMut { ty, .. }) = cx.typeck_results().expr_ty(recv).kind();
+ if let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(*ty));
+ if layout.is_zst();
+ then {
+ span_lint(cx, ZST_OFFSET, expr.span, "offset calculation on zero-sized value");
+ }
+ }
+}
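+
+// A minimal sketch of the call this check flags (illustrative only):
+//
+//     let p: *const () = &();
+//     let _ = unsafe { p.offset(1) };   // flagged: offset calculation on a zero-sized value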
diff --git a/src/tools/clippy/clippy_lints/src/minmax.rs b/src/tools/clippy/clippy_lints/src/minmax.rs
new file mode 100644
index 000000000..a081cde85
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/minmax.rs
@@ -0,0 +1,122 @@
+use clippy_utils::consts::{constant_simple, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{match_trait_method, paths};
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+use std::cmp::Ordering;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expressions where `std::cmp::min` and `max` are
+ /// used to clamp values, but switched so that the result is constant.
+ ///
+ /// ### Why is this bad?
+ /// This is in all probability not the intended outcome. At
+ /// the least it hurts readability of the code.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// min(0, max(100, x))
+ ///
+ /// // or
+ ///
+ /// x.max(100).min(0)
+ /// ```
+ /// It will always be equal to `0`. Probably the author meant to clamp the value
+ /// between 0 and 100, but has erroneously swapped `min` and `max`.
+ #[clippy::version = "pre 1.29.0"]
+ pub MIN_MAX,
+ correctness,
+ "`min(_, max(_, _))` (or vice versa) with bounds clamping the result to a constant"
+}
+
+declare_lint_pass!(MinMaxPass => [MIN_MAX]);
+
+impl<'tcx> LateLintPass<'tcx> for MinMaxPass {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let Some((outer_max, outer_c, oe)) = min_max(cx, expr) {
+ if let Some((inner_max, inner_c, ie)) = min_max(cx, oe) {
+ if outer_max == inner_max {
+ return;
+ }
+ match (
+ outer_max,
+ Constant::partial_cmp(cx.tcx, cx.typeck_results().expr_ty(ie), &outer_c, &inner_c),
+ ) {
+ (_, None) | (MinMax::Max, Some(Ordering::Less)) | (MinMax::Min, Some(Ordering::Greater)) => (),
+ _ => {
+ span_lint(
+ cx,
+ MIN_MAX,
+ expr.span,
+ "this `min`/`max` combination leads to constant result",
+ );
+ },
+ }
+ }
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+enum MinMax {
+ Min,
+ Max,
+}
+
+fn min_max<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<(MinMax, Constant, &'a Expr<'a>)> {
+ match expr.kind {
+ ExprKind::Call(path, args) => {
+ if let ExprKind::Path(ref qpath) = path.kind {
+ cx.typeck_results()
+ .qpath_res(qpath, path.hir_id)
+ .opt_def_id()
+ .and_then(|def_id| match cx.tcx.get_diagnostic_name(def_id) {
+ Some(sym::cmp_min) => fetch_const(cx, args, MinMax::Min),
+ Some(sym::cmp_max) => fetch_const(cx, args, MinMax::Max),
+ _ => None,
+ })
+ } else {
+ None
+ }
+ },
+ ExprKind::MethodCall(path, args, _) => {
+ if_chain! {
+ if let [obj, _] = args;
+ if cx.typeck_results().expr_ty(obj).is_floating_point() || match_trait_method(cx, expr, &paths::ORD);
+ then {
+ if path.ident.name == sym!(max) {
+ fetch_const(cx, args, MinMax::Max)
+ } else if path.ident.name == sym!(min) {
+ fetch_const(cx, args, MinMax::Min)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ }
+ },
+ _ => None,
+ }
+}
+
+fn fetch_const<'a>(cx: &LateContext<'_>, args: &'a [Expr<'a>], m: MinMax) -> Option<(MinMax, Constant, &'a Expr<'a>)> {
+ if args.len() != 2 {
+ return None;
+ }
+ constant_simple(cx, cx.typeck_results(), &args[0]).map_or_else(
+ || constant_simple(cx, cx.typeck_results(), &args[1]).map(|c| (m, c, &args[0])),
+ |c| {
+ if constant_simple(cx, cx.typeck_results(), &args[1]).is_none() {
+ // otherwise ignore
+ Some((m, c, &args[1]))
+ } else {
+ None
+ }
+ },
+ )
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc.rs b/src/tools/clippy/clippy_lints/src/misc.rs
new file mode 100644
index 000000000..8224e80c9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc.rs
@@ -0,0 +1,342 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_hir_and_then};
+use clippy_utils::source::{snippet, snippet_opt};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{
+ self as hir, def, BinOpKind, BindingAnnotation, Body, Expr, ExprKind, FnDecl, HirId, Mutability, PatKind, Stmt,
+ StmtKind, TyKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::hygiene::DesugaringKind;
+use rustc_span::source_map::{ExpnKind, Span};
+
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{get_parent_expr, in_constant, iter_input_pats, last_path_segment, SpanlessEq};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for function arguments and let bindings denoted as
+ /// `ref`.
+ ///
+ /// ### Why is this bad?
+ /// The `ref` declaration makes the function take an owned
+ /// value, but turns the argument into a reference (which means that the value
+    /// is destroyed when exiting the function). This adds little value: either
+ /// take a reference type, or take an owned value and create references in the
+ /// body.
+ ///
+ /// For let bindings, `let x = &foo;` is preferred over `let ref x = foo`. The
+ /// type of `x` is more obvious with the former.
+ ///
+ /// ### Known problems
+ /// If the argument is dereferenced within the function,
+ /// removing the `ref` will lead to errors. This can be fixed by removing the
+ /// dereferences, e.g., changing `*x` to `x` within the function.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(ref _x: u8) {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn foo(_x: &u8) {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TOPLEVEL_REF_ARG,
+ style,
+ "an entire binding declared as `ref`, in a function argument or a `let` statement"
+}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the use of bindings with a single leading
+ /// underscore.
+ ///
+ /// ### Why is this bad?
+ /// A single leading underscore is usually used to indicate
+ /// that a binding will not be used. Using such a binding breaks this
+ /// expectation.
+ ///
+ /// ### Known problems
+    /// The lint does not work properly with desugaring and
+    /// macros, so it has been allowed in the meantime.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _x = 0;
+ /// let y = _x + 1; // Here we are using `_x`, even though it has a leading
+ /// // underscore. We should rename `_x` to `x`
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USED_UNDERSCORE_BINDING,
+ pedantic,
+ "using a binding which is prefixed with an underscore"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for the use of short circuit boolean conditions as
+    /// a statement.
+ ///
+ /// ### Why is this bad?
+ /// Using a short circuit boolean condition as a statement
+ /// may hide the fact that the second part is executed or not depending on the
+ /// outcome of the first part.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// f() && g(); // We should write `if f() { g(); }`.
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SHORT_CIRCUIT_STATEMENT,
+ complexity,
+ "using a short circuit boolean condition as a statement"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Catch casts from `0` to some pointer type
+ ///
+ /// ### Why is this bad?
+ /// This generally means `null` and is better expressed as
+ /// {`std`, `core`}`::ptr::`{`null`, `null_mut`}.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = 0 as *const u32;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let a = std::ptr::null::<u32>();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ZERO_PTR,
+ style,
+ "using `0 as *{const, mut} T`"
+}
+
+declare_lint_pass!(MiscLints => [
+ TOPLEVEL_REF_ARG,
+ USED_UNDERSCORE_BINDING,
+ SHORT_CIRCUIT_STATEMENT,
+ ZERO_PTR,
+]);
+
+impl<'tcx> LateLintPass<'tcx> for MiscLints {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ k: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ _: HirId,
+ ) {
+ if let FnKind::Closure = k {
+ // Does not apply to closures
+ return;
+ }
+ if in_external_macro(cx.tcx.sess, span) {
+ return;
+ }
+ for arg in iter_input_pats(decl, body) {
+ if let PatKind::Binding(BindingAnnotation::Ref | BindingAnnotation::RefMut, ..) = arg.pat.kind {
+ span_lint(
+ cx,
+ TOPLEVEL_REF_ARG,
+ arg.pat.span,
+ "`ref` directly on a function argument is ignored. \
+ Consider using a reference type instead",
+ );
+ }
+ }
+ }
+
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ if_chain! {
+ if !in_external_macro(cx.tcx.sess, stmt.span);
+ if let StmtKind::Local(local) = stmt.kind;
+ if let PatKind::Binding(an, .., name, None) = local.pat.kind;
+ if let Some(init) = local.init;
+ if an == BindingAnnotation::Ref || an == BindingAnnotation::RefMut;
+ then {
+ // use the macro callsite when the init span (but not the whole local span)
+ // comes from an expansion like `vec![1, 2, 3]` in `let ref _ = vec![1, 2, 3];`
+ let sugg_init = if init.span.from_expansion() && !local.span.from_expansion() {
+ Sugg::hir_with_macro_callsite(cx, init, "..")
+ } else {
+ Sugg::hir(cx, init, "..")
+ };
+ let (mutopt, initref) = if an == BindingAnnotation::RefMut {
+ ("mut ", sugg_init.mut_addr())
+ } else {
+ ("", sugg_init.addr())
+ };
+ let tyopt = if let Some(ty) = local.ty {
+ format!(": &{mutopt}{ty}", mutopt=mutopt, ty=snippet(cx, ty.span, ".."))
+ } else {
+ String::new()
+ };
+ span_lint_hir_and_then(
+ cx,
+ TOPLEVEL_REF_ARG,
+ init.hir_id,
+ local.pat.span,
+ "`ref` on an entire `let` pattern is discouraged, take a reference with `&` instead",
+ |diag| {
+ diag.span_suggestion(
+ stmt.span,
+ "try",
+ format!(
+ "let {name}{tyopt} = {initref};",
+ name=snippet(cx, name.span, ".."),
+ tyopt=tyopt,
+ initref=initref,
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ );
+ }
+ };
+ if_chain! {
+ if let StmtKind::Semi(expr) = stmt.kind;
+ if let ExprKind::Binary(ref binop, a, b) = expr.kind;
+ if binop.node == BinOpKind::And || binop.node == BinOpKind::Or;
+ if let Some(sugg) = Sugg::hir_opt(cx, a);
+ then {
+ span_lint_hir_and_then(
+ cx,
+ SHORT_CIRCUIT_STATEMENT,
+ expr.hir_id,
+ stmt.span,
+ "boolean short circuit operator in statement may be clearer using an explicit test",
+ |diag| {
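+ // `a && b;` becomes `if a { b; }`; for `a || b;` the second operand only runs
+ // when `a` is false, so the suggested condition is negated.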
+ let sugg = if binop.node == BinOpKind::Or { !sugg } else { sugg };
+ diag.span_suggestion(
+ stmt.span,
+ "replace it with",
+ format!(
+ "if {} {{ {}; }}",
+ sugg,
+ &snippet(cx, b.span, ".."),
+ ),
+ Applicability::MachineApplicable, // snippet
+ );
+ });
+ }
+ };
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Cast(e, ty) = expr.kind {
+ check_cast(cx, expr.span, e, ty);
+ return;
+ }
+ if in_attributes_expansion(expr) || expr.span.is_desugaring(DesugaringKind::Await) {
+ // Don't lint things expanded by #[derive(...)], etc or `await` desugaring
+ return;
+ }
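+ // `sym` is declared out here so that the `&str` borrowed from it in the `Field`
+ // arm below can outlive the `match`.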
+ let sym;
+ let binding = match expr.kind {
+ ExprKind::Path(ref qpath) if !matches!(qpath, hir::QPath::LangItem(..)) => {
+ let binding = last_path_segment(qpath).ident.as_str();
+ if binding.starts_with('_') &&
+ !binding.starts_with("__") &&
+ binding != "_result" && // FIXME: #944
+ is_used(cx, expr) &&
+ // don't lint if the declaration is in a macro
+ non_macro_local(cx, cx.qpath_res(qpath, expr.hir_id))
+ {
+ Some(binding)
+ } else {
+ None
+ }
+ },
+ ExprKind::Field(_, ident) => {
+ sym = ident.name;
+ let name = sym.as_str();
+ if name.starts_with('_') && !name.starts_with("__") {
+ Some(name)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ };
+ if let Some(binding) = binding {
+ span_lint(
+ cx,
+ USED_UNDERSCORE_BINDING,
+ expr.span,
+ &format!(
+ "used binding `{}` which is prefixed with an underscore. A leading \
+ underscore signals that a binding will not be used",
+ binding
+ ),
+ );
+ }
+ }
+}
+
+/// Heuristic to see if an expression is used. Should be compatible with
+/// `unused_variables`'s idea of what it means for an expression to be "used".
+fn is_used(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
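+ // With no parent expression the value counts as used. For an assignment, the
+ // expression is used only when it is the right-hand side; otherwise keep
+ // walking up to the consuming expression.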
+ get_parent_expr(cx, expr).map_or(true, |parent| match parent.kind {
+ ExprKind::Assign(_, rhs, _) | ExprKind::AssignOp(_, _, rhs) => SpanlessEq::new(cx).eq_expr(rhs, expr),
+ _ => is_used(cx, parent),
+ })
+}
+
+/// Tests whether an expression is in a macro expansion (e.g., something
+/// generated by `#[derive(...)]` or the like).
+fn in_attributes_expansion(expr: &Expr<'_>) -> bool {
+ use rustc_span::hygiene::MacroKind;
+ if expr.span.from_expansion() {
+ let data = expr.span.ctxt().outer_expn_data();
+ matches!(data.kind, ExpnKind::Macro(MacroKind::Attr | MacroKind::Derive, _))
+ } else {
+ false
+ }
+}
+
+/// Tests whether `res` is a variable defined outside a macro.
+fn non_macro_local(cx: &LateContext<'_>, res: def::Res) -> bool {
+ if let def::Res::Local(id) = res {
+ !cx.tcx.hir().span(id).from_expansion()
+ } else {
+ false
+ }
+}
+
+fn check_cast(cx: &LateContext<'_>, span: Span, e: &Expr<'_>, ty: &hir::Ty<'_>) {
+ if_chain! {
+ if let TyKind::Ptr(ref mut_ty) = ty.kind;
+ if let ExprKind::Lit(ref lit) = e.kind;
+ if let LitKind::Int(0, _) = lit.node;
+ if !in_constant(cx, e.hir_id);
+ then {
+ let (msg, sugg_fn) = match mut_ty.mutbl {
+ Mutability::Mut => ("`0 as *mut _` detected", "std::ptr::null_mut"),
+ Mutability::Not => ("`0 as *const _` detected", "std::ptr::null"),
+ };
+
+ let (sugg, appl) = if let TyKind::Infer = mut_ty.ty.kind {
+ (format!("{}()", sugg_fn), Applicability::MachineApplicable)
+ } else if let Some(mut_ty_snip) = snippet_opt(cx, mut_ty.ty.span) {
+ (format!("{}::<{}>()", sugg_fn, mut_ty_snip), Applicability::MachineApplicable)
+ } else {
+ // `MaybeIncorrect` as type inference may not work with the suggested code
+ (format!("{}()", sugg_fn), Applicability::MaybeIncorrect)
+ };
+ span_lint_and_sugg(cx, ZERO_PTR, span, msg, "try", sugg, appl);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs b/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs
new file mode 100644
index 000000000..9f6b0bdc7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs
@@ -0,0 +1,19 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast::{GenericParam, GenericParamKind};
+use rustc_hir::PrimTy;
+use rustc_lint::EarlyContext;
+
+use super::BUILTIN_TYPE_SHADOW;
+
+pub(super) fn check(cx: &EarlyContext<'_>, param: &GenericParam) {
+ if let GenericParamKind::Type { .. } = param.kind {
+ if let Some(prim_ty) = PrimTy::from_name(param.ident.name) {
+ span_lint(
+ cx,
+ BUILTIN_TYPE_SHADOW,
+ param.ident.span,
+ &format!("this generic shadows the built-in type `{}`", prim_ty.name()),
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/double_neg.rs b/src/tools/clippy/clippy_lints/src/misc_early/double_neg.rs
new file mode 100644
index 000000000..06ba968fa
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/double_neg.rs
@@ -0,0 +1,18 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast::{Expr, ExprKind, UnOp};
+use rustc_lint::EarlyContext;
+
+use super::DOUBLE_NEG;
+
+pub(super) fn check(cx: &EarlyContext<'_>, expr: &Expr) {
+ if let ExprKind::Unary(UnOp::Neg, ref inner) = expr.kind {
+ if let ExprKind::Unary(UnOp::Neg, _) = inner.kind {
+ span_lint(
+ cx,
+ DOUBLE_NEG,
+ expr.span,
+ "`--x` could be misinterpreted as pre-decrement by C programmers, is usually a no-op",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/literal_suffix.rs b/src/tools/clippy/clippy_lints/src/misc_early/literal_suffix.rs
new file mode 100644
index 000000000..1165c19a0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/literal_suffix.rs
@@ -0,0 +1,38 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast::ast::Lit;
+use rustc_errors::Applicability;
+use rustc_lint::EarlyContext;
+
+use super::{SEPARATED_LITERAL_SUFFIX, UNSEPARATED_LITERAL_SUFFIX};
+
+pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str, suffix: &str, sugg_type: &str) {
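+ // Index of the character immediately before the suffix; for a separated literal this is the `_`.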
+ let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) {
+ val
+ } else {
+ return; // The snippet is no longer than the suffix, so there is nothing to lint.
+ };
+ // Do not lint when literal is unsuffixed.
+ if !suffix.is_empty() {
+ if lit_snip.as_bytes()[maybe_last_sep_idx] == b'_' {
+ span_lint_and_sugg(
+ cx,
+ SEPARATED_LITERAL_SUFFIX,
+ lit.span,
+ &format!("{} type suffix should not be separated by an underscore", sugg_type),
+ "remove the underscore",
+ format!("{}{}", &lit_snip[..maybe_last_sep_idx], suffix),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint_and_sugg(
+ cx,
+ UNSEPARATED_LITERAL_SUFFIX,
+ lit.span,
+ &format!("{} type suffix should be separated by an underscore", sugg_type),
+ "add an underscore",
+ format!("{}_{}", &lit_snip[..=maybe_last_sep_idx], suffix),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/mixed_case_hex_literals.rs b/src/tools/clippy/clippy_lints/src/misc_early/mixed_case_hex_literals.rs
new file mode 100644
index 000000000..80e242131
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/mixed_case_hex_literals.rs
@@ -0,0 +1,34 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast::Lit;
+use rustc_lint::EarlyContext;
+
+use super::MIXED_CASE_HEX_LITERALS;
+
+pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, suffix: &str, lit_snip: &str) {
+ let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) {
+ val
+ } else {
+ return; // The snippet is no longer than the suffix, so there is nothing to lint.
+ };
+ if maybe_last_sep_idx <= 2 {
+ // A single digit cannot have mixed case, and a smaller index would make the slice below panic.
+ return;
+ }
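+ // seen.0: a lowercase hex digit was found, seen.1: an uppercase one.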
+ let mut seen = (false, false);
+ for ch in lit_snip.as_bytes()[2..=maybe_last_sep_idx].iter() {
+ match ch {
+ b'a'..=b'f' => seen.0 = true,
+ b'A'..=b'F' => seen.1 = true,
+ _ => {},
+ }
+ if seen.0 && seen.1 {
+ span_lint(
+ cx,
+ MIXED_CASE_HEX_LITERALS,
+ lit.span,
+ "inconsistent casing in hexadecimal literal",
+ );
+ break;
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/mod.rs b/src/tools/clippy/clippy_lints/src/misc_early/mod.rs
new file mode 100644
index 000000000..704918c0b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/mod.rs
@@ -0,0 +1,416 @@
+mod builtin_type_shadow;
+mod double_neg;
+mod literal_suffix;
+mod mixed_case_hex_literals;
+mod redundant_pattern;
+mod unneeded_field_pattern;
+mod unneeded_wildcard_pattern;
+mod zero_prefixed_literal;
+
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::source::snippet_opt;
+use rustc_ast::ast::{Expr, ExprKind, Generics, Lit, LitFloatType, LitIntType, LitKind, NodeId, Pat, PatKind};
+use rustc_ast::visit::FnKind;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for structure field patterns bound to wildcards.
+ ///
+ /// ### Why is this bad?
+ /// Using `..` instead is shorter and leaves the focus on
+ /// the fields that are actually bound.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct Foo {
+ /// # a: i32,
+ /// # b: i32,
+ /// # c: i32,
+ /// # }
+ /// let f = Foo { a: 0, b: 0, c: 0 };
+ ///
+ /// match f {
+ /// Foo { a: _, b: 0, .. } => {},
+ /// Foo { a: _, b: _, c: _ } => {},
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # struct Foo {
+ /// # a: i32,
+ /// # b: i32,
+ /// # c: i32,
+ /// # }
+ /// let f = Foo { a: 0, b: 0, c: 0 };
+ ///
+ /// match f {
+ /// Foo { b: 0, .. } => {},
+ /// Foo { .. } => {},
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNNEEDED_FIELD_PATTERN,
+ restriction,
+ "struct fields bound to a wildcard instead of using `..`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for function arguments whose names differ
+ /// only by a leading underscore.
+ ///
+ /// ### Why is this bad?
+ /// It affects code readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(a: i32, _a: i32) {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn bar(a: i32, _b: i32) {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DUPLICATE_UNDERSCORE_ARGUMENT,
+ style,
+ "function arguments having names which only differ by an underscore"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects expressions of the form `--x`.
+ ///
+ /// ### Why is this bad?
+ /// It can mislead C/C++ programmers to think `x` was
+ /// decremented.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut x = 3;
+ /// --x;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DOUBLE_NEG,
+ style,
+ "`--x`, which is a double negation of `x` and not a pre-decrement as in C/C++"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns on hexadecimal literals with mixed-case letter
+ /// digits.
+ ///
+ /// ### Why is this bad?
+ /// It looks confusing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let _ =
+ /// 0x1a9BAcD
+ /// # ;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let _ =
+ /// 0x1A9BACD
+ /// # ;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MIXED_CASE_HEX_LITERALS,
+ style,
+ "hex literals whose letter digits are not consistently upper- or lowercased"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if literal suffixes are not separated by an
+ /// underscore.
+ /// To enforce unseparated literal suffix style,
+ /// see the `separated_literal_suffix` lint.
+ ///
+ /// ### Why is this bad?
+ /// Suffix style should be consistent.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let _ =
+ /// 123832i32
+ /// # ;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let _ =
+ /// 123832_i32
+ /// # ;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNSEPARATED_LITERAL_SUFFIX,
+ restriction,
+ "literals whose suffix is not separated by an underscore"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if literal suffixes are separated by an underscore.
+ /// To enforce separated literal suffix style,
+ /// see the `unseparated_literal_suffix` lint.
+ ///
+ /// ### Why is this bad?
+ /// Suffix style should be consistent.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let _ =
+ /// 123832_i32
+ /// # ;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let _ =
+ /// 123832i32
+ /// # ;
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub SEPARATED_LITERAL_SUFFIX,
+ restriction,
+ "literals whose suffix is separated by an underscore"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if an integral constant literal starts with `0`.
+ ///
+ /// ### Why is this bad?
+ /// In some languages (including the infamous C language and most of its
+ /// family), this marks an octal constant. In Rust, however, it is a decimal
+ /// constant. This can be confusing for both the writer and a reader of the
+ /// constant.
+ ///
+ /// ### Example
+ ///
+ /// In Rust:
+ /// ```rust
+ /// fn main() {
+ /// let a = 0123;
+ /// println!("{}", a);
+ /// }
+ /// ```
+ ///
+ /// prints `123`, while in C:
+ ///
+ /// ```c
+ /// #include <stdio.h>
+ ///
+ /// int main() {
+ /// int a = 0123;
+ /// printf("%d\n", a);
+ /// }
+ /// ```
+ ///
+ /// prints `83` (as `83 == 0o123` while `123 == 0o173`).
+ #[clippy::version = "pre 1.29.0"]
+ pub ZERO_PREFIXED_LITERAL,
+ complexity,
+ "integer literals starting with `0`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if a generic shadows a built-in type.
+ ///
+ /// ### Why is this bad?
+ /// This gives surprising type errors.
+ ///
+ /// ### Example
+ ///
+ /// ```ignore
+ /// impl<u32> Foo<u32> {
+ /// fn impl_func(&self) -> u32 {
+ /// 42
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BUILTIN_TYPE_SHADOW,
+ style,
+ "shadowing a builtin type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for patterns in the form `name @ _`.
+ ///
+ /// ### Why is this bad?
+ /// It's almost always more readable to just use direct
+ /// bindings.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let v = Some("abc");
+ /// match v {
+ /// Some(x) => (),
+ /// y @ _ => (),
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let v = Some("abc");
+ /// match v {
+ /// Some(x) => (),
+ /// y => (),
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub REDUNDANT_PATTERN,
+ style,
+ "using `name @ _` in a pattern"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for tuple patterns in which a wildcard
+ /// pattern (`_`) is next to a rest pattern (`..`).
+ ///
+ /// _NOTE_: While `_, ..` means there is at least one element left, `..`
+ /// means there are 0 or more elements left. This can make a difference
+ /// when refactoring, but shouldn't result in errors in the refactored code,
+ /// since the wildcard pattern isn't used anyway.
+ ///
+ /// ### Why is this bad?
+ /// The wildcard pattern is unneeded as the rest pattern
+ /// can match that element as well.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct TupleStruct(u32, u32, u32);
+ /// # let t = TupleStruct(1, 2, 3);
+ /// match t {
+ /// TupleStruct(0, .., _) => (),
+ /// _ => (),
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # struct TupleStruct(u32, u32, u32);
+ /// # let t = TupleStruct(1, 2, 3);
+ /// match t {
+ /// TupleStruct(0, ..) => (),
+ /// _ => (),
+ /// }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub UNNEEDED_WILDCARD_PATTERN,
+ complexity,
+ "tuple patterns with a wildcard pattern (`_`) is next to a rest pattern (`..`)"
+}
+
+declare_lint_pass!(MiscEarlyLints => [
+ UNNEEDED_FIELD_PATTERN,
+ DUPLICATE_UNDERSCORE_ARGUMENT,
+ DOUBLE_NEG,
+ MIXED_CASE_HEX_LITERALS,
+ UNSEPARATED_LITERAL_SUFFIX,
+ SEPARATED_LITERAL_SUFFIX,
+ ZERO_PREFIXED_LITERAL,
+ BUILTIN_TYPE_SHADOW,
+ REDUNDANT_PATTERN,
+ UNNEEDED_WILDCARD_PATTERN,
+]);
+
+impl EarlyLintPass for MiscEarlyLints {
+ fn check_generics(&mut self, cx: &EarlyContext<'_>, gen: &Generics) {
+ for param in &gen.params {
+ builtin_type_shadow::check(cx, param);
+ }
+ }
+
+ fn check_pat(&mut self, cx: &EarlyContext<'_>, pat: &Pat) {
+ unneeded_field_pattern::check(cx, pat);
+ redundant_pattern::check(cx, pat);
+ unneeded_wildcard_pattern::check(cx, pat);
+ }
+
+ fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, _: Span, _: NodeId) {
+ let mut registered_names: FxHashMap<String, Span> = FxHashMap::default();
+
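+ // Arguments without a leading underscore are recorded; an `_`-prefixed argument
+ // whose stripped name matches one of them triggers the lint.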
+ for arg in &fn_kind.decl().inputs {
+ if let PatKind::Ident(_, ident, None) = arg.pat.kind {
+ let arg_name = ident.to_string();
+
+ if let Some(arg_name) = arg_name.strip_prefix('_') {
+ if let Some(correspondence) = registered_names.get(arg_name) {
+ span_lint(
+ cx,
+ DUPLICATE_UNDERSCORE_ARGUMENT,
+ *correspondence,
+ &format!(
+ "`{}` already exists, having another argument having almost the same \
+ name makes code comprehension and documentation more difficult",
+ arg_name
+ ),
+ );
+ }
+ } else {
+ registered_names.insert(arg_name, arg.pat.span);
+ }
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Lit(ref lit) = expr.kind {
+ MiscEarlyLints::check_lit(cx, lit);
+ }
+ double_neg::check(cx, expr);
+ }
+}
+
+impl MiscEarlyLints {
+ fn check_lit(cx: &EarlyContext<'_>, lit: &Lit) {
+ // We test if first character in snippet is a number, because the snippet could be an expansion
+ // from a built-in macro like `line!()` or a proc-macro like `#[wasm_bindgen]`.
+ // Note that this check also covers special case that `line!()` is eagerly expanded by compiler.
+ // See <https://github.com/rust-lang/rust-clippy/issues/4507> for a regression.
+ // FIXME: Find a better way to detect those cases.
+ let lit_snip = match snippet_opt(cx, lit.span) {
+ Some(snip) if snip.chars().next().map_or(false, |c| c.is_ascii_digit()) => snip,
+ _ => return,
+ };
+
+ if let LitKind::Int(value, lit_int_type) = lit.kind {
+ let suffix = match lit_int_type {
+ LitIntType::Signed(ty) => ty.name_str(),
+ LitIntType::Unsigned(ty) => ty.name_str(),
+ LitIntType::Unsuffixed => "",
+ };
+ literal_suffix::check(cx, lit, &lit_snip, suffix, "integer");
+ if lit_snip.starts_with("0x") {
+ mixed_case_hex_literals::check(cx, lit, suffix, &lit_snip);
+ } else if lit_snip.starts_with("0b") || lit_snip.starts_with("0o") {
+ // nothing to do
+ } else if value != 0 && lit_snip.starts_with('0') {
+ zero_prefixed_literal::check(cx, lit, &lit_snip);
+ }
+ } else if let LitKind::Float(_, LitFloatType::Suffixed(float_ty)) = lit.kind {
+ let suffix = float_ty.name_str();
+ literal_suffix::check(cx, lit, &lit_snip, suffix, "float");
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs b/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs
new file mode 100644
index 000000000..525dbf775
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs
@@ -0,0 +1,31 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast::ast::{BindingMode, Mutability, Pat, PatKind};
+use rustc_errors::Applicability;
+use rustc_lint::EarlyContext;
+
+use super::REDUNDANT_PATTERN;
+
+pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) {
+ if let PatKind::Ident(left, ident, Some(ref right)) = pat.kind {
+ let left_binding = match left {
+ BindingMode::ByRef(Mutability::Mut) => "ref mut ",
+ BindingMode::ByRef(Mutability::Not) => "ref ",
+ BindingMode::ByValue(..) => "",
+ };
+
+ if let PatKind::Wild = right.kind {
+ span_lint_and_sugg(
+ cx,
+ REDUNDANT_PATTERN,
+ pat.span,
+ &format!(
+ "the `{} @ _` pattern can be written as just `{}`",
+ ident.name, ident.name,
+ ),
+ "try",
+ format!("{}{}", left_binding, ident.name),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/unneeded_field_pattern.rs b/src/tools/clippy/clippy_lints/src/misc_early/unneeded_field_pattern.rs
new file mode 100644
index 000000000..fff533167
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/unneeded_field_pattern.rs
@@ -0,0 +1,73 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::source::snippet_opt;
+use rustc_ast::ast::{Pat, PatKind};
+use rustc_lint::EarlyContext;
+
+use super::UNNEEDED_FIELD_PATTERN;
+
+pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) {
+ if let PatKind::Struct(_, ref npat, ref pfields, _) = pat.kind {
+ let mut wilds = 0;
+ let type_name = npat
+ .segments
+ .last()
+ .expect("A path must have at least one segment")
+ .ident
+ .name;
+
+ for field in pfields {
+ if let PatKind::Wild = field.pat.kind {
+ wilds += 1;
+ }
+ }
+ if !pfields.is_empty() && wilds == pfields.len() {
+ span_lint_and_help(
+ cx,
+ UNNEEDED_FIELD_PATTERN,
+ pat.span,
+ "all the struct fields are matched to a wildcard pattern, consider using `..`",
+ None,
+ &format!("try with `{} {{ .. }}` instead", type_name),
+ );
+ return;
+ }
+ if wilds > 0 {
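+ // Lint every wildcard field individually; for the last one, also suggest a
+ // pattern built from the remaining (non-wildcard) fields plus `..`.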
+ for field in pfields {
+ if let PatKind::Wild = field.pat.kind {
+ wilds -= 1;
+ if wilds > 0 {
+ span_lint(
+ cx,
+ UNNEEDED_FIELD_PATTERN,
+ field.span,
+ "you matched a field with a wildcard pattern, consider using `..` instead",
+ );
+ } else {
+ let mut normal = vec![];
+
+ for field in pfields {
+ match field.pat.kind {
+ PatKind::Wild => {},
+ _ => {
+ if let Some(n) = snippet_opt(cx, field.span) {
+ normal.push(n);
+ }
+ },
+ }
+ }
+
+ span_lint_and_help(
+ cx,
+ UNNEEDED_FIELD_PATTERN,
+ field.span,
+ "you matched a field with a wildcard pattern, consider using `..` \
+ instead",
+ None,
+ &format!("try with `{} {{ {}, .. }}`", type_name, normal[..].join(", ")),
+ );
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs b/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs
new file mode 100644
index 000000000..df044538f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs
@@ -0,0 +1,52 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast::ast::{Pat, PatKind};
+use rustc_errors::Applicability;
+use rustc_lint::EarlyContext;
+use rustc_span::source_map::Span;
+
+use super::UNNEEDED_WILDCARD_PATTERN;
+
+pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) {
+ if let PatKind::TupleStruct(_, _, ref patterns) | PatKind::Tuple(ref patterns) = pat.kind {
+ if let Some(rest_index) = patterns.iter().position(|pat| pat.is_rest()) {
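+ // Collect the runs of `_` patterns directly before and after the `..` pattern;
+ // they are redundant because `..` already matches those elements.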
+ if let Some((left_index, left_pat)) = patterns[..rest_index]
+ .iter()
+ .rev()
+ .take_while(|pat| matches!(pat.kind, PatKind::Wild))
+ .enumerate()
+ .last()
+ {
+ span_lint(cx, left_pat.span.until(patterns[rest_index].span), left_index == 0);
+ }
+
+ if let Some((right_index, right_pat)) = patterns[rest_index + 1..]
+ .iter()
+ .take_while(|pat| matches!(pat.kind, PatKind::Wild))
+ .enumerate()
+ .last()
+ {
+ span_lint(
+ cx,
+ patterns[rest_index].span.shrink_to_hi().to(right_pat.span),
+ right_index == 0,
+ );
+ }
+ }
+ }
+}
+
+fn span_lint(cx: &EarlyContext<'_>, span: Span, only_one: bool) {
+ span_lint_and_sugg(
+ cx,
+ UNNEEDED_WILDCARD_PATTERN,
+ span,
+ if only_one {
+ "this pattern is unneeded as the `..` pattern can match that element"
+ } else {
+ "these patterns are unneeded as the `..` pattern can match those elements"
+ },
+ if only_one { "remove it" } else { "remove them" },
+ "".to_string(),
+ Applicability::MachineApplicable,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/misc_early/zero_prefixed_literal.rs b/src/tools/clippy/clippy_lints/src/misc_early/zero_prefixed_literal.rs
new file mode 100644
index 000000000..4963bba82
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/misc_early/zero_prefixed_literal.rs
@@ -0,0 +1,29 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use rustc_ast::ast::Lit;
+use rustc_errors::Applicability;
+use rustc_lint::EarlyContext;
+
+use super::ZERO_PREFIXED_LITERAL;
+
+pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str) {
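+ // Offer two fixes: drop the leading zero(s) to keep a decimal constant, or
+ // switch to an explicit `0o` prefix for octal.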
+ span_lint_and_then(
+ cx,
+ ZERO_PREFIXED_LITERAL,
+ lit.span,
+ "this is a decimal constant",
+ |diag| {
+ diag.span_suggestion(
+ lit.span,
+ "if you mean to use a decimal constant, remove the `0` to avoid confusion",
+ lit_snip.trim_start_matches(|c| c == '_' || c == '0').to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ diag.span_suggestion(
+ lit.span,
+ "if you mean to use an octal constant, use `0o`",
+ format!("0o{}", lit_snip.trim_start_matches(|c| c == '_' || c == '0')),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs b/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs
new file mode 100644
index 000000000..f763e0d24
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs
@@ -0,0 +1,122 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{GenericArg, Item, ItemKind, QPath, Ty, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::GenericParamDefKind;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for type parameters which are positioned inconsistently between
+ /// a type definition and impl block. Specifically, a parameter in an impl
+ /// block which has the same name as a parameter in the type def, but is in
+ /// a different place.
+ ///
+ /// ### Why is this bad?
+ /// Type parameters are determined by their position rather than name.
+ /// Naming type parameters inconsistently may cause you to refer to the
+ /// wrong type parameter.
+ ///
+ /// ### Limitations
+ /// This lint only applies to impl blocks with simple generic params, e.g.
+ /// `A`. If there is anything more complicated, such as a tuple, it will be
+ /// ignored.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo<A, B> {
+ /// x: A,
+ /// y: B,
+ /// }
+ /// // inside the impl, B refers to Foo::A
+ /// impl<B, A> Foo<B, A> {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct Foo<A, B> {
+ /// x: A,
+ /// y: B,
+ /// }
+ /// impl<A, B> Foo<A, B> {}
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub MISMATCHING_TYPE_PARAM_ORDER,
+ pedantic,
+ "type parameter positioned inconsistently between type def and impl block"
+}
+declare_lint_pass!(TypeParamMismatch => [MISMATCHING_TYPE_PARAM_ORDER]);
+
+impl<'tcx> LateLintPass<'tcx> for TypeParamMismatch {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if_chain! {
+ if !item.span.from_expansion();
+ if let ItemKind::Impl(imp) = &item.kind;
+ if let TyKind::Path(QPath::Resolved(_, path)) = &imp.self_ty.kind;
+ if let Some(segment) = path.segments.iter().next();
+ if let Some(generic_args) = segment.args;
+ if !generic_args.args.is_empty();
+ then {
+ // get the name and span of the generic parameters in the Impl
+ let mut impl_params = Vec::new();
+ for p in generic_args.args.iter() {
+ match p {
+ GenericArg::Type(Ty {kind: TyKind::Path(QPath::Resolved(_, path)), ..}) =>
+ impl_params.push((path.segments[0].ident.to_string(), path.span)),
+ GenericArg::Type(_) => return,
+ _ => (),
+ };
+ }
+
+ // find the type that the Impl is for
+ // only lint on struct/enum/union for now
+ let defid = match path.res {
+ Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, defid) => defid,
+ _ => return,
+ };
+
+ // get the names of the generic parameters in the type
+ let type_params = &cx.tcx.generics_of(defid).params;
+ let type_param_names: Vec<_> = type_params.iter()
+ .filter_map(|p|
+ match p.kind {
+ GenericParamDefKind::Type {..} => Some(p.name.to_string()),
+ _ => None,
+ }
+ ).collect();
+ // hashmap of name -> index for mismatch_param_name
+ let type_param_names_hashmap: FxHashMap<&String, usize> =
+ type_param_names.iter().enumerate().map(|(i, param)| (param, i)).collect();
+
+ let type_name = segment.ident;
+ for (i, (impl_param_name, impl_param_span)) in impl_params.iter().enumerate() {
+ if mismatch_param_name(i, impl_param_name, &type_param_names_hashmap) {
+ let msg = format!("`{}` has a similarly named generic type parameter `{}` in its declaration, but in a different order",
+ type_name, impl_param_name);
+ let help = format!("try `{}`, or a name that does not conflict with `{}`'s generic params",
+ type_param_names[i], type_name);
+ span_lint_and_help(
+ cx,
+ MISMATCHING_TYPE_PARAM_ORDER,
+ *impl_param_span,
+ &msg,
+ None,
+ &help
+ );
+ }
+ }
+ }
+ }
+ }
+}
+
+// Checks if impl_param_name is the same as one of type_param_names,
+// and is in a different position
+fn mismatch_param_name(i: usize, impl_param_name: &String, type_param_names: &FxHashMap<&String, usize>) -> bool {
+ if let Some(j) = type_param_names.get(impl_param_name) {
+ if i != *j {
+ return true;
+ }
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
new file mode 100644
index 000000000..16d65966c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs
@@ -0,0 +1,174 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::qualify_min_const_fn::is_min_const_fn;
+use clippy_utils::ty::has_drop;
+use clippy_utils::{fn_has_unsatisfiable_preds, is_entrypoint_fn, meets_msrv, msrvs, trait_ref_of_method};
+use rustc_hir as hir;
+use rustc_hir::def_id::CRATE_DEF_ID;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, Constness, FnDecl, GenericParamKind, HirId};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
+use rustc_typeck::hir_ty_to_ty;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Suggests the use of `const` in functions and methods where possible.
+ ///
+ /// ### Why is this bad?
+ /// Not having the function const prevents callers of the function from being const as well.
+ ///
+ /// ### Known problems
+ /// Const functions are currently still being worked on, with some features only being available
+ /// on nightly. This lint does not consider all edge cases currently and the suggestions may be
+ /// incorrect if you are using this lint on stable.
+ ///
+ /// Also, the lint only runs one pass over the code. Consider these two non-const functions:
+ ///
+ /// ```rust
+ /// fn a() -> i32 {
+ /// 0
+ /// }
+ /// fn b() -> i32 {
+ /// a()
+ /// }
+ /// ```
+ ///
+ /// When running Clippy, the lint will only suggest to make `a` const, because `b` at this time
+ /// can't be const as it calls a non-const function. Making `a` const and running Clippy again,
+ /// will suggest to make `b` const, too.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct Foo {
+ /// # random_number: usize,
+ /// # }
+ /// # impl Foo {
+ /// fn new() -> Self {
+ /// Self { random_number: 42 }
+ /// }
+ /// # }
+ /// ```
+ ///
+ /// Could be a const fn:
+ ///
+ /// ```rust
+ /// # struct Foo {
+ /// # random_number: usize,
+ /// # }
+ /// # impl Foo {
+ /// const fn new() -> Self {
+ /// Self { random_number: 42 }
+ /// }
+ /// # }
+ /// ```
+ #[clippy::version = "1.34.0"]
+ pub MISSING_CONST_FOR_FN,
+ nursery,
+ "Lint functions definitions that could be made `const fn`"
+}
+
+impl_lint_pass!(MissingConstForFn => [MISSING_CONST_FOR_FN]);
+
+pub struct MissingConstForFn {
+ msrv: Option<RustcVersion>,
+}
+
+impl MissingConstForFn {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for MissingConstForFn {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'_>,
+ kind: FnKind<'_>,
+ _: &FnDecl<'_>,
+ _: &Body<'_>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ if !meets_msrv(self.msrv, msrvs::CONST_IF_MATCH) {
+ return;
+ }
+
+ let def_id = cx.tcx.hir().local_def_id(hir_id);
+
+ if in_external_macro(cx.tcx.sess, span) || is_entrypoint_fn(cx, def_id.to_def_id()) {
+ return;
+ }
+
+ // Building MIR for `fn`s with unsatisfiable preds results in ICE.
+ if fn_has_unsatisfiable_preds(cx, def_id.to_def_id()) {
+ return;
+ }
+
+ // Perform some preliminary checks that rule out constness on the Clippy side. This way we
+ // can skip the actual const check and return early.
+ match kind {
+ FnKind::ItemFn(_, generics, header, ..) => {
+ let has_const_generic_params = generics
+ .params
+ .iter()
+ .any(|param| matches!(param.kind, GenericParamKind::Const { .. }));
+
+ if already_const(header) || has_const_generic_params {
+ return;
+ }
+ },
+ FnKind::Method(_, sig, ..) => {
+ if trait_ref_of_method(cx, def_id).is_some()
+ || already_const(sig.header)
+ || method_accepts_dropable(cx, sig.decl.inputs)
+ {
+ return;
+ }
+ },
+ FnKind::Closure => return,
+ }
+
+ // Const fns are not allowed as methods in a trait.
+ {
+ let parent = cx.tcx.hir().get_parent_item(hir_id);
+ if parent != CRATE_DEF_ID {
+ if let hir::Node::Item(item) = cx.tcx.hir().get_by_def_id(parent) {
+ if let hir::ItemKind::Trait(..) = &item.kind {
+ return;
+ }
+ }
+ }
+ }
+
+ let mir = cx.tcx.optimized_mir(def_id);
+
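+ // If the MIR fails the min-const-fn check while the function is already `const`,
+ // report the error; otherwise the check passed and `const` can be suggested.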
+ if let Err((span, err)) = is_min_const_fn(cx.tcx, mir, self.msrv) {
+ if cx.tcx.is_const_fn_raw(def_id.to_def_id()) {
+ cx.tcx.sess.span_err(span, err.as_ref());
+ }
+ } else {
+ span_lint(cx, MISSING_CONST_FOR_FN, span, "this could be a `const fn`");
+ }
+ }
+ extract_msrv_attr!(LateContext);
+}
+
+/// Returns true if any of the method parameters is a type that implements `Drop`. The method
+/// can't be made const then, because `drop` can't be const-evaluated.
+fn method_accepts_dropable(cx: &LateContext<'_>, param_tys: &[hir::Ty<'_>]) -> bool {
+ // If any of the params are droppable, return true
+ param_tys.iter().any(|hir_ty| {
+ let ty_ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ has_drop(cx, ty_ty)
+ })
+}
+
+// We don't have to lint on something that's already `const`
+#[must_use]
+fn already_const(header: hir::FnHeader) -> bool {
+ header.constness == Constness::Const
+}
diff --git a/src/tools/clippy/clippy_lints/src/missing_doc.rs b/src/tools/clippy/clippy_lints/src/missing_doc.rs
new file mode 100644
index 000000000..88ba00292
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/missing_doc.rs
@@ -0,0 +1,180 @@
+// Note: this lint is largely inspired by (i.e., mostly copied from) *rustc*'s
+// [`missing_doc`] lint.
+//
+// [`missing_doc`]: https://github.com/rust-lang/rust/blob/cf9cf7c923eb01146971429044f216a3ca905e06/compiler/rustc_lint/src/builtin.rs#L415
+//
+
+use clippy_utils::attrs::is_doc_hidden;
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::ty::DefIdTree;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::CRATE_DEF_ID;
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns if there is missing doc for any documentable item
+ /// (public or private).
+ ///
+ /// ### Why is this bad?
+ /// Doc is good. *rustc* has a `MISSING_DOCS`
+ /// allowed-by-default lint for
+ /// public members, but has no way to enforce documentation of private items.
+ /// This lint fixes that.
+ #[clippy::version = "pre 1.29.0"]
+ pub MISSING_DOCS_IN_PRIVATE_ITEMS,
+ restriction,
+ "detects missing documentation for public and private members"
+}
+
+pub struct MissingDoc {
+ /// Stack of whether #[doc(hidden)] is set
+ /// at each level which has lint attributes.
+ doc_hidden_stack: Vec<bool>,
+}
+
+impl Default for MissingDoc {
+ #[must_use]
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl MissingDoc {
+ #[must_use]
+ pub fn new() -> Self {
+ Self {
+ doc_hidden_stack: vec![false],
+ }
+ }
+
+ fn doc_hidden(&self) -> bool {
+ *self.doc_hidden_stack.last().expect("empty doc_hidden_stack")
+ }
+
+ fn check_missing_docs_attrs(
+ &self,
+ cx: &LateContext<'_>,
+ attrs: &[ast::Attribute],
+ sp: Span,
+ article: &'static str,
+ desc: &'static str,
+ ) {
+ // If we're building a test harness, then warning about
+ // documentation is probably not really relevant right now.
+ if cx.sess().opts.test {
+ return;
+ }
+
+ // `#[doc(hidden)]` disables missing_docs check.
+ if self.doc_hidden() {
+ return;
+ }
+
+ if sp.from_expansion() {
+ return;
+ }
+
+ let has_doc = attrs.iter().any(|a| a.doc_str().is_some());
+ if !has_doc {
+ span_lint(
+ cx,
+ MISSING_DOCS_IN_PRIVATE_ITEMS,
+ sp,
+ &format!("missing documentation for {} {}", article, desc),
+ );
+ }
+ }
+}
+
+impl_lint_pass!(MissingDoc => [MISSING_DOCS_IN_PRIVATE_ITEMS]);
+
+impl<'tcx> LateLintPass<'tcx> for MissingDoc {
+ fn enter_lint_attrs(&mut self, _: &LateContext<'tcx>, attrs: &'tcx [ast::Attribute]) {
+ let doc_hidden = self.doc_hidden() || is_doc_hidden(attrs);
+ self.doc_hidden_stack.push(doc_hidden);
+ }
+
+ fn exit_lint_attrs(&mut self, _: &LateContext<'tcx>, _: &'tcx [ast::Attribute]) {
+ self.doc_hidden_stack.pop().expect("empty doc_hidden_stack");
+ }
+
+ fn check_crate(&mut self, cx: &LateContext<'tcx>) {
+ let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID);
+ self.check_missing_docs_attrs(cx, attrs, cx.tcx.def_span(CRATE_DEF_ID), "the", "crate");
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, it: &'tcx hir::Item<'_>) {
+ match it.kind {
+ hir::ItemKind::Fn(..) => {
+ // ignore main()
+ if it.ident.name == sym::main {
+ let at_root = cx.tcx.local_parent(it.def_id) == CRATE_DEF_ID;
+ if at_root {
+ return;
+ }
+ }
+ },
+ hir::ItemKind::Const(..)
+ | hir::ItemKind::Enum(..)
+ | hir::ItemKind::Macro(..)
+ | hir::ItemKind::Mod(..)
+ | hir::ItemKind::Static(..)
+ | hir::ItemKind::Struct(..)
+ | hir::ItemKind::Trait(..)
+ | hir::ItemKind::TraitAlias(..)
+ | hir::ItemKind::TyAlias(..)
+ | hir::ItemKind::Union(..)
+ | hir::ItemKind::OpaqueTy(..) => {},
+ hir::ItemKind::ExternCrate(..)
+ | hir::ItemKind::ForeignMod { .. }
+ | hir::ItemKind::GlobalAsm(..)
+ | hir::ItemKind::Impl { .. }
+ | hir::ItemKind::Use(..) => return,
+ };
+
+ let (article, desc) = cx.tcx.article_and_description(it.def_id.to_def_id());
+
+ let attrs = cx.tcx.hir().attrs(it.hir_id());
+ self.check_missing_docs_attrs(cx, attrs, it.span, article, desc);
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, trait_item: &'tcx hir::TraitItem<'_>) {
+ let (article, desc) = cx.tcx.article_and_description(trait_item.def_id.to_def_id());
+
+ let attrs = cx.tcx.hir().attrs(trait_item.hir_id());
+ self.check_missing_docs_attrs(cx, attrs, trait_item.span, article, desc);
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx hir::ImplItem<'_>) {
+ // If the method is an impl for a trait, don't doc.
+ if let Some(cid) = cx.tcx.associated_item(impl_item.def_id).impl_container(cx.tcx) {
+ if cx.tcx.impl_trait_ref(cid).is_some() {
+ return;
+ }
+ } else {
+ return;
+ }
+
+ let (article, desc) = cx.tcx.article_and_description(impl_item.def_id.to_def_id());
+ let attrs = cx.tcx.hir().attrs(impl_item.hir_id());
+ self.check_missing_docs_attrs(cx, attrs, impl_item.span, article, desc);
+ }
+
+ fn check_field_def(&mut self, cx: &LateContext<'tcx>, sf: &'tcx hir::FieldDef<'_>) {
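+ // Positional (tuple struct) fields are skipped; only named fields are checked.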
+ if !sf.is_positional() {
+ let attrs = cx.tcx.hir().attrs(sf.hir_id);
+ self.check_missing_docs_attrs(cx, attrs, sf.span, "a", "struct field");
+ }
+ }
+
+ fn check_variant(&mut self, cx: &LateContext<'tcx>, v: &'tcx hir::Variant<'_>) {
+ let attrs = cx.tcx.hir().attrs(v.id);
+ self.check_missing_docs_attrs(cx, attrs, v.span, "a", "variant");
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
new file mode 100644
index 000000000..3d0a23822
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
@@ -0,0 +1,102 @@
+use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_opt};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_errors::Applicability;
+use rustc_hir::{def::Res, def_id::DefId, Item, ItemKind, UseKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Symbol;
+
+use crate::utils::conf::Rename;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for imports that do not rename the item as specified
+ /// in the `enforce-import-renames` config option.
+ ///
+ /// ### Why is this bad?
+ /// Consistency is important: if a project has defined import renames, they
+ /// should be followed. More practically, some item names are too vague outside
+ /// of their defining scope; this lint can enforce more meaningful naming.
+ ///
+ /// ### Example
+ /// An example clippy.toml configuration:
+ /// ```toml
+ /// # clippy.toml
+ /// enforced-import-renames = [ { path = "serde_json::Value", rename = "JsonValue" }]
+ /// ```
+ ///
+ /// ```rust,ignore
+ /// use serde_json::Value;
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// use serde_json::Value as JsonValue;
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub MISSING_ENFORCED_IMPORT_RENAMES,
+ restriction,
+ "enforce import renames"
+}
+
+pub struct ImportRename {
+ conf_renames: Vec<Rename>,
+ renames: FxHashMap<DefId, Symbol>,
+}
+
+impl ImportRename {
+ pub fn new(conf_renames: Vec<Rename>) -> Self {
+ Self {
+ conf_renames,
+ renames: FxHashMap::default(),
+ }
+ }
+}
+
+impl_lint_pass!(ImportRename => [MISSING_ENFORCED_IMPORT_RENAMES]);
+
+impl LateLintPass<'_> for ImportRename {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ for Rename { path, rename } in &self.conf_renames {
+ if let Res::Def(_, id) = clippy_utils::def_path_res(cx, &path.split("::").collect::<Vec<_>>()) {
+ self.renames.insert(id, Symbol::intern(rename));
+ }
+ }
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if_chain! {
+ if let ItemKind::Use(path, UseKind::Single) = &item.kind;
+ if let Res::Def(_, id) = path.res;
+ if let Some(name) = self.renames.get(&id);
+ // Remove semicolon since it is not present for nested imports
+ let span_without_semi = cx.sess().source_map().span_until_char(item.span, ';');
+ if let Some(snip) = snippet_opt(cx, span_without_semi);
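+ // Skip imports that already use the configured rename; otherwise keep the part
+ // before ` as ` (or the whole snippet) for the suggestion.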
+ if let Some(import) = match snip.split_once(" as ") {
+ None => Some(snip.as_str()),
+ Some((import, rename)) => {
+ if rename.trim() == name.as_str() {
+ None
+ } else {
+ Some(import.trim())
+ }
+ },
+ };
+ then {
+ span_lint_and_sugg(
+ cx,
+ MISSING_ENFORCED_IMPORT_RENAMES,
+ span_without_semi,
+ "this import should be renamed",
+ "try",
+ format!(
+ "{} as {}",
+ import,
+ name,
+ ),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs
new file mode 100644
index 000000000..07bc2ca5d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs
@@ -0,0 +1,172 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast;
+use rustc_hir as hir;
+use rustc_lint::{self, LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It lints if an exported function, method, trait method with default impl,
+ /// or trait method impl is not `#[inline]`.
+ ///
+ /// ### Why is this bad?
+ /// In general, it is not. Functions can be inlined across
+ /// crates when that's profitable as long as any form of LTO is used. When LTO is disabled,
+ /// functions that are not `#[inline]` cannot be inlined across crates. Certain types of crates
+ /// might intend for most of the methods in their public API to be able to be inlined across
+ /// crates even when LTO is disabled. For these types of crates, enabling this lint might make
+ /// sense. It allows the crate to require all exported methods to be `#[inline]` by default, and
+ /// then opt out for specific methods where this might not make sense.
+ ///
+ /// ### Example
+ /// ```rust
+ /// pub fn foo() {} // missing #[inline]
+ /// fn ok() {} // ok
+ /// #[inline] pub fn bar() {} // ok
+ /// #[inline(always)] pub fn baz() {} // ok
+ ///
+ /// pub trait Bar {
+ /// fn bar(); // ok
+ /// fn def_bar() {} // missing #[inline]
+ /// }
+ ///
+ /// struct Baz;
+ /// impl Baz {
+ /// fn private() {} // ok
+ /// }
+ ///
+ /// impl Bar for Baz {
+ /// fn bar() {} // ok - Baz is not exported
+ /// }
+ ///
+ /// pub struct PubBaz;
+ /// impl PubBaz {
+ /// fn private() {} // ok
+ /// pub fn not_private() {} // missing #[inline]
+ /// }
+ ///
+ /// impl Bar for PubBaz {
+ /// fn bar() {} // missing #[inline]
+ /// fn def_bar() {} // missing #[inline]
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MISSING_INLINE_IN_PUBLIC_ITEMS,
+ restriction,
+ "detects missing `#[inline]` attribute for public callables (functions, trait methods, methods...)"
+}
+
+fn check_missing_inline_attrs(cx: &LateContext<'_>, attrs: &[ast::Attribute], sp: Span, desc: &'static str) {
+ let has_inline = attrs.iter().any(|a| a.has_name(sym::inline));
+ if !has_inline {
+ span_lint(
+ cx,
+ MISSING_INLINE_IN_PUBLIC_ITEMS,
+ sp,
+ &format!("missing `#[inline]` for {}", desc),
+ );
+ }
+}
+
+fn is_executable_or_proc_macro(cx: &LateContext<'_>) -> bool {
+ use rustc_session::config::CrateType;
+
+ cx.tcx
+ .sess
+ .crate_types()
+ .iter()
+ .any(|t: &CrateType| matches!(t, CrateType::Executable | CrateType::ProcMacro))
+}
+
+declare_lint_pass!(MissingInline => [MISSING_INLINE_IN_PUBLIC_ITEMS]);
+
+impl<'tcx> LateLintPass<'tcx> for MissingInline {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, it: &'tcx hir::Item<'_>) {
+ if rustc_middle::lint::in_external_macro(cx.sess(), it.span) || is_executable_or_proc_macro(cx) {
+ return;
+ }
+
+ if !cx.access_levels.is_exported(it.def_id) {
+ return;
+ }
+ match it.kind {
+ hir::ItemKind::Fn(..) => {
+ let desc = "a function";
+ let attrs = cx.tcx.hir().attrs(it.hir_id());
+ check_missing_inline_attrs(cx, attrs, it.span, desc);
+ },
+ hir::ItemKind::Trait(ref _is_auto, ref _unsafe, _generics, _bounds, trait_items) => {
+ // note: we need to check if the trait is exported so we can't use
+ // `LateLintPass::check_trait_item` here.
+ for tit in trait_items {
+ let tit_ = cx.tcx.hir().trait_item(tit.id);
+ match tit_.kind {
+ hir::TraitItemKind::Const(..) | hir::TraitItemKind::Type(..) => {},
+ hir::TraitItemKind::Fn(..) => {
+ if cx.tcx.impl_defaultness(tit.id.def_id).has_value() {
+ // trait method with default body needs inline in case
+ // an impl is not provided
+ let desc = "a default trait method";
+ let item = cx.tcx.hir().trait_item(tit.id);
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ check_missing_inline_attrs(cx, attrs, item.span, desc);
+ }
+ },
+ }
+ }
+ },
+ hir::ItemKind::Const(..)
+ | hir::ItemKind::Enum(..)
+ | hir::ItemKind::Macro(..)
+ | hir::ItemKind::Mod(..)
+ | hir::ItemKind::Static(..)
+ | hir::ItemKind::Struct(..)
+ | hir::ItemKind::TraitAlias(..)
+ | hir::ItemKind::GlobalAsm(..)
+ | hir::ItemKind::TyAlias(..)
+ | hir::ItemKind::Union(..)
+ | hir::ItemKind::OpaqueTy(..)
+ | hir::ItemKind::ExternCrate(..)
+ | hir::ItemKind::ForeignMod { .. }
+ | hir::ItemKind::Impl { .. }
+ | hir::ItemKind::Use(..) => {},
+ };
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx hir::ImplItem<'_>) {
+ use rustc_middle::ty::{ImplContainer, TraitContainer};
+ if rustc_middle::lint::in_external_macro(cx.sess(), impl_item.span) || is_executable_or_proc_macro(cx) {
+ return;
+ }
+
+ // If the item being implemented is not exported, then we don't need #[inline]
+ if !cx.access_levels.is_exported(impl_item.def_id) {
+ return;
+ }
+
+ let desc = match impl_item.kind {
+ hir::ImplItemKind::Fn(..) => "a method",
+ hir::ImplItemKind::Const(..) | hir::ImplItemKind::TyAlias(_) => return,
+ };
+
+ let assoc_item = cx.tcx.associated_item(impl_item.def_id);
+ let container_id = assoc_item.container_id(cx.tcx);
+ let trait_def_id = match assoc_item.container {
+ TraitContainer => Some(container_id),
+ ImplContainer => cx.tcx.impl_trait_ref(container_id).map(|t| t.def_id),
+ };
+
+ if let Some(trait_def_id) = trait_def_id {
+ if trait_def_id.is_local() && !cx.access_levels.is_exported(impl_item.def_id) {
+ // If a trait is being implemented for an item, and the
+ // trait is not exported, we don't need #[inline]
+ return;
+ }
+ }
+
+ let attrs = cx.tcx.hir().attrs(impl_item.hir_id());
+ check_missing_inline_attrs(cx, attrs, impl_item.span, desc);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
new file mode 100644
index 000000000..a2419c277
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
@@ -0,0 +1,350 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_note};
+use clippy_utils::{get_parent_expr, path_to_local, path_to_local_id};
+use if_chain::if_chain;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{BinOpKind, Block, Expr, ExprKind, Guard, HirId, Local, Node, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a read and a write to the same variable where
+ /// whether the read occurs before or after the write depends on the evaluation
+ /// order of sub-expressions.
+ ///
+ /// ### Why is this bad?
+ /// It is often confusing to read. As described [here](https://doc.rust-lang.org/reference/expressions.html?highlight=subexpression#evaluation-order-of-operands),
+ /// the operands of these expressions are evaluated before applying the effects of the expression.
+ ///
+ /// ### Known problems
+ /// Code which intentionally depends on the evaluation
+ /// order, or which is correct for any evaluation order.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut x = 0;
+ ///
+ /// let a = {
+ /// x = 1;
+ /// 1
+ /// } + x;
+ /// // Unclear whether a is 1 or 2.
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let mut x = 0;
+ /// let tmp = {
+ /// x = 1;
+ /// 1
+ /// };
+ /// let a = tmp + x;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MIXED_READ_WRITE_IN_EXPRESSION,
+ restriction,
+ "whether a variable read occurs before a write depends on sub-expression evaluation order"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for diverging calls that are not match arms or
+ /// statements.
+ ///
+ /// ### Why is this bad?
+ /// It is often confusing to read. In addition, the
+ /// sub-expression evaluation order for Rust is not well documented.
+ ///
+ /// ### Known problems
+ /// Someone might want to use `some_bool || panic!()` as a
+ /// shorthand.
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// # fn b() -> bool { true }
+ /// # fn c() -> bool { true }
+ /// let a = b() || panic!() || c();
+ /// // `c()` is dead, `panic!()` is only called if `b()` returns `false`
+ /// let x = (a, b, c, panic!());
+ /// // can simply be replaced by `panic!()`
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DIVERGING_SUB_EXPRESSION,
+ complexity,
+ "whether an expression contains a diverging sub expression"
+}
+
+declare_lint_pass!(EvalOrderDependence => [MIXED_READ_WRITE_IN_EXPRESSION, DIVERGING_SUB_EXPRESSION]);
+
+impl<'tcx> LateLintPass<'tcx> for EvalOrderDependence {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // Find a write to a local variable.
+ let var = if_chain! {
+ if let ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) = expr.kind;
+ if let Some(var) = path_to_local(lhs);
+ if expr.span.desugaring_kind().is_none();
+ then { var } else { return; }
+ };
+ let mut visitor = ReadVisitor {
+ cx,
+ var,
+ write_expr: expr,
+ last_expr: expr,
+ };
+ check_for_unsequenced_reads(&mut visitor);
+ }
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ match stmt.kind {
+ StmtKind::Local(local) => {
+ if let Local { init: Some(e), .. } = local {
+ DivergenceVisitor { cx }.visit_expr(e);
+ }
+ },
+ StmtKind::Expr(e) | StmtKind::Semi(e) => DivergenceVisitor { cx }.maybe_walk_expr(e),
+ StmtKind::Item(..) => {},
+ }
+ }
+}
+
+struct DivergenceVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> DivergenceVisitor<'a, 'tcx> {
+ fn maybe_walk_expr(&mut self, e: &'tcx Expr<'_>) {
+ match e.kind {
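+ // A closure body is not evaluated when the closure expression is, so do not
+ // search it for diverging sub-expressions.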
+ ExprKind::Closure { .. } => {},
+ ExprKind::Match(e, arms, _) => {
+ self.visit_expr(e);
+ for arm in arms {
+ if let Some(Guard::If(if_expr)) = arm.guard {
+ self.visit_expr(if_expr);
+ }
+ // make sure top level arm expressions aren't linted
+ self.maybe_walk_expr(arm.body);
+ }
+ },
+ _ => walk_expr(self, e),
+ }
+ }
+ fn report_diverging_sub_expr(&mut self, e: &Expr<'_>) {
+ span_lint(self.cx, DIVERGING_SUB_EXPRESSION, e.span, "sub-expression diverges");
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for DivergenceVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ match e.kind {
+ ExprKind::Continue(_) | ExprKind::Break(_, _) | ExprKind::Ret(_) => self.report_diverging_sub_expr(e),
+ ExprKind::Call(func, _) => {
+ let typ = self.cx.typeck_results().expr_ty(func);
+ match typ.kind() {
+ ty::FnDef(..) | ty::FnPtr(_) => {
+ let sig = typ.fn_sig(self.cx.tcx);
+ if self.cx.tcx.erase_late_bound_regions(sig).output().kind() == &ty::Never {
+ self.report_diverging_sub_expr(e);
+ }
+ },
+ _ => {},
+ }
+ },
+ ExprKind::MethodCall(..) => {
+ let borrowed_table = self.cx.typeck_results();
+ if borrowed_table.expr_ty(e).is_never() {
+ self.report_diverging_sub_expr(e);
+ }
+ },
+ _ => {
+ // do not lint expressions referencing objects of type `!`, as that
+ // requires a diverging expression to begin with
+ },
+ }
+ self.maybe_walk_expr(e);
+ }
+ fn visit_block(&mut self, _: &'tcx Block<'_>) {
+ // don't continue over blocks, LateLintPass already does that
+ }
+}
+
+/// Walks up the AST from the given write expression (`vis.write_expr`) looking
+/// for reads to the same variable that are unsequenced relative to the write.
+///
+/// This means reads for which there is a common ancestor between the read and
+/// the write such that
+///
+/// * evaluating the ancestor necessarily evaluates both the read and the write (for example, `&x`
+/// and `|| x = 1` don't necessarily evaluate `x`), and
+///
+/// * which one is evaluated first depends on the order of sub-expression evaluation. Blocks, `if`s,
+/// loops, `match`es, and the short-circuiting logical operators are considered to have a defined
+/// evaluation order.
+///
+/// When such a read is found, the lint is triggered.
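+///
+/// For example, in `let a = { x = 1; 1 } + x;` the read of `x` on the
+/// right-hand side of the `+` is unsequenced relative to the write inside the
+/// block, so the lint reports it.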
+fn check_for_unsequenced_reads(vis: &mut ReadVisitor<'_, '_>) {
+ let map = &vis.cx.tcx.hir();
+ let mut cur_id = vis.write_expr.hir_id;
+ loop {
+ let parent_id = map.get_parent_node(cur_id);
+ if parent_id == cur_id {
+ break;
+ }
+ let parent_node = match map.find(parent_id) {
+ Some(parent) => parent,
+ None => break,
+ };
+
+ let stop_early = match parent_node {
+ Node::Expr(expr) => check_expr(vis, expr),
+ Node::Stmt(stmt) => check_stmt(vis, stmt),
+ Node::Item(_) => {
+ // We reached the top of the function, stop.
+ break;
+ },
+ _ => StopEarly::KeepGoing,
+ };
+ match stop_early {
+ StopEarly::Stop => break,
+ StopEarly::KeepGoing => {},
+ }
+
+ cur_id = parent_id;
+ }
+}
+
+/// Whether to stop early for the loop in `check_for_unsequenced_reads`. (If
+/// `check_expr` weren't an independent function, this would be unnecessary and
+/// we could just use `break`).
+enum StopEarly {
+ KeepGoing,
+ Stop,
+}
+
+fn check_expr<'a, 'tcx>(vis: &mut ReadVisitor<'a, 'tcx>, expr: &'tcx Expr<'_>) -> StopEarly {
+ if expr.hir_id == vis.last_expr.hir_id {
+ return StopEarly::KeepGoing;
+ }
+
+ match expr.kind {
+ ExprKind::Array(_)
+ | ExprKind::Tup(_)
+ | ExprKind::MethodCall(..)
+ | ExprKind::Call(_, _)
+ | ExprKind::Assign(..)
+ | ExprKind::Index(_, _)
+ | ExprKind::Repeat(_, _)
+ | ExprKind::Struct(_, _, _) => {
+ walk_expr(vis, expr);
+ },
+ ExprKind::Binary(op, _, _) | ExprKind::AssignOp(op, _, _) => {
+ if op.node == BinOpKind::And || op.node == BinOpKind::Or {
+ // x && y and x || y always evaluate x first, so these are
+ // strictly sequenced.
+ } else {
+ walk_expr(vis, expr);
+ }
+ },
+ ExprKind::Closure { .. } => {
+ // Either
+ //
+ // * `var` is defined in the closure body, in which case we've reached the top of the enclosing
+ // function and can stop, or
+ //
+ // * `var` is captured by the closure, in which case, because evaluating a closure does not evaluate
+ // its body, we don't necessarily have a write, so we need to stop to avoid generating false
+ // positives.
+ //
+ // This is also the only place we need to stop early (grrr).
+ return StopEarly::Stop;
+ },
+ // All other expressions either have only one child or strictly
+ // sequence the evaluation order of their sub-expressions.
+ _ => {},
+ }
+
+ vis.last_expr = expr;
+
+ StopEarly::KeepGoing
+}
+
+fn check_stmt<'a, 'tcx>(vis: &mut ReadVisitor<'a, 'tcx>, stmt: &'tcx Stmt<'_>) -> StopEarly {
+ match stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => check_expr(vis, expr),
+ // If the declaration is of a local variable, check its initializer
+ // expression if it has one. Otherwise, keep going.
+ StmtKind::Local(local) => local
+ .init
+ .as_ref()
+ .map_or(StopEarly::KeepGoing, |expr| check_expr(vis, expr)),
+ StmtKind::Item(..) => StopEarly::KeepGoing,
+ }
+}
+
+/// A visitor that looks for reads from a variable.
+struct ReadVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ /// The ID of the variable we're looking for.
+ var: HirId,
+ /// The expressions where the write to the variable occurred (for reporting
+ /// in the lint).
+ write_expr: &'tcx Expr<'tcx>,
+ /// The last (highest in the AST) expression we've checked, so we know not
+ /// to recheck it.
+ last_expr: &'tcx Expr<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for ReadVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if expr.hir_id == self.last_expr.hir_id {
+ return;
+ }
+
+ if path_to_local_id(expr, self.var) {
+ // Check that this is a read, not a write.
+ if !is_in_assignment_position(self.cx, expr) {
+ span_lint_and_note(
+ self.cx,
+ MIXED_READ_WRITE_IN_EXPRESSION,
+ expr.span,
+ &format!("unsequenced read of `{}`", self.cx.tcx.hir().name(self.var)),
+ Some(self.write_expr.span),
+ "whether read occurs before this write depends on evaluation order",
+ );
+ }
+ }
+ match expr.kind {
+ // We're about to descend a closure. Since we don't know when (or
+ // if) the closure will be evaluated, any reads in it might not
+ // occur here (or ever). Like above, bail to avoid false positives.
+ ExprKind::Closure{..} |
+
+ // We want to avoid a false positive when a variable name occurs
+ // only to have its address taken, so we stop here. Technically,
+            // this misses some weird cases, e.g.
+ //
+ // ```rust
+ // let mut x = 0;
+ // let a = foo(&{x = 1; x}, x);
+ // ```
+ //
+ // TODO: fix this
+ ExprKind::AddrOf(_, _, _) => {
+ return;
+ }
+ _ => {}
+ }
+
+ walk_expr(self, expr);
+ }
+}
+
+/// Returns `true` if `expr` is the LHS of an assignment, like `expr = ...`.
+fn is_in_assignment_position(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if let Some(parent) = get_parent_expr(cx, expr) {
+ if let ExprKind::Assign(lhs, ..) = parent.kind {
+ return lhs.hir_id == expr.hir_id;
+ }
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/module_style.rs b/src/tools/clippy/clippy_lints/src/module_style.rs
new file mode 100644
index 000000000..0a3936572
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/module_style.rs
@@ -0,0 +1,166 @@
+use rustc_ast::ast;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{FileName, RealFileName, SourceFile, Span, SyntaxContext};
+use std::ffi::OsStr;
+use std::path::{Component, Path};
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks that module layout uses only self-named module files and bans `mod.rs` files.
+ ///
+ /// ### Why is this bad?
+ /// Having multiple module layout styles in a project can be confusing.
+ ///
+ /// ### Example
+ /// ```text
+ /// src/
+ /// stuff/
+ /// stuff_files.rs
+ /// mod.rs
+ /// lib.rs
+ /// ```
+ /// Use instead:
+ /// ```text
+ /// src/
+ /// stuff/
+ /// stuff_files.rs
+ /// stuff.rs
+ /// lib.rs
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub MOD_MODULE_FILES,
+ restriction,
+ "checks that module layout is consistent"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks that module layout uses only `mod.rs` files.
+ ///
+ /// ### Why is this bad?
+ /// Having multiple module layout styles in a project can be confusing.
+ ///
+ /// ### Example
+ /// ```text
+ /// src/
+ /// stuff/
+ /// stuff_files.rs
+ /// stuff.rs
+ /// lib.rs
+ /// ```
+ /// Use instead:
+ /// ```text
+ /// src/
+ /// stuff/
+ /// stuff_files.rs
+ /// mod.rs
+ /// lib.rs
+ /// ```
+
+ #[clippy::version = "1.57.0"]
+ pub SELF_NAMED_MODULE_FILES,
+ restriction,
+ "checks that module layout is consistent"
+}
+
+pub struct ModStyle;
+
+impl_lint_pass!(ModStyle => [MOD_MODULE_FILES, SELF_NAMED_MODULE_FILES]);
+
+impl EarlyLintPass for ModStyle {
+ fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) {
+ if cx.builder.lint_level(MOD_MODULE_FILES).0 == Level::Allow
+ && cx.builder.lint_level(SELF_NAMED_MODULE_FILES).0 == Level::Allow
+ {
+ return;
+ }
+
+ let files = cx.sess().source_map().files();
+
+ let RealFileName::LocalPath(trim_to_src) = &cx.sess().opts.working_dir else { return };
+
+ // `folder_segments` is all unique folder path segments `path/to/foo.rs` gives
+ // `[path, to]` but not foo
+ let mut folder_segments = FxHashSet::default();
+ // `mod_folders` is all the unique folder names that contain a mod.rs file
+ let mut mod_folders = FxHashSet::default();
+ // `file_map` maps file names to the full path including the file name
+    // `{ foo => path/to/foo.rs, .. }`
+ let mut file_map = FxHashMap::default();
+ for file in files.iter() {
+ if let FileName::Real(RealFileName::LocalPath(lp)) = &file.name {
+ let path = if lp.is_relative() {
+ lp
+ } else if let Ok(relative) = lp.strip_prefix(trim_to_src) {
+ relative
+ } else {
+ continue;
+ };
+
+ if let Some(stem) = path.file_stem() {
+ file_map.insert(stem, (file, path));
+ }
+ process_paths_for_mod_files(path, &mut folder_segments, &mut mod_folders);
+ check_self_named_mod_exists(cx, path, file);
+ }
+ }
+
+ for folder in &folder_segments {
+ if !mod_folders.contains(folder) {
+ if let Some((file, path)) = file_map.get(folder) {
+ let mut correct = path.to_path_buf();
+ correct.pop();
+ correct.push(folder);
+ correct.push("mod.rs");
+ cx.struct_span_lint(
+ SELF_NAMED_MODULE_FILES,
+ Span::new(file.start_pos, file.start_pos, SyntaxContext::root(), None),
+ |build| {
+ let mut lint =
+ build.build(&format!("`mod.rs` files are required, found `{}`", path.display()));
+ lint.help(&format!("move `{}` to `{}`", path.display(), correct.display(),));
+ lint.emit();
+ },
+ );
+ }
+ }
+ }
+ }
+}
+
+/// For each `path` we add each folder component to `folder_segments` and if the file name
+/// is `mod.rs` we add its parent folder to `mod_folders`.
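+///
+/// For example, `src/stuff/mod.rs` records `stuff` in `mod_folders`, and both
+/// `stuff` and `src` in `folder_segments`.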
+fn process_paths_for_mod_files<'a>(
+ path: &'a Path,
+ folder_segments: &mut FxHashSet<&'a OsStr>,
+ mod_folders: &mut FxHashSet<&'a OsStr>,
+) {
+ let mut comp = path.components().rev().peekable();
+ let _ = comp.next();
+ if path.ends_with("mod.rs") {
+ mod_folders.insert(comp.peek().map(|c| c.as_os_str()).unwrap_or_default());
+ }
+ let folders = comp.filter_map(|c| if let Component::Normal(s) = c { Some(s) } else { None });
+ folder_segments.extend(folders);
+}
+
+/// Checks every path for the presence of `mod.rs` files and emits the lint if found.
+fn check_self_named_mod_exists(cx: &EarlyContext<'_>, path: &Path, file: &SourceFile) {
+ if path.ends_with("mod.rs") {
+ let mut mod_file = path.to_path_buf();
+ mod_file.pop();
+ mod_file.set_extension("rs");
+
+ cx.struct_span_lint(
+ MOD_MODULE_FILES,
+ Span::new(file.start_pos, file.start_pos, SyntaxContext::root(), None),
+ |build| {
+ let mut lint = build.build(&format!("`mod.rs` files are not allowed, found `{}`", path.display()));
+ lint.help(&format!("move `{}` to `{}`", path.display(), mod_file.display(),));
+ lint.emit();
+ },
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mut_key.rs b/src/tools/clippy/clippy_lints/src/mut_key.rs
new file mode 100644
index 000000000..4db103bbc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mut_key.rs
@@ -0,0 +1,175 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::trait_ref_of_method;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::TypeVisitable;
+use rustc_middle::ty::{Adt, Array, Ref, Slice, Tuple, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::sym;
+use std::iter;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for sets/maps with mutable key types.
+ ///
+ /// ### Why is this bad?
+    /// All of `HashMap`, `HashSet`, `BTreeMap` and
+    /// `BTreeSet` rely on either the hash or the order of keys being unchanging,
+    /// so having types with interior mutability is a bad idea.
+ ///
+ /// ### Known problems
+ ///
+ /// #### False Positives
+ /// It's correct to use a struct that contains interior mutability as a key, when its
+ /// implementation of `Hash` or `Ord` doesn't access any of the interior mutable types.
+ /// However, this lint is unable to recognize this, so it will often cause false positives in
+    /// these cases. The `bytes` crate is a great example of this.
+ ///
+ /// #### False Negatives
+ /// For custom `struct`s/`enum`s, this lint is unable to check for interior mutability behind
+ /// indirection. For example, `struct BadKey<'a>(&'a Cell<usize>)` will be seen as immutable
+ /// and cause a false negative if its implementation of `Hash`/`Ord` accesses the `Cell`.
+ ///
+ /// This lint does check a few cases for indirection. Firstly, using some standard library
+ /// types (`Option`, `Result`, `Box`, `Rc`, `Arc`, `Vec`, `VecDeque`, `BTreeMap` and
+ /// `BTreeSet`) directly as keys (e.g. in `HashMap<Box<Cell<usize>>, ()>`) **will** trigger the
+ /// lint, because the impls of `Hash`/`Ord` for these types directly call `Hash`/`Ord` on their
+ /// contained type.
+ ///
+ /// Secondly, the implementations of `Hash` and `Ord` for raw pointers (`*const T` or `*mut T`)
+ /// apply only to the **address** of the contained value. Therefore, interior mutability
+ /// behind raw pointers (e.g. in `HashSet<*mut Cell<usize>>`) can't impact the value of `Hash`
+    /// or `Ord`, and therefore will not trigger this lint. For more info, see issue
+ /// [#6745](https://github.com/rust-lang/rust-clippy/issues/6745).
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::cmp::{PartialEq, Eq};
+ /// use std::collections::HashSet;
+ /// use std::hash::{Hash, Hasher};
+ /// use std::sync::atomic::AtomicUsize;
+ ///# #[allow(unused)]
+ ///
+ /// struct Bad(AtomicUsize);
+ /// impl PartialEq for Bad {
+ /// fn eq(&self, rhs: &Self) -> bool {
+ /// ..
+ /// ; unimplemented!();
+ /// }
+ /// }
+ ///
+ /// impl Eq for Bad {}
+ ///
+ /// impl Hash for Bad {
+ /// fn hash<H: Hasher>(&self, h: &mut H) {
+ /// ..
+ /// ; unimplemented!();
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let _: HashSet<Bad> = HashSet::new();
+ /// }
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub MUTABLE_KEY_TYPE,
+ suspicious,
+ "Check for mutable `Map`/`Set` key type"
+}
+
+declare_lint_pass!(MutableKeyType => [ MUTABLE_KEY_TYPE ]);
+
+impl<'tcx> LateLintPass<'tcx> for MutableKeyType {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
+ if let hir::ItemKind::Fn(ref sig, ..) = item.kind {
+ check_sig(cx, item.hir_id(), sig.decl);
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'tcx>) {
+ if let hir::ImplItemKind::Fn(ref sig, ..) = item.kind {
+ if trait_ref_of_method(cx, item.def_id).is_none() {
+ check_sig(cx, item.hir_id(), sig.decl);
+ }
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'tcx>) {
+ if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
+ check_sig(cx, item.hir_id(), sig.decl);
+ }
+ }
+
+ fn check_local(&mut self, cx: &LateContext<'_>, local: &hir::Local<'_>) {
+ if let hir::PatKind::Wild = local.pat.kind {
+ return;
+ }
+ check_ty(cx, local.span, cx.typeck_results().pat_ty(local.pat));
+ }
+}
+
+fn check_sig<'tcx>(cx: &LateContext<'tcx>, item_hir_id: hir::HirId, decl: &hir::FnDecl<'_>) {
+ let fn_def_id = cx.tcx.hir().local_def_id(item_hir_id);
+ let fn_sig = cx.tcx.fn_sig(fn_def_id);
+ for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) {
+ check_ty(cx, hir_ty.span, *ty);
+ }
+ check_ty(cx, decl.output.span(), cx.tcx.erase_late_bound_regions(fn_sig.output()));
+}
+
+// We want to lint 1. sets or maps with 2. not immutable key types and 3. no unerased
+// generics (because the compiler cannot ensure immutability for unknown types).
+fn check_ty<'tcx>(cx: &LateContext<'tcx>, span: Span, ty: Ty<'tcx>) {
+ let ty = ty.peel_refs();
+ if let Adt(def, substs) = ty.kind() {
+ let is_keyed_type = [sym::HashMap, sym::BTreeMap, sym::HashSet, sym::BTreeSet]
+ .iter()
+ .any(|diag_item| cx.tcx.is_diagnostic_item(*diag_item, def.did()));
+ if is_keyed_type && is_interior_mutable_type(cx, substs.type_at(0), span) {
+ span_lint(cx, MUTABLE_KEY_TYPE, span, "mutable key type");
+ }
+ }
+}
+
+/// Determines if a type contains interior mutability which would affect its implementation of
+/// [`Hash`] or [`Ord`].
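+///
+/// For example, this returns `true` for `Cell<usize>` and `&mut usize`, and
+/// `false` for `usize` and `&usize`.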
+fn is_interior_mutable_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, span: Span) -> bool {
+ match *ty.kind() {
+ Ref(_, inner_ty, mutbl) => mutbl == hir::Mutability::Mut || is_interior_mutable_type(cx, inner_ty, span),
+ Slice(inner_ty) => is_interior_mutable_type(cx, inner_ty, span),
+ Array(inner_ty, size) => {
+ size.try_eval_usize(cx.tcx, cx.param_env).map_or(true, |u| u != 0)
+ && is_interior_mutable_type(cx, inner_ty, span)
+ },
+ Tuple(fields) => fields.iter().any(|ty| is_interior_mutable_type(cx, ty, span)),
+ Adt(def, substs) => {
+            // Special case for collections in `std` whose impl of `Hash` or `Ord` delegates to
+ // that of their type parameters. Note: we don't include `HashSet` and `HashMap`
+ // because they have no impl for `Hash` or `Ord`.
+ let is_std_collection = [
+ sym::Option,
+ sym::Result,
+ sym::LinkedList,
+ sym::Vec,
+ sym::VecDeque,
+ sym::BTreeMap,
+ sym::BTreeSet,
+ sym::Rc,
+ sym::Arc,
+ ]
+ .iter()
+ .any(|diag_item| cx.tcx.is_diagnostic_item(*diag_item, def.did()));
+ let is_box = Some(def.did()) == cx.tcx.lang_items().owned_box();
+ if is_std_collection || is_box {
+ // The type is mutable if any of its type parameters are
+ substs.types().any(|ty| is_interior_mutable_type(cx, ty, span))
+ } else {
+ !ty.has_escaping_bound_vars()
+ && cx.tcx.layout_of(cx.param_env.and(ty)).is_ok()
+ && !ty.is_freeze(cx.tcx.at(span), cx.param_env)
+ }
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mut_mut.rs b/src/tools/clippy/clippy_lints/src/mut_mut.rs
new file mode 100644
index 000000000..cb16f0004
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mut_mut.rs
@@ -0,0 +1,114 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::higher;
+use rustc_hir as hir;
+use rustc_hir::intravisit;
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for instances of `mut mut` references.
+ ///
+ /// ### Why is this bad?
+ /// Multiple `mut`s don't add anything meaningful to the
+ /// source. This is either a copy'n'paste error, or it shows a fundamental
+ /// misunderstanding of references.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let mut y = 1;
+ /// let x = &mut &mut y;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MUT_MUT,
+ pedantic,
+ "usage of double-mut refs, e.g., `&mut &mut ...`"
+}
+
+declare_lint_pass!(MutMut => [MUT_MUT]);
+
+impl<'tcx> LateLintPass<'tcx> for MutMut {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
+ intravisit::walk_block(&mut MutVisitor { cx }, block);
+ }
+
+ fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx hir::Ty<'_>) {
+ use rustc_hir::intravisit::Visitor;
+
+ MutVisitor { cx }.visit_ty(ty);
+ }
+}
+
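+/// Visitor that reports nested `&mut &mut` borrows in expressions and `&mut &mut T`
+/// in types, skipping code from external macros and the `&mut` introduced by the
+/// `for`-loop desugaring.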
+pub struct MutVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> intravisit::Visitor<'tcx> for MutVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
+ if in_external_macro(self.cx.sess(), expr.span) {
+ return;
+ }
+
+ if let Some(higher::ForLoop { arg, body, .. }) = higher::ForLoop::hir(expr) {
+ // A `for` loop lowers to:
+ // ```rust
+ // match ::std::iter::Iterator::next(&mut iter) {
+ // // ^^^^
+ // ```
+ // Let's ignore the generated code.
+ intravisit::walk_expr(self, arg);
+ intravisit::walk_expr(self, body);
+ } else if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e) = expr.kind {
+ if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, _) = e.kind {
+ span_lint(
+ self.cx,
+ MUT_MUT,
+ expr.span,
+ "generally you want to avoid `&mut &mut _` if possible",
+ );
+ } else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() {
+ span_lint(
+ self.cx,
+ MUT_MUT,
+ expr.span,
+ "this expression mutably borrows a mutable reference. Consider reborrowing",
+ );
+ }
+ }
+ }
+
+ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
+ if in_external_macro(self.cx.sess(), ty.span) {
+ return;
+ }
+
+ if let hir::TyKind::Rptr(
+ _,
+ hir::MutTy {
+ ty: pty,
+ mutbl: hir::Mutability::Mut,
+ },
+ ) = ty.kind
+ {
+ if let hir::TyKind::Rptr(
+ _,
+ hir::MutTy {
+ mutbl: hir::Mutability::Mut,
+ ..
+ },
+ ) = pty.kind
+ {
+ span_lint(
+ self.cx,
+ MUT_MUT,
+ ty.span,
+ "generally you want to avoid `&mut &mut _` if possible",
+ );
+ }
+ }
+
+ intravisit::walk_ty(self, ty);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mut_mutex_lock.rs b/src/tools/clippy/clippy_lints/src/mut_mutex_lock.rs
new file mode 100644
index 000000000..b7f981faa
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mut_mutex_lock.rs
@@ -0,0 +1,70 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, Mutability};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `&mut Mutex::lock` calls
+ ///
+ /// ### Why is this bad?
+    /// `Mutex::lock` is less efficient than
+    /// calling `Mutex::get_mut`. In addition, you also get a static
+    /// guarantee that the mutex isn't locked, instead of just a runtime
+    /// guarantee.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::sync::{Arc, Mutex};
+ ///
+ /// let mut value_rc = Arc::new(Mutex::new(42_u8));
+ /// let value_mutex = Arc::get_mut(&mut value_rc).unwrap();
+ ///
+ /// let mut value = value_mutex.lock().unwrap();
+ /// *value += 1;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::sync::{Arc, Mutex};
+ ///
+ /// let mut value_rc = Arc::new(Mutex::new(42_u8));
+ /// let value_mutex = Arc::get_mut(&mut value_rc).unwrap();
+ ///
+ /// let value = value_mutex.get_mut().unwrap();
+ /// *value += 1;
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub MUT_MUTEX_LOCK,
+ style,
+ "`&mut Mutex::lock` does unnecessary locking"
+}
+
+declare_lint_pass!(MutMutexLock => [MUT_MUTEX_LOCK]);
+
+impl<'tcx> LateLintPass<'tcx> for MutMutexLock {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, ex: &'tcx Expr<'tcx>) {
+ if_chain! {
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &ex.kind;
+ if path.ident.name == sym!(lock);
+ let ty = cx.typeck_results().expr_ty(self_arg);
+ if let ty::Ref(_, inner_ty, Mutability::Mut) = ty.kind();
+ if is_type_diagnostic_item(cx, *inner_ty, sym::Mutex);
+ then {
+ span_lint_and_sugg(
+ cx,
+ MUT_MUTEX_LOCK,
+ path.ident.span,
+ "calling `&mut Mutex::lock` unnecessarily locks an exclusive (mutable) reference",
+ "change this to",
+ "get_mut".to_owned(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs
new file mode 100644
index 000000000..f434a655f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs
@@ -0,0 +1,95 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::subst::Subst;
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::iter;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects passing a mutable reference to a function that only
+ /// requires an immutable reference.
+ ///
+ /// ### Why is this bad?
+ /// The mutable reference rules out all other references to
+    /// the value. Also, the code misleads readers about the intent of the call site.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let mut vec = Vec::new();
+ /// # let mut value = 5;
+ /// vec.push(&mut value);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let mut vec = Vec::new();
+ /// # let value = 5;
+ /// vec.push(&value);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNNECESSARY_MUT_PASSED,
+ style,
+ "an argument passed as a mutable reference although the callee only demands an immutable reference"
+}
+
+declare_lint_pass!(UnnecessaryMutPassed => [UNNECESSARY_MUT_PASSED]);
+
+impl<'tcx> LateLintPass<'tcx> for UnnecessaryMutPassed {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ match e.kind {
+ ExprKind::Call(fn_expr, arguments) => {
+ if let ExprKind::Path(ref path) = fn_expr.kind {
+ check_arguments(
+ cx,
+ arguments,
+ cx.typeck_results().expr_ty(fn_expr),
+ &rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_qpath(path, false)),
+ "function",
+ );
+ }
+ },
+ ExprKind::MethodCall(path, arguments, _) => {
+ let def_id = cx.typeck_results().type_dependent_def_id(e.hir_id).unwrap();
+ let substs = cx.typeck_results().node_substs(e.hir_id);
+ let method_type = cx.tcx.bound_type_of(def_id).subst(cx.tcx, substs);
+ check_arguments(cx, arguments, method_type, path.ident.as_str(), "method");
+ },
+ _ => (),
+ }
+ }
+}
+
+fn check_arguments<'tcx>(
+ cx: &LateContext<'tcx>,
+ arguments: &[Expr<'_>],
+ type_definition: Ty<'tcx>,
+ name: &str,
+ fn_kind: &str,
+) {
+ match type_definition.kind() {
+ ty::FnDef(..) | ty::FnPtr(_) => {
+ let parameters = type_definition.fn_sig(cx.tcx).skip_binder().inputs();
+ for (argument, parameter) in iter::zip(arguments, parameters) {
+ match parameter.kind() {
+ ty::Ref(_, _, Mutability::Not)
+ | ty::RawPtr(ty::TypeAndMut {
+ mutbl: Mutability::Not, ..
+ }) => {
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _) = argument.kind {
+ span_lint(
+ cx,
+ UNNECESSARY_MUT_PASSED,
+ argument.span,
+ &format!("the {} `{}` doesn't need a mutable reference", fn_kind, name),
+ );
+ }
+ },
+ _ => (),
+ }
+ }
+ },
+ _ => (),
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
new file mode 100644
index 000000000..44fdf84c6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
@@ -0,0 +1,124 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::macros::{find_assert_eq_args, root_macro_call_first_node};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{BorrowKind, Expr, ExprKind, MatchSource, Mutability};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for function/method calls with a mutable
+ /// parameter in `debug_assert!`, `debug_assert_eq!` and `debug_assert_ne!` macros.
+ ///
+ /// ### Why is this bad?
+ /// In release builds `debug_assert!` macros are optimized out by the
+ /// compiler.
+ /// Therefore mutating something in a `debug_assert!` macro results in different behavior
+ /// between a release and debug build.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// debug_assert_eq!(vec![3].pop(), Some(3));
+ ///
+ /// // or
+ ///
+ /// # let mut x = 5;
+ /// # fn takes_a_mut_parameter(_: &mut u32) -> bool { unimplemented!() }
+ /// debug_assert!(takes_a_mut_parameter(&mut x));
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub DEBUG_ASSERT_WITH_MUT_CALL,
+ nursery,
+ "mutable arguments in `debug_assert{,_ne,_eq}!`"
+}
+
+declare_lint_pass!(DebugAssertWithMutCall => [DEBUG_ASSERT_WITH_MUT_CALL]);
+
+impl<'tcx> LateLintPass<'tcx> for DebugAssertWithMutCall {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, e) else { return };
+ let macro_name = cx.tcx.item_name(macro_call.def_id);
+ if !matches!(
+ macro_name.as_str(),
+ "debug_assert" | "debug_assert_eq" | "debug_assert_ne"
+ ) {
+ return;
+ }
+ let Some((lhs, rhs, _)) = find_assert_eq_args(cx, e, macro_call.expn) else { return };
+ for arg in [lhs, rhs] {
+ let mut visitor = MutArgVisitor::new(cx);
+ visitor.visit_expr(arg);
+ if let Some(span) = visitor.expr_span() {
+ span_lint(
+ cx,
+ DEBUG_ASSERT_WITH_MUT_CALL,
+ span,
+ &format!(
+ "do not call a function with mutable arguments inside of `{}!`",
+ macro_name
+ ),
+ );
+ }
+ }
+ }
+}
+
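+/// Visitor that decides whether a `debug_assert!` argument should be linted: it flags
+/// an explicit `&mut` borrow, an `if` expression (handled conservatively), or a path
+/// whose adjustments include a `&mut` reference. `expr_span` exposes the span to
+/// report only when something was flagged.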
+struct MutArgVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ expr_span: Option<Span>,
+ found: bool,
+}
+
+impl<'a, 'tcx> MutArgVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ expr_span: None,
+ found: false,
+ }
+ }
+
+ fn expr_span(&self) -> Option<Span> {
+ if self.found { self.expr_span } else { None }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for MutArgVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ match expr.kind {
+ ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _) => {
+ self.found = true;
+ return;
+ },
+ ExprKind::If(..) => {
+ self.found = true;
+ return;
+ },
+ ExprKind::Path(_) => {
+ if let Some(adj) = self.cx.typeck_results().adjustments().get(expr.hir_id) {
+ if adj
+ .iter()
+ .any(|a| matches!(a.target.kind(), ty::Ref(_, _, Mutability::Mut)))
+ {
+ self.found = true;
+ return;
+ }
+ }
+ },
+ // Don't check await desugars
+ ExprKind::Match(_, _, MatchSource::AwaitDesugar) => return,
+ _ if !self.found => self.expr_span = Some(expr.span),
+ _ => return,
+ }
+ walk_expr(self, expr);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/mutex_atomic.rs b/src/tools/clippy/clippy_lints/src/mutex_atomic.rs
new file mode 100644
index 000000000..a98577093
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/mutex_atomic.rs
@@ -0,0 +1,110 @@
+//! Checks for uses of mutex where an atomic value could be used
+//!
+//! This lint is **warn** by default
+
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `Mutex<X>` where an atomic will do.
+ ///
+ /// ### Why is this bad?
+ /// Using a mutex just to make access to a plain bool or
+ /// reference sequential is shooting flies with cannons.
+ /// `std::sync::atomic::AtomicBool` and `std::sync::atomic::AtomicPtr` are leaner and
+ /// faster.
+ ///
+ /// ### Known problems
+ /// This lint cannot detect if the mutex is actually used
+ /// for waiting before a critical section.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let y = true;
+ /// # use std::sync::Mutex;
+ /// let x = Mutex::new(&y);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let y = true;
+ /// # use std::sync::atomic::AtomicBool;
+ /// let x = AtomicBool::new(y);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MUTEX_ATOMIC,
+ nursery,
+ "using a mutex where an atomic value could be used instead"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usages of `Mutex<X>` where `X` is an integral
+ /// type.
+ ///
+ /// ### Why is this bad?
+    /// Using a mutex just to make access to a plain integer sequential is
+    /// shooting flies with cannons. `std::sync::atomic::AtomicUsize` is leaner and faster.
+ ///
+ /// ### Known problems
+ /// This lint cannot detect if the mutex is actually used
+ /// for waiting before a critical section.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::sync::Mutex;
+ /// let x = Mutex::new(0usize);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::sync::atomic::AtomicUsize;
+ /// let x = AtomicUsize::new(0usize);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MUTEX_INTEGER,
+ nursery,
+ "using a mutex for an integer type"
+}
+
+declare_lint_pass!(Mutex => [MUTEX_ATOMIC, MUTEX_INTEGER]);
+
+impl<'tcx> LateLintPass<'tcx> for Mutex {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if let ty::Adt(_, subst) = ty.kind() {
+ if is_type_diagnostic_item(cx, ty, sym::Mutex) {
+ let mutex_param = subst.type_at(0);
+ if let Some(atomic_name) = get_atomic_name(mutex_param) {
+ let msg = format!(
+ "consider using an `{}` instead of a `Mutex` here; if you just want the locking \
+ behavior and not the internal type, consider using `Mutex<()>`",
+ atomic_name
+ );
+ match *mutex_param.kind() {
+ ty::Uint(t) if t != ty::UintTy::Usize => span_lint(cx, MUTEX_INTEGER, expr.span, &msg),
+ ty::Int(t) if t != ty::IntTy::Isize => span_lint(cx, MUTEX_INTEGER, expr.span, &msg),
+ _ => span_lint(cx, MUTEX_ATOMIC, expr.span, &msg),
+ };
+ }
+ }
+ }
+ }
+}
+
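+/// Maps the type stored in the `Mutex` to the name of the atomic type suggested in
+/// its place; every unsigned integer maps to `AtomicUsize` and every signed integer
+/// to `AtomicIsize`, regardless of width.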
+fn get_atomic_name(ty: Ty<'_>) -> Option<&'static str> {
+ match ty.kind() {
+ ty::Bool => Some("AtomicBool"),
+ ty::Uint(_) => Some("AtomicUsize"),
+ ty::Int(_) => Some("AtomicIsize"),
+ ty::RawPtr(_) => Some("AtomicPtr"),
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs b/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs
new file mode 100644
index 000000000..9838d3cad
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_arbitrary_self_type.rs
@@ -0,0 +1,139 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use if_chain::if_chain;
+use rustc_ast::ast::{BindingMode, Lifetime, Mutability, Param, PatKind, Path, TyKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::kw;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// The lint checks for `self` in fn parameters that
+    /// specify the `Self`-type explicitly.
+    ///
+    /// ### Why is this bad?
+    /// It increases the amount of code and decreases its readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// enum ValType {
+ /// I32,
+ /// I64,
+ /// F32,
+ /// F64,
+ /// }
+ ///
+ /// impl ValType {
+ /// pub fn bytes(self: Self) -> usize {
+ /// match self {
+ /// Self::I32 | Self::F32 => 4,
+ /// Self::I64 | Self::F64 => 8,
+ /// }
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Could be rewritten as
+ ///
+ /// ```rust
+ /// enum ValType {
+ /// I32,
+ /// I64,
+ /// F32,
+ /// F64,
+ /// }
+ ///
+ /// impl ValType {
+ /// pub fn bytes(self) -> usize {
+ /// match self {
+ /// Self::I32 | Self::F32 => 4,
+ /// Self::I64 | Self::F64 => 8,
+ /// }
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub NEEDLESS_ARBITRARY_SELF_TYPE,
+ complexity,
+ "type of `self` parameter is already by default `Self`"
+}
+
+declare_lint_pass!(NeedlessArbitrarySelfType => [NEEDLESS_ARBITRARY_SELF_TYPE]);
+
+enum Mode {
+ Ref(Option<Lifetime>),
+ Value,
+}
+
+fn check_param_inner(cx: &EarlyContext<'_>, path: &Path, span: Span, binding_mode: &Mode, mutbl: Mutability) {
+ if_chain! {
+ if let [segment] = &path.segments[..];
+ if segment.ident.name == kw::SelfUpper;
+ then {
+ // In case we have a named lifetime, we check if the name comes from expansion.
+ // If it does, at this point we know the rest of the parameter was written by the user,
+ // so let them decide what the name of the lifetime should be.
+ // See #6089 for more details.
+ let mut applicability = Applicability::MachineApplicable;
+ let self_param = match (binding_mode, mutbl) {
+ (Mode::Ref(None), Mutability::Mut) => "&mut self".to_string(),
+ (Mode::Ref(Some(lifetime)), Mutability::Mut) => {
+ if lifetime.ident.span.from_expansion() {
+ applicability = Applicability::HasPlaceholders;
+ "&'_ mut self".to_string()
+ } else {
+ format!("&{} mut self", &lifetime.ident.name)
+ }
+ },
+ (Mode::Ref(None), Mutability::Not) => "&self".to_string(),
+ (Mode::Ref(Some(lifetime)), Mutability::Not) => {
+ if lifetime.ident.span.from_expansion() {
+ applicability = Applicability::HasPlaceholders;
+ "&'_ self".to_string()
+ } else {
+ format!("&{} self", &lifetime.ident.name)
+ }
+ },
+ (Mode::Value, Mutability::Mut) => "mut self".to_string(),
+ (Mode::Value, Mutability::Not) => "self".to_string(),
+ };
+
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_ARBITRARY_SELF_TYPE,
+ span,
+ "the type of the `self` parameter does not need to be arbitrary",
+ "consider to change this parameter to",
+ self_param,
+ applicability,
+ )
+ }
+ }
+}
+
+impl EarlyLintPass for NeedlessArbitrarySelfType {
+ fn check_param(&mut self, cx: &EarlyContext<'_>, p: &Param) {
+        // Bail out if the parameter is not a receiver or was not written by the user
+ if !p.is_self() || p.span.from_expansion() {
+ return;
+ }
+
+ match &p.ty.kind {
+ TyKind::Path(None, path) => {
+ if let PatKind::Ident(BindingMode::ByValue(mutbl), _, _) = p.pat.kind {
+ check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Value, mutbl);
+ }
+ },
+ TyKind::Rptr(lifetime, mut_ty) => {
+ if_chain! {
+ if let TyKind::Path(None, path) = &mut_ty.ty.kind;
+ if let PatKind::Ident(BindingMode::ByValue(Mutability::Not), _, _) = p.pat.kind;
+ then {
+ check_param_inner(cx, path, p.span.to(p.ty.span), &Mode::Ref(*lifetime), mut_ty.mutbl);
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_bool.rs b/src/tools/clippy/clippy_lints/src/needless_bool.rs
new file mode 100644
index 000000000..a4eec95b3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_bool.rs
@@ -0,0 +1,385 @@
+//! Checks for needless boolean results of if-else expressions
+//!
+//! This lint is **warn** by default
+
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::higher;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{get_parent_node, is_else_clause, is_expn_of, peel_blocks, peel_blocks_with_stmt};
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, Node, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expressions of the form `if c { true } else {
+ /// false }` (or vice versa) and suggests using the condition directly.
+ ///
+ /// ### Why is this bad?
+ /// Redundant code.
+ ///
+ /// ### Known problems
+ /// Maybe false positives: Sometimes, the two branches are
+ /// painstakingly documented (which we, of course, do not detect), so they *may*
+ /// have some value. Even then, the documentation can be rewritten to match the
+ /// shorter code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = true;
+ /// if x {
+ /// false
+ /// } else {
+ /// true
+ /// }
+ /// # ;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = true;
+ /// !x
+ /// # ;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_BOOL,
+ complexity,
+ "if-statements with plain booleans in the then- and else-clause, e.g., `if p { true } else { false }`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expressions of the form `x == true`,
+ /// `x != true` and order comparisons such as `x < true` (or vice versa) and
+ /// suggest using the variable directly.
+ ///
+ /// ### Why is this bad?
+ /// Unnecessary code.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// if x == true {}
+ /// if y == false {}
+ /// ```
+ /// use `x` directly:
+ /// ```rust,ignore
+ /// if x {}
+ /// if !y {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BOOL_COMPARISON,
+ complexity,
+ "comparing a variable to a boolean, e.g., `if x == true` or `if x != true`"
+}
+
+declare_lint_pass!(NeedlessBool => [NEEDLESS_BOOL]);
+
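+/// Returns `true` if the condition starts (possibly behind binary operators, calls,
+/// casts, type ascriptions or indexing) with a block-like expression (`if`, `loop`,
+/// `match`, a block or a const block), in which case the suggestion may need
+/// parentheses to parse correctly in statement position.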
+fn condition_needs_parentheses(e: &Expr<'_>) -> bool {
+ let mut inner = e;
+ while let ExprKind::Binary(_, i, _)
+ | ExprKind::Call(i, _)
+ | ExprKind::Cast(i, _)
+ | ExprKind::Type(i, _)
+ | ExprKind::Index(i, _) = inner.kind
+ {
+ if matches!(
+ i.kind,
+ ExprKind::Block(..)
+ | ExprKind::ConstBlock(..)
+ | ExprKind::If(..)
+ | ExprKind::Loop(..)
+ | ExprKind::Match(..)
+ ) {
+ return true;
+ }
+ inner = i;
+ }
+ false
+}
+
+fn is_parent_stmt(cx: &LateContext<'_>, id: HirId) -> bool {
+ matches!(
+ get_parent_node(cx.tcx, id),
+ Some(Node::Stmt(..) | Node::Block(Block { stmts: &[], .. }))
+ )
+}
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessBool {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ use self::Expression::{Bool, RetBool};
+ if e.span.from_expansion() {
+ return;
+ }
+ if let Some(higher::If {
+ cond,
+ then,
+ r#else: Some(r#else),
+ }) = higher::If::hir(e)
+ {
+ let reduce = |ret, not| {
+ let mut applicability = Applicability::MachineApplicable;
+ let snip = Sugg::hir_with_applicability(cx, cond, "<predicate>", &mut applicability);
+ let mut snip = if not { !snip } else { snip };
+
+ if ret {
+ snip = snip.make_return();
+ }
+
+ if is_else_clause(cx.tcx, e) {
+ snip = snip.blockify();
+ }
+
+ if condition_needs_parentheses(cond) && is_parent_stmt(cx, e.hir_id) {
+ snip = snip.maybe_par();
+ }
+
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_BOOL,
+ e.span,
+ "this if-then-else expression returns a bool literal",
+ "you can reduce it to",
+ snip.to_string(),
+ applicability,
+ );
+ };
+ if let Some((a, b)) = fetch_bool_block(then).and_then(|a| Some((a, fetch_bool_block(r#else)?))) {
+ match (a, b) {
+ (RetBool(true), RetBool(true)) | (Bool(true), Bool(true)) => {
+ span_lint(
+ cx,
+ NEEDLESS_BOOL,
+ e.span,
+ "this if-then-else expression will always return true",
+ );
+ },
+ (RetBool(false), RetBool(false)) | (Bool(false), Bool(false)) => {
+ span_lint(
+ cx,
+ NEEDLESS_BOOL,
+ e.span,
+ "this if-then-else expression will always return false",
+ );
+ },
+ (RetBool(true), RetBool(false)) => reduce(true, false),
+ (Bool(true), Bool(false)) => reduce(false, false),
+ (RetBool(false), RetBool(true)) => reduce(true, true),
+ (Bool(false), Bool(true)) => reduce(false, true),
+ _ => (),
+ }
+ }
+ }
+ }
+}
+
+declare_lint_pass!(BoolComparison => [BOOL_COMPARISON]);
+
+impl<'tcx> LateLintPass<'tcx> for BoolComparison {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if e.span.from_expansion() {
+ return;
+ }
+
+ if let ExprKind::Binary(Spanned { node, .. }, ..) = e.kind {
+ let ignore_case = None::<(fn(_) -> _, &str)>;
+ let ignore_no_literal = None::<(fn(_, _) -> _, &str)>;
+ match node {
+ BinOpKind::Eq => {
+ let true_case = Some((|h| h, "equality checks against true are unnecessary"));
+ let false_case = Some((
+ |h: Sugg<'tcx>| !h,
+ "equality checks against false can be replaced by a negation",
+ ));
+ check_comparison(cx, e, true_case, false_case, true_case, false_case, ignore_no_literal);
+ },
+ BinOpKind::Ne => {
+ let true_case = Some((
+ |h: Sugg<'tcx>| !h,
+ "inequality checks against true can be replaced by a negation",
+ ));
+ let false_case = Some((|h| h, "inequality checks against false are unnecessary"));
+ check_comparison(cx, e, true_case, false_case, true_case, false_case, ignore_no_literal);
+ },
+ BinOpKind::Lt => check_comparison(
+ cx,
+ e,
+ ignore_case,
+ Some((|h| h, "greater than checks against false are unnecessary")),
+ Some((
+ |h: Sugg<'tcx>| !h,
+ "less than comparison against true can be replaced by a negation",
+ )),
+ ignore_case,
+ Some((
+ |l: Sugg<'tcx>, r: Sugg<'tcx>| (!l).bit_and(&r),
+ "order comparisons between booleans can be simplified",
+ )),
+ ),
+ BinOpKind::Gt => check_comparison(
+ cx,
+ e,
+ Some((
+ |h: Sugg<'tcx>| !h,
+ "less than comparison against true can be replaced by a negation",
+ )),
+ ignore_case,
+ ignore_case,
+ Some((|h| h, "greater than checks against false are unnecessary")),
+ Some((
+ |l: Sugg<'tcx>, r: Sugg<'tcx>| l.bit_and(&(!r)),
+ "order comparisons between booleans can be simplified",
+ )),
+ ),
+ _ => (),
+ }
+ }
+ }
+}
+
+struct ExpressionInfoWithSpan {
+ one_side_is_unary_not: bool,
+ left_span: Span,
+ right_span: Span,
+}
+
+fn is_unary_not(e: &Expr<'_>) -> (bool, Span) {
+ if let ExprKind::Unary(UnOp::Not, operand) = e.kind {
+ return (true, operand.span);
+ }
+ (false, e.span)
+}
+
+fn one_side_is_unary_not<'tcx>(left_side: &'tcx Expr<'_>, right_side: &'tcx Expr<'_>) -> ExpressionInfoWithSpan {
+ let left = is_unary_not(left_side);
+ let right = is_unary_not(right_side);
+
+ ExpressionInfoWithSpan {
+ one_side_is_unary_not: left.0 != right.0,
+ left_span: left.1,
+ right_span: right.1,
+ }
+}
+
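+/// Each `Option` pairs a suggestion builder with a lint message: `left_true`/`left_false`
+/// fire when the bool literal `true`/`false` is the left-hand operand (the suggestion is
+/// built from the right-hand side), `right_true`/`right_false` cover the mirrored cases,
+/// and `no_literal` handles comparisons where neither operand is a bool literal.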
+fn check_comparison<'a, 'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ left_true: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>,
+ left_false: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>,
+ right_true: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>,
+ right_false: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>,
+ no_literal: Option<(impl FnOnce(Sugg<'a>, Sugg<'a>) -> Sugg<'a>, &str)>,
+) {
+ if let ExprKind::Binary(op, left_side, right_side) = e.kind {
+ let (l_ty, r_ty) = (
+ cx.typeck_results().expr_ty(left_side),
+ cx.typeck_results().expr_ty(right_side),
+ );
+ if is_expn_of(left_side.span, "cfg").is_some() || is_expn_of(right_side.span, "cfg").is_some() {
+ return;
+ }
+ if l_ty.is_bool() && r_ty.is_bool() {
+ let mut applicability = Applicability::MachineApplicable;
+
+ if op.node == BinOpKind::Eq {
+ let expression_info = one_side_is_unary_not(left_side, right_side);
+ if expression_info.one_side_is_unary_not {
+ span_lint_and_sugg(
+ cx,
+ BOOL_COMPARISON,
+ e.span,
+ "this comparison might be written more concisely",
+ "try simplifying it as shown",
+ format!(
+ "{} != {}",
+ snippet_with_applicability(cx, expression_info.left_span, "..", &mut applicability),
+ snippet_with_applicability(cx, expression_info.right_span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+
+ match (fetch_bool_expr(left_side), fetch_bool_expr(right_side)) {
+ (Some(true), None) => left_true.map_or((), |(h, m)| {
+ suggest_bool_comparison(cx, e, right_side, applicability, m, h);
+ }),
+ (None, Some(true)) => right_true.map_or((), |(h, m)| {
+ suggest_bool_comparison(cx, e, left_side, applicability, m, h);
+ }),
+ (Some(false), None) => left_false.map_or((), |(h, m)| {
+ suggest_bool_comparison(cx, e, right_side, applicability, m, h);
+ }),
+ (None, Some(false)) => right_false.map_or((), |(h, m)| {
+ suggest_bool_comparison(cx, e, left_side, applicability, m, h);
+ }),
+ (None, None) => no_literal.map_or((), |(h, m)| {
+ let left_side = Sugg::hir_with_applicability(cx, left_side, "..", &mut applicability);
+ let right_side = Sugg::hir_with_applicability(cx, right_side, "..", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ BOOL_COMPARISON,
+ e.span,
+ m,
+ "try simplifying it as shown",
+ h(left_side, right_side).to_string(),
+ applicability,
+ );
+ }),
+ _ => (),
+ }
+ }
+ }
+}
+
+fn suggest_bool_comparison<'a, 'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ expr: &Expr<'_>,
+ mut applicability: Applicability,
+ message: &str,
+ conv_hint: impl FnOnce(Sugg<'a>) -> Sugg<'a>,
+) {
+ let hint = if expr.span.from_expansion() {
+ if applicability != Applicability::Unspecified {
+ applicability = Applicability::MaybeIncorrect;
+ }
+ Sugg::hir_with_macro_callsite(cx, expr, "..")
+ } else {
+ Sugg::hir_with_applicability(cx, expr, "..", &mut applicability)
+ };
+ span_lint_and_sugg(
+ cx,
+ BOOL_COMPARISON,
+ e.span,
+ message,
+ "try simplifying it as shown",
+ conv_hint(hint).to_string(),
+ applicability,
+ );
+}
+
+enum Expression {
+ Bool(bool),
+ RetBool(bool),
+}
+
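+/// Classifies an arm of the `if`/`else` as either `return <bool literal>` (`RetBool`)
+/// or a plain bool literal (`Bool`), peeling surrounding blocks and statements first.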
+fn fetch_bool_block(expr: &Expr<'_>) -> Option<Expression> {
+ match peel_blocks_with_stmt(expr).kind {
+ ExprKind::Ret(Some(ret)) => Some(Expression::RetBool(fetch_bool_expr(ret)?)),
+ _ => Some(Expression::Bool(fetch_bool_expr(expr)?)),
+ }
+}
+
+fn fetch_bool_expr(expr: &Expr<'_>) -> Option<bool> {
+ if let ExprKind::Lit(ref lit_ptr) = peel_blocks(expr).kind {
+ if let LitKind::Bool(value) = lit_ptr.node {
+ return Some(value);
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
new file mode 100644
index 000000000..05c012b92
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
@@ -0,0 +1,87 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BindingAnnotation, Mutability, Node, Pat, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for bindings that destructure a reference and borrow the inner
+ /// value with `&ref`.
+ ///
+ /// ### Why is this bad?
+ /// This pattern has no effect in almost all cases.
+ ///
+ /// ### Known problems
+ /// In some cases, `&ref` is needed to avoid a lifetime mismatch error.
+ /// Example:
+ /// ```rust
+ /// fn foo(a: &Option<String>, b: &Option<String>) {
+ /// match (a, b) {
+ /// (None, &ref c) | (&ref c, None) => (),
+ /// (&Some(ref c), _) => (),
+ /// };
+ /// }
+ /// ```
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut v = Vec::<String>::new();
+ /// # #[allow(unused)]
+ /// v.iter_mut().filter(|&ref a| a.is_empty());
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let mut v = Vec::<String>::new();
+ /// # #[allow(unused)]
+ /// v.iter_mut().filter(|a| a.is_empty());
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_BORROWED_REFERENCE,
+ complexity,
+ "destructuring a reference and borrowing the inner value"
+}
+
+declare_lint_pass!(NeedlessBorrowedRef => [NEEDLESS_BORROWED_REFERENCE]);
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessBorrowedRef {
+ fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
+ if pat.span.from_expansion() {
+            // OK, simple enough: this lint doesn't check inside macros.
+ return;
+ }
+
+ if_chain! {
+ // Only lint immutable refs, because `&mut ref T` may be useful.
+ if let PatKind::Ref(sub_pat, Mutability::Not) = pat.kind;
+
+ // Check sub_pat got a `ref` keyword (excluding `ref mut`).
+ if let PatKind::Binding(BindingAnnotation::Ref, .., spanned_name, _) = sub_pat.kind;
+ let parent_id = cx.tcx.hir().get_parent_node(pat.hir_id);
+ if let Some(parent_node) = cx.tcx.hir().find(parent_id);
+ then {
+ // do not recurse within patterns, as they may have other references
+ // XXXManishearth we can relax this constraint if we only check patterns
+ // with a single ref pattern inside them
+ if let Node::Pat(_) = parent_node {
+ return;
+ }
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_then(cx, NEEDLESS_BORROWED_REFERENCE, pat.span,
+ "this pattern takes a reference on something that is being de-referenced",
+ |diag| {
+ let hint = snippet_with_applicability(cx, spanned_name.span, "..", &mut applicability).into_owned();
+ diag.span_suggestion(
+ pat.span,
+ "try removing the `&ref` part and just keep",
+ hint,
+ applicability,
+ );
+ });
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_continue.rs b/src/tools/clippy/clippy_lints/src/needless_continue.rs
new file mode 100644
index 000000000..98a3bce1f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_continue.rs
@@ -0,0 +1,479 @@
+//! Checks for continue statements in loops that are redundant.
+//!
+//! For example, the lint would catch
+//!
+//! ```rust
+//! let mut a = 1;
+//! let x = true;
+//!
+//! while a < 5 {
+//! a = 6;
+//! if x {
+//! // ...
+//! } else {
+//! continue;
+//! }
+//! println!("Hello, world");
+//! }
+//! ```
+//!
+//! And suggest something like this:
+//!
+//! ```rust
+//! let mut a = 1;
+//! let x = true;
+//!
+//! while a < 5 {
+//! a = 6;
+//! if x {
+//! // ...
+//! println!("Hello, world");
+//! }
+//! }
+//! ```
+//!
+//! This lint is **warn** by default.
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::{indent_of, snippet, snippet_block};
+use rustc_ast::ast;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// The lint checks for `if`-statements appearing in loops
+ /// that contain a `continue` statement in either their main blocks or their
+ /// `else`-blocks, when omitting the `else`-block possibly with some
+ /// rearrangement of code can make the code easier to understand.
+ ///
+ /// ### Why is this bad?
+ /// Having explicit `else` blocks for `if` statements
+ /// containing `continue` in their THEN branch adds unnecessary branching and
+ /// nesting to the code. Having an else block containing just `continue` can
+ /// also be better written by grouping the statements following the whole `if`
+ /// statement within the THEN block and omitting the else block completely.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn condition() -> bool { false }
+ /// # fn update_condition() {}
+ /// # let x = false;
+ /// while condition() {
+ /// update_condition();
+ /// if x {
+ /// // ...
+ /// } else {
+ /// continue;
+ /// }
+ /// println!("Hello, world");
+ /// }
+ /// ```
+ ///
+ /// Could be rewritten as
+ ///
+ /// ```rust
+ /// # fn condition() -> bool { false }
+ /// # fn update_condition() {}
+ /// # let x = false;
+ /// while condition() {
+ /// update_condition();
+ /// if x {
+ /// // ...
+ /// println!("Hello, world");
+ /// }
+ /// }
+ /// ```
+ ///
+ /// As another example, the following code
+ ///
+ /// ```rust
+ /// # fn waiting() -> bool { false }
+ /// loop {
+ /// if waiting() {
+ /// continue;
+ /// } else {
+ /// // Do something useful
+ /// }
+ /// # break;
+ /// }
+ /// ```
+ /// Could be rewritten as
+ ///
+ /// ```rust
+ /// # fn waiting() -> bool { false }
+ /// loop {
+ /// if waiting() {
+ /// continue;
+ /// }
+ /// // Do something useful
+ /// # break;
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_CONTINUE,
+ pedantic,
+ "`continue` statements that can be replaced by a rearrangement of code"
+}
+
+declare_lint_pass!(NeedlessContinue => [NEEDLESS_CONTINUE]);
+
+impl EarlyLintPass for NeedlessContinue {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
+ if !expr.span.from_expansion() {
+ check_and_warn(cx, expr);
+ }
+ }
+}
+
+/* This lint mainly has to deal with two cases of needless continue
+ * statements. */
+// Case 1 [Continue inside else block]:
+//
+// loop {
+// // region A
+// if cond {
+// // region B
+// } else {
+// continue;
+// }
+// // region C
+// }
+//
+// This code can better be written as follows:
+//
+// loop {
+// // region A
+// if cond {
+// // region B
+// // region C
+// }
+// }
+//
+// Case 2 [Continue inside then block]:
+//
+// loop {
+// // region A
+// if cond {
+// continue;
+// // potentially more code here.
+// } else {
+// // region B
+// }
+// // region C
+// }
+//
+//
+// This snippet can be refactored to:
+//
+// loop {
+// // region A
+// if !cond {
+// // region B
+// // region C
+// }
+// }
+//
+
+/// Given an expression, returns true if either of the following is true
+///
+/// - The expression is a `continue` node.
+/// - The expression node is a block with the first statement being a
+/// `continue`.
+fn needless_continue_in_else(else_expr: &ast::Expr, label: Option<&ast::Label>) -> bool {
+ match else_expr.kind {
+ ast::ExprKind::Block(ref else_block, _) => is_first_block_stmt_continue(else_block, label),
+ ast::ExprKind::Continue(l) => compare_labels(label, l.as_ref()),
+ _ => false,
+ }
+}
+
+fn is_first_block_stmt_continue(block: &ast::Block, label: Option<&ast::Label>) -> bool {
+ block.stmts.get(0).map_or(false, |stmt| match stmt.kind {
+ ast::StmtKind::Semi(ref e) | ast::StmtKind::Expr(ref e) => {
+ if let ast::ExprKind::Continue(ref l) = e.kind {
+ compare_labels(label, l.as_ref())
+ } else {
+ false
+ }
+ },
+ _ => false,
+ })
+}
+
+/// If the `continue` has a label, check it matches the label of the loop.
+fn compare_labels(loop_label: Option<&ast::Label>, continue_label: Option<&ast::Label>) -> bool {
+ match (loop_label, continue_label) {
+ // `loop { continue; }` or `'a loop { continue; }`
+ (_, None) => true,
+ // `loop { continue 'a; }`
+ (None, _) => false,
+ // `'a loop { continue 'a; }` or `'a loop { continue 'b; }`
+ (Some(x), Some(y)) => x.ident == y.ident,
+ }
+}
+
+/// If `expr` is a loop expression (while/while let/for/loop), calls `func` with
+/// the AST object representing the loop block of `expr`.
+fn with_loop_block<F>(expr: &ast::Expr, mut func: F)
+where
+ F: FnMut(&ast::Block, Option<&ast::Label>),
+{
+ if let ast::ExprKind::While(_, loop_block, label)
+ | ast::ExprKind::ForLoop(_, _, loop_block, label)
+ | ast::ExprKind::Loop(loop_block, label, ..) = &expr.kind
+ {
+ func(loop_block, label.as_ref());
+ }
+}
+
+/// If `stmt` is an if expression node with an `else` branch, calls func with
+/// the following:
+///
+/// - The `if` expression itself,
+/// - The `if` condition expression,
+/// - The `then` block, and
+/// - The `else` expression.
+fn with_if_expr<F>(stmt: &ast::Stmt, mut func: F)
+where
+ F: FnMut(&ast::Expr, &ast::Expr, &ast::Block, &ast::Expr),
+{
+ match stmt.kind {
+ ast::StmtKind::Semi(ref e) | ast::StmtKind::Expr(ref e) => {
+ if let ast::ExprKind::If(ref cond, ref if_block, Some(ref else_expr)) = e.kind {
+ func(e, cond, if_block, else_expr);
+ }
+ },
+ _ => {},
+ }
+}
+
+/// A type to distinguish between the two distinct cases this lint handles.
+#[derive(Copy, Clone, Debug)]
+enum LintType {
+ ContinueInsideElseBlock,
+ ContinueInsideThenBlock,
+}
+
+/// Data we pass around for construction of help messages.
+struct LintData<'a> {
+ /// The `if` expression encountered in the above loop.
+ if_expr: &'a ast::Expr,
+ /// The condition expression for the above `if`.
+ if_cond: &'a ast::Expr,
+ /// The `then` block of the `if` statement.
+ if_block: &'a ast::Block,
+    /// The `else` expression of the `if` statement.
+ /// Note that we only work with `if` exprs that have an `else` branch.
+ else_expr: &'a ast::Expr,
+ /// The 0-based index of the `if` statement in the containing loop block.
+ stmt_idx: usize,
+ /// The statements of the loop block.
+ loop_block: &'a ast::Block,
+}
+
+const MSG_REDUNDANT_CONTINUE_EXPRESSION: &str = "this `continue` expression is redundant";
+
+const MSG_REDUNDANT_ELSE_BLOCK: &str = "this `else` block is redundant";
+
+const MSG_ELSE_BLOCK_NOT_NEEDED: &str = "there is no need for an explicit `else` block for this `if` \
+ expression";
+
+const DROP_ELSE_BLOCK_AND_MERGE_MSG: &str = "consider dropping the `else` clause and merging the code that \
+ follows (in the loop) with the `if` block";
+
+const DROP_ELSE_BLOCK_MSG: &str = "consider dropping the `else` clause";
+
+const DROP_CONTINUE_EXPRESSION_MSG: &str = "consider dropping the `continue` expression";
+
+fn emit_warning<'a>(cx: &EarlyContext<'_>, data: &'a LintData<'_>, header: &str, typ: LintType) {
+ // snip is the whole *help* message that appears after the warning.
+ // message is the warning message.
+ // expr is the expression which the lint warning message refers to.
+ let (snip, message, expr) = match typ {
+ LintType::ContinueInsideElseBlock => (
+ suggestion_snippet_for_continue_inside_else(cx, data),
+ MSG_REDUNDANT_ELSE_BLOCK,
+ data.else_expr,
+ ),
+ LintType::ContinueInsideThenBlock => (
+ suggestion_snippet_for_continue_inside_if(cx, data),
+ MSG_ELSE_BLOCK_NOT_NEEDED,
+ data.if_expr,
+ ),
+ };
+ span_lint_and_help(
+ cx,
+ NEEDLESS_CONTINUE,
+ expr.span,
+ message,
+ None,
+ &format!("{}\n{}", header, snip),
+ );
+}
+
+fn suggestion_snippet_for_continue_inside_if<'a>(cx: &EarlyContext<'_>, data: &'a LintData<'_>) -> String {
+ let cond_code = snippet(cx, data.if_cond.span, "..");
+
+ let continue_code = snippet_block(cx, data.if_block.span, "..", Some(data.if_expr.span));
+
+ let else_code = snippet_block(cx, data.else_expr.span, "..", Some(data.if_expr.span));
+
+ let indent_if = indent_of(cx, data.if_expr.span).unwrap_or(0);
+ format!(
+ "{indent}if {} {}\n{indent}{}",
+ cond_code,
+ continue_code,
+ else_code,
+ indent = " ".repeat(indent_if),
+ )
+}
+
+fn suggestion_snippet_for_continue_inside_else<'a>(cx: &EarlyContext<'_>, data: &'a LintData<'_>) -> String {
+ let cond_code = snippet(cx, data.if_cond.span, "..");
+
+ // Region B
+ let block_code = erode_from_back(&snippet_block(cx, data.if_block.span, "..", Some(data.if_expr.span)));
+
+ // Region C
+    // This is the code in the loop block that follows the if/else construction
+ // we are complaining about. We want to pull all of this code into the
+ // `then` block of the `if` statement.
+ let indent = span_of_first_expr_in_block(data.if_block)
+ .and_then(|span| indent_of(cx, span))
+ .unwrap_or(0);
+ let to_annex = data.loop_block.stmts[data.stmt_idx + 1..]
+ .iter()
+ .map(|stmt| {
+ let span = cx.sess().source_map().stmt_span(stmt.span, data.loop_block.span);
+ let snip = snippet_block(cx, span, "..", None).into_owned();
+ snip.lines()
+ .map(|line| format!("{}{}", " ".repeat(indent), line))
+ .collect::<Vec<_>>()
+ .join("\n")
+ })
+ .collect::<Vec<_>>()
+ .join("\n");
+
+ let indent_if = indent_of(cx, data.if_expr.span).unwrap_or(0);
+ format!(
+ "{indent_if}if {} {}\n{indent}// merged code follows:\n{}\n{indent_if}}}",
+ cond_code,
+ block_code,
+ to_annex,
+ indent = " ".repeat(indent),
+ indent_if = " ".repeat(indent_if),
+ )
+}
+
+fn check_and_warn<'a>(cx: &EarlyContext<'_>, expr: &'a ast::Expr) {
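+    // First catch a bare `continue` as the last statement of a `loop` body, then walk
+    // the statements of any loop kind looking for the two if/else cases described above.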
+ if_chain! {
+ if let ast::ExprKind::Loop(loop_block, ..) = &expr.kind;
+ if let Some(last_stmt) = loop_block.stmts.last();
+ if let ast::StmtKind::Expr(inner_expr) | ast::StmtKind::Semi(inner_expr) = &last_stmt.kind;
+ if let ast::ExprKind::Continue(_) = inner_expr.kind;
+ then {
+ span_lint_and_help(
+ cx,
+ NEEDLESS_CONTINUE,
+ last_stmt.span,
+ MSG_REDUNDANT_CONTINUE_EXPRESSION,
+ None,
+ DROP_CONTINUE_EXPRESSION_MSG,
+ );
+ }
+ }
+ with_loop_block(expr, |loop_block, label| {
+ for (i, stmt) in loop_block.stmts.iter().enumerate() {
+ with_if_expr(stmt, |if_expr, cond, then_block, else_expr| {
+ let data = &LintData {
+ stmt_idx: i,
+ if_expr,
+ if_cond: cond,
+ if_block: then_block,
+ else_expr,
+ loop_block,
+ };
+ if needless_continue_in_else(else_expr, label) {
+ emit_warning(
+ cx,
+ data,
+ DROP_ELSE_BLOCK_AND_MERGE_MSG,
+ LintType::ContinueInsideElseBlock,
+ );
+ } else if is_first_block_stmt_continue(then_block, label) {
+ emit_warning(cx, data, DROP_ELSE_BLOCK_MSG, LintType::ContinueInsideThenBlock);
+ }
+ });
+ }
+ });
+}
+
+/// Eats at `s` from the end until a closing brace `}` is encountered, and then continues eating
+/// until a non-whitespace character is found. If no closing `}` is present, the string is
+/// returned unchanged. For example, the string
+///
+/// ```rust
+/// {
+/// let x = 5;
+/// }
+/// ```
+///
+/// is transformed to
+///
+/// ```text
+/// {
+/// let x = 5;
+/// ```
+#[must_use]
+fn erode_from_back(s: &str) -> String {
+ let mut ret = s.to_string();
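+    // Pop characters from the end until the first `}` has been removed.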
+ while ret.pop().map_or(false, |c| c != '}') {}
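+    // Then eat trailing whitespace, putting back the first non-whitespace character found.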
+ while let Some(c) = ret.pop() {
+ if !c.is_whitespace() {
+ ret.push(c);
+ break;
+ }
+ }
+ if ret.is_empty() { s.to_string() } else { ret }
+}
+
+fn span_of_first_expr_in_block(block: &ast::Block) -> Option<Span> {
+ block.stmts.get(0).map(|stmt| stmt.span)
+}
+
+#[cfg(test)]
+mod test {
+ use super::erode_from_back;
+
+ #[test]
+ #[rustfmt::skip]
+ fn test_erode_from_back() {
+ let input = "\
+{
+ let x = 5;
+ let y = format!(\"{}\", 42);
+}";
+
+ let expected = "\
+{
+ let x = 5;
+ let y = format!(\"{}\", 42);";
+
+ let got = erode_from_back(input);
+ assert_eq!(expected, got);
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn test_erode_from_back_no_brace() {
+ let input = "\
+let x = 5;
+let y = something();
+";
+ let expected = input;
+ let got = erode_from_back(input);
+ assert_eq!(expected, got);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs
new file mode 100644
index 000000000..10e188ecb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs
@@ -0,0 +1,160 @@
+use rustc_errors::Applicability;
+use rustc_hir::{
+ intravisit::{walk_expr, Visitor},
+ Closure, Expr, ExprKind, Stmt, StmtKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{source_map::Span, sym, Symbol};
+
+use if_chain::if_chain;
+
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::is_trait_method;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::has_iter_method;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `for_each` that would be more simply written as a
+ /// `for` loop.
+ ///
+ /// ### Why is this bad?
+ /// `for_each` may be used after applying iterator transformers like
+ /// `filter` for better readability and performance. It may also be used to fit a simple
+ /// operation on one line.
+ /// But when none of these apply, a simple `for` loop is more idiomatic.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let v = vec![0, 1, 2];
+ /// v.iter().for_each(|elem| {
+ /// println!("{}", elem);
+ /// })
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let v = vec![0, 1, 2];
+ /// for elem in v.iter() {
+ /// println!("{}", elem);
+ /// }
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub NEEDLESS_FOR_EACH,
+ pedantic,
+ "using `for_each` where a `for` loop would be simpler"
+}
+
+declare_lint_pass!(NeedlessForEach => [NEEDLESS_FOR_EACH]);
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessForEach {
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ let expr = match stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => expr,
+ _ => return,
+ };
+
+ if_chain! {
+ // Check the method name is `for_each`.
+ if let ExprKind::MethodCall(method_name, [for_each_recv, for_each_arg], _) = expr.kind;
+ if method_name.ident.name == Symbol::intern("for_each");
+ // Check `for_each` is an associated function of `Iterator`.
+ if is_trait_method(cx, expr, sym::Iterator);
+ // Checks the receiver of `for_each` is also a method call.
+ if let ExprKind::MethodCall(_, [iter_recv], _) = for_each_recv.kind;
+ // Skip the lint if the call chain is too long. e.g. `v.field.iter().for_each()` or
+ // `v.foo().iter().for_each()` must be skipped.
+ if matches!(
+ iter_recv.kind,
+ ExprKind::Array(..) | ExprKind::Call(..) | ExprKind::Path(..)
+ );
+ // Checks the type of the `iter` method receiver is NOT a user defined type.
+ if has_iter_method(cx, cx.typeck_results().expr_ty(iter_recv)).is_some();
+            // Skip the lint if the closure body is not a block, because such a call is
+            // already simpler than a `for` loop, e.g. `v.iter().for_each(f)`.
+ if let ExprKind::Closure(&Closure { body, .. }) = for_each_arg.kind;
+ let body = cx.tcx.hir().body(body);
+ if let ExprKind::Block(..) = body.value.kind;
+ then {
+ let mut ret_collector = RetCollector::default();
+ ret_collector.visit_expr(&body.value);
+
+ // Skip the lint if `return` is used in `Loop` in order not to suggest using `'label`.
+ if ret_collector.ret_in_loop {
+ return;
+ }
+
+ let (mut applicability, ret_suggs) = if ret_collector.spans.is_empty() {
+ (Applicability::MachineApplicable, None)
+ } else {
+ (
+ Applicability::MaybeIncorrect,
+ Some(
+ ret_collector
+ .spans
+ .into_iter()
+ .map(|span| (span, "continue".to_string()))
+ .collect(),
+ ),
+ )
+ };
+
+ let sugg = format!(
+ "for {} in {} {}",
+ snippet_with_applicability(cx, body.params[0].pat.span, "..", &mut applicability),
+ snippet_with_applicability(cx, for_each_recv.span, "..", &mut applicability),
+ snippet_with_applicability(cx, body.value.span, "..", &mut applicability),
+ );
+
+ span_lint_and_then(cx, NEEDLESS_FOR_EACH, stmt.span, "needless use of `for_each`", |diag| {
+ diag.span_suggestion(stmt.span, "try", sugg, applicability);
+ if let Some(ret_suggs) = ret_suggs {
+ diag.multipart_suggestion("...and replace `return` with `continue`", ret_suggs, applicability);
+ }
+ })
+ }
+ }
+ }
+}
+
+/// This type plays two roles.
+/// 1. Collect spans of `return` in the closure body.
+/// 2. Detect use of `return` in `Loop` in the closure body.
+///
+/// NOTE: The functionality of this type is similar to
+/// [`clippy_utils::visitors::find_all_ret_expressions`], but we can't use
+/// `find_all_ret_expressions` instead of this type. The reasons are:
+/// 1. `find_all_ret_expressions` passes the argument of `ExprKind::Ret` to a callback, but what we
+/// need here is `ExprKind::Ret` itself.
+/// 2. We can't trace current loop depth with `find_all_ret_expressions`.
+#[derive(Default)]
+struct RetCollector {
+ spans: Vec<Span>,
+ ret_in_loop: bool,
+ loop_depth: u16,
+}
+
+impl<'tcx> Visitor<'tcx> for RetCollector {
+ fn visit_expr(&mut self, expr: &Expr<'_>) {
+ match expr.kind {
+ ExprKind::Ret(..) => {
+ if self.loop_depth > 0 && !self.ret_in_loop {
+ self.ret_in_loop = true;
+ }
+
+ self.spans.push(expr.span);
+ },
+
+ ExprKind::Loop(..) => {
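+                // Track nesting: a `return` inside an inner loop cannot simply be rewritten
+                // to `continue` (it would need a loop label), so such closures are skipped.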
+ self.loop_depth += 1;
+ walk_expr(self, expr);
+ self.loop_depth -= 1;
+ return;
+ },
+
+ _ => {},
+ }
+
+ walk_expr(self, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_late_init.rs b/src/tools/clippy/clippy_lints/src/needless_late_init.rs
new file mode 100644
index 000000000..ff2999b1f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_late_init.rs
@@ -0,0 +1,390 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::path_to_local;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::needs_ordered_drop;
+use clippy_utils::visitors::{expr_visitor, expr_visitor_no_bodies, is_local_used};
+use rustc_errors::{Applicability, MultiSpan};
+use rustc_hir::intravisit::Visitor;
+use rustc_hir::{
+ BindingAnnotation, Block, Expr, ExprKind, HirId, Local, LocalSource, MatchSource, Node, Pat, PatKind, Stmt,
+ StmtKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for late initializations that can be replaced by a `let` statement
+ /// with an initializer.
+ ///
+ /// ### Why is this bad?
+ /// Assigning in the `let` statement is less repetitive.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a;
+ /// a = 1;
+ ///
+ /// let b;
+ /// match 3 {
+ /// 0 => b = "zero",
+ /// 1 => b = "one",
+ /// _ => b = "many",
+ /// }
+ ///
+ /// let c;
+ /// if true {
+ /// c = 1;
+ /// } else {
+ /// c = -1;
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let a = 1;
+ ///
+ /// let b = match 3 {
+ /// 0 => "zero",
+ /// 1 => "one",
+ /// _ => "many",
+ /// };
+ ///
+ /// let c = if true {
+ /// 1
+ /// } else {
+ /// -1
+ /// };
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub NEEDLESS_LATE_INIT,
+ style,
+ "late initializations that can be replaced by a `let` statement with an initializer"
+}
+declare_lint_pass!(NeedlessLateInit => [NEEDLESS_LATE_INIT]);
+
+fn contains_assign_expr<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'tcx>) -> bool {
+ let mut seen = false;
+ expr_visitor(cx, |expr| {
+ if let ExprKind::Assign(..) = expr.kind {
+ seen = true;
+ }
+
+ !seen
+ })
+ .visit_stmt(stmt);
+
+ seen
+}
+
+fn contains_let(cond: &Expr<'_>) -> bool {
+ let mut seen = false;
+ expr_visitor_no_bodies(|expr| {
+ if let ExprKind::Let(_) = expr.kind {
+ seen = true;
+ }
+
+ !seen
+ })
+ .visit_expr(cond);
+
+ seen
+}
+
+fn stmt_needs_ordered_drop(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
+ let StmtKind::Local(local) = stmt.kind else { return false };
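+    // `walk_short` stops at the first closure call that returns `false`, so this is `true`
+    // as soon as any binding (without a sub-pattern) has a type that needs an ordered drop.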
+ !local.pat.walk_short(|pat| {
+ if let PatKind::Binding(.., None) = pat.kind {
+ !needs_ordered_drop(cx, cx.typeck_results().pat_ty(pat))
+ } else {
+ true
+ }
+ })
+}
+
+#[derive(Debug)]
+struct LocalAssign {
+ lhs_id: HirId,
+ lhs_span: Span,
+ rhs_span: Span,
+ span: Span,
+}
+
+impl LocalAssign {
+ fn from_expr(expr: &Expr<'_>, span: Span) -> Option<Self> {
+ if let ExprKind::Assign(lhs, rhs, _) = expr.kind {
+ if lhs.span.from_expansion() {
+ return None;
+ }
+
+ Some(Self {
+ lhs_id: path_to_local(lhs)?,
+ lhs_span: lhs.span,
+ rhs_span: rhs.span.source_callsite(),
+ span,
+ })
+ } else {
+ None
+ }
+ }
+
+ fn new<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, binding_id: HirId) -> Option<LocalAssign> {
+ let assign = match expr.kind {
+ ExprKind::Block(Block { expr: Some(expr), .. }, _) => Self::from_expr(expr, expr.span),
+ ExprKind::Block(block, _) => {
+ if_chain! {
+ if let Some((last, other_stmts)) = block.stmts.split_last();
+ if let StmtKind::Expr(expr) | StmtKind::Semi(expr) = last.kind;
+
+ let assign = Self::from_expr(expr, last.span)?;
+
+ // avoid visiting if not needed
+ if assign.lhs_id == binding_id;
+ if other_stmts.iter().all(|stmt| !contains_assign_expr(cx, stmt));
+
+ then {
+ Some(assign)
+ } else {
+ None
+ }
+ }
+ },
+ ExprKind::Assign(..) => Self::from_expr(expr, expr.span),
+ _ => None,
+ }?;
+
+ if assign.lhs_id == binding_id {
+ Some(assign)
+ } else {
+ None
+ }
+ }
+}
+
+fn assignment_suggestions<'tcx>(
+ cx: &LateContext<'tcx>,
+ binding_id: HirId,
+ exprs: impl IntoIterator<Item = &'tcx Expr<'tcx>>,
+) -> Option<(Applicability, Vec<(Span, String)>)> {
+ let mut assignments = Vec::new();
+
+ for expr in exprs {
+ let ty = cx.typeck_results().expr_ty(expr);
+
+ if ty.is_never() {
+ continue;
+ }
+ if !ty.is_unit() {
+ return None;
+ }
+
+ let assign = LocalAssign::new(cx, expr, binding_id)?;
+
+ assignments.push(assign);
+ }
+
+ let suggestions = assignments
+ .iter()
+ .flat_map(|assignment| {
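+            // Each assignment yields two removals: the `<lhs> = ` prefix and everything
+            // from the end of the right-hand side to the end of the statement (the `;`).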
+ [
+ assignment.span.until(assignment.rhs_span),
+ assignment.rhs_span.shrink_to_hi().with_hi(assignment.span.hi()),
+ ]
+ })
+ .map(|span| (span, String::new()))
+ .collect::<Vec<(Span, String)>>();
+
+ match suggestions.len() {
+ // All of `exprs` are never types
+ // https://github.com/rust-lang/rust-clippy/issues/8911
+ 0 => None,
+ 1 => Some((Applicability::MachineApplicable, suggestions)),
+ // multiple suggestions don't work with rustfix in multipart_suggest
+ // https://github.com/rust-lang/rustfix/issues/141
+ _ => Some((Applicability::Unspecified, suggestions)),
+ }
+}
+
+struct Usage<'tcx> {
+ stmt: &'tcx Stmt<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ needs_semi: bool,
+}
+
+fn first_usage<'tcx>(
+ cx: &LateContext<'tcx>,
+ binding_id: HirId,
+ local_stmt_id: HirId,
+ block: &'tcx Block<'tcx>,
+) -> Option<Usage<'tcx>> {
+ let significant_drop = needs_ordered_drop(cx, cx.typeck_results().node_type(binding_id));
+
+ block
+ .stmts
+ .iter()
+ .skip_while(|stmt| stmt.hir_id != local_stmt_id)
+ .skip(1)
+ .take_while(|stmt| !significant_drop || !stmt_needs_ordered_drop(cx, stmt))
+ .find(|&stmt| is_local_used(cx, stmt, binding_id))
+ .and_then(|stmt| match stmt.kind {
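+            // A trailing expression statement has no `;`, so one has to be added after the
+            // suggested `let ... = <expr>` rewrite.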
+ StmtKind::Expr(expr) => Some(Usage {
+ stmt,
+ expr,
+ needs_semi: true,
+ }),
+ StmtKind::Semi(expr) => Some(Usage {
+ stmt,
+ expr,
+ needs_semi: false,
+ }),
+ _ => None,
+ })
+}
+
+fn local_snippet_without_semicolon(cx: &LateContext<'_>, local: &Local<'_>) -> Option<String> {
+ let span = local.span.with_hi(match local.ty {
+ // let <pat>: <ty>;
+ // ~~~~~~~~~~~~~~~
+ Some(ty) => ty.span.hi(),
+ // let <pat>;
+ // ~~~~~~~~~
+ None => local.pat.span.hi(),
+ });
+
+ snippet_opt(cx, span)
+}
+
+fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ local: &'tcx Local<'tcx>,
+ local_stmt: &'tcx Stmt<'tcx>,
+ block: &'tcx Block<'tcx>,
+ binding_id: HirId,
+) -> Option<()> {
+ let usage = first_usage(cx, binding_id, local_stmt.hir_id, block)?;
+ let binding_name = cx.tcx.hir().opt_name(binding_id)?;
+ let let_snippet = local_snippet_without_semicolon(cx, local)?;
+
+ match usage.expr.kind {
+ ExprKind::Assign(..) => {
+ let assign = LocalAssign::new(cx, usage.expr, binding_id)?;
+ let mut msg_span = MultiSpan::from_spans(vec![local_stmt.span, assign.span]);
+ msg_span.push_span_label(local_stmt.span, "created here");
+ msg_span.push_span_label(assign.span, "initialised here");
+
+ span_lint_and_then(
+ cx,
+ NEEDLESS_LATE_INIT,
+ msg_span,
+ "unneeded late initialization",
+ |diag| {
+ diag.tool_only_span_suggestion(
+ local_stmt.span,
+ "remove the local",
+ "",
+ Applicability::MachineApplicable,
+ );
+
+ diag.span_suggestion(
+ assign.lhs_span,
+ &format!("declare `{}` here", binding_name),
+ let_snippet,
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ },
+ ExprKind::If(cond, then_expr, Some(else_expr)) if !contains_let(cond) => {
+ let (applicability, suggestions) = assignment_suggestions(cx, binding_id, [then_expr, else_expr])?;
+
+ span_lint_and_then(
+ cx,
+ NEEDLESS_LATE_INIT,
+ local_stmt.span,
+ "unneeded late initialization",
+ |diag| {
+ diag.tool_only_span_suggestion(local_stmt.span, "remove the local", String::new(), applicability);
+
+ diag.span_suggestion_verbose(
+ usage.stmt.span.shrink_to_lo(),
+ &format!("declare `{}` here", binding_name),
+ format!("{} = ", let_snippet),
+ applicability,
+ );
+
+ diag.multipart_suggestion("remove the assignments from the branches", suggestions, applicability);
+
+ if usage.needs_semi {
+ diag.span_suggestion(
+ usage.stmt.span.shrink_to_hi(),
+ "add a semicolon after the `if` expression",
+ ";",
+ applicability,
+ );
+ }
+ },
+ );
+ },
+ ExprKind::Match(_, arms, MatchSource::Normal) => {
+ let (applicability, suggestions) = assignment_suggestions(cx, binding_id, arms.iter().map(|arm| arm.body))?;
+
+ span_lint_and_then(
+ cx,
+ NEEDLESS_LATE_INIT,
+ local_stmt.span,
+ "unneeded late initialization",
+ |diag| {
+ diag.tool_only_span_suggestion(local_stmt.span, "remove the local", String::new(), applicability);
+
+ diag.span_suggestion_verbose(
+ usage.stmt.span.shrink_to_lo(),
+ &format!("declare `{}` here", binding_name),
+ format!("{} = ", let_snippet),
+ applicability,
+ );
+
+ diag.multipart_suggestion(
+ "remove the assignments from the `match` arms",
+ suggestions,
+ applicability,
+ );
+
+ if usage.needs_semi {
+ diag.span_suggestion(
+ usage.stmt.span.shrink_to_hi(),
+ "add a semicolon after the `match` expression",
+ ";",
+ applicability,
+ );
+ }
+ },
+ );
+ },
+ _ => {},
+ };
+
+ Some(())
+}
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessLateInit {
+ fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
+ let mut parents = cx.tcx.hir().parent_iter(local.hir_id);
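+        // Only consider plain `let <ident>;` statements (no initializer, no `ref`/`mut`
+        // binding mode, no sub-pattern) that appear directly as a statement in a block.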
+ if_chain! {
+ if let Local {
+ init: None,
+ pat: &Pat {
+ kind: PatKind::Binding(BindingAnnotation::Unannotated, binding_id, _, None),
+ ..
+ },
+ source: LocalSource::Normal,
+ ..
+ } = local;
+ if let Some((_, Node::Stmt(local_stmt))) = parents.next();
+ if let Some((_, Node::Block(block))) = parents.next();
+
+ then {
+ check(cx, local, local_stmt, block, binding_id);
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
new file mode 100644
index 000000000..6e54b243c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
@@ -0,0 +1,87 @@
+use clippy_utils::{
+ diagnostics::span_lint_and_then,
+ higher,
+ source::{snippet, snippet_with_applicability},
+};
+
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for superfluous parentheses on literals in range expressions.
+ ///
+ /// ### Why is this bad?
+    /// Superfluous parentheses make the code less readable and add overhead when
+    /// reading it.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// for i in (0)..10 {
+ /// println!("{i}");
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ ///
+ /// ```rust
+ /// for i in 0..10 {
+ /// println!("{i}");
+ /// }
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub NEEDLESS_PARENS_ON_RANGE_LITERALS,
+ style,
+ "needless parenthesis on range literals can be removed"
+}
+
+declare_lint_pass!(NeedlessParensOnRangeLiterals => [NEEDLESS_PARENS_ON_RANGE_LITERALS]);
+
+fn snippet_enclosed_in_parenthesis(snippet: &str) -> bool {
+ snippet.starts_with('(') && snippet.ends_with(')')
+}
+
+fn check_for_parens(cx: &LateContext<'_>, e: &Expr<'_>, is_start: bool) {
+ if is_start &&
+ let ExprKind::Lit(ref literal) = e.kind &&
+ let ast::LitKind::Float(_sym, ast::LitFloatType::Unsuffixed) = literal.node
+ {
+ // don't check floating point literals on the start expression of a range
+ return;
+ }
+ if_chain! {
+ if let ExprKind::Lit(ref literal) = e.kind;
+ // the indicator that parenthesis surround the literal is that the span of the expression and the literal differ
+ if (literal.span.data().hi - literal.span.data().lo) != (e.span.data().hi - e.span.data().lo);
+ // inspect the source code of the expression for parenthesis
+ if snippet_enclosed_in_parenthesis(&snippet(cx, e.span, ""));
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_then(cx, NEEDLESS_PARENS_ON_RANGE_LITERALS, e.span,
+ "needless parenthesis on range literals can be removed",
+ |diag| {
+ let suggestion = snippet_with_applicability(cx, literal.span, "_", &mut applicability);
+ diag.span_suggestion(e.span, "try", suggestion, applicability);
+ });
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessParensOnRangeLiterals {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let Some(higher::Range { start, end, .. }) = higher::Range::hir(expr) {
+ if let Some(start) = start {
+ check_for_parens(cx, start, true);
+ }
+ if let Some(end) = end {
+ check_for_parens(cx, end, false);
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
new file mode 100644
index 000000000..0cbef1c95
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
@@ -0,0 +1,347 @@
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::ptr::get_spans;
+use clippy_utils::source::{snippet, snippet_opt};
+use clippy_utils::ty::{implements_trait, is_copy, is_type_diagnostic_item};
+use clippy_utils::{get_trait_def_id, is_self, paths};
+use if_chain::if_chain;
+use rustc_ast::ast::Attribute;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::{Applicability, Diagnostic};
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{BindingAnnotation, Body, FnDecl, GenericArg, HirId, Impl, ItemKind, Node, PatKind, QPath, TyKind};
+use rustc_hir::{HirIdMap, HirIdSet};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::mir::FakeReadCause;
+use rustc_middle::ty::{self, TypeVisitable};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::kw;
+use rustc_span::{sym, Span};
+use rustc_target::spec::abi::Abi;
+use rustc_trait_selection::traits;
+use rustc_trait_selection::traits::misc::can_type_implement_copy;
+use rustc_typeck::expr_use_visitor as euv;
+use std::borrow::Cow;
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for functions taking arguments by value, but not consuming them in the
+    /// function body.
+ ///
+ /// ### Why is this bad?
+    /// Taking arguments by reference is more flexible and can sometimes avoid
+    /// unnecessary allocations.
+ ///
+ /// ### Known problems
+    /// * This lint suggests taking an argument by reference; however, sometimes it is
+    ///   better to let users decide the argument type (by using the `Borrow` trait, for
+    ///   example), depending on how the function is used.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(v: Vec<i32>) {
+ /// assert_eq!(v.len(), 42);
+ /// }
+ /// ```
+ /// should be
+ /// ```rust
+ /// fn foo(v: &[i32]) {
+ /// assert_eq!(v.len(), 42);
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_PASS_BY_VALUE,
+ pedantic,
+ "functions taking arguments by value, but not consuming them in its body"
+}
+
+declare_lint_pass!(NeedlessPassByValue => [NEEDLESS_PASS_BY_VALUE]);
+
+macro_rules! need {
+ ($e: expr) => {
+ if let Some(x) = $e {
+ x
+ } else {
+ return;
+ }
+ };
+}
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
+ #[expect(clippy::too_many_lines)]
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ if span.from_expansion() {
+ return;
+ }
+
+ match kind {
+ FnKind::ItemFn(.., header) => {
+ let attrs = cx.tcx.hir().attrs(hir_id);
+ if header.abi != Abi::Rust || requires_exact_signature(attrs) {
+ return;
+ }
+ },
+ FnKind::Method(..) => (),
+ FnKind::Closure => return,
+ }
+
+ // Exclude non-inherent impls
+ if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
+ if matches!(
+ item.kind,
+ ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..)
+ ) {
+ return;
+ }
+ }
+
+ // Allow `Borrow` or functions to be taken by value
+ let allowed_traits = [
+ need!(cx.tcx.lang_items().fn_trait()),
+ need!(cx.tcx.lang_items().fn_once_trait()),
+ need!(cx.tcx.lang_items().fn_mut_trait()),
+ need!(get_trait_def_id(cx, &paths::RANGE_ARGUMENT_TRAIT)),
+ ];
+
+ let sized_trait = need!(cx.tcx.lang_items().sized_trait());
+
+ let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
+
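+        // Collect the caller's non-global trait bounds (minus `Sized`); each argument type
+        // is checked against these below to see whether a shared reference would still
+        // satisfy them.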
+ let preds = traits::elaborate_predicates(cx.tcx, cx.param_env.caller_bounds().iter())
+ .filter(|p| !p.is_global())
+ .filter_map(|obligation| {
+ // Note that we do not want to deal with qualified predicates here.
+ match obligation.predicate.kind().no_bound_vars() {
+ Some(ty::PredicateKind::Trait(pred)) if pred.def_id() != sized_trait => Some(pred),
+ _ => None,
+ }
+ })
+ .collect::<Vec<_>>();
+
+ // Collect moved variables and spans which will need dereferencings from the
+ // function body.
+ let MovedVariablesCtxt {
+ moved_vars,
+ spans_need_deref,
+ ..
+ } = {
+ let mut ctx = MovedVariablesCtxt::default();
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ euv::ExprUseVisitor::new(&mut ctx, &infcx, fn_def_id, cx.param_env, cx.typeck_results())
+ .consume_body(body);
+ });
+ ctx
+ };
+
+ let fn_sig = cx.tcx.fn_sig(fn_def_id);
+ let fn_sig = cx.tcx.erase_late_bound_regions(fn_sig);
+
+ for (idx, ((input, &ty), arg)) in decl.inputs.iter().zip(fn_sig.inputs()).zip(body.params).enumerate() {
+ // All spans generated from a proc-macro invocation are the same...
+ if span == input.span {
+ return;
+ }
+
+ // Ignore `self`s.
+ if idx == 0 {
+ if let PatKind::Binding(.., ident, _) = arg.pat.kind {
+ if ident.name == kw::SelfLower {
+ continue;
+ }
+ }
+ }
+
+ //
+ // * Exclude a type that is specifically bounded by `Borrow`.
+ // * Exclude a type whose reference also fulfills its bound. (e.g., `std::convert::AsRef`,
+ // `serde::Serialize`)
+ let (implements_borrow_trait, all_borrowable_trait) = {
+ let preds = preds.iter().filter(|t| t.self_ty() == ty).collect::<Vec<_>>();
+
+ (
+ preds.iter().any(|t| cx.tcx.is_diagnostic_item(sym::Borrow, t.def_id())),
+ !preds.is_empty() && {
+ let ty_empty_region = cx.tcx.mk_imm_ref(cx.tcx.lifetimes.re_root_empty, ty);
+ preds.iter().all(|t| {
+ let ty_params = t.trait_ref.substs.iter().skip(1).collect::<Vec<_>>();
+ implements_trait(cx, ty_empty_region, t.def_id(), &ty_params)
+ })
+ },
+ )
+ };
+
+ if_chain! {
+ if !is_self(arg);
+ if !ty.is_mutable_ptr();
+ if !is_copy(cx, ty);
+ if !allowed_traits.iter().any(|&t| implements_trait(cx, ty, t, &[]));
+ if !implements_borrow_trait;
+ if !all_borrowable_trait;
+
+ if let PatKind::Binding(mode, canonical_id, ..) = arg.pat.kind;
+ if !moved_vars.contains(&canonical_id);
+ then {
+ if mode == BindingAnnotation::Mutable || mode == BindingAnnotation::RefMut {
+ continue;
+ }
+
+ // Dereference suggestion
+ let sugg = |diag: &mut Diagnostic| {
+ if let ty::Adt(def, ..) = ty.kind() {
+ if let Some(span) = cx.tcx.hir().span_if_local(def.did()) {
+ if can_type_implement_copy(
+ cx.tcx,
+ cx.param_env,
+ ty,
+ traits::ObligationCause::dummy_with_span(span),
+ ).is_ok() {
+ diag.span_help(span, "consider marking this type as `Copy`");
+ }
+ }
+ }
+
+ let deref_span = spans_need_deref.get(&canonical_id);
+ if_chain! {
+ if is_type_diagnostic_item(cx, ty, sym::Vec);
+ if let Some(clone_spans) =
+ get_spans(cx, Some(body.id()), idx, &[("clone", ".to_owned()")]);
+ if let TyKind::Path(QPath::Resolved(_, path)) = input.kind;
+ if let Some(elem_ty) = path.segments.iter()
+ .find(|seg| seg.ident.name == sym::Vec)
+ .and_then(|ps| ps.args.as_ref())
+ .map(|params| params.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ }).unwrap());
+ then {
+ let slice_ty = format!("&[{}]", snippet(cx, elem_ty.span, "_"));
+ diag.span_suggestion(
+ input.span,
+ "consider changing the type to",
+ slice_ty,
+ Applicability::Unspecified,
+ );
+
+ for (span, suggestion) in clone_spans {
+ diag.span_suggestion(
+ span,
+ snippet_opt(cx, span)
+ .map_or(
+ "change the call to".into(),
+ |x| Cow::from(format!("change `{}` to", x)),
+ )
+ .as_ref(),
+ suggestion,
+ Applicability::Unspecified,
+ );
+ }
+
+ // cannot be destructured, no need for `*` suggestion
+ assert!(deref_span.is_none());
+ return;
+ }
+ }
+
+ if is_type_diagnostic_item(cx, ty, sym::String) {
+ if let Some(clone_spans) =
+ get_spans(cx, Some(body.id()), idx, &[("clone", ".to_string()"), ("as_str", "")]) {
+ diag.span_suggestion(
+ input.span,
+ "consider changing the type to",
+ "&str",
+ Applicability::Unspecified,
+ );
+
+ for (span, suggestion) in clone_spans {
+ diag.span_suggestion(
+ span,
+ snippet_opt(cx, span)
+ .map_or(
+ "change the call to".into(),
+ |x| Cow::from(format!("change `{}` to", x))
+ )
+ .as_ref(),
+ suggestion,
+ Applicability::Unspecified,
+ );
+ }
+
+ assert!(deref_span.is_none());
+ return;
+ }
+ }
+
+ let mut spans = vec![(input.span, format!("&{}", snippet(cx, input.span, "_")))];
+
+ // Suggests adding `*` to dereference the added reference.
+ if let Some(deref_span) = deref_span {
+ spans.extend(
+ deref_span
+ .iter()
+ .copied()
+ .map(|span| (span, format!("*{}", snippet(cx, span, "<expr>")))),
+ );
+ spans.sort_by_key(|&(span, _)| span);
+ }
+ multispan_sugg(diag, "consider taking a reference instead", spans);
+ };
+
+ span_lint_and_then(
+ cx,
+ NEEDLESS_PASS_BY_VALUE,
+ input.span,
+ "this argument is passed by value, but not consumed in the function body",
+ sugg,
+ );
+ }
+ }
+ }
+ }
+}
+
+/// Functions marked with these attributes must have the exact signature.
+fn requires_exact_signature(attrs: &[Attribute]) -> bool {
+ attrs.iter().any(|attr| {
+ [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
+ .iter()
+ .any(|&allow| attr.has_name(allow))
+ })
+}
+
+#[derive(Default)]
+struct MovedVariablesCtxt {
+ moved_vars: HirIdSet,
+ /// Spans which need to be prefixed with `*` for dereferencing the
+ /// suggested additional reference.
+ spans_need_deref: HirIdMap<FxHashSet<Span>>,
+}
+
+impl MovedVariablesCtxt {
+ fn move_common(&mut self, cmt: &euv::PlaceWithHirId<'_>) {
+ if let euv::PlaceBase::Local(vid) = cmt.place.base {
+ self.moved_vars.insert(vid);
+ }
+ }
+}
+
+impl<'tcx> euv::Delegate<'tcx> for MovedVariablesCtxt {
+ fn consume(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _: HirId) {
+ self.move_common(cmt);
+ }
+
+ fn borrow(&mut self, _: &euv::PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) {}
+
+ fn mutate(&mut self, _: &euv::PlaceWithHirId<'tcx>, _: HirId) {}
+
+ fn fake_read(&mut self, _: &rustc_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {}
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
new file mode 100644
index 000000000..8f85b0059
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
@@ -0,0 +1,143 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_lang_ctor;
+use clippy_utils::source::snippet;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{OptionSome, ResultOk};
+use rustc_hir::{AsyncGeneratorKind, Block, Body, Expr, ExprKind, GeneratorKind, LangItem, MatchSource, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Suggests alternatives for useless applications of `?` in terminating expressions
+ ///
+ /// ### Why is this bad?
+ /// There's no reason to use `?` to short-circuit when execution of the body will end there anyway.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct TO {
+ /// magic: Option<usize>,
+ /// }
+ ///
+ /// fn f(to: TO) -> Option<usize> {
+ /// Some(to.magic?)
+ /// }
+ ///
+ /// struct TR {
+ /// magic: Result<usize, bool>,
+ /// }
+ ///
+ /// fn g(tr: Result<TR, bool>) -> Result<usize, bool> {
+ /// tr.and_then(|t| Ok(t.magic?))
+ /// }
+ ///
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct TO {
+ /// magic: Option<usize>,
+ /// }
+ ///
+ /// fn f(to: TO) -> Option<usize> {
+ /// to.magic
+ /// }
+ ///
+ /// struct TR {
+ /// magic: Result<usize, bool>,
+ /// }
+ ///
+ /// fn g(tr: Result<TR, bool>) -> Result<usize, bool> {
+ /// tr.and_then(|t| t.magic)
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub NEEDLESS_QUESTION_MARK,
+ complexity,
+ "Suggest `value.inner_option` instead of `Some(value.inner_option?)`. The same goes for `Result<T, E>`."
+}
+
+declare_lint_pass!(NeedlessQuestionMark => [NEEDLESS_QUESTION_MARK]);
+
+impl LateLintPass<'_> for NeedlessQuestionMark {
+ /*
+ * The question mark operator is compatible with both Result<T, E> and Option<T>,
+ * from Rust 1.13 and 1.22 respectively.
+ */
+
+ /*
+ * What do we match:
+ * Expressions that look like this:
+ * Some(option?), Ok(result?)
+ *
+ * Where do we match:
+ * Last expression of a body
+ * Return statement
+ * A body's value (single line closure)
+ *
+ * What do we not match:
+ * Implicit calls to `from(..)` on the error value
+ */
+
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) {
+ if let ExprKind::Ret(Some(e)) = expr.kind {
+ check(cx, e);
+ }
+ }
+
+ fn check_body(&mut self, cx: &LateContext<'_>, body: &'_ Body<'_>) {
+ if let Some(GeneratorKind::Async(AsyncGeneratorKind::Fn)) = body.generator_kind {
+ if let ExprKind::Block(
+ Block {
+ expr:
+ Some(Expr {
+ kind: ExprKind::DropTemps(async_body),
+ ..
+ }),
+ ..
+ },
+ _,
+ ) = body.value.kind
+ {
+ if let ExprKind::Block(Block { expr: Some(expr), .. }, ..) = async_body.kind {
+ check(cx, expr);
+ }
+ }
+ } else {
+ check(cx, body.value.peel_blocks());
+ }
+ }
+}
+
+fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(path, [arg]) = &expr.kind;
+ if let ExprKind::Path(ref qpath) = &path.kind;
+ let sugg_remove = if is_lang_ctor(cx, qpath, OptionSome) {
+ "Some()"
+ } else if is_lang_ctor(cx, qpath, ResultOk) {
+ "Ok()"
+ } else {
+ return;
+ };
+ if let ExprKind::Match(inner_expr_with_q, _, MatchSource::TryDesugar) = &arg.kind;
+ if let ExprKind::Call(called, [inner_expr]) = &inner_expr_with_q.kind;
+ if let ExprKind::Path(QPath::LangItem(LangItem::TryTraitBranch, ..)) = &called.kind;
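+        // Only lint when the `?` comes from the same syntax context and no `From`
+        // conversion is involved, i.e. the inner and outer types are identical.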
+ if expr.span.ctxt() == inner_expr.span.ctxt();
+ let expr_ty = cx.typeck_results().expr_ty(expr);
+ let inner_ty = cx.typeck_results().expr_ty(inner_expr);
+ if expr_ty == inner_ty;
+ then {
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_QUESTION_MARK,
+ expr.span,
+ "question mark operator is useless here",
+ &format!("try removing question mark and `{}`", sugg_remove),
+ format!("{}", snippet(cx, inner_expr.span, r#""...""#)),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_update.rs b/src/tools/clippy/clippy_lints/src/needless_update.rs
new file mode 100644
index 000000000..0bd29d177
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_update.rs
@@ -0,0 +1,70 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for needlessly including a base struct on update
+ /// when all fields are changed anyway.
+ ///
+ /// This lint is not applied to structs marked with
+ /// [non_exhaustive](https://doc.rust-lang.org/reference/attributes/type_system.html).
+ ///
+ /// ### Why is this bad?
+ /// This will cost resources (because the base has to be
+ /// somewhere), and make the code less readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct Point {
+ /// # x: i32,
+ /// # y: i32,
+ /// # z: i32,
+ /// # }
+ /// # let zero_point = Point { x: 0, y: 0, z: 0 };
+ /// Point {
+ /// x: 1,
+ /// y: 1,
+ /// z: 1,
+ /// ..zero_point
+ /// };
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// // Missing field `z`
+ /// Point {
+ /// x: 1,
+ /// y: 1,
+ /// ..zero_point
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_UPDATE,
+ complexity,
+ "using `Foo { ..base }` when there are no missing fields"
+}
+
+declare_lint_pass!(NeedlessUpdate => [NEEDLESS_UPDATE]);
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessUpdate {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Struct(_, fields, Some(base)) = expr.kind {
+ let ty = cx.typeck_results().expr_ty(expr);
+ if let ty::Adt(def, _) = ty.kind() {
+ if fields.len() == def.non_enum_variant().fields.len()
+ && !def.variant(0_usize.into()).is_field_list_non_exhaustive()
+ {
+ span_lint(
+ cx,
+ NEEDLESS_UPDATE,
+ base.span,
+ "struct update has no effect, all the fields in the struct have already been specified",
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs b/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs
new file mode 100644
index 000000000..a7e0e3578
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/neg_cmp_op_on_partial_ord.rs
@@ -0,0 +1,90 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{self, get_trait_def_id, paths};
+use if_chain::if_chain;
+use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the usage of negated comparison operators on types which only implement
+ /// `PartialOrd` (e.g., `f64`).
+ ///
+ /// ### Why is this bad?
+    /// These operators make it easy to forget that the underlying types actually allow not only three
+    /// potential orderings (Less, Equal, Greater) but also a fourth one (incomparable, i.e. `None`). This is
+    /// especially easy to miss if the operator-based comparison result is negated.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = 1.0;
+ /// let b = f64::NAN;
+ ///
+ /// let not_less_or_equal = !(a <= b);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// use std::cmp::Ordering;
+ /// # let a = 1.0;
+ /// # let b = f64::NAN;
+ ///
+ /// let _not_less_or_equal = match a.partial_cmp(&b) {
+ /// None | Some(Ordering::Greater) => true,
+ /// _ => false,
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEG_CMP_OP_ON_PARTIAL_ORD,
+ complexity,
+ "The use of negated comparison operators on partially ordered types may produce confusing code."
+}
+
+declare_lint_pass!(NoNegCompOpForPartialOrd => [NEG_CMP_OP_ON_PARTIAL_ORD]);
+
+impl<'tcx> LateLintPass<'tcx> for NoNegCompOpForPartialOrd {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+
+ if !in_external_macro(cx.sess(), expr.span);
+ if let ExprKind::Unary(UnOp::Not, inner) = expr.kind;
+ if let ExprKind::Binary(ref op, left, _) = inner.kind;
+ if let BinOpKind::Le | BinOpKind::Ge | BinOpKind::Lt | BinOpKind::Gt = op.node;
+
+ then {
+
+ let ty = cx.typeck_results().expr_ty(left);
+
+ let implements_ord = {
+ if let Some(id) = get_trait_def_id(cx, &paths::ORD) {
+ implements_trait(cx, ty, id, &[])
+ } else {
+ return;
+ }
+ };
+
+ let implements_partial_ord = {
+ if let Some(id) = cx.tcx.lang_items().partial_ord_trait() {
+ implements_trait(cx, ty, id, &[])
+ } else {
+ return;
+ }
+ };
+
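+                // For types that are also `Ord` the comparison can never be incomparable,
+                // so negating it is fine; only `PartialOrd`-without-`Ord` types are linted.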
+ if implements_partial_ord && !implements_ord {
+ span_lint(
+ cx,
+ NEG_CMP_OP_ON_PARTIAL_ORD,
+ expr.span,
+ "the use of negated comparison operators on partially ordered \
+ types produces code that is hard to read and refactor, please \
+ consider using the `partial_cmp` method instead, to make it \
+ clear that the two values could be incomparable"
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/neg_multiply.rs b/src/tools/clippy/clippy_lints/src/neg_multiply.rs
new file mode 100644
index 000000000..b087cfb36
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/neg_multiply.rs
@@ -0,0 +1,80 @@
+use clippy_utils::consts::{self, Constant};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::sugg::has_enclosing_paren;
+use if_chain::if_chain;
+use rustc_ast::util::parser::PREC_PREFIX;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for multiplication by -1 as a form of negation.
+ ///
+ /// ### Why is this bad?
+ /// It's more readable to just negate.
+ ///
+ /// ### Known problems
+ /// This only catches integers (for now).
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let a = x * -1;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// let a = -x;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEG_MULTIPLY,
+ style,
+ "multiplying integers by `-1`"
+}
+
+declare_lint_pass!(NegMultiply => [NEG_MULTIPLY]);
+
+impl<'tcx> LateLintPass<'tcx> for NegMultiply {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let ExprKind::Binary(ref op, left, right) = e.kind {
+ if BinOpKind::Mul == op.node {
+ match (&left.kind, &right.kind) {
+ (&ExprKind::Unary(..), &ExprKind::Unary(..)) => {},
+ (&ExprKind::Unary(UnOp::Neg, lit), _) => check_mul(cx, e.span, lit, right),
+ (_, &ExprKind::Unary(UnOp::Neg, lit)) => check_mul(cx, e.span, lit, left),
+ _ => {},
+ }
+ }
+ }
+ }
+}
+
+fn check_mul(cx: &LateContext<'_>, span: Span, lit: &Expr<'_>, exp: &Expr<'_>) {
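+    // The `-` is matched as a separate `UnOp::Neg` by the caller, so the literal checked
+    // here is the plain `1` of the original `-1`.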
+ if_chain! {
+ if let ExprKind::Lit(ref l) = lit.kind;
+ if consts::lit_to_mir_constant(&l.node, cx.typeck_results().expr_ty_opt(lit)) == Constant::Int(1);
+ if cx.typeck_results().expr_ty(exp).is_integral();
+
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let snip = snippet_with_applicability(cx, exp.span, "..", &mut applicability);
+ let suggestion = if exp.precedence().order() < PREC_PREFIX && !has_enclosing_paren(&snip) {
+ format!("-({})", snip)
+ } else {
+ format!("-{}", snip)
+ };
+ span_lint_and_sugg(
+ cx,
+ NEG_MULTIPLY,
+ span,
+ "this multiplication by -1 can be written more succinctly",
+ "consider using",
+ suggestion,
+ applicability,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs
new file mode 100644
index 000000000..5c45ee6d9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs
@@ -0,0 +1,169 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::return_ty;
+use clippy_utils::source::snippet;
+use clippy_utils::sugg::DiagnosticExt;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::HirIdSet;
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for public types with a `pub fn new() -> Self` method and no
+    /// implementation of [`Default`](https://doc.rust-lang.org/std/default/trait.Default.html).
+ ///
+ /// ### Why is this bad?
+ /// The user might expect to be able to use
+ /// [`Default`](https://doc.rust-lang.org/std/default/trait.Default.html) as the
+ /// type can be constructed without arguments.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// pub struct Foo(Bar);
+ ///
+ /// impl Foo {
+ /// pub fn new() -> Self {
+ /// Foo(Bar::new())
+ /// }
+ /// }
+ /// ```
+ ///
+ /// To fix the lint, add a `Default` implementation that delegates to `new`:
+ ///
+ /// ```ignore
+ /// pub struct Foo(Bar);
+ ///
+ /// impl Default for Foo {
+ /// fn default() -> Self {
+ /// Foo::new()
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEW_WITHOUT_DEFAULT,
+ style,
+ "`pub fn new() -> Self` method without `Default` implementation"
+}
+
+#[derive(Clone, Default)]
+pub struct NewWithoutDefault {
+ impling_types: Option<HirIdSet>,
+}
+
+impl_lint_pass!(NewWithoutDefault => [NEW_WITHOUT_DEFAULT]);
+
+impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ if let hir::ItemKind::Impl(hir::Impl {
+ of_trait: None,
+ generics,
+ self_ty: impl_self_ty,
+ items,
+ ..
+ }) = item.kind
+ {
+ for assoc_item in *items {
+ if assoc_item.kind == (hir::AssocItemKind::Fn { has_self: false }) {
+ let impl_item = cx.tcx.hir().impl_item(assoc_item.id);
+ if in_external_macro(cx.sess(), impl_item.span) {
+ return;
+ }
+ if let hir::ImplItemKind::Fn(ref sig, _) = impl_item.kind {
+ let name = impl_item.ident.name;
+ let id = impl_item.hir_id();
+ if sig.header.constness == hir::Constness::Const {
+ // can't be implemented by default
+ return;
+ }
+ if sig.header.unsafety == hir::Unsafety::Unsafe {
+ // can't be implemented for unsafe new
+ return;
+ }
+ if cx.tcx.is_doc_hidden(impl_item.def_id) {
+ // shouldn't be implemented when it is hidden in docs
+ return;
+ }
+ if !impl_item.generics.params.is_empty() {
+ // when the result of `new()` depends on a parameter we should not require
+ // an impl of `Default`
+ return;
+ }
+ if_chain! {
+ if sig.decl.inputs.is_empty();
+ if name == sym::new;
+ if cx.access_levels.is_reachable(impl_item.def_id);
+ let self_def_id = cx.tcx.hir().get_parent_item(id);
+ let self_ty = cx.tcx.type_of(self_def_id);
+ if self_ty == return_ty(cx, id);
+ if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default);
+ then {
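+                            // Lazily build the set of local types that already implement
+                            // `Default`, so the `for_each_impl` walk only happens once.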
+ if self.impling_types.is_none() {
+ let mut impls = HirIdSet::default();
+ cx.tcx.for_each_impl(default_trait_id, |d| {
+ if let Some(ty_def) = cx.tcx.type_of(d).ty_adt_def() {
+ if let Some(local_def_id) = ty_def.did().as_local() {
+ impls.insert(cx.tcx.hir().local_def_id_to_hir_id(local_def_id));
+ }
+ }
+ });
+ self.impling_types = Some(impls);
+ }
+
+ // Check if a Default implementation exists for the Self type, regardless of
+ // generics
+ if_chain! {
+ if let Some(ref impling_types) = self.impling_types;
+ if let Some(self_def) = cx.tcx.type_of(self_def_id).ty_adt_def();
+ if let Some(self_local_did) = self_def.did().as_local();
+ let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_local_did);
+ if impling_types.contains(&self_id);
+ then {
+ return;
+ }
+ }
+
+ let generics_sugg = snippet(cx, generics.span, "");
+ let self_ty_fmt = self_ty.to_string();
+ let self_type_snip = snippet(cx, impl_self_ty.span, &self_ty_fmt);
+ span_lint_hir_and_then(
+ cx,
+ NEW_WITHOUT_DEFAULT,
+ id,
+ impl_item.span,
+ &format!(
+ "you should consider adding a `Default` implementation for `{}`",
+ self_type_snip
+ ),
+ |diag| {
+ diag.suggest_prepend_item(
+ cx,
+ item.span,
+ "try adding this",
+ &create_new_without_default_suggest_msg(&self_type_snip, &generics_sugg),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+fn create_new_without_default_suggest_msg(self_type_snip: &str, generics_sugg: &str) -> String {
+ #[rustfmt::skip]
+ format!(
+"impl{} Default for {} {{
+ fn default() -> Self {{
+ Self::new()
+ }}
+}}", generics_sugg, self_type_snip)
+}
diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs
new file mode 100644
index 000000000..819646bb6
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/no_effect.rs
@@ -0,0 +1,277 @@
+use clippy_utils::diagnostics::{span_lint_hir, span_lint_hir_and_then};
+use clippy_utils::is_lint_allowed;
+use clippy_utils::peel_blocks;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::has_drop;
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{is_range_literal, BinOpKind, BlockCheckMode, Expr, ExprKind, PatKind, Stmt, StmtKind, UnsafeSource};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::ops::Deref;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for statements which have no effect.
+ ///
+ /// ### Why is this bad?
+ /// Unlike dead code, these statements are actually
+ /// executed. However, as they have no effect, all they do is make the code less
+ /// readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// 0;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NO_EFFECT,
+ complexity,
+ "statements with no effect"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for binding to underscore prefixed variable without side-effects.
+ ///
+ /// ### Why is this bad?
+ /// Unlike dead code, these bindings are actually
+ /// executed. However, as they have no effect and shouldn't be used further on, all they
+ /// do is make the code less readable.
+ ///
+ /// ### Known problems
+ /// Further usage of this variable is not checked, which can lead to false positives if it is
+ /// used later in the code.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let _i_serve_no_purpose = 1;
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub NO_EFFECT_UNDERSCORE_BINDING,
+ pedantic,
+ "binding to `_` prefixed variable with no side-effect"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expression statements that can be reduced to a
+ /// sub-expression.
+ ///
+ /// ### Why is this bad?
+ /// Expressions by themselves often have no side-effects.
+ /// Having such expressions reduces readability.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// compute_array()[0];
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNNECESSARY_OPERATION,
+ complexity,
+ "outer expressions with no effect"
+}
+
+declare_lint_pass!(NoEffect => [NO_EFFECT, UNNECESSARY_OPERATION, NO_EFFECT_UNDERSCORE_BINDING]);
+
+impl<'tcx> LateLintPass<'tcx> for NoEffect {
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ if check_no_effect(cx, stmt) {
+ return;
+ }
+ check_unnecessary_operation(cx, stmt);
+ }
+}
+
+fn check_no_effect(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
+ if let StmtKind::Semi(expr) = stmt.kind {
+ if has_no_effect(cx, expr) {
+ span_lint_hir(cx, NO_EFFECT, expr.hir_id, stmt.span, "statement with no effect");
+ return true;
+ }
+ } else if let StmtKind::Local(local) = stmt.kind {
+ if_chain! {
+ if !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id);
+ if let Some(init) = local.init;
+ if local.els.is_none();
+ if !local.pat.span.from_expansion();
+ if has_no_effect(cx, init);
+ if let PatKind::Binding(_, _, ident, _) = local.pat.kind;
+ if ident.name.to_ident_string().starts_with('_');
+ then {
+ span_lint_hir(
+ cx,
+ NO_EFFECT_UNDERSCORE_BINDING,
+ init.hir_id,
+ stmt.span,
+ "binding to `_` prefixed variable with no side-effect"
+ );
+ return true;
+ }
+ }
+ }
+ false
+}
+
+fn has_no_effect(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if expr.span.from_expansion() {
+ return false;
+ }
+ match peel_blocks(expr).kind {
+ ExprKind::Lit(..) | ExprKind::Closure { .. } => true,
+ ExprKind::Path(..) => !has_drop(cx, cx.typeck_results().expr_ty(expr)),
+ ExprKind::Index(a, b) | ExprKind::Binary(_, a, b) => has_no_effect(cx, a) && has_no_effect(cx, b),
+ ExprKind::Array(v) | ExprKind::Tup(v) => v.iter().all(|val| has_no_effect(cx, val)),
+ ExprKind::Repeat(inner, _)
+ | ExprKind::Cast(inner, _)
+ | ExprKind::Type(inner, _)
+ | ExprKind::Unary(_, inner)
+ | ExprKind::Field(inner, _)
+ | ExprKind::AddrOf(_, _, inner)
+ | ExprKind::Box(inner) => has_no_effect(cx, inner),
+ ExprKind::Struct(_, fields, ref base) => {
+ !has_drop(cx, cx.typeck_results().expr_ty(expr))
+ && fields.iter().all(|field| has_no_effect(cx, field.expr))
+ && base.as_ref().map_or(true, |base| has_no_effect(cx, base))
+ },
+ ExprKind::Call(callee, args) => {
+ if let ExprKind::Path(ref qpath) = callee.kind {
+ if cx.typeck_results().type_dependent_def(expr.hir_id).is_some() {
+ // type-dependent function call like `impl FnOnce for X`
+ return false;
+ }
+ let def_matched = matches!(
+ cx.qpath_res(qpath, callee.hir_id),
+ Res::Def(DefKind::Struct | DefKind::Variant | DefKind::Ctor(..), ..)
+ );
+ if def_matched || is_range_literal(expr) {
+ !has_drop(cx, cx.typeck_results().expr_ty(expr)) && args.iter().all(|arg| has_no_effect(cx, arg))
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+ },
+ _ => false,
+ }
+}
+
+fn check_unnecessary_operation(cx: &LateContext<'_>, stmt: &Stmt<'_>) {
+ if_chain! {
+ if let StmtKind::Semi(expr) = stmt.kind;
+ if let Some(reduced) = reduce_expression(cx, expr);
+ if !&reduced.iter().any(|e| e.span.from_expansion());
+ then {
+ if let ExprKind::Index(..) = &expr.kind {
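+                // Indexing can panic on out-of-bounds access, so keep that effect by
+                // suggesting an explicit bounds `assert!` instead of removing the statement.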
+ let snippet = if let (Some(arr), Some(func)) =
+ (snippet_opt(cx, reduced[0].span), snippet_opt(cx, reduced[1].span))
+ {
+ format!("assert!({}.len() > {});", &arr, &func)
+ } else {
+ return;
+ };
+ span_lint_hir_and_then(
+ cx,
+ UNNECESSARY_OPERATION,
+ expr.hir_id,
+ stmt.span,
+ "unnecessary operation",
+ |diag| {
+ diag.span_suggestion(
+ stmt.span,
+ "statement can be written as",
+ snippet,
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ } else {
+ let mut snippet = String::new();
+ for e in reduced {
+ if let Some(snip) = snippet_opt(cx, e.span) {
+ snippet.push_str(&snip);
+ snippet.push(';');
+ } else {
+ return;
+ }
+ }
+ span_lint_hir_and_then(
+ cx,
+ UNNECESSARY_OPERATION,
+ expr.hir_id,
+ stmt.span,
+ "unnecessary operation",
+ |diag| {
+ diag.span_suggestion(
+ stmt.span,
+ "statement can be reduced to",
+ snippet,
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ }
+ }
+}
+
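+/// Splits an expression into the sub-expressions that still need to be evaluated
+/// (for their possible side effects); returns `None` when the expression cannot be reduced.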
+fn reduce_expression<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<Vec<&'a Expr<'a>>> {
+ if expr.span.from_expansion() {
+ return None;
+ }
+ match expr.kind {
+ ExprKind::Index(a, b) => Some(vec![a, b]),
+ ExprKind::Binary(ref binop, a, b) if binop.node != BinOpKind::And && binop.node != BinOpKind::Or => {
+ Some(vec![a, b])
+ },
+ ExprKind::Array(v) | ExprKind::Tup(v) => Some(v.iter().collect()),
+ ExprKind::Repeat(inner, _)
+ | ExprKind::Cast(inner, _)
+ | ExprKind::Type(inner, _)
+ | ExprKind::Unary(_, inner)
+ | ExprKind::Field(inner, _)
+ | ExprKind::AddrOf(_, _, inner)
+ | ExprKind::Box(inner) => reduce_expression(cx, inner).or_else(|| Some(vec![inner])),
+ ExprKind::Struct(_, fields, ref base) => {
+ if has_drop(cx, cx.typeck_results().expr_ty(expr)) {
+ None
+ } else {
+ Some(fields.iter().map(|f| &f.expr).chain(base).map(Deref::deref).collect())
+ }
+ },
+ ExprKind::Call(callee, args) => {
+ if let ExprKind::Path(ref qpath) = callee.kind {
+ if cx.typeck_results().type_dependent_def(expr.hir_id).is_some() {
+ // type-dependent function call like `impl FnOnce for X`
+ return None;
+ }
+ let res = cx.qpath_res(qpath, callee.hir_id);
+ match res {
+ Res::Def(DefKind::Struct | DefKind::Variant | DefKind::Ctor(..), ..)
+ if !has_drop(cx, cx.typeck_results().expr_ty(expr)) =>
+ {
+ Some(args.iter().collect())
+ },
+ _ => None,
+ }
+ } else {
+ None
+ }
+ },
+ ExprKind::Block(block, _) => {
+ if block.stmts.is_empty() {
+ block.expr.as_ref().and_then(|e| {
+ match block.rules {
+ BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) => None,
+ BlockCheckMode::DefaultBlock => Some(vec![&**e]),
+ // in case of compiler-inserted signaling blocks
+ BlockCheckMode::UnsafeBlock(_) => reduce_expression(cx, e),
+ }
+ })
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs
new file mode 100644
index 000000000..72c86f28b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs
@@ -0,0 +1,449 @@
+//! Checks for uses of `const` items whose type is not `Freeze` (`Cell`-free).
+//!
+//! This lint is **warn** by default.
+
+use std::ptr;
+
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::in_constant;
+use clippy_utils::macros::macro_backtrace;
+use if_chain::if_chain;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{
+ BodyId, Expr, ExprKind, HirId, Impl, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind, UnOp,
+};
+use rustc_lint::{LateContext, LateLintPass, Lint};
+use rustc_middle::mir;
+use rustc_middle::mir::interpret::{ConstValue, ErrorHandled};
+use rustc_middle::ty::adjustment::Adjust;
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, InnerSpan, Span, DUMMY_SP};
+use rustc_typeck::hir_ty_to_ty;
+
+// FIXME: this is a correctness problem but there's no suitable
+// warn-by-default category.
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for declarations of `const` items which are interior
+ /// mutable (e.g., contain a `Cell`, `Mutex`, `AtomicXxxx`, etc.).
+ ///
+ /// ### Why is this bad?
+ /// Consts are copied everywhere they are referenced, i.e.,
+ /// every time you refer to the const a fresh instance of the `Cell` or `Mutex`
+ /// or `AtomicXxxx` will be created, which defeats the whole purpose of using
+ /// these types in the first place.
+ ///
+ /// The `const` should be replaced by a `static` item if a global
+ /// variable is wanted, or by a `const fn` if a constructor is wanted.
+ ///
+ /// ### Known problems
+ /// A "non-constant" const item is a legacy way to supply an
+ /// initialized value to downstream `static` items (e.g., the
+ /// `std::sync::ONCE_INIT` constant). In this case the use of `const` is legit,
+ /// and this lint should be suppressed.
+ ///
+ /// Even though the lint avoids triggering on a constant whose type has enums with interior
+ /// mutable variants when its value only uses non-interior-mutable variants (see
+ /// [#3962](https://github.com/rust-lang/rust-clippy/issues/3962) and
+ /// [#3825](https://github.com/rust-lang/rust-clippy/issues/3825) for examples),
+ /// it complains about associated constants without default values based only on their types,
+ /// which might not be preferable.
+ /// There are other cases combining enums and associated constants that the lint cannot handle.
+ ///
+ /// Types that have underlying or potential interior mutability trigger the lint whether
+ /// the interior mutable field is used or not. See issues
+ /// [#5812](https://github.com/rust-lang/rust-clippy/issues/5812) and
+ /// [#3825](https://github.com/rust-lang/rust-clippy/issues/3825).
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+ ///
+ /// const CONST_ATOM: AtomicUsize = AtomicUsize::new(12);
+ /// CONST_ATOM.store(6, SeqCst); // the content of the atomic is unchanged
+ /// assert_eq!(CONST_ATOM.load(SeqCst), 12); // because each `CONST_ATOM` in these lines is a distinct instance
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+ /// static STATIC_ATOM: AtomicUsize = AtomicUsize::new(15);
+ /// STATIC_ATOM.store(9, SeqCst);
+ /// assert_eq!(STATIC_ATOM.load(SeqCst), 9); // use a `static` item to refer to the same instance
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DECLARE_INTERIOR_MUTABLE_CONST,
+ style,
+ "declaring `const` with interior mutability"
+}
+
+// FIXME: this is a correctness problem but there's no suitable
+// warn-by-default category.
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks whether a `const` item with interior mutability (e.g., one that
+ /// contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.) has been borrowed directly.
+ ///
+ /// ### Why is this bad?
+ /// Consts are copied everywhere they are referenced, i.e.,
+ /// every time you refer to the const a fresh instance of the `Cell` or `Mutex`
+ /// or `AtomicXxxx` will be created, which defeats the whole purpose of using
+ /// these types in the first place.
+ ///
+ /// The `const` value should be stored inside a `static` item.
+ ///
+ /// ### Known problems
+ /// When an enum has variants with interior mutability, use of its non
+ /// interior mutable variants can generate false positives. See issue
+ /// [#3962](https://github.com/rust-lang/rust-clippy/issues/3962)
+ ///
+ /// Types that have underlying or potential interior mutability trigger the lint whether
+ /// the interior mutable field is used or not. See issues
+ /// [#5812](https://github.com/rust-lang/rust-clippy/issues/5812) and
+ /// [#3825](https://github.com/rust-lang/rust-clippy/issues/3825)
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+ /// const CONST_ATOM: AtomicUsize = AtomicUsize::new(12);
+ ///
+ /// CONST_ATOM.store(6, SeqCst); // the content of the atomic is unchanged
+ /// assert_eq!(CONST_ATOM.load(SeqCst), 12); // because each `CONST_ATOM` in these lines is a distinct instance
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+ /// const CONST_ATOM: AtomicUsize = AtomicUsize::new(12);
+ ///
+ /// static STATIC_ATOM: AtomicUsize = CONST_ATOM;
+ /// STATIC_ATOM.store(9, SeqCst);
+ /// assert_eq!(STATIC_ATOM.load(SeqCst), 9); // use a `static` item to refer to the same instance
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BORROW_INTERIOR_MUTABLE_CONST,
+ style,
+ "referencing `const` with interior mutability"
+}
+
+fn is_unfrozen<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ // Ignore types whose layout is unknown, since `is_freeze` reports every generic type as `!Freeze`,
+ // making it indistinguishable from `UnsafeCell`; i.e. it isn't a tool to prove a type is
+ // 'unfrozen'. However, this causes a false negative when a type contains both a layout-unknown
+ // type and an unsafe cell, like `const CELL: Cell<T>`.
+ // Still, it's better than `ty.has_type_flags(TypeFlags::HAS_TY_PARAM | TypeFlags::HAS_PROJECTION)`
+ // since it works when a pointer indirection is involved (`Cell<*const T>`).
+ // Making up a `ParamEnv` where every generic param and assoc type is `Freeze` is another option,
+ // but I'm not sure whether that's a decent approach, if it's possible at all.
+ cx.tcx.layout_of(cx.param_env.and(ty)).is_ok() && !ty.is_freeze(cx.tcx.at(DUMMY_SP), cx.param_env)
+}
+
+fn is_value_unfrozen_raw<'tcx>(
+ cx: &LateContext<'tcx>,
+ result: Result<ConstValue<'tcx>, ErrorHandled>,
+ ty: Ty<'tcx>,
+) -> bool {
+ fn inner<'tcx>(cx: &LateContext<'tcx>, val: mir::ConstantKind<'tcx>) -> bool {
+ match val.ty().kind() {
+ // the fact that we have to dig into every struct to search for enums
+ // means that checking for `UnsafeCell` directly is the only option.
+ ty::Adt(ty_def, ..) if ty_def.is_unsafe_cell() => true,
+ ty::Array(..) | ty::Adt(..) | ty::Tuple(..) => {
+ let val = cx.tcx.destructure_mir_constant(cx.param_env, val);
+ val.fields.iter().any(|field| inner(cx, *field))
+ },
+ _ => false,
+ }
+ }
+ result.map_or_else(
+ |err| {
+ // Consider `TooGeneric` cases as being unfrozen.
+ // This causes a false positive where an assoc const whose type is unfrozen
+ // has a value that is a frozen variant with a generic param (an example is
+ // `declare_interior_mutable_const::enums::BothOfCellAndGeneric::GENERIC_VARIANT`).
+ // However, it prevents a number of false negatives that are, I think, important:
+ // 1. assoc consts in trait defs referring to consts of themselves
+ // (an example is `declare_interior_mutable_const::traits::ConcreteTypes::ANOTHER_ATOMIC`).
+ // 2. a path expr referring to assoc consts whose type doesn't have
+ // any frozen variants in trait defs (i.e. without a substitute for `Self`).
+ // (e.g. borrowing `borrow_interior_mutable_const::trait::ConcreteTypes::ATOMIC`)
+ // 3. similar to the false positive above,
+ // but the value is an unfrozen variant, or the type has no enums. (An example is
+ // `declare_interior_mutable_const::enums::BothOfCellAndGeneric::UNFROZEN_VARIANT`
+ // and `declare_interior_mutable_const::enums::BothOfCellAndGeneric::NO_ENUM`).
+ // One might be able to prevent these FNs correctly, and replace this with `false`;
+ // e.g. implementing `has_frozen_variant` described above, and not running this function
+ // when the type doesn't have any frozen variants, would be the 'correct' way for the 2nd
+ // case (that actually removes another suboptimal behavior (I won't say 'false positive') where,
+ // similar to 2., but with a frozen variant) (e.g. borrowing
+ // `borrow_interior_mutable_const::enums::AssocConsts::TO_BE_FROZEN_VARIANT`).
+ // I chose this way because unfrozen enums as assoc consts are rare (or, hopefully, nonexistent).
+ err == ErrorHandled::TooGeneric
+ },
+ |val| inner(cx, mir::ConstantKind::from_value(val, ty)),
+ )
+}
+
+fn is_value_unfrozen_poly<'tcx>(cx: &LateContext<'tcx>, body_id: BodyId, ty: Ty<'tcx>) -> bool {
+ let result = cx.tcx.const_eval_poly(body_id.hir_id.owner.to_def_id());
+ is_value_unfrozen_raw(cx, result, ty)
+}
+
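+/// Evaluates a path to a const with the substitutions at the use site and checks
+/// whether the resulting value is unfrozen (i.e. contains an `UnsafeCell`).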
+fn is_value_unfrozen_expr<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId, def_id: DefId, ty: Ty<'tcx>) -> bool {
+ let substs = cx.typeck_results().node_substs(hir_id);
+
+ let result = cx.tcx.const_eval_resolve(
+ cx.param_env,
+ ty::Unevaluated::new(ty::WithOptConstParam::unknown(def_id), substs),
+ None,
+ );
+ is_value_unfrozen_raw(cx, result, ty)
+}
+
+#[derive(Copy, Clone)]
+enum Source {
+ Item { item: Span },
+ Assoc { item: Span },
+ Expr { expr: Span },
+}
+
+impl Source {
+ #[must_use]
+ fn lint(&self) -> (&'static Lint, &'static str, Span) {
+ match self {
+ Self::Item { item } | Self::Assoc { item, .. } => (
+ DECLARE_INTERIOR_MUTABLE_CONST,
+ "a `const` item should never be interior mutable",
+ *item,
+ ),
+ Self::Expr { expr } => (
+ BORROW_INTERIOR_MUTABLE_CONST,
+ "a `const` item with interior mutability should not be borrowed",
+ *expr,
+ ),
+ }
+ }
+}
+
+fn lint(cx: &LateContext<'_>, source: Source) {
+ let (lint, msg, span) = source.lint();
+ span_lint_and_then(cx, lint, span, msg, |diag| {
+ if span.from_expansion() {
+ return; // Don't give suggestions into macros.
+ }
+ match source {
+ Source::Item { .. } => {
+ let const_kw_span = span.from_inner(InnerSpan::new(0, 5));
+ diag.span_label(const_kw_span, "make this a static item (maybe with lazy_static)");
+ },
+ Source::Assoc { .. } => (),
+ Source::Expr { .. } => {
+ diag.help("assign this const to a local or static variable, and use the variable here");
+ },
+ }
+ });
+}
+
+declare_lint_pass!(NonCopyConst => [DECLARE_INTERIOR_MUTABLE_CONST, BORROW_INTERIOR_MUTABLE_CONST]);
+
+impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, it: &'tcx Item<'_>) {
+ if let ItemKind::Const(hir_ty, body_id) = it.kind {
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ if !ignored_macro(cx, it) && is_unfrozen(cx, ty) && is_value_unfrozen_poly(cx, body_id, ty) {
+ lint(cx, Source::Item { item: it.span });
+ }
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, trait_item: &'tcx TraitItem<'_>) {
+ if let TraitItemKind::Const(hir_ty, body_id_opt) = &trait_item.kind {
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+
+ // Normalize assoc types because ones originating from generic params
+ // bounded by other traits could have their bound.
+ let normalized = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
+ if is_unfrozen(cx, normalized)
+ // When there's no default value, lint it only according to its type;
+ // in other words, lint consts whose value *could* be unfrozen, not ones that definitely are.
+ // This feels inconsistent with how the lint treats generic types,
+ // which avoids linting types that could potentially become unfrozen.
+ // One could check whether an unfrozen type has a *frozen variant*
+ // (like `body_id_opt.map_or_else(|| !has_frozen_variant(...), ...)`),
+ // and do the same as in the generic-type case at impl items.
+ // Note that it isn't sufficient to check whether the type has an enum,
+ // since all of that enum's variants can be unfrozen:
+ // i.e. having an enum doesn't necessarily mean the type has a frozen variant.
+ // Also, implementing this isn't a trivial task; it would probably end up
+ // re-implementing the trait predicate evaluation specific to `Freeze`.
+ && body_id_opt.map_or(true, |body_id| is_value_unfrozen_poly(cx, body_id, normalized))
+ {
+ lint(cx, Source::Assoc { item: trait_item.span });
+ }
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx ImplItem<'_>) {
+ if let ImplItemKind::Const(hir_ty, body_id) = &impl_item.kind {
+ let item_def_id = cx.tcx.hir().get_parent_item(impl_item.hir_id());
+ let item = cx.tcx.hir().expect_item(item_def_id);
+
+ match &item.kind {
+ ItemKind::Impl(Impl {
+ of_trait: Some(of_trait_ref),
+ ..
+ }) => {
+ if_chain! {
+ // Lint a trait impl item only when the definition is a generic type,
+ // assuming an assoc const is not meant to be an interior mutable type.
+ if let Some(of_trait_def_id) = of_trait_ref.trait_def_id();
+ if let Some(of_assoc_item) = cx
+ .tcx
+ .associated_item(impl_item.def_id)
+ .trait_item_def_id;
+ if cx
+ .tcx
+ .layout_of(cx.tcx.param_env(of_trait_def_id).and(
+ // Normalize assoc types because ones originating from generic params
+ // bounded by other traits could have their bound at the trait defs;
+ // and, in that case, the definition is *not* generic.
+ cx.tcx.normalize_erasing_regions(
+ cx.tcx.param_env(of_trait_def_id),
+ cx.tcx.type_of(of_assoc_item),
+ ),
+ ))
+ .is_err();
+ // If there were a function like `has_frozen_variant` described above,
+ // we should use it here, as a frozen variant has the potential to be frozen,
+ // similar to unknown layouts.
+ // e.g. `layout_of(...).is_err() || has_frozen_variant(...);`
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ let normalized = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
+ if is_unfrozen(cx, normalized);
+ if is_value_unfrozen_poly(cx, *body_id, normalized);
+ then {
+ lint(
+ cx,
+ Source::Assoc {
+ item: impl_item.span,
+ },
+ );
+ }
+ }
+ },
+ ItemKind::Impl(Impl { of_trait: None, .. }) => {
+ let ty = hir_ty_to_ty(cx.tcx, hir_ty);
+ // Normalize assoc types originating from generic params.
+ let normalized = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
+
+ if is_unfrozen(cx, ty) && is_value_unfrozen_poly(cx, *body_id, normalized) {
+ lint(cx, Source::Assoc { item: impl_item.span });
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Path(qpath) = &expr.kind {
+ // Only lint if we use the const item inside a function.
+ if in_constant(cx, expr.hir_id) {
+ return;
+ }
+
+ // Make sure it is a const item.
+ let item_def_id = match cx.qpath_res(qpath, expr.hir_id) {
+ Res::Def(DefKind::Const | DefKind::AssocConst, did) => did,
+ _ => return,
+ };
+
+ // Climb up to resolve any field access and explicit referencing.
+ let mut cur_expr = expr;
+ let mut dereferenced_expr = expr;
+ let mut needs_check_adjustment = true;
+ loop {
+ let parent_id = cx.tcx.hir().get_parent_node(cur_expr.hir_id);
+ if parent_id == cur_expr.hir_id {
+ break;
+ }
+ if let Some(Node::Expr(parent_expr)) = cx.tcx.hir().find(parent_id) {
+ match &parent_expr.kind {
+ ExprKind::AddrOf(..) => {
+ // `&e` => `e` must be referenced.
+ needs_check_adjustment = false;
+ },
+ ExprKind::Field(..) => {
+ needs_check_adjustment = true;
+
+ // Check whether implicit dereferences happened;
+ // if so, no need to go further up
+ // because of the same reason as the `ExprKind::Unary` case.
+ if cx
+ .typeck_results()
+ .expr_adjustments(dereferenced_expr)
+ .iter()
+ .any(|adj| matches!(adj.kind, Adjust::Deref(_)))
+ {
+ break;
+ }
+
+ dereferenced_expr = parent_expr;
+ },
+ ExprKind::Index(e, _) if ptr::eq(&**e, cur_expr) => {
+ // `e[i]` => desugared to `*Index::index(&e, i)`,
+ // meaning `e` must be referenced.
+ // no need to go further up since a method call is involved now.
+ needs_check_adjustment = false;
+ break;
+ },
+ ExprKind::Unary(UnOp::Deref, _) => {
+ // `*e` => desugared to `*Deref::deref(&e)`,
+ // meaning `e` must be referenced.
+ // no need to go further up since a method call is involved now.
+ needs_check_adjustment = false;
+ break;
+ },
+ _ => break,
+ }
+ cur_expr = parent_expr;
+ } else {
+ break;
+ }
+ }
+
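+ // Pick the type to check for interior mutability: the type just before the first
+ // borrow/deref adjustment, i.e. the part of the const that actually gets referenced.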
+ let ty = if needs_check_adjustment {
+ let adjustments = cx.typeck_results().expr_adjustments(dereferenced_expr);
+ if let Some(i) = adjustments
+ .iter()
+ .position(|adj| matches!(adj.kind, Adjust::Borrow(_) | Adjust::Deref(_)))
+ {
+ if i == 0 {
+ cx.typeck_results().expr_ty(dereferenced_expr)
+ } else {
+ adjustments[i - 1].target
+ }
+ } else {
+ // No borrow adjustments means the entire const is moved.
+ return;
+ }
+ } else {
+ cx.typeck_results().expr_ty(dereferenced_expr)
+ };
+
+ if is_unfrozen(cx, ty) && is_value_unfrozen_expr(cx, expr.hir_id, item_def_id, ty) {
+ lint(cx, Source::Expr { expr: expr.span });
+ }
+ }
+ }
+}
+
+fn ignored_macro(cx: &LateContext<'_>, it: &rustc_hir::Item<'_>) -> bool {
+ macro_backtrace(it.span).any(|macro_call| {
+ matches!(
+ cx.tcx.get_diagnostic_name(macro_call.def_id),
+ Some(sym::thread_local_macro)
+ )
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
new file mode 100644
index 000000000..b96af06b8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
@@ -0,0 +1,427 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
+use rustc_ast::ast::{
+ self, Arm, AssocItem, AssocItemKind, Attribute, Block, FnDecl, Item, ItemKind, Local, Pat, PatKind,
+};
+use rustc_ast::visit::{walk_block, walk_expr, walk_pat, Visitor};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+use rustc_span::symbol::{Ident, Symbol};
+use std::cmp::Ordering;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for names that are very similar and thus confusing.
+ ///
+ /// ### Why is this bad?
+ /// It's hard to distinguish between names that differ only
+ /// by a single character.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let checked_exp = something;
+ /// let checked_expr = something_else;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SIMILAR_NAMES,
+ pedantic,
+ "similarly named items and bindings"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for too many variables whose name consists of a
+ /// single character.
+ ///
+ /// ### Why is this bad?
+ /// It's hard to memorize what a variable means without a
+ /// descriptive name.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let (a, b, c, d, e, f, g) = (...);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MANY_SINGLE_CHAR_NAMES,
+ pedantic,
+ "too many single character bindings"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks if you have variables whose name consists of just
+ /// underscores and digits.
+ ///
+ /// ### Why is this bad?
+ /// It's hard to memorize what a variable means without a
+ /// descriptive name.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let _1 = 1;
+ /// let ___1 = 1;
+ /// let __1___2 = 11;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub JUST_UNDERSCORES_AND_DIGITS,
+ style,
+ "unclear name"
+}
+
+#[derive(Copy, Clone)]
+pub struct NonExpressiveNames {
+ pub single_char_binding_names_threshold: u64,
+}
+
+impl_lint_pass!(NonExpressiveNames => [SIMILAR_NAMES, MANY_SINGLE_CHAR_NAMES, JUST_UNDERSCORES_AND_DIGITS]);
+
+struct ExistingName {
+ interned: Symbol,
+ span: Span,
+ len: usize,
+ exemptions: &'static [&'static str],
+}
+
+struct SimilarNamesLocalVisitor<'a, 'tcx> {
+ names: Vec<ExistingName>,
+ cx: &'a EarlyContext<'tcx>,
+ lint: &'a NonExpressiveNames,
+
+ /// A stack of scopes containing the single-character bindings in each scope.
+ single_char_names: Vec<Vec<Ident>>,
+}
+
+impl<'a, 'tcx> SimilarNamesLocalVisitor<'a, 'tcx> {
+ fn check_single_char_names(&self) {
+ let num_single_char_names = self.single_char_names.iter().flatten().count();
+ let threshold = self.lint.single_char_binding_names_threshold;
+ if num_single_char_names as u64 > threshold {
+ let span = self
+ .single_char_names
+ .iter()
+ .flatten()
+ .map(|ident| ident.span)
+ .collect::<Vec<_>>();
+ span_lint(
+ self.cx,
+ MANY_SINGLE_CHAR_NAMES,
+ span,
+ &format!(
+ "{} bindings with single-character names in scope",
+ num_single_char_names
+ ),
+ );
+ }
+ }
+}
+
+// this list contains lists of names that are allowed to be similar
+// the assumption is that no name is ever contained in multiple lists.
+#[rustfmt::skip]
+const ALLOWED_TO_BE_SIMILAR: &[&[&str]] = &[
+ &["parsed", "parser"],
+ &["lhs", "rhs"],
+ &["tx", "rx"],
+ &["set", "get"],
+ &["args", "arms"],
+ &["qpath", "path"],
+ &["lit", "lint"],
+ &["wparam", "lparam"],
+ &["iter", "item"],
+];
+
+struct SimilarNamesNameVisitor<'a, 'tcx, 'b>(&'b mut SimilarNamesLocalVisitor<'a, 'tcx>);
+
+impl<'a, 'tcx, 'b> Visitor<'tcx> for SimilarNamesNameVisitor<'a, 'tcx, 'b> {
+ fn visit_pat(&mut self, pat: &'tcx Pat) {
+ match pat.kind {
+ PatKind::Ident(_, ident, _) => {
+ if !pat.span.from_expansion() {
+ self.check_ident(ident);
+ }
+ },
+ PatKind::Struct(_, _, ref fields, _) => {
+ for field in fields {
+ if !field.is_shorthand {
+ self.visit_pat(&field.pat);
+ }
+ }
+ },
+ // just go through the first pattern, as either all patterns
+ // bind the same bindings or rustc would have errored much earlier
+ PatKind::Or(ref pats) => self.visit_pat(&pats[0]),
+ _ => walk_pat(self, pat),
+ }
+ }
+}
+
+#[must_use]
+fn get_exemptions(interned_name: &str) -> Option<&'static [&'static str]> {
+ ALLOWED_TO_BE_SIMILAR
+ .iter()
+ .find(|&&list| allowed_to_be_similar(interned_name, list))
+ .copied()
+}
+
+#[must_use]
+fn allowed_to_be_similar(interned_name: &str, list: &[&str]) -> bool {
+ list.iter()
+ .any(|&name| interned_name.starts_with(name) || interned_name.ends_with(name))
+}
+
+impl<'a, 'tcx, 'b> SimilarNamesNameVisitor<'a, 'tcx, 'b> {
+ fn check_short_ident(&mut self, ident: Ident) {
+ // Ignore shadowing
+ if self
+ .0
+ .single_char_names
+ .iter()
+ .flatten()
+ .any(|id| id.name == ident.name)
+ {
+ return;
+ }
+
+ if let Some(scope) = &mut self.0.single_char_names.last_mut() {
+ scope.push(ident);
+ }
+ }
+
+ #[expect(clippy::too_many_lines)]
+ fn check_ident(&mut self, ident: Ident) {
+ let interned_name = ident.name.as_str();
+ if interned_name.chars().any(char::is_uppercase) {
+ return;
+ }
+ if interned_name.chars().all(|c| c.is_ascii_digit() || c == '_') {
+ span_lint(
+ self.0.cx,
+ JUST_UNDERSCORES_AND_DIGITS,
+ ident.span,
+ "consider choosing a more descriptive name",
+ );
+ return;
+ }
+ if interned_name.starts_with('_') {
+ // these bindings are typically unused or represent an ignored portion of a destructuring pattern
+ return;
+ }
+ let count = interned_name.chars().count();
+ if count < 3 {
+ if count == 1 {
+ self.check_short_ident(ident);
+ }
+ return;
+ }
+ for existing_name in &self.0.names {
+ if allowed_to_be_similar(interned_name, existing_name.exemptions) {
+ continue;
+ }
+ match existing_name.len.cmp(&count) {
+ Ordering::Greater => {
+ if existing_name.len - count != 1
+ || levenstein_not_1(interned_name, existing_name.interned.as_str())
+ {
+ continue;
+ }
+ },
+ Ordering::Less => {
+ if count - existing_name.len != 1
+ || levenstein_not_1(existing_name.interned.as_str(), interned_name)
+ {
+ continue;
+ }
+ },
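+ // Equal-length names are flagged when they differ in exactly one character
+ // (digits are treated as interchangeable), unless the differing character sits
+ // next to an underscore separator (e.g. `foo_x`/`foo_y` or `x_foo`/`y_foo` are allowed).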
+ Ordering::Equal => {
+ let mut interned_chars = interned_name.chars();
+ let interned_str = existing_name.interned.as_str();
+ let mut existing_chars = interned_str.chars();
+ let first_i = interned_chars.next().expect("we know we have at least one char");
+ let first_e = existing_chars.next().expect("we know we have at least one char");
+ let eq_or_numeric = |(a, b): (char, char)| a == b || a.is_numeric() && b.is_numeric();
+
+ if eq_or_numeric((first_i, first_e)) {
+ let last_i = interned_chars.next_back().expect("we know we have at least two chars");
+ let last_e = existing_chars.next_back().expect("we know we have at least two chars");
+ if eq_or_numeric((last_i, last_e)) {
+ if interned_chars
+ .zip(existing_chars)
+ .filter(|&ie| !eq_or_numeric(ie))
+ .count()
+ != 1
+ {
+ continue;
+ }
+ } else {
+ let second_last_i = interned_chars
+ .next_back()
+ .expect("we know we have at least three chars");
+ let second_last_e = existing_chars
+ .next_back()
+ .expect("we know we have at least three chars");
+ if !eq_or_numeric((second_last_i, second_last_e))
+ || second_last_i == '_'
+ || !interned_chars.zip(existing_chars).all(eq_or_numeric)
+ {
+ // allowed similarity foo_x, foo_y
+ // or too many chars differ (foo_x, boo_y) or (foox, booy)
+ continue;
+ }
+ }
+ } else {
+ let second_i = interned_chars.next().expect("we know we have at least two chars");
+ let second_e = existing_chars.next().expect("we know we have at least two chars");
+ if !eq_or_numeric((second_i, second_e))
+ || second_i == '_'
+ || !interned_chars.zip(existing_chars).all(eq_or_numeric)
+ {
+ // allowed similarity x_foo, y_foo
+ // or too many chars differ (x_foo, y_boo) or (xfoo, yboo)
+ continue;
+ }
+ }
+ },
+ }
+ span_lint_and_then(
+ self.0.cx,
+ SIMILAR_NAMES,
+ ident.span,
+ "binding's name is too similar to existing binding",
+ |diag| {
+ diag.span_note(existing_name.span, "existing binding defined here");
+ },
+ );
+ return;
+ }
+ self.0.names.push(ExistingName {
+ exemptions: get_exemptions(interned_name).unwrap_or(&[]),
+ interned: ident.name,
+ span: ident.span,
+ len: count,
+ });
+ }
+}
+
+impl<'a, 'b> SimilarNamesLocalVisitor<'a, 'b> {
+ /// ensure scoping rules work
+ fn apply<F: for<'c> Fn(&'c mut Self)>(&mut self, f: F) {
+ let n = self.names.len();
+ let single_char_count = self.single_char_names.len();
+ f(self);
+ self.names.truncate(n);
+ self.single_char_names.truncate(single_char_count);
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for SimilarNamesLocalVisitor<'a, 'tcx> {
+ fn visit_local(&mut self, local: &'tcx Local) {
+ if let Some((init, els)) = &local.kind.init_else_opt() {
+ self.apply(|this| walk_expr(this, init));
+ if let Some(els) = els {
+ self.apply(|this| walk_block(this, els));
+ }
+ }
+ // add the pattern after the expression because the bindings aren't available
+ // yet in the init expression
+ SimilarNamesNameVisitor(self).visit_pat(&local.pat);
+ }
+ fn visit_block(&mut self, blk: &'tcx Block) {
+ self.single_char_names.push(vec![]);
+
+ self.apply(|this| walk_block(this, blk));
+
+ self.check_single_char_names();
+ self.single_char_names.pop();
+ }
+ fn visit_arm(&mut self, arm: &'tcx Arm) {
+ self.single_char_names.push(vec![]);
+
+ self.apply(|this| {
+ SimilarNamesNameVisitor(this).visit_pat(&arm.pat);
+ this.apply(|this| walk_expr(this, &arm.body));
+ });
+
+ self.check_single_char_names();
+ self.single_char_names.pop();
+ }
+ fn visit_item(&mut self, _: &Item) {
+ // do not recurse into inner items
+ }
+}
+
+impl EarlyLintPass for NonExpressiveNames {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if in_external_macro(cx.sess(), item.span) {
+ return;
+ }
+
+ if let ItemKind::Fn(box ast::Fn {
+ ref sig,
+ body: Some(ref blk),
+ ..
+ }) = item.kind
+ {
+ do_check(self, cx, &item.attrs, &sig.decl, blk);
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &EarlyContext<'_>, item: &AssocItem) {
+ if in_external_macro(cx.sess(), item.span) {
+ return;
+ }
+
+ if let AssocItemKind::Fn(box ast::Fn {
+ ref sig,
+ body: Some(ref blk),
+ ..
+ }) = item.kind
+ {
+ do_check(self, cx, &item.attrs, &sig.decl, blk);
+ }
+ }
+}
+
+fn do_check(lint: &mut NonExpressiveNames, cx: &EarlyContext<'_>, attrs: &[Attribute], decl: &FnDecl, blk: &Block) {
+ if !attrs.iter().any(|attr| attr.has_name(sym::test)) {
+ let mut visitor = SimilarNamesLocalVisitor {
+ names: Vec::new(),
+ cx,
+ lint,
+ single_char_names: vec![vec![]],
+ };
+
+ // initialize with function arguments
+ for arg in &decl.inputs {
+ SimilarNamesNameVisitor(&mut visitor).visit_pat(&arg.pat);
+ }
+ // walk all other bindings
+ walk_block(&mut visitor, blk);
+
+ visitor.check_single_char_names();
+ }
+}
+
+/// Precondition: `a_name.chars().count() < b_name.chars().count()`.
+#[must_use]
+fn levenstein_not_1(a_name: &str, b_name: &str) -> bool {
+ debug_assert!(a_name.chars().count() < b_name.chars().count());
+ let mut a_chars = a_name.chars();
+ let mut b_chars = b_name.chars();
+ while let (Some(a), Some(b)) = (a_chars.next(), b_chars.next()) {
+ if a == b {
+ continue;
+ }
+ if let Some(b2) = b_chars.next() {
+ // check if there's just one character inserted
+ return a != b2 || a_chars.ne(b_chars);
+ }
+ // tuple
+ // ntuple
+ return true;
+ }
+ // for item in items
+ true
+}
diff --git a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
new file mode 100644
index 000000000..ed022b9d5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs
@@ -0,0 +1,106 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{snippet_opt, snippet_with_applicability};
+use clippy_utils::ty::match_type;
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for non-octal values used to set Unix file permissions.
+ ///
+ /// ### Why is this bad?
+ /// They will be converted into octal, creating potentially
+ /// unintended file permissions.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use std::fs::OpenOptions;
+ /// use std::os::unix::fs::OpenOptionsExt;
+ ///
+ /// let mut options = OpenOptions::new();
+ /// options.mode(644);
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// use std::fs::OpenOptions;
+ /// use std::os::unix::fs::OpenOptionsExt;
+ ///
+ /// let mut options = OpenOptions::new();
+ /// options.mode(0o644);
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub NON_OCTAL_UNIX_PERMISSIONS,
+ correctness,
+ "use of non-octal value to set unix file permissions, which will be translated into octal"
+}
+
+declare_lint_pass!(NonOctalUnixPermissions => [NON_OCTAL_UNIX_PERMISSIONS]);
+
+impl<'tcx> LateLintPass<'tcx> for NonOctalUnixPermissions {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ match &expr.kind {
+ ExprKind::MethodCall(path, [func, param], _) => {
+ let obj_ty = cx.typeck_results().expr_ty(func).peel_refs();
+
+ if_chain! {
+ if (path.ident.name == sym!(mode)
+ && (match_type(cx, obj_ty, &paths::OPEN_OPTIONS)
+ || match_type(cx, obj_ty, &paths::DIR_BUILDER)))
+ || (path.ident.name == sym!(set_mode) && match_type(cx, obj_ty, &paths::PERMISSIONS));
+ if let ExprKind::Lit(_) = param.kind;
+
+ then {
+ let snip = match snippet_opt(cx, param.span) {
+ Some(s) => s,
+ _ => return,
+ };
+
+ if !snip.starts_with("0o") {
+ show_error(cx, param);
+ }
+ }
+ }
+ },
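+ // standalone constructor calls, e.g. `PermissionsExt::from_mode(0o644)`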
+ ExprKind::Call(func, [param]) => {
+ if_chain! {
+ if let ExprKind::Path(ref path) = func.kind;
+ if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id();
+ if match_def_path(cx, def_id, &paths::PERMISSIONS_FROM_MODE);
+ if let ExprKind::Lit(_) = param.kind;
+
+ then {
+ let snip = match snippet_opt(cx, param.span) {
+ Some(s) => s,
+ _ => return,
+ };
+
+ if !snip.starts_with("0o") {
+ show_error(cx, param);
+ }
+ }
+ }
+ },
+ _ => {},
+ };
+ }
+}
+
+fn show_error(cx: &LateContext<'_>, param: &Expr<'_>) {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ NON_OCTAL_UNIX_PERMISSIONS,
+ param.span,
+ "using a non-octal value to set unix file permissions",
+ "consider using an octal literal instead",
+ format!(
+ "0o{}",
+ snippet_with_applicability(cx, param.span, "0o..", &mut applicability,),
+ ),
+ applicability,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
new file mode 100644
index 000000000..ddef7352d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
@@ -0,0 +1,251 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::{implements_trait, is_copy};
+use clippy_utils::{is_lint_allowed, match_def_path, paths};
+use rustc_ast::ImplPolarity;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{FieldDef, Item, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{self, subst::GenericArgKind, Ty};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns about a `Send` implementation for a type that
+ /// contains fields that are not safe to be sent across threads.
+ /// It tries to detect fields that can cause a soundness issue
+ /// when sent to another thread (e.g., `Rc`) while allowing `!Send` fields
+ /// that are expected to exist in a `Send` type, such as raw pointers.
+ ///
+ /// ### Why is this bad?
+ /// Sending the struct to another thread effectively sends all of its fields,
+ /// and the fields that do not implement `Send` can lead to soundness bugs
+ /// such as data races when accessed in a thread
+ /// that is different from the thread that created it.
+ ///
+ /// See:
+ /// * [*The Rustonomicon* about *Send and Sync*](https://doc.rust-lang.org/nomicon/send-and-sync.html)
+ /// * [The documentation of `Send`](https://doc.rust-lang.org/std/marker/trait.Send.html)
+ ///
+ /// ### Known Problems
+ /// This lint relies on heuristics to distinguish types that are actually
+ /// unsafe to be sent across threads and `!Send` types that are expected to
+ /// exist in a `Send` type. Its rule can filter out basic cases such as
+ /// `Vec<*const T>`, but it's not perfect. Feel free to create an issue if
+ /// you have a suggestion on how this heuristic can be improved.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// struct ExampleStruct<T> {
+ /// rc_is_not_send: Rc<String>,
+ /// unbounded_generic_field: T,
+ /// }
+ ///
+ /// // This impl is unsound because it allows sending `!Send` types through `ExampleStruct`
+ /// unsafe impl<T> Send for ExampleStruct<T> {}
+ /// ```
+ /// Use thread-safe types like [`std::sync::Arc`](https://doc.rust-lang.org/std/sync/struct.Arc.html)
+ /// or specify correct bounds on generic type parameters (`T: Send`).
+ #[clippy::version = "1.57.0"]
+ pub NON_SEND_FIELDS_IN_SEND_TY,
+ nursery,
+ "there is a field that is not safe to be sent to another thread in a `Send` struct"
+}
+
+#[derive(Copy, Clone)]
+pub struct NonSendFieldInSendTy {
+ enable_raw_pointer_heuristic: bool,
+}
+
+impl NonSendFieldInSendTy {
+ pub fn new(enable_raw_pointer_heuristic: bool) -> Self {
+ Self {
+ enable_raw_pointer_heuristic,
+ }
+ }
+}
+
+impl_lint_pass!(NonSendFieldInSendTy => [NON_SEND_FIELDS_IN_SEND_TY]);
+
+impl<'tcx> LateLintPass<'tcx> for NonSendFieldInSendTy {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ let ty_allowed_in_send = if self.enable_raw_pointer_heuristic {
+ ty_allowed_with_raw_pointer_heuristic
+ } else {
+ ty_allowed_without_raw_pointer_heuristic
+ };
+
+ // Checks if we are in `Send` impl item.
+ // We start from `Send` impl instead of `check_field_def()` because
+ // single `AdtDef` may have multiple `Send` impls due to generic
+ // parameters, and the lint is much easier to implement in this way.
+ if_chain! {
+ if !in_external_macro(cx.tcx.sess, item.span);
+ if let Some(send_trait) = cx.tcx.get_diagnostic_item(sym::Send);
+ if let ItemKind::Impl(hir_impl) = &item.kind;
+ if let Some(trait_ref) = &hir_impl.of_trait;
+ if let Some(trait_id) = trait_ref.trait_def_id();
+ if send_trait == trait_id;
+ if hir_impl.polarity == ImplPolarity::Positive;
+ if let Some(ty_trait_ref) = cx.tcx.impl_trait_ref(item.def_id);
+ if let self_ty = ty_trait_ref.self_ty();
+ if let ty::Adt(adt_def, impl_trait_substs) = self_ty.kind();
+ then {
+ let mut non_send_fields = Vec::new();
+
+ let hir_map = cx.tcx.hir();
+ for variant in adt_def.variants() {
+ for field in &variant.fields {
+ if_chain! {
+ if let Some(field_hir_id) = field
+ .did
+ .as_local()
+ .map(|local_def_id| hir_map.local_def_id_to_hir_id(local_def_id));
+ if !is_lint_allowed(cx, NON_SEND_FIELDS_IN_SEND_TY, field_hir_id);
+ if let field_ty = field.ty(cx.tcx, impl_trait_substs);
+ if !ty_allowed_in_send(cx, field_ty, send_trait);
+ if let Node::Field(field_def) = hir_map.get(field_hir_id);
+ then {
+ non_send_fields.push(NonSendField {
+ def: field_def,
+ ty: field_ty,
+ generic_params: collect_generic_params(field_ty),
+ })
+ }
+ }
+ }
+ }
+
+ if !non_send_fields.is_empty() {
+ span_lint_and_then(
+ cx,
+ NON_SEND_FIELDS_IN_SEND_TY,
+ item.span,
+ &format!(
+ "some fields in `{}` are not safe to be sent to another thread",
+ snippet(cx, hir_impl.self_ty.span, "Unknown")
+ ),
+ |diag| {
+ for field in non_send_fields {
+ diag.span_note(
+ field.def.span,
+ &format!("it is not safe to send field `{}` to another thread", field.def.ident.name),
+ );
+
+ match field.generic_params.len() {
+ 0 => diag.help("use a thread-safe type that implements `Send`"),
+ 1 if is_ty_param(field.ty) => diag.help(&format!("add `{}: Send` bound in `Send` impl", field.ty)),
+ _ => diag.help(&format!(
+ "add bounds on type parameter{} `{}` that satisfy `{}: Send`",
+ if field.generic_params.len() > 1 { "s" } else { "" },
+ field.generic_params_string(),
+ snippet(cx, field.def.ty.span, "Unknown"),
+ )),
+ };
+ }
+ },
+ );
+ }
+ }
+ }
+ }
+}
+
+struct NonSendField<'tcx> {
+ def: &'tcx FieldDef<'tcx>,
+ ty: Ty<'tcx>,
+ generic_params: Vec<Ty<'tcx>>,
+}
+
+impl<'tcx> NonSendField<'tcx> {
+ fn generic_params_string(&self) -> String {
+ self.generic_params
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ .join(", ")
+ }
+}
+
+/// Given a type, collect all of its generic parameters.
+/// Example: `MyStruct<P, Box<Q, R>>` => `vec![P, Q, R]`
+fn collect_generic_params(ty: Ty<'_>) -> Vec<Ty<'_>> {
+ ty.walk()
+ .filter_map(|inner| match inner.unpack() {
+ GenericArgKind::Type(inner_ty) => Some(inner_ty),
+ _ => None,
+ })
+ .filter(|&inner_ty| is_ty_param(inner_ty))
+ .collect()
+}
+
+/// Be more strict when the heuristic is disabled
+fn ty_allowed_without_raw_pointer_heuristic<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, send_trait: DefId) -> bool {
+ if implements_trait(cx, ty, send_trait, &[]) {
+ return true;
+ }
+
+ if is_copy(cx, ty) && !contains_pointer_like(cx, ty) {
+ return true;
+ }
+
+ false
+}
+
+/// Heuristic to allow cases like `Vec<*const u8>`
+fn ty_allowed_with_raw_pointer_heuristic<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, send_trait: DefId) -> bool {
+ if implements_trait(cx, ty, send_trait, &[]) || is_copy(cx, ty) {
+ return true;
+ }
+
+ // The type is known to be `!Send` and `!Copy`
+ match ty.kind() {
+ ty::Tuple(fields) => fields
+ .iter()
+ .all(|ty| ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait)),
+ ty::Array(ty, _) | ty::Slice(ty) => ty_allowed_with_raw_pointer_heuristic(cx, *ty, send_trait),
+ ty::Adt(_, substs) => {
+ if contains_pointer_like(cx, ty) {
+ // descends only if ADT contains any raw pointers
+ substs.iter().all(|generic_arg| match generic_arg.unpack() {
+ GenericArgKind::Type(ty) => ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait),
+ // Lifetimes and const generics are not a solid part of the ADT and are ignored
+ GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => true,
+ })
+ } else {
+ false
+ }
+ },
+ // Raw pointers are `!Send` but allowed by the heuristic
+ ty::RawPtr(_) => true,
+ _ => false,
+ }
+}
+
+/// Checks if the type contains any pointer-like types in substs (including nested ones)
+fn contains_pointer_like<'tcx>(cx: &LateContext<'tcx>, target_ty: Ty<'tcx>) -> bool {
+ for ty_node in target_ty.walk() {
+ if let GenericArgKind::Type(inner_ty) = ty_node.unpack() {
+ match inner_ty.kind() {
+ ty::RawPtr(_) => {
+ return true;
+ },
+ ty::Adt(adt_def, _) => {
+ if match_def_path(cx, adt_def.did(), &paths::PTR_NON_NULL) {
+ return true;
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+
+ false
+}
+
+/// Returns `true` if the type is a type parameter such as `T`.
+fn is_ty_param(target_ty: Ty<'_>) -> bool {
+ matches!(target_ty.kind(), ty::Param(_))
+}
diff --git a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
new file mode 100644
index 000000000..4722c0310
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
@@ -0,0 +1,282 @@
+use std::{
+ fmt,
+ hash::{Hash, Hasher},
+};
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet_opt;
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir::def_id::DefId;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::hygiene::{ExpnKind, MacroKind};
+use rustc_span::{Span, Symbol};
+use serde::{de, Deserialize};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks that common macros are used with consistent bracing.
+ ///
+ /// ### Why is this bad?
+ /// This is mostly a consistency lint although using () or []
+ /// doesn't give you a semicolon in item position, which can be unexpected.
+ ///
+ /// ### Example
+ /// ```rust
+ /// vec!{1, 2, 3};
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// vec![1, 2, 3];
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub NONSTANDARD_MACRO_BRACES,
+ nursery,
+ "check consistent use of braces in macro"
+}
+
+const BRACES: &[(&str, &str)] = &[("(", ")"), ("{", "}"), ("[", "]")];
+
+/// The (name, (open brace, close brace), source snippet)
+type MacroInfo<'a> = (Symbol, &'a (String, String), String);
+
+#[derive(Clone, Debug, Default)]
+pub struct MacroBraces {
+ macro_braces: FxHashMap<String, (String, String)>,
+ done: FxHashSet<Span>,
+}
+
+impl MacroBraces {
+ pub fn new(conf: &FxHashSet<MacroMatcher>) -> Self {
+ let macro_braces = macro_braces(conf.clone());
+ Self {
+ macro_braces,
+ done: FxHashSet::default(),
+ }
+ }
+}
+
+impl_lint_pass!(MacroBraces => [NONSTANDARD_MACRO_BRACES]);
+
+impl EarlyLintPass for MacroBraces {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) {
+ if let Some((name, braces, snip)) = is_offending_macro(cx, item.span, self) {
+ let span = item.span.ctxt().outer_expn_data().call_site;
+ emit_help(cx, snip, braces, name, span);
+ self.done.insert(span);
+ }
+ }
+
+ fn check_stmt(&mut self, cx: &EarlyContext<'_>, stmt: &ast::Stmt) {
+ if let Some((name, braces, snip)) = is_offending_macro(cx, stmt.span, self) {
+ let span = stmt.span.ctxt().outer_expn_data().call_site;
+ emit_help(cx, snip, braces, name, span);
+ self.done.insert(span);
+ }
+ }
+
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
+ if let Some((name, braces, snip)) = is_offending_macro(cx, expr.span, self) {
+ let span = expr.span.ctxt().outer_expn_data().call_site;
+ emit_help(cx, snip, braces, name, span);
+ self.done.insert(span);
+ }
+ }
+
+ fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) {
+ if let Some((name, braces, snip)) = is_offending_macro(cx, ty.span, self) {
+ let span = ty.span.ctxt().outer_expn_data().call_site;
+ emit_help(cx, snip, braces, name, span);
+ self.done.insert(span);
+ }
+ }
+}
+
+fn is_offending_macro<'a>(cx: &EarlyContext<'_>, span: Span, mac_braces: &'a MacroBraces) -> Option<MacroInfo<'a>> {
+ let unnested_or_local = || {
+ !span.ctxt().outer_expn_data().call_site.from_expansion()
+ || span
+ .macro_backtrace()
+ .last()
+ .map_or(false, |e| e.macro_def_id.map_or(false, DefId::is_local))
+ };
+ if_chain! {
+ if let ExpnKind::Macro(MacroKind::Bang, mac_name) = span.ctxt().outer_expn_data().kind;
+ let name = mac_name.as_str();
+ if let Some(braces) = mac_braces.macro_braces.get(name);
+ if let Some(snip) = snippet_opt(cx, span.ctxt().outer_expn_data().call_site);
+ // we must check only invocation sites
+ // https://github.com/rust-lang/rust-clippy/issues/7422
+ if snip.starts_with(&format!("{}!", name));
+ if unnested_or_local();
+ // make formatting consistent
+ let c = snip.replace(' ', "");
+ if !c.starts_with(&format!("{}!{}", name, braces.0));
+ if !mac_braces.done.contains(&span.ctxt().outer_expn_data().call_site);
+ then {
+ Some((mac_name, braces, snip))
+ } else {
+ None
+ }
+ }
+}
+
+fn emit_help(cx: &EarlyContext<'_>, snip: String, braces: &(String, String), name: Symbol, span: Span) {
+ let with_space = &format!("! {}", braces.0);
+ let without_space = &format!("!{}", braces.0);
+ let mut help = snip;
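+ // rewrite the snippet to use this macro's preferred braces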
+ for b in BRACES.iter().filter(|b| b.0 != braces.0) {
+ help = help.replace(b.0, &braces.0).replace(b.1, &braces.1);
+ // Only `{` traditionally has space before the brace
+ if braces.0 != "{" && help.contains(with_space) {
+ help = help.replace(with_space, without_space);
+ } else if braces.0 == "{" && help.contains(without_space) {
+ help = help.replace(without_space, with_space);
+ }
+ }
+ span_lint_and_help(
+ cx,
+ NONSTANDARD_MACRO_BRACES,
+ span,
+ &format!("use of irregular braces for `{}!` macro", name),
+ Some(span),
+ &format!("consider writing `{}`", help),
+ );
+}
+
+fn macro_braces(conf: FxHashSet<MacroMatcher>) -> FxHashMap<String, (String, String)> {
+ let mut braces = vec![
+ macro_matcher!(
+ name: "print",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "println",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "eprint",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "eprintln",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "write",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "writeln",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "format",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "format_args",
+ braces: ("(", ")"),
+ ),
+ macro_matcher!(
+ name: "vec",
+ braces: ("[", "]"),
+ ),
+ ]
+ .into_iter()
+ .collect::<FxHashMap<_, _>>();
+ // We want users' items to override any existing items
+ for it in conf {
+ braces.insert(it.name, it.braces);
+ }
+ braces
+}
+
+macro_rules! macro_matcher {
+ (name: $name:expr, braces: ($open:expr, $close:expr) $(,)?) => {
+ ($name.to_owned(), ($open.to_owned(), $close.to_owned()))
+ };
+}
+pub(crate) use macro_matcher;
+
+#[derive(Clone, Debug)]
+pub struct MacroMatcher {
+ name: String,
+ braces: (String, String),
+}
+
+impl Hash for MacroMatcher {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.name.hash(state);
+ }
+}
+
+impl PartialEq for MacroMatcher {
+ fn eq(&self, other: &Self) -> bool {
+ self.name == other.name
+ }
+}
+impl Eq for MacroMatcher {}
+
+impl<'de> Deserialize<'de> for MacroMatcher {
+ fn deserialize<D>(deser: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ #[derive(Deserialize)]
+ #[serde(field_identifier, rename_all = "lowercase")]
+ enum Field {
+ Name,
+ Brace,
+ }
+ struct MacVisitor;
+ impl<'de> de::Visitor<'de> for MacVisitor {
+ type Value = MacroMatcher;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("struct MacroMatcher")
+ }
+
+ fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
+ where
+ V: de::MapAccess<'de>,
+ {
+ let mut name = None;
+ let mut brace: Option<&str> = None;
+ while let Some(key) = map.next_key()? {
+ match key {
+ Field::Name => {
+ if name.is_some() {
+ return Err(de::Error::duplicate_field("name"));
+ }
+ name = Some(map.next_value()?);
+ },
+ Field::Brace => {
+ if brace.is_some() {
+ return Err(de::Error::duplicate_field("brace"));
+ }
+ brace = Some(map.next_value()?);
+ },
+ }
+ }
+ let name = name.ok_or_else(|| de::Error::missing_field("name"))?;
+ let brace = brace.ok_or_else(|| de::Error::missing_field("brace"))?;
+ Ok(MacroMatcher {
+ name,
+ braces: BRACES
+ .iter()
+ .find(|b| b.0 == brace)
+ .map(|(o, c)| ((*o).to_owned(), (*c).to_owned()))
+ .ok_or_else(|| {
+ de::Error::custom(&format!("expected one of `(`, `{{`, `[` found `{}`", brace))
+ })?,
+ })
+ }
+ }
+
+ const FIELDS: &[&str] = &["name", "brace"];
+ deser.deserialize_struct("MacroMatcher", FIELDS, MacVisitor)
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/octal_escapes.rs b/src/tools/clippy/clippy_lints/src/octal_escapes.rs
new file mode 100644
index 000000000..6ad6837f0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/octal_escapes.rs
@@ -0,0 +1,151 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_ast::token::{Lit, LitKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+use std::fmt::Write;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `\0` escapes in string and byte literals that look like octal
+ /// character escapes in C.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// C and other languages support octal character escapes in strings, where
+ /// a backslash is followed by up to three octal digits. For example, `\033`
+ /// stands for the ASCII character 27 (ESC). Rust does not support this
+ /// notation, but has the escape code `\0` which stands for a null
+ /// byte/character, and any following digits do not form part of the escape
+ /// sequence. Therefore, `\033` is not a compiler error but the result may
+ /// be surprising.
+ ///
+ /// ### Known problems
+ /// The actual meaning can be the intended one. `\x00` can be used in these
+ /// cases to be unambiguous.
+ ///
+ /// The lint does not trigger for format strings in `print!()`, `write!()`
+ /// and friends since the string is already preprocessed when Clippy lints
+ /// can see it.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let one = "\033[1m Bold? \033[0m"; // \033 intended as escape
+ /// let two = "\033\0"; // \033 intended as null-3-3
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let one = "\x1b[1mWill this be bold?\x1b[0m";
+ /// let two = "\x0033\x00";
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub OCTAL_ESCAPES,
+ suspicious,
+ "string escape sequences looking like octal characters"
+}
+
+declare_lint_pass!(OctalEscapes => [OCTAL_ESCAPES]);
+
+impl EarlyLintPass for OctalEscapes {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if let ExprKind::Lit(lit) = &expr.kind {
+ if matches!(lit.token.kind, LitKind::Str) {
+ check_lit(cx, &lit.token, lit.span, true);
+ } else if matches!(lit.token.kind, LitKind::ByteStr) {
+ check_lit(cx, &lit.token, lit.span, false);
+ }
+ }
+ }
+}
+
+fn check_lit(cx: &EarlyContext<'_>, lit: &Lit, span: Span, is_string: bool) {
+ let contents = lit.symbol.as_str();
+ let mut iter = contents.char_indices().peekable();
+ let mut found = vec![];
+
+ // go through the string, looking for \0[0-7][0-7]?
+ while let Some((from, ch)) = iter.next() {
+ if ch == '\\' {
+ if let Some((_, '0')) = iter.next() {
+ // collect up to two further octal digits
+ if let Some((mut to, '0'..='7')) = iter.next() {
+ if let Some((_, '0'..='7')) = iter.peek() {
+ to += 1;
+ }
+ found.push((from, to + 1));
+ }
+ }
+ }
+ }
+
+ if found.is_empty() {
+ return;
+ }
+
+ // construct two suggestion strings, one with \x escapes with octal meaning
+ // as in C, and one with \x00 for null bytes.
+ let mut suggest_1 = if is_string { "\"" } else { "b\"" }.to_string();
+ let mut suggest_2 = suggest_1.clone();
+ let mut index = 0;
+ for (from, to) in found {
+ suggest_1.push_str(&contents[index..from]);
+ suggest_2.push_str(&contents[index..from]);
+
+ // construct a replacement escape
+ // the maximum value is \077, or \x3f, so u8 is sufficient here
+ if let Ok(n) = u8::from_str_radix(&contents[from + 1..to], 8) {
+ write!(suggest_1, "\\x{:02x}", n).unwrap();
+ }
+
+ // append the null byte as \x00 and the following digits literally
+ suggest_2.push_str("\\x00");
+ suggest_2.push_str(&contents[from + 2..to]);
+
+ index = to;
+ }
+ suggest_1.push_str(&contents[index..]);
+ suggest_1.push('"');
+ suggest_2.push_str(&contents[index..]);
+ suggest_2.push('"');
+
+ span_lint_and_then(
+ cx,
+ OCTAL_ESCAPES,
+ span,
+ &format!(
+ "octal-looking escape in {} literal",
+ if is_string { "string" } else { "byte string" }
+ ),
+ |diag| {
+ diag.help(&format!(
+ "octal escapes are not supported, `\\0` is always a null {}",
+ if is_string { "character" } else { "byte" }
+ ));
+ // suggestion 1: equivalent hex escape
+ diag.span_suggestion(
+ span,
+ "if an octal escape was intended, use the hexadecimal representation instead",
+ suggest_1,
+ Applicability::MaybeIncorrect,
+ );
+ // suggestion 2: unambiguous null byte
+ diag.span_suggestion(
+ span,
+ &format!(
+ "if the null {} is intended, disambiguate using",
+ if is_string { "character" } else { "byte" }
+ ),
+ suggest_2,
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
new file mode 100644
index 000000000..413a740be
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
@@ -0,0 +1,660 @@
+use std::collections::VecDeque;
+
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_lint_allowed;
+use itertools::{izip, Itertools};
+use rustc_ast::{walk_list, Label, Mutability};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData};
+use rustc_hir::intravisit::{walk_expr, walk_stmt, FnKind, Visitor};
+use rustc_hir::{
+ Arm, Block, Body, Closure, Expr, ExprKind, Guard, HirId, ImplicitSelfKind, Let, Local, Pat, PatKind, Path,
+ PathSegment, QPath, Stmt, StmtKind, TyKind, UnOp,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_middle::ty::{Ty, TyCtxt, TypeckResults};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::kw;
+use rustc_span::symbol::Ident;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for arguments that are only used in recursion with no side-effects.
+ ///
+ /// ### Why is this bad?
+ /// Such an argument may hide a useless calculation, and removing it makes the function simpler.
+ ///
+ /// The argument can be involved in calculations and assignments, but as long as
+ /// those calculations have no side effects (function calls or mutating dereferences)
+ /// and the assigned variables are likewise only used in recursion, it is useless.
+ ///
+ /// ### Known problems
+ /// Many code paths in the linting code are currently untested and may produce false
+ /// positives or have performance implications.
+ ///
+ /// In some cases, this would not catch all useless arguments.
+ ///
+ /// ```rust
+ /// fn foo(a: usize, b: usize) -> usize {
+ /// let f = |x| x + 1;
+ ///
+ /// if a == 0 {
+ /// 1
+ /// } else {
+ /// foo(a - 1, f(b))
+ /// }
+ /// }
+ /// ```
+ ///
+ /// In the example above, the argument `b` is only used in recursion (through the closure `f`), but the lint does not catch it.
+ ///
+ /// Some examples that cannot be caught:
+ /// - binary operation of non-primitive types
+ /// - closure usage
+ /// - some `break` relative operations
+ /// - struct pattern binding
+ ///
+ /// Also, recursive calls that refer to the function through path segments cannot be detected.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn f(a: usize, b: usize) -> usize {
+ /// if a == 0 {
+ /// 1
+ /// } else {
+ /// f(a - 1, b + 1)
+ /// }
+ /// }
+ /// # fn main() {
+ /// # print!("{}", f(1, 1));
+ /// # }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn f(a: usize) -> usize {
+ /// if a == 0 {
+ /// 1
+ /// } else {
+ /// f(a - 1)
+ /// }
+ /// }
+ /// # fn main() {
+ /// # print!("{}", f(1));
+ /// # }
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub ONLY_USED_IN_RECURSION,
+ nursery,
+ "arguments that is only used in recursion can be removed"
+}
+declare_lint_pass!(OnlyUsedInRecursion => [ONLY_USED_IN_RECURSION]);
+
+impl<'tcx> LateLintPass<'tcx> for OnlyUsedInRecursion {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx rustc_hir::FnDecl<'tcx>,
+ body: &'tcx Body<'tcx>,
+ _: Span,
+ id: HirId,
+ ) {
+ if is_lint_allowed(cx, ONLY_USED_IN_RECURSION, id) {
+ return;
+ }
+ if let FnKind::ItemFn(ident, ..) | FnKind::Method(ident, ..) = kind {
+ let def_id = id.owner.to_def_id();
+ let data = cx.tcx.def_path(def_id).data;
+
+ if data.len() > 1 {
+ match data.get(data.len() - 2) {
+ Some(DisambiguatedDefPathData {
+ data: DefPathData::Impl,
+ disambiguator,
+ }) if *disambiguator != 0 => return,
+ _ => {},
+ }
+ }
+
+ let has_self = !matches!(decl.implicit_self, ImplicitSelfKind::None);
+
+ let ty_res = cx.typeck_results();
+ let param_span = body
+ .params
+ .iter()
+ .flat_map(|param| {
+ let mut v = Vec::new();
+ param.pat.each_binding(|_, hir_id, span, ident| {
+ v.push((hir_id, span, ident));
+ });
+ v
+ })
+ .skip(if has_self { 1 } else { 0 })
+ .filter(|(_, _, ident)| !ident.name.as_str().starts_with('_'))
+ .collect_vec();
+
+ let params = body.params.iter().map(|param| param.pat).collect();
+
+ let mut visitor = SideEffectVisit {
+ graph: FxHashMap::default(),
+ has_side_effect: FxHashSet::default(),
+ ret_vars: Vec::new(),
+ contains_side_effect: false,
+ break_vars: FxHashMap::default(),
+ params,
+ fn_ident: ident,
+ fn_def_id: def_id,
+ is_method: matches!(kind, FnKind::Method(..)),
+ has_self,
+ ty_res,
+ tcx: cx.tcx,
+ visited_exprs: FxHashSet::default(),
+ };
+
+ visitor.visit_expr(&body.value);
+ let vars = std::mem::take(&mut visitor.ret_vars);
+ // mark the returned variables as having side effects
+ visitor.add_side_effect(vars);
+
+ let mut queue = visitor.has_side_effect.iter().copied().collect::<VecDeque<_>>();
+
+ // a simple BFS to propagate side effects to every reachable variable
+ while let Some(id) = queue.pop_front() {
+ if let Some(next) = visitor.graph.get(&id) {
+ for i in next {
+ if !visitor.has_side_effect.contains(i) {
+ visitor.has_side_effect.insert(*i);
+ queue.push_back(*i);
+ }
+ }
+ }
+ }
+
+ for (id, span, ident) in param_span {
+ // if the variable is not used in recursion, it would be marked as unused
+ if !visitor.has_side_effect.contains(&id) {
+ let mut queue = VecDeque::new();
+ let mut visited = FxHashSet::default();
+
+ queue.push_back(id);
+
+ // a simple BFS to check whether the variable can reach itself in the graph
+ // if it can't, the variable is never used in recursion
+ while let Some(id) = queue.pop_front() {
+ if let Some(next) = visitor.graph.get(&id) {
+ for i in next {
+ if !visited.contains(i) {
+ visited.insert(id);
+ queue.push_back(*i);
+ }
+ }
+ }
+ }
+
+ if visited.contains(&id) {
+ span_lint_and_sugg(
+ cx,
+ ONLY_USED_IN_RECURSION,
+ span,
+ "parameter is only used in recursion",
+ "if this is intentional, prefix with an underscore",
+ format!("_{}", ident.name.as_str()),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+ }
+ }
+}
+
+pub fn is_primitive(ty: Ty<'_>) -> bool {
+ let ty = ty.peel_refs();
+ ty.is_primitive() || ty.is_str()
+}
+
+pub fn is_array(ty: Ty<'_>) -> bool {
+ let ty = ty.peel_refs();
+ ty.is_array() || ty.is_array_slice()
+}
+
+/// This builds the side-effect graph.
+/// The edge `a -> b` means that if `a` has a side effect, `b` will have one as well.
+///
+/// There are some examples in the following code:
+/// ```rust, ignore
+/// let b = 1;
+/// let a = b; // a -> b
+/// let (c, d) = (a, b); // c -> b, d -> b
+///
+/// let e = if a == 0 { // e -> a
+/// c // e -> c
+/// } else {
+/// d // e -> d
+/// };
+/// ```
+pub struct SideEffectVisit<'tcx> {
+ graph: FxHashMap<HirId, FxHashSet<HirId>>,
+ has_side_effect: FxHashSet<HirId>,
+ // the bool indicates whether the variable was dereferenced from a mutable reference
+ ret_vars: Vec<(HirId, bool)>,
+ contains_side_effect: bool,
+ // break label
+ break_vars: FxHashMap<Ident, Vec<(HirId, bool)>>,
+ params: Vec<&'tcx Pat<'tcx>>,
+ fn_ident: Ident,
+ fn_def_id: DefId,
+ is_method: bool,
+ has_self: bool,
+ ty_res: &'tcx TypeckResults<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ visited_exprs: FxHashSet<HirId>,
+}
+
+impl<'tcx> Visitor<'tcx> for SideEffectVisit<'tcx> {
+ fn visit_stmt(&mut self, s: &'tcx Stmt<'tcx>) {
+ match s.kind {
+ StmtKind::Local(Local {
+ pat, init: Some(init), ..
+ }) => {
+ self.visit_pat_expr(pat, init, false);
+ },
+ StmtKind::Item(_) | StmtKind::Expr(_) | StmtKind::Semi(_) => {
+ walk_stmt(self, s);
+ },
+ StmtKind::Local(_) => {},
+ }
+ self.ret_vars.clear();
+ }
+
+ fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
+ if !self.visited_exprs.insert(ex.hir_id) {
+ return;
+ }
+ match ex.kind {
+ ExprKind::Array(exprs) | ExprKind::Tup(exprs) => {
+ self.ret_vars = exprs
+ .iter()
+ .flat_map(|expr| {
+ self.visit_expr(expr);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect();
+ },
+ ExprKind::Call(callee, args) => self.visit_fn(callee, args),
+ ExprKind::MethodCall(path, args, _) => self.visit_method_call(path, args),
+ ExprKind::Binary(_, lhs, rhs) => {
+ self.visit_bin_op(lhs, rhs);
+ },
+ ExprKind::Unary(op, expr) => self.visit_un_op(op, expr),
+ ExprKind::Let(Let { pat, init, .. }) => self.visit_pat_expr(pat, init, false),
+ ExprKind::If(bind, then_expr, else_expr) => {
+ self.visit_if(bind, then_expr, else_expr);
+ },
+ ExprKind::Match(expr, arms, _) => self.visit_match(expr, arms),
+ // since analysing the closure is not easy, just mark all variables in it as having side effects
+ ExprKind::Closure(&Closure { body, .. }) => {
+ let body = self.tcx.hir().body(body);
+ self.visit_body(body);
+ let vars = std::mem::take(&mut self.ret_vars);
+ self.add_side_effect(vars);
+ },
+ ExprKind::Loop(block, label, _, _) | ExprKind::Block(block, label) => {
+ self.visit_block_label(block, label);
+ },
+ ExprKind::Assign(bind, expr, _) => {
+ self.visit_assign(bind, expr);
+ },
+ ExprKind::AssignOp(_, bind, expr) => {
+ self.visit_assign(bind, expr);
+ self.visit_bin_op(bind, expr);
+ },
+ ExprKind::Field(expr, _) => {
+ self.visit_expr(expr);
+ if matches!(self.ty_res.expr_ty(expr).kind(), ty::Ref(_, _, Mutability::Mut)) {
+ self.ret_vars.iter_mut().for_each(|(_, b)| *b = true);
+ }
+ },
+ ExprKind::Index(expr, index) => {
+ self.visit_expr(expr);
+ let mut vars = std::mem::take(&mut self.ret_vars);
+ self.visit_expr(index);
+ self.ret_vars.append(&mut vars);
+
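+ // indexing anything other than an array or slice goes through an `Index` impl,
+ // which may have side effects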
+ if !is_array(self.ty_res.expr_ty(expr)) {
+ self.add_side_effect(self.ret_vars.clone());
+ } else if matches!(self.ty_res.expr_ty(expr).kind(), ty::Ref(_, _, Mutability::Mut)) {
+ self.ret_vars.iter_mut().for_each(|(_, b)| *b = true);
+ }
+ },
+ ExprKind::Break(dest, Some(expr)) => {
+ self.visit_expr(expr);
+ if let Some(label) = dest.label {
+ self.break_vars
+ .entry(label.ident)
+ .or_insert(Vec::new())
+ .append(&mut self.ret_vars);
+ }
+ self.contains_side_effect = true;
+ },
+ ExprKind::Ret(Some(expr)) => {
+ self.visit_expr(expr);
+ let vars = std::mem::take(&mut self.ret_vars);
+ self.add_side_effect(vars);
+ self.contains_side_effect = true;
+ },
+ ExprKind::Break(_, None) | ExprKind::Continue(_) | ExprKind::Ret(None) => {
+ self.contains_side_effect = true;
+ },
+ ExprKind::Struct(_, exprs, expr) => {
+ let mut ret_vars = exprs
+ .iter()
+ .flat_map(|field| {
+ self.visit_expr(field.expr);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect();
+
+ walk_list!(self, visit_expr, expr);
+ self.ret_vars.append(&mut ret_vars);
+ },
+ _ => walk_expr(self, ex),
+ }
+ }
+
+ fn visit_path(&mut self, path: &'tcx Path<'tcx>, _id: HirId) {
+ if let Res::Local(id) = path.res {
+ self.ret_vars.push((id, false));
+ }
+ }
+}
+
+impl<'tcx> SideEffectVisit<'tcx> {
+ fn visit_assign(&mut self, lhs: &'tcx Expr<'tcx>, rhs: &'tcx Expr<'tcx>) {
+ // Just support array and tuple unwrapping for now.
+ //
+ // ex) `(a, b) = (c, d);`
+ // The graph would look like this:
+ // a -> c
+ // b -> d
+ //
+ // This minimizes the number of edges in the side-effect graph.
+ match (&lhs.kind, &rhs.kind) {
+ (ExprKind::Array(lhs), ExprKind::Array(rhs)) | (ExprKind::Tup(lhs), ExprKind::Tup(rhs)) => {
+ // if not, it is a compile error
+ debug_assert!(lhs.len() == rhs.len());
+ izip!(*lhs, *rhs).for_each(|(lhs, rhs)| self.visit_assign(lhs, rhs));
+ },
+ // for other assignments, we have to connect everything to everything,
+ // because the values may be related in arbitrary ways
+ _ => {
+ self.visit_expr(lhs);
+ let lhs_vars = std::mem::take(&mut self.ret_vars);
+ self.visit_expr(rhs);
+ let rhs_vars = std::mem::take(&mut self.ret_vars);
+ self.connect_assign(&lhs_vars, &rhs_vars, false);
+ },
+ }
+ }
+
+ fn visit_block_label(&mut self, block: &'tcx Block<'tcx>, label: Option<Label>) {
+ self.visit_block(block);
+ let _ = label.and_then(|label| {
+ self.break_vars
+ .remove(&label.ident)
+ .map(|mut break_vars| self.ret_vars.append(&mut break_vars))
+ });
+ }
+
+ fn visit_bin_op(&mut self, lhs: &'tcx Expr<'tcx>, rhs: &'tcx Expr<'tcx>) {
+ self.visit_expr(lhs);
+ let mut ret_vars = std::mem::take(&mut self.ret_vars);
+ self.visit_expr(rhs);
+ self.ret_vars.append(&mut ret_vars);
+
+ // binary operations between non-primitive values use overloaded operators,
+ // so they can have side effects
+ if !is_primitive(self.ty_res.expr_ty(lhs)) || !is_primitive(self.ty_res.expr_ty(rhs)) {
+ self.ret_vars.iter().for_each(|id| {
+ self.has_side_effect.insert(id.0);
+ });
+ self.contains_side_effect = true;
+ }
+ }
+
+ fn visit_un_op(&mut self, op: UnOp, expr: &'tcx Expr<'tcx>) {
+ self.visit_expr(expr);
+ let ty = self.ty_res.expr_ty(expr);
+ // dereferencing a reference has no side-effect
+ if !is_primitive(ty) && !matches!((op, ty.kind()), (UnOp::Deref, ty::Ref(..))) {
+ self.add_side_effect(self.ret_vars.clone());
+ }
+
+ if matches!((op, ty.kind()), (UnOp::Deref, ty::Ref(_, _, Mutability::Mut))) {
+ self.ret_vars.iter_mut().for_each(|(_, b)| *b = true);
+ }
+ }
+
+ fn visit_pat_expr(&mut self, pat: &'tcx Pat<'tcx>, expr: &'tcx Expr<'tcx>, connect_self: bool) {
+ match (&pat.kind, &expr.kind) {
+ (PatKind::Tuple(pats, _), ExprKind::Tup(exprs)) => {
+ self.ret_vars = izip!(*pats, *exprs)
+ .flat_map(|(pat, expr)| {
+ self.visit_pat_expr(pat, expr, connect_self);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect();
+ },
+ (PatKind::Slice(front_exprs, _, back_exprs), ExprKind::Array(exprs)) => {
+ let mut vars = izip!(*front_exprs, *exprs)
+ .flat_map(|(pat, expr)| {
+ self.visit_pat_expr(pat, expr, connect_self);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect();
+ self.ret_vars = izip!(back_exprs.iter().rev(), exprs.iter().rev())
+ .flat_map(|(pat, expr)| {
+ self.visit_pat_expr(pat, expr, connect_self);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect();
+ self.ret_vars.append(&mut vars);
+ },
+ _ => {
+ let mut lhs_vars = Vec::new();
+ pat.each_binding(|_, id, _, _| lhs_vars.push((id, false)));
+ self.visit_expr(expr);
+ let rhs_vars = std::mem::take(&mut self.ret_vars);
+ self.connect_assign(&lhs_vars, &rhs_vars, connect_self);
+ self.ret_vars = rhs_vars;
+ },
+ }
+ }
+
+ fn visit_fn(&mut self, callee: &'tcx Expr<'tcx>, args: &'tcx [Expr<'tcx>]) {
+ self.visit_expr(callee);
+ let mut ret_vars = std::mem::take(&mut self.ret_vars);
+ self.add_side_effect(ret_vars.clone());
+
+ let mut is_recursive = false;
+
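+ // two call forms count as recursion: a direct path call to this function (`foo(..)`),
+ // and an associated-function call through `Self` (`Self::foo(..)`)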
+ if_chain! {
+ if !self.has_self;
+ if let ExprKind::Path(QPath::Resolved(_, path)) = callee.kind;
+ if let Res::Def(DefKind::Fn, def_id) = path.res;
+ if self.fn_def_id == def_id;
+ then {
+ is_recursive = true;
+ }
+ }
+
+ if_chain! {
+ if !self.has_self && self.is_method;
+ if let ExprKind::Path(QPath::TypeRelative(ty, segment)) = callee.kind;
+ if segment.ident == self.fn_ident;
+ if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind;
+ if let Res::SelfTy{ .. } = path.res;
+ then {
+ is_recursive = true;
+ }
+ }
+
+ if is_recursive {
+ izip!(self.params.clone(), args).for_each(|(pat, expr)| {
+ self.visit_pat_expr(pat, expr, true);
+ self.ret_vars.clear();
+ });
+ } else {
+ // This marks the arguments of a non-recursive call as having side effects.
+ // Whether a closure itself has a side effect could be detected, but the
+ // value of a variable holding a closure can change,
+ // so we conservatively mark the argument variables instead.
+ self.ret_vars = args
+ .iter()
+ .flat_map(|expr| {
+ self.visit_expr(expr);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect_vec()
+ .into_iter()
+ .map(|id| {
+ self.has_side_effect.insert(id.0);
+ id
+ })
+ .collect();
+ self.contains_side_effect = true;
+ }
+
+ self.ret_vars.append(&mut ret_vars);
+ }
+
+ fn visit_method_call(&mut self, path: &'tcx PathSegment<'tcx>, args: &'tcx [Expr<'tcx>]) {
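+ // a method call is treated as recursion only when it names this method
+ // and its receiver is the plain `self` local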
+ if_chain! {
+ if self.is_method;
+ if path.ident == self.fn_ident;
+ if let ExprKind::Path(QPath::Resolved(_, path)) = args.first().unwrap().kind;
+ if let Res::Local(..) = path.res;
+ let ident = path.segments.last().unwrap().ident;
+ if ident.name == kw::SelfLower;
+ then {
+ izip!(self.params.clone(), args.iter())
+ .for_each(|(pat, expr)| {
+ self.visit_pat_expr(pat, expr, true);
+ self.ret_vars.clear();
+ });
+ } else {
+ self.ret_vars = args
+ .iter()
+ .flat_map(|expr| {
+ self.visit_expr(expr);
+ std::mem::take(&mut self.ret_vars)
+ })
+ .collect_vec()
+ .into_iter()
+ .map(|a| {
+ self.has_side_effect.insert(a.0);
+ a
+ })
+ .collect();
+ self.contains_side_effect = true;
+ }
+ }
+ }
+
+ fn visit_if(&mut self, bind: &'tcx Expr<'tcx>, then_expr: &'tcx Expr<'tcx>, else_expr: Option<&'tcx Expr<'tcx>>) {
+ let contains_side_effect = self.contains_side_effect;
+ self.contains_side_effect = false;
+ self.visit_expr(bind);
+ let mut vars = std::mem::take(&mut self.ret_vars);
+ self.visit_expr(then_expr);
+ let mut then_vars = std::mem::take(&mut self.ret_vars);
+ walk_list!(self, visit_expr, else_expr);
+ if self.contains_side_effect {
+ self.add_side_effect(vars.clone());
+ }
+ self.contains_side_effect |= contains_side_effect;
+ self.ret_vars.append(&mut vars);
+ self.ret_vars.append(&mut then_vars);
+ }
+
+ fn visit_match(&mut self, expr: &'tcx Expr<'tcx>, arms: &'tcx [Arm<'tcx>]) {
+ self.visit_expr(expr);
+ let mut expr_vars = std::mem::take(&mut self.ret_vars);
+ self.ret_vars = arms
+ .iter()
+ .flat_map(|arm| {
+ let contains_side_effect = self.contains_side_effect;
+ self.contains_side_effect = false;
+ // this visits `expr` multiple times,
+ // but there is no obviously better way
+ self.visit_pat_expr(arm.pat, expr, false);
+ let mut vars = std::mem::take(&mut self.ret_vars);
+ let _ = arm.guard.as_ref().map(|guard| {
+ self.visit_expr(match guard {
+ Guard::If(expr) | Guard::IfLet(Let { init: expr, .. }) => expr,
+ });
+ vars.append(&mut self.ret_vars);
+ });
+ self.visit_expr(arm.body);
+ if self.contains_side_effect {
+ self.add_side_effect(vars.clone());
+ self.add_side_effect(expr_vars.clone());
+ }
+ self.contains_side_effect |= contains_side_effect;
+ vars.append(&mut self.ret_vars);
+ vars
+ })
+ .collect();
+ self.ret_vars.append(&mut expr_vars);
+ }
+
+ fn connect_assign(&mut self, lhs: &[(HirId, bool)], rhs: &[(HirId, bool)], connect_self: bool) {
+ // an assignment through a mutable dereference can have a side effect
+ // (it may dereference a parameter and change the original value)
+ // it is too hard to tell whether the value comes from a parameter, so treat
+ // every assignment through a mutable dereference as a side effect
+ lhs.iter().filter(|(_, b)| *b).for_each(|(id, _)| {
+ self.has_side_effect.insert(*id);
+ self.contains_side_effect = true;
+ });
+
+ // there is no connection
+ if lhs.is_empty() || rhs.is_empty() {
+ return;
+ }
+
+ // by connecting the rhs variables in a cycle, the number of edges decreases
+ // from `n * m` to `n + m`,
+ // where `n` and `m` are the lengths of `lhs` and `rhs`.
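+ // e.g. lhs = [a, b], rhs = [c, d] produces the edges a -> c, b -> c, d -> c and c -> d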
+
+ // unwrap is possible since rhs is not empty
+ let rhs_first = rhs.first().unwrap();
+ for (id, _) in lhs.iter() {
+ if connect_self || *id != rhs_first.0 {
+ self.graph
+ .entry(*id)
+ .or_insert_with(FxHashSet::default)
+ .insert(rhs_first.0);
+ }
+ }
+
+ let rhs = rhs.iter();
+ izip!(rhs.clone().cycle().skip(1), rhs).for_each(|(from, to)| {
+ if connect_self || from.0 != to.0 {
+ self.graph.entry(from.0).or_insert_with(FxHashSet::default).insert(to.0);
+ }
+ });
+ }
+
+ fn add_side_effect(&mut self, v: Vec<(HirId, bool)>) {
+ for (id, _) in v {
+ self.has_side_effect.insert(id);
+ self.contains_side_effect = true;
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/open_options.rs b/src/tools/clippy/clippy_lints/src/open_options.rs
new file mode 100644
index 000000000..5a0b50420
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/open_options.rs
@@ -0,0 +1,202 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::paths;
+use clippy_utils::ty::match_type;
+use rustc_ast::ast::LitKind;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::{Span, Spanned};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for duplicate open options as well as combinations
+ /// that make no sense.
+ ///
+ /// ### Why is this bad?
+ /// In the best case, the code is merely harder to read than necessary;
+ /// in the worst case, the combination of options does not make sense at all.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::fs::OpenOptions;
+ ///
+ /// OpenOptions::new().read(true).truncate(true);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NONSENSICAL_OPEN_OPTIONS,
+ correctness,
+ "nonsensical combination of options for opening a file"
+}
+
+declare_lint_pass!(OpenOptions => [NONSENSICAL_OPEN_OPTIONS]);
+
+impl<'tcx> LateLintPass<'tcx> for OpenOptions {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &e.kind {
+ let obj_ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
+ if path.ident.name == sym!(open) && match_type(cx, obj_ty, &paths::OPEN_OPTIONS) {
+ let mut options = Vec::new();
+ get_open_options(cx, self_arg, &mut options);
+ check_open_options(cx, &options, e.span);
+ }
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum Argument {
+ True,
+ False,
+ Unknown,
+}
+
+#[derive(Debug)]
+enum OpenOption {
+ Write,
+ Read,
+ Truncate,
+ Create,
+ Append,
+}
+
+fn get_open_options(cx: &LateContext<'_>, argument: &Expr<'_>, options: &mut Vec<(OpenOption, Argument)>) {
+ if let ExprKind::MethodCall(path, arguments, _) = argument.kind {
+ let obj_ty = cx.typeck_results().expr_ty(&arguments[0]).peel_refs();
+
+ // Only proceed if this is a call on some object of type std::fs::OpenOptions
+ if match_type(cx, obj_ty, &paths::OPEN_OPTIONS) && arguments.len() >= 2 {
+ let argument_option = match arguments[1].kind {
+ ExprKind::Lit(ref span) => {
+ if let Spanned {
+ node: LitKind::Bool(lit),
+ ..
+ } = *span
+ {
+ if lit { Argument::True } else { Argument::False }
+ } else {
+ // The function is called with a literal which is not a boolean literal.
+ // This is theoretically possible, but not very likely.
+ return;
+ }
+ },
+ _ => Argument::Unknown,
+ };
+
+ match path.ident.as_str() {
+ "create" => {
+ options.push((OpenOption::Create, argument_option));
+ },
+ "append" => {
+ options.push((OpenOption::Append, argument_option));
+ },
+ "truncate" => {
+ options.push((OpenOption::Truncate, argument_option));
+ },
+ "read" => {
+ options.push((OpenOption::Read, argument_option));
+ },
+ "write" => {
+ options.push((OpenOption::Write, argument_option));
+ },
+ _ => (),
+ }
+
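+ // recurse into the receiver to collect the rest of the builder chain,
+ // e.g. the `.read(true)` in `OpenOptions::new().read(true).truncate(true)`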
+ get_open_options(cx, &arguments[0], options);
+ }
+ }
+}
+
+fn check_open_options(cx: &LateContext<'_>, options: &[(OpenOption, Argument)], span: Span) {
+ let (mut create, mut append, mut truncate, mut read, mut write) = (false, false, false, false, false);
+ let (mut create_arg, mut append_arg, mut truncate_arg, mut read_arg, mut write_arg) =
+ (false, false, false, false, false);
+ // This code is almost duplicated (oh, the irony), but I haven't found a way to
+ // unify it.
+
+ for option in options {
+ match *option {
+ (OpenOption::Create, arg) => {
+ if create {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "the method `create` is called more than once",
+ );
+ } else {
+ create = true;
+ }
+ create_arg = create_arg || (arg == Argument::True);
+ },
+ (OpenOption::Append, arg) => {
+ if append {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "the method `append` is called more than once",
+ );
+ } else {
+ append = true;
+ }
+ append_arg = append_arg || (arg == Argument::True);
+ },
+ (OpenOption::Truncate, arg) => {
+ if truncate {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "the method `truncate` is called more than once",
+ );
+ } else {
+ truncate = true;
+ }
+ truncate_arg = truncate_arg || (arg == Argument::True);
+ },
+ (OpenOption::Read, arg) => {
+ if read {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "the method `read` is called more than once",
+ );
+ } else {
+ read = true;
+ }
+ read_arg = read_arg || (arg == Argument::True);
+ },
+ (OpenOption::Write, arg) => {
+ if write {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "the method `write` is called more than once",
+ );
+ } else {
+ write = true;
+ }
+ write_arg = write_arg || (arg == Argument::True);
+ },
+ }
+ }
+
+ if read && truncate && read_arg && truncate_arg && !(write && write_arg) {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "file opened with `truncate` and `read`",
+ );
+ }
+ if append && truncate && append_arg && truncate_arg {
+ span_lint(
+ cx,
+ NONSENSICAL_OPEN_OPTIONS,
+ span,
+ "file opened with `append` and `truncate`",
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/absurd_extreme_comparisons.rs b/src/tools/clippy/clippy_lints/src/operators/absurd_extreme_comparisons.rs
new file mode 100644
index 000000000..1ec4240af
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/absurd_extreme_comparisons.rs
@@ -0,0 +1,142 @@
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use clippy_utils::comparisons::{normalize_comparison, Rel};
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_isize_or_usize;
+use clippy_utils::{clip, int_bits, unsext};
+
+use super::ABSURD_EXTREME_COMPARISONS;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ op: BinOpKind,
+ lhs: &'tcx Expr<'_>,
+ rhs: &'tcx Expr<'_>,
+) {
+ if let Some((culprit, result)) = detect_absurd_comparison(cx, op, lhs, rhs) {
+ let msg = "this comparison involving the minimum or maximum element for this \
+ type contains a case that is always true or always false";
+
+ let conclusion = match result {
+ AbsurdComparisonResult::AlwaysFalse => "this comparison is always false".to_owned(),
+ AbsurdComparisonResult::AlwaysTrue => "this comparison is always true".to_owned(),
+ AbsurdComparisonResult::InequalityImpossible => format!(
+ "the case where the two sides are not equal never occurs, consider using `{} == {}` \
+ instead",
+ snippet(cx, lhs.span, "lhs"),
+ snippet(cx, rhs.span, "rhs")
+ ),
+ };
+
+ let help = format!(
+ "because `{}` is the {} value for this type, {}",
+ snippet(cx, culprit.expr.span, "x"),
+ match culprit.which {
+ ExtremeType::Minimum => "minimum",
+ ExtremeType::Maximum => "maximum",
+ },
+ conclusion
+ );
+
+ span_lint_and_help(cx, ABSURD_EXTREME_COMPARISONS, expr.span, msg, None, &help);
+ }
+}
+
+enum ExtremeType {
+ Minimum,
+ Maximum,
+}
+
+struct ExtremeExpr<'a> {
+ which: ExtremeType,
+ expr: &'a Expr<'a>,
+}
+
+enum AbsurdComparisonResult {
+ AlwaysFalse,
+ AlwaysTrue,
+ InequalityImpossible,
+}
+
+fn is_cast_between_fixed_and_target<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
+ if let ExprKind::Cast(cast_exp, _) = expr.kind {
+ let precast_ty = cx.typeck_results().expr_ty(cast_exp);
+ let cast_ty = cx.typeck_results().expr_ty(expr);
+
+ return is_isize_or_usize(precast_ty) != is_isize_or_usize(cast_ty);
+ }
+
+ false
+}
+
+fn detect_absurd_comparison<'tcx>(
+ cx: &LateContext<'tcx>,
+ op: BinOpKind,
+ lhs: &'tcx Expr<'_>,
+ rhs: &'tcx Expr<'_>,
+) -> Option<(ExtremeExpr<'tcx>, AbsurdComparisonResult)> {
+ use AbsurdComparisonResult::{AlwaysFalse, AlwaysTrue, InequalityImpossible};
+ use ExtremeType::{Maximum, Minimum};
+ // an absurd comparison only makes sense between values of the same primitive type;
+ // distinct primitive types don't implement comparison operators with each other
+ if cx.typeck_results().expr_ty(lhs) != cx.typeck_results().expr_ty(rhs) {
+ return None;
+ }
+
+ // comparisons between fixed-size types and target-size types (`isize`/`usize`) are considered unanalyzable
+ if is_cast_between_fixed_and_target(cx, lhs) || is_cast_between_fixed_and_target(cx, rhs) {
+ return None;
+ }
+
+ let (rel, normalized_lhs, normalized_rhs) = normalize_comparison(op, lhs, rhs)?;
+
+ let lx = detect_extreme_expr(cx, normalized_lhs);
+ let rx = detect_extreme_expr(cx, normalized_rhs);
+
+ Some(match rel {
+ Rel::Lt => {
+ match (lx, rx) {
+ (Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, AlwaysFalse), // max < x
+ (_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, AlwaysFalse), // x < min
+ _ => return None,
+ }
+ },
+ Rel::Le => {
+ match (lx, rx) {
+ (Some(l @ ExtremeExpr { which: Minimum, .. }), _) => (l, AlwaysTrue), // min <= x
+ (Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, InequalityImpossible), // max <= x
+ (_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, InequalityImpossible), // x <= min
+ (_, Some(r @ ExtremeExpr { which: Maximum, .. })) => (r, AlwaysTrue), // x <= max
+ _ => return None,
+ }
+ },
+ Rel::Ne | Rel::Eq => return None,
+ })
+}
+
+fn detect_extreme_expr<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<ExtremeExpr<'tcx>> {
+ let ty = cx.typeck_results().expr_ty(expr);
+
+ let cv = constant(cx, cx.typeck_results(), expr)?.0;
+
+ let which = match (ty.kind(), cv) {
+ (&ty::Bool, Constant::Bool(false)) | (&ty::Uint(_), Constant::Int(0)) => ExtremeType::Minimum,
+ (&ty::Int(ity), Constant::Int(i)) if i == unsext(cx.tcx, i128::MIN >> (128 - int_bits(cx.tcx, ity)), ity) => {
+ ExtremeType::Minimum
+ },
+
+ (&ty::Bool, Constant::Bool(true)) => ExtremeType::Maximum,
+ (&ty::Int(ity), Constant::Int(i)) if i == unsext(cx.tcx, i128::MAX >> (128 - int_bits(cx.tcx, ity)), ity) => {
+ ExtremeType::Maximum
+ },
+ (&ty::Uint(uty), Constant::Int(i)) if clip(cx.tcx, u128::MAX, uty) == i => ExtremeType::Maximum,
+
+ _ => return None,
+ };
+ Some(ExtremeExpr { which, expr })
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic.rs
new file mode 100644
index 000000000..800cf249f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic.rs
@@ -0,0 +1,119 @@
+#![allow(
+ // False positive
+ clippy::match_same_arms
+)]
+
+use super::ARITHMETIC;
+use clippy_utils::{consts::constant_simple, diagnostics::span_lint};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::impl_lint_pass;
+use rustc_span::source_map::Span;
+
+const HARD_CODED_ALLOWED: &[&str] = &["std::num::Saturating", "std::string::String", "std::num::Wrapping"];
+
+#[derive(Debug)]
+pub struct Arithmetic {
+ allowed: FxHashSet<String>,
+ // Used to check whether expressions are constants, such as in enum discriminants and consts
+ const_span: Option<Span>,
+ expr_span: Option<Span>,
+}
+
+impl_lint_pass!(Arithmetic => [ARITHMETIC]);
+
+impl Arithmetic {
+ #[must_use]
+ pub fn new(mut allowed: FxHashSet<String>) -> Self {
+ allowed.extend(HARD_CODED_ALLOWED.iter().copied().map(String::from));
+ Self {
+ allowed,
+ const_span: None,
+ expr_span: None,
+ }
+ }
+
+ /// Checks if the given `expr` has any of the inner `allowed` elements.
+ fn is_allowed_ty(&self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
+ self.allowed.contains(
+ cx.typeck_results()
+ .expr_ty(expr)
+ .to_string()
+ .split('<')
+ .next()
+ .unwrap_or_default(),
+ )
+ }
+
+ fn issue_lint(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
+ span_lint(cx, ARITHMETIC, expr.span, "arithmetic detected");
+ self.expr_span = Some(expr.span);
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for Arithmetic {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if self.expr_span.is_some() {
+ return;
+ }
+ if let Some(span) = self.const_span && span.contains(expr.span) {
+ return;
+ }
+ match &expr.kind {
+ hir::ExprKind::Binary(op, lhs, rhs) | hir::ExprKind::AssignOp(op, lhs, rhs) => {
+ let (
+ hir::BinOpKind::Add
+ | hir::BinOpKind::Sub
+ | hir::BinOpKind::Mul
+ | hir::BinOpKind::Div
+ | hir::BinOpKind::Rem
+ | hir::BinOpKind::Shl
+ | hir::BinOpKind::Shr
+ ) = op.node else {
+ return;
+ };
+ if self.is_allowed_ty(cx, lhs) || self.is_allowed_ty(cx, rhs) {
+ return;
+ }
+ self.issue_lint(cx, expr);
+ },
+ hir::ExprKind::Unary(hir::UnOp::Neg, _) => {
+ // CTFE already takes care of things like `-1` that do not overflow.
+ if constant_simple(cx, cx.typeck_results(), expr).is_none() {
+ self.issue_lint(cx, expr);
+ }
+ },
+ _ => {},
+ }
+ }
+
+ fn check_body(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) {
+ let body_owner = cx.tcx.hir().body_owner_def_id(body.id());
+ match cx.tcx.hir().body_owner_kind(body_owner) {
+ hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
+ let body_span = cx.tcx.def_span(body_owner);
+ if let Some(span) = self.const_span && span.contains(body_span) {
+ return;
+ }
+ self.const_span = Some(body_span);
+ },
+ hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => {},
+ }
+ }
+
+ fn check_body_post(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) {
+ let body_owner = cx.tcx.hir().body_owner(body.id());
+ let body_span = cx.tcx.hir().span(body_owner);
+ if let Some(span) = self.const_span && span.contains(body_span) {
+ return;
+ }
+ self.const_span = None;
+ }
+
+ fn check_expr_post(&mut self, _: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if Some(expr.span) == self.expr_span {
+ self.expr_span = None;
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
new file mode 100644
index 000000000..945a09a64
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
@@ -0,0 +1,181 @@
+use clippy_utils::binop_traits;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{eq_expr_value, trait_ref_of_method};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_lint::LateContext;
+use rustc_middle::mir::FakeReadCause;
+use rustc_middle::ty::BorrowKind;
+use rustc_trait_selection::infer::TyCtxtInferExt;
+use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
+
+use super::ASSIGN_OP_PATTERN;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ assignee: &'tcx hir::Expr<'_>,
+ e: &'tcx hir::Expr<'_>,
+) {
+ if let hir::ExprKind::Binary(op, l, r) = &e.kind {
+ let lint = |assignee: &hir::Expr<'_>, rhs: &hir::Expr<'_>| {
+ let ty = cx.typeck_results().expr_ty(assignee);
+ let rty = cx.typeck_results().expr_ty(rhs);
+ if_chain! {
+ if let Some((_, lang_item)) = binop_traits(op.node);
+ if let Ok(trait_id) = cx.tcx.lang_items().require(lang_item);
+ let parent_fn = cx.tcx.hir().get_parent_item(e.hir_id);
+ if trait_ref_of_method(cx, parent_fn)
+ .map_or(true, |t| t.path.res.def_id() != trait_id);
+ if implements_trait(cx, ty, trait_id, &[rty.into()]);
+ then {
+ // Primitive types execute assign-ops right-to-left. Every other type is left-to-right.
+ if !(ty.is_primitive() && rty.is_primitive()) {
+ // TODO: This will have false negatives as it doesn't check if the borrows are
+ // actually live at the end of their respective expressions.
+ let mut_borrows = mut_borrows_in_expr(cx, assignee);
+ let imm_borrows = imm_borrows_in_expr(cx, rhs);
+ if mut_borrows.iter().any(|id| imm_borrows.contains(id)) {
+ return;
+ }
+ }
+ span_lint_and_then(
+ cx,
+ ASSIGN_OP_PATTERN,
+ expr.span,
+ "manual implementation of an assign operation",
+ |diag| {
+ if let (Some(snip_a), Some(snip_r)) =
+ (snippet_opt(cx, assignee.span), snippet_opt(cx, rhs.span))
+ {
+ diag.span_suggestion(
+ expr.span,
+ "replace it with",
+ format!("{} {}= {}", snip_a, op.node.as_str(), snip_r),
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ );
+ }
+ }
+ };
+
+ let mut visitor = ExprVisitor {
+ assignee,
+ counter: 0,
+ cx,
+ };
+
+ walk_expr(&mut visitor, e);
+
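+ // only lint when the assignee appears exactly once on the right-hand side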
+ if visitor.counter == 1 {
+ // a = a op b
+ if eq_expr_value(cx, assignee, l) {
+ lint(assignee, r);
+ }
+ // a = b commutative_op a
+ // Limited to primitive types, as these ops are known to be commutative
+ if eq_expr_value(cx, assignee, r) && cx.typeck_results().expr_ty(assignee).is_primitive_ty() {
+ match op.node {
+ hir::BinOpKind::Add
+ | hir::BinOpKind::Mul
+ | hir::BinOpKind::And
+ | hir::BinOpKind::Or
+ | hir::BinOpKind::BitXor
+ | hir::BinOpKind::BitAnd
+ | hir::BinOpKind::BitOr => {
+ lint(assignee, l);
+ },
+ _ => {},
+ }
+ }
+ }
+ }
+}
+
+struct ExprVisitor<'a, 'tcx> {
+ assignee: &'a hir::Expr<'a>,
+ counter: u8,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
+ if eq_expr_value(self.cx, self.assignee, expr) {
+ self.counter += 1;
+ }
+
+ walk_expr(self, expr);
+ }
+}
+
+fn imm_borrows_in_expr(cx: &LateContext<'_>, e: &hir::Expr<'_>) -> hir::HirIdSet {
+ struct S(hir::HirIdSet);
+ impl Delegate<'_> for S {
+ fn borrow(&mut self, place: &PlaceWithHirId<'_>, _: hir::HirId, kind: BorrowKind) {
+ if matches!(kind, BorrowKind::ImmBorrow | BorrowKind::UniqueImmBorrow) {
+ self.0.insert(match place.place.base {
+ PlaceBase::Local(id) => id,
+ PlaceBase::Upvar(id) => id.var_path.hir_id,
+ _ => return,
+ });
+ }
+ }
+
+ fn consume(&mut self, _: &PlaceWithHirId<'_>, _: hir::HirId) {}
+ fn mutate(&mut self, _: &PlaceWithHirId<'_>, _: hir::HirId) {}
+ fn fake_read(&mut self, _: &PlaceWithHirId<'_>, _: FakeReadCause, _: hir::HirId) {}
+ fn copy(&mut self, _: &PlaceWithHirId<'_>, _: hir::HirId) {}
+ }
+
+ let mut s = S(hir::HirIdSet::default());
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ let mut v = ExprUseVisitor::new(
+ &mut s,
+ &infcx,
+ cx.tcx.hir().body_owner_def_id(cx.enclosing_body.unwrap()),
+ cx.param_env,
+ cx.typeck_results(),
+ );
+ v.consume_expr(e);
+ });
+ s.0
+}
+
+fn mut_borrows_in_expr(cx: &LateContext<'_>, e: &hir::Expr<'_>) -> hir::HirIdSet {
+ struct S(hir::HirIdSet);
+ impl Delegate<'_> for S {
+ fn borrow(&mut self, place: &PlaceWithHirId<'_>, _: hir::HirId, kind: BorrowKind) {
+ if matches!(kind, BorrowKind::MutBorrow) {
+ self.0.insert(match place.place.base {
+ PlaceBase::Local(id) => id,
+ PlaceBase::Upvar(id) => id.var_path.hir_id,
+ _ => return,
+ });
+ }
+ }
+
+ fn consume(&mut self, _: &PlaceWithHirId<'_>, _: hir::HirId) {}
+ fn mutate(&mut self, _: &PlaceWithHirId<'_>, _: hir::HirId) {}
+ fn fake_read(&mut self, _: &PlaceWithHirId<'_>, _: FakeReadCause, _: hir::HirId) {}
+ fn copy(&mut self, _: &PlaceWithHirId<'_>, _: hir::HirId) {}
+ }
+
+ let mut s = S(hir::HirIdSet::default());
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ let mut v = ExprUseVisitor::new(
+ &mut s,
+ &infcx,
+ cx.tcx.hir().body_owner_def_id(cx.enclosing_body.unwrap()),
+ cx.param_env,
+ cx.typeck_results(),
+ );
+ v.consume_expr(e);
+ });
+ s.0
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs b/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs
new file mode 100644
index 000000000..74387fbc8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs
@@ -0,0 +1,197 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+
+use super::{BAD_BIT_MASK, INEFFECTIVE_BIT_MASK};
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ if op.is_comparison() {
+ if let Some(cmp_opt) = fetch_int_literal(cx, right) {
+ check_compare(cx, left, op, cmp_opt, e.span);
+ } else if let Some(cmp_val) = fetch_int_literal(cx, left) {
+ check_compare(cx, right, invert_cmp(op), cmp_val, e.span);
+ }
+ }
+}
+
+#[must_use]
+fn invert_cmp(cmp: BinOpKind) -> BinOpKind {
+ match cmp {
+ BinOpKind::Eq => BinOpKind::Eq,
+ BinOpKind::Ne => BinOpKind::Ne,
+ BinOpKind::Lt => BinOpKind::Gt,
+ BinOpKind::Gt => BinOpKind::Lt,
+ BinOpKind::Le => BinOpKind::Ge,
+ BinOpKind::Ge => BinOpKind::Le,
+ _ => BinOpKind::Or, // Dummy
+ }
+}
+
+fn check_compare(cx: &LateContext<'_>, bit_op: &Expr<'_>, cmp_op: BinOpKind, cmp_value: u128, span: Span) {
+ if let ExprKind::Binary(op, left, right) = &bit_op.kind {
+ if op.node != BinOpKind::BitAnd && op.node != BinOpKind::BitOr {
+ return;
+ }
+ fetch_int_literal(cx, right)
+ .or_else(|| fetch_int_literal(cx, left))
+ .map_or((), |mask| check_bit_mask(cx, op.node, cmp_op, mask, cmp_value, span));
+ }
+}
+
+#[allow(clippy::too_many_lines)]
+fn check_bit_mask(
+ cx: &LateContext<'_>,
+ bit_op: BinOpKind,
+ cmp_op: BinOpKind,
+ mask_value: u128,
+ cmp_value: u128,
+ span: Span,
+) {
+ match cmp_op {
+ BinOpKind::Eq | BinOpKind::Ne => match bit_op {
+ BinOpKind::BitAnd => {
+ if mask_value & cmp_value != cmp_value {
+ if cmp_value != 0 {
+ span_lint(
+ cx,
+ BAD_BIT_MASK,
+ span,
+ &format!(
+ "incompatible bit mask: `_ & {}` can never be equal to `{}`",
+ mask_value, cmp_value
+ ),
+ );
+ }
+ } else if mask_value == 0 {
+ span_lint(cx, BAD_BIT_MASK, span, "&-masking with zero");
+ }
+ },
+ BinOpKind::BitOr => {
+ if mask_value | cmp_value != cmp_value {
+ span_lint(
+ cx,
+ BAD_BIT_MASK,
+ span,
+ &format!(
+ "incompatible bit mask: `_ | {}` can never be equal to `{}`",
+ mask_value, cmp_value
+ ),
+ );
+ }
+ },
+ _ => (),
+ },
+ BinOpKind::Lt | BinOpKind::Ge => match bit_op {
+ BinOpKind::BitAnd => {
+ if mask_value < cmp_value {
+ span_lint(
+ cx,
+ BAD_BIT_MASK,
+ span,
+ &format!(
+ "incompatible bit mask: `_ & {}` will always be lower than `{}`",
+ mask_value, cmp_value
+ ),
+ );
+ } else if mask_value == 0 {
+ span_lint(cx, BAD_BIT_MASK, span, "&-masking with zero");
+ }
+ },
+ BinOpKind::BitOr => {
+ if mask_value >= cmp_value {
+ span_lint(
+ cx,
+ BAD_BIT_MASK,
+ span,
+ &format!(
+ "incompatible bit mask: `_ | {}` will never be lower than `{}`",
+ mask_value, cmp_value
+ ),
+ );
+ } else {
+ check_ineffective_lt(cx, span, mask_value, cmp_value, "|");
+ }
+ },
+ BinOpKind::BitXor => check_ineffective_lt(cx, span, mask_value, cmp_value, "^"),
+ _ => (),
+ },
+ BinOpKind::Le | BinOpKind::Gt => match bit_op {
+ BinOpKind::BitAnd => {
+ if mask_value <= cmp_value {
+ span_lint(
+ cx,
+ BAD_BIT_MASK,
+ span,
+ &format!(
+ "incompatible bit mask: `_ & {}` will never be higher than `{}`",
+ mask_value, cmp_value
+ ),
+ );
+ } else if mask_value == 0 {
+ span_lint(cx, BAD_BIT_MASK, span, "&-masking with zero");
+ }
+ },
+ BinOpKind::BitOr => {
+ if mask_value > cmp_value {
+ span_lint(
+ cx,
+ BAD_BIT_MASK,
+ span,
+ &format!(
+ "incompatible bit mask: `_ | {}` will always be higher than `{}`",
+ mask_value, cmp_value
+ ),
+ );
+ } else {
+ check_ineffective_gt(cx, span, mask_value, cmp_value, "|");
+ }
+ },
+ BinOpKind::BitXor => check_ineffective_gt(cx, span, mask_value, cmp_value, "^"),
+ _ => (),
+ },
+ _ => (),
+ }
+}
+
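+// e.g. `x | 2 < 4`: since 4 is a power of two and 2 < 4, this is the same as `x < 4`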
+fn check_ineffective_lt(cx: &LateContext<'_>, span: Span, m: u128, c: u128, op: &str) {
+ if c.is_power_of_two() && m < c {
+ span_lint(
+ cx,
+ INEFFECTIVE_BIT_MASK,
+ span,
+ &format!(
+ "ineffective bit mask: `x {} {}` compared to `{}`, is the same as x compared directly",
+ op, m, c
+ ),
+ );
+ }
+}
+
+fn check_ineffective_gt(cx: &LateContext<'_>, span: Span, m: u128, c: u128, op: &str) {
+ if (c + 1).is_power_of_two() && m <= c {
+ span_lint(
+ cx,
+ INEFFECTIVE_BIT_MASK,
+ span,
+ &format!(
+ "ineffective bit mask: `x {} {}` compared to `{}`, is the same as x compared directly",
+ op, m, c
+ ),
+ );
+ }
+}
+
+fn fetch_int_literal(cx: &LateContext<'_>, lit: &Expr<'_>) -> Option<u128> {
+ match constant(cx, cx.typeck_results(), lit)?.0 {
+ Constant::Int(n) => Some(n),
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/cmp_nan.rs b/src/tools/clippy/clippy_lints/src/operators/cmp_nan.rs
new file mode 100644
index 000000000..786ae1552
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/cmp_nan.rs
@@ -0,0 +1,30 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::in_constant;
+use rustc_hir::{BinOpKind, Expr};
+use rustc_lint::LateContext;
+
+use super::CMP_NAN;
+
+pub(super) fn check(cx: &LateContext<'_>, e: &Expr<'_>, op: BinOpKind, lhs: &Expr<'_>, rhs: &Expr<'_>) {
+ if op.is_comparison() && !in_constant(cx, e.hir_id) && (is_nan(cx, lhs) || is_nan(cx, rhs)) {
+ span_lint(
+ cx,
+ CMP_NAN,
+ e.span,
+ "doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead",
+ );
+ }
+}
+
+fn is_nan(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), e) {
+ match value {
+ Constant::F32(num) => num.is_nan(),
+ Constant::F64(num) => num.is_nan(),
+ _ => false,
+ }
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs
new file mode 100644
index 000000000..e1f9b5906
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs
@@ -0,0 +1,147 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::{implements_trait, is_copy};
+use clippy_utils::{match_any_def_paths, path_def_id, paths};
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty::Ty;
+use rustc_span::symbol::sym;
+
+use super::CMP_OWNED;
+
+pub(super) fn check(cx: &LateContext<'_>, op: BinOpKind, lhs: &Expr<'_>, rhs: &Expr<'_>) {
+ if op.is_comparison() {
+ check_op(cx, lhs, rhs, true);
+ check_op(cx, rhs, lhs, false);
+ }
+}
+
+#[derive(Default)]
+struct EqImpl {
+ ty_eq_other: bool,
+ other_eq_ty: bool,
+}
+impl EqImpl {
+ fn is_implemented(&self) -> bool {
+ self.ty_eq_other || self.other_eq_ty
+ }
+}
+
+fn symmetric_partial_eq<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, other: Ty<'tcx>) -> Option<EqImpl> {
+ cx.tcx.lang_items().eq_trait().map(|def_id| EqImpl {
+ ty_eq_other: implements_trait(cx, ty, def_id, &[other.into()]),
+ other_eq_ty: implements_trait(cx, other, def_id, &[ty.into()]),
+ })
+}
+
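+// lints e.g. `x.to_owned() == y` or `String::from(x) == y`, suggesting a comparison that avoids the allocation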
+fn check_op(cx: &LateContext<'_>, expr: &Expr<'_>, other: &Expr<'_>, left: bool) {
+ let typeck = cx.typeck_results();
+ let (arg, arg_span) = match expr.kind {
+ ExprKind::MethodCall(.., [arg], _)
+ if typeck
+ .type_dependent_def_id(expr.hir_id)
+ .and_then(|id| cx.tcx.trait_of_item(id))
+ .map_or(false, |id| {
+ matches!(cx.tcx.get_diagnostic_name(id), Some(sym::ToString | sym::ToOwned))
+ }) =>
+ {
+ (arg, arg.span)
+ },
+ ExprKind::Call(path, [arg])
+ if path_def_id(cx, path)
+ .and_then(|id| match_any_def_paths(cx, id, &[&paths::FROM_STR_METHOD, &paths::FROM_FROM]))
+ .map_or(false, |idx| match idx {
+ 0 => true,
+ 1 => !is_copy(cx, typeck.expr_ty(expr)),
+ _ => false,
+ }) =>
+ {
+ (arg, arg.span)
+ },
+ _ => return,
+ };
+
+ let arg_ty = typeck.expr_ty(arg);
+ let other_ty = typeck.expr_ty(other);
+
+ let without_deref = symmetric_partial_eq(cx, arg_ty, other_ty).unwrap_or_default();
+ let with_deref = arg_ty
+ .builtin_deref(true)
+ .and_then(|tam| symmetric_partial_eq(cx, tam.ty, other_ty))
+ .unwrap_or_default();
+
+ if !with_deref.is_implemented() && !without_deref.is_implemented() {
+ return;
+ }
+
+ let other_gets_derefed = matches!(other.kind, ExprKind::Unary(UnOp::Deref, _));
+
+ let lint_span = if other_gets_derefed {
+ expr.span.to(other.span)
+ } else {
+ expr.span
+ };
+
+ span_lint_and_then(
+ cx,
+ CMP_OWNED,
+ lint_span,
+ "this creates an owned instance just for comparison",
+ |diag| {
+ // This also catches `PartialEq` implementations that call `to_owned`.
+ if other_gets_derefed {
+ diag.span_label(lint_span, "try implementing the comparison without allocating");
+ return;
+ }
+
+ let arg_snip = snippet(cx, arg_span, "..");
+ let expr_snip;
+ let eq_impl;
+ if with_deref.is_implemented() {
+ expr_snip = format!("*{}", arg_snip);
+ eq_impl = with_deref;
+ } else {
+ expr_snip = arg_snip.to_string();
+ eq_impl = without_deref;
+ };
+
+ let span;
+ let hint;
+ if (eq_impl.ty_eq_other && left) || (eq_impl.other_eq_ty && !left) {
+ span = expr.span;
+ hint = expr_snip;
+ } else {
+ span = expr.span.to(other.span);
+
+ let cmp_span = if other.span < expr.span {
+ other.span.between(expr.span)
+ } else {
+ expr.span.between(other.span)
+ };
+ if eq_impl.ty_eq_other {
+ hint = format!(
+ "{}{}{}",
+ expr_snip,
+ snippet(cx, cmp_span, ".."),
+ snippet(cx, other.span, "..")
+ );
+ } else {
+ hint = format!(
+ "{}{}{}",
+ snippet(cx, other.span, ".."),
+ snippet(cx, cmp_span, ".."),
+ expr_snip
+ );
+ }
+ }
+
+ diag.span_suggestion(
+ span,
+ "try",
+ hint,
+ Applicability::MachineApplicable, // snippet
+ );
+ },
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/double_comparison.rs b/src/tools/clippy/clippy_lints/src/operators/double_comparison.rs
new file mode 100644
index 000000000..56a86d0ff
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/double_comparison.rs
@@ -0,0 +1,54 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::eq_expr_value;
+use clippy_utils::source::snippet_with_applicability;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+
+use super::DOUBLE_COMPARISONS;
+
+#[expect(clippy::similar_names)]
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, op: BinOpKind, lhs: &'tcx Expr<'_>, rhs: &'tcx Expr<'_>, span: Span) {
+ let (lkind, llhs, lrhs, rkind, rlhs, rrhs) = match (&lhs.kind, &rhs.kind) {
+ (ExprKind::Binary(lb, llhs, lrhs), ExprKind::Binary(rb, rlhs, rrhs)) => {
+ (lb.node, llhs, lrhs, rb.node, rlhs, rrhs)
+ },
+ _ => return,
+ };
+ if !(eq_expr_value(cx, llhs, rlhs) && eq_expr_value(cx, lrhs, rrhs)) {
+ return;
+ }
+ macro_rules! lint_double_comparison {
+ ($op:tt) => {{
+ let mut applicability = Applicability::MachineApplicable;
+ let lhs_str = snippet_with_applicability(cx, llhs.span, "", &mut applicability);
+ let rhs_str = snippet_with_applicability(cx, lrhs.span, "", &mut applicability);
+ let sugg = format!("{} {} {}", lhs_str, stringify!($op), rhs_str);
+ span_lint_and_sugg(
+ cx,
+ DOUBLE_COMPARISONS,
+ span,
+ "this binary expression can be simplified",
+ "try",
+ sugg,
+ applicability,
+ );
+ }};
+ }
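+ // e.g. `x == y || x < y` simplifies to `x <= y`, and `x < y || x > y` to `x != y`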
+ match (op, lkind, rkind) {
+ (BinOpKind::Or, BinOpKind::Eq, BinOpKind::Lt) | (BinOpKind::Or, BinOpKind::Lt, BinOpKind::Eq) => {
+ lint_double_comparison!(<=);
+ },
+ (BinOpKind::Or, BinOpKind::Eq, BinOpKind::Gt) | (BinOpKind::Or, BinOpKind::Gt, BinOpKind::Eq) => {
+ lint_double_comparison!(>=);
+ },
+ (BinOpKind::Or, BinOpKind::Lt, BinOpKind::Gt) | (BinOpKind::Or, BinOpKind::Gt, BinOpKind::Lt) => {
+ lint_double_comparison!(!=);
+ },
+ (BinOpKind::And, BinOpKind::Le, BinOpKind::Ge) | (BinOpKind::And, BinOpKind::Ge, BinOpKind::Le) => {
+ lint_double_comparison!(==);
+ },
+ _ => (),
+ };
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/duration_subsec.rs b/src/tools/clippy/clippy_lints/src/operators/duration_subsec.rs
new file mode 100644
index 000000000..0d067d1e1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/duration_subsec.rs
@@ -0,0 +1,44 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::DURATION_SUBSEC;
+
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ if op == BinOpKind::Div
+ && let ExprKind::MethodCall(method_path, [self_arg], _) = left.kind
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_arg).peel_refs(), sym::Duration)
+ && let Some((Constant::Int(divisor), _)) = constant(cx, cx.typeck_results(), right)
+ {
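+ // e.g. `duration.subsec_nanos() / 1_000_000` can be written as `duration.subsec_millis()`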
+ let suggested_fn = match (method_path.ident.as_str(), divisor) {
+ ("subsec_micros", 1_000) | ("subsec_nanos", 1_000_000) => "subsec_millis",
+ ("subsec_nanos", 1_000) => "subsec_micros",
+ _ => return,
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ DURATION_SUBSEC,
+ expr.span,
+ &format!("calling `{}()` is more concise than this calculation", suggested_fn),
+ "try",
+ format!(
+ "{}.{}()",
+ snippet_with_applicability(cx, self_arg.span, "_", &mut applicability),
+ suggested_fn
+ ),
+ applicability,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
new file mode 100644
index 000000000..44cf0bb06
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
@@ -0,0 +1,45 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::macros::{find_assert_eq_args, first_node_macro_backtrace};
+use clippy_utils::{ast_utils::is_useless_with_eq_exprs, eq_expr_value, is_in_test_function};
+use rustc_hir::{BinOpKind, Expr};
+use rustc_lint::LateContext;
+
+use super::EQ_OP;
+
+pub(crate) fn check_assert<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if let Some((macro_call, macro_name))
+ = first_node_macro_backtrace(cx, e).find_map(|macro_call| {
+ let name = cx.tcx.item_name(macro_call.def_id);
+ matches!(name.as_str(), "assert_eq" | "assert_ne" | "debug_assert_eq" | "debug_assert_ne")
+ .then(|| (macro_call, name))
+ })
+ && let Some((lhs, rhs, _)) = find_assert_eq_args(cx, e, macro_call.expn)
+ && eq_expr_value(cx, lhs, rhs)
+ && macro_call.is_local()
+ && !is_in_test_function(cx.tcx, e.hir_id)
+ {
+ span_lint(
+ cx,
+ EQ_OP,
+ lhs.span.to(rhs.span),
+ &format!("identical args used in this `{}!` macro call", macro_name),
+ );
+ }
+}
+
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ if is_useless_with_eq_exprs(op.into()) && eq_expr_value(cx, left, right) && !is_in_test_function(cx.tcx, e.hir_id) {
+ span_lint(
+ cx,
+ EQ_OP,
+ e.span,
+ &format!("equal expressions as operands to `{}`", op.as_str()),
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/erasing_op.rs b/src/tools/clippy/clippy_lints/src/operators/erasing_op.rs
new file mode 100644
index 000000000..066e08f3b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/erasing_op.rs
@@ -0,0 +1,53 @@
+use clippy_utils::consts::{constant_simple, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::same_type_and_consts;
+
+use rustc_hir::{BinOpKind, Expr};
+use rustc_lint::LateContext;
+use rustc_middle::ty::TypeckResults;
+
+use super::ERASING_OP;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ let tck = cx.typeck_results();
+ match op {
+ BinOpKind::Mul | BinOpKind::BitAnd => {
+ check_op(cx, tck, left, right, e);
+ check_op(cx, tck, right, left, e);
+ },
+ BinOpKind::Div => check_op(cx, tck, left, right, e),
+ _ => (),
+ }
+}
+
+fn different_types(tck: &TypeckResults<'_>, input: &Expr<'_>, output: &Expr<'_>) -> bool {
+ let input_ty = tck.expr_ty(input).peel_refs();
+ let output_ty = tck.expr_ty(output).peel_refs();
+ !same_type_and_consts(input_ty, output_ty)
+}
+
+fn check_op<'tcx>(
+ cx: &LateContext<'tcx>,
+ tck: &TypeckResults<'tcx>,
+ op: &Expr<'tcx>,
+ other: &Expr<'tcx>,
+ parent: &Expr<'tcx>,
+) {
+ if constant_simple(cx, tck, op) == Some(Constant::Int(0)) {
+ if different_types(tck, other, parent) {
+ return;
+ }
+ span_lint(
+ cx,
+ ERASING_OP,
+ parent.span,
+ "this operation will always return zero. This is likely not the intended outcome",
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs b/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs
new file mode 100644
index 000000000..0ef793443
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs
@@ -0,0 +1,139 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::get_item_name;
+use clippy_utils::sugg::Sugg;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::{FLOAT_CMP, FLOAT_CMP_CONST};
+
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ if (op == BinOpKind::Eq || op == BinOpKind::Ne) && (is_float(cx, left) || is_float(cx, right)) {
+ if is_allowed(cx, left) || is_allowed(cx, right) {
+ return;
+ }
+
+ // Allow comparing the results of signum()
+ if is_signum(cx, left) && is_signum(cx, right) {
+ return;
+ }
+
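+ // don't lint inside items whose name suggests they implement the comparison
+ // themselves (`eq`, `ne`, `is_nan`, `eq_*`, `*_eq`)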
+ if let Some(name) = get_item_name(cx, expr) {
+ let name = name.as_str();
+ if name == "eq" || name == "ne" || name == "is_nan" || name.starts_with("eq_") || name.ends_with("_eq") {
+ return;
+ }
+ }
+ let is_comparing_arrays = is_array(cx, left) || is_array(cx, right);
+ let (lint, msg) = get_lint_and_message(
+ is_named_constant(cx, left) || is_named_constant(cx, right),
+ is_comparing_arrays,
+ );
+ span_lint_and_then(cx, lint, expr.span, msg, |diag| {
+ let lhs = Sugg::hir(cx, left, "..");
+ let rhs = Sugg::hir(cx, right, "..");
+
+ if !is_comparing_arrays {
+ diag.span_suggestion(
+ expr.span,
+ "consider comparing them within some margin of error",
+ format!(
+ "({}).abs() {} error_margin",
+ lhs - rhs,
+ if op == BinOpKind::Eq { '<' } else { '>' }
+ ),
+ Applicability::HasPlaceholders, // snippet
+ );
+ }
+ diag.note("`f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`");
+ });
+ }
+}
+
+fn get_lint_and_message(
+ is_comparing_constants: bool,
+ is_comparing_arrays: bool,
+) -> (&'static rustc_lint::Lint, &'static str) {
+ if is_comparing_constants {
+ (
+ FLOAT_CMP_CONST,
+ if is_comparing_arrays {
+ "strict comparison of `f32` or `f64` constant arrays"
+ } else {
+ "strict comparison of `f32` or `f64` constant"
+ },
+ )
+ } else {
+ (
+ FLOAT_CMP,
+ if is_comparing_arrays {
+ "strict comparison of `f32` or `f64` arrays"
+ } else {
+ "strict comparison of `f32` or `f64`"
+ },
+ )
+ }
+}
+
+fn is_named_constant<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
+ if let Some((_, res)) = constant(cx, cx.typeck_results(), expr) {
+ res
+ } else {
+ false
+ }
+}
+
+fn is_allowed<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
+ match constant(cx, cx.typeck_results(), expr) {
+ Some((Constant::F32(f), _)) => f == 0.0 || f.is_infinite(),
+ Some((Constant::F64(f), _)) => f == 0.0 || f.is_infinite(),
+ Some((Constant::Vec(vec), _)) => vec.iter().all(|f| match f {
+ Constant::F32(f) => *f == 0.0 || (*f).is_infinite(),
+ Constant::F64(f) => *f == 0.0 || (*f).is_infinite(),
+ _ => false,
+ }),
+ _ => false,
+ }
+}
+
+// Return true if `expr` is the result of `signum()` invoked on a float value.
+fn is_signum(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ // The negation of a signum is still a signum
+ if let ExprKind::Unary(UnOp::Neg, child_expr) = expr.kind {
+ return is_signum(cx, child_expr);
+ }
+
+ if_chain! {
+ if let ExprKind::MethodCall(method_name, [ref self_arg, ..], _) = expr.kind;
+ if sym!(signum) == method_name.ident.name;
+ // Check that the receiver of the signum() call is a float (`self_arg` is the receiver of
+ // the method call)
+ then {
+ return is_float(cx, self_arg);
+ }
+ }
+ false
+}
+
+fn is_float(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let value = &cx.typeck_results().expr_ty(expr).peel_refs().kind();
+
+ if let ty::Array(arr_ty, _) = value {
+ return matches!(arr_ty.kind(), ty::Float(_));
+ };
+
+ matches!(value, ty::Float(_))
+}
+
+fn is_array(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ matches!(&cx.typeck_results().expr_ty(expr).peel_refs().kind(), ty::Array(_, _))
+}
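A minimal sketch of the comparison `float_cmp` flags and the epsilon-based shape its suggestion points toward (per the `FLOAT_CMP` description in `operators/mod.rs` below); `error_margin` is only a placeholder for the user to fill in.

```rust
fn main() {
    let x = 1.2331f64;
    let y = 1.2332f64;
    // Flagged: strict float equality.
    if y == x {
        println!("equal");
    }
    // Suggested shape: compare the difference against an error margin.
    let error_margin = f64::EPSILON;
    if (y - x).abs() < error_margin {
        println!("roughly equal");
    }
}
```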
diff --git a/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs b/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs
new file mode 100644
index 000000000..a0a8b6aab
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs
@@ -0,0 +1,71 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{match_def_path, paths, sugg};
+use if_chain::if_chain;
+use rustc_ast::util::parser::AssocOp;
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Spanned;
+
+use super::FLOAT_EQUALITY_WITHOUT_ABS;
+
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ op: BinOpKind,
+ lhs: &'tcx Expr<'_>,
+ rhs: &'tcx Expr<'_>,
+) {
+ let (lhs, rhs) = match op {
+ BinOpKind::Lt => (lhs, rhs),
+ BinOpKind::Gt => (rhs, lhs),
+ _ => return,
+ };
+
+ if_chain! {
+ // left hand side is a subtraction
+ if let ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Sub,
+ ..
+ },
+ val_l,
+ val_r,
+ ) = lhs.kind;
+
+ // right hand side matches either f32::EPSILON or f64::EPSILON
+ if let ExprKind::Path(ref epsilon_path) = rhs.kind;
+ if let Res::Def(DefKind::AssocConst, def_id) = cx.qpath_res(epsilon_path, rhs.hir_id);
+ if match_def_path(cx, def_id, &paths::F32_EPSILON) || match_def_path(cx, def_id, &paths::F64_EPSILON);
+
+ // values of the subtractions on the left hand side are of the type float
+ let t_val_l = cx.typeck_results().expr_ty(val_l);
+ let t_val_r = cx.typeck_results().expr_ty(val_r);
+ if let ty::Float(_) = t_val_l.kind();
+ if let ty::Float(_) = t_val_r.kind();
+
+ then {
+ let sug_l = sugg::Sugg::hir(cx, val_l, "..");
+ let sug_r = sugg::Sugg::hir(cx, val_r, "..");
+ // format the suggestion
+ let suggestion = format!("{}.abs()", sugg::make_assoc(AssocOp::Subtract, &sug_l, &sug_r).maybe_par());
+ // spans the lint
+ span_lint_and_then(
+ cx,
+ FLOAT_EQUALITY_WITHOUT_ABS,
+ expr.span,
+ "float equality check without `.abs()`",
+ |diag| {
+ diag.span_suggestion(
+ lhs.span,
+ "add `.abs()`",
+ suggestion,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ );
+ }
+ }
+}
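A sketch of the pattern this check rewrites, based on the `FLOAT_EQUALITY_WITHOUT_ABS` example in `operators/mod.rs` below: without `.abs()`, a large negative difference also satisfies the comparison.

```rust
fn is_roughly_equal(a: f32, b: f32) -> bool {
    // Flagged form: (a - b) < f32::EPSILON
    // Suggested form:
    (a - b).abs() < f32::EPSILON
}

fn main() {
    println!("{}", is_roughly_equal(0.1 + 0.2, 0.3));
}
```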
diff --git a/src/tools/clippy/clippy_lints/src/operators/identity_op.rs b/src/tools/clippy/clippy_lints/src/operators/identity_op.rs
new file mode 100644
index 000000000..b48d6c4e2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/identity_op.rs
@@ -0,0 +1,148 @@
+use clippy_utils::consts::{constant_full_int, constant_simple, Constant, FullInt};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{clip, unsext};
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, Node};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+use rustc_span::source_map::Span;
+
+use super::IDENTITY_OP;
+
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ if !is_allowed(cx, op, left, right) {
+ match op {
+ BinOpKind::Add | BinOpKind::BitOr | BinOpKind::BitXor => {
+ check_op(cx, left, 0, expr.span, right.span, needs_parenthesis(cx, expr, right));
+ check_op(cx, right, 0, expr.span, left.span, Parens::Unneeded);
+ },
+ BinOpKind::Shl | BinOpKind::Shr | BinOpKind::Sub => {
+ check_op(cx, right, 0, expr.span, left.span, Parens::Unneeded);
+ },
+ BinOpKind::Mul => {
+ check_op(cx, left, 1, expr.span, right.span, needs_parenthesis(cx, expr, right));
+ check_op(cx, right, 1, expr.span, left.span, Parens::Unneeded);
+ },
+ BinOpKind::Div => check_op(cx, right, 1, expr.span, left.span, Parens::Unneeded),
+ BinOpKind::BitAnd => {
+ check_op(cx, left, -1, expr.span, right.span, needs_parenthesis(cx, expr, right));
+ check_op(cx, right, -1, expr.span, left.span, Parens::Unneeded);
+ },
+ BinOpKind::Rem => check_remainder(cx, left, right, expr.span, left.span),
+ _ => (),
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+enum Parens {
+ Needed,
+ Unneeded,
+}
+
+/// Checks if `left op right` needs parenthesis when reduced to `right`
+/// e.g. `0 + if b { 1 } else { 2 } + if b { 3 } else { 4 }` cannot be reduced
+/// to `if b { 1 } else { 2 } + if b { 3 } else { 4 }` where the `if` could be
+/// interpreted as a statement
+///
+/// See #8724
+fn needs_parenthesis(cx: &LateContext<'_>, binary: &Expr<'_>, right: &Expr<'_>) -> Parens {
+ match right.kind {
+ ExprKind::Binary(_, lhs, _) | ExprKind::Cast(lhs, _) => {
+ // ensure we're checking against the leftmost expression of `right`
+ //
+ // ~~~ `lhs`
+ // 0 + {4} * 2
+ // ~~~~~~~ `right`
+ return needs_parenthesis(cx, binary, lhs);
+ },
+ ExprKind::If(..) | ExprKind::Match(..) | ExprKind::Block(..) | ExprKind::Loop(..) => {},
+ _ => return Parens::Unneeded,
+ }
+
+ let mut prev_id = binary.hir_id;
+ for (_, node) in cx.tcx.hir().parent_iter(binary.hir_id) {
+ if let Node::Expr(expr) = node
+ && let ExprKind::Binary(_, lhs, _) | ExprKind::Cast(lhs, _) = expr.kind
+ && lhs.hir_id == prev_id
+ {
+ // keep going until we find a node that encompasses left of `binary`
+ prev_id = expr.hir_id;
+ continue;
+ }
+
+ match node {
+ Node::Block(_) | Node::Stmt(_) => break,
+ _ => return Parens::Unneeded,
+ };
+ }
+
+ Parens::Needed
+}
+
+fn is_allowed(cx: &LateContext<'_>, cmp: BinOpKind, left: &Expr<'_>, right: &Expr<'_>) -> bool {
+ // This lint applies to integers
+ !cx.typeck_results().expr_ty(left).peel_refs().is_integral()
+ || !cx.typeck_results().expr_ty(right).peel_refs().is_integral()
+ // `1 << 0` is a common pattern in bit manipulation code
+ || (cmp == BinOpKind::Shl
+ && constant_simple(cx, cx.typeck_results(), right) == Some(Constant::Int(0))
+ && constant_simple(cx, cx.typeck_results(), left) == Some(Constant::Int(1)))
+}
+
+fn check_remainder(cx: &LateContext<'_>, left: &Expr<'_>, right: &Expr<'_>, span: Span, arg: Span) {
+ let lhs_const = constant_full_int(cx, cx.typeck_results(), left);
+ let rhs_const = constant_full_int(cx, cx.typeck_results(), right);
+ if match (lhs_const, rhs_const) {
+ (Some(FullInt::S(lv)), Some(FullInt::S(rv))) => lv.abs() < rv.abs(),
+ (Some(FullInt::U(lv)), Some(FullInt::U(rv))) => lv < rv,
+ _ => return,
+ } {
+ span_ineffective_operation(cx, span, arg, Parens::Unneeded);
+ }
+}
+
+fn check_op(cx: &LateContext<'_>, e: &Expr<'_>, m: i8, span: Span, arg: Span, parens: Parens) {
+ if let Some(Constant::Int(v)) = constant_simple(cx, cx.typeck_results(), e).map(Constant::peel_refs) {
+ let check = match *cx.typeck_results().expr_ty(e).peel_refs().kind() {
+ ty::Int(ity) => unsext(cx.tcx, -1_i128, ity),
+ ty::Uint(uty) => clip(cx.tcx, !0, uty),
+ _ => return,
+ };
+ if match m {
+ 0 => v == 0,
+ -1 => v == check,
+ 1 => v == 1,
+ _ => unreachable!(),
+ } {
+ span_ineffective_operation(cx, span, arg, parens);
+ }
+ }
+}
+
+fn span_ineffective_operation(cx: &LateContext<'_>, span: Span, arg: Span, parens: Parens) {
+ let mut applicability = Applicability::MachineApplicable;
+ let expr_snippet = snippet_with_applicability(cx, arg, "..", &mut applicability);
+
+ let suggestion = match parens {
+ Parens::Needed => format!("({expr_snippet})"),
+ Parens::Unneeded => expr_snippet.into_owned(),
+ };
+
+ span_lint_and_sugg(
+ cx,
+ IDENTITY_OP,
+ span,
+ "this operation has no effect",
+ "consider reducing it to",
+ suggestion,
+ applicability,
+ );
+}
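A small sketch of expressions `identity_op` reduces (see the `IDENTITY_OP` description in `operators/mod.rs` below); the `needs_parenthesis` logic above only matters when the remaining operand starts with a block-like expression such as `if`.

```rust
fn main() {
    let x = 1;
    // Each operation has no effect; the suggestion reduces all of them to `x`.
    let _a = x + 0;
    let _b = x * 1;
    let _c = x / 1;
    let _d = x | 0;
}
```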
diff --git a/src/tools/clippy/clippy_lints/src/operators/integer_division.rs b/src/tools/clippy/clippy_lints/src/operators/integer_division.rs
new file mode 100644
index 000000000..631d10f4a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/integer_division.rs
@@ -0,0 +1,27 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+use super::INTEGER_DIVISION;
+
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ op: hir::BinOpKind,
+ left: &'tcx hir::Expr<'_>,
+ right: &'tcx hir::Expr<'_>,
+) {
+ if op == hir::BinOpKind::Div
+ && cx.typeck_results().expr_ty(left).is_integral()
+ && cx.typeck_results().expr_ty(right).is_integral()
+ {
+ span_lint_and_help(
+ cx,
+ INTEGER_DIVISION,
+ expr.span,
+ "integer division",
+ None,
+ "division of integers may cause loss of precision. consider using floats",
+ );
+ }
+}
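For illustration, the behaviour the `INTEGER_DIVISION` restriction lint warns about (per its description in `operators/mod.rs` below): the remainder of an integer division is silently discarded.

```rust
fn main() {
    let x = 3 / 2;       // 1 – remainder discarded
    let y = 3f32 / 2f32; // 1.5
    println!("{x} {y}");
}
```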
diff --git a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
new file mode 100644
index 000000000..0024384d9
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
@@ -0,0 +1,84 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::eq_expr_value;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::sugg;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+
+use super::MISREFACTORED_ASSIGN_OP;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ op: hir::BinOpKind,
+ lhs: &'tcx hir::Expr<'_>,
+ rhs: &'tcx hir::Expr<'_>,
+) {
+ if let hir::ExprKind::Binary(binop, l, r) = &rhs.kind {
+ if op != binop.node {
+ return;
+ }
+ // lhs op= l op r
+ if eq_expr_value(cx, lhs, l) {
+ lint_misrefactored_assign_op(cx, expr, op, rhs, lhs, r);
+ }
+ // lhs op= l commutative_op r
+ if is_commutative(op) && eq_expr_value(cx, lhs, r) {
+ lint_misrefactored_assign_op(cx, expr, op, rhs, lhs, l);
+ }
+ }
+}
+
+fn lint_misrefactored_assign_op(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ op: hir::BinOpKind,
+ rhs: &hir::Expr<'_>,
+ assignee: &hir::Expr<'_>,
+ rhs_other: &hir::Expr<'_>,
+) {
+ span_lint_and_then(
+ cx,
+ MISREFACTORED_ASSIGN_OP,
+ expr.span,
+ "variable appears on both sides of an assignment operation",
+ |diag| {
+ if let (Some(snip_a), Some(snip_r)) = (snippet_opt(cx, assignee.span), snippet_opt(cx, rhs_other.span)) {
+ let a = &sugg::Sugg::hir(cx, assignee, "..");
+ let r = &sugg::Sugg::hir(cx, rhs, "..");
+ let long = format!("{} = {}", snip_a, sugg::make_binop(op.into(), a, r));
+ diag.span_suggestion(
+ expr.span,
+ &format!(
+ "did you mean `{} = {} {} {}` or `{}`? Consider replacing it with",
+ snip_a,
+ snip_a,
+ op.as_str(),
+ snip_r,
+ long
+ ),
+ format!("{} {}= {}", snip_a, op.as_str(), snip_r),
+ Applicability::MaybeIncorrect,
+ );
+ diag.span_suggestion(
+ expr.span,
+ "or",
+ long,
+ Applicability::MaybeIncorrect, // snippet
+ );
+ }
+ },
+ );
+}
+
+#[must_use]
+fn is_commutative(op: hir::BinOpKind) -> bool {
+ use rustc_hir::BinOpKind::{
+ Add, And, BitAnd, BitOr, BitXor, Div, Eq, Ge, Gt, Le, Lt, Mul, Ne, Or, Rem, Shl, Shr, Sub,
+ };
+ match op {
+ Add | Mul | And | Or | BitXor | BitAnd | BitOr | Eq | Ne => true,
+ Sub | Div | Rem | Shl | Shr | Lt | Le | Ge | Gt => false,
+ }
+}
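A minimal sketch of the pattern this check targets and the two rewrites it offers, following the `MISREFACTORED_ASSIGN_OP` description in `operators/mod.rs` below.

```rust
fn main() {
    let mut a = 5;
    let b = 2;
    // Flagged: `a` appears on both sides of the assignment operation.
    a += a + b;
    // The diagnostic suggests either `a += b` or `a = a + a + b`,
    // depending on which one was actually intended.
    println!("{a}");
}
```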
diff --git a/src/tools/clippy/clippy_lints/src/operators/mod.rs b/src/tools/clippy/clippy_lints/src/operators/mod.rs
new file mode 100644
index 000000000..bb6d99406
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/mod.rs
@@ -0,0 +1,888 @@
+mod absurd_extreme_comparisons;
+mod assign_op_pattern;
+mod bit_mask;
+mod cmp_nan;
+mod cmp_owned;
+mod double_comparison;
+mod duration_subsec;
+mod eq_op;
+mod erasing_op;
+mod float_cmp;
+mod float_equality_without_abs;
+mod identity_op;
+mod integer_division;
+mod misrefactored_assign_op;
+mod modulo_arithmetic;
+mod modulo_one;
+mod needless_bitwise_bool;
+mod numeric_arithmetic;
+mod op_ref;
+mod ptr_eq;
+mod self_assignment;
+mod verbose_bit_mask;
+
+pub(crate) mod arithmetic;
+
+use rustc_hir::{Body, Expr, ExprKind, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparisons where one side of the relation is
+ /// either the minimum or maximum value for its type and warns if it involves a
+ /// case that is always true or always false. Only integer and boolean types are
+ /// checked.
+ ///
+ /// ### Why is this bad?
+ /// An expression like `min <= x` may misleadingly imply
+ /// that it is possible for `x` to be less than the minimum. Expressions like
+ /// `max < x` are probably mistakes.
+ ///
+ /// ### Known problems
+ /// For `usize` the size of the current compile target will
+ /// be assumed (e.g., 64 bits on 64 bit systems). This means code that uses such
+ /// a comparison to detect target pointer width will trigger this lint. One can
+ /// use `mem::size_of` and compare its value, or conditional compilation
+ /// attributes like `#[cfg(target_pointer_width = "64")] ..` instead.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let vec: Vec<isize> = Vec::new();
+ /// if vec.len() <= 0 {}
+ /// if 100 > i32::MAX {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ABSURD_EXTREME_COMPARISONS,
+ correctness,
+ "a comparison with a maximum or minimum value that is always true or false"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for any kind of arithmetic operation of any type.
+ ///
+ /// Operators like `+`, `-`, `*` or `<<` are usually capable of overflowing according to the [Rust
+ /// Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
+ /// or can panic (`/`, `%`). Known safe built-in types like `Wrapping` or `Saturating` are filtered
+ /// away.
+ ///
+ /// ### Why is this bad?
+ /// Integer overflow will trigger a panic in debug builds or will wrap in
+ /// release mode. Division by zero will cause a panic in either mode. In some applications one
+ /// wants explicitly checked, wrapping or saturating arithmetic.
+ ///
+ /// #### Example
+ /// ```rust
+ /// # let a = 0;
+ /// a + 1;
+ /// ```
+ ///
+ /// Third-party types also tend to overflow.
+ ///
+ /// #### Example
+ /// ```ignore,rust
+ /// use rust_decimal::Decimal;
+ /// let _n = Decimal::MAX + Decimal::MAX;
+ /// ```
+ ///
+ /// ### Allowed types
+ /// Custom allowed types can be specified through the "arithmetic-allowed" filter.
+ #[clippy::version = "1.64.0"]
+ pub ARITHMETIC,
+ restriction,
+ "any arithmetic expression that could overflow or panic"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for integer arithmetic operations which could overflow or panic.
+ ///
+ /// Specifically, checks for any operators (`+`, `-`, `*`, `<<`, etc) which are capable
+ /// of overflowing according to the [Rust
+ /// Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
+ /// or which can panic (`/`, `%`). No bounds analysis or sophisticated reasoning is
+ /// attempted.
+ ///
+ /// ### Why is this bad?
+ /// Integer overflow will trigger a panic in debug builds or will wrap in
+ /// release mode. Division by zero will cause a panic in either mode. In some applications one
+ /// wants explicitly checked, wrapping or saturating arithmetic.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let a = 0;
+ /// a + 1;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INTEGER_ARITHMETIC,
+ restriction,
+ "any integer arithmetic expression which could overflow or panic"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for float arithmetic.
+ ///
+ /// ### Why is this bad?
+ /// For some embedded systems or kernel development, it
+ /// can be useful to rule out floating-point numbers.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let a = 0.0;
+ /// a + 1.0;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FLOAT_ARITHMETIC,
+ restriction,
+ "any floating-point arithmetic statement"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `a = a op b` or `a = b commutative_op a`
+ /// patterns.
+ ///
+ /// ### Why is this bad?
+ /// These can be written as the shorter `a op= b`.
+ ///
+ /// ### Known problems
+ /// While forbidden by the spec, `OpAssign` traits may have
+ /// implementations that differ from the regular `Op` impl.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut a = 5;
+ /// let b = 0;
+ /// // ...
+ ///
+ /// a = a + b;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let mut a = 5;
+ /// let b = 0;
+ /// // ...
+ ///
+ /// a += b;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ASSIGN_OP_PATTERN,
+ style,
+ "assigning the result of an operation on a variable to that same variable"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `a op= a op b` or `a op= b op a` patterns.
+ ///
+ /// ### Why is this bad?
+ /// Most likely these are bugs where one meant to write `a
+ /// op= b`.
+ ///
+ /// ### Known problems
+ /// Clippy cannot know for sure if `a op= a op b` should have
+ /// been `a = a op a op b` or `a = a op b`/`a op= b`. Therefore, it suggests both.
+ /// If `a op= a op b` is really the correct behavior it should be
+ /// written as `a = a op a op b` as it's less confusing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut a = 5;
+ /// let b = 2;
+ /// // ...
+ /// a += a + b;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MISREFACTORED_ASSIGN_OP,
+ suspicious,
+ "having a variable on both sides of an assign op"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for incompatible bit masks in comparisons.
+ ///
+ /// The formula for detecting if an expression of the type `_ <bit_op> m
+ /// <cmp_op> c` (where `<bit_op>` is one of {`&`, `|`} and `<cmp_op>` is one of
+ /// {`==`, `!=`, `<`, `>=`, `>`, `<=`}) can be determined from the following
+ /// table:
+ ///
+ /// |Comparison |Bit Op|Example |is always|Formula |
+ /// |------------|------|-------------|---------|----------------------|
+ /// |`==` or `!=`| `&` |`x & 2 == 3` |`false` |`c & m != c` |
+ /// |`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
+ /// |`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
+ /// |`==` or `!=`| `\|` |`x \| 1 == 0`|`false` |`c \| m != c` |
+ /// |`<` or `>=`| `\|` |`x \| 1 < 1` |`false` |`m >= c` |
+ /// |`<=` or `>` | `\|` |`x \| 1 > 0` |`true` |`m > c` |
+ ///
+ /// ### Why is this bad?
+ /// If the bits that the comparison cares about are always
+ /// set to zero or one by the bit mask, the comparison is constant `true` or
+ /// `false` (depending on mask, compared value, and operators).
+ ///
+ /// So the code is actively misleading, and the only reason someone would write
+ /// this intentionally is to win an underhanded Rust contest or create a
+ /// test-case for this lint.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// if (x & 1 == 2) { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BAD_BIT_MASK,
+ correctness,
+ "expressions of the form `_ & mask == select` that will only ever return `true` or `false`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for bit masks in comparisons which can be removed
+ /// without changing the outcome. The basic structure can be seen in the
+ /// following table:
+ ///
+ /// |Comparison| Bit Op |Example |equals |
+ /// |----------|----------|------------|-------|
+ /// |`>` / `<=`|`\|` / `^`|`x \| 2 > 3`|`x > 3`|
+ /// |`<` / `>=`|`\|` / `^`|`x ^ 1 < 4` |`x < 4`|
+ ///
+ /// ### Why is this bad?
+ /// Not as evil as [`bad_bit_mask`](#bad_bit_mask),
+ /// but still a bit misleading, because the bit mask is ineffective.
+ ///
+ /// ### Known problems
+ /// False negatives: This lint will only match instances
+ /// where we have figured out the math (which is for a power-of-two compared
+ /// value). This means things like `x | 1 >= 7` (which would be better written
+ /// as `x >= 6`) will not be reported (but bit masks like this are fairly
+ /// uncommon).
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// if (x | 1 > 3) { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INEFFECTIVE_BIT_MASK,
+ correctness,
+ "expressions where a bit mask will be rendered useless by a comparison, e.g., `(x | 1) > 2`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for bit masks that can be replaced by a call
+ /// to `trailing_zeros`
+ ///
+ /// ### Why is this bad?
+ /// `x.trailing_zeros() >= 4` is much clearer than `x & 15
+ /// == 0`
+ ///
+ /// ### Known problems
+ /// LLVM generates better code for `x & 15 == 0` on x86.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// if x & 0b1111 == 0 { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub VERBOSE_BIT_MASK,
+ pedantic,
+ "expressions where a bit mask is less readable than the corresponding method call"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for double comparisons that could be simplified to a single expression.
+ ///
+ ///
+ /// ### Why is this bad?
+ /// Readability.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// # let y = 2;
+ /// if x == y || x < y {}
+ /// ```
+ ///
+ /// Use instead:
+ ///
+ /// ```rust
+ /// # let x = 1;
+ /// # let y = 2;
+ /// if x <= y {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DOUBLE_COMPARISONS,
+ complexity,
+ "unnecessary double comparisons that can be simplified"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calculation of subsecond microseconds or milliseconds
+ /// from other `Duration` methods.
+ ///
+ /// ### Why is this bad?
+ /// It's more concise to call `Duration::subsec_micros()` or
+ /// `Duration::subsec_millis()` than to calculate them.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::time::Duration;
+ /// # let duration = Duration::new(5, 0);
+ /// let micros = duration.subsec_nanos() / 1_000;
+ /// let millis = duration.subsec_nanos() / 1_000_000;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::time::Duration;
+ /// # let duration = Duration::new(5, 0);
+ /// let micros = duration.subsec_micros();
+ /// let millis = duration.subsec_millis();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DURATION_SUBSEC,
+ complexity,
+ "checks for calculation of subsecond microseconds or milliseconds"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for equal operands to comparison, logical, bitwise,
+ /// difference, and division binary operators (`==`, `>`, etc., `&&`,
+ /// `||`, `&`, `|`, `^`, `-` and `/`).
+ ///
+ /// ### Why is this bad?
+ /// This is usually just a typo or a copy and paste error.
+ ///
+ /// ### Known problems
+ /// False negatives: We had some false positives regarding
+ /// calls (notably [racer](https://github.com/phildawes/racer) had one instance
+ /// of `x.pop() && x.pop()`), so we removed matching any function or method
+ /// calls. We may introduce a list of known pure functions in the future.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// if x + 1 == x + 1 {}
+ ///
+ /// // or
+ ///
+ /// # let a = 3;
+ /// # let b = 4;
+ /// assert_eq!(a, a);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub EQ_OP,
+ correctness,
+ "equal operands on both sides of a comparison or bitwise combination (e.g., `x == x`)"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for arguments to `==` which have their address
+ /// taken to satisfy a bound
+ /// and suggests to dereference the other argument instead
+ ///
+ /// ### Why is this bad?
+ /// It is more idiomatic to dereference the other argument.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// &x == y
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// x == *y
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OP_REF,
+ style,
+ "taking a reference to satisfy the type constraints on `==`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for erasing operations, e.g., `x * 0`.
+ ///
+ /// ### Why is this bad?
+ /// The whole expression can be replaced by zero.
+ /// This is most likely not the intended outcome and should probably be
+ /// corrected.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 1;
+ /// 0 / x;
+ /// 0 * x;
+ /// x & 0;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ERASING_OP,
+ correctness,
+ "using erasing operations, e.g., `x * 0` or `y & 0`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for statements of the form `(a - b) < f32::EPSILON` or
+ /// `(a - b) < f64::EPSILON`. Notes the missing `.abs()`.
+ ///
+ /// ### Why is this bad?
+ /// The code without `.abs()` is more likely to have a bug.
+ ///
+ /// ### Known problems
+ /// If the user can ensure that `b` is larger than `a`, the `.abs()` is
+ /// technically unnecessary. However, it will make the code more robust and doesn't have any
+ /// large performance implications. If the abs call was deliberately left out for performance
+ /// reasons, it is probably better to state this explicitly in the code, which then can be done
+ /// with an allow.
+ ///
+ /// ### Example
+ /// ```rust
+ /// pub fn is_roughly_equal(a: f32, b: f32) -> bool {
+ /// (a - b) < f32::EPSILON
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// pub fn is_roughly_equal(a: f32, b: f32) -> bool {
+ /// (a - b).abs() < f32::EPSILON
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub FLOAT_EQUALITY_WITHOUT_ABS,
+ suspicious,
+ "float equality check without `.abs()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for identity operations, e.g., `x + 0`.
+ ///
+ /// ### Why is this bad?
+ /// This code can be removed without changing the
+ /// meaning. So it just obscures what's going on. Delete it mercilessly.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// x / 1 + 0 * 1 - 0 | 0;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub IDENTITY_OP,
+ complexity,
+ "using identity operations, e.g., `x + 0` or `y / 1`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for division of integers
+ ///
+ /// ### Why is this bad?
+ /// Outside of some very specific algorithms,
+ /// integer division is very often a mistake because it discards the
+ /// remainder.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 3 / 2;
+ /// println!("{}", x);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x = 3f32 / 2f32;
+ /// println!("{}", x);
+ /// ```
+ #[clippy::version = "1.37.0"]
+ pub INTEGER_DIVISION,
+ restriction,
+ "integer division may cause loss of precision"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparisons to NaN.
+ ///
+ /// ### Why is this bad?
+ /// NaN does not compare meaningfully to anything – not
+ /// even itself – so those comparisons are simply wrong.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1.0;
+ /// if x == f32::NAN { }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = 1.0f32;
+ /// if x.is_nan() { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CMP_NAN,
+ correctness,
+ "comparisons to `NAN`, which will always return false, probably not intended"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for conversions to owned values just for the sake
+ /// of a comparison.
+ ///
+ /// ### Why is this bad?
+ /// The comparison can operate on a reference, so creating
+ /// an owned value effectively means throwing it away directly afterwards,
+ /// which needlessly consumes code and heap space.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = "foo";
+ /// # let y = String::from("foo");
+ /// if x.to_owned() == y {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = "foo";
+ /// # let y = String::from("foo");
+ /// if x == y {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CMP_OWNED,
+ perf,
+ "creating owned instances for comparing with others, e.g., `x == \"foo\".to_string()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for (in-)equality comparisons on floating-point
+ /// values (apart from zero), except in functions called `*eq*` (which probably
+ /// implement equality for a type involving floats).
+ ///
+ /// ### Why is this bad?
+ /// Floating point calculations are usually imprecise, so
+ /// asking if two values are *exactly* equal is asking for trouble. For a good
+ /// guide on what to do, see [the floating point
+ /// guide](http://www.floating-point-gui.de/errors/comparison).
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 1.2331f64;
+ /// let y = 1.2332f64;
+ ///
+ /// if y == 1.23f64 { }
+ /// if y != x {} // where both are floats
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = 1.2331f64;
+ /// # let y = 1.2332f64;
+ /// let error_margin = f64::EPSILON; // Use an epsilon for comparison
+ /// // Or, if Rust <= 1.42, use `std::f64::EPSILON` constant instead.
+ /// // let error_margin = std::f64::EPSILON;
+ /// if (y - 1.23f64).abs() < error_margin { }
+ /// if (y - x).abs() > error_margin { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FLOAT_CMP,
+ pedantic,
+ "using `==` or `!=` on float values instead of comparing difference with an epsilon"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for (in-)equality comparisons on floating-point
+ /// value and constant, except in functions called `*eq*` (which probably
+ /// implement equality for a type involving floats).
+ ///
+ /// ### Why is this bad?
+ /// Floating point calculations are usually imprecise, so
+ /// asking if two values are *exactly* equal is asking for trouble. For a good
+ /// guide on what to do, see [the floating point
+ /// guide](http://www.floating-point-gui.de/errors/comparison).
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x: f64 = 1.0;
+ /// const ONE: f64 = 1.00;
+ ///
+ /// if x == ONE { } // where both are floats
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x: f64 = 1.0;
+ /// # const ONE: f64 = 1.00;
+ /// let error_margin = f64::EPSILON; // Use an epsilon for comparison
+ /// // Or, if Rust <= 1.42, use `std::f64::EPSILON` constant instead.
+ /// // let error_margin = std::f64::EPSILON;
+ /// if (x - ONE).abs() < error_margin { }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub FLOAT_CMP_CONST,
+ restriction,
+ "using `==` or `!=` on float constants instead of comparing difference with an epsilon"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for getting the remainder of a division by one or minus
+ /// one.
+ ///
+ /// ### Why is this bad?
+ /// The result for a divisor of one can only ever be zero; for
+ /// minus one it can cause panic/overflow (if the left operand is the minimal value of
+ /// the respective integer type) or result in zero. No one will write such code
+ /// deliberately, unless trying to win an Underhanded Rust Contest. Even for that
+ /// contest, it's probably a bad idea. Use something more underhanded.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// let a = x % 1;
+ /// let a = x % -1;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MODULO_ONE,
+ correctness,
+ "taking a number modulo +/-1, which can either panic/overflow or always returns 0"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for modulo arithmetic.
+ ///
+ /// ### Why is this bad?
+ /// The results of modulo (%) operation might differ
+ /// depending on the language, when negative numbers are involved.
+ /// If you interop with different languages it might be beneficial
+ /// to double check all places that use modulo arithmetic.
+ ///
+ /// For example, in Rust `17 % -3 = 2`, but in Python `17 % -3 = -1`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = -17 % 3;
+ /// ```
+ #[clippy::version = "1.42.0"]
+ pub MODULO_ARITHMETIC,
+ restriction,
+ "any modulo arithmetic statement"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for uses of the bitwise and/or operators between booleans, where performance may be improved by using
+ /// the lazy `&&` or `||` operator instead.
+ ///
+ /// ### Why is this bad?
+ /// The bitwise operators do not support short-circuiting, so it may hinder code performance.
+ /// Additionally, boolean logic "masked" as bitwise logic is not caught by lints like `unnecessary_fold`
+ ///
+ /// ### Known problems
+ /// This lint evaluates only when the right side is determined to have no side effects. At this time, that
+ /// determination is quite conservative.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let (x,y) = (true, false);
+ /// if x & !y {} // where both x and y are booleans
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let (x,y) = (true, false);
+ /// if x && !y {}
+ /// ```
+ #[clippy::version = "1.54.0"]
+ pub NEEDLESS_BITWISE_BOOL,
+ pedantic,
+ "Boolean expressions that use bitwise rather than lazy operators"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Use `std::ptr::eq` when applicable
+ ///
+ /// ### Why is this bad?
+ /// `ptr::eq` can be used to compare `&T` references
+ /// (which coerce to `*const T` implicitly) by their address rather than
+ /// comparing the values they point to.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = &[1, 2, 3];
+ /// let b = &[1, 2, 3];
+ ///
+ /// assert!(a as *const _ as usize == b as *const _ as usize);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let a = &[1, 2, 3];
+ /// let b = &[1, 2, 3];
+ ///
+ /// assert!(std::ptr::eq(a, b));
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub PTR_EQ,
+ style,
+ "use `std::ptr::eq` when comparing raw pointers"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for explicit self-assignments.
+ ///
+ /// ### Why is this bad?
+ /// Self-assignments are redundant and unlikely to be
+ /// intentional.
+ ///
+ /// ### Known problems
+ /// If the expression contains any deref coercions or
+ /// indexing operations, they are assumed not to have any side effects.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Event {
+ /// x: i32,
+ /// }
+ ///
+ /// fn copy_position(a: &mut Event, b: &Event) {
+ /// a.x = a.x;
+ /// }
+ /// ```
+ ///
+ /// Should be:
+ /// ```rust
+ /// struct Event {
+ /// x: i32,
+ /// }
+ ///
+ /// fn copy_position(a: &mut Event, b: &Event) {
+ /// a.x = b.x;
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub SELF_ASSIGNMENT,
+ correctness,
+ "explicit self-assignment"
+}
+
+pub struct Operators {
+ arithmetic_context: numeric_arithmetic::Context,
+ verbose_bit_mask_threshold: u64,
+}
+impl_lint_pass!(Operators => [
+ ABSURD_EXTREME_COMPARISONS,
+ ARITHMETIC,
+ INTEGER_ARITHMETIC,
+ FLOAT_ARITHMETIC,
+ ASSIGN_OP_PATTERN,
+ MISREFACTORED_ASSIGN_OP,
+ BAD_BIT_MASK,
+ INEFFECTIVE_BIT_MASK,
+ VERBOSE_BIT_MASK,
+ DOUBLE_COMPARISONS,
+ DURATION_SUBSEC,
+ EQ_OP,
+ OP_REF,
+ ERASING_OP,
+ FLOAT_EQUALITY_WITHOUT_ABS,
+ IDENTITY_OP,
+ INTEGER_DIVISION,
+ CMP_NAN,
+ CMP_OWNED,
+ FLOAT_CMP,
+ FLOAT_CMP_CONST,
+ MODULO_ONE,
+ MODULO_ARITHMETIC,
+ NEEDLESS_BITWISE_BOOL,
+ PTR_EQ,
+ SELF_ASSIGNMENT,
+]);
+impl Operators {
+ pub fn new(verbose_bit_mask_threshold: u64) -> Self {
+ Self {
+ arithmetic_context: numeric_arithmetic::Context::default(),
+ verbose_bit_mask_threshold,
+ }
+ }
+}
+impl<'tcx> LateLintPass<'tcx> for Operators {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ eq_op::check_assert(cx, e);
+ match e.kind {
+ ExprKind::Binary(op, lhs, rhs) => {
+ if !e.span.from_expansion() {
+ absurd_extreme_comparisons::check(cx, e, op.node, lhs, rhs);
+ if !(macro_with_not_op(lhs) || macro_with_not_op(rhs)) {
+ eq_op::check(cx, e, op.node, lhs, rhs);
+ op_ref::check(cx, e, op.node, lhs, rhs);
+ }
+ erasing_op::check(cx, e, op.node, lhs, rhs);
+ identity_op::check(cx, e, op.node, lhs, rhs);
+ needless_bitwise_bool::check(cx, e, op.node, lhs, rhs);
+ ptr_eq::check(cx, e, op.node, lhs, rhs);
+ }
+ self.arithmetic_context.check_binary(cx, e, op.node, lhs, rhs);
+ bit_mask::check(cx, e, op.node, lhs, rhs);
+ verbose_bit_mask::check(cx, e, op.node, lhs, rhs, self.verbose_bit_mask_threshold);
+ double_comparison::check(cx, op.node, lhs, rhs, e.span);
+ duration_subsec::check(cx, e, op.node, lhs, rhs);
+ float_equality_without_abs::check(cx, e, op.node, lhs, rhs);
+ integer_division::check(cx, e, op.node, lhs, rhs);
+ cmp_nan::check(cx, e, op.node, lhs, rhs);
+ cmp_owned::check(cx, op.node, lhs, rhs);
+ float_cmp::check(cx, e, op.node, lhs, rhs);
+ modulo_one::check(cx, e, op.node, rhs);
+ modulo_arithmetic::check(cx, e, op.node, lhs, rhs);
+ },
+ ExprKind::AssignOp(op, lhs, rhs) => {
+ self.arithmetic_context.check_binary(cx, e, op.node, lhs, rhs);
+ misrefactored_assign_op::check(cx, e, op.node, lhs, rhs);
+ modulo_arithmetic::check(cx, e, op.node, lhs, rhs);
+ },
+ ExprKind::Assign(lhs, rhs, _) => {
+ assign_op_pattern::check(cx, e, lhs, rhs);
+ self_assignment::check(cx, e, lhs, rhs);
+ },
+ ExprKind::Unary(op, arg) => {
+ if op == UnOp::Neg {
+ self.arithmetic_context.check_negate(cx, e, arg);
+ }
+ },
+ _ => (),
+ }
+ }
+
+ fn check_expr_post(&mut self, _: &LateContext<'_>, e: &Expr<'_>) {
+ self.arithmetic_context.expr_post(e.hir_id);
+ }
+
+ fn check_body(&mut self, cx: &LateContext<'tcx>, b: &'tcx Body<'_>) {
+ self.arithmetic_context.enter_body(cx, b);
+ }
+
+ fn check_body_post(&mut self, cx: &LateContext<'tcx>, b: &'tcx Body<'_>) {
+ self.arithmetic_context.body_post(cx, b);
+ }
+}
+
+fn macro_with_not_op(e: &Expr<'_>) -> bool {
+ if let ExprKind::Unary(_, e) = e.kind {
+ e.span.from_expansion()
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs b/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs
new file mode 100644
index 000000000..af4e74947
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/modulo_arithmetic.rs
@@ -0,0 +1,126 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sext;
+use if_chain::if_chain;
+use rustc_hir::{BinOpKind, Expr};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use std::fmt::Display;
+
+use super::MODULO_ARITHMETIC;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ op: BinOpKind,
+ lhs: &'tcx Expr<'_>,
+ rhs: &'tcx Expr<'_>,
+) {
+ if op == BinOpKind::Rem {
+ let lhs_operand = analyze_operand(lhs, cx, e);
+ let rhs_operand = analyze_operand(rhs, cx, e);
+ if_chain! {
+ if let Some(lhs_operand) = lhs_operand;
+ if let Some(rhs_operand) = rhs_operand;
+ then {
+ check_const_operands(cx, e, &lhs_operand, &rhs_operand);
+ }
+ else {
+ check_non_const_operands(cx, e, lhs);
+ }
+ }
+ };
+}
+
+struct OperandInfo {
+ string_representation: Option<String>,
+ is_negative: bool,
+ is_integral: bool,
+}
+
+fn analyze_operand(operand: &Expr<'_>, cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<OperandInfo> {
+ match constant(cx, cx.typeck_results(), operand) {
+ Some((Constant::Int(v), _)) => match *cx.typeck_results().expr_ty(expr).kind() {
+ ty::Int(ity) => {
+ let value = sext(cx.tcx, v, ity);
+ return Some(OperandInfo {
+ string_representation: Some(value.to_string()),
+ is_negative: value < 0,
+ is_integral: true,
+ });
+ },
+ ty::Uint(_) => {
+ return Some(OperandInfo {
+ string_representation: None,
+ is_negative: false,
+ is_integral: true,
+ });
+ },
+ _ => {},
+ },
+ Some((Constant::F32(f), _)) => {
+ return Some(floating_point_operand_info(&f));
+ },
+ Some((Constant::F64(f), _)) => {
+ return Some(floating_point_operand_info(&f));
+ },
+ _ => {},
+ }
+ None
+}
+
+fn floating_point_operand_info<T: Display + PartialOrd + From<f32>>(f: &T) -> OperandInfo {
+ OperandInfo {
+ string_representation: Some(format!("{:.3}", *f)),
+ is_negative: *f < 0.0.into(),
+ is_integral: false,
+ }
+}
+
+fn might_have_negative_value(t: Ty<'_>) -> bool {
+ t.is_signed() || t.is_floating_point()
+}
+
+fn check_const_operands<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ lhs_operand: &OperandInfo,
+ rhs_operand: &OperandInfo,
+) {
+ if lhs_operand.is_negative ^ rhs_operand.is_negative {
+ span_lint_and_then(
+ cx,
+ MODULO_ARITHMETIC,
+ expr.span,
+ &format!(
+ "you are using modulo operator on constants with different signs: `{} % {}`",
+ lhs_operand.string_representation.as_ref().unwrap(),
+ rhs_operand.string_representation.as_ref().unwrap()
+ ),
+ |diag| {
+ diag.note("double check for expected result especially when interoperating with different languages");
+ if lhs_operand.is_integral {
+ diag.note("or consider using `rem_euclid` or similar function");
+ }
+ },
+ );
+ }
+}
+
+fn check_non_const_operands<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, operand: &Expr<'_>) {
+ let operand_type = cx.typeck_results().expr_ty(operand);
+ if might_have_negative_value(operand_type) {
+ span_lint_and_then(
+ cx,
+ MODULO_ARITHMETIC,
+ expr.span,
+ "you are using modulo operator on types that might have different signs",
+ |diag| {
+ diag.note("double check for expected result especially when interoperating with different languages");
+ if operand_type.is_integral() {
+ diag.note("or consider using `rem_euclid` or similar function");
+ }
+ },
+ );
+ }
+}
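To illustrate the sign behaviour the diagnostic notes above refer to, a small sketch (the `17 % -3` example comes from the `MODULO_ARITHMETIC` description in `operators/mod.rs` above):

```rust
fn main() {
    // Rust's `%` takes the sign of the dividend.
    println!("{}", 17 % -3);                // 2
    println!("{}", -17 % 3);                // -2
    // `rem_euclid`, mentioned in the diagnostic note, is never negative.
    println!("{}", (-17i32).rem_euclid(3)); // 1
}
```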
diff --git a/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs b/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs
new file mode 100644
index 000000000..54eea1483
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs
@@ -0,0 +1,26 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{is_integer_const, unsext};
+use rustc_hir::{BinOpKind, Expr};
+use rustc_lint::LateContext;
+use rustc_middle::ty;
+
+use super::MODULO_ONE;
+
+pub(crate) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, op: BinOpKind, right: &Expr<'_>) {
+ if op == BinOpKind::Rem {
+ if is_integer_const(cx, right, 1) {
+ span_lint(cx, MODULO_ONE, expr.span, "any number modulo 1 will be 0");
+ }
+
+ if let ty::Int(ity) = cx.typeck_results().expr_ty(right).kind() {
+ if is_integer_const(cx, right, unsext(cx.tcx, -1, *ity)) {
+ span_lint(
+ cx,
+ MODULO_ONE,
+ expr.span,
+ "any number modulo -1 will panic/overflow or result in 0",
+ );
+ }
+ };
+ }
+}
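A sketch of the two cases this check reports, per the `MODULO_ONE` description in `operators/mod.rs` above: `% 1` is always zero, and `% -1` is zero except that it panics or overflows when the left operand is the type's minimum value.

```rust
fn main() {
    let x = 5i32;
    let _a = x % 1;  // always 0
    let _b = x % -1; // 0 here, but `i32::MIN % -1` would panic/overflow
}
```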
diff --git a/src/tools/clippy/clippy_lints/src/operators/needless_bitwise_bool.rs b/src/tools/clippy/clippy_lints/src/operators/needless_bitwise_bool.rs
new file mode 100644
index 000000000..e902235a0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/needless_bitwise_bool.rs
@@ -0,0 +1,36 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_opt;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+
+use super::NEEDLESS_BITWISE_BOOL;
+
+pub(super) fn check(cx: &LateContext<'_>, e: &Expr<'_>, op: BinOpKind, lhs: &Expr<'_>, rhs: &Expr<'_>) {
+ let op_str = match op {
+ BinOpKind::BitAnd => "&&",
+ BinOpKind::BitOr => "||",
+ _ => return,
+ };
+ if matches!(
+ rhs.kind,
+ ExprKind::Call(..) | ExprKind::MethodCall(..) | ExprKind::Binary(..) | ExprKind::Unary(..)
+ ) && cx.typeck_results().expr_ty(e).is_bool()
+ && !rhs.can_have_side_effects()
+ {
+ span_lint_and_then(
+ cx,
+ NEEDLESS_BITWISE_BOOL,
+ e.span,
+ "use of bitwise operator instead of lazy operator between booleans",
+ |diag| {
+ if let Some(lhs_snip) = snippet_opt(cx, lhs.span)
+ && let Some(rhs_snip) = snippet_opt(cx, rhs.span)
+ {
+ let sugg = format!("{} {} {}", lhs_snip, op_str, rhs_snip);
+ diag.span_suggestion(e.span, "try", sugg, Applicability::MachineApplicable);
+ }
+ },
+ );
+ }
+}
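A minimal sketch of the rewrite this check suggests (see the `NEEDLESS_BITWISE_BOOL` description in `operators/mod.rs` above): the bitwise form evaluates both sides, the lazy form short-circuits.

```rust
fn main() {
    let (x, y) = (true, false);
    // Flagged: bitwise `&` between booleans.
    if x & !y {
        println!("both");
    }
    // Suggested:
    if x && !y {
        println!("both");
    }
}
```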
diff --git a/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs b/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs
new file mode 100644
index 000000000..b6097710d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs
@@ -0,0 +1,128 @@
+use clippy_utils::consts::constant_simple;
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir as hir;
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+
+use super::{FLOAT_ARITHMETIC, INTEGER_ARITHMETIC};
+
+#[derive(Default)]
+pub struct Context {
+ expr_id: Option<hir::HirId>,
+ /// This field is used to check whether expressions are constants, such as in enum discriminants
+ /// and consts
+ const_span: Option<Span>,
+}
+impl Context {
+ fn skip_expr(&mut self, e: &hir::Expr<'_>) -> bool {
+ self.expr_id.is_some() || self.const_span.map_or(false, |span| span.contains(e.span))
+ }
+
+ pub fn check_binary<'tcx>(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ expr: &'tcx hir::Expr<'_>,
+ op: hir::BinOpKind,
+ l: &'tcx hir::Expr<'_>,
+ r: &'tcx hir::Expr<'_>,
+ ) {
+ if self.skip_expr(expr) {
+ return;
+ }
+ match op {
+ hir::BinOpKind::And
+ | hir::BinOpKind::Or
+ | hir::BinOpKind::BitAnd
+ | hir::BinOpKind::BitOr
+ | hir::BinOpKind::BitXor
+ | hir::BinOpKind::Eq
+ | hir::BinOpKind::Lt
+ | hir::BinOpKind::Le
+ | hir::BinOpKind::Ne
+ | hir::BinOpKind::Ge
+ | hir::BinOpKind::Gt => return,
+ _ => (),
+ }
+
+ let (l_ty, r_ty) = (cx.typeck_results().expr_ty(l), cx.typeck_results().expr_ty(r));
+ if l_ty.peel_refs().is_integral() && r_ty.peel_refs().is_integral() {
+ match op {
+ hir::BinOpKind::Div | hir::BinOpKind::Rem => match &r.kind {
+ hir::ExprKind::Lit(_lit) => (),
+ hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
+ if let hir::ExprKind::Lit(lit) = &expr.kind {
+ if let rustc_ast::ast::LitKind::Int(1, _) = lit.node {
+ span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
+ self.expr_id = Some(expr.hir_id);
+ }
+ }
+ },
+ _ => {
+ span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
+ self.expr_id = Some(expr.hir_id);
+ },
+ },
+ _ => {
+ span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
+ self.expr_id = Some(expr.hir_id);
+ },
+ }
+ } else if r_ty.peel_refs().is_floating_point() && r_ty.peel_refs().is_floating_point() {
+ span_lint(cx, FLOAT_ARITHMETIC, expr.span, "floating-point arithmetic detected");
+ self.expr_id = Some(expr.hir_id);
+ }
+ }
+
+ pub fn check_negate<'tcx>(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, arg: &'tcx hir::Expr<'_>) {
+ if self.skip_expr(expr) {
+ return;
+ }
+ let ty = cx.typeck_results().expr_ty(arg);
+ if constant_simple(cx, cx.typeck_results(), expr).is_none() {
+ if ty.is_integral() {
+ span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
+ self.expr_id = Some(expr.hir_id);
+ } else if ty.is_floating_point() {
+ span_lint(cx, FLOAT_ARITHMETIC, expr.span, "floating-point arithmetic detected");
+ self.expr_id = Some(expr.hir_id);
+ }
+ }
+ }
+
+ pub fn expr_post(&mut self, id: hir::HirId) {
+ if Some(id) == self.expr_id {
+ self.expr_id = None;
+ }
+ }
+
+ pub fn enter_body(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) {
+ let body_owner = cx.tcx.hir().body_owner(body.id());
+ let body_owner_def_id = cx.tcx.hir().local_def_id(body_owner);
+
+ match cx.tcx.hir().body_owner_kind(body_owner_def_id) {
+ hir::BodyOwnerKind::Static(_) | hir::BodyOwnerKind::Const => {
+ let body_span = cx.tcx.hir().span_with_body(body_owner);
+
+ if let Some(span) = self.const_span {
+ if span.contains(body_span) {
+ return;
+ }
+ }
+ self.const_span = Some(body_span);
+ },
+ hir::BodyOwnerKind::Fn | hir::BodyOwnerKind::Closure => (),
+ }
+ }
+
+ pub fn body_post(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) {
+ let body_owner = cx.tcx.hir().body_owner(body.id());
+ let body_span = cx.tcx.hir().span_with_body(body_owner);
+
+ if let Some(span) = self.const_span {
+ if span.contains(body_span) {
+ return;
+ }
+ }
+ self.const_span = None;
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
new file mode 100644
index 000000000..1805672e3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
@@ -0,0 +1,218 @@
+use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
+use clippy_utils::get_enclosing_block;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::{implements_trait, is_copy};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{def::Res, def_id::DefId, BinOpKind, BorrowKind, Expr, ExprKind, GenericArg, ItemKind, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+use super::OP_REF;
+
+#[expect(clippy::similar_names, clippy::too_many_lines)]
+pub(crate) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ let (trait_id, requires_ref) = match op {
+ BinOpKind::Add => (cx.tcx.lang_items().add_trait(), false),
+ BinOpKind::Sub => (cx.tcx.lang_items().sub_trait(), false),
+ BinOpKind::Mul => (cx.tcx.lang_items().mul_trait(), false),
+ BinOpKind::Div => (cx.tcx.lang_items().div_trait(), false),
+ BinOpKind::Rem => (cx.tcx.lang_items().rem_trait(), false),
+ // don't lint short circuiting ops
+ BinOpKind::And | BinOpKind::Or => return,
+ BinOpKind::BitXor => (cx.tcx.lang_items().bitxor_trait(), false),
+ BinOpKind::BitAnd => (cx.tcx.lang_items().bitand_trait(), false),
+ BinOpKind::BitOr => (cx.tcx.lang_items().bitor_trait(), false),
+ BinOpKind::Shl => (cx.tcx.lang_items().shl_trait(), false),
+ BinOpKind::Shr => (cx.tcx.lang_items().shr_trait(), false),
+ BinOpKind::Ne | BinOpKind::Eq => (cx.tcx.lang_items().eq_trait(), true),
+ BinOpKind::Lt | BinOpKind::Le | BinOpKind::Ge | BinOpKind::Gt => {
+ (cx.tcx.lang_items().partial_ord_trait(), true)
+ },
+ };
+ if let Some(trait_id) = trait_id {
+ match (&left.kind, &right.kind) {
+ // do not suggest to dereference literals
+ (&ExprKind::Lit(..), _) | (_, &ExprKind::Lit(..)) => {},
+ // &foo == &bar
+ (&ExprKind::AddrOf(BorrowKind::Ref, _, l), &ExprKind::AddrOf(BorrowKind::Ref, _, r)) => {
+ let lty = cx.typeck_results().expr_ty(l);
+ let rty = cx.typeck_results().expr_ty(r);
+ let lcpy = is_copy(cx, lty);
+ let rcpy = is_copy(cx, rty);
+ if let Some((self_ty, other_ty)) = in_impl(cx, e, trait_id) {
+ if (are_equal(cx, rty, self_ty) && are_equal(cx, lty, other_ty))
+ || (are_equal(cx, rty, other_ty) && are_equal(cx, lty, self_ty))
+ {
+ return; // Don't lint
+ }
+ }
+ // either operator autorefs or both args are copyable
+ if (requires_ref || (lcpy && rcpy)) && implements_trait(cx, lty, trait_id, &[rty.into()]) {
+ span_lint_and_then(
+ cx,
+ OP_REF,
+ e.span,
+ "needlessly taken reference of both operands",
+ |diag| {
+ let lsnip = snippet(cx, l.span, "...").to_string();
+ let rsnip = snippet(cx, r.span, "...").to_string();
+ multispan_sugg(
+ diag,
+ "use the values directly",
+ vec![(left.span, lsnip), (right.span, rsnip)],
+ );
+ },
+ );
+ } else if lcpy
+ && !rcpy
+ && implements_trait(cx, lty, trait_id, &[cx.typeck_results().expr_ty(right).into()])
+ {
+ span_lint_and_then(
+ cx,
+ OP_REF,
+ e.span,
+ "needlessly taken reference of left operand",
+ |diag| {
+ let lsnip = snippet(cx, l.span, "...").to_string();
+ diag.span_suggestion(
+ left.span,
+ "use the left value directly",
+ lsnip,
+ Applicability::MaybeIncorrect, // FIXME #2597
+ );
+ },
+ );
+ } else if !lcpy
+ && rcpy
+ && implements_trait(cx, cx.typeck_results().expr_ty(left), trait_id, &[rty.into()])
+ {
+ span_lint_and_then(
+ cx,
+ OP_REF,
+ e.span,
+ "needlessly taken reference of right operand",
+ |diag| {
+ let rsnip = snippet(cx, r.span, "...").to_string();
+ diag.span_suggestion(
+ right.span,
+ "use the right value directly",
+ rsnip,
+ Applicability::MaybeIncorrect, // FIXME #2597
+ );
+ },
+ );
+ }
+ },
+ // &foo == bar
+ (&ExprKind::AddrOf(BorrowKind::Ref, _, l), _) => {
+ let lty = cx.typeck_results().expr_ty(l);
+ if let Some((self_ty, other_ty)) = in_impl(cx, e, trait_id) {
+ let rty = cx.typeck_results().expr_ty(right);
+ if (are_equal(cx, rty, self_ty) && are_equal(cx, lty, other_ty))
+ || (are_equal(cx, rty, other_ty) && are_equal(cx, lty, self_ty))
+ {
+ return; // Don't lint
+ }
+ }
+ let lcpy = is_copy(cx, lty);
+ if (requires_ref || lcpy)
+ && implements_trait(cx, lty, trait_id, &[cx.typeck_results().expr_ty(right).into()])
+ {
+ span_lint_and_then(
+ cx,
+ OP_REF,
+ e.span,
+ "needlessly taken reference of left operand",
+ |diag| {
+ let lsnip = snippet(cx, l.span, "...").to_string();
+ diag.span_suggestion(
+ left.span,
+ "use the left value directly",
+ lsnip,
+ Applicability::MaybeIncorrect, // FIXME #2597
+ );
+ },
+ );
+ }
+ },
+ // foo == &bar
+ (_, &ExprKind::AddrOf(BorrowKind::Ref, _, r)) => {
+ let rty = cx.typeck_results().expr_ty(r);
+ if let Some((self_ty, other_ty)) = in_impl(cx, e, trait_id) {
+ let lty = cx.typeck_results().expr_ty(left);
+ if (are_equal(cx, rty, self_ty) && are_equal(cx, lty, other_ty))
+ || (are_equal(cx, rty, other_ty) && are_equal(cx, lty, self_ty))
+ {
+ return; // Don't lint
+ }
+ }
+ let rcpy = is_copy(cx, rty);
+ if (requires_ref || rcpy)
+ && implements_trait(cx, cx.typeck_results().expr_ty(left), trait_id, &[rty.into()])
+ {
+ span_lint_and_then(cx, OP_REF, e.span, "taken reference of right operand", |diag| {
+ let rsnip = snippet(cx, r.span, "...").to_string();
+ diag.span_suggestion(
+ right.span,
+ "use the right value directly",
+ rsnip,
+ Applicability::MaybeIncorrect, // FIXME #2597
+ );
+ });
+ }
+ },
+ _ => {},
+ }
+ }
+}
+
+fn in_impl<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ bin_op: DefId,
+) -> Option<(&'tcx rustc_hir::Ty<'tcx>, &'tcx rustc_hir::Ty<'tcx>)> {
+ if_chain! {
+ if let Some(block) = get_enclosing_block(cx, e.hir_id);
+ if let Some(impl_def_id) = cx.tcx.impl_of_method(block.hir_id.owner.to_def_id());
+ let item = cx.tcx.hir().expect_item(impl_def_id.expect_local());
+ if let ItemKind::Impl(item) = &item.kind;
+ if let Some(of_trait) = &item.of_trait;
+ if let Some(seg) = of_trait.path.segments.last();
+ if let Some(Res::Def(_, trait_id)) = seg.res;
+ if trait_id == bin_op;
+ if let Some(generic_args) = seg.args;
+ if let Some(GenericArg::Type(other_ty)) = generic_args.args.last();
+
+ then {
+ Some((item.self_ty, other_ty))
+ }
+ else {
+ None
+ }
+ }
+}
+
+fn are_equal<'tcx>(cx: &LateContext<'tcx>, middle_ty: Ty<'_>, hir_ty: &rustc_hir::Ty<'_>) -> bool {
+ if_chain! {
+ if let ty::Adt(adt_def, _) = middle_ty.kind();
+ if let Some(local_did) = adt_def.did().as_local();
+ let item = cx.tcx.hir().expect_item(local_did);
+ let middle_ty_id = item.def_id.to_def_id();
+ if let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind;
+ if let Res::Def(_, hir_ty_id) = path.res;
+
+ then {
+ hir_ty_id == middle_ty_id
+ }
+ else {
+ false
+ }
+ }
+}
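A small sketch of the simplest case this check handles, following the `OP_REF` description in `operators/mod.rs` above: both operands are needlessly referenced just to satisfy `==`.

```rust
fn main() {
    let x = 5;
    let y = 5;
    // Flagged: needlessly taken reference of both operands.
    let _eq = &x == &y;
    // Suggested: use the values directly.
    let _eq = x == y;
}
```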
diff --git a/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs b/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs
new file mode 100644
index 000000000..1aefc2741
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/ptr_eq.rs
@@ -0,0 +1,65 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+
+use super::PTR_EQ;
+
+static LINT_MSG: &str = "use `std::ptr::eq` when comparing raw pointers";
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+) {
+ if BinOpKind::Eq == op {
+ let (left, right) = match (expr_as_cast_to_usize(cx, left), expr_as_cast_to_usize(cx, right)) {
+ (Some(lhs), Some(rhs)) => (lhs, rhs),
+ _ => (left, right),
+ };
+
+ if_chain! {
+ if let Some(left_var) = expr_as_cast_to_raw_pointer(cx, left);
+ if let Some(right_var) = expr_as_cast_to_raw_pointer(cx, right);
+ if let Some(left_snip) = snippet_opt(cx, left_var.span);
+ if let Some(right_snip) = snippet_opt(cx, right_var.span);
+ then {
+ span_lint_and_sugg(
+ cx,
+ PTR_EQ,
+ expr.span,
+ LINT_MSG,
+ "try",
+ format!("std::ptr::eq({}, {})", left_snip, right_snip),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
+
+// If the given expression is a cast to a usize, return the lhs of the cast
+// E.g., `foo as *const _ as usize` returns `foo as *const _`.
+fn expr_as_cast_to_usize<'tcx>(cx: &LateContext<'tcx>, cast_expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if cx.typeck_results().expr_ty(cast_expr) == cx.tcx.types.usize {
+ if let ExprKind::Cast(expr, _) = cast_expr.kind {
+ return Some(expr);
+ }
+ }
+ None
+}
+
+// If the given expression is a cast to a `*const` pointer, return the lhs of the cast
+// E.g., `foo as *const _` returns `foo`.
+fn expr_as_cast_to_raw_pointer<'tcx>(cx: &LateContext<'tcx>, cast_expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if cx.typeck_results().expr_ty(cast_expr).is_unsafe_ptr() {
+ if let ExprKind::Cast(expr, _) = cast_expr.kind {
+ return Some(expr);
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/self_assignment.rs b/src/tools/clippy/clippy_lints/src/operators/self_assignment.rs
new file mode 100644
index 000000000..9d6bec05b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/self_assignment.rs
@@ -0,0 +1,20 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::eq_expr_value;
+use clippy_utils::source::snippet;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+
+use super::SELF_ASSIGNMENT;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, lhs: &'tcx Expr<'_>, rhs: &'tcx Expr<'_>) {
+ if eq_expr_value(cx, lhs, rhs) {
+ let lhs = snippet(cx, lhs.span, "<lhs>");
+ let rhs = snippet(cx, rhs.span, "<rhs>");
+ span_lint(
+ cx,
+ SELF_ASSIGNMENT,
+ e.span,
+ &format!("self-assignment of `{}` to `{}`", rhs, lhs),
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs b/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs
new file mode 100644
index 000000000..ff85fd554
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs
@@ -0,0 +1,44 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg::Sugg;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+
+use super::VERBOSE_BIT_MASK;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ op: BinOpKind,
+ left: &'tcx Expr<'_>,
+ right: &'tcx Expr<'_>,
+ threshold: u64,
+) {
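+    // Looks for `x & mask == 0` where `mask` is a contiguous low-bit mask larger
+    // than the configured threshold (e.g. `x & 0b1111 == 0`) and suggests the
+    // equivalent `x.trailing_zeros() >= 4` instead.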
+ if BinOpKind::Eq == op
+ && let ExprKind::Binary(op1, left1, right1) = &left.kind
+ && BinOpKind::BitAnd == op1.node
+ && let ExprKind::Lit(lit) = &right1.kind
+ && let LitKind::Int(n, _) = lit.node
+ && let ExprKind::Lit(lit1) = &right.kind
+ && let LitKind::Int(0, _) = lit1.node
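+        // `leading_zeros == count_zeros` holds exactly when every zero bit is above
+        // every one bit, i.e. `n` has the form 0b0..01..1 (a value of 2^k - 1).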
+ && n.leading_zeros() == n.count_zeros()
+ && n > u128::from(threshold)
+ {
+ span_lint_and_then(
+ cx,
+ VERBOSE_BIT_MASK,
+ e.span,
+ "bit mask could be simplified with a call to `trailing_zeros`",
+ |diag| {
+ let sugg = Sugg::hir(cx, left1, "...").maybe_par();
+ diag.span_suggestion(
+ e.span,
+ "try",
+ format!("{}.trailing_zeros() >= {}", sugg, n.count_ones()),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs b/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
new file mode 100644
index 000000000..3f5286ba0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
@@ -0,0 +1,56 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_direct_expn_of;
+use if_chain::if_chain;
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `option_env!(...).unwrap()` and
+ /// suggests usage of the `env!` macro.
+ ///
+ /// ### Why is this bad?
+ /// Unwrapping the result of `option_env!` will panic
+ /// at run-time if the environment variable doesn't exist, whereas `env!`
+ /// catches it at compile-time.
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// let _ = option_env!("HOME").unwrap();
+ /// ```
+ ///
+ /// Is better expressed as:
+ ///
+ /// ```rust,no_run
+ /// let _ = env!("HOME");
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub OPTION_ENV_UNWRAP,
+ correctness,
+ "using `option_env!(...).unwrap()` to get environment variable"
+}
+
+declare_lint_pass!(OptionEnvUnwrap => [OPTION_ENV_UNWRAP]);
+
+impl EarlyLintPass for OptionEnvUnwrap {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
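+        // Match `option_env!(..).unwrap()` or `.expect(..)`, where the receiver is the
+        // direct expansion of the `option_env!` macro.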
+ if_chain! {
+ if let ExprKind::MethodCall(path_segment, args, _) = &expr.kind;
+ if matches!(path_segment.ident.name, sym::expect | sym::unwrap);
+ if let ExprKind::Call(caller, _) = &args[0].kind;
+ if is_direct_expn_of(caller.span, "option_env").is_some();
+ then {
+ span_lint_and_help(
+ cx,
+ OPTION_ENV_UNWRAP,
+ expr.span,
+ "this will panic at run-time if the environment variable doesn't exist at compile-time",
+ None,
+ "consider using the `env!` macro instead"
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
new file mode 100644
index 000000000..44f153cff
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
@@ -0,0 +1,186 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{
+ can_move_expr_to_closure, eager_or_lazy, higher, in_constant, is_else_clause, is_lang_ctor, peel_blocks,
+ peel_hir_expr_while, CaptureKind,
+};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::OptionSome;
+use rustc_hir::{def::Res, BindingAnnotation, Expr, ExprKind, Mutability, PatKind, Path, QPath, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Lints usage of `if let Some(v) = ... { y } else { x }` which is more
+ /// idiomatically done with `Option::map_or` (if the else bit is a pure
+ /// expression) or `Option::map_or_else` (if the else bit is an impure
+ /// expression).
+ ///
+ /// ### Why is this bad?
+ /// Using the dedicated functions of the `Option` type is clearer and
+ /// more concise than an `if let` expression.
+ ///
+ /// ### Known problems
+ /// This lint uses a deliberately conservative metric for checking
+ /// if the inside of either body contains breaks or continues which will
+ /// cause it to not suggest a fix if either block contains a loop with
+ /// continues or breaks contained within the loop.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let optional: Option<u32> = Some(0);
+ /// # fn do_complicated_function() -> u32 { 5 };
+ /// let _ = if let Some(foo) = optional {
+ /// foo
+ /// } else {
+ /// 5
+ /// };
+ /// let _ = if let Some(foo) = optional {
+ /// foo
+ /// } else {
+ /// let y = do_complicated_function();
+ /// y*y
+ /// };
+ /// ```
+ ///
+ /// should be
+ ///
+ /// ```rust
+ /// # let optional: Option<u32> = Some(0);
+ /// # fn do_complicated_function() -> u32 { 5 };
+ /// let _ = optional.map_or(5, |foo| foo);
+ /// let _ = optional.map_or_else(||{
+ /// let y = do_complicated_function();
+ /// y*y
+ /// }, |foo| foo);
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub OPTION_IF_LET_ELSE,
+ nursery,
+ "reimplementation of Option::map_or"
+}
+
+declare_lint_pass!(OptionIfLetElse => [OPTION_IF_LET_ELSE]);
+
+/// Returns true iff the given expression is the result of calling `Result::ok`
+fn is_result_ok(cx: &LateContext<'_>, expr: &'_ Expr<'_>) -> bool {
+ if let ExprKind::MethodCall(path, &[ref receiver], _) = &expr.kind {
+ path.ident.name.as_str() == "ok"
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(receiver), sym::Result)
+ } else {
+ false
+ }
+}
+
+/// A struct containing information about occurrences of the
+/// `if let Some(..) = .. else` construct that this lint detects.
+struct OptionIfLetElseOccurrence {
+ option: String,
+ method_sugg: String,
+ some_expr: String,
+ none_expr: String,
+}
+
+fn format_option_in_sugg(cx: &LateContext<'_>, cond_expr: &Expr<'_>, as_ref: bool, as_mut: bool) -> String {
+ format!(
+ "{}{}",
+ Sugg::hir_with_macro_callsite(cx, cond_expr, "..").maybe_par(),
+ if as_mut {
+ ".as_mut()"
+ } else if as_ref {
+ ".as_ref()"
+ } else {
+ ""
+ }
+ )
+}
+
+/// If this expression is the option if let/else construct we're detecting, then
+/// this function returns an `OptionIfLetElseOccurrence` struct with details if
+/// this construct is found, or None if this construct is not found.
+fn detect_option_if_let_else<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) -> Option<OptionIfLetElseOccurrence> {
+ if_chain! {
+ if !expr.span.from_expansion(); // Don't lint macros, because it behaves weirdly
+ if !in_constant(cx, expr.hir_id);
+ if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: Some(if_else) })
+ = higher::IfLet::hir(cx, expr);
+ if !is_else_clause(cx.tcx, expr);
+ if !is_result_ok(cx, let_expr); // Don't lint on Result::ok because a different lint does it already
+ if let PatKind::TupleStruct(struct_qpath, [inner_pat], _) = &let_pat.kind;
+ if is_lang_ctor(cx, struct_qpath, OptionSome);
+ if let PatKind::Binding(bind_annotation, _, id, None) = &inner_pat.kind;
+ if let Some(some_captures) = can_move_expr_to_closure(cx, if_then);
+ if let Some(none_captures) = can_move_expr_to_closure(cx, if_else);
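+        // Both branches are turned into closures in the suggestion, so variables
+        // captured by both may only be captured as immutable references.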
+ if some_captures
+ .iter()
+ .filter_map(|(id, &c)| none_captures.get(id).map(|&c2| (c, c2)))
+ .all(|(x, y)| x.is_imm_ref() && y.is_imm_ref());
+
+ then {
+ let capture_mut = if bind_annotation == &BindingAnnotation::Mutable { "mut " } else { "" };
+ let some_body = peel_blocks(if_then);
+ let none_body = peel_blocks(if_else);
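+            // Use the eager `map_or` only when the else branch is simple enough to be
+            // evaluated unconditionally; otherwise suggest the lazy `map_or_else`.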
+ let method_sugg = if eager_or_lazy::switch_to_eager_eval(cx, none_body) { "map_or" } else { "map_or_else" };
+ let capture_name = id.name.to_ident_string();
+ let (as_ref, as_mut) = match &let_expr.kind {
+ ExprKind::AddrOf(_, Mutability::Not, _) => (true, false),
+ ExprKind::AddrOf(_, Mutability::Mut, _) => (false, true),
+ _ => (bind_annotation == &BindingAnnotation::Ref, bind_annotation == &BindingAnnotation::RefMut),
+ };
+ let cond_expr = match let_expr.kind {
+ // Pointer dereferencing happens automatically, so we can omit it in the suggestion
+ ExprKind::Unary(UnOp::Deref, expr) | ExprKind::AddrOf(_, _, expr) => expr,
+ _ => let_expr,
+ };
+            // Check whether the captures the closure will need would conflict with borrows made in the scrutinee.
+ // TODO: check all the references made in the scrutinee expression. This will require interacting
+ // with the borrow checker. Currently only `<local>[.<field>]*` is checked for.
+ if as_ref || as_mut {
+ let e = peel_hir_expr_while(cond_expr, |e| match e.kind {
+ ExprKind::Field(e, _) | ExprKind::AddrOf(_, _, e) => Some(e),
+ _ => None,
+ });
+ if let ExprKind::Path(QPath::Resolved(None, Path { res: Res::Local(local_id), .. })) = e.kind {
+ match some_captures.get(local_id)
+ .or_else(|| (method_sugg == "map_or_else").then_some(()).and_then(|_| none_captures.get(local_id)))
+ {
+ Some(CaptureKind::Value | CaptureKind::Ref(Mutability::Mut)) => return None,
+ Some(CaptureKind::Ref(Mutability::Not)) if as_mut => return None,
+ Some(CaptureKind::Ref(Mutability::Not)) | None => (),
+ }
+ }
+ }
+ Some(OptionIfLetElseOccurrence {
+ option: format_option_in_sugg(cx, cond_expr, as_ref, as_mut),
+ method_sugg: method_sugg.to_string(),
+ some_expr: format!("|{}{}| {}", capture_mut, capture_name, Sugg::hir_with_macro_callsite(cx, some_body, "..")),
+ none_expr: format!("{}{}", if method_sugg == "map_or" { "" } else { "|| " }, Sugg::hir_with_macro_callsite(cx, none_body, "..")),
+ })
+ } else {
+ None
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for OptionIfLetElse {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
+ if let Some(detection) = detect_option_if_let_else(cx, expr) {
+ span_lint_and_sugg(
+ cx,
+ OPTION_IF_LET_ELSE,
+ expr.span,
+ format!("use Option::{} instead of an if let/else", detection.method_sugg).as_str(),
+ "try",
+ format!(
+ "{}.{}({}, {})",
+ detection.option, detection.method_sugg, detection.none_expr, detection.some_expr,
+ ),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs b/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs
new file mode 100644
index 000000000..6dabbd480
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/overflow_check_conditional.rs
@@ -0,0 +1,75 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::SpanlessEq;
+use if_chain::if_chain;
+use rustc_hir::{BinOpKind, Expr, ExprKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects classic underflow/overflow checks.
+ ///
+ /// ### Why is this bad?
+ /// Most classic C underflow/overflow checks will fail in
+ /// Rust. Users can use functions like `overflowing_*` and `wrapping_*` instead.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let a = 1;
+ /// # let b = 2;
+ /// a + b < a;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OVERFLOW_CHECK_CONDITIONAL,
+ complexity,
+ "overflow checks inspired by C which are likely to panic"
+}
+
+declare_lint_pass!(OverflowCheckConditional => [OVERFLOW_CHECK_CONDITIONAL]);
+
+const OVERFLOW_MSG: &str = "you are trying to use classic C overflow conditions that will fail in Rust";
+const UNDERFLOW_MSG: &str = "you are trying to use classic C underflow conditions that will fail in Rust";
+
+impl<'tcx> LateLintPass<'tcx> for OverflowCheckConditional {
+ // a + b < a, a > a + b, a < a - b, a - b > a
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let eq = |l, r| SpanlessEq::new(cx).eq_path_segment(l, r);
+ if_chain! {
+ if let ExprKind::Binary(ref op, first, second) = expr.kind;
+ if let ExprKind::Binary(ref op2, ident1, ident2) = first.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path1)) = ident1.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path2)) = ident2.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path3)) = second.kind;
+ if eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]);
+ if cx.typeck_results().expr_ty(ident1).is_integral();
+ if cx.typeck_results().expr_ty(ident2).is_integral();
+ then {
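+                // E.g. `a + b < a`: in Rust the addition itself already panics on
+                // overflow in debug builds, so this C-style check does not work as intended.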
+ if op.node == BinOpKind::Lt && op2.node == BinOpKind::Add {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, OVERFLOW_MSG);
+ }
+ if op.node == BinOpKind::Gt && op2.node == BinOpKind::Sub {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, UNDERFLOW_MSG);
+ }
+ }
+ }
+
+ if_chain! {
+ if let ExprKind::Binary(ref op, first, second) = expr.kind;
+ if let ExprKind::Binary(ref op2, ident1, ident2) = second.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path1)) = ident1.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path2)) = ident2.kind;
+ if let ExprKind::Path(QPath::Resolved(_, path3)) = first.kind;
+ if eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]);
+ if cx.typeck_results().expr_ty(ident1).is_integral();
+ if cx.typeck_results().expr_ty(ident2).is_integral();
+ then {
+ if op.node == BinOpKind::Gt && op2.node == BinOpKind::Add {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, OVERFLOW_MSG);
+ }
+ if op.node == BinOpKind::Lt && op2.node == BinOpKind::Sub {
+ span_lint(cx, OVERFLOW_CHECK_CONDITIONAL, expr.span, UNDERFLOW_MSG);
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
new file mode 100644
index 000000000..21acf003d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
@@ -0,0 +1,87 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::macros::root_macro_call_first_node;
+use clippy_utils::return_ty;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::visitors::expr_visitor_no_bodies;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{FnKind, Visitor};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function returning `Result`.
+ ///
+ /// ### Why is this bad?
+    /// For some codebases, it is desirable for functions returning `Result` to return an error instead of crashing. Hence panicking macros should be avoided.
+ ///
+ /// ### Known problems
+ /// Functions called from a function returning a `Result` may invoke a panicking macro. This is not checked.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn result_with_panic() -> Result<bool, String>
+ /// {
+ /// panic!("error");
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn result_without_panic() -> Result<bool, String> {
+ /// Err(String::from("error"))
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub PANIC_IN_RESULT_FN,
+ restriction,
+    "functions of type `Result<..>` that contain `panic!()`, `todo!()`, `unreachable!()`, `unimplemented!()` or assertion"
+}
+
+declare_lint_pass!(PanicInResultFn => [PANIC_IN_RESULT_FN]);
+
+impl<'tcx> LateLintPass<'tcx> for PanicInResultFn {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ fn_kind: FnKind<'tcx>,
+ _: &'tcx hir::FnDecl<'tcx>,
+ body: &'tcx hir::Body<'tcx>,
+ span: Span,
+ hir_id: hir::HirId,
+ ) {
+ if !matches!(fn_kind, FnKind::Closure) && is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) {
+ lint_impl_body(cx, span, body);
+ }
+ }
+}
+
+fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, body: &'tcx hir::Body<'tcx>) {
+ let mut panics = Vec::new();
+ expr_visitor_no_bodies(|expr| {
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return true };
+ if matches!(
+ cx.tcx.item_name(macro_call.def_id).as_str(),
+ "unimplemented" | "unreachable" | "panic" | "todo" | "assert" | "assert_eq" | "assert_ne"
+ ) {
+ panics.push(macro_call.span);
+ return false;
+ }
+ true
+ })
+ .visit_expr(&body.value);
+ if !panics.is_empty() {
+ span_lint_and_then(
+ cx,
+ PANIC_IN_RESULT_FN,
+ impl_span,
+ "used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`",
+ move |diag| {
+ diag.help(
+ "`unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing",
+ );
+ diag.span_note(panics, "return Err() instead of panicking");
+ },
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
new file mode 100644
index 000000000..2f3007658
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
@@ -0,0 +1,116 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::macros::{is_panic, root_macro_call_first_node};
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `panic!`.
+ ///
+ /// ### Why is this bad?
+ /// `panic!` will stop the execution of the executable
+ ///
+ /// ### Example
+ /// ```no_run
+ /// panic!("even with a good reason");
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub PANIC,
+ restriction,
+ "usage of the `panic!` macro"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `unimplemented!`.
+ ///
+ /// ### Why is this bad?
+ /// This macro should not be present in production code
+ ///
+ /// ### Example
+ /// ```no_run
+ /// unimplemented!();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNIMPLEMENTED,
+ restriction,
+ "`unimplemented!` should not be present in production code"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `todo!`.
+ ///
+ /// ### Why is this bad?
+ /// This macro should not be present in production code
+ ///
+ /// ### Example
+ /// ```no_run
+ /// todo!();
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub TODO,
+ restriction,
+ "`todo!` should not be present in production code"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `unreachable!`.
+ ///
+ /// ### Why is this bad?
+ /// This macro can cause code to panic
+ ///
+ /// ### Example
+ /// ```no_run
+ /// unreachable!();
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub UNREACHABLE,
+ restriction,
+ "usage of the `unreachable!` macro"
+}
+
+declare_lint_pass!(PanicUnimplemented => [UNIMPLEMENTED, UNREACHABLE, TODO, PANIC]);
+
+impl<'tcx> LateLintPass<'tcx> for PanicUnimplemented {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ if is_panic(cx, macro_call.def_id) {
+ if cx.tcx.hir().is_inside_const_context(expr.hir_id) {
+ return;
+ }
+
+ span_lint(
+ cx,
+ PANIC,
+ macro_call.span,
+ "`panic` should not be present in production code",
+ );
+ return;
+ }
+ match cx.tcx.item_name(macro_call.def_id).as_str() {
+ "todo" => {
+ span_lint(
+ cx,
+ TODO,
+ macro_call.span,
+ "`todo` should not be present in production code",
+ );
+ },
+ "unimplemented" => {
+ span_lint(
+ cx,
+ UNIMPLEMENTED,
+ macro_call.span,
+ "`unimplemented` should not be present in production code",
+ );
+ },
+ "unreachable" => {
+ span_lint(cx, UNREACHABLE, macro_call.span, "usage of the `unreachable!` macro");
+ },
+ _ => {},
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
new file mode 100644
index 000000000..09ac514d0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs
@@ -0,0 +1,57 @@
+use clippy_utils::diagnostics::span_lint_hir;
+use if_chain::if_chain;
+use rustc_hir::{Impl, Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual re-implementations of `PartialEq::ne`.
+ ///
+ /// ### Why is this bad?
+ /// `PartialEq::ne` is required to always return the
+ /// negated result of `PartialEq::eq`, which is exactly what the default
+ /// implementation does. Therefore, there should never be any need to
+ /// re-implement it.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo;
+ ///
+ /// impl PartialEq for Foo {
+ /// fn eq(&self, other: &Foo) -> bool { true }
+ /// fn ne(&self, other: &Foo) -> bool { !(self == other) }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub PARTIALEQ_NE_IMPL,
+ complexity,
+ "re-implementing `PartialEq::ne`"
+}
+
+declare_lint_pass!(PartialEqNeImpl => [PARTIALEQ_NE_IMPL]);
+
+impl<'tcx> LateLintPass<'tcx> for PartialEqNeImpl {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if_chain! {
+ if let ItemKind::Impl(Impl { of_trait: Some(ref trait_ref), items: impl_items, .. }) = item.kind;
+ if !cx.tcx.has_attr(item.def_id.to_def_id(), sym::automatically_derived);
+ if let Some(eq_trait) = cx.tcx.lang_items().eq_trait();
+ if trait_ref.path.res.def_id() == eq_trait;
+ then {
+ for impl_item in *impl_items {
+ if impl_item.ident.name == sym::ne {
+ span_lint_hir(
+ cx,
+ PARTIALEQ_NE_IMPL,
+ impl_item.id.hir_id(),
+ impl_item.span,
+ "re-implementing `PartialEq::ne` is unnecessary",
+ );
+ }
+ }
+ }
+ };
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
new file mode 100644
index 000000000..5fa4fd748
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
@@ -0,0 +1,313 @@
+use std::cmp;
+use std::iter;
+
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::{for_each_top_level_late_bound_region, is_copy};
+use clippy_utils::{is_self, is_self_ty};
+use core::ops::ControlFlow;
+use if_chain::if_chain;
+use rustc_ast::attr;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{BindingAnnotation, Body, FnDecl, HirId, Impl, ItemKind, MutTy, Mutability, Node, PatKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::adjustment::{Adjust, PointerCast};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, RegionKind};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::LocalDefId;
+use rustc_span::{sym, Span};
+use rustc_target::spec::abi::Abi;
+use rustc_target::spec::Target;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions taking arguments by reference, where
+ /// the argument type is `Copy` and small enough to be more efficient to always
+ /// pass by value.
+ ///
+ /// ### Why is this bad?
+ /// In many calling conventions instances of structs will
+ /// be passed through registers if they fit into two or less general purpose
+ /// registers.
+ ///
+ /// ### Known problems
+ /// This lint is target register size dependent, it is
+ /// limited to 32-bit to try and reduce portability problems between 32 and
+ /// 64-bit, but if you are compiling for 8 or 16-bit targets then the limit
+ /// will be different.
+ ///
+ /// The configuration option `trivial_copy_size_limit` can be set to override
+ /// this limit for a project.
+ ///
+ /// This lint attempts to allow passing arguments by reference if a reference
+ /// to that argument is returned. This is implemented by comparing the lifetime
+ /// of the argument and return value for equality. However, this can cause
+ /// false positives in cases involving multiple lifetimes that are bounded by
+ /// each other.
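+    /// For example, `fn f<'a>(x: &'a u32) -> &'a u32 { x }` is not linted, because the
+    /// returned reference borrows from the argument.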
+ ///
+ /// Also, it does not take account of other similar cases where getting memory addresses
+ /// matters; namely, returning the pointer to the argument in question,
+ /// and passing the argument, as both references and pointers,
+ /// to a function that needs the memory address. For further details, refer to
+ /// [this issue](https://github.com/rust-lang/rust-clippy/issues/5953)
+ /// that explains a real case in which this false positive
+    /// led to **undefined behavior** introduced with unsafe code.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// fn foo(v: &u32) {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn foo(v: u32) {}
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRIVIALLY_COPY_PASS_BY_REF,
+ pedantic,
+ "functions taking small copyable arguments by reference"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions taking arguments by value, where
+ /// the argument type is `Copy` and large enough to be worth considering
+ /// passing by reference. Does not trigger if the function is being exported,
+ /// because that might induce API breakage, if the parameter is declared as mutable,
+ /// or if the argument is a `self`.
+ ///
+ /// ### Why is this bad?
+ /// Arguments passed by value might result in an unnecessary
+ /// shallow copy, taking up more space in the stack and requiring a call to
+ /// `memcpy`, which can be expensive.
+ ///
+ /// ### Example
+ /// ```rust
+ /// #[derive(Clone, Copy)]
+ /// struct TooLarge([u8; 2048]);
+ ///
+ /// fn foo(v: TooLarge) {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # #[derive(Clone, Copy)]
+ /// # struct TooLarge([u8; 2048]);
+ /// fn foo(v: &TooLarge) {}
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub LARGE_TYPES_PASSED_BY_VALUE,
+ pedantic,
+ "functions taking large arguments by value"
+}
+
+#[derive(Copy, Clone)]
+pub struct PassByRefOrValue {
+ ref_min_size: u64,
+ value_max_size: u64,
+ avoid_breaking_exported_api: bool,
+}
+
+impl<'tcx> PassByRefOrValue {
+ pub fn new(
+ ref_min_size: Option<u64>,
+ value_max_size: u64,
+ avoid_breaking_exported_api: bool,
+ target: &Target,
+ ) -> Self {
+ let ref_min_size = ref_min_size.unwrap_or_else(|| {
+ let bit_width = u64::from(target.pointer_width);
+ // Cap the calculated bit width at 32-bits to reduce
+ // portability problems between 32 and 64-bit targets
+ let bit_width = cmp::min(bit_width, 32);
+ #[expect(clippy::integer_division)]
+ let byte_width = bit_width / 8;
+ // Use a limit of 2 times the register byte width
+ byte_width * 2
+ });
+
+ Self {
+ ref_min_size,
+ value_max_size,
+ avoid_breaking_exported_api,
+ }
+ }
+
+ fn check_poly_fn(&mut self, cx: &LateContext<'tcx>, def_id: LocalDefId, decl: &FnDecl<'_>, span: Option<Span>) {
+ if self.avoid_breaking_exported_api && cx.access_levels.is_exported(def_id) {
+ return;
+ }
+
+ let fn_sig = cx.tcx.fn_sig(def_id);
+ let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id));
+
+ // Gather all the lifetimes found in the output type which may affect whether
+ // `TRIVIALLY_COPY_PASS_BY_REF` should be linted.
+ let mut output_regions = FxHashSet::default();
+ for_each_top_level_late_bound_region(fn_sig.skip_binder().output(), |region| -> ControlFlow<!> {
+ output_regions.insert(region);
+ ControlFlow::Continue(())
+ });
+
+ for (index, (input, ty)) in iter::zip(
+ decl.inputs,
+ fn_sig.skip_binder().inputs().iter().map(|&ty| fn_sig.rebind(ty)),
+ )
+ .enumerate()
+ {
+ // All spans generated from a proc-macro invocation are the same...
+ match span {
+ Some(s) if s == input.span => continue,
+ _ => (),
+ }
+
+ match *ty.skip_binder().kind() {
+ ty::Ref(lt, ty, Mutability::Not) => {
+ match lt.kind() {
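+                        // If the argument's lifetime also appears in the return type, the
+                        // returned reference may borrow from this argument, so keep the
+                        // reference and skip the lint.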
+ RegionKind::ReLateBound(index, region)
+ if index.as_u32() == 0 && output_regions.contains(&region) =>
+ {
+ continue;
+ },
+ // Early bound regions on functions are either from the containing item, are bounded by another
+ // lifetime, or are used as a bound for a type or lifetime.
+ RegionKind::ReEarlyBound(..) => continue,
+ _ => (),
+ }
+
+ let ty = cx.tcx.erase_late_bound_regions(fn_sig.rebind(ty));
+ if is_copy(cx, ty)
+ && let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes())
+ && size <= self.ref_min_size
+ && let hir::TyKind::Rptr(_, MutTy { ty: decl_ty, .. }) = input.kind
+ {
+ if let Some(typeck) = cx.maybe_typeck_results() {
+ // Don't lint if an unsafe pointer is created.
+ // TODO: Limit the check only to unsafe pointers to the argument (or part of the argument)
+ // which escape the current function.
+ if typeck.node_types().iter().any(|(_, &ty)| ty.is_unsafe_ptr())
+ || typeck
+ .adjustments()
+ .iter()
+ .flat_map(|(_, a)| a)
+ .any(|a| matches!(a.kind, Adjust::Pointer(PointerCast::UnsafeFnPointer)))
+ {
+ continue;
+ }
+ }
+ let value_type = if fn_body.and_then(|body| body.params.get(index)).map_or(false, is_self) {
+ "self".into()
+ } else {
+ snippet(cx, decl_ty.span, "_").into()
+ };
+ span_lint_and_sugg(
+ cx,
+ TRIVIALLY_COPY_PASS_BY_REF,
+ input.span,
+ &format!("this argument ({} byte) is passed by reference, but would be more efficient if passed by value (limit: {} byte)", size, self.ref_min_size),
+ "consider passing by value instead",
+ value_type,
+ Applicability::Unspecified,
+ );
+ }
+ },
+
+ ty::Adt(_, _) | ty::Array(_, _) | ty::Tuple(_) => {
+ // if function has a body and parameter is annotated with mut, ignore
+ if let Some(param) = fn_body.and_then(|body| body.params.get(index)) {
+ match param.pat.kind {
+ PatKind::Binding(BindingAnnotation::Unannotated, _, _, _) => {},
+ _ => continue,
+ }
+ }
+ let ty = cx.tcx.erase_late_bound_regions(ty);
+
+ if_chain! {
+ if is_copy(cx, ty);
+ if !is_self_ty(input);
+ if let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes());
+ if size > self.value_max_size;
+ then {
+ span_lint_and_sugg(
+ cx,
+ LARGE_TYPES_PASSED_BY_VALUE,
+ input.span,
+ &format!("this argument ({} byte) is passed by value, but might be more efficient if passed by reference (limit: {} byte)", size, self.value_max_size),
+ "consider passing by reference instead",
+ format!("&{}", snippet(cx, input.span, "_")),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ },
+
+ _ => {},
+ }
+ }
+ }
+}
+
+impl_lint_pass!(PassByRefOrValue => [TRIVIALLY_COPY_PASS_BY_REF, LARGE_TYPES_PASSED_BY_VALUE]);
+
+impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue {
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
+ if item.span.from_expansion() {
+ return;
+ }
+
+ if let hir::TraitItemKind::Fn(method_sig, _) = &item.kind {
+ self.check_poly_fn(cx, item.def_id, method_sig.decl, None);
+ }
+ }
+
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ _body: &'tcx Body<'_>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ if span.from_expansion() {
+ return;
+ }
+
+ match kind {
+ FnKind::ItemFn(.., header) => {
+ if header.abi != Abi::Rust {
+ return;
+ }
+ let attrs = cx.tcx.hir().attrs(hir_id);
+ for a in attrs {
+ if let Some(meta_items) = a.meta_item_list() {
+ if a.has_name(sym::proc_macro_derive)
+ || (a.has_name(sym::inline) && attr::list_contains_name(&meta_items, sym::always))
+ {
+ return;
+ }
+ }
+ }
+ },
+ FnKind::Method(..) => (),
+ FnKind::Closure => return,
+ }
+
+ // Exclude non-inherent impls
+ if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
+ if matches!(
+ item.kind,
+ ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..)
+ ) {
+ return;
+ }
+ }
+
+ self.check_poly_fn(cx, cx.tcx.hir().local_def_id(hir_id), decl, Some(span));
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/path_buf_push_overwrite.rs b/src/tools/clippy/clippy_lints/src/path_buf_push_overwrite.rs
new file mode 100644
index 000000000..3f940ce61
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/path_buf_push_overwrite.rs
@@ -0,0 +1,74 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+use std::path::{Component, Path};
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for [push](https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.push)
+ /// calls on `PathBuf` that can cause overwrites.
+ ///
+ /// ### Why is this bad?
+ /// Calling `push` with a root path at the start can overwrite the
+    /// previously defined path.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::path::PathBuf;
+ ///
+ /// let mut x = PathBuf::from("/foo");
+ /// x.push("/bar");
+ /// assert_eq!(x, PathBuf::from("/bar"));
+ /// ```
+ /// Could be written:
+ ///
+ /// ```rust
+ /// use std::path::PathBuf;
+ ///
+ /// let mut x = PathBuf::from("/foo");
+ /// x.push("bar");
+ /// assert_eq!(x, PathBuf::from("/foo/bar"));
+ /// ```
+ #[clippy::version = "1.36.0"]
+ pub PATH_BUF_PUSH_OVERWRITE,
+ nursery,
+ "calling `push` with file system root on `PathBuf` can overwrite it"
+}
+
+declare_lint_pass!(PathBufPushOverwrite => [PATH_BUF_PUSH_OVERWRITE]);
+
+impl<'tcx> LateLintPass<'tcx> for PathBufPushOverwrite {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::MethodCall(path, args, _) = expr.kind;
+ if path.ident.name == sym!(push);
+ if args.len() == 2;
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&args[0]).peel_refs(), sym::PathBuf);
+ if let Some(get_index_arg) = args.get(1);
+ if let ExprKind::Lit(ref lit) = get_index_arg.kind;
+ if let LitKind::Str(ref path_lit, _) = lit.node;
+ if let pushed_path = Path::new(path_lit.as_str());
+ if let Some(pushed_path_lit) = pushed_path.to_str();
+ if pushed_path.has_root();
+ if let Some(root) = pushed_path.components().next();
+ if root == Component::RootDir;
+ then {
+ span_lint_and_sugg(
+ cx,
+ PATH_BUF_PUSH_OVERWRITE,
+ lit.span,
+ "calling `push` with '/' or '\\' (file system root) will overwrite the previous path definition",
+ "try",
+ format!("\"{}\"", pushed_path_lit.trim_start_matches(|c| c == '/' || c == '\\')),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs
new file mode 100644
index 000000000..a4d265111
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs
@@ -0,0 +1,194 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{
+ intravisit, Body, Expr, ExprKind, FnDecl, HirId, Let, LocalSource, Mutability, Pat, PatKind, Stmt, StmtKind,
+};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for patterns that aren't exact representations of the types
+ /// they are applied to.
+ ///
+ /// To satisfy this lint, you will have to adjust either the expression that is matched
+ /// against or the pattern itself, as well as the bindings that are introduced by the
+ /// adjusted patterns. For matching you will have to either dereference the expression
+ /// with the `*` operator, or amend the patterns to explicitly match against `&<pattern>`
+ /// or `&mut <pattern>` depending on the reference mutability. For the bindings you need
+ /// to use the inverse. You can leave them as plain bindings if you wish for the value
+ /// to be copied, but you must use `ref mut <variable>` or `ref <variable>` to construct
+ /// a reference into the matched structure.
+ ///
+ /// If you are looking for a way to learn about ownership semantics in more detail, it
+ /// is recommended to look at IDE options available to you to highlight types, lifetimes
+ /// and reference semantics in your code. The available tooling would expose these things
+ /// in a general way even outside of the various pattern matching mechanics. Of course
+ /// this lint can still be used to highlight areas of interest and ensure a good understanding
+ /// of ownership semantics.
+ ///
+ /// ### Why is this bad?
+ /// It isn't bad in general. But in some contexts it can be desirable
+ /// because it increases ownership hints in the code, and will guard against some changes
+ /// in ownership.
+ ///
+ /// ### Example
+ /// This example shows the basic adjustments necessary to satisfy the lint. Note how
+ /// the matched expression is explicitly dereferenced with `*` and the `inner` variable
+ /// is bound to a shared borrow via `ref inner`.
+ ///
+ /// ```rust,ignore
+ /// // Bad
+ /// let value = &Some(Box::new(23));
+ /// match value {
+ /// Some(inner) => println!("{}", inner),
+ /// None => println!("none"),
+ /// }
+ ///
+ /// // Good
+ /// let value = &Some(Box::new(23));
+ /// match *value {
+ /// Some(ref inner) => println!("{}", inner),
+ /// None => println!("none"),
+ /// }
+ /// ```
+ ///
+ /// The following example demonstrates one of the advantages of the more verbose style.
+    /// Note how the second version uses `ref mut a` to explicitly declare `a` a mutable
+ /// borrow, while `b` is simply taken by value. This ensures that the loop body cannot
+ /// accidentally modify the wrong part of the structure.
+ ///
+ /// ```rust,ignore
+ /// // Bad
+ /// let mut values = vec![(2, 3), (3, 4)];
+ /// for (a, b) in &mut values {
+ /// *a += *b;
+ /// }
+ ///
+ /// // Good
+ /// let mut values = vec![(2, 3), (3, 4)];
+ /// for &mut (ref mut a, b) in &mut values {
+ /// *a += b;
+ /// }
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub PATTERN_TYPE_MISMATCH,
+ restriction,
+ "type of pattern does not match the expression type"
+}
+
+declare_lint_pass!(PatternTypeMismatch => [PATTERN_TYPE_MISMATCH]);
+
+impl<'tcx> LateLintPass<'tcx> for PatternTypeMismatch {
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ if let StmtKind::Local(local) = stmt.kind {
+ if in_external_macro(cx.sess(), local.pat.span) {
+ return;
+ }
+ let deref_possible = match local.source {
+ LocalSource::Normal => DerefPossible::Possible,
+ _ => DerefPossible::Impossible,
+ };
+ apply_lint(cx, local.pat, deref_possible);
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Match(_, arms, _) = expr.kind {
+ for arm in arms {
+ let pat = &arm.pat;
+ if apply_lint(cx, pat, DerefPossible::Possible) {
+ break;
+ }
+ }
+ }
+ if let ExprKind::Let(Let { pat, .. }) = expr.kind {
+ apply_lint(cx, pat, DerefPossible::Possible);
+ }
+ }
+
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ _: intravisit::FnKind<'tcx>,
+ _: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ _: Span,
+ _: HirId,
+ ) {
+ for param in body.params {
+ apply_lint(cx, param.pat, DerefPossible::Impossible);
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+enum DerefPossible {
+ Possible,
+ Impossible,
+}
+
+fn apply_lint<'tcx>(cx: &LateContext<'tcx>, pat: &Pat<'_>, deref_possible: DerefPossible) -> bool {
+ let maybe_mismatch = find_first_mismatch(cx, pat);
+ if let Some((span, mutability, level)) = maybe_mismatch {
+ span_lint_and_help(
+ cx,
+ PATTERN_TYPE_MISMATCH,
+ span,
+ "type of pattern does not match the expression type",
+ None,
+ &format!(
+ "{}explicitly match against a `{}` pattern and adjust the enclosed variable bindings",
+ match (deref_possible, level) {
+ (DerefPossible::Possible, Level::Top) => "use `*` to dereference the match expression or ",
+ _ => "",
+ },
+ match mutability {
+ Mutability::Mut => "&mut _",
+ Mutability::Not => "&_",
+ },
+ ),
+ );
+ true
+ } else {
+ false
+ }
+}
+
+#[derive(Debug, Copy, Clone)]
+enum Level {
+ Top,
+ Lower,
+}
+
+fn find_first_mismatch<'tcx>(cx: &LateContext<'tcx>, pat: &Pat<'_>) -> Option<(Span, Mutability, Level)> {
+ let mut result = None;
+ pat.walk(|p| {
+ if result.is_some() {
+ return false;
+ }
+ if in_external_macro(cx.sess(), p.span) {
+ return true;
+ }
+ let adjust_pat = match p.kind {
+ PatKind::Or([p, ..]) => p,
+ _ => p,
+ };
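+        // A non-empty `pat_adjustments` entry means the compiler inserted implicit
+        // dereferences for this pattern, i.e. the pattern does not explicitly match
+        // the reference it is applied to.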
+ if let Some(adjustments) = cx.typeck_results().pat_adjustments().get(adjust_pat.hir_id) {
+ if let [first, ..] = **adjustments {
+ if let ty::Ref(.., mutability) = *first.kind() {
+ let level = if p.hir_id == pat.hir_id {
+ Level::Top
+ } else {
+ Level::Lower
+ };
+ result = Some((p.span, mutability, level));
+ }
+ }
+ }
+ result.is_none()
+ });
+ result
+}
diff --git a/src/tools/clippy/clippy_lints/src/precedence.rs b/src/tools/clippy/clippy_lints/src/precedence.rs
new file mode 100644
index 000000000..cc0533c9f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/precedence.rs
@@ -0,0 +1,161 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_ast::ast::{BinOpKind, Expr, ExprKind, LitKind, UnOp};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+
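+// For these "odd" functions `f(-x) == -f(x)`, so `-x.f()` and `(-x).f()` produce
+// the same value and no parentheses are suggested.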
+const ALLOWED_ODD_FUNCTIONS: [&str; 14] = [
+ "asin",
+ "asinh",
+ "atan",
+ "atanh",
+ "cbrt",
+ "fract",
+ "round",
+ "signum",
+ "sin",
+ "sinh",
+ "tan",
+ "tanh",
+ "to_degrees",
+ "to_radians",
+];
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for operations where precedence may be unclear
+ /// and suggests to add parentheses. Currently it catches the following:
+ /// * mixed usage of arithmetic and bit shifting/combining operators without
+ /// parentheses
+ /// * a "negative" numeric literal (which is really a unary `-` followed by a
+ /// numeric literal)
+ /// followed by a method call
+ ///
+ /// ### Why is this bad?
+ /// Not everyone knows the precedence of those operators by
+ /// heart, so expressions like these may trip others trying to reason about the
+ /// code.
+ ///
+ /// ### Example
+ /// * `1 << 2 + 3` equals 32, while `(1 << 2) + 3` equals 7
+ /// * `-1i32.abs()` equals -1, while `(-1i32).abs()` equals 1
+ #[clippy::version = "pre 1.29.0"]
+ pub PRECEDENCE,
+ complexity,
+ "operations where precedence may be unclear"
+}
+
+declare_lint_pass!(Precedence => [PRECEDENCE]);
+
+impl EarlyLintPass for Precedence {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ if let ExprKind::Binary(Spanned { node: op, .. }, ref left, ref right) = expr.kind {
+ let span_sugg = |expr: &Expr, sugg, appl| {
+ span_lint_and_sugg(
+ cx,
+ PRECEDENCE,
+ expr.span,
+ "operator precedence can trip the unwary",
+ "consider parenthesizing your expression",
+ sugg,
+ appl,
+ );
+ };
+
+ if !is_bit_op(op) {
+ return;
+ }
+ let mut applicability = Applicability::MachineApplicable;
+ match (is_arith_expr(left), is_arith_expr(right)) {
+ (true, true) => {
+ let sugg = format!(
+ "({}) {} ({})",
+ snippet_with_applicability(cx, left.span, "..", &mut applicability),
+ op.to_string(),
+ snippet_with_applicability(cx, right.span, "..", &mut applicability)
+ );
+ span_sugg(expr, sugg, applicability);
+ },
+ (true, false) => {
+ let sugg = format!(
+ "({}) {} {}",
+ snippet_with_applicability(cx, left.span, "..", &mut applicability),
+ op.to_string(),
+ snippet_with_applicability(cx, right.span, "..", &mut applicability)
+ );
+ span_sugg(expr, sugg, applicability);
+ },
+ (false, true) => {
+ let sugg = format!(
+ "{} {} ({})",
+ snippet_with_applicability(cx, left.span, "..", &mut applicability),
+ op.to_string(),
+ snippet_with_applicability(cx, right.span, "..", &mut applicability)
+ );
+ span_sugg(expr, sugg, applicability);
+ },
+ (false, false) => (),
+ }
+ }
+
+ if let ExprKind::Unary(UnOp::Neg, operand) = &expr.kind {
+ let mut arg = operand;
+
+ let mut all_odd = true;
+ while let ExprKind::MethodCall(path_segment, args, _) = &arg.kind {
+ let path_segment_str = path_segment.ident.name.as_str();
+ all_odd &= ALLOWED_ODD_FUNCTIONS
+ .iter()
+ .any(|odd_function| **odd_function == *path_segment_str);
+ arg = args.first().expect("A method always has a receiver.");
+ }
+
+ if_chain! {
+ if !all_odd;
+ if let ExprKind::Lit(lit) = &arg.kind;
+ if let LitKind::Int(..) | LitKind::Float(..) = &lit.kind;
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ PRECEDENCE,
+ expr.span,
+ "unary minus has lower precedence than method call",
+ "consider adding parentheses to clarify your intent",
+ format!(
+ "-({})",
+ snippet_with_applicability(cx, operand.span, "..", &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+ }
+ }
+}
+
+fn is_arith_expr(expr: &Expr) -> bool {
+ match expr.kind {
+ ExprKind::Binary(Spanned { node: op, .. }, _, _) => is_arith_op(op),
+ _ => false,
+ }
+}
+
+#[must_use]
+fn is_bit_op(op: BinOpKind) -> bool {
+ use rustc_ast::ast::BinOpKind::{BitAnd, BitOr, BitXor, Shl, Shr};
+ matches!(op, BitXor | BitAnd | BitOr | Shl | Shr)
+}
+
+#[must_use]
+fn is_arith_op(op: BinOpKind) -> bool {
+ use rustc_ast::ast::BinOpKind::{Add, Div, Mul, Rem, Sub};
+ matches!(op, Add | Sub | Mul | Div | Rem)
+}
diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs
new file mode 100644
index 000000000..3c5ea2d94
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/ptr.rs
@@ -0,0 +1,684 @@
+//! Checks for usage of `&Vec[_]` and `&String`.
+
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then, span_lint_hir_and_then};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::expr_sig;
+use clippy_utils::visitors::contains_unsafe_block;
+use clippy_utils::{get_expr_use_or_unification_node, is_lint_allowed, path_def_id, path_to_local, paths};
+use if_chain::if_chain;
+use rustc_errors::{Applicability, MultiSpan};
+use rustc_hir::def_id::DefId;
+use rustc_hir::hir_id::HirIdMap;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{
+ self as hir, AnonConst, BinOpKind, BindingAnnotation, Body, Expr, ExprKind, FnRetTy, FnSig, GenericArg,
+ ImplItemKind, ItemKind, Lifetime, LifetimeName, Mutability, Node, Param, ParamName, PatKind, QPath, TraitFn,
+ TraitItem, TraitItemKind, TyKind, Unsafety,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+use rustc_span::symbol::Symbol;
+use std::fmt;
+use std::iter;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint checks for function arguments of type `&String`, `&Vec`,
+ /// `&PathBuf`, and `Cow<_>`. It will also suggest you replace `.clone()` calls
+ /// with the appropriate `.to_owned()`/`to_string()` calls.
+ ///
+ /// ### Why is this bad?
+ /// Requiring the argument to be of the specific size
+ /// makes the function less useful for no benefit; slices in the form of `&[T]`
+ /// or `&str` usually suffice and can be obtained from other types, too.
+ ///
+ /// ### Known problems
+ /// There may be `fn(&Vec)`-typed references pointing to your function.
+ /// If you have them, you will get a compiler error after applying this lint's
+ /// suggestions. You then have the choice to undo your changes or change the
+ /// type of the reference.
+ ///
+ /// Note that if the function is part of your public interface, there may be
+ /// other crates referencing it, of which you may not be aware. Carefully
+ /// deprecate the function before applying the lint suggestions in this case.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// fn foo(&Vec<u32>) { .. }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```ignore
+ /// fn foo(&[u32]) { .. }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub PTR_ARG,
+ style,
+ "fn arguments of the type `&Vec<...>` or `&String`, suggesting to use `&[...]` or `&str` instead, respectively"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint checks for equality comparisons with `ptr::null`
+ ///
+ /// ### Why is this bad?
+ /// It's easier and more readable to use the inherent
+ /// `.is_null()`
+ /// method instead
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use std::ptr;
+ ///
+ /// if x == ptr::null {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// if x.is_null() {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CMP_NULL,
+ style,
+ "comparing a pointer to a null pointer, suggesting to use `.is_null()` instead"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint checks for functions that take immutable references and return
+    /// mutable ones. This will not trigger if no unsafe code exists, as there
+    /// are multiple safe functions which will do this transformation.
+ ///
+ /// To be on the conservative side, if there's at least one mutable
+ /// reference with the output lifetime, this lint will not trigger.
+ ///
+ /// ### Why is this bad?
+ /// Creating a mutable reference which can be repeatably derived from an
+ /// immutable reference is unsound as it allows creating multiple live
+ /// mutable references to the same object.
+ ///
+ /// This [error](https://github.com/rust-lang/rust/issues/39465) actually
+    /// led to an interim Rust release 1.15.1.
+ ///
+ /// ### Known problems
+ /// This pattern is used by memory allocators to allow allocating multiple
+ /// objects while returning mutable references to each one. So long as
+ /// different mutable references are returned each time such a function may
+ /// be safe.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// fn foo(&Foo) -> &mut Bar { .. }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MUT_FROM_REF,
+ correctness,
+ "fns that create mutable refs from immutable ref args"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint checks for invalid usages of `ptr::null`.
+ ///
+ /// ### Why is this bad?
+ /// This causes undefined behavior.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// // Undefined behavior
+ /// unsafe { std::slice::from_raw_parts(ptr::null(), 0); }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```ignore
+ /// unsafe { std::slice::from_raw_parts(NonNull::dangling().as_ptr(), 0); }
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub INVALID_NULL_PTR_USAGE,
+ correctness,
+ "invalid usage of a null pointer, suggesting `NonNull::dangling()` instead"
+}
+
+declare_lint_pass!(Ptr => [PTR_ARG, CMP_NULL, MUT_FROM_REF, INVALID_NULL_PTR_USAGE]);
+
+impl<'tcx> LateLintPass<'tcx> for Ptr {
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
+ if let TraitItemKind::Fn(sig, trait_method) = &item.kind {
+ if matches!(trait_method, TraitFn::Provided(_)) {
+ // Handled by check body.
+ return;
+ }
+
+ check_mut_from_ref(cx, sig, None);
+ for arg in check_fn_args(
+ cx,
+ cx.tcx.fn_sig(item.def_id).skip_binder().inputs(),
+ sig.decl.inputs,
+ &[],
+ )
+ .filter(|arg| arg.mutability() == Mutability::Not)
+ {
+ span_lint_hir_and_then(cx, PTR_ARG, arg.emission_id, arg.span, &arg.build_msg(), |diag| {
+ diag.span_suggestion(
+ arg.span,
+ "change this to",
+ format!("{}{}", arg.ref_prefix, arg.deref_ty.display(cx)),
+ Applicability::Unspecified,
+ );
+ });
+ }
+ }
+ }
+
+ fn check_body(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
+ let hir = cx.tcx.hir();
+ let mut parents = hir.parent_iter(body.value.hir_id);
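+        // Determine which kind of item owns this body: free functions, methods in
+        // inherent impls, and provided trait methods are checked; methods that
+        // implement a trait are skipped since their signature is fixed by the trait.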
+ let (item_id, sig, is_trait_item) = match parents.next() {
+ Some((_, Node::Item(i))) => {
+ if let ItemKind::Fn(sig, ..) = &i.kind {
+ (i.def_id, sig, false)
+ } else {
+ return;
+ }
+ },
+ Some((_, Node::ImplItem(i))) => {
+ if !matches!(parents.next(),
+ Some((_, Node::Item(i))) if matches!(&i.kind, ItemKind::Impl(i) if i.of_trait.is_none())
+ ) {
+ return;
+ }
+ if let ImplItemKind::Fn(sig, _) = &i.kind {
+ (i.def_id, sig, false)
+ } else {
+ return;
+ }
+ },
+ Some((_, Node::TraitItem(i))) => {
+ if let TraitItemKind::Fn(sig, _) = &i.kind {
+ (i.def_id, sig, true)
+ } else {
+ return;
+ }
+ },
+ _ => return,
+ };
+
+ check_mut_from_ref(cx, sig, Some(body));
+ let decl = sig.decl;
+ let sig = cx.tcx.fn_sig(item_id).skip_binder();
+ let lint_args: Vec<_> = check_fn_args(cx, sig.inputs(), decl.inputs, body.params)
+ .filter(|arg| !is_trait_item || arg.mutability() == Mutability::Not)
+ .collect();
+ let results = check_ptr_arg_usage(cx, body, &lint_args);
+
+ for (result, args) in results.iter().zip(lint_args.iter()).filter(|(r, _)| !r.skip) {
+ span_lint_hir_and_then(cx, PTR_ARG, args.emission_id, args.span, &args.build_msg(), |diag| {
+ diag.multipart_suggestion(
+ "change this to",
+ iter::once((args.span, format!("{}{}", args.ref_prefix, args.deref_ty.display(cx))))
+ .chain(result.replacements.iter().map(|r| {
+ (
+ r.expr_span,
+ format!("{}{}", snippet_opt(cx, r.self_span).unwrap(), r.replacement),
+ )
+ }))
+ .collect(),
+ Applicability::Unspecified,
+ );
+ });
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Binary(ref op, l, r) = expr.kind {
+ if (op.node == BinOpKind::Eq || op.node == BinOpKind::Ne) && (is_null_path(cx, l) || is_null_path(cx, r)) {
+ span_lint(
+ cx,
+ CMP_NULL,
+ expr.span,
+ "comparing with null is better expressed by the `.is_null()` method",
+ );
+ }
+ } else {
+ check_invalid_ptr_usage(cx, expr);
+ }
+ }
+}
+
+fn check_invalid_ptr_usage<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // (fn_path, arg_indices) - `arg_indices` are the `arg` positions where null would cause U.B.
+ const INVALID_NULL_PTR_USAGE_TABLE: [(&[&str], &[usize]); 16] = [
+ (&paths::SLICE_FROM_RAW_PARTS, &[0]),
+ (&paths::SLICE_FROM_RAW_PARTS_MUT, &[0]),
+ (&paths::PTR_COPY, &[0, 1]),
+ (&paths::PTR_COPY_NONOVERLAPPING, &[0, 1]),
+ (&paths::PTR_READ, &[0]),
+ (&paths::PTR_READ_UNALIGNED, &[0]),
+ (&paths::PTR_READ_VOLATILE, &[0]),
+ (&paths::PTR_REPLACE, &[0]),
+ (&paths::PTR_SLICE_FROM_RAW_PARTS, &[0]),
+ (&paths::PTR_SLICE_FROM_RAW_PARTS_MUT, &[0]),
+ (&paths::PTR_SWAP, &[0, 1]),
+ (&paths::PTR_SWAP_NONOVERLAPPING, &[0, 1]),
+ (&paths::PTR_WRITE, &[0]),
+ (&paths::PTR_WRITE_UNALIGNED, &[0]),
+ (&paths::PTR_WRITE_VOLATILE, &[0]),
+ (&paths::PTR_WRITE_BYTES, &[0]),
+ ];
+
+ if_chain! {
+ if let ExprKind::Call(fun, args) = expr.kind;
+ if let ExprKind::Path(ref qpath) = fun.kind;
+ if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
+ let fun_def_path = cx.get_def_path(fun_def_id).into_iter().map(Symbol::to_ident_string).collect::<Vec<_>>();
+ if let Some(&(_, arg_indices)) = INVALID_NULL_PTR_USAGE_TABLE
+ .iter()
+ .find(|&&(fn_path, _)| fn_path == fun_def_path);
+ then {
+ for &arg_idx in arg_indices {
+ if let Some(arg) = args.get(arg_idx).filter(|arg| is_null_path(cx, arg)) {
+ span_lint_and_sugg(
+ cx,
+ INVALID_NULL_PTR_USAGE,
+ arg.span,
+ "pointer must be non-null",
+ "change this to",
+ "core::ptr::NonNull::dangling().as_ptr()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+}
+
+#[derive(Default)]
+struct PtrArgResult {
+ skip: bool,
+ replacements: Vec<PtrArgReplacement>,
+}
+
+struct PtrArgReplacement {
+ expr_span: Span,
+ self_span: Span,
+ replacement: &'static str,
+}
+
+struct PtrArg<'tcx> {
+ idx: usize,
+ emission_id: hir::HirId,
+ span: Span,
+ ty_did: DefId,
+ ty_name: Symbol,
+ method_renames: &'static [(&'static str, &'static str)],
+ ref_prefix: RefPrefix,
+ deref_ty: DerefTy<'tcx>,
+}
+impl PtrArg<'_> {
+ fn build_msg(&self) -> String {
+ format!(
+ "writing `&{}{}` instead of `&{}{}` involves a new object where a slice will do",
+ self.ref_prefix.mutability.prefix_str(),
+ self.ty_name,
+ self.ref_prefix.mutability.prefix_str(),
+ self.deref_ty.argless_str(),
+ )
+ }
+
+ fn mutability(&self) -> Mutability {
+ self.ref_prefix.mutability
+ }
+}
+
+struct RefPrefix {
+ lt: LifetimeName,
+ mutability: Mutability,
+}
+impl fmt::Display for RefPrefix {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use fmt::Write;
+ f.write_char('&')?;
+ match self.lt {
+ LifetimeName::Param(_, ParamName::Plain(name)) => {
+ name.fmt(f)?;
+ f.write_char(' ')?;
+ },
+ LifetimeName::Infer => f.write_str("'_ ")?,
+ LifetimeName::Static => f.write_str("'static ")?,
+ _ => (),
+ }
+ f.write_str(self.mutability.prefix_str())
+ }
+}
+
+struct DerefTyDisplay<'a, 'tcx>(&'a LateContext<'tcx>, &'a DerefTy<'tcx>);
+impl fmt::Display for DerefTyDisplay<'_, '_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use std::fmt::Write;
+ match self.1 {
+ DerefTy::Str => f.write_str("str"),
+ DerefTy::Path => f.write_str("Path"),
+ DerefTy::Slice(hir_ty, ty) => {
+ f.write_char('[')?;
+ match hir_ty.and_then(|s| snippet_opt(self.0, s)) {
+ Some(s) => f.write_str(&s)?,
+ None => ty.fmt(f)?,
+ }
+ f.write_char(']')
+ },
+ }
+ }
+}
+
+enum DerefTy<'tcx> {
+ Str,
+ Path,
+ Slice(Option<Span>, Ty<'tcx>),
+}
+impl<'tcx> DerefTy<'tcx> {
+ fn argless_str(&self) -> &'static str {
+ match *self {
+ Self::Str => "str",
+ Self::Path => "Path",
+ Self::Slice(..) => "[_]",
+ }
+ }
+
+ fn display<'a>(&'a self, cx: &'a LateContext<'tcx>) -> DerefTyDisplay<'a, 'tcx> {
+ DerefTyDisplay(cx, self)
+ }
+}
+
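+/// Collects the `&Vec<_>`, `&String`, `&PathBuf` and `&Cow<_>` parameters that `ptr_arg` may
+/// lint, pairing each with the `[T]`/`str`/`Path` type it should be replaced by. `&Cow<_>`
+/// parameters are linted directly here and are not yielded.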
+fn check_fn_args<'cx, 'tcx: 'cx>(
+ cx: &'cx LateContext<'tcx>,
+ tys: &'tcx [Ty<'tcx>],
+ hir_tys: &'tcx [hir::Ty<'tcx>],
+ params: &'tcx [Param<'tcx>],
+) -> impl Iterator<Item = PtrArg<'tcx>> + 'cx {
+ tys.iter()
+ .zip(hir_tys.iter())
+ .enumerate()
+ .filter_map(|(i, (ty, hir_ty))| {
+ if_chain! {
+ if let ty::Ref(_, ty, mutability) = *ty.kind();
+ if let ty::Adt(adt, substs) = *ty.kind();
+
+ if let TyKind::Rptr(lt, ref ty) = hir_ty.kind;
+ if let TyKind::Path(QPath::Resolved(None, path)) = ty.ty.kind;
+
+ // Check that the name as typed matches the actual name of the type.
+ // e.g. `fn foo(_: &Foo)` shouldn't trigger the lint when `Foo` is an alias for `Vec`
+ if let [.., name] = path.segments;
+ if cx.tcx.item_name(adt.did()) == name.ident.name;
+
+ then {
+ let emission_id = params.get(i).map_or(hir_ty.hir_id, |param| param.hir_id);
+ let (method_renames, deref_ty) = match cx.tcx.get_diagnostic_name(adt.did()) {
+ Some(sym::Vec) => (
+ [("clone", ".to_owned()")].as_slice(),
+ DerefTy::Slice(
+ name.args
+ .and_then(|args| args.args.first())
+ .and_then(|arg| if let GenericArg::Type(ty) = arg {
+ Some(ty.span)
+ } else {
+ None
+ }),
+ substs.type_at(0),
+ ),
+ ),
+ Some(sym::String) => (
+ [("clone", ".to_owned()"), ("as_str", "")].as_slice(),
+ DerefTy::Str,
+ ),
+ Some(sym::PathBuf) => (
+ [("clone", ".to_path_buf()"), ("as_path", "")].as_slice(),
+ DerefTy::Path,
+ ),
+ Some(sym::Cow) if mutability == Mutability::Not => {
+ let ty_name = name.args
+ .and_then(|args| {
+ args.args.iter().find_map(|a| match a {
+ GenericArg::Type(x) => Some(x),
+ _ => None,
+ })
+ })
+ .and_then(|arg| snippet_opt(cx, arg.span))
+ .unwrap_or_else(|| substs.type_at(1).to_string());
+ span_lint_hir_and_then(
+ cx,
+ PTR_ARG,
+ emission_id,
+ hir_ty.span,
+ "using a reference to `Cow` is not recommended",
+ |diag| {
+ diag.span_suggestion(
+ hir_ty.span,
+ "change this to",
+ format!("&{}{}", mutability.prefix_str(), ty_name),
+ Applicability::Unspecified,
+ );
+ }
+ );
+ return None;
+ },
+ _ => return None,
+ };
+ return Some(PtrArg {
+ idx: i,
+ emission_id,
+ span: hir_ty.span,
+ ty_did: adt.did(),
+ ty_name: name.ident.name,
+ method_renames,
+ ref_prefix: RefPrefix {
+ lt: lt.name,
+ mutability,
+ },
+ deref_ty,
+ });
+ }
+ }
+ None
+ })
+}
+
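+/// Lints `MUT_FROM_REF` when the return type is a `&mut` borrow whose lifetime appears only on
+/// immutable reference arguments, e.g. `fn f<'a>(x: &'a T) -> &'a mut T`, provided the function
+/// is `unsafe`, its body contains an `unsafe` block, or no body is given.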
+fn check_mut_from_ref<'tcx>(cx: &LateContext<'tcx>, sig: &FnSig<'_>, body: Option<&'tcx Body<'_>>) {
+ if let FnRetTy::Return(ty) = sig.decl.output
+ && let Some((out, Mutability::Mut, _)) = get_rptr_lm(ty)
+ {
+ let out_region = cx.tcx.named_region(out.hir_id);
+ let args: Option<Vec<_>> = sig
+ .decl
+ .inputs
+ .iter()
+ .filter_map(get_rptr_lm)
+ .filter(|&(lt, _, _)| cx.tcx.named_region(lt.hir_id) == out_region)
+ .map(|(_, mutability, span)| (mutability == Mutability::Not).then_some(span))
+ .collect();
+ if let Some(args) = args
+ && !args.is_empty()
+ && body.map_or(true, |body| {
+ sig.header.unsafety == Unsafety::Unsafe || contains_unsafe_block(cx, &body.value)
+ })
+ {
+ span_lint_and_then(
+ cx,
+ MUT_FROM_REF,
+ ty.span,
+ "mutable borrow from immutable input(s)",
+ |diag| {
+ let ms = MultiSpan::from_spans(args);
+ diag.span_note(ms, "immutable borrow here");
+ },
+ );
+ }
+ }
+}
+
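+/// Walks the body and decides, for each candidate argument, whether every use is compatible
+/// with the dereferenced type (e.g. `&[T]` instead of `&Vec<T>`), collecting the method calls
+/// that would need renaming (such as `clone` to `.to_owned()`).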
+#[expect(clippy::too_many_lines)]
+fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, args: &[PtrArg<'tcx>]) -> Vec<PtrArgResult> {
+ struct V<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ /// Map from a local id to which argument it came from (index into `Self::args` and
+ /// `Self::results`)
+ bindings: HirIdMap<usize>,
+ /// The arguments being checked.
+ args: &'cx [PtrArg<'tcx>],
+        /// The results for each argument (length should match `args.len()`)
+ results: Vec<PtrArgResult>,
+ /// The number of arguments which can't be linted. Used to return early.
+ skip_count: usize,
+ }
+ impl<'tcx> Visitor<'tcx> for V<'_, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_anon_const(&mut self, _: &'tcx AnonConst) {}
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if self.skip_count == self.args.len() {
+ return;
+ }
+
+            // Check if this is a local we care about
+ let args_idx = match path_to_local(e).and_then(|id| self.bindings.get(&id)) {
+ Some(&i) => i,
+ None => return walk_expr(self, e),
+ };
+ let args = &self.args[args_idx];
+ let result = &mut self.results[args_idx];
+
+ // Helper function to handle early returns.
+ let mut set_skip_flag = || {
+ if !result.skip {
+ self.skip_count += 1;
+ }
+ result.skip = true;
+ };
+
+ match get_expr_use_or_unification_node(self.cx.tcx, e) {
+ Some((Node::Stmt(_), _)) => (),
+ Some((Node::Local(l), _)) => {
+                    // Only trace simple bindings, e.g. `let x = y;`
+ if let PatKind::Binding(BindingAnnotation::Unannotated, id, _, None) = l.pat.kind {
+ self.bindings.insert(id, args_idx);
+ } else {
+ set_skip_flag();
+ }
+ },
+ Some((Node::Expr(e), child_id)) => match e.kind {
+ ExprKind::Call(f, expr_args) => {
+ let i = expr_args.iter().position(|arg| arg.hir_id == child_id).unwrap_or(0);
+ if expr_sig(self.cx, f).and_then(|sig| sig.input(i)).map_or(true, |ty| {
+ match *ty.skip_binder().peel_refs().kind() {
+ ty::Param(_) => true,
+ ty::Adt(def, _) => def.did() == args.ty_did,
+ _ => false,
+ }
+ }) {
+ // Passed to a function taking the non-dereferenced type.
+ set_skip_flag();
+ }
+ },
+ ExprKind::MethodCall(name, expr_args @ [self_arg, ..], _) => {
+ let i = expr_args.iter().position(|arg| arg.hir_id == child_id).unwrap_or(0);
+ if i == 0 {
+ // Check if the method can be renamed.
+ let name = name.ident.as_str();
+ if let Some((_, replacement)) = args.method_renames.iter().find(|&&(x, _)| x == name) {
+ result.replacements.push(PtrArgReplacement {
+ expr_span: e.span,
+ self_span: self_arg.span,
+ replacement,
+ });
+ return;
+ }
+ }
+
+ let id = if let Some(x) = self.cx.typeck_results().type_dependent_def_id(e.hir_id) {
+ x
+ } else {
+ set_skip_flag();
+ return;
+ };
+
+ match *self.cx.tcx.fn_sig(id).skip_binder().inputs()[i].peel_refs().kind() {
+ ty::Param(_) => {
+ set_skip_flag();
+ },
+                            // If the types match, check for methods which exist on both types,
+                            // e.g. `Vec::len` and `slice::len`
+ ty::Adt(def, _) if def.did() == args.ty_did => {
+ set_skip_flag();
+ },
+ _ => (),
+ }
+ },
+ // Indexing is fine for currently supported types.
+ ExprKind::Index(e, _) if e.hir_id == child_id => (),
+ _ => set_skip_flag(),
+ },
+ _ => set_skip_flag(),
+ }
+ }
+ }
+
+ let mut skip_count = 0;
+ let mut results = args.iter().map(|_| PtrArgResult::default()).collect::<Vec<_>>();
+ let mut v = V {
+ cx,
+ bindings: args
+ .iter()
+ .enumerate()
+ .filter_map(|(i, arg)| {
+ let param = &body.params[arg.idx];
+ match param.pat.kind {
+ PatKind::Binding(BindingAnnotation::Unannotated, id, _, None)
+ if !is_lint_allowed(cx, PTR_ARG, param.hir_id) =>
+ {
+ Some((id, i))
+ },
+ _ => {
+ skip_count += 1;
+ results[i].skip = true;
+ None
+ },
+ }
+ })
+ .collect(),
+ args,
+ results,
+ skip_count,
+ };
+ v.visit_expr(&body.value);
+ v.results
+}
+
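+/// If `ty` is a reference type, returns its lifetime, mutability and span.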
+fn get_rptr_lm<'tcx>(ty: &'tcx hir::Ty<'tcx>) -> Option<(&'tcx Lifetime, Mutability, Span)> {
+ if let TyKind::Rptr(ref lt, ref m) = ty.kind {
+ Some((lt, m.mutbl, ty.span))
+ } else {
+ None
+ }
+}
+
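+/// Returns `true` for zero-argument calls to `ptr::null` or `ptr::null_mut`.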
+fn is_null_path(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if let ExprKind::Call(pathexp, []) = expr.kind {
+ path_def_id(cx, pathexp).map_or(false, |id| {
+ matches!(cx.tcx.get_diagnostic_name(id), Some(sym::ptr_null | sym::ptr_null_mut))
+ })
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
new file mode 100644
index 000000000..b907f38af
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
@@ -0,0 +1,153 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::source::snippet_opt;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+use std::fmt;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of the `offset` pointer method with a `usize` casted to an
+ /// `isize`.
+ ///
+ /// ### Why is this bad?
+ /// If we’re always increasing the pointer address, we can avoid the numeric
+ /// cast by using the `add` method instead.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let vec = vec![b'a', b'b', b'c'];
+ /// let ptr = vec.as_ptr();
+ /// let offset = 1_usize;
+ ///
+ /// unsafe {
+ /// ptr.offset(offset as isize);
+ /// }
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```rust
+ /// let vec = vec![b'a', b'b', b'c'];
+ /// let ptr = vec.as_ptr();
+ /// let offset = 1_usize;
+ ///
+ /// unsafe {
+ /// ptr.add(offset);
+ /// }
+ /// ```
+ #[clippy::version = "1.30.0"]
+ pub PTR_OFFSET_WITH_CAST,
+ complexity,
+ "unneeded pointer offset cast"
+}
+
+declare_lint_pass!(PtrOffsetWithCast => [PTR_OFFSET_WITH_CAST]);
+
+impl<'tcx> LateLintPass<'tcx> for PtrOffsetWithCast {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+        // Check if the expression is a `ptr.offset` or `ptr.wrapping_offset` method call
+ let (receiver_expr, arg_expr, method) = match expr_as_ptr_offset_call(cx, expr) {
+ Some(call_arg) => call_arg,
+ None => return,
+ };
+
+ // Check if the argument to the method call is a cast from usize
+ let cast_lhs_expr = match expr_as_cast_from_usize(cx, arg_expr) {
+ Some(cast_lhs_expr) => cast_lhs_expr,
+ None => return,
+ };
+
+ let msg = format!("use of `{}` with a `usize` casted to an `isize`", method);
+ if let Some(sugg) = build_suggestion(cx, method, receiver_expr, cast_lhs_expr) {
+ span_lint_and_sugg(
+ cx,
+ PTR_OFFSET_WITH_CAST,
+ expr.span,
+ &msg,
+ "try",
+ sugg,
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint(cx, PTR_OFFSET_WITH_CAST, expr.span, &msg);
+ }
+ }
+}
+
+// If the given expression is a cast from a usize, return the lhs of the cast
+fn expr_as_cast_from_usize<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
+ if let ExprKind::Cast(cast_lhs_expr, _) = expr.kind {
+ if is_expr_ty_usize(cx, cast_lhs_expr) {
+ return Some(cast_lhs_expr);
+ }
+ }
+ None
+}
+
+// If the given expression is a ptr::offset or ptr::wrapping_offset method call, return the
+// receiver, the arg of the method call, and the method.
+fn expr_as_ptr_offset_call<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>, Method)> {
+ if let ExprKind::MethodCall(path_segment, [arg_0, arg_1, ..], _) = &expr.kind {
+ if is_expr_ty_raw_ptr(cx, arg_0) {
+ if path_segment.ident.name == sym::offset {
+ return Some((arg_0, arg_1, Method::Offset));
+ }
+ if path_segment.ident.name == sym!(wrapping_offset) {
+ return Some((arg_0, arg_1, Method::WrappingOffset));
+ }
+ }
+ }
+ None
+}
+
+// Is the type of the expression a usize?
+fn is_expr_ty_usize<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
+ cx.typeck_results().expr_ty(expr) == cx.tcx.types.usize
+}
+
+// Is the type of the expression a raw pointer?
+fn is_expr_ty_raw_ptr<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
+ cx.typeck_results().expr_ty(expr).is_unsafe_ptr()
+}
+
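+// Builds the suggested replacement, e.g. `ptr.add(offset)` for `ptr.offset(offset as isize)`.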
+fn build_suggestion<'tcx>(
+ cx: &LateContext<'tcx>,
+ method: Method,
+ receiver_expr: &Expr<'_>,
+ cast_lhs_expr: &Expr<'_>,
+) -> Option<String> {
+ let receiver = snippet_opt(cx, receiver_expr.span)?;
+ let cast_lhs = snippet_opt(cx, cast_lhs_expr.span)?;
+ Some(format!("{}.{}({})", receiver, method.suggestion(), cast_lhs))
+}
+
+#[derive(Copy, Clone)]
+enum Method {
+ Offset,
+ WrappingOffset,
+}
+
+impl Method {
+ #[must_use]
+ fn suggestion(self) -> &'static str {
+ match self {
+ Self::Offset => "add",
+ Self::WrappingOffset => "wrapping_add",
+ }
+ }
+}
+
+impl fmt::Display for Method {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Offset => write!(f, "offset"),
+ Self::WrappingOffset => write!(f, "wrapping_offset"),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/pub_use.rs b/src/tools/clippy/clippy_lints/src/pub_use.rs
new file mode 100644
index 000000000..9d2b0cedb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/pub_use.rs
@@ -0,0 +1,56 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{Item, ItemKind, VisibilityKind};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Restricts the usage of `pub use ...`
+ ///
+ /// ### Why is this bad?
+ ///
+ /// `pub use` is usually fine, but a project may wish to limit `pub use` instances to prevent
+    /// unintentional exports or to encourage placing exported items directly in public modules.
+ ///
+ /// ### Example
+ /// ```rust
+ /// pub mod outer {
+ /// mod inner {
+ /// pub struct Test {}
+ /// }
+ /// pub use inner::Test;
+ /// }
+ ///
+ /// use outer::Test;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// pub mod outer {
+ /// pub struct Test {}
+ /// }
+ ///
+ /// use outer::Test;
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub PUB_USE,
+ restriction,
+ "restricts the usage of `pub use`"
+}
+declare_lint_pass!(PubUse => [PUB_USE]);
+
+impl EarlyLintPass for PubUse {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if let ItemKind::Use(_) = item.kind &&
+ let VisibilityKind::Public = item.vis.kind {
+ span_lint_and_help(
+ cx,
+ PUB_USE,
+ item.span,
+ "using `pub use`",
+ None,
+ "move the exported item to a public module instead",
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/question_mark.rs b/src/tools/clippy/clippy_lints/src/question_mark.rs
new file mode 100644
index 000000000..fd0a53839
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/question_mark.rs
@@ -0,0 +1,231 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{
+ eq_expr_value, get_parent_node, is_else_clause, is_lang_ctor, path_to_local, path_to_local_id, peel_blocks,
+ peel_blocks_with_stmt,
+};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk};
+use rustc_hir::{BindingAnnotation, Expr, ExprKind, Node, PatKind, PathSegment, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::Ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, symbol::Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expressions that could be replaced by the question mark operator.
+ ///
+ /// ### Why is this bad?
+ /// Question mark usage is more idiomatic.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// if option.is_none() {
+ /// return None;
+ /// }
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```ignore
+ /// option?;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub QUESTION_MARK,
+ style,
+ "checks for expressions that could be replaced by the question mark operator"
+}
+
+declare_lint_pass!(QuestionMark => [QUESTION_MARK]);
+
+enum IfBlockType<'hir> {
+    /// An `if x.is_xxx() { a } else { b }` expression.
+ ///
+ /// Contains: caller (x), caller_type, call_sym (is_xxx), if_then (a), if_else (b)
+ IfIs(
+ &'hir Expr<'hir>,
+ Ty<'hir>,
+ Symbol,
+ &'hir Expr<'hir>,
+ Option<&'hir Expr<'hir>>,
+ ),
+ /// An `if let Xxx(a) = b { c } else { d }` expression.
+ ///
+ /// Contains: let_pat_qpath (Xxx), let_pat_type, let_pat_sym (a), let_expr (b), if_then (c),
+ /// if_else (d)
+ IfLet(
+ &'hir QPath<'hir>,
+ Ty<'hir>,
+ Symbol,
+ &'hir Expr<'hir>,
+ &'hir Expr<'hir>,
+ Option<&'hir Expr<'hir>>,
+ ),
+}
+
+/// Checks if the given expression in the given context matches the following structure:
+///
+/// ```ignore
+/// if option.is_none() {
+/// return None;
+/// }
+/// ```
+///
+/// ```ignore
+/// if result.is_err() {
+/// return result;
+/// }
+/// ```
+///
+/// If it matches, it suggests using the question mark operator instead.
+fn check_is_none_or_err_and_early_return<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
+ if_chain! {
+ if let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr);
+ if !is_else_clause(cx.tcx, expr);
+ if let ExprKind::MethodCall(segment, args, _) = &cond.kind;
+ if let Some(caller) = args.get(0);
+ let caller_ty = cx.typeck_results().expr_ty(caller);
+ let if_block = IfBlockType::IfIs(caller, caller_ty, segment.ident.name, then, r#else);
+ if is_early_return(sym::Option, cx, &if_block) || is_early_return(sym::Result, cx, &if_block);
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let receiver_str = snippet_with_applicability(cx, caller.span, "..", &mut applicability);
+ let by_ref = !caller_ty.is_copy_modulo_regions(cx.tcx.at(caller.span), cx.param_env) &&
+ !matches!(caller.kind, ExprKind::Call(..) | ExprKind::MethodCall(..));
+ let sugg = if let Some(else_inner) = r#else {
+ if eq_expr_value(cx, caller, peel_blocks(else_inner)) {
+ format!("Some({}?)", receiver_str)
+ } else {
+ return;
+ }
+ } else {
+ format!("{}{}?;", receiver_str, if by_ref { ".as_ref()" } else { "" })
+ };
+
+ span_lint_and_sugg(
+ cx,
+ QUESTION_MARK,
+ expr.span,
+ "this block may be rewritten with the `?` operator",
+ "replace it with",
+ sugg,
+ applicability,
+ );
+ }
+ }
+}
+
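+/// The `if let` counterpart of the check above: matches `if let` expressions on an `Option` or
+/// `Result` where one branch is an early return, and suggests the `?` operator instead.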
+fn check_if_let_some_or_err_and_early_return<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
+ if_chain! {
+ if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else }) = higher::IfLet::hir(cx, expr);
+ if !is_else_clause(cx.tcx, expr);
+ if let PatKind::TupleStruct(ref path1, [field], None) = let_pat.kind;
+ if let PatKind::Binding(annot, bind_id, ident, _) = field.kind;
+ let caller_ty = cx.typeck_results().expr_ty(let_expr);
+ let if_block = IfBlockType::IfLet(path1, caller_ty, ident.name, let_expr, if_then, if_else);
+ if (is_early_return(sym::Option, cx, &if_block) && path_to_local_id(peel_blocks(if_then), bind_id))
+ || is_early_return(sym::Result, cx, &if_block);
+ if if_else.map(|e| eq_expr_value(cx, let_expr, peel_blocks(e))).filter(|e| *e).is_none();
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let receiver_str = snippet_with_applicability(cx, let_expr.span, "..", &mut applicability);
+ let by_ref = matches!(annot, BindingAnnotation::Ref | BindingAnnotation::RefMut);
+ let requires_semi = matches!(get_parent_node(cx.tcx, expr.hir_id), Some(Node::Stmt(_)));
+ let sugg = format!(
+ "{}{}?{}",
+ receiver_str,
+ if by_ref { ".as_ref()" } else { "" },
+ if requires_semi { ";" } else { "" }
+ );
+ span_lint_and_sugg(
+ cx,
+ QUESTION_MARK,
+ expr.span,
+ "this block may be rewritten with the `?` operator",
+ "replace it with",
+ sugg,
+ applicability,
+ );
+ }
+ }
+}
+
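+/// Checks whether the given `if`/`if let` block is an early-return pattern for the `Option` or
+/// `Result` type named by `smbl`.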
+fn is_early_return(smbl: Symbol, cx: &LateContext<'_>, if_block: &IfBlockType<'_>) -> bool {
+ match *if_block {
+ IfBlockType::IfIs(caller, caller_ty, call_sym, if_then, _) => {
+ // If the block could be identified as `if x.is_none()/is_err()`,
+ // we then only need to check the if_then return to see if it is none/err.
+ is_type_diagnostic_item(cx, caller_ty, smbl)
+ && expr_return_none_or_err(smbl, cx, if_then, caller, None)
+ && match smbl {
+ sym::Option => call_sym == sym!(is_none),
+ sym::Result => call_sym == sym!(is_err),
+ _ => false,
+ }
+ },
+ IfBlockType::IfLet(qpath, let_expr_ty, let_pat_sym, let_expr, if_then, if_else) => {
+ is_type_diagnostic_item(cx, let_expr_ty, smbl)
+ && match smbl {
+ sym::Option => {
+ // We only need to check `if let Some(x) = option` not `if let None = option`,
+                        // because the latter one will be suggested as `if option.is_none()`, thus causing a conflict.
+ is_lang_ctor(cx, qpath, OptionSome)
+ && if_else.is_some()
+ && expr_return_none_or_err(smbl, cx, if_else.unwrap(), let_expr, None)
+ },
+ sym::Result => {
+ (is_lang_ctor(cx, qpath, ResultOk)
+ && if_else.is_some()
+ && expr_return_none_or_err(smbl, cx, if_else.unwrap(), let_expr, Some(let_pat_sym)))
+ || is_lang_ctor(cx, qpath, ResultErr)
+ && expr_return_none_or_err(smbl, cx, if_then, let_expr, Some(let_pat_sym))
+ },
+ _ => false,
+ }
+ },
+ }
+}
+
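+/// Checks whether `expr` (after peeling blocks and a possible `return`) is `None`, the checked
+/// local itself, or an `Err(..)` rewrapping the bound error value, depending on `smbl`.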
+fn expr_return_none_or_err(
+ smbl: Symbol,
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ cond_expr: &Expr<'_>,
+ err_sym: Option<Symbol>,
+) -> bool {
+ match peel_blocks_with_stmt(expr).kind {
+ ExprKind::Ret(Some(ret_expr)) => expr_return_none_or_err(smbl, cx, ret_expr, cond_expr, err_sym),
+ ExprKind::Path(ref qpath) => match smbl {
+ sym::Option => is_lang_ctor(cx, qpath, OptionNone),
+ sym::Result => path_to_local(expr).is_some() && path_to_local(expr) == path_to_local(cond_expr),
+ _ => false,
+ },
+ ExprKind::Call(call_expr, args_expr) => {
+ if_chain! {
+ if smbl == sym::Result;
+ if let ExprKind::Path(QPath::Resolved(_, path)) = &call_expr.kind;
+ if let Some(segment) = path.segments.first();
+ if let Some(err_sym) = err_sym;
+ if let Some(arg) = args_expr.first();
+ if let ExprKind::Path(QPath::Resolved(_, arg_path)) = &arg.kind;
+ if let Some(PathSegment { ident, .. }) = arg_path.segments.first();
+ then {
+ return segment.ident.name == sym::Err && err_sym == ident.name;
+ }
+ }
+ false
+ },
+ _ => false,
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for QuestionMark {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ check_is_none_or_err_and_early_return(cx, expr);
+ check_if_let_some_or_err_and_early_return(cx, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs
new file mode 100644
index 000000000..547d4da81
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/ranges.rs
@@ -0,0 +1,598 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::{snippet, snippet_opt, snippet_with_applicability};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{get_parent_expr, in_constant, is_integer_const, meets_msrv, msrvs, path_to_local};
+use clippy_utils::{higher, SpanlessEq};
+use if_chain::if_chain;
+use rustc_ast::ast::RangeLimits;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, HirId, PathSegment, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::{Span, Spanned};
+use rustc_span::sym;
+use std::cmp::Ordering;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for zipping a collection with the range of
+ /// `0.._.len()`.
+ ///
+ /// ### Why is this bad?
+ /// The code is better expressed with `.enumerate()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = vec![1];
+ /// let _ = x.iter().zip(0..x.len());
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = vec![1];
+ /// let _ = x.iter().enumerate();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub RANGE_ZIP_WITH_LEN,
+ complexity,
+ "zipping iterator with a range when `enumerate()` would do"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for exclusive ranges where 1 is added to the
+ /// upper bound, e.g., `x..(y+1)`.
+ ///
+ /// ### Why is this bad?
+ /// The code is more readable with an inclusive range
+ /// like `x..=y`.
+ ///
+ /// ### Known problems
+    /// Will add an unnecessary pair of parentheses when the
+ /// expression is not wrapped in a pair but starts with an opening parenthesis
+ /// and ends with a closing one.
+ /// I.e., `let _ = (f()+1)..(f()+1)` results in `let _ = ((f()+1)..=f())`.
+ ///
+ /// Also in many cases, inclusive ranges are still slower to run than
+ /// exclusive ranges, because they essentially add an extra branch that
+ /// LLVM may fail to hoist out of the loop.
+ ///
+ /// This will cause a warning that cannot be fixed if the consumer of the
+ /// range only accepts a specific range type, instead of the generic
+ /// `RangeBounds` trait
+ /// ([#3307](https://github.com/rust-lang/rust-clippy/issues/3307)).
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 0;
+ /// # let y = 1;
+ /// for i in x..(y+1) {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = 0;
+ /// # let y = 1;
+ /// for i in x..=y {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub RANGE_PLUS_ONE,
+ pedantic,
+ "`x..(y+1)` reads better as `x..=y`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for inclusive ranges where 1 is subtracted from
+ /// the upper bound, e.g., `x..=(y-1)`.
+ ///
+ /// ### Why is this bad?
+ /// The code is more readable with an exclusive range
+ /// like `x..y`.
+ ///
+ /// ### Known problems
+ /// This will cause a warning that cannot be fixed if
+ /// the consumer of the range only accepts a specific range type, instead of
+ /// the generic `RangeBounds` trait
+ /// ([#3307](https://github.com/rust-lang/rust-clippy/issues/3307)).
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 0;
+ /// # let y = 1;
+ /// for i in x..=(y-1) {
+ /// // ..
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = 0;
+ /// # let y = 1;
+ /// for i in x..y {
+ /// // ..
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub RANGE_MINUS_ONE,
+ pedantic,
+ "`x..=(y-1)` reads better as `x..y`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for range expressions `x..y` where both `x` and `y`
+ /// are constant and `x` is greater or equal to `y`.
+ ///
+ /// ### Why is this bad?
+ /// Empty ranges yield no values so iterating them is a no-op.
+ /// Moreover, trying to use a reversed range to index a slice will panic at run-time.
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// fn main() {
+ /// (10..=0).for_each(|x| println!("{}", x));
+ ///
+ /// let arr = [1, 2, 3, 4, 5];
+ /// let sub = &arr[3..1];
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn main() {
+ /// (0..=10).rev().for_each(|x| println!("{}", x));
+ ///
+ /// let arr = [1, 2, 3, 4, 5];
+ /// let sub = &arr[1..3];
+ /// }
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub REVERSED_EMPTY_RANGES,
+ correctness,
+ "reversing the limits of range expressions, resulting in empty ranges"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for expressions like `x >= 3 && x < 8` that could
+    /// be more readably expressed as `(3..8).contains(&x)`.
+ ///
+ /// ### Why is this bad?
+    /// `contains` expresses the intent better and has fewer
+ /// failure modes (such as fencepost errors or using `||` instead of `&&`).
+ ///
+ /// ### Example
+ /// ```rust
+ /// // given
+ /// let x = 6;
+ ///
+ /// assert!(x >= 3 && x < 8);
+ /// ```
+ /// Use instead:
+ /// ```rust
+    /// # let x = 6;
+ /// assert!((3..8).contains(&x));
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub MANUAL_RANGE_CONTAINS,
+ style,
+ "manually reimplementing {`Range`, `RangeInclusive`}`::contains`"
+}
+
+pub struct Ranges {
+ msrv: Option<RustcVersion>,
+}
+
+impl Ranges {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(Ranges => [
+ RANGE_ZIP_WITH_LEN,
+ RANGE_PLUS_ONE,
+ RANGE_MINUS_ONE,
+ REVERSED_EMPTY_RANGES,
+ MANUAL_RANGE_CONTAINS,
+]);
+
+impl<'tcx> LateLintPass<'tcx> for Ranges {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ match expr.kind {
+ ExprKind::MethodCall(path, args, _) => {
+ check_range_zip_with_len(cx, path, args, expr.span);
+ },
+ ExprKind::Binary(ref op, l, r) => {
+ if meets_msrv(self.msrv, msrvs::RANGE_CONTAINS) {
+ check_possible_range_contains(cx, op.node, l, r, expr, expr.span);
+ }
+ },
+ _ => {},
+ }
+
+ check_exclusive_range_plus_one(cx, expr);
+ check_inclusive_range_minus_one(cx, expr);
+ check_reversed_empty_range(cx, expr);
+ }
+ extract_msrv_attr!(LateContext);
+}
+
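+// Lints `MANUAL_RANGE_CONTAINS` for comparison chains such as `x >= 3 && x < 8` (and their
+// negated `||` forms), suggesting `(3..8).contains(&x)` instead.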
+fn check_possible_range_contains(
+ cx: &LateContext<'_>,
+ op: BinOpKind,
+ left: &Expr<'_>,
+ right: &Expr<'_>,
+ expr: &Expr<'_>,
+ span: Span,
+) {
+ if in_constant(cx, expr.hir_id) {
+ return;
+ }
+
+ let combine_and = match op {
+ BinOpKind::And | BinOpKind::BitAnd => true,
+ BinOpKind::Or | BinOpKind::BitOr => false,
+ _ => return,
+ };
+ // value, name, order (higher/lower), inclusiveness
+ if let (Some(l), Some(r)) = (check_range_bounds(cx, left), check_range_bounds(cx, right)) {
+ // we only lint comparisons on the same name and with different
+ // direction
+ if l.id != r.id || l.ord == r.ord {
+ return;
+ }
+ let ord = Constant::partial_cmp(cx.tcx, cx.typeck_results().expr_ty(l.expr), &l.val, &r.val);
+ if combine_and && ord == Some(r.ord) {
+ // order lower bound and upper bound
+ let (l_span, u_span, l_inc, u_inc) = if r.ord == Ordering::Less {
+ (l.val_span, r.val_span, l.inc, r.inc)
+ } else {
+ (r.val_span, l.val_span, r.inc, l.inc)
+ };
+ // we only lint inclusive lower bounds
+ if !l_inc {
+ return;
+ }
+ let (range_type, range_op) = if u_inc {
+ ("RangeInclusive", "..=")
+ } else {
+ ("Range", "..")
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ let name = snippet_with_applicability(cx, l.name_span, "_", &mut applicability);
+ let lo = snippet_with_applicability(cx, l_span, "_", &mut applicability);
+ let hi = snippet_with_applicability(cx, u_span, "_", &mut applicability);
+ let space = if lo.ends_with('.') { " " } else { "" };
+ span_lint_and_sugg(
+ cx,
+ MANUAL_RANGE_CONTAINS,
+ span,
+ &format!("manual `{}::contains` implementation", range_type),
+ "use",
+ format!("({}{}{}{}).contains(&{})", lo, space, range_op, hi, name),
+ applicability,
+ );
+ } else if !combine_and && ord == Some(l.ord) {
+ // `!_.contains(_)`
+ // order lower bound and upper bound
+ let (l_span, u_span, l_inc, u_inc) = if l.ord == Ordering::Less {
+ (l.val_span, r.val_span, l.inc, r.inc)
+ } else {
+ (r.val_span, l.val_span, r.inc, l.inc)
+ };
+ if l_inc {
+ return;
+ }
+ let (range_type, range_op) = if u_inc {
+ ("Range", "..")
+ } else {
+ ("RangeInclusive", "..=")
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ let name = snippet_with_applicability(cx, l.name_span, "_", &mut applicability);
+ let lo = snippet_with_applicability(cx, l_span, "_", &mut applicability);
+ let hi = snippet_with_applicability(cx, u_span, "_", &mut applicability);
+ let space = if lo.ends_with('.') { " " } else { "" };
+ span_lint_and_sugg(
+ cx,
+ MANUAL_RANGE_CONTAINS,
+ span,
+ &format!("manual `!{}::contains` implementation", range_type),
+ "use",
+ format!("!({}{}{}{}).contains(&{})", lo, space, range_op, hi, name),
+ applicability,
+ );
+ }
+ }
+
+ // If the LHS is the same operator, we have to recurse to get the "real" RHS, since they have
+ // the same operator precedence
+ if_chain! {
+ if let ExprKind::Binary(ref lhs_op, _left, new_lhs) = left.kind;
+ if op == lhs_op.node;
+ let new_span = Span::new(new_lhs.span.lo(), right.span.hi(), expr.span.ctxt(), expr.span.parent());
+ if let Some(snip) = &snippet_opt(cx, new_span);
+ // Do not continue if we have mismatched number of parens, otherwise the suggestion is wrong
+ if snip.matches('(').count() == snip.matches(')').count();
+ then {
+ check_possible_range_contains(cx, op, new_lhs, right, expr, new_span);
+ }
+ }
+}
+
+struct RangeBounds<'a> {
+ val: Constant,
+ expr: &'a Expr<'a>,
+ id: HirId,
+ name_span: Span,
+ val_span: Span,
+ ord: Ordering,
+ inc: bool,
+}
+
+// Takes a binary expression such as `x <= 2` as input and breaks it apart into its pieces:
+// the constant value, the HIR id of the variable, and the direction/inclusiveness of the
+// operator.
+fn check_range_bounds<'a>(cx: &'a LateContext<'_>, ex: &'a Expr<'_>) -> Option<RangeBounds<'a>> {
+ if let ExprKind::Binary(ref op, l, r) = ex.kind {
+ let (inclusive, ordering) = match op.node {
+ BinOpKind::Gt => (false, Ordering::Greater),
+ BinOpKind::Ge => (true, Ordering::Greater),
+ BinOpKind::Lt => (false, Ordering::Less),
+ BinOpKind::Le => (true, Ordering::Less),
+ _ => return None,
+ };
+ if let Some(id) = path_to_local(l) {
+ if let Some((c, _)) = constant(cx, cx.typeck_results(), r) {
+ return Some(RangeBounds {
+ val: c,
+ expr: r,
+ id,
+ name_span: l.span,
+ val_span: r.span,
+ ord: ordering,
+ inc: inclusive,
+ });
+ }
+ } else if let Some(id) = path_to_local(r) {
+ if let Some((c, _)) = constant(cx, cx.typeck_results(), l) {
+ return Some(RangeBounds {
+ val: c,
+ expr: l,
+ id,
+ name_span: r.span,
+ val_span: l.span,
+ ord: ordering.reverse(),
+ inc: inclusive,
+ });
+ }
+ }
+ }
+ None
+}
+
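+// Lints `RANGE_ZIP_WITH_LEN` for `x.iter().zip(0..x.len())`, which reads better as
+// `x.iter().enumerate()`.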
+fn check_range_zip_with_len(cx: &LateContext<'_>, path: &PathSegment<'_>, args: &[Expr<'_>], span: Span) {
+ if_chain! {
+ if path.ident.as_str() == "zip";
+ if let [iter, zip_arg] = args;
+ // `.iter()` call
+ if let ExprKind::MethodCall(iter_path, iter_args, _) = iter.kind;
+ if iter_path.ident.name == sym::iter;
+ // range expression in `.zip()` call: `0..x.len()`
+ if let Some(higher::Range { start: Some(start), end: Some(end), .. }) = higher::Range::hir(zip_arg);
+ if is_integer_const(cx, start, 0);
+ // `.len()` call
+ if let ExprKind::MethodCall(len_path, len_args, _) = end.kind;
+ if len_path.ident.name == sym::len && len_args.len() == 1;
+ // `.iter()` and `.len()` called on same `Path`
+ if let ExprKind::Path(QPath::Resolved(_, iter_path)) = iter_args[0].kind;
+ if let ExprKind::Path(QPath::Resolved(_, len_path)) = len_args[0].kind;
+ if SpanlessEq::new(cx).eq_path_segments(&iter_path.segments, &len_path.segments);
+ then {
+ span_lint(cx,
+ RANGE_ZIP_WITH_LEN,
+ span,
+ &format!("it is more idiomatic to use `{}.iter().enumerate()`",
+ snippet(cx, iter_args[0].span, "_"))
+ );
+ }
+ }
+}
+
+// exclusive range plus one: `x..(y+1)`
+fn check_exclusive_range_plus_one(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let Some(higher::Range {
+ start,
+ end: Some(end),
+ limits: RangeLimits::HalfOpen
+ }) = higher::Range::hir(expr);
+ if let Some(y) = y_plus_one(cx, end);
+ then {
+ let span = if expr.span.from_expansion() {
+ expr.span
+ .ctxt()
+ .outer_expn_data()
+ .call_site
+ } else {
+ expr.span
+ };
+ span_lint_and_then(
+ cx,
+ RANGE_PLUS_ONE,
+ span,
+ "an inclusive range would be more readable",
+ |diag| {
+ let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string());
+ let end = Sugg::hir(cx, y, "y").maybe_par();
+ if let Some(is_wrapped) = &snippet_opt(cx, span) {
+ if is_wrapped.starts_with('(') && is_wrapped.ends_with(')') {
+ diag.span_suggestion(
+ span,
+ "use",
+ format!("({}..={})", start, end),
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ diag.span_suggestion(
+ span,
+ "use",
+ format!("{}..={}", start, end),
+ Applicability::MachineApplicable, // snippet
+ );
+ }
+ }
+ },
+ );
+ }
+ }
+}
+
+// inclusive range minus one: `x..=(y-1)`
+fn check_inclusive_range_minus_one(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if let Some(higher::Range { start, end: Some(end), limits: RangeLimits::Closed }) = higher::Range::hir(expr);
+ if let Some(y) = y_minus_one(cx, end);
+ then {
+ span_lint_and_then(
+ cx,
+ RANGE_MINUS_ONE,
+ expr.span,
+ "an exclusive range would be more readable",
+ |diag| {
+ let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string());
+ let end = Sugg::hir(cx, y, "y").maybe_par();
+ diag.span_suggestion(
+ expr.span,
+ "use",
+ format!("{}..{}", start, end),
+ Applicability::MachineApplicable, // snippet
+ );
+ },
+ );
+ }
+ }
+}
+
+fn check_reversed_empty_range(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ fn inside_indexing_expr(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ matches!(
+ get_parent_expr(cx, expr),
+ Some(Expr {
+ kind: ExprKind::Index(..),
+ ..
+ })
+ )
+ }
+
+ fn is_for_loop_arg(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let mut cur_expr = expr;
+ while let Some(parent_expr) = get_parent_expr(cx, cur_expr) {
+ match higher::ForLoop::hir(parent_expr) {
+ Some(higher::ForLoop { arg, .. }) if arg.hir_id == expr.hir_id => return true,
+ _ => cur_expr = parent_expr,
+ }
+ }
+
+ false
+ }
+
+ fn is_empty_range(limits: RangeLimits, ordering: Ordering) -> bool {
+ match limits {
+ RangeLimits::HalfOpen => ordering != Ordering::Less,
+ RangeLimits::Closed => ordering == Ordering::Greater,
+ }
+ }
+
+ if_chain! {
+ if let Some(higher::Range { start: Some(start), end: Some(end), limits }) = higher::Range::hir(expr);
+ let ty = cx.typeck_results().expr_ty(start);
+ if let ty::Int(_) | ty::Uint(_) = ty.kind();
+ if let Some((start_idx, _)) = constant(cx, cx.typeck_results(), start);
+ if let Some((end_idx, _)) = constant(cx, cx.typeck_results(), end);
+ if let Some(ordering) = Constant::partial_cmp(cx.tcx, ty, &start_idx, &end_idx);
+ if is_empty_range(limits, ordering);
+ then {
+ if inside_indexing_expr(cx, expr) {
+ // Avoid linting `N..N` as it has proven to be useful, see #5689 and #5628 ...
+ if ordering != Ordering::Equal {
+ span_lint(
+ cx,
+ REVERSED_EMPTY_RANGES,
+ expr.span,
+ "this range is reversed and using it to index a slice will panic at run-time",
+ );
+ }
+ // ... except in for loop arguments for backwards compatibility with `reverse_range_loop`
+ } else if ordering != Ordering::Equal || is_for_loop_arg(cx, expr) {
+ span_lint_and_then(
+ cx,
+ REVERSED_EMPTY_RANGES,
+ expr.span,
+ "this range is empty so it will yield no values",
+ |diag| {
+ if ordering != Ordering::Equal {
+ let start_snippet = snippet(cx, start.span, "_");
+ let end_snippet = snippet(cx, end.span, "_");
+ let dots = match limits {
+ RangeLimits::HalfOpen => "..",
+ RangeLimits::Closed => "..="
+ };
+
+ diag.span_suggestion(
+ expr.span,
+ "consider using the following if you are attempting to iterate over this \
+ range in reverse",
+ format!("({}{}{}).rev()", end_snippet, dots, start_snippet),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ },
+ );
+ }
+ }
+ }
+}
+
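+// If `expr` is `y + 1` or `1 + y`, returns `y`.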
+fn y_plus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<'t>> {
+ match expr.kind {
+ ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ lhs,
+ rhs,
+ ) => {
+ if is_integer_const(cx, lhs, 1) {
+ Some(rhs)
+ } else if is_integer_const(cx, rhs, 1) {
+ Some(lhs)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
+
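+// If `expr` is `y - 1`, returns `y`.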
+fn y_minus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<'t>> {
+ match expr.kind {
+ ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Sub, ..
+ },
+ lhs,
+ rhs,
+ ) if is_integer_const(cx, rhs, 1) => Some(lhs),
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
new file mode 100644
index 000000000..8db8c4e9b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
@@ -0,0 +1,139 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher::VecArgs;
+use clippy_utils::last_path_segment;
+use clippy_utils::macros::root_macro_call_first_node;
+use clippy_utils::paths;
+use clippy_utils::source::{indent_of, snippet};
+use clippy_utils::ty::match_type;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, QPath, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for reference-counted pointers (`Arc`, `Rc`, `rc::Weak`, and `sync::Weak`)
+ /// in `vec![elem; len]`
+ ///
+ /// ### Why is this bad?
+ /// This will create `elem` once and clone it `len` times - doing so with `Arc`/`Rc`/`Weak`
+ /// is a bit misleading, as it will create references to the same pointer, rather
+ /// than different instances.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let v = vec![std::sync::Arc::new("some data".to_string()); 100];
+ /// // or
+ /// let v = vec![std::rc::Rc::new("some data".to_string()); 100];
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// // Initialize each value separately:
+ /// let mut data = Vec::with_capacity(100);
+ /// for _ in 0..100 {
+ /// data.push(std::rc::Rc::new("some data".to_string()));
+ /// }
+ ///
+ /// // Or if you want clones of the same reference,
+ /// // Create the reference beforehand to clarify that
+ /// // it should be cloned for each value
+ /// let data = std::rc::Rc::new("some data".to_string());
+ /// let v = vec![data; 100];
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub RC_CLONE_IN_VEC_INIT,
+ suspicious,
+ "initializing reference-counted pointer in `vec![elem; len]`"
+}
+declare_lint_pass!(RcCloneInVecInit => [RC_CLONE_IN_VEC_INIT]);
+
+impl LateLintPass<'_> for RcCloneInVecInit {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return; };
+ let Some(VecArgs::Repeat(elem, len)) = VecArgs::hir(cx, expr) else { return; };
+ let Some((symbol, func_span)) = ref_init(cx, elem) else { return; };
+
+ emit_lint(cx, symbol, macro_call.span, elem, len, func_span);
+ }
+}
+
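+/// Builds the suggestion block that pre-allocates a `Vec` and pushes a freshly created element
+/// for each of the `len` slots.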
+fn loop_init_suggestion(elem: &str, len: &str, indent: &str) -> String {
+ format!(
+ r#"{{
+{indent} let mut v = Vec::with_capacity({len});
+{indent} (0..{len}).for_each(|_| v.push({elem}));
+{indent} v
+{indent}}}"#
+ )
+}
+
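+/// Builds the suggestion block that creates the element once as `data` and clones it via
+/// `vec![data; len]`.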
+fn extract_suggestion(elem: &str, len: &str, indent: &str) -> String {
+ format!(
+ "{{
+{indent} let data = {elem};
+{indent} vec![data; {len}]
+{indent}}}"
+ )
+}
+
+fn emit_lint(cx: &LateContext<'_>, symbol: Symbol, lint_span: Span, elem: &Expr<'_>, len: &Expr<'_>, func_span: Span) {
+ let symbol_name = symbol.as_str();
+
+ span_lint_and_then(
+ cx,
+ RC_CLONE_IN_VEC_INIT,
+ lint_span,
+ "initializing a reference-counted pointer in `vec![elem; len]`",
+ |diag| {
+ let len_snippet = snippet(cx, len.span, "..");
+ let elem_snippet = format!("{}(..)", snippet(cx, elem.span.with_hi(func_span.hi()), ".."));
+ let indentation = " ".repeat(indent_of(cx, lint_span).unwrap_or(0));
+ let loop_init_suggestion = loop_init_suggestion(&elem_snippet, len_snippet.as_ref(), &indentation);
+ let extract_suggestion = extract_suggestion(&elem_snippet, len_snippet.as_ref(), &indentation);
+
+ diag.note(format!("each element will point to the same `{symbol_name}` instance"));
+ diag.span_suggestion(
+ lint_span,
+ format!("consider initializing each `{symbol_name}` element individually"),
+ loop_init_suggestion,
+ Applicability::HasPlaceholders,
+ );
+ diag.span_suggestion(
+ lint_span,
+ format!(
+ "or if this is intentional, consider extracting the `{symbol_name}` initialization to a variable"
+ ),
+ extract_suggestion,
+ Applicability::HasPlaceholders,
+ );
+ },
+ );
+}
+
+/// Checks whether the given `expr` is a call to `Arc::new` or `Rc::new`, or whether it evaluates to a `Weak`.
+fn ref_init(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<(Symbol, Span)> {
+ if_chain! {
+ if let ExprKind::Call(func, _args) = expr.kind;
+ if let ExprKind::Path(ref func_path @ QPath::TypeRelative(ty, _)) = func.kind;
+ if let TyKind::Path(ref ty_path) = ty.kind;
+ if let Some(def_id) = cx.qpath_res(ty_path, ty.hir_id).opt_def_id();
+
+ then {
+ if last_path_segment(func_path).ident.name == sym::new
+ && let Some(symbol) = cx
+ .tcx
+ .get_diagnostic_name(def_id)
+ .filter(|symbol| symbol == &sym::Arc || symbol == &sym::Rc) {
+ return Some((symbol, func.span));
+ }
+
+ let ty_path = cx.typeck_results().expr_ty(expr);
+ if match_type(cx, ty_path, &paths::WEAK_RC) || match_type(cx, ty_path, &paths::WEAK_ARC) {
+ return Some((Symbol::intern("Weak"), func.span));
+ }
+ }
+ }
+
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
new file mode 100644
index 000000000..9538a8104
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
@@ -0,0 +1,142 @@
+use clippy_utils::{
+ diagnostics::{span_lint, span_lint_and_sugg},
+ higher::{get_vec_init_kind, VecInitKind},
+ source::snippet,
+ visitors::expr_visitor_no_bodies,
+};
+use hir::{intravisit::Visitor, ExprKind, Local, PatKind, PathSegment, QPath, StmtKind};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint catches reads into a zero-length `Vec`.
+    /// Especially in the case of a call to `with_capacity`, this lint warns that `read`
+    /// determines the number of bytes to read from the `Vec`'s length, not its capacity.
+ ///
+ /// ### Why is this bad?
+ /// Reading zero bytes is almost certainly not the intended behavior.
+ ///
+ /// ### Known problems
+ /// In theory, a very unusual read implementation could assign some semantic meaning
+ /// to zero-byte reads. But it seems exceptionally unlikely that code intending to do
+ /// a zero-byte read would allocate a `Vec` for it.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::io;
+ /// fn foo<F: io::Read>(mut f: F) {
+ /// let mut data = Vec::with_capacity(100);
+ /// f.read(&mut data).unwrap();
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::io;
+ /// fn foo<F: io::Read>(mut f: F) {
+ /// let mut data = Vec::with_capacity(100);
+ /// data.resize(100, 0);
+ /// f.read(&mut data).unwrap();
+ /// }
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub READ_ZERO_BYTE_VEC,
+ correctness,
+ "checks for reads into a zero-length `Vec`"
+}
+declare_lint_pass!(ReadZeroByteVec => [READ_ZERO_BYTE_VEC]);
+
+impl<'tcx> LateLintPass<'tcx> for ReadZeroByteVec {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &hir::Block<'tcx>) {
+ for (idx, stmt) in block.stmts.iter().enumerate() {
+ if !stmt.span.from_expansion()
+ // matches `let v = Vec::new();`
+ && let StmtKind::Local(local) = stmt.kind
+ && let Local { pat, init: Some(init), .. } = local
+ && let PatKind::Binding(_, _, ident, _) = pat.kind
+ && let Some(vec_init_kind) = get_vec_init_kind(cx, init)
+ {
+ // finds use of `_.read(&mut v)`
+ let mut read_found = false;
+ let mut visitor = expr_visitor_no_bodies(|expr| {
+ if let ExprKind::MethodCall(path, [_self, arg], _) = expr.kind
+ && let PathSegment { ident: read_or_read_exact, .. } = *path
+ && matches!(read_or_read_exact.as_str(), "read" | "read_exact")
+ && let ExprKind::AddrOf(_, hir::Mutability::Mut, inner) = arg.kind
+ && let ExprKind::Path(QPath::Resolved(None, inner_path)) = inner.kind
+ && let [inner_seg] = inner_path.segments
+ && ident.name == inner_seg.ident.name
+ {
+ read_found = true;
+ }
+ !read_found
+ });
+
+ let next_stmt_span;
+ if idx == block.stmts.len() - 1 {
+ // case { .. stmt; expr }
+ if let Some(e) = block.expr {
+ visitor.visit_expr(e);
+ next_stmt_span = e.span;
+ } else {
+ return;
+ }
+ } else {
+ // case { .. stmt; stmt; .. }
+ let next_stmt = &block.stmts[idx + 1];
+ visitor.visit_stmt(next_stmt);
+ next_stmt_span = next_stmt.span;
+ }
+ drop(visitor);
+
+ if read_found && !next_stmt_span.from_expansion() {
+ let applicability = Applicability::MaybeIncorrect;
+ match vec_init_kind {
+ VecInitKind::WithConstCapacity(len) => {
+ span_lint_and_sugg(
+ cx,
+ READ_ZERO_BYTE_VEC,
+ next_stmt_span,
+ "reading zero byte data to `Vec`",
+ "try",
+ format!("{}.resize({}, 0); {}",
+ ident.as_str(),
+ len,
+ snippet(cx, next_stmt_span, "..")
+ ),
+ applicability,
+ );
+ }
+ VecInitKind::WithExprCapacity(hir_id) => {
+ let e = cx.tcx.hir().expect_expr(hir_id);
+ span_lint_and_sugg(
+ cx,
+ READ_ZERO_BYTE_VEC,
+ next_stmt_span,
+ "reading zero byte data to `Vec`",
+ "try",
+ format!("{}.resize({}, 0); {}",
+ ident.as_str(),
+ snippet(cx, e.span, ".."),
+ snippet(cx, next_stmt_span, "..")
+ ),
+ applicability,
+ );
+ }
+ _ => {
+ span_lint(
+ cx,
+ READ_ZERO_BYTE_VEC,
+ next_stmt_span,
+ "reading zero byte data to `Vec`",
+ );
+
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_clone.rs b/src/tools/clippy/clippy_lints/src/redundant_clone.rs
new file mode 100644
index 000000000..eddca6045
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_clone.rs
@@ -0,0 +1,776 @@
+use clippy_utils::diagnostics::{span_lint_hir, span_lint_hir_and_then};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::{has_drop, is_copy, is_type_diagnostic_item, walk_ptrs_ty_depth};
+use clippy_utils::{fn_has_unsatisfiable_preds, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{def_id, Body, FnDecl, HirId};
+use rustc_index::bit_set::{BitSet, HybridBitSet};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::mir::{
+ self, traversal,
+ visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor as _},
+ Mutability,
+};
+use rustc_middle::ty::{self, visit::TypeVisitor, Ty};
+use rustc_mir_dataflow::{Analysis, AnalysisDomain, CallReturnPlaces, GenKill, GenKillAnalysis, ResultsCursor};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::{BytePos, Span};
+use rustc_span::sym;
+use std::ops::ControlFlow;
+
+macro_rules! unwrap_or_continue {
+ ($x:expr) => {
+ match $x {
+ Some(x) => x,
+ None => continue,
+ }
+ };
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for a redundant `clone()` (and its relatives) which clones an owned
+ /// value that is going to be dropped without further use.
+ ///
+ /// ### Why is this bad?
+ /// It is not always possible for the compiler to eliminate useless
+ /// allocations and deallocations generated by redundant `clone()`s.
+ ///
+    /// False negatives: the analysis performed by this lint is conservative and limited.
+ /// False-negatives: analysis performed by this lint is conservative and limited.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::path::Path;
+ /// # #[derive(Clone)]
+ /// # struct Foo;
+ /// # impl Foo {
+ /// # fn new() -> Self { Foo {} }
+ /// # }
+ /// # fn call(x: Foo) {}
+ /// {
+ /// let x = Foo::new();
+ /// call(x.clone());
+ /// call(x.clone()); // this can just pass `x`
+ /// }
+ ///
+ /// ["lorem", "ipsum"].join(" ").to_string();
+ ///
+ /// Path::new("/a/b").join("c").to_path_buf();
+ /// ```
+ #[clippy::version = "1.32.0"]
+ pub REDUNDANT_CLONE,
+ perf,
+ "`clone()` of an owned value that is going to be dropped immediately"
+}
+
+declare_lint_pass!(RedundantClone => [REDUNDANT_CLONE]);
+
+impl<'tcx> LateLintPass<'tcx> for RedundantClone {
+ #[expect(clippy::too_many_lines)]
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ _: FnKind<'tcx>,
+ _: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ _: Span,
+ _: HirId,
+ ) {
+ let def_id = cx.tcx.hir().body_owner_def_id(body.id());
+
+ // Building MIR for `fn`s with unsatisfiable preds results in ICE.
+ if fn_has_unsatisfiable_preds(cx, def_id.to_def_id()) {
+ return;
+ }
+
+ let mir = cx.tcx.optimized_mir(def_id.to_def_id());
+
+ let possible_origin = {
+ let mut vis = PossibleOriginVisitor::new(mir);
+ vis.visit_body(mir);
+ vis.into_map(cx)
+ };
+ let maybe_storage_live_result = MaybeStorageLive
+ .into_engine(cx.tcx, mir)
+ .pass_name("redundant_clone")
+ .iterate_to_fixpoint()
+ .into_results_cursor(mir);
+ let mut possible_borrower = {
+ let mut vis = PossibleBorrowerVisitor::new(cx, mir, possible_origin);
+ vis.visit_body(mir);
+ vis.into_map(cx, maybe_storage_live_result)
+ };
+
+ for (bb, bbdata) in mir.basic_blocks().iter_enumerated() {
+ let terminator = bbdata.terminator();
+
+ if terminator.source_info.span.from_expansion() {
+ continue;
+ }
+
+ // Give up on loops
+ if terminator.successors().any(|s| s == bb) {
+ continue;
+ }
+
+ let (fn_def_id, arg, arg_ty, clone_ret) =
+ unwrap_or_continue!(is_call_with_ref_arg(cx, mir, &terminator.kind));
+
+ let from_borrow = match_def_path(cx, fn_def_id, &paths::CLONE_TRAIT_METHOD)
+ || match_def_path(cx, fn_def_id, &paths::TO_OWNED_METHOD)
+ || (match_def_path(cx, fn_def_id, &paths::TO_STRING_METHOD)
+ && is_type_diagnostic_item(cx, arg_ty, sym::String));
+
+ let from_deref = !from_borrow
+ && (match_def_path(cx, fn_def_id, &paths::PATH_TO_PATH_BUF)
+ || match_def_path(cx, fn_def_id, &paths::OS_STR_TO_OS_STRING));
+
+ if !from_borrow && !from_deref {
+ continue;
+ }
+
+ if let ty::Adt(def, _) = arg_ty.kind() {
+ if def.is_manually_drop() {
+ continue;
+ }
+ }
+
+ // `{ arg = &cloned; clone(move arg); }` or `{ arg = &cloned; to_path_buf(arg); }`
+ let (cloned, cannot_move_out) = unwrap_or_continue!(find_stmt_assigns_to(cx, mir, arg, from_borrow, bb));
+
+ let loc = mir::Location {
+ block: bb,
+ statement_index: bbdata.statements.len(),
+ };
+
+ // `Local` to be cloned, and a local of `clone` call's destination
+ let (local, ret_local) = if from_borrow {
+ // `res = clone(arg)` can be turned into `res = move arg;`
+ // if `arg` is the only borrow of `cloned` at this point.
+
+ if cannot_move_out || !possible_borrower.only_borrowers(&[arg], cloned, loc) {
+ continue;
+ }
+
+ (cloned, clone_ret)
+ } else {
+ // `arg` is a reference as it is `.deref()`ed in the previous block.
+ // Look into the predecessor block and find out the source of deref.
+
+ let ps = &mir.basic_blocks.predecessors()[bb];
+ if ps.len() != 1 {
+ continue;
+ }
+ let pred_terminator = mir[ps[0]].terminator();
+
+ // receiver of the `deref()` call
+ let (pred_arg, deref_clone_ret) = if_chain! {
+ if let Some((pred_fn_def_id, pred_arg, pred_arg_ty, res)) =
+ is_call_with_ref_arg(cx, mir, &pred_terminator.kind);
+ if res == cloned;
+ if cx.tcx.is_diagnostic_item(sym::deref_method, pred_fn_def_id);
+ if is_type_diagnostic_item(cx, pred_arg_ty, sym::PathBuf)
+ || is_type_diagnostic_item(cx, pred_arg_ty, sym::OsString);
+ then {
+ (pred_arg, res)
+ } else {
+ continue;
+ }
+ };
+
+ let (local, cannot_move_out) =
+ unwrap_or_continue!(find_stmt_assigns_to(cx, mir, pred_arg, true, ps[0]));
+ let loc = mir::Location {
+ block: bb,
+ statement_index: mir.basic_blocks()[bb].statements.len(),
+ };
+
+ // This can be turned into `res = move local` if `arg` and `cloned` are not borrowed
+ // at the last statement:
+ //
+ // ```
+ // pred_arg = &local;
+ // cloned = deref(pred_arg);
+ // arg = &cloned;
+ // StorageDead(pred_arg);
+ // res = to_path_buf(cloned);
+ // ```
+ if cannot_move_out || !possible_borrower.only_borrowers(&[arg, cloned], local, loc) {
+ continue;
+ }
+
+ (local, deref_clone_ret)
+ };
+
+ let clone_usage = if local == ret_local {
+ CloneUsage {
+ cloned_used: false,
+ cloned_consume_or_mutate_loc: None,
+ clone_consumed_or_mutated: true,
+ }
+ } else {
+ let clone_usage = visit_clone_usage(local, ret_local, mir, bb);
+ if clone_usage.cloned_used && clone_usage.clone_consumed_or_mutated {
+ // cloned value is used, and the clone is modified or moved
+ continue;
+ } else if let Some(loc) = clone_usage.cloned_consume_or_mutate_loc {
+ // cloned value is mutated, and the clone is alive.
+ if possible_borrower.local_is_alive_at(ret_local, loc) {
+ continue;
+ }
+ }
+ clone_usage
+ };
+
+ let span = terminator.source_info.span;
+ let scope = terminator.source_info.scope;
+ let node = mir.source_scopes[scope]
+ .local_data
+ .as_ref()
+ .assert_crate_local()
+ .lint_root;
+
+ if_chain! {
+ if let Some(snip) = snippet_opt(cx, span);
+ if let Some(dot) = snip.rfind('.');
+ then {
+ let sugg_span = span.with_lo(
+ span.lo() + BytePos(u32::try_from(dot).unwrap())
+ );
+ let mut app = Applicability::MaybeIncorrect;
+
+ let call_snip = &snip[dot + 1..];
+ // Machine applicable when `call_snip` looks like `foobar()`
+ if let Some(call_snip) = call_snip.strip_suffix("()").map(str::trim) {
+ if call_snip.as_bytes().iter().all(|b| b.is_ascii_alphabetic() || *b == b'_') {
+ app = Applicability::MachineApplicable;
+ }
+ }
+
+ span_lint_hir_and_then(cx, REDUNDANT_CLONE, node, sugg_span, "redundant clone", |diag| {
+ diag.span_suggestion(
+ sugg_span,
+ "remove this",
+ "",
+ app,
+ );
+ if clone_usage.cloned_used {
+ diag.span_note(
+ span,
+ "cloned value is neither consumed nor mutated",
+ );
+ } else {
+ diag.span_note(
+ span.with_hi(span.lo() + BytePos(u32::try_from(dot).unwrap())),
+ "this value is dropped without further use",
+ );
+ }
+ });
+ } else {
+ span_lint_hir(cx, REDUNDANT_CLONE, node, span, "redundant clone");
+ }
+ }
+ }
+ }
+}
+
+/// If `kind` is `y = func(x: &T)` where `T: !Copy`, returns `(DefId of func, x, T, y)`.
+fn is_call_with_ref_arg<'tcx>(
+ cx: &LateContext<'tcx>,
+ mir: &'tcx mir::Body<'tcx>,
+ kind: &'tcx mir::TerminatorKind<'tcx>,
+) -> Option<(def_id::DefId, mir::Local, Ty<'tcx>, mir::Local)> {
+ if_chain! {
+ if let mir::TerminatorKind::Call { func, args, destination, .. } = kind;
+ if args.len() == 1;
+ if let mir::Operand::Move(mir::Place { local, .. }) = &args[0];
+ if let ty::FnDef(def_id, _) = *func.ty(mir, cx.tcx).kind();
+ if let (inner_ty, 1) = walk_ptrs_ty_depth(args[0].ty(mir, cx.tcx));
+ if !is_copy(cx, inner_ty);
+ then {
+ Some((def_id, *local, inner_ty, destination.as_local()?))
+ } else {
+ None
+ }
+ }
+}
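+
+// Illustrative MIR shape accepted by `is_call_with_ref_arg` (local numbering here is
+// hypothetical):
+//
+//     _3 = &_2;                                // `x: &T`, with `T = String` and `_2: String`
+//     _4 = <String as Clone>::clone(move _3);  // `y = func(x)`
+//
+// For such a call it would return `Some((DefId of clone, _3, String, _4))`.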
+
+type CannotMoveOut = bool;
+
+/// Finds the first `to = (&)from`, and returns
+/// ``Some((from, whether `from` cannot be moved out))``.
+fn find_stmt_assigns_to<'tcx>(
+ cx: &LateContext<'tcx>,
+ mir: &mir::Body<'tcx>,
+ to_local: mir::Local,
+ by_ref: bool,
+ bb: mir::BasicBlock,
+) -> Option<(mir::Local, CannotMoveOut)> {
+ let rvalue = mir.basic_blocks()[bb].statements.iter().rev().find_map(|stmt| {
+ if let mir::StatementKind::Assign(box (mir::Place { local, .. }, v)) = &stmt.kind {
+ return if *local == to_local { Some(v) } else { None };
+ }
+
+ None
+ })?;
+
+ match (by_ref, rvalue) {
+ (true, mir::Rvalue::Ref(_, _, place)) | (false, mir::Rvalue::Use(mir::Operand::Copy(place))) => {
+ Some(base_local_and_movability(cx, mir, *place))
+ },
+ (false, mir::Rvalue::Ref(_, _, place)) => {
+ if let [mir::ProjectionElem::Deref] = place.as_ref().projection {
+ Some(base_local_and_movability(cx, mir, *place))
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
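+
+// Illustrative input for `find_stmt_assigns_to` (hypothetical locals): if the block contains the
+// statement `_2 = &_1;` and `_1` has no problematic projections, a call with `to_local = _2` and
+// `by_ref = true` would return `Some((_1, false))`, i.e. the source local and "can be moved out".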
+
+/// Extracts and returns the innermost base `Local` of the given `place`. Returns `place` itself
+/// if it is already a `Local`.
+///
+/// Also reports whether the given `place` cannot be moved out of.
+fn base_local_and_movability<'tcx>(
+ cx: &LateContext<'tcx>,
+ mir: &mir::Body<'tcx>,
+ place: mir::Place<'tcx>,
+) -> (mir::Local, CannotMoveOut) {
+ use rustc_middle::mir::PlaceRef;
+
+ // Dereference. You cannot move things out from a borrowed value.
+ let mut deref = false;
+ // Accessing a field of an ADT that has `Drop`. Moving the field out will cause E0509.
+ let mut field = false;
+ // If the projection is a slice index, the clone can be removed only if the
+ // underlying type implements `Copy`.
+ let mut slice = false;
+
+ let PlaceRef { local, mut projection } = place.as_ref();
+ while let [base @ .., elem] = projection {
+ projection = base;
+ deref |= matches!(elem, mir::ProjectionElem::Deref);
+ field |= matches!(elem, mir::ProjectionElem::Field(..))
+ && has_drop(cx, mir::Place::ty_from(local, projection, &mir.local_decls, cx.tcx).ty);
+ slice |= matches!(elem, mir::ProjectionElem::Index(..))
+ && !is_copy(cx, mir::Place::ty_from(local, projection, &mir.local_decls, cx.tcx).ty);
+ }
+
+ (local, deref || field || slice)
+}
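+
+// For example (hypothetical places): `(*_1).0` yields `(_1, true)` because of the dereference,
+// while a plain `_1` yields `(_1, false)`.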
+
+#[derive(Default)]
+struct CloneUsage {
+ /// Whether the cloned value is used after the clone.
+ cloned_used: bool,
+ /// The first location where the cloned value is consumed or mutated, if any.
+ cloned_consume_or_mutate_loc: Option<mir::Location>,
+ /// Whether the clone itself is consumed or mutated.
+ clone_consumed_or_mutated: bool,
+}
+fn visit_clone_usage(cloned: mir::Local, clone: mir::Local, mir: &mir::Body<'_>, bb: mir::BasicBlock) -> CloneUsage {
+ struct V {
+ cloned: mir::Local,
+ clone: mir::Local,
+ result: CloneUsage,
+ }
+ impl<'tcx> mir::visit::Visitor<'tcx> for V {
+ fn visit_basic_block_data(&mut self, block: mir::BasicBlock, data: &mir::BasicBlockData<'tcx>) {
+ let statements = &data.statements;
+ for (statement_index, statement) in statements.iter().enumerate() {
+ self.visit_statement(statement, mir::Location { block, statement_index });
+ }
+
+ self.visit_terminator(
+ data.terminator(),
+ mir::Location {
+ block,
+ statement_index: statements.len(),
+ },
+ );
+ }
+
+ fn visit_place(&mut self, place: &mir::Place<'tcx>, ctx: PlaceContext, loc: mir::Location) {
+ let local = place.local;
+
+ if local == self.cloned
+ && !matches!(
+ ctx,
+ PlaceContext::MutatingUse(MutatingUseContext::Drop) | PlaceContext::NonUse(_)
+ )
+ {
+ self.result.cloned_used = true;
+ self.result.cloned_consume_or_mutate_loc = self.result.cloned_consume_or_mutate_loc.or_else(|| {
+ matches!(
+ ctx,
+ PlaceContext::NonMutatingUse(NonMutatingUseContext::Move)
+ | PlaceContext::MutatingUse(MutatingUseContext::Borrow)
+ )
+ .then(|| loc)
+ });
+ } else if local == self.clone {
+ match ctx {
+ PlaceContext::NonMutatingUse(NonMutatingUseContext::Move)
+ | PlaceContext::MutatingUse(MutatingUseContext::Borrow) => {
+ self.result.clone_consumed_or_mutated = true;
+ },
+ _ => {},
+ }
+ }
+ }
+ }
+
+ let init = CloneUsage {
+ cloned_used: false,
+ cloned_consume_or_mutate_loc: None,
+ // Consider non-temporary clones consumed.
+ // TODO: Actually check for mutation of non-temporaries.
+ clone_consumed_or_mutated: mir.local_kind(clone) != mir::LocalKind::Temp,
+ };
+ traversal::ReversePostorder::new(mir, bb)
+ .skip(1)
+ .fold(init, |usage, (tbb, tdata)| {
+ // Short-circuit
+ if (usage.cloned_used && usage.clone_consumed_or_mutated) ||
+ // Give up on loops
+ tdata.terminator().successors().any(|s| s == bb)
+ {
+ return CloneUsage {
+ cloned_used: true,
+ clone_consumed_or_mutated: true,
+ ..usage
+ };
+ }
+
+ let mut v = V {
+ cloned,
+ clone,
+ result: usage,
+ };
+ v.visit_basic_block_data(tbb, tdata);
+ v.result
+ })
+}
+
+/// Determines liveness of each local purely based on `StorageLive`/`Dead`.
+#[derive(Copy, Clone)]
+struct MaybeStorageLive;
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeStorageLive {
+ type Domain = BitSet<mir::Local>;
+ const NAME: &'static str = "maybe_storage_live";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = dead
+ BitSet::new_empty(body.local_decls.len())
+ }
+
+ fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ for arg in body.args_iter() {
+ state.insert(arg);
+ }
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeStorageLive {
+ type Idx = mir::Local;
+
+ fn statement_effect(&self, trans: &mut impl GenKill<Self::Idx>, stmt: &mir::Statement<'tcx>, _: mir::Location) {
+ match stmt.kind {
+ mir::StatementKind::StorageLive(l) => trans.gen(l),
+ mir::StatementKind::StorageDead(l) => trans.kill(l),
+ _ => (),
+ }
+ }
+
+ fn terminator_effect(
+ &self,
+ _trans: &mut impl GenKill<Self::Idx>,
+ _terminator: &mir::Terminator<'tcx>,
+ _loc: mir::Location,
+ ) {
+ }
+
+ fn call_return_effect(
+ &self,
+ _trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ _return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ // Nothing to do when a call returns successfully
+ }
+}
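+
+// Illustrative effect of the analysis (hypothetical MIR): given
+//
+//     StorageLive(_2);
+//     _2 = &_1;
+//     StorageDead(_2);
+//
+// `_2` is in the live set from `StorageLive(_2)` up to `StorageDead(_2)` and absent afterwards;
+// function arguments start out live (see `initialize_start_block` above).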
+
+/// Collects the possible borrowers of each local.
+/// For example, `b = &a; c = &b;` will make `b` and (transitively) `c`
+/// possible borrowers of `a`.
+struct PossibleBorrowerVisitor<'a, 'tcx> {
+ possible_borrower: TransitiveRelation,
+ body: &'a mir::Body<'tcx>,
+ cx: &'a LateContext<'tcx>,
+ possible_origin: FxHashMap<mir::Local, HybridBitSet<mir::Local>>,
+}
+
+impl<'a, 'tcx> PossibleBorrowerVisitor<'a, 'tcx> {
+ fn new(
+ cx: &'a LateContext<'tcx>,
+ body: &'a mir::Body<'tcx>,
+ possible_origin: FxHashMap<mir::Local, HybridBitSet<mir::Local>>,
+ ) -> Self {
+ Self {
+ possible_borrower: TransitiveRelation::default(),
+ cx,
+ body,
+ possible_origin,
+ }
+ }
+
+ fn into_map(
+ self,
+ cx: &LateContext<'tcx>,
+ maybe_live: ResultsCursor<'tcx, 'tcx, MaybeStorageLive>,
+ ) -> PossibleBorrowerMap<'a, 'tcx> {
+ let mut map = FxHashMap::default();
+ for row in (1..self.body.local_decls.len()).map(mir::Local::from_usize) {
+ if is_copy(cx, self.body.local_decls[row].ty) {
+ continue;
+ }
+
+ let mut borrowers = self.possible_borrower.reachable_from(row, self.body.local_decls.len());
+ borrowers.remove(mir::Local::from_usize(0));
+ if !borrowers.is_empty() {
+ map.insert(row, borrowers);
+ }
+ }
+
+ let bs = BitSet::new_empty(self.body.local_decls.len());
+ PossibleBorrowerMap {
+ map,
+ maybe_live,
+ bitset: (bs.clone(), bs),
+ }
+ }
+}
+
+impl<'a, 'tcx> mir::visit::Visitor<'tcx> for PossibleBorrowerVisitor<'a, 'tcx> {
+ fn visit_assign(&mut self, place: &mir::Place<'tcx>, rvalue: &mir::Rvalue<'_>, _location: mir::Location) {
+ let lhs = place.local;
+ match rvalue {
+ mir::Rvalue::Ref(_, _, borrowed) => {
+ self.possible_borrower.add(borrowed.local, lhs);
+ },
+ other => {
+ if ContainsRegion
+ .visit_ty(place.ty(&self.body.local_decls, self.cx.tcx).ty)
+ .is_continue()
+ {
+ return;
+ }
+ rvalue_locals(other, |rhs| {
+ if lhs != rhs {
+ self.possible_borrower.add(rhs, lhs);
+ }
+ });
+ },
+ }
+ }
+
+ fn visit_terminator(&mut self, terminator: &mir::Terminator<'_>, _loc: mir::Location) {
+ if let mir::TerminatorKind::Call {
+ args,
+ destination: mir::Place { local: dest, .. },
+ ..
+ } = &terminator.kind
+ {
+ // TODO add doc
+ // If the call returns something with lifetimes,
+ // let's conservatively assume the returned value contains lifetime of all the arguments.
+ // For example, given `let y: Foo<'a> = foo(x)`, `y` is considered to be a possible borrower of `x`.
+
+ let mut immutable_borrowers = vec![];
+ let mut mutable_borrowers = vec![];
+
+ for op in args {
+ match op {
+ mir::Operand::Copy(p) | mir::Operand::Move(p) => {
+ if let ty::Ref(_, _, Mutability::Mut) = self.body.local_decls[p.local].ty.kind() {
+ mutable_borrowers.push(p.local);
+ } else {
+ immutable_borrowers.push(p.local);
+ }
+ },
+ mir::Operand::Constant(..) => (),
+ }
+ }
+
+ let mut mutable_variables: Vec<mir::Local> = mutable_borrowers
+ .iter()
+ .filter_map(|r| self.possible_origin.get(r))
+ .flat_map(HybridBitSet::iter)
+ .collect();
+
+ if ContainsRegion.visit_ty(self.body.local_decls[*dest].ty).is_break() {
+ mutable_variables.push(*dest);
+ }
+
+ for y in mutable_variables {
+ for x in &immutable_borrowers {
+ self.possible_borrower.add(*x, y);
+ }
+ for x in &mutable_borrowers {
+ self.possible_borrower.add(*x, y);
+ }
+ }
+ }
+ }
+}
+
+/// Collects the possible origins (borrowed locals) of every `&mut` local.
+/// For example, `_1 = &mut _2` generates `_1: {_2, ...}`.
+/// Known problems: it is not certain that every borrow is tracked.
+struct PossibleOriginVisitor<'a, 'tcx> {
+ possible_origin: TransitiveRelation,
+ body: &'a mir::Body<'tcx>,
+}
+
+impl<'a, 'tcx> PossibleOriginVisitor<'a, 'tcx> {
+ fn new(body: &'a mir::Body<'tcx>) -> Self {
+ Self {
+ possible_origin: TransitiveRelation::default(),
+ body,
+ }
+ }
+
+ fn into_map(self, cx: &LateContext<'tcx>) -> FxHashMap<mir::Local, HybridBitSet<mir::Local>> {
+ let mut map = FxHashMap::default();
+ for row in (1..self.body.local_decls.len()).map(mir::Local::from_usize) {
+ if is_copy(cx, self.body.local_decls[row].ty) {
+ continue;
+ }
+
+ let mut borrowers = self.possible_origin.reachable_from(row, self.body.local_decls.len());
+ borrowers.remove(mir::Local::from_usize(0));
+ if !borrowers.is_empty() {
+ map.insert(row, borrowers);
+ }
+ }
+ map
+ }
+}
+
+impl<'a, 'tcx> mir::visit::Visitor<'tcx> for PossibleOriginVisitor<'a, 'tcx> {
+ fn visit_assign(&mut self, place: &mir::Place<'tcx>, rvalue: &mir::Rvalue<'_>, _location: mir::Location) {
+ let lhs = place.local;
+ match rvalue {
+ // Only consider `&mut`, which can modify origin place
+ mir::Rvalue::Ref(_, rustc_middle::mir::BorrowKind::Mut { .. }, borrowed) |
+ // _2: &mut _;
+ // _3 = move _2
+ mir::Rvalue::Use(mir::Operand::Move(borrowed)) |
+ // _3 = move _2 as &mut _;
+ mir::Rvalue::Cast(_, mir::Operand::Move(borrowed), _)
+ => {
+ self.possible_origin.add(lhs, borrowed.local);
+ },
+ _ => {},
+ }
+ }
+}
+
+struct ContainsRegion;
+
+impl TypeVisitor<'_> for ContainsRegion {
+ type BreakTy = ();
+
+ fn visit_region(&mut self, _: ty::Region<'_>) -> ControlFlow<Self::BreakTy> {
+ ControlFlow::BREAK
+ }
+}
+
+fn rvalue_locals(rvalue: &mir::Rvalue<'_>, mut visit: impl FnMut(mir::Local)) {
+ use rustc_middle::mir::Rvalue::{Aggregate, BinaryOp, Cast, CheckedBinaryOp, Repeat, UnaryOp, Use};
+
+ let mut visit_op = |op: &mir::Operand<'_>| match op {
+ mir::Operand::Copy(p) | mir::Operand::Move(p) => visit(p.local),
+ mir::Operand::Constant(..) => (),
+ };
+
+ match rvalue {
+ Use(op) | Repeat(op, _) | Cast(_, op, _) | UnaryOp(_, op) => visit_op(op),
+ Aggregate(_, ops) => ops.iter().for_each(visit_op),
+ BinaryOp(_, box (lhs, rhs)) | CheckedBinaryOp(_, box (lhs, rhs)) => {
+ visit_op(lhs);
+ visit_op(rhs);
+ },
+ _ => (),
+ }
+}
+
+/// Result of `PossibleBorrowerVisitor`.
+struct PossibleBorrowerMap<'a, 'tcx> {
+ /// Mapping `Local -> its possible borrowers`
+ map: FxHashMap<mir::Local, HybridBitSet<mir::Local>>,
+ maybe_live: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
+ // Caches to avoid allocation of `BitSet` on every query
+ bitset: (BitSet<mir::Local>, BitSet<mir::Local>),
+}
+
+impl PossibleBorrowerMap<'_, '_> {
+ /// Returns true if the set of borrowers of `borrowed` living at `at` matches `borrowers`.
+ fn only_borrowers(&mut self, borrowers: &[mir::Local], borrowed: mir::Local, at: mir::Location) -> bool {
+ self.maybe_live.seek_after_primary_effect(at);
+
+ self.bitset.0.clear();
+ let maybe_live = &mut self.maybe_live;
+ if let Some(bitset) = self.map.get(&borrowed) {
+ for b in bitset.iter().filter(move |b| maybe_live.contains(*b)) {
+ self.bitset.0.insert(b);
+ }
+ } else {
+ return false;
+ }
+
+ self.bitset.1.clear();
+ for b in borrowers {
+ self.bitset.1.insert(*b);
+ }
+
+ self.bitset.0 == self.bitset.1
+ }
+
+ fn local_is_alive_at(&mut self, local: mir::Local, at: mir::Location) -> bool {
+ self.maybe_live.seek_after_primary_effect(at);
+ self.maybe_live.contains(local)
+ }
+}
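+
+// Illustrative query (hypothetical locals): with `_2 = &_1; _3 = &_1;` and both `_2` and `_3`
+// still storage-live at `at`, `only_borrowers(&[_2], _1, at)` is `false` because the live borrower
+// set is `{_2, _3}`, while `only_borrowers(&[_2, _3], _1, at)` is `true`.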
+
+#[derive(Default)]
+struct TransitiveRelation {
+ relations: FxHashMap<mir::Local, Vec<mir::Local>>,
+}
+impl TransitiveRelation {
+ fn add(&mut self, a: mir::Local, b: mir::Local) {
+ self.relations.entry(a).or_default().push(b);
+ }
+
+ fn reachable_from(&self, a: mir::Local, domain_size: usize) -> HybridBitSet<mir::Local> {
+ let mut seen = HybridBitSet::new_empty(domain_size);
+ let mut stack = vec![a];
+ while let Some(u) = stack.pop() {
+ if let Some(edges) = self.relations.get(&u) {
+ for &v in edges {
+ if seen.insert(v) {
+ stack.push(v);
+ }
+ }
+ }
+ }
+ seen
+ }
+}
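+
+// Illustrative use of `TransitiveRelation` (hypothetical locals): after `add(_1, _2)` and
+// `add(_2, _3)`, `reachable_from(_1, 4)` yields `{_2, _3}`, i.e. everything reachable through the
+// recorded edges via the depth-first walk above; `_1` itself is only included if it is part of a
+// cycle.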
diff --git a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
new file mode 100644
index 000000000..f5a93ceba
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
@@ -0,0 +1,157 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_ast::visit as ast_visit;
+use rustc_ast::visit::Visitor as AstVisitor;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::intravisit as hir_visit;
+use rustc_hir::intravisit::Visitor as HirVisitor;
+use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects closures called in the same expression where they
+ /// are defined.
+ ///
+ /// ### Why is this bad?
+ /// It unnecessarily adds to the expression's
+ /// complexity.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = (|| 42)();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let a = 42;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub REDUNDANT_CLOSURE_CALL,
+ complexity,
+ "throwaway closures called in the expression they are defined"
+}
+
+declare_lint_pass!(RedundantClosureCall => [REDUNDANT_CLOSURE_CALL]);
+
+// Used to find `return` statements or equivalents, e.g., `?`
+struct ReturnVisitor {
+ found_return: bool,
+}
+
+impl ReturnVisitor {
+ #[must_use]
+ fn new() -> Self {
+ Self { found_return: false }
+ }
+}
+
+impl<'ast> ast_visit::Visitor<'ast> for ReturnVisitor {
+ fn visit_expr(&mut self, ex: &'ast ast::Expr) {
+ if let ast::ExprKind::Ret(_) | ast::ExprKind::Try(_) = ex.kind {
+ self.found_return = true;
+ }
+
+ ast_visit::walk_expr(self, ex);
+ }
+}
+
+impl EarlyLintPass for RedundantClosureCall {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+ if_chain! {
+ if let ast::ExprKind::Call(ref paren, _) = expr.kind;
+ if let ast::ExprKind::Paren(ref closure) = paren.kind;
+ if let ast::ExprKind::Closure(_, _, _, _, ref decl, ref block, _) = closure.kind;
+ then {
+ let mut visitor = ReturnVisitor::new();
+ visitor.visit_expr(block);
+ if !visitor.found_return {
+ span_lint_and_then(
+ cx,
+ REDUNDANT_CLOSURE_CALL,
+ expr.span,
+ "try not to call a closure in the expression where it is declared",
+ |diag| {
+ if decl.inputs.is_empty() {
+ let mut app = Applicability::MachineApplicable;
+ let hint =
+ snippet_with_applicability(cx, block.span, "..", &mut app).into_owned();
+ diag.span_suggestion(expr.span, "try doing something like", hint, app);
+ }
+ },
+ );
+ }
+ }
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for RedundantClosureCall {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
+ fn count_closure_usage<'a, 'tcx>(
+ cx: &'a LateContext<'tcx>,
+ block: &'tcx hir::Block<'_>,
+ path: &'tcx hir::Path<'tcx>,
+ ) -> usize {
+ struct ClosureUsageCount<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ path: &'tcx hir::Path<'tcx>,
+ count: usize,
+ }
+ impl<'a, 'tcx> hir_visit::Visitor<'tcx> for ClosureUsageCount<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
+ if_chain! {
+ if let hir::ExprKind::Call(closure, _) = expr.kind;
+ if let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = closure.kind;
+ if self.path.segments[0].ident == path.segments[0].ident;
+ if self.path.res == path.res;
+ then {
+ self.count += 1;
+ }
+ }
+ hir_visit::walk_expr(self, expr);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+ }
+ let mut closure_usage_count = ClosureUsageCount { cx, path, count: 0 };
+ closure_usage_count.visit_block(block);
+ closure_usage_count.count
+ }
+
+ for w in block.stmts.windows(2) {
+ if_chain! {
+ if let hir::StmtKind::Local(local) = w[0].kind;
+ if let Option::Some(t) = local.init;
+ if let hir::ExprKind::Closure { .. } = t.kind;
+ if let hir::PatKind::Binding(_, _, ident, _) = local.pat.kind;
+ if let hir::StmtKind::Semi(second) = w[1].kind;
+ if let hir::ExprKind::Assign(_, call, _) = second.kind;
+ if let hir::ExprKind::Call(closure, _) = call.kind;
+ if let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = closure.kind;
+ if ident == path.segments[0].ident;
+ if count_closure_usage(cx, block, path) == 1;
+ then {
+ span_lint(
+ cx,
+ REDUNDANT_CLOSURE_CALL,
+ second.span,
+ "closure called just once immediately after it was declared",
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_else.rs b/src/tools/clippy/clippy_lints/src/redundant_else.rs
new file mode 100644
index 000000000..73088ce1a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_else.rs
@@ -0,0 +1,138 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{Block, Expr, ExprKind, Stmt, StmtKind};
+use rustc_ast::visit::{walk_expr, Visitor};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `else` blocks that can be removed without changing semantics.
+ ///
+ /// ### Why is this bad?
+ /// The `else` block adds unnecessary indentation and verbosity.
+ ///
+ /// ### Known problems
+ /// Some may prefer to keep the `else` block for clarity.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn my_func(count: u32) {
+ /// if count == 0 {
+ /// print!("Nothing to do");
+ /// return;
+ /// } else {
+ /// print!("Moving on...");
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn my_func(count: u32) {
+ /// if count == 0 {
+ /// print!("Nothing to do");
+ /// return;
+ /// }
+ /// print!("Moving on...");
+ /// }
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub REDUNDANT_ELSE,
+ pedantic,
+ "`else` branch that can be removed without changing semantics"
+}
+
+declare_lint_pass!(RedundantElse => [REDUNDANT_ELSE]);
+
+impl EarlyLintPass for RedundantElse {
+ fn check_stmt(&mut self, cx: &EarlyContext<'_>, stmt: &Stmt) {
+ if in_external_macro(cx.sess(), stmt.span) {
+ return;
+ }
+ // Only look at expressions that are a whole statement
+ let expr: &Expr = match &stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => expr,
+ _ => return,
+ };
+ // if else
+ let (mut then, mut els): (&Block, &Expr) = match &expr.kind {
+ ExprKind::If(_, then, Some(els)) => (then, els),
+ _ => return,
+ };
+ loop {
+ if !BreakVisitor::default().check_block(then) {
+ // then block does not always break
+ return;
+ }
+ match &els.kind {
+ // else if else
+ ExprKind::If(_, next_then, Some(next_els)) => {
+ then = next_then;
+ els = next_els;
+ continue;
+ },
+ // else if without else
+ ExprKind::If(..) => return,
+ // done
+ _ => break,
+ }
+ }
+ span_lint_and_help(
+ cx,
+ REDUNDANT_ELSE,
+ els.span,
+ "redundant else block",
+ None,
+ "remove the `else` block and move the contents out",
+ );
+ }
+}
+
+/// Call `check` functions to check if an expression always breaks control flow
+#[derive(Default)]
+struct BreakVisitor {
+ is_break: bool,
+}
+
+impl<'ast> Visitor<'ast> for BreakVisitor {
+ fn visit_block(&mut self, block: &'ast Block) {
+ self.is_break = match block.stmts.as_slice() {
+ [.., last] => self.check_stmt(last),
+ _ => false,
+ };
+ }
+
+ fn visit_expr(&mut self, expr: &'ast Expr) {
+ self.is_break = match expr.kind {
+ ExprKind::Break(..) | ExprKind::Continue(..) | ExprKind::Ret(..) => true,
+ ExprKind::Match(_, ref arms) => arms.iter().all(|arm| self.check_expr(&arm.body)),
+ ExprKind::If(_, ref then, Some(ref els)) => self.check_block(then) && self.check_expr(els),
+ ExprKind::If(_, _, None)
+ // ignore loops for simplicity
+ | ExprKind::While(..) | ExprKind::ForLoop(..) | ExprKind::Loop(..) => false,
+ _ => {
+ walk_expr(self, expr);
+ return;
+ },
+ };
+ }
+}
+
+impl BreakVisitor {
+ fn check<T>(&mut self, item: T, visit: fn(&mut Self, T)) -> bool {
+ visit(self, item);
+ std::mem::replace(&mut self.is_break, false)
+ }
+
+ fn check_block(&mut self, block: &Block) -> bool {
+ self.check(block, Self::visit_block)
+ }
+
+ fn check_expr(&mut self, expr: &Expr) -> bool {
+ self.check(expr, Self::visit_expr)
+ }
+
+ fn check_stmt(&mut self, stmt: &Stmt) -> bool {
+ self.check(stmt, Self::visit_stmt)
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_field_names.rs b/src/tools/clippy/clippy_lints/src/redundant_field_names.rs
new file mode 100644
index 000000000..40b03068f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_field_names.rs
@@ -0,0 +1,86 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for fields in struct literals where shorthands
+ /// could be used.
+ ///
+ /// ### Why is this bad?
+ /// If the field and variable names are the same,
+ /// the field name is redundant.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let bar: u8 = 123;
+ ///
+ /// struct Foo {
+ /// bar: u8,
+ /// }
+ ///
+ /// let foo = Foo { bar: bar };
+ /// ```
+ /// The last line can be simplified to:
+ /// ```ignore
+ /// let foo = Foo { bar };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub REDUNDANT_FIELD_NAMES,
+ style,
+ "checks for fields in struct literals where shorthands could be used"
+}
+
+pub struct RedundantFieldNames {
+ msrv: Option<RustcVersion>,
+}
+
+impl RedundantFieldNames {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(RedundantFieldNames => [REDUNDANT_FIELD_NAMES]);
+
+impl EarlyLintPass for RedundantFieldNames {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if !meets_msrv(self.msrv, msrvs::FIELD_INIT_SHORTHAND) {
+ return;
+ }
+
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+ if let ExprKind::Struct(ref se) = expr.kind {
+ for field in &se.fields {
+ if field.is_shorthand {
+ continue;
+ }
+ if let ExprKind::Path(None, path) = &field.expr.kind {
+ if path.segments.len() == 1
+ && path.segments[0].ident == field.ident
+ && path.segments[0].args.is_none()
+ {
+ span_lint_and_sugg(
+ cx,
+ REDUNDANT_FIELD_NAMES,
+ field.span,
+ "redundant field names in struct initialization",
+ "replace it with",
+ field.ident.to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+ }
+ extract_msrv_attr!(EarlyContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs b/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs
new file mode 100644
index 000000000..323326381
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs
@@ -0,0 +1,94 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::CRATE_DEF_ID;
+use rustc_span::hygiene::MacroKind;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for items declared `pub(crate)` that are not crate visible because they
+ /// are inside a private module.
+ ///
+ /// ### Why is this bad?
+ /// Writing `pub(crate)` is misleading when it's redundant due to the parent
+ /// module's visibility.
+ ///
+ /// ### Example
+ /// ```rust
+ /// mod internal {
+ /// pub(crate) fn internal_fn() { }
+ /// }
+ /// ```
+ /// This function is not visible outside the module, so it can be declared with `pub` or
+ /// private visibility:
+ /// ```rust
+ /// mod internal {
+ /// pub fn internal_fn() { }
+ /// }
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub REDUNDANT_PUB_CRATE,
+ nursery,
+ "Using `pub(crate)` visibility on items that are not crate visible due to the visibility of the module that contains them."
+}
+
+#[derive(Default)]
+pub struct RedundantPubCrate {
+ is_exported: Vec<bool>,
+}
+
+impl_lint_pass!(RedundantPubCrate => [REDUNDANT_PUB_CRATE]);
+
+impl<'tcx> LateLintPass<'tcx> for RedundantPubCrate {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if_chain! {
+ if cx.tcx.visibility(item.def_id) == ty::Visibility::Restricted(CRATE_DEF_ID.to_def_id());
+ if !cx.access_levels.is_exported(item.def_id) && self.is_exported.last() == Some(&false);
+ if is_not_macro_export(item);
+ then {
+ let span = item.span.with_hi(item.ident.span.hi());
+ let descr = cx.tcx.def_kind(item.def_id).descr(item.def_id.to_def_id());
+ span_lint_and_then(
+ cx,
+ REDUNDANT_PUB_CRATE,
+ span,
+ &format!("pub(crate) {} inside private module", descr),
+ |diag| {
+ diag.span_suggestion(
+ item.vis_span,
+ "consider using",
+ "pub".to_string(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ }
+
+ if let ItemKind::Mod { .. } = item.kind {
+ self.is_exported.push(cx.access_levels.is_exported(item.def_id));
+ }
+ }
+
+ fn check_item_post(&mut self, _cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if let ItemKind::Mod { .. } = item.kind {
+ self.is_exported.pop().expect("unbalanced check_item/check_item_post");
+ }
+ }
+}
+
+fn is_not_macro_export<'tcx>(item: &'tcx Item<'tcx>) -> bool {
+ if let ItemKind::Use(path, _) = item.kind {
+ if let Res::Def(DefKind::Macro(MacroKind::Bang), _) = path.res {
+ return false;
+ }
+ } else if let ItemKind::Macro(..) = item.kind {
+ return false;
+ }
+
+ true
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
new file mode 100644
index 000000000..db6c97f37
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
@@ -0,0 +1,169 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::get_parent_expr;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::ty::{is_type_lang_item, peel_mid_ty_refs};
+use if_chain::if_chain;
+use rustc_ast::util::parser::PREC_PREFIX;
+use rustc_errors::Applicability;
+use rustc_hir::{BorrowKind, Expr, ExprKind, LangItem, Mutability};
+use rustc_lint::{LateContext, LateLintPass, Lint};
+use rustc_middle::ty::adjustment::{Adjust, AutoBorrow, AutoBorrowMutability};
+use rustc_middle::ty::subst::GenericArg;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for redundant slicing expressions which use the full range, and
+ /// do not change the type.
+ ///
+ /// ### Why is this bad?
+ /// It unnecessarily adds complexity to the expression.
+ ///
+ /// ### Known problems
+ /// If the type being sliced has an implementation of `Index<RangeFull>`
+ /// that actually changes anything, then the slicing can't be removed. However, such an
+ /// implementation would be surprising to people reading the code and should be accompanied by a comment.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// fn get_slice(x: &[u32]) -> &[u32] {
+ /// &x[..]
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```ignore
+ /// fn get_slice(x: &[u32]) -> &[u32] {
+ /// x
+ /// }
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub REDUNDANT_SLICING,
+ complexity,
+ "redundant slicing of the whole range of a type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for slicing expressions which are equivalent to dereferencing the
+ /// value.
+ ///
+ /// ### Why is this bad?
+ /// Some people may prefer to dereference rather than slice.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let vec = vec![1, 2, 3];
+ /// let slice = &vec[..];
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let vec = vec![1, 2, 3];
+ /// let slice = &*vec;
+ /// ```
+ #[clippy::version = "1.61.0"]
+ pub DEREF_BY_SLICING,
+ restriction,
+ "slicing instead of dereferencing"
+}
+
+declare_lint_pass!(RedundantSlicing => [REDUNDANT_SLICING, DEREF_BY_SLICING]);
+
+static REDUNDANT_SLICING_LINT: (&Lint, &str) = (REDUNDANT_SLICING, "redundant slicing of the whole range");
+static DEREF_BY_SLICING_LINT: (&Lint, &str) = (DEREF_BY_SLICING, "slicing when dereferencing would work");
+
+impl<'tcx> LateLintPass<'tcx> for RedundantSlicing {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ let ctxt = expr.span.ctxt();
+ if_chain! {
+ if let ExprKind::AddrOf(BorrowKind::Ref, mutability, addressee) = expr.kind;
+ if addressee.span.ctxt() == ctxt;
+ if let ExprKind::Index(indexed, range) = addressee.kind;
+ if is_type_lang_item(cx, cx.typeck_results().expr_ty_adjusted(range), LangItem::RangeFull);
+ then {
+ let (expr_ty, expr_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(expr));
+ let (indexed_ty, indexed_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(indexed));
+ let parent_expr = get_parent_expr(cx, expr);
+ let needs_parens_for_prefix = parent_expr.map_or(false, |parent| {
+ parent.precedence().order() > PREC_PREFIX
+ });
+ let mut app = Applicability::MachineApplicable;
+
+ let ((lint, msg), help, sugg) = if expr_ty == indexed_ty {
+ if expr_ref_count > indexed_ref_count {
+ // Indexing takes self by reference and can't return a reference to that
+ // reference as it's a local variable. The only way this could happen is if
+ // `self` contains a reference to the `Self` type. If this occurs then the
+ // lint no longer applies as it's essentially a field access, which is not
+ // redundant.
+ return;
+ }
+ let deref_count = indexed_ref_count - expr_ref_count;
+
+ let (lint, reborrow_str, help_str) = if mutability == Mutability::Mut {
+ // The slice was used to reborrow the mutable reference.
+ (DEREF_BY_SLICING_LINT, "&mut *", "reborrow the original value instead")
+ } else if matches!(
+ parent_expr,
+ Some(Expr {
+ kind: ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _),
+ ..
+ })
+ ) || cx.typeck_results().expr_adjustments(expr).first().map_or(false, |a| {
+ matches!(a.kind, Adjust::Borrow(AutoBorrow::Ref(_, AutoBorrowMutability::Mut { .. })))
+ }) {
+ // The slice was used to make a temporary reference.
+ (DEREF_BY_SLICING_LINT, "&*", "reborrow the original value instead")
+ } else if deref_count != 0 {
+ (DEREF_BY_SLICING_LINT, "", "dereference the original value instead")
+ } else {
+ (REDUNDANT_SLICING_LINT, "", "use the original value instead")
+ };
+
+ let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
+ let sugg = if (deref_count != 0 || !reborrow_str.is_empty()) && needs_parens_for_prefix {
+ format!("({}{}{})", reborrow_str, "*".repeat(deref_count), snip)
+ } else {
+ format!("{}{}{}", reborrow_str, "*".repeat(deref_count), snip)
+ };
+
+ (lint, help_str, sugg)
+ } else if let Some(target_id) = cx.tcx.lang_items().deref_target() {
+ if let Ok(deref_ty) = cx.tcx.try_normalize_erasing_regions(
+ cx.param_env,
+ cx.tcx.mk_projection(target_id, cx.tcx.mk_substs([GenericArg::from(indexed_ty)].into_iter())),
+ ) {
+ if deref_ty == expr_ty {
+ let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
+ let sugg = if needs_parens_for_prefix {
+ format!("(&{}{}*{})", mutability.prefix_str(), "*".repeat(indexed_ref_count), snip)
+ } else {
+ format!("&{}{}*{}", mutability.prefix_str(), "*".repeat(indexed_ref_count), snip)
+ };
+ (DEREF_BY_SLICING_LINT, "dereference the original value instead", sugg)
+ } else {
+ return;
+ }
+ } else {
+ return;
+ }
+ } else {
+ return;
+ };
+
+ span_lint_and_sugg(
+ cx,
+ lint,
+ expr.span,
+ msg,
+ help,
+ sugg,
+ app,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
new file mode 100644
index 000000000..f8801f769
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
@@ -0,0 +1,117 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::{meets_msrv, msrvs};
+use rustc_ast::ast::{Item, ItemKind, Ty, TyKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for constants and statics with an explicit `'static` lifetime.
+ ///
+ /// ### Why is this bad?
+ /// Adding `'static` to every reference can create very
+ /// complicated types.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// const FOO: &'static [(&'static str, &'static str, fn(&Bar) -> bool)] =
+ /// &[...]
+ /// static FOO: &'static [(&'static str, &'static str, fn(&Bar) -> bool)] =
+ /// &[...]
+ /// ```
+ /// This code can be rewritten as
+ /// ```ignore
+ /// const FOO: &[(&str, &str, fn(&Bar) -> bool)] = &[...]
+ /// static FOO: &[(&str, &str, fn(&Bar) -> bool)] = &[...]
+ /// ```
+ #[clippy::version = "1.37.0"]
+ pub REDUNDANT_STATIC_LIFETIMES,
+ style,
+ "Using explicit `'static` lifetime for constants or statics when elision rules would allow omitting them."
+}
+
+pub struct RedundantStaticLifetimes {
+ msrv: Option<RustcVersion>,
+}
+
+impl RedundantStaticLifetimes {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(RedundantStaticLifetimes => [REDUNDANT_STATIC_LIFETIMES]);
+
+impl RedundantStaticLifetimes {
+ // Recursively visit types
+ fn visit_type(&mut self, ty: &Ty, cx: &EarlyContext<'_>, reason: &str) {
+ match ty.kind {
+ // Be careful of nested structures (arrays and tuples)
+ TyKind::Array(ref ty, _) | TyKind::Slice(ref ty) => {
+ self.visit_type(ty, cx, reason);
+ },
+ TyKind::Tup(ref tup) => {
+ for tup_ty in tup {
+ self.visit_type(tup_ty, cx, reason);
+ }
+ },
+ // This is what we are looking for!
+ TyKind::Rptr(ref optional_lifetime, ref borrow_type) => {
+ // Match the 'static lifetime
+ if let Some(lifetime) = *optional_lifetime {
+ match borrow_type.ty.kind {
+ TyKind::Path(..) | TyKind::Slice(..) | TyKind::Array(..) | TyKind::Tup(..) => {
+ if lifetime.ident.name == rustc_span::symbol::kw::StaticLifetime {
+ let snip = snippet(cx, borrow_type.ty.span, "<type>");
+ let sugg = format!("&{}", snip);
+ span_lint_and_then(
+ cx,
+ REDUNDANT_STATIC_LIFETIMES,
+ lifetime.ident.span,
+ reason,
+ |diag| {
+ diag.span_suggestion(
+ ty.span,
+ "consider removing `'static`",
+ sugg,
+ Applicability::MachineApplicable, //snippet
+ );
+ },
+ );
+ }
+ },
+ _ => {},
+ }
+ }
+ self.visit_type(&borrow_type.ty, cx, reason);
+ },
+ _ => {},
+ }
+ }
+}
+
+impl EarlyLintPass for RedundantStaticLifetimes {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if !meets_msrv(self.msrv, msrvs::STATIC_IN_CONST) {
+ return;
+ }
+
+ if !item.span.from_expansion() {
+ if let ItemKind::Const(_, ref var_type, _) = item.kind {
+ self.visit_type(var_type, cx, "constants have by default a `'static` lifetime");
+ // Don't check associated consts because `'static` cannot be elided on those (issue
+ // #2438)
+ }
+
+ if let ItemKind::Static(ref var_type, _, _) = item.kind {
+ self.visit_type(var_type, cx, "statics have by default a `'static` lifetime");
+ }
+ }
+ }
+
+ extract_msrv_attr!(EarlyContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/ref_option_ref.rs b/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
new file mode 100644
index 000000000..909d6971a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/ref_option_ref.rs
@@ -0,0 +1,71 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::last_path_segment;
+use clippy_utils::source::snippet;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{GenericArg, Mutability, Ty, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `&Option<&T>`.
+ ///
+ /// ### Why is this bad?
+ /// Since `&T` implements `Copy`, there is no benefit in taking a
+ /// reference to an `Option<&T>`.
+ ///
+ /// ### Known problems
+ /// Applying this lint to public API code may be undesirable, as the
+ /// suggested change is a breaking change.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let x: &Option<&u32> = &Some(&0u32);
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// let x: Option<&u32> = Some(&0u32);
+ /// ```
+ #[clippy::version = "1.49.0"]
+ pub REF_OPTION_REF,
+ pedantic,
+ "use `Option<&T>` instead of `&Option<&T>`"
+}
+
+declare_lint_pass!(RefOptionRef => [REF_OPTION_REF]);
+
+impl<'tcx> LateLintPass<'tcx> for RefOptionRef {
+ fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx Ty<'tcx>) {
+ if_chain! {
+ if let TyKind::Rptr(_, ref mut_ty) = ty.kind;
+ if mut_ty.mutbl == Mutability::Not;
+ if let TyKind::Path(ref qpath) = &mut_ty.ty.kind;
+ let last = last_path_segment(qpath);
+ if let Some(res) = last.res;
+ if let Some(def_id) = res.opt_def_id();
+
+ if cx.tcx.is_diagnostic_item(sym::Option, def_id);
+ if let Some(params) = last_path_segment(qpath).args;
+ if !params.parenthesized;
+ if let Some(inner_ty) = params.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(inner_ty) => Some(inner_ty),
+ _ => None,
+ });
+ if let TyKind::Rptr(_, _) = inner_ty.kind;
+
+ then {
+ span_lint_and_sugg(
+ cx,
+ REF_OPTION_REF,
+ ty.span,
+ "since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`",
+ "try",
+ format!("Option<{}>", &snippet(cx, inner_ty.span, "..")),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/reference.rs b/src/tools/clippy/clippy_lints/src/reference.rs
new file mode 100644
index 000000000..a642e2da3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/reference.rs
@@ -0,0 +1,105 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{snippet_opt, snippet_with_applicability};
+use if_chain::if_chain;
+use rustc_ast::ast::{Expr, ExprKind, Mutability, UnOp};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::BytePos;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `*&` and `*&mut` in expressions.
+ ///
+ /// ### Why is this bad?
+ /// Immediately dereferencing a reference is a no-op and
+ /// makes the code less clear.
+ ///
+ /// ### Known problems
+ /// Multiple dereference/addrof pairs are not handled, so
+ /// the suggested fix for `x = **&&y` is `x = *&y`, which is still incorrect.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let a = f(*&mut b);
+ /// let c = *&d;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// let a = f(b);
+ /// let c = d;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub DEREF_ADDROF,
+ complexity,
+ "use of `*&` or `*&mut` in an expression"
+}
+
+declare_lint_pass!(DerefAddrOf => [DEREF_ADDROF]);
+
+fn without_parens(mut e: &Expr) -> &Expr {
+ while let ExprKind::Paren(ref child_e) = e.kind {
+ e = child_e;
+ }
+ e
+}
+
+impl EarlyLintPass for DerefAddrOf {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &Expr) {
+ if_chain! {
+ if let ExprKind::Unary(UnOp::Deref, ref deref_target) = e.kind;
+ if let ExprKind::AddrOf(_, ref mutability, ref addrof_target) = without_parens(deref_target).kind;
+ if deref_target.span.ctxt() == e.span.ctxt();
+ if !addrof_target.span.from_expansion();
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let sugg = if e.span.from_expansion() {
+ if let Some(macro_source) = snippet_opt(cx, e.span) {
+ // Remove leading whitespace from the given span,
+ // e.g., ` $visitor` turns into `$visitor`
+ let trim_leading_whitespaces = |span| {
+ snippet_opt(cx, span).and_then(|snip| {
+ #[expect(clippy::cast_possible_truncation)]
+ snip.find(|c: char| !c.is_whitespace()).map(|pos| {
+ span.lo() + BytePos(pos as u32)
+ })
+ }).map_or(span, |start_no_whitespace| e.span.with_lo(start_no_whitespace))
+ };
+
+ let mut generate_snippet = |pattern: &str| {
+ #[expect(clippy::cast_possible_truncation)]
+ macro_source.rfind(pattern).map(|pattern_pos| {
+ let rpos = pattern_pos + pattern.len();
+ let span_after_ref = e.span.with_lo(BytePos(e.span.lo().0 + rpos as u32));
+ let span = trim_leading_whitespaces(span_after_ref);
+ snippet_with_applicability(cx, span, "_", &mut applicability)
+ })
+ };
+
+ if *mutability == Mutability::Mut {
+ generate_snippet("mut")
+ } else {
+ generate_snippet("&")
+ }
+ } else {
+ Some(snippet_with_applicability(cx, e.span, "_", &mut applicability))
+ }
+ } else {
+ Some(snippet_with_applicability(cx, addrof_target.span, "_", &mut applicability))
+ };
+ if let Some(sugg) = sugg {
+ span_lint_and_sugg(
+ cx,
+ DEREF_ADDROF,
+ e.span,
+ "immediately dereferencing a reference",
+ "try this",
+ sugg.to_string(),
+ applicability,
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/regex.rs b/src/tools/clippy/clippy_lints/src/regex.rs
new file mode 100644
index 000000000..f9a9b0691
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/regex.rs
@@ -0,0 +1,208 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::ast::{LitKind, StrStyle};
+use rustc_hir::{BorrowKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::{BytePos, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks [regex](https://crates.io/crates/regex) creation
+ /// (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`) for correct
+ /// regex syntax.
+ ///
+ /// ### Why is this bad?
+ /// This will lead to a runtime panic.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// Regex::new("(")
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub INVALID_REGEX,
+ correctness,
+ "invalid regular expressions"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for trivial [regex](https://crates.io/crates/regex)
+ /// creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`).
+ ///
+ /// ### Why is this bad?
+ /// Matching the regex can likely be replaced by `==` or
+ /// `str::starts_with`, `str::ends_with`, `str::contains` or other `str`
+ /// methods.
+ ///
+ /// ### Known problems
+ /// If the same regex is going to be applied to multiple
+ /// inputs, the precomputations done by `Regex` construction can give
+ /// significantly better performance than any of the `str`-based methods.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// Regex::new("^foobar")
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRIVIAL_REGEX,
+ nursery,
+ "trivial regular expressions"
+}
+
+declare_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]);
+
+impl<'tcx> LateLintPass<'tcx> for Regex {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(fun, args) = expr.kind;
+ if let ExprKind::Path(ref qpath) = fun.kind;
+ if args.len() == 1;
+ if let Some(def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
+ then {
+ if match_def_path(cx, def_id, &paths::REGEX_NEW) ||
+ match_def_path(cx, def_id, &paths::REGEX_BUILDER_NEW) {
+ check_regex(cx, &args[0], true);
+ } else if match_def_path(cx, def_id, &paths::REGEX_BYTES_NEW) ||
+ match_def_path(cx, def_id, &paths::REGEX_BYTES_BUILDER_NEW) {
+ check_regex(cx, &args[0], false);
+ } else if match_def_path(cx, def_id, &paths::REGEX_SET_NEW) {
+ check_set(cx, &args[0], true);
+ } else if match_def_path(cx, def_id, &paths::REGEX_BYTES_SET_NEW) {
+ check_set(cx, &args[0], false);
+ }
+ }
+ }
+ }
+}
+
+#[must_use]
+fn str_span(base: Span, c: regex_syntax::ast::Span, offset: u8) -> Span {
+ let offset = u32::from(offset);
+ let end = base.lo() + BytePos(u32::try_from(c.end.offset).expect("offset too large") + offset);
+ let start = base.lo() + BytePos(u32::try_from(c.start.offset).expect("offset too large") + offset);
+ assert!(start <= end);
+ Span::new(start, end, base.ctxt(), base.parent())
+}
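+
+// For example (hypothetical literal): with the literal `"("`, the regex text starts one byte after
+// `base.lo()` (the opening quote), so `offset` is 1; for a raw string with `n` `#` marks the `r`,
+// the hashes and the quote give `2 + n`, matching the `StrStyle::Raw(n)` case in `check_regex`
+// below.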
+
+fn const_str<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> Option<String> {
+ constant(cx, cx.typeck_results(), e).and_then(|(c, _)| match c {
+ Constant::Str(s) => Some(s),
+ _ => None,
+ })
+}
+
+fn is_trivial_regex(s: &regex_syntax::hir::Hir) -> Option<&'static str> {
+ use regex_syntax::hir::Anchor::{EndText, StartText};
+ use regex_syntax::hir::HirKind::{Alternation, Anchor, Concat, Empty, Literal};
+
+ let is_literal = |e: &[regex_syntax::hir::Hir]| e.iter().all(|e| matches!(*e.kind(), Literal(_)));
+
+ match *s.kind() {
+ Empty | Anchor(_) => Some("the regex is unlikely to be useful as it is"),
+ Literal(_) => Some("consider using `str::contains`"),
+ Alternation(ref exprs) => {
+ if exprs.iter().all(|e| e.kind().is_empty()) {
+ Some("the regex is unlikely to be useful as it is")
+ } else {
+ None
+ }
+ },
+ Concat(ref exprs) => match (exprs[0].kind(), exprs[exprs.len() - 1].kind()) {
+ (&Anchor(StartText), &Anchor(EndText)) if exprs[1..(exprs.len() - 1)].is_empty() => {
+ Some("consider using `str::is_empty`")
+ },
+ (&Anchor(StartText), &Anchor(EndText)) if is_literal(&exprs[1..(exprs.len() - 1)]) => {
+ Some("consider using `==` on `str`s")
+ },
+ (&Anchor(StartText), &Literal(_)) if is_literal(&exprs[1..]) => Some("consider using `str::starts_with`"),
+ (&Literal(_), &Anchor(EndText)) if is_literal(&exprs[1..(exprs.len() - 1)]) => {
+ Some("consider using `str::ends_with`")
+ },
+ _ if is_literal(exprs) => Some("consider using `str::contains`"),
+ _ => None,
+ },
+ _ => None,
+ }
+}
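+
+// Illustrative mappings (assuming the patterns parse to the HIR kinds matched above): `""` and
+// `"^"` are flagged as unlikely to be useful, `"^$"` suggests `str::is_empty`, `"foobar"` suggests
+// `str::contains`, `"^foobar$"` suggests `==`, `"^foobar"` suggests `str::starts_with`, and
+// `"foobar$"` suggests `str::ends_with`.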
+
+fn check_set<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, utf8: bool) {
+ if_chain! {
+ if let ExprKind::AddrOf(BorrowKind::Ref, _, expr) = expr.kind;
+ if let ExprKind::Array(exprs) = expr.kind;
+ then {
+ for expr in exprs {
+ check_regex(cx, expr, utf8);
+ }
+ }
+ }
+}
+
+fn check_regex<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, utf8: bool) {
+ let mut parser = regex_syntax::ParserBuilder::new()
+ .unicode(true)
+ .allow_invalid_utf8(!utf8)
+ .build();
+
+ if let ExprKind::Lit(ref lit) = expr.kind {
+ if let LitKind::Str(ref r, style) = lit.node {
+ let r = r.as_str();
+ let offset = if let StrStyle::Raw(n) = style { 2 + n } else { 1 };
+ match parser.parse(r) {
+ Ok(r) => {
+ if let Some(repl) = is_trivial_regex(&r) {
+ span_lint_and_help(cx, TRIVIAL_REGEX, expr.span, "trivial regex", None, repl);
+ }
+ },
+ Err(regex_syntax::Error::Parse(e)) => {
+ span_lint(
+ cx,
+ INVALID_REGEX,
+ str_span(expr.span, *e.span(), offset),
+ &format!("regex syntax error: {}", e.kind()),
+ );
+ },
+ Err(regex_syntax::Error::Translate(e)) => {
+ span_lint(
+ cx,
+ INVALID_REGEX,
+ str_span(expr.span, *e.span(), offset),
+ &format!("regex syntax error: {}", e.kind()),
+ );
+ },
+ Err(e) => {
+ span_lint(cx, INVALID_REGEX, expr.span, &format!("regex syntax error: {}", e));
+ },
+ }
+ }
+ } else if let Some(r) = const_str(cx, expr) {
+ match parser.parse(&r) {
+ Ok(r) => {
+ if let Some(repl) = is_trivial_regex(&r) {
+ span_lint_and_help(cx, TRIVIAL_REGEX, expr.span, "trivial regex", None, repl);
+ }
+ },
+ Err(regex_syntax::Error::Parse(e)) => {
+ span_lint(
+ cx,
+ INVALID_REGEX,
+ expr.span,
+ &format!("regex syntax error on position {}: {}", e.span().start.offset, e.kind()),
+ );
+ },
+ Err(regex_syntax::Error::Translate(e)) => {
+ span_lint(
+ cx,
+ INVALID_REGEX,
+ expr.span,
+ &format!("regex syntax error on position {}: {}", e.span().start.offset, e.kind()),
+ );
+ },
+ Err(e) => {
+ span_lint(cx, INVALID_REGEX, expr.span, &format!("regex syntax error: {}", e));
+ },
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/renamed_lints.rs b/src/tools/clippy/clippy_lints/src/renamed_lints.rs
new file mode 100644
index 000000000..ba03ef937
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/renamed_lints.rs
@@ -0,0 +1,40 @@
+// This file is managed by `cargo dev rename_lint`. Prefer using that when possible.
+
+#[rustfmt::skip]
+pub static RENAMED_LINTS: &[(&str, &str)] = &[
+ ("clippy::block_in_if_condition_expr", "clippy::blocks_in_if_conditions"),
+ ("clippy::block_in_if_condition_stmt", "clippy::blocks_in_if_conditions"),
+ ("clippy::box_vec", "clippy::box_collection"),
+ ("clippy::const_static_lifetime", "clippy::redundant_static_lifetimes"),
+ ("clippy::cyclomatic_complexity", "clippy::cognitive_complexity"),
+ ("clippy::disallowed_method", "clippy::disallowed_methods"),
+ ("clippy::disallowed_type", "clippy::disallowed_types"),
+ ("clippy::eval_order_dependence", "clippy::mixed_read_write_in_expression"),
+ ("clippy::for_loop_over_option", "clippy::for_loops_over_fallibles"),
+ ("clippy::for_loop_over_result", "clippy::for_loops_over_fallibles"),
+ ("clippy::identity_conversion", "clippy::useless_conversion"),
+ ("clippy::if_let_some_result", "clippy::match_result_ok"),
+ ("clippy::new_without_default_derive", "clippy::new_without_default"),
+ ("clippy::option_and_then_some", "clippy::bind_instead_of_map"),
+ ("clippy::option_expect_used", "clippy::expect_used"),
+ ("clippy::option_map_unwrap_or", "clippy::map_unwrap_or"),
+ ("clippy::option_map_unwrap_or_else", "clippy::map_unwrap_or"),
+ ("clippy::option_unwrap_used", "clippy::unwrap_used"),
+ ("clippy::ref_in_deref", "clippy::needless_borrow"),
+ ("clippy::result_expect_used", "clippy::expect_used"),
+ ("clippy::result_map_unwrap_or_else", "clippy::map_unwrap_or"),
+ ("clippy::result_unwrap_used", "clippy::unwrap_used"),
+ ("clippy::single_char_push_str", "clippy::single_char_add_str"),
+ ("clippy::stutter", "clippy::module_name_repetitions"),
+ ("clippy::to_string_in_display", "clippy::recursive_format_impl"),
+ ("clippy::zero_width_space", "clippy::invisible_characters"),
+ ("clippy::drop_bounds", "drop_bounds"),
+ ("clippy::into_iter_on_array", "array_into_iter"),
+ ("clippy::invalid_atomic_ordering", "invalid_atomic_ordering"),
+ ("clippy::invalid_ref", "invalid_value"),
+ ("clippy::mem_discriminant_non_enum", "enum_intrinsics_non_enums"),
+ ("clippy::panic_params", "non_fmt_panics"),
+ ("clippy::temporary_cstring_as_ptr", "temporary_cstring_as_ptr"),
+ ("clippy::unknown_clippy_lints", "unknown_lints"),
+ ("clippy::unused_label", "unused_labels"),
+];
diff --git a/src/tools/clippy/clippy_lints/src/repeat_once.rs b/src/tools/clippy/clippy_lints/src/repeat_once.rs
new file mode 100644
index 000000000..898c70ace
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/repeat_once.rs
@@ -0,0 +1,89 @@
+use clippy_utils::consts::{constant_context, Constant};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.repeat(1)` and suggests the following method for each type:
+ /// - `.to_string()` for `str`
+ /// - `.clone()` for `String`
+ /// - `.to_vec()` for `slice`
+ ///
+ /// The lint will evaluate constant expressions and values as arguments of `.repeat(..)` and emit a message if
+ /// they are equivalent to `1`. (Related discussion in [rust-clippy#7306](https://github.com/rust-lang/rust-clippy/issues/7306))
+ ///
+ /// ### Why is this bad?
+ /// For example, `String.repeat(1)` is equivalent to `.clone()`. If cloning
+ /// the string is the intention behind this, `clone()` should be used.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn main() {
+ /// let x = String::from("hello world").repeat(1);
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn main() {
+ /// let x = String::from("hello world").clone();
+ /// }
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub REPEAT_ONCE,
+ complexity,
+ "using `.repeat(1)` instead of `String.clone()`, `str.to_string()` or `slice.to_vec()` "
+}
+
+declare_lint_pass!(RepeatOnce => [REPEAT_ONCE]);
+
+impl<'tcx> LateLintPass<'tcx> for RepeatOnce {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::MethodCall(path, [receiver, count], _) = &expr.kind;
+ if path.ident.name == sym!(repeat);
+ if constant_context(cx, cx.typeck_results()).expr(count) == Some(Constant::Int(1));
+ if !receiver.span.from_expansion();
+ then {
+ let ty = cx.typeck_results().expr_ty(receiver).peel_refs();
+ if ty.is_str() {
+ span_lint_and_sugg(
+ cx,
+ REPEAT_ONCE,
+ expr.span,
+ "calling `repeat(1)` on str",
+ "consider using `.to_string()` instead",
+ format!("{}.to_string()", snippet(cx, receiver.span, r#""...""#)),
+ Applicability::MachineApplicable,
+ );
+ } else if ty.builtin_index().is_some() {
+ span_lint_and_sugg(
+ cx,
+ REPEAT_ONCE,
+ expr.span,
+ "calling `repeat(1)` on slice",
+ "consider using `.to_vec()` instead",
+ format!("{}.to_vec()", snippet(cx, receiver.span, r#""...""#)),
+ Applicability::MachineApplicable,
+ );
+ } else if is_type_diagnostic_item(cx, ty, sym::String) {
+ span_lint_and_sugg(
+ cx,
+ REPEAT_ONCE,
+ expr.span,
+ "calling `repeat(1)` on a string literal",
+ "consider using `.clone()` instead",
+ format!("{}.clone()", snippet(cx, receiver.span, r#""...""#)),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs
new file mode 100644
index 000000000..60be6bd33
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs
@@ -0,0 +1,134 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::is_must_use_ty;
+use clippy_utils::{nth_arg, return_ty};
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, FnDecl, HirId, TraitItem, TraitItemKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns when a method returning `Self` doesn't have the `#[must_use]` attribute.
+ ///
+ /// ### Why is this bad?
+ /// Methods returning `Self` often create new values; adding the `#[must_use]` attribute
+ /// prevents users from "forgetting" to use the newly created value.
+ ///
+ /// The `#[must_use]` attribute can be added to the type itself to ensure that instances
+ /// are never forgotten. Functions returning a type marked with `#[must_use]` will not be
+ /// linted, as the usage is already enforced by the type attribute.
+ ///
+ /// ### Limitations
+ /// This lint is only applied to methods taking a `self` argument. It would be mostly noise
+ /// if it were also applied to constructors, for example.
+ ///
+ /// ### Example
+ /// ```rust
+ /// pub struct Bar;
+ /// impl Bar {
+ /// // Missing attribute
+ /// pub fn bar(&self) -> Self {
+ /// Self
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # {
+ /// // It's better to have the `#[must_use]` attribute on the method like this:
+ /// pub struct Bar;
+ /// impl Bar {
+ /// #[must_use]
+ /// pub fn bar(&self) -> Self {
+ /// Self
+ /// }
+ /// }
+ /// # }
+ ///
+ /// # {
+ /// // Or on the type definition like this:
+ /// #[must_use]
+ /// pub struct Bar;
+ /// impl Bar {
+ /// pub fn bar(&self) -> Self {
+ /// Self
+ /// }
+ /// }
+ /// # }
+ /// ```
+ #[clippy::version = "1.59.0"]
+ pub RETURN_SELF_NOT_MUST_USE,
+ pedantic,
+ "missing `#[must_use]` annotation on a method returning `Self`"
+}
+
+declare_lint_pass!(ReturnSelfNotMustUse => [RETURN_SELF_NOT_MUST_USE]);
+
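+/// Emits the lint for a single method if it is public, takes `self`, returns the `Self` type,
+/// and neither the method nor the returned type is already marked `#[must_use]`.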
+fn check_method(cx: &LateContext<'_>, decl: &FnDecl<'_>, fn_def: LocalDefId, span: Span, hir_id: HirId) {
+ if_chain! {
+ // If it comes from an external macro, better ignore it.
+ if !in_external_macro(cx.sess(), span);
+ if decl.implicit_self.has_implicit_self();
+ // We only show this warning for public exported methods.
+ if cx.access_levels.is_exported(fn_def);
+ // We don't want to emit this lint if the `#[must_use]` attribute is already there.
+ if !cx.tcx.hir().attrs(hir_id).iter().any(|attr| attr.has_name(sym::must_use));
+ if cx.tcx.visibility(fn_def.to_def_id()).is_public();
+ let ret_ty = return_ty(cx, hir_id);
+ let self_arg = nth_arg(cx, hir_id, 0);
+ // If `Self` has the same type as the returned type, then we want to warn.
+ //
+ // For this check, we don't want to remove the reference on the returned type because if
+ // there is one, we shouldn't emit a warning!
+ if self_arg.peel_refs() == ret_ty;
+ // If `Self` is already marked as `#[must_use]`, no need for the attribute here.
+ if !is_must_use_ty(cx, ret_ty);
+
+ then {
+ span_lint_and_help(
+ cx,
+ RETURN_SELF_NOT_MUST_USE,
+ span,
+ "missing `#[must_use]` attribute on a method returning `Self`",
+ None,
+ "consider adding the `#[must_use]` attribute to the method or directly to the `Self` type"
+ );
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for ReturnSelfNotMustUse {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'tcx>,
+ _: &'tcx Body<'tcx>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ if_chain! {
+ // We are only interested in methods, not in functions or associated functions.
+ if matches!(kind, FnKind::Method(_, _));
+ if let Some(fn_def) = cx.tcx.hir().opt_local_def_id(hir_id);
+ if let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id());
+ // We don't want this method to be the implementation of a trait because the
+ // `#[must_use]` should be put on the trait definition directly.
+ if cx.tcx.trait_id_of_impl(impl_def).is_none();
+
+ then {
+ check_method(cx, decl, fn_def, span, hir_id);
+ }
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'tcx>) {
+ if let TraitItemKind::Fn(ref sig, _) = item.kind {
+ check_method(cx, sig.decl, item.def_id, item.span, item.hir_id());
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/returns.rs b/src/tools/clippy/clippy_lints/src/returns.rs
new file mode 100644
index 000000000..1d9a2abf7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/returns.rs
@@ -0,0 +1,333 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::source::{snippet_opt, snippet_with_context};
+use clippy_utils::{fn_def_id, path_to_local_id};
+use if_chain::if_chain;
+use rustc_ast::ast::Attribute;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
+use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, HirId, MatchSource, PatKind, StmtKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::subst::GenericArgKind;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `let`-bindings, which are subsequently
+ /// returned.
+ ///
+ /// ### Why is this bad?
+ /// It is just extraneous code. Remove it to make your code
+ /// more rusty.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo() -> String {
+ /// let x = String::new();
+ /// x
+ /// }
+ /// ```
+ /// instead, use
+ /// ```
+ /// fn foo() -> String {
+ /// String::new()
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LET_AND_RETURN,
+ style,
+ "creating a let-binding and then immediately returning it like `let x = expr; x` at the end of a block"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for return statements at the end of a block.
+ ///
+ /// ### Why is this bad?
+ /// Removing the `return` and semicolon will make the code
+ /// more rusty.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(x: usize) -> usize {
+ /// return x;
+ /// }
+ /// ```
+ /// simplify to
+ /// ```rust
+ /// fn foo(x: usize) -> usize {
+ /// x
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NEEDLESS_RETURN,
+ style,
+ "using a return statement like `return expr;` where an expression would suffice"
+}
+
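+/// What to suggest in place of a bare `return` (one with no inner expression) when it is removed.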
+#[derive(PartialEq, Eq, Copy, Clone)]
+enum RetReplacement {
+ Empty,
+ Block,
+ Unit,
+}
+
+declare_lint_pass!(Return => [LET_AND_RETURN, NEEDLESS_RETURN]);
+
+impl<'tcx> LateLintPass<'tcx> for Return {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'_>) {
+ // we need both a let-binding stmt and an expr
+ if_chain! {
+ if let Some(retexpr) = block.expr;
+ if let Some(stmt) = block.stmts.iter().last();
+ if let StmtKind::Local(local) = &stmt.kind;
+ if local.ty.is_none();
+ if cx.tcx.hir().attrs(local.hir_id).is_empty();
+ if let Some(initexpr) = &local.init;
+ if let PatKind::Binding(_, local_id, _, _) = local.pat.kind;
+ if path_to_local_id(retexpr, local_id);
+ if !last_statement_borrows(cx, initexpr);
+ if !in_external_macro(cx.sess(), initexpr.span);
+ if !in_external_macro(cx.sess(), retexpr.span);
+ if !local.span.from_expansion();
+ then {
+ span_lint_hir_and_then(
+ cx,
+ LET_AND_RETURN,
+ retexpr.hir_id,
+ retexpr.span,
+ "returning the result of a `let` binding from a block",
+ |err| {
+ err.span_label(local.span, "unnecessary `let` binding");
+
+ if let Some(mut snippet) = snippet_opt(cx, initexpr.span) {
+ if !cx.typeck_results().expr_adjustments(retexpr).is_empty() {
+ snippet.push_str(" as _");
+ }
+ err.multipart_suggestion(
+ "return the expression directly",
+ vec![
+ (local.span, String::new()),
+ (retexpr.span, snippet),
+ ],
+ Applicability::MachineApplicable,
+ );
+ } else {
+ err.span_help(initexpr.span, "this expression can be directly returned");
+ }
+ },
+ );
+ }
+ }
+ }
+
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ _: &'tcx FnDecl<'tcx>,
+ body: &'tcx Body<'tcx>,
+ _: Span,
+ _: HirId,
+ ) {
+ match kind {
+ FnKind::Closure => {
+ // when returning without a value in a closure, replace the `return`
+ // with an empty block to prevent an invalid suggestion (see #6501)
+ let replacement = if let ExprKind::Ret(None) = &body.value.kind {
+ RetReplacement::Block
+ } else {
+ RetReplacement::Empty
+ };
+ check_final_expr(cx, &body.value, Some(body.value.span), replacement);
+ },
+ FnKind::ItemFn(..) | FnKind::Method(..) => {
+ if let ExprKind::Block(block, _) = body.value.kind {
+ check_block_return(cx, block);
+ }
+ },
+ }
+ }
+}
+
+fn attr_is_cfg(attr: &Attribute) -> bool {
+ attr.meta_item_list().is_some() && attr.has_name(sym::cfg)
+}
+
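+/// Checks the final expression (or trailing statement) of a block for a needless `return`.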
+fn check_block_return<'tcx>(cx: &LateContext<'tcx>, block: &Block<'tcx>) {
+ if let Some(expr) = block.expr {
+ check_final_expr(cx, expr, Some(expr.span), RetReplacement::Empty);
+ } else if let Some(stmt) = block.stmts.iter().last() {
+ match stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => {
+ check_final_expr(cx, expr, Some(stmt.span), RetReplacement::Empty);
+ },
+ _ => (),
+ }
+ }
+}
+
+fn check_final_expr<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ span: Option<Span>,
+ replacement: RetReplacement,
+) {
+ match expr.kind {
+ // simple return is always "bad"
+ ExprKind::Ret(ref inner) => {
+ // allow `#[cfg(a)] return a; #[cfg(b)] return b;`
+ let attrs = cx.tcx.hir().attrs(expr.hir_id);
+ if !attrs.iter().any(attr_is_cfg) {
+ let borrows = inner.map_or(false, |inner| last_statement_borrows(cx, inner));
+ if !borrows {
+ emit_return_lint(
+ cx,
+ inner.map_or(expr.hir_id, |inner| inner.hir_id),
+ span.expect("`else return` is not possible"),
+ inner.as_ref().map(|i| i.span),
+ replacement,
+ );
+ }
+ }
+ },
+ // a whole block? check it!
+ ExprKind::Block(block, _) => {
+ check_block_return(cx, block);
+ },
+ ExprKind::If(_, then, else_clause_opt) => {
+ if let ExprKind::Block(ifblock, _) = then.kind {
+ check_block_return(cx, ifblock);
+ }
+ if let Some(else_clause) = else_clause_opt {
+ check_final_expr(cx, else_clause, None, RetReplacement::Empty);
+ }
+ },
+ // a match expr, check all arms
+ // an if/if let expr, check both branches
+ // note: an `if` without `else` is going to be a type-checking error anyway
+ // (except for unit-type functions), so we don't match it
+ ExprKind::Match(_, arms, MatchSource::Normal) => {
+ for arm in arms.iter() {
+ check_final_expr(cx, arm.body, Some(arm.body.span), RetReplacement::Unit);
+ }
+ },
+ ExprKind::DropTemps(expr) => check_final_expr(cx, expr, None, RetReplacement::Empty),
+ _ => (),
+ }
+}
+
+fn emit_return_lint(
+ cx: &LateContext<'_>,
+ emission_place: HirId,
+ ret_span: Span,
+ inner_span: Option<Span>,
+ replacement: RetReplacement,
+) {
+ if ret_span.from_expansion() {
+ return;
+ }
+ match inner_span {
+ Some(inner_span) => {
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_hir_and_then(
+ cx,
+ NEEDLESS_RETURN,
+ emission_place,
+ ret_span,
+ "unneeded `return` statement",
+ |diag| {
+ let (snippet, _) = snippet_with_context(cx, inner_span, ret_span.ctxt(), "..", &mut applicability);
+ diag.span_suggestion(ret_span, "remove `return`", snippet, applicability);
+ },
+ );
+ },
+ None => match replacement {
+ RetReplacement::Empty => {
+ span_lint_hir_and_then(
+ cx,
+ NEEDLESS_RETURN,
+ emission_place,
+ ret_span,
+ "unneeded `return` statement",
+ |diag| {
+ diag.span_suggestion(
+ ret_span,
+ "remove `return`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ },
+ RetReplacement::Block => {
+ span_lint_hir_and_then(
+ cx,
+ NEEDLESS_RETURN,
+ emission_place,
+ ret_span,
+ "unneeded `return` statement",
+ |diag| {
+ diag.span_suggestion(
+ ret_span,
+ "replace `return` with an empty block",
+ "{}".to_string(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ },
+ RetReplacement::Unit => {
+ span_lint_hir_and_then(
+ cx,
+ NEEDLESS_RETURN,
+ emission_place,
+ ret_span,
+ "unneeded `return` statement",
+ |diag| {
+ diag.span_suggestion(
+ ret_span,
+ "replace `return` with a unit value",
+ "()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ },
+ },
+ }
+}
+
+fn last_statement_borrows<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
+ let mut visitor = BorrowVisitor { cx, borrows: false };
+ walk_expr(&mut visitor, expr);
+ visitor.borrows
+}
+
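+/// Visitor that records whether any visited expression calls a function whose return type
+/// contains a lifetime, i.e. whether the returned value may borrow from its arguments.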
+struct BorrowVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ borrows: bool,
+}
+
+impl<'tcx> Visitor<'tcx> for BorrowVisitor<'_, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ if self.borrows || expr.span.from_expansion() {
+ return;
+ }
+
+ if let Some(def_id) = fn_def_id(self.cx, expr) {
+ self.borrows = self
+ .cx
+ .tcx
+ .fn_sig(def_id)
+ .output()
+ .skip_binder()
+ .walk()
+ .any(|arg| matches!(arg.unpack(), GenericArgKind::Lifetime(_)));
+ }
+
+ walk_expr(self, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/same_name_method.rs b/src/tools/clippy/clippy_lints/src/same_name_method.rs
new file mode 100644
index 000000000..20184d54b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/same_name_method.rs
@@ -0,0 +1,166 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{HirId, Impl, ItemKind, Node, Path, QPath, TraitRef, TyKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::AssocKind;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::Symbol;
+use rustc_span::Span;
+use std::collections::{BTreeMap, BTreeSet};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It lints if a struct has two methods with the same name:
+ /// one from a trait and one that is not from a trait.
+ ///
+ /// ### Why is this bad?
+ /// It is confusing.
+ ///
+ /// ### Example
+ /// ```rust
+ /// trait T {
+ /// fn foo(&self) {}
+ /// }
+ ///
+ /// struct S;
+ ///
+ /// impl T for S {
+ /// fn foo(&self) {}
+ /// }
+ ///
+ /// impl S {
+ /// fn foo(&self) {}
+ /// }
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub SAME_NAME_METHOD,
+ restriction,
+ "two method with same name"
+}
+
+declare_lint_pass!(SameNameMethod => [SAME_NAME_METHOD]);
+
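+/// Method names already seen for one `Self` type: inherent methods map to the span and
+/// `HirId` of their definition, trait methods map to every span they were seen at.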
+struct ExistingName {
+ impl_methods: BTreeMap<Symbol, (Span, HirId)>,
+ trait_methods: BTreeMap<Symbol, Vec<Span>>,
+}
+
+impl<'tcx> LateLintPass<'tcx> for SameNameMethod {
+ #[expect(clippy::too_many_lines)]
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ let mut map = FxHashMap::<Res, ExistingName>::default();
+
+ for id in cx.tcx.hir().items() {
+ if matches!(cx.tcx.def_kind(id.def_id), DefKind::Impl)
+ && let item = cx.tcx.hir().item(id)
+ && let ItemKind::Impl(Impl {
+ items,
+ of_trait,
+ self_ty,
+ ..
+ }) = &item.kind
+ && let TyKind::Path(QPath::Resolved(_, Path { res, .. })) = self_ty.kind
+ {
+ if !map.contains_key(res) {
+ map.insert(
+ *res,
+ ExistingName {
+ impl_methods: BTreeMap::new(),
+ trait_methods: BTreeMap::new(),
+ },
+ );
+ }
+ let existing_name = map.get_mut(res).unwrap();
+
+ match of_trait {
+ Some(trait_ref) => {
+ let mut methods_in_trait: BTreeSet<Symbol> = if_chain! {
+ if let Some(Node::TraitRef(TraitRef { path, .. })) =
+ cx.tcx.hir().find(trait_ref.hir_ref_id);
+ if let Res::Def(DefKind::Trait, did) = path.res;
+ then{
+ // FIXME: if
+ // `rustc_middle::ty::assoc::AssocItems::items` were public,
+ // we could iterate over its keys instead of `in_definition_order`,
+ // which would be more efficient
+ cx.tcx
+ .associated_items(did)
+ .in_definition_order()
+ .filter(|assoc_item| {
+ matches!(assoc_item.kind, AssocKind::Fn)
+ })
+ .map(|assoc_item| assoc_item.name)
+ .collect()
+ }else{
+ BTreeSet::new()
+ }
+ };
+
+ let mut check_trait_method = |method_name: Symbol, trait_method_span: Span| {
+ if let Some((impl_span, hir_id)) = existing_name.impl_methods.get(&method_name) {
+ span_lint_hir_and_then(
+ cx,
+ SAME_NAME_METHOD,
+ *hir_id,
+ *impl_span,
+ "method's name is the same as an existing method in a trait",
+ |diag| {
+ diag.span_note(
+ trait_method_span,
+ &format!("existing `{}` defined here", method_name),
+ );
+ },
+ );
+ }
+ if let Some(v) = existing_name.trait_methods.get_mut(&method_name) {
+ v.push(trait_method_span);
+ } else {
+ existing_name.trait_methods.insert(method_name, vec![trait_method_span]);
+ }
+ };
+
+ for impl_item_ref in (*items).iter().filter(|impl_item_ref| {
+ matches!(impl_item_ref.kind, rustc_hir::AssocItemKind::Fn { .. })
+ }) {
+ let method_name = impl_item_ref.ident.name;
+ methods_in_trait.remove(&method_name);
+ check_trait_method(method_name, impl_item_ref.span);
+ }
+
+ for method_name in methods_in_trait {
+ check_trait_method(method_name, item.span);
+ }
+ },
+ None => {
+ for impl_item_ref in (*items).iter().filter(|impl_item_ref| {
+ matches!(impl_item_ref.kind, rustc_hir::AssocItemKind::Fn { .. })
+ }) {
+ let method_name = impl_item_ref.ident.name;
+ let impl_span = impl_item_ref.span;
+ let hir_id = impl_item_ref.id.hir_id();
+ if let Some(trait_spans) = existing_name.trait_methods.get(&method_name) {
+ span_lint_hir_and_then(
+ cx,
+ SAME_NAME_METHOD,
+ hir_id,
+ impl_span,
+ "method's name is the same as an existing method in a trait",
+ |diag| {
+ // TODO should we `span_note` on every trait?
+ // iterate on trait_spans?
+ diag.span_note(
+ trait_spans[0],
+ &format!("existing `{}` defined here", method_name),
+ );
+ },
+ );
+ }
+ existing_name.impl_methods.insert(method_name, (impl_span, hir_id));
+ }
+ },
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/self_named_constructors.rs b/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
new file mode 100644
index 000000000..d07c26d7c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
@@ -0,0 +1,91 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::return_ty;
+use clippy_utils::ty::{contains_adt_constructor, contains_ty};
+use rustc_hir::{Impl, ImplItem, ImplItemKind, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns when constructors have the same name as their types.
+ ///
+ /// ### Why is this bad?
+ /// Repeating the name of the type is redundant.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// struct Foo {}
+ ///
+ /// impl Foo {
+ /// pub fn foo() -> Foo {
+ /// Foo {}
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// struct Foo {}
+ ///
+ /// impl Foo {
+ /// pub fn new() -> Foo {
+ /// Foo {}
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub SELF_NAMED_CONSTRUCTORS,
+ style,
+ "method should not have the same name as the type it is implemented for"
+}
+
+declare_lint_pass!(SelfNamedConstructors => [SELF_NAMED_CONSTRUCTORS]);
+
+impl<'tcx> LateLintPass<'tcx> for SelfNamedConstructors {
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx ImplItem<'_>) {
+ match impl_item.kind {
+ ImplItemKind::Fn(ref sig, _) => {
+ if sig.decl.implicit_self.has_implicit_self() {
+ return;
+ }
+ },
+ _ => return,
+ }
+
+ let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id());
+ let item = cx.tcx.hir().expect_item(parent);
+ let self_ty = cx.tcx.type_of(item.def_id);
+ let ret_ty = return_ty(cx, impl_item.hir_id());
+
+ // Do not check trait impls
+ if matches!(item.kind, ItemKind::Impl(Impl { of_trait: Some(_), .. })) {
+ return;
+ }
+
+ // Ensure method is constructor-like
+ if let Some(self_adt) = self_ty.ty_adt_def() {
+ if !contains_adt_constructor(ret_ty, self_adt) {
+ return;
+ }
+ } else if !contains_ty(ret_ty, self_ty) {
+ return;
+ }
+
+ if_chain! {
+ if let Some(self_def) = self_ty.ty_adt_def();
+ if let Some(self_local_did) = self_def.did().as_local();
+ let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_local_did);
+ if let Some(Node::Item(x)) = cx.tcx.hir().find(self_id);
+ let type_name = x.ident.name.as_str().to_lowercase();
+ if impl_item.ident.name.as_str() == type_name || impl_item.ident.name.as_str().replace('_', "") == type_name;
+
+ then {
+ span_lint(
+ cx,
+ SELF_NAMED_CONSTRUCTORS,
+ impl_item.span,
+ &format!("constructor `{}` has the same name as the type", impl_item.ident.name),
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
new file mode 100644
index 000000000..729694da4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
@@ -0,0 +1,70 @@
+use crate::rustc_lint::LintContext;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::sugg;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Block, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Looks for blocks of expressions and fires if the last expression returns
+ /// `()` but is not followed by a semicolon.
+ ///
+ /// ### Why is this bad?
+ /// The semicolon might be optional, but adding it consistently means that extending the
+ /// block with new code does not require changing what was previously the last line.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn main() {
+ /// println!("Hello world")
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn main() {
+ /// println!("Hello world");
+ /// }
+ /// ```
+ #[clippy::version = "1.52.0"]
+ pub SEMICOLON_IF_NOTHING_RETURNED,
+ pedantic,
+ "add a semicolon if nothing is returned"
+}
+
+declare_lint_pass!(SemicolonIfNothingReturned => [SEMICOLON_IF_NOTHING_RETURNED]);
+
+impl<'tcx> LateLintPass<'tcx> for SemicolonIfNothingReturned {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) {
+ if_chain! {
+ if !block.span.from_expansion();
+ if let Some(expr) = block.expr;
+ let t_expr = cx.typeck_results().expr_ty(expr);
+ if t_expr.is_unit();
+ if let snippet = snippet_with_macro_callsite(cx, expr.span, "}");
+ if !snippet.ends_with('}') && !snippet.ends_with(';');
+ if cx.sess().source_map().is_multiline(block.span);
+ then {
+ // filter out the desugared `for` loop
+ if let ExprKind::DropTemps(..) = &expr.kind {
+ return;
+ }
+
+ let sugg = sugg::Sugg::hir_with_macro_callsite(cx, expr, "..");
+ let suggestion = format!("{0};", sugg);
+ span_lint_and_sugg(
+ cx,
+ SEMICOLON_IF_NOTHING_RETURNED,
+ expr.span.source_callsite(),
+ "consider adding a `;` to the last statement for consistent formatting",
+ "add a `;` here",
+ suggestion,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/serde_api.rs b/src/tools/clippy/clippy_lints/src/serde_api.rs
new file mode 100644
index 000000000..fc1c2af92
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/serde_api.rs
@@ -0,0 +1,60 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{get_trait_def_id, paths};
+use rustc_hir::{Impl, Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for misuses of the serde API.
+ ///
+ /// ### Why is this bad?
+ /// Serde is very finicky about how its API should be
+ /// used, but the type system can't be used to enforce it (yet?).
+ ///
+ /// ### Example
+ /// Implementing `Visitor::visit_string` but not
+ /// `Visitor::visit_str`.
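+ ///
+ /// An illustrative sketch (the type name is hypothetical) of an impl this lint would flag:
+ /// ```rust,ignore
+ /// struct MyVisitor;
+ ///
+ /// impl<'de> serde::de::Visitor<'de> for MyVisitor {
+ /// type Value = String;
+ ///
+ /// fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ /// f.write_str("a string")
+ /// }
+ ///
+ /// // `visit_string` is implemented...
+ /// fn visit_string<E: serde::de::Error>(self, v: String) -> Result<Self::Value, E> {
+ /// Ok(v)
+ /// }
+ ///
+ /// // ...but `visit_str` is not, so borrowed string input falls back to the default
+ /// // implementation instead of being handled the same way.
+ /// }
+ /// ```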
+ #[clippy::version = "pre 1.29.0"]
+ pub SERDE_API_MISUSE,
+ correctness,
+ "various things that will negatively affect your serde experience"
+}
+
+declare_lint_pass!(SerdeApi => [SERDE_API_MISUSE]);
+
+impl<'tcx> LateLintPass<'tcx> for SerdeApi {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if let ItemKind::Impl(Impl {
+ of_trait: Some(ref trait_ref),
+ items,
+ ..
+ }) = item.kind
+ {
+ let did = trait_ref.path.res.def_id();
+ if let Some(visit_did) = get_trait_def_id(cx, &paths::SERDE_DE_VISITOR) {
+ if did == visit_did {
+ let mut seen_str = None;
+ let mut seen_string = None;
+ for item in *items {
+ match item.ident.as_str() {
+ "visit_str" => seen_str = Some(item.span),
+ "visit_string" => seen_string = Some(item.span),
+ _ => {},
+ }
+ }
+ if let Some(span) = seen_string {
+ if seen_str.is_none() {
+ span_lint(
+ cx,
+ SERDE_API_MISUSE,
+ span,
+ "you should not implement `visit_string` without also implementing `visit_str`",
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs
new file mode 100644
index 000000000..5dcdab5b8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/shadow.rs
@@ -0,0 +1,252 @@
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::source::snippet;
+use clippy_utils::visitors::is_local_used;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def::Res;
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::hir_id::ItemLocalId;
+use rustc_hir::{Block, Body, BodyOwnerKind, Expr, ExprKind, HirId, Let, Node, Pat, PatKind, QPath, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{Span, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for bindings that shadow other bindings already in
+ /// scope, while just changing reference level or mutability.
+ ///
+ /// ### Why is this bad?
+ /// Not much; in fact it's a very common pattern in Rust
+ /// code. Still, some may opt to avoid it in their code base, and they can set this
+ /// lint to `Warn` to do so.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1;
+ /// let x = &x;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let x = 1;
+ /// let y = &x; // use different variable name
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SHADOW_SAME,
+ restriction,
+ "rebinding a name to itself, e.g., `let mut x = &mut x`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for bindings that shadow other bindings already in
+ /// scope, while reusing the original value.
+ ///
+ /// ### Why is this bad?
+ /// Not too much; in fact it's a common pattern in Rust
+ /// code. Still, some argue that name shadowing like this hurts readability,
+ /// because a value may be bound to different things depending on its position in
+ /// the code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 2;
+ /// let x = x + 1;
+ /// ```
+ /// Use a different variable name instead:
+ /// ```rust
+ /// let x = 2;
+ /// let y = x + 1;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SHADOW_REUSE,
+ restriction,
+ "rebinding a name to an expression that re-uses the original value, e.g., `let x = x + 1`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for bindings that shadow other bindings already in
+ /// scope, either without an initialization or with one that does not even use
+ /// the original value.
+ ///
+ /// ### Why is this bad?
+ /// Name shadowing can hurt readability, especially in
+ /// large code bases, because it is easy to lose track of the active binding at
+ /// any place in the code. This can be alleviated by either giving more specific
+ /// names to bindings or introducing more scopes to contain the bindings.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let y = 1;
+ /// # let z = 2;
+ /// let x = y;
+ /// let x = z; // shadows the earlier binding
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let y = 1;
+ /// # let z = 2;
+ /// let x = y;
+ /// let w = z; // use different variable name
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SHADOW_UNRELATED,
+ restriction,
+ "rebinding a name without even using the original value"
+}
+
+#[derive(Default)]
+pub(crate) struct Shadow {
+ bindings: Vec<(FxHashMap<Symbol, Vec<ItemLocalId>>, LocalDefId)>,
+}
+
+impl_lint_pass!(Shadow => [SHADOW_SAME, SHADOW_REUSE, SHADOW_UNRELATED]);
+
+impl<'tcx> LateLintPass<'tcx> for Shadow {
+ fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
+ let (id, ident) = match pat.kind {
+ PatKind::Binding(_, hir_id, ident, _) => (hir_id, ident),
+ _ => return,
+ };
+
+ if pat.span.desugaring_kind().is_some() {
+ return;
+ }
+
+ if ident.span.from_expansion() || ident.span.is_dummy() {
+ return;
+ }
+
+ let HirId { owner, local_id } = id;
+ // get (or insert) the list of items for this owner and symbol
+ let (ref mut data, scope_owner) = *self.bindings.last_mut().unwrap();
+ let items_with_name = data.entry(ident.name).or_default();
+
+ // check other bindings with the same name, most recently seen first
+ for &prev in items_with_name.iter().rev() {
+ if prev == local_id {
+ // repeated binding in an `Or` pattern
+ return;
+ }
+
+ if is_shadow(cx, scope_owner, prev, local_id) {
+ let prev_hir_id = HirId { owner, local_id: prev };
+ lint_shadow(cx, pat, prev_hir_id, ident.span);
+ // only lint against the "nearest" shadowed binding
+ break;
+ }
+ }
+ // store the binding
+ items_with_name.push(local_id);
+ }
+
+ fn check_body(&mut self, cx: &LateContext<'_>, body: &Body<'_>) {
+ let hir = cx.tcx.hir();
+ let owner_id = hir.body_owner_def_id(body.id());
+ if !matches!(hir.body_owner_kind(owner_id), BodyOwnerKind::Closure) {
+ self.bindings.push((FxHashMap::default(), owner_id));
+ }
+ }
+
+ fn check_body_post(&mut self, cx: &LateContext<'_>, body: &Body<'_>) {
+ let hir = cx.tcx.hir();
+ if !matches!(
+ hir.body_owner_kind(hir.body_owner_def_id(body.id())),
+ BodyOwnerKind::Closure
+ ) {
+ self.bindings.pop();
+ }
+ }
+}
+
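+/// Returns `true` if the binding recorded as `second` lives within the scope of the binding
+/// recorded as `first`, i.e. the later binding shadows the earlier one.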
+fn is_shadow(cx: &LateContext<'_>, owner: LocalDefId, first: ItemLocalId, second: ItemLocalId) -> bool {
+ let scope_tree = cx.tcx.region_scope_tree(owner.to_def_id());
+ if let Some(first_scope) = scope_tree.var_scope(first) {
+ if let Some(second_scope) = scope_tree.var_scope(second) {
+ return scope_tree.is_subscope_of(second_scope, first_scope);
+ }
+ }
+
+ false
+}
+
+fn lint_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, shadowed: HirId, span: Span) {
+ let (lint, msg) = match find_init(cx, pat.hir_id) {
+ Some(expr) if is_self_shadow(cx, pat, expr, shadowed) => {
+ let msg = format!(
+ "`{}` is shadowed by itself in `{}`",
+ snippet(cx, pat.span, "_"),
+ snippet(cx, expr.span, "..")
+ );
+ (SHADOW_SAME, msg)
+ },
+ Some(expr) if is_local_used(cx, expr, shadowed) => {
+ let msg = format!("`{}` is shadowed", snippet(cx, pat.span, "_"));
+ (SHADOW_REUSE, msg)
+ },
+ _ => {
+ let msg = format!("`{}` shadows a previous, unrelated binding", snippet(cx, pat.span, "_"));
+ (SHADOW_UNRELATED, msg)
+ },
+ };
+ span_lint_and_note(
+ cx,
+ lint,
+ span,
+ &msg,
+ Some(cx.tcx.hir().span(shadowed)),
+ "previous binding is here",
+ );
+}
+
+/// Returns true if the expression is a simple transformation of a local binding such as `&x`
+fn is_self_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, mut expr: &Expr<'_>, hir_id: HirId) -> bool {
+ let hir = cx.tcx.hir();
+ let is_direct_binding = hir
+ .parent_iter(pat.hir_id)
+ .map_while(|(_id, node)| match node {
+ Node::Pat(pat) => Some(pat),
+ _ => None,
+ })
+ .all(|pat| matches!(pat.kind, PatKind::Ref(..) | PatKind::Or(_)));
+ if !is_direct_binding {
+ return false;
+ }
+ loop {
+ expr = match expr.kind {
+ ExprKind::Box(e)
+ | ExprKind::AddrOf(_, _, e)
+ | ExprKind::Block(
+ &Block {
+ stmts: [],
+ expr: Some(e),
+ ..
+ },
+ _,
+ )
+ | ExprKind::Unary(UnOp::Deref, e) => e,
+ ExprKind::Path(QPath::Resolved(None, path)) => break path.res == Res::Local(hir_id),
+ _ => break false,
+ }
+ }
+}
+
+/// Finds the "init" expression for a pattern: `let <pat> = <init>;` (or `if let`) or
+/// `match <init> { .., <pat> => .., .. }`
+fn find_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Expr<'tcx>> {
+ for (_, node) in cx.tcx.hir().parent_iter(hir_id) {
+ let init = match node {
+ Node::Arm(_) | Node::Pat(_) => continue,
+ Node::Expr(expr) => match expr.kind {
+ ExprKind::Match(e, _, _) | ExprKind::Let(&Let { init: e, .. }) => Some(e),
+ _ => None,
+ },
+ Node::Local(local) => local.init,
+ _ => None,
+ };
+ return init;
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs b/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs
new file mode 100644
index 000000000..3dc995e2f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs
@@ -0,0 +1,63 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_ast::ast::{GenericParam, GenericParamKind};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for lifetimes with names which are one character
+ /// long.
+ ///
+ /// ### Why is this bad?
+ /// A single character is likely not enough to express the
+ /// purpose of a lifetime. Using a longer name can make code
+ /// easier to understand, especially for those who are new to
+ /// Rust.
+ ///
+ /// ### Known problems
+ /// Rust programmers and learning resources tend to use single
+ /// character lifetimes, so this lint is at odds with the
+ /// ecosystem at large. In addition, the lifetime's purpose may
+ /// be obvious or, rarely, expressible in one character.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct DiagnosticCtx<'a> {
+ /// source: &'a str,
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct DiagnosticCtx<'src> {
+ /// source: &'src str,
+ /// }
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub SINGLE_CHAR_LIFETIME_NAMES,
+ restriction,
+ "warns against single-character lifetime names"
+}
+
+declare_lint_pass!(SingleCharLifetimeNames => [SINGLE_CHAR_LIFETIME_NAMES]);
+
+impl EarlyLintPass for SingleCharLifetimeNames {
+ fn check_generic_param(&mut self, ctx: &EarlyContext<'_>, param: &GenericParam) {
+ if in_external_macro(ctx.sess(), param.ident.span) {
+ return;
+ }
+
+ if let GenericParamKind::Lifetime = param.kind {
+ if !param.is_placeholder && param.ident.as_str().len() <= 2 {
+ span_lint_and_help(
+ ctx,
+ SINGLE_CHAR_LIFETIME_NAMES,
+ param.ident.span,
+ "single-character lifetime names are likely uninformative",
+ None,
+ "use a more informative name",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
new file mode 100644
index 000000000..66b795130
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
@@ -0,0 +1,175 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
+use rustc_ast::{ptr::P, Crate, Item, ItemKind, MacroDef, ModKind, UseTreeKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{edition::Edition, symbol::kw, Span, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for imports with a single-component use path.
+ ///
+ /// ### Why is this bad?
+ /// An import with a single-component use path, such as `use cratename;`,
+ /// is not necessary and should thus be removed.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use regex;
+ ///
+ /// fn main() {
+ /// regex::Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+ /// }
+ /// ```
+ /// Better as
+ /// ```rust,ignore
+ /// fn main() {
+ /// regex::Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+ /// }
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub SINGLE_COMPONENT_PATH_IMPORTS,
+ style,
+ "imports with single component path are redundant"
+}
+
+declare_lint_pass!(SingleComponentPathImports => [SINGLE_COMPONENT_PATH_IMPORTS]);
+
+impl EarlyLintPass for SingleComponentPathImports {
+ fn check_crate(&mut self, cx: &EarlyContext<'_>, krate: &Crate) {
+ if cx.sess().opts.edition < Edition::Edition2018 {
+ return;
+ }
+ check_mod(cx, &krate.items);
+ }
+}
+
+fn check_mod(cx: &EarlyContext<'_>, items: &[P<Item>]) {
+ // keep track of imports reused with `self` keyword,
+ // such as `self::crypto_hash` in the example below
+ // ```rust,ignore
+ // use self::crypto_hash::{Algorithm, Hasher};
+ // ```
+ let mut imports_reused_with_self = Vec::new();
+
+ // keep track of single use statements
+ // such as `crypto_hash` in the example below
+ // ```rust,ignore
+ // use crypto_hash;
+ // ```
+ let mut single_use_usages = Vec::new();
+
+ // keep track of macros defined in the module as we don't want the lint to trigger on them (#7106)
+ // ```rust,ignore
+ // macro_rules! foo { () => {} };
+ // pub(crate) use foo;
+ // ```
+ let mut macros = Vec::new();
+
+ for item in items {
+ track_uses(
+ cx,
+ item,
+ &mut imports_reused_with_self,
+ &mut single_use_usages,
+ &mut macros,
+ );
+ }
+
+ for (name, span, can_suggest) in single_use_usages {
+ if !imports_reused_with_self.contains(&name) {
+ if can_suggest {
+ span_lint_and_sugg(
+ cx,
+ SINGLE_COMPONENT_PATH_IMPORTS,
+ span,
+ "this import is redundant",
+ "remove it entirely",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ span_lint_and_help(
+ cx,
+ SINGLE_COMPONENT_PATH_IMPORTS,
+ span,
+ "this import is redundant",
+ None,
+ "remove this import",
+ );
+ }
+ }
+ }
+}
+
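+/// Records, for a single item, any single-component `use`s it introduces, any modules it
+/// re-imports through `self::`, and any `macro_rules!` names it defines.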
+fn track_uses(
+ cx: &EarlyContext<'_>,
+ item: &Item,
+ imports_reused_with_self: &mut Vec<Symbol>,
+ single_use_usages: &mut Vec<(Symbol, Span, bool)>,
+ macros: &mut Vec<Symbol>,
+) {
+ if item.span.from_expansion() || item.vis.kind.is_pub() {
+ return;
+ }
+
+ match &item.kind {
+ ItemKind::Mod(_, ModKind::Loaded(ref items, ..)) => {
+ check_mod(cx, items);
+ },
+ ItemKind::MacroDef(MacroDef { macro_rules: true, .. }) => {
+ macros.push(item.ident.name);
+ },
+ ItemKind::Use(use_tree) => {
+ let segments = &use_tree.prefix.segments;
+
+ // keep track of `use some_module;` usages
+ if segments.len() == 1 {
+ if let UseTreeKind::Simple(None, _, _) = use_tree.kind {
+ let name = segments[0].ident.name;
+ if !macros.contains(&name) {
+ single_use_usages.push((name, item.span, true));
+ }
+ }
+ return;
+ }
+
+ if segments.is_empty() {
+ // keep track of `use {some_module, some_other_module};` usages
+ if let UseTreeKind::Nested(trees) = &use_tree.kind {
+ for tree in trees {
+ let segments = &tree.0.prefix.segments;
+ if segments.len() == 1 {
+ if let UseTreeKind::Simple(None, _, _) = tree.0.kind {
+ let name = segments[0].ident.name;
+ if !macros.contains(&name) {
+ single_use_usages.push((name, tree.0.span, false));
+ }
+ }
+ }
+ }
+ }
+ } else {
+ // keep track of `use self::some_module` usages
+ if segments[0].ident.name == kw::SelfLower {
+ // simple case such as `use self::module::SomeStruct`
+ if segments.len() > 1 {
+ imports_reused_with_self.push(segments[1].ident.name);
+ return;
+ }
+
+ // nested case such as `use self::{module1::Struct1, module2::Struct2}`
+ if let UseTreeKind::Nested(trees) = &use_tree.kind {
+ for tree in trees {
+ let segments = &tree.0.prefix.segments;
+ if !segments.is_empty() {
+ imports_reused_with_self.push(segments[0].ident.name);
+ }
+ }
+ }
+ }
+ }
+ },
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
new file mode 100644
index 000000000..bfb9f0d01
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
@@ -0,0 +1,154 @@
+//! Lint on use of `size_of` or `size_of_val` of T in an expression
+//! expecting a count of T
+
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_hir::BinOpKind;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, Ty, TypeAndMut};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects expressions where
+ /// `size_of::<T>` or `size_of_val::<T>` is used as a
+ /// count of elements of type `T`
+ ///
+ /// ### Why is this bad?
+ /// These functions expect a count
+ /// of `T` and not a number of bytes
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// # use std::ptr::copy_nonoverlapping;
+ /// # use std::mem::size_of;
+ /// const SIZE: usize = 128;
+ /// let x = [2u8; SIZE];
+ /// let mut y = [2u8; SIZE];
+ /// unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>() * SIZE) };
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub SIZE_OF_IN_ELEMENT_COUNT,
+ correctness,
+ "using `size_of::<T>` or `size_of_val::<T>` where a count of elements of `T` is expected"
+}
+
+declare_lint_pass!(SizeOfInElementCount => [SIZE_OF_IN_ELEMENT_COUNT]);
+
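+/// Walks `expr` through multiplications, divisions and casts looking for a `size_of::<T>()` or
+/// `size_of_val::<T>()` call and returns the `T` it was instantiated with; `inverted` flips when
+/// descending into a divisor, so a size that is divided out again is not reported.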
+fn get_size_of_ty<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, inverted: bool) -> Option<Ty<'tcx>> {
+ match expr.kind {
+ ExprKind::Call(count_func, _func_args) => {
+ if_chain! {
+ if !inverted;
+ if let ExprKind::Path(ref count_func_qpath) = count_func.kind;
+ if let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id();
+ if matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::mem_size_of | sym::mem_size_of_val));
+ then {
+ cx.typeck_results().node_substs(count_func.hir_id).types().next()
+ } else {
+ None
+ }
+ }
+ },
+ ExprKind::Binary(op, left, right) if BinOpKind::Mul == op.node => {
+ get_size_of_ty(cx, left, inverted).or_else(|| get_size_of_ty(cx, right, inverted))
+ },
+ ExprKind::Binary(op, left, right) if BinOpKind::Div == op.node => {
+ get_size_of_ty(cx, left, inverted).or_else(|| get_size_of_ty(cx, right, !inverted))
+ },
+ ExprKind::Cast(expr, _) => get_size_of_ty(cx, expr, inverted),
+ _ => None,
+ }
+}
+
+fn get_pointee_ty_and_count_expr<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+) -> Option<(Ty<'tcx>, &'tcx Expr<'tcx>)> {
+ const FUNCTIONS: [&[&str]; 8] = [
+ &paths::PTR_COPY_NONOVERLAPPING,
+ &paths::PTR_COPY,
+ &paths::PTR_WRITE_BYTES,
+ &paths::PTR_SWAP_NONOVERLAPPING,
+ &paths::PTR_SLICE_FROM_RAW_PARTS,
+ &paths::PTR_SLICE_FROM_RAW_PARTS_MUT,
+ &paths::SLICE_FROM_RAW_PARTS,
+ &paths::SLICE_FROM_RAW_PARTS_MUT,
+ ];
+ const METHODS: [&str; 11] = [
+ "write_bytes",
+ "copy_to",
+ "copy_from",
+ "copy_to_nonoverlapping",
+ "copy_from_nonoverlapping",
+ "add",
+ "wrapping_add",
+ "sub",
+ "wrapping_sub",
+ "offset",
+ "wrapping_offset",
+ ];
+
+ if_chain! {
+ // Find calls to ptr::{copy, copy_nonoverlapping}
+ // and ptr::{swap_nonoverlapping, write_bytes},
+ if let ExprKind::Call(func, [.., count]) = expr.kind;
+ if let ExprKind::Path(ref func_qpath) = func.kind;
+ if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
+ if FUNCTIONS.iter().any(|func_path| match_def_path(cx, def_id, func_path));
+
+ // Get the pointee type
+ if let Some(pointee_ty) = cx.typeck_results().node_substs(func.hir_id).types().next();
+ then {
+ return Some((pointee_ty, count));
+ }
+ };
+ if_chain! {
+ // Find calls to copy_{from,to}{,_nonoverlapping} and write_bytes methods
+ if let ExprKind::MethodCall(method_path, [ptr_self, .., count], _) = expr.kind;
+ let method_ident = method_path.ident.as_str();
+ if METHODS.iter().any(|m| *m == method_ident);
+
+ // Get the pointee type
+ if let ty::RawPtr(TypeAndMut { ty: pointee_ty, .. }) =
+ cx.typeck_results().expr_ty(ptr_self).kind();
+ then {
+ return Some((*pointee_ty, count));
+ }
+ };
+ None
+}
+
+impl<'tcx> LateLintPass<'tcx> for SizeOfInElementCount {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ const HELP_MSG: &str = "use a count of elements instead of a count of bytes\
+ , it already gets multiplied by the size of the type";
+
+ const LINT_MSG: &str = "found a count of bytes \
+ instead of a count of elements of `T`";
+
+ if_chain! {
+ // Find calls to functions with an element count parameter and get
+ // the pointee type and count parameter expression
+ if let Some((pointee_ty, count_expr)) = get_pointee_ty_and_count_expr(cx, expr);
+
+ // Find a size_of call in the count parameter expression and
+ // check that it's the same type
+ if let Some(ty_used_for_size_of) = get_size_of_ty(cx, count_expr, false);
+ if pointee_ty == ty_used_for_size_of;
+ then {
+ span_lint_and_help(
+ cx,
+ SIZE_OF_IN_ELEMENT_COUNT,
+ count_expr.span,
+ LINT_MSG,
+ None,
+ HELP_MSG
+ );
+ }
+ };
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
new file mode 100644
index 000000000..2c8aa17e8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
@@ -0,0 +1,312 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{get_enclosing_block, is_expr_path_def_path, path_to_local, path_to_local_id, paths, SpanlessEq};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_block, walk_expr, walk_stmt, Visitor};
+use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, PatKind, QPath, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for slow zero-filled vector initialization
+ ///
+ /// ### Why is this bad?
+ /// These structures are non-idiomatic and less efficient than simply using
+ /// `vec![0; len]`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use core::iter::repeat;
+ /// # let len = 4;
+ /// let mut vec1 = Vec::with_capacity(len);
+ /// vec1.resize(len, 0);
+ ///
+ /// let mut vec1 = Vec::with_capacity(len);
+ /// vec1.resize(vec1.capacity(), 0);
+ ///
+ /// let mut vec2 = Vec::with_capacity(len);
+ /// vec2.extend(repeat(0).take(len));
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # let len = 4;
+ /// let mut vec1 = vec![0; len];
+ /// let mut vec2 = vec![0; len];
+ /// ```
+ #[clippy::version = "1.32.0"]
+ pub SLOW_VECTOR_INITIALIZATION,
+ perf,
+ "slow vector initialization"
+}
+
+declare_lint_pass!(SlowVectorInit => [SLOW_VECTOR_INITIALIZATION]);
+
+/// `VecAllocation` contains data regarding a vector allocated with `with_capacity` and then
+/// assigned to a variable. For example, `let mut vec = Vec::with_capacity(0)` or
+/// `vec = Vec::with_capacity(0)`
+struct VecAllocation<'tcx> {
+ /// HirId of the variable
+ local_id: HirId,
+
+ /// Reference to the expression which allocates the vector
+ allocation_expr: &'tcx Expr<'tcx>,
+
+ /// Reference to the expression used as the argument of the `with_capacity` call. This is used
+ /// to only match slow zero-filling idioms of the same length as the vector initialization.
+ len_expr: &'tcx Expr<'tcx>,
+}
+
+/// Type of slow initialization
+enum InitializationType<'tcx> {
+ /// Extend is a slow initialization with the form `vec.extend(repeat(0).take(..))`
+ Extend(&'tcx Expr<'tcx>),
+
+ /// Resize is a slow initialization with the form `vec.resize(.., 0)`
+ Resize(&'tcx Expr<'tcx>),
+}
+
+impl<'tcx> LateLintPass<'tcx> for SlowVectorInit {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // Matches initialization on reassignments. For example: `vec = Vec::with_capacity(100)`
+ if_chain! {
+ if let ExprKind::Assign(left, right, _) = expr.kind;
+
+ // Extract variable
+ if let Some(local_id) = path_to_local(left);
+
+ // Extract len argument
+ if let Some(len_arg) = Self::is_vec_with_capacity(cx, right);
+
+ then {
+ let vi = VecAllocation {
+ local_id,
+ allocation_expr: right,
+ len_expr: len_arg,
+ };
+
+ Self::search_initialization(cx, vi, expr.hir_id);
+ }
+ }
+ }
+
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ // Matches statements which initialize vectors. For example: `let mut vec = Vec::with_capacity(10)`
+ if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let PatKind::Binding(BindingAnnotation::Mutable, local_id, _, None) = local.pat.kind;
+ if let Some(init) = local.init;
+ if let Some(len_arg) = Self::is_vec_with_capacity(cx, init);
+
+ then {
+ let vi = VecAllocation {
+ local_id,
+ allocation_expr: init,
+ len_expr: len_arg,
+ };
+
+ Self::search_initialization(cx, vi, stmt.hir_id);
+ }
+ }
+ }
+}
+
+impl SlowVectorInit {
+ /// Checks if the given expression is `Vec::with_capacity(..)`. It returns the expression
+ /// of the first argument of the `with_capacity` call if it matches, or `None` if it does not.
+ fn is_vec_with_capacity<'tcx>(cx: &LateContext<'_>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
+ if_chain! {
+ if let ExprKind::Call(func, [arg]) = expr.kind;
+ if let ExprKind::Path(QPath::TypeRelative(ty, name)) = func.kind;
+ if name.ident.as_str() == "with_capacity";
+ if is_type_diagnostic_item(cx, cx.typeck_results().node_type(ty.hir_id), sym::Vec);
+ then {
+ Some(arg)
+ } else {
+ None
+ }
+ }
+ }
+
+ /// Search initialization for the given vector
+ fn search_initialization<'tcx>(cx: &LateContext<'tcx>, vec_alloc: VecAllocation<'tcx>, parent_node: HirId) {
+ let enclosing_body = get_enclosing_block(cx, parent_node);
+
+ if enclosing_body.is_none() {
+ return;
+ }
+
+ let mut v = VectorInitializationVisitor {
+ cx,
+ vec_alloc,
+ slow_expression: None,
+ initialization_found: false,
+ };
+
+ v.visit_block(enclosing_body.unwrap());
+
+ if let Some(ref allocation_expr) = v.slow_expression {
+ Self::lint_initialization(cx, allocation_expr, &v.vec_alloc);
+ }
+ }
+
+ fn lint_initialization<'tcx>(
+ cx: &LateContext<'tcx>,
+ initialization: &InitializationType<'tcx>,
+ vec_alloc: &VecAllocation<'_>,
+ ) {
+ match initialization {
+ InitializationType::Extend(e) | InitializationType::Resize(e) => {
+ Self::emit_lint(cx, e, vec_alloc, "slow zero-filling initialization");
+ },
+ };
+ }
+
+ fn emit_lint<'tcx>(cx: &LateContext<'tcx>, slow_fill: &Expr<'_>, vec_alloc: &VecAllocation<'_>, msg: &str) {
+ let len_expr = Sugg::hir(cx, vec_alloc.len_expr, "len");
+
+ span_lint_and_then(cx, SLOW_VECTOR_INITIALIZATION, slow_fill.span, msg, |diag| {
+ diag.span_suggestion(
+ vec_alloc.allocation_expr.span,
+ "consider replace allocation with",
+ format!("vec![0; {}]", len_expr),
+ Applicability::Unspecified,
+ );
+ });
+ }
+}
+
+/// `VectorInitializationVisitor` searches for unsafe or slow vector initializations for the given
+/// vector.
+struct VectorInitializationVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+
+ /// Contains the information about the vector allocation being checked.
+ vec_alloc: VecAllocation<'tcx>,
+
+ /// Contains the slow initialization expression, if one was found.
+ slow_expression: Option<InitializationType<'tcx>>,
+
+ /// `true` if the initialization of the vector has been found on the visited block.
+ initialization_found: bool,
+}
+
+impl<'a, 'tcx> VectorInitializationVisitor<'a, 'tcx> {
+ /// Checks if the given expression is extending a vector with `repeat(0).take(..)`
+ fn search_slow_extend_filling(&mut self, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if self.initialization_found;
+ if let ExprKind::MethodCall(path, [self_arg, extend_arg], _) = expr.kind;
+ if path_to_local_id(self_arg, self.vec_alloc.local_id);
+ if path.ident.name == sym!(extend);
+ if self.is_repeat_take(extend_arg);
+
+ then {
+ self.slow_expression = Some(InitializationType::Extend(expr));
+ }
+ }
+ }
+
+ /// Checks if the given expression is resizing a vector with 0
+ fn search_slow_resize_filling(&mut self, expr: &'tcx Expr<'_>) {
+ if self.initialization_found
+ && let ExprKind::MethodCall(path, [self_arg, len_arg, fill_arg], _) = expr.kind
+ && path_to_local_id(self_arg, self.vec_alloc.local_id)
+ && path.ident.name == sym!(resize)
+ // Check that it is filled with 0
+ && let ExprKind::Lit(ref lit) = fill_arg.kind
+ && let LitKind::Int(0, _) = lit.node {
+ // Check that the len expression is equal to the `with_capacity` expression
+ if SpanlessEq::new(self.cx).eq_expr(len_arg, self.vec_alloc.len_expr) {
+ self.slow_expression = Some(InitializationType::Resize(expr));
+ } else if let ExprKind::MethodCall(path, _, _) = len_arg.kind && path.ident.as_str() == "capacity" {
+ self.slow_expression = Some(InitializationType::Resize(expr));
+ }
+ }
+ }
+
+ /// Returns `true` if the given expression is `repeat(0).take(...)`
+ fn is_repeat_take(&self, expr: &Expr<'_>) -> bool {
+ if_chain! {
+ if let ExprKind::MethodCall(take_path, take_args, _) = expr.kind;
+ if take_path.ident.name == sym!(take);
+
+ // Check that take is applied to `repeat(0)`
+ if let Some(repeat_expr) = take_args.get(0);
+ if self.is_repeat_zero(repeat_expr);
+
+ if let Some(len_arg) = take_args.get(1);
+
+ then {
+ // Check that the len expression is equal to the `with_capacity` expression
+ if SpanlessEq::new(self.cx).eq_expr(len_arg, self.vec_alloc.len_expr) {
+ return true;
+ } else if let ExprKind::MethodCall(path, _, _) = len_arg.kind && path.ident.as_str() == "capacity" {
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+
+ /// Returns `true` if the given expression is `repeat(0)`
+ fn is_repeat_zero(&self, expr: &Expr<'_>) -> bool {
+ if_chain! {
+ if let ExprKind::Call(fn_expr, [repeat_arg]) = expr.kind;
+ if is_expr_path_def_path(self.cx, fn_expr, &paths::ITER_REPEAT);
+ if let ExprKind::Lit(ref lit) = repeat_arg.kind;
+ if let LitKind::Int(0, _) = lit.node;
+
+ then {
+ true
+ } else {
+ false
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for VectorInitializationVisitor<'a, 'tcx> {
+ fn visit_stmt(&mut self, stmt: &'tcx Stmt<'_>) {
+ if self.initialization_found {
+ match stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => {
+ self.search_slow_extend_filling(expr);
+ self.search_slow_resize_filling(expr);
+ },
+ _ => (),
+ }
+
+ self.initialization_found = false;
+ } else {
+ walk_stmt(self, stmt);
+ }
+ }
+
+ fn visit_block(&mut self, block: &'tcx Block<'_>) {
+ if self.initialization_found {
+ if let Some(s) = block.stmts.get(0) {
+ self.visit_stmt(s);
+ }
+
+ self.initialization_found = false;
+ } else {
+ walk_block(self, block);
+ }
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ // Skip all the expressions previous to the vector initialization
+ if self.vec_alloc.allocation_expr.hir_id == expr.hir_id {
+ self.initialization_found = true;
+ }
+
+ walk_expr(self, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/stable_sort_primitive.rs b/src/tools/clippy/clippy_lints/src/stable_sort_primitive.rs
new file mode 100644
index 000000000..a6c685df7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/stable_sort_primitive.rs
@@ -0,0 +1,145 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{is_slice_of_primitives, sugg::Sugg};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// When sorting primitive values (integers, bools, chars, as well
+ /// as arrays, slices, and tuples of such items), it is typically better to
+ /// use an unstable sort than a stable sort.
+ ///
+ /// ### Why is this bad?
+ /// Typically, using a stable sort consumes more memory and CPU cycles.
+ /// Because values which compare equal are identical, preserving their
+ /// relative order (the guarantee that a stable sort provides) means
+ /// nothing, while the extra costs still apply.
+ ///
+ /// ### Known problems
+ ///
+ /// As pointed out in
+ /// [issue #8241](https://github.com/rust-lang/rust-clippy/issues/8241),
+ /// a stable sort can instead be significantly faster for certain scenarios
+ /// (e.g. when a sorted vector is extended with new data and re-sorted).
+ ///
+ /// For more information and benchmarking results, please refer to the
+ /// issue linked above.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut vec = vec![2, 1, 3];
+ /// vec.sort();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let mut vec = vec![2, 1, 3];
+ /// vec.sort_unstable();
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub STABLE_SORT_PRIMITIVE,
+ pedantic,
+ "use of sort() when sort_unstable() is equivalent"
+}
+
+declare_lint_pass!(StableSortPrimitive => [STABLE_SORT_PRIMITIVE]);
+
+/// The three "kinds" of sorts
+enum SortingKind {
+ Vanilla,
+ /* The other kinds of lint are currently commented out because they
+ * can map distinct values to equal ones. If the key function is
+ * provably one-to-one, or if the Cmp function conserves equality,
+ * then they could be linted on, but I don't know if we can check
+ * for that. */
+
+ /* ByKey,
+ * ByCmp, */
+}
+impl SortingKind {
+ /// The name of the stable version of this kind of sort
+ fn stable_name(&self) -> &str {
+ match self {
+ SortingKind::Vanilla => "sort",
+ /* SortingKind::ByKey => "sort_by_key",
+ * SortingKind::ByCmp => "sort_by", */
+ }
+ }
+ /// The name of the unstable version of this kind of sort
+ fn unstable_name(&self) -> &str {
+ match self {
+ SortingKind::Vanilla => "sort_unstable",
+ /* SortingKind::ByKey => "sort_unstable_by_key",
+ * SortingKind::ByCmp => "sort_unstable_by", */
+ }
+ }
+ /// Takes the name of a function call and returns the kind of sort
+ /// that corresponds to that function name (or `None` if there is no match)
+ fn from_stable_name(name: &str) -> Option<SortingKind> {
+ match name {
+ "sort" => Some(SortingKind::Vanilla),
+ // "sort_by" => Some(SortingKind::ByCmp),
+ // "sort_by_key" => Some(SortingKind::ByKey),
+ _ => None,
+ }
+ }
+}
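As a worked illustration of the comment above (why the `ByKey`/`ByCmp` variants stay commented out): with a key function that is not one-to-one, distinct elements can compare equal, so stable and unstable sorts are not interchangeable. A minimal sketch:

```rust
fn main() {
    let mut stable = vec![(1, "a"), (0, "b"), (1, "c")];
    let mut unstable = stable.clone();

    stable.sort_by_key(|&(k, _)| k);
    unstable.sort_unstable_by_key(|&(k, _)| k);

    // A stable sort keeps (1, "a") before (1, "c"); an unstable sort makes no
    // such promise, so suggesting it here could change observable behaviour.
    assert_eq!(stable, [(0, "b"), (1, "a"), (1, "c")]);
    assert_eq!(unstable.len(), 3); // order of the two `1`-keyed elements is unspecified
}
```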
+
+/// A detected instance of this lint
+struct LintDetection {
+ slice_name: String,
+ method: SortingKind,
+ method_args: String,
+ slice_type: String,
+}
+
+fn detect_stable_sort_primitive(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<LintDetection> {
+ if_chain! {
+ if let ExprKind::MethodCall(method_name, args, _) = &expr.kind;
+ if let Some(slice) = &args.get(0);
+ if let Some(method) = SortingKind::from_stable_name(method_name.ident.name.as_str());
+ if let Some(slice_type) = is_slice_of_primitives(cx, slice);
+ then {
+ let args_str = args.iter().skip(1).map(|arg| Sugg::hir(cx, arg, "..").to_string()).collect::<Vec<String>>().join(", ");
+ Some(LintDetection { slice_name: Sugg::hir(cx, slice, "..").to_string(), method, method_args: args_str, slice_type })
+ } else {
+ None
+ }
+ }
+}
+
+impl LateLintPass<'_> for StableSortPrimitive {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if let Some(detection) = detect_stable_sort_primitive(cx, expr) {
+ span_lint_and_then(
+ cx,
+ STABLE_SORT_PRIMITIVE,
+ expr.span,
+ format!(
+ "used `{}` on primitive type `{}`",
+ detection.method.stable_name(),
+ detection.slice_type,
+ )
+ .as_str(),
+ |diag| {
+ diag.span_suggestion(
+ expr.span,
+ "try",
+ format!(
+ "{}.{}({})",
+ detection.slice_name,
+ detection.method.unstable_name(),
+ detection.method_args,
+ ),
+ Applicability::MachineApplicable,
+ );
+ diag.note(
+ "an unstable sort typically performs faster without any observable difference for this data type",
+ );
+ },
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
new file mode 100644
index 000000000..ffd63cc68
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
@@ -0,0 +1,148 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{def::Res, HirId, Path, PathSegment};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{sym, symbol::kw, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Finds items imported through `std` when available through `core`.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// Crates which have `no_std` compatibility may wish to ensure types are imported from `core` so that
+ /// disabling `std` does not cause the crate to fail to compile. This lint is also useful for crates
+ /// migrating to become `no_std` compatible.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::hash::Hasher;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use core::hash::Hasher;
+ /// ```
+ #[clippy::version = "1.64.0"]
+ pub STD_INSTEAD_OF_CORE,
+ restriction,
+ "type is imported from std when available in core"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Finds items imported through `std` when available through `alloc`.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// Crates which have `no_std` compatibility and require `alloc` may wish to ensure types are imported from
+ /// `alloc` so that disabling `std` does not cause the crate to fail to compile. This lint is also useful
+ /// for crates migrating to become `no_std` compatible.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::vec::Vec;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # extern crate alloc;
+ /// use alloc::vec::Vec;
+ /// ```
+ #[clippy::version = "1.64.0"]
+ pub STD_INSTEAD_OF_ALLOC,
+ restriction,
+ "type is imported from std when available in alloc"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Finds items imported through `alloc` when available through `core`.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// Crates which have `no_std` compatibility and may optionally require `alloc` may wish to ensure types are
+ /// imported from `core` so that disabling `alloc` does not cause the crate to fail to compile. This lint
+ /// is also useful for crates migrating to become `no_std` compatible.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # extern crate alloc;
+ /// use alloc::slice::from_ref;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use core::slice::from_ref;
+ /// ```
+ #[clippy::version = "1.64.0"]
+ pub ALLOC_INSTEAD_OF_CORE,
+ restriction,
+ "type is imported from alloc when available in core"
+}
+
+#[derive(Default)]
+pub struct StdReexports {
+ // Paths which can be either a module or a macro (e.g. `std::env`) will cause this check to happen
+ // twice. First for the mod, second for the macro. This is used to avoid the lint reporting for the macro
+ // when the path could also be used to access the module.
+ prev_span: Span,
+}
+impl_lint_pass!(StdReexports => [STD_INSTEAD_OF_CORE, STD_INSTEAD_OF_ALLOC, ALLOC_INSTEAD_OF_CORE]);
+
+impl<'tcx> LateLintPass<'tcx> for StdReexports {
+ fn check_path(&mut self, cx: &LateContext<'tcx>, path: &Path<'tcx>, _: HirId) {
+ if let Res::Def(_, def_id) = path.res
+ && let Some(first_segment) = get_first_segment(path)
+ {
+ let (lint, msg, help) = match first_segment.ident.name {
+ sym::std => match cx.tcx.crate_name(def_id.krate) {
+ sym::core => (
+ STD_INSTEAD_OF_CORE,
+ "used import from `std` instead of `core`",
+ "consider importing the item from `core`",
+ ),
+ sym::alloc => (
+ STD_INSTEAD_OF_ALLOC,
+ "used import from `std` instead of `alloc`",
+ "consider importing the item from `alloc`",
+ ),
+ _ => {
+ self.prev_span = path.span;
+ return;
+ },
+ },
+ sym::alloc => {
+ if cx.tcx.crate_name(def_id.krate) == sym::core {
+ (
+ ALLOC_INSTEAD_OF_CORE,
+ "used import from `alloc` instead of `core`",
+ "consider importing the item from `core`",
+ )
+ } else {
+ self.prev_span = path.span;
+ return;
+ }
+ },
+ _ => return,
+ };
+ if path.span != self.prev_span {
+ span_lint_and_help(cx, lint, path.span, msg, None, help);
+ self.prev_span = path.span;
+ }
+ }
+ }
+}
+
+/// Returns the first named segment of a [`Path`].
+///
+/// If this is a global path (such as `::std::fmt::Debug`), then the segment after [`kw::PathRoot`]
+/// is returned.
+fn get_first_segment<'tcx>(path: &Path<'tcx>) -> Option<&'tcx PathSegment<'tcx>> {
+ match path.segments {
+ // A global path will have PathRoot as the first segment. In this case, return the segment after.
+ [x, y, ..] if x.ident.name == kw::PathRoot => Some(y),
+ [x, ..] => Some(x),
+ _ => None,
+ }
+}
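A short sketch of how the path handling above plays out for the doc example's `std::hash::Hasher` (which is defined in `core::hash` and re-exported by `std`); both the plain and the globally-qualified form are caught, since `get_first_segment` skips the leading `::` (`kw::PathRoot`):

```rust
#![allow(unused_imports)]

// Both of these would trigger STD_INSTEAD_OF_CORE (`Hasher` lives in `core::hash`).
use std::hash::Hasher as StdHasher;
use ::std::hash::Hasher as GlobalStdHasher;

fn main() {}
```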
diff --git a/src/tools/clippy/clippy_lints/src/strings.rs b/src/tools/clippy/clippy_lints/src/strings.rs
new file mode 100644
index 000000000..22eb06b36
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/strings.rs
@@ -0,0 +1,517 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg};
+use clippy_utils::source::{snippet, snippet_with_applicability};
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{get_parent_expr, is_lint_allowed, match_function_call, method_calls, paths};
+use clippy_utils::{peel_blocks, SpanlessEq};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, LangItem, QPath};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for string appends of the form `x = x + y` (without
+ /// `let`!).
+ ///
+ /// ### Why is this bad?
+ /// It's not really bad, but some people think that the
+ /// `.push_str(_)` method is more readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut x = "Hello".to_owned();
+ /// x = x + ", World";
+ ///
+ /// // More readable
+ /// x += ", World";
+ /// x.push_str(", World");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub STRING_ADD_ASSIGN,
+ pedantic,
+ "using `x = x + ..` where x is a `String` instead of `push_str()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for all instances of `x + _` where `x` is of type
+ /// `String`, but only if [`string_add_assign`](#string_add_assign) does *not*
+ /// match.
+ ///
+ /// ### Why is this bad?
+ /// It's not bad in and of itself. However, this particular
+ /// `Add` implementation is asymmetric (the other operand need not be `String`,
+ /// but `x` must be), while addition as mathematically defined is symmetric; also,
+ /// the `String::push_str(_)` function is a perfectly good replacement.
+ /// Therefore, some dislike it and wish not to have it in their code.
+ ///
+ /// That said, other people think that string addition, having a long tradition
+ /// in other languages, is actually fine, which is why we decided to make this
+ /// particular lint `allow` by default.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = "Hello".to_owned();
+ /// x + ", World";
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let mut x = "Hello".to_owned();
+ /// x.push_str(", World");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub STRING_ADD,
+ restriction,
+ "using `x + ..` where x is a `String` instead of `push_str()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for the `as_bytes` method called on string literals
+ /// that contain only ASCII characters.
+ ///
+ /// ### Why is this bad?
+ /// Byte string literals (e.g., `b"foo"`) can be used
+ /// instead. They are shorter but less discoverable than `as_bytes()`.
+ ///
+ /// ### Known problems
+ /// `"str".as_bytes()` and the suggested replacement of `b"str"` are not
+ /// equivalent because they have different types. The former is `&[u8]`
+ /// while the latter is `&[u8; 3]`. That means in general they will have a
+ /// different set of methods and different trait implementations.
+ ///
+ /// ```compile_fail
+ /// fn f(v: Vec<u8>) {}
+ ///
+ /// f("...".as_bytes().to_owned()); // works
+ /// f(b"...".to_owned()); // does not work, because arg is [u8; 3] not Vec<u8>
+ ///
+ /// fn g(r: impl std::io::Read) {}
+ ///
+ /// g("...".as_bytes()); // works
+ /// g(b"..."); // does not work
+ /// ```
+ ///
+ /// The actual equivalent of `"str".as_bytes()` with the same type is not
+ /// `b"str"` but `&b"str"[..]`, which is a great deal of punctuation and not
+ /// more readable than a function call.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let bstr = "a byte string".as_bytes();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let bstr = b"a byte string";
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub STRING_LIT_AS_BYTES,
+ nursery,
+ "calling `as_bytes` on a string literal instead of using a byte string literal"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for slice operations on strings
+ ///
+ /// ### Why is this bad?
+ /// UTF-8 characters span multiple bytes, and it is easy to inadvertently confuse character
+ /// counts and string indices. This may lead to panics, and should warrant some test cases
+ /// containing wide UTF-8 characters. This lint is most useful in code that should avoid
+ /// panics at all costs.
+ ///
+ /// ### Known problems
+ /// Probably lots of false positives. If an index comes from a known valid position (e.g.
+ /// obtained via `char_indices` over the same string), it is totally OK.
+ ///
+ /// ### Example
+ /// ```rust,should_panic
+ /// &"Ölkanne"[1..];
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub STRING_SLICE,
+ restriction,
+ "slicing a string"
+}
+
+declare_lint_pass!(StringAdd => [STRING_ADD, STRING_ADD_ASSIGN, STRING_SLICE]);
+
+impl<'tcx> LateLintPass<'tcx> for StringAdd {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if in_external_macro(cx.sess(), e.span) {
+ return;
+ }
+ match e.kind {
+ ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ left,
+ _,
+ ) => {
+ if is_string(cx, left) {
+ if !is_lint_allowed(cx, STRING_ADD_ASSIGN, e.hir_id) {
+ let parent = get_parent_expr(cx, e);
+ if let Some(p) = parent {
+ if let ExprKind::Assign(target, _, _) = p.kind {
+ // avoid duplicate matches
+ if SpanlessEq::new(cx).eq_expr(target, left) {
+ return;
+ }
+ }
+ }
+ }
+ span_lint(
+ cx,
+ STRING_ADD,
+ e.span,
+ "you added something to a string. Consider using `String::push_str()` instead",
+ );
+ }
+ },
+ ExprKind::Assign(target, src, _) => {
+ if is_string(cx, target) && is_add(cx, src, target) {
+ span_lint(
+ cx,
+ STRING_ADD_ASSIGN,
+ e.span,
+ "you assigned the result of adding something to this string. Consider using \
+ `String::push_str()` instead",
+ );
+ }
+ },
+ ExprKind::Index(target, _idx) => {
+ let e_ty = cx.typeck_results().expr_ty(target).peel_refs();
+ if matches!(e_ty.kind(), ty::Str) || is_type_diagnostic_item(cx, e_ty, sym::String) {
+ span_lint(
+ cx,
+ STRING_SLICE,
+ e.span,
+ "indexing into a string may panic if the index is within a UTF-8 character",
+ );
+ }
+ },
+ _ => {},
+ }
+ }
+}
+
+fn is_string(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(e).peel_refs(), sym::String)
+}
+
+fn is_add(cx: &LateContext<'_>, src: &Expr<'_>, target: &Expr<'_>) -> bool {
+ match peel_blocks(src).kind {
+ ExprKind::Binary(
+ Spanned {
+ node: BinOpKind::Add, ..
+ },
+ left,
+ _,
+ ) => SpanlessEq::new(cx).eq_expr(target, left),
+ _ => false,
+ }
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks if a string is converted to a byte array and then cast back to a string.
+ ///
+ /// ### Why is this bad?
+ /// It's unnecessary; the string can be used directly.
+ ///
+ /// ### Example
+ /// ```rust
+ /// std::str::from_utf8(&"Hello World!".as_bytes()[6..11]).unwrap();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// &"Hello World!"[6..11];
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub STRING_FROM_UTF8_AS_BYTES,
+ complexity,
+ "casting string slices to byte slices and back"
+}
+
+ // Maximum length of a string literal for which the `b"foo"` byte string suggestion is made
+const MAX_LENGTH_BYTE_STRING_LIT: usize = 32;
+
+declare_lint_pass!(StringLitAsBytes => [STRING_LIT_AS_BYTES, STRING_FROM_UTF8_AS_BYTES]);
+
+impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ use rustc_ast::LitKind;
+
+ if_chain! {
+ // Find std::str::converts::from_utf8
+ if let Some(args) = match_function_call(cx, e, &paths::STR_FROM_UTF8);
+
+ // Find string::as_bytes
+ if let ExprKind::AddrOf(BorrowKind::Ref, _, args) = args[0].kind;
+ if let ExprKind::Index(left, right) = args.kind;
+ let (method_names, expressions, _) = method_calls(left, 1);
+ if method_names.len() == 1;
+ if expressions.len() == 1;
+ if expressions[0].len() == 1;
+ if method_names[0] == sym!(as_bytes);
+
+ // Check for slicer
+ if let ExprKind::Struct(QPath::LangItem(LangItem::Range, ..), _, _) = right.kind;
+
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let string_expression = &expressions[0][0];
+
+ let snippet_app = snippet_with_applicability(
+ cx,
+ string_expression.span, "..",
+ &mut applicability,
+ );
+
+ span_lint_and_sugg(
+ cx,
+ STRING_FROM_UTF8_AS_BYTES,
+ e.span,
+ "calling a slice of `as_bytes()` with `from_utf8` should be not necessary",
+ "try",
+ format!("Some(&{}[{}])", snippet_app, snippet(cx, right.span, "..")),
+ applicability
+ )
+ }
+ }
+
+ if_chain! {
+ if let ExprKind::MethodCall(path, args, _) = &e.kind;
+ if path.ident.name == sym!(as_bytes);
+ if let ExprKind::Lit(lit) = &args[0].kind;
+ if let LitKind::Str(lit_content, _) = &lit.node;
+ then {
+ let callsite = snippet(cx, args[0].span.source_callsite(), r#""foo""#);
+ let mut applicability = Applicability::MachineApplicable;
+ if callsite.starts_with("include_str!") {
+ span_lint_and_sugg(
+ cx,
+ STRING_LIT_AS_BYTES,
+ e.span,
+ "calling `as_bytes()` on `include_str!(..)`",
+ "consider using `include_bytes!(..)` instead",
+ snippet_with_applicability(cx, args[0].span, r#""foo""#, &mut applicability).replacen(
+ "include_str",
+ "include_bytes",
+ 1,
+ ),
+ applicability,
+ );
+ } else if lit_content.as_str().is_ascii()
+ && lit_content.as_str().len() <= MAX_LENGTH_BYTE_STRING_LIT
+ && !args[0].span.from_expansion()
+ {
+ span_lint_and_sugg(
+ cx,
+ STRING_LIT_AS_BYTES,
+ e.span,
+ "calling `as_bytes()` on a string literal",
+ "consider using a byte string literal instead",
+ format!(
+ "b{}",
+ snippet_with_applicability(cx, args[0].span, r#""foo""#, &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+ }
+
+ if_chain! {
+ if let ExprKind::MethodCall(path, [recv], _) = &e.kind;
+ if path.ident.name == sym!(into_bytes);
+ if let ExprKind::MethodCall(path, [recv], _) = &recv.kind;
+ if matches!(path.ident.name.as_str(), "to_owned" | "to_string");
+ if let ExprKind::Lit(lit) = &recv.kind;
+ if let LitKind::Str(lit_content, _) = &lit.node;
+
+ if lit_content.as_str().is_ascii();
+ if lit_content.as_str().len() <= MAX_LENGTH_BYTE_STRING_LIT;
+ if !recv.span.from_expansion();
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+
+ span_lint_and_sugg(
+ cx,
+ STRING_LIT_AS_BYTES,
+ e.span,
+ "calling `into_bytes()` on a string literal",
+ "consider using a byte string literal instead",
+ format!(
+ "b{}.to_vec()",
+ snippet_with_applicability(cx, recv.span, r#""..""#, &mut applicability)
+ ),
+ applicability,
+ );
+ }
+ }
+ }
+}
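The `include_str!` branch above is a case not shown in the lint's doc example: when the literal comes from `include_str!(..)`, the suggestion swaps the macro rather than dropping the `as_bytes()` call. A minimal sketch, assuming a conventional Cargo layout where `../Cargo.toml` exists relative to this source file:

```rust
fn main() {
    // Triggers STRING_LIT_AS_BYTES: `as_bytes()` called on `include_str!(..)`.
    let a: &[u8] = include_str!("../Cargo.toml").as_bytes();

    // The suggested replacement includes the file as bytes directly.
    let b: &[u8] = include_bytes!("../Cargo.toml");

    assert_eq!(a, b);
}
```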
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint checks for `.to_string()` method calls on values of type `&str`.
+ ///
+ /// ### Why is this bad?
+ /// The `to_string` method is also used on other types to convert them to a string.
+ /// When called on a `&str` it turns the `&str` into the owned variant `String`, which can be better
+ /// expressed with `.to_owned()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // example code where clippy issues a warning
+ /// let _ = "str".to_string();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// // example code which does not raise a clippy warning
+ /// let _ = "str".to_owned();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub STR_TO_STRING,
+ restriction,
+ "using `to_string()` on a `&str`, which should be `to_owned()`"
+}
+
+declare_lint_pass!(StrToString => [STR_TO_STRING]);
+
+impl<'tcx> LateLintPass<'tcx> for StrToString {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &expr.kind;
+ if path.ident.name == sym::to_string;
+ let ty = cx.typeck_results().expr_ty(self_arg);
+ if let ty::Ref(_, ty, ..) = ty.kind();
+ if *ty.kind() == ty::Str;
+ then {
+ span_lint_and_help(
+ cx,
+ STR_TO_STRING,
+ expr.span,
+ "`to_string()` called on a `&str`",
+ None,
+ "consider using `.to_owned()`",
+ );
+ }
+ }
+ }
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint checks for `.to_string()` method calls on values of type `String`.
+ ///
+ /// ### Why is this bad?
+ /// The `to_string` method is also used on other types to convert them to a string.
+ /// When called on a `String` it only clones the `String`, which can be better expressed with `.clone()`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // example code where clippy issues a warning
+ /// let msg = String::from("Hello World");
+ /// let _ = msg.to_string();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// // example code which does not raise a clippy warning
+ /// let msg = String::from("Hello World");
+ /// let _ = msg.clone();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub STRING_TO_STRING,
+ restriction,
+ "using `to_string()` on a `String`, which should be `clone()`"
+}
+
+declare_lint_pass!(StringToString => [STRING_TO_STRING]);
+
+impl<'tcx> LateLintPass<'tcx> for StringToString {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
+ if_chain! {
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &expr.kind;
+ if path.ident.name == sym::to_string;
+ let ty = cx.typeck_results().expr_ty(self_arg);
+ if is_type_diagnostic_item(cx, ty, sym::String);
+ then {
+ span_lint_and_help(
+ cx,
+ STRING_TO_STRING,
+ expr.span,
+ "`to_string()` called on a `String`",
+ None,
+ "consider using `.clone()`",
+ );
+ }
+ }
+ }
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Warns about calling `str::trim` (or variants) before `str::split_whitespace`.
+ ///
+ /// ### Why is this bad?
+ /// `split_whitespace` already ignores leading and trailing whitespace.
+ ///
+ /// ### Example
+ /// ```rust
+ /// " A B C ".trim().split_whitespace();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// " A B C ".split_whitespace();
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub TRIM_SPLIT_WHITESPACE,
+ style,
+ "using `str::trim()` or alike before `str::split_whitespace`"
+}
+declare_lint_pass!(TrimSplitWhitespace => [TRIM_SPLIT_WHITESPACE]);
+
+impl<'tcx> LateLintPass<'tcx> for TrimSplitWhitespace {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
+ let tyckres = cx.typeck_results();
+ if_chain! {
+ if let ExprKind::MethodCall(path, [split_recv], split_ws_span) = expr.kind;
+ if path.ident.name == sym!(split_whitespace);
+ if let Some(split_ws_def_id) = tyckres.type_dependent_def_id(expr.hir_id);
+ if cx.tcx.is_diagnostic_item(sym::str_split_whitespace, split_ws_def_id);
+ if let ExprKind::MethodCall(path, [_trim_recv], trim_span) = split_recv.kind;
+ if let trim_fn_name @ ("trim" | "trim_start" | "trim_end") = path.ident.name.as_str();
+ if let Some(trim_def_id) = tyckres.type_dependent_def_id(split_recv.hir_id);
+ if is_one_of_trim_diagnostic_items(cx, trim_def_id);
+ then {
+ span_lint_and_sugg(
+ cx,
+ TRIM_SPLIT_WHITESPACE,
+ trim_span.with_hi(split_ws_span.lo()),
+ &format!("found call to `str::{}` before `str::split_whitespace`", trim_fn_name),
+ &format!("remove `{}()`", trim_fn_name),
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
+
+fn is_one_of_trim_diagnostic_items(cx: &LateContext<'_>, trim_def_id: DefId) -> bool {
+ cx.tcx.is_diagnostic_item(sym::str_trim, trim_def_id)
+ || cx.tcx.is_diagnostic_item(sym::str_trim_start, trim_def_id)
+ || cx.tcx.is_diagnostic_item(sym::str_trim_end, trim_def_id)
+}
diff --git a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
new file mode 100644
index 000000000..7bc9cf742
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
@@ -0,0 +1,88 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::visitors::is_expr_unsafe;
+use clippy_utils::{get_parent_node, match_libc_symbol};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Block, BlockCheckMode, Expr, ExprKind, Node, UnsafeSource};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `libc::strlen` on a `CString` or `CStr` value,
+ /// and suggest calling `as_bytes().len()` or `to_bytes().len()` respectively instead.
+ ///
+ /// ### Why is this bad?
+ /// This avoids calling an unsafe `libc` function.
+ /// Currently, it also avoids calculating the length.
+ ///
+ /// ### Example
+ /// ```rust, ignore
+ /// use std::ffi::CString;
+ /// let cstring = CString::new("foo").expect("CString::new failed");
+ /// let len = unsafe { libc::strlen(cstring.as_ptr()) };
+ /// ```
+ /// Use instead:
+ /// ```rust, no_run
+ /// use std::ffi::CString;
+ /// let cstring = CString::new("foo").expect("CString::new failed");
+ /// let len = cstring.as_bytes().len();
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub STRLEN_ON_C_STRINGS,
+ complexity,
+ "using `libc::strlen` on a `CString` or `CStr` value, while `as_bytes().len()` or `to_bytes().len()` respectively can be used instead"
+}
+
+declare_lint_pass!(StrlenOnCStrings => [STRLEN_ON_C_STRINGS]);
+
+impl<'tcx> LateLintPass<'tcx> for StrlenOnCStrings {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if !expr.span.from_expansion();
+ if let ExprKind::Call(func, [recv]) = expr.kind;
+ if let ExprKind::Path(path) = &func.kind;
+ if let Some(did) = cx.qpath_res(path, func.hir_id).opt_def_id();
+ if match_libc_symbol(cx, did, "strlen");
+ if let ExprKind::MethodCall(path, [self_arg], _) = recv.kind;
+ if !recv.span.from_expansion();
+ if path.ident.name == sym::as_ptr;
+ then {
+ let ctxt = expr.span.ctxt();
+ let span = match get_parent_node(cx.tcx, expr.hir_id) {
+ Some(Node::Block(&Block {
+ rules: BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided), span, ..
+ }))
+ if span.ctxt() == ctxt && !is_expr_unsafe(cx, self_arg) => {
+ span
+ }
+ _ => expr.span,
+ };
+
+ let ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
+ let mut app = Applicability::MachineApplicable;
+ let val_name = snippet_with_context(cx, self_arg.span, ctxt, "..", &mut app).0;
+ let method_name = if is_type_diagnostic_item(cx, ty, sym::cstring_type) {
+ "as_bytes"
+ } else if is_type_diagnostic_item(cx, ty, sym::CStr) {
+ "to_bytes"
+ } else {
+ return;
+ };
+
+ span_lint_and_sugg(
+ cx,
+ STRLEN_ON_C_STRINGS,
+ span,
+ "using `libc::strlen` on a `CString` or `CStr` value",
+ "try this",
+ format!("{}.{}().len()", val_name, method_name),
+ app,
+ );
+ }
+ }
+ }
+}
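The suggestion above picks `as_bytes()` for `CString` receivers and `to_bytes()` for `CStr` receivers; only the `CString` case appears in the lint's doc example. A minimal sketch of the `CStr` side (showing just the suggested replacement, so no `libc` dependency is needed):

```rust
use std::ffi::CStr;

fn main() {
    let cstr: &CStr = CStr::from_bytes_with_nul(b"foo\0").expect("valid C string");

    // What the lint would suggest in place of `unsafe { libc::strlen(cstr.as_ptr()) }`.
    let len = cstr.to_bytes().len();
    assert_eq!(len, 3);
}
```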
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
new file mode 100644
index 000000000..fe8859905
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
@@ -0,0 +1,693 @@
+use clippy_utils::ast_utils::{eq_id, is_useless_with_eq_exprs, IdentIter};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use core::ops::{Add, AddAssign};
+use if_chain::if_chain;
+use rustc_ast::ast::{BinOpKind, Expr, ExprKind, StmtKind};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+use rustc_span::symbol::Ident;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unlikely usages of binary operators that are almost
+ /// certainly typos and/or copy/paste errors, given the other usages
+ /// of binary operators nearby.
+ ///
+ /// ### Why is this bad?
+ /// They are probably bugs and if they aren't then they look like bugs
+ /// and you should add a comment explaining why you are doing such an
+ /// odd set of operations.
+ ///
+ /// ### Known problems
+ /// There may be some false positives if you are trying to do something
+ /// unusual that happens to look like a typo.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Vec3 {
+ /// x: f64,
+ /// y: f64,
+ /// z: f64,
+ /// }
+ ///
+ /// impl Eq for Vec3 {}
+ ///
+ /// impl PartialEq for Vec3 {
+ /// fn eq(&self, other: &Self) -> bool {
+ /// // This should trigger the lint because `self.x` is compared to `other.y`
+ /// self.x == other.y && self.y == other.y && self.z == other.z
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # struct Vec3 {
+ /// # x: f64,
+ /// # y: f64,
+ /// # z: f64,
+ /// # }
+ /// // same as above except:
+ /// impl PartialEq for Vec3 {
+ /// fn eq(&self, other: &Self) -> bool {
+ /// // Note we now compare other.x to self.x
+ /// self.x == other.x && self.y == other.y && self.z == other.z
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub SUSPICIOUS_OPERATION_GROUPINGS,
+ nursery,
+ "groupings of binary operations that look suspiciously like typos"
+}
+
+declare_lint_pass!(SuspiciousOperationGroupings => [SUSPICIOUS_OPERATION_GROUPINGS]);
+
+impl EarlyLintPass for SuspiciousOperationGroupings {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ if let Some(binops) = extract_related_binops(&expr.kind) {
+ check_binops(cx, &binops.iter().collect::<Vec<_>>());
+
+ let mut op_types = Vec::with_capacity(binops.len());
+ // We could use a hashmap, etc. to avoid being O(n*m) here, but
+ // we want the lints to be emitted in a consistent order. Besides,
+ // m (the number of distinct `BinOpKind`s in `binops`)
+ // will often be small, and does have an upper limit.
+ binops.iter().map(|b| b.op).for_each(|op| {
+ if !op_types.contains(&op) {
+ op_types.push(op);
+ }
+ });
+
+ for op_type in op_types {
+ let ops: Vec<_> = binops.iter().filter(|b| b.op == op_type).collect();
+
+ check_binops(cx, &ops);
+ }
+ }
+ }
+}
+
+fn check_binops(cx: &EarlyContext<'_>, binops: &[&BinaryOp<'_>]) {
+ let binop_count = binops.len();
+ if binop_count < 2 {
+ // Single binary operation expressions would likely be false
+ // positives.
+ return;
+ }
+
+ let mut one_ident_difference_count = 0;
+ let mut no_difference_info = None;
+ let mut double_difference_info = None;
+ let mut expected_ident_loc = None;
+
+ let mut paired_identifiers = FxHashSet::default();
+
+ for (i, BinaryOp { left, right, op, .. }) in binops.iter().enumerate() {
+ match ident_difference_expr(left, right) {
+ IdentDifference::NoDifference => {
+ if is_useless_with_eq_exprs(*op) {
+ // The `eq_op` lint should catch this in this case.
+ return;
+ }
+
+ no_difference_info = Some(i);
+ },
+ IdentDifference::Single(ident_loc) => {
+ one_ident_difference_count += 1;
+ if let Some(previous_expected) = expected_ident_loc {
+ if previous_expected != ident_loc {
+ // This expression doesn't match the form we're
+ // looking for.
+ return;
+ }
+ } else {
+ expected_ident_loc = Some(ident_loc);
+ }
+
+ // If there was only a single difference, all other idents
+ // must have been the same, and thus were paired.
+ for id in skip_index(IdentIter::from(*left), ident_loc.index) {
+ paired_identifiers.insert(id);
+ }
+ },
+ IdentDifference::Double(ident_loc1, ident_loc2) => {
+ double_difference_info = Some((i, ident_loc1, ident_loc2));
+ },
+ IdentDifference::Multiple | IdentDifference::NonIdent => {
+ // It's too hard to know whether this is a bug or not.
+ return;
+ },
+ }
+ }
+
+ let mut applicability = Applicability::MachineApplicable;
+
+ if let Some(expected_loc) = expected_ident_loc {
+ match (no_difference_info, double_difference_info) {
+ (Some(i), None) => attempt_to_emit_no_difference_lint(cx, binops, i, expected_loc),
+ (None, Some((double_difference_index, ident_loc1, ident_loc2))) => {
+ if_chain! {
+ if one_ident_difference_count == binop_count - 1;
+ if let Some(binop) = binops.get(double_difference_index);
+ then {
+ let changed_loc = if ident_loc1 == expected_loc {
+ ident_loc2
+ } else if ident_loc2 == expected_loc {
+ ident_loc1
+ } else {
+ // This expression doesn't match the form we're
+ // looking for.
+ return;
+ };
+
+ if let Some(sugg) = ident_swap_sugg(
+ cx,
+ &paired_identifiers,
+ binop,
+ changed_loc,
+ &mut applicability,
+ ) {
+ emit_suggestion(
+ cx,
+ binop.span,
+ sugg,
+ applicability,
+ );
+ }
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+}
+
+fn attempt_to_emit_no_difference_lint(
+ cx: &EarlyContext<'_>,
+ binops: &[&BinaryOp<'_>],
+ i: usize,
+ expected_loc: IdentLocation,
+) {
+ if let Some(binop) = binops.get(i).copied() {
+ // We need to try and figure out which identifier we should
+ // suggest using instead. Since there could be multiple
+ // replacement candidates in a given expression, and we're
+ // just taking the first one, we may get some bad lint
+ // messages.
+ let mut applicability = Applicability::MaybeIncorrect;
+
+ // We assume that the correct ident is one used elsewhere in
+ // the other binops, in a place that there was a single
+ // difference between idents before.
+ let old_left_ident = get_ident(binop.left, expected_loc);
+ let old_right_ident = get_ident(binop.right, expected_loc);
+
+ for b in skip_index(binops.iter(), i) {
+ if_chain! {
+ if let (Some(old_ident), Some(new_ident)) =
+ (old_left_ident, get_ident(b.left, expected_loc));
+ if old_ident != new_ident;
+ if let Some(sugg) = suggestion_with_swapped_ident(
+ cx,
+ binop.left,
+ expected_loc,
+ new_ident,
+ &mut applicability,
+ );
+ then {
+ emit_suggestion(
+ cx,
+ binop.span,
+ replace_left_sugg(cx, binop, &sugg, &mut applicability),
+ applicability,
+ );
+ return;
+ }
+ }
+
+ if_chain! {
+ if let (Some(old_ident), Some(new_ident)) =
+ (old_right_ident, get_ident(b.right, expected_loc));
+ if old_ident != new_ident;
+ if let Some(sugg) = suggestion_with_swapped_ident(
+ cx,
+ binop.right,
+ expected_loc,
+ new_ident,
+ &mut applicability,
+ );
+ then {
+ emit_suggestion(
+ cx,
+ binop.span,
+ replace_right_sugg(cx, binop, &sugg, &mut applicability),
+ applicability,
+ );
+ return;
+ }
+ }
+ }
+ }
+}
+
+fn emit_suggestion(cx: &EarlyContext<'_>, span: Span, sugg: String, applicability: Applicability) {
+ span_lint_and_sugg(
+ cx,
+ SUSPICIOUS_OPERATION_GROUPINGS,
+ span,
+ "this sequence of operators looks suspiciously like a bug",
+ "did you mean",
+ sugg,
+ applicability,
+ );
+}
+
+fn ident_swap_sugg(
+ cx: &EarlyContext<'_>,
+ paired_identifiers: &FxHashSet<Ident>,
+ binop: &BinaryOp<'_>,
+ location: IdentLocation,
+ applicability: &mut Applicability,
+) -> Option<String> {
+ let left_ident = get_ident(binop.left, location)?;
+ let right_ident = get_ident(binop.right, location)?;
+
+ let sugg = match (
+ paired_identifiers.contains(&left_ident),
+ paired_identifiers.contains(&right_ident),
+ ) {
+ (true, true) | (false, false) => {
+ // We don't have a good guess of what ident should be
+ // used instead, in these cases.
+ *applicability = Applicability::MaybeIncorrect;
+
+ // We arbitrarily choose one side to suggest changing,
+ // since we don't have a better guess. If the user
+ // ends up duplicating a clause, the `logic_bug` lint
+ // should catch it.
+
+ let right_suggestion = suggestion_with_swapped_ident(cx, binop.right, location, left_ident, applicability)?;
+
+ replace_right_sugg(cx, binop, &right_suggestion, applicability)
+ },
+ (false, true) => {
+ // We haven't seen a pair involving the left one, so
+ // it's probably what is wanted.
+
+ let right_suggestion = suggestion_with_swapped_ident(cx, binop.right, location, left_ident, applicability)?;
+
+ replace_right_sugg(cx, binop, &right_suggestion, applicability)
+ },
+ (true, false) => {
+ // We haven't seen a pair involving the right one, so
+ // it's probably what is wanted.
+ let left_suggestion = suggestion_with_swapped_ident(cx, binop.left, location, right_ident, applicability)?;
+
+ replace_left_sugg(cx, binop, &left_suggestion, applicability)
+ },
+ };
+
+ Some(sugg)
+}
+
+fn replace_left_sugg(
+ cx: &EarlyContext<'_>,
+ binop: &BinaryOp<'_>,
+ left_suggestion: &str,
+ applicability: &mut Applicability,
+) -> String {
+ format!(
+ "{} {} {}",
+ left_suggestion,
+ binop.op.to_string(),
+ snippet_with_applicability(cx, binop.right.span, "..", applicability),
+ )
+}
+
+fn replace_right_sugg(
+ cx: &EarlyContext<'_>,
+ binop: &BinaryOp<'_>,
+ right_suggestion: &str,
+ applicability: &mut Applicability,
+) -> String {
+ format!(
+ "{} {} {}",
+ snippet_with_applicability(cx, binop.left.span, "..", applicability),
+ binop.op.to_string(),
+ right_suggestion,
+ )
+}
+
+#[derive(Clone, Debug)]
+struct BinaryOp<'exprs> {
+ op: BinOpKind,
+ span: Span,
+ left: &'exprs Expr,
+ right: &'exprs Expr,
+}
+
+impl<'exprs> BinaryOp<'exprs> {
+ fn new(op: BinOpKind, span: Span, (left, right): (&'exprs Expr, &'exprs Expr)) -> Self {
+ Self { op, span, left, right }
+ }
+}
+
+fn strip_non_ident_wrappers(expr: &Expr) -> &Expr {
+ let mut output = expr;
+ loop {
+ output = match &output.kind {
+ ExprKind::Paren(ref inner) | ExprKind::Unary(_, ref inner) => inner,
+ _ => {
+ return output;
+ },
+ };
+ }
+}
+
+fn extract_related_binops(kind: &ExprKind) -> Option<Vec<BinaryOp<'_>>> {
+ append_opt_vecs(chained_binops(kind), if_statement_binops(kind))
+}
+
+fn if_statement_binops(kind: &ExprKind) -> Option<Vec<BinaryOp<'_>>> {
+ match kind {
+ ExprKind::If(ref condition, _, _) => chained_binops(&condition.kind),
+ ExprKind::Paren(ref e) => if_statement_binops(&e.kind),
+ ExprKind::Block(ref block, _) => {
+ let mut output = None;
+ for stmt in &block.stmts {
+ match stmt.kind {
+ StmtKind::Expr(ref e) | StmtKind::Semi(ref e) => {
+ output = append_opt_vecs(output, if_statement_binops(&e.kind));
+ },
+ _ => {},
+ }
+ }
+ output
+ },
+ _ => None,
+ }
+}
+
+fn append_opt_vecs<A>(target_opt: Option<Vec<A>>, source_opt: Option<Vec<A>>) -> Option<Vec<A>> {
+ match (target_opt, source_opt) {
+ (Some(mut target), Some(source)) => {
+ target.reserve(source.len());
+ for op in source {
+ target.push(op);
+ }
+ Some(target)
+ },
+ (Some(v), None) | (None, Some(v)) => Some(v),
+ (None, None) => None,
+ }
+}
+
+fn chained_binops(kind: &ExprKind) -> Option<Vec<BinaryOp<'_>>> {
+ match kind {
+ ExprKind::Binary(_, left_outer, right_outer) => chained_binops_helper(left_outer, right_outer),
+ ExprKind::Paren(ref e) | ExprKind::Unary(_, ref e) => chained_binops(&e.kind),
+ _ => None,
+ }
+}
+
+fn chained_binops_helper<'expr>(left_outer: &'expr Expr, right_outer: &'expr Expr) -> Option<Vec<BinaryOp<'expr>>> {
+ match (&left_outer.kind, &right_outer.kind) {
+ (
+ ExprKind::Paren(ref left_e) | ExprKind::Unary(_, ref left_e),
+ ExprKind::Paren(ref right_e) | ExprKind::Unary(_, ref right_e),
+ ) => chained_binops_helper(left_e, right_e),
+ (ExprKind::Paren(ref left_e) | ExprKind::Unary(_, ref left_e), _) => chained_binops_helper(left_e, right_outer),
+ (_, ExprKind::Paren(ref right_e) | ExprKind::Unary(_, ref right_e)) => {
+ chained_binops_helper(left_outer, right_e)
+ },
+ (
+ ExprKind::Binary(Spanned { node: left_op, .. }, ref left_left, ref left_right),
+ ExprKind::Binary(Spanned { node: right_op, .. }, ref right_left, ref right_right),
+ ) => match (
+ chained_binops_helper(left_left, left_right),
+ chained_binops_helper(right_left, right_right),
+ ) {
+ (Some(mut left_ops), Some(right_ops)) => {
+ left_ops.reserve(right_ops.len());
+ for op in right_ops {
+ left_ops.push(op);
+ }
+ Some(left_ops)
+ },
+ (Some(mut left_ops), _) => {
+ left_ops.push(BinaryOp::new(*right_op, right_outer.span, (right_left, right_right)));
+ Some(left_ops)
+ },
+ (_, Some(mut right_ops)) => {
+ right_ops.insert(0, BinaryOp::new(*left_op, left_outer.span, (left_left, left_right)));
+ Some(right_ops)
+ },
+ (None, None) => Some(vec![
+ BinaryOp::new(*left_op, left_outer.span, (left_left, left_right)),
+ BinaryOp::new(*right_op, right_outer.span, (right_left, right_right)),
+ ]),
+ },
+ _ => None,
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Default, Debug)]
+struct IdentLocation {
+ index: usize,
+}
+
+impl Add for IdentLocation {
+ type Output = IdentLocation;
+
+ fn add(self, other: Self) -> Self::Output {
+ Self {
+ index: self.index + other.index,
+ }
+ }
+}
+
+impl AddAssign for IdentLocation {
+ fn add_assign(&mut self, other: Self) {
+ *self = *self + other;
+ }
+}
+
+#[derive(Clone, Copy, Debug)]
+enum IdentDifference {
+ NoDifference,
+ Single(IdentLocation),
+ Double(IdentLocation, IdentLocation),
+ Multiple,
+ NonIdent,
+}
+
+impl Add for IdentDifference {
+ type Output = IdentDifference;
+
+ fn add(self, other: Self) -> Self::Output {
+ match (self, other) {
+ (Self::NoDifference, output) | (output, Self::NoDifference) => output,
+ (Self::Multiple, _)
+ | (_, Self::Multiple)
+ | (Self::Double(_, _), Self::Single(_))
+ | (Self::Single(_) | Self::Double(_, _), Self::Double(_, _)) => Self::Multiple,
+ (Self::NonIdent, _) | (_, Self::NonIdent) => Self::NonIdent,
+ (Self::Single(il1), Self::Single(il2)) => Self::Double(il1, il2),
+ }
+ }
+}
+
+impl AddAssign for IdentDifference {
+ fn add_assign(&mut self, other: Self) {
+ *self = *self + other;
+ }
+}
+
+impl IdentDifference {
+ /// Returns true if learning about more differences will not change the value
+ /// of this `IdentDifference`, and false otherwise.
+ fn is_complete(&self) -> bool {
+ match self {
+ Self::NoDifference | Self::Single(_) | Self::Double(_, _) => false,
+ Self::Multiple | Self::NonIdent => true,
+ }
+ }
+}
+
+fn ident_difference_expr(left: &Expr, right: &Expr) -> IdentDifference {
+ ident_difference_expr_with_base_location(left, right, IdentLocation::default()).0
+}
+
+fn ident_difference_expr_with_base_location(
+ left: &Expr,
+ right: &Expr,
+ mut base: IdentLocation,
+) -> (IdentDifference, IdentLocation) {
+ // Ideally, this function should not use IdentIter because it should return
+ // early if the expressions have any non-ident differences. We want that early
+ // return because without that restriction the lint would lead to false
+ // positives.
+ //
+ // But, we cannot (easily?) use a `rustc_ast::visit::Visitor`, since we need
+ // the two expressions to be walked in lockstep. And without a `Visitor`, we'd
+ // have to do all the AST traversal ourselves, which is a lot of work, since to
+ // do it properly we'd need to be able to handle more or less every possible
+ // AST node since `Item`s can be written inside `Expr`s.
+ //
+ // In practice, it seems likely that expressions, above a certain size, that
+ // happen to use the exact same idents in the exact same order, and which are
+ // not structured the same, would be rare. Therefore it seems likely that if
+ // we do only the first layer of matching ourselves and eventually fall back on
+ // IdentIter, then the output of this function will almost always be correct
+ // in practice.
+ //
+ // If it turns out that problematic cases are more prevalent than we assume,
+ // then we should be able to change this function to do the correct traversal,
+ // without needing to change the rest of the code.
+
+ #![allow(clippy::enum_glob_use)]
+ use ExprKind::*;
+
+ match (
+ &strip_non_ident_wrappers(left).kind,
+ &strip_non_ident_wrappers(right).kind,
+ ) {
+ (Yield(_), Yield(_))
+ | (Try(_), Try(_))
+ | (Paren(_), Paren(_))
+ | (Repeat(_, _), Repeat(_, _))
+ | (Struct(_), Struct(_))
+ | (MacCall(_), MacCall(_))
+ | (InlineAsm(_), InlineAsm(_))
+ | (Ret(_), Ret(_))
+ | (Continue(_), Continue(_))
+ | (Break(_, _), Break(_, _))
+ | (AddrOf(_, _, _), AddrOf(_, _, _))
+ | (Path(_, _), Path(_, _))
+ | (Range(_, _, _), Range(_, _, _))
+ | (Index(_, _), Index(_, _))
+ | (Field(_, _), Field(_, _))
+ | (AssignOp(_, _, _), AssignOp(_, _, _))
+ | (Assign(_, _, _), Assign(_, _, _))
+ | (TryBlock(_), TryBlock(_))
+ | (Await(_), Await(_))
+ | (Async(_, _, _), Async(_, _, _))
+ | (Block(_, _), Block(_, _))
+ | (Closure(_, _, _, _, _, _, _), Closure(_, _, _, _, _, _, _))
+ | (Match(_, _), Match(_, _))
+ | (Loop(_, _), Loop(_, _))
+ | (ForLoop(_, _, _, _), ForLoop(_, _, _, _))
+ | (While(_, _, _), While(_, _, _))
+ | (If(_, _, _), If(_, _, _))
+ | (Let(_, _, _), Let(_, _, _))
+ | (Type(_, _), Type(_, _))
+ | (Cast(_, _), Cast(_, _))
+ | (Lit(_), Lit(_))
+ | (Unary(_, _), Unary(_, _))
+ | (Binary(_, _, _), Binary(_, _, _))
+ | (Tup(_), Tup(_))
+ | (MethodCall(_, _, _), MethodCall(_, _, _))
+ | (Call(_, _), Call(_, _))
+ | (ConstBlock(_), ConstBlock(_))
+ | (Array(_), Array(_))
+ | (Box(_), Box(_)) => {
+ // keep going
+ },
+ _ => {
+ return (IdentDifference::NonIdent, base);
+ },
+ }
+
+ let mut difference = IdentDifference::NoDifference;
+
+ for (left_attr, right_attr) in left.attrs.iter().zip(right.attrs.iter()) {
+ let (new_difference, new_base) =
+ ident_difference_via_ident_iter_with_base_location(left_attr, right_attr, base);
+ base = new_base;
+ difference += new_difference;
+ if difference.is_complete() {
+ return (difference, base);
+ }
+ }
+
+ let (new_difference, new_base) = ident_difference_via_ident_iter_with_base_location(left, right, base);
+ base = new_base;
+ difference += new_difference;
+
+ (difference, base)
+}
+
+fn ident_difference_via_ident_iter_with_base_location<Iterable: Into<IdentIter>>(
+ left: Iterable,
+ right: Iterable,
+ mut base: IdentLocation,
+) -> (IdentDifference, IdentLocation) {
+ // See the note in `ident_difference_expr_with_base_location` about `IdentIter`
+ let mut difference = IdentDifference::NoDifference;
+
+ let mut left_iterator = left.into();
+ let mut right_iterator = right.into();
+
+ loop {
+ match (left_iterator.next(), right_iterator.next()) {
+ (Some(left_ident), Some(right_ident)) => {
+ if !eq_id(left_ident, right_ident) {
+ difference += IdentDifference::Single(base);
+ if difference.is_complete() {
+ return (difference, base);
+ }
+ }
+ },
+ (Some(_), None) | (None, Some(_)) => {
+ return (IdentDifference::NonIdent, base);
+ },
+ (None, None) => {
+ return (difference, base);
+ },
+ }
+ base += IdentLocation { index: 1 };
+ }
+}
+
+fn get_ident(expr: &Expr, location: IdentLocation) -> Option<Ident> {
+ IdentIter::from(expr).nth(location.index)
+}
+
+fn suggestion_with_swapped_ident(
+ cx: &EarlyContext<'_>,
+ expr: &Expr,
+ location: IdentLocation,
+ new_ident: Ident,
+ applicability: &mut Applicability,
+) -> Option<String> {
+ get_ident(expr, location).and_then(|current_ident| {
+ if eq_id(current_ident, new_ident) {
+ // We never want to suggest a non-change
+ return None;
+ }
+
+ Some(format!(
+ "{}{}{}",
+ snippet_with_applicability(cx, expr.span.with_hi(current_ident.span.lo()), "..", applicability),
+ new_ident,
+ snippet_with_applicability(cx, expr.span.with_lo(current_ident.span.hi()), "..", applicability),
+ ))
+ })
+}
+
+fn skip_index<A, Iter>(iter: Iter, index: usize) -> impl Iterator<Item = A>
+where
+ Iter: Iterator<Item = A>,
+{
+ iter.enumerate()
+ .filter_map(move |(i, a)| if i == index { None } else { Some(a) })
+}
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs b/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs
new file mode 100644
index 000000000..4294464db
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs
@@ -0,0 +1,116 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{binop_traits, trait_ref_of_method, BINOP_TRAITS, OP_ASSIGN_TRAITS};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Lints for suspicious operations in impls of arithmetic operators, e.g.
+ /// subtracting elements in an Add impl.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a typo or copy-and-paste error and not intended.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// impl Add for Foo {
+ /// type Output = Foo;
+ ///
+ /// fn add(self, other: Foo) -> Foo {
+ /// Foo(self.0 - other.0)
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SUSPICIOUS_ARITHMETIC_IMPL,
+ suspicious,
+ "suspicious use of operators in impl of arithmetic trait"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Lints for suspicious operations in impls of OpAssign, e.g.
+ /// subtracting elements in an AddAssign impl.
+ ///
+ /// ### Why is this bad?
+ /// This is probably a typo or copy-and-paste error and not intended.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// impl AddAssign for Foo {
+ /// fn add_assign(&mut self, other: Foo) {
+ /// *self = *self - other;
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub SUSPICIOUS_OP_ASSIGN_IMPL,
+ suspicious,
+ "suspicious use of operators in impl of OpAssign trait"
+}
+
+declare_lint_pass!(SuspiciousImpl => [SUSPICIOUS_ARITHMETIC_IMPL, SUSPICIOUS_OP_ASSIGN_IMPL]);
+
+impl<'tcx> LateLintPass<'tcx> for SuspiciousImpl {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if let hir::ExprKind::Binary(binop, _, _) | hir::ExprKind::AssignOp(binop, ..) = expr.kind;
+ if let Some((binop_trait_lang, op_assign_trait_lang)) = binop_traits(binop.node);
+ if let Ok(binop_trait_id) = cx.tcx.lang_items().require(binop_trait_lang);
+ if let Ok(op_assign_trait_id) = cx.tcx.lang_items().require(op_assign_trait_lang);
+
+ // Check for more than one binary operation in the implemented function
+ // Linting when multiple operations are involved can result in false positives
+ let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id);
+ if let hir::Node::ImplItem(impl_item) = cx.tcx.hir().get_by_def_id(parent_fn);
+ if let hir::ImplItemKind::Fn(_, body_id) = impl_item.kind;
+ let body = cx.tcx.hir().body(body_id);
+ let parent_fn = cx.tcx.hir().get_parent_item(expr.hir_id);
+ if let Some(trait_ref) = trait_ref_of_method(cx, parent_fn);
+ let trait_id = trait_ref.path.res.def_id();
+ if ![binop_trait_id, op_assign_trait_id].contains(&trait_id);
+ if let Some(&(_, lint)) = [
+ (&BINOP_TRAITS, SUSPICIOUS_ARITHMETIC_IMPL),
+ (&OP_ASSIGN_TRAITS, SUSPICIOUS_OP_ASSIGN_IMPL),
+ ]
+ .iter()
+ .find(|&(ts, _)| ts.iter().any(|&t| Ok(trait_id) == cx.tcx.lang_items().require(t)));
+ if count_binops(&body.value) == 1;
+ then {
+ span_lint(
+ cx,
+ lint,
+ binop.span,
+ &format!("suspicious use of `{}` in `{}` impl", binop.node.as_str(), cx.tcx.item_name(trait_id)),
+ );
+ }
+ }
+ }
+}
+
+fn count_binops(expr: &hir::Expr<'_>) -> u32 {
+ let mut visitor = BinaryExprVisitor::default();
+ visitor.visit_expr(expr);
+ visitor.nb_binops
+}
+
+#[derive(Default)]
+struct BinaryExprVisitor {
+ nb_binops: u32,
+}
+
+impl<'tcx> Visitor<'tcx> for BinaryExprVisitor {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
+ match expr.kind {
+ hir::ExprKind::Binary(..)
+ | hir::ExprKind::Unary(hir::UnOp::Not | hir::UnOp::Neg, _)
+ | hir::ExprKind::AssignOp(..) => self.nb_binops += 1,
+ _ => {},
+ }
+
+ walk_expr(self, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/swap.rs b/src/tools/clippy/clippy_lints/src/swap.rs
new file mode 100644
index 000000000..1885f3ca4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/swap.rs
@@ -0,0 +1,258 @@
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{can_mut_borrow_both, eq_expr_value, std_or_core};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Block, Expr, ExprKind, PatKind, QPath, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual swapping.
+ ///
+ /// ### Why is this bad?
+ /// The `std::mem::swap` function exposes the intent better
+ /// without deinitializing or copying either variable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut a = 42;
+ /// let mut b = 1337;
+ ///
+ /// let t = b;
+ /// b = a;
+ /// a = t;
+ /// ```
+ /// Use `std::mem::swap()`:
+ /// ```rust
+ /// let mut a = 1;
+ /// let mut b = 2;
+ /// std::mem::swap(&mut a, &mut b);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub MANUAL_SWAP,
+ complexity,
+ "manual swap of two variables"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `foo = bar; bar = foo` sequences.
+ ///
+ /// ### Why is this bad?
+ /// This looks like a failed attempt to swap.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let mut a = 1;
+ /// # let mut b = 2;
+ /// a = b;
+ /// b = a;
+ /// ```
+ /// If swapping is intended, use `swap()` instead:
+ /// ```rust
+ /// # let mut a = 1;
+ /// # let mut b = 2;
+ /// std::mem::swap(&mut a, &mut b);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ALMOST_SWAPPED,
+ correctness,
+ "`foo = bar; bar = foo` sequence"
+}
+
+declare_lint_pass!(Swap => [MANUAL_SWAP, ALMOST_SWAPPED]);
+
+impl<'tcx> LateLintPass<'tcx> for Swap {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'_>) {
+ check_manual_swap(cx, block);
+ check_suspicious_swap(cx, block);
+ check_xor_swap(cx, block);
+ }
+}
+
+fn generate_swap_warning(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>, span: Span, is_xor_based: bool) {
+ let mut applicability = Applicability::MachineApplicable;
+
+ if !can_mut_borrow_both(cx, e1, e2) {
+ if let ExprKind::Index(lhs1, idx1) = e1.kind {
+ if let ExprKind::Index(lhs2, idx2) = e2.kind {
+ if eq_expr_value(cx, lhs1, lhs2) {
+ let ty = cx.typeck_results().expr_ty(lhs1).peel_refs();
+
+ if matches!(ty.kind(), ty::Slice(_))
+ || matches!(ty.kind(), ty::Array(_, _))
+ || is_type_diagnostic_item(cx, ty, sym::Vec)
+ || is_type_diagnostic_item(cx, ty, sym::VecDeque)
+ {
+ let slice = Sugg::hir_with_applicability(cx, lhs1, "<slice>", &mut applicability);
+ span_lint_and_sugg(
+ cx,
+ MANUAL_SWAP,
+ span,
+ &format!("this looks like you are swapping elements of `{}` manually", slice),
+ "try",
+ format!(
+ "{}.swap({}, {})",
+ slice.maybe_par(),
+ snippet_with_applicability(cx, idx1.span, "..", &mut applicability),
+ snippet_with_applicability(cx, idx2.span, "..", &mut applicability),
+ ),
+ applicability,
+ );
+ }
+ }
+ }
+ }
+ return;
+ }
+
+ let first = Sugg::hir_with_applicability(cx, e1, "..", &mut applicability);
+ let second = Sugg::hir_with_applicability(cx, e2, "..", &mut applicability);
+ let Some(sugg) = std_or_core(cx) else { return };
+
+ span_lint_and_then(
+ cx,
+ MANUAL_SWAP,
+ span,
+ &format!("this looks like you are swapping `{}` and `{}` manually", first, second),
+ |diag| {
+ diag.span_suggestion(
+ span,
+ "try",
+ format!("{}::mem::swap({}, {})", sugg, first.mut_addr(), second.mut_addr()),
+ applicability,
+ );
+ if !is_xor_based {
+ diag.note(&format!("or maybe you should use `{}::mem::replace`?", sugg));
+ }
+ },
+ );
+}
+
+/// Implementation of the `MANUAL_SWAP` lint.
+fn check_manual_swap(cx: &LateContext<'_>, block: &Block<'_>) {
+ for w in block.stmts.windows(3) {
+ if_chain! {
+ // let t = foo();
+ if let StmtKind::Local(tmp) = w[0].kind;
+ if let Some(tmp_init) = tmp.init;
+ if let PatKind::Binding(.., ident, None) = tmp.pat.kind;
+
+ // foo() = bar();
+ if let StmtKind::Semi(first) = w[1].kind;
+ if let ExprKind::Assign(lhs1, rhs1, _) = first.kind;
+
+ // bar() = t;
+ if let StmtKind::Semi(second) = w[2].kind;
+ if let ExprKind::Assign(lhs2, rhs2, _) = second.kind;
+ if let ExprKind::Path(QPath::Resolved(None, rhs2)) = rhs2.kind;
+ if rhs2.segments.len() == 1;
+
+ if ident.name == rhs2.segments[0].ident.name;
+ if eq_expr_value(cx, tmp_init, lhs1);
+ if eq_expr_value(cx, rhs1, lhs2);
+ then {
+ let span = w[0].span.to(second.span);
+ generate_swap_warning(cx, lhs1, lhs2, span, false);
+ }
+ }
+ }
+}
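`check_manual_swap` also feeds `generate_swap_warning` the indexed case, which is not shown in the lint's doc example: when both sides index the same slice, array, `Vec`, or `VecDeque`, the suggestion becomes `<slice>.swap(i, j)`, because the two elements cannot be mutably borrowed at the same time. A minimal sketch:

```rust
fn main() {
    let mut a = [1, 2, 3];

    // Manual element swap; MANUAL_SWAP suggests `a.swap(0, 1)` here.
    let t = a[0];
    a[0] = a[1];
    a[1] = t;
    assert_eq!(a, [2, 1, 3]);

    // The suggested form.
    a.swap(0, 1);
    assert_eq!(a, [1, 2, 3]);
}
```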
+
+/// Implementation of the `ALMOST_SWAPPED` lint.
+fn check_suspicious_swap(cx: &LateContext<'_>, block: &Block<'_>) {
+ for w in block.stmts.windows(2) {
+ if_chain! {
+ if let StmtKind::Semi(first) = w[0].kind;
+ if let StmtKind::Semi(second) = w[1].kind;
+ if first.span.ctxt() == second.span.ctxt();
+ if let ExprKind::Assign(lhs0, rhs0, _) = first.kind;
+ if let ExprKind::Assign(lhs1, rhs1, _) = second.kind;
+ if eq_expr_value(cx, lhs0, rhs1);
+ if eq_expr_value(cx, lhs1, rhs0);
+ then {
+ let lhs0 = Sugg::hir_opt(cx, lhs0);
+ let rhs0 = Sugg::hir_opt(cx, rhs0);
+ let (what, lhs, rhs) = if let (Some(first), Some(second)) = (lhs0, rhs0) {
+ (
+ format!(" `{}` and `{}`", first, second),
+ first.mut_addr().to_string(),
+ second.mut_addr().to_string(),
+ )
+ } else {
+ (String::new(), String::new(), String::new())
+ };
+
+ let span = first.span.to(second.span);
+ let Some(sugg) = std_or_core(cx) else { return };
+
+ span_lint_and_then(cx,
+ ALMOST_SWAPPED,
+ span,
+ &format!("this looks like you are trying to swap{}", what),
+ |diag| {
+ if !what.is_empty() {
+ diag.span_suggestion(
+ span,
+ "try",
+ format!(
+ "{}::mem::swap({}, {})",
+ sugg,
+ lhs,
+ rhs,
+ ),
+ Applicability::MaybeIncorrect,
+ );
+ diag.note(
+ &format!("or maybe you should use `{}::mem::replace`?", sugg)
+ );
+ }
+ });
+ }
+ }
+ }
+}
+
+/// Implementation of the xor case for `MANUAL_SWAP` lint.
+fn check_xor_swap(cx: &LateContext<'_>, block: &Block<'_>) {
+ for window in block.stmts.windows(3) {
+ if_chain! {
+ if let Some((lhs0, rhs0)) = extract_sides_of_xor_assign(&window[0]);
+ if let Some((lhs1, rhs1)) = extract_sides_of_xor_assign(&window[1]);
+ if let Some((lhs2, rhs2)) = extract_sides_of_xor_assign(&window[2]);
+ if eq_expr_value(cx, lhs0, rhs1);
+ if eq_expr_value(cx, lhs2, rhs1);
+ if eq_expr_value(cx, lhs1, rhs0);
+ if eq_expr_value(cx, lhs1, rhs2);
+ then {
+ let span = window[0].span.to(window[2].span);
+ generate_swap_warning(cx, lhs0, rhs0, span, true);
+ }
+ };
+ }
+}
+
+/// Returns the lhs and rhs of an xor assignment statement.
+fn extract_sides_of_xor_assign<'a, 'hir>(stmt: &'a Stmt<'hir>) -> Option<(&'a Expr<'hir>, &'a Expr<'hir>)> {
+ if let StmtKind::Semi(expr) = stmt.kind {
+ if let ExprKind::AssignOp(
+ Spanned {
+ node: BinOpKind::BitXor,
+ ..
+ },
+ lhs,
+ rhs,
+ ) = expr.kind
+ {
+ return Some((lhs, rhs));
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs b/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs
new file mode 100644
index 000000000..3cbbda80f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/swap_ptr_to_ref.rs
@@ -0,0 +1,80 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_context;
+use clippy_utils::{match_def_path, path_def_id, paths};
+use rustc_errors::Applicability;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{Span, SyntaxContext};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `core::mem::swap` where either parameter is derived from a pointer
+ ///
+ /// ### Why is this bad?
+ /// When at least one parameter to `swap` is derived from a pointer, it may overlap with the
+ /// other. This would then lead to undefined behavior.
+ ///
+ /// ### Example
+ /// ```rust
+ /// unsafe fn swap(x: &[*mut u32], y: &[*mut u32]) {
+ /// for (&x, &y) in x.iter().zip(y) {
+ /// core::mem::swap(&mut *x, &mut *y);
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// unsafe fn swap(x: &[*mut u32], y: &[*mut u32]) {
+ /// for (&x, &y) in x.iter().zip(y) {
+ /// core::ptr::swap(x, y);
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.63.0"]
+ pub SWAP_PTR_TO_REF,
+ suspicious,
+ "call to `mem::swap` using pointer derived references"
+}
+declare_lint_pass!(SwapPtrToRef => [SWAP_PTR_TO_REF]);
+
+impl LateLintPass<'_> for SwapPtrToRef {
+ fn check_expr(&mut self, cx: &LateContext<'_>, e: &Expr<'_>) {
+ if let ExprKind::Call(fn_expr, [arg1, arg2]) = e.kind
+ && let Some(fn_id) = path_def_id(cx, fn_expr)
+ && match_def_path(cx, fn_id, &paths::MEM_SWAP)
+ && let ctxt = e.span.ctxt()
+ && let (from_ptr1, arg1_span) = is_ptr_to_ref(cx, arg1, ctxt)
+ && let (from_ptr2, arg2_span) = is_ptr_to_ref(cx, arg2, ctxt)
+ && (from_ptr1 || from_ptr2)
+ {
+ span_lint_and_then(
+ cx,
+ SWAP_PTR_TO_REF,
+ e.span,
+ "call to `core::mem::swap` with a parameter derived from a raw pointer",
+ |diag| {
+ if !((from_ptr1 && arg1_span.is_none()) || (from_ptr2 && arg2_span.is_none())) {
+ let mut app = Applicability::MachineApplicable;
+ let snip1 = snippet_with_context(cx, arg1_span.unwrap_or(arg1.span), ctxt, "..", &mut app).0;
+ let snip2 = snippet_with_context(cx, arg2_span.unwrap_or(arg2.span), ctxt, "..", &mut app).0;
+ diag.span_suggestion(e.span, "use ptr::swap", format!("core::ptr::swap({}, {})", snip1, snip2), app);
+ }
+ }
+ );
+ }
+ }
+}
+
+/// Checks if the expression converts a mutable pointer to a mutable reference. If it does, also
+/// returns the span of the pointer expression if it's suitable for making a suggestion.
+fn is_ptr_to_ref(cx: &LateContext<'_>, e: &Expr<'_>, ctxt: SyntaxContext) -> (bool, Option<Span>) {
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, borrowed_expr) = e.kind
+ && let ExprKind::Unary(UnOp::Deref, derefed_expr) = borrowed_expr.kind
+ && cx.typeck_results().expr_ty(derefed_expr).is_unsafe_ptr()
+ {
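+ // Only hand back a span for the suggestion when the pointer expression is visible in the surrounding macro context.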
+ (true, (borrowed_expr.span.ctxt() == ctxt || derefed_expr.span.ctxt() == ctxt).then_some(derefed_expr.span))
+ } else {
+ (false, None)
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs b/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs
new file mode 100644
index 000000000..e223aea29
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/tabs_in_doc_comments.rs
@@ -0,0 +1,230 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast::ast;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::{BytePos, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks doc comments for usage of tab characters.
+ ///
+ /// ### Why is this bad?
+ /// The Rust style guide promotes spaces instead of tabs for indentation.
+ /// To keep the source consistent, doc comments should not use tabs either.
+ /// In addition, ASCII diagrams that contain tabs can be displayed incorrectly when the
+ /// display settings of the author and the reader differ.
+ ///
+ /// ### Example
+ /// ```rust
+ /// ///
+ /// /// Struct to hold two strings:
+ /// /// - first one
+ /// /// - second one
+ /// pub struct DoubleString {
+ /// ///
+ /// /// - First String:
+ /// /// - needs to be inside here
+ /// first_string: String,
+ /// ///
+ /// /// - Second String:
+ /// /// - needs to be inside here
+ /// second_string: String,
+ ///}
+ /// ```
+ ///
+ /// Will be converted to:
+ /// ```rust
+ /// ///
+ /// /// Struct to hold two strings:
+ /// /// - first one
+ /// /// - second one
+ /// pub struct DoubleString {
+ /// ///
+ /// /// - First String:
+ /// /// - needs to be inside here
+ /// first_string: String,
+ /// ///
+ /// /// - Second String:
+ /// /// - needs to be inside here
+ /// second_string: String,
+ ///}
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub TABS_IN_DOC_COMMENTS,
+ style,
+ "using tabs in doc comments is not recommended"
+}
+
+declare_lint_pass!(TabsInDocComments => [TABS_IN_DOC_COMMENTS]);
+
+impl TabsInDocComments {
+ fn warn_if_tabs_in_doc(cx: &EarlyContext<'_>, attr: &ast::Attribute) {
+ if let ast::AttrKind::DocComment(_, comment) = attr.kind {
+ let comment = comment.as_str();
+
+ for (lo, hi) in get_chunks_of_tabs(comment) {
+ // +3 skips the opening delimiter
+ let new_span = Span::new(
+ attr.span.lo() + BytePos(3 + lo),
+ attr.span.lo() + BytePos(3 + hi),
+ attr.span.ctxt(),
+ attr.span.parent(),
+ );
+ span_lint_and_sugg(
+ cx,
+ TABS_IN_DOC_COMMENTS,
+ new_span,
+ "using tabs in doc comments is not recommended",
+ "consider using four spaces per tab",
+ " ".repeat((hi - lo) as usize),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+}
+
+impl EarlyLintPass for TabsInDocComments {
+ fn check_attribute(&mut self, cx: &EarlyContext<'_>, attribute: &ast::Attribute) {
+ Self::warn_if_tabs_in_doc(cx, attribute);
+ }
+}
+
+///
+/// Scans the string for groups of tabs and returns the start (inclusive) and end
+/// (exclusive) byte positions of all groups.
+/// e.g. "sd\tasd\t\taa" will be converted to [(2, 3), (6, 8)] as
+/// 012 3456 7 89
+/// ^-^ ^---^
+fn get_chunks_of_tabs(the_str: &str) -> Vec<(u32, u32)> {
+ let line_length_way_to_long = "doc comment longer than 2^32 chars";
+ let mut spans: Vec<(u32, u32)> = vec![];
+ let mut current_start: u32 = 0;
+
+ // tracker to decide if the last group of tabs is not closed by a non-tab character
+ let mut is_active = false;
+
+ // Note that we specifically need the char _byte_ indices here, not the positional indexes
+ // within the char array to deal with multi-byte characters properly. `char_indices` does
+ // exactly that. It provides an iterator over tuples of the form `(byte position, char)`.
+ let char_indices: Vec<_> = the_str.char_indices().collect();
+
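+ // A string consisting of a single tab has no windows of size 2, so handle it up front.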
+ if let [(_, '\t')] = char_indices.as_slice() {
+ return vec![(0, 1)];
+ }
+
+ for entry in char_indices.windows(2) {
+ match entry {
+ [(_, '\t'), (_, '\t')] => {
+ // either string starts with double tab, then we have to set it active,
+ // otherwise is_active is true anyway
+ is_active = true;
+ },
+ [(_, _), (index_b, '\t')] => {
+ // as ['\t', '\t'] is excluded, this has to be a start of a tab group,
+ // set indices accordingly
+ is_active = true;
+ current_start = u32::try_from(*index_b).unwrap();
+ },
+ [(_, '\t'), (index_b, _)] => {
+ // this now has to be an end of the group, hence we have to push a new tuple
+ is_active = false;
+ spans.push((current_start, u32::try_from(*index_b).unwrap()));
+ },
+ _ => {},
+ }
+ }
+
+ // only possible when tabs are at the end, insert last group
+ if is_active {
+ spans.push((
+ current_start,
+ u32::try_from(char_indices.last().unwrap().0 + 1).expect(line_length_way_to_long),
+ ));
+ }
+
+ spans
+}
+
+#[cfg(test)]
+mod tests_for_get_chunks_of_tabs {
+ use super::get_chunks_of_tabs;
+
+ #[test]
+ fn test_unicode_han_string() {
+ let res = get_chunks_of_tabs(" \u{4f4d}\t");
+
+ assert_eq!(res, vec![(4, 5)]);
+ }
+
+ #[test]
+ fn test_empty_string() {
+ let res = get_chunks_of_tabs("");
+
+ assert_eq!(res, vec![]);
+ }
+
+ #[test]
+ fn test_simple() {
+ let res = get_chunks_of_tabs("sd\t\t\taa");
+
+ assert_eq!(res, vec![(2, 5)]);
+ }
+
+ #[test]
+ fn test_only_t() {
+ let res = get_chunks_of_tabs("\t\t");
+
+ assert_eq!(res, vec![(0, 2)]);
+ }
+
+ #[test]
+ fn test_only_one_t() {
+ let res = get_chunks_of_tabs("\t");
+
+ assert_eq!(res, vec![(0, 1)]);
+ }
+
+ #[test]
+ fn test_double() {
+ let res = get_chunks_of_tabs("sd\tasd\t\taa");
+
+ assert_eq!(res, vec![(2, 3), (6, 8)]);
+ }
+
+ #[test]
+ fn test_start() {
+ let res = get_chunks_of_tabs("\t\taa");
+
+ assert_eq!(res, vec![(0, 2)]);
+ }
+
+ #[test]
+ fn test_end() {
+ let res = get_chunks_of_tabs("aa\t\t");
+
+ assert_eq!(res, vec![(2, 4)]);
+ }
+
+ #[test]
+ fn test_start_single() {
+ let res = get_chunks_of_tabs("\taa");
+
+ assert_eq!(res, vec![(0, 1)]);
+ }
+
+ #[test]
+ fn test_end_single() {
+ let res = get_chunks_of_tabs("aa\t");
+
+ assert_eq!(res, vec![(2, 3)]);
+ }
+
+ #[test]
+ fn test_no_tabs() {
+ let res = get_chunks_of_tabs("dsfs");
+
+ assert_eq!(res, vec![]);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/temporary_assignment.rs b/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
new file mode 100644
index 000000000..3766b8f8e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
@@ -0,0 +1,44 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_adjusted;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for construction of a structure or tuple just to
+ /// assign a value in it.
+ ///
+ /// ### Why is this bad?
+ /// Readability. If the structure is only created to be
+ /// updated, why not write the structure you want in the first place?
+ ///
+ /// ### Example
+ /// ```rust
+ /// (0, 0).0 = 1
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TEMPORARY_ASSIGNMENT,
+ complexity,
+ "assignments to temporaries"
+}
+
+fn is_temporary(expr: &Expr<'_>) -> bool {
+ matches!(&expr.kind, ExprKind::Struct(..) | ExprKind::Tup(..))
+}
+
+declare_lint_pass!(TemporaryAssignment => [TEMPORARY_ASSIGNMENT]);
+
+impl<'tcx> LateLintPass<'tcx> for TemporaryAssignment {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if let ExprKind::Assign(target, ..) = &expr.kind {
+ let mut base = target;
+ while let ExprKind::Field(f, _) | ExprKind::Index(f, _) = &base.kind {
+ base = f;
+ }
+ if is_temporary(base) && !is_adjusted(cx, base) {
+ span_lint(cx, TEMPORARY_ASSIGNMENT, expr.span, "assignment to temporary");
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
new file mode 100644
index 000000000..aa6c01b3a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
@@ -0,0 +1,99 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::match_def_path;
+use clippy_utils::source::snippet_with_applicability;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `.to_digit(..).is_some()` on `char`s.
+ ///
+ /// ### Why is this bad?
+ /// This is a convoluted way of checking if a `char` is a digit. It's
+ /// more straightforward to use the dedicated `is_digit` method.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let c = 'c';
+ /// # let radix = 10;
+ /// let is_digit = c.to_digit(radix).is_some();
+ /// ```
+ /// can be written as:
+ /// ```
+ /// # let c = 'c';
+ /// # let radix = 10;
+ /// let is_digit = c.is_digit(radix);
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub TO_DIGIT_IS_SOME,
+ style,
+ "`char.is_digit()` is clearer"
+}
+
+declare_lint_pass!(ToDigitIsSome => [TO_DIGIT_IS_SOME]);
+
+impl<'tcx> LateLintPass<'tcx> for ToDigitIsSome {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if_chain! {
+ if let hir::ExprKind::MethodCall(is_some_path, is_some_args, _) = &expr.kind;
+ if is_some_path.ident.name.as_str() == "is_some";
+ if let [to_digit_expr] = &**is_some_args;
+ then {
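+ // `to_digit` can appear either as a method call (`c.to_digit(radix)`) or as a fully
+ // qualified call (`char::to_digit(c, radix)`); both forms are handled below.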
+ let match_result = match &to_digit_expr.kind {
+ hir::ExprKind::MethodCall(to_digits_path, to_digit_args, _) => {
+ if_chain! {
+ if let [char_arg, radix_arg] = &**to_digit_args;
+ if to_digits_path.ident.name.as_str() == "to_digit";
+ let char_arg_ty = cx.typeck_results().expr_ty_adjusted(char_arg);
+ if *char_arg_ty.kind() == ty::Char;
+ then {
+ Some((true, char_arg, radix_arg))
+ } else {
+ None
+ }
+ }
+ }
+ hir::ExprKind::Call(to_digits_call, to_digit_args) => {
+ if_chain! {
+ if let [char_arg, radix_arg] = &**to_digit_args;
+ if let hir::ExprKind::Path(to_digits_path) = &to_digits_call.kind;
+ if let to_digits_call_res = cx.qpath_res(to_digits_path, to_digits_call.hir_id);
+ if let Some(to_digits_def_id) = to_digits_call_res.opt_def_id();
+ if match_def_path(cx, to_digits_def_id, &["core", "char", "methods", "<impl char>", "to_digit"]);
+ then {
+ Some((false, char_arg, radix_arg))
+ } else {
+ None
+ }
+ }
+ }
+ _ => None
+ };
+
+ if let Some((is_method_call, char_arg, radix_arg)) = match_result {
+ let mut applicability = Applicability::MachineApplicable;
+ let char_arg_snip = snippet_with_applicability(cx, char_arg.span, "_", &mut applicability);
+ let radix_snip = snippet_with_applicability(cx, radix_arg.span, "_", &mut applicability);
+
+ span_lint_and_sugg(
+ cx,
+ TO_DIGIT_IS_SOME,
+ expr.span,
+ "use of `.to_digit(..).is_some()`",
+ "try this",
+ if is_method_call {
+ format!("{}.is_digit({})", char_arg_snip, radix_snip)
+ } else {
+ format!("char::is_digit({}, {})", char_arg_snip, radix_snip)
+ },
+ applicability,
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs b/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs
new file mode 100644
index 000000000..58cc057a3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/trailing_empty_array.rs
@@ -0,0 +1,78 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{HirId, Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::Const;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Displays a warning when a struct with a trailing zero-sized array is declared without a `repr` attribute.
+ ///
+ /// ### Why is this bad?
+ /// Zero-sized arrays aren't very useful in Rust itself, so such a struct is likely being created to pass to C code or in some other situation where control over memory layout matters (for example, in conjunction with manual allocation to make it easy to compute the offset of the array). Either way, `#[repr(C)]` (or another `repr` attribute) is needed.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct RarelyUseful {
+ /// some_field: u32,
+ /// last: [u32; 0],
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// #[repr(C)]
+ /// struct MoreOftenUseful {
+ /// some_field: usize,
+ /// last: [u32; 0],
+ /// }
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub TRAILING_EMPTY_ARRAY,
+ nursery,
+ "struct with a trailing zero-sized array but without `#[repr(C)]` or another `repr` attribute"
+}
+declare_lint_pass!(TrailingEmptyArray => [TRAILING_EMPTY_ARRAY]);
+
+impl<'tcx> LateLintPass<'tcx> for TrailingEmptyArray {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if is_struct_with_trailing_zero_sized_array(cx, item) && !has_repr_attr(cx, item.hir_id()) {
+ span_lint_and_help(
+ cx,
+ TRAILING_EMPTY_ARRAY,
+ item.span,
+ "trailing zero-sized array in a struct which is not marked with a `repr` attribute",
+ None,
+ &format!(
+ "consider annotating `{}` with `#[repr(C)]` or another `repr` attribute",
+ cx.tcx.def_path_str(item.def_id.to_def_id())
+ ),
+ );
+ }
+ }
+}
+
+fn is_struct_with_trailing_zero_sized_array(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
+ if_chain! {
+ // First check if last field is an array
+ if let ItemKind::Struct(data, _) = &item.kind;
+ if let Some(last_field) = data.fields().last();
+ if let rustc_hir::TyKind::Array(_, rustc_hir::ArrayLen::Body(length)) = last_field.ty.kind;
+
+ // Then check if that array is zero-sized
+ let length_ldid = cx.tcx.hir().local_def_id(length.hir_id);
+ let length = Const::from_anon_const(cx.tcx, length_ldid);
+ let length = length.try_eval_usize(cx.tcx, cx.param_env);
+ if let Some(length) = length;
+ then {
+ length == 0
+ } else {
+ false
+ }
+ }
+}
+
+fn has_repr_attr(cx: &LateContext<'_>, hir_id: HirId) -> bool {
+ cx.tcx.hir().attrs(hir_id).iter().any(|attr| attr.has_name(sym::repr))
+}
diff --git a/src/tools/clippy/clippy_lints/src/trait_bounds.rs b/src/tools/clippy/clippy_lints/src/trait_bounds.rs
new file mode 100644
index 000000000..0a42a31fb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/trait_bounds.rs
@@ -0,0 +1,376 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
+use clippy_utils::source::{snippet, snippet_opt, snippet_with_applicability};
+use clippy_utils::{SpanlessEq, SpanlessHash};
+use core::hash::{Hash, Hasher};
+use if_chain::if_chain;
+use itertools::Itertools;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::unhash::UnhashMap;
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{
+ GenericArg, GenericBound, Generics, Item, ItemKind, Node, Path, PathSegment, PredicateOrigin, QPath,
+ TraitBoundModifier, TraitItem, TraitRef, Ty, TyKind, WherePredicate,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{BytePos, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns about unnecessary type repetitions in trait bounds
+ ///
+ /// ### Why is this bad?
+ /// Repeating the type for every bound makes the code
+ /// less readable than combining the bounds
+ ///
+ /// ### Example
+ /// ```rust
+ /// pub fn foo<T>(t: T) where T: Copy, T: Clone {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// pub fn foo<T>(t: T) where T: Copy + Clone {}
+ /// ```
+ #[clippy::version = "1.38.0"]
+ pub TYPE_REPETITION_IN_BOUNDS,
+ nursery,
+ "types are repeated unnecessary in trait bounds use `+` instead of using `T: _, T: _`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for cases where generics are being used and multiple
+ /// syntax specifications for trait bounds are used simultaneously.
+ ///
+ /// ### Why is this bad?
+ /// Duplicate bounds make the code
+ /// less readable than specifying them only once.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn func<T: Clone + Default>(arg: T) where T: Clone + Default {}
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # mod hidden {
+ /// fn func<T: Clone + Default>(arg: T) {}
+ /// # }
+ ///
+ /// // or
+ ///
+ /// fn func<T>(arg: T) where T: Clone + Default {}
+ /// ```
+ ///
+ /// ```rust
+ /// fn foo<T: Default + Default>(bar: T) {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn foo<T: Default>(bar: T) {}
+ /// ```
+ ///
+ /// ```rust
+ /// fn foo<T>(bar: T) where T: Default + Default {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn foo<T>(bar: T) where T: Default {}
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub TRAIT_DUPLICATION_IN_BOUNDS,
+ nursery,
+ "check if the same trait bounds are specified more than once during a generic declaration"
+}
+
+#[derive(Copy, Clone)]
+pub struct TraitBounds {
+ max_trait_bounds: u64,
+}
+
+impl TraitBounds {
+ #[must_use]
+ pub fn new(max_trait_bounds: u64) -> Self {
+ Self { max_trait_bounds }
+ }
+}
+
+impl_lint_pass!(TraitBounds => [TYPE_REPETITION_IN_BOUNDS, TRAIT_DUPLICATION_IN_BOUNDS]);
+
+impl<'tcx> LateLintPass<'tcx> for TraitBounds {
+ fn check_generics(&mut self, cx: &LateContext<'tcx>, gen: &'tcx Generics<'_>) {
+ self.check_type_repetition(cx, gen);
+ check_trait_bound_duplication(cx, gen);
+ check_bounds_or_where_duplication(cx, gen);
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ // special handling for self trait bounds as these are not considered generics
+ // e.g. `trait Foo: Display {}`
+ if let Item {
+ kind: ItemKind::Trait(_, _, _, bounds, ..),
+ ..
+ } = item
+ {
+ rollup_traits(cx, bounds, "these bounds contain repeated elements");
+ }
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'tcx>) {
+ let mut self_bounds_map = FxHashMap::default();
+
+ for predicate in item.generics.predicates {
+ if_chain! {
+ if let WherePredicate::BoundPredicate(ref bound_predicate) = predicate;
+ if bound_predicate.origin != PredicateOrigin::ImplTrait;
+ if !bound_predicate.span.from_expansion();
+ if let TyKind::Path(QPath::Resolved(_, Path { segments, .. })) = bound_predicate.bounded_ty.kind;
+ if let Some(PathSegment {
+ res: Some(Res::SelfTy{ trait_: Some(def_id), alias_to: _ }), ..
+ }) = segments.first();
+ if let Some(
+ Node::Item(
+ Item {
+ kind: ItemKind::Trait(_, _, _, self_bounds, _),
+ .. }
+ )
+ ) = cx.tcx.hir().get_if_local(*def_id);
+ then {
+ if self_bounds_map.is_empty() {
+ for bound in self_bounds.iter() {
+ let Some((self_res, self_segments, _)) = get_trait_info_from_bound(bound) else { continue };
+ self_bounds_map.insert(self_res, self_segments);
+ }
+ }
+
+ bound_predicate
+ .bounds
+ .iter()
+ .filter_map(get_trait_info_from_bound)
+ .for_each(|(trait_item_res, trait_item_segments, span)| {
+ if let Some(self_segments) = self_bounds_map.get(&trait_item_res) {
+ if SpanlessEq::new(cx).eq_path_segments(self_segments, trait_item_segments) {
+ span_lint_and_help(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ span,
+ "this trait bound is already specified in trait declaration",
+ None,
+ "consider removing this trait bound",
+ );
+ }
+ }
+ });
+ }
+ }
+ }
+ }
+}
+
+impl TraitBounds {
+ fn check_type_repetition<'tcx>(self, cx: &LateContext<'tcx>, gen: &'tcx Generics<'_>) {
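+ // Map key wrapper that hashes and compares types structurally (ignoring spans), so
+ // syntactically identical bounded types collide in the map below.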
+ struct SpanlessTy<'cx, 'tcx> {
+ ty: &'tcx Ty<'tcx>,
+ cx: &'cx LateContext<'tcx>,
+ }
+ impl PartialEq for SpanlessTy<'_, '_> {
+ fn eq(&self, other: &Self) -> bool {
+ let mut eq = SpanlessEq::new(self.cx);
+ eq.inter_expr().eq_ty(self.ty, other.ty)
+ }
+ }
+ impl Hash for SpanlessTy<'_, '_> {
+ fn hash<H: Hasher>(&self, h: &mut H) {
+ let mut t = SpanlessHash::new(self.cx);
+ t.hash_ty(self.ty);
+ h.write_u64(t.finish());
+ }
+ }
+ impl Eq for SpanlessTy<'_, '_> {}
+
+ if gen.span.from_expansion() {
+ return;
+ }
+ let mut map: UnhashMap<SpanlessTy<'_, '_>, Vec<&GenericBound<'_>>> = UnhashMap::default();
+ let mut applicability = Applicability::MaybeIncorrect;
+ for bound in gen.predicates {
+ if_chain! {
+ if let WherePredicate::BoundPredicate(ref p) = bound;
+ if p.origin != PredicateOrigin::ImplTrait;
+ if p.bounds.len() as u64 <= self.max_trait_bounds;
+ if !p.span.from_expansion();
+ if let Some(ref v) = map.insert(
+ SpanlessTy { ty: p.bounded_ty, cx },
+ p.bounds.iter().collect::<Vec<_>>()
+ );
+
+ then {
+ let trait_bounds = v
+ .iter()
+ .copied()
+ .chain(p.bounds.iter())
+ .filter_map(get_trait_info_from_bound)
+ .map(|(_, _, span)| snippet_with_applicability(cx, span, "..", &mut applicability))
+ .join(" + ");
+ let hint_string = format!(
+ "consider combining the bounds: `{}: {}`",
+ snippet(cx, p.bounded_ty.span, "_"),
+ trait_bounds,
+ );
+ span_lint_and_help(
+ cx,
+ TYPE_REPETITION_IN_BOUNDS,
+ p.span,
+ "this type has already been used as a bound predicate",
+ None,
+ &hint_string,
+ );
+ }
+ }
+ }
+ }
+}
+
+fn check_trait_bound_duplication(cx: &LateContext<'_>, gen: &'_ Generics<'_>) {
+ if gen.span.from_expansion() || gen.params.is_empty() || gen.predicates.is_empty() {
+ return;
+ }
+
+ let mut map = FxHashMap::<_, Vec<_>>::default();
+ for predicate in gen.predicates {
+ if_chain! {
+ if let WherePredicate::BoundPredicate(ref bound_predicate) = predicate;
+ if bound_predicate.origin != PredicateOrigin::ImplTrait;
+ if !bound_predicate.span.from_expansion();
+ if let TyKind::Path(QPath::Resolved(_, Path { segments, .. })) = bound_predicate.bounded_ty.kind;
+ if let Some(segment) = segments.first();
+ then {
+ for (res_where, _, span_where) in bound_predicate.bounds.iter().filter_map(get_trait_info_from_bound) {
+ let trait_resolutions_direct = map.entry(segment.ident).or_default();
+ if let Some((_, span_direct)) = trait_resolutions_direct
+ .iter()
+ .find(|(res_direct, _)| *res_direct == res_where) {
+ span_lint_and_help(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ *span_direct,
+ "this trait bound is already specified in the where clause",
+ None,
+ "consider removing this trait bound",
+ );
+ }
+ else {
+ trait_resolutions_direct.push((res_where, span_where));
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Debug)]
+struct ComparableTraitRef(Res, Vec<Res>);
+
+fn check_bounds_or_where_duplication(cx: &LateContext<'_>, gen: &'_ Generics<'_>) {
+ if gen.span.from_expansion() {
+ return;
+ }
+
+ for predicate in gen.predicates {
+ if let WherePredicate::BoundPredicate(ref bound_predicate) = predicate {
+ let msg = if predicate.in_where_clause() {
+ "these where clauses contain repeated elements"
+ } else {
+ "these bounds contain repeated elements"
+ };
+ rollup_traits(cx, bound_predicate.bounds, msg);
+ }
+ }
+}
+
+fn get_trait_info_from_bound<'a>(bound: &'a GenericBound<'_>) -> Option<(Res, &'a [PathSegment<'a>], Span)> {
+ if let GenericBound::Trait(t, tbm) = bound {
+ let trait_path = t.trait_ref.path;
+ let trait_span = {
+ let path_span = trait_path.span;
+ if let TraitBoundModifier::Maybe = tbm {
+ path_span.with_lo(path_span.lo() - BytePos(1)) // include the `?`
+ } else {
+ path_span
+ }
+ };
+ Some((trait_path.res, trait_path.segments, trait_span))
+ } else {
+ None
+ }
+}
+
+// FIXME: ComparableTraitRef does not support nested bounds needed for associated_type_bounds
+fn into_comparable_trait_ref(trait_ref: &TraitRef<'_>) -> ComparableTraitRef {
+ ComparableTraitRef(
+ trait_ref.path.res,
+ trait_ref
+ .path
+ .segments
+ .iter()
+ .filter_map(|segment| {
+ // get trait bound type arguments
+ Some(segment.args?.args.iter().filter_map(|arg| {
+ if_chain! {
+ if let GenericArg::Type(ty) = arg;
+ if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind;
+ then { return Some(path.res) }
+ }
+ None
+ }))
+ })
+ .flatten()
+ .collect(),
+ )
+}
+
+fn rollup_traits(cx: &LateContext<'_>, bounds: &[GenericBound<'_>], msg: &str) {
+ let mut map = FxHashMap::default();
+ let mut repeated_res = false;
+
+ let only_comparable_trait_refs = |bound: &GenericBound<'_>| {
+ if let GenericBound::Trait(t, _) = bound {
+ Some((into_comparable_trait_ref(&t.trait_ref), t.span))
+ } else {
+ None
+ }
+ };
+
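+ // If `insert` returns a previous entry, the same trait (with the same type arguments)
+ // appears more than once in this bound list.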
+ for bound in bounds.iter().filter_map(only_comparable_trait_refs) {
+ let (comparable_bound, span_direct) = bound;
+ if map.insert(comparable_bound, span_direct).is_some() {
+ repeated_res = true;
+ }
+ }
+
+ if_chain! {
+ if repeated_res;
+ if let [first_trait, .., last_trait] = bounds;
+ then {
+ let all_trait_span = first_trait.span().to(last_trait.span());
+
+ let mut traits = map.values()
+ .filter_map(|span| snippet_opt(cx, *span))
+ .collect::<Vec<_>>();
+ traits.sort_unstable();
+ let traits = traits.join(" + ");
+
+ span_lint_and_sugg(
+ cx,
+ TRAIT_DUPLICATION_IN_BOUNDS,
+ all_trait_span,
+ msg,
+ "try",
+ traits,
+ Applicability::MachineApplicable
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/crosspointer_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/crosspointer_transmute.rs
new file mode 100644
index 000000000..25d0543c8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/crosspointer_transmute.rs
@@ -0,0 +1,37 @@
+use super::CROSSPOINTER_TRANSMUTE;
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `crosspointer_transmute` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::RawPtr(from_ptr), _) if from_ptr.ty == to_ty => {
+ span_lint(
+ cx,
+ CROSSPOINTER_TRANSMUTE,
+ e.span,
+ &format!(
+ "transmute from a type (`{}`) to the type that it points to (`{}`)",
+ from_ty, to_ty
+ ),
+ );
+ true
+ },
+ (_, ty::RawPtr(to_ptr)) if to_ptr.ty == from_ty => {
+ span_lint(
+ cx,
+ CROSSPOINTER_TRANSMUTE,
+ e.span,
+ &format!(
+ "transmute from a type (`{}`) to a pointer to that type (`{}`)",
+ from_ty, to_ty
+ ),
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/mod.rs b/src/tools/clippy/clippy_lints/src/transmute/mod.rs
new file mode 100644
index 000000000..5f3e98144
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/mod.rs
@@ -0,0 +1,460 @@
+mod crosspointer_transmute;
+mod transmute_float_to_int;
+mod transmute_int_to_bool;
+mod transmute_int_to_char;
+mod transmute_int_to_float;
+mod transmute_num_to_bytes;
+mod transmute_ptr_to_ptr;
+mod transmute_ptr_to_ref;
+mod transmute_ref_to_ref;
+mod transmute_undefined_repr;
+mod transmutes_expressible_as_ptr_casts;
+mod unsound_collection_transmute;
+mod useless_transmute;
+mod utils;
+mod wrong_transmute;
+
+use clippy_utils::in_constant;
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes that can't ever be correct on any
+ /// architecture.
+ ///
+ /// ### Why is this bad?
+ /// It's basically guaranteed to be undefined behavior.
+ ///
+ /// ### Known problems
+ /// When interfacing with C, users might want to store pointer-sized
+ /// objects in `extradata` arguments to save an allocation.
+ ///
+ /// ### Example
+ /// ```ignore
+ /// let ptr: *const T = core::intrinsics::transmute('x')
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WRONG_TRANSMUTE,
+ correctness,
+ "transmutes that are confusing at best, undefined behavior at worst and always useless"
+}
+
+// FIXME: Move this to `complexity` again, after #5343 is fixed
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes to the original type of the object
+ /// and transmutes that could be a cast.
+ ///
+ /// ### Why is this bad?
+ /// Readability. The code tricks people into thinking that
+ /// something complex is going on.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// core::intrinsics::transmute(t); // where the result type is the same as `t`'s
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USELESS_TRANSMUTE,
+ complexity,
+ "transmutes that have the same to and from types or could be a cast/coercion"
+}
+
+// FIXME: Merge this lint with USELESS_TRANSMUTE once that is out of the nursery.
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes that could be a pointer cast.
+ ///
+ /// ### Why is this bad?
+ /// Readability. The code tricks people into thinking that
+ /// something complex is going on.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// # let p: *const [i32] = &[];
+ /// unsafe { std::mem::transmute::<*const [i32], *const [u16]>(p) };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let p: *const [i32] = &[];
+ /// p as *const [u16];
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS,
+ complexity,
+ "transmutes that could be a pointer cast"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes between a type `T` and `*T`.
+ ///
+ /// ### Why is this bad?
+ /// It's easy to mistakenly transmute between a type and a
+ /// pointer to that type.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// core::intrinsics::transmute(t) // where the result type is the same as
+ /// // `*t` or `&t`'s
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub CROSSPOINTER_TRANSMUTE,
+ complexity,
+ "transmutes that have to or from types that are a pointer to the other"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from a pointer to a reference.
+ ///
+ /// ### Why is this bad?
+ /// This can always be rewritten with `&` and `*`.
+ ///
+ /// ### Known problems
+ /// - `mem::transmute` in statics and constants is stable from Rust 1.46.0,
+ /// while dereferencing a raw pointer is not stable yet.
+ /// If you need to do this in those places,
+ /// you would have to use `transmute` instead.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// unsafe {
+ /// let _: &T = std::mem::transmute(p); // where p: *const T
+ /// }
+ ///
+ /// // can be written:
+ /// let _: &T = &*p;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRANSMUTE_PTR_TO_REF,
+ complexity,
+ "transmutes from a pointer to a reference type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from an integer to a `char`.
+ ///
+ /// ### Why is this bad?
+ /// Not every integer is a Unicode scalar value.
+ ///
+ /// ### Known problems
+ /// - [`from_u32`] which this lint suggests using is slower than `transmute`
+ /// as it needs to validate the input.
+ /// If you are certain that the input is always a valid Unicode scalar value,
+ /// use [`from_u32_unchecked`] which is as fast as `transmute`
+ /// but has a semantically meaningful name.
+ /// - You might want to handle `None` returned from [`from_u32`] instead of calling `unwrap`.
+ ///
+ /// [`from_u32`]: https://doc.rust-lang.org/std/char/fn.from_u32.html
+ /// [`from_u32_unchecked`]: https://doc.rust-lang.org/std/char/fn.from_u32_unchecked.html
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 1_u32;
+ /// unsafe {
+ /// let _: char = std::mem::transmute(x); // where x: u32
+ /// }
+ ///
+ /// // should be:
+ /// let _ = std::char::from_u32(x).unwrap();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRANSMUTE_INT_TO_CHAR,
+ complexity,
+ "transmutes from an integer to a `char`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from a `&[u8]` to a `&str`.
+ ///
+ /// ### Why is this bad?
+ /// Not every byte slice is a valid UTF-8 string.
+ ///
+ /// ### Known problems
+ /// - [`from_utf8`] which this lint suggests using is slower than `transmute`
+ /// as it needs to validate the input.
+ /// If you are certain that the input is always valid UTF-8,
+ /// use [`from_utf8_unchecked`] which is as fast as `transmute`
+ /// but has a semantically meaningful name.
+ /// - You might want to handle errors returned from [`from_utf8`] instead of calling `unwrap`.
+ ///
+ /// [`from_utf8`]: https://doc.rust-lang.org/std/str/fn.from_utf8.html
+ /// [`from_utf8_unchecked`]: https://doc.rust-lang.org/std/str/fn.from_utf8_unchecked.html
+ ///
+ /// ### Example
+ /// ```rust
+ /// let b: &[u8] = &[1_u8, 2_u8];
+ /// unsafe {
+ /// let _: &str = std::mem::transmute(b); // where b: &[u8]
+ /// }
+ ///
+ /// // should be:
+ /// let _ = std::str::from_utf8(b).unwrap();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRANSMUTE_BYTES_TO_STR,
+ complexity,
+ "transmutes from a `&[u8]` to a `&str`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from an integer to a `bool`.
+ ///
+ /// ### Why is this bad?
+ /// This might result in an invalid in-memory representation of a `bool`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 1_u8;
+ /// unsafe {
+ /// let _: bool = std::mem::transmute(x); // where x: u8
+ /// }
+ ///
+ /// // should be:
+ /// let _: bool = x != 0;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRANSMUTE_INT_TO_BOOL,
+ complexity,
+ "transmutes from an integer to a `bool`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from an integer to a float.
+ ///
+ /// ### Why is this bad?
+ /// Transmutes are dangerous and error-prone, whereas `from_bits` is intuitive
+ /// and safe.
+ ///
+ /// ### Example
+ /// ```rust
+ /// unsafe {
+ /// let _: f32 = std::mem::transmute(1_u32);
+ /// }
+ ///
+ /// // should be:
+ /// let _: f32 = f32::from_bits(1_u32);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRANSMUTE_INT_TO_FLOAT,
+ complexity,
+ "transmutes from an integer to a float"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from a float to an integer.
+ ///
+ /// ### Why is this bad?
+ /// Transmutes are dangerous and error-prone, whereas `to_bits` is intuitive
+ /// and safe.
+ ///
+ /// ### Example
+ /// ```rust
+ /// unsafe {
+ /// let _: u32 = std::mem::transmute(1f32);
+ /// }
+ ///
+ /// // should be:
+ /// let _: u32 = 1f32.to_bits();
+ /// ```
+ #[clippy::version = "1.41.0"]
+ pub TRANSMUTE_FLOAT_TO_INT,
+ complexity,
+ "transmutes from a float to an integer"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from a number to an array of `u8`
+ ///
+ /// ### Why is this bad?
+ /// Transmutes are dangerous and error-prone, whereas `to_ne_bytes`
+ /// is intuitive and safe.
+ ///
+ /// ### Example
+ /// ```rust
+ /// unsafe {
+ /// let x: [u8; 8] = std::mem::transmute(1i64);
+ /// }
+ ///
+ /// // should be
+ /// let x: [u8; 8] = 0i64.to_ne_bytes();
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub TRANSMUTE_NUM_TO_BYTES,
+ complexity,
+ "transmutes from a number to an array of `u8`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes from a pointer to a pointer, or
+ /// from a reference to a reference.
+ ///
+ /// ### Why is this bad?
+ /// Transmutes are dangerous, and these can instead be
+ /// written as casts.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let ptr = &1u32 as *const u32;
+ /// unsafe {
+ /// // pointer-to-pointer transmute
+ /// let _: *const f32 = std::mem::transmute(ptr);
+ /// // ref-ref transmute
+ /// let _: &f32 = std::mem::transmute(&1u32);
+ /// }
+ /// // These can be respectively written:
+ /// let _ = ptr as *const f32;
+ /// let _ = unsafe{ &*(&1u32 as *const u32 as *const f32) };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TRANSMUTE_PTR_TO_PTR,
+ pedantic,
+ "transmutes from a pointer to a pointer / a reference to a reference"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes between collections whose
+ /// types have different ABI, size or alignment.
+ ///
+ /// ### Why is this bad?
+ /// This is undefined behavior.
+ ///
+ /// ### Known problems
+ /// Currently, we cannot know whether a type is a
+ /// collection, so we just lint the ones that come with `std`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // different size, therefore likely out-of-bounds memory access
+ /// // You absolutely do not want this in your code!
+ /// unsafe {
+ /// std::mem::transmute::<_, Vec<u32>>(vec![2_u16])
+ /// };
+ /// ```
+ ///
+ /// You must always iterate, map and collect the values:
+ ///
+ /// ```rust
+ /// vec![2_u16].into_iter().map(u32::from).collect::<Vec<_>>();
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub UNSOUND_COLLECTION_TRANSMUTE,
+ correctness,
+ "transmute between collections of layout-incompatible types"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmutes between types which do not have a representation defined relative to
+ /// each other.
+ ///
+ /// ### Why is this bad?
+ /// The results of such a transmute are not defined.
+ ///
+ /// ### Known problems
+ /// This lint has had multiple problems in the past and was moved to `nursery`. See issue
+ /// [#8496](https://github.com/rust-lang/rust-clippy/issues/8496) for more details.
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo<T>(u32, T);
+ /// let _ = unsafe { core::mem::transmute::<Foo<u32>, Foo<i32>>(Foo(0u32, 0u32)) };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// #[repr(C)]
+ /// struct Foo<T>(u32, T);
+ /// let _ = unsafe { core::mem::transmute::<Foo<u32>, Foo<i32>>(Foo(0u32, 0u32)) };
+ /// ```
+ #[clippy::version = "1.60.0"]
+ pub TRANSMUTE_UNDEFINED_REPR,
+ nursery,
+ "transmute to or from a type with an undefined representation"
+}
+
+pub struct Transmute {
+ msrv: Option<RustcVersion>,
+}
+impl_lint_pass!(Transmute => [
+ CROSSPOINTER_TRANSMUTE,
+ TRANSMUTE_PTR_TO_REF,
+ TRANSMUTE_PTR_TO_PTR,
+ USELESS_TRANSMUTE,
+ WRONG_TRANSMUTE,
+ TRANSMUTE_INT_TO_CHAR,
+ TRANSMUTE_BYTES_TO_STR,
+ TRANSMUTE_INT_TO_BOOL,
+ TRANSMUTE_INT_TO_FLOAT,
+ TRANSMUTE_FLOAT_TO_INT,
+ TRANSMUTE_NUM_TO_BYTES,
+ UNSOUND_COLLECTION_TRANSMUTE,
+ TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS,
+ TRANSMUTE_UNDEFINED_REPR,
+]);
+impl Transmute {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+impl<'tcx> LateLintPass<'tcx> for Transmute {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(path_expr, [arg]) = e.kind;
+ if let ExprKind::Path(QPath::Resolved(None, path)) = path_expr.kind;
+ if let Some(def_id) = path.res.opt_def_id();
+ if cx.tcx.is_diagnostic_item(sym::transmute, def_id);
+ then {
+ // Avoid suggesting non-const operations in const contexts:
+ // - from/to bits (https://github.com/rust-lang/rust/issues/73736)
+ // - dereferencing raw pointers (https://github.com/rust-lang/rust/issues/51911)
+ // - char conversions (https://github.com/rust-lang/rust/issues/89259)
+ let const_context = in_constant(cx, e.hir_id);
+
+ let from_ty = cx.typeck_results().expr_ty_adjusted(arg);
+ // Adjustments for `to_ty` happen after the call to `transmute`, so don't use them.
+ let to_ty = cx.typeck_results().expr_ty(e);
+
+ // If useless_transmute is triggered, the other lints can be skipped.
+ if useless_transmute::check(cx, e, from_ty, to_ty, arg) {
+ return;
+ }
+
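+ // Bitwise `|` (rather than `||`) is used so that every check runs and can emit its own
+ // lint; the last two are grouped with `||` so `transmute_undefined_repr` is skipped once
+ // `unsound_collection_transmute` has already fired.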
+ let linted = wrong_transmute::check(cx, e, from_ty, to_ty)
+ | crosspointer_transmute::check(cx, e, from_ty, to_ty)
+ | transmute_ptr_to_ref::check(cx, e, from_ty, to_ty, arg, path, self.msrv)
+ | transmute_int_to_char::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_ref_to_ref::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_ptr_to_ptr::check(cx, e, from_ty, to_ty, arg)
+ | transmute_int_to_bool::check(cx, e, from_ty, to_ty, arg)
+ | transmute_int_to_float::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_float_to_int::check(cx, e, from_ty, to_ty, arg, const_context)
+ | transmute_num_to_bytes::check(cx, e, from_ty, to_ty, arg, const_context)
+ | (
+ unsound_collection_transmute::check(cx, e, from_ty, to_ty)
+ || transmute_undefined_repr::check(cx, e, from_ty, to_ty)
+ );
+
+ if !linted {
+ transmutes_expressible_as_ptr_casts::check(cx, e, from_ty, to_ty, arg);
+ }
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs
new file mode 100644
index 000000000..1bde977cf
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs
@@ -0,0 +1,65 @@
+use super::TRANSMUTE_FLOAT_TO_INT;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use if_chain::if_chain;
+use rustc_ast as ast;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `transmute_float_to_int` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ mut arg: &'tcx Expr<'_>,
+ const_context: bool,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Float(float_ty), ty::Int(_) | ty::Uint(_)) if !const_context => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_FLOAT_TO_INT,
+ e.span,
+ &format!("transmute from a `{}` to a `{}`", from_ty, to_ty),
+ |diag| {
+ let mut sugg = sugg::Sugg::hir(cx, arg, "..");
+
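+ // Peel a leading unary minus so the literal check below sees the bare float literal.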
+ if let ExprKind::Unary(UnOp::Neg, inner_expr) = &arg.kind {
+ arg = inner_expr;
+ }
+
+ if_chain! {
+ // if the expression is a float literal and it is unsuffixed then
+ // add a suffix so the suggestion is valid and unambiguous
+ if let ExprKind::Lit(lit) = &arg.kind;
+ if let ast::LitKind::Float(_, ast::LitFloatType::Unsuffixed) = lit.node;
+ then {
+ let op = format!("{}{}", sugg, float_ty.name_str()).into();
+ match sugg {
+ sugg::Sugg::MaybeParen(_) => sugg = sugg::Sugg::MaybeParen(op),
+ _ => sugg = sugg::Sugg::NonParen(op)
+ }
+ }
+ }
+
+ sugg = sugg::Sugg::NonParen(format!("{}.to_bits()", sugg.maybe_par()).into());
+
+ // cast the result of `to_bits` if `to_ty` is signed
+ sugg = if let ty::Int(int_ty) = to_ty.kind() {
+ sugg.as_ty(int_ty.name_str().to_string())
+ } else {
+ sugg
+ };
+
+ diag.span_suggestion(e.span, "consider using", sugg, Applicability::Unspecified);
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_bool.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_bool.rs
new file mode 100644
index 000000000..8c50b58ca
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_bool.rs
@@ -0,0 +1,42 @@
+use super::TRANSMUTE_INT_TO_BOOL;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use rustc_ast as ast;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use std::borrow::Cow;
+
+/// Checks for `transmute_int_to_bool` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Int(ty::IntTy::I8) | ty::Uint(ty::UintTy::U8), ty::Bool) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_INT_TO_BOOL,
+ e.span,
+ &format!("transmute from a `{}` to a `bool`", from_ty),
+ |diag| {
+ let arg = sugg::Sugg::hir(cx, arg, "..");
+ let zero = sugg::Sugg::NonParen(Cow::from("0"));
+ diag.span_suggestion(
+ e.span,
+ "consider using",
+ sugg::make_binop(ast::BinOpKind::Ne, &arg, &zero).to_string(),
+ Applicability::Unspecified,
+ );
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_char.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_char.rs
new file mode 100644
index 000000000..9e1823c37
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_char.rs
@@ -0,0 +1,46 @@
+use super::TRANSMUTE_INT_TO_CHAR;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use rustc_ast as ast;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `transmute_int_to_char` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+ const_context: bool,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Int(ty::IntTy::I32) | ty::Uint(ty::UintTy::U32), &ty::Char) if !const_context => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_INT_TO_CHAR,
+ e.span,
+ &format!("transmute from a `{}` to a `char`", from_ty),
+ |diag| {
+ let arg = sugg::Sugg::hir(cx, arg, "..");
+ let arg = if let ty::Int(_) = from_ty.kind() {
+ arg.as_ty(ast::UintTy::U32.name_str())
+ } else {
+ arg
+ };
+ diag.span_suggestion(
+ e.span,
+ "consider using",
+ format!("std::char::from_u32({}).unwrap()", arg),
+ Applicability::Unspecified,
+ );
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_float.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_float.rs
new file mode 100644
index 000000000..b8703052e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_float.rs
@@ -0,0 +1,48 @@
+use super::TRANSMUTE_INT_TO_FLOAT;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `transmute_int_to_float` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+ const_context: bool,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Int(_) | ty::Uint(_), ty::Float(_)) if !const_context => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_INT_TO_FLOAT,
+ e.span,
+ &format!("transmute from a `{}` to a `{}`", from_ty, to_ty),
+ |diag| {
+ let arg = sugg::Sugg::hir(cx, arg, "..");
+ let arg = if let ty::Int(int_ty) = from_ty.kind() {
+ arg.as_ty(format!(
+ "u{}",
+ int_ty.bit_width().map_or_else(|| "size".to_string(), |v| v.to_string())
+ ))
+ } else {
+ arg
+ };
+ diag.span_suggestion(
+ e.span,
+ "consider using",
+ format!("{}::from_bits({})", to_ty, arg),
+ Applicability::Unspecified,
+ );
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_num_to_bytes.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_num_to_bytes.rs
new file mode 100644
index 000000000..52d193d11
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_num_to_bytes.rs
@@ -0,0 +1,49 @@
+use super::TRANSMUTE_NUM_TO_BYTES;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty, UintTy};
+
+/// Checks for `transmute_num_to_bytes` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+ const_context: bool,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Int(_) | ty::Uint(_) | ty::Float(_), ty::Array(arr_ty, _)) => {
+ if !matches!(arr_ty.kind(), ty::Uint(UintTy::U8)) {
+ return false;
+ }
+ if matches!(from_ty.kind(), ty::Float(_)) && const_context {
+ // TODO: Remove when const_float_bits_conv is stabilized
+ // rust#72447
+ return false;
+ }
+
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_NUM_TO_BYTES,
+ e.span,
+ &format!("transmute from a `{}` to a `{}`", from_ty, to_ty),
+ |diag| {
+ let arg = sugg::Sugg::hir(cx, arg, "..");
+ diag.span_suggestion(
+ e.span,
+ "consider using `to_ne_bytes()`",
+ format!("{}.to_ne_bytes()", arg),
+ Applicability::Unspecified,
+ );
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs
new file mode 100644
index 000000000..31a9b69ca
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs
@@ -0,0 +1,36 @@
+use super::TRANSMUTE_PTR_TO_PTR;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `transmute_ptr_to_ptr` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::RawPtr(_), ty::RawPtr(to_ty)) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_PTR_TO_PTR,
+ e.span,
+ "transmute from a pointer to a pointer",
+ |diag| {
+ if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
+ let sugg = arg.as_ty(cx.tcx.mk_ptr(*to_ty));
+ diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified);
+ }
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs
new file mode 100644
index 000000000..5eb03275b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs
@@ -0,0 +1,84 @@
+use super::TRANSMUTE_PTR_TO_REF;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{meets_msrv, msrvs, sugg};
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, Expr, GenericArg, Mutability, Path, TyKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty, TypeVisitable};
+use rustc_semver::RustcVersion;
+
+/// Checks for `transmute_ptr_to_ref` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+ path: &'tcx Path<'_>,
+ msrv: Option<RustcVersion>,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::RawPtr(from_ptr_ty), ty::Ref(_, to_ref_ty, mutbl)) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_PTR_TO_REF,
+ e.span,
+ &format!(
+ "transmute from a pointer type (`{}`) to a reference type (`{}`)",
+ from_ty, to_ty
+ ),
+ |diag| {
+ let arg = sugg::Sugg::hir(cx, arg, "..");
+ let (deref, cast) = if *mutbl == Mutability::Mut {
+ ("&mut *", "*mut")
+ } else {
+ ("&*", "*const")
+ };
+ let mut app = Applicability::MachineApplicable;
+
+ let sugg = if let Some(ty) = get_explicit_type(path) {
+ let ty_snip = snippet_with_applicability(cx, ty.span, "..", &mut app);
+ if meets_msrv(msrv, msrvs::POINTER_CAST) {
+ format!("{}{}.cast::<{}>()", deref, arg.maybe_par(), ty_snip)
+ } else if from_ptr_ty.has_erased_regions() {
+ sugg::make_unop(deref, arg.as_ty(format!("{} () as {} {}", cast, cast, ty_snip)))
+ .to_string()
+ } else {
+ sugg::make_unop(deref, arg.as_ty(format!("{} {}", cast, ty_snip))).to_string()
+ }
+ } else if from_ptr_ty.ty == *to_ref_ty {
+ if from_ptr_ty.has_erased_regions() {
+ if meets_msrv(msrv, msrvs::POINTER_CAST) {
+ format!("{}{}.cast::<{}>()", deref, arg.maybe_par(), to_ref_ty)
+ } else {
+ sugg::make_unop(deref, arg.as_ty(format!("{} () as {} {}", cast, cast, to_ref_ty)))
+ .to_string()
+ }
+ } else {
+ sugg::make_unop(deref, arg).to_string()
+ }
+ } else {
+ sugg::make_unop(deref, arg.as_ty(format!("{} {}", cast, to_ref_ty))).to_string()
+ };
+
+ diag.span_suggestion(e.span, "try", sugg, app);
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
+
+/// Gets the type `Bar` in `…::transmute<Foo, &Bar>`.
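+/// Returns `None` if no explicit type arguments were written or the destination type is not a reference.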
+fn get_explicit_type<'tcx>(path: &'tcx Path<'tcx>) -> Option<&'tcx hir::Ty<'tcx>> {
+ if let GenericArg::Type(ty) = path.segments.last()?.args?.args.get(1)?
+ && let TyKind::Rptr(_, ty) = &ty.kind
+ {
+ Some(ty.ty)
+ } else {
+ None
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs
new file mode 100644
index 000000000..707a11d36
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ref_to_ref.rs
@@ -0,0 +1,89 @@
+use super::{TRANSMUTE_BYTES_TO_STR, TRANSMUTE_PTR_TO_PTR};
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::snippet;
+use clippy_utils::sugg;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, Mutability};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `transmute_bytes_to_str` and `transmute_ptr_to_ptr` lints.
+/// Returns `true` if either one triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+ const_context: bool,
+) -> bool {
+ let mut triggered = false;
+
+ if let (ty::Ref(_, ty_from, from_mutbl), ty::Ref(_, ty_to, to_mutbl)) = (&from_ty.kind(), &to_ty.kind()) {
+ if_chain! {
+ if let (&ty::Slice(slice_ty), &ty::Str) = (&ty_from.kind(), &ty_to.kind());
+ if let ty::Uint(ty::UintTy::U8) = slice_ty.kind();
+ if from_mutbl == to_mutbl;
+ then {
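+ // A `&mut [u8]` -> `&mut str` transmute needs the `_mut` variants of the `from_utf8` helpers.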
+ let postfix = if *from_mutbl == Mutability::Mut {
+ "_mut"
+ } else {
+ ""
+ };
+
+ let snippet = snippet(cx, arg.span, "..");
+
+ span_lint_and_sugg(
+ cx,
+ TRANSMUTE_BYTES_TO_STR,
+ e.span,
+ &format!("transmute from a `{}` to a `{}`", from_ty, to_ty),
+ "consider using",
+ if const_context {
+ format!("std::str::from_utf8_unchecked{postfix}({snippet})")
+ } else {
+ format!("std::str::from_utf8{postfix}({snippet}).unwrap()")
+ },
+ Applicability::MaybeIncorrect,
+ );
+ triggered = true;
+ } else {
+ if (cx.tcx.erase_regions(from_ty) != cx.tcx.erase_regions(to_ty))
+ && !const_context {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_PTR_TO_PTR,
+ e.span,
+ "transmute from a reference to a reference",
+ |diag| if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
+ let ty_from_and_mut = ty::TypeAndMut {
+ ty: *ty_from,
+ mutbl: *from_mutbl
+ };
+ let ty_to_and_mut = ty::TypeAndMut { ty: *ty_to, mutbl: *to_mutbl };
+ let sugg_paren = arg
+ .as_ty(cx.tcx.mk_ptr(ty_from_and_mut))
+ .as_ty(cx.tcx.mk_ptr(ty_to_and_mut));
+ let sugg = if *to_mutbl == Mutability::Mut {
+ sugg_paren.mut_addr_deref()
+ } else {
+ sugg_paren.addr_deref()
+ };
+ diag.span_suggestion(
+ e.span,
+ "try",
+ sugg,
+ Applicability::Unspecified,
+ );
+ },
+ );
+
+ triggered = true;
+ }
+ }
+ }
+ }
+
+ triggered
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
new file mode 100644
index 000000000..20b348fc1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
@@ -0,0 +1,372 @@
+use super::TRANSMUTE_UNDEFINED_REPR;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::is_c_void;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::subst::{Subst, SubstsRef};
+use rustc_middle::ty::{self, IntTy, Ty, TypeAndMut, UintTy};
+use rustc_span::Span;
+
+#[allow(clippy::too_many_lines)]
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty_orig: Ty<'tcx>,
+ to_ty_orig: Ty<'tcx>,
+) -> bool {
+ let mut from_ty = cx.tcx.erase_regions(from_ty_orig);
+ let mut to_ty = cx.tcx.erase_regions(to_ty_orig);
+
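+ // Strip matching wrapper layers from both types and compare what remains: `break` means this pair
+ // is fine (no lint), `continue` peels another layer, and an early `return true` reports a
+ // transmute involving an undefined layout.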
+ while from_ty != to_ty {
+ match reduce_refs(cx, e.span, from_ty, to_ty) {
+ ReducedTys::FromFatPtr {
+ unsized_ty,
+ to_ty: to_sub_ty,
+ } => match reduce_ty(cx, to_sub_ty) {
+ ReducedTy::TypeErasure => break,
+ ReducedTy::UnorderedFields(ty) if is_size_pair(ty) => break,
+ ReducedTy::Ref(to_sub_ty) => {
+ from_ty = unsized_ty;
+ to_ty = to_sub_ty;
+ continue;
+ },
+ _ => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!("transmute from `{}` which has an undefined layout", from_ty_orig),
+ |diag| {
+ if from_ty_orig.peel_refs() != unsized_ty {
+ diag.note(&format!("the contained type `&{}` has an undefined layout", unsized_ty));
+ }
+ },
+ );
+ return true;
+ },
+ },
+ ReducedTys::ToFatPtr {
+ unsized_ty,
+ from_ty: from_sub_ty,
+ } => match reduce_ty(cx, from_sub_ty) {
+ ReducedTy::TypeErasure => break,
+ ReducedTy::UnorderedFields(ty) if is_size_pair(ty) => break,
+ ReducedTy::Ref(from_sub_ty) => {
+ from_ty = from_sub_ty;
+ to_ty = unsized_ty;
+ continue;
+ },
+ _ => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!("transmute to `{}` which has an undefined layout", to_ty_orig),
+ |diag| {
+ if to_ty_orig.peel_refs() != unsized_ty {
+ diag.note(&format!("the contained type `&{}` has an undefined layout", unsized_ty));
+ }
+ },
+ );
+ return true;
+ },
+ },
+ ReducedTys::ToPtr {
+ from_ty: from_sub_ty,
+ to_ty: to_sub_ty,
+ } => match reduce_ty(cx, from_sub_ty) {
+ ReducedTy::UnorderedFields(from_ty) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!("transmute from `{}` which has an undefined layout", from_ty_orig),
+ |diag| {
+ if from_ty_orig.peel_refs() != from_ty {
+ diag.note(&format!("the contained type `{}` has an undefined layout", from_ty));
+ }
+ },
+ );
+ return true;
+ },
+ ReducedTy::Ref(from_sub_ty) => {
+ from_ty = from_sub_ty;
+ to_ty = to_sub_ty;
+ continue;
+ },
+ _ => break,
+ },
+ ReducedTys::FromPtr {
+ from_ty: from_sub_ty,
+ to_ty: to_sub_ty,
+ } => match reduce_ty(cx, to_sub_ty) {
+ ReducedTy::UnorderedFields(to_ty) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!("transmute to `{}` which has an undefined layout", to_ty_orig),
+ |diag| {
+ if to_ty_orig.peel_refs() != to_ty {
+ diag.note(&format!("the contained type `{}` has an undefined layout", to_ty));
+ }
+ },
+ );
+ return true;
+ },
+ ReducedTy::Ref(to_sub_ty) => {
+ from_ty = from_sub_ty;
+ to_ty = to_sub_ty;
+ continue;
+ },
+ _ => break,
+ },
+ ReducedTys::Other {
+ from_ty: from_sub_ty,
+ to_ty: to_sub_ty,
+ } => match (reduce_ty(cx, from_sub_ty), reduce_ty(cx, to_sub_ty)) {
+ (ReducedTy::TypeErasure, _) | (_, ReducedTy::TypeErasure) => return false,
+ (ReducedTy::UnorderedFields(from_ty), ReducedTy::UnorderedFields(to_ty)) if from_ty != to_ty => {
+ let same_adt_did = if let (ty::Adt(from_def, from_subs), ty::Adt(to_def, to_subs))
+ = (from_ty.kind(), to_ty.kind())
+ && from_def == to_def
+ {
+ if same_except_params(from_subs, to_subs) {
+ return false;
+ }
+ Some(from_def.did())
+ } else {
+ None
+ };
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!(
+ "transmute from `{}` to `{}`, both of which have an undefined layout",
+ from_ty_orig, to_ty_orig
+ ),
+ |diag| {
+ if let Some(same_adt_did) = same_adt_did {
+ diag.note(&format!(
+ "two instances of the same generic type (`{}`) may have different layouts",
+ cx.tcx.item_name(same_adt_did)
+ ));
+ } else {
+ if from_ty_orig.peel_refs() != from_ty {
+ diag.note(&format!("the contained type `{}` has an undefined layout", from_ty));
+ }
+ if to_ty_orig.peel_refs() != to_ty {
+ diag.note(&format!("the contained type `{}` has an undefined layout", to_ty));
+ }
+ }
+ },
+ );
+ return true;
+ },
+ (
+ ReducedTy::UnorderedFields(from_ty),
+ ReducedTy::Other(_) | ReducedTy::OrderedFields(_) | ReducedTy::Ref(_),
+ ) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!("transmute from `{}` which has an undefined layout", from_ty_orig),
+ |diag| {
+ if from_ty_orig.peel_refs() != from_ty {
+ diag.note(&format!("the contained type `{}` has an undefined layout", from_ty));
+ }
+ },
+ );
+ return true;
+ },
+ (
+ ReducedTy::Other(_) | ReducedTy::OrderedFields(_) | ReducedTy::Ref(_),
+ ReducedTy::UnorderedFields(to_ty),
+ ) => {
+ span_lint_and_then(
+ cx,
+ TRANSMUTE_UNDEFINED_REPR,
+ e.span,
+ &format!("transmute into `{}` which has an undefined layout", to_ty_orig),
+ |diag| {
+ if to_ty_orig.peel_refs() != to_ty {
+ diag.note(&format!("the contained type `{}` has an undefined layout", to_ty));
+ }
+ },
+ );
+ return true;
+ },
+ (ReducedTy::Ref(from_sub_ty), ReducedTy::Ref(to_sub_ty)) => {
+ from_ty = from_sub_ty;
+ to_ty = to_sub_ty;
+ continue;
+ },
+ (
+ ReducedTy::OrderedFields(_) | ReducedTy::Ref(_) | ReducedTy::Other(_) | ReducedTy::Param,
+ ReducedTy::OrderedFields(_) | ReducedTy::Ref(_) | ReducedTy::Other(_) | ReducedTy::Param,
+ )
+ | (
+ ReducedTy::UnorderedFields(_) | ReducedTy::Param,
+ ReducedTy::UnorderedFields(_) | ReducedTy::Param,
+ ) => break,
+ },
+ }
+ }
+
+ false
+}
+
+enum ReducedTys<'tcx> {
+ FromFatPtr { unsized_ty: Ty<'tcx>, to_ty: Ty<'tcx> },
+ ToFatPtr { unsized_ty: Ty<'tcx>, from_ty: Ty<'tcx> },
+ ToPtr { from_ty: Ty<'tcx>, to_ty: Ty<'tcx> },
+ FromPtr { from_ty: Ty<'tcx>, to_ty: Ty<'tcx> },
+ Other { from_ty: Ty<'tcx>, to_ty: Ty<'tcx> },
+}
+
+/// Removes references and raw pointers as long as both types are references or pointers.
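+/// For example, a transmute between `&&T` and `&&U` is reduced to one between `T` and `U`.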
+fn reduce_refs<'tcx>(
+ cx: &LateContext<'tcx>,
+ span: Span,
+ mut from_ty: Ty<'tcx>,
+ mut to_ty: Ty<'tcx>,
+) -> ReducedTys<'tcx> {
+ loop {
+ return match (from_ty.kind(), to_ty.kind()) {
+ (
+ &(ty::Ref(_, from_sub_ty, _) | ty::RawPtr(TypeAndMut { ty: from_sub_ty, .. })),
+ &(ty::Ref(_, to_sub_ty, _) | ty::RawPtr(TypeAndMut { ty: to_sub_ty, .. })),
+ ) => {
+ from_ty = from_sub_ty;
+ to_ty = to_sub_ty;
+ continue;
+ },
+ (&(ty::Ref(_, unsized_ty, _) | ty::RawPtr(TypeAndMut { ty: unsized_ty, .. })), _)
+ if !unsized_ty.is_sized(cx.tcx.at(span), cx.param_env) =>
+ {
+ ReducedTys::FromFatPtr { unsized_ty, to_ty }
+ },
+ (_, &(ty::Ref(_, unsized_ty, _) | ty::RawPtr(TypeAndMut { ty: unsized_ty, .. })))
+ if !unsized_ty.is_sized(cx.tcx.at(span), cx.param_env) =>
+ {
+ ReducedTys::ToFatPtr { unsized_ty, from_ty }
+ },
+ (&(ty::Ref(_, from_ty, _) | ty::RawPtr(TypeAndMut { ty: from_ty, .. })), _) => {
+ ReducedTys::FromPtr { from_ty, to_ty }
+ },
+ (_, &(ty::Ref(_, to_ty, _) | ty::RawPtr(TypeAndMut { ty: to_ty, .. }))) => {
+ ReducedTys::ToPtr { from_ty, to_ty }
+ },
+ _ => ReducedTys::Other { from_ty, to_ty },
+ };
+ }
+}
+
+enum ReducedTy<'tcx> {
+ /// The type can be used for type erasure.
+ TypeErasure,
+ /// The type is a struct containing either no non-zero-sized fields, or multiple non-zero-sized
+ /// fields with a defined order.
+ OrderedFields(Ty<'tcx>),
+ /// The type is a struct containing multiple non-zero-sized fields with no defined order.
+ UnorderedFields(Ty<'tcx>),
+ /// The type is a reference to the contained type.
+ Ref(Ty<'tcx>),
+ /// The type is a generic parameter.
+ Param,
+ /// Any other type.
+ Other(Ty<'tcx>),
+}
+
+/// Reduces a struct containing a single non-zero-sized field to its contained type.
+fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx> {
+ loop {
+ ty = cx.tcx.try_normalize_erasing_regions(cx.param_env, ty).unwrap_or(ty);
+ return match *ty.kind() {
+ ty::Array(sub_ty, _) if matches!(sub_ty.kind(), ty::Int(_) | ty::Uint(_)) => ReducedTy::TypeErasure,
+ ty::Array(sub_ty, _) | ty::Slice(sub_ty) => {
+ ty = sub_ty;
+ continue;
+ },
+ ty::Tuple(args) if args.is_empty() => ReducedTy::TypeErasure,
+ ty::Tuple(args) => {
+ let mut iter = args.iter();
+ let Some(sized_ty) = iter.find(|&ty| !is_zero_sized_ty(cx, ty)) else {
+ return ReducedTy::OrderedFields(ty);
+ };
+ if iter.all(|ty| is_zero_sized_ty(cx, ty)) {
+ ty = sized_ty;
+ continue;
+ }
+ ReducedTy::UnorderedFields(ty)
+ },
+ ty::Adt(def, substs) if def.is_struct() => {
+ let mut iter = def
+ .non_enum_variant()
+ .fields
+ .iter()
+ .map(|f| cx.tcx.bound_type_of(f.did).subst(cx.tcx, substs));
+ let Some(sized_ty) = iter.find(|&ty| !is_zero_sized_ty(cx, ty)) else {
+ return ReducedTy::TypeErasure;
+ };
+ if iter.all(|ty| is_zero_sized_ty(cx, ty)) {
+ ty = sized_ty;
+ continue;
+ }
+ if def.repr().inhibit_struct_field_reordering_opt() {
+ ReducedTy::OrderedFields(ty)
+ } else {
+ ReducedTy::UnorderedFields(ty)
+ }
+ },
+ ty::Adt(def, _) if def.is_enum() && (def.variants().is_empty() || is_c_void(cx, ty)) => {
+ ReducedTy::TypeErasure
+ },
+ // TODO: Check if the conversion to or from at least one of a union's fields is valid.
+ ty::Adt(def, _) if def.is_union() => ReducedTy::TypeErasure,
+ ty::Foreign(_) => ReducedTy::TypeErasure,
+ ty::Ref(_, ty, _) => ReducedTy::Ref(ty),
+ ty::RawPtr(ty) => ReducedTy::Ref(ty.ty),
+ ty::Param(_) => ReducedTy::Param,
+ _ => ReducedTy::Other(ty),
+ };
+ }
+}
+
+fn is_zero_sized_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ if_chain! {
+ if let Ok(ty) = cx.tcx.try_normalize_erasing_regions(cx.param_env, ty);
+ if let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(ty));
+ then {
+ layout.layout.size().bytes() == 0
+ } else {
+ false
+ }
+ }
+}
+
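+/// Checks whether `ty` is a tuple of exactly two pointer-sized integers (`usize`/`isize`), i.e. the
+/// shape of a fat pointer's data/metadata pair.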
+fn is_size_pair(ty: Ty<'_>) -> bool {
+ if let ty::Tuple(tys) = *ty.kind()
+ && let [ty1, ty2] = &**tys
+ {
+ matches!(ty1.kind(), ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize))
+ && matches!(ty2.kind(), ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize))
+ } else {
+ false
+ }
+}
+
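+/// Checks whether two sets of generic arguments differ only in places where at least one side is a
+/// type parameter.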
+fn same_except_params<'tcx>(subs1: SubstsRef<'tcx>, subs2: SubstsRef<'tcx>) -> bool {
+ // TODO: check const parameters as well. Currently this will consider `Array<5>` the same as
+ // `Array<6>`
+ for (ty1, ty2) in subs1.types().zip(subs2.types()).filter(|(ty1, ty2)| ty1 != ty2) {
+ match (ty1.kind(), ty2.kind()) {
+ (ty::Param(_), _) | (_, ty::Param(_)) => (),
+ (ty::Adt(adt1, subs1), ty::Adt(adt2, subs2)) if adt1 == adt2 && same_except_params(subs1, subs2) => (),
+ _ => return false,
+ }
+ }
+ true
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
new file mode 100644
index 000000000..626d7cd46
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
@@ -0,0 +1,39 @@
+use super::utils::can_be_expressed_as_pointer_cast;
+use super::TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::Ty;
+
+/// Checks for `transmutes_expressible_as_ptr_casts` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+) -> bool {
+ if can_be_expressed_as_pointer_cast(cx, e, from_ty, to_ty) {
+ span_lint_and_then(
+ cx,
+ TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS,
+ e.span,
+ &format!(
+ "transmute from `{}` to `{}` which could be expressed as a pointer cast instead",
+ from_ty, to_ty
+ ),
+ |diag| {
+ if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
+ let sugg = arg.as_ty(&to_ty.to_string()).to_string();
+ diag.span_suggestion(e.span, "try", sugg, Applicability::MachineApplicable);
+ }
+ },
+ );
+ true
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs
new file mode 100644
index 000000000..831b0d450
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs
@@ -0,0 +1,52 @@
+use super::utils::is_layout_incompatible;
+use super::UNSOUND_COLLECTION_TRANSMUTE;
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::symbol::sym;
+
+/// Checks for `unsound_collection_transmute` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Adt(from_adt, from_substs), ty::Adt(to_adt, to_substs)) => {
+ if from_adt.did() != to_adt.did() {
+ return false;
+ }
+ if !matches!(
+ cx.tcx.get_diagnostic_name(to_adt.did()),
+ Some(
+ sym::BTreeMap
+ | sym::BTreeSet
+ | sym::BinaryHeap
+ | sym::HashMap
+ | sym::HashSet
+ | sym::Vec
+ | sym::VecDeque
+ )
+ ) {
+ return false;
+ }
+ if from_substs
+ .types()
+ .zip(to_substs.types())
+ .any(|(from_ty, to_ty)| is_layout_incompatible(cx, from_ty, to_ty))
+ {
+ span_lint(
+ cx,
+ UNSOUND_COLLECTION_TRANSMUTE,
+ e.span,
+ &format!(
+ "transmute from `{}` to `{}` with mismatched layout is unsound",
+ from_ty, to_ty
+ ),
+ );
+ true
+ } else {
+ false
+ }
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
new file mode 100644
index 000000000..8122cd716
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
@@ -0,0 +1,72 @@
+use super::USELESS_TRANSMUTE;
+use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
+use clippy_utils::sugg;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty, TypeVisitable};
+
+/// Checks for `useless_transmute` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+ arg: &'tcx Expr<'_>,
+) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ _ if from_ty == to_ty && !from_ty.has_erased_regions() => {
+ span_lint(
+ cx,
+ USELESS_TRANSMUTE,
+ e.span,
+ &format!("transmute from a type (`{}`) to itself", from_ty),
+ );
+ true
+ },
+ (ty::Ref(_, rty, rty_mutbl), ty::RawPtr(ptr_ty)) => {
+ // No way to give the correct suggestion here. Avoid linting for now.
+ if !rty.has_erased_regions() {
+ span_lint_and_then(
+ cx,
+ USELESS_TRANSMUTE,
+ e.span,
+ "transmute from a reference to a pointer",
+ |diag| {
+ if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
+ let rty_and_mut = ty::TypeAndMut {
+ ty: *rty,
+ mutbl: *rty_mutbl,
+ };
+
+ let sugg = if *ptr_ty == rty_and_mut {
+ arg.as_ty(to_ty)
+ } else {
+ arg.as_ty(cx.tcx.mk_ptr(rty_and_mut)).as_ty(to_ty)
+ };
+
+ diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified);
+ }
+ },
+ );
+ }
+ true
+ },
+ (ty::Int(_) | ty::Uint(_), ty::RawPtr(_)) => {
+ span_lint_and_then(
+ cx,
+ USELESS_TRANSMUTE,
+ e.span,
+ "transmute from an integer to a pointer",
+ |diag| {
+ if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
+ diag.span_suggestion(e.span, "try", arg.as_ty(&to_ty.to_string()), Applicability::Unspecified);
+ }
+ },
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/utils.rs b/src/tools/clippy/clippy_lints/src/transmute/utils.rs
new file mode 100644
index 000000000..74927570b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/utils.rs
@@ -0,0 +1,76 @@
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{cast::CastKind, Ty};
+use rustc_span::DUMMY_SP;
+use rustc_typeck::check::{cast::CastCheck, FnCtxt, Inherited};
+
+// check if the component types of the transmuted collection and the result have different ABI,
+// size or alignment
+pub(super) fn is_layout_incompatible<'tcx>(cx: &LateContext<'tcx>, from: Ty<'tcx>, to: Ty<'tcx>) -> bool {
+ if let Ok(from) = cx.tcx.try_normalize_erasing_regions(cx.param_env, from)
+ && let Ok(to) = cx.tcx.try_normalize_erasing_regions(cx.param_env, to)
+ && let Ok(from_layout) = cx.tcx.layout_of(cx.param_env.and(from))
+ && let Ok(to_layout) = cx.tcx.layout_of(cx.param_env.and(to))
+ {
+ from_layout.size != to_layout.size || from_layout.align.abi != to_layout.align.abi
+ } else {
+ // no idea about layout, so don't lint
+ false
+ }
+}
+
+/// Check if the type conversion can be expressed as a pointer cast, instead of
+/// a transmute. In certain cases, including some invalid casts from array
+/// references to pointers, this may cause additional errors to be emitted and/or
+/// ICE error messages. This function will panic if that occurs.
+pub(super) fn can_be_expressed_as_pointer_cast<'tcx>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'_>,
+ from_ty: Ty<'tcx>,
+ to_ty: Ty<'tcx>,
+) -> bool {
+ use CastKind::{AddrPtrCast, ArrayPtrCast, FnPtrAddrCast, FnPtrPtrCast, PtrAddrCast, PtrPtrCast};
+ matches!(
+ check_cast(cx, e, from_ty, to_ty),
+ Some(PtrPtrCast | PtrAddrCast | AddrPtrCast | ArrayPtrCast | FnPtrPtrCast | FnPtrAddrCast)
+ )
+}
+
+/// If a cast from `from_ty` to `to_ty` is valid, returns `Some` containing the kind of
+/// the cast. In certain cases, including some invalid casts from array references
+/// to pointers, this may cause additional errors to be emitted and/or ICE error
+/// messages. This function will panic if that occurs.
+fn check_cast<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> Option<CastKind> {
+ let hir_id = e.hir_id;
+ let local_def_id = hir_id.owner;
+
+ Inherited::build(cx.tcx, local_def_id).enter(|inherited| {
+ let fn_ctxt = FnCtxt::new(&inherited, cx.param_env, hir_id);
+
+ // If we already have errors, we can't be sure we can pointer cast.
+ assert!(
+ !fn_ctxt.errors_reported_since_creation(),
+ "Newly created FnCtxt contained errors"
+ );
+
+ if let Ok(check) = CastCheck::new(
+ &fn_ctxt, e, from_ty, to_ty,
+ // We won't show any error to the user, so we don't care what the span is here.
+ DUMMY_SP, DUMMY_SP,
+ ) {
+ let res = check.do_check(&fn_ctxt);
+
+ // do_check's documentation says that it might return Ok and create
+ // errors in the fcx instead of returning Err in some cases. Those cases
+ // should be filtered out before getting here.
+ assert!(
+ !fn_ctxt.errors_reported_since_creation(),
+ "`fn_ctxt` contained errors after cast check!"
+ );
+
+ res.ok()
+ } else {
+ None
+ }
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmute/wrong_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/wrong_transmute.rs
new file mode 100644
index 000000000..2118f3d69
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmute/wrong_transmute.rs
@@ -0,0 +1,22 @@
+use super::WRONG_TRANSMUTE;
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+
+/// Checks for `wrong_transmute` lint.
+/// Returns `true` if it's triggered, otherwise returns `false`.
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> bool {
+ match (&from_ty.kind(), &to_ty.kind()) {
+ (ty::Float(_) | ty::Char, ty::Ref(..) | ty::RawPtr(_)) => {
+ span_lint(
+ cx,
+ WRONG_TRANSMUTE,
+ e.span,
+ &format!("transmute from a `{}` to a pointer", from_ty),
+ );
+ true
+ },
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/transmuting_null.rs b/src/tools/clippy/clippy_lints/src/transmuting_null.rs
new file mode 100644
index 000000000..7939dfedc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/transmuting_null.rs
@@ -0,0 +1,89 @@
+use clippy_utils::consts::{constant_context, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_expr_diagnostic_item;
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for transmute calls which would receive a null pointer.
+ ///
+ /// ### Why is this bad?
+ /// Transmuting a null pointer is undefined behavior.
+ ///
+ /// ### Known problems
+ /// Not all cases can be detected at the moment of this writing.
+ /// For example, variables which hold a null pointer and are then fed to a `transmute`
+/// call aren't detectable yet.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let null_ref: &u64 = unsafe { std::mem::transmute(0 as *const u64) };
+ /// ```
+ #[clippy::version = "1.35.0"]
+ pub TRANSMUTING_NULL,
+ correctness,
+ "transmutes from a null pointer to a reference, which is undefined behavior"
+}
+
+declare_lint_pass!(TransmutingNull => [TRANSMUTING_NULL]);
+
+const LINT_MSG: &str = "transmuting a known null pointer into a reference";
+
+impl<'tcx> LateLintPass<'tcx> for TransmutingNull {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if in_external_macro(cx.sess(), expr.span) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Call(func, [arg]) = expr.kind;
+ if is_expr_diagnostic_item(cx, func, sym::transmute);
+
+ then {
+ // Catching transmute over constants that resolve to `null`.
+ let mut const_eval_context = constant_context(cx, cx.typeck_results());
+ if_chain! {
+ if let ExprKind::Path(ref _qpath) = arg.kind;
+ if let Some(Constant::RawPtr(x)) = const_eval_context.expr(arg);
+ if x == 0;
+ then {
+ span_lint(cx, TRANSMUTING_NULL, expr.span, LINT_MSG)
+ }
+ }
+
+ // Catching:
+ // `std::mem::transmute(0 as *const i32)`
+ if_chain! {
+ if let ExprKind::Cast(inner_expr, _cast_ty) = arg.kind;
+ if let ExprKind::Lit(ref lit) = inner_expr.kind;
+ if let LitKind::Int(0, _) = lit.node;
+ then {
+ span_lint(cx, TRANSMUTING_NULL, expr.span, LINT_MSG)
+ }
+ }
+
+ // Catching:
+ // `std::mem::transmute(std::ptr::null::<i32>())`
+ if_chain! {
+ if let ExprKind::Call(func1, []) = arg.kind;
+ if is_expr_diagnostic_item(cx, func1, sym::ptr_null);
+ then {
+ span_lint(cx, TRANSMUTING_NULL, expr.span, LINT_MSG)
+ }
+ }
+
+ // FIXME:
+ // Also catch transmutations of variables which are known nulls.
+ // To do this, MIR const propagation seems to be the better tool.
+ // Whenever MIR const prop routines are more developed, this will
+ // become available. As of this writing (25/03/19) it is not yet.
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
new file mode 100644
index 000000000..94945b2e1
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
@@ -0,0 +1,115 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, GenericArg, GenericBounds, GenericParamKind};
+use rustc_hir::{HirId, Lifetime, MutTy, Mutability, Node, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::BORROWED_BOX;
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, lt: &Lifetime, mut_ty: &MutTy<'_>) -> bool {
+ match mut_ty.ty.kind {
+ TyKind::Path(ref qpath) => {
+ let hir_id = mut_ty.ty.hir_id;
+ let def = cx.qpath_res(qpath, hir_id);
+ if_chain! {
+ if let Some(def_id) = def.opt_def_id();
+ if Some(def_id) == cx.tcx.lang_items().owned_box();
+ if let QPath::Resolved(None, path) = *qpath;
+ if let [ref bx] = *path.segments;
+ if let Some(params) = bx.args;
+ if !params.parenthesized;
+ if let Some(inner) = params.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ });
+ then {
+ if is_any_trait(cx, inner) {
+ // Ignore `Box<Any>` types; see issue #1884 for details.
+ return false;
+ }
+
+ let ltopt = if lt.name.is_anonymous() {
+ String::new()
+ } else {
+ format!("{} ", lt.name.ident().as_str())
+ };
+
+ if mut_ty.mutbl == Mutability::Mut {
+ // Ignore `&mut Box<T>` types; see issue #2907 for
+ // details.
+ return false;
+ }
+
+ // When trait objects or opaque types have lifetime or auto-trait bounds,
+ // we need to add parentheses to avoid a syntax error due to its ambiguity.
+ // Originally reported as the issue #3128.
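+ // e.g. `&Box<dyn Foo + Send>` has to become `&(dyn Foo + Send)`; `&dyn Foo + Send` would be
+ // rejected by the parser as an ambiguous `+`.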
+ let inner_snippet = snippet(cx, inner.span, "..");
+ let suggestion = match &inner.kind {
+ TyKind::TraitObject(bounds, lt_bound, _) if bounds.len() > 1 || !lt_bound.is_elided() => {
+ format!("&{}({})", ltopt, &inner_snippet)
+ },
+ TyKind::Path(qpath)
+ if get_bounds_if_impl_trait(cx, qpath, inner.hir_id)
+ .map_or(false, |bounds| bounds.len() > 1) =>
+ {
+ format!("&{}({})", ltopt, &inner_snippet)
+ },
+ _ => format!("&{}{}", ltopt, &inner_snippet),
+ };
+ span_lint_and_sugg(
+ cx,
+ BORROWED_BOX,
+ hir_ty.span,
+ "you seem to be trying to use `&Box<T>`. Consider using just `&T`",
+ "try",
+ suggestion,
+ // To make this `MachineApplicable`, at least one needs to check if it isn't a trait item
+ // because the trait impls of it will break otherwise;
+ // and there may be other cases that result in invalid code.
+ // For example, type coercion doesn't work nicely.
+ Applicability::Unspecified,
+ );
+ return true;
+ }
+ };
+ false
+ },
+ _ => false,
+ }
+}
+
+// Returns `true` if the given type is the `Any` trait.
+fn is_any_trait(cx: &LateContext<'_>, t: &hir::Ty<'_>) -> bool {
+ if_chain! {
+ if let TyKind::TraitObject(traits, ..) = t.kind;
+ if !traits.is_empty();
+ if let Some(trait_did) = traits[0].trait_ref.trait_def_id();
+ // Only Send/Sync can be used as additional traits, so it is enough to
+ // check only the first trait.
+ if cx.tcx.is_diagnostic_item(sym::Any, trait_did);
+ then {
+ return true;
+ }
+ }
+
+ false
+}
+
+fn get_bounds_if_impl_trait<'tcx>(cx: &LateContext<'tcx>, qpath: &QPath<'_>, id: HirId) -> Option<GenericBounds<'tcx>> {
+ if_chain! {
+ if let Some(did) = cx.qpath_res(qpath, id).opt_def_id();
+ if let Some(Node::GenericParam(generic_param)) = cx.tcx.hir().get_if_local(did);
+ if let GenericParamKind::Type { synthetic, .. } = generic_param.kind;
+ if synthetic;
+ if let Some(generics) = cx.tcx.hir().get_generics(id.owner);
+ if let Some(pred) = generics.bounds_for_param(did.expect_local()).next();
+ then {
+ Some(pred.bounds)
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/box_collection.rs b/src/tools/clippy/clippy_lints/src/types/box_collection.rs
new file mode 100644
index 000000000..ba51404d2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/box_collection.rs
@@ -0,0 +1,54 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{path_def_id, qpath_generic_tys};
+use if_chain::if_chain;
+use rustc_hir::{self as hir, def_id::DefId, QPath};
+use rustc_lint::LateContext;
+use rustc_span::{sym, Symbol};
+
+use super::BOX_COLLECTION;
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
+ if_chain! {
+ if Some(def_id) == cx.tcx.lang_items().owned_box();
+ if let Some(item_type) = get_std_collection(cx, qpath);
+ then {
+ let generic = match item_type {
+ sym::String => "",
+ _ => "<..>",
+ };
+
+ let box_content = format!("{outer}{generic}", outer = item_type);
+ span_lint_and_help(
+ cx,
+ BOX_COLLECTION,
+ hir_ty.span,
+ &format!(
+ "you seem to be trying to use `Box<{box_content}>`. Consider using just `{box_content}`"),
+ None,
+ &format!(
+ "`{box_content}` is already on the heap, `Box<{box_content}>` makes an extra allocation")
+ );
+ true
+ } else {
+ false
+ }
+ }
+}
+
+fn get_std_collection(cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<Symbol> {
+ let param = qpath_generic_tys(qpath).next()?;
+ let id = path_def_id(cx, param)?;
+ cx.tcx.get_diagnostic_name(id).filter(|&name| {
+ matches!(
+ name,
+ sym::HashMap
+ | sym::String
+ | sym::Vec
+ | sym::HashSet
+ | sym::VecDeque
+ | sym::LinkedList
+ | sym::BTreeMap
+ | sym::BTreeSet
+ | sym::BinaryHeap
+ )
+ })
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/linked_list.rs b/src/tools/clippy/clippy_lints/src/types/linked_list.rs
new file mode 100644
index 000000000..5fb708741
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/linked_list.rs
@@ -0,0 +1,22 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::{self as hir, def_id::DefId};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::LINKEDLIST;
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, def_id: DefId) -> bool {
+ if cx.tcx.is_diagnostic_item(sym::LinkedList, def_id) {
+ span_lint_and_help(
+ cx,
+ LINKEDLIST,
+ hir_ty.span,
+ "you seem to be using a `LinkedList`! Perhaps you meant some other data structure?",
+ None,
+ "a `VecDeque` might work",
+ );
+ true
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/mod.rs b/src/tools/clippy/clippy_lints/src/types/mod.rs
new file mode 100644
index 000000000..353a6f6b8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/mod.rs
@@ -0,0 +1,574 @@
+mod borrowed_box;
+mod box_collection;
+mod linked_list;
+mod option_option;
+mod rc_buffer;
+mod rc_mutex;
+mod redundant_allocation;
+mod type_complexity;
+mod utils;
+mod vec_box;
+
+use rustc_hir as hir;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{
+ Body, FnDecl, FnRetTy, GenericArg, HirId, ImplItem, ImplItemKind, Item, ItemKind, Local, MutTy, QPath, TraitItem,
+ TraitItemKind, TyKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `Box<T>` where T is a collection such as Vec anywhere in the code.
+ /// Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information.
+ ///
+ /// ### Why is this bad?
+ /// Collections already keep their contents in a separate area on
+ /// the heap. So if you `Box` them, you just add another level of indirection
+ /// without any benefit whatsoever.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// struct X {
+ /// values: Box<Vec<Foo>>,
+ /// }
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust,ignore
+ /// struct X {
+ /// values: Vec<Foo>,
+ /// }
+ /// ```
+ #[clippy::version = "1.57.0"]
+ pub BOX_COLLECTION,
+ perf,
+ "usage of `Box<Vec<T>>`, vector elements are already on the heap"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
+ /// Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information.
+ ///
+ /// ### Why is this bad?
+ /// `Vec` already keeps its contents in a separate area on
+ /// the heap. So if you `Box` its contents, you just add another level of indirection.
+ ///
+ /// ### Known problems
+ /// `Vec<Box<T: Sized>>` makes sense if `T` is a large type (see [#3530](https://github.com/rust-lang/rust-clippy/issues/3530),
+ /// 1st comment).
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct X {
+ /// values: Vec<Box<i32>>,
+ /// }
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust
+ /// struct X {
+ /// values: Vec<i32>,
+ /// }
+ /// ```
+ #[clippy::version = "1.33.0"]
+ pub VEC_BOX,
+ complexity,
+ "usage of `Vec<Box<T>>` where T: Sized, vector elements are already on the heap"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `Option<Option<_>>` in function signatures and type
+ /// definitions
+ ///
+ /// ### Why is this bad?
+ /// `Option<_>` represents an optional value. `Option<Option<_>>`
+ /// represents an optional optional value which is logically the same thing as an optional
+ /// value but has an unneeded extra level of wrapping.
+ ///
+ /// If you have a case where `Some(Some(_))`, `Some(None)` and `None` are distinct cases,
+ /// consider a custom `enum` instead, with clear names for each case.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn get_data() -> Option<Option<u32>> {
+ /// None
+ /// }
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust
+ /// pub enum Contents {
+ /// Data(Vec<u8>), // Was Some(Some(Vec<u8>))
+ /// NotYetFetched, // Was Some(None)
+ /// None, // Was None
+ /// }
+ ///
+ /// fn get_data() -> Contents {
+ /// Contents::None
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub OPTION_OPTION,
+ pedantic,
+ "usage of `Option<Option<T>>`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of any `LinkedList`, suggesting to use a
+ /// `Vec` or a `VecDeque` (formerly called `RingBuf`).
+ ///
+ /// ### Why is this bad?
+ /// Gankro says:
+ ///
+ /// > The TL;DR of `LinkedList` is that it's built on a massive amount of
+ /// pointers and indirection.
+ /// > It wastes memory, it has terrible cache locality, and is all-around slow.
+ /// `RingBuf`, while
+ /// > "only" amortized for push/pop, should be faster in the general case for
+ /// almost every possible
+ /// > workload, and isn't even amortized at all if you can predict the capacity
+ /// you need.
+ /// >
+ /// > `LinkedList`s are only really good if you're doing a lot of merging or
+ /// splitting of lists.
+ /// > This is because they can just mangle some pointers instead of actually
+ /// copying the data. Even
+ /// > if you're doing a lot of insertion in the middle of the list, `RingBuf`
+ /// can still be better
+ /// > because of how expensive it is to seek to the middle of a `LinkedList`.
+ ///
+ /// ### Known problems
+ /// False positives – the instances where using a
+ /// `LinkedList` makes sense are few and far between, but they can still happen.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::collections::LinkedList;
+ /// let x: LinkedList<usize> = LinkedList::new();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LINKEDLIST,
+ pedantic,
+ "usage of LinkedList, usually a vector is faster, or a more specialized data structure like a `VecDeque`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `&Box<T>` anywhere in the code.
+ /// Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information.
+ ///
+ /// ### Why is this bad?
+ /// A `&Box<T>` parameter requires the function caller to box `T` before passing it to the function.
+ /// Using `&T` defines a concrete type for the parameter and generalizes the function; a `&Box<T>`
+ /// argument would also auto-deref to `&T` at the function call site.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// fn foo(bar: &Box<T>) { ... }
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust,ignore
+ /// fn foo(bar: &T) { ... }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub BORROWED_BOX,
+ complexity,
+ "a borrow of a boxed type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of redundant allocations anywhere in the code.
+ ///
+ /// ### Why is this bad?
+ /// Expressions such as `Rc<&T>`, `Rc<Rc<T>>`, `Rc<Arc<T>>`, `Rc<Box<T>>`, `Arc<&T>`, `Arc<Rc<T>>`,
+ /// `Arc<Arc<T>>`, `Arc<Box<T>>`, `Box<&T>`, `Box<Rc<T>>`, `Box<Arc<T>>`, `Box<Box<T>>`, add an unnecessary level of indirection.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// fn foo(bar: Rc<&usize>) {}
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust
+ /// fn foo(bar: &usize) {}
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub REDUNDANT_ALLOCATION,
+ perf,
+ "redundant allocation"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`.
+ ///
+ /// ### Why is this bad?
+ /// Expressions such as `Rc<String>` usually have no advantage over `Rc<str>`, since
+ /// it is larger and involves an extra level of indirection, and doesn't implement `Borrow<str>`.
+ ///
+ /// While mutating a buffer type would still be possible with `Rc::get_mut()`, it only
+ /// works if there are no additional references yet, which usually defeats the purpose of
+ /// enclosing it in a shared ownership type. Instead, additionally wrapping the inner
+ /// type with an interior mutable container (such as `RefCell` or `Mutex`) would normally
+ /// be used.
+ ///
+ /// ### Known problems
+ /// This pattern can be desirable to avoid the overhead of a `RefCell` or `Mutex` for
+ /// cases where mutation only happens before there are any additional references.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// # use std::rc::Rc;
+ /// fn foo(interned: Rc<String>) { ... }
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust,ignore
+ /// fn foo(interned: Rc<str>) { ... }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub RC_BUFFER,
+ restriction,
+ "shared ownership of a buffer type"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for types used in structs, parameters and `let`
+ /// declarations above a certain complexity threshold.
+ ///
+ /// ### Why is this bad?
+ /// Too complex types make the code less readable. Consider
+ /// using a `type` definition to simplify them.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// struct Foo {
+ /// inner: Rc<Vec<Vec<Box<(u32, u32, u32, u32)>>>>,
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub TYPE_COMPLEXITY,
+ complexity,
+ "usage of very complex types that might be better factored into `type` definitions"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `Rc<Mutex<T>>`.
+ ///
+ /// ### Why is this bad?
+ /// `Rc` is meant for single-threaded use, while `Mutex` is meant for multi-threaded use.
+ /// Consider using `Rc<RefCell<T>>` in single-threaded code or `Arc<Mutex<T>>` in multi-threaded code.
+ ///
+ /// ### Known problems
+ /// Sometimes combining generic types can lead to the requirement that a
+ /// type use Rc in conjunction with Mutex. We must consider those cases false positives, but
+ /// alas they are quite hard to rule out. Luckily they are also rare.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use std::rc::Rc;
+ /// use std::sync::Mutex;
+ /// fn foo(interned: Rc<Mutex<i32>>) { ... }
+ /// ```
+ ///
+ /// Better:
+ ///
+ /// ```rust,ignore
+ /// use std::rc::Rc;
+ /// use std::cell::RefCell;
+ /// fn foo(interned: Rc<RefCell<i32>>) { ... }
+ /// ```
+ #[clippy::version = "1.55.0"]
+ pub RC_MUTEX,
+ restriction,
+ "usage of `Rc<Mutex<T>>`"
+}
+
+pub struct Types {
+ vec_box_size_threshold: u64,
+ type_complexity_threshold: u64,
+ avoid_breaking_exported_api: bool,
+}
+
+impl_lint_pass!(Types => [BOX_COLLECTION, VEC_BOX, OPTION_OPTION, LINKEDLIST, BORROWED_BOX, REDUNDANT_ALLOCATION, RC_BUFFER, RC_MUTEX, TYPE_COMPLEXITY]);
+
+impl<'tcx> LateLintPass<'tcx> for Types {
+ fn check_fn(&mut self, cx: &LateContext<'_>, _: FnKind<'_>, decl: &FnDecl<'_>, _: &Body<'_>, _: Span, id: HirId) {
+ let is_in_trait_impl =
+ if let Some(hir::Node::Item(item)) = cx.tcx.hir().find_by_def_id(cx.tcx.hir().get_parent_item(id)) {
+ matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
+ } else {
+ false
+ };
+
+ let is_exported = cx.access_levels.is_exported(cx.tcx.hir().local_def_id(id));
+
+ self.check_fn_decl(
+ cx,
+ decl,
+ CheckTyContext {
+ is_in_trait_impl,
+ is_exported,
+ ..CheckTyContext::default()
+ },
+ );
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ let is_exported = cx.access_levels.is_exported(item.def_id);
+
+ match item.kind {
+ ItemKind::Static(ty, _, _) | ItemKind::Const(ty, _) => self.check_ty(
+ cx,
+ ty,
+ CheckTyContext {
+ is_exported,
+ ..CheckTyContext::default()
+ },
+ ),
+ // functions, enums, structs, impls and traits are covered
+ _ => (),
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
+ match item.kind {
+ ImplItemKind::Const(ty, _) => {
+ let is_in_trait_impl = if let Some(hir::Node::Item(item)) =
+ cx.tcx.hir().find_by_def_id(cx.tcx.hir().get_parent_item(item.hir_id()))
+ {
+ matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
+ } else {
+ false
+ };
+
+ self.check_ty(
+ cx,
+ ty,
+ CheckTyContext {
+ is_in_trait_impl,
+ ..CheckTyContext::default()
+ },
+ );
+ },
+ // Methods are covered by check_fn.
+ // Type aliases are ignored because it's often impossible to
+ // make a type alias declaration in a trait any simpler; see #1013
+ ImplItemKind::Fn(..) | ImplItemKind::TyAlias(..) => (),
+ }
+ }
+
+ fn check_field_def(&mut self, cx: &LateContext<'_>, field: &hir::FieldDef<'_>) {
+ let is_exported = cx.access_levels.is_exported(cx.tcx.hir().local_def_id(field.hir_id));
+
+ self.check_ty(
+ cx,
+ field.ty,
+ CheckTyContext {
+ is_exported,
+ ..CheckTyContext::default()
+ },
+ );
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &TraitItem<'_>) {
+ let is_exported = cx.access_levels.is_exported(item.def_id);
+
+ let context = CheckTyContext {
+ is_exported,
+ ..CheckTyContext::default()
+ };
+
+ match item.kind {
+ TraitItemKind::Const(ty, _) | TraitItemKind::Type(_, Some(ty)) => {
+ self.check_ty(cx, ty, context);
+ },
+ TraitItemKind::Fn(ref sig, _) => self.check_fn_decl(cx, sig.decl, context),
+ TraitItemKind::Type(..) => (),
+ }
+ }
+
+ fn check_local(&mut self, cx: &LateContext<'_>, local: &Local<'_>) {
+ if let Some(ty) = local.ty {
+ self.check_ty(
+ cx,
+ ty,
+ CheckTyContext {
+ is_local: true,
+ ..CheckTyContext::default()
+ },
+ );
+ }
+ }
+}
+
+impl Types {
+ pub fn new(vec_box_size_threshold: u64, type_complexity_threshold: u64, avoid_breaking_exported_api: bool) -> Self {
+ Self {
+ vec_box_size_threshold,
+ type_complexity_threshold,
+ avoid_breaking_exported_api,
+ }
+ }
+
+ fn check_fn_decl(&mut self, cx: &LateContext<'_>, decl: &FnDecl<'_>, context: CheckTyContext) {
+ // Ignore functions in trait implementations as they are usually forced by the trait definition.
+ //
+ // FIXME: ideally we would like to warn *if the complicated type can be simplified*, but it's hard
+ // to check.
+ if context.is_in_trait_impl {
+ return;
+ }
+
+ for input in decl.inputs {
+ self.check_ty(cx, input, context);
+ }
+
+ if let FnRetTy::Return(ty) = decl.output {
+ self.check_ty(cx, ty, context);
+ }
+ }
+
+ /// Recursively check for `TypePass` lints in the given type. Stop at the first
+ /// lint found.
+ ///
+ /// The parameter `is_local` distinguishes the context of the type.
+ fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, mut context: CheckTyContext) {
+ if hir_ty.span.from_expansion() {
+ return;
+ }
+
+ // Skip trait implementations; see issue #605.
+ if context.is_in_trait_impl {
+ return;
+ }
+
+ if !context.is_nested_call && type_complexity::check(cx, hir_ty, self.type_complexity_threshold) {
+ return;
+ }
+
+ match hir_ty.kind {
+ TyKind::Path(ref qpath) if !context.is_local => {
+ let hir_id = hir_ty.hir_id;
+ let res = cx.qpath_res(qpath, hir_id);
+ if let Some(def_id) = res.opt_def_id() {
+ if self.is_type_change_allowed(context) {
+ // All lints that are being checked in this block are guarded by
+ // the `avoid_breaking_exported_api` configuration. When adding a
+ // new lint, please also add the name to the configuration documentation
+ // in `clippy_lints::utils::conf.rs`
+
+ let mut triggered = false;
+ triggered |= box_collection::check(cx, hir_ty, qpath, def_id);
+ triggered |= redundant_allocation::check(cx, hir_ty, qpath, def_id);
+ triggered |= rc_buffer::check(cx, hir_ty, qpath, def_id);
+ triggered |= vec_box::check(cx, hir_ty, qpath, def_id, self.vec_box_size_threshold);
+ triggered |= option_option::check(cx, hir_ty, qpath, def_id);
+ triggered |= linked_list::check(cx, hir_ty, def_id);
+ triggered |= rc_mutex::check(cx, hir_ty, qpath, def_id);
+
+ if triggered {
+ return;
+ }
+ }
+ }
+ match *qpath {
+ QPath::Resolved(Some(ty), p) => {
+ context.is_nested_call = true;
+ self.check_ty(cx, ty, context);
+ for ty in p.segments.iter().flat_map(|seg| {
+ seg.args
+ .as_ref()
+ .map_or_else(|| [].iter(), |params| params.args.iter())
+ .filter_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+ }) {
+ self.check_ty(cx, ty, context);
+ }
+ },
+ QPath::Resolved(None, p) => {
+ context.is_nested_call = true;
+ for ty in p.segments.iter().flat_map(|seg| {
+ seg.args
+ .as_ref()
+ .map_or_else(|| [].iter(), |params| params.args.iter())
+ .filter_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+ }) {
+ self.check_ty(cx, ty, context);
+ }
+ },
+ QPath::TypeRelative(ty, seg) => {
+ context.is_nested_call = true;
+ self.check_ty(cx, ty, context);
+ if let Some(params) = seg.args {
+ for ty in params.args.iter().filter_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ }) {
+ self.check_ty(cx, ty, context);
+ }
+ }
+ },
+ QPath::LangItem(..) => {},
+ }
+ },
+ TyKind::Rptr(ref lt, ref mut_ty) => {
+ context.is_nested_call = true;
+ if !borrowed_box::check(cx, hir_ty, lt, mut_ty) {
+ self.check_ty(cx, mut_ty.ty, context);
+ }
+ },
+ TyKind::Slice(ty) | TyKind::Array(ty, _) | TyKind::Ptr(MutTy { ty, .. }) => {
+ context.is_nested_call = true;
+ self.check_ty(cx, ty, context);
+ },
+ TyKind::Tup(tys) => {
+ context.is_nested_call = true;
+ for ty in tys {
+ self.check_ty(cx, ty, context);
+ }
+ },
+ _ => {},
+ }
+ }
+
+ /// This function checks if the type is allowed to change in the current context
+ /// based on the `avoid_breaking_exported_api` configuration
+ fn is_type_change_allowed(&self, context: CheckTyContext) -> bool {
+ !(context.is_exported && self.avoid_breaking_exported_api)
+ }
+}
+
+#[allow(clippy::struct_excessive_bools)]
+#[derive(Clone, Copy, Default)]
+struct CheckTyContext {
+ is_in_trait_impl: bool,
+ /// `true` for types on local variables.
+ is_local: bool,
+ /// `true` for types that are part of the public API.
+ is_exported: bool,
+ is_nested_call: bool,
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/option_option.rs b/src/tools/clippy/clippy_lints/src/types/option_option.rs
new file mode 100644
index 000000000..8767e3c30
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/option_option.rs
@@ -0,0 +1,28 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{path_def_id, qpath_generic_tys};
+use if_chain::if_chain;
+use rustc_hir::{self as hir, def_id::DefId, QPath};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::OPTION_OPTION;
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
+ if_chain! {
+ if cx.tcx.is_diagnostic_item(sym::Option, def_id);
+ if let Some(arg) = qpath_generic_tys(qpath).next();
+ if path_def_id(cx, arg) == Some(def_id);
+ then {
+ span_lint(
+ cx,
+ OPTION_OPTION,
+ hir_ty.span,
+ "consider using `Option<T>` instead of `Option<Option<T>>` or a custom \
+ enum if you need to distinguish all 3 cases",
+ );
+ true
+ } else {
+ false
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs b/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs
new file mode 100644
index 000000000..4d72a29e8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs
@@ -0,0 +1,106 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::{path_def_id, qpath_generic_tys};
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, def_id::DefId, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::RC_BUFFER;
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
+ if cx.tcx.is_diagnostic_item(sym::Rc, def_id) {
+ if let Some(alternate) = match_buffer_type(cx, qpath) {
+ span_lint_and_sugg(
+ cx,
+ RC_BUFFER,
+ hir_ty.span,
+ "usage of `Rc<T>` when T is a buffer type",
+ "try",
+ format!("Rc<{}>", alternate),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ let Some(ty) = qpath_generic_tys(qpath).next() else { return false };
+ let Some(id) = path_def_id(cx, ty) else { return false };
+ if !cx.tcx.is_diagnostic_item(sym::Vec, id) {
+ return false;
+ }
+ let qpath = match &ty.kind {
+ TyKind::Path(qpath) => qpath,
+ _ => return false,
+ };
+ let inner_span = match qpath_generic_tys(qpath).next() {
+ Some(ty) => ty.span,
+ None => return false,
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ RC_BUFFER,
+ hir_ty.span,
+ "usage of `Rc<T>` when T is a buffer type",
+ "try",
+ format!(
+ "Rc<[{}]>",
+ snippet_with_applicability(cx, inner_span, "..", &mut applicability)
+ ),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ } else if cx.tcx.is_diagnostic_item(sym::Arc, def_id) {
+ if let Some(alternate) = match_buffer_type(cx, qpath) {
+ span_lint_and_sugg(
+ cx,
+ RC_BUFFER,
+ hir_ty.span,
+ "usage of `Arc<T>` when T is a buffer type",
+ "try",
+ format!("Arc<{}>", alternate),
+ Applicability::MachineApplicable,
+ );
+ } else if let Some(ty) = qpath_generic_tys(qpath).next() {
+ let Some(id) = path_def_id(cx, ty) else { return false };
+ if !cx.tcx.is_diagnostic_item(sym::Vec, id) {
+ return false;
+ }
+ let qpath = match &ty.kind {
+ TyKind::Path(qpath) => qpath,
+ _ => return false,
+ };
+ let inner_span = match qpath_generic_tys(qpath).next() {
+ Some(ty) => ty.span,
+ None => return false,
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ span_lint_and_sugg(
+ cx,
+ RC_BUFFER,
+ hir_ty.span,
+ "usage of `Arc<T>` when T is a buffer type",
+ "try",
+ format!(
+ "Arc<[{}]>",
+ snippet_with_applicability(cx, inner_span, "..", &mut applicability)
+ ),
+ Applicability::MachineApplicable,
+ );
+ return true;
+ }
+ }
+
+ false
+}
+
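+/// Returns the unsized counterpart (`str`, `OsStr`, `Path`) when the first generic argument is a
+/// growable buffer type (`String`, `OsString`, `PathBuf`).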
+fn match_buffer_type(cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<&'static str> {
+ let ty = qpath_generic_tys(qpath).next()?;
+ let id = path_def_id(cx, ty)?;
+ let path = match cx.tcx.get_diagnostic_name(id)? {
+ sym::String => "str",
+ sym::OsString => "std::ffi::OsStr",
+ sym::PathBuf => "std::path::Path",
+ _ => return None,
+ };
+ Some(path)
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
new file mode 100644
index 000000000..a75972cf3
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
@@ -0,0 +1,30 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{path_def_id, qpath_generic_tys};
+use if_chain::if_chain;
+use rustc_hir::{self as hir, def_id::DefId, QPath};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+
+use super::RC_MUTEX;
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
+ if_chain! {
+ if cx.tcx.is_diagnostic_item(sym::Rc, def_id);
+ if let Some(arg) = qpath_generic_tys(qpath).next();
+ if let Some(id) = path_def_id(cx, arg);
+ if cx.tcx.is_diagnostic_item(sym::Mutex, id);
+ then {
+ span_lint_and_help(
+ cx,
+ RC_MUTEX,
+ hir_ty.span,
+ "usage of `Rc<Mutex<_>>`",
+ None,
+ "consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead",
+ );
+ return true;
+ }
+ }
+
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs b/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs
new file mode 100644
index 000000000..a1312fcda
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs
@@ -0,0 +1,115 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::{snippet, snippet_with_applicability};
+use clippy_utils::{path_def_id, qpath_generic_tys};
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, def_id::DefId, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_span::symbol::sym;
+use rustc_typeck::hir_ty_to_ty;
+
+use super::{utils, REDUNDANT_ALLOCATION};
+
+pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_>, def_id: DefId) -> bool {
+ let outer_sym = if Some(def_id) == cx.tcx.lang_items().owned_box() {
+ "Box"
+ } else if cx.tcx.is_diagnostic_item(sym::Rc, def_id) {
+ "Rc"
+ } else if cx.tcx.is_diagnostic_item(sym::Arc, def_id) {
+ "Arc"
+ } else {
+ return false;
+ };
+
+ if let Some(span) = utils::match_borrows_parameter(cx, qpath) {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let generic_snippet = snippet_with_applicability(cx, span, "..", &mut applicability);
+ span_lint_and_then(
+ cx,
+ REDUNDANT_ALLOCATION,
+ hir_ty.span,
+ &format!("usage of `{}<{}>`", outer_sym, generic_snippet),
+ |diag| {
+ diag.span_suggestion(hir_ty.span, "try", format!("{}", generic_snippet), applicability);
+ diag.note(&format!(
+ "`{generic}` is already a pointer, `{outer}<{generic}>` allocates a pointer on the heap",
+ outer = outer_sym,
+ generic = generic_snippet
+ ));
+ },
+ );
+ return true;
+ }
+
+ let Some(ty) = qpath_generic_tys(qpath).next() else { return false };
+ let Some(id) = path_def_id(cx, ty) else { return false };
+ let (inner_sym, ty) = match cx.tcx.get_diagnostic_name(id) {
+ Some(sym::Arc) => ("Arc", ty),
+ Some(sym::Rc) => ("Rc", ty),
+ _ if Some(id) == cx.tcx.lang_items().owned_box() => ("Box", ty),
+ _ => return false,
+ };
+
+ let inner_qpath = match &ty.kind {
+ TyKind::Path(inner_qpath) => inner_qpath,
+ _ => return false,
+ };
+ let inner_span = match qpath_generic_tys(inner_qpath).next() {
+ Some(ty) => {
+ // Reallocation of a fat pointer causes it to become thin. `hir_ty_to_ty` is safe to use
+ // here because `mod.rs` guarantees this lint is only run on types outside of bodies and
+ // is not run on locals.
+ if !hir_ty_to_ty(cx.tcx, ty).is_sized(cx.tcx.at(ty.span), cx.param_env) {
+ return false;
+ }
+ ty.span
+ },
+ None => return false,
+ };
+ if inner_sym == outer_sym {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let generic_snippet = snippet_with_applicability(cx, inner_span, "..", &mut applicability);
+ span_lint_and_then(
+ cx,
+ REDUNDANT_ALLOCATION,
+ hir_ty.span,
+ &format!("usage of `{}<{}<{}>>`", outer_sym, inner_sym, generic_snippet),
+ |diag| {
+ diag.span_suggestion(
+ hir_ty.span,
+ "try",
+ format!("{}<{}>", outer_sym, generic_snippet),
+ applicability,
+ );
+ diag.note(&format!(
+ "`{inner}<{generic}>` is already on the heap, `{outer}<{inner}<{generic}>>` makes an extra allocation",
+ outer = outer_sym,
+ inner = inner_sym,
+ generic = generic_snippet
+ ));
+ },
+ );
+ } else {
+ let generic_snippet = snippet(cx, inner_span, "..");
+ span_lint_and_then(
+ cx,
+ REDUNDANT_ALLOCATION,
+ hir_ty.span,
+ &format!("usage of `{}<{}<{}>>`", outer_sym, inner_sym, generic_snippet),
+ |diag| {
+ diag.note(&format!(
+ "`{inner}<{generic}>` is already on the heap, `{outer}<{inner}<{generic}>>` makes an extra allocation",
+ outer = outer_sym,
+ inner = inner_sym,
+ generic = generic_snippet
+ ));
+ diag.help(&format!(
+ "consider using just `{outer}<{generic}>` or `{inner}<{generic}>`",
+ outer = outer_sym,
+ inner = inner_sym,
+ generic = generic_snippet
+ ));
+ },
+ );
+ }
+ true
+}
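As an illustration of the sizedness check above (a sketch, not part of the patch; it assumes the lint wiring in the surrounding `mod.rs`), these hypothetical signatures show what would and would not be flagged:

```rust
use std::rc::Rc;

// Flagged: `u32` is `Sized`, so `Rc<Box<u32>>` merely wraps `Rc<u32>` in an extra allocation.
fn redundant(x: Rc<Box<u32>>) -> u32 {
    **x
}

// Not flagged: `str` is unsized, so `Box<str>` is a fat pointer and re-boxing it
// changes the representation rather than just duplicating an allocation.
fn not_redundant(x: Rc<Box<str>>) -> usize {
    x.len()
}
```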
diff --git a/src/tools/clippy/clippy_lints/src/types/type_complexity.rs b/src/tools/clippy/clippy_lints/src/types/type_complexity.rs
new file mode 100644
index 000000000..5ca4023aa
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/type_complexity.rs
@@ -0,0 +1,78 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{walk_inf, walk_ty, Visitor};
+use rustc_hir::{GenericParamKind, TyKind};
+use rustc_lint::LateContext;
+use rustc_target::spec::abi::Abi;
+
+use super::TYPE_COMPLEXITY;
+
+pub(super) fn check(cx: &LateContext<'_>, ty: &hir::Ty<'_>, type_complexity_threshold: u64) -> bool {
+ let score = {
+ let mut visitor = TypeComplexityVisitor { score: 0, nest: 1 };
+ visitor.visit_ty(ty);
+ visitor.score
+ };
+
+ if score > type_complexity_threshold {
+ span_lint(
+ cx,
+ TYPE_COMPLEXITY,
+ ty.span,
+ "very complex type used. Consider factoring parts into `type` definitions",
+ );
+ true
+ } else {
+ false
+ }
+}
+
+/// Walks a type and assigns a complexity score to it.
+struct TypeComplexityVisitor {
+ /// total complexity score of the type
+ score: u64,
+ /// current nesting level
+ nest: u64,
+}
+
+impl<'tcx> Visitor<'tcx> for TypeComplexityVisitor {
+ fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
+ self.score += 1;
+ walk_inf(self, inf);
+ }
+
+ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
+ let (add_score, sub_nest) = match ty.kind {
+ // _, &x and *x have only small overhead; don't mess with nesting level
+ TyKind::Infer | TyKind::Ptr(..) | TyKind::Rptr(..) => (1, 0),
+
+ // the "normal" components of a type: named types, arrays/tuples
+ TyKind::Path(..) | TyKind::Slice(..) | TyKind::Tup(..) | TyKind::Array(..) => (10 * self.nest, 1),
+
+ // function types bring a lot of overhead
+ TyKind::BareFn(bare) if bare.abi == Abi::Rust => (50 * self.nest, 1),
+
+ TyKind::TraitObject(param_bounds, _, _) => {
+ let has_lifetime_parameters = param_bounds.iter().any(|bound| {
+ bound
+ .bound_generic_params
+ .iter()
+ .any(|gen| matches!(gen.kind, GenericParamKind::Lifetime { .. }))
+ });
+ if has_lifetime_parameters {
+ // complex trait bounds like A<'a, 'b>
+ (50 * self.nest, 1)
+ } else {
+ // simple trait bounds like A + B
+ (20 * self.nest, 0)
+ }
+ },
+
+ _ => (0, 0),
+ };
+ self.score += add_score;
+ self.nest += sub_nest;
+ walk_ty(self, ty);
+ self.nest -= sub_nest;
+ }
+}
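To make the scoring weights above concrete (an illustration only, assuming clippy's default `type-complexity-threshold` of 250), a field type along these lines accumulates score quickly because each nested path multiplies the per-node weight by the growing nesting level:

```rust
// The outer `Vec` scores 10, each nested path/tuple scores 10 * nest, and the
// boxed `Fn` trait object adds 20 * nest plus its argument types, which is
// typically enough to cross the default threshold.
struct Parsed {
    handlers: Vec<Vec<(u64, Box<dyn Fn(u64) -> u64>)>>,
}
```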
diff --git a/src/tools/clippy/clippy_lints/src/types/utils.rs b/src/tools/clippy/clippy_lints/src/types/utils.rs
new file mode 100644
index 000000000..0fa75f8f0
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/utils.rs
@@ -0,0 +1,22 @@
+use clippy_utils::last_path_segment;
+use if_chain::if_chain;
+use rustc_hir::{GenericArg, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_span::source_map::Span;
+
+pub(super) fn match_borrows_parameter(_cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<Span> {
+ let last = last_path_segment(qpath);
+ if_chain! {
+ if let Some(params) = last.args;
+ if !params.parenthesized;
+ if let Some(ty) = params.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ });
+ if let TyKind::Rptr(..) = ty.kind;
+ then {
+ return Some(ty.span);
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_lints/src/types/vec_box.rs b/src/tools/clippy/clippy_lints/src/types/vec_box.rs
new file mode 100644
index 000000000..b2f536ca7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/types/vec_box.rs
@@ -0,0 +1,64 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::last_path_segment;
+use clippy_utils::source::snippet;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, def_id::DefId, GenericArg, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::TypeVisitable;
+use rustc_span::symbol::sym;
+use rustc_typeck::hir_ty_to_ty;
+
+use super::VEC_BOX;
+
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ hir_ty: &hir::Ty<'_>,
+ qpath: &QPath<'_>,
+ def_id: DefId,
+ box_size_threshold: u64,
+) -> bool {
+ if cx.tcx.is_diagnostic_item(sym::Vec, def_id) {
+ if_chain! {
+ // Get the _ part of Vec<_>
+ if let Some(last) = last_path_segment(qpath).args;
+ if let Some(ty) = last.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ });
+ // ty is now _ at this point
+ if let TyKind::Path(ref ty_qpath) = ty.kind;
+ let res = cx.qpath_res(ty_qpath, ty.hir_id);
+ if let Some(def_id) = res.opt_def_id();
+ if Some(def_id) == cx.tcx.lang_items().owned_box();
+ // At this point, we know ty is Box<T>, now get T
+ if let Some(last) = last_path_segment(ty_qpath).args;
+ if let Some(boxed_ty) = last.args.iter().find_map(|arg| match arg {
+ GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ });
+ let ty_ty = hir_ty_to_ty(cx.tcx, boxed_ty);
+ if !ty_ty.has_escaping_bound_vars();
+ if ty_ty.is_sized(cx.tcx.at(ty.span), cx.param_env);
+ if let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes());
+ if ty_ty_size <= box_size_threshold;
+ then {
+ span_lint_and_sugg(
+ cx,
+ VEC_BOX,
+ hir_ty.span,
+ "`Vec<T>` is already on the heap, the boxing is unnecessary",
+ "try",
+ format!("Vec<{}>", snippet(cx, boxed_ty.span, "..")),
+ Applicability::MachineApplicable,
+ );
+ true
+ } else {
+ false
+ }
+ }
+ } else {
+ false
+ }
+}
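For reference (an illustrative sketch, not part of the patch), the sizedness and size-threshold conditions above mean that only small, `Sized` box contents get reported:

```rust
// Likely flagged under the default `vec-box-size-threshold`: `u32` is small and
// `Sized`, so the inner `Box` only adds a pointer indirection per element.
struct Flagged {
    values: Vec<Box<u32>>, // suggestion: `Vec<u32>`
}

// Not flagged: `[u8]` is unsized, so the `Box` is what makes the element storable at all.
struct NotFlagged {
    chunks: Vec<Box<[u8]>>,
}
```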
diff --git a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
new file mode 100644
index 000000000..d2e675a78
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
@@ -0,0 +1,358 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::walk_span_to_context;
+use clippy_utils::{get_parent_node, is_lint_allowed};
+use rustc_data_structures::sync::Lrc;
+use rustc_hir as hir;
+use rustc_hir::{Block, BlockCheckMode, ItemKind, Node, UnsafeSource};
+use rustc_lexer::{tokenize, TokenKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{BytePos, Pos, Span, SyntaxContext};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `unsafe` blocks and impls without a `// SAFETY: ` comment
+ /// explaining why the unsafe operations performed inside
+ /// the block are safe.
+ ///
+ /// Note the comment must appear on the line(s) preceding the unsafe block
+ /// with nothing appearing in between. The following is ok:
+ /// ```ignore
+ /// foo(
+ /// // SAFETY:
+ /// // This is a valid safety comment
+ /// unsafe { *x }
+ /// )
+ /// ```
+ /// But neither of these are:
+ /// ```ignore
+ /// // SAFETY:
+ /// // This is not a valid safety comment
+ /// foo(
+ /// /* SAFETY: Neither is this */ unsafe { *x },
+ /// );
+ /// ```
+ ///
+ /// ### Why is this bad?
+ /// Undocumented unsafe blocks and impls can make it difficult to
+ /// read and maintain code, as well as uncover unsoundness
+ /// and bugs.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::ptr::NonNull;
+ /// let a = &mut 42;
+ ///
+ /// let ptr = unsafe { NonNull::new_unchecked(a) };
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::ptr::NonNull;
+ /// let a = &mut 42;
+ ///
+ /// // SAFETY: references are guaranteed to be non-null.
+ /// let ptr = unsafe { NonNull::new_unchecked(a) };
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub UNDOCUMENTED_UNSAFE_BLOCKS,
+ restriction,
+ "creating an unsafe block without explaining why it is safe"
+}
+
+declare_lint_pass!(UndocumentedUnsafeBlocks => [UNDOCUMENTED_UNSAFE_BLOCKS]);
+
+impl LateLintPass<'_> for UndocumentedUnsafeBlocks {
+ fn check_block(&mut self, cx: &LateContext<'_>, block: &'_ Block<'_>) {
+ if block.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided)
+ && !in_external_macro(cx.tcx.sess, block.span)
+ && !is_lint_allowed(cx, UNDOCUMENTED_UNSAFE_BLOCKS, block.hir_id)
+ && !is_unsafe_from_proc_macro(cx, block.span)
+ && !block_has_safety_comment(cx, block)
+ {
+ let source_map = cx.tcx.sess.source_map();
+ let span = if source_map.is_multiline(block.span) {
+ source_map.span_until_char(block.span, '\n')
+ } else {
+ block.span
+ };
+
+ span_lint_and_help(
+ cx,
+ UNDOCUMENTED_UNSAFE_BLOCKS,
+ span,
+ "unsafe block missing a safety comment",
+ None,
+ "consider adding a safety comment on the preceding line",
+ );
+ }
+ }
+
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
+ if let hir::ItemKind::Impl(imple) = item.kind
+ && imple.unsafety == hir::Unsafety::Unsafe
+ && !in_external_macro(cx.tcx.sess, item.span)
+ && !is_lint_allowed(cx, UNDOCUMENTED_UNSAFE_BLOCKS, item.hir_id())
+ && !is_unsafe_from_proc_macro(cx, item.span)
+ && !item_has_safety_comment(cx, item)
+ {
+ let source_map = cx.tcx.sess.source_map();
+ let span = if source_map.is_multiline(item.span) {
+ source_map.span_until_char(item.span, '\n')
+ } else {
+ item.span
+ };
+
+ span_lint_and_help(
+ cx,
+ UNDOCUMENTED_UNSAFE_BLOCKS,
+ span,
+ "unsafe impl missing a safety comment",
+ None,
+ "consider adding a safety comment on the preceding line",
+ );
+ }
+ }
+}
+
+fn is_unsafe_from_proc_macro(cx: &LateContext<'_>, span: Span) -> bool {
+ let source_map = cx.sess().source_map();
+ let file_pos = source_map.lookup_byte_offset(span.lo());
+ file_pos
+ .sf
+ .src
+ .as_deref()
+ .and_then(|src| src.get(file_pos.pos.to_usize()..))
+ .map_or(true, |src| !src.starts_with("unsafe"))
+}
+
+/// Checks if the lines immediately preceding the block contain a safety comment.
+fn block_has_safety_comment(cx: &LateContext<'_>, block: &hir::Block<'_>) -> bool {
+ // This intentionally ignores text before the start of a function so something like:
+ // ```
+ // // SAFETY: reason
+ // fn foo() { unsafe { .. } }
+ // ```
+ // won't work. This is to avoid dealing with where such a comment should be placed relative to
+ // attributes and doc comments.
+
+ span_from_macro_expansion_has_safety_comment(cx, block.span) || span_in_body_has_safety_comment(cx, block.span)
+}
+
+/// Checks if the lines immediately preceding the item contain a safety comment.
+#[allow(clippy::collapsible_match)]
+fn item_has_safety_comment(cx: &LateContext<'_>, item: &hir::Item<'_>) -> bool {
+ if span_from_macro_expansion_has_safety_comment(cx, item.span) {
+ return true;
+ }
+
+ if item.span.ctxt() == SyntaxContext::root() {
+ if let Some(parent_node) = get_parent_node(cx.tcx, item.hir_id()) {
+ let comment_start = match parent_node {
+ Node::Crate(parent_mod) => {
+ comment_start_before_impl_in_mod(cx, parent_mod, parent_mod.spans.inner_span, item)
+ },
+ Node::Item(parent_item) => {
+ if let ItemKind::Mod(parent_mod) = &parent_item.kind {
+ comment_start_before_impl_in_mod(cx, parent_mod, parent_item.span, item)
+ } else {
+ // Doesn't support impls in this position. Pretend a comment was found.
+ return true;
+ }
+ },
+ Node::Stmt(stmt) => {
+ if let Some(stmt_parent) = get_parent_node(cx.tcx, stmt.hir_id) {
+ match stmt_parent {
+ Node::Block(block) => walk_span_to_context(block.span, SyntaxContext::root()).map(Span::lo),
+ _ => {
+ // Doesn't support impls in this position. Pretend a comment was found.
+ return true;
+ },
+ }
+ } else {
+ // Problem getting the parent node. Pretend a comment was found.
+ return true;
+ }
+ },
+ _ => {
+ // Doesn't support impls in this position. Pretend a comment was found.
+ return true;
+ },
+ };
+
+ let source_map = cx.sess().source_map();
+ if let Some(comment_start) = comment_start
+ && let Ok(unsafe_line) = source_map.lookup_line(item.span.lo())
+ && let Ok(comment_start_line) = source_map.lookup_line(comment_start)
+ && Lrc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
+ && let Some(src) = unsafe_line.sf.src.as_deref()
+ {
+ unsafe_line.sf.lines(|lines| {
+ comment_start_line.line < unsafe_line.line && text_has_safety_comment(
+ src,
+ &lines[comment_start_line.line + 1..=unsafe_line.line],
+ unsafe_line.sf.start_pos.to_usize(),
+ )
+ })
+ } else {
+ // Problem getting source text. Pretend a comment was found.
+ true
+ }
+ } else {
+ // No parent node. Pretend a comment was found.
+ true
+ }
+ } else {
+ false
+ }
+}
+
+fn comment_start_before_impl_in_mod(
+ cx: &LateContext<'_>,
+ parent_mod: &hir::Mod<'_>,
+ parent_mod_span: Span,
+ imple: &hir::Item<'_>,
+) -> Option<BytePos> {
+ parent_mod.item_ids.iter().enumerate().find_map(|(idx, item_id)| {
+ if *item_id == imple.item_id() {
+ if idx == 0 {
+ // mod A { /* comment */ unsafe impl T {} ... }
+ // ^------------------------------------------^ returns the start of this span
+ // ^---------------------^ finally checks comments in this range
+ if let Some(sp) = walk_span_to_context(parent_mod_span, SyntaxContext::root()) {
+ return Some(sp.lo());
+ }
+ } else {
+ // some_item /* comment */ unsafe impl T {}
+ // ^-------^ returns the end of this span
+ // ^---------------^ finally checks comments in this range
+ let prev_item = cx.tcx.hir().item(parent_mod.item_ids[idx - 1]);
+ if let Some(sp) = walk_span_to_context(prev_item.span, SyntaxContext::root()) {
+ return Some(sp.hi());
+ }
+ }
+ }
+ None
+ })
+}
+
+fn span_from_macro_expansion_has_safety_comment(cx: &LateContext<'_>, span: Span) -> bool {
+ let source_map = cx.sess().source_map();
+ let ctxt = span.ctxt();
+ if ctxt == SyntaxContext::root() {
+ false
+ } else {
+ // From a macro expansion. Get the text from the start of the macro declaration to start of the
+ // unsafe block.
+ // macro_rules! foo { () => { stuff }; (x) => { unsafe { stuff } }; }
+ // ^--------------------------------------------^
+ if let Ok(unsafe_line) = source_map.lookup_line(span.lo())
+ && let Ok(macro_line) = source_map.lookup_line(ctxt.outer_expn_data().def_site.lo())
+ && Lrc::ptr_eq(&unsafe_line.sf, &macro_line.sf)
+ && let Some(src) = unsafe_line.sf.src.as_deref()
+ {
+ unsafe_line.sf.lines(|lines| {
+ macro_line.line < unsafe_line.line && text_has_safety_comment(
+ src,
+ &lines[macro_line.line + 1..=unsafe_line.line],
+ unsafe_line.sf.start_pos.to_usize(),
+ )
+ })
+ } else {
+ // Problem getting source text. Pretend a comment was found.
+ true
+ }
+ }
+}
+
+fn get_body_search_span(cx: &LateContext<'_>) -> Option<Span> {
+ let body = cx.enclosing_body?;
+ let map = cx.tcx.hir();
+ let mut span = map.body(body).value.span;
+ for (_, node) in map.parent_iter(body.hir_id) {
+ match node {
+ Node::Expr(e) => span = e.span,
+ Node::Block(_) | Node::Arm(_) | Node::Stmt(_) | Node::Local(_) => (),
+ _ => break,
+ }
+ }
+ Some(span)
+}
+
+fn span_in_body_has_safety_comment(cx: &LateContext<'_>, span: Span) -> bool {
+ let source_map = cx.sess().source_map();
+ let ctxt = span.ctxt();
+ if ctxt == SyntaxContext::root()
+ && let Some(search_span) = get_body_search_span(cx)
+ {
+ if let Ok(unsafe_line) = source_map.lookup_line(span.lo())
+ && let Some(body_span) = walk_span_to_context(search_span, SyntaxContext::root())
+ && let Ok(body_line) = source_map.lookup_line(body_span.lo())
+ && Lrc::ptr_eq(&unsafe_line.sf, &body_line.sf)
+ && let Some(src) = unsafe_line.sf.src.as_deref()
+ {
+ // Get the text from the start of function body to the unsafe block.
+ // fn foo() { some_stuff; unsafe { stuff }; other_stuff; }
+ // ^-------------^
+ unsafe_line.sf.lines(|lines| {
+ body_line.line < unsafe_line.line && text_has_safety_comment(
+ src,
+ &lines[body_line.line + 1..=unsafe_line.line],
+ unsafe_line.sf.start_pos.to_usize(),
+ )
+ })
+ } else {
+ // Problem getting source text. Pretend a comment was found.
+ true
+ }
+ } else {
+ false
+ }
+}
+
+/// Checks if the given text contains a safety comment for the line that immediately follows it.
+fn text_has_safety_comment(src: &str, line_starts: &[BytePos], offset: usize) -> bool {
+ let mut lines = line_starts
+ .array_windows::<2>()
+ .rev()
+ .map_while(|[start, end]| {
+ let start = start.to_usize() - offset;
+ let end = end.to_usize() - offset;
+ src.get(start..end).map(|text| (start, text.trim_start()))
+ })
+ .filter(|(_, text)| !text.is_empty());
+
+ let Some((line_start, line)) = lines.next() else {
+ return false;
+ };
+ // Check for a sequence of line comments.
+ if line.starts_with("//") {
+ let mut line = line;
+ loop {
+ if line.to_ascii_uppercase().contains("SAFETY:") {
+ return true;
+ }
+ match lines.next() {
+ Some((_, x)) if x.starts_with("//") => line = x,
+ _ => return false,
+ }
+ }
+ }
+ // No line comments; look for the start of a block comment.
+ // This will only find them if they are at the start of a line.
+ let (mut line_start, mut line) = (line_start, line);
+ loop {
+ if line.starts_with("/*") {
+ let src = src[line_start..line_starts.last().unwrap().to_usize() - offset].trim_start();
+ let mut tokens = tokenize(src);
+ return src[..tokens.next().unwrap().len as usize]
+ .to_ascii_uppercase()
+ .contains("SAFETY:")
+ && tokens.all(|t| t.kind == TokenKind::Whitespace);
+ }
+ match lines.next() {
+ Some(x) => (line_start, line) = x,
+ None => return false,
+ }
+ }
+}
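A minimal sketch of the `unsafe impl` case handled by `item_has_safety_comment` (illustration only, not part of the patch):

```rust
struct Token(*mut u8);

// SAFETY: the pointer is only dereferenced on the thread that owns the allocation.
// The comment sits on the lines directly above the item, which is where the lint
// looks for it; moving it elsewhere would trigger the warning.
unsafe impl Send for Token {}
```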
diff --git a/src/tools/clippy/clippy_lints/src/unicode.rs b/src/tools/clippy/clippy_lints/src/unicode.rs
new file mode 100644
index 000000000..cc64d17be
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unicode.rs
@@ -0,0 +1,142 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_lint_allowed;
+use clippy_utils::source::snippet;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, HirId};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use unicode_normalization::UnicodeNormalization;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for invisible Unicode characters in the code.
+ ///
+ /// ### Why is this bad?
+ /// Having an invisible character in the code makes for all
+ /// sorts of April fools, but otherwise is very much frowned upon.
+ ///
+ /// ### Example
+ /// You don't see it, but there may be a zero-width space or soft hyphen
+ /// some­where in this text.
+ #[clippy::version = "1.49.0"]
+ pub INVISIBLE_CHARACTERS,
+ correctness,
+ "using an invisible character in a string literal, which is confusing"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for non-ASCII characters in string and char literals.
+ ///
+ /// ### Why is this bad?
+ /// Yeah, we know, the 90's called and wanted their charset
+ /// back. Even so, there still are editors and other programs out there that
+ /// don't work well with Unicode. So if the code is meant to be used
+ /// internationally, on multiple operating systems, or has other portability
+ /// requirements, activating this lint could be useful.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = String::from("€");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let x = String::from("\u{20ac}");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub NON_ASCII_LITERAL,
+ restriction,
+ "using any literal non-ASCII chars in a string literal instead of using the `\\u` escape"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for string literals that contain Unicode in a form
+ /// that is not equal to its
+ /// [NFC-recomposition](http://www.unicode.org/reports/tr15/#Norm_Forms).
+ ///
+ /// ### Why is this bad?
+ /// If such a string is compared to another, the results
+ /// may be surprising.
+ ///
+ /// ### Example
+ /// You may not see it, but "à" and "à" aren't the same string. The
+ /// former when escaped is actually `"a\u{300}"` while the latter is `"\u{e0}"`.
+ #[clippy::version = "pre 1.29.0"]
+ pub UNICODE_NOT_NFC,
+ pedantic,
+ "using a Unicode literal not in NFC normal form (see [Unicode tr15](http://www.unicode.org/reports/tr15/) for further information)"
+}
+
+declare_lint_pass!(Unicode => [INVISIBLE_CHARACTERS, NON_ASCII_LITERAL, UNICODE_NOT_NFC]);
+
+impl LateLintPass<'_> for Unicode {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) {
+ if let ExprKind::Lit(ref lit) = expr.kind {
+ if let LitKind::Str(_, _) | LitKind::Char(_) = lit.node {
+ check_str(cx, lit.span, expr.hir_id);
+ }
+ }
+ }
+}
+
+fn escape<T: Iterator<Item = char>>(s: T) -> String {
+ let mut result = String::new();
+ for c in s {
+ if c as u32 > 0x7F {
+ for d in c.escape_unicode() {
+ result.push(d);
+ }
+ } else {
+ result.push(c);
+ }
+ }
+ result
+}
+
+fn check_str(cx: &LateContext<'_>, span: Span, id: HirId) {
+ let string = snippet(cx, span, "");
+ if string.chars().any(|c| ['\u{200B}', '\u{ad}', '\u{2060}'].contains(&c)) {
+ span_lint_and_sugg(
+ cx,
+ INVISIBLE_CHARACTERS,
+ span,
+ "invisible character detected",
+ "consider replacing the string with",
+ string
+ .replace('\u{200B}', "\\u{200B}")
+ .replace('\u{ad}', "\\u{AD}")
+ .replace('\u{2060}', "\\u{2060}"),
+ Applicability::MachineApplicable,
+ );
+ }
+ if string.chars().any(|c| c as u32 > 0x7F) {
+ span_lint_and_sugg(
+ cx,
+ NON_ASCII_LITERAL,
+ span,
+ "literal non-ASCII character detected",
+ "consider replacing the string with",
+ if is_lint_allowed(cx, UNICODE_NOT_NFC, id) {
+ escape(string.chars())
+ } else {
+ escape(string.nfc())
+ },
+ Applicability::MachineApplicable,
+ );
+ }
+ if is_lint_allowed(cx, NON_ASCII_LITERAL, id) && string.chars().zip(string.nfc()).any(|(a, b)| a != b) {
+ span_lint_and_sugg(
+ cx,
+ UNICODE_NOT_NFC,
+ span,
+ "non-NFC Unicode sequence detected",
+ "consider replacing the string with",
+ string.nfc().collect::<String>(),
+ Applicability::MachineApplicable,
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/uninit_vec.rs b/src/tools/clippy/clippy_lints/src/uninit_vec.rs
new file mode 100644
index 000000000..9f4c5555f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/uninit_vec.rs
@@ -0,0 +1,224 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
+use clippy_utils::higher::{get_vec_init_kind, VecInitKind};
+use clippy_utils::ty::{is_type_diagnostic_item, is_uninit_value_valid_for_ty};
+use clippy_utils::{is_lint_allowed, path_to_local_id, peel_hir_expr_while, SpanlessEq};
+use rustc_hir::{Block, Expr, ExprKind, HirId, PatKind, PathSegment, Stmt, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+
+// TODO: add `ReadBuf` (RFC 2930) in "How to fix" once it is available in std
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `set_len()` call that creates `Vec` with uninitialized elements.
+ /// This is commonly caused by calling `set_len()` right after allocating or
+ /// reserving a buffer with `new()`, `default()`, `with_capacity()`, or `reserve()`.
+ ///
+ /// ### Why is this bad?
+ /// It creates a `Vec` with uninitialized data, which leads to
+ /// undefined behavior with most safe operations. Notably, uninitialized
+ /// `Vec<u8>` must not be used with generic `Read`.
+ ///
+ /// Moreover, calling `set_len()` on a `Vec` created with `new()` or `default()`
+ /// creates out-of-bound values that lead to heap memory corruption when used.
+ ///
+ /// ### Known Problems
+ /// This lint only checks directly adjacent statements.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ /// unsafe { vec.set_len(1000); }
+ /// reader.read(&mut vec); // undefined behavior!
+ /// ```
+ ///
+ /// ### How to fix?
+ /// 1. Use an initialized buffer:
+ /// ```rust,ignore
+ /// let mut vec: Vec<u8> = vec![0; 1000];
+ /// reader.read(&mut vec);
+ /// ```
+ /// 2. Wrap the content in `MaybeUninit`:
+ /// ```rust,ignore
+ /// let mut vec: Vec<MaybeUninit<T>> = Vec::with_capacity(1000);
+ /// vec.set_len(1000); // `MaybeUninit` can be uninitialized
+ /// ```
+ /// 3. If you are on nightly, `Vec::spare_capacity_mut()` is available:
+ /// ```rust,ignore
+ /// let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ /// let remaining = vec.spare_capacity_mut(); // `&mut [MaybeUninit<u8>]`
+ /// // perform initialization with `remaining`
+ /// vec.set_len(...); // Safe to call `set_len()` on initialized part
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub UNINIT_VEC,
+ correctness,
+ "Vec with uninitialized data"
+}
+
+declare_lint_pass!(UninitVec => [UNINIT_VEC]);
+
+// FIXME: update to a visitor-based implementation.
+// Threads: https://github.com/rust-lang/rust-clippy/pull/7682#discussion_r710998368
+impl<'tcx> LateLintPass<'tcx> for UninitVec {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'_>) {
+ if !in_external_macro(cx.tcx.sess, block.span) {
+ for w in block.stmts.windows(2) {
+ if let StmtKind::Expr(expr) | StmtKind::Semi(expr) = w[1].kind {
+ handle_uninit_vec_pair(cx, &w[0], expr);
+ }
+ }
+
+ if let (Some(stmt), Some(expr)) = (block.stmts.last(), block.expr) {
+ handle_uninit_vec_pair(cx, stmt, expr);
+ }
+ }
+ }
+}
+
+fn handle_uninit_vec_pair<'tcx>(
+ cx: &LateContext<'tcx>,
+ maybe_init_or_reserve: &'tcx Stmt<'tcx>,
+ maybe_set_len: &'tcx Expr<'tcx>,
+) {
+ if_chain! {
+ if let Some(vec) = extract_init_or_reserve_target(cx, maybe_init_or_reserve);
+ if let Some((set_len_self, call_span)) = extract_set_len_self(cx, maybe_set_len);
+ if vec.location.eq_expr(cx, set_len_self);
+ if let ty::Ref(_, vec_ty, _) = cx.typeck_results().expr_ty_adjusted(set_len_self).kind();
+ if let ty::Adt(_, substs) = vec_ty.kind();
+ // `#[allow(...)]` attribute can be set on enclosing unsafe block of `set_len()`
+ if !is_lint_allowed(cx, UNINIT_VEC, maybe_set_len.hir_id);
+ then {
+ if vec.has_capacity() {
+ // with_capacity / reserve -> set_len
+
+ // Check T of Vec<T>
+ if !is_uninit_value_valid_for_ty(cx, substs.type_at(0)) {
+ // FIXME: #7698, false positive of the internal lints
+ #[expect(clippy::collapsible_span_lint_calls)]
+ span_lint_and_then(
+ cx,
+ UNINIT_VEC,
+ vec![call_span, maybe_init_or_reserve.span],
+ "calling `set_len()` immediately after reserving a buffer creates uninitialized values",
+ |diag| {
+ diag.help("initialize the buffer or wrap the content in `MaybeUninit`");
+ },
+ );
+ }
+ } else {
+ // new / default -> set_len
+ span_lint(
+ cx,
+ UNINIT_VEC,
+ vec![call_span, maybe_init_or_reserve.span],
+ "calling `set_len()` on empty `Vec` creates out-of-bound values",
+ );
+ }
+ }
+ }
+}
+
+/// The target `Vec` that is initialized or reserved
+#[derive(Clone, Copy)]
+struct TargetVec<'tcx> {
+ location: VecLocation<'tcx>,
+ /// `None` if `reserve()`
+ init_kind: Option<VecInitKind>,
+}
+
+impl TargetVec<'_> {
+ pub fn has_capacity(self) -> bool {
+ !matches!(self.init_kind, Some(VecInitKind::New | VecInitKind::Default))
+ }
+}
+
+#[derive(Clone, Copy)]
+enum VecLocation<'tcx> {
+ Local(HirId),
+ Expr(&'tcx Expr<'tcx>),
+}
+
+impl<'tcx> VecLocation<'tcx> {
+ pub fn eq_expr(self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
+ match self {
+ VecLocation::Local(hir_id) => path_to_local_id(expr, hir_id),
+ VecLocation::Expr(self_expr) => SpanlessEq::new(cx).eq_expr(self_expr, expr),
+ }
+ }
+}
+
+/// Finds the target location where the result of `Vec` initialization is stored
+/// or `self` expression for `Vec::reserve()`.
+fn extract_init_or_reserve_target<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'tcx>) -> Option<TargetVec<'tcx>> {
+ match stmt.kind {
+ StmtKind::Local(local) => {
+ if_chain! {
+ if let Some(init_expr) = local.init;
+ if let PatKind::Binding(_, hir_id, _, None) = local.pat.kind;
+ if let Some(init_kind) = get_vec_init_kind(cx, init_expr);
+ then {
+ return Some(TargetVec {
+ location: VecLocation::Local(hir_id),
+ init_kind: Some(init_kind),
+ })
+ }
+ }
+ },
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => match expr.kind {
+ ExprKind::Assign(lhs, rhs, _span) => {
+ if let Some(init_kind) = get_vec_init_kind(cx, rhs) {
+ return Some(TargetVec {
+ location: VecLocation::Expr(lhs),
+ init_kind: Some(init_kind),
+ });
+ }
+ },
+ ExprKind::MethodCall(path, [self_expr, _], _) if is_reserve(cx, path, self_expr) => {
+ return Some(TargetVec {
+ location: VecLocation::Expr(self_expr),
+ init_kind: None,
+ });
+ },
+ _ => (),
+ },
+ StmtKind::Item(_) => (),
+ }
+ None
+}
+
+fn is_reserve(cx: &LateContext<'_>, path: &PathSegment<'_>, self_expr: &Expr<'_>) -> bool {
+ is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr).peel_refs(), sym::Vec)
+ && path.ident.name.as_str() == "reserve"
+}
+
+/// Returns self if the expression is `Vec::set_len()`
+fn extract_set_len_self<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'_>) -> Option<(&'tcx Expr<'tcx>, Span)> {
+ // peel unsafe blocks in `unsafe { vec.set_len() }`
+ let expr = peel_hir_expr_while(expr, |e| {
+ if let ExprKind::Block(block, _) = e.kind {
+ // Extract the first statement/expression
+ match (block.stmts.get(0).map(|stmt| &stmt.kind), block.expr) {
+ (None, Some(expr)) => Some(expr),
+ (Some(StmtKind::Expr(expr) | StmtKind::Semi(expr)), _) => Some(expr),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ });
+ match expr.kind {
+ ExprKind::MethodCall(path, [self_expr, _], _) => {
+ let self_type = cx.typeck_results().expr_ty(self_expr).peel_refs();
+ if is_type_diagnostic_item(cx, self_type, sym::Vec) && path.ident.name.as_str() == "set_len" {
+ Some((self_expr, expr.span))
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
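To illustrate the adjacency requirement noted under "Known Problems" (a sketch, not part of the patch):

```rust
fn adjacency_examples() {
    // Paired and reported: the allocation and the `set_len` are adjacent statements.
    let mut reported: Vec<u8> = Vec::with_capacity(64);
    unsafe { reported.set_len(64) };

    // Missed by design: an unrelated statement in between breaks the pairing.
    let mut missed: Vec<u8> = Vec::with_capacity(64);
    let len = 64;
    unsafe { missed.set_len(len) };
}
```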
diff --git a/src/tools/clippy/clippy_lints/src/unit_hash.rs b/src/tools/clippy/clippy_lints/src/unit_hash.rs
new file mode 100644
index 000000000..88ca0cb20
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_hash.rs
@@ -0,0 +1,78 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects `().hash(_)`.
+ ///
+ /// ### Why is this bad?
+ /// Hashing a unit value doesn't do anything as the implementation of `Hash` for `()` is a no-op.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::hash::Hash;
+ /// # use std::collections::hash_map::DefaultHasher;
+ /// # enum Foo { Empty, WithValue(u8) }
+ /// # use Foo::*;
+ /// # let mut state = DefaultHasher::new();
+ /// # let my_enum = Foo::Empty;
+ /// match my_enum {
+ /// Empty => ().hash(&mut state),
+ /// WithValue(x) => x.hash(&mut state),
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::hash::Hash;
+ /// # use std::collections::hash_map::DefaultHasher;
+ /// # enum Foo { Empty, WithValue(u8) }
+ /// # use Foo::*;
+ /// # let mut state = DefaultHasher::new();
+ /// # let my_enum = Foo::Empty;
+ /// match my_enum {
+ /// Empty => 0_u8.hash(&mut state),
+ /// WithValue(x) => x.hash(&mut state),
+ /// }
+ /// ```
+ #[clippy::version = "1.58.0"]
+ pub UNIT_HASH,
+ correctness,
+ "hashing a unit value, which does nothing"
+}
+declare_lint_pass!(UnitHash => [UNIT_HASH]);
+
+impl<'tcx> LateLintPass<'tcx> for UnitHash {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if_chain! {
+ if let ExprKind::MethodCall(name_ident, args, _) = &expr.kind;
+ if name_ident.ident.name == sym::hash;
+ if let [recv, state_param] = args;
+ if cx.typeck_results().expr_ty(recv).is_unit();
+ then {
+ span_lint_and_then(
+ cx,
+ UNIT_HASH,
+ expr.span,
+ "this call to `hash` on the unit type will do nothing",
+ |diag| {
+ diag.span_suggestion(
+ expr.span,
+ "remove the call to `hash` or consider using",
+ format!(
+ "0_u8.hash({})",
+ snippet(cx, state_param.span, ".."),
+ ),
+ Applicability::MaybeIncorrect,
+ );
+ diag.note("the implementation of `Hash` for `()` is a no-op");
+ }
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs b/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
new file mode 100644
index 000000000..b0fce91ab
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
@@ -0,0 +1,184 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::{get_trait_def_id, paths};
+use if_chain::if_chain;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Closure, Expr, ExprKind, StmtKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_middle::ty::{GenericPredicates, PredicateKind, ProjectionPredicate, TraitPredicate};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{BytePos, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions that expect closures of type
+ /// Fn(...) -> Ord where the implemented closure returns the unit type.
+ /// The lint also suggests removing the semicolon at the end of the statement if present.
+ ///
+ /// ### Why is this bad?
+ /// Likely, returning the unit type is unintentional, and
+ /// could simply be caused by an extra semicolon. Since `()` implements `Ord`,
+ /// it doesn't cause a compilation error.
+ /// This is the same reasoning behind the unit_cmp lint.
+ ///
+ /// ### Known problems
+ /// If returning unit is intentional, then there is no
+ /// way of specifying this without triggering the `needless_return` lint.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut twins = vec!((1, 1), (2, 2));
+ /// twins.sort_by_key(|x| { x.1; });
+ /// ```
+ #[clippy::version = "1.47.0"]
+ pub UNIT_RETURN_EXPECTING_ORD,
+ correctness,
+ "fn arguments of type Fn(...) -> Ord returning the unit type ()."
+}
+
+declare_lint_pass!(UnitReturnExpectingOrd => [UNIT_RETURN_EXPECTING_ORD]);
+
+fn get_trait_predicates_for_trait_id<'tcx>(
+ cx: &LateContext<'tcx>,
+ generics: GenericPredicates<'tcx>,
+ trait_id: Option<DefId>,
+) -> Vec<TraitPredicate<'tcx>> {
+ let mut preds = Vec::new();
+ for (pred, _) in generics.predicates {
+ if_chain! {
+ if let PredicateKind::Trait(poly_trait_pred) = pred.kind().skip_binder();
+ let trait_pred = cx.tcx.erase_late_bound_regions(pred.kind().rebind(poly_trait_pred));
+ if let Some(trait_def_id) = trait_id;
+ if trait_def_id == trait_pred.trait_ref.def_id;
+ then {
+ preds.push(trait_pred);
+ }
+ }
+ }
+ preds
+}
+
+fn get_projection_pred<'tcx>(
+ cx: &LateContext<'tcx>,
+ generics: GenericPredicates<'tcx>,
+ trait_pred: TraitPredicate<'tcx>,
+) -> Option<ProjectionPredicate<'tcx>> {
+ generics.predicates.iter().find_map(|(proj_pred, _)| {
+ if let ty::PredicateKind::Projection(pred) = proj_pred.kind().skip_binder() {
+ let projection_pred = cx.tcx.erase_late_bound_regions(proj_pred.kind().rebind(pred));
+ if projection_pred.projection_ty.substs == trait_pred.trait_ref.substs {
+ return Some(projection_pred);
+ }
+ }
+ None
+ })
+}
+
+fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Vec<(usize, String)> {
+ let mut args_to_check = Vec::new();
+ if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) {
+ let fn_sig = cx.tcx.fn_sig(def_id);
+ let generics = cx.tcx.predicates_of(def_id);
+ let fn_mut_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.lang_items().fn_mut_trait());
+ let ord_preds = get_trait_predicates_for_trait_id(cx, generics, get_trait_def_id(cx, &paths::ORD));
+ let partial_ord_preds =
+ get_trait_predicates_for_trait_id(cx, generics, cx.tcx.lang_items().partial_ord_trait());
+ // Trying to call erase_late_bound_regions on fn_sig.inputs() gives the following error
+ // The trait `rustc::ty::TypeFoldable<'_>` is not implemented for
+ // `&[rustc_middle::ty::Ty<'_>]`
+ let inputs_output = cx.tcx.erase_late_bound_regions(fn_sig.inputs_and_output());
+ inputs_output
+ .iter()
+ .rev()
+ .skip(1)
+ .rev()
+ .enumerate()
+ .for_each(|(i, inp)| {
+ for trait_pred in &fn_mut_preds {
+ if_chain! {
+ if trait_pred.self_ty() == inp;
+ if let Some(return_ty_pred) = get_projection_pred(cx, generics, *trait_pred);
+ then {
+ if ord_preds.iter().any(|ord| Some(ord.self_ty()) == return_ty_pred.term.ty()) {
+ args_to_check.push((i, "Ord".to_string()));
+ } else if partial_ord_preds.iter().any(|pord| {
+ pord.self_ty() == return_ty_pred.term.ty().unwrap()
+ }) {
+ args_to_check.push((i, "PartialOrd".to_string()));
+ }
+ }
+ }
+ }
+ });
+ }
+ args_to_check
+}
+
+fn check_arg<'tcx>(cx: &LateContext<'tcx>, arg: &'tcx Expr<'tcx>) -> Option<(Span, Option<Span>)> {
+ if_chain! {
+ if let ExprKind::Closure(&Closure { body, fn_decl_span, .. }) = arg.kind;
+ if let ty::Closure(_def_id, substs) = &cx.typeck_results().node_type(arg.hir_id).kind();
+ let ret_ty = substs.as_closure().sig().output();
+ let ty = cx.tcx.erase_late_bound_regions(ret_ty);
+ if ty.is_unit();
+ then {
+ let body = cx.tcx.hir().body(body);
+ if_chain! {
+ if let ExprKind::Block(block, _) = body.value.kind;
+ if block.expr.is_none();
+ if let Some(stmt) = block.stmts.last();
+ if let StmtKind::Semi(_) = stmt.kind;
+ then {
+ let data = stmt.span.data();
+ // Make a span out of the semicolon for the help message
+ Some((fn_decl_span, Some(data.with_lo(data.hi-BytePos(1)))))
+ } else {
+ Some((fn_decl_span, None))
+ }
+ }
+ } else {
+ None
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for UnitReturnExpectingOrd {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if let ExprKind::MethodCall(_, args, _) = expr.kind {
+ let arg_indices = get_args_to_check(cx, expr);
+ for (i, trait_name) in arg_indices {
+ if i < args.len() {
+ match check_arg(cx, &args[i]) {
+ Some((span, None)) => {
+ span_lint(
+ cx,
+ UNIT_RETURN_EXPECTING_ORD,
+ span,
+ &format!(
+ "this closure returns \
+ the unit type which also implements {}",
+ trait_name
+ ),
+ );
+ },
+ Some((span, Some(last_semi))) => {
+ span_lint_and_help(
+ cx,
+ UNIT_RETURN_EXPECTING_ORD,
+ span,
+ &format!(
+ "this closure returns \
+ the unit type which also implements {}",
+ trait_name
+ ),
+ Some(last_semi),
+ "probably caused by this trailing semicolon",
+ );
+ },
+ None => {},
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
new file mode 100644
index 000000000..aec028d5c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
@@ -0,0 +1,165 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::get_parent_node;
+use clippy_utils::source::snippet_with_macro_callsite;
+use clippy_utils::visitors::{for_each_local_assignment, for_each_value_source};
+use core::ops::ControlFlow;
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Expr, ExprKind, HirId, HirIdSet, Local, Node, PatKind, QPath, TyKind};
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty;
+
+use super::LET_UNIT_VALUE;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, local: &'tcx Local<'_>) {
+ if let Some(init) = local.init
+ && !local.pat.span.from_expansion()
+ && !in_external_macro(cx.sess(), local.span)
+ && cx.typeck_results().pat_ty(local.pat).is_unit()
+ {
+ if (local.ty.map_or(false, |ty| !matches!(ty.kind, TyKind::Infer))
+ || matches!(local.pat.kind, PatKind::Tuple([], None)))
+ && expr_needs_inferred_result(cx, init)
+ {
+ if !matches!(local.pat.kind, PatKind::Wild | PatKind::Tuple([], None)) {
+ span_lint_and_then(
+ cx,
+ LET_UNIT_VALUE,
+ local.span,
+ "this let-binding has unit value",
+ |diag| {
+ diag.span_suggestion(
+ local.pat.span,
+ "use a wild (`_`) binding",
+ "_",
+ Applicability::MaybeIncorrect, // snippet
+ );
+ },
+ );
+ }
+ } else {
+ span_lint_and_then(
+ cx,
+ LET_UNIT_VALUE,
+ local.span,
+ "this let-binding has unit value",
+ |diag| {
+ if let Some(expr) = &local.init {
+ let snip = snippet_with_macro_callsite(cx, expr.span, "()");
+ diag.span_suggestion(
+ local.span,
+ "omit the `let` binding",
+ format!("{snip};"),
+ Applicability::MachineApplicable, // snippet
+ );
+ }
+ },
+ );
+ }
+ }
+}
+
+/// Checks sub-expressions which create the value returned by the given expression for whether
+/// return value inference is needed. This checks through locals to see if they also need inference
+/// at this point.
+///
+/// e.g.
+/// ```rust,ignore
+/// let bar = foo();
+/// let x: u32 = if true { baz() } else { bar };
+/// ```
+/// Here the sources of the value assigned to `x` would be `baz()`, and `foo()` via the
+/// initialization of `bar`. If both `foo` and `baz` have a return type which require type
+/// inference then this function would return `true`.
+fn expr_needs_inferred_result<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool {
+ // The locals used for initialization which have yet to be checked.
+ let mut locals_to_check = Vec::new();
+ // All the locals which have been added to `locals_to_check`. Needed to prevent cycles.
+ let mut seen_locals = HirIdSet::default();
+ if !each_value_source_needs_inference(cx, e, &mut locals_to_check, &mut seen_locals) {
+ return false;
+ }
+ while let Some(id) = locals_to_check.pop() {
+ if let Some(Node::Local(l)) = get_parent_node(cx.tcx, id) {
+ if !l.ty.map_or(true, |ty| matches!(ty.kind, TyKind::Infer)) {
+ return false;
+ }
+ if let Some(e) = l.init {
+ if !each_value_source_needs_inference(cx, e, &mut locals_to_check, &mut seen_locals) {
+ return false;
+ }
+ } else if for_each_local_assignment(cx, id, |e| {
+ if each_value_source_needs_inference(cx, e, &mut locals_to_check, &mut seen_locals) {
+ ControlFlow::Continue(())
+ } else {
+ ControlFlow::Break(())
+ }
+ })
+ .is_break()
+ {
+ return false;
+ }
+ }
+ }
+
+ true
+}
+
+fn each_value_source_needs_inference(
+ cx: &LateContext<'_>,
+ e: &Expr<'_>,
+ locals_to_check: &mut Vec<HirId>,
+ seen_locals: &mut HirIdSet,
+) -> bool {
+ for_each_value_source(e, &mut |e| {
+ if needs_inferred_result_ty(cx, e, locals_to_check, seen_locals) {
+ ControlFlow::Continue(())
+ } else {
+ ControlFlow::Break(())
+ }
+ })
+ .is_continue()
+}
+
+fn needs_inferred_result_ty(
+ cx: &LateContext<'_>,
+ e: &Expr<'_>,
+ locals_to_check: &mut Vec<HirId>,
+ seen_locals: &mut HirIdSet,
+) -> bool {
+ let (id, args) = match e.kind {
+ ExprKind::Call(
+ Expr {
+ kind: ExprKind::Path(ref path),
+ hir_id,
+ ..
+ },
+ args,
+ ) => match cx.qpath_res(path, *hir_id) {
+ Res::Def(DefKind::AssocFn | DefKind::Fn, id) => (id, args),
+ _ => return false,
+ },
+ ExprKind::MethodCall(_, args, _) => match cx.typeck_results().type_dependent_def_id(e.hir_id) {
+ Some(id) => (id, args),
+ None => return false,
+ },
+ ExprKind::Path(QPath::Resolved(None, path)) => {
+ if let Res::Local(id) = path.res
+ && seen_locals.insert(id)
+ {
+ locals_to_check.push(id);
+ }
+ return true;
+ },
+ _ => return false,
+ };
+ let sig = cx.tcx.fn_sig(id).skip_binder();
+ if let ty::Param(output_ty) = *sig.output().kind() {
+ sig.inputs().iter().zip(args).all(|(&ty, arg)| {
+ !ty.is_param(output_ty.index) || each_value_source_needs_inference(cx, arg, locals_to_check, seen_locals)
+ })
+ } else {
+ false
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/mod.rs b/src/tools/clippy/clippy_lints/src/unit_types/mod.rs
new file mode 100644
index 000000000..6aa86a57c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_types/mod.rs
@@ -0,0 +1,110 @@
+mod let_unit_value;
+mod unit_arg;
+mod unit_cmp;
+mod utils;
+
+use rustc_hir::{Expr, Local};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for binding a unit value.
+ ///
+ /// ### Why is this bad?
+ /// A unit value cannot usefully be used anywhere. So
+ /// binding one is kind of pointless.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = {
+ /// 1;
+ /// };
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub LET_UNIT_VALUE,
+ style,
+ "creating a `let` binding to a value of unit type, which usually can't be used afterwards"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparisons to unit. This includes all binary
+ /// comparisons (like `==` and `<`) and asserts.
+ ///
+ /// ### Why is this bad?
+ /// Unit is always equal to itself, and thus is just a
+ /// clumsily written constant. Mostly this happens when someone accidentally
+ /// adds semicolons at the end of the operands.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn foo() {};
+ /// # fn bar() {};
+ /// # fn baz() {};
+ /// if {
+ /// foo();
+ /// } == {
+ /// bar();
+ /// } {
+ /// baz();
+ /// }
+ /// ```
+ /// is equal to
+ /// ```rust
+ /// # fn foo() {};
+ /// # fn bar() {};
+ /// # fn baz() {};
+ /// {
+ /// foo();
+ /// bar();
+ /// baz();
+ /// }
+ /// ```
+ ///
+ /// For asserts:
+ /// ```rust
+ /// # fn foo() {};
+ /// # fn bar() {};
+ /// assert_eq!({ foo(); }, { bar(); });
+ /// ```
+ /// will always succeed
+ #[clippy::version = "pre 1.29.0"]
+ pub UNIT_CMP,
+ correctness,
+ "comparing unit values"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for passing a unit value as an argument to a function without using a
+ /// unit literal (`()`).
+ ///
+ /// ### Why is this bad?
+ /// This is likely the result of an accidental semicolon.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// foo({
+ /// let a = bar();
+ /// baz(a);
+ /// })
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNIT_ARG,
+ complexity,
+ "passing unit to a function"
+}
+
+declare_lint_pass!(UnitTypes => [LET_UNIT_VALUE, UNIT_CMP, UNIT_ARG]);
+
+impl<'tcx> LateLintPass<'tcx> for UnitTypes {
+ fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
+ let_unit_value::check(cx, local);
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ unit_cmp::check(cx, expr);
+ unit_arg::check(cx, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
new file mode 100644
index 000000000..97d92f10e
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
@@ -0,0 +1,207 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{self as hir, Block, Expr, ExprKind, MatchSource, Node, StmtKind};
+use rustc_lint::LateContext;
+
+use super::{utils, UNIT_ARG};
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if expr.span.from_expansion() {
+ return;
+ }
+
+ // apparently stuff in the desugaring of `?` can trigger this
+ // so check for that here
+ // only calls to `Try::from_error` are marked as desugared,
+ // so we need to check both the current Expr and its parent.
+ if is_questionmark_desugar_marked_call(expr) {
+ return;
+ }
+ let map = &cx.tcx.hir();
+ let opt_parent_node = map.find(map.get_parent_node(expr.hir_id));
+ if_chain! {
+ if let Some(hir::Node::Expr(parent_expr)) = opt_parent_node;
+ if is_questionmark_desugar_marked_call(parent_expr);
+ then {
+ return;
+ }
+ }
+
+ match expr.kind {
+ ExprKind::Call(_, args) | ExprKind::MethodCall(_, args, _) => {
+ let args_to_recover = args
+ .iter()
+ .filter(|arg| {
+ if cx.typeck_results().expr_ty(arg).is_unit() && !utils::is_unit_literal(arg) {
+ !matches!(
+ &arg.kind,
+ ExprKind::Match(.., MatchSource::TryDesugar) | ExprKind::Path(..)
+ )
+ } else {
+ false
+ }
+ })
+ .collect::<Vec<_>>();
+ if !args_to_recover.is_empty() {
+ lint_unit_args(cx, expr, &args_to_recover);
+ }
+ },
+ _ => (),
+ }
+}
+
+fn is_questionmark_desugar_marked_call(expr: &Expr<'_>) -> bool {
+ use rustc_span::hygiene::DesugaringKind;
+ if let ExprKind::Call(callee, _) = expr.kind {
+ callee.span.is_desugaring(DesugaringKind::QuestionMark)
+ } else {
+ false
+ }
+}
+
+fn lint_unit_args(cx: &LateContext<'_>, expr: &Expr<'_>, args_to_recover: &[&Expr<'_>]) {
+ let mut applicability = Applicability::MachineApplicable;
+ let (singular, plural) = if args_to_recover.len() > 1 {
+ ("", "s")
+ } else {
+ ("a ", "")
+ };
+ span_lint_and_then(
+ cx,
+ UNIT_ARG,
+ expr.span,
+ &format!("passing {}unit value{} to a function", singular, plural),
+ |db| {
+ let mut or = "";
+ args_to_recover
+ .iter()
+ .filter_map(|arg| {
+ if_chain! {
+ if let ExprKind::Block(block, _) = arg.kind;
+ if block.expr.is_none();
+ if let Some(last_stmt) = block.stmts.iter().last();
+ if let StmtKind::Semi(last_expr) = last_stmt.kind;
+ if let Some(snip) = snippet_opt(cx, last_expr.span);
+ then {
+ Some((
+ last_stmt.span,
+ snip,
+ ))
+ }
+ else {
+ None
+ }
+ }
+ })
+ .for_each(|(span, sugg)| {
+ db.span_suggestion(
+ span,
+ "remove the semicolon from the last statement in the block",
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ or = "or ";
+ applicability = Applicability::MaybeIncorrect;
+ });
+
+ let arg_snippets: Vec<String> = args_to_recover
+ .iter()
+ .filter_map(|arg| snippet_opt(cx, arg.span))
+ .collect();
+ let arg_snippets_without_empty_blocks: Vec<String> = args_to_recover
+ .iter()
+ .filter(|arg| !is_empty_block(arg))
+ .filter_map(|arg| snippet_opt(cx, arg.span))
+ .collect();
+
+ if let Some(call_snippet) = snippet_opt(cx, expr.span) {
+ let sugg = fmt_stmts_and_call(
+ cx,
+ expr,
+ &call_snippet,
+ &arg_snippets,
+ &arg_snippets_without_empty_blocks,
+ );
+
+ if arg_snippets_without_empty_blocks.is_empty() {
+ db.multipart_suggestion(
+ &format!("use {}unit literal{} instead", singular, plural),
+ args_to_recover
+ .iter()
+ .map(|arg| (arg.span, "()".to_string()))
+ .collect::<Vec<_>>(),
+ applicability,
+ );
+ } else {
+ let plural = arg_snippets_without_empty_blocks.len() > 1;
+ let empty_or_s = if plural { "s" } else { "" };
+ let it_or_them = if plural { "them" } else { "it" };
+ db.span_suggestion(
+ expr.span,
+ &format!(
+ "{}move the expression{} in front of the call and replace {} with the unit literal `()`",
+ or, empty_or_s, it_or_them
+ ),
+ sugg,
+ applicability,
+ );
+ }
+ }
+ },
+ );
+}
+
+fn is_empty_block(expr: &Expr<'_>) -> bool {
+ matches!(
+ expr.kind,
+ ExprKind::Block(
+ Block {
+ stmts: &[],
+ expr: None,
+ ..
+ },
+ _,
+ )
+ )
+}
+
+fn fmt_stmts_and_call(
+ cx: &LateContext<'_>,
+ call_expr: &Expr<'_>,
+ call_snippet: &str,
+ args_snippets: &[impl AsRef<str>],
+ non_empty_block_args_snippets: &[impl AsRef<str>],
+) -> String {
+ let call_expr_indent = indent_of(cx, call_expr.span).unwrap_or(0);
+ let call_snippet_with_replacements = args_snippets
+ .iter()
+ .fold(call_snippet.to_owned(), |acc, arg| acc.replacen(arg.as_ref(), "()", 1));
+
+ let mut stmts_and_call = non_empty_block_args_snippets
+ .iter()
+ .map(|it| it.as_ref().to_owned())
+ .collect::<Vec<_>>();
+ stmts_and_call.push(call_snippet_with_replacements);
+ stmts_and_call = stmts_and_call
+ .into_iter()
+ .map(|v| reindent_multiline(v.into(), true, Some(call_expr_indent)).into_owned())
+ .collect();
+
+ let mut stmts_and_call_snippet = stmts_and_call.join(&format!("{}{}", ";\n", " ".repeat(call_expr_indent)));
+ // expr is not in a block statement or result expression position, wrap in a block
+ let parent_node = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(call_expr.hir_id));
+ if !matches!(parent_node, Some(Node::Block(_))) && !matches!(parent_node, Some(Node::Stmt(_))) {
+ let block_indent = call_expr_indent + 4;
+ stmts_and_call_snippet =
+ reindent_multiline(stmts_and_call_snippet.into(), true, Some(block_indent)).into_owned();
+ stmts_and_call_snippet = format!(
+ "{{\n{}{}\n{}}}",
+ " ".repeat(block_indent),
+ &stmts_and_call_snippet,
+ " ".repeat(call_expr_indent)
+ );
+ }
+ stmts_and_call_snippet
+}
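The kind of rewrite the suggestion machinery above produces, sketched on a hypothetical call (illustration only, not part of the patch):

```rust
fn takes_unit(_: ()) {}

fn compute() -> i32 {
    42
}

fn before() {
    // Flagged: the trailing semicolon makes the block argument evaluate to `()`.
    takes_unit({
        compute();
    });
}

fn after() {
    // One suggested shape: move the expression in front of the call and pass `()`.
    compute();
    takes_unit(());
}
```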
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs
new file mode 100644
index 000000000..1dd8895eb
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs
@@ -0,0 +1,50 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::macros::{find_assert_eq_args, root_macro_call_first_node};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+
+use super::UNIT_CMP;
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if expr.span.from_expansion() {
+ if let Some(macro_call) = root_macro_call_first_node(cx, expr) {
+ let macro_name = cx.tcx.item_name(macro_call.def_id);
+ let result = match macro_name.as_str() {
+ "assert_eq" | "debug_assert_eq" => "succeed",
+ "assert_ne" | "debug_assert_ne" => "fail",
+ _ => return,
+ };
+ let Some ((left, _, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return };
+ if !cx.typeck_results().expr_ty(left).is_unit() {
+ return;
+ }
+ span_lint(
+ cx,
+ UNIT_CMP,
+ macro_call.span,
+ &format!("`{}` of unit values detected. This will always {}", macro_name, result),
+ );
+ }
+ return;
+ }
+
+ if let ExprKind::Binary(ref cmp, left, _) = expr.kind {
+ let op = cmp.node;
+ if op.is_comparison() && cx.typeck_results().expr_ty(left).is_unit() {
+ let result = match op {
+ BinOpKind::Eq | BinOpKind::Le | BinOpKind::Ge => "true",
+ _ => "false",
+ };
+ span_lint(
+ cx,
+ UNIT_CMP,
+ expr.span,
+ &format!(
+ "{}-comparison of unit values detected. This will always be {}",
+ op.as_str(),
+ result
+ ),
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/utils.rs b/src/tools/clippy/clippy_lints/src/unit_types/utils.rs
new file mode 100644
index 000000000..9a3750b23
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unit_types/utils.rs
@@ -0,0 +1,5 @@
+use rustc_hir::{Expr, ExprKind};
+
+pub(super) fn is_unit_literal(expr: &Expr<'_>) -> bool {
+ matches!(expr.kind, ExprKind::Tup(slice) if slice.is_empty())
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnamed_address.rs b/src/tools/clippy/clippy_lints/src/unnamed_address.rs
new file mode 100644
index 000000000..0bcafde65
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnamed_address.rs
@@ -0,0 +1,132 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparisons with an address of a function item.
+ ///
+ /// ### Why is this bad?
+ /// The address of a function item is not guaranteed to be unique and could vary
+ /// between different code generation units. Furthermore, different function items could have
+ /// the same address after being merged together.
+ ///
+ /// ### Example
+ /// ```rust
+ /// type F = fn();
+ /// fn a() {}
+ /// let f: F = a;
+ /// if f == a {
+ /// // ...
+ /// }
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub FN_ADDRESS_COMPARISONS,
+ correctness,
+ "comparison with an address of a function item"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for comparisons with an address of a trait vtable.
+ ///
+ /// ### Why is this bad?
+ /// Comparing trait object pointers compares vtable addresses, which
+ /// are not guaranteed to be unique and could vary between different code generation units.
+ /// Furthermore, vtables for different types could have the same address after being merged
+ /// together.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let a: Rc<dyn Trait> = ...
+ /// let b: Rc<dyn Trait> = ...
+ /// if Rc::ptr_eq(&a, &b) {
+ /// ...
+ /// }
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub VTABLE_ADDRESS_COMPARISONS,
+ correctness,
+ "comparison with an address of a trait vtable"
+}
+
+declare_lint_pass!(UnnamedAddress => [FN_ADDRESS_COMPARISONS, VTABLE_ADDRESS_COMPARISONS]);
+
+impl LateLintPass<'_> for UnnamedAddress {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ fn is_comparison(binop: BinOpKind) -> bool {
+ matches!(
+ binop,
+ BinOpKind::Eq | BinOpKind::Lt | BinOpKind::Le | BinOpKind::Ne | BinOpKind::Ge | BinOpKind::Gt
+ )
+ }
+
+ fn is_trait_ptr(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ match cx.typeck_results().expr_ty_adjusted(expr).kind() {
+ ty::RawPtr(ty::TypeAndMut { ty, .. }) => ty.is_trait(),
+ _ => false,
+ }
+ }
+
+ fn is_fn_def(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ matches!(cx.typeck_results().expr_ty(expr).kind(), ty::FnDef(..))
+ }
+
+ if_chain! {
+ if let ExprKind::Binary(binop, left, right) = expr.kind;
+ if is_comparison(binop.node);
+ if is_trait_ptr(cx, left) && is_trait_ptr(cx, right);
+ then {
+ span_lint_and_help(
+ cx,
+ VTABLE_ADDRESS_COMPARISONS,
+ expr.span,
+ "comparing trait object pointers compares a non-unique vtable address",
+ None,
+ "consider extracting and comparing data pointers only",
+ );
+ }
+ }
+
+ if_chain! {
+ if let ExprKind::Call(func, [ref _left, ref _right]) = expr.kind;
+ if let ExprKind::Path(ref func_qpath) = func.kind;
+ if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
+ if match_def_path(cx, def_id, &paths::PTR_EQ) ||
+ match_def_path(cx, def_id, &paths::RC_PTR_EQ) ||
+ match_def_path(cx, def_id, &paths::ARC_PTR_EQ);
+ let ty_param = cx.typeck_results().node_substs(func.hir_id).type_at(0);
+ if ty_param.is_trait();
+ then {
+ span_lint_and_help(
+ cx,
+ VTABLE_ADDRESS_COMPARISONS,
+ expr.span,
+ "comparing trait object pointers compares a non-unique vtable address",
+ None,
+ "consider extracting and comparing data pointers only",
+ );
+ }
+ }
+
+ if_chain! {
+ if let ExprKind::Binary(binop, left, right) = expr.kind;
+ if is_comparison(binop.node);
+ if cx.typeck_results().expr_ty_adjusted(left).is_fn_ptr();
+ if cx.typeck_results().expr_ty_adjusted(right).is_fn_ptr();
+ if is_fn_def(cx, left) || is_fn_def(cx, right);
+ then {
+ span_lint(
+ cx,
+ FN_ADDRESS_COMPARISONS,
+ expr.span,
+ "comparing with a non-unique address of a function item",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
new file mode 100644
index 000000000..8a4f4c0ad
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
@@ -0,0 +1,81 @@
+use clippy_utils::{diagnostics::span_lint_and_sugg, ty::is_type_diagnostic_item};
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Detects cases of owned empty strings being passed as an argument to a function expecting `&str`
+ ///
+ /// ### Why is this bad?
+ ///
+ /// This results in longer and less readable code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// vec!["1", "2", "3"].join(&String::new());
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// vec!["1", "2", "3"].join("");
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub UNNECESSARY_OWNED_EMPTY_STRINGS,
+ style,
+ "detects cases of references to owned empty strings being passed as an argument to a function expecting `&str`"
+}
+declare_lint_pass!(UnnecessaryOwnedEmptyStrings => [UNNECESSARY_OWNED_EMPTY_STRINGS]);
+
+impl<'tcx> LateLintPass<'tcx> for UnnecessaryOwnedEmptyStrings {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if_chain! {
+ if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner_expr) = expr.kind;
+ if let ExprKind::Call(fun, args) = inner_expr.kind;
+ if let ExprKind::Path(ref qpath) = fun.kind;
+ if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
+ if let ty::Ref(_, inner_str, _) = cx.typeck_results().expr_ty_adjusted(expr).kind();
+ if inner_str.is_str();
+ then {
+ if match_def_path(cx, fun_def_id, &paths::STRING_NEW) {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_OWNED_EMPTY_STRINGS,
+ expr.span,
+ "usage of `&String::new()` for a function expecting a `&str` argument",
+ "try",
+ "\"\"".to_owned(),
+ Applicability::MachineApplicable,
+ );
+ } else {
+ if_chain! {
+ if match_def_path(cx, fun_def_id, &paths::FROM_FROM);
+ if let [.., last_arg] = args;
+ if let ExprKind::Lit(spanned) = &last_arg.kind;
+ if let LitKind::Str(symbol, _) = spanned.node;
+ if symbol.is_empty();
+ let inner_expr_type = cx.typeck_results().expr_ty(inner_expr);
+ if is_type_diagnostic_item(cx, inner_expr_type, sym::String);
+ then {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_OWNED_EMPTY_STRINGS,
+ expr.span,
+ "usage of `&String::from(\"\")` for a function expecting a `&str` argument",
+ "try",
+ "\"\"".to_owned(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs b/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs
new file mode 100644
index 000000000..839a4bdab
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_self_imports.rs
@@ -0,0 +1,70 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use if_chain::if_chain;
+use rustc_ast::{Item, ItemKind, UseTreeKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::kw;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for imports ending in `::{self}`.
+ ///
+ /// ### Why is this bad?
+ /// In most cases, this can be written much more cleanly by omitting `::{self}`.
+ ///
+ /// ### Known problems
+ /// Removing `::{self}` will cause any non-module items at the same path to also be imported.
+ /// This might cause a naming conflict (https://github.com/rust-lang/rustfmt/issues/3568). This lint makes no attempt
+ /// to detect this scenario and that is why it is a restriction lint.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::io::{self};
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::io;
+ /// ```
+ #[clippy::version = "1.53.0"]
+ pub UNNECESSARY_SELF_IMPORTS,
+ restriction,
+ "imports ending in `::{self}`, which can be omitted"
+}
+
+declare_lint_pass!(UnnecessarySelfImports => [UNNECESSARY_SELF_IMPORTS]);
+
+impl EarlyLintPass for UnnecessarySelfImports {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if_chain! {
+ if let ItemKind::Use(use_tree) = &item.kind;
+ if let UseTreeKind::Nested(nodes) = &use_tree.kind;
+ if let [(self_tree, _)] = &**nodes;
+ if let [self_seg] = &*self_tree.prefix.segments;
+ if self_seg.ident.name == kw::SelfLower;
+ if let Some(last_segment) = use_tree.prefix.segments.last();
+
+ then {
+ span_lint_and_then(
+ cx,
+ UNNECESSARY_SELF_IMPORTS,
+ item.span,
+ "import ending with `::{self}`",
+ |diag| {
+ diag.span_suggestion(
+ last_segment.span().with_hi(item.span.hi()),
+ "consider omitting `::{self}`",
+ format!(
+ "{}{};",
+ last_segment.ident,
+ if let UseTreeKind::Simple(Some(alias), ..) = self_tree.kind { format!(" as {}", alias) } else { String::new() },
+ ),
+ Applicability::MaybeIncorrect,
+ );
+ diag.note("this will slightly change semantics; any non-module items at the same path will also be imported");
+ },
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/unnecessary_sort_by.rs
new file mode 100644
index 000000000..ea5aadbbc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_sort_by.rs
@@ -0,0 +1,258 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Closure, Expr, ExprKind, Mutability, Param, Pat, PatKind, Path, PathSegment, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::{self, subst::GenericArgKind};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+use rustc_span::symbol::Ident;
+use std::iter;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Detects uses of `Vec::sort_by` passing in a closure
+ /// which compares the two arguments, either directly or indirectly.
+ ///
+ /// ### Why is this bad?
+ /// It is clearer to use `Vec::sort_by_key` (or `Vec::sort` if
+ /// possible) than to use `Vec::sort_by` and a more complicated
+ /// closure.
+ ///
+ /// ### Known problems
+ /// If the suggested `Vec::sort_by_key` uses `Reverse` and it isn't already
+ /// imported by a use statement, then it will need to be added manually.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # struct A;
+ /// # impl A { fn foo(&self) {} }
+ /// # let mut vec: Vec<A> = Vec::new();
+ /// vec.sort_by(|a, b| a.foo().cmp(&b.foo()));
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # struct A;
+ /// # impl A { fn foo(&self) {} }
+ /// # let mut vec: Vec<A> = Vec::new();
+ /// vec.sort_by_key(|a| a.foo());
+ /// ```
+ #[clippy::version = "1.46.0"]
+ pub UNNECESSARY_SORT_BY,
+ complexity,
+ "Use of `Vec::sort_by` when `Vec::sort_by_key` or `Vec::sort` would be clearer"
+}
+
+declare_lint_pass!(UnnecessarySortBy => [UNNECESSARY_SORT_BY]);
+
+enum LintTrigger {
+ Sort(SortDetection),
+ SortByKey(SortByKeyDetection),
+}
+
+struct SortDetection {
+ vec_name: String,
+ unstable: bool,
+}
+
+struct SortByKeyDetection {
+ vec_name: String,
+ closure_arg: String,
+ closure_body: String,
+ reverse: bool,
+ unstable: bool,
+}
+
+/// Detect whether the two expressions are mirrored (identical, except one
+/// contains `a` and the other replaces it with `b`).
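+/// For example, `a.foo()` and `b.foo()` are mirrored with respect to the idents `a` and `b`.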
+fn mirrored_exprs(a_expr: &Expr<'_>, a_ident: &Ident, b_expr: &Expr<'_>, b_ident: &Ident) -> bool {
+ match (&a_expr.kind, &b_expr.kind) {
+ // Two boxes with mirrored contents
+ (ExprKind::Box(left_expr), ExprKind::Box(right_expr)) => {
+ mirrored_exprs(left_expr, a_ident, right_expr, b_ident)
+ },
+ // Two arrays with mirrored contents
+ (ExprKind::Array(left_exprs), ExprKind::Array(right_exprs)) => {
+ iter::zip(*left_exprs, *right_exprs).all(|(left, right)| mirrored_exprs(left, a_ident, right, b_ident))
+ },
+ // The two exprs are function calls.
+ // Check to see that the function itself and its arguments are mirrored
+ (ExprKind::Call(left_expr, left_args), ExprKind::Call(right_expr, right_args)) => {
+ mirrored_exprs(left_expr, a_ident, right_expr, b_ident)
+ && iter::zip(*left_args, *right_args).all(|(left, right)| mirrored_exprs(left, a_ident, right, b_ident))
+ },
+ // The two exprs are method calls.
+ // Check to see that the function is the same and the arguments are mirrored
+ // This is enough because the receiver of the method is listed in the arguments
+ (ExprKind::MethodCall(left_segment, left_args, _), ExprKind::MethodCall(right_segment, right_args, _)) => {
+ left_segment.ident == right_segment.ident
+ && iter::zip(*left_args, *right_args).all(|(left, right)| mirrored_exprs(left, a_ident, right, b_ident))
+ },
+ // Two tuples with mirrored contents
+ (ExprKind::Tup(left_exprs), ExprKind::Tup(right_exprs)) => {
+ iter::zip(*left_exprs, *right_exprs).all(|(left, right)| mirrored_exprs(left, a_ident, right, b_ident))
+ },
+ // Two binary ops, which are the same operation and which have mirrored arguments
+ (ExprKind::Binary(left_op, left_left, left_right), ExprKind::Binary(right_op, right_left, right_right)) => {
+ left_op.node == right_op.node
+ && mirrored_exprs(left_left, a_ident, right_left, b_ident)
+ && mirrored_exprs(left_right, a_ident, right_right, b_ident)
+ },
+ // Two unary ops, which are the same operation and which have the same argument
+ (ExprKind::Unary(left_op, left_expr), ExprKind::Unary(right_op, right_expr)) => {
+ left_op == right_op && mirrored_exprs(left_expr, a_ident, right_expr, b_ident)
+ },
+ // The two exprs are literals of some kind
+ (ExprKind::Lit(left_lit), ExprKind::Lit(right_lit)) => left_lit.node == right_lit.node,
+ (ExprKind::Cast(left, _), ExprKind::Cast(right, _)) => mirrored_exprs(left, a_ident, right, b_ident),
+ (ExprKind::DropTemps(left_block), ExprKind::DropTemps(right_block)) => {
+ mirrored_exprs(left_block, a_ident, right_block, b_ident)
+ },
+ (ExprKind::Field(left_expr, left_ident), ExprKind::Field(right_expr, right_ident)) => {
+ left_ident.name == right_ident.name && mirrored_exprs(left_expr, a_ident, right_expr, right_ident)
+ },
+ // Two paths: either one is a and the other is b, or they're identical to each other
+ (
+ ExprKind::Path(QPath::Resolved(
+ _,
+ Path {
+ segments: left_segments,
+ ..
+ },
+ )),
+ ExprKind::Path(QPath::Resolved(
+ _,
+ Path {
+ segments: right_segments,
+ ..
+ },
+ )),
+ ) => {
+ (iter::zip(*left_segments, *right_segments).all(|(left, right)| left.ident == right.ident)
+ && left_segments
+ .iter()
+ .all(|seg| &seg.ident != a_ident && &seg.ident != b_ident))
+ || (left_segments.len() == 1
+ && &left_segments[0].ident == a_ident
+ && right_segments.len() == 1
+ && &right_segments[0].ident == b_ident)
+ },
+ // Matching expressions, but one or both is borrowed
+ (
+ ExprKind::AddrOf(left_kind, Mutability::Not, left_expr),
+ ExprKind::AddrOf(right_kind, Mutability::Not, right_expr),
+ ) => left_kind == right_kind && mirrored_exprs(left_expr, a_ident, right_expr, b_ident),
+ (_, ExprKind::AddrOf(_, Mutability::Not, right_expr)) => mirrored_exprs(a_expr, a_ident, right_expr, b_ident),
+ (ExprKind::AddrOf(_, Mutability::Not, left_expr), _) => mirrored_exprs(left_expr, a_ident, b_expr, b_ident),
+ _ => false,
+ }
+}
+
+fn detect_lint(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<LintTrigger> {
+ if_chain! {
+ if let ExprKind::MethodCall(name_ident, args, _) = &expr.kind;
+ if let name = name_ident.ident.name.to_ident_string();
+ if name == "sort_by" || name == "sort_unstable_by";
+ if let [vec, Expr { kind: ExprKind::Closure(Closure { body: closure_body_id, .. }), .. }] = args;
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(vec), sym::Vec);
+ if let closure_body = cx.tcx.hir().body(*closure_body_id);
+ if let &[
+ Param { pat: Pat { kind: PatKind::Binding(_, _, left_ident, _), .. }, ..},
+ Param { pat: Pat { kind: PatKind::Binding(_, _, right_ident, _), .. }, .. }
+ ] = &closure_body.params;
+ if let ExprKind::MethodCall(method_path, [ref left_expr, ref right_expr], _) = &closure_body.value.kind;
+ if method_path.ident.name == sym::cmp;
+ then {
+ let (closure_body, closure_arg, reverse) = if mirrored_exprs(
+ left_expr,
+ left_ident,
+ right_expr,
+ right_ident
+ ) {
+ (Sugg::hir(cx, left_expr, "..").to_string(), left_ident.name.to_string(), false)
+ } else if mirrored_exprs(left_expr, right_ident, right_expr, left_ident) {
+ (Sugg::hir(cx, left_expr, "..").to_string(), right_ident.name.to_string(), true)
+ } else {
+ return None;
+ };
+ let vec_name = Sugg::hir(cx, &args[0], "..").to_string();
+ let unstable = name == "sort_unstable_by";
+
+ if_chain! {
+ if let ExprKind::Path(QPath::Resolved(_, Path {
+ segments: [PathSegment { ident: left_name, .. }], ..
+ })) = &left_expr.kind;
+ if left_name == left_ident;
+ if cx.tcx.get_diagnostic_item(sym::Ord).map_or(false, |id| {
+ implements_trait(cx, cx.typeck_results().expr_ty(left_expr), id, &[])
+ });
+ then {
+ return Some(LintTrigger::Sort(SortDetection { vec_name, unstable }));
+ }
+ }
+
+ if !expr_borrows(cx, left_expr) {
+ return Some(LintTrigger::SortByKey(SortByKeyDetection {
+ vec_name,
+ closure_arg,
+ closure_body,
+ reverse,
+ unstable,
+ }));
+ }
+ }
+ }
+
+ None
+}
+
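+/// Returns `true` if the expression's type is a reference or contains a lifetime parameter.
+/// In that case no `sort_by_key` suggestion is made, since a key closure that borrows from its
+/// argument generally cannot satisfy `sort_by_key`'s signature.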
+fn expr_borrows(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let ty = cx.typeck_results().expr_ty(expr);
+ matches!(ty.kind(), ty::Ref(..)) || ty.walk().any(|arg| matches!(arg.unpack(), GenericArgKind::Lifetime(_)))
+}
+
+impl LateLintPass<'_> for UnnecessarySortBy {
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ match detect_lint(cx, expr) {
+ Some(LintTrigger::SortByKey(trigger)) => span_lint_and_sugg(
+ cx,
+ UNNECESSARY_SORT_BY,
+ expr.span,
+ "use Vec::sort_by_key here instead",
+ "try",
+ format!(
+ "{}.sort{}_by_key(|{}| {})",
+ trigger.vec_name,
+ if trigger.unstable { "_unstable" } else { "" },
+ trigger.closure_arg,
+ if trigger.reverse {
+ format!("std::cmp::Reverse({})", trigger.closure_body)
+ } else {
+ trigger.closure_body.to_string()
+ },
+ ),
+ if trigger.reverse {
+ Applicability::MaybeIncorrect
+ } else {
+ Applicability::MachineApplicable
+ },
+ ),
+ Some(LintTrigger::Sort(trigger)) => span_lint_and_sugg(
+ cx,
+ UNNECESSARY_SORT_BY,
+ expr.span,
+ "use Vec::sort here instead",
+ "try",
+ format!(
+ "{}.sort{}()",
+ trigger.vec_name,
+ if trigger.unstable { "_unstable" } else { "" },
+ ),
+ Applicability::MachineApplicable,
+ ),
+ None => {},
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
new file mode 100644
index 000000000..f4f5a4336
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
@@ -0,0 +1,177 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::{contains_return, is_lang_ctor, return_ty, visitors::find_all_ret_expressions};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::LangItem::{OptionSome, ResultOk};
+use rustc_hir::{Body, ExprKind, FnDecl, HirId, Impl, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::sym;
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for private functions that only return `Ok` or `Some`.
+ ///
+ /// ### Why is this bad?
+ /// It is not meaningful to wrap values when no `None` or `Err` is returned.
+ ///
+ /// ### Known problems
+ /// There can be false positives if the function signature is designed to
+ /// fit some external requirement.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn get_cool_number(a: bool, b: bool) -> Option<i32> {
+ /// if a && b {
+ /// return Some(50);
+ /// }
+ /// if a {
+ /// Some(0)
+ /// } else {
+ /// Some(10)
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn get_cool_number(a: bool, b: bool) -> i32 {
+ /// if a && b {
+ /// return 50;
+ /// }
+ /// if a {
+ /// 0
+ /// } else {
+ /// 10
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub UNNECESSARY_WRAPS,
+ pedantic,
+ "functions that only return `Ok` or `Some`"
+}
+
+pub struct UnnecessaryWraps {
+ avoid_breaking_exported_api: bool,
+}
+
+impl_lint_pass!(UnnecessaryWraps => [UNNECESSARY_WRAPS]);
+
+impl UnnecessaryWraps {
+ pub fn new(avoid_breaking_exported_api: bool) -> Self {
+ Self {
+ avoid_breaking_exported_api,
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ fn_kind: FnKind<'tcx>,
+ fn_decl: &FnDecl<'tcx>,
+ body: &Body<'tcx>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ // Abort if public function/method or closure.
+ match fn_kind {
+ FnKind::ItemFn(..) | FnKind::Method(..) => {
+ let def_id = cx.tcx.hir().local_def_id(hir_id);
+ if self.avoid_breaking_exported_api && cx.access_levels.is_exported(def_id) {
+ return;
+ }
+ },
+ FnKind::Closure => return,
+ }
+
+ // Abort if the method implements a trait or if it is a trait method.
+ if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
+ if matches!(
+ item.kind,
+ ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..)
+ ) {
+ return;
+ }
+ }
+
+ // Get the wrapper and inner types; if we can't, abort.
+ let (return_type_label, lang_item, inner_type) = if let ty::Adt(adt_def, subst) = return_ty(cx, hir_id).kind() {
+ if cx.tcx.is_diagnostic_item(sym::Option, adt_def.did()) {
+ ("Option", OptionSome, subst.type_at(0))
+ } else if cx.tcx.is_diagnostic_item(sym::Result, adt_def.did()) {
+ ("Result", ResultOk, subst.type_at(0))
+ } else {
+ return;
+ }
+ } else {
+ return;
+ };
+
+ // Check that all return expressions respect the following condition and collect them.
+ let mut suggs = Vec::new();
+ let can_sugg = find_all_ret_expressions(cx, &body.value, |ret_expr| {
+ if_chain! {
+ if !ret_expr.span.from_expansion();
+ // Check if a function call.
+ if let ExprKind::Call(func, [arg]) = ret_expr.kind;
+ // Check if OPTION_SOME or RESULT_OK, depending on return type.
+ if let ExprKind::Path(qpath) = &func.kind;
+ if is_lang_ctor(cx, qpath, lang_item);
+ // Make sure the function argument does not contain a return expression.
+ if !contains_return(arg);
+ then {
+ suggs.push(
+ (
+ ret_expr.span,
+ if inner_type.is_unit() {
+ "".to_string()
+ } else {
+ snippet(cx, arg.span.source_callsite(), "..").to_string()
+ }
+ )
+ );
+ true
+ } else {
+ false
+ }
+ }
+ });
+
+ if can_sugg && !suggs.is_empty() {
+ let (lint_msg, return_type_sugg_msg, return_type_sugg, body_sugg_msg) = if inner_type.is_unit() {
+ (
+ "this function's return value is unnecessary".to_string(),
+ "remove the return type...".to_string(),
+ snippet(cx, fn_decl.output.span(), "..").to_string(),
+ "...and then remove returned values",
+ )
+ } else {
+ (
+ format!(
+ "this function's return value is unnecessarily wrapped by `{}`",
+ return_type_label
+ ),
+ format!("remove `{}` from the return type...", return_type_label),
+ inner_type.to_string(),
+ "...and then change returning expressions",
+ )
+ };
+
+ span_lint_and_then(cx, UNNECESSARY_WRAPS, span, lint_msg.as_str(), |diag| {
+ diag.span_suggestion(
+ fn_decl.output.span(),
+ return_type_sugg_msg.as_str(),
+ return_type_sugg,
+ Applicability::MaybeIncorrect,
+ );
+ diag.multipart_suggestion(body_sugg_msg, suggs, Applicability::MaybeIncorrect);
+ });
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
new file mode 100644
index 000000000..04e2f301b
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
@@ -0,0 +1,428 @@
+#![allow(clippy::wildcard_imports, clippy::enum_glob_use)]
+
+use clippy_utils::ast_utils::{eq_field_pat, eq_id, eq_maybe_qself, eq_pat, eq_path};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{meets_msrv, msrvs, over};
+use rustc_ast::mut_visit::*;
+use rustc_ast::ptr::P;
+use rustc_ast::{self as ast, Mutability, Pat, PatKind, PatKind::*, DUMMY_NODE_ID};
+use rustc_ast_pretty::pprust;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::DUMMY_SP;
+
+use std::cell::Cell;
+use std::mem;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnested or-patterns, e.g., `Some(0) | Some(2)` and
+ /// suggests replacing the pattern with a nested one, `Some(0 | 2)`.
+ ///
+ /// Another way to think of this is that it rewrites patterns in
+ /// *disjunctive normal form (DNF)* into *conjunctive normal form (CNF)*.
+ ///
+ /// ### Why is this bad?
+ /// In the example above, `Some` is repeated, which unnecessarily complicates the pattern.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn main() {
+ /// if let Some(0) | Some(2) = Some(0) {}
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn main() {
+ /// if let Some(0 | 2) = Some(0) {}
+ /// }
+ /// ```
+ #[clippy::version = "1.46.0"]
+ pub UNNESTED_OR_PATTERNS,
+ pedantic,
+ "unnested or-patterns, e.g., `Foo(Bar) | Foo(Baz) instead of `Foo(Bar | Baz)`"
+}
+
+#[derive(Clone, Copy)]
+pub struct UnnestedOrPatterns {
+ msrv: Option<RustcVersion>,
+}
+
+impl UnnestedOrPatterns {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self { msrv }
+ }
+}
+
+impl_lint_pass!(UnnestedOrPatterns => [UNNESTED_OR_PATTERNS]);
+
+impl EarlyLintPass for UnnestedOrPatterns {
+ fn check_arm(&mut self, cx: &EarlyContext<'_>, a: &ast::Arm) {
+ if meets_msrv(self.msrv, msrvs::OR_PATTERNS) {
+ lint_unnested_or_patterns(cx, &a.pat);
+ }
+ }
+
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
+ if meets_msrv(self.msrv, msrvs::OR_PATTERNS) {
+ if let ast::ExprKind::Let(pat, _, _) = &e.kind {
+ lint_unnested_or_patterns(cx, pat);
+ }
+ }
+ }
+
+ fn check_param(&mut self, cx: &EarlyContext<'_>, p: &ast::Param) {
+ if meets_msrv(self.msrv, msrvs::OR_PATTERNS) {
+ lint_unnested_or_patterns(cx, &p.pat);
+ }
+ }
+
+ fn check_local(&mut self, cx: &EarlyContext<'_>, l: &ast::Local) {
+ if meets_msrv(self.msrv, msrvs::OR_PATTERNS) {
+ lint_unnested_or_patterns(cx, &l.pat);
+ }
+ }
+
+ extract_msrv_attr!(EarlyContext);
+}
+
+fn lint_unnested_or_patterns(cx: &EarlyContext<'_>, pat: &Pat) {
+ if let Ident(.., None) | Lit(_) | Wild | Path(..) | Range(..) | Rest | MacCall(_) = pat.kind {
+ // This is a leaf pattern, so cloning is unprofitable.
+ return;
+ }
+
+ let mut pat = P(pat.clone());
+
+ // Nix all the paren patterns everywhere so that they aren't in our way.
+ remove_all_parens(&mut pat);
+
+ // Transform all unnested or-patterns into nested ones, and if there were none, quit.
+ if !unnest_or_patterns(&mut pat) {
+ return;
+ }
+
+ span_lint_and_then(cx, UNNESTED_OR_PATTERNS, pat.span, "unnested or-patterns", |db| {
+ insert_necessary_parens(&mut pat);
+ db.span_suggestion_verbose(
+ pat.span,
+ "nest the patterns",
+ pprust::pat_to_string(&pat),
+ Applicability::MachineApplicable,
+ );
+ });
+}
+
+/// Remove all `(p)` patterns in `pat`.
+fn remove_all_parens(pat: &mut P<Pat>) {
+ struct Visitor;
+ impl MutVisitor for Visitor {
+ fn visit_pat(&mut self, pat: &mut P<Pat>) {
+ noop_visit_pat(pat, self);
+ let inner = match &mut pat.kind {
+ Paren(i) => mem::replace(&mut i.kind, Wild),
+ _ => return,
+ };
+ pat.kind = inner;
+ }
+ }
+ Visitor.visit_pat(pat);
+}
+
+/// Insert parens where necessary according to Rust's precedence rules for patterns.
+fn insert_necessary_parens(pat: &mut P<Pat>) {
+ struct Visitor;
+ impl MutVisitor for Visitor {
+ fn visit_pat(&mut self, pat: &mut P<Pat>) {
+ use ast::{BindingMode::*, Mutability::*};
+ noop_visit_pat(pat, self);
+ let target = match &mut pat.kind {
+ // `i @ a | b`, `box a | b`, and `& mut? a | b`.
+ Ident(.., Some(p)) | Box(p) | Ref(p, _) if matches!(&p.kind, Or(ps) if ps.len() > 1) => p,
+ Ref(p, Not) if matches!(p.kind, Ident(ByValue(Mut), ..)) => p, // `&(mut x)`
+ _ => return,
+ };
+ target.kind = Paren(P(take_pat(target)));
+ }
+ }
+ Visitor.visit_pat(pat);
+}
+
+/// Unnest or-patterns `p0 | ... | p1` in the pattern `pat`.
+/// For example, this would transform `Some(0) | FOO | Some(2)` into `Some(0 | 2) | FOO`.
+fn unnest_or_patterns(pat: &mut P<Pat>) -> bool {
+ struct Visitor {
+ changed: bool,
+ }
+ impl MutVisitor for Visitor {
+ fn visit_pat(&mut self, p: &mut P<Pat>) {
+ // This is a bottom up transformation, so recurse first.
+ noop_visit_pat(p, self);
+
+ // Don't have an or-pattern? Just quit early on.
+ let alternatives = match &mut p.kind {
+ Or(ps) => ps,
+ _ => return,
+ };
+
+ // Collapse or-patterns directly nested in or-patterns.
+ let mut idx = 0;
+ let mut this_level_changed = false;
+ while idx < alternatives.len() {
+ let inner = if let Or(ps) = &mut alternatives[idx].kind {
+ mem::take(ps)
+ } else {
+ idx += 1;
+ continue;
+ };
+ this_level_changed = true;
+ alternatives.splice(idx..=idx, inner);
+ }
+
+ // Focus on `p_n` and then try to transform all `p_i` where `i > n`.
+ let mut focus_idx = 0;
+ while focus_idx < alternatives.len() {
+ this_level_changed |= transform_with_focus_on_idx(alternatives, focus_idx);
+ focus_idx += 1;
+ }
+ self.changed |= this_level_changed;
+
+ // Deal with `Some(Some(0)) | Some(Some(1))`.
+ if this_level_changed {
+ noop_visit_pat(p, self);
+ }
+ }
+ }
+
+ let mut visitor = Visitor { changed: false };
+ visitor.visit_pat(pat);
+ visitor.changed
+}
+
+/// Match `$scrutinee` against `$pat` and extract `$then` from it.
+/// Panics if there is no match.
+macro_rules! always_pat {
+ ($scrutinee:expr, $pat:pat => $then:expr) => {
+ match $scrutinee {
+ $pat => $then,
+ _ => unreachable!(),
+ }
+ };
+}
+
+/// Focus on `focus_idx` in `alternatives`,
+/// attempting to extend it with elements of the same constructor `C`
+/// in `alternatives[focus_idx + 1..]`.
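+/// For example, focusing on `Some(0)` in `Some(0) | Foo | Some(2)` drains the matching `Some(2)`
+/// and merges it into the focus, leaving `Some(0 | 2) | Foo`.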
+fn transform_with_focus_on_idx(alternatives: &mut Vec<P<Pat>>, focus_idx: usize) -> bool {
+ // Extract the kind; we'll need to make some changes in it.
+ let mut focus_kind = mem::replace(&mut alternatives[focus_idx].kind, PatKind::Wild);
+ // We'll focus on `alternatives[focus_idx]`,
+ // so we're draining from `alternatives[focus_idx + 1..]`.
+ let start = focus_idx + 1;
+
+ // We're trying to find patterns in `alternatives[start..]` with the same kind (~"constructor") as the focused one.
+ let changed = match &mut focus_kind {
+ // These pattern forms are "leaves" and do not have sub-patterns.
+ // Therefore they are not some form of constructor `C`,
+ // with which a pattern `C(p_0)` may be formed,
+ // which we would want to join with other `C(p_j)`s.
+ Ident(.., None) | Lit(_) | Wild | Path(..) | Range(..) | Rest | MacCall(_)
+ // Skip immutable refs, as grouping them saves few characters,
+ // and almost always requires adding parens (increasing noisiness).
+ // In the case of only two patterns, replacement adds net characters.
+ | Ref(_, Mutability::Not)
+ // Dealt with elsewhere.
+ | Or(_) | Paren(_) => false,
+ // Transform `box x | ... | box y` into `box (x | y)`.
+ //
+ // The cases below until `Slice(...)` deal with *singleton* products.
+ // These patterns have the shape `C(p)`, and not e.g., `C(p0, ..., pn)`.
+ Box(target) => extend_with_matching(
+ target, start, alternatives,
+ |k| matches!(k, Box(_)),
+ |k| always_pat!(k, Box(p) => p),
+ ),
+ // Transform `&mut x | ... | &mut y` into `&mut (x | y)`.
+ Ref(target, Mutability::Mut) => extend_with_matching(
+ target, start, alternatives,
+ |k| matches!(k, Ref(_, Mutability::Mut)),
+ |k| always_pat!(k, Ref(p, _) => p),
+ ),
+ // Transform `b @ p0 | ... | b @ p1` into `b @ (p0 | p1)`.
+ Ident(b1, i1, Some(target)) => extend_with_matching(
+ target, start, alternatives,
+ // Binding names must match.
+ |k| matches!(k, Ident(b2, i2, Some(_)) if b1 == b2 && eq_id(*i1, *i2)),
+ |k| always_pat!(k, Ident(_, _, Some(p)) => p),
+ ),
+ // Transform `[pre, x, post] | ... | [pre, y, post]` into `[pre, x | y, post]`.
+ Slice(ps1) => extend_with_matching_product(
+ ps1, start, alternatives,
+ |k, ps1, idx| matches!(k, Slice(ps2) if eq_pre_post(ps1, ps2, idx)),
+ |k| always_pat!(k, Slice(ps) => ps),
+ ),
+ // Transform `(pre, x, post) | ... | (pre, y, post)` into `(pre, x | y, post)`.
+ Tuple(ps1) => extend_with_matching_product(
+ ps1, start, alternatives,
+ |k, ps1, idx| matches!(k, Tuple(ps2) if eq_pre_post(ps1, ps2, idx)),
+ |k| always_pat!(k, Tuple(ps) => ps),
+ ),
+ // Transform `S(pre, x, post) | ... | S(pre, y, post)` into `S(pre, x | y, post)`.
+ TupleStruct(qself1, path1, ps1) => extend_with_matching_product(
+ ps1, start, alternatives,
+ |k, ps1, idx| matches!(
+ k,
+ TupleStruct(qself2, path2, ps2)
+ if eq_maybe_qself(qself1, qself2) && eq_path(path1, path2) && eq_pre_post(ps1, ps2, idx)
+ ),
+ |k| always_pat!(k, TupleStruct(_, _, ps) => ps),
+ ),
+ // Transform a record pattern `S { fp_0, ..., fp_n }`.
+ Struct(qself1, path1, fps1, rest1) => extend_with_struct_pat(qself1, path1, fps1, *rest1, start, alternatives),
+ };
+
+ alternatives[focus_idx].kind = focus_kind;
+ changed
+}
+
+/// Here we focus on a record pattern `S { fp_0, ..., fp_n }`.
+/// In particular, for a record pattern, the order in which the field patterns appear is irrelevant.
+/// So when we fixate on some `ident_k: pat_k`, we try to find `ident_k` in the other pattern
+/// and check that all `fp_i` where `i ∈ ((0...n) \ k)` are equal between the two patterns.
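+/// For example, `S { x: 0, y } | S { y, x: 1 }` can be collapsed into `S { x: 0 | 1, y }`.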
+fn extend_with_struct_pat(
+ qself1: &Option<ast::QSelf>,
+ path1: &ast::Path,
+ fps1: &mut [ast::PatField],
+ rest1: bool,
+ start: usize,
+ alternatives: &mut Vec<P<Pat>>,
+) -> bool {
+ (0..fps1.len()).any(|idx| {
+ let pos_in_2 = Cell::new(None); // The element `k`.
+ let tail_or = drain_matching(
+ start,
+ alternatives,
+ |k| {
+ matches!(k, Struct(qself2, path2, fps2, rest2)
+ if rest1 == *rest2 // If one struct pattern has `..` so must the other.
+ && eq_maybe_qself(qself1, qself2)
+ && eq_path(path1, path2)
+ && fps1.len() == fps2.len()
+ && fps1.iter().enumerate().all(|(idx_1, fp1)| {
+ if idx_1 == idx {
+ // In the case of `k`, we merely require identical field names
+ // so that we will transform into `ident_k: p1_k | p2_k`.
+ let pos = fps2.iter().position(|fp2| eq_id(fp1.ident, fp2.ident));
+ pos_in_2.set(pos);
+ pos.is_some()
+ } else {
+ fps2.iter().any(|fp2| eq_field_pat(fp1, fp2))
+ }
+ }))
+ },
+ // Extract `p2_k`.
+ |k| always_pat!(k, Struct(_, _, mut fps, _) => fps.swap_remove(pos_in_2.take().unwrap()).pat),
+ );
+ extend_with_tail_or(&mut fps1[idx].pat, tail_or)
+ })
+}
+
+/// Like `extend_with_matching` but for products with > 1 factor, e.g., `C(p_0, ..., p_n)`.
+/// Here, the idea is that we fixate on some `p_k` in `C`,
+/// allowing it to vary between the two products `targets` and `ps2` (returned by `extract`),
+/// while also requiring `ps1[..n] ~ ps2[..n]` (pre) and `ps1[n + 1..] ~ ps2[n + 1..]` (post),
+/// where `~` denotes semantic equality.
+fn extend_with_matching_product(
+ targets: &mut [P<Pat>],
+ start: usize,
+ alternatives: &mut Vec<P<Pat>>,
+ predicate: impl Fn(&PatKind, &[P<Pat>], usize) -> bool,
+ extract: impl Fn(PatKind) -> Vec<P<Pat>>,
+) -> bool {
+ (0..targets.len()).any(|idx| {
+ let tail_or = drain_matching(
+ start,
+ alternatives,
+ |k| predicate(k, targets, idx),
+ |k| extract(k).swap_remove(idx),
+ );
+ extend_with_tail_or(&mut targets[idx], tail_or)
+ })
+}
+
+/// Extract the pattern from the given one and replace it with `Wild`.
+/// This is meant for temporarily swapping out the pattern for manipulation.
+fn take_pat(from: &mut Pat) -> Pat {
+ let dummy = Pat {
+ id: DUMMY_NODE_ID,
+ kind: Wild,
+ span: DUMMY_SP,
+ tokens: None,
+ };
+ mem::replace(from, dummy)
+}
+
+/// Extend `target` as an or-pattern with the alternatives
+/// in `tail_or`, if there are any, and return whether anything was added.
+fn extend_with_tail_or(target: &mut Pat, tail_or: Vec<P<Pat>>) -> bool {
+ fn extend(target: &mut Pat, mut tail_or: Vec<P<Pat>>) {
+ match target {
+ // On an existing or-pattern in the target, append to it.
+ Pat { kind: Or(ps), .. } => ps.append(&mut tail_or),
+ // Otherwise convert the target to an or-pattern.
+ target => {
+ let mut init_or = vec![P(take_pat(target))];
+ init_or.append(&mut tail_or);
+ target.kind = Or(init_or);
+ },
+ }
+ }
+
+ let changed = !tail_or.is_empty();
+ if changed {
+ // Extend the target.
+ extend(target, tail_or);
+ }
+ changed
+}
+
+// Extract all inner patterns in `alternatives` matching our `predicate`.
+// Only elements at index `start` or later are considered for extraction.
+fn drain_matching(
+ start: usize,
+ alternatives: &mut Vec<P<Pat>>,
+ predicate: impl Fn(&PatKind) -> bool,
+ extract: impl Fn(PatKind) -> P<Pat>,
+) -> Vec<P<Pat>> {
+ let mut tail_or = vec![];
+ let mut idx = 0;
+ for pat in alternatives.drain_filter(|p| {
+ // Check if we should extract, but only if `idx >= start`.
+ idx += 1;
+ idx > start && predicate(&p.kind)
+ }) {
+ tail_or.push(extract(pat.into_inner().kind));
+ }
+ tail_or
+}
+
+fn extend_with_matching(
+ target: &mut Pat,
+ start: usize,
+ alternatives: &mut Vec<P<Pat>>,
+ predicate: impl Fn(&PatKind) -> bool,
+ extract: impl Fn(PatKind) -> P<Pat>,
+) -> bool {
+ extend_with_tail_or(target, drain_matching(start, alternatives, predicate, extract))
+}
+
+/// Are the patterns in `ps1` and `ps2` equal save for `ps1[idx]` compared to `ps2[idx]`?
+fn eq_pre_post(ps1: &[P<Pat>], ps2: &[P<Pat>], idx: usize) -> bool {
+ ps1.len() == ps2.len()
+ && ps1[idx].is_rest() == ps2[idx].is_rest() // Avoid `[x, ..] | [x, 0]` => `[x, .. | 0]`.
+ && over(&ps1[..idx], &ps2[..idx], |l, r| eq_pat(l, r))
+ && over(&ps1[idx + 1..], &ps2[idx + 1..], |l, r| eq_pat(l, r))
+}
diff --git a/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs b/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs
new file mode 100644
index 000000000..64f7a055c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unsafe_removed_from_name.rs
@@ -0,0 +1,79 @@
+use clippy_utils::diagnostics::span_lint;
+use rustc_ast::ast::{Item, ItemKind, UseTree, UseTreeKind};
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::symbol::Ident;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for imports that remove "unsafe" from an item's
+ /// name.
+ ///
+ /// ### Why is this bad?
+ /// Renaming makes it less clear which traits and
+ /// structures are unsafe.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use std::cell::{UnsafeCell as TotallySafeCell};
+ ///
+ /// extern crate crossbeam;
+ /// use crossbeam::{spawn_unsafe as spawn};
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNSAFE_REMOVED_FROM_NAME,
+ style,
+ "`unsafe` removed from API names on import"
+}
+
+declare_lint_pass!(UnsafeNameRemoval => [UNSAFE_REMOVED_FROM_NAME]);
+
+impl EarlyLintPass for UnsafeNameRemoval {
+ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
+ if let ItemKind::Use(ref use_tree) = item.kind {
+ check_use_tree(use_tree, cx, item.span);
+ }
+ }
+}
+
+fn check_use_tree(use_tree: &UseTree, cx: &EarlyContext<'_>, span: Span) {
+ match use_tree.kind {
+ UseTreeKind::Simple(Some(new_name), ..) => {
+ let old_name = use_tree
+ .prefix
+ .segments
+ .last()
+ .expect("use paths cannot be empty")
+ .ident;
+ unsafe_to_safe_check(old_name, new_name, cx, span);
+ },
+ UseTreeKind::Simple(None, ..) | UseTreeKind::Glob => {},
+ UseTreeKind::Nested(ref nested_use_tree) => {
+ for &(ref use_tree, _) in nested_use_tree {
+ check_use_tree(use_tree, cx, span);
+ }
+ },
+ }
+}
+
+fn unsafe_to_safe_check(old_name: Ident, new_name: Ident, cx: &EarlyContext<'_>, span: Span) {
+ let old_str = old_name.name.as_str();
+ let new_str = new_name.name.as_str();
+ if contains_unsafe(old_str) && !contains_unsafe(new_str) {
+ span_lint(
+ cx,
+ UNSAFE_REMOVED_FROM_NAME,
+ span,
+ &format!(
+ "removed `unsafe` from the name of `{}` in use as `{}`",
+ old_str, new_str
+ ),
+ );
+ }
+}
+
+#[must_use]
+fn contains_unsafe(name: &str) -> bool {
+ name.contains("Unsafe") || name.contains("unsafe")
+}
diff --git a/src/tools/clippy/clippy_lints/src/unused_async.rs b/src/tools/clippy/clippy_lints/src/unused_async.rs
new file mode 100644
index 000000000..a832dfccc
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unused_async.rs
@@ -0,0 +1,86 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, Visitor};
+use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId, IsAsync, YieldSource};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions that are declared `async` but have no `.await`s inside of them.
+ ///
+ /// ### Why is this bad?
+ /// Async functions with no async code create overhead, both mentally and computationally.
+ /// Callers of async methods either need to be in an async function themselves or run it on an executor, both of which
+ /// cause runtime overhead and hassle for the caller.
+ ///
+ /// ### Example
+ /// ```rust
+ /// async fn get_random_number() -> i64 {
+ /// 4 // Chosen by fair dice roll. Guaranteed to be random.
+ /// }
+ /// let number_future = get_random_number();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// fn get_random_number_improved() -> i64 {
+ /// 4 // Chosen by fair dice roll. Guaranteed to be random.
+ /// }
+ /// let number_future = async { get_random_number_improved() };
+ /// ```
+ #[clippy::version = "1.54.0"]
+ pub UNUSED_ASYNC,
+ pedantic,
+ "finds async functions with no await statements"
+}
+
+declare_lint_pass!(UnusedAsync => [UNUSED_ASYNC]);
+
+struct AsyncFnVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ found_await: bool,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for AsyncFnVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
+ if let ExprKind::Yield(_, YieldSource::Await { .. }) = ex.kind {
+ self.found_await = true;
+ }
+ walk_expr(self, ex);
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for UnusedAsync {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ fn_kind: FnKind<'tcx>,
+ fn_decl: &'tcx FnDecl<'tcx>,
+ body: &Body<'tcx>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ if !span.from_expansion() && fn_kind.asyncness() == IsAsync::Async {
+ let mut visitor = AsyncFnVisitor { cx, found_await: false };
+ walk_fn(&mut visitor, fn_kind, fn_decl, body.id(), span, hir_id);
+ if !visitor.found_await {
+ span_lint_and_help(
+ cx,
+ UNUSED_ASYNC,
+ span,
+ "unused `async` for function with no await statements",
+ None,
+ "consider removing the `async` from this function",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
new file mode 100644
index 000000000..323cf83ff
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
@@ -0,0 +1,170 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
+use clippy_utils::{is_try, match_trait_method, paths};
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unused written/read amount.
+ ///
+ /// ### Why is this bad?
+ /// `io::Write::write(_vectored)` and
+ /// `io::Read::read(_vectored)` are not guaranteed to
+ /// process the entire buffer. They return how many bytes were processed, which
+ /// might be smaller than a given buffer's length. If you don't need to deal with
+ /// partial-write/read, use `write_all`/`read_exact` instead.
+ ///
+ /// When working with asynchronous code (either with the `futures`
+ /// crate or with `tokio`), a similar issue exists for
+ /// `AsyncWriteExt::write()` and `AsyncReadExt::read()`: these
+ /// functions are also not guaranteed to process the entire
+ /// buffer. Your code should either handle partial-writes/reads, or
+ /// call the `write_all`/`read_exact` methods on those traits instead.
+ ///
+ /// ### Known problems
+ /// Detects only common patterns.
+ ///
+ /// ### Examples
+ /// ```rust,ignore
+ /// use std::io;
+ /// fn foo<W: io::Write>(w: &mut W) -> io::Result<()> {
+ /// // must be `w.write_all(b"foo")?;`
+ /// w.write(b"foo")?;
+ /// Ok(())
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNUSED_IO_AMOUNT,
+ correctness,
+ "unused written/read amount"
+}
+
+declare_lint_pass!(UnusedIoAmount => [UNUSED_IO_AMOUNT]);
+
+impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount {
+ fn check_stmt(&mut self, cx: &LateContext<'_>, s: &hir::Stmt<'_>) {
+ let expr = match s.kind {
+ hir::StmtKind::Semi(expr) | hir::StmtKind::Expr(expr) => expr,
+ _ => return,
+ };
+
+ match expr.kind {
+ hir::ExprKind::Match(res, _, _) if is_try(cx, expr).is_some() => {
+ if let hir::ExprKind::Call(func, [ref arg_0, ..]) = res.kind {
+ if matches!(
+ func.kind,
+ hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::TryTraitBranch, ..))
+ ) {
+ check_map_error(cx, arg_0, expr);
+ }
+ } else {
+ check_map_error(cx, res, expr);
+ }
+ },
+ hir::ExprKind::MethodCall(path, [ref arg_0, ..], _) => match path.ident.as_str() {
+ "expect" | "unwrap" | "unwrap_or" | "unwrap_or_else" => {
+ check_map_error(cx, arg_0, expr);
+ },
+ _ => (),
+ },
+ _ => (),
+ }
+ }
+}
+
+/// If `expr` is an `(e).await`, return the inner expression `e` that's being
+/// awaited. Otherwise return `None`.
+fn try_remove_await<'a>(expr: &'a hir::Expr<'a>) -> Option<&hir::Expr<'a>> {
+ if let hir::ExprKind::Match(expr, _, hir::MatchSource::AwaitDesugar) = expr.kind {
+ if let hir::ExprKind::Call(func, [ref arg_0, ..]) = expr.kind {
+ if matches!(
+ func.kind,
+ hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::IntoFutureIntoFuture, ..))
+ ) {
+ return Some(arg_0);
+ }
+ }
+ }
+
+ None
+}
+
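+/// Strip `.or(..)`, `.or_else(..)`, and `.ok()` adaptors (and a trailing `.await`) from `call`
+/// so that the check runs on the underlying `read`/`write` method call.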
+fn check_map_error(cx: &LateContext<'_>, call: &hir::Expr<'_>, expr: &hir::Expr<'_>) {
+ let mut call = call;
+ while let hir::ExprKind::MethodCall(path, args, _) = call.kind {
+ if matches!(path.ident.as_str(), "or" | "or_else" | "ok") {
+ call = &args[0];
+ } else {
+ break;
+ }
+ }
+
+ if let Some(call) = try_remove_await(call) {
+ check_method_call(cx, call, expr, true);
+ } else {
+ check_method_call(cx, call, expr, false);
+ }
+}
+
+fn check_method_call(cx: &LateContext<'_>, call: &hir::Expr<'_>, expr: &hir::Expr<'_>, is_await: bool) {
+ if let hir::ExprKind::MethodCall(path, _, _) = call.kind {
+ let symbol = path.ident.as_str();
+ let read_trait = if is_await {
+ match_trait_method(cx, call, &paths::FUTURES_IO_ASYNCREADEXT)
+ || match_trait_method(cx, call, &paths::TOKIO_IO_ASYNCREADEXT)
+ } else {
+ match_trait_method(cx, call, &paths::IO_READ)
+ };
+ let write_trait = if is_await {
+ match_trait_method(cx, call, &paths::FUTURES_IO_ASYNCWRITEEXT)
+ || match_trait_method(cx, call, &paths::TOKIO_IO_ASYNCWRITEEXT)
+ } else {
+ match_trait_method(cx, call, &paths::IO_WRITE)
+ };
+
+ match (read_trait, write_trait, symbol, is_await) {
+ (true, _, "read", false) => span_lint_and_help(
+ cx,
+ UNUSED_IO_AMOUNT,
+ expr.span,
+ "read amount is not handled",
+ None,
+ "use `Read::read_exact` instead, or handle partial reads",
+ ),
+ (true, _, "read", true) => span_lint_and_help(
+ cx,
+ UNUSED_IO_AMOUNT,
+ expr.span,
+ "read amount is not handled",
+ None,
+ "use `AsyncReadExt::read_exact` instead, or handle partial reads",
+ ),
+ (true, _, "read_vectored", _) => {
+ span_lint(cx, UNUSED_IO_AMOUNT, expr.span, "read amount is not handled");
+ },
+ (_, true, "write", false) => span_lint_and_help(
+ cx,
+ UNUSED_IO_AMOUNT,
+ expr.span,
+ "written amount is not handled",
+ None,
+ "use `Write::write_all` instead, or handle partial writes",
+ ),
+ (_, true, "write", true) => span_lint_and_help(
+ cx,
+ UNUSED_IO_AMOUNT,
+ expr.span,
+ "written amount is not handled",
+ None,
+ "use `AsyncWriteExt::write_all` instead, or handle partial writes",
+ ),
+ (_, true, "write_vectored", _) => {
+ span_lint(cx, UNUSED_IO_AMOUNT, expr.span, "written amount is not handled");
+ },
+ _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unused_rounding.rs b/src/tools/clippy/clippy_lints/src/unused_rounding.rs
new file mode 100644
index 000000000..306afe441
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unused_rounding.rs
@@ -0,0 +1,69 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use rustc_ast::ast::{Expr, ExprKind, LitFloatType, LitKind};
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ ///
+ /// Detects cases where a whole-number literal float is being rounded, using
+ /// the `floor`, `ceil`, or `round` methods.
+ ///
+ /// ### Why is this bad?
+ ///
+ /// This is unnecessary and confusing to the reader. Doing this is probably a mistake.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = 1f32.ceil();
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let x = 1f32;
+ /// ```
+ #[clippy::version = "1.62.0"]
+ pub UNUSED_ROUNDING,
+ nursery,
+ "Uselessly rounding a whole number floating-point literal"
+}
+declare_lint_pass!(UnusedRounding => [UNUSED_ROUNDING]);
+
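+/// Returns the rounding method's name and the literal text (including any type suffix) when the
+/// receiver is a whole-number float literal; returns `None` otherwise.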
+fn is_useless_rounding(expr: &Expr) -> Option<(&str, String)> {
+ if let ExprKind::MethodCall(name_ident, args, _) = &expr.kind
+ && let method_name = name_ident.ident.name.as_str()
+ && (method_name == "ceil" || method_name == "round" || method_name == "floor")
+ && !args.is_empty()
+ && let ExprKind::Lit(spanned) = &args[0].kind
+ && let LitKind::Float(symbol, ty) = spanned.kind {
+ let f = symbol.as_str().parse::<f64>().unwrap();
+ let f_str = symbol.to_string() + if let LitFloatType::Suffixed(ty) = ty {
+ ty.name_str()
+ } else {
+ ""
+ };
+ if f.fract() == 0.0 {
+ Some((method_name, f_str))
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+}
+
+impl EarlyLintPass for UnusedRounding {
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
+ if let Some((method_name, float)) = is_useless_rounding(expr) {
+ span_lint_and_sugg(
+ cx,
+ UNUSED_ROUNDING,
+ expr.span,
+ &format!("used the `{}` method with a whole number float", method_name),
+ &format!("remove the `{}` method call", method_name),
+ float,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unused_self.rs b/src/tools/clippy/clippy_lints/src/unused_self.rs
new file mode 100644
index 000000000..51c65d898
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unused_self.rs
@@ -0,0 +1,80 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::visitors::is_local_used;
+use if_chain::if_chain;
+use rustc_hir::{Impl, ImplItem, ImplItemKind, ItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks methods that contain a `self` argument but don't use it
+ ///
+ /// ### Why is this bad?
+ /// It may be clearer to define the method as an associated function instead
+ /// of an instance method if it doesn't require `self`.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// struct A;
+ /// impl A {
+ /// fn method(&self) {}
+ /// }
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```rust,ignore
+ /// struct A;
+ /// impl A {
+ /// fn method() {}
+ /// }
+ /// ```
+ #[clippy::version = "1.40.0"]
+ pub UNUSED_SELF,
+ pedantic,
+ "methods that contain a `self` argument but don't use it"
+}
+
+pub struct UnusedSelf {
+ avoid_breaking_exported_api: bool,
+}
+
+impl_lint_pass!(UnusedSelf => [UNUSED_SELF]);
+
+impl UnusedSelf {
+ pub fn new(avoid_breaking_exported_api: bool) -> Self {
+ Self {
+ avoid_breaking_exported_api,
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for UnusedSelf {
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &ImplItem<'_>) {
+ if impl_item.span.from_expansion() {
+ return;
+ }
+ let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id());
+ let parent_item = cx.tcx.hir().expect_item(parent);
+ let assoc_item = cx.tcx.associated_item(impl_item.def_id);
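+ // Only lint methods in inherent impls; trait impls are skipped, as are exported methods
+ // when `avoid_breaking_exported_api` is enabled.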
+ if_chain! {
+ if let ItemKind::Impl(Impl { of_trait: None, .. }) = parent_item.kind;
+ if assoc_item.fn_has_self_parameter;
+ if let ImplItemKind::Fn(.., body_id) = &impl_item.kind;
+ if !cx.access_levels.is_exported(impl_item.def_id) || !self.avoid_breaking_exported_api;
+ let body = cx.tcx.hir().body(*body_id);
+ if let [self_param, ..] = body.params;
+ if !is_local_used(cx, body, self_param.pat.hir_id);
+ then {
+ span_lint_and_help(
+ cx,
+ UNUSED_SELF,
+ self_param.span,
+ "unused `self` argument",
+ None,
+ "consider refactoring to a associated function",
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unused_unit.rs b/src/tools/clippy/clippy_lints/src/unused_unit.rs
new file mode 100644
index 000000000..52585e595
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unused_unit.rs
@@ -0,0 +1,148 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{position_before_rarrow, snippet_opt};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_ast::visit::FnKind;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::BytePos;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unit (`()`) expressions that can be removed.
+ ///
+ /// ### Why is this bad?
+ /// Such expressions add no value, but can make the code
+ /// less readable. Depending on formatting they can make a `break` or `return`
+ /// statement look like a function call.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn return_unit() -> () {
+ /// ()
+ /// }
+ /// ```
+ /// is equivalent to
+ /// ```rust
+ /// fn return_unit() {}
+ /// ```
+ #[clippy::version = "1.31.0"]
+ pub UNUSED_UNIT,
+ style,
+ "needless unit expression"
+}
+
+declare_lint_pass!(UnusedUnit => [UNUSED_UNIT]);
+
+impl EarlyLintPass for UnusedUnit {
+ fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, span: Span, _: ast::NodeId) {
+ if_chain! {
+ if let ast::FnRetTy::Ty(ref ty) = kind.decl().output;
+ if let ast::TyKind::Tup(ref vals) = ty.kind;
+ if vals.is_empty() && !ty.span.from_expansion() && get_def(span) == get_def(ty.span);
+ then {
+ lint_unneeded_unit_return(cx, ty, span);
+ }
+ }
+ }
+
+ fn check_block(&mut self, cx: &EarlyContext<'_>, block: &ast::Block) {
+ if_chain! {
+ if let Some(stmt) = block.stmts.last();
+ if let ast::StmtKind::Expr(ref expr) = stmt.kind;
+ if is_unit_expr(expr);
+ let ctxt = block.span.ctxt();
+ if stmt.span.ctxt() == ctxt && expr.span.ctxt() == ctxt;
+ then {
+ let sp = expr.span;
+ span_lint_and_sugg(
+ cx,
+ UNUSED_UNIT,
+ sp,
+ "unneeded unit expression",
+ "remove the final `()`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
+ match e.kind {
+ ast::ExprKind::Ret(Some(ref expr)) | ast::ExprKind::Break(_, Some(ref expr)) => {
+ if is_unit_expr(expr) && !expr.span.from_expansion() {
+ span_lint_and_sugg(
+ cx,
+ UNUSED_UNIT,
+ expr.span,
+ "unneeded `()`",
+ "remove the `()`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ _ => (),
+ }
+ }
+
+ fn check_poly_trait_ref(&mut self, cx: &EarlyContext<'_>, poly: &ast::PolyTraitRef, _: &ast::TraitBoundModifier) {
+ let segments = &poly.trait_ref.path.segments;
+
+ if_chain! {
+ if segments.len() == 1;
+ if ["Fn", "FnMut", "FnOnce"].contains(&segments[0].ident.name.as_str());
+ if let Some(args) = &segments[0].args;
+ if let ast::GenericArgs::Parenthesized(generic_args) = &**args;
+ if let ast::FnRetTy::Ty(ty) = &generic_args.output;
+ if ty.kind.is_unit();
+ then {
+ lint_unneeded_unit_return(cx, ty, generic_args.span);
+ }
+ }
+ }
+}
+
+// get the def site
+#[must_use]
+fn get_def(span: Span) -> Option<Span> {
+ if span.from_expansion() {
+ Some(span.ctxt().outer_expn_data().def_site)
+ } else {
+ None
+ }
+}
+
+// is this expr a `()` unit?
+fn is_unit_expr(expr: &ast::Expr) -> bool {
+ if let ast::ExprKind::Tup(ref vals) = expr.kind {
+ vals.is_empty()
+ } else {
+ false
+ }
+}
+
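+// Emit the lint, suggesting removal of the whole `-> ()`: the span is widened to include the `->`
+// when it can be located in the source snippet, otherwise only the type's span is used with a
+// weaker applicability.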
+fn lint_unneeded_unit_return(cx: &EarlyContext<'_>, ty: &ast::Ty, span: Span) {
+ let (ret_span, appl) =
+ snippet_opt(cx, span.with_hi(ty.span.hi())).map_or((ty.span, Applicability::MaybeIncorrect), |fn_source| {
+ position_before_rarrow(&fn_source).map_or((ty.span, Applicability::MaybeIncorrect), |rpos| {
+ (
+ #[expect(clippy::cast_possible_truncation)]
+ ty.span.with_lo(BytePos(span.lo().0 + rpos as u32)),
+ Applicability::MachineApplicable,
+ )
+ })
+ });
+ span_lint_and_sugg(
+ cx,
+ UNUSED_UNIT,
+ ret_span,
+ "unneeded unit return type",
+ "remove the `-> ()`",
+ String::new(),
+ appl,
+ );
+}
diff --git a/src/tools/clippy/clippy_lints/src/unwrap.rs b/src/tools/clippy/clippy_lints/src/unwrap.rs
new file mode 100644
index 000000000..d3f9e5abf
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unwrap.rs
@@ -0,0 +1,331 @@
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::higher;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{path_to_local, usage::is_potentially_mutated};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, Visitor};
+use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, PathSegment, UnOp};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::Ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls of `unwrap[_err]()` that cannot fail.
+ ///
+ /// ### Why is this bad?
+ /// Using `if let` or `match` is more idiomatic.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let option = Some(0);
+ /// # fn do_something_with(_x: usize) {}
+ /// if option.is_some() {
+ /// do_something_with(option.unwrap())
+ /// }
+ /// ```
+ ///
+ /// Could be written:
+ ///
+ /// ```rust
+ /// # let option = Some(0);
+ /// # fn do_something_with(_x: usize) {}
+ /// if let Some(value) = option {
+ /// do_something_with(value)
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub UNNECESSARY_UNWRAP,
+ complexity,
+ "checks for calls of `unwrap[_err]()` that cannot fail"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls of `unwrap[_err]()` that will always fail.
+ ///
+ /// ### Why is this bad?
+ /// If panicking is desired, an explicit `panic!()` should be used.
+ ///
+ /// ### Known problems
+ /// This lint only checks `if` conditions, not assignments.
+ /// So something like `let x: Option<()> = None; x.unwrap();` will not be recognized.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let option = Some(0);
+ /// # fn do_something_with(_x: usize) {}
+ /// if option.is_none() {
+ /// do_something_with(option.unwrap())
+ /// }
+ /// ```
+ ///
+ /// This code will always panic. The if condition should probably be inverted.
+ #[clippy::version = "pre 1.29.0"]
+ pub PANICKING_UNWRAP,
+ correctness,
+ "checks for calls of `unwrap[_err]()` that will always fail"
+}
+
+/// Visitor that keeps track of which variables are unwrappable.
+struct UnwrappableVariablesVisitor<'a, 'tcx> {
+ unwrappables: Vec<UnwrapInfo<'tcx>>,
+ cx: &'a LateContext<'tcx>,
+}
+
+/// What kind of unwrappable this is.
+#[derive(Copy, Clone, Debug)]
+enum UnwrappableKind {
+ Option,
+ Result,
+}
+
+impl UnwrappableKind {
+ fn success_variant_pattern(self) -> &'static str {
+ match self {
+ UnwrappableKind::Option => "Some(..)",
+ UnwrappableKind::Result => "Ok(..)",
+ }
+ }
+
+ fn error_variant_pattern(self) -> &'static str {
+ match self {
+ UnwrappableKind::Option => "None",
+ UnwrappableKind::Result => "Err(..)",
+ }
+ }
+}
+
+/// Contains information about whether a variable can be unwrapped.
+#[derive(Copy, Clone, Debug)]
+struct UnwrapInfo<'tcx> {
+ /// The variable that is checked
+ local_id: HirId,
+ /// The if itself
+ if_expr: &'tcx Expr<'tcx>,
+ /// The check, like `x.is_ok()`
+ check: &'tcx Expr<'tcx>,
+ /// The check's name, like `is_ok`
+ check_name: &'tcx PathSegment<'tcx>,
+ /// The branch where the check takes place, like `if x.is_ok() { .. }`
+ branch: &'tcx Expr<'tcx>,
+ /// Whether `is_some()` or `is_ok()` was called (as opposed to `is_err()` or `is_none()`).
+ safe_to_unwrap: bool,
+ /// What kind of unwrappable this is.
+ kind: UnwrappableKind,
+ /// If the check is the entire condition (`if x.is_ok()`) or only a part of it (`foo() &&
+ /// x.is_ok()`)
+ is_entire_condition: bool,
+}
+
+ /// Collects the information about unwrappable variables from an if condition.
+/// The `invert` argument tells us whether the condition is negated.
+fn collect_unwrap_info<'tcx>(
+ cx: &LateContext<'tcx>,
+ if_expr: &'tcx Expr<'_>,
+ expr: &'tcx Expr<'_>,
+ branch: &'tcx Expr<'_>,
+ invert: bool,
+ is_entire_condition: bool,
+) -> Vec<UnwrapInfo<'tcx>> {
+ fn is_relevant_option_call(cx: &LateContext<'_>, ty: Ty<'_>, method_name: &str) -> bool {
+ is_type_diagnostic_item(cx, ty, sym::Option) && ["is_some", "is_none"].contains(&method_name)
+ }
+
+ fn is_relevant_result_call(cx: &LateContext<'_>, ty: Ty<'_>, method_name: &str) -> bool {
+ is_type_diagnostic_item(cx, ty, sym::Result) && ["is_ok", "is_err"].contains(&method_name)
+ }
+
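+ // Descend into the condition: `&&`/`&` preserves the truth of both operands, and by De Morgan
+ // a negated `||`/`|` behaves the same way, so both sides can be collected independently.
+ // A `!` simply flips `invert` for its operand.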
+ if let ExprKind::Binary(op, left, right) = &expr.kind {
+ match (invert, op.node) {
+ (false, BinOpKind::And | BinOpKind::BitAnd) | (true, BinOpKind::Or | BinOpKind::BitOr) => {
+ let mut unwrap_info = collect_unwrap_info(cx, if_expr, left, branch, invert, false);
+ unwrap_info.append(&mut collect_unwrap_info(cx, if_expr, right, branch, invert, false));
+ return unwrap_info;
+ },
+ _ => (),
+ }
+ } else if let ExprKind::Unary(UnOp::Not, expr) = &expr.kind {
+ return collect_unwrap_info(cx, if_expr, expr, branch, !invert, false);
+ } else {
+ if_chain! {
+ if let ExprKind::MethodCall(method_name, args, _) = &expr.kind;
+ if let Some(local_id) = path_to_local(&args[0]);
+ let ty = cx.typeck_results().expr_ty(&args[0]);
+ let name = method_name.ident.as_str();
+ if is_relevant_option_call(cx, ty, name) || is_relevant_result_call(cx, ty, name);
+ then {
+ assert!(args.len() == 1);
+ let unwrappable = match name {
+ "is_some" | "is_ok" => true,
+ "is_err" | "is_none" => false,
+ _ => unreachable!(),
+ };
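+ // `is_some`/`is_ok` guarantee the success variant only when the check is not negated,
+ // `is_none`/`is_err` only when it is; XOR-ing with `invert` covers both cases.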
+ let safe_to_unwrap = unwrappable != invert;
+ let kind = if is_type_diagnostic_item(cx, ty, sym::Option) {
+ UnwrappableKind::Option
+ } else {
+ UnwrappableKind::Result
+ };
+
+ return vec![
+ UnwrapInfo {
+ local_id,
+ if_expr,
+ check: expr,
+ check_name: method_name,
+ branch,
+ safe_to_unwrap,
+ kind,
+ is_entire_condition,
+ }
+ ]
+ }
+ }
+ }
+ Vec::new()
+}
+
+impl<'a, 'tcx> UnwrappableVariablesVisitor<'a, 'tcx> {
+ fn visit_branch(
+ &mut self,
+ if_expr: &'tcx Expr<'_>,
+ cond: &'tcx Expr<'_>,
+ branch: &'tcx Expr<'_>,
+ else_branch: bool,
+ ) {
+ let prev_len = self.unwrappables.len();
+ for unwrap_info in collect_unwrap_info(self.cx, if_expr, cond, branch, else_branch, true) {
+ if is_potentially_mutated(unwrap_info.local_id, cond, self.cx)
+ || is_potentially_mutated(unwrap_info.local_id, branch, self.cx)
+ {
+ // if the variable is mutated, we don't know whether it can be unwrapped:
+ continue;
+ }
+ self.unwrappables.push(unwrap_info);
+ }
+ walk_expr(self, branch);
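+ // anything collected from this condition is only valid inside this branch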
+ self.unwrappables.truncate(prev_len);
+ }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for UnwrappableVariablesVisitor<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ // Shouldn't lint when `expr` is in macro.
+ if in_external_macro(self.cx.tcx.sess, expr.span) {
+ return;
+ }
+ if let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr) {
+ walk_expr(self, cond);
+ self.visit_branch(expr, cond, then, false);
+ if let Some(else_inner) = r#else {
+ self.visit_branch(expr, cond, else_inner, true);
+ }
+ } else {
+ // find `unwrap[_err]()` calls:
+ if_chain! {
+ if let ExprKind::MethodCall(method_name, [self_arg, ..], _) = expr.kind;
+ if let Some(id) = path_to_local(self_arg);
+ if [sym::unwrap, sym::expect, sym!(unwrap_err)].contains(&method_name.ident.name);
+ let call_to_unwrap = [sym::unwrap, sym::expect].contains(&method_name.ident.name);
+ if let Some(unwrappable) = self.unwrappables.iter()
+ .find(|u| u.local_id == id);
+ // Span contexts should not differ with the conditional branch
+ let span_ctxt = expr.span.ctxt();
+ if unwrappable.branch.span.ctxt() == span_ctxt;
+ if unwrappable.check.span.ctxt() == span_ctxt;
+ then {
+ if call_to_unwrap == unwrappable.safe_to_unwrap {
+ let is_entire_condition = unwrappable.is_entire_condition;
+ let unwrappable_variable_name = self.cx.tcx.hir().name(unwrappable.local_id);
+ let suggested_pattern = if call_to_unwrap {
+ unwrappable.kind.success_variant_pattern()
+ } else {
+ unwrappable.kind.error_variant_pattern()
+ };
+
+ span_lint_hir_and_then(
+ self.cx,
+ UNNECESSARY_UNWRAP,
+ expr.hir_id,
+ expr.span,
+ &format!(
+ "called `{}` on `{}` after checking its variant with `{}`",
+ method_name.ident.name,
+ unwrappable_variable_name,
+ unwrappable.check_name.ident.as_str(),
+ ),
+ |diag| {
+ if is_entire_condition {
+ diag.span_suggestion(
+ unwrappable.check.span.with_lo(unwrappable.if_expr.span.lo()),
+ "try",
+ format!(
+ "if let {} = {}",
+ suggested_pattern,
+ unwrappable_variable_name,
+ ),
+ // We don't track how the unwrapped value is used inside the
+ // block or suggest deleting the unwrap, so we can't offer a
+ // fixable solution.
+ Applicability::Unspecified,
+ );
+ } else {
+ diag.span_label(unwrappable.check.span, "the check is happening here");
+ diag.help("try using `if let` or `match`");
+ }
+ },
+ );
+ } else {
+ span_lint_hir_and_then(
+ self.cx,
+ PANICKING_UNWRAP,
+ expr.hir_id,
+ expr.span,
+ &format!("this call to `{}()` will always panic",
+ method_name.ident.name),
+ |diag| { diag.span_label(unwrappable.check.span, "because of this check"); },
+ );
+ }
+ }
+ }
+ walk_expr(self, expr);
+ }
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+declare_lint_pass!(Unwrap => [PANICKING_UNWRAP, UNNECESSARY_UNWRAP]);
+
+impl<'tcx> LateLintPass<'tcx> for Unwrap {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'_>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ fn_id: HirId,
+ ) {
+ if span.from_expansion() {
+ return;
+ }
+
+ let mut v = UnwrappableVariablesVisitor {
+ cx,
+ unwrappables: Vec::new(),
+ };
+
+ walk_fn(&mut v, kind, decl, body.id(), span, fn_id);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs b/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs
new file mode 100644
index 000000000..b32be238c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/unwrap_in_result.rs
@@ -0,0 +1,133 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{method_chain_args, return_ty};
+use if_chain::if_chain;
+use rustc_hir as hir;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::{Expr, ImplItemKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::{sym, Span};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for functions of type `Result` that contain `expect()` or `unwrap()`
+ ///
+ /// ### Why is this bad?
+ /// These functions promote recoverable errors to non-recoverable errors, which may be undesirable in code bases that wish to avoid panics.
+ ///
+ /// ### Known problems
+ /// This can cause false positives in functions that handle both recoverable and non-recoverable errors.
+ ///
+ /// ### Example
+ /// Before:
+ /// ```rust
+ /// fn divisible_by_3(i_str: String) -> Result<(), String> {
+ /// let i = i_str
+ /// .parse::<i32>()
+ /// .expect("cannot divide the input by three");
+ ///
+ /// if i % 3 != 0 {
+ /// Err("Number is not divisible by 3")?
+ /// }
+ ///
+ /// Ok(())
+ /// }
+ /// ```
+ ///
+ /// After:
+ /// ```rust
+ /// fn divisible_by_3(i_str: String) -> Result<(), String> {
+ /// let i = i_str
+ /// .parse::<i32>()
+ /// .map_err(|e| format!("cannot divide the input by three: {}", e))?;
+ ///
+ /// if i % 3 != 0 {
+ /// Err("Number is not divisible by 3")?
+ /// }
+ ///
+ /// Ok(())
+ /// }
+ /// ```
+ #[clippy::version = "1.48.0"]
+ pub UNWRAP_IN_RESULT,
+ restriction,
+ "functions of type `Result<..>` or `Option<..>` that contain `expect()` or `unwrap()`"
+}
+
+declare_lint_pass!(UnwrapInResult => [UNWRAP_IN_RESULT]);
+
+impl<'tcx> LateLintPass<'tcx> for UnwrapInResult {
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx hir::ImplItem<'_>) {
+ if_chain! {
+ // first check if it's a method or function
+ if let hir::ImplItemKind::Fn(ref _signature, _) = impl_item.kind;
+ // checking if its return type is `Result` or `Option`
+ if is_type_diagnostic_item(cx, return_ty(cx, impl_item.hir_id()), sym::Result)
+ || is_type_diagnostic_item(cx, return_ty(cx, impl_item.hir_id()), sym::Option);
+ then {
+ lint_impl_body(cx, impl_item.span, impl_item);
+ }
+ }
+ }
+}
+
+struct FindExpectUnwrap<'a, 'tcx> {
+ lcx: &'a LateContext<'tcx>,
+ typeck_results: &'tcx ty::TypeckResults<'tcx>,
+ result: Vec<Span>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for FindExpectUnwrap<'a, 'tcx> {
+ fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
+ // check for `expect`
+ if let Some(arglists) = method_chain_args(expr, &["expect"]) {
+ let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+ if is_type_diagnostic_item(self.lcx, receiver_ty, sym::Option)
+ || is_type_diagnostic_item(self.lcx, receiver_ty, sym::Result)
+ {
+ self.result.push(expr.span);
+ }
+ }
+
+ // check for `unwrap`
+ if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
+ let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+ if is_type_diagnostic_item(self.lcx, receiver_ty, sym::Option)
+ || is_type_diagnostic_item(self.lcx, receiver_ty, sym::Result)
+ {
+ self.result.push(expr.span);
+ }
+ }
+
+ // and check sub-expressions
+ intravisit::walk_expr(self, expr);
+ }
+}
+
+fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, impl_item: &'tcx hir::ImplItem<'_>) {
+ if let ImplItemKind::Fn(_, body_id) = impl_item.kind {
+ let body = cx.tcx.hir().body(body_id);
+ let mut fpu = FindExpectUnwrap {
+ lcx: cx,
+ typeck_results: cx.tcx.typeck(impl_item.def_id),
+ result: Vec::new(),
+ };
+ fpu.visit_expr(&body.value);
+
+ // if we've found one, lint
+ if !fpu.result.is_empty() {
+ span_lint_and_then(
+ cx,
+ UNWRAP_IN_RESULT,
+ impl_span,
+ "used unwrap or expect in a function that returns result or option",
+ move |diag| {
+ diag.help("unwrap and expect should not be used in a function that returns result or option");
+ diag.span_note(fpu.result, "potential non-recoverable error(s)");
+ },
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs b/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs
new file mode 100644
index 000000000..02bf09ed5
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs
@@ -0,0 +1,127 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use itertools::Itertools;
+use rustc_errors::Applicability;
+use rustc_hir::{Item, ItemKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::Ident;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for fully capitalized names and optionally names containing a capitalized acronym.
+ ///
+ /// ### Why is this bad?
+ /// In CamelCase, acronyms count as one word.
+ /// See [naming conventions](https://rust-lang.github.io/api-guidelines/naming.html#casing-conforms-to-rfc-430-c-case)
+ /// for more.
+ ///
+ /// By default, the lint only triggers on fully-capitalized names.
+ /// You can use the `upper-case-acronyms-aggressive: true` config option to enable linting
+ /// on all camel case names.
+ ///
+ /// ### Known problems
+ /// When two acronyms are contiguous, the lint can't tell where
+ /// the first acronym ends and the second starts, so it suggests lowercasing all of
+ /// the letters in the second acronym.
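+ /// For example, with the aggressive option, `HTTPAPIFoo` gets the suggestion `HttpapiFoo`
+ /// instead of the intended `HttpApiFoo`.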
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct HTTPResponse;
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// struct HttpResponse;
+ /// ```
+ #[clippy::version = "1.51.0"]
+ pub UPPER_CASE_ACRONYMS,
+ style,
+ "capitalized acronyms are against the naming convention"
+}
+
+#[derive(Default)]
+pub struct UpperCaseAcronyms {
+ avoid_breaking_exported_api: bool,
+ upper_case_acronyms_aggressive: bool,
+}
+
+impl UpperCaseAcronyms {
+ pub fn new(avoid_breaking_exported_api: bool, aggressive: bool) -> Self {
+ Self {
+ avoid_breaking_exported_api,
+ upper_case_acronyms_aggressive: aggressive,
+ }
+ }
+}
+
+impl_lint_pass!(UpperCaseAcronyms => [UPPER_CASE_ACRONYMS]);
+
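+/// Builds the suggested spelling: the identifier is reversed and split after every non-lowercase
+/// character, which (once each fragment is reversed back) yields fragments that each start with
+/// at most one non-lowercase character. Within a run of consecutive single-uppercase-letter
+/// fragments, the first letter is kept and the rest are lowercased, turning e.g. `HTTP` into `Http`.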
+fn correct_ident(ident: &str) -> String {
+ let ident = ident.chars().rev().collect::<String>();
+ let fragments = ident
+ .split_inclusive(|x: char| !x.is_ascii_lowercase())
+ .rev()
+ .map(|x| x.chars().rev().collect::<String>());
+
+ let mut ident = fragments.clone().next().unwrap();
+ for (ref prev, ref curr) in fragments.tuple_windows() {
+ if [prev, curr]
+ .iter()
+ .all(|s| s.len() == 1 && s.chars().next().unwrap().is_ascii_uppercase())
+ {
+ ident.push_str(&curr.to_ascii_lowercase());
+ } else {
+ ident.push_str(curr);
+ }
+ }
+ ident
+}
+
+fn check_ident(cx: &LateContext<'_>, ident: &Ident, be_aggressive: bool) {
+ let span = ident.span;
+ let ident = ident.as_str();
+ let corrected = correct_ident(ident);
+ // warn if we have pure-uppercase idents
+ // assume that two-letter words are some kind of valid abbreviation like FP for false positive
+ // (and don't warn)
+ if (ident.chars().all(|c| c.is_ascii_uppercase()) && ident.len() > 2)
+ // otherwise, warn if we have SOmeTHING lIKE THIs but only warn with the aggressive
+ // upper-case-acronyms-aggressive config option enabled
+ || (be_aggressive && ident != corrected)
+ {
+ span_lint_and_sugg(
+ cx,
+ UPPER_CASE_ACRONYMS,
+ span,
+ &format!("name `{}` contains a capitalized acronym", ident),
+ "consider making the acronym lowercase, except the initial letter",
+ corrected,
+ Applicability::MaybeIncorrect,
+ );
+ }
+}
+
+impl LateLintPass<'_> for UpperCaseAcronyms {
+ fn check_item(&mut self, cx: &LateContext<'_>, it: &Item<'_>) {
+ // do not lint public items or in macros
+ if in_external_macro(cx.sess(), it.span)
+ || (self.avoid_breaking_exported_api && cx.access_levels.is_exported(it.def_id))
+ {
+ return;
+ }
+ match it.kind {
+ ItemKind::TyAlias(..) | ItemKind::Struct(..) | ItemKind::Trait(..) => {
+ check_ident(cx, &it.ident, self.upper_case_acronyms_aggressive);
+ },
+ ItemKind::Enum(ref enumdef, _) => {
+ // check enum variants separately because we only want to lint private enums, and
+ // the per-variant check does not know the visibility of the enclosing enum
+ enumdef
+ .variants
+ .iter()
+ .for_each(|variant| check_ident(cx, &variant.ident, self.upper_case_acronyms_aggressive));
+ },
+ _ => {},
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/use_self.rs b/src/tools/clippy/clippy_lints/src/use_self.rs
new file mode 100644
index 000000000..486ea5e5c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/use_self.rs
@@ -0,0 +1,320 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::same_type_and_consts;
+use clippy_utils::{meets_msrv, msrvs};
+use if_chain::if_chain;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_errors::Applicability;
+use rustc_hir::{
+ self as hir,
+ def::{CtorOf, DefKind, Res},
+ def_id::LocalDefId,
+ intravisit::{walk_inf, walk_ty, Visitor},
+ Expr, ExprKind, FnRetTy, FnSig, GenericArg, HirId, Impl, ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath,
+ TyKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::Span;
+use rustc_typeck::hir_ty_to_ty;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary repetition of structure name when a
+ /// replacement with `Self` is applicable.
+ ///
+ /// ### Why is this bad?
+ /// Unnecessary repetition. Mixed use of `Self` and struct name
+ /// feels inconsistent.
+ ///
+ /// ### Known problems
+ /// - Unaddressed false negative in fn bodies of trait implementations
+ /// - False positive with associated types in traits (#4140)
+ ///
+ /// ### Example
+ /// ```rust
+ /// struct Foo;
+ /// impl Foo {
+ /// fn new() -> Foo {
+ /// Foo {}
+ /// }
+ /// }
+ /// ```
+ /// could be
+ /// ```rust
+ /// struct Foo;
+ /// impl Foo {
+ /// fn new() -> Self {
+ /// Self {}
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USE_SELF,
+ nursery,
+ "unnecessary structure name repetition whereas `Self` is applicable"
+}
+
+#[derive(Default)]
+pub struct UseSelf {
+ msrv: Option<RustcVersion>,
+ stack: Vec<StackItem>,
+}
+
+impl UseSelf {
+ #[must_use]
+ pub fn new(msrv: Option<RustcVersion>) -> Self {
+ Self {
+ msrv,
+ ..Self::default()
+ }
+ }
+}
+
+#[derive(Debug)]
+enum StackItem {
+ Check {
+ impl_id: LocalDefId,
+ in_body: u32,
+ types_to_skip: FxHashSet<HirId>,
+ },
+ NoCheck,
+}
+
+impl_lint_pass!(UseSelf => [USE_SELF]);
+
+const SEGMENTS_MSG: &str = "segments should be composed of at least 1 element";
+
+impl<'tcx> LateLintPass<'tcx> for UseSelf {
+ fn check_item(&mut self, _cx: &LateContext<'_>, item: &Item<'_>) {
+ if matches!(item.kind, ItemKind::OpaqueTy(_)) {
+ // skip over `ItemKind::OpaqueTy` in order to lint `foo() -> impl <..>`
+ return;
+ }
+ // We push the self types of `impl`s on a stack here. Only the top type on the stack is
+ // relevant for linting, since this is the self type of the `impl` we're currently in. To
+ // avoid linting on nested items, we push `StackItem::NoCheck` on the stack to signal that
+ // we're in an `impl` or nested item that we don't want to lint
+ let stack_item = if_chain! {
+ if let ItemKind::Impl(Impl { self_ty, .. }) = item.kind;
+ if let TyKind::Path(QPath::Resolved(_, item_path)) = self_ty.kind;
+ let parameters = &item_path.segments.last().expect(SEGMENTS_MSG).args;
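+ // only check `impl`s whose self-type path has no parenthesized (`Fn`-sugar) arguments and
+ // no lifetime arguments, since replacing such a type with a bare `Self` may not apply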
+ if parameters.as_ref().map_or(true, |params| {
+ !params.parenthesized && !params.args.iter().any(|arg| matches!(arg, GenericArg::Lifetime(_)))
+ });
+ then {
+ StackItem::Check {
+ impl_id: item.def_id,
+ in_body: 0,
+ types_to_skip: std::iter::once(self_ty.hir_id).collect(),
+ }
+ } else {
+ StackItem::NoCheck
+ }
+ };
+ self.stack.push(stack_item);
+ }
+
+ fn check_item_post(&mut self, _: &LateContext<'_>, item: &Item<'_>) {
+ if !matches!(item.kind, ItemKind::OpaqueTy(_)) {
+ self.stack.pop();
+ }
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &hir::ImplItem<'_>) {
+ // We want to skip types in trait `impl`s that aren't declared as `Self` in the trait
+ // declaration. The collection of those types is all this method implementation does.
+ if_chain! {
+ if let ImplItemKind::Fn(FnSig { decl, .. }, ..) = impl_item.kind;
+ if let Some(&mut StackItem::Check {
+ impl_id,
+ ref mut types_to_skip,
+ ..
+ }) = self.stack.last_mut();
+ if let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(impl_id);
+ then {
+ // `self_ty` is the semantic self type of `impl <trait> for <type>`. This cannot be
+ // `Self`.
+ let self_ty = impl_trait_ref.self_ty();
+
+ // `trait_method_sig` is the signature of the function as it is declared in the
+ // trait, not in the impl of the trait.
+ let trait_method = cx
+ .tcx
+ .associated_item(impl_item.def_id)
+ .trait_item_def_id
+ .expect("impl method matches a trait method");
+ let trait_method_sig = cx.tcx.fn_sig(trait_method);
+ let trait_method_sig = cx.tcx.erase_late_bound_regions(trait_method_sig);
+
+ // `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the
+ // implementation of the trait.
+ let output_hir_ty = if let FnRetTy::Return(ty) = &decl.output {
+ Some(&**ty)
+ } else {
+ None
+ };
+ let impl_inputs_outputs = decl.inputs.iter().chain(output_hir_ty);
+
+ // `impl_hir_ty` (of type `hir::Ty`) represents the type written in the signature.
+ //
+ // `trait_sem_ty` (of type `ty::Ty`) is the semantic type for the signature in the
+ // trait declaration. This is used to check if `Self` was used in the trait
+ // declaration.
+ //
+ // If `any`where in the `trait_sem_ty` the `self_ty` was used verbatim (as opposed
+ // to `Self`), we want to skip linting that type and all subtypes of it. This
+ // avoids suggestions to e.g. replace `Vec<u8>` with `Vec<Self>`, in an `impl Trait
+ // for u8`, when the trait always uses `Vec<u8>`.
+ //
+ // See also https://github.com/rust-lang/rust-clippy/issues/2894.
+ for (impl_hir_ty, trait_sem_ty) in impl_inputs_outputs.zip(trait_method_sig.inputs_and_output) {
+ if trait_sem_ty.walk().any(|inner| inner == self_ty.into()) {
+ let mut visitor = SkipTyCollector::default();
+ visitor.visit_ty(impl_hir_ty);
+ types_to_skip.extend(visitor.types_to_skip);
+ }
+ }
+ }
+ }
+ }
+
+ fn check_body(&mut self, _: &LateContext<'_>, _: &hir::Body<'_>) {
+ // `hir_ty_to_ty` cannot be called in `Body`s or it will panic (sometimes). But in bodies
+ // we can use `cx.typeck_results.node_type(..)` to get the `ty::Ty` from a `hir::Ty`.
+ // However the `node_type()` method can *only* be called in bodies.
+ if let Some(&mut StackItem::Check { ref mut in_body, .. }) = self.stack.last_mut() {
+ *in_body = in_body.saturating_add(1);
+ }
+ }
+
+ fn check_body_post(&mut self, _: &LateContext<'_>, _: &hir::Body<'_>) {
+ if let Some(&mut StackItem::Check { ref mut in_body, .. }) = self.stack.last_mut() {
+ *in_body = in_body.saturating_sub(1);
+ }
+ }
+
+ fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) {
+ if_chain! {
+ if !hir_ty.span.from_expansion();
+ if meets_msrv(self.msrv, msrvs::TYPE_ALIAS_ENUM_VARIANTS);
+ if let Some(&StackItem::Check {
+ impl_id,
+ in_body,
+ ref types_to_skip,
+ }) = self.stack.last();
+ if let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind;
+ if !matches!(path.res, Res::SelfTy { .. } | Res::Def(DefKind::TyParam, _));
+ if !types_to_skip.contains(&hir_ty.hir_id);
+ let ty = if in_body > 0 {
+ cx.typeck_results().node_type(hir_ty.hir_id)
+ } else {
+ hir_ty_to_ty(cx.tcx, hir_ty)
+ };
+ if same_type_and_consts(ty, cx.tcx.type_of(impl_id));
+ let hir = cx.tcx.hir();
+ // prevents false positive on `#[derive(serde::Deserialize)]`
+ if !hir.span(hir.get_parent_node(hir_ty.hir_id)).in_derive_expansion();
+ then {
+ span_lint(cx, hir_ty.span);
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+ if_chain! {
+ if !expr.span.from_expansion();
+ if meets_msrv(self.msrv, msrvs::TYPE_ALIAS_ENUM_VARIANTS);
+ if let Some(&StackItem::Check { impl_id, .. }) = self.stack.last();
+ if cx.typeck_results().expr_ty(expr) == cx.tcx.type_of(impl_id);
+ then {} else { return; }
+ }
+ match expr.kind {
+ ExprKind::Struct(QPath::Resolved(_, path), ..) => match path.res {
+ Res::SelfTy { .. } => (),
+ Res::Def(DefKind::Variant, _) => lint_path_to_variant(cx, path),
+ _ => span_lint(cx, path.span),
+ },
+ // tuple struct instantiation (`Foo(arg)` or `Enum::Foo(arg)`)
+ ExprKind::Call(fun, _) => {
+ if let ExprKind::Path(QPath::Resolved(_, path)) = fun.kind {
+ if let Res::Def(DefKind::Ctor(ctor_of, _), ..) = path.res {
+ match ctor_of {
+ CtorOf::Variant => lint_path_to_variant(cx, path),
+ CtorOf::Struct => span_lint(cx, path.span),
+ }
+ }
+ }
+ },
+ // unit enum variants (`Enum::A`)
+ ExprKind::Path(QPath::Resolved(_, path)) => lint_path_to_variant(cx, path),
+ _ => (),
+ }
+ }
+
+ fn check_pat(&mut self, cx: &LateContext<'_>, pat: &Pat<'_>) {
+ if_chain! {
+ if !pat.span.from_expansion();
+ if meets_msrv(self.msrv, msrvs::TYPE_ALIAS_ENUM_VARIANTS);
+ if let Some(&StackItem::Check { impl_id, .. }) = self.stack.last();
+ // get the path from the pattern
+ if let PatKind::Path(QPath::Resolved(_, path))
+ | PatKind::TupleStruct(QPath::Resolved(_, path), _, _)
+ | PatKind::Struct(QPath::Resolved(_, path), _, _) = pat.kind;
+ if cx.typeck_results().pat_ty(pat) == cx.tcx.type_of(impl_id);
+ then {
+ match path.res {
+ Res::Def(DefKind::Ctor(ctor_of, _), ..) => match ctor_of {
+ CtorOf::Variant => lint_path_to_variant(cx, path),
+ CtorOf::Struct => span_lint(cx, path.span),
+ },
+ Res::Def(DefKind::Variant, ..) => lint_path_to_variant(cx, path),
+ Res::Def(DefKind::Struct, ..) => span_lint(cx, path.span),
+ _ => ()
+ }
+ }
+ }
+ }
+
+ extract_msrv_attr!(LateContext);
+}
+
+#[derive(Default)]
+struct SkipTyCollector {
+ types_to_skip: Vec<HirId>,
+}
+
+impl<'tcx> Visitor<'tcx> for SkipTyCollector {
+ fn visit_infer(&mut self, inf: &hir::InferArg) {
+ self.types_to_skip.push(inf.hir_id);
+
+ walk_inf(self, inf);
+ }
+ fn visit_ty(&mut self, hir_ty: &hir::Ty<'_>) {
+ self.types_to_skip.push(hir_ty.hir_id);
+
+ walk_ty(self, hir_ty);
+ }
+}
+
+fn span_lint(cx: &LateContext<'_>, span: Span) {
+ span_lint_and_sugg(
+ cx,
+ USE_SELF,
+ span,
+ "unnecessary structure name repetition",
+ "use the applicable keyword",
+ "Self".to_owned(),
+ Applicability::MachineApplicable,
+ );
+}
+
+fn lint_path_to_variant(cx: &LateContext<'_>, path: &Path<'_>) {
+ if let [.., self_seg, _variant] = path.segments {
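+ // lint everything up to and including the enum segment (e.g. `Foo` in `Foo::Variant`),
+ // leaving the variant segment itself untouched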
+ let span = path
+ .span
+ .with_hi(self_seg.args().span_ext().unwrap_or(self_seg.ident.span).hi());
+ span_lint(cx, span);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
new file mode 100644
index 000000000..fe29bf29d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
@@ -0,0 +1,189 @@
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
+use clippy_utils::source::{snippet, snippet_with_macro_callsite};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::ty::{is_type_diagnostic_item, same_type_and_consts};
+use clippy_utils::{get_parent_expr, is_trait_method, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, HirId, MatchSource};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `Into`, `TryInto`, `From`, `TryFrom`, or `IntoIter` calls
+ /// which uselessly convert to the same type.
+ ///
+ /// ### Why is this bad?
+ /// Redundant code.
+ ///
+ /// ### Example
+ /// ```rust
+ /// // format!() returns a `String`
+ /// let s: String = format!("hello").into();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let s: String = format!("hello");
+ /// ```
+ #[clippy::version = "1.45.0"]
+ pub USELESS_CONVERSION,
+ complexity,
+ "calls to `Into`, `TryInto`, `From`, `TryFrom`, or `IntoIter` which perform useless conversions to the same type"
+}
+
+#[derive(Default)]
+pub struct UselessConversion {
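+ /// `HirId`s of the argument of the conversion call generated by `?` desugaring; expressions
+ /// matching the top of this stack are skipped in `check_expr` so that compiler-generated
+ /// conversions are not linted.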
+ try_desugar_arm: Vec<HirId>,
+}
+
+impl_lint_pass!(UselessConversion => [USELESS_CONVERSION]);
+
+#[expect(clippy::too_many_lines)]
+impl<'tcx> LateLintPass<'tcx> for UselessConversion {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if e.span.from_expansion() {
+ return;
+ }
+
+ if Some(&e.hir_id) == self.try_desugar_arm.last() {
+ return;
+ }
+
+ match e.kind {
+ ExprKind::Match(_, arms, MatchSource::TryDesugar) => {
+ let e = match arms[0].body.kind {
+ ExprKind::Ret(Some(e)) | ExprKind::Break(_, Some(e)) => e,
+ _ => return,
+ };
+ if let ExprKind::Call(_, args) = e.kind {
+ self.try_desugar_arm.push(args[0].hir_id);
+ }
+ },
+
+ ExprKind::MethodCall(name, .., args, _) => {
+ if is_trait_method(cx, e, sym::Into) && name.ident.as_str() == "into" {
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
+ if same_type_and_consts(a, b) {
+ let sugg = snippet_with_macro_callsite(cx, args[0].span, "<expr>").to_string();
+ span_lint_and_sugg(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{}`", b),
+ "consider removing `.into()`",
+ sugg,
+ Applicability::MachineApplicable, // snippet
+ );
+ }
+ }
+ if is_trait_method(cx, e, sym::IntoIterator) && name.ident.name == sym::into_iter {
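+ // `.into_iter()` calls nested directly in another method call (other than a further
+ // `into_iter`) are not linted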
+ if let Some(parent_expr) = get_parent_expr(cx, e) {
+ if let ExprKind::MethodCall(parent_name, ..) = parent_expr.kind {
+ if parent_name.ident.name != sym::into_iter {
+ return;
+ }
+ }
+ }
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
+ if same_type_and_consts(a, b) {
+ let sugg = snippet(cx, args[0].span, "<expr>").into_owned();
+ span_lint_and_sugg(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{}`", b),
+ "consider removing `.into_iter()`",
+ sugg,
+ Applicability::MachineApplicable, // snippet
+ );
+ }
+ }
+ if_chain! {
+ if is_trait_method(cx, e, sym::TryInto) && name.ident.name == sym::try_into;
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
+ if is_type_diagnostic_item(cx, a, sym::Result);
+ if let ty::Adt(_, substs) = a.kind();
+ if let Some(a_type) = substs.types().next();
+ if same_type_and_consts(a_type, b);
+
+ then {
+ span_lint_and_help(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{}`", b),
+ None,
+ "consider removing `.try_into()`",
+ );
+ }
+ }
+ },
+
+ ExprKind::Call(path, args) => {
+ if_chain! {
+ if args.len() == 1;
+ if let ExprKind::Path(ref qpath) = path.kind;
+ if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
+ then {
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
+ if_chain! {
+ if match_def_path(cx, def_id, &paths::TRY_FROM);
+ if is_type_diagnostic_item(cx, a, sym::Result);
+ if let ty::Adt(_, substs) = a.kind();
+ if let Some(a_type) = substs.types().next();
+ if same_type_and_consts(a_type, b);
+
+ then {
+ let hint = format!("consider removing `{}()`", snippet(cx, path.span, "TryFrom::try_from"));
+ span_lint_and_help(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{}`", b),
+ None,
+ &hint,
+ );
+ }
+ }
+
+ if_chain! {
+ if match_def_path(cx, def_id, &paths::FROM_FROM);
+ if same_type_and_consts(a, b);
+
+ then {
+ let sugg = Sugg::hir_with_macro_callsite(cx, &args[0], "<expr>").maybe_par();
+ let sugg_msg =
+ format!("consider removing `{}()`", snippet(cx, path.span, "From::from"));
+ span_lint_and_sugg(
+ cx,
+ USELESS_CONVERSION,
+ e.span,
+ &format!("useless conversion to the same type: `{}`", b),
+ &sugg_msg,
+ sugg.to_string(),
+ Applicability::MachineApplicable, // snippet
+ );
+ }
+ }
+ }
+ }
+ },
+
+ _ => {},
+ }
+ }
+
+ fn check_expr_post(&mut self, _: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if Some(&e.hir_id) == self.try_desugar_arm.last() {
+ self.try_desugar_arm.pop();
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs
new file mode 100644
index 000000000..c0726868f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/author.rs
@@ -0,0 +1,741 @@
+//! A group of attributes that can be attached to Rust code in order
+//! to generate a clippy lint detecting said code automatically.
+
+use clippy_utils::{get_attr, higher};
+use rustc_ast::ast::{LitFloatType, LitKind};
+use rustc_ast::LitIntType;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir as hir;
+use rustc_hir::{ArrayLen, Closure, ExprKind, FnRetTy, HirId, Lit, PatKind, QPath, StmtKind, TyKind};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::{Ident, Symbol};
+use std::fmt::{Display, Formatter, Write as _};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Generates clippy code that detects the offending pattern
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// // ./tests/ui/my_lint.rs
+ /// fn foo() {
+ /// // detect the following pattern
+ /// #[clippy::author]
+ /// if x == 42 {
+ /// // but ignore everything from here on
+ /// #![clippy::author = "ignore"]
+ /// }
+ /// ()
+ /// }
+ /// ```
+ ///
+ /// Running `TESTNAME=ui/my_lint cargo uitest` will produce
+ /// a `./tests/ui/new_lint.stdout` file with the generated code:
+ ///
+ /// ```rust,ignore
+ /// // ./tests/ui/new_lint.stdout
+ /// if_chain! {
+ /// if let ExprKind::If(ref cond, ref then, None) = item.kind,
+ /// if let ExprKind::Binary(BinOp::Eq, ref left, ref right) = cond.kind,
+ /// if let ExprKind::Path(ref path) = left.kind,
+ /// if let ExprKind::Lit(ref lit) = right.kind,
+ /// if let LitKind::Int(42, _) = lit.node,
+ /// then {
+ /// // report your lint here
+ /// }
+ /// }
+ /// ```
+ pub LINT_AUTHOR,
+ internal_warn,
+ "helper for writing lints"
+}
+
+declare_lint_pass!(Author => [LINT_AUTHOR]);
+
+/// Writes a line of output with indentation added
+macro_rules! out {
+ ($($t:tt)*) => {
+ println!(" {}", format_args!($($t)*))
+ };
+}
+
+/// The variables passed in are replaced with `&Binding`s where the `value` field is set
+/// to the original value of the variable. The `name` field is set to the name of the variable
+/// (using `stringify!`) and is adjusted to avoid duplicate names.
+/// Note that the `Binding` may be printed directly to output the `name`.
+macro_rules! bind {
+ ($self:ident $(, $name:ident)+) => {
+ $(let $name = & $self.bind(stringify!($name), $name);)+
+ };
+}
+
+/// Transforms the given `Option<T>` variables into `OptionPat<Binding<T>>`.
+/// This displays as `Some($name)` or `None` when printed. The name of the inner binding
+/// is set to the name of the variable passed to the macro.
+macro_rules! opt_bind {
+ ($self:ident $(, $name:ident)+) => {
+ $(let $name = OptionPat::new($name.map(|o| $self.bind(stringify!($name), o)));)+
+ };
+}
+
+/// Creates a `Binding` that accesses the field of an existing `Binding`
+macro_rules! field {
+ ($binding:ident.$field:ident) => {
+ &Binding {
+ name: $binding.name.to_string() + stringify!(.$field),
+ value: $binding.value.$field,
+ }
+ };
+}
+
+fn prelude() {
+ println!("if_chain! {{");
+}
+
+fn done() {
+ println!(" then {{");
+ println!(" // report your lint here");
+ println!(" }}");
+ println!("}}");
+}
+
+impl<'tcx> LateLintPass<'tcx> for Author {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ check_item(cx, item.hir_id());
+ }
+
+ fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
+ check_item(cx, item.hir_id());
+ }
+
+ fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
+ check_item(cx, item.hir_id());
+ }
+
+ fn check_arm(&mut self, cx: &LateContext<'tcx>, arm: &'tcx hir::Arm<'_>) {
+ check_node(cx, arm.hir_id, |v| {
+ v.arm(&v.bind("arm", arm));
+ });
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ check_node(cx, expr.hir_id, |v| {
+ v.expr(&v.bind("expr", expr));
+ });
+ }
+
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx hir::Stmt<'_>) {
+ match stmt.kind {
+ StmtKind::Expr(e) | StmtKind::Semi(e) if has_attr(cx, e.hir_id) => return,
+ _ => {},
+ }
+ check_node(cx, stmt.hir_id, |v| {
+ v.stmt(&v.bind("stmt", stmt));
+ });
+ }
+}
+
+fn check_item(cx: &LateContext<'_>, hir_id: HirId) {
+ let hir = cx.tcx.hir();
+ if let Some(body_id) = hir.maybe_body_owned_by(hir_id.expect_owner()) {
+ check_node(cx, hir_id, |v| {
+ v.expr(&v.bind("expr", &hir.body(body_id).value));
+ });
+ }
+}
+
+fn check_node(cx: &LateContext<'_>, hir_id: HirId, f: impl Fn(&PrintVisitor<'_, '_>)) {
+ if has_attr(cx, hir_id) {
+ prelude();
+ f(&PrintVisitor::new(cx));
+ done();
+ }
+}
+
+struct Binding<T> {
+ name: String,
+ value: T,
+}
+
+impl<T> Display for Binding<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.write_str(&self.name)
+ }
+}
+
+struct OptionPat<T> {
+ pub opt: Option<T>,
+}
+
+impl<T> OptionPat<T> {
+ fn new(opt: Option<T>) -> Self {
+ Self { opt }
+ }
+
+ fn if_some(&self, f: impl Fn(&T)) {
+ if let Some(t) = &self.opt {
+ f(t);
+ }
+ }
+}
+
+impl<T: Display> Display for OptionPat<T> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ match &self.opt {
+ None => f.write_str("None"),
+ Some(node) => write!(f, "Some({node})"),
+ }
+ }
+}
+
+struct PrintVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ /// Counter for each binding name; the current value is appended to repeated pattern
+ /// binding names to keep them unique
+ ids: std::cell::Cell<FxHashMap<&'static str, u32>>,
+}
+
+#[allow(clippy::unused_self)]
+impl<'a, 'tcx> PrintVisitor<'a, 'tcx> {
+ fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ ids: std::cell::Cell::default(),
+ }
+ }
+
+ fn next(&self, s: &'static str) -> String {
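+ // the counter map lives in a `Cell` so it can be updated through `&self`:
+ // take it out, update the count for `s`, then put it back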
+ let mut ids = self.ids.take();
+ let out = match *ids.entry(s).and_modify(|n| *n += 1).or_default() {
+ // first usage of the name, use it as is
+ 0 => s.to_string(),
+ // append a number starting with 1
+ n => format!("{s}{n}"),
+ };
+ self.ids.set(ids);
+ out
+ }
+
+ fn bind<T>(&self, name: &'static str, value: T) -> Binding<T> {
+ let name = self.next(name);
+ Binding { name, value }
+ }
+
+ fn option<T: Copy>(&self, option: &Binding<Option<T>>, name: &'static str, f: impl Fn(&Binding<T>)) {
+ match option.value {
+ None => out!("if {option}.is_none();"),
+ Some(value) => {
+ let value = &self.bind(name, value);
+ out!("if let Some({value}) = {option};");
+ f(value);
+ },
+ }
+ }
+
+ fn slice<T>(&self, slice: &Binding<&[T]>, f: impl Fn(&Binding<&T>)) {
+ if slice.value.is_empty() {
+ out!("if {slice}.is_empty();");
+ } else {
+ out!("if {slice}.len() == {};", slice.value.len());
+ for (i, value) in slice.value.iter().enumerate() {
+ let name = format!("{slice}[{i}]");
+ f(&Binding { name, value });
+ }
+ }
+ }
+
+ fn destination(&self, destination: &Binding<hir::Destination>) {
+ self.option(field!(destination.label), "label", |label| {
+ self.ident(field!(label.ident));
+ });
+ }
+
+ fn ident(&self, ident: &Binding<Ident>) {
+ out!("if {ident}.as_str() == {:?};", ident.value.as_str());
+ }
+
+ fn symbol(&self, symbol: &Binding<Symbol>) {
+ out!("if {symbol}.as_str() == {:?};", symbol.value.as_str());
+ }
+
+ fn qpath(&self, qpath: &Binding<&QPath<'_>>) {
+ if let QPath::LangItem(lang_item, ..) = *qpath.value {
+ out!("if matches!({qpath}, QPath::LangItem(LangItem::{lang_item:?}, _));");
+ } else {
+ out!("if match_qpath({qpath}, &[{}]);", path_to_string(qpath.value));
+ }
+ }
+
+ fn lit(&self, lit: &Binding<&Lit>) {
+ let kind = |kind| out!("if let LitKind::{kind} = {lit}.node;");
+ macro_rules! kind {
+ ($($t:tt)*) => (kind(format_args!($($t)*)));
+ }
+
+ match lit.value.node {
+ LitKind::Bool(val) => kind!("Bool({val:?})"),
+ LitKind::Char(c) => kind!("Char({c:?})"),
+ LitKind::Err(val) => kind!("Err({val})"),
+ LitKind::Byte(b) => kind!("Byte({b})"),
+ LitKind::Int(i, suffix) => {
+ let int_ty = match suffix {
+ LitIntType::Signed(int_ty) => format!("LitIntType::Signed(IntTy::{int_ty:?})"),
+ LitIntType::Unsigned(uint_ty) => format!("LitIntType::Unsigned(UintTy::{uint_ty:?})"),
+ LitIntType::Unsuffixed => String::from("LitIntType::Unsuffixed"),
+ };
+ kind!("Int({i}, {int_ty})");
+ },
+ LitKind::Float(_, suffix) => {
+ let float_ty = match suffix {
+ LitFloatType::Suffixed(suffix_ty) => format!("LitFloatType::Suffixed(FloatTy::{suffix_ty:?})"),
+ LitFloatType::Unsuffixed => String::from("LitFloatType::Unsuffixed"),
+ };
+ kind!("Float(_, {float_ty})");
+ },
+ LitKind::ByteStr(ref vec) => {
+ bind!(self, vec);
+ kind!("ByteStr(ref {vec})");
+ out!("if let [{:?}] = **{vec};", vec.value);
+ },
+ LitKind::Str(s, _) => {
+ bind!(self, s);
+ kind!("Str({s}, _)");
+ self.symbol(s);
+ },
+ }
+ }
+
+ fn arm(&self, arm: &Binding<&hir::Arm<'_>>) {
+ self.pat(field!(arm.pat));
+ match arm.value.guard {
+ None => out!("if {arm}.guard.is_none();"),
+ Some(hir::Guard::If(expr)) => {
+ bind!(self, expr);
+ out!("if let Some(Guard::If({expr})) = {arm}.guard;");
+ self.expr(expr);
+ },
+ Some(hir::Guard::IfLet(let_expr)) => {
+ bind!(self, let_expr);
+ out!("if let Some(Guard::IfLet({let_expr})) = {arm}.guard;");
+ self.pat(field!(let_expr.pat));
+ self.expr(field!(let_expr.init));
+ },
+ }
+ self.expr(field!(arm.body));
+ }
+
+ #[allow(clippy::too_many_lines)]
+ fn expr(&self, expr: &Binding<&hir::Expr<'_>>) {
+ if let Some(higher::While { condition, body }) = higher::While::hir(expr.value) {
+ bind!(self, condition, body);
+ out!(
+ "if let Some(higher::While {{ condition: {condition}, body: {body} }}) \
+ = higher::While::hir({expr});"
+ );
+ self.expr(condition);
+ self.expr(body);
+ return;
+ }
+
+ if let Some(higher::WhileLet {
+ let_pat,
+ let_expr,
+ if_then,
+ }) = higher::WhileLet::hir(expr.value)
+ {
+ bind!(self, let_pat, let_expr, if_then);
+ out!(
+ "if let Some(higher::WhileLet {{ let_pat: {let_pat}, let_expr: {let_expr}, if_then: {if_then} }}) \
+ = higher::WhileLet::hir({expr});"
+ );
+ self.pat(let_pat);
+ self.expr(let_expr);
+ self.expr(if_then);
+ return;
+ }
+
+ if let Some(higher::ForLoop { pat, arg, body, .. }) = higher::ForLoop::hir(expr.value) {
+ bind!(self, pat, arg, body);
+ out!(
+ "if let Some(higher::ForLoop {{ pat: {pat}, arg: {arg}, body: {body}, .. }}) \
+ = higher::ForLoop::hir({expr});"
+ );
+ self.pat(pat);
+ self.expr(arg);
+ self.expr(body);
+ return;
+ }
+
+ let kind = |kind| out!("if let ExprKind::{kind} = {expr}.kind;");
+ macro_rules! kind {
+ ($($t:tt)*) => (kind(format_args!($($t)*)));
+ }
+
+ match expr.value.kind {
+ ExprKind::Let(let_expr) => {
+ bind!(self, let_expr);
+ kind!("Let({let_expr})");
+ self.pat(field!(let_expr.pat));
+ // Does what ExprKind::Cast does, only adds a clause for the type
+ // if it's a path
+ if let Some(TyKind::Path(ref qpath)) = let_expr.value.ty.as_ref().map(|ty| &ty.kind) {
+ bind!(self, qpath);
+ out!("if let TyKind::Path(ref {qpath}) = {let_expr}.ty.kind;");
+ self.qpath(qpath);
+ }
+ self.expr(field!(let_expr.init));
+ },
+ ExprKind::Box(inner) => {
+ bind!(self, inner);
+ kind!("Box({inner})");
+ self.expr(inner);
+ },
+ ExprKind::Array(elements) => {
+ bind!(self, elements);
+ kind!("Array({elements})");
+ self.slice(elements, |e| self.expr(e));
+ },
+ ExprKind::Call(func, args) => {
+ bind!(self, func, args);
+ kind!("Call({func}, {args})");
+ self.expr(func);
+ self.slice(args, |e| self.expr(e));
+ },
+ ExprKind::MethodCall(method_name, args, _) => {
+ bind!(self, method_name, args);
+ kind!("MethodCall({method_name}, {args}, _)");
+ self.ident(field!(method_name.ident));
+ self.slice(args, |e| self.expr(e));
+ },
+ ExprKind::Tup(elements) => {
+ bind!(self, elements);
+ kind!("Tup({elements})");
+ self.slice(elements, |e| self.expr(e));
+ },
+ ExprKind::Binary(op, left, right) => {
+ bind!(self, op, left, right);
+ kind!("Binary({op}, {left}, {right})");
+ out!("if BinOpKind::{:?} == {op}.node;", op.value.node);
+ self.expr(left);
+ self.expr(right);
+ },
+ ExprKind::Unary(op, inner) => {
+ bind!(self, inner);
+ kind!("Unary(UnOp::{op:?}, {inner})");
+ self.expr(inner);
+ },
+ ExprKind::Lit(ref lit) => {
+ bind!(self, lit);
+ kind!("Lit(ref {lit})");
+ self.lit(lit);
+ },
+ ExprKind::Cast(expr, cast_ty) => {
+ bind!(self, expr, cast_ty);
+ kind!("Cast({expr}, {cast_ty})");
+ if let TyKind::Path(ref qpath) = cast_ty.value.kind {
+ bind!(self, qpath);
+ out!("if let TyKind::Path(ref {qpath}) = {cast_ty}.kind;");
+ self.qpath(qpath);
+ }
+ self.expr(expr);
+ },
+ ExprKind::Type(expr, _ty) => {
+ bind!(self, expr);
+ kind!("Type({expr}, _)");
+ self.expr(expr);
+ },
+ ExprKind::Loop(body, label, des, _) => {
+ bind!(self, body);
+ opt_bind!(self, label);
+ kind!("Loop({body}, {label}, LoopSource::{des:?}, _)");
+ self.block(body);
+ label.if_some(|l| self.ident(field!(l.ident)));
+ },
+ ExprKind::If(cond, then, else_expr) => {
+ bind!(self, cond, then);
+ opt_bind!(self, else_expr);
+ kind!("If({cond}, {then}, {else_expr})");
+ self.expr(cond);
+ self.expr(then);
+ else_expr.if_some(|e| self.expr(e));
+ },
+ ExprKind::Match(scrutinee, arms, des) => {
+ bind!(self, scrutinee, arms);
+ kind!("Match({scrutinee}, {arms}, MatchSource::{des:?})");
+ self.expr(scrutinee);
+ self.slice(arms, |arm| self.arm(arm));
+ },
+ ExprKind::Closure(&Closure {
+ capture_clause,
+ fn_decl,
+ body: body_id,
+ movability,
+ ..
+ }) => {
+ let movability = OptionPat::new(movability.map(|m| format!("Movability::{m:?}")));
+
+ let ret_ty = match fn_decl.output {
+ FnRetTy::DefaultReturn(_) => "FnRetTy::DefaultReturn(_)",
+ FnRetTy::Return(_) => "FnRetTy::Return(_ty)",
+ };
+
+ bind!(self, fn_decl, body_id);
+ kind!("Closure(CaptureBy::{capture_clause:?}, {fn_decl}, {body_id}, _, {movability})");
+ out!("if let {ret_ty} = {fn_decl}.output;");
+ self.body(body_id);
+ },
+ ExprKind::Yield(sub, source) => {
+ bind!(self, sub);
+ kind!("Yield({sub}, YieldSource::{source:?})");
+ self.expr(sub);
+ },
+ ExprKind::Block(block, label) => {
+ bind!(self, block);
+ opt_bind!(self, label);
+ kind!("Block({block}, {label})");
+ self.block(block);
+ label.if_some(|l| self.ident(field!(l.ident)));
+ },
+ ExprKind::Assign(target, value, _) => {
+ bind!(self, target, value);
+ kind!("Assign({target}, {value}, _span)");
+ self.expr(target);
+ self.expr(value);
+ },
+ ExprKind::AssignOp(op, target, value) => {
+ bind!(self, op, target, value);
+ kind!("AssignOp({op}, {target}, {value})");
+ out!("if BinOpKind::{:?} == {op}.node;", op.value.node);
+ self.expr(target);
+ self.expr(value);
+ },
+ ExprKind::Field(object, field_name) => {
+ bind!(self, object, field_name);
+ kind!("Field({object}, {field_name})");
+ self.ident(field_name);
+ self.expr(object);
+ },
+ ExprKind::Index(object, index) => {
+ bind!(self, object, index);
+ kind!("Index({object}, {index})");
+ self.expr(object);
+ self.expr(index);
+ },
+ ExprKind::Path(ref qpath) => {
+ bind!(self, qpath);
+ kind!("Path(ref {qpath})");
+ self.qpath(qpath);
+ },
+ ExprKind::AddrOf(kind, mutability, inner) => {
+ bind!(self, inner);
+ kind!("AddrOf(BorrowKind::{kind:?}, Mutability::{mutability:?}, {inner})");
+ self.expr(inner);
+ },
+ ExprKind::Break(destination, value) => {
+ bind!(self, destination);
+ opt_bind!(self, value);
+ kind!("Break({destination}, {value})");
+ self.destination(destination);
+ value.if_some(|e| self.expr(e));
+ },
+ ExprKind::Continue(destination) => {
+ bind!(self, destination);
+ kind!("Continue({destination})");
+ self.destination(destination);
+ },
+ ExprKind::Ret(value) => {
+ opt_bind!(self, value);
+ kind!("Ret({value})");
+ value.if_some(|e| self.expr(e));
+ },
+ ExprKind::InlineAsm(_) => {
+ kind!("InlineAsm(_)");
+ out!("// unimplemented: `ExprKind::InlineAsm` is not further destructured at the moment");
+ },
+ ExprKind::Struct(qpath, fields, base) => {
+ bind!(self, qpath, fields);
+ opt_bind!(self, base);
+ kind!("Struct({qpath}, {fields}, {base})");
+ self.qpath(qpath);
+ self.slice(fields, |field| {
+ self.ident(field!(field.ident));
+ self.expr(field!(field.expr));
+ });
+ base.if_some(|e| self.expr(e));
+ },
+ ExprKind::ConstBlock(_) => kind!("ConstBlock(_)"),
+ ExprKind::Repeat(value, length) => {
+ bind!(self, value, length);
+ kind!("Repeat({value}, {length})");
+ self.expr(value);
+ match length.value {
+ ArrayLen::Infer(..) => out!("if let ArrayLen::Infer(..) = {length};"),
+ ArrayLen::Body(anon_const) => {
+ bind!(self, anon_const);
+ out!("if let ArrayLen::Body({anon_const}) = {length};");
+ self.body(field!(anon_const.body));
+ },
+ }
+ },
+ ExprKind::Err => kind!("Err"),
+ ExprKind::DropTemps(expr) => {
+ bind!(self, expr);
+ kind!("DropTemps({expr})");
+ self.expr(expr);
+ },
+ }
+ }
+
+ fn block(&self, block: &Binding<&hir::Block<'_>>) {
+ self.slice(field!(block.stmts), |stmt| self.stmt(stmt));
+ self.option(field!(block.expr), "trailing_expr", |expr| {
+ self.expr(expr);
+ });
+ }
+
+ fn body(&self, body_id: &Binding<hir::BodyId>) {
+ let expr = &self.cx.tcx.hir().body(body_id.value).value;
+ bind!(self, expr);
+ out!("let {expr} = &cx.tcx.hir().body({body_id}).value;");
+ self.expr(expr);
+ }
+
+ fn pat(&self, pat: &Binding<&hir::Pat<'_>>) {
+ let kind = |kind| out!("if let PatKind::{kind} = {pat}.kind;");
+ macro_rules! kind {
+ ($($t:tt)*) => (kind(format_args!($($t)*)));
+ }
+
+ match pat.value.kind {
+ PatKind::Wild => kind!("Wild"),
+ PatKind::Binding(anno, .., name, sub) => {
+ bind!(self, name);
+ opt_bind!(self, sub);
+ kind!("Binding(BindingAnnotation::{anno:?}, _, {name}, {sub})");
+ self.ident(name);
+ sub.if_some(|p| self.pat(p));
+ },
+ PatKind::Struct(ref qpath, fields, ignore) => {
+ bind!(self, qpath, fields);
+ kind!("Struct(ref {qpath}, {fields}, {ignore})");
+ self.qpath(qpath);
+ self.slice(fields, |field| {
+ self.ident(field!(field.ident));
+ self.pat(field!(field.pat));
+ });
+ },
+ PatKind::Or(fields) => {
+ bind!(self, fields);
+ kind!("Or({fields})");
+ self.slice(fields, |pat| self.pat(pat));
+ },
+ PatKind::TupleStruct(ref qpath, fields, skip_pos) => {
+ bind!(self, qpath, fields);
+ kind!("TupleStruct(ref {qpath}, {fields}, {skip_pos:?})");
+ self.qpath(qpath);
+ self.slice(fields, |pat| self.pat(pat));
+ },
+ PatKind::Path(ref qpath) => {
+ bind!(self, qpath);
+ kind!("Path(ref {qpath})");
+ self.qpath(qpath);
+ },
+ PatKind::Tuple(fields, skip_pos) => {
+ bind!(self, fields);
+ kind!("Tuple({fields}, {skip_pos:?})");
+ self.slice(fields, |field| self.pat(field));
+ },
+ PatKind::Box(pat) => {
+ bind!(self, pat);
+ kind!("Box({pat})");
+ self.pat(pat);
+ },
+ PatKind::Ref(pat, muta) => {
+ bind!(self, pat);
+ kind!("Ref({pat}, Mutability::{muta:?})");
+ self.pat(pat);
+ },
+ PatKind::Lit(lit_expr) => {
+ bind!(self, lit_expr);
+ kind!("Lit({lit_expr})");
+ self.expr(lit_expr);
+ },
+ PatKind::Range(start, end, end_kind) => {
+ opt_bind!(self, start, end);
+ kind!("Range({start}, {end}, RangeEnd::{end_kind:?})");
+ start.if_some(|e| self.expr(e));
+ end.if_some(|e| self.expr(e));
+ },
+ PatKind::Slice(start, middle, end) => {
+ bind!(self, start, end);
+ opt_bind!(self, middle);
+ kind!("Slice({start}, {middle}, {end})");
+ middle.if_some(|p| self.pat(p));
+ self.slice(start, |pat| self.pat(pat));
+ self.slice(end, |pat| self.pat(pat));
+ },
+ }
+ }
+
+ fn stmt(&self, stmt: &Binding<&hir::Stmt<'_>>) {
+ let kind = |kind| out!("if let StmtKind::{kind} = {stmt}.kind;");
+ macro_rules! kind {
+ ($($t:tt)*) => (kind(format_args!($($t)*)));
+ }
+
+ match stmt.value.kind {
+ StmtKind::Local(local) => {
+ bind!(self, local);
+ kind!("Local({local})");
+ self.option(field!(local.init), "init", |init| {
+ self.expr(init);
+ });
+ self.pat(field!(local.pat));
+ },
+ StmtKind::Item(_) => kind!("Item(item_id)"),
+ StmtKind::Expr(e) => {
+ bind!(self, e);
+ kind!("Expr({e})");
+ self.expr(e);
+ },
+ StmtKind::Semi(e) => {
+ bind!(self, e);
+ kind!("Semi({e})");
+ self.expr(e);
+ },
+ }
+ }
+}
+
+fn has_attr(cx: &LateContext<'_>, hir_id: hir::HirId) -> bool {
+ let attrs = cx.tcx.hir().attrs(hir_id);
+ get_attr(cx.sess(), attrs, "author").count() > 0
+}
+
+fn path_to_string(path: &QPath<'_>) -> String {
+ fn inner(s: &mut String, path: &QPath<'_>) {
+ match *path {
+ QPath::Resolved(_, path) => {
+ for (i, segment) in path.segments.iter().enumerate() {
+ if i > 0 {
+ *s += ", ";
+ }
+ write!(s, "{:?}", segment.ident.as_str()).unwrap();
+ }
+ },
+ QPath::TypeRelative(ty, segment) => match &ty.kind {
+ hir::TyKind::Path(inner_path) => {
+ inner(s, inner_path);
+ *s += ", ";
+ write!(s, "{:?}", segment.ident.as_str()).unwrap();
+ },
+ other => write!(s, "/* unimplemented: {:?}*/", other).unwrap(),
+ },
+ QPath::LangItem(..) => panic!("path_to_string: called for lang item qpath"),
+ }
+ }
+ let mut s = String::new();
+ inner(&mut s, path);
+ s
+}
diff --git a/src/tools/clippy/clippy_lints/src/utils/conf.rs b/src/tools/clippy/clippy_lints/src/utils/conf.rs
new file mode 100644
index 000000000..6e033b3be
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/conf.rs
@@ -0,0 +1,534 @@
+//! Read configuration files.
+
+#![allow(clippy::module_name_repetitions)]
+
+use serde::de::{Deserializer, IgnoredAny, IntoDeserializer, MapAccess, Visitor};
+use serde::Deserialize;
+use std::error::Error;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+use std::{cmp, env, fmt, fs, io, iter};
+
+#[rustfmt::skip]
+const DEFAULT_DOC_VALID_IDENTS: &[&str] = &[
+ "KiB", "MiB", "GiB", "TiB", "PiB", "EiB",
+ "DirectX",
+ "ECMAScript",
+ "GPLv2", "GPLv3",
+ "GitHub", "GitLab",
+ "IPv4", "IPv6",
+ "ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript",
+ "NaN", "NaNs",
+ "OAuth", "GraphQL",
+ "OCaml",
+ "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenDNS",
+ "WebGL",
+ "TensorFlow",
+ "TrueType",
+ "iOS", "macOS", "FreeBSD",
+ "TeX", "LaTeX", "BibTeX", "BibLaTeX",
+ "MinGW",
+ "CamelCase",
+];
+const DEFAULT_BLACKLISTED_NAMES: &[&str] = &["foo", "baz", "quux"];
+
+/// Holds information used by the `MISSING_ENFORCED_IMPORT_RENAMES` lint.
+#[derive(Clone, Debug, Deserialize)]
+pub struct Rename {
+ pub path: String,
+ pub rename: String,
+}
+
+/// A single disallowed method, used by the `DISALLOWED_METHODS` lint.
+#[derive(Clone, Debug, Deserialize)]
+#[serde(untagged)]
+pub enum DisallowedMethod {
+ Simple(String),
+ WithReason { path: String, reason: Option<String> },
+}
+
+impl DisallowedMethod {
+ pub fn path(&self) -> &str {
+ let (Self::Simple(path) | Self::WithReason { path, .. }) = self;
+
+ path
+ }
+}
+
+/// A single disallowed type, used by the `DISALLOWED_TYPES` lint.
+#[derive(Clone, Debug, Deserialize)]
+#[serde(untagged)]
+pub enum DisallowedType {
+ Simple(String),
+ WithReason { path: String, reason: Option<String> },
+}
+
+/// Conf with parse errors
+#[derive(Default)]
+pub struct TryConf {
+ pub conf: Conf,
+ pub errors: Vec<Box<dyn Error>>,
+}
+
+impl TryConf {
+ fn from_error(error: impl Error + 'static) -> Self {
+ Self {
+ conf: Conf::default(),
+ errors: vec![Box::new(error)],
+ }
+ }
+}
+
+#[derive(Debug)]
+struct ConfError(String);
+
+impl fmt::Display for ConfError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ <String as fmt::Display>::fmt(&self.0, f)
+ }
+}
+
+impl Error for ConfError {}
+
+fn conf_error(s: String) -> Box<dyn Error> {
+ Box::new(ConfError(s))
+}
+
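+/// Generates, from one list of documented `(name: type = default)` entries: the `Conf` struct,
+/// its `defaults` module and `Default` impl, a `Deserialize` impl for `TryConf` that collects
+/// per-field errors instead of failing, and (behind the `internal` feature) the `metadata`
+/// module used by the metadata collector.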
+macro_rules! define_Conf {
+ ($(
+ $(#[doc = $doc:literal])+
+ $(#[conf_deprecated($dep:literal)])?
+ ($name:ident: $ty:ty = $default:expr),
+ )*) => {
+ /// Clippy lint configuration
+ pub struct Conf {
+ $($(#[doc = $doc])+ pub $name: $ty,)*
+ }
+
+ mod defaults {
+ $(pub fn $name() -> $ty { $default })*
+ }
+
+ impl Default for Conf {
+ fn default() -> Self {
+ Self { $($name: defaults::$name(),)* }
+ }
+ }
+
+ impl<'de> Deserialize<'de> for TryConf {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
+ deserializer.deserialize_map(ConfVisitor)
+ }
+ }
+
+ #[derive(Deserialize)]
+ #[serde(field_identifier, rename_all = "kebab-case")]
+ #[allow(non_camel_case_types)]
+ enum Field { $($name,)* third_party, }
+
+ struct ConfVisitor;
+
+ impl<'de> Visitor<'de> for ConfVisitor {
+ type Value = TryConf;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("Conf")
+ }
+
+ fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error> where V: MapAccess<'de> {
+ let mut errors = Vec::new();
+ $(let mut $name = None;)*
+ // could get `Field` here directly, but get `str` first for diagnostics
+ while let Some(name) = map.next_key::<&str>()? {
+ match Field::deserialize(name.into_deserializer())? {
+ $(Field::$name => {
+ $(errors.push(conf_error(format!("deprecated field `{}`. {}", name, $dep)));)?
+ match map.next_value() {
+ Err(e) => errors.push(conf_error(e.to_string())),
+ Ok(value) => match $name {
+ Some(_) => errors.push(conf_error(format!("duplicate field `{}`", name))),
+ None => $name = Some(value),
+ }
+ }
+ })*
+ // white-listed; ignore
+ Field::third_party => drop(map.next_value::<IgnoredAny>())
+ }
+ }
+ let conf = Conf { $($name: $name.unwrap_or_else(defaults::$name),)* };
+ Ok(TryConf { conf, errors })
+ }
+ }
+
+ #[cfg(feature = "internal")]
+ pub mod metadata {
+ use crate::utils::internal_lints::metadata_collector::ClippyConfiguration;
+
+ macro_rules! wrap_option {
+ () => (None);
+ ($x:literal) => (Some($x));
+ }
+
+ pub(crate) fn get_configuration_metadata() -> Vec<ClippyConfiguration> {
+ vec![
+ $(
+ {
+ let deprecation_reason = wrap_option!($($dep)?);
+
+ ClippyConfiguration::new(
+ stringify!($name),
+ stringify!($ty),
+ format!("{:?}", super::defaults::$name()),
+ concat!($($doc, '\n',)*),
+ deprecation_reason,
+ )
+ },
+ )+
+ ]
+ }
+ }
+ };
+}
+
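+// Sketch of the expansion (illustration, not generated code): for an entry such as
+// `(msrv: Option<String> = None)`, `define_Conf!` produces roughly
+//
+//     pub struct Conf { pub msrv: Option<String>, /* ... */ }
+//     mod defaults { pub fn msrv() -> Option<String> { None } }
+//
+// together with a serde `Visitor` that records duplicate fields, deprecated-field uses,
+// and per-value parse errors in `TryConf::errors` while still returning a usable `Conf`
+// (an unknown field name, by contrast, aborts deserialization via `?`).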
+define_Conf! {
+ /// Lint: Arithmetic.
+ ///
+ /// Suppress checking of the passed type names.
+ (arithmetic_allowed: rustc_data_structures::fx::FxHashSet<String> = <_>::default()),
+ /// Lint: ENUM_VARIANT_NAMES, LARGE_TYPES_PASSED_BY_VALUE, TRIVIALLY_COPY_PASS_BY_REF, UNNECESSARY_WRAPS, UNUSED_SELF, UPPER_CASE_ACRONYMS, WRONG_SELF_CONVENTION, BOX_COLLECTION, REDUNDANT_ALLOCATION, RC_BUFFER, VEC_BOX, OPTION_OPTION, LINKEDLIST, RC_MUTEX.
+ ///
+ /// Suppress lints whenever the suggested change would cause breakage for other crates.
+ (avoid_breaking_exported_api: bool = true),
+ /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED.
+ ///
+ /// The minimum Rust version that the project supports
+ (msrv: Option<String> = None),
+ /// Lint: BLACKLISTED_NAME.
+ ///
+ /// The list of blacklisted names to lint about. NB: `bar` is not here since it has legitimate uses. The value
+ /// `".."` can be used as part of the list to indicate that the configured values should be appended to the
+ /// default configuration of Clippy. By default, any configuration will replace the default value.
+ (blacklisted_names: Vec<String> = super::DEFAULT_BLACKLISTED_NAMES.iter().map(ToString::to_string).collect()),
+ /// Lint: COGNITIVE_COMPLEXITY.
+ ///
+ /// The maximum cognitive complexity a function can have
+ (cognitive_complexity_threshold: u64 = 25),
+ /// DEPRECATED LINT: CYCLOMATIC_COMPLEXITY.
+ ///
+ /// Use the Cognitive Complexity lint instead.
+ #[conf_deprecated("Please use `cognitive-complexity-threshold` instead")]
+ (cyclomatic_complexity_threshold: Option<u64> = None),
+ /// Lint: DOC_MARKDOWN.
+ ///
+ /// The list of words this lint should not consider as identifiers needing ticks. The value
+ /// `".."` can be used as part of the list to indicate that the configured values should be appended to the
+ /// default configuration of Clippy. By default, any configuration will replace the default value. For example:
+ /// * `doc-valid-idents = ["ClipPy"]` would replace the default list with `["ClipPy"]`.
+ /// * `doc-valid-idents = ["ClipPy", ".."]` would append `ClipPy` to the default list.
+ ///
+ /// Default list:
+ (doc_valid_idents: Vec<String> = super::DEFAULT_DOC_VALID_IDENTS.iter().map(ToString::to_string).collect()),
+ /// Lint: TOO_MANY_ARGUMENTS.
+ ///
+ /// The maximum number of arguments a function or method can have
+ (too_many_arguments_threshold: u64 = 7),
+ /// Lint: TYPE_COMPLEXITY.
+ ///
+ /// The maximum complexity a type can have
+ (type_complexity_threshold: u64 = 250),
+ /// Lint: MANY_SINGLE_CHAR_NAMES.
+ ///
+ /// The maximum number of single char bindings a scope may have
+ (single_char_binding_names_threshold: u64 = 4),
+ /// Lint: BOXED_LOCAL, USELESS_VEC.
+ ///
+ /// The maximum size of objects (in bytes) that will be linted. Larger objects are ok on the heap
+ (too_large_for_stack: u64 = 200),
+ /// Lint: ENUM_VARIANT_NAMES.
+ ///
+ /// The minimum number of enum variants for the lints about variant names to trigger
+ (enum_variant_name_threshold: u64 = 3),
+ /// Lint: LARGE_ENUM_VARIANT.
+ ///
+ /// The maximum size of an enum's variant to avoid box suggestion
+ (enum_variant_size_threshold: u64 = 200),
+ /// Lint: VERBOSE_BIT_MASK.
+ ///
+ /// The maximum allowed size of a bit mask before suggesting to use 'trailing_zeros'
+ (verbose_bit_mask_threshold: u64 = 1),
+ /// Lint: DECIMAL_LITERAL_REPRESENTATION.
+ ///
+ /// The lower bound for linting decimal literals
+ (literal_representation_threshold: u64 = 16384),
+ /// Lint: TRIVIALLY_COPY_PASS_BY_REF.
+ ///
+ /// The maximum size (in bytes) to consider a `Copy` type for passing by value instead of by reference.
+ (trivial_copy_size_limit: Option<u64> = None),
+ /// Lint: LARGE_TYPE_PASS_BY_MOVE.
+ ///
+ /// The minimum size (in bytes) to consider a type for passing by reference instead of by value.
+ (pass_by_value_size_limit: u64 = 256),
+ /// Lint: TOO_MANY_LINES.
+ ///
+ /// The maximum number of lines a function or method can have
+ (too_many_lines_threshold: u64 = 100),
+ /// Lint: LARGE_STACK_ARRAYS, LARGE_CONST_ARRAYS.
+ ///
+ /// The maximum allowed size for arrays on the stack
+ (array_size_threshold: u64 = 512_000),
+ /// Lint: VEC_BOX.
+ ///
+ /// The size of the boxed type in bytes, where boxing in a `Vec` is allowed
+ (vec_box_size_threshold: u64 = 4096),
+ /// Lint: TYPE_REPETITION_IN_BOUNDS.
+ ///
+ /// The maximum number of bounds a trait can have to be linted
+ (max_trait_bounds: u64 = 3),
+ /// Lint: STRUCT_EXCESSIVE_BOOLS.
+ ///
+ /// The maximum number of bool fields a struct can have
+ (max_struct_bools: u64 = 3),
+ /// Lint: FN_PARAMS_EXCESSIVE_BOOLS.
+ ///
+ /// The maximum number of bool parameters a function can have
+ (max_fn_params_bools: u64 = 3),
+ /// Lint: WILDCARD_IMPORTS.
+ ///
+ /// Whether to allow certain wildcard imports (prelude, super in tests).
+ (warn_on_all_wildcard_imports: bool = false),
+ /// Lint: DISALLOWED_METHODS.
+ ///
+ /// The list of disallowed methods, written as fully qualified paths.
+ (disallowed_methods: Vec<crate::utils::conf::DisallowedMethod> = Vec::new()),
+ /// Lint: DISALLOWED_TYPES.
+ ///
+ /// The list of disallowed types, written as fully qualified paths.
+ (disallowed_types: Vec<crate::utils::conf::DisallowedType> = Vec::new()),
+ /// Lint: UNREADABLE_LITERAL.
+ ///
+ /// Should the fraction of a decimal be linted to include separators.
+ (unreadable_literal_lint_fractions: bool = true),
+ /// Lint: UPPER_CASE_ACRONYMS.
+ ///
+ /// Enables verbose mode. Triggers if there are two or more consecutive uppercase characters
+ (upper_case_acronyms_aggressive: bool = false),
+ /// Lint: _CARGO_COMMON_METADATA.
+ ///
+ /// For internal testing only, ignores the current `publish` settings in the Cargo manifest.
+ (cargo_ignore_publish: bool = false),
+ /// Lint: NONSTANDARD_MACRO_BRACES.
+ ///
+ /// Enforce the named macros always use the braces specified.
+ ///
+ /// A `MacroMatcher` can be added like so: `{ name = "macro_name", brace = "(" }`. If the macro
+ /// could be used with a full path, two `MacroMatcher`s have to be added: one with the full path
+ /// `crate_name::macro_name` and one with just the macro name.
+ (standard_macro_braces: Vec<crate::nonstandard_macro_braces::MacroMatcher> = Vec::new()),
+ /// Lint: MISSING_ENFORCED_IMPORT_RENAMES.
+ ///
+ /// The list of imports to always rename, a fully qualified path followed by the rename.
+ (enforced_import_renames: Vec<crate::utils::conf::Rename> = Vec::new()),
+ /// Lint: DISALLOWED_SCRIPT_IDENTS.
+ ///
+ /// The list of unicode scripts allowed to be used in the scope.
+ (allowed_scripts: Vec<String> = ["Latin"].iter().map(ToString::to_string).collect()),
+ /// Lint: NON_SEND_FIELDS_IN_SEND_TY.
+ ///
+ /// Whether to apply the raw pointer heuristic to determine if a type is `Send`.
+ (enable_raw_pointer_heuristic_for_send: bool = true),
+ /// Lint: INDEX_REFUTABLE_SLICE.
+ ///
+ /// When Clippy suggests using a slice pattern, this is the maximum number of elements allowed in
+ /// the slice pattern that is suggested. If more elements would be necessary, the lint is suppressed.
+ /// For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements.
+ (max_suggested_slice_pattern_length: u64 = 3),
+ /// Lint: AWAIT_HOLDING_INVALID_TYPE
+ (await_holding_invalid_types: Vec<crate::utils::conf::DisallowedType> = Vec::new()),
+ /// Lint: LARGE_INCLUDE_FILE.
+ ///
+ /// The maximum size of a file included via `include_bytes!()` or `include_str!()`, in bytes
+ (max_include_file_size: u64 = 1_000_000),
+ /// Lint: EXPECT_USED.
+ ///
+ /// Whether `expect` should be allowed in test functions
+ (allow_expect_in_tests: bool = false),
+ /// Lint: UNWRAP_USED.
+ ///
+ /// Whether `unwrap` should be allowed in test functions
+ (allow_unwrap_in_tests: bool = false),
+ /// Lint: DBG_MACRO.
+ ///
+ /// Whether `dbg!` should be allowed in test functions
+ (allow_dbg_in_tests: bool = false),
+}
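+
+// Illustration only: the field identifiers above become kebab-case keys in `clippy.toml`, e.g.
+//
+//     cognitive-complexity-threshold = 30
+//     doc-valid-idents = ["ClipPy", ".."]
+//     disallowed-methods = ["std::process::exit"]
+//
+// Options not listed in the file keep the defaults from the generated `defaults` module.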
+
+/// Search for the configuration file.
+pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> {
+ /// Possible filenames to search for.
+ const CONFIG_FILE_NAMES: [&str; 2] = [".clippy.toml", "clippy.toml"];
+
+ // Start looking for a config file in CLIPPY_CONF_DIR, or failing that, CARGO_MANIFEST_DIR.
+ // If neither of those exist, use ".".
+ let mut current = env::var_os("CLIPPY_CONF_DIR")
+ .or_else(|| env::var_os("CARGO_MANIFEST_DIR"))
+ .map_or_else(|| PathBuf::from("."), PathBuf::from);
+
+ let mut found_config: Option<PathBuf> = None;
+
+ loop {
+ for config_file_name in &CONFIG_FILE_NAMES {
+ if let Ok(config_file) = current.join(config_file_name).canonicalize() {
+ match fs::metadata(&config_file) {
+ Err(e) if e.kind() == io::ErrorKind::NotFound => {},
+ Err(e) => return Err(e),
+ Ok(md) if md.is_dir() => {},
+ Ok(_) => {
+ // warn if we happen to find two config files #8323
+ if let Some(ref found_config_) = found_config {
+ eprintln!(
+ "Using config file `{}`\nWarning: `{}` will be ignored.",
+ found_config_.display(),
+ config_file.display(),
+ );
+ } else {
+ found_config = Some(config_file);
+ }
+ },
+ }
+ }
+ }
+
+ if found_config.is_some() {
+ return Ok(found_config);
+ }
+
+ // If the current directory has no parent, we're done searching.
+ if !current.pop() {
+ return Ok(None);
+ }
+ }
+}
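+
+// Illustration of the search order above (hypothetical paths): with `CLIPPY_CONF_DIR`
+// unset and `CARGO_MANIFEST_DIR=/work/my-crate`, the loop checks `/work/my-crate`,
+// then `/work`, then `/`, stopping at the first directory that contains `.clippy.toml`
+// or `clippy.toml`.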
+
+/// Read the `toml` configuration file.
+///
+/// In case of error, the function tries to continue as much as possible.
+pub fn read(path: &Path) -> TryConf {
+ let content = match fs::read_to_string(path) {
+ Err(e) => return TryConf::from_error(e),
+ Ok(content) => content,
+ };
+ match toml::from_str::<TryConf>(&content) {
+ Ok(mut conf) => {
+ extend_vec_if_indicator_present(&mut conf.conf.doc_valid_idents, DEFAULT_DOC_VALID_IDENTS);
+ extend_vec_if_indicator_present(&mut conf.conf.blacklisted_names, DEFAULT_BLACKLISTED_NAMES);
+
+ conf
+ },
+ Err(e) => TryConf::from_error(e),
+ }
+}
+
+fn extend_vec_if_indicator_present(vec: &mut Vec<String>, default: &[&str]) {
+ if vec.contains(&"..".to_string()) {
+ vec.extend(default.iter().map(ToString::to_string));
+ }
+}
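+
+// Example (illustration only): `doc-valid-idents = ["ClipPy", ".."]` deserializes to
+// `["ClipPy", ".."]`; the presence of `".."` makes this helper append every entry of
+// `DEFAULT_DOC_VALID_IDENTS`, and the `".."` marker itself stays in the final list.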
+
+const SEPARATOR_WIDTH: usize = 4;
+
+// Check whether the error is "unknown field" and, if so, list the available fields sorted, at
+// least one per line, and more per line if `CLIPPY_TERMINAL_WIDTH` is set and allows it.
+pub fn format_error(error: Box<dyn Error>) -> String {
+ let s = error.to_string();
+
+ if_chain! {
+ if error.downcast::<toml::de::Error>().is_ok();
+ if let Some((prefix, mut fields, suffix)) = parse_unknown_field_message(&s);
+ then {
+ use fmt::Write;
+
+ fields.sort_unstable();
+
+ let (rows, column_widths) = calculate_dimensions(&fields);
+
+ let mut msg = String::from(prefix);
+ for row in 0..rows {
+ write!(msg, "\n").unwrap();
+ for (column, column_width) in column_widths.iter().copied().enumerate() {
+ let index = column * rows + row;
+ let field = fields.get(index).copied().unwrap_or_default();
+ write!(
+ msg,
+ "{:separator_width$}{:field_width$}",
+ " ",
+ field,
+ separator_width = SEPARATOR_WIDTH,
+ field_width = column_width
+ )
+ .unwrap();
+ }
+ }
+ write!(msg, "\n{}", suffix).unwrap();
+ msg
+ } else {
+ s
+ }
+ }
+}
+
+// `parse_unknown_field_message` will become unnecessary if
+// https://github.com/alexcrichton/toml-rs/pull/364 is merged.
+fn parse_unknown_field_message(s: &str) -> Option<(&str, Vec<&str>, &str)> {
+ // An "unknown field" message has the following form:
+ // unknown field `UNKNOWN`, expected one of `FIELD0`, `FIELD1`, ..., `FIELDN` at line X column Y
+ // ^^ ^^^^ ^^
+ if_chain! {
+ if s.starts_with("unknown field");
+ let slices = s.split("`, `").collect::<Vec<_>>();
+ let n = slices.len();
+ if n >= 2;
+ if let Some((prefix, first_field)) = slices[0].rsplit_once(" `");
+ if let Some((last_field, suffix)) = slices[n - 1].split_once("` ");
+ then {
+ let fields = iter::once(first_field)
+ .chain(slices[1..n - 1].iter().copied())
+ .chain(iter::once(last_field))
+ .collect::<Vec<_>>();
+ Some((prefix, fields, suffix))
+ } else {
+ None
+ }
+ }
+}
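+
+// Worked example (illustration only): for the message
+//     unknown field `regex`, expected one of `a`, `b`, `c` at line 1 column 1
+// this returns the prefix "unknown field `regex`, expected one of", the fields
+// ["a", "b", "c"], and the suffix "at line 1 column 1".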
+
+fn calculate_dimensions(fields: &[&str]) -> (usize, Vec<usize>) {
+ let columns = env::var("CLIPPY_TERMINAL_WIDTH")
+ .ok()
+ .and_then(|s| <usize as FromStr>::from_str(&s).ok())
+ .map_or(1, |terminal_width| {
+ let max_field_width = fields.iter().map(|field| field.len()).max().unwrap();
+ cmp::max(1, terminal_width / (SEPARATOR_WIDTH + max_field_width))
+ });
+
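+ // Worked example (illustration only): with `CLIPPY_TERMINAL_WIDTH=80` and a longest
+ // field of 28 characters, `columns = max(1, 80 / (4 + 28)) = 2`; 25 fields then give
+ // `rows = (25 + 1) / 2 = 13` below, and only the non-final columns get padded widths.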
+ let rows = (fields.len() + (columns - 1)) / columns;
+
+ let column_widths = (0..columns)
+ .map(|column| {
+ if column < columns - 1 {
+ (0..rows)
+ .map(|row| {
+ let index = column * rows + row;
+ let field = fields.get(index).copied().unwrap_or_default();
+ field.len()
+ })
+ .max()
+ .unwrap()
+ } else {
+ // Avoid adding extra space to the last column.
+ 0
+ }
+ })
+ .collect::<Vec<_>>();
+
+ (rows, column_widths)
+}
diff --git a/src/tools/clippy/clippy_lints/src/utils/dump_hir.rs b/src/tools/clippy/clippy_lints/src/utils/dump_hir.rs
new file mode 100644
index 000000000..01efc527a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/dump_hir.rs
@@ -0,0 +1,55 @@
+use clippy_utils::get_attr;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// It formats the attached node with `{:#?}` and writes the result to the
+ /// standard output. This is intended for debugging.
+ ///
+ /// ### Examples
+ /// ```rs
+ /// #[clippy::dump]
+ /// use std::mem;
+ ///
+ /// #[clippy::dump]
+ /// fn foo(input: u32) -> u64 {
+ /// input as u64
+ /// }
+ /// ```
+ pub DUMP_HIR,
+ internal_warn,
+ "helper to dump info about code"
+}
+
+declare_lint_pass!(DumpHir => [DUMP_HIR]);
+
+impl<'tcx> LateLintPass<'tcx> for DumpHir {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
+ if has_attr(cx, item.hir_id()) {
+ println!("{item:#?}");
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if has_attr(cx, expr.hir_id) {
+ println!("{expr:#?}");
+ }
+ }
+
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx hir::Stmt<'_>) {
+ match stmt.kind {
+ hir::StmtKind::Expr(e) | hir::StmtKind::Semi(e) if has_attr(cx, e.hir_id) => return,
+ _ => {},
+ }
+ if has_attr(cx, stmt.hir_id) {
+ println!("{stmt:#?}");
+ }
+ }
+}
+
+fn has_attr(cx: &LateContext<'_>, hir_id: hir::HirId) -> bool {
+ let attrs = cx.tcx.hir().attrs(hir_id);
+ get_attr(cx.sess(), attrs, "dump").count() > 0
+}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs
new file mode 100644
index 000000000..b30965329
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs
@@ -0,0 +1,1436 @@
+use crate::utils::internal_lints::metadata_collector::is_deprecated_lint;
+use clippy_utils::consts::{constant_simple, Constant};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::macros::root_macro_call_first_node;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::match_type;
+use clippy_utils::{
+ def_path_res, higher, is_else_clause, is_expn_of, is_expr_path_def_path, is_lint_allowed, match_def_path,
+ method_calls, paths, peel_blocks_with_stmt, SpanlessEq,
+};
+use if_chain::if_chain;
+use rustc_ast as ast;
+use rustc_ast::ast::{Crate, ItemKind, LitKind, ModKind, NodeId};
+use rustc_ast::visit::FnKind;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::hir_id::CRATE_HIR_ID;
+use rustc_hir::intravisit::Visitor;
+use rustc_hir::{
+ BinOpKind, Block, Closure, Expr, ExprKind, HirId, Item, Local, MutTy, Mutability, Node, Path, Stmt, StmtKind, Ty,
+ TyKind, UnOp,
+};
+use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
+use rustc_middle::hir::nested_filter;
+use rustc_middle::mir::interpret::ConstValue;
+use rustc_middle::ty::{self, fast_reject::SimplifiedTypeGen, subst::GenericArgKind, FloatTy};
+use rustc_semver::RustcVersion;
+use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Spanned;
+use rustc_span::symbol::Symbol;
+use rustc_span::{sym, BytePos, Span};
+use rustc_typeck::hir_ty_to_ty;
+
+use std::borrow::{Borrow, Cow};
+
+#[cfg(feature = "internal")]
+pub mod metadata_collector;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for various things we like to keep tidy in clippy.
+ ///
+ /// ### Why is this bad?
+ /// We like to pretend we're an example of tidy code.
+ ///
+ /// ### Example
+ /// Wrong ordering of the util::paths constants.
+ pub CLIPPY_LINTS_INTERNAL,
+ internal,
+ "various things that will negatively affect your clippy experience"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Ensures every lint is associated to a `LintPass`.
+ ///
+ /// ### Why is this bad?
+ /// The compiler only knows lints via a `LintPass`. Without
+ /// adding a lint to a `LintPass::get_lints()` return value, the compiler will not
+ /// know the name of the lint.
+ ///
+ /// ### Known problems
+ /// Only checks for lints associated using the
+ /// `declare_lint_pass!`, `impl_lint_pass!`, and `lint_array!` macros.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// declare_lint! { pub LINT_1, ... }
+ /// declare_lint! { pub LINT_2, ... }
+ /// declare_lint! { pub FORGOTTEN_LINT, ... }
+ /// // ...
+ /// declare_lint_pass!(Pass => [LINT_1, LINT_2]);
+ /// // missing FORGOTTEN_LINT
+ /// ```
+ pub LINT_WITHOUT_LINT_PASS,
+ internal,
+ "declaring a lint without associating it in a LintPass"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `cx.span_lint*` and suggests to use the `utils::*`
+ /// variant of the function.
+ ///
+ /// ### Why is this bad?
+ /// The `utils::*` variants also add a link to the Clippy documentation to the
+ /// warning/error messages.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// cx.span_lint(LINT_NAME, "message");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// utils::span_lint(cx, LINT_NAME, "message");
+ /// ```
+ pub COMPILER_LINT_FUNCTIONS,
+ internal,
+ "usage of the lint functions of the compiler instead of the utils::* variant"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `cx.outer_expn().expn_data()` and suggests using
+ /// `cx.outer_expn_data()` instead.
+ ///
+ /// ### Why is this bad?
+ /// `cx.outer_expn_data()` is faster and more concise.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// expr.span.ctxt().outer_expn().expn_data()
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// expr.span.ctxt().outer_expn_data()
+ /// ```
+ pub OUTER_EXPN_EXPN_DATA,
+ internal,
+ "using `cx.outer_expn().expn_data()` instead of `cx.outer_expn_data()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Not an actual lint. This lint is only meant for testing our customized internal compiler
+ /// error message by calling `panic`.
+ ///
+ /// ### Why is this bad?
+ /// ICE in large quantities can damage your teeth
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// 🍦🍦🍦🍦🍦
+ /// ```
+ pub PRODUCE_ICE,
+ internal,
+ "this message should not appear anywhere as we ICE before and don't emit the lint"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for cases of an auto-generated lint without an updated description,
+ /// i.e. `default lint description`.
+ ///
+ /// ### Why is this bad?
+ /// Indicates that the lint is not finished.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// declare_lint! { pub COOL_LINT, nursery, "default lint description" }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// declare_lint! { pub COOL_LINT, nursery, "a great new lint" }
+ /// ```
+ pub DEFAULT_LINT,
+ internal,
+ "found 'default lint description' in a lint declaration"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Lints `span_lint_and_then` function calls, where the
+ /// closure argument has only one statement and that statement is a method
+ /// call to `span_suggestion`, `span_help`, `span_note` (using the same
+ /// span), `help` or `note`.
+ ///
+ /// These usages of `span_lint_and_then` should be replaced with one of the
+ /// wrapper functions `span_lint_and_sugg`, `span_lint_and_help`, or
+ /// `span_lint_and_note`.
+ ///
+ /// ### Why is this bad?
+ /// Using the wrapper `span_lint_and_*` functions is more
+ /// convenient, more readable, and less error-prone.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |diag| {
+ /// diag.span_suggestion(
+ /// expr.span,
+ /// help_msg,
+ /// sugg.to_string(),
+ /// Applicability::MachineApplicable,
+ /// );
+ /// });
+ /// span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |diag| {
+ /// diag.span_help(expr.span, help_msg);
+ /// });
+ /// span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |diag| {
+ /// diag.help(help_msg);
+ /// });
+ /// span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |diag| {
+ /// diag.span_note(expr.span, note_msg);
+ /// });
+ /// span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |diag| {
+ /// diag.note(note_msg);
+ /// });
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// span_lint_and_sugg(
+ /// cx,
+ /// TEST_LINT,
+ /// expr.span,
+ /// lint_msg,
+ /// help_msg,
+ /// sugg.to_string(),
+ /// Applicability::MachineApplicable,
+ /// );
+ /// span_lint_and_help(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), help_msg);
+ /// span_lint_and_help(cx, TEST_LINT, expr.span, lint_msg, None, help_msg);
+ /// span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), note_msg);
+ /// span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, None, note_msg);
+ /// ```
+ pub COLLAPSIBLE_SPAN_LINT_CALLS,
+ internal,
+ "found collapsible `span_lint_and_then` calls"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `utils::match_type()` on a type diagnostic item
+ /// and suggests to use `utils::is_type_diagnostic_item()` instead.
+ ///
+ /// ### Why is this bad?
+ /// `utils::is_type_diagnostic_item()` does not require hardcoded paths.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// utils::match_type(cx, ty, &paths::VEC)
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// utils::is_type_diagnostic_item(cx, ty, sym::Vec)
+ /// ```
+ pub MATCH_TYPE_ON_DIAGNOSTIC_ITEM,
+ internal,
+ "using `utils::match_type()` instead of `utils::is_type_diagnostic_item()`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks the paths module for invalid paths.
+ ///
+ /// ### Why is this bad?
+ /// It indicates a bug in the code.
+ ///
+ /// ### Example
+ /// None.
+ pub INVALID_PATHS,
+ internal,
+ "invalid path"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for interning symbols that have already been pre-interned and defined as constants.
+ ///
+ /// ### Why is this bad?
+ /// It's faster and easier to use the symbol constant.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let _ = sym!(f32);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// let _ = sym::f32;
+ /// ```
+ pub INTERNING_DEFINED_SYMBOL,
+ internal,
+ "interning a symbol that is pre-interned and defined as a constant"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary conversion from Symbol to a string.
+ ///
+ /// ### Why is this bad?
+ /// It's faster to use symbols directly instead of strings.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// symbol.as_str() == "clippy";
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// symbol == sym::clippy;
+ /// ```
+ pub UNNECESSARY_SYMBOL_STR,
+ internal,
+ "unnecessary conversion between Symbol and string"
+}
+
+declare_clippy_lint! {
+ /// Finds unidiomatic usage of `if_chain!`
+ pub IF_CHAIN_STYLE,
+ internal,
+ "non-idiomatic `if_chain!` usage"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for invalid `clippy::version` attributes.
+ ///
+ /// Valid values are:
+ /// * "pre 1.29.0"
+ /// * any valid semantic version
+ pub INVALID_CLIPPY_VERSION_ATTRIBUTE,
+ internal,
+ "found an invalid `clippy::version` attribute"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for declared clippy lints without the `clippy::version` attribute.
+ ///
+ pub MISSING_CLIPPY_VERSION_ATTRIBUTE,
+ internal,
+ "found clippy lint without `clippy::version` attribute"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Check that the `extract_msrv_attr!` macro is used when a lint has an MSRV.
+ ///
+ pub MISSING_MSRV_ATTR_IMPL,
+ internal,
+ "checking if all necessary steps were taken when adding a MSRV to a lint"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for cases of an auto-generated deprecated lint without an updated reason,
+ /// i.e. `"default deprecation note"`.
+ ///
+ /// ### Why is this bad?
+ /// Indicates that the documentation is incomplete.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// declare_deprecated_lint! {
+ /// /// ### What it does
+ /// /// Nothing. This lint has been deprecated.
+ /// ///
+ /// /// ### Deprecation reason
+ /// /// TODO
+ /// #[clippy::version = "1.63.0"]
+ /// pub COOL_LINT,
+ /// "default deprecation note"
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// declare_deprecated_lint! {
+ /// /// ### What it does
+ /// /// Nothing. This lint has been deprecated.
+ /// ///
+ /// /// ### Deprecation reason
+ /// /// This lint has been replaced by `cooler_lint`
+ /// #[clippy::version = "1.63.0"]
+ /// pub COOL_LINT,
+ /// "this lint has been replaced by `cooler_lint`"
+ /// }
+ /// ```
+ pub DEFAULT_DEPRECATION_REASON,
+ internal,
+ "found 'default deprecation note' in a deprecated lint declaration"
+}
+
+declare_lint_pass!(ClippyLintsInternal => [CLIPPY_LINTS_INTERNAL]);
+
+impl EarlyLintPass for ClippyLintsInternal {
+ fn check_crate(&mut self, cx: &EarlyContext<'_>, krate: &Crate) {
+ if let Some(utils) = krate.items.iter().find(|item| item.ident.name.as_str() == "utils") {
+ if let ItemKind::Mod(_, ModKind::Loaded(ref items, ..)) = utils.kind {
+ if let Some(paths) = items.iter().find(|item| item.ident.name.as_str() == "paths") {
+ if let ItemKind::Mod(_, ModKind::Loaded(ref items, ..)) = paths.kind {
+ let mut last_name: Option<&str> = None;
+ for item in items {
+ let name = item.ident.as_str();
+ if let Some(last_name) = last_name {
+ if *last_name > *name {
+ span_lint(
+ cx,
+ CLIPPY_LINTS_INTERNAL,
+ item.span,
+ "this constant should be before the previous constant due to lexical \
+ ordering",
+ );
+ }
+ }
+ last_name = Some(name);
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone, Debug, Default)]
+pub struct LintWithoutLintPass {
+ declared_lints: FxHashMap<Symbol, Span>,
+ registered_lints: FxHashSet<Symbol>,
+}
+
+impl_lint_pass!(LintWithoutLintPass => [DEFAULT_LINT, LINT_WITHOUT_LINT_PASS, INVALID_CLIPPY_VERSION_ATTRIBUTE, MISSING_CLIPPY_VERSION_ATTRIBUTE, DEFAULT_DEPRECATION_REASON]);
+
+impl<'tcx> LateLintPass<'tcx> for LintWithoutLintPass {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ if is_lint_allowed(cx, DEFAULT_LINT, item.hir_id())
+ || is_lint_allowed(cx, DEFAULT_DEPRECATION_REASON, item.hir_id())
+ {
+ return;
+ }
+
+ if let hir::ItemKind::Static(ty, Mutability::Not, body_id) = item.kind {
+ let is_lint_ref_ty = is_lint_ref_type(cx, ty);
+ if is_deprecated_lint(cx, ty) || is_lint_ref_ty {
+ check_invalid_clippy_version_attribute(cx, item);
+
+ let expr = &cx.tcx.hir().body(body_id).value;
+ let fields;
+ if is_lint_ref_ty {
+ if let ExprKind::AddrOf(_, _, inner_exp) = expr.kind
+ && let ExprKind::Struct(_, struct_fields, _) = inner_exp.kind {
+ fields = struct_fields;
+ } else {
+ return;
+ }
+ } else if let ExprKind::Struct(_, struct_fields, _) = expr.kind {
+ fields = struct_fields;
+ } else {
+ return;
+ }
+
+ let field = fields
+ .iter()
+ .find(|f| f.ident.as_str() == "desc")
+ .expect("lints must have a description field");
+
+ if let ExprKind::Lit(Spanned {
+ node: LitKind::Str(ref sym, _),
+ ..
+ }) = field.expr.kind
+ {
+ let sym_str = sym.as_str();
+ if is_lint_ref_ty {
+ if sym_str == "default lint description" {
+ span_lint(
+ cx,
+ DEFAULT_LINT,
+ item.span,
+ &format!("the lint `{}` has the default lint description", item.ident.name),
+ );
+ }
+
+ self.declared_lints.insert(item.ident.name, item.span);
+ } else if sym_str == "default deprecation note" {
+ span_lint(
+ cx,
+ DEFAULT_DEPRECATION_REASON,
+ item.span,
+ &format!("the lint `{}` has the default deprecation reason", item.ident.name),
+ );
+ }
+ }
+ }
+ } else if let Some(macro_call) = root_macro_call_first_node(cx, item) {
+ if !matches!(
+ cx.tcx.item_name(macro_call.def_id).as_str(),
+ "impl_lint_pass" | "declare_lint_pass"
+ ) {
+ return;
+ }
+ if let hir::ItemKind::Impl(hir::Impl {
+ of_trait: None,
+ items: impl_item_refs,
+ ..
+ }) = item.kind
+ {
+ let mut collector = LintCollector {
+ output: &mut self.registered_lints,
+ cx,
+ };
+ let body_id = cx.tcx.hir().body_owned_by(
+ impl_item_refs
+ .iter()
+ .find(|iiref| iiref.ident.as_str() == "get_lints")
+ .expect("LintPass needs to implement get_lints")
+ .id
+ .hir_id(),
+ );
+ collector.visit_expr(&cx.tcx.hir().body(body_id).value);
+ }
+ }
+ }
+
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ if is_lint_allowed(cx, LINT_WITHOUT_LINT_PASS, CRATE_HIR_ID) {
+ return;
+ }
+
+ for (lint_name, &lint_span) in &self.declared_lints {
+ // When using the `declare_tool_lint!` macro, the original `lint_span`'s
+ // file points to "<rustc macros>".
+ // `compiletest-rs` thinks that's an error in a different file and
+ // just ignores it. This leaves the test in compile-fail/lint_pass
+ // unable to capture the error.
+ // Therefore, we need to climb the macro expansion tree and find the
+ // actual span that invoked `declare_tool_lint!`:
+ let lint_span = lint_span.ctxt().outer_expn_data().call_site;
+
+ if !self.registered_lints.contains(lint_name) {
+ span_lint(
+ cx,
+ LINT_WITHOUT_LINT_PASS,
+ lint_span,
+ &format!("the lint `{}` is not added to any `LintPass`", lint_name),
+ );
+ }
+ }
+ }
+}
+
+fn is_lint_ref_type<'tcx>(cx: &LateContext<'tcx>, ty: &Ty<'_>) -> bool {
+ if let TyKind::Rptr(
+ _,
+ MutTy {
+ ty: inner,
+ mutbl: Mutability::Not,
+ },
+ ) = ty.kind
+ {
+ if let TyKind::Path(ref path) = inner.kind {
+ if let Res::Def(DefKind::Struct, def_id) = cx.qpath_res(path, inner.hir_id) {
+ return match_def_path(cx, def_id, &paths::LINT);
+ }
+ }
+ }
+
+ false
+}
+
+fn check_invalid_clippy_version_attribute(cx: &LateContext<'_>, item: &'_ Item<'_>) {
+ if let Some(value) = extract_clippy_version_value(cx, item) {
+ // The `sym!` macro doesn't work as it only expects a single token.
+ // It's better to keep it this way and have a direct `Symbol::intern` call here.
+ if value == Symbol::intern("pre 1.29.0") {
+ return;
+ }
+
+ if RustcVersion::parse(value.as_str()).is_err() {
+ span_lint_and_help(
+ cx,
+ INVALID_CLIPPY_VERSION_ATTRIBUTE,
+ item.span,
+ "this item has an invalid `clippy::version` attribute",
+ None,
+ "please use a valid sematic version, see `doc/adding_lints.md`",
+ );
+ }
+ } else {
+ span_lint_and_help(
+ cx,
+ MISSING_CLIPPY_VERSION_ATTRIBUTE,
+ item.span,
+ "this lint is missing the `clippy::version` attribute or version value",
+ None,
+ "please use a `clippy::version` attribute, see `doc/adding_lints.md`",
+ );
+ }
+}
+
+/// This function extracts the version value of a `clippy::version` attribute if the given item has
+/// one.
+fn extract_clippy_version_value(cx: &LateContext<'_>, item: &'_ Item<'_>) -> Option<Symbol> {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ attrs.iter().find_map(|attr| {
+ if_chain! {
+ // Identify attribute
+ if let ast::AttrKind::Normal(ref attr_kind, _) = &attr.kind;
+ if let [tool_name, attr_name] = &attr_kind.path.segments[..];
+ if tool_name.ident.name == sym::clippy;
+ if attr_name.ident.name == sym::version;
+ if let Some(version) = attr.value_str();
+ then {
+ Some(version)
+ } else {
+ None
+ }
+ }
+ })
+}
+
+struct LintCollector<'a, 'tcx> {
+ output: &'a mut FxHashSet<Symbol>,
+ cx: &'a LateContext<'tcx>,
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for LintCollector<'a, 'tcx> {
+ type NestedFilter = nested_filter::All;
+
+ fn visit_path(&mut self, path: &'tcx Path<'_>, _: HirId) {
+ if path.segments.len() == 1 {
+ self.output.insert(path.segments[0].ident.name);
+ }
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+#[derive(Clone, Default)]
+pub struct CompilerLintFunctions {
+ map: FxHashMap<&'static str, &'static str>,
+}
+
+impl CompilerLintFunctions {
+ #[must_use]
+ pub fn new() -> Self {
+ let mut map = FxHashMap::default();
+ map.insert("span_lint", "utils::span_lint");
+ map.insert("struct_span_lint", "utils::span_lint");
+ map.insert("lint", "utils::span_lint");
+ map.insert("span_lint_note", "utils::span_lint_and_note");
+ map.insert("span_lint_help", "utils::span_lint_and_help");
+ Self { map }
+ }
+}
+
+impl_lint_pass!(CompilerLintFunctions => [COMPILER_LINT_FUNCTIONS]);
+
+impl<'tcx> LateLintPass<'tcx> for CompilerLintFunctions {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if is_lint_allowed(cx, COMPILER_LINT_FUNCTIONS, expr.hir_id) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::MethodCall(path, [self_arg, ..], _) = &expr.kind;
+ let fn_name = path.ident;
+ if let Some(sugg) = self.map.get(fn_name.as_str());
+ let ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
+ if match_type(cx, ty, &paths::EARLY_CONTEXT)
+ || match_type(cx, ty, &paths::LATE_CONTEXT);
+ then {
+ span_lint_and_help(
+ cx,
+ COMPILER_LINT_FUNCTIONS,
+ path.ident.span,
+ "usage of a compiler lint function",
+ None,
+ &format!("please use the Clippy variant of this function: `{}`", sugg),
+ );
+ }
+ }
+ }
+}
+
+declare_lint_pass!(OuterExpnDataPass => [OUTER_EXPN_EXPN_DATA]);
+
+impl<'tcx> LateLintPass<'tcx> for OuterExpnDataPass {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if is_lint_allowed(cx, OUTER_EXPN_EXPN_DATA, expr.hir_id) {
+ return;
+ }
+
+ let (method_names, arg_lists, spans) = method_calls(expr, 2);
+ let method_names: Vec<&str> = method_names.iter().map(Symbol::as_str).collect();
+ if_chain! {
+ if let ["expn_data", "outer_expn"] = method_names.as_slice();
+ let args = arg_lists[1];
+ if args.len() == 1;
+ let self_arg = &args[0];
+ let self_ty = cx.typeck_results().expr_ty(self_arg).peel_refs();
+ if match_type(cx, self_ty, &paths::SYNTAX_CONTEXT);
+ then {
+ span_lint_and_sugg(
+ cx,
+ OUTER_EXPN_EXPN_DATA,
+ spans[1].with_hi(expr.span.hi()),
+ "usage of `outer_expn().expn_data()`",
+ "try",
+ "outer_expn_data()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
+
+declare_lint_pass!(ProduceIce => [PRODUCE_ICE]);
+
+impl EarlyLintPass for ProduceIce {
+ fn check_fn(&mut self, _: &EarlyContext<'_>, fn_kind: FnKind<'_>, _: Span, _: NodeId) {
+ assert!(!is_trigger_fn(fn_kind), "Would you like some help with that?");
+ }
+}
+
+fn is_trigger_fn(fn_kind: FnKind<'_>) -> bool {
+ match fn_kind {
+ FnKind::Fn(_, ident, ..) => ident.name.as_str() == "it_looks_like_you_are_trying_to_kill_clippy",
+ FnKind::Closure(..) => false,
+ }
+}
+
+declare_lint_pass!(CollapsibleCalls => [COLLAPSIBLE_SPAN_LINT_CALLS]);
+
+impl<'tcx> LateLintPass<'tcx> for CollapsibleCalls {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if is_lint_allowed(cx, COLLAPSIBLE_SPAN_LINT_CALLS, expr.hir_id) {
+ return;
+ }
+
+ if_chain! {
+ if let ExprKind::Call(func, and_then_args) = expr.kind;
+ if is_expr_path_def_path(cx, func, &["clippy_utils", "diagnostics", "span_lint_and_then"]);
+ if and_then_args.len() == 5;
+ if let ExprKind::Closure(&Closure { body, .. }) = &and_then_args[4].kind;
+ let body = cx.tcx.hir().body(body);
+ let only_expr = peel_blocks_with_stmt(&body.value);
+ if let ExprKind::MethodCall(ps, span_call_args, _) = &only_expr.kind;
+ if let ExprKind::Path(..) = span_call_args[0].kind;
+ then {
+ let and_then_snippets = get_and_then_snippets(cx, and_then_args);
+ let mut sle = SpanlessEq::new(cx).deny_side_effects();
+ match ps.ident.as_str() {
+ "span_suggestion" if sle.eq_expr(&and_then_args[2], &span_call_args[1]) => {
+ suggest_suggestion(cx, expr, &and_then_snippets, &span_suggestion_snippets(cx, span_call_args));
+ },
+ "span_help" if sle.eq_expr(&and_then_args[2], &span_call_args[1]) => {
+ let help_snippet = snippet(cx, span_call_args[2].span, r#""...""#);
+ suggest_help(cx, expr, &and_then_snippets, help_snippet.borrow(), true);
+ },
+ "span_note" if sle.eq_expr(&and_then_args[2], &span_call_args[1]) => {
+ let note_snippet = snippet(cx, span_call_args[2].span, r#""...""#);
+ suggest_note(cx, expr, &and_then_snippets, note_snippet.borrow(), true);
+ },
+ "help" => {
+ let help_snippet = snippet(cx, span_call_args[1].span, r#""...""#);
+ suggest_help(cx, expr, &and_then_snippets, help_snippet.borrow(), false);
+ }
+ "note" => {
+ let note_snippet = snippet(cx, span_call_args[1].span, r#""...""#);
+ suggest_note(cx, expr, &and_then_snippets, note_snippet.borrow(), false);
+ }
+ _ => (),
+ }
+ }
+ }
+ }
+}
+
+struct AndThenSnippets<'a> {
+ cx: Cow<'a, str>,
+ lint: Cow<'a, str>,
+ span: Cow<'a, str>,
+ msg: Cow<'a, str>,
+}
+
+fn get_and_then_snippets<'a, 'hir>(cx: &LateContext<'_>, and_then_snippets: &'hir [Expr<'hir>]) -> AndThenSnippets<'a> {
+ let cx_snippet = snippet(cx, and_then_snippets[0].span, "cx");
+ let lint_snippet = snippet(cx, and_then_snippets[1].span, "..");
+ let span_snippet = snippet(cx, and_then_snippets[2].span, "span");
+ let msg_snippet = snippet(cx, and_then_snippets[3].span, r#""...""#);
+
+ AndThenSnippets {
+ cx: cx_snippet,
+ lint: lint_snippet,
+ span: span_snippet,
+ msg: msg_snippet,
+ }
+}
+
+struct SpanSuggestionSnippets<'a> {
+ help: Cow<'a, str>,
+ sugg: Cow<'a, str>,
+ applicability: Cow<'a, str>,
+}
+
+fn span_suggestion_snippets<'a, 'hir>(
+ cx: &LateContext<'_>,
+ span_call_args: &'hir [Expr<'hir>],
+) -> SpanSuggestionSnippets<'a> {
+ let help_snippet = snippet(cx, span_call_args[2].span, r#""...""#);
+ let sugg_snippet = snippet(cx, span_call_args[3].span, "..");
+ let applicability_snippet = snippet(cx, span_call_args[4].span, "Applicability::MachineApplicable");
+
+ SpanSuggestionSnippets {
+ help: help_snippet,
+ sugg: sugg_snippet,
+ applicability: applicability_snippet,
+ }
+}
+
+fn suggest_suggestion(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ and_then_snippets: &AndThenSnippets<'_>,
+ span_suggestion_snippets: &SpanSuggestionSnippets<'_>,
+) {
+ span_lint_and_sugg(
+ cx,
+ COLLAPSIBLE_SPAN_LINT_CALLS,
+ expr.span,
+ "this call is collapsible",
+ "collapse into",
+ format!(
+ "span_lint_and_sugg({}, {}, {}, {}, {}, {}, {})",
+ and_then_snippets.cx,
+ and_then_snippets.lint,
+ and_then_snippets.span,
+ and_then_snippets.msg,
+ span_suggestion_snippets.help,
+ span_suggestion_snippets.sugg,
+ span_suggestion_snippets.applicability
+ ),
+ Applicability::MachineApplicable,
+ );
+}
+
+fn suggest_help(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ and_then_snippets: &AndThenSnippets<'_>,
+ help: &str,
+ with_span: bool,
+) {
+ let option_span = if with_span {
+ format!("Some({})", and_then_snippets.span)
+ } else {
+ "None".to_string()
+ };
+
+ span_lint_and_sugg(
+ cx,
+ COLLAPSIBLE_SPAN_LINT_CALLS,
+ expr.span,
+ "this call is collapsible",
+ "collapse into",
+ format!(
+ "span_lint_and_help({}, {}, {}, {}, {}, {})",
+ and_then_snippets.cx,
+ and_then_snippets.lint,
+ and_then_snippets.span,
+ and_then_snippets.msg,
+ &option_span,
+ help
+ ),
+ Applicability::MachineApplicable,
+ );
+}
+
+fn suggest_note(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ and_then_snippets: &AndThenSnippets<'_>,
+ note: &str,
+ with_span: bool,
+) {
+ let note_span = if with_span {
+ format!("Some({})", and_then_snippets.span)
+ } else {
+ "None".to_string()
+ };
+
+ span_lint_and_sugg(
+ cx,
+ COLLAPSIBLE_SPAN_LINT_CALLS,
+ expr.span,
+ "this call is collapsible",
+ "collapse into",
+ format!(
+ "span_lint_and_note({}, {}, {}, {}, {}, {})",
+ and_then_snippets.cx,
+ and_then_snippets.lint,
+ and_then_snippets.span,
+ and_then_snippets.msg,
+ note_span,
+ note
+ ),
+ Applicability::MachineApplicable,
+ );
+}
+
+declare_lint_pass!(MatchTypeOnDiagItem => [MATCH_TYPE_ON_DIAGNOSTIC_ITEM]);
+
+impl<'tcx> LateLintPass<'tcx> for MatchTypeOnDiagItem {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ if is_lint_allowed(cx, MATCH_TYPE_ON_DIAGNOSTIC_ITEM, expr.hir_id) {
+ return;
+ }
+
+ if_chain! {
+ // Check if this is a call to utils::match_type()
+ if let ExprKind::Call(fn_path, [context, ty, ty_path]) = expr.kind;
+ if is_expr_path_def_path(cx, fn_path, &["clippy_utils", "ty", "match_type"]);
+ // Extract the path to the matched type
+ if let Some(segments) = path_to_matched_type(cx, ty_path);
+ let segments: Vec<&str> = segments.iter().map(Symbol::as_str).collect();
+ if let Some(ty_did) = def_path_res(cx, &segments[..]).opt_def_id();
+ // Check if the matched type is a diagnostic item
+ if let Some(item_name) = cx.tcx.get_diagnostic_name(ty_did);
+ then {
+ // TODO: check paths constants from external crates.
+ let cx_snippet = snippet(cx, context.span, "_");
+ let ty_snippet = snippet(cx, ty.span, "_");
+
+ span_lint_and_sugg(
+ cx,
+ MATCH_TYPE_ON_DIAGNOSTIC_ITEM,
+ expr.span,
+ "usage of `clippy_utils::ty::match_type()` on a type diagnostic item",
+ "try",
+ format!("clippy_utils::ty::is_type_diagnostic_item({}, {}, sym::{})", cx_snippet, ty_snippet, item_name),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+}
+
+fn path_to_matched_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Vec<Symbol>> {
+ use rustc_hir::ItemKind;
+
+ match &expr.kind {
+ ExprKind::AddrOf(.., expr) => return path_to_matched_type(cx, expr),
+ ExprKind::Path(qpath) => match cx.qpath_res(qpath, expr.hir_id) {
+ Res::Local(hir_id) => {
+ let parent_id = cx.tcx.hir().get_parent_node(hir_id);
+ if let Some(Node::Local(local)) = cx.tcx.hir().find(parent_id) {
+ if let Some(init) = local.init {
+ return path_to_matched_type(cx, init);
+ }
+ }
+ },
+ Res::Def(DefKind::Const | DefKind::Static(..), def_id) => {
+ if let Some(Node::Item(item)) = cx.tcx.hir().get_if_local(def_id) {
+ if let ItemKind::Const(.., body_id) | ItemKind::Static(.., body_id) = item.kind {
+ let body = cx.tcx.hir().body(body_id);
+ return path_to_matched_type(cx, &body.value);
+ }
+ }
+ },
+ _ => {},
+ },
+ ExprKind::Array(exprs) => {
+ let segments: Vec<Symbol> = exprs
+ .iter()
+ .filter_map(|expr| {
+ if let ExprKind::Lit(lit) = &expr.kind {
+ if let LitKind::Str(sym, _) = lit.node {
+ return Some(sym);
+ }
+ }
+
+ None
+ })
+ .collect();
+
+ if segments.len() == exprs.len() {
+ return Some(segments);
+ }
+ },
+ _ => {},
+ }
+
+ None
+}
+
+// This is not a complete resolver for paths. It works on all the paths currently used in the paths
+// module. That's all it does and all it needs to do.
+pub fn check_path(cx: &LateContext<'_>, path: &[&str]) -> bool {
+ if def_path_res(cx, path) != Res::Err {
+ return true;
+ }
+
+ // Some implementations can't be found by `def_path_res`, particularly inherent
+ // implementations of native types. Check lang items.
+ let path_syms: Vec<_> = path.iter().map(|p| Symbol::intern(p)).collect();
+ let lang_items = cx.tcx.lang_items();
+ // This list isn't complete, but good enough for our current list of paths.
+ let incoherent_impls = [
+ SimplifiedTypeGen::FloatSimplifiedType(FloatTy::F32),
+ SimplifiedTypeGen::FloatSimplifiedType(FloatTy::F64),
+ SimplifiedTypeGen::SliceSimplifiedType,
+ SimplifiedTypeGen::StrSimplifiedType,
+ ]
+ .iter()
+ .flat_map(|&ty| cx.tcx.incoherent_impls(ty));
+ for item_def_id in lang_items.items().iter().flatten().chain(incoherent_impls) {
+ let lang_item_path = cx.get_def_path(*item_def_id);
+ if path_syms.starts_with(&lang_item_path) {
+ if let [item] = &path_syms[lang_item_path.len()..] {
+ if matches!(
+ cx.tcx.def_kind(*item_def_id),
+ DefKind::Mod | DefKind::Enum | DefKind::Trait
+ ) {
+ for child in cx.tcx.module_children(*item_def_id) {
+ if child.ident.name == *item {
+ return true;
+ }
+ }
+ } else {
+ for child in cx.tcx.associated_item_def_ids(*item_def_id) {
+ if cx.tcx.item_name(*child) == *item {
+ return true;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ false
+}
+
+declare_lint_pass!(InvalidPaths => [INVALID_PATHS]);
+
+impl<'tcx> LateLintPass<'tcx> for InvalidPaths {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
+ let local_def_id = &cx.tcx.parent_module(item.hir_id());
+ let mod_name = &cx.tcx.item_name(local_def_id.to_def_id());
+ if_chain! {
+ if mod_name.as_str() == "paths";
+ if let hir::ItemKind::Const(ty, body_id) = item.kind;
+ let ty = hir_ty_to_ty(cx.tcx, ty);
+ if let ty::Array(el_ty, _) = &ty.kind();
+ if let ty::Ref(_, el_ty, _) = &el_ty.kind();
+ if el_ty.is_str();
+ let body = cx.tcx.hir().body(body_id);
+ let typeck_results = cx.tcx.typeck_body(body_id);
+ if let Some(Constant::Vec(path)) = constant_simple(cx, typeck_results, &body.value);
+ let path: Vec<&str> = path.iter().map(|x| {
+ if let Constant::Str(s) = x {
+ s.as_str()
+ } else {
+ // We checked the type of the constant above
+ unreachable!()
+ }
+ }).collect();
+ if !check_path(cx, &path[..]);
+ then {
+ span_lint(cx, INVALID_PATHS, item.span, "invalid path");
+ }
+ }
+ }
+}
+
+#[derive(Default)]
+pub struct InterningDefinedSymbol {
+ // Maps the symbol value to the constant DefId.
+ symbol_map: FxHashMap<u32, DefId>,
+}
+
+impl_lint_pass!(InterningDefinedSymbol => [INTERNING_DEFINED_SYMBOL, UNNECESSARY_SYMBOL_STR]);
+
+impl<'tcx> LateLintPass<'tcx> for InterningDefinedSymbol {
+ fn check_crate(&mut self, cx: &LateContext<'_>) {
+ if !self.symbol_map.is_empty() {
+ return;
+ }
+
+ for &module in &[&paths::KW_MODULE, &paths::SYM_MODULE] {
+ if let Some(def_id) = def_path_res(cx, module).opt_def_id() {
+ for item in cx.tcx.module_children(def_id).iter() {
+ if_chain! {
+ if let Res::Def(DefKind::Const, item_def_id) = item.res;
+ let ty = cx.tcx.type_of(item_def_id);
+ if match_type(cx, ty, &paths::SYMBOL);
+ if let Ok(ConstValue::Scalar(value)) = cx.tcx.const_eval_poly(item_def_id);
+ if let Ok(value) = value.to_u32();
+ then {
+ self.symbol_map.insert(value, item_def_id);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if let ExprKind::Call(func, [arg]) = &expr.kind;
+ if let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(func).kind();
+ if match_def_path(cx, *def_id, &paths::SYMBOL_INTERN);
+ if let Some(Constant::Str(arg)) = constant_simple(cx, cx.typeck_results(), arg);
+ let value = Symbol::intern(&arg).as_u32();
+ if let Some(&def_id) = self.symbol_map.get(&value);
+ then {
+ span_lint_and_sugg(
+ cx,
+ INTERNING_DEFINED_SYMBOL,
+ is_expn_of(expr.span, "sym").unwrap_or(expr.span),
+ "interning a defined symbol",
+ "try",
+ cx.tcx.def_path_str(def_id),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ if let ExprKind::Binary(op, left, right) = expr.kind {
+ if matches!(op.node, BinOpKind::Eq | BinOpKind::Ne) {
+ let data = [
+ (left, self.symbol_str_expr(left, cx)),
+ (right, self.symbol_str_expr(right, cx)),
+ ];
+ match data {
+ // both operands are a symbol string
+ [(_, Some(left)), (_, Some(right))] => {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_SYMBOL_STR,
+ expr.span,
+ "unnecessary `Symbol` to string conversion",
+ "try",
+ format!(
+ "{} {} {}",
+ left.as_symbol_snippet(cx),
+ op.node.as_str(),
+ right.as_symbol_snippet(cx),
+ ),
+ Applicability::MachineApplicable,
+ );
+ },
+ // one of the operands is a symbol string
+ [(expr, Some(symbol)), _] | [_, (expr, Some(symbol))] => {
+ // creating an owned string for comparison
+ if matches!(symbol, SymbolStrExpr::Expr { is_to_owned: true, .. }) {
+ span_lint_and_sugg(
+ cx,
+ UNNECESSARY_SYMBOL_STR,
+ expr.span,
+ "unnecessary string allocation",
+ "try",
+ format!("{}.as_str()", symbol.as_symbol_snippet(cx)),
+ Applicability::MachineApplicable,
+ );
+ }
+ },
+ // nothing found
+ [(_, None), (_, None)] => {},
+ }
+ }
+ }
+ }
+}
+
+impl InterningDefinedSymbol {
+ fn symbol_str_expr<'tcx>(&self, expr: &'tcx Expr<'tcx>, cx: &LateContext<'tcx>) -> Option<SymbolStrExpr<'tcx>> {
+ static IDENT_STR_PATHS: &[&[&str]] = &[&paths::IDENT_AS_STR, &paths::TO_STRING_METHOD];
+ static SYMBOL_STR_PATHS: &[&[&str]] = &[
+ &paths::SYMBOL_AS_STR,
+ &paths::SYMBOL_TO_IDENT_STRING,
+ &paths::TO_STRING_METHOD,
+ ];
+ let call = if_chain! {
+ if let ExprKind::AddrOf(_, _, e) = expr.kind;
+ if let ExprKind::Unary(UnOp::Deref, e) = e.kind;
+ then { e } else { expr }
+ };
+ if_chain! {
+ // is a method call
+ if let ExprKind::MethodCall(_, [item], _) = call.kind;
+ if let Some(did) = cx.typeck_results().type_dependent_def_id(call.hir_id);
+ let ty = cx.typeck_results().expr_ty(item);
+ // ...on either an Ident or a Symbol
+ if let Some(is_ident) = if match_type(cx, ty, &paths::SYMBOL) {
+ Some(false)
+ } else if match_type(cx, ty, &paths::IDENT) {
+ Some(true)
+ } else {
+ None
+ };
+ // ...which converts it to a string
+ let paths = if is_ident { IDENT_STR_PATHS } else { SYMBOL_STR_PATHS };
+ if let Some(path) = paths.iter().find(|path| match_def_path(cx, did, path));
+ then {
+ let is_to_owned = path.last().unwrap().ends_with("string");
+ return Some(SymbolStrExpr::Expr {
+ item,
+ is_ident,
+ is_to_owned,
+ });
+ }
+ }
+ // is a string constant
+ if let Some(Constant::Str(s)) = constant_simple(cx, cx.typeck_results(), expr) {
+ let value = Symbol::intern(&s).as_u32();
+ // ...which matches a symbol constant
+ if let Some(&def_id) = self.symbol_map.get(&value) {
+ return Some(SymbolStrExpr::Const(def_id));
+ }
+ }
+ None
+ }
+}
+
+enum SymbolStrExpr<'tcx> {
+ /// a string constant with a corresponding symbol constant
+ Const(DefId),
+ /// a "symbol to string" expression like `symbol.as_str()`
+ Expr {
+ /// part that evaluates to `Symbol` or `Ident`
+ item: &'tcx Expr<'tcx>,
+ is_ident: bool,
+ /// whether an owned `String` is created like `to_ident_string()`
+ is_to_owned: bool,
+ },
+}
+
+impl<'tcx> SymbolStrExpr<'tcx> {
+ /// Returns a snippet that evaluates to a `Symbol` and is const if possible
+ fn as_symbol_snippet(&self, cx: &LateContext<'_>) -> Cow<'tcx, str> {
+ match *self {
+ Self::Const(def_id) => cx.tcx.def_path_str(def_id).into(),
+ Self::Expr { item, is_ident, .. } => {
+ let mut snip = snippet(cx, item.span.source_callsite(), "..");
+ if is_ident {
+ // get `Ident.name`
+ snip.to_mut().push_str(".name");
+ }
+ snip
+ },
+ }
+ }
+}
+
+declare_lint_pass!(IfChainStyle => [IF_CHAIN_STYLE]);
+
+impl<'tcx> LateLintPass<'tcx> for IfChainStyle {
+ fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
+ let (local, after, if_chain_span) = if_chain! {
+ if let [Stmt { kind: StmtKind::Local(local), .. }, after @ ..] = block.stmts;
+ if let Some(if_chain_span) = is_expn_of(block.span, "if_chain");
+ then { (local, after, if_chain_span) } else { return }
+ };
+ if is_first_if_chain_expr(cx, block.hir_id, if_chain_span) {
+ span_lint(
+ cx,
+ IF_CHAIN_STYLE,
+ if_chain_local_span(cx, local, if_chain_span),
+ "`let` expression should be above the `if_chain!`",
+ );
+ } else if local.span.ctxt() == block.span.ctxt() && is_if_chain_then(after, block.expr, if_chain_span) {
+ span_lint(
+ cx,
+ IF_CHAIN_STYLE,
+ if_chain_local_span(cx, local, if_chain_span),
+ "`let` expression should be inside `then { .. }`",
+ );
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
+ let (cond, then, els) = if let Some(higher::IfOrIfLet { cond, r#else, then }) = higher::IfOrIfLet::hir(expr) {
+ (cond, then, r#else.is_some())
+ } else {
+ return;
+ };
+ let then_block = match then.kind {
+ ExprKind::Block(block, _) => block,
+ _ => return,
+ };
+ let if_chain_span = is_expn_of(expr.span, "if_chain");
+ if !els {
+ check_nested_if_chains(cx, expr, then_block, if_chain_span);
+ }
+ let if_chain_span = match if_chain_span {
+ None => return,
+ Some(span) => span,
+ };
+ // check for `if a && b;`
+ if_chain! {
+ if let ExprKind::Binary(op, _, _) = cond.kind;
+ if op.node == BinOpKind::And;
+ if cx.sess().source_map().is_multiline(cond.span);
+ then {
+ span_lint(cx, IF_CHAIN_STYLE, cond.span, "`if a && b;` should be `if a; if b;`");
+ }
+ }
+ if is_first_if_chain_expr(cx, expr.hir_id, if_chain_span)
+ && is_if_chain_then(then_block.stmts, then_block.expr, if_chain_span)
+ {
+ span_lint(cx, IF_CHAIN_STYLE, expr.span, "`if_chain!` only has one `if`");
+ }
+ }
+}
+
+fn check_nested_if_chains(
+ cx: &LateContext<'_>,
+ if_expr: &Expr<'_>,
+ then_block: &Block<'_>,
+ if_chain_span: Option<Span>,
+) {
+ #[rustfmt::skip]
+ let (head, tail) = match *then_block {
+ Block { stmts, expr: Some(tail), .. } => (stmts, tail),
+ Block {
+ stmts: &[
+ ref head @ ..,
+ Stmt { kind: StmtKind::Expr(tail) | StmtKind::Semi(tail), .. }
+ ],
+ ..
+ } => (head, tail),
+ _ => return,
+ };
+ if_chain! {
+ if let Some(higher::IfOrIfLet { r#else: None, .. }) = higher::IfOrIfLet::hir(tail);
+ let sm = cx.sess().source_map();
+ if head
+ .iter()
+ .all(|stmt| matches!(stmt.kind, StmtKind::Local(..)) && !sm.is_multiline(stmt.span));
+ if if_chain_span.is_some() || !is_else_clause(cx.tcx, if_expr);
+ then {} else { return }
+ }
+ let (span, msg) = match (if_chain_span, is_expn_of(tail.span, "if_chain")) {
+ (None, Some(_)) => (if_expr.span, "this `if` can be part of the inner `if_chain!`"),
+ (Some(_), None) => (tail.span, "this `if` can be part of the outer `if_chain!`"),
+ (Some(a), Some(b)) if a != b => (b, "this `if_chain!` can be merged with the outer `if_chain!`"),
+ _ => return,
+ };
+ span_lint_and_then(cx, IF_CHAIN_STYLE, span, msg, |diag| {
+ let (span, msg) = match head {
+ [] => return,
+ [stmt] => (stmt.span, "this `let` statement can also be in the `if_chain!`"),
+ [a, .., b] => (
+ a.span.to(b.span),
+ "these `let` statements can also be in the `if_chain!`",
+ ),
+ };
+ diag.span_help(span, msg);
+ });
+}
+
+fn is_first_if_chain_expr(cx: &LateContext<'_>, hir_id: HirId, if_chain_span: Span) -> bool {
+ cx.tcx
+ .hir()
+ .parent_iter(hir_id)
+ .find(|(_, node)| {
+ #[rustfmt::skip]
+ !matches!(node, Node::Expr(Expr { kind: ExprKind::Block(..), .. }) | Node::Stmt(_))
+ })
+ .map_or(false, |(id, _)| {
+ is_expn_of(cx.tcx.hir().span(id), "if_chain") != Some(if_chain_span)
+ })
+}
+
+/// Checks a trailing slice of statements and the trailing expression of a `Block` to see if they are part
+/// of the `then {..}` portion of an `if_chain!`
+fn is_if_chain_then(stmts: &[Stmt<'_>], expr: Option<&Expr<'_>>, if_chain_span: Span) -> bool {
+ let span = if let [stmt, ..] = stmts {
+ stmt.span
+ } else if let Some(expr) = expr {
+ expr.span
+ } else {
+ // empty `then {}`
+ return true;
+ };
+ is_expn_of(span, "if_chain").map_or(true, |span| span != if_chain_span)
+}
+
+/// Creates a `Span` for `let x = ..;` in an `if_chain!` call.
+fn if_chain_local_span(cx: &LateContext<'_>, local: &Local<'_>, if_chain_span: Span) -> Span {
+ let mut span = local.pat.span;
+ if let Some(init) = local.init {
+ span = span.to(init.span);
+ }
+ span.adjust(if_chain_span.ctxt().outer_expn());
+ let sm = cx.sess().source_map();
+ let span = sm.span_extend_to_prev_str(span, "let", false, true).unwrap_or(span);
+ let span = sm.span_extend_to_next_char(span, ';', false);
+ Span::new(
+ span.lo() - BytePos(3),
+ span.hi() + BytePos(1),
+ span.ctxt(),
+ span.parent(),
+ )
+}
+
+declare_lint_pass!(MsrvAttrImpl => [MISSING_MSRV_ATTR_IMPL]);
+
+impl LateLintPass<'_> for MsrvAttrImpl {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
+ if_chain! {
+ if let hir::ItemKind::Impl(hir::Impl {
+ of_trait: Some(lint_pass_trait_ref),
+ self_ty,
+ items,
+ ..
+ }) = &item.kind;
+ if let Some(lint_pass_trait_def_id) = lint_pass_trait_ref.trait_def_id();
+ let is_late_pass = match_def_path(cx, lint_pass_trait_def_id, &paths::LATE_LINT_PASS);
+ if is_late_pass || match_def_path(cx, lint_pass_trait_def_id, &paths::EARLY_LINT_PASS);
+ let self_ty = hir_ty_to_ty(cx.tcx, self_ty);
+ if let ty::Adt(self_ty_def, _) = self_ty.kind();
+ if self_ty_def.is_struct();
+ if self_ty_def.all_fields().any(|f| {
+ cx.tcx
+ .type_of(f.did)
+ .walk()
+ .filter(|t| matches!(t.unpack(), GenericArgKind::Type(_)))
+ .any(|t| match_type(cx, t.expect_ty(), &paths::RUSTC_VERSION))
+ });
+ if !items.iter().any(|item| item.ident.name == sym!(enter_lint_attrs));
+ then {
+ let context = if is_late_pass { "LateContext" } else { "EarlyContext" };
+ let lint_pass = if is_late_pass { "LateLintPass" } else { "EarlyLintPass" };
+ let span = cx.sess().source_map().span_through_char(item.span, '{');
+ span_lint_and_sugg(
+ cx,
+ MISSING_MSRV_ATTR_IMPL,
+ span,
+ &format!("`extract_msrv_attr!` macro missing from `{lint_pass}` implementation"),
+ &format!("add `extract_msrv_attr!({context})` to the `{lint_pass}` implementation"),
+ format!("{}\n extract_msrv_attr!({context});", snippet(cx, span, "..")),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
new file mode 100644
index 000000000..92934c16d
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
@@ -0,0 +1,1169 @@
+//! This lint is used to collect metadata about clippy lints. This metadata is exported as a JSON
+//! file and then used to generate the [clippy lint list](https://rust-lang.github.io/rust-clippy/master/index.html)
+//!
+//! This module and therefore the entire lint is guarded by a feature flag called `internal`
+//!
+//! The module transforms all lint names to ASCII lowercase to ensure that we don't have mismatches
+//! during any comparison or mapping. (Please take care of this, it's not fun to spend time on such
+//! a simple mistake)
+
+use crate::renamed_lints::RENAMED_LINTS;
+use crate::utils::internal_lints::{extract_clippy_version_value, is_lint_ref_type};
+
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::ty::{match_type, walk_ptrs_ty_depth};
+use clippy_utils::{last_path_segment, match_def_path, match_function_call, match_path, paths};
+use if_chain::if_chain;
+use rustc_ast as ast;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::{
+ self as hir, def::DefKind, intravisit, intravisit::Visitor, Closure, ExprKind, Item, ItemKind, Mutability, QPath,
+};
+use rustc_lint::{CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId};
+use rustc_middle::hir::nested_filter;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::Ident;
+use rustc_span::{sym, Loc, Span, Symbol};
+use serde::{ser::SerializeStruct, Serialize, Serializer};
+use std::collections::BinaryHeap;
+use std::fmt;
+use std::fmt::Write as _;
+use std::fs::{self, OpenOptions};
+use std::io::prelude::*;
+use std::path::Path;
+use std::path::PathBuf;
+use std::process::Command;
+
+/// This is the output file of the lint collector.
+const OUTPUT_FILE: &str = "../util/gh-pages/lints.json";
+/// These lints are excluded from the export.
+const BLACK_LISTED_LINTS: &[&str] = &["lint_author", "dump_hir", "internal_metadata_collector"];
+/// These groups will be ignored by the lint group matcher. This is useful for collections like
+/// `clippy::all`
+const IGNORED_LINT_GROUPS: [&str; 1] = ["clippy::all"];
+/// Lints within this group will be excluded from the collection. These groups
+/// have to be defined without the `clippy::` prefix.
+const EXCLUDED_LINT_GROUPS: [&str; 1] = ["internal"];
+/// Collected deprecated lints will be assigned to this group in the JSON output
+const DEPRECATED_LINT_GROUP_STR: &str = "deprecated";
+/// This is the lint level for deprecated lints that will be displayed in the lint list
+const DEPRECATED_LINT_LEVEL: &str = "none";
+/// This array holds Clippy's lint groups with their corresponding default lint level. The
+/// lint level for deprecated lints is set in `DEPRECATED_LINT_LEVEL`.
+const DEFAULT_LINT_LEVELS: &[(&str, &str)] = &[
+ ("correctness", "deny"),
+ ("suspicious", "warn"),
+ ("restriction", "allow"),
+ ("style", "warn"),
+ ("pedantic", "allow"),
+ ("complexity", "warn"),
+ ("perf", "warn"),
+ ("cargo", "allow"),
+ ("nursery", "allow"),
+];
+/// This prefix is in front of the lint groups in the lint store. The prefix will be trimmed
+/// to only keep the actual lint group in the output.
+const CLIPPY_LINT_GROUP_PREFIX: &str = "clippy::";
+
+/// This template will be used to format the configuration section in the lint documentation.
+/// The `configurations` parameter will be replaced with one or multiple formatted
+/// `ClippyConfiguration` instances. See `CONFIGURATION_VALUE_TEMPLATE` for further customizations
+macro_rules! CONFIGURATION_SECTION_TEMPLATE {
+ () => {
+ r#"
+### Configuration
+This lint has the following configuration variables:
+
+{configurations}
+"#
+ };
+}
+/// This template will be used to format an individual `ClippyConfiguration` instance in the
+/// lint documentation.
+///
+/// The format function will provide strings for the following parameters: `name`, `ty`, `doc` and
+/// `default`
+macro_rules! CONFIGURATION_VALUE_TEMPLATE {
+ () => {
+ "* `{name}`: `{ty}`: {doc} (defaults to `{default}`)\n"
+ };
+}
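+// As an illustration with made-up values: formatting this template for a hypothetical
+// `foo-threshold` option of type `u64` with default `100` would produce
+// "* `foo-threshold`: `u64`: The maximum allowed foo (defaults to `100`)".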
+
+macro_rules! RENAMES_SECTION_TEMPLATE {
+ () => {
+ r#"
+### Past names
+
+{names}
+"#
+ };
+}
+macro_rules! RENAME_VALUE_TEMPLATE {
+ () => {
+ "* `{name}`\n"
+ };
+}
+
+const LINT_EMISSION_FUNCTIONS: [&[&str]; 7] = [
+ &["clippy_utils", "diagnostics", "span_lint"],
+ &["clippy_utils", "diagnostics", "span_lint_and_help"],
+ &["clippy_utils", "diagnostics", "span_lint_and_note"],
+ &["clippy_utils", "diagnostics", "span_lint_hir"],
+ &["clippy_utils", "diagnostics", "span_lint_and_sugg"],
+ &["clippy_utils", "diagnostics", "span_lint_and_then"],
+ &["clippy_utils", "diagnostics", "span_lint_hir_and_then"],
+];
+const SUGGESTION_DIAGNOSTIC_BUILDER_METHODS: [(&str, bool); 9] = [
+ ("span_suggestion", false),
+ ("span_suggestion_short", false),
+ ("span_suggestion_verbose", false),
+ ("span_suggestion_hidden", false),
+ ("tool_only_span_suggestion", false),
+ ("multipart_suggestion", true),
+ ("multipart_suggestions", true),
+ ("tool_only_multipart_suggestion", true),
+ ("span_suggestions", true),
+];
+const SUGGESTION_FUNCTIONS: [&[&str]; 2] = [
+ &["clippy_utils", "diagnostics", "multispan_sugg"],
+ &["clippy_utils", "diagnostics", "multispan_sugg_with_applicability"],
+];
+const DEPRECATED_LINT_TYPE: [&str; 3] = ["clippy_lints", "deprecated_lints", "ClippyDeprecatedLint"];
+
+/// The index of the applicability name of `paths::APPLICABILITY_VALUES`
+const APPLICABILITY_NAME_INDEX: usize = 2;
+/// This applicability will be set for unresolved applicability values.
+const APPLICABILITY_UNRESOLVED_STR: &str = "Unresolved";
+/// The version that will be displayed if none has been defined
+const VERSION_DEFAULT_STR: &str = "Unknown";
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Collects metadata about clippy lints for the website.
+ ///
+ /// This lint will be used to report problems of syntax parsing. You should hopefully never
+ /// see this but never say never I guess ^^
+ ///
+ /// ### Why is this bad?
+ /// This is not a bad thing but definitely a hacky way to do it. See
+ /// issue [#4310](https://github.com/rust-lang/rust-clippy/issues/4310) for a discussion
+ /// about the implementation.
+ ///
+ /// ### Known problems
+ /// Hopefully none. It would be pretty uncool to have a problem here :)
+ ///
+ /// ### Example output
+ /// ```json,ignore
+ /// {
+ /// "id": "internal_metadata_collector",
+ /// "id_span": {
+ /// "path": "clippy_lints/src/utils/internal_lints/metadata_collector.rs",
+ /// "line": 1
+ /// },
+ /// "group": "clippy::internal",
+ /// "docs": " ### What it does\nCollects metadata about clippy lints for the website. [...] "
+ /// }
+ /// ```
+ #[clippy::version = "1.56.0"]
+ pub INTERNAL_METADATA_COLLECTOR,
+ internal_warn,
+ "A busy bee collection metadata about lints"
+}
+
+impl_lint_pass!(MetadataCollector => [INTERNAL_METADATA_COLLECTOR]);
+
+#[allow(clippy::module_name_repetitions)]
+#[derive(Debug, Clone)]
+pub struct MetadataCollector {
+ /// All collected lints
+ ///
+ /// We use a Heap here to have the lints added in alphabetic order in the export
+ lints: BinaryHeap<LintMetadata>,
+ applicability_info: FxHashMap<String, ApplicabilityInfo>,
+ config: Vec<ClippyConfiguration>,
+ clippy_project_root: PathBuf,
+}
+
+impl MetadataCollector {
+ pub fn new() -> Self {
+ Self {
+ lints: BinaryHeap::<LintMetadata>::default(),
+ applicability_info: FxHashMap::<String, ApplicabilityInfo>::default(),
+ config: collect_configs(),
+ clippy_project_root: std::env::current_dir()
+ .expect("failed to get current dir")
+ .ancestors()
+ .nth(1)
+ .expect("failed to get project root")
+ .to_path_buf(),
+ }
+ }
+
+ fn get_lint_configs(&self, lint_name: &str) -> Option<String> {
+ self.config
+ .iter()
+ .filter(|config| config.lints.iter().any(|lint| lint == lint_name))
+ .map(ToString::to_string)
+ .reduce(|acc, x| acc + &x)
+ .map(|configurations| format!(CONFIGURATION_SECTION_TEMPLATE!(), configurations = configurations))
+ }
+}
+
+impl Drop for MetadataCollector {
+ /// You might ask: How hacky is this?
+ /// My answer: YES
+ fn drop(&mut self) {
+        // The metadata collector gets dropped twice; this makes sure that we only write
+        // once the lint list has been populated
+ if self.lints.is_empty() {
+ return;
+ }
+
+ let mut applicability_info = std::mem::take(&mut self.applicability_info);
+
+ // Mapping the final data
+ let mut lints = std::mem::take(&mut self.lints).into_sorted_vec();
+ for x in &mut lints {
+ x.applicability = Some(applicability_info.remove(&x.id).unwrap_or_default());
+ replace_produces(&x.id, &mut x.docs, &self.clippy_project_root);
+ }
+
+ collect_renames(&mut lints);
+
+ // Outputting
+ if Path::new(OUTPUT_FILE).exists() {
+ fs::remove_file(OUTPUT_FILE).unwrap();
+ }
+ let mut file = OpenOptions::new().write(true).create(true).open(OUTPUT_FILE).unwrap();
+ writeln!(file, "{}", serde_json::to_string_pretty(&lints).unwrap()).unwrap();
+ }
+}
+
+#[derive(Debug, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
+struct LintMetadata {
+ id: String,
+ id_span: SerializableSpan,
+ group: String,
+ level: String,
+ docs: String,
+ version: String,
+ /// This field is only used in the output and will only be
+ /// mapped shortly before the actual output.
+ applicability: Option<ApplicabilityInfo>,
+}
+
+impl LintMetadata {
+ fn new(
+ id: String,
+ id_span: SerializableSpan,
+ group: String,
+ level: &'static str,
+ version: String,
+ docs: String,
+ ) -> Self {
+ Self {
+ id,
+ id_span,
+ group,
+ level: level.to_string(),
+ version,
+ docs,
+ applicability: None,
+ }
+ }
+}
+
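+/// Replaces every `{{produces}}` marker in a lint's documentation with the rendered diagnostic
+/// output of the example in the preceding Rust code block (see `get_lint_output` below). The
+/// marker is rewritten into a collapsible `<details>` section containing the compiler output
+/// as a text code block.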
+fn replace_produces(lint_name: &str, docs: &mut String, clippy_project_root: &Path) {
+ let mut doc_lines = docs.lines().map(ToString::to_string).collect::<Vec<_>>();
+ let mut lines = doc_lines.iter_mut();
+
+ 'outer: loop {
+ // Find the start of the example
+
+ // ```rust
+ loop {
+ match lines.next() {
+ Some(line) if line.trim_start().starts_with("```rust") => {
+ if line.contains("ignore") || line.contains("no_run") {
+                        // A {{produces}} marker may have been put on an ignored code block by mistake,
+ // just seek to the end of the code block and continue checking.
+ if lines.any(|line| line.trim_start().starts_with("```")) {
+ continue;
+ }
+
+ panic!("lint `{}` has an unterminated code block", lint_name)
+ }
+
+ break;
+ },
+ Some(line) if line.trim_start() == "{{produces}}" => {
+ panic!(
+ "lint `{}` has marker {{{{produces}}}} with an ignored or missing code block",
+ lint_name
+ )
+ },
+ Some(line) => {
+ let line = line.trim();
+ // These are the two most common markers of the corrections section
+ if line.eq_ignore_ascii_case("Use instead:") || line.eq_ignore_ascii_case("Could be written as:") {
+ break 'outer;
+ }
+ },
+ None => break 'outer,
+ }
+ }
+
+ // Collect the example
+ let mut example = Vec::new();
+ loop {
+ match lines.next() {
+ Some(line) if line.trim_start() == "```" => break,
+ Some(line) => example.push(line),
+ None => panic!("lint `{}` has an unterminated code block", lint_name),
+ }
+ }
+
+ // Find the {{produces}} and attempt to generate the output
+ loop {
+ match lines.next() {
+ Some(line) if line.is_empty() => {},
+ Some(line) if line.trim() == "{{produces}}" => {
+ let output = get_lint_output(lint_name, &example, clippy_project_root);
+ line.replace_range(
+ ..,
+ &format!(
+ "<details>\
+ <summary>Produces</summary>\n\
+ \n\
+ ```text\n\
+ {}\n\
+ ```\n\
+ </details>",
+ output
+ ),
+ );
+
+ break;
+ },
+ // No {{produces}}, we can move on to the next example
+ Some(_) => break,
+ None => break 'outer,
+ }
+ }
+ }
+
+ *docs = cleanup_docs(&doc_lines);
+}
+
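+/// Compiles `example` as a stand-alone file with `clippy-driver` (wrapping it in `fn main` if
+/// needed) and returns the rendered diagnostics emitted for `lint_name`. Note that this spawns
+/// one `cargo run --bin clippy-driver` invocation per processed example.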
+fn get_lint_output(lint_name: &str, example: &[&mut String], clippy_project_root: &Path) -> String {
+ let dir = tempfile::tempdir().unwrap_or_else(|e| panic!("failed to create temp dir: {e}"));
+ let file = dir.path().join("lint_example.rs");
+
+ let mut source = String::new();
+ let unhidden = example
+ .iter()
+ .map(|line| line.trim_start().strip_prefix("# ").unwrap_or(line));
+
+ // Get any attributes
+ let mut lines = unhidden.peekable();
+ while let Some(line) = lines.peek() {
+ if line.starts_with("#!") {
+ source.push_str(line);
+ source.push('\n');
+ lines.next();
+ } else {
+ break;
+ }
+ }
+
+ let needs_main = !example.iter().any(|line| line.contains("fn main"));
+ if needs_main {
+ source.push_str("fn main() {\n");
+ }
+
+ for line in lines {
+ source.push_str(line);
+ source.push('\n');
+ }
+
+ if needs_main {
+ source.push_str("}\n");
+ }
+
+ if let Err(e) = fs::write(&file, &source) {
+ panic!("failed to write to `{}`: {e}", file.as_path().to_string_lossy());
+ }
+
+ let prefixed_name = format!("{}{lint_name}", CLIPPY_LINT_GROUP_PREFIX);
+
+ let mut cmd = Command::new("cargo");
+
+ cmd.current_dir(clippy_project_root)
+ .env("CARGO_INCREMENTAL", "0")
+ .env("CLIPPY_ARGS", "")
+ .env("CLIPPY_DISABLE_DOCS_LINKS", "1")
+ // We need to disable this to enable all lints
+ .env("ENABLE_METADATA_COLLECTION", "0")
+ .args(["run", "--bin", "clippy-driver"])
+ .args(["--target-dir", "./clippy_lints/target"])
+ .args(["--", "--error-format=json"])
+ .args(["--edition", "2021"])
+ .arg("-Cdebuginfo=0")
+ .args(["-A", "clippy::all"])
+ .args(["-W", &prefixed_name])
+ .args(["-L", "./target/debug"])
+ .args(["-Z", "no-codegen"]);
+
+ let output = cmd
+ .arg(file.as_path())
+ .output()
+ .unwrap_or_else(|e| panic!("failed to run `{:?}`: {e}", cmd));
+
+ let tmp_file_path = file.to_string_lossy();
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+ let msgs = stderr
+ .lines()
+ .filter(|line| line.starts_with('{'))
+ .map(|line| serde_json::from_str(line).unwrap())
+ .collect::<Vec<serde_json::Value>>();
+
+ let mut rendered = String::new();
+ let iter = msgs
+ .iter()
+ .filter(|msg| matches!(&msg["code"]["code"], serde_json::Value::String(s) if s == &prefixed_name));
+
+ for message in iter {
+ let rendered_part = message["rendered"].as_str().expect("rendered field should exist");
+ rendered.push_str(rendered_part);
+ }
+
+ if rendered.is_empty() {
+ let rendered: Vec<&str> = msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect();
+ let non_json: Vec<&str> = stderr.lines().filter(|line| !line.starts_with('{')).collect();
+ panic!(
+ "did not find lint `{}` in output of example, got:\n{}\n{}",
+ lint_name,
+ non_json.join("\n"),
+ rendered.join("\n")
+ );
+ }
+
+ // The reader doesn't need to see `/tmp/.tmpfiy2Qd/lint_example.rs` :)
+ rendered.trim_end().replace(&*tmp_file_path, "lint_example.rs")
+}
+
+#[derive(Debug, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
+struct SerializableSpan {
+ path: String,
+ line: usize,
+}
+
+impl fmt::Display for SerializableSpan {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}:{}", self.path.rsplit('/').next().unwrap_or_default(), self.line)
+ }
+}
+
+impl SerializableSpan {
+ fn from_item(cx: &LateContext<'_>, item: &Item<'_>) -> Self {
+ Self::from_span(cx, item.ident.span)
+ }
+
+ fn from_span(cx: &LateContext<'_>, span: Span) -> Self {
+ let loc: Loc = cx.sess().source_map().lookup_char_pos(span.lo());
+
+ Self {
+ path: format!("{}", loc.file.name.prefer_remapped()),
+ line: loc.line,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct ApplicabilityInfo {
+ /// Indicates if any of the lint emissions uses multiple spans. This is related to
+ /// [rustfix#141](https://github.com/rust-lang/rustfix/issues/141) as such suggestions can
+ /// currently not be applied automatically.
+ is_multi_part_suggestion: bool,
+ applicability: Option<usize>,
+}
+
+impl Serialize for ApplicabilityInfo {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut s = serializer.serialize_struct("ApplicabilityInfo", 2)?;
+ s.serialize_field("is_multi_part_suggestion", &self.is_multi_part_suggestion)?;
+ if let Some(index) = self.applicability {
+ s.serialize_field(
+ "applicability",
+ &paths::APPLICABILITY_VALUES[index][APPLICABILITY_NAME_INDEX],
+ )?;
+ } else {
+ s.serialize_field("applicability", APPLICABILITY_UNRESOLVED_STR)?;
+ }
+ s.end()
+ }
+}
+
+// ==================================================================
+// Configuration
+// ==================================================================
+#[derive(Debug, Clone, Default)]
+pub struct ClippyConfiguration {
+ name: String,
+ config_type: &'static str,
+ default: String,
+ lints: Vec<String>,
+ doc: String,
+ #[allow(dead_code)]
+ deprecation_reason: Option<&'static str>,
+}
+
+impl ClippyConfiguration {
+ pub fn new(
+ name: &'static str,
+ config_type: &'static str,
+ default: String,
+ doc_comment: &'static str,
+ deprecation_reason: Option<&'static str>,
+ ) -> Self {
+ let (lints, doc) = parse_config_field_doc(doc_comment)
+ .unwrap_or_else(|| (vec![], "[ERROR] MALFORMED DOC COMMENT".to_string()));
+
+ Self {
+ name: to_kebab(name),
+ lints,
+ doc,
+ config_type,
+ default,
+ deprecation_reason,
+ }
+ }
+}
+
+fn collect_configs() -> Vec<ClippyConfiguration> {
+ crate::utils::conf::metadata::get_configuration_metadata()
+}
+
+/// This parses the field documentation of the config struct.
+///
+/// ```rust, ignore
+/// parse_config_field_doc(cx, "Lint: LINT_NAME_1, LINT_NAME_2. Papa penguin, papa penguin")
+/// ```
+///
+/// Would yield:
+/// ```rust, ignore
+/// Some(["lint_name_1", "lint_name_2"], "Papa penguin, papa penguin")
+/// ```
+fn parse_config_field_doc(doc_comment: &str) -> Option<(Vec<String>, String)> {
+ const DOC_START: &str = " Lint: ";
+ if_chain! {
+ if doc_comment.starts_with(DOC_START);
+ if let Some(split_pos) = doc_comment.find('.');
+ then {
+ let mut doc_comment = doc_comment.to_string();
+ let mut documentation = doc_comment.split_off(split_pos);
+
+ // Extract lints
+ doc_comment.make_ascii_lowercase();
+ let lints: Vec<String> = doc_comment.split_off(DOC_START.len()).split(", ").map(str::to_string).collect();
+
+ // Format documentation correctly
+ // split off leading `.` from lint name list and indent for correct formatting
+ documentation = documentation.trim_start_matches('.').trim().replace("\n ", "\n ");
+
+ Some((lints, documentation))
+ } else {
+ None
+ }
+ }
+}
+
+/// Transforms a given `snake_case_string` to a tasty `kebab-case-string`
+fn to_kebab(config_name: &str) -> String {
+ config_name.replace('_', "-")
+}
+
+impl fmt::Display for ClippyConfiguration {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ CONFIGURATION_VALUE_TEMPLATE!(),
+ name = self.name,
+ ty = self.config_type,
+ doc = self.doc,
+ default = self.default
+ )
+ }
+}
+
+// ==================================================================
+// Lint pass
+// ==================================================================
+impl<'hir> LateLintPass<'hir> for MetadataCollector {
+ /// Collecting lint declarations like:
+ /// ```rust, ignore
+ /// declare_clippy_lint! {
+ /// /// ### What it does
+ /// /// Something IDK.
+ /// pub SOME_LINT,
+ /// internal,
+ /// "Who am I?"
+ /// }
+ /// ```
+ fn check_item(&mut self, cx: &LateContext<'hir>, item: &'hir Item<'_>) {
+ if let ItemKind::Static(ty, Mutability::Not, _) = item.kind {
+ // Normal lint
+ if_chain! {
+ // item validation
+ if is_lint_ref_type(cx, ty);
+ // blacklist check
+ let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
+ if !BLACK_LISTED_LINTS.contains(&lint_name.as_str());
+ // metadata extraction
+ if let Some((group, level)) = get_lint_group_and_level_or_lint(cx, &lint_name, item);
+ if let Some(mut raw_docs) = extract_attr_docs_or_lint(cx, item);
+ then {
+ if let Some(configuration_section) = self.get_lint_configs(&lint_name) {
+ raw_docs.push_str(&configuration_section);
+ }
+ let version = get_lint_version(cx, item);
+
+ self.lints.push(LintMetadata::new(
+ lint_name,
+ SerializableSpan::from_item(cx, item),
+ group,
+ level,
+ version,
+ raw_docs,
+ ));
+ }
+ }
+
+ if_chain! {
+ if is_deprecated_lint(cx, ty);
+ // blacklist check
+ let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
+ if !BLACK_LISTED_LINTS.contains(&lint_name.as_str());
+            // Collect what little metadata we can get from a deprecated lint
+ if let Some(raw_docs) = extract_attr_docs_or_lint(cx, item);
+ then {
+ let version = get_lint_version(cx, item);
+
+ self.lints.push(LintMetadata::new(
+ lint_name,
+ SerializableSpan::from_item(cx, item),
+ DEPRECATED_LINT_GROUP_STR.to_string(),
+ DEPRECATED_LINT_LEVEL,
+ version,
+ raw_docs,
+ ));
+ }
+ }
+ }
+ }
+
+ /// Collecting constant applicability from the actual lint emissions
+ ///
+ /// Example:
+ /// ```rust, ignore
+ /// span_lint_and_sugg(
+ /// cx,
+ /// SOME_LINT,
+ /// item.span,
+ /// "Le lint message",
+ /// "Here comes help:",
+ /// "#![allow(clippy::all)]",
+ /// Applicability::MachineApplicable, // <-- Extracts this constant value
+ /// );
+ /// ```
+ fn check_expr(&mut self, cx: &LateContext<'hir>, expr: &'hir hir::Expr<'_>) {
+ if let Some(args) = match_lint_emission(cx, expr) {
+ let emission_info = extract_emission_info(cx, args);
+ if emission_info.is_empty() {
+ // See:
+ // - src/misc.rs:734:9
+ // - src/methods/mod.rs:3545:13
+ // - src/methods/mod.rs:3496:13
+ // We are basically unable to resolve the lint name itself.
+ return;
+ }
+
+ for (lint_name, applicability, is_multi_part) in emission_info {
+ let app_info = self.applicability_info.entry(lint_name).or_default();
+ app_info.applicability = applicability;
+ app_info.is_multi_part_suggestion = is_multi_part;
+ }
+ }
+ }
+}
+
+// ==================================================================
+// Lint definition extraction
+// ==================================================================
+fn sym_to_string(sym: Symbol) -> String {
+ sym.as_str().to_string()
+}
+
+fn extract_attr_docs_or_lint(cx: &LateContext<'_>, item: &Item<'_>) -> Option<String> {
+ extract_attr_docs(cx, item).or_else(|| {
+ lint_collection_error_item(cx, item, "could not collect the lint documentation");
+ None
+ })
+}
+
+/// This function collects all documentation that has been added to an item using
+/// `#[doc = r""]` attributes. Several attributes are aggregated using line breaks
+///
+/// ```ignore
+/// #[doc = r"Hello world!"]
+/// #[doc = r"=^.^="]
+/// struct SomeItem {}
+/// ```
+///
+/// Would result in `Hello world!\n=^.^=\n`
+fn extract_attr_docs(cx: &LateContext<'_>, item: &Item<'_>) -> Option<String> {
+ let attrs = cx.tcx.hir().attrs(item.hir_id());
+ let mut lines = attrs.iter().filter_map(ast::Attribute::doc_str);
+
+ if let Some(line) = lines.next() {
+ let raw_docs = lines.fold(String::from(line.as_str()) + "\n", |s, line| s + line.as_str() + "\n");
+ return Some(raw_docs);
+ }
+
+ None
+}
+
+/// This function may modify the doc comment to ensure that the string can be displayed using a
+/// markdown viewer in Clippy's lint list. The following modifications could be applied:
+/// * Removal of leading space after a new line. (Important to display tables)
+/// * Ensures that code blocks only contain language information
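+///
+/// For instance (an illustrative case, not taken from a real lint), a Rust code block marked
+/// with the `ignore` directive keeps only its language (`rust`), hidden lines starting with
+/// `# ` inside Rust code blocks are dropped, and the single leading space added by the macro
+/// expansion is stripped from every other line.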
+fn cleanup_docs(docs_collection: &Vec<String>) -> String {
+ let mut in_code_block = false;
+ let mut is_code_block_rust = false;
+
+ let mut docs = String::new();
+ for line in docs_collection {
+ // Rustdoc hides code lines starting with `# ` and this removes them from Clippy's lint list :)
+ if is_code_block_rust && line.trim_start().starts_with("# ") {
+ continue;
+ }
+
+ // The line should be represented in the lint list, even if it's just an empty line
+ docs.push('\n');
+ if let Some(info) = line.trim_start().strip_prefix("```") {
+ in_code_block = !in_code_block;
+ is_code_block_rust = false;
+ if in_code_block {
+ let lang = info
+ .trim()
+ .split(',')
+ // remove rustdoc directives
+ .find(|&s| !matches!(s, "" | "ignore" | "no_run" | "should_panic"))
+ // if no language is present, fill in "rust"
+ .unwrap_or("rust");
+ docs.push_str("```");
+ docs.push_str(lang);
+
+ is_code_block_rust = lang == "rust";
+ continue;
+ }
+ }
+ // This removes the leading space that the macro translation introduces
+ if let Some(stripped_doc) = line.strip_prefix(' ') {
+ docs.push_str(stripped_doc);
+ } else if !line.is_empty() {
+ docs.push_str(line);
+ }
+ }
+
+ docs
+}
+
+fn get_lint_version(cx: &LateContext<'_>, item: &Item<'_>) -> String {
+ extract_clippy_version_value(cx, item).map_or_else(
+ || VERSION_DEFAULT_STR.to_string(),
+ |version| version.as_str().to_string(),
+ )
+}
+
+fn get_lint_group_and_level_or_lint(
+ cx: &LateContext<'_>,
+ lint_name: &str,
+ item: &Item<'_>,
+) -> Option<(String, &'static str)> {
+ let result = cx.lint_store.check_lint_name(
+ lint_name,
+ Some(sym::clippy),
+ &[Ident::with_dummy_span(sym::clippy)].into_iter().collect(),
+ );
+ if let CheckLintNameResult::Tool(Ok(lint_lst)) = result {
+ if let Some(group) = get_lint_group(cx, lint_lst[0]) {
+ if EXCLUDED_LINT_GROUPS.contains(&group.as_str()) {
+ return None;
+ }
+
+ if let Some(level) = get_lint_level_from_group(&group) {
+ Some((group, level))
+ } else {
+ lint_collection_error_item(
+ cx,
+ item,
+ &format!("Unable to determine lint level for found group `{}`", group),
+ );
+ None
+ }
+ } else {
+ lint_collection_error_item(cx, item, "Unable to determine lint group");
+ None
+ }
+ } else {
+ lint_collection_error_item(cx, item, "Unable to find lint in lint_store");
+ None
+ }
+}
+
+fn get_lint_group(cx: &LateContext<'_>, lint_id: LintId) -> Option<String> {
+ for (group_name, lints, _) in cx.lint_store.get_lint_groups() {
+ if IGNORED_LINT_GROUPS.contains(&group_name) {
+ continue;
+ }
+
+ if lints.iter().any(|group_lint| *group_lint == lint_id) {
+ let group = group_name.strip_prefix(CLIPPY_LINT_GROUP_PREFIX).unwrap_or(group_name);
+ return Some((*group).to_string());
+ }
+ }
+
+ None
+}
+
+fn get_lint_level_from_group(lint_group: &str) -> Option<&'static str> {
+ DEFAULT_LINT_LEVELS
+ .iter()
+ .find_map(|(group_name, group_level)| (*group_name == lint_group).then_some(*group_level))
+}
+
+pub(super) fn is_deprecated_lint(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool {
+ if let hir::TyKind::Path(ref path) = ty.kind {
+ if let hir::def::Res::Def(DefKind::Struct, def_id) = cx.qpath_res(path, ty.hir_id) {
+ return match_def_path(cx, def_id, &DEPRECATED_LINT_TYPE);
+ }
+ }
+
+ false
+}
+
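+/// Appends a "Past names" section to the documentation of every lint that appears as a rename
+/// target in `RENAMED_LINTS`, following the rename mapping transitively so that chains of
+/// renames are listed as well.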
+fn collect_renames(lints: &mut Vec<LintMetadata>) {
+ for lint in lints {
+ let mut collected = String::new();
+ let mut names = vec![lint.id.clone()];
+
+ loop {
+ if let Some(lint_name) = names.pop() {
+ for (k, v) in RENAMED_LINTS {
+ if_chain! {
+ if let Some(name) = v.strip_prefix(CLIPPY_LINT_GROUP_PREFIX);
+ if name == lint_name;
+ if let Some(past_name) = k.strip_prefix(CLIPPY_LINT_GROUP_PREFIX);
+ then {
+ write!(collected, RENAME_VALUE_TEMPLATE!(), name = past_name).unwrap();
+ names.push(past_name.to_string());
+ }
+ }
+ }
+
+ continue;
+ }
+
+ break;
+ }
+
+ if !collected.is_empty() {
+ write!(&mut lint.docs, RENAMES_SECTION_TEMPLATE!(), names = collected).unwrap();
+ }
+ }
+}
+
+// ==================================================================
+// Lint emission
+// ==================================================================
+fn lint_collection_error_item(cx: &LateContext<'_>, item: &Item<'_>, message: &str) {
+ span_lint(
+ cx,
+ INTERNAL_METADATA_COLLECTOR,
+ item.ident.span,
+ &format!("metadata collection error for `{}`: {}", item.ident.name, message),
+ );
+}
+
+// ==================================================================
+// Applicability
+// ==================================================================
+/// This function checks whether a given expression is a call to one of the known lint emission
+/// functions. It returns the call's arguments if the expression matched any of them.
+fn match_lint_emission<'hir>(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'_>) -> Option<&'hir [hir::Expr<'hir>]> {
+ LINT_EMISSION_FUNCTIONS
+ .iter()
+ .find_map(|emission_fn| match_function_call(cx, expr, emission_fn))
+}
+
+fn take_higher_applicability(a: Option<usize>, b: Option<usize>) -> Option<usize> {
+ a.map_or(b, |a| a.max(b.unwrap_or_default()).into())
+}
+
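+/// Walks the arguments of a matched lint emission call and extracts, for every lint the call
+/// might emit, a `(lint_name, applicability_index, is_multi_part_suggestion)` tuple. The
+/// applicability index refers to an entry of `paths::APPLICABILITY_VALUES`.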
+fn extract_emission_info<'hir>(
+ cx: &LateContext<'hir>,
+ args: &'hir [hir::Expr<'hir>],
+) -> Vec<(String, Option<usize>, bool)> {
+ let mut lints = Vec::new();
+ let mut applicability = None;
+ let mut multi_part = false;
+
+ for arg in args {
+ let (arg_ty, _) = walk_ptrs_ty_depth(cx.typeck_results().expr_ty(arg));
+
+ if match_type(cx, arg_ty, &paths::LINT) {
+ // If we found the lint arg, extract the lint name
+ let mut resolved_lints = resolve_lints(cx, arg);
+ lints.append(&mut resolved_lints);
+ } else if match_type(cx, arg_ty, &paths::APPLICABILITY) {
+ applicability = resolve_applicability(cx, arg);
+ } else if arg_ty.is_closure() {
+ multi_part |= check_is_multi_part(cx, arg);
+ applicability = applicability.or_else(|| resolve_applicability(cx, arg));
+ }
+ }
+
+ lints
+ .into_iter()
+ .map(|lint_name| (lint_name, applicability, multi_part))
+ .collect()
+}
+
+/// Resolves the possible lints that this expression could reference
+fn resolve_lints<'hir>(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Vec<String> {
+ let mut resolver = LintResolver::new(cx);
+ resolver.visit_expr(expr);
+ resolver.lints
+}
+
+/// This function tries to resolve the applicability value linked to the given expression.
+fn resolve_applicability<'hir>(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Option<usize> {
+ let mut resolver = ApplicabilityResolver::new(cx);
+ resolver.visit_expr(expr);
+ resolver.complete()
+}
+
+fn check_is_multi_part<'hir>(cx: &LateContext<'hir>, closure_expr: &'hir hir::Expr<'hir>) -> bool {
+ if let ExprKind::Closure(&Closure { body, .. }) = closure_expr.kind {
+ let mut scanner = IsMultiSpanScanner::new(cx);
+ intravisit::walk_body(&mut scanner, cx.tcx.hir().body(body));
+ return scanner.is_multi_part();
+ } else if let Some(local) = get_parent_local(cx, closure_expr) {
+ if let Some(local_init) = local.init {
+ return check_is_multi_part(cx, local_init);
+ }
+ }
+
+ false
+}
+
+struct LintResolver<'a, 'hir> {
+ cx: &'a LateContext<'hir>,
+ lints: Vec<String>,
+}
+
+impl<'a, 'hir> LintResolver<'a, 'hir> {
+ fn new(cx: &'a LateContext<'hir>) -> Self {
+ Self {
+ cx,
+ lints: Vec::<String>::default(),
+ }
+ }
+}
+
+impl<'a, 'hir> intravisit::Visitor<'hir> for LintResolver<'a, 'hir> {
+ type NestedFilter = nested_filter::All;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
+ if_chain! {
+ if let ExprKind::Path(qpath) = &expr.kind;
+ if let QPath::Resolved(_, path) = qpath;
+
+ let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr));
+ if match_type(self.cx, expr_ty, &paths::LINT);
+ then {
+ if let hir::def::Res::Def(DefKind::Static(..), _) = path.res {
+ let lint_name = last_path_segment(qpath).ident.name;
+ self.lints.push(sym_to_string(lint_name).to_ascii_lowercase());
+ } else if let Some(local) = get_parent_local(self.cx, expr) {
+ if let Some(local_init) = local.init {
+ intravisit::walk_expr(self, local_init);
+ }
+ }
+ }
+ }
+
+ intravisit::walk_expr(self, expr);
+ }
+}
+
+/// This visitor finds the highest applicability value in the visited expressions
+struct ApplicabilityResolver<'a, 'hir> {
+ cx: &'a LateContext<'hir>,
+    /// This is the index of the highest `Applicability` in `paths::APPLICABILITY_VALUES`
+ applicability_index: Option<usize>,
+}
+
+impl<'a, 'hir> ApplicabilityResolver<'a, 'hir> {
+ fn new(cx: &'a LateContext<'hir>) -> Self {
+ Self {
+ cx,
+ applicability_index: None,
+ }
+ }
+
+ fn add_new_index(&mut self, new_index: usize) {
+ self.applicability_index = take_higher_applicability(self.applicability_index, Some(new_index));
+ }
+
+ fn complete(self) -> Option<usize> {
+ self.applicability_index
+ }
+}
+
+impl<'a, 'hir> intravisit::Visitor<'hir> for ApplicabilityResolver<'a, 'hir> {
+ type NestedFilter = nested_filter::All;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_path(&mut self, path: &'hir hir::Path<'hir>, _id: hir::HirId) {
+ for (index, enum_value) in paths::APPLICABILITY_VALUES.iter().enumerate() {
+ if match_path(path, enum_value) {
+ self.add_new_index(index);
+ return;
+ }
+ }
+ }
+
+ fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
+ let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr));
+
+ if_chain! {
+ if match_type(self.cx, expr_ty, &paths::APPLICABILITY);
+ if let Some(local) = get_parent_local(self.cx, expr);
+ if let Some(local_init) = local.init;
+ then {
+ intravisit::walk_expr(self, local_init);
+ }
+ };
+
+ intravisit::walk_expr(self, expr);
+ }
+}
+
+/// Returns the parent `Local` node if the expression is a path referencing a local binding
+fn get_parent_local<'hir>(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Option<&'hir hir::Local<'hir>> {
+ if let ExprKind::Path(QPath::Resolved(_, path)) = expr.kind {
+ if let hir::def::Res::Local(local_hir) = path.res {
+ return get_parent_local_hir_id(cx, local_hir);
+ }
+ }
+
+ None
+}
+
+fn get_parent_local_hir_id<'hir>(cx: &LateContext<'hir>, hir_id: hir::HirId) -> Option<&'hir hir::Local<'hir>> {
+ let map = cx.tcx.hir();
+
+ match map.find(map.get_parent_node(hir_id)) {
+ Some(hir::Node::Local(local)) => Some(local),
+ Some(hir::Node::Pat(pattern)) => get_parent_local_hir_id(cx, pattern.hir_id),
+ _ => None,
+ }
+}
+
+/// This visitor counts the suggestions emitted in the visited expressions to determine
+/// whether a lint emission produces a multi-part suggestion
+struct IsMultiSpanScanner<'a, 'hir> {
+ cx: &'a LateContext<'hir>,
+ suggestion_count: usize,
+}
+
+impl<'a, 'hir> IsMultiSpanScanner<'a, 'hir> {
+ fn new(cx: &'a LateContext<'hir>) -> Self {
+ Self {
+ cx,
+ suggestion_count: 0,
+ }
+ }
+
+    /// Adds a new single span suggestion to the counter
+ fn add_single_span_suggestion(&mut self) {
+ self.suggestion_count += 1;
+ }
+
+ /// Signals that a suggestion with possible multiple spans was found
+ fn add_multi_part_suggestion(&mut self) {
+ self.suggestion_count += 2;
+ }
+
+ /// Checks if the suggestions include multiple spans
+ fn is_multi_part(&self) -> bool {
+ self.suggestion_count > 1
+ }
+}
+
+impl<'a, 'hir> intravisit::Visitor<'hir> for IsMultiSpanScanner<'a, 'hir> {
+ type NestedFilter = nested_filter::All;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
+ // Early return if the lint is already multi span
+ if self.is_multi_part() {
+ return;
+ }
+
+ match &expr.kind {
+ ExprKind::Call(fn_expr, _args) => {
+ let found_function = SUGGESTION_FUNCTIONS
+ .iter()
+ .any(|func_path| match_function_call(self.cx, fn_expr, func_path).is_some());
+ if found_function {
+ // These functions are all multi part suggestions
+ self.add_single_span_suggestion();
+ }
+ },
+ ExprKind::MethodCall(path, arg, _arg_span) => {
+ let (self_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(&arg[0]));
+ if match_type(self.cx, self_ty, &paths::DIAGNOSTIC_BUILDER) {
+ let called_method = path.ident.name.as_str().to_string();
+ for (method_name, is_multi_part) in &SUGGESTION_DIAGNOSTIC_BUILDER_METHODS {
+ if *method_name == called_method {
+ if *is_multi_part {
+ self.add_multi_part_suggestion();
+ } else {
+ self.add_single_span_suggestion();
+ }
+ break;
+ }
+ }
+ }
+ },
+ _ => {},
+ }
+
+ intravisit::walk_expr(self, expr);
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/utils/mod.rs b/src/tools/clippy/clippy_lints/src/utils/mod.rs
new file mode 100644
index 000000000..787e9fd98
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/utils/mod.rs
@@ -0,0 +1,5 @@
+pub mod author;
+pub mod conf;
+pub mod dump_hir;
+#[cfg(feature = "internal")]
+pub mod internal_lints;
diff --git a/src/tools/clippy/clippy_lints/src/vec.rs b/src/tools/clippy/clippy_lints/src/vec.rs
new file mode 100644
index 000000000..297a80e57
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/vec.rs
@@ -0,0 +1,164 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::is_copy;
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, Ty};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::source_map::Span;
+
+#[expect(clippy::module_name_repetitions)]
+#[derive(Copy, Clone)]
+pub struct UselessVec {
+ pub too_large_for_stack: u64,
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `&vec![..]` when using `&[..]` would
+ /// be possible.
+ ///
+ /// ### Why is this bad?
+ /// This is less efficient.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(_x: &[u8]) {}
+ ///
+ /// foo(&vec![1, 2]);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # fn foo(_x: &[u8]) {}
+ /// foo(&[1, 2]);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USELESS_VEC,
+ perf,
+ "useless `vec!`"
+}
+
+impl_lint_pass!(UselessVec => [USELESS_VEC]);
+
+impl<'tcx> LateLintPass<'tcx> for UselessVec {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // search for `&vec![_]` expressions where the adjusted type is `&[_]`
+ if_chain! {
+ if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty_adjusted(expr).kind();
+ if let ty::Slice(..) = ty.kind();
+ if let ExprKind::AddrOf(BorrowKind::Ref, mutability, addressee) = expr.kind;
+ if let Some(vec_args) = higher::VecArgs::hir(cx, addressee);
+ then {
+ self.check_vec_macro(cx, &vec_args, mutability, expr.span);
+ }
+ }
+
+ // search for `for _ in vec![…]`
+ if_chain! {
+ if let Some(higher::ForLoop { arg, .. }) = higher::ForLoop::hir(expr);
+ if let Some(vec_args) = higher::VecArgs::hir(cx, arg);
+ if is_copy(cx, vec_type(cx.typeck_results().expr_ty_adjusted(arg)));
+ then {
+ // report the error around the `vec!` not inside `<std macros>:`
+ let span = arg.span.ctxt().outer_expn_data().call_site;
+ self.check_vec_macro(cx, &vec_args, Mutability::Not, span);
+ }
+ }
+ }
+}
+
+impl UselessVec {
+ fn check_vec_macro<'tcx>(
+ self,
+ cx: &LateContext<'tcx>,
+ vec_args: &higher::VecArgs<'tcx>,
+ mutability: Mutability,
+ span: Span,
+ ) {
+ let mut applicability = Applicability::MachineApplicable;
+ let snippet = match *vec_args {
+ higher::VecArgs::Repeat(elem, len) => {
+ if let Some((Constant::Int(len_constant), _)) = constant(cx, cx.typeck_results(), len) {
+ #[expect(clippy::cast_possible_truncation)]
+ if len_constant as u64 * size_of(cx, elem) > self.too_large_for_stack {
+ return;
+ }
+
+ match mutability {
+ Mutability::Mut => {
+ format!(
+ "&mut [{}; {}]",
+ snippet_with_applicability(cx, elem.span, "elem", &mut applicability),
+ snippet_with_applicability(cx, len.span, "len", &mut applicability)
+ )
+ },
+ Mutability::Not => {
+ format!(
+ "&[{}; {}]",
+ snippet_with_applicability(cx, elem.span, "elem", &mut applicability),
+ snippet_with_applicability(cx, len.span, "len", &mut applicability)
+ )
+ },
+ }
+ } else {
+ return;
+ }
+ },
+ higher::VecArgs::Vec(args) => {
+ if let Some(last) = args.iter().last() {
+ if args.len() as u64 * size_of(cx, last) > self.too_large_for_stack {
+ return;
+ }
+ let span = args[0].span.to(last.span);
+
+ match mutability {
+ Mutability::Mut => {
+ format!(
+ "&mut [{}]",
+ snippet_with_applicability(cx, span, "..", &mut applicability)
+ )
+ },
+ Mutability::Not => {
+ format!("&[{}]", snippet_with_applicability(cx, span, "..", &mut applicability))
+ },
+ }
+ } else {
+ match mutability {
+ Mutability::Mut => "&mut []".into(),
+ Mutability::Not => "&[]".into(),
+ }
+ }
+ },
+ };
+
+ span_lint_and_sugg(
+ cx,
+ USELESS_VEC,
+ span,
+ "useless use of `vec!`",
+ "you can use a slice directly",
+ snippet,
+ applicability,
+ );
+ }
+}
+
+fn size_of(cx: &LateContext<'_>, expr: &Expr<'_>) -> u64 {
+ let ty = cx.typeck_results().expr_ty_adjusted(expr);
+ cx.layout_of(ty).map_or(0, |l| l.size.bytes())
+}
+
+/// Returns the item type of the vector (i.e., the `T` in `Vec<T>`).
+fn vec_type(ty: Ty<'_>) -> Ty<'_> {
+ if let ty::Adt(_, substs) = ty.kind() {
+ substs.type_at(0)
+ } else {
+        panic!("The type of `vec!` is not a struct?");
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
new file mode 100644
index 000000000..d77a21d66
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
@@ -0,0 +1,225 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher::{get_vec_init_kind, VecInitKind};
+use clippy_utils::source::snippet;
+use clippy_utils::visitors::for_each_local_use_after_expr;
+use clippy_utils::{get_parent_expr, path_to_local_id};
+use core::ops::ControlFlow;
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{
+ BindingAnnotation, Block, Expr, ExprKind, HirId, Local, Mutability, PatKind, QPath, Stmt, StmtKind, UnOp,
+};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::{Span, Symbol};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for calls to `push` immediately after creating a new `Vec`.
+ ///
+ /// If the `Vec` is created using `with_capacity` this will only lint if the capacity is a
+ /// constant and the number of pushes is greater than or equal to the initial capacity.
+ ///
+ /// If the `Vec` is extended after the initial sequence of pushes and it was default initialized
+ /// then this will only lint after there were at least four pushes. This number may change in
+ /// the future.
+ ///
+ /// ### Why is this bad?
+ /// The `vec![]` macro is both more performant and easier to read than
+ /// multiple `push` calls.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let mut v = Vec::new();
+ /// v.push(0);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let v = vec![0];
+ /// ```
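+    ///
+    /// A sketch of the `with_capacity` rule described above: this only lints once the number
+    /// of pushes reaches the constant capacity.
+    /// ```rust
+    /// let mut v = Vec::with_capacity(2);
+    /// v.push(0);
+    /// v.push(1);
+    /// ```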
+ #[clippy::version = "1.51.0"]
+ pub VEC_INIT_THEN_PUSH,
+ perf,
+ "`push` immediately after `Vec` creation"
+}
+
+impl_lint_pass!(VecInitThenPush => [VEC_INIT_THEN_PUSH]);
+
+#[derive(Default)]
+pub struct VecInitThenPush {
+ searcher: Option<VecPushSearcher>,
+}
+
+struct VecPushSearcher {
+ local_id: HirId,
+ init: VecInitKind,
+ lhs_is_let: bool,
+ let_ty_span: Option<Span>,
+ name: Symbol,
+ err_span: Span,
+ found: u128,
+ last_push_expr: HirId,
+}
+impl VecPushSearcher {
+ fn display_err(&self, cx: &LateContext<'_>) {
+ let required_pushes_before_extension = match self.init {
+ _ if self.found == 0 => return,
+ VecInitKind::WithConstCapacity(x) if x > self.found => return,
+ VecInitKind::WithConstCapacity(x) => x,
+ VecInitKind::WithExprCapacity(_) => return,
+ _ => 3,
+ };
+
+ let mut needs_mut = false;
+ let res = for_each_local_use_after_expr(cx, self.local_id, self.last_push_expr, |e| {
+ let Some(parent) = get_parent_expr(cx, e) else {
+ return ControlFlow::Continue(())
+ };
+ let adjusted_ty = cx.typeck_results().expr_ty_adjusted(e);
+ let adjusted_mut = adjusted_ty.ref_mutability().unwrap_or(Mutability::Not);
+ needs_mut |= adjusted_mut == Mutability::Mut;
+ match parent.kind {
+ ExprKind::AddrOf(_, Mutability::Mut, _) => {
+ needs_mut = true;
+ return ControlFlow::Break(true);
+ },
+ ExprKind::Unary(UnOp::Deref, _) | ExprKind::Index(..) if !needs_mut => {
+ let mut last_place = parent;
+ while let Some(parent) = get_parent_expr(cx, last_place) {
+ if matches!(parent.kind, ExprKind::Unary(UnOp::Deref, _) | ExprKind::Field(..))
+ || matches!(parent.kind, ExprKind::Index(e, _) if e.hir_id == last_place.hir_id)
+ {
+ last_place = parent;
+ } else {
+ break;
+ }
+ }
+ needs_mut |= cx.typeck_results().expr_ty_adjusted(last_place).ref_mutability()
+ == Some(Mutability::Mut)
+ || get_parent_expr(cx, last_place)
+ .map_or(false, |e| matches!(e.kind, ExprKind::AddrOf(_, Mutability::Mut, _)));
+ },
+ ExprKind::MethodCall(_, [recv, ..], _)
+ if recv.hir_id == e.hir_id
+ && adjusted_mut == Mutability::Mut
+ && !adjusted_ty.peel_refs().is_slice() =>
+ {
+ // No need to set `needs_mut` to true. The receiver will be either explicitly borrowed, or it will
+ // be implicitly borrowed via an adjustment. Both of these cases are already handled by this point.
+ return ControlFlow::Break(true);
+ },
+ ExprKind::Assign(lhs, ..) if e.hir_id == lhs.hir_id => {
+ needs_mut = true;
+ return ControlFlow::Break(false);
+ },
+ _ => (),
+ }
+ ControlFlow::Continue(())
+ });
+
+ // Avoid allocating small `Vec`s when they'll be extended right after.
+ if res == ControlFlow::Break(true) && self.found <= required_pushes_before_extension {
+ return;
+ }
+
+ let mut s = if self.lhs_is_let {
+ String::from("let ")
+ } else {
+ String::new()
+ };
+ if needs_mut {
+ s.push_str("mut ");
+ }
+ s.push_str(self.name.as_str());
+ if let Some(span) = self.let_ty_span {
+ s.push_str(": ");
+ s.push_str(&snippet(cx, span, "_"));
+ }
+ s.push_str(" = vec![..];");
+
+ span_lint_and_sugg(
+ cx,
+ VEC_INIT_THEN_PUSH,
+ self.err_span,
+ "calls to `push` immediately after creation",
+ "consider using the `vec![]` macro",
+ s,
+ Applicability::HasPlaceholders,
+ );
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for VecInitThenPush {
+ fn check_block(&mut self, _: &LateContext<'tcx>, _: &'tcx Block<'tcx>) {
+ self.searcher = None;
+ }
+
+ fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
+ if let Some(init_expr) = local.init
+ && let PatKind::Binding(BindingAnnotation::Mutable, id, name, None) = local.pat.kind
+ && !in_external_macro(cx.sess(), local.span)
+ && let Some(init) = get_vec_init_kind(cx, init_expr)
+ && !matches!(init, VecInitKind::WithExprCapacity(_))
+ {
+ self.searcher = Some(VecPushSearcher {
+ local_id: id,
+ init,
+ lhs_is_let: true,
+ name: name.name,
+ let_ty_span: local.ty.map(|ty| ty.span),
+ err_span: local.span,
+ found: 0,
+ last_push_expr: init_expr.hir_id,
+ });
+ }
+ }
+
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if self.searcher.is_none()
+ && let ExprKind::Assign(left, right, _) = expr.kind
+ && let ExprKind::Path(QPath::Resolved(None, path)) = left.kind
+ && let [name] = &path.segments
+ && let Res::Local(id) = path.res
+ && !in_external_macro(cx.sess(), expr.span)
+ && let Some(init) = get_vec_init_kind(cx, right)
+ && !matches!(init, VecInitKind::WithExprCapacity(_))
+ {
+ self.searcher = Some(VecPushSearcher {
+ local_id: id,
+ init,
+ lhs_is_let: false,
+ let_ty_span: None,
+ name: name.ident.name,
+ err_span: expr.span,
+ found: 0,
+ last_push_expr: expr.hir_id,
+ });
+ }
+ }
+
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ if let Some(searcher) = self.searcher.take() {
+ if let StmtKind::Expr(expr) | StmtKind::Semi(expr) = stmt.kind
+ && let ExprKind::MethodCall(name, [self_arg, _], _) = expr.kind
+ && path_to_local_id(self_arg, searcher.local_id)
+ && name.ident.as_str() == "push"
+ {
+ self.searcher = Some(VecPushSearcher {
+ found: searcher.found + 1,
+ err_span: searcher.err_span.to(stmt.span),
+ last_push_expr: expr.hir_id,
+ .. searcher
+ });
+ } else {
+ searcher.display_err(cx);
+ }
+ }
+ }
+
+ fn check_block_post(&mut self, cx: &LateContext<'tcx>, _: &'tcx Block<'tcx>) {
+ if let Some(searcher) = self.searcher.take() {
+ searcher.display_err(cx);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/vec_resize_to_zero.rs b/src/tools/clippy/clippy_lints/src/vec_resize_to_zero.rs
new file mode 100644
index 000000000..0fee3e812
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/vec_resize_to_zero.rs
@@ -0,0 +1,64 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{match_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::source_map::Spanned;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Finds occurrences of `Vec::resize(0, an_int)`
+ ///
+ /// ### Why is this bad?
+ /// This is probably an argument inversion mistake.
+ ///
+ /// ### Example
+ /// ```rust
+ /// vec!(1, 2, 3, 4, 5).resize(0, 5)
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// vec!(1, 2, 3, 4, 5).clear()
+ /// ```
+ #[clippy::version = "1.46.0"]
+ pub VEC_RESIZE_TO_ZERO,
+ correctness,
+ "emptying a vector with `resize(0, an_int)` instead of `clear()` is probably an argument inversion mistake"
+}
+
+declare_lint_pass!(VecResizeToZero => [VEC_RESIZE_TO_ZERO]);
+
+impl<'tcx> LateLintPass<'tcx> for VecResizeToZero {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ if_chain! {
+ if let hir::ExprKind::MethodCall(path_segment, args, _) = expr.kind;
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
+ if match_def_path(cx, method_def_id, &paths::VEC_RESIZE) && args.len() == 3;
+ if let ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = args[1].kind;
+ if let ExprKind::Lit(Spanned { node: LitKind::Int(..), .. }) = args[2].kind;
+ then {
+ let method_call_span = expr.span.with_lo(path_segment.ident.span.lo());
+ span_lint_and_then(
+ cx,
+ VEC_RESIZE_TO_ZERO,
+ expr.span,
+ "emptying a vector with `resize`",
+ |db| {
+ db.help("the arguments may be inverted...");
+ db.span_suggestion(
+ method_call_span,
+ "...or you can empty the vector with",
+ "clear()".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/verbose_file_reads.rs b/src/tools/clippy/clippy_lints/src/verbose_file_reads.rs
new file mode 100644
index 000000000..8e2ddd225
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/verbose_file_reads.rs
@@ -0,0 +1,88 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::paths;
+use clippy_utils::ty::match_type;
+use if_chain::if_chain;
+use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+    /// Checks for use of `File::read_to_end` and `File::read_to_string`.
+ ///
+ /// ### Why is this bad?
+ /// `fs::{read, read_to_string}` provide the same functionality when `buf` is empty with fewer imports and no intermediate values.
+ /// See also: [fs::read docs](https://doc.rust-lang.org/std/fs/fn.read.html), [fs::read_to_string docs](https://doc.rust-lang.org/std/fs/fn.read_to_string.html)
+ ///
+ /// ### Example
+ /// ```rust,no_run
+ /// # use std::io::Read;
+ /// # use std::fs::File;
+ /// let mut f = File::open("foo.txt").unwrap();
+ /// let mut bytes = Vec::new();
+ /// f.read_to_end(&mut bytes).unwrap();
+ /// ```
+ /// Can be written more concisely as
+ /// ```rust,no_run
+ /// # use std::fs;
+ /// let mut bytes = fs::read("foo.txt").unwrap();
+ /// ```
+ #[clippy::version = "1.44.0"]
+ pub VERBOSE_FILE_READS,
+ restriction,
+ "use of `File::read_to_end` or `File::read_to_string`"
+}
+
+declare_lint_pass!(VerboseFileReads => [VERBOSE_FILE_READS]);
+
+impl<'tcx> LateLintPass<'tcx> for VerboseFileReads {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if is_file_read_to_end(cx, expr) {
+ span_lint_and_help(
+ cx,
+ VERBOSE_FILE_READS,
+ expr.span,
+ "use of `File::read_to_end`",
+ None,
+ "consider using `fs::read` instead",
+ );
+ } else if is_file_read_to_string(cx, expr) {
+ span_lint_and_help(
+ cx,
+ VERBOSE_FILE_READS,
+ expr.span,
+ "use of `File::read_to_string`",
+ None,
+ "consider using `fs::read_to_string` instead",
+ );
+ }
+ }
+}
+
+fn is_file_read_to_end<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
+ if_chain! {
+ if let ExprKind::MethodCall(method_name, exprs, _) = expr.kind;
+ if method_name.ident.as_str() == "read_to_end";
+ if let ExprKind::Path(QPath::Resolved(None, _)) = &exprs[0].kind;
+ let ty = cx.typeck_results().expr_ty(&exprs[0]);
+ if match_type(cx, ty, &paths::FILE);
+ then {
+ return true
+ }
+ }
+ false
+}
+
+fn is_file_read_to_string<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
+ if_chain! {
+ if let ExprKind::MethodCall(method_name, exprs, _) = expr.kind;
+ if method_name.ident.as_str() == "read_to_string";
+ if let ExprKind::Path(QPath::Resolved(None, _)) = &exprs[0].kind;
+ let ty = cx.typeck_results().expr_ty(&exprs[0]);
+ if match_type(cx, ty, &paths::FILE);
+ then {
+ return true
+ }
+ }
+ false
+}
diff --git a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
new file mode 100644
index 000000000..5418eca38
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
@@ -0,0 +1,222 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_test_module_or_function;
+use clippy_utils::source::{snippet, snippet_with_applicability};
+use if_chain::if_chain;
+use rustc_errors::Applicability;
+use rustc_hir::{
+ def::{DefKind, Res},
+ Item, ItemKind, PathSegment, UseKind,
+};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::kw;
+use rustc_span::{sym, BytePos};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `use Enum::*`.
+ ///
+ /// ### Why is this bad?
+ /// It is usually better style to use the prefixed name of
+ /// an enumeration variant, rather than importing variants.
+ ///
+ /// ### Known problems
+ /// Old-style enumerations that prefix the variants are
+ /// still around.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::cmp::Ordering::*;
+ ///
+ /// # fn foo(_: std::cmp::Ordering) {}
+ /// foo(Less);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// use std::cmp::Ordering;
+ ///
+ /// # fn foo(_: Ordering) {}
+ /// foo(Ordering::Less)
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ENUM_GLOB_USE,
+ pedantic,
+ "use items that import all variants of an enum"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for wildcard imports `use _::*`.
+ ///
+ /// ### Why is this bad?
+    /// Wildcard imports can pollute the namespace. This is especially bad if
+    /// you try to import something through a wildcard that has already been imported by name from
+    /// a different source:
+ ///
+ /// ```rust,ignore
+ /// use crate1::foo; // Imports a function named foo
+ /// use crate2::*; // Has a function named foo
+ ///
+ /// foo(); // Calls crate1::foo
+ /// ```
+ ///
+ /// This can lead to confusing error messages at best and to unexpected behavior at worst.
+ ///
+ /// ### Exceptions
+ /// Wildcard imports are allowed from modules named `prelude`. Many crates (including the standard library)
+ /// provide modules named "prelude" specifically designed for wildcard import.
+ ///
+ /// `use super::*` is allowed in test modules. This is defined as any module with "test" in the name.
+ ///
+ /// These exceptions can be disabled using the `warn-on-all-wildcard-imports` configuration flag.
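+    ///
+    /// For example, a prelude import such as the following is accepted by the exception above:
+    /// ```rust,ignore
+    /// use std::io::prelude::*;
+    /// ```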
+ ///
+ /// ### Known problems
+    /// If macros are imported through the wildcard, they are not included
+    /// in the suggestion and have to be added by hand.
+    ///
+    /// Applying the suggestion when explicit imports of the items imported with a glob import
+    /// exist may result in `unused_imports` warnings.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use crate1::*;
+ ///
+ /// foo();
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust,ignore
+ /// use crate1::foo;
+ ///
+ /// foo();
+ /// ```
+ #[clippy::version = "1.43.0"]
+ pub WILDCARD_IMPORTS,
+ pedantic,
+ "lint `use _::*` statements"
+}
+
+#[derive(Default)]
+pub struct WildcardImports {
+ warn_on_all: bool,
+ test_modules_deep: u32,
+}
+
+impl WildcardImports {
+ pub fn new(warn_on_all: bool) -> Self {
+ Self {
+ warn_on_all,
+ test_modules_deep: 0,
+ }
+ }
+}
+
+impl_lint_pass!(WildcardImports => [ENUM_GLOB_USE, WILDCARD_IMPORTS]);
+
+impl LateLintPass<'_> for WildcardImports {
+ fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if is_test_module_or_function(cx.tcx, item) {
+ self.test_modules_deep = self.test_modules_deep.saturating_add(1);
+ }
+ let module = cx.tcx.parent_module_from_def_id(item.def_id);
+ if cx.tcx.visibility(item.def_id) != ty::Visibility::Restricted(module.to_def_id()) {
+ return;
+ }
+ if_chain! {
+ if let ItemKind::Use(use_path, UseKind::Glob) = &item.kind;
+ if self.warn_on_all || !self.check_exceptions(item, use_path.segments);
+ let used_imports = cx.tcx.names_imported_by_glob_use(item.def_id);
+ if !used_imports.is_empty(); // Already handled by `unused_imports`
+ then {
+ let mut applicability = Applicability::MachineApplicable;
+ let import_source_snippet = snippet_with_applicability(cx, use_path.span, "..", &mut applicability);
+ let (span, braced_glob) = if import_source_snippet.is_empty() {
+ // This is a `_::{_, *}` import
+ // In this case `use_path.span` is empty and ends directly in front of the `*`,
+ // so we need to extend it by one byte.
+ (
+ use_path.span.with_hi(use_path.span.hi() + BytePos(1)),
+ true,
+ )
+ } else {
+ // In this case, the `use_path.span` ends right before the `::*`, so we need to
+ // extend it up to the `*`. Since it is hard to find the `*` in weird
+ // formatting like `use _ :: *;`, we extend it up to, but not including, the
+ // `;`. In nested imports, like `use _::{inner::*, _}`, there is no `;`, so we
+ // can just use the end of the item span.
+ let mut span = use_path.span.with_hi(item.span.hi());
+ if snippet(cx, span, "").ends_with(';') {
+ span = use_path.span.with_hi(item.span.hi() - BytePos(1));
+ }
+ (
+ span, false,
+ )
+ };
+
+ let imports_string = if used_imports.len() == 1 {
+ used_imports.iter().next().unwrap().to_string()
+ } else {
+ let mut imports = used_imports
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>();
+ imports.sort();
+ if braced_glob {
+ imports.join(", ")
+ } else {
+ format!("{{{}}}", imports.join(", "))
+ }
+ };
+
+ let sugg = if braced_glob {
+ imports_string
+ } else {
+ format!("{}::{}", import_source_snippet, imports_string)
+ };
+
+ let (lint, message) = if let Res::Def(DefKind::Enum, _) = use_path.res {
+ (ENUM_GLOB_USE, "usage of wildcard import for enum variants")
+ } else {
+ (WILDCARD_IMPORTS, "usage of wildcard import")
+ };
+
+ span_lint_and_sugg(
+ cx,
+ lint,
+ span,
+ message,
+ "try",
+ sugg,
+ applicability,
+ );
+ }
+ }
+ }
+
+ fn check_item_post(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
+ if is_test_module_or_function(cx.tcx, item) {
+ self.test_modules_deep = self.test_modules_deep.saturating_sub(1);
+ }
+ }
+}
+
+impl WildcardImports {
+ fn check_exceptions(&self, item: &Item<'_>, segments: &[PathSegment<'_>]) -> bool {
+ item.span.from_expansion()
+ || is_prelude_import(segments)
+ || (is_super_only_import(segments) && self.test_modules_deep > 0)
+ }
+}
+
+// Allow "...prelude::..::*" imports.
+// Many crates have a prelude, and it is imported as a glob by design.
+fn is_prelude_import(segments: &[PathSegment<'_>]) -> bool {
+ segments.iter().any(|ps| ps.ident.name == sym::prelude)
+}
+
+// Allow "super::*" imports in tests.
+fn is_super_only_import(segments: &[PathSegment<'_>]) -> bool {
+ segments.len() == 1 && segments[0].ident.name == kw::Super
+}
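To make the rewrite above concrete, here is a minimal sketch of the before/after that `WILDCARD_IMPORTS` aims for. The `shapes` module, `Circle`, and `area` are hypothetical names used only for illustration; they are not part of this file.

```rust
// Hypothetical module, for illustration only.
mod shapes {
    pub struct Circle;
    pub fn area(_c: &Circle) -> f64 {
        0.0
    }
}

// Before (would trigger WILDCARD_IMPORTS):
//     use self::shapes::*;
// After (the lint suggests listing the names actually used, sorted):
use self::shapes::{Circle, area};

fn main() {
    let c = Circle;
    println!("{}", area(&c));
}
```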
diff --git a/src/tools/clippy/clippy_lints/src/write.rs b/src/tools/clippy/clippy_lints/src/write.rs
new file mode 100644
index 000000000..32718200c
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/write.rs
@@ -0,0 +1,709 @@
+use std::borrow::Cow;
+use std::iter;
+use std::ops::{Deref, Range};
+
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::source::{snippet_opt, snippet_with_applicability};
+use rustc_ast::ast::{Expr, ExprKind, Impl, Item, ItemKind, MacCall, Path, StrLit, StrStyle};
+use rustc_ast::token::{self, LitKind};
+use rustc_ast::tokenstream::TokenStream;
+use rustc_errors::{Applicability, DiagnosticBuilder};
+use rustc_lexer::unescape::{self, EscapeError};
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+use rustc_parse::parser;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::{kw, Symbol};
+use rustc_span::{sym, BytePos, InnerSpan, Span, DUMMY_SP};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns when you use `println!("")` to
+ /// print a newline.
+ ///
+ /// ### Why is this bad?
+ /// You should use `println!()`, which is simpler.
+ ///
+ /// ### Example
+ /// ```rust
+ /// println!("");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// println!();
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub PRINTLN_EMPTY_STRING,
+ style,
+ "using `println!(\"\")` with an empty string"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns when you use `print!()` with a format
+ /// string that ends in a newline.
+ ///
+ /// ### Why is this bad?
+ /// You should use `println!()` instead, which appends the
+ /// newline.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let name = "World";
+ /// print!("Hello {}!\n", name);
+ /// ```
+ /// Use `println!()` instead:
+ /// ```rust
+ /// # let name = "World";
+ /// println!("Hello {}!", name);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub PRINT_WITH_NEWLINE,
+ style,
+ "using `print!()` with a format string that ends in a single newline"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for printing on *stdout*. The purpose of this lint
+ /// is to catch debugging remnants.
+ ///
+ /// ### Why is this bad?
+ /// People often print on *stdout* while debugging an
+ /// application and might forget to remove those prints afterward.
+ ///
+ /// ### Known problems
+ /// * Only catches `print!` and `println!` calls.
+ /// * The lint level is unaffected by crate attributes. The level can still
+ /// be set for functions, modules and other items. To change the level for
+ /// the entire crate, please use command line flags. More information and a
+ /// configuration example can be found in [clippy#6610].
+ ///
+ /// [clippy#6610]: https://github.com/rust-lang/rust-clippy/issues/6610#issuecomment-977120558
+ ///
+ /// ### Example
+ /// ```rust
+ /// println!("Hello world!");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub PRINT_STDOUT,
+ restriction,
+ "printing on stdout"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for printing on *stderr*. The purpose of this lint
+ /// is to catch debugging remnants.
+ ///
+ /// ### Why is this bad?
+ /// People often print on *stderr* while debugging an
+ /// application and might forget to remove those prints afterward.
+ ///
+ /// ### Known problems
+ /// * Only catches `eprint!` and `eprintln!` calls.
+ /// * The lint level is unaffected by crate attributes. The level can still
+ /// be set for functions, modules and other items. To change the level for
+ /// the entire crate, please use command line flags. More information and a
+ /// configuration example can be found in [clippy#6610].
+ ///
+ /// [clippy#6610]: https://github.com/rust-lang/rust-clippy/issues/6610#issuecomment-977120558
+ ///
+ /// ### Example
+ /// ```rust
+ /// eprintln!("Hello world!");
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub PRINT_STDERR,
+ restriction,
+ "printing on stderr"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for use of `Debug` formatting. The purpose of this
+ /// lint is to catch debugging remnants.
+ ///
+ /// ### Why is this bad?
+ /// The purpose of the `Debug` trait is to facilitate
+ /// debugging Rust code. It should not be used in user-facing output.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let foo = "bar";
+ /// println!("{:?}", foo);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub USE_DEBUG,
+ restriction,
+ "use of `Debug`-based formatting"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns about the use of literals as `print!`/`println!` args.
+ ///
+ /// ### Why is this bad?
+ /// Using literals as `println!` args is inefficient
+ /// (cf. https://github.com/matthiaskrgr/rust-str-bench) and unnecessary
+ /// (i.e., just put the literal in the format string).
+ ///
+ /// ### Known problems
+ /// This lint also warns when a macro call that expands to a literal is used
+ /// as an argument -- e.g., `println!("{}", env!("FOO"))`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// println!("{}", "foo");
+ /// ```
+ /// Use the literal without formatting:
+ /// ```rust
+ /// println!("foo");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub PRINT_LITERAL,
+ style,
+ "printing a literal with a format string"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns when you use `writeln!(buf, "")` to
+ /// print a newline.
+ ///
+ /// ### Why is this bad?
+ /// You should use `writeln!(buf)`, which is simpler.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// writeln!(buf, "");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// writeln!(buf);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WRITELN_EMPTY_STRING,
+ style,
+ "using `writeln!(buf, \"\")` with an empty string"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns when you use `write!()` with a format string that
+ /// ends in a newline.
+ ///
+ /// ### Why is this bad?
+ /// You should use `writeln!()` instead, which appends the
+ /// newline.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// # let name = "World";
+ /// write!(buf, "Hello {}!\n", name);
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// # let name = "World";
+ /// writeln!(buf, "Hello {}!", name);
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WRITE_WITH_NEWLINE,
+ style,
+ "using `write!()` with a format string that ends in a single newline"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// This lint warns about the use of literals as `write!`/`writeln!` args.
+ ///
+ /// ### Why is this bad?
+ /// Using literals as `writeln!` args is inefficient
+ /// (cf. https://github.com/matthiaskrgr/rust-str-bench) and unnecessary
+ /// (i.e., just put the literal in the format string).
+ ///
+ /// ### Known problems
+ /// This lint also warns when a macro call that expands to a literal is used
+ /// as an argument -- e.g., `writeln!(buf, "{}", env!("FOO"))`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// writeln!(buf, "{}", "foo");
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// writeln!(buf, "foo");
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub WRITE_LITERAL,
+ style,
+ "writing a literal with a format string"
+}
+
+#[derive(Default)]
+pub struct Write {
+ in_debug_impl: bool,
+}
+
+impl_lint_pass!(Write => [
+ PRINT_WITH_NEWLINE,
+ PRINTLN_EMPTY_STRING,
+ PRINT_STDOUT,
+ PRINT_STDERR,
+ USE_DEBUG,
+ PRINT_LITERAL,
+ WRITE_WITH_NEWLINE,
+ WRITELN_EMPTY_STRING,
+ WRITE_LITERAL
+]);
+
+impl EarlyLintPass for Write {
+ fn check_item(&mut self, _: &EarlyContext<'_>, item: &Item) {
+ if let ItemKind::Impl(box Impl {
+ of_trait: Some(trait_ref),
+ ..
+ }) = &item.kind
+ {
+ let trait_name = trait_ref
+ .path
+ .segments
+ .iter()
+ .last()
+ .expect("path has at least one segment")
+ .ident
+ .name;
+ if trait_name == sym::Debug {
+ self.in_debug_impl = true;
+ }
+ }
+ }
+
+ fn check_item_post(&mut self, _: &EarlyContext<'_>, _: &Item) {
+ self.in_debug_impl = false;
+ }
+
+ fn check_mac(&mut self, cx: &EarlyContext<'_>, mac: &MacCall) {
+ fn is_build_script(cx: &EarlyContext<'_>) -> bool {
+ // Cargo sets the crate name for build scripts to `build_script_build`
+ cx.sess()
+ .opts
+ .crate_name
+ .as_ref()
+ .map_or(false, |crate_name| crate_name == "build_script_build")
+ }
+
+ if mac.path == sym!(print) {
+ if !is_build_script(cx) {
+ span_lint(cx, PRINT_STDOUT, mac.span(), "use of `print!`");
+ }
+ self.lint_print_with_newline(cx, mac);
+ } else if mac.path == sym!(println) {
+ if !is_build_script(cx) {
+ span_lint(cx, PRINT_STDOUT, mac.span(), "use of `println!`");
+ }
+ self.lint_println_empty_string(cx, mac);
+ } else if mac.path == sym!(eprint) {
+ span_lint(cx, PRINT_STDERR, mac.span(), "use of `eprint!`");
+ self.lint_print_with_newline(cx, mac);
+ } else if mac.path == sym!(eprintln) {
+ span_lint(cx, PRINT_STDERR, mac.span(), "use of `eprintln!`");
+ self.lint_println_empty_string(cx, mac);
+ } else if mac.path == sym!(write) {
+ if let (Some(fmt_str), dest) = self.check_tts(cx, mac.args.inner_tokens(), true) {
+ if check_newlines(&fmt_str) {
+ let (nl_span, only_nl) = newline_span(&fmt_str);
+ let nl_span = match (dest, only_nl) {
+ // Special case of `write!(buf, "\n")`: Mark everything from the end of
+ // `buf` for removal so no trailing comma [`writeln!(buf, )`] remains.
+ (Some(dest_expr), true) => nl_span.with_lo(dest_expr.span.hi()),
+ _ => nl_span,
+ };
+ span_lint_and_then(
+ cx,
+ WRITE_WITH_NEWLINE,
+ mac.span(),
+ "using `write!()` with a format string that ends in a single newline",
+ |err| {
+ err.multipart_suggestion(
+ "use `writeln!()` instead",
+ vec![(mac.path.span, String::from("writeln")), (nl_span, String::new())],
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ }
+ } else if mac.path == sym!(writeln) {
+ if let (Some(fmt_str), expr) = self.check_tts(cx, mac.args.inner_tokens(), true) {
+ if fmt_str.symbol == kw::Empty {
+ let mut applicability = Applicability::MachineApplicable;
+ let suggestion = if let Some(e) = expr {
+ snippet_with_applicability(cx, e.span, "v", &mut applicability)
+ } else {
+ applicability = Applicability::HasPlaceholders;
+ Cow::Borrowed("v")
+ };
+
+ span_lint_and_sugg(
+ cx,
+ WRITELN_EMPTY_STRING,
+ mac.span(),
+ format!("using `writeln!({}, \"\")`", suggestion).as_str(),
+ "replace it with",
+ format!("writeln!({})", suggestion),
+ applicability,
+ );
+ }
+ }
+ }
+ }
+}
+
+/// Given a format string that ends in a newline and its span, calculates the span of the
+/// newline, or of the format string itself if it consists solely of a newline.
+/// Returns that span and a boolean indicating whether the string consisted only of a newline.
+fn newline_span(fmtstr: &StrLit) -> (Span, bool) {
+ let sp = fmtstr.span;
+ let contents = fmtstr.symbol.as_str();
+
+ if contents == r"\n" {
+ return (sp, true);
+ }
+
+ let newline_sp_hi = sp.hi()
+ - match fmtstr.style {
+ StrStyle::Cooked => BytePos(1),
+ StrStyle::Raw(hashes) => BytePos((1 + hashes).into()),
+ };
+
+ let newline_sp_len = if contents.ends_with('\n') {
+ BytePos(1)
+ } else if contents.ends_with(r"\n") {
+ BytePos(2)
+ } else {
+ panic!("expected format string to contain a newline");
+ };
+
+ (sp.with_lo(newline_sp_hi - newline_sp_len).with_hi(newline_sp_hi), false)
+}
+
+/// Stores a list of replacement spans for each argument, but only if all the replacements used an
+/// empty format string.
+#[derive(Default)]
+struct SimpleFormatArgs {
+ unnamed: Vec<Vec<Span>>,
+ named: Vec<(Symbol, Vec<Span>)>,
+}
+impl SimpleFormatArgs {
+ fn get_unnamed(&self) -> impl Iterator<Item = &[Span]> {
+ self.unnamed.iter().map(|x| match x.as_slice() {
+ // Ignore the dummy span added from out of order format arguments.
+ [DUMMY_SP] => &[],
+ x => x,
+ })
+ }
+
+ fn get_named(&self, n: &Path) -> &[Span] {
+ self.named.iter().find(|x| *n == x.0).map_or(&[], |x| x.1.as_slice())
+ }
+
+ fn push(&mut self, arg: rustc_parse_format::Argument<'_>, span: Span) {
+ use rustc_parse_format::{
+ AlignUnknown, ArgumentImplicitlyIs, ArgumentIs, ArgumentNamed, CountImplied, FormatSpec,
+ };
+
+ const SIMPLE: FormatSpec<'_> = FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountImplied,
+ precision_span: None,
+ width: CountImplied,
+ width_span: None,
+ ty: "",
+ ty_span: None,
+ };
+
+ match arg.position {
+ ArgumentIs(n) | ArgumentImplicitlyIs(n) => {
+ if self.unnamed.len() <= n {
+ // Use a dummy span to mark all unseen arguments.
+ self.unnamed.resize_with(n, || vec![DUMMY_SP]);
+ if arg.format == SIMPLE {
+ self.unnamed.push(vec![span]);
+ } else {
+ self.unnamed.push(Vec::new());
+ }
+ } else {
+ let args = &mut self.unnamed[n];
+ match (args.as_mut_slice(), arg.format == SIMPLE) {
+ // A non-empty format string has been seen already.
+ ([], _) => (),
+ // Replace the dummy span, if it exists.
+ ([dummy @ DUMMY_SP], true) => *dummy = span,
+ ([_, ..], true) => args.push(span),
+ ([_, ..], false) => *args = Vec::new(),
+ }
+ }
+ },
+ ArgumentNamed(n) => {
+ let n = Symbol::intern(n);
+ if let Some(x) = self.named.iter_mut().find(|x| x.0 == n) {
+ match x.1.as_slice() {
+ // A non-empty format string has been seen already.
+ [] => (),
+ [_, ..] if arg.format == SIMPLE => x.1.push(span),
+ [_, ..] => x.1 = Vec::new(),
+ }
+ } else if arg.format == SIMPLE {
+ self.named.push((n, vec![span]));
+ } else {
+ self.named.push((n, Vec::new()));
+ }
+ },
+ };
+ }
+}
+
+impl Write {
+ /// Parses a format string into a collection of spans for each argument. This only keeps track
+ /// of empty format arguments. It also lints usages of debug format strings outside of debug
+ /// impls.
+ fn parse_fmt_string(&self, cx: &EarlyContext<'_>, str_lit: &StrLit) -> Option<SimpleFormatArgs> {
+ use rustc_parse_format::{ParseMode, Parser, Piece};
+
+ let str_sym = str_lit.symbol_unescaped.as_str();
+ let style = match str_lit.style {
+ StrStyle::Cooked => None,
+ StrStyle::Raw(n) => Some(n as usize),
+ };
+
+ let mut parser = Parser::new(str_sym, style, snippet_opt(cx, str_lit.span), false, ParseMode::Format);
+ let mut args = SimpleFormatArgs::default();
+
+ while let Some(arg) = parser.next() {
+ let arg = match arg {
+ Piece::String(_) => continue,
+ Piece::NextArgument(arg) => arg,
+ };
+ let span = parser
+ .arg_places
+ .last()
+ .map_or(DUMMY_SP, |&x| str_lit.span.from_inner(InnerSpan::new(x.start, x.end)));
+
+ if !self.in_debug_impl && arg.format.ty == "?" {
+ // FIXME: modify rustc's fmt string parser to give us the current span
+ span_lint(cx, USE_DEBUG, span, "use of `Debug`-based formatting");
+ }
+
+ args.push(arg, span);
+ }
+
+ parser.errors.is_empty().then_some(args)
+ }
+
+ /// Checks the arguments of `print[ln]!` and `write[ln]!` calls. It will return a tuple of two
+ /// `Option`s. The first `Option` of the tuple is the macro's format string. It includes
+ /// the contents of the string, whether it's a raw string, and the span of the literal in the
+ /// source. The second `Option` in the tuple is, in the `write[ln]!` case, the expression the
+ /// `format_str` should be written to.
+ ///
+ /// Example:
+ ///
+ /// Calling this function on
+ /// ```rust
+ /// # use std::fmt::Write;
+ /// # let mut buf = String::new();
+ /// # let something = "something";
+ /// writeln!(buf, "string to write: {}", something);
+ /// ```
+ /// will return
+ /// ```rust,ignore
+ /// (Some("string to write: {}"), Some(buf))
+ /// ```
+ fn check_tts<'a>(&self, cx: &EarlyContext<'a>, tts: TokenStream, is_write: bool) -> (Option<StrLit>, Option<Expr>) {
+ let mut parser = parser::Parser::new(&cx.sess().parse_sess, tts, false, None);
+ let expr = if is_write {
+ match parser
+ .parse_expr()
+ .map(rustc_ast::ptr::P::into_inner)
+ .map_err(DiagnosticBuilder::cancel)
+ {
+ // write!(e, ...)
+ Ok(p) if parser.eat(&token::Comma) => Some(p),
+ // write!(e) or error
+ e => return (None, e.ok()),
+ }
+ } else {
+ None
+ };
+
+ let fmtstr = match parser.parse_str_lit() {
+ Ok(fmtstr) => fmtstr,
+ Err(_) => return (None, expr),
+ };
+
+ let args = match self.parse_fmt_string(cx, &fmtstr) {
+ Some(args) => args,
+ None => return (Some(fmtstr), expr),
+ };
+
+ let lint = if is_write { WRITE_LITERAL } else { PRINT_LITERAL };
+ let mut unnamed_args = args.get_unnamed();
+ loop {
+ if !parser.eat(&token::Comma) {
+ return (Some(fmtstr), expr);
+ }
+
+ let comma_span = parser.prev_token.span;
+ let token_expr = if let Ok(expr) = parser.parse_expr().map_err(DiagnosticBuilder::cancel) {
+ expr
+ } else {
+ return (Some(fmtstr), None);
+ };
+ let (fmt_spans, lit) = match &token_expr.kind {
+ ExprKind::Lit(lit) => (unnamed_args.next().unwrap_or(&[]), lit),
+ ExprKind::Assign(lhs, rhs, _) => match (&lhs.kind, &rhs.kind) {
+ (ExprKind::Path(_, p), ExprKind::Lit(lit)) => (args.get_named(p), lit),
+ _ => continue,
+ },
+ _ => {
+ unnamed_args.next();
+ continue;
+ },
+ };
+
+ let replacement: String = match lit.token.kind {
+ LitKind::StrRaw(_) | LitKind::ByteStrRaw(_) if matches!(fmtstr.style, StrStyle::Raw(_)) => {
+ lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
+ },
+ LitKind::Str | LitKind::ByteStr if matches!(fmtstr.style, StrStyle::Cooked) => {
+ lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
+ },
+ LitKind::StrRaw(_)
+ | LitKind::Str
+ | LitKind::ByteStrRaw(_)
+ | LitKind::ByteStr
+ | LitKind::Integer
+ | LitKind::Float
+ | LitKind::Err => continue,
+ LitKind::Byte | LitKind::Char => match lit.token.symbol.as_str() {
+ "\"" if matches!(fmtstr.style, StrStyle::Cooked) => "\\\"",
+ "\"" if matches!(fmtstr.style, StrStyle::Raw(0)) => continue,
+ "\\\\" if matches!(fmtstr.style, StrStyle::Raw(_)) => "\\",
+ "\\'" => "'",
+ "{" => "{{",
+ "}" => "}}",
+ x if matches!(fmtstr.style, StrStyle::Raw(_)) && x.starts_with('\\') => continue,
+ x => x,
+ }
+ .into(),
+ LitKind::Bool => lit.token.symbol.as_str().deref().into(),
+ };
+
+ if !fmt_spans.is_empty() {
+ span_lint_and_then(
+ cx,
+ lint,
+ token_expr.span,
+ "literal with an empty format string",
+ |diag| {
+ diag.multipart_suggestion(
+ "try this",
+ iter::once((comma_span.to(token_expr.span), String::new()))
+ .chain(fmt_spans.iter().copied().zip(iter::repeat(replacement)))
+ .collect(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ }
+ }
+
+ fn lint_println_empty_string(&self, cx: &EarlyContext<'_>, mac: &MacCall) {
+ if let (Some(fmt_str), _) = self.check_tts(cx, mac.args.inner_tokens(), false) {
+ if fmt_str.symbol == kw::Empty {
+ let name = mac.path.segments[0].ident.name;
+ span_lint_and_sugg(
+ cx,
+ PRINTLN_EMPTY_STRING,
+ mac.span(),
+ &format!("using `{}!(\"\")`", name),
+ "replace it with",
+ format!("{}!()", name),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+
+ fn lint_print_with_newline(&self, cx: &EarlyContext<'_>, mac: &MacCall) {
+ if let (Some(fmt_str), _) = self.check_tts(cx, mac.args.inner_tokens(), false) {
+ if check_newlines(&fmt_str) {
+ let name = mac.path.segments[0].ident.name;
+ let suggested = format!("{}ln", name);
+ span_lint_and_then(
+ cx,
+ PRINT_WITH_NEWLINE,
+ mac.span(),
+ &format!("using `{}!()` with a format string that ends in a single newline", name),
+ |err| {
+ err.multipart_suggestion(
+ &format!("use `{}!` instead", suggested),
+ vec![(mac.path.span, suggested), (newline_span(&fmt_str).0, String::new())],
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ }
+ }
+}
+
+/// Checks if the format string contains a single newline that terminates it.
+///
+/// Literal and escaped newlines are both checked (only literal for raw strings).
+fn check_newlines(fmtstr: &StrLit) -> bool {
+ let mut has_internal_newline = false;
+ let mut last_was_cr = false;
+ let mut should_lint = false;
+
+ let contents = fmtstr.symbol.as_str();
+
+ let mut cb = |r: Range<usize>, c: Result<char, EscapeError>| {
+ let c = c.unwrap();
+
+ if r.end == contents.len() && c == '\n' && !last_was_cr && !has_internal_newline {
+ should_lint = true;
+ } else {
+ last_was_cr = c == '\r';
+ if c == '\n' {
+ has_internal_newline = true;
+ }
+ }
+ };
+
+ match fmtstr.style {
+ StrStyle::Cooked => unescape::unescape_literal(contents, unescape::Mode::Str, &mut cb),
+ StrStyle::Raw(_) => unescape::unescape_literal(contents, unescape::Mode::RawStr, &mut cb),
+ }
+
+ should_lint
+}
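As a rough illustration of the rule `check_newlines` implements (flag only a single, non-CRLF newline at the very end of the format string), here is a standalone sketch that works on an already-unescaped `&str`. The helper name and the simplification are mine; the real code resolves escapes via rustc's unescaper as shown above.

```rust
// Simplified stand-in for check_newlines: assumes escape sequences have
// already been resolved.
fn ends_in_single_newline(contents: &str) -> bool {
    contents.ends_with('\n')
        && !contents.ends_with("\r\n")
        && !contents[..contents.len() - 1].contains('\n')
}

fn main() {
    assert!(ends_in_single_newline("Hello {}!\n")); // would be linted
    assert!(!ends_in_single_newline("a\nb\n"));     // internal newline: ignored
    assert!(!ends_in_single_newline("line\r\n"));   // CRLF ending: ignored
    assert!(!ends_in_single_newline("no newline")); // nothing to lint
}
```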
diff --git a/src/tools/clippy/clippy_lints/src/zero_div_zero.rs b/src/tools/clippy/clippy_lints/src/zero_div_zero.rs
new file mode 100644
index 000000000..50d3c079f
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/zero_div_zero.rs
@@ -0,0 +1,67 @@
+use clippy_utils::consts::{constant_simple, Constant};
+use clippy_utils::diagnostics::span_lint_and_help;
+use if_chain::if_chain;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `0.0 / 0.0`.
+ ///
+ /// ### Why is this bad?
+ /// It's less readable than `f32::NAN` or `f64::NAN`.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let nan = 0.0f32 / 0.0;
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// let nan = f32::NAN;
+ /// ```
+ #[clippy::version = "pre 1.29.0"]
+ pub ZERO_DIVIDED_BY_ZERO,
+ complexity,
+ "usage of `0.0 / 0.0` to obtain NaN instead of `f32::NAN` or `f64::NAN`"
+}
+
+declare_lint_pass!(ZeroDiv => [ZERO_DIVIDED_BY_ZERO]);
+
+impl<'tcx> LateLintPass<'tcx> for ZeroDiv {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
+ // check for instances of 0.0/0.0
+ if_chain! {
+ if let ExprKind::Binary(ref op, left, right) = expr.kind;
+ if op.node == BinOpKind::Div;
+ // TODO - constant_simple does not fold many operations involving floats.
+ // That's probably fine for this lint - it's pretty unlikely that someone would
+ // do something like 0.0/(2.0 - 2.0), but it would be nice to warn on that case too.
+ if let Some(lhs_value) = constant_simple(cx, cx.typeck_results(), left);
+ if let Some(rhs_value) = constant_simple(cx, cx.typeck_results(), right);
+ if Constant::F32(0.0) == lhs_value || Constant::F64(0.0) == lhs_value;
+ if Constant::F32(0.0) == rhs_value || Constant::F64(0.0) == rhs_value;
+ then {
+ // since we're about to suggest a use of f32::NAN or f64::NAN,
+ // match the precision of the literals that are given.
+ let float_type = match (lhs_value, rhs_value) {
+ (Constant::F64(_), _)
+ | (_, Constant::F64(_)) => "f64",
+ _ => "f32"
+ };
+ span_lint_and_help(
+ cx,
+ ZERO_DIVIDED_BY_ZERO,
+ expr.span,
+ "constant division of `0.0` with `0.0` will always result in NaN",
+ None,
+ &format!(
+ "consider using `{}::NAN` if you would like a constant representing NaN",
+ float_type,
+ ),
+ );
+ }
+ }
+ }
+}
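For reference, this is the pattern the lint flags and the spelling its help message points to; the snippet is only an illustration of the before/after, not code from the lint itself.

```rust
fn main() {
    // What ZERO_DIVIDED_BY_ZERO warns about: a constant 0.0 / 0.0.
    let nan_from_div = 0.0_f64 / 0.0;
    // The clearer spelling suggested by the lint's help message.
    let nan_const = f64::NAN;
    assert!(nan_from_div.is_nan() && nan_const.is_nan());
}
```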
diff --git a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
new file mode 100644
index 000000000..8dc43c0e2
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
@@ -0,0 +1,94 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::ty::{is_normalizable, is_type_diagnostic_item};
+use if_chain::if_chain;
+use rustc_hir::{self as hir, HirId, ItemKind, Node};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::layout::LayoutOf as _;
+use rustc_middle::ty::{Adt, Ty, TypeVisitable};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+use rustc_typeck::hir_ty_to_ty;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for maps with zero-sized value types anywhere in the code.
+ ///
+ /// ### Why is this bad?
+ /// Since there is only a single value for a zero-sized type, a map
+ /// containing zero-sized values is effectively a set. Using a set in that case improves
+ /// readability and communicates intent more clearly.
+ ///
+ /// ### Known problems
+ /// * A zero-sized type cannot be recovered later if it contains private fields.
+ /// * This lint also triggers on the signatures of public items.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::collections::HashMap;
+ /// fn unique_words(text: &str) -> HashMap<&str, ()> {
+ /// todo!();
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::collections::HashSet;
+ /// fn unique_words(text: &str) -> HashSet<&str> {
+ /// todo!();
+ /// }
+ /// ```
+ #[clippy::version = "1.50.0"]
+ pub ZERO_SIZED_MAP_VALUES,
+ pedantic,
+ "usage of map with zero-sized value type"
+}
+
+declare_lint_pass!(ZeroSizedMapValues => [ZERO_SIZED_MAP_VALUES]);
+
+impl LateLintPass<'_> for ZeroSizedMapValues {
+ fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) {
+ if_chain! {
+ if !hir_ty.span.from_expansion();
+ if !in_trait_impl(cx, hir_ty.hir_id);
+ let ty = ty_from_hir_ty(cx, hir_ty);
+ if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap);
+ if let Adt(_, substs) = ty.kind();
+ let ty = substs.type_at(1);
+ // Fixes https://github.com/rust-lang/rust-clippy/issues/7447 because of
+ // https://github.com/rust-lang/rust/blob/master/compiler/rustc_middle/src/ty/sty.rs#L968
+ if !ty.has_escaping_bound_vars();
+ // Do this to prevent `layout_of` crashing, being unable to fully normalize `ty`.
+ if is_normalizable(cx, cx.param_env, ty);
+ if let Ok(layout) = cx.layout_of(ty);
+ if layout.is_zst();
+ then {
+ span_lint_and_help(cx, ZERO_SIZED_MAP_VALUES, hir_ty.span, "map with zero-sized value type", None, "consider using a set instead");
+ }
+ }
+ }
+}
+
+fn in_trait_impl(cx: &LateContext<'_>, hir_id: HirId) -> bool {
+ let parent_id = cx.tcx.hir().get_parent_item(hir_id);
+ let second_parent_id = cx
+ .tcx
+ .hir()
+ .get_parent_item(cx.tcx.hir().local_def_id_to_hir_id(parent_id));
+ if let Some(Node::Item(item)) = cx.tcx.hir().find_by_def_id(second_parent_id) {
+ if let ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }) = item.kind {
+ return true;
+ }
+ }
+ false
+}
+
+fn ty_from_hir_ty<'tcx>(cx: &LateContext<'tcx>, hir_ty: &hir::Ty<'_>) -> Ty<'tcx> {
+ cx.maybe_typeck_results()
+ .and_then(|results| {
+ if results.hir_owner == hir_ty.hir_id.owner {
+ results.node_type_opt(hir_ty.hir_id)
+ } else {
+ None
+ }
+ })
+ .unwrap_or_else(|| hir_ty_to_ty(cx.tcx, hir_ty))
+}
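A small sketch of the refactor the lint is nudging toward; the variable names are made up for illustration.

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    // A map whose value type is zero-sized is effectively a set...
    let mut seen_as_map: HashMap<&str, ()> = HashMap::new();
    seen_as_map.insert("alpha", ());

    // ...so ZERO_SIZED_MAP_VALUES suggests a set, which states the intent directly.
    let mut seen_as_set: HashSet<&str> = HashSet::new();
    seen_as_set.insert("alpha");

    assert_eq!(seen_as_map.contains_key("alpha"), seen_as_set.contains("alpha"));
}
```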
diff --git a/src/tools/clippy/clippy_utils/Cargo.toml b/src/tools/clippy/clippy_utils/Cargo.toml
new file mode 100644
index 000000000..bb443bdc1
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "clippy_utils"
+version = "0.1.64"
+edition = "2021"
+publish = false
+
+[dependencies]
+arrayvec = { version = "0.7", default-features = false }
+if_chain = "1.0"
+rustc-semver = "1.1"
+
+[features]
+deny-warnings = []
+internal = []
+
+[package.metadata.rust-analyzer]
+# This crate uses #![feature(rustc_private)]
+rustc_private = true
diff --git a/src/tools/clippy/clippy_utils/src/ast_utils.rs b/src/tools/clippy/clippy_utils/src/ast_utils.rs
new file mode 100644
index 000000000..b22602632
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/ast_utils.rs
@@ -0,0 +1,710 @@
+//! Utilities for manipulating and extracting information from `rustc_ast::ast`.
+//!
+//! - The `eq_foobar` functions test for semantic equality but ignore `NodeId`s and `Span`s.
+
+#![allow(clippy::similar_names, clippy::wildcard_imports, clippy::enum_glob_use)]
+
+use crate::{both, over};
+use rustc_ast::ptr::P;
+use rustc_ast::{self as ast, *};
+use rustc_span::symbol::Ident;
+use std::mem;
+
+pub mod ident_iter;
+pub use ident_iter::IdentIter;
+
+pub fn is_useless_with_eq_exprs(kind: BinOpKind) -> bool {
+ use BinOpKind::*;
+ matches!(
+ kind,
+ Sub | Div | Eq | Lt | Le | Gt | Ge | Ne | And | Or | BitXor | BitAnd | BitOr
+ )
+}
+
+/// Checks that both slices have the same length and that each element of the first slice
+/// is matched by some element of the second, as per `eq_fn`.
+pub fn unordered_over<X>(left: &[X], right: &[X], mut eq_fn: impl FnMut(&X, &X) -> bool) -> bool {
+ left.len() == right.len() && left.iter().all(|l| right.iter().any(|r| eq_fn(l, r)))
+}
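Since `unordered_over` has no rustc dependencies, its behaviour can be shown with a standalone copy. Note the up-front length check and the fact that duplicates on the left can each be matched by the same element on the right, a known looseness of this comparison.

```rust
fn unordered_over<X>(left: &[X], right: &[X], mut eq_fn: impl FnMut(&X, &X) -> bool) -> bool {
    left.len() == right.len() && left.iter().all(|l| right.iter().any(|r| eq_fn(l, r)))
}

fn main() {
    // Order does not matter.
    assert!(unordered_over(&[1, 2, 3], &[3, 1, 2], |a, b| a == b));
    // Different lengths are rejected up front.
    assert!(!unordered_over(&[1, 2], &[1, 2, 3], |a, b| a == b));
    // Caveat: this is not multiset equality; duplicates can mask a mismatch.
    assert!(unordered_over(&[1, 1], &[1, 2], |a, b| a == b));
}
```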
+
+pub fn eq_id(l: Ident, r: Ident) -> bool {
+ l.name == r.name
+}
+
+pub fn eq_pat(l: &Pat, r: &Pat) -> bool {
+ use PatKind::*;
+ match (&l.kind, &r.kind) {
+ (Paren(l), _) => eq_pat(l, r),
+ (_, Paren(r)) => eq_pat(l, r),
+ (Wild, Wild) | (Rest, Rest) => true,
+ (Lit(l), Lit(r)) => eq_expr(l, r),
+ (Ident(b1, i1, s1), Ident(b2, i2, s2)) => b1 == b2 && eq_id(*i1, *i2) && both(s1, s2, |l, r| eq_pat(l, r)),
+ (Range(lf, lt, le), Range(rf, rt, re)) => {
+ eq_expr_opt(lf, rf) && eq_expr_opt(lt, rt) && eq_range_end(&le.node, &re.node)
+ },
+ (Box(l), Box(r))
+ | (Ref(l, Mutability::Not), Ref(r, Mutability::Not))
+ | (Ref(l, Mutability::Mut), Ref(r, Mutability::Mut)) => eq_pat(l, r),
+ (Tuple(l), Tuple(r)) | (Slice(l), Slice(r)) => over(l, r, |l, r| eq_pat(l, r)),
+ (Path(lq, lp), Path(rq, rp)) => both(lq, rq, eq_qself) && eq_path(lp, rp),
+ (TupleStruct(lqself, lp, lfs), TupleStruct(rqself, rp, rfs)) => {
+ eq_maybe_qself(lqself, rqself) && eq_path(lp, rp) && over(lfs, rfs, |l, r| eq_pat(l, r))
+ },
+ (Struct(lqself, lp, lfs, lr), Struct(rqself, rp, rfs, rr)) => {
+ lr == rr && eq_maybe_qself(lqself, rqself) && eq_path(lp, rp) && unordered_over(lfs, rfs, eq_field_pat)
+ },
+ (Or(ls), Or(rs)) => unordered_over(ls, rs, |l, r| eq_pat(l, r)),
+ (MacCall(l), MacCall(r)) => eq_mac_call(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_range_end(l: &RangeEnd, r: &RangeEnd) -> bool {
+ match (l, r) {
+ (RangeEnd::Excluded, RangeEnd::Excluded) => true,
+ (RangeEnd::Included(l), RangeEnd::Included(r)) => {
+ matches!(l, RangeSyntax::DotDotEq) == matches!(r, RangeSyntax::DotDotEq)
+ },
+ _ => false,
+ }
+}
+
+pub fn eq_field_pat(l: &PatField, r: &PatField) -> bool {
+ l.is_placeholder == r.is_placeholder
+ && eq_id(l.ident, r.ident)
+ && eq_pat(&l.pat, &r.pat)
+ && over(&l.attrs, &r.attrs, eq_attr)
+}
+
+pub fn eq_qself(l: &QSelf, r: &QSelf) -> bool {
+ l.position == r.position && eq_ty(&l.ty, &r.ty)
+}
+
+pub fn eq_maybe_qself(l: &Option<QSelf>, r: &Option<QSelf>) -> bool {
+ match (l, r) {
+ (Some(l), Some(r)) => eq_qself(l, r),
+ (None, None) => true,
+ _ => false,
+ }
+}
+
+pub fn eq_path(l: &Path, r: &Path) -> bool {
+ over(&l.segments, &r.segments, eq_path_seg)
+}
+
+pub fn eq_path_seg(l: &PathSegment, r: &PathSegment) -> bool {
+ eq_id(l.ident, r.ident) && both(&l.args, &r.args, |l, r| eq_generic_args(l, r))
+}
+
+pub fn eq_generic_args(l: &GenericArgs, r: &GenericArgs) -> bool {
+ match (l, r) {
+ (GenericArgs::AngleBracketed(l), GenericArgs::AngleBracketed(r)) => over(&l.args, &r.args, eq_angle_arg),
+ (GenericArgs::Parenthesized(l), GenericArgs::Parenthesized(r)) => {
+ over(&l.inputs, &r.inputs, |l, r| eq_ty(l, r)) && eq_fn_ret_ty(&l.output, &r.output)
+ },
+ _ => false,
+ }
+}
+
+pub fn eq_angle_arg(l: &AngleBracketedArg, r: &AngleBracketedArg) -> bool {
+ match (l, r) {
+ (AngleBracketedArg::Arg(l), AngleBracketedArg::Arg(r)) => eq_generic_arg(l, r),
+ (AngleBracketedArg::Constraint(l), AngleBracketedArg::Constraint(r)) => eq_assoc_constraint(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_generic_arg(l: &GenericArg, r: &GenericArg) -> bool {
+ match (l, r) {
+ (GenericArg::Lifetime(l), GenericArg::Lifetime(r)) => eq_id(l.ident, r.ident),
+ (GenericArg::Type(l), GenericArg::Type(r)) => eq_ty(l, r),
+ (GenericArg::Const(l), GenericArg::Const(r)) => eq_expr(&l.value, &r.value),
+ _ => false,
+ }
+}
+
+pub fn eq_expr_opt(l: &Option<P<Expr>>, r: &Option<P<Expr>>) -> bool {
+ both(l, r, |l, r| eq_expr(l, r))
+}
+
+pub fn eq_struct_rest(l: &StructRest, r: &StructRest) -> bool {
+ match (l, r) {
+ (StructRest::Base(lb), StructRest::Base(rb)) => eq_expr(lb, rb),
+ (StructRest::Rest(_), StructRest::Rest(_)) | (StructRest::None, StructRest::None) => true,
+ _ => false,
+ }
+}
+
+pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
+ use ExprKind::*;
+ if !over(&l.attrs, &r.attrs, eq_attr) {
+ return false;
+ }
+ match (&l.kind, &r.kind) {
+ (Paren(l), _) => eq_expr(l, r),
+ (_, Paren(r)) => eq_expr(l, r),
+ (Err, Err) => true,
+ (Box(l), Box(r)) | (Try(l), Try(r)) | (Await(l), Await(r)) => eq_expr(l, r),
+ (Array(l), Array(r)) | (Tup(l), Tup(r)) => over(l, r, |l, r| eq_expr(l, r)),
+ (Repeat(le, ls), Repeat(re, rs)) => eq_expr(le, re) && eq_expr(&ls.value, &rs.value),
+ (Call(lc, la), Call(rc, ra)) => eq_expr(lc, rc) && over(la, ra, |l, r| eq_expr(l, r)),
+ (MethodCall(lc, la, _), MethodCall(rc, ra, _)) => eq_path_seg(lc, rc) && over(la, ra, |l, r| eq_expr(l, r)),
+ (Binary(lo, ll, lr), Binary(ro, rl, rr)) => lo.node == ro.node && eq_expr(ll, rl) && eq_expr(lr, rr),
+ (Unary(lo, l), Unary(ro, r)) => mem::discriminant(lo) == mem::discriminant(ro) && eq_expr(l, r),
+ (Lit(l), Lit(r)) => l.kind == r.kind,
+ (Cast(l, lt), Cast(r, rt)) | (Type(l, lt), Type(r, rt)) => eq_expr(l, r) && eq_ty(lt, rt),
+ (Let(lp, le, _), Let(rp, re, _)) => eq_pat(lp, rp) && eq_expr(le, re),
+ (If(lc, lt, le), If(rc, rt, re)) => eq_expr(lc, rc) && eq_block(lt, rt) && eq_expr_opt(le, re),
+ (While(lc, lt, ll), While(rc, rt, rl)) => eq_label(ll, rl) && eq_expr(lc, rc) && eq_block(lt, rt),
+ (ForLoop(lp, li, lt, ll), ForLoop(rp, ri, rt, rl)) => {
+ eq_label(ll, rl) && eq_pat(lp, rp) && eq_expr(li, ri) && eq_block(lt, rt)
+ },
+ (Loop(lt, ll), Loop(rt, rl)) => eq_label(ll, rl) && eq_block(lt, rt),
+ (Block(lb, ll), Block(rb, rl)) => eq_label(ll, rl) && eq_block(lb, rb),
+ (TryBlock(l), TryBlock(r)) => eq_block(l, r),
+ (Yield(l), Yield(r)) | (Ret(l), Ret(r)) => eq_expr_opt(l, r),
+ (Break(ll, le), Break(rl, re)) => eq_label(ll, rl) && eq_expr_opt(le, re),
+ (Continue(ll), Continue(rl)) => eq_label(ll, rl),
+ (Assign(l1, l2, _), Assign(r1, r2, _)) | (Index(l1, l2), Index(r1, r2)) => eq_expr(l1, r1) && eq_expr(l2, r2),
+ (AssignOp(lo, lp, lv), AssignOp(ro, rp, rv)) => lo.node == ro.node && eq_expr(lp, rp) && eq_expr(lv, rv),
+ (Field(lp, lf), Field(rp, rf)) => eq_id(*lf, *rf) && eq_expr(lp, rp),
+ (Match(ls, la), Match(rs, ra)) => eq_expr(ls, rs) && over(la, ra, eq_arm),
+ (Closure(lb, lc, la, lm, lf, le, _), Closure(rb, rc, ra, rm, rf, re, _)) => {
+ eq_closure_binder(lb, rb)
+ && lc == rc
+ && la.is_async() == ra.is_async()
+ && lm == rm
+ && eq_fn_decl(lf, rf)
+ && eq_expr(le, re)
+ },
+ (Async(lc, _, lb), Async(rc, _, rb)) => lc == rc && eq_block(lb, rb),
+ (Range(lf, lt, ll), Range(rf, rt, rl)) => ll == rl && eq_expr_opt(lf, rf) && eq_expr_opt(lt, rt),
+ (AddrOf(lbk, lm, le), AddrOf(rbk, rm, re)) => lbk == rbk && lm == rm && eq_expr(le, re),
+ (Path(lq, lp), Path(rq, rp)) => both(lq, rq, eq_qself) && eq_path(lp, rp),
+ (MacCall(l), MacCall(r)) => eq_mac_call(l, r),
+ (Struct(lse), Struct(rse)) => {
+ eq_maybe_qself(&lse.qself, &rse.qself)
+ && eq_path(&lse.path, &rse.path)
+ && eq_struct_rest(&lse.rest, &rse.rest)
+ && unordered_over(&lse.fields, &rse.fields, eq_field)
+ },
+ _ => false,
+ }
+}
+
+pub fn eq_field(l: &ExprField, r: &ExprField) -> bool {
+ l.is_placeholder == r.is_placeholder
+ && eq_id(l.ident, r.ident)
+ && eq_expr(&l.expr, &r.expr)
+ && over(&l.attrs, &r.attrs, eq_attr)
+}
+
+pub fn eq_arm(l: &Arm, r: &Arm) -> bool {
+ l.is_placeholder == r.is_placeholder
+ && eq_pat(&l.pat, &r.pat)
+ && eq_expr(&l.body, &r.body)
+ && eq_expr_opt(&l.guard, &r.guard)
+ && over(&l.attrs, &r.attrs, eq_attr)
+}
+
+pub fn eq_label(l: &Option<Label>, r: &Option<Label>) -> bool {
+ both(l, r, |l, r| eq_id(l.ident, r.ident))
+}
+
+pub fn eq_block(l: &Block, r: &Block) -> bool {
+ l.rules == r.rules && over(&l.stmts, &r.stmts, eq_stmt)
+}
+
+pub fn eq_stmt(l: &Stmt, r: &Stmt) -> bool {
+ use StmtKind::*;
+ match (&l.kind, &r.kind) {
+ (Local(l), Local(r)) => {
+ eq_pat(&l.pat, &r.pat)
+ && both(&l.ty, &r.ty, |l, r| eq_ty(l, r))
+ && eq_local_kind(&l.kind, &r.kind)
+ && over(&l.attrs, &r.attrs, eq_attr)
+ },
+ (Item(l), Item(r)) => eq_item(l, r, eq_item_kind),
+ (Expr(l), Expr(r)) | (Semi(l), Semi(r)) => eq_expr(l, r),
+ (Empty, Empty) => true,
+ (MacCall(l), MacCall(r)) => {
+ l.style == r.style && eq_mac_call(&l.mac, &r.mac) && over(&l.attrs, &r.attrs, eq_attr)
+ },
+ _ => false,
+ }
+}
+
+pub fn eq_local_kind(l: &LocalKind, r: &LocalKind) -> bool {
+ use LocalKind::*;
+ match (l, r) {
+ (Decl, Decl) => true,
+ (Init(l), Init(r)) => eq_expr(l, r),
+ (InitElse(li, le), InitElse(ri, re)) => eq_expr(li, ri) && eq_block(le, re),
+ _ => false,
+ }
+}
+
+pub fn eq_item<K>(l: &Item<K>, r: &Item<K>, mut eq_kind: impl FnMut(&K, &K) -> bool) -> bool {
+ eq_id(l.ident, r.ident) && over(&l.attrs, &r.attrs, eq_attr) && eq_vis(&l.vis, &r.vis) && eq_kind(&l.kind, &r.kind)
+}
+
+#[expect(clippy::too_many_lines)] // Just a big match statement
+pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool {
+ use ItemKind::*;
+ match (l, r) {
+ (ExternCrate(l), ExternCrate(r)) => l == r,
+ (Use(l), Use(r)) => eq_use_tree(l, r),
+ (Static(lt, lm, le), Static(rt, rm, re)) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (Const(ld, lt, le), Const(rd, rt, re)) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (
+ Fn(box ast::Fn {
+ defaultness: ld,
+ sig: lf,
+ generics: lg,
+ body: lb,
+ }),
+ Fn(box ast::Fn {
+ defaultness: rd,
+ sig: rf,
+ generics: rg,
+ body: rb,
+ }),
+ ) => {
+ eq_defaultness(*ld, *rd) && eq_fn_sig(lf, rf) && eq_generics(lg, rg) && both(lb, rb, |l, r| eq_block(l, r))
+ },
+ (Mod(lu, lmk), Mod(ru, rmk)) => {
+ lu == ru
+ && match (lmk, rmk) {
+ (ModKind::Loaded(litems, linline, _), ModKind::Loaded(ritems, rinline, _)) => {
+ linline == rinline && over(litems, ritems, |l, r| eq_item(l, r, eq_item_kind))
+ },
+ (ModKind::Unloaded, ModKind::Unloaded) => true,
+ _ => false,
+ }
+ },
+ (ForeignMod(l), ForeignMod(r)) => {
+ both(&l.abi, &r.abi, eq_str_lit) && over(&l.items, &r.items, |l, r| eq_item(l, r, eq_foreign_item_kind))
+ },
+ (
+ TyAlias(box ast::TyAlias {
+ defaultness: ld,
+ generics: lg,
+ bounds: lb,
+ ty: lt,
+ ..
+ }),
+ TyAlias(box ast::TyAlias {
+ defaultness: rd,
+ generics: rg,
+ bounds: rb,
+ ty: rt,
+ ..
+ }),
+ ) => {
+ eq_defaultness(*ld, *rd)
+ && eq_generics(lg, rg)
+ && over(lb, rb, eq_generic_bound)
+ && both(lt, rt, |l, r| eq_ty(l, r))
+ },
+ (Enum(le, lg), Enum(re, rg)) => over(&le.variants, &re.variants, eq_variant) && eq_generics(lg, rg),
+ (Struct(lv, lg), Struct(rv, rg)) | (Union(lv, lg), Union(rv, rg)) => {
+ eq_variant_data(lv, rv) && eq_generics(lg, rg)
+ },
+ (
+ Trait(box ast::Trait {
+ is_auto: la,
+ unsafety: lu,
+ generics: lg,
+ bounds: lb,
+ items: li,
+ }),
+ Trait(box ast::Trait {
+ is_auto: ra,
+ unsafety: ru,
+ generics: rg,
+ bounds: rb,
+ items: ri,
+ }),
+ ) => {
+ la == ra
+ && matches!(lu, Unsafe::No) == matches!(ru, Unsafe::No)
+ && eq_generics(lg, rg)
+ && over(lb, rb, eq_generic_bound)
+ && over(li, ri, |l, r| eq_item(l, r, eq_assoc_item_kind))
+ },
+ (TraitAlias(lg, lb), TraitAlias(rg, rb)) => eq_generics(lg, rg) && over(lb, rb, eq_generic_bound),
+ (
+ Impl(box ast::Impl {
+ unsafety: lu,
+ polarity: lp,
+ defaultness: ld,
+ constness: lc,
+ generics: lg,
+ of_trait: lot,
+ self_ty: lst,
+ items: li,
+ }),
+ Impl(box ast::Impl {
+ unsafety: ru,
+ polarity: rp,
+ defaultness: rd,
+ constness: rc,
+ generics: rg,
+ of_trait: rot,
+ self_ty: rst,
+ items: ri,
+ }),
+ ) => {
+ matches!(lu, Unsafe::No) == matches!(ru, Unsafe::No)
+ && matches!(lp, ImplPolarity::Positive) == matches!(rp, ImplPolarity::Positive)
+ && eq_defaultness(*ld, *rd)
+ && matches!(lc, ast::Const::No) == matches!(rc, ast::Const::No)
+ && eq_generics(lg, rg)
+ && both(lot, rot, |l, r| eq_path(&l.path, &r.path))
+ && eq_ty(lst, rst)
+ && over(li, ri, |l, r| eq_item(l, r, eq_assoc_item_kind))
+ },
+ (MacCall(l), MacCall(r)) => eq_mac_call(l, r),
+ (MacroDef(l), MacroDef(r)) => l.macro_rules == r.macro_rules && eq_mac_args(&l.body, &r.body),
+ _ => false,
+ }
+}
+
+pub fn eq_foreign_item_kind(l: &ForeignItemKind, r: &ForeignItemKind) -> bool {
+ use ForeignItemKind::*;
+ match (l, r) {
+ (Static(lt, lm, le), Static(rt, rm, re)) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (
+ Fn(box ast::Fn {
+ defaultness: ld,
+ sig: lf,
+ generics: lg,
+ body: lb,
+ }),
+ Fn(box ast::Fn {
+ defaultness: rd,
+ sig: rf,
+ generics: rg,
+ body: rb,
+ }),
+ ) => {
+ eq_defaultness(*ld, *rd) && eq_fn_sig(lf, rf) && eq_generics(lg, rg) && both(lb, rb, |l, r| eq_block(l, r))
+ },
+ (
+ TyAlias(box ast::TyAlias {
+ defaultness: ld,
+ generics: lg,
+ bounds: lb,
+ ty: lt,
+ ..
+ }),
+ TyAlias(box ast::TyAlias {
+ defaultness: rd,
+ generics: rg,
+ bounds: rb,
+ ty: rt,
+ ..
+ }),
+ ) => {
+ eq_defaultness(*ld, *rd)
+ && eq_generics(lg, rg)
+ && over(lb, rb, eq_generic_bound)
+ && both(lt, rt, |l, r| eq_ty(l, r))
+ },
+ (MacCall(l), MacCall(r)) => eq_mac_call(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_assoc_item_kind(l: &AssocItemKind, r: &AssocItemKind) -> bool {
+ use AssocItemKind::*;
+ match (l, r) {
+ (Const(ld, lt, le), Const(rd, rt, re)) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ (
+ Fn(box ast::Fn {
+ defaultness: ld,
+ sig: lf,
+ generics: lg,
+ body: lb,
+ }),
+ Fn(box ast::Fn {
+ defaultness: rd,
+ sig: rf,
+ generics: rg,
+ body: rb,
+ }),
+ ) => {
+ eq_defaultness(*ld, *rd) && eq_fn_sig(lf, rf) && eq_generics(lg, rg) && both(lb, rb, |l, r| eq_block(l, r))
+ },
+ (
+ TyAlias(box ast::TyAlias {
+ defaultness: ld,
+ generics: lg,
+ bounds: lb,
+ ty: lt,
+ ..
+ }),
+ TyAlias(box ast::TyAlias {
+ defaultness: rd,
+ generics: rg,
+ bounds: rb,
+ ty: rt,
+ ..
+ }),
+ ) => {
+ eq_defaultness(*ld, *rd)
+ && eq_generics(lg, rg)
+ && over(lb, rb, eq_generic_bound)
+ && both(lt, rt, |l, r| eq_ty(l, r))
+ },
+ (MacCall(l), MacCall(r)) => eq_mac_call(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_variant(l: &Variant, r: &Variant) -> bool {
+ l.is_placeholder == r.is_placeholder
+ && over(&l.attrs, &r.attrs, eq_attr)
+ && eq_vis(&l.vis, &r.vis)
+ && eq_id(l.ident, r.ident)
+ && eq_variant_data(&l.data, &r.data)
+ && both(&l.disr_expr, &r.disr_expr, |l, r| eq_expr(&l.value, &r.value))
+}
+
+pub fn eq_variant_data(l: &VariantData, r: &VariantData) -> bool {
+ use VariantData::*;
+ match (l, r) {
+ (Unit(_), Unit(_)) => true,
+ (Struct(l, _), Struct(r, _)) | (Tuple(l, _), Tuple(r, _)) => over(l, r, eq_struct_field),
+ _ => false,
+ }
+}
+
+pub fn eq_struct_field(l: &FieldDef, r: &FieldDef) -> bool {
+ l.is_placeholder == r.is_placeholder
+ && over(&l.attrs, &r.attrs, eq_attr)
+ && eq_vis(&l.vis, &r.vis)
+ && both(&l.ident, &r.ident, |l, r| eq_id(*l, *r))
+ && eq_ty(&l.ty, &r.ty)
+}
+
+pub fn eq_fn_sig(l: &FnSig, r: &FnSig) -> bool {
+ eq_fn_decl(&l.decl, &r.decl) && eq_fn_header(&l.header, &r.header)
+}
+
+pub fn eq_fn_header(l: &FnHeader, r: &FnHeader) -> bool {
+ matches!(l.unsafety, Unsafe::No) == matches!(r.unsafety, Unsafe::No)
+ && l.asyncness.is_async() == r.asyncness.is_async()
+ && matches!(l.constness, Const::No) == matches!(r.constness, Const::No)
+ && eq_ext(&l.ext, &r.ext)
+}
+
+pub fn eq_generics(l: &Generics, r: &Generics) -> bool {
+ over(&l.params, &r.params, eq_generic_param)
+ && over(&l.where_clause.predicates, &r.where_clause.predicates, |l, r| {
+ eq_where_predicate(l, r)
+ })
+}
+
+pub fn eq_where_predicate(l: &WherePredicate, r: &WherePredicate) -> bool {
+ use WherePredicate::*;
+ match (l, r) {
+ (BoundPredicate(l), BoundPredicate(r)) => {
+ over(&l.bound_generic_params, &r.bound_generic_params, |l, r| {
+ eq_generic_param(l, r)
+ }) && eq_ty(&l.bounded_ty, &r.bounded_ty)
+ && over(&l.bounds, &r.bounds, eq_generic_bound)
+ },
+ (RegionPredicate(l), RegionPredicate(r)) => {
+ eq_id(l.lifetime.ident, r.lifetime.ident) && over(&l.bounds, &r.bounds, eq_generic_bound)
+ },
+ (EqPredicate(l), EqPredicate(r)) => eq_ty(&l.lhs_ty, &r.lhs_ty) && eq_ty(&l.rhs_ty, &r.rhs_ty),
+ _ => false,
+ }
+}
+
+pub fn eq_use_tree(l: &UseTree, r: &UseTree) -> bool {
+ eq_path(&l.prefix, &r.prefix) && eq_use_tree_kind(&l.kind, &r.kind)
+}
+
+pub fn eq_anon_const(l: &AnonConst, r: &AnonConst) -> bool {
+ eq_expr(&l.value, &r.value)
+}
+
+pub fn eq_use_tree_kind(l: &UseTreeKind, r: &UseTreeKind) -> bool {
+ use UseTreeKind::*;
+ match (l, r) {
+ (Glob, Glob) => true,
+ (Simple(l, _, _), Simple(r, _, _)) => both(l, r, |l, r| eq_id(*l, *r)),
+ (Nested(l), Nested(r)) => over(l, r, |(l, _), (r, _)| eq_use_tree(l, r)),
+ _ => false,
+ }
+}
+
+pub fn eq_defaultness(l: Defaultness, r: Defaultness) -> bool {
+ matches!(
+ (l, r),
+ (Defaultness::Final, Defaultness::Final) | (Defaultness::Default(_), Defaultness::Default(_))
+ )
+}
+
+pub fn eq_vis(l: &Visibility, r: &Visibility) -> bool {
+ use VisibilityKind::*;
+ match (&l.kind, &r.kind) {
+ (Public, Public) | (Inherited, Inherited) => true,
+ (Restricted { path: l, .. }, Restricted { path: r, .. }) => eq_path(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_fn_decl(l: &FnDecl, r: &FnDecl) -> bool {
+ eq_fn_ret_ty(&l.output, &r.output)
+ && over(&l.inputs, &r.inputs, |l, r| {
+ l.is_placeholder == r.is_placeholder
+ && eq_pat(&l.pat, &r.pat)
+ && eq_ty(&l.ty, &r.ty)
+ && over(&l.attrs, &r.attrs, eq_attr)
+ })
+}
+
+pub fn eq_closure_binder(l: &ClosureBinder, r: &ClosureBinder) -> bool {
+ match (l, r) {
+ (ClosureBinder::NotPresent, ClosureBinder::NotPresent) => true,
+ (ClosureBinder::For { generic_params: lp, .. }, ClosureBinder::For { generic_params: rp, .. }) => {
+ lp.len() == rp.len() && std::iter::zip(lp.iter(), rp.iter()).all(|(l, r)| eq_generic_param(l, r))
+ },
+ _ => false,
+ }
+}
+
+pub fn eq_fn_ret_ty(l: &FnRetTy, r: &FnRetTy) -> bool {
+ match (l, r) {
+ (FnRetTy::Default(_), FnRetTy::Default(_)) => true,
+ (FnRetTy::Ty(l), FnRetTy::Ty(r)) => eq_ty(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_ty(l: &Ty, r: &Ty) -> bool {
+ use TyKind::*;
+ match (&l.kind, &r.kind) {
+ (Paren(l), _) => eq_ty(l, r),
+ (_, Paren(r)) => eq_ty(l, r),
+ (Never, Never) | (Infer, Infer) | (ImplicitSelf, ImplicitSelf) | (Err, Err) | (CVarArgs, CVarArgs) => true,
+ (Slice(l), Slice(r)) => eq_ty(l, r),
+ (Array(le, ls), Array(re, rs)) => eq_ty(le, re) && eq_expr(&ls.value, &rs.value),
+ (Ptr(l), Ptr(r)) => l.mutbl == r.mutbl && eq_ty(&l.ty, &r.ty),
+ (Rptr(ll, l), Rptr(rl, r)) => {
+ both(ll, rl, |l, r| eq_id(l.ident, r.ident)) && l.mutbl == r.mutbl && eq_ty(&l.ty, &r.ty)
+ },
+ (BareFn(l), BareFn(r)) => {
+ l.unsafety == r.unsafety
+ && eq_ext(&l.ext, &r.ext)
+ && over(&l.generic_params, &r.generic_params, eq_generic_param)
+ && eq_fn_decl(&l.decl, &r.decl)
+ },
+ (Tup(l), Tup(r)) => over(l, r, |l, r| eq_ty(l, r)),
+ (Path(lq, lp), Path(rq, rp)) => both(lq, rq, eq_qself) && eq_path(lp, rp),
+ (TraitObject(lg, ls), TraitObject(rg, rs)) => ls == rs && over(lg, rg, eq_generic_bound),
+ (ImplTrait(_, lg), ImplTrait(_, rg)) => over(lg, rg, eq_generic_bound),
+ (Typeof(l), Typeof(r)) => eq_expr(&l.value, &r.value),
+ (MacCall(l), MacCall(r)) => eq_mac_call(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_ext(l: &Extern, r: &Extern) -> bool {
+ use Extern::*;
+ match (l, r) {
+ (None, None) | (Implicit(_), Implicit(_)) => true,
+ (Explicit(l, _), Explicit(r, _)) => eq_str_lit(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_str_lit(l: &StrLit, r: &StrLit) -> bool {
+ l.style == r.style && l.symbol == r.symbol && l.suffix == r.suffix
+}
+
+pub fn eq_poly_ref_trait(l: &PolyTraitRef, r: &PolyTraitRef) -> bool {
+ eq_path(&l.trait_ref.path, &r.trait_ref.path)
+ && over(&l.bound_generic_params, &r.bound_generic_params, |l, r| {
+ eq_generic_param(l, r)
+ })
+}
+
+pub fn eq_generic_param(l: &GenericParam, r: &GenericParam) -> bool {
+ use GenericParamKind::*;
+ l.is_placeholder == r.is_placeholder
+ && eq_id(l.ident, r.ident)
+ && over(&l.bounds, &r.bounds, eq_generic_bound)
+ && match (&l.kind, &r.kind) {
+ (Lifetime, Lifetime) => true,
+ (Type { default: l }, Type { default: r }) => both(l, r, |l, r| eq_ty(l, r)),
+ (
+ Const {
+ ty: lt,
+ kw_span: _,
+ default: ld,
+ },
+ Const {
+ ty: rt,
+ kw_span: _,
+ default: rd,
+ },
+ ) => eq_ty(lt, rt) && both(ld, rd, eq_anon_const),
+ _ => false,
+ }
+ && over(&l.attrs, &r.attrs, eq_attr)
+}
+
+pub fn eq_generic_bound(l: &GenericBound, r: &GenericBound) -> bool {
+ use GenericBound::*;
+ match (l, r) {
+ (Trait(ptr1, tbm1), Trait(ptr2, tbm2)) => tbm1 == tbm2 && eq_poly_ref_trait(ptr1, ptr2),
+ (Outlives(l), Outlives(r)) => eq_id(l.ident, r.ident),
+ _ => false,
+ }
+}
+
+fn eq_term(l: &Term, r: &Term) -> bool {
+ match (l, r) {
+ (Term::Ty(l), Term::Ty(r)) => eq_ty(l, r),
+ (Term::Const(l), Term::Const(r)) => eq_anon_const(l, r),
+ _ => false,
+ }
+}
+
+pub fn eq_assoc_constraint(l: &AssocConstraint, r: &AssocConstraint) -> bool {
+ use AssocConstraintKind::*;
+ eq_id(l.ident, r.ident)
+ && match (&l.kind, &r.kind) {
+ (Equality { term: l }, Equality { term: r }) => eq_term(l, r),
+ (Bound { bounds: l }, Bound { bounds: r }) => over(l, r, eq_generic_bound),
+ _ => false,
+ }
+}
+
+pub fn eq_mac_call(l: &MacCall, r: &MacCall) -> bool {
+ eq_path(&l.path, &r.path) && eq_mac_args(&l.args, &r.args)
+}
+
+pub fn eq_attr(l: &Attribute, r: &Attribute) -> bool {
+ use AttrKind::*;
+ l.style == r.style
+ && match (&l.kind, &r.kind) {
+ (DocComment(l1, l2), DocComment(r1, r2)) => l1 == r1 && l2 == r2,
+ (Normal(l, _), Normal(r, _)) => eq_path(&l.path, &r.path) && eq_mac_args(&l.args, &r.args),
+ _ => false,
+ }
+}
+
+pub fn eq_mac_args(l: &MacArgs, r: &MacArgs) -> bool {
+ use MacArgs::*;
+ match (l, r) {
+ (Empty, Empty) => true,
+ (Delimited(_, ld, lts), Delimited(_, rd, rts)) => ld == rd && lts.eq_unspanned(rts),
+ (Eq(_, MacArgsEq::Ast(le)), Eq(_, MacArgsEq::Ast(re))) => eq_expr(le, re),
+ (Eq(_, MacArgsEq::Hir(ll)), Eq(_, MacArgsEq::Hir(rl))) => ll.kind == rl.kind,
+ _ => false,
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/ast_utils/ident_iter.rs b/src/tools/clippy/clippy_utils/src/ast_utils/ident_iter.rs
new file mode 100644
index 000000000..eefcbabd8
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/ast_utils/ident_iter.rs
@@ -0,0 +1,45 @@
+use core::iter::FusedIterator;
+use rustc_ast::visit::{walk_attribute, walk_expr, Visitor};
+use rustc_ast::{Attribute, Expr};
+use rustc_span::symbol::Ident;
+
+pub struct IdentIter(std::vec::IntoIter<Ident>);
+
+impl Iterator for IdentIter {
+ type Item = Ident;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl FusedIterator for IdentIter {}
+
+impl From<&Expr> for IdentIter {
+ fn from(expr: &Expr) -> Self {
+ let mut visitor = IdentCollector::default();
+
+ walk_expr(&mut visitor, expr);
+
+ IdentIter(visitor.0.into_iter())
+ }
+}
+
+impl From<&Attribute> for IdentIter {
+ fn from(attr: &Attribute) -> Self {
+ let mut visitor = IdentCollector::default();
+
+ walk_attribute(&mut visitor, attr);
+
+ IdentIter(visitor.0.into_iter())
+ }
+}
+
+#[derive(Default)]
+struct IdentCollector(Vec<Ident>);
+
+impl Visitor<'_> for IdentCollector {
+ fn visit_ident(&mut self, ident: Ident) {
+ self.0.push(ident);
+ }
+}
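The collector pattern above (a visitor that pushes every identifier into a `Vec` and then hands out an iterator) can be sketched without rustc. The toy `Expr` type and `collect_idents` below are purely illustrative stand-ins for `rustc_ast::Expr` and `IdentCollector`.

```rust
// Toy expression tree standing in for rustc_ast::Expr.
enum Expr {
    Ident(String),
    Call(Box<Expr>, Vec<Expr>),
}

// Analogue of IdentCollector: walk the tree and record every identifier.
fn collect_idents(e: &Expr, out: &mut Vec<String>) {
    match e {
        Expr::Ident(name) => out.push(name.clone()),
        Expr::Call(callee, args) => {
            collect_idents(callee, out);
            for arg in args {
                collect_idents(arg, out);
            }
        },
    }
}

fn main() {
    let e = Expr::Call(
        Box::new(Expr::Ident("foo".into())),
        vec![Expr::Ident("x".into()), Expr::Ident("y".into())],
    );
    let mut idents = Vec::new();
    collect_idents(&e, &mut idents);
    assert_eq!(idents, ["foo", "x", "y"]); // same order the visitor saw them
}
```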
diff --git a/src/tools/clippy/clippy_utils/src/attrs.rs b/src/tools/clippy/clippy_utils/src/attrs.rs
new file mode 100644
index 000000000..186bba09d
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/attrs.rs
@@ -0,0 +1,159 @@
+use rustc_ast::ast;
+use rustc_ast::attr;
+use rustc_errors::Applicability;
+use rustc_session::Session;
+use rustc_span::sym;
+use std::str::FromStr;
+
+/// Deprecation status of attributes known by Clippy.
+pub enum DeprecationStatus {
+ /// Attribute is deprecated
+ Deprecated,
+ /// Attribute is deprecated and was replaced by the named attribute
+ Replaced(&'static str),
+ None,
+}
+
+#[rustfmt::skip]
+pub const BUILTIN_ATTRIBUTES: &[(&str, DeprecationStatus)] = &[
+ ("author", DeprecationStatus::None),
+ ("version", DeprecationStatus::None),
+ ("cognitive_complexity", DeprecationStatus::None),
+ ("cyclomatic_complexity", DeprecationStatus::Replaced("cognitive_complexity")),
+ ("dump", DeprecationStatus::None),
+ ("msrv", DeprecationStatus::None),
+ ("has_significant_drop", DeprecationStatus::None),
+];
+
+pub struct LimitStack {
+ stack: Vec<u64>,
+}
+
+impl Drop for LimitStack {
+ fn drop(&mut self) {
+ assert_eq!(self.stack.len(), 1);
+ }
+}
+
+impl LimitStack {
+ #[must_use]
+ pub fn new(limit: u64) -> Self {
+ Self { stack: vec![limit] }
+ }
+ pub fn limit(&self) -> u64 {
+ *self.stack.last().expect("there should always be a value in the stack")
+ }
+ pub fn push_attrs(&mut self, sess: &Session, attrs: &[ast::Attribute], name: &'static str) {
+ let stack = &mut self.stack;
+ parse_attrs(sess, attrs, name, |val| stack.push(val));
+ }
+ pub fn pop_attrs(&mut self, sess: &Session, attrs: &[ast::Attribute], name: &'static str) {
+ let stack = &mut self.stack;
+ parse_attrs(sess, attrs, name, |val| assert_eq!(stack.pop(), Some(val)));
+ }
+}
+
+pub fn get_attr<'a>(
+ sess: &'a Session,
+ attrs: &'a [ast::Attribute],
+ name: &'static str,
+) -> impl Iterator<Item = &'a ast::Attribute> {
+ attrs.iter().filter(move |attr| {
+ let attr = if let ast::AttrKind::Normal(ref attr, _) = attr.kind {
+ attr
+ } else {
+ return false;
+ };
+ let attr_segments = &attr.path.segments;
+ if attr_segments.len() == 2 && attr_segments[0].ident.name == sym::clippy {
+ BUILTIN_ATTRIBUTES
+ .iter()
+ .find_map(|&(builtin_name, ref deprecation_status)| {
+ if attr_segments[1].ident.name.as_str() == builtin_name {
+ Some(deprecation_status)
+ } else {
+ None
+ }
+ })
+ .map_or_else(
+ || {
+ sess.span_err(attr_segments[1].ident.span, "usage of unknown attribute");
+ false
+ },
+ |deprecation_status| {
+ let mut diag =
+ sess.struct_span_err(attr_segments[1].ident.span, "usage of deprecated attribute");
+ match *deprecation_status {
+ DeprecationStatus::Deprecated => {
+ diag.emit();
+ false
+ },
+ DeprecationStatus::Replaced(new_name) => {
+ diag.span_suggestion(
+ attr_segments[1].ident.span,
+ "consider using",
+ new_name,
+ Applicability::MachineApplicable,
+ );
+ diag.emit();
+ false
+ },
+ DeprecationStatus::None => {
+ diag.cancel();
+ attr_segments[1].ident.name.as_str() == name
+ },
+ }
+ },
+ )
+ } else {
+ false
+ }
+ })
+}
+
+fn parse_attrs<F: FnMut(u64)>(sess: &Session, attrs: &[ast::Attribute], name: &'static str, mut f: F) {
+ for attr in get_attr(sess, attrs, name) {
+ if let Some(ref value) = attr.value_str() {
+ if let Ok(value) = FromStr::from_str(value.as_str()) {
+ f(value);
+ } else {
+ sess.span_err(attr.span, "not a number");
+ }
+ } else {
+ sess.span_err(attr.span, "bad clippy attribute");
+ }
+ }
+}
+
+pub fn get_unique_inner_attr(sess: &Session, attrs: &[ast::Attribute], name: &'static str) -> Option<ast::Attribute> {
+ let mut unique_attr = None;
+ for attr in get_attr(sess, attrs, name) {
+ match attr.style {
+ ast::AttrStyle::Inner if unique_attr.is_none() => unique_attr = Some(attr.clone()),
+ ast::AttrStyle::Inner => {
+ sess.struct_span_err(attr.span, &format!("`{}` is defined multiple times", name))
+ .span_note(unique_attr.as_ref().unwrap().span, "first definition found here")
+ .emit();
+ },
+ ast::AttrStyle::Outer => {
+ sess.span_err(attr.span, &format!("`{}` cannot be an outer attribute", name));
+ },
+ }
+ }
+ unique_attr
+}
+
+/// Returns `true` if the attributes contain any of `proc_macro`,
+/// `proc_macro_derive` or `proc_macro_attribute`; `false` otherwise.
+pub fn is_proc_macro(sess: &Session, attrs: &[ast::Attribute]) -> bool {
+ attrs.iter().any(|attr| sess.is_proc_macro_attr(attr))
+}
+
+/// Returns `true` if the attributes contain `#[doc(hidden)]`.
+pub fn is_doc_hidden(attrs: &[ast::Attribute]) -> bool {
+ attrs
+ .iter()
+ .filter(|attr| attr.has_name(sym::doc))
+ .filter_map(ast::Attribute::meta_item_list)
+ .any(|l| attr::list_contains_name(&l, sym::hidden))
+}
diff --git a/src/tools/clippy/clippy_utils/src/comparisons.rs b/src/tools/clippy/clippy_utils/src/comparisons.rs
new file mode 100644
index 000000000..7a18d5e81
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/comparisons.rs
@@ -0,0 +1,36 @@
+//! Utility functions about comparison operators.
+
+#![deny(clippy::missing_docs_in_private_items)]
+
+use rustc_hir::{BinOpKind, Expr};
+
+#[derive(PartialEq, Eq, Debug, Copy, Clone)]
+/// Represent a normalized comparison operator.
+pub enum Rel {
+ /// `<`
+ Lt,
+ /// `<=`
+ Le,
+ /// `==`
+ Eq,
+ /// `!=`
+ Ne,
+}
+
+/// Put the expression in the form `lhs < rhs`, `lhs <= rhs`, `lhs == rhs` or
+/// `lhs != rhs`.
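+///
+/// # Example
+///
+/// An illustrative sketch (added here for exposition; not part of the upstream
+/// sources), where `a` and `b` are the HIR operands of `a > b`:
+///
+/// ```ignore
+/// // "a > b" is normalized to "b < a"
+/// let (rel, lhs, rhs) = normalize_comparison(BinOpKind::Gt, a, b).unwrap();
+/// assert_eq!(rel, Rel::Lt); // `lhs` is now `b`, `rhs` is now `a`
+/// ```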
+pub fn normalize_comparison<'a>(
+ op: BinOpKind,
+ lhs: &'a Expr<'a>,
+ rhs: &'a Expr<'a>,
+) -> Option<(Rel, &'a Expr<'a>, &'a Expr<'a>)> {
+ match op {
+ BinOpKind::Lt => Some((Rel::Lt, lhs, rhs)),
+ BinOpKind::Le => Some((Rel::Le, lhs, rhs)),
+ BinOpKind::Gt => Some((Rel::Lt, rhs, lhs)),
+ BinOpKind::Ge => Some((Rel::Le, rhs, lhs)),
+ BinOpKind::Eq => Some((Rel::Eq, rhs, lhs)),
+ BinOpKind::Ne => Some((Rel::Ne, rhs, lhs)),
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/consts.rs b/src/tools/clippy/clippy_utils/src/consts.rs
new file mode 100644
index 000000000..351a3f4ae
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/consts.rs
@@ -0,0 +1,652 @@
+#![allow(clippy::float_cmp)]
+
+use crate::{clip, is_direct_expn_of, sext, unsext};
+use if_chain::if_chain;
+use rustc_ast::ast::{self, LitFloatType, LitKind};
+use rustc_data_structures::sync::Lrc;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{BinOp, BinOpKind, Block, Expr, ExprKind, HirId, Item, ItemKind, Node, QPath, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::mir;
+use rustc_middle::mir::interpret::Scalar;
+use rustc_middle::ty::subst::{Subst, SubstsRef};
+use rustc_middle::ty::{self, EarlyBinder, FloatTy, ScalarInt, Ty, TyCtxt};
+use rustc_middle::{bug, span_bug};
+use rustc_span::symbol::Symbol;
+use std::cmp::Ordering::{self, Equal};
+use std::hash::{Hash, Hasher};
+use std::iter;
+
+/// A `LitKind`-like enum to fold constant `Expr`s into.
+#[derive(Debug, Clone)]
+pub enum Constant {
+ /// A `String` (e.g., "abc").
+ Str(String),
+ /// A binary string (e.g., `b"abc"`).
+ Binary(Lrc<[u8]>),
+ /// A single `char` (e.g., `'a'`).
+ Char(char),
+ /// An integer's bit representation.
+ Int(u128),
+ /// An `f32`.
+ F32(f32),
+ /// An `f64`.
+ F64(f64),
+ /// `true` or `false`.
+ Bool(bool),
+ /// An array of constants.
+ Vec(Vec<Constant>),
+ /// Also an array, but with only one constant, repeated N times.
+ Repeat(Box<Constant>, u64),
+ /// A tuple of constants.
+ Tuple(Vec<Constant>),
+ /// A raw pointer.
+ RawPtr(u128),
+ /// A reference
+ Ref(Box<Constant>),
+ /// A literal with syntax error.
+ Err(Symbol),
+}
+
+impl PartialEq for Constant {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (&Self::Str(ref ls), &Self::Str(ref rs)) => ls == rs,
+ (&Self::Binary(ref l), &Self::Binary(ref r)) => l == r,
+ (&Self::Char(l), &Self::Char(r)) => l == r,
+ (&Self::Int(l), &Self::Int(r)) => l == r,
+ (&Self::F64(l), &Self::F64(r)) => {
+ // We want `Fw32 == FwAny` and `FwAny == Fw64`, and by transitivity we must have
+ // `Fw32 == Fw64`, so don’t compare them.
+ // `to_bits` is required to catch non-matching 0.0, -0.0, and NaNs.
+ l.to_bits() == r.to_bits()
+ },
+ (&Self::F32(l), &Self::F32(r)) => {
+ // We want `Fw32 == FwAny` and `FwAny == Fw64`, and by transitivity we must have
+ // `Fw32 == Fw64`, so don’t compare them.
+ // `to_bits` is required to catch non-matching 0.0, -0.0, and NaNs.
+ f64::from(l).to_bits() == f64::from(r).to_bits()
+ },
+ (&Self::Bool(l), &Self::Bool(r)) => l == r,
+ (&Self::Vec(ref l), &Self::Vec(ref r)) | (&Self::Tuple(ref l), &Self::Tuple(ref r)) => l == r,
+ (&Self::Repeat(ref lv, ref ls), &Self::Repeat(ref rv, ref rs)) => ls == rs && lv == rv,
+ (&Self::Ref(ref lb), &Self::Ref(ref rb)) => *lb == *rb,
+ // TODO: are there inter-type equalities?
+ _ => false,
+ }
+ }
+}
+
+impl Hash for Constant {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ std::mem::discriminant(self).hash(state);
+ match *self {
+ Self::Str(ref s) => {
+ s.hash(state);
+ },
+ Self::Binary(ref b) => {
+ b.hash(state);
+ },
+ Self::Char(c) => {
+ c.hash(state);
+ },
+ Self::Int(i) => {
+ i.hash(state);
+ },
+ Self::F32(f) => {
+ f64::from(f).to_bits().hash(state);
+ },
+ Self::F64(f) => {
+ f.to_bits().hash(state);
+ },
+ Self::Bool(b) => {
+ b.hash(state);
+ },
+ Self::Vec(ref v) | Self::Tuple(ref v) => {
+ v.hash(state);
+ },
+ Self::Repeat(ref c, l) => {
+ c.hash(state);
+ l.hash(state);
+ },
+ Self::RawPtr(u) => {
+ u.hash(state);
+ },
+ Self::Ref(ref r) => {
+ r.hash(state);
+ },
+ Self::Err(ref s) => {
+ s.hash(state);
+ },
+ }
+ }
+}
+
+impl Constant {
+ pub fn partial_cmp(tcx: TyCtxt<'_>, cmp_type: Ty<'_>, left: &Self, right: &Self) -> Option<Ordering> {
+ match (left, right) {
+ (&Self::Str(ref ls), &Self::Str(ref rs)) => Some(ls.cmp(rs)),
+ (&Self::Char(ref l), &Self::Char(ref r)) => Some(l.cmp(r)),
+ (&Self::Int(l), &Self::Int(r)) => match *cmp_type.kind() {
+ ty::Int(int_ty) => Some(sext(tcx, l, int_ty).cmp(&sext(tcx, r, int_ty))),
+ ty::Uint(_) => Some(l.cmp(&r)),
+ _ => bug!("Not an int type"),
+ },
+ (&Self::F64(l), &Self::F64(r)) => l.partial_cmp(&r),
+ (&Self::F32(l), &Self::F32(r)) => l.partial_cmp(&r),
+ (&Self::Bool(ref l), &Self::Bool(ref r)) => Some(l.cmp(r)),
+ (&Self::Tuple(ref l), &Self::Tuple(ref r)) | (&Self::Vec(ref l), &Self::Vec(ref r)) => iter::zip(l, r)
+ .map(|(li, ri)| Self::partial_cmp(tcx, cmp_type, li, ri))
+ .find(|r| r.map_or(true, |o| o != Ordering::Equal))
+ .unwrap_or_else(|| Some(l.len().cmp(&r.len()))),
+ (&Self::Repeat(ref lv, ref ls), &Self::Repeat(ref rv, ref rs)) => {
+ match Self::partial_cmp(tcx, cmp_type, lv, rv) {
+ Some(Equal) => Some(ls.cmp(rs)),
+ x => x,
+ }
+ },
+ (&Self::Ref(ref lb), &Self::Ref(ref rb)) => Self::partial_cmp(tcx, cmp_type, lb, rb),
+ // TODO: are there any useful inter-type orderings?
+ _ => None,
+ }
+ }
+
+ /// Returns the integer value, or `None` if `self` or `val_type` is not an integer type.
+ pub fn int_value(&self, cx: &LateContext<'_>, val_type: Ty<'_>) -> Option<FullInt> {
+ if let Constant::Int(const_int) = *self {
+ match *val_type.kind() {
+ ty::Int(ity) => Some(FullInt::S(sext(cx.tcx, const_int, ity))),
+ ty::Uint(_) => Some(FullInt::U(const_int)),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ }
+
+ #[must_use]
+ pub fn peel_refs(mut self) -> Self {
+ while let Constant::Ref(r) = self {
+ self = *r;
+ }
+ self
+ }
+}
+
+/// Parses a `LitKind` to a `Constant`.
+pub fn lit_to_mir_constant(lit: &LitKind, ty: Option<Ty<'_>>) -> Constant {
+ match *lit {
+ LitKind::Str(ref is, _) => Constant::Str(is.to_string()),
+ LitKind::Byte(b) => Constant::Int(u128::from(b)),
+ LitKind::ByteStr(ref s) => Constant::Binary(Lrc::clone(s)),
+ LitKind::Char(c) => Constant::Char(c),
+ LitKind::Int(n, _) => Constant::Int(n),
+ LitKind::Float(ref is, LitFloatType::Suffixed(fty)) => match fty {
+ ast::FloatTy::F32 => Constant::F32(is.as_str().parse().unwrap()),
+ ast::FloatTy::F64 => Constant::F64(is.as_str().parse().unwrap()),
+ },
+ LitKind::Float(ref is, LitFloatType::Unsuffixed) => match ty.expect("type of float is known").kind() {
+ ty::Float(FloatTy::F32) => Constant::F32(is.as_str().parse().unwrap()),
+ ty::Float(FloatTy::F64) => Constant::F64(is.as_str().parse().unwrap()),
+ _ => bug!(),
+ },
+ LitKind::Bool(b) => Constant::Bool(b),
+ LitKind::Err(s) => Constant::Err(s),
+ }
+}
+
+pub fn constant<'tcx>(
+ lcx: &LateContext<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
+ e: &Expr<'_>,
+) -> Option<(Constant, bool)> {
+ let mut cx = ConstEvalLateContext {
+ lcx,
+ typeck_results,
+ param_env: lcx.param_env,
+ needed_resolution: false,
+ substs: lcx.tcx.intern_substs(&[]),
+ };
+ cx.expr(e).map(|cst| (cst, cx.needed_resolution))
+}
+
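+/// Like `constant`, but returns `None` when the constant could only be computed by
+/// resolving other definitions (a brief doc added here for exposition; not part of
+/// the upstream sources).
+///
+/// ```ignore
+/// // `2 + 3`          -> Some(Constant::Int(5))
+/// // `SOME_CONST + 1` -> None, because resolving `SOME_CONST` was needed
+/// let folded = constant_simple(cx, cx.typeck_results(), expr);
+/// ```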
+pub fn constant_simple<'tcx>(
+ lcx: &LateContext<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
+ e: &Expr<'_>,
+) -> Option<Constant> {
+ constant(lcx, typeck_results, e).and_then(|(cst, res)| if res { None } else { Some(cst) })
+}
+
+pub fn constant_full_int<'tcx>(
+ lcx: &LateContext<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
+ e: &Expr<'_>,
+) -> Option<FullInt> {
+ constant_simple(lcx, typeck_results, e)?.int_value(lcx, typeck_results.expr_ty(e))
+}
+
+#[derive(Copy, Clone, Debug, Eq)]
+pub enum FullInt {
+ S(i128),
+ U(u128),
+}
+
+impl PartialEq for FullInt {
+ #[must_use]
+ fn eq(&self, other: &Self) -> bool {
+ self.cmp(other) == Ordering::Equal
+ }
+}
+
+impl PartialOrd for FullInt {
+ #[must_use]
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for FullInt {
+ #[must_use]
+ fn cmp(&self, other: &Self) -> Ordering {
+ use FullInt::{S, U};
+
+ fn cmp_s_u(s: i128, u: u128) -> Ordering {
+ u128::try_from(s).map_or(Ordering::Less, |x| x.cmp(&u))
+ }
+
+ match (*self, *other) {
+ (S(s), S(o)) => s.cmp(&o),
+ (U(s), U(o)) => s.cmp(&o),
+ (S(s), U(o)) => cmp_s_u(s, o),
+ (U(s), S(o)) => cmp_s_u(o, s).reverse(),
+ }
+ }
+}
+
+/// Creates a `ConstEvalLateContext` from the given `LateContext` and `TypeckResults`.
+pub fn constant_context<'a, 'tcx>(
+ lcx: &'a LateContext<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
+) -> ConstEvalLateContext<'a, 'tcx> {
+ ConstEvalLateContext {
+ lcx,
+ typeck_results,
+ param_env: lcx.param_env,
+ needed_resolution: false,
+ substs: lcx.tcx.intern_substs(&[]),
+ }
+}
+
+pub struct ConstEvalLateContext<'a, 'tcx> {
+ lcx: &'a LateContext<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ needed_resolution: bool,
+ substs: SubstsRef<'tcx>,
+}
+
+impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
+ /// Simple constant folding: insert an expression, get back a constant or `None`.
+ pub fn expr(&mut self, e: &Expr<'_>) -> Option<Constant> {
+ match e.kind {
+ ExprKind::Path(ref qpath) => self.fetch_path(qpath, e.hir_id, self.typeck_results.expr_ty(e)),
+ ExprKind::Block(block, _) => self.block(block),
+ ExprKind::Lit(ref lit) => {
+ if is_direct_expn_of(e.span, "cfg").is_some() {
+ None
+ } else {
+ Some(lit_to_mir_constant(&lit.node, self.typeck_results.expr_ty_opt(e)))
+ }
+ },
+ ExprKind::Array(vec) => self.multi(vec).map(Constant::Vec),
+ ExprKind::Tup(tup) => self.multi(tup).map(Constant::Tuple),
+ ExprKind::Repeat(value, _) => {
+ let n = match self.typeck_results.expr_ty(e).kind() {
+ ty::Array(_, n) => n.try_eval_usize(self.lcx.tcx, self.lcx.param_env)?,
+ _ => span_bug!(e.span, "typeck error"),
+ };
+ self.expr(value).map(|v| Constant::Repeat(Box::new(v), n))
+ },
+ ExprKind::Unary(op, operand) => self.expr(operand).and_then(|o| match op {
+ UnOp::Not => self.constant_not(&o, self.typeck_results.expr_ty(e)),
+ UnOp::Neg => self.constant_negate(&o, self.typeck_results.expr_ty(e)),
+ UnOp::Deref => Some(if let Constant::Ref(r) = o { *r } else { o }),
+ }),
+ ExprKind::If(cond, then, ref otherwise) => self.ifthenelse(cond, then, *otherwise),
+ ExprKind::Binary(op, left, right) => self.binop(op, left, right),
+ ExprKind::Call(callee, args) => {
+ // We only handle a few const functions for now.
+ if_chain! {
+ if args.is_empty();
+ if let ExprKind::Path(qpath) = &callee.kind;
+ let res = self.typeck_results.qpath_res(qpath, callee.hir_id);
+ if let Some(def_id) = res.opt_def_id();
+ let def_path = self.lcx.get_def_path(def_id);
+ let def_path: Vec<&str> = def_path.iter().take(4).map(Symbol::as_str).collect();
+ if let ["core", "num", int_impl, "max_value"] = *def_path;
+ then {
+ let value = match int_impl {
+ "<impl i8>" => i8::MAX as u128,
+ "<impl i16>" => i16::MAX as u128,
+ "<impl i32>" => i32::MAX as u128,
+ "<impl i64>" => i64::MAX as u128,
+ "<impl i128>" => i128::MAX as u128,
+ _ => return None,
+ };
+ Some(Constant::Int(value))
+ } else {
+ None
+ }
+ }
+ },
+ ExprKind::Index(arr, index) => self.index(arr, index),
+ ExprKind::AddrOf(_, _, inner) => self.expr(inner).map(|r| Constant::Ref(Box::new(r))),
+ // TODO: add other expressions.
+ _ => None,
+ }
+ }
+
+ #[expect(clippy::cast_possible_wrap)]
+ fn constant_not(&self, o: &Constant, ty: Ty<'_>) -> Option<Constant> {
+ use self::Constant::{Bool, Int};
+ match *o {
+ Bool(b) => Some(Bool(!b)),
+ Int(value) => {
+ let value = !value;
+ match *ty.kind() {
+ ty::Int(ity) => Some(Int(unsext(self.lcx.tcx, value as i128, ity))),
+ ty::Uint(ity) => Some(Int(clip(self.lcx.tcx, value, ity))),
+ _ => None,
+ }
+ },
+ _ => None,
+ }
+ }
+
+ fn constant_negate(&self, o: &Constant, ty: Ty<'_>) -> Option<Constant> {
+ use self::Constant::{Int, F32, F64};
+ match *o {
+ Int(value) => {
+ let ity = match *ty.kind() {
+ ty::Int(ity) => ity,
+ _ => return None,
+ };
+ // sign extend
+ let value = sext(self.lcx.tcx, value, ity);
+ let value = value.checked_neg()?;
+ // clear unused bits
+ Some(Int(unsext(self.lcx.tcx, value, ity)))
+ },
+ F32(f) => Some(F32(-f)),
+ F64(f) => Some(F64(-f)),
+ _ => None,
+ }
+ }
+
+ /// Creates `Some(vec![..])` containing all the constants, unless any
+ /// part is non-constant.
+ fn multi(&mut self, vec: &[Expr<'_>]) -> Option<Vec<Constant>> {
+ vec.iter().map(|elem| self.expr(elem)).collect::<Option<_>>()
+ }
+
+ /// Looks up a possibly constant expression from an `ExprKind::Path`.
+ fn fetch_path(&mut self, qpath: &QPath<'_>, id: HirId, ty: Ty<'tcx>) -> Option<Constant> {
+ let res = self.typeck_results.qpath_res(qpath, id);
+ match res {
+ Res::Def(DefKind::Const | DefKind::AssocConst, def_id) => {
+ // Check if this constant is based on `cfg!(..)`,
+ // which is NOT constant for our purposes.
+ if let Some(node) = self.lcx.tcx.hir().get_if_local(def_id) &&
+ let Node::Item(&Item {
+ kind: ItemKind::Const(_, body_id),
+ ..
+ }) = node &&
+ let Node::Expr(&Expr {
+ kind: ExprKind::Lit(_),
+ span,
+ ..
+ }) = self.lcx.tcx.hir().get(body_id.hir_id) &&
+ is_direct_expn_of(span, "cfg").is_some() {
+ return None;
+ }
+
+ let substs = self.typeck_results.node_substs(id);
+ let substs = if self.substs.is_empty() {
+ substs
+ } else {
+ EarlyBinder(substs).subst(self.lcx.tcx, self.substs)
+ };
+
+ let result = self
+ .lcx
+ .tcx
+ .const_eval_resolve(
+ self.param_env,
+ ty::Unevaluated::new(ty::WithOptConstParam::unknown(def_id), substs),
+ None,
+ )
+ .ok()
+ .map(|val| rustc_middle::mir::ConstantKind::from_value(val, ty))?;
+ let result = miri_to_const(self.lcx.tcx, result);
+ if result.is_some() {
+ self.needed_resolution = true;
+ }
+ result
+ },
+ // FIXME: cover all usable cases.
+ _ => None,
+ }
+ }
+
+ fn index(&mut self, lhs: &'_ Expr<'_>, index: &'_ Expr<'_>) -> Option<Constant> {
+ let lhs = self.expr(lhs);
+ let index = self.expr(index);
+
+ match (lhs, index) {
+ (Some(Constant::Vec(vec)), Some(Constant::Int(index))) => match vec.get(index as usize) {
+ Some(Constant::F32(x)) => Some(Constant::F32(*x)),
+ Some(Constant::F64(x)) => Some(Constant::F64(*x)),
+ _ => None,
+ },
+ (Some(Constant::Vec(vec)), _) => {
+ if !vec.is_empty() && vec.iter().all(|x| *x == vec[0]) {
+ match vec.get(0) {
+ Some(Constant::F32(x)) => Some(Constant::F32(*x)),
+ Some(Constant::F64(x)) => Some(Constant::F64(*x)),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+ }
+
+ /// A block can only yield a constant if it has no statements and its trailing expression is constant.
+ fn block(&mut self, block: &Block<'_>) -> Option<Constant> {
+ if block.stmts.is_empty() {
+ block.expr.as_ref().and_then(|b| self.expr(b))
+ } else {
+ None
+ }
+ }
+
+ fn ifthenelse(&mut self, cond: &Expr<'_>, then: &Expr<'_>, otherwise: Option<&Expr<'_>>) -> Option<Constant> {
+ if let Some(Constant::Bool(b)) = self.expr(cond) {
+ if b {
+ self.expr(then)
+ } else {
+ otherwise.as_ref().and_then(|expr| self.expr(expr))
+ }
+ } else {
+ None
+ }
+ }
+
+ fn binop(&mut self, op: BinOp, left: &Expr<'_>, right: &Expr<'_>) -> Option<Constant> {
+ let l = self.expr(left)?;
+ let r = self.expr(right);
+ match (l, r) {
+ (Constant::Int(l), Some(Constant::Int(r))) => match *self.typeck_results.expr_ty_opt(left)?.kind() {
+ ty::Int(ity) => {
+ let l = sext(self.lcx.tcx, l, ity);
+ let r = sext(self.lcx.tcx, r, ity);
+ let zext = |n: i128| Constant::Int(unsext(self.lcx.tcx, n, ity));
+ match op.node {
+ BinOpKind::Add => l.checked_add(r).map(zext),
+ BinOpKind::Sub => l.checked_sub(r).map(zext),
+ BinOpKind::Mul => l.checked_mul(r).map(zext),
+ BinOpKind::Div if r != 0 => l.checked_div(r).map(zext),
+ BinOpKind::Rem if r != 0 => l.checked_rem(r).map(zext),
+ BinOpKind::Shr => l.checked_shr(r.try_into().expect("invalid shift")).map(zext),
+ BinOpKind::Shl => l.checked_shl(r.try_into().expect("invalid shift")).map(zext),
+ BinOpKind::BitXor => Some(zext(l ^ r)),
+ BinOpKind::BitOr => Some(zext(l | r)),
+ BinOpKind::BitAnd => Some(zext(l & r)),
+ BinOpKind::Eq => Some(Constant::Bool(l == r)),
+ BinOpKind::Ne => Some(Constant::Bool(l != r)),
+ BinOpKind::Lt => Some(Constant::Bool(l < r)),
+ BinOpKind::Le => Some(Constant::Bool(l <= r)),
+ BinOpKind::Ge => Some(Constant::Bool(l >= r)),
+ BinOpKind::Gt => Some(Constant::Bool(l > r)),
+ _ => None,
+ }
+ },
+ ty::Uint(_) => match op.node {
+ BinOpKind::Add => l.checked_add(r).map(Constant::Int),
+ BinOpKind::Sub => l.checked_sub(r).map(Constant::Int),
+ BinOpKind::Mul => l.checked_mul(r).map(Constant::Int),
+ BinOpKind::Div => l.checked_div(r).map(Constant::Int),
+ BinOpKind::Rem => l.checked_rem(r).map(Constant::Int),
+ BinOpKind::Shr => l.checked_shr(r.try_into().expect("shift too large")).map(Constant::Int),
+ BinOpKind::Shl => l.checked_shl(r.try_into().expect("shift too large")).map(Constant::Int),
+ BinOpKind::BitXor => Some(Constant::Int(l ^ r)),
+ BinOpKind::BitOr => Some(Constant::Int(l | r)),
+ BinOpKind::BitAnd => Some(Constant::Int(l & r)),
+ BinOpKind::Eq => Some(Constant::Bool(l == r)),
+ BinOpKind::Ne => Some(Constant::Bool(l != r)),
+ BinOpKind::Lt => Some(Constant::Bool(l < r)),
+ BinOpKind::Le => Some(Constant::Bool(l <= r)),
+ BinOpKind::Ge => Some(Constant::Bool(l >= r)),
+ BinOpKind::Gt => Some(Constant::Bool(l > r)),
+ _ => None,
+ },
+ _ => None,
+ },
+ (Constant::F32(l), Some(Constant::F32(r))) => match op.node {
+ BinOpKind::Add => Some(Constant::F32(l + r)),
+ BinOpKind::Sub => Some(Constant::F32(l - r)),
+ BinOpKind::Mul => Some(Constant::F32(l * r)),
+ BinOpKind::Div => Some(Constant::F32(l / r)),
+ BinOpKind::Rem => Some(Constant::F32(l % r)),
+ BinOpKind::Eq => Some(Constant::Bool(l == r)),
+ BinOpKind::Ne => Some(Constant::Bool(l != r)),
+ BinOpKind::Lt => Some(Constant::Bool(l < r)),
+ BinOpKind::Le => Some(Constant::Bool(l <= r)),
+ BinOpKind::Ge => Some(Constant::Bool(l >= r)),
+ BinOpKind::Gt => Some(Constant::Bool(l > r)),
+ _ => None,
+ },
+ (Constant::F64(l), Some(Constant::F64(r))) => match op.node {
+ BinOpKind::Add => Some(Constant::F64(l + r)),
+ BinOpKind::Sub => Some(Constant::F64(l - r)),
+ BinOpKind::Mul => Some(Constant::F64(l * r)),
+ BinOpKind::Div => Some(Constant::F64(l / r)),
+ BinOpKind::Rem => Some(Constant::F64(l % r)),
+ BinOpKind::Eq => Some(Constant::Bool(l == r)),
+ BinOpKind::Ne => Some(Constant::Bool(l != r)),
+ BinOpKind::Lt => Some(Constant::Bool(l < r)),
+ BinOpKind::Le => Some(Constant::Bool(l <= r)),
+ BinOpKind::Ge => Some(Constant::Bool(l >= r)),
+ BinOpKind::Gt => Some(Constant::Bool(l > r)),
+ _ => None,
+ },
+ (l, r) => match (op.node, l, r) {
+ (BinOpKind::And, Constant::Bool(false), _) => Some(Constant::Bool(false)),
+ (BinOpKind::Or, Constant::Bool(true), _) => Some(Constant::Bool(true)),
+ (BinOpKind::And, Constant::Bool(true), Some(r)) | (BinOpKind::Or, Constant::Bool(false), Some(r)) => {
+ Some(r)
+ },
+ (BinOpKind::BitXor, Constant::Bool(l), Some(Constant::Bool(r))) => Some(Constant::Bool(l ^ r)),
+ (BinOpKind::BitAnd, Constant::Bool(l), Some(Constant::Bool(r))) => Some(Constant::Bool(l & r)),
+ (BinOpKind::BitOr, Constant::Bool(l), Some(Constant::Bool(r))) => Some(Constant::Bool(l | r)),
+ _ => None,
+ },
+ }
+ }
+}
+
+pub fn miri_to_const<'tcx>(tcx: TyCtxt<'tcx>, result: mir::ConstantKind<'tcx>) -> Option<Constant> {
+ use rustc_middle::mir::interpret::ConstValue;
+ match result {
+ mir::ConstantKind::Val(ConstValue::Scalar(Scalar::Int(int)), _) => {
+ match result.ty().kind() {
+ ty::Bool => Some(Constant::Bool(int == ScalarInt::TRUE)),
+ ty::Uint(_) | ty::Int(_) => Some(Constant::Int(int.assert_bits(int.size()))),
+ ty::Float(FloatTy::F32) => Some(Constant::F32(f32::from_bits(
+ int.try_into().expect("invalid f32 bit representation"),
+ ))),
+ ty::Float(FloatTy::F64) => Some(Constant::F64(f64::from_bits(
+ int.try_into().expect("invalid f64 bit representation"),
+ ))),
+ ty::RawPtr(type_and_mut) => {
+ if let ty::Uint(_) = type_and_mut.ty.kind() {
+ return Some(Constant::RawPtr(int.assert_bits(int.size())));
+ }
+ None
+ },
+ // FIXME: implement other conversions.
+ _ => None,
+ }
+ },
+ mir::ConstantKind::Val(ConstValue::Slice { data, start, end }, _) => match result.ty().kind() {
+ ty::Ref(_, tam, _) => match tam.kind() {
+ ty::Str => String::from_utf8(
+ data.inner()
+ .inspect_with_uninit_and_ptr_outside_interpreter(start..end)
+ .to_owned(),
+ )
+ .ok()
+ .map(Constant::Str),
+ _ => None,
+ },
+ _ => None,
+ },
+ mir::ConstantKind::Val(ConstValue::ByRef { alloc, offset: _ }, _) => match result.ty().kind() {
+ ty::Array(sub_type, len) => match sub_type.kind() {
+ ty::Float(FloatTy::F32) => match len.kind().try_to_machine_usize(tcx) {
+ Some(len) => alloc
+ .inner()
+ .inspect_with_uninit_and_ptr_outside_interpreter(0..(4 * usize::try_from(len).unwrap()))
+ .to_owned()
+ .array_chunks::<4>()
+ .map(|&chunk| Some(Constant::F32(f32::from_le_bytes(chunk))))
+ .collect::<Option<Vec<Constant>>>()
+ .map(Constant::Vec),
+ _ => None,
+ },
+ ty::Float(FloatTy::F64) => match len.kind().try_to_machine_usize(tcx) {
+ Some(len) => alloc
+ .inner()
+ .inspect_with_uninit_and_ptr_outside_interpreter(0..(8 * usize::try_from(len).unwrap()))
+ .to_owned()
+ .array_chunks::<8>()
+ .map(|&chunk| Some(Constant::F64(f64::from_le_bytes(chunk))))
+ .collect::<Option<Vec<Constant>>>()
+ .map(Constant::Vec),
+ _ => None,
+ },
+ // FIXME: implement other array type conversions.
+ _ => None,
+ },
+ _ => None,
+ },
+ // FIXME: implement other conversions.
+ _ => None,
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/diagnostics.rs b/src/tools/clippy/clippy_utils/src/diagnostics.rs
new file mode 100644
index 000000000..7f55db3b3
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/diagnostics.rs
@@ -0,0 +1,249 @@
+//! Clippy wrappers around rustc's diagnostic functions.
+//!
+//! These functions are used by the `INTERNAL_METADATA_COLLECTOR` lint to collect the corresponding
+//! lint applicability. Please make sure that you update the `LINT_EMISSION_FUNCTIONS` variable in
+//! `clippy_lints::utils::internal_lints::metadata_collector` when a new function is added
+//! or renamed.
+//!
+//! Thank you!
+//! ~The `INTERNAL_METADATA_COLLECTOR` lint
+
+use rustc_errors::{Applicability, Diagnostic, MultiSpan};
+use rustc_hir::HirId;
+use rustc_lint::{LateContext, Lint, LintContext};
+use rustc_span::source_map::Span;
+use std::env;
+
+fn docs_link(diag: &mut Diagnostic, lint: &'static Lint) {
+ if env::var("CLIPPY_DISABLE_DOCS_LINKS").is_err() {
+ if let Some(lint) = lint.name_lower().strip_prefix("clippy::") {
+ diag.help(&format!(
+ "for further information visit https://rust-lang.github.io/rust-clippy/{}/index.html#{}",
+ &option_env!("RUST_RELEASE_NUM").map_or("master".to_string(), |n| {
+ // extract just major + minor version and ignore patch versions
+ format!("rust-{}", n.rsplit_once('.').unwrap().1)
+ }),
+ lint
+ ));
+ }
+ }
+}
+
+/// Emit a basic lint message with a `msg` and a `span`.
+///
+/// This is the most primitive of our lint emission methods and can
+/// be a good way to get a new lint started.
+///
+/// Usually it's nicer to provide more context for lint messages.
+/// Be sure the output is understandable when you use this method.
+///
+/// # Example
+///
+/// ```ignore
+/// error: usage of mem::forget on Drop type
+/// --> $DIR/mem_forget.rs:17:5
+/// |
+/// 17 | std::mem::forget(seven);
+/// | ^^^^^^^^^^^^^^^^^^^^^^^
+/// ```
+pub fn span_lint<T: LintContext>(cx: &T, lint: &'static Lint, sp: impl Into<MultiSpan>, msg: &str) {
+ cx.struct_span_lint(lint, sp, |diag| {
+ let mut diag = diag.build(msg);
+ docs_link(&mut diag, lint);
+ diag.emit();
+ });
+}
+
+/// Same as `span_lint` but with an extra `help` message.
+///
+/// Use this if you want to provide some general help but
+/// can't provide a specific machine applicable suggestion.
+///
+/// The `help` message can be optionally attached to a `Span`.
+///
+/// If you change the signature, remember to update the internal lint `CollapsibleCalls`
+///
+/// # Example
+///
+/// ```text
+/// error: constant division of 0.0 with 0.0 will always result in NaN
+/// --> $DIR/zero_div_zero.rs:6:25
+/// |
+/// 6 | let other_f64_nan = 0.0f64 / 0.0;
+/// | ^^^^^^^^^^^^
+/// |
+/// = help: consider using `f64::NAN` if you would like a constant representing NaN
+/// ```
+pub fn span_lint_and_help<'a, T: LintContext>(
+ cx: &'a T,
+ lint: &'static Lint,
+ span: impl Into<MultiSpan>,
+ msg: &str,
+ help_span: Option<Span>,
+ help: &str,
+) {
+ cx.struct_span_lint(lint, span, |diag| {
+ let mut diag = diag.build(msg);
+ if let Some(help_span) = help_span {
+ diag.span_help(help_span, help);
+ } else {
+ diag.help(help);
+ }
+ docs_link(&mut diag, lint);
+ diag.emit();
+ });
+}
+
+/// Like `span_lint` but with a `note` section instead of a `help` message.
+///
+/// The `note` message is presented separately from the main lint message
+/// and is attached to a specific span:
+///
+/// If you change the signature, remember to update the internal lint `CollapsibleCalls`
+///
+/// # Example
+///
+/// ```text
+/// error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing.
+/// --> $DIR/drop_forget_ref.rs:10:5
+/// |
+/// 10 | forget(&SomeStruct);
+/// | ^^^^^^^^^^^^^^^^^^^
+/// |
+/// = note: `-D clippy::forget-ref` implied by `-D warnings`
+/// note: argument has type &SomeStruct
+/// --> $DIR/drop_forget_ref.rs:10:12
+/// |
+/// 10 | forget(&SomeStruct);
+/// | ^^^^^^^^^^^
+/// ```
+pub fn span_lint_and_note<'a, T: LintContext>(
+ cx: &'a T,
+ lint: &'static Lint,
+ span: impl Into<MultiSpan>,
+ msg: &str,
+ note_span: Option<Span>,
+ note: &str,
+) {
+ cx.struct_span_lint(lint, span, |diag| {
+ let mut diag = diag.build(msg);
+ if let Some(note_span) = note_span {
+ diag.span_note(note_span, note);
+ } else {
+ diag.note(note);
+ }
+ docs_link(&mut diag, lint);
+ diag.emit();
+ });
+}
+
+/// Like `span_lint` but allows adding notes, help messages, and suggestions using a closure.
+///
+/// If you need to customize your lint output a lot, use this function.
+/// If you change the signature, remember to update the internal lint `CollapsibleCalls`
+pub fn span_lint_and_then<C, S, F>(cx: &C, lint: &'static Lint, sp: S, msg: &str, f: F)
+where
+ C: LintContext,
+ S: Into<MultiSpan>,
+ F: FnOnce(&mut Diagnostic),
+{
+ cx.struct_span_lint(lint, sp, |diag| {
+ let mut diag = diag.build(msg);
+ f(&mut diag);
+ docs_link(&mut diag, lint);
+ diag.emit();
+ });
+}
+
+pub fn span_lint_hir(
+ cx: &LateContext<'_>,
+ lint: &'static Lint,
+ hir_id: HirId,
+ sp: Span,
+ msg: &str,
+) {
+ cx.tcx.struct_span_lint_hir(lint, hir_id, sp, |diag| {
+ let mut diag = diag.build(msg);
+ docs_link(&mut diag, lint);
+ diag.emit();
+ });
+}
+
+pub fn span_lint_hir_and_then(
+ cx: &LateContext<'_>,
+ lint: &'static Lint,
+ hir_id: HirId,
+ sp: impl Into<MultiSpan>,
+ msg: &str,
+ f: impl FnOnce(&mut Diagnostic),
+) {
+ cx.tcx.struct_span_lint_hir(lint, hir_id, sp, |diag| {
+ let mut diag = diag.build(msg);
+ f(&mut diag);
+ docs_link(&mut diag, lint);
+ diag.emit();
+ });
+}
+
+/// Add a span lint with a suggestion on how to fix it.
+///
+/// These suggestions can be parsed by rustfix to allow it to automatically fix your code.
+/// In the example below, `help` is `"try"` and `sugg` is the suggested replacement `".any(|x| x >
+/// 2)"`.
+///
+/// If you change the signature, remember to update the internal lint `CollapsibleCalls`
+///
+/// # Example
+///
+/// ```text
+/// error: This `.fold` can be more succinctly expressed as `.any`
+/// --> $DIR/methods.rs:390:13
+/// |
+/// 390 | let _ = (0..3).fold(false, |acc, x| acc || x > 2);
+/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.any(|x| x > 2)`
+/// |
+/// = note: `-D fold-any` implied by `-D warnings`
+/// ```
+#[cfg_attr(feature = "internal", allow(clippy::collapsible_span_lint_calls))]
+pub fn span_lint_and_sugg<'a, T: LintContext>(
+ cx: &'a T,
+ lint: &'static Lint,
+ sp: Span,
+ msg: &str,
+ help: &str,
+ sugg: String,
+ applicability: Applicability,
+) {
+ span_lint_and_then(cx, lint, sp, msg, |diag| {
+ diag.span_suggestion(sp, help, sugg, applicability);
+ });
+}
+
+/// Creates a suggestion made from several `span → replacement` pairs.
+///
+/// Note: in the JSON format (used by `compiletest_rs`), the help message will
+/// appear once per
+/// replacement. In human-readable format though, it only appears once before
+/// the whole suggestion.
+pub fn multispan_sugg<I>(diag: &mut Diagnostic, help_msg: &str, sugg: I)
+where
+ I: IntoIterator<Item = (Span, String)>,
+{
+ multispan_sugg_with_applicability(diag, help_msg, Applicability::Unspecified, sugg);
+}
+
+/// Creates a suggestion made from several `span → replacement` pairs.
+///
+/// rustfix currently doesn't support the automatic application of suggestions with
+/// multiple spans. This is tracked in issue [rustfix#141](https://github.com/rust-lang/rustfix/issues/141).
+/// Suggestions with multiple spans will be silently ignored.
+pub fn multispan_sugg_with_applicability<I>(
+ diag: &mut Diagnostic,
+ help_msg: &str,
+ applicability: Applicability,
+ sugg: I,
+) where
+ I: IntoIterator<Item = (Span, String)>,
+{
+ diag.multipart_suggestion(help_msg, sugg.into_iter().collect(), applicability);
+}
diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
new file mode 100644
index 000000000..730724b95
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
@@ -0,0 +1,234 @@
+//! Utilities for evaluating whether eagerly evaluated expressions can be made lazy and vice versa.
+//!
+//! Things to consider:
+//! - does the expression have side effects?
+//! - is the expression computationally expensive?
+//!
+//! See lints:
+//! - unnecessary-lazy-evaluations
+//! - or-fun-call
+//! - option-if-let-else
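+//!
+//! For instance (an illustrative sketch, not part of the upstream sources):
+//!
+//! ```ignore
+//! // cheap and side-effect free: eager evaluation is fine
+//! opt.unwrap_or_else(|| 0);          // could become `opt.unwrap_or(0)`
+//! // potentially expensive: lazy evaluation is preferable
+//! opt.unwrap_or(compute_default());  // could become `opt.unwrap_or_else(compute_default)`
+//! ```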
+
+use crate::ty::{all_predicates_of, is_copy};
+use crate::visitors::is_const_evaluatable;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{def_id::DefId, Block, Expr, ExprKind, QPath, UnOp};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, PredicateKind};
+use rustc_span::{sym, Symbol};
+use std::cmp;
+use std::ops;
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum EagernessSuggestion {
+ // The expression is cheap and should be evaluated eagerly
+ Eager,
+ // The expression may be cheap, so don't suggest lazy evaluation; or the expression may not be safe to switch to
+ // eager evaluation.
+ NoChange,
+ // The expression is likely expensive and should be evaluated lazily.
+ Lazy,
+ // The expression cannot be placed into a closure.
+ ForceNoChange,
+}
+impl ops::BitOr for EagernessSuggestion {
+ type Output = Self;
+ fn bitor(self, rhs: Self) -> Self {
+ cmp::max(self, rhs)
+ }
+}
+impl ops::BitOrAssign for EagernessSuggestion {
+ fn bitor_assign(&mut self, rhs: Self) {
+ *self = *self | rhs;
+ }
+}
+
+/// Determine the eagerness of the given function call.
+fn fn_eagerness<'tcx>(
+ cx: &LateContext<'tcx>,
+ fn_id: DefId,
+ name: Symbol,
+ args: &'tcx [Expr<'_>],
+) -> EagernessSuggestion {
+ use EagernessSuggestion::{Eager, Lazy, NoChange};
+ let name = name.as_str();
+
+ let ty = match cx.tcx.impl_of_method(fn_id) {
+ Some(id) => cx.tcx.type_of(id),
+ None => return Lazy,
+ };
+
+ if (name.starts_with("as_") || name == "len" || name == "is_empty") && args.len() == 1 {
+ if matches!(
+ cx.tcx.crate_name(fn_id.krate),
+ sym::std | sym::core | sym::alloc | sym::proc_macro
+ ) {
+ Eager
+ } else {
+ NoChange
+ }
+ } else if let ty::Adt(def, subs) = ty.kind() {
+ // Types where the only fields are generic types (or references to them) with no trait bounds other
+ // than marker traits.
+ // Due to the limited operations on these types, functions should be fairly cheap.
+ if def
+ .variants()
+ .iter()
+ .flat_map(|v| v.fields.iter())
+ .any(|x| matches!(cx.tcx.type_of(x.did).peel_refs().kind(), ty::Param(_)))
+ && all_predicates_of(cx.tcx, fn_id).all(|(pred, _)| match pred.kind().skip_binder() {
+ PredicateKind::Trait(pred) => cx.tcx.trait_def(pred.trait_ref.def_id).is_marker,
+ _ => true,
+ })
+ && subs.types().all(|x| matches!(x.peel_refs().kind(), ty::Param(_)))
+ {
+ // Limit the function to either `(self) -> bool` or `(&self) -> bool`
+ match &**cx.tcx.fn_sig(fn_id).skip_binder().inputs_and_output {
+ [arg, res] if !arg.is_mutable_ptr() && arg.peel_refs() == ty && res.is_bool() => NoChange,
+ _ => Lazy,
+ }
+ } else {
+ Lazy
+ }
+ } else {
+ Lazy
+ }
+}
+
+#[expect(clippy::too_many_lines)]
+fn expr_eagerness<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> EagernessSuggestion {
+ struct V<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ eagerness: EagernessSuggestion,
+ }
+
+ impl<'cx, 'tcx> Visitor<'tcx> for V<'cx, 'tcx> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ use EagernessSuggestion::{ForceNoChange, Lazy, NoChange};
+ if self.eagerness == ForceNoChange {
+ return;
+ }
+ match e.kind {
+ ExprKind::Call(
+ &Expr {
+ kind: ExprKind::Path(ref path),
+ hir_id,
+ ..
+ },
+ args,
+ ) => match self.cx.qpath_res(path, hir_id) {
+ Res::Def(DefKind::Ctor(..) | DefKind::Variant, _) | Res::SelfCtor(_) => (),
+ Res::Def(_, id) if self.cx.tcx.is_promotable_const_fn(id) => (),
+ // No need to walk the arguments here, `is_const_evaluatable` already did
+ Res::Def(..) if is_const_evaluatable(self.cx, e) => {
+ self.eagerness |= NoChange;
+ return;
+ },
+ Res::Def(_, id) => match path {
+ QPath::Resolved(_, p) => {
+ self.eagerness |= fn_eagerness(self.cx, id, p.segments.last().unwrap().ident.name, args);
+ },
+ QPath::TypeRelative(_, name) => {
+ self.eagerness |= fn_eagerness(self.cx, id, name.ident.name, args);
+ },
+ QPath::LangItem(..) => self.eagerness = Lazy,
+ },
+ _ => self.eagerness = Lazy,
+ },
+ // No need to walk the arguments here, `is_const_evaluatable` already did
+ ExprKind::MethodCall(..) if is_const_evaluatable(self.cx, e) => {
+ self.eagerness |= NoChange;
+ return;
+ },
+ ExprKind::MethodCall(name, args, _) => {
+ self.eagerness |= self
+ .cx
+ .typeck_results()
+ .type_dependent_def_id(e.hir_id)
+ .map_or(Lazy, |id| fn_eagerness(self.cx, id, name.ident.name, args));
+ },
+ ExprKind::Index(_, e) => {
+ let ty = self.cx.typeck_results().expr_ty_adjusted(e);
+ if is_copy(self.cx, ty) && !ty.is_ref() {
+ self.eagerness |= NoChange;
+ } else {
+ self.eagerness = Lazy;
+ }
+ },
+
+ // Dereferences should be cheap, but dereferencing a raw pointer earlier may not be safe.
+ ExprKind::Unary(UnOp::Deref, e) if !self.cx.typeck_results().expr_ty(e).is_unsafe_ptr() => (),
+ ExprKind::Unary(UnOp::Deref, _) => self.eagerness |= NoChange,
+
+ ExprKind::Unary(_, e)
+ if matches!(
+ self.cx.typeck_results().expr_ty(e).kind(),
+ ty::Bool | ty::Int(_) | ty::Uint(_),
+ ) => {},
+ ExprKind::Binary(_, lhs, rhs)
+ if self.cx.typeck_results().expr_ty(lhs).is_primitive()
+ && self.cx.typeck_results().expr_ty(rhs).is_primitive() => {},
+
+ // Can't be moved into a closure
+ ExprKind::Break(..)
+ | ExprKind::Continue(_)
+ | ExprKind::Ret(_)
+ | ExprKind::InlineAsm(_)
+ | ExprKind::Yield(..)
+ | ExprKind::Err => {
+ self.eagerness = ForceNoChange;
+ return;
+ },
+
+ // Memory allocation, custom operator, loop, or call to an unknown function
+ ExprKind::Box(_)
+ | ExprKind::Unary(..)
+ | ExprKind::Binary(..)
+ | ExprKind::Loop(..)
+ | ExprKind::Call(..) => self.eagerness = Lazy,
+
+ ExprKind::ConstBlock(_)
+ | ExprKind::Array(_)
+ | ExprKind::Tup(_)
+ | ExprKind::Lit(_)
+ | ExprKind::Cast(..)
+ | ExprKind::Type(..)
+ | ExprKind::DropTemps(_)
+ | ExprKind::Let(..)
+ | ExprKind::If(..)
+ | ExprKind::Match(..)
+ | ExprKind::Closure { .. }
+ | ExprKind::Field(..)
+ | ExprKind::Path(_)
+ | ExprKind::AddrOf(..)
+ | ExprKind::Struct(..)
+ | ExprKind::Repeat(..)
+ | ExprKind::Block(Block { stmts: [], .. }, _) => (),
+
+ // Assignment might be to a local defined earlier, so don't eagerly evaluate.
+ // Blocks with multiple statements might be expensive, so don't eagerly evaluate.
+ // TODO: Actually check if either of these are true here.
+ ExprKind::Assign(..) | ExprKind::AssignOp(..) | ExprKind::Block(..) => self.eagerness |= NoChange,
+ }
+ walk_expr(self, e);
+ }
+ }
+
+ let mut v = V {
+ cx,
+ eagerness: EagernessSuggestion::Eager,
+ };
+ v.visit_expr(e);
+ v.eagerness
+}
+
+/// Whether the given expression should be changed to evaluate eagerly
+pub fn switch_to_eager_eval<'tcx>(cx: &'_ LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
+ expr_eagerness(cx, expr) == EagernessSuggestion::Eager
+}
+
+/// Whether the given expression should be changed to evaluate lazily
+pub fn switch_to_lazy_eval<'tcx>(cx: &'_ LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
+ expr_eagerness(cx, expr) == EagernessSuggestion::Lazy
+}
diff --git a/src/tools/clippy/clippy_utils/src/higher.rs b/src/tools/clippy/clippy_utils/src/higher.rs
new file mode 100644
index 000000000..4604ae5c2
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/higher.rs
@@ -0,0 +1,469 @@
+//! This module contains functions that retrieve higher-level constructs (e.g. desugared `for`/`while` loops, `if let` expressions and ranges) from the HIR.
+
+#![deny(clippy::missing_docs_in_private_items)]
+
+use crate::consts::{constant_simple, Constant};
+use crate::ty::is_type_diagnostic_item;
+use crate::{is_expn_of, match_def_path, paths};
+use if_chain::if_chain;
+use rustc_ast::ast;
+use rustc_hir as hir;
+use rustc_hir::{Arm, Block, Expr, ExprKind, HirId, LoopSource, MatchSource, Node, Pat, QPath};
+use rustc_lint::LateContext;
+use rustc_span::{sym, symbol, Span};
+
+/// The essential nodes of a desugared `for` loop as well as the entire span:
+/// `for pat in arg { body }` yields the `pat`, `arg` and `body` fields, plus the span of the whole loop.
+pub struct ForLoop<'tcx> {
+ /// `for` loop item
+ pub pat: &'tcx hir::Pat<'tcx>,
+ /// `IntoIterator` argument
+ pub arg: &'tcx hir::Expr<'tcx>,
+ /// `for` loop body
+ pub body: &'tcx hir::Expr<'tcx>,
+ /// Compare this against `hir::Destination.target`
+ pub loop_id: HirId,
+ /// entire `for` loop span
+ pub span: Span,
+}
+
+impl<'tcx> ForLoop<'tcx> {
+ /// Parses a desugared `for` loop
+ pub fn hir(expr: &Expr<'tcx>) -> Option<Self> {
+ if_chain! {
+ if let hir::ExprKind::DropTemps(e) = expr.kind;
+ if let hir::ExprKind::Match(iterexpr, [arm], hir::MatchSource::ForLoopDesugar) = e.kind;
+ if let hir::ExprKind::Call(_, [arg]) = iterexpr.kind;
+ if let hir::ExprKind::Loop(block, ..) = arm.body.kind;
+ if let [stmt] = block.stmts;
+ if let hir::StmtKind::Expr(e) = stmt.kind;
+ if let hir::ExprKind::Match(_, [_, some_arm], _) = e.kind;
+ if let hir::PatKind::Struct(_, [field], _) = some_arm.pat.kind;
+ then {
+ return Some(Self {
+ pat: field.pat,
+ arg,
+ body: some_arm.body,
+ loop_id: arm.body.hir_id,
+ span: expr.span.ctxt().outer_expn_data().call_site,
+ });
+ }
+ }
+ None
+ }
+}
+
+/// An `if` expression without `DropTemps`
+pub struct If<'hir> {
+ /// `if` condition
+ pub cond: &'hir Expr<'hir>,
+ /// `if` then expression
+ pub then: &'hir Expr<'hir>,
+ /// `else` expression
+ pub r#else: Option<&'hir Expr<'hir>>,
+}
+
+impl<'hir> If<'hir> {
+ #[inline]
+ /// Parses an `if` expression
+ pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
+ if let ExprKind::If(
+ Expr {
+ kind: ExprKind::DropTemps(cond),
+ ..
+ },
+ then,
+ r#else,
+ ) = expr.kind
+ {
+ Some(Self { cond, then, r#else })
+ } else {
+ None
+ }
+ }
+}
+
+/// An `if let` expression
+pub struct IfLet<'hir> {
+ /// `if let` pattern
+ pub let_pat: &'hir Pat<'hir>,
+ /// `if let` scrutinee
+ pub let_expr: &'hir Expr<'hir>,
+ /// `if let` then expression
+ pub if_then: &'hir Expr<'hir>,
+ /// `if let` else expression
+ pub if_else: Option<&'hir Expr<'hir>>,
+}
+
+impl<'hir> IfLet<'hir> {
+ /// Parses an `if let` expression
+ pub fn hir(cx: &LateContext<'_>, expr: &Expr<'hir>) -> Option<Self> {
+ if let ExprKind::If(
+ Expr {
+ kind:
+ ExprKind::Let(hir::Let {
+ pat: let_pat,
+ init: let_expr,
+ ..
+ }),
+ ..
+ },
+ if_then,
+ if_else,
+ ) = expr.kind
+ {
+ let mut iter = cx.tcx.hir().parent_iter(expr.hir_id);
+ if let Some((_, Node::Block(Block { stmts: [], .. }))) = iter.next() {
+ if let Some((
+ _,
+ Node::Expr(Expr {
+ kind: ExprKind::Loop(_, _, LoopSource::While, _),
+ ..
+ }),
+ )) = iter.next()
+ {
+ // while loop desugar
+ return None;
+ }
+ }
+ return Some(Self {
+ let_pat,
+ let_expr,
+ if_then,
+ if_else,
+ });
+ }
+ None
+ }
+}
+
+/// An `if let` or `match` expression. Useful for lints that trigger on one or the other.
+pub enum IfLetOrMatch<'hir> {
+ /// Any `match` expression
+ Match(&'hir Expr<'hir>, &'hir [Arm<'hir>], MatchSource),
+ /// scrutinee, pattern, then block, else block
+ IfLet(
+ &'hir Expr<'hir>,
+ &'hir Pat<'hir>,
+ &'hir Expr<'hir>,
+ Option<&'hir Expr<'hir>>,
+ ),
+}
+
+impl<'hir> IfLetOrMatch<'hir> {
+ /// Parses an `if let` or `match` expression
+ pub fn parse(cx: &LateContext<'_>, expr: &Expr<'hir>) -> Option<Self> {
+ match expr.kind {
+ ExprKind::Match(expr, arms, source) => Some(Self::Match(expr, arms, source)),
+ _ => IfLet::hir(cx, expr).map(
+ |IfLet {
+ let_expr,
+ let_pat,
+ if_then,
+ if_else,
+ }| { Self::IfLet(let_expr, let_pat, if_then, if_else) },
+ ),
+ }
+ }
+}
+
+/// An `if` or `if let` expression
+pub struct IfOrIfLet<'hir> {
+ /// `if` condition that is maybe a `let` expression
+ pub cond: &'hir Expr<'hir>,
+ /// `if` then expression
+ pub then: &'hir Expr<'hir>,
+ /// `else` expression
+ pub r#else: Option<&'hir Expr<'hir>>,
+}
+
+impl<'hir> IfOrIfLet<'hir> {
+ #[inline]
+ /// Parses an `if` or `if let` expression
+ pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
+ if let ExprKind::If(cond, then, r#else) = expr.kind {
+ if let ExprKind::DropTemps(new_cond) = cond.kind {
+ return Some(Self {
+ cond: new_cond,
+ r#else,
+ then,
+ });
+ }
+ if let ExprKind::Let(..) = cond.kind {
+ return Some(Self { cond, then, r#else });
+ }
+ }
+ None
+ }
+}
+
+/// Represent a range akin to `ast::ExprKind::Range`.
+#[derive(Debug, Copy, Clone)]
+pub struct Range<'a> {
+ /// The lower bound of the range, or `None` for ranges such as `..X`.
+ pub start: Option<&'a hir::Expr<'a>>,
+ /// The upper bound of the range, or `None` for ranges such as `X..`.
+ pub end: Option<&'a hir::Expr<'a>>,
+ /// Whether the interval is open or closed.
+ pub limits: ast::RangeLimits,
+}
+
+impl<'a> Range<'a> {
+ /// Lifts a `hir` range into something similar to `ast::ExprKind::Range`.
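+ ///
+ /// For instance (an illustrative sketch, not part of the upstream sources), the
+ /// HIR for `0..=10` yields a `Range` with both bounds set and closed limits:
+ ///
+ /// ```ignore
+ /// if let Some(range) = Range::hir(expr) {
+ ///     // range.start == Some(`0`), range.end == Some(`10`),
+ ///     // range.limits == ast::RangeLimits::Closed
+ /// }
+ /// ```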
+ pub fn hir(expr: &'a hir::Expr<'_>) -> Option<Range<'a>> {
+ /// Finds the field named `name` among `fields`, returning `None` if no such
+ /// field exists.
+ fn get_field<'c>(name: &str, fields: &'c [hir::ExprField<'_>]) -> Option<&'c hir::Expr<'c>> {
+ let expr = &fields.iter().find(|field| field.ident.name.as_str() == name)?.expr;
+ Some(expr)
+ }
+
+ match expr.kind {
+ hir::ExprKind::Call(path, args)
+ if matches!(
+ path.kind,
+ hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, ..))
+ ) =>
+ {
+ Some(Range {
+ start: Some(&args[0]),
+ end: Some(&args[1]),
+ limits: ast::RangeLimits::Closed,
+ })
+ },
+ hir::ExprKind::Struct(path, fields, None) => match &path {
+ hir::QPath::LangItem(hir::LangItem::RangeFull, ..) => Some(Range {
+ start: None,
+ end: None,
+ limits: ast::RangeLimits::HalfOpen,
+ }),
+ hir::QPath::LangItem(hir::LangItem::RangeFrom, ..) => Some(Range {
+ start: Some(get_field("start", fields)?),
+ end: None,
+ limits: ast::RangeLimits::HalfOpen,
+ }),
+ hir::QPath::LangItem(hir::LangItem::Range, ..) => Some(Range {
+ start: Some(get_field("start", fields)?),
+ end: Some(get_field("end", fields)?),
+ limits: ast::RangeLimits::HalfOpen,
+ }),
+ hir::QPath::LangItem(hir::LangItem::RangeToInclusive, ..) => Some(Range {
+ start: None,
+ end: Some(get_field("end", fields)?),
+ limits: ast::RangeLimits::Closed,
+ }),
+ hir::QPath::LangItem(hir::LangItem::RangeTo, ..) => Some(Range {
+ start: None,
+ end: Some(get_field("end", fields)?),
+ limits: ast::RangeLimits::HalfOpen,
+ }),
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+}
+
+/// Represent the pre-expansion arguments of a `vec!` invocation.
+pub enum VecArgs<'a> {
+ /// `vec![elem; len]`
+ Repeat(&'a hir::Expr<'a>, &'a hir::Expr<'a>),
+ /// `vec![a, b, c]`
+ Vec(&'a [hir::Expr<'a>]),
+}
+
+impl<'a> VecArgs<'a> {
+ /// Returns the arguments of the `vec!` macro if this expression was expanded
+ /// from `vec!`.
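+ ///
+ /// For instance (an illustrative sketch, not part of the upstream sources):
+ ///
+ /// ```ignore
+ /// // `vec![elem; len]` -> Some(VecArgs::Repeat(`elem`, `len`))
+ /// // `vec![a, b, c]`   -> Some(VecArgs::Vec(&[`a`, `b`, `c`]))
+ /// // any other expression -> None
+ /// let args = VecArgs::hir(cx, expr);
+ /// ```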
+ pub fn hir(cx: &LateContext<'_>, expr: &'a hir::Expr<'_>) -> Option<VecArgs<'a>> {
+ if_chain! {
+ if let hir::ExprKind::Call(fun, args) = expr.kind;
+ if let hir::ExprKind::Path(ref qpath) = fun.kind;
+ if is_expn_of(fun.span, "vec").is_some();
+ if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
+ then {
+ return if match_def_path(cx, fun_def_id, &paths::VEC_FROM_ELEM) && args.len() == 2 {
+ // `vec![elem; size]` case
+ Some(VecArgs::Repeat(&args[0], &args[1]))
+ } else if match_def_path(cx, fun_def_id, &paths::SLICE_INTO_VEC) && args.len() == 1 {
+ // `vec![a, b, c]` case
+ if_chain! {
+ if let hir::ExprKind::Box(boxed) = args[0].kind;
+ if let hir::ExprKind::Array(args) = boxed.kind;
+ then {
+ return Some(VecArgs::Vec(args));
+ }
+ }
+
+ None
+ } else if match_def_path(cx, fun_def_id, &paths::VEC_NEW) && args.is_empty() {
+ Some(VecArgs::Vec(&[]))
+ } else {
+ None
+ };
+ }
+ }
+
+ None
+ }
+}
+
+/// A desugared `while` loop
+pub struct While<'hir> {
+ /// `while` loop condition
+ pub condition: &'hir Expr<'hir>,
+ /// `while` loop body
+ pub body: &'hir Expr<'hir>,
+}
+
+impl<'hir> While<'hir> {
+ #[inline]
+ /// Parses a desugared `while` loop
+ pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
+ if let ExprKind::Loop(
+ Block {
+ expr:
+ Some(Expr {
+ kind:
+ ExprKind::If(
+ Expr {
+ kind: ExprKind::DropTemps(condition),
+ ..
+ },
+ body,
+ _,
+ ),
+ ..
+ }),
+ ..
+ },
+ _,
+ LoopSource::While,
+ _,
+ ) = expr.kind
+ {
+ return Some(Self { condition, body });
+ }
+ None
+ }
+}
+
+/// A desugared `while let` loop
+pub struct WhileLet<'hir> {
+ /// `while let` loop item pattern
+ pub let_pat: &'hir Pat<'hir>,
+ /// `while let` loop scrutinee
+ pub let_expr: &'hir Expr<'hir>,
+ /// `while let` loop body
+ pub if_then: &'hir Expr<'hir>,
+}
+
+impl<'hir> WhileLet<'hir> {
+ #[inline]
+ /// Parses a desugared `while let` loop
+ pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
+ if let ExprKind::Loop(
+ Block {
+ expr:
+ Some(Expr {
+ kind:
+ ExprKind::If(
+ Expr {
+ kind:
+ ExprKind::Let(hir::Let {
+ pat: let_pat,
+ init: let_expr,
+ ..
+ }),
+ ..
+ },
+ if_then,
+ _,
+ ),
+ ..
+ }),
+ ..
+ },
+ _,
+ LoopSource::While,
+ _,
+ ) = expr.kind
+ {
+ return Some(Self {
+ let_pat,
+ let_expr,
+ if_then,
+ });
+ }
+ None
+ }
+}
+
+/// Converts a hir binary operator to the corresponding `ast` type.
+#[must_use]
+pub fn binop(op: hir::BinOpKind) -> ast::BinOpKind {
+ match op {
+ hir::BinOpKind::Eq => ast::BinOpKind::Eq,
+ hir::BinOpKind::Ge => ast::BinOpKind::Ge,
+ hir::BinOpKind::Gt => ast::BinOpKind::Gt,
+ hir::BinOpKind::Le => ast::BinOpKind::Le,
+ hir::BinOpKind::Lt => ast::BinOpKind::Lt,
+ hir::BinOpKind::Ne => ast::BinOpKind::Ne,
+ hir::BinOpKind::Or => ast::BinOpKind::Or,
+ hir::BinOpKind::Add => ast::BinOpKind::Add,
+ hir::BinOpKind::And => ast::BinOpKind::And,
+ hir::BinOpKind::BitAnd => ast::BinOpKind::BitAnd,
+ hir::BinOpKind::BitOr => ast::BinOpKind::BitOr,
+ hir::BinOpKind::BitXor => ast::BinOpKind::BitXor,
+ hir::BinOpKind::Div => ast::BinOpKind::Div,
+ hir::BinOpKind::Mul => ast::BinOpKind::Mul,
+ hir::BinOpKind::Rem => ast::BinOpKind::Rem,
+ hir::BinOpKind::Shl => ast::BinOpKind::Shl,
+ hir::BinOpKind::Shr => ast::BinOpKind::Shr,
+ hir::BinOpKind::Sub => ast::BinOpKind::Sub,
+ }
+}
+
+/// A parsed `Vec` initialization expression
+#[derive(Clone, Copy)]
+pub enum VecInitKind {
+ /// `Vec::new()`
+ New,
+ /// `Vec::default()` or `Default::default()`
+ Default,
+ /// `Vec::with_capacity(123)`
+ WithConstCapacity(u128),
+ /// `Vec::with_capacity(slice.len())`
+ WithExprCapacity(HirId),
+}
+
+/// Checks if the given expression is an initialization of a `Vec` and returns its kind.
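+///
+/// For instance (an illustrative sketch, not part of the upstream sources):
+///
+/// ```ignore
+/// // `Vec::new()`             -> Some(VecInitKind::New)
+/// // `Vec::default()`         -> Some(VecInitKind::Default)
+/// // `Vec::with_capacity(16)` -> Some(VecInitKind::WithConstCapacity(16))
+/// // `Vec::with_capacity(n)`  -> Some(VecInitKind::WithExprCapacity(..)) carrying `n`'s `HirId`
+/// let kind = get_vec_init_kind(cx, expr);
+/// ```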
+pub fn get_vec_init_kind<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<VecInitKind> {
+ if let ExprKind::Call(func, args) = expr.kind {
+ match func.kind {
+ ExprKind::Path(QPath::TypeRelative(ty, name))
+ if is_type_diagnostic_item(cx, cx.typeck_results().node_type(ty.hir_id), sym::Vec) =>
+ {
+ if name.ident.name == sym::new {
+ return Some(VecInitKind::New);
+ } else if name.ident.name == symbol::kw::Default {
+ return Some(VecInitKind::Default);
+ } else if name.ident.name.as_str() == "with_capacity" {
+ let arg = args.get(0)?;
+ return match constant_simple(cx, cx.typeck_results(), arg) {
+ Some(Constant::Int(num)) => Some(VecInitKind::WithConstCapacity(num)),
+ _ => Some(VecInitKind::WithExprCapacity(arg.hir_id)),
+ };
+ };
+ },
+ ExprKind::Path(QPath::Resolved(_, path))
+ if match_def_path(cx, path.res.opt_def_id()?, &paths::DEFAULT_TRAIT_METHOD)
+ && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec) =>
+ {
+ return Some(VecInitKind::Default);
+ },
+ _ => (),
+ }
+ }
+ None
+}
diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs
new file mode 100644
index 000000000..1834e2a2d
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs
@@ -0,0 +1,1031 @@
+use crate::consts::constant_simple;
+use crate::macros::macro_backtrace;
+use crate::source::snippet_opt;
+use rustc_ast::ast::InlineAsmTemplatePiece;
+use rustc_data_structures::fx::FxHasher;
+use rustc_hir::def::Res;
+use rustc_hir::HirIdMap;
+use rustc_hir::{
+ ArrayLen, BinOpKind, Block, BodyId, Closure, Expr, ExprField, ExprKind, FnRetTy, GenericArg, GenericArgs, Guard,
+ HirId, InlineAsmOperand, Let, Lifetime, LifetimeName, ParamName, Pat, PatField, PatKind, Path, PathSegment, QPath,
+ Stmt, StmtKind, Ty, TyKind, TypeBinding,
+};
+use rustc_lexer::{tokenize, TokenKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::TypeckResults;
+use rustc_span::{sym, Symbol};
+use std::hash::{Hash, Hasher};
+
+/// Callback that is called when two expressions are not equal in the sense of `SpanlessEq`, but
+/// other conditions would make them equal.
+type SpanlessEqCallback<'a> = dyn FnMut(&Expr<'_>, &Expr<'_>) -> bool + 'a;
+
+/// Type used to check whether two ASTs are the same. This is different from the
+/// operator `==` on AST types, as that operator would also compare IDs and spans.
+///
+/// Note that some expression kinds are not considered but could be added.
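+///
+/// A typical use (illustrative sketch, not part of the upstream sources) is to
+/// compare two expressions while ignoring spans and `HirId`s:
+///
+/// ```ignore
+/// if SpanlessEq::new(cx).eq_expr(left_expr, right_expr) {
+///     // the two expressions are structurally equivalent
+/// }
+/// ```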
+pub struct SpanlessEq<'a, 'tcx> {
+ /// Context used to evaluate constant expressions.
+ cx: &'a LateContext<'tcx>,
+ maybe_typeck_results: Option<(&'tcx TypeckResults<'tcx>, &'tcx TypeckResults<'tcx>)>,
+ allow_side_effects: bool,
+ expr_fallback: Option<Box<SpanlessEqCallback<'a>>>,
+}
+
+impl<'a, 'tcx> SpanlessEq<'a, 'tcx> {
+ pub fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ maybe_typeck_results: cx.maybe_typeck_results().map(|x| (x, x)),
+ allow_side_effects: true,
+ expr_fallback: None,
+ }
+ }
+
+ /// Consider expressions containing potential side effects as not equal.
+ #[must_use]
+ pub fn deny_side_effects(self) -> Self {
+ Self {
+ allow_side_effects: false,
+ ..self
+ }
+ }
+
+ #[must_use]
+ pub fn expr_fallback(self, expr_fallback: impl FnMut(&Expr<'_>, &Expr<'_>) -> bool + 'a) -> Self {
+ Self {
+ expr_fallback: Some(Box::new(expr_fallback)),
+ ..self
+ }
+ }
+
+ /// Use this method to wrap comparisons that may involve inter-expression context.
+ /// See `self.locals`.
+ pub fn inter_expr(&mut self) -> HirEqInterExpr<'_, 'a, 'tcx> {
+ HirEqInterExpr {
+ inner: self,
+ locals: HirIdMap::default(),
+ }
+ }
+
+ pub fn eq_block(&mut self, left: &Block<'_>, right: &Block<'_>) -> bool {
+ self.inter_expr().eq_block(left, right)
+ }
+
+ pub fn eq_expr(&mut self, left: &Expr<'_>, right: &Expr<'_>) -> bool {
+ self.inter_expr().eq_expr(left, right)
+ }
+
+ pub fn eq_path(&mut self, left: &Path<'_>, right: &Path<'_>) -> bool {
+ self.inter_expr().eq_path(left, right)
+ }
+
+ pub fn eq_path_segment(&mut self, left: &PathSegment<'_>, right: &PathSegment<'_>) -> bool {
+ self.inter_expr().eq_path_segment(left, right)
+ }
+
+ pub fn eq_path_segments(&mut self, left: &[PathSegment<'_>], right: &[PathSegment<'_>]) -> bool {
+ self.inter_expr().eq_path_segments(left, right)
+ }
+}
+
+pub struct HirEqInterExpr<'a, 'b, 'tcx> {
+ inner: &'a mut SpanlessEq<'b, 'tcx>,
+
+ // When bindings are declared, the binding ID in the left expression is mapped to the one on the
+ // right. For example, when comparing `{ let x = 1; x + 2 }` and `{ let y = 1; y + 2 }`,
+ // these blocks are considered equal since `x` is mapped to `y`.
+ pub locals: HirIdMap<HirId>,
+}
+
+impl HirEqInterExpr<'_, '_, '_> {
+ pub fn eq_stmt(&mut self, left: &Stmt<'_>, right: &Stmt<'_>) -> bool {
+ match (&left.kind, &right.kind) {
+ (&StmtKind::Local(l), &StmtKind::Local(r)) => {
+ // This additional check ensures that the type of the locals are equivalent even if the init
+ // expression or type have some inferred parts.
+ if let Some((typeck_lhs, typeck_rhs)) = self.inner.maybe_typeck_results {
+ let l_ty = typeck_lhs.pat_ty(l.pat);
+ let r_ty = typeck_rhs.pat_ty(r.pat);
+ if l_ty != r_ty {
+ return false;
+ }
+ }
+
+ // eq_pat adds the HirIds to the locals map. We therefore call it last to make sure that
+ // these only get added if the init expression and type are equal.
+ both(&l.init, &r.init, |l, r| self.eq_expr(l, r))
+ && both(&l.ty, &r.ty, |l, r| self.eq_ty(l, r))
+ && both(&l.els, &r.els, |l, r| self.eq_block(l, r))
+ && self.eq_pat(l.pat, r.pat)
+ },
+ (&StmtKind::Expr(l), &StmtKind::Expr(r)) | (&StmtKind::Semi(l), &StmtKind::Semi(r)) => self.eq_expr(l, r),
+ _ => false,
+ }
+ }
+
+ /// Checks whether two blocks are the same.
+ fn eq_block(&mut self, left: &Block<'_>, right: &Block<'_>) -> bool {
+ match (left.stmts, left.expr, right.stmts, right.expr) {
+ ([], None, [], None) => {
+ // For empty blocks, check to see if the tokens are equal. This will catch the case where a macro
+ // expanded to nothing, or the cfg attribute was used.
+ let (left, right) = match (
+ snippet_opt(self.inner.cx, left.span),
+ snippet_opt(self.inner.cx, right.span),
+ ) {
+ (Some(left), Some(right)) => (left, right),
+ _ => return true,
+ };
+ let mut left_pos = 0;
+ let left = tokenize(&left)
+ .map(|t| {
+ let end = left_pos + t.len as usize;
+ let s = &left[left_pos..end];
+ left_pos = end;
+ (t, s)
+ })
+ .filter(|(t, _)| {
+ !matches!(
+ t.kind,
+ TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace
+ )
+ })
+ .map(|(_, s)| s);
+ let mut right_pos = 0;
+ let right = tokenize(&right)
+ .map(|t| {
+ let end = right_pos + t.len as usize;
+ let s = &right[right_pos..end];
+ right_pos = end;
+ (t, s)
+ })
+ .filter(|(t, _)| {
+ !matches!(
+ t.kind,
+ TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace
+ )
+ })
+ .map(|(_, s)| s);
+ left.eq(right)
+ },
+ _ => {
+ over(left.stmts, right.stmts, |l, r| self.eq_stmt(l, r))
+ && both(&left.expr, &right.expr, |l, r| self.eq_expr(l, r))
+ },
+ }
+ }
+
+ fn should_ignore(&mut self, expr: &Expr<'_>) -> bool {
+ macro_backtrace(expr.span).last().map_or(false, |macro_call| {
+ matches!(
+ &self.inner.cx.tcx.get_diagnostic_name(macro_call.def_id),
+ Some(sym::todo_macro | sym::unimplemented_macro)
+ )
+ })
+ }
+
+ pub fn eq_array_length(&mut self, left: ArrayLen, right: ArrayLen) -> bool {
+ match (left, right) {
+ (ArrayLen::Infer(..), ArrayLen::Infer(..)) => true,
+ (ArrayLen::Body(l_ct), ArrayLen::Body(r_ct)) => self.eq_body(l_ct.body, r_ct.body),
+ (_, _) => false,
+ }
+ }
+
+ pub fn eq_body(&mut self, left: BodyId, right: BodyId) -> bool {
+ // swap out TypeckResults when comparing a body
+ let old_maybe_typeck_results = self.inner.maybe_typeck_results.replace((
+ self.inner.cx.tcx.typeck_body(left),
+ self.inner.cx.tcx.typeck_body(right),
+ ));
+ let res = self.eq_expr(
+ &self.inner.cx.tcx.hir().body(left).value,
+ &self.inner.cx.tcx.hir().body(right).value,
+ );
+ self.inner.maybe_typeck_results = old_maybe_typeck_results;
+ res
+ }
+
+ #[expect(clippy::similar_names)]
+ pub fn eq_expr(&mut self, left: &Expr<'_>, right: &Expr<'_>) -> bool {
+ if !self.inner.allow_side_effects && left.span.ctxt() != right.span.ctxt() {
+ return false;
+ }
+
+ if let Some((typeck_lhs, typeck_rhs)) = self.inner.maybe_typeck_results {
+ if let (Some(l), Some(r)) = (
+ constant_simple(self.inner.cx, typeck_lhs, left),
+ constant_simple(self.inner.cx, typeck_rhs, right),
+ ) {
+ if l == r {
+ return true;
+ }
+ }
+ }
+
+ let is_eq = match (
+ reduce_exprkind(self.inner.cx, &left.kind),
+ reduce_exprkind(self.inner.cx, &right.kind),
+ ) {
+ (&ExprKind::AddrOf(lb, l_mut, le), &ExprKind::AddrOf(rb, r_mut, re)) => {
+ lb == rb && l_mut == r_mut && self.eq_expr(le, re)
+ },
+ (&ExprKind::Continue(li), &ExprKind::Continue(ri)) => {
+ both(&li.label, &ri.label, |l, r| l.ident.name == r.ident.name)
+ },
+ (&ExprKind::Assign(ll, lr, _), &ExprKind::Assign(rl, rr, _)) => {
+ self.inner.allow_side_effects && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
+ },
+ (&ExprKind::AssignOp(ref lo, ll, lr), &ExprKind::AssignOp(ref ro, rl, rr)) => {
+ self.inner.allow_side_effects && lo.node == ro.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
+ },
+ (&ExprKind::Block(l, _), &ExprKind::Block(r, _)) => self.eq_block(l, r),
+ (&ExprKind::Binary(l_op, ll, lr), &ExprKind::Binary(r_op, rl, rr)) => {
+ l_op.node == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
+ || swap_binop(l_op.node, ll, lr).map_or(false, |(l_op, ll, lr)| {
+ l_op == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
+ })
+ },
+ (&ExprKind::Break(li, ref le), &ExprKind::Break(ri, ref re)) => {
+ both(&li.label, &ri.label, |l, r| l.ident.name == r.ident.name)
+ && both(le, re, |l, r| self.eq_expr(l, r))
+ },
+ (&ExprKind::Box(l), &ExprKind::Box(r)) => self.eq_expr(l, r),
+ (&ExprKind::Call(l_fun, l_args), &ExprKind::Call(r_fun, r_args)) => {
+ self.inner.allow_side_effects && self.eq_expr(l_fun, r_fun) && self.eq_exprs(l_args, r_args)
+ },
+ (&ExprKind::Cast(lx, lt), &ExprKind::Cast(rx, rt)) | (&ExprKind::Type(lx, lt), &ExprKind::Type(rx, rt)) => {
+ self.eq_expr(lx, rx) && self.eq_ty(lt, rt)
+ },
+ (&ExprKind::Field(l_f_exp, ref l_f_ident), &ExprKind::Field(r_f_exp, ref r_f_ident)) => {
+ l_f_ident.name == r_f_ident.name && self.eq_expr(l_f_exp, r_f_exp)
+ },
+ (&ExprKind::Index(la, li), &ExprKind::Index(ra, ri)) => self.eq_expr(la, ra) && self.eq_expr(li, ri),
+ (&ExprKind::If(lc, lt, ref le), &ExprKind::If(rc, rt, ref re)) => {
+ self.eq_expr(lc, rc) && self.eq_expr(lt, rt) && both(le, re, |l, r| self.eq_expr(l, r))
+ },
+ (&ExprKind::Let(l), &ExprKind::Let(r)) => {
+ self.eq_pat(l.pat, r.pat) && both(&l.ty, &r.ty, |l, r| self.eq_ty(l, r)) && self.eq_expr(l.init, r.init)
+ },
+ (&ExprKind::Lit(ref l), &ExprKind::Lit(ref r)) => l.node == r.node,
+ (&ExprKind::Loop(lb, ref ll, ref lls, _), &ExprKind::Loop(rb, ref rl, ref rls, _)) => {
+ lls == rls && self.eq_block(lb, rb) && both(ll, rl, |l, r| l.ident.name == r.ident.name)
+ },
+ (&ExprKind::Match(le, la, ref ls), &ExprKind::Match(re, ra, ref rs)) => {
+ ls == rs
+ && self.eq_expr(le, re)
+ && over(la, ra, |l, r| {
+ self.eq_pat(l.pat, r.pat)
+ && both(&l.guard, &r.guard, |l, r| self.eq_guard(l, r))
+ && self.eq_expr(l.body, r.body)
+ })
+ },
+ (&ExprKind::MethodCall(l_path, l_args, _), &ExprKind::MethodCall(r_path, r_args, _)) => {
+ self.inner.allow_side_effects && self.eq_path_segment(l_path, r_path) && self.eq_exprs(l_args, r_args)
+ },
+ (&ExprKind::Repeat(le, ll), &ExprKind::Repeat(re, rl)) => {
+ self.eq_expr(le, re) && self.eq_array_length(ll, rl)
+ },
+ (&ExprKind::Ret(ref l), &ExprKind::Ret(ref r)) => both(l, r, |l, r| self.eq_expr(l, r)),
+ (&ExprKind::Path(ref l), &ExprKind::Path(ref r)) => self.eq_qpath(l, r),
+ (&ExprKind::Struct(l_path, lf, ref lo), &ExprKind::Struct(r_path, rf, ref ro)) => {
+ self.eq_qpath(l_path, r_path)
+ && both(lo, ro, |l, r| self.eq_expr(l, r))
+ && over(lf, rf, |l, r| self.eq_expr_field(l, r))
+ },
+ (&ExprKind::Tup(l_tup), &ExprKind::Tup(r_tup)) => self.eq_exprs(l_tup, r_tup),
+ (&ExprKind::Unary(l_op, le), &ExprKind::Unary(r_op, re)) => l_op == r_op && self.eq_expr(le, re),
+ (&ExprKind::Array(l), &ExprKind::Array(r)) => self.eq_exprs(l, r),
+ (&ExprKind::DropTemps(le), &ExprKind::DropTemps(re)) => self.eq_expr(le, re),
+ _ => false,
+ };
+ (is_eq && (!self.should_ignore(left) || !self.should_ignore(right)))
+ || self.inner.expr_fallback.as_mut().map_or(false, |f| f(left, right))
+ }
+
+ fn eq_exprs(&mut self, left: &[Expr<'_>], right: &[Expr<'_>]) -> bool {
+ over(left, right, |l, r| self.eq_expr(l, r))
+ }
+
+ fn eq_expr_field(&mut self, left: &ExprField<'_>, right: &ExprField<'_>) -> bool {
+ left.ident.name == right.ident.name && self.eq_expr(left.expr, right.expr)
+ }
+
+ fn eq_guard(&mut self, left: &Guard<'_>, right: &Guard<'_>) -> bool {
+ match (left, right) {
+ (Guard::If(l), Guard::If(r)) => self.eq_expr(l, r),
+ (Guard::IfLet(l), Guard::IfLet(r)) => {
+ self.eq_pat(l.pat, r.pat) && both(&l.ty, &r.ty, |l, r| self.eq_ty(l, r)) && self.eq_expr(l.init, r.init)
+ },
+ _ => false,
+ }
+ }
+
+ fn eq_generic_arg(&mut self, left: &GenericArg<'_>, right: &GenericArg<'_>) -> bool {
+ match (left, right) {
+ (GenericArg::Const(l), GenericArg::Const(r)) => self.eq_body(l.value.body, r.value.body),
+ (GenericArg::Lifetime(l_lt), GenericArg::Lifetime(r_lt)) => Self::eq_lifetime(l_lt, r_lt),
+ (GenericArg::Type(l_ty), GenericArg::Type(r_ty)) => self.eq_ty(l_ty, r_ty),
+ (GenericArg::Infer(l_inf), GenericArg::Infer(r_inf)) => self.eq_ty(&l_inf.to_ty(), &r_inf.to_ty()),
+ _ => false,
+ }
+ }
+
+ fn eq_lifetime(left: &Lifetime, right: &Lifetime) -> bool {
+ left.name == right.name
+ }
+
+ fn eq_pat_field(&mut self, left: &PatField<'_>, right: &PatField<'_>) -> bool {
+ let (PatField { ident: li, pat: lp, .. }, PatField { ident: ri, pat: rp, .. }) = (&left, &right);
+ li.name == ri.name && self.eq_pat(lp, rp)
+ }
+
+ /// Checks whether two patterns are the same.
+ fn eq_pat(&mut self, left: &Pat<'_>, right: &Pat<'_>) -> bool {
+ match (&left.kind, &right.kind) {
+ (&PatKind::Box(l), &PatKind::Box(r)) => self.eq_pat(l, r),
+ (&PatKind::Struct(ref lp, la, ..), &PatKind::Struct(ref rp, ra, ..)) => {
+ self.eq_qpath(lp, rp) && over(la, ra, |l, r| self.eq_pat_field(l, r))
+ },
+ (&PatKind::TupleStruct(ref lp, la, ls), &PatKind::TupleStruct(ref rp, ra, rs)) => {
+ self.eq_qpath(lp, rp) && over(la, ra, |l, r| self.eq_pat(l, r)) && ls == rs
+ },
+ (&PatKind::Binding(lb, li, _, ref lp), &PatKind::Binding(rb, ri, _, ref rp)) => {
+ let eq = lb == rb && both(lp, rp, |l, r| self.eq_pat(l, r));
+ if eq {
+ self.locals.insert(li, ri);
+ }
+ eq
+ },
+ (&PatKind::Path(ref l), &PatKind::Path(ref r)) => self.eq_qpath(l, r),
+ (&PatKind::Lit(l), &PatKind::Lit(r)) => self.eq_expr(l, r),
+ (&PatKind::Tuple(l, ls), &PatKind::Tuple(r, rs)) => ls == rs && over(l, r, |l, r| self.eq_pat(l, r)),
+ (&PatKind::Range(ref ls, ref le, li), &PatKind::Range(ref rs, ref re, ri)) => {
+ both(ls, rs, |a, b| self.eq_expr(a, b)) && both(le, re, |a, b| self.eq_expr(a, b)) && (li == ri)
+ },
+ (&PatKind::Ref(le, ref lm), &PatKind::Ref(re, ref rm)) => lm == rm && self.eq_pat(le, re),
+ (&PatKind::Slice(ls, ref li, le), &PatKind::Slice(rs, ref ri, re)) => {
+ over(ls, rs, |l, r| self.eq_pat(l, r))
+ && over(le, re, |l, r| self.eq_pat(l, r))
+ && both(li, ri, |l, r| self.eq_pat(l, r))
+ },
+ (&PatKind::Wild, &PatKind::Wild) => true,
+ _ => false,
+ }
+ }
+
+ #[expect(clippy::similar_names)]
+ fn eq_qpath(&mut self, left: &QPath<'_>, right: &QPath<'_>) -> bool {
+ match (left, right) {
+ (&QPath::Resolved(ref lty, lpath), &QPath::Resolved(ref rty, rpath)) => {
+ both(lty, rty, |l, r| self.eq_ty(l, r)) && self.eq_path(lpath, rpath)
+ },
+ (&QPath::TypeRelative(lty, lseg), &QPath::TypeRelative(rty, rseg)) => {
+ self.eq_ty(lty, rty) && self.eq_path_segment(lseg, rseg)
+ },
+ (&QPath::LangItem(llang_item, ..), &QPath::LangItem(rlang_item, ..)) => llang_item == rlang_item,
+ _ => false,
+ }
+ }
+
+ pub fn eq_path(&mut self, left: &Path<'_>, right: &Path<'_>) -> bool {
+ match (left.res, right.res) {
+ (Res::Local(l), Res::Local(r)) => l == r || self.locals.get(&l) == Some(&r),
+ (Res::Local(_), _) | (_, Res::Local(_)) => false,
+ _ => over(left.segments, right.segments, |l, r| self.eq_path_segment(l, r)),
+ }
+ }
+
+ fn eq_path_parameters(&mut self, left: &GenericArgs<'_>, right: &GenericArgs<'_>) -> bool {
+ if !(left.parenthesized || right.parenthesized) {
+ over(left.args, right.args, |l, r| self.eq_generic_arg(l, r)) // FIXME(flip1995): may not work
+ && over(left.bindings, right.bindings, |l, r| self.eq_type_binding(l, r))
+ } else if left.parenthesized && right.parenthesized {
+ over(left.inputs(), right.inputs(), |l, r| self.eq_ty(l, r))
+ && both(&Some(&left.bindings[0].ty()), &Some(&right.bindings[0].ty()), |l, r| {
+ self.eq_ty(l, r)
+ })
+ } else {
+ false
+ }
+ }
+
+ pub fn eq_path_segments(&mut self, left: &[PathSegment<'_>], right: &[PathSegment<'_>]) -> bool {
+ left.len() == right.len() && left.iter().zip(right).all(|(l, r)| self.eq_path_segment(l, r))
+ }
+
+ pub fn eq_path_segment(&mut self, left: &PathSegment<'_>, right: &PathSegment<'_>) -> bool {
+ // The `==` of idents doesn't work with different contexts,
+ // so we have to be explicit about hygiene
+ left.ident.name == right.ident.name && both(&left.args, &right.args, |l, r| self.eq_path_parameters(l, r))
+ }
+
+ pub fn eq_ty(&mut self, left: &Ty<'_>, right: &Ty<'_>) -> bool {
+ match (&left.kind, &right.kind) {
+ (&TyKind::Slice(l_vec), &TyKind::Slice(r_vec)) => self.eq_ty(l_vec, r_vec),
+ (&TyKind::Array(lt, ll), &TyKind::Array(rt, rl)) => self.eq_ty(lt, rt) && self.eq_array_length(ll, rl),
+ (&TyKind::Ptr(ref l_mut), &TyKind::Ptr(ref r_mut)) => {
+ l_mut.mutbl == r_mut.mutbl && self.eq_ty(l_mut.ty, r_mut.ty)
+ },
+ (&TyKind::Rptr(_, ref l_rmut), &TyKind::Rptr(_, ref r_rmut)) => {
+ l_rmut.mutbl == r_rmut.mutbl && self.eq_ty(l_rmut.ty, r_rmut.ty)
+ },
+ (&TyKind::Path(ref l), &TyKind::Path(ref r)) => self.eq_qpath(l, r),
+ (&TyKind::Tup(l), &TyKind::Tup(r)) => over(l, r, |l, r| self.eq_ty(l, r)),
+ (&TyKind::Infer, &TyKind::Infer) => true,
+ _ => false,
+ }
+ }
+
+ fn eq_type_binding(&mut self, left: &TypeBinding<'_>, right: &TypeBinding<'_>) -> bool {
+ left.ident.name == right.ident.name && self.eq_ty(left.ty(), right.ty())
+ }
+}
+
+/// Some simple reductions like `{ return }` => `return`
+fn reduce_exprkind<'hir>(cx: &LateContext<'_>, kind: &'hir ExprKind<'hir>) -> &'hir ExprKind<'hir> {
+ if let ExprKind::Block(block, _) = kind {
+ match (block.stmts, block.expr) {
+ // From an `if let` expression without an `else` block. The arm for the implicit wild pattern is an empty
+ // block with an empty span.
+ ([], None) if block.span.is_empty() => &ExprKind::Tup(&[]),
+ // `{}` => `()`
+ ([], None) => match snippet_opt(cx, block.span) {
+ // Don't reduce if there are any tokens contained in the braces
+ Some(snip)
+ if tokenize(&snip)
+ .map(|t| t.kind)
+ .filter(|t| {
+ !matches!(
+ t,
+ TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace
+ )
+ })
+ .ne([TokenKind::OpenBrace, TokenKind::CloseBrace].iter().copied()) =>
+ {
+ kind
+ },
+ _ => &ExprKind::Tup(&[]),
+ },
+ ([], Some(expr)) => match expr.kind {
+ // `{ return .. }` => `return ..`
+ ExprKind::Ret(..) => &expr.kind,
+ _ => kind,
+ },
+ ([stmt], None) => match stmt.kind {
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => match expr.kind {
+ // `{ return ..; }` => `return ..`
+ ExprKind::Ret(..) => &expr.kind,
+ _ => kind,
+ },
+ _ => kind,
+ },
+ _ => kind,
+ }
+ } else {
+ kind
+ }
+}
+
+fn swap_binop<'a>(
+ binop: BinOpKind,
+ lhs: &'a Expr<'a>,
+ rhs: &'a Expr<'a>,
+) -> Option<(BinOpKind, &'a Expr<'a>, &'a Expr<'a>)> {
+ match binop {
+ BinOpKind::Add | BinOpKind::Eq | BinOpKind::Ne | BinOpKind::BitAnd | BinOpKind::BitXor | BinOpKind::BitOr => {
+ Some((binop, rhs, lhs))
+ },
+ BinOpKind::Lt => Some((BinOpKind::Gt, rhs, lhs)),
+ BinOpKind::Le => Some((BinOpKind::Ge, rhs, lhs)),
+ BinOpKind::Ge => Some((BinOpKind::Le, rhs, lhs)),
+ BinOpKind::Gt => Some((BinOpKind::Lt, rhs, lhs)),
+ BinOpKind::Mul // Not always commutative, e.g. with matrices. See issue #5698
+ | BinOpKind::Shl
+ | BinOpKind::Shr
+ | BinOpKind::Rem
+ | BinOpKind::Sub
+ | BinOpKind::Div
+ | BinOpKind::And
+ | BinOpKind::Or => None,
+ }
+}
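+
+// Example (editorial sketch): `swap_binop` is what lets `eq_expr` above treat
+// `a < b` and `b > a` as equal, since
+//
+//     swap_binop(BinOpKind::Lt, a, b) == Some((BinOpKind::Gt, b, a))
+//
+// while non-commutative operators such as `Sub`, `Div`, `Rem`, the shifts and
+// the short-circuiting `And`/`Or` return `None`.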
+
+/// Checks if the two `Option`s are both `None` or contain equal values as per
+/// `eq_fn`.
+pub fn both<X>(l: &Option<X>, r: &Option<X>, mut eq_fn: impl FnMut(&X, &X) -> bool) -> bool {
+ l.as_ref()
+ .map_or_else(|| r.is_none(), |x| r.as_ref().map_or(false, |y| eq_fn(x, y)))
+}
+
+/// Checks if two slices are equal as per `eq_fn`.
+pub fn over<X>(left: &[X], right: &[X], mut eq_fn: impl FnMut(&X, &X) -> bool) -> bool {
+ left.len() == right.len() && left.iter().zip(right).all(|(x, y)| eq_fn(x, y))
+}
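+
+// Usage sketch (editorial): `both` and `over` lift an element-wise comparison
+// to `Option`s and slices respectively, e.g. `eq_block` above uses
+//
+//     over(left.stmts, right.stmts, |l, r| self.eq_stmt(l, r))
+//         && both(&left.expr, &right.expr, |l, r| self.eq_expr(l, r))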
+
+/// Counts how many leading elements of the iterators are equal as per `eq_fn`.
+pub fn count_eq<X: Sized>(
+ left: &mut dyn Iterator<Item = X>,
+ right: &mut dyn Iterator<Item = X>,
+ mut eq_fn: impl FnMut(&X, &X) -> bool,
+) -> usize {
+ left.zip(right).take_while(|(l, r)| eq_fn(l, r)).count()
+}
+
+/// Checks if two expressions evaluate to the same value, and don't contain any side effects.
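+///
+/// A minimal usage sketch (editorial example, not from the upstream docs),
+/// assuming `cx`, `a` and `b` are in scope:
+///
+/// ```rust,ignore
+/// if eq_expr_value(cx, a, b) {
+///     // `a` and `b` evaluate to the same value and contain no side effects
+/// }
+/// ```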
+pub fn eq_expr_value(cx: &LateContext<'_>, left: &Expr<'_>, right: &Expr<'_>) -> bool {
+ SpanlessEq::new(cx).deny_side_effects().eq_expr(left, right)
+}
+
+/// Type used to hash an ast element. This is different from the `Hash` trait
+/// on ast types, as that trait would also consider IDs and spans.
+///
+/// All expression kinds are hashed, but some might have a weaker hash.
+pub struct SpanlessHash<'a, 'tcx> {
+ /// Context used to evaluate constant expressions.
+ cx: &'a LateContext<'tcx>,
+ maybe_typeck_results: Option<&'tcx TypeckResults<'tcx>>,
+ s: FxHasher,
+}
+
+impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
+ pub fn new(cx: &'a LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ maybe_typeck_results: cx.maybe_typeck_results(),
+ s: FxHasher::default(),
+ }
+ }
+
+ pub fn finish(self) -> u64 {
+ self.s.finish()
+ }
+
+ pub fn hash_block(&mut self, b: &Block<'_>) {
+ for s in b.stmts {
+ self.hash_stmt(s);
+ }
+
+ if let Some(e) = b.expr {
+ self.hash_expr(e);
+ }
+
+ std::mem::discriminant(&b.rules).hash(&mut self.s);
+ }
+
+ #[expect(clippy::too_many_lines)]
+ pub fn hash_expr(&mut self, e: &Expr<'_>) {
+ let simple_const = self
+ .maybe_typeck_results
+ .and_then(|typeck_results| constant_simple(self.cx, typeck_results, e));
+
+ // const hashing may result in the same hash as some unrelated node, so add a sort of
+ // discriminant depending on which path we're choosing next
+ simple_const.hash(&mut self.s);
+ if simple_const.is_some() {
+ return;
+ }
+
+ std::mem::discriminant(&e.kind).hash(&mut self.s);
+
+ match e.kind {
+ ExprKind::AddrOf(kind, m, e) => {
+ std::mem::discriminant(&kind).hash(&mut self.s);
+ m.hash(&mut self.s);
+ self.hash_expr(e);
+ },
+ ExprKind::Continue(i) => {
+ if let Some(i) = i.label {
+ self.hash_name(i.ident.name);
+ }
+ },
+ ExprKind::Assign(l, r, _) => {
+ self.hash_expr(l);
+ self.hash_expr(r);
+ },
+ ExprKind::AssignOp(ref o, l, r) => {
+ std::mem::discriminant(&o.node).hash(&mut self.s);
+ self.hash_expr(l);
+ self.hash_expr(r);
+ },
+ ExprKind::Block(b, _) => {
+ self.hash_block(b);
+ },
+ ExprKind::Binary(op, l, r) => {
+ std::mem::discriminant(&op.node).hash(&mut self.s);
+ self.hash_expr(l);
+ self.hash_expr(r);
+ },
+ ExprKind::Break(i, ref j) => {
+ if let Some(i) = i.label {
+ self.hash_name(i.ident.name);
+ }
+ if let Some(j) = *j {
+ self.hash_expr(j);
+ }
+ },
+ ExprKind::Box(e) | ExprKind::DropTemps(e) | ExprKind::Yield(e, _) => {
+ self.hash_expr(e);
+ },
+ ExprKind::Call(fun, args) => {
+ self.hash_expr(fun);
+ self.hash_exprs(args);
+ },
+ ExprKind::Cast(e, ty) | ExprKind::Type(e, ty) => {
+ self.hash_expr(e);
+ self.hash_ty(ty);
+ },
+ ExprKind::Closure(&Closure {
+ capture_clause, body, ..
+ }) => {
+ std::mem::discriminant(&capture_clause).hash(&mut self.s);
+ // closures inherit TypeckResults
+ self.hash_expr(&self.cx.tcx.hir().body(body).value);
+ },
+ ExprKind::Field(e, ref f) => {
+ self.hash_expr(e);
+ self.hash_name(f.name);
+ },
+ ExprKind::Index(a, i) => {
+ self.hash_expr(a);
+ self.hash_expr(i);
+ },
+ ExprKind::InlineAsm(asm) => {
+ for piece in asm.template {
+ match piece {
+ InlineAsmTemplatePiece::String(s) => s.hash(&mut self.s),
+ InlineAsmTemplatePiece::Placeholder {
+ operand_idx,
+ modifier,
+ span: _,
+ } => {
+ operand_idx.hash(&mut self.s);
+ modifier.hash(&mut self.s);
+ },
+ }
+ }
+ asm.options.hash(&mut self.s);
+ for (op, _op_sp) in asm.operands {
+ match op {
+ InlineAsmOperand::In { reg, expr } => {
+ reg.hash(&mut self.s);
+ self.hash_expr(expr);
+ },
+ InlineAsmOperand::Out { reg, late, expr } => {
+ reg.hash(&mut self.s);
+ late.hash(&mut self.s);
+ if let Some(expr) = expr {
+ self.hash_expr(expr);
+ }
+ },
+ InlineAsmOperand::InOut { reg, late, expr } => {
+ reg.hash(&mut self.s);
+ late.hash(&mut self.s);
+ self.hash_expr(expr);
+ },
+ InlineAsmOperand::SplitInOut {
+ reg,
+ late,
+ in_expr,
+ out_expr,
+ } => {
+ reg.hash(&mut self.s);
+ late.hash(&mut self.s);
+ self.hash_expr(in_expr);
+ if let Some(out_expr) = out_expr {
+ self.hash_expr(out_expr);
+ }
+ },
+ InlineAsmOperand::Const { anon_const } | InlineAsmOperand::SymFn { anon_const } => {
+ self.hash_body(anon_const.body);
+ },
+ InlineAsmOperand::SymStatic { path, def_id: _ } => self.hash_qpath(path),
+ }
+ }
+ },
+ ExprKind::Let(Let { pat, init, ty, .. }) => {
+ self.hash_expr(init);
+ if let Some(ty) = ty {
+ self.hash_ty(ty);
+ }
+ self.hash_pat(pat);
+ },
+ ExprKind::Err => {},
+ ExprKind::Lit(ref l) => {
+ l.node.hash(&mut self.s);
+ },
+ ExprKind::Loop(b, ref i, ..) => {
+ self.hash_block(b);
+ if let Some(i) = *i {
+ self.hash_name(i.ident.name);
+ }
+ },
+ ExprKind::If(cond, then, ref else_opt) => {
+ self.hash_expr(cond);
+ self.hash_expr(then);
+ if let Some(e) = *else_opt {
+ self.hash_expr(e);
+ }
+ },
+ ExprKind::Match(e, arms, ref s) => {
+ self.hash_expr(e);
+
+ for arm in arms {
+ self.hash_pat(arm.pat);
+ if let Some(ref e) = arm.guard {
+ self.hash_guard(e);
+ }
+ self.hash_expr(arm.body);
+ }
+
+ s.hash(&mut self.s);
+ },
+ ExprKind::MethodCall(path, args, ref _fn_span) => {
+ self.hash_name(path.ident.name);
+ self.hash_exprs(args);
+ },
+ ExprKind::ConstBlock(ref l_id) => {
+ self.hash_body(l_id.body);
+ },
+ ExprKind::Repeat(e, len) => {
+ self.hash_expr(e);
+ self.hash_array_length(len);
+ },
+ ExprKind::Ret(ref e) => {
+ if let Some(e) = *e {
+ self.hash_expr(e);
+ }
+ },
+ ExprKind::Path(ref qpath) => {
+ self.hash_qpath(qpath);
+ },
+ ExprKind::Struct(path, fields, ref expr) => {
+ self.hash_qpath(path);
+
+ for f in fields {
+ self.hash_name(f.ident.name);
+ self.hash_expr(f.expr);
+ }
+
+ if let Some(e) = *expr {
+ self.hash_expr(e);
+ }
+ },
+ ExprKind::Tup(tup) => {
+ self.hash_exprs(tup);
+ },
+ ExprKind::Array(v) => {
+ self.hash_exprs(v);
+ },
+ ExprKind::Unary(lop, le) => {
+ std::mem::discriminant(&lop).hash(&mut self.s);
+ self.hash_expr(le);
+ },
+ }
+ }
+
+ pub fn hash_exprs(&mut self, e: &[Expr<'_>]) {
+ for e in e {
+ self.hash_expr(e);
+ }
+ }
+
+ pub fn hash_name(&mut self, n: Symbol) {
+ n.hash(&mut self.s);
+ }
+
+ pub fn hash_qpath(&mut self, p: &QPath<'_>) {
+ match *p {
+ QPath::Resolved(_, path) => {
+ self.hash_path(path);
+ },
+ QPath::TypeRelative(_, path) => {
+ self.hash_name(path.ident.name);
+ },
+ QPath::LangItem(lang_item, ..) => {
+ std::mem::discriminant(&lang_item).hash(&mut self.s);
+ },
+ }
+ // self.maybe_typeck_results.unwrap().qpath_res(p, id).hash(&mut self.s);
+ }
+
+ pub fn hash_pat(&mut self, pat: &Pat<'_>) {
+ std::mem::discriminant(&pat.kind).hash(&mut self.s);
+ match pat.kind {
+ PatKind::Binding(ann, _, _, pat) => {
+ std::mem::discriminant(&ann).hash(&mut self.s);
+ if let Some(pat) = pat {
+ self.hash_pat(pat);
+ }
+ },
+ PatKind::Box(pat) => self.hash_pat(pat),
+ PatKind::Lit(expr) => self.hash_expr(expr),
+ PatKind::Or(pats) => {
+ for pat in pats {
+ self.hash_pat(pat);
+ }
+ },
+ PatKind::Path(ref qpath) => self.hash_qpath(qpath),
+ PatKind::Range(s, e, i) => {
+ if let Some(s) = s {
+ self.hash_expr(s);
+ }
+ if let Some(e) = e {
+ self.hash_expr(e);
+ }
+ std::mem::discriminant(&i).hash(&mut self.s);
+ },
+ PatKind::Ref(pat, mu) => {
+ self.hash_pat(pat);
+ std::mem::discriminant(&mu).hash(&mut self.s);
+ },
+ PatKind::Slice(l, m, r) => {
+ for pat in l {
+ self.hash_pat(pat);
+ }
+ if let Some(pat) = m {
+ self.hash_pat(pat);
+ }
+ for pat in r {
+ self.hash_pat(pat);
+ }
+ },
+ PatKind::Struct(ref qpath, fields, e) => {
+ self.hash_qpath(qpath);
+ for f in fields {
+ self.hash_name(f.ident.name);
+ self.hash_pat(f.pat);
+ }
+ e.hash(&mut self.s);
+ },
+ PatKind::Tuple(pats, e) => {
+ for pat in pats {
+ self.hash_pat(pat);
+ }
+ e.hash(&mut self.s);
+ },
+ PatKind::TupleStruct(ref qpath, pats, e) => {
+ self.hash_qpath(qpath);
+ for pat in pats {
+ self.hash_pat(pat);
+ }
+ e.hash(&mut self.s);
+ },
+ PatKind::Wild => {},
+ }
+ }
+
+ pub fn hash_path(&mut self, path: &Path<'_>) {
+ match path.res {
+ // constant hash since equality is dependent on inter-expression context
+ // e.g. the expressions `if let Some(x) = foo() {}` and `if let Some(y) = foo() {}` are considered equal
+ // even though the binding names are different and they have different `HirId`s.
+ Res::Local(_) => 1_usize.hash(&mut self.s),
+ _ => {
+ for seg in path.segments {
+ self.hash_name(seg.ident.name);
+ self.hash_generic_args(seg.args().args);
+ }
+ },
+ }
+ }
+
+ pub fn hash_stmt(&mut self, b: &Stmt<'_>) {
+ std::mem::discriminant(&b.kind).hash(&mut self.s);
+
+ match &b.kind {
+ StmtKind::Local(local) => {
+ self.hash_pat(local.pat);
+ if let Some(init) = local.init {
+ self.hash_expr(init);
+ }
+ if let Some(els) = local.els {
+ self.hash_block(els);
+ }
+ },
+ StmtKind::Item(..) => {},
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => {
+ self.hash_expr(expr);
+ },
+ }
+ }
+
+ pub fn hash_guard(&mut self, g: &Guard<'_>) {
+ match g {
+ Guard::If(expr) | Guard::IfLet(Let { init: expr, .. }) => {
+ self.hash_expr(expr);
+ },
+ }
+ }
+
+ pub fn hash_lifetime(&mut self, lifetime: Lifetime) {
+ std::mem::discriminant(&lifetime.name).hash(&mut self.s);
+ if let LifetimeName::Param(param_id, ref name) = lifetime.name {
+ std::mem::discriminant(name).hash(&mut self.s);
+ param_id.hash(&mut self.s);
+ match name {
+ ParamName::Plain(ref ident) => {
+ ident.name.hash(&mut self.s);
+ },
+ ParamName::Fresh | ParamName::Error => {},
+ }
+ }
+ }
+
+ pub fn hash_ty(&mut self, ty: &Ty<'_>) {
+ std::mem::discriminant(&ty.kind).hash(&mut self.s);
+ self.hash_tykind(&ty.kind);
+ }
+
+ pub fn hash_tykind(&mut self, ty: &TyKind<'_>) {
+ match ty {
+ TyKind::Slice(ty) => {
+ self.hash_ty(ty);
+ },
+ &TyKind::Array(ty, len) => {
+ self.hash_ty(ty);
+ self.hash_array_length(len);
+ },
+ TyKind::Ptr(ref mut_ty) => {
+ self.hash_ty(mut_ty.ty);
+ mut_ty.mutbl.hash(&mut self.s);
+ },
+ TyKind::Rptr(lifetime, ref mut_ty) => {
+ self.hash_lifetime(*lifetime);
+ self.hash_ty(mut_ty.ty);
+ mut_ty.mutbl.hash(&mut self.s);
+ },
+ TyKind::BareFn(bfn) => {
+ bfn.unsafety.hash(&mut self.s);
+ bfn.abi.hash(&mut self.s);
+ for arg in bfn.decl.inputs {
+ self.hash_ty(arg);
+ }
+ std::mem::discriminant(&bfn.decl.output).hash(&mut self.s);
+ match bfn.decl.output {
+ FnRetTy::DefaultReturn(_) => {},
+ FnRetTy::Return(ty) => {
+ self.hash_ty(ty);
+ },
+ }
+ bfn.decl.c_variadic.hash(&mut self.s);
+ },
+ TyKind::Tup(ty_list) => {
+ for ty in *ty_list {
+ self.hash_ty(ty);
+ }
+ },
+ TyKind::Path(ref qpath) => self.hash_qpath(qpath),
+ TyKind::OpaqueDef(_, arg_list) => {
+ self.hash_generic_args(arg_list);
+ },
+ TyKind::TraitObject(_, lifetime, _) => {
+ self.hash_lifetime(*lifetime);
+ },
+ TyKind::Typeof(anon_const) => {
+ self.hash_body(anon_const.body);
+ },
+ TyKind::Err | TyKind::Infer | TyKind::Never => {},
+ }
+ }
+
+ pub fn hash_array_length(&mut self, length: ArrayLen) {
+ match length {
+ ArrayLen::Infer(..) => {},
+ ArrayLen::Body(anon_const) => self.hash_body(anon_const.body),
+ }
+ }
+
+ pub fn hash_body(&mut self, body_id: BodyId) {
+ // swap out TypeckResults when hashing a body
+ let old_maybe_typeck_results = self.maybe_typeck_results.replace(self.cx.tcx.typeck_body(body_id));
+ self.hash_expr(&self.cx.tcx.hir().body(body_id).value);
+ self.maybe_typeck_results = old_maybe_typeck_results;
+ }
+
+ fn hash_generic_args(&mut self, arg_list: &[GenericArg<'_>]) {
+ for arg in arg_list {
+ match *arg {
+ GenericArg::Lifetime(l) => self.hash_lifetime(l),
+ GenericArg::Type(ref ty) => self.hash_ty(ty),
+ GenericArg::Const(ref ca) => self.hash_body(ca.value.body),
+ GenericArg::Infer(ref inf) => self.hash_ty(&inf.to_ty()),
+ }
+ }
+ }
+}
+
+pub fn hash_stmt(cx: &LateContext<'_>, s: &Stmt<'_>) -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_stmt(s);
+ h.finish()
+}
+
+pub fn hash_expr(cx: &LateContext<'_>, e: &Expr<'_>) -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_expr(e);
+ h.finish()
+}
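+
+// Usage sketch (editorial): `hash_expr` pairs naturally with `eq_expr_value`
+// when looking for duplicate expressions: bucket candidates by hash first,
+// then confirm with the (more expensive) spanless equality. Assuming a
+// `FxHashMap<u64, Vec<&Expr<'_>>>` named `buckets` is in scope:
+//
+//     buckets.entry(hash_expr(cx, expr)).or_default().push(expr);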
diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs
new file mode 100644
index 000000000..8322df862
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/lib.rs
@@ -0,0 +1,2304 @@
+#![feature(array_chunks)]
+#![feature(box_patterns)]
+#![feature(control_flow_enum)]
+#![feature(let_else)]
+#![feature(let_chains)]
+#![feature(lint_reasons)]
+#![feature(once_cell)]
+#![feature(rustc_private)]
+#![recursion_limit = "512"]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::must_use_candidate)]
+// warn on the same lints as `clippy_lints`
+#![warn(trivial_casts, trivial_numeric_casts)]
+// warn on lints that are included in `rust-lang/rust`'s bootstrap
+#![warn(rust_2018_idioms, unused_lifetimes)]
+// warn on rustc internal lints
+#![warn(rustc::internal)]
+
+// FIXME: switch to something more ergonomic here, once available.
+// (Currently there is no way to opt into sysroot crates without `extern crate`.)
+extern crate rustc_ast;
+extern crate rustc_ast_pretty;
+extern crate rustc_attr;
+extern crate rustc_data_structures;
+extern crate rustc_errors;
+extern crate rustc_hir;
+extern crate rustc_infer;
+extern crate rustc_lexer;
+extern crate rustc_lint;
+extern crate rustc_middle;
+extern crate rustc_session;
+extern crate rustc_span;
+extern crate rustc_target;
+extern crate rustc_trait_selection;
+extern crate rustc_typeck;
+
+#[macro_use]
+pub mod sym_helper;
+
+pub mod ast_utils;
+pub mod attrs;
+pub mod comparisons;
+pub mod consts;
+pub mod diagnostics;
+pub mod eager_or_lazy;
+pub mod higher;
+mod hir_utils;
+pub mod macros;
+pub mod msrvs;
+pub mod numeric_literal;
+pub mod paths;
+pub mod ptr;
+pub mod qualify_min_const_fn;
+pub mod source;
+pub mod str_utils;
+pub mod sugg;
+pub mod ty;
+pub mod usage;
+pub mod visitors;
+
+pub use self::attrs::*;
+pub use self::hir_utils::{
+ both, count_eq, eq_expr_value, hash_expr, hash_stmt, over, HirEqInterExpr, SpanlessEq, SpanlessHash,
+};
+
+use std::collections::hash_map::Entry;
+use std::hash::BuildHasherDefault;
+use std::sync::OnceLock;
+use std::sync::{Mutex, MutexGuard};
+
+use if_chain::if_chain;
+use rustc_ast::ast::{self, LitKind};
+use rustc_ast::Attribute;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::unhash::UnhashMap;
+use rustc_hir as hir;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_ID};
+use rustc_hir::hir_id::{HirIdMap, HirIdSet};
+use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
+use rustc_hir::LangItem::{OptionNone, ResultErr, ResultOk};
+use rustc_hir::{
+ def, Arm, ArrayLen, BindingAnnotation, Block, BlockCheckMode, Body, Closure, Constness, Destination, Expr,
+ ExprKind, FnDecl, HirId, Impl, ImplItem, ImplItemKind, IsAsync, Item, ItemKind, LangItem, Local, MatchSource,
+ Mutability, Node, Param, Pat, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, TraitItem, TraitItemKind,
+ TraitRef, TyKind, UnOp,
+};
+use rustc_lint::{LateContext, Level, Lint, LintContext};
+use rustc_middle::hir::place::PlaceBase;
+use rustc_middle::ty as rustc_ty;
+use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
+use rustc_middle::ty::binding::BindingMode;
+use rustc_middle::ty::fast_reject::SimplifiedTypeGen::{
+ ArraySimplifiedType, BoolSimplifiedType, CharSimplifiedType, FloatSimplifiedType, IntSimplifiedType,
+ PtrSimplifiedType, SliceSimplifiedType, StrSimplifiedType, UintSimplifiedType,
+};
+use rustc_middle::ty::{
+ layout::IntegerExt, BorrowKind, ClosureKind, DefIdTree, Ty, TyCtxt, TypeAndMut, TypeVisitable, UpvarCapture,
+};
+use rustc_middle::ty::{FloatTy, IntTy, UintTy};
+use rustc_semver::RustcVersion;
+use rustc_session::Session;
+use rustc_span::hygiene::{ExpnKind, MacroKind};
+use rustc_span::source_map::original_sp;
+use rustc_span::sym;
+use rustc_span::symbol::{kw, Symbol};
+use rustc_span::{Span, DUMMY_SP};
+use rustc_target::abi::Integer;
+
+use crate::consts::{constant, Constant};
+use crate::ty::{can_partially_move_ty, expr_sig, is_copy, is_recursively_primitive_type, ty_is_fn_once_param};
+use crate::visitors::expr_visitor_no_bodies;
+
+pub fn parse_msrv(msrv: &str, sess: Option<&Session>, span: Option<Span>) -> Option<RustcVersion> {
+ if let Ok(version) = RustcVersion::parse(msrv) {
+ return Some(version);
+ } else if let Some(sess) = sess {
+ if let Some(span) = span {
+ sess.span_err(span, &format!("`{}` is not a valid Rust version", msrv));
+ }
+ }
+ None
+}
+
+pub fn meets_msrv(msrv: Option<RustcVersion>, lint_msrv: RustcVersion) -> bool {
+ msrv.map_or(true, |msrv| msrv.meets(lint_msrv))
+}
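+
+// Usage sketch (editorial): a lint with an MSRV gate typically parses the
+// configured version once and then guards its emission. `SOME_LINT_MSRV` is a
+// placeholder for a `RustcVersion` constant such as those in the `msrvs` module:
+//
+//     let msrv = parse_msrv("1.43", None, None);
+//     if meets_msrv(msrv, SOME_LINT_MSRV) {
+//         // emit the lint
+//     }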
+
+#[macro_export]
+macro_rules! extract_msrv_attr {
+ ($context:ident) => {
+ fn enter_lint_attrs(&mut self, cx: &rustc_lint::$context<'_>, attrs: &[rustc_ast::ast::Attribute]) {
+ let sess = rustc_lint::LintContext::sess(cx);
+ match $crate::get_unique_inner_attr(sess, attrs, "msrv") {
+ Some(msrv_attr) => {
+ if let Some(msrv) = msrv_attr.value_str() {
+ self.msrv = $crate::parse_msrv(&msrv.to_string(), Some(sess), Some(msrv_attr.span));
+ } else {
+ sess.span_err(msrv_attr.span, "bad clippy attribute");
+ }
+ },
+ _ => (),
+ }
+ }
+ };
+}
+
+/// If the given expression is a local binding, find the initializer expression.
+/// If that initializer expression is another local binding, find its initializer again.
+/// This process repeats as long as possible (but usually no more than once). Initializer
+/// expressions with adjustments are ignored. If this is not desired, use [`find_binding_init`]
+/// instead.
+///
+/// Examples:
+/// ```
+/// let abc = 1;
+/// // ^ output
+/// let def = abc;
+/// dbg!(def);
+/// // ^^^ input
+///
+/// // or...
+/// let abc = 1;
+/// let def = abc + 2;
+/// // ^^^^^^^ output
+/// dbg!(def);
+/// // ^^^ input
+/// ```
+pub fn expr_or_init<'a, 'b, 'tcx: 'b>(cx: &LateContext<'tcx>, mut expr: &'a Expr<'b>) -> &'a Expr<'b> {
+ while let Some(init) = path_to_local(expr)
+ .and_then(|id| find_binding_init(cx, id))
+ .filter(|init| cx.typeck_results().expr_adjustments(init).is_empty())
+ {
+ expr = init;
+ }
+ expr
+}
+
+/// Finds the initializer expression for a local binding. Returns `None` if the binding is mutable.
+/// By only considering immutable bindings, we guarantee that the returned expression represents the
+/// value of the binding wherever it is referenced.
+///
+/// Example: For `let x = 1`, if the `HirId` of `x` is provided, the `Expr` `1` is returned.
+/// Note: If you have an expression that references a binding `x`, use `path_to_local` to get the
+/// canonical binding `HirId`.
+pub fn find_binding_init<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Expr<'tcx>> {
+ let hir = cx.tcx.hir();
+ if_chain! {
+ if let Some(Node::Pat(pat)) = hir.find(hir_id);
+ if matches!(pat.kind, PatKind::Binding(BindingAnnotation::Unannotated, ..));
+ let parent = hir.get_parent_node(hir_id);
+ if let Some(Node::Local(local)) = hir.find(parent);
+ then {
+ return local.init;
+ }
+ }
+ None
+}
+
+/// Returns `true` if the given `HirId` is inside a constant context
+///
+/// # Example
+///
+/// ```rust,ignore
+/// if in_constant(cx, expr.hir_id) {
+/// // Do something
+/// }
+/// ```
+pub fn in_constant(cx: &LateContext<'_>, id: HirId) -> bool {
+ let parent_id = cx.tcx.hir().get_parent_item(id);
+ match cx.tcx.hir().get_by_def_id(parent_id) {
+ Node::Item(&Item {
+ kind: ItemKind::Const(..) | ItemKind::Static(..),
+ ..
+ })
+ | Node::TraitItem(&TraitItem {
+ kind: TraitItemKind::Const(..),
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Const(..),
+ ..
+ })
+ | Node::AnonConst(_) => true,
+ Node::Item(&Item {
+ kind: ItemKind::Fn(ref sig, ..),
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Fn(ref sig, _),
+ ..
+ }) => sig.header.constness == Constness::Const,
+ _ => false,
+ }
+}
+
+/// Checks if a `QPath` resolves to a constructor of a `LangItem`.
+/// For example, use this to check whether a function call or a pattern is `Some(..)`.
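+///
+/// ```rust,ignore
+/// // Editorial sketch, mirroring the use in `is_default_equivalent` below:
+/// if is_lang_ctor(cx, qpath, OptionNone) {
+///     // `qpath` resolves to `Option::None`
+/// }
+/// ```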
+pub fn is_lang_ctor(cx: &LateContext<'_>, qpath: &QPath<'_>, lang_item: LangItem) -> bool {
+ if let QPath::Resolved(_, path) = qpath {
+ if let Res::Def(DefKind::Ctor(..), ctor_id) = path.res {
+ if let Ok(item_id) = cx.tcx.lang_items().require(lang_item) {
+ return cx.tcx.parent(ctor_id) == item_id;
+ }
+ }
+ }
+ false
+}
+
+pub fn is_unit_expr(expr: &Expr<'_>) -> bool {
+ matches!(
+ expr.kind,
+ ExprKind::Block(
+ Block {
+ stmts: [],
+ expr: None,
+ ..
+ },
+ _
+ ) | ExprKind::Tup([])
+ )
+}
+
+/// Checks if given pattern is a wildcard (`_`)
+pub fn is_wild(pat: &Pat<'_>) -> bool {
+ matches!(pat.kind, PatKind::Wild)
+}
+
+/// Checks if the method call given in `expr` belongs to the given trait.
+/// This is a deprecated function, consider using [`is_trait_method`].
+pub fn match_trait_method(cx: &LateContext<'_>, expr: &Expr<'_>, path: &[&str]) -> bool {
+ let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
+ let trt_id = cx.tcx.trait_of_item(def_id);
+ trt_id.map_or(false, |trt_id| match_def_path(cx, trt_id, path))
+}
+
+/// Checks if a method is defined in an impl of a diagnostic item
+pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
+ if let Some(impl_did) = cx.tcx.impl_of_method(def_id) {
+ if let Some(adt) = cx.tcx.type_of(impl_did).ty_adt_def() {
+ return cx.tcx.is_diagnostic_item(diag_item, adt.did());
+ }
+ }
+ false
+}
+
+/// Checks if a method is in a diagnostic item trait
+pub fn is_diag_trait_item(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
+ if let Some(trait_did) = cx.tcx.trait_of_item(def_id) {
+ return cx.tcx.is_diagnostic_item(diag_item, trait_did);
+ }
+ false
+}
+
+/// Checks if the method call given in `expr` belongs to the given trait.
+pub fn is_trait_method(cx: &LateContext<'_>, expr: &Expr<'_>, diag_item: Symbol) -> bool {
+ cx.typeck_results()
+ .type_dependent_def_id(expr.hir_id)
+ .map_or(false, |did| is_diag_trait_item(cx, did, diag_item))
+}
+
+/// Checks if the given expression is a path referring to an item on the trait
+/// that is marked with the given diagnostic item.
+///
+/// For checking method call expressions instead of path expressions, use
+/// [`is_trait_method`].
+///
+/// For example, this can be used to find if an expression like `u64::default`
+/// refers to an item of the trait `Default`, which is associated with the
+/// `diag_item` of `sym::Default`.
+pub fn is_trait_item(cx: &LateContext<'_>, expr: &Expr<'_>, diag_item: Symbol) -> bool {
+ if let hir::ExprKind::Path(ref qpath) = expr.kind {
+ cx.qpath_res(qpath, expr.hir_id)
+ .opt_def_id()
+ .map_or(false, |def_id| is_diag_trait_item(cx, def_id, diag_item))
+ } else {
+ false
+ }
+}
+
+pub fn last_path_segment<'tcx>(path: &QPath<'tcx>) -> &'tcx PathSegment<'tcx> {
+ match *path {
+ QPath::Resolved(_, path) => path.segments.last().expect("A path must have at least one segment"),
+ QPath::TypeRelative(_, seg) => seg,
+ QPath::LangItem(..) => panic!("last_path_segment: lang item has no path segments"),
+ }
+}
+
+pub fn qpath_generic_tys<'tcx>(qpath: &QPath<'tcx>) -> impl Iterator<Item = &'tcx hir::Ty<'tcx>> {
+ last_path_segment(qpath)
+ .args
+ .map_or(&[][..], |a| a.args)
+ .iter()
+ .filter_map(|a| match a {
+ hir::GenericArg::Type(ty) => Some(ty),
+ _ => None,
+ })
+}
+
+/// THIS METHOD IS DEPRECATED and will eventually be removed since it does not match against the
+/// entire path or resolved `DefId`. Prefer using `match_def_path`. Consider getting a `DefId` from
+/// `QPath::Resolved.1.res.opt_def_id()`.
+///
+/// Matches a `QPath` against a slice of segment string literals.
+///
+/// There is also `match_path` if you are dealing with a `rustc_hir::Path` instead of a
+/// `rustc_hir::QPath`.
+///
+/// # Examples
+/// ```rust,ignore
+/// match_qpath(path, &["std", "rt", "begin_unwind"])
+/// ```
+pub fn match_qpath(path: &QPath<'_>, segments: &[&str]) -> bool {
+ match *path {
+ QPath::Resolved(_, path) => match_path(path, segments),
+ QPath::TypeRelative(ty, segment) => match ty.kind {
+ TyKind::Path(ref inner_path) => {
+ if let [prefix @ .., end] = segments {
+ if match_qpath(inner_path, prefix) {
+ return segment.ident.name.as_str() == *end;
+ }
+ }
+ false
+ },
+ _ => false,
+ },
+ QPath::LangItem(..) => false,
+ }
+}
+
+/// If the expression is a path, resolves it to a `DefId` and checks if it matches the given path.
+///
+/// Please use `is_expr_diagnostic_item` if the target is a diagnostic item.
+pub fn is_expr_path_def_path(cx: &LateContext<'_>, expr: &Expr<'_>, segments: &[&str]) -> bool {
+ path_def_id(cx, expr).map_or(false, |id| match_def_path(cx, id, segments))
+}
+
+/// If the expression is a path, resolves it to a `DefId` and checks if it matches the given
+/// diagnostic item.
+pub fn is_expr_diagnostic_item(cx: &LateContext<'_>, expr: &Expr<'_>, diag_item: Symbol) -> bool {
+ path_def_id(cx, expr).map_or(false, |id| cx.tcx.is_diagnostic_item(diag_item, id))
+}
+
+/// THIS METHOD IS DEPRECATED and will eventually be removed since it does not match against the
+/// entire path or resolved `DefId`. Prefer using `match_def_path`. Consider getting a `DefId` from
+/// `QPath::Resolved.1.res.opt_def_id()`.
+///
+/// Matches a `Path` against a slice of segment string literals.
+///
+/// There is also `match_qpath` if you are dealing with a `rustc_hir::QPath` instead of a
+/// `rustc_hir::Path`.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// if match_path(&trait_ref.path, &paths::HASH) {
+/// // This is the `std::hash::Hash` trait.
+/// }
+///
+/// if match_path(ty_path, &["rustc", "lint", "Lint"]) {
+/// // This is a `rustc_middle::lint::Lint`.
+/// }
+/// ```
+pub fn match_path(path: &Path<'_>, segments: &[&str]) -> bool {
+ path.segments
+ .iter()
+ .rev()
+ .zip(segments.iter().rev())
+ .all(|(a, b)| a.ident.name.as_str() == *b)
+}
+
+/// If the expression is a path to a local, returns the canonical `HirId` of the local.
+pub fn path_to_local(expr: &Expr<'_>) -> Option<HirId> {
+ if let ExprKind::Path(QPath::Resolved(None, path)) = expr.kind {
+ if let Res::Local(id) = path.res {
+ return Some(id);
+ }
+ }
+ None
+}
+
+/// Returns true if the expression is a path to a local with the specified `HirId`.
+/// Use this function to see if an expression matches a function argument or a match binding.
+pub fn path_to_local_id(expr: &Expr<'_>, id: HirId) -> bool {
+ path_to_local(expr) == Some(id)
+}
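+
+// Usage sketch (editorial): given the binding `HirId` of a closure parameter,
+// `path_to_local_id` answers whether an argument expression is just that
+// parameter, e.g. when detecting closures of the form `|x| foo(x)`
+// (`arg` and `param_id` are assumed to be in scope):
+//
+//     if path_to_local_id(arg, param_id) {
+//         // `arg` is exactly the bound parameter
+//     }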
+
+pub trait MaybePath<'hir> {
+ fn hir_id(&self) -> HirId;
+ fn qpath_opt(&self) -> Option<&QPath<'hir>>;
+}
+
+macro_rules! maybe_path {
+ ($ty:ident, $kind:ident) => {
+ impl<'hir> MaybePath<'hir> for hir::$ty<'hir> {
+ fn hir_id(&self) -> HirId {
+ self.hir_id
+ }
+ fn qpath_opt(&self) -> Option<&QPath<'hir>> {
+ match &self.kind {
+ hir::$kind::Path(qpath) => Some(qpath),
+ _ => None,
+ }
+ }
+ }
+ };
+}
+maybe_path!(Expr, ExprKind);
+maybe_path!(Pat, PatKind);
+maybe_path!(Ty, TyKind);
+
+/// If `maybe_path` is a path node, resolves it, otherwise returns `Res::Err`
+pub fn path_res<'tcx>(cx: &LateContext<'_>, maybe_path: &impl MaybePath<'tcx>) -> Res {
+ match maybe_path.qpath_opt() {
+ None => Res::Err,
+ Some(qpath) => cx.qpath_res(qpath, maybe_path.hir_id()),
+ }
+}
+
+/// If `maybe_path` is a path node which resolves to an item, retrieves the item ID
+pub fn path_def_id<'tcx>(cx: &LateContext<'_>, maybe_path: &impl MaybePath<'tcx>) -> Option<DefId> {
+ path_res(cx, maybe_path).opt_def_id()
+}
+
+/// Resolves a def path like `std::vec::Vec`.
+/// This function is expensive and should be used sparingly.
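+///
+/// ```rust,ignore
+/// // Editorial sketch of a call, using the example path mentioned above:
+/// let res = def_path_res(cx, &["std", "vec", "Vec"]);
+/// ```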
+pub fn def_path_res(cx: &LateContext<'_>, path: &[&str]) -> Res {
+ fn item_child_by_name(tcx: TyCtxt<'_>, def_id: DefId, name: &str) -> Option<Res> {
+ match tcx.def_kind(def_id) {
+ DefKind::Mod | DefKind::Enum | DefKind::Trait => tcx
+ .module_children(def_id)
+ .iter()
+ .find(|item| item.ident.name.as_str() == name)
+ .map(|child| child.res.expect_non_local()),
+ DefKind::Impl => tcx
+ .associated_item_def_ids(def_id)
+ .iter()
+ .copied()
+ .find(|assoc_def_id| tcx.item_name(*assoc_def_id).as_str() == name)
+ .map(|assoc_def_id| Res::Def(tcx.def_kind(assoc_def_id), assoc_def_id)),
+ _ => None,
+ }
+ }
+ fn find_primitive<'tcx>(tcx: TyCtxt<'tcx>, name: &str) -> impl Iterator<Item = DefId> + 'tcx {
+ let single = |ty| tcx.incoherent_impls(ty).iter().copied();
+ let empty = || [].iter().copied();
+ match name {
+ "bool" => single(BoolSimplifiedType),
+ "char" => single(CharSimplifiedType),
+ "str" => single(StrSimplifiedType),
+ "array" => single(ArraySimplifiedType),
+ "slice" => single(SliceSimplifiedType),
+ // FIXME: rustdoc documents these two using just `pointer`.
+ //
+ // Maybe this is something we should do here too.
+ "const_ptr" => single(PtrSimplifiedType(Mutability::Not)),
+ "mut_ptr" => single(PtrSimplifiedType(Mutability::Mut)),
+ "isize" => single(IntSimplifiedType(IntTy::Isize)),
+ "i8" => single(IntSimplifiedType(IntTy::I8)),
+ "i16" => single(IntSimplifiedType(IntTy::I16)),
+ "i32" => single(IntSimplifiedType(IntTy::I32)),
+ "i64" => single(IntSimplifiedType(IntTy::I64)),
+ "i128" => single(IntSimplifiedType(IntTy::I128)),
+ "usize" => single(UintSimplifiedType(UintTy::Usize)),
+ "u8" => single(UintSimplifiedType(UintTy::U8)),
+ "u16" => single(UintSimplifiedType(UintTy::U16)),
+ "u32" => single(UintSimplifiedType(UintTy::U32)),
+ "u64" => single(UintSimplifiedType(UintTy::U64)),
+ "u128" => single(UintSimplifiedType(UintTy::U128)),
+ "f32" => single(FloatSimplifiedType(FloatTy::F32)),
+ "f64" => single(FloatSimplifiedType(FloatTy::F64)),
+ _ => empty(),
+ }
+ }
+ fn find_crate(tcx: TyCtxt<'_>, name: &str) -> Option<DefId> {
+ tcx.crates(())
+ .iter()
+ .copied()
+ .find(|&num| tcx.crate_name(num).as_str() == name)
+ .map(CrateNum::as_def_id)
+ }
+
+ let (base, first, path) = match *path {
+ [base, first, ref path @ ..] => (base, first, path),
+ [primitive] => {
+ return PrimTy::from_name(Symbol::intern(primitive)).map_or(Res::Err, Res::PrimTy);
+ },
+ _ => return Res::Err,
+ };
+ let tcx = cx.tcx;
+ let starts = find_primitive(tcx, base)
+ .chain(find_crate(tcx, base))
+ .filter_map(|id| item_child_by_name(tcx, id, first));
+
+ for first in starts {
+ let last = path
+ .iter()
+ .copied()
+ // for each segment, find the child item
+ .try_fold(first, |res, segment| {
+ let def_id = res.def_id();
+ if let Some(item) = item_child_by_name(tcx, def_id, segment) {
+ Some(item)
+ } else if matches!(res, Res::Def(DefKind::Enum | DefKind::Struct, _)) {
+ // it is not a child item so check inherent impl items
+ tcx.inherent_impls(def_id)
+ .iter()
+ .find_map(|&impl_def_id| item_child_by_name(tcx, impl_def_id, segment))
+ } else {
+ None
+ }
+ });
+
+ if let Some(last) = last {
+ return last;
+ }
+ }
+
+ Res::Err
+}
+
+/// Convenience function to get the `DefId` of a trait by path.
+/// It could be a trait or trait alias.
+pub fn get_trait_def_id(cx: &LateContext<'_>, path: &[&str]) -> Option<DefId> {
+ match def_path_res(cx, path) {
+ Res::Def(DefKind::Trait | DefKind::TraitAlias, trait_id) => Some(trait_id),
+ _ => None,
+ }
+}
+
+/// Gets the `hir::TraitRef` of the trait the given method is implemented for.
+///
+/// Use this if you want to find the `TraitRef` of the `Add` trait in this example:
+///
+/// ```rust
+/// struct Point(isize, isize);
+///
+/// impl std::ops::Add for Point {
+/// type Output = Self;
+///
+/// fn add(self, other: Self) -> Self {
+/// Point(0, 0)
+/// }
+/// }
+/// ```
+pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'tcx>, def_id: LocalDefId) -> Option<&'tcx TraitRef<'tcx>> {
+ // Get the implemented trait for the current function
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
+ let parent_impl = cx.tcx.hir().get_parent_item(hir_id);
+ if_chain! {
+ if parent_impl != CRATE_DEF_ID;
+ if let hir::Node::Item(item) = cx.tcx.hir().get_by_def_id(parent_impl);
+ if let hir::ItemKind::Impl(impl_) = &item.kind;
+ then {
+ return impl_.of_trait.as_ref();
+ }
+ }
+ None
+}
+
+/// This method will return a tuple of the projection stack and the root of the expression,
+/// used in `can_mut_borrow_both`.
+///
+/// For example, if `e` represents `v[0].a.b[x]`,
+/// this method will return a tuple composed of a `Vec`
+/// containing the `Expr`s for `v[0], v[0].a, v[0].a.b, v[0].a.b[x]`
+/// and an `Expr` for the root of them, `v`.
+fn projection_stack<'a, 'hir>(mut e: &'a Expr<'hir>) -> (Vec<&'a Expr<'hir>>, &'a Expr<'hir>) {
+ let mut result = vec![];
+ let root = loop {
+ match e.kind {
+ ExprKind::Index(ep, _) | ExprKind::Field(ep, _) => {
+ result.push(e);
+ e = ep;
+ },
+ _ => break e,
+ };
+ };
+ result.reverse();
+ (result, root)
+}
+
+/// Gets the mutability of the custom deref adjustment, if any.
+pub fn expr_custom_deref_adjustment(cx: &LateContext<'_>, e: &Expr<'_>) -> Option<Mutability> {
+ cx.typeck_results()
+ .expr_adjustments(e)
+ .iter()
+ .find_map(|a| match a.kind {
+ Adjust::Deref(Some(d)) => Some(Some(d.mutbl)),
+ Adjust::Deref(None) => None,
+ _ => Some(None),
+ })
+ .and_then(|x| x)
+}
+
+/// Checks if two expressions can be mutably borrowed simultaneously
+/// and they aren't dependent on borrowing the same thing twice
+pub fn can_mut_borrow_both(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>) -> bool {
+ let (s1, r1) = projection_stack(e1);
+ let (s2, r2) = projection_stack(e2);
+ if !eq_expr_value(cx, r1, r2) {
+ return true;
+ }
+ if expr_custom_deref_adjustment(cx, r1).is_some() || expr_custom_deref_adjustment(cx, r2).is_some() {
+ return false;
+ }
+
+ for (x1, x2) in s1.iter().zip(s2.iter()) {
+ if expr_custom_deref_adjustment(cx, x1).is_some() || expr_custom_deref_adjustment(cx, x2).is_some() {
+ return false;
+ }
+
+ match (&x1.kind, &x2.kind) {
+ (ExprKind::Field(_, i1), ExprKind::Field(_, i2)) => {
+ if i1 != i2 {
+ return true;
+ }
+ },
+ (ExprKind::Index(_, i1), ExprKind::Index(_, i2)) => {
+ if !eq_expr_value(cx, i1, i2) {
+ return false;
+ }
+ },
+ _ => return false,
+ }
+ }
+ false
+}
+
+/// Returns true if the `def_id` associated with the `path` is recognized as a "default-equivalent"
+/// constructor from the std library
+fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath<'_>) -> bool {
+ let std_types_symbols = &[
+ sym::String,
+ sym::Vec,
+ sym::VecDeque,
+ sym::LinkedList,
+ sym::HashMap,
+ sym::BTreeMap,
+ sym::HashSet,
+ sym::BTreeSet,
+ sym::BinaryHeap,
+ ];
+
+ if let QPath::TypeRelative(_, method) = path {
+ if method.ident.name == sym::new {
+ if let Some(impl_did) = cx.tcx.impl_of_method(def_id) {
+ if let Some(adt) = cx.tcx.type_of(impl_did).ty_adt_def() {
+ return std_types_symbols
+ .iter()
+ .any(|&symbol| cx.tcx.is_diagnostic_item(symbol, adt.did()));
+ }
+ }
+ }
+ }
+ false
+}
+
+/// Returns true if the expr is equal to `Default::default` when evaluated.
+pub fn is_default_equivalent_call(cx: &LateContext<'_>, repl_func: &Expr<'_>) -> bool {
+ if_chain! {
+ if let hir::ExprKind::Path(ref repl_func_qpath) = repl_func.kind;
+ if let Some(repl_def_id) = cx.qpath_res(repl_func_qpath, repl_func.hir_id).opt_def_id();
+ if is_diag_trait_item(cx, repl_def_id, sym::Default)
+ || is_default_equivalent_ctor(cx, repl_def_id, repl_func_qpath);
+ then { true } else { false }
+ }
+}
+
+/// Returns true if the expr is equal to `Default::default()` of its type when evaluated.
+/// It doesn't cover all cases, for example indirect function calls (some std
+/// functions are supported), but it is the best we have.
+pub fn is_default_equivalent(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ match &e.kind {
+ ExprKind::Lit(lit) => match lit.node {
+ LitKind::Bool(false) | LitKind::Int(0, _) => true,
+ LitKind::Str(s, _) => s.is_empty(),
+ _ => false,
+ },
+ ExprKind::Tup(items) | ExprKind::Array(items) => items.iter().all(|x| is_default_equivalent(cx, x)),
+ ExprKind::Repeat(x, ArrayLen::Body(len)) => if_chain! {
+ if let ExprKind::Lit(ref const_lit) = cx.tcx.hir().body(len.body).value.kind;
+ if let LitKind::Int(v, _) = const_lit.node;
+ if v <= 32 && is_default_equivalent(cx, x);
+ then {
+ true
+ }
+ else {
+ false
+ }
+ },
+ ExprKind::Call(repl_func, _) => is_default_equivalent_call(cx, repl_func),
+ ExprKind::Path(qpath) => is_lang_ctor(cx, qpath, OptionNone),
+ ExprKind::AddrOf(rustc_hir::BorrowKind::Ref, _, expr) => matches!(expr.kind, ExprKind::Array([])),
+ _ => false,
+ }
+}
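+
+// Examples (editorial): with the rules above,
+//
+//     is_default_equivalent(cx, e)
+//
+// holds for expressions `e` such as `0`, `false`, `""`, `[0; 4]`, `Vec::new()`,
+// `String::new()` and `None`.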
+
+/// Checks if the top level expression can be moved into a closure as is.
+/// Currently checks for:
+/// * Break/Continue outside the given loop HIR ids.
+/// * Yield/Return statements.
+/// * Inline assembly.
+/// * Usages of a field of a local where the type of the local can be partially moved.
+///
+/// For example, given the following function:
+///
+/// ```
+/// fn f<'a>(iter: &mut impl Iterator<Item = (usize, &'a mut String)>) {
+/// for item in iter {
+/// let s = item.1;
+/// if item.0 > 10 {
+/// continue;
+/// } else {
+/// s.clear();
+/// }
+/// }
+/// }
+/// ```
+///
+/// When called on the expression `item.0` this will return false unless the local `item` is in the
+/// `ignore_locals` set. The type `(usize, &mut String)` can have the second element moved, so it
+/// isn't always safe to move into a closure when only a single field is needed.
+///
+/// When called on the `continue` expression this will return false unless the outer loop expression
+/// is in the `loop_ids` set.
+///
+/// Note that this check is not recursive, so passing the `if` expression will always return true
+/// even though sub-expressions might return false.
+pub fn can_move_expr_to_closure_no_visit<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ loop_ids: &[HirId],
+ ignore_locals: &HirIdSet,
+) -> bool {
+ match expr.kind {
+ ExprKind::Break(Destination { target_id: Ok(id), .. }, _)
+ | ExprKind::Continue(Destination { target_id: Ok(id), .. })
+ if loop_ids.contains(&id) =>
+ {
+ true
+ },
+ ExprKind::Break(..)
+ | ExprKind::Continue(_)
+ | ExprKind::Ret(_)
+ | ExprKind::Yield(..)
+ | ExprKind::InlineAsm(_) => false,
+ // Accessing a field of a local value can only be done if the type isn't
+ // partially moved.
+ ExprKind::Field(
+ &Expr {
+ hir_id,
+ kind:
+ ExprKind::Path(QPath::Resolved(
+ _,
+ Path {
+ res: Res::Local(local_id),
+ ..
+ },
+ )),
+ ..
+ },
+ _,
+ ) if !ignore_locals.contains(local_id) && can_partially_move_ty(cx, cx.typeck_results().node_type(hir_id)) => {
+ // TODO: check if the local has been partially moved. Assume it has for now.
+ false
+ },
+ _ => true,
+ }
+}
+
+/// How a local is captured by a closure
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum CaptureKind {
+ Value,
+ Ref(Mutability),
+}
+impl CaptureKind {
+ pub fn is_imm_ref(self) -> bool {
+ self == Self::Ref(Mutability::Not)
+ }
+}
+impl std::ops::BitOr for CaptureKind {
+ type Output = Self;
+ fn bitor(self, rhs: Self) -> Self::Output {
+ match (self, rhs) {
+ (CaptureKind::Value, _) | (_, CaptureKind::Value) => CaptureKind::Value,
+ (CaptureKind::Ref(Mutability::Mut), CaptureKind::Ref(_))
+ | (CaptureKind::Ref(_), CaptureKind::Ref(Mutability::Mut)) => CaptureKind::Ref(Mutability::Mut),
+ (CaptureKind::Ref(Mutability::Not), CaptureKind::Ref(Mutability::Not)) => CaptureKind::Ref(Mutability::Not),
+ }
+ }
+}
+impl std::ops::BitOrAssign for CaptureKind {
+ fn bitor_assign(&mut self, rhs: Self) {
+ *self = *self | rhs;
+ }
+}
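+
+// Example (editorial): the `BitOr` impl above merges capture requirements, so
+//
+//     CaptureKind::Ref(Mutability::Not) | CaptureKind::Ref(Mutability::Mut)
+//
+// is `CaptureKind::Ref(Mutability::Mut)`, and any combination involving
+// `CaptureKind::Value` is `CaptureKind::Value`.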
+
+/// Given an expression referencing a local, determines how it would be captured in a closure.
+/// Note that as this will walk up to parent expressions until the capture can be determined, it
+/// should only be used while making a closure somewhere a value is consumed, e.g. a block, match
+/// arm, or function argument (other than a receiver).
+pub fn capture_local_usage<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'_>) -> CaptureKind {
+ fn pat_capture_kind(cx: &LateContext<'_>, pat: &Pat<'_>) -> CaptureKind {
+ let mut capture = CaptureKind::Ref(Mutability::Not);
+ pat.each_binding_or_first(&mut |_, id, span, _| match cx
+ .typeck_results()
+ .extract_binding_mode(cx.sess(), id, span)
+ .unwrap()
+ {
+ BindingMode::BindByValue(_) if !is_copy(cx, cx.typeck_results().node_type(id)) => {
+ capture = CaptureKind::Value;
+ },
+ BindingMode::BindByReference(Mutability::Mut) if capture != CaptureKind::Value => {
+ capture = CaptureKind::Ref(Mutability::Mut);
+ },
+ _ => (),
+ });
+ capture
+ }
+
+ debug_assert!(matches!(
+ e.kind,
+ ExprKind::Path(QPath::Resolved(None, Path { res: Res::Local(_), .. }))
+ ));
+
+ let mut child_id = e.hir_id;
+ let mut capture = CaptureKind::Value;
+ let mut capture_expr_ty = e;
+
+ for (parent_id, parent) in cx.tcx.hir().parent_iter(e.hir_id) {
+ if let [
+ Adjustment {
+ kind: Adjust::Deref(_) | Adjust::Borrow(AutoBorrow::Ref(..)),
+ target,
+ },
+ ref adjust @ ..,
+ ] = *cx
+ .typeck_results()
+ .adjustments()
+ .get(child_id)
+ .map_or(&[][..], |x| &**x)
+ {
+ if let rustc_ty::RawPtr(TypeAndMut { mutbl: mutability, .. }) | rustc_ty::Ref(_, _, mutability) =
+ *adjust.last().map_or(target, |a| a.target).kind()
+ {
+ return CaptureKind::Ref(mutability);
+ }
+ }
+
+ match parent {
+ Node::Expr(e) => match e.kind {
+ ExprKind::AddrOf(_, mutability, _) => return CaptureKind::Ref(mutability),
+ ExprKind::Index(..) | ExprKind::Unary(UnOp::Deref, _) => capture = CaptureKind::Ref(Mutability::Not),
+ ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) if lhs.hir_id == child_id => {
+ return CaptureKind::Ref(Mutability::Mut);
+ },
+ ExprKind::Field(..) => {
+ if capture == CaptureKind::Value {
+ capture_expr_ty = e;
+ }
+ },
+ ExprKind::Let(let_expr) => {
+ let mutability = match pat_capture_kind(cx, let_expr.pat) {
+ CaptureKind::Value => Mutability::Not,
+ CaptureKind::Ref(m) => m,
+ };
+ return CaptureKind::Ref(mutability);
+ },
+ ExprKind::Match(_, arms, _) => {
+ let mut mutability = Mutability::Not;
+ for capture in arms.iter().map(|arm| pat_capture_kind(cx, arm.pat)) {
+ match capture {
+ CaptureKind::Value => break,
+ CaptureKind::Ref(Mutability::Mut) => mutability = Mutability::Mut,
+ CaptureKind::Ref(Mutability::Not) => (),
+ }
+ }
+ return CaptureKind::Ref(mutability);
+ },
+ _ => break,
+ },
+ Node::Local(l) => match pat_capture_kind(cx, l.pat) {
+ CaptureKind::Value => break,
+ capture @ CaptureKind::Ref(_) => return capture,
+ },
+ _ => break,
+ }
+
+ child_id = parent_id;
+ }
+
+ if capture == CaptureKind::Value && is_copy(cx, cx.typeck_results().expr_ty(capture_expr_ty)) {
+ // Copy types are never automatically captured by value.
+ CaptureKind::Ref(Mutability::Not)
+ } else {
+ capture
+ }
+}
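+
+// Editor's note: hypothetical sketch, not part of the original sources. For a non-`Copy`
+// local `v`, a use such as `v.len()` would typically be reported as
+// `CaptureKind::Ref(Mutability::Not)` (the receiver is auto-borrowed immutably),
+// `v.push(..)` as `CaptureKind::Ref(Mutability::Mut)`, and `drop(v)` as `CaptureKind::Value`.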
+
+/// Checks if the expression can be moved into a closure as-is. If so, this returns the list of
+/// required captures; otherwise, it returns `None`.
+pub fn can_move_expr_to_closure<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<HirIdMap<CaptureKind>> {
+ struct V<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+        /// Stack of potential break targets contained in the expression.
+ loops: Vec<HirId>,
+ /// Local variables created in the expression. These don't need to be captured.
+ locals: HirIdSet,
+ /// Whether this expression can be turned into a closure.
+ allow_closure: bool,
+ /// Locals which need to be captured, and whether they need to be by value, reference, or
+ /// mutable reference.
+ captures: HirIdMap<CaptureKind>,
+ }
+ impl<'tcx> Visitor<'tcx> for V<'_, 'tcx> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if !self.allow_closure {
+ return;
+ }
+
+ match e.kind {
+ ExprKind::Path(QPath::Resolved(None, &Path { res: Res::Local(l), .. })) => {
+ if !self.locals.contains(&l) {
+ let cap = capture_local_usage(self.cx, e);
+ self.captures.entry(l).and_modify(|e| *e |= cap).or_insert(cap);
+ }
+ },
+ ExprKind::Closure { .. } => {
+ let closure_id = self.cx.tcx.hir().local_def_id(e.hir_id);
+ for capture in self.cx.typeck_results().closure_min_captures_flattened(closure_id) {
+ let local_id = match capture.place.base {
+ PlaceBase::Local(id) => id,
+ PlaceBase::Upvar(var) => var.var_path.hir_id,
+ _ => continue,
+ };
+ if !self.locals.contains(&local_id) {
+ let capture = match capture.info.capture_kind {
+ UpvarCapture::ByValue => CaptureKind::Value,
+ UpvarCapture::ByRef(kind) => match kind {
+ BorrowKind::ImmBorrow => CaptureKind::Ref(Mutability::Not),
+ BorrowKind::UniqueImmBorrow | BorrowKind::MutBorrow => {
+ CaptureKind::Ref(Mutability::Mut)
+ },
+ },
+ };
+ self.captures
+ .entry(local_id)
+ .and_modify(|e| *e |= capture)
+ .or_insert(capture);
+ }
+ }
+ },
+ ExprKind::Loop(b, ..) => {
+ self.loops.push(e.hir_id);
+ self.visit_block(b);
+ self.loops.pop();
+ },
+ _ => {
+ self.allow_closure &= can_move_expr_to_closure_no_visit(self.cx, e, &self.loops, &self.locals);
+ walk_expr(self, e);
+ },
+ }
+ }
+
+ fn visit_pat(&mut self, p: &'tcx Pat<'tcx>) {
+ p.each_binding_or_first(&mut |_, id, _, _| {
+ self.locals.insert(id);
+ });
+ }
+ }
+
+ let mut v = V {
+ cx,
+ allow_closure: true,
+ loops: Vec::new(),
+ locals: HirIdSet::default(),
+ captures: HirIdMap::default(),
+ };
+ v.visit_expr(expr);
+ v.allow_closure.then_some(v.captures)
+}
+
+/// Returns the method names, argument lists, and method-name spans of the nested method calls
+/// that make up `expr`. The lists are ordered with the most recent call first.
+pub fn method_calls<'tcx>(
+ expr: &'tcx Expr<'tcx>,
+ max_depth: usize,
+) -> (Vec<Symbol>, Vec<&'tcx [Expr<'tcx>]>, Vec<Span>) {
+ let mut method_names = Vec::with_capacity(max_depth);
+ let mut arg_lists = Vec::with_capacity(max_depth);
+ let mut spans = Vec::with_capacity(max_depth);
+
+ let mut current = expr;
+ for _ in 0..max_depth {
+ if let ExprKind::MethodCall(path, args, _) = &current.kind {
+ if args.iter().any(|e| e.span.from_expansion()) {
+ break;
+ }
+ method_names.push(path.ident.name);
+ arg_lists.push(&**args);
+ spans.push(path.ident.span);
+ current = &args[0];
+ } else {
+ break;
+ }
+ }
+
+ (method_names, arg_lists, spans)
+}
+
+/// Matches an `Expr` against a chain of methods, and returns the matched `Expr`s.
+///
+/// For example, if `expr` represents the `.baz()` in `foo.bar().baz()`,
+/// `method_chain_args(expr, &["bar", "baz"])` will return a `Vec` containing the argument
+/// slices for `.bar()` and `.baz()`.
+pub fn method_chain_args<'a>(expr: &'a Expr<'_>, methods: &[&str]) -> Option<Vec<&'a [Expr<'a>]>> {
+ let mut current = expr;
+ let mut matched = Vec::with_capacity(methods.len());
+ for method_name in methods.iter().rev() {
+ // method chains are stored last -> first
+ if let ExprKind::MethodCall(path, args, _) = current.kind {
+ if path.ident.name.as_str() == *method_name {
+ if args.iter().any(|e| e.span.from_expansion()) {
+ return None;
+ }
+ matched.push(args); // build up `matched` backwards
+ current = &args[0]; // go to parent expression
+ } else {
+ return None;
+ }
+ } else {
+ return None;
+ }
+ }
+ // Reverse `matched` so that it is in the same order as `methods`.
+ matched.reverse();
+ Some(matched)
+}
+
+/// Returns `true` if the provided `def_id` is an entrypoint to a program.
+pub fn is_entrypoint_fn(cx: &LateContext<'_>, def_id: DefId) -> bool {
+ cx.tcx
+ .entry_fn(())
+ .map_or(false, |(entry_fn_def_id, _)| def_id == entry_fn_def_id)
+}
+
+/// Returns `true` if the expression is in the program's `#[panic_handler]`.
+pub fn is_in_panic_handler(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ let parent = cx.tcx.hir().get_parent_item(e.hir_id);
+ Some(parent.to_def_id()) == cx.tcx.lang_items().panic_impl()
+}
+
+/// Gets the name of the item the expression is in, if available.
+pub fn get_item_name(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<Symbol> {
+ let parent_id = cx.tcx.hir().get_parent_item(expr.hir_id);
+ match cx.tcx.hir().find_by_def_id(parent_id) {
+ Some(
+ Node::Item(Item { ident, .. })
+ | Node::TraitItem(TraitItem { ident, .. })
+ | Node::ImplItem(ImplItem { ident, .. }),
+ ) => Some(ident.name),
+ _ => None,
+ }
+}
+
+pub struct ContainsName {
+ pub name: Symbol,
+ pub result: bool,
+}
+
+impl<'tcx> Visitor<'tcx> for ContainsName {
+ fn visit_name(&mut self, _: Span, name: Symbol) {
+ if self.name == name {
+ self.result = true;
+ }
+ }
+}
+
+/// Checks if an `Expr` contains a certain name.
+pub fn contains_name(name: Symbol, expr: &Expr<'_>) -> bool {
+ let mut cn = ContainsName { name, result: false };
+ cn.visit_expr(expr);
+ cn.result
+}
+
+/// Returns `true` if `expr` contains a return expression
+pub fn contains_return(expr: &hir::Expr<'_>) -> bool {
+ let mut found = false;
+ expr_visitor_no_bodies(|expr| {
+ if !found {
+ if let hir::ExprKind::Ret(..) = &expr.kind {
+ found = true;
+ }
+ }
+ !found
+ })
+ .visit_expr(expr);
+ found
+}
+
+/// Extends the span to the beginning of the span's line, including leading whitespace.
+///
+/// ```rust
+/// let x = ();
+/// // ^^
+/// // will be converted to
+/// let x = ();
+/// // ^^^^^^^^^^^^^^
+/// ```
+fn line_span<T: LintContext>(cx: &T, span: Span) -> Span {
+ let span = original_sp(span, DUMMY_SP);
+ let source_map_and_line = cx.sess().source_map().lookup_line(span.lo()).unwrap();
+ let line_no = source_map_and_line.line;
+ let line_start = source_map_and_line.sf.lines(|lines| lines[line_no]);
+ span.with_lo(line_start)
+}
+
+/// Gets the parent node, if any.
+pub fn get_parent_node(tcx: TyCtxt<'_>, id: HirId) -> Option<Node<'_>> {
+ tcx.hir().parent_iter(id).next().map(|(_, node)| node)
+}
+
+/// Gets the parent expression, if any. This is useful for constraining a lint.
+pub fn get_parent_expr<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
+ get_parent_expr_for_hir(cx, e.hir_id)
+}
+
+/// Retrieves the parent of the given `HirId` if it is an expression. This is useful for
+/// constraining lints.
+pub fn get_parent_expr_for_hir<'tcx>(cx: &LateContext<'tcx>, hir_id: hir::HirId) -> Option<&'tcx Expr<'tcx>> {
+ match get_parent_node(cx.tcx, hir_id) {
+ Some(Node::Expr(parent)) => Some(parent),
+ _ => None,
+ }
+}
+
+pub fn get_enclosing_block<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<&'tcx Block<'tcx>> {
+ let map = &cx.tcx.hir();
+ let enclosing_node = map
+ .get_enclosing_scope(hir_id)
+ .and_then(|enclosing_id| map.find(enclosing_id));
+ enclosing_node.and_then(|node| match node {
+ Node::Block(block) => Some(block),
+ Node::Item(&Item {
+ kind: ItemKind::Fn(_, _, eid),
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Fn(_, eid),
+ ..
+ }) => match cx.tcx.hir().body(eid).value.kind {
+ ExprKind::Block(block, _) => Some(block),
+ _ => None,
+ },
+ _ => None,
+ })
+}
+
+/// Gets the loop or closure enclosing the given expression, if any.
+pub fn get_enclosing_loop_or_multi_call_closure<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &Expr<'_>,
+) -> Option<&'tcx Expr<'tcx>> {
+ for (_, node) in cx.tcx.hir().parent_iter(expr.hir_id) {
+ match node {
+ Node::Expr(e) => match e.kind {
+ ExprKind::Closure { .. } => {
+ if let rustc_ty::Closure(_, subs) = cx.typeck_results().expr_ty(e).kind()
+ && subs.as_closure().kind() == ClosureKind::FnOnce
+ {
+ continue;
+ }
+ let is_once = walk_to_expr_usage(cx, e, |node, id| {
+ let Node::Expr(e) = node else {
+ return None;
+ };
+ match e.kind {
+ ExprKind::Call(f, _) if f.hir_id == id => Some(()),
+ ExprKind::Call(f, args) => {
+ let i = args.iter().position(|arg| arg.hir_id == id)?;
+ let sig = expr_sig(cx, f)?;
+ let predicates = sig
+ .predicates_id()
+ .map_or(cx.param_env, |id| cx.tcx.param_env(id))
+ .caller_bounds();
+ sig.input(i).and_then(|ty| {
+ ty_is_fn_once_param(cx.tcx, ty.skip_binder(), predicates).then_some(())
+ })
+ },
+ ExprKind::MethodCall(_, args, _) => {
+ let i = args.iter().position(|arg| arg.hir_id == id)?;
+ let id = cx.typeck_results().type_dependent_def_id(e.hir_id)?;
+ let ty = cx.tcx.fn_sig(id).skip_binder().inputs()[i];
+ ty_is_fn_once_param(cx.tcx, ty, cx.tcx.param_env(id).caller_bounds()).then_some(())
+ },
+ _ => None,
+ }
+ })
+ .is_some();
+ if !is_once {
+ return Some(e);
+ }
+ },
+ ExprKind::Loop(..) => return Some(e),
+ _ => (),
+ },
+ Node::Stmt(_) | Node::Block(_) | Node::Local(_) | Node::Arm(_) => (),
+ _ => break,
+ }
+ }
+ None
+}
+
+/// Gets the parent node if it's an impl block.
+pub fn get_parent_as_impl(tcx: TyCtxt<'_>, id: HirId) -> Option<&Impl<'_>> {
+ match tcx.hir().parent_iter(id).next() {
+ Some((
+ _,
+ Node::Item(Item {
+ kind: ItemKind::Impl(imp),
+ ..
+ }),
+ )) => Some(imp),
+ _ => None,
+ }
+}
+
+/// Removes blocks around an expression, only if the block contains just one expression
+/// and no statements. Unsafe blocks are not removed.
+///
+/// Examples:
+/// * `{}` -> `{}`
+/// * `{ x }` -> `x`
+/// * `{{ x }}` -> `x`
+/// * `{ x; }` -> `{ x; }`
+/// * `{ x; y }` -> `{ x; y }`
+/// * `{ unsafe { x } }` -> `unsafe { x }`
+pub fn peel_blocks<'a>(mut expr: &'a Expr<'a>) -> &'a Expr<'a> {
+ while let ExprKind::Block(
+ Block {
+ stmts: [],
+ expr: Some(inner),
+ rules: BlockCheckMode::DefaultBlock,
+ ..
+ },
+ _,
+ ) = expr.kind
+ {
+ expr = inner;
+ }
+ expr
+}
+
+/// Removes blocks around an expression, only if the block contains just one expression
+/// or just one expression statement with a semicolon. Unsafe blocks are not removed.
+///
+/// Examples:
+/// * `{}` -> `{}`
+/// * `{ x }` -> `x`
+/// * `{ x; }` -> `x`
+/// * `{{ x; }}` -> `x`
+/// * `{ x; y }` -> `{ x; y }`
+/// * `{ unsafe { x } }` -> `unsafe { x }`
+pub fn peel_blocks_with_stmt<'a>(mut expr: &'a Expr<'a>) -> &'a Expr<'a> {
+ while let ExprKind::Block(
+ Block {
+ stmts: [],
+ expr: Some(inner),
+ rules: BlockCheckMode::DefaultBlock,
+ ..
+ }
+ | Block {
+ stmts:
+ [
+ Stmt {
+ kind: StmtKind::Expr(inner) | StmtKind::Semi(inner),
+ ..
+ },
+ ],
+ expr: None,
+ rules: BlockCheckMode::DefaultBlock,
+ ..
+ },
+ _,
+ ) = expr.kind
+ {
+ expr = inner;
+ }
+ expr
+}
+
+/// Checks if the given expression is the else clause of either an `if` or `if let` expression.
+pub fn is_else_clause(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
+ let mut iter = tcx.hir().parent_iter(expr.hir_id);
+ match iter.next() {
+ Some((
+ _,
+ Node::Expr(Expr {
+ kind: ExprKind::If(_, _, Some(else_expr)),
+ ..
+ }),
+ )) => else_expr.hir_id == expr.hir_id,
+ _ => false,
+ }
+}
+
+/// Checks whether the given expression is a constant integer of the given value.
+/// Unlike `is_integer_literal`, this version does constant folding.
+pub fn is_integer_const(cx: &LateContext<'_>, e: &Expr<'_>, value: u128) -> bool {
+ if is_integer_literal(e, value) {
+ return true;
+ }
+ let enclosing_body = cx.tcx.hir().enclosing_body_owner(e.hir_id);
+ if let Some((Constant::Int(v), _)) = constant(cx, cx.tcx.typeck(enclosing_body), e) {
+ return value == v;
+ }
+ false
+}
+
+/// Checks whether the given expression is a constant literal of the given value.
+pub fn is_integer_literal(expr: &Expr<'_>, value: u128) -> bool {
+ // FIXME: use constant folding
+ if let ExprKind::Lit(ref spanned) = expr.kind {
+ if let LitKind::Int(v, _) = spanned.node {
+ return v == value;
+ }
+ }
+ false
+}
+
+/// Returns `true` if the given `Expr` has been coerced before.
+///
+/// Examples of coercions can be found in the Nomicon at
+/// <https://doc.rust-lang.org/nomicon/coercions.html>.
+///
+/// See `rustc_middle::ty::adjustment::Adjustment` and `rustc_typeck::check::coercion` for more
+/// information on adjustments and coercions.
+pub fn is_adjusted(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
+ cx.typeck_results().adjustments().get(e.hir_id).is_some()
+}
+
+/// Returns the pre-expansion span if this comes from an expansion of the
+/// macro `name`.
+/// See also [`is_direct_expn_of`].
+#[must_use]
+pub fn is_expn_of(mut span: Span, name: &str) -> Option<Span> {
+ loop {
+ if span.from_expansion() {
+ let data = span.ctxt().outer_expn_data();
+ let new_span = data.call_site;
+
+ if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind {
+ if mac_name.as_str() == name {
+ return Some(new_span);
+ }
+ }
+
+ span = new_span;
+ } else {
+ return None;
+ }
+ }
+}
+
+/// Returns the pre-expansion span if the span directly comes from an expansion
+/// of the macro `name`.
+/// The difference with [`is_expn_of`] is that in
+/// ```rust
+/// # macro_rules! foo { ($name:tt!$args:tt) => { $name!$args } }
+/// # macro_rules! bar { ($e:expr) => { $e } }
+/// foo!(bar!(42));
+/// ```
+/// `42` is considered expanded from `foo!` and `bar!` by `is_expn_of` but only
+/// from `bar!` by `is_direct_expn_of`.
+#[must_use]
+pub fn is_direct_expn_of(span: Span, name: &str) -> Option<Span> {
+ if span.from_expansion() {
+ let data = span.ctxt().outer_expn_data();
+ let new_span = data.call_site;
+
+ if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind {
+ if mac_name.as_str() == name {
+ return Some(new_span);
+ }
+ }
+ }
+
+ None
+}
+
+/// Convenience function to get the return type of a function.
+pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId) -> Ty<'tcx> {
+ let fn_def_id = cx.tcx.hir().local_def_id(fn_item);
+ let ret_ty = cx.tcx.fn_sig(fn_def_id).output();
+ cx.tcx.erase_late_bound_regions(ret_ty)
+}
+
+/// Convenience function to get the nth argument type of a function.
+pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId, nth: usize) -> Ty<'tcx> {
+ let fn_def_id = cx.tcx.hir().local_def_id(fn_item);
+ let arg = cx.tcx.fn_sig(fn_def_id).input(nth);
+ cx.tcx.erase_late_bound_regions(arg)
+}
+
+/// Checks if an expression is constructing a tuple-like enum variant or struct, or is calling a
+/// function that is promotable to a constant.
+pub fn is_ctor_or_promotable_const_function(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if let ExprKind::Call(fun, _) = expr.kind {
+ if let ExprKind::Path(ref qp) = fun.kind {
+ let res = cx.qpath_res(qp, fun.hir_id);
+ return match res {
+ def::Res::Def(DefKind::Variant | DefKind::Ctor(..), ..) => true,
+ def::Res::Def(_, def_id) => cx.tcx.is_promotable_const_fn(def_id),
+ _ => false,
+ };
+ }
+ }
+ false
+}
+
+/// Returns `true` if a pattern is refutable.
+// TODO: should be implemented using rustc/mir_build/thir machinery
+pub fn is_refutable(cx: &LateContext<'_>, pat: &Pat<'_>) -> bool {
+ fn is_enum_variant(cx: &LateContext<'_>, qpath: &QPath<'_>, id: HirId) -> bool {
+ matches!(
+ cx.qpath_res(qpath, id),
+ def::Res::Def(DefKind::Variant, ..) | Res::Def(DefKind::Ctor(def::CtorOf::Variant, _), _)
+ )
+ }
+
+ fn are_refutable<'a, I: IntoIterator<Item = &'a Pat<'a>>>(cx: &LateContext<'_>, i: I) -> bool {
+ i.into_iter().any(|pat| is_refutable(cx, pat))
+ }
+
+ match pat.kind {
+ PatKind::Wild => false,
+ PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)),
+ PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
+ PatKind::Lit(..) | PatKind::Range(..) => true,
+ PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
+ PatKind::Or(pats) => {
+ // TODO: should be the honest check, that pats is exhaustive set
+ are_refutable(cx, pats)
+ },
+ PatKind::Tuple(pats, _) => are_refutable(cx, pats),
+ PatKind::Struct(ref qpath, fields, _) => {
+ is_enum_variant(cx, qpath, pat.hir_id) || are_refutable(cx, fields.iter().map(|field| field.pat))
+ },
+ PatKind::TupleStruct(ref qpath, pats, _) => is_enum_variant(cx, qpath, pat.hir_id) || are_refutable(cx, pats),
+ PatKind::Slice(head, middle, tail) => {
+ match &cx.typeck_results().node_type(pat.hir_id).kind() {
+ rustc_ty::Slice(..) => {
+ // [..] is the only irrefutable slice pattern.
+ !head.is_empty() || middle.is_none() || !tail.is_empty()
+ },
+ rustc_ty::Array(..) => are_refutable(cx, head.iter().chain(middle).chain(tail.iter())),
+ _ => {
+ // unreachable!()
+ true
+ },
+ }
+ },
+ }
+}
+
+/// If the pattern is an `or` pattern, call the function once for each sub pattern. Otherwise, call
+/// the function once on the given pattern.
+pub fn recurse_or_patterns<'tcx, F: FnMut(&'tcx Pat<'tcx>)>(pat: &'tcx Pat<'tcx>, mut f: F) {
+ if let PatKind::Or(pats) = pat.kind {
+ pats.iter().for_each(f);
+ } else {
+ f(pat);
+ }
+}
+
+pub fn is_self(slf: &Param<'_>) -> bool {
+ if let PatKind::Binding(.., name, _) = slf.pat.kind {
+ name.name == kw::SelfLower
+ } else {
+ false
+ }
+}
+
+pub fn is_self_ty(slf: &hir::Ty<'_>) -> bool {
+ if let TyKind::Path(QPath::Resolved(None, path)) = slf.kind {
+ if let Res::SelfTy { .. } = path.res {
+ return true;
+ }
+ }
+ false
+}
+
+pub fn iter_input_pats<'tcx>(decl: &FnDecl<'_>, body: &'tcx Body<'_>) -> impl Iterator<Item = &'tcx Param<'tcx>> {
+ (0..decl.inputs.len()).map(move |i| &body.params[i])
+}
+
+/// Checks if a given expression is a match expression expanded from the `?`
+/// operator or the `try` macro.
+pub fn is_try<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
+ fn is_ok(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
+ if_chain! {
+ if let PatKind::TupleStruct(ref path, pat, None) = arm.pat.kind;
+ if is_lang_ctor(cx, path, ResultOk);
+ if let PatKind::Binding(_, hir_id, _, None) = pat[0].kind;
+ if path_to_local_id(arm.body, hir_id);
+ then {
+ return true;
+ }
+ }
+ false
+ }
+
+ fn is_err(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
+ if let PatKind::TupleStruct(ref path, _, _) = arm.pat.kind {
+ is_lang_ctor(cx, path, ResultErr)
+ } else {
+ false
+ }
+ }
+
+ if let ExprKind::Match(_, arms, ref source) = expr.kind {
+ // desugared from a `?` operator
+ if *source == MatchSource::TryDesugar {
+ return Some(expr);
+ }
+
+ if_chain! {
+ if arms.len() == 2;
+ if arms[0].guard.is_none();
+ if arms[1].guard.is_none();
+ if (is_ok(cx, &arms[0]) && is_err(cx, &arms[1])) || (is_ok(cx, &arms[1]) && is_err(cx, &arms[0]));
+ then {
+ return Some(expr);
+ }
+ }
+ }
+
+ None
+}
+
+/// Returns `true` if the lint is allowed in the current context. This is useful for
+/// skipping long-running code when it's unnecessary.
+///
+/// This function should check the lint level for the same node that the lint will
+/// be emitted at. If the information is buffered to be emitted at a later point, please
+/// make sure to use `span_lint_hir` functions to emit the lint. This ensures that
+/// expectations at the checked nodes will be fulfilled.
+pub fn is_lint_allowed(cx: &LateContext<'_>, lint: &'static Lint, id: HirId) -> bool {
+ cx.tcx.lint_level_at_node(lint, id).0 == Level::Allow
+}
+
+pub fn strip_pat_refs<'hir>(mut pat: &'hir Pat<'hir>) -> &'hir Pat<'hir> {
+ while let PatKind::Ref(subpat, _) = pat.kind {
+ pat = subpat;
+ }
+ pat
+}
+
+pub fn int_bits(tcx: TyCtxt<'_>, ity: rustc_ty::IntTy) -> u64 {
+ Integer::from_int_ty(&tcx, ity).size().bits()
+}
+
+#[expect(clippy::cast_possible_wrap)]
+/// Turns a constant integer's byte representation into an `i128` by sign-extending it.
+pub fn sext(tcx: TyCtxt<'_>, u: u128, ity: rustc_ty::IntTy) -> i128 {
+ let amt = 128 - int_bits(tcx, ity);
+ ((u as i128) << amt) >> amt
+}
+
+#[expect(clippy::cast_sign_loss)]
+/// clip unused bytes
+pub fn unsext(tcx: TyCtxt<'_>, u: i128, ity: rustc_ty::IntTy) -> u128 {
+ let amt = 128 - int_bits(tcx, ity);
+ ((u as u128) << amt) >> amt
+}
+
+/// clip unused bytes
+pub fn clip(tcx: TyCtxt<'_>, u: u128, ity: rustc_ty::UintTy) -> u128 {
+ let bits = Integer::from_uint_ty(&tcx, ity).size().bits();
+ let amt = 128 - bits;
+ (u << amt) >> amt
+}
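+
+// Editor's note: illustrative arithmetic, not part of the original sources. For `i8`,
+// `int_bits` is 8, so `sext` shifts by 120 bits up and back down to sign-extend:
+// `sext(tcx, 0xFF, IntTy::I8) == -1`. Similarly, `clip(tcx, 0x1FF, UintTy::U8) == 0xFF`
+// discards everything above the type's width.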
+
+pub fn has_attr(attrs: &[ast::Attribute], symbol: Symbol) -> bool {
+ attrs.iter().any(|attr| attr.has_name(symbol))
+}
+
+pub fn any_parent_has_attr(tcx: TyCtxt<'_>, node: HirId, symbol: Symbol) -> bool {
+ let map = &tcx.hir();
+ let mut prev_enclosing_node = None;
+ let mut enclosing_node = node;
+ while Some(enclosing_node) != prev_enclosing_node {
+ if has_attr(map.attrs(enclosing_node), symbol) {
+ return true;
+ }
+ prev_enclosing_node = Some(enclosing_node);
+ enclosing_node = map.local_def_id_to_hir_id(map.get_parent_item(enclosing_node));
+ }
+
+ false
+}
+
+pub fn any_parent_is_automatically_derived(tcx: TyCtxt<'_>, node: HirId) -> bool {
+ any_parent_has_attr(tcx, node, sym::automatically_derived)
+}
+
+/// Matches a function call with the given path and returns the arguments.
+///
+/// Usage:
+///
+/// ```rust,ignore
+/// if let Some(args) = match_function_call(cx, cmp_max_call, &paths::CMP_MAX) {
+///     // ...
+/// }
+/// ```
+pub fn match_function_call<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ path: &[&str],
+) -> Option<&'tcx [Expr<'tcx>]> {
+ if_chain! {
+ if let ExprKind::Call(fun, args) = expr.kind;
+ if let ExprKind::Path(ref qpath) = fun.kind;
+ if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
+ if match_def_path(cx, fun_def_id, path);
+ then {
+ return Some(args);
+ }
+ };
+ None
+}
+
+/// Checks if the given `DefId` matches any of the paths. Returns the index of matching path, if
+/// any.
+///
+/// Please use `tcx.get_diagnostic_name` if the targets are all diagnostic items.
+pub fn match_any_def_paths(cx: &LateContext<'_>, did: DefId, paths: &[&[&str]]) -> Option<usize> {
+ let search_path = cx.get_def_path(did);
+ paths
+ .iter()
+ .position(|p| p.iter().map(|x| Symbol::intern(x)).eq(search_path.iter().copied()))
+}
+
+/// Checks if the given `DefId` matches the path.
+pub fn match_def_path<'tcx>(cx: &LateContext<'tcx>, did: DefId, syms: &[&str]) -> bool {
+ // We should probably move to Symbols in Clippy as well rather than interning every time.
+ let path = cx.get_def_path(did);
+ syms.iter().map(|x| Symbol::intern(x)).eq(path.iter().copied())
+}
+
+/// Checks if the given `DefId` matches the `libc` item.
+pub fn match_libc_symbol(cx: &LateContext<'_>, did: DefId, name: &str) -> bool {
+ let path = cx.get_def_path(did);
+ // libc is meant to be used as a flat list of names, but they're all actually defined in different
+ // modules based on the target platform. Ignore everything but crate name and the item name.
+ path.first().map_or(false, |s| s.as_str() == "libc") && path.last().map_or(false, |s| s.as_str() == name)
+}
+
+/// Returns the list of condition expressions and the list of blocks in a
+/// sequence of `if/else`.
+/// E.g., this returns `([a, b], [c, d, e])` for the expression
+/// `if a { c } else if b { d } else { e }`.
+pub fn if_sequence<'tcx>(mut expr: &'tcx Expr<'tcx>) -> (Vec<&'tcx Expr<'tcx>>, Vec<&'tcx Block<'tcx>>) {
+ let mut conds = Vec::new();
+ let mut blocks: Vec<&Block<'_>> = Vec::new();
+
+ while let Some(higher::IfOrIfLet { cond, then, r#else }) = higher::IfOrIfLet::hir(expr) {
+ conds.push(cond);
+ if let ExprKind::Block(block, _) = then.kind {
+ blocks.push(block);
+ } else {
+ panic!("ExprKind::If node is not an ExprKind::Block");
+ }
+
+ if let Some(else_expr) = r#else {
+ expr = else_expr;
+ } else {
+ break;
+ }
+ }
+
+ // final `else {..}`
+ if !blocks.is_empty() {
+ if let ExprKind::Block(block, _) = expr.kind {
+ blocks.push(block);
+ }
+ }
+
+ (conds, blocks)
+}
+
+/// Checks if the given function kind is an async function.
+pub fn is_async_fn(kind: FnKind<'_>) -> bool {
+ matches!(kind, FnKind::ItemFn(_, _, header) if header.asyncness == IsAsync::Async)
+}
+
+/// Peels away all the compiler-generated code surrounding the body of an async function.
+pub fn get_async_fn_body<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'tcx Expr<'tcx>> {
+ if let ExprKind::Call(
+ _,
+ &[
+ Expr {
+ kind: ExprKind::Closure(&Closure { body, .. }),
+ ..
+ },
+ ],
+ ) = body.value.kind
+ {
+ if let ExprKind::Block(
+ Block {
+ stmts: [],
+ expr:
+ Some(Expr {
+ kind: ExprKind::DropTemps(expr),
+ ..
+ }),
+ ..
+ },
+ _,
+ ) = tcx.hir().body(body).value.kind
+ {
+ return Some(expr);
+ }
+ };
+ None
+}
+
+/// Checks if `expr` calls a method or function that has the `#[must_use]` attribute.
+pub fn is_must_use_func_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let did = match expr.kind {
+ ExprKind::Call(path, _) => if_chain! {
+ if let ExprKind::Path(ref qpath) = path.kind;
+ if let def::Res::Def(_, did) = cx.qpath_res(qpath, path.hir_id);
+ then {
+ Some(did)
+ } else {
+ None
+ }
+ },
+ ExprKind::MethodCall(..) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
+ _ => None,
+ };
+
+ did.map_or(false, |did| cx.tcx.has_attr(did, sym::must_use))
+}
+
+/// Checks if an expression represents the identity function.
+/// Only examines closures and `std::convert::identity`.
+pub fn is_expr_identity_function(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ /// Checks if a function's body represents the identity function. Looks for bodies of the form:
+ /// * `|x| x`
+ /// * `|x| return x`
+ /// * `|x| { return x }`
+ /// * `|x| { return x; }`
+ fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool {
+ let id = if_chain! {
+ if let [param] = func.params;
+ if let PatKind::Binding(_, id, _, _) = param.pat.kind;
+ then {
+ id
+ } else {
+ return false;
+ }
+ };
+
+ let mut expr = &func.value;
+ loop {
+ match expr.kind {
+ #[rustfmt::skip]
+ ExprKind::Block(&Block { stmts: [], expr: Some(e), .. }, _, )
+ | ExprKind::Ret(Some(e)) => expr = e,
+ #[rustfmt::skip]
+ ExprKind::Block(&Block { stmts: [stmt], expr: None, .. }, _) => {
+ if_chain! {
+ if let StmtKind::Semi(e) | StmtKind::Expr(e) = stmt.kind;
+ if let ExprKind::Ret(Some(ret_val)) = e.kind;
+ then {
+ expr = ret_val;
+ } else {
+ return false;
+ }
+ }
+ },
+ _ => return path_to_local_id(expr, id) && cx.typeck_results().expr_adjustments(expr).is_empty(),
+ }
+ }
+ }
+
+ match expr.kind {
+ ExprKind::Closure(&Closure { body, .. }) => is_body_identity_function(cx, cx.tcx.hir().body(body)),
+ _ => path_def_id(cx, expr).map_or(false, |id| match_def_path(cx, id, &paths::CONVERT_IDENTITY)),
+ }
+}
+
+/// Gets the node where an expression is either used, or its type is unified with another branch.
+/// Returns both the node and the `HirId` of the closest child node.
+pub fn get_expr_use_or_unification_node<'tcx>(tcx: TyCtxt<'tcx>, expr: &Expr<'_>) -> Option<(Node<'tcx>, HirId)> {
+ let mut child_id = expr.hir_id;
+ let mut iter = tcx.hir().parent_iter(child_id);
+ loop {
+ match iter.next() {
+ None => break None,
+ Some((id, Node::Block(_))) => child_id = id,
+ Some((id, Node::Arm(arm))) if arm.body.hir_id == child_id => child_id = id,
+ Some((_, Node::Expr(expr))) => match expr.kind {
+ ExprKind::Match(_, [arm], _) if arm.hir_id == child_id => child_id = expr.hir_id,
+ ExprKind::Block(..) | ExprKind::DropTemps(_) => child_id = expr.hir_id,
+ ExprKind::If(_, then_expr, None) if then_expr.hir_id == child_id => break None,
+ _ => break Some((Node::Expr(expr), child_id)),
+ },
+ Some((_, node)) => break Some((node, child_id)),
+ }
+ }
+}
+
+/// Checks if the result of an expression is used, or its type is unified with another branch.
+pub fn is_expr_used_or_unified(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
+ !matches!(
+ get_expr_use_or_unification_node(tcx, expr),
+ None | Some((
+ Node::Stmt(Stmt {
+ kind: StmtKind::Expr(_)
+ | StmtKind::Semi(_)
+ | StmtKind::Local(Local {
+ pat: Pat {
+ kind: PatKind::Wild,
+ ..
+ },
+ ..
+ }),
+ ..
+ }),
+ _
+ ))
+ )
+}
+
+/// Checks if the expression is the final expression returned from a block.
+pub fn is_expr_final_block_expr(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> bool {
+ matches!(get_parent_node(tcx, expr.hir_id), Some(Node::Block(..)))
+}
+
+pub fn std_or_core(cx: &LateContext<'_>) -> Option<&'static str> {
+ if !is_no_std_crate(cx) {
+ Some("std")
+ } else if !is_no_core_crate(cx) {
+ Some("core")
+ } else {
+ None
+ }
+}
+
+pub fn is_no_std_crate(cx: &LateContext<'_>) -> bool {
+ cx.tcx.hir().attrs(hir::CRATE_HIR_ID).iter().any(|attr| {
+ if let ast::AttrKind::Normal(ref attr, _) = attr.kind {
+ attr.path == sym::no_std
+ } else {
+ false
+ }
+ })
+}
+
+pub fn is_no_core_crate(cx: &LateContext<'_>) -> bool {
+ cx.tcx.hir().attrs(hir::CRATE_HIR_ID).iter().any(|attr| {
+ if let ast::AttrKind::Normal(ref attr, _) = attr.kind {
+ attr.path == sym::no_core
+ } else {
+ false
+ }
+ })
+}
+
+/// Checks if the parent of a HIR node is a trait implementation block.
+/// For example, `f` in
+/// ```rust
+/// # struct S;
+/// # trait Trait { fn f(); }
+/// impl Trait for S {
+/// fn f() {}
+/// }
+/// ```
+pub fn is_trait_impl_item(cx: &LateContext<'_>, hir_id: HirId) -> bool {
+ if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
+ matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }))
+ } else {
+ false
+ }
+}
+
+/// Check if it's even possible to satisfy the `where` clause for the item.
+///
+/// `trivial_bounds` feature allows functions with unsatisfiable bounds, for example:
+///
+/// ```ignore
+/// fn foo() where i32: Iterator {
+/// for _ in 2i32 {}
+/// }
+/// ```
+pub fn fn_has_unsatisfiable_preds(cx: &LateContext<'_>, did: DefId) -> bool {
+ use rustc_trait_selection::traits;
+ let predicates = cx
+ .tcx
+ .predicates_of(did)
+ .predicates
+ .iter()
+ .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None });
+ traits::impossible_predicates(
+ cx.tcx,
+ traits::elaborate_predicates(cx.tcx, predicates)
+ .map(|o| o.predicate)
+ .collect::<Vec<_>>(),
+ )
+}
+
+/// Returns the `DefId` of the callee if the given expression is a function or method call.
+pub fn fn_def_id(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<DefId> {
+ match &expr.kind {
+ ExprKind::MethodCall(..) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
+ ExprKind::Call(
+ Expr {
+ kind: ExprKind::Path(qpath),
+ hir_id: path_hir_id,
+ ..
+ },
+ ..,
+ ) => {
+ // Only return Fn-like DefIds, not the DefIds of statics/consts/etc that contain or
+ // deref to fn pointers, dyn Fn, impl Fn - #8850
+ if let Res::Def(DefKind::Fn | DefKind::Ctor(..) | DefKind::AssocFn, id) =
+ cx.typeck_results().qpath_res(qpath, *path_hir_id)
+ {
+ Some(id)
+ } else {
+ None
+ }
+ },
+ _ => None,
+ }
+}
+
+/// Returns `Some(String)`, where the `String` is a textual representation of the type encapsulated
+/// in the slice, iff the given expression is a slice of primitives (as defined in the
+/// `is_recursively_primitive_type` function), and `None` otherwise.
+pub fn is_slice_of_primitives(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<String> {
+ let expr_type = cx.typeck_results().expr_ty_adjusted(expr);
+ let expr_kind = expr_type.kind();
+ let is_primitive = match expr_kind {
+ rustc_ty::Slice(element_type) => is_recursively_primitive_type(*element_type),
+ rustc_ty::Ref(_, inner_ty, _) if matches!(inner_ty.kind(), &rustc_ty::Slice(_)) => {
+ if let rustc_ty::Slice(element_type) = inner_ty.kind() {
+ is_recursively_primitive_type(*element_type)
+ } else {
+ unreachable!()
+ }
+ },
+ _ => false,
+ };
+
+ if is_primitive {
+ // if we have wrappers like Array, Slice or Tuple, print these
+ // and get the type enclosed in the slice ref
+ match expr_type.peel_refs().walk().nth(1).unwrap().expect_ty().kind() {
+ rustc_ty::Slice(..) => return Some("slice".into()),
+ rustc_ty::Array(..) => return Some("array".into()),
+ rustc_ty::Tuple(..) => return Some("tuple".into()),
+ _ => {
+ // is_recursively_primitive_type() should have taken care
+ // of the rest and we can rely on the type that is found
+ let refs_peeled = expr_type.peel_refs();
+ return Some(refs_peeled.walk().last().unwrap().to_string());
+ },
+ }
+ }
+ None
+}
+
+/// Returns a list of all pairs `(a, b)` from `exprs` such that `eq(a, b)`.
+/// `hash` must be consistent with `eq`, i.e. values considered equal by `eq` must hash equally.
+pub fn search_same<T, Hash, Eq>(exprs: &[T], hash: Hash, eq: Eq) -> Vec<(&T, &T)>
+where
+ Hash: Fn(&T) -> u64,
+ Eq: Fn(&T, &T) -> bool,
+{
+ match exprs {
+ [a, b] if eq(a, b) => return vec![(a, b)],
+ _ if exprs.len() <= 2 => return vec![],
+ _ => {},
+ }
+
+ let mut match_expr_list: Vec<(&T, &T)> = Vec::new();
+
+ let mut map: UnhashMap<u64, Vec<&_>> =
+ UnhashMap::with_capacity_and_hasher(exprs.len(), BuildHasherDefault::default());
+
+ for expr in exprs {
+ match map.entry(hash(expr)) {
+ Entry::Occupied(mut o) => {
+ for o in o.get() {
+ if eq(o, expr) {
+ match_expr_list.push((o, expr));
+ }
+ }
+ o.get_mut().push(expr);
+ },
+ Entry::Vacant(v) => {
+ v.insert(vec![expr]);
+ },
+ }
+ }
+
+ match_expr_list
+}
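+
+// Editor's note: hypothetical usage sketch, not part of the original sources; `hash_arm` and
+// `eq_arm` are assumed helpers here. A call like
+//
+//     let pairs = search_same(arms, |a| hash_arm(cx, a), |a, b| eq_arm(cx, a, b));
+//
+// would report every pair of equivalent arms. The hash is only used to bucket candidates, so
+// it must never differ for two values the `eq` closure considers equal.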
+
+/// Peels off all references on the pattern. Returns the underlying pattern and the number of
+/// references removed.
+pub fn peel_hir_pat_refs<'a>(pat: &'a Pat<'a>) -> (&'a Pat<'a>, usize) {
+ fn peel<'a>(pat: &'a Pat<'a>, count: usize) -> (&'a Pat<'a>, usize) {
+ if let PatKind::Ref(pat, _) = pat.kind {
+ peel(pat, count + 1)
+ } else {
+ (pat, count)
+ }
+ }
+ peel(pat, 0)
+}
+
+/// Peels off expressions while the given closure returns `Some`.
+pub fn peel_hir_expr_while<'tcx>(
+ mut expr: &'tcx Expr<'tcx>,
+ mut f: impl FnMut(&'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>>,
+) -> &'tcx Expr<'tcx> {
+ while let Some(e) = f(expr) {
+ expr = e;
+ }
+ expr
+}
+
+/// Peels off up to the given number of references on the expression. Returns the underlying
+/// expression and the number of references removed.
+pub fn peel_n_hir_expr_refs<'a>(expr: &'a Expr<'a>, count: usize) -> (&'a Expr<'a>, usize) {
+ let mut remaining = count;
+ let e = peel_hir_expr_while(expr, |e| match e.kind {
+ ExprKind::AddrOf(ast::BorrowKind::Ref, _, e) if remaining != 0 => {
+ remaining -= 1;
+ Some(e)
+ },
+ _ => None,
+ });
+ (e, count - remaining)
+}
+
+/// Peels off all references on the expression. Returns the underlying expression and the number of
+/// references removed.
+pub fn peel_hir_expr_refs<'a>(expr: &'a Expr<'a>) -> (&'a Expr<'a>, usize) {
+ let mut count = 0;
+ let e = peel_hir_expr_while(expr, |e| match e.kind {
+ ExprKind::AddrOf(ast::BorrowKind::Ref, _, e) => {
+ count += 1;
+ Some(e)
+ },
+ _ => None,
+ });
+ (e, count)
+}
+
+/// Peels off all references on the type. Returns the underlying type and the number of references
+/// removed.
+pub fn peel_hir_ty_refs<'a>(mut ty: &'a hir::Ty<'a>) -> (&'a hir::Ty<'a>, usize) {
+ let mut count = 0;
+ loop {
+ match &ty.kind {
+ TyKind::Rptr(_, ref_ty) => {
+ ty = ref_ty.ty;
+ count += 1;
+ },
+ _ => break (ty, count),
+ }
+ }
+}
+
+/// Removes `AddrOf` operators (`&`) or deref operators (`*`), but only if a reference type is
+/// dereferenced. An overloaded deref such as `Vec` to slice would not be removed.
+pub fn peel_ref_operators<'hir>(cx: &LateContext<'_>, mut expr: &'hir Expr<'hir>) -> &'hir Expr<'hir> {
+ loop {
+ match expr.kind {
+ ExprKind::AddrOf(_, _, e) => expr = e,
+ ExprKind::Unary(UnOp::Deref, e) if cx.typeck_results().expr_ty(e).is_ref() => expr = e,
+ _ => break,
+ }
+ }
+ expr
+}
+
+pub fn is_hir_ty_cfg_dependant(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool {
+ if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind {
+ if let Res::Def(_, def_id) = path.res {
+ return cx.tcx.has_attr(def_id, sym::cfg) || cx.tcx.has_attr(def_id, sym::cfg_attr);
+ }
+ }
+ false
+}
+
+static TEST_ITEM_NAMES_CACHE: OnceLock<Mutex<FxHashMap<LocalDefId, Vec<Symbol>>>> = OnceLock::new();
+
+fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalDefId, f: impl Fn(&[Symbol]) -> bool) -> bool {
+ let cache = TEST_ITEM_NAMES_CACHE.get_or_init(|| Mutex::new(FxHashMap::default()));
+ let mut map: MutexGuard<'_, FxHashMap<LocalDefId, Vec<Symbol>>> = cache.lock().unwrap();
+ let value = map.entry(module);
+ match value {
+ Entry::Occupied(entry) => f(entry.get()),
+ Entry::Vacant(entry) => {
+ let mut names = Vec::new();
+ for id in tcx.hir().module_items(module) {
+ if matches!(tcx.def_kind(id.def_id), DefKind::Const)
+ && let item = tcx.hir().item(id)
+ && let ItemKind::Const(ty, _body) = item.kind {
+ if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind {
+ // We could also check for the type name `test::TestDescAndFn`
+ if let Res::Def(DefKind::Struct, _) = path.res {
+ let has_test_marker = tcx
+ .hir()
+ .attrs(item.hir_id())
+ .iter()
+ .any(|a| a.has_name(sym::rustc_test_marker));
+ if has_test_marker {
+ names.push(item.ident.name);
+ }
+ }
+ }
+ }
+ }
+ names.sort_unstable();
+ f(entry.insert(names))
+ },
+ }
+}
+
+/// Checks if the function containing the given `HirId` is a `#[test]` function
+///
+/// Note: Add `// compile-flags: --test` to UI tests with a `#[test]` function
+pub fn is_in_test_function(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
+ with_test_item_names(tcx, tcx.parent_module(id), |names| {
+ tcx.hir()
+ .parent_iter(id)
+ // Since you can nest functions we need to collect all until we leave
+ // function scope
+ .any(|(_id, node)| {
+ if let Node::Item(item) = node {
+ if let ItemKind::Fn(_, _, _) = item.kind {
+ // Note that we have sorted the item names in the visitor,
+ // so the binary_search gets the same as `contains`, but faster.
+ return names.binary_search(&item.ident.name).is_ok();
+ }
+ }
+ false
+ })
+ })
+}
+
+/// Checks if the item containing the given `HirId` has `#[cfg(test)]` attribute applied
+///
+/// Note: Add `// compile-flags: --test` to UI tests with a `#[cfg(test)]` function
+pub fn is_in_cfg_test(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
+ fn is_cfg_test(attr: &Attribute) -> bool {
+ if attr.has_name(sym::cfg)
+ && let Some(items) = attr.meta_item_list()
+ && let [item] = &*items
+ && item.has_name(sym::test)
+ {
+ true
+ } else {
+ false
+ }
+ }
+ tcx.hir()
+ .parent_iter(id)
+ .flat_map(|(parent_id, _)| tcx.hir().attrs(parent_id))
+ .any(is_cfg_test)
+}
+
+/// Checks whether the item either has the `test` attribute applied, or
+/// is a module with `test` in its name.
+///
+/// Note: Add `// compile-flags: --test` to UI tests with a `#[test]` function
+pub fn is_test_module_or_function(tcx: TyCtxt<'_>, item: &Item<'_>) -> bool {
+ is_in_test_function(tcx, item.hir_id())
+ || matches!(item.kind, ItemKind::Mod(..))
+ && item.ident.name.as_str().split('_').any(|a| a == "test" || a == "tests")
+}
+
+/// Walks the HIR tree from the given expression, up to the node where the value produced by the
+/// expression is consumed. Calls the function for every node encountered this way until it returns
+/// `Some`.
+///
+/// This allows walking through `if`, `match`, `break`, and block expressions to find where the
+/// value produced by the expression is consumed.
+pub fn walk_to_expr_usage<'tcx, T>(
+ cx: &LateContext<'tcx>,
+ e: &Expr<'tcx>,
+ mut f: impl FnMut(Node<'tcx>, HirId) -> Option<T>,
+) -> Option<T> {
+ let map = cx.tcx.hir();
+ let mut iter = map.parent_iter(e.hir_id);
+ let mut child_id = e.hir_id;
+
+ while let Some((parent_id, parent)) = iter.next() {
+ if let Some(x) = f(parent, child_id) {
+ return Some(x);
+ }
+ let parent = match parent {
+ Node::Expr(e) => e,
+ Node::Block(Block { expr: Some(body), .. }) | Node::Arm(Arm { body, .. }) if body.hir_id == child_id => {
+ child_id = parent_id;
+ continue;
+ },
+ Node::Arm(a) if a.body.hir_id == child_id => {
+ child_id = parent_id;
+ continue;
+ },
+ _ => return None,
+ };
+ match parent.kind {
+ ExprKind::If(child, ..) | ExprKind::Match(child, ..) if child.hir_id != child_id => child_id = parent_id,
+ ExprKind::Break(Destination { target_id: Ok(id), .. }, _) => {
+ child_id = id;
+ iter = map.parent_iter(id);
+ },
+ ExprKind::Block(..) => child_id = parent_id,
+ _ => return None,
+ }
+ }
+ None
+}
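+
+// Editor's note: hypothetical sketch, not part of the original sources. Starting from the `x`
+// in `let y = if b { x } else { 0 };`, the walk climbs through the block and the `if`, calling
+// `f` on each parent, and finally reaches the `Local` for `y`, the node that consumes the
+// produced value.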
+
+macro_rules! op_utils {
+ ($($name:ident $assign:ident)*) => {
+ /// Binary operation traits like `LangItem::Add`
+ pub static BINOP_TRAITS: &[LangItem] = &[$(LangItem::$name,)*];
+
+ /// Operator-Assign traits like `LangItem::AddAssign`
+ pub static OP_ASSIGN_TRAITS: &[LangItem] = &[$(LangItem::$assign,)*];
+
+ /// Converts `BinOpKind::Add` to `(LangItem::Add, LangItem::AddAssign)`, for example
+ pub fn binop_traits(kind: hir::BinOpKind) -> Option<(LangItem, LangItem)> {
+ match kind {
+ $(hir::BinOpKind::$name => Some((LangItem::$name, LangItem::$assign)),)*
+ _ => None,
+ }
+ }
+ };
+}
+
+op_utils! {
+ Add AddAssign
+ Sub SubAssign
+ Mul MulAssign
+ Div DivAssign
+ Rem RemAssign
+ BitXor BitXorAssign
+ BitAnd BitAndAssign
+ BitOr BitOrAssign
+ Shl ShlAssign
+ Shr ShrAssign
+}
diff --git a/src/tools/clippy/clippy_utils/src/macros.rs b/src/tools/clippy/clippy_utils/src/macros.rs
new file mode 100644
index 000000000..a268e339b
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/macros.rs
@@ -0,0 +1,583 @@
+#![allow(clippy::similar_names)] // `expr` and `expn`
+
+use crate::visitors::expr_visitor_no_bodies;
+
+use arrayvec::ArrayVec;
+use if_chain::if_chain;
+use rustc_ast::ast::LitKind;
+use rustc_hir::intravisit::Visitor;
+use rustc_hir::{self as hir, Expr, ExprKind, HirId, Node, QPath};
+use rustc_lint::LateContext;
+use rustc_span::def_id::DefId;
+use rustc_span::hygiene::{self, MacroKind, SyntaxContext};
+use rustc_span::{sym, ExpnData, ExpnId, ExpnKind, Span, Symbol};
+use std::ops::ControlFlow;
+
+const FORMAT_MACRO_DIAG_ITEMS: &[Symbol] = &[
+ sym::assert_eq_macro,
+ sym::assert_macro,
+ sym::assert_ne_macro,
+ sym::debug_assert_eq_macro,
+ sym::debug_assert_macro,
+ sym::debug_assert_ne_macro,
+ sym::eprint_macro,
+ sym::eprintln_macro,
+ sym::format_args_macro,
+ sym::format_macro,
+ sym::print_macro,
+ sym::println_macro,
+ sym::std_panic_macro,
+ sym::write_macro,
+ sym::writeln_macro,
+];
+
+/// Returns `true` if the given macro `DefId` is a format macro (e.g. `println!`).
+pub fn is_format_macro(cx: &LateContext<'_>, macro_def_id: DefId) -> bool {
+ if let Some(name) = cx.tcx.get_diagnostic_name(macro_def_id) {
+ FORMAT_MACRO_DIAG_ITEMS.contains(&name)
+ } else {
+ false
+ }
+}
+
+/// A macro call, like `vec![1, 2, 3]`.
+///
+/// Use `tcx.item_name(macro_call.def_id)` to get the macro name.
+/// Even better is to check if it is a diagnostic item.
+///
+/// This structure is similar to `ExpnData`, but it excludes desugaring expansions.
+#[derive(Debug)]
+pub struct MacroCall {
+ /// Macro `DefId`
+ pub def_id: DefId,
+ /// Kind of macro
+ pub kind: MacroKind,
+ /// The expansion produced by the macro call
+ pub expn: ExpnId,
+ /// Span of the macro call site
+ pub span: Span,
+}
+
+impl MacroCall {
+ pub fn is_local(&self) -> bool {
+ span_is_local(self.span)
+ }
+}
+
+/// Returns an iterator of expansions that created the given span
+pub fn expn_backtrace(mut span: Span) -> impl Iterator<Item = (ExpnId, ExpnData)> {
+ std::iter::from_fn(move || {
+ let ctxt = span.ctxt();
+ if ctxt == SyntaxContext::root() {
+ return None;
+ }
+ let expn = ctxt.outer_expn();
+ let data = expn.expn_data();
+ span = data.call_site;
+ Some((expn, data))
+ })
+}
+
+/// Checks whether the span is from the root expansion or a locally defined macro
+pub fn span_is_local(span: Span) -> bool {
+ !span.from_expansion() || expn_is_local(span.ctxt().outer_expn())
+}
+
+/// Checks whether the expansion is the root expansion or a locally defined macro
+pub fn expn_is_local(expn: ExpnId) -> bool {
+ if expn == ExpnId::root() {
+ return true;
+ }
+ let data = expn.expn_data();
+ let backtrace = expn_backtrace(data.call_site);
+ std::iter::once((expn, data))
+ .chain(backtrace)
+ .find_map(|(_, data)| data.macro_def_id)
+ .map_or(true, DefId::is_local)
+}
+
+/// Returns an iterator of macro expansions that created the given span.
+/// Note that desugaring expansions are skipped.
+pub fn macro_backtrace(span: Span) -> impl Iterator<Item = MacroCall> {
+ expn_backtrace(span).filter_map(|(expn, data)| match data {
+ ExpnData {
+ kind: ExpnKind::Macro(kind, _),
+ macro_def_id: Some(def_id),
+ call_site: span,
+ ..
+ } => Some(MacroCall {
+ def_id,
+ kind,
+ expn,
+ span,
+ }),
+ _ => None,
+ })
+}
+
+/// If the macro backtrace of `span` has a macro call at the root expansion
+/// (i.e. not a nested macro call), returns `Some` with the `MacroCall`
+pub fn root_macro_call(span: Span) -> Option<MacroCall> {
+ macro_backtrace(span).last()
+}
+
+/// Like [`root_macro_call`], but only returns `Some` if `node` is the "first node"
+/// produced by the macro call, as in [`first_node_in_macro`].
+pub fn root_macro_call_first_node(cx: &LateContext<'_>, node: &impl HirNode) -> Option<MacroCall> {
+ if first_node_in_macro(cx, node) != Some(ExpnId::root()) {
+ return None;
+ }
+ root_macro_call(node.span())
+}
+
+/// Like [`macro_backtrace`], but only returns macro calls where `node` is the "first node" of the
+/// macro call, as in [`first_node_in_macro`].
+pub fn first_node_macro_backtrace(cx: &LateContext<'_>, node: &impl HirNode) -> impl Iterator<Item = MacroCall> {
+ let span = node.span();
+ first_node_in_macro(cx, node)
+ .into_iter()
+ .flat_map(move |expn| macro_backtrace(span).take_while(move |macro_call| macro_call.expn != expn))
+}
+
+/// If `node` is the "first node" in a macro expansion, returns `Some` with the `ExpnId` of the
+/// macro call site (i.e. the parent of the macro expansion). This generally means that `node`
+/// is the outermost node of an entire macro expansion, but there are some caveats noted below.
+/// This is useful for finding macro calls while visiting the HIR without processing the macro call
+/// at every node within its expansion.
+///
+/// If you already have immediate access to the parent node, it is simpler to
+/// just check the context of that span directly (e.g. `parent.span.from_expansion()`).
+///
+/// If a macro call is in statement position, it expands to one or more statements.
+/// In that case, each statement *and* their immediate descendants will all yield `Some`
+/// with the `ExpnId` of the containing block.
+///
+/// A node may be the "first node" of multiple macro calls in a macro backtrace.
+/// The expansion of the outermost macro call site is returned in such cases.
+pub fn first_node_in_macro(cx: &LateContext<'_>, node: &impl HirNode) -> Option<ExpnId> {
+ // get the macro expansion or return `None` if not found
+ // `macro_backtrace` importantly ignores desugaring expansions
+ let expn = macro_backtrace(node.span()).next()?.expn;
+
+ // get the parent node, possibly skipping over a statement
+ // if the parent is not found, it is sensible to return `Some(root)`
+ let hir = cx.tcx.hir();
+ let mut parent_iter = hir.parent_iter(node.hir_id());
+ let (parent_id, _) = match parent_iter.next() {
+ None => return Some(ExpnId::root()),
+ Some((_, Node::Stmt(_))) => match parent_iter.next() {
+ None => return Some(ExpnId::root()),
+ Some(next) => next,
+ },
+ Some(next) => next,
+ };
+
+ // get the macro expansion of the parent node
+ let parent_span = hir.span(parent_id);
+ let Some(parent_macro_call) = macro_backtrace(parent_span).next() else {
+ // the parent node is not in a macro
+ return Some(ExpnId::root());
+ };
+
+ if parent_macro_call.expn.is_descendant_of(expn) {
+ // `node` is input to a macro call
+ return None;
+ }
+
+ Some(parent_macro_call.expn)
+}
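+
+// Editor's note: hypothetical sketch, not part of the original sources. For `let v = vec![1, 2];`,
+// the outermost expression produced by the `vec!` expansion is its "first node", so this returns
+// `Some(ExpnId::root())` for it, while nodes nested deeper inside the same expansion yield `None`.
+// This is what lets a visitor handle each macro call exactly once.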
+
+/* Specific Macro Utils */
+
+/// Returns `true` if `def_id` belongs to `std::panic`, `core::panic`, or any of their inner
+/// implementation macros.
+pub fn is_panic(cx: &LateContext<'_>, def_id: DefId) -> bool {
+ let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { return false };
+ matches!(
+ name.as_str(),
+ "core_panic_macro"
+ | "std_panic_macro"
+ | "core_panic_2015_macro"
+ | "std_panic_2015_macro"
+ | "core_panic_2021_macro"
+ )
+}
+
+pub enum PanicExpn<'a> {
+ /// No arguments - `panic!()`
+ Empty,
+ /// A string literal or any `&str` - `panic!("message")` or `panic!(message)`
+ Str(&'a Expr<'a>),
+ /// A single argument that implements `Display` - `panic!("{}", object)`
+ Display(&'a Expr<'a>),
+ /// Anything else - `panic!("error {}: {}", a, b)`
+ Format(FormatArgsExpn<'a>),
+}
+
+impl<'a> PanicExpn<'a> {
+ pub fn parse(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<Self> {
+ if !macro_backtrace(expr.span).any(|macro_call| is_panic(cx, macro_call.def_id)) {
+ return None;
+ }
+ let ExprKind::Call(callee, [arg]) = &expr.kind else { return None };
+ let ExprKind::Path(QPath::Resolved(_, path)) = &callee.kind else { return None };
+ let result = match path.segments.last().unwrap().ident.as_str() {
+ "panic" if arg.span.ctxt() == expr.span.ctxt() => Self::Empty,
+ "panic" | "panic_str" => Self::Str(arg),
+ "panic_display" => {
+ let ExprKind::AddrOf(_, _, e) = &arg.kind else { return None };
+ Self::Display(e)
+ },
+ "panic_fmt" => Self::Format(FormatArgsExpn::parse(cx, arg)?),
+ _ => return None,
+ };
+ Some(result)
+ }
+}
+
+/// Finds the arguments of an `assert!` or `debug_assert!` macro call within the macro expansion
+pub fn find_assert_args<'a>(
+ cx: &LateContext<'_>,
+ expr: &'a Expr<'a>,
+ expn: ExpnId,
+) -> Option<(&'a Expr<'a>, PanicExpn<'a>)> {
+ find_assert_args_inner(cx, expr, expn).map(|([e], p)| (e, p))
+}
+
+/// Finds the arguments of an `assert_eq!` or `debug_assert_eq!` macro call within the macro
+/// expansion
+pub fn find_assert_eq_args<'a>(
+ cx: &LateContext<'_>,
+ expr: &'a Expr<'a>,
+ expn: ExpnId,
+) -> Option<(&'a Expr<'a>, &'a Expr<'a>, PanicExpn<'a>)> {
+ find_assert_args_inner(cx, expr, expn).map(|([a, b], p)| (a, b, p))
+}
+
+fn find_assert_args_inner<'a, const N: usize>(
+ cx: &LateContext<'_>,
+ expr: &'a Expr<'a>,
+ expn: ExpnId,
+) -> Option<([&'a Expr<'a>; N], PanicExpn<'a>)> {
+ let macro_id = expn.expn_data().macro_def_id?;
+ let (expr, expn) = match cx.tcx.item_name(macro_id).as_str().strip_prefix("debug_") {
+ None => (expr, expn),
+ Some(inner_name) => find_assert_within_debug_assert(cx, expr, expn, Symbol::intern(inner_name))?,
+ };
+ let mut args = ArrayVec::new();
+ let mut panic_expn = None;
+ expr_visitor_no_bodies(|e| {
+ if args.is_full() {
+ if panic_expn.is_none() && e.span.ctxt() != expr.span.ctxt() {
+ panic_expn = PanicExpn::parse(cx, e);
+ }
+ panic_expn.is_none()
+ } else if is_assert_arg(cx, e, expn) {
+ args.push(e);
+ false
+ } else {
+ true
+ }
+ })
+ .visit_expr(expr);
+ let args = args.into_inner().ok()?;
+ // if no `panic!(..)` is found, use `PanicExpn::Empty`
+ // to indicate that the default assertion message is used
+ let panic_expn = panic_expn.unwrap_or(PanicExpn::Empty);
+ Some((args, panic_expn))
+}
+
+fn find_assert_within_debug_assert<'a>(
+ cx: &LateContext<'_>,
+ expr: &'a Expr<'a>,
+ expn: ExpnId,
+ assert_name: Symbol,
+) -> Option<(&'a Expr<'a>, ExpnId)> {
+ let mut found = None;
+ expr_visitor_no_bodies(|e| {
+ if found.is_some() || !e.span.from_expansion() {
+ return false;
+ }
+ let e_expn = e.span.ctxt().outer_expn();
+ if e_expn == expn {
+ return true;
+ }
+ if e_expn.expn_data().macro_def_id.map(|id| cx.tcx.item_name(id)) == Some(assert_name) {
+ found = Some((e, e_expn));
+ }
+ false
+ })
+ .visit_expr(expr);
+ found
+}
+
+fn is_assert_arg(cx: &LateContext<'_>, expr: &Expr<'_>, assert_expn: ExpnId) -> bool {
+ if !expr.span.from_expansion() {
+ return true;
+ }
+ let result = macro_backtrace(expr.span).try_for_each(|macro_call| {
+ if macro_call.expn == assert_expn {
+ ControlFlow::Break(false)
+ } else {
+ match cx.tcx.item_name(macro_call.def_id) {
+ // `cfg!(debug_assertions)` in `debug_assert!`
+ sym::cfg => ControlFlow::CONTINUE,
+ // assert!(other_macro!(..))
+ _ => ControlFlow::Break(true),
+ }
+ }
+ });
+ match result {
+ ControlFlow::Break(is_assert_arg) => is_assert_arg,
+ ControlFlow::Continue(()) => true,
+ }
+}
+
+/// A parsed `format_args!` expansion
+#[derive(Debug)]
+pub struct FormatArgsExpn<'tcx> {
+ /// Span of the first argument, the format string
+ pub format_string_span: Span,
+ /// The format string split by formatted args like `{..}`
+ pub format_string_parts: Vec<Symbol>,
+ /// Values passed after the format string
+ pub value_args: Vec<&'tcx Expr<'tcx>>,
+ /// Each element is a `value_args` index and a formatting trait (e.g. `sym::Debug`)
+ pub formatters: Vec<(usize, Symbol)>,
+ /// List of `fmt::v1::Argument { .. }` expressions. If this is empty,
+ /// then `formatters` represents the format args (`{..}`).
+ /// If this is non-empty, it represents the format args, and the `position`
+ /// parameters within the struct expressions are indexes of `formatters`.
+ pub specs: Vec<&'tcx Expr<'tcx>>,
+}
+
+impl<'tcx> FormatArgsExpn<'tcx> {
+ /// Parses an expanded `format_args!` or `format_args_nl!` invocation
+ pub fn parse(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<Self> {
+ macro_backtrace(expr.span).find(|macro_call| {
+ matches!(
+ cx.tcx.item_name(macro_call.def_id),
+ sym::const_format_args | sym::format_args | sym::format_args_nl
+ )
+ })?;
+ let mut format_string_span: Option<Span> = None;
+ let mut format_string_parts: Vec<Symbol> = Vec::new();
+ let mut value_args: Vec<&Expr<'_>> = Vec::new();
+ let mut formatters: Vec<(usize, Symbol)> = Vec::new();
+ let mut specs: Vec<&Expr<'_>> = Vec::new();
+ expr_visitor_no_bodies(|e| {
+ // if we're still inside of the macro definition...
+ if e.span.ctxt() == expr.span.ctxt() {
+ // ArgumentV1::new_<format_trait>(<value>)
+ if_chain! {
+ if let ExprKind::Call(callee, [val]) = e.kind;
+ if let ExprKind::Path(QPath::TypeRelative(ty, seg)) = callee.kind;
+ if let hir::TyKind::Path(QPath::Resolved(_, path)) = ty.kind;
+ if path.segments.last().unwrap().ident.name == sym::ArgumentV1;
+ if seg.ident.name.as_str().starts_with("new_");
+ then {
+ let val_idx = if_chain! {
+ if val.span.ctxt() == expr.span.ctxt();
+ if let ExprKind::Field(_, field) = val.kind;
+ if let Ok(idx) = field.name.as_str().parse();
+ then {
+ // tuple index
+ idx
+ } else {
+ // assume the value expression is passed directly
+ formatters.len()
+ }
+ };
+ let fmt_trait = match seg.ident.name.as_str() {
+ "new_display" => "Display",
+ "new_debug" => "Debug",
+ "new_lower_exp" => "LowerExp",
+ "new_upper_exp" => "UpperExp",
+ "new_octal" => "Octal",
+ "new_pointer" => "Pointer",
+ "new_binary" => "Binary",
+ "new_lower_hex" => "LowerHex",
+ "new_upper_hex" => "UpperHex",
+ _ => unreachable!(),
+ };
+ formatters.push((val_idx, Symbol::intern(fmt_trait)));
+ }
+ }
+ if let ExprKind::Struct(QPath::Resolved(_, path), ..) = e.kind {
+ if path.segments.last().unwrap().ident.name == sym::Argument {
+ specs.push(e);
+ }
+ }
+ // walk through the macro expansion
+ return true;
+ }
+ // assume that the first expr with a differing context represents
+ // (and has the span of) the format string
+ if format_string_span.is_none() {
+ format_string_span = Some(e.span);
+ let span = e.span;
+ // walk the expr and collect string literals which are format string parts
+ expr_visitor_no_bodies(|e| {
+ if e.span.ctxt() != span.ctxt() {
+ // defensive check, probably doesn't happen
+ return false;
+ }
+ if let ExprKind::Lit(lit) = &e.kind {
+ if let LitKind::Str(symbol, _s) = lit.node {
+ format_string_parts.push(symbol);
+ }
+ }
+ true
+ })
+ .visit_expr(e);
+ } else {
+ // assume that any further exprs with a differing context are value args
+ value_args.push(e);
+ }
+            // don't walk anything not from the macro expansion (e.g. inputs)
+ false
+ })
+ .visit_expr(expr);
+ Some(FormatArgsExpn {
+ format_string_span: format_string_span?,
+ format_string_parts,
+ value_args,
+ formatters,
+ specs,
+ })
+ }
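+
+    // Editor's note: hypothetical sketch, not part of the original sources. Parsing the
+    // expansion of `format!("{}: {:?}", a, b)` would yield `value_args` holding the exprs for
+    // `a` and `b` and `formatters` of `[(0, Display), (1, Debug)]`; `specs` stays empty unless
+    // explicit formatting parameters force `core::fmt::rt::v1::Argument` structs to be built.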
+
+ /// Finds a nested call to `format_args!` within a `format!`-like macro call
+ pub fn find_nested(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, expn_id: ExpnId) -> Option<Self> {
+ let mut format_args = None;
+ expr_visitor_no_bodies(|e| {
+ if format_args.is_some() {
+ return false;
+ }
+ let e_ctxt = e.span.ctxt();
+ if e_ctxt == expr.span.ctxt() {
+ return true;
+ }
+ if e_ctxt.outer_expn().is_descendant_of(expn_id) {
+ format_args = FormatArgsExpn::parse(cx, e);
+ }
+ false
+ })
+ .visit_expr(expr);
+ format_args
+ }
+
+ /// Returns a vector of `FormatArgsArg`, one per format argument, or `None` if an argument spec cannot be resolved.
+ pub fn args(&self) -> Option<Vec<FormatArgsArg<'tcx>>> {
+ if self.specs.is_empty() {
+ let args = std::iter::zip(&self.value_args, &self.formatters)
+ .map(|(value, &(_, format_trait))| FormatArgsArg {
+ value,
+ format_trait,
+ spec: None,
+ })
+ .collect();
+ return Some(args);
+ }
+ self.specs
+ .iter()
+ .map(|spec| {
+ if_chain! {
+ // struct `core::fmt::rt::v1::Argument`
+ if let ExprKind::Struct(_, fields, _) = spec.kind;
+ if let Some(position_field) = fields.iter().find(|f| f.ident.name == sym::position);
+ if let ExprKind::Lit(lit) = &position_field.expr.kind;
+ if let LitKind::Int(position, _) = lit.node;
+ if let Ok(i) = usize::try_from(position);
+ if let Some(&(j, format_trait)) = self.formatters.get(i);
+ then {
+ Some(FormatArgsArg {
+ value: self.value_args[j],
+ format_trait,
+ spec: Some(spec),
+ })
+ } else {
+ None
+ }
+ }
+ })
+ .collect()
+ }
+
+ /// Source callsite span of all inputs
+ pub fn inputs_span(&self) -> Span {
+ match *self.value_args {
+ [] => self.format_string_span,
+ [.., last] => self
+ .format_string_span
+ .to(hygiene::walk_chain(last.span, self.format_string_span.ctxt())),
+ }
+ }
+}
+
+/// Type representing a `FormatArgsExpn`'s format arguments
+pub struct FormatArgsArg<'tcx> {
+ /// An element of `value_args` according to `position`
+ pub value: &'tcx Expr<'tcx>,
+ /// The format trait from the element of `formatters` selected by `position`
+ pub format_trait: Symbol,
+ /// An element of `specs`
+ pub spec: Option<&'tcx Expr<'tcx>>,
+}
+
+impl<'tcx> FormatArgsArg<'tcx> {
+ /// Returns true if any formatting parameters are used that would have an effect on strings,
+ /// like `{:+2}` instead of just `{}`.
+ pub fn has_string_formatting(&self) -> bool {
+ self.spec.map_or(false, |spec| {
+ // `!` because these conditions check that `self` is unformatted.
+ !if_chain! {
+ // struct `core::fmt::rt::v1::Argument`
+ if let ExprKind::Struct(_, fields, _) = spec.kind;
+ if let Some(format_field) = fields.iter().find(|f| f.ident.name == sym::format);
+ // struct `core::fmt::rt::v1::FormatSpec`
+ if let ExprKind::Struct(_, subfields, _) = format_field.expr.kind;
+ if subfields.iter().all(|field| match field.ident.name {
+ sym::precision | sym::width => match field.expr.kind {
+ ExprKind::Path(QPath::Resolved(_, path)) => {
+ path.segments.last().unwrap().ident.name == sym::Implied
+ }
+ _ => false,
+ }
+ _ => true,
+ });
+ then { true } else { false }
+ }
+ })
+ }
+}
+
+/// A node with a `HirId` and a `Span`
+pub trait HirNode {
+ fn hir_id(&self) -> HirId;
+ fn span(&self) -> Span;
+}
+
+macro_rules! impl_hir_node {
+ ($($t:ident),*) => {
+ $(impl HirNode for hir::$t<'_> {
+ fn hir_id(&self) -> HirId {
+ self.hir_id
+ }
+ fn span(&self) -> Span {
+ self.span
+ }
+ })*
+ };
+}
+
+impl_hir_node!(Expr, Pat);
+
+impl HirNode for hir::Item<'_> {
+ fn hir_id(&self) -> HirId {
+ self.hir_id()
+ }
+
+ fn span(&self) -> Span {
+ self.span
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/msrvs.rs b/src/tools/clippy/clippy_utils/src/msrvs.rs
new file mode 100644
index 000000000..9e238c6f1
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/msrvs.rs
@@ -0,0 +1,39 @@
+use rustc_semver::RustcVersion;
+
+macro_rules! msrv_aliases {
+ ($($major:literal,$minor:literal,$patch:literal {
+ $($name:ident),* $(,)?
+ })*) => {
+ $($(
+ pub const $name: RustcVersion = RustcVersion::new($major, $minor, $patch);
+ )*)*
+ };
+}
+
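+// As a rough sketch of the expansion: each `major,minor,patch { NAME, .. }` entry below
+// produces items of the form `pub const NAME: RustcVersion = RustcVersion::new(major, minor, patch);`.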
+// names may refer to stabilized feature flags or library items
+msrv_aliases! {
+ 1,62,0 { BOOL_THEN_SOME }
+ 1,53,0 { OR_PATTERNS, MANUAL_BITS, BTREE_MAP_RETAIN, BTREE_SET_RETAIN }
+ 1,52,0 { STR_SPLIT_ONCE, REM_EUCLID_CONST }
+ 1,51,0 { BORROW_AS_PTR, UNSIGNED_ABS }
+ 1,50,0 { BOOL_THEN }
+ 1,47,0 { TAU }
+ 1,46,0 { CONST_IF_MATCH }
+ 1,45,0 { STR_STRIP_PREFIX }
+ 1,43,0 { LOG2_10, LOG10_2 }
+ 1,42,0 { MATCHES_MACRO, SLICE_PATTERNS, PTR_SLICE_RAW_PARTS }
+ 1,41,0 { RE_REBALANCING_COHERENCE, RESULT_MAP_OR_ELSE }
+ 1,40,0 { MEM_TAKE, NON_EXHAUSTIVE, OPTION_AS_DEREF }
+ 1,38,0 { POINTER_CAST, REM_EUCLID }
+ 1,37,0 { TYPE_ALIAS_ENUM_VARIANTS }
+ 1,36,0 { ITERATOR_COPIED }
+ 1,35,0 { OPTION_COPIED, RANGE_CONTAINS }
+ 1,34,0 { TRY_FROM }
+ 1,30,0 { ITERATOR_FIND_MAP, TOOL_ATTRIBUTES }
+ 1,28,0 { FROM_BOOL }
+ 1,26,0 { RANGE_INCLUSIVE, STRING_RETAIN }
+ 1,24,0 { IS_ASCII_DIGIT }
+ 1,18,0 { HASH_MAP_RETAIN, HASH_SET_RETAIN }
+ 1,17,0 { FIELD_INIT_SHORTHAND, STATIC_IN_CONST, EXPECT_ERR }
+ 1,16,0 { STR_REPEAT }
+}
diff --git a/src/tools/clippy/clippy_utils/src/numeric_literal.rs b/src/tools/clippy/clippy_utils/src/numeric_literal.rs
new file mode 100644
index 000000000..3fb5415ce
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/numeric_literal.rs
@@ -0,0 +1,248 @@
+use rustc_ast::ast::{Lit, LitFloatType, LitIntType, LitKind};
+use std::iter;
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum Radix {
+ Binary,
+ Octal,
+ Decimal,
+ Hexadecimal,
+}
+
+impl Radix {
+ /// Returns a reasonable digit group size for this radix.
+ #[must_use]
+ fn suggest_grouping(self) -> usize {
+ match self {
+ Self::Binary | Self::Hexadecimal => 4,
+ Self::Octal | Self::Decimal => 3,
+ }
+ }
+}
+
+/// A helper method to format numeric literals with digit grouping.
+/// `lit` must be a valid numeric literal without suffix.
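+///
+/// As an illustrative sketch, `format("123456", Some("u32"), false)` is expected to
+/// produce `"123_456_u32"`.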
+pub fn format(lit: &str, type_suffix: Option<&str>, float: bool) -> String {
+ NumericLiteral::new(lit, type_suffix, float).format()
+}
+
+#[derive(Debug)]
+pub struct NumericLiteral<'a> {
+ /// Which radix the literal was represented in.
+ pub radix: Radix,
+ /// The radix prefix, if present.
+ pub prefix: Option<&'a str>,
+
+ /// The integer part of the number.
+ pub integer: &'a str,
+ /// The fraction part of the number.
+ pub fraction: Option<&'a str>,
+ /// The exponent separator (b'e' or b'E') including preceding underscore if present
+ /// and the exponent part.
+ pub exponent: Option<(&'a str, &'a str)>,
+
+ /// The type suffix, including preceding underscore if present.
+ pub suffix: Option<&'a str>,
+}
+
+impl<'a> NumericLiteral<'a> {
+ pub fn from_lit(src: &'a str, lit: &Lit) -> Option<NumericLiteral<'a>> {
+ NumericLiteral::from_lit_kind(src, &lit.kind)
+ }
+
+ pub fn from_lit_kind(src: &'a str, lit_kind: &LitKind) -> Option<NumericLiteral<'a>> {
+ let unsigned_src = src.strip_prefix('-').map_or(src, |s| s);
+ if lit_kind.is_numeric()
+ && unsigned_src
+ .trim_start()
+ .chars()
+ .next()
+ .map_or(false, |c| c.is_ascii_digit())
+ {
+ let (unsuffixed, suffix) = split_suffix(src, lit_kind);
+ let float = matches!(lit_kind, LitKind::Float(..));
+ Some(NumericLiteral::new(unsuffixed, suffix, float))
+ } else {
+ None
+ }
+ }
+
+ #[must_use]
+ pub fn new(lit: &'a str, suffix: Option<&'a str>, float: bool) -> Self {
+ // Determine delimiter for radix prefix, if present, and radix.
+ let radix = if lit.starts_with("0x") {
+ Radix::Hexadecimal
+ } else if lit.starts_with("0b") {
+ Radix::Binary
+ } else if lit.starts_with("0o") {
+ Radix::Octal
+ } else {
+ Radix::Decimal
+ };
+
+ // Grab part of the literal after prefix, if present.
+ let (prefix, mut sans_prefix) = if radix == Radix::Decimal {
+ (None, lit)
+ } else {
+ let (p, s) = lit.split_at(2);
+ (Some(p), s)
+ };
+
+ if suffix.is_some() && sans_prefix.ends_with('_') {
+ // The '_' before the suffix isn't part of the digits
+ sans_prefix = &sans_prefix[..sans_prefix.len() - 1];
+ }
+
+ let (integer, fraction, exponent) = Self::split_digit_parts(sans_prefix, float);
+
+ Self {
+ radix,
+ prefix,
+ integer,
+ fraction,
+ exponent,
+ suffix,
+ }
+ }
+
+ pub fn is_decimal(&self) -> bool {
+ self.radix == Radix::Decimal
+ }
+
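+ /// Splits the digits into integer, fraction and exponent parts. As a sketch of the
+ /// expected behavior, `split_digit_parts("1_000.5e3", true)` should return
+ /// `("1_000", Some("5"), Some(("e", "3")))`.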
+ pub fn split_digit_parts(digits: &str, float: bool) -> (&str, Option<&str>, Option<(&str, &str)>) {
+ let mut integer = digits;
+ let mut fraction = None;
+ let mut exponent = None;
+
+ if float {
+ for (i, c) in digits.char_indices() {
+ match c {
+ '.' => {
+ integer = &digits[..i];
+ fraction = Some(&digits[i + 1..]);
+ },
+ 'e' | 'E' => {
+ let exp_start = if digits[..i].ends_with('_') { i - 1 } else { i };
+
+ if integer.len() > exp_start {
+ integer = &digits[..exp_start];
+ } else {
+ fraction = Some(&digits[integer.len() + 1..exp_start]);
+ };
+ exponent = Some((&digits[exp_start..=i], &digits[i + 1..]));
+ break;
+ },
+ _ => {},
+ }
+ }
+ }
+
+ (integer, fraction, exponent)
+ }
+
+ /// Returns literal formatted in a sensible way.
+ pub fn format(&self) -> String {
+ let mut output = String::new();
+
+ if let Some(prefix) = self.prefix {
+ output.push_str(prefix);
+ }
+
+ let group_size = self.radix.suggest_grouping();
+
+ Self::group_digits(
+ &mut output,
+ self.integer,
+ group_size,
+ true,
+ self.radix == Radix::Hexadecimal,
+ );
+
+ if let Some(fraction) = self.fraction {
+ output.push('.');
+ Self::group_digits(&mut output, fraction, group_size, false, false);
+ }
+
+ if let Some((separator, exponent)) = self.exponent {
+ if exponent != "0" {
+ output.push_str(separator);
+ Self::group_digits(&mut output, exponent, group_size, true, false);
+ }
+ }
+
+ if let Some(suffix) = self.suffix {
+ if output.ends_with('.') {
+ output.push('0');
+ }
+ output.push('_');
+ output.push_str(suffix);
+ }
+
+ output
+ }
+
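+ /// A small sketch: with `partial_group_first = true` and `pad = true` (as used for
+ /// hexadecimal literals), `group_digits(&mut out, "ff00ff", 4, true, true)` should append
+ /// `"00ff_00ff"` to `out`.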
+ pub fn group_digits(output: &mut String, input: &str, group_size: usize, partial_group_first: bool, pad: bool) {
+ debug_assert!(group_size > 0);
+
+ let mut digits = input.chars().filter(|&c| c != '_');
+
+ // The exponent may have a sign, output it early, otherwise it will be
+ // treated as a digit
+ if digits.clone().next() == Some('-') {
+ let _ = digits.next();
+ output.push('-');
+ }
+
+ let first_group_size;
+
+ if partial_group_first {
+ first_group_size = (digits.clone().count() - 1) % group_size + 1;
+ if pad {
+ for _ in 0..group_size - first_group_size {
+ output.push('0');
+ }
+ }
+ } else {
+ first_group_size = group_size;
+ }
+
+ for _ in 0..first_group_size {
+ if let Some(digit) = digits.next() {
+ output.push(digit);
+ }
+ }
+
+ for (c, i) in iter::zip(digits, (0..group_size).cycle()) {
+ if i == 0 {
+ output.push('_');
+ }
+ output.push(c);
+ }
+ }
+}
+
+fn split_suffix<'a>(src: &'a str, lit_kind: &LitKind) -> (&'a str, Option<&'a str>) {
+ debug_assert!(lit_kind.is_numeric());
+ lit_suffix_length(lit_kind).map_or((src, None), |suffix_length| {
+ let (unsuffixed, suffix) = src.split_at(src.len() - suffix_length);
+ (unsuffixed, Some(suffix))
+ })
+}
+
+fn lit_suffix_length(lit_kind: &LitKind) -> Option<usize> {
+ debug_assert!(lit_kind.is_numeric());
+ let suffix = match lit_kind {
+ LitKind::Int(_, int_lit_kind) => match int_lit_kind {
+ LitIntType::Signed(int_ty) => Some(int_ty.name_str()),
+ LitIntType::Unsigned(uint_ty) => Some(uint_ty.name_str()),
+ LitIntType::Unsuffixed => None,
+ },
+ LitKind::Float(_, float_lit_kind) => match float_lit_kind {
+ LitFloatType::Suffixed(float_ty) => Some(float_ty.name_str()),
+ LitFloatType::Unsuffixed => None,
+ },
+ _ => None,
+ };
+
+ suffix.map(str::len)
+}
diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs
new file mode 100644
index 000000000..05429d05d
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/paths.rs
@@ -0,0 +1,196 @@
+//! This module contains paths to types and functions Clippy needs to know
+//! about.
+//!
+//! Whenever possible, please consider diagnostic items over hardcoded paths.
+//! See <https://github.com/rust-lang/rust-clippy/issues/5393> for more information.
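+//!
+//! As a rough usage sketch (assuming the usual `clippy_utils::match_def_path` helper), a
+//! constant such as `ARC_PTR_EQ` is typically checked with
+//! `match_def_path(cx, def_id, &paths::ARC_PTR_EQ)`.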
+
+#[cfg(feature = "internal")]
+pub const APPLICABILITY: [&str; 2] = ["rustc_lint_defs", "Applicability"];
+#[cfg(feature = "internal")]
+pub const APPLICABILITY_VALUES: [[&str; 3]; 4] = [
+ ["rustc_lint_defs", "Applicability", "Unspecified"],
+ ["rustc_lint_defs", "Applicability", "HasPlaceholders"],
+ ["rustc_lint_defs", "Applicability", "MaybeIncorrect"],
+ ["rustc_lint_defs", "Applicability", "MachineApplicable"],
+];
+#[cfg(feature = "internal")]
+pub const DIAGNOSTIC_BUILDER: [&str; 3] = ["rustc_errors", "diagnostic_builder", "DiagnosticBuilder"];
+pub const ARC_PTR_EQ: [&str; 4] = ["alloc", "sync", "Arc", "ptr_eq"];
+pub const ASMUT_TRAIT: [&str; 3] = ["core", "convert", "AsMut"];
+pub const ASREF_TRAIT: [&str; 3] = ["core", "convert", "AsRef"];
+pub const BTREEMAP_CONTAINS_KEY: [&str; 6] = ["alloc", "collections", "btree", "map", "BTreeMap", "contains_key"];
+pub const BTREEMAP_ENTRY: [&str; 6] = ["alloc", "collections", "btree", "map", "entry", "Entry"];
+pub const BTREEMAP_INSERT: [&str; 6] = ["alloc", "collections", "btree", "map", "BTreeMap", "insert"];
+pub const BTREESET_ITER: [&str; 6] = ["alloc", "collections", "btree", "set", "BTreeSet", "iter"];
+pub const CLONE_TRAIT_METHOD: [&str; 4] = ["core", "clone", "Clone", "clone"];
+pub const COW: [&str; 3] = ["alloc", "borrow", "Cow"];
+pub const CORE_ITER_COLLECT: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "collect"];
+pub const CORE_ITER_CLONED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "cloned"];
+pub const CORE_ITER_COPIED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "copied"];
+pub const CORE_ITER_FILTER: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "filter"];
+pub const CORE_ITER_INTO_ITER: [&str; 6] = ["core", "iter", "traits", "collect", "IntoIterator", "into_iter"];
+pub const CSTRING_AS_C_STR: [&str; 5] = ["alloc", "ffi", "c_str", "CString", "as_c_str"];
+pub const DEFAULT_TRAIT_METHOD: [&str; 4] = ["core", "default", "Default", "default"];
+pub const DEREF_MUT_TRAIT_METHOD: [&str; 5] = ["core", "ops", "deref", "DerefMut", "deref_mut"];
+/// Preferably use the diagnostic item `sym::deref_method` where possible
+pub const DEREF_TRAIT_METHOD: [&str; 5] = ["core", "ops", "deref", "Deref", "deref"];
+pub const DIR_BUILDER: [&str; 3] = ["std", "fs", "DirBuilder"];
+pub const DISPLAY_TRAIT: [&str; 3] = ["core", "fmt", "Display"];
+#[cfg(feature = "internal")]
+pub const EARLY_CONTEXT: [&str; 2] = ["rustc_lint", "EarlyContext"];
+#[cfg(feature = "internal")]
+pub const EARLY_LINT_PASS: [&str; 3] = ["rustc_lint", "passes", "EarlyLintPass"];
+pub const EXIT: [&str; 3] = ["std", "process", "exit"];
+pub const F32_EPSILON: [&str; 4] = ["core", "f32", "<impl f32>", "EPSILON"];
+pub const F64_EPSILON: [&str; 4] = ["core", "f64", "<impl f64>", "EPSILON"];
+pub const FILE: [&str; 3] = ["std", "fs", "File"];
+pub const FILE_TYPE: [&str; 3] = ["std", "fs", "FileType"];
+pub const FROM_FROM: [&str; 4] = ["core", "convert", "From", "from"];
+pub const FROM_ITERATOR_METHOD: [&str; 6] = ["core", "iter", "traits", "collect", "FromIterator", "from_iter"];
+pub const FROM_STR_METHOD: [&str; 5] = ["core", "str", "traits", "FromStr", "from_str"];
+pub const FUTURE_FROM_GENERATOR: [&str; 3] = ["core", "future", "from_generator"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const FUTURES_IO_ASYNCREADEXT: [&str; 3] = ["futures_util", "io", "AsyncReadExt"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const FUTURES_IO_ASYNCWRITEEXT: [&str; 3] = ["futures_util", "io", "AsyncWriteExt"];
+pub const HASHMAP_CONTAINS_KEY: [&str; 6] = ["std", "collections", "hash", "map", "HashMap", "contains_key"];
+pub const HASHMAP_ENTRY: [&str; 5] = ["std", "collections", "hash", "map", "Entry"];
+pub const HASHMAP_INSERT: [&str; 6] = ["std", "collections", "hash", "map", "HashMap", "insert"];
+pub const HASHSET_ITER: [&str; 6] = ["std", "collections", "hash", "set", "HashSet", "iter"];
+#[cfg(feature = "internal")]
+pub const IDENT: [&str; 3] = ["rustc_span", "symbol", "Ident"];
+#[cfg(feature = "internal")]
+pub const IDENT_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Ident", "as_str"];
+pub const INDEX: [&str; 3] = ["core", "ops", "Index"];
+pub const INDEX_MUT: [&str; 3] = ["core", "ops", "IndexMut"];
+pub const INSERT_STR: [&str; 4] = ["alloc", "string", "String", "insert_str"];
+pub const IO_READ: [&str; 3] = ["std", "io", "Read"];
+pub const IO_WRITE: [&str; 3] = ["std", "io", "Write"];
+pub const IPADDR_V4: [&str; 5] = ["std", "net", "ip", "IpAddr", "V4"];
+pub const IPADDR_V6: [&str; 5] = ["std", "net", "ip", "IpAddr", "V6"];
+pub const ITER_COUNT: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "count"];
+pub const ITER_EMPTY: [&str; 5] = ["core", "iter", "sources", "empty", "Empty"];
+pub const ITER_REPEAT: [&str; 5] = ["core", "iter", "sources", "repeat", "repeat"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const ITERTOOLS_NEXT_TUPLE: [&str; 3] = ["itertools", "Itertools", "next_tuple"];
+#[cfg(feature = "internal")]
+pub const KW_MODULE: [&str; 3] = ["rustc_span", "symbol", "kw"];
+#[cfg(feature = "internal")]
+pub const LATE_CONTEXT: [&str; 2] = ["rustc_lint", "LateContext"];
+#[cfg(feature = "internal")]
+pub const LATE_LINT_PASS: [&str; 3] = ["rustc_lint", "passes", "LateLintPass"];
+#[cfg(feature = "internal")]
+pub const LINT: [&str; 2] = ["rustc_lint_defs", "Lint"];
+pub const MEM_SWAP: [&str; 3] = ["core", "mem", "swap"];
+pub const MUTEX_GUARD: [&str; 4] = ["std", "sync", "mutex", "MutexGuard"];
+pub const OPEN_OPTIONS: [&str; 3] = ["std", "fs", "OpenOptions"];
+/// Preferably use the diagnostic item `sym::Option` where possible
+pub const OPTION: [&str; 3] = ["core", "option", "Option"];
+pub const OPTION_NONE: [&str; 4] = ["core", "option", "Option", "None"];
+pub const OPTION_SOME: [&str; 4] = ["core", "option", "Option", "Some"];
+pub const ORD: [&str; 3] = ["core", "cmp", "Ord"];
+pub const OS_STRING_AS_OS_STR: [&str; 5] = ["std", "ffi", "os_str", "OsString", "as_os_str"];
+pub const OS_STR_TO_OS_STRING: [&str; 5] = ["std", "ffi", "os_str", "OsStr", "to_os_string"];
+pub const PARKING_LOT_MUTEX_GUARD: [&str; 3] = ["lock_api", "mutex", "MutexGuard"];
+pub const PARKING_LOT_RWLOCK_READ_GUARD: [&str; 3] = ["lock_api", "rwlock", "RwLockReadGuard"];
+pub const PARKING_LOT_RWLOCK_WRITE_GUARD: [&str; 3] = ["lock_api", "rwlock", "RwLockWriteGuard"];
+pub const PATH_BUF_AS_PATH: [&str; 4] = ["std", "path", "PathBuf", "as_path"];
+pub const PATH_TO_PATH_BUF: [&str; 4] = ["std", "path", "Path", "to_path_buf"];
+pub const PERMISSIONS: [&str; 3] = ["std", "fs", "Permissions"];
+#[cfg_attr(not(unix), allow(clippy::invalid_paths))]
+pub const PERMISSIONS_FROM_MODE: [&str; 6] = ["std", "os", "unix", "fs", "PermissionsExt", "from_mode"];
+pub const POLL: [&str; 4] = ["core", "task", "poll", "Poll"];
+pub const POLL_PENDING: [&str; 5] = ["core", "task", "poll", "Poll", "Pending"];
+pub const POLL_READY: [&str; 5] = ["core", "task", "poll", "Poll", "Ready"];
+pub const PTR_COPY: [&str; 3] = ["core", "intrinsics", "copy"];
+pub const PTR_COPY_NONOVERLAPPING: [&str; 3] = ["core", "intrinsics", "copy_nonoverlapping"];
+pub const PTR_EQ: [&str; 3] = ["core", "ptr", "eq"];
+pub const PTR_SLICE_FROM_RAW_PARTS: [&str; 3] = ["core", "ptr", "slice_from_raw_parts"];
+pub const PTR_SLICE_FROM_RAW_PARTS_MUT: [&str; 3] = ["core", "ptr", "slice_from_raw_parts_mut"];
+pub const PTR_SWAP_NONOVERLAPPING: [&str; 3] = ["core", "ptr", "swap_nonoverlapping"];
+pub const PTR_READ: [&str; 3] = ["core", "ptr", "read"];
+pub const PTR_READ_UNALIGNED: [&str; 3] = ["core", "ptr", "read_unaligned"];
+pub const PTR_READ_VOLATILE: [&str; 3] = ["core", "ptr", "read_volatile"];
+pub const PTR_REPLACE: [&str; 3] = ["core", "ptr", "replace"];
+pub const PTR_SWAP: [&str; 3] = ["core", "ptr", "swap"];
+pub const PTR_UNALIGNED_VOLATILE_LOAD: [&str; 3] = ["core", "intrinsics", "unaligned_volatile_load"];
+pub const PTR_UNALIGNED_VOLATILE_STORE: [&str; 3] = ["core", "intrinsics", "unaligned_volatile_store"];
+pub const PTR_WRITE: [&str; 3] = ["core", "ptr", "write"];
+pub const PTR_WRITE_BYTES: [&str; 3] = ["core", "intrinsics", "write_bytes"];
+pub const PTR_WRITE_UNALIGNED: [&str; 3] = ["core", "ptr", "write_unaligned"];
+pub const PTR_WRITE_VOLATILE: [&str; 3] = ["core", "ptr", "write_volatile"];
+pub const PUSH_STR: [&str; 4] = ["alloc", "string", "String", "push_str"];
+pub const RANGE_ARGUMENT_TRAIT: [&str; 3] = ["core", "ops", "RangeBounds"];
+pub const RC_PTR_EQ: [&str; 4] = ["alloc", "rc", "Rc", "ptr_eq"];
+pub const REFCELL_REF: [&str; 3] = ["core", "cell", "Ref"];
+pub const REFCELL_REFMUT: [&str; 3] = ["core", "cell", "RefMut"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const REGEX_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "unicode", "RegexBuilder", "new"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const REGEX_BYTES_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "bytes", "RegexBuilder", "new"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const REGEX_BYTES_NEW: [&str; 4] = ["regex", "re_bytes", "Regex", "new"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const REGEX_BYTES_SET_NEW: [&str; 5] = ["regex", "re_set", "bytes", "RegexSet", "new"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const REGEX_NEW: [&str; 4] = ["regex", "re_unicode", "Regex", "new"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const REGEX_SET_NEW: [&str; 5] = ["regex", "re_set", "unicode", "RegexSet", "new"];
+/// Preferably use the diagnostic item `sym::Result` where possible
+pub const RESULT: [&str; 3] = ["core", "result", "Result"];
+pub const RESULT_ERR: [&str; 4] = ["core", "result", "Result", "Err"];
+pub const RESULT_OK: [&str; 4] = ["core", "result", "Result", "Ok"];
+#[cfg(feature = "internal")]
+pub const RUSTC_VERSION: [&str; 2] = ["rustc_semver", "RustcVersion"];
+pub const RWLOCK_READ_GUARD: [&str; 4] = ["std", "sync", "rwlock", "RwLockReadGuard"];
+pub const RWLOCK_WRITE_GUARD: [&str; 4] = ["std", "sync", "rwlock", "RwLockWriteGuard"];
+pub const SERDE_DESERIALIZE: [&str; 3] = ["serde", "de", "Deserialize"];
+pub const SERDE_DE_VISITOR: [&str; 3] = ["serde", "de", "Visitor"];
+pub const SLICE_FROM_RAW_PARTS: [&str; 4] = ["core", "slice", "raw", "from_raw_parts"];
+pub const SLICE_FROM_RAW_PARTS_MUT: [&str; 4] = ["core", "slice", "raw", "from_raw_parts_mut"];
+pub const SLICE_GET: [&str; 4] = ["core", "slice", "<impl [T]>", "get"];
+pub const SLICE_INTO_VEC: [&str; 4] = ["alloc", "slice", "<impl [T]>", "into_vec"];
+pub const SLICE_INTO: [&str; 4] = ["core", "slice", "<impl [T]>", "iter"];
+pub const SLICE_ITER: [&str; 4] = ["core", "slice", "iter", "Iter"];
+pub const STDERR: [&str; 4] = ["std", "io", "stdio", "stderr"];
+pub const STDOUT: [&str; 4] = ["std", "io", "stdio", "stdout"];
+pub const CONVERT_IDENTITY: [&str; 3] = ["core", "convert", "identity"];
+pub const STD_FS_CREATE_DIR: [&str; 3] = ["std", "fs", "create_dir"];
+pub const STRING_AS_MUT_STR: [&str; 4] = ["alloc", "string", "String", "as_mut_str"];
+pub const STRING_AS_STR: [&str; 4] = ["alloc", "string", "String", "as_str"];
+pub const STRING_NEW: [&str; 4] = ["alloc", "string", "String", "new"];
+pub const STR_BYTES: [&str; 4] = ["core", "str", "<impl str>", "bytes"];
+pub const STR_CHARS: [&str; 4] = ["core", "str", "<impl str>", "chars"];
+pub const STR_ENDS_WITH: [&str; 4] = ["core", "str", "<impl str>", "ends_with"];
+pub const STR_FROM_UTF8: [&str; 4] = ["core", "str", "converts", "from_utf8"];
+pub const STR_FROM_UTF8_UNCHECKED: [&str; 4] = ["core", "str", "converts", "from_utf8_unchecked"];
+pub const STR_LEN: [&str; 4] = ["core", "str", "<impl str>", "len"];
+pub const STR_STARTS_WITH: [&str; 4] = ["core", "str", "<impl str>", "starts_with"];
+#[cfg(feature = "internal")]
+pub const SYMBOL: [&str; 3] = ["rustc_span", "symbol", "Symbol"];
+#[cfg(feature = "internal")]
+pub const SYMBOL_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Symbol", "as_str"];
+#[cfg(feature = "internal")]
+pub const SYMBOL_INTERN: [&str; 4] = ["rustc_span", "symbol", "Symbol", "intern"];
+#[cfg(feature = "internal")]
+pub const SYMBOL_TO_IDENT_STRING: [&str; 4] = ["rustc_span", "symbol", "Symbol", "to_ident_string"];
+#[cfg(feature = "internal")]
+pub const SYM_MODULE: [&str; 3] = ["rustc_span", "symbol", "sym"];
+#[cfg(feature = "internal")]
+pub const SYNTAX_CONTEXT: [&str; 3] = ["rustc_span", "hygiene", "SyntaxContext"];
+pub const TO_OWNED_METHOD: [&str; 4] = ["alloc", "borrow", "ToOwned", "to_owned"];
+pub const TO_STRING_METHOD: [&str; 4] = ["alloc", "string", "ToString", "to_string"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const TOKIO_IO_ASYNCREADEXT: [&str; 5] = ["tokio", "io", "util", "async_read_ext", "AsyncReadExt"];
+#[expect(clippy::invalid_paths)] // internal lints do not know about all external crates
+pub const TOKIO_IO_ASYNCWRITEEXT: [&str; 5] = ["tokio", "io", "util", "async_write_ext", "AsyncWriteExt"];
+pub const TRY_FROM: [&str; 4] = ["core", "convert", "TryFrom", "try_from"];
+pub const VEC_AS_MUT_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_mut_slice"];
+pub const VEC_AS_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_slice"];
+pub const VEC_DEQUE_ITER: [&str; 5] = ["alloc", "collections", "vec_deque", "VecDeque", "iter"];
+pub const VEC_FROM_ELEM: [&str; 3] = ["alloc", "vec", "from_elem"];
+pub const VEC_NEW: [&str; 4] = ["alloc", "vec", "Vec", "new"];
+pub const VEC_RESIZE: [&str; 4] = ["alloc", "vec", "Vec", "resize"];
+pub const WEAK_ARC: [&str; 3] = ["alloc", "sync", "Weak"];
+pub const WEAK_RC: [&str; 3] = ["alloc", "rc", "Weak"];
+pub const PTR_NON_NULL: [&str; 4] = ["core", "ptr", "non_null", "NonNull"];
diff --git a/src/tools/clippy/clippy_utils/src/ptr.rs b/src/tools/clippy/clippy_utils/src/ptr.rs
new file mode 100644
index 000000000..649b7b994
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/ptr.rs
@@ -0,0 +1,57 @@
+use crate::source::snippet;
+use crate::visitors::expr_visitor_no_bodies;
+use crate::{path_to_local_id, strip_pat_refs};
+use rustc_hir::intravisit::Visitor;
+use rustc_hir::{Body, BodyId, ExprKind, HirId, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+use std::borrow::Cow;
+
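+/// Collects `(span, suggestion)` pairs for method calls on the binding of the parameter at
+/// `idx` of `opt_body_id`, using the method-name/suffix pairs in `replacements`; returns
+/// `None` if `.capacity()` is called on that binding (a summary of the logic below).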
+pub fn get_spans(
+ cx: &LateContext<'_>,
+ opt_body_id: Option<BodyId>,
+ idx: usize,
+ replacements: &[(&'static str, &'static str)],
+) -> Option<Vec<(Span, Cow<'static, str>)>> {
+ if let Some(body) = opt_body_id.map(|id| cx.tcx.hir().body(id)) {
+ if let PatKind::Binding(_, binding_id, _, _) = strip_pat_refs(body.params[idx].pat).kind {
+ extract_clone_suggestions(cx, binding_id, replacements, body)
+ } else {
+ Some(vec![])
+ }
+ } else {
+ Some(vec![])
+ }
+}
+
+fn extract_clone_suggestions<'tcx>(
+ cx: &LateContext<'tcx>,
+ id: HirId,
+ replace: &[(&'static str, &'static str)],
+ body: &'tcx Body<'_>,
+) -> Option<Vec<(Span, Cow<'static, str>)>> {
+ let mut abort = false;
+ let mut spans = Vec::new();
+ expr_visitor_no_bodies(|expr| {
+ if abort {
+ return false;
+ }
+ if let ExprKind::MethodCall(seg, [recv], _) = expr.kind {
+ if path_to_local_id(recv, id) {
+ if seg.ident.name.as_str() == "capacity" {
+ abort = true;
+ return false;
+ }
+ for &(fn_name, suffix) in replace {
+ if seg.ident.name.as_str() == fn_name {
+ spans.push((expr.span, snippet(cx, recv.span, "_") + suffix));
+ return false;
+ }
+ }
+ }
+ }
+ !abort
+ })
+ .visit_body(body);
+ if abort { None } else { Some(spans) }
+}
diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
new file mode 100644
index 000000000..3bf75bcbe
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
@@ -0,0 +1,371 @@
+// This code used to be part of `rustc` but was moved to Clippy as a result of
+// https://github.com/rust-lang/rust/issues/76618. Because of that, it contains unused code and some
+// of the terminology might not be relevant in the context of Clippy. Note that its behavior might
+// also have diverged from the `rustc` version even though the name stays the same.
+
+use rustc_hir as hir;
+use rustc_hir::def_id::DefId;
+use rustc_middle::mir::{
+ Body, CastKind, NullOp, Operand, Place, ProjectionElem, Rvalue, Statement, StatementKind, Terminator,
+ TerminatorKind,
+};
+use rustc_middle::ty::subst::GenericArgKind;
+use rustc_middle::ty::{self, adjustment::PointerCast, Ty, TyCtxt};
+use rustc_semver::RustcVersion;
+use rustc_span::symbol::sym;
+use rustc_span::Span;
+use std::borrow::Cow;
+
+type McfResult = Result<(), (Span, Cow<'static, str>)>;
+
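+/// Checks whether `body` would qualify as a "minimal" `const fn` (optionally under the given
+/// MSRV); on failure this returns the offending span together with a human-readable reason.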
+pub fn is_min_const_fn<'a, 'tcx>(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, msrv: Option<RustcVersion>) -> McfResult {
+ let def_id = body.source.def_id();
+ let mut current = def_id;
+ loop {
+ let predicates = tcx.predicates_of(current);
+ for (predicate, _) in predicates.predicates {
+ match predicate.kind().skip_binder() {
+ ty::PredicateKind::RegionOutlives(_)
+ | ty::PredicateKind::TypeOutlives(_)
+ | ty::PredicateKind::WellFormed(_)
+ | ty::PredicateKind::Projection(_)
+ | ty::PredicateKind::ConstEvaluatable(..)
+ | ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Trait(..)
+ | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
+ ty::PredicateKind::ObjectSafe(_) => panic!("object safe predicate on function: {:#?}", predicate),
+ ty::PredicateKind::ClosureKind(..) => panic!("closure kind predicate on function: {:#?}", predicate),
+ ty::PredicateKind::Subtype(_) => panic!("subtype predicate on function: {:#?}", predicate),
+ ty::PredicateKind::Coerce(_) => panic!("coerce predicate on function: {:#?}", predicate),
+ }
+ }
+ match predicates.parent {
+ Some(parent) => current = parent,
+ None => break,
+ }
+ }
+
+ for local in &body.local_decls {
+ check_ty(tcx, local.ty, local.source_info.span)?;
+ }
+ // impl trait is gone in MIR, so check the return type manually
+ check_ty(
+ tcx,
+ tcx.fn_sig(def_id).output().skip_binder(),
+ body.local_decls.iter().next().unwrap().source_info.span,
+ )?;
+
+ for bb in body.basic_blocks() {
+ check_terminator(tcx, body, bb.terminator(), msrv)?;
+ for stmt in &bb.statements {
+ check_statement(tcx, body, def_id, stmt)?;
+ }
+ }
+ Ok(())
+}
+
+fn check_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, span: Span) -> McfResult {
+ for arg in ty.walk() {
+ let ty = match arg.unpack() {
+ GenericArgKind::Type(ty) => ty,
+
+ // No constraints on lifetimes or constants, except potentially
+ // constants' types, but `walk` will get to them as well.
+ GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
+ };
+
+ match ty.kind() {
+ ty::Ref(_, _, hir::Mutability::Mut) => {
+ return Err((span, "mutable references in const fn are unstable".into()));
+ },
+ ty::Opaque(..) => return Err((span, "`impl Trait` in const fn is unstable".into())),
+ ty::FnPtr(..) => {
+ return Err((span, "function pointers in const fn are unstable".into()));
+ },
+ ty::Dynamic(preds, _) => {
+ for pred in preds.iter() {
+ match pred.skip_binder() {
+ ty::ExistentialPredicate::AutoTrait(_) | ty::ExistentialPredicate::Projection(_) => {
+ return Err((
+ span,
+ "trait bounds other than `Sized` \
+ on const fn parameters are unstable"
+ .into(),
+ ));
+ },
+ ty::ExistentialPredicate::Trait(trait_ref) => {
+ if Some(trait_ref.def_id) != tcx.lang_items().sized_trait() {
+ return Err((
+ span,
+ "trait bounds other than `Sized` \
+ on const fn parameters are unstable"
+ .into(),
+ ));
+ }
+ },
+ }
+ }
+ },
+ _ => {},
+ }
+ }
+ Ok(())
+}
+
+fn check_rvalue<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ def_id: DefId,
+ rvalue: &Rvalue<'tcx>,
+ span: Span,
+) -> McfResult {
+ match rvalue {
+ Rvalue::ThreadLocalRef(_) => Err((span, "cannot access thread local storage in const fn".into())),
+ Rvalue::Len(place) | Rvalue::Discriminant(place) | Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) => {
+ check_place(tcx, *place, span, body)
+ },
+ Rvalue::CopyForDeref(place) => check_place(tcx, *place, span, body),
+ Rvalue::Repeat(operand, _)
+ | Rvalue::Use(operand)
+ | Rvalue::Cast(
+ CastKind::PointerFromExposedAddress
+ | CastKind::Misc
+ | CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
+ operand,
+ _,
+ ) => check_operand(tcx, operand, span, body),
+ Rvalue::Cast(
+ CastKind::Pointer(
+ PointerCast::UnsafeFnPointer | PointerCast::ClosureFnPointer(_) | PointerCast::ReifyFnPointer,
+ ),
+ _,
+ _,
+ ) => Err((span, "function pointer casts are not allowed in const fn".into())),
+ Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), op, cast_ty) => {
+ let pointee_ty = if let Some(deref_ty) = cast_ty.builtin_deref(true) {
+ deref_ty.ty
+ } else {
+ // We cannot allow this for now.
+ return Err((span, "unsizing casts are only allowed for references right now".into()));
+ };
+ let unsized_ty = tcx.struct_tail_erasing_lifetimes(pointee_ty, tcx.param_env(def_id));
+ if let ty::Slice(_) | ty::Str = unsized_ty.kind() {
+ check_operand(tcx, op, span, body)?;
+ // Casting/coercing things to slices is fine.
+ Ok(())
+ } else {
+ // We just can't allow trait objects until we have figured out trait method calls.
+ Err((span, "unsizing casts are not allowed in const fn".into()))
+ }
+ },
+ Rvalue::Cast(CastKind::PointerExposeAddress, _, _) => {
+ Err((span, "casting pointers to ints is unstable in const fn".into()))
+ },
+ // binops are fine on integers
+ Rvalue::BinaryOp(_, box (lhs, rhs)) | Rvalue::CheckedBinaryOp(_, box (lhs, rhs)) => {
+ check_operand(tcx, lhs, span, body)?;
+ check_operand(tcx, rhs, span, body)?;
+ let ty = lhs.ty(body, tcx);
+ if ty.is_integral() || ty.is_bool() || ty.is_char() {
+ Ok(())
+ } else {
+ Err((
+ span,
+ "only int, `bool` and `char` operations are stable in const fn".into(),
+ ))
+ }
+ },
+ Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) | Rvalue::ShallowInitBox(_, _) => Ok(()),
+ Rvalue::UnaryOp(_, operand) => {
+ let ty = operand.ty(body, tcx);
+ if ty.is_integral() || ty.is_bool() {
+ check_operand(tcx, operand, span, body)
+ } else {
+ Err((span, "only int and `bool` operations are stable in const fn".into()))
+ }
+ },
+ Rvalue::Aggregate(_, operands) => {
+ for operand in operands {
+ check_operand(tcx, operand, span, body)?;
+ }
+ Ok(())
+ },
+ }
+}
+
+fn check_statement<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ def_id: DefId,
+ statement: &Statement<'tcx>,
+) -> McfResult {
+ let span = statement.source_info.span;
+ match &statement.kind {
+ StatementKind::Assign(box (place, rval)) => {
+ check_place(tcx, *place, span, body)?;
+ check_rvalue(tcx, body, def_id, rval, span)
+ },
+
+ StatementKind::FakeRead(box (_, place)) => check_place(tcx, *place, span, body),
+ // just an assignment
+ StatementKind::SetDiscriminant { place, .. } | StatementKind::Deinit(place) => {
+ check_place(tcx, **place, span, body)
+ },
+
+ StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping { dst, src, count }) => {
+ check_operand(tcx, dst, span, body)?;
+ check_operand(tcx, src, span, body)?;
+ check_operand(tcx, count, span, body)
+ },
+ // These are all NOPs
+ StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Retag { .. }
+ | StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
+ | StatementKind::Nop => Ok(()),
+ }
+}
+
+fn check_operand<'tcx>(tcx: TyCtxt<'tcx>, operand: &Operand<'tcx>, span: Span, body: &Body<'tcx>) -> McfResult {
+ match operand {
+ Operand::Move(place) | Operand::Copy(place) => check_place(tcx, *place, span, body),
+ Operand::Constant(c) => match c.check_static_ptr(tcx) {
+ Some(_) => Err((span, "cannot access `static` items in const fn".into())),
+ None => Ok(()),
+ },
+ }
+}
+
+fn check_place<'tcx>(tcx: TyCtxt<'tcx>, place: Place<'tcx>, span: Span, body: &Body<'tcx>) -> McfResult {
+ let mut cursor = place.projection.as_ref();
+ while let [ref proj_base @ .., elem] = *cursor {
+ cursor = proj_base;
+ match elem {
+ ProjectionElem::Field(..) => {
+ let base_ty = Place::ty_from(place.local, proj_base, body, tcx).ty;
+ if let Some(def) = base_ty.ty_adt_def() {
+ // No union field accesses in `const fn`
+ if def.is_union() {
+ return Err((span, "accessing union fields is unstable".into()));
+ }
+ }
+ },
+ ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Downcast(..)
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::Deref
+ | ProjectionElem::Index(_) => {},
+ }
+ }
+
+ Ok(())
+}
+
+fn check_terminator<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ terminator: &Terminator<'tcx>,
+ msrv: Option<RustcVersion>,
+) -> McfResult {
+ let span = terminator.source_info.span;
+ match &terminator.kind {
+ TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. }
+ | TerminatorKind::Goto { .. }
+ | TerminatorKind::Return
+ | TerminatorKind::Resume
+ | TerminatorKind::Unreachable => Ok(()),
+
+ TerminatorKind::Drop { place, .. } => check_place(tcx, *place, span, body),
+ TerminatorKind::DropAndReplace { place, value, .. } => {
+ check_place(tcx, *place, span, body)?;
+ check_operand(tcx, value, span, body)
+ },
+
+ TerminatorKind::SwitchInt {
+ discr,
+ switch_ty: _,
+ targets: _,
+ } => check_operand(tcx, discr, span, body),
+
+ TerminatorKind::Abort => Err((span, "abort is not stable in const fn".into())),
+ TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
+ Err((span, "const fn generators are unstable".into()))
+ },
+
+ TerminatorKind::Call {
+ func,
+ args,
+ from_hir_call: _,
+ destination: _,
+ target: _,
+ cleanup: _,
+ fn_span: _,
+ } => {
+ let fn_ty = func.ty(body, tcx);
+ if let ty::FnDef(fn_def_id, _) = *fn_ty.kind() {
+ if !is_const_fn(tcx, fn_def_id, msrv) {
+ return Err((
+ span,
+ format!(
+ "can only call other `const fn` within a `const fn`, \
+ but `{:?}` is not stable as `const fn`",
+ func,
+ )
+ .into(),
+ ));
+ }
+
+ // HACK: This is to "unstabilize" the `transmute` intrinsic
+ // within const fns. `transmute` is allowed in all other const contexts.
+ // This won't really scale to more intrinsics or functions. Let's allow const
+ // transmutes in const fn before we add more hacks to this.
+ if tcx.is_intrinsic(fn_def_id) && tcx.item_name(fn_def_id) == sym::transmute {
+ return Err((
+ span,
+ "can only call `transmute` from const items, not `const fn`".into(),
+ ));
+ }
+
+ check_operand(tcx, func, span, body)?;
+
+ for arg in args {
+ check_operand(tcx, arg, span, body)?;
+ }
+ Ok(())
+ } else {
+ Err((span, "can only call other const fns within const fn".into()))
+ }
+ },
+
+ TerminatorKind::Assert {
+ cond,
+ expected: _,
+ msg: _,
+ target: _,
+ cleanup: _,
+ } => check_operand(tcx, cond, span, body),
+
+ TerminatorKind::InlineAsm { .. } => Err((span, "cannot use inline assembly in const fn".into())),
+ }
+}
+
+fn is_const_fn(tcx: TyCtxt<'_>, def_id: DefId, msrv: Option<RustcVersion>) -> bool {
+ tcx.is_const_fn(def_id)
+ && tcx.lookup_const_stability(def_id).map_or(true, |const_stab| {
+ if let rustc_attr::StabilityLevel::Stable { since, .. } = const_stab.level {
+ // Checking the MSRV manually is necessary because `rustc` has no such concept. This entire
+ // function could be removed if `rustc` provided an MSRV-aware version of `is_const_fn`
+ // as part of the not-yet-implemented MSRV check https://github.com/rust-lang/rust/issues/65262.
+ crate::meets_msrv(
+ msrv,
+ RustcVersion::parse(since.as_str())
+ .expect("`rustc_attr::StabilityLevel::Stable::since` is ill-formatted"),
+ )
+ } else {
+ // Unstable const fn with the feature enabled.
+ msrv.is_none()
+ }
+ })
+}
diff --git a/src/tools/clippy/clippy_utils/src/source.rs b/src/tools/clippy/clippy_utils/src/source.rs
new file mode 100644
index 000000000..1197fe914
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/source.rs
@@ -0,0 +1,508 @@
+//! Utils for extracting, inspecting or transforming source code
+
+#![allow(clippy::module_name_repetitions)]
+
+use crate::line_span;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LintContext};
+use rustc_span::hygiene;
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, Pos, Span, SpanData, SyntaxContext};
+use std::borrow::Cow;
+
+/// Checks if the span starts with the given text. This will return false if the span crosses
+/// multiple files or if source is not available.
+///
+/// This is used to check for proc macros giving unhelpful spans to things.
+pub fn span_starts_with<T: LintContext>(cx: &T, span: Span, text: &str) -> bool {
+ fn helper(sm: &SourceMap, span: Span, text: &str) -> bool {
+ let pos = sm.lookup_byte_offset(span.lo());
+ let Some(ref src) = pos.sf.src else {
+ return false;
+ };
+ let end = span.hi() - pos.sf.start_pos;
+ src.get(pos.pos.0 as usize..end.0 as usize)
+ // Expression spans can include wrapping parenthesis. Remove them first.
+ .map_or(false, |s| s.trim_start_matches('(').starts_with(text))
+ }
+ helper(cx.sess().source_map(), span, text)
+}
+
+/// Like `snippet_block`, but adds braces if the expr is not an `ExprKind::Block`.
+/// Also takes an `Option<String>` which can be put inside the braces.
+pub fn expr_block<'a, T: LintContext>(
+ cx: &T,
+ expr: &Expr<'_>,
+ option: Option<String>,
+ default: &'a str,
+ indent_relative_to: Option<Span>,
+) -> Cow<'a, str> {
+ let code = snippet_block(cx, expr.span, default, indent_relative_to);
+ let string = option.unwrap_or_default();
+ if expr.span.from_expansion() {
+ Cow::Owned(format!("{{ {} }}", snippet_with_macro_callsite(cx, expr.span, default)))
+ } else if let ExprKind::Block(_, _) = expr.kind {
+ Cow::Owned(format!("{}{}", code, string))
+ } else if string.is_empty() {
+ Cow::Owned(format!("{{ {} }}", code))
+ } else {
+ Cow::Owned(format!("{{\n{};\n{}\n}}", code, string))
+ }
+}
+
+/// Returns a new Span that extends the original Span to the first non-whitespace char of the first
+/// line.
+///
+/// ```rust,ignore
+/// let x = ();
+/// // ^^
+/// // will be converted to
+/// let x = ();
+/// // ^^^^^^^^^^
+/// ```
+pub fn first_line_of_span<T: LintContext>(cx: &T, span: Span) -> Span {
+ first_char_in_first_line(cx, span).map_or(span, |first_char_pos| span.with_lo(first_char_pos))
+}
+
+fn first_char_in_first_line<T: LintContext>(cx: &T, span: Span) -> Option<BytePos> {
+ let line_span = line_span(cx, span);
+ snippet_opt(cx, line_span).and_then(|snip| {
+ snip.find(|c: char| !c.is_whitespace())
+ .map(|pos| line_span.lo() + BytePos::from_usize(pos))
+ })
+}
+
+/// Returns the indentation of the line of a span
+///
+/// ```rust,ignore
+/// let x = ();
+/// // ^^ -- will return 0
+/// let x = ();
+/// // ^^ -- will return 4
+/// ```
+pub fn indent_of<T: LintContext>(cx: &T, span: Span) -> Option<usize> {
+ snippet_opt(cx, line_span(cx, span)).and_then(|snip| snip.find(|c: char| !c.is_whitespace()))
+}
+
+/// Gets a snippet of the indentation of the line of a span
+pub fn snippet_indent<T: LintContext>(cx: &T, span: Span) -> Option<String> {
+ snippet_opt(cx, line_span(cx, span)).map(|mut s| {
+ let len = s.len() - s.trim_start().len();
+ s.truncate(len);
+ s
+ })
+}
+
+// If the snippet is empty, it's an attribute that was inserted during macro
+// expansion and we want to ignore those, because they could come from external
+// sources that the user has no control over.
+// For some reason these attributes don't have any expansion info on them, so
+// we have to check it this way until there is a better way.
+pub fn is_present_in_source<T: LintContext>(cx: &T, span: Span) -> bool {
+ if let Some(snippet) = snippet_opt(cx, span) {
+ if snippet.is_empty() {
+ return false;
+ }
+ }
+ true
+}
+
+/// Returns the position just before rarrow
+///
+/// ```rust,ignore
+/// fn into(self) -> () {}
+/// ^
+/// // in case of unformatted code
+/// fn into2(self)-> () {}
+/// ^
+/// fn into3(self) -> () {}
+/// ^
+/// ```
+pub fn position_before_rarrow(s: &str) -> Option<usize> {
+ s.rfind("->").map(|rpos| {
+ let mut rpos = rpos;
+ let chars: Vec<char> = s.chars().collect();
+ while rpos > 1 {
+ if let Some(c) = chars.get(rpos - 1) {
+ if c.is_whitespace() {
+ rpos -= 1;
+ continue;
+ }
+ }
+ break;
+ }
+ rpos
+ })
+}
+
+/// Reindents a multiline string, optionally ignoring the first line.
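+///
+/// A rough sketch: `reindent_multiline("    a\n        b".into(), false, Some(0))` should
+/// yield `"a\n    b"`; see the `test_reindent_multiline_*` tests at the bottom of this file.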
+#[expect(clippy::needless_pass_by_value)]
+pub fn reindent_multiline(s: Cow<'_, str>, ignore_first: bool, indent: Option<usize>) -> Cow<'_, str> {
+ let s_space = reindent_multiline_inner(&s, ignore_first, indent, ' ');
+ let s_tab = reindent_multiline_inner(&s_space, ignore_first, indent, '\t');
+ reindent_multiline_inner(&s_tab, ignore_first, indent, ' ').into()
+}
+
+fn reindent_multiline_inner(s: &str, ignore_first: bool, indent: Option<usize>, ch: char) -> String {
+ let x = s
+ .lines()
+ .skip(usize::from(ignore_first))
+ .filter_map(|l| {
+ if l.is_empty() {
+ None
+ } else {
+ // ignore empty lines
+ Some(l.char_indices().find(|&(_, x)| x != ch).unwrap_or((l.len(), ch)).0)
+ }
+ })
+ .min()
+ .unwrap_or(0);
+ let indent = indent.unwrap_or(0);
+ s.lines()
+ .enumerate()
+ .map(|(i, l)| {
+ if (ignore_first && i == 0) || l.is_empty() {
+ l.to_owned()
+ } else if x > indent {
+ l.split_at(x - indent).1.to_owned()
+ } else {
+ " ".repeat(indent - x) + l
+ }
+ })
+ .collect::<Vec<String>>()
+ .join("\n")
+}
+
+/// Converts a span to a code snippet if available, otherwise returns the default.
+///
+/// This is useful if you want to provide suggestions for your lint or more generally, if you want
+/// to convert a given `Span` to a `str`. To create suggestions consider using
+/// [`snippet_with_applicability`] to ensure that the applicability stays correct.
+///
+/// # Example
+/// ```rust,ignore
+/// // Given two spans one for `value` and one for the `init` expression.
+/// let value = Vec::new();
+/// // ^^^^^ ^^^^^^^^^^
+/// // span1 span2
+///
+/// // The snipped call would return the corresponding code snippet
+/// snippet(cx, span1, "..") // -> "value"
+/// snippet(cx, span2, "..") // -> "Vec::new()"
+/// ```
+pub fn snippet<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
+ snippet_opt(cx, span).map_or_else(|| Cow::Borrowed(default), From::from)
+}
+
+/// Same as [`snippet`], but it adapts the applicability level by following rules:
+///
+/// - Applicability level `Unspecified` will never be changed.
+/// - If the span is inside a macro, change the applicability level to `MaybeIncorrect`.
+/// - If the default value is used and the applicability level is `MachineApplicable`, change it to
+/// `HasPlaceholders`
+pub fn snippet_with_applicability<'a, T: LintContext>(
+ cx: &T,
+ span: Span,
+ default: &'a str,
+ applicability: &mut Applicability,
+) -> Cow<'a, str> {
+ if *applicability != Applicability::Unspecified && span.from_expansion() {
+ *applicability = Applicability::MaybeIncorrect;
+ }
+ snippet_opt(cx, span).map_or_else(
+ || {
+ if *applicability == Applicability::MachineApplicable {
+ *applicability = Applicability::HasPlaceholders;
+ }
+ Cow::Borrowed(default)
+ },
+ From::from,
+ )
+}
+
+/// Same as `snippet`, but should only be used when it's clear that the input span is
+/// not a macro argument.
+pub fn snippet_with_macro_callsite<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
+ snippet(cx, span.source_callsite(), default)
+}
+
+/// Converts a span to a code snippet. Returns `None` if not available.
+pub fn snippet_opt<T: LintContext>(cx: &T, span: Span) -> Option<String> {
+ cx.sess().source_map().span_to_snippet(span).ok()
+}
+
+/// Converts a span (from a block) to a code snippet if available, otherwise use default.
+///
+/// This trims the code of indentation, except for the first line. Use it for blocks or block-like
+/// things which need to be printed as such.
+///
+/// The `indent_relative_to` arg can be used to provide a span whose indentation the
+/// resulting snippet of the given span is aligned to.
+///
+/// # Example
+///
+/// ```rust,ignore
+/// snippet_block(cx, block.span, "..", None)
+/// // where, `block` is the block of the if expr
+/// if x {
+/// y;
+/// }
+/// // will return the snippet
+/// {
+/// y;
+/// }
+/// ```
+///
+/// ```rust,ignore
+/// snippet_block(cx, block.span, "..", Some(if_expr.span))
+/// // where, `block` is the block of the if expr
+/// if x {
+/// y;
+/// }
+/// // will return the snippet
+/// {
+/// y;
+/// } // aligned with `if`
+/// ```
+/// Note that the first line of the snippet always has 0 indentation.
+pub fn snippet_block<'a, T: LintContext>(
+ cx: &T,
+ span: Span,
+ default: &'a str,
+ indent_relative_to: Option<Span>,
+) -> Cow<'a, str> {
+ let snip = snippet(cx, span, default);
+ let indent = indent_relative_to.and_then(|s| indent_of(cx, s));
+ reindent_multiline(snip, true, indent)
+}
+
+/// Same as `snippet_block`, but adapts the applicability level by the rules of
+/// `snippet_with_applicability`.
+pub fn snippet_block_with_applicability<'a, T: LintContext>(
+ cx: &T,
+ span: Span,
+ default: &'a str,
+ indent_relative_to: Option<Span>,
+ applicability: &mut Applicability,
+) -> Cow<'a, str> {
+ let snip = snippet_with_applicability(cx, span, default, applicability);
+ let indent = indent_relative_to.and_then(|s| indent_of(cx, s));
+ reindent_multiline(snip, true, indent)
+}
+
+/// Same as `snippet_with_applicability`, but first walks the span up to the given context. This
+/// will result in the macro call, rather than the expansion, if the span is from a child context.
+/// If the span is not from a child context, it will be used directly instead.
+///
+/// e.g. Given the expression `&vec![]`, getting a snippet from the span for `vec![]` as a HIR node
+/// would result in `box []`. If given the context of the address-of expression, this function will
+/// correctly get a snippet of `vec![]`.
+///
+/// This will also return whether or not the snippet is a macro call.
+pub fn snippet_with_context<'a>(
+ cx: &LateContext<'_>,
+ span: Span,
+ outer: SyntaxContext,
+ default: &'a str,
+ applicability: &mut Applicability,
+) -> (Cow<'a, str>, bool) {
+ let (span, is_macro_call) = walk_span_to_context(span, outer).map_or_else(
+ || {
+ // The span is from a macro argument, and the outer context is the macro using the argument
+ if *applicability != Applicability::Unspecified {
+ *applicability = Applicability::MaybeIncorrect;
+ }
+ // TODO: get the argument span.
+ (span, false)
+ },
+ |outer_span| (outer_span, span.ctxt() != outer),
+ );
+
+ (
+ snippet_with_applicability(cx, span, default, applicability),
+ is_macro_call,
+ )
+}
+
+/// Walks the span up to the target context, thereby returning the macro call site if the span is
+/// inside a macro expansion, or the original span if it is not. Note this will return `None`
+/// if the span is from a macro expansion but the target context comes from expanding a macro
+/// argument.
+///
+/// Given the following
+///
+/// ```rust,ignore
+/// macro_rules! m { ($e:expr) => { f($e) }; }
+/// g(m!(0))
+/// ```
+///
+/// If called with a span of the call to `f` and a context of the call to `g` this will return a
+/// span containing `m!(0)`. However, if called with a span of the literal `0` this will give a span
+/// containing `0` as the context is the same as the outer context.
+///
+/// This will traverse through multiple macro calls. Given the following:
+///
+/// ```rust,ignore
+/// macro_rules! m { ($e:expr) => { n!($e, 0) }; }
+/// macro_rules! n { ($e:expr, $f:expr) => { f($e, $f) }; }
+/// g(m!(0))
+/// ```
+///
+/// If called with a span of the call to `f` and a context of the call to `g` this will return a
+/// span containing `m!(0)`.
+pub fn walk_span_to_context(span: Span, outer: SyntaxContext) -> Option<Span> {
+ let outer_span = hygiene::walk_chain(span, outer);
+ (outer_span.ctxt() == outer).then_some(outer_span)
+}
+
+/// Removes block comments from the given `Vec` of lines.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// without_block_comments(vec!["/*", "foo", "*/"]);
+/// // => vec![]
+///
+/// without_block_comments(vec!["bar", "/*", "foo", "*/"]);
+/// // => vec!["bar"]
+/// ```
+pub fn without_block_comments(lines: Vec<&str>) -> Vec<&str> {
+ let mut without = vec![];
+
+ let mut nest_level = 0;
+
+ for line in lines {
+ if line.contains("/*") {
+ nest_level += 1;
+ continue;
+ } else if line.contains("*/") {
+ nest_level -= 1;
+ continue;
+ }
+
+ if nest_level == 0 {
+ without.push(line);
+ }
+ }
+
+ without
+}
+
+/// Trims the whitespace from the start and the end of the span.
+pub fn trim_span(sm: &SourceMap, span: Span) -> Span {
+ let data = span.data();
+ let sf: &_ = &sm.lookup_source_file(data.lo);
+ let Some(src) = sf.src.as_deref() else {
+ return span;
+ };
+ let Some(snip) = &src.get((data.lo - sf.start_pos).to_usize()..(data.hi - sf.start_pos).to_usize()) else {
+ return span;
+ };
+ let trim_start = snip.len() - snip.trim_start().len();
+ let trim_end = snip.len() - snip.trim_end().len();
+ SpanData {
+ lo: data.lo + BytePos::from_usize(trim_start),
+ hi: data.hi - BytePos::from_usize(trim_end),
+ ctxt: data.ctxt,
+ parent: data.parent,
+ }
+ .span()
+}
+
+#[cfg(test)]
+mod test {
+ use super::{reindent_multiline, without_block_comments};
+
+ #[test]
+ fn test_reindent_multiline_single_line() {
+ assert_eq!("", reindent_multiline("".into(), false, None));
+ assert_eq!("...", reindent_multiline("...".into(), false, None));
+ assert_eq!("...", reindent_multiline(" ...".into(), false, None));
+ assert_eq!("...", reindent_multiline("\t...".into(), false, None));
+ assert_eq!("...", reindent_multiline("\t\t...".into(), false, None));
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn test_reindent_multiline_block() {
+ assert_eq!("\
+ if x {
+ y
+ } else {
+ z
+ }", reindent_multiline(" if x {
+ y
+ } else {
+ z
+ }".into(), false, None));
+ assert_eq!("\
+ if x {
+ \ty
+ } else {
+ \tz
+ }", reindent_multiline(" if x {
+ \ty
+ } else {
+ \tz
+ }".into(), false, None));
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn test_reindent_multiline_empty_line() {
+ assert_eq!("\
+ if x {
+ y
+
+ } else {
+ z
+ }", reindent_multiline(" if x {
+ y
+
+ } else {
+ z
+ }".into(), false, None));
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn test_reindent_multiline_lines_deeper() {
+ assert_eq!("\
+ if x {
+ y
+ } else {
+ z
+ }", reindent_multiline("\
+ if x {
+ y
+ } else {
+ z
+ }".into(), true, Some(8)));
+ }
+
+ #[test]
+ fn test_without_block_comments_lines_without_block_comments() {
+ let result = without_block_comments(vec!["/*", "", "*/"]);
+ println!("result: {:?}", result);
+ assert!(result.is_empty());
+
+ let result = without_block_comments(vec!["", "/*", "", "*/", "#[crate_type = \"lib\"]", "/*", "", "*/", ""]);
+ assert_eq!(result, vec!["", "#[crate_type = \"lib\"]", ""]);
+
+ let result = without_block_comments(vec!["/* rust", "", "*/"]);
+ assert!(result.is_empty());
+
+ let result = without_block_comments(vec!["/* one-line comment */"]);
+ assert!(result.is_empty());
+
+ let result = without_block_comments(vec!["/* nested", "/* multi-line", "comment", "*/", "test", "*/"]);
+ assert!(result.is_empty());
+
+ let result = without_block_comments(vec!["/* nested /* inline /* comment */ test */ */"]);
+ assert!(result.is_empty());
+
+ let result = without_block_comments(vec!["foo", "bar", "baz"]);
+ assert_eq!(result, vec!["foo", "bar", "baz"]);
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/str_utils.rs b/src/tools/clippy/clippy_utils/src/str_utils.rs
new file mode 100644
index 000000000..03a9d3c25
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/str_utils.rs
@@ -0,0 +1,325 @@
+/// Dealing with string indices can be hard; this struct ensures that both the
+/// character and byte index are provided for correct indexing.
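+/// e.g. in `"öaö"` the second `ö` has a `char_index` of 2 but a `byte_index` of 3.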
+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct StrIndex {
+ pub char_index: usize,
+ pub byte_index: usize,
+}
+
+impl StrIndex {
+ pub fn new(char_index: usize, byte_index: usize) -> Self {
+ Self { char_index, byte_index }
+ }
+}
+
+/// Returns the index of the character after the first camel-case component of `s`.
+///
+/// ```
+/// # use clippy_utils::str_utils::{camel_case_until, StrIndex};
+/// assert_eq!(camel_case_until("AbcDef"), StrIndex::new(6, 6));
+/// assert_eq!(camel_case_until("ABCD"), StrIndex::new(0, 0));
+/// assert_eq!(camel_case_until("AbcDD"), StrIndex::new(3, 3));
+/// assert_eq!(camel_case_until("Abc\u{f6}\u{f6}DD"), StrIndex::new(5, 7));
+/// ```
+#[must_use]
+pub fn camel_case_until(s: &str) -> StrIndex {
+ let mut iter = s.char_indices().enumerate();
+ if let Some((_char_index, (_, first))) = iter.next() {
+ if !first.is_uppercase() {
+ return StrIndex::new(0, 0);
+ }
+ } else {
+ return StrIndex::new(0, 0);
+ }
+ let mut up = true;
+ let mut last_index = StrIndex::new(0, 0);
+ for (char_index, (byte_index, c)) in iter {
+ if up {
+ if c.is_lowercase() {
+ up = false;
+ } else {
+ return last_index;
+ }
+ } else if c.is_uppercase() {
+ up = true;
+ last_index.byte_index = byte_index;
+ last_index.char_index = char_index;
+ } else if !c.is_lowercase() {
+ return StrIndex::new(char_index, byte_index);
+ }
+ }
+
+ if up {
+ last_index
+ } else {
+ StrIndex::new(s.chars().count(), s.len())
+ }
+}
+
+/// Returns the index of the first camel-case component of `s`.
+///
+/// ```
+/// # use clippy_utils::str_utils::{camel_case_start, StrIndex};
+/// assert_eq!(camel_case_start("AbcDef"), StrIndex::new(0, 0));
+/// assert_eq!(camel_case_start("abcDef"), StrIndex::new(3, 3));
+/// assert_eq!(camel_case_start("ABCD"), StrIndex::new(4, 4));
+/// assert_eq!(camel_case_start("abcd"), StrIndex::new(4, 4));
+/// assert_eq!(camel_case_start("\u{f6}\u{f6}cd"), StrIndex::new(4, 6));
+/// ```
+#[must_use]
+pub fn camel_case_start(s: &str) -> StrIndex {
+ camel_case_start_from_idx(s, 0)
+}
+
+/// Returns the `StrIndex` of the first camel-case component of `s` that starts at or after `start_idx`.
+///
+/// ```
+/// # use clippy_utils::str_utils::{camel_case_start_from_idx, StrIndex};
+/// assert_eq!(camel_case_start_from_idx("AbcDef", 0), StrIndex::new(0, 0));
+/// assert_eq!(camel_case_start_from_idx("AbcDef", 1), StrIndex::new(3, 3));
+/// assert_eq!(camel_case_start_from_idx("AbcDefGhi", 0), StrIndex::new(0, 0));
+/// assert_eq!(camel_case_start_from_idx("AbcDefGhi", 1), StrIndex::new(3, 3));
+/// assert_eq!(camel_case_start_from_idx("Abcdefg", 1), StrIndex::new(7, 7));
+/// ```
+pub fn camel_case_start_from_idx(s: &str, start_idx: usize) -> StrIndex {
+ let char_count = s.chars().count();
+ let range = 0..char_count;
+ let mut iter = range.rev().zip(s.char_indices().rev());
+ if let Some((_, (_, first))) = iter.next() {
+ if !first.is_lowercase() {
+ return StrIndex::new(char_count, s.len());
+ }
+ } else {
+ return StrIndex::new(char_count, s.len());
+ }
+
+ let mut down = true;
+ let mut last_index = StrIndex::new(char_count, s.len());
+ for (char_index, (byte_index, c)) in iter {
+ if byte_index < start_idx {
+ break;
+ }
+ if down {
+ if c.is_uppercase() {
+ down = false;
+ last_index.byte_index = byte_index;
+ last_index.char_index = char_index;
+ } else if !c.is_lowercase() {
+ return last_index;
+ }
+ } else if c.is_lowercase() {
+ down = true;
+ } else if c.is_uppercase() {
+ last_index.byte_index = byte_index;
+ last_index.char_index = char_index;
+ } else {
+ return last_index;
+ }
+ }
+
+ last_index
+}
+
+/// Gets the indices of the camel-case components of a string `s`.
+///
+/// ```
+/// # use clippy_utils::str_utils::{camel_case_indices, StrIndex};
+/// assert_eq!(
+/// camel_case_indices("AbcDef"),
+/// vec![StrIndex::new(0, 0), StrIndex::new(3, 3), StrIndex::new(6, 6)]
+/// );
+/// assert_eq!(
+/// camel_case_indices("abcDef"),
+/// vec![StrIndex::new(3, 3), StrIndex::new(6, 6)]
+/// );
+/// ```
+pub fn camel_case_indices(s: &str) -> Vec<StrIndex> {
+ let mut result = Vec::new();
+ let mut str_idx = camel_case_start(s);
+
+ while str_idx.byte_index < s.len() {
+ let next_idx = str_idx.byte_index + 1;
+ result.push(str_idx);
+ str_idx = camel_case_start_from_idx(s, next_idx);
+ }
+ result.push(str_idx);
+
+ result
+}
+
+/// Splits a camel-case string into a vector of its components.
+///
+/// ```
+/// # use clippy_utils::str_utils::{camel_case_split, StrIndex};
+/// assert_eq!(camel_case_split("AbcDef"), vec!["Abc", "Def"]);
+/// ```
+pub fn camel_case_split(s: &str) -> Vec<&str> {
+ let mut offsets = camel_case_indices(s)
+ .iter()
+ .map(|e| e.byte_index)
+ .collect::<Vec<usize>>();
+ if offsets[0] != 0 {
+ offsets.insert(0, 0);
+ }
+
+ offsets.windows(2).map(|w| &s[w[0]..w[1]]).collect()
+}
+
+/// Dealing with string comparison can be complicated; this struct ensures that both the
+/// character and byte count are provided for correct indexing.
+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct StrCount {
+ pub char_count: usize,
+ pub byte_count: usize,
+}
+
+impl StrCount {
+ pub fn new(char_count: usize, byte_count: usize) -> Self {
+ Self { char_count, byte_count }
+ }
+}
+
+/// Returns the number of chars and bytes that match from the start
+///
+/// ```
+/// # use clippy_utils::str_utils::{count_match_start, StrCount};
+/// assert_eq!(count_match_start("hello_mouse", "hello_penguin"), StrCount::new(6, 6));
+/// assert_eq!(count_match_start("hello_clippy", "bye_bugs"), StrCount::new(0, 0));
+/// assert_eq!(count_match_start("hello_world", "hello_world"), StrCount::new(11, 11));
+/// assert_eq!(count_match_start("T\u{f6}ffT\u{f6}ff", "T\u{f6}ff"), StrCount::new(4, 5));
+/// ```
+#[must_use]
+pub fn count_match_start(str1: &str, str2: &str) -> StrCount {
+ // (char_index, char1)
+ let char_count = str1.chars().count();
+ let iter1 = (0..=char_count).zip(str1.chars());
+ // (byte_index, char2)
+ let iter2 = str2.char_indices();
+
+ iter1
+ .zip(iter2)
+ .take_while(|((_, c1), (_, c2))| c1 == c2)
+ .last()
+ .map_or_else(StrCount::default, |((char_index, _), (byte_index, character))| {
+ StrCount::new(char_index + 1, byte_index + character.len_utf8())
+ })
+}
+
+/// Returns the number of chars and bytes that match from the end
+///
+/// ```
+/// # use clippy_utils::str_utils::{count_match_end, StrCount};
+/// assert_eq!(count_match_end("hello_cat", "bye_cat"), StrCount::new(4, 4));
+/// assert_eq!(count_match_end("if_item_thing", "enum_value"), StrCount::new(0, 0));
+/// assert_eq!(count_match_end("Clippy", "Clippy"), StrCount::new(6, 6));
+/// assert_eq!(count_match_end("MyT\u{f6}ff", "YourT\u{f6}ff"), StrCount::new(4, 5));
+/// ```
+#[must_use]
+pub fn count_match_end(str1: &str, str2: &str) -> StrCount {
+ let char_count = str1.chars().count();
+ if char_count == 0 {
+ return StrCount::default();
+ }
+
+ // (char_index, char1)
+ let iter1 = (0..char_count).rev().zip(str1.chars().rev());
+ // (byte_index, char2)
+ let byte_count = str2.len();
+ let iter2 = str2.char_indices().rev();
+
+ iter1
+ .zip(iter2)
+ .take_while(|((_, c1), (_, c2))| c1 == c2)
+ .last()
+ .map_or_else(StrCount::default, |((char_index, _), (byte_index, _))| {
+ StrCount::new(char_count - char_index, byte_count - byte_index)
+ })
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn camel_case_start_full() {
+ assert_eq!(camel_case_start("AbcDef"), StrIndex::new(0, 0));
+ assert_eq!(camel_case_start("Abc"), StrIndex::new(0, 0));
+ assert_eq!(camel_case_start("ABcd"), StrIndex::new(0, 0));
+ assert_eq!(camel_case_start("ABcdEf"), StrIndex::new(0, 0));
+ assert_eq!(camel_case_start("AabABcd"), StrIndex::new(0, 0));
+ }
+
+ #[test]
+ fn camel_case_start_partial() {
+ assert_eq!(camel_case_start("abcDef"), StrIndex::new(3, 3));
+ assert_eq!(camel_case_start("aDbc"), StrIndex::new(1, 1));
+ assert_eq!(camel_case_start("aabABcd"), StrIndex::new(3, 3));
+ assert_eq!(camel_case_start("\u{f6}\u{f6}AabABcd"), StrIndex::new(2, 4));
+ }
+
+ #[test]
+ fn camel_case_start_not() {
+ assert_eq!(camel_case_start("AbcDef_"), StrIndex::new(7, 7));
+ assert_eq!(camel_case_start("AbcDD"), StrIndex::new(5, 5));
+ assert_eq!(camel_case_start("all_small"), StrIndex::new(9, 9));
+ assert_eq!(camel_case_start("\u{f6}_all_small"), StrIndex::new(11, 12));
+ }
+
+ #[test]
+ fn camel_case_start_caps() {
+ assert_eq!(camel_case_start("ABCD"), StrIndex::new(4, 4));
+ }
+
+ #[test]
+ fn camel_case_until_full() {
+ assert_eq!(camel_case_until("AbcDef"), StrIndex::new(6, 6));
+ assert_eq!(camel_case_until("Abc"), StrIndex::new(3, 3));
+ assert_eq!(camel_case_until("Abc\u{f6}\u{f6}\u{f6}"), StrIndex::new(6, 9));
+ }
+
+ #[test]
+ fn camel_case_until_not() {
+ assert_eq!(camel_case_until("abcDef"), StrIndex::new(0, 0));
+ assert_eq!(camel_case_until("aDbc"), StrIndex::new(0, 0));
+ }
+
+ #[test]
+ fn camel_case_until_partial() {
+ assert_eq!(camel_case_until("AbcDef_"), StrIndex::new(6, 6));
+ assert_eq!(camel_case_until("CallTypeC"), StrIndex::new(8, 8));
+ assert_eq!(camel_case_until("AbcDD"), StrIndex::new(3, 3));
+ assert_eq!(camel_case_until("Abc\u{f6}\u{f6}DD"), StrIndex::new(5, 7));
+ }
+
+ #[test]
+ fn until_caps() {
+ assert_eq!(camel_case_until("ABCD"), StrIndex::new(0, 0));
+ }
+
+ #[test]
+ fn camel_case_start_from_idx_full() {
+ assert_eq!(camel_case_start_from_idx("AbcDef", 0), StrIndex::new(0, 0));
+ assert_eq!(camel_case_start_from_idx("AbcDef", 1), StrIndex::new(3, 3));
+ assert_eq!(camel_case_start_from_idx("AbcDef", 4), StrIndex::new(6, 6));
+ assert_eq!(camel_case_start_from_idx("AbcDefGhi", 0), StrIndex::new(0, 0));
+ assert_eq!(camel_case_start_from_idx("AbcDefGhi", 1), StrIndex::new(3, 3));
+ assert_eq!(camel_case_start_from_idx("Abcdefg", 1), StrIndex::new(7, 7));
+ }
+
+ #[test]
+ fn camel_case_indices_full() {
+ assert_eq!(camel_case_indices("Abc\u{f6}\u{f6}DD"), vec![StrIndex::new(7, 9)]);
+ }
+
+ #[test]
+ fn camel_case_split_full() {
+ assert_eq!(camel_case_split("A"), vec!["A"]);
+ assert_eq!(camel_case_split("AbcDef"), vec!["Abc", "Def"]);
+ assert_eq!(camel_case_split("Abc"), vec!["Abc"]);
+ assert_eq!(camel_case_split("abcDef"), vec!["abc", "Def"]);
+ assert_eq!(
+ camel_case_split("\u{f6}\u{f6}AabABcd"),
+ vec!["\u{f6}\u{f6}", "Aab", "A", "Bcd"]
+ );
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/sugg.rs b/src/tools/clippy/clippy_utils/src/sugg.rs
new file mode 100644
index 000000000..bad291dfc
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/sugg.rs
@@ -0,0 +1,1099 @@
+//! Contains utility functions to generate suggestions.
+#![deny(clippy::missing_docs_in_private_items)]
+
+use crate::source::{snippet, snippet_opt, snippet_with_applicability, snippet_with_macro_callsite};
+use crate::ty::expr_sig;
+use crate::{get_parent_expr_for_hir, higher};
+use rustc_ast::util::parser::AssocOp;
+use rustc_ast::{ast, token};
+use rustc_ast_pretty::pprust::token_kind_to_string;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_hir::{Closure, ExprKind, HirId, MutTy, TyKind};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::{EarlyContext, LateContext, LintContext};
+use rustc_middle::hir::place::ProjectionKind;
+use rustc_middle::mir::{FakeReadCause, Mutability};
+use rustc_middle::ty;
+use rustc_span::source_map::{BytePos, CharPos, Pos, Span, SyntaxContext};
+use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
+use std::borrow::Cow;
+use std::fmt::{Display, Write as _};
+use std::ops::{Add, Neg, Not, Sub};
+
+/// A helper type to build suggestions while correctly handling parentheses.
+#[derive(Clone, PartialEq)]
+pub enum Sugg<'a> {
+ /// An expression that never needs parentheses such as `1337` or `[0; 42]`.
+ NonParen(Cow<'a, str>),
+ /// An expression that does not fit in other variants.
+ MaybeParen(Cow<'a, str>),
+ /// A binary operator expression, including `as`-casts and explicit type
+ /// coercion.
+ BinOp(AssocOp, Cow<'a, str>, Cow<'a, str>),
+}
+
+/// Literal constant `0`, for convenience.
+pub const ZERO: Sugg<'static> = Sugg::NonParen(Cow::Borrowed("0"));
+/// Literal constant `1`, for convenience.
+pub const ONE: Sugg<'static> = Sugg::NonParen(Cow::Borrowed("1"));
+/// A constant representing the empty string, for convenience.
+pub const EMPTY: Sugg<'static> = Sugg::NonParen(Cow::Borrowed(""));
+
+impl Display for Sugg<'_> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ match *self {
+ Sugg::NonParen(ref s) | Sugg::MaybeParen(ref s) => s.fmt(f),
+ Sugg::BinOp(op, ref lhs, ref rhs) => binop_to_string(op, lhs, rhs).fmt(f),
+ }
+ }
+}
+
+#[expect(clippy::wrong_self_convention)] // ok, because of the `as_ty` method
+impl<'a> Sugg<'a> {
+ /// Prepare a suggestion from an expression.
+ pub fn hir_opt(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Self> {
+ let get_snippet = |span| snippet(cx, span, "");
+ snippet_opt(cx, expr.span).map(|_| Self::hir_from_snippet(expr, get_snippet))
+ }
+
+ /// Convenience function around `hir_opt` for suggestions with a default
+ /// text.
+ pub fn hir(cx: &LateContext<'_>, expr: &hir::Expr<'_>, default: &'a str) -> Self {
+ Self::hir_opt(cx, expr).unwrap_or(Sugg::NonParen(Cow::Borrowed(default)))
+ }
+
+    /// Same as `hir`, but it adapts the applicability level by the following rules:
+    ///
+    /// - Applicability level `Unspecified` will never be changed.
+    /// - If the span is inside a macro, change the applicability level to `MaybeIncorrect`.
+    /// - If the default value is used and the applicability level is `MachineApplicable`, change it
+    ///   to `HasPlaceholders`.
+ pub fn hir_with_applicability(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ default: &'a str,
+ applicability: &mut Applicability,
+ ) -> Self {
+ if *applicability != Applicability::Unspecified && expr.span.from_expansion() {
+ *applicability = Applicability::MaybeIncorrect;
+ }
+ Self::hir_opt(cx, expr).unwrap_or_else(|| {
+ if *applicability == Applicability::MachineApplicable {
+ *applicability = Applicability::HasPlaceholders;
+ }
+ Sugg::NonParen(Cow::Borrowed(default))
+ })
+ }
+
+ /// Same as `hir`, but will use the pre expansion span if the `expr` was in a macro.
+ pub fn hir_with_macro_callsite(cx: &LateContext<'_>, expr: &hir::Expr<'_>, default: &'a str) -> Self {
+ let get_snippet = |span| snippet_with_macro_callsite(cx, span, default);
+ Self::hir_from_snippet(expr, get_snippet)
+ }
+
+ /// Same as `hir`, but first walks the span up to the given context. This will result in the
+    /// macro call, rather than the expansion, if the span is from a child context. If the span is
+ /// not from a child context, it will be used directly instead.
+ ///
+ /// e.g. Given the expression `&vec![]`, getting a snippet from the span for `vec![]` as a HIR
+ /// node would result in `box []`. If given the context of the address of expression, this
+ /// function will correctly get a snippet of `vec![]`.
+ pub fn hir_with_context(
+ cx: &LateContext<'_>,
+ expr: &hir::Expr<'_>,
+ ctxt: SyntaxContext,
+ default: &'a str,
+ applicability: &mut Applicability,
+ ) -> Self {
+ if expr.span.ctxt() == ctxt {
+ Self::hir_from_snippet(expr, |span| snippet(cx, span, default))
+ } else {
+ let snip = snippet_with_applicability(cx, expr.span, default, applicability);
+ Sugg::NonParen(snip)
+ }
+ }
+
+ /// Generate a suggestion for an expression with the given snippet. This is used by the `hir_*`
+ /// function variants of `Sugg`, since these use different snippet functions.
+ fn hir_from_snippet(expr: &hir::Expr<'_>, get_snippet: impl Fn(Span) -> Cow<'a, str>) -> Self {
+ if let Some(range) = higher::Range::hir(expr) {
+ let op = match range.limits {
+ ast::RangeLimits::HalfOpen => AssocOp::DotDot,
+ ast::RangeLimits::Closed => AssocOp::DotDotEq,
+ };
+ let start = range.start.map_or("".into(), |expr| get_snippet(expr.span));
+ let end = range.end.map_or("".into(), |expr| get_snippet(expr.span));
+
+ return Sugg::BinOp(op, start, end);
+ }
+
+ match expr.kind {
+ hir::ExprKind::AddrOf(..)
+ | hir::ExprKind::Box(..)
+ | hir::ExprKind::If(..)
+ | hir::ExprKind::Let(..)
+ | hir::ExprKind::Closure { .. }
+ | hir::ExprKind::Unary(..)
+ | hir::ExprKind::Match(..) => Sugg::MaybeParen(get_snippet(expr.span)),
+ hir::ExprKind::Continue(..)
+ | hir::ExprKind::Yield(..)
+ | hir::ExprKind::Array(..)
+ | hir::ExprKind::Block(..)
+ | hir::ExprKind::Break(..)
+ | hir::ExprKind::Call(..)
+ | hir::ExprKind::Field(..)
+ | hir::ExprKind::Index(..)
+ | hir::ExprKind::InlineAsm(..)
+ | hir::ExprKind::ConstBlock(..)
+ | hir::ExprKind::Lit(..)
+ | hir::ExprKind::Loop(..)
+ | hir::ExprKind::MethodCall(..)
+ | hir::ExprKind::Path(..)
+ | hir::ExprKind::Repeat(..)
+ | hir::ExprKind::Ret(..)
+ | hir::ExprKind::Struct(..)
+ | hir::ExprKind::Tup(..)
+ | hir::ExprKind::DropTemps(_)
+ | hir::ExprKind::Err => Sugg::NonParen(get_snippet(expr.span)),
+ hir::ExprKind::Assign(lhs, rhs, _) => {
+ Sugg::BinOp(AssocOp::Assign, get_snippet(lhs.span), get_snippet(rhs.span))
+ },
+ hir::ExprKind::AssignOp(op, lhs, rhs) => {
+ Sugg::BinOp(hirbinop2assignop(op), get_snippet(lhs.span), get_snippet(rhs.span))
+ },
+ hir::ExprKind::Binary(op, lhs, rhs) => Sugg::BinOp(
+ AssocOp::from_ast_binop(op.node.into()),
+ get_snippet(lhs.span),
+ get_snippet(rhs.span),
+ ),
+ hir::ExprKind::Cast(lhs, ty) => Sugg::BinOp(AssocOp::As, get_snippet(lhs.span), get_snippet(ty.span)),
+ hir::ExprKind::Type(lhs, ty) => Sugg::BinOp(AssocOp::Colon, get_snippet(lhs.span), get_snippet(ty.span)),
+ }
+ }
+
+ /// Prepare a suggestion from an expression.
+ pub fn ast(cx: &EarlyContext<'_>, expr: &ast::Expr, default: &'a str) -> Self {
+ use rustc_ast::ast::RangeLimits;
+
+ let get_whole_snippet = || {
+ if expr.span.from_expansion() {
+ snippet_with_macro_callsite(cx, expr.span, default)
+ } else {
+ snippet(cx, expr.span, default)
+ }
+ };
+
+ match expr.kind {
+ ast::ExprKind::AddrOf(..)
+ | ast::ExprKind::Box(..)
+ | ast::ExprKind::Closure { .. }
+ | ast::ExprKind::If(..)
+ | ast::ExprKind::Let(..)
+ | ast::ExprKind::Unary(..)
+ | ast::ExprKind::Match(..) => Sugg::MaybeParen(get_whole_snippet()),
+ ast::ExprKind::Async(..)
+ | ast::ExprKind::Block(..)
+ | ast::ExprKind::Break(..)
+ | ast::ExprKind::Call(..)
+ | ast::ExprKind::Continue(..)
+ | ast::ExprKind::Yield(..)
+ | ast::ExprKind::Field(..)
+ | ast::ExprKind::ForLoop(..)
+ | ast::ExprKind::Index(..)
+ | ast::ExprKind::InlineAsm(..)
+ | ast::ExprKind::ConstBlock(..)
+ | ast::ExprKind::Lit(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::MacCall(..)
+ | ast::ExprKind::MethodCall(..)
+ | ast::ExprKind::Paren(..)
+ | ast::ExprKind::Underscore
+ | ast::ExprKind::Path(..)
+ | ast::ExprKind::Repeat(..)
+ | ast::ExprKind::Ret(..)
+ | ast::ExprKind::Yeet(..)
+ | ast::ExprKind::Struct(..)
+ | ast::ExprKind::Try(..)
+ | ast::ExprKind::TryBlock(..)
+ | ast::ExprKind::Tup(..)
+ | ast::ExprKind::Array(..)
+ | ast::ExprKind::While(..)
+ | ast::ExprKind::Await(..)
+ | ast::ExprKind::Err => Sugg::NonParen(get_whole_snippet()),
+ ast::ExprKind::Range(ref lhs, ref rhs, RangeLimits::HalfOpen) => Sugg::BinOp(
+ AssocOp::DotDot,
+ lhs.as_ref().map_or("".into(), |lhs| snippet(cx, lhs.span, default)),
+ rhs.as_ref().map_or("".into(), |rhs| snippet(cx, rhs.span, default)),
+ ),
+ ast::ExprKind::Range(ref lhs, ref rhs, RangeLimits::Closed) => Sugg::BinOp(
+ AssocOp::DotDotEq,
+ lhs.as_ref().map_or("".into(), |lhs| snippet(cx, lhs.span, default)),
+ rhs.as_ref().map_or("".into(), |rhs| snippet(cx, rhs.span, default)),
+ ),
+ ast::ExprKind::Assign(ref lhs, ref rhs, _) => Sugg::BinOp(
+ AssocOp::Assign,
+ snippet(cx, lhs.span, default),
+ snippet(cx, rhs.span, default),
+ ),
+ ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => Sugg::BinOp(
+ astbinop2assignop(op),
+ snippet(cx, lhs.span, default),
+ snippet(cx, rhs.span, default),
+ ),
+ ast::ExprKind::Binary(op, ref lhs, ref rhs) => Sugg::BinOp(
+ AssocOp::from_ast_binop(op.node),
+ snippet(cx, lhs.span, default),
+ snippet(cx, rhs.span, default),
+ ),
+ ast::ExprKind::Cast(ref lhs, ref ty) => Sugg::BinOp(
+ AssocOp::As,
+ snippet(cx, lhs.span, default),
+ snippet(cx, ty.span, default),
+ ),
+ ast::ExprKind::Type(ref lhs, ref ty) => Sugg::BinOp(
+ AssocOp::Colon,
+ snippet(cx, lhs.span, default),
+ snippet(cx, ty.span, default),
+ ),
+ }
+ }
+
+ /// Convenience method to create the `<lhs> && <rhs>` suggestion.
+ pub fn and(self, rhs: &Self) -> Sugg<'static> {
+ make_binop(ast::BinOpKind::And, &self, rhs)
+ }
+
+ /// Convenience method to create the `<lhs> & <rhs>` suggestion.
+ pub fn bit_and(self, rhs: &Self) -> Sugg<'static> {
+ make_binop(ast::BinOpKind::BitAnd, &self, rhs)
+ }
+
+ /// Convenience method to create the `<lhs> as <rhs>` suggestion.
+ pub fn as_ty<R: Display>(self, rhs: R) -> Sugg<'static> {
+ make_assoc(AssocOp::As, &self, &Sugg::NonParen(rhs.to_string().into()))
+ }
+
+ /// Convenience method to create the `&<expr>` suggestion.
+ pub fn addr(self) -> Sugg<'static> {
+ make_unop("&", self)
+ }
+
+ /// Convenience method to create the `&mut <expr>` suggestion.
+ pub fn mut_addr(self) -> Sugg<'static> {
+ make_unop("&mut ", self)
+ }
+
+ /// Convenience method to create the `*<expr>` suggestion.
+ pub fn deref(self) -> Sugg<'static> {
+ make_unop("*", self)
+ }
+
+ /// Convenience method to create the `&*<expr>` suggestion. Currently this
+ /// is needed because `sugg.deref().addr()` produces an unnecessary set of
+ /// parentheses around the deref.
+ pub fn addr_deref(self) -> Sugg<'static> {
+ make_unop("&*", self)
+ }
+
+ /// Convenience method to create the `&mut *<expr>` suggestion. Currently
+ /// this is needed because `sugg.deref().mut_addr()` produces an unnecessary
+ /// set of parentheses around the deref.
+ pub fn mut_addr_deref(self) -> Sugg<'static> {
+ make_unop("&mut *", self)
+ }
+
+ /// Convenience method to transform suggestion into a return call
+ pub fn make_return(self) -> Sugg<'static> {
+ Sugg::NonParen(Cow::Owned(format!("return {}", self)))
+ }
+
+ /// Convenience method to transform suggestion into a block
+ /// where the suggestion is a trailing expression
+ pub fn blockify(self) -> Sugg<'static> {
+ Sugg::NonParen(Cow::Owned(format!("{{ {} }}", self)))
+ }
+
+ /// Convenience method to create the `<lhs>..<rhs>` or `<lhs>...<rhs>`
+ /// suggestion.
+ pub fn range(self, end: &Self, limit: ast::RangeLimits) -> Sugg<'static> {
+ match limit {
+ ast::RangeLimits::HalfOpen => make_assoc(AssocOp::DotDot, &self, end),
+ ast::RangeLimits::Closed => make_assoc(AssocOp::DotDotEq, &self, end),
+ }
+ }
+
+    /// Adds parentheses to any expression that might need them. Suitable for the
+ /// `self` argument of a method call
+ /// (e.g., to build `bar.foo()` or `(1 + 2).foo()`).
+ #[must_use]
+ pub fn maybe_par(self) -> Self {
+ match self {
+ Sugg::NonParen(..) => self,
+ // `(x)` and `(x).y()` both don't need additional parens.
+ Sugg::MaybeParen(sugg) => {
+ if has_enclosing_paren(&sugg) {
+ Sugg::MaybeParen(sugg)
+ } else {
+ Sugg::NonParen(format!("({})", sugg).into())
+ }
+ },
+ Sugg::BinOp(op, lhs, rhs) => {
+ let sugg = binop_to_string(op, &lhs, &rhs);
+ Sugg::NonParen(format!("({})", sugg).into())
+ },
+ }
+ }
+}
+
+/// Generates a string from the operator and both sides.
+fn binop_to_string(op: AssocOp, lhs: &str, rhs: &str) -> String {
+ match op {
+ AssocOp::Add
+ | AssocOp::Subtract
+ | AssocOp::Multiply
+ | AssocOp::Divide
+ | AssocOp::Modulus
+ | AssocOp::LAnd
+ | AssocOp::LOr
+ | AssocOp::BitXor
+ | AssocOp::BitAnd
+ | AssocOp::BitOr
+ | AssocOp::ShiftLeft
+ | AssocOp::ShiftRight
+ | AssocOp::Equal
+ | AssocOp::Less
+ | AssocOp::LessEqual
+ | AssocOp::NotEqual
+ | AssocOp::Greater
+ | AssocOp::GreaterEqual => format!(
+ "{} {} {}",
+ lhs,
+ op.to_ast_binop().expect("Those are AST ops").to_string(),
+ rhs
+ ),
+ AssocOp::Assign => format!("{} = {}", lhs, rhs),
+ AssocOp::AssignOp(op) => {
+ format!("{} {}= {}", lhs, token_kind_to_string(&token::BinOp(op)), rhs)
+ },
+ AssocOp::As => format!("{} as {}", lhs, rhs),
+ AssocOp::DotDot => format!("{}..{}", lhs, rhs),
+ AssocOp::DotDotEq => format!("{}..={}", lhs, rhs),
+ AssocOp::Colon => format!("{}: {}", lhs, rhs),
+ }
+}
+
+/// Returns `true` if `sugg` is enclosed in parentheses.
+pub fn has_enclosing_paren(sugg: impl AsRef<str>) -> bool {
+ let mut chars = sugg.as_ref().chars();
+ if chars.next() == Some('(') {
+ let mut depth = 1;
+ for c in &mut chars {
+ if c == '(' {
+ depth += 1;
+ } else if c == ')' {
+ depth -= 1;
+ }
+ if depth == 0 {
+ break;
+ }
+ }
+ chars.next().is_none()
+ } else {
+ false
+ }
+}
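+
+// Behaviour sketch for `has_enclosing_paren` (comment only, not a doctest):
+//
+//     has_enclosing_paren("(a + b)")   == true   // one pair wraps the whole snippet
+//     has_enclosing_paren("(a) + (b)") == false  // depth reaches 0 before the end
+//     has_enclosing_paren("a + b")     == false  // does not start with '('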
+
+/// Copied from the Rust standard library, and then edited
+macro_rules! forward_binop_impls_to_ref {
+ (impl $imp:ident, $method:ident for $t:ty, type Output = $o:ty) => {
+ impl $imp<$t> for &$t {
+ type Output = $o;
+
+ fn $method(self, other: $t) -> $o {
+ $imp::$method(self, &other)
+ }
+ }
+
+ impl $imp<&$t> for $t {
+ type Output = $o;
+
+ fn $method(self, other: &$t) -> $o {
+ $imp::$method(&self, other)
+ }
+ }
+
+ impl $imp for $t {
+ type Output = $o;
+
+ fn $method(self, other: $t) -> $o {
+ $imp::$method(&self, &other)
+ }
+ }
+ };
+}
+
+impl Add for &Sugg<'_> {
+ type Output = Sugg<'static>;
+ fn add(self, rhs: &Sugg<'_>) -> Sugg<'static> {
+ make_binop(ast::BinOpKind::Add, self, rhs)
+ }
+}
+
+impl Sub for &Sugg<'_> {
+ type Output = Sugg<'static>;
+ fn sub(self, rhs: &Sugg<'_>) -> Sugg<'static> {
+ make_binop(ast::BinOpKind::Sub, self, rhs)
+ }
+}
+
+forward_binop_impls_to_ref!(impl Add, add for Sugg<'_>, type Output = Sugg<'static>);
+forward_binop_impls_to_ref!(impl Sub, sub for Sugg<'_>, type Output = Sugg<'static>);
+
+impl Neg for Sugg<'_> {
+ type Output = Sugg<'static>;
+ fn neg(self) -> Sugg<'static> {
+ make_unop("-", self)
+ }
+}
+
+impl<'a> Not for Sugg<'a> {
+ type Output = Sugg<'a>;
+ fn not(self) -> Sugg<'a> {
+ use AssocOp::{Equal, Greater, GreaterEqual, Less, LessEqual, NotEqual};
+
+ if let Sugg::BinOp(op, lhs, rhs) = self {
+ let to_op = match op {
+ Equal => NotEqual,
+ NotEqual => Equal,
+ Less => GreaterEqual,
+ GreaterEqual => Less,
+ Greater => LessEqual,
+ LessEqual => Greater,
+ _ => return make_unop("!", Sugg::BinOp(op, lhs, rhs)),
+ };
+ Sugg::BinOp(to_op, lhs, rhs)
+ } else {
+ make_unop("!", self)
+ }
+ }
+}
+
+/// Helper type to display either `foo` or `(foo)`.
+struct ParenHelper<T> {
+ /// `true` if parentheses are needed.
+ paren: bool,
+ /// The main thing to display.
+ wrapped: T,
+}
+
+impl<T> ParenHelper<T> {
+ /// Builds a `ParenHelper`.
+ fn new(paren: bool, wrapped: T) -> Self {
+ Self { paren, wrapped }
+ }
+}
+
+impl<T: Display> Display for ParenHelper<T> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ if self.paren {
+ write!(f, "({})", self.wrapped)
+ } else {
+ self.wrapped.fmt(f)
+ }
+ }
+}
+
+/// Builds the string for `<op><expr>`, adding parentheses when necessary.
+///
+/// For convenience, the operator is taken as a string because all unary
+/// operators have the same precedence.
+pub fn make_unop(op: &str, expr: Sugg<'_>) -> Sugg<'static> {
+ Sugg::MaybeParen(format!("{}{}", op, expr.maybe_par()).into())
+}
+
+/// Builds the string for `<lhs> <op> <rhs>`, adding parentheses when necessary.
+///
+/// The precedence of shift operators relative to other arithmetic operations is
+/// often confusing, so parentheses will always be added for a mix of these.
+pub fn make_assoc(op: AssocOp, lhs: &Sugg<'_>, rhs: &Sugg<'_>) -> Sugg<'static> {
+ /// Returns `true` if the operator is a shift operator `<<` or `>>`.
+ fn is_shift(op: AssocOp) -> bool {
+ matches!(op, AssocOp::ShiftLeft | AssocOp::ShiftRight)
+ }
+
+ /// Returns `true` if the operator is an arithmetic operator
+ /// (i.e., `+`, `-`, `*`, `/`, `%`).
+ fn is_arith(op: AssocOp) -> bool {
+ matches!(
+ op,
+ AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus
+ )
+ }
+
+    /// Returns `true` if the operator `op` needs parentheses with the operator
+ /// `other` in the direction `dir`.
+ fn needs_paren(op: AssocOp, other: AssocOp, dir: Associativity) -> bool {
+ other.precedence() < op.precedence()
+ || (other.precedence() == op.precedence()
+ && ((op != other && associativity(op) != dir)
+ || (op == other && associativity(op) != Associativity::Both)))
+ || is_shift(op) && is_arith(other)
+ || is_shift(other) && is_arith(op)
+ }
+
+ let lhs_paren = if let Sugg::BinOp(lop, _, _) = *lhs {
+ needs_paren(op, lop, Associativity::Left)
+ } else {
+ false
+ };
+
+ let rhs_paren = if let Sugg::BinOp(rop, _, _) = *rhs {
+ needs_paren(op, rop, Associativity::Right)
+ } else {
+ false
+ };
+
+ let lhs = ParenHelper::new(lhs_paren, lhs).to_string();
+ let rhs = ParenHelper::new(rhs_paren, rhs).to_string();
+ Sugg::BinOp(op, lhs.into(), rhs.into())
+}
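+
+// For instance (illustrative): mixing shift and arithmetic always adds parentheses,
+// so building a `<<` suggestion whose left-hand side is an existing `x + y` suggestion
+// yields "(x + y) << z" rather than the easily misread "x + y << z".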
+
+/// Convenience wrapper around `make_assoc` and `AssocOp::from_ast_binop`.
+pub fn make_binop(op: ast::BinOpKind, lhs: &Sugg<'_>, rhs: &Sugg<'_>) -> Sugg<'static> {
+ make_assoc(AssocOp::from_ast_binop(op), lhs, rhs)
+}
+
+#[derive(PartialEq, Eq, Clone, Copy)]
+/// Operator associativity.
+enum Associativity {
+ /// The operator is both left-associative and right-associative.
+ Both,
+ /// The operator is left-associative.
+ Left,
+ /// The operator is not associative.
+ None,
+ /// The operator is right-associative.
+ Right,
+}
+
+/// Returns the associativity/fixity of an operator. The difference from
+/// `AssocOp::fixity` is that an operator can be both left- and right-associative
+/// (such as `+`: `a + b + c == (a + b) + c == a + (b + c)`).
+///
+/// Chained `as` and explicit `:` type coercion never need inner parentheses, so
+/// they are considered associative.
+#[must_use]
+fn associativity(op: AssocOp) -> Associativity {
+ use rustc_ast::util::parser::AssocOp::{
+ Add, As, Assign, AssignOp, BitAnd, BitOr, BitXor, Colon, Divide, DotDot, DotDotEq, Equal, Greater,
+ GreaterEqual, LAnd, LOr, Less, LessEqual, Modulus, Multiply, NotEqual, ShiftLeft, ShiftRight, Subtract,
+ };
+
+ match op {
+ Assign | AssignOp(_) => Associativity::Right,
+ Add | BitAnd | BitOr | BitXor | LAnd | LOr | Multiply | As | Colon => Associativity::Both,
+ Divide | Equal | Greater | GreaterEqual | Less | LessEqual | Modulus | NotEqual | ShiftLeft | ShiftRight
+ | Subtract => Associativity::Left,
+ DotDot | DotDotEq => Associativity::None,
+ }
+}
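+
+// For example, subtraction is listed as `Left` only: `a - b - c == (a - b) - c`
+// but generally `!= a - (b - c)`, so a right-hand operand that is itself a
+// subtraction still needs its own parentheses.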
+
+/// Converts a `hir::BinOp` to the corresponding assigning binary operator.
+fn hirbinop2assignop(op: hir::BinOp) -> AssocOp {
+ use rustc_ast::token::BinOpToken::{And, Caret, Minus, Or, Percent, Plus, Shl, Shr, Slash, Star};
+
+ AssocOp::AssignOp(match op.node {
+ hir::BinOpKind::Add => Plus,
+ hir::BinOpKind::BitAnd => And,
+ hir::BinOpKind::BitOr => Or,
+ hir::BinOpKind::BitXor => Caret,
+ hir::BinOpKind::Div => Slash,
+ hir::BinOpKind::Mul => Star,
+ hir::BinOpKind::Rem => Percent,
+ hir::BinOpKind::Shl => Shl,
+ hir::BinOpKind::Shr => Shr,
+ hir::BinOpKind::Sub => Minus,
+
+ hir::BinOpKind::And
+ | hir::BinOpKind::Eq
+ | hir::BinOpKind::Ge
+ | hir::BinOpKind::Gt
+ | hir::BinOpKind::Le
+ | hir::BinOpKind::Lt
+ | hir::BinOpKind::Ne
+ | hir::BinOpKind::Or => panic!("This operator does not exist"),
+ })
+}
+
+/// Converts an `ast::BinOp` to the corresponding assigning binary operator.
+fn astbinop2assignop(op: ast::BinOp) -> AssocOp {
+ use rustc_ast::ast::BinOpKind::{
+ Add, And, BitAnd, BitOr, BitXor, Div, Eq, Ge, Gt, Le, Lt, Mul, Ne, Or, Rem, Shl, Shr, Sub,
+ };
+ use rustc_ast::token::BinOpToken;
+
+ AssocOp::AssignOp(match op.node {
+ Add => BinOpToken::Plus,
+ BitAnd => BinOpToken::And,
+ BitOr => BinOpToken::Or,
+ BitXor => BinOpToken::Caret,
+ Div => BinOpToken::Slash,
+ Mul => BinOpToken::Star,
+ Rem => BinOpToken::Percent,
+ Shl => BinOpToken::Shl,
+ Shr => BinOpToken::Shr,
+ Sub => BinOpToken::Minus,
+ And | Eq | Ge | Gt | Le | Lt | Ne | Or => panic!("This operator does not exist"),
+ })
+}
+
+/// Returns the indentation before `span` if there is nothing but `[ \t]`
+/// before it on its line.
+fn indentation<T: LintContext>(cx: &T, span: Span) -> Option<String> {
+ let lo = cx.sess().source_map().lookup_char_pos(span.lo());
+ lo.file
+ .get_line(lo.line - 1 /* line numbers in `Loc` are 1-based */)
+ .and_then(|line| {
+ if let Some((pos, _)) = line.char_indices().find(|&(_, c)| c != ' ' && c != '\t') {
+ // We can mix char and byte positions here because we only consider `[ \t]`.
+ if lo.col == CharPos(pos) {
+ Some(line[..pos].into())
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })
+}
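+
+// As a sketch: for a line "    let x = 1;" whose `span` starts at `let`, this
+// returns Some("    "); if the span instead starts after other code on the same
+// line, the prefix is not pure whitespace and the function returns None.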
+
+/// Convenience extension trait for `Diagnostic`.
+pub trait DiagnosticExt<T: LintContext> {
+    /// Suggests adding an attribute to an item.
+ ///
+ /// Correctly handles indentation of the attribute and item.
+ ///
+ /// # Example
+ ///
+ /// ```rust,ignore
+ /// diag.suggest_item_with_attr(cx, item, "#[derive(Default)]");
+ /// ```
+ fn suggest_item_with_attr<D: Display + ?Sized>(
+ &mut self,
+ cx: &T,
+ item: Span,
+ msg: &str,
+ attr: &D,
+ applicability: Applicability,
+ );
+
+    /// Suggests adding an item before another.
+ ///
+ /// The item should not be indented (except for inner indentation).
+ ///
+ /// # Example
+ ///
+ /// ```rust,ignore
+ /// diag.suggest_prepend_item(cx, item,
+ /// "fn foo() {
+ /// bar();
+ /// }");
+ /// ```
+ fn suggest_prepend_item(&mut self, cx: &T, item: Span, msg: &str, new_item: &str, applicability: Applicability);
+
+    /// Suggests completely removing an item.
+    ///
+    /// This will remove an item and all following whitespace until the next non-whitespace
+    /// character. This should work correctly if the item is on the same indentation level as the
+    /// following item.
+ ///
+ /// # Example
+ ///
+ /// ```rust,ignore
+ /// diag.suggest_remove_item(cx, item, "remove this")
+ /// ```
+ fn suggest_remove_item(&mut self, cx: &T, item: Span, msg: &str, applicability: Applicability);
+}
+
+impl<T: LintContext> DiagnosticExt<T> for rustc_errors::Diagnostic {
+ fn suggest_item_with_attr<D: Display + ?Sized>(
+ &mut self,
+ cx: &T,
+ item: Span,
+ msg: &str,
+ attr: &D,
+ applicability: Applicability,
+ ) {
+ if let Some(indent) = indentation(cx, item) {
+ let span = item.with_hi(item.lo());
+
+ self.span_suggestion(span, msg, format!("{}\n{}", attr, indent), applicability);
+ }
+ }
+
+ fn suggest_prepend_item(&mut self, cx: &T, item: Span, msg: &str, new_item: &str, applicability: Applicability) {
+ if let Some(indent) = indentation(cx, item) {
+ let span = item.with_hi(item.lo());
+
+ let mut first = true;
+ let new_item = new_item
+ .lines()
+ .map(|l| {
+ if first {
+ first = false;
+ format!("{}\n", l)
+ } else {
+ format!("{}{}\n", indent, l)
+ }
+ })
+ .collect::<String>();
+
+ self.span_suggestion(span, msg, format!("{}\n{}", new_item, indent), applicability);
+ }
+ }
+
+ fn suggest_remove_item(&mut self, cx: &T, item: Span, msg: &str, applicability: Applicability) {
+ let mut remove_span = item;
+ let hi = cx.sess().source_map().next_point(remove_span).hi();
+ let fmpos = cx.sess().source_map().lookup_byte_offset(hi);
+
+ if let Some(ref src) = fmpos.sf.src {
+ let non_whitespace_offset = src[fmpos.pos.to_usize()..].find(|c| c != ' ' && c != '\t' && c != '\n');
+
+ if let Some(non_whitespace_offset) = non_whitespace_offset {
+ remove_span = remove_span
+ .with_hi(remove_span.hi() + BytePos(non_whitespace_offset.try_into().expect("offset too large")));
+ }
+ }
+
+ self.span_suggestion(remove_span, msg, "", applicability);
+ }
+}
+
+/// Suggestion result for handling dereferencing and borrowing of closure args.
+pub struct DerefClosure {
+ /// confidence on the built suggestion
+ pub applicability: Applicability,
+ /// gradually built suggestion
+ pub suggestion: String,
+}
+
+/// Builds a suggestion gradually by handling closure-arg-specific usages,
+/// such as explicit deref and borrowing cases.
+/// Returns `None` if no such use cases have been triggered in the closure body.
+///
+/// Note: this only works on single-line immutable closures with exactly one input parameter.
+pub fn deref_closure_args<'tcx>(cx: &LateContext<'_>, closure: &'tcx hir::Expr<'_>) -> Option<DerefClosure> {
+ if let hir::ExprKind::Closure(&Closure { fn_decl, body, .. }) = closure.kind {
+ let closure_body = cx.tcx.hir().body(body);
+ // is closure arg a type annotated double reference (i.e.: `|x: &&i32| ...`)
+ // a type annotation is present if param `kind` is different from `TyKind::Infer`
+ let closure_arg_is_type_annotated_double_ref = if let TyKind::Rptr(_, MutTy { ty, .. }) = fn_decl.inputs[0].kind
+ {
+ matches!(ty.kind, TyKind::Rptr(_, MutTy { .. }))
+ } else {
+ false
+ };
+
+ let mut visitor = DerefDelegate {
+ cx,
+ closure_span: closure.span,
+ closure_arg_is_type_annotated_double_ref,
+ next_pos: closure.span.lo(),
+ suggestion_start: String::new(),
+ applicability: Applicability::MachineApplicable,
+ };
+
+ let fn_def_id = cx.tcx.hir().local_def_id(closure.hir_id);
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ ExprUseVisitor::new(&mut visitor, &infcx, fn_def_id, cx.param_env, cx.typeck_results())
+ .consume_body(closure_body);
+ });
+
+ if !visitor.suggestion_start.is_empty() {
+ return Some(DerefClosure {
+ applicability: visitor.applicability,
+ suggestion: visitor.finish(),
+ });
+ }
+ }
+ None
+}
+
+/// Visitor struct used for tracking down
+/// dereferencing and borrowing of closure's args
+struct DerefDelegate<'a, 'tcx> {
+ /// The late context of the lint
+ cx: &'a LateContext<'tcx>,
+ /// The span of the input closure to adapt
+ closure_span: Span,
+ /// Indicates if the arg of the closure is a type annotated double reference
+ closure_arg_is_type_annotated_double_ref: bool,
+ /// last position of the span to gradually build the suggestion
+ next_pos: BytePos,
+ /// starting part of the gradually built suggestion
+ suggestion_start: String,
+ /// confidence on the built suggestion
+ applicability: Applicability,
+}
+
+impl<'tcx> DerefDelegate<'_, 'tcx> {
+ /// build final suggestion:
+ /// - create the ending part of suggestion
+ /// - concatenate starting and ending parts
+ /// - potentially remove needless borrowing
+ pub fn finish(&mut self) -> String {
+ let end_span = Span::new(self.next_pos, self.closure_span.hi(), self.closure_span.ctxt(), None);
+ let end_snip = snippet_with_applicability(self.cx, end_span, "..", &mut self.applicability);
+ let sugg = format!("{}{}", self.suggestion_start, end_snip);
+ if self.closure_arg_is_type_annotated_double_ref {
+ sugg.replacen('&', "", 1)
+ } else {
+ sugg
+ }
+ }
+
+ /// indicates whether the function from `parent_expr` takes its args by double reference
+ fn func_takes_arg_by_double_ref(&self, parent_expr: &'tcx hir::Expr<'_>, cmt_hir_id: HirId) -> bool {
+ let ty = match parent_expr.kind {
+ ExprKind::MethodCall(_, call_args, _) => {
+ if let Some(sig) = self
+ .cx
+ .typeck_results()
+ .type_dependent_def_id(parent_expr.hir_id)
+ .map(|did| self.cx.tcx.fn_sig(did).skip_binder())
+ {
+ call_args
+ .iter()
+ .position(|arg| arg.hir_id == cmt_hir_id)
+ .map(|i| sig.inputs()[i])
+ } else {
+ return false;
+ }
+ },
+ ExprKind::Call(func, call_args) => {
+ if let Some(sig) = expr_sig(self.cx, func) {
+ call_args
+ .iter()
+ .position(|arg| arg.hir_id == cmt_hir_id)
+ .and_then(|i| sig.input(i))
+ .map(ty::Binder::skip_binder)
+ } else {
+ return false;
+ }
+ },
+ _ => return false,
+ };
+
+ ty.map_or(false, |ty| matches!(ty.kind(), ty::Ref(_, inner, _) if inner.is_ref()))
+ }
+}
+
+impl<'tcx> Delegate<'tcx> for DerefDelegate<'_, 'tcx> {
+ fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
+
+ fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) {
+ if let PlaceBase::Local(id) = cmt.place.base {
+ let map = self.cx.tcx.hir();
+ let span = map.span(cmt.hir_id);
+ let start_span = Span::new(self.next_pos, span.lo(), span.ctxt(), None);
+ let mut start_snip = snippet_with_applicability(self.cx, start_span, "..", &mut self.applicability);
+
+ // identifier referring to the variable currently triggered (i.e.: `fp`)
+ let ident_str = map.name(id).to_string();
+ // full identifier that includes projection (i.e.: `fp.field`)
+ let ident_str_with_proj = snippet(self.cx, span, "..").to_string();
+
+ if cmt.place.projections.is_empty() {
+ // handle item without any projection, that needs an explicit borrowing
+ // i.e.: suggest `&x` instead of `x`
+ let _ = write!(self.suggestion_start, "{}&{}", start_snip, ident_str);
+ } else {
+ // cases where a parent `Call` or `MethodCall` is using the item
+ // i.e.: suggest `.contains(&x)` for `.find(|x| [1, 2, 3].contains(x)).is_none()`
+ //
+ // Note about method calls:
+ // - compiler automatically dereference references if the target type is a reference (works also for
+ // function call)
+ // - `self` arguments in the case of `x.is_something()` are also automatically (de)referenced, and
+ // no projection should be suggested
+ if let Some(parent_expr) = get_parent_expr_for_hir(self.cx, cmt.hir_id) {
+ match &parent_expr.kind {
+ // given expression is the self argument and will be handled completely by the compiler
+ // i.e.: `|x| x.is_something()`
+ ExprKind::MethodCall(_, [self_expr, ..], _) if self_expr.hir_id == cmt.hir_id => {
+ let _ = write!(self.suggestion_start, "{}{}", start_snip, ident_str_with_proj);
+ self.next_pos = span.hi();
+ return;
+ },
+ // item is used in a call
+ // i.e.: `Call`: `|x| please(x)` or `MethodCall`: `|x| [1, 2, 3].contains(x)`
+ ExprKind::Call(_, [call_args @ ..]) | ExprKind::MethodCall(_, [_, call_args @ ..], _) => {
+ let expr = self.cx.tcx.hir().expect_expr(cmt.hir_id);
+ let arg_ty_kind = self.cx.typeck_results().expr_ty(expr).kind();
+
+ if matches!(arg_ty_kind, ty::Ref(_, _, Mutability::Not)) {
+ // suggest ampersand if call function is taking args by double reference
+ let takes_arg_by_double_ref =
+ self.func_takes_arg_by_double_ref(parent_expr, cmt.hir_id);
+
+ // compiler will automatically dereference field or index projection, so no need
+ // to suggest ampersand, but full identifier that includes projection is required
+ let has_field_or_index_projection =
+ cmt.place.projections.iter().any(|proj| {
+ matches!(proj.kind, ProjectionKind::Field(..) | ProjectionKind::Index)
+ });
+
+ // no need to bind again if the function doesn't take arg by double ref
+ // and if the item is already a double ref
+ let ident_sugg = if !call_args.is_empty()
+ && !takes_arg_by_double_ref
+ && (self.closure_arg_is_type_annotated_double_ref || has_field_or_index_projection)
+ {
+ let ident = if has_field_or_index_projection {
+ ident_str_with_proj
+ } else {
+ ident_str
+ };
+ format!("{}{}", start_snip, ident)
+ } else {
+ format!("{}&{}", start_snip, ident_str)
+ };
+ self.suggestion_start.push_str(&ident_sugg);
+ self.next_pos = span.hi();
+ return;
+ }
+
+ self.applicability = Applicability::Unspecified;
+ },
+ _ => (),
+ }
+ }
+
+ let mut replacement_str = ident_str;
+ let mut projections_handled = false;
+ cmt.place.projections.iter().enumerate().for_each(|(i, proj)| {
+ match proj.kind {
+ // Field projection like `|v| v.foo`
+ // no adjustment needed here, as field projections are handled by the compiler
+ ProjectionKind::Field(..) => match cmt.place.ty_before_projection(i).kind() {
+ ty::Adt(..) | ty::Tuple(_) => {
+ replacement_str = ident_str_with_proj.clone();
+ projections_handled = true;
+ },
+ _ => (),
+ },
+ // Index projection like `|x| foo[x]`
+ // the index is dropped so we can't get it to build the suggestion,
+ // so the span is set-up again to get more code, using `span.hi()` (i.e.: `foo[x]`)
+ // instead of `span.lo()` (i.e.: `foo`)
+ ProjectionKind::Index => {
+ let start_span = Span::new(self.next_pos, span.hi(), span.ctxt(), None);
+ start_snip = snippet_with_applicability(self.cx, start_span, "..", &mut self.applicability);
+ replacement_str.clear();
+ projections_handled = true;
+ },
+ // note: unable to trigger `Subslice` kind in tests
+ ProjectionKind::Subslice => (),
+ ProjectionKind::Deref => {
+ // Explicit derefs are typically handled later on, but
+ // some items do not need explicit deref, such as array accesses,
+ // so we mark them as already processed
+ // i.e.: don't suggest `*sub[1..4].len()` for `|sub| sub[1..4].len() == 3`
+ if let ty::Ref(_, inner, _) = cmt.place.ty_before_projection(i).kind() {
+ if matches!(inner.kind(), ty::Ref(_, innermost, _) if innermost.is_array()) {
+ projections_handled = true;
+ }
+ }
+ },
+ }
+ });
+
+ // handle `ProjectionKind::Deref` by removing one explicit deref
+ // if no special case was detected (i.e.: suggest `*x` instead of `**x`)
+ if !projections_handled {
+ let last_deref = cmt
+ .place
+ .projections
+ .iter()
+ .rposition(|proj| proj.kind == ProjectionKind::Deref);
+
+ if let Some(pos) = last_deref {
+ let mut projections = cmt.place.projections.clone();
+ projections.truncate(pos);
+
+ for item in projections {
+ if item.kind == ProjectionKind::Deref {
+ replacement_str = format!("*{}", replacement_str);
+ }
+ }
+ }
+ }
+
+ let _ = write!(self.suggestion_start, "{}{}", start_snip, replacement_str);
+ }
+ self.next_pos = span.hi();
+ }
+ }
+
+ fn mutate(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
+
+ fn fake_read(&mut self, _: &rustc_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {}
+}
+
+#[cfg(test)]
+mod test {
+ use super::Sugg;
+
+ use rustc_ast::util::parser::AssocOp;
+ use std::borrow::Cow;
+
+ const SUGGESTION: Sugg<'static> = Sugg::NonParen(Cow::Borrowed("function_call()"));
+
+ #[test]
+ fn make_return_transform_sugg_into_a_return_call() {
+ assert_eq!("return function_call()", SUGGESTION.make_return().to_string());
+ }
+
+ #[test]
+ fn blockify_transforms_sugg_into_a_block() {
+ assert_eq!("{ function_call() }", SUGGESTION.blockify().to_string());
+ }
+
+ #[test]
+ fn binop_maybe_par() {
+ let sugg = Sugg::BinOp(AssocOp::Add, "1".into(), "1".into());
+ assert_eq!("(1 + 1)", sugg.maybe_par().to_string());
+
+ let sugg = Sugg::BinOp(AssocOp::Add, "(1 + 1)".into(), "(1 + 1)".into());
+ assert_eq!("((1 + 1) + (1 + 1))", sugg.maybe_par().to_string());
+ }
+ #[test]
+ fn not_op() {
+ use AssocOp::{Add, Equal, Greater, GreaterEqual, LAnd, LOr, Less, LessEqual, NotEqual};
+
+ fn test_not(op: AssocOp, correct: &str) {
+ let sugg = Sugg::BinOp(op, "x".into(), "y".into());
+ assert_eq!((!sugg).to_string(), correct);
+ }
+
+ // Invert the comparison operator.
+ test_not(Equal, "x != y");
+ test_not(NotEqual, "x == y");
+ test_not(Less, "x >= y");
+ test_not(LessEqual, "x > y");
+ test_not(Greater, "x <= y");
+ test_not(GreaterEqual, "x < y");
+
+ // Other operators are inverted like !(..).
+ test_not(Add, "!(x + y)");
+ test_not(LAnd, "!(x && y)");
+ test_not(LOr, "!(x || y)");
+ }
+}
diff --git a/src/tools/clippy/clippy_utils/src/sym_helper.rs b/src/tools/clippy/clippy_utils/src/sym_helper.rs
new file mode 100644
index 000000000..f47dc80eb
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/sym_helper.rs
@@ -0,0 +1,7 @@
+#[macro_export]
+/// Convenience wrapper around rustc's `Symbol::intern`
+macro_rules! sym {
+ ($tt:tt) => {
+ rustc_span::symbol::Symbol::intern(stringify!($tt))
+ };
+}
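+
+// Example usage: `sym!(to_string)` expands to
+// `rustc_span::symbol::Symbol::intern("to_string")`, which is handy when a lint
+// needs an ad-hoc `Symbol` that has no pre-interned `sym::*` constant.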
diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs
new file mode 100644
index 000000000..a05d633d9
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/ty.rs
@@ -0,0 +1,829 @@
+//! Util methods for [`rustc_middle::ty`]
+
+#![allow(clippy::module_name_repetitions)]
+
+use core::ops::ControlFlow;
+use rustc_ast::ast::Mutability;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_hir as hir;
+use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Expr, FnDecl, LangItem, TyKind, Unsafety};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::LateContext;
+use rustc_middle::mir::interpret::{ConstValue, Scalar};
+use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst};
+use rustc_middle::ty::{
+ self, AdtDef, Binder, BoundRegion, DefIdTree, FnSig, IntTy, ParamEnv, Predicate, PredicateKind, ProjectionTy,
+ Region, RegionKind, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, UintTy, VariantDef, VariantDiscr,
+};
+use rustc_span::symbol::Ident;
+use rustc_span::{sym, Span, Symbol, DUMMY_SP};
+use rustc_target::abi::{Size, VariantIdx};
+use rustc_trait_selection::infer::InferCtxtExt;
+use rustc_trait_selection::traits::query::normalize::AtExt;
+use std::iter;
+
+use crate::{match_def_path, path_res, paths};
+
+// Checks if the given type implements `Copy`.
+pub fn is_copy<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ ty.is_copy_modulo_regions(cx.tcx.at(DUMMY_SP), cx.param_env)
+}
+
+/// Checks whether a type can be partially moved.
+pub fn can_partially_move_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ if has_drop(cx, ty) || is_copy(cx, ty) {
+ return false;
+ }
+ match ty.kind() {
+ ty::Param(_) => false,
+ ty::Adt(def, subs) => def.all_fields().any(|f| !is_copy(cx, f.ty(cx.tcx, subs))),
+ _ => true,
+ }
+}
+
+/// Walks into `ty` and returns `true` if any inner type is the same as `other_ty`
+pub fn contains_ty<'tcx>(ty: Ty<'tcx>, other_ty: Ty<'tcx>) -> bool {
+ ty.walk().any(|inner| match inner.unpack() {
+ GenericArgKind::Type(inner_ty) => other_ty == inner_ty,
+ GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => false,
+ })
+}
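+
+// For instance (illustrative): since `walk` visits the type itself and every
+// nested generic argument, asking whether `Vec<Option<u32>>` contains `u32`
+// yields `true`, while asking for an unrelated type such as `i64` yields `false`.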
+
+/// Walks into `ty` and returns `true` if any inner type is an instance of the given adt
+/// constructor.
+pub fn contains_adt_constructor<'tcx>(ty: Ty<'tcx>, adt: AdtDef<'tcx>) -> bool {
+ ty.walk().any(|inner| match inner.unpack() {
+ GenericArgKind::Type(inner_ty) => inner_ty.ty_adt_def() == Some(adt),
+ GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => false,
+ })
+}
+
+/// Resolves `<T as Iterator>::Item` for `T`.
+/// Do not invoke without first verifying that the type implements `Iterator`.
+pub fn get_iterator_item_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+ cx.tcx
+ .get_diagnostic_item(sym::Iterator)
+ .and_then(|iter_did| get_associated_type(cx, ty, iter_did, "Item"))
+}
+
+/// Returns the associated type `name` for `ty` as an implementation of `trait_id`.
+/// Do not invoke without first verifying that the type implements the trait.
+pub fn get_associated_type<'tcx>(
+ cx: &LateContext<'tcx>,
+ ty: Ty<'tcx>,
+ trait_id: DefId,
+ name: &str,
+) -> Option<Ty<'tcx>> {
+ cx.tcx
+ .associated_items(trait_id)
+ .find_by_name_and_kind(cx.tcx, Ident::from_str(name), ty::AssocKind::Type, trait_id)
+ .and_then(|assoc| {
+ let proj = cx.tcx.mk_projection(assoc.def_id, cx.tcx.mk_substs_trait(ty, &[]));
+ cx.tcx.try_normalize_erasing_regions(cx.param_env, proj).ok()
+ })
+}
+
+/// Get the diagnostic name of a type, e.g. `sym::HashMap`. To check if a type
+/// implements a trait marked with a diagnostic item use [`implements_trait`].
+///
+/// For a further explanation of what diagnostic items are, see [diagnostic items] in
+/// the rustc-dev-guide.
+///
+/// [Diagnostic Items]: https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-items.html
+pub fn get_type_diagnostic_name(cx: &LateContext<'_>, ty: Ty<'_>) -> Option<Symbol> {
+ match ty.kind() {
+ ty::Adt(adt, _) => cx.tcx.get_diagnostic_name(adt.did()),
+ _ => None,
+ }
+}
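+
+// For example, `std::collections::HashMap<K, V>` yields `Some(sym::HashMap)`,
+// while a locally defined struct (which has no diagnostic item) yields `None`.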
+
+/// Returns `Some` with the collection's name if `ty` has `iter` or `iter_mut` methods, `None` otherwise
+pub fn has_iter_method(cx: &LateContext<'_>, probably_ref_ty: Ty<'_>) -> Option<Symbol> {
+ // FIXME: instead of this hard-coded list, we should check if `<adt>::iter`
+ // exists and has the desired signature. Unfortunately FnCtxt is not exported
+ // so we can't use its `lookup_method` method.
+ let into_iter_collections: &[Symbol] = &[
+ sym::Vec,
+ sym::Option,
+ sym::Result,
+ sym::BTreeMap,
+ sym::BTreeSet,
+ sym::VecDeque,
+ sym::LinkedList,
+ sym::BinaryHeap,
+ sym::HashSet,
+ sym::HashMap,
+ sym::PathBuf,
+ sym::Path,
+ sym::Receiver,
+ ];
+
+ let ty_to_check = match probably_ref_ty.kind() {
+ ty::Ref(_, ty_to_check, _) => *ty_to_check,
+ _ => probably_ref_ty,
+ };
+
+ let def_id = match ty_to_check.kind() {
+ ty::Array(..) => return Some(sym::array),
+ ty::Slice(..) => return Some(sym::slice),
+ ty::Adt(adt, _) => adt.did(),
+ _ => return None,
+ };
+
+ for &name in into_iter_collections {
+ if cx.tcx.is_diagnostic_item(name, def_id) {
+ return Some(cx.tcx.item_name(def_id));
+ }
+ }
+ None
+}
+
+/// Checks whether a type implements a trait.
+/// The function returns false in case the type contains an inference variable.
+///
+/// See:
+/// * [`get_trait_def_id`](super::get_trait_def_id) to get a trait [`DefId`].
+/// * [Common tools for writing lints] for an example how to use this function and other options.
+///
+/// [Common tools for writing lints]: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/common_tools_writing_lints.md#checking-if-a-type-implements-a-specific-trait
+pub fn implements_trait<'tcx>(
+ cx: &LateContext<'tcx>,
+ ty: Ty<'tcx>,
+ trait_id: DefId,
+ ty_params: &[GenericArg<'tcx>],
+) -> bool {
+ implements_trait_with_env(cx.tcx, cx.param_env, ty, trait_id, ty_params)
+}
+
+/// Same as `implements_trait` but allows using a `ParamEnv` different from the lint context.
+pub fn implements_trait_with_env<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+ trait_id: DefId,
+ ty_params: &[GenericArg<'tcx>],
+) -> bool {
+ // Clippy shouldn't have infer types
+ assert!(!ty.needs_infer());
+
+ let ty = tcx.erase_regions(ty);
+ if ty.has_escaping_bound_vars() {
+ return false;
+ }
+ let ty_params = tcx.mk_substs(ty_params.iter());
+ tcx.infer_ctxt().enter(|infcx| {
+ infcx
+ .type_implements_trait(trait_id, ty, ty_params, param_env)
+ .must_apply_modulo_regions()
+ })
+}
+
+/// Checks whether this type implements `Drop`.
+pub fn has_drop<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ match ty.ty_adt_def() {
+ Some(def) => def.has_dtor(cx.tcx),
+ None => false,
+ }
+}
+
+// Returns whether the type has the `#[must_use]` attribute
+pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ match ty.kind() {
+ ty::Adt(adt, _) => cx.tcx.has_attr(adt.did(), sym::must_use),
+ ty::Foreign(did) => cx.tcx.has_attr(*did, sym::must_use),
+ ty::Slice(ty) | ty::Array(ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) | ty::Ref(_, ty, _) => {
+ // for the Array case we don't need to care for the len == 0 case
+ // because we don't want to lint functions returning empty arrays
+ is_must_use_ty(cx, *ty)
+ },
+ ty::Tuple(substs) => substs.iter().any(|ty| is_must_use_ty(cx, ty)),
+ ty::Opaque(def_id, _) => {
+ for (predicate, _) in cx.tcx.explicit_item_bounds(*def_id) {
+ if let ty::PredicateKind::Trait(trait_predicate) = predicate.kind().skip_binder() {
+ if cx.tcx.has_attr(trait_predicate.trait_ref.def_id, sym::must_use) {
+ return true;
+ }
+ }
+ }
+ false
+ },
+ ty::Dynamic(binder, _) => {
+ for predicate in binder.iter() {
+ if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() {
+ if cx.tcx.has_attr(trait_ref.def_id, sym::must_use) {
+ return true;
+ }
+ }
+ }
+ false
+ },
+ _ => false,
+ }
+}
+
+// FIXME: Per https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/infer/at/struct.At.html#method.normalize
+// this function can be removed once the `normalize` method does not panic when normalization does
+// not succeed
+/// Checks if `Ty` is normalizable. This function is useful
+/// to avoid crashes on `layout_of`.
+pub fn is_normalizable<'tcx>(cx: &LateContext<'tcx>, param_env: ty::ParamEnv<'tcx>, ty: Ty<'tcx>) -> bool {
+ is_normalizable_helper(cx, param_env, ty, &mut FxHashMap::default())
+}
+
+fn is_normalizable_helper<'tcx>(
+ cx: &LateContext<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+ cache: &mut FxHashMap<Ty<'tcx>, bool>,
+) -> bool {
+ if let Some(&cached_result) = cache.get(&ty) {
+ return cached_result;
+ }
+ // prevent recursive loops, false-negative is better than endless loop leading to stack overflow
+ cache.insert(ty, false);
+ let result = cx.tcx.infer_ctxt().enter(|infcx| {
+ let cause = rustc_middle::traits::ObligationCause::dummy();
+ if infcx.at(&cause, param_env).normalize(ty).is_ok() {
+ match ty.kind() {
+ ty::Adt(def, substs) => def.variants().iter().all(|variant| {
+ variant
+ .fields
+ .iter()
+ .all(|field| is_normalizable_helper(cx, param_env, field.ty(cx.tcx, substs), cache))
+ }),
+ _ => ty.walk().all(|generic_arg| match generic_arg.unpack() {
+ GenericArgKind::Type(inner_ty) if inner_ty != ty => {
+ is_normalizable_helper(cx, param_env, inner_ty, cache)
+ },
+ _ => true, // if inner_ty == ty, we've already checked it
+ }),
+ }
+ } else {
+ false
+ }
+ });
+ cache.insert(ty, result);
+ result
+}
+
+/// Returns `true` if the given type is a non-aggregate primitive (a `bool` or `char`, any
+/// integer or floating-point number type). For checking aggregation of primitive types (e.g.
+/// tuples and slices of primitive type) see `is_recursively_primitive_type`
+pub fn is_non_aggregate_primitive_type(ty: Ty<'_>) -> bool {
+ matches!(ty.kind(), ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_))
+}
+
+/// Returns `true` if the given type is a primitive (a `bool` or `char`, any integer or
+/// floating-point number type, a `str`, or an array, slice, or tuple of those types).
+pub fn is_recursively_primitive_type(ty: Ty<'_>) -> bool {
+ match *ty.kind() {
+ ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str => true,
+ ty::Ref(_, inner, _) if *inner.kind() == ty::Str => true,
+ ty::Array(inner_type, _) | ty::Slice(inner_type) => is_recursively_primitive_type(inner_type),
+ ty::Tuple(inner_types) => inner_types.iter().all(is_recursively_primitive_type),
+ _ => false,
+ }
+}
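+
+// For example, `[(u32, bool); 4]` and `&str` are recursively primitive, while
+// `Vec<u32>` and `Option<char>` are not (ADTs fall through to `false`).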
+
+/// Checks if the type is a reference to a type that matches a diagnostic item
+pub fn is_type_ref_to_diagnostic_item(cx: &LateContext<'_>, ty: Ty<'_>, diag_item: Symbol) -> bool {
+ match ty.kind() {
+ ty::Ref(_, ref_ty, _) => match ref_ty.kind() {
+ ty::Adt(adt, _) => cx.tcx.is_diagnostic_item(diag_item, adt.did()),
+ _ => false,
+ },
+ _ => false,
+ }
+}
+
+/// Checks if the type is equal to a diagnostic item. To check if a type implements a
+/// trait marked with a diagnostic item use [`implements_trait`].
+///
+/// For a further explanation of what diagnostic items are, see [diagnostic items] in
+/// the rustc-dev-guide.
+///
+/// ---
+///
+/// If you change the signature, remember to update the internal lint `MatchTypeOnDiagItem`
+///
+/// [Diagnostic Items]: https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-items.html
+pub fn is_type_diagnostic_item(cx: &LateContext<'_>, ty: Ty<'_>, diag_item: Symbol) -> bool {
+ match ty.kind() {
+ ty::Adt(adt, _) => cx.tcx.is_diagnostic_item(diag_item, adt.did()),
+ _ => false,
+ }
+}
+
+/// Checks if the type is equal to a lang item.
+///
+/// Returns `false` if the `LangItem` is not defined.
+pub fn is_type_lang_item(cx: &LateContext<'_>, ty: Ty<'_>, lang_item: hir::LangItem) -> bool {
+ match ty.kind() {
+ ty::Adt(adt, _) => cx
+ .tcx
+ .lang_items()
+ .require(lang_item)
+ .map_or(false, |li| li == adt.did()),
+ _ => false,
+ }
+}
+
+/// Return `true` if the passed `typ` is `isize` or `usize`.
+pub fn is_isize_or_usize(typ: Ty<'_>) -> bool {
+ matches!(typ.kind(), ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize))
+}
+
+/// Checks if the type is a struct, enum, or union with the given def path.
+///
+/// If the type is a diagnostic item, use `is_type_diagnostic_item` instead.
+/// If you change the signature, remember to update the internal lint `MatchTypeOnDiagItem`
+pub fn match_type(cx: &LateContext<'_>, ty: Ty<'_>, path: &[&str]) -> bool {
+ match ty.kind() {
+ ty::Adt(adt, _) => match_def_path(cx, adt.did(), path),
+ _ => false,
+ }
+}
+
+/// Checks if the drop order for a type matters. Some std types implement drop solely to
+/// deallocate memory. For these types, and composites containing them, changing the drop order
+/// won't result in any observable side effects.
+pub fn needs_ordered_drop<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ fn needs_ordered_drop_inner<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, seen: &mut FxHashSet<Ty<'tcx>>) -> bool {
+ if !seen.insert(ty) {
+ return false;
+ }
+ if !ty.has_significant_drop(cx.tcx, cx.param_env) {
+ false
+ }
+ // Check for std types which implement drop, but only for memory allocation.
+ else if is_type_lang_item(cx, ty, LangItem::OwnedBox)
+ || matches!(
+ get_type_diagnostic_name(cx, ty),
+ Some(sym::HashSet | sym::Rc | sym::Arc | sym::cstring_type)
+ )
+ || match_type(cx, ty, &paths::WEAK_RC)
+ || match_type(cx, ty, &paths::WEAK_ARC)
+ {
+ // Check all of the generic arguments.
+ if let ty::Adt(_, subs) = ty.kind() {
+ subs.types().any(|ty| needs_ordered_drop_inner(cx, ty, seen))
+ } else {
+ true
+ }
+ } else if !cx
+ .tcx
+ .lang_items()
+ .drop_trait()
+ .map_or(false, |id| implements_trait(cx, ty, id, &[]))
+ {
+ // This type doesn't implement drop, so no side effects here.
+ // Check if any component type has any.
+ match ty.kind() {
+ ty::Tuple(fields) => fields.iter().any(|ty| needs_ordered_drop_inner(cx, ty, seen)),
+ ty::Array(ty, _) => needs_ordered_drop_inner(cx, *ty, seen),
+ ty::Adt(adt, subs) => adt
+ .all_fields()
+ .map(|f| f.ty(cx.tcx, subs))
+ .any(|ty| needs_ordered_drop_inner(cx, ty, seen)),
+ _ => true,
+ }
+ } else {
+ true
+ }
+ }
+
+ needs_ordered_drop_inner(cx, ty, &mut FxHashSet::default())
+}
+
+/// Peels off all references on the type. Returns the underlying type and the number of references
+/// removed.
+pub fn peel_mid_ty_refs(ty: Ty<'_>) -> (Ty<'_>, usize) {
+ fn peel(ty: Ty<'_>, count: usize) -> (Ty<'_>, usize) {
+ if let ty::Ref(_, ty, _) = ty.kind() {
+ peel(*ty, count + 1)
+ } else {
+ (ty, count)
+ }
+ }
+ peel(ty, 0)
+}
+
+/// Peels off all references on the type. Returns the underlying type, the number of references
+/// removed, and whether the pointer is ultimately mutable or not.
+pub fn peel_mid_ty_refs_is_mutable(ty: Ty<'_>) -> (Ty<'_>, usize, Mutability) {
+ fn f(ty: Ty<'_>, count: usize, mutability: Mutability) -> (Ty<'_>, usize, Mutability) {
+ match ty.kind() {
+ ty::Ref(_, ty, Mutability::Mut) => f(*ty, count + 1, mutability),
+ ty::Ref(_, ty, Mutability::Not) => f(*ty, count + 1, Mutability::Not),
+ _ => (ty, count, mutability),
+ }
+ }
+ f(ty, 0, Mutability::Mut)
+}
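+
+// Worked examples (illustrative): `peel_mid_ty_refs(&&str)` yields `(str, 2)`, and
+// `peel_mid_ty_refs_is_mutable(&mut &T)` yields `(T, 2, Mutability::Not)`; the returned
+// mutability is `Mut` only when every peeled reference layer is mutable.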
+
+/// Returns `true` if the given type is an `unsafe` function.
+pub fn type_is_unsafe_function<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+ match ty.kind() {
+ ty::FnDef(..) | ty::FnPtr(_) => ty.fn_sig(cx.tcx).unsafety() == Unsafety::Unsafe,
+ _ => false,
+ }
+}
+
+/// Returns the base type for HIR references and pointers.
+pub fn walk_ptrs_hir_ty<'tcx>(ty: &'tcx hir::Ty<'tcx>) -> &'tcx hir::Ty<'tcx> {
+ match ty.kind {
+ TyKind::Ptr(ref mut_ty) | TyKind::Rptr(_, ref mut_ty) => walk_ptrs_hir_ty(mut_ty.ty),
+ _ => ty,
+ }
+}
+
+/// Returns the base type for references and raw pointers, and count reference
+/// depth.
+pub fn walk_ptrs_ty_depth(ty: Ty<'_>) -> (Ty<'_>, usize) {
+ fn inner(ty: Ty<'_>, depth: usize) -> (Ty<'_>, usize) {
+ match ty.kind() {
+ ty::Ref(_, ty, _) => inner(*ty, depth + 1),
+ _ => (ty, depth),
+ }
+ }
+ inner(ty, 0)
+}
+
+/// Returns `true` if types `a` and `b` are the same type with the same `Const` generic args;
+/// otherwise returns `false`.
+pub fn same_type_and_consts<'tcx>(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
+ match (&a.kind(), &b.kind()) {
+ (&ty::Adt(did_a, substs_a), &ty::Adt(did_b, substs_b)) => {
+ if did_a != did_b {
+ return false;
+ }
+
+ substs_a
+ .iter()
+ .zip(substs_b.iter())
+ .all(|(arg_a, arg_b)| match (arg_a.unpack(), arg_b.unpack()) {
+ (GenericArgKind::Const(inner_a), GenericArgKind::Const(inner_b)) => inner_a == inner_b,
+ (GenericArgKind::Type(type_a), GenericArgKind::Type(type_b)) => {
+ same_type_and_consts(type_a, type_b)
+ },
+ _ => true,
+ })
+ },
+ _ => a == b,
+ }
+}
+
+/// Checks if a given type looks safe to be uninitialized.
+pub fn is_uninit_value_valid_for_ty(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
+ match *ty.kind() {
+ ty::Array(component, _) => is_uninit_value_valid_for_ty(cx, component),
+ ty::Tuple(types) => types.iter().all(|ty| is_uninit_value_valid_for_ty(cx, ty)),
+ ty::Adt(adt, _) => cx.tcx.lang_items().maybe_uninit() == Some(adt.did()),
+ _ => false,
+ }
+}
+
+/// Gets an iterator over all predicates which apply to the given item.
+pub fn all_predicates_of(tcx: TyCtxt<'_>, id: DefId) -> impl Iterator<Item = &(Predicate<'_>, Span)> {
+ let mut next_id = Some(id);
+ iter::from_fn(move || {
+ next_id.take().map(|id| {
+ let preds = tcx.predicates_of(id);
+ next_id = preds.parent;
+ preds.predicates.iter()
+ })
+ })
+ .flatten()
+}
+
+/// A signature for a function like type.
+#[derive(Clone, Copy)]
+pub enum ExprFnSig<'tcx> {
+ Sig(Binder<'tcx, FnSig<'tcx>>, Option<DefId>),
+ Closure(Option<&'tcx FnDecl<'tcx>>, Binder<'tcx, FnSig<'tcx>>),
+ Trait(Binder<'tcx, Ty<'tcx>>, Option<Binder<'tcx, Ty<'tcx>>>),
+}
+impl<'tcx> ExprFnSig<'tcx> {
+ /// Gets the argument type at the given offset. For variadic functions this will return `None`
+ /// when the index is out of bounds; otherwise an out-of-bounds index will panic.
+ pub fn input(self, i: usize) -> Option<Binder<'tcx, Ty<'tcx>>> {
+ match self {
+ Self::Sig(sig, _) => {
+ if sig.c_variadic() {
+ sig.inputs().map_bound(|inputs| inputs.get(i).copied()).transpose()
+ } else {
+ Some(sig.input(i))
+ }
+ },
+ Self::Closure(_, sig) => Some(sig.input(0).map_bound(|ty| ty.tuple_fields()[i])),
+ Self::Trait(inputs, _) => Some(inputs.map_bound(|ty| ty.tuple_fields()[i])),
+ }
+ }
+
+ /// Gets the argument type at the given offset. For closures this will also get the type as
+ /// written. For variadic functions this will return `None` when the index is out of bounds;
+ /// otherwise an out-of-bounds index will panic.
+ pub fn input_with_hir(self, i: usize) -> Option<(Option<&'tcx hir::Ty<'tcx>>, Binder<'tcx, Ty<'tcx>>)> {
+ match self {
+ Self::Sig(sig, _) => {
+ if sig.c_variadic() {
+ sig.inputs()
+ .map_bound(|inputs| inputs.get(i).copied())
+ .transpose()
+ .map(|arg| (None, arg))
+ } else {
+ Some((None, sig.input(i)))
+ }
+ },
+ Self::Closure(decl, sig) => Some((
+ decl.and_then(|decl| decl.inputs.get(i)),
+ sig.input(0).map_bound(|ty| ty.tuple_fields()[i]),
+ )),
+ Self::Trait(inputs, _) => Some((None, inputs.map_bound(|ty| ty.tuple_fields()[i]))),
+ }
+ }
+
+ /// Gets the result type, if one could be found. Note that the result type of a trait may not be
+ /// specified.
+ pub fn output(self) -> Option<Binder<'tcx, Ty<'tcx>>> {
+ match self {
+ Self::Sig(sig, _) | Self::Closure(_, sig) => Some(sig.output()),
+ Self::Trait(_, output) => output,
+ }
+ }
+
+ pub fn predicates_id(&self) -> Option<DefId> {
+ if let ExprFnSig::Sig(_, id) = *self { id } else { None }
+ }
+}
+
+/// If the expression is function like, get the signature for it.
+pub fn expr_sig<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> Option<ExprFnSig<'tcx>> {
+ if let Res::Def(DefKind::Fn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::AssocFn, id) = path_res(cx, expr) {
+ Some(ExprFnSig::Sig(cx.tcx.fn_sig(id), Some(id)))
+ } else {
+ ty_sig(cx, cx.typeck_results().expr_ty_adjusted(expr).peel_refs())
+ }
+}
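+
+// Hypothetical usage sketch, assuming `expr` is the call expression a lint is inspecting:
+// `expr_sig` on the callee plus `ExprFnSig::output` gives the (bound) return type.
+//
+//     if let ExprKind::Call(callee, _) = expr.kind
+//         && let Some(sig) = expr_sig(cx, callee)
+//         && let Some(output) = sig.output()
+//     {
+//         // `output` is a `Binder<'tcx, Ty<'tcx>>` describing the callee's return type
+//     }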
+
+fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'tcx>> {
+ if ty.is_box() {
+ return ty_sig(cx, ty.boxed_ty());
+ }
+ match *ty.kind() {
+ ty::Closure(id, subs) => {
+ let decl = id
+ .as_local()
+ .and_then(|id| cx.tcx.hir().fn_decl_by_hir_id(cx.tcx.hir().local_def_id_to_hir_id(id)));
+ Some(ExprFnSig::Closure(decl, subs.as_closure().sig()))
+ },
+ ty::FnDef(id, subs) => Some(ExprFnSig::Sig(cx.tcx.bound_fn_sig(id).subst(cx.tcx, subs), Some(id))),
+ ty::Opaque(id, _) => ty_sig(cx, cx.tcx.type_of(id)),
+ ty::FnPtr(sig) => Some(ExprFnSig::Sig(sig, None)),
+ ty::Dynamic(bounds, _) => {
+ let lang_items = cx.tcx.lang_items();
+ match bounds.principal() {
+ Some(bound)
+ if Some(bound.def_id()) == lang_items.fn_trait()
+ || Some(bound.def_id()) == lang_items.fn_once_trait()
+ || Some(bound.def_id()) == lang_items.fn_mut_trait() =>
+ {
+ let output = bounds
+ .projection_bounds()
+ .find(|p| lang_items.fn_once_output().map_or(false, |id| id == p.item_def_id()))
+ .map(|p| p.map_bound(|p| p.term.ty().unwrap()));
+ Some(ExprFnSig::Trait(bound.map_bound(|b| b.substs.type_at(0)), output))
+ },
+ _ => None,
+ }
+ },
+ ty::Projection(proj) => match cx.tcx.try_normalize_erasing_regions(cx.param_env, ty) {
+ Ok(normalized_ty) if normalized_ty != ty => ty_sig(cx, normalized_ty),
+ _ => sig_for_projection(cx, proj).or_else(|| sig_from_bounds(cx, ty)),
+ },
+ ty::Param(_) => sig_from_bounds(cx, ty),
+ _ => None,
+ }
+}
+
+fn sig_from_bounds<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'tcx>> {
+ let mut inputs = None;
+ let mut output = None;
+ let lang_items = cx.tcx.lang_items();
+
+ for (pred, _) in all_predicates_of(cx.tcx, cx.typeck_results().hir_owner.to_def_id()) {
+ match pred.kind().skip_binder() {
+ PredicateKind::Trait(p)
+ if (lang_items.fn_trait() == Some(p.def_id())
+ || lang_items.fn_mut_trait() == Some(p.def_id())
+ || lang_items.fn_once_trait() == Some(p.def_id()))
+ && p.self_ty() == ty =>
+ {
+ if inputs.is_some() {
+ // Multiple different fn trait impls. Is this even allowed?
+ return None;
+ }
+ inputs = Some(pred.kind().rebind(p.trait_ref.substs.type_at(1)));
+ },
+ PredicateKind::Projection(p)
+ if Some(p.projection_ty.item_def_id) == lang_items.fn_once_output()
+ && p.projection_ty.self_ty() == ty =>
+ {
+ if output.is_some() {
+ // Multiple different fn trait impls. Is this even allowed?
+ return None;
+ }
+ output = Some(pred.kind().rebind(p.term.ty().unwrap()));
+ },
+ _ => (),
+ }
+ }
+
+ inputs.map(|ty| ExprFnSig::Trait(ty, output))
+}
+
+fn sig_for_projection<'tcx>(cx: &LateContext<'tcx>, ty: ProjectionTy<'tcx>) -> Option<ExprFnSig<'tcx>> {
+ let mut inputs = None;
+ let mut output = None;
+ let lang_items = cx.tcx.lang_items();
+
+ for pred in cx
+ .tcx
+ .bound_explicit_item_bounds(ty.item_def_id)
+ .transpose_iter()
+ .map(|x| x.map_bound(|(p, _)| p))
+ {
+ match pred.0.kind().skip_binder() {
+ PredicateKind::Trait(p)
+ if (lang_items.fn_trait() == Some(p.def_id())
+ || lang_items.fn_mut_trait() == Some(p.def_id())
+ || lang_items.fn_once_trait() == Some(p.def_id())) =>
+ {
+ if inputs.is_some() {
+ // Multiple different fn trait impls. Is this even allowed?
+ return None;
+ }
+ inputs = Some(
+ pred.map_bound(|pred| pred.kind().rebind(p.trait_ref.substs.type_at(1)))
+ .subst(cx.tcx, ty.substs),
+ );
+ },
+ PredicateKind::Projection(p) if Some(p.projection_ty.item_def_id) == lang_items.fn_once_output() => {
+ if output.is_some() {
+ // Multiple different fn trait impls. Is this even allowed?
+ return None;
+ }
+ output = Some(
+ pred.map_bound(|pred| pred.kind().rebind(p.term.ty().unwrap()))
+ .subst(cx.tcx, ty.substs),
+ );
+ },
+ _ => (),
+ }
+ }
+
+ inputs.map(|ty| ExprFnSig::Trait(ty, output))
+}
+
+#[derive(Clone, Copy)]
+pub enum EnumValue {
+ Unsigned(u128),
+ Signed(i128),
+}
+impl core::ops::Add<u32> for EnumValue {
+ type Output = Self;
+ fn add(self, n: u32) -> Self::Output {
+ match self {
+ Self::Unsigned(x) => Self::Unsigned(x + u128::from(n)),
+ Self::Signed(x) => Self::Signed(x + i128::from(n)),
+ }
+ }
+}
+
+/// Attempts to read the given constant as though it were an enum value.
+#[expect(clippy::cast_possible_truncation, clippy::cast_possible_wrap)]
+pub fn read_explicit_enum_value(tcx: TyCtxt<'_>, id: DefId) -> Option<EnumValue> {
+ if let Ok(ConstValue::Scalar(Scalar::Int(value))) = tcx.const_eval_poly(id) {
+ match tcx.type_of(id).kind() {
+ ty::Int(_) => Some(EnumValue::Signed(match value.size().bytes() {
+ 1 => i128::from(value.assert_bits(Size::from_bytes(1)) as u8 as i8),
+ 2 => i128::from(value.assert_bits(Size::from_bytes(2)) as u16 as i16),
+ 4 => i128::from(value.assert_bits(Size::from_bytes(4)) as u32 as i32),
+ 8 => i128::from(value.assert_bits(Size::from_bytes(8)) as u64 as i64),
+ 16 => value.assert_bits(Size::from_bytes(16)) as i128,
+ _ => return None,
+ })),
+ ty::Uint(_) => Some(EnumValue::Unsigned(match value.size().bytes() {
+ 1 => value.assert_bits(Size::from_bytes(1)),
+ 2 => value.assert_bits(Size::from_bytes(2)),
+ 4 => value.assert_bits(Size::from_bytes(4)),
+ 8 => value.assert_bits(Size::from_bytes(8)),
+ 16 => value.assert_bits(Size::from_bytes(16)),
+ _ => return None,
+ })),
+ _ => None,
+ }
+ } else {
+ None
+ }
+}
+
+/// Gets the value of the given variant.
+pub fn get_discriminant_value(tcx: TyCtxt<'_>, adt: AdtDef<'_>, i: VariantIdx) -> EnumValue {
+ let variant = &adt.variant(i);
+ match variant.discr {
+ VariantDiscr::Explicit(id) => read_explicit_enum_value(tcx, id).unwrap(),
+ VariantDiscr::Relative(x) => match adt.variant((i.as_usize() - x as usize).into()).discr {
+ VariantDiscr::Explicit(id) => read_explicit_enum_value(tcx, id).unwrap() + x,
+ VariantDiscr::Relative(_) => EnumValue::Unsigned(x.into()),
+ },
+ }
+}
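+
+// Illustrative example: for `enum E { A = 5, B, C }`, the variant `C` carries
+// `VariantDiscr::Relative(2)`, so its value is resolved as the explicit discriminant of `A`
+// plus that offset, i.e. `EnumValue::Signed(5) + 2`, which yields `EnumValue::Signed(7)`
+// (signed because the default discriminant type is `isize`).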
+
+/// Check if the given type is either `core::ffi::c_void`, `std::os::raw::c_void`, or one of the
+/// platform specific `libc::<platform>::c_void` types in libc.
+pub fn is_c_void(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
+ if let ty::Adt(adt, _) = ty.kind()
+ && let &[krate, .., name] = &*cx.get_def_path(adt.did())
+ && let sym::libc | sym::core | sym::std = krate
+ && name.as_str() == "c_void"
+ {
+ true
+ } else {
+ false
+ }
+}
+
+pub fn for_each_top_level_late_bound_region<B>(
+ ty: Ty<'_>,
+ f: impl FnMut(BoundRegion) -> ControlFlow<B>,
+) -> ControlFlow<B> {
+ struct V<F> {
+ index: u32,
+ f: F,
+ }
+ impl<'tcx, B, F: FnMut(BoundRegion) -> ControlFlow<B>> TypeVisitor<'tcx> for V<F> {
+ type BreakTy = B;
+ fn visit_region(&mut self, r: Region<'tcx>) -> ControlFlow<Self::BreakTy> {
+ if let RegionKind::ReLateBound(idx, bound) = r.kind() && idx.as_u32() == self.index {
+ (self.f)(bound)
+ } else {
+ ControlFlow::Continue(())
+ }
+ }
+ fn visit_binder<T: TypeVisitable<'tcx>>(&mut self, t: &Binder<'tcx, T>) -> ControlFlow<Self::BreakTy> {
+ self.index += 1;
+ let res = t.super_visit_with(self);
+ self.index -= 1;
+ res
+ }
+ }
+ ty.visit_with(&mut V { index: 0, f })
+}
+
+/// Gets the struct or enum variant from the given `Res`
+pub fn variant_of_res<'tcx>(cx: &LateContext<'tcx>, res: Res) -> Option<&'tcx VariantDef> {
+ match res {
+ Res::Def(DefKind::Struct, id) => Some(cx.tcx.adt_def(id).non_enum_variant()),
+ Res::Def(DefKind::Variant, id) => Some(cx.tcx.adt_def(cx.tcx.parent(id)).variant_with_id(id)),
+ Res::Def(DefKind::Ctor(CtorOf::Struct, _), id) => Some(cx.tcx.adt_def(cx.tcx.parent(id)).non_enum_variant()),
+ Res::Def(DefKind::Ctor(CtorOf::Variant, _), id) => {
+ let var_id = cx.tcx.parent(id);
+ Some(cx.tcx.adt_def(cx.tcx.parent(var_id)).variant_with_id(var_id))
+ },
+ Res::SelfCtor(id) => Some(cx.tcx.type_of(id).ty_adt_def().unwrap().non_enum_variant()),
+ _ => None,
+ }
+}
+
+/// Checks if the type is a type parameter implementing `FnOnce`, but not `FnMut`.
+pub fn ty_is_fn_once_param<'tcx>(tcx: TyCtxt<'_>, ty: Ty<'tcx>, predicates: &'tcx [Predicate<'_>]) -> bool {
+ let ty::Param(ty) = *ty.kind() else {
+ return false;
+ };
+ let lang = tcx.lang_items();
+ let (Some(fn_once_id), Some(fn_mut_id), Some(fn_id))
+ = (lang.fn_once_trait(), lang.fn_mut_trait(), lang.fn_trait())
+ else {
+ return false;
+ };
+ predicates
+ .iter()
+ .try_fold(false, |found, p| {
+ if let PredicateKind::Trait(p) = p.kind().skip_binder()
+ && let ty::Param(self_ty) = p.trait_ref.self_ty().kind()
+ && ty.index == self_ty.index
+ {
+ // This should use `super_traits_of`, but that's a private function.
+ if p.trait_ref.def_id == fn_once_id {
+ return Some(true);
+ } else if p.trait_ref.def_id == fn_mut_id || p.trait_ref.def_id == fn_id {
+ return None;
+ }
+ }
+ Some(found)
+ })
+ .unwrap_or(false)
+}
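+
+// Rough intuition (assuming only the explicitly written bounds appear in `predicates`): a
+// parameter declared as `F: FnOnce()` makes this return `true`, while `F: FnMut()` or
+// `F: Fn()` make it return `false`, since those bounds imply the value can be called repeatedly.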
diff --git a/src/tools/clippy/clippy_utils/src/usage.rs b/src/tools/clippy/clippy_utils/src/usage.rs
new file mode 100644
index 000000000..3af5dfb62
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/usage.rs
@@ -0,0 +1,216 @@
+use crate as utils;
+use crate::visitors::{expr_visitor, expr_visitor_no_bodies};
+use rustc_hir as hir;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::HirIdSet;
+use rustc_hir::{Expr, ExprKind, HirId, Node};
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::LateContext;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::mir::FakeReadCause;
+use rustc_middle::ty;
+use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
+
+/// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined.
+pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> {
+ let mut delegate = MutVarsDelegate {
+ used_mutably: HirIdSet::default(),
+ skip: false,
+ };
+ cx.tcx.infer_ctxt().enter(|infcx| {
+ ExprUseVisitor::new(
+ &mut delegate,
+ &infcx,
+ expr.hir_id.owner,
+ cx.param_env,
+ cx.typeck_results(),
+ )
+ .walk_expr(expr);
+ });
+
+ if delegate.skip {
+ return None;
+ }
+ Some(delegate.used_mutably)
+}
+
+pub fn is_potentially_mutated<'tcx>(variable: HirId, expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> bool {
+ mutated_variables(expr, cx).map_or(true, |mutated| mutated.contains(&variable))
+}
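+
+// Sketch of typical use, assuming a lint looking at a `while` loop with condition variable
+// `var_id: HirId` and loop body `body: &Expr<'_>`:
+//
+//     if is_potentially_mutated(var_id, body, cx) {
+//         // conservatively assume the condition can change between iterations
+//     }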
+
+struct MutVarsDelegate {
+ used_mutably: HirIdSet,
+ skip: bool,
+}
+
+impl<'tcx> MutVarsDelegate {
+ fn update(&mut self, cat: &PlaceWithHirId<'tcx>) {
+ match cat.place.base {
+ PlaceBase::Local(id) => {
+ self.used_mutably.insert(id);
+ },
+ PlaceBase::Upvar(_) => {
+ //FIXME: This causes false negatives. We can't get the `NodeId` from
+ //`Categorization::Upvar(_)`. So we search for any `Upvar`s in the
+ //`while`-body, not just the ones in the condition.
+ self.skip = true;
+ },
+ _ => {},
+ }
+ }
+}
+
+impl<'tcx> Delegate<'tcx> for MutVarsDelegate {
+ fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
+
+ fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, bk: ty::BorrowKind) {
+ if bk == ty::BorrowKind::MutBorrow {
+ self.update(cmt);
+ }
+ }
+
+ fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
+ self.update(cmt);
+ }
+
+ fn fake_read(&mut self, _: &rustc_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {}
+}
+
+pub struct ParamBindingIdCollector {
+ pub binding_hir_ids: Vec<hir::HirId>,
+}
+impl<'tcx> ParamBindingIdCollector {
+ fn collect_binding_hir_ids(body: &'tcx hir::Body<'tcx>) -> Vec<hir::HirId> {
+ let mut hir_ids: Vec<hir::HirId> = Vec::new();
+ for param in body.params.iter() {
+ let mut finder = ParamBindingIdCollector {
+ binding_hir_ids: Vec::new(),
+ };
+ finder.visit_param(param);
+ for hir_id in &finder.binding_hir_ids {
+ hir_ids.push(*hir_id);
+ }
+ }
+ hir_ids
+ }
+}
+impl<'tcx> intravisit::Visitor<'tcx> for ParamBindingIdCollector {
+ fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
+ if let hir::PatKind::Binding(_, hir_id, ..) = pat.kind {
+ self.binding_hir_ids.push(hir_id);
+ }
+ intravisit::walk_pat(self, pat);
+ }
+}
+
+pub struct BindingUsageFinder<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ binding_ids: Vec<hir::HirId>,
+ usage_found: bool,
+}
+impl<'a, 'tcx> BindingUsageFinder<'a, 'tcx> {
+ pub fn are_params_used(cx: &'a LateContext<'tcx>, body: &'tcx hir::Body<'tcx>) -> bool {
+ let mut finder = BindingUsageFinder {
+ cx,
+ binding_ids: ParamBindingIdCollector::collect_binding_hir_ids(body),
+ usage_found: false,
+ };
+ finder.visit_body(body);
+ finder.usage_found
+ }
+}
+impl<'a, 'tcx> intravisit::Visitor<'tcx> for BindingUsageFinder<'a, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
+ if !self.usage_found {
+ intravisit::walk_expr(self, expr);
+ }
+ }
+
+ fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
+ if let hir::def::Res::Local(id) = path.res {
+ if self.binding_ids.contains(&id) {
+ self.usage_found = true;
+ }
+ }
+ }
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+}
+
+pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool {
+ let mut seen_return_break_continue = false;
+ expr_visitor_no_bodies(|ex| {
+ if seen_return_break_continue {
+ return false;
+ }
+ match &ex.kind {
+ ExprKind::Ret(..) | ExprKind::Break(..) | ExprKind::Continue(..) => {
+ seen_return_break_continue = true;
+ },
+ // Something special could be done here to handle while or for loop
+ // desugaring, as this will detect a break if there's a while loop
+ // or a for loop inside the expression.
+ _ => {
+ if ex.span.from_expansion() {
+ seen_return_break_continue = true;
+ }
+ },
+ }
+ !seen_return_break_continue
+ })
+ .visit_expr(expression);
+ seen_return_break_continue
+}
+
+pub fn local_used_after_expr(cx: &LateContext<'_>, local_id: HirId, after: &Expr<'_>) -> bool {
+ let Some(block) = utils::get_enclosing_block(cx, local_id) else { return false };
+
+ // for _ in 1..3 {
+ // local
+ // }
+ //
+ // let closure = || local;
+ // closure();
+ // closure();
+ let in_loop_or_closure = cx
+ .tcx
+ .hir()
+ .parent_iter(after.hir_id)
+ .take_while(|&(id, _)| id != block.hir_id)
+ .any(|(_, node)| {
+ matches!(
+ node,
+ Node::Expr(Expr {
+ kind: ExprKind::Loop(..) | ExprKind::Closure { .. },
+ ..
+ })
+ )
+ });
+ if in_loop_or_closure {
+ return true;
+ }
+
+ let mut used_after_expr = false;
+ let mut past_expr = false;
+ expr_visitor(cx, |expr| {
+ if used_after_expr {
+ return false;
+ }
+
+ if expr.hir_id == after.hir_id {
+ past_expr = true;
+ return false;
+ }
+
+ if past_expr && utils::path_to_local_id(expr, local_id) {
+ used_after_expr = true;
+ }
+ !used_after_expr
+ })
+ .visit_block(block);
+ used_after_expr
+}
diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs
new file mode 100644
index 000000000..bae8ad9f5
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/visitors.rs
@@ -0,0 +1,733 @@
+use crate::ty::needs_ordered_drop;
+use crate::{get_enclosing_block, path_to_local_id};
+use core::ops::ControlFlow;
+use rustc_hir as hir;
+use rustc_hir::def::{CtorKind, DefKind, Res};
+use rustc_hir::intravisit::{self, walk_block, walk_expr, Visitor};
+use rustc_hir::{
+ Arm, Block, BlockCheckMode, Body, BodyId, Expr, ExprKind, HirId, ItemId, ItemKind, Let, Pat, QPath, Stmt, UnOp,
+ UnsafeSource, Unsafety,
+};
+use rustc_lint::LateContext;
+use rustc_middle::hir::map::Map;
+use rustc_middle::hir::nested_filter;
+use rustc_middle::ty::adjustment::Adjust;
+use rustc_middle::ty::{self, Ty, TypeckResults};
+use rustc_span::Span;
+
+mod internal {
+ /// Trait for visitor functions to control whether or not to descend to child nodes. Implemented
+ /// for only two types. `()` always descends. `Descend` allows controlled descent.
+ pub trait Continue {
+ fn descend(&self) -> bool;
+ }
+}
+use internal::Continue;
+
+impl Continue for () {
+ fn descend(&self) -> bool {
+ true
+ }
+}
+
+/// Allows for controlled descent when using visitor functions. Use `()` instead when always
+/// descending into child nodes.
+#[derive(Clone, Copy)]
+pub enum Descend {
+ Yes,
+ No,
+}
+impl From<bool> for Descend {
+ fn from(from: bool) -> Self {
+ if from { Self::Yes } else { Self::No }
+ }
+}
+impl Continue for Descend {
+ fn descend(&self) -> bool {
+ matches!(self, Self::Yes)
+ }
+}
+
+/// Calls the given function once for each expression contained. This does not enter any bodies or
+/// nested items.
+pub fn for_each_expr<'tcx, B, C: Continue>(
+ node: impl Visitable<'tcx>,
+ f: impl FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B, C>,
+) -> Option<B> {
+ struct V<B, F> {
+ f: F,
+ res: Option<B>,
+ }
+ impl<'tcx, B, C: Continue, F: FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B, C>> Visitor<'tcx> for V<B, F> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'tcx>) {
+ if self.res.is_some() {
+ return;
+ }
+ match (self.f)(e) {
+ ControlFlow::Continue(c) if c.descend() => walk_expr(self, e),
+ ControlFlow::Break(b) => self.res = Some(b),
+ ControlFlow::Continue(_) => (),
+ }
+ }
+
+ // Avoid unnecessary `walk_*` calls.
+ fn visit_ty(&mut self, _: &'tcx hir::Ty<'tcx>) {}
+ fn visit_pat(&mut self, _: &'tcx Pat<'tcx>) {}
+ fn visit_qpath(&mut self, _: &'tcx QPath<'tcx>, _: HirId, _: Span) {}
+ // Avoid monomorphising all `visit_*` functions.
+ fn visit_nested_item(&mut self, _: ItemId) {}
+ }
+ let mut v = V { f, res: None };
+ node.visit(&mut v);
+ v.res
+}
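+
+// Usage sketch, assuming `body: &Body<'_>` comes from the lint being written: find the span of
+// the first `return` expression, stopping the traversal early via `ControlFlow::Break`.
+//
+//     let first_ret: Option<Span> = for_each_expr(body, |e| {
+//         if matches!(e.kind, ExprKind::Ret(_)) {
+//             ControlFlow::Break(e.span)
+//         } else {
+//             ControlFlow::Continue(())
+//         }
+//     });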
+
+/// Convenience method for creating a `Visitor` with just `visit_expr` overridden, in which nested
+/// bodies (i.e. closures) are visited.
+/// If the callback returns `true`, the expr just provided to the callback is walked.
+#[must_use]
+pub fn expr_visitor<'tcx>(cx: &LateContext<'tcx>, f: impl FnMut(&'tcx Expr<'tcx>) -> bool) -> impl Visitor<'tcx> {
+ struct V<'tcx, F> {
+ hir: Map<'tcx>,
+ f: F,
+ }
+ impl<'tcx, F: FnMut(&'tcx Expr<'tcx>) -> bool> Visitor<'tcx> for V<'tcx, F> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.hir
+ }
+
+ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
+ if (self.f)(expr) {
+ walk_expr(self, expr);
+ }
+ }
+ }
+ V { hir: cx.tcx.hir(), f }
+}
+
+/// Convenience method for creating a `Visitor` with just `visit_expr` overridden, in which nested
+/// bodies (i.e. closures) are not visited.
+/// If the callback returns `true`, the expr just provided to the callback is walked.
+#[must_use]
+pub fn expr_visitor_no_bodies<'tcx>(f: impl FnMut(&'tcx Expr<'tcx>) -> bool) -> impl Visitor<'tcx> {
+ struct V<F>(F);
+ impl<'tcx, F: FnMut(&'tcx Expr<'tcx>) -> bool> Visitor<'tcx> for V<F> {
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if (self.0)(e) {
+ walk_expr(self, e);
+ }
+ }
+ }
+ V(f)
+}
+
+/// Returns `true` if the expression contains a `match` expression desugared from the `?` operator.
+fn contains_try(expr: &hir::Expr<'_>) -> bool {
+ let mut found = false;
+ expr_visitor_no_bodies(|e| {
+ if !found {
+ found = matches!(e.kind, hir::ExprKind::Match(_, _, hir::MatchSource::TryDesugar));
+ }
+ !found
+ })
+ .visit_expr(expr);
+ found
+}
+
+pub fn find_all_ret_expressions<'hir, F>(_cx: &LateContext<'_>, expr: &'hir hir::Expr<'hir>, callback: F) -> bool
+where
+ F: FnMut(&'hir hir::Expr<'hir>) -> bool,
+{
+ struct RetFinder<F> {
+ in_stmt: bool,
+ failed: bool,
+ cb: F,
+ }
+
+ struct WithStmtGuarg<'a, F> {
+ val: &'a mut RetFinder<F>,
+ prev_in_stmt: bool,
+ }
+
+ impl<F> RetFinder<F> {
+ fn inside_stmt(&mut self, in_stmt: bool) -> WithStmtGuarg<'_, F> {
+ let prev_in_stmt = std::mem::replace(&mut self.in_stmt, in_stmt);
+ WithStmtGuarg {
+ val: self,
+ prev_in_stmt,
+ }
+ }
+ }
+
+ impl<F> std::ops::Deref for WithStmtGuarg<'_, F> {
+ type Target = RetFinder<F>;
+
+ fn deref(&self) -> &Self::Target {
+ self.val
+ }
+ }
+
+ impl<F> std::ops::DerefMut for WithStmtGuarg<'_, F> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.val
+ }
+ }
+
+ impl<F> Drop for WithStmtGuarg<'_, F> {
+ fn drop(&mut self) {
+ self.val.in_stmt = self.prev_in_stmt;
+ }
+ }
+
+ impl<'hir, F: FnMut(&'hir hir::Expr<'hir>) -> bool> intravisit::Visitor<'hir> for RetFinder<F> {
+ fn visit_stmt(&mut self, stmt: &'hir hir::Stmt<'_>) {
+ intravisit::walk_stmt(&mut *self.inside_stmt(true), stmt);
+ }
+
+ fn visit_expr(&mut self, expr: &'hir hir::Expr<'_>) {
+ if self.failed {
+ return;
+ }
+ if self.in_stmt {
+ match expr.kind {
+ hir::ExprKind::Ret(Some(expr)) => self.inside_stmt(false).visit_expr(expr),
+ _ => intravisit::walk_expr(self, expr),
+ }
+ } else {
+ match expr.kind {
+ hir::ExprKind::If(cond, then, else_opt) => {
+ self.inside_stmt(true).visit_expr(cond);
+ self.visit_expr(then);
+ if let Some(el) = else_opt {
+ self.visit_expr(el);
+ }
+ },
+ hir::ExprKind::Match(cond, arms, _) => {
+ self.inside_stmt(true).visit_expr(cond);
+ for arm in arms {
+ self.visit_expr(arm.body);
+ }
+ },
+ hir::ExprKind::Block(..) => intravisit::walk_expr(self, expr),
+ hir::ExprKind::Ret(Some(expr)) => self.visit_expr(expr),
+ _ => self.failed |= !(self.cb)(expr),
+ }
+ }
+ }
+ }
+
+ !contains_try(expr) && {
+ let mut ret_finder = RetFinder {
+ in_stmt: false,
+ failed: false,
+ cb: callback,
+ };
+ ret_finder.visit_expr(expr);
+ !ret_finder.failed
+ }
+}
+
+/// A type which can be visited.
+pub trait Visitable<'tcx> {
+ /// Calls the corresponding `visit_*` function on the visitor.
+ fn visit<V: Visitor<'tcx>>(self, visitor: &mut V);
+}
+macro_rules! visitable_ref {
+ ($t:ident, $f:ident) => {
+ impl<'tcx> Visitable<'tcx> for &'tcx $t<'tcx> {
+ fn visit<V: Visitor<'tcx>>(self, visitor: &mut V) {
+ visitor.$f(self);
+ }
+ }
+ };
+}
+visitable_ref!(Arm, visit_arm);
+visitable_ref!(Block, visit_block);
+visitable_ref!(Body, visit_body);
+visitable_ref!(Expr, visit_expr);
+visitable_ref!(Stmt, visit_stmt);
+
+// impl<'tcx, I: IntoIterator> Visitable<'tcx> for I
+// where
+// I::Item: Visitable<'tcx>,
+// {
+// fn visit<V: Visitor<'tcx>>(self, visitor: &mut V) {
+// for x in self {
+// x.visit(visitor);
+// }
+// }
+// }
+
+/// Checks if the given resolved path is used in the given body.
+pub fn is_res_used(cx: &LateContext<'_>, res: Res, body: BodyId) -> bool {
+ let mut found = false;
+ expr_visitor(cx, |e| {
+ if found {
+ return false;
+ }
+
+ if let ExprKind::Path(p) = &e.kind {
+ if cx.qpath_res(p, e.hir_id) == res {
+ found = true;
+ }
+ }
+ !found
+ })
+ .visit_expr(&cx.tcx.hir().body(body).value);
+ found
+}
+
+/// Checks if the given local is used.
+pub fn is_local_used<'tcx>(cx: &LateContext<'tcx>, visitable: impl Visitable<'tcx>, id: HirId) -> bool {
+ let mut is_used = false;
+ let mut visitor = expr_visitor(cx, |expr| {
+ if !is_used {
+ is_used = path_to_local_id(expr, id);
+ }
+ !is_used
+ });
+ visitable.visit(&mut visitor);
+ drop(visitor);
+ is_used
+}
+
+/// Checks if the given expression can be evaluated as a constant.
+pub fn is_const_evaluatable<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool {
+ struct V<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ is_const: bool,
+ }
+ impl<'tcx> Visitor<'tcx> for V<'_, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if !self.is_const {
+ return;
+ }
+ match e.kind {
+ ExprKind::ConstBlock(_) => return,
+ ExprKind::Call(
+ &Expr {
+ kind: ExprKind::Path(ref p),
+ hir_id,
+ ..
+ },
+ _,
+ ) if self
+ .cx
+ .qpath_res(p, hir_id)
+ .opt_def_id()
+ .map_or(false, |id| self.cx.tcx.is_const_fn_raw(id)) => {},
+ ExprKind::MethodCall(..)
+ if self
+ .cx
+ .typeck_results()
+ .type_dependent_def_id(e.hir_id)
+ .map_or(false, |id| self.cx.tcx.is_const_fn_raw(id)) => {},
+ ExprKind::Binary(_, lhs, rhs)
+ if self.cx.typeck_results().expr_ty(lhs).peel_refs().is_primitive_ty()
+ && self.cx.typeck_results().expr_ty(rhs).peel_refs().is_primitive_ty() => {},
+ ExprKind::Unary(UnOp::Deref, e) if self.cx.typeck_results().expr_ty(e).is_ref() => (),
+ ExprKind::Unary(_, e) if self.cx.typeck_results().expr_ty(e).peel_refs().is_primitive_ty() => (),
+ ExprKind::Index(base, _)
+ if matches!(
+ self.cx.typeck_results().expr_ty(base).peel_refs().kind(),
+ ty::Slice(_) | ty::Array(..)
+ ) => {},
+ ExprKind::Path(ref p)
+ if matches!(
+ self.cx.qpath_res(p, e.hir_id),
+ Res::Def(
+ DefKind::Const
+ | DefKind::AssocConst
+ | DefKind::AnonConst
+ | DefKind::ConstParam
+ | DefKind::Ctor(..)
+ | DefKind::Fn
+ | DefKind::AssocFn,
+ _
+ ) | Res::SelfCtor(_)
+ ) => {},
+
+ ExprKind::AddrOf(..)
+ | ExprKind::Array(_)
+ | ExprKind::Block(..)
+ | ExprKind::Cast(..)
+ | ExprKind::DropTemps(_)
+ | ExprKind::Field(..)
+ | ExprKind::If(..)
+ | ExprKind::Let(..)
+ | ExprKind::Lit(_)
+ | ExprKind::Match(..)
+ | ExprKind::Repeat(..)
+ | ExprKind::Struct(..)
+ | ExprKind::Tup(_)
+ | ExprKind::Type(..) => (),
+
+ _ => {
+ self.is_const = false;
+ return;
+ },
+ }
+ walk_expr(self, e);
+ }
+ }
+
+ let mut v = V { cx, is_const: true };
+ v.visit_expr(e);
+ v.is_const
+}
+
+/// Checks if the given expression performs an unsafe operation outside of an unsafe block.
+pub fn is_expr_unsafe<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool {
+ struct V<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ is_unsafe: bool,
+ }
+ impl<'tcx> Visitor<'tcx> for V<'_, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+ fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
+ if self.is_unsafe {
+ return;
+ }
+ match e.kind {
+ ExprKind::Unary(UnOp::Deref, e) if self.cx.typeck_results().expr_ty(e).is_unsafe_ptr() => {
+ self.is_unsafe = true;
+ },
+ ExprKind::MethodCall(..)
+ if self
+ .cx
+ .typeck_results()
+ .type_dependent_def_id(e.hir_id)
+ .map_or(false, |id| self.cx.tcx.fn_sig(id).unsafety() == Unsafety::Unsafe) =>
+ {
+ self.is_unsafe = true;
+ },
+ ExprKind::Call(func, _) => match *self.cx.typeck_results().expr_ty(func).peel_refs().kind() {
+ ty::FnDef(id, _) if self.cx.tcx.fn_sig(id).unsafety() == Unsafety::Unsafe => self.is_unsafe = true,
+ ty::FnPtr(sig) if sig.unsafety() == Unsafety::Unsafe => self.is_unsafe = true,
+ _ => walk_expr(self, e),
+ },
+ ExprKind::Path(ref p)
+ if self
+ .cx
+ .qpath_res(p, e.hir_id)
+ .opt_def_id()
+ .map_or(false, |id| self.cx.tcx.is_mutable_static(id)) =>
+ {
+ self.is_unsafe = true;
+ },
+ _ => walk_expr(self, e),
+ }
+ }
+ fn visit_block(&mut self, b: &'tcx Block<'_>) {
+ if !matches!(b.rules, BlockCheckMode::UnsafeBlock(_)) {
+ walk_block(self, b);
+ }
+ }
+ fn visit_nested_item(&mut self, id: ItemId) {
+ if let ItemKind::Impl(i) = &self.cx.tcx.hir().item(id).kind {
+ self.is_unsafe = i.unsafety == Unsafety::Unsafe;
+ }
+ }
+ }
+ let mut v = V { cx, is_unsafe: false };
+ v.visit_expr(e);
+ v.is_unsafe
+}
+
+/// Checks if the given expression contains an unsafe block
+pub fn contains_unsafe_block<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> bool {
+ struct V<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ found_unsafe: bool,
+ }
+ impl<'tcx> Visitor<'tcx> for V<'_, 'tcx> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_block(&mut self, b: &'tcx Block<'_>) {
+ if self.found_unsafe {
+ return;
+ }
+ if b.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) {
+ self.found_unsafe = true;
+ return;
+ }
+ walk_block(self, b);
+ }
+ }
+ let mut v = V {
+ cx,
+ found_unsafe: false,
+ };
+ v.visit_expr(e);
+ v.found_unsafe
+}
+
+/// Runs the given function for each sub-expression producing the final value consumed by the parent
+/// of the given expression.
+///
+/// e.g. for the following expression
+/// ```rust,ignore
+/// if foo {
+/// f(0)
+/// } else {
+/// 1 + 1
+/// }
+/// ```
+/// this will pass both `f(0)` and `1+1` to the given function.
+pub fn for_each_value_source<'tcx, B>(
+ e: &'tcx Expr<'tcx>,
+ f: &mut impl FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B>,
+) -> ControlFlow<B> {
+ match e.kind {
+ ExprKind::Block(Block { expr: Some(e), .. }, _) => for_each_value_source(e, f),
+ ExprKind::Match(_, arms, _) => {
+ for arm in arms {
+ for_each_value_source(arm.body, f)?;
+ }
+ ControlFlow::Continue(())
+ },
+ ExprKind::If(_, if_expr, Some(else_expr)) => {
+ for_each_value_source(if_expr, f)?;
+ for_each_value_source(else_expr, f)
+ },
+ ExprKind::DropTemps(e) => for_each_value_source(e, f),
+ _ => f(e),
+ }
+}
+
+/// Runs the given function for each path expression that references the given local and occurs
+/// after the given expression.
+pub fn for_each_local_use_after_expr<'tcx, B>(
+ cx: &LateContext<'tcx>,
+ local_id: HirId,
+ expr_id: HirId,
+ f: impl FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B>,
+) -> ControlFlow<B> {
+ struct V<'cx, 'tcx, F, B> {
+ cx: &'cx LateContext<'tcx>,
+ local_id: HirId,
+ expr_id: HirId,
+ found: bool,
+ res: ControlFlow<B>,
+ f: F,
+ }
+ impl<'cx, 'tcx, F: FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B>, B> Visitor<'tcx> for V<'cx, 'tcx, F, B> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'tcx>) {
+ if !self.found {
+ if e.hir_id == self.expr_id {
+ self.found = true;
+ } else {
+ walk_expr(self, e);
+ }
+ return;
+ }
+ if self.res.is_break() {
+ return;
+ }
+ if path_to_local_id(e, self.local_id) {
+ self.res = (self.f)(e);
+ } else {
+ walk_expr(self, e);
+ }
+ }
+ }
+
+ if let Some(b) = get_enclosing_block(cx, local_id) {
+ let mut v = V {
+ cx,
+ local_id,
+ expr_id,
+ found: false,
+ res: ControlFlow::Continue(()),
+ f,
+ };
+ v.visit_block(b);
+ v.res
+ } else {
+ ControlFlow::Continue(())
+ }
+}
+
+// Calls the given function for every unconsumed temporary created by the expression. Note the
+// function is only guaranteed to be called for types which need to be dropped, but it may be called
+// for other types.
+pub fn for_each_unconsumed_temporary<'tcx, B>(
+ cx: &LateContext<'tcx>,
+ e: &'tcx Expr<'tcx>,
+ mut f: impl FnMut(Ty<'tcx>) -> ControlFlow<B>,
+) -> ControlFlow<B> {
+ // Todo: Handle partially consumed values.
+ fn helper<'tcx, B>(
+ typeck: &'tcx TypeckResults<'tcx>,
+ consume: bool,
+ e: &'tcx Expr<'tcx>,
+ f: &mut impl FnMut(Ty<'tcx>) -> ControlFlow<B>,
+ ) -> ControlFlow<B> {
+ if !consume
+ || matches!(
+ typeck.expr_adjustments(e),
+ [adjust, ..] if matches!(adjust.kind, Adjust::Borrow(_) | Adjust::Deref(_))
+ )
+ {
+ match e.kind {
+ ExprKind::Path(QPath::Resolved(None, p))
+ if matches!(p.res, Res::Def(DefKind::Ctor(_, CtorKind::Const), _)) =>
+ {
+ f(typeck.expr_ty(e))?;
+ },
+ ExprKind::Path(_)
+ | ExprKind::Unary(UnOp::Deref, _)
+ | ExprKind::Index(..)
+ | ExprKind::Field(..)
+ | ExprKind::AddrOf(..) => (),
+ _ => f(typeck.expr_ty(e))?,
+ }
+ }
+ match e.kind {
+ ExprKind::AddrOf(_, _, e)
+ | ExprKind::Field(e, _)
+ | ExprKind::Unary(UnOp::Deref, e)
+ | ExprKind::Match(e, ..)
+ | ExprKind::Let(&Let { init: e, .. }) => {
+ helper(typeck, false, e, f)?;
+ },
+ ExprKind::Block(&Block { expr: Some(e), .. }, _)
+ | ExprKind::Box(e)
+ | ExprKind::Cast(e, _)
+ | ExprKind::Unary(_, e) => {
+ helper(typeck, true, e, f)?;
+ },
+ ExprKind::Call(callee, args) => {
+ helper(typeck, true, callee, f)?;
+ for arg in args {
+ helper(typeck, true, arg, f)?;
+ }
+ },
+ ExprKind::MethodCall(_, args, _) | ExprKind::Tup(args) | ExprKind::Array(args) => {
+ for arg in args {
+ helper(typeck, true, arg, f)?;
+ }
+ },
+ ExprKind::Index(borrowed, consumed)
+ | ExprKind::Assign(borrowed, consumed, _)
+ | ExprKind::AssignOp(_, borrowed, consumed) => {
+ helper(typeck, false, borrowed, f)?;
+ helper(typeck, true, consumed, f)?;
+ },
+ ExprKind::Binary(_, lhs, rhs) => {
+ helper(typeck, true, lhs, f)?;
+ helper(typeck, true, rhs, f)?;
+ },
+ ExprKind::Struct(_, fields, default) => {
+ for field in fields {
+ helper(typeck, true, field.expr, f)?;
+ }
+ if let Some(default) = default {
+ helper(typeck, false, default, f)?;
+ }
+ },
+ ExprKind::If(cond, then, else_expr) => {
+ helper(typeck, true, cond, f)?;
+ helper(typeck, true, then, f)?;
+ if let Some(else_expr) = else_expr {
+ helper(typeck, true, else_expr, f)?;
+ }
+ },
+ ExprKind::Type(e, _) => {
+ helper(typeck, consume, e, f)?;
+ },
+
+ // Either drops temporaries, jumps out of the current expression, or has no sub expression.
+ ExprKind::DropTemps(_)
+ | ExprKind::Ret(_)
+ | ExprKind::Break(..)
+ | ExprKind::Yield(..)
+ | ExprKind::Block(..)
+ | ExprKind::Loop(..)
+ | ExprKind::Repeat(..)
+ | ExprKind::Lit(_)
+ | ExprKind::ConstBlock(_)
+ | ExprKind::Closure { .. }
+ | ExprKind::Path(_)
+ | ExprKind::Continue(_)
+ | ExprKind::InlineAsm(_)
+ | ExprKind::Err => (),
+ }
+ ControlFlow::Continue(())
+ }
+ helper(cx.typeck_results(), true, e, &mut f)
+}
+
+pub fn any_temporaries_need_ordered_drop<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> bool {
+ for_each_unconsumed_temporary(cx, e, |ty| {
+ if needs_ordered_drop(cx, ty) {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
+ })
+ .is_break()
+}
+
+/// Runs the given function for the right-hand side of each assignment to the given local.
+pub fn for_each_local_assignment<'tcx, B>(
+ cx: &LateContext<'tcx>,
+ local_id: HirId,
+ f: impl FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B>,
+) -> ControlFlow<B> {
+ struct V<'cx, 'tcx, F, B> {
+ cx: &'cx LateContext<'tcx>,
+ local_id: HirId,
+ res: ControlFlow<B>,
+ f: F,
+ }
+ impl<'cx, 'tcx, F: FnMut(&'tcx Expr<'tcx>) -> ControlFlow<B>, B> Visitor<'tcx> for V<'cx, 'tcx, F, B> {
+ type NestedFilter = nested_filter::OnlyBodies;
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_expr(&mut self, e: &'tcx Expr<'tcx>) {
+ if let ExprKind::Assign(lhs, rhs, _) = e.kind
+ && self.res.is_continue()
+ && path_to_local_id(lhs, self.local_id)
+ {
+ self.res = (self.f)(rhs);
+ self.visit_expr(rhs);
+ } else {
+ walk_expr(self, e);
+ }
+ }
+ }
+
+ if let Some(b) = get_enclosing_block(cx, local_id) {
+ let mut v = V {
+ cx,
+ local_id,
+ res: ControlFlow::Continue(()),
+ f,
+ };
+ v.visit_block(b);
+ v.res
+ } else {
+ ControlFlow::Continue(())
+ }
+}
diff --git a/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md b/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md
new file mode 100644
index 000000000..fcd7abbf3
--- /dev/null
+++ b/src/tools/clippy/etc/relicense/RELICENSE_DOCUMENTATION.md
@@ -0,0 +1,69 @@
+This repository was previously licensed under MPL-2.0; however, in #3093
+([archive](http://web.archive.org/web/20181005185227/https://github.com/rust-lang-nursery/rust-clippy/issues/3093),
+[screenshot](https://user-images.githubusercontent.com/1617736/46573505-5b856880-c94b-11e8-9a14-981c889b4981.png)) we
+relicensed it to the Rust license (dual licensed as Apache v2 / MIT).
+
+At the time, the contributors were those listed in contributors.txt.
+
+We opened a bunch of issues asking for an explicit relicensing approval. Screenshots of all these issues at the time of
+relicensing are archived on GitHub. We also have saved Wayback Machine copies of these:
+
+- #3094
+ ([archive](http://web.archive.org/web/20181005191247/https://github.com/rust-lang-nursery/rust-clippy/issues/3094),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573506-5b856880-c94b-11e8-8a44-51cb40bc16ee.png))
+- #3095
+ ([archive](http://web.archive.org/web/20181005184416/https://github.com/rust-lang-nursery/rust-clippy/issues/3095),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573507-5c1dff00-c94b-11e8-912a-4bd6b5f838f5.png))
+- #3096
+ ([archive](http://web.archive.org/web/20181005184802/https://github.com/rust-lang-nursery/rust-clippy/issues/3096),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573508-5c1dff00-c94b-11e8-9425-2464f7260ff0.png))
+- #3097
+ ([archive](http://web.archive.org/web/20181005184821/https://github.com/rust-lang-nursery/rust-clippy/issues/3097),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573509-5c1dff00-c94b-11e8-8ba2-53f687984fe7.png))
+- #3098
+ ([archive](http://web.archive.org/web/20181005184900/https://github.com/rust-lang-nursery/rust-clippy/issues/3098),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573510-5c1dff00-c94b-11e8-8f64-371698401c60.png))
+- #3099
+ ([archive](http://web.archive.org/web/20181005184901/https://github.com/rust-lang-nursery/rust-clippy/issues/3099),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573511-5c1dff00-c94b-11e8-8e20-7d0eeb392b95.png))
+- #3100
+ ([archive](http://web.archive.org/web/20181005184901/https://github.com/rust-lang-nursery/rust-clippy/issues/3100),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573512-5c1dff00-c94b-11e8-8a13-7d758ed3563d.png))
+- #3230
+ ([archive](http://web.archive.org/web/20181005184903/https://github.com/rust-lang-nursery/rust-clippy/issues/3230),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573513-5cb69580-c94b-11e8-86b1-14ce82741e5c.png))
+
+The usernames of commenters on these issues can be found in relicense_comments.txt
+
+There are a couple people in relicense_comments.txt who are not found in contributors.txt:
+
+- @EpocSquadron has [made minor text contributions to the
+ README](https://github.com/rust-lang/rust-clippy/commits?author=EpocSquadron) which have since been overwritten, and
+ doesn't count
+- @JayKickliter [agreed to the relicense on their pull
+ request](https://github.com/rust-lang/rust-clippy/pull/3195#issuecomment-423781016)
+ ([archive](https://web.archive.org/web/20181005190730/https://github.com/rust-lang/rust-clippy/pull/3195),
+ [screenshot](https://user-images.githubusercontent.com/1617736/46573514-5cb69580-c94b-11e8-8ffb-05a5bd02e2cc.png))
+
+- @sanmai-NL's [contribution](https://github.com/rust-lang/rust-clippy/commits?author=sanmai-NL) is a minor one-word
+ addition which doesn't count for copyright assignment
+- @zmt00's [contributions](https://github.com/rust-lang/rust-clippy/commits?author=zmt00) are minor typo fixes and don't
+ count
+- @VKlayd has [nonminor contributions](https://github.com/rust-lang/rust-clippy/commits?author=VKlayd) which we rewrote
+ (see below)
+- @wartman4404 has [nonminor contributions](https://github.com/rust-lang/rust-clippy/commits?author=wartman4404) which
+ we rewrote (see below)
+
+
+Two of these contributors had nonminor contributions (#2184, #427) requiring a rewrite, carried out in #3251
+([archive](http://web.archive.org/web/20181005192411/https://github.com/rust-lang-nursery/rust-clippy/pull/3251),
+[screenshot](https://user-images.githubusercontent.com/1617736/46573515-5cb69580-c94b-11e8-86e5-b456452121b2.png))
+
+First, I (Manishearth) removed the lints they had added. I then documented at a high level what the lints did in #3251,
+asking for co-maintainers who had not seen the code for the lints to rewrite them. #2814 was rewritten by @phansch, and
+#427 was rewritten by @oli-obk, who did not recall having previously seen the code they were rewriting.
+
+------
+
+Since this document was written, @JayKickliter and @sanmai-NL added their consent in #3230
+([archive](http://web.archive.org/web/20181006171926/https://github.com/rust-lang-nursery/rust-clippy/issues/3230))
diff --git a/src/tools/clippy/etc/relicense/contributors.txt b/src/tools/clippy/etc/relicense/contributors.txt
new file mode 100644
index 000000000..e81ebf214
--- /dev/null
+++ b/src/tools/clippy/etc/relicense/contributors.txt
@@ -0,0 +1,232 @@
+0ndorio
+0xbsec
+17cupsofcoffee
+Aaron1011
+Aaronepower
+aaudiber
+afck
+alexcrichton
+AlexEne
+alexeyzab
+alexheretic
+alexreg
+alusch
+andersk
+aochagavia
+apasel422
+Arnavion
+AtheMathmo
+auscompgeek
+AVerm
+badboy
+Baelyk
+BenoitZugmeyer
+bestouff
+birkenfeld
+bjgill
+bkchr
+Bobo1239
+bood
+bootandy
+b-r-u
+budziq
+CAD97
+Caemor
+camsteffen
+carols10cents
+CBenoit
+cesarb
+cgm616
+chrisduerr
+chrisvittal
+chyvonomys
+clarcharr
+clippered
+commandline
+cramertj
+csmoe
+ctjhoa
+cuviper
+CYBAI
+darArch
+DarkEld3r
+dashed
+daubaris
+d-dorazio
+debris
+dereckson
+detrumi
+devonhollowood
+dtolnay
+durka
+dwijnand
+eddyb
+elliottneilclark
+elpiel
+ensch
+EpicatSupercell
+EpocSquadron
+erickt
+estk
+etaoins
+F001
+fanzier
+FauxFaux
+fhartwig
+flip1995
+Fraser999
+Frederick888
+frewsxcv
+gbip
+gendx
+gibfahn
+gnieto
+gnzlbg
+goodmanjonathan
+guido4000
+GuillaumeGomez
+Hanaasagi
+hdhoang
+HMPerson1
+hobofan
+iKevinY
+illicitonion
+imp
+inrustwetrust
+ishitatsuyuki
+Jascha-N
+jayhardee9
+JayKickliter
+JDemler
+jedisct1
+jmquigs
+joelgallant
+joeratt
+josephDunne
+JoshMcguigan
+joshtriplett
+jugglerchris
+karyon
+Keats
+kennytm
+Kha
+killercup
+kimsnj
+KitFreddura
+koivunej
+kraai
+kvikas
+LaurentMazare
+letheed
+llogiq
+lo48576
+lpesk
+lucab
+luisbg
+lukasstevens
+Machtan
+MaloJaffre
+Manishearth
+marcusklaas
+mark-i-m
+martiansideofthemoon
+martinlindhe
+mathstuf
+mati865
+matthiaskrgr
+mattyhall
+mbrubeck
+mcarton
+memoryleak47
+messense
+michaelrutherford
+mikerite
+mipli
+mockersf
+montrivo
+mrecachinas
+Mrmaxmeier
+mrmonday
+ms2300
+Ms2ger
+musoke
+nathan
+Nemo157
+NiekGr
+niklasf
+nrc
+nweston
+o01eg
+ogham
+oli-obk
+ordovicia
+pengowen123
+pgerber
+phansch
+philipturnbull
+pickfire
+pietro
+PixelPirate
+pizzaiter
+PSeitz
+Pyriphlegethon
+pythonesque
+quininer
+Rantanen
+rcoh
+reiner-dolp
+reujab
+Robzz
+samueltardieu
+sanmai-NL
+sanxiyn
+scott-linder
+scottmcm
+scurest
+senden9
+shahn
+shepmaster
+shnewto
+shssoichiro
+siiptuo
+sinkuu
+skade
+sourcefrog
+sourcejedi
+steveklabnik
+sunfishcode
+sunjay
+swgillespie
+Techcable
+terry90
+theemathas
+thekidxp
+theotherphil
+TimNN
+TomasKralCZ
+tomprince
+topecongiro
+tspiteri
+Twisol
+U007D
+uHOOCCOOHu
+untitaker
+upsuper
+utaal
+utam0k
+vi
+VKlayd
+Vlad-Shcherbina
+vorner
+wafflespeanut
+wartman4404
+waywardmonkeys
+yaahallo
+yangby-cryptape
+yati-sagade
+ykrivopalov
+ysimonson
+zayenz
+zmanian
+zmbush
+zmt00
diff --git a/src/tools/clippy/etc/relicense/relicense_comments.txt b/src/tools/clippy/etc/relicense/relicense_comments.txt
new file mode 100644
index 000000000..52c25eb20
--- /dev/null
+++ b/src/tools/clippy/etc/relicense/relicense_comments.txt
@@ -0,0 +1,227 @@
+0ndorio
+0xbsec
+17cupsofcoffee
+Aaron1011
+Aaronepower
+aaudiber
+afck
+alexcrichton
+AlexEne
+alexeyzab
+alexheretic
+alexreg
+alusch
+andersk
+aochagavia
+apasel422
+Arnavion
+AtheMathmo
+auscompgeek
+AVerm
+badboy
+Baelyk
+BenoitZugmeyer
+bestouff
+birkenfeld
+bjgill
+bkchr
+Bobo1239
+bood
+bootandy
+b-r-u
+budziq
+CAD97
+Caemor
+camsteffen
+carols10cents
+CBenoit
+cesarb
+cgm616
+chrisduerr
+chrisvittal
+chyvonomys
+clarcharr
+clippered
+commandline
+cramertj
+csmoe
+ctjhoa
+cuviper
+CYBAI
+darArch
+DarkEld3r
+dashed
+daubaris
+d-dorazio
+debris
+dereckson
+detrumi
+devonhollowood
+dtolnay
+durka
+dwijnand
+eddyb
+elliottneilclark
+elpiel
+ensch
+EpicatSupercell
+erickt
+estk
+etaoins
+F001
+fanzier
+FauxFaux
+fhartwig
+flip1995
+Fraser999
+Frederick888
+frewsxcv
+gbip
+gendx
+gibfahn
+gnieto
+gnzlbg
+goodmanjonathan
+guido4000
+GuillaumeGomez
+Hanaasagi
+hdhoang
+HMPerson1
+hobofan
+iKevinY
+illicitonion
+imp
+inrustwetrust
+ishitatsuyuki
+Jascha-N
+jayhardee9
+JDemler
+jedisct1
+jmquigs
+joelgallant
+joeratt
+josephDunne
+JoshMcguigan
+joshtriplett
+jugglerchris
+karyon
+Keats
+kennytm
+Kha
+killercup
+kimsnj
+KitFreddura
+koivunej
+kraai
+kvikas
+LaurentMazare
+letheed
+llogiq
+lo48576
+lpesk
+lucab
+luisbg
+lukasstevens
+Machtan
+MaloJaffre
+Manishearth
+marcusklaas
+mark-i-m
+martiansideofthemoon
+martinlindhe
+mathstuf
+mati865
+matthiaskrgr
+mattyhall
+mbrubeck
+mcarton
+memoryleak47
+messense
+michaelrutherford
+mikerite
+mipli
+mockersf
+montrivo
+mrecachinas
+Mrmaxmeier
+mrmonday
+ms2300
+Ms2ger
+musoke
+nathan
+Nemo157
+NiekGr
+niklasf
+nrc
+nweston
+o01eg
+ogham
+oli-obk
+ordovicia
+pengowen123
+pgerber
+phansch
+philipturnbull
+pickfire
+pietro
+PixelPirate
+pizzaiter
+PSeitz
+Pyriphlegethon
+pythonesque
+quininer
+Rantanen
+rcoh
+reiner-dolp
+reujab
+Robzz
+samueltardieu
+sanxiyn
+scott-linder
+scottmcm
+scurest
+senden9
+shahn
+shepmaster
+shnewto
+shssoichiro
+siiptuo
+sinkuu
+skade
+sourcefrog
+sourcejedi
+steveklabnik
+sunfishcode
+sunjay
+swgillespie
+Techcable
+terry90
+theemathas
+thekidxp
+theotherphil
+TimNN
+TomasKralCZ
+tommilligan
+tomprince
+topecongiro
+tspiteri
+Twisol
+U007D
+uHOOCCOOHu
+untitaker
+upsuper
+utaal
+utam0k
+vi
+Vlad-Shcherbina
+vorner
+wafflespeanut
+waywardmonkeys
+yaahallo
+yangby-cryptape
+yati-sagade
+ykrivopalov
+ysimonson
+zayenz
+zmanian
+zmbush
diff --git a/src/tools/clippy/lintcheck/Cargo.toml b/src/tools/clippy/lintcheck/Cargo.toml
new file mode 100644
index 000000000..737c845c0
--- /dev/null
+++ b/src/tools/clippy/lintcheck/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "lintcheck"
+version = "0.0.1"
+description = "tool to monitor impact of changes in Clippy's lints on a part of the ecosystem"
+readme = "README.md"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-clippy"
+categories = ["development-tools"]
+edition = "2021"
+publish = false
+
+[dependencies]
+cargo_metadata = "0.14"
+clap = "3.2"
+flate2 = "1.0"
+rayon = "1.5.1"
+serde = { version = "1.0", features = ["derive"] }
+tar = "0.4"
+toml = "0.5"
+ureq = "2.2"
+walkdir = "2.3"
+
+[features]
+deny-warnings = []
diff --git a/src/tools/clippy/lintcheck/README.md b/src/tools/clippy/lintcheck/README.md
new file mode 100644
index 000000000..6f3d23382
--- /dev/null
+++ b/src/tools/clippy/lintcheck/README.md
@@ -0,0 +1,77 @@
+## `cargo lintcheck`
+
+Runs clippy on a fixed set of crates read from
+`lintcheck/lintcheck_crates.toml` and saves logs of the lint warnings into the
+repo. We can then check the diff and spot new or disappearing warnings.
+
+From the repo root, run:
+
+```
+cargo run --target-dir lintcheck/target --manifest-path lintcheck/Cargo.toml
+```
+
+or
+
+```
+cargo lintcheck
+```
+
+By default the logs will be saved into
+`lintcheck-logs/lintcheck_crates_logs.txt`.
+
+You can set a custom sources.toml by adding `--crates-toml custom.toml` or using
+`LINTCHECK_TOML="custom.toml"` where `custom.toml` must be a relative path from
+the repo root.
+
+The results will then be saved to `lintcheck-logs/custom_logs.toml`.
+
+### Configuring the Crate Sources
+
+The sources to check are saved in a `toml` file. There are three types of
+sources.
+
+1. Crates-io Source
+
+ ```toml
+ bitflags = {name = "bitflags", versions = ['1.2.1']}
+ ```
+ Requires a "name" and one or multiple "versions" to be checked.
+
+2. `git` Source
+ ````toml
+ puffin = {name = "puffin", git_url = "https://github.com/EmbarkStudios/puffin", git_hash = "02dd4a3"}
+ ````
+ Requires a name, the URL of the repo, and a unique identifier of a commit,
+ branch, or tag which is checked out before linting. There is no way to always
+ check `HEAD`, because that would lead to changing lint results as the repo
+ gets updated. If `git_url` or `git_hash` is missing, an error will be
+ thrown.
+
+3. Local Dependency
+ ```toml
+ clippy = {name = "clippy", path = "/home/user/clippy"}
+ ```
+ For when you want to add a repository that is not published yet.
+
+#### Command Line Options (optional)
+
+```toml
+bitflags = {name = "bitflags", versions = ['1.2.1'], options = ['-Wclippy::pedantic', '-Wclippy::cargo']}
+```
+
+It is possible to specify command line options for each crate. This makes it
+possible to only check a crate for certain lint groups. If no options are
+specified, the lint groups `clippy::all`, `clippy::pedantic`, and
+`clippy::cargo` are checked. If an empty array is specified only `clippy::all`
+is checked.
+
+**Note:** `-Wclippy::all` is always enabled by default, unless `-Aclippy::all`
+is explicitly specified in the options.
+
+
+### Fix mode
+
+You can run `./lintcheck/target/debug/lintcheck --fix` which will run Clippy with `--fix` and
+print a warning if Clippy's suggestions fail to apply (if the resulting code does not build).
+This lets us spot bad suggestions or false positives automatically in some cases.
+
+Please note that the target dir should be cleaned afterwards, since Clippy will modify
+the downloaded sources, which can lead to unexpected results when running lintcheck again.
diff --git a/src/tools/clippy/lintcheck/lintcheck_crates.toml b/src/tools/clippy/lintcheck/lintcheck_crates.toml
new file mode 100644
index 000000000..4fbae8614
--- /dev/null
+++ b/src/tools/clippy/lintcheck/lintcheck_crates.toml
@@ -0,0 +1,35 @@
+[crates]
+# some of these are from cargotest
+cargo = {name = "cargo", versions = ['0.49.0']}
+iron = {name = "iron", versions = ['0.6.1']}
+ripgrep = {name = "ripgrep", versions = ['12.1.1']}
+xsv = {name = "xsv", versions = ['0.13.0']}
+# commented out because of 173K clippy::match_same_arms msgs in language_type.rs
+#tokei = { name = "tokei", versions = ['12.0.4']}
+rayon = {name = "rayon", versions = ['1.5.0']}
+serde = {name = "serde", versions = ['1.0.118']}
+# top 10 crates.io dls
+bitflags = {name = "bitflags", versions = ['1.2.1']}
+# crash = {name = "clippy_crash", path = "/tmp/clippy_crash"}
+libc = {name = "libc", versions = ['0.2.81']}
+log = {name = "log", versions = ['0.4.11']}
+proc-macro2 = {name = "proc-macro2", versions = ['1.0.24']}
+quote = {name = "quote", versions = ['1.0.7']}
+rand = {name = "rand", versions = ['0.7.3']}
+rand_core = {name = "rand_core", versions = ['0.6.0']}
+regex = {name = "regex", versions = ['1.3.2']}
+syn = {name = "syn", versions = ['1.0.54']}
+unicode-xid = {name = "unicode-xid", versions = ['0.2.1']}
+# some more of dtolnay's crates
+anyhow = {name = "anyhow", versions = ['1.0.38']}
+async-trait = {name = "async-trait", versions = ['0.1.42']}
+cxx = {name = "cxx", versions = ['1.0.32']}
+ryu = {name = "ryu", versions = ['1.0.5']}
+serde_yaml = {name = "serde_yaml", versions = ['0.8.17']}
+thiserror = {name = "thiserror", versions = ['1.0.24']}
+# some embark crates, there are other interesting crates but
+# unfortunately adding them increases lintcheck runtime drastically
+cfg-expr = {name = "cfg-expr", versions = ['0.7.1']}
+puffin = {name = "puffin", git_url = "https://github.com/EmbarkStudios/puffin", git_hash = "02dd4a3"}
+rpmalloc = {name = "rpmalloc", versions = ['0.2.0']}
+tame-oidc = {name = "tame-oidc", versions = ['0.1.0']}
diff --git a/src/tools/clippy/lintcheck/src/config.rs b/src/tools/clippy/lintcheck/src/config.rs
new file mode 100644
index 000000000..1742cf677
--- /dev/null
+++ b/src/tools/clippy/lintcheck/src/config.rs
@@ -0,0 +1,124 @@
+use clap::{Arg, ArgAction, ArgMatches, Command};
+use std::env;
+use std::path::PathBuf;
+
+fn get_clap_config() -> ArgMatches {
+ Command::new("lintcheck")
+ .about("run clippy on a set of crates and check output")
+ .args([
+ Arg::new("only")
+ .action(ArgAction::Set)
+ .value_name("CRATE")
+ .long("only")
+ .help("Only process a single crate of the list"),
+ Arg::new("crates-toml")
+ .action(ArgAction::Set)
+ .value_name("CRATES-SOURCES-TOML-PATH")
+ .long("crates-toml")
+ .help("Set the path for a crates.toml where lintcheck should read the sources from"),
+ Arg::new("threads")
+ .action(ArgAction::Set)
+ .value_name("N")
+ .value_parser(clap::value_parser!(usize))
+ .short('j')
+ .long("jobs")
+                .help("Number of threads to use, 0 for automatic choice"),
+ Arg::new("fix")
+ .long("fix")
+ .help("Runs cargo clippy --fix and checks if all suggestions apply"),
+ Arg::new("filter")
+ .long("filter")
+ .action(ArgAction::Append)
+ .value_name("clippy_lint_name")
+ .help("Apply a filter to only collect specified lints, this also overrides `allow` attributes"),
+ Arg::new("markdown")
+ .long("markdown")
+ .help("Change the reports table to use markdown links"),
+ ])
+ .get_matches()
+}
+
+#[derive(Debug)]
+pub(crate) struct LintcheckConfig {
+ /// max number of jobs to spawn (default 1)
+ pub max_jobs: usize,
+ /// we read the sources to check from here
+ pub sources_toml_path: PathBuf,
+ /// we save the clippy lint results here
+ pub lintcheck_results_path: PathBuf,
+ /// Check only a specified package
+ pub only: Option<String>,
+ /// whether to just run --fix and not collect all the warnings
+ pub fix: bool,
+ /// A list of lints that this lintcheck run should focus on
+ pub lint_filter: Vec<String>,
+ /// Indicate if the output should support markdown syntax
+ pub markdown: bool,
+}
+
+impl LintcheckConfig {
+ pub fn new() -> Self {
+ let clap_config = get_clap_config();
+
+ // first, check if we got anything passed via the LINTCHECK_TOML env var,
+ // if not, ask clap if we got any value for --crates-toml <foo>
+ // if not, use the default "lintcheck/lintcheck_crates.toml"
+ let sources_toml = env::var("LINTCHECK_TOML").unwrap_or_else(|_| {
+ clap_config
+ .get_one::<String>("crates-toml")
+ .map(|s| &**s)
+ .unwrap_or("lintcheck/lintcheck_crates.toml")
+ .into()
+ });
+
+ let markdown = clap_config.contains_id("markdown");
+ let sources_toml_path = PathBuf::from(sources_toml);
+
+ // for the path where we save the lint results, get the filename without extension (so for
+ // wasd.toml, use "wasd"...)
+ let filename: PathBuf = sources_toml_path.file_stem().unwrap().into();
+ let lintcheck_results_path = PathBuf::from(format!(
+ "lintcheck-logs/{}_logs.{}",
+ filename.display(),
+ if markdown { "md" } else { "txt" }
+ ));
+
+        // look at the --threads arg; if 0 is passed, ask rayon how many threads it would spawn and
+        // use half of that for the physical core count
+        // by default use a single thread
+ let max_jobs = match clap_config.get_one::<usize>("threads") {
+ Some(&0) => {
+ // automatic choice
+            // Rayon seems to return the logical thread count, so halve that to get the core count
+ (rayon::current_num_threads() / 2) as usize
+ },
+ Some(&threads) => threads,
+ // no -j passed, use a single thread
+ None => 1,
+ };
+
+ let lint_filter: Vec<String> = clap_config
+ .get_many::<String>("filter")
+ .map(|iter| {
+ iter.map(|lint_name| {
+ let mut filter = lint_name.replace('_', "-");
+ if !filter.starts_with("clippy::") {
+ filter.insert_str(0, "clippy::");
+ }
+ filter
+ })
+ .collect()
+ })
+ .unwrap_or_default();
+
+ LintcheckConfig {
+ max_jobs,
+ sources_toml_path,
+ lintcheck_results_path,
+ only: clap_config.get_one::<String>("only").map(String::from),
+ fix: clap_config.contains_id("fix"),
+ lint_filter,
+ markdown,
+ }
+ }
+}
diff --git a/src/tools/clippy/lintcheck/src/main.rs b/src/tools/clippy/lintcheck/src/main.rs
new file mode 100644
index 000000000..9ee25280f
--- /dev/null
+++ b/src/tools/clippy/lintcheck/src/main.rs
@@ -0,0 +1,814 @@
+// Run clippy on a fixed set of crates and collect the warnings.
+// This helps us observe the impact clippy changes have on a set of real-world code (and not just our
+// testsuite).
+//
+// When a new lint is introduced, we can search the results for new warnings and check for false
+// positives.
+
+#![allow(clippy::collapsible_else_if)]
+
+mod config;
+
+use config::LintcheckConfig;
+
+use std::collections::HashMap;
+use std::env;
+use std::fmt::Write as _;
+use std::fs::write;
+use std::io::ErrorKind;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::thread;
+use std::time::Duration;
+
+use cargo_metadata::diagnostic::DiagnosticLevel;
+use cargo_metadata::Message;
+use rayon::prelude::*;
+use serde::{Deserialize, Serialize};
+use walkdir::{DirEntry, WalkDir};
+
+#[cfg(not(windows))]
+const CLIPPY_DRIVER_PATH: &str = "target/debug/clippy-driver";
+#[cfg(not(windows))]
+const CARGO_CLIPPY_PATH: &str = "target/debug/cargo-clippy";
+
+#[cfg(windows)]
+const CLIPPY_DRIVER_PATH: &str = "target/debug/clippy-driver.exe";
+#[cfg(windows)]
+const CARGO_CLIPPY_PATH: &str = "target/debug/cargo-clippy.exe";
+
+const LINTCHECK_DOWNLOADS: &str = "target/lintcheck/downloads";
+const LINTCHECK_SOURCES: &str = "target/lintcheck/sources";
+
+/// List of sources to check, loaded from a .toml file
+#[derive(Debug, Serialize, Deserialize)]
+struct SourceList {
+ crates: HashMap<String, TomlCrate>,
+}
+
+/// A crate source stored inside the .toml
+/// will be translated into one of the `CrateSource` variants
+#[derive(Debug, Serialize, Deserialize)]
+struct TomlCrate {
+ name: String,
+ versions: Option<Vec<String>>,
+ git_url: Option<String>,
+ git_hash: Option<String>,
+ path: Option<String>,
+ options: Option<Vec<String>>,
+}
+
+/// Represents an archive we download from crates.io, or a git repo, or a local repo/folder
+/// Once processed (downloaded/extracted/cloned/copied...), this will be translated into a `Crate`
+#[derive(Debug, Serialize, Deserialize, Eq, Hash, PartialEq, Ord, PartialOrd)]
+enum CrateSource {
+ CratesIo {
+ name: String,
+ version: String,
+ options: Option<Vec<String>>,
+ },
+ Git {
+ name: String,
+ url: String,
+ commit: String,
+ options: Option<Vec<String>>,
+ },
+ Path {
+ name: String,
+ path: PathBuf,
+ options: Option<Vec<String>>,
+ },
+}
+
+/// Represents the actual source code of a crate that we ran "cargo clippy" on
+#[derive(Debug)]
+struct Crate {
+ version: String,
+ name: String,
+ // path to the extracted sources that clippy can check
+ path: PathBuf,
+ options: Option<Vec<String>>,
+}
+
+/// A single warning that clippy issued while checking a `Crate`
+#[derive(Debug)]
+struct ClippyWarning {
+ crate_name: String,
+ file: String,
+ line: usize,
+ column: usize,
+ lint_type: String,
+ message: String,
+ is_ice: bool,
+}
+
+#[allow(unused)]
+impl ClippyWarning {
+ fn new(cargo_message: Message, krate: &Crate) -> Option<Self> {
+ let diag = match cargo_message {
+ Message::CompilerMessage(message) => message.message,
+ _ => return None,
+ };
+
+ let lint_type = diag.code?.code;
+ if !(lint_type.contains("clippy") || diag.message.contains("clippy"))
+ || diag.message.contains("could not read cargo metadata")
+ {
+ return None;
+ }
+
+ let span = diag.spans.into_iter().find(|span| span.is_primary)?;
+
+ let file = match Path::new(&span.file_name).strip_prefix(env!("CARGO_HOME")) {
+ Ok(stripped) => format!("$CARGO_HOME/{}", stripped.display()),
+ Err(_) => format!(
+ "target/lintcheck/sources/{}-{}/{}",
+ krate.name, krate.version, span.file_name
+ ),
+ };
+
+ Some(Self {
+ crate_name: krate.name.clone(),
+ file,
+ line: span.line_start,
+ column: span.column_start,
+ lint_type,
+ message: diag.message,
+ is_ice: diag.level == DiagnosticLevel::Ice,
+ })
+ }
+
+ fn to_output(&self, markdown: bool) -> String {
+ let file_with_pos = format!("{}:{}:{}", &self.file, &self.line, &self.column);
+ if markdown {
+ let lint = format!("`{}`", self.lint_type);
+
+ let mut file = self.file.clone();
+ if !file.starts_with('$') {
+ file.insert_str(0, "../");
+ }
+
+ let mut output = String::from("| ");
+ let _ = write!(output, "[`{}`]({}#L{})", file_with_pos, file, self.line);
+ let _ = write!(output, r#" | {:<50} | "{}" |"#, lint, self.message);
+ output.push('\n');
+ output
+ } else {
+ format!("{} {} \"{}\"\n", file_with_pos, self.lint_type, self.message)
+ }
+ }
+}
+
+fn get(path: &str) -> Result<ureq::Response, ureq::Error> {
+ const MAX_RETRIES: u8 = 4;
+ let mut retries = 0;
+ loop {
+ match ureq::get(path).call() {
+ Ok(res) => return Ok(res),
+ Err(e) if retries >= MAX_RETRIES => return Err(e),
+ Err(ureq::Error::Transport(e)) => eprintln!("Error: {}", e),
+ Err(e) => return Err(e),
+ }
+ eprintln!("retrying in {} seconds...", retries);
+ thread::sleep(Duration::from_secs(retries as u64));
+ retries += 1;
+ }
+}
+
+impl CrateSource {
+ /// Makes the sources available on the disk for clippy to check.
+ /// Clones a git repo and checks out the specified commit or downloads a crate from crates.io or
+ /// copies a local folder
+ fn download_and_extract(&self) -> Crate {
+ match self {
+ CrateSource::CratesIo { name, version, options } => {
+ let extract_dir = PathBuf::from(LINTCHECK_SOURCES);
+ let krate_download_dir = PathBuf::from(LINTCHECK_DOWNLOADS);
+
+ // url to download the crate from crates.io
+ let url = format!("https://crates.io/api/v1/crates/{}/{}/download", name, version);
+ println!("Downloading and extracting {} {} from {}", name, version, url);
+ create_dirs(&krate_download_dir, &extract_dir);
+
+ let krate_file_path = krate_download_dir.join(format!("{}-{}.crate.tar.gz", name, version));
+ // don't download/extract if we already have done so
+ if !krate_file_path.is_file() {
+ // create a file path to download and write the crate data into
+ let mut krate_dest = std::fs::File::create(&krate_file_path).unwrap();
+ let mut krate_req = get(&url).unwrap().into_reader();
+ // copy the crate into the file
+ std::io::copy(&mut krate_req, &mut krate_dest).unwrap();
+
+ // unzip the tarball
+ let ungz_tar = flate2::read::GzDecoder::new(std::fs::File::open(&krate_file_path).unwrap());
+ // extract the tar archive
+ let mut archive = tar::Archive::new(ungz_tar);
+ archive.unpack(&extract_dir).expect("Failed to extract!");
+ }
+ // crate is extracted, return a new Krate object which contains the path to the extracted
+ // sources that clippy can check
+ Crate {
+ version: version.clone(),
+ name: name.clone(),
+ path: extract_dir.join(format!("{}-{}/", name, version)),
+ options: options.clone(),
+ }
+ },
+ CrateSource::Git {
+ name,
+ url,
+ commit,
+ options,
+ } => {
+ let repo_path = {
+ let mut repo_path = PathBuf::from(LINTCHECK_SOURCES);
+ // add a -git suffix in case we have the same crate from crates.io and a git repo
+ repo_path.push(format!("{}-git", name));
+ repo_path
+ };
+ // clone the repo if we have not done so
+ if !repo_path.is_dir() {
+ println!("Cloning {} and checking out {}", url, commit);
+ if !Command::new("git")
+ .arg("clone")
+ .arg(url)
+ .arg(&repo_path)
+ .status()
+ .expect("Failed to clone git repo!")
+ .success()
+ {
+ eprintln!("Failed to clone {} into {}", url, repo_path.display())
+ }
+ }
+ // check out the commit/branch/whatever
+ if !Command::new("git")
+ .arg("checkout")
+ .arg(commit)
+ .current_dir(&repo_path)
+ .status()
+ .expect("Failed to check out commit")
+ .success()
+ {
+ eprintln!("Failed to checkout {} of repo at {}", commit, repo_path.display())
+ }
+
+ Crate {
+ version: commit.clone(),
+ name: name.clone(),
+ path: repo_path,
+ options: options.clone(),
+ }
+ },
+ CrateSource::Path { name, path, options } => {
+ // copy path into the dest_crate_root but skip directories that contain a CACHEDIR.TAG file.
+ // The target/ directory contains a CACHEDIR.TAG file so it is the most commonly skipped directory
+ // as a result of this filter.
+ let dest_crate_root = PathBuf::from(LINTCHECK_SOURCES).join(name);
+ if dest_crate_root.exists() {
+ println!("Deleting existing directory at {:?}", dest_crate_root);
+ std::fs::remove_dir_all(&dest_crate_root).unwrap();
+ }
+
+ println!("Copying {:?} to {:?}", path, dest_crate_root);
+
+ fn is_cache_dir(entry: &DirEntry) -> bool {
+ std::fs::read(entry.path().join("CACHEDIR.TAG"))
+ .map(|x| x.starts_with(b"Signature: 8a477f597d28d172789f06886806bc55"))
+ .unwrap_or(false)
+ }
+
+ for entry in WalkDir::new(path).into_iter().filter_entry(|e| !is_cache_dir(e)) {
+ let entry = entry.unwrap();
+ let entry_path = entry.path();
+ let relative_entry_path = entry_path.strip_prefix(path).unwrap();
+ let dest_path = dest_crate_root.join(relative_entry_path);
+ let metadata = entry_path.symlink_metadata().unwrap();
+
+ if metadata.is_dir() {
+ std::fs::create_dir(dest_path).unwrap();
+ } else if metadata.is_file() {
+ std::fs::copy(entry_path, dest_path).unwrap();
+ }
+ }
+
+ Crate {
+ version: String::from("local"),
+ name: name.clone(),
+ path: dest_crate_root,
+ options: options.clone(),
+ }
+ },
+ }
+ }
+}
+
+impl Crate {
+ /// Run `cargo clippy` on the `Crate` and collect and return all the lint warnings that clippy
+ /// issued
+ fn run_clippy_lints(
+ &self,
+ cargo_clippy_path: &Path,
+ target_dir_index: &AtomicUsize,
+ total_crates_to_lint: usize,
+ config: &LintcheckConfig,
+ lint_filter: &Vec<String>,
+ ) -> Vec<ClippyWarning> {
+ // advance the atomic index by one
+ let index = target_dir_index.fetch_add(1, Ordering::SeqCst);
+ // "loop" the index within 0..thread_limit
+ let thread_index = index % config.max_jobs;
+ let perc = (index * 100) / total_crates_to_lint;
+
+ if config.max_jobs == 1 {
+ println!(
+ "{}/{} {}% Linting {} {}",
+ index, total_crates_to_lint, perc, &self.name, &self.version
+ );
+ } else {
+ println!(
+ "{}/{} {}% Linting {} {} in target dir {:?}",
+ index, total_crates_to_lint, perc, &self.name, &self.version, thread_index
+ );
+ }
+
+ let cargo_clippy_path = std::fs::canonicalize(cargo_clippy_path).unwrap();
+
+ let shared_target_dir = clippy_project_root().join("target/lintcheck/shared_target_dir");
+
+ let mut args = if config.fix {
+ vec!["--fix", "--"]
+ } else {
+ vec!["--", "--message-format=json", "--"]
+ };
+
+ if let Some(options) = &self.options {
+ for opt in options {
+ args.push(opt);
+ }
+ } else {
+ args.extend(&["-Wclippy::pedantic", "-Wclippy::cargo"])
+ }
+
+ if lint_filter.is_empty() {
+ args.push("--cap-lints=warn");
+ } else {
+ args.push("--cap-lints=allow");
+ args.extend(lint_filter.iter().map(|filter| filter.as_str()))
+ }
+
+ let all_output = std::process::Command::new(&cargo_clippy_path)
+ // use the looping index to create individual target dirs
+ .env(
+ "CARGO_TARGET_DIR",
+ shared_target_dir.join(format!("_{:?}", thread_index)),
+ )
+ // lint warnings will look like this:
+ // src/cargo/ops/cargo_compile.rs:127:35: warning: usage of `FromIterator::from_iter`
+ .args(&args)
+ .current_dir(&self.path)
+ .output()
+ .unwrap_or_else(|error| {
+ panic!(
+ "Encountered error:\n{:?}\ncargo_clippy_path: {}\ncrate path:{}\n",
+ error,
+ &cargo_clippy_path.display(),
+ &self.path.display()
+ );
+ });
+ let stdout = String::from_utf8_lossy(&all_output.stdout);
+ let stderr = String::from_utf8_lossy(&all_output.stderr);
+ let status = &all_output.status;
+
+ if !status.success() {
+ eprintln!(
+ "\nWARNING: bad exit status after checking {} {} \n",
+ self.name, self.version
+ );
+ }
+
+ if config.fix {
+ if let Some(stderr) = stderr
+ .lines()
+ .find(|line| line.contains("failed to automatically apply fixes suggested by rustc to crate"))
+ {
+ let subcrate = &stderr[63..];
+ println!(
+                    "ERROR: failed to apply some suggestion to {} / to (sub)crate {}",
+ self.name, subcrate
+ );
+ }
+ // fast path, we don't need the warnings anyway
+ return Vec::new();
+ }
+
+ // get all clippy warnings and ICEs
+ let warnings: Vec<ClippyWarning> = Message::parse_stream(stdout.as_bytes())
+ .filter_map(|msg| ClippyWarning::new(msg.unwrap(), &self))
+ .collect();
+
+ warnings
+ }
+}
+
+/// Builds clippy inside the repo to make sure we have a clippy executable we can use.
+fn build_clippy() {
+ let status = Command::new("cargo")
+ .arg("build")
+ .status()
+ .expect("Failed to build clippy!");
+ if !status.success() {
+ eprintln!("Error: Failed to compile Clippy!");
+ std::process::exit(1);
+ }
+}
+
+/// Read a `toml` file and return a list of `CrateSources` that we want to check with clippy
+fn read_crates(toml_path: &Path) -> Vec<CrateSource> {
+ let toml_content: String =
+ std::fs::read_to_string(&toml_path).unwrap_or_else(|_| panic!("Failed to read {}", toml_path.display()));
+ let crate_list: SourceList =
+ toml::from_str(&toml_content).unwrap_or_else(|e| panic!("Failed to parse {}: \n{}", toml_path.display(), e));
+ // parse the hashmap of the toml file into a list of crates
+ let tomlcrates: Vec<TomlCrate> = crate_list
+ .crates
+ .into_iter()
+ .map(|(_cratename, tomlcrate)| tomlcrate)
+ .collect();
+
+ // flatten TomlCrates into CrateSources (one TomlCrates may represent several versions of a crate =>
+ // multiple Cratesources)
+ let mut crate_sources = Vec::new();
+ tomlcrates.into_iter().for_each(|tk| {
+ if let Some(ref path) = tk.path {
+ crate_sources.push(CrateSource::Path {
+ name: tk.name.clone(),
+ path: PathBuf::from(path),
+ options: tk.options.clone(),
+ });
+ } else if let Some(ref versions) = tk.versions {
+ // if we have multiple versions, save each one
+ versions.iter().for_each(|ver| {
+ crate_sources.push(CrateSource::CratesIo {
+ name: tk.name.clone(),
+ version: ver.to_string(),
+ options: tk.options.clone(),
+ });
+ })
+ } else if tk.git_url.is_some() && tk.git_hash.is_some() {
+ // otherwise, we should have a git source
+ crate_sources.push(CrateSource::Git {
+ name: tk.name.clone(),
+ url: tk.git_url.clone().unwrap(),
+ commit: tk.git_hash.clone().unwrap(),
+ options: tk.options.clone(),
+ });
+ } else {
+ panic!("Invalid crate source: {tk:?}");
+ }
+
+ // if we have a version as well as a git data OR only one git data, something is funky
+ if tk.versions.is_some() && (tk.git_url.is_some() || tk.git_hash.is_some())
+ || tk.git_hash.is_some() != tk.git_url.is_some()
+ {
+ eprintln!("tomlkrate: {:?}", tk);
+ if tk.git_hash.is_some() != tk.git_url.is_some() {
+ panic!("Error: Encountered TomlCrate with only one of git_hash and git_url!");
+ }
+ if tk.path.is_some() && (tk.git_hash.is_some() || tk.versions.is_some()) {
+ panic!("Error: TomlCrate can only have one of 'git_.*', 'version' or 'path' fields");
+ }
+ unreachable!("Failed to translate TomlCrate into CrateSource!");
+ }
+ });
+ // sort the crates
+ crate_sources.sort();
+
+ crate_sources
+}
+
+/// Generate a short list of occurring lints-types and their count
+fn gather_stats(clippy_warnings: &[ClippyWarning]) -> (String, HashMap<&String, usize>) {
+ // count lint type occurrences
+ let mut counter: HashMap<&String, usize> = HashMap::new();
+ clippy_warnings
+ .iter()
+ .for_each(|wrn| *counter.entry(&wrn.lint_type).or_insert(0) += 1);
+
+ // collect into a tupled list for sorting
+ let mut stats: Vec<(&&String, &usize)> = counter.iter().map(|(lint, count)| (lint, count)).collect();
+ // sort by "000{count} {clippy::lintname}"
+ // to not have a lint with 200 and 2 warnings take the same spot
+ stats.sort_by_key(|(lint, count)| format!("{:0>4}, {}", count, lint));
+
+ let mut header = String::from("| lint | count |\n");
+ header.push_str("| -------------------------------------------------- | ----- |\n");
+ let stats_string = stats
+ .iter()
+ .map(|(lint, count)| format!("| {:<50} | {:>4} |\n", lint, count))
+ .fold(header, |mut table, line| {
+ table.push_str(&line);
+ table
+ });
+
+ (stats_string, counter)
+}
+
+/// check if the latest modification of the logfile is older than the modification date of the
+/// clippy binary; if this is true, we should clean the lintcheck shared target directory and recheck
+fn lintcheck_needs_rerun(lintcheck_logs_path: &Path) -> bool {
+ if !lintcheck_logs_path.exists() {
+ return true;
+ }
+
+ let clippy_modified: std::time::SystemTime = {
+ let mut times = [CLIPPY_DRIVER_PATH, CARGO_CLIPPY_PATH].iter().map(|p| {
+ std::fs::metadata(p)
+ .expect("failed to get metadata of file")
+ .modified()
+ .expect("failed to get modification date")
+ });
+        // the most recent modification of either of the binaries
+ std::cmp::max(times.next().unwrap(), times.next().unwrap())
+ };
+
+ let logs_modified: std::time::SystemTime = std::fs::metadata(lintcheck_logs_path)
+ .expect("failed to get metadata of file")
+ .modified()
+ .expect("failed to get modification date");
+
+ // time is represented in seconds since X
+    // logs_modified 2 and clippy_modified 5 means the clippy binary is newer and we need to recheck
+ logs_modified < clippy_modified
+}
+
+fn main() {
+ // assert that we launch lintcheck from the repo root (via cargo lintcheck)
+ if std::fs::metadata("lintcheck/Cargo.toml").is_err() {
+        eprintln!("lintcheck needs to be run from clippy's repo root!\nAlternatively, use `cargo lintcheck`.");
+ std::process::exit(3);
+ }
+
+ let config = LintcheckConfig::new();
+
+ println!("Compiling clippy...");
+ build_clippy();
+ println!("Done compiling");
+
+ // if the clippy bin is newer than our logs, throw away target dirs to force clippy to
+ // refresh the logs
+ if lintcheck_needs_rerun(&config.lintcheck_results_path) {
+ let shared_target_dir = "target/lintcheck/shared_target_dir";
+        // if we get an Err here, the shared target dir probably just does not exist
+ if let Ok(metadata) = std::fs::metadata(&shared_target_dir) {
+ if metadata.is_dir() {
+ println!("Clippy is newer than lint check logs, clearing lintcheck shared target dir...");
+ std::fs::remove_dir_all(&shared_target_dir)
+ .expect("failed to remove target/lintcheck/shared_target_dir");
+ }
+ }
+ }
+
+ let cargo_clippy_path: PathBuf = PathBuf::from(CARGO_CLIPPY_PATH)
+ .canonicalize()
+ .expect("failed to canonicalize path to clippy binary");
+
+ // assert that clippy is found
+ assert!(
+ cargo_clippy_path.is_file(),
+ "target/debug/cargo-clippy binary not found! {}",
+ cargo_clippy_path.display()
+ );
+
+ let clippy_ver = std::process::Command::new(CARGO_CLIPPY_PATH)
+ .arg("--version")
+ .output()
+ .map(|o| String::from_utf8_lossy(&o.stdout).into_owned())
+ .expect("could not get clippy version!");
+
+ // download and extract the crates, then run clippy on them and collect clippy's warnings
+ // flatten into one big list of warnings
+
+ let crates = read_crates(&config.sources_toml_path);
+ let old_stats = read_stats_from_file(&config.lintcheck_results_path);
+
+ let counter = AtomicUsize::new(1);
+ let lint_filter: Vec<String> = config
+ .lint_filter
+ .iter()
+ .map(|filter| {
+ let mut filter = filter.clone();
+ filter.insert_str(0, "--force-warn=");
+ filter
+ })
+ .collect();
+
+ let crates: Vec<Crate> = crates
+ .into_iter()
+ .filter(|krate| {
+ if let Some(only_one_crate) = &config.only {
+ let name = match krate {
+ CrateSource::CratesIo { name, .. }
+ | CrateSource::Git { name, .. }
+ | CrateSource::Path { name, .. } => name,
+ };
+
+ name == only_one_crate
+ } else {
+ true
+ }
+ })
+ .map(|krate| krate.download_and_extract())
+ .collect();
+
+ if crates.is_empty() {
+ eprintln!(
+ "ERROR: could not find crate '{}' in lintcheck/lintcheck_crates.toml",
+ config.only.unwrap(),
+ );
+ std::process::exit(1);
+ }
+
+ // run parallel with rayon
+
+    // This helps to achieve some parallelism when we check many small crates with dep-trees
+    // that don't have a lot of branches
+
+ rayon::ThreadPoolBuilder::new()
+ .num_threads(config.max_jobs)
+ .build_global()
+ .unwrap();
+
+ let clippy_warnings: Vec<ClippyWarning> = crates
+ .par_iter()
+ .flat_map(|krate| krate.run_clippy_lints(&cargo_clippy_path, &counter, crates.len(), &config, &lint_filter))
+ .collect();
+
+ // if we are in --fix mode, don't change the log files, terminate here
+ if config.fix {
+ return;
+ }
+
+ // generate some stats
+ let (stats_formatted, new_stats) = gather_stats(&clippy_warnings);
+
+ // grab crashes/ICEs, save the crate name and the ice message
+ let ices: Vec<(&String, &String)> = clippy_warnings
+ .iter()
+ .filter(|warning| warning.is_ice)
+ .map(|w| (&w.crate_name, &w.message))
+ .collect();
+
+ let mut all_msgs: Vec<String> = clippy_warnings
+ .iter()
+ .map(|warn| warn.to_output(config.markdown))
+ .collect();
+ all_msgs.sort();
+ all_msgs.push("\n\n### Stats:\n\n".into());
+ all_msgs.push(stats_formatted);
+
+ // save the text into lintcheck-logs/logs.txt
+ let mut text = clippy_ver; // clippy version number on top
+ text.push_str("\n### Reports\n\n");
+ if config.markdown {
+ text.push_str("| file | lint | message |\n");
+ text.push_str("| --- | --- | --- |\n");
+ }
+ write!(text, "{}", all_msgs.join("")).unwrap();
+ text.push_str("\n\n### ICEs:\n");
+ for (cratename, msg) in ices.iter() {
+ let _ = write!(text, "{}: '{}'", cratename, msg);
+ }
+
+ println!("Writing logs to {}", config.lintcheck_results_path.display());
+ std::fs::create_dir_all(config.lintcheck_results_path.parent().unwrap()).unwrap();
+ write(&config.lintcheck_results_path, text).unwrap();
+
+ print_stats(old_stats, new_stats, &config.lint_filter);
+}
+
+/// read the previous stats from the lintcheck-log file
+fn read_stats_from_file(file_path: &Path) -> HashMap<String, usize> {
+ let file_content: String = match std::fs::read_to_string(file_path).ok() {
+ Some(content) => content,
+ None => {
+ return HashMap::new();
+ },
+ };
+
+ let lines: Vec<String> = file_content.lines().map(ToString::to_string).collect();
+
+ lines
+ .iter()
+ .skip_while(|line| line.as_str() != "### Stats:")
+ // Skipping the table header and the `Stats:` label
+ .skip(4)
+ .take_while(|line| line.starts_with("| "))
+ .filter_map(|line| {
+ let mut spl = line.split('|');
+ // Skip the first `|` symbol
+ spl.next();
+ if let (Some(lint), Some(count)) = (spl.next(), spl.next()) {
+ Some((lint.trim().to_string(), count.trim().parse::<usize>().unwrap()))
+ } else {
+ None
+ }
+ })
+ .collect::<HashMap<String, usize>>()
+}
+
+/// print how lint counts changed between runs
+fn print_stats(old_stats: HashMap<String, usize>, new_stats: HashMap<&String, usize>, lint_filter: &Vec<String>) {
+ let same_in_both_hashmaps = old_stats
+ .iter()
+ .filter(|(old_key, old_val)| new_stats.get::<&String>(&old_key) == Some(old_val))
+ .map(|(k, v)| (k.to_string(), *v))
+ .collect::<Vec<(String, usize)>>();
+
+ let mut old_stats_deduped = old_stats;
+ let mut new_stats_deduped = new_stats;
+
+ // remove duplicates from both hashmaps
+ same_in_both_hashmaps.iter().for_each(|(k, v)| {
+ assert!(old_stats_deduped.remove(k) == Some(*v));
+ assert!(new_stats_deduped.remove(k) == Some(*v));
+ });
+
+ println!("\nStats:");
+
+ // list all new counts (key is in new stats but not in old stats)
+ new_stats_deduped
+ .iter()
+ .filter(|(new_key, _)| old_stats_deduped.get::<str>(&new_key).is_none())
+ .for_each(|(new_key, new_value)| {
+ println!("{} 0 => {}", new_key, new_value);
+ });
+
+ // list all changed counts (key is in both maps but value differs)
+ new_stats_deduped
+ .iter()
+ .filter(|(new_key, _new_val)| old_stats_deduped.get::<str>(&new_key).is_some())
+ .for_each(|(new_key, new_val)| {
+ let old_val = old_stats_deduped.get::<str>(&new_key).unwrap();
+ println!("{} {} => {}", new_key, old_val, new_val);
+ });
+
+    // list all gone counts (key is in old stats but not in new stats)
+ old_stats_deduped
+ .iter()
+ .filter(|(old_key, _)| new_stats_deduped.get::<&String>(&old_key).is_none())
+ .filter(|(old_key, _)| lint_filter.is_empty() || lint_filter.contains(old_key))
+ .for_each(|(old_key, old_value)| {
+ println!("{} {} => 0", old_key, old_value);
+ });
+}
+
+/// Create necessary directories to run the lintcheck tool.
+///
+/// # Panics
+///
+/// This function panics if creating one of the dirs fails.
+fn create_dirs(krate_download_dir: &Path, extract_dir: &Path) {
+ std::fs::create_dir("target/lintcheck/").unwrap_or_else(|err| {
+ if err.kind() != ErrorKind::AlreadyExists {
+ panic!("cannot create lintcheck target dir");
+ }
+ });
+ std::fs::create_dir(&krate_download_dir).unwrap_or_else(|err| {
+ if err.kind() != ErrorKind::AlreadyExists {
+ panic!("cannot create crate download dir");
+ }
+ });
+ std::fs::create_dir(&extract_dir).unwrap_or_else(|err| {
+ if err.kind() != ErrorKind::AlreadyExists {
+ panic!("cannot create crate extraction dir");
+ }
+ });
+}
+
+/// Returns the path to the Clippy project directory
+#[must_use]
+fn clippy_project_root() -> &'static Path {
+ Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap()
+}
+
+#[test]
+fn lintcheck_test() {
+ let args = [
+ "run",
+ "--target-dir",
+ "lintcheck/target",
+ "--manifest-path",
+ "./lintcheck/Cargo.toml",
+ "--",
+ "--crates-toml",
+ "lintcheck/test_sources.toml",
+ ];
+ let status = std::process::Command::new("cargo")
+ .args(&args)
+ .current_dir("..") // repo root
+ .status();
+ //.output();
+
+ assert!(status.unwrap().success());
+}
diff --git a/src/tools/clippy/lintcheck/test_sources.toml b/src/tools/clippy/lintcheck/test_sources.toml
new file mode 100644
index 000000000..4b0eb71ef
--- /dev/null
+++ b/src/tools/clippy/lintcheck/test_sources.toml
@@ -0,0 +1,4 @@
+[crates]
+cc = {name = "cc", versions = ['1.0.67']}
+home = {name = "home", git_url = "https://github.com/brson/home", git_hash = "32044e53dfbdcd32bafad3109d1fbab805fc0f40"}
+rustc_tools_util = {name = "rustc_tools_util", versions = ['0.2.0']}
diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain
new file mode 100644
index 000000000..23ba7c712
--- /dev/null
+++ b/src/tools/clippy/rust-toolchain
@@ -0,0 +1,3 @@
+[toolchain]
+channel = "nightly-2022-07-28"
+components = ["cargo", "llvm-tools-preview", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"]
diff --git a/src/tools/clippy/rustc_tools_util/Cargo.toml b/src/tools/clippy/rustc_tools_util/Cargo.toml
new file mode 100644
index 000000000..9554d4d6c
--- /dev/null
+++ b/src/tools/clippy/rustc_tools_util/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rustc_tools_util"
+version = "0.2.0"
+description = "small helper to generate version information for git packages"
+repository = "https://github.com/rust-lang/rust-clippy"
+readme = "README.md"
+license = "MIT OR Apache-2.0"
+keywords = ["rustc", "tool", "git", "version", "hash"]
+categories = ["development-tools"]
+edition = "2018"
+
+[dependencies]
+
+[features]
+deny-warnings = []
diff --git a/src/tools/clippy/rustc_tools_util/LICENSE-APACHE b/src/tools/clippy/rustc_tools_util/LICENSE-APACHE
new file mode 120000
index 000000000..965b606f3
--- /dev/null
+++ b/src/tools/clippy/rustc_tools_util/LICENSE-APACHE
@@ -0,0 +1 @@
+../LICENSE-APACHE \ No newline at end of file
diff --git a/src/tools/clippy/rustc_tools_util/LICENSE-MIT b/src/tools/clippy/rustc_tools_util/LICENSE-MIT
new file mode 120000
index 000000000..76219eb72
--- /dev/null
+++ b/src/tools/clippy/rustc_tools_util/LICENSE-MIT
@@ -0,0 +1 @@
+../LICENSE-MIT \ No newline at end of file
diff --git a/src/tools/clippy/rustc_tools_util/README.md b/src/tools/clippy/rustc_tools_util/README.md
new file mode 100644
index 000000000..01891b51d
--- /dev/null
+++ b/src/tools/clippy/rustc_tools_util/README.md
@@ -0,0 +1,62 @@
+# rustc_tools_util
+
+A small tool to help you generate version information
+for packages installed from a git repo.
+
+## Usage
+
+Add a `build.rs` file to your repo and list it in `Cargo.toml`:
+````toml
+build = "build.rs"
+````
+
+List rustc_tools_util as a regular AND a build dependency:
+````toml
+[dependencies]
+rustc_tools_util = "0.1"
+
+[build-dependencies]
+rustc_tools_util = "0.1"
+````
+
+In `build.rs`, generate the data in your `main()`:
+````rust
+fn main() {
+ println!(
+ "cargo:rustc-env=GIT_HASH={}",
+ rustc_tools_util::get_commit_hash().unwrap_or_default()
+ );
+ println!(
+ "cargo:rustc-env=COMMIT_DATE={}",
+ rustc_tools_util::get_commit_date().unwrap_or_default()
+ );
+ println!(
+ "cargo:rustc-env=RUSTC_RELEASE_CHANNEL={}",
+        rustc_tools_util::get_channel()
+ );
+}
+
+````
+
+Use the version information in your `main.rs`:
+````rust
+use rustc_tools_util::*;
+
+fn show_version() {
+ let version_info = rustc_tools_util::get_version_info!();
+ println!("{}", version_info);
+}
+````
+This gives the following output in clippy:
+`clippy 0.0.212 (a416c5e 2018-12-14)`
+
+
+## License
+
+Copyright 2014-2022 The Rust Project Developers
+
+Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+<LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+option. All files in the project carrying such notice may not be
+copied, modified, or distributed except according to those terms.
diff --git a/src/tools/clippy/rustc_tools_util/src/lib.rs b/src/tools/clippy/rustc_tools_util/src/lib.rs
new file mode 100644
index 000000000..5f289918a
--- /dev/null
+++ b/src/tools/clippy/rustc_tools_util/src/lib.rs
@@ -0,0 +1,162 @@
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+
+use std::env;
+
+#[macro_export]
+macro_rules! get_version_info {
+ () => {{
+ let major = env!("CARGO_PKG_VERSION_MAJOR").parse::<u8>().unwrap();
+ let minor = env!("CARGO_PKG_VERSION_MINOR").parse::<u8>().unwrap();
+ let patch = env!("CARGO_PKG_VERSION_PATCH").parse::<u16>().unwrap();
+ let crate_name = String::from(env!("CARGO_PKG_NAME"));
+
+ let host_compiler = option_env!("RUSTC_RELEASE_CHANNEL").map(str::to_string);
+ let commit_hash = option_env!("GIT_HASH").map(str::to_string);
+ let commit_date = option_env!("COMMIT_DATE").map(str::to_string);
+
+ VersionInfo {
+ major,
+ minor,
+ patch,
+ host_compiler,
+ commit_hash,
+ commit_date,
+ crate_name,
+ }
+ }};
+}
+
+// some code taken and adapted from RLS and cargo
+pub struct VersionInfo {
+ pub major: u8,
+ pub minor: u8,
+ pub patch: u16,
+ pub host_compiler: Option<String>,
+ pub commit_hash: Option<String>,
+ pub commit_date: Option<String>,
+ pub crate_name: String,
+}
+
+impl std::fmt::Display for VersionInfo {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let hash = self.commit_hash.clone().unwrap_or_default();
+ let hash_trimmed = hash.trim();
+
+ let date = self.commit_date.clone().unwrap_or_default();
+ let date_trimmed = date.trim();
+
+ if (hash_trimmed.len() + date_trimmed.len()) > 0 {
+ write!(
+ f,
+ "{} {}.{}.{} ({} {})",
+ self.crate_name, self.major, self.minor, self.patch, hash_trimmed, date_trimmed,
+ )?;
+ } else {
+ write!(f, "{} {}.{}.{}", self.crate_name, self.major, self.minor, self.patch)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl std::fmt::Debug for VersionInfo {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(
+ f,
+ "VersionInfo {{ crate_name: \"{}\", major: {}, minor: {}, patch: {}",
+ self.crate_name, self.major, self.minor, self.patch,
+ )?;
+ if self.commit_hash.is_some() {
+ write!(
+ f,
+ ", commit_hash: \"{}\", commit_date: \"{}\" }}",
+ self.commit_hash.clone().unwrap_or_default().trim(),
+ self.commit_date.clone().unwrap_or_default().trim()
+ )?;
+ } else {
+ write!(f, " }}")?;
+ }
+
+ Ok(())
+ }
+}
+
+#[must_use]
+pub fn get_commit_hash() -> Option<String> {
+ std::process::Command::new("git")
+ .args(&["rev-parse", "--short", "HEAD"])
+ .output()
+ .ok()
+ .and_then(|r| String::from_utf8(r.stdout).ok())
+}
+
+#[must_use]
+pub fn get_commit_date() -> Option<String> {
+ std::process::Command::new("git")
+ .args(&["log", "-1", "--date=short", "--pretty=format:%cd"])
+ .output()
+ .ok()
+ .and_then(|r| String::from_utf8(r.stdout).ok())
+}
+
+#[must_use]
+pub fn get_channel() -> String {
+ match env::var("CFG_RELEASE_CHANNEL") {
+ Ok(channel) => channel,
+ Err(_) => {
+ // if that failed, try to ask rustc -V, do some parsing and find out
+ match std::process::Command::new("rustc")
+ .arg("-V")
+ .output()
+ .ok()
+ .and_then(|r| String::from_utf8(r.stdout).ok())
+ {
+ Some(rustc_output) => {
+ if rustc_output.contains("beta") {
+ String::from("beta")
+ } else if rustc_output.contains("stable") {
+ String::from("stable")
+ } else {
+ // default to nightly if we fail to parse
+ String::from("nightly")
+ }
+ },
+ // default to nightly
+ None => String::from("nightly"),
+ }
+ },
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_struct_local() {
+ let vi = get_version_info!();
+ assert_eq!(vi.major, 0);
+ assert_eq!(vi.minor, 2);
+ assert_eq!(vi.patch, 0);
+ assert_eq!(vi.crate_name, "rustc_tools_util");
+ // hard to make positive tests for these since they will always change
+ assert!(vi.commit_hash.is_none());
+ assert!(vi.commit_date.is_none());
+ }
+
+ #[test]
+ fn test_display_local() {
+ let vi = get_version_info!();
+ assert_eq!(vi.to_string(), "rustc_tools_util 0.2.0");
+ }
+
+ #[test]
+ fn test_debug_local() {
+ let vi = get_version_info!();
+ let s = format!("{:?}", vi);
+ assert_eq!(
+ s,
+ "VersionInfo { crate_name: \"rustc_tools_util\", major: 0, minor: 2, patch: 0 }"
+ );
+ }
+}
diff --git a/src/tools/clippy/rustfmt.toml b/src/tools/clippy/rustfmt.toml
new file mode 100644
index 000000000..10d397620
--- /dev/null
+++ b/src/tools/clippy/rustfmt.toml
@@ -0,0 +1,7 @@
+max_width = 120
+comment_width = 100
+match_block_trailing_comma = true
+wrap_comments = true
+edition = "2021"
+error_on_line_overflow = true
+version = "Two"
diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs
new file mode 100644
index 000000000..c1ec2bd5b
--- /dev/null
+++ b/src/tools/clippy/src/driver.rs
@@ -0,0 +1,353 @@
+#![feature(rustc_private)]
+#![feature(once_cell)]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+// warn on lints that are included in `rust-lang/rust`'s bootstrap
+#![warn(rust_2018_idioms, unused_lifetimes)]
+// warn on rustc internal lints
+#![warn(rustc::internal)]
+
+// FIXME: switch to something more ergonomic here, once available.
+// (Currently there is no way to opt into sysroot crates without `extern crate`.)
+extern crate rustc_driver;
+extern crate rustc_errors;
+extern crate rustc_interface;
+extern crate rustc_session;
+extern crate rustc_span;
+
+use rustc_interface::interface;
+use rustc_session::parse::ParseSess;
+use rustc_span::symbol::Symbol;
+use rustc_tools_util::VersionInfo;
+
+use std::borrow::Cow;
+use std::env;
+use std::ops::Deref;
+use std::panic;
+use std::path::{Path, PathBuf};
+use std::process::{exit, Command};
+use std::sync::LazyLock;
+
+/// If a command-line option matches `find_arg`, then apply the predicate `pred` on its value. If
+/// true, then return it. The parameter is assumed to be either `--arg=value` or `--arg value`.
+fn arg_value<'a, T: Deref<Target = str>>(
+ args: &'a [T],
+ find_arg: &str,
+ pred: impl Fn(&str) -> bool,
+) -> Option<&'a str> {
+ let mut args = args.iter().map(Deref::deref);
+ while let Some(arg) = args.next() {
+ let mut arg = arg.splitn(2, '=');
+ if arg.next() != Some(find_arg) {
+ continue;
+ }
+
+ match arg.next().or_else(|| args.next()) {
+ Some(v) if pred(v) => return Some(v),
+ _ => {},
+ }
+ }
+ None
+}
+
+#[test]
+fn test_arg_value() {
+ let args = &["--bar=bar", "--foobar", "123", "--foo"];
+
+ assert_eq!(arg_value(&[] as &[&str], "--foobar", |_| true), None);
+ assert_eq!(arg_value(args, "--bar", |_| false), None);
+ assert_eq!(arg_value(args, "--bar", |_| true), Some("bar"));
+ assert_eq!(arg_value(args, "--bar", |p| p == "bar"), Some("bar"));
+ assert_eq!(arg_value(args, "--bar", |p| p == "foo"), None);
+ assert_eq!(arg_value(args, "--foobar", |p| p == "foo"), None);
+ assert_eq!(arg_value(args, "--foobar", |p| p == "123"), Some("123"));
+ assert_eq!(arg_value(args, "--foobar", |p| p.contains("12")), Some("123"));
+ assert_eq!(arg_value(args, "--foo", |_| true), None);
+}
+
+fn track_clippy_args(parse_sess: &mut ParseSess, args_env_var: &Option<String>) {
+ parse_sess.env_depinfo.get_mut().insert((
+ Symbol::intern("CLIPPY_ARGS"),
+ args_env_var.as_deref().map(Symbol::intern),
+ ));
+}
+
+struct DefaultCallbacks;
+impl rustc_driver::Callbacks for DefaultCallbacks {}
+
+/// This is different from `DefaultCallbacks` in that it will inform Cargo to track the value of
+/// the `CLIPPY_ARGS` environment variable.
+struct RustcCallbacks {
+ clippy_args_var: Option<String>,
+}
+
+impl rustc_driver::Callbacks for RustcCallbacks {
+ fn config(&mut self, config: &mut interface::Config) {
+ let clippy_args_var = self.clippy_args_var.take();
+ config.parse_sess_created = Some(Box::new(move |parse_sess| {
+ track_clippy_args(parse_sess, &clippy_args_var);
+ }));
+ }
+}
+
+struct ClippyCallbacks {
+ clippy_args_var: Option<String>,
+}
+
+impl rustc_driver::Callbacks for ClippyCallbacks {
+ // JUSTIFICATION: necessary in clippy driver to set `mir_opt_level`
+ #[cfg_attr(not(bootstrap), allow(rustc::bad_opt_access))]
+ fn config(&mut self, config: &mut interface::Config) {
+ let previous = config.register_lints.take();
+ let clippy_args_var = self.clippy_args_var.take();
+ config.parse_sess_created = Some(Box::new(move |parse_sess| {
+ track_clippy_args(parse_sess, &clippy_args_var);
+ }));
+ config.register_lints = Some(Box::new(move |sess, lint_store| {
+ // technically we're ~guaranteed that this is none but might as well call anything that
+ // is there already. Certainly it can't hurt.
+ if let Some(previous) = &previous {
+ (previous)(sess, lint_store);
+ }
+
+ let conf = clippy_lints::read_conf(sess);
+ clippy_lints::register_plugins(lint_store, sess, &conf);
+ clippy_lints::register_pre_expansion_lints(lint_store, sess, &conf);
+ clippy_lints::register_renamed(lint_store);
+ }));
+
+ // FIXME: #4825; This is required, because Clippy lints that are based on MIR have to be
+ // run on the unoptimized MIR. On the other hand this results in some false negatives. If
+        // MIR passes can be enabled / disabled separately, we should figure out what passes to
+ // use for Clippy.
+ config.opts.unstable_opts.mir_opt_level = Some(0);
+ }
+}
+
+fn display_help() {
+ println!(
+ "\
+Checks a package to catch common mistakes and improve your Rust code.
+
+Usage:
+ cargo clippy [options] [--] [<opts>...]
+
+Common options:
+ -h, --help Print this message
+ --rustc Pass all args to rustc
+ -V, --version Print version info and exit
+
+Other options are the same as `cargo check`.
+
+To allow or deny a lint from the command line you can use `cargo clippy --`
+with:
+
+ -W --warn OPT Set lint warnings
+ -A --allow OPT Set lint allowed
+ -D --deny OPT Set lint denied
+ -F --forbid OPT Set lint forbidden
+
+You can use tool lints to allow or deny lints from your code, e.g.:
+
+ #[allow(clippy::needless_lifetimes)]
+"
+ );
+}
+
+const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust-clippy/issues/new";
+
+type PanicCallback = dyn Fn(&panic::PanicInfo<'_>) + Sync + Send + 'static;
+static ICE_HOOK: LazyLock<Box<PanicCallback>> = LazyLock::new(|| {
+ let hook = panic::take_hook();
+ panic::set_hook(Box::new(|info| report_clippy_ice(info, BUG_REPORT_URL)));
+ hook
+});
+
+fn report_clippy_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
+ // Invoke our ICE handler, which prints the actual panic message and optionally a backtrace
+ (*ICE_HOOK)(info);
+
+ // Separate the output with an empty line
+ eprintln!();
+
+ let fallback_bundle = rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ let emitter = Box::new(rustc_errors::emitter::EmitterWriter::stderr(
+ rustc_errors::ColorConfig::Auto,
+ None,
+ None,
+ fallback_bundle,
+ false,
+ false,
+ None,
+ false,
+ ));
+ let handler = rustc_errors::Handler::with_emitter(true, None, emitter);
+
+ // a .span_bug or .bug call has already printed what
+ // it wants to print.
+ if !info.payload().is::<rustc_errors::ExplicitBug>() {
+ let mut d = rustc_errors::Diagnostic::new(rustc_errors::Level::Bug, "unexpected panic");
+ handler.emit_diagnostic(&mut d);
+ }
+
+ let version_info = rustc_tools_util::get_version_info!();
+
+ let xs: Vec<Cow<'static, str>> = vec![
+ "the compiler unexpectedly panicked. this is a bug.".into(),
+ format!("we would appreciate a bug report: {}", bug_report_url).into(),
+ format!("Clippy version: {}", version_info).into(),
+ ];
+
+ for note in &xs {
+ handler.note_without_error(note.as_ref());
+ }
+
+ // If backtraces are enabled, also print the query stack
+ let backtrace = env::var_os("RUST_BACKTRACE").map_or(false, |x| &x != "0");
+
+ let num_frames = if backtrace { None } else { Some(2) };
+
+ interface::try_print_query_stack(&handler, num_frames);
+}
+
+fn toolchain_path(home: Option<String>, toolchain: Option<String>) -> Option<PathBuf> {
+ home.and_then(|home| {
+ toolchain.map(|toolchain| {
+ let mut path = PathBuf::from(home);
+ path.push("toolchains");
+ path.push(toolchain);
+ path
+ })
+ })
+}
+
+#[allow(clippy::too_many_lines)]
+pub fn main() {
+ rustc_driver::init_rustc_env_logger();
+ LazyLock::force(&ICE_HOOK);
+ exit(rustc_driver::catch_with_exit_code(move || {
+ let mut orig_args: Vec<String> = env::args().collect();
+
+ // Get the sysroot, looking from most specific to this invocation to the least:
+ // - command line
+ // - runtime environment
+ // - SYSROOT
+ // - RUSTUP_HOME, MULTIRUST_HOME, RUSTUP_TOOLCHAIN, MULTIRUST_TOOLCHAIN
+ // - sysroot from rustc in the path
+ // - compile-time environment
+ // - SYSROOT
+ // - RUSTUP_HOME, MULTIRUST_HOME, RUSTUP_TOOLCHAIN, MULTIRUST_TOOLCHAIN
+ let sys_root_arg = arg_value(&orig_args, "--sysroot", |_| true);
+ let have_sys_root_arg = sys_root_arg.is_some();
+ let sys_root = sys_root_arg
+ .map(PathBuf::from)
+ .or_else(|| std::env::var("SYSROOT").ok().map(PathBuf::from))
+ .or_else(|| {
+ let home = std::env::var("RUSTUP_HOME")
+ .or_else(|_| std::env::var("MULTIRUST_HOME"))
+ .ok();
+ let toolchain = std::env::var("RUSTUP_TOOLCHAIN")
+ .or_else(|_| std::env::var("MULTIRUST_TOOLCHAIN"))
+ .ok();
+ toolchain_path(home, toolchain)
+ })
+ .or_else(|| {
+ Command::new("rustc")
+ .arg("--print")
+ .arg("sysroot")
+ .output()
+ .ok()
+ .and_then(|out| String::from_utf8(out.stdout).ok())
+ .map(|s| PathBuf::from(s.trim()))
+ })
+ .or_else(|| option_env!("SYSROOT").map(PathBuf::from))
+ .or_else(|| {
+ let home = option_env!("RUSTUP_HOME")
+ .or(option_env!("MULTIRUST_HOME"))
+ .map(ToString::to_string);
+ let toolchain = option_env!("RUSTUP_TOOLCHAIN")
+ .or(option_env!("MULTIRUST_TOOLCHAIN"))
+ .map(ToString::to_string);
+ toolchain_path(home, toolchain)
+ })
+ .map(|pb| pb.to_string_lossy().to_string())
+ .expect("need to specify SYSROOT env var during clippy compilation, or use rustup or multirust");
+
+ // make "clippy-driver --rustc" work like a subcommand that passes further args to "rustc"
+ // for example `clippy-driver --rustc --version` will print the rustc version that clippy-driver
+ // uses
+ if let Some(pos) = orig_args.iter().position(|arg| arg == "--rustc") {
+ orig_args.remove(pos);
+ orig_args[0] = "rustc".to_string();
+
+ // if we call "rustc", we need to pass --sysroot here as well
+ let mut args: Vec<String> = orig_args.clone();
+ if !have_sys_root_arg {
+ args.extend(vec!["--sysroot".into(), sys_root]);
+ };
+
+ return rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();
+ }
+
+ if orig_args.iter().any(|a| a == "--version" || a == "-V") {
+ let version_info = rustc_tools_util::get_version_info!();
+ println!("{}", version_info);
+ exit(0);
+ }
+
+ // Setting RUSTC_WRAPPER causes Cargo to pass 'rustc' as the first argument.
+        // We're invoking the compiler programmatically, so we ignore this.
+ let wrapper_mode = orig_args.get(1).map(Path::new).and_then(Path::file_stem) == Some("rustc".as_ref());
+
+ if wrapper_mode {
+ // we still want to be able to invoke it normally though
+ orig_args.remove(1);
+ }
+
+ if !wrapper_mode && (orig_args.iter().any(|a| a == "--help" || a == "-h") || orig_args.len() == 1) {
+ display_help();
+ exit(0);
+ }
+
+ // this conditional check for the --sysroot flag is there so users can call
+        // `clippy-driver` directly
+ // without having to pass --sysroot or anything
+ let mut args: Vec<String> = orig_args.clone();
+ if !have_sys_root_arg {
+ args.extend(vec!["--sysroot".into(), sys_root]);
+ };
+
+ let mut no_deps = false;
+ let clippy_args_var = env::var("CLIPPY_ARGS").ok();
+ let clippy_args = clippy_args_var
+ .as_deref()
+ .unwrap_or_default()
+ .split("__CLIPPY_HACKERY__")
+ .filter_map(|s| match s {
+ "" => None,
+ "--no-deps" => {
+ no_deps = true;
+ None
+ },
+ _ => Some(s.to_string()),
+ })
+ .chain(vec!["--cfg".into(), r#"feature="cargo-clippy""#.into()])
+ .collect::<Vec<String>>();
+
+ // We enable Clippy if one of the following conditions is met
+ // - IF Clippy is run on its test suite OR
+ // - IF Clippy is run on the main crate, not on deps (`!cap_lints_allow`) THEN
+ // - IF `--no-deps` is not set (`!no_deps`) OR
+ // - IF `--no-deps` is set and Clippy is run on the specified primary package
+ let cap_lints_allow = arg_value(&orig_args, "--cap-lints", |val| val == "allow").is_some()
+ && arg_value(&orig_args, "--force-warn", |val| val.contains("clippy::")).is_none();
+ let in_primary_package = env::var("CARGO_PRIMARY_PACKAGE").is_ok();
+
+ let clippy_enabled = !cap_lints_allow && (!no_deps || in_primary_package);
+ if clippy_enabled {
+ args.extend(clippy_args);
+ rustc_driver::RunCompiler::new(&args, &mut ClippyCallbacks { clippy_args_var }).run()
+ } else {
+ rustc_driver::RunCompiler::new(&args, &mut RustcCallbacks { clippy_args_var }).run()
+ }
+ }))
+}
diff --git a/src/tools/clippy/src/main.rs b/src/tools/clippy/src/main.rs
new file mode 100644
index 000000000..9ee4a40cb
--- /dev/null
+++ b/src/tools/clippy/src/main.rs
@@ -0,0 +1,194 @@
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+// warn on lints that are included in `rust-lang/rust`'s bootstrap
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use rustc_tools_util::VersionInfo;
+use std::env;
+use std::path::PathBuf;
+use std::process::{self, Command};
+
+const CARGO_CLIPPY_HELP: &str = r#"Checks a package to catch common mistakes and improve your Rust code.
+
+Usage:
+ cargo clippy [options] [--] [<opts>...]
+
+Common options:
+ --no-deps Run Clippy only on the given crate, without linting the dependencies
+ --fix Automatically apply lint suggestions. This flag implies `--no-deps`
+ -h, --help Print this message
+ -V, --version Print version info and exit
+
+Other options are the same as `cargo check`.
+
+To allow or deny a lint from the command line you can use `cargo clippy --`
+with:
+
+ -W --warn OPT Set lint warnings
+ -A --allow OPT Set lint allowed
+ -D --deny OPT Set lint denied
+ -F --forbid OPT Set lint forbidden
+
+You can use tool lints to allow or deny lints from your code, e.g.:
+
+ #[allow(clippy::needless_lifetimes)]
+"#;
+
+fn show_help() {
+ println!("{}", CARGO_CLIPPY_HELP);
+}
+
+fn show_version() {
+ let version_info = rustc_tools_util::get_version_info!();
+ println!("{}", version_info);
+}
+
+pub fn main() {
+ // Check for version and help flags even when invoked as 'cargo-clippy'
+ if env::args().any(|a| a == "--help" || a == "-h") {
+ show_help();
+ return;
+ }
+
+ if env::args().any(|a| a == "--version" || a == "-V") {
+ show_version();
+ return;
+ }
+
+ if let Err(code) = process(env::args().skip(2)) {
+ process::exit(code);
+ }
+}
+
+struct ClippyCmd {
+ cargo_subcommand: &'static str,
+ args: Vec<String>,
+ clippy_args: Vec<String>,
+}
+
+impl ClippyCmd {
+ fn new<I>(mut old_args: I) -> Self
+ where
+ I: Iterator<Item = String>,
+ {
+ let mut cargo_subcommand = "check";
+ let mut args = vec![];
+ let mut clippy_args: Vec<String> = vec![];
+
+ for arg in old_args.by_ref() {
+ match arg.as_str() {
+ "--fix" => {
+ cargo_subcommand = "fix";
+ continue;
+ },
+ "--no-deps" => {
+ clippy_args.push("--no-deps".into());
+ continue;
+ },
+ "--" => break,
+ _ => {},
+ }
+
+ args.push(arg);
+ }
+
+ clippy_args.append(&mut (old_args.collect()));
+ if cargo_subcommand == "fix" && !clippy_args.iter().any(|arg| arg == "--no-deps") {
+ clippy_args.push("--no-deps".into());
+ }
+
+ Self {
+ cargo_subcommand,
+ args,
+ clippy_args,
+ }
+ }
+
+ fn path() -> PathBuf {
+ let mut path = env::current_exe()
+ .expect("current executable path invalid")
+ .with_file_name("clippy-driver");
+
+ if cfg!(windows) {
+ path.set_extension("exe");
+ }
+
+ path
+ }
+
+ fn into_std_cmd(self) -> Command {
+ let mut cmd = Command::new("cargo");
+ let clippy_args: String = self
+ .clippy_args
+ .iter()
+ .map(|arg| format!("{}__CLIPPY_HACKERY__", arg))
+ .collect();
+
+ // Currently, `CLIPPY_TERMINAL_WIDTH` is used only to format "unknown field" error messages.
+ let terminal_width = termize::dimensions().map_or(0, |(w, _)| w);
+
+ cmd.env("RUSTC_WORKSPACE_WRAPPER", Self::path())
+ .env("CLIPPY_ARGS", clippy_args)
+ .env("CLIPPY_TERMINAL_WIDTH", terminal_width.to_string())
+ .arg(self.cargo_subcommand)
+ .args(&self.args);
+
+ cmd
+ }
+}
+
+fn process<I>(old_args: I) -> Result<(), i32>
+where
+ I: Iterator<Item = String>,
+{
+ let cmd = ClippyCmd::new(old_args);
+
+ let mut cmd = cmd.into_std_cmd();
+
+ let exit_status = cmd
+ .spawn()
+ .expect("could not run cargo")
+ .wait()
+ .expect("failed to wait for cargo?");
+
+ if exit_status.success() {
+ Ok(())
+ } else {
+ Err(exit_status.code().unwrap_or(-1))
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::ClippyCmd;
+
+ #[test]
+ fn fix() {
+ let args = "cargo clippy --fix".split_whitespace().map(ToString::to_string);
+ let cmd = ClippyCmd::new(args);
+ assert_eq!("fix", cmd.cargo_subcommand);
+ assert!(!cmd.args.iter().any(|arg| arg.ends_with("unstable-options")));
+ }
+
+ #[test]
+ fn fix_implies_no_deps() {
+ let args = "cargo clippy --fix".split_whitespace().map(ToString::to_string);
+ let cmd = ClippyCmd::new(args);
+ assert!(cmd.clippy_args.iter().any(|arg| arg == "--no-deps"));
+ }
+
+ #[test]
+ fn no_deps_not_duplicated_with_fix() {
+ let args = "cargo clippy --fix -- --no-deps"
+ .split_whitespace()
+ .map(ToString::to_string);
+ let cmd = ClippyCmd::new(args);
+ assert_eq!(cmd.clippy_args.iter().filter(|arg| *arg == "--no-deps").count(), 1);
+ }
+
+ #[test]
+ fn check() {
+ let args = "cargo clippy".split_whitespace().map(ToString::to_string);
+ let cmd = ClippyCmd::new(args);
+ assert_eq!("check", cmd.cargo_subcommand);
+ }
+}
diff --git a/src/tools/clippy/tests/check-fmt.rs b/src/tools/clippy/tests/check-fmt.rs
new file mode 100644
index 000000000..0defd45b6
--- /dev/null
+++ b/src/tools/clippy/tests/check-fmt.rs
@@ -0,0 +1,28 @@
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use std::path::PathBuf;
+use std::process::Command;
+
+#[test]
+fn fmt() {
+ if option_env!("RUSTC_TEST_SUITE").is_some() || option_env!("NO_FMT_TEST").is_some() {
+ return;
+ }
+
+ let root_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ let output = Command::new("cargo")
+ .current_dir(root_dir)
+ .args(&["dev", "fmt", "--check"])
+ .output()
+ .unwrap();
+
+ println!("status: {}", output.status);
+ println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
+
+ assert!(
+ output.status.success(),
+ "Formatting check failed. Run `cargo dev fmt` to update formatting."
+ );
+}
diff --git a/src/tools/clippy/tests/clippy.toml b/src/tools/clippy/tests/clippy.toml
new file mode 100644
index 000000000..5eb7ac035
--- /dev/null
+++ b/src/tools/clippy/tests/clippy.toml
@@ -0,0 +1 @@
+# default config for tests, overrides clippy.toml at the project root
diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs
new file mode 100644
index 000000000..92ac1a2be
--- /dev/null
+++ b/src/tools/clippy/tests/compile-test.rs
@@ -0,0 +1,509 @@
+#![feature(test)] // compiletest_rs requires this attribute
+#![feature(once_cell)]
+#![feature(is_sorted)]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use compiletest_rs as compiletest;
+use compiletest_rs::common::Mode as TestMode;
+
+use std::collections::HashMap;
+use std::env::{self, remove_var, set_var, var_os};
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::sync::LazyLock;
+use test_utils::IS_RUSTC_TEST_SUITE;
+
+mod test_utils;
+
+// whether to run internal tests or not
+const RUN_INTERNAL_TESTS: bool = cfg!(feature = "internal");
+
+/// All crates used in UI tests are listed here
+static TEST_DEPENDENCIES: &[&str] = &[
+ "clippy_lints",
+ "clippy_utils",
+ "derive_new",
+ "futures",
+ "if_chain",
+ "itertools",
+ "quote",
+ "regex",
+ "serde",
+ "serde_derive",
+ "syn",
+ "tokio",
+ "parking_lot",
+ "rustc_semver",
+];
+
+// Test dependencies may need an `extern crate` here to ensure that they show up
+// in the depinfo file (otherwise cargo thinks they are unused)
+#[allow(unused_extern_crates)]
+extern crate clippy_lints;
+#[allow(unused_extern_crates)]
+extern crate clippy_utils;
+#[allow(unused_extern_crates)]
+extern crate derive_new;
+#[allow(unused_extern_crates)]
+extern crate futures;
+#[allow(unused_extern_crates)]
+extern crate if_chain;
+#[allow(unused_extern_crates)]
+extern crate itertools;
+#[allow(unused_extern_crates)]
+extern crate parking_lot;
+#[allow(unused_extern_crates)]
+extern crate quote;
+#[allow(unused_extern_crates)]
+extern crate rustc_semver;
+#[allow(unused_extern_crates)]
+extern crate syn;
+#[allow(unused_extern_crates)]
+extern crate tokio;
+
+/// Produces a string with an `--extern` flag for all UI test crate
+/// dependencies.
+///
+/// The dependency files are located by parsing the depinfo file for this test
+/// module. This assumes the `-Z binary-dep-depinfo` flag is enabled. All test
+/// dependencies must be added to Cargo.toml at the project root. Test
+/// dependencies that are not *directly* used by this test module require an
+/// `extern crate` declaration.
+static EXTERN_FLAGS: LazyLock<String> = LazyLock::new(|| {
+ let current_exe_depinfo = {
+ let mut path = env::current_exe().unwrap();
+ path.set_extension("d");
+ fs::read_to_string(path).unwrap()
+ };
+ let mut crates: HashMap<&str, &str> = HashMap::with_capacity(TEST_DEPENDENCIES.len());
+ for line in current_exe_depinfo.lines() {
+ // each dependency is expected to have a Makefile rule like `/path/to/crate-hash.rlib:`
+ let parse_name_path = || {
+ if line.starts_with(char::is_whitespace) {
+ return None;
+ }
+ let path_str = line.strip_suffix(':')?;
+ let path = Path::new(path_str);
+ if !matches!(path.extension()?.to_str()?, "rlib" | "so" | "dylib" | "dll") {
+ return None;
+ }
+ let (name, _hash) = path.file_stem()?.to_str()?.rsplit_once('-')?;
+ // the "lib" prefix is not present for dll files
+ let name = name.strip_prefix("lib").unwrap_or(name);
+ Some((name, path_str))
+ };
+ if let Some((name, path)) = parse_name_path() {
+ if TEST_DEPENDENCIES.contains(&name) {
+ // A dependency may be listed twice if it is available in sysroot,
+ // and the sysroot dependencies are listed first. As of this writing,
+ // this only seems to apply to if_chain.
+ crates.insert(name, path);
+ }
+ }
+ }
+ let not_found: Vec<&str> = TEST_DEPENDENCIES
+ .iter()
+ .copied()
+ .filter(|n| !crates.contains_key(n))
+ .collect();
+ assert!(
+ not_found.is_empty(),
+ "dependencies not found in depinfo: {:?}\n\
+ help: Make sure the `-Z binary-dep-depinfo` rust flag is enabled\n\
+ help: Try adding to dev-dependencies in Cargo.toml\n\
+ help: Be sure to also add `extern crate ...;` to tests/compile-test.rs",
+ not_found,
+ );
+ crates
+ .into_iter()
+ .map(|(name, path)| format!(" --extern {}={}", name, path))
+ .collect()
+});
+
+fn base_config(test_dir: &str) -> compiletest::Config {
+ let mut config = compiletest::Config {
+ edition: Some("2021".into()),
+ mode: TestMode::Ui,
+ ..Default::default()
+ };
+
+ if let Ok(filters) = env::var("TESTNAME") {
+ config.filters = filters.split(',').map(ToString::to_string).collect();
+ }
+
+ if let Some(path) = option_env!("RUSTC_LIB_PATH") {
+ let path = PathBuf::from(path);
+ config.run_lib_path = path.clone();
+ config.compile_lib_path = path;
+ }
+ let current_exe_path = env::current_exe().unwrap();
+ let deps_path = current_exe_path.parent().unwrap();
+ let profile_path = deps_path.parent().unwrap();
+
+ // Using `-L dependency={}` enforces that external dependencies are added with `--extern`.
+ // This is valuable because a) it allows us to monitor what external dependencies are used
+ // and b) it ensures that conflicting rlibs are resolved properly.
+ let host_libs = option_env!("HOST_LIBS")
+ .map(|p| format!(" -L dependency={}", Path::new(p).join("deps").display()))
+ .unwrap_or_default();
+ config.target_rustcflags = Some(format!(
+ "--emit=metadata -Dwarnings -Zui-testing -L dependency={}{}{}",
+ deps_path.display(),
+ host_libs,
+ &*EXTERN_FLAGS,
+ ));
+
+ config.src_base = Path::new("tests").join(test_dir);
+ config.build_base = profile_path.join("test").join(test_dir);
+ config.rustc_path = profile_path.join(if cfg!(windows) {
+ "clippy-driver.exe"
+ } else {
+ "clippy-driver"
+ });
+ config
+}
+
+fn run_ui() {
+ let mut config = base_config("ui");
+ config.rustfix_coverage = true;
+ // use tests/clippy.toml
+ let _g = VarGuard::set("CARGO_MANIFEST_DIR", fs::canonicalize("tests").unwrap());
+ let _threads = VarGuard::set(
+ "RUST_TEST_THREADS",
+ // if RUST_TEST_THREADS is set, adhere to it; otherwise default to the available parallelism
+ env::var("RUST_TEST_THREADS").unwrap_or_else(|_| {
+ std::thread::available_parallelism()
+ .map_or(1, std::num::NonZeroUsize::get)
+ .to_string()
+ }),
+ );
+ compiletest::run_tests(&config);
+ check_rustfix_coverage();
+}
+
+fn run_internal_tests() {
+ // only run internal tests when the `internal` feature is enabled
+ if !RUN_INTERNAL_TESTS {
+ return;
+ }
+ let config = base_config("ui-internal");
+ compiletest::run_tests(&config);
+}
+
+fn run_ui_toml() {
+ fn run_tests(config: &compiletest::Config, mut tests: Vec<tester::TestDescAndFn>) -> Result<bool, io::Error> {
+ let mut result = true;
+ let opts = compiletest::test_opts(config);
+ for dir in fs::read_dir(&config.src_base)? {
+ let dir = dir?;
+ if !dir.file_type()?.is_dir() {
+ continue;
+ }
+ let dir_path = dir.path();
+ let _g = VarGuard::set("CARGO_MANIFEST_DIR", &dir_path);
+ for file in fs::read_dir(&dir_path)? {
+ let file = file?;
+ let file_path = file.path();
+ if file.file_type()?.is_dir() {
+ continue;
+ }
+ if file_path.extension() != Some(OsStr::new("rs")) {
+ continue;
+ }
+ let paths = compiletest::common::TestPaths {
+ file: file_path,
+ base: config.src_base.clone(),
+ relative_dir: dir_path.file_name().unwrap().into(),
+ };
+ let test_name = compiletest::make_test_name(config, &paths);
+ let index = tests
+ .iter()
+ .position(|test| test.desc.name == test_name)
+ .expect("The test should be in there");
+ result &= tester::run_tests_console(&opts, vec![tests.swap_remove(index)])?;
+ }
+ }
+ Ok(result)
+ }
+
+ let mut config = base_config("ui-toml");
+ config.src_base = config.src_base.canonicalize().unwrap();
+
+ let tests = compiletest::make_tests(&config);
+
+ let res = run_tests(&config, tests);
+ match res {
+ Ok(true) => {},
+ Ok(false) => panic!("Some tests failed"),
+ Err(e) => {
+ panic!("I/O failure during tests: {:?}", e);
+ },
+ }
+}
+
+fn run_ui_cargo() {
+ fn run_tests(
+ config: &compiletest::Config,
+ filters: &[String],
+ mut tests: Vec<tester::TestDescAndFn>,
+ ) -> Result<bool, io::Error> {
+ let mut result = true;
+ let opts = compiletest::test_opts(config);
+
+ for dir in fs::read_dir(&config.src_base)? {
+ let dir = dir?;
+ if !dir.file_type()?.is_dir() {
+ continue;
+ }
+
+ // Use the filter if provided: skip directories that do not match any of the filters
+ let dir_path = dir.path();
+ if !filters.is_empty() && !filters.iter().any(|f| dir_path.ends_with(f)) {
+ continue;
+ }
+
+ for case in fs::read_dir(&dir_path)? {
+ let case = case?;
+ if !case.file_type()?.is_dir() {
+ continue;
+ }
+
+ let src_path = case.path().join("src");
+
+ // When switching between branches, if the previous branch had a test
+ // that the current branch does not have, the directory is not removed
+ // because an ignored Cargo.lock file exists.
+ if !src_path.exists() {
+ continue;
+ }
+
+ env::set_current_dir(&src_path)?;
+
+ let cargo_toml_path = case.path().join("Cargo.toml");
+ let cargo_content = fs::read(&cargo_toml_path)?;
+ let cargo_parsed: toml::Value = toml::from_str(
+ std::str::from_utf8(&cargo_content).expect("`Cargo.toml` is not a valid utf-8 file!"),
+ )
+ .expect("Can't parse `Cargo.toml`");
+
+ let _g = VarGuard::set("CARGO_MANIFEST_DIR", case.path());
+ let _h = VarGuard::set(
+ "CARGO_PKG_RUST_VERSION",
+ cargo_parsed
+ .get("package")
+ .and_then(|p| p.get("rust-version"))
+ .and_then(toml::Value::as_str)
+ .unwrap_or(""),
+ );
+
+ for file in fs::read_dir(&src_path)? {
+ let file = file?;
+ if file.file_type()?.is_dir() {
+ continue;
+ }
+
+ // Search for the main file to avoid running a test for each file in the project
+ let file_path = file.path();
+ match file_path.file_name().and_then(OsStr::to_str) {
+ Some("main.rs") => {},
+ _ => continue,
+ }
+ let _g = VarGuard::set("CLIPPY_CONF_DIR", case.path());
+ let paths = compiletest::common::TestPaths {
+ file: file_path,
+ base: config.src_base.clone(),
+ relative_dir: src_path.strip_prefix(&config.src_base).unwrap().into(),
+ };
+ let test_name = compiletest::make_test_name(config, &paths);
+ let index = tests
+ .iter()
+ .position(|test| test.desc.name == test_name)
+ .expect("The test should be in there");
+ result &= tester::run_tests_console(&opts, vec![tests.swap_remove(index)])?;
+ }
+ }
+ }
+ Ok(result)
+ }
+
+ if IS_RUSTC_TEST_SUITE {
+ return;
+ }
+
+ let mut config = base_config("ui-cargo");
+ config.src_base = config.src_base.canonicalize().unwrap();
+
+ let tests = compiletest::make_tests(&config);
+
+ let current_dir = env::current_dir().unwrap();
+ let res = run_tests(&config, &config.filters, tests);
+ env::set_current_dir(current_dir).unwrap();
+
+ match res {
+ Ok(true) => {},
+ Ok(false) => panic!("Some tests failed"),
+ Err(e) => {
+ panic!("I/O failure during tests: {:?}", e);
+ },
+ }
+}
+
+#[test]
+fn compile_test() {
+ set_var("CLIPPY_DISABLE_DOCS_LINKS", "true");
+ run_ui();
+ run_ui_toml();
+ run_ui_cargo();
+ run_internal_tests();
+}
+
+const RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS: &[&str] = &[
+ "assign_ops2.rs",
+ "borrow_deref_ref_unfixable.rs",
+ "cast_size_32bit.rs",
+ "char_lit_as_u8.rs",
+ "cmp_owned/without_suggestion.rs",
+ "dbg_macro.rs",
+ "deref_addrof_double_trigger.rs",
+ "doc/unbalanced_ticks.rs",
+ "eprint_with_newline.rs",
+ "explicit_counter_loop.rs",
+ "iter_skip_next_unfixable.rs",
+ "let_and_return.rs",
+ "literals.rs",
+ "map_flatten.rs",
+ "map_unwrap_or.rs",
+ "match_bool.rs",
+ "mem_replace_macro.rs",
+ "needless_arbitrary_self_type_unfixable.rs",
+ "needless_borrow_pat.rs",
+ "needless_for_each_unfixable.rs",
+ "nonminimal_bool.rs",
+ "print_literal.rs",
+ "print_with_newline.rs",
+ "redundant_static_lifetimes_multiple.rs",
+ "ref_binding_to_reference.rs",
+ "repl_uninit.rs",
+ "result_map_unit_fn_unfixable.rs",
+ "search_is_some.rs",
+ "single_component_path_imports_nested_first.rs",
+ "string_add.rs",
+ "toplevel_ref_arg_non_rustfix.rs",
+ "trait_duplication_in_bounds.rs",
+ "unit_arg.rs",
+ "unnecessary_clone.rs",
+ "unnecessary_lazy_eval_unfixable.rs",
+ "write_literal.rs",
+ "write_literal_2.rs",
+ "write_with_newline.rs",
+];
+
+fn check_rustfix_coverage() {
+ let missing_coverage_path = Path::new("target/debug/test/ui/rustfix_missing_coverage.txt");
+
+ if let Ok(missing_coverage_contents) = std::fs::read_to_string(missing_coverage_path) {
+ assert!(RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS.iter().is_sorted_by_key(Path::new));
+
+ for rs_path in missing_coverage_contents.lines() {
+ if Path::new(rs_path).starts_with("tests/ui/crashes") {
+ continue;
+ }
+ let filename = Path::new(rs_path).strip_prefix("tests/ui/").unwrap();
+ assert!(
+ RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS
+ .binary_search_by_key(&filename, Path::new)
+ .is_ok(),
+ "`{}` runs `MachineApplicable` diagnostics but is missing a `run-rustfix` annotation. \
+ Please either add `// run-rustfix` at the top of the file or add the file to \
+ `RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS` in `tests/compile-test.rs`.",
+ rs_path,
+ );
+ }
+ }
+}
+
+#[test]
+fn rustfix_coverage_known_exceptions_accuracy() {
+ for filename in RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS {
+ let rs_path = Path::new("tests/ui").join(filename);
+ assert!(
+ rs_path.exists(),
+ "`{}` does not exist",
+ rs_path.strip_prefix(env!("CARGO_MANIFEST_DIR")).unwrap().display()
+ );
+ let fixed_path = rs_path.with_extension("fixed");
+ assert!(
+ !fixed_path.exists(),
+ "`{}` exists",
+ fixed_path.strip_prefix(env!("CARGO_MANIFEST_DIR")).unwrap().display()
+ );
+ }
+}
+
+#[test]
+fn ui_cargo_toml_metadata() {
+ let ui_cargo_path = Path::new("tests/ui-cargo");
+ let cargo_common_metadata_path = ui_cargo_path.join("cargo_common_metadata");
+ let publish_exceptions =
+ ["fail_publish", "fail_publish_true", "pass_publish_empty"].map(|path| cargo_common_metadata_path.join(path));
+
+ for entry in walkdir::WalkDir::new(ui_cargo_path) {
+ let entry = entry.unwrap();
+ let path = entry.path();
+ if path.file_name() != Some(OsStr::new("Cargo.toml")) {
+ continue;
+ }
+
+ let toml = fs::read_to_string(path).unwrap().parse::<toml::Value>().unwrap();
+
+ let package = toml.as_table().unwrap().get("package").unwrap().as_table().unwrap();
+
+ let name = package.get("name").unwrap().as_str().unwrap().replace('-', "_");
+ assert!(
+ path.parent()
+ .unwrap()
+ .components()
+ .map(|component| component.as_os_str().to_string_lossy().replace('-', "_"))
+ .any(|s| *s == name)
+ || path.starts_with(&cargo_common_metadata_path),
+ "{:?} has incorrect package name",
+ path
+ );
+
+ let publish = package.get("publish").and_then(toml::Value::as_bool).unwrap_or(true);
+ assert!(
+ !publish || publish_exceptions.contains(&path.parent().unwrap().to_path_buf()),
+ "{:?} lacks `publish = false`",
+ path
+ );
+ }
+}
+
+/// Restores an env var on drop
+#[must_use]
+struct VarGuard {
+ key: &'static str,
+ value: Option<OsString>,
+}
+
+impl VarGuard {
+ fn set(key: &'static str, val: impl AsRef<OsStr>) -> Self {
+ let value = var_os(key);
+ set_var(key, val);
+ Self { key, value }
+ }
+}
+
+impl Drop for VarGuard {
+ fn drop(&mut self) {
+ match self.value.as_deref() {
+ None => remove_var(self.key),
+ Some(value) => set_var(self.key, value),
+ }
+ }
+}
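The `EXTERN_FLAGS` initializer near the top of this file parses the test binary's depinfo file: every non-indented Makefile-style rule whose target is an `rlib`/`so`/`dylib`/`dll` yields a crate name and path, which become `--extern` flags. A minimal sketch of that transformation on a single, made-up depinfo line:

use std::path::Path;

fn main() {
    // Made-up rule line of the shape `EXTERN_FLAGS` expects in the depinfo file.
    let line = "/target/debug/deps/libserde-0123abcd.rlib:";
    let path_str = line.strip_suffix(':').unwrap();
    let stem = Path::new(path_str).file_stem().unwrap().to_str().unwrap();
    let (name, _hash) = stem.rsplit_once('-').unwrap();
    let name = name.strip_prefix("lib").unwrap_or(name);
    // The fragment that would end up in `EXTERN_FLAGS` for this dependency:
    assert_eq!(
        format!(" --extern {}={}", name, path_str),
        " --extern serde=/target/debug/deps/libserde-0123abcd.rlib"
    );
}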
diff --git a/src/tools/clippy/tests/dogfood.rs b/src/tools/clippy/tests/dogfood.rs
new file mode 100644
index 000000000..5697e8680
--- /dev/null
+++ b/src/tools/clippy/tests/dogfood.rs
@@ -0,0 +1,104 @@
+//! This test is part of quality control: it makes clippy eat what it produces, complete with
+//! awesome lints and long error messages
+//!
+//! See [Eating your own dog food](https://en.wikipedia.org/wiki/Eating_your_own_dog_food) for context
+
+#![feature(once_cell)]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use std::path::PathBuf;
+use std::process::Command;
+use test_utils::IS_RUSTC_TEST_SUITE;
+
+mod test_utils;
+
+#[test]
+fn dogfood_clippy() {
+ if IS_RUSTC_TEST_SUITE {
+ return;
+ }
+
+ // "" is the root package
+ for package in &["", "clippy_dev", "clippy_lints", "clippy_utils", "rustc_tools_util"] {
+ run_clippy_for_package(package, &["-D", "clippy::all", "-D", "clippy::pedantic"]);
+ }
+}
+
+#[test]
+#[ignore]
+#[cfg(feature = "internal")]
+fn run_metadata_collection_lint() {
+ use std::fs::File;
+ use std::time::SystemTime;
+
+ // Setup for validation
+ let metadata_output_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("util/gh-pages/lints.json");
+ let start_time = SystemTime::now();
+
+ // Run collection as is
+ std::env::set_var("ENABLE_METADATA_COLLECTION", "1");
+ run_clippy_for_package("clippy_lints", &["-A", "unfulfilled_lint_expectations"]);
+
+ // Check if cargo caching got in the way
+ if let Ok(file) = File::open(metadata_output_path) {
+ if let Ok(metadata) = file.metadata() {
+ if let Ok(last_modification) = metadata.modified() {
+ if last_modification > start_time {
+ // The output file has been modified, most likely by a hungry
+ // metadata collection monster, so we'll return early.
+ return;
+ }
+ }
+ }
+ }
+
+ // Force cargo to invalidate the caches
+ filetime::set_file_mtime(
+ PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("clippy_lints/src/lib.rs"),
+ filetime::FileTime::now(),
+ )
+ .unwrap();
+
+ // Running the collection again
+ run_clippy_for_package("clippy_lints", &["-A", "unfulfilled_lint_expectations"]);
+}
+
+fn run_clippy_for_package(project: &str, args: &[&str]) {
+ let root_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+
+ let mut command = Command::new(&*test_utils::CARGO_CLIPPY_PATH);
+
+ command
+ .current_dir(root_dir.join(project))
+ .env("CARGO_INCREMENTAL", "0")
+ .arg("clippy")
+ .arg("--all-targets")
+ .arg("--all-features");
+
+ if let Ok(dogfood_args) = std::env::var("__CLIPPY_DOGFOOD_ARGS") {
+ for arg in dogfood_args.split_whitespace() {
+ command.arg(arg);
+ }
+ }
+
+ command.arg("--").args(args);
+ command.arg("-Cdebuginfo=0"); // disable debuginfo to generate less data in the target dir
+
+ if cfg!(feature = "internal") {
+ // internal lints only exist if we build with the internal feature
+ command.args(&["-D", "clippy::internal"]);
+ } else {
+ // running a clippy built without internal lints on the clippy source
+ // that contains e.g. `allow(clippy::invalid_paths)`
+ command.args(&["-A", "unknown_lints"]);
+ }
+
+ let output = command.output().unwrap();
+
+ println!("status: {}", output.status);
+ println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
+
+ assert!(output.status.success());
+}
diff --git a/src/tools/clippy/tests/integration.rs b/src/tools/clippy/tests/integration.rs
new file mode 100644
index 000000000..c64425fa0
--- /dev/null
+++ b/src/tools/clippy/tests/integration.rs
@@ -0,0 +1,89 @@
+#![cfg(feature = "integration")]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use std::env;
+use std::ffi::OsStr;
+use std::process::Command;
+
+#[cfg_attr(feature = "integration", test)]
+fn integration_test() {
+ let repo_name = env::var("INTEGRATION").expect("`INTEGRATION` var not set");
+ let repo_url = format!("https://github.com/{}", repo_name);
+ let crate_name = repo_name
+ .split('/')
+ .nth(1)
+ .expect("repo name should have format `<org>/<name>`");
+
+ let mut repo_dir = tempfile::tempdir().expect("couldn't create temp dir").into_path();
+ repo_dir.push(crate_name);
+
+ let st = Command::new("git")
+ .args(&[
+ OsStr::new("clone"),
+ OsStr::new("--depth=1"),
+ OsStr::new(&repo_url),
+ OsStr::new(&repo_dir),
+ ])
+ .status()
+ .expect("unable to run git");
+ assert!(st.success());
+
+ let root_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ let target_dir = std::path::Path::new(&root_dir).join("target");
+ let clippy_binary = target_dir.join(env!("PROFILE")).join("cargo-clippy");
+
+ let output = Command::new(clippy_binary)
+ .current_dir(repo_dir)
+ .env("RUST_BACKTRACE", "full")
+ .env("CARGO_TARGET_DIR", target_dir)
+ .args(&[
+ "clippy",
+ "--all-targets",
+ "--all-features",
+ "--",
+ "--cap-lints",
+ "warn",
+ "-Wclippy::pedantic",
+ "-Wclippy::nursery",
+ ])
+ .output()
+ .expect("unable to run clippy");
+
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ if stderr.contains("internal compiler error") {
+ let backtrace_start = stderr
+ .find("thread 'rustc' panicked at")
+ .expect("start of backtrace not found");
+ let backtrace_end = stderr
+ .rfind("error: internal compiler error")
+ .expect("end of backtrace not found");
+
+ panic!(
+ "internal compiler error\nBacktrace:\n\n{}",
+ &stderr[backtrace_start..backtrace_end]
+ );
+ } else if stderr.contains("query stack during panic") {
+ panic!("query stack during panic in the output");
+ } else if stderr.contains("E0463") {
+ // Encountering E0463 (can't find crate for `x`) did _not_ cause the build to fail in the
+ // past, even though it should have. That's why we explicitly panic here.
+ // See PR #3552 and issue #3523 for more background.
+ panic!("error: E0463");
+ } else if stderr.contains("E0514") {
+ panic!("incompatible crate versions");
+ } else if stderr.contains("failed to run `rustc` to learn about target-specific information") {
+ panic!("couldn't find librustc_driver, consider setting `LD_LIBRARY_PATH`");
+ } else {
+ assert!(
+ !stderr.contains("toolchain") || !stderr.contains("is not installed"),
+ "missing required toolchain"
+ );
+ }
+
+ match output.status.code() {
+ Some(0) => println!("Compilation successful"),
+ Some(code) => eprintln!("Compilation failed. Exit code: {}", code),
+ None => panic!("Process terminated by signal"),
+ }
+}
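The integration test reads the target repository from the `INTEGRATION` environment variable in `<org>/<name>` form and derives both the clone URL and the crate directory from it. A minimal sketch of that parsing with a made-up slug:

fn main() {
    // Made-up repository slug in the `<org>/<name>` form the test expects.
    let repo_name = "rust-lang/log";
    let crate_name = repo_name
        .split('/')
        .nth(1)
        .expect("repo name should have format `<org>/<name>`");
    let repo_url = format!("https://github.com/{}", repo_name);
    assert_eq!(crate_name, "log");
    assert_eq!(repo_url, "https://github.com/rust-lang/log");
}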
diff --git a/src/tools/clippy/tests/lint_message_convention.rs b/src/tools/clippy/tests/lint_message_convention.rs
new file mode 100644
index 000000000..c3aae1a9a
--- /dev/null
+++ b/src/tools/clippy/tests/lint_message_convention.rs
@@ -0,0 +1,116 @@
+#![feature(once_cell)]
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+
+use std::ffi::OsStr;
+use std::path::PathBuf;
+use std::sync::LazyLock;
+
+use regex::RegexSet;
+
+#[derive(Debug)]
+struct Message {
+ path: PathBuf,
+ bad_lines: Vec<String>,
+}
+
+impl Message {
+ fn new(path: PathBuf) -> Self {
+ // we don't want the first letter after "error: ", "help: ", etc. to be capitalized,
+ // and we don't want punctuation (other than a trailing "?") at the end of a line
+ static REGEX_SET: LazyLock<RegexSet> = LazyLock::new(|| {
+ RegexSet::new(&[
+ r"error: [A-Z]",
+ r"help: [A-Z]",
+ r"warning: [A-Z]",
+ r"note: [A-Z]",
+ r"try this: [A-Z]",
+ r"error: .*[.!]$",
+ r"help: .*[.!]$",
+ r"warning: .*[.!]$",
+ r"note: .*[.!]$",
+ r"try this: .*[.!]$",
+ ])
+ .unwrap()
+ });
+
+ // sometimes a capitalized first character is legitimate (as in "C-like enum variants") or
+ // we deliberately want to ask a question ending in "?"
+ static EXCEPTIONS_SET: LazyLock<RegexSet> = LazyLock::new(|| {
+ RegexSet::new(&[
+ r"\.\.\.$",
+ r".*C-like enum variant discriminant is not portable to 32-bit targets",
+ r".*Intel x86 assembly syntax used",
+ r".*AT&T x86 assembly syntax used",
+ r"note: Clippy version: .*",
+ r"the compiler unexpectedly panicked. this is a bug.",
+ ])
+ .unwrap()
+ });
+
+ let content: String = std::fs::read_to_string(&path).unwrap();
+
+ let bad_lines = content
+ .lines()
+ .filter(|line| REGEX_SET.matches(line).matched_any())
+ // ignore exceptions
+ .filter(|line| !EXCEPTIONS_SET.matches(line).matched_any())
+ .map(ToOwned::to_owned)
+ .collect::<Vec<String>>();
+
+ Message { path, bad_lines }
+ }
+}
+
+#[test]
+fn lint_message_convention() {
+ // disable the test inside the rustc test suite
+ if option_env!("RUSTC_TEST_SUITE").is_some() {
+ return;
+ }
+
+ // make sure that lint messages:
+ // * are not capitalized
+ // * don't have punctuation at the end of the last sentence
+
+ // these directories have interesting tests
+ let test_dirs = ["ui", "ui-cargo", "ui-internal", "ui-toml"]
+ .iter()
+ .map(PathBuf::from)
+ .map(|p| {
+ let base = PathBuf::from("tests");
+ base.join(p)
+ });
+
+ // gather all .stderr files
+ let tests = test_dirs
+ .flat_map(|dir| {
+ std::fs::read_dir(dir)
+ .expect("failed to read dir")
+ .map(|direntry| direntry.unwrap().path())
+ })
+ .filter(|file| matches!(file.extension().map(OsStr::to_str), Some(Some("stderr"))));
+
+ // get all files that have any "bad lines" in them
+ let bad_tests: Vec<Message> = tests
+ .map(Message::new)
+ .filter(|message| !message.bad_lines.is_empty())
+ .collect();
+
+ for message in &bad_tests {
+ eprintln!(
+ "error: the test '{}' contained the following nonconforming lines :",
+ message.path.display()
+ );
+ message.bad_lines.iter().for_each(|line| eprintln!("{}", line));
+ eprintln!("\n\n");
+ }
+
+ eprintln!(
+ "\n\n\nLint message should not start with a capital letter and should not have punctuation at the end of the message unless multiple sentences are needed."
+ );
+ eprintln!("Check out the rustc-dev-guide for more information:");
+ eprintln!("https://rustc-dev-guide.rust-lang.org/diagnostics.html#diagnostic-structure\n\n\n");
+
+ assert!(bad_tests.is_empty());
+}
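The two `RegexSet`s in `Message::new` encode the convention: a line is flagged when the text after `error:`, `help:`, `warning:`, `note:`, or `try this:` starts with a capital letter or ends in `.`/`!`, unless it matches one of the exception patterns. A minimal sketch with made-up messages, assuming the same `regex` crate this test already depends on:

use regex::RegexSet;

fn main() {
    let bad = RegexSet::new(&[r"error: [A-Z]", r"error: .*[.!]$"]).unwrap();

    // flagged: capitalized first word and a trailing period
    assert!(bad.matches("error: Consider using `Self` here.").matched_any());
    // conforming: lowercase start, no trailing punctuation
    assert!(!bad.matches("error: unnecessary structure name repetition").matched_any());
}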
diff --git a/src/tools/clippy/tests/missing-test-files.rs b/src/tools/clippy/tests/missing-test-files.rs
new file mode 100644
index 000000000..7d6edc2b1
--- /dev/null
+++ b/src/tools/clippy/tests/missing-test-files.rs
@@ -0,0 +1,69 @@
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::assertions_on_constants)]
+#![feature(path_file_prefix)]
+
+use std::cmp::Ordering;
+use std::ffi::OsStr;
+use std::fs::{self, DirEntry};
+use std::path::Path;
+
+#[test]
+fn test_missing_tests() {
+ let missing_files = explore_directory(Path::new("./tests"));
+ if !missing_files.is_empty() {
+ assert!(
+ false,
+ "Didn't see a test file for the following files:\n\n{}\n",
+ missing_files
+ .iter()
+ .map(|s| format!("\t{}", s))
+ .collect::<Vec<_>>()
+ .join("\n")
+ );
+ }
+}
+
+// Test for missing files.
+fn explore_directory(dir: &Path) -> Vec<String> {
+ let mut missing_files: Vec<String> = Vec::new();
+ let mut current_file = String::new();
+ let mut files: Vec<DirEntry> = fs::read_dir(dir).unwrap().filter_map(Result::ok).collect();
+ files.sort_by(|x, y| {
+ match x.path().file_prefix().cmp(&y.path().file_prefix()) {
+ Ordering::Equal => (),
+ ord => return ord,
+ }
+ // Sort .rs files before others that share the same prefix, so that when we see a
+ // file prefix for the first time and it's not a Rust file, the corresponding Rust
+ // file has to be missing.
+ match (
+ x.path().extension().and_then(OsStr::to_str),
+ y.path().extension().and_then(OsStr::to_str),
+ ) {
+ (Some("rs"), _) => Ordering::Less,
+ (_, Some("rs")) => Ordering::Greater,
+ _ => Ordering::Equal,
+ }
+ });
+ for entry in &files {
+ let path = entry.path();
+ if path.is_dir() {
+ missing_files.extend(explore_directory(&path));
+ } else {
+ let file_prefix = path.file_prefix().unwrap().to_str().unwrap().to_string();
+ if let Some(ext) = path.extension() {
+ match ext.to_str().unwrap() {
+ "rs" => current_file = file_prefix.clone(),
+ "stderr" | "stdout" => {
+ if file_prefix != current_file {
+ missing_files.push(path.to_str().unwrap().to_string());
+ }
+ },
+ _ => continue,
+ };
+ }
+ }
+ }
+ missing_files
+}
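The comparator in `explore_directory` sorts entries by file prefix and, within a prefix, puts the `.rs` file first, so an orphaned `.stderr` or `.stdout` shows up as a prefix that was never preceded by a Rust file. A minimal sketch of that ordering on made-up names, using the stable `file_stem` in place of the nightly-only `file_prefix`:

use std::cmp::Ordering;
use std::ffi::OsStr;
use std::path::Path;

fn main() {
    let mut files = vec!["b.stderr", "a.stderr", "a.rs"];
    files.sort_by(|x, y| {
        let (x, y) = (Path::new(x), Path::new(y));
        match x.file_stem().cmp(&y.file_stem()) {
            Ordering::Equal => {},
            ord => return ord,
        }
        // `.rs` sorts before any companion output files with the same stem
        match (
            x.extension().and_then(OsStr::to_str),
            y.extension().and_then(OsStr::to_str),
        ) {
            (Some("rs"), _) => Ordering::Less,
            (_, Some("rs")) => Ordering::Greater,
            _ => Ordering::Equal,
        }
    });
    // `a.rs` precedes its own stderr; `b.stderr` has no companion `b.rs`.
    assert_eq!(files, ["a.rs", "a.stderr", "b.stderr"]);
}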
diff --git a/src/tools/clippy/tests/test_utils/mod.rs b/src/tools/clippy/tests/test_utils/mod.rs
new file mode 100644
index 000000000..ea8c54e08
--- /dev/null
+++ b/src/tools/clippy/tests/test_utils/mod.rs
@@ -0,0 +1,13 @@
+#![allow(dead_code)] // see https://github.com/rust-lang/rust/issues/46379
+
+use std::path::PathBuf;
+use std::sync::LazyLock;
+
+pub static CARGO_CLIPPY_PATH: LazyLock<PathBuf> = LazyLock::new(|| {
+ let mut path = std::env::current_exe().unwrap();
+ assert!(path.pop()); // deps
+ path.set_file_name("cargo-clippy");
+ path
+});
+
+pub const IS_RUSTC_TEST_SUITE: bool = option_env!("RUSTC_TEST_SUITE").is_some();
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/Cargo.toml
new file mode 100644
index 000000000..bc8e428f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo_common_metadata_fail"
+version = "0.1.0"
+publish = false
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/clippy.toml
new file mode 100644
index 000000000..de4f04b24
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/clippy.toml
@@ -0,0 +1 @@
+cargo-ignore-publish = true
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs
new file mode 100644
index 000000000..27841e18a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=cargo_common_metadata
+#![warn(clippy::cargo_common_metadata)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.stderr
new file mode 100644
index 000000000..86953142b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.stderr
@@ -0,0 +1,16 @@
+error: package `cargo_common_metadata_fail` is missing `package.description` metadata
+ |
+ = note: `-D clippy::cargo-common-metadata` implied by `-D warnings`
+
+error: package `cargo_common_metadata_fail` is missing `either package.license or package.license_file` metadata
+
+error: package `cargo_common_metadata_fail` is missing `package.repository` metadata
+
+error: package `cargo_common_metadata_fail` is missing `package.readme` metadata
+
+error: package `cargo_common_metadata_fail` is missing `package.keywords` metadata
+
+error: package `cargo_common_metadata_fail` is missing `package.categories` metadata
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/Cargo.toml
new file mode 100644
index 000000000..5005b83f5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo_common_metadata_fail_publish"
+version = "0.1.0"
+publish = ["some-registry-name"]
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs
new file mode 100644
index 000000000..27841e18a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=cargo_common_metadata
+#![warn(clippy::cargo_common_metadata)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.stderr
new file mode 100644
index 000000000..ac1b5e8e9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.stderr
@@ -0,0 +1,16 @@
+error: package `cargo_common_metadata_fail_publish` is missing `package.description` metadata
+ |
+ = note: `-D clippy::cargo-common-metadata` implied by `-D warnings`
+
+error: package `cargo_common_metadata_fail_publish` is missing `either package.license or package.license_file` metadata
+
+error: package `cargo_common_metadata_fail_publish` is missing `package.repository` metadata
+
+error: package `cargo_common_metadata_fail_publish` is missing `package.readme` metadata
+
+error: package `cargo_common_metadata_fail_publish` is missing `package.keywords` metadata
+
+error: package `cargo_common_metadata_fail_publish` is missing `package.categories` metadata
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/Cargo.toml
new file mode 100644
index 000000000..51858eecd
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo_common_metadata_fail_publish_true"
+version = "0.1.0"
+publish = true
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs
new file mode 100644
index 000000000..27841e18a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=cargo_common_metadata
+#![warn(clippy::cargo_common_metadata)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.stderr
new file mode 100644
index 000000000..be32c0dc4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.stderr
@@ -0,0 +1,16 @@
+error: package `cargo_common_metadata_fail_publish_true` is missing `package.description` metadata
+ |
+ = note: `-D clippy::cargo-common-metadata` implied by `-D warnings`
+
+error: package `cargo_common_metadata_fail_publish_true` is missing `either package.license or package.license_file` metadata
+
+error: package `cargo_common_metadata_fail_publish_true` is missing `package.repository` metadata
+
+error: package `cargo_common_metadata_fail_publish_true` is missing `package.readme` metadata
+
+error: package `cargo_common_metadata_fail_publish_true` is missing `package.keywords` metadata
+
+error: package `cargo_common_metadata_fail_publish_true` is missing `package.categories` metadata
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/Cargo.toml
new file mode 100644
index 000000000..9f6e51fb4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "cargo_common_metadata_pass"
+version = "0.1.0"
+publish = false
+description = "A test package for the cargo_common_metadata lint"
+repository = "https://github.com/someone/cargo_common_metadata"
+readme = "README.md"
+license = "MIT OR Apache-2.0"
+keywords = ["metadata", "lint", "clippy"]
+categories = ["development-tools::testing"]
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/clippy.toml
new file mode 100644
index 000000000..de4f04b24
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/clippy.toml
@@ -0,0 +1 @@
+cargo-ignore-publish = true
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs
new file mode 100644
index 000000000..27841e18a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=cargo_common_metadata
+#![warn(clippy::cargo_common_metadata)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/Cargo.toml
new file mode 100644
index 000000000..828efee3a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo_common_metadata_pass_publish_empty"
+version = "0.1.0"
+publish = []
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs
new file mode 100644
index 000000000..27841e18a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=cargo_common_metadata
+#![warn(clippy::cargo_common_metadata)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/Cargo.toml
new file mode 100644
index 000000000..45a5bf7c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "cargo_common_metadata_pass_publish_false"
+version = "0.1.0"
+publish = false
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs
new file mode 100644
index 000000000..27841e18a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=cargo_common_metadata
+#![warn(clippy::cargo_common_metadata)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/Cargo.toml
new file mode 100644
index 000000000..946d1b366
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "fail-both-diff"
+version = "0.1.0"
+rust-version = "1.56"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/clippy.toml
new file mode 100644
index 000000000..abe19b3a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.59"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.stderr
new file mode 100644
index 000000000..9a7d802dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_diff/src/main.stderr
@@ -0,0 +1,16 @@
+warning: the MSRV in `clippy.toml` and `Cargo.toml` differ; using `1.59.0` from `clippy.toml`
+
+error: unnecessary structure name repetition
+ --> $DIR/main.rs:6:21
+ |
+LL | pub fn bar() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+note: the lint level is defined here
+ --> $DIR/main.rs:1:9
+ |
+LL | #![deny(clippy::use_self)]
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error; 1 warning emitted
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/Cargo.toml
new file mode 100644
index 000000000..46b92a105
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "fail-both-same"
+version = "0.1.0"
+rust-version = "1.57.0"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/clippy.toml
new file mode 100644
index 000000000..5cccb362c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.57"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.stderr
new file mode 100644
index 000000000..a280e1bac
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_both_same/src/main.stderr
@@ -0,0 +1,14 @@
+error: unnecessary structure name repetition
+ --> $DIR/main.rs:6:21
+ |
+LL | pub fn bar() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+note: the lint level is defined here
+ --> $DIR/main.rs:1:9
+ |
+LL | #![deny(clippy::use_self)]
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/Cargo.toml
new file mode 100644
index 000000000..189cc9f68
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "fail-cargo"
+version = "0.1.0"
+rust-version = "1.56.1"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.stderr
new file mode 100644
index 000000000..a280e1bac
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_cargo/src/main.stderr
@@ -0,0 +1,14 @@
+error: unnecessary structure name repetition
+ --> $DIR/main.rs:6:21
+ |
+LL | pub fn bar() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+note: the lint level is defined here
+ --> $DIR/main.rs:1:9
+ |
+LL | #![deny(clippy::use_self)]
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/Cargo.toml
new file mode 100644
index 000000000..bdb7f261d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "fail-clippy"
+version = "0.1.0"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/clippy.toml
new file mode 100644
index 000000000..ddbdbc1fa
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.58"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.stderr
new file mode 100644
index 000000000..a280e1bac
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_clippy/src/main.stderr
@@ -0,0 +1,14 @@
+error: unnecessary structure name repetition
+ --> $DIR/main.rs:6:21
+ |
+LL | pub fn bar() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+note: the lint level is defined here
+ --> $DIR/main.rs:1:9
+ |
+LL | #![deny(clippy::use_self)]
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/Cargo.toml
new file mode 100644
index 000000000..84448ea41
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "fail-file-attr"
+version = "0.1.0"
+rust-version = "1.13"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/clippy.toml
new file mode 100644
index 000000000..ea5d80659
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.13.0"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.rs
new file mode 100644
index 000000000..bcbffa82a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.rs
@@ -0,0 +1,16 @@
+// FIXME: this should produce a warning, because the attribute says 1.58 and the Cargo.toml file
+// says 1.13
+
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.58.0"]
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.stderr
new file mode 100644
index 000000000..88f6e0092
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/fail_file_attr/src/main.stderr
@@ -0,0 +1,14 @@
+error: unnecessary structure name repetition
+ --> $DIR/main.rs:11:21
+ |
+LL | pub fn bar() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+note: the lint level is defined here
+ --> $DIR/main.rs:6:9
+ |
+LL | #![deny(clippy::use_self)]
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/Cargo.toml
new file mode 100644
index 000000000..809c0e748
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "pass-both-same"
+version = "0.1.0"
+rust-version = "1.13.0"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/clippy.toml
new file mode 100644
index 000000000..5e8e48b63
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.13"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_both_same/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/Cargo.toml
new file mode 100644
index 000000000..32d404f84
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "pass-cargo"
+version = "0.1.0"
+rust-version = "1.13.0"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_cargo/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/Cargo.toml
new file mode 100644
index 000000000..cc937d6e6
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "pass-clippy"
+version = "0.1.0"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/clippy.toml
new file mode 100644
index 000000000..5e8e48b63
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.13"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_clippy/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/Cargo.toml
new file mode 100644
index 000000000..8ef689880
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "pass-file-attr"
+version = "0.1.0"
+rust-version = "1.59"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/src/main.rs
new file mode 100644
index 000000000..27fe4771d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/pass_file_attr/src/main.rs
@@ -0,0 +1,13 @@
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.13.0"]
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/Cargo.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/Cargo.toml
new file mode 100644
index 000000000..e9f94594f
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "warn-both-diff"
+version = "0.1.0"
+rust-version = "1.56.0"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/clippy.toml b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/clippy.toml
new file mode 100644
index 000000000..5e8e48b63
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.13"
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.rs
new file mode 100644
index 000000000..5b91d5508
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.rs
@@ -0,0 +1,11 @@
+#![deny(clippy::use_self)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn bar() -> Foo {
+ Foo
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.stderr b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.stderr
new file mode 100644
index 000000000..eeae5b7b2
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/cargo_rust_version/warn_both_diff/src/main.stderr
@@ -0,0 +1,4 @@
+warning: the MSRV in `clippy.toml` and `Cargo.toml` differ; using `1.13.0` from `clippy.toml`
+
+warning: 1 warning emitted
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/Cargo.toml b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/Cargo.toml
new file mode 100644
index 000000000..bf3c817de
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/Cargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "duplicate_mod"
+edition = "2021"
+publish = false
+version = "0.1.0"
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/a.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/a.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/a.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/b.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/b.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/b.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/c.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/c.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/c.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/d.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/d.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/d.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/from_other_module.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/from_other_module.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/from_other_module.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.rs
new file mode 100644
index 000000000..6478e65ac
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.rs
@@ -0,0 +1,28 @@
+#![feature(lint_reasons)]
+
+mod a;
+
+mod b;
+#[path = "b.rs"]
+mod b2;
+
+mod c;
+#[path = "c.rs"]
+mod c2;
+#[path = "c.rs"]
+mod c3;
+
+mod from_other_module;
+mod other_module;
+
+mod d;
+#[path = "d.rs"]
+mod d2;
+#[path = "d.rs"]
+#[expect(clippy::duplicate_mod)]
+mod d3;
+#[path = "d.rs"]
+#[allow(clippy::duplicate_mod)]
+mod d4;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.stderr b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.stderr
new file mode 100644
index 000000000..b450a2b18
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/main.stderr
@@ -0,0 +1,53 @@
+error: file is loaded as a module multiple times: `$DIR/b.rs`
+ --> $DIR/main.rs:5:1
+ |
+LL | mod b;
+ | ^^^^^^ first loaded here
+LL | / #[path = "b.rs"]
+LL | | mod b2;
+ | |_______^ loaded again here
+ |
+ = note: `-D clippy::duplicate-mod` implied by `-D warnings`
+ = help: replace all but one `mod` item with `use` items
+
+error: file is loaded as a module multiple times: `$DIR/c.rs`
+ --> $DIR/main.rs:9:1
+ |
+LL | mod c;
+ | ^^^^^^ first loaded here
+LL | / #[path = "c.rs"]
+LL | | mod c2;
+ | |_______^ loaded again here
+LL | / #[path = "c.rs"]
+LL | | mod c3;
+ | |_______^ loaded again here
+ |
+ = help: replace all but one `mod` item with `use` items
+
+error: file is loaded as a module multiple times: `$DIR/d.rs`
+ --> $DIR/main.rs:18:1
+ |
+LL | mod d;
+ | ^^^^^^ first loaded here
+LL | / #[path = "d.rs"]
+LL | | mod d2;
+ | |_______^ loaded again here
+ |
+ = help: replace all but one `mod` item with `use` items
+
+error: file is loaded as a module multiple times: `$DIR/from_other_module.rs`
+ --> $DIR/main.rs:15:1
+ |
+LL | mod from_other_module;
+ | ^^^^^^^^^^^^^^^^^^^^^^ first loaded here
+ |
+ ::: $DIR/other_module/mod.rs:1:1
+ |
+LL | / #[path = "../from_other_module.rs"]
+LL | | mod m;
+ | |______^ loaded again here
+ |
+ = help: replace all but one `mod` item with `use` items
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/other_module/mod.rs b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/other_module/mod.rs
new file mode 100644
index 000000000..36ce7286a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/duplicate_mod/fail/src/other_module/mod.rs
@@ -0,0 +1,2 @@
+#[path = "../from_other_module.rs"]
+mod m;
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/fail/Cargo.toml b/src/tools/clippy/tests/ui-cargo/feature_name/fail/Cargo.toml
new file mode 100644
index 000000000..97d51462a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/fail/Cargo.toml
@@ -0,0 +1,21 @@
+
+# Content that triggers the lint goes here
+
+[package]
+name = "feature_name"
+version = "0.1.0"
+publish = false
+
+[workspace]
+
+[features]
+use-qwq = []
+use_qwq = []
+with-owo = []
+with_owo = []
+qvq-support = []
+qvq_support = []
+no-qaq = []
+no_qaq = []
+not-orz = []
+not_orz = []
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs
new file mode 100644
index 000000000..64f01a98c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs
@@ -0,0 +1,7 @@
+// compile-flags: --crate-name=feature_name
+#![warn(clippy::redundant_feature_names)]
+#![warn(clippy::negative_feature_names)]
+
+fn main() {
+ // test code goes here
+}
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.stderr b/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.stderr
new file mode 100644
index 000000000..b9e6cb49b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.stderr
@@ -0,0 +1,44 @@
+error: the "no-" prefix in the feature name "no-qaq" is negative
+ |
+ = note: `-D clippy::negative-feature-names` implied by `-D warnings`
+ = help: consider renaming the feature to "qaq", but make sure the feature adds functionality
+
+error: the "no_" prefix in the feature name "no_qaq" is negative
+ |
+ = help: consider renaming the feature to "qaq", but make sure the feature adds functionality
+
+error: the "not-" prefix in the feature name "not-orz" is negative
+ |
+ = help: consider renaming the feature to "orz", but make sure the feature adds functionality
+
+error: the "not_" prefix in the feature name "not_orz" is negative
+ |
+ = help: consider renaming the feature to "orz", but make sure the feature adds functionality
+
+error: the "-support" suffix in the feature name "qvq-support" is redundant
+ |
+ = note: `-D clippy::redundant-feature-names` implied by `-D warnings`
+ = help: consider renaming the feature to "qvq"
+
+error: the "_support" suffix in the feature name "qvq_support" is redundant
+ |
+ = help: consider renaming the feature to "qvq"
+
+error: the "use-" prefix in the feature name "use-qwq" is redundant
+ |
+ = help: consider renaming the feature to "qwq"
+
+error: the "use_" prefix in the feature name "use_qwq" is redundant
+ |
+ = help: consider renaming the feature to "qwq"
+
+error: the "with-" prefix in the feature name "with-owo" is redundant
+ |
+ = help: consider renaming the feature to "owo"
+
+error: the "with_" prefix in the feature name "with_owo" is redundant
+ |
+ = help: consider renaming the feature to "owo"
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/pass/Cargo.toml b/src/tools/clippy/tests/ui-cargo/feature_name/pass/Cargo.toml
new file mode 100644
index 000000000..cf947312b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/pass/Cargo.toml
@@ -0,0 +1,9 @@
+
+# This file should not trigger the lint
+
+[package]
+name = "feature_name"
+version = "0.1.0"
+publish = false
+
+[workspace]
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs
new file mode 100644
index 000000000..64f01a98c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs
@@ -0,0 +1,7 @@
+// compile-flags: --crate-name=feature_name
+#![warn(clippy::redundant_feature_names)]
+#![warn(clippy::negative_feature_names)]
+
+fn main() {
+ // test code goes here
+}
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/Cargo.toml b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/Cargo.toml
new file mode 100644
index 000000000..b3d36a9fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "fail-mod"
+version = "0.1.0"
+edition = "2018"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner.rs
new file mode 100644
index 000000000..91cd540a2
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner.rs
@@ -0,0 +1 @@
+pub mod stuff;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff.rs
new file mode 100644
index 000000000..7713fa9d3
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff.rs
@@ -0,0 +1,3 @@
+pub mod most;
+
+pub struct Inner;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff/most.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff/most.rs
new file mode 100644
index 000000000..5a5eaf967
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/inner/stuff/most.rs
@@ -0,0 +1 @@
+pub struct Snarks;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/mod.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/mod.rs
new file mode 100644
index 000000000..a12734db7
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/bad/mod.rs
@@ -0,0 +1,3 @@
+pub mod inner;
+
+pub struct Thing;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.rs
new file mode 100644
index 000000000..3e985d4e9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.rs
@@ -0,0 +1,9 @@
+#![warn(clippy::self_named_module_files)]
+
+mod bad;
+
+fn main() {
+ let _ = bad::Thing;
+ let _ = bad::inner::stuff::Inner;
+ let _ = bad::inner::stuff::most::Snarks;
+}
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.stderr b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.stderr
new file mode 100644
index 000000000..e2010e998
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod/src/main.stderr
@@ -0,0 +1,19 @@
+error: `mod.rs` files are required, found `bad/inner.rs`
+ --> $DIR/bad/inner.rs:1:1
+ |
+LL | pub mod stuff;
+ | ^
+ |
+ = note: `-D clippy::self-named-module-files` implied by `-D warnings`
+ = help: move `bad/inner.rs` to `bad/inner/mod.rs`
+
+error: `mod.rs` files are required, found `bad/inner/stuff.rs`
+ --> $DIR/bad/inner/stuff.rs:1:1
+ |
+LL | pub mod most;
+ | ^
+ |
+ = help: move `bad/inner/stuff.rs` to `bad/inner/stuff/mod.rs`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.toml b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.toml
new file mode 100644
index 000000000..3610d13c1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "fail-no-mod"
+version = "0.1.0"
+edition = "2018"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/bad/mod.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/bad/mod.rs
new file mode 100644
index 000000000..f19ab10d5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/bad/mod.rs
@@ -0,0 +1 @@
+pub struct Thing;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.rs
new file mode 100644
index 000000000..c6e9045b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::mod_module_files)]
+
+mod bad;
+
+fn main() {
+ let _ = bad::Thing;
+}
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.stderr b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.stderr
new file mode 100644
index 000000000..f91940209
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_no_mod/src/main.stderr
@@ -0,0 +1,11 @@
+error: `mod.rs` files are not allowed, found `bad/mod.rs`
+ --> $DIR/bad/mod.rs:1:1
+ |
+LL | pub struct Thing;
+ | ^
+ |
+ = note: `-D clippy::mod-module-files` implied by `-D warnings`
+ = help: move `bad/mod.rs` to `bad.rs`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/Cargo.toml b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/Cargo.toml
new file mode 100644
index 000000000..1c2991695
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "pass-mod"
+version = "0.1.0"
+edition = "2018"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/bad/mod.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/bad/mod.rs
new file mode 100644
index 000000000..f19ab10d5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/bad/mod.rs
@@ -0,0 +1 @@
+pub struct Thing;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/main.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/main.rs
new file mode 100644
index 000000000..9e08715fc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/main.rs
@@ -0,0 +1,10 @@
+#![warn(clippy::self_named_module_files)]
+
+mod bad;
+mod more;
+
+fn main() {
+ let _ = bad::Thing;
+ let _ = more::foo::Foo;
+ let _ = more::inner::Inner;
+}
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/foo.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/foo.rs
new file mode 100644
index 000000000..4a835673a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/foo.rs
@@ -0,0 +1 @@
+pub struct Foo;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/inner/mod.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/inner/mod.rs
new file mode 100644
index 000000000..aa84f78cc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/inner/mod.rs
@@ -0,0 +1 @@
+pub struct Inner;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/mod.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/mod.rs
new file mode 100644
index 000000000..d79569f78
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_mod/src/more/mod.rs
@@ -0,0 +1,2 @@
+pub mod foo;
+pub mod inner;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/Cargo.toml b/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/Cargo.toml
new file mode 100644
index 000000000..4180aaf51
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "pass-no-mod"
+version = "0.1.0"
+edition = "2018"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/good.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/good.rs
new file mode 100644
index 000000000..f19ab10d5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/good.rs
@@ -0,0 +1 @@
+pub struct Thing;
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/main.rs b/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/main.rs
new file mode 100644
index 000000000..50211a340
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/module_style/pass_no_mod/src/main.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::mod_module_files)]
+
+mod good;
+
+fn main() {
+ let _ = good::Thing;
+}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/Cargo.toml b/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/Cargo.toml
new file mode 100644
index 000000000..7eb56cc4e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "no_warn"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/clippy.toml b/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/clippy.toml
new file mode 100644
index 000000000..cda8d17ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/clippy.toml
@@ -0,0 +1 @@
+avoid-breaking-exported-api = false
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/src/main.rs
new file mode 100644
index 000000000..e7a11a969
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/no_warn/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/.clippy.toml b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/.clippy.toml
new file mode 100644
index 000000000..cda8d17ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/.clippy.toml
@@ -0,0 +1 @@
+avoid-breaking-exported-api = false
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/Cargo.toml b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/Cargo.toml
new file mode 100644
index 000000000..b4847d070
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "warn"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/clippy.toml b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/clippy.toml
new file mode 100644
index 000000000..cda8d17ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/clippy.toml
@@ -0,0 +1 @@
+avoid-breaking-exported-api = false
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.rs
new file mode 100644
index 000000000..e7a11a969
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr
new file mode 100644
index 000000000..98697e001
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_config_files/warn/src/main.stderr
@@ -0,0 +1,2 @@
+Using config file `$SRC_DIR/.clippy.toml`
+Warning: `$SRC_DIR/clippy.toml` will be ignored.
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/Cargo.toml b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/Cargo.toml
new file mode 100644
index 000000000..278bebbbd
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/Cargo.toml
@@ -0,0 +1,19 @@
+# Should not lint for dev or build dependencies. See issue 5041.
+
+[package]
+name = "multiple_crate_versions"
+version = "0.1.0"
+publish = false
+
+[workspace]
+
+# One of the versions of winapi is only a dev dependency: allowed
+[dependencies]
+ctrlc = "=3.1.0"
+[dev-dependencies]
+ansi_term = "=0.11.0"
+
+# Both versions of winapi are a build dependency: allowed
+[build-dependencies]
+ctrlc = "=3.1.0"
+ansi_term = "=0.11.0"
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs
new file mode 100644
index 000000000..1b2d3ec94
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=multiple_crate_versions
+#![warn(clippy::multiple_crate_versions)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.lock b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.lock
new file mode 100644
index 000000000..7e96aa36f
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.lock
@@ -0,0 +1,109 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "ansi_term"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
+dependencies = [
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "bitflags"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
+
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+
+[[package]]
+name = "ctrlc"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "653abc99aa905f693d89df4797fadc08085baee379db92be9f2496cefe8a6f2c"
+dependencies = [
+ "kernel32-sys",
+ "nix",
+ "winapi 0.2.8",
+]
+
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.71"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9457b06509d27052635f90d6466700c65095fdf75409b3fbdd903e988b886f49"
+
+[[package]]
+name = "multiple_crate_versions"
+version = "0.1.0"
+dependencies = [
+ "ansi_term",
+ "ctrlc",
+]
+
+[[package]]
+name = "nix"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2c5afeb0198ec7be8569d666644b574345aad2e95a53baf3a532da3e0f3fb32"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "libc",
+ "void",
+]
+
+[[package]]
+name = "void"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
+
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.toml b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.toml
new file mode 100644
index 000000000..4f97b0113
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "multiple_crate_versions"
+version = "0.1.0"
+publish = false
+
+[workspace]
+
+[dependencies]
+ctrlc = "=3.1.0"
+ansi_term = "=0.11.0"
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs
new file mode 100644
index 000000000..1b2d3ec94
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=multiple_crate_versions
+#![warn(clippy::multiple_crate_versions)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.stderr b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.stderr
new file mode 100644
index 000000000..f3113e093
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.stderr
@@ -0,0 +1,6 @@
+error: multiple versions for dependency `winapi`: 0.2.8, 0.3.9
+ |
+ = note: `-D clippy::multiple-crate-versions` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/Cargo.toml b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/Cargo.toml
new file mode 100644
index 000000000..6c46571c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "multiple_crate_versions"
+version = "0.1.0"
+publish = false
+
+[workspace]
+
+[dependencies]
+regex = "1.3.7"
+serde = "1.0.110"
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs
new file mode 100644
index 000000000..1b2d3ec94
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=multiple_crate_versions
+#![warn(clippy::multiple_crate_versions)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/update-all-references.sh b/src/tools/clippy/tests/ui-cargo/update-all-references.sh
new file mode 100755
index 000000000..4391499a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/update-all-references.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo "Please use 'cargo dev bless' instead."
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.toml b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.toml
new file mode 100644
index 000000000..3e1a02cbb
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "wildcard_dependencies"
+version = "0.1.0"
+publish = false
+
+[workspace]
+
+[dependencies]
+regex = "*"
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs
new file mode 100644
index 000000000..581babfea
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=wildcard_dependencies
+#![warn(clippy::wildcard_dependencies)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.stderr b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.stderr
new file mode 100644
index 000000000..9e65d2f99
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.stderr
@@ -0,0 +1,6 @@
+error: wildcard dependency for `regex`
+ |
+ = note: `-D clippy::wildcard-dependencies` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/Cargo.toml b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/Cargo.toml
new file mode 100644
index 000000000..f844cab09
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "wildcard_dependencies"
+version = "0.1.0"
+publish = false
+
+[workspace]
+
+[dependencies]
+regex = "1"
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs
new file mode 100644
index 000000000..581babfea
--- /dev/null
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs
@@ -0,0 +1,4 @@
+// compile-flags: --crate-name=wildcard_dependencies
+#![warn(clippy::wildcard_dependencies)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs
new file mode 100644
index 000000000..31acac89c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs
@@ -0,0 +1,87 @@
+#![deny(clippy::internal)]
+#![feature(rustc_private)]
+
+#[macro_use]
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+extern crate rustc_lint;
+
+///////////////////////
+// Valid descriptions
+///////////////////////
+declare_tool_lint! {
+ #[clippy::version = "pre 1.29.0"]
+ pub clippy::VALID_ONE,
+ Warn,
+ "One",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ #[clippy::version = "1.29.0"]
+ pub clippy::VALID_TWO,
+ Warn,
+ "Two",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ #[clippy::version = "1.59.0"]
+ pub clippy::VALID_THREE,
+ Warn,
+ "Three",
+ report_in_external_macro: true
+}
+
+///////////////////////
+// Invalid attributes
+///////////////////////
+declare_tool_lint! {
+ #[clippy::version = "1.2.3.4.5.6"]
+ pub clippy::INVALID_ONE,
+ Warn,
+ "One",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ #[clippy::version = "I'm a string"]
+ pub clippy::INVALID_TWO,
+ Warn,
+ "Two",
+ report_in_external_macro: true
+}
+
+///////////////////////
+// Missing attribute test
+///////////////////////
+declare_tool_lint! {
+ #[clippy::version]
+ pub clippy::MISSING_ATTRIBUTE_ONE,
+ Warn,
+ "Two",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ pub clippy::MISSING_ATTRIBUTE_TWO,
+ Warn,
+ "Two",
+ report_in_external_macro: true
+}
+
+#[allow(clippy::missing_clippy_version_attribute)]
+mod internal_clippy_lints {
+ declare_tool_lint! {
+ pub clippy::ALLOW_MISSING_ATTRIBUTE_ONE,
+ Warn,
+ "Two",
+ report_in_external_macro: true
+ }
+}
+
+use crate::internal_clippy_lints::ALLOW_MISSING_ATTRIBUTE_ONE;
+declare_lint_pass!(Pass2 => [VALID_ONE, VALID_TWO, VALID_THREE, INVALID_ONE, INVALID_TWO, MISSING_ATTRIBUTE_ONE, MISSING_ATTRIBUTE_TWO, ALLOW_MISSING_ATTRIBUTE_ONE]);
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr
new file mode 100644
index 000000000..533107588
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr
@@ -0,0 +1,68 @@
+error: this item has an invalid `clippy::version` attribute
+ --> $DIR/check_clippy_version_attribute.rs:40:1
+ |
+LL | / declare_tool_lint! {
+LL | | #[clippy::version = "1.2.3.4.5.6"]
+LL | | pub clippy::INVALID_ONE,
+LL | | Warn,
+LL | | "One",
+LL | | report_in_external_macro: true
+LL | | }
+ | |_^
+ |
+note: the lint level is defined here
+ --> $DIR/check_clippy_version_attribute.rs:1:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::invalid_clippy_version_attribute)]` implied by `#[deny(clippy::internal)]`
+ = help: please use a valid sematic version, see `doc/adding_lints.md`
+ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this item has an invalid `clippy::version` attribute
+ --> $DIR/check_clippy_version_attribute.rs:48:1
+ |
+LL | / declare_tool_lint! {
+LL | | #[clippy::version = "I'm a string"]
+LL | | pub clippy::INVALID_TWO,
+LL | | Warn,
+LL | | "Two",
+LL | | report_in_external_macro: true
+LL | | }
+ | |_^
+ |
+ = help: please use a valid sematic version, see `doc/adding_lints.md`
+ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this lint is missing the `clippy::version` attribute or version value
+ --> $DIR/check_clippy_version_attribute.rs:59:1
+ |
+LL | / declare_tool_lint! {
+LL | | #[clippy::version]
+LL | | pub clippy::MISSING_ATTRIBUTE_ONE,
+LL | | Warn,
+LL | | "Two",
+LL | | report_in_external_macro: true
+LL | | }
+ | |_^
+ |
+ = note: `#[deny(clippy::missing_clippy_version_attribute)]` implied by `#[deny(clippy::internal)]`
+ = help: please use a `clippy::version` attribute, see `doc/adding_lints.md`
+ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this lint is missing the `clippy::version` attribute or version value
+ --> $DIR/check_clippy_version_attribute.rs:67:1
+ |
+LL | / declare_tool_lint! {
+LL | | pub clippy::MISSING_ATTRIBUTE_TWO,
+LL | | Warn,
+LL | | "Two",
+LL | | report_in_external_macro: true
+LL | | }
+ | |_^
+ |
+ = help: please use a `clippy::version` attribute, see `doc/adding_lints.md`
+ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed
new file mode 100644
index 000000000..9f299d7de
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed
@@ -0,0 +1,57 @@
+// run-rustfix
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate clippy_utils;
+extern crate rustc_ast;
+extern crate rustc_errors;
+extern crate rustc_lint;
+extern crate rustc_session;
+extern crate rustc_span;
+
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then};
+use rustc_ast::ast::Expr;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT,
+ Warn,
+ "",
+ report_in_external_macro: true
+}
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+impl EarlyLintPass for Pass {
+ fn check_expr(&mut self, cx: &EarlyContext, expr: &Expr) {
+ let lint_msg = "lint message";
+ let help_msg = "help message";
+ let note_msg = "note message";
+ let sugg = "new_call()";
+ let predicate = true;
+
+ span_lint_and_sugg(cx, TEST_LINT, expr.span, lint_msg, help_msg, sugg.to_string(), Applicability::MachineApplicable);
+ span_lint_and_help(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), help_msg);
+ span_lint_and_help(cx, TEST_LINT, expr.span, lint_msg, None, help_msg);
+ span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), note_msg);
+ span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, None, note_msg);
+
+ // This expr shouldn't trigger this lint.
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.note(note_msg);
+ if predicate {
+ db.note(note_msg);
+ }
+ });
+
+ // Issue #8798
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.help(help_msg).help(help_msg);
+ });
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs
new file mode 100644
index 000000000..2b113f555
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs
@@ -0,0 +1,67 @@
+// run-rustfix
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate clippy_utils;
+extern crate rustc_ast;
+extern crate rustc_errors;
+extern crate rustc_lint;
+extern crate rustc_session;
+extern crate rustc_span;
+
+use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then};
+use rustc_ast::ast::Expr;
+use rustc_errors::Applicability;
+use rustc_lint::{EarlyContext, EarlyLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT,
+ Warn,
+ "",
+ report_in_external_macro: true
+}
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+impl EarlyLintPass for Pass {
+ fn check_expr(&mut self, cx: &EarlyContext, expr: &Expr) {
+ let lint_msg = "lint message";
+ let help_msg = "help message";
+ let note_msg = "note message";
+ let sugg = "new_call()";
+ let predicate = true;
+
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.span_suggestion(expr.span, help_msg, sugg.to_string(), Applicability::MachineApplicable);
+ });
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.span_help(expr.span, help_msg);
+ });
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.help(help_msg);
+ });
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.span_note(expr.span, note_msg);
+ });
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.note(note_msg);
+ });
+
+ // This expr shouldn't trigger this lint.
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.note(note_msg);
+ if predicate {
+ db.note(note_msg);
+ }
+ });
+
+ // Issue #8798
+ span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+ db.help(help_msg).help(help_msg);
+ });
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr
new file mode 100644
index 000000000..0852fe65a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr
@@ -0,0 +1,49 @@
+error: this call is collapsible
+ --> $DIR/collapsible_span_lint_calls.rs:36:9
+ |
+LL | / span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+LL | | db.span_suggestion(expr.span, help_msg, sugg.to_string(), Applicability::MachineApplicable);
+LL | | });
+ | |__________^ help: collapse into: `span_lint_and_sugg(cx, TEST_LINT, expr.span, lint_msg, help_msg, sugg.to_string(), Applicability::MachineApplicable)`
+ |
+note: the lint level is defined here
+ --> $DIR/collapsible_span_lint_calls.rs:2:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::collapsible_span_lint_calls)]` implied by `#[deny(clippy::internal)]`
+
+error: this call is collapsible
+ --> $DIR/collapsible_span_lint_calls.rs:39:9
+ |
+LL | / span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+LL | | db.span_help(expr.span, help_msg);
+LL | | });
+ | |__________^ help: collapse into: `span_lint_and_help(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), help_msg)`
+
+error: this call is collapsible
+ --> $DIR/collapsible_span_lint_calls.rs:42:9
+ |
+LL | / span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+LL | | db.help(help_msg);
+LL | | });
+ | |__________^ help: collapse into: `span_lint_and_help(cx, TEST_LINT, expr.span, lint_msg, None, help_msg)`
+
+error: this call is collapsible
+ --> $DIR/collapsible_span_lint_calls.rs:45:9
+ |
+LL | / span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+LL | | db.span_note(expr.span, note_msg);
+LL | | });
+ | |__________^ help: collapse into: `span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), note_msg)`
+
+error: this call is collapsible
+ --> $DIR/collapsible_span_lint_calls.rs:48:9
+ |
+LL | / span_lint_and_then(cx, TEST_LINT, expr.span, lint_msg, |db| {
+LL | | db.note(note_msg);
+LL | | });
+ | |__________^ help: collapse into: `span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, None, note_msg)`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/custom_ice_message.rs b/src/tools/clippy/tests/ui-internal/custom_ice_message.rs
new file mode 100644
index 000000000..5057a0183
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/custom_ice_message.rs
@@ -0,0 +1,11 @@
+// rustc-env:RUST_BACKTRACE=0
+// normalize-stderr-test: "Clippy version: .*" -> "Clippy version: foo"
+// normalize-stderr-test: "internal_lints.rs:\d*:\d*" -> "internal_lints.rs"
+// normalize-stderr-test: "', .*clippy_lints" -> "', clippy_lints"
+
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+
+fn it_looks_like_you_are_trying_to_kill_clippy() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
new file mode 100644
index 000000000..a1b8e2ee1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
@@ -0,0 +1,13 @@
+thread 'rustc' panicked at 'Would you like some help with that?', clippy_lints/src/utils/internal_lints.rs
+note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
+
+error: internal compiler error: unexpected panic
+
+note: the compiler unexpectedly panicked. this is a bug.
+
+note: we would appreciate a bug report: https://github.com/rust-lang/rust-clippy/issues/new
+
+note: Clippy version: foo
+
+query stack during panic:
+end of query stack
diff --git a/src/tools/clippy/tests/ui-internal/default_deprecation_reason.rs b/src/tools/clippy/tests/ui-internal/default_deprecation_reason.rs
new file mode 100644
index 000000000..c8961d5e1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/default_deprecation_reason.rs
@@ -0,0 +1,30 @@
+#![deny(clippy::internal)]
+#![feature(rustc_private)]
+
+#[macro_use]
+extern crate clippy_lints;
+use clippy_lints::deprecated_lints::ClippyDeprecatedLint;
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// TODO
+ #[clippy::version = "1.63.0"]
+ pub COOL_LINT_DEFAULT,
+ "default deprecation note"
+}
+
+declare_deprecated_lint! {
+ /// ### What it does
+ /// Nothing. This lint has been deprecated.
+ ///
+ /// ### Deprecation reason
+ /// This lint has been replaced by `cooler_lint`
+ #[clippy::version = "1.63.0"]
+ pub COOL_LINT,
+ "this lint has been replaced by `cooler_lint`"
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr b/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr
new file mode 100644
index 000000000..ca26b649f
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/default_deprecation_reason.stderr
@@ -0,0 +1,22 @@
+error: the lint `COOL_LINT_DEFAULT` has the default deprecation reason
+ --> $DIR/default_deprecation_reason.rs:8:1
+ |
+LL | / declare_deprecated_lint! {
+LL | | /// ### What it does
+LL | | /// Nothing. This lint has been deprecated.
+LL | | ///
+... |
+LL | | "default deprecation note"
+LL | | }
+ | |_^
+ |
+note: the lint level is defined here
+ --> $DIR/default_deprecation_reason.rs:1:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::default_deprecation_reason)]` implied by `#[deny(clippy::internal)]`
+ = note: this error originates in the macro `declare_deprecated_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-internal/default_lint.rs b/src/tools/clippy/tests/ui-internal/default_lint.rs
new file mode 100644
index 000000000..da29aedb2
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/default_lint.rs
@@ -0,0 +1,28 @@
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+#[macro_use]
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+extern crate rustc_lint;
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT,
+ Warn,
+ "",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT_DEFAULT,
+ Warn,
+ "default lint description",
+ report_in_external_macro: true
+}
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+declare_lint_pass!(Pass2 => [TEST_LINT_DEFAULT]);
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/default_lint.stderr b/src/tools/clippy/tests/ui-internal/default_lint.stderr
new file mode 100644
index 000000000..8961bd462
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/default_lint.stderr
@@ -0,0 +1,21 @@
+error: the lint `TEST_LINT_DEFAULT` has the default lint description
+ --> $DIR/default_lint.rs:18:1
+ |
+LL | / declare_tool_lint! {
+LL | | pub clippy::TEST_LINT_DEFAULT,
+LL | | Warn,
+LL | | "default lint description",
+LL | | report_in_external_macro: true
+LL | | }
+ | |_^
+ |
+note: the lint level is defined here
+ --> $DIR/default_lint.rs:1:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::default_lint)]` implied by `#[deny(clippy::internal)]`
+ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-internal/if_chain_style.rs b/src/tools/clippy/tests/ui-internal/if_chain_style.rs
new file mode 100644
index 000000000..b0d89e038
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/if_chain_style.rs
@@ -0,0 +1,92 @@
+#![warn(clippy::if_chain_style)]
+#![allow(clippy::no_effect, clippy::nonminimal_bool, clippy::missing_clippy_version_attribute)]
+
+extern crate if_chain;
+
+use if_chain::if_chain;
+
+fn main() {
+ if true {
+ let x = "";
+ // `if_chain!` inside `if`
+ if_chain! {
+ if true;
+ if true;
+ then {}
+ }
+ }
+ if_chain! {
+ if true
+ // multi-line AND'ed conditions
+ && false;
+ if let Some(1) = Some(1);
+ // `let` before `then`
+ let x = "";
+ then {
+ ();
+ }
+ }
+ if_chain! {
+ // single `if` condition
+ if true;
+ then {
+ let x = "";
+ // nested if
+ if true {}
+ }
+ }
+ if_chain! {
+ // starts with `let ..`
+ let x = "";
+ if let Some(1) = Some(1);
+ then {
+ let x = "";
+ let x = "";
+ // nested if_chain!
+ if_chain! {
+ if true;
+ if true;
+ then {}
+ }
+ }
+ }
+}
+
+fn negative() {
+ if true {
+ ();
+ if_chain! {
+ if true;
+ if true;
+ then { (); }
+ }
+ }
+ if_chain! {
+ if true;
+ let x = "";
+ if true;
+ then { (); }
+ }
+ if_chain! {
+ if true;
+ if true;
+ then {
+ if true { 1 } else { 2 }
+ } else {
+ 3
+ }
+ };
+ if true {
+ if_chain! {
+ if true;
+ if true;
+ then {}
+ }
+ } else if false {
+ if_chain! {
+ if true;
+ if false;
+ then {}
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui-internal/if_chain_style.stderr b/src/tools/clippy/tests/ui-internal/if_chain_style.stderr
new file mode 100644
index 000000000..24106510e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/if_chain_style.stderr
@@ -0,0 +1,85 @@
+error: this `if` can be part of the inner `if_chain!`
+ --> $DIR/if_chain_style.rs:9:5
+ |
+LL | / if true {
+LL | | let x = "";
+LL | | // `if_chain!` inside `if`
+LL | | if_chain! {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::if-chain-style` implied by `-D warnings`
+help: this `let` statement can also be in the `if_chain!`
+ --> $DIR/if_chain_style.rs:10:9
+ |
+LL | let x = "";
+ | ^^^^^^^^^^^
+
+error: `if a && b;` should be `if a; if b;`
+ --> $DIR/if_chain_style.rs:19:12
+ |
+LL | if true
+ | ____________^
+LL | | // multi-line AND'ed conditions
+LL | | && false;
+ | |____________________^
+
+error: `let` expression should be inside `then { .. }`
+ --> $DIR/if_chain_style.rs:24:9
+ |
+LL | let x = "";
+ | ^^^^^^^^^^^
+
+error: this `if` can be part of the outer `if_chain!`
+ --> $DIR/if_chain_style.rs:35:13
+ |
+LL | if true {}
+ | ^^^^^^^^^^
+ |
+help: this `let` statement can also be in the `if_chain!`
+ --> $DIR/if_chain_style.rs:33:13
+ |
+LL | let x = "";
+ | ^^^^^^^^^^^
+
+error: `if_chain!` only has one `if`
+ --> $DIR/if_chain_style.rs:29:5
+ |
+LL | / if_chain! {
+LL | | // single `if` condition
+LL | | if true;
+LL | | then {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+ = note: this error originates in the macro `__if_chain` which comes from the expansion of the macro `if_chain` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: `let` expression should be above the `if_chain!`
+ --> $DIR/if_chain_style.rs:40:9
+ |
+LL | let x = "";
+ | ^^^^^^^^^^^
+
+error: this `if_chain!` can be merged with the outer `if_chain!`
+ --> $DIR/if_chain_style.rs:46:13
+ |
+LL | / if_chain! {
+LL | | if true;
+LL | | if true;
+LL | | then {}
+LL | | }
+ | |_____________^
+ |
+help: these `let` statements can also be in the `if_chain!`
+ --> $DIR/if_chain_style.rs:43:13
+ |
+LL | / let x = "";
+LL | | let x = "";
+ | |_______________________^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed
new file mode 100644
index 000000000..eaea218e1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed
@@ -0,0 +1,37 @@
+// run-rustfix
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute, clippy::let_unit_value)]
+#![feature(rustc_private)]
+
+extern crate rustc_span;
+
+use rustc_span::symbol::Symbol;
+
+macro_rules! sym {
+ ($tt:tt) => {
+ rustc_span::symbol::Symbol::intern(stringify!($tt))
+ };
+}
+
+fn main() {
+ // Direct use of Symbol::intern
+ let _ = rustc_span::sym::f32;
+
+ // Using a sym macro
+ let _ = rustc_span::sym::f32;
+
+ // Correct suggestion when symbol isn't stringified constant name
+ let _ = rustc_span::sym::proc_dash_macro;
+
+ // interning a keyword
+ let _ = rustc_span::symbol::kw::SelfLower;
+
+ // Interning a symbol that is not defined
+ let _ = Symbol::intern("xyz123");
+ let _ = sym!(xyz123);
+
+ // Using a different `intern` function
+ let _ = intern("f32");
+}
+
+fn intern(_: &str) {}
diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs
new file mode 100644
index 000000000..7efebb8fa
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs
@@ -0,0 +1,37 @@
+// run-rustfix
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute, clippy::let_unit_value)]
+#![feature(rustc_private)]
+
+extern crate rustc_span;
+
+use rustc_span::symbol::Symbol;
+
+macro_rules! sym {
+ ($tt:tt) => {
+ rustc_span::symbol::Symbol::intern(stringify!($tt))
+ };
+}
+
+fn main() {
+ // Direct use of Symbol::intern
+ let _ = Symbol::intern("f32");
+
+ // Using a sym macro
+ let _ = sym!(f32);
+
+ // Correct suggestion when symbol isn't stringified constant name
+ let _ = Symbol::intern("proc-macro");
+
+ // interning a keyword
+ let _ = Symbol::intern("self");
+
+ // Interning a symbol that is not defined
+ let _ = Symbol::intern("xyz123");
+ let _ = sym!(xyz123);
+
+ // Using a different `intern` function
+ let _ = intern("f32");
+}
+
+fn intern(_: &str) {}
diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr
new file mode 100644
index 000000000..4e99636e6
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr
@@ -0,0 +1,33 @@
+error: interning a defined symbol
+ --> $DIR/interning_defined_symbol.rs:18:13
+ |
+LL | let _ = Symbol::intern("f32");
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::sym::f32`
+ |
+note: the lint level is defined here
+ --> $DIR/interning_defined_symbol.rs:2:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::interning_defined_symbol)]` implied by `#[deny(clippy::internal)]`
+
+error: interning a defined symbol
+ --> $DIR/interning_defined_symbol.rs:21:13
+ |
+LL | let _ = sym!(f32);
+ | ^^^^^^^^^ help: try: `rustc_span::sym::f32`
+
+error: interning a defined symbol
+ --> $DIR/interning_defined_symbol.rs:24:13
+ |
+LL | let _ = Symbol::intern("proc-macro");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::sym::proc_dash_macro`
+
+error: interning a defined symbol
+ --> $DIR/interning_defined_symbol.rs:27:13
+ |
+LL | let _ = Symbol::intern("self");
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::symbol::kw::SelfLower`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed
new file mode 100644
index 000000000..900a8fffd
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed
@@ -0,0 +1,40 @@
+// run-rustfix
+
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate rustc_ast;
+extern crate rustc_hir;
+extern crate rustc_lint;
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+use clippy_utils::extract_msrv_attr;
+use rustc_hir::Expr;
+use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+
+declare_lint! {
+ pub TEST_LINT,
+ Warn,
+ ""
+}
+
+struct Pass {
+ msrv: Option<RustcVersion>,
+}
+
+impl_lint_pass!(Pass => [TEST_LINT]);
+
+impl LateLintPass<'_> for Pass {
+ extract_msrv_attr!(LateContext);
+ fn check_expr(&mut self, _: &LateContext<'_>, _: &Expr<'_>) {}
+}
+
+impl EarlyLintPass for Pass {
+ extract_msrv_attr!(EarlyContext);
+ fn check_expr(&mut self, _: &EarlyContext<'_>, _: &rustc_ast::Expr) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs
new file mode 100644
index 000000000..4bc8164db
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs
@@ -0,0 +1,38 @@
+// run-rustfix
+
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate rustc_ast;
+extern crate rustc_hir;
+extern crate rustc_lint;
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+use clippy_utils::extract_msrv_attr;
+use rustc_hir::Expr;
+use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass};
+use rustc_semver::RustcVersion;
+
+declare_lint! {
+ pub TEST_LINT,
+ Warn,
+ ""
+}
+
+struct Pass {
+ msrv: Option<RustcVersion>,
+}
+
+impl_lint_pass!(Pass => [TEST_LINT]);
+
+impl LateLintPass<'_> for Pass {
+ fn check_expr(&mut self, _: &LateContext<'_>, _: &Expr<'_>) {}
+}
+
+impl EarlyLintPass for Pass {
+ fn check_expr(&mut self, _: &EarlyContext<'_>, _: &rustc_ast::Expr) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr
new file mode 100644
index 000000000..ddc06f0be
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr
@@ -0,0 +1,32 @@
+error: `extract_msrv_attr!` macro missing from `LateLintPass` implementation
+ --> $DIR/invalid_msrv_attr_impl.rs:30:1
+ |
+LL | impl LateLintPass<'_> for Pass {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/invalid_msrv_attr_impl.rs:3:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::missing_msrv_attr_impl)]` implied by `#[deny(clippy::internal)]`
+help: add `extract_msrv_attr!(LateContext)` to the `LateLintPass` implementation
+ |
+LL + impl LateLintPass<'_> for Pass {
+LL + extract_msrv_attr!(LateContext);
+ |
+
+error: `extract_msrv_attr!` macro missing from `EarlyLintPass` implementation
+ --> $DIR/invalid_msrv_attr_impl.rs:34:1
+ |
+LL | impl EarlyLintPass for Pass {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: add `extract_msrv_attr!(EarlyContext)` to the `EarlyLintPass` implementation
+ |
+LL + impl EarlyLintPass for Pass {
+LL + extract_msrv_attr!(EarlyContext);
+ |
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/invalid_paths.rs b/src/tools/clippy/tests/ui-internal/invalid_paths.rs
new file mode 100644
index 000000000..b823ff7fe
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/invalid_paths.rs
@@ -0,0 +1,27 @@
+#![warn(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+
+mod paths {
+ // Good path
+ pub const ANY_TRAIT: [&str; 3] = ["std", "any", "Any"];
+
+ // Path to method on inherent impl of a primitive type
+ pub const F32_EPSILON: [&str; 4] = ["core", "f32", "<impl f32>", "EPSILON"];
+
+ // Path to method on inherent impl
+ pub const ARC_PTR_EQ: [&str; 4] = ["alloc", "sync", "Arc", "ptr_eq"];
+
+ // Path with empty segment
+ pub const TRANSMUTE: [&str; 4] = ["core", "intrinsics", "", "transmute"];
+
+ // Path with bad crate
+ pub const BAD_CRATE_PATH: [&str; 2] = ["bad", "path"];
+
+ // Path with bad module
+ pub const BAD_MOD_PATH: [&str; 2] = ["std", "xxx"];
+
+ // Path to method on an enum inherent impl
+ pub const OPTION_IS_SOME: [&str; 4] = ["core", "option", "Option", "is_some"];
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/invalid_paths.stderr b/src/tools/clippy/tests/ui-internal/invalid_paths.stderr
new file mode 100644
index 000000000..0e8508869
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/invalid_paths.stderr
@@ -0,0 +1,22 @@
+error: invalid path
+ --> $DIR/invalid_paths.rs:15:5
+ |
+LL | pub const TRANSMUTE: [&str; 4] = ["core", "intrinsics", "", "transmute"];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::invalid-paths` implied by `-D warnings`
+
+error: invalid path
+ --> $DIR/invalid_paths.rs:18:5
+ |
+LL | pub const BAD_CRATE_PATH: [&str; 2] = ["bad", "path"];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: invalid path
+ --> $DIR/invalid_paths.rs:21:5
+ |
+LL | pub const BAD_MOD_PATH: [&str; 2] = ["std", "xxx"];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs
new file mode 100644
index 000000000..1fd03cfe3
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs
@@ -0,0 +1,45 @@
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+#[macro_use]
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+extern crate rustc_lint;
+use rustc_lint::LintPass;
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT,
+ Warn,
+ "",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT_REGISTERED,
+ Warn,
+ "",
+ report_in_external_macro: true
+}
+
+declare_tool_lint! {
+ pub clippy::TEST_LINT_REGISTERED_ONLY_IMPL,
+ Warn,
+ "",
+ report_in_external_macro: true
+}
+
+pub struct Pass;
+impl LintPass for Pass {
+ fn name(&self) -> &'static str {
+ "TEST_LINT"
+ }
+}
+
+declare_lint_pass!(Pass2 => [TEST_LINT_REGISTERED]);
+
+pub struct Pass3;
+impl_lint_pass!(Pass3 => [TEST_LINT_REGISTERED_ONLY_IMPL]);
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr
new file mode 100644
index 000000000..de04920b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr
@@ -0,0 +1,21 @@
+error: the lint `TEST_LINT` is not added to any `LintPass`
+ --> $DIR/lint_without_lint_pass.rs:12:1
+ |
+LL | / declare_tool_lint! {
+LL | | pub clippy::TEST_LINT,
+LL | | Warn,
+LL | | "",
+LL | | report_in_external_macro: true
+LL | | }
+ | |_^
+ |
+note: the lint level is defined here
+ --> $DIR/lint_without_lint_pass.rs:1:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::lint_without_lint_pass)]` implied by `#[deny(clippy::internal)]`
+ = note: this error originates in the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-internal/match_type_on_diag_item.rs b/src/tools/clippy/tests/ui-internal/match_type_on_diag_item.rs
new file mode 100644
index 000000000..4b41ff15e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/match_type_on_diag_item.rs
@@ -0,0 +1,39 @@
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate clippy_utils;
+extern crate rustc_hir;
+extern crate rustc_lint;
+extern crate rustc_middle;
+
+#[macro_use]
+extern crate rustc_session;
+use clippy_utils::{paths, ty::match_type};
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::Ty;
+
+declare_lint! {
+ pub TEST_LINT,
+ Warn,
+ ""
+}
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+static OPTION: [&str; 3] = ["core", "option", "Option"];
+
+impl<'tcx> LateLintPass<'tcx> for Pass {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr) {
+ let ty = cx.typeck_results().expr_ty(expr);
+
+ let _ = match_type(cx, ty, &OPTION);
+ let _ = match_type(cx, ty, &["core", "result", "Result"]);
+
+ let rc_path = &["alloc", "rc", "Rc"];
+ let _ = clippy_utils::ty::match_type(cx, ty, rc_path);
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/match_type_on_diag_item.stderr b/src/tools/clippy/tests/ui-internal/match_type_on_diag_item.stderr
new file mode 100644
index 000000000..e3cb6b6c2
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/match_type_on_diag_item.stderr
@@ -0,0 +1,27 @@
+error: usage of `clippy_utils::ty::match_type()` on a type diagnostic item
+ --> $DIR/match_type_on_diag_item.rs:31:17
+ |
+LL | let _ = match_type(cx, ty, &OPTION);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `clippy_utils::ty::is_type_diagnostic_item(cx, ty, sym::Option)`
+ |
+note: the lint level is defined here
+ --> $DIR/match_type_on_diag_item.rs:1:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::match_type_on_diagnostic_item)]` implied by `#[deny(clippy::internal)]`
+
+error: usage of `clippy_utils::ty::match_type()` on a type diagnostic item
+ --> $DIR/match_type_on_diag_item.rs:32:17
+ |
+LL | let _ = match_type(cx, ty, &["core", "result", "Result"]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `clippy_utils::ty::is_type_diagnostic_item(cx, ty, sym::Result)`
+
+error: usage of `clippy_utils::ty::match_type()` on a type diagnostic item
+ --> $DIR/match_type_on_diag_item.rs:35:17
+ |
+LL | let _ = clippy_utils::ty::match_type(cx, ty, rc_path);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `clippy_utils::ty::is_type_diagnostic_item(cx, ty, sym::Rc)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed b/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed
new file mode 100644
index 000000000..bb82faf0c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate rustc_hir;
+extern crate rustc_lint;
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+
+declare_lint! {
+ pub TEST_LINT,
+ Warn,
+ ""
+}
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+impl<'tcx> LateLintPass<'tcx> for Pass {
+ fn check_expr(&mut self, _cx: &LateContext<'tcx>, expr: &'tcx Expr) {
+ let _ = expr.span.ctxt().outer_expn_data();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.rs b/src/tools/clippy/tests/ui-internal/outer_expn_data.rs
new file mode 100644
index 000000000..187d468b3
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.rs
@@ -0,0 +1,29 @@
+// run-rustfix
+
+#![deny(clippy::internal)]
+#![allow(clippy::missing_clippy_version_attribute)]
+#![feature(rustc_private)]
+
+extern crate rustc_hir;
+extern crate rustc_lint;
+extern crate rustc_middle;
+#[macro_use]
+extern crate rustc_session;
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, LateLintPass};
+
+declare_lint! {
+ pub TEST_LINT,
+ Warn,
+ ""
+}
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+impl<'tcx> LateLintPass<'tcx> for Pass {
+ fn check_expr(&mut self, _cx: &LateContext<'tcx>, expr: &'tcx Expr) {
+ let _ = expr.span.ctxt().outer_expn().expn_data();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr b/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr
new file mode 100644
index 000000000..afef69678
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr
@@ -0,0 +1,15 @@
+error: usage of `outer_expn().expn_data()`
+ --> $DIR/outer_expn_data.rs:25:34
+ |
+LL | let _ = expr.span.ctxt().outer_expn().expn_data();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `outer_expn_data()`
+ |
+note: the lint level is defined here
+ --> $DIR/outer_expn_data.rs:3:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::outer_expn_expn_data)]` implied by `#[deny(clippy::internal)]`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed
new file mode 100644
index 000000000..6033d06e4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed
@@ -0,0 +1,21 @@
+// run-rustfix
+#![feature(rustc_private)]
+#![deny(clippy::internal)]
+#![allow(
+ clippy::borrow_deref_ref,
+ clippy::unnecessary_operation,
+ unused_must_use,
+ clippy::missing_clippy_version_attribute
+)]
+
+extern crate rustc_span;
+
+use rustc_span::symbol::{Ident, Symbol};
+
+fn main() {
+ Symbol::intern("foo") == rustc_span::sym::clippy;
+ Symbol::intern("foo") == rustc_span::symbol::kw::SelfLower;
+ Symbol::intern("foo") != rustc_span::symbol::kw::SelfUpper;
+ Ident::empty().name == rustc_span::sym::clippy;
+ rustc_span::sym::clippy == Ident::empty().name;
+}
diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs
new file mode 100644
index 000000000..1bb5d55f0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs
@@ -0,0 +1,21 @@
+// run-rustfix
+#![feature(rustc_private)]
+#![deny(clippy::internal)]
+#![allow(
+ clippy::borrow_deref_ref,
+ clippy::unnecessary_operation,
+ unused_must_use,
+ clippy::missing_clippy_version_attribute
+)]
+
+extern crate rustc_span;
+
+use rustc_span::symbol::{Ident, Symbol};
+
+fn main() {
+ Symbol::intern("foo").as_str() == "clippy";
+ Symbol::intern("foo").to_string() == "self";
+ Symbol::intern("foo").to_ident_string() != "Self";
+ &*Ident::empty().as_str() == "clippy";
+ "clippy" == Ident::empty().to_string();
+}
diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr
new file mode 100644
index 000000000..a1f507f33
--- /dev/null
+++ b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr
@@ -0,0 +1,39 @@
+error: unnecessary `Symbol` to string conversion
+ --> $DIR/unnecessary_symbol_str.rs:16:5
+ |
+LL | Symbol::intern("foo").as_str() == "clippy";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") == rustc_span::sym::clippy`
+ |
+note: the lint level is defined here
+ --> $DIR/unnecessary_symbol_str.rs:3:9
+ |
+LL | #![deny(clippy::internal)]
+ | ^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::unnecessary_symbol_str)]` implied by `#[deny(clippy::internal)]`
+
+error: unnecessary `Symbol` to string conversion
+ --> $DIR/unnecessary_symbol_str.rs:17:5
+ |
+LL | Symbol::intern("foo").to_string() == "self";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") == rustc_span::symbol::kw::SelfLower`
+
+error: unnecessary `Symbol` to string conversion
+ --> $DIR/unnecessary_symbol_str.rs:18:5
+ |
+LL | Symbol::intern("foo").to_ident_string() != "Self";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") != rustc_span::symbol::kw::SelfUpper`
+
+error: unnecessary `Symbol` to string conversion
+ --> $DIR/unnecessary_symbol_str.rs:19:5
+ |
+LL | &*Ident::empty().as_str() == "clippy";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Ident::empty().name == rustc_span::sym::clippy`
+
+error: unnecessary `Symbol` to string conversion
+ --> $DIR/unnecessary_symbol_str.rs:20:5
+ |
+LL | "clippy" == Ident::empty().to_string();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::sym::clippy == Ident::empty().name`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/arithmetic_allowed/arithmetic_allowed.rs b/src/tools/clippy/tests/ui-toml/arithmetic_allowed/arithmetic_allowed.rs
new file mode 100644
index 000000000..195fabdbf
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/arithmetic_allowed/arithmetic_allowed.rs
@@ -0,0 +1,24 @@
+#![warn(clippy::arithmetic)]
+
+use core::ops::Add;
+
+#[derive(Clone, Copy)]
+struct Point {
+ x: i32,
+ y: i32,
+}
+
+impl Add for Point {
+ type Output = Self;
+
+ fn add(self, other: Self) -> Self {
+ todo!()
+ }
+}
+
+fn main() {
+ let _ = Point { x: 1, y: 0 } + Point { x: 2, y: 3 };
+
+ let point: Point = Point { x: 1, y: 0 };
+ let _ = point + point;
+}
diff --git a/src/tools/clippy/tests/ui-toml/arithmetic_allowed/clippy.toml b/src/tools/clippy/tests/ui-toml/arithmetic_allowed/clippy.toml
new file mode 100644
index 000000000..cc40570b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/arithmetic_allowed/clippy.toml
@@ -0,0 +1 @@
+arithmetic-allowed = ["Point"]
diff --git a/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.rs b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.rs
new file mode 100644
index 000000000..fbef5c456
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.rs
@@ -0,0 +1,41 @@
+#![warn(clippy::await_holding_invalid_type)]
+use std::net::Ipv4Addr;
+
+async fn bad() -> u32 {
+ let _x = String::from("hello");
+ baz().await
+}
+
+async fn bad_reason() -> u32 {
+ let _x = Ipv4Addr::new(127, 0, 0, 1);
+ baz().await
+}
+
+async fn good() -> u32 {
+ {
+ let _x = String::from("hi!");
+ let _y = Ipv4Addr::new(127, 0, 0, 1);
+ }
+ baz().await;
+ let _x = String::from("hi!");
+ 47
+}
+
+async fn baz() -> u32 {
+ 42
+}
+
+#[allow(clippy::manual_async_fn)]
+fn block_bad() -> impl std::future::Future<Output = u32> {
+ async move {
+ let _x = String::from("hi!");
+ baz().await
+ }
+}
+
+fn main() {
+ good();
+ bad();
+ bad_reason();
+ block_bad();
+}
diff --git a/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.stderr b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.stderr
new file mode 100644
index 000000000..62c45b546
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/await_holding_invalid_type.stderr
@@ -0,0 +1,25 @@
+error: `std::string::String` may not be held across an `await` point per `clippy.toml`
+ --> $DIR/await_holding_invalid_type.rs:5:9
+ |
+LL | let _x = String::from("hello");
+ | ^^
+ |
+ = note: `-D clippy::await-holding-invalid-type` implied by `-D warnings`
+ = note: strings are bad
+
+error: `std::net::Ipv4Addr` may not be held across an `await` point per `clippy.toml`
+ --> $DIR/await_holding_invalid_type.rs:10:9
+ |
+LL | let _x = Ipv4Addr::new(127, 0, 0, 1);
+ | ^^
+
+error: `std::string::String` may not be held across an `await` point per `clippy.toml`
+ --> $DIR/await_holding_invalid_type.rs:31:13
+ |
+LL | let _x = String::from("hi!");
+ | ^^
+ |
+ = note: strings are bad
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/clippy.toml b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/clippy.toml
new file mode 100644
index 000000000..79990096b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type/clippy.toml
@@ -0,0 +1,4 @@
+await-holding-invalid-types = [
+ { path = "std::string::String", reason = "strings are bad" },
+ "std::net::Ipv4Addr",
+]
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml/clippy.toml b/src/tools/clippy/tests/ui-toml/bad_toml/clippy.toml
new file mode 100644
index 000000000..823e01a33
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/bad_toml/clippy.toml
@@ -0,0 +1,2 @@
+fn this_is_obviously(not: a, toml: file) {
+}
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.rs b/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr b/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr
new file mode 100644
index 000000000..28c1a568a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/bad_toml/conf_bad_toml.stderr
@@ -0,0 +1,4 @@
+error: error reading Clippy's configuration file `$DIR/clippy.toml`: expected an equals, found an identifier at line 1 column 4
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml_type/clippy.toml b/src/tools/clippy/tests/ui-toml/bad_toml_type/clippy.toml
new file mode 100644
index 000000000..168675394
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/bad_toml_type/clippy.toml
@@ -0,0 +1 @@
+blacklisted-names = 42
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.rs b/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr b/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr
new file mode 100644
index 000000000..c7bc261de
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/bad_toml_type/conf_bad_type.stderr
@@ -0,0 +1,4 @@
+error: error reading Clippy's configuration file `$DIR/clippy.toml`: invalid type: integer `42`, expected a sequence for key `blacklisted-names`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.rs b/src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.rs
new file mode 100644
index 000000000..fb2395cf9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.rs
@@ -0,0 +1,10 @@
+#[warn(clippy::blacklisted_name)]
+
+fn main() {
+ // `foo` is part of the default configuration
+ let foo = "bar";
+ // `ducks` was unrightfully blacklisted
+ let ducks = ["quack", "quack"];
+ // `fox` is okay
+ let fox = ["what", "does", "the", "fox", "say", "?"];
+}
diff --git a/src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.stderr b/src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.stderr
new file mode 100644
index 000000000..9169bb0e8
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/blacklisted_names_append/blacklisted_names.stderr
@@ -0,0 +1,16 @@
+error: use of a blacklisted/placeholder name `foo`
+ --> $DIR/blacklisted_names.rs:5:9
+ |
+LL | let foo = "bar";
+ | ^^^
+ |
+ = note: `-D clippy::blacklisted-name` implied by `-D warnings`
+
+error: use of a blacklisted/placeholder name `ducks`
+ --> $DIR/blacklisted_names.rs:7:9
+ |
+LL | let ducks = ["quack", "quack"];
+ | ^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/blacklisted_names_append/clippy.toml b/src/tools/clippy/tests/ui-toml/blacklisted_names_append/clippy.toml
new file mode 100644
index 000000000..0e052ef50
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/blacklisted_names_append/clippy.toml
@@ -0,0 +1 @@
+blacklisted-names = ["ducks", ".."]
diff --git a/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.rs b/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.rs
new file mode 100644
index 000000000..fb2395cf9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.rs
@@ -0,0 +1,10 @@
+#[warn(clippy::blacklisted_name)]
+
+fn main() {
+ // `foo` is part of the default configuration
+ let foo = "bar";
+ // `ducks` was unrightfully blacklisted
+ let ducks = ["quack", "quack"];
+ // `fox` is okay
+ let fox = ["what", "does", "the", "fox", "say", "?"];
+}
diff --git a/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.stderr b/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.stderr
new file mode 100644
index 000000000..ec6f7f084
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/blacklisted_names.stderr
@@ -0,0 +1,10 @@
+error: use of a blacklisted/placeholder name `ducks`
+ --> $DIR/blacklisted_names.rs:7:9
+ |
+LL | let ducks = ["quack", "quack"];
+ | ^^^^^
+ |
+ = note: `-D clippy::blacklisted-name` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/clippy.toml b/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/clippy.toml
new file mode 100644
index 000000000..4582f1c06
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/blacklisted_names_replace/clippy.toml
@@ -0,0 +1 @@
+blacklisted-names = ["ducks"]
diff --git a/src/tools/clippy/tests/ui-toml/conf_deprecated_key/clippy.toml b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/clippy.toml
new file mode 100644
index 000000000..ac47b1950
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/clippy.toml
@@ -0,0 +1,6 @@
+# that one is an error
+cyclomatic-complexity-threshold = 42
+
+# that one is white-listed
+[third-party]
+clippy-feature = "nightly"
diff --git a/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.rs b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr
new file mode 100644
index 000000000..90021a034
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/conf_deprecated_key/conf_deprecated_key.stderr
@@ -0,0 +1,4 @@
+error: error reading Clippy's configuration file `$DIR/clippy.toml`: deprecated field `cyclomatic-complexity-threshold`. Please use `cognitive-complexity-threshold` instead
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/dbg_macro/clippy.toml b/src/tools/clippy/tests/ui-toml/dbg_macro/clippy.toml
new file mode 100644
index 000000000..4296655a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/dbg_macro/clippy.toml
@@ -0,0 +1 @@
+allow-dbg-in-tests = true
diff --git a/src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.rs b/src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.rs
new file mode 100644
index 000000000..5d9ce18f6
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.rs
@@ -0,0 +1,39 @@
+// compile-flags: --test
+#![warn(clippy::dbg_macro)]
+
+fn foo(n: u32) -> u32 {
+ if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
+}
+
+fn factorial(n: u32) -> u32 {
+ if dbg!(n <= 1) {
+ dbg!(1)
+ } else {
+ dbg!(n * factorial(n - 1))
+ }
+}
+
+fn main() {
+ dbg!(42);
+ dbg!(dbg!(dbg!(42)));
+ foo(3) + dbg!(factorial(4));
+ dbg!(1, 2, dbg!(3, 4));
+ dbg!(1, 2, 3, 4, 5);
+}
+
+#[test]
+pub fn issue8481() {
+ dbg!(1);
+}
+
+#[cfg(test)]
+fn foo2() {
+ dbg!(1);
+}
+
+#[cfg(test)]
+mod mod1 {
+ fn func() {
+ dbg!(1);
+ }
+}
diff --git a/src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.stderr b/src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.stderr
new file mode 100644
index 000000000..46efb86dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/dbg_macro/dbg_macro.stderr
@@ -0,0 +1,102 @@
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:5:22
+ |
+LL | if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::dbg-macro` implied by `-D warnings`
+help: ensure to avoid having uses of it in version control
+ |
+LL | if let Some(n) = n.checked_sub(4) { n } else { n }
+ | ~~~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:9:8
+ |
+LL | if dbg!(n <= 1) {
+ | ^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | if n <= 1 {
+ | ~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:10:9
+ |
+LL | dbg!(1)
+ | ^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 1
+ |
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:12:9
+ |
+LL | dbg!(n * factorial(n - 1))
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | n * factorial(n - 1)
+ |
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:17:5
+ |
+LL | dbg!(42);
+ | ^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 42;
+ | ~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:18:5
+ |
+LL | dbg!(dbg!(dbg!(42)));
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | dbg!(dbg!(42));
+ | ~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:19:14
+ |
+LL | foo(3) + dbg!(factorial(4));
+ | ^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | foo(3) + factorial(4);
+ | ~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:20:5
+ |
+LL | dbg!(1, 2, dbg!(3, 4));
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | (1, 2, dbg!(3, 4));
+ | ~~~~~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:21:5
+ |
+LL | dbg!(1, 2, 3, 4, 5);
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | (1, 2, 3, 4, 5);
+ | ~~~~~~~~~~~~~~~
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/clippy.toml b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/clippy.toml
new file mode 100644
index 000000000..daf327685
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/clippy.toml
@@ -0,0 +1 @@
+doc-valid-idents = ["ClipPy", ".."]
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.rs b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.rs
new file mode 100644
index 000000000..327a592e9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.rs
@@ -0,0 +1,12 @@
+#![warn(clippy::doc_markdown)]
+
+/// This is a special interface for ClipPy which doesn't require backticks
+fn allowed_name() {}
+
+/// OAuth and LaTeX are inside Clippy's default list.
+fn default_name() {}
+
+/// TestItemThingyOfCoolness might sound cool but is not on the list and should be linted.
+fn unknown_name() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr
new file mode 100644
index 000000000..0f767c9b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_append/doc_markdown.stderr
@@ -0,0 +1,14 @@
+error: item in documentation is missing backticks
+ --> $DIR/doc_markdown.rs:9:5
+ |
+LL | /// TestItemThingyOfCoolness might sound cool but is not on the list and should be linted.
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::doc-markdown` implied by `-D warnings`
+help: try
+ |
+LL | /// `TestItemThingyOfCoolness` might sound cool but is not on the list and should be linted.
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/clippy.toml b/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/clippy.toml
new file mode 100644
index 000000000..70bc477b0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/clippy.toml
@@ -0,0 +1 @@
+doc-valid-idents = ["ClipPy"]
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.rs b/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.rs
new file mode 100644
index 000000000..327a592e9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.rs
@@ -0,0 +1,12 @@
+#![warn(clippy::doc_markdown)]
+
+/// This is a special interface for ClipPy which doesn't require backticks
+fn allowed_name() {}
+
+/// OAuth and LaTeX are inside Clippy's default list.
+fn default_name() {}
+
+/// TestItemThingyOfCoolness might sound cool but is not on the list and should be linted.
+fn unknown_name() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.stderr b/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.stderr
new file mode 100644
index 000000000..e0613eb86
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/doc_valid_idents_replace/doc_markdown.stderr
@@ -0,0 +1,36 @@
+error: item in documentation is missing backticks
+ --> $DIR/doc_markdown.rs:6:5
+ |
+LL | /// OAuth and LaTeX are inside Clippy's default list.
+ | ^^^^^
+ |
+ = note: `-D clippy::doc-markdown` implied by `-D warnings`
+help: try
+ |
+LL | /// `OAuth` and LaTeX are inside Clippy's default list.
+ | ~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc_markdown.rs:6:15
+ |
+LL | /// OAuth and LaTeX are inside Clippy's default list.
+ | ^^^^^
+ |
+help: try
+ |
+LL | /// OAuth and `LaTeX` are inside Clippy's default list.
+ | ~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc_markdown.rs:9:5
+ |
+LL | /// TestItemThingyOfCoolness might sound cool but is not on the list and should be linted.
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `TestItemThingyOfCoolness` might sound cool but is not on the list and should be linted.
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/expect_used/clippy.toml b/src/tools/clippy/tests/ui-toml/expect_used/clippy.toml
new file mode 100644
index 000000000..6933b8164
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/expect_used/clippy.toml
@@ -0,0 +1 @@
+allow-expect-in-tests = true
diff --git a/src/tools/clippy/tests/ui-toml/expect_used/expect_used.rs b/src/tools/clippy/tests/ui-toml/expect_used/expect_used.rs
new file mode 100644
index 000000000..22dcd3ae9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/expect_used/expect_used.rs
@@ -0,0 +1,29 @@
+// compile-flags: --test
+#![warn(clippy::expect_used)]
+
+fn expect_option() {
+ let opt = Some(0);
+ let _ = opt.expect("");
+}
+
+fn expect_result() {
+ let res: Result<u8, ()> = Ok(0);
+ let _ = res.expect("");
+}
+
+fn main() {
+ expect_option();
+ expect_result();
+}
+
+#[test]
+fn test_expect_option() {
+ let opt = Some(0);
+ let _ = opt.expect("");
+}
+
+#[test]
+fn test_expect_result() {
+ let res: Result<u8, ()> = Ok(0);
+ let _ = res.expect("");
+}
diff --git a/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr b/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr
new file mode 100644
index 000000000..9cb2199ed
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr
@@ -0,0 +1,19 @@
+error: used `expect()` on `an Option` value
+ --> $DIR/expect_used.rs:6:13
+ |
+LL | let _ = opt.expect("");
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::expect-used` implied by `-D warnings`
+ = help: if this value is an `None`, it will panic
+
+error: used `expect()` on `a Result` value
+ --> $DIR/expect_used.rs:11:13
+ |
+LL | let _ = res.expect("");
+ | ^^^^^^^^^^^^^^
+ |
+ = help: if this value is an `Err`, it will panic
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/clippy.toml b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/clippy.toml
new file mode 100644
index 000000000..022eec3e0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/clippy.toml
@@ -0,0 +1 @@
+max-fn-params-bools = 1
diff --git a/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.rs b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.rs
new file mode 100644
index 000000000..42897b389
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.rs
@@ -0,0 +1,6 @@
+#![warn(clippy::fn_params_excessive_bools)]
+
+fn f(_: bool) {}
+fn g(_: bool, _: bool) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr
new file mode 100644
index 000000000..d05adc3d3
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/fn_params_excessive_bools/test.stderr
@@ -0,0 +1,11 @@
+error: more than 1 bools in function parameters
+ --> $DIR/test.rs:4:1
+ |
+LL | fn g(_: bool, _: bool) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::fn-params-excessive-bools` implied by `-D warnings`
+ = help: consider refactoring bools into two-variant enums
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/functions_maxlines/clippy.toml b/src/tools/clippy/tests/ui-toml/functions_maxlines/clippy.toml
new file mode 100644
index 000000000..951dbb523
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/functions_maxlines/clippy.toml
@@ -0,0 +1 @@
+too-many-lines-threshold = 1
diff --git a/src/tools/clippy/tests/ui-toml/functions_maxlines/test.rs b/src/tools/clippy/tests/ui-toml/functions_maxlines/test.rs
new file mode 100644
index 000000000..4ac037854
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/functions_maxlines/test.rs
@@ -0,0 +1,60 @@
+#![warn(clippy::too_many_lines)]
+#![allow(clippy::let_unit_value)]
+
+// This function should be considered one line.
+fn many_comments_but_one_line_of_code() {
+ /* println!("This is good."); */
+ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* println!("This is good.");
+ println!("This is good.");
+ println!("This is good."); */
+ println!("This is good.");
+}
+
+// This should be considered two and a fail.
+fn too_many_lines() {
+ println!("This is bad.");
+ println!("This is bad.");
+}
+
+// This should only fail once (#7517).
+async fn async_too_many_lines() {
+ println!("This is bad.");
+ println!("This is bad.");
+}
+
+// This should fail only once, without failing on the closure.
+fn closure_too_many_lines() {
+ let _ = {
+ println!("This is bad.");
+ println!("This is bad.");
+ };
+}
+
+// This should be considered one line.
+#[rustfmt::skip]
+fn comment_starts_after_code() {
+ let _ = 5; /* closing comment. */ /*
+ this line shouldn't be counted theoretically.
+ */
+}
+
+// This should be considered one line.
+fn comment_after_code() {
+ let _ = 5; /* this line should get counted once. */
+}
+
+// This should fail since it is technically two lines.
+#[rustfmt::skip]
+fn comment_before_code() {
+ let _ = "test";
+ /* This comment extends to the front of
+ the code but this line should still count. */ let _ = 5;
+}
+
+// This should be considered one line.
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/functions_maxlines/test.stderr b/src/tools/clippy/tests/ui-toml/functions_maxlines/test.stderr
new file mode 100644
index 000000000..dc255bdca
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/functions_maxlines/test.stderr
@@ -0,0 +1,43 @@
+error: this function has too many lines (2/1)
+ --> $DIR/test.rs:19:1
+ |
+LL | / fn too_many_lines() {
+LL | | println!("This is bad.");
+LL | | println!("This is bad.");
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::too-many-lines` implied by `-D warnings`
+
+error: this function has too many lines (4/1)
+ --> $DIR/test.rs:25:1
+ |
+LL | / async fn async_too_many_lines() {
+LL | | println!("This is bad.");
+LL | | println!("This is bad.");
+LL | | }
+ | |_^
+
+error: this function has too many lines (4/1)
+ --> $DIR/test.rs:31:1
+ |
+LL | / fn closure_too_many_lines() {
+LL | | let _ = {
+LL | | println!("This is bad.");
+LL | | println!("This is bad.");
+LL | | };
+LL | | }
+ | |_^
+
+error: this function has too many lines (2/1)
+ --> $DIR/test.rs:53:1
+ |
+LL | / fn comment_before_code() {
+LL | | let _ = "test";
+LL | | /* This comment extends to the front of
+LL | | the code but this line should still count. */ let _ = 5;
+LL | | }
+ | |_^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/clippy.toml b/src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/clippy.toml
new file mode 100644
index 000000000..a1dd6b2f0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/clippy.toml
@@ -0,0 +1,3 @@
+# that one is white-listed
+[third-party]
+clippy-feature = "nightly"
diff --git a/src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/conf_no_false_negatives.rs b/src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/conf_no_false_negatives.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/good_toml_no_false_negatives/conf_no_false_negatives.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/clippy.toml b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/clippy.toml
new file mode 100644
index 000000000..088b12b2d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/clippy.toml
@@ -0,0 +1 @@
+msrv = "invalid.version"
diff --git a/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.rs b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.rs
new file mode 100644
index 000000000..2ebf28645
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.rs
@@ -0,0 +1,3 @@
+#![allow(clippy::redundant_clone)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr
new file mode 100644
index 000000000..e9d8fd2e0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/invalid_min_rust_version/invalid_min_rust_version.stderr
@@ -0,0 +1,4 @@
+error: error reading Clippy's configuration file. `invalid.version` is not a valid Rust version
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/large_include_file/clippy.toml b/src/tools/clippy/tests/ui-toml/large_include_file/clippy.toml
new file mode 100644
index 000000000..ea34bf9fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_include_file/clippy.toml
@@ -0,0 +1 @@
+max-include-file-size = 600
diff --git a/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs
new file mode 100644
index 000000000..f3dbb6ad1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.rs
@@ -0,0 +1,16 @@
+#![warn(clippy::large_include_file)]
+
+// Good
+const GOOD_INCLUDE_BYTES: &[u8; 581] = include_bytes!("large_include_file.rs");
+const GOOD_INCLUDE_STR: &str = include_str!("large_include_file.rs");
+
+#[allow(clippy::large_include_file)]
+const ALLOWED_TOO_BIG_INCLUDE_BYTES: &[u8; 654] = include_bytes!("too_big.txt");
+#[allow(clippy::large_include_file)]
+const ALLOWED_TOO_BIG_INCLUDE_STR: &str = include_str!("too_big.txt");
+
+// Bad
+const TOO_BIG_INCLUDE_BYTES: &[u8; 654] = include_bytes!("too_big.txt");
+const TOO_BIG_INCLUDE_STR: &str = include_str!("too_big.txt");
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr
new file mode 100644
index 000000000..6a685a583
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_include_file/large_include_file.stderr
@@ -0,0 +1,21 @@
+error: attempted to include a large file
+ --> $DIR/large_include_file.rs:13:43
+ |
+LL | const TOO_BIG_INCLUDE_BYTES: &[u8; 654] = include_bytes!("too_big.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::large-include-file` implied by `-D warnings`
+ = note: the configuration allows a maximum size of 600 bytes
+ = note: this error originates in the macro `include_bytes` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: attempted to include a large file
+ --> $DIR/large_include_file.rs:14:35
+ |
+LL | const TOO_BIG_INCLUDE_STR: &str = include_str!("too_big.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the configuration allows a maximum size of 600 bytes
+ = note: this error originates in the macro `include_str` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/large_include_file/too_big.txt b/src/tools/clippy/tests/ui-toml/large_include_file/too_big.txt
new file mode 100644
index 000000000..9829c46bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/large_include_file/too_big.txt
@@ -0,0 +1 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Maecenas accumsan lacus vel facilisis volutpat. Etiam dignissim diam quis enim lobortis scelerisque fermentum dui faucibus. Tellus id interdum velit laoreet id donec ultrices. Est ultricies integer quis auctor elit sed vulputate. Erat velit scelerisque in dictum non consectetur a erat nam. Sed blandit libero volutpat sed. Tortor condimentum lacinia quis vel eros. Enim ut tellus elementum sagittis vitae et leo duis. Congue mauris rhoncus aenean vel elit scelerisque. Id consectetur purus ut faucibus pulvinar elementum integer. \ No newline at end of file
diff --git a/src/tools/clippy/tests/ui-toml/lint_decimal_readability/clippy.toml b/src/tools/clippy/tests/ui-toml/lint_decimal_readability/clippy.toml
new file mode 100644
index 000000000..6feaf7d5c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/lint_decimal_readability/clippy.toml
@@ -0,0 +1 @@
+unreadable-literal-lint-fractions = false \ No newline at end of file
diff --git a/src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.rs b/src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.rs
new file mode 100644
index 000000000..2498672d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.rs
@@ -0,0 +1,23 @@
+#![allow(clippy::excessive_precision)]
+#[deny(clippy::unreadable_literal)]
+
+fn allow_inconsistent_digit_grouping() {
+ #![allow(clippy::inconsistent_digit_grouping)]
+ let _pass1 = 100_200_300.123456789;
+}
+
+fn main() {
+ allow_inconsistent_digit_grouping();
+
+ let _pass1 = 100_200_300.100_200_300;
+ let _pass2 = 1.123456789;
+ let _pass3 = 1.0;
+ let _pass4 = 10000.00001;
+ let _pass5 = 1.123456789e1;
+
+ // due to clippy::inconsistent-digit-grouping
+ let _fail1 = 100_200_300.123456789;
+
+ // fail due to the integer part
+ let _fail2 = 100200300.300200100;
+}
diff --git a/src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.stderr b/src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.stderr
new file mode 100644
index 000000000..be505bda4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/lint_decimal_readability/test.stderr
@@ -0,0 +1,10 @@
+error: digits grouped inconsistently by underscores
+ --> $DIR/test.rs:19:18
+ |
+LL | let _fail1 = 100_200_300.123456789;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider: `100_200_300.123_456_789`
+ |
+ = note: `-D clippy::inconsistent-digit-grouping` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/clippy.toml b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/clippy.toml
new file mode 100644
index 000000000..78c7e63b4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/clippy.toml
@@ -0,0 +1 @@
+max-suggested-slice-pattern-length = 8
diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs
new file mode 100644
index 000000000..21849a14f
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs
@@ -0,0 +1,23 @@
+#![deny(clippy::index_refutable_slice)]
+
+fn below_limit() {
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice {
+ // This would usually not be linted but is included now due to the
+ // index limit in the config file
+ println!("{}", slice[7]);
+ }
+}
+
+fn above_limit() {
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice {
+ // This will not be linted as 8 is above the limit
+ println!("{}", slice[8]);
+ }
+}
+
+fn main() {
+ below_limit();
+ above_limit();
+}
diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr
new file mode 100644
index 000000000..d319e65d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr
@@ -0,0 +1,22 @@
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/index_refutable_slice.rs:5:17
+ |
+LL | if let Some(slice) = slice {
+ | ^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/index_refutable_slice.rs:1:9
+ |
+LL | #![deny(clippy::index_refutable_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using a slice pattern here
+ |
+LL | if let Some([_, _, _, _, _, _, _, slice_7, ..]) = slice {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{}", slice_7);
+ | ~~~~~~~
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/min_rust_version/clippy.toml b/src/tools/clippy/tests/ui-toml/min_rust_version/clippy.toml
new file mode 100644
index 000000000..8e17d8074
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/min_rust_version/clippy.toml
@@ -0,0 +1 @@
+msrv = "1.0.0"
diff --git a/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.rs b/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.rs
new file mode 100644
index 000000000..1e3ec123a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.rs
@@ -0,0 +1,98 @@
+#![allow(clippy::redundant_clone, clippy::unnecessary_operation)]
+#![warn(clippy::manual_non_exhaustive, clippy::borrow_as_ptr, clippy::manual_bits)]
+
+use std::mem::{size_of, size_of_val};
+use std::ops::Deref;
+
+mod enums {
+ enum E {
+ A,
+ B,
+ #[doc(hidden)]
+ _C,
+ }
+
+ // user forgot to remove the marker
+ #[non_exhaustive]
+ enum Ep {
+ A,
+ B,
+ #[doc(hidden)]
+ _C,
+ }
+}
+
+fn option_as_ref_deref() {
+ let mut opt = Some(String::from("123"));
+
+ let _ = opt.as_ref().map(String::as_str);
+ let _ = opt.as_ref().map(|x| x.as_str());
+ let _ = opt.as_mut().map(String::as_mut_str);
+ let _ = opt.as_mut().map(|x| x.as_mut_str());
+}
+
+fn match_like_matches() {
+ let _y = match Some(5) {
+ Some(0) => true,
+ _ => false,
+ };
+}
+
+fn match_same_arms() {
+ match (1, 2, 3) {
+ (1, .., 3) => 42,
+ (.., 3) => 42, //~ ERROR match arms have same body
+ _ => 0,
+ };
+}
+
+fn match_same_arms2() {
+ let _ = match Some(42) {
+ Some(_) => 24,
+ None => 24, //~ ERROR match arms have same body
+ };
+}
+
+fn manual_strip_msrv() {
+ let s = "hello, world!";
+ if s.starts_with("hello, ") {
+ assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ }
+}
+
+fn check_index_refutable_slice() {
+ // This shouldn't trigger `clippy::index_refutable_slice` as the suggestion
+ // would only be valid from 1.42.0 onward
+ let slice: Option<&[u32]> = Some(&[1]);
+ if let Some(slice) = slice {
+ println!("{}", slice[0]);
+ }
+}
+
+fn map_clone_suggest_copied() {
+ // This should still trigger the lint but suggest `cloned()` instead of `copied()`
+ let _: Option<u64> = Some(&16).map(|b| *b);
+}
+
+fn borrow_as_ptr() {
+ let val = 1;
+ let _p = &val as *const i32;
+
+ let mut val_mut = 1;
+ let _p_mut = &mut val_mut as *mut i32;
+}
+
+fn manual_bits() {
+ size_of::<i8>() * 8;
+ size_of_val(&0u32) * 8;
+}
+
+fn main() {
+ option_as_ref_deref();
+ match_like_matches();
+ match_same_arms();
+ match_same_arms2();
+ manual_strip_msrv();
+ check_index_refutable_slice();
+ borrow_as_ptr();
+}
diff --git a/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr b/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr
new file mode 100644
index 000000000..5dae5af7e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/min_rust_version/min_rust_version.stderr
@@ -0,0 +1,10 @@
+error: you are using an explicit closure for cloning elements
+ --> $DIR/min_rust_version.rs:74:26
+ |
+LL | let _: Option<u64> = Some(&16).map(|b| *b);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider calling the dedicated `cloned` method: `Some(&16).cloned()`
+ |
+ = note: `-D clippy::map-clone` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/clippy.toml b/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/clippy.toml
new file mode 100644
index 000000000..05ba82287
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/clippy.toml
@@ -0,0 +1,10 @@
+enforced-import-renames = [
+ { path = "std::option::Option", rename = "Maybe" },
+ { path = "std::process::Child", rename = "Kid" },
+ { path = "std::process::exit", rename = "goodbye" },
+ { path = "std::collections::BTreeMap", rename = "Map" },
+ { path = "std::clone", rename = "foo" },
+ { path = "std::thread::sleep", rename = "thread_sleep" },
+ { path = "std::any::type_name", rename = "ident" },
+ { path = "std::sync::Mutex", rename = "StdMutie" }
+]
diff --git a/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.rs b/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.rs
new file mode 100644
index 000000000..f60058c86
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.rs
@@ -0,0 +1,16 @@
+#![warn(clippy::missing_enforced_import_renames)]
+
+use std::alloc as colla;
+use std::option::Option as Maybe;
+use std::process::{exit as wrong_exit, Child as Kid};
+use std::thread::sleep;
+#[rustfmt::skip]
+use std::{
+ any::{type_name, Any},
+ clone,
+ sync :: Mutex,
+};
+
+fn main() {
+ use std::collections::BTreeMap as OopsWrongRename;
+}
diff --git a/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.stderr b/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.stderr
new file mode 100644
index 000000000..45de8fdff
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/missing_enforced_import_rename/conf_missing_enforced_import_rename.stderr
@@ -0,0 +1,40 @@
+error: this import should be renamed
+ --> $DIR/conf_missing_enforced_import_rename.rs:5:20
+ |
+LL | use std::process::{exit as wrong_exit, Child as Kid};
+ | ^^^^^^^^^^^^^^^^^^ help: try: `exit as goodbye`
+ |
+ = note: `-D clippy::missing-enforced-import-renames` implied by `-D warnings`
+
+error: this import should be renamed
+ --> $DIR/conf_missing_enforced_import_rename.rs:6:1
+ |
+LL | use std::thread::sleep;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `use std::thread::sleep as thread_sleep`
+
+error: this import should be renamed
+ --> $DIR/conf_missing_enforced_import_rename.rs:9:11
+ |
+LL | any::{type_name, Any},
+ | ^^^^^^^^^ help: try: `type_name as ident`
+
+error: this import should be renamed
+ --> $DIR/conf_missing_enforced_import_rename.rs:10:5
+ |
+LL | clone,
+ | ^^^^^ help: try: `clone as foo`
+
+error: this import should be renamed
+ --> $DIR/conf_missing_enforced_import_rename.rs:11:5
+ |
+LL | sync :: Mutex,
+ | ^^^^^^^^^^^^^ help: try: `sync :: Mutex as StdMutie`
+
+error: this import should be renamed
+ --> $DIR/conf_missing_enforced_import_rename.rs:15:5
+ |
+LL | use std::collections::BTreeMap as OopsWrongRename;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `use std::collections::BTreeMap as Map`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/auxiliary/proc_macro_derive.rs b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/auxiliary/proc_macro_derive.rs
new file mode 100644
index 000000000..6452189a4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/auxiliary/proc_macro_derive.rs
@@ -0,0 +1,18 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_derive(DeriveSomething)]
+pub fn derive(_: TokenStream) -> TokenStream {
+ "fn _f() -> Vec<u8> { vec![] }".parse().unwrap()
+}
+
+#[proc_macro]
+pub fn foo_bar(_: TokenStream) -> TokenStream {
+ "fn issue_7422() { eprintln!(); }".parse().unwrap()
+}
diff --git a/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/clippy.toml b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/clippy.toml
new file mode 100644
index 000000000..bced8948a
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/clippy.toml
@@ -0,0 +1,6 @@
+standard-macro-braces = [
+ { name = "quote", brace = "{" },
+ { name = "quote::quote", brace = "{" },
+ { name = "eprint", brace = "[" },
+ { name = "type_pos", brace = "[" },
+]
diff --git a/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.rs b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.rs
new file mode 100644
index 000000000..5b4adc868
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.rs
@@ -0,0 +1,60 @@
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::nonstandard_macro_braces)]
+
+extern crate proc_macro_derive;
+extern crate quote;
+
+use quote::quote;
+
+#[derive(proc_macro_derive::DeriveSomething)]
+pub struct S;
+
+proc_macro_derive::foo_bar!();
+
+#[rustfmt::skip]
+macro_rules! test {
+ () => {
+ vec!{0, 0, 0}
+ };
+}
+
+#[rustfmt::skip]
+macro_rules! test2 {
+ ($($arg:tt)*) => {
+ format_args!($($arg)*)
+ };
+}
+
+macro_rules! type_pos {
+ ($what:ty) => {
+ Vec<$what>
+ };
+}
+
+macro_rules! printlnfoo {
+ ($thing:expr) => {
+ println!("{}", $thing)
+ };
+}
+
+#[rustfmt::skip]
+fn main() {
+ let _ = vec! {1, 2, 3};
+ let _ = format!["ugh {} stop being such a good compiler", "hello"];
+ let _ = quote!(let x = 1;);
+ let _ = quote::quote!(match match match);
+ let _ = test!(); // trigger when macro def is inside our own crate
+ let _ = vec![1,2,3];
+
+ let _ = quote::quote! {true || false};
+ let _ = vec! [0 ,0 ,0];
+ let _ = format!("fds{}fds", 10);
+ let _ = test2!["{}{}{}", 1, 2, 3];
+
+ let _: type_pos!(usize) = vec![];
+
+ eprint!("test if user config overrides defaults");
+
+ printlnfoo!["test if printlnfoo is triggered by println"];
+}
diff --git a/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.stderr b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.stderr
new file mode 100644
index 000000000..039b23b1b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/nonstandard_macro_braces/conf_nonstandard_macro_braces.stderr
@@ -0,0 +1,94 @@
+error: use of irregular braces for `vec!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:43:13
+ |
+LL | let _ = vec! {1, 2, 3};
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::nonstandard-macro-braces` implied by `-D warnings`
+help: consider writing `vec![1, 2, 3]`
+ --> $DIR/conf_nonstandard_macro_braces.rs:43:13
+ |
+LL | let _ = vec! {1, 2, 3};
+ | ^^^^^^^^^^^^^^
+
+error: use of irregular braces for `format!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:44:13
+ |
+LL | let _ = format!["ugh {} stop being such a good compiler", "hello"];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider writing `format!("ugh () stop being such a good compiler", "hello")`
+ --> $DIR/conf_nonstandard_macro_braces.rs:44:13
+ |
+LL | let _ = format!["ugh {} stop being such a good compiler", "hello"];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: use of irregular braces for `quote!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:45:13
+ |
+LL | let _ = quote!(let x = 1;);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+help: consider writing `quote! {let x = 1;}`
+ --> $DIR/conf_nonstandard_macro_braces.rs:45:13
+ |
+LL | let _ = quote!(let x = 1;);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: use of irregular braces for `quote::quote!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:46:13
+ |
+LL | let _ = quote::quote!(match match match);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider writing `quote::quote! {match match match}`
+ --> $DIR/conf_nonstandard_macro_braces.rs:46:13
+ |
+LL | let _ = quote::quote!(match match match);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: use of irregular braces for `vec!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:18:9
+ |
+LL | vec!{0, 0, 0}
+ | ^^^^^^^^^^^^^
+...
+LL | let _ = test!(); // trigger when macro def is inside our own crate
+ | ------- in this macro invocation
+ |
+help: consider writing `vec![0, 0, 0]`
+ --> $DIR/conf_nonstandard_macro_braces.rs:18:9
+ |
+LL | vec!{0, 0, 0}
+ | ^^^^^^^^^^^^^
+...
+LL | let _ = test!(); // trigger when macro def is inside our own crate
+ | ------- in this macro invocation
+ = note: this error originates in the macro `test` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: use of irregular braces for `type_pos!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:55:12
+ |
+LL | let _: type_pos!(usize) = vec![];
+ | ^^^^^^^^^^^^^^^^
+ |
+help: consider writing `type_pos![usize]`
+ --> $DIR/conf_nonstandard_macro_braces.rs:55:12
+ |
+LL | let _: type_pos!(usize) = vec![];
+ | ^^^^^^^^^^^^^^^^
+
+error: use of irregular braces for `eprint!` macro
+ --> $DIR/conf_nonstandard_macro_braces.rs:57:5
+ |
+LL | eprint!("test if user config overrides defaults");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider writing `eprint!["test if user config overrides defaults"]`
+ --> $DIR/conf_nonstandard_macro_braces.rs:57:5
+ |
+LL | eprint!("test if user config overrides defaults");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/clippy.toml b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/clippy.toml
new file mode 100644
index 000000000..a942709d1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/clippy.toml
@@ -0,0 +1 @@
+enable-raw-pointer-heuristic-for-send = false
diff --git a/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs
new file mode 100644
index 000000000..90c2439dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs
@@ -0,0 +1,43 @@
+#![warn(clippy::non_send_fields_in_send_ty)]
+#![feature(extern_types)]
+
+use std::rc::Rc;
+
+// Basic tests should not be affected
+pub struct NoGeneric {
+ rc_is_not_send: Rc<String>,
+}
+
+unsafe impl Send for NoGeneric {}
+
+pub struct MultiField<T> {
+ field1: T,
+ field2: T,
+ field3: T,
+}
+
+unsafe impl<T> Send for MultiField<T> {}
+
+pub enum MyOption<T> {
+ MySome(T),
+ MyNone,
+}
+
+unsafe impl<T> Send for MyOption<T> {}
+
+// All fields are disallowed when raw pointer heuristic is off
+extern "C" {
+ type NonSend;
+}
+
+pub struct HeuristicTest {
+ field1: Vec<*const NonSend>,
+ field2: [*const NonSend; 3],
+ field3: (*const NonSend, *const NonSend, *const NonSend),
+ field4: (*const NonSend, Rc<u8>),
+ field5: Vec<Vec<*const NonSend>>,
+}
+
+unsafe impl Send for HeuristicTest {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.stderr b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.stderr
new file mode 100644
index 000000000..49eecf18b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.stderr
@@ -0,0 +1,91 @@
+error: some fields in `NoGeneric` are not safe to be sent to another thread
+ --> $DIR/test.rs:11:1
+ |
+LL | unsafe impl Send for NoGeneric {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::non-send-fields-in-send-ty` implied by `-D warnings`
+note: it is not safe to send field `rc_is_not_send` to another thread
+ --> $DIR/test.rs:8:5
+ |
+LL | rc_is_not_send: Rc<String>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+
+error: some fields in `MultiField<T>` are not safe to be sent to another thread
+ --> $DIR/test.rs:19:1
+ |
+LL | unsafe impl<T> Send for MultiField<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `field1` to another thread
+ --> $DIR/test.rs:14:5
+ |
+LL | field1: T,
+ | ^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+note: it is not safe to send field `field2` to another thread
+ --> $DIR/test.rs:15:5
+ |
+LL | field2: T,
+ | ^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+note: it is not safe to send field `field3` to another thread
+ --> $DIR/test.rs:16:5
+ |
+LL | field3: T,
+ | ^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+
+error: some fields in `MyOption<T>` are not safe to be sent to another thread
+ --> $DIR/test.rs:26:1
+ |
+LL | unsafe impl<T> Send for MyOption<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `0` to another thread
+ --> $DIR/test.rs:22:12
+ |
+LL | MySome(T),
+ | ^
+ = help: add `T: Send` bound in `Send` impl
+
+error: some fields in `HeuristicTest` are not safe to be sent to another thread
+ --> $DIR/test.rs:41:1
+ |
+LL | unsafe impl Send for HeuristicTest {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `field1` to another thread
+ --> $DIR/test.rs:34:5
+ |
+LL | field1: Vec<*const NonSend>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+note: it is not safe to send field `field2` to another thread
+ --> $DIR/test.rs:35:5
+ |
+LL | field2: [*const NonSend; 3],
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+note: it is not safe to send field `field3` to another thread
+ --> $DIR/test.rs:36:5
+ |
+LL | field3: (*const NonSend, *const NonSend, *const NonSend),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+note: it is not safe to send field `field4` to another thread
+ --> $DIR/test.rs:37:5
+ |
+LL | field4: (*const NonSend, Rc<u8>),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+note: it is not safe to send field `field5` to another thread
+ --> $DIR/test.rs:38:5
+ |
+LL | field5: Vec<Vec<*const NonSend>>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/struct_excessive_bools/clippy.toml b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/clippy.toml
new file mode 100644
index 000000000..3912ab542
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/clippy.toml
@@ -0,0 +1 @@
+max-struct-bools = 0
diff --git a/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.rs b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.rs
new file mode 100644
index 000000000..32dd80246
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.rs
@@ -0,0 +1,9 @@
+#![warn(clippy::struct_excessive_bools)]
+
+struct S {
+ a: bool,
+}
+
+struct Foo;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr
new file mode 100644
index 000000000..65861d10d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/struct_excessive_bools/test.stderr
@@ -0,0 +1,13 @@
+error: more than 0 bools in a struct
+ --> $DIR/test.rs:3:1
+ |
+LL | / struct S {
+LL | | a: bool,
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::struct-excessive-bools` implied by `-D warnings`
+ = help: consider using a state machine or refactoring bools into two-variant enums
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/toml_blacklist/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_blacklist/clippy.toml
new file mode 100644
index 000000000..6abe5a3bb
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_blacklist/clippy.toml
@@ -0,0 +1 @@
+blacklisted-names = ["toto", "tata", "titi"]
diff --git a/src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.rs b/src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.rs
new file mode 100644
index 000000000..cb35d0e85
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.rs
@@ -0,0 +1,20 @@
+#![allow(dead_code)]
+#![allow(clippy::single_match)]
+#![allow(unused_variables)]
+#![warn(clippy::blacklisted_name)]
+
+fn test(toto: ()) {}
+
+fn main() {
+ let toto = 42;
+ let tata = 42;
+ let titi = 42;
+
+ let tatab = 42;
+ let tatatataic = 42;
+
+ match (42, Some(1337), Some(0)) {
+ (toto, Some(tata), titi @ Some(_)) => (),
+ _ => (),
+ }
+}
diff --git a/src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.stderr b/src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.stderr
new file mode 100644
index 000000000..84ba77851
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_blacklist/conf_french_blacklisted_name.stderr
@@ -0,0 +1,46 @@
+error: use of a blacklisted/placeholder name `toto`
+ --> $DIR/conf_french_blacklisted_name.rs:6:9
+ |
+LL | fn test(toto: ()) {}
+ | ^^^^
+ |
+ = note: `-D clippy::blacklisted-name` implied by `-D warnings`
+
+error: use of a blacklisted/placeholder name `toto`
+ --> $DIR/conf_french_blacklisted_name.rs:9:9
+ |
+LL | let toto = 42;
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `tata`
+ --> $DIR/conf_french_blacklisted_name.rs:10:9
+ |
+LL | let tata = 42;
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `titi`
+ --> $DIR/conf_french_blacklisted_name.rs:11:9
+ |
+LL | let titi = 42;
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `toto`
+ --> $DIR/conf_french_blacklisted_name.rs:17:10
+ |
+LL | (toto, Some(tata), titi @ Some(_)) => (),
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `tata`
+ --> $DIR/conf_french_blacklisted_name.rs:17:21
+ |
+LL | (toto, Some(tata), titi @ Some(_)) => (),
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `titi`
+ --> $DIR/conf_french_blacklisted_name.rs:17:28
+ |
+LL | (toto, Some(tata), titi @ Some(_)) => (),
+ | ^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/clippy.toml
new file mode 100644
index 000000000..c902d2112
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/clippy.toml
@@ -0,0 +1,10 @@
+disallowed-methods = [
+ # just a string is shorthand for path only
+ "std::iter::Iterator::sum",
+ "f32::clamp",
+ "slice::sort_unstable",
+ # can give path and reason with an inline table
+ { path = "regex::Regex::is_match", reason = "no matching allowed" },
+ # can use an inline table but omit reason
+ { path = "regex::Regex::new" },
+]
diff --git a/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.rs b/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.rs
new file mode 100644
index 000000000..3397fa1ec
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.rs
@@ -0,0 +1,23 @@
+#![warn(clippy::disallowed_methods)]
+
+extern crate regex;
+use regex::Regex;
+
+fn main() {
+ let re = Regex::new(r"ab.*c").unwrap();
+ re.is_match("abc");
+
+ let mut a = vec![1, 2, 3, 4];
+ a.iter().sum::<i32>();
+
+ a.sort_unstable();
+
+ let _ = 2.0f32.clamp(3.0f32, 4.0f32);
+ let _ = 2.0f64.clamp(3.0f64, 4.0f64);
+
+ let indirect: fn(&str) -> Result<Regex, regex::Error> = Regex::new;
+ let re = indirect(".").unwrap();
+
+ let in_call = Box::new(f32::clamp);
+ let in_method_call = ["^", "$"].into_iter().map(Regex::new);
+}
diff --git a/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.stderr b/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.stderr
new file mode 100644
index 000000000..5cbb56754
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_disallowed_methods/conf_disallowed_methods.stderr
@@ -0,0 +1,54 @@
+error: use of a disallowed method `regex::Regex::new`
+ --> $DIR/conf_disallowed_methods.rs:7:14
+ |
+LL | let re = Regex::new(r"ab.*c").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::disallowed-methods` implied by `-D warnings`
+
+error: use of a disallowed method `regex::Regex::is_match`
+ --> $DIR/conf_disallowed_methods.rs:8:5
+ |
+LL | re.is_match("abc");
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: no matching allowed (from clippy.toml)
+
+error: use of a disallowed method `std::iter::Iterator::sum`
+ --> $DIR/conf_disallowed_methods.rs:11:5
+ |
+LL | a.iter().sum::<i32>();
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: use of a disallowed method `slice::sort_unstable`
+ --> $DIR/conf_disallowed_methods.rs:13:5
+ |
+LL | a.sort_unstable();
+ | ^^^^^^^^^^^^^^^^^
+
+error: use of a disallowed method `f32::clamp`
+ --> $DIR/conf_disallowed_methods.rs:15:13
+ |
+LL | let _ = 2.0f32.clamp(3.0f32, 4.0f32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: use of a disallowed method `regex::Regex::new`
+ --> $DIR/conf_disallowed_methods.rs:18:61
+ |
+LL | let indirect: fn(&str) -> Result<Regex, regex::Error> = Regex::new;
+ | ^^^^^^^^^^
+
+error: use of a disallowed method `f32::clamp`
+ --> $DIR/conf_disallowed_methods.rs:21:28
+ |
+LL | let in_call = Box::new(f32::clamp);
+ | ^^^^^^^^^^
+
+error: use of a disallowed method `regex::Regex::new`
+ --> $DIR/conf_disallowed_methods.rs:22:53
+ |
+LL | let in_method_call = ["^", "$"].into_iter().map(Regex::new);
+ | ^^^^^^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/toml_disallowed_types/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_disallowed_types/clippy.toml
new file mode 100644
index 000000000..6cb9e2ef9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_disallowed_types/clippy.toml
@@ -0,0 +1,15 @@
+disallowed-types = [
+ "std::collections::HashMap",
+ "std::sync::atomic::AtomicU32",
+ "syn::TypePath",
+ "proc_macro2::Ident",
+ "std::thread::Thread",
+ "std::time::Instant",
+ "std::io::Read",
+ "std::primitive::usize",
+ "bool",
+ # can give path and reason with an inline table
+ { path = "std::net::Ipv4Addr", reason = "no IPv4 allowed" },
+ # can use an inline table but omit reason
+ { path = "std::net::TcpListener" },
+]
diff --git a/src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.rs b/src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.rs
new file mode 100644
index 000000000..7f28efd67
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.rs
@@ -0,0 +1,42 @@
+#![warn(clippy::disallowed_types)]
+
+extern crate quote;
+extern crate syn;
+
+use std::sync as foo;
+use std::sync::atomic::AtomicU32;
+use std::time::Instant as Sneaky;
+
+struct HashMap;
+
+fn bad_return_type() -> fn() -> Sneaky {
+ todo!()
+}
+
+fn bad_arg_type(_: impl Fn(Sneaky) -> foo::atomic::AtomicU32) {}
+
+fn trait_obj(_: &dyn std::io::Read) {}
+
+fn full_and_single_path_prim(_: usize, _: bool) {}
+
+fn const_generics<const C: usize>() {}
+
+struct GenArg<const U: usize>([u8; U]);
+
+static BAD: foo::atomic::AtomicPtr<()> = foo::atomic::AtomicPtr::new(std::ptr::null_mut());
+
+fn ip(_: std::net::Ipv4Addr) {}
+
+fn listener(_: std::net::TcpListener) {}
+
+#[allow(clippy::diverging_sub_expression)]
+fn main() {
+ let _: std::collections::HashMap<(), ()> = std::collections::HashMap::new();
+ let _ = Sneaky::now();
+ let _ = foo::atomic::AtomicU32::new(0);
+ static FOO: std::sync::atomic::AtomicU32 = foo::atomic::AtomicU32::new(1);
+ let _: std::collections::BTreeMap<(), syn::TypePath> = Default::default();
+ let _ = syn::Ident::new("", todo!());
+ let _ = HashMap;
+ let _: usize = 64_usize;
+}
diff --git a/src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.stderr b/src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.stderr
new file mode 100644
index 000000000..e3ece799c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_disallowed_types/conf_disallowed_types.stderr
@@ -0,0 +1,132 @@
+error: `std::sync::atomic::AtomicU32` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:7:1
+ |
+LL | use std::sync::atomic::AtomicU32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::disallowed-types` implied by `-D warnings`
+
+error: `std::time::Instant` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:8:1
+ |
+LL | use std::time::Instant as Sneaky;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::time::Instant` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:12:33
+ |
+LL | fn bad_return_type() -> fn() -> Sneaky {
+ | ^^^^^^
+
+error: `std::time::Instant` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:16:28
+ |
+LL | fn bad_arg_type(_: impl Fn(Sneaky) -> foo::atomic::AtomicU32) {}
+ | ^^^^^^
+
+error: `std::sync::atomic::AtomicU32` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:16:39
+ |
+LL | fn bad_arg_type(_: impl Fn(Sneaky) -> foo::atomic::AtomicU32) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::io::Read` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:18:22
+ |
+LL | fn trait_obj(_: &dyn std::io::Read) {}
+ | ^^^^^^^^^^^^^
+
+error: `usize` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:20:33
+ |
+LL | fn full_and_single_path_prim(_: usize, _: bool) {}
+ | ^^^^^
+
+error: `bool` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:20:43
+ |
+LL | fn full_and_single_path_prim(_: usize, _: bool) {}
+ | ^^^^
+
+error: `usize` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:22:28
+ |
+LL | fn const_generics<const C: usize>() {}
+ | ^^^^^
+
+error: `usize` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:24:24
+ |
+LL | struct GenArg<const U: usize>([u8; U]);
+ | ^^^^^
+
+error: `std::net::Ipv4Addr` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:28:10
+ |
+LL | fn ip(_: std::net::Ipv4Addr) {}
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: no IPv4 allowed (from clippy.toml)
+
+error: `std::net::TcpListener` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:30:16
+ |
+LL | fn listener(_: std::net::TcpListener) {}
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::collections::HashMap` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:34:48
+ |
+LL | let _: std::collections::HashMap<(), ()> = std::collections::HashMap::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::collections::HashMap` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:34:12
+ |
+LL | let _: std::collections::HashMap<(), ()> = std::collections::HashMap::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::time::Instant` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:35:13
+ |
+LL | let _ = Sneaky::now();
+ | ^^^^^^
+
+error: `std::sync::atomic::AtomicU32` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:36:13
+ |
+LL | let _ = foo::atomic::AtomicU32::new(0);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::sync::atomic::AtomicU32` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:37:17
+ |
+LL | static FOO: std::sync::atomic::AtomicU32 = foo::atomic::AtomicU32::new(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `std::sync::atomic::AtomicU32` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:37:48
+ |
+LL | static FOO: std::sync::atomic::AtomicU32 = foo::atomic::AtomicU32::new(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: `syn::TypePath` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:38:43
+ |
+LL | let _: std::collections::BTreeMap<(), syn::TypePath> = Default::default();
+ | ^^^^^^^^^^^^^
+
+error: `syn::Ident` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:39:13
+ |
+LL | let _ = syn::Ident::new("", todo!());
+ | ^^^^^^^^^^
+
+error: `usize` is not allowed according to config
+ --> $DIR/conf_disallowed_types.rs:41:12
+ |
+LL | let _: usize = 64_usize;
+ | ^^^^^
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/clippy.toml
new file mode 100644
index 000000000..3b96f1fd0
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/clippy.toml
@@ -0,0 +1 @@
+trivial-copy-size-limit = 2
diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs
new file mode 100644
index 000000000..fb0e226f3
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs
@@ -0,0 +1,20 @@
+// normalize-stderr-test "\(\d+ byte\)" -> "(N byte)"
+// normalize-stderr-test "\(limit: \d+ byte\)" -> "(limit: N byte)"
+
+#![deny(clippy::trivially_copy_pass_by_ref)]
+
+#[derive(Copy, Clone)]
+struct Foo(u8);
+
+#[derive(Copy, Clone)]
+struct Bar(u32);
+
+fn good(a: &mut u32, b: u32, c: &Bar, d: &u32) {}
+
+fn bad(x: &u16, y: &Foo) {}
+
+fn main() {
+ let (mut a, b, c, d, x, y) = (0, 0, Bar(0), 0, 0, Foo(0));
+ good(&mut a, b, &c, &d);
+ bad(&x, &y);
+}
diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr
new file mode 100644
index 000000000..b3ef5928e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr
@@ -0,0 +1,20 @@
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/test.rs:14:11
+ |
+LL | fn bad(x: &u16, y: &Foo) {}
+ | ^^^^ help: consider passing by value instead: `u16`
+ |
+note: the lint level is defined here
+ --> $DIR/test.rs:4:9
+ |
+LL | #![deny(clippy::trivially_copy_pass_by_ref)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/test.rs:14:20
+ |
+LL | fn bad(x: &u16, y: &Foo) {}
+ | ^^^^ help: consider passing by value instead: `Foo`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_unknown_key/clippy.toml
new file mode 100644
index 000000000..554b87cc5
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/clippy.toml
@@ -0,0 +1,6 @@
+# that one is an error
+foobar = 42
+
+# that one is white-listed
+[third-party]
+clippy-feature = "nightly"
diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.rs b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
new file mode 100644
index 000000000..fe5139c47
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
@@ -0,0 +1,45 @@
+error: error reading Clippy's configuration file `$DIR/clippy.toml`: unknown field `foobar`, expected one of
+ allow-dbg-in-tests
+ allow-expect-in-tests
+ allow-unwrap-in-tests
+ allowed-scripts
+ arithmetic-allowed
+ array-size-threshold
+ avoid-breaking-exported-api
+ await-holding-invalid-types
+ blacklisted-names
+ cargo-ignore-publish
+ cognitive-complexity-threshold
+ cyclomatic-complexity-threshold
+ disallowed-methods
+ disallowed-types
+ doc-valid-idents
+ enable-raw-pointer-heuristic-for-send
+ enforced-import-renames
+ enum-variant-name-threshold
+ enum-variant-size-threshold
+ literal-representation-threshold
+ max-fn-params-bools
+ max-include-file-size
+ max-struct-bools
+ max-suggested-slice-pattern-length
+ max-trait-bounds
+ msrv
+ pass-by-value-size-limit
+ single-char-binding-names-threshold
+ standard-macro-braces
+ third-party
+ too-large-for-stack
+ too-many-arguments-threshold
+ too-many-lines-threshold
+ trivial-copy-size-limit
+ type-complexity-threshold
+ unreadable-literal-lint-fractions
+ upper-case-acronyms-aggressive
+ vec-box-size-threshold
+ verbose-bit-mask-threshold
+ warn-on-all-wildcard-imports
+ at line 5 column 1
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/clippy.toml b/src/tools/clippy/tests/ui-toml/unwrap_used/clippy.toml
new file mode 100644
index 000000000..154626ef4
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/unwrap_used/clippy.toml
@@ -0,0 +1 @@
+allow-unwrap-in-tests = true
diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs
new file mode 100644
index 000000000..0e82fb20e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs
@@ -0,0 +1,73 @@
+// compile-flags: --test
+
+#![allow(unused_mut, clippy::get_first, clippy::from_iter_instead_of_collect)]
+#![warn(clippy::unwrap_used)]
+#![deny(clippy::get_unwrap)]
+
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+
+struct GetFalsePositive {
+ arr: [u32; 3],
+}
+
+impl GetFalsePositive {
+ fn get(&self, pos: usize) -> Option<&u32> {
+ self.arr.get(pos)
+ }
+ fn get_mut(&mut self, pos: usize) -> Option<&mut u32> {
+ self.arr.get_mut(pos)
+ }
+}
+
+fn main() {
+ let mut boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let mut some_slice = &mut [0, 1, 2, 3];
+ let mut some_vec = vec![0, 1, 2, 3];
+ let mut some_vecdeque: VecDeque<_> = some_vec.iter().cloned().collect();
+ let mut some_hashmap: HashMap<u8, char> = HashMap::from_iter(vec![(1, 'a'), (2, 'b')]);
+ let mut some_btreemap: BTreeMap<u8, char> = BTreeMap::from_iter(vec![(1, 'a'), (2, 'b')]);
+ let mut false_positive = GetFalsePositive { arr: [0, 1, 2] };
+
+ {
+ // Test `get().unwrap()`
+ let _ = boxed_slice.get(1).unwrap();
+ let _ = some_slice.get(0).unwrap();
+ let _ = some_vec.get(0).unwrap();
+ let _ = some_vecdeque.get(0).unwrap();
+ let _ = some_hashmap.get(&1).unwrap();
+ let _ = some_btreemap.get(&1).unwrap();
+ #[allow(clippy::unwrap_used)]
+ let _ = false_positive.get(0).unwrap();
+ // Test with deref
+ let _: u8 = *boxed_slice.get(1).unwrap();
+ }
+
+ {
+ // Test `get_mut().unwrap()`
+ *boxed_slice.get_mut(0).unwrap() = 1;
+ *some_slice.get_mut(0).unwrap() = 1;
+ *some_vec.get_mut(0).unwrap() = 1;
+ *some_vecdeque.get_mut(0).unwrap() = 1;
+ // Check false positives
+ #[allow(clippy::unwrap_used)]
+ {
+ *some_hashmap.get_mut(&1).unwrap() = 'b';
+ *some_btreemap.get_mut(&1).unwrap() = 'b';
+ *false_positive.get_mut(0).unwrap() = 1;
+ }
+ }
+
+ {
+ // Test `get().unwrap().foo()` and `get_mut().unwrap().bar()`
+ let _ = some_vec.get(0..1).unwrap().to_vec();
+ let _ = some_vec.get_mut(0..1).unwrap().to_vec();
+ }
+}
+
+#[test]
+fn test() {
+ let boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let _ = boxed_slice.get(1).unwrap();
+}
diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr
new file mode 100644
index 000000000..6bcfa0a8b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr
@@ -0,0 +1,197 @@
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:35:17
+ |
+LL | let _ = boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]`
+ |
+note: the lint level is defined here
+ --> $DIR/unwrap_used.rs:5:9
+ |
+LL | #![deny(clippy::get_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:35:17
+ |
+LL | let _ = boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unwrap-used` implied by `-D warnings`
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:36:17
+ |
+LL | let _ = some_slice.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_slice[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:36:17
+ |
+LL | let _ = some_slice.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:37:17
+ |
+LL | let _ = some_vec.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vec[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:37:17
+ |
+LL | let _ = some_vec.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:38:17
+ |
+LL | let _ = some_vecdeque.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vecdeque[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:38:17
+ |
+LL | let _ = some_vecdeque.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a HashMap. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:39:17
+ |
+LL | let _ = some_hashmap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_hashmap[&1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:39:17
+ |
+LL | let _ = some_hashmap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a BTreeMap. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:40:17
+ |
+LL | let _ = some_btreemap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_btreemap[&1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:40:17
+ |
+LL | let _ = some_btreemap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:44:21
+ |
+LL | let _: u8 = *boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:44:22
+ |
+LL | let _: u8 = *boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:49:9
+ |
+LL | *boxed_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:49:10
+ |
+LL | *boxed_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:50:9
+ |
+LL | *some_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_slice[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:50:10
+ |
+LL | *some_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:51:9
+ |
+LL | *some_vec.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:51:10
+ |
+LL | *some_vec.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:52:9
+ |
+LL | *some_vecdeque.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vecdeque[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:52:10
+ |
+LL | *some_vecdeque.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:64:17
+ |
+LL | let _ = some_vec.get(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:64:17
+ |
+LL | let _ = some_vec.get(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:65:17
+ |
+LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap_used.rs:65:17
+ |
+LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/unwrap_used.rs:72:13
+ |
+LL | let _ = boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]`
+
+error: aborting due to 27 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/update-all-references.sh b/src/tools/clippy/tests/ui-toml/update-all-references.sh
new file mode 100755
index 000000000..4391499a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/update-all-references.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo "Please use 'cargo dev bless' instead."
diff --git a/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/clippy.toml b/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/clippy.toml
new file mode 100644
index 000000000..cc94ec53e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/clippy.toml
@@ -0,0 +1 @@
+upper-case-acronyms-aggressive = true
diff --git a/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.rs b/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.rs
new file mode 100644
index 000000000..1a5cf1b19
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.rs
@@ -0,0 +1,44 @@
+#![warn(clippy::upper_case_acronyms)]
+
+struct HTTPResponse; // not linted by default, but with cfg option
+
+struct CString; // not linted
+
+enum Flags {
+ NS, // not linted
+ CWR,
+ ECE,
+ URG,
+ ACK,
+ PSH,
+ RST,
+ SYN,
+ FIN,
+}
+
+// linted with cfg option, beware that lint suggests `GccllvmSomething` instead of
+// `GccLlvmSomething`
+struct GCCLLVMSomething;
+
+// don't warn on public items
+pub struct MIXEDCapital;
+
+pub struct FULLCAPITAL;
+
+// enum variants should not be linted if the enum is pub
+pub enum ParseError<T> {
+ FULLCAPITAL(u8),
+ MIXEDCapital(String),
+ Utf8(std::string::FromUtf8Error),
+ Parse(T, String),
+}
+
+// private, do lint here
+enum ParseErrorPrivate<T> {
+ WASD(u8),
+ WASDMixed(String),
+ Utf8(std::string::FromUtf8Error),
+ Parse(T, String),
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.stderr b/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.stderr
new file mode 100644
index 000000000..02f29bbef
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/upper_case_acronyms_aggressive/upper_case_acronyms.stderr
@@ -0,0 +1,82 @@
+error: name `HTTPResponse` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:3:8
+ |
+LL | struct HTTPResponse; // not linted by default, but with cfg option
+ | ^^^^^^^^^^^^ help: consider making the acronym lowercase, except the initial letter: `HttpResponse`
+ |
+ = note: `-D clippy::upper-case-acronyms` implied by `-D warnings`
+
+error: name `NS` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:8:5
+ |
+LL | NS, // not linted
+ | ^^ help: consider making the acronym lowercase, except the initial letter (notice the capitalization): `Ns`
+
+error: name `CWR` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:9:5
+ |
+LL | CWR,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Cwr`
+
+error: name `ECE` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:10:5
+ |
+LL | ECE,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Ece`
+
+error: name `URG` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:11:5
+ |
+LL | URG,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Urg`
+
+error: name `ACK` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:12:5
+ |
+LL | ACK,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter (notice the capitalization): `Ack`
+
+error: name `PSH` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:13:5
+ |
+LL | PSH,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Psh`
+
+error: name `RST` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:14:5
+ |
+LL | RST,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Rst`
+
+error: name `SYN` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:15:5
+ |
+LL | SYN,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Syn`
+
+error: name `FIN` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:16:5
+ |
+LL | FIN,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Fin`
+
+error: name `GCCLLVMSomething` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:21:8
+ |
+LL | struct GCCLLVMSomething;
+ | ^^^^^^^^^^^^^^^^ help: consider making the acronym lowercase, except the initial letter: `GccllvmSomething`
+
+error: name `WASD` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:38:5
+ |
+LL | WASD(u8),
+ | ^^^^ help: consider making the acronym lowercase, except the initial letter: `Wasd`
+
+error: name `WASDMixed` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:39:5
+ |
+LL | WASDMixed(String),
+ | ^^^^^^^^^ help: consider making the acronym lowercase, except the initial letter: `WasdMixed`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/vec_box_sized/clippy.toml b/src/tools/clippy/tests/ui-toml/vec_box_sized/clippy.toml
new file mode 100644
index 000000000..039ea47fc
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/vec_box_sized/clippy.toml
@@ -0,0 +1 @@
+vec-box-size-threshold = 4
diff --git a/src/tools/clippy/tests/ui-toml/vec_box_sized/test.rs b/src/tools/clippy/tests/ui-toml/vec_box_sized/test.rs
new file mode 100644
index 000000000..bf04bee16
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/vec_box_sized/test.rs
@@ -0,0 +1,15 @@
+struct S {
+ x: u64,
+}
+
+struct C {
+ y: u16,
+}
+
+struct Foo(Vec<Box<u8>>);
+struct Bar(Vec<Box<u32>>);
+struct Baz(Vec<Box<(u32, u32)>>);
+struct BarBaz(Vec<Box<S>>);
+struct FooBarBaz(Vec<Box<C>>);
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui-toml/vec_box_sized/test.stderr b/src/tools/clippy/tests/ui-toml/vec_box_sized/test.stderr
new file mode 100644
index 000000000..cf194de3c
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/vec_box_sized/test.stderr
@@ -0,0 +1,22 @@
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/test.rs:9:12
+ |
+LL | struct Foo(Vec<Box<u8>>);
+ | ^^^^^^^^^^^^ help: try: `Vec<u8>`
+ |
+ = note: `-D clippy::vec-box` implied by `-D warnings`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/test.rs:10:12
+ |
+LL | struct Bar(Vec<Box<u32>>);
+ | ^^^^^^^^^^^^^ help: try: `Vec<u32>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/test.rs:13:18
+ |
+LL | struct FooBarBaz(Vec<Box<C>>);
+ | ^^^^^^^^^^^ help: try: `Vec<C>`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/zero_single_char_names/clippy.toml b/src/tools/clippy/tests/ui-toml/zero_single_char_names/clippy.toml
new file mode 100644
index 000000000..42a1067b9
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/zero_single_char_names/clippy.toml
@@ -0,0 +1 @@
+single-char-binding-names-threshold = 0
diff --git a/src/tools/clippy/tests/ui-toml/zero_single_char_names/zero_single_char_names.rs b/src/tools/clippy/tests/ui-toml/zero_single_char_names/zero_single_char_names.rs
new file mode 100644
index 000000000..22aaa242b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/zero_single_char_names/zero_single_char_names.rs
@@ -0,0 +1,3 @@
+#![warn(clippy::many_single_char_names)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/absurd-extreme-comparisons.rs b/src/tools/clippy/tests/ui/absurd-extreme-comparisons.rs
new file mode 100644
index 000000000..f682b280c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/absurd-extreme-comparisons.rs
@@ -0,0 +1,61 @@
+#![warn(clippy::absurd_extreme_comparisons)]
+#![allow(
+ unused,
+ clippy::eq_op,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::needless_pass_by_value
+)]
+
+#[rustfmt::skip]
+fn main() {
+ const Z: u32 = 0;
+ let u: u32 = 42;
+ u <= 0;
+ u <= Z;
+ u < Z;
+ Z >= u;
+ Z > u;
+ u > u32::MAX;
+ u >= u32::MAX;
+ u32::MAX < u;
+ u32::MAX <= u;
+ 1-1 > u;
+ u >= !0;
+ u <= 12 - 2*6;
+ let i: i8 = 0;
+ i < -127 - 1;
+ i8::MAX >= i;
+ 3-7 < i32::MIN;
+ let b = false;
+ b >= true;
+ false > b;
+ u > 0; // ok
+ // this is handled by clippy::unit_cmp
+ () < {};
+}
+
+use std::cmp::{Ordering, PartialEq, PartialOrd};
+
+#[derive(PartialEq, Eq, PartialOrd)]
+pub struct U(u64);
+
+impl PartialEq<u32> for U {
+ fn eq(&self, other: &u32) -> bool {
+ self.eq(&U(u64::from(*other)))
+ }
+}
+impl PartialOrd<u32> for U {
+ fn partial_cmp(&self, other: &u32) -> Option<Ordering> {
+ self.partial_cmp(&U(u64::from(*other)))
+ }
+}
+
+pub fn foo(val: U) -> bool {
+ val > u32::MAX
+}
+
+pub fn bar(len: u64) -> bool {
+ // This is OK as we are casting from target sized to fixed size
+ len >= usize::MAX as u64
+}
diff --git a/src/tools/clippy/tests/ui/absurd-extreme-comparisons.stderr b/src/tools/clippy/tests/ui/absurd-extreme-comparisons.stderr
new file mode 100644
index 000000000..6de554378
--- /dev/null
+++ b/src/tools/clippy/tests/ui/absurd-extreme-comparisons.stderr
@@ -0,0 +1,147 @@
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:14:5
+ |
+LL | u <= 0;
+ | ^^^^^^
+ |
+ = note: `-D clippy::absurd-extreme-comparisons` implied by `-D warnings`
+ = help: because `0` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `u == 0` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:15:5
+ |
+LL | u <= Z;
+ | ^^^^^^
+ |
+ = help: because `Z` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `u == Z` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:16:5
+ |
+LL | u < Z;
+ | ^^^^^
+ |
+ = help: because `Z` is the minimum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:17:5
+ |
+LL | Z >= u;
+ | ^^^^^^
+ |
+ = help: because `Z` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `Z == u` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:18:5
+ |
+LL | Z > u;
+ | ^^^^^
+ |
+ = help: because `Z` is the minimum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:19:5
+ |
+LL | u > u32::MAX;
+ | ^^^^^^^^^^^^
+ |
+ = help: because `u32::MAX` is the maximum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:20:5
+ |
+LL | u >= u32::MAX;
+ | ^^^^^^^^^^^^^
+ |
+ = help: because `u32::MAX` is the maximum value for this type, the case where the two sides are not equal never occurs, consider using `u == u32::MAX` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:21:5
+ |
+LL | u32::MAX < u;
+ | ^^^^^^^^^^^^
+ |
+ = help: because `u32::MAX` is the maximum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:22:5
+ |
+LL | u32::MAX <= u;
+ | ^^^^^^^^^^^^^
+ |
+ = help: because `u32::MAX` is the maximum value for this type, the case where the two sides are not equal never occurs, consider using `u32::MAX == u` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:23:5
+ |
+LL | 1-1 > u;
+ | ^^^^^^^
+ |
+ = help: because `1-1` is the minimum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:24:5
+ |
+LL | u >= !0;
+ | ^^^^^^^
+ |
+ = help: because `!0` is the maximum value for this type, the case where the two sides are not equal never occurs, consider using `u == !0` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:25:5
+ |
+LL | u <= 12 - 2*6;
+ | ^^^^^^^^^^^^^
+ |
+ = help: because `12 - 2*6` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `u == 12 - 2*6` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:27:5
+ |
+LL | i < -127 - 1;
+ | ^^^^^^^^^^^^
+ |
+ = help: because `-127 - 1` is the minimum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:28:5
+ |
+LL | i8::MAX >= i;
+ | ^^^^^^^^^^^^
+ |
+ = help: because `i8::MAX` is the maximum value for this type, this comparison is always true
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:29:5
+ |
+LL | 3-7 < i32::MIN;
+ | ^^^^^^^^^^^^^^
+ |
+ = help: because `i32::MIN` is the minimum value for this type, this comparison is always false
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:31:5
+ |
+LL | b >= true;
+ | ^^^^^^^^^
+ |
+ = help: because `true` is the maximum value for this type, the case where the two sides are not equal never occurs, consider using `b == true` instead
+
+error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false
+ --> $DIR/absurd-extreme-comparisons.rs:32:5
+ |
+LL | false > b;
+ | ^^^^^^^^^
+ |
+ = help: because `false` is the minimum value for this type, this comparison is always false
+
+error: <-comparison of unit values detected. This will always be false
+ --> $DIR/absurd-extreme-comparisons.rs:35:5
+ |
+LL | () < {};
+ | ^^^^^^^
+ |
+ = note: `#[deny(clippy::unit_cmp)]` on by default
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/allow_attributes_without_reason.rs b/src/tools/clippy/tests/ui/allow_attributes_without_reason.rs
new file mode 100644
index 000000000..1a0d4e886
--- /dev/null
+++ b/src/tools/clippy/tests/ui/allow_attributes_without_reason.rs
@@ -0,0 +1,14 @@
+#![feature(lint_reasons)]
+#![deny(clippy::allow_attributes_without_reason)]
+
+// These should trigger the lint
+#[allow(dead_code)]
+#[allow(dead_code, deprecated)]
+// These should be fine
+#[allow(dead_code, reason = "This should be allowed")]
+#[warn(dyn_drop, reason = "Warnings can also have reasons")]
+#[warn(deref_nullptr)]
+#[deny(deref_nullptr)]
+#[forbid(deref_nullptr)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/allow_attributes_without_reason.stderr b/src/tools/clippy/tests/ui/allow_attributes_without_reason.stderr
new file mode 100644
index 000000000..cd040a144
--- /dev/null
+++ b/src/tools/clippy/tests/ui/allow_attributes_without_reason.stderr
@@ -0,0 +1,23 @@
+error: `allow` attribute without specifying a reason
+ --> $DIR/allow_attributes_without_reason.rs:5:1
+ |
+LL | #[allow(dead_code)]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/allow_attributes_without_reason.rs:2:9
+ |
+LL | #![deny(clippy::allow_attributes_without_reason)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: try adding a reason at the end with `, reason = ".."`
+
+error: `allow` attribute without specifying a reason
+ --> $DIR/allow_attributes_without_reason.rs:6:1
+ |
+LL | #[allow(dead_code, deprecated)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: try adding a reason at the end with `, reason = ".."`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/almost_complete_letter_range.fixed b/src/tools/clippy/tests/ui/almost_complete_letter_range.fixed
new file mode 100644
index 000000000..e69b40f35
--- /dev/null
+++ b/src/tools/clippy/tests/ui/almost_complete_letter_range.fixed
@@ -0,0 +1,67 @@
+// run-rustfix
+// edition:2018
+
+#![feature(custom_inner_attributes)]
+#![feature(exclusive_range_pattern)]
+#![feature(stmt_expr_attributes)]
+#![warn(clippy::almost_complete_letter_range)]
+#![allow(ellipsis_inclusive_range_patterns)]
+#![allow(clippy::needless_parens_on_range_literals)]
+
+macro_rules! a {
+ () => {
+ 'a'
+ };
+}
+
+fn main() {
+ #[rustfmt::skip]
+ {
+ let _ = ('a') ..='z';
+ let _ = 'A' ..= ('Z');
+ }
+
+ let _ = 'b'..'z';
+ let _ = 'B'..'Z';
+
+ let _ = (b'a')..=(b'z');
+ let _ = b'A'..=b'Z';
+
+ let _ = b'b'..b'z';
+ let _ = b'B'..b'Z';
+
+ let _ = a!()..='z';
+
+ let _ = match 0u8 {
+ b'a'..=b'z' if true => 1,
+ b'A'..=b'Z' if true => 2,
+ b'b'..b'z' => 3,
+ b'B'..b'Z' => 4,
+ _ => 5,
+ };
+
+ let _ = match 'x' {
+ 'a'..='z' if true => 1,
+ 'A'..='Z' if true => 2,
+ 'b'..'z' => 3,
+ 'B'..'Z' => 4,
+ _ => 5,
+ };
+}
+
+fn _under_msrv() {
+ #![clippy::msrv = "1.25"]
+ let _ = match 'a' {
+ 'a'...'z' => 1,
+ _ => 2,
+ };
+}
+
+fn _meets_msrv() {
+ #![clippy::msrv = "1.26"]
+ let _ = 'a'..='z';
+ let _ = match 'a' {
+ 'a'..='z' => 1,
+ _ => 2,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/almost_complete_letter_range.rs b/src/tools/clippy/tests/ui/almost_complete_letter_range.rs
new file mode 100644
index 000000000..f2240981d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/almost_complete_letter_range.rs
@@ -0,0 +1,67 @@
+// run-rustfix
+// edition:2018
+
+#![feature(custom_inner_attributes)]
+#![feature(exclusive_range_pattern)]
+#![feature(stmt_expr_attributes)]
+#![warn(clippy::almost_complete_letter_range)]
+#![allow(ellipsis_inclusive_range_patterns)]
+#![allow(clippy::needless_parens_on_range_literals)]
+
+macro_rules! a {
+ () => {
+ 'a'
+ };
+}
+
+fn main() {
+ #[rustfmt::skip]
+ {
+ let _ = ('a') ..'z';
+ let _ = 'A' .. ('Z');
+ }
+
+ let _ = 'b'..'z';
+ let _ = 'B'..'Z';
+
+ let _ = (b'a')..(b'z');
+ let _ = b'A'..b'Z';
+
+ let _ = b'b'..b'z';
+ let _ = b'B'..b'Z';
+
+ let _ = a!()..'z';
+
+ let _ = match 0u8 {
+ b'a'..b'z' if true => 1,
+ b'A'..b'Z' if true => 2,
+ b'b'..b'z' => 3,
+ b'B'..b'Z' => 4,
+ _ => 5,
+ };
+
+ let _ = match 'x' {
+ 'a'..'z' if true => 1,
+ 'A'..'Z' if true => 2,
+ 'b'..'z' => 3,
+ 'B'..'Z' => 4,
+ _ => 5,
+ };
+}
+
+fn _under_msrv() {
+ #![clippy::msrv = "1.25"]
+ let _ = match 'a' {
+ 'a'..'z' => 1,
+ _ => 2,
+ };
+}
+
+fn _meets_msrv() {
+ #![clippy::msrv = "1.26"]
+ let _ = 'a'..'z';
+ let _ = match 'a' {
+ 'a'..'z' => 1,
+ _ => 2,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/almost_complete_letter_range.stderr b/src/tools/clippy/tests/ui/almost_complete_letter_range.stderr
new file mode 100644
index 000000000..5b5dc40ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/almost_complete_letter_range.stderr
@@ -0,0 +1,100 @@
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:20:17
+ |
+LL | let _ = ('a') ..'z';
+ | ^^^^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
+ |
+ = note: `-D clippy::almost-complete-letter-range` implied by `-D warnings`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:21:17
+ |
+LL | let _ = 'A' .. ('Z');
+ | ^^^^--^^^^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:27:13
+ |
+LL | let _ = (b'a')..(b'z');
+ | ^^^^^^--^^^^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:28:13
+ |
+LL | let _ = b'A'..b'Z';
+ | ^^^^--^^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:33:13
+ |
+LL | let _ = a!()..'z';
+ | ^^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:36:9
+ |
+LL | b'a'..b'z' if true => 1,
+ | ^^^^--^^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:37:9
+ |
+LL | b'A'..b'Z' if true => 2,
+ | ^^^^--^^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:44:9
+ |
+LL | 'a'..'z' if true => 1,
+ | ^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:45:9
+ |
+LL | 'A'..'Z' if true => 2,
+ | ^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:55:9
+ |
+LL | 'a'..'z' => 1,
+ | ^^^--^^^
+ | |
+ | help: use an inclusive range: `...`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:62:13
+ |
+LL | let _ = 'a'..'z';
+ | ^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: almost complete ascii letter range
+ --> $DIR/almost_complete_letter_range.rs:64:9
+ |
+LL | 'a'..'z' => 1,
+ | ^^^--^^^
+ | |
+ | help: use an inclusive range: `..=`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/approx_const.rs b/src/tools/clippy/tests/ui/approx_const.rs
new file mode 100644
index 000000000..ccdbd34f7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/approx_const.rs
@@ -0,0 +1,64 @@
+#[warn(clippy::approx_constant)]
+#[allow(clippy::similar_names)]
+fn main() {
+ let my_e = 2.7182;
+ let almost_e = 2.718;
+ let no_e = 2.71;
+
+ let my_1_frac_pi = 0.3183;
+ let no_1_frac_pi = 0.31;
+
+ let my_frac_1_sqrt_2 = 0.70710678;
+ let almost_frac_1_sqrt_2 = 0.70711;
+ let my_frac_1_sqrt_2 = 0.707;
+
+ let my_frac_2_pi = 0.63661977;
+ let no_frac_2_pi = 0.636;
+
+ let my_frac_2_sq_pi = 1.128379;
+ let no_frac_2_sq_pi = 1.128;
+
+ let my_frac_pi_2 = 1.57079632679;
+ let no_frac_pi_2 = 1.5705;
+
+ let my_frac_pi_3 = 1.04719755119;
+ let no_frac_pi_3 = 1.047;
+
+ let my_frac_pi_4 = 0.785398163397;
+ let no_frac_pi_4 = 0.785;
+
+ let my_frac_pi_6 = 0.523598775598;
+ let no_frac_pi_6 = 0.523;
+
+ let my_frac_pi_8 = 0.3926990816987;
+ let no_frac_pi_8 = 0.392;
+
+ let my_ln_10 = 2.302585092994046;
+ let no_ln_10 = 2.303;
+
+ let my_ln_2 = 0.6931471805599453;
+ let no_ln_2 = 0.693;
+
+ let my_log10_e = 0.4342944819032518;
+ let no_log10_e = 0.434;
+
+ let my_log2_e = 1.4426950408889634;
+ let no_log2_e = 1.442;
+
+ let log2_10 = 3.321928094887362;
+ let no_log2_10 = 3.321;
+
+ let log10_2 = 0.301029995663981;
+ let no_log10_2 = 0.301;
+
+ let my_pi = 3.1415;
+ let almost_pi = 3.14;
+ let no_pi = 3.15;
+
+ let my_sq2 = 1.4142;
+ let no_sq2 = 1.414;
+
+ let my_tau = 6.2832;
+ let almost_tau = 6.28;
+ let no_tau = 6.3;
+}
diff --git a/src/tools/clippy/tests/ui/approx_const.stderr b/src/tools/clippy/tests/ui/approx_const.stderr
new file mode 100644
index 000000000..4da1b8215
--- /dev/null
+++ b/src/tools/clippy/tests/ui/approx_const.stderr
@@ -0,0 +1,187 @@
+error: approximate value of `f{32, 64}::consts::E` found
+ --> $DIR/approx_const.rs:4:16
+ |
+LL | let my_e = 2.7182;
+ | ^^^^^^
+ |
+ = note: `-D clippy::approx-constant` implied by `-D warnings`
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::E` found
+ --> $DIR/approx_const.rs:5:20
+ |
+LL | let almost_e = 2.718;
+ | ^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_1_PI` found
+ --> $DIR/approx_const.rs:8:24
+ |
+LL | let my_1_frac_pi = 0.3183;
+ | ^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_1_SQRT_2` found
+ --> $DIR/approx_const.rs:11:28
+ |
+LL | let my_frac_1_sqrt_2 = 0.70710678;
+ | ^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_1_SQRT_2` found
+ --> $DIR/approx_const.rs:12:32
+ |
+LL | let almost_frac_1_sqrt_2 = 0.70711;
+ | ^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_2_PI` found
+ --> $DIR/approx_const.rs:15:24
+ |
+LL | let my_frac_2_pi = 0.63661977;
+ | ^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_2_SQRT_PI` found
+ --> $DIR/approx_const.rs:18:27
+ |
+LL | let my_frac_2_sq_pi = 1.128379;
+ | ^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_PI_2` found
+ --> $DIR/approx_const.rs:21:24
+ |
+LL | let my_frac_pi_2 = 1.57079632679;
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_PI_3` found
+ --> $DIR/approx_const.rs:24:24
+ |
+LL | let my_frac_pi_3 = 1.04719755119;
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_PI_4` found
+ --> $DIR/approx_const.rs:27:24
+ |
+LL | let my_frac_pi_4 = 0.785398163397;
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_PI_6` found
+ --> $DIR/approx_const.rs:30:24
+ |
+LL | let my_frac_pi_6 = 0.523598775598;
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::FRAC_PI_8` found
+ --> $DIR/approx_const.rs:33:24
+ |
+LL | let my_frac_pi_8 = 0.3926990816987;
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::LN_10` found
+ --> $DIR/approx_const.rs:36:20
+ |
+LL | let my_ln_10 = 2.302585092994046;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::LN_2` found
+ --> $DIR/approx_const.rs:39:19
+ |
+LL | let my_ln_2 = 0.6931471805599453;
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::LOG10_E` found
+ --> $DIR/approx_const.rs:42:22
+ |
+LL | let my_log10_e = 0.4342944819032518;
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::LOG2_E` found
+ --> $DIR/approx_const.rs:45:21
+ |
+LL | let my_log2_e = 1.4426950408889634;
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::LOG2_10` found
+ --> $DIR/approx_const.rs:48:19
+ |
+LL | let log2_10 = 3.321928094887362;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::LOG10_2` found
+ --> $DIR/approx_const.rs:51:19
+ |
+LL | let log10_2 = 0.301029995663981;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::PI` found
+ --> $DIR/approx_const.rs:54:17
+ |
+LL | let my_pi = 3.1415;
+ | ^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::PI` found
+ --> $DIR/approx_const.rs:55:21
+ |
+LL | let almost_pi = 3.14;
+ | ^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::SQRT_2` found
+ --> $DIR/approx_const.rs:58:18
+ |
+LL | let my_sq2 = 1.4142;
+ | ^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::TAU` found
+ --> $DIR/approx_const.rs:61:18
+ |
+LL | let my_tau = 6.2832;
+ | ^^^^^^
+ |
+ = help: consider using the constant directly
+
+error: approximate value of `f{32, 64}::consts::TAU` found
+ --> $DIR/approx_const.rs:62:22
+ |
+LL | let almost_tau = 6.28;
+ | ^^^^
+ |
+ = help: consider using the constant directly
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/arithmetic.fixed b/src/tools/clippy/tests/ui/arithmetic.fixed
new file mode 100644
index 000000000..a2a1c4394
--- /dev/null
+++ b/src/tools/clippy/tests/ui/arithmetic.fixed
@@ -0,0 +1,27 @@
+// run-rustfix
+
+#![allow(clippy::unnecessary_owned_empty_strings)]
+#![feature(saturating_int_impl)]
+#![warn(clippy::arithmetic)]
+
+use core::num::{Saturating, Wrapping};
+
+pub fn hard_coded_allowed() {
+ let _ = Saturating(0u32) + Saturating(0u32);
+ let _ = String::new() + "";
+ let _ = Wrapping(0u32) + Wrapping(0u32);
+
+ let saturating: Saturating<u32> = Saturating(0u32);
+ let string: String = String::new();
+ let wrapping: Wrapping<u32> = Wrapping(0u32);
+
+ let inferred_saturating = saturating + saturating;
+ let inferred_string = string + "";
+ let inferred_wrapping = wrapping + wrapping;
+
+ let _ = inferred_saturating + inferred_saturating;
+ let _ = inferred_string + "";
+ let _ = inferred_wrapping + inferred_wrapping;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/arithmetic.rs b/src/tools/clippy/tests/ui/arithmetic.rs
new file mode 100644
index 000000000..a2a1c4394
--- /dev/null
+++ b/src/tools/clippy/tests/ui/arithmetic.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+
+#![allow(clippy::unnecessary_owned_empty_strings)]
+#![feature(saturating_int_impl)]
+#![warn(clippy::arithmetic)]
+
+use core::num::{Saturating, Wrapping};
+
+pub fn hard_coded_allowed() {
+ let _ = Saturating(0u32) + Saturating(0u32);
+ let _ = String::new() + "";
+ let _ = Wrapping(0u32) + Wrapping(0u32);
+
+ let saturating: Saturating<u32> = Saturating(0u32);
+ let string: String = String::new();
+ let wrapping: Wrapping<u32> = Wrapping(0u32);
+
+ let inferred_saturating = saturating + saturating;
+ let inferred_string = string + "";
+ let inferred_wrapping = wrapping + wrapping;
+
+ let _ = inferred_saturating + inferred_saturating;
+ let _ = inferred_string + "";
+ let _ = inferred_wrapping + inferred_wrapping;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/as_conversions.rs b/src/tools/clippy/tests/ui/as_conversions.rs
new file mode 100644
index 000000000..ba4394def
--- /dev/null
+++ b/src/tools/clippy/tests/ui/as_conversions.rs
@@ -0,0 +1,20 @@
+// aux-build:macro_rules.rs
+
+#![warn(clippy::as_conversions)]
+#![allow(clippy::borrow_as_ptr)]
+
+#[macro_use]
+extern crate macro_rules;
+
+fn with_external_macro() {
+ as_conv_with_arg!(0u32 as u64);
+ as_conv!();
+}
+
+fn main() {
+ let i = 0u32 as u64;
+
+ let j = &i as *const u64 as *mut u64;
+
+ with_external_macro();
+}
diff --git a/src/tools/clippy/tests/ui/as_conversions.stderr b/src/tools/clippy/tests/ui/as_conversions.stderr
new file mode 100644
index 000000000..d11b56171
--- /dev/null
+++ b/src/tools/clippy/tests/ui/as_conversions.stderr
@@ -0,0 +1,27 @@
+error: using a potentially dangerous silent `as` conversion
+ --> $DIR/as_conversions.rs:15:13
+ |
+LL | let i = 0u32 as u64;
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::as-conversions` implied by `-D warnings`
+ = help: consider using a safe wrapper for this conversion
+
+error: using a potentially dangerous silent `as` conversion
+ --> $DIR/as_conversions.rs:17:13
+ |
+LL | let j = &i as *const u64 as *mut u64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a safe wrapper for this conversion
+
+error: using a potentially dangerous silent `as` conversion
+ --> $DIR/as_conversions.rs:17:13
+ |
+LL | let j = &i as *const u64 as *mut u64;
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a safe wrapper for this conversion
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/as_underscore.fixed b/src/tools/clippy/tests/ui/as_underscore.fixed
new file mode 100644
index 000000000..948f6d8e6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/as_underscore.fixed
@@ -0,0 +1,13 @@
+// run-rustfix
+
+#![warn(clippy::as_underscore)]
+
+fn foo(_n: usize) {}
+
+fn main() {
+ let n: u16 = 256;
+ foo(n as usize);
+
+ let n = 0_u128;
+ let _n: u8 = n as u8;
+}
diff --git a/src/tools/clippy/tests/ui/as_underscore.rs b/src/tools/clippy/tests/ui/as_underscore.rs
new file mode 100644
index 000000000..97785ed08
--- /dev/null
+++ b/src/tools/clippy/tests/ui/as_underscore.rs
@@ -0,0 +1,13 @@
+// run-rustfix
+
+#![warn(clippy::as_underscore)]
+
+fn foo(_n: usize) {}
+
+fn main() {
+ let n: u16 = 256;
+ foo(n as _);
+
+ let n = 0_u128;
+ let _n: u8 = n as _;
+}
diff --git a/src/tools/clippy/tests/ui/as_underscore.stderr b/src/tools/clippy/tests/ui/as_underscore.stderr
new file mode 100644
index 000000000..d7cd58d96
--- /dev/null
+++ b/src/tools/clippy/tests/ui/as_underscore.stderr
@@ -0,0 +1,20 @@
+error: using `as _` conversion
+ --> $DIR/as_underscore.rs:9:9
+ |
+LL | foo(n as _);
+ | ^^^^^-
+ | |
+ | help: consider giving the type explicitly: `usize`
+ |
+ = note: `-D clippy::as-underscore` implied by `-D warnings`
+
+error: using `as _` conversion
+ --> $DIR/as_underscore.rs:12:18
+ |
+LL | let _n: u8 = n as _;
+ | ^^^^^-
+ | |
+ | help: consider giving the type explicitly: `u8`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/asm_syntax.rs b/src/tools/clippy/tests/ui/asm_syntax.rs
new file mode 100644
index 000000000..0220bf333
--- /dev/null
+++ b/src/tools/clippy/tests/ui/asm_syntax.rs
@@ -0,0 +1,34 @@
+// only-x86_64
+// ignore-aarch64
+
+#[warn(clippy::inline_asm_x86_intel_syntax)]
+mod warn_intel {
+ pub(super) unsafe fn use_asm() {
+ use std::arch::asm;
+ asm!("");
+ asm!("", options());
+ asm!("", options(nostack));
+ asm!("", options(att_syntax));
+ asm!("", options(nostack, att_syntax));
+ }
+}
+
+#[warn(clippy::inline_asm_x86_att_syntax)]
+mod warn_att {
+ pub(super) unsafe fn use_asm() {
+ use std::arch::asm;
+ asm!("");
+ asm!("", options());
+ asm!("", options(nostack));
+ asm!("", options(att_syntax));
+ asm!("", options(nostack, att_syntax));
+ }
+}
+
+#[cfg(target_arch = "x86_64")]
+fn main() {
+ unsafe {
+ warn_att::use_asm();
+ warn_intel::use_asm();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/asm_syntax.stderr b/src/tools/clippy/tests/ui/asm_syntax.stderr
new file mode 100644
index 000000000..e9b150121
--- /dev/null
+++ b/src/tools/clippy/tests/ui/asm_syntax.stderr
@@ -0,0 +1,44 @@
+error: Intel x86 assembly syntax used
+ --> $DIR/asm_syntax.rs:8:9
+ |
+LL | asm!("");
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::inline-asm-x86-intel-syntax` implied by `-D warnings`
+ = help: use AT&T x86 assembly syntax
+
+error: Intel x86 assembly syntax used
+ --> $DIR/asm_syntax.rs:9:9
+ |
+LL | asm!("", options());
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use AT&T x86 assembly syntax
+
+error: Intel x86 assembly syntax used
+ --> $DIR/asm_syntax.rs:10:9
+ |
+LL | asm!("", options(nostack));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use AT&T x86 assembly syntax
+
+error: AT&T x86 assembly syntax used
+ --> $DIR/asm_syntax.rs:23:9
+ |
+LL | asm!("", options(att_syntax));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::inline-asm-x86-att-syntax` implied by `-D warnings`
+ = help: use Intel x86 assembly syntax
+
+error: AT&T x86 assembly syntax used
+ --> $DIR/asm_syntax.rs:24:9
+ |
+LL | asm!("", options(nostack, att_syntax));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use Intel x86 assembly syntax
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/assertions_on_constants.rs b/src/tools/clippy/tests/ui/assertions_on_constants.rs
new file mode 100644
index 000000000..7bea9563d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assertions_on_constants.rs
@@ -0,0 +1,39 @@
+#![allow(non_fmt_panics, clippy::needless_bool)]
+
+macro_rules! assert_const {
+ ($len:expr) => {
+ assert!($len > 0);
+ debug_assert!($len < 0);
+ };
+}
+fn main() {
+ assert!(true);
+ assert!(false);
+ assert!(true, "true message");
+ assert!(false, "false message");
+
+ let msg = "panic message";
+ assert!(false, "{}", msg.to_uppercase());
+
+ const B: bool = true;
+ assert!(B);
+
+ const C: bool = false;
+ assert!(C);
+ assert!(C, "C message");
+
+ debug_assert!(true);
+ // Don't lint this, since there is no better way for expressing "Only panic in debug mode".
+ debug_assert!(false); // #3948
+ assert_const!(3);
+ assert_const!(-1);
+
+ // Don't lint if based on `cfg!(..)`:
+ assert!(cfg!(feature = "hey") || cfg!(not(feature = "asdf")));
+
+ let flag: bool = cfg!(not(feature = "asdf"));
+ assert!(flag);
+
+ const CFG_FLAG: &bool = &cfg!(feature = "hey");
+ assert!(!CFG_FLAG);
+}
diff --git a/src/tools/clippy/tests/ui/assertions_on_constants.stderr b/src/tools/clippy/tests/ui/assertions_on_constants.stderr
new file mode 100644
index 000000000..e1f818814
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assertions_on_constants.stderr
@@ -0,0 +1,75 @@
+error: `assert!(true)` will be optimized out by the compiler
+ --> $DIR/assertions_on_constants.rs:10:5
+ |
+LL | assert!(true);
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::assertions-on-constants` implied by `-D warnings`
+ = help: remove it
+
+error: `assert!(false)` should probably be replaced
+ --> $DIR/assertions_on_constants.rs:11:5
+ |
+LL | assert!(false);
+ | ^^^^^^^^^^^^^^
+ |
+ = help: use `panic!()` or `unreachable!()`
+
+error: `assert!(true)` will be optimized out by the compiler
+ --> $DIR/assertions_on_constants.rs:12:5
+ |
+LL | assert!(true, "true message");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: remove it
+
+error: `assert!(false, ..)` should probably be replaced
+ --> $DIR/assertions_on_constants.rs:13:5
+ |
+LL | assert!(false, "false message");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `panic!(..)` or `unreachable!(..)`
+
+error: `assert!(false, ..)` should probably be replaced
+ --> $DIR/assertions_on_constants.rs:16:5
+ |
+LL | assert!(false, "{}", msg.to_uppercase());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `panic!(..)` or `unreachable!(..)`
+
+error: `assert!(true)` will be optimized out by the compiler
+ --> $DIR/assertions_on_constants.rs:19:5
+ |
+LL | assert!(B);
+ | ^^^^^^^^^^
+ |
+ = help: remove it
+
+error: `assert!(false)` should probably be replaced
+ --> $DIR/assertions_on_constants.rs:22:5
+ |
+LL | assert!(C);
+ | ^^^^^^^^^^
+ |
+ = help: use `panic!()` or `unreachable!()`
+
+error: `assert!(false, ..)` should probably be replaced
+ --> $DIR/assertions_on_constants.rs:23:5
+ |
+LL | assert!(C, "C message");
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `panic!(..)` or `unreachable!(..)`
+
+error: `debug_assert!(true)` will be optimized out by the compiler
+ --> $DIR/assertions_on_constants.rs:25:5
+ |
+LL | debug_assert!(true);
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: remove it
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/assertions_on_result_states.fixed b/src/tools/clippy/tests/ui/assertions_on_result_states.fixed
new file mode 100644
index 000000000..7bde72e4b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assertions_on_result_states.fixed
@@ -0,0 +1,69 @@
+// run-rustfix
+#![warn(clippy::assertions_on_result_states)]
+
+use std::result::Result;
+
+struct Foo;
+
+#[derive(Debug)]
+struct DebugFoo;
+
+#[derive(Copy, Clone, Debug)]
+struct CopyFoo;
+
+macro_rules! get_ok_macro {
+ () => {
+ Ok::<_, DebugFoo>(Foo)
+ };
+}
+
+fn main() {
+ // test ok
+ let r: Result<Foo, DebugFoo> = Ok(Foo);
+ debug_assert!(r.is_ok());
+ r.unwrap();
+
+ // test ok with non-debug error type
+ let r: Result<Foo, Foo> = Ok(Foo);
+ assert!(r.is_ok());
+
+ // test temporary ok
+ fn get_ok() -> Result<Foo, DebugFoo> {
+ Ok(Foo)
+ }
+ get_ok().unwrap();
+
+ // test macro ok
+ get_ok_macro!().unwrap();
+
+ // test ok that shouldn't be moved
+ let r: Result<CopyFoo, DebugFoo> = Ok(CopyFoo);
+ fn test_ref_unmoveable_ok(r: &Result<CopyFoo, DebugFoo>) {
+ assert!(r.is_ok());
+ }
+ test_ref_unmoveable_ok(&r);
+ assert!(r.is_ok());
+ r.unwrap();
+
+ // test ok that is copied
+ let r: Result<CopyFoo, CopyFoo> = Ok(CopyFoo);
+ r.unwrap();
+ r.unwrap();
+
+ // test reference to ok
+ let r: Result<CopyFoo, CopyFoo> = Ok(CopyFoo);
+ fn test_ref_copy_ok(r: &Result<CopyFoo, CopyFoo>) {
+ r.unwrap();
+ }
+ test_ref_copy_ok(&r);
+ r.unwrap();
+
+ // test err
+ let r: Result<DebugFoo, Foo> = Err(Foo);
+ debug_assert!(r.is_err());
+ r.unwrap_err();
+
+ // test err with non-debug value type
+ let r: Result<Foo, Foo> = Err(Foo);
+ assert!(r.is_err());
+}
diff --git a/src/tools/clippy/tests/ui/assertions_on_result_states.rs b/src/tools/clippy/tests/ui/assertions_on_result_states.rs
new file mode 100644
index 000000000..4c5af81ef
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assertions_on_result_states.rs
@@ -0,0 +1,69 @@
+// run-rustfix
+#![warn(clippy::assertions_on_result_states)]
+
+use std::result::Result;
+
+struct Foo;
+
+#[derive(Debug)]
+struct DebugFoo;
+
+#[derive(Copy, Clone, Debug)]
+struct CopyFoo;
+
+macro_rules! get_ok_macro {
+ () => {
+ Ok::<_, DebugFoo>(Foo)
+ };
+}
+
+fn main() {
+ // test ok
+ let r: Result<Foo, DebugFoo> = Ok(Foo);
+ debug_assert!(r.is_ok());
+ assert!(r.is_ok());
+
+ // test ok with non-debug error type
+ let r: Result<Foo, Foo> = Ok(Foo);
+ assert!(r.is_ok());
+
+ // test temporary ok
+ fn get_ok() -> Result<Foo, DebugFoo> {
+ Ok(Foo)
+ }
+ assert!(get_ok().is_ok());
+
+ // test macro ok
+ assert!(get_ok_macro!().is_ok());
+
+ // test ok that shouldn't be moved
+ let r: Result<CopyFoo, DebugFoo> = Ok(CopyFoo);
+ fn test_ref_unmoveable_ok(r: &Result<CopyFoo, DebugFoo>) {
+ assert!(r.is_ok());
+ }
+ test_ref_unmoveable_ok(&r);
+ assert!(r.is_ok());
+ r.unwrap();
+
+ // test ok that is copied
+ let r: Result<CopyFoo, CopyFoo> = Ok(CopyFoo);
+ assert!(r.is_ok());
+ r.unwrap();
+
+ // test reference to ok
+ let r: Result<CopyFoo, CopyFoo> = Ok(CopyFoo);
+ fn test_ref_copy_ok(r: &Result<CopyFoo, CopyFoo>) {
+ assert!(r.is_ok());
+ }
+ test_ref_copy_ok(&r);
+ r.unwrap();
+
+ // test err
+ let r: Result<DebugFoo, Foo> = Err(Foo);
+ debug_assert!(r.is_err());
+ assert!(r.is_err());
+
+ // test err with non-debug value type
+ let r: Result<Foo, Foo> = Err(Foo);
+ assert!(r.is_err());
+}
diff --git a/src/tools/clippy/tests/ui/assertions_on_result_states.stderr b/src/tools/clippy/tests/ui/assertions_on_result_states.stderr
new file mode 100644
index 000000000..13c2dd877
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assertions_on_result_states.stderr
@@ -0,0 +1,40 @@
+error: called `assert!` with `Result::is_ok`
+ --> $DIR/assertions_on_result_states.rs:24:5
+ |
+LL | assert!(r.is_ok());
+ | ^^^^^^^^^^^^^^^^^^ help: replace with: `r.unwrap()`
+ |
+ = note: `-D clippy::assertions-on-result-states` implied by `-D warnings`
+
+error: called `assert!` with `Result::is_ok`
+ --> $DIR/assertions_on_result_states.rs:34:5
+ |
+LL | assert!(get_ok().is_ok());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `get_ok().unwrap()`
+
+error: called `assert!` with `Result::is_ok`
+ --> $DIR/assertions_on_result_states.rs:37:5
+ |
+LL | assert!(get_ok_macro!().is_ok());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `get_ok_macro!().unwrap()`
+
+error: called `assert!` with `Result::is_ok`
+ --> $DIR/assertions_on_result_states.rs:50:5
+ |
+LL | assert!(r.is_ok());
+ | ^^^^^^^^^^^^^^^^^^ help: replace with: `r.unwrap()`
+
+error: called `assert!` with `Result::is_ok`
+ --> $DIR/assertions_on_result_states.rs:56:9
+ |
+LL | assert!(r.is_ok());
+ | ^^^^^^^^^^^^^^^^^^ help: replace with: `r.unwrap()`
+
+error: called `assert!` with `Result::is_err`
+ --> $DIR/assertions_on_result_states.rs:64:5
+ |
+LL | assert!(r.is_err());
+ | ^^^^^^^^^^^^^^^^^^^ help: replace with: `r.unwrap_err()`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/assign_ops.fixed b/src/tools/clippy/tests/ui/assign_ops.fixed
new file mode 100644
index 000000000..da034b51c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assign_ops.fixed
@@ -0,0 +1,32 @@
+// run-rustfix
+
+use core::num::Wrapping;
+
+#[allow(dead_code, unused_assignments)]
+#[warn(clippy::assign_op_pattern)]
+fn main() {
+ let mut a = 5;
+ a += 1;
+ a += 1;
+ a -= 1;
+ a *= 99;
+ a *= 42;
+ a /= 2;
+ a %= 5;
+ a &= 1;
+ a = 1 - a;
+ a = 5 / a;
+ a = 42 % a;
+ a = 6 << a;
+ let mut s = String::new();
+ s += "bla";
+
+ // Issue #9180
+ let mut a = Wrapping(0u32);
+ a += Wrapping(1u32);
+ let mut v = vec![0u32, 1u32];
+ v[0] += v[1];
+ let mut v = vec![Wrapping(0u32), Wrapping(1u32)];
+ v[0] = v[0] + v[1];
+ let _ = || v[0] = v[0] + v[1];
+}
diff --git a/src/tools/clippy/tests/ui/assign_ops.rs b/src/tools/clippy/tests/ui/assign_ops.rs
new file mode 100644
index 000000000..337bb02c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assign_ops.rs
@@ -0,0 +1,32 @@
+// run-rustfix
+
+use core::num::Wrapping;
+
+#[allow(dead_code, unused_assignments)]
+#[warn(clippy::assign_op_pattern)]
+fn main() {
+ let mut a = 5;
+ a = a + 1;
+ a = 1 + a;
+ a = a - 1;
+ a = a * 99;
+ a = 42 * a;
+ a = a / 2;
+ a = a % 5;
+ a = a & 1;
+ a = 1 - a;
+ a = 5 / a;
+ a = 42 % a;
+ a = 6 << a;
+ let mut s = String::new();
+ s = s + "bla";
+
+ // Issue #9180
+ let mut a = Wrapping(0u32);
+ a = a + Wrapping(1u32);
+ let mut v = vec![0u32, 1u32];
+ v[0] = v[0] + v[1];
+ let mut v = vec![Wrapping(0u32), Wrapping(1u32)];
+ v[0] = v[0] + v[1];
+ let _ = || v[0] = v[0] + v[1];
+}
diff --git a/src/tools/clippy/tests/ui/assign_ops.stderr b/src/tools/clippy/tests/ui/assign_ops.stderr
new file mode 100644
index 000000000..63a938ab4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assign_ops.stderr
@@ -0,0 +1,70 @@
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:9:5
+ |
+LL | a = a + 1;
+ | ^^^^^^^^^ help: replace it with: `a += 1`
+ |
+ = note: `-D clippy::assign-op-pattern` implied by `-D warnings`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:10:5
+ |
+LL | a = 1 + a;
+ | ^^^^^^^^^ help: replace it with: `a += 1`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:11:5
+ |
+LL | a = a - 1;
+ | ^^^^^^^^^ help: replace it with: `a -= 1`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:12:5
+ |
+LL | a = a * 99;
+ | ^^^^^^^^^^ help: replace it with: `a *= 99`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:13:5
+ |
+LL | a = 42 * a;
+ | ^^^^^^^^^^ help: replace it with: `a *= 42`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:14:5
+ |
+LL | a = a / 2;
+ | ^^^^^^^^^ help: replace it with: `a /= 2`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:15:5
+ |
+LL | a = a % 5;
+ | ^^^^^^^^^ help: replace it with: `a %= 5`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:16:5
+ |
+LL | a = a & 1;
+ | ^^^^^^^^^ help: replace it with: `a &= 1`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:22:5
+ |
+LL | s = s + "bla";
+ | ^^^^^^^^^^^^^ help: replace it with: `s += "bla"`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:26:5
+ |
+LL | a = a + Wrapping(1u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `a += Wrapping(1u32)`
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops.rs:28:5
+ |
+LL | v[0] = v[0] + v[1];
+ | ^^^^^^^^^^^^^^^^^^ help: replace it with: `v[0] += v[1]`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/assign_ops2.rs b/src/tools/clippy/tests/ui/assign_ops2.rs
new file mode 100644
index 000000000..f6d3a8fa3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assign_ops2.rs
@@ -0,0 +1,55 @@
+#[allow(unused_assignments)]
+#[warn(clippy::misrefactored_assign_op, clippy::assign_op_pattern)]
+fn main() {
+ let mut a = 5;
+ a += a + 1;
+ a += 1 + a;
+ a -= a - 1;
+ a *= a * 99;
+ a *= 42 * a;
+ a /= a / 2;
+ a %= a % 5;
+ a &= a & 1;
+ a *= a * a;
+ a = a * a * a;
+ a = a * 42 * a;
+ a = a * 2 + a;
+ a -= 1 - a;
+ a /= 5 / a;
+ a %= 42 % a;
+ a <<= 6 << a;
+}
+
+// check that we don't lint on op assign impls, because that's just the way to impl them
+
+use std::ops::{Mul, MulAssign};
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct Wrap(i64);
+
+impl Mul<i64> for Wrap {
+ type Output = Self;
+
+ fn mul(self, rhs: i64) -> Self {
+ Wrap(self.0 * rhs)
+ }
+}
+
+impl MulAssign<i64> for Wrap {
+ fn mul_assign(&mut self, rhs: i64) {
+ *self = *self * rhs
+ }
+}
+
+fn cow_add_assign() {
+ use std::borrow::Cow;
+ let mut buf = Cow::Owned(String::from("bar"));
+ let cows = Cow::Borrowed("foo");
+
+ // this can be linted
+ buf = buf + cows.clone();
+
+ // this should not as cow<str> Add is not commutative
+ buf = cows + buf;
+ println!("{}", buf);
+}
diff --git a/src/tools/clippy/tests/ui/assign_ops2.stderr b/src/tools/clippy/tests/ui/assign_ops2.stderr
new file mode 100644
index 000000000..04b1dc93d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/assign_ops2.stderr
@@ -0,0 +1,146 @@
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:5:5
+ |
+LL | a += a + 1;
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::misrefactored-assign-op` implied by `-D warnings`
+help: did you mean `a = a + 1` or `a = a + a + 1`? Consider replacing it with
+ |
+LL | a += 1;
+ | ~~~~~~
+help: or
+ |
+LL | a = a + a + 1;
+ | ~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:6:5
+ |
+LL | a += 1 + a;
+ | ^^^^^^^^^^
+ |
+help: did you mean `a = a + 1` or `a = a + 1 + a`? Consider replacing it with
+ |
+LL | a += 1;
+ | ~~~~~~
+help: or
+ |
+LL | a = a + 1 + a;
+ | ~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:7:5
+ |
+LL | a -= a - 1;
+ | ^^^^^^^^^^
+ |
+help: did you mean `a = a - 1` or `a = a - (a - 1)`? Consider replacing it with
+ |
+LL | a -= 1;
+ | ~~~~~~
+help: or
+ |
+LL | a = a - (a - 1);
+ | ~~~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:8:5
+ |
+LL | a *= a * 99;
+ | ^^^^^^^^^^^
+ |
+help: did you mean `a = a * 99` or `a = a * a * 99`? Consider replacing it with
+ |
+LL | a *= 99;
+ | ~~~~~~~
+help: or
+ |
+LL | a = a * a * 99;
+ | ~~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:9:5
+ |
+LL | a *= 42 * a;
+ | ^^^^^^^^^^^
+ |
+help: did you mean `a = a * 42` or `a = a * 42 * a`? Consider replacing it with
+ |
+LL | a *= 42;
+ | ~~~~~~~
+help: or
+ |
+LL | a = a * 42 * a;
+ | ~~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:10:5
+ |
+LL | a /= a / 2;
+ | ^^^^^^^^^^
+ |
+help: did you mean `a = a / 2` or `a = a / (a / 2)`? Consider replacing it with
+ |
+LL | a /= 2;
+ | ~~~~~~
+help: or
+ |
+LL | a = a / (a / 2);
+ | ~~~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:11:5
+ |
+LL | a %= a % 5;
+ | ^^^^^^^^^^
+ |
+help: did you mean `a = a % 5` or `a = a % (a % 5)`? Consider replacing it with
+ |
+LL | a %= 5;
+ | ~~~~~~
+help: or
+ |
+LL | a = a % (a % 5);
+ | ~~~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:12:5
+ |
+LL | a &= a & 1;
+ | ^^^^^^^^^^
+ |
+help: did you mean `a = a & 1` or `a = a & a & 1`? Consider replacing it with
+ |
+LL | a &= 1;
+ | ~~~~~~
+help: or
+ |
+LL | a = a & a & 1;
+ | ~~~~~~~~~~~~~
+
+error: variable appears on both sides of an assignment operation
+ --> $DIR/assign_ops2.rs:13:5
+ |
+LL | a *= a * a;
+ | ^^^^^^^^^^
+ |
+help: did you mean `a = a * a` or `a = a * a * a`? Consider replacing it with
+ |
+LL | a *= a;
+ | ~~~~~~
+help: or
+ |
+LL | a = a * a * a;
+ | ~~~~~~~~~~~~~
+
+error: manual implementation of an assign operation
+ --> $DIR/assign_ops2.rs:50:5
+ |
+LL | buf = buf + cows.clone();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `buf += cows.clone()`
+ |
+ = note: `-D clippy::assign-op-pattern` implied by `-D warnings`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/async_yields_async.fixed b/src/tools/clippy/tests/ui/async_yields_async.fixed
new file mode 100644
index 000000000..3cf380d2b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/async_yields_async.fixed
@@ -0,0 +1,78 @@
+// run-rustfix
+#![feature(lint_reasons)]
+#![feature(async_closure)]
+#![warn(clippy::async_yields_async)]
+
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll};
+
+struct CustomFutureType;
+
+impl Future for CustomFutureType {
+ type Output = u8;
+
+ fn poll(self: Pin<&mut Self>, _: &mut Context) -> Poll<Self::Output> {
+ Poll::Ready(3)
+ }
+}
+
+fn custom_future_type_ctor() -> CustomFutureType {
+ CustomFutureType
+}
+
+async fn f() -> CustomFutureType {
+ // Don't warn for functions since you have to explicitly declare their
+ // return types.
+ CustomFutureType
+}
+
+#[rustfmt::skip]
+fn main() {
+ let _f = {
+ 3
+ };
+ let _g = async {
+ 3
+ };
+ let _h = async {
+ async {
+ 3
+ }.await
+ };
+ let _i = async {
+ CustomFutureType.await
+ };
+ let _i = async || {
+ 3
+ };
+ let _j = async || {
+ async {
+ 3
+ }.await
+ };
+ let _k = async || {
+ CustomFutureType.await
+ };
+ let _l = async || CustomFutureType.await;
+ let _m = async || {
+ println!("I'm bored");
+ // Some more stuff
+
+ // Finally something to await
+ CustomFutureType.await
+ };
+ let _n = async || custom_future_type_ctor();
+ let _o = async || f();
+}
+
+#[rustfmt::skip]
+#[allow(dead_code)]
+fn check_expect_suppression() {
+ #[expect(clippy::async_yields_async)]
+ let _j = async || {
+ async {
+ 3
+ }
+ };
+}
diff --git a/src/tools/clippy/tests/ui/async_yields_async.rs b/src/tools/clippy/tests/ui/async_yields_async.rs
new file mode 100644
index 000000000..dd4131b60
--- /dev/null
+++ b/src/tools/clippy/tests/ui/async_yields_async.rs
@@ -0,0 +1,78 @@
+// run-rustfix
+#![feature(lint_reasons)]
+#![feature(async_closure)]
+#![warn(clippy::async_yields_async)]
+
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll};
+
+struct CustomFutureType;
+
+impl Future for CustomFutureType {
+ type Output = u8;
+
+ fn poll(self: Pin<&mut Self>, _: &mut Context) -> Poll<Self::Output> {
+ Poll::Ready(3)
+ }
+}
+
+fn custom_future_type_ctor() -> CustomFutureType {
+ CustomFutureType
+}
+
+async fn f() -> CustomFutureType {
+ // Don't warn for functions since you have to explicitly declare their
+ // return types.
+ CustomFutureType
+}
+
+#[rustfmt::skip]
+fn main() {
+ let _f = {
+ 3
+ };
+ let _g = async {
+ 3
+ };
+ let _h = async {
+ async {
+ 3
+ }
+ };
+ let _i = async {
+ CustomFutureType
+ };
+ let _i = async || {
+ 3
+ };
+ let _j = async || {
+ async {
+ 3
+ }
+ };
+ let _k = async || {
+ CustomFutureType
+ };
+ let _l = async || CustomFutureType;
+ let _m = async || {
+ println!("I'm bored");
+ // Some more stuff
+
+ // Finally something to await
+ CustomFutureType
+ };
+ let _n = async || custom_future_type_ctor();
+ let _o = async || f();
+}
+
+#[rustfmt::skip]
+#[allow(dead_code)]
+fn check_expect_suppression() {
+ #[expect(clippy::async_yields_async)]
+ let _j = async || {
+ async {
+ 3
+ }
+ };
+}
diff --git a/src/tools/clippy/tests/ui/async_yields_async.stderr b/src/tools/clippy/tests/ui/async_yields_async.stderr
new file mode 100644
index 000000000..b0c4215e7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/async_yields_async.stderr
@@ -0,0 +1,96 @@
+error: an async construct yields a type which is itself awaitable
+ --> $DIR/async_yields_async.rs:39:9
+ |
+LL | let _h = async {
+ | ____________________-
+LL | | async {
+ | |_________^
+LL | || 3
+LL | || }
+ | ||_________^ awaitable value not awaited
+LL | | };
+ | |_____- outer async construct
+ |
+ = note: `-D clippy::async-yields-async` implied by `-D warnings`
+help: consider awaiting this value
+ |
+LL ~ async {
+LL + 3
+LL + }.await
+ |
+
+error: an async construct yields a type which is itself awaitable
+ --> $DIR/async_yields_async.rs:44:9
+ |
+LL | let _i = async {
+ | ____________________-
+LL | | CustomFutureType
+ | | ^^^^^^^^^^^^^^^^
+ | | |
+ | | awaitable value not awaited
+ | | help: consider awaiting this value: `CustomFutureType.await`
+LL | | };
+ | |_____- outer async construct
+
+error: an async construct yields a type which is itself awaitable
+ --> $DIR/async_yields_async.rs:50:9
+ |
+LL | let _j = async || {
+ | _______________________-
+LL | | async {
+ | |_________^
+LL | || 3
+LL | || }
+ | ||_________^ awaitable value not awaited
+LL | | };
+ | |_____- outer async construct
+ |
+help: consider awaiting this value
+ |
+LL ~ async {
+LL + 3
+LL + }.await
+ |
+
+error: an async construct yields a type which is itself awaitable
+ --> $DIR/async_yields_async.rs:55:9
+ |
+LL | let _k = async || {
+ | _______________________-
+LL | | CustomFutureType
+ | | ^^^^^^^^^^^^^^^^
+ | | |
+ | | awaitable value not awaited
+ | | help: consider awaiting this value: `CustomFutureType.await`
+LL | | };
+ | |_____- outer async construct
+
+error: an async construct yields a type which is itself awaitable
+ --> $DIR/async_yields_async.rs:57:23
+ |
+LL | let _l = async || CustomFutureType;
+ | ^^^^^^^^^^^^^^^^
+ | |
+ | outer async construct
+ | awaitable value not awaited
+ | help: consider awaiting this value: `CustomFutureType.await`
+
+error: an async construct yields a type which is itself awaitable
+ --> $DIR/async_yields_async.rs:63:9
+ |
+LL | let _m = async || {
+ | _______________________-
+LL | | println!("I'm bored");
+LL | | // Some more stuff
+LL | |
+LL | | // Finally something to await
+LL | | CustomFutureType
+ | | ^^^^^^^^^^^^^^^^
+ | | |
+ | | awaitable value not awaited
+ | | help: consider awaiting this value: `CustomFutureType.await`
+LL | | };
+ | |_____- outer async construct
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/attrs.rs b/src/tools/clippy/tests/ui/attrs.rs
new file mode 100644
index 000000000..8df6e1942
--- /dev/null
+++ b/src/tools/clippy/tests/ui/attrs.rs
@@ -0,0 +1,45 @@
+#![warn(clippy::inline_always, clippy::deprecated_semver)]
+#![allow(clippy::assertions_on_constants)]
+#![allow(clippy::missing_docs_in_private_items, clippy::panic, clippy::unreachable)]
+
+#[inline(always)]
+fn test_attr_lint() {
+ assert!(true)
+}
+
+#[inline(always)]
+fn false_positive_expr() {
+ unreachable!()
+}
+
+#[inline(always)]
+fn false_positive_stmt() {
+ unreachable!();
+}
+
+#[inline(always)]
+fn empty_and_false_positive_stmt() {
+ unreachable!();
+}
+
+#[deprecated(since = "forever")]
+pub const SOME_CONST: u8 = 42;
+
+#[deprecated(since = "1")]
+pub const ANOTHER_CONST: u8 = 23;
+
+#[deprecated(since = "0.1.1")]
+pub const YET_ANOTHER_CONST: u8 = 0;
+
+fn main() {
+ test_attr_lint();
+ if false {
+ false_positive_expr()
+ }
+ if false {
+ false_positive_stmt()
+ }
+ if false {
+ empty_and_false_positive_stmt()
+ }
+}
diff --git a/src/tools/clippy/tests/ui/attrs.stderr b/src/tools/clippy/tests/ui/attrs.stderr
new file mode 100644
index 000000000..df4e9e20b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/attrs.stderr
@@ -0,0 +1,24 @@
+error: you have declared `#[inline(always)]` on `test_attr_lint`. This is usually a bad idea
+ --> $DIR/attrs.rs:5:1
+ |
+LL | #[inline(always)]
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::inline-always` implied by `-D warnings`
+
+error: the since field must contain a semver-compliant version
+ --> $DIR/attrs.rs:25:14
+ |
+LL | #[deprecated(since = "forever")]
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::deprecated-semver` implied by `-D warnings`
+
+error: the since field must contain a semver-compliant version
+ --> $DIR/attrs.rs:28:14
+ |
+LL | #[deprecated(since = "1")]
+ | ^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/author.rs b/src/tools/clippy/tests/ui/author.rs
new file mode 100644
index 000000000..0a1be3568
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author.rs
@@ -0,0 +1,4 @@
+fn main() {
+ #[clippy::author]
+ let x: char = 0x45 as char;
+}
diff --git a/src/tools/clippy/tests/ui/author.stdout b/src/tools/clippy/tests/ui/author.stdout
new file mode 100644
index 000000000..312586303
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author.stdout
@@ -0,0 +1,14 @@
+if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Cast(expr, cast_ty) = init.kind;
+ if let TyKind::Path(ref qpath) = cast_ty.kind;
+ if match_qpath(qpath, &["char"]);
+ if let ExprKind::Lit(ref lit) = expr.kind;
+ if let LitKind::Int(69, LitIntType::Unsuffixed) = lit.node;
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name, None) = local.pat.kind;
+ if name.as_str() == "x";
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/blocks.rs b/src/tools/clippy/tests/ui/author/blocks.rs
new file mode 100644
index 000000000..a7335c01b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/blocks.rs
@@ -0,0 +1,24 @@
+// edition:2018
+
+#![allow(redundant_semicolons, clippy::no_effect)]
+#![feature(stmt_expr_attributes)]
+#![feature(async_closure)]
+
+#[rustfmt::skip]
+fn main() {
+ #[clippy::author]
+ {
+ let x = 42i32;
+ let _t = 1f32;
+
+ -x;
+ };
+ #[clippy::author]
+ {
+ let expr = String::new();
+ drop(expr)
+ };
+
+ #[clippy::author]
+ async move || {};
+}
diff --git a/src/tools/clippy/tests/ui/author/blocks.stdout b/src/tools/clippy/tests/ui/author/blocks.stdout
new file mode 100644
index 000000000..2fc4a7d1f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/blocks.stdout
@@ -0,0 +1,64 @@
+if_chain! {
+ if let ExprKind::Block(block, None) = expr.kind;
+ if block.stmts.len() == 3;
+ if let StmtKind::Local(local) = block.stmts[0].kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Lit(ref lit) = init.kind;
+ if let LitKind::Int(42, LitIntType::Signed(IntTy::I32)) = lit.node;
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name, None) = local.pat.kind;
+ if name.as_str() == "x";
+ if let StmtKind::Local(local1) = block.stmts[1].kind;
+ if let Some(init1) = local1.init;
+ if let ExprKind::Lit(ref lit1) = init1.kind;
+ if let LitKind::Float(_, LitFloatType::Suffixed(FloatTy::F32)) = lit1.node;
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name1, None) = local1.pat.kind;
+ if name1.as_str() == "_t";
+ if let StmtKind::Semi(e) = block.stmts[2].kind;
+ if let ExprKind::Unary(UnOp::Neg, inner) = e.kind;
+ if let ExprKind::Path(ref qpath) = inner.kind;
+ if match_qpath(qpath, &["x"]);
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let ExprKind::Block(block, None) = expr.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Local(local) = block.stmts[0].kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Call(func, args) = init.kind;
+ if let ExprKind::Path(ref qpath) = func.kind;
+ if match_qpath(qpath, &["String", "new"]);
+ if args.is_empty();
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name, None) = local.pat.kind;
+ if name.as_str() == "expr";
+ if let Some(trailing_expr) = block.expr;
+ if let ExprKind::Call(func1, args1) = trailing_expr.kind;
+ if let ExprKind::Path(ref qpath1) = func1.kind;
+ if match_qpath(qpath1, &["drop"]);
+ if args1.len() == 1;
+ if let ExprKind::Path(ref qpath2) = args1[0].kind;
+ if match_qpath(qpath2, &["expr"]);
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let ExprKind::Closure(CaptureBy::Value, fn_decl, body_id, _, None) = expr.kind;
+ if let FnRetTy::DefaultReturn(_) = fn_decl.output;
+ let expr1 = &cx.tcx.hir().body(body_id).value;
+ if let ExprKind::Call(func, args) = expr1.kind;
+ if let ExprKind::Path(ref qpath) = func.kind;
+ if matches!(qpath, QPath::LangItem(LangItem::FromGenerator, _));
+ if args.len() == 1;
+ if let ExprKind::Closure(CaptureBy::Value, fn_decl1, body_id1, _, Some(Movability::Static)) = args[0].kind;
+ if let FnRetTy::DefaultReturn(_) = fn_decl1.output;
+ let expr2 = &cx.tcx.hir().body(body_id1).value;
+ if let ExprKind::Block(block, None) = expr2.kind;
+ if block.stmts.is_empty();
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/call.rs b/src/tools/clippy/tests/ui/author/call.rs
new file mode 100644
index 000000000..e99c3c41d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/call.rs
@@ -0,0 +1,4 @@
+fn main() {
+ #[clippy::author]
+ let _ = ::std::cmp::min(3, 4);
+}
diff --git a/src/tools/clippy/tests/ui/author/call.stdout b/src/tools/clippy/tests/ui/author/call.stdout
new file mode 100644
index 000000000..266312d63
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/call.stdout
@@ -0,0 +1,16 @@
+if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Call(func, args) = init.kind;
+ if let ExprKind::Path(ref qpath) = func.kind;
+ if match_qpath(qpath, &["{{root}}", "std", "cmp", "min"]);
+ if args.len() == 2;
+ if let ExprKind::Lit(ref lit) = args[0].kind;
+ if let LitKind::Int(3, LitIntType::Unsuffixed) = lit.node;
+ if let ExprKind::Lit(ref lit1) = args[1].kind;
+ if let LitKind::Int(4, LitIntType::Unsuffixed) = lit1.node;
+ if let PatKind::Wild = local.pat.kind;
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/if.rs b/src/tools/clippy/tests/ui/author/if.rs
new file mode 100644
index 000000000..946088ab3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/if.rs
@@ -0,0 +1,17 @@
+#[allow(clippy::all)]
+
+fn main() {
+ #[clippy::author]
+ let _ = if true {
+ 1 == 1;
+ } else {
+ 2 == 2;
+ };
+
+ let a = true;
+
+ #[clippy::author]
+ if let true = a {
+ } else {
+ };
+}
diff --git a/src/tools/clippy/tests/ui/author/if.stdout b/src/tools/clippy/tests/ui/author/if.stdout
new file mode 100644
index 000000000..8d92849b3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/if.stdout
@@ -0,0 +1,50 @@
+if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let Some(init) = local.init;
+ if let ExprKind::If(cond, then, Some(else_expr)) = init.kind;
+ if let ExprKind::DropTemps(expr) = cond.kind;
+ if let ExprKind::Lit(ref lit) = expr.kind;
+ if let LitKind::Bool(true) = lit.node;
+ if let ExprKind::Block(block, None) = then.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Semi(e) = block.stmts[0].kind;
+ if let ExprKind::Binary(op, left, right) = e.kind;
+ if BinOpKind::Eq == op.node;
+ if let ExprKind::Lit(ref lit1) = left.kind;
+ if let LitKind::Int(1, LitIntType::Unsuffixed) = lit1.node;
+ if let ExprKind::Lit(ref lit2) = right.kind;
+ if let LitKind::Int(1, LitIntType::Unsuffixed) = lit2.node;
+ if block.expr.is_none();
+ if let ExprKind::Block(block1, None) = else_expr.kind;
+ if block1.stmts.len() == 1;
+ if let StmtKind::Semi(e1) = block1.stmts[0].kind;
+ if let ExprKind::Binary(op1, left1, right1) = e1.kind;
+ if BinOpKind::Eq == op1.node;
+ if let ExprKind::Lit(ref lit3) = left1.kind;
+ if let LitKind::Int(2, LitIntType::Unsuffixed) = lit3.node;
+ if let ExprKind::Lit(ref lit4) = right1.kind;
+ if let LitKind::Int(2, LitIntType::Unsuffixed) = lit4.node;
+ if block1.expr.is_none();
+ if let PatKind::Wild = local.pat.kind;
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let ExprKind::If(cond, then, Some(else_expr)) = expr.kind;
+ if let ExprKind::Let(let_expr) = cond.kind;
+ if let PatKind::Lit(lit_expr) = let_expr.pat.kind;
+ if let ExprKind::Lit(ref lit) = lit_expr.kind;
+ if let LitKind::Bool(true) = lit.node;
+ if let ExprKind::Path(ref qpath) = let_expr.init.kind;
+ if match_qpath(qpath, &["a"]);
+ if let ExprKind::Block(block, None) = then.kind;
+ if block.stmts.is_empty();
+ if block.expr.is_none();
+ if let ExprKind::Block(block1, None) = else_expr.kind;
+ if block1.stmts.is_empty();
+ if block1.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/issue_3849.rs b/src/tools/clippy/tests/ui/author/issue_3849.rs
new file mode 100644
index 000000000..bae4570e5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/issue_3849.rs
@@ -0,0 +1,14 @@
+#![allow(dead_code)]
+#![allow(clippy::zero_ptr)]
+#![allow(clippy::transmute_ptr_to_ref)]
+#![allow(clippy::transmuting_null)]
+
+pub const ZPTR: *const usize = 0 as *const _;
+
+fn main() {
+ unsafe {
+ #[clippy::author]
+ let _: &i32 = std::mem::transmute(ZPTR);
+ let _: &i32 = std::mem::transmute(0 as *const i32);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/issue_3849.stdout b/src/tools/clippy/tests/ui/author/issue_3849.stdout
new file mode 100644
index 000000000..bce4bc702
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/issue_3849.stdout
@@ -0,0 +1,14 @@
+if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Call(func, args) = init.kind;
+ if let ExprKind::Path(ref qpath) = func.kind;
+ if match_qpath(qpath, &["std", "mem", "transmute"]);
+ if args.len() == 1;
+ if let ExprKind::Path(ref qpath1) = args[0].kind;
+ if match_qpath(qpath1, &["ZPTR"]);
+ if let PatKind::Wild = local.pat.kind;
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/loop.rs b/src/tools/clippy/tests/ui/author/loop.rs
new file mode 100644
index 000000000..d6de21631
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/loop.rs
@@ -0,0 +1,36 @@
+#![feature(stmt_expr_attributes)]
+#![allow(clippy::never_loop, clippy::while_immutable_condition)]
+
+fn main() {
+ #[clippy::author]
+ for y in 0..10 {
+ let z = y;
+ }
+
+ #[clippy::author]
+ for _ in 0..10 {
+ break;
+ }
+
+ #[clippy::author]
+ 'label: for _ in 0..10 {
+ break 'label;
+ }
+
+ let a = true;
+
+ #[clippy::author]
+ while a {
+ break;
+ }
+
+ #[clippy::author]
+ while let true = a {
+ break;
+ }
+
+ #[clippy::author]
+ loop {
+ break;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/loop.stdout b/src/tools/clippy/tests/ui/author/loop.stdout
new file mode 100644
index 000000000..3d9560f69
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/loop.stdout
@@ -0,0 +1,113 @@
+if_chain! {
+ if let Some(higher::ForLoop { pat: pat, arg: arg, body: body, .. }) = higher::ForLoop::hir(expr);
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name, None) = pat.kind;
+ if name.as_str() == "y";
+ if let ExprKind::Struct(qpath, fields, None) = arg.kind;
+ if matches!(qpath, QPath::LangItem(LangItem::Range, _));
+ if fields.len() == 2;
+ if fields[0].ident.as_str() == "start";
+ if let ExprKind::Lit(ref lit) = fields[0].expr.kind;
+ if let LitKind::Int(0, LitIntType::Unsuffixed) = lit.node;
+ if fields[1].ident.as_str() == "end";
+ if let ExprKind::Lit(ref lit1) = fields[1].expr.kind;
+ if let LitKind::Int(10, LitIntType::Unsuffixed) = lit1.node;
+ if let ExprKind::Block(block, None) = body.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Local(local) = block.stmts[0].kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Path(ref qpath1) = init.kind;
+ if match_qpath(qpath1, &["y"]);
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name1, None) = local.pat.kind;
+ if name1.as_str() == "z";
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let Some(higher::ForLoop { pat: pat, arg: arg, body: body, .. }) = higher::ForLoop::hir(expr);
+ if let PatKind::Wild = pat.kind;
+ if let ExprKind::Struct(qpath, fields, None) = arg.kind;
+ if matches!(qpath, QPath::LangItem(LangItem::Range, _));
+ if fields.len() == 2;
+ if fields[0].ident.as_str() == "start";
+ if let ExprKind::Lit(ref lit) = fields[0].expr.kind;
+ if let LitKind::Int(0, LitIntType::Unsuffixed) = lit.node;
+ if fields[1].ident.as_str() == "end";
+ if let ExprKind::Lit(ref lit1) = fields[1].expr.kind;
+ if let LitKind::Int(10, LitIntType::Unsuffixed) = lit1.node;
+ if let ExprKind::Block(block, None) = body.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Semi(e) = block.stmts[0].kind;
+ if let ExprKind::Break(destination, None) = e.kind;
+ if destination.label.is_none();
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let Some(higher::ForLoop { pat: pat, arg: arg, body: body, .. }) = higher::ForLoop::hir(expr);
+ if let PatKind::Wild = pat.kind;
+ if let ExprKind::Struct(qpath, fields, None) = arg.kind;
+ if matches!(qpath, QPath::LangItem(LangItem::Range, _));
+ if fields.len() == 2;
+ if fields[0].ident.as_str() == "start";
+ if let ExprKind::Lit(ref lit) = fields[0].expr.kind;
+ if let LitKind::Int(0, LitIntType::Unsuffixed) = lit.node;
+ if fields[1].ident.as_str() == "end";
+ if let ExprKind::Lit(ref lit1) = fields[1].expr.kind;
+ if let LitKind::Int(10, LitIntType::Unsuffixed) = lit1.node;
+ if let ExprKind::Block(block, None) = body.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Semi(e) = block.stmts[0].kind;
+ if let ExprKind::Break(destination, None) = e.kind;
+ if let Some(label) = destination.label;
+ if label.ident.as_str() == "'label";
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let Some(higher::While { condition: condition, body: body }) = higher::While::hir(expr);
+ if let ExprKind::Path(ref qpath) = condition.kind;
+ if match_qpath(qpath, &["a"]);
+ if let ExprKind::Block(block, None) = body.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Semi(e) = block.stmts[0].kind;
+ if let ExprKind::Break(destination, None) = e.kind;
+ if destination.label.is_none();
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let Some(higher::WhileLet { let_pat: let_pat, let_expr: let_expr, if_then: if_then }) = higher::WhileLet::hir(expr);
+ if let PatKind::Lit(lit_expr) = let_pat.kind;
+ if let ExprKind::Lit(ref lit) = lit_expr.kind;
+ if let LitKind::Bool(true) = lit.node;
+ if let ExprKind::Path(ref qpath) = let_expr.kind;
+ if match_qpath(qpath, &["a"]);
+ if let ExprKind::Block(block, None) = if_then.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Semi(e) = block.stmts[0].kind;
+ if let ExprKind::Break(destination, None) = e.kind;
+ if destination.label.is_none();
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let ExprKind::Loop(body, None, LoopSource::Loop, _) = expr.kind;
+ if body.stmts.len() == 1;
+ if let StmtKind::Semi(e) = body.stmts[0].kind;
+ if let ExprKind::Break(destination, None) = e.kind;
+ if destination.label.is_none();
+ if body.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/matches.rs b/src/tools/clippy/tests/ui/author/matches.rs
new file mode 100644
index 000000000..674e07ec2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/matches.rs
@@ -0,0 +1,13 @@
+#![allow(clippy::let_and_return)]
+
+fn main() {
+ #[clippy::author]
+ let a = match 42 {
+ 16 => 5,
+ 17 => {
+ let x = 3;
+ x
+ },
+ _ => 1,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/author/matches.stdout b/src/tools/clippy/tests/ui/author/matches.stdout
new file mode 100644
index 000000000..38444a009
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/matches.stdout
@@ -0,0 +1,38 @@
+if_chain! {
+ if let StmtKind::Local(local) = stmt.kind;
+ if let Some(init) = local.init;
+ if let ExprKind::Match(scrutinee, arms, MatchSource::Normal) = init.kind;
+ if let ExprKind::Lit(ref lit) = scrutinee.kind;
+ if let LitKind::Int(42, LitIntType::Unsuffixed) = lit.node;
+ if arms.len() == 3;
+ if let PatKind::Lit(lit_expr) = arms[0].pat.kind;
+ if let ExprKind::Lit(ref lit1) = lit_expr.kind;
+ if let LitKind::Int(16, LitIntType::Unsuffixed) = lit1.node;
+ if arms[0].guard.is_none();
+ if let ExprKind::Lit(ref lit2) = arms[0].body.kind;
+ if let LitKind::Int(5, LitIntType::Unsuffixed) = lit2.node;
+ if let PatKind::Lit(lit_expr1) = arms[1].pat.kind;
+ if let ExprKind::Lit(ref lit3) = lit_expr1.kind;
+ if let LitKind::Int(17, LitIntType::Unsuffixed) = lit3.node;
+ if arms[1].guard.is_none();
+ if let ExprKind::Block(block, None) = arms[1].body.kind;
+ if block.stmts.len() == 1;
+ if let StmtKind::Local(local1) = block.stmts[0].kind;
+ if let Some(init1) = local1.init;
+ if let ExprKind::Lit(ref lit4) = init1.kind;
+ if let LitKind::Int(3, LitIntType::Unsuffixed) = lit4.node;
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name, None) = local1.pat.kind;
+ if name.as_str() == "x";
+ if let Some(trailing_expr) = block.expr;
+ if let ExprKind::Path(ref qpath) = trailing_expr.kind;
+ if match_qpath(qpath, &["x"]);
+ if let PatKind::Wild = arms[2].pat.kind;
+ if arms[2].guard.is_none();
+ if let ExprKind::Lit(ref lit5) = arms[2].body.kind;
+ if let LitKind::Int(1, LitIntType::Unsuffixed) = lit5.node;
+ if let PatKind::Binding(BindingAnnotation::Unannotated, _, name1, None) = local.pat.kind;
+ if name1.as_str() == "a";
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/repeat.rs b/src/tools/clippy/tests/ui/author/repeat.rs
new file mode 100644
index 000000000..d8e9d589e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/repeat.rs
@@ -0,0 +1,5 @@
+#[allow(clippy::no_effect)]
+fn main() {
+ #[clippy::author]
+ [1_u8; 5];
+}
diff --git a/src/tools/clippy/tests/ui/author/repeat.stdout b/src/tools/clippy/tests/ui/author/repeat.stdout
new file mode 100644
index 000000000..471bbce4f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/repeat.stdout
@@ -0,0 +1,12 @@
+if_chain! {
+ if let ExprKind::Repeat(value, length) = expr.kind;
+ if let ExprKind::Lit(ref lit) = value.kind;
+ if let LitKind::Int(1, LitIntType::Unsigned(UintTy::U8)) = lit.node;
+ if let ArrayLen::Body(anon_const) = length;
+ let expr1 = &cx.tcx.hir().body(anon_const.body).value;
+ if let ExprKind::Lit(ref lit1) = expr1.kind;
+ if let LitKind::Int(5, LitIntType::Unsuffixed) = lit1.node;
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/author/struct.rs b/src/tools/clippy/tests/ui/author/struct.rs
new file mode 100644
index 000000000..5fdf3433a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/struct.rs
@@ -0,0 +1,40 @@
+#[allow(clippy::unnecessary_operation, clippy::single_match)]
+fn main() {
+ struct Test {
+ field: u32,
+ }
+
+ #[clippy::author]
+ Test {
+ field: if true { 1 } else { 0 },
+ };
+
+ let test = Test { field: 1 };
+
+ match test {
+ #[clippy::author]
+ Test { field: 1 } => {},
+ _ => {},
+ }
+
+ struct TestTuple(u32);
+
+ let test_tuple = TestTuple(1);
+
+ match test_tuple {
+ #[clippy::author]
+ TestTuple(1) => {},
+ _ => {},
+ }
+
+ struct TestMethodCall(u32);
+
+ impl TestMethodCall {
+ fn test(&self) {}
+ }
+
+ let test_method_call = TestMethodCall(1);
+
+ #[clippy::author]
+ test_method_call.test();
+}
diff --git a/src/tools/clippy/tests/ui/author/struct.stdout b/src/tools/clippy/tests/ui/author/struct.stdout
new file mode 100644
index 000000000..5e78b7c9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/author/struct.stdout
@@ -0,0 +1,64 @@
+if_chain! {
+ if let ExprKind::Struct(qpath, fields, None) = expr.kind;
+ if match_qpath(qpath, &["Test"]);
+ if fields.len() == 1;
+ if fields[0].ident.as_str() == "field";
+ if let ExprKind::If(cond, then, Some(else_expr)) = fields[0].expr.kind;
+ if let ExprKind::DropTemps(expr1) = cond.kind;
+ if let ExprKind::Lit(ref lit) = expr1.kind;
+ if let LitKind::Bool(true) = lit.node;
+ if let ExprKind::Block(block, None) = then.kind;
+ if block.stmts.is_empty();
+ if let Some(trailing_expr) = block.expr;
+ if let ExprKind::Lit(ref lit1) = trailing_expr.kind;
+ if let LitKind::Int(1, LitIntType::Unsuffixed) = lit1.node;
+ if let ExprKind::Block(block1, None) = else_expr.kind;
+ if block1.stmts.is_empty();
+ if let Some(trailing_expr1) = block1.expr;
+ if let ExprKind::Lit(ref lit2) = trailing_expr1.kind;
+ if let LitKind::Int(0, LitIntType::Unsuffixed) = lit2.node;
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let PatKind::Struct(ref qpath, fields, false) = arm.pat.kind;
+ if match_qpath(qpath, &["Test"]);
+ if fields.len() == 1;
+ if fields[0].ident.as_str() == "field";
+ if let PatKind::Lit(lit_expr) = fields[0].pat.kind;
+ if let ExprKind::Lit(ref lit) = lit_expr.kind;
+ if let LitKind::Int(1, LitIntType::Unsuffixed) = lit.node;
+ if arm.guard.is_none();
+ if let ExprKind::Block(block, None) = arm.body.kind;
+ if block.stmts.is_empty();
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let PatKind::TupleStruct(ref qpath, fields, None) = arm.pat.kind;
+ if match_qpath(qpath, &["TestTuple"]);
+ if fields.len() == 1;
+ if let PatKind::Lit(lit_expr) = fields[0].kind;
+ if let ExprKind::Lit(ref lit) = lit_expr.kind;
+ if let LitKind::Int(1, LitIntType::Unsuffixed) = lit.node;
+ if arm.guard.is_none();
+ if let ExprKind::Block(block, None) = arm.body.kind;
+ if block.stmts.is_empty();
+ if block.expr.is_none();
+ then {
+ // report your lint here
+ }
+}
+if_chain! {
+ if let ExprKind::MethodCall(method_name, args, _) = expr.kind;
+ if method_name.ident.as_str() == "test";
+ if args.len() == 1;
+ if let ExprKind::Path(ref qpath) = args[0].kind;
+ if match_qpath(qpath, &["test_method_call"]);
+ then {
+ // report your lint here
+ }
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs b/src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs
new file mode 100644
index 000000000..869672d1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/doc_unsafe_macros.rs
@@ -0,0 +1,8 @@
+#[macro_export]
+macro_rules! undocd_unsafe {
+ () => {
+ pub unsafe fn oy_vey() {
+ unimplemented!();
+ }
+ };
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs b/src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs
new file mode 100644
index 000000000..1eb77c531
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/implicit_hasher_macros.rs
@@ -0,0 +1,6 @@
+#[macro_export]
+macro_rules! implicit_hasher_fn {
+ () => {
+ pub fn f(input: &HashMap<u32, u32>) {}
+ };
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs b/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs
new file mode 100644
index 000000000..83a0af6b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/macro_rules.rs
@@ -0,0 +1,142 @@
+#![allow(dead_code)]
+
+//! Used to test that certain lints don't trigger in imported external macros
+
+#[macro_export]
+macro_rules! foofoo {
+ () => {
+ loop {}
+ };
+}
+
+#[macro_export]
+macro_rules! must_use_unit {
+ () => {
+ #[must_use]
+ fn foo() {}
+ };
+}
+
+#[macro_export]
+macro_rules! try_err {
+ () => {
+ pub fn try_err_fn() -> Result<i32, i32> {
+ let err: i32 = 1;
+ // To avoid warnings during rustfix
+ if true { Err(err)? } else { Ok(2) }
+ }
+ };
+}
+
+#[macro_export]
+macro_rules! string_add {
+ () => {
+ let y = "".to_owned();
+ let z = y + "...";
+ };
+}
+
+#[macro_export]
+macro_rules! take_external {
+ ($s:expr) => {
+ std::mem::replace($s, Default::default())
+ };
+}
+
+#[macro_export]
+macro_rules! option_env_unwrap_external {
+ ($env: expr) => {
+ option_env!($env).unwrap()
+ };
+ ($env: expr, $message: expr) => {
+ option_env!($env).expect($message)
+ };
+}
+
+#[macro_export]
+macro_rules! ref_arg_binding {
+ () => {
+ let ref _y = 42;
+ };
+}
+
+#[macro_export]
+macro_rules! ref_arg_function {
+ () => {
+ fn fun_example(ref _x: usize) {}
+ };
+}
+
+#[macro_export]
+macro_rules! as_conv_with_arg {
+ (0u32 as u64) => {
+ ()
+ };
+}
+
+#[macro_export]
+macro_rules! as_conv {
+ () => {
+ 0u32 as u64
+ };
+}
+
+#[macro_export]
+macro_rules! large_enum_variant {
+ () => {
+ enum LargeEnumInMacro {
+ A(i32),
+ B([i32; 8000]),
+ }
+ };
+}
+
+#[macro_export]
+macro_rules! field_reassign_with_default {
+ () => {
+ #[derive(Default)]
+ struct A {
+ pub i: i32,
+ pub j: i64,
+ }
+ fn lint() {
+ let mut a: A = Default::default();
+ a.i = 42;
+ a;
+ }
+ };
+}
+
+#[macro_export]
+macro_rules! default_numeric_fallback {
+ () => {
+ let x = 22;
+ };
+}
+
+#[macro_export]
+macro_rules! mut_mut {
+ () => {
+ let mut_mut_ty: &mut &mut u32 = &mut &mut 1u32;
+ };
+}
+
+#[macro_export]
+macro_rules! ptr_as_ptr_cast {
+ ($ptr: ident) => {
+ $ptr as *const i32
+ };
+}
+
+#[macro_export]
+macro_rules! manual_rem_euclid {
+ () => {
+ let value: i32 = 5;
+ let _: i32 = ((value % 4) + 4) % 4;
+ };
+}
+
+#[macro_export]
+macro_rules! equatable_if_let {
+ ($a:ident) => {{ if let 2 = $a {} }};
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
new file mode 100644
index 000000000..ecb55d8cb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
@@ -0,0 +1,60 @@
+extern crate macro_rules;
+
+// STMT
+#[macro_export]
+macro_rules! pub_macro {
+ () => {
+ let _ = "hello Mr. Vonnegut";
+ };
+}
+
+pub mod inner {
+ pub use super::*;
+
+ // RE-EXPORT
+ // this will stick in the `inner` module
+ pub use macro_rules::foofoo;
+ pub use macro_rules::try_err;
+
+ pub mod nested {
+ pub use macro_rules::string_add;
+ }
+
+ // ITEM
+ #[macro_export]
+ macro_rules! inner_mod_macro {
+ () => {
+ #[allow(dead_code)]
+ pub struct Tardis;
+ };
+ }
+}
+
+// EXPR
+#[macro_export]
+macro_rules! function_macro {
+ () => {
+ if true {
+ } else {
+ }
+ };
+}
+
+// TYPE
+#[macro_export]
+macro_rules! ty_macro {
+ () => {
+ Vec<u8>
+ };
+}
+
+mod extern_exports {
+ pub(super) mod private_inner {
+ #[macro_export]
+ macro_rules! pub_in_private_macro {
+ ($name:ident) => {
+ let $name = String::from("secrets and lies");
+ };
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/non-exhaustive-enum.rs b/src/tools/clippy/tests/ui/auxiliary/non-exhaustive-enum.rs
new file mode 100644
index 000000000..420232f9f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/non-exhaustive-enum.rs
@@ -0,0 +1,8 @@
+// Stripped down version of the ErrorKind enum of std
+#[non_exhaustive]
+pub enum ErrorKind {
+ NotFound,
+ PermissionDenied,
+ #[doc(hidden)]
+ Uncategorized,
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/option_helpers.rs b/src/tools/clippy/tests/ui/auxiliary/option_helpers.rs
new file mode 100644
index 000000000..f9bc9436b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/option_helpers.rs
@@ -0,0 +1,64 @@
+#![allow(dead_code, unused_variables, clippy::return_self_not_must_use)]
+
+/// Utility macro to test linting behavior in `option_methods()`
+/// The lints included in `option_methods()` should not lint if the call to map is partially
+/// within a macro
+#[macro_export]
+macro_rules! opt_map {
+ ($opt:expr, $map:expr) => {
+ ($opt).map($map)
+ };
+}
+
+/// Struct to generate false positive for Iterator-based lints
+#[derive(Copy, Clone)]
+pub struct IteratorFalsePositives {
+ pub foo: u32,
+}
+
+impl IteratorFalsePositives {
+ pub fn filter(self) -> IteratorFalsePositives {
+ self
+ }
+
+ pub fn next(self) -> IteratorFalsePositives {
+ self
+ }
+
+ pub fn find(self) -> Option<u32> {
+ Some(self.foo)
+ }
+
+ pub fn position(self) -> Option<u32> {
+ Some(self.foo)
+ }
+
+ pub fn rposition(self) -> Option<u32> {
+ Some(self.foo)
+ }
+
+ pub fn nth(self, n: usize) -> Option<u32> {
+ Some(self.foo)
+ }
+
+ pub fn skip(self, _: usize) -> IteratorFalsePositives {
+ self
+ }
+
+ pub fn skip_while(self) -> IteratorFalsePositives {
+ self
+ }
+
+ pub fn count(self) -> usize {
+ self.foo as usize
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct IteratorMethodFalsePositives;
+
+impl IteratorMethodFalsePositives {
+ pub fn filter(&self, _s: i32) -> std::vec::IntoIter<i32> {
+ unimplemented!();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs
new file mode 100644
index 000000000..ae2cc2492
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs
@@ -0,0 +1,101 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(repr128, proc_macro_hygiene, proc_macro_quote, box_patterns)]
+#![allow(incomplete_features)]
+#![allow(clippy::useless_conversion)]
+
+extern crate proc_macro;
+extern crate quote;
+extern crate syn;
+
+use proc_macro::TokenStream;
+use quote::{quote, quote_spanned};
+use syn::parse_macro_input;
+use syn::spanned::Spanned;
+use syn::token::Star;
+use syn::{
+ parse_quote, FnArg, ImplItem, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType, Signature, TraitItem, Type,
+};
+
+#[proc_macro_attribute]
+pub fn dummy(_args: TokenStream, input: TokenStream) -> TokenStream {
+ input
+}
+
+#[proc_macro_attribute]
+pub fn fake_async_trait(_args: TokenStream, input: TokenStream) -> TokenStream {
+ let mut item = parse_macro_input!(input as ItemTrait);
+ for inner in &mut item.items {
+ if let TraitItem::Method(method) = inner {
+ let sig = &method.sig;
+ let block = &mut method.default;
+ if let Some(block) = block {
+ let brace = block.brace_token;
+
+ let my_block = quote_spanned!( brace.span => {
+ // Should not trigger `empty_line_after_outer_attr`
+ #[crate_type = "lib"]
+ #sig #block
+ Vec::new()
+ });
+ *block = parse_quote!(#my_block);
+ }
+ }
+ }
+ TokenStream::from(quote!(#item))
+}
+
+#[proc_macro_attribute]
+pub fn rename_my_lifetimes(_args: TokenStream, input: TokenStream) -> TokenStream {
+ fn make_name(count: usize) -> String {
+ format!("'life{}", count)
+ }
+
+ fn mut_receiver_of(sig: &mut Signature) -> Option<&mut FnArg> {
+ let arg = sig.inputs.first_mut()?;
+ if let FnArg::Typed(PatType { pat, .. }) = arg {
+ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
+ if ident == "self" {
+ return Some(arg);
+ }
+ }
+ }
+ None
+ }
+
+ let mut elided = 0;
+ let mut item = parse_macro_input!(input as ItemImpl);
+
+ // Look for methods having arbitrary self type taken by &mut ref
+ for inner in &mut item.items {
+ if let ImplItem::Method(method) = inner {
+ if let Some(FnArg::Typed(pat_type)) = mut_receiver_of(&mut method.sig) {
+ if let box Type::Reference(reference) = &mut pat_type.ty {
+ // Target only unnamed lifetimes
+ let name = match &reference.lifetime {
+ Some(lt) if lt.ident == "_" => make_name(elided),
+ None => make_name(elided),
+ _ => continue,
+ };
+ elided += 1;
+
+ // HACK: Syn uses `Span` from the proc_macro2 crate, and does not seem to reexport it.
+ // In order to avoid adding the dependency, get a default span from a non-existent token.
+ // A default span is needed to mark the code as coming from expansion.
+ let span = Star::default().span();
+
+ // Replace old lifetime with the named one
+ let lifetime = Lifetime::new(&name, span);
+ reference.lifetime = Some(parse_quote!(#lifetime));
+
+ // Add lifetime to the generics of the method
+ method.sig.generics.params.push(parse_quote!(#lifetime));
+ }
+ }
+ }
+ }
+
+ TokenStream::from(quote!(#item))
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs
new file mode 100644
index 000000000..a89a06308
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs
@@ -0,0 +1,88 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(repr128, proc_macro_quote)]
+#![allow(incomplete_features)]
+#![allow(clippy::field_reassign_with_default)]
+#![allow(clippy::eq_op)]
+
+extern crate proc_macro;
+
+use proc_macro::{quote, TokenStream};
+
+#[proc_macro_derive(DeriveSomething)]
+pub fn derive(_: TokenStream) -> TokenStream {
+ // Should not trigger `used_underscore_binding`
+ let _inside_derive = 1;
+ assert_eq!(_inside_derive, _inside_derive);
+
+ let output = quote! {
+ // Should not trigger `useless_attribute`
+ #[allow(dead_code)]
+ extern crate rustc_middle;
+ };
+ output
+}
+
+#[proc_macro_derive(FieldReassignWithDefault)]
+pub fn derive_foo(_input: TokenStream) -> TokenStream {
+ quote! {
+ #[derive(Default)]
+ struct A {
+ pub i: i32,
+ pub j: i64,
+ }
+ #[automatically_derived]
+ fn lint() {
+ let mut a: A = Default::default();
+ a.i = 42;
+ a;
+ }
+ }
+}
+
+#[proc_macro_derive(StructAUseSelf)]
+pub fn derive_use_self(_input: TokenStream) -> proc_macro::TokenStream {
+ quote! {
+ struct A;
+ impl A {
+ fn new() -> A {
+ A
+ }
+ }
+ }
+}
+
+#[proc_macro_derive(ClippyMiniMacroTest)]
+pub fn mini_macro(_: TokenStream) -> TokenStream {
+ quote!(
+ #[allow(unused)]
+ fn needless_take_by_value(s: String) {
+ println!("{}", s.len());
+ }
+ #[allow(unused)]
+ fn needless_loop(items: &[u8]) {
+ for i in 0..items.len() {
+ println!("{}", items[i]);
+ }
+ }
+ fn line_wrapper() {
+ println!("{}", line!());
+ }
+ )
+}
+
+#[proc_macro_derive(ExtraLifetimeDerive)]
+#[allow(unused)]
+pub fn extra_lifetime(_input: TokenStream) -> TokenStream {
+ quote!(
+ pub struct ExtraLifetime;
+
+ impl<'b> ExtraLifetime {
+ pub fn something<'c>() -> Self {
+ Self
+ }
+ }
+ )
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_suspicious_else_formatting.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_suspicious_else_formatting.rs
new file mode 100644
index 000000000..a2ef0fe82
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_suspicious_else_formatting.rs
@@ -0,0 +1,74 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+use proc_macro::{token_stream, Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+
+fn read_ident(iter: &mut token_stream::IntoIter) -> Ident {
+ match iter.next() {
+ Some(TokenTree::Ident(i)) => i,
+ _ => panic!("expected ident"),
+ }
+}
+
+#[proc_macro_derive(DeriveBadSpan)]
+pub fn derive_bad_span(input: TokenStream) -> TokenStream {
+ let mut input = input.into_iter();
+ assert_eq!(read_ident(&mut input).to_string(), "struct");
+ let ident = read_ident(&mut input);
+ let mut tys = match input.next() {
+ Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis => g.stream().into_iter(),
+ _ => panic!(),
+ };
+ let field1 = read_ident(&mut tys);
+ tys.next();
+ let field2 = read_ident(&mut tys);
+
+ <TokenStream as FromIterator<TokenTree>>::from_iter(
+ [
+ Ident::new("impl", Span::call_site()).into(),
+ ident.into(),
+ Group::new(
+ Delimiter::Brace,
+ <TokenStream as FromIterator<TokenTree>>::from_iter(
+ [
+ Ident::new("fn", Span::call_site()).into(),
+ Ident::new("_foo", Span::call_site()).into(),
+ Group::new(Delimiter::Parenthesis, TokenStream::new()).into(),
+ Group::new(
+ Delimiter::Brace,
+ <TokenStream as FromIterator<TokenTree>>::from_iter(
+ [
+ Ident::new("if", field1.span()).into(),
+ Ident::new("true", field1.span()).into(),
+ {
+ let mut group = Group::new(Delimiter::Brace, TokenStream::new());
+ group.set_span(field1.span());
+ group.into()
+ },
+ Ident::new("if", field2.span()).into(),
+ Ident::new("true", field2.span()).into(),
+ {
+ let mut group = Group::new(Delimiter::Brace, TokenStream::new());
+ group.set_span(field2.span());
+ group.into()
+ },
+ ]
+ .iter()
+ .cloned(),
+ ),
+ )
+ .into(),
+ ]
+ .iter()
+ .cloned(),
+ ),
+ )
+ .into(),
+ ]
+ .iter()
+ .cloned(),
+ )
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_unsafe.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_unsafe.rs
new file mode 100644
index 000000000..3c40f7746
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_unsafe.rs
@@ -0,0 +1,18 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::{Delimiter, Group, Ident, TokenStream, TokenTree};
+
+#[proc_macro]
+pub fn unsafe_block(input: TokenStream) -> TokenStream {
+ let span = input.into_iter().next().unwrap().span();
+ TokenStream::from_iter([TokenTree::Ident(Ident::new("unsafe", span)), {
+ let mut group = Group::new(Delimiter::Brace, TokenStream::new());
+ group.set_span(span);
+ TokenTree::Group(group)
+ }])
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs
new file mode 100644
index 000000000..8ea631f2b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_with_span.rs
@@ -0,0 +1,32 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::{token_stream::IntoIter, Group, Span, TokenStream, TokenTree};
+
+#[proc_macro]
+pub fn with_span(input: TokenStream) -> TokenStream {
+ let mut iter = input.into_iter();
+ let span = iter.next().unwrap().span();
+ let mut res = TokenStream::new();
+ write_with_span(span, iter, &mut res);
+ res
+}
+
+fn write_with_span(s: Span, input: IntoIter, out: &mut TokenStream) {
+ for mut tt in input {
+ if let TokenTree::Group(g) = tt {
+ let mut stream = TokenStream::new();
+ write_with_span(s, g.stream().into_iter(), &mut stream);
+ let mut group = Group::new(g.delimiter(), stream);
+ group.set_span(s);
+ out.extend([TokenTree::Group(group)]);
+ } else {
+ tt.set_span(s);
+ out.extend([tt]);
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/test_macro.rs b/src/tools/clippy/tests/ui/auxiliary/test_macro.rs
new file mode 100644
index 000000000..624ca892a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/test_macro.rs
@@ -0,0 +1,11 @@
+pub trait A {}
+
+macro_rules! __implicit_hasher_test_macro {
+ (impl< $($impl_arg:tt),* > for $kind:ty where $($bounds:tt)*) => {
+ __implicit_hasher_test_macro!( ($($impl_arg),*) ($kind) ($($bounds)*) );
+ };
+
+ (($($impl_arg:tt)*) ($($kind_arg:tt)*) ($($bounds:tt)*)) => {
+ impl< $($impl_arg)* > test_macro::A for $($kind_arg)* where $($bounds)* { }
+ };
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/use_self_macro.rs b/src/tools/clippy/tests/ui/auxiliary/use_self_macro.rs
new file mode 100644
index 000000000..a8a85b4ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/use_self_macro.rs
@@ -0,0 +1,15 @@
+macro_rules! use_self {
+ (
+ impl $ty:ident {
+ fn func(&$this:ident) {
+ [fields($($field:ident)*)]
+ }
+ }
+ ) => (
+ impl $ty {
+ fn func(&$this) {
+ let $ty { $($field),* } = $this;
+ }
+ }
+ )
+}
diff --git a/src/tools/clippy/tests/ui/auxiliary/wildcard_imports_helper.rs b/src/tools/clippy/tests/ui/auxiliary/wildcard_imports_helper.rs
new file mode 100644
index 000000000..d75cdd625
--- /dev/null
+++ b/src/tools/clippy/tests/ui/auxiliary/wildcard_imports_helper.rs
@@ -0,0 +1,27 @@
+pub use crate::extern_exports::*;
+
+pub fn extern_foo() {}
+pub fn extern_bar() {}
+
+pub struct ExternA;
+
+pub mod inner {
+ pub mod inner_for_self_import {
+ pub fn inner_extern_foo() {}
+ pub fn inner_extern_bar() {}
+ }
+}
+
+mod extern_exports {
+ pub fn extern_exported() {}
+ pub struct ExternExportedStruct;
+ pub enum ExternExportedEnum {
+ A,
+ }
+}
+
+pub mod prelude {
+ pub mod v1 {
+ pub struct PreludeModAnywhere;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/await_holding_lock.rs b/src/tools/clippy/tests/ui/await_holding_lock.rs
new file mode 100644
index 000000000..57e5b5504
--- /dev/null
+++ b/src/tools/clippy/tests/ui/await_holding_lock.rs
@@ -0,0 +1,192 @@
+#![warn(clippy::await_holding_lock)]
+
+// When adding or modifying a test, please do the same for parking_lot::Mutex.
+mod std_mutex {
+ use super::baz;
+ use std::sync::{Mutex, RwLock};
+
+ pub async fn bad(x: &Mutex<u32>) -> u32 {
+ let guard = x.lock().unwrap();
+ baz().await
+ }
+
+ pub async fn good(x: &Mutex<u32>) -> u32 {
+ {
+ let guard = x.lock().unwrap();
+ let y = *guard + 1;
+ }
+ baz().await;
+ let guard = x.lock().unwrap();
+ 47
+ }
+
+ pub async fn bad_rw(x: &RwLock<u32>) -> u32 {
+ let guard = x.read().unwrap();
+ baz().await
+ }
+
+ pub async fn bad_rw_write(x: &RwLock<u32>) -> u32 {
+ let mut guard = x.write().unwrap();
+ baz().await
+ }
+
+ pub async fn good_rw(x: &RwLock<u32>) -> u32 {
+ {
+ let guard = x.read().unwrap();
+ let y = *guard + 1;
+ }
+ {
+ let mut guard = x.write().unwrap();
+ *guard += 1;
+ }
+ baz().await;
+ let guard = x.read().unwrap();
+ 47
+ }
+
+ pub async fn also_bad(x: &Mutex<u32>) -> u32 {
+ let first = baz().await;
+
+ let guard = x.lock().unwrap();
+
+ let second = baz().await;
+
+ let third = baz().await;
+
+ first + second + third
+ }
+
+ pub async fn not_good(x: &Mutex<u32>) -> u32 {
+ let first = baz().await;
+
+ let second = {
+ let guard = x.lock().unwrap();
+ baz().await
+ };
+
+ let third = baz().await;
+
+ first + second + third
+ }
+
+ #[allow(clippy::manual_async_fn)]
+ pub fn block_bad(x: &Mutex<u32>) -> impl std::future::Future<Output = u32> + '_ {
+ async move {
+ let guard = x.lock().unwrap();
+ baz().await
+ }
+ }
+}
+
+// When adding or modifying a test, please do the same for std::sync::Mutex.
+mod parking_lot_mutex {
+ use super::baz;
+ use parking_lot::{Mutex, RwLock};
+
+ pub async fn bad(x: &Mutex<u32>) -> u32 {
+ let guard = x.lock();
+ baz().await
+ }
+
+ pub async fn good(x: &Mutex<u32>) -> u32 {
+ {
+ let guard = x.lock();
+ let y = *guard + 1;
+ }
+ baz().await;
+ let guard = x.lock();
+ 47
+ }
+
+ pub async fn bad_rw(x: &RwLock<u32>) -> u32 {
+ let guard = x.read();
+ baz().await
+ }
+
+ pub async fn bad_rw_write(x: &RwLock<u32>) -> u32 {
+ let mut guard = x.write();
+ baz().await
+ }
+
+ pub async fn good_rw(x: &RwLock<u32>) -> u32 {
+ {
+ let guard = x.read();
+ let y = *guard + 1;
+ }
+ {
+ let mut guard = x.write();
+ *guard += 1;
+ }
+ baz().await;
+ let guard = x.read();
+ 47
+ }
+
+ pub async fn also_bad(x: &Mutex<u32>) -> u32 {
+ let first = baz().await;
+
+ let guard = x.lock();
+
+ let second = baz().await;
+
+ let third = baz().await;
+
+ first + second + third
+ }
+
+ pub async fn not_good(x: &Mutex<u32>) -> u32 {
+ let first = baz().await;
+
+ let second = {
+ let guard = x.lock();
+ baz().await
+ };
+
+ let third = baz().await;
+
+ first + second + third
+ }
+
+ #[allow(clippy::manual_async_fn)]
+ pub fn block_bad(x: &Mutex<u32>) -> impl std::future::Future<Output = u32> + '_ {
+ async move {
+ let guard = x.lock();
+ baz().await
+ }
+ }
+}
+
+async fn baz() -> u32 {
+ 42
+}
+
+async fn no_await(x: std::sync::Mutex<u32>) {
+ let mut guard = x.lock().unwrap();
+ *guard += 1;
+}
+
+// FIXME: FP, because the `MutexGuard` is dropped before crossing the await point. This is
+// something that needs to be fixed in rustc. There's already drop-tracking, but this is currently
+// disabled, see rust-lang/rust#93751. This case isn't picked up by drop-tracking though. If the
+// `*guard += 1` is removed it is picked up.
+async fn dropped_before_await(x: std::sync::Mutex<u32>) {
+ let mut guard = x.lock().unwrap();
+ *guard += 1;
+ drop(guard);
+ baz().await;
+}
+
+fn main() {
+ let m = std::sync::Mutex::new(100);
+ std_mutex::good(&m);
+ std_mutex::bad(&m);
+ std_mutex::also_bad(&m);
+ std_mutex::not_good(&m);
+ std_mutex::block_bad(&m);
+
+ let m = parking_lot::Mutex::new(100);
+ parking_lot_mutex::good(&m);
+ parking_lot_mutex::bad(&m);
+ parking_lot_mutex::also_bad(&m);
+ parking_lot_mutex::not_good(&m);
+}
diff --git a/src/tools/clippy/tests/ui/await_holding_lock.stderr b/src/tools/clippy/tests/ui/await_holding_lock.stderr
new file mode 100644
index 000000000..976da8d92
--- /dev/null
+++ b/src/tools/clippy/tests/ui/await_holding_lock.stderr
@@ -0,0 +1,208 @@
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:9:13
+ |
+LL | let guard = x.lock().unwrap();
+ | ^^^^^
+ |
+ = note: `-D clippy::await-holding-lock` implied by `-D warnings`
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:9:9
+ |
+LL | / let guard = x.lock().unwrap();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:24:13
+ |
+LL | let guard = x.read().unwrap();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:24:9
+ |
+LL | / let guard = x.read().unwrap();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:29:13
+ |
+LL | let mut guard = x.write().unwrap();
+ | ^^^^^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:29:9
+ |
+LL | / let mut guard = x.write().unwrap();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:50:13
+ |
+LL | let guard = x.lock().unwrap();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:50:9
+ |
+LL | / let guard = x.lock().unwrap();
+LL | |
+LL | | let second = baz().await;
+LL | |
+... |
+LL | | first + second + third
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:63:17
+ |
+LL | let guard = x.lock().unwrap();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:63:13
+ |
+LL | / let guard = x.lock().unwrap();
+LL | | baz().await
+LL | | };
+ | |_________^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:75:17
+ |
+LL | let guard = x.lock().unwrap();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:75:13
+ |
+LL | / let guard = x.lock().unwrap();
+LL | | baz().await
+LL | | }
+ | |_________^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:87:13
+ |
+LL | let guard = x.lock();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:87:9
+ |
+LL | / let guard = x.lock();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:102:13
+ |
+LL | let guard = x.read();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:102:9
+ |
+LL | / let guard = x.read();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:107:13
+ |
+LL | let mut guard = x.write();
+ | ^^^^^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:107:9
+ |
+LL | / let mut guard = x.write();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:128:13
+ |
+LL | let guard = x.lock();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:128:9
+ |
+LL | / let guard = x.lock();
+LL | |
+LL | | let second = baz().await;
+LL | |
+... |
+LL | | first + second + third
+LL | | }
+ | |_____^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:141:17
+ |
+LL | let guard = x.lock();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:141:13
+ |
+LL | / let guard = x.lock();
+LL | | baz().await
+LL | | };
+ | |_________^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:153:17
+ |
+LL | let guard = x.lock();
+ | ^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:153:13
+ |
+LL | / let guard = x.lock();
+LL | | baz().await
+LL | | }
+ | |_________^
+
+error: this `MutexGuard` is held across an `await` point
+ --> $DIR/await_holding_lock.rs:173:9
+ |
+LL | let mut guard = x.lock().unwrap();
+ | ^^^^^^^^^
+ |
+ = help: consider using an async-aware `Mutex` type or ensuring the `MutexGuard` is dropped before calling await
+note: these are all the `await` points this lock is held through
+ --> $DIR/await_holding_lock.rs:173:5
+ |
+LL | / let mut guard = x.lock().unwrap();
+LL | | *guard += 1;
+LL | | drop(guard);
+LL | | baz().await;
+LL | | }
+ | |_^
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/await_holding_refcell_ref.rs b/src/tools/clippy/tests/ui/await_holding_refcell_ref.rs
new file mode 100644
index 000000000..23b7095de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/await_holding_refcell_ref.rs
@@ -0,0 +1,85 @@
+#![warn(clippy::await_holding_refcell_ref)]
+
+use std::cell::RefCell;
+
+async fn bad(x: &RefCell<u32>) -> u32 {
+ let b = x.borrow();
+ baz().await
+}
+
+async fn bad_mut(x: &RefCell<u32>) -> u32 {
+ let b = x.borrow_mut();
+ baz().await
+}
+
+async fn good(x: &RefCell<u32>) -> u32 {
+ {
+ let b = x.borrow_mut();
+ let y = *b + 1;
+ }
+ baz().await;
+ let b = x.borrow_mut();
+ 47
+}
+
+async fn baz() -> u32 {
+ 42
+}
+
+async fn also_bad(x: &RefCell<u32>) -> u32 {
+ let first = baz().await;
+
+ let b = x.borrow_mut();
+
+ let second = baz().await;
+
+ let third = baz().await;
+
+ first + second + third
+}
+
+async fn less_bad(x: &RefCell<u32>) -> u32 {
+ let first = baz().await;
+
+ let b = x.borrow_mut();
+
+ let second = baz().await;
+
+ drop(b);
+
+ let third = baz().await;
+
+ first + second + third
+}
+
+async fn not_good(x: &RefCell<u32>) -> u32 {
+ let first = baz().await;
+
+ let second = {
+ let b = x.borrow_mut();
+ baz().await
+ };
+
+ let third = baz().await;
+
+ first + second + third
+}
+
+#[allow(clippy::manual_async_fn)]
+fn block_bad(x: &RefCell<u32>) -> impl std::future::Future<Output = u32> + '_ {
+ async move {
+ let b = x.borrow_mut();
+ baz().await
+ }
+}
+
+fn main() {
+ let rc = RefCell::new(100);
+ good(&rc);
+ bad(&rc);
+ bad_mut(&rc);
+ also_bad(&rc);
+ less_bad(&rc);
+ not_good(&rc);
+ block_bad(&rc);
+}
diff --git a/src/tools/clippy/tests/ui/await_holding_refcell_ref.stderr b/src/tools/clippy/tests/ui/await_holding_refcell_ref.stderr
new file mode 100644
index 000000000..4339fca73
--- /dev/null
+++ b/src/tools/clippy/tests/ui/await_holding_refcell_ref.stderr
@@ -0,0 +1,101 @@
+error: this `RefCell` reference is held across an `await` point
+ --> $DIR/await_holding_refcell_ref.rs:6:9
+ |
+LL | let b = x.borrow();
+ | ^
+ |
+ = note: `-D clippy::await-holding-refcell-ref` implied by `-D warnings`
+ = help: ensure the reference is dropped before calling `await`
+note: these are all the `await` points this reference is held through
+ --> $DIR/await_holding_refcell_ref.rs:6:5
+ |
+LL | / let b = x.borrow();
+LL | | baz().await
+LL | | }
+ | |_^
+
+error: this `RefCell` reference is held across an `await` point
+ --> $DIR/await_holding_refcell_ref.rs:11:9
+ |
+LL | let b = x.borrow_mut();
+ | ^
+ |
+ = help: ensure the reference is dropped before calling `await`
+note: these are all the `await` points this reference is held through
+ --> $DIR/await_holding_refcell_ref.rs:11:5
+ |
+LL | / let b = x.borrow_mut();
+LL | | baz().await
+LL | | }
+ | |_^
+
+error: this `RefCell` reference is held across an `await` point
+ --> $DIR/await_holding_refcell_ref.rs:32:9
+ |
+LL | let b = x.borrow_mut();
+ | ^
+ |
+ = help: ensure the reference is dropped before calling `await`
+note: these are all the `await` points this reference is held through
+ --> $DIR/await_holding_refcell_ref.rs:32:5
+ |
+LL | / let b = x.borrow_mut();
+LL | |
+LL | | let second = baz().await;
+LL | |
+... |
+LL | | first + second + third
+LL | | }
+ | |_^
+
+error: this `RefCell` reference is held across an `await` point
+ --> $DIR/await_holding_refcell_ref.rs:44:9
+ |
+LL | let b = x.borrow_mut();
+ | ^
+ |
+ = help: ensure the reference is dropped before calling `await`
+note: these are all the `await` points this reference is held through
+ --> $DIR/await_holding_refcell_ref.rs:44:5
+ |
+LL | / let b = x.borrow_mut();
+LL | |
+LL | | let second = baz().await;
+LL | |
+... |
+LL | | first + second + third
+LL | | }
+ | |_^
+
+error: this `RefCell` reference is held across an `await` point
+ --> $DIR/await_holding_refcell_ref.rs:59:13
+ |
+LL | let b = x.borrow_mut();
+ | ^
+ |
+ = help: ensure the reference is dropped before calling `await`
+note: these are all the `await` points this reference is held through
+ --> $DIR/await_holding_refcell_ref.rs:59:9
+ |
+LL | / let b = x.borrow_mut();
+LL | | baz().await
+LL | | };
+ | |_____^
+
+error: this `RefCell` reference is held across an `await` point
+ --> $DIR/await_holding_refcell_ref.rs:71:13
+ |
+LL | let b = x.borrow_mut();
+ | ^
+ |
+ = help: ensure the reference is dropped before calling `await`
+note: these are all the `await` points this reference is held through
+ --> $DIR/await_holding_refcell_ref.rs:71:9
+ |
+LL | / let b = x.borrow_mut();
+LL | | baz().await
+LL | | }
+ | |_____^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map.fixed b/src/tools/clippy/tests/ui/bind_instead_of_map.fixed
new file mode 100644
index 000000000..5815550d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map.fixed
@@ -0,0 +1,25 @@
+// run-rustfix
+#![deny(clippy::bind_instead_of_map)]
+
+// need a main anyway, use it to get rid of unused warnings too
+pub fn main() {
+ let x = Some(5);
+ // the easiest cases
+ let _ = x;
+ let _ = x.map(|o| o + 1);
+ // and an easy counter-example
+ let _ = x.and_then(|o| if o < 32 { Some(o) } else { None });
+
+ // Different type
+ let x: Result<u32, &str> = Ok(1);
+ let _ = x;
+}
+
+pub fn foo() -> Option<String> {
+ let x = Some(String::from("hello"));
+ Some("hello".to_owned()).and_then(|s| Some(format!("{}{}", s, x?)))
+}
+
+pub fn example2(x: bool) -> Option<&'static str> {
+ Some("a").and_then(|s| Some(if x { s } else { return None }))
+}
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map.rs b/src/tools/clippy/tests/ui/bind_instead_of_map.rs
new file mode 100644
index 000000000..623b100a4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map.rs
@@ -0,0 +1,25 @@
+// run-rustfix
+#![deny(clippy::bind_instead_of_map)]
+
+// need a main anyway, use it to get rid of unused warnings too
+pub fn main() {
+ let x = Some(5);
+ // the easiest cases
+ let _ = x.and_then(Some);
+ let _ = x.and_then(|o| Some(o + 1));
+ // and an easy counter-example
+ let _ = x.and_then(|o| if o < 32 { Some(o) } else { None });
+
+ // Different type
+ let x: Result<u32, &str> = Ok(1);
+ let _ = x.and_then(Ok);
+}
+
+pub fn foo() -> Option<String> {
+ let x = Some(String::from("hello"));
+ Some("hello".to_owned()).and_then(|s| Some(format!("{}{}", s, x?)))
+}
+
+pub fn example2(x: bool) -> Option<&'static str> {
+ Some("a").and_then(|s| Some(if x { s } else { return None }))
+}
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map.stderr b/src/tools/clippy/tests/ui/bind_instead_of_map.stderr
new file mode 100644
index 000000000..24c6b7f9e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map.stderr
@@ -0,0 +1,26 @@
+error: using `Option.and_then(Some)`, which is a no-op
+ --> $DIR/bind_instead_of_map.rs:8:13
+ |
+LL | let _ = x.and_then(Some);
+ | ^^^^^^^^^^^^^^^^ help: use the expression directly: `x`
+ |
+note: the lint level is defined here
+ --> $DIR/bind_instead_of_map.rs:2:9
+ |
+LL | #![deny(clippy::bind_instead_of_map)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)`
+ --> $DIR/bind_instead_of_map.rs:9:13
+ |
+LL | let _ = x.and_then(|o| Some(o + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.map(|o| o + 1)`
+
+error: using `Result.and_then(Ok)`, which is a no-op
+ --> $DIR/bind_instead_of_map.rs:15:13
+ |
+LL | let _ = x.and_then(Ok);
+ | ^^^^^^^^^^^^^^ help: use the expression directly: `x`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed
new file mode 100644
index 000000000..e15898432
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.fixed
@@ -0,0 +1,62 @@
+// run-rustfix
+#![deny(clippy::bind_instead_of_map)]
+#![allow(clippy::blocks_in_if_conditions)]
+
+pub fn main() {
+ let _ = Some("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
+ let _ = Some("42").and_then(|s| if s.len() < 42 { None } else { Some(s.len()) });
+
+ let _ = Ok::<_, ()>("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
+ let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Err(()) } else { Ok(s.len()) });
+
+ let _ = Err::<(), _>("42").map_err(|s| if s.len() < 42 { s.len() + 20 } else { s.len() });
+ let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Ok(()) } else { Err(s.len()) });
+
+ hard_example();
+ macro_example();
+}
+
+fn hard_example() {
+ Some("42").map(|s| {
+ if {
+ if s == "43" {
+ return 43;
+ }
+ s == "42"
+ } {
+ return 45;
+ }
+ match s.len() {
+ 10 => 2,
+ 20 => {
+ if foo() {
+ return {
+ if foo() {
+ return 20;
+ }
+ println!("foo");
+ 3
+ };
+ }
+ 20
+ },
+ 40 => 30,
+ _ => 1,
+ }
+ });
+}
+
+fn foo() -> bool {
+ true
+}
+
+macro_rules! m {
+ () => {
+ Some(10)
+ };
+}
+
+fn macro_example() {
+ let _ = Some("").and_then(|s| if s.len() == 20 { m!() } else { Some(20) });
+ let _ = Some("").map(|s| if s.len() == 20 { m!() } else { Some(20) });
+}
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs
new file mode 100644
index 000000000..49944403f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.rs
@@ -0,0 +1,62 @@
+// run-rustfix
+#![deny(clippy::bind_instead_of_map)]
+#![allow(clippy::blocks_in_if_conditions)]
+
+pub fn main() {
+ let _ = Some("42").and_then(|s| if s.len() < 42 { Some(0) } else { Some(s.len()) });
+ let _ = Some("42").and_then(|s| if s.len() < 42 { None } else { Some(s.len()) });
+
+ let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) });
+ let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Err(()) } else { Ok(s.len()) });
+
+ let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) });
+ let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Ok(()) } else { Err(s.len()) });
+
+ hard_example();
+ macro_example();
+}
+
+fn hard_example() {
+ Some("42").and_then(|s| {
+ if {
+ if s == "43" {
+ return Some(43);
+ }
+ s == "42"
+ } {
+ return Some(45);
+ }
+ match s.len() {
+ 10 => Some(2),
+ 20 => {
+ if foo() {
+ return {
+ if foo() {
+ return Some(20);
+ }
+ println!("foo");
+ Some(3)
+ };
+ }
+ Some(20)
+ },
+ 40 => Some(30),
+ _ => Some(1),
+ }
+ });
+}
+
+fn foo() -> bool {
+ true
+}
+
+macro_rules! m {
+ () => {
+ Some(10)
+ };
+}
+
+fn macro_example() {
+ let _ = Some("").and_then(|s| if s.len() == 20 { m!() } else { Some(20) });
+ let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) });
+}
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr
new file mode 100644
index 000000000..0152a93fe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr
@@ -0,0 +1,91 @@
+error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)`
+ --> $DIR/bind_instead_of_map_multipart.rs:6:13
+ |
+LL | let _ = Some("42").and_then(|s| if s.len() < 42 { Some(0) } else { Some(s.len()) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/bind_instead_of_map_multipart.rs:2:9
+ |
+LL | #![deny(clippy::bind_instead_of_map)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try this
+ |
+LL | let _ = Some("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
+ | ~~~ ~ ~~~~~~~
+
+error: using `Result.and_then(|x| Ok(y))`, which is more succinctly expressed as `map(|x| y)`
+ --> $DIR/bind_instead_of_map_multipart.rs:9:13
+ |
+LL | let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL | let _ = Ok::<_, ()>("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
+ | ~~~ ~ ~~~~~~~
+
+error: using `Result.or_else(|x| Err(y))`, which is more succinctly expressed as `map_err(|x| y)`
+ --> $DIR/bind_instead_of_map_multipart.rs:12:13
+ |
+LL | let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL | let _ = Err::<(), _>("42").map_err(|s| if s.len() < 42 { s.len() + 20 } else { s.len() });
+ | ~~~~~~~ ~~~~~~~~~~~~ ~~~~~~~
+
+error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)`
+ --> $DIR/bind_instead_of_map_multipart.rs:20:5
+ |
+LL | / Some("42").and_then(|s| {
+LL | | if {
+LL | | if s == "43" {
+LL | | return Some(43);
+... |
+LL | | }
+LL | | });
+ | |______^
+ |
+help: try this
+ |
+LL ~ Some("42").map(|s| {
+LL | if {
+LL | if s == "43" {
+LL ~ return 43;
+LL | }
+LL | s == "42"
+LL | } {
+LL ~ return 45;
+LL | }
+LL | match s.len() {
+LL ~ 10 => 2,
+LL | 20 => {
+ ...
+LL | if foo() {
+LL ~ return 20;
+LL | }
+LL | println!("foo");
+LL ~ 3
+LL | };
+LL | }
+LL ~ 20
+LL | },
+LL ~ 40 => 30,
+LL ~ _ => 1,
+ |
+
+error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)`
+ --> $DIR/bind_instead_of_map_multipart.rs:61:13
+ |
+LL | let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL | let _ = Some("").map(|s| if s.len() == 20 { m!() } else { Some(20) });
+ | ~~~ ~~~~ ~~~~~~~~
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bit_masks.rs b/src/tools/clippy/tests/ui/bit_masks.rs
new file mode 100644
index 000000000..cfb493fb5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bit_masks.rs
@@ -0,0 +1,63 @@
+const THREE_BITS: i64 = 7;
+const EVEN_MORE_REDIRECTION: i64 = THREE_BITS;
+
+#[warn(clippy::bad_bit_mask)]
+#[allow(
+ clippy::ineffective_bit_mask,
+ clippy::identity_op,
+ clippy::no_effect,
+ clippy::unnecessary_operation
+)]
+fn main() {
+ let x = 5;
+
+ x & 0 == 0;
+ x & 1 == 1; //ok, distinguishes bit 0
+ x & 1 == 0; //ok, compared with zero
+ x & 2 == 1;
+ x | 0 == 0; //ok, equals x == 0 (maybe warn?)
+ x | 1 == 3; //ok, equals x == 2 || x == 3
+ x | 3 == 3; //ok, equals x <= 3
+ x | 3 == 2;
+
+ x & 1 > 1;
+ x & 2 > 1; // ok, distinguishes x & 2 == 2 from x & 2 == 0
+ x & 2 < 1; // ok, distinguishes x & 2 == 2 from x & 2 == 0
+ x | 1 > 1; // ok (if a bit silly), equals x > 1
+ x | 2 > 1;
+ x | 2 <= 2; // ok (if a bit silly), equals x <= 2
+
+ x & 192 == 128; // ok, tests for bit 7 and not bit 6
+ x & 0xffc0 == 0xfe80; // ok
+
+ // this also now works with constants
+ x & THREE_BITS == 8;
+ x | EVEN_MORE_REDIRECTION < 7;
+
+ 0 & x == 0;
+ 1 | x > 1;
+
+ // and should now also match uncommon usage
+ 1 < 2 | x;
+ 2 == 3 | x;
+ 1 == x & 2;
+
+ x | 1 > 2; // no error, because we allowed ineffective bit masks
+ ineffective();
+}
+
+#[warn(clippy::ineffective_bit_mask)]
+#[allow(clippy::bad_bit_mask, clippy::no_effect, clippy::unnecessary_operation)]
+fn ineffective() {
+ let x = 5;
+
+ x | 1 > 3;
+ x | 1 < 4;
+ x | 1 <= 3;
+ x | 1 >= 8;
+
+ x | 1 > 2; // not an error (yet), better written as x >= 2
+ x | 1 >= 7; // not an error (yet), better written as x >= 6
+ x | 3 > 4; // not an error (yet), better written as x >= 4
+ x | 4 <= 19;
+}
diff --git a/src/tools/clippy/tests/ui/bit_masks.stderr b/src/tools/clippy/tests/ui/bit_masks.stderr
new file mode 100644
index 000000000..dc5ad6dfb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bit_masks.stderr
@@ -0,0 +1,110 @@
+error: &-masking with zero
+ --> $DIR/bit_masks.rs:14:5
+ |
+LL | x & 0 == 0;
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::bad-bit-mask` implied by `-D warnings`
+
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/bit_masks.rs:14:5
+ |
+LL | x & 0 == 0;
+ | ^^^^^
+ |
+ = note: `#[deny(clippy::erasing_op)]` on by default
+
+error: incompatible bit mask: `_ & 2` can never be equal to `1`
+ --> $DIR/bit_masks.rs:17:5
+ |
+LL | x & 2 == 1;
+ | ^^^^^^^^^^
+
+error: incompatible bit mask: `_ | 3` can never be equal to `2`
+ --> $DIR/bit_masks.rs:21:5
+ |
+LL | x | 3 == 2;
+ | ^^^^^^^^^^
+
+error: incompatible bit mask: `_ & 1` will never be higher than `1`
+ --> $DIR/bit_masks.rs:23:5
+ |
+LL | x & 1 > 1;
+ | ^^^^^^^^^
+
+error: incompatible bit mask: `_ | 2` will always be higher than `1`
+ --> $DIR/bit_masks.rs:27:5
+ |
+LL | x | 2 > 1;
+ | ^^^^^^^^^
+
+error: incompatible bit mask: `_ & 7` can never be equal to `8`
+ --> $DIR/bit_masks.rs:34:5
+ |
+LL | x & THREE_BITS == 8;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: incompatible bit mask: `_ | 7` will never be lower than `7`
+ --> $DIR/bit_masks.rs:35:5
+ |
+LL | x | EVEN_MORE_REDIRECTION < 7;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: &-masking with zero
+ --> $DIR/bit_masks.rs:37:5
+ |
+LL | 0 & x == 0;
+ | ^^^^^^^^^^
+
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/bit_masks.rs:37:5
+ |
+LL | 0 & x == 0;
+ | ^^^^^
+
+error: incompatible bit mask: `_ | 2` will always be higher than `1`
+ --> $DIR/bit_masks.rs:41:5
+ |
+LL | 1 < 2 | x;
+ | ^^^^^^^^^
+
+error: incompatible bit mask: `_ | 3` can never be equal to `2`
+ --> $DIR/bit_masks.rs:42:5
+ |
+LL | 2 == 3 | x;
+ | ^^^^^^^^^^
+
+error: incompatible bit mask: `_ & 2` can never be equal to `1`
+ --> $DIR/bit_masks.rs:43:5
+ |
+LL | 1 == x & 2;
+ | ^^^^^^^^^^
+
+error: ineffective bit mask: `x | 1` compared to `3`, is the same as x compared directly
+ --> $DIR/bit_masks.rs:54:5
+ |
+LL | x | 1 > 3;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::ineffective-bit-mask` implied by `-D warnings`
+
+error: ineffective bit mask: `x | 1` compared to `4`, is the same as x compared directly
+ --> $DIR/bit_masks.rs:55:5
+ |
+LL | x | 1 < 4;
+ | ^^^^^^^^^
+
+error: ineffective bit mask: `x | 1` compared to `3`, is the same as x compared directly
+ --> $DIR/bit_masks.rs:56:5
+ |
+LL | x | 1 <= 3;
+ | ^^^^^^^^^^
+
+error: ineffective bit mask: `x | 1` compared to `8`, is the same as x compared directly
+ --> $DIR/bit_masks.rs:57:5
+ |
+LL | x | 1 >= 8;
+ | ^^^^^^^^^^
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/blacklisted_name.rs b/src/tools/clippy/tests/ui/blacklisted_name.rs
new file mode 100644
index 000000000..27df732a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blacklisted_name.rs
@@ -0,0 +1,57 @@
+#![allow(
+ dead_code,
+ clippy::similar_names,
+ clippy::single_match,
+ clippy::toplevel_ref_arg,
+ unused_mut,
+ unused_variables
+)]
+#![warn(clippy::blacklisted_name)]
+
+fn test(foo: ()) {}
+
+fn main() {
+ let foo = 42;
+ let baz = 42;
+ let quux = 42;
+ // Unlike these others, `bar` is actually considered an acceptable name.
+ // Among many other legitimate uses, bar commonly refers to a period of time in music.
+ // See https://github.com/rust-lang/rust-clippy/issues/5225.
+ let bar = 42;
+
+ let food = 42;
+ let foodstuffs = 42;
+ let bazaar = 42;
+
+ match (42, Some(1337), Some(0)) {
+ (foo, Some(baz), quux @ Some(_)) => (),
+ _ => (),
+ }
+}
+
+fn issue_1647(mut foo: u8) {
+ let mut baz = 0;
+ if let Some(mut quux) = Some(42) {}
+}
+
+fn issue_1647_ref() {
+ let ref baz = 0;
+ if let Some(ref quux) = Some(42) {}
+}
+
+fn issue_1647_ref_mut() {
+ let ref mut baz = 0;
+ if let Some(ref mut quux) = Some(42) {}
+}
+
+mod tests {
+ fn issue_7305() {
+ // `blacklisted_name` lint should not be triggered inside of the test code.
+ let foo = 0;
+
+ // Check that even in nested functions the warning is still not triggered.
+ fn nested() {
+ let foo = 0;
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/blacklisted_name.stderr b/src/tools/clippy/tests/ui/blacklisted_name.stderr
new file mode 100644
index 000000000..70dbdaece
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blacklisted_name.stderr
@@ -0,0 +1,88 @@
+error: use of a blacklisted/placeholder name `foo`
+ --> $DIR/blacklisted_name.rs:11:9
+ |
+LL | fn test(foo: ()) {}
+ | ^^^
+ |
+ = note: `-D clippy::blacklisted-name` implied by `-D warnings`
+
+error: use of a blacklisted/placeholder name `foo`
+ --> $DIR/blacklisted_name.rs:14:9
+ |
+LL | let foo = 42;
+ | ^^^
+
+error: use of a blacklisted/placeholder name `baz`
+ --> $DIR/blacklisted_name.rs:15:9
+ |
+LL | let baz = 42;
+ | ^^^
+
+error: use of a blacklisted/placeholder name `quux`
+ --> $DIR/blacklisted_name.rs:16:9
+ |
+LL | let quux = 42;
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `foo`
+ --> $DIR/blacklisted_name.rs:27:10
+ |
+LL | (foo, Some(baz), quux @ Some(_)) => (),
+ | ^^^
+
+error: use of a blacklisted/placeholder name `baz`
+ --> $DIR/blacklisted_name.rs:27:20
+ |
+LL | (foo, Some(baz), quux @ Some(_)) => (),
+ | ^^^
+
+error: use of a blacklisted/placeholder name `quux`
+ --> $DIR/blacklisted_name.rs:27:26
+ |
+LL | (foo, Some(baz), quux @ Some(_)) => (),
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `foo`
+ --> $DIR/blacklisted_name.rs:32:19
+ |
+LL | fn issue_1647(mut foo: u8) {
+ | ^^^
+
+error: use of a blacklisted/placeholder name `baz`
+ --> $DIR/blacklisted_name.rs:33:13
+ |
+LL | let mut baz = 0;
+ | ^^^
+
+error: use of a blacklisted/placeholder name `quux`
+ --> $DIR/blacklisted_name.rs:34:21
+ |
+LL | if let Some(mut quux) = Some(42) {}
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `baz`
+ --> $DIR/blacklisted_name.rs:38:13
+ |
+LL | let ref baz = 0;
+ | ^^^
+
+error: use of a blacklisted/placeholder name `quux`
+ --> $DIR/blacklisted_name.rs:39:21
+ |
+LL | if let Some(ref quux) = Some(42) {}
+ | ^^^^
+
+error: use of a blacklisted/placeholder name `baz`
+ --> $DIR/blacklisted_name.rs:43:17
+ |
+LL | let ref mut baz = 0;
+ | ^^^
+
+error: use of a blacklisted/placeholder name `quux`
+ --> $DIR/blacklisted_name.rs:44:25
+ |
+LL | if let Some(ref mut quux) = Some(42) {}
+ | ^^^^
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.rs b/src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.rs
new file mode 100644
index 000000000..d055f1752
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.rs
@@ -0,0 +1,8 @@
+#![warn(clippy::blanket_clippy_restriction_lints)]
+
+//! Test that the whole restriction group is not enabled
+#![warn(clippy::restriction)]
+#![deny(clippy::restriction)]
+#![forbid(clippy::restriction)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.stderr b/src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.stderr
new file mode 100644
index 000000000..537557f8b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blanket_clippy_restriction_lints.stderr
@@ -0,0 +1,27 @@
+error: restriction lints are not meant to be all enabled
+ --> $DIR/blanket_clippy_restriction_lints.rs:4:9
+ |
+LL | #![warn(clippy::restriction)]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::blanket-clippy-restriction-lints` implied by `-D warnings`
+ = help: try enabling only the lints you really need
+
+error: restriction lints are not meant to be all enabled
+ --> $DIR/blanket_clippy_restriction_lints.rs:5:9
+ |
+LL | #![deny(clippy::restriction)]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: try enabling only the lints you really need
+
+error: restriction lints are not meant to be all enabled
+ --> $DIR/blanket_clippy_restriction_lints.rs:6:11
+ |
+LL | #![forbid(clippy::restriction)]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: try enabling only the lints you really need
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed b/src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed
new file mode 100644
index 000000000..e6e40a994
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blocks_in_if_conditions.fixed
@@ -0,0 +1,65 @@
+// run-rustfix
+#![warn(clippy::blocks_in_if_conditions)]
+#![allow(unused, clippy::let_and_return)]
+#![warn(clippy::nonminimal_bool)]
+
+macro_rules! blocky {
+ () => {{ true }};
+}
+
+macro_rules! blocky_too {
+ () => {{
+ let r = true;
+ r
+ }};
+}
+
+fn macro_if() {
+ if blocky!() {}
+
+ if blocky_too!() {}
+}
+
+fn condition_has_block() -> i32 {
+ let res = {
+ let x = 3;
+ x == 3
+ }; if res {
+ 6
+ } else {
+ 10
+ }
+}
+
+fn condition_has_block_with_single_expression() -> i32 {
+ if true { 6 } else { 10 }
+}
+
+fn condition_is_normal() -> i32 {
+ let x = 3;
+ if x == 3 { 6 } else { 10 }
+}
+
+fn condition_is_unsafe_block() {
+ let a: i32 = 1;
+
+ // this should not warn because the condition is an unsafe block
+ if unsafe { 1u32 == std::mem::transmute(a) } {
+ println!("1u32 == a");
+ }
+}
+
+fn block_in_assert() {
+ let opt = Some(42);
+ assert!(
+ opt.as_ref()
+ .map(|val| {
+ let mut v = val * 2;
+ v -= 1;
+ v * 3
+ })
+ .is_some()
+ );
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions.rs b/src/tools/clippy/tests/ui/blocks_in_if_conditions.rs
new file mode 100644
index 000000000..69387ff57
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blocks_in_if_conditions.rs
@@ -0,0 +1,65 @@
+// run-rustfix
+#![warn(clippy::blocks_in_if_conditions)]
+#![allow(unused, clippy::let_and_return)]
+#![warn(clippy::nonminimal_bool)]
+
+macro_rules! blocky {
+ () => {{ true }};
+}
+
+macro_rules! blocky_too {
+ () => {{
+ let r = true;
+ r
+ }};
+}
+
+fn macro_if() {
+ if blocky!() {}
+
+ if blocky_too!() {}
+}
+
+fn condition_has_block() -> i32 {
+ if {
+ let x = 3;
+ x == 3
+ } {
+ 6
+ } else {
+ 10
+ }
+}
+
+fn condition_has_block_with_single_expression() -> i32 {
+ if { true } { 6 } else { 10 }
+}
+
+fn condition_is_normal() -> i32 {
+ let x = 3;
+ if true && x == 3 { 6 } else { 10 }
+}
+
+fn condition_is_unsafe_block() {
+ let a: i32 = 1;
+
+ // this should not warn because the condition is an unsafe block
+ if unsafe { 1u32 == std::mem::transmute(a) } {
+ println!("1u32 == a");
+ }
+}
+
+fn block_in_assert() {
+ let opt = Some(42);
+ assert!(
+ opt.as_ref()
+ .map(|val| {
+ let mut v = val * 2;
+ v -= 1;
+ v * 3
+ })
+ .is_some()
+ );
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr b/src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr
new file mode 100644
index 000000000..079f2feb5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blocks_in_if_conditions.stderr
@@ -0,0 +1,34 @@
+error: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ --> $DIR/blocks_in_if_conditions.rs:24:5
+ |
+LL | / if {
+LL | | let x = 3;
+LL | | x == 3
+LL | | } {
+ | |_____^
+ |
+ = note: `-D clippy::blocks-in-if-conditions` implied by `-D warnings`
+help: try
+ |
+LL ~ let res = {
+LL + let x = 3;
+LL + x == 3
+LL ~ }; if res {
+ |
+
+error: omit braces around single expression condition
+ --> $DIR/blocks_in_if_conditions.rs:35:8
+ |
+LL | if { true } { 6 } else { 10 }
+ | ^^^^^^^^ help: try: `true`
+
+error: this boolean expression can be simplified
+ --> $DIR/blocks_in_if_conditions.rs:40:8
+ |
+LL | if true && x == 3 { 6 } else { 10 }
+ | ^^^^^^^^^^^^^^ help: try: `x == 3`
+ |
+ = note: `-D clippy::nonminimal-bool` implied by `-D warnings`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs b/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs
new file mode 100644
index 000000000..169589f6d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.rs
@@ -0,0 +1,64 @@
+#![warn(clippy::blocks_in_if_conditions)]
+#![allow(unused, clippy::let_and_return)]
+
+fn predicate<F: FnOnce(T) -> bool, T>(pfn: F, val: T) -> bool {
+ pfn(val)
+}
+
+fn pred_test() {
+ let v = 3;
+ let sky = "blue";
+ // This is a sneaky case, where the block isn't directly in the condition,
+ // but is actually inside a closure that the condition is using.
+ // The same principle applies -- add some extra expressions to make sure
+ // the linter isn't confused by them.
+ if v == 3
+ && sky == "blue"
+ && predicate(
+ |x| {
+ let target = 3;
+ x == target
+ },
+ v,
+ )
+ {}
+
+ if predicate(
+ |x| {
+ let target = 3;
+ x == target
+ },
+ v,
+ ) {}
+}
+
+fn closure_without_block() {
+ if predicate(|x| x == 3, 6) {}
+}
+
+fn macro_in_closure() {
+ let option = Some(true);
+
+ if option.unwrap_or_else(|| unimplemented!()) {
+ unimplemented!()
+ }
+}
+
+fn closure(_: impl FnMut()) -> bool {
+ true
+}
+
+fn function_with_empty_closure() {
+ if closure(|| {}) {}
+}
+
+#[rustfmt::skip]
+fn main() {
+ let mut range = 0..10;
+ range.all(|i| {i < 10} );
+
+ let v = vec![1, 2, 3];
+ if v.into_iter().any(|x| {x == 4}) {
+ println!("contains 4!");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr b/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr
new file mode 100644
index 000000000..941d604dd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/blocks_in_if_conditions_closure.stderr
@@ -0,0 +1,24 @@
+error: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ --> $DIR/blocks_in_if_conditions_closure.rs:18:17
+ |
+LL | |x| {
+ | _________________^
+LL | | let target = 3;
+LL | | x == target
+LL | | },
+ | |_____________^
+ |
+ = note: `-D clippy::blocks-in-if-conditions` implied by `-D warnings`
+
+error: in an `if` condition, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let`
+ --> $DIR/blocks_in_if_conditions_closure.rs:27:13
+ |
+LL | |x| {
+ | _____________^
+LL | | let target = 3;
+LL | | x == target
+LL | | },
+ | |_________^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bool_assert_comparison.rs b/src/tools/clippy/tests/ui/bool_assert_comparison.rs
new file mode 100644
index 000000000..ec4d6f3ff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bool_assert_comparison.rs
@@ -0,0 +1,122 @@
+#![warn(clippy::bool_assert_comparison)]
+
+use std::ops::Not;
+
+macro_rules! a {
+ () => {
+ true
+ };
+}
+macro_rules! b {
+ () => {
+ true
+ };
+}
+
+// Implements the Not trait, but with an output type
+// that's not bool. Should not suggest a rewrite.
+#[derive(Debug)]
+enum ImplNotTraitWithoutBool {
+ VariantX(bool),
+ VariantY(u32),
+}
+
+impl PartialEq<bool> for ImplNotTraitWithoutBool {
+ fn eq(&self, other: &bool) -> bool {
+ match *self {
+ ImplNotTraitWithoutBool::VariantX(b) => b == *other,
+ _ => false,
+ }
+ }
+}
+
+impl Not for ImplNotTraitWithoutBool {
+ type Output = Self;
+
+ fn not(self) -> Self::Output {
+ match self {
+ ImplNotTraitWithoutBool::VariantX(b) => ImplNotTraitWithoutBool::VariantX(!b),
+ ImplNotTraitWithoutBool::VariantY(0) => ImplNotTraitWithoutBool::VariantY(1),
+ ImplNotTraitWithoutBool::VariantY(_) => ImplNotTraitWithoutBool::VariantY(0),
+ }
+ }
+}
+
+// This type implements the Not trait with an Output of
+// type bool, so using assert!(..) should be suggested.
+#[derive(Debug)]
+struct ImplNotTraitWithBool;
+
+impl PartialEq<bool> for ImplNotTraitWithBool {
+ fn eq(&self, other: &bool) -> bool {
+ false
+ }
+}
+
+impl Not for ImplNotTraitWithBool {
+ type Output = bool;
+
+ fn not(self) -> Self::Output {
+ true
+ }
+}
+
+fn main() {
+ let a = ImplNotTraitWithoutBool::VariantX(true);
+ let b = ImplNotTraitWithBool;
+
+ assert_eq!("a".len(), 1);
+ assert_eq!("a".is_empty(), false);
+ assert_eq!("".is_empty(), true);
+ assert_eq!(true, "".is_empty());
+ assert_eq!(a!(), b!());
+ assert_eq!(a!(), "".is_empty());
+ assert_eq!("".is_empty(), b!());
+ assert_eq!(a, true);
+ assert_eq!(b, true);
+
+ assert_ne!("a".len(), 1);
+ assert_ne!("a".is_empty(), false);
+ assert_ne!("".is_empty(), true);
+ assert_ne!(true, "".is_empty());
+ assert_ne!(a!(), b!());
+ assert_ne!(a!(), "".is_empty());
+ assert_ne!("".is_empty(), b!());
+ assert_ne!(a, true);
+ assert_ne!(b, true);
+
+ debug_assert_eq!("a".len(), 1);
+ debug_assert_eq!("a".is_empty(), false);
+ debug_assert_eq!("".is_empty(), true);
+ debug_assert_eq!(true, "".is_empty());
+ debug_assert_eq!(a!(), b!());
+ debug_assert_eq!(a!(), "".is_empty());
+ debug_assert_eq!("".is_empty(), b!());
+ debug_assert_eq!(a, true);
+ debug_assert_eq!(b, true);
+
+ debug_assert_ne!("a".len(), 1);
+ debug_assert_ne!("a".is_empty(), false);
+ debug_assert_ne!("".is_empty(), true);
+ debug_assert_ne!(true, "".is_empty());
+ debug_assert_ne!(a!(), b!());
+ debug_assert_ne!(a!(), "".is_empty());
+ debug_assert_ne!("".is_empty(), b!());
+ debug_assert_ne!(a, true);
+ debug_assert_ne!(b, true);
+
+ // assert with error messages
+ assert_eq!("a".len(), 1, "tadam {}", 1);
+ assert_eq!("a".len(), 1, "tadam {}", true);
+ assert_eq!("a".is_empty(), false, "tadam {}", 1);
+ assert_eq!("a".is_empty(), false, "tadam {}", true);
+ assert_eq!(false, "a".is_empty(), "tadam {}", true);
+ assert_eq!(a, true, "tadam {}", false);
+
+ debug_assert_eq!("a".len(), 1, "tadam {}", 1);
+ debug_assert_eq!("a".len(), 1, "tadam {}", true);
+ debug_assert_eq!("a".is_empty(), false, "tadam {}", 1);
+ debug_assert_eq!("a".is_empty(), false, "tadam {}", true);
+ debug_assert_eq!(false, "a".is_empty(), "tadam {}", true);
+ debug_assert_eq!(a, true, "tadam {}", false);
+}
diff --git a/src/tools/clippy/tests/ui/bool_assert_comparison.stderr b/src/tools/clippy/tests/ui/bool_assert_comparison.stderr
new file mode 100644
index 000000000..377d51be4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bool_assert_comparison.stderr
@@ -0,0 +1,136 @@
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:69:5
+ |
+LL | assert_eq!("a".is_empty(), false);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+ |
+ = note: `-D clippy::bool-assert-comparison` implied by `-D warnings`
+
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:70:5
+ |
+LL | assert_eq!("".is_empty(), true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:71:5
+ |
+LL | assert_eq!(true, "".is_empty());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:76:5
+ |
+LL | assert_eq!(b, true);
+ | ^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:79:5
+ |
+LL | assert_ne!("a".is_empty(), false);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:80:5
+ |
+LL | assert_ne!("".is_empty(), true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:81:5
+ |
+LL | assert_ne!(true, "".is_empty());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:86:5
+ |
+LL | assert_ne!(b, true);
+ | ^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:89:5
+ |
+LL | debug_assert_eq!("a".is_empty(), false);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:90:5
+ |
+LL | debug_assert_eq!("".is_empty(), true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:91:5
+ |
+LL | debug_assert_eq!(true, "".is_empty());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:96:5
+ |
+LL | debug_assert_eq!(b, true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:99:5
+ |
+LL | debug_assert_ne!("a".is_empty(), false);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:100:5
+ |
+LL | debug_assert_ne!("".is_empty(), true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:101:5
+ |
+LL | debug_assert_ne!(true, "".is_empty());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_ne!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:106:5
+ |
+LL | debug_assert_ne!(b, true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:111:5
+ |
+LL | assert_eq!("a".is_empty(), false, "tadam {}", 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:112:5
+ |
+LL | assert_eq!("a".is_empty(), false, "tadam {}", true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:113:5
+ |
+LL | assert_eq!(false, "a".is_empty(), "tadam {}", true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:118:5
+ |
+LL | debug_assert_eq!("a".is_empty(), false, "tadam {}", 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:119:5
+ |
+LL | debug_assert_eq!("a".is_empty(), false, "tadam {}", true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: used `debug_assert_eq!` with a literal bool
+ --> $DIR/bool_assert_comparison.rs:120:5
+ |
+LL | debug_assert_eq!(false, "a".is_empty(), "tadam {}", true);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bool_comparison.fixed b/src/tools/clippy/tests/ui/bool_comparison.fixed
new file mode 100644
index 000000000..5a012ff4d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bool_comparison.fixed
@@ -0,0 +1,167 @@
+// run-rustfix
+
+#![warn(clippy::bool_comparison)]
+
+fn main() {
+ let x = true;
+ if x {
+ "yes"
+ } else {
+ "no"
+ };
+ if !x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x {
+ "yes"
+ } else {
+ "no"
+ };
+ if !x {
+ "yes"
+ } else {
+ "no"
+ };
+ if !x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x {
+ "yes"
+ } else {
+ "no"
+ };
+ if !x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x {
+ "yes"
+ } else {
+ "no"
+ };
+ if !x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x {
+ "yes"
+ } else {
+ "no"
+ };
+ if !x {
+ "yes"
+ } else {
+ "no"
+ };
+ let y = true;
+ if !x & y {
+ "yes"
+ } else {
+ "no"
+ };
+ if x & !y {
+ "yes"
+ } else {
+ "no"
+ };
+}
+
+#[allow(dead_code)]
+fn issue3703() {
+ struct Foo;
+ impl PartialEq<bool> for Foo {
+ fn eq(&self, _: &bool) -> bool {
+ true
+ }
+ }
+ impl PartialEq<Foo> for bool {
+ fn eq(&self, _: &Foo) -> bool {
+ true
+ }
+ }
+ impl PartialOrd<bool> for Foo {
+ fn partial_cmp(&self, _: &bool) -> Option<std::cmp::Ordering> {
+ None
+ }
+ }
+ impl PartialOrd<Foo> for bool {
+ fn partial_cmp(&self, _: &Foo) -> Option<std::cmp::Ordering> {
+ None
+ }
+ }
+
+ if Foo == true {}
+ if true == Foo {}
+ if Foo != true {}
+ if true != Foo {}
+ if Foo == false {}
+ if false == Foo {}
+ if Foo != false {}
+ if false != Foo {}
+ if Foo < false {}
+ if false < Foo {}
+}
+
+#[allow(dead_code)]
+fn issue4983() {
+ let a = true;
+ let b = false;
+
+ if a != b {};
+ if a != b {};
+ if a == b {};
+ if !a == !b {};
+
+ if b != a {};
+ if b != a {};
+ if b == a {};
+ if !b == !a {};
+}
+
+macro_rules! m {
+ ($func:ident) => {
+ $func()
+ };
+}
+
+fn func() -> bool {
+ true
+}
+
+#[allow(dead_code)]
+fn issue3973() {
+ // ok, don't lint on `cfg` invocation
+ if false == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == false {}
+ if true == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == true {}
+
+ // lint, could be simplified
+ if !m!(func) {}
+ if !m!(func) {}
+ if m!(func) {}
+ if m!(func) {}
+
+ // no lint with a variable
+ let is_debug = false;
+ if is_debug == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == is_debug {}
+ if is_debug == m!(func) {}
+ if m!(func) == is_debug {}
+ let is_debug = true;
+ if is_debug == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == is_debug {}
+ if is_debug == m!(func) {}
+ if m!(func) == is_debug {}
+}
diff --git a/src/tools/clippy/tests/ui/bool_comparison.rs b/src/tools/clippy/tests/ui/bool_comparison.rs
new file mode 100644
index 000000000..c534bc25c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bool_comparison.rs
@@ -0,0 +1,167 @@
+// run-rustfix
+
+#![warn(clippy::bool_comparison)]
+
+fn main() {
+ let x = true;
+ if x == true {
+ "yes"
+ } else {
+ "no"
+ };
+ if x == false {
+ "yes"
+ } else {
+ "no"
+ };
+ if true == x {
+ "yes"
+ } else {
+ "no"
+ };
+ if false == x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x != true {
+ "yes"
+ } else {
+ "no"
+ };
+ if x != false {
+ "yes"
+ } else {
+ "no"
+ };
+ if true != x {
+ "yes"
+ } else {
+ "no"
+ };
+ if false != x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x < true {
+ "yes"
+ } else {
+ "no"
+ };
+ if false < x {
+ "yes"
+ } else {
+ "no"
+ };
+ if x > false {
+ "yes"
+ } else {
+ "no"
+ };
+ if true > x {
+ "yes"
+ } else {
+ "no"
+ };
+ let y = true;
+ if x < y {
+ "yes"
+ } else {
+ "no"
+ };
+ if x > y {
+ "yes"
+ } else {
+ "no"
+ };
+}
+
+#[allow(dead_code)]
+fn issue3703() {
+ struct Foo;
+ impl PartialEq<bool> for Foo {
+ fn eq(&self, _: &bool) -> bool {
+ true
+ }
+ }
+ impl PartialEq<Foo> for bool {
+ fn eq(&self, _: &Foo) -> bool {
+ true
+ }
+ }
+ impl PartialOrd<bool> for Foo {
+ fn partial_cmp(&self, _: &bool) -> Option<std::cmp::Ordering> {
+ None
+ }
+ }
+ impl PartialOrd<Foo> for bool {
+ fn partial_cmp(&self, _: &Foo) -> Option<std::cmp::Ordering> {
+ None
+ }
+ }
+
+ if Foo == true {}
+ if true == Foo {}
+ if Foo != true {}
+ if true != Foo {}
+ if Foo == false {}
+ if false == Foo {}
+ if Foo != false {}
+ if false != Foo {}
+ if Foo < false {}
+ if false < Foo {}
+}
+
+#[allow(dead_code)]
+fn issue4983() {
+ let a = true;
+ let b = false;
+
+ if a == !b {};
+ if !a == b {};
+ if a == b {};
+ if !a == !b {};
+
+ if b == !a {};
+ if !b == a {};
+ if b == a {};
+ if !b == !a {};
+}
+
+macro_rules! m {
+ ($func:ident) => {
+ $func()
+ };
+}
+
+fn func() -> bool {
+ true
+}
+
+#[allow(dead_code)]
+fn issue3973() {
+ // ok, don't lint on `cfg` invocation
+ if false == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == false {}
+ if true == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == true {}
+
+ // lint, could be simplified
+ if false == m!(func) {}
+ if m!(func) == false {}
+ if true == m!(func) {}
+ if m!(func) == true {}
+
+ // no lint with a variable
+ let is_debug = false;
+ if is_debug == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == is_debug {}
+ if is_debug == m!(func) {}
+ if m!(func) == is_debug {}
+ let is_debug = true;
+ if is_debug == cfg!(feature = "debugging") {}
+ if cfg!(feature = "debugging") == is_debug {}
+ if is_debug == m!(func) {}
+ if m!(func) == is_debug {}
+}
diff --git a/src/tools/clippy/tests/ui/bool_comparison.stderr b/src/tools/clippy/tests/ui/bool_comparison.stderr
new file mode 100644
index 000000000..31522d4a5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bool_comparison.stderr
@@ -0,0 +1,136 @@
+error: equality checks against true are unnecessary
+ --> $DIR/bool_comparison.rs:7:8
+ |
+LL | if x == true {
+ | ^^^^^^^^^ help: try simplifying it as shown: `x`
+ |
+ = note: `-D clippy::bool-comparison` implied by `-D warnings`
+
+error: equality checks against false can be replaced by a negation
+ --> $DIR/bool_comparison.rs:12:8
+ |
+LL | if x == false {
+ | ^^^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: equality checks against true are unnecessary
+ --> $DIR/bool_comparison.rs:17:8
+ |
+LL | if true == x {
+ | ^^^^^^^^^ help: try simplifying it as shown: `x`
+
+error: equality checks against false can be replaced by a negation
+ --> $DIR/bool_comparison.rs:22:8
+ |
+LL | if false == x {
+ | ^^^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: inequality checks against true can be replaced by a negation
+ --> $DIR/bool_comparison.rs:27:8
+ |
+LL | if x != true {
+ | ^^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: inequality checks against false are unnecessary
+ --> $DIR/bool_comparison.rs:32:8
+ |
+LL | if x != false {
+ | ^^^^^^^^^^ help: try simplifying it as shown: `x`
+
+error: inequality checks against true can be replaced by a negation
+ --> $DIR/bool_comparison.rs:37:8
+ |
+LL | if true != x {
+ | ^^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: inequality checks against false are unnecessary
+ --> $DIR/bool_comparison.rs:42:8
+ |
+LL | if false != x {
+ | ^^^^^^^^^^ help: try simplifying it as shown: `x`
+
+error: less than comparison against true can be replaced by a negation
+ --> $DIR/bool_comparison.rs:47:8
+ |
+LL | if x < true {
+ | ^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: greater than checks against false are unnecessary
+ --> $DIR/bool_comparison.rs:52:8
+ |
+LL | if false < x {
+ | ^^^^^^^^^ help: try simplifying it as shown: `x`
+
+error: greater than checks against false are unnecessary
+ --> $DIR/bool_comparison.rs:57:8
+ |
+LL | if x > false {
+ | ^^^^^^^^^ help: try simplifying it as shown: `x`
+
+error: less than comparison against true can be replaced by a negation
+ --> $DIR/bool_comparison.rs:62:8
+ |
+LL | if true > x {
+ | ^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: order comparisons between booleans can be simplified
+ --> $DIR/bool_comparison.rs:68:8
+ |
+LL | if x < y {
+ | ^^^^^ help: try simplifying it as shown: `!x & y`
+
+error: order comparisons between booleans can be simplified
+ --> $DIR/bool_comparison.rs:73:8
+ |
+LL | if x > y {
+ | ^^^^^ help: try simplifying it as shown: `x & !y`
+
+error: this comparison might be written more concisely
+ --> $DIR/bool_comparison.rs:121:8
+ |
+LL | if a == !b {};
+ | ^^^^^^^ help: try simplifying it as shown: `a != b`
+
+error: this comparison might be written more concisely
+ --> $DIR/bool_comparison.rs:122:8
+ |
+LL | if !a == b {};
+ | ^^^^^^^ help: try simplifying it as shown: `a != b`
+
+error: this comparison might be written more concisely
+ --> $DIR/bool_comparison.rs:126:8
+ |
+LL | if b == !a {};
+ | ^^^^^^^ help: try simplifying it as shown: `b != a`
+
+error: this comparison might be written more concisely
+ --> $DIR/bool_comparison.rs:127:8
+ |
+LL | if !b == a {};
+ | ^^^^^^^ help: try simplifying it as shown: `b != a`
+
+error: equality checks against false can be replaced by a negation
+ --> $DIR/bool_comparison.rs:151:8
+ |
+LL | if false == m!(func) {}
+ | ^^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `!m!(func)`
+
+error: equality checks against false can be replaced by a negation
+ --> $DIR/bool_comparison.rs:152:8
+ |
+LL | if m!(func) == false {}
+ | ^^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `!m!(func)`
+
+error: equality checks against true are unnecessary
+ --> $DIR/bool_comparison.rs:153:8
+ |
+LL | if true == m!(func) {}
+ | ^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `m!(func)`
+
+error: equality checks against true are unnecessary
+ --> $DIR/bool_comparison.rs:154:8
+ |
+LL | if m!(func) == true {}
+ | ^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `m!(func)`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr.fixed b/src/tools/clippy/tests/ui/borrow_as_ptr.fixed
new file mode 100644
index 000000000..ff5c6a8c3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_as_ptr.fixed
@@ -0,0 +1,10 @@
+// run-rustfix
+#![warn(clippy::borrow_as_ptr)]
+
+fn main() {
+ let val = 1;
+ let _p = std::ptr::addr_of!(val);
+
+ let mut val_mut = 1;
+ let _p_mut = std::ptr::addr_of_mut!(val_mut);
+}
diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr.rs b/src/tools/clippy/tests/ui/borrow_as_ptr.rs
new file mode 100644
index 000000000..0f62ec6ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_as_ptr.rs
@@ -0,0 +1,10 @@
+// run-rustfix
+#![warn(clippy::borrow_as_ptr)]
+
+fn main() {
+ let val = 1;
+ let _p = &val as *const i32;
+
+ let mut val_mut = 1;
+ let _p_mut = &mut val_mut as *mut i32;
+}
diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr.stderr b/src/tools/clippy/tests/ui/borrow_as_ptr.stderr
new file mode 100644
index 000000000..be1ed7330
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_as_ptr.stderr
@@ -0,0 +1,16 @@
+error: borrow as raw pointer
+ --> $DIR/borrow_as_ptr.rs:6:14
+ |
+LL | let _p = &val as *const i32;
+ | ^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::addr_of!(val)`
+ |
+ = note: `-D clippy::borrow-as-ptr` implied by `-D warnings`
+
+error: borrow as raw pointer
+ --> $DIR/borrow_as_ptr.rs:9:18
+ |
+LL | let _p_mut = &mut val_mut as *mut i32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::addr_of_mut!(val_mut)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.fixed b/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.fixed
new file mode 100644
index 000000000..eaba3b1c2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.fixed
@@ -0,0 +1,22 @@
+// run-rustfix
+#![warn(clippy::borrow_as_ptr)]
+#![feature(lang_items, start, libc)]
+#![no_std]
+
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ let val = 1;
+ let _p = core::ptr::addr_of!(val);
+
+ let mut val_mut = 1;
+ let _p_mut = core::ptr::addr_of_mut!(val_mut);
+ 0
+}
+
+#[panic_handler]
+fn panic(_info: &core::panic::PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {}
diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.rs b/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.rs
new file mode 100644
index 000000000..d83f9d1f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.rs
@@ -0,0 +1,22 @@
+// run-rustfix
+#![warn(clippy::borrow_as_ptr)]
+#![feature(lang_items, start, libc)]
+#![no_std]
+
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ let val = 1;
+ let _p = &val as *const i32;
+
+ let mut val_mut = 1;
+ let _p_mut = &mut val_mut as *mut i32;
+ 0
+}
+
+#[panic_handler]
+fn panic(_info: &core::panic::PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {}
diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.stderr b/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.stderr
new file mode 100644
index 000000000..84c8ba7d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_as_ptr_no_std.stderr
@@ -0,0 +1,16 @@
+error: borrow as raw pointer
+ --> $DIR/borrow_as_ptr_no_std.rs:9:14
+ |
+LL | let _p = &val as *const i32;
+ | ^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::addr_of!(val)`
+ |
+ = note: `-D clippy::borrow-as-ptr` implied by `-D warnings`
+
+error: borrow as raw pointer
+ --> $DIR/borrow_as_ptr_no_std.rs:12:18
+ |
+LL | let _p_mut = &mut val_mut as *mut i32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::addr_of_mut!(val_mut)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_box.rs b/src/tools/clippy/tests/ui/borrow_box.rs
new file mode 100644
index 000000000..b606f773c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_box.rs
@@ -0,0 +1,115 @@
+#![deny(clippy::borrowed_box)]
+#![allow(clippy::blacklisted_name)]
+#![allow(unused_variables)]
+#![allow(dead_code)]
+
+use std::fmt::Display;
+
+pub fn test1(foo: &mut Box<bool>) {
+ // Although this function could be changed to "&mut bool",
+ // avoiding the Box, mutable references to boxes are not
+ // flagged by this lint.
+ //
+ // This omission is intentional: By passing a mutable Box,
+ // the memory location of the pointed-to object could be
+ // modified. By passing a mutable reference, the contents
+ // could change, but not the location.
+ println!("{:?}", foo)
+}
+
+pub fn test2() {
+ let foo: &Box<bool>;
+}
+
+struct Test3<'a> {
+ foo: &'a Box<bool>,
+}
+
+trait Test4 {
+ fn test4(a: &Box<bool>);
+}
+
+impl<'a> Test4 for Test3<'a> {
+ fn test4(a: &Box<bool>) {
+ unimplemented!();
+ }
+}
+
+use std::any::Any;
+
+pub fn test5(foo: &mut Box<dyn Any>) {
+ println!("{:?}", foo)
+}
+
+pub fn test6() {
+ let foo: &Box<dyn Any>;
+}
+
+struct Test7<'a> {
+ foo: &'a Box<dyn Any>,
+}
+
+trait Test8 {
+ fn test8(a: &Box<dyn Any>);
+}
+
+impl<'a> Test8 for Test7<'a> {
+ fn test8(a: &Box<dyn Any>) {
+ unimplemented!();
+ }
+}
+
+pub fn test9(foo: &mut Box<dyn Any + Send + Sync>) {
+ let _ = foo;
+}
+
+pub fn test10() {
+ let foo: &Box<dyn Any + Send + 'static>;
+}
+
+struct Test11<'a> {
+ foo: &'a Box<dyn Any + Send>,
+}
+
+trait Test12 {
+ fn test4(a: &Box<dyn Any + 'static>);
+}
+
+impl<'a> Test12 for Test11<'a> {
+ fn test4(a: &Box<dyn Any + 'static>) {
+ unimplemented!();
+ }
+}
+
+pub fn test13(boxed_slice: &mut Box<[i32]>) {
+ // Unconditionally replaces the box pointer.
+ //
+ // This cannot be accomplished if "&mut [i32]" is passed,
+ // and provides a test case where passing a reference to
+ // a Box is valid.
+ let mut data = vec![12];
+ *boxed_slice = data.into_boxed_slice();
+}
+
+// The suggestion should include proper parentheses to avoid a syntax error.
+pub fn test14(_display: &Box<dyn Display>) {}
+pub fn test15(_display: &Box<dyn Display + Send>) {}
+pub fn test16<'a>(_display: &'a Box<dyn Display + 'a>) {}
+
+pub fn test17(_display: &Box<impl Display>) {}
+pub fn test18(_display: &Box<impl Display + Send>) {}
+pub fn test19<'a>(_display: &'a Box<impl Display + 'a>) {}
+
+// This exists only to check what happens when parentheses are already present.
+// Even though the current implementation doesn't put extra parentheses,
+// it's fine if unnecessary parentheses appear in the future for some reason.
+pub fn test20(_display: &Box<(dyn Display + Send)>) {}
+
+fn main() {
+ test1(&mut Box::new(false));
+ test2();
+ test5(&mut (Box::new(false) as Box<dyn Any>));
+ test6();
+ test9(&mut (Box::new(false) as Box<dyn Any + Send + Sync>));
+ test10();
+}
diff --git a/src/tools/clippy/tests/ui/borrow_box.stderr b/src/tools/clippy/tests/ui/borrow_box.stderr
new file mode 100644
index 000000000..3eac32815
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_box.stderr
@@ -0,0 +1,68 @@
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:21:14
+ |
+LL | let foo: &Box<bool>;
+ | ^^^^^^^^^^ help: try: `&bool`
+ |
+note: the lint level is defined here
+ --> $DIR/borrow_box.rs:1:9
+ |
+LL | #![deny(clippy::borrowed_box)]
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:25:10
+ |
+LL | foo: &'a Box<bool>,
+ | ^^^^^^^^^^^^^ help: try: `&'a bool`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:29:17
+ |
+LL | fn test4(a: &Box<bool>);
+ | ^^^^^^^^^^ help: try: `&bool`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:95:25
+ |
+LL | pub fn test14(_display: &Box<dyn Display>) {}
+ | ^^^^^^^^^^^^^^^^^ help: try: `&dyn Display`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:96:25
+ |
+LL | pub fn test15(_display: &Box<dyn Display + Send>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(dyn Display + Send)`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:97:29
+ |
+LL | pub fn test16<'a>(_display: &'a Box<dyn Display + 'a>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&'a (dyn Display + 'a)`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:99:25
+ |
+LL | pub fn test17(_display: &Box<impl Display>) {}
+ | ^^^^^^^^^^^^^^^^^^ help: try: `&impl Display`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:100:25
+ |
+LL | pub fn test18(_display: &Box<impl Display + Send>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(impl Display + Send)`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:101:29
+ |
+LL | pub fn test19<'a>(_display: &'a Box<impl Display + 'a>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&'a (impl Display + 'a)`
+
+error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
+ --> $DIR/borrow_box.rs:106:25
+ |
+LL | pub fn test20(_display: &Box<(dyn Display + Send)>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(dyn Display + Send)`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref.fixed b/src/tools/clippy/tests/ui/borrow_deref_ref.fixed
new file mode 100644
index 000000000..bf4691c5b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_deref_ref.fixed
@@ -0,0 +1,59 @@
+// run-rustfix
+
+#![allow(dead_code, unused_variables)]
+
+fn main() {}
+
+mod should_lint {
+ fn one_help() {
+ let a = &12;
+ let b = a;
+
+ let b = &mut bar(&12);
+ }
+
+ fn bar(x: &u32) -> &u32 {
+ x
+ }
+}
+
+// this mod explains why we should not lint `&mut &* (&T)`
+mod should_not_lint1 {
+ fn foo(x: &mut &u32) {
+ *x = &1;
+ }
+
+ fn main() {
+ let mut x = &0;
+ foo(&mut &*x); // should not lint
+ assert_eq!(*x, 0);
+
+ foo(&mut x);
+ assert_eq!(*x, 1);
+ }
+}
+
+// similar to should_not_lint1
+mod should_not_lint2 {
+ struct S<'a> {
+ a: &'a u32,
+ b: u32,
+ }
+
+ fn main() {
+ let s = S { a: &1, b: 1 };
+ let x = &mut &*s.a;
+ *x = &2;
+ }
+}
+
+// this mod explains why we should not lint `& &* (&T)`
+mod false_negative {
+ fn foo() {
+ let x = &12;
+ let addr_x = &x as *const _ as usize;
+ let addr_y = &x as *const _ as usize; // assert ok
+ // let addr_y = &x as *const _ as usize; // assert fail
+ assert_ne!(addr_x, addr_y);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref.rs b/src/tools/clippy/tests/ui/borrow_deref_ref.rs
new file mode 100644
index 000000000..28c005fdb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_deref_ref.rs
@@ -0,0 +1,59 @@
+// run-rustfix
+
+#![allow(dead_code, unused_variables)]
+
+fn main() {}
+
+mod should_lint {
+ fn one_help() {
+ let a = &12;
+ let b = &*a;
+
+ let b = &mut &*bar(&12);
+ }
+
+ fn bar(x: &u32) -> &u32 {
+ x
+ }
+}
+
+// this mod explains why we should not lint `&mut &* (&T)`
+mod should_not_lint1 {
+ fn foo(x: &mut &u32) {
+ *x = &1;
+ }
+
+ fn main() {
+ let mut x = &0;
+ foo(&mut &*x); // should not lint
+ assert_eq!(*x, 0);
+
+ foo(&mut x);
+ assert_eq!(*x, 1);
+ }
+}
+
+// similar to should_not_lint1
+mod should_not_lint2 {
+ struct S<'a> {
+ a: &'a u32,
+ b: u32,
+ }
+
+ fn main() {
+ let s = S { a: &1, b: 1 };
+ let x = &mut &*s.a;
+ *x = &2;
+ }
+}
+
+// this mod explains why we should not lint `& &* (&T)`
+mod false_negative {
+ fn foo() {
+ let x = &12;
+ let addr_x = &x as *const _ as usize;
+ let addr_y = &&*x as *const _ as usize; // assert ok
+ // let addr_y = &x as *const _ as usize; // assert fail
+ assert_ne!(addr_x, addr_y);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref.stderr b/src/tools/clippy/tests/ui/borrow_deref_ref.stderr
new file mode 100644
index 000000000..d72de37c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_deref_ref.stderr
@@ -0,0 +1,22 @@
+error: deref on an immutable reference
+ --> $DIR/borrow_deref_ref.rs:10:17
+ |
+LL | let b = &*a;
+ | ^^^ help: if you would like to reborrow, try removing `&*`: `a`
+ |
+ = note: `-D clippy::borrow-deref-ref` implied by `-D warnings`
+
+error: deref on an immutable reference
+ --> $DIR/borrow_deref_ref.rs:12:22
+ |
+LL | let b = &mut &*bar(&12);
+ | ^^^^^^^^^^ help: if you would like to reborrow, try removing `&*`: `bar(&12)`
+
+error: deref on an immutable reference
+ --> $DIR/borrow_deref_ref.rs:55:23
+ |
+LL | let addr_y = &&*x as *const _ as usize; // assert ok
+ | ^^^ help: if you would like to reborrow, try removing `&*`: `x`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.rs b/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.rs
new file mode 100644
index 000000000..a8e2bbfef
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.rs
@@ -0,0 +1,10 @@
+#![allow(dead_code, unused_variables)]
+
+fn main() {}
+
+mod should_lint {
+ fn two_helps() {
+ let s = &String::new();
+ let x: &str = &*s;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr b/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr
new file mode 100644
index 000000000..738b01e7e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_deref_ref_unfixable.stderr
@@ -0,0 +1,18 @@
+error: deref on an immutable reference
+ --> $DIR/borrow_deref_ref_unfixable.rs:8:23
+ |
+LL | let x: &str = &*s;
+ | ^^^
+ |
+ = note: `-D clippy::borrow-deref-ref` implied by `-D warnings`
+help: if you would like to reborrow, try removing `&*`
+ |
+LL | let x: &str = s;
+ | ~
+help: if you would like to deref, try using `&**`
+ |
+LL | let x: &str = &**s;
+ | ~~~~
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs
new file mode 100644
index 000000000..f13733af3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/auxiliary/helper.rs
@@ -0,0 +1,17 @@
+// this file solely exists to test constants defined in foreign crates.
+// As the most common case is the `http` crate, it replicates `http::HeaderName`'s structure.
+
+#![allow(clippy::declare_interior_mutable_const)]
+#![allow(unused_tuple_struct_fields)]
+
+use std::sync::atomic::AtomicUsize;
+
+enum Private<T> {
+ ToBeUnfrozen(T),
+ Frozen(usize),
+}
+
+pub struct Wrapper(Private<AtomicUsize>);
+
+pub const WRAPPED_PRIVATE_UNFROZEN_VARIANT: Wrapper = Wrapper(Private::ToBeUnfrozen(AtomicUsize::new(6)));
+pub const WRAPPED_PRIVATE_FROZEN_VARIANT: Wrapper = Wrapper(Private::Frozen(7));
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.rs
new file mode 100644
index 000000000..5027db445
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.rs
@@ -0,0 +1,101 @@
+// aux-build:helper.rs
+
+#![warn(clippy::borrow_interior_mutable_const)]
+#![allow(clippy::declare_interior_mutable_const)]
+
+// this file (mostly) replicates its `declare` counterpart. Please see it for more discussion.
+
+extern crate helper;
+
+use std::cell::Cell;
+use std::sync::atomic::AtomicUsize;
+
+enum OptionalCell {
+ Unfrozen(Cell<bool>),
+ Frozen,
+}
+
+const UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(true));
+const FROZEN_VARIANT: OptionalCell = OptionalCell::Frozen;
+
+fn borrow_optional_cell() {
+ let _ = &UNFROZEN_VARIANT; //~ ERROR interior mutability
+ let _ = &FROZEN_VARIANT;
+}
+
+trait AssocConsts {
+ const TO_BE_UNFROZEN_VARIANT: OptionalCell;
+ const TO_BE_FROZEN_VARIANT: OptionalCell;
+
+ const DEFAULTED_ON_UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(false));
+ const DEFAULTED_ON_FROZEN_VARIANT: OptionalCell = OptionalCell::Frozen;
+
+ fn function() {
+ // This is the "suboptimal behavior" mentioned in `is_value_unfrozen`
+ // caused by a similar reason to unfrozen types without any default values
+ // get linted even if it has frozen variants'.
+ let _ = &Self::TO_BE_FROZEN_VARIANT; //~ ERROR interior mutable
+
+ // The lint ignores default values because an impl of this trait can set
+ // an unfrozen variant to `DEFAULTED_ON_FROZEN_VARIANT` and use the default impl for `function`.
+ let _ = &Self::DEFAULTED_ON_FROZEN_VARIANT; //~ ERROR interior mutable
+ }
+}
+
+impl AssocConsts for u64 {
+ const TO_BE_UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(false));
+ const TO_BE_FROZEN_VARIANT: OptionalCell = OptionalCell::Frozen;
+
+ fn function() {
+ let _ = &<Self as AssocConsts>::TO_BE_UNFROZEN_VARIANT; //~ ERROR interior mutable
+ let _ = &<Self as AssocConsts>::TO_BE_FROZEN_VARIANT;
+ let _ = &Self::DEFAULTED_ON_UNFROZEN_VARIANT; //~ ERROR interior mutable
+ let _ = &Self::DEFAULTED_ON_FROZEN_VARIANT;
+ }
+}
+
+trait AssocTypes {
+ type ToBeUnfrozen;
+
+ const TO_BE_UNFROZEN_VARIANT: Option<Self::ToBeUnfrozen>;
+ const TO_BE_FROZEN_VARIANT: Option<Self::ToBeUnfrozen>;
+
+ // there's no need to test here because it's exactly the same as `trait::AssocTypes`
+ fn function();
+}
+
+impl AssocTypes for u64 {
+ type ToBeUnfrozen = AtomicUsize;
+
+ const TO_BE_UNFROZEN_VARIANT: Option<Self::ToBeUnfrozen> = Some(Self::ToBeUnfrozen::new(4)); //~ ERROR interior mutable
+ const TO_BE_FROZEN_VARIANT: Option<Self::ToBeUnfrozen> = None;
+
+ fn function() {
+ let _ = &<Self as AssocTypes>::TO_BE_UNFROZEN_VARIANT; //~ ERROR interior mutable
+ let _ = &<Self as AssocTypes>::TO_BE_FROZEN_VARIANT;
+ }
+}
+
+enum BothOfCellAndGeneric<T> {
+ Unfrozen(Cell<*const T>),
+ Generic(*const T),
+ Frozen(usize),
+}
+
+impl<T> BothOfCellAndGeneric<T> {
+ const UNFROZEN_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Unfrozen(Cell::new(std::ptr::null())); //~ ERROR interior mutable
+ const GENERIC_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Generic(std::ptr::null()); //~ ERROR interior mutable
+ const FROZEN_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Frozen(5);
+
+ fn function() {
+ let _ = &Self::UNFROZEN_VARIANT; //~ ERROR interior mutability
+ let _ = &Self::GENERIC_VARIANT; //~ ERROR interior mutability
+ let _ = &Self::FROZEN_VARIANT;
+ }
+}
+
+fn main() {
+ // constants defined in foreign crates
+ let _ = &helper::WRAPPED_PRIVATE_UNFROZEN_VARIANT; //~ ERROR interior mutability
+ let _ = &helper::WRAPPED_PRIVATE_FROZEN_VARIANT;
+}
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.stderr b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.stderr
new file mode 100644
index 000000000..654a1ee7d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/enums.stderr
@@ -0,0 +1,75 @@
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:22:14
+ |
+LL | let _ = &UNFROZEN_VARIANT; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::borrow-interior-mutable-const` implied by `-D warnings`
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:37:18
+ |
+LL | let _ = &Self::TO_BE_FROZEN_VARIANT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:41:18
+ |
+LL | let _ = &Self::DEFAULTED_ON_FROZEN_VARIANT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:50:18
+ |
+LL | let _ = &<Self as AssocConsts>::TO_BE_UNFROZEN_VARIANT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:52:18
+ |
+LL | let _ = &Self::DEFAULTED_ON_UNFROZEN_VARIANT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:74:18
+ |
+LL | let _ = &<Self as AssocTypes>::TO_BE_UNFROZEN_VARIANT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:91:18
+ |
+LL | let _ = &Self::UNFROZEN_VARIANT; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:92:18
+ |
+LL | let _ = &Self::GENERIC_VARIANT; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/enums.rs:99:14
+ |
+LL | let _ = &helper::WRAPPED_PRIVATE_UNFROZEN_VARIANT; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs
new file mode 100644
index 000000000..eefeb1dec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs
@@ -0,0 +1,104 @@
+#![warn(clippy::borrow_interior_mutable_const)]
+#![allow(clippy::declare_interior_mutable_const, clippy::needless_borrow)]
+#![allow(const_item_mutation)]
+
+use std::borrow::Cow;
+use std::cell::{Cell, UnsafeCell};
+use std::fmt::Display;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Once;
+
+const ATOMIC: AtomicUsize = AtomicUsize::new(5);
+const CELL: Cell<usize> = Cell::new(6);
+const ATOMIC_TUPLE: ([AtomicUsize; 1], Vec<AtomicUsize>, u8) = ([ATOMIC], Vec::new(), 7);
+const INTEGER: u8 = 8;
+const STRING: String = String::new();
+const STR: &str = "012345";
+const COW: Cow<str> = Cow::Borrowed("abcdef");
+const NO_ANN: &dyn Display = &70;
+static STATIC_TUPLE: (AtomicUsize, String) = (ATOMIC, STRING);
+const ONCE_INIT: Once = Once::new();
+
+// This is just a pointer that can be safely dereferenced;
+// it's semantically the same as `&'static T`.
+// However, creating a static reference from an arbitrary integer value isn't allowed at the moment.
+// For more information, please see issue #5918.
+pub struct StaticRef<T> {
+ ptr: *const T,
+}
+
+impl<T> StaticRef<T> {
+ /// Create a new `StaticRef` from a raw pointer
+ ///
+ /// ## Safety
+ ///
+ /// Callers must pass in a reference to statically allocated memory which
+ /// does not overlap with other values.
+ pub const unsafe fn new(ptr: *const T) -> StaticRef<T> {
+ StaticRef { ptr }
+ }
+}
+
+impl<T> std::ops::Deref for StaticRef<T> {
+ type Target = T;
+
+ fn deref(&self) -> &'static T {
+ unsafe { &*self.ptr }
+ }
+}
+
+// use a tuple to make sure referencing a field behind a pointer isn't linted.
+const CELL_REF: StaticRef<(UnsafeCell<u32>,)> = unsafe { StaticRef::new(std::ptr::null()) };
+
+fn main() {
+ ATOMIC.store(1, Ordering::SeqCst); //~ ERROR interior mutability
+ assert_eq!(ATOMIC.load(Ordering::SeqCst), 5); //~ ERROR interior mutability
+
+ let _once = ONCE_INIT;
+ let _once_ref = &ONCE_INIT; //~ ERROR interior mutability
+ let _once_ref_2 = &&ONCE_INIT; //~ ERROR interior mutability
+ let _once_ref_4 = &&&&ONCE_INIT; //~ ERROR interior mutability
+ let _once_mut = &mut ONCE_INIT; //~ ERROR interior mutability
+ let _atomic_into_inner = ATOMIC.into_inner();
+ // these should be all fine.
+ let _twice = (ONCE_INIT, ONCE_INIT);
+ let _ref_twice = &(ONCE_INIT, ONCE_INIT);
+ let _ref_once = &(ONCE_INIT, ONCE_INIT).0;
+ let _array_twice = [ONCE_INIT, ONCE_INIT];
+ let _ref_array_twice = &[ONCE_INIT, ONCE_INIT];
+ let _ref_array_once = &[ONCE_INIT, ONCE_INIT][0];
+
+ // referencing projection is still bad.
+ let _ = &ATOMIC_TUPLE; //~ ERROR interior mutability
+ let _ = &ATOMIC_TUPLE.0; //~ ERROR interior mutability
+ let _ = &(&&&&ATOMIC_TUPLE).0; //~ ERROR interior mutability
+ let _ = &ATOMIC_TUPLE.0[0]; //~ ERROR interior mutability
+ let _ = ATOMIC_TUPLE.0[0].load(Ordering::SeqCst); //~ ERROR interior mutability
+ let _ = &*ATOMIC_TUPLE.1;
+ let _ = &ATOMIC_TUPLE.2;
+ let _ = (&&&&ATOMIC_TUPLE).0;
+ let _ = (&&&&ATOMIC_TUPLE).2;
+ let _ = ATOMIC_TUPLE.0;
+ let _ = ATOMIC_TUPLE.0[0]; //~ ERROR interior mutability
+ let _ = ATOMIC_TUPLE.1.into_iter();
+ let _ = ATOMIC_TUPLE.2;
+ let _ = &{ ATOMIC_TUPLE };
+
+ CELL.set(2); //~ ERROR interior mutability
+ assert_eq!(CELL.get(), 6); //~ ERROR interior mutability
+
+ assert_eq!(INTEGER, 8);
+ assert!(STRING.is_empty());
+
+ let a = ATOMIC;
+ a.store(4, Ordering::SeqCst);
+ assert_eq!(a.load(Ordering::SeqCst), 4);
+
+ STATIC_TUPLE.0.store(3, Ordering::SeqCst);
+ assert_eq!(STATIC_TUPLE.0.load(Ordering::SeqCst), 3);
+ assert!(STATIC_TUPLE.1.is_empty());
+
+ assert_eq!(NO_ANN.to_string(), "70"); // should never lint this.
+
+ let _ = &CELL_REF.0;
+}
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr
new file mode 100644
index 000000000..9a908cf30
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr
@@ -0,0 +1,115 @@
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:54:5
+ |
+LL | ATOMIC.store(1, Ordering::SeqCst); //~ ERROR interior mutability
+ | ^^^^^^
+ |
+ = note: `-D clippy::borrow-interior-mutable-const` implied by `-D warnings`
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:55:16
+ |
+LL | assert_eq!(ATOMIC.load(Ordering::SeqCst), 5); //~ ERROR interior mutability
+ | ^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:58:22
+ |
+LL | let _once_ref = &ONCE_INIT; //~ ERROR interior mutability
+ | ^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:59:25
+ |
+LL | let _once_ref_2 = &&ONCE_INIT; //~ ERROR interior mutability
+ | ^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:60:27
+ |
+LL | let _once_ref_4 = &&&&ONCE_INIT; //~ ERROR interior mutability
+ | ^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:61:26
+ |
+LL | let _once_mut = &mut ONCE_INIT; //~ ERROR interior mutability
+ | ^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:72:14
+ |
+LL | let _ = &ATOMIC_TUPLE; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:73:14
+ |
+LL | let _ = &ATOMIC_TUPLE.0; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:74:19
+ |
+LL | let _ = &(&&&&ATOMIC_TUPLE).0; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:75:14
+ |
+LL | let _ = &ATOMIC_TUPLE.0[0]; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:76:13
+ |
+LL | let _ = ATOMIC_TUPLE.0[0].load(Ordering::SeqCst); //~ ERROR interior mutability
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:82:13
+ |
+LL | let _ = ATOMIC_TUPLE.0[0]; //~ ERROR interior mutability
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:87:5
+ |
+LL | CELL.set(2); //~ ERROR interior mutability
+ | ^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/others.rs:88:16
+ |
+LL | assert_eq!(CELL.get(), 6); //~ ERROR interior mutability
+ | ^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.rs
new file mode 100644
index 000000000..06b5d62e8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.rs
@@ -0,0 +1,202 @@
+#![warn(clippy::borrow_interior_mutable_const)]
+#![allow(clippy::declare_interior_mutable_const)]
+
+// this file replicates its `declare` counterpart. Please see it for more discussion.
+
+use std::borrow::Cow;
+use std::cell::Cell;
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+trait ConcreteTypes {
+ const ATOMIC: AtomicUsize;
+ const STRING: String;
+
+ fn function() {
+ let _ = &Self::ATOMIC; //~ ERROR interior mutable
+ let _ = &Self::STRING;
+ }
+}
+
+impl ConcreteTypes for u64 {
+ const ATOMIC: AtomicUsize = AtomicUsize::new(9);
+ const STRING: String = String::new();
+
+ fn function() {
+ // Lint this again since implementers can choose not to borrow it.
+ let _ = &Self::ATOMIC; //~ ERROR interior mutable
+ let _ = &Self::STRING;
+ }
+}
+
+// a helper trait used below
+trait ConstDefault {
+ const DEFAULT: Self;
+}
+
+trait GenericTypes<T, U> {
+ const TO_REMAIN_GENERIC: T;
+ const TO_BE_CONCRETE: U;
+
+ fn function() {
+ let _ = &Self::TO_REMAIN_GENERIC;
+ }
+}
+
+impl<T: ConstDefault> GenericTypes<T, AtomicUsize> for Vec<T> {
+ const TO_REMAIN_GENERIC: T = T::DEFAULT;
+ const TO_BE_CONCRETE: AtomicUsize = AtomicUsize::new(11);
+
+ fn function() {
+ let _ = &Self::TO_REMAIN_GENERIC;
+ let _ = &Self::TO_BE_CONCRETE; //~ ERROR interior mutable
+ }
+}
+
+// a helper type used below
+pub struct Wrapper<T>(T);
+
+trait AssocTypes {
+ type ToBeFrozen;
+ type ToBeUnfrozen;
+ type ToBeGenericParam;
+
+ const TO_BE_FROZEN: Self::ToBeFrozen;
+ const TO_BE_UNFROZEN: Self::ToBeUnfrozen;
+ const WRAPPED_TO_BE_UNFROZEN: Wrapper<Self::ToBeUnfrozen>;
+ const WRAPPED_TO_BE_GENERIC_PARAM: Wrapper<Self::ToBeGenericParam>;
+
+ fn function() {
+ let _ = &Self::TO_BE_FROZEN;
+ let _ = &Self::WRAPPED_TO_BE_UNFROZEN;
+ }
+}
+
+impl<T: ConstDefault> AssocTypes for Vec<T> {
+ type ToBeFrozen = u16;
+ type ToBeUnfrozen = AtomicUsize;
+ type ToBeGenericParam = T;
+
+ const TO_BE_FROZEN: Self::ToBeFrozen = 12;
+ const TO_BE_UNFROZEN: Self::ToBeUnfrozen = AtomicUsize::new(13);
+ const WRAPPED_TO_BE_UNFROZEN: Wrapper<Self::ToBeUnfrozen> = Wrapper(AtomicUsize::new(14));
+ const WRAPPED_TO_BE_GENERIC_PARAM: Wrapper<Self::ToBeGenericParam> = Wrapper(T::DEFAULT);
+
+ fn function() {
+ let _ = &Self::TO_BE_FROZEN;
+ let _ = &Self::TO_BE_UNFROZEN; //~ ERROR interior mutable
+ let _ = &Self::WRAPPED_TO_BE_UNFROZEN; //~ ERROR interior mutable
+ let _ = &Self::WRAPPED_TO_BE_GENERIC_PARAM;
+ }
+}
+
+// a helper trait used below
+trait AssocTypesHelper {
+ type NotToBeBounded;
+ type ToBeBounded;
+
+ const NOT_TO_BE_BOUNDED: Self::NotToBeBounded;
+}
+
+trait AssocTypesFromGenericParam<T>
+where
+ T: AssocTypesHelper<ToBeBounded = AtomicUsize>,
+{
+ const NOT_BOUNDED: T::NotToBeBounded;
+ const BOUNDED: T::ToBeBounded;
+
+ fn function() {
+ let _ = &Self::NOT_BOUNDED;
+ let _ = &Self::BOUNDED; //~ ERROR interior mutable
+ }
+}
+
+impl<T> AssocTypesFromGenericParam<T> for Vec<T>
+where
+ T: AssocTypesHelper<ToBeBounded = AtomicUsize>,
+{
+ const NOT_BOUNDED: T::NotToBeBounded = T::NOT_TO_BE_BOUNDED;
+ const BOUNDED: T::ToBeBounded = AtomicUsize::new(15);
+
+ fn function() {
+ let _ = &Self::NOT_BOUNDED;
+ let _ = &Self::BOUNDED; //~ ERROR interior mutable
+ }
+}
+
+trait SelfType: Sized {
+ const SELF: Self;
+ const WRAPPED_SELF: Option<Self>;
+
+ fn function() {
+ let _ = &Self::SELF;
+ let _ = &Self::WRAPPED_SELF;
+ }
+}
+
+impl SelfType for u64 {
+ const SELF: Self = 16;
+ const WRAPPED_SELF: Option<Self> = Some(20);
+
+ fn function() {
+ let _ = &Self::SELF;
+ let _ = &Self::WRAPPED_SELF;
+ }
+}
+
+impl SelfType for AtomicUsize {
+ const SELF: Self = AtomicUsize::new(17);
+ const WRAPPED_SELF: Option<Self> = Some(AtomicUsize::new(21));
+
+ fn function() {
+ let _ = &Self::SELF; //~ ERROR interior mutable
+ let _ = &Self::WRAPPED_SELF; //~ ERROR interior mutable
+ }
+}
+
+trait BothOfCellAndGeneric<T> {
+ const DIRECT: Cell<T>;
+ const INDIRECT: Cell<*const T>;
+
+ fn function() {
+ let _ = &Self::DIRECT;
+ let _ = &Self::INDIRECT; //~ ERROR interior mutable
+ }
+}
+
+impl<T: ConstDefault> BothOfCellAndGeneric<T> for Vec<T> {
+ const DIRECT: Cell<T> = Cell::new(T::DEFAULT);
+ const INDIRECT: Cell<*const T> = Cell::new(std::ptr::null());
+
+ fn function() {
+ let _ = &Self::DIRECT;
+ let _ = &Self::INDIRECT; //~ ERROR interior mutable
+ }
+}
+
+struct Local<T>(T);
+
+impl<T> Local<T>
+where
+ T: ConstDefault + AssocTypesHelper<ToBeBounded = AtomicUsize>,
+{
+ const ATOMIC: AtomicUsize = AtomicUsize::new(18);
+ const COW: Cow<'static, str> = Cow::Borrowed("tuvwxy");
+
+ const GENERIC_TYPE: T = T::DEFAULT;
+
+ const ASSOC_TYPE: T::NotToBeBounded = T::NOT_TO_BE_BOUNDED;
+ const BOUNDED_ASSOC_TYPE: T::ToBeBounded = AtomicUsize::new(19);
+
+ fn function() {
+ let _ = &Self::ATOMIC; //~ ERROR interior mutable
+ let _ = &Self::COW;
+ let _ = &Self::GENERIC_TYPE;
+ let _ = &Self::ASSOC_TYPE;
+ let _ = &Self::BOUNDED_ASSOC_TYPE; //~ ERROR interior mutable
+ }
+}
+
+fn main() {
+ u64::ATOMIC.store(5, Ordering::SeqCst); //~ ERROR interior mutability
+ assert_eq!(u64::ATOMIC.load(Ordering::SeqCst), 9); //~ ERROR interior mutability
+}
diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.stderr b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.stderr
new file mode 100644
index 000000000..8f26403ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/traits.stderr
@@ -0,0 +1,123 @@
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:15:18
+ |
+LL | let _ = &Self::ATOMIC; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::borrow-interior-mutable-const` implied by `-D warnings`
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:26:18
+ |
+LL | let _ = &Self::ATOMIC; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:51:18
+ |
+LL | let _ = &Self::TO_BE_CONCRETE; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:86:18
+ |
+LL | let _ = &Self::TO_BE_UNFROZEN; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:87:18
+ |
+LL | let _ = &Self::WRAPPED_TO_BE_UNFROZEN; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:109:18
+ |
+LL | let _ = &Self::BOUNDED; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:122:18
+ |
+LL | let _ = &Self::BOUNDED; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:151:18
+ |
+LL | let _ = &Self::SELF; //~ ERROR interior mutable
+ | ^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:152:18
+ |
+LL | let _ = &Self::WRAPPED_SELF; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:162:18
+ |
+LL | let _ = &Self::INDIRECT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:172:18
+ |
+LL | let _ = &Self::INDIRECT; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:191:18
+ |
+LL | let _ = &Self::ATOMIC; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:195:18
+ |
+LL | let _ = &Self::BOUNDED_ASSOC_TYPE; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:200:5
+ |
+LL | u64::ATOMIC.store(5, Ordering::SeqCst); //~ ERROR interior mutability
+ | ^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: a `const` item with interior mutability should not be borrowed
+ --> $DIR/traits.rs:201:16
+ |
+LL | assert_eq!(u64::ATOMIC.load(Ordering::SeqCst), 9); //~ ERROR interior mutability
+ | ^^^^^^^^^^^
+ |
+ = help: assign this const to a local or static variable, and use the variable here
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/box_collection.rs b/src/tools/clippy/tests/ui/box_collection.rs
new file mode 100644
index 000000000..1a74cdb3f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/box_collection.rs
@@ -0,0 +1,56 @@
+#![warn(clippy::all)]
+#![allow(
+ clippy::boxed_local,
+ clippy::needless_pass_by_value,
+ clippy::blacklisted_name,
+ unused
+)]
+
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
+
+macro_rules! boxit {
+ ($init:expr, $x:ty) => {
+ let _: Box<$x> = Box::new($init);
+ };
+}
+
+fn test_macro() {
+ boxit!(Vec::new(), Vec<u8>);
+}
+
+fn test1(foo: Box<Vec<bool>>) {}
+
+fn test2(foo: Box<dyn Fn(Vec<u32>)>) {
+ // pass if #31 is fixed
+ foo(vec![1, 2, 3])
+}
+
+fn test3(foo: Box<String>) {}
+
+fn test4(foo: Box<HashMap<String, String>>) {}
+
+fn test5(foo: Box<HashSet<i64>>) {}
+
+fn test6(foo: Box<VecDeque<i32>>) {}
+
+fn test7(foo: Box<LinkedList<i16>>) {}
+
+fn test8(foo: Box<BTreeMap<i8, String>>) {}
+
+fn test9(foo: Box<BTreeSet<u64>>) {}
+
+fn test10(foo: Box<BinaryHeap<u32>>) {}
+
+fn test_local_not_linted() {
+ let _: Box<Vec<bool>>;
+}
+
+// All of these tests should be allowed because they are part of the
+// public api and `avoid_breaking_exported_api` is `false` by default.
+pub fn pub_test(foo: Box<Vec<bool>>) {}
+
+pub fn pub_test_ret() -> Box<Vec<bool>> {
+ Box::new(Vec::new())
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/box_collection.stderr b/src/tools/clippy/tests/ui/box_collection.stderr
new file mode 100644
index 000000000..2b28598de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/box_collection.stderr
@@ -0,0 +1,75 @@
+error: you seem to be trying to use `Box<Vec<..>>`. Consider using just `Vec<..>`
+ --> $DIR/box_collection.rs:21:15
+ |
+LL | fn test1(foo: Box<Vec<bool>>) {}
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::box-collection` implied by `-D warnings`
+ = help: `Vec<..>` is already on the heap, `Box<Vec<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<String>`. Consider using just `String`
+ --> $DIR/box_collection.rs:28:15
+ |
+LL | fn test3(foo: Box<String>) {}
+ | ^^^^^^^^^^^
+ |
+ = help: `String` is already on the heap, `Box<String>` makes an extra allocation
+
+error: you seem to be trying to use `Box<HashMap<..>>`. Consider using just `HashMap<..>`
+ --> $DIR/box_collection.rs:30:15
+ |
+LL | fn test4(foo: Box<HashMap<String, String>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: `HashMap<..>` is already on the heap, `Box<HashMap<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<HashSet<..>>`. Consider using just `HashSet<..>`
+ --> $DIR/box_collection.rs:32:15
+ |
+LL | fn test5(foo: Box<HashSet<i64>>) {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: `HashSet<..>` is already on the heap, `Box<HashSet<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<VecDeque<..>>`. Consider using just `VecDeque<..>`
+ --> $DIR/box_collection.rs:34:15
+ |
+LL | fn test6(foo: Box<VecDeque<i32>>) {}
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: `VecDeque<..>` is already on the heap, `Box<VecDeque<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<LinkedList<..>>`. Consider using just `LinkedList<..>`
+ --> $DIR/box_collection.rs:36:15
+ |
+LL | fn test7(foo: Box<LinkedList<i16>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: `LinkedList<..>` is already on the heap, `Box<LinkedList<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<BTreeMap<..>>`. Consider using just `BTreeMap<..>`
+ --> $DIR/box_collection.rs:38:15
+ |
+LL | fn test8(foo: Box<BTreeMap<i8, String>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: `BTreeMap<..>` is already on the heap, `Box<BTreeMap<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<BTreeSet<..>>`. Consider using just `BTreeSet<..>`
+ --> $DIR/box_collection.rs:40:15
+ |
+LL | fn test9(foo: Box<BTreeSet<u64>>) {}
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: `BTreeSet<..>` is already on the heap, `Box<BTreeSet<..>>` makes an extra allocation
+
+error: you seem to be trying to use `Box<BinaryHeap<..>>`. Consider using just `BinaryHeap<..>`
+ --> $DIR/box_collection.rs:42:16
+ |
+LL | fn test10(foo: Box<BinaryHeap<u32>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: `BinaryHeap<..>` is already on the heap, `Box<BinaryHeap<..>>` makes an extra allocation
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/boxed_local.rs b/src/tools/clippy/tests/ui/boxed_local.rs
new file mode 100644
index 000000000..4639f00a8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/boxed_local.rs
@@ -0,0 +1,209 @@
+#![feature(box_syntax)]
+#![feature(lint_reasons)]
+#![allow(
+ clippy::borrowed_box,
+ clippy::needless_pass_by_value,
+ clippy::unused_unit,
+ clippy::redundant_clone,
+ clippy::match_single_binding
+)]
+#![warn(clippy::boxed_local)]
+
+#[derive(Clone)]
+struct A;
+
+impl A {
+ fn foo(&self) {}
+}
+
+trait Z {
+ fn bar(&self);
+}
+
+impl Z for A {
+ fn bar(&self) {
+ //nothing
+ }
+}
+
+fn main() {}
+
+fn ok_box_trait(boxed_trait: &Box<dyn Z>) {
+ let boxed_local = boxed_trait;
+ // done
+}
+
+fn warn_call() {
+ let x = box A;
+ x.foo();
+}
+
+fn warn_arg(x: Box<A>) {
+ x.foo();
+}
+
+fn nowarn_closure_arg() {
+ let x = Some(box A);
+ x.map_or((), |x| take_ref(&x));
+}
+
+fn warn_rename_call() {
+ let x = box A;
+
+ let y = x;
+ y.foo(); // via autoderef
+}
+
+fn warn_notuse() {
+ let bz = box A;
+}
+
+fn warn_pass() {
+ let bz = box A;
+ take_ref(&bz); // via deref coercion
+}
+
+fn nowarn_return() -> Box<A> {
+ box A // moved out, "escapes"
+}
+
+fn nowarn_move() {
+ let bx = box A;
+ drop(bx) // moved in, "escapes"
+}
+fn nowarn_call() {
+ let bx = box A;
+ bx.clone(); // method only available to Box, not via autoderef
+}
+
+fn nowarn_pass() {
+ let bx = box A;
+ take_box(&bx); // fn needs &Box
+}
+
+fn take_box(x: &Box<A>) {}
+fn take_ref(x: &A) {}
+
+fn nowarn_ref_take() {
+ // false positive, should actually warn
+ let x = box A;
+ let y = &x;
+ take_box(y);
+}
+
+fn nowarn_match() {
+ let x = box A; // moved into a match
+ match x {
+ y => drop(y),
+ }
+}
+
+fn warn_match() {
+ let x = box A;
+ match &x {
+ // not moved
+ y => (),
+ }
+}
+
+fn nowarn_large_array() {
+ // should not warn, is large array
+ // and should not be on stack
+ let x = box [1; 10000];
+ match &x {
+ // not moved
+ y => (),
+ }
+}
+
+/// ICE regression test
+pub trait Foo {
+ type Item;
+}
+
+impl<'a> Foo for &'a () {
+ type Item = ();
+}
+
+pub struct PeekableSeekable<I: Foo> {
+ _peeked: I::Item,
+}
+
+pub fn new(_needs_name: Box<PeekableSeekable<&()>>) -> () {}
+
+/// Regression for #916, #1123
+///
+/// This shouldn't warn for `boxed_local` as the implementation of a trait
+/// can't change much about the trait definition.
+trait BoxedAction {
+ fn do_sth(self: Box<Self>);
+}
+
+impl BoxedAction for u64 {
+ fn do_sth(self: Box<Self>) {
+ println!("{}", *self)
+ }
+}
+
+/// Regression for #1478
+///
+/// This shouldn't warn for `boxed_local` as self itself is a box type.
+trait MyTrait {
+ fn do_sth(self);
+}
+
+impl<T> MyTrait for Box<T> {
+ fn do_sth(self) {}
+}
+
+// Issue #3739 - capture in closures
+mod issue_3739 {
+ use super::A;
+
+ fn consume<T>(_: T) {}
+ fn borrow<T>(_: &T) {}
+
+ fn closure_consume(x: Box<A>) {
+ let _ = move || {
+ consume(x);
+ };
+ }
+
+ fn closure_borrow(x: Box<A>) {
+ let _ = || {
+ borrow(&x);
+ };
+ }
+}
+
+/// Issue #5542
+///
+/// This shouldn't warn for `boxed_local` as it is intended to be called from non-Rust code.
+pub extern "C" fn do_not_warn_me(_c_pointer: Box<String>) -> () {}
+
+#[rustfmt::skip] // Forces rustfmt to not add ABI
+pub extern fn do_not_warn_me_no_abi(_c_pointer: Box<String>) -> () {}
+
+// Issue #4804 - default implementation in trait
+mod issue4804 {
+ trait DefaultTraitImplTest {
+ // don't warn on `self`
+ fn default_impl(self: Box<Self>) -> u32 {
+ 5
+ }
+
+ // warn on `x: Box<u32>`
+ fn default_impl_x(self: Box<Self>, x: Box<u32>) -> u32 {
+ 4
+ }
+ }
+
+ trait WarnTrait {
+ // warn on `x: Box<u32>`
+ fn foo(x: Box<u32>) {}
+ }
+}
+
+fn check_expect(#[expect(clippy::boxed_local)] x: Box<A>) {
+ x.foo();
+}
diff --git a/src/tools/clippy/tests/ui/boxed_local.stderr b/src/tools/clippy/tests/ui/boxed_local.stderr
new file mode 100644
index 000000000..9036529f3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/boxed_local.stderr
@@ -0,0 +1,28 @@
+error: local variable doesn't need to be boxed here
+ --> $DIR/boxed_local.rs:41:13
+ |
+LL | fn warn_arg(x: Box<A>) {
+ | ^
+ |
+ = note: `-D clippy::boxed-local` implied by `-D warnings`
+
+error: local variable doesn't need to be boxed here
+ --> $DIR/boxed_local.rs:132:12
+ |
+LL | pub fn new(_needs_name: Box<PeekableSeekable<&()>>) -> () {}
+ | ^^^^^^^^^^^
+
+error: local variable doesn't need to be boxed here
+ --> $DIR/boxed_local.rs:196:44
+ |
+LL | fn default_impl_x(self: Box<Self>, x: Box<u32>) -> u32 {
+ | ^
+
+error: local variable doesn't need to be boxed here
+ --> $DIR/boxed_local.rs:203:16
+ |
+LL | fn foo(x: Box<u32>) {}
+ | ^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/false_positives.rs b/src/tools/clippy/tests/ui/branches_sharing_code/false_positives.rs
new file mode 100644
index 000000000..5e3a1a296
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/false_positives.rs
@@ -0,0 +1,95 @@
+#![allow(dead_code)]
+#![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+use std::sync::Mutex;
+
+// ##################################
+// # Issue clippy#7369
+// ##################################
+#[derive(Debug)]
+pub struct FooBar {
+ foo: Vec<u32>,
+}
+
+impl FooBar {
+ pub fn bar(&mut self) {
+ if true {
+ self.foo.pop();
+ } else {
+ self.baz();
+
+ self.foo.pop();
+
+ self.baz()
+ }
+ }
+
+ fn baz(&mut self) {}
+}
+
+fn foo(x: u32, y: u32) -> u32 {
+ x / y
+}
+
+fn main() {
+ let x = (1, 2);
+ let _ = if true {
+ let (x, y) = x;
+ foo(x, y)
+ } else {
+ let (y, x) = x;
+ foo(x, y)
+ };
+
+ let m = Mutex::new(0u32);
+ let l = m.lock().unwrap();
+ let _ = if true {
+ drop(l);
+ println!("foo");
+ m.lock().unwrap();
+ 0
+ } else if *l == 0 {
+ drop(l);
+ println!("foo");
+ println!("bar");
+ m.lock().unwrap();
+ 1
+ } else {
+ drop(l);
+ println!("foo");
+ println!("baz");
+ m.lock().unwrap();
+ 2
+ };
+
+ if true {
+ let _guard = m.lock();
+ println!("foo");
+ } else {
+ println!("foo");
+ }
+
+ if true {
+ let _guard = m.lock();
+ println!("foo");
+ println!("bar");
+ } else {
+ let _guard = m.lock();
+ println!("foo");
+ println!("baz");
+ }
+
+ let mut c = 0;
+ for _ in 0..5 {
+ if c == 0 {
+ c += 1;
+ println!("0");
+ } else if c == 1 {
+ c += 1;
+ println!("1");
+ } else {
+ c += 1;
+ println!("more");
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.rs b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.rs
new file mode 100644
index 000000000..12f550d9c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.rs
@@ -0,0 +1,223 @@
+#![allow(dead_code, clippy::equatable_if_let)]
+#![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+// This tests the branches_sharing_code lint at the end of blocks
+
+fn simple_examples() {
+ let x = 1;
+
+ let _ = if x == 7 {
+ println!("Branch I");
+ let start_value = 0;
+ println!("=^.^=");
+
+ // Same but not moveable due to `start_value`
+ let _ = start_value;
+
+ // The rest is self contained and moveable => Only lint the rest
+ let result = false;
+ println!("Block end!");
+ result
+ } else {
+ println!("Branch II");
+ let start_value = 8;
+ println!("xD");
+
+ // Same but not moveable due to `start_value`
+ let _ = start_value;
+
+ // The rest is self contained and moveable => Only lint the rest
+ let result = false;
+ println!("Block end!");
+ result
+ };
+
+ // Else if block
+ if x == 9 {
+ println!("The index is: 6");
+
+ println!("Same end of block");
+ } else if x == 8 {
+ println!("The index is: 4");
+
+ // We should only get a lint trigger for the last statement
+ println!("This is also eq with the else block");
+ println!("Same end of block");
+ } else {
+ println!("This is also eq with the else block");
+ println!("Same end of block");
+ }
+
+ // Use of outer scope value
+ let outer_scope_value = "I'm outside the if block";
+ if x < 99 {
+ let z = "How are you";
+ println!("I'm a local because I use the value `z`: `{}`", z);
+
+ println!(
+ "I'm moveable because I know: `outer_scope_value`: '{}'",
+ outer_scope_value
+ );
+ } else {
+ let z = 45678000;
+ println!("I'm a local because I use the value `z`: `{}`", z);
+
+ println!(
+ "I'm moveable because I know: `outer_scope_value`: '{}'",
+ outer_scope_value
+ );
+ }
+
+ if x == 9 {
+ if x == 8 {
+ // No parent!!
+ println!("---");
+ println!("Hello World");
+ } else {
+ println!("Hello World");
+ }
+ }
+}
+
+/// Simple examples where the move can cause some problems due to moved values
+fn simple_but_suggestion_is_invalid() {
+ let x = 16;
+
+ // Local value
+ let later_used_value = 17;
+ if x == 9 {
+ let _ = 9;
+ let later_used_value = "A string value";
+ println!("{}", later_used_value);
+ } else {
+ let later_used_value = "A string value";
+ println!("{}", later_used_value);
+ // I'm expecting a note about this
+ }
+ println!("{}", later_used_value);
+
+ // outer function
+ if x == 78 {
+ let simple_examples = "I now identify as a &str :)";
+ println!("This is the new simple_example: {}", simple_examples);
+ } else {
+ println!("Separator print statement");
+
+ let simple_examples = "I now identify as a &str :)";
+ println!("This is the new simple_example: {}", simple_examples);
+ }
+ simple_examples();
+}
+
+/// Tests where the blocks are not linted due to the used value scope
+fn not_moveable_due_to_value_scope() {
+ let x = 18;
+
+ // Using a local value in the moved code
+ if x == 9 {
+ let y = 18;
+ println!("y is: `{}`", y);
+ } else {
+ let y = "A string";
+ println!("y is: `{}`", y);
+ }
+
+ // Using a local value in the expression
+ let _ = if x == 0 {
+ let mut result = x + 1;
+
+ println!("1. Doing some calculations");
+ println!("2. Some more calculations");
+ println!("3. Setting result");
+
+ result
+ } else {
+ let mut result = x - 1;
+
+ println!("1. Doing some calculations");
+ println!("2. Some more calculations");
+ println!("3. Setting result");
+
+ result
+ };
+
+ let _ = if x == 7 {
+ let z1 = 100;
+ println!("z1: {}", z1);
+
+ let z2 = z1;
+ println!("z2: {}", z2);
+
+ z2
+ } else {
+ let z1 = 300;
+ println!("z1: {}", z1);
+
+ let z2 = z1;
+ println!("z2: {}", z2);
+
+ z2
+ };
+}
+
+/// This should add a note to the lint msg since the moved expression is not `()`
+fn added_note_for_expression_use() -> u32 {
+ let x = 9;
+
+ let _ = if x == 7 {
+ x << 2
+ } else {
+ let _ = 6;
+ x << 2
+ };
+
+ if x == 9 {
+ x * 4
+ } else {
+ let _ = 17;
+ x * 4
+ }
+}
+
+#[rustfmt::skip]
+fn test_suggestion_with_weird_formatting() {
+ let x = 9;
+ let mut a = 0;
+ let mut b = 0;
+
+ // The error message still looks weird tbh but this is the best I can do
+ // for weird formatting
+ if x == 17 { b = 1; a = 0x99; } else { a = 0x99; }
+}
+
+fn fp_test() {
+ let x = 17;
+
+ if x == 18 {
+ let y = 19;
+ if y < x {
+ println!("Trigger")
+ }
+ } else {
+ let z = 166;
+ if z < x {
+ println!("Trigger")
+ }
+ }
+}
+
+fn fp_if_let_issue7054() {
+ // This shouldn't trigger the lint
+ let string;
+ let _x = if let true = true {
+ ""
+ } else if true {
+ string = "x".to_owned();
+ &string
+ } else {
+ string = "y".to_owned();
+ &string
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.stderr b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.stderr
new file mode 100644
index 000000000..5e1a68d21
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_bottom.stderr
@@ -0,0 +1,143 @@
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:30:5
+ |
+LL | / let result = false;
+LL | | println!("Block end!");
+LL | | result
+LL | | };
+ | |_____^
+ |
+note: the lint level is defined here
+ --> $DIR/shared_at_bottom.rs:2:36
+ |
+LL | #![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: the end suggestion probably needs some adjustments to use the expression result correctly
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + let result = false;
+LL + println!("Block end!");
+LL ~ result;
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:48:5
+ |
+LL | / println!("Same end of block");
+LL | | }
+ | |_____^
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + println!("Same end of block");
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:65:5
+ |
+LL | / println!(
+LL | | "I'm moveable because I know: `outer_scope_value`: '{}'",
+LL | | outer_scope_value
+LL | | );
+LL | | }
+ | |_____^
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + println!(
+LL + "I'm moveable because I know: `outer_scope_value`: '{}'",
+LL + outer_scope_value
+LL + );
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:77:9
+ |
+LL | / println!("Hello World");
+LL | | }
+ | |_________^
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + println!("Hello World");
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:93:5
+ |
+LL | / let later_used_value = "A string value";
+LL | | println!("{}", later_used_value);
+LL | | // I'm expecting a note about this
+LL | | }
+ | |_____^
+ |
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + let later_used_value = "A string value";
+LL + println!("{}", later_used_value);
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:106:5
+ |
+LL | / let simple_examples = "I now identify as a &str :)";
+LL | | println!("This is the new simple_example: {}", simple_examples);
+LL | | }
+ | |_____^
+ |
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + let simple_examples = "I now identify as a &str :)";
+LL + println!("This is the new simple_example: {}", simple_examples);
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:171:5
+ |
+LL | / x << 2
+LL | | };
+ | |_____^
+ |
+ = note: the end suggestion probably needs some adjustments to use the expression result correctly
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL ~ x << 2;
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:178:5
+ |
+LL | / x * 4
+LL | | }
+ | |_____^
+ |
+ = note: the end suggestion probably needs some adjustments to use the expression result correctly
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + x * 4
+ |
+
+error: all if blocks contain the same code at the end
+ --> $DIR/shared_at_bottom.rs:190:44
+ |
+LL | if x == 17 { b = 1; a = 0x99; } else { a = 0x99; }
+ | ^^^^^^^^^^^
+ |
+help: consider moving these statements after the if
+ |
+LL ~ if x == 17 { b = 1; a = 0x99; } else { }
+LL + a = 0x99;
+ |
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.rs b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.rs
new file mode 100644
index 000000000..bdeb0a395
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.rs
@@ -0,0 +1,114 @@
+#![allow(dead_code, clippy::mixed_read_write_in_expression)]
+#![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+// This tests the branches_sharing_code lint at the start of blocks
+
+fn simple_examples() {
+ let x = 0;
+
+ // Simple
+ if true {
+ println!("Hello World!");
+ println!("I'm branch nr: 1");
+ } else {
+ println!("Hello World!");
+ println!("I'm branch nr: 2");
+ }
+
+ // Else if
+ if x == 0 {
+ let y = 9;
+ println!("The value y was set to: `{}`", y);
+ let _z = y;
+
+ println!("I'm the true start index of arrays");
+ } else if x == 1 {
+ let y = 9;
+ println!("The value y was set to: `{}`", y);
+ let _z = y;
+
+ println!("I start counting from 1 so my array starts from `1`");
+ } else {
+ let y = 9;
+ println!("The value y was set to: `{}`", y);
+ let _z = y;
+
+ println!("Ha, Pascal allows you to start the array where you want")
+ }
+
+ // Return a value
+ let _ = if x == 7 {
+ let y = 16;
+ println!("What can I say except: \"you're welcome?\"");
+ let _ = y;
+ x
+ } else {
+ let y = 16;
+ println!("Thank you");
+ y
+ };
+}
+
+/// Simple examples where the move can cause some problems due to moved values
+fn simple_but_suggestion_is_invalid() {
+ let x = 10;
+
+ // Can't be automatically moved because used_value_name is getting used again
+ let used_value_name = 19;
+ if x == 10 {
+ let used_value_name = "Different type";
+ println!("Str: {}", used_value_name);
+ let _ = 1;
+ } else {
+ let used_value_name = "Different type";
+ println!("Str: {}", used_value_name);
+ let _ = 2;
+ }
+ let _ = used_value_name;
+
+ // This can be automatically moved as `can_be_overridden` is not used again
+ let can_be_overridden = 8;
+ let _ = can_be_overridden;
+ if x == 11 {
+ let can_be_overridden = "Move me";
+ println!("I'm also moveable");
+ let _ = 111;
+ } else {
+ let can_be_overridden = "Move me";
+ println!("I'm also moveable");
+ let _ = 222;
+ }
+}
+
+/// This function tests that the `IS_SAME_THAN_ELSE` only covers the lint if it's enabled.
+fn check_if_same_than_else_mask() {
+ let x = 2021;
+
+ #[allow(clippy::if_same_then_else)]
+ if x == 2020 {
+ println!("This should trigger the `SHARED_CODE_IN_IF_BLOCKS` lint.");
+ println!("Because `IF_SAME_THEN_ELSE` is allowed here");
+ } else {
+ println!("This should trigger the `SHARED_CODE_IN_IF_BLOCKS` lint.");
+ println!("Because `IF_SAME_THEN_ELSE` is allowed here");
+ }
+
+ if x == 2019 {
+ println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
+ } else {
+ println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
+ }
+}
+
+#[allow(clippy::vec_init_then_push)]
+fn pf_local_with_inferred_type_issue7053() {
+ if true {
+ let mut v = Vec::new();
+ v.push(0);
+ } else {
+ let mut v = Vec::new();
+ v.push("");
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.stderr b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.stderr
new file mode 100644
index 000000000..d890b12ec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top.stderr
@@ -0,0 +1,121 @@
+error: all if blocks contain the same code at the start
+ --> $DIR/shared_at_top.rs:10:5
+ |
+LL | / if true {
+LL | | println!("Hello World!");
+ | |_________________________________^
+ |
+note: the lint level is defined here
+ --> $DIR/shared_at_top.rs:2:36
+ |
+LL | #![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: consider moving these statements before the if
+ |
+LL ~ println!("Hello World!");
+LL + if true {
+ |
+
+error: all if blocks contain the same code at the start
+ --> $DIR/shared_at_top.rs:19:5
+ |
+LL | / if x == 0 {
+LL | | let y = 9;
+LL | | println!("The value y was set to: `{}`", y);
+LL | | let _z = y;
+ | |___________________^
+ |
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements before the if
+ |
+LL ~ let y = 9;
+LL + println!("The value y was set to: `{}`", y);
+LL + let _z = y;
+LL + if x == 0 {
+ |
+
+error: all if blocks contain the same code at the start
+ --> $DIR/shared_at_top.rs:40:5
+ |
+LL | / let _ = if x == 7 {
+LL | | let y = 16;
+ | |___________________^
+ |
+help: consider moving these statements before the if
+ |
+LL ~ let y = 16;
+LL + let _ = if x == 7 {
+ |
+
+error: all if blocks contain the same code at the start
+ --> $DIR/shared_at_top.rs:58:5
+ |
+LL | / if x == 10 {
+LL | | let used_value_name = "Different type";
+LL | | println!("Str: {}", used_value_name);
+ | |_____________________________________________^
+ |
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements before the if
+ |
+LL ~ let used_value_name = "Different type";
+LL + println!("Str: {}", used_value_name);
+LL + if x == 10 {
+ |
+
+error: all if blocks contain the same code at the start
+ --> $DIR/shared_at_top.rs:72:5
+ |
+LL | / if x == 11 {
+LL | | let can_be_overridden = "Move me";
+LL | | println!("I'm also moveable");
+ | |______________________________________^
+ |
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements before the if
+ |
+LL ~ let can_be_overridden = "Move me";
+LL + println!("I'm also moveable");
+LL + if x == 11 {
+ |
+
+error: all if blocks contain the same code at the start
+ --> $DIR/shared_at_top.rs:88:5
+ |
+LL | / if x == 2020 {
+LL | | println!("This should trigger the `SHARED_CODE_IN_IF_BLOCKS` lint.");
+LL | | println!("Because `IF_SAME_THEN_ELSE` is allowed here");
+ | |________________________________________________________________^
+ |
+help: consider moving these statements before the if
+ |
+LL ~ println!("This should trigger the `SHARED_CODE_IN_IF_BLOCKS` lint.");
+LL + println!("Because `IF_SAME_THEN_ELSE` is allowed here");
+LL + if x == 2020 {
+ |
+
+error: this `if` has identical blocks
+ --> $DIR/shared_at_top.rs:96:18
+ |
+LL | if x == 2019 {
+ | __________________^
+LL | | println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
+LL | | } else {
+ | |_____^
+ |
+note: the lint level is defined here
+ --> $DIR/shared_at_top.rs:2:9
+ |
+LL | #![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+note: same as this
+ --> $DIR/shared_at_top.rs:98:12
+ |
+LL | } else {
+ | ____________^
+LL | | println!("This should trigger `IS_SAME_THAN_ELSE` as usual");
+LL | | }
+ | |_____^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.rs b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.rs
new file mode 100644
index 000000000..deefdad32
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.rs
@@ -0,0 +1,119 @@
+#![allow(dead_code)]
+#![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+// branches_sharing_code at the top and bottom of the if blocks
+
+struct DataPack {
+ id: u32,
+ name: String,
+ some_data: Vec<u8>,
+}
+
+fn overlapping_eq_regions() {
+ let x = 9;
+
+ // Overlap with separator
+ if x == 7 {
+ let t = 7;
+ let _overlap_start = t * 2;
+ let _overlap_end = 2 * t;
+ let _u = 9;
+ } else {
+ let t = 7;
+ let _overlap_start = t * 2;
+ let _overlap_end = 2 * t;
+ println!("Overlap separator");
+ let _overlap_start = t * 2;
+ let _overlap_end = 2 * t;
+ let _u = 9;
+ }
+
+ // Overlap with separator
+ if x == 99 {
+ let r = 7;
+ let _overlap_start = r;
+ let _overlap_middle = r * r;
+ let _overlap_end = r * r * r;
+ let z = "end";
+ } else {
+ let r = 7;
+ let _overlap_start = r;
+ let _overlap_middle = r * r;
+ let _overlap_middle = r * r;
+ let _overlap_end = r * r * r;
+ let z = "end";
+ }
+}
+
+fn complexer_example() {
+ fn gen_id(x: u32, y: u32) -> u32 {
+ let x = x & 0x0000_ffff;
+ let y = (y & 0xffff_0000) << 16;
+ x | y
+ }
+
+ fn process_data(data: DataPack) {
+ let _ = data;
+ }
+
+ let x = 8;
+ let y = 9;
+ if (x > 7 && y < 13) || (x + y) % 2 == 1 {
+ let a = 0xcafe;
+ let b = 0xffff00ff;
+ let e_id = gen_id(a, b);
+
+ println!("From the a `{}` to the b `{}`", a, b);
+
+ let pack = DataPack {
+ id: e_id,
+ name: "Player 1".to_string(),
+ some_data: vec![0x12, 0x34, 0x56, 0x78, 0x90],
+ };
+ process_data(pack);
+ } else {
+ let a = 0xcafe;
+ let b = 0xffff00ff;
+ let e_id = gen_id(a, b);
+
+ println!("The new ID is '{}'", e_id);
+
+ let pack = DataPack {
+ id: e_id,
+ name: "Player 1".to_string(),
+ some_data: vec![0x12, 0x34, 0x56, 0x78, 0x90],
+ };
+ process_data(pack);
+ }
+}
+
+/// This should add a note to the lint msg since the moved expression is not `()`
+fn added_note_for_expression_use() -> u32 {
+ let x = 9;
+
+ let _ = if x == 7 {
+ let _ = 19;
+
+ let _splitter = 6;
+
+ x << 2
+ } else {
+ let _ = 19;
+
+ x << 2
+ };
+
+ if x == 9 {
+ let _ = 17;
+
+ let _splitter = 6;
+
+ x * 4
+ } else {
+ let _ = 17;
+
+ x * 4
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.stderr b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.stderr
new file mode 100644
index 000000000..a270f637f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/shared_at_top_and_bottom.stderr
@@ -0,0 +1,155 @@
+error: all if blocks contain the same code at both the start and the end
+ --> $DIR/shared_at_top_and_bottom.rs:16:5
+ |
+LL | / if x == 7 {
+LL | | let t = 7;
+LL | | let _overlap_start = t * 2;
+LL | | let _overlap_end = 2 * t;
+ | |_________________________________^
+ |
+note: the lint level is defined here
+ --> $DIR/shared_at_top_and_bottom.rs:2:36
+ |
+LL | #![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+note: this code is shared at the end
+ --> $DIR/shared_at_top_and_bottom.rs:28:5
+ |
+LL | / let _u = 9;
+LL | | }
+ | |_____^
+help: consider moving these statements before the if
+ |
+LL ~ let t = 7;
+LL + let _overlap_start = t * 2;
+LL + let _overlap_end = 2 * t;
+LL + if x == 7 {
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + let _u = 9;
+ |
+
+error: all if blocks contain the same code at both the start and the end
+ --> $DIR/shared_at_top_and_bottom.rs:32:5
+ |
+LL | / if x == 99 {
+LL | | let r = 7;
+LL | | let _overlap_start = r;
+LL | | let _overlap_middle = r * r;
+ | |____________________________________^
+ |
+note: this code is shared at the end
+ --> $DIR/shared_at_top_and_bottom.rs:43:5
+ |
+LL | / let _overlap_end = r * r * r;
+LL | | let z = "end";
+LL | | }
+ | |_____^
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements before the if
+ |
+LL ~ let r = 7;
+LL + let _overlap_start = r;
+LL + let _overlap_middle = r * r;
+LL + if x == 99 {
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + let _overlap_end = r * r * r;
+LL + let z = "end";
+ |
+
+error: all if blocks contain the same code at both the start and the end
+ --> $DIR/shared_at_top_and_bottom.rs:61:5
+ |
+LL | / if (x > 7 && y < 13) || (x + y) % 2 == 1 {
+LL | | let a = 0xcafe;
+LL | | let b = 0xffff00ff;
+LL | | let e_id = gen_id(a, b);
+ | |________________________________^
+ |
+note: this code is shared at the end
+ --> $DIR/shared_at_top_and_bottom.rs:81:5
+ |
+LL | / let pack = DataPack {
+LL | | id: e_id,
+LL | | name: "Player 1".to_string(),
+LL | | some_data: vec![0x12, 0x34, 0x56, 0x78, 0x90],
+LL | | };
+LL | | process_data(pack);
+LL | | }
+ | |_____^
+ = warning: some moved values might need to be renamed to avoid wrong references
+help: consider moving these statements before the if
+ |
+LL ~ let a = 0xcafe;
+LL + let b = 0xffff00ff;
+LL + let e_id = gen_id(a, b);
+LL + if (x > 7 && y < 13) || (x + y) % 2 == 1 {
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + let pack = DataPack {
+LL + id: e_id,
+LL + name: "Player 1".to_string(),
+LL + some_data: vec![0x12, 0x34, 0x56, 0x78, 0x90],
+LL + };
+LL + process_data(pack);
+ |
+
+error: all if blocks contain the same code at both the start and the end
+ --> $DIR/shared_at_top_and_bottom.rs:94:5
+ |
+LL | / let _ = if x == 7 {
+LL | | let _ = 19;
+ | |___________________^
+ |
+note: this code is shared at the end
+ --> $DIR/shared_at_top_and_bottom.rs:103:5
+ |
+LL | / x << 2
+LL | | };
+ | |_____^
+ = note: the end suggestion probably needs some adjustments to use the expression result correctly
+help: consider moving these statements before the if
+ |
+LL ~ let _ = 19;
+LL + let _ = if x == 7 {
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL ~ x << 2;
+ |
+
+error: all if blocks contain the same code at both the start and the end
+ --> $DIR/shared_at_top_and_bottom.rs:106:5
+ |
+LL | / if x == 9 {
+LL | | let _ = 17;
+ | |___________________^
+ |
+note: this code is shared at the end
+ --> $DIR/shared_at_top_and_bottom.rs:115:5
+ |
+LL | / x * 4
+LL | | }
+ | |_____^
+ = note: the end suggestion probably needs some adjustments to use the expression result correctly
+help: consider moving these statements before the if
+ |
+LL ~ let _ = 17;
+LL + if x == 9 {
+ |
+help: consider moving these statements after the if
+ |
+LL ~ }
+LL + x * 4
+ |
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.rs b/src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.rs
new file mode 100644
index 000000000..a26141be2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.rs
@@ -0,0 +1,155 @@
+#![allow(dead_code, clippy::mixed_read_write_in_expression)]
+#![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+// This tests valid if blocks that shouldn't trigger the lint
+
+// Tests with value references are included in "shared_at_bottom.rs"
+
+fn valid_examples() {
+ let x = 2;
+
+ // The edge statements are different
+ if x == 9 {
+ let y = 1 << 5;
+
+ println!("This is the same: vvv");
+ let _z = y;
+ println!("The block expression is different");
+
+ println!("Different end 1");
+ } else {
+ let y = 1 << 7;
+
+ println!("This is the same: vvv");
+ let _z = y;
+ println!("The block expression is different");
+
+ println!("Different end 2");
+ }
+
+ // No else
+ if x == 2 {
+ println!("Hello world!");
+ println!("Hello back, how are you?");
+
+ // This is different vvvv
+ println!("Howdy stranger =^.^=");
+
+ println!("Bye Bye World");
+ } else if x == 9 {
+ println!("Hello world!");
+ println!("Hello back, how are you?");
+
+ // This is different vvvv
+ println!("Hello reviewer :D");
+
+ println!("Bye Bye World");
+ }
+
+ // Overlapping statements only in else if blocks -> Don't lint
+ if x == 0 {
+ println!("I'm important!")
+ } else if x == 17 {
+ println!("I share code in else if");
+
+ println!("x is 17");
+ } else {
+ println!("I share code in else if");
+
+        println!("x is neither x nor 17");
+ }
+
+ // Mutability is different
+ if x == 13 {
+ let mut y = 9;
+ println!("Value y is: {}", y);
+ y += 16;
+ let _z1 = y;
+ } else {
+ let y = 9;
+ println!("Value y is: {}", y);
+ let _z2 = y;
+ }
+
+ // Same blocks but at start and bottom so no `if_same_then_else` lint
+ if x == 418 {
+ let y = 9;
+ let z = 8;
+ let _ = (x, y, z);
+ // Don't tell the programmer, my code is also in the else block
+ } else if x == 419 {
+ println!("+-----------+");
+ println!("| |");
+ println!("| O O |");
+ println!("| ° |");
+ println!("| \\_____/ |");
+ println!("| |");
+ println!("+-----------+");
+ } else {
+ let y = 9;
+ let z = 8;
+ let _ = (x, y, z);
+ // I'm so much better than the x == 418 block. Trust me
+ }
+
+ let x = 1;
+ if true {
+ println!("{}", x);
+ } else {
+ let x = 2;
+ println!("{}", x);
+ }
+
+ // Let's test empty blocks
+ if false {
+ } else {
+ }
+}
+
+/// This makes sure that the `if_same_then_else` masks the `shared_code_in_if_blocks` lint
+fn trigger_other_lint() {
+ let x = 0;
+ let y = 1;
+
+ // Same block
+ if x == 0 {
+ let u = 19;
+ println!("How are u today?");
+ let _ = "This is a string";
+ } else {
+ let u = 19;
+ println!("How are u today?");
+ let _ = "This is a string";
+ }
+
+ // Only same expression
+ let _ = if x == 6 { 7 } else { 7 };
+
+ // Same in else if block
+ let _ = if x == 67 {
+ println!("Well I'm the most important block");
+ "I'm a pretty string"
+ } else if x == 68 {
+ println!("I'm a doppelgänger");
+ // Don't listen to my clone below
+
+ if y == 90 { "=^.^=" } else { ":D" }
+ } else {
+ // Don't listen to my clone above
+ println!("I'm a doppelgänger");
+
+ if y == 90 { "=^.^=" } else { ":D" }
+ };
+
+ if x == 0 {
+ println!("I'm single");
+ } else if x == 68 {
+ println!("I'm a doppelgänger");
+ // Don't listen to my clone below
+ } else {
+ // Don't listen to my clone above
+ println!("I'm a doppelgänger");
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.stderr b/src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.stderr
new file mode 100644
index 000000000..a815995e7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/branches_sharing_code/valid_if_blocks.stderr
@@ -0,0 +1,101 @@
+error: this `if` has identical blocks
+ --> $DIR/valid_if_blocks.rs:104:14
+ |
+LL | if false {
+ | ______________^
+LL | | } else {
+ | |_____^
+ |
+note: the lint level is defined here
+ --> $DIR/valid_if_blocks.rs:2:9
+ |
+LL | #![deny(clippy::if_same_then_else, clippy::branches_sharing_code)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+note: same as this
+ --> $DIR/valid_if_blocks.rs:105:12
+ |
+LL | } else {
+ | ____________^
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/valid_if_blocks.rs:115:15
+ |
+LL | if x == 0 {
+ | _______________^
+LL | | let u = 19;
+LL | | println!("How are u today?");
+LL | | let _ = "This is a string";
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/valid_if_blocks.rs:119:12
+ |
+LL | } else {
+ | ____________^
+LL | | let u = 19;
+LL | | println!("How are u today?");
+LL | | let _ = "This is a string";
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/valid_if_blocks.rs:126:23
+ |
+LL | let _ = if x == 6 { 7 } else { 7 };
+ | ^^^^^
+ |
+note: same as this
+ --> $DIR/valid_if_blocks.rs:126:34
+ |
+LL | let _ = if x == 6 { 7 } else { 7 };
+ | ^^^^^
+
+error: this `if` has identical blocks
+ --> $DIR/valid_if_blocks.rs:132:23
+ |
+LL | } else if x == 68 {
+ | _______________________^
+LL | | println!("I'm a doppelgänger");
+LL | | // Don't listen to my clone below
+LL | |
+LL | | if y == 90 { "=^.^=" } else { ":D" }
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/valid_if_blocks.rs:137:12
+ |
+LL | } else {
+ | ____________^
+LL | | // Don't listen to my clone above
+LL | | println!("I'm a doppelgänger");
+LL | |
+LL | | if y == 90 { "=^.^=" } else { ":D" }
+LL | | };
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/valid_if_blocks.rs:146:23
+ |
+LL | } else if x == 68 {
+ | _______________________^
+LL | | println!("I'm a doppelgänger");
+LL | | // Don't listen to my clone below
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/valid_if_blocks.rs:149:12
+ |
+LL | } else {
+ | ____________^
+LL | | // Don't listen to my clone above
+LL | | println!("I'm a doppelgänger");
+LL | | }
+ | |_____^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/builtin_type_shadow.rs b/src/tools/clippy/tests/ui/builtin_type_shadow.rs
new file mode 100644
index 000000000..69b8b6a0e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/builtin_type_shadow.rs
@@ -0,0 +1,9 @@
+#![warn(clippy::builtin_type_shadow)]
+#![allow(non_camel_case_types)]
+
+fn foo<u32>(a: u32) -> u32 {
+ 42
+ // ^ rustc's type error
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/builtin_type_shadow.stderr b/src/tools/clippy/tests/ui/builtin_type_shadow.stderr
new file mode 100644
index 000000000..47a8a1e62
--- /dev/null
+++ b/src/tools/clippy/tests/ui/builtin_type_shadow.stderr
@@ -0,0 +1,24 @@
+error: this generic shadows the built-in type `u32`
+ --> $DIR/builtin_type_shadow.rs:4:8
+ |
+LL | fn foo<u32>(a: u32) -> u32 {
+ | ^^^
+ |
+ = note: `-D clippy::builtin-type-shadow` implied by `-D warnings`
+
+error[E0308]: mismatched types
+ --> $DIR/builtin_type_shadow.rs:5:5
+ |
+LL | fn foo<u32>(a: u32) -> u32 {
+ | --- --- expected `u32` because of return type
+ | |
+ | this type parameter
+LL | 42
+ | ^^ expected type parameter `u32`, found integer
+ |
+ = note: expected type parameter `u32`
+ found type `{integer}`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/tools/clippy/tests/ui/bytecount.rs b/src/tools/clippy/tests/ui/bytecount.rs
new file mode 100644
index 000000000..d3ad26921
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytecount.rs
@@ -0,0 +1,26 @@
+#![allow(clippy::needless_borrow)]
+
+#[deny(clippy::naive_bytecount)]
+fn main() {
+ let x = vec![0_u8; 16];
+
+ let _ = x.iter().filter(|&&a| a == 0).count(); // naive byte count
+
+ let _ = (&x[..]).iter().filter(|&a| *a == 0).count(); // naive byte count
+
+ let _ = x.iter().filter(|a| **a > 0).count(); // not an equality count, OK.
+
+ let _ = x.iter().map(|a| a + 1).filter(|&a| a < 15).count(); // not a slice
+
+ let b = 0;
+
+ let _ = x.iter().filter(|_| b > 0).count(); // woah there
+
+ let _ = x.iter().filter(|_a| b == b + 1).count(); // nothing to see here, move along
+
+ let _ = x.iter().filter(|a| b + 1 == **a).count(); // naive byte count
+
+ let y = vec![0_u16; 3];
+
+ let _ = y.iter().filter(|&&a| a == 0).count(); // naive count, but not bytes
+}
diff --git a/src/tools/clippy/tests/ui/bytecount.stderr b/src/tools/clippy/tests/ui/bytecount.stderr
new file mode 100644
index 000000000..68d838c1f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytecount.stderr
@@ -0,0 +1,26 @@
+error: you appear to be counting bytes the naive way
+ --> $DIR/bytecount.rs:7:13
+ |
+LL | let _ = x.iter().filter(|&&a| a == 0).count(); // naive byte count
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using the bytecount crate: `bytecount::count(x, 0)`
+ |
+note: the lint level is defined here
+ --> $DIR/bytecount.rs:3:8
+ |
+LL | #[deny(clippy::naive_bytecount)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: you appear to be counting bytes the naive way
+ --> $DIR/bytecount.rs:9:13
+ |
+LL | let _ = (&x[..]).iter().filter(|&a| *a == 0).count(); // naive byte count
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using the bytecount crate: `bytecount::count((&x[..]), 0)`
+
+error: you appear to be counting bytes the naive way
+ --> $DIR/bytecount.rs:21:13
+ |
+LL | let _ = x.iter().filter(|a| b + 1 == **a).count(); // naive byte count
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using the bytecount crate: `bytecount::count(x, b + 1)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bytes_count_to_len.fixed b/src/tools/clippy/tests/ui/bytes_count_to_len.fixed
new file mode 100644
index 000000000..860642363
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytes_count_to_len.fixed
@@ -0,0 +1,34 @@
+// run-rustfix
+#![warn(clippy::bytes_count_to_len)]
+use std::fs::File;
+use std::io::Read;
+
+fn main() {
+ // should fix, because type is String
+ let _ = String::from("foo").len();
+
+ let s1 = String::from("foo");
+ let _ = s1.len();
+
+ // should fix, because type is &str
+ let _ = "foo".len();
+
+ let s2 = "foo";
+ let _ = s2.len();
+
+ // make sure using count() normally doesn't trigger warning
+ let vector = [0, 1, 2];
+ let _ = vector.iter().count();
+
+ // The type is slice, so should not fix
+ let _ = &[1, 2, 3].bytes().count();
+
+ let bytes: &[u8] = &[1, 2, 3];
+ bytes.bytes().count();
+
+ // The type is File, so should not fix
+ let _ = File::open("foobar").unwrap().bytes().count();
+
+ let f = File::open("foobar").unwrap();
+ let _ = f.bytes().count();
+}
diff --git a/src/tools/clippy/tests/ui/bytes_count_to_len.rs b/src/tools/clippy/tests/ui/bytes_count_to_len.rs
new file mode 100644
index 000000000..162730c28
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytes_count_to_len.rs
@@ -0,0 +1,34 @@
+// run-rustfix
+#![warn(clippy::bytes_count_to_len)]
+use std::fs::File;
+use std::io::Read;
+
+fn main() {
+ // should fix, because type is String
+ let _ = String::from("foo").bytes().count();
+
+ let s1 = String::from("foo");
+ let _ = s1.bytes().count();
+
+ // should fix, because type is &str
+ let _ = "foo".bytes().count();
+
+ let s2 = "foo";
+ let _ = s2.bytes().count();
+
+ // make sure using count() normally doesn't trigger warning
+ let vector = [0, 1, 2];
+ let _ = vector.iter().count();
+
+ // The type is slice, so should not fix
+ let _ = &[1, 2, 3].bytes().count();
+
+ let bytes: &[u8] = &[1, 2, 3];
+ bytes.bytes().count();
+
+ // The type is File, so should not fix
+ let _ = File::open("foobar").unwrap().bytes().count();
+
+ let f = File::open("foobar").unwrap();
+ let _ = f.bytes().count();
+}
diff --git a/src/tools/clippy/tests/ui/bytes_count_to_len.stderr b/src/tools/clippy/tests/ui/bytes_count_to_len.stderr
new file mode 100644
index 000000000..224deb779
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytes_count_to_len.stderr
@@ -0,0 +1,28 @@
+error: using long and hard to read `.bytes().count()`
+ --> $DIR/bytes_count_to_len.rs:8:13
+ |
+LL | let _ = String::from("foo").bytes().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.len()` instead: `String::from("foo").len()`
+ |
+ = note: `-D clippy::bytes-count-to-len` implied by `-D warnings`
+
+error: using long and hard to read `.bytes().count()`
+ --> $DIR/bytes_count_to_len.rs:11:13
+ |
+LL | let _ = s1.bytes().count();
+ | ^^^^^^^^^^^^^^^^^^ help: consider calling `.len()` instead: `s1.len()`
+
+error: using long and hard to read `.bytes().count()`
+ --> $DIR/bytes_count_to_len.rs:14:13
+ |
+LL | let _ = "foo".bytes().count();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.len()` instead: `"foo".len()`
+
+error: using long and hard to read `.bytes().count()`
+ --> $DIR/bytes_count_to_len.rs:17:13
+ |
+LL | let _ = s2.bytes().count();
+ | ^^^^^^^^^^^^^^^^^^ help: consider calling `.len()` instead: `s2.len()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/bytes_nth.fixed b/src/tools/clippy/tests/ui/bytes_nth.fixed
new file mode 100644
index 000000000..b1fb2e16b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytes_nth.fixed
@@ -0,0 +1,11 @@
+// run-rustfix
+
+#![allow(clippy::unnecessary_operation)]
+#![warn(clippy::bytes_nth)]
+
+fn main() {
+ let s = String::from("String");
+ let _ = s.as_bytes().get(3);
+ let _ = &s.as_bytes().get(3);
+ let _ = s[..].as_bytes().get(3);
+}
diff --git a/src/tools/clippy/tests/ui/bytes_nth.rs b/src/tools/clippy/tests/ui/bytes_nth.rs
new file mode 100644
index 000000000..034c54e6a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytes_nth.rs
@@ -0,0 +1,11 @@
+// run-rustfix
+
+#![allow(clippy::unnecessary_operation)]
+#![warn(clippy::bytes_nth)]
+
+fn main() {
+ let s = String::from("String");
+ let _ = s.bytes().nth(3);
+ let _ = &s.bytes().nth(3);
+ let _ = s[..].bytes().nth(3);
+}
diff --git a/src/tools/clippy/tests/ui/bytes_nth.stderr b/src/tools/clippy/tests/ui/bytes_nth.stderr
new file mode 100644
index 000000000..9851d4791
--- /dev/null
+++ b/src/tools/clippy/tests/ui/bytes_nth.stderr
@@ -0,0 +1,22 @@
+error: called `.bytes().nth()` on a `String`
+ --> $DIR/bytes_nth.rs:8:13
+ |
+LL | let _ = s.bytes().nth(3);
+ | ^^^^^^^^^^^^^^^^ help: try: `s.as_bytes().get(3)`
+ |
+ = note: `-D clippy::bytes-nth` implied by `-D warnings`
+
+error: called `.bytes().nth()` on a `String`
+ --> $DIR/bytes_nth.rs:9:14
+ |
+LL | let _ = &s.bytes().nth(3);
+ | ^^^^^^^^^^^^^^^^ help: try: `s.as_bytes().get(3)`
+
+error: called `.bytes().nth()` on a `str`
+ --> $DIR/bytes_nth.rs:10:13
+ |
+LL | let _ = s[..].bytes().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `s[..].as_bytes().get(3)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs
new file mode 100644
index 000000000..0d65071af
--- /dev/null
+++ b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs
@@ -0,0 +1,44 @@
+#![warn(clippy::case_sensitive_file_extension_comparisons)]
+
+use std::string::String;
+
+struct TestStruct;
+
+impl TestStruct {
+ fn ends_with(self, arg: &str) {}
+}
+
+fn is_rust_file(filename: &str) -> bool {
+ filename.ends_with(".rs")
+}
+
+fn main() {
+ // std::string::String and &str should trigger the lint failure with .ext12
+ let _ = String::from("").ends_with(".ext12");
+ let _ = "str".ends_with(".ext12");
+
+ // The test struct should not trigger the lint failure with .ext12
+ TestStruct {}.ends_with(".ext12");
+
+ // std::string::String and &str should trigger the lint failure with .EXT12
+ let _ = String::from("").ends_with(".EXT12");
+ let _ = "str".ends_with(".EXT12");
+
+ // The test struct should not trigger the lint failure with .EXT12
+ TestStruct {}.ends_with(".EXT12");
+
+ // Should not trigger the lint failure with .eXT12
+ let _ = String::from("").ends_with(".eXT12");
+ let _ = "str".ends_with(".eXT12");
+ TestStruct {}.ends_with(".eXT12");
+
+ // Should not trigger the lint failure with .EXT123 (too long)
+ let _ = String::from("").ends_with(".EXT123");
+ let _ = "str".ends_with(".EXT123");
+ TestStruct {}.ends_with(".EXT123");
+
+ // Shouldn't fail if it doesn't start with a dot
+ let _ = String::from("").ends_with("a.ext");
+ let _ = "str".ends_with("a.extA");
+ TestStruct {}.ends_with("a.ext");
+}
diff --git a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr
new file mode 100644
index 000000000..05b98169f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr
@@ -0,0 +1,43 @@
+error: case-sensitive file extension comparison
+ --> $DIR/case_sensitive_file_extension_comparisons.rs:12:14
+ |
+LL | filename.ends_with(".rs")
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::case-sensitive-file-extension-comparisons` implied by `-D warnings`
+ = help: consider using a case-insensitive comparison instead
+
+error: case-sensitive file extension comparison
+ --> $DIR/case_sensitive_file_extension_comparisons.rs:17:30
+ |
+LL | let _ = String::from("").ends_with(".ext12");
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a case-insensitive comparison instead
+
+error: case-sensitive file extension comparison
+ --> $DIR/case_sensitive_file_extension_comparisons.rs:18:19
+ |
+LL | let _ = "str".ends_with(".ext12");
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a case-insensitive comparison instead
+
+error: case-sensitive file extension comparison
+ --> $DIR/case_sensitive_file_extension_comparisons.rs:24:30
+ |
+LL | let _ = String::from("").ends_with(".EXT12");
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a case-insensitive comparison instead
+
+error: case-sensitive file extension comparison
+ --> $DIR/case_sensitive_file_extension_comparisons.rs:25:19
+ |
+LL | let _ = "str".ends_with(".EXT12");
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a case-insensitive comparison instead
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast.rs b/src/tools/clippy/tests/ui/cast.rs
new file mode 100644
index 000000000..e6031e9ad
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast.rs
@@ -0,0 +1,262 @@
+#![feature(repr128)]
+#![allow(incomplete_features)]
+#![warn(
+ clippy::cast_precision_loss,
+ clippy::cast_possible_truncation,
+ clippy::cast_sign_loss,
+ clippy::cast_possible_wrap
+)]
+#![allow(clippy::cast_abs_to_unsigned, clippy::no_effect, clippy::unnecessary_operation)]
+
+fn main() {
+ // Test clippy::cast_precision_loss
+ let x0 = 1i32;
+ x0 as f32;
+ let x1 = 1i64;
+ x1 as f32;
+ x1 as f64;
+ let x2 = 1u32;
+ x2 as f32;
+ let x3 = 1u64;
+ x3 as f32;
+ x3 as f64;
+ // Test clippy::cast_possible_truncation
+ 1f32 as i32;
+ 1f32 as u32;
+ 1f64 as f32;
+ 1i32 as i8;
+ 1i32 as u8;
+ 1f64 as isize;
+ 1f64 as usize;
+ // Test clippy::cast_possible_wrap
+ 1u8 as i8;
+ 1u16 as i16;
+ 1u32 as i32;
+ 1u64 as i64;
+ 1usize as isize;
+ // Test clippy::cast_sign_loss
+ 1i32 as u32;
+ -1i32 as u32;
+ 1isize as usize;
+ -1isize as usize;
+ 0i8 as u8;
+ i8::MAX as u8;
+ i16::MAX as u16;
+ i32::MAX as u32;
+ i64::MAX as u64;
+ i128::MAX as u128;
+
+ (-1i8).abs() as u8;
+ (-1i16).abs() as u16;
+ (-1i32).abs() as u32;
+ (-1i64).abs() as u64;
+ (-1isize).abs() as usize;
+
+ (-1i8).checked_abs().unwrap() as u8;
+ (-1i16).checked_abs().unwrap() as u16;
+ (-1i32).checked_abs().unwrap() as u32;
+ (-1i64).checked_abs().unwrap() as u64;
+ (-1isize).checked_abs().unwrap() as usize;
+
+ (-1i8).rem_euclid(1i8) as u8;
+ (-1i8).rem_euclid(1i8) as u16;
+ (-1i16).rem_euclid(1i16) as u16;
+ (-1i16).rem_euclid(1i16) as u32;
+ (-1i32).rem_euclid(1i32) as u32;
+ (-1i32).rem_euclid(1i32) as u64;
+ (-1i64).rem_euclid(1i64) as u64;
+ (-1i64).rem_euclid(1i64) as u128;
+ (-1isize).rem_euclid(1isize) as usize;
+ (1i8).rem_euclid(-1i8) as u8;
+ (1i8).rem_euclid(-1i8) as u16;
+ (1i16).rem_euclid(-1i16) as u16;
+ (1i16).rem_euclid(-1i16) as u32;
+ (1i32).rem_euclid(-1i32) as u32;
+ (1i32).rem_euclid(-1i32) as u64;
+ (1i64).rem_euclid(-1i64) as u64;
+ (1i64).rem_euclid(-1i64) as u128;
+ (1isize).rem_euclid(-1isize) as usize;
+
+ (-1i8).checked_rem_euclid(1i8).unwrap() as u8;
+ (-1i8).checked_rem_euclid(1i8).unwrap() as u16;
+ (-1i16).checked_rem_euclid(1i16).unwrap() as u16;
+ (-1i16).checked_rem_euclid(1i16).unwrap() as u32;
+ (-1i32).checked_rem_euclid(1i32).unwrap() as u32;
+ (-1i32).checked_rem_euclid(1i32).unwrap() as u64;
+ (-1i64).checked_rem_euclid(1i64).unwrap() as u64;
+ (-1i64).checked_rem_euclid(1i64).unwrap() as u128;
+ (-1isize).checked_rem_euclid(1isize).unwrap() as usize;
+ (1i8).checked_rem_euclid(-1i8).unwrap() as u8;
+ (1i8).checked_rem_euclid(-1i8).unwrap() as u16;
+ (1i16).checked_rem_euclid(-1i16).unwrap() as u16;
+ (1i16).checked_rem_euclid(-1i16).unwrap() as u32;
+ (1i32).checked_rem_euclid(-1i32).unwrap() as u32;
+ (1i32).checked_rem_euclid(-1i32).unwrap() as u64;
+ (1i64).checked_rem_euclid(-1i64).unwrap() as u64;
+ (1i64).checked_rem_euclid(-1i64).unwrap() as u128;
+ (1isize).checked_rem_euclid(-1isize).unwrap() as usize;
+
+ // no lint for `cast_possible_truncation`
+ // with `signum` method call (see issue #5395)
+ let x: i64 = 5;
+ let _ = x.signum() as i32;
+
+ let s = x.signum();
+ let _ = s as i32;
+
+ // Test for signed min
+ (-99999999999i64).min(1) as i8; // should be linted because signed
+
+ // Test for various operations that remove enough bits for the result to fit
+ (999999u64 & 1) as u8;
+ (999999u64 % 15) as u8;
+ (999999u64 / 0x1_0000_0000_0000) as u16;
+ ({ 999999u64 >> 56 }) as u8;
+ ({
+ let x = 999999u64;
+ x.min(1)
+ }) as u8;
+ 999999u64.clamp(0, 255) as u8;
+ 999999u64.clamp(0, 256) as u8; // should still be linted
+
+ #[derive(Clone, Copy)]
+ enum E1 {
+ A,
+ B,
+ C,
+ }
+ impl E1 {
+ fn test(self) {
+ let _ = self as u8; // Don't lint. `0..=2` fits in u8
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ enum E2 {
+ A = 255,
+ B,
+ }
+ impl E2 {
+ fn test(self) {
+ let _ = self as u8;
+ let _ = Self::B as u8;
+ let _ = self as i16; // Don't lint. `255..=256` fits in i16
+ let _ = Self::A as u8; // Don't lint.
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ enum E3 {
+ A = -1,
+ B,
+ C = 50,
+ }
+ impl E3 {
+ fn test(self) {
+ let _ = self as i8; // Don't lint. `-1..=50` fits in i8
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ enum E4 {
+ A = -128,
+ B,
+ }
+ impl E4 {
+ fn test(self) {
+ let _ = self as i8; // Don't lint. `-128..=-127` fits in i8
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ enum E5 {
+ A = -129,
+ B = 127,
+ }
+ impl E5 {
+ fn test(self) {
+ let _ = self as i8;
+ let _ = Self::A as i8;
+ let _ = self as i16; // Don't lint. `-129..=127` fits in i16
+ let _ = Self::B as u8; // Don't lint.
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ #[repr(u32)]
+ enum E6 {
+ A = u16::MAX as u32,
+ B,
+ }
+ impl E6 {
+ fn test(self) {
+ let _ = self as i16;
+ let _ = Self::A as u16; // Don't lint. `2^16-1` fits in u16
+ let _ = self as u32; // Don't lint. `2^16-1..=2^16` fits in u32
+ let _ = Self::A as u16; // Don't lint.
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ #[repr(u64)]
+ enum E7 {
+ A = u32::MAX as u64,
+ B,
+ }
+ impl E7 {
+ fn test(self) {
+ let _ = self as usize;
+ let _ = Self::A as usize; // Don't lint.
+ let _ = self as u64; // Don't lint. `2^32-1..=2^32` fits in u64
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ #[repr(i128)]
+ enum E8 {
+ A = i128::MIN,
+ B,
+ C = 0,
+ D = i128::MAX,
+ }
+ impl E8 {
+ fn test(self) {
+ let _ = self as i128; // Don't lint. `-(2^127)..=2^127-1` fits in i128
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ #[repr(u128)]
+ enum E9 {
+ A,
+ B = u128::MAX,
+ }
+ impl E9 {
+ fn test(self) {
+ let _ = Self::A as u8; // Don't lint.
+ let _ = self as u128; // Don't lint. `0..=2^128-1` fits in u128
+ }
+ }
+
+ #[derive(Clone, Copy)]
+ #[repr(usize)]
+ enum E10 {
+ A,
+ B = u32::MAX as usize,
+ }
+ impl E10 {
+ fn test(self) {
+ let _ = self as u16;
+ let _ = Self::B as u32; // Don't lint.
+ let _ = self as u64; // Don't lint.
+ }
+ }
+}
+
+fn avoid_subtract_overflow(q: u32) {
+ let c = (q >> 16) as u8;
+ c as usize;
+
+ let c = (q / 1000) as u8;
+ c as usize;
+}
diff --git a/src/tools/clippy/tests/ui/cast.stderr b/src/tools/clippy/tests/ui/cast.stderr
new file mode 100644
index 000000000..0c63b4af3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast.stderr
@@ -0,0 +1,210 @@
+error: casting `i32` to `f32` causes a loss of precision (`i32` is 32 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast.rs:14:5
+ |
+LL | x0 as f32;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::cast-precision-loss` implied by `-D warnings`
+
+error: casting `i64` to `f32` causes a loss of precision (`i64` is 64 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast.rs:16:5
+ |
+LL | x1 as f32;
+ | ^^^^^^^^^
+
+error: casting `i64` to `f64` causes a loss of precision (`i64` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast.rs:17:5
+ |
+LL | x1 as f64;
+ | ^^^^^^^^^
+
+error: casting `u32` to `f32` causes a loss of precision (`u32` is 32 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast.rs:19:5
+ |
+LL | x2 as f32;
+ | ^^^^^^^^^
+
+error: casting `u64` to `f32` causes a loss of precision (`u64` is 64 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast.rs:21:5
+ |
+LL | x3 as f32;
+ | ^^^^^^^^^
+
+error: casting `u64` to `f64` causes a loss of precision (`u64` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast.rs:22:5
+ |
+LL | x3 as f64;
+ | ^^^^^^^^^
+
+error: casting `f32` to `i32` may truncate the value
+ --> $DIR/cast.rs:24:5
+ |
+LL | 1f32 as i32;
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-possible-truncation` implied by `-D warnings`
+
+error: casting `f32` to `u32` may truncate the value
+ --> $DIR/cast.rs:25:5
+ |
+LL | 1f32 as u32;
+ | ^^^^^^^^^^^
+
+error: casting `f32` to `u32` may lose the sign of the value
+ --> $DIR/cast.rs:25:5
+ |
+LL | 1f32 as u32;
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-sign-loss` implied by `-D warnings`
+
+error: casting `f64` to `f32` may truncate the value
+ --> $DIR/cast.rs:26:5
+ |
+LL | 1f64 as f32;
+ | ^^^^^^^^^^^
+
+error: casting `i32` to `i8` may truncate the value
+ --> $DIR/cast.rs:27:5
+ |
+LL | 1i32 as i8;
+ | ^^^^^^^^^^
+
+error: casting `i32` to `u8` may truncate the value
+ --> $DIR/cast.rs:28:5
+ |
+LL | 1i32 as u8;
+ | ^^^^^^^^^^
+
+error: casting `f64` to `isize` may truncate the value
+ --> $DIR/cast.rs:29:5
+ |
+LL | 1f64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `f64` to `usize` may truncate the value
+ --> $DIR/cast.rs:30:5
+ |
+LL | 1f64 as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `f64` to `usize` may lose the sign of the value
+ --> $DIR/cast.rs:30:5
+ |
+LL | 1f64 as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u8` to `i8` may wrap around the value
+ --> $DIR/cast.rs:32:5
+ |
+LL | 1u8 as i8;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::cast-possible-wrap` implied by `-D warnings`
+
+error: casting `u16` to `i16` may wrap around the value
+ --> $DIR/cast.rs:33:5
+ |
+LL | 1u16 as i16;
+ | ^^^^^^^^^^^
+
+error: casting `u32` to `i32` may wrap around the value
+ --> $DIR/cast.rs:34:5
+ |
+LL | 1u32 as i32;
+ | ^^^^^^^^^^^
+
+error: casting `u64` to `i64` may wrap around the value
+ --> $DIR/cast.rs:35:5
+ |
+LL | 1u64 as i64;
+ | ^^^^^^^^^^^
+
+error: casting `usize` to `isize` may wrap around the value
+ --> $DIR/cast.rs:36:5
+ |
+LL | 1usize as isize;
+ | ^^^^^^^^^^^^^^^
+
+error: casting `i32` to `u32` may lose the sign of the value
+ --> $DIR/cast.rs:39:5
+ |
+LL | -1i32 as u32;
+ | ^^^^^^^^^^^^
+
+error: casting `isize` to `usize` may lose the sign of the value
+ --> $DIR/cast.rs:41:5
+ |
+LL | -1isize as usize;
+ | ^^^^^^^^^^^^^^^^
+
+error: casting `i64` to `i8` may truncate the value
+ --> $DIR/cast.rs:108:5
+ |
+LL | (-99999999999i64).min(1) as i8; // should be linted because signed
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: casting `u64` to `u8` may truncate the value
+ --> $DIR/cast.rs:120:5
+ |
+LL | 999999u64.clamp(0, 256) as u8; // should still be linted
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: casting `main::E2` to `u8` may truncate the value
+ --> $DIR/cast.rs:141:21
+ |
+LL | let _ = self as u8;
+ | ^^^^^^^^^^
+
+error: casting `main::E2::B` to `u8` will truncate the value
+ --> $DIR/cast.rs:142:21
+ |
+LL | let _ = Self::B as u8;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-enum-truncation` implied by `-D warnings`
+
+error: casting `main::E5` to `i8` may truncate the value
+ --> $DIR/cast.rs:178:21
+ |
+LL | let _ = self as i8;
+ | ^^^^^^^^^^
+
+error: casting `main::E5::A` to `i8` will truncate the value
+ --> $DIR/cast.rs:179:21
+ |
+LL | let _ = Self::A as i8;
+ | ^^^^^^^^^^^^^
+
+error: casting `main::E6` to `i16` may truncate the value
+ --> $DIR/cast.rs:193:21
+ |
+LL | let _ = self as i16;
+ | ^^^^^^^^^^^
+
+error: casting `main::E7` to `usize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast.rs:208:21
+ |
+LL | let _ = self as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `main::E10` to `u16` may truncate the value
+ --> $DIR/cast.rs:249:21
+ |
+LL | let _ = self as u16;
+ | ^^^^^^^^^^^
+
+error: casting `u32` to `u8` may truncate the value
+ --> $DIR/cast.rs:257:13
+ |
+LL | let c = (q >> 16) as u8;
+ | ^^^^^^^^^^^^^^^
+
+error: casting `u32` to `u8` may truncate the value
+ --> $DIR/cast.rs:260:13
+ |
+LL | let c = (q / 1000) as u8;
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 33 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_abs_to_unsigned.fixed b/src/tools/clippy/tests/ui/cast_abs_to_unsigned.fixed
new file mode 100644
index 000000000..a68b32b09
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_abs_to_unsigned.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+#![warn(clippy::cast_abs_to_unsigned)]
+
+fn main() {
+ let x: i32 = -42;
+ let y: u32 = x.unsigned_abs();
+ println!("The absolute value of {} is {}", x, y);
+
+ let a: i32 = -3;
+ let _: usize = a.unsigned_abs() as usize;
+ let _: usize = a.unsigned_abs() as _;
+ let _ = a.unsigned_abs() as usize;
+
+ let a: i64 = -3;
+ let _ = a.unsigned_abs() as usize;
+ let _ = a.unsigned_abs() as u8;
+ let _ = a.unsigned_abs() as u16;
+ let _ = a.unsigned_abs() as u32;
+ let _ = a.unsigned_abs();
+ let _ = a.unsigned_abs() as u128;
+
+ let a: isize = -3;
+ let _ = a.unsigned_abs();
+ let _ = a.unsigned_abs() as u8;
+ let _ = a.unsigned_abs() as u16;
+ let _ = a.unsigned_abs() as u32;
+ let _ = a.unsigned_abs() as u64;
+ let _ = a.unsigned_abs() as u128;
+}
diff --git a/src/tools/clippy/tests/ui/cast_abs_to_unsigned.rs b/src/tools/clippy/tests/ui/cast_abs_to_unsigned.rs
new file mode 100644
index 000000000..110fbc6c2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_abs_to_unsigned.rs
@@ -0,0 +1,29 @@
+// run-rustfix
+#![warn(clippy::cast_abs_to_unsigned)]
+
+fn main() {
+ let x: i32 = -42;
+ let y: u32 = x.abs() as u32;
+ println!("The absolute value of {} is {}", x, y);
+
+ let a: i32 = -3;
+ let _: usize = a.abs() as usize;
+ let _: usize = a.abs() as _;
+ let _ = a.abs() as usize;
+
+ let a: i64 = -3;
+ let _ = a.abs() as usize;
+ let _ = a.abs() as u8;
+ let _ = a.abs() as u16;
+ let _ = a.abs() as u32;
+ let _ = a.abs() as u64;
+ let _ = a.abs() as u128;
+
+ let a: isize = -3;
+ let _ = a.abs() as usize;
+ let _ = a.abs() as u8;
+ let _ = a.abs() as u16;
+ let _ = a.abs() as u32;
+ let _ = a.abs() as u64;
+ let _ = a.abs() as u128;
+}
diff --git a/src/tools/clippy/tests/ui/cast_abs_to_unsigned.stderr b/src/tools/clippy/tests/ui/cast_abs_to_unsigned.stderr
new file mode 100644
index 000000000..02c24e106
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_abs_to_unsigned.stderr
@@ -0,0 +1,100 @@
+error: casting the result of `i32::abs()` to u32
+ --> $DIR/cast_abs_to_unsigned.rs:6:18
+ |
+LL | let y: u32 = x.abs() as u32;
+ | ^^^^^^^^^^^^^^ help: replace with: `x.unsigned_abs()`
+ |
+ = note: `-D clippy::cast-abs-to-unsigned` implied by `-D warnings`
+
+error: casting the result of `i32::abs()` to usize
+ --> $DIR/cast_abs_to_unsigned.rs:10:20
+ |
+LL | let _: usize = a.abs() as usize;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i32::abs()` to usize
+ --> $DIR/cast_abs_to_unsigned.rs:11:20
+ |
+LL | let _: usize = a.abs() as _;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i32::abs()` to usize
+ --> $DIR/cast_abs_to_unsigned.rs:12:13
+ |
+LL | let _ = a.abs() as usize;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i64::abs()` to usize
+ --> $DIR/cast_abs_to_unsigned.rs:15:13
+ |
+LL | let _ = a.abs() as usize;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i64::abs()` to u8
+ --> $DIR/cast_abs_to_unsigned.rs:16:13
+ |
+LL | let _ = a.abs() as u8;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i64::abs()` to u16
+ --> $DIR/cast_abs_to_unsigned.rs:17:13
+ |
+LL | let _ = a.abs() as u16;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i64::abs()` to u32
+ --> $DIR/cast_abs_to_unsigned.rs:18:13
+ |
+LL | let _ = a.abs() as u32;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i64::abs()` to u64
+ --> $DIR/cast_abs_to_unsigned.rs:19:13
+ |
+LL | let _ = a.abs() as u64;
+ | ^^^^^^^^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `i64::abs()` to u128
+ --> $DIR/cast_abs_to_unsigned.rs:20:13
+ |
+LL | let _ = a.abs() as u128;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `isize::abs()` to usize
+ --> $DIR/cast_abs_to_unsigned.rs:23:13
+ |
+LL | let _ = a.abs() as usize;
+ | ^^^^^^^^^^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `isize::abs()` to u8
+ --> $DIR/cast_abs_to_unsigned.rs:24:13
+ |
+LL | let _ = a.abs() as u8;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `isize::abs()` to u16
+ --> $DIR/cast_abs_to_unsigned.rs:25:13
+ |
+LL | let _ = a.abs() as u16;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `isize::abs()` to u32
+ --> $DIR/cast_abs_to_unsigned.rs:26:13
+ |
+LL | let _ = a.abs() as u32;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `isize::abs()` to u64
+ --> $DIR/cast_abs_to_unsigned.rs:27:13
+ |
+LL | let _ = a.abs() as u64;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: casting the result of `isize::abs()` to u128
+ --> $DIR/cast_abs_to_unsigned.rs:28:13
+ |
+LL | let _ = a.abs() as u128;
+ | ^^^^^^^ help: replace with: `a.unsigned_abs()`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_alignment.rs b/src/tools/clippy/tests/ui/cast_alignment.rs
new file mode 100644
index 000000000..95bb883df
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_alignment.rs
@@ -0,0 +1,51 @@
+//! Test casts for alignment issues
+
+#![feature(rustc_private)]
+#![feature(core_intrinsics)]
+extern crate libc;
+
+#[warn(clippy::cast_ptr_alignment)]
+#[allow(
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::cast_lossless,
+ clippy::borrow_as_ptr
+)]
+
+fn main() {
+ /* These should be warned against */
+
+ // cast to more-strictly-aligned type
+ (&1u8 as *const u8) as *const u16;
+ (&mut 1u8 as *mut u8) as *mut u16;
+
+ // cast to more-strictly-aligned type, but with the `pointer::cast` function.
+ (&1u8 as *const u8).cast::<u16>();
+ (&mut 1u8 as *mut u8).cast::<u16>();
+
+ /* These should be ok */
+
+ // not a pointer type
+ 1u8 as u16;
+ // cast to less-strictly-aligned type
+ (&1u16 as *const u16) as *const u8;
+ (&mut 1u16 as *mut u16) as *mut u8;
+ // For c_void, we should trust the user. See #2677
+ (&1u32 as *const u32 as *const std::os::raw::c_void) as *const u32;
+ (&1u32 as *const u32 as *const libc::c_void) as *const u32;
+ // For ZST, we should trust the user. See #4256
+ (&1u32 as *const u32 as *const ()) as *const u32;
+
+ // Issue #2881
+ let mut data = [0u8, 0u8];
+ unsafe {
+ let ptr = &data as *const [u8; 2] as *const u8;
+ let _ = (ptr as *const u16).read_unaligned();
+ let _ = core::ptr::read_unaligned(ptr as *const u16);
+ let _ = core::intrinsics::unaligned_volatile_load(ptr as *const u16);
+ let ptr = &mut data as *mut [u8; 2] as *mut u8;
+ (ptr as *mut u16).write_unaligned(0);
+ core::ptr::write_unaligned(ptr as *mut u16, 0);
+ core::intrinsics::unaligned_volatile_store(ptr as *mut u16, 0);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_alignment.stderr b/src/tools/clippy/tests/ui/cast_alignment.stderr
new file mode 100644
index 000000000..5df2b5b10
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_alignment.stderr
@@ -0,0 +1,28 @@
+error: casting from `*const u8` to a more-strictly-aligned pointer (`*const u16`) (1 < 2 bytes)
+ --> $DIR/cast_alignment.rs:19:5
+ |
+LL | (&1u8 as *const u8) as *const u16;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-ptr-alignment` implied by `-D warnings`
+
+error: casting from `*mut u8` to a more-strictly-aligned pointer (`*mut u16`) (1 < 2 bytes)
+ --> $DIR/cast_alignment.rs:20:5
+ |
+LL | (&mut 1u8 as *mut u8) as *mut u16;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: casting from `*const u8` to a more-strictly-aligned pointer (`*const u16`) (1 < 2 bytes)
+ --> $DIR/cast_alignment.rs:23:5
+ |
+LL | (&1u8 as *const u8).cast::<u16>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: casting from `*mut u8` to a more-strictly-aligned pointer (`*mut u16`) (1 < 2 bytes)
+ --> $DIR/cast_alignment.rs:24:5
+ |
+LL | (&mut 1u8 as *mut u8).cast::<u16>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_enum_constructor.rs b/src/tools/clippy/tests/ui/cast_enum_constructor.rs
new file mode 100644
index 000000000..0193454ad
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_enum_constructor.rs
@@ -0,0 +1,17 @@
+#![warn(clippy::cast_enum_constructor)]
+#![allow(clippy::fn_to_numeric_cast)]
+
+fn main() {
+ enum Foo {
+ Y(u32),
+ }
+
+ enum Bar {
+ X,
+ }
+
+ let _ = Foo::Y as usize;
+ let _ = Foo::Y as isize;
+ let _ = Foo::Y as fn(u32) -> Foo;
+ let _ = Bar::X as usize;
+}
diff --git a/src/tools/clippy/tests/ui/cast_enum_constructor.stderr b/src/tools/clippy/tests/ui/cast_enum_constructor.stderr
new file mode 100644
index 000000000..710909dd2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_enum_constructor.stderr
@@ -0,0 +1,16 @@
+error: cast of an enum tuple constructor to an integer
+ --> $DIR/cast_enum_constructor.rs:13:13
+ |
+LL | let _ = Foo::Y as usize;
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-enum-constructor` implied by `-D warnings`
+
+error: cast of an enum tuple constructor to an integer
+ --> $DIR/cast_enum_constructor.rs:14:13
+ |
+LL | let _ = Foo::Y as isize;
+ | ^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_lossless_bool.fixed b/src/tools/clippy/tests/ui/cast_lossless_bool.fixed
new file mode 100644
index 000000000..9e2da45c3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_bool.fixed
@@ -0,0 +1,42 @@
+// run-rustfix
+
+#![allow(dead_code)]
+#![warn(clippy::cast_lossless)]
+
+fn main() {
+ // Test clippy::cast_lossless with casts to integer types
+ let _ = u8::from(true);
+ let _ = u16::from(true);
+ let _ = u32::from(true);
+ let _ = u64::from(true);
+ let _ = u128::from(true);
+ let _ = usize::from(true);
+
+ let _ = i8::from(true);
+ let _ = i16::from(true);
+ let _ = i32::from(true);
+ let _ = i64::from(true);
+ let _ = i128::from(true);
+ let _ = isize::from(true);
+
+ // Test with an expression wrapped in parens
+ let _ = u16::from(true | false);
+}
+
+// The lint would suggest using `u32::from(input)` here but the `XX::from` function is not const,
+// so we skip the lint if the expression is in a const fn.
+// See #3656
+const fn abc(input: bool) -> u32 {
+ input as u32
+}
+
+// Same as the above issue. We can't suggest `::from` in const fns in impls
+mod cast_lossless_in_impl {
+ struct A;
+
+ impl A {
+ pub const fn convert(x: bool) -> u64 {
+ x as u64
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_lossless_bool.rs b/src/tools/clippy/tests/ui/cast_lossless_bool.rs
new file mode 100644
index 000000000..b6f6c59a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_bool.rs
@@ -0,0 +1,42 @@
+// run-rustfix
+
+#![allow(dead_code)]
+#![warn(clippy::cast_lossless)]
+
+fn main() {
+ // Test clippy::cast_lossless with casts to integer types
+ let _ = true as u8;
+ let _ = true as u16;
+ let _ = true as u32;
+ let _ = true as u64;
+ let _ = true as u128;
+ let _ = true as usize;
+
+ let _ = true as i8;
+ let _ = true as i16;
+ let _ = true as i32;
+ let _ = true as i64;
+ let _ = true as i128;
+ let _ = true as isize;
+
+ // Test with an expression wrapped in parens
+ let _ = (true | false) as u16;
+}
+
+// The lint would suggest using `u32::from(input)` here but the `XX::from` function is not const,
+// so we skip the lint if the expression is in a const fn.
+// See #3656
+const fn abc(input: bool) -> u32 {
+ input as u32
+}
+
+// Same as the above issue. We can't suggest `::from` in const fns in impls
+mod cast_lossless_in_impl {
+ struct A;
+
+ impl A {
+ pub const fn convert(x: bool) -> u64 {
+ x as u64
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_lossless_bool.stderr b/src/tools/clippy/tests/ui/cast_lossless_bool.stderr
new file mode 100644
index 000000000..6b1483360
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_bool.stderr
@@ -0,0 +1,82 @@
+error: casting `bool` to `u8` is more cleanly stated with `u8::from(_)`
+ --> $DIR/cast_lossless_bool.rs:8:13
+ |
+LL | let _ = true as u8;
+ | ^^^^^^^^^^ help: try: `u8::from(true)`
+ |
+ = note: `-D clippy::cast-lossless` implied by `-D warnings`
+
+error: casting `bool` to `u16` is more cleanly stated with `u16::from(_)`
+ --> $DIR/cast_lossless_bool.rs:9:13
+ |
+LL | let _ = true as u16;
+ | ^^^^^^^^^^^ help: try: `u16::from(true)`
+
+error: casting `bool` to `u32` is more cleanly stated with `u32::from(_)`
+ --> $DIR/cast_lossless_bool.rs:10:13
+ |
+LL | let _ = true as u32;
+ | ^^^^^^^^^^^ help: try: `u32::from(true)`
+
+error: casting `bool` to `u64` is more cleanly stated with `u64::from(_)`
+ --> $DIR/cast_lossless_bool.rs:11:13
+ |
+LL | let _ = true as u64;
+ | ^^^^^^^^^^^ help: try: `u64::from(true)`
+
+error: casting `bool` to `u128` is more cleanly stated with `u128::from(_)`
+ --> $DIR/cast_lossless_bool.rs:12:13
+ |
+LL | let _ = true as u128;
+ | ^^^^^^^^^^^^ help: try: `u128::from(true)`
+
+error: casting `bool` to `usize` is more cleanly stated with `usize::from(_)`
+ --> $DIR/cast_lossless_bool.rs:13:13
+ |
+LL | let _ = true as usize;
+ | ^^^^^^^^^^^^^ help: try: `usize::from(true)`
+
+error: casting `bool` to `i8` is more cleanly stated with `i8::from(_)`
+ --> $DIR/cast_lossless_bool.rs:15:13
+ |
+LL | let _ = true as i8;
+ | ^^^^^^^^^^ help: try: `i8::from(true)`
+
+error: casting `bool` to `i16` is more cleanly stated with `i16::from(_)`
+ --> $DIR/cast_lossless_bool.rs:16:13
+ |
+LL | let _ = true as i16;
+ | ^^^^^^^^^^^ help: try: `i16::from(true)`
+
+error: casting `bool` to `i32` is more cleanly stated with `i32::from(_)`
+ --> $DIR/cast_lossless_bool.rs:17:13
+ |
+LL | let _ = true as i32;
+ | ^^^^^^^^^^^ help: try: `i32::from(true)`
+
+error: casting `bool` to `i64` is more cleanly stated with `i64::from(_)`
+ --> $DIR/cast_lossless_bool.rs:18:13
+ |
+LL | let _ = true as i64;
+ | ^^^^^^^^^^^ help: try: `i64::from(true)`
+
+error: casting `bool` to `i128` is more cleanly stated with `i128::from(_)`
+ --> $DIR/cast_lossless_bool.rs:19:13
+ |
+LL | let _ = true as i128;
+ | ^^^^^^^^^^^^ help: try: `i128::from(true)`
+
+error: casting `bool` to `isize` is more cleanly stated with `isize::from(_)`
+ --> $DIR/cast_lossless_bool.rs:20:13
+ |
+LL | let _ = true as isize;
+ | ^^^^^^^^^^^^^ help: try: `isize::from(true)`
+
+error: casting `bool` to `u16` is more cleanly stated with `u16::from(_)`
+ --> $DIR/cast_lossless_bool.rs:23:13
+ |
+LL | let _ = (true | false) as u16;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `u16::from(true | false)`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_lossless_float.fixed b/src/tools/clippy/tests/ui/cast_lossless_float.fixed
new file mode 100644
index 000000000..32a9c1c4a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_float.fixed
@@ -0,0 +1,45 @@
+// run-rustfix
+
+#![allow(clippy::no_effect, clippy::unnecessary_operation, dead_code)]
+#![warn(clippy::cast_lossless)]
+
+fn main() {
+ // Test clippy::cast_lossless with casts to floating-point types
+ let x0 = 1i8;
+ let _ = f32::from(x0);
+ let _ = f64::from(x0);
+ let x1 = 1u8;
+ let _ = f32::from(x1);
+ let _ = f64::from(x1);
+ let x2 = 1i16;
+ let _ = f32::from(x2);
+ let _ = f64::from(x2);
+ let x3 = 1u16;
+ let _ = f32::from(x3);
+ let _ = f64::from(x3);
+ let x4 = 1i32;
+ let _ = f64::from(x4);
+ let x5 = 1u32;
+ let _ = f64::from(x5);
+
+ // Test with casts from floating-point types
+ let _ = f64::from(1.0f32);
+}
+
+// The lint would suggest using `f64::from(input)` here but the `XX::from` function is not const,
+// so we skip the lint if the expression is in a const fn.
+// See #3656
+const fn abc(input: f32) -> f64 {
+ input as f64
+}
+
+// Same as the above issue. We can't suggest `::from` in const fns in impls
+mod cast_lossless_in_impl {
+ struct A;
+
+ impl A {
+ pub const fn convert(x: f32) -> f64 {
+ x as f64
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_lossless_float.rs b/src/tools/clippy/tests/ui/cast_lossless_float.rs
new file mode 100644
index 000000000..6f5ddcfe0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_float.rs
@@ -0,0 +1,45 @@
+// run-rustfix
+
+#![allow(clippy::no_effect, clippy::unnecessary_operation, dead_code)]
+#![warn(clippy::cast_lossless)]
+
+fn main() {
+ // Test clippy::cast_lossless with casts to floating-point types
+ let x0 = 1i8;
+ let _ = x0 as f32;
+ let _ = x0 as f64;
+ let x1 = 1u8;
+ let _ = x1 as f32;
+ let _ = x1 as f64;
+ let x2 = 1i16;
+ let _ = x2 as f32;
+ let _ = x2 as f64;
+ let x3 = 1u16;
+ let _ = x3 as f32;
+ let _ = x3 as f64;
+ let x4 = 1i32;
+ let _ = x4 as f64;
+ let x5 = 1u32;
+ let _ = x5 as f64;
+
+ // Test with casts from floating-point types
+ let _ = 1.0f32 as f64;
+}
+
+// The lint would suggest using `f64::from(input)` here but the `XX::from` function is not const,
+// so we skip the lint if the expression is in a const fn.
+// See #3656
+const fn abc(input: f32) -> f64 {
+ input as f64
+}
+
+// Same as the above issue. We can't suggest `::from` in const fns in impls
+mod cast_lossless_in_impl {
+ struct A;
+
+ impl A {
+ pub const fn convert(x: f32) -> f64 {
+ x as f64
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_lossless_float.stderr b/src/tools/clippy/tests/ui/cast_lossless_float.stderr
new file mode 100644
index 000000000..8326d40be
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_float.stderr
@@ -0,0 +1,70 @@
+error: casting `i8` to `f32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:9:13
+ |
+LL | let _ = x0 as f32;
+ | ^^^^^^^^^ help: try: `f32::from(x0)`
+ |
+ = note: `-D clippy::cast-lossless` implied by `-D warnings`
+
+error: casting `i8` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:10:13
+ |
+LL | let _ = x0 as f64;
+ | ^^^^^^^^^ help: try: `f64::from(x0)`
+
+error: casting `u8` to `f32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:12:13
+ |
+LL | let _ = x1 as f32;
+ | ^^^^^^^^^ help: try: `f32::from(x1)`
+
+error: casting `u8` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:13:13
+ |
+LL | let _ = x1 as f64;
+ | ^^^^^^^^^ help: try: `f64::from(x1)`
+
+error: casting `i16` to `f32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:15:13
+ |
+LL | let _ = x2 as f32;
+ | ^^^^^^^^^ help: try: `f32::from(x2)`
+
+error: casting `i16` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:16:13
+ |
+LL | let _ = x2 as f64;
+ | ^^^^^^^^^ help: try: `f64::from(x2)`
+
+error: casting `u16` to `f32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:18:13
+ |
+LL | let _ = x3 as f32;
+ | ^^^^^^^^^ help: try: `f32::from(x3)`
+
+error: casting `u16` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:19:13
+ |
+LL | let _ = x3 as f64;
+ | ^^^^^^^^^ help: try: `f64::from(x3)`
+
+error: casting `i32` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:21:13
+ |
+LL | let _ = x4 as f64;
+ | ^^^^^^^^^ help: try: `f64::from(x4)`
+
+error: casting `u32` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:23:13
+ |
+LL | let _ = x5 as f64;
+ | ^^^^^^^^^ help: try: `f64::from(x5)`
+
+error: casting `f32` to `f64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_float.rs:26:13
+ |
+LL | let _ = 1.0f32 as f64;
+ | ^^^^^^^^^^^^^ help: try: `f64::from(1.0f32)`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_lossless_integer.fixed b/src/tools/clippy/tests/ui/cast_lossless_integer.fixed
new file mode 100644
index 000000000..72a708b40
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_integer.fixed
@@ -0,0 +1,47 @@
+// run-rustfix
+
+#![allow(clippy::no_effect, clippy::unnecessary_operation, dead_code)]
+#![warn(clippy::cast_lossless)]
+
+fn main() {
+ // Test clippy::cast_lossless with casts to integer types
+ let _ = i16::from(1i8);
+ let _ = i32::from(1i8);
+ let _ = i64::from(1i8);
+ let _ = i16::from(1u8);
+ let _ = i32::from(1u8);
+ let _ = i64::from(1u8);
+ let _ = u16::from(1u8);
+ let _ = u32::from(1u8);
+ let _ = u64::from(1u8);
+ let _ = i32::from(1i16);
+ let _ = i64::from(1i16);
+ let _ = i32::from(1u16);
+ let _ = i64::from(1u16);
+ let _ = u32::from(1u16);
+ let _ = u64::from(1u16);
+ let _ = i64::from(1i32);
+ let _ = i64::from(1u32);
+ let _ = u64::from(1u32);
+
+ // Test with an expression wrapped in parens
+ let _ = u16::from(1u8 + 1u8);
+}
+
+// The lint would suggest using `u32::from(input)` here but the `XX::from` function is not const,
+// so we skip the lint if the expression is in a const fn.
+// See #3656
+const fn abc(input: u16) -> u32 {
+ input as u32
+}
+
+// Same as the above issue. We can't suggest `::from` in const fns in impls
+mod cast_lossless_in_impl {
+ struct A;
+
+ impl A {
+ pub const fn convert(x: u32) -> u64 {
+ x as u64
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_lossless_integer.rs b/src/tools/clippy/tests/ui/cast_lossless_integer.rs
new file mode 100644
index 000000000..34bb47181
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_integer.rs
@@ -0,0 +1,47 @@
+// run-rustfix
+
+#![allow(clippy::no_effect, clippy::unnecessary_operation, dead_code)]
+#![warn(clippy::cast_lossless)]
+
+fn main() {
+ // Test clippy::cast_lossless with casts to integer types
+ let _ = 1i8 as i16;
+ let _ = 1i8 as i32;
+ let _ = 1i8 as i64;
+ let _ = 1u8 as i16;
+ let _ = 1u8 as i32;
+ let _ = 1u8 as i64;
+ let _ = 1u8 as u16;
+ let _ = 1u8 as u32;
+ let _ = 1u8 as u64;
+ let _ = 1i16 as i32;
+ let _ = 1i16 as i64;
+ let _ = 1u16 as i32;
+ let _ = 1u16 as i64;
+ let _ = 1u16 as u32;
+ let _ = 1u16 as u64;
+ let _ = 1i32 as i64;
+ let _ = 1u32 as i64;
+ let _ = 1u32 as u64;
+
+ // Test with an expression wrapped in parens
+ let _ = (1u8 + 1u8) as u16;
+}
+
+// The lint would suggest using `u32::from(input)` here but the `XX::from` function is not const,
+// so we skip the lint if the expression is in a const fn.
+// See #3656
+const fn abc(input: u16) -> u32 {
+ input as u32
+}
+
+// Same as the above issue. We can't suggest `::from` in const fns in impls
+mod cast_lossless_in_impl {
+ struct A;
+
+ impl A {
+ pub const fn convert(x: u32) -> u64 {
+ x as u64
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_lossless_integer.stderr b/src/tools/clippy/tests/ui/cast_lossless_integer.stderr
new file mode 100644
index 000000000..721b94876
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_lossless_integer.stderr
@@ -0,0 +1,118 @@
+error: casting `i8` to `i16` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:8:13
+ |
+LL | let _ = 1i8 as i16;
+ | ^^^^^^^^^^ help: try: `i16::from(1i8)`
+ |
+ = note: `-D clippy::cast-lossless` implied by `-D warnings`
+
+error: casting `i8` to `i32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:9:13
+ |
+LL | let _ = 1i8 as i32;
+ | ^^^^^^^^^^ help: try: `i32::from(1i8)`
+
+error: casting `i8` to `i64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:10:13
+ |
+LL | let _ = 1i8 as i64;
+ | ^^^^^^^^^^ help: try: `i64::from(1i8)`
+
+error: casting `u8` to `i16` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:11:13
+ |
+LL | let _ = 1u8 as i16;
+ | ^^^^^^^^^^ help: try: `i16::from(1u8)`
+
+error: casting `u8` to `i32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:12:13
+ |
+LL | let _ = 1u8 as i32;
+ | ^^^^^^^^^^ help: try: `i32::from(1u8)`
+
+error: casting `u8` to `i64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:13:13
+ |
+LL | let _ = 1u8 as i64;
+ | ^^^^^^^^^^ help: try: `i64::from(1u8)`
+
+error: casting `u8` to `u16` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:14:13
+ |
+LL | let _ = 1u8 as u16;
+ | ^^^^^^^^^^ help: try: `u16::from(1u8)`
+
+error: casting `u8` to `u32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:15:13
+ |
+LL | let _ = 1u8 as u32;
+ | ^^^^^^^^^^ help: try: `u32::from(1u8)`
+
+error: casting `u8` to `u64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:16:13
+ |
+LL | let _ = 1u8 as u64;
+ | ^^^^^^^^^^ help: try: `u64::from(1u8)`
+
+error: casting `i16` to `i32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:17:13
+ |
+LL | let _ = 1i16 as i32;
+ | ^^^^^^^^^^^ help: try: `i32::from(1i16)`
+
+error: casting `i16` to `i64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:18:13
+ |
+LL | let _ = 1i16 as i64;
+ | ^^^^^^^^^^^ help: try: `i64::from(1i16)`
+
+error: casting `u16` to `i32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:19:13
+ |
+LL | let _ = 1u16 as i32;
+ | ^^^^^^^^^^^ help: try: `i32::from(1u16)`
+
+error: casting `u16` to `i64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:20:13
+ |
+LL | let _ = 1u16 as i64;
+ | ^^^^^^^^^^^ help: try: `i64::from(1u16)`
+
+error: casting `u16` to `u32` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:21:13
+ |
+LL | let _ = 1u16 as u32;
+ | ^^^^^^^^^^^ help: try: `u32::from(1u16)`
+
+error: casting `u16` to `u64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:22:13
+ |
+LL | let _ = 1u16 as u64;
+ | ^^^^^^^^^^^ help: try: `u64::from(1u16)`
+
+error: casting `i32` to `i64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:23:13
+ |
+LL | let _ = 1i32 as i64;
+ | ^^^^^^^^^^^ help: try: `i64::from(1i32)`
+
+error: casting `u32` to `i64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:24:13
+ |
+LL | let _ = 1u32 as i64;
+ | ^^^^^^^^^^^ help: try: `i64::from(1u32)`
+
+error: casting `u32` to `u64` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:25:13
+ |
+LL | let _ = 1u32 as u64;
+ | ^^^^^^^^^^^ help: try: `u64::from(1u32)`
+
+error: casting `u8` to `u16` may become silently lossy if you later change the type
+ --> $DIR/cast_lossless_integer.rs:28:13
+ |
+LL | let _ = (1u8 + 1u8) as u16;
+ | ^^^^^^^^^^^^^^^^^^ help: try: `u16::from(1u8 + 1u8)`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_ref_to_mut.rs b/src/tools/clippy/tests/ui/cast_ref_to_mut.rs
new file mode 100644
index 000000000..c48a734ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_ref_to_mut.rs
@@ -0,0 +1,31 @@
+#![warn(clippy::cast_ref_to_mut)]
+#![allow(clippy::no_effect, clippy::borrow_as_ptr)]
+
+extern "C" {
+ // N.B., mutability can be easily incorrect in FFI calls -- as
+ // in C, the default is mutable pointers.
+ fn ffi(c: *mut u8);
+ fn int_ffi(c: *mut i32);
+}
+
+fn main() {
+ let s = String::from("Hello");
+ let a = &s;
+ unsafe {
+ let num = &3i32;
+ let mut_num = &mut 3i32;
+ // Should be warned against
+ (*(a as *const _ as *mut String)).push_str(" world");
+ *(a as *const _ as *mut _) = String::from("Replaced");
+ *(a as *const _ as *mut String) += " world";
+ // Shouldn't be warned against
+ println!("{}", *(num as *const _ as *const i16));
+ println!("{}", *(mut_num as *mut _ as *mut i16));
+ ffi(a.as_ptr() as *mut _);
+ int_ffi(num as *const _ as *mut _);
+ int_ffi(&3 as *const _ as *mut _);
+ let mut value = 3;
+ let value: *const i32 = &mut value;
+ *(value as *const i16 as *mut i16) = 42;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_ref_to_mut.stderr b/src/tools/clippy/tests/ui/cast_ref_to_mut.stderr
new file mode 100644
index 000000000..aacd99437
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_ref_to_mut.stderr
@@ -0,0 +1,22 @@
+error: casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`
+ --> $DIR/cast_ref_to_mut.rs:18:9
+ |
+LL | (*(a as *const _ as *mut String)).push_str(" world");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-ref-to-mut` implied by `-D warnings`
+
+error: casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`
+ --> $DIR/cast_ref_to_mut.rs:19:9
+ |
+LL | *(a as *const _ as *mut _) = String::from("Replaced");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`
+ --> $DIR/cast_ref_to_mut.rs:20:9
+ |
+LL | *(a as *const _ as *mut String) += " world";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_size.rs b/src/tools/clippy/tests/ui/cast_size.rs
new file mode 100644
index 000000000..595109be4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_size.rs
@@ -0,0 +1,35 @@
+// ignore-32bit
+#[warn(
+ clippy::cast_precision_loss,
+ clippy::cast_possible_truncation,
+ clippy::cast_sign_loss,
+ clippy::cast_possible_wrap,
+ clippy::cast_lossless
+)]
+#[allow(clippy::no_effect, clippy::unnecessary_operation)]
+fn main() {
+ // Casting from *size
+ 1isize as i8;
+ let x0 = 1isize;
+ let x1 = 1usize;
+ x0 as f64;
+ x1 as f64;
+ x0 as f32;
+ x1 as f32;
+ 1isize as i32;
+ 1isize as u32;
+ 1usize as u32;
+ 1usize as i32;
+ // Casting to *size
+ 1i64 as isize;
+ 1i64 as usize;
+ 1u64 as isize;
+ 1u64 as usize;
+ 1u32 as isize;
+ 1u32 as usize; // Should not trigger any lint
+ 1i32 as isize; // Neither should this
+ 1i32 as usize;
+ // Big integer literal to float
+ 999_999_999 as f32;
+ 9_999_999_999_999_999usize as f64;
+}
diff --git a/src/tools/clippy/tests/ui/cast_size.stderr b/src/tools/clippy/tests/ui/cast_size.stderr
new file mode 100644
index 000000000..95552f2e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_size.stderr
@@ -0,0 +1,116 @@
+error: casting `isize` to `i8` may truncate the value
+ --> $DIR/cast_size.rs:12:5
+ |
+LL | 1isize as i8;
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-possible-truncation` implied by `-D warnings`
+
+error: casting `isize` to `f64` causes a loss of precision on targets with 64-bit wide pointers (`isize` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast_size.rs:15:5
+ |
+LL | x0 as f64;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::cast-precision-loss` implied by `-D warnings`
+
+error: casting `usize` to `f64` causes a loss of precision on targets with 64-bit wide pointers (`usize` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast_size.rs:16:5
+ |
+LL | x1 as f64;
+ | ^^^^^^^^^
+
+error: casting `isize` to `f32` causes a loss of precision (`isize` is 32 or 64 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast_size.rs:17:5
+ |
+LL | x0 as f32;
+ | ^^^^^^^^^
+
+error: casting `usize` to `f32` causes a loss of precision (`usize` is 32 or 64 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast_size.rs:18:5
+ |
+LL | x1 as f32;
+ | ^^^^^^^^^
+
+error: casting `isize` to `i32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size.rs:19:5
+ |
+LL | 1isize as i32;
+ | ^^^^^^^^^^^^^
+
+error: casting `isize` to `u32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size.rs:20:5
+ |
+LL | 1isize as u32;
+ | ^^^^^^^^^^^^^
+
+error: casting `usize` to `u32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size.rs:21:5
+ |
+LL | 1usize as u32;
+ | ^^^^^^^^^^^^^
+
+error: casting `usize` to `i32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size.rs:22:5
+ |
+LL | 1usize as i32;
+ | ^^^^^^^^^^^^^
+
+error: casting `usize` to `i32` may wrap around the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size.rs:22:5
+ |
+LL | 1usize as i32;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-possible-wrap` implied by `-D warnings`
+
+error: casting `i64` to `isize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size.rs:24:5
+ |
+LL | 1i64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `i64` to `usize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size.rs:25:5
+ |
+LL | 1i64 as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u64` to `isize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size.rs:26:5
+ |
+LL | 1u64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u64` to `isize` may wrap around the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size.rs:26:5
+ |
+LL | 1u64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u64` to `usize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size.rs:27:5
+ |
+LL | 1u64 as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u32` to `isize` may wrap around the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size.rs:28:5
+ |
+LL | 1u32 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `i32` to `f32` causes a loss of precision (`i32` is 32 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast_size.rs:33:5
+ |
+LL | 999_999_999 as f32;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: casting `usize` to `f64` causes a loss of precision on targets with 64-bit wide pointers (`usize` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast_size.rs:34:5
+ |
+LL | 9_999_999_999_999_999usize as f64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_size_32bit.rs b/src/tools/clippy/tests/ui/cast_size_32bit.rs
new file mode 100644
index 000000000..99aac6dec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_size_32bit.rs
@@ -0,0 +1,35 @@
+// ignore-64bit
+#[warn(
+ clippy::cast_precision_loss,
+ clippy::cast_possible_truncation,
+ clippy::cast_sign_loss,
+ clippy::cast_possible_wrap,
+ clippy::cast_lossless
+)]
+#[allow(clippy::no_effect, clippy::unnecessary_operation)]
+fn main() {
+ // Casting from *size
+ 1isize as i8;
+ let x0 = 1isize;
+ let x1 = 1usize;
+ x0 as f64;
+ x1 as f64;
+ x0 as f32;
+ x1 as f32;
+ 1isize as i32;
+ 1isize as u32;
+ 1usize as u32;
+ 1usize as i32;
+ // Casting to *size
+ 1i64 as isize;
+ 1i64 as usize;
+ 1u64 as isize;
+ 1u64 as usize;
+ 1u32 as isize;
+ 1u32 as usize; // Should not trigger any lint
+ 1i32 as isize; // Neither should this
+ 1i32 as usize;
+ // Big integer literal to float
+ 999_999_999 as f32;
+ 3_999_999_999usize as f64;
+}
diff --git a/src/tools/clippy/tests/ui/cast_size_32bit.stderr b/src/tools/clippy/tests/ui/cast_size_32bit.stderr
new file mode 100644
index 000000000..8990c3ba7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_size_32bit.stderr
@@ -0,0 +1,118 @@
+error: casting `isize` to `i8` may truncate the value
+ --> $DIR/cast_size_32bit.rs:12:5
+ |
+LL | 1isize as i8;
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-possible-truncation` implied by `-D warnings`
+
+error: casting `isize` to `f64` causes a loss of precision on targets with 64-bit wide pointers (`isize` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast_size_32bit.rs:15:5
+ |
+LL | x0 as f64;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::cast-precision-loss` implied by `-D warnings`
+
+error: casting `usize` to `f64` causes a loss of precision on targets with 64-bit wide pointers (`usize` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide)
+ --> $DIR/cast_size_32bit.rs:16:5
+ |
+LL | x1 as f64;
+ | ^^^^^^^^^
+
+error: casting `isize` to `f32` causes a loss of precision (`isize` is 32 or 64 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast_size_32bit.rs:17:5
+ |
+LL | x0 as f32;
+ | ^^^^^^^^^
+
+error: casting `usize` to `f32` causes a loss of precision (`usize` is 32 or 64 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast_size_32bit.rs:18:5
+ |
+LL | x1 as f32;
+ | ^^^^^^^^^
+
+error: casting `isize` to `i32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:19:5
+ |
+LL | 1isize as i32;
+ | ^^^^^^^^^^^^^
+
+error: casting `isize` to `u32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:20:5
+ |
+LL | 1isize as u32;
+ | ^^^^^^^^^^^^^
+
+error: casting `usize` to `u32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:21:5
+ |
+LL | 1usize as u32;
+ | ^^^^^^^^^^^^^
+
+error: casting `usize` to `i32` may truncate the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:22:5
+ |
+LL | 1usize as i32;
+ | ^^^^^^^^^^^^^
+
+error: casting `usize` to `i32` may wrap around the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:22:5
+ |
+LL | 1usize as i32;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cast-possible-wrap` implied by `-D warnings`
+
+error: casting `i64` to `isize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:24:5
+ |
+LL | 1i64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `i64` to `usize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:25:5
+ |
+LL | 1i64 as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u64` to `isize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:26:5
+ |
+LL | 1u64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u64` to `isize` may wrap around the value on targets with 64-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:26:5
+ |
+LL | 1u64 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u64` to `usize` may truncate the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:27:5
+ |
+LL | 1u64 as usize;
+ | ^^^^^^^^^^^^^
+
+error: casting `u32` to `isize` may wrap around the value on targets with 32-bit wide pointers
+ --> $DIR/cast_size_32bit.rs:28:5
+ |
+LL | 1u32 as isize;
+ | ^^^^^^^^^^^^^
+
+error: casting `i32` to `f32` causes a loss of precision (`i32` is 32 bits wide, but `f32`'s mantissa is only 23 bits wide)
+ --> $DIR/cast_size_32bit.rs:33:5
+ |
+LL | 999_999_999 as f32;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: casting integer literal to `f64` is unnecessary
+ --> $DIR/cast_size_32bit.rs:34:5
+ |
+LL | 3_999_999_999usize as f64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `3_999_999_999_f64`
+ |
+ = note: `-D clippy::unnecessary-cast` implied by `-D warnings`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cast_slice_different_sizes.rs b/src/tools/clippy/tests/ui/cast_slice_different_sizes.rs
new file mode 100644
index 000000000..24d7eb28a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_slice_different_sizes.rs
@@ -0,0 +1,82 @@
+#![allow(clippy::let_unit_value)]
+
+fn main() {
+ let x: [i32; 3] = [1_i32, 2, 3];
+ let r_x = &x;
+ // Check casting through multiple bindings
+ // Because it's separate, it does not check the cast back to something of the same size
+ let a = r_x as *const [i32];
+ let b = a as *const [u8];
+ let c = b as *const [u32];
+
+ // loses data
+ let loss = r_x as *const [i32] as *const [u8];
+
+ // Cast back to same size but different type loses no data, just type conversion
+ // This is weird code but there's no reason for this lint specifically to fire *twice* on it
+ let restore = r_x as *const [i32] as *const [u8] as *const [u32];
+
+ // Check casting through blocks is detected
+ let loss_block_1 = { r_x as *const [i32] } as *const [u8];
+ let loss_block_2 = {
+ let _ = ();
+ r_x as *const [i32]
+ } as *const [u8];
+
+ // Check that restores of the same size are detected through blocks
+ let restore_block_1 = { r_x as *const [i32] } as *const [u8] as *const [u32];
+ let restore_block_2 = { ({ r_x as *const [i32] }) as *const [u8] } as *const [u32];
+ let restore_block_3 = {
+ let _ = ();
+ ({
+ let _ = ();
+ r_x as *const [i32]
+ }) as *const [u8]
+ } as *const [u32];
+
+ // Check that the result of a long chain of casts is detected
+ let long_chain_loss = r_x as *const [i32] as *const [u32] as *const [u16] as *const [i8] as *const [u8];
+ let long_chain_restore =
+ r_x as *const [i32] as *const [u32] as *const [u16] as *const [i8] as *const [u8] as *const [u32];
+}
+
+// foo and foo2 should not fire, they're the same size
+fn foo(x: *mut [u8]) -> *mut [u8] {
+ x as *mut [u8]
+}
+
+fn foo2(x: *mut [u8]) -> *mut [u8] {
+ x as *mut _
+}
+
+// Test that casts as part of function returns work
+fn bar(x: *mut [u16]) -> *mut [u8] {
+ x as *mut [u8]
+}
+
+fn uwu(x: *mut [u16]) -> *mut [u8] {
+ x as *mut _
+}
+
+fn bar2(x: *mut [u16]) -> *mut [u8] {
+ x as _
+}
+
+// constify
+fn bar3(x: *mut [u16]) -> *const [u8] {
+ x as _
+}
+
+// unconstify
+fn bar4(x: *const [u16]) -> *mut [u8] {
+ x as _
+}
+
+// function returns plus blocks
+fn blocks(x: *mut [u16]) -> *mut [u8] {
+ ({ x }) as _
+}
+
+fn more_blocks(x: *mut [u16]) -> *mut [u8] {
+ { ({ x }) as _ }
+}
diff --git a/src/tools/clippy/tests/ui/cast_slice_different_sizes.stderr b/src/tools/clippy/tests/ui/cast_slice_different_sizes.stderr
new file mode 100644
index 000000000..40721dcd0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cast_slice_different_sizes.stderr
@@ -0,0 +1,121 @@
+error: casting between raw pointers to `[i32]` (element size 4) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:9:13
+ |
+LL | let b = a as *const [u8];
+ | ^^^^^^^^^^^^^^^^ help: replace with `ptr::slice_from_raw_parts`: `core::ptr::slice_from_raw_parts(a as *const u8, ..)`
+ |
+ = note: `#[deny(clippy::cast_slice_different_sizes)]` on by default
+
+error: casting between raw pointers to `[u8]` (element size 1) and `[u32]` (element size 4) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:10:13
+ |
+LL | let c = b as *const [u32];
+ | ^^^^^^^^^^^^^^^^^ help: replace with `ptr::slice_from_raw_parts`: `core::ptr::slice_from_raw_parts(b as *const u32, ..)`
+
+error: casting between raw pointers to `[i32]` (element size 4) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:13:16
+ |
+LL | let loss = r_x as *const [i32] as *const [u8];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `ptr::slice_from_raw_parts`: `core::ptr::slice_from_raw_parts(r_x as *const [i32] as *const u8, ..)`
+
+error: casting between raw pointers to `[i32]` (element size 4) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:20:24
+ |
+LL | let loss_block_1 = { r_x as *const [i32] } as *const [u8];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `ptr::slice_from_raw_parts`: `core::ptr::slice_from_raw_parts({ r_x as *const [i32] } as *const u8, ..)`
+
+error: casting between raw pointers to `[i32]` (element size 4) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:21:24
+ |
+LL | let loss_block_2 = {
+ | ________________________^
+LL | | let _ = ();
+LL | | r_x as *const [i32]
+LL | | } as *const [u8];
+ | |____________________^
+ |
+help: replace with `ptr::slice_from_raw_parts`
+ |
+LL ~ let loss_block_2 = core::ptr::slice_from_raw_parts({
+LL + let _ = ();
+LL + r_x as *const [i32]
+LL ~ } as *const u8, ..);
+ |
+
+error: casting between raw pointers to `[i32]` (element size 4) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:38:27
+ |
+LL | let long_chain_loss = r_x as *const [i32] as *const [u32] as *const [u16] as *const [i8] as *const [u8];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `ptr::slice_from_raw_parts`: `core::ptr::slice_from_raw_parts(r_x as *const [i32] as *const u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:53:36
+ |
+LL | fn bar(x: *mut [u16]) -> *mut [u8] {
+ | ____________________________________^
+LL | | x as *mut [u8]
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(x as *mut u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:57:36
+ |
+LL | fn uwu(x: *mut [u16]) -> *mut [u8] {
+ | ____________________________________^
+LL | | x as *mut _
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(x as *mut u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:61:37
+ |
+LL | fn bar2(x: *mut [u16]) -> *mut [u8] {
+ | _____________________________________^
+LL | | x as _
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(x as *mut u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:66:39
+ |
+LL | fn bar3(x: *mut [u16]) -> *const [u8] {
+ | _______________________________________^
+LL | | x as _
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts`: `core::ptr::slice_from_raw_parts(x as *const u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:71:39
+ |
+LL | fn bar4(x: *const [u16]) -> *mut [u8] {
+ | _______________________________________^
+LL | | x as _
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(x as *mut u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:76:39
+ |
+LL | fn blocks(x: *mut [u16]) -> *mut [u8] {
+ | _______________________________________^
+LL | | ({ x }) as _
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(({ x }) as *mut u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:80:44
+ |
+LL | fn more_blocks(x: *mut [u16]) -> *mut [u8] {
+ | ____________________________________________^
+LL | | { ({ x }) as _ }
+LL | | }
+ | |_^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(({ x }) as *mut u8, ..)`
+
+error: casting between raw pointers to `[u16]` (element size 2) and `[u8]` (element size 1) does not adjust the count
+ --> $DIR/cast_slice_different_sizes.rs:81:5
+ |
+LL | { ({ x }) as _ }
+ | ^^^^^^^^^^^^^^^^ help: replace with `ptr::slice_from_raw_parts_mut`: `core::ptr::slice_from_raw_parts_mut(({ x }) as *mut u8, ..)`
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cfg_attr_rustfmt.fixed b/src/tools/clippy/tests/ui/cfg_attr_rustfmt.fixed
new file mode 100644
index 000000000..061a4ab9b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cfg_attr_rustfmt.fixed
@@ -0,0 +1,31 @@
+// run-rustfix
+#![feature(stmt_expr_attributes)]
+
+#![allow(unused, clippy::no_effect, clippy::unnecessary_operation)]
+#![warn(clippy::deprecated_cfg_attr)]
+
+// This doesn't get linted, see known problems
+#![cfg_attr(rustfmt, rustfmt_skip)]
+
+#[rustfmt::skip]
+trait Foo
+{
+fn foo(
+);
+}
+
+fn skip_on_statements() {
+ #[rustfmt::skip]
+ 5+3;
+}
+
+#[rustfmt::skip]
+fn main() {
+ foo::f();
+}
+
+mod foo {
+ #![cfg_attr(rustfmt, rustfmt_skip)]
+
+ pub fn f() {}
+}
diff --git a/src/tools/clippy/tests/ui/cfg_attr_rustfmt.rs b/src/tools/clippy/tests/ui/cfg_attr_rustfmt.rs
new file mode 100644
index 000000000..035169fab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cfg_attr_rustfmt.rs
@@ -0,0 +1,31 @@
+// run-rustfix
+#![feature(stmt_expr_attributes)]
+
+#![allow(unused, clippy::no_effect, clippy::unnecessary_operation)]
+#![warn(clippy::deprecated_cfg_attr)]
+
+// This doesn't get linted, see known problems
+#![cfg_attr(rustfmt, rustfmt_skip)]
+
+#[rustfmt::skip]
+trait Foo
+{
+fn foo(
+);
+}
+
+fn skip_on_statements() {
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ 5+3;
+}
+
+#[cfg_attr(rustfmt, rustfmt_skip)]
+fn main() {
+ foo::f();
+}
+
+mod foo {
+ #![cfg_attr(rustfmt, rustfmt_skip)]
+
+ pub fn f() {}
+}
diff --git a/src/tools/clippy/tests/ui/cfg_attr_rustfmt.stderr b/src/tools/clippy/tests/ui/cfg_attr_rustfmt.stderr
new file mode 100644
index 000000000..c1efd47db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cfg_attr_rustfmt.stderr
@@ -0,0 +1,16 @@
+error: `cfg_attr` is deprecated for rustfmt and got replaced by tool attributes
+ --> $DIR/cfg_attr_rustfmt.rs:18:5
+ |
+LL | #[cfg_attr(rustfmt, rustfmt::skip)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `#[rustfmt::skip]`
+ |
+ = note: `-D clippy::deprecated-cfg-attr` implied by `-D warnings`
+
+error: `cfg_attr` is deprecated for rustfmt and got replaced by tool attributes
+ --> $DIR/cfg_attr_rustfmt.rs:22:1
+ |
+LL | #[cfg_attr(rustfmt, rustfmt_skip)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `#[rustfmt::skip]`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/char_lit_as_u8.rs b/src/tools/clippy/tests/ui/char_lit_as_u8.rs
new file mode 100644
index 000000000..0a53a3d64
--- /dev/null
+++ b/src/tools/clippy/tests/ui/char_lit_as_u8.rs
@@ -0,0 +1,5 @@
+#![warn(clippy::char_lit_as_u8)]
+
+fn main() {
+ let _ = '❤' as u8; // no suggestion, since a byte literal won't work.
+}
diff --git a/src/tools/clippy/tests/ui/char_lit_as_u8.stderr b/src/tools/clippy/tests/ui/char_lit_as_u8.stderr
new file mode 100644
index 000000000..b9836d2f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/char_lit_as_u8.stderr
@@ -0,0 +1,11 @@
+error: casting a character literal to `u8` truncates
+ --> $DIR/char_lit_as_u8.rs:4:13
+ |
+LL | let _ = '❤' as u8; // no suggestion, since a byte literal won't work.
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::char-lit-as-u8` implied by `-D warnings`
+ = note: `char` is four bytes wide, but `u8` is a single byte
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.fixed b/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.fixed
new file mode 100644
index 000000000..3dc3cb4e7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.fixed
@@ -0,0 +1,10 @@
+// run-rustfix
+
+#![warn(clippy::char_lit_as_u8)]
+
+fn main() {
+ let _ = b'a';
+ let _ = b'\n';
+ let _ = b'\0';
+ let _ = b'\x01';
+}
diff --git a/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.rs b/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.rs
new file mode 100644
index 000000000..d379a0234
--- /dev/null
+++ b/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.rs
@@ -0,0 +1,10 @@
+// run-rustfix
+
+#![warn(clippy::char_lit_as_u8)]
+
+fn main() {
+ let _ = 'a' as u8;
+ let _ = '\n' as u8;
+ let _ = '\0' as u8;
+ let _ = '\x01' as u8;
+}
diff --git a/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.stderr b/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.stderr
new file mode 100644
index 000000000..bf7cb1607
--- /dev/null
+++ b/src/tools/clippy/tests/ui/char_lit_as_u8_suggestions.stderr
@@ -0,0 +1,35 @@
+error: casting a character literal to `u8` truncates
+ --> $DIR/char_lit_as_u8_suggestions.rs:6:13
+ |
+LL | let _ = 'a' as u8;
+ | ^^^^^^^^^ help: use a byte literal instead: `b'a'`
+ |
+ = note: `-D clippy::char-lit-as-u8` implied by `-D warnings`
+ = note: `char` is four bytes wide, but `u8` is a single byte
+
+error: casting a character literal to `u8` truncates
+ --> $DIR/char_lit_as_u8_suggestions.rs:7:13
+ |
+LL | let _ = '/n' as u8;
+ | ^^^^^^^^^^ help: use a byte literal instead: `b'/n'`
+ |
+ = note: `char` is four bytes wide, but `u8` is a single byte
+
+error: casting a character literal to `u8` truncates
+ --> $DIR/char_lit_as_u8_suggestions.rs:8:13
+ |
+LL | let _ = '/0' as u8;
+ | ^^^^^^^^^^ help: use a byte literal instead: `b'/0'`
+ |
+ = note: `char` is four bytes wide, but `u8` is a single byte
+
+error: casting a character literal to `u8` truncates
+ --> $DIR/char_lit_as_u8_suggestions.rs:9:13
+ |
+LL | let _ = '/x01' as u8;
+ | ^^^^^^^^^^^^ help: use a byte literal instead: `b'/x01'`
+ |
+ = note: `char` is four bytes wide, but `u8` is a single byte
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/checked_conversions.fixed b/src/tools/clippy/tests/ui/checked_conversions.fixed
new file mode 100644
index 000000000..cb7100bc9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_conversions.fixed
@@ -0,0 +1,79 @@
+// run-rustfix
+
+#![allow(
+ clippy::cast_lossless,
+ // Int::max_value will be deprecated in the future
+ deprecated,
+)]
+#![warn(clippy::checked_conversions)]
+
+// Positive tests
+
+// Signed to unsigned
+
+pub fn i64_to_u32(value: i64) {
+ let _ = u32::try_from(value).is_ok();
+ let _ = u32::try_from(value).is_ok();
+}
+
+pub fn i64_to_u16(value: i64) {
+ let _ = u16::try_from(value).is_ok();
+ let _ = u16::try_from(value).is_ok();
+}
+
+pub fn isize_to_u8(value: isize) {
+ let _ = u8::try_from(value).is_ok();
+ let _ = u8::try_from(value).is_ok();
+}
+
+// Signed to signed
+
+pub fn i64_to_i32(value: i64) {
+ let _ = i32::try_from(value).is_ok();
+ let _ = i32::try_from(value).is_ok();
+}
+
+pub fn i64_to_i16(value: i64) {
+ let _ = i16::try_from(value).is_ok();
+ let _ = i16::try_from(value).is_ok();
+}
+
+// Unsigned to X
+
+pub fn u32_to_i32(value: u32) {
+ let _ = i32::try_from(value).is_ok();
+ let _ = i32::try_from(value).is_ok();
+}
+
+pub fn usize_to_isize(value: usize) {
+ let _ = isize::try_from(value).is_ok() && value as i32 == 5;
+ let _ = isize::try_from(value).is_ok() && value as i32 == 5;
+}
+
+pub fn u32_to_u16(value: u32) {
+ let _ = u16::try_from(value).is_ok() && value as i32 == 5;
+ let _ = u16::try_from(value).is_ok() && value as i32 == 5;
+}
+
+// Negative tests
+
+pub fn no_i64_to_i32(value: i64) {
+ let _ = value <= (i32::max_value() as i64) && value >= 0;
+ let _ = value <= (i32::MAX as i64) && value >= 0;
+}
+
+pub fn no_isize_to_u8(value: isize) {
+ let _ = value <= (u8::max_value() as isize) && value >= (u8::min_value() as isize);
+ let _ = value <= (u8::MAX as isize) && value >= (u8::MIN as isize);
+}
+
+pub fn i8_to_u8(value: i8) {
+ let _ = value >= 0;
+}
+
+// Do not lint
+pub const fn issue_8898(i: u32) -> bool {
+ i <= i32::MAX as u32
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/checked_conversions.rs b/src/tools/clippy/tests/ui/checked_conversions.rs
new file mode 100644
index 000000000..ed4e06923
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_conversions.rs
@@ -0,0 +1,79 @@
+// run-rustfix
+
+#![allow(
+ clippy::cast_lossless,
+ // Int::max_value will be deprecated in the future
+ deprecated,
+)]
+#![warn(clippy::checked_conversions)]
+
+// Positive tests
+
+// Signed to unsigned
+
+pub fn i64_to_u32(value: i64) {
+ let _ = value <= (u32::max_value() as i64) && value >= 0;
+ let _ = value <= (u32::MAX as i64) && value >= 0;
+}
+
+pub fn i64_to_u16(value: i64) {
+ let _ = value <= i64::from(u16::max_value()) && value >= 0;
+ let _ = value <= i64::from(u16::MAX) && value >= 0;
+}
+
+pub fn isize_to_u8(value: isize) {
+ let _ = value <= (u8::max_value() as isize) && value >= 0;
+ let _ = value <= (u8::MAX as isize) && value >= 0;
+}
+
+// Signed to signed
+
+pub fn i64_to_i32(value: i64) {
+ let _ = value <= (i32::max_value() as i64) && value >= (i32::min_value() as i64);
+ let _ = value <= (i32::MAX as i64) && value >= (i32::MIN as i64);
+}
+
+pub fn i64_to_i16(value: i64) {
+ let _ = value <= i64::from(i16::max_value()) && value >= i64::from(i16::min_value());
+ let _ = value <= i64::from(i16::MAX) && value >= i64::from(i16::MIN);
+}
+
+// Unsigned to X
+
+pub fn u32_to_i32(value: u32) {
+ let _ = value <= i32::max_value() as u32;
+ let _ = value <= i32::MAX as u32;
+}
+
+pub fn usize_to_isize(value: usize) {
+ let _ = value <= isize::max_value() as usize && value as i32 == 5;
+ let _ = value <= isize::MAX as usize && value as i32 == 5;
+}
+
+pub fn u32_to_u16(value: u32) {
+ let _ = value <= u16::max_value() as u32 && value as i32 == 5;
+ let _ = value <= u16::MAX as u32 && value as i32 == 5;
+}
+
+// Negative tests
+
+pub fn no_i64_to_i32(value: i64) {
+ let _ = value <= (i32::max_value() as i64) && value >= 0;
+ let _ = value <= (i32::MAX as i64) && value >= 0;
+}
+
+pub fn no_isize_to_u8(value: isize) {
+ let _ = value <= (u8::max_value() as isize) && value >= (u8::min_value() as isize);
+ let _ = value <= (u8::MAX as isize) && value >= (u8::MIN as isize);
+}
+
+pub fn i8_to_u8(value: i8) {
+ let _ = value >= 0;
+}
+
+// Do not lint
+pub const fn issue_8898(i: u32) -> bool {
+ i <= i32::MAX as u32
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/checked_conversions.stderr b/src/tools/clippy/tests/ui/checked_conversions.stderr
new file mode 100644
index 000000000..2e5180405
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_conversions.stderr
@@ -0,0 +1,100 @@
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:15:13
+ |
+LL | let _ = value <= (u32::max_value() as i64) && value >= 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u32::try_from(value).is_ok()`
+ |
+ = note: `-D clippy::checked-conversions` implied by `-D warnings`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:16:13
+ |
+LL | let _ = value <= (u32::MAX as i64) && value >= 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u32::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:20:13
+ |
+LL | let _ = value <= i64::from(u16::max_value()) && value >= 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u16::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:21:13
+ |
+LL | let _ = value <= i64::from(u16::MAX) && value >= 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u16::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:25:13
+ |
+LL | let _ = value <= (u8::max_value() as isize) && value >= 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u8::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:26:13
+ |
+LL | let _ = value <= (u8::MAX as isize) && value >= 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u8::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:32:13
+ |
+LL | let _ = value <= (i32::max_value() as i64) && value >= (i32::min_value() as i64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `i32::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:33:13
+ |
+LL | let _ = value <= (i32::MAX as i64) && value >= (i32::MIN as i64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `i32::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:37:13
+ |
+LL | let _ = value <= i64::from(i16::max_value()) && value >= i64::from(i16::min_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `i16::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:38:13
+ |
+LL | let _ = value <= i64::from(i16::MAX) && value >= i64::from(i16::MIN);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `i16::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:44:13
+ |
+LL | let _ = value <= i32::max_value() as u32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `i32::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:45:13
+ |
+LL | let _ = value <= i32::MAX as u32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `i32::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:49:13
+ |
+LL | let _ = value <= isize::max_value() as usize && value as i32 == 5;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `isize::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:50:13
+ |
+LL | let _ = value <= isize::MAX as usize && value as i32 == 5;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `isize::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:54:13
+ |
+LL | let _ = value <= u16::max_value() as u32 && value as i32 == 5;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u16::try_from(value).is_ok()`
+
+error: checked cast can be simplified
+ --> $DIR/checked_conversions.rs:55:13
+ |
+LL | let _ = value <= u16::MAX as u32 && value as i32 == 5;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u16::try_from(value).is_ok()`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.rs b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.rs
new file mode 100644
index 000000000..ec082c73b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.rs
@@ -0,0 +1,54 @@
+#![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+#![allow(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+fn test_complex_conditions() {
+ let x: Result<(), ()> = Ok(());
+ let y: Result<(), ()> = Ok(());
+ if x.is_ok() && y.is_err() {
+ x.unwrap(); // unnecessary
+ x.unwrap_err(); // will panic
+ y.unwrap(); // will panic
+ y.unwrap_err(); // unnecessary
+ } else {
+ // not statically determinable whether any of the following will always succeed or always fail:
+ x.unwrap();
+ x.unwrap_err();
+ y.unwrap();
+ y.unwrap_err();
+ }
+
+ if x.is_ok() || y.is_ok() {
+ // not statically determinable whether any of the following will always succeed or always fail:
+ x.unwrap();
+ y.unwrap();
+ } else {
+ x.unwrap(); // will panic
+ x.unwrap_err(); // unnecessary
+ y.unwrap(); // will panic
+ y.unwrap_err(); // unnecessary
+ }
+ let z: Result<(), ()> = Ok(());
+ if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ x.unwrap(); // unnecessary
+ x.unwrap_err(); // will panic
+ y.unwrap(); // will panic
+ y.unwrap_err(); // unnecessary
+ z.unwrap(); // unnecessary
+ z.unwrap_err(); // will panic
+ }
+ if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ // not statically determinable whether any of the following will always succeed or always fail:
+ x.unwrap();
+ y.unwrap();
+ z.unwrap();
+ } else {
+ x.unwrap(); // will panic
+ x.unwrap_err(); // unnecessary
+ y.unwrap(); // unnecessary
+ y.unwrap_err(); // will panic
+ z.unwrap(); // will panic
+ z.unwrap_err(); // unnecessary
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.stderr b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.stderr
new file mode 100644
index 000000000..46c6f6970
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals.stderr
@@ -0,0 +1,211 @@
+error: called `unwrap` on `x` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:8:9
+ |
+LL | if x.is_ok() && y.is_err() {
+ | --------- the check is happening here
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/complex_conditionals.rs:1:35
+ |
+LL | #![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: try using `if let` or `match`
+
+error: this call to `unwrap_err()` will always panic
+ --> $DIR/complex_conditionals.rs:9:9
+ |
+LL | if x.is_ok() && y.is_err() {
+ | --------- because of this check
+LL | x.unwrap(); // unnecessary
+LL | x.unwrap_err(); // will panic
+ | ^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/complex_conditionals.rs:1:9
+ |
+LL | #![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals.rs:10:9
+ |
+LL | if x.is_ok() && y.is_err() {
+ | ---------- because of this check
+...
+LL | y.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `y` after checking its variant with `is_err`
+ --> $DIR/complex_conditionals.rs:11:9
+ |
+LL | if x.is_ok() && y.is_err() {
+ | ---------- the check is happening here
+...
+LL | y.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals.rs:25:9
+ |
+LL | if x.is_ok() || y.is_ok() {
+ | --------- because of this check
+...
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `x` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:26:9
+ |
+LL | if x.is_ok() || y.is_ok() {
+ | --------- the check is happening here
+...
+LL | x.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals.rs:27:9
+ |
+LL | if x.is_ok() || y.is_ok() {
+ | --------- because of this check
+...
+LL | y.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `y` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:28:9
+ |
+LL | if x.is_ok() || y.is_ok() {
+ | --------- the check is happening here
+...
+LL | y.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: called `unwrap` on `x` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:32:9
+ |
+LL | if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ | --------- the check is happening here
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: this call to `unwrap_err()` will always panic
+ --> $DIR/complex_conditionals.rs:33:9
+ |
+LL | if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ | --------- because of this check
+LL | x.unwrap(); // unnecessary
+LL | x.unwrap_err(); // will panic
+ | ^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals.rs:34:9
+ |
+LL | if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ | --------- because of this check
+...
+LL | y.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `y` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:35:9
+ |
+LL | if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ | --------- the check is happening here
+...
+LL | y.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: called `unwrap` on `z` after checking its variant with `is_err`
+ --> $DIR/complex_conditionals.rs:36:9
+ |
+LL | if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ | ---------- the check is happening here
+...
+LL | z.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: this call to `unwrap_err()` will always panic
+ --> $DIR/complex_conditionals.rs:37:9
+ |
+LL | if x.is_ok() && !(y.is_ok() || z.is_err()) {
+ | ---------- because of this check
+...
+LL | z.unwrap_err(); // will panic
+ | ^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals.rs:45:9
+ |
+LL | if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ | --------- because of this check
+...
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `x` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:46:9
+ |
+LL | if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ | --------- the check is happening here
+...
+LL | x.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: called `unwrap` on `y` after checking its variant with `is_ok`
+ --> $DIR/complex_conditionals.rs:47:9
+ |
+LL | if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ | --------- the check is happening here
+...
+LL | y.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: this call to `unwrap_err()` will always panic
+ --> $DIR/complex_conditionals.rs:48:9
+ |
+LL | if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ | --------- because of this check
+...
+LL | y.unwrap_err(); // will panic
+ | ^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals.rs:49:9
+ |
+LL | if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ | ---------- because of this check
+...
+LL | z.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `z` after checking its variant with `is_err`
+ --> $DIR/complex_conditionals.rs:50:9
+ |
+LL | if x.is_ok() || !(y.is_ok() && z.is_err()) {
+ | ---------- the check is happening here
+...
+LL | z.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+ |
+ = help: try using `if let` or `match`
+
+error: aborting due to 20 previous errors
+
diff --git a/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.rs b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.rs
new file mode 100644
index 000000000..043ea4148
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.rs
@@ -0,0 +1,15 @@
+#![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+#![allow(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+fn test_nested() {
+ fn nested() {
+ let x = Some(());
+ if x.is_some() {
+ x.unwrap(); // unnecessary
+ } else {
+ x.unwrap(); // will panic
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.stderr b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.stderr
new file mode 100644
index 000000000..542ab5330
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_unwrap/complex_conditionals_nested.stderr
@@ -0,0 +1,31 @@
+error: called `unwrap` on `x` after checking its variant with `is_some`
+ --> $DIR/complex_conditionals_nested.rs:8:13
+ |
+LL | if x.is_some() {
+ | -------------- help: try: `if let Some(..) = x`
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/complex_conditionals_nested.rs:1:35
+ |
+LL | #![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/complex_conditionals_nested.rs:10:13
+ |
+LL | if x.is_some() {
+ | ----------- because of this check
+...
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/complex_conditionals_nested.rs:1:9
+ |
+LL | #![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.rs b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.rs
new file mode 100644
index 000000000..82dce8197
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.rs
@@ -0,0 +1,102 @@
+#![feature(lint_reasons)]
+#![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+#![allow(clippy::if_same_then_else, clippy::branches_sharing_code)]
+
+macro_rules! m {
+ ($a:expr) => {
+ if $a.is_some() {
+ $a.unwrap(); // unnecessary
+ }
+ };
+}
+
+macro_rules! checks_in_param {
+ ($a:expr, $b:expr) => {
+ if $a {
+ $b;
+ }
+ };
+}
+
+macro_rules! checks_unwrap {
+ ($a:expr, $b:expr) => {
+ if $a.is_some() {
+ $b;
+ }
+ };
+}
+
+macro_rules! checks_some {
+ ($a:expr, $b:expr) => {
+ if $a {
+ $b.unwrap();
+ }
+ };
+}
+
+fn main() {
+ let x = Some(());
+ if x.is_some() {
+ x.unwrap(); // unnecessary
+ x.expect("an error message"); // unnecessary
+ } else {
+ x.unwrap(); // will panic
+ x.expect("an error message"); // will panic
+ }
+ if x.is_none() {
+ x.unwrap(); // will panic
+ } else {
+ x.unwrap(); // unnecessary
+ }
+ m!(x);
+ checks_in_param!(x.is_some(), x.unwrap()); // ok
+ checks_unwrap!(x, x.unwrap()); // ok
+ checks_some!(x.is_some(), x); // ok
+ let mut x: Result<(), ()> = Ok(());
+ if x.is_ok() {
+ x.unwrap(); // unnecessary
+ x.expect("an error message"); // unnecessary
+ x.unwrap_err(); // will panic
+ } else {
+ x.unwrap(); // will panic
+ x.expect("an error message"); // will panic
+ x.unwrap_err(); // unnecessary
+ }
+ if x.is_err() {
+ x.unwrap(); // will panic
+ x.unwrap_err(); // unnecessary
+ } else {
+ x.unwrap(); // unnecessary
+ x.unwrap_err(); // will panic
+ }
+ if x.is_ok() {
+ x = Err(());
+ // not unnecessary because of mutation of x
+ // it will always panic but the lint is not smart enough to see this (it only
+ // checks if conditions).
+ x.unwrap();
+ } else {
+ x = Ok(());
+ // not unnecessary because of mutation of x
+ // it will always panic but the lint is not smart enough to see this (it
+ // only checks if conditions).
+ x.unwrap_err();
+ }
+
+ assert!(x.is_ok(), "{:?}", x.unwrap_err()); // ok, it's a common test pattern
+}
+
+fn check_expect() {
+ let x = Some(());
+ if x.is_some() {
+ #[expect(clippy::unnecessary_unwrap)]
+ x.unwrap(); // unnecessary
+ #[expect(clippy::unnecessary_unwrap)]
+ x.expect("an error message"); // unnecessary
+ } else {
+ #[expect(clippy::panicking_unwrap)]
+ x.unwrap(); // will panic
+ #[expect(clippy::panicking_unwrap)]
+ x.expect("an error message"); // will panic
+ }
+}
diff --git a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr
new file mode 100644
index 000000000..ef6882742
--- /dev/null
+++ b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr
@@ -0,0 +1,167 @@
+error: called `unwrap` on `x` after checking its variant with `is_some`
+ --> $DIR/simple_conditionals.rs:40:9
+ |
+LL | if x.is_some() {
+ | -------------- help: try: `if let Some(..) = x`
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/simple_conditionals.rs:2:35
+ |
+LL | #![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: called `expect` on `x` after checking its variant with `is_some`
+ --> $DIR/simple_conditionals.rs:41:9
+ |
+LL | if x.is_some() {
+ | -------------- help: try: `if let Some(..) = x`
+LL | x.unwrap(); // unnecessary
+LL | x.expect("an error message"); // unnecessary
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/simple_conditionals.rs:43:9
+ |
+LL | if x.is_some() {
+ | ----------- because of this check
+...
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/simple_conditionals.rs:2:9
+ |
+LL | #![deny(clippy::panicking_unwrap, clippy::unnecessary_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `expect()` will always panic
+ --> $DIR/simple_conditionals.rs:44:9
+ |
+LL | if x.is_some() {
+ | ----------- because of this check
+...
+LL | x.expect("an error message"); // will panic
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/simple_conditionals.rs:47:9
+ |
+LL | if x.is_none() {
+ | ----------- because of this check
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap` on `x` after checking its variant with `is_none`
+ --> $DIR/simple_conditionals.rs:49:9
+ |
+LL | if x.is_none() {
+ | -------------- help: try: `if let Some(..) = x`
+...
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+
+error: called `unwrap` on `x` after checking its variant with `is_some`
+ --> $DIR/simple_conditionals.rs:8:13
+ |
+LL | if $a.is_some() {
+ | --------------- help: try: `if let Some(..) = x`
+LL | $a.unwrap(); // unnecessary
+ | ^^^^^^^^^^^
+...
+LL | m!(x);
+ | ----- in this macro invocation
+ |
+ = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: called `unwrap` on `x` after checking its variant with `is_ok`
+ --> $DIR/simple_conditionals.rs:57:9
+ |
+LL | if x.is_ok() {
+ | ------------ help: try: `if let Ok(..) = x`
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+
+error: called `expect` on `x` after checking its variant with `is_ok`
+ --> $DIR/simple_conditionals.rs:58:9
+ |
+LL | if x.is_ok() {
+ | ------------ help: try: `if let Ok(..) = x`
+LL | x.unwrap(); // unnecessary
+LL | x.expect("an error message"); // unnecessary
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `unwrap_err()` will always panic
+ --> $DIR/simple_conditionals.rs:59:9
+ |
+LL | if x.is_ok() {
+ | --------- because of this check
+...
+LL | x.unwrap_err(); // will panic
+ | ^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/simple_conditionals.rs:61:9
+ |
+LL | if x.is_ok() {
+ | --------- because of this check
+...
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: this call to `expect()` will always panic
+ --> $DIR/simple_conditionals.rs:62:9
+ |
+LL | if x.is_ok() {
+ | --------- because of this check
+...
+LL | x.expect("an error message"); // will panic
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: called `unwrap_err` on `x` after checking its variant with `is_ok`
+ --> $DIR/simple_conditionals.rs:63:9
+ |
+LL | if x.is_ok() {
+ | ------------ help: try: `if let Err(..) = x`
+...
+LL | x.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+
+error: this call to `unwrap()` will always panic
+ --> $DIR/simple_conditionals.rs:66:9
+ |
+LL | if x.is_err() {
+ | ---------- because of this check
+LL | x.unwrap(); // will panic
+ | ^^^^^^^^^^
+
+error: called `unwrap_err` on `x` after checking its variant with `is_err`
+ --> $DIR/simple_conditionals.rs:67:9
+ |
+LL | if x.is_err() {
+ | ------------- help: try: `if let Err(..) = x`
+LL | x.unwrap(); // will panic
+LL | x.unwrap_err(); // unnecessary
+ | ^^^^^^^^^^^^^^
+
+error: called `unwrap` on `x` after checking its variant with `is_err`
+ --> $DIR/simple_conditionals.rs:69:9
+ |
+LL | if x.is_err() {
+ | ------------- help: try: `if let Ok(..) = x`
+...
+LL | x.unwrap(); // unnecessary
+ | ^^^^^^^^^^
+
+error: this call to `unwrap_err()` will always panic
+ --> $DIR/simple_conditionals.rs:70:9
+ |
+LL | if x.is_err() {
+ | ---------- because of this check
+...
+LL | x.unwrap_err(); // will panic
+ | ^^^^^^^^^^^^^^
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/clone_on_copy.fixed b/src/tools/clippy/tests/ui/clone_on_copy.fixed
new file mode 100644
index 000000000..dc0627626
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clone_on_copy.fixed
@@ -0,0 +1,74 @@
+// run-rustfix
+
+#![allow(
+ unused,
+ clippy::redundant_clone,
+ clippy::deref_addrof,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::vec_init_then_push,
+ clippy::toplevel_ref_arg,
+ clippy::needless_borrow
+)]
+
+use std::cell::RefCell;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+fn main() {}
+
+fn is_ascii(ch: char) -> bool {
+ ch.is_ascii()
+}
+
+fn clone_on_copy() {
+ 42;
+
+ vec![1].clone(); // ok, not a Copy type
+ Some(vec![1]).clone(); // ok, not a Copy type
+ *(&42);
+
+ let rc = RefCell::new(0);
+ *rc.borrow();
+
+ let x = 0u32;
+ x.rotate_left(1);
+
+ #[derive(Clone, Copy)]
+ struct Foo;
+ impl Foo {
+ fn clone(&self) -> u32 {
+ 0
+ }
+ }
+ Foo.clone(); // ok, this is not the clone trait
+
+ macro_rules! m {
+ ($e:expr) => {{ $e }};
+ }
+ m!(42);
+
+ struct Wrap([u32; 2]);
+ impl core::ops::Deref for Wrap {
+ type Target = [u32; 2];
+ fn deref(&self) -> &[u32; 2] {
+ &self.0
+ }
+ }
+ let x = Wrap([0, 0]);
+ (*x)[0];
+
+ let x = 42;
+ let ref y = x.clone(); // ok, binds by reference
+ let ref mut y = x.clone(); // ok, binds by reference
+
+ // Issue #4348
+ let mut x = 43;
+ let _ = &x.clone(); // ok, getting a ref
+ 'a'.clone().make_ascii_uppercase(); // ok, clone and then mutate
+ is_ascii('z');
+
+ // Issue #5436
+ let mut vec = Vec::new();
+ vec.push(42);
+}
diff --git a/src/tools/clippy/tests/ui/clone_on_copy.rs b/src/tools/clippy/tests/ui/clone_on_copy.rs
new file mode 100644
index 000000000..8c39d0d55
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clone_on_copy.rs
@@ -0,0 +1,74 @@
+// run-rustfix
+
+#![allow(
+ unused,
+ clippy::redundant_clone,
+ clippy::deref_addrof,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::vec_init_then_push,
+ clippy::toplevel_ref_arg,
+ clippy::needless_borrow
+)]
+
+use std::cell::RefCell;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+fn main() {}
+
+fn is_ascii(ch: char) -> bool {
+ ch.is_ascii()
+}
+
+fn clone_on_copy() {
+ 42.clone();
+
+ vec![1].clone(); // ok, not a Copy type
+ Some(vec![1]).clone(); // ok, not a Copy type
+ (&42).clone();
+
+ let rc = RefCell::new(0);
+ rc.borrow().clone();
+
+ let x = 0u32;
+ x.clone().rotate_left(1);
+
+ #[derive(Clone, Copy)]
+ struct Foo;
+ impl Foo {
+ fn clone(&self) -> u32 {
+ 0
+ }
+ }
+ Foo.clone(); // ok, this is not the clone trait
+
+ macro_rules! m {
+ ($e:expr) => {{ $e }};
+ }
+ m!(42).clone();
+
+ struct Wrap([u32; 2]);
+ impl core::ops::Deref for Wrap {
+ type Target = [u32; 2];
+ fn deref(&self) -> &[u32; 2] {
+ &self.0
+ }
+ }
+ let x = Wrap([0, 0]);
+ x.clone()[0];
+
+ let x = 42;
+ let ref y = x.clone(); // ok, binds by reference
+ let ref mut y = x.clone(); // ok, binds by reference
+
+ // Issue #4348
+ let mut x = 43;
+ let _ = &x.clone(); // ok, getting a ref
+ 'a'.clone().make_ascii_uppercase(); // ok, clone and then mutate
+ is_ascii('z'.clone());
+
+ // Issue #5436
+ let mut vec = Vec::new();
+ vec.push(42.clone());
+}
diff --git a/src/tools/clippy/tests/ui/clone_on_copy.stderr b/src/tools/clippy/tests/ui/clone_on_copy.stderr
new file mode 100644
index 000000000..861543d0a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clone_on_copy.stderr
@@ -0,0 +1,52 @@
+error: using `clone` on type `i32` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:25:5
+ |
+LL | 42.clone();
+ | ^^^^^^^^^^ help: try removing the `clone` call: `42`
+ |
+ = note: `-D clippy::clone-on-copy` implied by `-D warnings`
+
+error: using `clone` on type `i32` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:29:5
+ |
+LL | (&42).clone();
+ | ^^^^^^^^^^^^^ help: try dereferencing it: `*(&42)`
+
+error: using `clone` on type `i32` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:32:5
+ |
+LL | rc.borrow().clone();
+ | ^^^^^^^^^^^^^^^^^^^ help: try dereferencing it: `*rc.borrow()`
+
+error: using `clone` on type `u32` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:35:5
+ |
+LL | x.clone().rotate_left(1);
+ | ^^^^^^^^^ help: try removing the `clone` call: `x`
+
+error: using `clone` on type `i32` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:49:5
+ |
+LL | m!(42).clone();
+ | ^^^^^^^^^^^^^^ help: try removing the `clone` call: `m!(42)`
+
+error: using `clone` on type `[u32; 2]` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:59:5
+ |
+LL | x.clone()[0];
+ | ^^^^^^^^^ help: try dereferencing it: `(*x)`
+
+error: using `clone` on type `char` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:69:14
+ |
+LL | is_ascii('z'.clone());
+ | ^^^^^^^^^^^ help: try removing the `clone` call: `'z'`
+
+error: using `clone` on type `i32` which implements the `Copy` trait
+ --> $DIR/clone_on_copy.rs:73:14
+ |
+LL | vec.push(42.clone());
+ | ^^^^^^^^^^ help: try removing the `clone` call: `42`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/clone_on_copy_impl.rs b/src/tools/clippy/tests/ui/clone_on_copy_impl.rs
new file mode 100644
index 000000000..8f9f2a0db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/clone_on_copy_impl.rs
@@ -0,0 +1,22 @@
+use std::fmt;
+use std::marker::PhantomData;
+
+pub struct Key<T> {
+ #[doc(hidden)]
+ pub __name: &'static str,
+ #[doc(hidden)]
+ pub __phantom: PhantomData<T>,
+}
+
+impl<T> Copy for Key<T> {}
+
+impl<T> Clone for Key<T> {
+ fn clone(&self) -> Self {
+ Key {
+ __name: self.__name,
+ __phantom: self.__phantom,
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/cloned_instead_of_copied.fixed b/src/tools/clippy/tests/ui/cloned_instead_of_copied.fixed
new file mode 100644
index 000000000..4eb999e18
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cloned_instead_of_copied.fixed
@@ -0,0 +1,15 @@
+// run-rustfix
+#![warn(clippy::cloned_instead_of_copied)]
+
+fn main() {
+ // yay
+ let _ = [1].iter().copied();
+ let _ = vec!["hi"].iter().copied();
+ let _ = Some(&1).copied();
+ let _ = Box::new([1].iter()).copied();
+ let _ = Box::new(Some(&1)).copied();
+
+ // nay
+ let _ = [String::new()].iter().cloned();
+ let _ = Some(&String::new()).cloned();
+}
diff --git a/src/tools/clippy/tests/ui/cloned_instead_of_copied.rs b/src/tools/clippy/tests/ui/cloned_instead_of_copied.rs
new file mode 100644
index 000000000..894496c0e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cloned_instead_of_copied.rs
@@ -0,0 +1,15 @@
+// run-rustfix
+#![warn(clippy::cloned_instead_of_copied)]
+
+fn main() {
+ // yay
+ let _ = [1].iter().cloned();
+ let _ = vec!["hi"].iter().cloned();
+ let _ = Some(&1).cloned();
+ let _ = Box::new([1].iter()).cloned();
+ let _ = Box::new(Some(&1)).cloned();
+
+ // nay
+ let _ = [String::new()].iter().cloned();
+ let _ = Some(&String::new()).cloned();
+}
diff --git a/src/tools/clippy/tests/ui/cloned_instead_of_copied.stderr b/src/tools/clippy/tests/ui/cloned_instead_of_copied.stderr
new file mode 100644
index 000000000..e0707d321
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cloned_instead_of_copied.stderr
@@ -0,0 +1,34 @@
+error: used `cloned` where `copied` could be used instead
+ --> $DIR/cloned_instead_of_copied.rs:6:24
+ |
+LL | let _ = [1].iter().cloned();
+ | ^^^^^^ help: try: `copied`
+ |
+ = note: `-D clippy::cloned-instead-of-copied` implied by `-D warnings`
+
+error: used `cloned` where `copied` could be used instead
+ --> $DIR/cloned_instead_of_copied.rs:7:31
+ |
+LL | let _ = vec!["hi"].iter().cloned();
+ | ^^^^^^ help: try: `copied`
+
+error: used `cloned` where `copied` could be used instead
+ --> $DIR/cloned_instead_of_copied.rs:8:22
+ |
+LL | let _ = Some(&1).cloned();
+ | ^^^^^^ help: try: `copied`
+
+error: used `cloned` where `copied` could be used instead
+ --> $DIR/cloned_instead_of_copied.rs:9:34
+ |
+LL | let _ = Box::new([1].iter()).cloned();
+ | ^^^^^^ help: try: `copied`
+
+error: used `cloned` where `copied` could be used instead
+ --> $DIR/cloned_instead_of_copied.rs:10:32
+ |
+LL | let _ = Box::new(Some(&1)).cloned();
+ | ^^^^^^ help: try: `copied`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cmp_nan.rs b/src/tools/clippy/tests/ui/cmp_nan.rs
new file mode 100644
index 000000000..64ca52b01
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_nan.rs
@@ -0,0 +1,34 @@
+const NAN_F32: f32 = f32::NAN;
+const NAN_F64: f64 = f64::NAN;
+
+#[warn(clippy::cmp_nan)]
+#[allow(clippy::float_cmp, clippy::no_effect, clippy::unnecessary_operation)]
+fn main() {
+ let x = 5f32;
+ x == f32::NAN;
+ x != f32::NAN;
+ x < f32::NAN;
+ x > f32::NAN;
+ x <= f32::NAN;
+ x >= f32::NAN;
+ x == NAN_F32;
+ x != NAN_F32;
+ x < NAN_F32;
+ x > NAN_F32;
+ x <= NAN_F32;
+ x >= NAN_F32;
+
+ let y = 0f64;
+ y == f64::NAN;
+ y != f64::NAN;
+ y < f64::NAN;
+ y > f64::NAN;
+ y <= f64::NAN;
+ y >= f64::NAN;
+ y == NAN_F64;
+ y != NAN_F64;
+ y < NAN_F64;
+ y > NAN_F64;
+ y <= NAN_F64;
+ y >= NAN_F64;
+}
diff --git a/src/tools/clippy/tests/ui/cmp_nan.stderr b/src/tools/clippy/tests/ui/cmp_nan.stderr
new file mode 100644
index 000000000..867516661
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_nan.stderr
@@ -0,0 +1,148 @@
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:8:5
+ |
+LL | x == f32::NAN;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cmp-nan` implied by `-D warnings`
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:9:5
+ |
+LL | x != f32::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:10:5
+ |
+LL | x < f32::NAN;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:11:5
+ |
+LL | x > f32::NAN;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:12:5
+ |
+LL | x <= f32::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:13:5
+ |
+LL | x >= f32::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:14:5
+ |
+LL | x == NAN_F32;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:15:5
+ |
+LL | x != NAN_F32;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:16:5
+ |
+LL | x < NAN_F32;
+ | ^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:17:5
+ |
+LL | x > NAN_F32;
+ | ^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:18:5
+ |
+LL | x <= NAN_F32;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:19:5
+ |
+LL | x >= NAN_F32;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:22:5
+ |
+LL | y == f64::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:23:5
+ |
+LL | y != f64::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:24:5
+ |
+LL | y < f64::NAN;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:25:5
+ |
+LL | y > f64::NAN;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:26:5
+ |
+LL | y <= f64::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:27:5
+ |
+LL | y >= f64::NAN;
+ | ^^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:28:5
+ |
+LL | y == NAN_F64;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:29:5
+ |
+LL | y != NAN_F64;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:30:5
+ |
+LL | y < NAN_F64;
+ | ^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:31:5
+ |
+LL | y > NAN_F64;
+ | ^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:32:5
+ |
+LL | y <= NAN_F64;
+ | ^^^^^^^^^^^^
+
+error: doomed comparison with `NAN`, use `{f32,f64}::is_nan()` instead
+ --> $DIR/cmp_nan.rs:33:5
+ |
+LL | y >= NAN_F64;
+ | ^^^^^^^^^^^^
+
+error: aborting due to 24 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cmp_null.rs b/src/tools/clippy/tests/ui/cmp_null.rs
new file mode 100644
index 000000000..2d2d04178
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_null.rs
@@ -0,0 +1,17 @@
+#![warn(clippy::cmp_null)]
+#![allow(unused_mut)]
+
+use std::ptr;
+
+fn main() {
+ let x = 0;
+ let p: *const usize = &x;
+ if p == ptr::null() {
+ println!("This is surprising!");
+ }
+ let mut y = 0;
+ let mut m: *mut usize = &mut y;
+ if m == ptr::null_mut() {
+ println!("This is surprising, too!");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cmp_null.stderr b/src/tools/clippy/tests/ui/cmp_null.stderr
new file mode 100644
index 000000000..a1f4c70fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_null.stderr
@@ -0,0 +1,16 @@
+error: comparing with null is better expressed by the `.is_null()` method
+ --> $DIR/cmp_null.rs:9:8
+ |
+LL | if p == ptr::null() {
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::cmp-null` implied by `-D warnings`
+
+error: comparing with null is better expressed by the `.is_null()` method
+ --> $DIR/cmp_null.rs:14:8
+ |
+LL | if m == ptr::null_mut() {
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.fixed b/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.fixed
new file mode 100644
index 000000000..abd059c23
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.fixed
@@ -0,0 +1,93 @@
+// run-rustfix
+#![allow(unused, clippy::redundant_clone, clippy::derive_partial_eq_without_eq)] // See #5700
+
+// Define the types in each module to avoid trait impls leaking between modules.
+macro_rules! impl_types {
+ () => {
+ #[derive(PartialEq)]
+ pub struct Owned;
+
+ pub struct Borrowed;
+
+ impl ToOwned for Borrowed {
+ type Owned = Owned;
+ fn to_owned(&self) -> Owned {
+ Owned {}
+ }
+ }
+
+ impl std::borrow::Borrow<Borrowed> for Owned {
+ fn borrow(&self) -> &Borrowed {
+ static VALUE: Borrowed = Borrowed {};
+ &VALUE
+ }
+ }
+ };
+}
+
+// Only Borrowed == Owned is implemented
+mod borrowed_eq_owned {
+ impl_types!();
+
+ impl PartialEq<Owned> for Borrowed {
+ fn eq(&self, _: &Owned) -> bool {
+ true
+ }
+ }
+
+ pub fn compare() {
+ let owned = Owned {};
+ let borrowed = Borrowed {};
+
+ if borrowed == owned {}
+ if borrowed == owned {}
+ }
+}
+
+// Only Owned == Borrowed is implemented
+mod owned_eq_borrowed {
+ impl_types!();
+
+ impl PartialEq<Borrowed> for Owned {
+ fn eq(&self, _: &Borrowed) -> bool {
+ true
+ }
+ }
+
+ fn compare() {
+ let owned = Owned {};
+ let borrowed = Borrowed {};
+
+ if owned == borrowed {}
+ if owned == borrowed {}
+ }
+}
+
+mod issue_4874 {
+ impl_types!();
+
+ // NOTE: PartialEq<Borrowed> for T can't be implemented due to the orphan rules
+ impl<T> PartialEq<T> for Borrowed
+ where
+ T: AsRef<str> + ?Sized,
+ {
+ fn eq(&self, _: &T) -> bool {
+ true
+ }
+ }
+
+ impl std::fmt::Display for Borrowed {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "borrowed")
+ }
+ }
+
+ fn compare() {
+ let borrowed = Borrowed {};
+
+ if borrowed == "Hi" {}
+ if borrowed == "Hi" {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.rs b/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.rs
new file mode 100644
index 000000000..020ef5f84
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.rs
@@ -0,0 +1,93 @@
+// run-rustfix
+#![allow(unused, clippy::redundant_clone, clippy::derive_partial_eq_without_eq)] // See #5700
+
+// Define the types in each module to avoid trait impls leaking between modules.
+macro_rules! impl_types {
+ () => {
+ #[derive(PartialEq)]
+ pub struct Owned;
+
+ pub struct Borrowed;
+
+ impl ToOwned for Borrowed {
+ type Owned = Owned;
+ fn to_owned(&self) -> Owned {
+ Owned {}
+ }
+ }
+
+ impl std::borrow::Borrow<Borrowed> for Owned {
+ fn borrow(&self) -> &Borrowed {
+ static VALUE: Borrowed = Borrowed {};
+ &VALUE
+ }
+ }
+ };
+}
+
+// Only Borrowed == Owned is implemented
+mod borrowed_eq_owned {
+ impl_types!();
+
+ impl PartialEq<Owned> for Borrowed {
+ fn eq(&self, _: &Owned) -> bool {
+ true
+ }
+ }
+
+ pub fn compare() {
+ let owned = Owned {};
+ let borrowed = Borrowed {};
+
+ if borrowed.to_owned() == owned {}
+ if owned == borrowed.to_owned() {}
+ }
+}
+
+// Only Owned == Borrowed is implemented
+mod owned_eq_borrowed {
+ impl_types!();
+
+ impl PartialEq<Borrowed> for Owned {
+ fn eq(&self, _: &Borrowed) -> bool {
+ true
+ }
+ }
+
+ fn compare() {
+ let owned = Owned {};
+ let borrowed = Borrowed {};
+
+ if owned == borrowed.to_owned() {}
+ if borrowed.to_owned() == owned {}
+ }
+}
+
+mod issue_4874 {
+ impl_types!();
+
+ // NOTE: PartialEq<Borrowed> for T can't be implemented due to the orphan rules
+ impl<T> PartialEq<T> for Borrowed
+ where
+ T: AsRef<str> + ?Sized,
+ {
+ fn eq(&self, _: &T) -> bool {
+ true
+ }
+ }
+
+ impl std::fmt::Display for Borrowed {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "borrowed")
+ }
+ }
+
+ fn compare() {
+ let borrowed = Borrowed {};
+
+ if "Hi" == borrowed.to_string() {}
+ if borrowed.to_string() == "Hi" {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.stderr b/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.stderr
new file mode 100644
index 000000000..43bf8851f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/asymmetric_partial_eq.stderr
@@ -0,0 +1,46 @@
+error: this creates an owned instance just for comparison
+ --> $DIR/asymmetric_partial_eq.rs:42:12
+ |
+LL | if borrowed.to_owned() == owned {}
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `borrowed`
+ |
+ = note: `-D clippy::cmp-owned` implied by `-D warnings`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/asymmetric_partial_eq.rs:43:21
+ |
+LL | if owned == borrowed.to_owned() {}
+ | ---------^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: try: `borrowed == owned`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/asymmetric_partial_eq.rs:61:21
+ |
+LL | if owned == borrowed.to_owned() {}
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `borrowed`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/asymmetric_partial_eq.rs:62:12
+ |
+LL | if borrowed.to_owned() == owned {}
+ | ^^^^^^^^^^^^^^^^^^^---------
+ | |
+ | help: try: `owned == borrowed`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/asymmetric_partial_eq.rs:88:20
+ |
+LL | if "Hi" == borrowed.to_string() {}
+ | --------^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: try: `borrowed == "Hi"`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/asymmetric_partial_eq.rs:89:12
+ |
+LL | if borrowed.to_string() == "Hi" {}
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `borrowed`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.fixed b/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.fixed
new file mode 100644
index 000000000..44e41bdd1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+
+use std::fmt::{self, Display};
+
+fn main() {
+ let a = Foo;
+
+ if a != "bar" {
+ println!("foo");
+ }
+
+ if a != "bar" {
+ println!("foo");
+ }
+}
+
+struct Foo;
+
+impl Display for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "foo")
+ }
+}
+
+impl PartialEq<&str> for Foo {
+ fn eq(&self, other: &&str) -> bool {
+ "foo" == *other
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.rs b/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.rs
new file mode 100644
index 000000000..662673abb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.rs
@@ -0,0 +1,29 @@
+// run-rustfix
+
+use std::fmt::{self, Display};
+
+fn main() {
+ let a = Foo;
+
+ if a.to_string() != "bar" {
+ println!("foo");
+ }
+
+ if "bar" != a.to_string() {
+ println!("foo");
+ }
+}
+
+struct Foo;
+
+impl Display for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "foo")
+ }
+}
+
+impl PartialEq<&str> for Foo {
+ fn eq(&self, other: &&str) -> bool {
+ "foo" == *other
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.stderr b/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.stderr
new file mode 100644
index 000000000..e4d0d822b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/comparison_flip.stderr
@@ -0,0 +1,18 @@
+error: this creates an owned instance just for comparison
+ --> $DIR/comparison_flip.rs:8:8
+ |
+LL | if a.to_string() != "bar" {
+ | ^^^^^^^^^^^^^ help: try: `a`
+ |
+ = note: `-D clippy::cmp-owned` implied by `-D warnings`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/comparison_flip.rs:12:17
+ |
+LL | if "bar" != a.to_string() {
+ | ---------^^^^^^^^^^^^^
+ | |
+ | help: try: `a != "bar"`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed
new file mode 100644
index 000000000..b28c4378e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed
@@ -0,0 +1,72 @@
+// run-rustfix
+
+#[warn(clippy::cmp_owned)]
+#[allow(clippy::unnecessary_operation, clippy::no_effect, unused_must_use, clippy::eq_op)]
+fn main() {
+ fn with_to_string(x: &str) {
+ x != "foo";
+
+ "foo" != x;
+ }
+
+ let x = "oh";
+
+ with_to_string(x);
+
+ x != "foo";
+
+ x != "foo";
+
+ 42.to_string() == "42";
+
+ Foo == Foo;
+
+ "abc".chars().filter(|c| *c != 'X');
+
+ "abc".chars().filter(|c| *c != 'X');
+}
+
+struct Foo;
+
+impl PartialEq for Foo {
+ // Allow this here, because it emits the lint
+ // without a suggestion. This is tested in
+ // `tests/ui/cmp_owned/without_suggestion.rs`
+ #[allow(clippy::cmp_owned)]
+ fn eq(&self, other: &Self) -> bool {
+ self.to_owned() == *other
+ }
+}
+
+impl ToOwned for Foo {
+ type Owned = Bar;
+ fn to_owned(&self) -> Bar {
+ Bar
+ }
+}
+
+#[derive(PartialEq, Eq)]
+struct Bar;
+
+impl PartialEq<Foo> for Bar {
+ fn eq(&self, _: &Foo) -> bool {
+ true
+ }
+}
+
+impl std::borrow::Borrow<Foo> for Bar {
+ fn borrow(&self) -> &Foo {
+ static FOO: Foo = Foo;
+ &FOO
+ }
+}
+
+#[derive(PartialEq, Eq)]
+struct Baz;
+
+impl ToOwned for Baz {
+ type Owned = Baz;
+ fn to_owned(&self) -> Baz {
+ Baz
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs
new file mode 100644
index 000000000..c1089010f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs
@@ -0,0 +1,72 @@
+// run-rustfix
+
+#[warn(clippy::cmp_owned)]
+#[allow(clippy::unnecessary_operation, clippy::no_effect, unused_must_use, clippy::eq_op)]
+fn main() {
+ fn with_to_string(x: &str) {
+ x != "foo".to_string();
+
+ "foo".to_string() != x;
+ }
+
+ let x = "oh";
+
+ with_to_string(x);
+
+ x != "foo".to_owned();
+
+ x != String::from("foo");
+
+ 42.to_string() == "42";
+
+ Foo.to_owned() == Foo;
+
+ "abc".chars().filter(|c| c.to_owned() != 'X');
+
+ "abc".chars().filter(|c| *c != 'X');
+}
+
+struct Foo;
+
+impl PartialEq for Foo {
+ // Allow this here, because it emits the lint
+ // without a suggestion. This is tested in
+ // `tests/ui/cmp_owned/without_suggestion.rs`
+ #[allow(clippy::cmp_owned)]
+ fn eq(&self, other: &Self) -> bool {
+ self.to_owned() == *other
+ }
+}
+
+impl ToOwned for Foo {
+ type Owned = Bar;
+ fn to_owned(&self) -> Bar {
+ Bar
+ }
+}
+
+#[derive(PartialEq, Eq)]
+struct Bar;
+
+impl PartialEq<Foo> for Bar {
+ fn eq(&self, _: &Foo) -> bool {
+ true
+ }
+}
+
+impl std::borrow::Borrow<Foo> for Bar {
+ fn borrow(&self) -> &Foo {
+ static FOO: Foo = Foo;
+ &FOO
+ }
+}
+
+#[derive(PartialEq, Eq)]
+struct Baz;
+
+impl ToOwned for Baz {
+ type Owned = Baz;
+ fn to_owned(&self) -> Baz {
+ Baz
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr
new file mode 100644
index 000000000..2f333e6ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr
@@ -0,0 +1,40 @@
+error: this creates an owned instance just for comparison
+ --> $DIR/with_suggestion.rs:7:14
+ |
+LL | x != "foo".to_string();
+ | ^^^^^^^^^^^^^^^^^ help: try: `"foo"`
+ |
+ = note: `-D clippy::cmp-owned` implied by `-D warnings`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/with_suggestion.rs:9:9
+ |
+LL | "foo".to_string() != x;
+ | ^^^^^^^^^^^^^^^^^ help: try: `"foo"`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/with_suggestion.rs:16:10
+ |
+LL | x != "foo".to_owned();
+ | ^^^^^^^^^^^^^^^^ help: try: `"foo"`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/with_suggestion.rs:18:10
+ |
+LL | x != String::from("foo");
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `"foo"`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/with_suggestion.rs:22:5
+ |
+LL | Foo.to_owned() == Foo;
+ | ^^^^^^^^^^^^^^ help: try: `Foo`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/with_suggestion.rs:24:30
+ |
+LL | "abc".chars().filter(|c| c.to_owned() != 'X');
+ | ^^^^^^^^^^^^ help: try: `*c`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cmp_owned/without_suggestion.rs b/src/tools/clippy/tests/ui/cmp_owned/without_suggestion.rs
new file mode 100644
index 000000000..d8a202cb6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/without_suggestion.rs
@@ -0,0 +1,75 @@
+#[allow(clippy::unnecessary_operation)]
+#[allow(clippy::implicit_clone)]
+
+fn main() {
+ let x = &Baz;
+ let y = &Baz;
+ y.to_owned() == *x;
+
+ let x = &&Baz;
+ let y = &Baz;
+ y.to_owned() == **x;
+
+ let x = 0u32;
+ let y = U32Wrapper(x);
+ let _ = U32Wrapper::from(x) == y;
+}
+
+struct Foo;
+
+impl PartialEq for Foo {
+ fn eq(&self, other: &Self) -> bool {
+ self.to_owned() == *other
+ }
+}
+
+impl ToOwned for Foo {
+ type Owned = Bar;
+ fn to_owned(&self) -> Bar {
+ Bar
+ }
+}
+
+#[derive(PartialEq, Eq)]
+struct Baz;
+
+impl ToOwned for Baz {
+ type Owned = Baz;
+ fn to_owned(&self) -> Baz {
+ Baz
+ }
+}
+
+#[derive(PartialEq, Eq)]
+struct Bar;
+
+impl PartialEq<Foo> for Bar {
+ fn eq(&self, _: &Foo) -> bool {
+ true
+ }
+}
+
+impl std::borrow::Borrow<Foo> for Bar {
+ fn borrow(&self) -> &Foo {
+ static FOO: Foo = Foo;
+ &FOO
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+struct U32Wrapper(u32);
+impl From<u32> for U32Wrapper {
+ fn from(x: u32) -> Self {
+ Self(x)
+ }
+}
+impl PartialEq<u32> for U32Wrapper {
+ fn eq(&self, other: &u32) -> bool {
+ self.0 == *other
+ }
+}
+impl PartialEq<U32Wrapper> for u32 {
+ fn eq(&self, other: &U32Wrapper) -> bool {
+ *self == other.0
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cmp_owned/without_suggestion.stderr b/src/tools/clippy/tests/ui/cmp_owned/without_suggestion.stderr
new file mode 100644
index 000000000..d2dd14d8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cmp_owned/without_suggestion.stderr
@@ -0,0 +1,22 @@
+error: this creates an owned instance just for comparison
+ --> $DIR/without_suggestion.rs:7:5
+ |
+LL | y.to_owned() == *x;
+ | ^^^^^^^^^^^^^^^^^^ try implementing the comparison without allocating
+ |
+ = note: `-D clippy::cmp-owned` implied by `-D warnings`
+
+error: this creates an owned instance just for comparison
+ --> $DIR/without_suggestion.rs:11:5
+ |
+LL | y.to_owned() == **x;
+ | ^^^^^^^^^^^^^^^^^^^ try implementing the comparison without allocating
+
+error: this creates an owned instance just for comparison
+ --> $DIR/without_suggestion.rs:22:9
+ |
+LL | self.to_owned() == *other
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ try implementing the comparison without allocating
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cognitive_complexity.rs b/src/tools/clippy/tests/ui/cognitive_complexity.rs
new file mode 100644
index 000000000..912e6788a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cognitive_complexity.rs
@@ -0,0 +1,395 @@
+#![allow(clippy::all)]
+#![warn(clippy::cognitive_complexity)]
+#![allow(unused, unused_crate_dependencies)]
+
+#[rustfmt::skip]
+fn main() {
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+ if true {
+ println!("a");
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn kaboom() {
+ let n = 0;
+ 'a: for i in 0..20 {
+ 'b: for j in i..20 {
+ for k in j..20 {
+ if k == 5 {
+ break 'b;
+ }
+ if j == 3 && k == 6 {
+ continue 'a;
+ }
+ if k == j {
+ continue;
+ }
+ println!("bake");
+ }
+ }
+ println!("cake");
+ }
+}
+
+fn bloo() {
+ match 42 {
+ 0 => println!("hi"),
+ 1 => println!("hai"),
+ 2 => println!("hey"),
+ 3 => println!("hallo"),
+ 4 => println!("hello"),
+ 5 => println!("salut"),
+ 6 => println!("good morning"),
+ 7 => println!("good evening"),
+ 8 => println!("good afternoon"),
+ 9 => println!("good night"),
+ 10 => println!("bonjour"),
+ 11 => println!("hej"),
+ 12 => println!("hej hej"),
+ 13 => println!("greetings earthling"),
+ 14 => println!("take us to you leader"),
+ 15 | 17 | 19 | 21 | 23 | 25 | 27 | 29 | 31 | 33 => println!("take us to you leader"),
+ 35 | 37 | 39 | 41 | 43 | 45 | 47 | 49 | 51 | 53 => println!("there is no undefined behavior"),
+ 55 | 57 | 59 | 61 | 63 | 65 | 67 | 69 | 71 | 73 => println!("I know borrow-fu"),
+ _ => println!("bye"),
+ }
+}
+
+// Short circuiting operations don't increase the complexity of a function.
+// Note that the minimum complexity of a function is 1.
+#[clippy::cognitive_complexity = "1"]
+fn lots_of_short_circuits() -> bool {
+ true && false && true && false && true && false && true
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn lots_of_short_circuits2() -> bool {
+ true || false || true || false || true || false || true
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn baa() {
+ let x = || match 99 {
+ 0 => 0,
+ 1 => 1,
+ 2 => 2,
+ 4 => 4,
+ 6 => 6,
+ 9 => 9,
+ _ => 42,
+ };
+ if x() == 42 {
+ println!("x");
+ } else {
+ println!("not x");
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn bar() {
+ match 99 {
+ 0 => println!("hi"),
+ _ => println!("bye"),
+ }
+}
+
+#[test]
+#[clippy::cognitive_complexity = "1"]
+/// Tests are usually complex but simple at the same time. `clippy::cognitive_complexity` used to
+/// give lots of false-positives in tests.
+fn dont_warn_on_tests() {
+ match 99 {
+ 0 => println!("hi"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn barr() {
+ match 99 {
+ 0 => println!("hi"),
+ 1 => println!("bla"),
+ 2 | 3 => println!("blub"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn barr2() {
+ match 99 {
+ 0 => println!("hi"),
+ 1 => println!("bla"),
+ 2 | 3 => println!("blub"),
+ _ => println!("bye"),
+ }
+ match 99 {
+ 0 => println!("hi"),
+ 1 => println!("bla"),
+ 2 | 3 => println!("blub"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn barrr() {
+ match 99 {
+ 0 => println!("hi"),
+ 1 => panic!("bla"),
+ 2 | 3 => println!("blub"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn barrr2() {
+ match 99 {
+ 0 => println!("hi"),
+ 1 => panic!("bla"),
+ 2 | 3 => println!("blub"),
+ _ => println!("bye"),
+ }
+ match 99 {
+ 0 => println!("hi"),
+ 1 => panic!("bla"),
+ 2 | 3 => println!("blub"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn barrrr() {
+ match 99 {
+ 0 => println!("hi"),
+ 1 => println!("bla"),
+ 2 | 3 => panic!("blub"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn barrrr2() {
+ match 99 {
+ 0 => println!("hi"),
+ 1 => println!("bla"),
+ 2 | 3 => panic!("blub"),
+ _ => println!("bye"),
+ }
+ match 99 {
+ 0 => println!("hi"),
+ 1 => println!("bla"),
+ 2 | 3 => panic!("blub"),
+ _ => println!("bye"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn cake() {
+ if 4 == 5 {
+ println!("yea");
+ } else {
+ panic!("meh");
+ }
+ println!("whee");
+}
+
+#[clippy::cognitive_complexity = "1"]
+pub fn read_file(input_path: &str) -> String {
+ use std::fs::File;
+ use std::io::{Read, Write};
+ use std::path::Path;
+ let mut file = match File::open(&Path::new(input_path)) {
+ Ok(f) => f,
+ Err(err) => {
+ panic!("Can't open {}: {}", input_path, err);
+ },
+ };
+
+ let mut bytes = Vec::new();
+
+ match file.read_to_end(&mut bytes) {
+ Ok(..) => {},
+ Err(_) => {
+ panic!("Can't read {}", input_path);
+ },
+ };
+
+ match String::from_utf8(bytes) {
+ Ok(contents) => contents,
+ Err(_) => {
+ panic!("{} is not UTF-8 encoded", input_path);
+ },
+ }
+}
+
+enum Void {}
+
+#[clippy::cognitive_complexity = "1"]
+fn void(void: Void) {
+ if true {
+ match void {}
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn mcarton_sees_all() {
+ panic!("meh");
+ panic!("möh");
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn try_() -> Result<i32, &'static str> {
+ match 5 {
+ 5 => Ok(5),
+ _ => return Err("bla"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn try_again() -> Result<i32, &'static str> {
+ let _ = Ok(42)?;
+ let _ = Ok(43)?;
+ let _ = Ok(44)?;
+ let _ = Ok(45)?;
+ let _ = Ok(46)?;
+ let _ = Ok(47)?;
+ let _ = Ok(48)?;
+ let _ = Ok(49)?;
+ match 5 {
+ 5 => Ok(5),
+ _ => return Err("bla"),
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn early() -> Result<i32, &'static str> {
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+ return Ok(5);
+}
+
+#[rustfmt::skip]
+#[clippy::cognitive_complexity = "1"]
+fn early_ret() -> i32 {
+ let a = if true { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ let a = if a < 99 { 42 } else { return 0; };
+ match 5 {
+ 5 => 5,
+ _ => return 6,
+ }
+}
+
+#[clippy::cognitive_complexity = "1"]
+fn closures() {
+ let x = |a: i32, b: i32| -> i32 {
+ if true {
+ println!("moo");
+ }
+
+ a + b
+ };
+}
+
+struct Moo;
+
+#[clippy::cognitive_complexity = "1"]
+impl Moo {
+ fn moo(&self) {
+ if true {
+ println!("moo");
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cognitive_complexity.stderr b/src/tools/clippy/tests/ui/cognitive_complexity.stderr
new file mode 100644
index 000000000..a0ddc673a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cognitive_complexity.stderr
@@ -0,0 +1,139 @@
+error: the function has a cognitive complexity of (28/25)
+ --> $DIR/cognitive_complexity.rs:6:4
+ |
+LL | fn main() {
+ | ^^^^
+ |
+ = note: `-D clippy::cognitive-complexity` implied by `-D warnings`
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (7/1)
+ --> $DIR/cognitive_complexity.rs:91:4
+ |
+LL | fn kaboom() {
+ | ^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:149:4
+ |
+LL | fn baa() {
+ | ^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:150:13
+ |
+LL | let x = || match 99 {
+ | ^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:167:4
+ |
+LL | fn bar() {
+ | ^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:186:4
+ |
+LL | fn barr() {
+ | ^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (3/1)
+ --> $DIR/cognitive_complexity.rs:196:4
+ |
+LL | fn barr2() {
+ | ^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:212:4
+ |
+LL | fn barrr() {
+ | ^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (3/1)
+ --> $DIR/cognitive_complexity.rs:222:4
+ |
+LL | fn barrr2() {
+ | ^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:238:4
+ |
+LL | fn barrrr() {
+ | ^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (3/1)
+ --> $DIR/cognitive_complexity.rs:248:4
+ |
+LL | fn barrrr2() {
+ | ^^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:264:4
+ |
+LL | fn cake() {
+ | ^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (4/1)
+ --> $DIR/cognitive_complexity.rs:274:8
+ |
+LL | pub fn read_file(input_path: &str) -> String {
+ | ^^^^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:305:4
+ |
+LL | fn void(void: Void) {
+ | ^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (8/1)
+ --> $DIR/cognitive_complexity.rs:356:4
+ |
+LL | fn early_ret() -> i32 {
+ | ^^^^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:377:13
+ |
+LL | let x = |a: i32, b: i32| -> i32 {
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: the function has a cognitive complexity of (2/1)
+ --> $DIR/cognitive_complexity.rs:390:8
+ |
+LL | fn moo(&self) {
+ | ^^^
+ |
+ = help: you could split it up into multiple smaller functions
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.rs b/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.rs
new file mode 100644
index 000000000..771a26fc9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.rs
@@ -0,0 +1,15 @@
+#![warn(unused, clippy::cognitive_complexity)]
+#![allow(unused_crate_dependencies)]
+
+fn main() {
+ kaboom();
+}
+
+#[clippy::cognitive_complexity = "0"]
+fn kaboom() {
+ if 42 == 43 {
+ panic!();
+ } else if "cake" == "lie" {
+ println!("what?");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr b/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr
new file mode 100644
index 000000000..f5ff53dda
--- /dev/null
+++ b/src/tools/clippy/tests/ui/cognitive_complexity_attr_used.stderr
@@ -0,0 +1,11 @@
+error: the function has a cognitive complexity of (3/0)
+ --> $DIR/cognitive_complexity_attr_used.rs:9:4
+ |
+LL | fn kaboom() {
+ | ^^^^^^
+ |
+ = note: `-D clippy::cognitive-complexity` implied by `-D warnings`
+ = help: you could split it up into multiple smaller functions
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/collapsible_else_if.fixed b/src/tools/clippy/tests/ui/collapsible_else_if.fixed
new file mode 100644
index 000000000..d6a5a7850
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_else_if.fixed
@@ -0,0 +1,84 @@
+// run-rustfix
+#![allow(clippy::assertions_on_constants, clippy::equatable_if_let)]
+
+#[rustfmt::skip]
+#[warn(clippy::collapsible_if)]
+#[warn(clippy::collapsible_else_if)]
+
+fn main() {
+ let x = "hello";
+ let y = "world";
+ // Collapse `else { if .. }` to `else if ..`
+ if x == "hello" {
+ print!("Hello ");
+ } else if y == "world" {
+ println!("world!")
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else if let Some(42) = Some(42) {
+ println!("world!")
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else if y == "world" {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else if let Some(42) = Some(42) {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+
+ if let Some(42) = Some(42) {
+ print!("Hello ");
+ } else if let Some(42) = Some(42) {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+
+ if let Some(42) = Some(42) {
+ print!("Hello ");
+ } else if x == "hello" {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+
+ if let Some(42) = Some(42) {
+ print!("Hello ");
+ } else if let Some(42) = Some(42) {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ #[cfg(not(roflol))]
+ if y == "world" {
+ println!("world!")
+ }
+ }
+}
+
+#[rustfmt::skip]
+#[allow(dead_code)]
+fn issue_7318() {
+ if true { println!("I've been resolved!")
+ }else if false {}
+}
diff --git a/src/tools/clippy/tests/ui/collapsible_else_if.rs b/src/tools/clippy/tests/ui/collapsible_else_if.rs
new file mode 100644
index 000000000..4399fc8b2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_else_if.rs
@@ -0,0 +1,100 @@
+// run-rustfix
+#![allow(clippy::assertions_on_constants, clippy::equatable_if_let)]
+
+#[rustfmt::skip]
+#[warn(clippy::collapsible_if)]
+#[warn(clippy::collapsible_else_if)]
+
+fn main() {
+ let x = "hello";
+ let y = "world";
+ // Collapse `else { if .. }` to `else if ..`
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ if y == "world" {
+ println!("world!")
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ if let Some(42) = Some(42) {
+ println!("world!")
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ if y == "world" {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ if let Some(42) = Some(42) {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+ }
+
+ if let Some(42) = Some(42) {
+ print!("Hello ");
+ } else {
+ if let Some(42) = Some(42) {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+ }
+
+ if let Some(42) = Some(42) {
+ print!("Hello ");
+ } else {
+ if x == "hello" {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+ }
+
+ if let Some(42) = Some(42) {
+ print!("Hello ");
+ } else {
+ if let Some(42) = Some(42) {
+ println!("world")
+ }
+ else {
+ println!("!")
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ #[cfg(not(roflol))]
+ if y == "world" {
+ println!("world!")
+ }
+ }
+}
+
+#[rustfmt::skip]
+#[allow(dead_code)]
+fn issue_7318() {
+ if true { println!("I've been resolved!")
+ }else{
+ if false {}
+ }
+}
diff --git a/src/tools/clippy/tests/ui/collapsible_else_if.stderr b/src/tools/clippy/tests/ui/collapsible_else_if.stderr
new file mode 100644
index 000000000..45b2094c9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_else_if.stderr
@@ -0,0 +1,163 @@
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:14:12
+ |
+LL | } else {
+ | ____________^
+LL | | if y == "world" {
+LL | | println!("world!")
+LL | | }
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::collapsible-else-if` implied by `-D warnings`
+help: collapse nested if block
+ |
+LL ~ } else if y == "world" {
+LL + println!("world!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:22:12
+ |
+LL | } else {
+ | ____________^
+LL | | if let Some(42) = Some(42) {
+LL | | println!("world!")
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ } else if let Some(42) = Some(42) {
+LL + println!("world!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:30:12
+ |
+LL | } else {
+ | ____________^
+LL | | if y == "world" {
+LL | | println!("world")
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ } else if y == "world" {
+LL + println!("world")
+LL + }
+LL + else {
+LL + println!("!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:41:12
+ |
+LL | } else {
+ | ____________^
+LL | | if let Some(42) = Some(42) {
+LL | | println!("world")
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ } else if let Some(42) = Some(42) {
+LL + println!("world")
+LL + }
+LL + else {
+LL + println!("!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:52:12
+ |
+LL | } else {
+ | ____________^
+LL | | if let Some(42) = Some(42) {
+LL | | println!("world")
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ } else if let Some(42) = Some(42) {
+LL + println!("world")
+LL + }
+LL + else {
+LL + println!("!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:63:12
+ |
+LL | } else {
+ | ____________^
+LL | | if x == "hello" {
+LL | | println!("world")
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ } else if x == "hello" {
+LL + println!("world")
+LL + }
+LL + else {
+LL + println!("!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:74:12
+ |
+LL | } else {
+ | ____________^
+LL | | if let Some(42) = Some(42) {
+LL | | println!("world")
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ } else if let Some(42) = Some(42) {
+LL + println!("world")
+LL + }
+LL + else {
+LL + println!("!")
+LL + }
+ |
+
+error: this `else { if .. }` block can be collapsed
+ --> $DIR/collapsible_else_if.rs:97:10
+ |
+LL | }else{
+ | __________^
+LL | | if false {}
+LL | | }
+ | |_____^ help: collapse nested if block: `if false {}`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/collapsible_if.fixed b/src/tools/clippy/tests/ui/collapsible_if.fixed
new file mode 100644
index 000000000..5b0e4a473
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_if.fixed
@@ -0,0 +1,148 @@
+// run-rustfix
+#![allow(clippy::assertions_on_constants, clippy::equatable_if_let)]
+
+#[rustfmt::skip]
+#[warn(clippy::collapsible_if)]
+fn main() {
+ let x = "hello";
+ let y = "world";
+ if x == "hello" && y == "world" {
+ println!("Hello world!");
+ }
+
+ if (x == "hello" || x == "world") && (y == "world" || y == "hello") {
+ println!("Hello world!");
+ }
+
+ if x == "hello" && x == "world" && (y == "world" || y == "hello") {
+ println!("Hello world!");
+ }
+
+ if (x == "hello" || x == "world") && y == "world" && y == "hello" {
+ println!("Hello world!");
+ }
+
+ if x == "hello" && x == "world" && y == "world" && y == "hello" {
+ println!("Hello world!");
+ }
+
+ if 42 == 1337 && 'a' != 'A' {
+ println!("world!")
+ }
+
+ // Works because any if with an else statement cannot be collapsed.
+ if x == "hello" {
+ if y == "world" {
+ println!("Hello world!");
+ }
+ } else {
+ println!("Not Hello world");
+ }
+
+ if x == "hello" {
+ if y == "world" {
+ println!("Hello world!");
+ } else {
+ println!("Hello something else");
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ if y == "world" {
+ println!("world!")
+ }
+ }
+
+ if true {
+ } else {
+ assert!(true); // assert! is just an `if`
+ }
+
+
+ // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798
+ if x == "hello" {// Not collapsible
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" { // Not collapsible
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" {
+ // Not collapsible
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" && y == "world" { // Collapsible
+ println!("Hello world!");
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ // Not collapsible
+ if y == "world" {
+ println!("world!")
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ // Not collapsible
+ if let Some(42) = Some(42) {
+ println!("world!")
+ }
+ }
+
+ if x == "hello" {
+ /* Not collapsible */
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" { /* Not collapsible */
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ // Test behavior wrt. `let_chains`.
+ // None of the cases below should be collapsed.
+ fn truth() -> bool { true }
+
+ // Prefix:
+ if let 0 = 1 {
+ if truth() {}
+ }
+
+ // Suffix:
+ if truth() {
+ if let 0 = 1 {}
+ }
+
+ // Midfix:
+ if truth() {
+ if let 0 = 1 {
+ if truth() {}
+ }
+ }
+
+ // Fix #5962
+ if matches!(true, true) && matches!(true, true) {}
+
+ if true {
+ #[cfg(not(teehee))]
+ if true {
+ println!("Hello world!");
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/collapsible_if.rs b/src/tools/clippy/tests/ui/collapsible_if.rs
new file mode 100644
index 000000000..cd231a5d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_if.rs
@@ -0,0 +1,164 @@
+// run-rustfix
+#![allow(clippy::assertions_on_constants, clippy::equatable_if_let)]
+
+#[rustfmt::skip]
+#[warn(clippy::collapsible_if)]
+fn main() {
+ let x = "hello";
+ let y = "world";
+ if x == "hello" {
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" || x == "world" {
+ if y == "world" || y == "hello" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" && x == "world" {
+ if y == "world" || y == "hello" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" || x == "world" {
+ if y == "world" && y == "hello" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" && x == "world" {
+ if y == "world" && y == "hello" {
+ println!("Hello world!");
+ }
+ }
+
+ if 42 == 1337 {
+ if 'a' != 'A' {
+ println!("world!")
+ }
+ }
+
+ // Works because any if with an else statement cannot be collapsed.
+ if x == "hello" {
+ if y == "world" {
+ println!("Hello world!");
+ }
+ } else {
+ println!("Not Hello world");
+ }
+
+ if x == "hello" {
+ if y == "world" {
+ println!("Hello world!");
+ } else {
+ println!("Hello something else");
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ if y == "world" {
+ println!("world!")
+ }
+ }
+
+ if true {
+ } else {
+ assert!(true); // assert! is just an `if`
+ }
+
+
+ // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798
+ if x == "hello" {// Not collapsible
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" { // Not collapsible
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" {
+ // Not collapsible
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" {
+ if y == "world" { // Collapsible
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ // Not collapsible
+ if y == "world" {
+ println!("world!")
+ }
+ }
+
+ if x == "hello" {
+ print!("Hello ");
+ } else {
+ // Not collapsible
+ if let Some(42) = Some(42) {
+ println!("world!")
+ }
+ }
+
+ if x == "hello" {
+ /* Not collapsible */
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ if x == "hello" { /* Not collapsible */
+ if y == "world" {
+ println!("Hello world!");
+ }
+ }
+
+ // Test behavior wrt. `let_chains`.
+ // None of the cases below should be collapsed.
+ fn truth() -> bool { true }
+
+ // Prefix:
+ if let 0 = 1 {
+ if truth() {}
+ }
+
+ // Suffix:
+ if truth() {
+ if let 0 = 1 {}
+ }
+
+ // Midfix:
+ if truth() {
+ if let 0 = 1 {
+ if truth() {}
+ }
+ }
+
+ // Fix #5962
+ if matches!(true, true) {
+ if matches!(true, true) {}
+ }
+
+ if true {
+ #[cfg(not(teehee))]
+ if true {
+ println!("Hello world!");
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/collapsible_if.stderr b/src/tools/clippy/tests/ui/collapsible_if.stderr
new file mode 100644
index 000000000..674961238
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_if.stderr
@@ -0,0 +1,130 @@
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:9:5
+ |
+LL | / if x == "hello" {
+LL | | if y == "world" {
+LL | | println!("Hello world!");
+LL | | }
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::collapsible-if` implied by `-D warnings`
+help: collapse nested if block
+ |
+LL ~ if x == "hello" && y == "world" {
+LL + println!("Hello world!");
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:15:5
+ |
+LL | / if x == "hello" || x == "world" {
+LL | | if y == "world" || y == "hello" {
+LL | | println!("Hello world!");
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ if (x == "hello" || x == "world") && (y == "world" || y == "hello") {
+LL + println!("Hello world!");
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:21:5
+ |
+LL | / if x == "hello" && x == "world" {
+LL | | if y == "world" || y == "hello" {
+LL | | println!("Hello world!");
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ if x == "hello" && x == "world" && (y == "world" || y == "hello") {
+LL + println!("Hello world!");
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:27:5
+ |
+LL | / if x == "hello" || x == "world" {
+LL | | if y == "world" && y == "hello" {
+LL | | println!("Hello world!");
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ if (x == "hello" || x == "world") && y == "world" && y == "hello" {
+LL + println!("Hello world!");
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:33:5
+ |
+LL | / if x == "hello" && x == "world" {
+LL | | if y == "world" && y == "hello" {
+LL | | println!("Hello world!");
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ if x == "hello" && x == "world" && y == "world" && y == "hello" {
+LL + println!("Hello world!");
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:39:5
+ |
+LL | / if 42 == 1337 {
+LL | | if 'a' != 'A' {
+LL | | println!("world!")
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ if 42 == 1337 && 'a' != 'A' {
+LL + println!("world!")
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:95:5
+ |
+LL | / if x == "hello" {
+LL | | if y == "world" { // Collapsible
+LL | | println!("Hello world!");
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: collapse nested if block
+ |
+LL ~ if x == "hello" && y == "world" { // Collapsible
+LL + println!("Hello world!");
+LL + }
+ |
+
+error: this `if` statement can be collapsed
+ --> $DIR/collapsible_if.rs:154:5
+ |
+LL | / if matches!(true, true) {
+LL | | if matches!(true, true) {}
+LL | | }
+ | |_____^ help: collapse nested if block: `if matches!(true, true) && matches!(true, true) {}`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/collapsible_match.rs b/src/tools/clippy/tests/ui/collapsible_match.rs
new file mode 100644
index 000000000..603ae7dc9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_match.rs
@@ -0,0 +1,265 @@
+#![warn(clippy::collapsible_match)]
+#![allow(
+ clippy::needless_return,
+ clippy::no_effect,
+ clippy::single_match,
+ clippy::equatable_if_let
+)]
+
+fn lint_cases(opt_opt: Option<Option<u32>>, res_opt: Result<Option<u32>, String>) {
+ // match without block
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ _ => return,
+ }
+
+ // match with block
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ _ => return,
+ }
+
+ // if let, if let
+ if let Ok(val) = res_opt {
+ if let Some(n) = val {
+ take(n);
+ }
+ }
+
+ // if let else, if let else
+ if let Ok(val) = res_opt {
+ if let Some(n) = val {
+ take(n);
+ } else {
+ return;
+ }
+ } else {
+ return;
+ }
+
+ // if let, match
+ if let Ok(val) = res_opt {
+ match val {
+ Some(n) => foo(n),
+ _ => (),
+ }
+ }
+
+ // match, if let
+ match res_opt {
+ Ok(val) => {
+ if let Some(n) = val {
+ take(n);
+ }
+ },
+ _ => {},
+ }
+
+ // if let else, match
+ if let Ok(val) = res_opt {
+ match val {
+ Some(n) => foo(n),
+ _ => return,
+ }
+ } else {
+ return;
+ }
+
+ // match, if let else
+ match res_opt {
+ Ok(val) => {
+ if let Some(n) = val {
+ take(n);
+ } else {
+ return;
+ }
+ },
+ _ => return,
+ }
+
+ // None in inner match same as outer wild branch
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) => foo(n),
+ None => return,
+ },
+ _ => return,
+ }
+
+ // None in outer match same as inner wild branch
+ match opt_opt {
+ Some(val) => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ None => return,
+ }
+}
+
+fn negative_cases(res_opt: Result<Option<u32>, String>, res_res: Result<Result<u32, String>, String>) {
+ while let Some(x) = make() {
+ if let Some(1) = x {
+ todo!();
+ }
+ }
+ // no wild pattern in outer match
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ Err(_) => return,
+ }
+
+ // inner branch is not wild or None
+ match res_res {
+ Ok(val) => match val {
+ Ok(n) => foo(n),
+ Err(_) => return,
+ },
+ _ => return,
+ }
+
+ // statement before inner match
+ match res_opt {
+ Ok(val) => {
+ "hi buddy";
+ match val {
+ Some(n) => foo(n),
+ _ => return,
+ }
+ },
+ _ => return,
+ }
+
+ // statement after inner match
+ match res_opt {
+ Ok(val) => {
+ match val {
+ Some(n) => foo(n),
+ _ => return,
+ }
+ "hi buddy";
+ },
+ _ => return,
+ }
+
+ // wild branches do not match
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) => foo(n),
+ _ => {
+ "sup";
+ return;
+ },
+ },
+ _ => return,
+ }
+
+ // binding used in if guard
+ match res_opt {
+ Ok(val) if val.is_some() => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ _ => return,
+ }
+
+ // binding used in inner match body
+ match res_opt {
+ Ok(val) => match val {
+ Some(_) => take(val),
+ _ => return,
+ },
+ _ => return,
+ }
+
+ // if guard on inner match
+ {
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) if make() => foo(n),
+ _ => return,
+ },
+ _ => return,
+ }
+ match res_opt {
+ Ok(val) => match val {
+ _ => make(),
+ _ if make() => return,
+ },
+ _ => return,
+ }
+ }
+
+ // differing macro contexts
+ {
+ macro_rules! mac {
+ ($val:ident) => {
+ match $val {
+ Some(n) => foo(n),
+ _ => return,
+ }
+ };
+ }
+ match res_opt {
+ Ok(val) => mac!(val),
+ _ => return,
+ }
+ }
+
+ // OR pattern
+ enum E<T> {
+ A(T),
+ B(T),
+ C(T),
+ };
+ match make::<E<Option<u32>>>() {
+ E::A(val) | E::B(val) => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ _ => return,
+ }
+ match make::<Option<E<u32>>>() {
+ Some(val) => match val {
+ E::A(val) | E::B(val) => foo(val),
+ _ => return,
+ },
+ _ => return,
+ }
+ if let Ok(val) = res_opt {
+ if let Some(n) = val {
+ let _ = || {
+ // usage in closure
+ println!("{:?}", val);
+ };
+ }
+ }
+ let _: &dyn std::any::Any = match &Some(Some(1)) {
+ Some(e) => match e {
+ Some(e) => e,
+ e => e,
+ },
+ // else branch looks the same but the binding is different
+ e => e,
+ };
+}
+
+fn make<T>() -> T {
+ unimplemented!()
+}
+
+fn foo<T, U>(t: T) -> U {
+ unimplemented!()
+}
+
+fn take<T>(t: T) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/collapsible_match.stderr b/src/tools/clippy/tests/ui/collapsible_match.stderr
new file mode 100644
index 000000000..5f18b6935
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_match.stderr
@@ -0,0 +1,179 @@
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match.rs:12:20
+ |
+LL | Ok(val) => match val {
+ | ____________________^
+LL | | Some(n) => foo(n),
+LL | | _ => return,
+LL | | },
+ | |_________^
+ |
+ = note: `-D clippy::collapsible-match` implied by `-D warnings`
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:12:12
+ |
+LL | Ok(val) => match val {
+ | ^^^ replace this binding
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match.rs:21:20
+ |
+LL | Ok(val) => match val {
+ | ____________________^
+LL | | Some(n) => foo(n),
+LL | | _ => return,
+LL | | },
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:21:12
+ |
+LL | Ok(val) => match val {
+ | ^^^ replace this binding
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `if let` can be collapsed into the outer `if let`
+ --> $DIR/collapsible_match.rs:30:9
+ |
+LL | / if let Some(n) = val {
+LL | | take(n);
+LL | | }
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:29:15
+ |
+LL | if let Ok(val) = res_opt {
+ | ^^^ replace this binding
+LL | if let Some(n) = val {
+ | ^^^^^^^ with this pattern
+
+error: this `if let` can be collapsed into the outer `if let`
+ --> $DIR/collapsible_match.rs:37:9
+ |
+LL | / if let Some(n) = val {
+LL | | take(n);
+LL | | } else {
+LL | | return;
+LL | | }
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:36:15
+ |
+LL | if let Ok(val) = res_opt {
+ | ^^^ replace this binding
+LL | if let Some(n) = val {
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `if let`
+ --> $DIR/collapsible_match.rs:48:9
+ |
+LL | / match val {
+LL | | Some(n) => foo(n),
+LL | | _ => (),
+LL | | }
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:47:15
+ |
+LL | if let Ok(val) = res_opt {
+ | ^^^ replace this binding
+LL | match val {
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `if let` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match.rs:57:13
+ |
+LL | / if let Some(n) = val {
+LL | | take(n);
+LL | | }
+ | |_____________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:56:12
+ |
+LL | Ok(val) => {
+ | ^^^ replace this binding
+LL | if let Some(n) = val {
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `if let`
+ --> $DIR/collapsible_match.rs:66:9
+ |
+LL | / match val {
+LL | | Some(n) => foo(n),
+LL | | _ => return,
+LL | | }
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:65:15
+ |
+LL | if let Ok(val) = res_opt {
+ | ^^^ replace this binding
+LL | match val {
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `if let` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match.rs:77:13
+ |
+LL | / if let Some(n) = val {
+LL | | take(n);
+LL | | } else {
+LL | | return;
+LL | | }
+ | |_____________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:76:12
+ |
+LL | Ok(val) => {
+ | ^^^ replace this binding
+LL | if let Some(n) = val {
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match.rs:88:20
+ |
+LL | Ok(val) => match val {
+ | ____________________^
+LL | | Some(n) => foo(n),
+LL | | None => return,
+LL | | },
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:88:12
+ |
+LL | Ok(val) => match val {
+ | ^^^ replace this binding
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match.rs:97:22
+ |
+LL | Some(val) => match val {
+ | ______________________^
+LL | | Some(n) => foo(n),
+LL | | _ => return,
+LL | | },
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match.rs:97:14
+ |
+LL | Some(val) => match val {
+ | ^^^ replace this binding
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/collapsible_match2.rs b/src/tools/clippy/tests/ui/collapsible_match2.rs
new file mode 100644
index 000000000..c8fb0a39e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_match2.rs
@@ -0,0 +1,87 @@
+#![warn(clippy::collapsible_match)]
+#![allow(
+ clippy::needless_return,
+ clippy::no_effect,
+ clippy::single_match,
+ clippy::needless_borrow
+)]
+
+fn lint_cases(opt_opt: Option<Option<u32>>, res_opt: Result<Option<u32>, String>) {
+ // if guards on outer match
+ {
+ match res_opt {
+ Ok(val) if make() => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ _ => return,
+ }
+ match res_opt {
+ Ok(val) => match val {
+ Some(n) => foo(n),
+ _ => return,
+ },
+ _ if make() => return,
+ _ => return,
+ }
+ }
+
+ // macro
+ {
+ macro_rules! mac {
+ ($outer:expr => $pat:pat, $e:expr => $inner_pat:pat, $then:expr) => {
+ match $outer {
+ $pat => match $e {
+ $inner_pat => $then,
+ _ => return,
+ },
+ _ => return,
+ }
+ };
+ }
+ // Lint this since the patterns are not defined by the macro.
+ // Allows the lint to work on if_chain! for example.
+ // Fixing the lint requires knowledge of the specific macro, but we optimistically assume that
+ // there is still a better way to write this.
+ mac!(res_opt => Ok(val), val => Some(n), foo(n));
+ }
+
+ // deref reference value
+ match Some(&[1]) {
+ Some(s) => match *s {
+ [n] => foo(n),
+ _ => (),
+ },
+ _ => (),
+ }
+
+ // ref pattern and deref
+ match Some(&[1]) {
+ Some(ref s) => match s {
+ [n] => foo(n),
+ _ => (),
+ },
+ _ => (),
+ }
+}
+
+fn no_lint() {
+ // deref inner value (cannot pattern match with Vec)
+ match Some(vec![1]) {
+ Some(s) => match *s {
+ [n] => foo(n),
+ _ => (),
+ },
+ _ => (),
+ }
+}
+
+fn make<T>() -> T {
+ unimplemented!()
+}
+
+fn foo<T, U>(t: T) -> U {
+ unimplemented!()
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/collapsible_match2.stderr b/src/tools/clippy/tests/ui/collapsible_match2.stderr
new file mode 100644
index 000000000..fe64e4693
--- /dev/null
+++ b/src/tools/clippy/tests/ui/collapsible_match2.stderr
@@ -0,0 +1,97 @@
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match2.rs:13:34
+ |
+LL | Ok(val) if make() => match val {
+ | __________________________________^
+LL | | Some(n) => foo(n),
+LL | | _ => return,
+LL | | },
+ | |_____________^
+ |
+ = note: `-D clippy::collapsible-match` implied by `-D warnings`
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match2.rs:13:16
+ |
+LL | Ok(val) if make() => match val {
+ | ^^^ replace this binding
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match2.rs:20:24
+ |
+LL | Ok(val) => match val {
+ | ________________________^
+LL | | Some(n) => foo(n),
+LL | | _ => return,
+LL | | },
+ | |_____________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match2.rs:20:16
+ |
+LL | Ok(val) => match val {
+ | ^^^ replace this binding
+LL | Some(n) => foo(n),
+ | ^^^^^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match2.rs:34:29
+ |
+LL | $pat => match $e {
+ | _____________________________^
+LL | | $inner_pat => $then,
+LL | | _ => return,
+LL | | },
+ | |_____________________^
+...
+LL | mac!(res_opt => Ok(val), val => Some(n), foo(n));
+ | ------------------------------------------------ in this macro invocation
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match2.rs:46:28
+ |
+LL | mac!(res_opt => Ok(val), val => Some(n), foo(n));
+ | ^^^ ^^^^^^^ with this pattern
+ | |
+ | replace this binding
+ = note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match2.rs:51:20
+ |
+LL | Some(s) => match *s {
+ | ____________________^
+LL | | [n] => foo(n),
+LL | | _ => (),
+LL | | },
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match2.rs:51:14
+ |
+LL | Some(s) => match *s {
+ | ^ replace this binding
+LL | [n] => foo(n),
+ | ^^^ with this pattern
+
+error: this `match` can be collapsed into the outer `match`
+ --> $DIR/collapsible_match2.rs:60:24
+ |
+LL | Some(ref s) => match s {
+ | ________________________^
+LL | | [n] => foo(n),
+LL | | _ => (),
+LL | | },
+ | |_________^
+ |
+help: the outer pattern can be modified to include the inner pattern
+ --> $DIR/collapsible_match2.rs:60:14
+ |
+LL | Some(ref s) => match s {
+ | ^^^^^ replace this binding
+LL | [n] => foo(n),
+ | ^^^ with this pattern
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/comparison_chain.rs b/src/tools/clippy/tests/ui/comparison_chain.rs
new file mode 100644
index 000000000..c12c6a310
--- /dev/null
+++ b/src/tools/clippy/tests/ui/comparison_chain.rs
@@ -0,0 +1,234 @@
+#![allow(dead_code)]
+#![warn(clippy::comparison_chain)]
+
+fn a() {}
+fn b() {}
+fn c() {}
+
+fn f(x: u8, y: u8, z: u8) {
+ // Ignored: Only one branch
+ if x > y {
+ a()
+ }
+
+ if x > y {
+ a()
+ } else if x < y {
+ b()
+ }
+
+ // Ignored: Only one explicit conditional
+ if x > y {
+ a()
+ } else {
+ b()
+ }
+
+ if x > y {
+ a()
+ } else if x < y {
+ b()
+ } else {
+ c()
+ }
+
+ if x > y {
+ a()
+ } else if y > x {
+ b()
+ } else {
+ c()
+ }
+
+ if x > 1 {
+ a()
+ } else if x < 1 {
+ b()
+ } else if x == 1 {
+ c()
+ }
+
+ // Ignored: Binop args are not equivalent
+ if x > 1 {
+ a()
+ } else if y > 1 {
+ b()
+ } else {
+ c()
+ }
+
+ // Ignored: Binop args are not equivalent
+ if x > y {
+ a()
+ } else if x > z {
+ b()
+ } else if y > z {
+ c()
+ }
+
+ // Ignored: Not binary comparisons
+ if true {
+ a()
+ } else if false {
+ b()
+ } else {
+ c()
+ }
+}
+
+#[allow(clippy::float_cmp)]
+fn g(x: f64, y: f64, z: f64) {
+ // Ignored: f64 doesn't implement Ord
+ if x > y {
+ a()
+ } else if x < y {
+ b()
+ }
+
+ // Ignored: f64 doesn't implement Ord
+ if x > y {
+ a()
+ } else if x < y {
+ b()
+ } else {
+ c()
+ }
+
+ // Ignored: f64 doesn't implement Ord
+ if x > y {
+ a()
+ } else if y > x {
+ b()
+ } else {
+ c()
+ }
+
+ // Ignored: f64 doesn't implement Ord
+ if x > 1.0 {
+ a()
+ } else if x < 1.0 {
+ b()
+ } else if x == 1.0 {
+ c()
+ }
+}
+
+fn h<T: Ord>(x: T, y: T, z: T) {
+ if x > y {
+ a()
+ } else if x < y {
+ b()
+ }
+
+ if x > y {
+ a()
+ } else if x < y {
+ b()
+ } else {
+ c()
+ }
+
+ if x > y {
+ a()
+ } else if y > x {
+ b()
+ } else {
+ c()
+ }
+}
+
+// The following uses should be ignored
+mod issue_5212 {
+ use super::{a, b, c};
+ fn foo() -> u8 {
+ 21
+ }
+
+ fn same_operation_equals() {
+ // operands are fixed
+
+ if foo() == 42 {
+ a()
+ } else if foo() == 42 {
+ b()
+ }
+
+ if foo() == 42 {
+ a()
+ } else if foo() == 42 {
+ b()
+ } else {
+ c()
+ }
+
+ // operands are transposed
+
+ if foo() == 42 {
+ a()
+ } else if 42 == foo() {
+ b()
+ }
+ }
+
+ fn same_operation_not_equals() {
+ // operands are fixed
+
+ if foo() > 42 {
+ a()
+ } else if foo() > 42 {
+ b()
+ }
+
+ if foo() > 42 {
+ a()
+ } else if foo() > 42 {
+ b()
+ } else {
+ c()
+ }
+
+ if foo() < 42 {
+ a()
+ } else if foo() < 42 {
+ b()
+ }
+
+ if foo() < 42 {
+ a()
+ } else if foo() < 42 {
+ b()
+ } else {
+ c()
+ }
+ }
+}
+
+enum Sign {
+ Negative,
+ Positive,
+ Zero,
+}
+
+impl Sign {
+ const fn sign_i8(n: i8) -> Self {
+ if n == 0 {
+ Sign::Zero
+ } else if n > 0 {
+ Sign::Positive
+ } else {
+ Sign::Negative
+ }
+ }
+}
+
+const fn sign_i8(n: i8) -> Sign {
+ if n == 0 {
+ Sign::Zero
+ } else if n > 0 {
+ Sign::Positive
+ } else {
+ Sign::Negative
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/comparison_chain.stderr b/src/tools/clippy/tests/ui/comparison_chain.stderr
new file mode 100644
index 000000000..be25a80dd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/comparison_chain.stderr
@@ -0,0 +1,97 @@
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:14:5
+ |
+LL | / if x > y {
+LL | | a()
+LL | | } else if x < y {
+LL | | b()
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::comparison-chain` implied by `-D warnings`
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:27:5
+ |
+LL | / if x > y {
+LL | | a()
+LL | | } else if x < y {
+LL | | b()
+LL | | } else {
+LL | | c()
+LL | | }
+ | |_____^
+ |
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:35:5
+ |
+LL | / if x > y {
+LL | | a()
+LL | | } else if y > x {
+LL | | b()
+LL | | } else {
+LL | | c()
+LL | | }
+ | |_____^
+ |
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:43:5
+ |
+LL | / if x > 1 {
+LL | | a()
+LL | | } else if x < 1 {
+LL | | b()
+LL | | } else if x == 1 {
+LL | | c()
+LL | | }
+ | |_____^
+ |
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:117:5
+ |
+LL | / if x > y {
+LL | | a()
+LL | | } else if x < y {
+LL | | b()
+LL | | }
+ | |_____^
+ |
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:123:5
+ |
+LL | / if x > y {
+LL | | a()
+LL | | } else if x < y {
+LL | | b()
+LL | | } else {
+LL | | c()
+LL | | }
+ | |_____^
+ |
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: `if` chain can be rewritten with `match`
+ --> $DIR/comparison_chain.rs:131:5
+ |
+LL | / if x > y {
+LL | | a()
+LL | | } else if y > x {
+LL | | b()
+LL | | } else {
+LL | | c()
+LL | | }
+ | |_____^
+ |
+ = help: consider rewriting the `if` chain to use `cmp` and `match`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/comparison_to_empty.fixed b/src/tools/clippy/tests/ui/comparison_to_empty.fixed
new file mode 100644
index 000000000..261024cac
--- /dev/null
+++ b/src/tools/clippy/tests/ui/comparison_to_empty.fixed
@@ -0,0 +1,23 @@
+// run-rustfix
+
+#![warn(clippy::comparison_to_empty)]
+
+fn main() {
+ // Disallow comparisons to empty
+ let s = String::new();
+ let _ = s.is_empty();
+ let _ = !s.is_empty();
+
+ let v = vec![0];
+ let _ = v.is_empty();
+ let _ = !v.is_empty();
+
+ // Allow comparisons to non-empty
+ let s = String::new();
+ let _ = s == " ";
+ let _ = s != " ";
+
+ let v = vec![0];
+ let _ = v == [0];
+ let _ = v != [0];
+}
diff --git a/src/tools/clippy/tests/ui/comparison_to_empty.rs b/src/tools/clippy/tests/ui/comparison_to_empty.rs
new file mode 100644
index 000000000..98ddd9749
--- /dev/null
+++ b/src/tools/clippy/tests/ui/comparison_to_empty.rs
@@ -0,0 +1,23 @@
+// run-rustfix
+
+#![warn(clippy::comparison_to_empty)]
+
+fn main() {
+ // Disallow comparisons to empty
+ let s = String::new();
+ let _ = s == "";
+ let _ = s != "";
+
+ let v = vec![0];
+ let _ = v == [];
+ let _ = v != [];
+
+ // Allow comparisons to non-empty
+ let s = String::new();
+ let _ = s == " ";
+ let _ = s != " ";
+
+ let v = vec![0];
+ let _ = v == [0];
+ let _ = v != [0];
+}
diff --git a/src/tools/clippy/tests/ui/comparison_to_empty.stderr b/src/tools/clippy/tests/ui/comparison_to_empty.stderr
new file mode 100644
index 000000000..f69d6bd52
--- /dev/null
+++ b/src/tools/clippy/tests/ui/comparison_to_empty.stderr
@@ -0,0 +1,28 @@
+error: comparison to empty slice
+ --> $DIR/comparison_to_empty.rs:8:13
+ |
+LL | let _ = s == "";
+ | ^^^^^^^ help: using `is_empty` is clearer and more explicit: `s.is_empty()`
+ |
+ = note: `-D clippy::comparison-to-empty` implied by `-D warnings`
+
+error: comparison to empty slice
+ --> $DIR/comparison_to_empty.rs:9:13
+ |
+LL | let _ = s != "";
+ | ^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!s.is_empty()`
+
+error: comparison to empty slice
+ --> $DIR/comparison_to_empty.rs:12:13
+ |
+LL | let _ = v == [];
+ | ^^^^^^^ help: using `is_empty` is clearer and more explicit: `v.is_empty()`
+
+error: comparison to empty slice
+ --> $DIR/comparison_to_empty.rs:13:13
+ |
+LL | let _ = v != [];
+ | ^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!v.is_empty()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/copy_iterator.rs b/src/tools/clippy/tests/ui/copy_iterator.rs
new file mode 100644
index 000000000..ae67ebded
--- /dev/null
+++ b/src/tools/clippy/tests/ui/copy_iterator.rs
@@ -0,0 +1,21 @@
+#![warn(clippy::copy_iterator)]
+
+#[derive(Copy, Clone)]
+struct Countdown(u8);
+
+impl Iterator for Countdown {
+ type Item = u8;
+
+ fn next(&mut self) -> Option<u8> {
+ self.0.checked_sub(1).map(|c| {
+ self.0 = c;
+ c
+ })
+ }
+}
+
+fn main() {
+ let my_iterator = Countdown(5);
+ assert_eq!(my_iterator.take(1).count(), 1);
+ assert_eq!(my_iterator.count(), 5);
+}
diff --git a/src/tools/clippy/tests/ui/copy_iterator.stderr b/src/tools/clippy/tests/ui/copy_iterator.stderr
new file mode 100644
index 000000000..f8ce6af79
--- /dev/null
+++ b/src/tools/clippy/tests/ui/copy_iterator.stderr
@@ -0,0 +1,17 @@
+error: you are implementing `Iterator` on a `Copy` type
+ --> $DIR/copy_iterator.rs:6:1
+ |
+LL | / impl Iterator for Countdown {
+LL | | type Item = u8;
+LL | |
+LL | | fn next(&mut self) -> Option<u8> {
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::copy-iterator` implied by `-D warnings`
+ = note: consider implementing `IntoIterator` instead
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/associated-constant-ice.rs b/src/tools/clippy/tests/ui/crashes/associated-constant-ice.rs
new file mode 100644
index 000000000..948deba3e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/associated-constant-ice.rs
@@ -0,0 +1,13 @@
+/// Test for https://github.com/rust-lang/rust-clippy/issues/1698
+
+pub trait Trait {
+ const CONSTANT: u8;
+}
+
+impl Trait for u8 {
+ const CONSTANT: u8 = 2;
+}
+
+fn main() {
+ println!("{}", u8::CONSTANT * 10);
+}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/ice-4727-aux.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-4727-aux.rs
new file mode 100644
index 000000000..58a20caf6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-4727-aux.rs
@@ -0,0 +1,9 @@
+pub trait Trait {
+ fn fun(par: &str) -> &str;
+}
+
+impl Trait for str {
+ fn fun(par: &str) -> &str {
+ &par[0..1]
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7272-aux.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7272-aux.rs
new file mode 100644
index 000000000..780797e3c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7272-aux.rs
@@ -0,0 +1,14 @@
+pub fn warn<T>(_: T) {}
+
+macro_rules! define_macro {
+ ($d:tt $lower:ident $upper:ident) => {
+ #[macro_export]
+ macro_rules! $upper {
+ ($arg:tt) => {
+ $crate::$lower($arg)
+ };
+ }
+ };
+}
+
+define_macro! {$ warn WARNING}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7868-aux.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7868-aux.rs
new file mode 100644
index 000000000..bee29894b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7868-aux.rs
@@ -0,0 +1,3 @@
+fn zero() {
+ unsafe { 0 };
+}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7934-aux.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7934-aux.rs
new file mode 100644
index 000000000..4afbf027b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-7934-aux.rs
@@ -0,0 +1,4 @@
+fn zero() {
+ // SAFETY:
+ unsafe { 0 };
+}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/ice-8681-aux.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-8681-aux.rs
new file mode 100644
index 000000000..95b631513
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/ice-8681-aux.rs
@@ -0,0 +1,6 @@
+pub fn foo(x: &u32) -> u32 {
+ /* Safety:
+ * This is totally ok.
+ */
+ unsafe { *(x as *const u32) }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/proc_macro_crash.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/proc_macro_crash.rs
new file mode 100644
index 000000000..5ff2af7cd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/proc_macro_crash.rs
@@ -0,0 +1,38 @@
+// compile-flags: --emit=link
+// no-prefer-dynamic
+// ^ compiletest by default builds all aux files as dylibs, but we don't want that for proc-macro
+// crates. If we don't set this, compiletest will override the `crate_type` attribute below and
+// compile this as dylib. Removing this then causes the test to fail because a `dylib` crate can't
+// contain a proc-macro.
+
+#![feature(repr128)]
+#![allow(incomplete_features)]
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+
+#[proc_macro]
+pub fn macro_test(input_stream: TokenStream) -> TokenStream {
+ let first_token = input_stream.into_iter().next().unwrap();
+ let span = first_token.span();
+
+ TokenStream::from_iter(vec![
+ TokenTree::Ident(Ident::new("fn", Span::call_site())),
+ TokenTree::Ident(Ident::new("code", Span::call_site())),
+ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+ TokenTree::Group(Group::new(Delimiter::Brace, {
+ let mut clause = Group::new(Delimiter::Brace, TokenStream::new());
+ clause.set_span(span);
+
+ TokenStream::from_iter(vec![
+ TokenTree::Ident(Ident::new("if", Span::call_site())),
+ TokenTree::Ident(Ident::new("true", Span::call_site())),
+ TokenTree::Group(clause.clone()),
+ TokenTree::Ident(Ident::new("else", Span::call_site())),
+ TokenTree::Group(clause),
+ ])
+ })),
+ ])
+}
diff --git a/src/tools/clippy/tests/ui/crashes/auxiliary/use_self_macro.rs b/src/tools/clippy/tests/ui/crashes/auxiliary/use_self_macro.rs
new file mode 100644
index 000000000..a8a85b4ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/auxiliary/use_self_macro.rs
@@ -0,0 +1,15 @@
+macro_rules! use_self {
+ (
+ impl $ty:ident {
+ fn func(&$this:ident) {
+ [fields($($field:ident)*)]
+ }
+ }
+ ) => (
+ impl $ty {
+ fn func(&$this) {
+ let $ty { $($field),* } = $this;
+ }
+ }
+ )
+}
diff --git a/src/tools/clippy/tests/ui/crashes/cc_seme.rs b/src/tools/clippy/tests/ui/crashes/cc_seme.rs
new file mode 100644
index 000000000..98588be9c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/cc_seme.rs
@@ -0,0 +1,27 @@
+#[allow(dead_code)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/478
+
+enum Baz {
+ One,
+ Two,
+}
+
+struct Test {
+ t: Option<usize>,
+ b: Baz,
+}
+
+fn main() {}
+
+pub fn foo() {
+ use Baz::*;
+ let x = Test { t: Some(0), b: One };
+
+ match x {
+ Test { t: Some(_), b: One } => unreachable!(),
+ Test { t: Some(42), b: Two } => unreachable!(),
+ Test { t: None, .. } => unreachable!(),
+ Test { .. } => unreachable!(),
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs b/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs
new file mode 100644
index 000000000..dca32aa3b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs
@@ -0,0 +1,6 @@
+#![deny(clippy::all)]
+#![allow(unused_imports)]
+
+use std::*;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-1588.rs b/src/tools/clippy/tests/ui/crashes/ice-1588.rs
new file mode 100644
index 000000000..b0a3d11bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-1588.rs
@@ -0,0 +1,13 @@
+#![allow(clippy::all)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/1588
+
+fn main() {
+ match 1 {
+ 1 => {},
+ 2 => {
+ [0; 1];
+ },
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-1782.rs b/src/tools/clippy/tests/ui/crashes/ice-1782.rs
new file mode 100644
index 000000000..81af88962
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-1782.rs
@@ -0,0 +1,26 @@
+#![allow(dead_code, unused_variables)]
+
+/// Should not trigger an ICE in `SpanlessEq` / `consts::constant`
+///
+/// Issue: https://github.com/rust-lang/rust-clippy/issues/1782
+use std::{mem, ptr};
+
+fn spanless_eq_ice() {
+ let txt = "something";
+ match txt {
+ "something" => unsafe {
+ ptr::write(
+ ptr::null_mut() as *mut u32,
+ mem::transmute::<[u8; 4], _>([0, 0, 0, 255]),
+ )
+ },
+ _ => unsafe {
+ ptr::write(
+ ptr::null_mut() as *mut u32,
+ mem::transmute::<[u8; 4], _>([13, 246, 24, 255]),
+ )
+ },
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-1969.rs b/src/tools/clippy/tests/ui/crashes/ice-1969.rs
new file mode 100644
index 000000000..96a8fe6c2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-1969.rs
@@ -0,0 +1,13 @@
+#![allow(clippy::all)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/1969
+
+fn main() {}
+
+pub trait Convert {
+ type Action: From<*const f64>;
+
+ fn convert(val: *const f64) -> Self::Action {
+ val.into()
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2499.rs b/src/tools/clippy/tests/ui/crashes/ice-2499.rs
new file mode 100644
index 000000000..45b3b1869
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2499.rs
@@ -0,0 +1,26 @@
+#![allow(dead_code, clippy::char_lit_as_u8, clippy::needless_bool)]
+
+/// Should not trigger an ICE in `SpanlessHash` / `consts::constant`
+///
+/// Issue: https://github.com/rust-lang/rust-clippy/issues/2499
+
+fn f(s: &[u8]) -> bool {
+ let t = s[0] as char;
+
+ match t {
+ 'E' | 'W' => {},
+ 'T' => {
+ if s[0..4] != ['0' as u8; 4] {
+ return false;
+ } else {
+ return true;
+ }
+ },
+ _ => {
+ return false;
+ },
+ }
+ true
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2594.rs b/src/tools/clippy/tests/ui/crashes/ice-2594.rs
new file mode 100644
index 000000000..3f3986b6f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2594.rs
@@ -0,0 +1,20 @@
+#![allow(dead_code, unused_variables)]
+
+/// Should not trigger an ICE in `SpanlessHash` / `consts::constant`
+///
+/// Issue: https://github.com/rust-lang/rust-clippy/issues/2594
+
+fn spanless_hash_ice() {
+ let txt = "something";
+ let empty_header: [u8; 1] = [1; 1];
+
+ match txt {
+ "something" => {
+ let mut headers = [empty_header; 1];
+ },
+ "" => (),
+ _ => (),
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2727.rs b/src/tools/clippy/tests/ui/crashes/ice-2727.rs
new file mode 100644
index 000000000..56024abc8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2727.rs
@@ -0,0 +1,7 @@
+/// Test for https://github.com/rust-lang/rust-clippy/issues/2727
+
+pub fn f(new: fn()) {
+ new();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2760.rs b/src/tools/clippy/tests/ui/crashes/ice-2760.rs
new file mode 100644
index 000000000..f1a229f3f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2760.rs
@@ -0,0 +1,23 @@
+#![allow(
+ unused_variables,
+ clippy::blacklisted_name,
+ clippy::needless_pass_by_value,
+ dead_code
+)]
+
+/// This should not compile-fail with:
+///
+/// error[E0277]: the trait bound `T: Foo` is not satisfied
+// See rust-lang/rust-clippy#2760.
+
+trait Foo {
+ type Bar;
+}
+
+struct Baz<T: Foo> {
+ bar: T::Bar,
+}
+
+fn take<T: Foo>(baz: Baz<T>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2774.rs b/src/tools/clippy/tests/ui/crashes/ice-2774.rs
new file mode 100644
index 000000000..88cfa1f92
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2774.rs
@@ -0,0 +1,27 @@
+use std::collections::HashSet;
+
+// See rust-lang/rust-clippy#2774.
+
+#[derive(Eq, PartialEq, Debug, Hash)]
+pub struct Bar {
+ foo: Foo,
+}
+
+#[derive(Eq, PartialEq, Debug, Hash)]
+pub struct Foo;
+
+#[allow(clippy::implicit_hasher)]
+// This should not cause a "cannot relate bound region" ICE.
+pub fn add_barfoos_to_foos<'a>(bars: &HashSet<&'a Bar>) {
+ let mut foos = HashSet::new();
+ foos.extend(bars.iter().map(|b| &b.foo));
+}
+
+#[allow(clippy::implicit_hasher)]
+// Also, this should not cause a "cannot relate bound region" ICE.
+pub fn add_barfoos_to_foos2(bars: &HashSet<&Bar>) {
+ let mut foos = HashSet::new();
+ foos.extend(bars.iter().map(|b| &b.foo));
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2774.stderr b/src/tools/clippy/tests/ui/crashes/ice-2774.stderr
new file mode 100644
index 000000000..0c2d48f93
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2774.stderr
@@ -0,0 +1,10 @@
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/ice-2774.rs:15:1
+ |
+LL | pub fn add_barfoos_to_foos<'a>(bars: &HashSet<&'a Bar>) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::needless-lifetimes` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2862.rs b/src/tools/clippy/tests/ui/crashes/ice-2862.rs
new file mode 100644
index 000000000..8326e3663
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2862.rs
@@ -0,0 +1,16 @@
+/// Test for https://github.com/rust-lang/rust-clippy/issues/2862
+
+pub trait FooMap {
+ fn map<B, F: Fn() -> B>(&self, f: F) -> B;
+}
+
+impl FooMap for bool {
+ fn map<B, F: Fn() -> B>(&self, f: F) -> B {
+ f()
+ }
+}
+
+fn main() {
+ let a = true;
+ a.map(|| false);
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-2865.rs b/src/tools/clippy/tests/ui/crashes/ice-2865.rs
new file mode 100644
index 000000000..c62981396
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-2865.rs
@@ -0,0 +1,16 @@
+#![allow(dead_code, clippy::extra_unused_lifetimes)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/2865
+
+struct Ice {
+ size: String,
+}
+
+impl<'a> From<String> for Ice {
+ fn from(_: String) -> Self {
+ let text = || "iceberg".to_string();
+ Self { size: text() }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3151.rs b/src/tools/clippy/tests/ui/crashes/ice-3151.rs
new file mode 100644
index 000000000..268ba86fc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3151.rs
@@ -0,0 +1,15 @@
+/// Test for https://github.com/rust-lang/rust-clippy/issues/3151
+
+#[derive(Clone)]
+pub struct HashMap<V, S> {
+ hash_builder: S,
+ table: RawTable<V>,
+}
+
+#[derive(Clone)]
+pub struct RawTable<V> {
+ size: usize,
+ val: V,
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3462.rs b/src/tools/clippy/tests/ui/crashes/ice-3462.rs
new file mode 100644
index 000000000..02c49aa0d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3462.rs
@@ -0,0 +1,23 @@
+#![warn(clippy::all)]
+#![allow(clippy::blacklisted_name, clippy::equatable_if_let)]
+#![allow(unused)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/3462
+
+enum Foo {
+ Bar,
+ Baz,
+}
+
+fn bar(foo: Foo) {
+ macro_rules! baz {
+ () => {
+ if let Foo::Bar = foo {}
+ };
+ }
+
+ baz!();
+ baz!();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-360.rs b/src/tools/clippy/tests/ui/crashes/ice-360.rs
new file mode 100644
index 000000000..6555c19ca
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-360.rs
@@ -0,0 +1,12 @@
+fn main() {}
+
+fn no_panic<T>(slice: &[T]) {
+ let mut iter = slice.iter();
+ loop {
+ let _ = match iter.next() {
+ Some(ele) => ele,
+ None => break,
+ };
+ loop {}
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-360.stderr b/src/tools/clippy/tests/ui/crashes/ice-360.stderr
new file mode 100644
index 000000000..0eb7bb12b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-360.stderr
@@ -0,0 +1,25 @@
+error: this loop could be written as a `while let` loop
+ --> $DIR/ice-360.rs:5:5
+ |
+LL | / loop {
+LL | | let _ = match iter.next() {
+LL | | Some(ele) => ele,
+LL | | None => break,
+LL | | };
+LL | | loop {}
+LL | | }
+ | |_____^ help: try: `while let Some(ele) = iter.next() { .. }`
+ |
+ = note: `-D clippy::while-let-loop` implied by `-D warnings`
+
+error: empty `loop {}` wastes CPU cycles
+ --> $DIR/ice-360.rs:10:9
+ |
+LL | loop {}
+ | ^^^^^^^
+ |
+ = note: `-D clippy::empty-loop` implied by `-D warnings`
+ = help: you should either use `panic!()` or add `std::thread::sleep(..);` to the loop body
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3717.rs b/src/tools/clippy/tests/ui/crashes/ice-3717.rs
new file mode 100644
index 000000000..f50714643
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3717.rs
@@ -0,0 +1,10 @@
+#![deny(clippy::implicit_hasher)]
+
+use std::collections::HashSet;
+
+fn main() {}
+
+pub fn ice_3717(_: &HashSet<usize>) {
+ let _ = [0u8; 0];
+ let _: HashSet<usize> = HashSet::new();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3717.stderr b/src/tools/clippy/tests/ui/crashes/ice-3717.stderr
new file mode 100644
index 000000000..4d3d617b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3717.stderr
@@ -0,0 +1,22 @@
+error: parameter of type `HashSet` should be generalized over different hashers
+ --> $DIR/ice-3717.rs:7:21
+ |
+LL | pub fn ice_3717(_: &HashSet<usize>) {
+ | ^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/ice-3717.rs:1:9
+ |
+LL | #![deny(clippy::implicit_hasher)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+help: consider adding a type parameter
+ |
+LL | pub fn ice_3717<S: ::std::hash::BuildHasher + Default>(_: &HashSet<usize, S>) {
+ | +++++++++++++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | let _: HashSet<usize> = HashSet::default();
+ | ~~~~~~~~~~~~~~~~~~
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3741.rs b/src/tools/clippy/tests/ui/crashes/ice-3741.rs
new file mode 100644
index 000000000..1253ddcfa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3741.rs
@@ -0,0 +1,10 @@
+// aux-build:proc_macro_crash.rs
+
+#![warn(clippy::suspicious_else_formatting)]
+
+extern crate proc_macro_crash;
+use proc_macro_crash::macro_test;
+
+fn main() {
+ macro_test!(2);
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3747.rs b/src/tools/clippy/tests/ui/crashes/ice-3747.rs
new file mode 100644
index 000000000..cdf018cbc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3747.rs
@@ -0,0 +1,17 @@
+/// Test for https://github.com/rust-lang/rust-clippy/issues/3747
+
+macro_rules! a {
+ ( $pub:tt $($attr:tt)* ) => {
+ $($attr)* $pub fn say_hello() {}
+ };
+}
+
+macro_rules! b {
+ () => {
+ a! { pub }
+ };
+}
+
+b! {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3891.rs b/src/tools/clippy/tests/ui/crashes/ice-3891.rs
new file mode 100644
index 000000000..05c5134c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3891.rs
@@ -0,0 +1,3 @@
+fn main() {
+ 1x;
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3891.stderr b/src/tools/clippy/tests/ui/crashes/ice-3891.stderr
new file mode 100644
index 000000000..59469ec58
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3891.stderr
@@ -0,0 +1,10 @@
+error: invalid suffix `x` for number literal
+ --> $DIR/ice-3891.rs:2:5
+ |
+LL | 1x;
+ | ^^ invalid suffix `x`
+ |
+ = help: the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3969.rs b/src/tools/clippy/tests/ui/crashes/ice-3969.rs
new file mode 100644
index 000000000..9b68cac7f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3969.rs
@@ -0,0 +1,50 @@
+// https://github.com/rust-lang/rust-clippy/issues/3969
+// used to crash: error: internal compiler error:
+// src/librustc_traits/normalize_erasing_regions.rs:43: could not fully normalize `<i32 as
+// std::iter::Iterator>::Item test from rustc ./ui/trivial-bounds/trivial-bounds-inconsistent.rs
+
+// Check that tautologically false bounds are accepted, and are used
+// in type inference.
+#![feature(trivial_bounds)]
+#![allow(unused)]
+trait A {}
+
+impl A for i32 {}
+
+struct Dst<X: ?Sized> {
+ x: X,
+}
+
+struct TwoStrs(str, str)
+where
+ str: Sized;
+
+fn unsized_local()
+where
+ for<'a> Dst<dyn A + 'a>: Sized,
+{
+ let x: Dst<dyn A> = *(Box::new(Dst { x: 1 }) as Box<Dst<dyn A>>);
+}
+
+fn return_str() -> str
+where
+ str: Sized,
+{
+ *"Sized".to_string().into_boxed_str()
+}
+
+fn use_op(s: String) -> String
+where
+ String: ::std::ops::Neg<Output = String>,
+{
+ -s
+}
+
+fn use_for()
+where
+ i32: Iterator,
+{
+ for _ in 2i32 {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-3969.stderr b/src/tools/clippy/tests/ui/crashes/ice-3969.stderr
new file mode 100644
index 000000000..790180808
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-3969.stderr
@@ -0,0 +1,34 @@
+error: trait bound str: std::marker::Sized does not depend on any type or lifetime parameters
+ --> $DIR/ice-3969.rs:20:10
+ |
+LL | str: Sized;
+ | ^^^^^
+ |
+ = note: `-D trivial-bounds` implied by `-D warnings`
+
+error: trait bound for<'a> Dst<(dyn A + 'a)>: std::marker::Sized does not depend on any type or lifetime parameters
+ --> $DIR/ice-3969.rs:24:30
+ |
+LL | for<'a> Dst<dyn A + 'a>: Sized,
+ | ^^^^^
+
+error: trait bound str: std::marker::Sized does not depend on any type or lifetime parameters
+ --> $DIR/ice-3969.rs:31:10
+ |
+LL | str: Sized,
+ | ^^^^^
+
+error: trait bound std::string::String: std::ops::Neg does not depend on any type or lifetime parameters
+ --> $DIR/ice-3969.rs:38:13
+ |
+LL | String: ::std::ops::Neg<Output = String>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: trait bound i32: std::iter::Iterator does not depend on any type or lifetime parameters
+ --> $DIR/ice-3969.rs:45:10
+ |
+LL | i32: Iterator,
+ | ^^^^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4121.rs b/src/tools/clippy/tests/ui/crashes/ice-4121.rs
new file mode 100644
index 000000000..e1a142fdc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4121.rs
@@ -0,0 +1,13 @@
+use std::mem;
+
+pub struct Foo<A, B>(A, B);
+
+impl<A, B> Foo<A, B> {
+ const HOST_SIZE: usize = mem::size_of::<B>();
+
+ pub fn crash() -> bool {
+ Self::HOST_SIZE == 0
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4545.rs b/src/tools/clippy/tests/ui/crashes/ice-4545.rs
new file mode 100644
index 000000000..d9c9c2096
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4545.rs
@@ -0,0 +1,14 @@
+fn repro() {
+ trait Foo {
+ type Bar;
+ }
+
+ #[allow(dead_code)]
+ struct Baz<T: Foo> {
+ field: T::Bar,
+ }
+}
+
+fn main() {
+ repro();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4579.rs b/src/tools/clippy/tests/ui/crashes/ice-4579.rs
new file mode 100644
index 000000000..2e7e279f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4579.rs
@@ -0,0 +1,13 @@
+#![allow(clippy::single_match)]
+
+use std::ptr;
+
+fn main() {
+ match Some(0_usize) {
+ Some(_) => {
+ let s = "012345";
+ unsafe { ptr::read(s.as_ptr().offset(1) as *const [u8; 5]) };
+ },
+ _ => (),
+ };
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4671.rs b/src/tools/clippy/tests/ui/crashes/ice-4671.rs
new file mode 100644
index 000000000..64e8e7769
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4671.rs
@@ -0,0 +1,21 @@
+#![warn(clippy::use_self)]
+
+#[macro_use]
+#[path = "auxiliary/use_self_macro.rs"]
+mod use_self_macro;
+
+struct Foo {
+ a: u32,
+}
+
+use_self! {
+ impl Foo {
+ fn func(&self) {
+ [fields(
+ a
+ )]
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4727.rs b/src/tools/clippy/tests/ui/crashes/ice-4727.rs
new file mode 100644
index 000000000..2a4bc83f5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4727.rs
@@ -0,0 +1,6 @@
+#![warn(clippy::use_self)]
+
+#[path = "auxiliary/ice-4727-aux.rs"]
+mod aux;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4760.rs b/src/tools/clippy/tests/ui/crashes/ice-4760.rs
new file mode 100644
index 000000000..08b069617
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4760.rs
@@ -0,0 +1,9 @@
+const COUNT: usize = 2;
+struct Thing;
+trait Dummy {}
+
+const _: () = {
+ impl Dummy for Thing where [i32; COUNT]: Sized {}
+};
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4775.rs b/src/tools/clippy/tests/ui/crashes/ice-4775.rs
new file mode 100644
index 000000000..405e3039e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4775.rs
@@ -0,0 +1,11 @@
+pub struct ArrayWrapper<const N: usize>([usize; N]);
+
+impl<const N: usize> ArrayWrapper<{ N }> {
+ pub fn ice(&self) {
+ for i in self.0.iter() {
+ println!("{}", i);
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-4968.rs b/src/tools/clippy/tests/ui/crashes/ice-4968.rs
new file mode 100644
index 000000000..e0510d942
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-4968.rs
@@ -0,0 +1,21 @@
+// check-pass
+
+// Test for https://github.com/rust-lang/rust-clippy/issues/4968
+
+#![warn(clippy::unsound_collection_transmute)]
+#![allow(clippy::transmute_undefined_repr)]
+
+trait Trait {
+ type Assoc;
+}
+
+use std::mem::{self, ManuallyDrop};
+
+#[allow(unused)]
+fn func<T: Trait>(slice: Vec<T::Assoc>) {
+ unsafe {
+ let _: Vec<ManuallyDrop<T::Assoc>> = mem::transmute(slice);
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5207.rs b/src/tools/clippy/tests/ui/crashes/ice-5207.rs
new file mode 100644
index 000000000..f463f78a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5207.rs
@@ -0,0 +1,5 @@
+// Regression test for https://github.com/rust-lang/rust-clippy/issues/5207
+
+pub async fn bar<'a, T: 'a>(_: T) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5223.rs b/src/tools/clippy/tests/ui/crashes/ice-5223.rs
new file mode 100644
index 000000000..e3b3b27a6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5223.rs
@@ -0,0 +1,15 @@
+// Regression test for #5233
+#![warn(clippy::indexing_slicing, clippy::iter_cloned_collect)]
+
+pub struct KotomineArray<T, const N: usize> {
+ arr: [T; N],
+}
+
+impl<T: std::clone::Clone, const N: usize> KotomineArray<T, N> {
+ pub fn ice(self) {
+ let _ = self.arr[..];
+ let _ = self.arr.iter().cloned().collect::<Vec<_>>();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5238.rs b/src/tools/clippy/tests/ui/crashes/ice-5238.rs
new file mode 100644
index 000000000..989eb6d44
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5238.rs
@@ -0,0 +1,9 @@
+// Regression test for #5238 / https://github.com/rust-lang/rust/pull/69562
+
+#![feature(generators, generator_trait)]
+
+fn main() {
+ let _ = || {
+ yield;
+ };
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5389.rs b/src/tools/clippy/tests/ui/crashes/ice-5389.rs
new file mode 100644
index 000000000..de2621990
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5389.rs
@@ -0,0 +1,13 @@
+#![allow(clippy::explicit_counter_loop)]
+
+fn main() {
+ let v = vec![1, 2, 3];
+ let mut i = 0;
+ let max_storage_size = [0; 128 * 1024];
+ for item in &v {
+ bar(i, *item);
+ i += 1;
+ }
+}
+
+fn bar(_: usize, _: u32) {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5497.rs b/src/tools/clippy/tests/ui/crashes/ice-5497.rs
new file mode 100644
index 000000000..0769bce5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5497.rs
@@ -0,0 +1,11 @@
+// reduced from rustc issue-69020-assoc-const-arith-overflow.rs
+pub fn main() {}
+
+pub trait Foo {
+ const OOB: i32;
+}
+
+impl<T: Foo> Foo for Vec<T> {
+ const OOB: i32 = [1][1] + T::OOB;
+ //~^ ERROR operation will panic
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5497.stderr b/src/tools/clippy/tests/ui/crashes/ice-5497.stderr
new file mode 100644
index 000000000..e75e7dc91
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5497.stderr
@@ -0,0 +1,10 @@
+error: this operation will panic at runtime
+ --> $DIR/ice-5497.rs:9:22
+ |
+LL | const OOB: i32 = [1][1] + T::OOB;
+ | ^^^^^^ index out of bounds: the length is 1 but the index is 1
+ |
+ = note: `#[deny(unconditional_panic)]` on by default
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5579.rs b/src/tools/clippy/tests/ui/crashes/ice-5579.rs
new file mode 100644
index 000000000..e1842c73f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5579.rs
@@ -0,0 +1,17 @@
+trait IsErr {
+ fn is_err(&self, err: &str) -> bool;
+}
+
+impl<T> IsErr for Option<T> {
+ fn is_err(&self, _err: &str) -> bool {
+ true
+ }
+}
+
+fn main() {
+ let t = Some(1);
+
+ if t.is_err("") {
+ t.unwrap();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5835.rs b/src/tools/clippy/tests/ui/crashes/ice-5835.rs
new file mode 100644
index 000000000..5e99cb432
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5835.rs
@@ -0,0 +1,9 @@
+#[rustfmt::skip]
+pub struct Foo {
+ /// 位
+ /// ^ Do not remove this tab character.
+ /// It was required to trigger the ICE.
+ pub bar: u8,
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5835.stderr b/src/tools/clippy/tests/ui/crashes/ice-5835.stderr
new file mode 100644
index 000000000..c972bcb60
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5835.stderr
@@ -0,0 +1,10 @@
+error: using tabs in doc comments is not recommended
+ --> $DIR/ice-5835.rs:3:10
+ |
+LL | /// 位
+ | ^^^^ help: consider using four spaces per tab
+ |
+ = note: `-D clippy::tabs-in-doc-comments` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5872.rs b/src/tools/clippy/tests/ui/crashes/ice-5872.rs
new file mode 100644
index 000000000..68afa8f8c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5872.rs
@@ -0,0 +1,5 @@
+#![warn(clippy::needless_collect)]
+
+fn main() {
+ let _ = vec![1, 2, 3].into_iter().collect::<Vec<_>>().is_empty();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5872.stderr b/src/tools/clippy/tests/ui/crashes/ice-5872.stderr
new file mode 100644
index 000000000..a60ca345c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5872.stderr
@@ -0,0 +1,10 @@
+error: avoid using `collect()` when not needed
+ --> $DIR/ice-5872.rs:4:39
+ |
+LL | let _ = vec![1, 2, 3].into_iter().collect::<Vec<_>>().is_empty();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()`
+ |
+ = note: `-D clippy::needless-collect` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-5944.rs b/src/tools/clippy/tests/ui/crashes/ice-5944.rs
new file mode 100644
index 000000000..ce46bc1ac
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-5944.rs
@@ -0,0 +1,14 @@
+#![warn(clippy::repeat_once)]
+#![allow(clippy::let_unit_value)]
+
+trait Repeat {
+ fn repeat(&self) {}
+}
+
+impl Repeat for usize {
+ fn repeat(&self) {}
+}
+
+fn main() {
+ let _ = 42.repeat();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6139.rs b/src/tools/clippy/tests/ui/crashes/ice-6139.rs
new file mode 100644
index 000000000..f3966e47f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6139.rs
@@ -0,0 +1,7 @@
+trait T<'a> {}
+
+fn foo(_: Vec<Box<dyn T<'_>>>) {}
+
+fn main() {
+ foo(vec![]);
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6153.rs b/src/tools/clippy/tests/ui/crashes/ice-6153.rs
new file mode 100644
index 000000000..9f73f39f1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6153.rs
@@ -0,0 +1,9 @@
+pub struct S<'a, 'e>(&'a str, &'e str);
+
+pub type T<'a, 'e> = std::collections::HashMap<S<'a, 'e>, ()>;
+
+impl<'e, 'a: 'e> S<'a, 'e> {
+ pub fn foo(_a: &str, _b: &str, _map: &T) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6179.rs b/src/tools/clippy/tests/ui/crashes/ice-6179.rs
new file mode 100644
index 000000000..4fe92d356
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6179.rs
@@ -0,0 +1,21 @@
+//! This is a minimal reproducer for the ICE in https://github.com/rust-lang/rust-clippy/pull/6179.
+//! The ICE is mainly caused by using `hir_ty_to_ty`. See the discussion in the PR for details.
+
+#![warn(clippy::use_self)]
+#![allow(dead_code)]
+
+struct Foo;
+
+impl Foo {
+ fn new() -> Self {
+ impl Foo {
+ fn bar() {}
+ }
+
+ let _: _ = 1;
+
+ Self {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6250.rs b/src/tools/clippy/tests/ui/crashes/ice-6250.rs
new file mode 100644
index 000000000..c33580ff6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6250.rs
@@ -0,0 +1,16 @@
+// originally from glacier/fixed/77218.rs
+// ice while adjusting...
+
+pub struct Cache {
+ data: Vec<i32>,
+}
+
+pub fn list_data(cache: &Cache, key: usize) {
+ for reference in vec![1, 2, 3] {
+ if
+ /* let */
+ Some(reference) = cache.data.get(key) {
+ unimplemented!()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6250.stderr b/src/tools/clippy/tests/ui/crashes/ice-6250.stderr
new file mode 100644
index 000000000..878897c41
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6250.stderr
@@ -0,0 +1,30 @@
+error[E0601]: `main` function not found in crate `ice_6250`
+ --> $DIR/ice-6250.rs:16:2
+ |
+LL | }
+ | ^ consider adding a `main` function to `$DIR/ice-6250.rs`
+
+error[E0308]: mismatched types
+ --> $DIR/ice-6250.rs:12:14
+ |
+LL | for reference in vec![1, 2, 3] {
+ | --------- expected due to the type of this binding
+...
+LL | Some(reference) = cache.data.get(key) {
+ | ^^^^^^^^^ expected integer, found `&i32`
+ |
+help: consider dereferencing the borrow
+ |
+LL | Some(*reference) = cache.data.get(key) {
+ | +
+
+error[E0308]: mismatched types
+ --> $DIR/ice-6250.rs:12:9
+ |
+LL | Some(reference) = cache.data.get(key) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `bool`, found `()`
+
+error: aborting due to 3 previous errors
+
+Some errors have detailed explanations: E0308, E0601.
+For more information about an error, try `rustc --explain E0308`.
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6251.rs b/src/tools/clippy/tests/ui/crashes/ice-6251.rs
new file mode 100644
index 000000000..6aa779aae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6251.rs
@@ -0,0 +1,6 @@
+// originally from glacier/fixed/77329.rs
+// assertion failed: `(left == right) ; different DefIds
+
+fn bug<T>() -> impl Iterator<Item = [(); { |x: [u8]| x }]> {
+ std::iter::empty()
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6251.stderr b/src/tools/clippy/tests/ui/crashes/ice-6251.stderr
new file mode 100644
index 000000000..8da2965c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6251.stderr
@@ -0,0 +1,41 @@
+error[E0601]: `main` function not found in crate `ice_6251`
+ --> $DIR/ice-6251.rs:6:2
+ |
+LL | }
+ | ^ consider adding a `main` function to `$DIR/ice-6251.rs`
+
+error[E0277]: the size for values of type `[u8]` cannot be known at compilation time
+ --> $DIR/ice-6251.rs:4:45
+ |
+LL | fn bug<T>() -> impl Iterator<Item = [(); { |x: [u8]| x }]> {
+ | ^ doesn't have a size known at compile-time
+ |
+ = help: the trait `std::marker::Sized` is not implemented for `[u8]`
+ = help: unsized fn params are gated as an unstable feature
+help: function arguments must have a statically known size, borrowed types always have a known size
+ |
+LL | fn bug<T>() -> impl Iterator<Item = [(); { |x: &[u8]| x }]> {
+ | +
+
+error[E0277]: the size for values of type `[u8]` cannot be known at compilation time
+ --> $DIR/ice-6251.rs:4:54
+ |
+LL | fn bug<T>() -> impl Iterator<Item = [(); { |x: [u8]| x }]> {
+ | ^ doesn't have a size known at compile-time
+ |
+ = help: the trait `std::marker::Sized` is not implemented for `[u8]`
+ = note: the return type of a function must have a statically known size
+
+error[E0308]: mismatched types
+ --> $DIR/ice-6251.rs:4:44
+ |
+LL | fn bug<T>() -> impl Iterator<Item = [(); { |x: [u8]| x }]> {
+ | ^^^^^^^^^^^ expected `usize`, found closure
+ |
+ = note: expected type `usize`
+ found closure `[closure@$DIR/ice-6251.rs:4:44: 4:53]`
+
+error: aborting due to 4 previous errors
+
+Some errors have detailed explanations: E0277, E0308, E0601.
+For more information about an error, try `rustc --explain E0277`.
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6252.rs b/src/tools/clippy/tests/ui/crashes/ice-6252.rs
new file mode 100644
index 000000000..0ccf0aae9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6252.rs
@@ -0,0 +1,14 @@
+// originally from glacier fixed/77919.rs
+// encountered errors resolving bounds after type-checking
+trait TypeVal<T> {
+ const VAL: T;
+}
+struct Five;
+struct Multiply<N, M> {
+ _n: PhantomData,
+}
+impl<N, M> TypeVal<usize> for Multiply<N, M> where N: TypeVal<VAL> {}
+
+fn main() {
+ [1; <Multiply<Five, Five>>::VAL];
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6252.stderr b/src/tools/clippy/tests/ui/crashes/ice-6252.stderr
new file mode 100644
index 000000000..638e4a548
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6252.stderr
@@ -0,0 +1,36 @@
+error[E0412]: cannot find type `PhantomData` in this scope
+ --> $DIR/ice-6252.rs:8:9
+ |
+LL | _n: PhantomData,
+ | ^^^^^^^^^^^ not found in this scope
+ |
+help: consider importing one of these items
+ |
+LL | use core::marker::PhantomData;
+ |
+LL | use serde::__private::PhantomData;
+ |
+LL | use std::marker::PhantomData;
+ |
+
+error[E0412]: cannot find type `VAL` in this scope
+ --> $DIR/ice-6252.rs:10:63
+ |
+LL | impl<N, M> TypeVal<usize> for Multiply<N, M> where N: TypeVal<VAL> {}
+ | - ^^^ not found in this scope
+ | |
+ | help: you might be missing a type parameter: `, VAL`
+
+error[E0046]: not all trait items implemented, missing: `VAL`
+ --> $DIR/ice-6252.rs:10:1
+ |
+LL | const VAL: T;
+ | ------------ `VAL` from trait
+...
+LL | impl<N, M> TypeVal<usize> for Multiply<N, M> where N: TypeVal<VAL> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ missing `VAL` in implementation
+
+error: aborting due to 3 previous errors
+
+Some errors have detailed explanations: E0046, E0412.
+For more information about an error, try `rustc --explain E0046`.
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6254.rs b/src/tools/clippy/tests/ui/crashes/ice-6254.rs
new file mode 100644
index 000000000..a2a60a169
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6254.rs
@@ -0,0 +1,16 @@
+// originally from ./src/test/ui/pattern/usefulness/consts-opaque.rs
+// panicked at 'assertion failed: rows.iter().all(|r| r.len() == v.len())',
+// compiler/rustc_mir_build/src/thir/pattern/_match.rs:2030:5
+
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(PartialEq)]
+struct Foo(i32);
+const FOO_REF_REF: &&Foo = &&Foo(42);
+
+fn main() {
+ // This used to cause an ICE (https://github.com/rust-lang/rust/issues/78071)
+ match FOO_REF_REF {
+ FOO_REF_REF => {},
+ Foo(_) => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6254.stderr b/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
new file mode 100644
index 000000000..f37ab2e9b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6254.stderr
@@ -0,0 +1,12 @@
+error: to use a constant of type `Foo` in a pattern, `Foo` must be annotated with `#[derive(PartialEq, Eq)]`
+ --> $DIR/ice-6254.rs:13:9
+ |
+LL | FOO_REF_REF => {},
+ | ^^^^^^^^^^^
+ |
+ = note: `-D indirect-structural-match` implied by `-D warnings`
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #62411 <https://github.com/rust-lang/rust/issues/62411>
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6255.rs b/src/tools/clippy/tests/ui/crashes/ice-6255.rs
new file mode 100644
index 000000000..bd4a81d98
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6255.rs
@@ -0,0 +1,15 @@
+// originally from rustc ./src/test/ui/macros/issue-78325-inconsistent-resolution.rs
+// inconsistent resolution for a macro
+
+macro_rules! define_other_core {
+ ( ) => {
+ extern crate std as core;
+ //~^ ERROR macro-expanded `extern crate` items cannot shadow names passed with `--extern`
+ };
+}
+
+fn main() {
+ core::panic!();
+}
+
+define_other_core!();
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6255.stderr b/src/tools/clippy/tests/ui/crashes/ice-6255.stderr
new file mode 100644
index 000000000..db0cb25e3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6255.stderr
@@ -0,0 +1,13 @@
+error: macro-expanded `extern crate` items cannot shadow names passed with `--extern`
+ --> $DIR/ice-6255.rs:6:9
+ |
+LL | extern crate std as core;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | define_other_core!();
+ | -------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `define_other_core` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6256.rs b/src/tools/clippy/tests/ui/crashes/ice-6256.rs
new file mode 100644
index 000000000..67308263d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6256.rs
@@ -0,0 +1,15 @@
+// originally from rustc ./src/test/ui/regions/issue-78262.rs
+// ICE: to get the signature of a closure, use substs.as_closure().sig() not fn_sig()
+#![allow(clippy::upper_case_acronyms)]
+
+trait TT {}
+
+impl dyn TT {
+ fn func(&self) {}
+}
+
+#[rustfmt::skip]
+fn main() {
+ let f = |x: &dyn TT| x.func(); //[default]~ ERROR: mismatched types
+ //[nll]~^ ERROR: borrowed data escapes outside of closure
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6256.stderr b/src/tools/clippy/tests/ui/crashes/ice-6256.stderr
new file mode 100644
index 000000000..9cfcccf1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6256.stderr
@@ -0,0 +1,14 @@
+error[E0521]: borrowed data escapes outside of closure
+ --> $DIR/ice-6256.rs:13:26
+ |
+LL | let f = |x: &dyn TT| x.func(); //[default]~ ERROR: mismatched types
+ | - - ^^^^^^^^
+ | | | |
+ | | | `x` escapes the closure body here
+ | | | argument requires that `'1` must outlive `'static`
+ | | let's call the lifetime of this reference `'1`
+ | `x` is a reference that is only valid in the closure body
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0521`.
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6332.rs b/src/tools/clippy/tests/ui/crashes/ice-6332.rs
new file mode 100644
index 000000000..9dc92aa50
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6332.rs
@@ -0,0 +1,11 @@
+fn cmark_check() {
+ let mut link_err = false;
+ macro_rules! cmark_error {
+ ($bad:expr) => {
+ *$bad = true;
+ };
+ }
+ cmark_error!(&mut link_err);
+}
+
+pub fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6539.rs b/src/tools/clippy/tests/ui/crashes/ice-6539.rs
new file mode 100644
index 000000000..ac6c3e4ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6539.rs
@@ -0,0 +1,16 @@
+// The test for the ICE 6539: https://github.com/rust-lang/rust-clippy/issues/6539.
+// The cause is that `zero_sized_map_values` used `layout_of` with types from type aliases,
+// which is essentially the same as the ICE 4968.
+// Note that only type aliases with associated types caused the crash this time,
+// not others such as trait impls.
+
+use std::collections::{BTreeMap, HashMap};
+
+pub trait Trait {
+ type Assoc;
+}
+
+type TypeAlias<T> = HashMap<(), <T as Trait>::Assoc>;
+type TypeAlias2<T> = BTreeMap<(), <T as Trait>::Assoc>;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6792.rs b/src/tools/clippy/tests/ui/crashes/ice-6792.rs
new file mode 100644
index 000000000..9cbafc716
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6792.rs
@@ -0,0 +1,20 @@
+//! This is a reproducer for the ICE 6792: https://github.com/rust-lang/rust-clippy/issues/6792.
+//! The ICE is caused by using `TyCtxt::type_of(assoc_type_id)`.
+
+trait Trait {
+ type Ty;
+
+ fn broken() -> Self::Ty;
+}
+
+struct Foo;
+
+impl Trait for Foo {
+ type Ty = Foo;
+
+ fn broken() -> Self::Ty {
+ Self::Ty {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6793.rs b/src/tools/clippy/tests/ui/crashes/ice-6793.rs
new file mode 100644
index 000000000..12a4a0d25
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6793.rs
@@ -0,0 +1,23 @@
+//! This is a reproducer for the ICE 6793: https://github.com/rust-lang/rust-clippy/issues/6793.
+//! The ICE is caused by using `TyCtxt::type_of(assoc_type_id)`, which is the same as the ICE 6792.
+
+trait Trait {
+ type Ty: 'static + Clone;
+
+ fn broken() -> Self::Ty;
+}
+
+#[derive(Clone)]
+struct MyType {
+ x: i32,
+}
+
+impl Trait for MyType {
+ type Ty = MyType;
+
+ fn broken() -> Self::Ty {
+ Self::Ty { x: 1 }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6840.rs b/src/tools/clippy/tests/ui/crashes/ice-6840.rs
new file mode 100644
index 000000000..d789f60c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-6840.rs
@@ -0,0 +1,31 @@
+//! This is a reproducer for the ICE 6840: https://github.com/rust-lang/rust-clippy/issues/6840.
+//! The ICE is caused by `TyCtxt::layout_of` and `is_normalizable` not being strict enough
+#![allow(dead_code)]
+use std::collections::HashMap;
+
+pub trait Rule {
+ type DependencyKey;
+}
+
+pub struct RuleEdges<R: Rule> {
+ dependencies: R::DependencyKey,
+}
+
+type RuleDependencyEdges<R> = HashMap<u32, RuleEdges<R>>;
+
+// reproducer from the GitHub issue ends here
+// but check some additional variants
+type RuleDependencyEdgesArray<R> = HashMap<u32, [RuleEdges<R>; 8]>;
+type RuleDependencyEdgesSlice<R> = HashMap<u32, &'static [RuleEdges<R>]>;
+type RuleDependencyEdgesRef<R> = HashMap<u32, &'static RuleEdges<R>>;
+type RuleDependencyEdgesRaw<R> = HashMap<u32, *const RuleEdges<R>>;
+type RuleDependencyEdgesTuple<R> = HashMap<u32, (RuleEdges<R>, RuleEdges<R>)>;
+
+// and additional checks to make sure the fix doesn't have a stack-overflow issue
+// on self-containing types
+pub struct SelfContaining {
+ inner: Box<SelfContaining>,
+}
+type SelfContainingEdges = HashMap<u32, SelfContaining>;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-700.rs b/src/tools/clippy/tests/ui/crashes/ice-700.rs
new file mode 100644
index 000000000..0cbceedbd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-700.rs
@@ -0,0 +1,9 @@
+#![deny(clippy::all)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/700
+
+fn core() {}
+
+fn main() {
+ core();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7012.rs b/src/tools/clippy/tests/ui/crashes/ice-7012.rs
new file mode 100644
index 000000000..60bdbc4f1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7012.rs
@@ -0,0 +1,17 @@
+#![allow(clippy::all)]
+
+enum _MyOption {
+ None,
+ Some(()),
+}
+
+impl _MyOption {
+ fn _foo(&self) {
+ match self {
+ &Self::Some(_) => {},
+ _ => {},
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7126.rs b/src/tools/clippy/tests/ui/crashes/ice-7126.rs
new file mode 100644
index 000000000..ca563ba09
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7126.rs
@@ -0,0 +1,14 @@
+// This test requires a feature gated const fn and will stop working in the future.
+
+#![feature(const_btree_new)]
+
+use std::collections::BTreeMap;
+
+struct Foo(BTreeMap<i32, i32>);
+impl Foo {
+ fn new() -> Self {
+ Self(BTreeMap::new())
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7169.rs b/src/tools/clippy/tests/ui/crashes/ice-7169.rs
new file mode 100644
index 000000000..82095febc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7169.rs
@@ -0,0 +1,9 @@
+#[derive(Default)]
+struct A<T> {
+ a: Vec<A<T>>,
+ b: T,
+}
+
+fn main() {
+ if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {}
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
new file mode 100644
index 000000000..5a9cd3238
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
@@ -0,0 +1,10 @@
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/ice-7169.rs:8:12
+ |
+LL | if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {}
+ | -------^^^^^-------------------------------------- help: try this: `if Ok::<_, ()>(A::<String>::default()).is_ok()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7231.rs b/src/tools/clippy/tests/ui/crashes/ice-7231.rs
new file mode 100644
index 000000000..4ad0d3513
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7231.rs
@@ -0,0 +1,9 @@
+#![allow(clippy::never_loop)]
+
+async fn f() {
+ loop {
+ break;
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7272.rs b/src/tools/clippy/tests/ui/crashes/ice-7272.rs
new file mode 100644
index 000000000..57ab6ca14
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7272.rs
@@ -0,0 +1,12 @@
+// aux-build:ice-7272-aux.rs
+
+#![allow(clippy::no_effect)]
+
+extern crate ice_7272_aux;
+
+use ice_7272_aux::*;
+
+pub fn main() {
+ || WARNING!("Style changed!");
+ || "}{";
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7340.rs b/src/tools/clippy/tests/ui/crashes/ice-7340.rs
new file mode 100644
index 000000000..7d2351d60
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7340.rs
@@ -0,0 +1,6 @@
+#![allow(clippy::no_effect)]
+
+fn main() {
+ const CONSTANT: usize = 8;
+ [1; 1 % CONSTANT];
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7410.rs b/src/tools/clippy/tests/ui/crashes/ice-7410.rs
new file mode 100644
index 000000000..85fa42103
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7410.rs
@@ -0,0 +1,32 @@
+// compile-flags: -Clink-arg=-nostartfiles
+// ignore-macos
+// ignore-windows
+
+#![feature(lang_items, start, libc)]
+#![no_std]
+#![allow(clippy::if_same_then_else)]
+#![allow(clippy::redundant_pattern_matching)]
+
+use core::panic::PanicInfo;
+
+struct S;
+
+impl Drop for S {
+ fn drop(&mut self) {}
+}
+
+#[start]
+fn main(argc: isize, argv: *const *const u8) -> isize {
+ if let Some(_) = Some(S) {
+ } else {
+ }
+ 0
+}
+
+#[panic_handler]
+fn panic(_info: &PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7423.rs b/src/tools/clippy/tests/ui/crashes/ice-7423.rs
new file mode 100644
index 000000000..31340b012
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7423.rs
@@ -0,0 +1,13 @@
+pub trait Trait {
+ fn f();
+}
+
+impl Trait for usize {
+ fn f() {
+ extern "C" {
+ fn g() -> usize;
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7868.rs b/src/tools/clippy/tests/ui/crashes/ice-7868.rs
new file mode 100644
index 000000000..c6932164e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7868.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::undocumented_unsafe_blocks)]
+#![allow(clippy::no_effect)]
+
+#[path = "auxiliary/ice-7868-aux.rs"]
+mod zero;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7868.stderr b/src/tools/clippy/tests/ui/crashes/ice-7868.stderr
new file mode 100644
index 000000000..1a33e6475
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7868.stderr
@@ -0,0 +1,11 @@
+error: unsafe block missing a safety comment
+ --> $DIR/auxiliary/ice-7868-aux.rs:2:5
+ |
+LL | unsafe { 0 };
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::undocumented-unsafe-blocks` implied by `-D warnings`
+ = help: consider adding a safety comment on the preceding line
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7869.rs b/src/tools/clippy/tests/ui/crashes/ice-7869.rs
new file mode 100644
index 000000000..8f97a063a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7869.rs
@@ -0,0 +1,7 @@
+enum Tila {
+ TyöAlkoi,
+ TyöKeskeytyi,
+ TyöValmis,
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7869.stderr b/src/tools/clippy/tests/ui/crashes/ice-7869.stderr
new file mode 100644
index 000000000..4fa9fb27e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7869.stderr
@@ -0,0 +1,15 @@
+error: all variants have the same prefix: `Työ`
+ --> $DIR/ice-7869.rs:1:1
+ |
+LL | / enum Tila {
+LL | | TyöAlkoi,
+LL | | TyöKeskeytyi,
+LL | | TyöValmis,
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::enum-variant-names` implied by `-D warnings`
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7934.rs b/src/tools/clippy/tests/ui/crashes/ice-7934.rs
new file mode 100644
index 000000000..a4691c413
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-7934.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::undocumented_unsafe_blocks)]
+#![allow(clippy::no_effect)]
+
+#[path = "auxiliary/ice-7934-aux.rs"]
+mod zero;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8250.rs b/src/tools/clippy/tests/ui/crashes/ice-8250.rs
new file mode 100644
index 000000000..d9a5ee116
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8250.rs
@@ -0,0 +1,6 @@
+fn _f(s: &str) -> Option<()> {
+ let _ = s[1..].splitn(2, '.').next()?;
+ Some(())
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
new file mode 100644
index 000000000..8ed8f3b3a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
@@ -0,0 +1,10 @@
+error: unnecessary use of `splitn`
+ --> $DIR/ice-8250.rs:2:13
+ |
+LL | let _ = s[1..].splitn(2, '.').next()?;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s[1..].split('.')`
+ |
+ = note: `-D clippy::needless-splitn` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8386.rs b/src/tools/clippy/tests/ui/crashes/ice-8386.rs
new file mode 100644
index 000000000..3e38b1408
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8386.rs
@@ -0,0 +1,3 @@
+fn f(x: u32, mut arg: &String) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8681.rs b/src/tools/clippy/tests/ui/crashes/ice-8681.rs
new file mode 100644
index 000000000..ee14f011f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8681.rs
@@ -0,0 +1,10 @@
+// aux-build: ice-8681-aux.rs
+
+#![warn(clippy::undocumented_unsafe_blocks)]
+
+#[path = "auxiliary/ice-8681-aux.rs"]
+mod ice_8681_aux;
+
+fn main() {
+ let _ = ice_8681_aux::foo(&0u32);
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8821.rs b/src/tools/clippy/tests/ui/crashes/ice-8821.rs
new file mode 100644
index 000000000..fb87b79ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8821.rs
@@ -0,0 +1,8 @@
+#![warn(clippy::let_unit_value)]
+
+fn f() {}
+static FN: fn() = f;
+
+fn main() {
+ let _: () = FN();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8821.stderr b/src/tools/clippy/tests/ui/crashes/ice-8821.stderr
new file mode 100644
index 000000000..486096e0a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8821.stderr
@@ -0,0 +1,10 @@
+error: this let-binding has unit value
+ --> $DIR/ice-8821.rs:7:5
+ |
+LL | let _: () = FN();
+ | ^^^^^^^^^^^^^^^^^ help: omit the `let` binding: `FN();`
+ |
+ = note: `-D clippy::let-unit-value` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8850.rs b/src/tools/clippy/tests/ui/crashes/ice-8850.rs
new file mode 100644
index 000000000..f2747ab22
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8850.rs
@@ -0,0 +1,27 @@
+fn fn_pointer_static() -> usize {
+ static FN: fn() -> usize = || 1;
+ let res = FN() + 1;
+ res
+}
+
+fn fn_pointer_const() -> usize {
+ const FN: fn() -> usize = || 1;
+ let res = FN() + 1;
+ res
+}
+
+fn deref_to_dyn_fn() -> usize {
+ struct Derefs;
+ impl std::ops::Deref for Derefs {
+ type Target = dyn Fn() -> usize;
+
+ fn deref(&self) -> &Self::Target {
+ &|| 2
+ }
+ }
+ static FN: Derefs = Derefs;
+ let res = FN() + 1;
+ res
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8850.stderr b/src/tools/clippy/tests/ui/crashes/ice-8850.stderr
new file mode 100644
index 000000000..620fd1eda
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-8850.stderr
@@ -0,0 +1,45 @@
+error: returning the result of a `let` binding from a block
+ --> $DIR/ice-8850.rs:4:5
+ |
+LL | let res = FN() + 1;
+ | ------------------- unnecessary `let` binding
+LL | res
+ | ^^^
+ |
+ = note: `-D clippy::let-and-return` implied by `-D warnings`
+help: return the expression directly
+ |
+LL ~
+LL ~ FN() + 1
+ |
+
+error: returning the result of a `let` binding from a block
+ --> $DIR/ice-8850.rs:10:5
+ |
+LL | let res = FN() + 1;
+ | ------------------- unnecessary `let` binding
+LL | res
+ | ^^^
+ |
+help: return the expression directly
+ |
+LL ~
+LL ~ FN() + 1
+ |
+
+error: returning the result of a `let` binding from a block
+ --> $DIR/ice-8850.rs:24:5
+ |
+LL | let res = FN() + 1;
+ | ------------------- unnecessary `let` binding
+LL | res
+ | ^^^
+ |
+help: return the expression directly
+ |
+LL ~
+LL ~ FN() + 1
+ |
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-9041.rs b/src/tools/clippy/tests/ui/crashes/ice-9041.rs
new file mode 100644
index 000000000..55cc9bc99
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-9041.rs
@@ -0,0 +1,8 @@
+pub struct Thing;
+
+pub fn has_thing(things: &[Thing]) -> bool {
+ let is_thing_ready = |_peer: &Thing| -> bool { todo!() };
+ things.iter().find(|p| is_thing_ready(p)).is_some()
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-9041.stderr b/src/tools/clippy/tests/ui/crashes/ice-9041.stderr
new file mode 100644
index 000000000..f5038f0a8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-9041.stderr
@@ -0,0 +1,10 @@
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/ice-9041.rs:5:19
+ |
+LL | things.iter().find(|p| is_thing_ready(p)).is_some()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|p| is_thing_ready(&p))`
+ |
+ = note: `-D clippy::search-is-some` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-9238.rs b/src/tools/clippy/tests/ui/crashes/ice-9238.rs
new file mode 100644
index 000000000..ee6abd519
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-9238.rs
@@ -0,0 +1,12 @@
+#![allow(incomplete_features)]
+#![feature(generic_const_exprs)]
+#![warn(clippy::branches_sharing_code)]
+
+const fn f() -> usize {
+ 2
+}
+const C: [f64; f()] = [0f64; f()];
+
+fn main() {
+ let _ = if true { C[0] } else { C[1] };
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-9242.rs b/src/tools/clippy/tests/ui/crashes/ice-9242.rs
new file mode 100644
index 000000000..0099e6e2f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-9242.rs
@@ -0,0 +1,8 @@
+enum E {
+ X(),
+ Y,
+}
+
+fn main() {
+ let _ = if let E::X() = E::X() { 1 } else { 2 };
+}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-96721.rs b/src/tools/clippy/tests/ui/crashes/ice-96721.rs
new file mode 100644
index 000000000..4b3fb7640
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-96721.rs
@@ -0,0 +1,10 @@
+macro_rules! foo {
+ () => {
+ "bar.rs"
+ };
+}
+
+#[path = foo!()] //~ ERROR malformed `path` attribute
+mod abc {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-96721.stderr b/src/tools/clippy/tests/ui/crashes/ice-96721.stderr
new file mode 100644
index 000000000..78c567b8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice-96721.stderr
@@ -0,0 +1,8 @@
+error: malformed `path` attribute input
+ --> $DIR/ice-96721.rs:7:1
+ |
+LL | #[path = foo!()] //~ ERROR malformed `path` attribute
+ | ^^^^^^^^^^^^^^^^ help: must be of the form: `#[path = "file"]`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice_exacte_size.rs b/src/tools/clippy/tests/ui/crashes/ice_exacte_size.rs
new file mode 100644
index 000000000..30e4b11ec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/ice_exacte_size.rs
@@ -0,0 +1,19 @@
+#![deny(clippy::all)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/1336
+
+#[allow(dead_code)]
+struct Foo;
+
+impl Iterator for Foo {
+ type Item = ();
+
+ fn next(&mut self) -> Option<()> {
+ let _ = self.len() == 0;
+ unimplemented!()
+ }
+}
+
+impl ExactSizeIterator for Foo {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/if_same_then_else.rs b/src/tools/clippy/tests/ui/crashes/if_same_then_else.rs
new file mode 100644
index 000000000..2f9132929
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/if_same_then_else.rs
@@ -0,0 +1,16 @@
+#![allow(clippy::comparison_chain)]
+#![deny(clippy::if_same_then_else)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/2426
+
+fn main() {}
+
+pub fn foo(a: i32, b: i32) -> Option<&'static str> {
+ if a == b {
+ None
+ } else if a > b {
+ Some("a pfeil b")
+ } else {
+ None
+ }
+}
diff --git a/src/tools/clippy/tests/ui/crashes/implements-trait.rs b/src/tools/clippy/tests/ui/crashes/implements-trait.rs
new file mode 100644
index 000000000..4502b0147
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/implements-trait.rs
@@ -0,0 +1,5 @@
+#[allow(clippy::needless_borrowed_reference)]
+fn main() {
+ let mut v = Vec::<String>::new();
+ let _ = v.iter_mut().filter(|&ref a| a.is_empty());
+}
diff --git a/src/tools/clippy/tests/ui/crashes/inherent_impl.rs b/src/tools/clippy/tests/ui/crashes/inherent_impl.rs
new file mode 100644
index 000000000..aeb27b5ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/inherent_impl.rs
@@ -0,0 +1,26 @@
+#![deny(clippy::multiple_inherent_impl)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/4578
+
+macro_rules! impl_foo {
+ ($struct:ident) => {
+ impl $struct {
+ fn foo() {}
+ }
+ };
+}
+
+macro_rules! impl_bar {
+ ($struct:ident) => {
+ impl $struct {
+ fn bar() {}
+ }
+ };
+}
+
+struct MyStruct;
+
+impl_foo!(MyStruct);
+impl_bar!(MyStruct);
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/issue-825.rs b/src/tools/clippy/tests/ui/crashes/issue-825.rs
new file mode 100644
index 000000000..05696e3d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/issue-825.rs
@@ -0,0 +1,25 @@
+#![allow(warnings)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/825
+
+// this should compile in a reasonable amount of time
+fn rust_type_id(name: &str) {
+ if "bool" == &name[..]
+ || "uint" == &name[..]
+ || "u8" == &name[..]
+ || "u16" == &name[..]
+ || "u32" == &name[..]
+ || "f32" == &name[..]
+ || "f64" == &name[..]
+ || "i8" == &name[..]
+ || "i16" == &name[..]
+ || "i32" == &name[..]
+ || "i64" == &name[..]
+ || "Self" == &name[..]
+ || "str" == &name[..]
+ {
+ unreachable!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/issues_loop_mut_cond.rs b/src/tools/clippy/tests/ui/crashes/issues_loop_mut_cond.rs
new file mode 100644
index 000000000..bb238c81e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/issues_loop_mut_cond.rs
@@ -0,0 +1,28 @@
+#![allow(dead_code)]
+
+/// Issue: https://github.com/rust-lang/rust-clippy/issues/2596
+pub fn loop_on_block_condition(u: &mut isize) {
+ while { *u < 0 } {
+ *u += 1;
+ }
+}
+
+/// https://github.com/rust-lang/rust-clippy/issues/2584
+fn loop_with_unsafe_condition(ptr: *const u8) {
+ let mut len = 0;
+ while unsafe { *ptr.offset(len) } != 0 {
+ len += 1;
+ }
+}
+
+/// https://github.com/rust-lang/rust-clippy/issues/2710
+static mut RUNNING: bool = true;
+fn loop_on_static_condition() {
+ unsafe {
+ while RUNNING {
+ RUNNING = false;
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/match_same_arms_const.rs b/src/tools/clippy/tests/ui/crashes/match_same_arms_const.rs
new file mode 100644
index 000000000..94c939665
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/match_same_arms_const.rs
@@ -0,0 +1,18 @@
+#![deny(clippy::match_same_arms)]
+
+/// Test for https://github.com/rust-lang/rust-clippy/issues/2427
+
+const PRICE_OF_SWEETS: u32 = 5;
+const PRICE_OF_KINDNESS: u32 = 0;
+const PRICE_OF_DRINKS: u32 = 5;
+
+pub fn price(thing: &str) -> u32 {
+ match thing {
+ "rolo" => PRICE_OF_SWEETS,
+ "advice" => PRICE_OF_KINDNESS,
+ "juice" => PRICE_OF_DRINKS,
+ _ => panic!(),
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/mut_mut_macro.rs b/src/tools/clippy/tests/ui/crashes/mut_mut_macro.rs
new file mode 100644
index 000000000..a238e7896
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/mut_mut_macro.rs
@@ -0,0 +1,34 @@
+#![deny(clippy::mut_mut, clippy::zero_ptr, clippy::cmp_nan)]
+#![allow(dead_code)]
+
+// FIXME: compiletest + extern crates don't work together. To make this test work, it would need
+// the following three lines and the lazy_static crate.
+//
+// #[macro_use]
+// extern crate lazy_static;
+// use std::collections::HashMap;
+
+/// ensure that we don't suggest `is_nan` and `is_null` inside constants
+/// FIXME: once const fn is stable, suggest these functions again in constants
+
+const BAA: *const i32 = 0 as *const i32;
+static mut BAR: *const i32 = BAA;
+static mut FOO: *const i32 = 0 as *const i32;
+static mut BUH: bool = 42.0 < f32::NAN;
+
+#[allow(unused_variables, unused_mut)]
+fn main() {
+ /*
+ lazy_static! {
+ static ref MUT_MAP : HashMap<usize, &'static str> = {
+ let mut m = HashMap::new();
+ m.insert(0, "zero");
+ m
+ };
+ static ref MUT_COUNT : usize = MUT_MAP.len();
+ }
+ assert_eq!(*MUT_COUNT, 1);
+ */
+ // FIXME: don't lint in array length, requires `check_body`
+ //let _ = [""; (42.0 < f32::NAN) as usize];
+}
diff --git a/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs b/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs
new file mode 100644
index 000000000..4f61c7682
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs
@@ -0,0 +1,7 @@
+#[deny(clippy::all)]
+#[derive(Debug)]
+pub enum Error {
+ Type(&'static str),
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.rs b/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.rs
new file mode 100644
index 000000000..376ff97ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.rs
@@ -0,0 +1,20 @@
+#![deny(clippy::needless_lifetimes)]
+#![allow(dead_code)]
+
+trait Foo {}
+
+struct Bar;
+
+struct Baz<'a> {
+ bar: &'a Bar,
+}
+
+impl<'a> Foo for Baz<'a> {}
+
+impl Bar {
+ fn baz<'a>(&'a self) -> impl Foo + 'a {
+ Baz { bar: self }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr b/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr
new file mode 100644
index 000000000..d68bbe788
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/needless_lifetimes_impl_trait.stderr
@@ -0,0 +1,14 @@
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes_impl_trait.rs:15:5
+ |
+LL | fn baz<'a>(&'a self) -> impl Foo + 'a {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/needless_lifetimes_impl_trait.rs:1:9
+ |
+LL | #![deny(clippy::needless_lifetimes)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crashes/regressions.rs b/src/tools/clippy/tests/ui/crashes/regressions.rs
new file mode 100644
index 000000000..6f9d98bbf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/regressions.rs
@@ -0,0 +1,11 @@
+#![allow(clippy::blacklisted_name)]
+
+pub fn foo(bar: *const u8) {
+ println!("{:#p}", bar);
+}
+
+// Regression test for https://github.com/rust-lang/rust-clippy/issues/4917
+/// <foo
+struct A;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/returns.rs b/src/tools/clippy/tests/ui/crashes/returns.rs
new file mode 100644
index 000000000..8021ed460
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/returns.rs
@@ -0,0 +1,23 @@
+/// Test for https://github.com/rust-lang/rust-clippy/issues/1346
+
+#[deny(warnings)]
+fn cfg_return() -> i32 {
+ #[cfg(unix)]
+ return 1;
+ #[cfg(not(unix))]
+ return 2;
+}
+
+#[deny(warnings)]
+fn cfg_let_and_return() -> i32 {
+ #[cfg(unix)]
+ let x = 1;
+ #[cfg(not(unix))]
+ let x = 2;
+ x
+}
+
+fn main() {
+ cfg_return();
+ cfg_let_and_return();
+}
diff --git a/src/tools/clippy/tests/ui/crashes/shadow.rs b/src/tools/clippy/tests/ui/crashes/shadow.rs
new file mode 100644
index 000000000..843e8ef64
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/shadow.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let x: [i32; {
+ let u = 2;
+ 4
+ }] = [2; { 4 }];
+}
diff --git a/src/tools/clippy/tests/ui/crashes/single-match-else.rs b/src/tools/clippy/tests/ui/crashes/single-match-else.rs
new file mode 100644
index 000000000..1ba7ac082
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/single-match-else.rs
@@ -0,0 +1,11 @@
+#![warn(clippy::single_match_else)]
+
+//! Test for https://github.com/rust-lang/rust-clippy/issues/1588
+
+fn main() {
+ let n = match (42, 43) {
+ (42, n) => n,
+ _ => panic!("typeck error"),
+ };
+ assert_eq!(n, 43);
+}
diff --git a/src/tools/clippy/tests/ui/crashes/third-party/clippy.toml b/src/tools/clippy/tests/ui/crashes/third-party/clippy.toml
new file mode 100644
index 000000000..9f87de20b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/third-party/clippy.toml
@@ -0,0 +1,3 @@
+# this is ignored by Clippy, but allowed for other tools like clippy-service
+[third-party]
+clippy-feature = "nightly"
diff --git a/src/tools/clippy/tests/ui/crashes/third-party/conf_allowlisted.rs b/src/tools/clippy/tests/ui/crashes/third-party/conf_allowlisted.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/third-party/conf_allowlisted.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/trivial_bounds.rs b/src/tools/clippy/tests/ui/crashes/trivial_bounds.rs
new file mode 100644
index 000000000..60105a821
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/trivial_bounds.rs
@@ -0,0 +1,11 @@
+#![feature(trivial_bounds)]
+#![allow(unused, trivial_bounds)]
+
+fn test_trivial_bounds()
+where
+ i32: Iterator,
+{
+ for _ in 2i32 {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crashes/used_underscore_binding_macro.rs b/src/tools/clippy/tests/ui/crashes/used_underscore_binding_macro.rs
new file mode 100644
index 000000000..901eb4e50
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crashes/used_underscore_binding_macro.rs
@@ -0,0 +1,16 @@
+use serde::Deserialize;
+
+/// Tests that we do not lint for unused underscores in a `MacroAttribute`
+/// expansion
+#[deny(clippy::used_underscore_binding)]
+#[derive(Deserialize)]
+struct MacroAttributesTest {
+ _foo: u32,
+}
+
+#[test]
+fn macro_attributes_test() {
+ let _ = MacroAttributesTest { _foo: 0 };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/crate_in_macro_def.fixed b/src/tools/clippy/tests/ui/crate_in_macro_def.fixed
new file mode 100644
index 000000000..9fc594be3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_in_macro_def.fixed
@@ -0,0 +1,56 @@
+// run-rustfix
+#![warn(clippy::crate_in_macro_def)]
+
+mod hygienic {
+ #[macro_export]
+ macro_rules! print_message_hygienic {
+ () => {
+ println!("{}", $crate::hygienic::MESSAGE);
+ };
+ }
+
+ pub const MESSAGE: &str = "Hello!";
+}
+
+mod unhygienic {
+ #[macro_export]
+ macro_rules! print_message_unhygienic {
+ () => {
+ println!("{}", $crate::unhygienic::MESSAGE);
+ };
+ }
+
+ pub const MESSAGE: &str = "Hello!";
+}
+
+mod unhygienic_intentionally {
+ // For cases where the use of `crate` is intentional, applying `allow` to the macro definition
+ // should suppress the lint.
+ #[allow(clippy::crate_in_macro_def)]
+ #[macro_export]
+ macro_rules! print_message_unhygienic_intentionally {
+ () => {
+ println!("{}", crate::CALLER_PROVIDED_MESSAGE);
+ };
+ }
+}
+
+#[macro_use]
+mod not_exported {
+ macro_rules! print_message_not_exported {
+ () => {
+ println!("{}", crate::not_exported::MESSAGE);
+ };
+ }
+
+ pub const MESSAGE: &str = "Hello!";
+}
+
+fn main() {
+ print_message_hygienic!();
+ print_message_unhygienic!();
+ print_message_unhygienic_intentionally!();
+ print_message_not_exported!();
+}
+
+pub const CALLER_PROVIDED_MESSAGE: &str = "Hello!";
diff --git a/src/tools/clippy/tests/ui/crate_in_macro_def.rs b/src/tools/clippy/tests/ui/crate_in_macro_def.rs
new file mode 100644
index 000000000..ac456108e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_in_macro_def.rs
@@ -0,0 +1,56 @@
+// run-rustfix
+#![warn(clippy::crate_in_macro_def)]
+
+mod hygienic {
+ #[macro_export]
+ macro_rules! print_message_hygienic {
+ () => {
+ println!("{}", $crate::hygienic::MESSAGE);
+ };
+ }
+
+ pub const MESSAGE: &str = "Hello!";
+}
+
+mod unhygienic {
+ #[macro_export]
+ macro_rules! print_message_unhygienic {
+ () => {
+ println!("{}", crate::unhygienic::MESSAGE);
+ };
+ }
+
+ pub const MESSAGE: &str = "Hello!";
+}
+
+mod unhygienic_intentionally {
+ // For cases where the use of `crate` is intentional, applying `allow` to the macro definition
+ // should suppress the lint.
+ #[allow(clippy::crate_in_macro_def)]
+ #[macro_export]
+ macro_rules! print_message_unhygienic_intentionally {
+ () => {
+ println!("{}", crate::CALLER_PROVIDED_MESSAGE);
+ };
+ }
+}
+
+#[macro_use]
+mod not_exported {
+ macro_rules! print_message_not_exported {
+ () => {
+ println!("{}", crate::not_exported::MESSAGE);
+ };
+ }
+
+ pub const MESSAGE: &str = "Hello!";
+}
+
+fn main() {
+ print_message_hygienic!();
+ print_message_unhygienic!();
+ print_message_unhygienic_intentionally!();
+ print_message_not_exported!();
+}
+
+pub const CALLER_PROVIDED_MESSAGE: &str = "Hello!";
diff --git a/src/tools/clippy/tests/ui/crate_in_macro_def.stderr b/src/tools/clippy/tests/ui/crate_in_macro_def.stderr
new file mode 100644
index 000000000..9ac5937dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_in_macro_def.stderr
@@ -0,0 +1,10 @@
+error: `crate` references the macro call's crate
+ --> $DIR/crate_in_macro_def.rs:19:28
+ |
+LL | println!("{}", crate::unhygienic::MESSAGE);
+ | ^^^^^ help: to reference the macro definition's crate, use: `$crate`
+ |
+ = note: `-D clippy::crate-in-macro-def` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.rs b/src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.rs
new file mode 100644
index 000000000..1b3bcece6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.rs
@@ -0,0 +1,11 @@
+// ignore-macos
+
+#![feature(rustc_attrs)]
+
+#[warn(clippy::main_recursion)]
+#[allow(unconditional_recursion)]
+#[rustc_main]
+fn a() {
+ println!("Hello, World!");
+ a();
+}
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.stderr b/src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.stderr
new file mode 100644
index 000000000..459cf12a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/entrypoint_recursion.stderr
@@ -0,0 +1,11 @@
+error: recursing into entrypoint `a`
+ --> $DIR/entrypoint_recursion.rs:10:5
+ |
+LL | a();
+ | ^
+ |
+ = note: `-D clippy::main-recursion` implied by `-D warnings`
+ = help: consider using another function for this recursion
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_main_recursion.rs b/src/tools/clippy/tests/ui/crate_level_checks/no_std_main_recursion.rs
new file mode 100644
index 000000000..4a5c597dd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_main_recursion.rs
@@ -0,0 +1,33 @@
+// compile-flags: -Clink-arg=-nostartfiles
+// ignore-macos
+// ignore-windows
+
+#![feature(lang_items, start, libc)]
+#![no_std]
+
+use core::panic::PanicInfo;
+use core::sync::atomic::{AtomicUsize, Ordering};
+
+static N: AtomicUsize = AtomicUsize::new(0);
+
+#[warn(clippy::main_recursion)]
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ let x = N.load(Ordering::Relaxed);
+ N.store(x + 1, Ordering::Relaxed);
+
+ if x < 3 {
+ main(_argc, _argv);
+ }
+
+ 0
+}
+
+#[allow(clippy::empty_loop)]
+#[panic_handler]
+fn panic(_info: &PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {}
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs
new file mode 100644
index 000000000..d3571eaf0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs
@@ -0,0 +1,14 @@
+#![no_std]
+#![feature(lang_items, start, libc)]
+#![crate_type = "lib"]
+
+use core::panic::PanicInfo;
+
+#[warn(clippy::all)]
+fn main() {
+ let mut a = 42;
+ let mut b = 1337;
+
+ a = b;
+ b = a;
+}
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr
new file mode 100644
index 000000000..48152d8ad
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr
@@ -0,0 +1,12 @@
+error: this looks like you are trying to swap `a` and `b`
+ --> $DIR/no_std_swap.rs:12:5
+ |
+LL | / a = b;
+LL | | b = a;
+ | |_________^ help: try: `core::mem::swap(&mut a, &mut b)`
+ |
+ = note: `-D clippy::almost-swapped` implied by `-D warnings`
+ = note: or maybe you should use `core::mem::replace`?
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.rs b/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.rs
new file mode 100644
index 000000000..89ff66099
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.rs
@@ -0,0 +1,6 @@
+#[warn(clippy::main_recursion)]
+#[allow(unconditional_recursion)]
+fn main() {
+ println!("Hello, World!");
+ main();
+}
diff --git a/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr b/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr
new file mode 100644
index 000000000..0a260f9d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/crate_level_checks/std_main_recursion.stderr
@@ -0,0 +1,11 @@
+error: recursing into entrypoint `main`
+ --> $DIR/std_main_recursion.rs:5:5
+ |
+LL | main();
+ | ^^^^
+ |
+ = note: `-D clippy::main-recursion` implied by `-D warnings`
+ = help: consider using another function for this recursion
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/create_dir.fixed b/src/tools/clippy/tests/ui/create_dir.fixed
new file mode 100644
index 000000000..8ed53a56a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/create_dir.fixed
@@ -0,0 +1,17 @@
+// run-rustfix
+#![allow(unused_must_use)]
+#![warn(clippy::create_dir)]
+
+use std::fs::create_dir_all;
+
+fn create_dir() {}
+
+fn main() {
+ // Should be warned
+ create_dir_all("foo");
+ create_dir_all("bar").unwrap();
+
+ // Shouldn't be warned
+ create_dir();
+ std::fs::create_dir_all("foobar");
+}
diff --git a/src/tools/clippy/tests/ui/create_dir.rs b/src/tools/clippy/tests/ui/create_dir.rs
new file mode 100644
index 000000000..19c8fc24b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/create_dir.rs
@@ -0,0 +1,17 @@
+// run-rustfix
+#![allow(unused_must_use)]
+#![warn(clippy::create_dir)]
+
+use std::fs::create_dir_all;
+
+fn create_dir() {}
+
+fn main() {
+ // Should be warned
+ std::fs::create_dir("foo");
+ std::fs::create_dir("bar").unwrap();
+
+ // Shouldn't be warned
+ create_dir();
+ std::fs::create_dir_all("foobar");
+}
diff --git a/src/tools/clippy/tests/ui/create_dir.stderr b/src/tools/clippy/tests/ui/create_dir.stderr
new file mode 100644
index 000000000..67298fc47
--- /dev/null
+++ b/src/tools/clippy/tests/ui/create_dir.stderr
@@ -0,0 +1,16 @@
+error: calling `std::fs::create_dir` where there may be a better way
+ --> $DIR/create_dir.rs:11:5
+ |
+LL | std::fs::create_dir("foo");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `std::fs::create_dir_all` instead: `create_dir_all("foo")`
+ |
+ = note: `-D clippy::create-dir` implied by `-D warnings`
+
+error: calling `std::fs::create_dir` where there may be a better way
+ --> $DIR/create_dir.rs:12:5
+ |
+LL | std::fs::create_dir("bar").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `std::fs::create_dir_all` instead: `create_dir_all("bar")`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/dbg_macro.rs b/src/tools/clippy/tests/ui/dbg_macro.rs
new file mode 100644
index 000000000..25294e8c7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/dbg_macro.rs
@@ -0,0 +1,60 @@
+// compile-flags: --test
+#![warn(clippy::dbg_macro)]
+
+fn foo(n: u32) -> u32 {
+ if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
+}
+
+fn factorial(n: u32) -> u32 {
+ if dbg!(n <= 1) {
+ dbg!(1)
+ } else {
+ dbg!(n * factorial(n - 1))
+ }
+}
+
+fn main() {
+ dbg!(42);
+ dbg!(dbg!(dbg!(42)));
+ foo(3) + dbg!(factorial(4));
+ dbg!(1, 2, dbg!(3, 4));
+ dbg!(1, 2, 3, 4, 5);
+}
+
+mod issue7274 {
+ trait Thing<'b> {
+ fn foo(&self);
+ }
+
+ macro_rules! define_thing {
+ ($thing:ident, $body:expr) => {
+ impl<'a> Thing<'a> for $thing {
+ fn foo<'b>(&self) {
+ $body
+ }
+ }
+ };
+ }
+
+ struct MyThing;
+ define_thing!(MyThing, {
+ dbg!(2);
+ });
+}
+
+#[test]
+pub fn issue8481() {
+ dbg!(1);
+}
+
+#[cfg(test)]
+fn foo2() {
+ dbg!(1);
+}
+
+#[cfg(test)]
+mod mod1 {
+ fn func() {
+ dbg!(1);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/dbg_macro.stderr b/src/tools/clippy/tests/ui/dbg_macro.stderr
new file mode 100644
index 000000000..e6a65b46d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/dbg_macro.stderr
@@ -0,0 +1,146 @@
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:5:22
+ |
+LL | if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n }
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::dbg-macro` implied by `-D warnings`
+help: ensure to avoid having uses of it in version control
+ |
+LL | if let Some(n) = n.checked_sub(4) { n } else { n }
+ | ~~~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:9:8
+ |
+LL | if dbg!(n <= 1) {
+ | ^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | if n <= 1 {
+ | ~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:10:9
+ |
+LL | dbg!(1)
+ | ^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 1
+ |
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:12:9
+ |
+LL | dbg!(n * factorial(n - 1))
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | n * factorial(n - 1)
+ |
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:17:5
+ |
+LL | dbg!(42);
+ | ^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 42;
+ | ~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:18:5
+ |
+LL | dbg!(dbg!(dbg!(42)));
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | dbg!(dbg!(42));
+ | ~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:19:14
+ |
+LL | foo(3) + dbg!(factorial(4));
+ | ^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | foo(3) + factorial(4);
+ | ~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:20:5
+ |
+LL | dbg!(1, 2, dbg!(3, 4));
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | (1, 2, dbg!(3, 4));
+ | ~~~~~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:21:5
+ |
+LL | dbg!(1, 2, 3, 4, 5);
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | (1, 2, 3, 4, 5);
+ | ~~~~~~~~~~~~~~~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:41:9
+ |
+LL | dbg!(2);
+ | ^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 2;
+ | ~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:47:5
+ |
+LL | dbg!(1);
+ | ^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 1;
+ | ~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:52:5
+ |
+LL | dbg!(1);
+ | ^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 1;
+ | ~
+
+error: `dbg!` macro is intended as a debugging tool
+ --> $DIR/dbg_macro.rs:58:9
+ |
+LL | dbg!(1);
+ | ^^^^^^^
+ |
+help: ensure to avoid having uses of it in version control
+ |
+LL | 1;
+ | ~
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/debug_assert_with_mut_call.rs b/src/tools/clippy/tests/ui/debug_assert_with_mut_call.rs
new file mode 100644
index 000000000..46faa0a7b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/debug_assert_with_mut_call.rs
@@ -0,0 +1,133 @@
+#![feature(custom_inner_attributes)]
+#![rustfmt::skip]
+#![warn(clippy::debug_assert_with_mut_call)]
+#![allow(clippy::redundant_closure_call, clippy::get_first)]
+
+
+struct S;
+
+impl S {
+ fn bool_self_ref(&self) -> bool { false }
+ fn bool_self_mut(&mut self) -> bool { false }
+ fn bool_self_ref_arg_ref(&self, _: &u32) -> bool { false }
+ fn bool_self_ref_arg_mut(&self, _: &mut u32) -> bool { false }
+ fn bool_self_mut_arg_ref(&mut self, _: &u32) -> bool { false }
+ fn bool_self_mut_arg_mut(&mut self, _: &mut u32) -> bool { false }
+
+ fn u32_self_ref(&self) -> u32 { 0 }
+ fn u32_self_mut(&mut self) -> u32 { 0 }
+ fn u32_self_ref_arg_ref(&self, _: &u32) -> u32 { 0 }
+ fn u32_self_ref_arg_mut(&self, _: &mut u32) -> u32 { 0 }
+ fn u32_self_mut_arg_ref(&mut self, _: &u32) -> u32 { 0 }
+ fn u32_self_mut_arg_mut(&mut self, _: &mut u32) -> u32 { 0 }
+}
+
+fn bool_ref(_: &u32) -> bool { false }
+fn bool_mut(_: &mut u32) -> bool { false }
+fn u32_ref(_: &u32) -> u32 { 0 }
+fn u32_mut(_: &mut u32) -> u32 { 0 }
+
+fn func_non_mutable() {
+ debug_assert!(bool_ref(&3));
+ debug_assert!(!bool_ref(&3));
+
+ debug_assert_eq!(0, u32_ref(&3));
+ debug_assert_eq!(u32_ref(&3), 0);
+
+ debug_assert_ne!(1, u32_ref(&3));
+ debug_assert_ne!(u32_ref(&3), 1);
+}
+
+fn func_mutable() {
+ debug_assert!(bool_mut(&mut 3));
+ debug_assert!(!bool_mut(&mut 3));
+
+ debug_assert_eq!(0, u32_mut(&mut 3));
+ debug_assert_eq!(u32_mut(&mut 3), 0);
+
+ debug_assert_ne!(1, u32_mut(&mut 3));
+ debug_assert_ne!(u32_mut(&mut 3), 1);
+}
+
+fn method_non_mutable() {
+ debug_assert!(S.bool_self_ref());
+ debug_assert!(S.bool_self_ref_arg_ref(&3));
+
+ debug_assert_eq!(S.u32_self_ref(), 0);
+ debug_assert_eq!(S.u32_self_ref_arg_ref(&3), 0);
+
+ debug_assert_ne!(S.u32_self_ref(), 1);
+ debug_assert_ne!(S.u32_self_ref_arg_ref(&3), 1);
+}
+
+fn method_mutable() {
+ debug_assert!(S.bool_self_mut());
+ debug_assert!(!S.bool_self_mut());
+ debug_assert!(S.bool_self_ref_arg_mut(&mut 3));
+ debug_assert!(S.bool_self_mut_arg_ref(&3));
+ debug_assert!(S.bool_self_mut_arg_mut(&mut 3));
+
+ debug_assert_eq!(S.u32_self_mut(), 0);
+ debug_assert_eq!(S.u32_self_mut_arg_ref(&3), 0);
+ debug_assert_eq!(S.u32_self_ref_arg_mut(&mut 3), 0);
+ debug_assert_eq!(S.u32_self_mut_arg_mut(&mut 3), 0);
+
+ debug_assert_ne!(S.u32_self_mut(), 1);
+ debug_assert_ne!(S.u32_self_mut_arg_ref(&3), 1);
+ debug_assert_ne!(S.u32_self_ref_arg_mut(&mut 3), 1);
+ debug_assert_ne!(S.u32_self_mut_arg_mut(&mut 3), 1);
+}
+
+fn misc() {
+ // with variable
+ let mut v: Vec<u32> = vec![1, 2, 3, 4];
+ debug_assert_eq!(v.get(0), Some(&1));
+ debug_assert_ne!(v[0], 2);
+ debug_assert_eq!(v.pop(), Some(1));
+ debug_assert_ne!(Some(3), v.pop());
+
+ let a = &mut 3;
+ debug_assert!(bool_mut(a));
+
+ // nested
+ debug_assert!(!(bool_ref(&u32_mut(&mut 3))));
+
+ // chained
+ debug_assert_eq!(v.pop().unwrap(), 3);
+
+ // format args
+ debug_assert!(bool_ref(&3), "w/o format");
+ debug_assert!(bool_mut(&mut 3), "w/o format");
+ debug_assert!(bool_ref(&3), "{} format", "w/");
+ debug_assert!(bool_mut(&mut 3), "{} format", "w/");
+
+ // sub block
+ let mut x = 42_u32;
+ debug_assert!({
+ bool_mut(&mut x);
+ x > 10
+ });
+
+ // closures
+ debug_assert!((|| {
+ let mut x = 42;
+ bool_mut(&mut x);
+ x > 10
+ })());
+}
+
+async fn debug_await() {
+ debug_assert!(async {
+ true
+ }.await);
+}
+
+fn main() {
+ func_non_mutable();
+ func_mutable();
+ method_non_mutable();
+ method_mutable();
+
+ misc();
+ debug_await();
+}
diff --git a/src/tools/clippy/tests/ui/debug_assert_with_mut_call.stderr b/src/tools/clippy/tests/ui/debug_assert_with_mut_call.stderr
new file mode 100644
index 000000000..a2ca71b57
--- /dev/null
+++ b/src/tools/clippy/tests/ui/debug_assert_with_mut_call.stderr
@@ -0,0 +1,172 @@
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:42:19
+ |
+LL | debug_assert!(bool_mut(&mut 3));
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::debug-assert-with-mut-call` implied by `-D warnings`
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:43:20
+ |
+LL | debug_assert!(!bool_mut(&mut 3));
+ | ^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:45:25
+ |
+LL | debug_assert_eq!(0, u32_mut(&mut 3));
+ | ^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:46:22
+ |
+LL | debug_assert_eq!(u32_mut(&mut 3), 0);
+ | ^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:48:25
+ |
+LL | debug_assert_ne!(1, u32_mut(&mut 3));
+ | ^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:49:22
+ |
+LL | debug_assert_ne!(u32_mut(&mut 3), 1);
+ | ^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:64:19
+ |
+LL | debug_assert!(S.bool_self_mut());
+ | ^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:65:20
+ |
+LL | debug_assert!(!S.bool_self_mut());
+ | ^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:66:19
+ |
+LL | debug_assert!(S.bool_self_ref_arg_mut(&mut 3));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:67:19
+ |
+LL | debug_assert!(S.bool_self_mut_arg_ref(&3));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:68:19
+ |
+LL | debug_assert!(S.bool_self_mut_arg_mut(&mut 3));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:70:22
+ |
+LL | debug_assert_eq!(S.u32_self_mut(), 0);
+ | ^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:71:22
+ |
+LL | debug_assert_eq!(S.u32_self_mut_arg_ref(&3), 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:72:22
+ |
+LL | debug_assert_eq!(S.u32_self_ref_arg_mut(&mut 3), 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:73:22
+ |
+LL | debug_assert_eq!(S.u32_self_mut_arg_mut(&mut 3), 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:75:22
+ |
+LL | debug_assert_ne!(S.u32_self_mut(), 1);
+ | ^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:76:22
+ |
+LL | debug_assert_ne!(S.u32_self_mut_arg_ref(&3), 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:77:22
+ |
+LL | debug_assert_ne!(S.u32_self_ref_arg_mut(&mut 3), 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:78:22
+ |
+LL | debug_assert_ne!(S.u32_self_mut_arg_mut(&mut 3), 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:86:22
+ |
+LL | debug_assert_eq!(v.pop(), Some(1));
+ | ^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_ne!`
+ --> $DIR/debug_assert_with_mut_call.rs:87:31
+ |
+LL | debug_assert_ne!(Some(3), v.pop());
+ | ^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:90:19
+ |
+LL | debug_assert!(bool_mut(a));
+ | ^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:93:31
+ |
+LL | debug_assert!(!(bool_ref(&u32_mut(&mut 3))));
+ | ^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert_eq!`
+ --> $DIR/debug_assert_with_mut_call.rs:96:22
+ |
+LL | debug_assert_eq!(v.pop().unwrap(), 3);
+ | ^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:100:19
+ |
+LL | debug_assert!(bool_mut(&mut 3), "w/o format");
+ | ^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:102:19
+ |
+LL | debug_assert!(bool_mut(&mut 3), "{} format", "w/");
+ | ^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:107:9
+ |
+LL | bool_mut(&mut x);
+ | ^^^^^^^^^^^^^^^^
+
+error: do not call a function with mutable arguments inside of `debug_assert!`
+ --> $DIR/debug_assert_with_mut_call.rs:114:9
+ |
+LL | bool_mut(&mut x);
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 28 previous errors
+
diff --git a/src/tools/clippy/tests/ui/decimal_literal_representation.fixed b/src/tools/clippy/tests/ui/decimal_literal_representation.fixed
new file mode 100644
index 000000000..de3914651
--- /dev/null
+++ b/src/tools/clippy/tests/ui/decimal_literal_representation.fixed
@@ -0,0 +1,27 @@
+// run-rustfix
+
+#[warn(clippy::decimal_literal_representation)]
+#[allow(unused_variables)]
+#[rustfmt::skip]
+fn main() {
+ let good = ( // Hex:
+ 127, // 0x7F
+ 256, // 0x100
+ 511, // 0x1FF
+ 2048, // 0x800
+ 4090, // 0xFFA
+ 16_371, // 0x3FF3
+ 61_683, // 0xF0F3
+ 2_131_750_925, // 0x7F0F_F00D
+ );
+ let bad = ( // Hex:
+ 0x8005, // 0x8005
+ 0xFF00, // 0xFF00
+ 0x7F0F_F00F, // 0x7F0F_F00F
+ 0x7FFF_FFFF, // 0x7FFF_FFFF
+ #[allow(overflowing_literals)]
+ 0xF0F0_F0F0, // 0xF0F0_F0F0
+ 0x8005_usize, // 0x8005_usize
+ 0x7F0F_F00F_isize, // 0x7F0F_F00F_isize
+ );
+}
diff --git a/src/tools/clippy/tests/ui/decimal_literal_representation.rs b/src/tools/clippy/tests/ui/decimal_literal_representation.rs
new file mode 100644
index 000000000..55d07698e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/decimal_literal_representation.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+
+#[warn(clippy::decimal_literal_representation)]
+#[allow(unused_variables)]
+#[rustfmt::skip]
+fn main() {
+ let good = ( // Hex:
+ 127, // 0x7F
+ 256, // 0x100
+ 511, // 0x1FF
+ 2048, // 0x800
+ 4090, // 0xFFA
+ 16_371, // 0x3FF3
+ 61_683, // 0xF0F3
+ 2_131_750_925, // 0x7F0F_F00D
+ );
+ let bad = ( // Hex:
+ 32_773, // 0x8005
+ 65_280, // 0xFF00
+ 2_131_750_927, // 0x7F0F_F00F
+ 2_147_483_647, // 0x7FFF_FFFF
+ #[allow(overflowing_literals)]
+ 4_042_322_160, // 0xF0F0_F0F0
+ 32_773usize, // 0x8005_usize
+ 2_131_750_927isize, // 0x7F0F_F00F_isize
+ );
+}
diff --git a/src/tools/clippy/tests/ui/decimal_literal_representation.stderr b/src/tools/clippy/tests/ui/decimal_literal_representation.stderr
new file mode 100644
index 000000000..8d50c8f83
--- /dev/null
+++ b/src/tools/clippy/tests/ui/decimal_literal_representation.stderr
@@ -0,0 +1,46 @@
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:18:9
+ |
+LL | 32_773, // 0x8005
+ | ^^^^^^ help: consider: `0x8005`
+ |
+ = note: `-D clippy::decimal-literal-representation` implied by `-D warnings`
+
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:19:9
+ |
+LL | 65_280, // 0xFF00
+ | ^^^^^^ help: consider: `0xFF00`
+
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:20:9
+ |
+LL | 2_131_750_927, // 0x7F0F_F00F
+ | ^^^^^^^^^^^^^ help: consider: `0x7F0F_F00F`
+
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:21:9
+ |
+LL | 2_147_483_647, // 0x7FFF_FFFF
+ | ^^^^^^^^^^^^^ help: consider: `0x7FFF_FFFF`
+
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:23:9
+ |
+LL | 4_042_322_160, // 0xF0F0_F0F0
+ | ^^^^^^^^^^^^^ help: consider: `0xF0F0_F0F0`
+
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:24:9
+ |
+LL | 32_773usize, // 0x8005_usize
+ | ^^^^^^^^^^^ help: consider: `0x8005_usize`
+
+error: integer literal has a better hexadecimal representation
+ --> $DIR/decimal_literal_representation.rs:25:9
+ |
+LL | 2_131_750_927isize, // 0x7F0F_F00F_isize
+ | ^^^^^^^^^^^^^^^^^^ help: consider: `0x7F0F_F00F_isize`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.rs b/src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.rs
new file mode 100644
index 000000000..f44518694
--- /dev/null
+++ b/src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.rs
@@ -0,0 +1,123 @@
+#![warn(clippy::declare_interior_mutable_const)]
+
+use std::cell::Cell;
+use std::sync::atomic::AtomicUsize;
+
+enum OptionalCell {
+ Unfrozen(Cell<bool>),
+ Frozen,
+}
+
+// a constant with enums should be linted only when the used variant is unfrozen (#3962).
+const UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(true)); //~ ERROR interior mutable
+const FROZEN_VARIANT: OptionalCell = OptionalCell::Frozen;
+
+const fn unfrozen_variant() -> OptionalCell {
+ OptionalCell::Unfrozen(Cell::new(false))
+}
+
+const fn frozen_variant() -> OptionalCell {
+ OptionalCell::Frozen
+}
+
+const UNFROZEN_VARIANT_FROM_FN: OptionalCell = unfrozen_variant(); //~ ERROR interior mutable
+const FROZEN_VARIANT_FROM_FN: OptionalCell = frozen_variant();
+
+enum NestedInnermost {
+ Unfrozen(AtomicUsize),
+ Frozen,
+}
+
+struct NestedInner {
+ inner: NestedInnermost,
+}
+
+enum NestedOuter {
+ NestedInner(NestedInner),
+ NotNested(usize),
+}
+
+struct NestedOutermost {
+ outer: NestedOuter,
+}
+
+// a constant with enums should be linted according to its value, no matter how structs are involved.
+const NESTED_UNFROZEN_VARIANT: NestedOutermost = NestedOutermost {
+ outer: NestedOuter::NestedInner(NestedInner {
+ inner: NestedInnermost::Unfrozen(AtomicUsize::new(2)),
+ }),
+}; //~ ERROR interior mutable
+const NESTED_FROZEN_VARIANT: NestedOutermost = NestedOutermost {
+ outer: NestedOuter::NestedInner(NestedInner {
+ inner: NestedInnermost::Frozen,
+ }),
+};
+
+trait AssocConsts {
+ // When there's no default value, lint it only according to its type.
+ // Further details are in the corresponding code (`NonCopyConst::check_trait_item`).
+ const TO_BE_UNFROZEN_VARIANT: OptionalCell; //~ ERROR interior mutable
+ const TO_BE_FROZEN_VARIANT: OptionalCell; //~ ERROR interior mutable
+
+ // Lint default values accordingly.
+ const DEFAULTED_ON_UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(false)); //~ ERROR interior mutable
+ const DEFAULTED_ON_FROZEN_VARIANT: OptionalCell = OptionalCell::Frozen;
+}
+
+// The lint doesn't trigger for an assoc constant in a trait impl with an unfrozen type even if it
+// has enums. Further details are in the corresponding code in 'NonCopyConst::check_impl_item'.
+impl AssocConsts for u64 {
+ const TO_BE_UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(false));
+ const TO_BE_FROZEN_VARIANT: OptionalCell = OptionalCell::Frozen;
+
+ // even if this sets an unfrozen variant, the lint ignores it.
+ const DEFAULTED_ON_FROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(false));
+}
+
+// At first, I thought I'd need to check every pattern in `trait.rs`; but what matters
+// here are values; and I think substituted generics at definitions won't appear in MIR.
+trait AssocTypes {
+ type ToBeUnfrozen;
+
+ const TO_BE_UNFROZEN_VARIANT: Option<Self::ToBeUnfrozen>;
+ const TO_BE_FROZEN_VARIANT: Option<Self::ToBeUnfrozen>;
+}
+
+impl AssocTypes for u64 {
+ type ToBeUnfrozen = AtomicUsize;
+
+ const TO_BE_UNFROZEN_VARIANT: Option<Self::ToBeUnfrozen> = Some(Self::ToBeUnfrozen::new(4)); //~ ERROR interior mutable
+ const TO_BE_FROZEN_VARIANT: Option<Self::ToBeUnfrozen> = None;
+}
+
+// Use raw pointers since direct generics have a false negative at the type level.
+enum BothOfCellAndGeneric<T> {
+ Unfrozen(Cell<*const T>),
+ Generic(*const T),
+ Frozen(usize),
+}
+
+impl<T> BothOfCellAndGeneric<T> {
+ const UNFROZEN_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Unfrozen(Cell::new(std::ptr::null())); //~ ERROR interior mutable
+
+ // This is a false positive. The argument about this is in `is_value_unfrozen_raw`
+ const GENERIC_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Generic(std::ptr::null()); //~ ERROR interior mutable
+
+ const FROZEN_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Frozen(5);
+
+ // This is what is likely to be a false negative when one tries to fix
+ // the `GENERIC_VARIANT` false positive.
+ const NO_ENUM: Cell<*const T> = Cell::new(std::ptr::null()); //~ ERROR interior mutable
+}
+
+// the associated-types case here is basically the same as the one above.
+trait BothOfCellAndGenericWithAssocType {
+ type AssocType;
+
+ const UNFROZEN_VARIANT: BothOfCellAndGeneric<Self::AssocType> =
+ BothOfCellAndGeneric::Unfrozen(Cell::new(std::ptr::null())); //~ ERROR interior mutable
+ const GENERIC_VARIANT: BothOfCellAndGeneric<Self::AssocType> = BothOfCellAndGeneric::Generic(std::ptr::null()); //~ ERROR interior mutable
+ const FROZEN_VARIANT: BothOfCellAndGeneric<Self::AssocType> = BothOfCellAndGeneric::Frozen(5);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.stderr b/src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.stderr
new file mode 100644
index 000000000..84198d546
--- /dev/null
+++ b/src/tools/clippy/tests/ui/declare_interior_mutable_const/enums.stderr
@@ -0,0 +1,89 @@
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:12:1
+ |
+LL | const UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(true)); //~ ERROR interior mutable
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | make this a static item (maybe with lazy_static)
+ |
+ = note: `-D clippy::declare-interior-mutable-const` implied by `-D warnings`
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:23:1
+ |
+LL | const UNFROZEN_VARIANT_FROM_FN: OptionalCell = unfrozen_variant(); //~ ERROR interior mutable
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | make this a static item (maybe with lazy_static)
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:45:1
+ |
+LL | const NESTED_UNFROZEN_VARIANT: NestedOutermost = NestedOutermost {
+ | ^----
+ | |
+ | _make this a static item (maybe with lazy_static)
+ | |
+LL | | outer: NestedOuter::NestedInner(NestedInner {
+LL | | inner: NestedInnermost::Unfrozen(AtomicUsize::new(2)),
+LL | | }),
+LL | | }; //~ ERROR interior mutable
+ | |__^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:59:5
+ |
+LL | const TO_BE_UNFROZEN_VARIANT: OptionalCell; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:60:5
+ |
+LL | const TO_BE_FROZEN_VARIANT: OptionalCell; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:63:5
+ |
+LL | const DEFAULTED_ON_UNFROZEN_VARIANT: OptionalCell = OptionalCell::Unfrozen(Cell::new(false)); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:89:5
+ |
+LL | const TO_BE_UNFROZEN_VARIANT: Option<Self::ToBeUnfrozen> = Some(Self::ToBeUnfrozen::new(4)); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:101:5
+ |
+LL | const UNFROZEN_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Unfrozen(Cell::new(std::ptr::null())); //~ ERROR interior mut...
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:104:5
+ |
+LL | const GENERIC_VARIANT: BothOfCellAndGeneric<T> = BothOfCellAndGeneric::Generic(std::ptr::null()); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:110:5
+ |
+LL | const NO_ENUM: Cell<*const T> = Cell::new(std::ptr::null()); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:117:5
+ |
+LL | / const UNFROZEN_VARIANT: BothOfCellAndGeneric<Self::AssocType> =
+LL | | BothOfCellAndGeneric::Unfrozen(Cell::new(std::ptr::null())); //~ ERROR interior mutable
+ | |____________________________________________________________________^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/enums.rs:119:5
+ |
+LL | const GENERIC_VARIANT: BothOfCellAndGeneric<Self::AssocType> = BothOfCellAndGeneric::Generic(std::ptr::null()); //~ ERROR interior mu...
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/declare_interior_mutable_const/others.rs b/src/tools/clippy/tests/ui/declare_interior_mutable_const/others.rs
new file mode 100644
index 000000000..896596b56
--- /dev/null
+++ b/src/tools/clippy/tests/ui/declare_interior_mutable_const/others.rs
@@ -0,0 +1,55 @@
+#![warn(clippy::declare_interior_mutable_const)]
+
+use std::borrow::Cow;
+use std::cell::Cell;
+use std::fmt::Display;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+
+const ATOMIC: AtomicUsize = AtomicUsize::new(5); //~ ERROR interior mutable
+const CELL: Cell<usize> = Cell::new(6); //~ ERROR interior mutable
+const ATOMIC_TUPLE: ([AtomicUsize; 1], Vec<AtomicUsize>, u8) = ([ATOMIC], Vec::new(), 7);
+//~^ ERROR interior mutable
+
+macro_rules! declare_const {
+ ($name:ident: $ty:ty = $e:expr) => {
+ const $name: $ty = $e;
+ };
+}
+declare_const!(_ONCE: Once = Once::new()); //~ ERROR interior mutable
+
+// const ATOMIC_REF: &AtomicUsize = &AtomicUsize::new(7); // This will simply trigger E0492.
+
+const INTEGER: u8 = 8;
+const STRING: String = String::new();
+const STR: &str = "012345";
+const COW: Cow<str> = Cow::Borrowed("abcdef");
+//^ note: a const item of Cow is used in the `postgres` package.
+
+const NO_ANN: &dyn Display = &70;
+
+static STATIC_TUPLE: (AtomicUsize, String) = (ATOMIC, STRING);
+//^ there should be no lints on this line
+
+mod issue_8493 {
+ use std::cell::Cell;
+
+ thread_local! {
+ static _BAR: Cell<i32> = const { Cell::new(0) };
+ }
+
+ macro_rules! issue_8493 {
+ () => {
+ const _BAZ: Cell<usize> = Cell::new(0); //~ ERROR interior mutable
+ static _FOOBAR: () = {
+ thread_local! {
+ static _VAR: Cell<i32> = const { Cell::new(0) };
+ }
+ };
+ };
+ }
+
+ issue_8493!();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/declare_interior_mutable_const/others.stderr b/src/tools/clippy/tests/ui/declare_interior_mutable_const/others.stderr
new file mode 100644
index 000000000..1fd6d7322
--- /dev/null
+++ b/src/tools/clippy/tests/ui/declare_interior_mutable_const/others.stderr
@@ -0,0 +1,50 @@
+error: a `const` item should never be interior mutable
+ --> $DIR/others.rs:9:1
+ |
+LL | const ATOMIC: AtomicUsize = AtomicUsize::new(5); //~ ERROR interior mutable
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | make this a static item (maybe with lazy_static)
+ |
+ = note: `-D clippy::declare-interior-mutable-const` implied by `-D warnings`
+
+error: a `const` item should never be interior mutable
+ --> $DIR/others.rs:10:1
+ |
+LL | const CELL: Cell<usize> = Cell::new(6); //~ ERROR interior mutable
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | make this a static item (maybe with lazy_static)
+
+error: a `const` item should never be interior mutable
+ --> $DIR/others.rs:11:1
+ |
+LL | const ATOMIC_TUPLE: ([AtomicUsize; 1], Vec<AtomicUsize>, u8) = ([ATOMIC], Vec::new(), 7);
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | make this a static item (maybe with lazy_static)
+
+error: a `const` item should never be interior mutable
+ --> $DIR/others.rs:16:9
+ |
+LL | const $name: $ty = $e;
+ | ^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | declare_const!(_ONCE: Once = Once::new()); //~ ERROR interior mutable
+ | ----------------------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `declare_const` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: a `const` item should never be interior mutable
+ --> $DIR/others.rs:43:13
+ |
+LL | const _BAZ: Cell<usize> = Cell::new(0); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | issue_8493!();
+ | ------------- in this macro invocation
+ |
+ = note: this error originates in the macro `issue_8493` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.rs b/src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.rs
new file mode 100644
index 000000000..256a336db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.rs
@@ -0,0 +1,150 @@
+#![warn(clippy::declare_interior_mutable_const)]
+
+use std::borrow::Cow;
+use std::cell::Cell;
+use std::sync::atomic::AtomicUsize;
+
+macro_rules! declare_const {
+ ($name:ident: $ty:ty = $e:expr) => {
+ const $name: $ty = $e;
+ };
+}
+
+// a constant whose type is a concrete type should be linted at the definition site.
+trait ConcreteTypes {
+ const ATOMIC: AtomicUsize; //~ ERROR interior mutable
+ const INTEGER: u64;
+ const STRING: String;
+ declare_const!(ANOTHER_ATOMIC: AtomicUsize = Self::ATOMIC); //~ ERROR interior mutable
+}
+
+impl ConcreteTypes for u64 {
+ const ATOMIC: AtomicUsize = AtomicUsize::new(9);
+ const INTEGER: u64 = 10;
+ const STRING: String = String::new();
+}
+
+// a helper trait used below
+trait ConstDefault {
+ const DEFAULT: Self;
+}
+
+// a constant whose type is a generic type should be linted at the implementation site.
+trait GenericTypes<T, U> {
+ const TO_REMAIN_GENERIC: T;
+ const TO_BE_CONCRETE: U;
+
+ const HAVING_DEFAULT: T = Self::TO_REMAIN_GENERIC;
+ declare_const!(IN_MACRO: T = Self::TO_REMAIN_GENERIC);
+}
+
+impl<T: ConstDefault> GenericTypes<T, AtomicUsize> for u64 {
+ const TO_REMAIN_GENERIC: T = T::DEFAULT;
+ const TO_BE_CONCRETE: AtomicUsize = AtomicUsize::new(11); //~ ERROR interior mutable
+}
+
+// a helper type used below
+struct Wrapper<T>(T);
+
+// a constant whose type is an associated type should be linted at the implementation site, too.
+trait AssocTypes {
+ type ToBeFrozen;
+ type ToBeUnfrozen;
+ type ToBeGenericParam;
+
+ const TO_BE_FROZEN: Self::ToBeFrozen;
+ const TO_BE_UNFROZEN: Self::ToBeUnfrozen;
+ const WRAPPED_TO_BE_UNFROZEN: Wrapper<Self::ToBeUnfrozen>;
+ // to ensure it can handle things when a generic type remains after normalization.
+ const WRAPPED_TO_BE_GENERIC_PARAM: Wrapper<Self::ToBeGenericParam>;
+}
+
+impl<T: ConstDefault> AssocTypes for Vec<T> {
+ type ToBeFrozen = u16;
+ type ToBeUnfrozen = AtomicUsize;
+ type ToBeGenericParam = T;
+
+ const TO_BE_FROZEN: Self::ToBeFrozen = 12;
+ const TO_BE_UNFROZEN: Self::ToBeUnfrozen = AtomicUsize::new(13); //~ ERROR interior mutable
+ const WRAPPED_TO_BE_UNFROZEN: Wrapper<Self::ToBeUnfrozen> = Wrapper(AtomicUsize::new(14)); //~ ERROR interior mutable
+ const WRAPPED_TO_BE_GENERIC_PARAM: Wrapper<Self::ToBeGenericParam> = Wrapper(T::DEFAULT);
+}
+
+// a helper trait used below
+trait AssocTypesHelper {
+ type NotToBeBounded;
+ type ToBeBounded;
+
+ const NOT_TO_BE_BOUNDED: Self::NotToBeBounded;
+}
+
+// a constant whose type is an assoc type originating from a generic param bounded at the definition
+// site should be linted there.
+trait AssocTypesFromGenericParam<T>
+where
+ T: AssocTypesHelper<ToBeBounded = AtomicUsize>,
+{
+ const NOT_BOUNDED: T::NotToBeBounded;
+ const BOUNDED: T::ToBeBounded; //~ ERROR interior mutable
+}
+
+impl<T> AssocTypesFromGenericParam<T> for u64
+where
+ T: AssocTypesHelper<ToBeBounded = AtomicUsize>,
+{
+ // an associated type could remain unknown in a trait impl.
+ const NOT_BOUNDED: T::NotToBeBounded = T::NOT_TO_BE_BOUNDED;
+ const BOUNDED: T::ToBeBounded = AtomicUsize::new(15);
+}
+
+// a constant whose type is `Self` should be linted at the implementation site as well.
+// (`Option` requires `Sized` bound.)
+trait SelfType: Sized {
+ const SELF: Self;
+ // this was the one in the original issue (#5050).
+ const WRAPPED_SELF: Option<Self>;
+}
+
+impl SelfType for u64 {
+ const SELF: Self = 16;
+ const WRAPPED_SELF: Option<Self> = Some(20);
+}
+
+impl SelfType for AtomicUsize {
+ // this (interior mutable `Self` const) exists in `parking_lot`.
+ // `const_trait_impl` will replace it in the future, hopefully.
+ const SELF: Self = AtomicUsize::new(17); //~ ERROR interior mutable
+ const WRAPPED_SELF: Option<Self> = Some(AtomicUsize::new(21)); //~ ERROR interior mutable
+}
+
+// Even though a constant contains a generic type, if it also has an interior mutable type,
+// it should be linted at the definition site.
+trait BothOfCellAndGeneric<T> {
+ // this is a false negative in the current implementation.
+ const DIRECT: Cell<T>;
+ const INDIRECT: Cell<*const T>; //~ ERROR interior mutable
+}
+
+impl<T: ConstDefault> BothOfCellAndGeneric<T> for u64 {
+ const DIRECT: Cell<T> = Cell::new(T::DEFAULT);
+ const INDIRECT: Cell<*const T> = Cell::new(std::ptr::null());
+}
+
+struct Local<T>(T);
+
+// a constant in an inherent impl is essentially the same as a normal const item
+// except there can be a generic or associated type.
+impl<T> Local<T>
+where
+ T: ConstDefault + AssocTypesHelper<ToBeBounded = AtomicUsize>,
+{
+ const ATOMIC: AtomicUsize = AtomicUsize::new(18); //~ ERROR interior mutable
+ const COW: Cow<'static, str> = Cow::Borrowed("tuvwxy");
+
+ const GENERIC_TYPE: T = T::DEFAULT;
+
+ const ASSOC_TYPE: T::NotToBeBounded = T::NOT_TO_BE_BOUNDED;
+ const BOUNDED_ASSOC_TYPE: T::ToBeBounded = AtomicUsize::new(19); //~ ERROR interior mutable
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.stderr b/src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.stderr
new file mode 100644
index 000000000..7debe059f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/declare_interior_mutable_const/traits.stderr
@@ -0,0 +1,75 @@
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:15:5
+ |
+LL | const ATOMIC: AtomicUsize; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::declare-interior-mutable-const` implied by `-D warnings`
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:9:9
+ |
+LL | const $name: $ty = $e;
+ | ^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | declare_const!(ANOTHER_ATOMIC: AtomicUsize = Self::ATOMIC); //~ ERROR interior mutable
+ | ---------------------------------------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `declare_const` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:43:5
+ |
+LL | const TO_BE_CONCRETE: AtomicUsize = AtomicUsize::new(11); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:68:5
+ |
+LL | const TO_BE_UNFROZEN: Self::ToBeUnfrozen = AtomicUsize::new(13); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:69:5
+ |
+LL | const WRAPPED_TO_BE_UNFROZEN: Wrapper<Self::ToBeUnfrozen> = Wrapper(AtomicUsize::new(14)); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:88:5
+ |
+LL | const BOUNDED: T::ToBeBounded; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:116:5
+ |
+LL | const SELF: Self = AtomicUsize::new(17); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:117:5
+ |
+LL | const WRAPPED_SELF: Option<Self> = Some(AtomicUsize::new(21)); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:125:5
+ |
+LL | const INDIRECT: Cell<*const T>; //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:141:5
+ |
+LL | const ATOMIC: AtomicUsize = AtomicUsize::new(18); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: a `const` item should never be interior mutable
+ --> $DIR/traits.rs:147:5
+ |
+LL | const BOUNDED_ASSOC_TYPE: T::ToBeBounded = AtomicUsize::new(19); //~ ERROR interior mutable
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/def_id_nocore.rs b/src/tools/clippy/tests/ui/def_id_nocore.rs
new file mode 100644
index 000000000..156c88e2e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/def_id_nocore.rs
@@ -0,0 +1,31 @@
+// ignore-windows
+// ignore-macos
+
+#![feature(no_core, lang_items, start)]
+#![no_core]
+#![allow(clippy::missing_safety_doc)]
+
+#[link(name = "c")]
+extern "C" {}
+
+#[lang = "sized"]
+pub trait Sized {}
+#[lang = "copy"]
+pub trait Copy {}
+#[lang = "freeze"]
+pub unsafe trait Freeze {}
+
+#[lang = "start"]
+fn start<T>(_main: fn() -> T, _argc: isize, _argv: *const *const u8) -> isize {
+ 0
+}
+
+fn main() {}
+
+struct A;
+
+impl A {
+ pub fn as_ref(self) -> &'static str {
+ "A"
+ }
+}
diff --git a/src/tools/clippy/tests/ui/def_id_nocore.stderr b/src/tools/clippy/tests/ui/def_id_nocore.stderr
new file mode 100644
index 000000000..40d355e9a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/def_id_nocore.stderr
@@ -0,0 +1,11 @@
+error: methods called `as_*` usually take `self` by reference or `self` by mutable reference
+ --> $DIR/def_id_nocore.rs:28:19
+ |
+LL | pub fn as_ref(self) -> &'static str {
+ | ^^^^
+ |
+ = note: `-D clippy::wrong-self-convention` implied by `-D warnings`
+ = help: consider choosing a less ambiguous name
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/default_instead_of_iter_empty.fixed b/src/tools/clippy/tests/ui/default_instead_of_iter_empty.fixed
new file mode 100644
index 000000000..f1abfdcd6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_instead_of_iter_empty.fixed
@@ -0,0 +1,21 @@
+// run-rustfix
+#![warn(clippy::default_instead_of_iter_empty)]
+#![allow(dead_code)]
+use std::collections::HashMap;
+
+#[derive(Default)]
+struct Iter {
+ iter: std::iter::Empty<usize>,
+}
+
+fn main() {
+ // Do lint.
+ let _ = std::iter::empty::<usize>();
+ let _ = std::iter::empty::<HashMap<usize, usize>>();
+ let _foo: std::iter::Empty<usize> = std::iter::empty();
+
+ // Do not lint.
+ let _ = Vec::<usize>::default();
+ let _ = String::default();
+ let _ = Iter::default();
+}
diff --git a/src/tools/clippy/tests/ui/default_instead_of_iter_empty.rs b/src/tools/clippy/tests/ui/default_instead_of_iter_empty.rs
new file mode 100644
index 000000000..2630519c4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_instead_of_iter_empty.rs
@@ -0,0 +1,21 @@
+// run-rustfix
+#![warn(clippy::default_instead_of_iter_empty)]
+#![allow(dead_code)]
+use std::collections::HashMap;
+
+#[derive(Default)]
+struct Iter {
+ iter: std::iter::Empty<usize>,
+}
+
+fn main() {
+ // Do lint.
+ let _ = std::iter::Empty::<usize>::default();
+ let _ = std::iter::Empty::<HashMap<usize, usize>>::default();
+ let _foo: std::iter::Empty<usize> = std::iter::Empty::default();
+
+ // Do not lint.
+ let _ = Vec::<usize>::default();
+ let _ = String::default();
+ let _ = Iter::default();
+}
diff --git a/src/tools/clippy/tests/ui/default_instead_of_iter_empty.stderr b/src/tools/clippy/tests/ui/default_instead_of_iter_empty.stderr
new file mode 100644
index 000000000..460fc84de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_instead_of_iter_empty.stderr
@@ -0,0 +1,22 @@
+error: `std::iter::empty()` is the more idiomatic way
+ --> $DIR/default_instead_of_iter_empty.rs:13:13
+ |
+LL | let _ = std::iter::Empty::<usize>::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::iter::empty::<usize>()`
+ |
+ = note: `-D clippy::default-instead-of-iter-empty` implied by `-D warnings`
+
+error: `std::iter::empty()` is the more idiomatic way
+ --> $DIR/default_instead_of_iter_empty.rs:14:13
+ |
+LL | let _ = std::iter::Empty::<HashMap<usize, usize>>::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::iter::empty::<HashMap<usize, usize>>()`
+
+error: `std::iter::empty()` is the more idiomatic way
+ --> $DIR/default_instead_of_iter_empty.rs:15:41
+ |
+LL | let _foo: std::iter::Empty<usize> = std::iter::Empty::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::iter::empty()`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed
new file mode 100644
index 000000000..a28bff767
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.fixed
@@ -0,0 +1,177 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::default_numeric_fallback)]
+#![allow(
+ unused,
+ clippy::never_loop,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::branches_sharing_code,
+ clippy::match_single_binding,
+ clippy::let_unit_value
+)]
+
+#[macro_use]
+extern crate macro_rules;
+
+mod basic_expr {
+ fn test() {
+ // Should lint unsuffixed literals typed `f64`.
+ let x = 0.12_f64;
+ let x = [1.0_f64, 2.0_f64, 3.0_f64];
+ let x = if true { (1.0_f64, 2.0_f64) } else { (3.0_f64, 4.0_f64) };
+ let x = match 1.0_f64 {
+ _ => 1.0_f64,
+ };
+
+ // Should NOT lint suffixed literals.
+ let x = 0.12_f64;
+
+ // Should NOT lint literals in init expr if `Local` has a type annotation.
+ let x: f64 = 0.1;
+ let x: [f64; 3] = [1., 2., 3.];
+ let x: (f64, f64) = if true { (1., 2.) } else { (3., 4.) };
+ let x: _ = 1.;
+ }
+}
+
+mod nested_local {
+ fn test() {
+ let x: _ = {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1.0_f64;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1.
+ };
+
+ let x: _ = if true {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1.0_f64;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1.
+ } else {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1.0_f64;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 2.
+ };
+ }
+}
+
+mod function_def {
+ fn ret_f64() -> f64 {
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ 1.0_f64
+ }
+
+ fn test() {
+ // Should lint this because the return type is inferred to `f64` and NOT bound to a concrete
+ // type.
+ let f = || -> _ { 1.0_f64 };
+
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ let f = || -> f64 { 1.0_f64 };
+ }
+}
+
+mod function_calls {
+ fn concrete_arg(f: f64) {}
+
+ fn generic_arg<T>(t: T) {}
+
+ fn test() {
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ concrete_arg(1.);
+
+ // Should lint this because the argument type is inferred to `f64` and NOT bound to a concrete type.
+ generic_arg(1.0_f64);
+
+ // Should lint this because the argument type is inferred to `f64` and NOT bound to a concrete type.
+ let x: _ = generic_arg(1.0_f64);
+ }
+}
+
+mod struct_ctor {
+ struct ConcreteStruct {
+ x: f64,
+ }
+
+ struct GenericStruct<T> {
+ x: T,
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteStruct { x: 1. };
+
+ // Should lint this because the field type is inferred to `f64` and NOT bound to a concrete type.
+ GenericStruct { x: 1.0_f64 };
+
+ // Should lint this because the field type is inferred to `f64` and NOT bound to a concrete type.
+ let _ = GenericStruct { x: 1.0_f64 };
+ }
+}
+
+mod enum_ctor {
+ enum ConcreteEnum {
+ X(f64),
+ }
+
+ enum GenericEnum<T> {
+ X(T),
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteEnum::X(1.);
+
+ // Should lint this because the field type is inferred to `f64` and NOT bound to a concrete type.
+ GenericEnum::X(1.0_f64);
+ }
+}
+
+mod method_calls {
+ struct StructForMethodCallTest;
+
+ impl StructForMethodCallTest {
+ fn concrete_arg(&self, f: f64) {}
+
+ fn generic_arg<T>(&self, t: T) {}
+ }
+
+ fn test() {
+ let s = StructForMethodCallTest {};
+
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ s.concrete_arg(1.);
+
+ // Should lint this because the argument type is inferred to `f64` and NOT bound to a concrete type.
+ s.generic_arg(1.0_f64);
+ }
+}
+
+mod in_macro {
+ macro_rules! internal_macro {
+ () => {
+ let x = 22.0_f64;
+ };
+ }
+
+ // Should lint in internal macro.
+ fn internal() {
+ internal_macro!();
+ }
+
+ // Should NOT lint in external macro.
+ fn external() {
+ default_numeric_fallback!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs
new file mode 100644
index 000000000..b48435cc7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.rs
@@ -0,0 +1,177 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::default_numeric_fallback)]
+#![allow(
+ unused,
+ clippy::never_loop,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::branches_sharing_code,
+ clippy::match_single_binding,
+ clippy::let_unit_value
+)]
+
+#[macro_use]
+extern crate macro_rules;
+
+mod basic_expr {
+ fn test() {
+ // Should lint unsuffixed literals typed `f64`.
+ let x = 0.12;
+ let x = [1., 2., 3.];
+ let x = if true { (1., 2.) } else { (3., 4.) };
+ let x = match 1. {
+ _ => 1.,
+ };
+
+ // Should NOT lint suffixed literals.
+ let x = 0.12_f64;
+
+ // Should NOT lint literals in init expr if `Local` has a type annotation.
+ let x: f64 = 0.1;
+ let x: [f64; 3] = [1., 2., 3.];
+ let x: (f64, f64) = if true { (1., 2.) } else { (3., 4.) };
+ let x: _ = 1.;
+ }
+}
+
+mod nested_local {
+ fn test() {
+ let x: _ = {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1.;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1.
+ };
+
+ let x: _ = if true {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1.;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1.
+ } else {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1.;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 2.
+ };
+ }
+}
+
+mod function_def {
+ fn ret_f64() -> f64 {
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ 1.
+ }
+
+ fn test() {
+ // Should lint this because the return type is inferred to `f64` and NOT bound to a concrete
+ // type.
+ let f = || -> _ { 1. };
+
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ let f = || -> f64 { 1. };
+ }
+}
+
+mod function_calls {
+ fn concrete_arg(f: f64) {}
+
+ fn generic_arg<T>(t: T) {}
+
+ fn test() {
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ concrete_arg(1.);
+
+ // Should lint this because the argument type is inferred to `f64` and NOT bound to a concrete type.
+ generic_arg(1.);
+
+ // Should lint this because the argument type is inferred to `f64` and NOT bound to a concrete type.
+ let x: _ = generic_arg(1.);
+ }
+}
+
+mod struct_ctor {
+ struct ConcreteStruct {
+ x: f64,
+ }
+
+ struct GenericStruct<T> {
+ x: T,
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteStruct { x: 1. };
+
+ // Should lint this because the field type is inferred to `f64` and NOT bound to a concrete type.
+ GenericStruct { x: 1. };
+
+ // Should lint this because the field type is inferred to `f64` and NOT bound to a concrete type.
+ let _ = GenericStruct { x: 1. };
+ }
+}
+
+mod enum_ctor {
+ enum ConcreteEnum {
+ X(f64),
+ }
+
+ enum GenericEnum<T> {
+ X(T),
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteEnum::X(1.);
+
+ // Should lint this because the field type is inferred to `f64` and NOT bound to a concrete type.
+ GenericEnum::X(1.);
+ }
+}
+
+mod method_calls {
+ struct StructForMethodCallTest;
+
+ impl StructForMethodCallTest {
+ fn concrete_arg(&self, f: f64) {}
+
+ fn generic_arg<T>(&self, t: T) {}
+ }
+
+ fn test() {
+ let s = StructForMethodCallTest {};
+
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ s.concrete_arg(1.);
+
+ // Should lint this because the argument type is inferred to `f64` and NOT bound to a concrete type.
+ s.generic_arg(1.);
+ }
+}
+
+mod in_macro {
+ macro_rules! internal_macro {
+ () => {
+ let x = 22.;
+ };
+ }
+
+ // Should lint in internal macro.
+ fn internal() {
+ internal_macro!();
+ }
+
+ // Should NOT lint in external macro.
+ fn external() {
+ default_numeric_fallback!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr
new file mode 100644
index 000000000..f8b6c7746
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_f64.stderr
@@ -0,0 +1,147 @@
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:21:17
+ |
+LL | let x = 0.12;
+ | ^^^^ help: consider adding suffix: `0.12_f64`
+ |
+ = note: `-D clippy::default-numeric-fallback` implied by `-D warnings`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:22:18
+ |
+LL | let x = [1., 2., 3.];
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:22:22
+ |
+LL | let x = [1., 2., 3.];
+ | ^^ help: consider adding suffix: `2.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:22:26
+ |
+LL | let x = [1., 2., 3.];
+ | ^^ help: consider adding suffix: `3.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:23:28
+ |
+LL | let x = if true { (1., 2.) } else { (3., 4.) };
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:23:32
+ |
+LL | let x = if true { (1., 2.) } else { (3., 4.) };
+ | ^^ help: consider adding suffix: `2.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:23:46
+ |
+LL | let x = if true { (1., 2.) } else { (3., 4.) };
+ | ^^ help: consider adding suffix: `3.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:23:50
+ |
+LL | let x = if true { (1., 2.) } else { (3., 4.) };
+ | ^^ help: consider adding suffix: `4.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:24:23
+ |
+LL | let x = match 1. {
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:25:18
+ |
+LL | _ => 1.,
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:43:21
+ |
+LL | let y = 1.;
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:51:21
+ |
+LL | let y = 1.;
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:57:21
+ |
+LL | let y = 1.;
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:69:9
+ |
+LL | 1.
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:75:27
+ |
+LL | let f = || -> _ { 1. };
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:79:29
+ |
+LL | let f = || -> f64 { 1. };
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:93:21
+ |
+LL | generic_arg(1.);
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:96:32
+ |
+LL | let x: _ = generic_arg(1.);
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:114:28
+ |
+LL | GenericStruct { x: 1. };
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:117:36
+ |
+LL | let _ = GenericStruct { x: 1. };
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:135:24
+ |
+LL | GenericEnum::X(1.);
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:155:23
+ |
+LL | s.generic_arg(1.);
+ | ^^ help: consider adding suffix: `1.0_f64`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_f64.rs:162:21
+ |
+LL | let x = 22.;
+ | ^^^ help: consider adding suffix: `22.0_f64`
+...
+LL | internal_macro!();
+ | ----------------- in this macro invocation
+ |
+ = note: this error originates in the macro `internal_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed
new file mode 100644
index 000000000..55451cf2f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.fixed
@@ -0,0 +1,182 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![feature(lint_reasons)]
+#![warn(clippy::default_numeric_fallback)]
+#![allow(
+ unused,
+ clippy::never_loop,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::branches_sharing_code,
+ clippy::let_unit_value
+)]
+
+#[macro_use]
+extern crate macro_rules;
+
+mod basic_expr {
+ fn test() {
+ // Should lint unsuffixed literals typed `i32`.
+ let x = 22_i32;
+ let x = [1_i32, 2_i32, 3_i32];
+ let x = if true { (1_i32, 2_i32) } else { (3_i32, 4_i32) };
+ let x = match 1_i32 {
+ 1_i32 => 1_i32,
+ _ => 2_i32,
+ };
+
+ // Should NOT lint suffixed literals.
+ let x = 22_i32;
+
+ // Should NOT lint literals in init expr if `Local` has a type annotation.
+ let x: [i32; 3] = [1, 2, 3];
+ let x: (i32, i32) = if true { (1, 2) } else { (3, 4) };
+ let x: _ = 1;
+ }
+}
+
+mod nested_local {
+ fn test() {
+ let x: _ = {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1_i32;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1
+ };
+
+ let x: _ = if true {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1_i32;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1
+ } else {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1_i32;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 2
+ };
+ }
+}
+
+mod function_def {
+ fn ret_i32() -> i32 {
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ 1_i32
+ }
+
+ fn test() {
+ // Should lint this because the return type is inferred to `i32` and NOT bound to a concrete
+ // type.
+ let f = || -> _ { 1_i32 };
+
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ let f = || -> i32 { 1_i32 };
+ }
+}
+
+mod function_calls {
+ fn concrete_arg(x: i32) {}
+
+ fn generic_arg<T>(t: T) {}
+
+ fn test() {
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ concrete_arg(1);
+
+ // Should lint this because the argument type is inferred to `i32` and NOT bound to a concrete type.
+ generic_arg(1_i32);
+
+ // Should lint this because the argument type is inferred to `i32` and NOT bound to a concrete type.
+ let x: _ = generic_arg(1_i32);
+ }
+}
+
+mod struct_ctor {
+ struct ConcreteStruct {
+ x: i32,
+ }
+
+ struct GenericStruct<T> {
+ x: T,
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteStruct { x: 1 };
+
+ // Should lint this because the field type is inferred to `i32` and NOT bound to a concrete type.
+ GenericStruct { x: 1_i32 };
+
+ // Should lint this because the field type is inferred to `i32` and NOT bound to a concrete type.
+ let _ = GenericStruct { x: 1_i32 };
+ }
+}
+
+mod enum_ctor {
+ enum ConcreteEnum {
+ X(i32),
+ }
+
+ enum GenericEnum<T> {
+ X(T),
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteEnum::X(1);
+
+ // Should lint this because the field type is inferred to `i32` and NOT bound to a concrete type.
+ GenericEnum::X(1_i32);
+ }
+}
+
+mod method_calls {
+ struct StructForMethodCallTest;
+
+ impl StructForMethodCallTest {
+ fn concrete_arg(&self, x: i32) {}
+
+ fn generic_arg<T>(&self, t: T) {}
+ }
+
+ fn test() {
+ let s = StructForMethodCallTest {};
+
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ s.concrete_arg(1);
+
+ // Should lint this because the argument type is inferred to `i32` and NOT bound to a concrete type.
+ s.generic_arg(1_i32);
+ }
+}
+
+mod in_macro {
+ macro_rules! internal_macro {
+ () => {
+ let x = 22_i32;
+ };
+ }
+
+ // Should lint in internal macro.
+ fn internal() {
+ internal_macro!();
+ }
+
+ // Should NOT lint in external macro.
+ fn external() {
+ default_numeric_fallback!();
+ }
+}
+
+fn check_expect_suppression() {
+ #[expect(clippy::default_numeric_fallback)]
+ let x = 21;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs
new file mode 100644
index 000000000..62d72f2fe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.rs
@@ -0,0 +1,182 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![feature(lint_reasons)]
+#![warn(clippy::default_numeric_fallback)]
+#![allow(
+ unused,
+ clippy::never_loop,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::branches_sharing_code,
+ clippy::let_unit_value
+)]
+
+#[macro_use]
+extern crate macro_rules;
+
+mod basic_expr {
+ fn test() {
+ // Should lint unsuffixed literals typed `i32`.
+ let x = 22;
+ let x = [1, 2, 3];
+ let x = if true { (1, 2) } else { (3, 4) };
+ let x = match 1 {
+ 1 => 1,
+ _ => 2,
+ };
+
+ // Should NOT lint suffixed literals.
+ let x = 22_i32;
+
+ // Should NOT lint literals in init expr if `Local` has a type annotation.
+ let x: [i32; 3] = [1, 2, 3];
+ let x: (i32, i32) = if true { (1, 2) } else { (3, 4) };
+ let x: _ = 1;
+ }
+}
+
+mod nested_local {
+ fn test() {
+ let x: _ = {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1
+ };
+
+ let x: _ = if true {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 1
+ } else {
+ // Should lint this because this literal is not bound to any types.
+ let y = 1;
+
+ // Should NOT lint this because this literal is bound to `_` of outer `Local`.
+ 2
+ };
+ }
+}
+
+mod function_def {
+ fn ret_i32() -> i32 {
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ 1
+ }
+
+ fn test() {
+ // Should lint this because the return type is inferred to `i32` and NOT bound to a concrete
+ // type.
+ let f = || -> _ { 1 };
+
+ // Even though the output type is specified,
+ // this unsuffixed literal is linted to reduce heuristics and keep codebase simple.
+ let f = || -> i32 { 1 };
+ }
+}
+
+mod function_calls {
+ fn concrete_arg(x: i32) {}
+
+ fn generic_arg<T>(t: T) {}
+
+ fn test() {
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ concrete_arg(1);
+
+ // Should lint this because the argument type is inferred to `i32` and NOT bound to a concrete type.
+ generic_arg(1);
+
+ // Should lint this because the argument type is inferred to `i32` and NOT bound to a concrete type.
+ let x: _ = generic_arg(1);
+ }
+}
+
+mod struct_ctor {
+ struct ConcreteStruct {
+ x: i32,
+ }
+
+ struct GenericStruct<T> {
+ x: T,
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteStruct { x: 1 };
+
+ // Should lint this because the field type is inferred to `i32` and NOT bound to a concrete type.
+ GenericStruct { x: 1 };
+
+ // Should lint this because the field type is inferred to `i32` and NOT bound to a concrete type.
+ let _ = GenericStruct { x: 1 };
+ }
+}
+
+mod enum_ctor {
+ enum ConcreteEnum {
+ X(i32),
+ }
+
+ enum GenericEnum<T> {
+ X(T),
+ }
+
+ fn test() {
+ // Should NOT lint this because the field type is bound to a concrete type.
+ ConcreteEnum::X(1);
+
+ // Should lint this because the field type is inferred to `i32` and NOT bound to a concrete type.
+ GenericEnum::X(1);
+ }
+}
+
+mod method_calls {
+ struct StructForMethodCallTest;
+
+ impl StructForMethodCallTest {
+ fn concrete_arg(&self, x: i32) {}
+
+ fn generic_arg<T>(&self, t: T) {}
+ }
+
+ fn test() {
+ let s = StructForMethodCallTest {};
+
+ // Should NOT lint this because the argument type is bound to a concrete type.
+ s.concrete_arg(1);
+
+ // Should lint this because the argument type is inferred to `i32` and NOT bound to a concrete type.
+ s.generic_arg(1);
+ }
+}
+
+mod in_macro {
+ macro_rules! internal_macro {
+ () => {
+ let x = 22;
+ };
+ }
+
+ // Should lint in internal macro.
+ fn internal() {
+ internal_macro!();
+ }
+
+ // Should NOT lint in external macro.
+ fn external() {
+ default_numeric_fallback!();
+ }
+}
+
+fn check_expect_suppression() {
+ #[expect(clippy::default_numeric_fallback)]
+ let x = 21;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr
new file mode 100644
index 000000000..f7c5e724c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_numeric_fallback_i32.stderr
@@ -0,0 +1,159 @@
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:21:17
+ |
+LL | let x = 22;
+ | ^^ help: consider adding suffix: `22_i32`
+ |
+ = note: `-D clippy::default-numeric-fallback` implied by `-D warnings`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:22:18
+ |
+LL | let x = [1, 2, 3];
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:22:21
+ |
+LL | let x = [1, 2, 3];
+ | ^ help: consider adding suffix: `2_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:22:24
+ |
+LL | let x = [1, 2, 3];
+ | ^ help: consider adding suffix: `3_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:23:28
+ |
+LL | let x = if true { (1, 2) } else { (3, 4) };
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:23:31
+ |
+LL | let x = if true { (1, 2) } else { (3, 4) };
+ | ^ help: consider adding suffix: `2_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:23:44
+ |
+LL | let x = if true { (1, 2) } else { (3, 4) };
+ | ^ help: consider adding suffix: `3_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:23:47
+ |
+LL | let x = if true { (1, 2) } else { (3, 4) };
+ | ^ help: consider adding suffix: `4_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:24:23
+ |
+LL | let x = match 1 {
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:25:13
+ |
+LL | 1 => 1,
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:25:18
+ |
+LL | 1 => 1,
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:26:18
+ |
+LL | _ => 2,
+ | ^ help: consider adding suffix: `2_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:43:21
+ |
+LL | let y = 1;
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:51:21
+ |
+LL | let y = 1;
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:57:21
+ |
+LL | let y = 1;
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:69:9
+ |
+LL | 1
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:75:27
+ |
+LL | let f = || -> _ { 1 };
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:79:29
+ |
+LL | let f = || -> i32 { 1 };
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:93:21
+ |
+LL | generic_arg(1);
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:96:32
+ |
+LL | let x: _ = generic_arg(1);
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:114:28
+ |
+LL | GenericStruct { x: 1 };
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:117:36
+ |
+LL | let _ = GenericStruct { x: 1 };
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:135:24
+ |
+LL | GenericEnum::X(1);
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:155:23
+ |
+LL | s.generic_arg(1);
+ | ^ help: consider adding suffix: `1_i32`
+
+error: default numeric fallback might occur
+ --> $DIR/default_numeric_fallback_i32.rs:162:21
+ |
+LL | let x = 22;
+ | ^^ help: consider adding suffix: `22_i32`
+...
+LL | internal_macro!();
+ | ----------------- in this macro invocation
+ |
+ = note: this error originates in the macro `internal_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 25 previous errors
+
diff --git a/src/tools/clippy/tests/ui/default_trait_access.fixed b/src/tools/clippy/tests/ui/default_trait_access.fixed
new file mode 100644
index 000000000..264dd4efa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_trait_access.fixed
@@ -0,0 +1,99 @@
+// run-rustfix
+
+#![allow(unused_imports, dead_code)]
+#![deny(clippy::default_trait_access)]
+
+use std::default;
+use std::default::Default as D2;
+use std::string;
+
+fn main() {
+ let s1: String = std::string::String::default();
+
+ let s2 = String::default();
+
+ let s3: String = std::string::String::default();
+
+ let s4: String = std::string::String::default();
+
+ let s5 = string::String::default();
+
+ let s6: String = std::string::String::default();
+
+ let s7 = std::string::String::default();
+
+ let s8: String = DefaultFactory::make_t_badly();
+
+ let s9: String = DefaultFactory::make_t_nicely();
+
+ let s10 = DerivedDefault::default();
+
+ let s11: GenericDerivedDefault<String> = GenericDerivedDefault::default();
+
+ let s12 = GenericDerivedDefault::<String>::default();
+
+ let s13 = TupleDerivedDefault::default();
+
+ let s14: TupleDerivedDefault = TupleDerivedDefault::default();
+
+ let s15: ArrayDerivedDefault = ArrayDerivedDefault::default();
+
+ let s16 = ArrayDerivedDefault::default();
+
+ let s17: TupleStructDerivedDefault = TupleStructDerivedDefault::default();
+
+ let s18 = TupleStructDerivedDefault::default();
+
+ let s19 = <DerivedDefault as Default>::default();
+
+ let s20 = UpdateSyntax {
+ s: "foo",
+ ..Default::default()
+ };
+
+ println!(
+ "[{}] [{}] [{}] [{}] [{}] [{}] [{}] [{}] [{}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}]",
+ s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16, s17, s18, s19, s20,
+ );
+}
+
+struct DefaultFactory;
+
+impl DefaultFactory {
+ pub fn make_t_badly<T: Default>() -> T {
+ Default::default()
+ }
+
+ pub fn make_t_nicely<T: Default>() -> T {
+ T::default()
+ }
+}
+
+#[derive(Debug, Default)]
+struct DerivedDefault {
+ pub s: String,
+}
+
+#[derive(Debug, Default)]
+struct GenericDerivedDefault<T: Default + std::fmt::Debug> {
+ pub s: T,
+}
+
+#[derive(Debug, Default)]
+struct TupleDerivedDefault {
+ pub s: (String, String),
+}
+
+#[derive(Debug, Default)]
+struct ArrayDerivedDefault {
+ pub s: [String; 10],
+}
+
+#[derive(Debug, Default)]
+struct TupleStructDerivedDefault(String);
+
+#[derive(Debug, Default)]
+struct UpdateSyntax {
+ pub s: &'static str,
+ pub u: u64,
+}
diff --git a/src/tools/clippy/tests/ui/default_trait_access.rs b/src/tools/clippy/tests/ui/default_trait_access.rs
new file mode 100644
index 000000000..a0930fab8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_trait_access.rs
@@ -0,0 +1,99 @@
+// run-rustfix
+
+#![allow(unused_imports, dead_code)]
+#![deny(clippy::default_trait_access)]
+
+use std::default;
+use std::default::Default as D2;
+use std::string;
+
+fn main() {
+ let s1: String = Default::default();
+
+ let s2 = String::default();
+
+ let s3: String = D2::default();
+
+ let s4: String = std::default::Default::default();
+
+ let s5 = string::String::default();
+
+ let s6: String = default::Default::default();
+
+ let s7 = std::string::String::default();
+
+ let s8: String = DefaultFactory::make_t_badly();
+
+ let s9: String = DefaultFactory::make_t_nicely();
+
+ let s10 = DerivedDefault::default();
+
+ let s11: GenericDerivedDefault<String> = Default::default();
+
+ let s12 = GenericDerivedDefault::<String>::default();
+
+ let s13 = TupleDerivedDefault::default();
+
+ let s14: TupleDerivedDefault = Default::default();
+
+ let s15: ArrayDerivedDefault = Default::default();
+
+ let s16 = ArrayDerivedDefault::default();
+
+ let s17: TupleStructDerivedDefault = Default::default();
+
+ let s18 = TupleStructDerivedDefault::default();
+
+ let s19 = <DerivedDefault as Default>::default();
+
+ let s20 = UpdateSyntax {
+ s: "foo",
+ ..Default::default()
+ };
+
+ println!(
+ "[{}] [{}] [{}] [{}] [{}] [{}] [{}] [{}] [{}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}] [{:?}]",
+ s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16, s17, s18, s19, s20,
+ );
+}
+
+struct DefaultFactory;
+
+impl DefaultFactory {
+ pub fn make_t_badly<T: Default>() -> T {
+ Default::default()
+ }
+
+ pub fn make_t_nicely<T: Default>() -> T {
+ T::default()
+ }
+}
+
+#[derive(Debug, Default)]
+struct DerivedDefault {
+ pub s: String,
+}
+
+#[derive(Debug, Default)]
+struct GenericDerivedDefault<T: Default + std::fmt::Debug> {
+ pub s: T,
+}
+
+#[derive(Debug, Default)]
+struct TupleDerivedDefault {
+ pub s: (String, String),
+}
+
+#[derive(Debug, Default)]
+struct ArrayDerivedDefault {
+ pub s: [String; 10],
+}
+
+#[derive(Debug, Default)]
+struct TupleStructDerivedDefault(String);
+
+#[derive(Debug, Default)]
+struct UpdateSyntax {
+ pub s: &'static str,
+ pub u: u64,
+}
diff --git a/src/tools/clippy/tests/ui/default_trait_access.stderr b/src/tools/clippy/tests/ui/default_trait_access.stderr
new file mode 100644
index 000000000..df8a5b94d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_trait_access.stderr
@@ -0,0 +1,56 @@
+error: calling `std::string::String::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:11:22
+ |
+LL | let s1: String = Default::default();
+ | ^^^^^^^^^^^^^^^^^^ help: try: `std::string::String::default()`
+ |
+note: the lint level is defined here
+ --> $DIR/default_trait_access.rs:4:9
+ |
+LL | #![deny(clippy::default_trait_access)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: calling `std::string::String::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:15:22
+ |
+LL | let s3: String = D2::default();
+ | ^^^^^^^^^^^^^ help: try: `std::string::String::default()`
+
+error: calling `std::string::String::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:17:22
+ |
+LL | let s4: String = std::default::Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::string::String::default()`
+
+error: calling `std::string::String::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:21:22
+ |
+LL | let s6: String = default::Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::string::String::default()`
+
+error: calling `GenericDerivedDefault::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:31:46
+ |
+LL | let s11: GenericDerivedDefault<String> = Default::default();
+ | ^^^^^^^^^^^^^^^^^^ help: try: `GenericDerivedDefault::default()`
+
+error: calling `TupleDerivedDefault::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:37:36
+ |
+LL | let s14: TupleDerivedDefault = Default::default();
+ | ^^^^^^^^^^^^^^^^^^ help: try: `TupleDerivedDefault::default()`
+
+error: calling `ArrayDerivedDefault::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:39:36
+ |
+LL | let s15: ArrayDerivedDefault = Default::default();
+ | ^^^^^^^^^^^^^^^^^^ help: try: `ArrayDerivedDefault::default()`
+
+error: calling `TupleStructDerivedDefault::default()` is more clear than this expression
+ --> $DIR/default_trait_access.rs:43:42
+ |
+LL | let s17: TupleStructDerivedDefault = Default::default();
+ | ^^^^^^^^^^^^^^^^^^ help: try: `TupleStructDerivedDefault::default()`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/default_union_representation.rs b/src/tools/clippy/tests/ui/default_union_representation.rs
new file mode 100644
index 000000000..93b2d33da
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_union_representation.rs
@@ -0,0 +1,78 @@
+#![feature(transparent_unions)]
+#![warn(clippy::default_union_representation)]
+
+union NoAttribute {
+ a: i32,
+ b: u32,
+}
+
+#[repr(C)]
+union ReprC {
+ a: i32,
+ b: u32,
+}
+
+#[repr(packed)]
+union ReprPacked {
+ a: i32,
+ b: u32,
+}
+
+#[repr(C, packed)]
+union ReprCPacked {
+ a: i32,
+ b: u32,
+}
+
+#[repr(C, align(32))]
+union ReprCAlign {
+ a: i32,
+ b: u32,
+}
+
+#[repr(align(32))]
+union ReprAlign {
+ a: i32,
+ b: u32,
+}
+
+union SingleZST {
+ f0: (),
+}
+union ZSTsAndField1 {
+ f0: u32,
+ f1: (),
+ f2: (),
+ f3: (),
+}
+union ZSTsAndField2 {
+ f0: (),
+ f1: (),
+ f2: u32,
+ f3: (),
+}
+union ZSTAndTwoFields {
+ f0: u32,
+ f1: u64,
+ f2: (),
+}
+
+#[repr(C)]
+union CZSTAndTwoFields {
+ f0: u32,
+ f1: u64,
+ f2: (),
+}
+
+#[repr(transparent)]
+union ReprTransparent {
+ a: i32,
+}
+
+#[repr(transparent)]
+union ReprTransparentZST {
+ a: i32,
+ b: (),
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/default_union_representation.stderr b/src/tools/clippy/tests/ui/default_union_representation.stderr
new file mode 100644
index 000000000..138884af8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/default_union_representation.stderr
@@ -0,0 +1,48 @@
+error: this union has the default representation
+ --> $DIR/default_union_representation.rs:4:1
+ |
+LL | / union NoAttribute {
+LL | | a: i32,
+LL | | b: u32,
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::default-union-representation` implied by `-D warnings`
+ = help: consider annotating `NoAttribute` with `#[repr(C)]` to explicitly specify memory layout
+
+error: this union has the default representation
+ --> $DIR/default_union_representation.rs:16:1
+ |
+LL | / union ReprPacked {
+LL | | a: i32,
+LL | | b: u32,
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `ReprPacked` with `#[repr(C)]` to explicitly specify memory layout
+
+error: this union has the default representation
+ --> $DIR/default_union_representation.rs:34:1
+ |
+LL | / union ReprAlign {
+LL | | a: i32,
+LL | | b: u32,
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `ReprAlign` with `#[repr(C)]` to explicitly specify memory layout
+
+error: this union has the default representation
+ --> $DIR/default_union_representation.rs:54:1
+ |
+LL | / union ZSTAndTwoFields {
+LL | | f0: u32,
+LL | | f1: u64,
+LL | | f2: (),
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `ZSTAndTwoFields` with `#[repr(C)]` to explicitly specify memory layout
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/deprecated.rs b/src/tools/clippy/tests/ui/deprecated.rs
new file mode 100644
index 000000000..07270bd76
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deprecated.rs
@@ -0,0 +1,22 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+#![warn(clippy::should_assert_eq)]
+#![warn(clippy::extend_from_slice)]
+#![warn(clippy::range_step_by_zero)]
+#![warn(clippy::unstable_as_slice)]
+#![warn(clippy::unstable_as_mut_slice)]
+#![warn(clippy::misaligned_transmute)]
+#![warn(clippy::assign_ops)]
+#![warn(clippy::if_let_redundant_pattern_matching)]
+#![warn(clippy::unsafe_vector_initialization)]
+#![warn(clippy::unused_collect)]
+#![warn(clippy::replace_consts)]
+#![warn(clippy::regex_macro)]
+#![warn(clippy::find_map)]
+#![warn(clippy::filter_map)]
+#![warn(clippy::pub_enum_variant_names)]
+#![warn(clippy::wrong_pub_self_convention)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/deprecated.stderr b/src/tools/clippy/tests/ui/deprecated.stderr
new file mode 100644
index 000000000..0e142ac8f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deprecated.stderr
@@ -0,0 +1,100 @@
+error: lint `clippy::should_assert_eq` has been removed: `assert!()` will be more flexible with RFC 2011
+ --> $DIR/deprecated.rs:5:9
+ |
+LL | #![warn(clippy::should_assert_eq)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D renamed-and-removed-lints` implied by `-D warnings`
+
+error: lint `clippy::extend_from_slice` has been removed: `.extend_from_slice(_)` is a faster way to extend a Vec by a slice
+ --> $DIR/deprecated.rs:6:9
+ |
+LL | #![warn(clippy::extend_from_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::range_step_by_zero` has been removed: `iterator.step_by(0)` panics nowadays
+ --> $DIR/deprecated.rs:7:9
+ |
+LL | #![warn(clippy::range_step_by_zero)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::unstable_as_slice` has been removed: `Vec::as_slice` has been stabilized in 1.7
+ --> $DIR/deprecated.rs:8:9
+ |
+LL | #![warn(clippy::unstable_as_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::unstable_as_mut_slice` has been removed: `Vec::as_mut_slice` has been stabilized in 1.7
+ --> $DIR/deprecated.rs:9:9
+ |
+LL | #![warn(clippy::unstable_as_mut_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::misaligned_transmute` has been removed: this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr
+ --> $DIR/deprecated.rs:10:9
+ |
+LL | #![warn(clippy::misaligned_transmute)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::assign_ops` has been removed: using compound assignment operators (e.g., `+=`) is harmless
+ --> $DIR/deprecated.rs:11:9
+ |
+LL | #![warn(clippy::assign_ops)]
+ | ^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::if_let_redundant_pattern_matching` has been removed: this lint has been changed to redundant_pattern_matching
+ --> $DIR/deprecated.rs:12:9
+ |
+LL | #![warn(clippy::if_let_redundant_pattern_matching)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::unsafe_vector_initialization` has been removed: the replacement suggested by this lint had substantially different behavior
+ --> $DIR/deprecated.rs:13:9
+ |
+LL | #![warn(clippy::unsafe_vector_initialization)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::unused_collect` has been removed: `collect` has been marked as #[must_use] in rustc and that covers all cases of this lint
+ --> $DIR/deprecated.rs:14:9
+ |
+LL | #![warn(clippy::unused_collect)]
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::replace_consts` has been removed: associated-constants `MIN`/`MAX` of integers are preferred to `{min,max}_value()` and module constants
+ --> $DIR/deprecated.rs:15:9
+ |
+LL | #![warn(clippy::replace_consts)]
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::regex_macro` has been removed: the regex! macro has been removed from the regex crate in 2018
+ --> $DIR/deprecated.rs:16:9
+ |
+LL | #![warn(clippy::regex_macro)]
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::find_map` has been removed: this lint has been replaced by `manual_find_map`, a more specific lint
+ --> $DIR/deprecated.rs:17:9
+ |
+LL | #![warn(clippy::find_map)]
+ | ^^^^^^^^^^^^^^^^
+
+error: lint `clippy::filter_map` has been removed: this lint has been replaced by `manual_filter_map`, a more specific lint
+ --> $DIR/deprecated.rs:18:9
+ |
+LL | #![warn(clippy::filter_map)]
+ | ^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::pub_enum_variant_names` has been removed: set the `avoid-breaking-exported-api` config option to `false` to enable the `enum_variant_names` lint for public items
+ --> $DIR/deprecated.rs:19:9
+ |
+LL | #![warn(clippy::pub_enum_variant_names)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `clippy::wrong_pub_self_convention` has been removed: set the `avoid-breaking-exported-api` config option to `false` to enable the `wrong_self_convention` lint for public items
+ --> $DIR/deprecated.rs:20:9
+ |
+LL | #![warn(clippy::wrong_pub_self_convention)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/deprecated_old.rs b/src/tools/clippy/tests/ui/deprecated_old.rs
new file mode 100644
index 000000000..e89dca4fc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deprecated_old.rs
@@ -0,0 +1,5 @@
+#[warn(unstable_as_slice)]
+#[warn(unstable_as_mut_slice)]
+#[warn(misaligned_transmute)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/deprecated_old.stderr b/src/tools/clippy/tests/ui/deprecated_old.stderr
new file mode 100644
index 000000000..8043ab005
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deprecated_old.stderr
@@ -0,0 +1,22 @@
+error: lint `unstable_as_slice` has been removed: `Vec::as_slice` has been stabilized in 1.7
+ --> $DIR/deprecated_old.rs:1:8
+ |
+LL | #[warn(unstable_as_slice)]
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D renamed-and-removed-lints` implied by `-D warnings`
+
+error: lint `unstable_as_mut_slice` has been removed: `Vec::as_mut_slice` has been stabilized in 1.7
+ --> $DIR/deprecated_old.rs:2:8
+ |
+LL | #[warn(unstable_as_mut_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: lint `misaligned_transmute` has been removed: this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr
+ --> $DIR/deprecated_old.rs:3:8
+ |
+LL | #[warn(misaligned_transmute)]
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/deref_addrof.fixed b/src/tools/clippy/tests/ui/deref_addrof.fixed
new file mode 100644
index 000000000..2f489deb1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_addrof.fixed
@@ -0,0 +1,68 @@
+// run-rustfix
+#![allow(clippy::return_self_not_must_use)]
+#![warn(clippy::deref_addrof)]
+
+fn get_number() -> usize {
+ 10
+}
+
+fn get_reference(n: &usize) -> &usize {
+ n
+}
+
+#[allow(clippy::double_parens)]
+#[allow(unused_variables, unused_parens)]
+fn main() {
+ let a = 10;
+ let aref = &a;
+
+ let b = a;
+
+ let b = get_number();
+
+ let b = *get_reference(&a);
+
+ let bytes: Vec<usize> = vec![1, 2, 3, 4];
+ let b = bytes[1..2][0];
+
+ //This produces a suggestion of 'let b = (a);' which
+ //will trigger the 'unused_parens' lint
+ let b = (a);
+
+ let b = a;
+
+ #[rustfmt::skip]
+ let b = a;
+
+ let b = &a;
+
+ let b = *aref;
+
+ let _ = unsafe { *core::ptr::addr_of!(a) };
+}
+
+#[rustfmt::skip]
+macro_rules! m {
+ ($visitor: expr) => {
+ $visitor
+ };
+}
+
+#[rustfmt::skip]
+macro_rules! m_mut {
+ ($visitor: expr) => {
+ $visitor
+ };
+}
+
+#[derive(Copy, Clone)]
+pub struct S;
+impl S {
+ pub fn f(&self) -> &Self {
+ m!(self)
+ }
+ #[allow(unused_mut)] // mut will be unused, once the macro is fixed
+ pub fn f_mut(mut self) -> Self {
+ m_mut!(self)
+ }
+}
diff --git a/src/tools/clippy/tests/ui/deref_addrof.rs b/src/tools/clippy/tests/ui/deref_addrof.rs
new file mode 100644
index 000000000..49f360b9a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_addrof.rs
@@ -0,0 +1,68 @@
+// run-rustfix
+#![allow(clippy::return_self_not_must_use)]
+#![warn(clippy::deref_addrof)]
+
+fn get_number() -> usize {
+ 10
+}
+
+fn get_reference(n: &usize) -> &usize {
+ n
+}
+
+#[allow(clippy::double_parens)]
+#[allow(unused_variables, unused_parens)]
+fn main() {
+ let a = 10;
+ let aref = &a;
+
+ let b = *&a;
+
+ let b = *&get_number();
+
+ let b = *get_reference(&a);
+
+ let bytes: Vec<usize> = vec![1, 2, 3, 4];
+ let b = *&bytes[1..2][0];
+
+ //This produces a suggestion of 'let b = (a);' which
+ //will trigger the 'unused_parens' lint
+ let b = *&(a);
+
+ let b = *(&a);
+
+ #[rustfmt::skip]
+ let b = *((&a));
+
+ let b = *&&a;
+
+ let b = **&aref;
+
+ let _ = unsafe { *core::ptr::addr_of!(a) };
+}
+
+#[rustfmt::skip]
+macro_rules! m {
+ ($visitor: expr) => {
+ *& $visitor
+ };
+}
+
+#[rustfmt::skip]
+macro_rules! m_mut {
+ ($visitor: expr) => {
+ *& mut $visitor
+ };
+}
+
+#[derive(Copy, Clone)]
+pub struct S;
+impl S {
+ pub fn f(&self) -> &Self {
+ m!(self)
+ }
+ #[allow(unused_mut)] // mut will be unused, once the macro is fixed
+ pub fn f_mut(mut self) -> Self {
+ m_mut!(self)
+ }
+}
diff --git a/src/tools/clippy/tests/ui/deref_addrof.stderr b/src/tools/clippy/tests/ui/deref_addrof.stderr
new file mode 100644
index 000000000..75371fcdb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_addrof.stderr
@@ -0,0 +1,74 @@
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:19:13
+ |
+LL | let b = *&a;
+ | ^^^ help: try this: `a`
+ |
+ = note: `-D clippy::deref-addrof` implied by `-D warnings`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:21:13
+ |
+LL | let b = *&get_number();
+ | ^^^^^^^^^^^^^^ help: try this: `get_number()`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:26:13
+ |
+LL | let b = *&bytes[1..2][0];
+ | ^^^^^^^^^^^^^^^^ help: try this: `bytes[1..2][0]`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:30:13
+ |
+LL | let b = *&(a);
+ | ^^^^^ help: try this: `(a)`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:32:13
+ |
+LL | let b = *(&a);
+ | ^^^^^ help: try this: `a`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:35:13
+ |
+LL | let b = *((&a));
+ | ^^^^^^^ help: try this: `a`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:37:13
+ |
+LL | let b = *&&a;
+ | ^^^^ help: try this: `&a`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:39:14
+ |
+LL | let b = **&aref;
+ | ^^^^^^ help: try this: `aref`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:47:9
+ |
+LL | *& $visitor
+ | ^^^^^^^^^^^ help: try this: `$visitor`
+...
+LL | m!(self)
+ | -------- in this macro invocation
+ |
+ = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof.rs:54:9
+ |
+LL | *& mut $visitor
+ | ^^^^^^^^^^^^^^^ help: try this: `$visitor`
+...
+LL | m_mut!(self)
+ | ------------ in this macro invocation
+ |
+ = note: this error originates in the macro `m_mut` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/deref_addrof_double_trigger.rs b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.rs
new file mode 100644
index 000000000..453194329
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.rs
@@ -0,0 +1,23 @@
+// This test can't work with run-rustfix because it needs two passes of test+fix
+
+#[warn(clippy::deref_addrof)]
+#[allow(unused_variables, unused_mut)]
+fn main() {
+ let a = 10;
+
+ //This produces a suggestion of 'let b = *&a;' which
+ //will trigger the 'clippy::deref_addrof' lint again
+ let b = **&&a;
+
+ {
+ let mut x = 10;
+ let y = *&mut x;
+ }
+
+ {
+ //This produces a suggestion of 'let y = *&mut x' which
+ //will trigger the 'clippy::deref_addrof' lint again
+ let mut x = 10;
+ let y = **&mut &mut x;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr
new file mode 100644
index 000000000..2c55a4ed6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr
@@ -0,0 +1,22 @@
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof_double_trigger.rs:10:14
+ |
+LL | let b = **&&a;
+ | ^^^^ help: try this: `&a`
+ |
+ = note: `-D clippy::deref-addrof` implied by `-D warnings`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof_double_trigger.rs:14:17
+ |
+LL | let y = *&mut x;
+ | ^^^^^^^ help: try this: `x`
+
+error: immediately dereferencing a reference
+ --> $DIR/deref_addrof_double_trigger.rs:21:18
+ |
+LL | let y = **&mut &mut x;
+ | ^^^^^^^^^^^^ help: try this: `&mut x`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/deref_addrof_macro.rs b/src/tools/clippy/tests/ui/deref_addrof_macro.rs
new file mode 100644
index 000000000..dcebd6c6e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_addrof_macro.rs
@@ -0,0 +1,10 @@
+macro_rules! m {
+ ($($x:tt),*) => { &[$(($x, stringify!(x)),)*] };
+}
+
+#[warn(clippy::deref_addrof)]
+fn f() -> [(i32, &'static str); 3] {
+ *m![1, 2, 3] // should be fine
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/deref_by_slicing.fixed b/src/tools/clippy/tests/ui/deref_by_slicing.fixed
new file mode 100644
index 000000000..257393e56
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_by_slicing.fixed
@@ -0,0 +1,30 @@
+// run-rustfix
+
+#![warn(clippy::deref_by_slicing)]
+#![allow(clippy::borrow_deref_ref)]
+
+use std::io::Read;
+
+fn main() {
+ let mut vec = vec![0];
+ let _ = &*vec;
+ let _ = &mut *vec;
+
+ let ref_vec = &mut vec;
+ let _ = &**ref_vec;
+ let mut_slice = &mut **ref_vec;
+ let _ = &mut *mut_slice; // Err, re-borrows slice
+
+ let s = String::new();
+ let _ = &*s;
+
+ static S: &[u8] = &[0, 1, 2];
+ let _ = &mut &*S; // Err, re-borrows slice
+
+ let slice: &[u32] = &[0u32, 1u32];
+ let slice_ref = &slice;
+ let _ = *slice_ref; // Err, derefs slice
+
+ let bytes: &[u8] = &[];
+ let _ = (&*bytes).read_to_end(&mut vec![]).unwrap(); // Err, re-borrows slice
+}
diff --git a/src/tools/clippy/tests/ui/deref_by_slicing.rs b/src/tools/clippy/tests/ui/deref_by_slicing.rs
new file mode 100644
index 000000000..e288046f9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_by_slicing.rs
@@ -0,0 +1,30 @@
+// run-rustfix
+
+#![warn(clippy::deref_by_slicing)]
+#![allow(clippy::borrow_deref_ref)]
+
+use std::io::Read;
+
+fn main() {
+ let mut vec = vec![0];
+ let _ = &vec[..];
+ let _ = &mut vec[..];
+
+ let ref_vec = &mut vec;
+ let _ = &ref_vec[..];
+ let mut_slice = &mut ref_vec[..];
+ let _ = &mut mut_slice[..]; // Err, re-borrows slice
+
+ let s = String::new();
+ let _ = &s[..];
+
+ static S: &[u8] = &[0, 1, 2];
+ let _ = &mut &S[..]; // Err, re-borrows slice
+
+ let slice: &[u32] = &[0u32, 1u32];
+ let slice_ref = &slice;
+ let _ = &slice_ref[..]; // Err, derefs slice
+
+ let bytes: &[u8] = &[];
+ let _ = (&bytes[..]).read_to_end(&mut vec![]).unwrap(); // Err, re-borrows slice
+}
diff --git a/src/tools/clippy/tests/ui/deref_by_slicing.stderr b/src/tools/clippy/tests/ui/deref_by_slicing.stderr
new file mode 100644
index 000000000..8f042ef47
--- /dev/null
+++ b/src/tools/clippy/tests/ui/deref_by_slicing.stderr
@@ -0,0 +1,58 @@
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:10:13
+ |
+LL | let _ = &vec[..];
+ | ^^^^^^^^ help: dereference the original value instead: `&*vec`
+ |
+ = note: `-D clippy::deref-by-slicing` implied by `-D warnings`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:11:13
+ |
+LL | let _ = &mut vec[..];
+ | ^^^^^^^^^^^^ help: dereference the original value instead: `&mut *vec`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:14:13
+ |
+LL | let _ = &ref_vec[..];
+ | ^^^^^^^^^^^^ help: dereference the original value instead: `&**ref_vec`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:15:21
+ |
+LL | let mut_slice = &mut ref_vec[..];
+ | ^^^^^^^^^^^^^^^^ help: dereference the original value instead: `&mut **ref_vec`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:16:13
+ |
+LL | let _ = &mut mut_slice[..]; // Err, re-borrows slice
+ | ^^^^^^^^^^^^^^^^^^ help: reborrow the original value instead: `&mut *mut_slice`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:19:13
+ |
+LL | let _ = &s[..];
+ | ^^^^^^ help: dereference the original value instead: `&*s`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:22:18
+ |
+LL | let _ = &mut &S[..]; // Err, re-borrows slice
+ | ^^^^^^ help: reborrow the original value instead: `&*S`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:26:13
+ |
+LL | let _ = &slice_ref[..]; // Err, derefs slice
+ | ^^^^^^^^^^^^^^ help: dereference the original value instead: `*slice_ref`
+
+error: slicing when dereferencing would work
+ --> $DIR/deref_by_slicing.rs:29:13
+ |
+LL | let _ = (&bytes[..]).read_to_end(&mut vec![]).unwrap(); // Err, re-borrows slice
+ | ^^^^^^^^^^^^ help: reborrow the original value instead: `(&*bytes)`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/derivable_impls.rs b/src/tools/clippy/tests/ui/derivable_impls.rs
new file mode 100644
index 000000000..a64120047
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derivable_impls.rs
@@ -0,0 +1,243 @@
+use std::collections::HashMap;
+
+struct FooDefault<'a> {
+ a: bool,
+ b: i32,
+ c: u64,
+ d: Vec<i32>,
+ e: FooND1,
+ f: FooND2,
+ g: HashMap<i32, i32>,
+ h: (i32, Vec<i32>),
+ i: [Vec<i32>; 3],
+ j: [i32; 5],
+ k: Option<i32>,
+ l: &'a [i32],
+}
+
+impl std::default::Default for FooDefault<'_> {
+ fn default() -> Self {
+ Self {
+ a: false,
+ b: 0,
+ c: 0u64,
+ d: vec![],
+ e: Default::default(),
+ f: FooND2::default(),
+ g: HashMap::new(),
+ h: (0, vec![]),
+ i: [vec![], vec![], vec![]],
+ j: [0; 5],
+ k: None,
+ l: &[],
+ }
+ }
+}
+
+struct TupleDefault(bool, i32, u64);
+
+impl std::default::Default for TupleDefault {
+ fn default() -> Self {
+ Self(false, 0, 0u64)
+ }
+}
+
+struct FooND1 {
+ a: bool,
+}
+
+impl std::default::Default for FooND1 {
+ fn default() -> Self {
+ Self { a: true }
+ }
+}
+
+struct FooND2 {
+ a: i32,
+}
+
+impl std::default::Default for FooND2 {
+ fn default() -> Self {
+ Self { a: 5 }
+ }
+}
+
+struct FooNDNew {
+ a: bool,
+}
+
+impl FooNDNew {
+ fn new() -> Self {
+ Self { a: true }
+ }
+}
+
+impl Default for FooNDNew {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+struct FooNDVec(Vec<i32>);
+
+impl Default for FooNDVec {
+ fn default() -> Self {
+ Self(vec![5, 12])
+ }
+}
+
+struct StrDefault<'a>(&'a str);
+
+impl Default for StrDefault<'_> {
+ fn default() -> Self {
+ Self("")
+ }
+}
+
+#[derive(Default)]
+struct AlreadyDerived(i32, bool);
+
+macro_rules! mac {
+ () => {
+ 0
+ };
+ ($e:expr) => {
+ struct X(u32);
+ impl Default for X {
+ fn default() -> Self {
+ Self($e)
+ }
+ }
+ };
+}
+
+mac!(0);
+
+struct Y(u32);
+impl Default for Y {
+ fn default() -> Self {
+ Self(mac!())
+ }
+}
+
+struct RustIssue26925<T> {
+ a: Option<T>,
+}
+
+// We should watch out for cases where a manual impl is needed because a
+// derive adds different type bounds (https://github.com/rust-lang/rust/issues/26925).
+// For example, a struct with Option<T> does not require T: Default, but a derive adds
+// that type bound anyway. So until #26925 gets fixed, we should disable the lint
+// for the following case.
+impl<T> Default for RustIssue26925<T> {
+ fn default() -> Self {
+ Self { a: None }
+ }
+}
+
+struct SpecializedImpl<A, B> {
+ a: A,
+ b: B,
+}
+
+impl<T: Default> Default for SpecializedImpl<T, T> {
+ fn default() -> Self {
+ Self {
+ a: T::default(),
+ b: T::default(),
+ }
+ }
+}
+
+struct WithoutSelfCurly {
+ a: bool,
+}
+
+impl Default for WithoutSelfCurly {
+ fn default() -> Self {
+ WithoutSelfCurly { a: false }
+ }
+}
+
+struct WithoutSelfParan(bool);
+
+impl Default for WithoutSelfParan {
+ fn default() -> Self {
+ WithoutSelfParan(false)
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/7655
+
+pub struct SpecializedImpl2<T> {
+ v: Vec<T>,
+}
+
+impl Default for SpecializedImpl2<String> {
+ fn default() -> Self {
+ Self { v: Vec::new() }
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/7654
+
+pub struct Color {
+ pub r: u8,
+ pub g: u8,
+ pub b: u8,
+}
+
+/// `#000000`
+impl Default for Color {
+ fn default() -> Self {
+ Color { r: 0, g: 0, b: 0 }
+ }
+}
+
+pub struct Color2 {
+ pub r: u8,
+ pub g: u8,
+ pub b: u8,
+}
+
+impl Default for Color2 {
+ /// `#000000`
+ fn default() -> Self {
+ Self { r: 0, g: 0, b: 0 }
+ }
+}
+
+pub struct RepeatDefault1 {
+ a: [i8; 32],
+}
+
+impl Default for RepeatDefault1 {
+ fn default() -> Self {
+ RepeatDefault1 { a: [0; 32] }
+ }
+}
+
+pub struct RepeatDefault2 {
+ a: [i8; 33],
+}
+
+impl Default for RepeatDefault2 {
+ fn default() -> Self {
+ RepeatDefault2 { a: [0; 33] }
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/7753
+
+pub enum IntOrString {
+ Int(i32),
+ String(String),
+}
+
+impl Default for IntOrString {
+ fn default() -> Self {
+ IntOrString::Int(0)
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/derivable_impls.stderr b/src/tools/clippy/tests/ui/derivable_impls.stderr
new file mode 100644
index 000000000..49fb471a2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derivable_impls.stderr
@@ -0,0 +1,89 @@
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:18:1
+ |
+LL | / impl std::default::Default for FooDefault<'_> {
+LL | | fn default() -> Self {
+LL | | Self {
+LL | | a: false,
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::derivable-impls` implied by `-D warnings`
+ = help: try annotating `FooDefault` with `#[derive(Default)]`
+
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:39:1
+ |
+LL | / impl std::default::Default for TupleDefault {
+LL | | fn default() -> Self {
+LL | | Self(false, 0, 0u64)
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: try annotating `TupleDefault` with `#[derive(Default)]`
+
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:91:1
+ |
+LL | / impl Default for StrDefault<'_> {
+LL | | fn default() -> Self {
+LL | | Self("")
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: try annotating `StrDefault` with `#[derive(Default)]`
+
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:117:1
+ |
+LL | / impl Default for Y {
+LL | | fn default() -> Self {
+LL | | Self(mac!())
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: try annotating `Y` with `#[derive(Default)]`
+
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:156:1
+ |
+LL | / impl Default for WithoutSelfCurly {
+LL | | fn default() -> Self {
+LL | | WithoutSelfCurly { a: false }
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: try annotating `WithoutSelfCurly` with `#[derive(Default)]`
+
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:164:1
+ |
+LL | / impl Default for WithoutSelfParan {
+LL | | fn default() -> Self {
+LL | | WithoutSelfParan(false)
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: try annotating `WithoutSelfParan` with `#[derive(Default)]`
+
+error: this `impl` can be derived
+ --> $DIR/derivable_impls.rs:214:1
+ |
+LL | / impl Default for RepeatDefault1 {
+LL | | fn default() -> Self {
+LL | | RepeatDefault1 { a: [0; 32] }
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: try annotating `RepeatDefault1` with `#[derive(Default)]`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/derive.rs b/src/tools/clippy/tests/ui/derive.rs
new file mode 100644
index 000000000..b276c384c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive.rs
@@ -0,0 +1,89 @@
+#![allow(dead_code)]
+#![warn(clippy::expl_impl_clone_on_copy)]
+
+
+#[derive(Copy)]
+struct Qux;
+
+impl Clone for Qux {
+ fn clone(&self) -> Self {
+ Qux
+ }
+}
+
+// looks like unions don't support deriving Clone for now
+#[derive(Copy)]
+union Union {
+ a: u8,
+}
+
+impl Clone for Union {
+ fn clone(&self) -> Self {
+ Union { a: 42 }
+ }
+}
+
+// See #666
+#[derive(Copy)]
+struct Lt<'a> {
+ a: &'a u8,
+}
+
+impl<'a> Clone for Lt<'a> {
+ fn clone(&self) -> Self {
+ unimplemented!()
+ }
+}
+
+#[derive(Copy)]
+struct BigArray {
+ a: [u8; 65],
+}
+
+impl Clone for BigArray {
+ fn clone(&self) -> Self {
+ unimplemented!()
+ }
+}
+
+#[derive(Copy)]
+struct FnPtr {
+ a: fn() -> !,
+}
+
+impl Clone for FnPtr {
+ fn clone(&self) -> Self {
+ unimplemented!()
+ }
+}
+
+// Ok, Clone trait impl doesn't have constrained generics.
+#[derive(Copy)]
+struct Generic<T> {
+ a: T,
+}
+
+impl<T> Clone for Generic<T> {
+ fn clone(&self) -> Self {
+ unimplemented!()
+ }
+}
+
+#[derive(Copy)]
+struct Generic2<T>(T);
+impl<T: Clone> Clone for Generic2<T> {
+ fn clone(&self) -> Self {
+ Self(self.0.clone())
+ }
+}
+
+// Ok, Clone trait impl doesn't have constrained generics.
+#[derive(Copy)]
+struct GenericRef<'a, T, U>(T, &'a U);
+impl<T: Clone, U> Clone for GenericRef<'_, T, U> {
+ fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1)
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/derive.stderr b/src/tools/clippy/tests/ui/derive.stderr
new file mode 100644
index 000000000..82a70ceec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive.stderr
@@ -0,0 +1,103 @@
+error: you are implementing `Clone` explicitly on a `Copy` type
+ --> $DIR/derive.rs:8:1
+ |
+LL | / impl Clone for Qux {
+LL | | fn clone(&self) -> Self {
+LL | | Qux
+LL | | }
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::expl-impl-clone-on-copy` implied by `-D warnings`
+note: consider deriving `Clone` or removing `Copy`
+ --> $DIR/derive.rs:8:1
+ |
+LL | / impl Clone for Qux {
+LL | | fn clone(&self) -> Self {
+LL | | Qux
+LL | | }
+LL | | }
+ | |_^
+
+error: you are implementing `Clone` explicitly on a `Copy` type
+ --> $DIR/derive.rs:32:1
+ |
+LL | / impl<'a> Clone for Lt<'a> {
+LL | | fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+LL | | }
+ | |_^
+ |
+note: consider deriving `Clone` or removing `Copy`
+ --> $DIR/derive.rs:32:1
+ |
+LL | / impl<'a> Clone for Lt<'a> {
+LL | | fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+LL | | }
+ | |_^
+
+error: you are implementing `Clone` explicitly on a `Copy` type
+ --> $DIR/derive.rs:43:1
+ |
+LL | / impl Clone for BigArray {
+LL | | fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+LL | | }
+ | |_^
+ |
+note: consider deriving `Clone` or removing `Copy`
+ --> $DIR/derive.rs:43:1
+ |
+LL | / impl Clone for BigArray {
+LL | | fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+LL | | }
+ | |_^
+
+error: you are implementing `Clone` explicitly on a `Copy` type
+ --> $DIR/derive.rs:54:1
+ |
+LL | / impl Clone for FnPtr {
+LL | | fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+LL | | }
+ | |_^
+ |
+note: consider deriving `Clone` or removing `Copy`
+ --> $DIR/derive.rs:54:1
+ |
+LL | / impl Clone for FnPtr {
+LL | | fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+LL | | }
+ | |_^
+
+error: you are implementing `Clone` explicitly on a `Copy` type
+ --> $DIR/derive.rs:74:1
+ |
+LL | / impl<T: Clone> Clone for Generic2<T> {
+LL | | fn clone(&self) -> Self {
+LL | | Self(self.0.clone())
+LL | | }
+LL | | }
+ | |_^
+ |
+note: consider deriving `Clone` or removing `Copy`
+ --> $DIR/derive.rs:74:1
+ |
+LL | / impl<T: Clone> Clone for Generic2<T> {
+LL | | fn clone(&self) -> Self {
+LL | | Self(self.0.clone())
+LL | | }
+LL | | }
+ | |_^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/derive_hash_xor_eq.rs b/src/tools/clippy/tests/ui/derive_hash_xor_eq.rs
new file mode 100644
index 000000000..813ddc566
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_hash_xor_eq.rs
@@ -0,0 +1,56 @@
+#![allow(clippy::derive_partial_eq_without_eq)]
+
+#[derive(PartialEq, Hash)]
+struct Foo;
+
+impl PartialEq<u64> for Foo {
+ fn eq(&self, _: &u64) -> bool {
+ true
+ }
+}
+
+#[derive(Hash)]
+struct Bar;
+
+impl PartialEq for Bar {
+ fn eq(&self, _: &Bar) -> bool {
+ true
+ }
+}
+
+#[derive(Hash)]
+struct Baz;
+
+impl PartialEq<Baz> for Baz {
+ fn eq(&self, _: &Baz) -> bool {
+ true
+ }
+}
+
+#[derive(PartialEq)]
+struct Bah;
+
+impl std::hash::Hash for Bah {
+ fn hash<H: std::hash::Hasher>(&self, _: &mut H) {}
+}
+
+#[derive(PartialEq)]
+struct Foo2;
+
+trait Hash {}
+
+// We don't want to lint on user-defined traits called `Hash`
+impl Hash for Foo2 {}
+
+mod use_hash {
+ use std::hash::{Hash, Hasher};
+
+ #[derive(PartialEq)]
+ struct Foo3;
+
+ impl Hash for Foo3 {
+ fn hash<H: std::hash::Hasher>(&self, _: &mut H) {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/derive_hash_xor_eq.stderr b/src/tools/clippy/tests/ui/derive_hash_xor_eq.stderr
new file mode 100644
index 000000000..2a4abb0c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_hash_xor_eq.stderr
@@ -0,0 +1,59 @@
+error: you are deriving `Hash` but have implemented `PartialEq` explicitly
+ --> $DIR/derive_hash_xor_eq.rs:12:10
+ |
+LL | #[derive(Hash)]
+ | ^^^^
+ |
+ = note: `#[deny(clippy::derive_hash_xor_eq)]` on by default
+note: `PartialEq` implemented here
+ --> $DIR/derive_hash_xor_eq.rs:15:1
+ |
+LL | impl PartialEq for Bar {
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ = note: this error originates in the derive macro `Hash` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are deriving `Hash` but have implemented `PartialEq` explicitly
+ --> $DIR/derive_hash_xor_eq.rs:21:10
+ |
+LL | #[derive(Hash)]
+ | ^^^^
+ |
+note: `PartialEq` implemented here
+ --> $DIR/derive_hash_xor_eq.rs:24:1
+ |
+LL | impl PartialEq<Baz> for Baz {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: this error originates in the derive macro `Hash` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are implementing `Hash` explicitly but have derived `PartialEq`
+ --> $DIR/derive_hash_xor_eq.rs:33:1
+ |
+LL | / impl std::hash::Hash for Bah {
+LL | | fn hash<H: std::hash::Hasher>(&self, _: &mut H) {}
+LL | | }
+ | |_^
+ |
+note: `PartialEq` implemented here
+ --> $DIR/derive_hash_xor_eq.rs:30:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^
+ = note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are implementing `Hash` explicitly but have derived `PartialEq`
+ --> $DIR/derive_hash_xor_eq.rs:51:5
+ |
+LL | / impl Hash for Foo3 {
+LL | | fn hash<H: std::hash::Hasher>(&self, _: &mut H) {}
+LL | | }
+ | |_____^
+ |
+note: `PartialEq` implemented here
+ --> $DIR/derive_hash_xor_eq.rs:48:14
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^
+ = note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs
new file mode 100644
index 000000000..6f12d36d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs
@@ -0,0 +1,69 @@
+#![warn(clippy::derive_ord_xor_partial_ord)]
+#![allow(clippy::unnecessary_wraps)]
+
+use std::cmp::Ordering;
+
+#[derive(PartialOrd, Ord, PartialEq, Eq)]
+struct DeriveBoth;
+
+impl PartialEq<u64> for DeriveBoth {
+ fn eq(&self, _: &u64) -> bool {
+ true
+ }
+}
+
+impl PartialOrd<u64> for DeriveBoth {
+ fn partial_cmp(&self, _: &u64) -> Option<Ordering> {
+ Some(Ordering::Equal)
+ }
+}
+
+#[derive(Ord, PartialEq, Eq)]
+struct DeriveOrd;
+
+impl PartialOrd for DeriveOrd {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(other.cmp(self))
+ }
+}
+
+#[derive(Ord, PartialEq, Eq)]
+struct DeriveOrdWithExplicitTypeVariable;
+
+impl PartialOrd<DeriveOrdWithExplicitTypeVariable> for DeriveOrdWithExplicitTypeVariable {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(other.cmp(self))
+ }
+}
+
+#[derive(PartialOrd, PartialEq, Eq)]
+struct DerivePartialOrd;
+
+impl std::cmp::Ord for DerivePartialOrd {
+ fn cmp(&self, other: &Self) -> Ordering {
+ Ordering::Less
+ }
+}
+
+#[derive(PartialOrd, PartialEq, Eq)]
+struct ImplUserOrd;
+
+trait Ord {}
+
+// We don't want to lint on user-defined traits called `Ord`
+impl Ord for ImplUserOrd {}
+
+mod use_ord {
+ use std::cmp::{Ord, Ordering};
+
+ #[derive(PartialOrd, PartialEq, Eq)]
+ struct DerivePartialOrdInUseOrd;
+
+ impl Ord for DerivePartialOrdInUseOrd {
+ fn cmp(&self, other: &Self) -> Ordering {
+ Ordering::Less
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr
new file mode 100644
index 000000000..baf8341ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr
@@ -0,0 +1,63 @@
+error: you are deriving `Ord` but have implemented `PartialOrd` explicitly
+ --> $DIR/derive_ord_xor_partial_ord.rs:21:10
+ |
+LL | #[derive(Ord, PartialEq, Eq)]
+ | ^^^
+ |
+ = note: `-D clippy::derive-ord-xor-partial-ord` implied by `-D warnings`
+note: `PartialOrd` implemented here
+ --> $DIR/derive_ord_xor_partial_ord.rs:24:1
+ |
+LL | impl PartialOrd for DeriveOrd {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: this error originates in the derive macro `Ord` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are deriving `Ord` but have implemented `PartialOrd` explicitly
+ --> $DIR/derive_ord_xor_partial_ord.rs:30:10
+ |
+LL | #[derive(Ord, PartialEq, Eq)]
+ | ^^^
+ |
+note: `PartialOrd` implemented here
+ --> $DIR/derive_ord_xor_partial_ord.rs:33:1
+ |
+LL | impl PartialOrd<DeriveOrdWithExplicitTypeVariable> for DeriveOrdWithExplicitTypeVariable {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: this error originates in the derive macro `Ord` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are implementing `Ord` explicitly but have derived `PartialOrd`
+ --> $DIR/derive_ord_xor_partial_ord.rs:42:1
+ |
+LL | / impl std::cmp::Ord for DerivePartialOrd {
+LL | | fn cmp(&self, other: &Self) -> Ordering {
+LL | | Ordering::Less
+LL | | }
+LL | | }
+ | |_^
+ |
+note: `PartialOrd` implemented here
+ --> $DIR/derive_ord_xor_partial_ord.rs:39:10
+ |
+LL | #[derive(PartialOrd, PartialEq, Eq)]
+ | ^^^^^^^^^^
+ = note: this error originates in the derive macro `PartialOrd` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are implementing `Ord` explicitly but have derived `PartialOrd`
+ --> $DIR/derive_ord_xor_partial_ord.rs:62:5
+ |
+LL | / impl Ord for DerivePartialOrdInUseOrd {
+LL | | fn cmp(&self, other: &Self) -> Ordering {
+LL | | Ordering::Less
+LL | | }
+LL | | }
+ | |_____^
+ |
+note: `PartialOrd` implemented here
+ --> $DIR/derive_ord_xor_partial_ord.rs:59:14
+ |
+LL | #[derive(PartialOrd, PartialEq, Eq)]
+ | ^^^^^^^^^^
+ = note: this error originates in the derive macro `PartialOrd` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.fixed b/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.fixed
new file mode 100644
index 000000000..bbbe46759
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.fixed
@@ -0,0 +1,126 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::derive_partial_eq_without_eq)]
+
+// Don't warn on structs that aren't PartialEq
+pub struct NotPartialEq {
+ foo: u32,
+ bar: String,
+}
+
+// Eq can be derived but is missing
+#[derive(Debug, PartialEq, Eq)]
+pub struct MissingEq {
+ foo: u32,
+ bar: String,
+}
+
+// Eq is derived
+#[derive(PartialEq, Eq)]
+pub struct NotMissingEq {
+ foo: u32,
+ bar: String,
+}
+
+// Eq is manually implemented
+#[derive(PartialEq)]
+pub struct ManualEqImpl {
+ foo: u32,
+ bar: String,
+}
+
+impl Eq for ManualEqImpl {}
+
+// Cannot be Eq because f32 isn't Eq
+#[derive(PartialEq)]
+pub struct CannotBeEq {
+ foo: u32,
+ bar: f32,
+}
+
+// Don't warn if PartialEq is manually implemented
+pub struct ManualPartialEqImpl {
+ foo: u32,
+ bar: String,
+}
+
+impl PartialEq for ManualPartialEqImpl {
+ fn eq(&self, other: &Self) -> bool {
+ self.foo == other.foo && self.bar == other.bar
+ }
+}
+
+// Generic fields should be properly checked for Eq-ness
+#[derive(PartialEq, Eq)]
+pub struct GenericNotEq<T: Eq, U: PartialEq> {
+ foo: T,
+ bar: U,
+}
+
+#[derive(PartialEq, Eq)]
+pub struct GenericEq<T: Eq, U: Eq> {
+ foo: T,
+ bar: U,
+}
+
+#[derive(PartialEq, Eq)]
+pub struct TupleStruct(u32);
+
+#[derive(PartialEq, Eq)]
+pub struct GenericTupleStruct<T: Eq>(T);
+
+#[derive(PartialEq)]
+pub struct TupleStructNotEq(f32);
+
+#[derive(PartialEq, Eq)]
+pub enum Enum {
+ Foo(u32),
+ Bar { a: String, b: () },
+}
+
+#[derive(PartialEq, Eq)]
+pub enum GenericEnum<T: Eq, U: Eq, V: Eq> {
+ Foo(T),
+ Bar { a: U, b: V },
+}
+
+#[derive(PartialEq)]
+pub enum EnumNotEq {
+ Foo(u32),
+ Bar { a: String, b: f32 },
+}
+
+// Ensure that rustfix works properly when `PartialEq` has other derives on either side
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct RustFixWithOtherDerives;
+
+#[derive(PartialEq, Eq)]
+pub struct Generic<T>(T);
+
+#[derive(PartialEq, Eq)]
+pub struct GenericPhantom<T>(core::marker::PhantomData<T>);
+
+mod _hidden {
+ #[derive(PartialEq, Eq)]
+ pub struct Reexported;
+
+ #[derive(PartialEq, Eq)]
+ pub struct InPubFn;
+
+ #[derive(PartialEq)]
+ pub(crate) struct PubCrate;
+
+ #[derive(PartialEq)]
+ pub(super) struct PubSuper;
+}
+
+pub use _hidden::Reexported;
+pub fn _from_mod() -> _hidden::InPubFn {
+ _hidden::InPubFn
+}
+
+#[derive(PartialEq)]
+struct InternalTy;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.rs b/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.rs
new file mode 100644
index 000000000..88d6fbd1a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.rs
@@ -0,0 +1,126 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::derive_partial_eq_without_eq)]
+
+// Don't warn on structs that aren't PartialEq
+pub struct NotPartialEq {
+ foo: u32,
+ bar: String,
+}
+
+// Eq can be derived but is missing
+#[derive(Debug, PartialEq)]
+pub struct MissingEq {
+ foo: u32,
+ bar: String,
+}
+
+// Eq is derived
+#[derive(PartialEq, Eq)]
+pub struct NotMissingEq {
+ foo: u32,
+ bar: String,
+}
+
+// Eq is manually implemented
+#[derive(PartialEq)]
+pub struct ManualEqImpl {
+ foo: u32,
+ bar: String,
+}
+
+impl Eq for ManualEqImpl {}
+
+// Cannot be Eq because f32 isn't Eq
+#[derive(PartialEq)]
+pub struct CannotBeEq {
+ foo: u32,
+ bar: f32,
+}
+
+// Don't warn if PartialEq is manually implemented
+pub struct ManualPartialEqImpl {
+ foo: u32,
+ bar: String,
+}
+
+impl PartialEq for ManualPartialEqImpl {
+ fn eq(&self, other: &Self) -> bool {
+ self.foo == other.foo && self.bar == other.bar
+ }
+}
+
+// Generic fields should be properly checked for Eq-ness
+#[derive(PartialEq)]
+pub struct GenericNotEq<T: Eq, U: PartialEq> {
+ foo: T,
+ bar: U,
+}
+
+#[derive(PartialEq)]
+pub struct GenericEq<T: Eq, U: Eq> {
+ foo: T,
+ bar: U,
+}
+
+#[derive(PartialEq)]
+pub struct TupleStruct(u32);
+
+#[derive(PartialEq)]
+pub struct GenericTupleStruct<T: Eq>(T);
+
+#[derive(PartialEq)]
+pub struct TupleStructNotEq(f32);
+
+#[derive(PartialEq)]
+pub enum Enum {
+ Foo(u32),
+ Bar { a: String, b: () },
+}
+
+#[derive(PartialEq)]
+pub enum GenericEnum<T: Eq, U: Eq, V: Eq> {
+ Foo(T),
+ Bar { a: U, b: V },
+}
+
+#[derive(PartialEq)]
+pub enum EnumNotEq {
+ Foo(u32),
+ Bar { a: String, b: f32 },
+}
+
+// Ensure that rustfix works properly when `PartialEq` has other derives on either side
+#[derive(Debug, PartialEq, Clone)]
+pub struct RustFixWithOtherDerives;
+
+#[derive(PartialEq)]
+pub struct Generic<T>(T);
+
+#[derive(PartialEq, Eq)]
+pub struct GenericPhantom<T>(core::marker::PhantomData<T>);
+
+mod _hidden {
+ #[derive(PartialEq)]
+ pub struct Reexported;
+
+ #[derive(PartialEq)]
+ pub struct InPubFn;
+
+ #[derive(PartialEq)]
+ pub(crate) struct PubCrate;
+
+ #[derive(PartialEq)]
+ pub(super) struct PubSuper;
+}
+
+pub use _hidden::Reexported;
+pub fn _from_mod() -> _hidden::InPubFn {
+ _hidden::InPubFn
+}
+
+#[derive(PartialEq)]
+struct InternalTy;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.stderr b/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.stderr
new file mode 100644
index 000000000..794c5dab8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/derive_partial_eq_without_eq.stderr
@@ -0,0 +1,70 @@
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:13:17
+ |
+LL | #[derive(Debug, PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+ |
+ = note: `-D clippy::derive-partial-eq-without-eq` implied by `-D warnings`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:55:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:61:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:67:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:70:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:76:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:82:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:95:17
+ |
+LL | #[derive(Debug, PartialEq, Clone)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:98:10
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:105:14
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: you are deriving `PartialEq` and can implement `Eq`
+ --> $DIR/derive_partial_eq_without_eq.rs:108:14
+ |
+LL | #[derive(PartialEq)]
+ | ^^^^^^^^^ help: consider deriving `Eq` as well: `PartialEq, Eq`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/disallowed_script_idents.rs b/src/tools/clippy/tests/ui/disallowed_script_idents.rs
new file mode 100644
index 000000000..cfdda3597
--- /dev/null
+++ b/src/tools/clippy/tests/ui/disallowed_script_idents.rs
@@ -0,0 +1,10 @@
+#![deny(clippy::disallowed_script_idents)]
+#![allow(dead_code)]
+
+fn main() {
+ let counter = 10; // OK, Latin is allowed.
+ let zähler = 10; // OK, it's still Latin.
+
+ let счётчик = 10; // Cyrillic is not allowed by default.
+ let カウンタ = 10; // Same for Japanese.
+}
diff --git a/src/tools/clippy/tests/ui/disallowed_script_idents.stderr b/src/tools/clippy/tests/ui/disallowed_script_idents.stderr
new file mode 100644
index 000000000..cc84dc1d4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/disallowed_script_idents.stderr
@@ -0,0 +1,20 @@
+error: identifier `счётчик` has a Unicode script that is not allowed by configuration: Cyrillic
+ --> $DIR/disallowed_script_idents.rs:8:9
+ |
+LL | let счётчик = 10; // Cyrillic is not allowed by default.
+ | ^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/disallowed_script_idents.rs:1:9
+ |
+LL | #![deny(clippy::disallowed_script_idents)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: identifier `カウンタ` has a Unicode script that is not allowed by configuration: Katakana
+ --> $DIR/disallowed_script_idents.rs:9:9
+ |
+LL | let カウンタ = 10; // Same for Japanese.
+ | ^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/diverging_sub_expression.rs b/src/tools/clippy/tests/ui/diverging_sub_expression.rs
new file mode 100644
index 000000000..e27f9fea7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/diverging_sub_expression.rs
@@ -0,0 +1,41 @@
+#![warn(clippy::diverging_sub_expression)]
+#![allow(clippy::match_same_arms, clippy::logic_bug)]
+#[allow(clippy::empty_loop)]
+fn diverge() -> ! {
+ loop {}
+}
+
+struct A;
+
+impl A {
+ fn foo(&self) -> ! {
+ diverge()
+ }
+}
+
+#[allow(unused_variables, clippy::unnecessary_operation, clippy::short_circuit_statement)]
+fn main() {
+ let b = true;
+ b || diverge();
+ b || A.foo();
+}
+
+#[allow(dead_code, unused_variables)]
+fn foobar() {
+ loop {
+ let x = match 5 {
+ 4 => return,
+ 5 => continue,
+ 6 => true || return,
+ 7 => true || continue,
+ 8 => break,
+ 9 => diverge(),
+ 3 => true || diverge(),
+ 10 => match 42 {
+ 99 => return,
+ _ => true || panic!("boo"),
+ },
+ _ => true || break,
+ };
+ }
+}
diff --git a/src/tools/clippy/tests/ui/diverging_sub_expression.stderr b/src/tools/clippy/tests/ui/diverging_sub_expression.stderr
new file mode 100644
index 000000000..9c91d9357
--- /dev/null
+++ b/src/tools/clippy/tests/ui/diverging_sub_expression.stderr
@@ -0,0 +1,48 @@
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:19:10
+ |
+LL | b || diverge();
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::diverging-sub-expression` implied by `-D warnings`
+
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:20:10
+ |
+LL | b || A.foo();
+ | ^^^^^^^
+
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:29:26
+ |
+LL | 6 => true || return,
+ | ^^^^^^
+
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:30:26
+ |
+LL | 7 => true || continue,
+ | ^^^^^^^^
+
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:33:26
+ |
+LL | 3 => true || diverge(),
+ | ^^^^^^^^^
+
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:36:30
+ |
+LL | _ => true || panic!("boo"),
+ | ^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: sub-expression diverges
+ --> $DIR/diverging_sub_expression.rs:38:26
+ |
+LL | _ => true || break,
+ | ^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed
new file mode 100644
index 000000000..747801b40
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed
@@ -0,0 +1,215 @@
+// run-rustfix
+//! This file tests for the `DOC_MARKDOWN` lint.
+
+#![allow(dead_code, incomplete_features)]
+#![warn(clippy::doc_markdown)]
+#![feature(custom_inner_attributes, generic_const_exprs, const_option)]
+#![rustfmt::skip]
+
+/// The `foo_bar` function does _nothing_. See also `foo::bar`. (note the dot there)
+/// Markdown is _weird_. I mean _really weird_. This \_ is ok. So is `_`. But not `Foo::some_fun`
+/// which should be reported only once despite being __doubly bad__.
+/// Here be `::a::global:path`, and _`::another::global::path`_. :: is not a path though.
+/// Import an item from `::awesome::global::blob::` (Intended postfix)
+/// These are the options for `::Cat`: (Intended trailing single colon, shouldn't be linted)
+/// That's not code ~`NotInCodeBlock`~.
+/// `be_sure_we_got_to_the_end_of_it`
+fn foo_bar() {
+}
+
+/// That one tests multiline ticks.
+/// ```rust
+/// foo_bar FOO_BAR
+/// _foo bar_
+/// ```
+///
+/// ~~~rust
+/// foo_bar FOO_BAR
+/// _foo bar_
+/// ~~~
+/// `be_sure_we_got_to_the_end_of_it`
+fn multiline_codeblock() {
+}
+
+/// This _is a test for
+/// multiline
+/// emphasis_.
+/// `be_sure_we_got_to_the_end_of_it`
+fn test_emphasis() {
+}
+
+/// This tests units. See also #835.
+/// kiB MiB GiB TiB PiB EiB
+/// kib Mib Gib Tib Pib Eib
+/// kB MB GB TB PB EB
+/// kb Mb Gb Tb Pb Eb
+/// 32kiB 32MiB 32GiB 32TiB 32PiB 32EiB
+/// 32kib 32Mib 32Gib 32Tib 32Pib 32Eib
+/// 32kB 32MB 32GB 32TB 32PB 32EB
+/// 32kb 32Mb 32Gb 32Tb 32Pb 32Eb
+/// NaN
+/// `be_sure_we_got_to_the_end_of_it`
+fn test_units() {
+}
+
+/// This tests allowed identifiers.
+/// KiB MiB GiB TiB PiB EiB
+/// DirectX
+/// ECMAScript
+/// GPLv2 GPLv3
+/// GitHub GitLab
+/// IPv4 IPv6
+/// ClojureScript CoffeeScript JavaScript PureScript TypeScript
+/// NaN NaNs
+/// OAuth GraphQL
+/// OCaml
+/// OpenGL OpenMP OpenSSH OpenSSL OpenStreetMap OpenDNS
+/// WebGL
+/// TensorFlow
+/// TrueType
+/// iOS macOS FreeBSD
+/// TeX LaTeX BibTeX BibLaTeX
+/// MinGW
+/// CamelCase (see also #2395)
+/// `be_sure_we_got_to_the_end_of_it`
+fn test_allowed() {
+}
+
+/// This test has [a `link_with_underscores`][chunked-example] inside it. See #823.
+/// See also [the issue tracker](https://github.com/rust-lang/rust-clippy/search?q=clippy::doc_markdown&type=Issues)
+/// on GitHub (which is a camel-cased word, but is OK). And here is another [inline link][inline_link].
+/// It can also be [`inline_link2`].
+///
+/// [chunked-example]: https://en.wikipedia.org/wiki/Chunked_transfer_encoding#Example
+/// [inline_link]: https://foobar
+/// [inline_link2]: https://foobar
+/// The `main` function is the entry point of the program. Here it only calls the `foo_bar` and
+/// `multiline_ticks` functions.
+///
+/// expression of the type `_ <bit_op> m <cmp_op> c` (where `<bit_op>`
+/// is one of {`&`, '|'} and `<cmp_op>` is one of {`!=`, `>=`, `>` ,
+/// `be_sure_we_got_to_the_end_of_it`
+fn main() {
+ foo_bar();
+ multiline_codeblock();
+ test_emphasis();
+ test_units();
+}
+
+/// ## `CamelCaseThing`
+/// Talks about `CamelCaseThing`. Titles should be ignored; see issue #897.
+///
+/// # `CamelCaseThing`
+///
+/// Not a title #897 `CamelCaseThing`
+/// `be_sure_we_got_to_the_end_of_it`
+fn issue897() {
+}
+
+/// I am confused by brackets? (`x_y`)
+/// I am confused by brackets? (foo `x_y`)
+/// I am confused by brackets? (`x_y` foo)
+/// `be_sure_we_got_to_the_end_of_it`
+fn issue900() {
+}
+
+/// Diesel queries also have a similar problem to [Iterator][iterator], where
+/// /// More talking
+/// returning them from a function requires exposing the implementation of that
+/// function. The [`helper_types`][helper_types] module exists to help with this,
+/// but you might want to hide the return type or have it conditionally change.
+/// Boxing can achieve both.
+///
+/// [iterator]: https://doc.rust-lang.org/stable/std/iter/trait.Iterator.html
+/// [helper_types]: ../helper_types/index.html
+/// `be_sure_we_got_to_the_end_of_it`
+fn issue883() {
+}
+
+/// `foo_bar
+/// baz_quz`
+/// [foo
+/// bar](https://doc.rust-lang.org/stable/std/iter/trait.IteratorFooBar.html)
+fn multiline() {
+}
+
+/** E.g., serialization of an empty list: `FooBar`
+```
+That's in a code block: `PackedNode`
+```
+
+And `BarQuz` too.
+`be_sure_we_got_to_the_end_of_it`
+*/
+fn issue1073() {
+}
+
+/** E.g., serialization of an empty list: `FooBar`
+```
+That's in a code block: PackedNode
+```
+
+And `BarQuz` too.
+`be_sure_we_got_to_the_end_of_it`
+*/
+fn issue1073_alt() {
+}
+
+/// Tests more than three quotes:
+/// ````
+/// DoNotWarn
+/// ```
+/// StillDont
+/// ````
+/// `be_sure_we_got_to_the_end_of_it`
+fn four_quotes() {
+}
+
+#[cfg_attr(feature = "a", doc = " ```")]
+#[cfg_attr(not(feature = "a"), doc = " ```ignore")]
+/// fn main() {
+/// let s = "localhost:10000".to_string();
+/// println!("{}", s);
+/// }
+/// ```
+fn issue_1469() {}
+
+/**
+ * This is a doc comment that should not be a list
+ *This would also be an error under a strict common mark interpretation
+ */
+fn issue_1920() {}
+
+/// An iterator over `mycrate::Collection`'s values.
+/// It should not lint a `'static` lifetime in ticks.
+fn issue_2210() {}
+
+/// This should not cause the lint to trigger:
+/// #REQ-data-family.lint_partof_exists
+fn issue_2343() {}
+
+/// This should not cause an ICE:
+/// __|_ _|__||_|
+fn pulldown_cmark_crash() {}
+
+/// This should not lint
+/// (regression test for #7758)
+/// [plain text][path::to::item]
+fn intra_doc_link() {}
+
+// issue #7033 - generic_const_exprs ICE
+struct S<T, const N: usize>
+where [(); N.checked_next_power_of_two().unwrap()]: {
+ arr: [T; N.checked_next_power_of_two().unwrap()],
+ n: usize,
+}
+
+impl<T: Copy + Default, const N: usize> S<T, N>
+where [(); N.checked_next_power_of_two().unwrap()]: {
+ fn new() -> Self {
+ Self {
+ arr: [T::default(); N.checked_next_power_of_two().unwrap()],
+ n: 0,
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.rs b/src/tools/clippy/tests/ui/doc/doc-fixable.rs
new file mode 100644
index 000000000..f3cf96615
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/doc-fixable.rs
@@ -0,0 +1,215 @@
+// run-rustfix
+//! This file tests for the `DOC_MARKDOWN` lint.
+
+#![allow(dead_code, incomplete_features)]
+#![warn(clippy::doc_markdown)]
+#![feature(custom_inner_attributes, generic_const_exprs, const_option)]
+#![rustfmt::skip]
+
+/// The foo_bar function does _nothing_. See also foo::bar. (note the dot there)
+/// Markdown is _weird_. I mean _really weird_. This \_ is ok. So is `_`. But not Foo::some_fun
+/// which should be reported only once despite being __doubly bad__.
+/// Here be ::a::global:path, and _::another::global::path_. :: is not a path though.
+/// Import an item from ::awesome::global::blob:: (Intended postfix)
+/// These are the options for ::Cat: (Intended trailing single colon, shouldn't be linted)
+/// That's not code ~NotInCodeBlock~.
+/// be_sure_we_got_to_the_end_of_it
+fn foo_bar() {
+}
+
+/// That one tests multiline ticks.
+/// ```rust
+/// foo_bar FOO_BAR
+/// _foo bar_
+/// ```
+///
+/// ~~~rust
+/// foo_bar FOO_BAR
+/// _foo bar_
+/// ~~~
+/// be_sure_we_got_to_the_end_of_it
+fn multiline_codeblock() {
+}
+
+/// This _is a test for
+/// multiline
+/// emphasis_.
+/// be_sure_we_got_to_the_end_of_it
+fn test_emphasis() {
+}
+
+/// This tests units. See also #835.
+/// kiB MiB GiB TiB PiB EiB
+/// kib Mib Gib Tib Pib Eib
+/// kB MB GB TB PB EB
+/// kb Mb Gb Tb Pb Eb
+/// 32kiB 32MiB 32GiB 32TiB 32PiB 32EiB
+/// 32kib 32Mib 32Gib 32Tib 32Pib 32Eib
+/// 32kB 32MB 32GB 32TB 32PB 32EB
+/// 32kb 32Mb 32Gb 32Tb 32Pb 32Eb
+/// NaN
+/// be_sure_we_got_to_the_end_of_it
+fn test_units() {
+}
+
+/// This tests allowed identifiers.
+/// KiB MiB GiB TiB PiB EiB
+/// DirectX
+/// ECMAScript
+/// GPLv2 GPLv3
+/// GitHub GitLab
+/// IPv4 IPv6
+/// ClojureScript CoffeeScript JavaScript PureScript TypeScript
+/// NaN NaNs
+/// OAuth GraphQL
+/// OCaml
+/// OpenGL OpenMP OpenSSH OpenSSL OpenStreetMap OpenDNS
+/// WebGL
+/// TensorFlow
+/// TrueType
+/// iOS macOS FreeBSD
+/// TeX LaTeX BibTeX BibLaTeX
+/// MinGW
+/// CamelCase (see also #2395)
+/// be_sure_we_got_to_the_end_of_it
+fn test_allowed() {
+}
+
+/// This test has [a link_with_underscores][chunked-example] inside it. See #823.
+/// See also [the issue tracker](https://github.com/rust-lang/rust-clippy/search?q=clippy::doc_markdown&type=Issues)
+/// on GitHub (which is a camel-cased word, but is OK). And here is another [inline link][inline_link].
+/// It can also be [inline_link2].
+///
+/// [chunked-example]: https://en.wikipedia.org/wiki/Chunked_transfer_encoding#Example
+/// [inline_link]: https://foobar
+/// [inline_link2]: https://foobar
+/// The `main` function is the entry point of the program. Here it only calls the `foo_bar` and
+/// `multiline_ticks` functions.
+///
+/// expression of the type `_ <bit_op> m <cmp_op> c` (where `<bit_op>`
+/// is one of {`&`, '|'} and `<cmp_op>` is one of {`!=`, `>=`, `>` ,
+/// be_sure_we_got_to_the_end_of_it
+fn main() {
+ foo_bar();
+ multiline_codeblock();
+ test_emphasis();
+ test_units();
+}
+
+/// ## CamelCaseThing
+/// Talks about `CamelCaseThing`. Titles should be ignored; see issue #897.
+///
+/// # CamelCaseThing
+///
+/// Not a title #897 CamelCaseThing
+/// be_sure_we_got_to_the_end_of_it
+fn issue897() {
+}
+
+/// I am confused by brackets? (`x_y`)
+/// I am confused by brackets? (foo `x_y`)
+/// I am confused by brackets? (`x_y` foo)
+/// be_sure_we_got_to_the_end_of_it
+fn issue900() {
+}
+
+/// Diesel queries also have a similar problem to [Iterator][iterator], where
+/// /// More talking
+/// returning them from a function requires exposing the implementation of that
+/// function. The [`helper_types`][helper_types] module exists to help with this,
+/// but you might want to hide the return type or have it conditionally change.
+/// Boxing can achieve both.
+///
+/// [iterator]: https://doc.rust-lang.org/stable/std/iter/trait.Iterator.html
+/// [helper_types]: ../helper_types/index.html
+/// be_sure_we_got_to_the_end_of_it
+fn issue883() {
+}
+
+/// `foo_bar
+/// baz_quz`
+/// [foo
+/// bar](https://doc.rust-lang.org/stable/std/iter/trait.IteratorFooBar.html)
+fn multiline() {
+}
+
+/** E.g., serialization of an empty list: FooBar
+```
+That's in a code block: `PackedNode`
+```
+
+And BarQuz too.
+be_sure_we_got_to_the_end_of_it
+*/
+fn issue1073() {
+}
+
+/** E.g., serialization of an empty list: FooBar
+```
+That's in a code block: PackedNode
+```
+
+And BarQuz too.
+be_sure_we_got_to_the_end_of_it
+*/
+fn issue1073_alt() {
+}
+
+/// Tests more than three quotes:
+/// ````
+/// DoNotWarn
+/// ```
+/// StillDont
+/// ````
+/// be_sure_we_got_to_the_end_of_it
+fn four_quotes() {
+}
+
+#[cfg_attr(feature = "a", doc = " ```")]
+#[cfg_attr(not(feature = "a"), doc = " ```ignore")]
+/// fn main() {
+/// let s = "localhost:10000".to_string();
+/// println!("{}", s);
+/// }
+/// ```
+fn issue_1469() {}
+
+/**
+ * This is a doc comment that should not be a list
+ *This would also be an error under a strict common mark interpretation
+ */
+fn issue_1920() {}
+
+/// An iterator over mycrate::Collection's values.
+/// It should not lint a `'static` lifetime in ticks.
+fn issue_2210() {}
+
+/// This should not cause the lint to trigger:
+/// #REQ-data-family.lint_partof_exists
+fn issue_2343() {}
+
+/// This should not cause an ICE:
+/// __|_ _|__||_|
+fn pulldown_cmark_crash() {}
+
+/// This should not lint
+/// (regression test for #7758)
+/// [plain text][path::to::item]
+fn intra_doc_link() {}
+
+// issue #7033 - generic_const_exprs ICE
+struct S<T, const N: usize>
+where [(); N.checked_next_power_of_two().unwrap()]: {
+ arr: [T; N.checked_next_power_of_two().unwrap()],
+ n: usize,
+}
+
+impl<T: Copy + Default, const N: usize> S<T, N>
+where [(); N.checked_next_power_of_two().unwrap()]: {
+ fn new() -> Self {
+ Self {
+ arr: [T::default(); N.checked_next_power_of_two().unwrap()],
+ n: 0,
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.stderr b/src/tools/clippy/tests/ui/doc/doc-fixable.stderr
new file mode 100644
index 000000000..40345370c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/doc-fixable.stderr
@@ -0,0 +1,333 @@
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:9:9
+ |
+LL | /// The foo_bar function does _nothing_. See also foo::bar. (note the dot there)
+ | ^^^^^^^
+ |
+ = note: `-D clippy::doc-markdown` implied by `-D warnings`
+help: try
+ |
+LL | /// The `foo_bar` function does _nothing_. See also foo::bar. (note the dot there)
+ | ~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:9:51
+ |
+LL | /// The foo_bar function does _nothing_. See also foo::bar. (note the dot there)
+ | ^^^^^^^^
+ |
+help: try
+ |
+LL | /// The foo_bar function does _nothing_. See also `foo::bar`. (note the dot there)
+ | ~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:10:83
+ |
+LL | /// Markdown is _weird_. I mean _really weird_. This /_ is ok. So is `_`. But not Foo::some_fun
+ | ^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// Markdown is _weird_. I mean _really weird_. This /_ is ok. So is `_`. But not `Foo::some_fun`
+ | ~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:12:13
+ |
+LL | /// Here be ::a::global:path, and _::another::global::path_. :: is not a path though.
+ | ^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// Here be `::a::global:path`, and _::another::global::path_. :: is not a path though.
+ | ~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:12:36
+ |
+LL | /// Here be ::a::global:path, and _::another::global::path_. :: is not a path though.
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// Here be ::a::global:path, and _`::another::global::path`_. :: is not a path though.
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:13:25
+ |
+LL | /// Import an item from ::awesome::global::blob:: (Intended postfix)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// Import an item from `::awesome::global::blob::` (Intended postfix)
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:14:31
+ |
+LL | /// These are the options for ::Cat: (Intended trailing single colon, shouldn't be linted)
+ | ^^^^^
+ |
+help: try
+ |
+LL | /// These are the options for `::Cat`: (Intended trailing single colon, shouldn't be linted)
+ | ~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:15:22
+ |
+LL | /// That's not code ~NotInCodeBlock~.
+ | ^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// That's not code ~`NotInCodeBlock`~.
+ | ~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:16:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:30:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:37:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:51:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:74:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:78:22
+ |
+LL | /// This test has [a link_with_underscores][chunked-example] inside it. See #823.
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// This test has [a `link_with_underscores`][chunked-example] inside it. See #823.
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:81:21
+ |
+LL | /// It can also be [inline_link2].
+ | ^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// It can also be [`inline_link2`].
+ | ~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:91:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:99:8
+ |
+LL | /// ## CamelCaseThing
+ | ^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// ## `CamelCaseThing`
+ | ~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:102:7
+ |
+LL | /// # CamelCaseThing
+ | ^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// # `CamelCaseThing`
+ | ~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:104:22
+ |
+LL | /// Not a title #897 CamelCaseThing
+ | ^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// Not a title #897 `CamelCaseThing`
+ | ~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:105:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:112:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:125:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:136:43
+ |
+LL | /** E.g., serialization of an empty list: FooBar
+ | ^^^^^^
+ |
+help: try
+ |
+LL | /** E.g., serialization of an empty list: `FooBar`
+ | ~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:141:5
+ |
+LL | And BarQuz too.
+ | ^^^^^^
+ |
+help: try
+ |
+LL | And `BarQuz` too.
+ | ~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:142:1
+ |
+LL | be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | `be_sure_we_got_to_the_end_of_it`
+ |
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:147:43
+ |
+LL | /** E.g., serialization of an empty list: FooBar
+ | ^^^^^^
+ |
+help: try
+ |
+LL | /** E.g., serialization of an empty list: `FooBar`
+ | ~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:152:5
+ |
+LL | And BarQuz too.
+ | ^^^^^^
+ |
+help: try
+ |
+LL | And `BarQuz` too.
+ | ~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:153:1
+ |
+LL | be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | `be_sure_we_got_to_the_end_of_it`
+ |
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:164:5
+ |
+LL | /// be_sure_we_got_to_the_end_of_it
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// `be_sure_we_got_to_the_end_of_it`
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: item in documentation is missing backticks
+ --> $DIR/doc-fixable.rs:183:22
+ |
+LL | /// An iterator over mycrate::Collection's values.
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// An iterator over `mycrate::Collection`'s values.
+ | ~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 30 previous errors
+
diff --git a/src/tools/clippy/tests/ui/doc/issue_1832.rs b/src/tools/clippy/tests/ui/doc/issue_1832.rs
new file mode 100644
index 000000000..10586f16d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/issue_1832.rs
@@ -0,0 +1,9 @@
+/// Ok: <http://www.unicode.org/reports/tr9/#Reordering_Resolved_Levels>
+///
+/// Not ok: http://www.unicode.org
+/// Not ok: https://www.unicode.org
+/// Not ok: http://www.unicode.org/
+/// Not ok: http://www.unicode.org/reports/tr9/#Reordering_Resolved_Levels
+fn issue_1832() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/doc/issue_902.rs b/src/tools/clippy/tests/ui/doc/issue_902.rs
new file mode 100644
index 000000000..4b0c835dd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/issue_902.rs
@@ -0,0 +1,7 @@
+/// See [NIST SP 800-56A, revision 2].
+///
+/// [NIST SP 800-56A, revision 2]:
+/// https://github.com/rust-lang/rust-clippy/issues/902#issuecomment-261919419
+fn issue_902_comment() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/doc/unbalanced_ticks.rs b/src/tools/clippy/tests/ui/doc/unbalanced_ticks.rs
new file mode 100644
index 000000000..8e8324b30
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/unbalanced_ticks.rs
@@ -0,0 +1,43 @@
+//! This file tests for the `DOC_MARKDOWN` lint, specifically cases
+//! where ticks are unbalanced (see issue #6753).
+
+#![allow(dead_code)]
+#![warn(clippy::doc_markdown)]
+
+/// This is a doc comment with `unbalanced_tick marks and several words that
+/// should be `encompassed_by` tick marks because they `contain_underscores`.
+/// Because of the initial `unbalanced_tick` pair, the error message is
+/// very `confusing_and_misleading`.
+fn main() {}
+
+/// This paragraph has `unbalanced_tick marks and should stop_linting.
+///
+/// This paragraph is fine and should_be linted normally.
+///
+/// Double unbalanced backtick from ``here to here` should lint.
+///
+/// Double balanced back ticks ``start end`` is fine.
+fn multiple_paragraphs() {}
+
+/// ```
+/// // Unbalanced tick mark in code block shouldn't warn:
+/// `
+/// ```
+fn in_code_block() {}
+
+/// # `Fine`
+///
+/// ## not_fine
+///
+/// ### `unbalanced
+///
+/// - This `item has unbalanced tick marks
+/// - This item needs backticks_here
+fn other_markdown() {}
+
+#[rustfmt::skip]
+/// - ```rust
+/// /// `lol`
+/// pub struct Struct;
+/// ```
+fn iss_7421() {}
diff --git a/src/tools/clippy/tests/ui/doc/unbalanced_ticks.stderr b/src/tools/clippy/tests/ui/doc/unbalanced_ticks.stderr
new file mode 100644
index 000000000..a462b9887
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc/unbalanced_ticks.stderr
@@ -0,0 +1,79 @@
+error: backticks are unbalanced
+ --> $DIR/unbalanced_ticks.rs:7:1
+ |
+LL | / /// This is a doc comment with `unbalanced_tick marks and several words that
+LL | | /// should be `encompassed_by` tick marks because they `contain_underscores`.
+LL | | /// Because of the initial `unbalanced_tick` pair, the error message is
+LL | | /// very `confusing_and_misleading`.
+ | |____________________________________^
+ |
+ = note: `-D clippy::doc-markdown` implied by `-D warnings`
+ = help: a backtick may be missing a pair
+
+error: backticks are unbalanced
+ --> $DIR/unbalanced_ticks.rs:13:1
+ |
+LL | /// This paragraph has `unbalanced_tick marks and should stop_linting.
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: a backtick may be missing a pair
+
+error: item in documentation is missing backticks
+ --> $DIR/unbalanced_ticks.rs:15:32
+ |
+LL | /// This paragraph is fine and should_be linted normally.
+ | ^^^^^^^^^
+ |
+help: try
+ |
+LL | /// This paragraph is fine and `should_be` linted normally.
+ | ~~~~~~~~~~~
+
+error: backticks are unbalanced
+ --> $DIR/unbalanced_ticks.rs:17:1
+ |
+LL | /// Double unbalanced backtick from ``here to here` should lint.
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: a backtick may be missing a pair
+
+error: item in documentation is missing backticks
+ --> $DIR/unbalanced_ticks.rs:30:8
+ |
+LL | /// ## not_fine
+ | ^^^^^^^^
+ |
+help: try
+ |
+LL | /// ## `not_fine`
+ | ~~~~~~~~~~
+
+error: backticks are unbalanced
+ --> $DIR/unbalanced_ticks.rs:32:1
+ |
+LL | /// ### `unbalanced
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: a backtick may be missing a pair
+
+error: backticks are unbalanced
+ --> $DIR/unbalanced_ticks.rs:34:1
+ |
+LL | /// - This `item has unbalanced tick marks
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: a backtick may be missing a pair
+
+error: item in documentation is missing backticks
+ --> $DIR/unbalanced_ticks.rs:35:23
+ |
+LL | /// - This item needs backticks_here
+ | ^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | /// - This item needs `backticks_here`
+ | ~~~~~~~~~~~~~~~~
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/doc_errors.rs b/src/tools/clippy/tests/ui/doc_errors.rs
new file mode 100644
index 000000000..30fdd3b08
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc_errors.rs
@@ -0,0 +1,104 @@
+#![warn(clippy::missing_errors_doc)]
+#![allow(clippy::result_unit_err)]
+#![allow(clippy::unnecessary_wraps)]
+
+use std::io;
+
+pub fn pub_fn_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+}
+
+pub async fn async_pub_fn_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+}
+
+/// This is not sufficiently documented.
+pub fn pub_fn_returning_io_result() -> io::Result<()> {
+ unimplemented!();
+}
+
+/// This is not sufficiently documented.
+pub async fn async_pub_fn_returning_io_result() -> io::Result<()> {
+ unimplemented!();
+}
+
+/// # Errors
+/// A description of the errors goes here.
+pub fn pub_fn_with_errors_header() -> Result<(), ()> {
+ unimplemented!();
+}
+
+/// # Errors
+/// A description of the errors goes here.
+pub async fn async_pub_fn_with_errors_header() -> Result<(), ()> {
+ unimplemented!();
+}
+
+/// This function doesn't require the documentation because it is private
+fn priv_fn_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+}
+
+/// This function doesn't require the documentation because it is private
+async fn async_priv_fn_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+}
+
+pub struct Struct1;
+
+impl Struct1 {
+ /// This is not sufficiently documented.
+ pub fn pub_method_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+
+ /// This is not sufficiently documented.
+ pub async fn async_pub_method_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+
+ /// # Errors
+ /// A description of the errors goes here.
+ pub fn pub_method_with_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+
+ /// # Errors
+ /// A description of the errors goes here.
+ pub async fn async_pub_method_with_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+
+ /// This function doesn't require the documentation because it is private.
+ fn priv_method_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+
+ /// This function doesn't require the documentation because it is private.
+ async fn async_priv_method_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+}
+
+pub trait Trait1 {
+ /// This is not sufficiently documented.
+ fn trait_method_missing_errors_header() -> Result<(), ()>;
+
+ /// # Errors
+ /// A description of the errors goes here.
+ fn trait_method_with_errors_header() -> Result<(), ()>;
+}
+
+impl Trait1 for Struct1 {
+ fn trait_method_missing_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+
+ fn trait_method_with_errors_header() -> Result<(), ()> {
+ unimplemented!();
+ }
+}
+
+fn main() -> Result<(), ()> {
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/doc_errors.stderr b/src/tools/clippy/tests/ui/doc_errors.stderr
new file mode 100644
index 000000000..c7b616e28
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc_errors.stderr
@@ -0,0 +1,58 @@
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:7:1
+ |
+LL | / pub fn pub_fn_missing_errors_header() -> Result<(), ()> {
+LL | | unimplemented!();
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::missing-errors-doc` implied by `-D warnings`
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:11:1
+ |
+LL | / pub async fn async_pub_fn_missing_errors_header() -> Result<(), ()> {
+LL | | unimplemented!();
+LL | | }
+ | |_^
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:16:1
+ |
+LL | / pub fn pub_fn_returning_io_result() -> io::Result<()> {
+LL | | unimplemented!();
+LL | | }
+ | |_^
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:21:1
+ |
+LL | / pub async fn async_pub_fn_returning_io_result() -> io::Result<()> {
+LL | | unimplemented!();
+LL | | }
+ | |_^
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:51:5
+ |
+LL | / pub fn pub_method_missing_errors_header() -> Result<(), ()> {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:56:5
+ |
+LL | / pub async fn async_pub_method_missing_errors_header() -> Result<(), ()> {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: docs for function returning `Result` missing `# Errors` section
+ --> $DIR/doc_errors.rs:85:5
+ |
+LL | fn trait_method_missing_errors_header() -> Result<(), ()>;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/doc_link_with_quotes.rs b/src/tools/clippy/tests/ui/doc_link_with_quotes.rs
new file mode 100644
index 000000000..ab52fb1a4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc_link_with_quotes.rs
@@ -0,0 +1,12 @@
+#![warn(clippy::doc_link_with_quotes)]
+
+fn main() {
+ foo()
+}
+
+/// Calls ['bar']
+pub fn foo() {
+ bar()
+}
+
+pub fn bar() {}
diff --git a/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr b/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr
new file mode 100644
index 000000000..bf6d57d8a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc_link_with_quotes.stderr
@@ -0,0 +1,10 @@
+error: possible intra-doc link using quotes instead of backticks
+ --> $DIR/doc_link_with_quotes.rs:7:1
+ |
+LL | /// Calls ['bar']
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::doc-link-with-quotes` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/doc_unsafe.rs b/src/tools/clippy/tests/ui/doc_unsafe.rs
new file mode 100644
index 000000000..b91f7aa0d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc_unsafe.rs
@@ -0,0 +1,134 @@
+// aux-build:doc_unsafe_macros.rs
+
+#![allow(clippy::let_unit_value)]
+
+#[macro_use]
+extern crate doc_unsafe_macros;
+
+/// This is not sufficiently documented
+pub unsafe fn destroy_the_planet() {
+ unimplemented!();
+}
+
+/// This one is
+///
+/// # Safety
+///
+/// This function shouldn't be called unless the horsemen are ready
+pub unsafe fn apocalypse(universe: &mut ()) {
+ unimplemented!();
+}
+
+/// This is a private function, so docs aren't necessary
+unsafe fn you_dont_see_me() {
+ unimplemented!();
+}
+
+mod private_mod {
+ pub unsafe fn only_crate_wide_accessible() {
+ unimplemented!();
+ }
+
+ pub unsafe fn republished() {
+ unimplemented!();
+ }
+}
+
+pub use private_mod::republished;
+
+pub trait SafeTraitUnsafeMethods {
+ unsafe fn woefully_underdocumented(self);
+
+ /// # Safety
+ unsafe fn at_least_somewhat_documented(self);
+}
+
+pub unsafe trait UnsafeTrait {
+ fn method();
+}
+
+/// # Safety
+pub unsafe trait DocumentedUnsafeTrait {
+ fn method2();
+}
+
+pub struct Struct;
+
+impl SafeTraitUnsafeMethods for Struct {
+ unsafe fn woefully_underdocumented(self) {
+ // all is well
+ }
+
+ unsafe fn at_least_somewhat_documented(self) {
+ // all is still well
+ }
+}
+
+unsafe impl UnsafeTrait for Struct {
+ fn method() {}
+}
+
+unsafe impl DocumentedUnsafeTrait for Struct {
+ fn method2() {}
+}
+
+impl Struct {
+ pub unsafe fn more_undocumented_unsafe() -> Self {
+ unimplemented!();
+ }
+
+ /// # Safety
+ pub unsafe fn somewhat_documented(&self) {
+ unimplemented!();
+ }
+
+ unsafe fn private(&self) {
+ unimplemented!();
+ }
+}
+
+macro_rules! very_unsafe {
+ () => {
+ pub unsafe fn whee() {
+ unimplemented!()
+ }
+
+ /// # Safety
+ ///
+ /// Please keep the seat belt fastened
+ pub unsafe fn drive() {
+ whee()
+ }
+ };
+}
+
+very_unsafe!();
+
+// we don't lint code from external macros
+undocd_unsafe!();
+
+fn main() {
+ unsafe {
+ you_dont_see_me();
+ destroy_the_planet();
+ let mut universe = ();
+ apocalypse(&mut universe);
+ private_mod::only_crate_wide_accessible();
+ drive();
+ }
+}
+
+// do not lint if any parent has `#[doc(hidden)]` attribute
+// see #7347
+#[doc(hidden)]
+pub mod __macro {
+ pub struct T;
+ impl T {
+ pub unsafe fn f() {}
+ }
+}
+
+/// # Implementation safety
+pub unsafe trait DocumentedUnsafeTraitWithImplementationHeader {
+ fn method();
+}
diff --git a/src/tools/clippy/tests/ui/doc_unsafe.stderr b/src/tools/clippy/tests/ui/doc_unsafe.stderr
new file mode 100644
index 000000000..904b88eae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/doc_unsafe.stderr
@@ -0,0 +1,55 @@
+error: unsafe function's docs miss `# Safety` section
+ --> $DIR/doc_unsafe.rs:9:1
+ |
+LL | / pub unsafe fn destroy_the_planet() {
+LL | | unimplemented!();
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::missing-safety-doc` implied by `-D warnings`
+
+error: unsafe function's docs miss `# Safety` section
+ --> $DIR/doc_unsafe.rs:32:5
+ |
+LL | / pub unsafe fn republished() {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: unsafe function's docs miss `# Safety` section
+ --> $DIR/doc_unsafe.rs:40:5
+ |
+LL | unsafe fn woefully_underdocumented(self);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: docs for unsafe trait missing `# Safety` section
+ --> $DIR/doc_unsafe.rs:46:1
+ |
+LL | / pub unsafe trait UnsafeTrait {
+LL | | fn method();
+LL | | }
+ | |_^
+
+error: unsafe function's docs miss `# Safety` section
+ --> $DIR/doc_unsafe.rs:76:5
+ |
+LL | / pub unsafe fn more_undocumented_unsafe() -> Self {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: unsafe function's docs miss `# Safety` section
+ --> $DIR/doc_unsafe.rs:92:9
+ |
+LL | / pub unsafe fn whee() {
+LL | | unimplemented!()
+LL | | }
+ | |_________^
+...
+LL | very_unsafe!();
+ | -------------- in this macro invocation
+ |
+ = note: this error originates in the macro `very_unsafe` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/double_comparison.fixed b/src/tools/clippy/tests/ui/double_comparison.fixed
new file mode 100644
index 000000000..bb6cdaa66
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_comparison.fixed
@@ -0,0 +1,30 @@
+// run-rustfix
+
+fn main() {
+ let x = 1;
+ let y = 2;
+ if x <= y {
+ // do something
+ }
+ if x <= y {
+ // do something
+ }
+ if x >= y {
+ // do something
+ }
+ if x >= y {
+ // do something
+ }
+ if x != y {
+ // do something
+ }
+ if x != y {
+ // do something
+ }
+ if x == y {
+ // do something
+ }
+ if x == y {
+ // do something
+ }
+}
diff --git a/src/tools/clippy/tests/ui/double_comparison.rs b/src/tools/clippy/tests/ui/double_comparison.rs
new file mode 100644
index 000000000..9a2a9068a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_comparison.rs
@@ -0,0 +1,30 @@
+// run-rustfix
+
+fn main() {
+ let x = 1;
+ let y = 2;
+ if x == y || x < y {
+ // do something
+ }
+ if x < y || x == y {
+ // do something
+ }
+ if x == y || x > y {
+ // do something
+ }
+ if x > y || x == y {
+ // do something
+ }
+ if x < y || x > y {
+ // do something
+ }
+ if x > y || x < y {
+ // do something
+ }
+ if x <= y && x >= y {
+ // do something
+ }
+ if x >= y && x <= y {
+ // do something
+ }
+}
diff --git a/src/tools/clippy/tests/ui/double_comparison.stderr b/src/tools/clippy/tests/ui/double_comparison.stderr
new file mode 100644
index 000000000..05ef4e25f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_comparison.stderr
@@ -0,0 +1,52 @@
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:6:8
+ |
+LL | if x == y || x < y {
+ | ^^^^^^^^^^^^^^^ help: try: `x <= y`
+ |
+ = note: `-D clippy::double-comparisons` implied by `-D warnings`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:9:8
+ |
+LL | if x < y || x == y {
+ | ^^^^^^^^^^^^^^^ help: try: `x <= y`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:12:8
+ |
+LL | if x == y || x > y {
+ | ^^^^^^^^^^^^^^^ help: try: `x >= y`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:15:8
+ |
+LL | if x > y || x == y {
+ | ^^^^^^^^^^^^^^^ help: try: `x >= y`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:18:8
+ |
+LL | if x < y || x > y {
+ | ^^^^^^^^^^^^^^ help: try: `x != y`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:21:8
+ |
+LL | if x > y || x < y {
+ | ^^^^^^^^^^^^^^ help: try: `x != y`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:24:8
+ |
+LL | if x <= y && x >= y {
+ | ^^^^^^^^^^^^^^^^ help: try: `x == y`
+
+error: this binary expression can be simplified
+ --> $DIR/double_comparison.rs:27:8
+ |
+LL | if x >= y && x <= y {
+ | ^^^^^^^^^^^^^^^^ help: try: `x == y`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/double_must_use.rs b/src/tools/clippy/tests/ui/double_must_use.rs
new file mode 100644
index 000000000..05e087b08
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_must_use.rs
@@ -0,0 +1,28 @@
+#![warn(clippy::double_must_use)]
+#![allow(clippy::result_unit_err)]
+
+#[must_use]
+pub fn must_use_result() -> Result<(), ()> {
+ unimplemented!();
+}
+
+#[must_use]
+pub fn must_use_tuple() -> (Result<(), ()>, u8) {
+ unimplemented!();
+}
+
+#[must_use]
+pub fn must_use_array() -> [Result<(), ()>; 1] {
+ unimplemented!();
+}
+
+#[must_use = "With note"]
+pub fn must_use_with_note() -> Result<(), ()> {
+ unimplemented!();
+}
+
+fn main() {
+ must_use_result();
+ must_use_tuple();
+ must_use_with_note();
+}
diff --git a/src/tools/clippy/tests/ui/double_must_use.stderr b/src/tools/clippy/tests/ui/double_must_use.stderr
new file mode 100644
index 000000000..8290ece1c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_must_use.stderr
@@ -0,0 +1,27 @@
+error: this function has an empty `#[must_use]` attribute, but returns a type already marked as `#[must_use]`
+ --> $DIR/double_must_use.rs:5:1
+ |
+LL | pub fn must_use_result() -> Result<(), ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::double-must-use` implied by `-D warnings`
+ = help: either add some descriptive text or remove the attribute
+
+error: this function has an empty `#[must_use]` attribute, but returns a type already marked as `#[must_use]`
+ --> $DIR/double_must_use.rs:10:1
+ |
+LL | pub fn must_use_tuple() -> (Result<(), ()>, u8) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: either add some descriptive text or remove the attribute
+
+error: this function has an empty `#[must_use]` attribute, but returns a type already marked as `#[must_use]`
+ --> $DIR/double_must_use.rs:15:1
+ |
+LL | pub fn must_use_array() -> [Result<(), ()>; 1] {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: either add some descriptive text or remove the attribute
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/double_neg.rs b/src/tools/clippy/tests/ui/double_neg.rs
new file mode 100644
index 000000000..38a8fbd74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_neg.rs
@@ -0,0 +1,8 @@
+#[warn(clippy::double_neg)]
+#[allow(clippy::no_effect)]
+fn main() {
+ let x = 1;
+ -x;
+ -(-x);
+ --x;
+}
diff --git a/src/tools/clippy/tests/ui/double_neg.stderr b/src/tools/clippy/tests/ui/double_neg.stderr
new file mode 100644
index 000000000..7cdb040b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_neg.stderr
@@ -0,0 +1,10 @@
+error: `--x` could be misinterpreted as pre-decrement by C programmers, is usually a no-op
+ --> $DIR/double_neg.rs:7:5
+ |
+LL | --x;
+ | ^^^
+ |
+ = note: `-D clippy::double-neg` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/double_parens.rs b/src/tools/clippy/tests/ui/double_parens.rs
new file mode 100644
index 000000000..ff1dc76ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_parens.rs
@@ -0,0 +1,56 @@
+#![warn(clippy::double_parens)]
+#![allow(dead_code, clippy::eq_op)]
+#![feature(custom_inner_attributes)]
+#![rustfmt::skip]
+
+fn dummy_fn<T>(_: T) {}
+
+struct DummyStruct;
+
+impl DummyStruct {
+ fn dummy_method<T>(self, _: T) {}
+}
+
+fn simple_double_parens() -> i32 {
+ ((0))
+}
+
+fn fn_double_parens() {
+ dummy_fn((0));
+}
+
+fn method_double_parens(x: DummyStruct) {
+ x.dummy_method((0));
+}
+
+fn tuple_double_parens() -> (i32, i32) {
+ ((1, 2))
+}
+
+fn unit_double_parens() {
+ (())
+}
+
+fn fn_tuple_ok() {
+ dummy_fn((1, 2));
+}
+
+fn method_tuple_ok(x: DummyStruct) {
+ x.dummy_method((1, 2));
+}
+
+fn fn_unit_ok() {
+ dummy_fn(());
+}
+
+fn method_unit_ok(x: DummyStruct) {
+ x.dummy_method(());
+}
+
+// Issue #3206
+fn inside_macro() {
+ assert_eq!((1, 2), (1, 2), "Error");
+ assert_eq!(((1, 2)), (1, 2), "Error");
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/double_parens.stderr b/src/tools/clippy/tests/ui/double_parens.stderr
new file mode 100644
index 000000000..40fcad2ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/double_parens.stderr
@@ -0,0 +1,40 @@
+error: consider removing unnecessary double parentheses
+ --> $DIR/double_parens.rs:15:5
+ |
+LL | ((0))
+ | ^^^^^
+ |
+ = note: `-D clippy::double-parens` implied by `-D warnings`
+
+error: consider removing unnecessary double parentheses
+ --> $DIR/double_parens.rs:19:14
+ |
+LL | dummy_fn((0));
+ | ^^^
+
+error: consider removing unnecessary double parentheses
+ --> $DIR/double_parens.rs:23:20
+ |
+LL | x.dummy_method((0));
+ | ^^^
+
+error: consider removing unnecessary double parentheses
+ --> $DIR/double_parens.rs:27:5
+ |
+LL | ((1, 2))
+ | ^^^^^^^^
+
+error: consider removing unnecessary double parentheses
+ --> $DIR/double_parens.rs:31:5
+ |
+LL | (())
+ | ^^^^
+
+error: consider removing unnecessary double parentheses
+ --> $DIR/double_parens.rs:53:16
+ |
+LL | assert_eq!(((1, 2)), (1, 2), "Error");
+ | ^^^^^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/drop_forget_copy.rs b/src/tools/clippy/tests/ui/drop_forget_copy.rs
new file mode 100644
index 000000000..7c7a9ecff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/drop_forget_copy.rs
@@ -0,0 +1,66 @@
+#![warn(clippy::drop_copy, clippy::forget_copy)]
+#![allow(clippy::toplevel_ref_arg, clippy::drop_ref, clippy::forget_ref, unused_mut)]
+
+use std::mem::{drop, forget};
+use std::vec::Vec;
+
+#[derive(Copy, Clone)]
+struct SomeStruct;
+
+struct AnotherStruct {
+ x: u8,
+ y: u8,
+ z: Vec<u8>,
+}
+
+impl Clone for AnotherStruct {
+ fn clone(&self) -> AnotherStruct {
+ AnotherStruct {
+ x: self.x,
+ y: self.y,
+ z: self.z.clone(),
+ }
+ }
+}
+
+fn main() {
+ let s1 = SomeStruct {};
+ let s2 = s1;
+ let s3 = &s1;
+ let mut s4 = s1;
+ let ref s5 = s1;
+
+ drop(s1);
+ drop(s2);
+ drop(s3);
+ drop(s4);
+ drop(s5);
+
+ forget(s1);
+ forget(s2);
+ forget(s3);
+ forget(s4);
+ forget(s5);
+
+ let a1 = AnotherStruct {
+ x: 255,
+ y: 0,
+ z: vec![1, 2, 3],
+ };
+ let a2 = &a1;
+ let mut a3 = a1.clone();
+ let ref a4 = a1;
+ let a5 = a1.clone();
+
+ drop(a2);
+ drop(a3);
+ drop(a4);
+ drop(a5);
+
+ forget(a2);
+ let a3 = &a1;
+ forget(a3);
+ forget(a4);
+ let a5 = a1.clone();
+ forget(a5);
+}
diff --git a/src/tools/clippy/tests/ui/drop_forget_copy.stderr b/src/tools/clippy/tests/ui/drop_forget_copy.stderr
new file mode 100644
index 000000000..88228afae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/drop_forget_copy.stderr
@@ -0,0 +1,76 @@
+error: calls to `std::mem::drop` with a value that implements `Copy`. Dropping a copy leaves the original intact
+ --> $DIR/drop_forget_copy.rs:33:5
+ |
+LL | drop(s1);
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::drop-copy` implied by `-D warnings`
+note: argument has type `SomeStruct`
+ --> $DIR/drop_forget_copy.rs:33:10
+ |
+LL | drop(s1);
+ | ^^
+
+error: calls to `std::mem::drop` with a value that implements `Copy`. Dropping a copy leaves the original intact
+ --> $DIR/drop_forget_copy.rs:34:5
+ |
+LL | drop(s2);
+ | ^^^^^^^^
+ |
+note: argument has type `SomeStruct`
+ --> $DIR/drop_forget_copy.rs:34:10
+ |
+LL | drop(s2);
+ | ^^
+
+error: calls to `std::mem::drop` with a value that implements `Copy`. Dropping a copy leaves the original intact
+ --> $DIR/drop_forget_copy.rs:36:5
+ |
+LL | drop(s4);
+ | ^^^^^^^^
+ |
+note: argument has type `SomeStruct`
+ --> $DIR/drop_forget_copy.rs:36:10
+ |
+LL | drop(s4);
+ | ^^
+
+error: calls to `std::mem::forget` with a value that implements `Copy`. Forgetting a copy leaves the original intact
+ --> $DIR/drop_forget_copy.rs:39:5
+ |
+LL | forget(s1);
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::forget-copy` implied by `-D warnings`
+note: argument has type `SomeStruct`
+ --> $DIR/drop_forget_copy.rs:39:12
+ |
+LL | forget(s1);
+ | ^^
+
+error: calls to `std::mem::forget` with a value that implements `Copy`. Forgetting a copy leaves the original intact
+ --> $DIR/drop_forget_copy.rs:40:5
+ |
+LL | forget(s2);
+ | ^^^^^^^^^^
+ |
+note: argument has type `SomeStruct`
+ --> $DIR/drop_forget_copy.rs:40:12
+ |
+LL | forget(s2);
+ | ^^
+
+error: calls to `std::mem::forget` with a value that implements `Copy`. Forgetting a copy leaves the original intact
+ --> $DIR/drop_forget_copy.rs:42:5
+ |
+LL | forget(s4);
+ | ^^^^^^^^^^
+ |
+note: argument has type `SomeStruct`
+ --> $DIR/drop_forget_copy.rs:42:12
+ |
+LL | forget(s4);
+ | ^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/drop_non_drop.rs b/src/tools/clippy/tests/ui/drop_non_drop.rs
new file mode 100644
index 000000000..5a0ebde82
--- /dev/null
+++ b/src/tools/clippy/tests/ui/drop_non_drop.rs
@@ -0,0 +1,40 @@
+#![warn(clippy::drop_non_drop)]
+
+use core::mem::drop;
+
+fn make_result<T>(t: T) -> Result<T, ()> {
+ Ok(t)
+}
+
+#[must_use]
+fn must_use<T>(t: T) -> T {
+ t
+}
+
+fn drop_generic<T>(t: T) {
+ // Don't lint
+ drop(t)
+}
+
+fn main() {
+ struct Foo;
+ // Lint
+ drop(Foo);
+ // Don't lint
+ drop(make_result(Foo));
+ // Don't lint
+ drop(must_use(Foo));
+
+ struct Bar;
+ impl Drop for Bar {
+ fn drop(&mut self) {}
+ }
+ // Don't lint
+ drop(Bar);
+
+ struct Baz<T>(T);
+ // Lint
+ drop(Baz(Foo));
+ // Don't lint
+ drop(Baz(Bar));
+}
diff --git a/src/tools/clippy/tests/ui/drop_non_drop.stderr b/src/tools/clippy/tests/ui/drop_non_drop.stderr
new file mode 100644
index 000000000..30121033d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/drop_non_drop.stderr
@@ -0,0 +1,27 @@
+error: call to `std::mem::drop` with a value that does not implement `Drop`. Dropping such a type only extends its contained lifetimes
+ --> $DIR/drop_non_drop.rs:22:5
+ |
+LL | drop(Foo);
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::drop-non-drop` implied by `-D warnings`
+note: argument has type `main::Foo`
+ --> $DIR/drop_non_drop.rs:22:10
+ |
+LL | drop(Foo);
+ | ^^^
+
+error: call to `std::mem::drop` with a value that does not implement `Drop`. Dropping such a type only extends its contained lifetimes
+ --> $DIR/drop_non_drop.rs:37:5
+ |
+LL | drop(Baz(Foo));
+ | ^^^^^^^^^^^^^^
+ |
+note: argument has type `main::Baz<main::Foo>`
+ --> $DIR/drop_non_drop.rs:37:10
+ |
+LL | drop(Baz(Foo));
+ | ^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/drop_ref.rs b/src/tools/clippy/tests/ui/drop_ref.rs
new file mode 100644
index 000000000..7de0b0bbd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/drop_ref.rs
@@ -0,0 +1,74 @@
+#![warn(clippy::drop_ref)]
+#![allow(clippy::toplevel_ref_arg)]
+#![allow(clippy::map_err_ignore)]
+#![allow(clippy::unnecessary_wraps, clippy::drop_non_drop)]
+
+use std::mem::drop;
+
+struct SomeStruct;
+
+fn main() {
+ drop(&SomeStruct);
+
+ let mut owned1 = SomeStruct;
+ drop(&owned1);
+ drop(&&owned1);
+ drop(&mut owned1);
+ drop(owned1); //OK
+
+ let reference1 = &SomeStruct;
+ drop(reference1);
+
+ let reference2 = &mut SomeStruct;
+ drop(reference2);
+
+ let ref reference3 = SomeStruct;
+ drop(reference3);
+}
+
+#[allow(dead_code)]
+fn test_generic_fn_drop<T>(val: T) {
+ drop(&val);
+ drop(val); //OK
+}
+
+#[allow(dead_code)]
+fn test_similarly_named_function() {
+ fn drop<T>(_val: T) {}
+ drop(&SomeStruct); //OK; call to unrelated function which happens to have the same name
+ std::mem::drop(&SomeStruct);
+}
+
+#[derive(Copy, Clone)]
+pub struct Error;
+fn produce_half_owl_error() -> Result<(), Error> {
+ Ok(())
+}
+
+fn produce_half_owl_ok() -> Result<bool, ()> {
+ Ok(true)
+}
+
+#[allow(dead_code)]
+fn test_owl_result() -> Result<(), ()> {
+ produce_half_owl_error().map_err(|_| ())?;
+ produce_half_owl_ok().map(|_| ())?;
+ // the following should not be linted,
+ // we should not force users to use toilet closures
+ // to produce owl results when drop is more convenient
+ produce_half_owl_error().map_err(drop)?;
+ produce_half_owl_ok().map_err(drop)?;
+ Ok(())
+}
+
+#[allow(dead_code)]
+fn test_owl_result_2() -> Result<u8, ()> {
+ produce_half_owl_error().map_err(|_| ())?;
+ produce_half_owl_ok().map(|_| ())?;
+ // the following should not be linted,
+ // we should not force users to use toilet closures
+ // to produce owl results when drop is more convenient
+ produce_half_owl_error().map_err(drop)?;
+ produce_half_owl_ok().map(drop)?;
+ Ok(1)
+}
diff --git a/src/tools/clippy/tests/ui/drop_ref.stderr b/src/tools/clippy/tests/ui/drop_ref.stderr
new file mode 100644
index 000000000..531849f06
--- /dev/null
+++ b/src/tools/clippy/tests/ui/drop_ref.stderr
@@ -0,0 +1,111 @@
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:11:5
+ |
+LL | drop(&SomeStruct);
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::drop-ref` implied by `-D warnings`
+note: argument has type `&SomeStruct`
+ --> $DIR/drop_ref.rs:11:10
+ |
+LL | drop(&SomeStruct);
+ | ^^^^^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:14:5
+ |
+LL | drop(&owned1);
+ | ^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/drop_ref.rs:14:10
+ |
+LL | drop(&owned1);
+ | ^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:15:5
+ |
+LL | drop(&&owned1);
+ | ^^^^^^^^^^^^^^
+ |
+note: argument has type `&&SomeStruct`
+ --> $DIR/drop_ref.rs:15:10
+ |
+LL | drop(&&owned1);
+ | ^^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:16:5
+ |
+LL | drop(&mut owned1);
+ | ^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&mut SomeStruct`
+ --> $DIR/drop_ref.rs:16:10
+ |
+LL | drop(&mut owned1);
+ | ^^^^^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:20:5
+ |
+LL | drop(reference1);
+ | ^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/drop_ref.rs:20:10
+ |
+LL | drop(reference1);
+ | ^^^^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:23:5
+ |
+LL | drop(reference2);
+ | ^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&mut SomeStruct`
+ --> $DIR/drop_ref.rs:23:10
+ |
+LL | drop(reference2);
+ | ^^^^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:26:5
+ |
+LL | drop(reference3);
+ | ^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/drop_ref.rs:26:10
+ |
+LL | drop(reference3);
+ | ^^^^^^^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:31:5
+ |
+LL | drop(&val);
+ | ^^^^^^^^^^
+ |
+note: argument has type `&T`
+ --> $DIR/drop_ref.rs:31:10
+ |
+LL | drop(&val);
+ | ^^^^
+
+error: calls to `std::mem::drop` with a reference instead of an owned value. Dropping a reference does nothing
+ --> $DIR/drop_ref.rs:39:5
+ |
+LL | std::mem::drop(&SomeStruct);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/drop_ref.rs:39:20
+ |
+LL | std::mem::drop(&SomeStruct);
+ | ^^^^^^^^^^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/duplicate_underscore_argument.rs b/src/tools/clippy/tests/ui/duplicate_underscore_argument.rs
new file mode 100644
index 000000000..54d748c7c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/duplicate_underscore_argument.rs
@@ -0,0 +1,10 @@
+#![warn(clippy::duplicate_underscore_argument)]
+#[allow(dead_code, unused)]
+
+fn join_the_dark_side(darth: i32, _darth: i32) {}
+fn join_the_light_side(knight: i32, _master: i32) {} // the Force is strong with this one
+
+fn main() {
+ join_the_dark_side(0, 0);
+ join_the_light_side(0, 0);
+}
diff --git a/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr b/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr
new file mode 100644
index 000000000..f71614a5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/duplicate_underscore_argument.stderr
@@ -0,0 +1,10 @@
+error: `darth` already exists, having another argument having almost the same name makes code comprehension and documentation more difficult
+ --> $DIR/duplicate_underscore_argument.rs:4:23
+ |
+LL | fn join_the_dark_side(darth: i32, _darth: i32) {}
+ | ^^^^^
+ |
+ = note: `-D clippy::duplicate-underscore-argument` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/duration_subsec.fixed b/src/tools/clippy/tests/ui/duration_subsec.fixed
new file mode 100644
index 000000000..d92b8998e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/duration_subsec.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+#![allow(dead_code, clippy::needless_borrow)]
+#![warn(clippy::duration_subsec)]
+
+use std::time::Duration;
+
+fn main() {
+ let dur = Duration::new(5, 0);
+
+ let bad_millis_1 = dur.subsec_millis();
+ let bad_millis_2 = dur.subsec_millis();
+ let good_millis = dur.subsec_millis();
+ assert_eq!(bad_millis_1, good_millis);
+ assert_eq!(bad_millis_2, good_millis);
+
+ let bad_micros = dur.subsec_micros();
+ let good_micros = dur.subsec_micros();
+ assert_eq!(bad_micros, good_micros);
+
+ // Handle refs
+ let _ = (&dur).subsec_micros();
+
+ // Handle constants
+ const NANOS_IN_MICRO: u32 = 1_000;
+ let _ = dur.subsec_micros();
+
+ // Other literals aren't linted
+ let _ = dur.subsec_nanos() / 699;
+}
diff --git a/src/tools/clippy/tests/ui/duration_subsec.rs b/src/tools/clippy/tests/ui/duration_subsec.rs
new file mode 100644
index 000000000..08da80499
--- /dev/null
+++ b/src/tools/clippy/tests/ui/duration_subsec.rs
@@ -0,0 +1,29 @@
+// run-rustfix
+#![allow(dead_code, clippy::needless_borrow)]
+#![warn(clippy::duration_subsec)]
+
+use std::time::Duration;
+
+fn main() {
+ let dur = Duration::new(5, 0);
+
+ let bad_millis_1 = dur.subsec_micros() / 1_000;
+ let bad_millis_2 = dur.subsec_nanos() / 1_000_000;
+ let good_millis = dur.subsec_millis();
+ assert_eq!(bad_millis_1, good_millis);
+ assert_eq!(bad_millis_2, good_millis);
+
+ let bad_micros = dur.subsec_nanos() / 1_000;
+ let good_micros = dur.subsec_micros();
+ assert_eq!(bad_micros, good_micros);
+
+ // Handle refs
+ let _ = (&dur).subsec_nanos() / 1_000;
+
+ // Handle constants
+ const NANOS_IN_MICRO: u32 = 1_000;
+ let _ = dur.subsec_nanos() / NANOS_IN_MICRO;
+
+ // Other literals aren't linted
+ let _ = dur.subsec_nanos() / 699;
+}
diff --git a/src/tools/clippy/tests/ui/duration_subsec.stderr b/src/tools/clippy/tests/ui/duration_subsec.stderr
new file mode 100644
index 000000000..cdbeff6a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/duration_subsec.stderr
@@ -0,0 +1,34 @@
+error: calling `subsec_millis()` is more concise than this calculation
+ --> $DIR/duration_subsec.rs:10:24
+ |
+LL | let bad_millis_1 = dur.subsec_micros() / 1_000;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `dur.subsec_millis()`
+ |
+ = note: `-D clippy::duration-subsec` implied by `-D warnings`
+
+error: calling `subsec_millis()` is more concise than this calculation
+ --> $DIR/duration_subsec.rs:11:24
+ |
+LL | let bad_millis_2 = dur.subsec_nanos() / 1_000_000;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `dur.subsec_millis()`
+
+error: calling `subsec_micros()` is more concise than this calculation
+ --> $DIR/duration_subsec.rs:16:22
+ |
+LL | let bad_micros = dur.subsec_nanos() / 1_000;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `dur.subsec_micros()`
+
+error: calling `subsec_micros()` is more concise than this calculation
+ --> $DIR/duration_subsec.rs:21:13
+ |
+LL | let _ = (&dur).subsec_nanos() / 1_000;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(&dur).subsec_micros()`
+
+error: calling `subsec_micros()` is more concise than this calculation
+ --> $DIR/duration_subsec.rs:25:13
+ |
+LL | let _ = dur.subsec_nanos() / NANOS_IN_MICRO;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `dur.subsec_micros()`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/else_if_without_else.rs b/src/tools/clippy/tests/ui/else_if_without_else.rs
new file mode 100644
index 000000000..879b3ac39
--- /dev/null
+++ b/src/tools/clippy/tests/ui/else_if_without_else.rs
@@ -0,0 +1,58 @@
+#![warn(clippy::all)]
+#![warn(clippy::else_if_without_else)]
+
+fn bla1() -> bool {
+ unimplemented!()
+}
+fn bla2() -> bool {
+ unimplemented!()
+}
+fn bla3() -> bool {
+ unimplemented!()
+}
+
+fn main() {
+ if bla1() {
+ println!("if");
+ }
+
+ if bla1() {
+ println!("if");
+ } else {
+ println!("else");
+ }
+
+ if bla1() {
+ println!("if");
+ } else if bla2() {
+ println!("else if");
+ } else {
+ println!("else")
+ }
+
+ if bla1() {
+ println!("if");
+ } else if bla2() {
+ println!("else if 1");
+ } else if bla3() {
+ println!("else if 2");
+ } else {
+ println!("else")
+ }
+
+ if bla1() {
+ println!("if");
+ } else if bla2() {
+ //~ ERROR else if without else
+ println!("else if");
+ }
+
+ if bla1() {
+ println!("if");
+ } else if bla2() {
+ println!("else if 1");
+ } else if bla3() {
+ //~ ERROR else if without else
+ println!("else if 2");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/else_if_without_else.stderr b/src/tools/clippy/tests/ui/else_if_without_else.stderr
new file mode 100644
index 000000000..6f47658cf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/else_if_without_else.stderr
@@ -0,0 +1,27 @@
+error: `if` expression with an `else if`, but without a final `else`
+ --> $DIR/else_if_without_else.rs:45:12
+ |
+LL | } else if bla2() {
+ | ____________^
+LL | | //~ ERROR else if without else
+LL | | println!("else if");
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::else-if-without-else` implied by `-D warnings`
+ = help: add an `else` block here
+
+error: `if` expression with an `else if`, but without a final `else`
+ --> $DIR/else_if_without_else.rs:54:12
+ |
+LL | } else if bla3() {
+ | ____________^
+LL | | //~ ERROR else if without else
+LL | | println!("else if 2");
+LL | | }
+ | |_____^
+ |
+ = help: add an `else` block here
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/empty_drop.fixed b/src/tools/clippy/tests/ui/empty_drop.fixed
new file mode 100644
index 000000000..2e1b76846
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_drop.fixed
@@ -0,0 +1,24 @@
+// run-rustfix
+#![warn(clippy::empty_drop)]
+#![allow(unused)]
+
+// should cause an error
+struct Foo;
+
+
+
+// shouldn't cause an error
+struct Bar;
+
+impl Drop for Bar {
+ fn drop(&mut self) {
+ println!("dropping bar!");
+ }
+}
+
+// should error
+struct Baz;
+
+
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_drop.rs b/src/tools/clippy/tests/ui/empty_drop.rs
new file mode 100644
index 000000000..75232b033
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_drop.rs
@@ -0,0 +1,30 @@
+// run-rustfix
+#![warn(clippy::empty_drop)]
+#![allow(unused)]
+
+// should cause an error
+struct Foo;
+
+impl Drop for Foo {
+ fn drop(&mut self) {}
+}
+
+// shouldn't cause an error
+struct Bar;
+
+impl Drop for Bar {
+ fn drop(&mut self) {
+ println!("dropping bar!");
+ }
+}
+
+// should error
+struct Baz;
+
+impl Drop for Baz {
+ fn drop(&mut self) {
+ {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_drop.stderr b/src/tools/clippy/tests/ui/empty_drop.stderr
new file mode 100644
index 000000000..70f7880d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_drop.stderr
@@ -0,0 +1,22 @@
+error: empty drop implementation
+ --> $DIR/empty_drop.rs:8:1
+ |
+LL | / impl Drop for Foo {
+LL | | fn drop(&mut self) {}
+LL | | }
+ | |_^ help: try removing this impl
+ |
+ = note: `-D clippy::empty-drop` implied by `-D warnings`
+
+error: empty drop implementation
+ --> $DIR/empty_drop.rs:24:1
+ |
+LL | / impl Drop for Baz {
+LL | | fn drop(&mut self) {
+LL | | {}
+LL | | }
+LL | | }
+ | |_^ help: try removing this impl
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/empty_enum.rs b/src/tools/clippy/tests/ui/empty_enum.rs
new file mode 100644
index 000000000..a2e5c13c4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_enum.rs
@@ -0,0 +1,7 @@
+#![allow(dead_code)]
+#![warn(clippy::empty_enum)]
+// Enable never type to test empty enum lint
+#![feature(never_type)]
+enum Empty {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_enum.stderr b/src/tools/clippy/tests/ui/empty_enum.stderr
new file mode 100644
index 000000000..7125e5f60
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_enum.stderr
@@ -0,0 +1,11 @@
+error: enum with no variants
+ --> $DIR/empty_enum.rs:5:1
+ |
+LL | enum Empty {}
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::empty-enum` implied by `-D warnings`
+ = help: consider using the uninhabited type `!` (never type) or a wrapper around it to introduce a type which can't be instantiated
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/empty_enum_without_never_type.rs b/src/tools/clippy/tests/ui/empty_enum_without_never_type.rs
new file mode 100644
index 000000000..386677352
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_enum_without_never_type.rs
@@ -0,0 +1,7 @@
+#![allow(dead_code)]
+#![warn(clippy::empty_enum)]
+
+// `never_type` is not enabled; this test has no stderr file
+enum Empty {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_line_after_outer_attribute.rs b/src/tools/clippy/tests/ui/empty_line_after_outer_attribute.rs
new file mode 100644
index 000000000..697412c00
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_line_after_outer_attribute.rs
@@ -0,0 +1,120 @@
+// aux-build:proc_macro_attr.rs
+#![warn(clippy::empty_line_after_outer_attr)]
+#![allow(clippy::assertions_on_constants)]
+#![feature(custom_inner_attributes)]
+#![rustfmt::skip]
+
+#[macro_use]
+extern crate proc_macro_attr;
+
+// This should produce a warning
+#[crate_type = "lib"]
+
+/// some comment
+fn with_one_newline_and_comment() { assert!(true) }
+
+// This should not produce a warning
+#[crate_type = "lib"]
+/// some comment
+fn with_no_newline_and_comment() { assert!(true) }
+
+
+// This should produce a warning
+#[crate_type = "lib"]
+
+fn with_one_newline() { assert!(true) }
+
+// This should produce a warning, too
+#[crate_type = "lib"]
+
+
+fn with_two_newlines() { assert!(true) }
+
+
+// This should produce a warning
+#[crate_type = "lib"]
+
+enum Baz {
+ One,
+ Two
+}
+
+// This should produce a warning
+#[crate_type = "lib"]
+
+struct Foo {
+ one: isize,
+ two: isize
+}
+
+// This should produce a warning
+#[crate_type = "lib"]
+
+mod foo {
+}
+
+/// This doc comment should not produce a warning
+
+/** This is also a doc comment and should not produce a warning
+ */
+
+// This should not produce a warning
+#[allow(non_camel_case_types)]
+#[allow(missing_docs)]
+#[allow(missing_docs)]
+fn three_attributes() { assert!(true) }
+
+// This should not produce a warning
+#[doc = "
+Returns the escaped value of the textual representation of
+
+"]
+pub fn function() -> bool {
+ true
+}
+
+// This should not produce a warning
+#[derive(Clone, Copy)]
+pub enum FooFighter {
+ Bar1,
+
+ Bar2,
+
+ Bar3,
+
+ Bar4
+}
+
+// This should not produce a warning because the empty line is inside a block comment
+#[crate_type = "lib"]
+/*
+
+*/
+pub struct S;
+
+// This should not produce a warning
+#[crate_type = "lib"]
+/* test */
+pub struct T;
+
+// This should not produce a warning
+// See https://github.com/rust-lang/rust-clippy/issues/5567
+#[fake_async_trait]
+pub trait Bazz {
+ fn foo() -> Vec<u8> {
+ let _i = "";
+
+
+
+ vec![]
+ }
+}
+
+#[derive(Clone, Copy)]
+#[dummy(string = "first line
+
+second line
+")]
+pub struct Args;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_line_after_outer_attribute.stderr b/src/tools/clippy/tests/ui/empty_line_after_outer_attribute.stderr
new file mode 100644
index 000000000..594fca44a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_line_after_outer_attribute.stderr
@@ -0,0 +1,54 @@
+error: found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
+ --> $DIR/empty_line_after_outer_attribute.rs:11:1
+ |
+LL | / #[crate_type = "lib"]
+LL | |
+LL | | /// some comment
+LL | | fn with_one_newline_and_comment() { assert!(true) }
+ | |_
+ |
+ = note: `-D clippy::empty-line-after-outer-attr` implied by `-D warnings`
+
+error: found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
+ --> $DIR/empty_line_after_outer_attribute.rs:23:1
+ |
+LL | / #[crate_type = "lib"]
+LL | |
+LL | | fn with_one_newline() { assert!(true) }
+ | |_
+
+error: found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
+ --> $DIR/empty_line_after_outer_attribute.rs:28:1
+ |
+LL | / #[crate_type = "lib"]
+LL | |
+LL | |
+LL | | fn with_two_newlines() { assert!(true) }
+ | |_
+
+error: found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
+ --> $DIR/empty_line_after_outer_attribute.rs:35:1
+ |
+LL | / #[crate_type = "lib"]
+LL | |
+LL | | enum Baz {
+ | |_
+
+error: found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
+ --> $DIR/empty_line_after_outer_attribute.rs:43:1
+ |
+LL | / #[crate_type = "lib"]
+LL | |
+LL | | struct Foo {
+ | |_
+
+error: found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
+ --> $DIR/empty_line_after_outer_attribute.rs:51:1
+ |
+LL | / #[crate_type = "lib"]
+LL | |
+LL | | mod foo {
+ | |_
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/empty_loop.rs b/src/tools/clippy/tests/ui/empty_loop.rs
new file mode 100644
index 000000000..8fd7697eb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_loop.rs
@@ -0,0 +1,51 @@
+// aux-build:macro_rules.rs
+
+#![warn(clippy::empty_loop)]
+
+#[macro_use]
+extern crate macro_rules;
+
+fn should_trigger() {
+ loop {}
+ loop {
+ loop {}
+ }
+
+ 'outer: loop {
+ 'inner: loop {}
+ }
+}
+
+fn should_not_trigger() {
+ loop {
+ panic!("This is fine")
+ }
+ let ten_millis = std::time::Duration::from_millis(10);
+ loop {
+ std::thread::sleep(ten_millis)
+ }
+
+ #[allow(clippy::never_loop)]
+ 'outer: loop {
+ 'inner: loop {
+ break 'inner;
+ }
+ break 'outer;
+ }
+
+ // Make sure `allow` works for this lint
+ #[allow(clippy::empty_loop)]
+ loop {}
+
+ // We don't lint loops inside macros
+ macro_rules! foo {
+ () => {
+ loop {}
+ };
+ }
+
+ // We don't lint external macros
+ foofoo!()
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_loop.stderr b/src/tools/clippy/tests/ui/empty_loop.stderr
new file mode 100644
index 000000000..555f3d3d8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_loop.stderr
@@ -0,0 +1,27 @@
+error: empty `loop {}` wastes CPU cycles
+ --> $DIR/empty_loop.rs:9:5
+ |
+LL | loop {}
+ | ^^^^^^^
+ |
+ = note: `-D clippy::empty-loop` implied by `-D warnings`
+ = help: you should either use `panic!()` or add `std::thread::sleep(..);` to the loop body
+
+error: empty `loop {}` wastes CPU cycles
+ --> $DIR/empty_loop.rs:11:9
+ |
+LL | loop {}
+ | ^^^^^^^
+ |
+ = help: you should either use `panic!()` or add `std::thread::sleep(..);` to the loop body
+
+error: empty `loop {}` wastes CPU cycles
+ --> $DIR/empty_loop.rs:15:9
+ |
+LL | 'inner: loop {}
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: you should either use `panic!()` or add `std::thread::sleep(..);` to the loop body
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/empty_loop_no_std.rs b/src/tools/clippy/tests/ui/empty_loop_no_std.rs
new file mode 100644
index 000000000..235e0fc51
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_loop_no_std.rs
@@ -0,0 +1,27 @@
+// compile-flags: -Clink-arg=-nostartfiles
+// ignore-macos
+// ignore-windows
+
+#![warn(clippy::empty_loop)]
+#![feature(lang_items, start, libc)]
+#![no_std]
+
+use core::panic::PanicInfo;
+
+#[start]
+fn main(argc: isize, argv: *const *const u8) -> isize {
+ // This should trigger the lint
+ loop {}
+}
+
+#[panic_handler]
+fn panic(_info: &PanicInfo) -> ! {
+ // This should NOT trigger the lint
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {
+ // This should also trigger the lint
+ loop {}
+}
diff --git a/src/tools/clippy/tests/ui/empty_loop_no_std.stderr b/src/tools/clippy/tests/ui/empty_loop_no_std.stderr
new file mode 100644
index 000000000..520248fcb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_loop_no_std.stderr
@@ -0,0 +1,19 @@
+error: empty `loop {}` wastes CPU cycles
+ --> $DIR/empty_loop_no_std.rs:14:5
+ |
+LL | loop {}
+ | ^^^^^^^
+ |
+ = note: `-D clippy::empty-loop` implied by `-D warnings`
+ = help: you should either use `panic!()` or add a call pausing or sleeping the thread to the loop body
+
+error: empty `loop {}` wastes CPU cycles
+ --> $DIR/empty_loop_no_std.rs:26:5
+ |
+LL | loop {}
+ | ^^^^^^^
+ |
+ = help: you should either use `panic!()` or add a call pausing or sleeping the thread to the loop body
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/empty_structs_with_brackets.fixed b/src/tools/clippy/tests/ui/empty_structs_with_brackets.fixed
new file mode 100644
index 000000000..80f07603b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_structs_with_brackets.fixed
@@ -0,0 +1,25 @@
+// run-rustfix
+#![warn(clippy::empty_structs_with_brackets)]
+#![allow(dead_code)]
+
+pub struct MyEmptyStruct; // should trigger lint
+struct MyEmptyTupleStruct; // should trigger lint
+
+// should not trigger lint
+struct MyCfgStruct {
+ #[cfg(feature = "thisisneverenabled")]
+ field: u8,
+}
+
+// should not trigger lint
+struct MyCfgTupleStruct(#[cfg(feature = "thisisneverenabled")] u8);
+
+// should not trigger lint
+struct MyStruct {
+ field: u8,
+}
+struct MyTupleStruct(usize, String); // should not trigger lint
+struct MySingleTupleStruct(usize); // should not trigger lint
+struct MyUnitLikeStruct; // should not trigger lint
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_structs_with_brackets.rs b/src/tools/clippy/tests/ui/empty_structs_with_brackets.rs
new file mode 100644
index 000000000..1d1ed4c76
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_structs_with_brackets.rs
@@ -0,0 +1,25 @@
+// run-rustfix
+#![warn(clippy::empty_structs_with_brackets)]
+#![allow(dead_code)]
+
+pub struct MyEmptyStruct {} // should trigger lint
+struct MyEmptyTupleStruct(); // should trigger lint
+
+// should not trigger lint
+struct MyCfgStruct {
+ #[cfg(feature = "thisisneverenabled")]
+ field: u8,
+}
+
+// should not trigger lint
+struct MyCfgTupleStruct(#[cfg(feature = "thisisneverenabled")] u8);
+
+// should not trigger lint
+struct MyStruct {
+ field: u8,
+}
+struct MyTupleStruct(usize, String); // should not trigger lint
+struct MySingleTupleStruct(usize); // should not trigger lint
+struct MyUnitLikeStruct; // should not trigger lint
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/empty_structs_with_brackets.stderr b/src/tools/clippy/tests/ui/empty_structs_with_brackets.stderr
new file mode 100644
index 000000000..0308cb557
--- /dev/null
+++ b/src/tools/clippy/tests/ui/empty_structs_with_brackets.stderr
@@ -0,0 +1,19 @@
+error: found empty brackets on struct declaration
+ --> $DIR/empty_structs_with_brackets.rs:5:25
+ |
+LL | pub struct MyEmptyStruct {} // should trigger lint
+ | ^^^
+ |
+ = note: `-D clippy::empty-structs-with-brackets` implied by `-D warnings`
+ = help: remove the brackets
+
+error: found empty brackets on struct declaration
+ --> $DIR/empty_structs_with_brackets.rs:6:26
+ |
+LL | struct MyEmptyTupleStruct(); // should trigger lint
+ | ^^^
+ |
+ = help: remove the brackets
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/entry.fixed b/src/tools/clippy/tests/ui/entry.fixed
new file mode 100644
index 000000000..e43635abc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry.fixed
@@ -0,0 +1,154 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_pass_by_value, clippy::collapsible_if)]
+#![warn(clippy::map_entry)]
+
+use std::arch::asm;
+use std::collections::HashMap;
+use std::hash::Hash;
+
+macro_rules! m {
+ ($e:expr) => {{ $e }};
+}
+
+macro_rules! insert {
+ ($map:expr, $key:expr, $val:expr) => {
+ $map.insert($key, $val)
+ };
+}
+
+fn foo() {}
+
+fn hash_map<K: Eq + Hash + Copy, V: Copy>(m: &mut HashMap<K, V>, m2: &mut HashMap<K, V>, k: K, k2: K, v: V, v2: V) {
+ // or_insert(v)
+ m.entry(k).or_insert(v);
+
+ // semicolon on insert, use or_insert_with(..)
+ m.entry(k).or_insert_with(|| {
+ if true {
+ v
+ } else {
+ v2
+ }
+ });
+
+ // semicolon on if, use or_insert_with(..)
+ m.entry(k).or_insert_with(|| {
+ if true {
+ v
+ } else {
+ v2
+ }
+ });
+
+ // early return, use if let
+ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
+ if true {
+ e.insert(v);
+ } else {
+ e.insert(v2);
+ return;
+ }
+ }
+
+ // use or_insert_with(..)
+ m.entry(k).or_insert_with(|| {
+ foo();
+ v
+ });
+
+ // semicolon on insert and match, use or_insert_with(..)
+ m.entry(k).or_insert_with(|| {
+ match 0 {
+ 1 if true => {
+ v
+ },
+ _ => {
+ v2
+ },
+ }
+ });
+
+ // one branch doesn't insert, use if let
+ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
+ match 0 {
+ 0 => foo(),
+ _ => {
+ e.insert(v2);
+ },
+ };
+ }
+
+ // use or_insert_with
+ m.entry(k).or_insert_with(|| {
+ foo();
+ match 0 {
+ 0 if false => {
+ v
+ },
+ 1 => {
+ foo();
+ v
+ },
+ 2 | 3 => {
+ for _ in 0..2 {
+ foo();
+ }
+ if true {
+ v
+ } else {
+ v2
+ }
+ },
+ _ => {
+ v2
+ },
+ }
+ });
+
+ // ok, insert in loop
+ if !m.contains_key(&k) {
+ for _ in 0..2 {
+ m.insert(k, v);
+ }
+ }
+
+ // macro_expansion test, use or_insert(..)
+ m.entry(m!(k)).or_insert_with(|| m!(v));
+
+ // ok, map used before insertion
+ if !m.contains_key(&k) {
+ let _ = m.len();
+ m.insert(k, v);
+ }
+
+ // ok, inline asm
+ if !m.contains_key(&k) {
+ unsafe { asm!("nop") }
+ m.insert(k, v);
+ }
+
+ // ok, different keys.
+ if !m.contains_key(&k) {
+ m.insert(k2, v);
+ }
+
+ // ok, different maps
+ if !m.contains_key(&k) {
+ m2.insert(k, v);
+ }
+
+ // ok, insert in macro
+ if !m.contains_key(&k) {
+ insert!(m, k, v);
+ }
+
+ // or_insert_with. Partial move of a local declared in the closure is ok.
+ m.entry(k).or_insert_with(|| {
+ let x = (String::new(), String::new());
+ let _ = x.0;
+ v
+ });
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/entry.rs b/src/tools/clippy/tests/ui/entry.rs
new file mode 100644
index 000000000..d999b3b7d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry.rs
@@ -0,0 +1,158 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_pass_by_value, clippy::collapsible_if)]
+#![warn(clippy::map_entry)]
+
+use std::arch::asm;
+use std::collections::HashMap;
+use std::hash::Hash;
+
+macro_rules! m {
+ ($e:expr) => {{ $e }};
+}
+
+macro_rules! insert {
+ ($map:expr, $key:expr, $val:expr) => {
+ $map.insert($key, $val)
+ };
+}
+
+fn foo() {}
+
+fn hash_map<K: Eq + Hash + Copy, V: Copy>(m: &mut HashMap<K, V>, m2: &mut HashMap<K, V>, k: K, k2: K, v: V, v2: V) {
+ // or_insert(v)
+ if !m.contains_key(&k) {
+ m.insert(k, v);
+ }
+
+ // semicolon on insert, use or_insert_with(..)
+ if !m.contains_key(&k) {
+ if true {
+ m.insert(k, v);
+ } else {
+ m.insert(k, v2);
+ }
+ }
+
+ // semicolon on if, use or_insert_with(..)
+ if !m.contains_key(&k) {
+ if true {
+ m.insert(k, v)
+ } else {
+ m.insert(k, v2)
+ };
+ }
+
+ // early return, use if let
+ if !m.contains_key(&k) {
+ if true {
+ m.insert(k, v);
+ } else {
+ m.insert(k, v2);
+ return;
+ }
+ }
+
+ // use or_insert_with(..)
+ if !m.contains_key(&k) {
+ foo();
+ m.insert(k, v);
+ }
+
+ // semicolon on insert and match, use or_insert_with(..)
+ if !m.contains_key(&k) {
+ match 0 {
+ 1 if true => {
+ m.insert(k, v);
+ },
+ _ => {
+ m.insert(k, v2);
+ },
+ };
+ }
+
+ // one branch doesn't insert, use if let
+ if !m.contains_key(&k) {
+ match 0 {
+ 0 => foo(),
+ _ => {
+ m.insert(k, v2);
+ },
+ };
+ }
+
+ // use or_insert_with
+ if !m.contains_key(&k) {
+ foo();
+ match 0 {
+ 0 if false => {
+ m.insert(k, v);
+ },
+ 1 => {
+ foo();
+ m.insert(k, v);
+ },
+ 2 | 3 => {
+ for _ in 0..2 {
+ foo();
+ }
+ if true {
+ m.insert(k, v);
+ } else {
+ m.insert(k, v2);
+ };
+ },
+ _ => {
+ m.insert(k, v2);
+ },
+ }
+ }
+
+ // ok, insert in loop
+ if !m.contains_key(&k) {
+ for _ in 0..2 {
+ m.insert(k, v);
+ }
+ }
+
+ // macro_expansion test, use or_insert(..)
+ if !m.contains_key(&m!(k)) {
+ m.insert(m!(k), m!(v));
+ }
+
+ // ok, map used before insertion
+ if !m.contains_key(&k) {
+ let _ = m.len();
+ m.insert(k, v);
+ }
+
+ // ok, inline asm
+ if !m.contains_key(&k) {
+ unsafe { asm!("nop") }
+ m.insert(k, v);
+ }
+
+ // ok, different keys.
+ if !m.contains_key(&k) {
+ m.insert(k2, v);
+ }
+
+ // ok, different maps
+ if !m.contains_key(&k) {
+ m2.insert(k, v);
+ }
+
+ // ok, insert in macro
+ if !m.contains_key(&k) {
+ insert!(m, k, v);
+ }
+
+ // or_insert_with. Partial move of a local declared in the closure is ok.
+ if !m.contains_key(&k) {
+ let x = (String::new(), String::new());
+ let _ = x.0;
+ m.insert(k, v);
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/entry.stderr b/src/tools/clippy/tests/ui/entry.stderr
new file mode 100644
index 000000000..2ef996652
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry.stderr
@@ -0,0 +1,217 @@
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:24:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | m.insert(k, v);
+LL | | }
+ | |_____^ help: try this: `m.entry(k).or_insert(v);`
+ |
+ = note: `-D clippy::map-entry` implied by `-D warnings`
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:29:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | if true {
+LL | | m.insert(k, v);
+LL | | } else {
+LL | | m.insert(k, v2);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ m.entry(k).or_insert_with(|| {
+LL + if true {
+LL + v
+LL + } else {
+LL + v2
+LL + }
+LL + });
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:38:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | if true {
+LL | | m.insert(k, v)
+LL | | } else {
+LL | | m.insert(k, v2)
+LL | | };
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ m.entry(k).or_insert_with(|| {
+LL + if true {
+LL + v
+LL + } else {
+LL + v2
+LL + }
+LL + });
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:47:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | if true {
+LL | | m.insert(k, v);
+LL | | } else {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
+LL + if true {
+LL + e.insert(v);
+LL + } else {
+LL + e.insert(v2);
+LL + return;
+LL + }
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:57:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | foo();
+LL | | m.insert(k, v);
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ m.entry(k).or_insert_with(|| {
+LL + foo();
+LL + v
+LL + });
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:63:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | match 0 {
+LL | | 1 if true => {
+LL | | m.insert(k, v);
+... |
+LL | | };
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ m.entry(k).or_insert_with(|| {
+LL + match 0 {
+LL + 1 if true => {
+LL + v
+LL + },
+LL + _ => {
+LL + v2
+LL + },
+LL + }
+LL + });
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:75:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | match 0 {
+LL | | 0 => foo(),
+LL | | _ => {
+... |
+LL | | };
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
+LL + match 0 {
+LL + 0 => foo(),
+LL + _ => {
+LL + e.insert(v2);
+LL + },
+LL + };
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:85:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | foo();
+LL | | match 0 {
+LL | | 0 if false => {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ m.entry(k).or_insert_with(|| {
+LL + foo();
+LL + match 0 {
+LL + 0 if false => {
+LL + v
+LL + },
+LL + 1 => {
+LL + foo();
+LL + v
+LL + },
+LL + 2 | 3 => {
+LL + for _ in 0..2 {
+LL + foo();
+LL + }
+LL + if true {
+LL + v
+LL + } else {
+LL + v2
+LL + }
+LL + },
+LL + _ => {
+LL + v2
+LL + },
+LL + }
+LL + });
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:119:5
+ |
+LL | / if !m.contains_key(&m!(k)) {
+LL | | m.insert(m!(k), m!(v));
+LL | | }
+ | |_____^ help: try this: `m.entry(m!(k)).or_insert_with(|| m!(v));`
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry.rs:151:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | let x = (String::new(), String::new());
+LL | | let _ = x.0;
+LL | | m.insert(k, v);
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ m.entry(k).or_insert_with(|| {
+LL + let x = (String::new(), String::new());
+LL + let _ = x.0;
+LL + v
+LL + });
+ |
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/entry_btree.fixed b/src/tools/clippy/tests/ui/entry_btree.fixed
new file mode 100644
index 000000000..949791045
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry_btree.fixed
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::map_entry)]
+#![allow(dead_code)]
+
+use std::collections::BTreeMap;
+
+fn foo() {}
+
+fn btree_map<K: Eq + Ord + Copy, V: Copy>(m: &mut BTreeMap<K, V>, k: K, v: V) {
+ // insert then do something, use if let
+ if let std::collections::btree_map::Entry::Vacant(e) = m.entry(k) {
+ e.insert(v);
+ foo();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/entry_btree.rs b/src/tools/clippy/tests/ui/entry_btree.rs
new file mode 100644
index 000000000..080c1d959
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry_btree.rs
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::map_entry)]
+#![allow(dead_code)]
+
+use std::collections::BTreeMap;
+
+fn foo() {}
+
+fn btree_map<K: Eq + Ord + Copy, V: Copy>(m: &mut BTreeMap<K, V>, k: K, v: V) {
+ // insert then do something, use if let
+ if !m.contains_key(&k) {
+ m.insert(k, v);
+ foo();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/entry_btree.stderr b/src/tools/clippy/tests/ui/entry_btree.stderr
new file mode 100644
index 000000000..5c6fcdf1a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry_btree.stderr
@@ -0,0 +1,20 @@
+error: usage of `contains_key` followed by `insert` on a `BTreeMap`
+ --> $DIR/entry_btree.rs:12:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | m.insert(k, v);
+LL | | foo();
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::map-entry` implied by `-D warnings`
+help: try this
+ |
+LL ~ if let std::collections::btree_map::Entry::Vacant(e) = m.entry(k) {
+LL + e.insert(v);
+LL + foo();
+LL + }
+ |
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/entry_with_else.fixed b/src/tools/clippy/tests/ui/entry_with_else.fixed
new file mode 100644
index 000000000..2332fa631
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry_with_else.fixed
@@ -0,0 +1,73 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_pass_by_value, clippy::collapsible_if)]
+#![warn(clippy::map_entry)]
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hash;
+
+macro_rules! m {
+ ($e:expr) => {{ $e }};
+}
+
+fn foo() {}
+
+fn insert_if_absent0<K: Eq + Hash + Copy, V: Copy>(m: &mut HashMap<K, V>, k: K, v: V, v2: V) {
+ match m.entry(k) {
+ std::collections::hash_map::Entry::Vacant(e) => {
+ e.insert(v);
+ }
+ std::collections::hash_map::Entry::Occupied(mut e) => {
+ e.insert(v2);
+ }
+ }
+
+ match m.entry(k) {
+ std::collections::hash_map::Entry::Occupied(mut e) => {
+ e.insert(v);
+ }
+ std::collections::hash_map::Entry::Vacant(e) => {
+ e.insert(v2);
+ }
+ }
+
+ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
+ e.insert(v);
+ } else {
+ foo();
+ }
+
+ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) {
+ e.insert(v);
+ } else {
+ foo();
+ }
+
+ match m.entry(k) {
+ std::collections::hash_map::Entry::Vacant(e) => {
+ e.insert(v);
+ }
+ std::collections::hash_map::Entry::Occupied(mut e) => {
+ e.insert(v2);
+ }
+ }
+
+ match m.entry(k) {
+ std::collections::hash_map::Entry::Occupied(mut e) => {
+ if true { Some(e.insert(v)) } else { Some(e.insert(v2)) }
+ }
+ std::collections::hash_map::Entry::Vacant(e) => {
+ e.insert(v);
+ None
+ }
+ };
+
+ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) {
+ foo();
+ Some(e.insert(v))
+ } else {
+ None
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/entry_with_else.rs b/src/tools/clippy/tests/ui/entry_with_else.rs
new file mode 100644
index 000000000..2ff0c038e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry_with_else.rs
@@ -0,0 +1,60 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_pass_by_value, clippy::collapsible_if)]
+#![warn(clippy::map_entry)]
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hash;
+
+macro_rules! m {
+ ($e:expr) => {{ $e }};
+}
+
+fn foo() {}
+
+fn insert_if_absent0<K: Eq + Hash + Copy, V: Copy>(m: &mut HashMap<K, V>, k: K, v: V, v2: V) {
+ if !m.contains_key(&k) {
+ m.insert(k, v);
+ } else {
+ m.insert(k, v2);
+ }
+
+ if m.contains_key(&k) {
+ m.insert(k, v);
+ } else {
+ m.insert(k, v2);
+ }
+
+ if !m.contains_key(&k) {
+ m.insert(k, v);
+ } else {
+ foo();
+ }
+
+ if !m.contains_key(&k) {
+ foo();
+ } else {
+ m.insert(k, v);
+ }
+
+ if !m.contains_key(&k) {
+ m.insert(k, v);
+ } else {
+ m.insert(k, v2);
+ }
+
+ if m.contains_key(&k) {
+ if true { m.insert(k, v) } else { m.insert(k, v2) }
+ } else {
+ m.insert(k, v)
+ };
+
+ if m.contains_key(&k) {
+ foo();
+ m.insert(k, v)
+ } else {
+ None
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/entry_with_else.stderr b/src/tools/clippy/tests/ui/entry_with_else.stderr
new file mode 100644
index 000000000..e0f6671b4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/entry_with_else.stderr
@@ -0,0 +1,151 @@
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:16:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | m.insert(k, v);
+LL | | } else {
+LL | | m.insert(k, v2);
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::map-entry` implied by `-D warnings`
+help: try this
+ |
+LL ~ match m.entry(k) {
+LL + std::collections::hash_map::Entry::Vacant(e) => {
+LL + e.insert(v);
+LL + }
+LL + std::collections::hash_map::Entry::Occupied(mut e) => {
+LL + e.insert(v2);
+LL + }
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:22:5
+ |
+LL | / if m.contains_key(&k) {
+LL | | m.insert(k, v);
+LL | | } else {
+LL | | m.insert(k, v2);
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ match m.entry(k) {
+LL + std::collections::hash_map::Entry::Occupied(mut e) => {
+LL + e.insert(v);
+LL + }
+LL + std::collections::hash_map::Entry::Vacant(e) => {
+LL + e.insert(v2);
+LL + }
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:28:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | m.insert(k, v);
+LL | | } else {
+LL | | foo();
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
+LL + e.insert(v);
+LL + } else {
+LL + foo();
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:34:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | foo();
+LL | | } else {
+LL | | m.insert(k, v);
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) {
+LL + e.insert(v);
+LL + } else {
+LL + foo();
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:40:5
+ |
+LL | / if !m.contains_key(&k) {
+LL | | m.insert(k, v);
+LL | | } else {
+LL | | m.insert(k, v2);
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ match m.entry(k) {
+LL + std::collections::hash_map::Entry::Vacant(e) => {
+LL + e.insert(v);
+LL + }
+LL + std::collections::hash_map::Entry::Occupied(mut e) => {
+LL + e.insert(v2);
+LL + }
+LL + }
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:46:5
+ |
+LL | / if m.contains_key(&k) {
+LL | | if true { m.insert(k, v) } else { m.insert(k, v2) }
+LL | | } else {
+LL | | m.insert(k, v)
+LL | | };
+ | |_____^
+ |
+help: try this
+ |
+LL ~ match m.entry(k) {
+LL + std::collections::hash_map::Entry::Occupied(mut e) => {
+LL + if true { Some(e.insert(v)) } else { Some(e.insert(v2)) }
+LL + }
+LL + std::collections::hash_map::Entry::Vacant(e) => {
+LL + e.insert(v);
+LL + None
+LL + }
+LL ~ };
+ |
+
+error: usage of `contains_key` followed by `insert` on a `HashMap`
+ --> $DIR/entry_with_else.rs:52:5
+ |
+LL | / if m.contains_key(&k) {
+LL | | foo();
+LL | | m.insert(k, v)
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) {
+LL + foo();
+LL + Some(e.insert(v))
+LL + } else {
+LL + None
+LL ~ };
+ |
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/enum_clike_unportable_variant.rs b/src/tools/clippy/tests/ui/enum_clike_unportable_variant.rs
new file mode 100644
index 000000000..7d6842f5b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_clike_unportable_variant.rs
@@ -0,0 +1,50 @@
+// ignore-x86
+
+#![warn(clippy::enum_clike_unportable_variant)]
+#![allow(unused, non_upper_case_globals)]
+
+#[repr(usize)]
+enum NonPortable {
+ X = 0x1_0000_0000,
+ Y = 0,
+ Z = 0x7FFF_FFFF,
+ A = 0xFFFF_FFFF,
+}
+
+enum NonPortableNoHint {
+ X = 0x1_0000_0000,
+ Y = 0,
+ Z = 0x7FFF_FFFF,
+ A = 0xFFFF_FFFF,
+}
+
+#[repr(isize)]
+enum NonPortableSigned {
+ X = -1,
+ Y = 0x7FFF_FFFF,
+ Z = 0xFFFF_FFFF,
+ A = 0x1_0000_0000,
+ B = i32::MIN as isize,
+ C = (i32::MIN as isize) - 1,
+}
+
+enum NonPortableSignedNoHint {
+ X = -1,
+ Y = 0x7FFF_FFFF,
+ Z = 0xFFFF_FFFF,
+ A = 0x1_0000_0000,
+}
+
+#[repr(usize)]
+enum NonPortable2 {
+ X = <usize as Trait>::Number,
+ Y = 0,
+}
+
+trait Trait {
+ const Number: usize = 0x1_0000_0000;
+}
+
+impl Trait for usize {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/enum_clike_unportable_variant.stderr b/src/tools/clippy/tests/ui/enum_clike_unportable_variant.stderr
new file mode 100644
index 000000000..5935eea5e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_clike_unportable_variant.stderr
@@ -0,0 +1,58 @@
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:8:5
+ |
+LL | X = 0x1_0000_0000,
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::enum-clike-unportable-variant` implied by `-D warnings`
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:15:5
+ |
+LL | X = 0x1_0000_0000,
+ | ^^^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:18:5
+ |
+LL | A = 0xFFFF_FFFF,
+ | ^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:25:5
+ |
+LL | Z = 0xFFFF_FFFF,
+ | ^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:26:5
+ |
+LL | A = 0x1_0000_0000,
+ | ^^^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:28:5
+ |
+LL | C = (i32::MIN as isize) - 1,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:34:5
+ |
+LL | Z = 0xFFFF_FFFF,
+ | ^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:35:5
+ |
+LL | A = 0x1_0000_0000,
+ | ^^^^^^^^^^^^^^^^^
+
+error: C-like enum variant discriminant is not portable to 32-bit targets
+ --> $DIR/enum_clike_unportable_variant.rs:40:5
+ |
+LL | X = <usize as Trait>::Number,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/enum_glob_use.fixed b/src/tools/clippy/tests/ui/enum_glob_use.fixed
new file mode 100644
index 000000000..a98216758
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_glob_use.fixed
@@ -0,0 +1,30 @@
+// run-rustfix
+
+#![warn(clippy::enum_glob_use)]
+#![allow(unused)]
+#![warn(unused_imports)]
+
+use std::cmp::Ordering::Less;
+
+enum Enum {
+ Foo,
+}
+
+use self::Enum::Foo;
+
+mod in_fn_test {
+ fn blarg() {
+ use crate::Enum::Foo;
+
+ let _ = Foo;
+ }
+}
+
+mod blurg {
+ pub use std::cmp::Ordering::*; // ok, re-export
+}
+
+fn main() {
+ let _ = Foo;
+ let _ = Less;
+}
diff --git a/src/tools/clippy/tests/ui/enum_glob_use.rs b/src/tools/clippy/tests/ui/enum_glob_use.rs
new file mode 100644
index 000000000..5d929c973
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_glob_use.rs
@@ -0,0 +1,30 @@
+// run-rustfix
+
+#![warn(clippy::enum_glob_use)]
+#![allow(unused)]
+#![warn(unused_imports)]
+
+use std::cmp::Ordering::*;
+
+enum Enum {
+ Foo,
+}
+
+use self::Enum::*;
+
+mod in_fn_test {
+ fn blarg() {
+ use crate::Enum::*;
+
+ let _ = Foo;
+ }
+}
+
+mod blurg {
+ pub use std::cmp::Ordering::*; // ok, re-export
+}
+
+fn main() {
+ let _ = Foo;
+ let _ = Less;
+}
diff --git a/src/tools/clippy/tests/ui/enum_glob_use.stderr b/src/tools/clippy/tests/ui/enum_glob_use.stderr
new file mode 100644
index 000000000..69531aed3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_glob_use.stderr
@@ -0,0 +1,22 @@
+error: usage of wildcard import for enum variants
+ --> $DIR/enum_glob_use.rs:7:5
+ |
+LL | use std::cmp::Ordering::*;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `std::cmp::Ordering::Less`
+ |
+ = note: `-D clippy::enum-glob-use` implied by `-D warnings`
+
+error: usage of wildcard import for enum variants
+ --> $DIR/enum_glob_use.rs:13:5
+ |
+LL | use self::Enum::*;
+ | ^^^^^^^^^^^^^ help: try: `self::Enum::Foo`
+
+error: usage of wildcard import for enum variants
+ --> $DIR/enum_glob_use.rs:17:13
+ |
+LL | use crate::Enum::*;
+ | ^^^^^^^^^^^^^^ help: try: `crate::Enum::Foo`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/enum_variants.rs b/src/tools/clippy/tests/ui/enum_variants.rs
new file mode 100644
index 000000000..efed12ee2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_variants.rs
@@ -0,0 +1,182 @@
+#![warn(clippy::enum_variant_names)]
+#![allow(non_camel_case_types, clippy::upper_case_acronyms)]
+
+enum FakeCallType {
+ CALL,
+ CREATE,
+}
+
+enum FakeCallType2 {
+ CALL,
+ CREATELL,
+}
+
+enum Foo {
+ cFoo,
+ cBar,
+ cBaz,
+}
+
+enum Fooo {
+ cFoo, // no error, threshold is 3 variants by default
+ cBar,
+}
+
+enum Food {
+ FoodGood,
+ FoodMiddle,
+ FoodBad,
+}
+
+enum Stuff {
+ StuffBad, // no error
+}
+
+enum BadCallType {
+ CallTypeCall,
+ CallTypeCreate,
+ CallTypeDestroy,
+}
+
+enum TwoCallType {
+ // no error
+ CallTypeCall,
+ CallTypeCreate,
+}
+
+enum Consts {
+ ConstantInt,
+ ConstantCake,
+ ConstantLie,
+}
+
+enum Two {
+ // no error here
+ ConstantInt,
+ ConstantInfer,
+}
+
+enum Something {
+ CCall,
+ CCreate,
+ CCryogenize,
+}
+
+enum Seal {
+ With,
+ Without,
+}
+
+enum Seall {
+ With,
+ WithOut,
+ Withbroken,
+}
+
+enum Sealll {
+ With,
+ WithOut,
+}
+
+enum Seallll {
+ WithOutCake,
+ WithOutTea,
+ WithOut,
+}
+
+enum NonCaps {
+ Prefix的,
+ PrefixTea,
+ PrefixCake,
+}
+
+pub enum PubSeall {
+ WithOutCake,
+ WithOutTea,
+ WithOut,
+}
+
+#[allow(clippy::enum_variant_names)]
+pub mod allowed {
+ pub enum PubAllowed {
+ SomeThis,
+ SomeThat,
+ SomeOtherWhat,
+ }
+}
+
+// should not lint
+enum Pat {
+ Foo,
+ Bar,
+ Path,
+}
+
+// should not lint
+enum N {
+ Pos,
+ Neg,
+ Float,
+}
+
+// should not lint
+enum Peek {
+ Peek1,
+ Peek2,
+ Peek3,
+}
+
+// should not lint
+pub enum NetworkLayer {
+ Layer2,
+ Layer3,
+}
+
+// should lint suggesting `IData`, not only `Data` (see #4639)
+enum IDataRequest {
+ PutIData(String),
+ GetIData(String),
+ DeleteUnpubIData(String),
+}
+
+enum HIDataRequest {
+ PutHIData(String),
+ GetHIData(String),
+ DeleteUnpubHIData(String),
+}
+
+enum North {
+ Normal,
+ NoLeft,
+ NoRight,
+}
+
+// #8324
+enum Phase {
+ PreLookup,
+ Lookup,
+ PostLookup,
+}
+
+mod issue9018 {
+ enum DoLint {
+ _TypeCreate,
+ _TypeRead,
+ _TypeUpdate,
+ _TypeDestroy,
+ }
+
+ enum DoLintToo {
+ _CreateType,
+ _UpdateType,
+ _DeleteType,
+ }
+
+ enum DoNotLint {
+ _Foo,
+ _Bar,
+ _Baz,
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/enum_variants.stderr b/src/tools/clippy/tests/ui/enum_variants.stderr
new file mode 100644
index 000000000..7342aff80
--- /dev/null
+++ b/src/tools/clippy/tests/ui/enum_variants.stderr
@@ -0,0 +1,149 @@
+error: variant name ends with the enum's name
+ --> $DIR/enum_variants.rs:15:5
+ |
+LL | cFoo,
+ | ^^^^
+ |
+ = note: `-D clippy::enum-variant-names` implied by `-D warnings`
+
+error: all variants have the same prefix: `c`
+ --> $DIR/enum_variants.rs:14:1
+ |
+LL | / enum Foo {
+LL | | cFoo,
+LL | | cBar,
+LL | | cBaz,
+LL | | }
+ | |_^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: variant name starts with the enum's name
+ --> $DIR/enum_variants.rs:26:5
+ |
+LL | FoodGood,
+ | ^^^^^^^^
+
+error: variant name starts with the enum's name
+ --> $DIR/enum_variants.rs:27:5
+ |
+LL | FoodMiddle,
+ | ^^^^^^^^^^
+
+error: variant name starts with the enum's name
+ --> $DIR/enum_variants.rs:28:5
+ |
+LL | FoodBad,
+ | ^^^^^^^
+
+error: all variants have the same prefix: `Food`
+ --> $DIR/enum_variants.rs:25:1
+ |
+LL | / enum Food {
+LL | | FoodGood,
+LL | | FoodMiddle,
+LL | | FoodBad,
+LL | | }
+ | |_^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same prefix: `CallType`
+ --> $DIR/enum_variants.rs:35:1
+ |
+LL | / enum BadCallType {
+LL | | CallTypeCall,
+LL | | CallTypeCreate,
+LL | | CallTypeDestroy,
+LL | | }
+ | |_^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same prefix: `Constant`
+ --> $DIR/enum_variants.rs:47:1
+ |
+LL | / enum Consts {
+LL | | ConstantInt,
+LL | | ConstantCake,
+LL | | ConstantLie,
+LL | | }
+ | |_^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same prefix: `C`
+ --> $DIR/enum_variants.rs:59:1
+ |
+LL | / enum Something {
+LL | | CCall,
+LL | | CCreate,
+LL | | CCryogenize,
+LL | | }
+ | |_^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same prefix: `WithOut`
+ --> $DIR/enum_variants.rs:81:1
+ |
+LL | / enum Seallll {
+LL | | WithOutCake,
+LL | | WithOutTea,
+LL | | WithOut,
+LL | | }
+ | |_^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same postfix: `IData`
+ --> $DIR/enum_variants.rs:136:1
+ |
+LL | / enum IDataRequest {
+LL | | PutIData(String),
+LL | | GetIData(String),
+LL | | DeleteUnpubIData(String),
+LL | | }
+ | |_^
+ |
+ = help: remove the postfixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same postfix: `HIData`
+ --> $DIR/enum_variants.rs:142:1
+ |
+LL | / enum HIDataRequest {
+LL | | PutHIData(String),
+LL | | GetHIData(String),
+LL | | DeleteUnpubHIData(String),
+LL | | }
+ | |_^
+ |
+ = help: remove the postfixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same prefix: `_Type`
+ --> $DIR/enum_variants.rs:162:5
+ |
+LL | / enum DoLint {
+LL | | _TypeCreate,
+LL | | _TypeRead,
+LL | | _TypeUpdate,
+LL | | _TypeDestroy,
+LL | | }
+ | |_____^
+ |
+ = help: remove the prefixes and use full paths to the variants instead of glob imports
+
+error: all variants have the same postfix: `Type`
+ --> $DIR/enum_variants.rs:169:5
+ |
+LL | / enum DoLintToo {
+LL | | _CreateType,
+LL | | _UpdateType,
+LL | | _DeleteType,
+LL | | }
+ | |_____^
+ |
+ = help: remove the postfixes and use full paths to the variants instead of glob imports
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/eprint_with_newline.rs b/src/tools/clippy/tests/ui/eprint_with_newline.rs
new file mode 100644
index 000000000..8df32649a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eprint_with_newline.rs
@@ -0,0 +1,49 @@
+#![allow(clippy::print_literal)]
+#![warn(clippy::print_with_newline)]
+
+fn main() {
+ eprint!("Hello\n");
+ eprint!("Hello {}\n", "world");
+ eprint!("Hello {} {}\n", "world", "#2");
+ eprint!("{}\n", 1265);
+ eprint!("\n");
+
+ // these are all fine
+ eprint!("");
+ eprint!("Hello");
+ eprintln!("Hello");
+ eprintln!("Hello\n");
+ eprintln!("Hello {}\n", "world");
+ eprint!("Issue\n{}", 1265);
+ eprint!("{}", 1265);
+ eprint!("\n{}", 1275);
+ eprint!("\n\n");
+ eprint!("like eof\n\n");
+ eprint!("Hello {} {}\n\n", "world", "#2");
+ eprintln!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
+ eprintln!("\nbla\n\n"); // #3126
+
+ // Escaping
+ eprint!("\\n"); // #3514
+ eprint!("\\\n"); // should fail
+ eprint!("\\\\n");
+
+ // Raw strings
+ eprint!(r"\n"); // #3778
+
+ // Literal newlines should also fail
+ eprint!(
+ "
+"
+ );
+ eprint!(
+ r"
+"
+ );
+
+ // Don't warn on CRLF (#4208)
+ eprint!("\r\n");
+ eprint!("foo\r\n");
+ eprint!("\\r\n"); //~ ERROR
+ eprint!("foo\rbar\n") // ~ ERROR
+}
diff --git a/src/tools/clippy/tests/ui/eprint_with_newline.stderr b/src/tools/clippy/tests/ui/eprint_with_newline.stderr
new file mode 100644
index 000000000..f137787bf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eprint_with_newline.stderr
@@ -0,0 +1,129 @@
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:5:5
+ |
+LL | eprint!("Hello/n");
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::print-with-newline` implied by `-D warnings`
+help: use `eprintln!` instead
+ |
+LL - eprint!("Hello/n");
+LL + eprintln!("Hello");
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:6:5
+ |
+LL | eprint!("Hello {}/n", "world");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("Hello {}/n", "world");
+LL + eprintln!("Hello {}", "world");
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:7:5
+ |
+LL | eprint!("Hello {} {}/n", "world", "#2");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("Hello {} {}/n", "world", "#2");
+LL + eprintln!("Hello {} {}", "world", "#2");
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:8:5
+ |
+LL | eprint!("{}/n", 1265);
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("{}/n", 1265);
+LL + eprintln!("{}", 1265);
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:9:5
+ |
+LL | eprint!("/n");
+ | ^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("/n");
+LL + eprintln!();
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:28:5
+ |
+LL | eprint!("//n"); // should fail
+ | ^^^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("//n"); // should fail
+LL + eprintln!("/"); // should fail
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:35:5
+ |
+LL | / eprint!(
+LL | | "
+LL | | "
+LL | | );
+ | |_____^
+ |
+help: use `eprintln!` instead
+ |
+LL ~ eprintln!(
+LL ~ ""
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:39:5
+ |
+LL | / eprint!(
+LL | | r"
+LL | | "
+LL | | );
+ | |_____^
+ |
+help: use `eprintln!` instead
+ |
+LL ~ eprintln!(
+LL ~ r""
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:47:5
+ |
+LL | eprint!("/r/n"); //~ ERROR
+ | ^^^^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("/r/n"); //~ ERROR
+LL + eprintln!("/r"); //~ ERROR
+ |
+
+error: using `eprint!()` with a format string that ends in a single newline
+ --> $DIR/eprint_with_newline.rs:48:5
+ |
+LL | eprint!("foo/rbar/n") // ~ ERROR
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `eprintln!` instead
+ |
+LL - eprint!("foo/rbar/n") // ~ ERROR
+LL + eprintln!("foo/rbar") // ~ ERROR
+ |
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/eq_op.rs b/src/tools/clippy/tests/ui/eq_op.rs
new file mode 100644
index 000000000..422f94865
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eq_op.rs
@@ -0,0 +1,108 @@
+// compile-flags: --test
+
+#![warn(clippy::eq_op)]
+#![allow(clippy::double_parens, clippy::identity_op, clippy::nonminimal_bool)]
+
+fn main() {
+ // simple values and comparisons
+ let _ = 1 == 1;
+ let _ = "no" == "no";
+ // even though I agree that no means no ;-)
+ let _ = false != false;
+ let _ = 1.5 < 1.5;
+ let _ = 1u64 >= 1u64;
+
+ // casts, methods, parentheses
+ let _ = (1u32 as u64) & (1u32 as u64);
+ #[rustfmt::skip]
+ {
+ let _ = 1 ^ ((((((1))))));
+ };
+
+ // unary and binary operators
+ let _ = (-(2) < -(2));
+ let _ = ((1 + 1) & (1 + 1) == (1 + 1) & (1 + 1));
+ let _ = (1 * 2) + (3 * 4) == 1 * 2 + 3 * 4;
+
+ // various other things
+ let _ = ([1] != [1]);
+ let _ = ((1, 2) != (1, 2));
+ let _ = vec![1, 2, 3] == vec![1, 2, 3]; //no error yet, as we don't match macros
+
+ // const folding
+ let _ = 1 + 1 == 2;
+ let _ = 1 - 1 == 0;
+
+ let _ = 1 - 1;
+ let _ = 1 / 1;
+ let _ = true && true;
+
+ let _ = true || true;
+
+ let a: u32 = 0;
+ let b: u32 = 0;
+
+ let _ = a == b && b == a;
+ let _ = a != b && b != a;
+ let _ = a < b && b > a;
+ let _ = a <= b && b >= a;
+
+ let mut a = vec![1];
+ let _ = a == a;
+ let _ = 2 * a.len() == 2 * a.len(); // ok, functions
+ let _ = a.pop() == a.pop(); // ok, functions
+
+ check_ignore_macro();
+
+ // named constants
+ const A: u32 = 10;
+ const B: u32 = 10;
+ const C: u32 = A / B; // ok, different named constants
+ const D: u32 = A / A;
+}
+
+macro_rules! check_if_named_foo {
+ ($expression:expr) => {
+ if stringify!($expression) == "foo" {
+ println!("foo!");
+ } else {
+ println!("not foo.");
+ }
+ };
+}
+
+macro_rules! bool_macro {
+ ($expression:expr) => {
+ true
+ };
+}
+
+fn check_ignore_macro() {
+ check_if_named_foo!(foo);
+ // checks if the lint ignores macros with `!` operator
+ let _ = !bool_macro!(1) && !bool_macro!("");
+}
+
+struct Nested {
+ inner: ((i32,), (i32,), (i32,)),
+}
+
+fn check_nested(n1: &Nested, n2: &Nested) -> bool {
+ // `n2.inner.0.0` mistyped as `n1.inner.0.0`
+ (n1.inner.0).0 == (n1.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.2).0
+}
+
+#[test]
+fn eq_op_shouldnt_trigger_in_tests() {
+ let a = 1;
+ let result = a + 1 == 1 + a;
+ assert!(result);
+}
+
+#[test]
+fn eq_op_macros_shouldnt_trigger_in_tests() {
+ let a = 1;
+ let b = 2;
+ assert_eq!(a, a);
+ assert_eq!(a + b, b + a);
+}
diff --git a/src/tools/clippy/tests/ui/eq_op.stderr b/src/tools/clippy/tests/ui/eq_op.stderr
new file mode 100644
index 000000000..313ceed2b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eq_op.stderr
@@ -0,0 +1,172 @@
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:8:13
+ |
+LL | let _ = 1 == 1;
+ | ^^^^^^
+ |
+ = note: `-D clippy::eq-op` implied by `-D warnings`
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:9:13
+ |
+LL | let _ = "no" == "no";
+ | ^^^^^^^^^^^^
+
+error: equal expressions as operands to `!=`
+ --> $DIR/eq_op.rs:11:13
+ |
+LL | let _ = false != false;
+ | ^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `<`
+ --> $DIR/eq_op.rs:12:13
+ |
+LL | let _ = 1.5 < 1.5;
+ | ^^^^^^^^^
+
+error: equal expressions as operands to `>=`
+ --> $DIR/eq_op.rs:13:13
+ |
+LL | let _ = 1u64 >= 1u64;
+ | ^^^^^^^^^^^^
+
+error: equal expressions as operands to `&`
+ --> $DIR/eq_op.rs:16:13
+ |
+LL | let _ = (1u32 as u64) & (1u32 as u64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `^`
+ --> $DIR/eq_op.rs:19:17
+ |
+LL | let _ = 1 ^ ((((((1))))));
+ | ^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `<`
+ --> $DIR/eq_op.rs:23:13
+ |
+LL | let _ = (-(2) < -(2));
+ | ^^^^^^^^^^^^^
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:24:13
+ |
+LL | let _ = ((1 + 1) & (1 + 1) == (1 + 1) & (1 + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `&`
+ --> $DIR/eq_op.rs:24:14
+ |
+LL | let _ = ((1 + 1) & (1 + 1) == (1 + 1) & (1 + 1));
+ | ^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `&`
+ --> $DIR/eq_op.rs:24:35
+ |
+LL | let _ = ((1 + 1) & (1 + 1) == (1 + 1) & (1 + 1));
+ | ^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:25:13
+ |
+LL | let _ = (1 * 2) + (3 * 4) == 1 * 2 + 3 * 4;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `!=`
+ --> $DIR/eq_op.rs:28:13
+ |
+LL | let _ = ([1] != [1]);
+ | ^^^^^^^^^^^^
+
+error: equal expressions as operands to `!=`
+ --> $DIR/eq_op.rs:29:13
+ |
+LL | let _ = ((1, 2) != (1, 2));
+ | ^^^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:33:13
+ |
+LL | let _ = 1 + 1 == 2;
+ | ^^^^^^^^^^
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:34:13
+ |
+LL | let _ = 1 - 1 == 0;
+ | ^^^^^^^^^^
+
+error: equal expressions as operands to `-`
+ --> $DIR/eq_op.rs:34:13
+ |
+LL | let _ = 1 - 1 == 0;
+ | ^^^^^
+
+error: equal expressions as operands to `-`
+ --> $DIR/eq_op.rs:36:13
+ |
+LL | let _ = 1 - 1;
+ | ^^^^^
+
+error: equal expressions as operands to `/`
+ --> $DIR/eq_op.rs:37:13
+ |
+LL | let _ = 1 / 1;
+ | ^^^^^
+
+error: equal expressions as operands to `&&`
+ --> $DIR/eq_op.rs:38:13
+ |
+LL | let _ = true && true;
+ | ^^^^^^^^^^^^
+
+error: equal expressions as operands to `||`
+ --> $DIR/eq_op.rs:40:13
+ |
+LL | let _ = true || true;
+ | ^^^^^^^^^^^^
+
+error: equal expressions as operands to `&&`
+ --> $DIR/eq_op.rs:45:13
+ |
+LL | let _ = a == b && b == a;
+ | ^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `&&`
+ --> $DIR/eq_op.rs:46:13
+ |
+LL | let _ = a != b && b != a;
+ | ^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `&&`
+ --> $DIR/eq_op.rs:47:13
+ |
+LL | let _ = a < b && b > a;
+ | ^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `&&`
+ --> $DIR/eq_op.rs:48:13
+ |
+LL | let _ = a <= b && b >= a;
+ | ^^^^^^^^^^^^^^^^
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:51:13
+ |
+LL | let _ = a == a;
+ | ^^^^^^
+
+error: equal expressions as operands to `/`
+ --> $DIR/eq_op.rs:61:20
+ |
+LL | const D: u32 = A / A;
+ | ^^^^^
+
+error: equal expressions as operands to `==`
+ --> $DIR/eq_op.rs:92:5
+ |
+LL | (n1.inner.0).0 == (n1.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.2).0
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 28 previous errors
+
diff --git a/src/tools/clippy/tests/ui/eq_op_macros.rs b/src/tools/clippy/tests/ui/eq_op_macros.rs
new file mode 100644
index 000000000..6b5b31a1a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eq_op_macros.rs
@@ -0,0 +1,56 @@
+#![warn(clippy::eq_op)]
+
+// lint also in macro definition
+macro_rules! assert_in_macro_def {
+ () => {
+ let a = 42;
+ assert_eq!(a, a);
+ assert_ne!(a, a);
+ debug_assert_eq!(a, a);
+ debug_assert_ne!(a, a);
+ };
+}
+
+// lint identical args in assert-like macro invocations (see #3574)
+fn main() {
+ assert_in_macro_def!();
+
+ let a = 1;
+ let b = 2;
+
+ // lint identical args in `assert_eq!`
+ assert_eq!(a, a);
+ assert_eq!(a + 1, a + 1);
+ // ok
+ assert_eq!(a, b);
+ assert_eq!(a, a + 1);
+ assert_eq!(a + 1, b + 1);
+
+ // lint identical args in `assert_ne!`
+ assert_ne!(a, a);
+ assert_ne!(a + 1, a + 1);
+ // ok
+ assert_ne!(a, b);
+ assert_ne!(a, a + 1);
+ assert_ne!(a + 1, b + 1);
+
+ // lint identical args in `debug_assert_eq!`
+ debug_assert_eq!(a, a);
+ debug_assert_eq!(a + 1, a + 1);
+ // ok
+ debug_assert_eq!(a, b);
+ debug_assert_eq!(a, a + 1);
+ debug_assert_eq!(a + 1, b + 1);
+
+ // lint identical args in `debug_assert_ne!`
+ debug_assert_ne!(a, a);
+ debug_assert_ne!(a + 1, a + 1);
+ // ok
+ debug_assert_ne!(a, b);
+ debug_assert_ne!(a, a + 1);
+ debug_assert_ne!(a + 1, b + 1);
+
+ let my_vec = vec![1; 5];
+ let mut my_iter = my_vec.iter();
+ assert_ne!(my_iter.next(), my_iter.next());
+}
diff --git a/src/tools/clippy/tests/ui/eq_op_macros.stderr b/src/tools/clippy/tests/ui/eq_op_macros.stderr
new file mode 100644
index 000000000..cd9f1826e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eq_op_macros.stderr
@@ -0,0 +1,95 @@
+error: identical args used in this `assert_eq!` macro call
+ --> $DIR/eq_op_macros.rs:7:20
+ |
+LL | assert_eq!(a, a);
+ | ^^^^
+...
+LL | assert_in_macro_def!();
+ | ---------------------- in this macro invocation
+ |
+ = note: `-D clippy::eq-op` implied by `-D warnings`
+ = note: this error originates in the macro `assert_in_macro_def` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: identical args used in this `assert_ne!` macro call
+ --> $DIR/eq_op_macros.rs:8:20
+ |
+LL | assert_ne!(a, a);
+ | ^^^^
+...
+LL | assert_in_macro_def!();
+ | ---------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `assert_in_macro_def` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: identical args used in this `debug_assert_eq!` macro call
+ --> $DIR/eq_op_macros.rs:9:26
+ |
+LL | debug_assert_eq!(a, a);
+ | ^^^^
+...
+LL | assert_in_macro_def!();
+ | ---------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `assert_in_macro_def` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: identical args used in this `debug_assert_ne!` macro call
+ --> $DIR/eq_op_macros.rs:10:26
+ |
+LL | debug_assert_ne!(a, a);
+ | ^^^^
+...
+LL | assert_in_macro_def!();
+ | ---------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `assert_in_macro_def` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: identical args used in this `assert_eq!` macro call
+ --> $DIR/eq_op_macros.rs:22:16
+ |
+LL | assert_eq!(a, a);
+ | ^^^^
+
+error: identical args used in this `assert_eq!` macro call
+ --> $DIR/eq_op_macros.rs:23:16
+ |
+LL | assert_eq!(a + 1, a + 1);
+ | ^^^^^^^^^^^^
+
+error: identical args used in this `assert_ne!` macro call
+ --> $DIR/eq_op_macros.rs:30:16
+ |
+LL | assert_ne!(a, a);
+ | ^^^^
+
+error: identical args used in this `assert_ne!` macro call
+ --> $DIR/eq_op_macros.rs:31:16
+ |
+LL | assert_ne!(a + 1, a + 1);
+ | ^^^^^^^^^^^^
+
+error: identical args used in this `debug_assert_eq!` macro call
+ --> $DIR/eq_op_macros.rs:38:22
+ |
+LL | debug_assert_eq!(a, a);
+ | ^^^^
+
+error: identical args used in this `debug_assert_eq!` macro call
+ --> $DIR/eq_op_macros.rs:39:22
+ |
+LL | debug_assert_eq!(a + 1, a + 1);
+ | ^^^^^^^^^^^^
+
+error: identical args used in this `debug_assert_ne!` macro call
+ --> $DIR/eq_op_macros.rs:46:22
+ |
+LL | debug_assert_ne!(a, a);
+ | ^^^^
+
+error: identical args used in this `debug_assert_ne!` macro call
+ --> $DIR/eq_op_macros.rs:47:22
+ |
+LL | debug_assert_ne!(a + 1, a + 1);
+ | ^^^^^^^^^^^^
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/equatable_if_let.fixed b/src/tools/clippy/tests/ui/equatable_if_let.fixed
new file mode 100644
index 000000000..687efdada
--- /dev/null
+++ b/src/tools/clippy/tests/ui/equatable_if_let.fixed
@@ -0,0 +1,84 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![allow(unused_variables, dead_code, clippy::derive_partial_eq_without_eq)]
+#![warn(clippy::equatable_if_let)]
+
+#[macro_use]
+extern crate macro_rules;
+
+use std::cmp::Ordering;
+
+#[derive(PartialEq)]
+enum Enum {
+ TupleVariant(i32, u64),
+ RecordVariant { a: i64, b: u32 },
+ UnitVariant,
+ Recursive(Struct),
+}
+
+#[derive(PartialEq)]
+struct Struct {
+ a: i32,
+ b: bool,
+}
+
+enum NotPartialEq {
+ A,
+ B,
+}
+
+enum NotStructuralEq {
+ A,
+ B,
+}
+
+impl PartialEq for NotStructuralEq {
+ fn eq(&self, _: &NotStructuralEq) -> bool {
+ false
+ }
+}
+
+fn main() {
+ let a = 2;
+ let b = 3;
+ let c = Some(2);
+ let d = Struct { a: 2, b: false };
+ let e = Enum::UnitVariant;
+ let f = NotPartialEq::A;
+ let g = NotStructuralEq::A;
+
+ // true
+
+ if a == 2 {}
+ if a.cmp(&b) == Ordering::Greater {}
+ if c == Some(2) {}
+ if d == (Struct { a: 2, b: false }) {}
+ if e == Enum::TupleVariant(32, 64) {}
+ if e == (Enum::RecordVariant { a: 64, b: 32 }) {}
+ if e == Enum::UnitVariant {}
+ if (e, &d) == (Enum::UnitVariant, &Struct { a: 2, b: false }) {}
+
+ // false
+
+ if let 2 | 3 = a {}
+ if let x @ 2 = a {}
+ if let Some(3 | 4) = c {}
+ if let Struct { a, b: false } = d {}
+ if let Struct { a: 2, b: x } = d {}
+ if let NotPartialEq::A = f {}
+ if g == NotStructuralEq::A {}
+ if let Some(NotPartialEq::A) = Some(f) {}
+ if Some(g) == Some(NotStructuralEq::A) {}
+
+ macro_rules! m1 {
+ (x) => {
+ "abc"
+ };
+ }
+ if "abc" == m1!(x) {
+ println!("OK");
+ }
+
+ equatable_if_let!(a);
+}
diff --git a/src/tools/clippy/tests/ui/equatable_if_let.rs b/src/tools/clippy/tests/ui/equatable_if_let.rs
new file mode 100644
index 000000000..8c467d14d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/equatable_if_let.rs
@@ -0,0 +1,84 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![allow(unused_variables, dead_code, clippy::derive_partial_eq_without_eq)]
+#![warn(clippy::equatable_if_let)]
+
+#[macro_use]
+extern crate macro_rules;
+
+use std::cmp::Ordering;
+
+#[derive(PartialEq)]
+enum Enum {
+ TupleVariant(i32, u64),
+ RecordVariant { a: i64, b: u32 },
+ UnitVariant,
+ Recursive(Struct),
+}
+
+#[derive(PartialEq)]
+struct Struct {
+ a: i32,
+ b: bool,
+}
+
+enum NotPartialEq {
+ A,
+ B,
+}
+
+enum NotStructuralEq {
+ A,
+ B,
+}
+
+impl PartialEq for NotStructuralEq {
+ fn eq(&self, _: &NotStructuralEq) -> bool {
+ false
+ }
+}
+
+fn main() {
+ let a = 2;
+ let b = 3;
+ let c = Some(2);
+ let d = Struct { a: 2, b: false };
+ let e = Enum::UnitVariant;
+ let f = NotPartialEq::A;
+ let g = NotStructuralEq::A;
+
+ // true
+
+ if let 2 = a {}
+ if let Ordering::Greater = a.cmp(&b) {}
+ if let Some(2) = c {}
+ if let Struct { a: 2, b: false } = d {}
+ if let Enum::TupleVariant(32, 64) = e {}
+ if let Enum::RecordVariant { a: 64, b: 32 } = e {}
+ if let Enum::UnitVariant = e {}
+ if let (Enum::UnitVariant, &Struct { a: 2, b: false }) = (e, &d) {}
+
+ // false
+
+ if let 2 | 3 = a {}
+ if let x @ 2 = a {}
+ if let Some(3 | 4) = c {}
+ if let Struct { a, b: false } = d {}
+ if let Struct { a: 2, b: x } = d {}
+ if let NotPartialEq::A = f {}
+ if let NotStructuralEq::A = g {}
+ if let Some(NotPartialEq::A) = Some(f) {}
+ if let Some(NotStructuralEq::A) = Some(g) {}
+
+ macro_rules! m1 {
+ (x) => {
+ "abc"
+ };
+ }
+ if let m1!(x) = "abc" {
+ println!("OK");
+ }
+
+ equatable_if_let!(a);
+}
diff --git a/src/tools/clippy/tests/ui/equatable_if_let.stderr b/src/tools/clippy/tests/ui/equatable_if_let.stderr
new file mode 100644
index 000000000..9c4c3cc36
--- /dev/null
+++ b/src/tools/clippy/tests/ui/equatable_if_let.stderr
@@ -0,0 +1,70 @@
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:53:8
+ |
+LL | if let 2 = a {}
+ | ^^^^^^^^^ help: try: `a == 2`
+ |
+ = note: `-D clippy::equatable-if-let` implied by `-D warnings`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:54:8
+ |
+LL | if let Ordering::Greater = a.cmp(&b) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.cmp(&b) == Ordering::Greater`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:55:8
+ |
+LL | if let Some(2) = c {}
+ | ^^^^^^^^^^^^^^^ help: try: `c == Some(2)`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:56:8
+ |
+LL | if let Struct { a: 2, b: false } = d {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `d == (Struct { a: 2, b: false })`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:57:8
+ |
+LL | if let Enum::TupleVariant(32, 64) = e {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `e == Enum::TupleVariant(32, 64)`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:58:8
+ |
+LL | if let Enum::RecordVariant { a: 64, b: 32 } = e {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `e == (Enum::RecordVariant { a: 64, b: 32 })`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:59:8
+ |
+LL | if let Enum::UnitVariant = e {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `e == Enum::UnitVariant`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:60:8
+ |
+LL | if let (Enum::UnitVariant, &Struct { a: 2, b: false }) = (e, &d) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(e, &d) == (Enum::UnitVariant, &Struct { a: 2, b: false })`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:70:8
+ |
+LL | if let NotStructuralEq::A = g {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `g == NotStructuralEq::A`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:72:8
+ |
+LL | if let Some(NotStructuralEq::A) = Some(g) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(g) == Some(NotStructuralEq::A)`
+
+error: this pattern matching can be expressed using equality
+ --> $DIR/equatable_if_let.rs:79:8
+ |
+LL | if let m1!(x) = "abc" {
+ | ^^^^^^^^^^^^^^^^^^ help: try: `"abc" == m1!(x)`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/erasing_op.rs b/src/tools/clippy/tests/ui/erasing_op.rs
new file mode 100644
index 000000000..ae2fad008
--- /dev/null
+++ b/src/tools/clippy/tests/ui/erasing_op.rs
@@ -0,0 +1,43 @@
+struct Length(u8);
+struct Meter;
+
+impl core::ops::Mul<Meter> for u8 {
+ type Output = Length;
+ fn mul(self, _: Meter) -> Length {
+ Length(self)
+ }
+}
+
+#[derive(Clone, Default, PartialEq, Eq, Hash)]
+struct Vec1 {
+ x: i32,
+}
+
+impl core::ops::Mul<Vec1> for i32 {
+ type Output = Vec1;
+ fn mul(self, mut right: Vec1) -> Vec1 {
+ right.x *= self;
+ right
+ }
+}
+
+impl core::ops::Mul<i32> for Vec1 {
+ type Output = Vec1;
+ fn mul(mut self, right: i32) -> Vec1 {
+ self.x *= right;
+ self
+ }
+}
+
+#[allow(clippy::no_effect)]
+#[warn(clippy::erasing_op)]
+fn main() {
+ let x: u8 = 0;
+
+ x * 0;
+ 0 & x;
+ 0 / x;
+ 0 * Meter; // no error: Output type is different from the non-zero argument
+ 0 * Vec1 { x: 5 };
+ Vec1 { x: 5 } * 0;
+}
diff --git a/src/tools/clippy/tests/ui/erasing_op.stderr b/src/tools/clippy/tests/ui/erasing_op.stderr
new file mode 100644
index 000000000..165ed9bfe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/erasing_op.stderr
@@ -0,0 +1,34 @@
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/erasing_op.rs:37:5
+ |
+LL | x * 0;
+ | ^^^^^
+ |
+ = note: `-D clippy::erasing-op` implied by `-D warnings`
+
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/erasing_op.rs:38:5
+ |
+LL | 0 & x;
+ | ^^^^^
+
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/erasing_op.rs:39:5
+ |
+LL | 0 / x;
+ | ^^^^^
+
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/erasing_op.rs:41:5
+ |
+LL | 0 * Vec1 { x: 5 };
+ | ^^^^^^^^^^^^^^^^^
+
+error: this operation will always return zero. This is likely not the intended outcome
+ --> $DIR/erasing_op.rs:42:5
+ |
+LL | Vec1 { x: 5 } * 0;
+ | ^^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/err_expect.fixed b/src/tools/clippy/tests/ui/err_expect.fixed
new file mode 100644
index 000000000..7e18d70ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/err_expect.fixed
@@ -0,0 +1,14 @@
+// run-rustfix
+
+struct MyTypeNonDebug;
+
+#[derive(Debug)]
+struct MyTypeDebug;
+
+fn main() {
+ let test_debug: Result<MyTypeDebug, u32> = Ok(MyTypeDebug);
+ test_debug.expect_err("Testing debug type");
+
+ let test_non_debug: Result<MyTypeNonDebug, u32> = Ok(MyTypeNonDebug);
+ test_non_debug.err().expect("Testing non debug type");
+}
diff --git a/src/tools/clippy/tests/ui/err_expect.rs b/src/tools/clippy/tests/ui/err_expect.rs
new file mode 100644
index 000000000..bf8c3c9fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/err_expect.rs
@@ -0,0 +1,14 @@
+// run-rustfix
+
+struct MyTypeNonDebug;
+
+#[derive(Debug)]
+struct MyTypeDebug;
+
+fn main() {
+ let test_debug: Result<MyTypeDebug, u32> = Ok(MyTypeDebug);
+ test_debug.err().expect("Testing debug type");
+
+ let test_non_debug: Result<MyTypeNonDebug, u32> = Ok(MyTypeNonDebug);
+ test_non_debug.err().expect("Testing non debug type");
+}
diff --git a/src/tools/clippy/tests/ui/err_expect.stderr b/src/tools/clippy/tests/ui/err_expect.stderr
new file mode 100644
index 000000000..ffd97e00a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/err_expect.stderr
@@ -0,0 +1,10 @@
+error: called `.err().expect()` on a `Result` value
+ --> $DIR/err_expect.rs:10:16
+ |
+LL | test_debug.err().expect("Testing debug type");
+ | ^^^^^^^^^^^^ help: try: `expect_err`
+ |
+ = note: `-D clippy::err-expect` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/eta.fixed b/src/tools/clippy/tests/ui/eta.fixed
new file mode 100644
index 000000000..f8d559bf2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eta.fixed
@@ -0,0 +1,305 @@
+// run-rustfix
+
+#![allow(
+ unused,
+ clippy::no_effect,
+ clippy::redundant_closure_call,
+ clippy::needless_pass_by_value,
+ clippy::option_map_unit_fn,
+ clippy::needless_borrow
+)]
+#![warn(clippy::redundant_closure, clippy::redundant_closure_for_method_calls)]
+
+use std::path::{Path, PathBuf};
+
+macro_rules! mac {
+ () => {
+ foobar()
+ };
+}
+
+macro_rules! closure_mac {
+ () => {
+ |n| foo(n)
+ };
+}
+
+fn main() {
+ let a = Some(1u8).map(foo);
+ let c = Some(1u8).map(|a| {1+2; foo}(a));
+ true.then(|| mac!()); // don't lint function in macro expansion
+ Some(1).map(closure_mac!()); // don't lint closure in macro expansion
+ let _: Option<Vec<u8>> = true.then(std::vec::Vec::new); // special case vec!
+ let d = Some(1u8).map(|a| foo(foo2(a))); //is adjusted?
+ all(&[1, 2, 3], &&2, below); //is adjusted
+ unsafe {
+ Some(1u8).map(|a| unsafe_fn(a)); // unsafe fn
+ }
+
+ // See #815
+ let e = Some(1u8).map(|a| divergent(a));
+ let e = Some(1u8).map(generic);
+ let e = Some(1u8).map(generic);
+ // See #515
+ let a: Option<Box<dyn (::std::ops::Deref<Target = [i32]>)>> =
+ Some(vec![1i32, 2]).map(|v| -> Box<dyn (::std::ops::Deref<Target = [i32]>)> { Box::new(v) });
+
+ // issue #7224
+ let _: Option<Vec<u32>> = Some(0).map(|_| vec![]);
+}
+
+trait TestTrait {
+ fn trait_foo(self) -> bool;
+ fn trait_foo_ref(&self) -> bool;
+}
+
+struct TestStruct<'a> {
+ some_ref: &'a i32,
+}
+
+impl<'a> TestStruct<'a> {
+ fn foo(self) -> bool {
+ false
+ }
+ unsafe fn foo_unsafe(self) -> bool {
+ true
+ }
+}
+
+impl<'a> TestTrait for TestStruct<'a> {
+ fn trait_foo(self) -> bool {
+ false
+ }
+ fn trait_foo_ref(&self) -> bool {
+ false
+ }
+}
+
+impl<'a> std::ops::Deref for TestStruct<'a> {
+ type Target = char;
+ fn deref(&self) -> &char {
+ &'a'
+ }
+}
+
+fn test_redundant_closures_containing_method_calls() {
+ let i = 10;
+ let e = Some(TestStruct { some_ref: &i }).map(TestStruct::foo);
+ let e = Some(TestStruct { some_ref: &i }).map(TestTrait::trait_foo);
+ let e = Some(TestStruct { some_ref: &i }).map(|a| a.trait_foo_ref());
+ let e = Some(&mut vec![1, 2, 3]).map(std::vec::Vec::clear);
+ unsafe {
+ let e = Some(TestStruct { some_ref: &i }).map(|a| a.foo_unsafe());
+ }
+ let e = Some("str").map(std::string::ToString::to_string);
+ let e = Some('a').map(char::to_uppercase);
+ let e: std::vec::Vec<usize> = vec!['a', 'b', 'c'].iter().map(|c| c.len_utf8()).collect();
+ let e: std::vec::Vec<char> = vec!['a', 'b', 'c'].iter().map(char::to_ascii_uppercase).collect();
+ let e = Some(PathBuf::new()).as_ref().and_then(|s| s.to_str());
+ let c = Some(TestStruct { some_ref: &i })
+ .as_ref()
+ .map(|c| c.to_ascii_uppercase());
+
+ fn test_different_borrow_levels<T>(t: &[&T])
+ where
+ T: TestTrait,
+ {
+ t.iter().filter(|x| x.trait_foo_ref());
+ t.iter().map(|x| x.trait_foo_ref());
+ }
+}
+
+struct Thunk<T>(Box<dyn FnMut() -> T>);
+
+impl<T> Thunk<T> {
+ fn new<F: 'static + FnOnce() -> T>(f: F) -> Thunk<T> {
+ let mut option = Some(f);
+ // This should not trigger redundant_closure (#1439)
+ Thunk(Box::new(move || option.take().unwrap()()))
+ }
+
+ fn unwrap(self) -> T {
+ let Thunk(mut f) = self;
+ f()
+ }
+}
+
+fn foobar() {
+ let thunk = Thunk::new(|| println!("Hello, world!"));
+ thunk.unwrap()
+}
+
+fn foo(_: u8) {}
+
+fn foo2(_: u8) -> u8 {
+ 1u8
+}
+
+fn all<X, F>(x: &[X], y: &X, f: F) -> bool
+where
+ F: Fn(&X, &X) -> bool,
+{
+ x.iter().all(|e| f(e, y))
+}
+
+fn below(x: &u8, y: &u8) -> bool {
+ x < y
+}
+
+unsafe fn unsafe_fn(_: u8) {}
+
+fn divergent(_: u8) -> ! {
+ unimplemented!()
+}
+
+fn generic<T>(_: T) -> u8 {
+ 0
+}
+
+fn passes_fn_mut(mut x: Box<dyn FnMut()>) {
+ requires_fn_once(x);
+}
+fn requires_fn_once<T: FnOnce()>(_: T) {}
+
+fn test_redundant_closure_with_function_pointer() {
+ type FnPtrType = fn(u8);
+ let foo_ptr: FnPtrType = foo;
+ let a = Some(1u8).map(foo_ptr);
+}
+
+fn test_redundant_closure_with_another_closure() {
+ let closure = |a| println!("{}", a);
+ let a = Some(1u8).map(closure);
+}
+
+fn make_lazy(f: impl Fn() -> fn(u8) -> u8) -> impl Fn(u8) -> u8 {
+ // Currently f is called when result of make_lazy is called.
+ // If the closure is removed, f will be called when make_lazy itself is
+ // called. This changes semantics, so the closure must stay.
+ Box::new(move |x| f()(x))
+}
+
+fn call<F: FnOnce(&mut String) -> String>(f: F) -> String {
+ f(&mut "Hello".to_owned())
+}
+fn test_difference_in_mutability() {
+ call(|s| s.clone());
+}
+
+struct Bar;
+impl std::ops::Deref for Bar {
+ type Target = str;
+ fn deref(&self) -> &str {
+ "hi"
+ }
+}
+
+fn test_deref_with_trait_method() {
+ let _ = [Bar].iter().map(|s| s.to_string()).collect::<Vec<_>>();
+}
+
+fn mutable_closure_used_again(x: Vec<i32>, y: Vec<i32>, z: Vec<i32>) {
+ let mut res = Vec::new();
+ let mut add_to_res = |n| res.push(n);
+ x.into_iter().for_each(&mut add_to_res);
+ y.into_iter().for_each(&mut add_to_res);
+ z.into_iter().for_each(add_to_res);
+}
+
+fn mutable_closure_in_loop() {
+ let mut value = 0;
+ let mut closure = |n| value += n;
+ for _ in 0..5 {
+ Some(1).map(&mut closure);
+
+ let mut value = 0;
+ let mut in_loop = |n| value += n;
+ Some(1).map(in_loop);
+ }
+}
+
+fn late_bound_lifetimes() {
+ fn take_asref_path<P: AsRef<Path>>(path: P) {}
+
+ fn map_str<F>(thunk: F)
+ where
+ F: FnOnce(&str),
+ {
+ }
+
+ fn map_str_to_path<F>(thunk: F)
+ where
+ F: FnOnce(&str) -> &Path,
+ {
+ }
+ map_str(|s| take_asref_path(s));
+ map_str_to_path(|s| s.as_ref());
+}
+
+mod type_param_bound {
+ trait Trait {
+ fn fun();
+ }
+
+ fn take<T: 'static>(_: T) {}
+
+ fn test<X: Trait>() {
+ // don't lint, but it's questionable that rust requires a cast
+ take(|| X::fun());
+ take(X::fun as fn());
+ }
+}
+
+// #8073 Don't replace closure with `Arc<F>` or `Rc<F>`
+fn arc_fp() {
+ let rc = std::rc::Rc::new(|| 7);
+ let arc = std::sync::Arc::new(|n| n + 1);
+ let ref_arc = &std::sync::Arc::new(|_| 5);
+
+ true.then(|| rc());
+ (0..5).map(|n| arc(n));
+ Some(4).map(|n| ref_arc(n));
+}
+
+// #8460 Don't replace closures with params bounded as `ref`
+mod bind_by_ref {
+ struct A;
+ struct B;
+
+ impl From<&A> for B {
+ fn from(A: &A) -> Self {
+ B
+ }
+ }
+
+ fn test() {
+ // should not lint
+ Some(A).map(|a| B::from(&a));
+ // should not lint
+ Some(A).map(|ref a| B::from(a));
+ }
+}
+
+// #7812 False positive on coerced closure
+fn coerced_closure() {
+ fn function_returning_unit<F: FnMut(i32)>(f: F) {}
+ function_returning_unit(|x| std::process::exit(x));
+
+ fn arr() -> &'static [u8; 0] {
+ &[]
+ }
+ fn slice_fn(_: impl FnOnce() -> &'static [u8]) {}
+ slice_fn(|| arr());
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/7861
+fn box_dyn() {
+ fn f(_: impl Fn(usize) -> Box<dyn std::any::Any>) {}
+ f(|x| Box::new(x));
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/5939
+fn not_general_enough() {
+ fn f(_: impl FnMut(&Path) -> std::io::Result<()>) {}
+ f(|path| std::fs::remove_file(path));
+}
diff --git a/src/tools/clippy/tests/ui/eta.rs b/src/tools/clippy/tests/ui/eta.rs
new file mode 100644
index 000000000..f0fb55a1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eta.rs
@@ -0,0 +1,305 @@
+// run-rustfix
+
+#![allow(
+ unused,
+ clippy::no_effect,
+ clippy::redundant_closure_call,
+ clippy::needless_pass_by_value,
+ clippy::option_map_unit_fn,
+ clippy::needless_borrow
+)]
+#![warn(clippy::redundant_closure, clippy::redundant_closure_for_method_calls)]
+
+use std::path::{Path, PathBuf};
+
+macro_rules! mac {
+ () => {
+ foobar()
+ };
+}
+
+macro_rules! closure_mac {
+ () => {
+ |n| foo(n)
+ };
+}
+
+fn main() {
+ let a = Some(1u8).map(|a| foo(a));
+ let c = Some(1u8).map(|a| {1+2; foo}(a));
+ true.then(|| mac!()); // don't lint function in macro expansion
+ Some(1).map(closure_mac!()); // don't lint closure in macro expansion
+ let _: Option<Vec<u8>> = true.then(|| vec![]); // special case vec!
+ let d = Some(1u8).map(|a| foo((|b| foo2(b))(a))); //is adjusted?
+ all(&[1, 2, 3], &&2, |x, y| below(x, y)); //is adjusted
+ unsafe {
+ Some(1u8).map(|a| unsafe_fn(a)); // unsafe fn
+ }
+
+ // See #815
+ let e = Some(1u8).map(|a| divergent(a));
+ let e = Some(1u8).map(|a| generic(a));
+ let e = Some(1u8).map(generic);
+ // See #515
+ let a: Option<Box<dyn (::std::ops::Deref<Target = [i32]>)>> =
+ Some(vec![1i32, 2]).map(|v| -> Box<dyn (::std::ops::Deref<Target = [i32]>)> { Box::new(v) });
+
+ // issue #7224
+ let _: Option<Vec<u32>> = Some(0).map(|_| vec![]);
+}
+
+trait TestTrait {
+ fn trait_foo(self) -> bool;
+ fn trait_foo_ref(&self) -> bool;
+}
+
+struct TestStruct<'a> {
+ some_ref: &'a i32,
+}
+
+impl<'a> TestStruct<'a> {
+ fn foo(self) -> bool {
+ false
+ }
+ unsafe fn foo_unsafe(self) -> bool {
+ true
+ }
+}
+
+impl<'a> TestTrait for TestStruct<'a> {
+ fn trait_foo(self) -> bool {
+ false
+ }
+ fn trait_foo_ref(&self) -> bool {
+ false
+ }
+}
+
+impl<'a> std::ops::Deref for TestStruct<'a> {
+ type Target = char;
+ fn deref(&self) -> &char {
+ &'a'
+ }
+}
+
+fn test_redundant_closures_containing_method_calls() {
+ let i = 10;
+ let e = Some(TestStruct { some_ref: &i }).map(|a| a.foo());
+ let e = Some(TestStruct { some_ref: &i }).map(|a| a.trait_foo());
+ let e = Some(TestStruct { some_ref: &i }).map(|a| a.trait_foo_ref());
+ let e = Some(&mut vec![1, 2, 3]).map(|v| v.clear());
+ unsafe {
+ let e = Some(TestStruct { some_ref: &i }).map(|a| a.foo_unsafe());
+ }
+ let e = Some("str").map(|s| s.to_string());
+ let e = Some('a').map(|s| s.to_uppercase());
+ let e: std::vec::Vec<usize> = vec!['a', 'b', 'c'].iter().map(|c| c.len_utf8()).collect();
+ let e: std::vec::Vec<char> = vec!['a', 'b', 'c'].iter().map(|c| c.to_ascii_uppercase()).collect();
+ let e = Some(PathBuf::new()).as_ref().and_then(|s| s.to_str());
+ let c = Some(TestStruct { some_ref: &i })
+ .as_ref()
+ .map(|c| c.to_ascii_uppercase());
+
+ fn test_different_borrow_levels<T>(t: &[&T])
+ where
+ T: TestTrait,
+ {
+ t.iter().filter(|x| x.trait_foo_ref());
+ t.iter().map(|x| x.trait_foo_ref());
+ }
+}
+
+struct Thunk<T>(Box<dyn FnMut() -> T>);
+
+impl<T> Thunk<T> {
+ fn new<F: 'static + FnOnce() -> T>(f: F) -> Thunk<T> {
+ let mut option = Some(f);
+ // This should not trigger redundant_closure (#1439)
+ Thunk(Box::new(move || option.take().unwrap()()))
+ }
+
+ fn unwrap(self) -> T {
+ let Thunk(mut f) = self;
+ f()
+ }
+}
+
+fn foobar() {
+ let thunk = Thunk::new(|| println!("Hello, world!"));
+ thunk.unwrap()
+}
+
+fn foo(_: u8) {}
+
+fn foo2(_: u8) -> u8 {
+ 1u8
+}
+
+fn all<X, F>(x: &[X], y: &X, f: F) -> bool
+where
+ F: Fn(&X, &X) -> bool,
+{
+ x.iter().all(|e| f(e, y))
+}
+
+fn below(x: &u8, y: &u8) -> bool {
+ x < y
+}
+
+unsafe fn unsafe_fn(_: u8) {}
+
+fn divergent(_: u8) -> ! {
+ unimplemented!()
+}
+
+fn generic<T>(_: T) -> u8 {
+ 0
+}
+
+fn passes_fn_mut(mut x: Box<dyn FnMut()>) {
+ requires_fn_once(|| x());
+}
+fn requires_fn_once<T: FnOnce()>(_: T) {}
+
+fn test_redundant_closure_with_function_pointer() {
+ type FnPtrType = fn(u8);
+ let foo_ptr: FnPtrType = foo;
+ let a = Some(1u8).map(|a| foo_ptr(a));
+}
+
+fn test_redundant_closure_with_another_closure() {
+ let closure = |a| println!("{}", a);
+ let a = Some(1u8).map(|a| closure(a));
+}
+
+fn make_lazy(f: impl Fn() -> fn(u8) -> u8) -> impl Fn(u8) -> u8 {
+ // Currently f is called when result of make_lazy is called.
+ // If the closure is removed, f will be called when make_lazy itself is
+ // called. This changes semantics, so the closure must stay.
+ Box::new(move |x| f()(x))
+}
+
+fn call<F: FnOnce(&mut String) -> String>(f: F) -> String {
+ f(&mut "Hello".to_owned())
+}
+fn test_difference_in_mutability() {
+ call(|s| s.clone());
+}
+
+struct Bar;
+impl std::ops::Deref for Bar {
+ type Target = str;
+ fn deref(&self) -> &str {
+ "hi"
+ }
+}
+
+fn test_deref_with_trait_method() {
+ let _ = [Bar].iter().map(|s| s.to_string()).collect::<Vec<_>>();
+}
+
+fn mutable_closure_used_again(x: Vec<i32>, y: Vec<i32>, z: Vec<i32>) {
+ let mut res = Vec::new();
+ let mut add_to_res = |n| res.push(n);
+ x.into_iter().for_each(|x| add_to_res(x));
+ y.into_iter().for_each(|x| add_to_res(x));
+ z.into_iter().for_each(|x| add_to_res(x));
+}
+
+fn mutable_closure_in_loop() {
+ let mut value = 0;
+ let mut closure = |n| value += n;
+ for _ in 0..5 {
+ Some(1).map(|n| closure(n));
+
+ let mut value = 0;
+ let mut in_loop = |n| value += n;
+ Some(1).map(|n| in_loop(n));
+ }
+}
+
+fn late_bound_lifetimes() {
+ fn take_asref_path<P: AsRef<Path>>(path: P) {}
+
+ fn map_str<F>(thunk: F)
+ where
+ F: FnOnce(&str),
+ {
+ }
+
+ fn map_str_to_path<F>(thunk: F)
+ where
+ F: FnOnce(&str) -> &Path,
+ {
+ }
+ map_str(|s| take_asref_path(s));
+ map_str_to_path(|s| s.as_ref());
+}
+
+mod type_param_bound {
+ trait Trait {
+ fn fun();
+ }
+
+ fn take<T: 'static>(_: T) {}
+
+ fn test<X: Trait>() {
+ // don't lint, but it's questionable that rust requires a cast
+ take(|| X::fun());
+ take(X::fun as fn());
+ }
+}
+
+// #8073 Don't replace closure with `Arc<F>` or `Rc<F>`
+fn arc_fp() {
+ let rc = std::rc::Rc::new(|| 7);
+ let arc = std::sync::Arc::new(|n| n + 1);
+ let ref_arc = &std::sync::Arc::new(|_| 5);
+
+ true.then(|| rc());
+ (0..5).map(|n| arc(n));
+ Some(4).map(|n| ref_arc(n));
+}
+
+// #8460 Don't replace closures with params bounded as `ref`
+mod bind_by_ref {
+ struct A;
+ struct B;
+
+ impl From<&A> for B {
+ fn from(A: &A) -> Self {
+ B
+ }
+ }
+
+ fn test() {
+ // should not lint
+ Some(A).map(|a| B::from(&a));
+ // should not lint
+ Some(A).map(|ref a| B::from(a));
+ }
+}
+
+// #7812 False positive on coerced closure
+fn coerced_closure() {
+ fn function_returning_unit<F: FnMut(i32)>(f: F) {}
+ function_returning_unit(|x| std::process::exit(x));
+
+ fn arr() -> &'static [u8; 0] {
+ &[]
+ }
+ fn slice_fn(_: impl FnOnce() -> &'static [u8]) {}
+ slice_fn(|| arr());
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/7861
+fn box_dyn() {
+ fn f(_: impl Fn(usize) -> Box<dyn std::any::Any>) {}
+ f(|x| Box::new(x));
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/5939
+fn not_general_enough() {
+ fn f(_: impl FnMut(&Path) -> std::io::Result<()>) {}
+ f(|path| std::fs::remove_file(path));
+}
diff --git a/src/tools/clippy/tests/ui/eta.stderr b/src/tools/clippy/tests/ui/eta.stderr
new file mode 100644
index 000000000..bf2e97e74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/eta.stderr
@@ -0,0 +1,120 @@
+error: redundant closure
+ --> $DIR/eta.rs:28:27
+ |
+LL | let a = Some(1u8).map(|a| foo(a));
+ | ^^^^^^^^^^ help: replace the closure with the function itself: `foo`
+ |
+ = note: `-D clippy::redundant-closure` implied by `-D warnings`
+
+error: redundant closure
+ --> $DIR/eta.rs:32:40
+ |
+LL | let _: Option<Vec<u8>> = true.then(|| vec![]); // special case vec!
+ | ^^^^^^^^^ help: replace the closure with `Vec::new`: `std::vec::Vec::new`
+
+error: redundant closure
+ --> $DIR/eta.rs:33:35
+ |
+LL | let d = Some(1u8).map(|a| foo((|b| foo2(b))(a))); //is adjusted?
+ | ^^^^^^^^^^^^^ help: replace the closure with the function itself: `foo2`
+
+error: redundant closure
+ --> $DIR/eta.rs:34:26
+ |
+LL | all(&[1, 2, 3], &&2, |x, y| below(x, y)); //is adjusted
+ | ^^^^^^^^^^^^^^^^^^ help: replace the closure with the function itself: `below`
+
+error: redundant closure
+ --> $DIR/eta.rs:41:27
+ |
+LL | let e = Some(1u8).map(|a| generic(a));
+ | ^^^^^^^^^^^^^^ help: replace the closure with the function itself: `generic`
+
+error: redundant closure
+ --> $DIR/eta.rs:87:51
+ |
+LL | let e = Some(TestStruct { some_ref: &i }).map(|a| a.foo());
+ | ^^^^^^^^^^^ help: replace the closure with the method itself: `TestStruct::foo`
+ |
+ = note: `-D clippy::redundant-closure-for-method-calls` implied by `-D warnings`
+
+error: redundant closure
+ --> $DIR/eta.rs:88:51
+ |
+LL | let e = Some(TestStruct { some_ref: &i }).map(|a| a.trait_foo());
+ | ^^^^^^^^^^^^^^^^^ help: replace the closure with the method itself: `TestTrait::trait_foo`
+
+error: redundant closure
+ --> $DIR/eta.rs:90:42
+ |
+LL | let e = Some(&mut vec![1, 2, 3]).map(|v| v.clear());
+ | ^^^^^^^^^^^^^ help: replace the closure with the method itself: `std::vec::Vec::clear`
+
+error: redundant closure
+ --> $DIR/eta.rs:94:29
+ |
+LL | let e = Some("str").map(|s| s.to_string());
+ | ^^^^^^^^^^^^^^^^^ help: replace the closure with the method itself: `std::string::ToString::to_string`
+
+error: redundant closure
+ --> $DIR/eta.rs:95:27
+ |
+LL | let e = Some('a').map(|s| s.to_uppercase());
+ | ^^^^^^^^^^^^^^^^^^^^ help: replace the closure with the method itself: `char::to_uppercase`
+
+error: redundant closure
+ --> $DIR/eta.rs:97:65
+ |
+LL | let e: std::vec::Vec<char> = vec!['a', 'b', 'c'].iter().map(|c| c.to_ascii_uppercase()).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace the closure with the method itself: `char::to_ascii_uppercase`
+
+error: redundant closure
+ --> $DIR/eta.rs:160:22
+ |
+LL | requires_fn_once(|| x());
+ | ^^^^^^ help: replace the closure with the function itself: `x`
+
+error: redundant closure
+ --> $DIR/eta.rs:167:27
+ |
+LL | let a = Some(1u8).map(|a| foo_ptr(a));
+ | ^^^^^^^^^^^^^^ help: replace the closure with the function itself: `foo_ptr`
+
+error: redundant closure
+ --> $DIR/eta.rs:172:27
+ |
+LL | let a = Some(1u8).map(|a| closure(a));
+ | ^^^^^^^^^^^^^^ help: replace the closure with the function itself: `closure`
+
+error: redundant closure
+ --> $DIR/eta.rs:204:28
+ |
+LL | x.into_iter().for_each(|x| add_to_res(x));
+ | ^^^^^^^^^^^^^^^^^ help: replace the closure with the function itself: `&mut add_to_res`
+
+error: redundant closure
+ --> $DIR/eta.rs:205:28
+ |
+LL | y.into_iter().for_each(|x| add_to_res(x));
+ | ^^^^^^^^^^^^^^^^^ help: replace the closure with the function itself: `&mut add_to_res`
+
+error: redundant closure
+ --> $DIR/eta.rs:206:28
+ |
+LL | z.into_iter().for_each(|x| add_to_res(x));
+ | ^^^^^^^^^^^^^^^^^ help: replace the closure with the function itself: `add_to_res`
+
+error: redundant closure
+ --> $DIR/eta.rs:213:21
+ |
+LL | Some(1).map(|n| closure(n));
+ | ^^^^^^^^^^^^^^ help: replace the closure with the function itself: `&mut closure`
+
+error: redundant closure
+ --> $DIR/eta.rs:217:21
+ |
+LL | Some(1).map(|n| in_loop(n));
+ | ^^^^^^^^^^^^^^ help: replace the closure with the function itself: `in_loop`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/excessive_precision.fixed b/src/tools/clippy/tests/ui/excessive_precision.fixed
new file mode 100644
index 000000000..b74bda182
--- /dev/null
+++ b/src/tools/clippy/tests/ui/excessive_precision.fixed
@@ -0,0 +1,69 @@
+// run-rustfix
+#![warn(clippy::excessive_precision)]
+#![allow(dead_code, unused_variables, clippy::print_literal)]
+
+fn main() {
+ // Consts
+ const GOOD32: f32 = 0.123_456;
+ const GOOD32_SM: f32 = 0.000_000_000_1;
+ const GOOD32_DOT: f32 = 10_000_000_000.0;
+ const GOOD32_EDGE: f32 = 1.000_000_8;
+ const GOOD64: f64 = 0.123_456_789_012;
+ const GOOD64_SM: f32 = 0.000_000_000_000_000_1;
+ const GOOD64_DOT: f32 = 10_000_000_000_000_000.0;
+
+ const BAD32_1: f32 = 0.123_456_79_f32;
+ const BAD32_2: f32 = 0.123_456_79;
+ const BAD32_3: f32 = 0.1;
+ const BAD32_EDGE: f32 = 1.000_001;
+
+ const BAD64_1: f64 = 0.123_456_789_012_345_67f64;
+ const BAD64_2: f64 = 0.123_456_789_012_345_67;
+ const BAD64_3: f64 = 0.1;
+
+ // Literal as param
+ println!("{:?}", 8.888_888_888_888_89);
+
+ // // TODO add inferred type tests for f32
+ // Locals
+ let good32: f32 = 0.123_456_f32;
+ let good32_2: f32 = 0.123_456;
+
+ let good64: f64 = 0.123_456_789_012;
+ let good64_suf: f64 = 0.123_456_789_012f64;
+ let good64_inf = 0.123_456_789_012;
+
+ let bad32: f32 = 1.123_456_8;
+ let bad32_suf: f32 = 1.123_456_8_f32;
+ let bad32_inf = 1.123_456_8_f32;
+
+ let bad64: f64 = 0.123_456_789_012_345_67;
+ let bad64_suf: f64 = 0.123_456_789_012_345_67f64;
+ let bad64_inf = 0.123_456_789_012_345_67;
+
+ // Vectors
+ let good_vec32: Vec<f32> = vec![0.123_456];
+ let good_vec64: Vec<f64> = vec![0.123_456_789];
+
+ let bad_vec32: Vec<f32> = vec![0.123_456_79];
+ let bad_vec64: Vec<f64> = vec![0.123_456_789_123_456_78];
+
+ // Exponential float notation
+ let good_e32: f32 = 1e-10;
+ let bad_e32: f32 = 1.123_456_8e-10;
+
+ let good_bige32: f32 = 1E-10;
+ let bad_bige32: f32 = 1.123_456_8E-10;
+
+ // Inferred type
+ let good_inferred: f32 = 1f32 * 1_000_000_000.;
+
+ // issue #2840
+ let num = 0.000_000_000_01e-10f64;
+
+ // issue #7744
+ let _ = 2.225_073_858_507_201e-308_f64;
+
+ // issue #7745
+ let _ = 0_f64;
+}
diff --git a/src/tools/clippy/tests/ui/excessive_precision.rs b/src/tools/clippy/tests/ui/excessive_precision.rs
new file mode 100644
index 000000000..6e84a71f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/excessive_precision.rs
@@ -0,0 +1,69 @@
+// run-rustfix
+#![warn(clippy::excessive_precision)]
+#![allow(dead_code, unused_variables, clippy::print_literal)]
+
+fn main() {
+ // Consts
+ const GOOD32: f32 = 0.123_456;
+ const GOOD32_SM: f32 = 0.000_000_000_1;
+ const GOOD32_DOT: f32 = 10_000_000_000.0;
+ const GOOD32_EDGE: f32 = 1.000_000_8;
+ const GOOD64: f64 = 0.123_456_789_012;
+ const GOOD64_SM: f32 = 0.000_000_000_000_000_1;
+ const GOOD64_DOT: f32 = 10_000_000_000_000_000.0;
+
+ const BAD32_1: f32 = 0.123_456_789_f32;
+ const BAD32_2: f32 = 0.123_456_789;
+ const BAD32_3: f32 = 0.100_000_000_000_1;
+ const BAD32_EDGE: f32 = 1.000_000_9;
+
+ const BAD64_1: f64 = 0.123_456_789_012_345_67f64;
+ const BAD64_2: f64 = 0.123_456_789_012_345_67;
+ const BAD64_3: f64 = 0.100_000_000_000_000_000_1;
+
+ // Literal as param
+ println!("{:?}", 8.888_888_888_888_888_888_888);
+
+ // // TODO add inferred type tests for f32
+ // Locals
+ let good32: f32 = 0.123_456_f32;
+ let good32_2: f32 = 0.123_456;
+
+ let good64: f64 = 0.123_456_789_012;
+ let good64_suf: f64 = 0.123_456_789_012f64;
+ let good64_inf = 0.123_456_789_012;
+
+ let bad32: f32 = 1.123_456_789;
+ let bad32_suf: f32 = 1.123_456_789_f32;
+ let bad32_inf = 1.123_456_789_f32;
+
+ let bad64: f64 = 0.123_456_789_012_345_67;
+ let bad64_suf: f64 = 0.123_456_789_012_345_67f64;
+ let bad64_inf = 0.123_456_789_012_345_67;
+
+ // Vectors
+ let good_vec32: Vec<f32> = vec![0.123_456];
+ let good_vec64: Vec<f64> = vec![0.123_456_789];
+
+ let bad_vec32: Vec<f32> = vec![0.123_456_789];
+ let bad_vec64: Vec<f64> = vec![0.123_456_789_123_456_789];
+
+ // Exponential float notation
+ let good_e32: f32 = 1e-10;
+ let bad_e32: f32 = 1.123_456_788_888e-10;
+
+ let good_bige32: f32 = 1E-10;
+ let bad_bige32: f32 = 1.123_456_788_888E-10;
+
+ // Inferred type
+ let good_inferred: f32 = 1f32 * 1_000_000_000.;
+
+ // issue #2840
+ let num = 0.000_000_000_01e-10f64;
+
+ // issue #7744
+ let _ = 2.225_073_858_507_201_1e-308_f64;
+
+ // issue #7745
+ let _ = 1.000_000_000_000_001e-324_f64;
+}
diff --git a/src/tools/clippy/tests/ui/excessive_precision.stderr b/src/tools/clippy/tests/ui/excessive_precision.stderr
new file mode 100644
index 000000000..42d9d4de1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/excessive_precision.stderr
@@ -0,0 +1,94 @@
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:15:26
+ |
+LL | const BAD32_1: f32 = 0.123_456_789_f32;
+ | ^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0.123_456_79_f32`
+ |
+ = note: `-D clippy::excessive-precision` implied by `-D warnings`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:16:26
+ |
+LL | const BAD32_2: f32 = 0.123_456_789;
+ | ^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0.123_456_79`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:17:26
+ |
+LL | const BAD32_3: f32 = 0.100_000_000_000_1;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0.1`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:18:29
+ |
+LL | const BAD32_EDGE: f32 = 1.000_000_9;
+ | ^^^^^^^^^^^ help: consider changing the type or truncating it to: `1.000_001`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:22:26
+ |
+LL | const BAD64_3: f64 = 0.100_000_000_000_000_000_1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0.1`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:25:22
+ |
+LL | println!("{:?}", 8.888_888_888_888_888_888_888);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `8.888_888_888_888_89`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:36:22
+ |
+LL | let bad32: f32 = 1.123_456_789;
+ | ^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `1.123_456_8`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:37:26
+ |
+LL | let bad32_suf: f32 = 1.123_456_789_f32;
+ | ^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `1.123_456_8_f32`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:38:21
+ |
+LL | let bad32_inf = 1.123_456_789_f32;
+ | ^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `1.123_456_8_f32`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:48:36
+ |
+LL | let bad_vec32: Vec<f32> = vec![0.123_456_789];
+ | ^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0.123_456_79`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:49:36
+ |
+LL | let bad_vec64: Vec<f64> = vec![0.123_456_789_123_456_789];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0.123_456_789_123_456_78`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:53:24
+ |
+LL | let bad_e32: f32 = 1.123_456_788_888e-10;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `1.123_456_8e-10`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:56:27
+ |
+LL | let bad_bige32: f32 = 1.123_456_788_888E-10;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `1.123_456_8E-10`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:65:13
+ |
+LL | let _ = 2.225_073_858_507_201_1e-308_f64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `2.225_073_858_507_201e-308_f64`
+
+error: float has excessive precision
+ --> $DIR/excessive_precision.rs:68:13
+ |
+LL | let _ = 1.000_000_000_000_001e-324_f64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or truncating it to: `0_f64`
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/exhaustive_items.fixed b/src/tools/clippy/tests/ui/exhaustive_items.fixed
new file mode 100644
index 000000000..c209f5b4b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exhaustive_items.fixed
@@ -0,0 +1,91 @@
+// run-rustfix
+
+#![deny(clippy::exhaustive_enums, clippy::exhaustive_structs)]
+#![allow(unused)]
+
+fn main() {
+ // nop
+}
+
+pub mod enums {
+ #[non_exhaustive]
+ pub enum Exhaustive {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ /// Some docs
+ #[repr(C)]
+ #[non_exhaustive]
+ pub enum ExhaustiveWithAttrs {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ // no warning, already non_exhaustive
+ #[non_exhaustive]
+ pub enum NonExhaustive {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ // no warning, private
+ enum ExhaustivePrivate {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ // no warning, private
+ #[non_exhaustive]
+ enum NonExhaustivePrivate {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+}
+
+pub mod structs {
+ #[non_exhaustive]
+ pub struct Exhaustive {
+ pub foo: u8,
+ pub bar: String,
+ }
+
+ // no warning, already non_exhaustive
+ #[non_exhaustive]
+ pub struct NonExhaustive {
+ pub foo: u8,
+ pub bar: String,
+ }
+
+ // no warning, private fields
+ pub struct ExhaustivePrivateFieldTuple(u8);
+
+ // no warning, private fields
+ pub struct ExhaustivePrivateField {
+ pub foo: u8,
+ bar: String,
+ }
+
+ // no warning, private
+ struct ExhaustivePrivate {
+ pub foo: u8,
+ pub bar: String,
+ }
+
+ // no warning, private
+ #[non_exhaustive]
+ struct NonExhaustivePrivate {
+ pub foo: u8,
+ pub bar: String,
+ }
+}
diff --git a/src/tools/clippy/tests/ui/exhaustive_items.rs b/src/tools/clippy/tests/ui/exhaustive_items.rs
new file mode 100644
index 000000000..6f59dbf2d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exhaustive_items.rs
@@ -0,0 +1,88 @@
+// run-rustfix
+
+#![deny(clippy::exhaustive_enums, clippy::exhaustive_structs)]
+#![allow(unused)]
+
+fn main() {
+ // nop
+}
+
+pub mod enums {
+ pub enum Exhaustive {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ /// Some docs
+ #[repr(C)]
+ pub enum ExhaustiveWithAttrs {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ // no warning, already non_exhaustive
+ #[non_exhaustive]
+ pub enum NonExhaustive {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ // no warning, private
+ enum ExhaustivePrivate {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+
+ // no warning, private
+ #[non_exhaustive]
+ enum NonExhaustivePrivate {
+ Foo,
+ Bar,
+ Baz,
+ Quux(String),
+ }
+}
+
+pub mod structs {
+ pub struct Exhaustive {
+ pub foo: u8,
+ pub bar: String,
+ }
+
+ // no warning, already non_exhaustive
+ #[non_exhaustive]
+ pub struct NonExhaustive {
+ pub foo: u8,
+ pub bar: String,
+ }
+
+ // no warning, private fields
+ pub struct ExhaustivePrivateFieldTuple(u8);
+
+ // no warning, private fields
+ pub struct ExhaustivePrivateField {
+ pub foo: u8,
+ bar: String,
+ }
+
+ // no warning, private
+ struct ExhaustivePrivate {
+ pub foo: u8,
+ pub bar: String,
+ }
+
+ // no warning, private
+ #[non_exhaustive]
+ struct NonExhaustivePrivate {
+ pub foo: u8,
+ pub bar: String,
+ }
+}
diff --git a/src/tools/clippy/tests/ui/exhaustive_items.stderr b/src/tools/clippy/tests/ui/exhaustive_items.stderr
new file mode 100644
index 000000000..f46ebd477
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exhaustive_items.stderr
@@ -0,0 +1,61 @@
+error: exported enums should not be exhaustive
+ --> $DIR/exhaustive_items.rs:11:5
+ |
+LL | / pub enum Exhaustive {
+LL | | Foo,
+LL | | Bar,
+LL | | Baz,
+LL | | Quux(String),
+LL | | }
+ | |_____^
+ |
+note: the lint level is defined here
+ --> $DIR/exhaustive_items.rs:3:9
+ |
+LL | #![deny(clippy::exhaustive_enums, clippy::exhaustive_structs)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+help: try adding #[non_exhaustive]
+ |
+LL ~ #[non_exhaustive]
+LL ~ pub enum Exhaustive {
+ |
+
+error: exported enums should not be exhaustive
+ --> $DIR/exhaustive_items.rs:20:5
+ |
+LL | / pub enum ExhaustiveWithAttrs {
+LL | | Foo,
+LL | | Bar,
+LL | | Baz,
+LL | | Quux(String),
+LL | | }
+ | |_____^
+ |
+help: try adding #[non_exhaustive]
+ |
+LL ~ #[non_exhaustive]
+LL ~ pub enum ExhaustiveWithAttrs {
+ |
+
+error: exported structs should not be exhaustive
+ --> $DIR/exhaustive_items.rs:55:5
+ |
+LL | / pub struct Exhaustive {
+LL | | pub foo: u8,
+LL | | pub bar: String,
+LL | | }
+ | |_____^
+ |
+note: the lint level is defined here
+ --> $DIR/exhaustive_items.rs:3:35
+ |
+LL | #![deny(clippy::exhaustive_enums, clippy::exhaustive_structs)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try adding #[non_exhaustive]
+ |
+LL ~ #[non_exhaustive]
+LL ~ pub struct Exhaustive {
+ |
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/exit1.rs b/src/tools/clippy/tests/ui/exit1.rs
new file mode 100644
index 000000000..4eac6eb74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exit1.rs
@@ -0,0 +1,15 @@
+#[warn(clippy::exit)]
+
+fn not_main() {
+ if true {
+ std::process::exit(4);
+ }
+}
+
+fn main() {
+ if true {
+ std::process::exit(2);
+ };
+ not_main();
+ std::process::exit(1);
+}
diff --git a/src/tools/clippy/tests/ui/exit1.stderr b/src/tools/clippy/tests/ui/exit1.stderr
new file mode 100644
index 000000000..a8d3956aa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exit1.stderr
@@ -0,0 +1,10 @@
+error: usage of `process::exit`
+ --> $DIR/exit1.rs:5:9
+ |
+LL | std::process::exit(4);
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::exit` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/exit2.rs b/src/tools/clippy/tests/ui/exit2.rs
new file mode 100644
index 000000000..4b693ed70
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exit2.rs
@@ -0,0 +1,13 @@
+#[warn(clippy::exit)]
+
+fn also_not_main() {
+ std::process::exit(3);
+}
+
+fn main() {
+ if true {
+ std::process::exit(2);
+ };
+ also_not_main();
+ std::process::exit(1);
+}
diff --git a/src/tools/clippy/tests/ui/exit2.stderr b/src/tools/clippy/tests/ui/exit2.stderr
new file mode 100644
index 000000000..7263e156a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exit2.stderr
@@ -0,0 +1,10 @@
+error: usage of `process::exit`
+ --> $DIR/exit2.rs:4:5
+ |
+LL | std::process::exit(3);
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::exit` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/exit3.rs b/src/tools/clippy/tests/ui/exit3.rs
new file mode 100644
index 000000000..9dc0e1015
--- /dev/null
+++ b/src/tools/clippy/tests/ui/exit3.rs
@@ -0,0 +1,8 @@
+#[warn(clippy::exit)]
+
+fn main() {
+ if true {
+ std::process::exit(2);
+ };
+ std::process::exit(1);
+}
diff --git a/src/tools/clippy/tests/ui/expect.rs b/src/tools/clippy/tests/ui/expect.rs
new file mode 100644
index 000000000..1073acf6f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect.rs
@@ -0,0 +1,16 @@
+#![warn(clippy::expect_used)]
+
+fn expect_option() {
+ let opt = Some(0);
+ let _ = opt.expect("");
+}
+
+fn expect_result() {
+ let res: Result<u8, ()> = Ok(0);
+ let _ = res.expect("");
+}
+
+fn main() {
+ expect_option();
+ expect_result();
+}
diff --git a/src/tools/clippy/tests/ui/expect.stderr b/src/tools/clippy/tests/ui/expect.stderr
new file mode 100644
index 000000000..9d3fc7df1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect.stderr
@@ -0,0 +1,19 @@
+error: used `expect()` on `an Option` value
+ --> $DIR/expect.rs:5:13
+ |
+LL | let _ = opt.expect("");
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::expect-used` implied by `-D warnings`
+ = help: if this value is an `None`, it will panic
+
+error: used `expect()` on `a Result` value
+ --> $DIR/expect.rs:10:13
+ |
+LL | let _ = res.expect("");
+ | ^^^^^^^^^^^^^^
+ |
+ = help: if this value is an `Err`, it will panic
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/expect_fun_call.fixed b/src/tools/clippy/tests/ui/expect_fun_call.fixed
new file mode 100644
index 000000000..53e45d28b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect_fun_call.fixed
@@ -0,0 +1,104 @@
+// run-rustfix
+
+#![warn(clippy::expect_fun_call)]
+#![allow(clippy::to_string_in_format_args)]
+
+/// Checks implementation of the `EXPECT_FUN_CALL` lint
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Self {
+ Foo
+ }
+
+ fn expect(&self, msg: &str) {
+ panic!("{}", msg)
+ }
+ }
+
+ let with_some = Some("value");
+ with_some.expect("error");
+
+ let with_none: Option<i32> = None;
+ with_none.expect("error");
+
+ let error_code = 123_i32;
+ let with_none_and_format: Option<i32> = None;
+ with_none_and_format.unwrap_or_else(|| panic!("Error {}: fake error", error_code));
+
+ let with_none_and_as_str: Option<i32> = None;
+ with_none_and_as_str.unwrap_or_else(|| panic!("Error {}: fake error", error_code));
+
+ let with_none_and_format_with_macro: Option<i32> = None;
+ with_none_and_format_with_macro.unwrap_or_else(|| panic!("Error {}: fake error", one!()));
+
+ let with_ok: Result<(), ()> = Ok(());
+ with_ok.expect("error");
+
+ let with_err: Result<(), ()> = Err(());
+ with_err.expect("error");
+
+ let error_code = 123_i32;
+ let with_err_and_format: Result<(), ()> = Err(());
+ with_err_and_format.unwrap_or_else(|_| panic!("Error {}: fake error", error_code));
+
+ let with_err_and_as_str: Result<(), ()> = Err(());
+ with_err_and_as_str.unwrap_or_else(|_| panic!("Error {}: fake error", error_code));
+
+ let with_dummy_type = Foo::new();
+ with_dummy_type.expect("another test string");
+
+ let with_dummy_type_and_format = Foo::new();
+ with_dummy_type_and_format.expect(&format!("Error {}: fake error", error_code));
+
+ let with_dummy_type_and_as_str = Foo::new();
+ with_dummy_type_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
+
+ //Issue #2937
+ Some("foo").unwrap_or_else(|| panic!("{} {}", 1, 2));
+
+ //Issue #2979 - this should not lint
+ {
+ let msg = "bar";
+ Some("foo").expect(msg);
+ }
+
+ {
+ fn get_string() -> String {
+ "foo".to_string()
+ }
+
+ fn get_static_str() -> &'static str {
+ "foo"
+ }
+
+ fn get_non_static_str(_: &u32) -> &str {
+ "foo"
+ }
+
+ Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) });
+ Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) });
+ Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) });
+
+ Some("foo").unwrap_or_else(|| { panic!("{}", get_static_str()) });
+ Some("foo").unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) });
+ }
+
+ //Issue #3839
+ Some(true).unwrap_or_else(|| panic!("key {}, {}", 1, 2));
+
+ //Issue #4912 - the receiver is a &Option
+ {
+ let opt = Some(1);
+ let opt_ref = &opt;
+ opt_ref.unwrap_or_else(|| panic!("{:?}", opt_ref));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/expect_fun_call.rs b/src/tools/clippy/tests/ui/expect_fun_call.rs
new file mode 100644
index 000000000..22e530b80
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect_fun_call.rs
@@ -0,0 +1,104 @@
+// run-rustfix
+
+#![warn(clippy::expect_fun_call)]
+#![allow(clippy::to_string_in_format_args)]
+
+/// Checks implementation of the `EXPECT_FUN_CALL` lint
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Self {
+ Foo
+ }
+
+ fn expect(&self, msg: &str) {
+ panic!("{}", msg)
+ }
+ }
+
+ let with_some = Some("value");
+ with_some.expect("error");
+
+ let with_none: Option<i32> = None;
+ with_none.expect("error");
+
+ let error_code = 123_i32;
+ let with_none_and_format: Option<i32> = None;
+ with_none_and_format.expect(&format!("Error {}: fake error", error_code));
+
+ let with_none_and_as_str: Option<i32> = None;
+ with_none_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
+
+ let with_none_and_format_with_macro: Option<i32> = None;
+ with_none_and_format_with_macro.expect(format!("Error {}: fake error", one!()).as_str());
+
+ let with_ok: Result<(), ()> = Ok(());
+ with_ok.expect("error");
+
+ let with_err: Result<(), ()> = Err(());
+ with_err.expect("error");
+
+ let error_code = 123_i32;
+ let with_err_and_format: Result<(), ()> = Err(());
+ with_err_and_format.expect(&format!("Error {}: fake error", error_code));
+
+ let with_err_and_as_str: Result<(), ()> = Err(());
+ with_err_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
+
+ let with_dummy_type = Foo::new();
+ with_dummy_type.expect("another test string");
+
+ let with_dummy_type_and_format = Foo::new();
+ with_dummy_type_and_format.expect(&format!("Error {}: fake error", error_code));
+
+ let with_dummy_type_and_as_str = Foo::new();
+ with_dummy_type_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
+
+ //Issue #2937
+ Some("foo").expect(format!("{} {}", 1, 2).as_ref());
+
+ //Issue #2979 - this should not lint
+ {
+ let msg = "bar";
+ Some("foo").expect(msg);
+ }
+
+ {
+ fn get_string() -> String {
+ "foo".to_string()
+ }
+
+ fn get_static_str() -> &'static str {
+ "foo"
+ }
+
+ fn get_non_static_str(_: &u32) -> &str {
+ "foo"
+ }
+
+ Some("foo").expect(&get_string());
+ Some("foo").expect(get_string().as_ref());
+ Some("foo").expect(get_string().as_str());
+
+ Some("foo").expect(get_static_str());
+ Some("foo").expect(get_non_static_str(&0));
+ }
+
+ //Issue #3839
+ Some(true).expect(&format!("key {}, {}", 1, 2));
+
+ //Issue #4912 - the receiver is a &Option
+ {
+ let opt = Some(1);
+ let opt_ref = &opt;
+ opt_ref.expect(&format!("{:?}", opt_ref));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/expect_fun_call.stderr b/src/tools/clippy/tests/ui/expect_fun_call.stderr
new file mode 100644
index 000000000..aca15935f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect_fun_call.stderr
@@ -0,0 +1,82 @@
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:35:26
+ |
+LL | with_none_and_format.expect(&format!("Error {}: fake error", error_code));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))`
+ |
+ = note: `-D clippy::expect-fun-call` implied by `-D warnings`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:38:26
+ |
+LL | with_none_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:41:37
+ |
+LL | with_none_and_format_with_macro.expect(format!("Error {}: fake error", one!()).as_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", one!()))`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:51:25
+ |
+LL | with_err_and_format.expect(&format!("Error {}: fake error", error_code));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:54:25
+ |
+LL | with_err_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:66:17
+ |
+LL | Some("foo").expect(format!("{} {}", 1, 2).as_ref());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{} {}", 1, 2))`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:87:21
+ |
+LL | Some("foo").expect(&get_string());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:88:21
+ |
+LL | Some("foo").expect(get_string().as_ref());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:89:21
+ |
+LL | Some("foo").expect(get_string().as_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:91:21
+ |
+LL | Some("foo").expect(get_static_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_static_str()) })`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:92:21
+ |
+LL | Some("foo").expect(get_non_static_str(&0));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:96:16
+ |
+LL | Some(true).expect(&format!("key {}, {}", 1, 2));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))`
+
+error: use of `expect` followed by a function call
+ --> $DIR/expect_fun_call.rs:102:17
+ |
+LL | opt_ref.expect(&format!("{:?}", opt_ref));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{:?}", opt_ref))`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.rs b/src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.rs
new file mode 100644
index 000000000..28b37f96e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.rs
@@ -0,0 +1,142 @@
+// check-pass
+#![feature(lint_reasons)]
+//! This file tests the `#[expect]` attribute implementation for tool lints. The same
+//! file is used to test clippy and rustdoc. Any changes to this file should be synced
+//! to the other test files as well.
+//!
+//! Expectations:
+//! * rustc: only rustc lint expectations are emitted
+//! * clippy: rustc and Clippy's expectations are emitted
+//! * rustdoc: only rustdoc lint expectations are emitted
+//!
+//! This test can't cover every lint from Clippy, rustdoc and potentially other
+//! tools that will be developed. This therefore only tests a small subset of lints
+#![expect(rustdoc::missing_crate_level_docs)]
+
+mod rustc_ok {
+ //! See <https://doc.rust-lang.org/rustc/lints/index.html>
+
+ #[expect(dead_code)]
+ pub fn rustc_lints() {
+ let x = 42.0;
+
+ #[expect(illegal_floating_point_literal_pattern)]
+ match x {
+ 5.0 => {}
+ 6.0 => {}
+ _ => {}
+ }
+ }
+}
+
+mod rustc_warn {
+ //! See <https://doc.rust-lang.org/rustc/lints/index.html>
+
+ #[expect(dead_code)]
+ pub fn rustc_lints() {
+ let x = 42;
+
+ #[expect(illegal_floating_point_literal_pattern)]
+ match x {
+ 5 => {}
+ 6 => {}
+ _ => {}
+ }
+ }
+}
+
+pub mod rustdoc_ok {
+ //! See <https://doc.rust-lang.org/rustdoc/lints.html>
+
+ #[expect(rustdoc::broken_intra_doc_links)]
+ /// I want to link to [`Nonexistent`] but it doesn't exist!
+ pub fn foo() {}
+
+ #[expect(rustdoc::invalid_html_tags)]
+ /// <h1>
+ pub fn bar() {}
+
+ #[expect(rustdoc::bare_urls)]
+ /// http://example.org
+ pub fn baz() {}
+}
+
+pub mod rustdoc_warn {
+ //! See <https://doc.rust-lang.org/rustdoc/lints.html>
+
+ #[expect(rustdoc::broken_intra_doc_links)]
+ /// I want to link to [`bar`] but it doesn't exist!
+ pub fn foo() {}
+
+ #[expect(rustdoc::invalid_html_tags)]
+ /// <h1></h1>
+ pub fn bar() {}
+
+ #[expect(rustdoc::bare_urls)]
+ /// <http://example.org>
+ pub fn baz() {}
+}
+
+mod clippy_ok {
+ //! See <https://rust-lang.github.io/rust-clippy/master/index.html>
+
+ #[expect(clippy::almost_swapped)]
+ fn foo() {
+ let mut a = 0;
+ let mut b = 9;
+ a = b;
+ b = a;
+ }
+
+ #[expect(clippy::bytes_nth)]
+ fn bar() {
+ let _ = "Hello".bytes().nth(3);
+ }
+
+ #[expect(clippy::if_same_then_else)]
+ fn baz() {
+ let _ = if true { 42 } else { 42 };
+ }
+
+ #[expect(clippy::logic_bug)]
+ fn burger() {
+ let a = false;
+ let b = true;
+
+ if a && b || a {}
+ }
+}
+
+mod clippy_warn {
+ //! See <https://rust-lang.github.io/rust-clippy/master/index.html>
+
+ #[expect(clippy::almost_swapped)]
+ fn foo() {
+ let mut a = 0;
+ let mut b = 9;
+ a = b;
+ }
+
+ #[expect(clippy::bytes_nth)]
+ fn bar() {
+ let _ = "Hello".as_bytes().get(3);
+ }
+
+ #[expect(clippy::if_same_then_else)]
+ fn baz() {
+ let _ = if true { 33 } else { 42 };
+ }
+
+ #[expect(clippy::logic_bug)]
+ fn burger() {
+ let a = false;
+ let b = true;
+ let c = false;
+
+ if a && b || c {}
+ }
+}
+
+fn main() {
+ rustc_warn::rustc_lints();
+}
diff --git a/src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.stderr b/src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.stderr
new file mode 100644
index 000000000..db29e85a8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/expect_tool_lint_rfc_2383.stderr
@@ -0,0 +1,40 @@
+error: this lint expectation is unfulfilled
+ --> $DIR/expect_tool_lint_rfc_2383.rs:35:14
+ |
+LL | #[expect(dead_code)]
+ | ^^^^^^^^^
+ |
+ = note: `-D unfulfilled-lint-expectations` implied by `-D warnings`
+
+error: this lint expectation is unfulfilled
+ --> $DIR/expect_tool_lint_rfc_2383.rs:39:18
+ |
+LL | #[expect(illegal_floating_point_literal_pattern)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this lint expectation is unfulfilled
+ --> $DIR/expect_tool_lint_rfc_2383.rs:113:14
+ |
+LL | #[expect(clippy::almost_swapped)]
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: this lint expectation is unfulfilled
+ --> $DIR/expect_tool_lint_rfc_2383.rs:120:14
+ |
+LL | #[expect(clippy::bytes_nth)]
+ | ^^^^^^^^^^^^^^^^^
+
+error: this lint expectation is unfulfilled
+ --> $DIR/expect_tool_lint_rfc_2383.rs:125:14
+ |
+LL | #[expect(clippy::if_same_then_else)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this lint expectation is unfulfilled
+ --> $DIR/expect_tool_lint_rfc_2383.rs:130:14
+ |
+LL | #[expect(clippy::logic_bug)]
+ | ^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.fixed b/src/tools/clippy/tests/ui/explicit_auto_deref.fixed
new file mode 100644
index 000000000..a650fdc1f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.fixed
@@ -0,0 +1,218 @@
+// run-rustfix
+
+#![warn(clippy::explicit_auto_deref)]
+#![allow(
+ dead_code,
+ unused_braces,
+ clippy::borrowed_box,
+ clippy::needless_borrow,
+ clippy::needless_return,
+ clippy::ptr_arg,
+ clippy::redundant_field_names,
+ clippy::too_many_arguments,
+ clippy::borrow_deref_ref,
+ clippy::let_unit_value
+)]
+
+trait CallableStr {
+ type T: Fn(&str);
+ fn callable_str(&self) -> Self::T;
+}
+impl CallableStr for () {
+ type T = fn(&str);
+ fn callable_str(&self) -> Self::T {
+ fn f(_: &str) {}
+ f
+ }
+}
+impl CallableStr for i32 {
+ type T = <() as CallableStr>::T;
+ fn callable_str(&self) -> Self::T {
+ ().callable_str()
+ }
+}
+
+trait CallableT<U: ?Sized> {
+ type T: Fn(&U);
+ fn callable_t(&self) -> Self::T;
+}
+impl<U: ?Sized> CallableT<U> for () {
+ type T = fn(&U);
+ fn callable_t(&self) -> Self::T {
+ fn f<U: ?Sized>(_: &U) {}
+ f::<U>
+ }
+}
+impl<U: ?Sized> CallableT<U> for i32 {
+ type T = <() as CallableT<U>>::T;
+ fn callable_t(&self) -> Self::T {
+ ().callable_t()
+ }
+}
+
+fn f_str(_: &str) {}
+fn f_string(_: &String) {}
+fn f_t<T>(_: T) {}
+fn f_ref_t<T: ?Sized>(_: &T) {}
+
+fn f_str_t<T>(_: &str, _: T) {}
+
+fn f_box_t<T>(_: &Box<T>) {}
+
+extern "C" {
+ fn var(_: u32, ...);
+}
+
+fn main() {
+ let s = String::new();
+
+ let _: &str = &s;
+ let _ = &*s; // Don't lint. Inferred type would change.
+ let _: &_ = &*s; // Don't lint. Inferred type would change.
+
+ f_str(&s);
+ f_t(&*s); // Don't lint. Inferred type would change.
+ f_ref_t(&*s); // Don't lint. Inferred type would change.
+
+ f_str_t(&s, &*s); // Don't lint second param.
+
+ let b = Box::new(Box::new(Box::new(5)));
+ let _: &Box<i32> = &b;
+ let _: &Box<_> = &**b; // Don't lint. Inferred type would change.
+
+ f_box_t(&**b); // Don't lint. Inferred type would change.
+
+ let c = |_x: &str| ();
+ c(&s);
+
+ let c = |_x| ();
+ c(&*s); // Don't lint. Inferred type would change.
+
+ fn _f(x: &String) -> &str {
+ x
+ }
+
+ fn _f1(x: &String) -> &str {
+ { x }
+ }
+
+ fn _f2(x: &String) -> &str {
+ { x }
+ }
+
+ fn _f3(x: &Box<Box<Box<i32>>>) -> &Box<i32> {
+ x
+ }
+
+ fn _f4(
+ x: String,
+ f1: impl Fn(&str),
+ f2: &dyn Fn(&str),
+ f3: fn(&str),
+ f4: impl CallableStr,
+ f5: <() as CallableStr>::T,
+ f6: <i32 as CallableStr>::T,
+ f7: &dyn CallableStr<T = fn(&str)>,
+ f8: impl CallableT<str>,
+ f9: <() as CallableT<str>>::T,
+ f10: <i32 as CallableT<str>>::T,
+ f11: &dyn CallableT<str, T = fn(&str)>,
+ ) {
+ f1(&x);
+ f2(&x);
+ f3(&x);
+ f4.callable_str()(&x);
+ f5(&x);
+ f6(&x);
+ f7.callable_str()(&x);
+ f8.callable_t()(&x);
+ f9(&x);
+ f10(&x);
+ f11.callable_t()(&x);
+ }
+
+ struct S1<'a>(&'a str);
+ let _ = S1(&s);
+
+ struct S2<'a> {
+ s: &'a str,
+ }
+ let _ = S2 { s: &s };
+
+ struct S3<'a, T: ?Sized>(&'a T);
+ let _ = S3(&*s); // Don't lint. Inferred type would change.
+
+ struct S4<'a, T: ?Sized> {
+ s: &'a T,
+ }
+ let _ = S4 { s: &*s }; // Don't lint. Inferred type would change.
+
+ enum E1<'a> {
+ S1(&'a str),
+ S2 { s: &'a str },
+ }
+ impl<'a> E1<'a> {
+ fn m1(s: &'a String) {
+ let _ = Self::S1(s);
+ let _ = Self::S2 { s: s };
+ }
+ }
+ let _ = E1::S1(&s);
+ let _ = E1::S2 { s: &s };
+
+ enum E2<'a, T: ?Sized> {
+ S1(&'a T),
+ S2 { s: &'a T },
+ }
+ let _ = E2::S1(&*s); // Don't lint. Inferred type would change.
+ let _ = E2::S2 { s: &*s }; // Don't lint. Inferred type would change.
+
+ let ref_s = &s;
+ let _: &String = &*ref_s; // Don't lint reborrow.
+ f_string(&*ref_s); // Don't lint reborrow.
+
+ struct S5 {
+ foo: u32,
+ }
+ let b = Box::new(Box::new(S5 { foo: 5 }));
+ let _ = b.foo;
+ let _ = b.foo;
+ let _ = b.foo;
+
+ struct S6 {
+ foo: S5,
+ }
+ impl core::ops::Deref for S6 {
+ type Target = S5;
+ fn deref(&self) -> &Self::Target {
+ &self.foo
+ }
+ }
+ let s6 = S6 { foo: S5 { foo: 5 } };
+ let _ = (*s6).foo; // Don't lint. `S6` also has a field named `foo`
+
+ let ref_str = &"foo";
+ let _ = f_str(ref_str);
+ let ref_ref_str = &ref_str;
+ let _ = f_str(ref_ref_str);
+
+ fn _f5(x: &u32) -> u32 {
+ if true {
+ *x
+ } else {
+ return *x;
+ }
+ }
+
+ f_str(&&ref_str); // `needless_borrow` will suggest removing both references
+ f_str(&ref_str); // `needless_borrow` will suggest removing only one reference
+
+ let x = &&40;
+ unsafe {
+ var(0, &**x);
+ }
+
+ let s = &"str";
+ let _ = || return *s;
+ let _ = || -> &'static str { return s };
+}
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.rs b/src/tools/clippy/tests/ui/explicit_auto_deref.rs
new file mode 100644
index 000000000..8f4f35257
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.rs
@@ -0,0 +1,218 @@
+// run-rustfix
+
+#![warn(clippy::explicit_auto_deref)]
+#![allow(
+ dead_code,
+ unused_braces,
+ clippy::borrowed_box,
+ clippy::needless_borrow,
+ clippy::needless_return,
+ clippy::ptr_arg,
+ clippy::redundant_field_names,
+ clippy::too_many_arguments,
+ clippy::borrow_deref_ref,
+ clippy::let_unit_value
+)]
+
+trait CallableStr {
+ type T: Fn(&str);
+ fn callable_str(&self) -> Self::T;
+}
+impl CallableStr for () {
+ type T = fn(&str);
+ fn callable_str(&self) -> Self::T {
+ fn f(_: &str) {}
+ f
+ }
+}
+impl CallableStr for i32 {
+ type T = <() as CallableStr>::T;
+ fn callable_str(&self) -> Self::T {
+ ().callable_str()
+ }
+}
+
+trait CallableT<U: ?Sized> {
+ type T: Fn(&U);
+ fn callable_t(&self) -> Self::T;
+}
+impl<U: ?Sized> CallableT<U> for () {
+ type T = fn(&U);
+ fn callable_t(&self) -> Self::T {
+ fn f<U: ?Sized>(_: &U) {}
+ f::<U>
+ }
+}
+impl<U: ?Sized> CallableT<U> for i32 {
+ type T = <() as CallableT<U>>::T;
+ fn callable_t(&self) -> Self::T {
+ ().callable_t()
+ }
+}
+
+fn f_str(_: &str) {}
+fn f_string(_: &String) {}
+fn f_t<T>(_: T) {}
+fn f_ref_t<T: ?Sized>(_: &T) {}
+
+fn f_str_t<T>(_: &str, _: T) {}
+
+fn f_box_t<T>(_: &Box<T>) {}
+
+extern "C" {
+ fn var(_: u32, ...);
+}
+
+fn main() {
+ let s = String::new();
+
+ let _: &str = &*s;
+ let _ = &*s; // Don't lint. Inferred type would change.
+ let _: &_ = &*s; // Don't lint. Inferred type would change.
+
+ f_str(&*s);
+ f_t(&*s); // Don't lint. Inferred type would change.
+ f_ref_t(&*s); // Don't lint. Inferred type would change.
+
+ f_str_t(&*s, &*s); // Don't lint second param.
+
+ let b = Box::new(Box::new(Box::new(5)));
+ let _: &Box<i32> = &**b;
+ let _: &Box<_> = &**b; // Don't lint. Inferred type would change.
+
+ f_box_t(&**b); // Don't lint. Inferred type would change.
+
+ let c = |_x: &str| ();
+ c(&*s);
+
+ let c = |_x| ();
+ c(&*s); // Don't lint. Inferred type would change.
+
+ fn _f(x: &String) -> &str {
+ &**x
+ }
+
+ fn _f1(x: &String) -> &str {
+ { &**x }
+ }
+
+ fn _f2(x: &String) -> &str {
+ &**{ x }
+ }
+
+ fn _f3(x: &Box<Box<Box<i32>>>) -> &Box<i32> {
+ &***x
+ }
+
+ fn _f4(
+ x: String,
+ f1: impl Fn(&str),
+ f2: &dyn Fn(&str),
+ f3: fn(&str),
+ f4: impl CallableStr,
+ f5: <() as CallableStr>::T,
+ f6: <i32 as CallableStr>::T,
+ f7: &dyn CallableStr<T = fn(&str)>,
+ f8: impl CallableT<str>,
+ f9: <() as CallableT<str>>::T,
+ f10: <i32 as CallableT<str>>::T,
+ f11: &dyn CallableT<str, T = fn(&str)>,
+ ) {
+ f1(&*x);
+ f2(&*x);
+ f3(&*x);
+ f4.callable_str()(&*x);
+ f5(&*x);
+ f6(&*x);
+ f7.callable_str()(&*x);
+ f8.callable_t()(&*x);
+ f9(&*x);
+ f10(&*x);
+ f11.callable_t()(&*x);
+ }
+
+ struct S1<'a>(&'a str);
+ let _ = S1(&*s);
+
+ struct S2<'a> {
+ s: &'a str,
+ }
+ let _ = S2 { s: &*s };
+
+ struct S3<'a, T: ?Sized>(&'a T);
+ let _ = S3(&*s); // Don't lint. Inferred type would change.
+
+ struct S4<'a, T: ?Sized> {
+ s: &'a T,
+ }
+ let _ = S4 { s: &*s }; // Don't lint. Inferred type would change.
+
+ enum E1<'a> {
+ S1(&'a str),
+ S2 { s: &'a str },
+ }
+ impl<'a> E1<'a> {
+ fn m1(s: &'a String) {
+ let _ = Self::S1(&**s);
+ let _ = Self::S2 { s: &**s };
+ }
+ }
+ let _ = E1::S1(&*s);
+ let _ = E1::S2 { s: &*s };
+
+ enum E2<'a, T: ?Sized> {
+ S1(&'a T),
+ S2 { s: &'a T },
+ }
+ let _ = E2::S1(&*s); // Don't lint. Inferred type would change.
+ let _ = E2::S2 { s: &*s }; // Don't lint. Inferred type would change.
+
+ let ref_s = &s;
+ let _: &String = &*ref_s; // Don't lint reborrow.
+ f_string(&*ref_s); // Don't lint reborrow.
+
+ struct S5 {
+ foo: u32,
+ }
+ let b = Box::new(Box::new(S5 { foo: 5 }));
+ let _ = b.foo;
+ let _ = (*b).foo;
+ let _ = (**b).foo;
+
+ struct S6 {
+ foo: S5,
+ }
+ impl core::ops::Deref for S6 {
+ type Target = S5;
+ fn deref(&self) -> &Self::Target {
+ &self.foo
+ }
+ }
+ let s6 = S6 { foo: S5 { foo: 5 } };
+ let _ = (*s6).foo; // Don't lint. `S6` also has a field named `foo`
+
+ let ref_str = &"foo";
+ let _ = f_str(*ref_str);
+ let ref_ref_str = &ref_str;
+ let _ = f_str(**ref_ref_str);
+
+ fn _f5(x: &u32) -> u32 {
+ if true {
+ *x
+ } else {
+ return *x;
+ }
+ }
+
+ f_str(&&*ref_str); // `needless_borrow` will suggest removing both references
+ f_str(&&**ref_str); // `needless_borrow` will suggest removing only one reference
+
+ let x = &&40;
+ unsafe {
+ var(0, &**x);
+ }
+
+ let s = &"str";
+ let _ = || return *s;
+ let _ = || -> &'static str { return *s };
+}
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
new file mode 100644
index 000000000..92765307e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
@@ -0,0 +1,202 @@
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:69:20
+ |
+LL | let _: &str = &*s;
+ | ^^ help: try this: `s`
+ |
+ = note: `-D clippy::explicit-auto-deref` implied by `-D warnings`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:73:12
+ |
+LL | f_str(&*s);
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:77:14
+ |
+LL | f_str_t(&*s, &*s); // Don't lint second param.
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:80:25
+ |
+LL | let _: &Box<i32> = &**b;
+ | ^^^ help: try this: `b`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:86:8
+ |
+LL | c(&*s);
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:92:9
+ |
+LL | &**x
+ | ^^^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:96:11
+ |
+LL | { &**x }
+ | ^^^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:100:9
+ |
+LL | &**{ x }
+ | ^^^^^^^^ help: try this: `{ x }`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:104:9
+ |
+LL | &***x
+ | ^^^^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:121:13
+ |
+LL | f1(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:122:13
+ |
+LL | f2(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:123:13
+ |
+LL | f3(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:124:28
+ |
+LL | f4.callable_str()(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:125:13
+ |
+LL | f5(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:126:13
+ |
+LL | f6(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:127:28
+ |
+LL | f7.callable_str()(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:128:26
+ |
+LL | f8.callable_t()(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:129:13
+ |
+LL | f9(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:130:14
+ |
+LL | f10(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:131:27
+ |
+LL | f11.callable_t()(&*x);
+ | ^^ help: try this: `x`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:135:17
+ |
+LL | let _ = S1(&*s);
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:140:22
+ |
+LL | let _ = S2 { s: &*s };
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:156:30
+ |
+LL | let _ = Self::S1(&**s);
+ | ^^^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:157:35
+ |
+LL | let _ = Self::S2 { s: &**s };
+ | ^^^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:160:21
+ |
+LL | let _ = E1::S1(&*s);
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:161:26
+ |
+LL | let _ = E1::S2 { s: &*s };
+ | ^^ help: try this: `s`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:179:13
+ |
+LL | let _ = (*b).foo;
+ | ^^^^ help: try this: `b`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:180:13
+ |
+LL | let _ = (**b).foo;
+ | ^^^^^ help: try this: `b`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:195:19
+ |
+LL | let _ = f_str(*ref_str);
+ | ^^^^^^^^ help: try this: `ref_str`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:197:19
+ |
+LL | let _ = f_str(**ref_ref_str);
+ | ^^^^^^^^^^^^^ help: try this: `ref_ref_str`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:207:13
+ |
+LL | f_str(&&*ref_str); // `needless_borrow` will suggest removing both references
+ | ^^^^^^^^ help: try this: `ref_str`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:208:12
+ |
+LL | f_str(&&**ref_str); // `needless_borrow` will suggest removing only one reference
+ | ^^^^^^^^^^ help: try this: `ref_str`
+
+error: deref which would be done by auto-deref
+ --> $DIR/explicit_auto_deref.rs:217:41
+ |
+LL | let _ = || -> &'static str { return *s };
+ | ^^ help: try this: `s`
+
+error: aborting due to 33 previous errors
+
diff --git a/src/tools/clippy/tests/ui/explicit_counter_loop.rs b/src/tools/clippy/tests/ui/explicit_counter_loop.rs
new file mode 100644
index 000000000..aa966761f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_counter_loop.rs
@@ -0,0 +1,190 @@
+#![warn(clippy::explicit_counter_loop)]
+
+fn main() {
+ let mut vec = vec![1, 2, 3, 4];
+ let mut _index = 0;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 1;
+ _index = 0;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &mut vec {
+ _index += 1;
+ }
+
+ let mut _index = 0;
+ for _v in vec {
+ _index += 1;
+ }
+}
+
+mod issue_1219 {
+ pub fn test() {
+ // should not trigger the lint because variable is used after the loop #473
+ let vec = vec![1, 2, 3];
+ let mut index = 0;
+ for _v in &vec {
+ index += 1
+ }
+ println!("index: {}", index);
+
+ // should not trigger the lint because the count is conditional #1219
+ let text = "banana";
+ let mut count = 0;
+ for ch in text.chars() {
+ println!("{}", count);
+ if ch == 'a' {
+ continue;
+ }
+ count += 1;
+ }
+
+ // should not trigger the lint because the count is conditional
+ let text = "banana";
+ let mut count = 0;
+ for ch in text.chars() {
+ println!("{}", count);
+ if ch == 'a' {
+ count += 1;
+ }
+ }
+
+ // should trigger the lint because the count is not conditional
+ let text = "banana";
+ let mut count = 0;
+ for ch in text.chars() {
+ println!("{}", count);
+ count += 1;
+ if ch == 'a' {
+ continue;
+ }
+ }
+
+ // should trigger the lint because the count is not conditional
+ let text = "banana";
+ let mut count = 0;
+ for ch in text.chars() {
+ println!("{}", count);
+ count += 1;
+ for i in 0..2 {
+ let _ = 123;
+ }
+ }
+
+ // should not trigger the lint because the count is incremented multiple times
+ let text = "banana";
+ let mut count = 0;
+ for ch in text.chars() {
+ println!("{}", count);
+ count += 1;
+ for i in 0..2 {
+ count += 1;
+ }
+ }
+ }
+}
+
+mod issue_3308 {
+ pub fn test() {
+ // should not trigger the lint because the count is incremented multiple times
+ let mut skips = 0;
+ let erasures = vec![];
+ for i in 0..10 {
+ println!("{}", skips);
+ while erasures.contains(&(i + skips)) {
+ skips += 1;
+ }
+ }
+
+ // should not trigger the lint because the count is incremented multiple times
+ let mut skips = 0;
+ for i in 0..10 {
+ println!("{}", skips);
+ let mut j = 0;
+ while j < 5 {
+ skips += 1;
+ j += 1;
+ }
+ }
+
+ // should not trigger the lint because the count is incremented multiple times
+ let mut skips = 0;
+ for i in 0..10 {
+ println!("{}", skips);
+ for j in 0..5 {
+ skips += 1;
+ }
+ }
+ }
+}
+
+mod issue_1670 {
+ pub fn test() {
+ let mut count = 0;
+ for _i in 3..10 {
+ count += 1;
+ }
+ }
+}
+
+mod issue_4732 {
+ pub fn test() {
+ let slice = &[1, 2, 3];
+ let mut index = 0;
+
+ // should not trigger the lint because the count is used after the loop
+ for _v in slice {
+ index += 1
+ }
+ let _closure = || println!("index: {}", index);
+ }
+}
+
+mod issue_4677 {
+ pub fn test() {
+ let slice = &[1, 2, 3];
+
+ // should not trigger the lint because the count is used after incremented
+ let mut count = 0;
+ for _i in slice {
+ count += 1;
+ println!("{}", count);
+ }
+ }
+}
+
+mod issue_7920 {
+ pub fn test() {
+ let slice = &[1, 2, 3];
+
+ let index_usize: usize = 0;
+ let mut idx_usize: usize = 0;
+
+ // should suggest `enumerate`
+ for _item in slice {
+ if idx_usize == index_usize {
+ break;
+ }
+
+ idx_usize += 1;
+ }
+
+ let index_u32: u32 = 0;
+ let mut idx_u32: u32 = 0;
+
+ // should suggest `zip`
+ for _item in slice {
+ if idx_u32 == index_u32 {
+ break;
+ }
+
+ idx_u32 += 1;
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/explicit_counter_loop.stderr b/src/tools/clippy/tests/ui/explicit_counter_loop.stderr
new file mode 100644
index 000000000..f9f8407d5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_counter_loop.stderr
@@ -0,0 +1,60 @@
+error: the variable `_index` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:6:5
+ |
+LL | for _v in &vec {
+ | ^^^^^^^^^^^^^^ help: consider using: `for (_index, _v) in vec.iter().enumerate()`
+ |
+ = note: `-D clippy::explicit-counter-loop` implied by `-D warnings`
+
+error: the variable `_index` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:12:5
+ |
+LL | for _v in &vec {
+ | ^^^^^^^^^^^^^^ help: consider using: `for (_index, _v) in vec.iter().enumerate()`
+
+error: the variable `_index` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:17:5
+ |
+LL | for _v in &mut vec {
+ | ^^^^^^^^^^^^^^^^^^ help: consider using: `for (_index, _v) in vec.iter_mut().enumerate()`
+
+error: the variable `_index` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:22:5
+ |
+LL | for _v in vec {
+ | ^^^^^^^^^^^^^ help: consider using: `for (_index, _v) in vec.into_iter().enumerate()`
+
+error: the variable `count` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:61:9
+ |
+LL | for ch in text.chars() {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `for (count, ch) in text.chars().enumerate()`
+
+error: the variable `count` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:72:9
+ |
+LL | for ch in text.chars() {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `for (count, ch) in text.chars().enumerate()`
+
+error: the variable `count` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:130:9
+ |
+LL | for _i in 3..10 {
+ | ^^^^^^^^^^^^^^^ help: consider using: `for (count, _i) in (3..10).enumerate()`
+
+error: the variable `idx_usize` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:170:9
+ |
+LL | for _item in slice {
+ | ^^^^^^^^^^^^^^^^^^ help: consider using: `for (idx_usize, _item) in slice.iter().enumerate()`
+
+error: the variable `idx_u32` is used as a loop counter
+ --> $DIR/explicit_counter_loop.rs:182:9
+ |
+LL | for _item in slice {
+ | ^^^^^^^^^^^^^^^^^^ help: consider using: `for (idx_u32, _item) in (0_u32..).zip(slice.iter())`
+ |
+ = note: `idx_u32` is of type `u32`, making it ineligible for `Iterator::enumerate`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.fixed b/src/tools/clippy/tests/ui/explicit_deref_methods.fixed
new file mode 100644
index 000000000..523cae183
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_deref_methods.fixed
@@ -0,0 +1,101 @@
+// run-rustfix
+
+#![allow(
+ unused_variables,
+ clippy::clone_double_ref,
+ clippy::needless_borrow,
+ clippy::borrow_deref_ref,
+ clippy::explicit_auto_deref
+)]
+#![warn(clippy::explicit_deref_methods)]
+
+use std::ops::{Deref, DerefMut};
+
+fn concat(deref_str: &str) -> String {
+ format!("{}bar", deref_str)
+}
+
+fn just_return(deref_str: &str) -> &str {
+ deref_str
+}
+
+struct CustomVec(Vec<u8>);
+impl Deref for CustomVec {
+ type Target = Vec<u8>;
+
+ fn deref(&self) -> &Vec<u8> {
+ &self.0
+ }
+}
+
+fn main() {
+ let a: &mut String = &mut String::from("foo");
+
+ // these should require linting
+
+ let b: &str = &*a;
+
+ let b: &mut str = &mut **a;
+
+ // both derefs should get linted here
+ let b: String = format!("{}, {}", &*a, &*a);
+
+ println!("{}", &*a);
+
+ #[allow(clippy::match_single_binding)]
+ match &*a {
+ _ => (),
+ }
+
+ let b: String = concat(&*a);
+
+ let b = just_return(a);
+
+ let b: String = concat(just_return(a));
+
+ let b: &str = &**a;
+
+ let opt_a = Some(a.clone());
+ let b = &*opt_a.unwrap();
+
+ // following should not require linting
+
+ let cv = CustomVec(vec![0, 42]);
+ let c = cv.deref()[0];
+
+ let b: &str = &*a.deref();
+
+ let b: String = a.deref().clone();
+
+ let b: usize = a.deref_mut().len();
+
+ let b: &usize = &a.deref().len();
+
+ let b: &str = &*a;
+
+ let b: &mut str = &mut *a;
+
+ macro_rules! expr_deref {
+ ($body:expr) => {
+ $body.deref()
+ };
+ }
+ let b: &str = expr_deref!(a);
+
+ let b: &str = expr_deref!(&*a);
+
+ // The struct does not implement Deref trait
+ #[derive(Copy, Clone)]
+ struct NoLint(u32);
+ impl NoLint {
+ pub fn deref(self) -> u32 {
+ self.0
+ }
+ pub fn deref_mut(self) -> u32 {
+ self.0
+ }
+ }
+ let no_lint = NoLint(42);
+ let b = no_lint.deref();
+ let b = no_lint.deref_mut();
+}
diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.rs b/src/tools/clippy/tests/ui/explicit_deref_methods.rs
new file mode 100644
index 000000000..0bbc1ae57
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_deref_methods.rs
@@ -0,0 +1,101 @@
+// run-rustfix
+
+#![allow(
+ unused_variables,
+ clippy::clone_double_ref,
+ clippy::needless_borrow,
+ clippy::borrow_deref_ref,
+ clippy::explicit_auto_deref
+)]
+#![warn(clippy::explicit_deref_methods)]
+
+use std::ops::{Deref, DerefMut};
+
+fn concat(deref_str: &str) -> String {
+ format!("{}bar", deref_str)
+}
+
+fn just_return(deref_str: &str) -> &str {
+ deref_str
+}
+
+struct CustomVec(Vec<u8>);
+impl Deref for CustomVec {
+ type Target = Vec<u8>;
+
+ fn deref(&self) -> &Vec<u8> {
+ &self.0
+ }
+}
+
+fn main() {
+ let a: &mut String = &mut String::from("foo");
+
+ // these should require linting
+
+ let b: &str = a.deref();
+
+ let b: &mut str = a.deref_mut();
+
+ // both derefs should get linted here
+ let b: String = format!("{}, {}", a.deref(), a.deref());
+
+ println!("{}", a.deref());
+
+ #[allow(clippy::match_single_binding)]
+ match a.deref() {
+ _ => (),
+ }
+
+ let b: String = concat(a.deref());
+
+ let b = just_return(a).deref();
+
+ let b: String = concat(just_return(a).deref());
+
+ let b: &str = a.deref().deref();
+
+ let opt_a = Some(a.clone());
+ let b = opt_a.unwrap().deref();
+
+ // following should not require linting
+
+ let cv = CustomVec(vec![0, 42]);
+ let c = cv.deref()[0];
+
+ let b: &str = &*a.deref();
+
+ let b: String = a.deref().clone();
+
+ let b: usize = a.deref_mut().len();
+
+ let b: &usize = &a.deref().len();
+
+ let b: &str = &*a;
+
+ let b: &mut str = &mut *a;
+
+ macro_rules! expr_deref {
+ ($body:expr) => {
+ $body.deref()
+ };
+ }
+ let b: &str = expr_deref!(a);
+
+ let b: &str = expr_deref!(a.deref());
+
+ // The struct does not implement Deref trait
+ #[derive(Copy, Clone)]
+ struct NoLint(u32);
+ impl NoLint {
+ pub fn deref(self) -> u32 {
+ self.0
+ }
+ pub fn deref_mut(self) -> u32 {
+ self.0
+ }
+ }
+ let no_lint = NoLint(42);
+ let b = no_lint.deref();
+ let b = no_lint.deref_mut();
+}
diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.stderr b/src/tools/clippy/tests/ui/explicit_deref_methods.stderr
new file mode 100644
index 000000000..4b10ed137
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_deref_methods.stderr
@@ -0,0 +1,76 @@
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:36:19
+ |
+LL | let b: &str = a.deref();
+ | ^^^^^^^^^ help: try this: `&*a`
+ |
+ = note: `-D clippy::explicit-deref-methods` implied by `-D warnings`
+
+error: explicit `deref_mut` method call
+ --> $DIR/explicit_deref_methods.rs:38:23
+ |
+LL | let b: &mut str = a.deref_mut();
+ | ^^^^^^^^^^^^^ help: try this: `&mut **a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:41:39
+ |
+LL | let b: String = format!("{}, {}", a.deref(), a.deref());
+ | ^^^^^^^^^ help: try this: `&*a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:41:50
+ |
+LL | let b: String = format!("{}, {}", a.deref(), a.deref());
+ | ^^^^^^^^^ help: try this: `&*a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:43:20
+ |
+LL | println!("{}", a.deref());
+ | ^^^^^^^^^ help: try this: `&*a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:46:11
+ |
+LL | match a.deref() {
+ | ^^^^^^^^^ help: try this: `&*a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:50:28
+ |
+LL | let b: String = concat(a.deref());
+ | ^^^^^^^^^ help: try this: `&*a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:52:13
+ |
+LL | let b = just_return(a).deref();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `just_return(a)`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:54:28
+ |
+LL | let b: String = concat(just_return(a).deref());
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `just_return(a)`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:56:19
+ |
+LL | let b: &str = a.deref().deref();
+ | ^^^^^^^^^^^^^^^^^ help: try this: `&**a`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:59:13
+ |
+LL | let b = opt_a.unwrap().deref();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&*opt_a.unwrap()`
+
+error: explicit `deref` method call
+ --> $DIR/explicit_deref_methods.rs:85:31
+ |
+LL | let b: &str = expr_deref!(a.deref());
+ | ^^^^^^^^^ help: try this: `&*a`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/explicit_write.fixed b/src/tools/clippy/tests/ui/explicit_write.fixed
new file mode 100644
index 000000000..74d0e5290
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_write.fixed
@@ -0,0 +1,63 @@
+// run-rustfix
+#![allow(unused_imports)]
+#![warn(clippy::explicit_write)]
+
+fn stdout() -> String {
+ String::new()
+}
+
+fn stderr() -> String {
+ String::new()
+}
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ // these should warn
+ {
+ use std::io::Write;
+ print!("test");
+ eprint!("test");
+ println!("test");
+ eprintln!("test");
+ print!("test");
+ eprint!("test");
+
+ // including newlines
+ println!("test\ntest");
+ eprintln!("test\ntest");
+
+ let value = 1;
+ eprintln!("with {}", value);
+ eprintln!("with {} {}", 2, value);
+ eprintln!("with {value}");
+ eprintln!("macro arg {}", one!());
+ }
+ // these should not warn, different destination
+ {
+ use std::fmt::Write;
+ let mut s = String::new();
+ write!(s, "test").unwrap();
+ write!(s, "test").unwrap();
+ writeln!(s, "test").unwrap();
+ writeln!(s, "test").unwrap();
+ s.write_fmt(format_args!("test")).unwrap();
+ s.write_fmt(format_args!("test")).unwrap();
+ write!(stdout(), "test").unwrap();
+ write!(stderr(), "test").unwrap();
+ writeln!(stdout(), "test").unwrap();
+ writeln!(stderr(), "test").unwrap();
+ stdout().write_fmt(format_args!("test")).unwrap();
+ stderr().write_fmt(format_args!("test")).unwrap();
+ }
+ // these should not warn, no unwrap
+ {
+ use std::io::Write;
+ std::io::stdout().write_fmt(format_args!("test")).expect("no stdout");
+ std::io::stderr().write_fmt(format_args!("test")).expect("no stderr");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/explicit_write.rs b/src/tools/clippy/tests/ui/explicit_write.rs
new file mode 100644
index 000000000..e7a698d3e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_write.rs
@@ -0,0 +1,63 @@
+// run-rustfix
+#![allow(unused_imports)]
+#![warn(clippy::explicit_write)]
+
+fn stdout() -> String {
+ String::new()
+}
+
+fn stderr() -> String {
+ String::new()
+}
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ // these should warn
+ {
+ use std::io::Write;
+ write!(std::io::stdout(), "test").unwrap();
+ write!(std::io::stderr(), "test").unwrap();
+ writeln!(std::io::stdout(), "test").unwrap();
+ writeln!(std::io::stderr(), "test").unwrap();
+ std::io::stdout().write_fmt(format_args!("test")).unwrap();
+ std::io::stderr().write_fmt(format_args!("test")).unwrap();
+
+ // including newlines
+ writeln!(std::io::stdout(), "test\ntest").unwrap();
+ writeln!(std::io::stderr(), "test\ntest").unwrap();
+
+ let value = 1;
+ writeln!(std::io::stderr(), "with {}", value).unwrap();
+ writeln!(std::io::stderr(), "with {} {}", 2, value).unwrap();
+ writeln!(std::io::stderr(), "with {value}").unwrap();
+ writeln!(std::io::stderr(), "macro arg {}", one!()).unwrap();
+ }
+ // these should not warn, different destination
+ {
+ use std::fmt::Write;
+ let mut s = String::new();
+ write!(s, "test").unwrap();
+ write!(s, "test").unwrap();
+ writeln!(s, "test").unwrap();
+ writeln!(s, "test").unwrap();
+ s.write_fmt(format_args!("test")).unwrap();
+ s.write_fmt(format_args!("test")).unwrap();
+ write!(stdout(), "test").unwrap();
+ write!(stderr(), "test").unwrap();
+ writeln!(stdout(), "test").unwrap();
+ writeln!(stderr(), "test").unwrap();
+ stdout().write_fmt(format_args!("test")).unwrap();
+ stderr().write_fmt(format_args!("test")).unwrap();
+ }
+ // these should not warn, no unwrap
+ {
+ use std::io::Write;
+ std::io::stdout().write_fmt(format_args!("test")).expect("no stdout");
+ std::io::stderr().write_fmt(format_args!("test")).expect("no stderr");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/explicit_write.stderr b/src/tools/clippy/tests/ui/explicit_write.stderr
new file mode 100644
index 000000000..29ae0cdec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/explicit_write.stderr
@@ -0,0 +1,76 @@
+error: use of `write!(stdout(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:23:9
+ |
+LL | write!(std::io::stdout(), "test").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `print!("test")`
+ |
+ = note: `-D clippy::explicit-write` implied by `-D warnings`
+
+error: use of `write!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:24:9
+ |
+LL | write!(std::io::stderr(), "test").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprint!("test")`
+
+error: use of `writeln!(stdout(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:25:9
+ |
+LL | writeln!(std::io::stdout(), "test").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `println!("test")`
+
+error: use of `writeln!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:26:9
+ |
+LL | writeln!(std::io::stderr(), "test").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("test")`
+
+error: use of `stdout().write_fmt(...).unwrap()`
+ --> $DIR/explicit_write.rs:27:9
+ |
+LL | std::io::stdout().write_fmt(format_args!("test")).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `print!("test")`
+
+error: use of `stderr().write_fmt(...).unwrap()`
+ --> $DIR/explicit_write.rs:28:9
+ |
+LL | std::io::stderr().write_fmt(format_args!("test")).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprint!("test")`
+
+error: use of `writeln!(stdout(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:31:9
+ |
+LL | writeln!(std::io::stdout(), "test/ntest").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `println!("test/ntest")`
+
+error: use of `writeln!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:32:9
+ |
+LL | writeln!(std::io::stderr(), "test/ntest").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("test/ntest")`
+
+error: use of `writeln!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:35:9
+ |
+LL | writeln!(std::io::stderr(), "with {}", value).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {}", value)`
+
+error: use of `writeln!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:36:9
+ |
+LL | writeln!(std::io::stderr(), "with {} {}", 2, value).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {} {}", 2, value)`
+
+error: use of `writeln!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:37:9
+ |
+LL | writeln!(std::io::stderr(), "with {value}").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {value}")`
+
+error: use of `writeln!(stderr(), ...).unwrap()`
+ --> $DIR/explicit_write.rs:38:9
+ |
+LL | writeln!(std::io::stderr(), "macro arg {}", one!()).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("macro arg {}", one!())`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/extend_with_drain.fixed b/src/tools/clippy/tests/ui/extend_with_drain.fixed
new file mode 100644
index 000000000..71ebad24c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extend_with_drain.fixed
@@ -0,0 +1,60 @@
+// run-rustfix
+#![warn(clippy::extend_with_drain)]
+#![allow(clippy::iter_with_drain)]
+use std::collections::BinaryHeap;
+fn main() {
+ //gets linted
+ let mut vec1 = vec![0u8; 1024];
+ let mut vec2: std::vec::Vec<u8> = Vec::new();
+ vec2.append(&mut vec1);
+
+ let mut vec3 = vec![0u8; 1024];
+ let mut vec4: std::vec::Vec<u8> = Vec::new();
+
+ vec4.append(&mut vec3);
+
+ let mut vec11: std::vec::Vec<u8> = Vec::new();
+
+ vec11.append(&mut return_vector());
+
+ //won't get linted because it doesn't move the entire contents of one vec into another
+ let mut test1 = vec![0u8, 10];
+ let mut test2: std::vec::Vec<u8> = Vec::new();
+
+ test2.extend(test1.drain(4..10));
+
+ let mut vec3 = vec![0u8; 104];
+ let mut vec7: std::vec::Vec<u8> = Vec::new();
+
+ vec3.append(&mut vec7);
+
+ let mut vec5 = vec![0u8; 1024];
+ let mut vec6: std::vec::Vec<u8> = Vec::new();
+
+ vec5.extend(vec6.drain(..4));
+
+ let mut vec9: std::vec::Vec<u8> = Vec::new();
+
+ return_vector().append(&mut vec9);
+
+ //won't get linted because it is not a vec
+
+ let mut heap = BinaryHeap::from(vec![1, 3]);
+ let mut heap2 = BinaryHeap::from(vec![]);
+ heap2.extend(heap.drain());
+
+ let mut x = vec![0, 1, 2, 3, 5];
+ let ref_x = &mut x;
+ let mut y = Vec::new();
+ y.append(ref_x);
+}
+
+fn return_vector() -> Vec<u8> {
+ let mut new_vector = vec![];
+
+ for i in 1..10 {
+ new_vector.push(i)
+ }
+
+ new_vector
+}
diff --git a/src/tools/clippy/tests/ui/extend_with_drain.rs b/src/tools/clippy/tests/ui/extend_with_drain.rs
new file mode 100644
index 000000000..e9f011abb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extend_with_drain.rs
@@ -0,0 +1,60 @@
+// run-rustfix
+#![warn(clippy::extend_with_drain)]
+#![allow(clippy::iter_with_drain)]
+use std::collections::BinaryHeap;
+fn main() {
+ //gets linted
+ let mut vec1 = vec![0u8; 1024];
+ let mut vec2: std::vec::Vec<u8> = Vec::new();
+ vec2.extend(vec1.drain(..));
+
+ let mut vec3 = vec![0u8; 1024];
+ let mut vec4: std::vec::Vec<u8> = Vec::new();
+
+ vec4.extend(vec3.drain(..));
+
+ let mut vec11: std::vec::Vec<u8> = Vec::new();
+
+ vec11.extend(return_vector().drain(..));
+
+ //won't get linted because it doesn't move the entire contents of one vec into another
+ let mut test1 = vec![0u8, 10];
+ let mut test2: std::vec::Vec<u8> = Vec::new();
+
+ test2.extend(test1.drain(4..10));
+
+ let mut vec3 = vec![0u8; 104];
+ let mut vec7: std::vec::Vec<u8> = Vec::new();
+
+ vec3.append(&mut vec7);
+
+ let mut vec5 = vec![0u8; 1024];
+ let mut vec6: std::vec::Vec<u8> = Vec::new();
+
+ vec5.extend(vec6.drain(..4));
+
+ let mut vec9: std::vec::Vec<u8> = Vec::new();
+
+ return_vector().append(&mut vec9);
+
+ //won't get linted because it is not a vec
+
+ let mut heap = BinaryHeap::from(vec![1, 3]);
+ let mut heap2 = BinaryHeap::from(vec![]);
+ heap2.extend(heap.drain());
+
+ let mut x = vec![0, 1, 2, 3, 5];
+ let ref_x = &mut x;
+ let mut y = Vec::new();
+ y.extend(ref_x.drain(..));
+}
+
+fn return_vector() -> Vec<u8> {
+ let mut new_vector = vec![];
+
+ for i in 1..10 {
+ new_vector.push(i)
+ }
+
+ new_vector
+}
diff --git a/src/tools/clippy/tests/ui/extend_with_drain.stderr b/src/tools/clippy/tests/ui/extend_with_drain.stderr
new file mode 100644
index 000000000..da14ddb25
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extend_with_drain.stderr
@@ -0,0 +1,28 @@
+error: use of `extend` instead of `append` for adding the full range of a second vector
+ --> $DIR/extend_with_drain.rs:9:5
+ |
+LL | vec2.extend(vec1.drain(..));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec2.append(&mut vec1)`
+ |
+ = note: `-D clippy::extend-with-drain` implied by `-D warnings`
+
+error: use of `extend` instead of `append` for adding the full range of a second vector
+ --> $DIR/extend_with_drain.rs:14:5
+ |
+LL | vec4.extend(vec3.drain(..));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec4.append(&mut vec3)`
+
+error: use of `extend` instead of `append` for adding the full range of a second vector
+ --> $DIR/extend_with_drain.rs:18:5
+ |
+LL | vec11.extend(return_vector().drain(..));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec11.append(&mut return_vector())`
+
+error: use of `extend` instead of `append` for adding the full range of a second vector
+ --> $DIR/extend_with_drain.rs:49:5
+ |
+LL | y.extend(ref_x.drain(..));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `y.append(ref_x)`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/extra_unused_lifetimes.rs b/src/tools/clippy/tests/ui/extra_unused_lifetimes.rs
new file mode 100644
index 000000000..d6631e012
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extra_unused_lifetimes.rs
@@ -0,0 +1,129 @@
+// aux-build:proc_macro_derive.rs
+
+#![allow(
+ unused,
+ dead_code,
+ clippy::needless_lifetimes,
+ clippy::needless_pass_by_value,
+ clippy::needless_arbitrary_self_type
+)]
+#![warn(clippy::extra_unused_lifetimes)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+fn empty() {}
+
+fn used_lt<'a>(x: &'a u8) {}
+
+fn unused_lt<'a>(x: u8) {}
+
+fn unused_lt_transitive<'a, 'b: 'a>(x: &'b u8) {
+ // 'a is useless here since it's not directly bound
+}
+
+fn lt_return<'a, 'b: 'a>(x: &'b u8) -> &'a u8 {
+ panic!()
+}
+
+fn lt_return_only<'a>() -> &'a u8 {
+ panic!()
+}
+
+fn unused_lt_blergh<'a>(x: Option<Box<dyn Send + 'a>>) {}
+
+trait Foo<'a> {
+ fn x(&self, a: &'a u8);
+}
+
+impl<'a> Foo<'a> for u8 {
+ fn x(&self, a: &'a u8) {}
+}
+
+struct Bar;
+
+impl Bar {
+ fn x<'a>(&self) {}
+}
+
+// test for #489 (used lifetimes in bounds)
+pub fn parse<'a, I: Iterator<Item = &'a str>>(_it: &mut I) {
+ unimplemented!()
+}
+pub fn parse2<'a, I>(_it: &mut I)
+where
+ I: Iterator<Item = &'a str>,
+{
+ unimplemented!()
+}
+
+struct X {
+ x: u32,
+}
+
+impl X {
+ fn self_ref_with_lifetime<'a>(&'a self) {}
+ fn explicit_self_with_lifetime<'a>(self: &'a Self) {}
+}
+
+// Methods implementing traits must have matching lifetimes
+mod issue4291 {
+ trait BadTrait {
+ fn unused_lt<'a>(x: u8) {}
+ }
+
+ impl BadTrait for () {
+ fn unused_lt<'a>(_x: u8) {}
+ }
+}
+
+mod issue6437 {
+ pub struct Scalar;
+
+ impl<'a> std::ops::AddAssign<&Scalar> for &mut Scalar {
+ fn add_assign(&mut self, _rhs: &Scalar) {
+ unimplemented!();
+ }
+ }
+
+ impl<'b> Scalar {
+ pub fn something<'c>() -> Self {
+ Self
+ }
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/pull/8737#pullrequestreview-951268213
+mod first_case {
+ use serde::de::Visitor;
+ pub trait Expected {
+ fn fmt(&self, formatter: &mut std::fmt::Formatter);
+ }
+
+ impl<'de, T> Expected for T
+ where
+ T: Visitor<'de>,
+ {
+ fn fmt(&self, formatter: &mut std::fmt::Formatter) {}
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/pull/8737#pullrequestreview-951268213
+mod second_case {
+ pub trait Source {
+ fn hey();
+ }
+
+ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
+ fn hey() {}
+ }
+}
+
+// Should not lint
+#[derive(ExtraLifetimeDerive)]
+struct Human<'a> {
+ pub bones: i32,
+ pub name: &'a str,
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/extra_unused_lifetimes.stderr b/src/tools/clippy/tests/ui/extra_unused_lifetimes.stderr
new file mode 100644
index 000000000..26ebc3976
--- /dev/null
+++ b/src/tools/clippy/tests/ui/extra_unused_lifetimes.stderr
@@ -0,0 +1,40 @@
+error: this lifetime isn't used in the function definition
+ --> $DIR/extra_unused_lifetimes.rs:19:14
+ |
+LL | fn unused_lt<'a>(x: u8) {}
+ | ^^
+ |
+ = note: `-D clippy::extra-unused-lifetimes` implied by `-D warnings`
+
+error: this lifetime isn't used in the function definition
+ --> $DIR/extra_unused_lifetimes.rs:46:10
+ |
+LL | fn x<'a>(&self) {}
+ | ^^
+
+error: this lifetime isn't used in the function definition
+ --> $DIR/extra_unused_lifetimes.rs:72:22
+ |
+LL | fn unused_lt<'a>(x: u8) {}
+ | ^^
+
+error: this lifetime isn't used in the impl
+ --> $DIR/extra_unused_lifetimes.rs:83:10
+ |
+LL | impl<'a> std::ops::AddAssign<&Scalar> for &mut Scalar {
+ | ^^
+
+error: this lifetime isn't used in the impl
+ --> $DIR/extra_unused_lifetimes.rs:89:10
+ |
+LL | impl<'b> Scalar {
+ | ^^
+
+error: this lifetime isn't used in the function definition
+ --> $DIR/extra_unused_lifetimes.rs:90:26
+ |
+LL | pub fn something<'c>() -> Self {
+ | ^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/fallible_impl_from.rs b/src/tools/clippy/tests/ui/fallible_impl_from.rs
new file mode 100644
index 000000000..5d5af4e46
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fallible_impl_from.rs
@@ -0,0 +1,76 @@
+#![deny(clippy::fallible_impl_from)]
+
+// docs example
+struct Foo(i32);
+impl From<String> for Foo {
+ fn from(s: String) -> Self {
+ Foo(s.parse().unwrap())
+ }
+}
+
+struct Valid(Vec<u8>);
+
+impl<'a> From<&'a str> for Valid {
+ fn from(s: &'a str) -> Valid {
+ Valid(s.to_owned().into_bytes())
+ }
+}
+impl From<usize> for Valid {
+ fn from(i: usize) -> Valid {
+ Valid(Vec::with_capacity(i))
+ }
+}
+
+struct Invalid;
+
+impl From<usize> for Invalid {
+ fn from(i: usize) -> Invalid {
+ if i != 42 {
+ panic!();
+ }
+ Invalid
+ }
+}
+
+impl From<Option<String>> for Invalid {
+ fn from(s: Option<String>) -> Invalid {
+ let s = s.unwrap();
+ if !s.is_empty() {
+ panic!("42");
+ } else if s.parse::<u32>().unwrap() != 42 {
+ panic!("{:?}", s);
+ }
+ Invalid
+ }
+}
+
+trait ProjStrTrait {
+ type ProjString;
+}
+impl<T> ProjStrTrait for Box<T> {
+ type ProjString = String;
+}
+impl<'a> From<&'a mut <Box<u32> as ProjStrTrait>::ProjString> for Invalid {
+ fn from(s: &'a mut <Box<u32> as ProjStrTrait>::ProjString) -> Invalid {
+ if s.parse::<u32>().ok().unwrap() != 42 {
+ panic!("{:?}", s);
+ }
+ Invalid
+ }
+}
+
+struct Unreachable;
+
+impl From<String> for Unreachable {
+ fn from(s: String) -> Unreachable {
+ if s.is_empty() {
+ return Unreachable;
+ }
+ match s.chars().next() {
+ Some(_) => Unreachable,
+ None => unreachable!(), // do not lint the unreachable macro
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/fallible_impl_from.stderr b/src/tools/clippy/tests/ui/fallible_impl_from.stderr
new file mode 100644
index 000000000..d637dbce5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fallible_impl_from.stderr
@@ -0,0 +1,93 @@
+error: consider implementing `TryFrom` instead
+ --> $DIR/fallible_impl_from.rs:5:1
+ |
+LL | / impl From<String> for Foo {
+LL | | fn from(s: String) -> Self {
+LL | | Foo(s.parse().unwrap())
+LL | | }
+LL | | }
+ | |_^
+ |
+note: the lint level is defined here
+ --> $DIR/fallible_impl_from.rs:1:9
+ |
+LL | #![deny(clippy::fallible_impl_from)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: `From` is intended for infallible conversions only. Use `TryFrom` if there's a possibility for the conversion to fail
+note: potential failure(s)
+ --> $DIR/fallible_impl_from.rs:7:13
+ |
+LL | Foo(s.parse().unwrap())
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider implementing `TryFrom` instead
+ --> $DIR/fallible_impl_from.rs:26:1
+ |
+LL | / impl From<usize> for Invalid {
+LL | | fn from(i: usize) -> Invalid {
+LL | | if i != 42 {
+LL | | panic!();
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: `From` is intended for infallible conversions only. Use `TryFrom` if there's a possibility for the conversion to fail
+note: potential failure(s)
+ --> $DIR/fallible_impl_from.rs:29:13
+ |
+LL | panic!();
+ | ^^^^^^^^
+ = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: consider implementing `TryFrom` instead
+ --> $DIR/fallible_impl_from.rs:35:1
+ |
+LL | / impl From<Option<String>> for Invalid {
+LL | | fn from(s: Option<String>) -> Invalid {
+LL | | let s = s.unwrap();
+LL | | if !s.is_empty() {
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: `From` is intended for infallible conversions only. Use `TryFrom` if there's a possibility for the conversion to fail
+note: potential failure(s)
+ --> $DIR/fallible_impl_from.rs:37:17
+ |
+LL | let s = s.unwrap();
+ | ^^^^^^^^^^
+LL | if !s.is_empty() {
+LL | panic!("42");
+ | ^^^^^^^^^^^^
+LL | } else if s.parse::<u32>().unwrap() != 42 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | panic!("{:?}", s);
+ | ^^^^^^^^^^^^^^^^^
+ = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: consider implementing `TryFrom` instead
+ --> $DIR/fallible_impl_from.rs:53:1
+ |
+LL | / impl<'a> From<&'a mut <Box<u32> as ProjStrTrait>::ProjString> for Invalid {
+LL | | fn from(s: &'a mut <Box<u32> as ProjStrTrait>::ProjString) -> Invalid {
+LL | | if s.parse::<u32>().ok().unwrap() != 42 {
+LL | | panic!("{:?}", s);
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+ = help: `From` is intended for infallible conversions only. Use `TryFrom` if there's a possibility for the conversion to fail
+note: potential failure(s)
+ --> $DIR/fallible_impl_from.rs:55:12
+ |
+LL | if s.parse::<u32>().ok().unwrap() != 42 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | panic!("{:?}", s);
+ | ^^^^^^^^^^^^^^^^^
+ = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/field_reassign_with_default.rs b/src/tools/clippy/tests/ui/field_reassign_with_default.rs
new file mode 100644
index 000000000..7367910ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/field_reassign_with_default.rs
@@ -0,0 +1,249 @@
+// aux-build:proc_macro_derive.rs
+// aux-build:macro_rules.rs
+
+#![warn(clippy::field_reassign_with_default)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+#[macro_use]
+extern crate macro_rules;
+
+// Don't lint on derives that derive `Default`
+// See https://github.com/rust-lang/rust-clippy/issues/6545
+#[derive(FieldReassignWithDefault)]
+struct DerivedStruct;
+
+#[derive(Default)]
+struct A {
+ i: i32,
+ j: i64,
+}
+
+struct B {
+ i: i32,
+ j: i64,
+}
+
+#[derive(Default)]
+struct C {
+ i: Vec<i32>,
+ j: i64,
+}
+
+#[derive(Default)]
+struct D {
+ a: Option<i32>,
+ b: Option<i32>,
+}
+
+macro_rules! m {
+ ($key:ident: $value:tt) => {{
+ let mut data = $crate::D::default();
+ data.$key = Some($value);
+ data
+ }};
+}
+
+/// Implements .next() that returns a different number each time.
+struct SideEffect(i32);
+
+impl SideEffect {
+ fn new() -> SideEffect {
+ SideEffect(0)
+ }
+ fn next(&mut self) -> i32 {
+ self.0 += 1;
+ self.0
+ }
+}
+
+fn main() {
+ // wrong, produces first error in stderr
+ let mut a: A = Default::default();
+ a.i = 42;
+
+ // right
+ let mut a: A = Default::default();
+
+ // right
+ let a = A {
+ i: 42,
+ ..Default::default()
+ };
+
+ // right
+ let mut a: A = Default::default();
+ if a.i == 0 {
+ a.j = 12;
+ }
+
+ // right
+ let mut a: A = Default::default();
+ let b = 5;
+
+ // right
+ let mut b = 32;
+ let mut a: A = Default::default();
+ b = 2;
+
+ // right
+ let b: B = B { i: 42, j: 24 };
+
+ // right
+ let mut b: B = B { i: 42, j: 24 };
+ b.i = 52;
+
+ // right
+ let mut b = B { i: 15, j: 16 };
+ let mut a: A = Default::default();
+ b.i = 2;
+
+ // wrong, produces second error in stderr
+ let mut a: A = Default::default();
+ a.j = 43;
+ a.i = 42;
+
+ // wrong, produces third error in stderr
+ let mut a: A = Default::default();
+ a.i = 42;
+ a.j = 43;
+ a.j = 44;
+
+ // wrong, produces fourth error in stderr
+ let mut a = A::default();
+ a.i = 42;
+
+ // wrong, but does not produce an error in stderr, because we can't produce a correct kind of
+ // suggestion with the current implementation
+ let mut c: (i32, i32) = Default::default();
+ c.0 = 42;
+ c.1 = 21;
+
+ // wrong, produces the fifth error in stderr
+ let mut a: A = Default::default();
+ a.i = Default::default();
+
+ // wrong, produces the sixth error in stderr
+ let mut a: A = Default::default();
+ a.i = Default::default();
+ a.j = 45;
+
+ // right, because an assignment refers to another field
+ let mut x = A::default();
+ x.i = 42;
+ x.j = 21 + x.i as i64;
+
+ // right, we bail out if there's a reassignment to the same variable, since there is a risk of
+ // side-effects affecting the outcome
+ let mut x = A::default();
+ let mut side_effect = SideEffect::new();
+ x.i = side_effect.next();
+ x.j = 2;
+ x.i = side_effect.next();
+
+ // don't lint - some private fields
+ let mut x = m::F::default();
+ x.a = 1;
+
+ // don't expand macros in the suggestion (#6522)
+ let mut a: C = C::default();
+ a.i = vec![1];
+
+ // Don't lint in external macros
+ field_reassign_with_default!();
+
+ // be sure suggestion is correct with generics
+ let mut a: Wrapper<bool> = Default::default();
+ a.i = true;
+
+ let mut a: WrapperMulti<i32, i64> = Default::default();
+ a.i = 42;
+
+ // Don't lint in macros
+ m! {
+ a: 42
+ };
+}
+
+mod m {
+ #[derive(Default)]
+ pub struct F {
+ pub a: u64,
+ b: u64,
+ }
+}
+
+#[derive(Default)]
+struct Wrapper<T> {
+ i: T,
+}
+
+#[derive(Default)]
+struct WrapperMulti<T, U> {
+ i: T,
+ j: U,
+}
+
+mod issue6312 {
+ use std::sync::atomic::AtomicBool;
+ use std::sync::Arc;
+
+ // do not lint: type implements `Drop` but not all fields are `Copy`
+ #[derive(Clone, Default)]
+ pub struct ImplDropNotAllCopy {
+ name: String,
+ delay_data_sync: Arc<AtomicBool>,
+ }
+
+ impl Drop for ImplDropNotAllCopy {
+ fn drop(&mut self) {
+ self.close()
+ }
+ }
+
+ impl ImplDropNotAllCopy {
+ fn new(name: &str) -> Self {
+ let mut f = ImplDropNotAllCopy::default();
+ f.name = name.to_owned();
+ f
+ }
+ fn close(&self) {}
+ }
+
+ // lint: type implements `Drop` and all fields are `Copy`
+ #[derive(Clone, Default)]
+ pub struct ImplDropAllCopy {
+ name: usize,
+ delay_data_sync: bool,
+ }
+
+ impl Drop for ImplDropAllCopy {
+ fn drop(&mut self) {
+ self.close()
+ }
+ }
+
+ impl ImplDropAllCopy {
+ fn new(name: &str) -> Self {
+ let mut f = ImplDropAllCopy::default();
+ f.name = name.len();
+ f
+ }
+ fn close(&self) {}
+ }
+
+ // lint: type does not implement `Drop` though all fields are `Copy`
+ #[derive(Clone, Default)]
+ pub struct NoDropAllCopy {
+ name: usize,
+ delay_data_sync: bool,
+ }
+
+ impl NoDropAllCopy {
+ fn new(name: &str) -> Self {
+ let mut f = NoDropAllCopy::default();
+ f.name = name.len();
+ f
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/field_reassign_with_default.stderr b/src/tools/clippy/tests/ui/field_reassign_with_default.stderr
new file mode 100644
index 000000000..3ce4b91a5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/field_reassign_with_default.stderr
@@ -0,0 +1,135 @@
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:63:5
+ |
+LL | a.i = 42;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::field-reassign-with-default` implied by `-D warnings`
+note: consider initializing the variable with `main::A { i: 42, ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:62:5
+ |
+LL | let mut a: A = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:103:5
+ |
+LL | a.j = 43;
+ | ^^^^^^^^^
+ |
+note: consider initializing the variable with `main::A { j: 43, i: 42 }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:102:5
+ |
+LL | let mut a: A = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:108:5
+ |
+LL | a.i = 42;
+ | ^^^^^^^^^
+ |
+note: consider initializing the variable with `main::A { i: 42, j: 44 }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:107:5
+ |
+LL | let mut a: A = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:114:5
+ |
+LL | a.i = 42;
+ | ^^^^^^^^^
+ |
+note: consider initializing the variable with `main::A { i: 42, ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:113:5
+ |
+LL | let mut a = A::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:124:5
+ |
+LL | a.i = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: consider initializing the variable with `main::A { i: Default::default(), ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:123:5
+ |
+LL | let mut a: A = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:128:5
+ |
+LL | a.i = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: consider initializing the variable with `main::A { i: Default::default(), j: 45 }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:127:5
+ |
+LL | let mut a: A = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:150:5
+ |
+LL | a.i = vec![1];
+ | ^^^^^^^^^^^^^^
+ |
+note: consider initializing the variable with `C { i: vec![1], ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:149:5
+ |
+LL | let mut a: C = C::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:157:5
+ |
+LL | a.i = true;
+ | ^^^^^^^^^^^
+ |
+note: consider initializing the variable with `Wrapper::<bool> { i: true }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:156:5
+ |
+LL | let mut a: Wrapper<bool> = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:160:5
+ |
+LL | a.i = 42;
+ | ^^^^^^^^^
+ |
+note: consider initializing the variable with `WrapperMulti::<i32, i64> { i: 42, ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:159:5
+ |
+LL | let mut a: WrapperMulti<i32, i64> = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:229:13
+ |
+LL | f.name = name.len();
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+note: consider initializing the variable with `issue6312::ImplDropAllCopy { name: name.len(), ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:228:13
+ |
+LL | let mut f = ImplDropAllCopy::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: field assignment outside of initializer for an instance created with Default::default()
+ --> $DIR/field_reassign_with_default.rs:245:13
+ |
+LL | f.name = name.len();
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+note: consider initializing the variable with `issue6312::NoDropAllCopy { name: name.len(), ..Default::default() }` and removing relevant reassignments
+ --> $DIR/field_reassign_with_default.rs:244:13
+ |
+LL | let mut f = NoDropAllCopy::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/filetype_is_file.rs b/src/tools/clippy/tests/ui/filetype_is_file.rs
new file mode 100644
index 000000000..5de8fe8cd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filetype_is_file.rs
@@ -0,0 +1,23 @@
+#![warn(clippy::filetype_is_file)]
+
+fn main() -> std::io::Result<()> {
+ use std::fs;
+ use std::ops::BitOr;
+
+ // !filetype.is_dir()
+ if fs::metadata("foo.txt")?.file_type().is_file() {
+ // read file
+ }
+
+ // positive of filetype.is_dir()
+ if !fs::metadata("foo.txt")?.file_type().is_file() {
+ // handle dir
+ }
+
+ // false positive of filetype.is_dir()
+ if !fs::metadata("foo.txt")?.file_type().is_file().bitor(true) {
+ // ...
+ }
+
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/filetype_is_file.stderr b/src/tools/clippy/tests/ui/filetype_is_file.stderr
new file mode 100644
index 000000000..cd1e3ac37
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filetype_is_file.stderr
@@ -0,0 +1,27 @@
+error: `FileType::is_file()` only covers regular files
+ --> $DIR/filetype_is_file.rs:8:8
+ |
+LL | if fs::metadata("foo.txt")?.file_type().is_file() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::filetype-is-file` implied by `-D warnings`
+ = help: use `!FileType::is_dir()` instead
+
+error: `!FileType::is_file()` only denies regular files
+ --> $DIR/filetype_is_file.rs:13:8
+ |
+LL | if !fs::metadata("foo.txt")?.file_type().is_file() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `FileType::is_dir()` instead
+
+error: `FileType::is_file()` only covers regular files
+ --> $DIR/filetype_is_file.rs:18:9
+ |
+LL | if !fs::metadata("foo.txt")?.file_type().is_file().bitor(true) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `!FileType::is_dir()` instead
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/filter_map_identity.fixed b/src/tools/clippy/tests/ui/filter_map_identity.fixed
new file mode 100644
index 000000000..a5860aa49
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_identity.fixed
@@ -0,0 +1,19 @@
+// run-rustfix
+
+#![allow(unused_imports, clippy::needless_return)]
+#![warn(clippy::filter_map_identity)]
+
+fn main() {
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.flatten();
+
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.flatten();
+
+ use std::convert::identity;
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.flatten();
+
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.flatten();
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_identity.rs b/src/tools/clippy/tests/ui/filter_map_identity.rs
new file mode 100644
index 000000000..7e998b9cd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_identity.rs
@@ -0,0 +1,19 @@
+// run-rustfix
+
+#![allow(unused_imports, clippy::needless_return)]
+#![warn(clippy::filter_map_identity)]
+
+fn main() {
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.filter_map(|x| x);
+
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.filter_map(std::convert::identity);
+
+ use std::convert::identity;
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.filter_map(identity);
+
+ let iterator = vec![Some(1), None, Some(2)].into_iter();
+ let _ = iterator.filter_map(|x| return x);
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_identity.stderr b/src/tools/clippy/tests/ui/filter_map_identity.stderr
new file mode 100644
index 000000000..43c9fdca4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_identity.stderr
@@ -0,0 +1,28 @@
+error: use of `filter_map` with an identity function
+ --> $DIR/filter_map_identity.rs:8:22
+ |
+LL | let _ = iterator.filter_map(|x| x);
+ | ^^^^^^^^^^^^^^^^^ help: try: `flatten()`
+ |
+ = note: `-D clippy::filter-map-identity` implied by `-D warnings`
+
+error: use of `filter_map` with an identity function
+ --> $DIR/filter_map_identity.rs:11:22
+ |
+LL | let _ = iterator.filter_map(std::convert::identity);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()`
+
+error: use of `filter_map` with an identity function
+ --> $DIR/filter_map_identity.rs:15:22
+ |
+LL | let _ = iterator.filter_map(identity);
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()`
+
+error: use of `filter_map` with an identity function
+ --> $DIR/filter_map_identity.rs:18:22
+ |
+LL | let _ = iterator.filter_map(|x| return x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/filter_map_next.rs b/src/tools/clippy/tests/ui/filter_map_next.rs
new file mode 100644
index 000000000..dbeb23543
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_next.rs
@@ -0,0 +1,17 @@
+#![warn(clippy::all, clippy::pedantic)]
+
+fn main() {
+ let a = ["1", "lol", "3", "NaN", "5"];
+
+ #[rustfmt::skip]
+ let _: Option<u32> = vec![1, 2, 3, 4, 5, 6]
+ .into_iter()
+ .filter_map(|x| {
+ if x == 2 {
+ Some(x * 2)
+ } else {
+ None
+ }
+ })
+ .next();
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_next.stderr b/src/tools/clippy/tests/ui/filter_map_next.stderr
new file mode 100644
index 000000000..ddc982c93
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_next.stderr
@@ -0,0 +1,17 @@
+error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find_map(..)` instead
+ --> $DIR/filter_map_next.rs:7:26
+ |
+LL | let _: Option<u32> = vec![1, 2, 3, 4, 5, 6]
+ | __________________________^
+LL | | .into_iter()
+LL | | .filter_map(|x| {
+LL | | if x == 2 {
+... |
+LL | | })
+LL | | .next();
+ | |_______________^
+ |
+ = note: `-D clippy::filter-map-next` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed b/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed
new file mode 100644
index 000000000..c3992d7e9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed
@@ -0,0 +1,10 @@
+// run-rustfix
+
+#![warn(clippy::all, clippy::pedantic)]
+
+fn main() {
+ let a = ["1", "lol", "3", "NaN", "5"];
+
+ let element: Option<i32> = a.iter().find_map(|s| s.parse().ok());
+ assert_eq!(element, Some(1));
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.rs b/src/tools/clippy/tests/ui/filter_map_next_fixable.rs
new file mode 100644
index 000000000..447219a96
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.rs
@@ -0,0 +1,10 @@
+// run-rustfix
+
+#![warn(clippy::all, clippy::pedantic)]
+
+fn main() {
+ let a = ["1", "lol", "3", "NaN", "5"];
+
+ let element: Option<i32> = a.iter().filter_map(|s| s.parse().ok()).next();
+ assert_eq!(element, Some(1));
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr
new file mode 100644
index 000000000..3bb062ffd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr
@@ -0,0 +1,10 @@
+error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find_map(..)` instead
+ --> $DIR/filter_map_next_fixable.rs:8:32
+ |
+LL | let element: Option<i32> = a.iter().filter_map(|s| s.parse().ok()).next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `a.iter().find_map(|s| s.parse().ok())`
+ |
+ = note: `-D clippy::filter-map-next` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/find_map.rs b/src/tools/clippy/tests/ui/find_map.rs
new file mode 100644
index 000000000..88d3b0e74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/find_map.rs
@@ -0,0 +1,33 @@
+#![warn(clippy::all, clippy::pedantic)]
+
+#[derive(Debug, Copy, Clone)]
+enum Flavor {
+ Chocolate,
+}
+
+#[derive(Debug, Copy, Clone)]
+enum Dessert {
+ Banana,
+ Pudding,
+ Cake(Flavor),
+}
+
+fn main() {
+ let desserts_of_the_week = vec![Dessert::Banana, Dessert::Cake(Flavor::Chocolate), Dessert::Pudding];
+
+ let a = ["lol", "NaN", "2", "5", "Xunda"];
+
+ let _: Option<i32> = a.iter().find(|s| s.parse::<i32>().is_ok()).map(|s| s.parse().unwrap());
+
+ #[allow(clippy::match_like_matches_macro)]
+ let _: Option<Flavor> = desserts_of_the_week
+ .iter()
+ .find(|dessert| match *dessert {
+ Dessert::Cake(_) => true,
+ _ => false,
+ })
+ .map(|dessert| match *dessert {
+ Dessert::Cake(ref flavor) => *flavor,
+ _ => unreachable!(),
+ });
+}
diff --git a/src/tools/clippy/tests/ui/flat_map_identity.fixed b/src/tools/clippy/tests/ui/flat_map_identity.fixed
new file mode 100644
index 000000000..1f4b880ef
--- /dev/null
+++ b/src/tools/clippy/tests/ui/flat_map_identity.fixed
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![allow(unused_imports, clippy::needless_return)]
+#![warn(clippy::flat_map_identity)]
+
+use std::convert;
+
+fn main() {
+ let iterator = [[0, 1], [2, 3], [4, 5]].iter();
+ let _ = iterator.flatten();
+
+ let iterator = [[0, 1], [2, 3], [4, 5]].iter();
+ let _ = iterator.flatten();
+
+ let iterator = [[0, 1], [2, 3], [4, 5]].iter();
+ let _ = iterator.flatten();
+}
diff --git a/src/tools/clippy/tests/ui/flat_map_identity.rs b/src/tools/clippy/tests/ui/flat_map_identity.rs
new file mode 100644
index 000000000..de14a06d4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/flat_map_identity.rs
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![allow(unused_imports, clippy::needless_return)]
+#![warn(clippy::flat_map_identity)]
+
+use std::convert;
+
+fn main() {
+ let iterator = [[0, 1], [2, 3], [4, 5]].iter();
+ let _ = iterator.flat_map(|x| x);
+
+ let iterator = [[0, 1], [2, 3], [4, 5]].iter();
+ let _ = iterator.flat_map(convert::identity);
+
+ let iterator = [[0, 1], [2, 3], [4, 5]].iter();
+ let _ = iterator.flat_map(|x| return x);
+}
diff --git a/src/tools/clippy/tests/ui/flat_map_identity.stderr b/src/tools/clippy/tests/ui/flat_map_identity.stderr
new file mode 100644
index 000000000..e776c9fdf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/flat_map_identity.stderr
@@ -0,0 +1,22 @@
+error: use of `flat_map` with an identity function
+ --> $DIR/flat_map_identity.rs:10:22
+ |
+LL | let _ = iterator.flat_map(|x| x);
+ | ^^^^^^^^^^^^^^^ help: try: `flatten()`
+ |
+ = note: `-D clippy::flat-map-identity` implied by `-D warnings`
+
+error: use of `flat_map` with an identity function
+ --> $DIR/flat_map_identity.rs:13:22
+ |
+LL | let _ = iterator.flat_map(convert::identity);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()`
+
+error: use of `flat_map` with an identity function
+ --> $DIR/flat_map_identity.rs:16:22
+ |
+LL | let _ = iterator.flat_map(|x| return x);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/flat_map_option.fixed b/src/tools/clippy/tests/ui/flat_map_option.fixed
new file mode 100644
index 000000000..6a34f0089
--- /dev/null
+++ b/src/tools/clippy/tests/ui/flat_map_option.fixed
@@ -0,0 +1,13 @@
+// run-rustfix
+#![warn(clippy::flat_map_option)]
+#![allow(clippy::redundant_closure, clippy::unnecessary_filter_map)]
+
+fn main() {
+ // yay
+ let c = |x| Some(x);
+ let _ = [1].iter().filter_map(c);
+ let _ = [1].iter().filter_map(Some);
+
+ // nay
+ let _ = [1].iter().flat_map(|_| &Some(1));
+}
diff --git a/src/tools/clippy/tests/ui/flat_map_option.rs b/src/tools/clippy/tests/ui/flat_map_option.rs
new file mode 100644
index 000000000..2479abddb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/flat_map_option.rs
@@ -0,0 +1,13 @@
+// run-rustfix
+#![warn(clippy::flat_map_option)]
+#![allow(clippy::redundant_closure, clippy::unnecessary_filter_map)]
+
+fn main() {
+ // yay
+ let c = |x| Some(x);
+ let _ = [1].iter().flat_map(c);
+ let _ = [1].iter().flat_map(Some);
+
+ // nay
+ let _ = [1].iter().flat_map(|_| &Some(1));
+}
diff --git a/src/tools/clippy/tests/ui/flat_map_option.stderr b/src/tools/clippy/tests/ui/flat_map_option.stderr
new file mode 100644
index 000000000..a9d8056de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/flat_map_option.stderr
@@ -0,0 +1,16 @@
+error: used `flat_map` where `filter_map` could be used instead
+ --> $DIR/flat_map_option.rs:8:24
+ |
+LL | let _ = [1].iter().flat_map(c);
+ | ^^^^^^^^ help: try: `filter_map`
+ |
+ = note: `-D clippy::flat-map-option` implied by `-D warnings`
+
+error: used `flat_map` where `filter_map` could be used instead
+ --> $DIR/flat_map_option.rs:9:24
+ |
+LL | let _ = [1].iter().flat_map(Some);
+ | ^^^^^^^^ help: try: `filter_map`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/float_arithmetic.rs b/src/tools/clippy/tests/ui/float_arithmetic.rs
new file mode 100644
index 000000000..60fa7569e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_arithmetic.rs
@@ -0,0 +1,52 @@
+#![warn(clippy::integer_arithmetic, clippy::float_arithmetic)]
+#![allow(
+ unused,
+ clippy::shadow_reuse,
+ clippy::shadow_unrelated,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::op_ref
+)]
+
+#[rustfmt::skip]
+fn main() {
+ let mut f = 1.0f32;
+
+ f * 2.0;
+
+ 1.0 + f;
+ f * 2.0;
+ f / 2.0;
+ f - 2.0 * 4.2;
+ -f;
+
+ f += 1.0;
+ f -= 1.0;
+ f *= 2.0;
+ f /= 2.0;
+}
+
+// also warn about floating point arith with references involved
+
+pub fn float_arith_ref() {
+ 3.1_f32 + &1.2_f32;
+ &3.4_f32 + 1.5_f32;
+ &3.5_f32 + &1.3_f32;
+}
+
+pub fn float_foo(f: &f32) -> f32 {
+ let a = 5.1;
+ a + f
+}
+
+pub fn float_bar(f1: &f32, f2: &f32) -> f32 {
+ f1 + f2
+}
+
+pub fn float_baz(f1: f32, f2: &f32) -> f32 {
+ f1 + f2
+}
+
+pub fn float_qux(f1: f32, f2: f32) -> f32 {
+ (&f1 + &f2)
+}
diff --git a/src/tools/clippy/tests/ui/float_arithmetic.stderr b/src/tools/clippy/tests/ui/float_arithmetic.stderr
new file mode 100644
index 000000000..1ceffb35b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_arithmetic.stderr
@@ -0,0 +1,106 @@
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:15:5
+ |
+LL | f * 2.0;
+ | ^^^^^^^
+ |
+ = note: `-D clippy::float-arithmetic` implied by `-D warnings`
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:17:5
+ |
+LL | 1.0 + f;
+ | ^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:18:5
+ |
+LL | f * 2.0;
+ | ^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:19:5
+ |
+LL | f / 2.0;
+ | ^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:20:5
+ |
+LL | f - 2.0 * 4.2;
+ | ^^^^^^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:21:5
+ |
+LL | -f;
+ | ^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:23:5
+ |
+LL | f += 1.0;
+ | ^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:24:5
+ |
+LL | f -= 1.0;
+ | ^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:25:5
+ |
+LL | f *= 2.0;
+ | ^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:26:5
+ |
+LL | f /= 2.0;
+ | ^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:32:5
+ |
+LL | 3.1_f32 + &1.2_f32;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:33:5
+ |
+LL | &3.4_f32 + 1.5_f32;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:34:5
+ |
+LL | &3.5_f32 + &1.3_f32;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:39:5
+ |
+LL | a + f
+ | ^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:43:5
+ |
+LL | f1 + f2
+ | ^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:47:5
+ |
+LL | f1 + f2
+ | ^^^^^^^
+
+error: floating-point arithmetic detected
+ --> $DIR/float_arithmetic.rs:51:5
+ |
+LL | (&f1 + &f2)
+ | ^^^^^^^^^^^
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/float_cmp.rs b/src/tools/clippy/tests/ui/float_cmp.rs
new file mode 100644
index 000000000..a34458b94
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_cmp.rs
@@ -0,0 +1,115 @@
+#![warn(clippy::float_cmp)]
+#![allow(
+ unused,
+ clippy::no_effect,
+ clippy::op_ref,
+ clippy::unnecessary_operation,
+ clippy::cast_lossless
+)]
+
+use std::ops::Add;
+
+const ZERO: f32 = 0.0;
+const ONE: f32 = ZERO + 1.0;
+
+fn twice<T>(x: T) -> T
+where
+ T: Add<T, Output = T> + Copy,
+{
+ x + x
+}
+
+fn eq_fl(x: f32, y: f32) -> bool {
+ if x.is_nan() { y.is_nan() } else { x == y } // no error, inside "eq" fn
+}
+
+fn fl_eq(x: f32, y: f32) -> bool {
+ if x.is_nan() { y.is_nan() } else { x == y } // no error, inside "eq" fn
+}
+
+struct X {
+ val: f32,
+}
+
+impl PartialEq for X {
+ fn eq(&self, o: &X) -> bool {
+ if self.val.is_nan() {
+ o.val.is_nan()
+ } else {
+ self.val == o.val // no error, inside "eq" fn
+ }
+ }
+}
+
+fn main() {
+ ZERO == 0f32; //no error, comparison with zero is ok
+ 1.0f32 != f32::INFINITY; // also comparison with infinity
+ 1.0f32 != f32::NEG_INFINITY; // and negative infinity
+ ZERO == 0.0; //no error, comparison with zero is ok
+ ZERO + ZERO != 1.0; //no error, comparison with zero is ok
+
+ ONE == 1f32;
+ ONE == 1.0 + 0.0;
+ ONE + ONE == ZERO + ONE + ONE;
+ ONE != 2.0;
+ ONE != 0.0; // no error, comparison with zero is ok
+ twice(ONE) != ONE;
+ ONE as f64 != 2.0;
+ ONE as f64 != 0.0; // no error, comparison with zero is ok
+
+ let x: f64 = 1.0;
+
+ x == 1.0;
+ x != 0f64; // no error, comparison with zero is ok
+
+ twice(x) != twice(ONE as f64);
+
+ x < 0.0; // no errors, lower or greater comparisons need no fuzziness
+ x > 0.0;
+ x <= 0.0;
+ x >= 0.0;
+
+ let xs: [f32; 1] = [0.0];
+ let a: *const f32 = xs.as_ptr();
+ let b: *const f32 = xs.as_ptr();
+
+ assert_eq!(a, b); // no errors
+
+ const ZERO_ARRAY: [f32; 2] = [0.0, 0.0];
+ const NON_ZERO_ARRAY: [f32; 2] = [0.0, 0.1];
+
+ let i = 0;
+ let j = 1;
+
+ ZERO_ARRAY[i] == NON_ZERO_ARRAY[j]; // ok, because lhs is zero regardless of i
+ NON_ZERO_ARRAY[i] == NON_ZERO_ARRAY[j];
+
+ let a1: [f32; 1] = [0.0];
+ let a2: [f32; 1] = [1.1];
+
+ a1 == a2;
+ a1[0] == a2[0];
+
+ // no errors - comparing signums is ok
+ let x32 = 3.21f32;
+ 1.23f32.signum() == x32.signum();
+ 1.23f32.signum() == -(x32.signum());
+ 1.23f32.signum() == 3.21f32.signum();
+
+ 1.23f32.signum() != x32.signum();
+ 1.23f32.signum() != -(x32.signum());
+ 1.23f32.signum() != 3.21f32.signum();
+
+ let x64 = 3.21f64;
+ 1.23f64.signum() == x64.signum();
+ 1.23f64.signum() == -(x64.signum());
+ 1.23f64.signum() == 3.21f64.signum();
+
+ 1.23f64.signum() != x64.signum();
+ 1.23f64.signum() != -(x64.signum());
+ 1.23f64.signum() != 3.21f64.signum();
+
+ // the comparison should also look through references
+ &0.0 == &ZERO;
+ &&&&0.0 == &&&&ZERO;
+}
diff --git a/src/tools/clippy/tests/ui/float_cmp.stderr b/src/tools/clippy/tests/ui/float_cmp.stderr
new file mode 100644
index 000000000..9cc1f1b75
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_cmp.stderr
@@ -0,0 +1,51 @@
+error: strict comparison of `f32` or `f64`
+ --> $DIR/float_cmp.rs:57:5
+ |
+LL | ONE as f64 != 2.0;
+ | ^^^^^^^^^^^^^^^^^ help: consider comparing them within some margin of error: `(ONE as f64 - 2.0).abs() > error_margin`
+ |
+ = note: `-D clippy::float-cmp` implied by `-D warnings`
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64`
+ --> $DIR/float_cmp.rs:62:5
+ |
+LL | x == 1.0;
+ | ^^^^^^^^ help: consider comparing them within some margin of error: `(x - 1.0).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64`
+ --> $DIR/float_cmp.rs:65:5
+ |
+LL | twice(x) != twice(ONE as f64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider comparing them within some margin of error: `(twice(x) - twice(ONE as f64)).abs() > error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64`
+ --> $DIR/float_cmp.rs:85:5
+ |
+LL | NON_ZERO_ARRAY[i] == NON_ZERO_ARRAY[j];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider comparing them within some margin of error: `(NON_ZERO_ARRAY[i] - NON_ZERO_ARRAY[j]).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` arrays
+ --> $DIR/float_cmp.rs:90:5
+ |
+LL | a1 == a2;
+ | ^^^^^^^^
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64`
+ --> $DIR/float_cmp.rs:91:5
+ |
+LL | a1[0] == a2[0];
+ | ^^^^^^^^^^^^^^ help: consider comparing them within some margin of error: `(a1[0] - a2[0]).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/float_cmp_const.rs b/src/tools/clippy/tests/ui/float_cmp_const.rs
new file mode 100644
index 000000000..86ce3bf3b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_cmp_const.rs
@@ -0,0 +1,58 @@
+// does not test any rustfixable lints
+
+#![warn(clippy::float_cmp_const)]
+#![allow(clippy::float_cmp)]
+#![allow(unused, clippy::no_effect, clippy::unnecessary_operation)]
+
+const ONE: f32 = 1.0;
+const TWO: f32 = 2.0;
+
+fn eq_one(x: f32) -> bool {
+ if x.is_nan() { false } else { x == ONE } // no error, inside "eq" fn
+}
+
+fn main() {
+ // has errors
+ 1f32 == ONE;
+ TWO == ONE;
+ TWO != ONE;
+ ONE + ONE == TWO;
+ let x = 1;
+ x as f32 == ONE;
+
+ let v = 0.9;
+ v == ONE;
+ v != ONE;
+
+ // no errors, lower than or greater than comparisons
+ v < ONE;
+ v > ONE;
+ v <= ONE;
+ v >= ONE;
+
+ // no errors, zero and infinity values
+ ONE != 0f32;
+ TWO == 0f32;
+ ONE != f32::INFINITY;
+ ONE == f32::NEG_INFINITY;
+
+ // no errors, but will warn clippy::float_cmp if '#![allow(clippy::float_cmp)]' above is removed
+ let w = 1.1;
+ v == w;
+ v != w;
+ v == 1.0;
+ v != 1.0;
+
+ const ZERO_ARRAY: [f32; 3] = [0.0, 0.0, 0.0];
+ const ZERO_INF_ARRAY: [f32; 3] = [0.0, f32::INFINITY, f32::NEG_INFINITY];
+ const NON_ZERO_ARRAY: [f32; 3] = [0.0, 0.1, 0.2];
+ const NON_ZERO_ARRAY2: [f32; 3] = [0.2, 0.1, 0.0];
+
+ // no errors, zero and infinity values
+ NON_ZERO_ARRAY[0] == NON_ZERO_ARRAY2[1]; // lhs is 0.0
+ ZERO_ARRAY == NON_ZERO_ARRAY; // lhs is all zeros
+ ZERO_INF_ARRAY == NON_ZERO_ARRAY; // lhs is all zeros or infinities
+
+ // has errors
+ NON_ZERO_ARRAY == NON_ZERO_ARRAY2;
+}
diff --git a/src/tools/clippy/tests/ui/float_cmp_const.stderr b/src/tools/clippy/tests/ui/float_cmp_const.stderr
new file mode 100644
index 000000000..d8182cf85
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_cmp_const.stderr
@@ -0,0 +1,67 @@
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:16:5
+ |
+LL | 1f32 == ONE;
+ | ^^^^^^^^^^^ help: consider comparing them within some margin of error: `(1f32 - ONE).abs() < error_margin`
+ |
+ = note: `-D clippy::float-cmp-const` implied by `-D warnings`
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:17:5
+ |
+LL | TWO == ONE;
+ | ^^^^^^^^^^ help: consider comparing them within some margin of error: `(TWO - ONE).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:18:5
+ |
+LL | TWO != ONE;
+ | ^^^^^^^^^^ help: consider comparing them within some margin of error: `(TWO - ONE).abs() > error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:19:5
+ |
+LL | ONE + ONE == TWO;
+ | ^^^^^^^^^^^^^^^^ help: consider comparing them within some margin of error: `(ONE + ONE - TWO).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:21:5
+ |
+LL | x as f32 == ONE;
+ | ^^^^^^^^^^^^^^^ help: consider comparing them within some margin of error: `(x as f32 - ONE).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:24:5
+ |
+LL | v == ONE;
+ | ^^^^^^^^ help: consider comparing them within some margin of error: `(v - ONE).abs() < error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant
+ --> $DIR/float_cmp_const.rs:25:5
+ |
+LL | v != ONE;
+ | ^^^^^^^^ help: consider comparing them within some margin of error: `(v - ONE).abs() > error_margin`
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: strict comparison of `f32` or `f64` constant arrays
+ --> $DIR/float_cmp_const.rs:57:5
+ |
+LL | NON_ZERO_ARRAY == NON_ZERO_ARRAY2;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `f32::EPSILON` and `f64::EPSILON` are available for the `error_margin`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/float_equality_without_abs.rs b/src/tools/clippy/tests/ui/float_equality_without_abs.rs
new file mode 100644
index 000000000..d40fa00c3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_equality_without_abs.rs
@@ -0,0 +1,31 @@
+#![warn(clippy::float_equality_without_abs)]
+
+pub fn is_roughly_equal(a: f32, b: f32) -> bool {
+ (a - b) < f32::EPSILON
+}
+
+pub fn main() {
+ // all errors
+ is_roughly_equal(1.0, 2.0);
+ let a = 0.05;
+ let b = 0.0500001;
+
+ let _ = (a - b) < f32::EPSILON;
+ let _ = a - b < f32::EPSILON;
+ let _ = a - b.abs() < f32::EPSILON;
+ let _ = (a as f64 - b as f64) < f64::EPSILON;
+ let _ = 1.0 - 2.0 < f32::EPSILON;
+
+ let _ = f32::EPSILON > (a - b);
+ let _ = f32::EPSILON > a - b;
+ let _ = f32::EPSILON > a - b.abs();
+ let _ = f64::EPSILON > (a as f64 - b as f64);
+ let _ = f32::EPSILON > 1.0 - 2.0;
+
+ // those are correct
+ let _ = (a - b).abs() < f32::EPSILON;
+ let _ = (a as f64 - b as f64).abs() < f64::EPSILON;
+
+ let _ = f32::EPSILON > (a - b).abs();
+ let _ = f64::EPSILON > (a as f64 - b as f64).abs();
+}
diff --git a/src/tools/clippy/tests/ui/float_equality_without_abs.stderr b/src/tools/clippy/tests/ui/float_equality_without_abs.stderr
new file mode 100644
index 000000000..b34c8159d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/float_equality_without_abs.stderr
@@ -0,0 +1,92 @@
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:4:5
+ |
+LL | (a - b) < f32::EPSILON
+ | -------^^^^^^^^^^^^^^^
+ | |
+ | help: add `.abs()`: `(a - b).abs()`
+ |
+ = note: `-D clippy::float-equality-without-abs` implied by `-D warnings`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:13:13
+ |
+LL | let _ = (a - b) < f32::EPSILON;
+ | -------^^^^^^^^^^^^^^^
+ | |
+ | help: add `.abs()`: `(a - b).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:14:13
+ |
+LL | let _ = a - b < f32::EPSILON;
+ | -----^^^^^^^^^^^^^^^
+ | |
+ | help: add `.abs()`: `(a - b).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:15:13
+ |
+LL | let _ = a - b.abs() < f32::EPSILON;
+ | -----------^^^^^^^^^^^^^^^
+ | |
+ | help: add `.abs()`: `(a - b.abs()).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:16:13
+ |
+LL | let _ = (a as f64 - b as f64) < f64::EPSILON;
+ | ---------------------^^^^^^^^^^^^^^^
+ | |
+ | help: add `.abs()`: `(a as f64 - b as f64).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:17:13
+ |
+LL | let _ = 1.0 - 2.0 < f32::EPSILON;
+ | ---------^^^^^^^^^^^^^^^
+ | |
+ | help: add `.abs()`: `(1.0 - 2.0).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:19:13
+ |
+LL | let _ = f32::EPSILON > (a - b);
+ | ^^^^^^^^^^^^^^^-------
+ | |
+ | help: add `.abs()`: `(a - b).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:20:13
+ |
+LL | let _ = f32::EPSILON > a - b;
+ | ^^^^^^^^^^^^^^^-----
+ | |
+ | help: add `.abs()`: `(a - b).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:21:13
+ |
+LL | let _ = f32::EPSILON > a - b.abs();
+ | ^^^^^^^^^^^^^^^-----------
+ | |
+ | help: add `.abs()`: `(a - b.abs()).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:22:13
+ |
+LL | let _ = f64::EPSILON > (a as f64 - b as f64);
+ | ^^^^^^^^^^^^^^^---------------------
+ | |
+ | help: add `.abs()`: `(a as f64 - b as f64).abs()`
+
+error: float equality check without `.abs()`
+ --> $DIR/float_equality_without_abs.rs:23:13
+ |
+LL | let _ = f32::EPSILON > 1.0 - 2.0;
+ | ^^^^^^^^^^^^^^^---------
+ | |
+ | help: add `.abs()`: `(1.0 - 2.0).abs()`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_abs.fixed b/src/tools/clippy/tests/ui/floating_point_abs.fixed
new file mode 100644
index 000000000..ca747fefc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_abs.fixed
@@ -0,0 +1,84 @@
+// run-rustfix
+#![feature(const_fn_floating_point_arithmetic)]
+#![warn(clippy::suboptimal_flops)]
+
+/// Allow suboptimal ops in constant context
+pub const fn in_const_context(num: f64) -> f64 {
+ if num >= 0.0 { num } else { -num }
+}
+
+struct A {
+ a: f64,
+ b: f64,
+}
+
+fn fake_abs1(num: f64) -> f64 {
+ num.abs()
+}
+
+fn fake_abs2(num: f64) -> f64 {
+ num.abs()
+}
+
+fn fake_abs3(a: A) -> f64 {
+ a.a.abs()
+}
+
+fn fake_abs4(num: f64) -> f64 {
+ num.abs()
+}
+
+fn fake_abs5(a: A) -> f64 {
+ a.a.abs()
+}
+
+fn fake_nabs1(num: f64) -> f64 {
+ -num.abs()
+}
+
+fn fake_nabs2(num: f64) -> f64 {
+ -num.abs()
+}
+
+fn fake_nabs3(a: A) -> A {
+ A {
+ a: -a.a.abs(),
+ b: a.b,
+ }
+}
+
+fn not_fake_abs1(num: f64) -> f64 {
+ if num > 0.0 { num } else { -num - 1f64 }
+}
+
+fn not_fake_abs2(num: f64) -> f64 {
+ if num > 0.0 { num + 1.0 } else { -(num + 1.0) }
+}
+
+fn not_fake_abs3(num1: f64, num2: f64) -> f64 {
+ if num1 > 0.0 { num2 } else { -num2 }
+}
+
+fn not_fake_abs4(a: A) -> f64 {
+ if a.a > 0.0 { a.b } else { -a.b }
+}
+
+fn not_fake_abs5(a: A) -> f64 {
+ if a.a > 0.0 { a.a } else { -a.b }
+}
+
+fn main() {
+ fake_abs1(5.0);
+ fake_abs2(5.0);
+ fake_abs3(A { a: 5.0, b: 5.0 });
+ fake_abs4(5.0);
+ fake_abs5(A { a: 5.0, b: 5.0 });
+ fake_nabs1(5.0);
+ fake_nabs2(5.0);
+ fake_nabs3(A { a: 5.0, b: 5.0 });
+ not_fake_abs1(5.0);
+ not_fake_abs2(5.0);
+ not_fake_abs3(5.0, 5.0);
+ not_fake_abs4(A { a: 5.0, b: 5.0 });
+ not_fake_abs5(A { a: 5.0, b: 5.0 });
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_abs.rs b/src/tools/clippy/tests/ui/floating_point_abs.rs
new file mode 100644
index 000000000..e4b606574
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_abs.rs
@@ -0,0 +1,84 @@
+// run-rustfix
+#![feature(const_fn_floating_point_arithmetic)]
+#![warn(clippy::suboptimal_flops)]
+
+/// Allow suboptimal ops in constant context
+pub const fn in_const_context(num: f64) -> f64 {
+ if num >= 0.0 { num } else { -num }
+}
+
+struct A {
+ a: f64,
+ b: f64,
+}
+
+fn fake_abs1(num: f64) -> f64 {
+ if num >= 0.0 { num } else { -num }
+}
+
+fn fake_abs2(num: f64) -> f64 {
+ if 0.0 < num { num } else { -num }
+}
+
+fn fake_abs3(a: A) -> f64 {
+ if a.a > 0.0 { a.a } else { -a.a }
+}
+
+fn fake_abs4(num: f64) -> f64 {
+ if 0.0 >= num { -num } else { num }
+}
+
+fn fake_abs5(a: A) -> f64 {
+ if a.a < 0.0 { -a.a } else { a.a }
+}
+
+fn fake_nabs1(num: f64) -> f64 {
+ if num < 0.0 { num } else { -num }
+}
+
+fn fake_nabs2(num: f64) -> f64 {
+ if 0.0 >= num { num } else { -num }
+}
+
+fn fake_nabs3(a: A) -> A {
+ A {
+ a: if a.a >= 0.0 { -a.a } else { a.a },
+ b: a.b,
+ }
+}
+
+fn not_fake_abs1(num: f64) -> f64 {
+ if num > 0.0 { num } else { -num - 1f64 }
+}
+
+fn not_fake_abs2(num: f64) -> f64 {
+ if num > 0.0 { num + 1.0 } else { -(num + 1.0) }
+}
+
+fn not_fake_abs3(num1: f64, num2: f64) -> f64 {
+ if num1 > 0.0 { num2 } else { -num2 }
+}
+
+fn not_fake_abs4(a: A) -> f64 {
+ if a.a > 0.0 { a.b } else { -a.b }
+}
+
+fn not_fake_abs5(a: A) -> f64 {
+ if a.a > 0.0 { a.a } else { -a.b }
+}
+
+fn main() {
+ fake_abs1(5.0);
+ fake_abs2(5.0);
+ fake_abs3(A { a: 5.0, b: 5.0 });
+ fake_abs4(5.0);
+ fake_abs5(A { a: 5.0, b: 5.0 });
+ fake_nabs1(5.0);
+ fake_nabs2(5.0);
+ fake_nabs3(A { a: 5.0, b: 5.0 });
+ not_fake_abs1(5.0);
+ not_fake_abs2(5.0);
+ not_fake_abs3(5.0, 5.0);
+ not_fake_abs4(A { a: 5.0, b: 5.0 });
+ not_fake_abs5(A { a: 5.0, b: 5.0 });
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_abs.stderr b/src/tools/clippy/tests/ui/floating_point_abs.stderr
new file mode 100644
index 000000000..db8290423
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_abs.stderr
@@ -0,0 +1,52 @@
+error: manual implementation of `abs` method
+ --> $DIR/floating_point_abs.rs:16:5
+ |
+LL | if num >= 0.0 { num } else { -num }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.abs()`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: manual implementation of `abs` method
+ --> $DIR/floating_point_abs.rs:20:5
+ |
+LL | if 0.0 < num { num } else { -num }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.abs()`
+
+error: manual implementation of `abs` method
+ --> $DIR/floating_point_abs.rs:24:5
+ |
+LL | if a.a > 0.0 { a.a } else { -a.a }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.a.abs()`
+
+error: manual implementation of `abs` method
+ --> $DIR/floating_point_abs.rs:28:5
+ |
+LL | if 0.0 >= num { -num } else { num }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.abs()`
+
+error: manual implementation of `abs` method
+ --> $DIR/floating_point_abs.rs:32:5
+ |
+LL | if a.a < 0.0 { -a.a } else { a.a }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.a.abs()`
+
+error: manual implementation of negation of `abs` method
+ --> $DIR/floating_point_abs.rs:36:5
+ |
+LL | if num < 0.0 { num } else { -num }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-num.abs()`
+
+error: manual implementation of negation of `abs` method
+ --> $DIR/floating_point_abs.rs:40:5
+ |
+LL | if 0.0 >= num { num } else { -num }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-num.abs()`
+
+error: manual implementation of negation of `abs` method
+ --> $DIR/floating_point_abs.rs:45:12
+ |
+LL | a: if a.a >= 0.0 { -a.a } else { a.a },
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-a.a.abs()`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_exp.fixed b/src/tools/clippy/tests/ui/floating_point_exp.fixed
new file mode 100644
index 000000000..ae7805fdf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_exp.fixed
@@ -0,0 +1,18 @@
+// run-rustfix
+#![warn(clippy::imprecise_flops)]
+
+fn main() {
+ let x = 2f32;
+ let _ = x.exp_m1();
+ let _ = x.exp_m1() + 2.0;
+ // Cases where the lint shouldn't be applied
+ let _ = x.exp() - 2.0;
+ let _ = x.exp() - 1.0 * 2.0;
+
+ let x = 2f64;
+ let _ = x.exp_m1();
+ let _ = x.exp_m1() + 2.0;
+ // Cases where the lint shouldn't be applied
+ let _ = x.exp() - 2.0;
+ let _ = x.exp() - 1.0 * 2.0;
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_exp.rs b/src/tools/clippy/tests/ui/floating_point_exp.rs
new file mode 100644
index 000000000..27e0b9bcb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_exp.rs
@@ -0,0 +1,18 @@
+// run-rustfix
+#![warn(clippy::imprecise_flops)]
+
+fn main() {
+ let x = 2f32;
+ let _ = x.exp() - 1.0;
+ let _ = x.exp() - 1.0 + 2.0;
+ // Cases where the lint shouldn't be applied
+ let _ = x.exp() - 2.0;
+ let _ = x.exp() - 1.0 * 2.0;
+
+ let x = 2f64;
+ let _ = x.exp() - 1.0;
+ let _ = x.exp() - 1.0 + 2.0;
+ // Cases where the lint shouldn't be applied
+ let _ = x.exp() - 2.0;
+ let _ = x.exp() - 1.0 * 2.0;
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_exp.stderr b/src/tools/clippy/tests/ui/floating_point_exp.stderr
new file mode 100644
index 000000000..5cd999ad4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_exp.stderr
@@ -0,0 +1,28 @@
+error: (e.pow(x) - 1) can be computed more accurately
+ --> $DIR/floating_point_exp.rs:6:13
+ |
+LL | let _ = x.exp() - 1.0;
+ | ^^^^^^^^^^^^^ help: consider using: `x.exp_m1()`
+ |
+ = note: `-D clippy::imprecise-flops` implied by `-D warnings`
+
+error: (e.pow(x) - 1) can be computed more accurately
+ --> $DIR/floating_point_exp.rs:7:13
+ |
+LL | let _ = x.exp() - 1.0 + 2.0;
+ | ^^^^^^^^^^^^^ help: consider using: `x.exp_m1()`
+
+error: (e.pow(x) - 1) can be computed more accurately
+ --> $DIR/floating_point_exp.rs:13:13
+ |
+LL | let _ = x.exp() - 1.0;
+ | ^^^^^^^^^^^^^ help: consider using: `x.exp_m1()`
+
+error: (e.pow(x) - 1) can be computed more accurately
+ --> $DIR/floating_point_exp.rs:14:13
+ |
+LL | let _ = x.exp() - 1.0 + 2.0;
+ | ^^^^^^^^^^^^^ help: consider using: `x.exp_m1()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_hypot.fixed b/src/tools/clippy/tests/ui/floating_point_hypot.fixed
new file mode 100644
index 000000000..bbe411b3f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_hypot.fixed
@@ -0,0 +1,14 @@
+// run-rustfix
+#![warn(clippy::imprecise_flops)]
+
+fn main() {
+ let x = 3f32;
+ let y = 4f32;
+ let _ = x.hypot(y);
+ let _ = (x + 1f32).hypot(y);
+ let _ = x.hypot(y);
+ // Cases where the lint shouldn't be applied
+ // TODO: linting this adds some complexity, but could be done
+ let _ = x.mul_add(x, y * y).sqrt();
+ let _ = (x * 4f32 + y * y).sqrt();
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_hypot.rs b/src/tools/clippy/tests/ui/floating_point_hypot.rs
new file mode 100644
index 000000000..586fd170e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_hypot.rs
@@ -0,0 +1,14 @@
+// run-rustfix
+#![warn(clippy::imprecise_flops)]
+
+fn main() {
+ let x = 3f32;
+ let y = 4f32;
+ let _ = (x * x + y * y).sqrt();
+ let _ = ((x + 1f32) * (x + 1f32) + y * y).sqrt();
+ let _ = (x.powi(2) + y.powi(2)).sqrt();
+ // Cases where the lint shouldn't be applied
+ // TODO: linting this adds some complexity, but could be done
+ let _ = x.mul_add(x, y * y).sqrt();
+ let _ = (x * 4f32 + y * y).sqrt();
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_hypot.stderr b/src/tools/clippy/tests/ui/floating_point_hypot.stderr
new file mode 100644
index 000000000..42069d9ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_hypot.stderr
@@ -0,0 +1,22 @@
+error: hypotenuse can be computed more accurately
+ --> $DIR/floating_point_hypot.rs:7:13
+ |
+LL | let _ = (x * x + y * y).sqrt();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.hypot(y)`
+ |
+ = note: `-D clippy::imprecise-flops` implied by `-D warnings`
+
+error: hypotenuse can be computed more accurately
+ --> $DIR/floating_point_hypot.rs:8:13
+ |
+LL | let _ = ((x + 1f32) * (x + 1f32) + y * y).sqrt();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x + 1f32).hypot(y)`
+
+error: hypotenuse can be computed more accurately
+ --> $DIR/floating_point_hypot.rs:9:13
+ |
+LL | let _ = (x.powi(2) + y.powi(2)).sqrt();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.hypot(y)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_log.fixed b/src/tools/clippy/tests/ui/floating_point_log.fixed
new file mode 100644
index 000000000..5b487bb8f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_log.fixed
@@ -0,0 +1,58 @@
+// run-rustfix
+#![allow(dead_code, clippy::double_parens)]
+#![warn(clippy::suboptimal_flops, clippy::imprecise_flops)]
+
+const TWO: f32 = 2.0;
+const E: f32 = std::f32::consts::E;
+
+fn check_log_base() {
+ let x = 1f32;
+ let _ = x.log2();
+ let _ = x.log10();
+ let _ = x.ln();
+ let _ = x.log2();
+ let _ = x.ln();
+
+ let x = 1f64;
+ let _ = x.log2();
+ let _ = x.log10();
+ let _ = x.ln();
+}
+
+fn check_ln1p() {
+ let x = 1f32;
+ let _ = 2.0f32.ln_1p();
+ let _ = 2.0f32.ln_1p();
+ let _ = x.ln_1p();
+ let _ = (x / 2.0).ln_1p();
+ let _ = x.powi(3).ln_1p();
+ let _ = (x.powi(3) / 2.0).ln_1p();
+ let _ = (std::f32::consts::E - 1.0).ln_1p();
+ let _ = x.ln_1p();
+ let _ = x.powi(3).ln_1p();
+ let _ = (x + 2.0).ln_1p();
+ let _ = (x / 2.0).ln_1p();
+ // Cases where the lint shouldn't be applied
+ let _ = (1.0 + x + 2.0).ln();
+ let _ = (x + 1.0 + 2.0).ln();
+ let _ = (x + 1.0 / 2.0).ln();
+ let _ = (1.0 + x - 2.0).ln();
+
+ let x = 1f64;
+ let _ = 2.0f64.ln_1p();
+ let _ = 2.0f64.ln_1p();
+ let _ = x.ln_1p();
+ let _ = (x / 2.0).ln_1p();
+ let _ = x.powi(3).ln_1p();
+ let _ = x.ln_1p();
+ let _ = x.powi(3).ln_1p();
+ let _ = (x + 2.0).ln_1p();
+ let _ = (x / 2.0).ln_1p();
+ // Cases where the lint shouldn't be applied
+ let _ = (1.0 + x + 2.0).ln();
+ let _ = (x + 1.0 + 2.0).ln();
+ let _ = (x + 1.0 / 2.0).ln();
+ let _ = (1.0 + x - 2.0).ln();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/floating_point_log.rs b/src/tools/clippy/tests/ui/floating_point_log.rs
new file mode 100644
index 000000000..01181484e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_log.rs
@@ -0,0 +1,58 @@
+// run-rustfix
+#![allow(dead_code, clippy::double_parens)]
+#![warn(clippy::suboptimal_flops, clippy::imprecise_flops)]
+
+const TWO: f32 = 2.0;
+const E: f32 = std::f32::consts::E;
+
+fn check_log_base() {
+ let x = 1f32;
+ let _ = x.log(2f32);
+ let _ = x.log(10f32);
+ let _ = x.log(std::f32::consts::E);
+ let _ = x.log(TWO);
+ let _ = x.log(E);
+
+ let x = 1f64;
+ let _ = x.log(2f64);
+ let _ = x.log(10f64);
+ let _ = x.log(std::f64::consts::E);
+}
+
+fn check_ln1p() {
+ let x = 1f32;
+ let _ = (1f32 + 2.).ln();
+ let _ = (1f32 + 2.0).ln();
+ let _ = (1.0 + x).ln();
+ let _ = (1.0 + x / 2.0).ln();
+ let _ = (1.0 + x.powi(3)).ln();
+ let _ = (1.0 + x.powi(3) / 2.0).ln();
+ let _ = (1.0 + (std::f32::consts::E - 1.0)).ln();
+ let _ = (x + 1.0).ln();
+ let _ = (x.powi(3) + 1.0).ln();
+ let _ = (x + 2.0 + 1.0).ln();
+ let _ = (x / 2.0 + 1.0).ln();
+ // Cases where the lint shouldn't be applied
+ let _ = (1.0 + x + 2.0).ln();
+ let _ = (x + 1.0 + 2.0).ln();
+ let _ = (x + 1.0 / 2.0).ln();
+ let _ = (1.0 + x - 2.0).ln();
+
+ let x = 1f64;
+ let _ = (1f64 + 2.).ln();
+ let _ = (1f64 + 2.0).ln();
+ let _ = (1.0 + x).ln();
+ let _ = (1.0 + x / 2.0).ln();
+ let _ = (1.0 + x.powi(3)).ln();
+ let _ = (x + 1.0).ln();
+ let _ = (x.powi(3) + 1.0).ln();
+ let _ = (x + 2.0 + 1.0).ln();
+ let _ = (x / 2.0 + 1.0).ln();
+ // Cases where the lint shouldn't be applied
+ let _ = (1.0 + x + 2.0).ln();
+ let _ = (x + 1.0 + 2.0).ln();
+ let _ = (x + 1.0 / 2.0).ln();
+ let _ = (1.0 + x - 2.0).ln();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/floating_point_log.stderr b/src/tools/clippy/tests/ui/floating_point_log.stderr
new file mode 100644
index 000000000..96e5a1544
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_log.stderr
@@ -0,0 +1,174 @@
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:10:13
+ |
+LL | let _ = x.log(2f32);
+ | ^^^^^^^^^^^ help: consider using: `x.log2()`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:11:13
+ |
+LL | let _ = x.log(10f32);
+ | ^^^^^^^^^^^^ help: consider using: `x.log10()`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:12:13
+ |
+LL | let _ = x.log(std::f32::consts::E);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.ln()`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:13:13
+ |
+LL | let _ = x.log(TWO);
+ | ^^^^^^^^^^ help: consider using: `x.log2()`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:14:13
+ |
+LL | let _ = x.log(E);
+ | ^^^^^^^^ help: consider using: `x.ln()`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:17:13
+ |
+LL | let _ = x.log(2f64);
+ | ^^^^^^^^^^^ help: consider using: `x.log2()`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:18:13
+ |
+LL | let _ = x.log(10f64);
+ | ^^^^^^^^^^^^ help: consider using: `x.log10()`
+
+error: logarithm for bases 2, 10 and e can be computed more accurately
+ --> $DIR/floating_point_log.rs:19:13
+ |
+LL | let _ = x.log(std::f64::consts::E);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.ln()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:24:13
+ |
+LL | let _ = (1f32 + 2.).ln();
+ | ^^^^^^^^^^^^^^^^ help: consider using: `2.0f32.ln_1p()`
+ |
+ = note: `-D clippy::imprecise-flops` implied by `-D warnings`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:25:13
+ |
+LL | let _ = (1f32 + 2.0).ln();
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `2.0f32.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:26:13
+ |
+LL | let _ = (1.0 + x).ln();
+ | ^^^^^^^^^^^^^^ help: consider using: `x.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:27:13
+ |
+LL | let _ = (1.0 + x / 2.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x / 2.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:28:13
+ |
+LL | let _ = (1.0 + x.powi(3)).ln();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(3).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:29:13
+ |
+LL | let _ = (1.0 + x.powi(3) / 2.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x.powi(3) / 2.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:30:13
+ |
+LL | let _ = (1.0 + (std::f32::consts::E - 1.0)).ln();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(std::f32::consts::E - 1.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:31:13
+ |
+LL | let _ = (x + 1.0).ln();
+ | ^^^^^^^^^^^^^^ help: consider using: `x.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:32:13
+ |
+LL | let _ = (x.powi(3) + 1.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(3).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:33:13
+ |
+LL | let _ = (x + 2.0 + 1.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x + 2.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:34:13
+ |
+LL | let _ = (x / 2.0 + 1.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x / 2.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:42:13
+ |
+LL | let _ = (1f64 + 2.).ln();
+ | ^^^^^^^^^^^^^^^^ help: consider using: `2.0f64.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:43:13
+ |
+LL | let _ = (1f64 + 2.0).ln();
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `2.0f64.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:44:13
+ |
+LL | let _ = (1.0 + x).ln();
+ | ^^^^^^^^^^^^^^ help: consider using: `x.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:45:13
+ |
+LL | let _ = (1.0 + x / 2.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x / 2.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:46:13
+ |
+LL | let _ = (1.0 + x.powi(3)).ln();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(3).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:47:13
+ |
+LL | let _ = (x + 1.0).ln();
+ | ^^^^^^^^^^^^^^ help: consider using: `x.ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:48:13
+ |
+LL | let _ = (x.powi(3) + 1.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(3).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:49:13
+ |
+LL | let _ = (x + 2.0 + 1.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x + 2.0).ln_1p()`
+
+error: ln(1 + x) can be computed more accurately
+ --> $DIR/floating_point_log.rs:50:13
+ |
+LL | let _ = (x / 2.0 + 1.0).ln();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `(x / 2.0).ln_1p()`
+
+error: aborting due to 28 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_logbase.fixed b/src/tools/clippy/tests/ui/floating_point_logbase.fixed
new file mode 100644
index 000000000..13962a272
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_logbase.fixed
@@ -0,0 +1,16 @@
+// run-rustfix
+#![warn(clippy::suboptimal_flops)]
+
+fn main() {
+ let x = 3f32;
+ let y = 5f32;
+ let _ = x.log(y);
+ let _ = x.log(y);
+ let _ = x.log(y);
+ let _ = x.log(y);
+ // Cases where the lint shouldn't be applied
+ let _ = x.ln() / y.powf(3.2);
+ let _ = x.powf(3.2) / y.powf(3.2);
+ let _ = x.powf(3.2) / y.ln();
+ let _ = x.log(5f32) / y.log(7f32);
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_logbase.rs b/src/tools/clippy/tests/ui/floating_point_logbase.rs
new file mode 100644
index 000000000..26bc20d53
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_logbase.rs
@@ -0,0 +1,16 @@
+// run-rustfix
+#![warn(clippy::suboptimal_flops)]
+
+fn main() {
+ let x = 3f32;
+ let y = 5f32;
+ let _ = x.ln() / y.ln();
+ let _ = x.log2() / y.log2();
+ let _ = x.log10() / y.log10();
+ let _ = x.log(5f32) / y.log(5f32);
+ // Cases where the lint shouldn't be applied
+ let _ = x.ln() / y.powf(3.2);
+ let _ = x.powf(3.2) / y.powf(3.2);
+ let _ = x.powf(3.2) / y.ln();
+ let _ = x.log(5f32) / y.log(7f32);
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_logbase.stderr b/src/tools/clippy/tests/ui/floating_point_logbase.stderr
new file mode 100644
index 000000000..78354c2f6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_logbase.stderr
@@ -0,0 +1,28 @@
+error: log base can be expressed more clearly
+ --> $DIR/floating_point_logbase.rs:7:13
+ |
+LL | let _ = x.ln() / y.ln();
+ | ^^^^^^^^^^^^^^^ help: consider using: `x.log(y)`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: log base can be expressed more clearly
+ --> $DIR/floating_point_logbase.rs:8:13
+ |
+LL | let _ = x.log2() / y.log2();
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `x.log(y)`
+
+error: log base can be expressed more clearly
+ --> $DIR/floating_point_logbase.rs:9:13
+ |
+LL | let _ = x.log10() / y.log10();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.log(y)`
+
+error: log base can be expressed more clearly
+ --> $DIR/floating_point_logbase.rs:10:13
+ |
+LL | let _ = x.log(5f32) / y.log(5f32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.log(y)`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_mul_add.fixed b/src/tools/clippy/tests/ui/floating_point_mul_add.fixed
new file mode 100644
index 000000000..169ec02f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_mul_add.fixed
@@ -0,0 +1,37 @@
+// run-rustfix
+#![feature(const_fn_floating_point_arithmetic)]
+#![warn(clippy::suboptimal_flops)]
+
+/// Allow suboptimal_ops in constant context
+pub const fn in_const_context() {
+ let a: f64 = 1234.567;
+ let b: f64 = 45.67834;
+ let c: f64 = 0.0004;
+
+ let _ = a * b + c;
+ let _ = c + a * b;
+}
+
+fn main() {
+ let a: f64 = 1234.567;
+ let b: f64 = 45.67834;
+ let c: f64 = 0.0004;
+ let d: f64 = 0.0001;
+
+ let _ = a.mul_add(b, c);
+ let _ = a.mul_add(b, c);
+ let _ = 2.0f64.mul_add(4.0, a);
+ let _ = 2.0f64.mul_add(4., a);
+
+ let _ = a.mul_add(b, c);
+ let _ = a.mul_add(b, c);
+ let _ = (a * b).mul_add(c, d);
+
+ let _ = a.mul_add(b, c).mul_add(a.mul_add(b, c), a.mul_add(b, c)) + c;
+ let _ = 1234.567_f64.mul_add(45.67834_f64, 0.0004_f64);
+
+ let _ = a.mul_add(a, b).sqrt();
+
+ // Cases where the lint shouldn't be applied
+ let _ = (a * a + b * b).sqrt();
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_mul_add.rs b/src/tools/clippy/tests/ui/floating_point_mul_add.rs
new file mode 100644
index 000000000..5338d4fc2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_mul_add.rs
@@ -0,0 +1,37 @@
+// run-rustfix
+#![feature(const_fn_floating_point_arithmetic)]
+#![warn(clippy::suboptimal_flops)]
+
+/// Allow suboptimal_ops in constant context
+pub const fn in_const_context() {
+ let a: f64 = 1234.567;
+ let b: f64 = 45.67834;
+ let c: f64 = 0.0004;
+
+ let _ = a * b + c;
+ let _ = c + a * b;
+}
+
+fn main() {
+ let a: f64 = 1234.567;
+ let b: f64 = 45.67834;
+ let c: f64 = 0.0004;
+ let d: f64 = 0.0001;
+
+ let _ = a * b + c;
+ let _ = c + a * b;
+ let _ = a + 2.0 * 4.0;
+ let _ = a + 2. * 4.;
+
+ let _ = (a * b) + c;
+ let _ = c + (a * b);
+ let _ = a * b * c + d;
+
+ let _ = a.mul_add(b, c) * a.mul_add(b, c) + a.mul_add(b, c) + c;
+ let _ = 1234.567_f64 * 45.67834_f64 + 0.0004_f64;
+
+ let _ = (a * a + b).sqrt();
+
+ // Cases where the lint shouldn't be applied
+ let _ = (a * a + b * b).sqrt();
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_mul_add.stderr b/src/tools/clippy/tests/ui/floating_point_mul_add.stderr
new file mode 100644
index 000000000..e637bbf90
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_mul_add.stderr
@@ -0,0 +1,64 @@
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:21:13
+ |
+LL | let _ = a * b + c;
+ | ^^^^^^^^^ help: consider using: `a.mul_add(b, c)`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:22:13
+ |
+LL | let _ = c + a * b;
+ | ^^^^^^^^^ help: consider using: `a.mul_add(b, c)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:23:13
+ |
+LL | let _ = a + 2.0 * 4.0;
+ | ^^^^^^^^^^^^^ help: consider using: `2.0f64.mul_add(4.0, a)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:24:13
+ |
+LL | let _ = a + 2. * 4.;
+ | ^^^^^^^^^^^ help: consider using: `2.0f64.mul_add(4., a)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:26:13
+ |
+LL | let _ = (a * b) + c;
+ | ^^^^^^^^^^^ help: consider using: `a.mul_add(b, c)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:27:13
+ |
+LL | let _ = c + (a * b);
+ | ^^^^^^^^^^^ help: consider using: `a.mul_add(b, c)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:28:13
+ |
+LL | let _ = a * b * c + d;
+ | ^^^^^^^^^^^^^ help: consider using: `(a * b).mul_add(c, d)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:30:13
+ |
+LL | let _ = a.mul_add(b, c) * a.mul_add(b, c) + a.mul_add(b, c) + c;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `a.mul_add(b, c).mul_add(a.mul_add(b, c), a.mul_add(b, c))`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:31:13
+ |
+LL | let _ = 1234.567_f64 * 45.67834_f64 + 0.0004_f64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `1234.567_f64.mul_add(45.67834_f64, 0.0004_f64)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_mul_add.rs:33:13
+ |
+LL | let _ = (a * a + b).sqrt();
+ | ^^^^^^^^^^^ help: consider using: `a.mul_add(a, b)`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_powf.fixed b/src/tools/clippy/tests/ui/floating_point_powf.fixed
new file mode 100644
index 000000000..b0641a100
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_powf.fixed
@@ -0,0 +1,42 @@
+// run-rustfix
+#![warn(clippy::suboptimal_flops, clippy::imprecise_flops)]
+
+fn main() {
+ let x = 3f32;
+ let _ = x.exp2();
+ let _ = 3.1f32.exp2();
+ let _ = (-3.1f32).exp2();
+ let _ = x.exp();
+ let _ = 3.1f32.exp();
+ let _ = (-3.1f32).exp();
+ let _ = x.sqrt();
+ let _ = x.cbrt();
+ let _ = x.powi(3);
+ let _ = x.powi(-2);
+ let _ = x.powi(16_777_215);
+ let _ = x.powi(-16_777_215);
+ // Cases where the lint shouldn't be applied
+ let _ = x.powf(2.1);
+ let _ = x.powf(-2.1);
+ let _ = x.powf(16_777_216.0);
+ let _ = x.powf(-16_777_216.0);
+
+ let x = 3f64;
+ let _ = x.exp2();
+ let _ = 3.1f64.exp2();
+ let _ = (-3.1f64).exp2();
+ let _ = x.exp();
+ let _ = 3.1f64.exp();
+ let _ = (-3.1f64).exp();
+ let _ = x.sqrt();
+ let _ = x.cbrt();
+ let _ = x.powi(3);
+ let _ = x.powi(-2);
+ let _ = x.powi(-2_147_483_648);
+ let _ = x.powi(2_147_483_647);
+ // Cases where the lint shouldn't be applied
+ let _ = x.powf(2.1);
+ let _ = x.powf(-2.1);
+ let _ = x.powf(-2_147_483_649.0);
+ let _ = x.powf(2_147_483_648.0);
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_powf.rs b/src/tools/clippy/tests/ui/floating_point_powf.rs
new file mode 100644
index 000000000..a0a2c9739
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_powf.rs
@@ -0,0 +1,42 @@
+// run-rustfix
+#![warn(clippy::suboptimal_flops, clippy::imprecise_flops)]
+
+fn main() {
+ let x = 3f32;
+ let _ = 2f32.powf(x);
+ let _ = 2f32.powf(3.1);
+ let _ = 2f32.powf(-3.1);
+ let _ = std::f32::consts::E.powf(x);
+ let _ = std::f32::consts::E.powf(3.1);
+ let _ = std::f32::consts::E.powf(-3.1);
+ let _ = x.powf(1.0 / 2.0);
+ let _ = x.powf(1.0 / 3.0);
+ let _ = x.powf(3.0);
+ let _ = x.powf(-2.0);
+ let _ = x.powf(16_777_215.0);
+ let _ = x.powf(-16_777_215.0);
+ // Cases where the lint shouldn't be applied
+ let _ = x.powf(2.1);
+ let _ = x.powf(-2.1);
+ let _ = x.powf(16_777_216.0);
+ let _ = x.powf(-16_777_216.0);
+
+ let x = 3f64;
+ let _ = 2f64.powf(x);
+ let _ = 2f64.powf(3.1);
+ let _ = 2f64.powf(-3.1);
+ let _ = std::f64::consts::E.powf(x);
+ let _ = std::f64::consts::E.powf(3.1);
+ let _ = std::f64::consts::E.powf(-3.1);
+ let _ = x.powf(1.0 / 2.0);
+ let _ = x.powf(1.0 / 3.0);
+ let _ = x.powf(3.0);
+ let _ = x.powf(-2.0);
+ let _ = x.powf(-2_147_483_648.0);
+ let _ = x.powf(2_147_483_647.0);
+ // Cases where the lint shouldn't be applied
+ let _ = x.powf(2.1);
+ let _ = x.powf(-2.1);
+ let _ = x.powf(-2_147_483_649.0);
+ let _ = x.powf(2_147_483_648.0);
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_powf.stderr b/src/tools/clippy/tests/ui/floating_point_powf.stderr
new file mode 100644
index 000000000..2422eb911
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_powf.stderr
@@ -0,0 +1,150 @@
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:6:13
+ |
+LL | let _ = 2f32.powf(x);
+ | ^^^^^^^^^^^^ help: consider using: `x.exp2()`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:7:13
+ |
+LL | let _ = 2f32.powf(3.1);
+ | ^^^^^^^^^^^^^^ help: consider using: `3.1f32.exp2()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:8:13
+ |
+LL | let _ = 2f32.powf(-3.1);
+ | ^^^^^^^^^^^^^^^ help: consider using: `(-3.1f32).exp2()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:9:13
+ |
+LL | let _ = std::f32::consts::E.powf(x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.exp()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:10:13
+ |
+LL | let _ = std::f32::consts::E.powf(3.1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `3.1f32.exp()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:11:13
+ |
+LL | let _ = std::f32::consts::E.powf(-3.1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(-3.1f32).exp()`
+
+error: square-root of a number can be computed more efficiently and accurately
+ --> $DIR/floating_point_powf.rs:12:13
+ |
+LL | let _ = x.powf(1.0 / 2.0);
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `x.sqrt()`
+
+error: cube-root of a number can be computed more accurately
+ --> $DIR/floating_point_powf.rs:13:13
+ |
+LL | let _ = x.powf(1.0 / 3.0);
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `x.cbrt()`
+ |
+ = note: `-D clippy::imprecise-flops` implied by `-D warnings`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:14:13
+ |
+LL | let _ = x.powf(3.0);
+ | ^^^^^^^^^^^ help: consider using: `x.powi(3)`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:15:13
+ |
+LL | let _ = x.powf(-2.0);
+ | ^^^^^^^^^^^^ help: consider using: `x.powi(-2)`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:16:13
+ |
+LL | let _ = x.powf(16_777_215.0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(16_777_215)`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:17:13
+ |
+LL | let _ = x.powf(-16_777_215.0);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(-16_777_215)`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:25:13
+ |
+LL | let _ = 2f64.powf(x);
+ | ^^^^^^^^^^^^ help: consider using: `x.exp2()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:26:13
+ |
+LL | let _ = 2f64.powf(3.1);
+ | ^^^^^^^^^^^^^^ help: consider using: `3.1f64.exp2()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:27:13
+ |
+LL | let _ = 2f64.powf(-3.1);
+ | ^^^^^^^^^^^^^^^ help: consider using: `(-3.1f64).exp2()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:28:13
+ |
+LL | let _ = std::f64::consts::E.powf(x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.exp()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:29:13
+ |
+LL | let _ = std::f64::consts::E.powf(3.1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `3.1f64.exp()`
+
+error: exponent for bases 2 and e can be computed more accurately
+ --> $DIR/floating_point_powf.rs:30:13
+ |
+LL | let _ = std::f64::consts::E.powf(-3.1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(-3.1f64).exp()`
+
+error: square-root of a number can be computed more efficiently and accurately
+ --> $DIR/floating_point_powf.rs:31:13
+ |
+LL | let _ = x.powf(1.0 / 2.0);
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `x.sqrt()`
+
+error: cube-root of a number can be computed more accurately
+ --> $DIR/floating_point_powf.rs:32:13
+ |
+LL | let _ = x.powf(1.0 / 3.0);
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `x.cbrt()`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:33:13
+ |
+LL | let _ = x.powf(3.0);
+ | ^^^^^^^^^^^ help: consider using: `x.powi(3)`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:34:13
+ |
+LL | let _ = x.powf(-2.0);
+ | ^^^^^^^^^^^^ help: consider using: `x.powi(-2)`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:35:13
+ |
+LL | let _ = x.powf(-2_147_483_648.0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(-2_147_483_648)`
+
+error: exponentiation with integer powers can be computed more efficiently
+ --> $DIR/floating_point_powf.rs:36:13
+ |
+LL | let _ = x.powf(2_147_483_647.0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.powi(2_147_483_647)`
+
+error: aborting due to 24 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_powi.fixed b/src/tools/clippy/tests/ui/floating_point_powi.fixed
new file mode 100644
index 000000000..85f7c531e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_powi.fixed
@@ -0,0 +1,20 @@
+// run-rustfix
+#![warn(clippy::suboptimal_flops)]
+
+fn main() {
+ let one = 1;
+ let x = 3f32;
+
+ let y = 4f32;
+ let _ = x.mul_add(x, y);
+ let _ = y.mul_add(y, x);
+ let _ = x.mul_add(x, y).sqrt();
+ let _ = y.mul_add(y, x).sqrt();
+ // Cases where the lint shouldn't be applied
+ let _ = x.powi(2);
+ let _ = x.powi(1 + 1);
+ let _ = x.powi(3);
+ let _ = x.powi(4) + y;
+ let _ = x.powi(one + 1);
+ let _ = (x.powi(2) + y.powi(2)).sqrt();
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_powi.rs b/src/tools/clippy/tests/ui/floating_point_powi.rs
new file mode 100644
index 000000000..ece61d1be
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_powi.rs
@@ -0,0 +1,20 @@
+// run-rustfix
+#![warn(clippy::suboptimal_flops)]
+
+fn main() {
+ let one = 1;
+ let x = 3f32;
+
+ let y = 4f32;
+ let _ = x.powi(2) + y;
+ let _ = x + y.powi(2);
+ let _ = (x.powi(2) + y).sqrt();
+ let _ = (x + y.powi(2)).sqrt();
+ // Cases where the lint shouldn't be applied
+ let _ = x.powi(2);
+ let _ = x.powi(1 + 1);
+ let _ = x.powi(3);
+ let _ = x.powi(4) + y;
+ let _ = x.powi(one + 1);
+ let _ = (x.powi(2) + y.powi(2)).sqrt();
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_powi.stderr b/src/tools/clippy/tests/ui/floating_point_powi.stderr
new file mode 100644
index 000000000..37d840988
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_powi.stderr
@@ -0,0 +1,28 @@
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_powi.rs:9:13
+ |
+LL | let _ = x.powi(2) + y;
+ | ^^^^^^^^^^^^^ help: consider using: `x.mul_add(x, y)`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_powi.rs:10:13
+ |
+LL | let _ = x + y.powi(2);
+ | ^^^^^^^^^^^^^ help: consider using: `y.mul_add(y, x)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_powi.rs:11:13
+ |
+LL | let _ = (x.powi(2) + y).sqrt();
+ | ^^^^^^^^^^^^^^^ help: consider using: `x.mul_add(x, y)`
+
+error: multiply and add expressions can be calculated more efficiently and accurately
+ --> $DIR/floating_point_powi.rs:12:13
+ |
+LL | let _ = (x + y.powi(2)).sqrt();
+ | ^^^^^^^^^^^^^^^ help: consider using: `y.mul_add(y, x)`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/floating_point_rad.fixed b/src/tools/clippy/tests/ui/floating_point_rad.fixed
new file mode 100644
index 000000000..ce91fe176
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_rad.fixed
@@ -0,0 +1,25 @@
+// run-rustfix
+#![feature(const_fn_floating_point_arithmetic)]
+#![warn(clippy::suboptimal_flops)]
+
+/// Allow suboptimal_flops in constant context
+pub const fn const_context() {
+ let x = 3f32;
+ let _ = x * 180f32 / std::f32::consts::PI;
+}
+
+fn main() {
+ let x = 3f32;
+ let _ = x.to_degrees();
+ let _ = 90.0_f64.to_degrees();
+ let _ = 90.5_f64.to_degrees();
+ let _ = x.to_radians();
+ let _ = 90.0_f64.to_radians();
+ let _ = 90.5_f64.to_radians();
+ // let _ = 90.5 * 80. * std::f32::consts::PI / 180f32;
+ // Cases where the lint shouldn't be applied
+ let _ = x * 90f32 / std::f32::consts::PI;
+ let _ = x * std::f32::consts::PI / 90f32;
+ let _ = x * 180f32 / std::f32::consts::E;
+ let _ = x * std::f32::consts::E / 180f32;
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_rad.rs b/src/tools/clippy/tests/ui/floating_point_rad.rs
new file mode 100644
index 000000000..8f3234986
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_rad.rs
@@ -0,0 +1,25 @@
+// run-rustfix
+#![feature(const_fn_floating_point_arithmetic)]
+#![warn(clippy::suboptimal_flops)]
+
+/// Allow suboptimal_flops in constant context
+pub const fn const_context() {
+ let x = 3f32;
+ let _ = x * 180f32 / std::f32::consts::PI;
+}
+
+fn main() {
+ let x = 3f32;
+ let _ = x * 180f32 / std::f32::consts::PI;
+ let _ = 90. * 180f64 / std::f64::consts::PI;
+ let _ = 90.5 * 180f64 / std::f64::consts::PI;
+ let _ = x * std::f32::consts::PI / 180f32;
+ let _ = 90. * std::f32::consts::PI / 180f32;
+ let _ = 90.5 * std::f32::consts::PI / 180f32;
+ // let _ = 90.5 * 80. * std::f32::consts::PI / 180f32;
+ // Cases where the lint shouldn't be applied
+ let _ = x * 90f32 / std::f32::consts::PI;
+ let _ = x * std::f32::consts::PI / 90f32;
+ let _ = x * 180f32 / std::f32::consts::E;
+ let _ = x * std::f32::consts::E / 180f32;
+}
diff --git a/src/tools/clippy/tests/ui/floating_point_rad.stderr b/src/tools/clippy/tests/ui/floating_point_rad.stderr
new file mode 100644
index 000000000..f12d3d23f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/floating_point_rad.stderr
@@ -0,0 +1,40 @@
+error: conversion to degrees can be done more accurately
+ --> $DIR/floating_point_rad.rs:13:13
+ |
+LL | let _ = x * 180f32 / std::f32::consts::PI;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.to_degrees()`
+ |
+ = note: `-D clippy::suboptimal-flops` implied by `-D warnings`
+
+error: conversion to degrees can be done more accurately
+ --> $DIR/floating_point_rad.rs:14:13
+ |
+LL | let _ = 90. * 180f64 / std::f64::consts::PI;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `90.0_f64.to_degrees()`
+
+error: conversion to degrees can be done more accurately
+ --> $DIR/floating_point_rad.rs:15:13
+ |
+LL | let _ = 90.5 * 180f64 / std::f64::consts::PI;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `90.5_f64.to_degrees()`
+
+error: conversion to radians can be done more accurately
+ --> $DIR/floating_point_rad.rs:16:13
+ |
+LL | let _ = x * std::f32::consts::PI / 180f32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `x.to_radians()`
+
+error: conversion to radians can be done more accurately
+ --> $DIR/floating_point_rad.rs:17:13
+ |
+LL | let _ = 90. * std::f32::consts::PI / 180f32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `90.0_f64.to_radians()`
+
+error: conversion to radians can be done more accurately
+ --> $DIR/floating_point_rad.rs:18:13
+ |
+LL | let _ = 90.5 * std::f32::consts::PI / 180f32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `90.5_f64.to_radians()`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/fn_address_comparisons.rs b/src/tools/clippy/tests/ui/fn_address_comparisons.rs
new file mode 100644
index 000000000..362dcb4fd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_address_comparisons.rs
@@ -0,0 +1,20 @@
+use std::fmt::Debug;
+use std::ptr;
+use std::rc::Rc;
+use std::sync::Arc;
+
+fn a() {}
+
+#[warn(clippy::fn_address_comparisons)]
+fn main() {
+ type F = fn();
+ let f: F = a;
+ let g: F = f;
+
+ // These should fail:
+ let _ = f == a;
+ let _ = f != a;
+
+ // These should be fine:
+ let _ = f == g;
+}
diff --git a/src/tools/clippy/tests/ui/fn_address_comparisons.stderr b/src/tools/clippy/tests/ui/fn_address_comparisons.stderr
new file mode 100644
index 000000000..9c1b5419a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_address_comparisons.stderr
@@ -0,0 +1,16 @@
+error: comparing with a non-unique address of a function item
+ --> $DIR/fn_address_comparisons.rs:15:13
+ |
+LL | let _ = f == a;
+ | ^^^^^^
+ |
+ = note: `-D clippy::fn-address-comparisons` implied by `-D warnings`
+
+error: comparing with a non-unique address of a function item
+ --> $DIR/fn_address_comparisons.rs:16:13
+ |
+LL | let _ = f != a;
+ | ^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs b/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs
new file mode 100644
index 000000000..f805bcc9b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs
@@ -0,0 +1,45 @@
+#![warn(clippy::fn_params_excessive_bools)]
+#![allow(clippy::too_many_arguments)]
+
+extern "C" {
+ fn f(_: bool, _: bool, _: bool, _: bool);
+}
+
+macro_rules! foo {
+ () => {
+ fn fff(_: bool, _: bool, _: bool, _: bool) {}
+ };
+}
+
+foo!();
+
+#[no_mangle]
+extern "C" fn k(_: bool, _: bool, _: bool, _: bool) {}
+fn g(_: bool, _: bool, _: bool, _: bool) {}
+fn h(_: bool, _: bool, _: bool) {}
+fn e(_: S, _: S, _: Box<S>, _: Vec<u32>) {}
+fn t(_: S, _: S, _: Box<S>, _: Vec<u32>, _: bool, _: bool, _: bool, _: bool) {}
+
+struct S;
+trait Trait {
+ fn f(_: bool, _: bool, _: bool, _: bool);
+ fn g(_: bool, _: bool, _: bool, _: Vec<u32>);
+}
+
+impl S {
+ fn f(&self, _: bool, _: bool, _: bool, _: bool) {}
+ fn g(&self, _: bool, _: bool, _: bool) {}
+ #[no_mangle]
+ extern "C" fn h(_: bool, _: bool, _: bool, _: bool) {}
+}
+
+impl Trait for S {
+ fn f(_: bool, _: bool, _: bool, _: bool) {}
+ fn g(_: bool, _: bool, _: bool, _: Vec<u32>) {}
+}
+
+fn main() {
+ fn n(_: bool, _: u32, _: bool, _: Box<u32>, _: bool, _: bool) {
+ fn nn(_: bool, _: bool, _: bool, _: bool) {}
+ }
+}
diff --git a/src/tools/clippy/tests/ui/fn_params_excessive_bools.stderr b/src/tools/clippy/tests/ui/fn_params_excessive_bools.stderr
new file mode 100644
index 000000000..cd9d07fa1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_params_excessive_bools.stderr
@@ -0,0 +1,53 @@
+error: more than 3 bools in function parameters
+ --> $DIR/fn_params_excessive_bools.rs:18:1
+ |
+LL | fn g(_: bool, _: bool, _: bool, _: bool) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::fn-params-excessive-bools` implied by `-D warnings`
+ = help: consider refactoring bools into two-variant enums
+
+error: more than 3 bools in function parameters
+ --> $DIR/fn_params_excessive_bools.rs:21:1
+ |
+LL | fn t(_: S, _: S, _: Box<S>, _: Vec<u32>, _: bool, _: bool, _: bool, _: bool) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider refactoring bools into two-variant enums
+
+error: more than 3 bools in function parameters
+ --> $DIR/fn_params_excessive_bools.rs:25:5
+ |
+LL | fn f(_: bool, _: bool, _: bool, _: bool);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider refactoring bools into two-variant enums
+
+error: more than 3 bools in function parameters
+ --> $DIR/fn_params_excessive_bools.rs:30:5
+ |
+LL | fn f(&self, _: bool, _: bool, _: bool, _: bool) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider refactoring bools into two-variant enums
+
+error: more than 3 bools in function parameters
+ --> $DIR/fn_params_excessive_bools.rs:42:5
+ |
+LL | / fn n(_: bool, _: u32, _: bool, _: Box<u32>, _: bool, _: bool) {
+LL | | fn nn(_: bool, _: bool, _: bool, _: bool) {}
+LL | | }
+ | |_____^
+ |
+ = help: consider refactoring bools into two-variant enums
+
+error: more than 3 bools in function parameters
+ --> $DIR/fn_params_excessive_bools.rs:43:9
+ |
+LL | fn nn(_: bool, _: bool, _: bool, _: bool) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider refactoring bools into two-variant enums
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/fn_to_numeric_cast.rs b/src/tools/clippy/tests/ui/fn_to_numeric_cast.rs
new file mode 100644
index 000000000..a456c085c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_to_numeric_cast.rs
@@ -0,0 +1,55 @@
+// ignore-32bit
+
+#![warn(clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation)]
+
+fn foo() -> String {
+ String::new()
+}
+
+fn test_function_to_numeric_cast() {
+ let _ = foo as i8;
+ let _ = foo as i16;
+ let _ = foo as i32;
+ let _ = foo as i64;
+ let _ = foo as i128;
+ let _ = foo as isize;
+
+ let _ = foo as u8;
+ let _ = foo as u16;
+ let _ = foo as u32;
+ let _ = foo as u64;
+ let _ = foo as u128;
+
+ // Casting to usize is OK and should not warn
+ let _ = foo as usize;
+
+ // Cast `f` (a `FnDef`) to `fn()` should not warn
+ fn f() {}
+ let _ = f as fn();
+}
+
+fn test_function_var_to_numeric_cast() {
+ let abc: fn() -> String = foo;
+
+ let _ = abc as i8;
+ let _ = abc as i16;
+ let _ = abc as i32;
+ let _ = abc as i64;
+ let _ = abc as i128;
+ let _ = abc as isize;
+
+ let _ = abc as u8;
+ let _ = abc as u16;
+ let _ = abc as u32;
+ let _ = abc as u64;
+ let _ = abc as u128;
+
+ // Casting to usize is OK and should not warn
+ let _ = abc as usize;
+}
+
+fn fn_with_fn_args(f: fn(i32) -> i32) -> i32 {
+ f as i32
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/fn_to_numeric_cast.stderr b/src/tools/clippy/tests/ui/fn_to_numeric_cast.stderr
new file mode 100644
index 000000000..e9549e157
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_to_numeric_cast.stderr
@@ -0,0 +1,144 @@
+error: casting function pointer `foo` to `i8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:10:13
+ |
+LL | let _ = foo as i8;
+ | ^^^^^^^^^ help: try: `foo as usize`
+ |
+ = note: `-D clippy::fn-to-numeric-cast-with-truncation` implied by `-D warnings`
+
+error: casting function pointer `foo` to `i16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:11:13
+ |
+LL | let _ = foo as i16;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `i32`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:12:13
+ |
+LL | let _ = foo as i32;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `i64`
+ --> $DIR/fn_to_numeric_cast.rs:13:13
+ |
+LL | let _ = foo as i64;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+ |
+ = note: `-D clippy::fn-to-numeric-cast` implied by `-D warnings`
+
+error: casting function pointer `foo` to `i128`
+ --> $DIR/fn_to_numeric_cast.rs:14:13
+ |
+LL | let _ = foo as i128;
+ | ^^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `isize`
+ --> $DIR/fn_to_numeric_cast.rs:15:13
+ |
+LL | let _ = foo as isize;
+ | ^^^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:17:13
+ |
+LL | let _ = foo as u8;
+ | ^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:18:13
+ |
+LL | let _ = foo as u16;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u32`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:19:13
+ |
+LL | let _ = foo as u32;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u64`
+ --> $DIR/fn_to_numeric_cast.rs:20:13
+ |
+LL | let _ = foo as u64;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u128`
+ --> $DIR/fn_to_numeric_cast.rs:21:13
+ |
+LL | let _ = foo as u128;
+ | ^^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `abc` to `i8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:34:13
+ |
+LL | let _ = abc as i8;
+ | ^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:35:13
+ |
+LL | let _ = abc as i16;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i32`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:36:13
+ |
+LL | let _ = abc as i32;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i64`
+ --> $DIR/fn_to_numeric_cast.rs:37:13
+ |
+LL | let _ = abc as i64;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i128`
+ --> $DIR/fn_to_numeric_cast.rs:38:13
+ |
+LL | let _ = abc as i128;
+ | ^^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `isize`
+ --> $DIR/fn_to_numeric_cast.rs:39:13
+ |
+LL | let _ = abc as isize;
+ | ^^^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:41:13
+ |
+LL | let _ = abc as u8;
+ | ^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:42:13
+ |
+LL | let _ = abc as u16;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u32`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:43:13
+ |
+LL | let _ = abc as u32;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u64`
+ --> $DIR/fn_to_numeric_cast.rs:44:13
+ |
+LL | let _ = abc as u64;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u128`
+ --> $DIR/fn_to_numeric_cast.rs:45:13
+ |
+LL | let _ = abc as u128;
+ | ^^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `f` to `i32`, which truncates the value
+ --> $DIR/fn_to_numeric_cast.rs:52:5
+ |
+LL | f as i32
+ | ^^^^^^^^ help: try: `f as usize`
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.rs b/src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.rs
new file mode 100644
index 000000000..04ee985c0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.rs
@@ -0,0 +1,55 @@
+// ignore-64bit
+
+#![warn(clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation)]
+
+fn foo() -> String {
+ String::new()
+}
+
+fn test_function_to_numeric_cast() {
+ let _ = foo as i8;
+ let _ = foo as i16;
+ let _ = foo as i32;
+ let _ = foo as i64;
+ let _ = foo as i128;
+ let _ = foo as isize;
+
+ let _ = foo as u8;
+ let _ = foo as u16;
+ let _ = foo as u32;
+ let _ = foo as u64;
+ let _ = foo as u128;
+
+ // Casting to usize is OK and should not warn
+ let _ = foo as usize;
+
+ // Cast `f` (a `FnDef`) to `fn()` should not warn
+ fn f() {}
+ let _ = f as fn();
+}
+
+fn test_function_var_to_numeric_cast() {
+ let abc: fn() -> String = foo;
+
+ let _ = abc as i8;
+ let _ = abc as i16;
+ let _ = abc as i32;
+ let _ = abc as i64;
+ let _ = abc as i128;
+ let _ = abc as isize;
+
+ let _ = abc as u8;
+ let _ = abc as u16;
+ let _ = abc as u32;
+ let _ = abc as u64;
+ let _ = abc as u128;
+
+ // Casting to usize is OK and should not warn
+ let _ = abc as usize;
+}
+
+fn fn_with_fn_args(f: fn(i32) -> i32) -> i32 {
+ f as i32
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.stderr b/src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.stderr
new file mode 100644
index 000000000..08dd611d6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_to_numeric_cast_32bit.stderr
@@ -0,0 +1,144 @@
+error: casting function pointer `foo` to `i8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:10:13
+ |
+LL | let _ = foo as i8;
+ | ^^^^^^^^^ help: try: `foo as usize`
+ |
+ = note: `-D clippy::fn-to-numeric-cast-with-truncation` implied by `-D warnings`
+
+error: casting function pointer `foo` to `i16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:11:13
+ |
+LL | let _ = foo as i16;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `i32`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:12:13
+ |
+LL | let _ = foo as i32;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+ |
+ = note: `-D clippy::fn-to-numeric-cast` implied by `-D warnings`
+
+error: casting function pointer `foo` to `i64`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:13:13
+ |
+LL | let _ = foo as i64;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `i128`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:14:13
+ |
+LL | let _ = foo as i128;
+ | ^^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `isize`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:15:13
+ |
+LL | let _ = foo as isize;
+ | ^^^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:17:13
+ |
+LL | let _ = foo as u8;
+ | ^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:18:13
+ |
+LL | let _ = foo as u16;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u32`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:19:13
+ |
+LL | let _ = foo as u32;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u64`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:20:13
+ |
+LL | let _ = foo as u64;
+ | ^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `foo` to `u128`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:21:13
+ |
+LL | let _ = foo as u128;
+ | ^^^^^^^^^^^ help: try: `foo as usize`
+
+error: casting function pointer `abc` to `i8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:34:13
+ |
+LL | let _ = abc as i8;
+ | ^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:35:13
+ |
+LL | let _ = abc as i16;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i32`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:36:13
+ |
+LL | let _ = abc as i32;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i64`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:37:13
+ |
+LL | let _ = abc as i64;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `i128`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:38:13
+ |
+LL | let _ = abc as i128;
+ | ^^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `isize`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:39:13
+ |
+LL | let _ = abc as isize;
+ | ^^^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u8`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:41:13
+ |
+LL | let _ = abc as u8;
+ | ^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u16`, which truncates the value
+ --> $DIR/fn_to_numeric_cast_32bit.rs:42:13
+ |
+LL | let _ = abc as u16;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u32`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:43:13
+ |
+LL | let _ = abc as u32;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u64`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:44:13
+ |
+LL | let _ = abc as u64;
+ | ^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `abc` to `u128`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:45:13
+ |
+LL | let _ = abc as u128;
+ | ^^^^^^^^^^^ help: try: `abc as usize`
+
+error: casting function pointer `f` to `i32`
+ --> $DIR/fn_to_numeric_cast_32bit.rs:52:5
+ |
+LL | f as i32
+ | ^^^^^^^^ help: try: `f as usize`
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/fn_to_numeric_cast_any.rs b/src/tools/clippy/tests/ui/fn_to_numeric_cast_any.rs
new file mode 100644
index 000000000..467046839
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_to_numeric_cast_any.rs
@@ -0,0 +1,76 @@
+#![warn(clippy::fn_to_numeric_cast_any)]
+#![allow(clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation)]
+
+fn foo() -> u8 {
+ 0
+}
+
+fn generic_foo<T>(x: T) -> T {
+ x
+}
+
+trait Trait {
+ fn static_method() -> u32 {
+ 2
+ }
+}
+
+struct Struct;
+
+impl Trait for Struct {}
+
+fn fn_pointer_to_integer() {
+ let _ = foo as i8;
+ let _ = foo as i16;
+ let _ = foo as i32;
+ let _ = foo as i64;
+ let _ = foo as i128;
+ let _ = foo as isize;
+
+ let _ = foo as u8;
+ let _ = foo as u16;
+ let _ = foo as u32;
+ let _ = foo as u64;
+ let _ = foo as u128;
+ let _ = foo as usize;
+}
+
+fn static_method_to_integer() {
+ let _ = Struct::static_method as usize;
+}
+
+fn fn_with_fn_arg(f: fn(i32) -> u32) -> usize {
+ f as usize
+}
+
+fn fn_with_generic_static_trait_method<T: Trait>() -> usize {
+ T::static_method as usize
+}
+
+fn closure_to_fn_to_integer() {
+ let clos = |x| x * 2_u32;
+
+ let _ = (clos as fn(u32) -> u32) as usize;
+}
+
+fn fn_to_raw_ptr() {
+ let _ = foo as *const ();
+}
+
+fn cast_fn_to_self() {
+ // Casting to the same function pointer type should be permitted.
+ let _ = foo as fn() -> u8;
+}
+
+fn cast_generic_to_concrete() {
+ // Casting to a more concrete function pointer type should be permitted.
+ let _ = generic_foo as fn(usize) -> usize;
+}
+
+fn cast_closure_to_fn() {
+ // Casting a closure to a function pointer should be permitted.
+ let id = |x| x;
+ let _ = id as fn(usize) -> usize;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/fn_to_numeric_cast_any.stderr b/src/tools/clippy/tests/ui/fn_to_numeric_cast_any.stderr
new file mode 100644
index 000000000..a6c4a7767
--- /dev/null
+++ b/src/tools/clippy/tests/ui/fn_to_numeric_cast_any.stderr
@@ -0,0 +1,106 @@
+error: casting function pointer `foo` to `i8`
+ --> $DIR/fn_to_numeric_cast_any.rs:23:13
+ |
+LL | let _ = foo as i8;
+ | ^^^^^^^^^ help: did you mean to invoke the function?: `foo() as i8`
+ |
+ = note: `-D clippy::fn-to-numeric-cast-any` implied by `-D warnings`
+
+error: casting function pointer `foo` to `i16`
+ --> $DIR/fn_to_numeric_cast_any.rs:24:13
+ |
+LL | let _ = foo as i16;
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as i16`
+
+error: casting function pointer `foo` to `i32`
+ --> $DIR/fn_to_numeric_cast_any.rs:25:13
+ |
+LL | let _ = foo as i32;
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as i32`
+
+error: casting function pointer `foo` to `i64`
+ --> $DIR/fn_to_numeric_cast_any.rs:26:13
+ |
+LL | let _ = foo as i64;
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as i64`
+
+error: casting function pointer `foo` to `i128`
+ --> $DIR/fn_to_numeric_cast_any.rs:27:13
+ |
+LL | let _ = foo as i128;
+ | ^^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as i128`
+
+error: casting function pointer `foo` to `isize`
+ --> $DIR/fn_to_numeric_cast_any.rs:28:13
+ |
+LL | let _ = foo as isize;
+ | ^^^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as isize`
+
+error: casting function pointer `foo` to `u8`
+ --> $DIR/fn_to_numeric_cast_any.rs:30:13
+ |
+LL | let _ = foo as u8;
+ | ^^^^^^^^^ help: did you mean to invoke the function?: `foo() as u8`
+
+error: casting function pointer `foo` to `u16`
+ --> $DIR/fn_to_numeric_cast_any.rs:31:13
+ |
+LL | let _ = foo as u16;
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as u16`
+
+error: casting function pointer `foo` to `u32`
+ --> $DIR/fn_to_numeric_cast_any.rs:32:13
+ |
+LL | let _ = foo as u32;
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as u32`
+
+error: casting function pointer `foo` to `u64`
+ --> $DIR/fn_to_numeric_cast_any.rs:33:13
+ |
+LL | let _ = foo as u64;
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as u64`
+
+error: casting function pointer `foo` to `u128`
+ --> $DIR/fn_to_numeric_cast_any.rs:34:13
+ |
+LL | let _ = foo as u128;
+ | ^^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as u128`
+
+error: casting function pointer `foo` to `usize`
+ --> $DIR/fn_to_numeric_cast_any.rs:35:13
+ |
+LL | let _ = foo as usize;
+ | ^^^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as usize`
+
+error: casting function pointer `Struct::static_method` to `usize`
+ --> $DIR/fn_to_numeric_cast_any.rs:39:13
+ |
+LL | let _ = Struct::static_method as usize;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean to invoke the function?: `Struct::static_method() as usize`
+
+error: casting function pointer `f` to `usize`
+ --> $DIR/fn_to_numeric_cast_any.rs:43:5
+ |
+LL | f as usize
+ | ^^^^^^^^^^ help: did you mean to invoke the function?: `f() as usize`
+
+error: casting function pointer `T::static_method` to `usize`
+ --> $DIR/fn_to_numeric_cast_any.rs:47:5
+ |
+LL | T::static_method as usize
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean to invoke the function?: `T::static_method() as usize`
+
+error: casting function pointer `(clos as fn(u32) -> u32)` to `usize`
+ --> $DIR/fn_to_numeric_cast_any.rs:53:13
+ |
+LL | let _ = (clos as fn(u32) -> u32) as usize;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean to invoke the function?: `(clos as fn(u32) -> u32)() as usize`
+
+error: casting function pointer `foo` to `*const ()`
+ --> $DIR/fn_to_numeric_cast_any.rs:57:13
+ |
+LL | let _ = foo as *const ();
+ | ^^^^^^^^^^^^^^^^ help: did you mean to invoke the function?: `foo() as *const ()`
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/for_kv_map.rs b/src/tools/clippy/tests/ui/for_kv_map.rs
new file mode 100644
index 000000000..39a8d960a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_kv_map.rs
@@ -0,0 +1,50 @@
+#![warn(clippy::for_kv_map)]
+#![allow(clippy::used_underscore_binding)]
+
+use std::collections::*;
+use std::rc::Rc;
+
+fn main() {
+ let m: HashMap<u64, u64> = HashMap::new();
+ for (_, v) in &m {
+ let _v = v;
+ }
+
+ let m: Rc<HashMap<u64, u64>> = Rc::new(HashMap::new());
+ for (_, v) in &*m {
+ let _v = v;
+ // Here the `*` is not actually necessary, but the test tests that we don't
+ // suggest
+ // `in *m.values()` as we used to
+ }
+
+ let mut m: HashMap<u64, u64> = HashMap::new();
+ for (_, v) in &mut m {
+ let _v = v;
+ }
+
+ let m: &mut HashMap<u64, u64> = &mut HashMap::new();
+ for (_, v) in &mut *m {
+ let _v = v;
+ }
+
+ let m: HashMap<u64, u64> = HashMap::new();
+ let rm = &m;
+ for (k, _value) in rm {
+ let _k = k;
+ }
+
+ // The following should not produce warnings.
+
+ let m: HashMap<u64, u64> = HashMap::new();
+ // No error, _value is actually used
+ for (k, _value) in &m {
+ let _ = _value;
+ let _k = k;
+ }
+
+ let m: HashMap<u64, String> = Default::default();
+ for (_, v) in m {
+ let _v = v;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/for_kv_map.stderr b/src/tools/clippy/tests/ui/for_kv_map.stderr
new file mode 100644
index 000000000..e5cc7c146
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_kv_map.stderr
@@ -0,0 +1,58 @@
+error: you seem to want to iterate on a map's values
+ --> $DIR/for_kv_map.rs:9:19
+ |
+LL | for (_, v) in &m {
+ | ^^
+ |
+ = note: `-D clippy::for-kv-map` implied by `-D warnings`
+help: use the corresponding method
+ |
+LL | for v in m.values() {
+ | ~ ~~~~~~~~~~
+
+error: you seem to want to iterate on a map's values
+ --> $DIR/for_kv_map.rs:14:19
+ |
+LL | for (_, v) in &*m {
+ | ^^^
+ |
+help: use the corresponding method
+ |
+LL | for v in (*m).values() {
+ | ~ ~~~~~~~~~~~~~
+
+error: you seem to want to iterate on a map's values
+ --> $DIR/for_kv_map.rs:22:19
+ |
+LL | for (_, v) in &mut m {
+ | ^^^^^^
+ |
+help: use the corresponding method
+ |
+LL | for v in m.values_mut() {
+ | ~ ~~~~~~~~~~~~~~
+
+error: you seem to want to iterate on a map's values
+ --> $DIR/for_kv_map.rs:27:19
+ |
+LL | for (_, v) in &mut *m {
+ | ^^^^^^^
+ |
+help: use the corresponding method
+ |
+LL | for v in (*m).values_mut() {
+ | ~ ~~~~~~~~~~~~~~~~~
+
+error: you seem to want to iterate on a map's keys
+ --> $DIR/for_kv_map.rs:33:24
+ |
+LL | for (k, _value) in rm {
+ | ^^
+ |
+help: use the corresponding method
+ |
+LL | for k in rm.keys() {
+ | ~ ~~~~~~~~~
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/for_loop_fixable.fixed b/src/tools/clippy/tests/ui/for_loop_fixable.fixed
new file mode 100644
index 000000000..aa69781d1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loop_fixable.fixed
@@ -0,0 +1,309 @@
+// run-rustfix
+
+#![allow(dead_code, unused)]
+
+use std::collections::*;
+
+#[warn(clippy::all)]
+struct Unrelated(Vec<u8>);
+impl Unrelated {
+ fn next(&self) -> std::slice::Iter<u8> {
+ self.0.iter()
+ }
+
+ fn iter(&self) -> std::slice::Iter<u8> {
+ self.0.iter()
+ }
+}
+
+#[warn(
+ clippy::needless_range_loop,
+ clippy::explicit_iter_loop,
+ clippy::explicit_into_iter_loop,
+ clippy::iter_next_loop,
+ clippy::for_kv_map
+)]
+#[allow(
+ clippy::linkedlist,
+ clippy::unnecessary_mut_passed,
+ clippy::similar_names,
+ clippy::needless_borrow
+)]
+#[allow(unused_variables)]
+fn main() {
+ let mut vec = vec![1, 2, 3, 4];
+
+ // See #601
+ for i in 0..10 {
+ // no error, id_col does not exist outside the loop
+ let mut id_col = vec![0f64; 10];
+ id_col[i] = 1f64;
+ }
+
+ for _v in &vec {}
+
+ for _v in &mut vec {}
+
+ let out_vec = vec![1, 2, 3];
+ for _v in out_vec {}
+
+ for _v in &vec {} // these are fine
+ for _v in &mut vec {} // these are fine
+
+ for _v in &[1, 2, 3] {}
+
+ for _v in (&mut [1, 2, 3]).iter() {} // no error
+
+ for _v in &[0; 32] {}
+
+ for _v in [0; 33].iter() {} // no error
+
+ let ll: LinkedList<()> = LinkedList::new();
+ for _v in &ll {}
+
+ let vd: VecDeque<()> = VecDeque::new();
+ for _v in &vd {}
+
+ let bh: BinaryHeap<()> = BinaryHeap::new();
+ for _v in &bh {}
+
+ let hm: HashMap<(), ()> = HashMap::new();
+ for _v in &hm {}
+
+ let bt: BTreeMap<(), ()> = BTreeMap::new();
+ for _v in &bt {}
+
+ let hs: HashSet<()> = HashSet::new();
+ for _v in &hs {}
+
+ let bs: BTreeSet<()> = BTreeSet::new();
+ for _v in &bs {}
+
+ let u = Unrelated(vec![]);
+ for _v in u.next() {} // no error
+ for _v in u.iter() {} // no error
+
+ let mut out = vec![];
+ vec.iter().cloned().map(|x| out.push(x)).collect::<Vec<_>>();
+ let _y = vec.iter().cloned().map(|x| out.push(x)).collect::<Vec<_>>(); // this is fine
+
+ // Loop with explicit counter variable
+
+ // Potential false positives
+ let mut _index = 0;
+ _index = 1;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ _index += 1;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ if true {
+ _index = 1
+ }
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ let mut _index = 1;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index += 1;
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index *= 2;
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index = 1;
+ _index += 1
+ }
+
+ let mut _index = 0;
+
+ for _v in &vec {
+ let mut _index = 0;
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index += 1;
+ _index = 0;
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ for _x in 0..1 {
+ _index += 1;
+ }
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for x in &vec {
+ if *x == 1 {
+ _index += 1
+ }
+ }
+
+ let mut _index = 0;
+ if true {
+ _index = 1
+ };
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 1;
+ if false {
+ _index = 0
+ };
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut index = 0;
+ {
+ let mut _x = &mut index;
+ }
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut index = 0;
+ for _v in &vec {
+ index += 1
+ }
+ println!("index: {}", index);
+
+ fn f<T>(_: &T, _: &T) -> bool {
+ unimplemented!()
+ }
+ fn g<T>(_: &mut [T], _: usize, _: usize) {
+ unimplemented!()
+ }
+ for i in 1..vec.len() {
+ if f(&vec[i - 1], &vec[i]) {
+ g(&mut vec, i - 1, i);
+ }
+ }
+
+ for mid in 1..vec.len() {
+ let (_, _) = vec.split_at(mid);
+ }
+}
+
+fn partition<T: PartialOrd + Send>(v: &mut [T]) -> usize {
+ let pivot = v.len() - 1;
+ let mut i = 0;
+ for j in 0..pivot {
+ if v[j] <= v[pivot] {
+ v.swap(i, j);
+ i += 1;
+ }
+ }
+ v.swap(i, pivot);
+ i
+}
+
+#[warn(clippy::needless_range_loop)]
+pub fn manual_copy_same_destination(dst: &mut [i32], d: usize, s: usize) {
+ // Same source and destination - don't trigger lint
+ for i in 0..dst.len() {
+ dst[d + i] = dst[s + i];
+ }
+}
+
+mod issue_2496 {
+ pub trait Handle {
+ fn new_for_index(index: usize) -> Self;
+ fn index(&self) -> usize;
+ }
+
+ pub fn test<H: Handle>() -> H {
+ for x in 0..5 {
+ let next_handle = H::new_for_index(x);
+ println!("{}", next_handle.index());
+ }
+ unimplemented!()
+ }
+}
+
+// explicit_into_iter_loop bad suggestions
+#[warn(clippy::explicit_into_iter_loop, clippy::explicit_iter_loop)]
+mod issue_4958 {
+ fn takes_iterator<T>(iterator: &T)
+ where
+ for<'a> &'a T: IntoIterator<Item = &'a String>,
+ {
+ for i in iterator {
+ println!("{}", i);
+ }
+ }
+
+ struct T;
+ impl IntoIterator for &T {
+ type Item = ();
+ type IntoIter = std::vec::IntoIter<Self::Item>;
+ fn into_iter(self) -> Self::IntoIter {
+ vec![].into_iter()
+ }
+ }
+
+ fn more_tests() {
+ let t = T;
+ let r = &t;
+ let rr = &&t;
+
+ // This case is handled by `explicit_iter_loop`. No idea why.
+ for _ in &t {}
+
+ for _ in r {}
+
+ // No suggestion for this.
+ // We'd have to suggest `for _ in *rr {}` which is less clear.
+ for _ in rr.into_iter() {}
+ }
+}
+
+// explicit_into_iter_loop
+#[warn(clippy::explicit_into_iter_loop)]
+mod issue_6900 {
+ struct S;
+ impl S {
+ #[allow(clippy::should_implement_trait)]
+ pub fn into_iter<T>(self) -> I<T> {
+ unimplemented!()
+ }
+ }
+
+ struct I<T>(T);
+ impl<T> Iterator for I<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ unimplemented!()
+ }
+ }
+
+ fn f() {
+ for _ in S.into_iter::<u32>() {
+ unimplemented!()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/for_loop_fixable.rs b/src/tools/clippy/tests/ui/for_loop_fixable.rs
new file mode 100644
index 000000000..7c063d995
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loop_fixable.rs
@@ -0,0 +1,309 @@
+// run-rustfix
+
+#![allow(dead_code, unused)]
+
+use std::collections::*;
+
+#[warn(clippy::all)]
+struct Unrelated(Vec<u8>);
+impl Unrelated {
+ fn next(&self) -> std::slice::Iter<u8> {
+ self.0.iter()
+ }
+
+ fn iter(&self) -> std::slice::Iter<u8> {
+ self.0.iter()
+ }
+}
+
+#[warn(
+ clippy::needless_range_loop,
+ clippy::explicit_iter_loop,
+ clippy::explicit_into_iter_loop,
+ clippy::iter_next_loop,
+ clippy::for_kv_map
+)]
+#[allow(
+ clippy::linkedlist,
+ clippy::unnecessary_mut_passed,
+ clippy::similar_names,
+ clippy::needless_borrow
+)]
+#[allow(unused_variables)]
+fn main() {
+ let mut vec = vec![1, 2, 3, 4];
+
+ // See #601
+ for i in 0..10 {
+ // no error, id_col does not exist outside the loop
+ let mut id_col = vec![0f64; 10];
+ id_col[i] = 1f64;
+ }
+
+ for _v in vec.iter() {}
+
+ for _v in vec.iter_mut() {}
+
+ let out_vec = vec![1, 2, 3];
+ for _v in out_vec.into_iter() {}
+
+ for _v in &vec {} // these are fine
+ for _v in &mut vec {} // these are fine
+
+ for _v in [1, 2, 3].iter() {}
+
+ for _v in (&mut [1, 2, 3]).iter() {} // no error
+
+ for _v in [0; 32].iter() {}
+
+ for _v in [0; 33].iter() {} // no error
+
+ let ll: LinkedList<()> = LinkedList::new();
+ for _v in ll.iter() {}
+
+ let vd: VecDeque<()> = VecDeque::new();
+ for _v in vd.iter() {}
+
+ let bh: BinaryHeap<()> = BinaryHeap::new();
+ for _v in bh.iter() {}
+
+ let hm: HashMap<(), ()> = HashMap::new();
+ for _v in hm.iter() {}
+
+ let bt: BTreeMap<(), ()> = BTreeMap::new();
+ for _v in bt.iter() {}
+
+ let hs: HashSet<()> = HashSet::new();
+ for _v in hs.iter() {}
+
+ let bs: BTreeSet<()> = BTreeSet::new();
+ for _v in bs.iter() {}
+
+ let u = Unrelated(vec![]);
+ for _v in u.next() {} // no error
+ for _v in u.iter() {} // no error
+
+ let mut out = vec![];
+ vec.iter().cloned().map(|x| out.push(x)).collect::<Vec<_>>();
+ let _y = vec.iter().cloned().map(|x| out.push(x)).collect::<Vec<_>>(); // this is fine
+
+ // Loop with explicit counter variable
+
+ // Potential false positives
+ let mut _index = 0;
+ _index = 1;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ _index += 1;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ if true {
+ _index = 1
+ }
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ let mut _index = 1;
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index += 1;
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index *= 2;
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index = 1;
+ _index += 1
+ }
+
+ let mut _index = 0;
+
+ for _v in &vec {
+ let mut _index = 0;
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ _index += 1;
+ _index = 0;
+ }
+
+ let mut _index = 0;
+ for _v in &vec {
+ for _x in 0..1 {
+ _index += 1;
+ }
+ _index += 1
+ }
+
+ let mut _index = 0;
+ for x in &vec {
+ if *x == 1 {
+ _index += 1
+ }
+ }
+
+ let mut _index = 0;
+ if true {
+ _index = 1
+ };
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut _index = 1;
+ if false {
+ _index = 0
+ };
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut index = 0;
+ {
+ let mut _x = &mut index;
+ }
+ for _v in &vec {
+ _index += 1
+ }
+
+ let mut index = 0;
+ for _v in &vec {
+ index += 1
+ }
+ println!("index: {}", index);
+
+ fn f<T>(_: &T, _: &T) -> bool {
+ unimplemented!()
+ }
+ fn g<T>(_: &mut [T], _: usize, _: usize) {
+ unimplemented!()
+ }
+ for i in 1..vec.len() {
+ if f(&vec[i - 1], &vec[i]) {
+ g(&mut vec, i - 1, i);
+ }
+ }
+
+ for mid in 1..vec.len() {
+ let (_, _) = vec.split_at(mid);
+ }
+}
+
+fn partition<T: PartialOrd + Send>(v: &mut [T]) -> usize {
+ let pivot = v.len() - 1;
+ let mut i = 0;
+ for j in 0..pivot {
+ if v[j] <= v[pivot] {
+ v.swap(i, j);
+ i += 1;
+ }
+ }
+ v.swap(i, pivot);
+ i
+}
+
+#[warn(clippy::needless_range_loop)]
+pub fn manual_copy_same_destination(dst: &mut [i32], d: usize, s: usize) {
+ // Same source and destination - don't trigger lint
+ for i in 0..dst.len() {
+ dst[d + i] = dst[s + i];
+ }
+}
+
+mod issue_2496 {
+ pub trait Handle {
+ fn new_for_index(index: usize) -> Self;
+ fn index(&self) -> usize;
+ }
+
+ pub fn test<H: Handle>() -> H {
+ for x in 0..5 {
+ let next_handle = H::new_for_index(x);
+ println!("{}", next_handle.index());
+ }
+ unimplemented!()
+ }
+}
+
+// explicit_into_iter_loop bad suggestions
+#[warn(clippy::explicit_into_iter_loop, clippy::explicit_iter_loop)]
+mod issue_4958 {
+ fn takes_iterator<T>(iterator: &T)
+ where
+ for<'a> &'a T: IntoIterator<Item = &'a String>,
+ {
+ for i in iterator.into_iter() {
+ println!("{}", i);
+ }
+ }
+
+ struct T;
+ impl IntoIterator for &T {
+ type Item = ();
+ type IntoIter = std::vec::IntoIter<Self::Item>;
+ fn into_iter(self) -> Self::IntoIter {
+ vec![].into_iter()
+ }
+ }
+
+ fn more_tests() {
+ let t = T;
+ let r = &t;
+ let rr = &&t;
+
+ // This case is handled by `explicit_iter_loop`. No idea why.
+ for _ in t.into_iter() {}
+
+ for _ in r.into_iter() {}
+
+ // No suggestion for this.
+ // We'd have to suggest `for _ in *rr {}` which is less clear.
+ for _ in rr.into_iter() {}
+ }
+}
+
+// explicit_into_iter_loop
+#[warn(clippy::explicit_into_iter_loop)]
+mod issue_6900 {
+ struct S;
+ impl S {
+ #[allow(clippy::should_implement_trait)]
+ pub fn into_iter<T>(self) -> I<T> {
+ unimplemented!()
+ }
+ }
+
+ struct I<T>(T);
+ impl<T> Iterator for I<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ unimplemented!()
+ }
+ }
+
+ fn f() {
+ for _ in S.into_iter::<u32>() {
+ unimplemented!()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/for_loop_fixable.stderr b/src/tools/clippy/tests/ui/for_loop_fixable.stderr
new file mode 100644
index 000000000..ddfe66d67
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loop_fixable.stderr
@@ -0,0 +1,96 @@
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:43:15
+ |
+LL | for _v in vec.iter() {}
+ | ^^^^^^^^^^ help: to write this more concisely, try: `&vec`
+ |
+ = note: `-D clippy::explicit-iter-loop` implied by `-D warnings`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:45:15
+ |
+LL | for _v in vec.iter_mut() {}
+ | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut vec`
+
+error: it is more concise to loop over containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:48:15
+ |
+LL | for _v in out_vec.into_iter() {}
+ | ^^^^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `out_vec`
+ |
+ = note: `-D clippy::explicit-into-iter-loop` implied by `-D warnings`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:53:15
+ |
+LL | for _v in [1, 2, 3].iter() {}
+ | ^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[1, 2, 3]`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:57:15
+ |
+LL | for _v in [0; 32].iter() {}
+ | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[0; 32]`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:62:15
+ |
+LL | for _v in ll.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&ll`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:65:15
+ |
+LL | for _v in vd.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&vd`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:68:15
+ |
+LL | for _v in bh.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&bh`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:71:15
+ |
+LL | for _v in hm.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&hm`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:74:15
+ |
+LL | for _v in bt.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&bt`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:77:15
+ |
+LL | for _v in hs.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&hs`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:80:15
+ |
+LL | for _v in bs.iter() {}
+ | ^^^^^^^^^ help: to write this more concisely, try: `&bs`
+
+error: it is more concise to loop over containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:255:18
+ |
+LL | for i in iterator.into_iter() {
+ | ^^^^^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `iterator`
+
+error: it is more concise to loop over references to containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:275:18
+ |
+LL | for _ in t.into_iter() {}
+ | ^^^^^^^^^^^^^ help: to write this more concisely, try: `&t`
+
+error: it is more concise to loop over containers instead of using explicit iteration methods
+ --> $DIR/for_loop_fixable.rs:277:18
+ |
+LL | for _ in r.into_iter() {}
+ | ^^^^^^^^^^^^^ help: to write this more concisely, try: `r`
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/for_loop_unfixable.rs b/src/tools/clippy/tests/ui/for_loop_unfixable.rs
new file mode 100644
index 000000000..efcaffce2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loop_unfixable.rs
@@ -0,0 +1,15 @@
+// Tests from for_loop.rs that don't have suggestions
+
+#[warn(
+ clippy::needless_range_loop,
+ clippy::explicit_iter_loop,
+ clippy::explicit_into_iter_loop,
+ clippy::iter_next_loop,
+ clippy::for_kv_map
+)]
+#[allow(clippy::linkedlist, clippy::unnecessary_mut_passed, clippy::similar_names)]
+fn main() {
+ let vec = vec![1, 2, 3, 4];
+
+ for _v in vec.iter().next() {}
+}
diff --git a/src/tools/clippy/tests/ui/for_loop_unfixable.stderr b/src/tools/clippy/tests/ui/for_loop_unfixable.stderr
new file mode 100644
index 000000000..f769b4bdc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loop_unfixable.stderr
@@ -0,0 +1,10 @@
+error: you are iterating over `Iterator::next()` which is an Option; this will compile but is probably not what you want
+ --> $DIR/for_loop_unfixable.rs:14:15
+ |
+LL | for _v in vec.iter().next() {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::iter-next-loop` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/for_loops_over_fallibles.rs b/src/tools/clippy/tests/ui/for_loops_over_fallibles.rs
new file mode 100644
index 000000000..3390111d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loops_over_fallibles.rs
@@ -0,0 +1,72 @@
+#![warn(clippy::for_loops_over_fallibles)]
+
+fn for_loops_over_fallibles() {
+ let option = Some(1);
+ let mut result = option.ok_or("x not found");
+ let v = vec![0, 1, 2];
+
+ // check over an `Option`
+ for x in option {
+ println!("{}", x);
+ }
+
+ // check over an `Option`
+ for x in option.iter() {
+ println!("{}", x);
+ }
+
+ // check over a `Result`
+ for x in result {
+ println!("{}", x);
+ }
+
+ // check over a `Result`
+ for x in result.iter_mut() {
+ println!("{}", x);
+ }
+
+ // check over a `Result`
+ for x in result.into_iter() {
+ println!("{}", x);
+ }
+
+ for x in option.ok_or("x not found") {
+ println!("{}", x);
+ }
+
+    // make sure LOOP_OVER_NEXT lint takes precedence when next() is the last call
+ // in the chain
+ for x in v.iter().next() {
+ println!("{}", x);
+ }
+
+ // make sure we lint when next() is not the last call in the chain
+ for x in v.iter().next().and(Some(0)) {
+ println!("{}", x);
+ }
+
+ for x in v.iter().next().ok_or("x not found") {
+ println!("{}", x);
+ }
+
+ // check for false positives
+
+ // for loop false positive
+ for x in v {
+ println!("{}", x);
+ }
+
+ // while let false positive for Option
+ while let Some(x) = option {
+ println!("{}", x);
+ break;
+ }
+
+ // while let false positive for Result
+ while let Ok(x) = result {
+ println!("{}", x);
+ break;
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/for_loops_over_fallibles.stderr b/src/tools/clippy/tests/ui/for_loops_over_fallibles.stderr
new file mode 100644
index 000000000..8c8c02224
--- /dev/null
+++ b/src/tools/clippy/tests/ui/for_loops_over_fallibles.stderr
@@ -0,0 +1,95 @@
+error: for loop over `option`, which is an `Option`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:9:14
+ |
+LL | for x in option {
+ | ^^^^^^
+ |
+ = note: `-D clippy::for-loops-over-fallibles` implied by `-D warnings`
+ = help: consider replacing `for x in option` with `if let Some(x) = option`
+
+error: for loop over `option`, which is an `Option`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:14:14
+ |
+LL | for x in option.iter() {
+ | ^^^^^^
+ |
+ = help: consider replacing `for x in option.iter()` with `if let Some(x) = option`
+
+error: for loop over `result`, which is a `Result`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:19:14
+ |
+LL | for x in result {
+ | ^^^^^^
+ |
+ = help: consider replacing `for x in result` with `if let Ok(x) = result`
+
+error: for loop over `result`, which is a `Result`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:24:14
+ |
+LL | for x in result.iter_mut() {
+ | ^^^^^^
+ |
+ = help: consider replacing `for x in result.iter_mut()` with `if let Ok(x) = result`
+
+error: for loop over `result`, which is a `Result`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:29:14
+ |
+LL | for x in result.into_iter() {
+ | ^^^^^^
+ |
+ = help: consider replacing `for x in result.into_iter()` with `if let Ok(x) = result`
+
+error: for loop over `option.ok_or("x not found")`, which is a `Result`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:33:14
+ |
+LL | for x in option.ok_or("x not found") {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider replacing `for x in option.ok_or("x not found")` with `if let Ok(x) = option.ok_or("x not found")`
+
+error: you are iterating over `Iterator::next()` which is an Option; this will compile but is probably not what you want
+ --> $DIR/for_loops_over_fallibles.rs:39:14
+ |
+LL | for x in v.iter().next() {
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: `#[deny(clippy::iter_next_loop)]` on by default
+
+error: for loop over `v.iter().next().and(Some(0))`, which is an `Option`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:44:14
+ |
+LL | for x in v.iter().next().and(Some(0)) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider replacing `for x in v.iter().next().and(Some(0))` with `if let Some(x) = v.iter().next().and(Some(0))`
+
+error: for loop over `v.iter().next().ok_or("x not found")`, which is a `Result`. This is more readably written as an `if let` statement
+ --> $DIR/for_loops_over_fallibles.rs:48:14
+ |
+LL | for x in v.iter().next().ok_or("x not found") {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider replacing `for x in v.iter().next().ok_or("x not found")` with `if let Ok(x) = v.iter().next().ok_or("x not found")`
+
+error: this loop never actually loops
+ --> $DIR/for_loops_over_fallibles.rs:60:5
+ |
+LL | / while let Some(x) = option {
+LL | | println!("{}", x);
+LL | | break;
+LL | | }
+ | |_____^
+ |
+ = note: `#[deny(clippy::never_loop)]` on by default
+
+error: this loop never actually loops
+ --> $DIR/for_loops_over_fallibles.rs:66:5
+ |
+LL | / while let Ok(x) = result {
+LL | | println!("{}", x);
+LL | | break;
+LL | | }
+ | |_____^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/forget_non_drop.rs b/src/tools/clippy/tests/ui/forget_non_drop.rs
new file mode 100644
index 000000000..7580cf95e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/forget_non_drop.rs
@@ -0,0 +1,27 @@
+#![warn(clippy::forget_non_drop)]
+
+use core::mem::forget;
+
+fn forget_generic<T>(t: T) {
+ // Don't lint
+ forget(t)
+}
+
+fn main() {
+ struct Foo;
+ // Lint
+ forget(Foo);
+
+ struct Bar;
+ impl Drop for Bar {
+ fn drop(&mut self) {}
+ }
+ // Don't lint
+ forget(Bar);
+
+ struct Baz<T>(T);
+ // Lint
+ forget(Baz(Foo));
+ // Don't lint
+ forget(Baz(Bar));
+}
diff --git a/src/tools/clippy/tests/ui/forget_non_drop.stderr b/src/tools/clippy/tests/ui/forget_non_drop.stderr
new file mode 100644
index 000000000..03fb00960
--- /dev/null
+++ b/src/tools/clippy/tests/ui/forget_non_drop.stderr
@@ -0,0 +1,27 @@
+error: call to `std::mem::forget` with a value that does not implement `Drop`. Forgetting such a type is the same as dropping it
+ --> $DIR/forget_non_drop.rs:13:5
+ |
+LL | forget(Foo);
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::forget-non-drop` implied by `-D warnings`
+note: argument has type `main::Foo`
+ --> $DIR/forget_non_drop.rs:13:12
+ |
+LL | forget(Foo);
+ | ^^^
+
+error: call to `std::mem::forget` with a value that does not implement `Drop`. Forgetting such a type is the same as dropping it
+ --> $DIR/forget_non_drop.rs:24:5
+ |
+LL | forget(Baz(Foo));
+ | ^^^^^^^^^^^^^^^^
+ |
+note: argument has type `main::Baz<main::Foo>`
+ --> $DIR/forget_non_drop.rs:24:12
+ |
+LL | forget(Baz(Foo));
+ | ^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/forget_ref.rs b/src/tools/clippy/tests/ui/forget_ref.rs
new file mode 100644
index 000000000..031b415f5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/forget_ref.rs
@@ -0,0 +1,50 @@
+#![warn(clippy::forget_ref)]
+#![allow(clippy::toplevel_ref_arg)]
+#![allow(clippy::unnecessary_wraps, clippy::forget_non_drop)]
+#![allow(clippy::borrow_deref_ref)]
+
+use std::mem::forget;
+
+struct SomeStruct;
+
+fn main() {
+ forget(&SomeStruct);
+
+ let mut owned = SomeStruct;
+ forget(&owned);
+ forget(&&owned);
+ forget(&mut owned);
+ forget(owned); //OK
+
+ let reference1 = &SomeStruct;
+ forget(&*reference1);
+
+ let reference2 = &mut SomeStruct;
+ forget(reference2);
+
+ let ref reference3 = SomeStruct;
+ forget(reference3);
+}
+
+#[allow(dead_code)]
+fn test_generic_fn_forget<T>(val: T) {
+ forget(&val);
+ forget(val); //OK
+}
+
+#[allow(dead_code)]
+fn test_similarly_named_function() {
+ fn forget<T>(_val: T) {}
+ forget(&SomeStruct); //OK; call to unrelated function which happens to have the same name
+ std::mem::forget(&SomeStruct);
+}
+
+#[derive(Copy, Clone)]
+pub struct Error;
+fn produce_half_owl_error() -> Result<(), Error> {
+ Ok(())
+}
+
+fn produce_half_owl_ok() -> Result<bool, ()> {
+ Ok(true)
+}
diff --git a/src/tools/clippy/tests/ui/forget_ref.stderr b/src/tools/clippy/tests/ui/forget_ref.stderr
new file mode 100644
index 000000000..df5cd8cac
--- /dev/null
+++ b/src/tools/clippy/tests/ui/forget_ref.stderr
@@ -0,0 +1,111 @@
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:11:5
+ |
+LL | forget(&SomeStruct);
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::forget-ref` implied by `-D warnings`
+note: argument has type `&SomeStruct`
+ --> $DIR/forget_ref.rs:11:12
+ |
+LL | forget(&SomeStruct);
+ | ^^^^^^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:14:5
+ |
+LL | forget(&owned);
+ | ^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/forget_ref.rs:14:12
+ |
+LL | forget(&owned);
+ | ^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:15:5
+ |
+LL | forget(&&owned);
+ | ^^^^^^^^^^^^^^^
+ |
+note: argument has type `&&SomeStruct`
+ --> $DIR/forget_ref.rs:15:12
+ |
+LL | forget(&&owned);
+ | ^^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:16:5
+ |
+LL | forget(&mut owned);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&mut SomeStruct`
+ --> $DIR/forget_ref.rs:16:12
+ |
+LL | forget(&mut owned);
+ | ^^^^^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:20:5
+ |
+LL | forget(&*reference1);
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/forget_ref.rs:20:12
+ |
+LL | forget(&*reference1);
+ | ^^^^^^^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:23:5
+ |
+LL | forget(reference2);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&mut SomeStruct`
+ --> $DIR/forget_ref.rs:23:12
+ |
+LL | forget(reference2);
+ | ^^^^^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:26:5
+ |
+LL | forget(reference3);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/forget_ref.rs:26:12
+ |
+LL | forget(reference3);
+ | ^^^^^^^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:31:5
+ |
+LL | forget(&val);
+ | ^^^^^^^^^^^^
+ |
+note: argument has type `&T`
+ --> $DIR/forget_ref.rs:31:12
+ |
+LL | forget(&val);
+ | ^^^^
+
+error: calls to `std::mem::forget` with a reference instead of an owned value. Forgetting a reference does nothing
+ --> $DIR/forget_ref.rs:39:5
+ |
+LL | std::mem::forget(&SomeStruct);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: argument has type `&SomeStruct`
+ --> $DIR/forget_ref.rs:39:22
+ |
+LL | std::mem::forget(&SomeStruct);
+ | ^^^^^^^^^^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/format.fixed b/src/tools/clippy/tests/ui/format.fixed
new file mode 100644
index 000000000..6b754f3bd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format.fixed
@@ -0,0 +1,94 @@
+// run-rustfix
+
+#![allow(
+ unused_tuple_struct_fields,
+ clippy::print_literal,
+ clippy::redundant_clone,
+ clippy::to_string_in_format_args,
+ clippy::needless_borrow
+)]
+#![warn(clippy::useless_format)]
+
+struct Foo(pub String);
+
+macro_rules! foo {
+ ($($t:tt)*) => (Foo(format!($($t)*)))
+}
+
+fn main() {
+ "foo".to_string();
+ "{}".to_string();
+ "{} abc {}".to_string();
+ r##"foo {}
+" bar"##.to_string();
+
+ let _ = String::new();
+
+ "foo".to_string();
+ format!("{:?}", "foo"); // Don't warn about `Debug`.
+ format!("{:8}", "foo");
+ format!("{:width$}", "foo", width = 8);
+ "foo".to_string(); // Warn when the format makes no difference.
+ "foo".to_string(); // Warn when the format makes no difference.
+ format!("foo {}", "bar");
+ format!("{} bar", "foo");
+
+ let arg: String = "".to_owned();
+ arg.to_string();
+ format!("{:?}", arg); // Don't warn about debug.
+ format!("{:8}", arg);
+ format!("{:width$}", arg, width = 8);
+ arg.to_string(); // Warn when the format makes no difference.
+ arg.to_string(); // Warn when the format makes no difference.
+ format!("foo {}", arg);
+ format!("{} bar", arg);
+
+ // We don’t want to warn for non-string args; see issue #697.
+ format!("{}", 42);
+ format!("{:?}", 42);
+ format!("{:+}", 42);
+ format!("foo {}", 42);
+ format!("{} bar", 42);
+
+ // We only want to warn about `format!` itself.
+ println!("foo");
+ println!("{}", "foo");
+ println!("foo {}", "foo");
+ println!("{}", 42);
+ println!("foo {}", 42);
+
+ // A `format!` inside a macro should not trigger a warning.
+ foo!("should not warn");
+
+ // Precision on string means slicing without panicking on size.
+ format!("{:.1}", "foo"); // Could be `"foo"[..1]`
+ format!("{:.10}", "foo"); // Could not be `"foo"[..10]`
+ format!("{:.prec$}", "foo", prec = 1);
+ format!("{:.prec$}", "foo", prec = 10);
+
+ 42.to_string();
+ let x = std::path::PathBuf::from("/bar/foo/qux");
+ x.display().to_string();
+
+ // False positive
+ let a = "foo".to_string();
+ let _ = Some(a + "bar");
+
+ // Wrap it with braces
+ let v: Vec<String> = vec!["foo".to_string(), "bar".to_string()];
+ let _s: String = (&*v.join("\n")).to_string();
+
+ format!("prepend {:+}", "s");
+
+ // Issue #8290
+ let x = "foo";
+ let _ = x.to_string();
+ let _ = format!("{x:?}"); // Don't lint on debug
+ let _ = x.to_string();
+
+ // Issue #9234
+ let abc = "abc";
+ let _ = abc.to_string();
+ let xx = "xx";
+ let _ = xx.to_string();
+}
diff --git a/src/tools/clippy/tests/ui/format.rs b/src/tools/clippy/tests/ui/format.rs
new file mode 100644
index 000000000..ca9826b35
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format.rs
@@ -0,0 +1,96 @@
+// run-rustfix
+
+#![allow(
+ unused_tuple_struct_fields,
+ clippy::print_literal,
+ clippy::redundant_clone,
+ clippy::to_string_in_format_args,
+ clippy::needless_borrow
+)]
+#![warn(clippy::useless_format)]
+
+struct Foo(pub String);
+
+macro_rules! foo {
+ ($($t:tt)*) => (Foo(format!($($t)*)))
+}
+
+fn main() {
+ format!("foo");
+ format!("{{}}");
+ format!("{{}} abc {{}}");
+ format!(
+ r##"foo {{}}
+" bar"##
+ );
+
+ let _ = format!("");
+
+ format!("{}", "foo");
+ format!("{:?}", "foo"); // Don't warn about `Debug`.
+ format!("{:8}", "foo");
+ format!("{:width$}", "foo", width = 8);
+ format!("{:+}", "foo"); // Warn when the format makes no difference.
+ format!("{:<}", "foo"); // Warn when the format makes no difference.
+ format!("foo {}", "bar");
+ format!("{} bar", "foo");
+
+ let arg: String = "".to_owned();
+ format!("{}", arg);
+ format!("{:?}", arg); // Don't warn about debug.
+ format!("{:8}", arg);
+ format!("{:width$}", arg, width = 8);
+ format!("{:+}", arg); // Warn when the format makes no difference.
+ format!("{:<}", arg); // Warn when the format makes no difference.
+ format!("foo {}", arg);
+ format!("{} bar", arg);
+
+ // We don’t want to warn for non-string args; see issue #697.
+ format!("{}", 42);
+ format!("{:?}", 42);
+ format!("{:+}", 42);
+ format!("foo {}", 42);
+ format!("{} bar", 42);
+
+ // We only want to warn about `format!` itself.
+ println!("foo");
+ println!("{}", "foo");
+ println!("foo {}", "foo");
+ println!("{}", 42);
+ println!("foo {}", 42);
+
+ // A `format!` inside a macro should not trigger a warning.
+ foo!("should not warn");
+
+ // Precision on string means slicing without panicking on size.
+ format!("{:.1}", "foo"); // Could be `"foo"[..1]`
+ format!("{:.10}", "foo"); // Could not be `"foo"[..10]`
+ format!("{:.prec$}", "foo", prec = 1);
+ format!("{:.prec$}", "foo", prec = 10);
+
+ format!("{}", 42.to_string());
+ let x = std::path::PathBuf::from("/bar/foo/qux");
+ format!("{}", x.display().to_string());
+
+ // False positive
+ let a = "foo".to_string();
+ let _ = Some(format!("{}", a + "bar"));
+
+ // Wrap it with braces
+ let v: Vec<String> = vec!["foo".to_string(), "bar".to_string()];
+ let _s: String = format!("{}", &*v.join("\n"));
+
+ format!("prepend {:+}", "s");
+
+ // Issue #8290
+ let x = "foo";
+ let _ = format!("{x}");
+ let _ = format!("{x:?}"); // Don't lint on debug
+ let _ = format!("{y}", y = x);
+
+ // Issue #9234
+ let abc = "abc";
+ let _ = format!("{abc}");
+ let xx = "xx";
+ let _ = format!("{xx}");
+}
diff --git a/src/tools/clippy/tests/ui/format.stderr b/src/tools/clippy/tests/ui/format.stderr
new file mode 100644
index 000000000..6c35caeb0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format.stderr
@@ -0,0 +1,127 @@
+error: useless use of `format!`
+ --> $DIR/format.rs:19:5
+ |
+LL | format!("foo");
+ | ^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()`
+ |
+ = note: `-D clippy::useless-format` implied by `-D warnings`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:20:5
+ |
+LL | format!("{{}}");
+ | ^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"{}".to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:21:5
+ |
+LL | format!("{{}} abc {{}}");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"{} abc {}".to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:22:5
+ |
+LL | / format!(
+LL | | r##"foo {{}}
+LL | | " bar"##
+LL | | );
+ | |_____^
+ |
+help: consider using `.to_string()`
+ |
+LL ~ r##"foo {}
+LL ~ " bar"##.to_string();
+ |
+
+error: useless use of `format!`
+ --> $DIR/format.rs:27:13
+ |
+LL | let _ = format!("");
+ | ^^^^^^^^^^^ help: consider using `String::new()`: `String::new()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:29:5
+ |
+LL | format!("{}", "foo");
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:33:5
+ |
+LL | format!("{:+}", "foo"); // Warn when the format makes no difference.
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:34:5
+ |
+LL | format!("{:<}", "foo"); // Warn when the format makes no difference.
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `"foo".to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:39:5
+ |
+LL | format!("{}", arg);
+ | ^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `arg.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:43:5
+ |
+LL | format!("{:+}", arg); // Warn when the format makes no difference.
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `arg.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:44:5
+ |
+LL | format!("{:<}", arg); // Warn when the format makes no difference.
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `arg.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:71:5
+ |
+LL | format!("{}", 42.to_string());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `42.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:73:5
+ |
+LL | format!("{}", x.display().to_string());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.display().to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:77:18
+ |
+LL | let _ = Some(format!("{}", a + "bar"));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `a + "bar"`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:81:22
+ |
+LL | let _s: String = format!("{}", &*v.join("/n"));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `(&*v.join("/n")).to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:87:13
+ |
+LL | let _ = format!("{x}");
+ | ^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:89:13
+ |
+LL | let _ = format!("{y}", y = x);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `x.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:93:13
+ |
+LL | let _ = format!("{abc}");
+ | ^^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `abc.to_string()`
+
+error: useless use of `format!`
+ --> $DIR/format.rs:95:13
+ |
+LL | let _ = format!("{xx}");
+ | ^^^^^^^^^^^^^^^ help: consider using `.to_string()`: `xx.to_string()`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/format_args.fixed b/src/tools/clippy/tests/ui/format_args.fixed
new file mode 100644
index 000000000..69b5e1c72
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_args.fixed
@@ -0,0 +1,117 @@
+// run-rustfix
+
+#![allow(unreachable_code)]
+#![allow(unused_macros)]
+#![allow(unused_variables)]
+#![allow(clippy::assertions_on_constants)]
+#![allow(clippy::eq_op)]
+#![allow(clippy::print_literal)]
+#![warn(clippy::to_string_in_format_args)]
+
+use std::io::{stdout, Write};
+use std::ops::Deref;
+use std::panic::Location;
+
+struct Somewhere;
+
+impl ToString for Somewhere {
+ fn to_string(&self) -> String {
+ String::from("somewhere")
+ }
+}
+
+struct X(u32);
+
+impl Deref for X {
+ type Target = u32;
+
+ fn deref(&self) -> &u32 {
+ &self.0
+ }
+}
+
+struct Y<'a>(&'a X);
+
+impl<'a> Deref for Y<'a> {
+ type Target = &'a X;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+struct Z(u32);
+
+impl Deref for Z {
+ type Target = u32;
+
+ fn deref(&self) -> &u32 {
+ &self.0
+ }
+}
+
+impl std::fmt::Display for Z {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "Z")
+ }
+}
+
+macro_rules! my_macro {
+ () => {
+ // here be dragons, do not enter (or lint)
+ println!("error: something failed at {}", Location::caller().to_string());
+ };
+}
+
+macro_rules! my_other_macro {
+ () => {
+ Location::caller().to_string()
+ };
+}
+
+fn main() {
+ let x = &X(1);
+ let x_ref = &x;
+
+ let _ = format!("error: something failed at {}", Location::caller());
+ let _ = write!(
+ stdout(),
+ "error: something failed at {}",
+ Location::caller()
+ );
+ let _ = writeln!(
+ stdout(),
+ "error: something failed at {}",
+ Location::caller()
+ );
+ print!("error: something failed at {}", Location::caller());
+ println!("error: something failed at {}", Location::caller());
+ eprint!("error: something failed at {}", Location::caller());
+ eprintln!("error: something failed at {}", Location::caller());
+ let _ = format_args!("error: something failed at {}", Location::caller());
+ assert!(true, "error: something failed at {}", Location::caller());
+ assert_eq!(0, 0, "error: something failed at {}", Location::caller());
+ assert_ne!(0, 0, "error: something failed at {}", Location::caller());
+ panic!("error: something failed at {}", Location::caller());
+ println!("{}", *X(1));
+ println!("{}", ***Y(&X(1)));
+ println!("{}", Z(1));
+ println!("{}", **x);
+ println!("{}", ***x_ref);
+ // https://github.com/rust-lang/rust-clippy/issues/7903
+ println!("{foo}{bar}", foo = "foo", bar = "bar");
+ println!("{foo}{bar}", foo = "foo", bar = "bar");
+ println!("{foo}{bar}", bar = "bar", foo = "foo");
+ println!("{foo}{bar}", bar = "bar", foo = "foo");
+
+ // negative tests
+ println!("error: something failed at {}", Somewhere.to_string());
+ // The next two tests are negative because caching the string might be faster than calling `<X as
+ // Display>::fmt` twice.
+ println!("{} and again {0}", x.to_string());
+ println!("{foo}{foo}", foo = "foo".to_string());
+ my_macro!();
+ println!("error: something failed at {}", my_other_macro!());
+ // https://github.com/rust-lang/rust-clippy/issues/7903
+ println!("{foo}{foo:?}", foo = "foo".to_string());
+}
diff --git a/src/tools/clippy/tests/ui/format_args.rs b/src/tools/clippy/tests/ui/format_args.rs
new file mode 100644
index 000000000..3a434c5bf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_args.rs
@@ -0,0 +1,117 @@
+// run-rustfix
+
+#![allow(unreachable_code)]
+#![allow(unused_macros)]
+#![allow(unused_variables)]
+#![allow(clippy::assertions_on_constants)]
+#![allow(clippy::eq_op)]
+#![allow(clippy::print_literal)]
+#![warn(clippy::to_string_in_format_args)]
+
+use std::io::{stdout, Write};
+use std::ops::Deref;
+use std::panic::Location;
+
+struct Somewhere;
+
+impl ToString for Somewhere {
+ fn to_string(&self) -> String {
+ String::from("somewhere")
+ }
+}
+
+struct X(u32);
+
+impl Deref for X {
+ type Target = u32;
+
+ fn deref(&self) -> &u32 {
+ &self.0
+ }
+}
+
+struct Y<'a>(&'a X);
+
+impl<'a> Deref for Y<'a> {
+ type Target = &'a X;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+struct Z(u32);
+
+impl Deref for Z {
+ type Target = u32;
+
+ fn deref(&self) -> &u32 {
+ &self.0
+ }
+}
+
+impl std::fmt::Display for Z {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "Z")
+ }
+}
+
+macro_rules! my_macro {
+ () => {
+ // here be dragons, do not enter (or lint)
+ println!("error: something failed at {}", Location::caller().to_string());
+ };
+}
+
+macro_rules! my_other_macro {
+ () => {
+ Location::caller().to_string()
+ };
+}
+
+fn main() {
+ let x = &X(1);
+ let x_ref = &x;
+
+ let _ = format!("error: something failed at {}", Location::caller().to_string());
+ let _ = write!(
+ stdout(),
+ "error: something failed at {}",
+ Location::caller().to_string()
+ );
+ let _ = writeln!(
+ stdout(),
+ "error: something failed at {}",
+ Location::caller().to_string()
+ );
+ print!("error: something failed at {}", Location::caller().to_string());
+ println!("error: something failed at {}", Location::caller().to_string());
+ eprint!("error: something failed at {}", Location::caller().to_string());
+ eprintln!("error: something failed at {}", Location::caller().to_string());
+ let _ = format_args!("error: something failed at {}", Location::caller().to_string());
+ assert!(true, "error: something failed at {}", Location::caller().to_string());
+ assert_eq!(0, 0, "error: something failed at {}", Location::caller().to_string());
+ assert_ne!(0, 0, "error: something failed at {}", Location::caller().to_string());
+ panic!("error: something failed at {}", Location::caller().to_string());
+ println!("{}", X(1).to_string());
+ println!("{}", Y(&X(1)).to_string());
+ println!("{}", Z(1).to_string());
+ println!("{}", x.to_string());
+ println!("{}", x_ref.to_string());
+ // https://github.com/rust-lang/rust-clippy/issues/7903
+ println!("{foo}{bar}", foo = "foo".to_string(), bar = "bar");
+ println!("{foo}{bar}", foo = "foo", bar = "bar".to_string());
+ println!("{foo}{bar}", bar = "bar".to_string(), foo = "foo");
+ println!("{foo}{bar}", bar = "bar", foo = "foo".to_string());
+
+ // negative tests
+ println!("error: something failed at {}", Somewhere.to_string());
+ // The next two tests are negative because caching the string might be faster than calling `<X as
+ // Display>::fmt` twice.
+ println!("{} and again {0}", x.to_string());
+ println!("{foo}{foo}", foo = "foo".to_string());
+ my_macro!();
+ println!("error: something failed at {}", my_other_macro!());
+ // https://github.com/rust-lang/rust-clippy/issues/7903
+ println!("{foo}{foo:?}", foo = "foo".to_string());
+}
diff --git a/src/tools/clippy/tests/ui/format_args.stderr b/src/tools/clippy/tests/ui/format_args.stderr
new file mode 100644
index 000000000..c0cbca507
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_args.stderr
@@ -0,0 +1,130 @@
+error: `to_string` applied to a type that implements `Display` in `format!` args
+ --> $DIR/format_args.rs:76:72
+ |
+LL | let _ = format!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+ |
+ = note: `-D clippy::to-string-in-format-args` implied by `-D warnings`
+
+error: `to_string` applied to a type that implements `Display` in `write!` args
+ --> $DIR/format_args.rs:80:27
+ |
+LL | Location::caller().to_string()
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `writeln!` args
+ --> $DIR/format_args.rs:85:27
+ |
+LL | Location::caller().to_string()
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `print!` args
+ --> $DIR/format_args.rs:87:63
+ |
+LL | print!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:88:65
+ |
+LL | println!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `eprint!` args
+ --> $DIR/format_args.rs:89:64
+ |
+LL | eprint!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `eprintln!` args
+ --> $DIR/format_args.rs:90:66
+ |
+LL | eprintln!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `format_args!` args
+ --> $DIR/format_args.rs:91:77
+ |
+LL | let _ = format_args!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `assert!` args
+ --> $DIR/format_args.rs:92:70
+ |
+LL | assert!(true, "error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `assert_eq!` args
+ --> $DIR/format_args.rs:93:73
+ |
+LL | assert_eq!(0, 0, "error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `assert_ne!` args
+ --> $DIR/format_args.rs:94:73
+ |
+LL | assert_ne!(0, 0, "error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `panic!` args
+ --> $DIR/format_args.rs:95:63
+ |
+LL | panic!("error: something failed at {}", Location::caller().to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:96:20
+ |
+LL | println!("{}", X(1).to_string());
+ | ^^^^^^^^^^^^^^^^ help: use this: `*X(1)`
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:97:20
+ |
+LL | println!("{}", Y(&X(1)).to_string());
+ | ^^^^^^^^^^^^^^^^^^^^ help: use this: `***Y(&X(1))`
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:98:24
+ |
+LL | println!("{}", Z(1).to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:99:20
+ |
+LL | println!("{}", x.to_string());
+ | ^^^^^^^^^^^^^ help: use this: `**x`
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:100:20
+ |
+LL | println!("{}", x_ref.to_string());
+ | ^^^^^^^^^^^^^^^^^ help: use this: `***x_ref`
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:102:39
+ |
+LL | println!("{foo}{bar}", foo = "foo".to_string(), bar = "bar");
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:103:52
+ |
+LL | println!("{foo}{bar}", foo = "foo", bar = "bar".to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:104:39
+ |
+LL | println!("{foo}{bar}", bar = "bar".to_string(), foo = "foo");
+ | ^^^^^^^^^^^^ help: remove this
+
+error: `to_string` applied to a type that implements `Display` in `println!` args
+ --> $DIR/format_args.rs:105:52
+ |
+LL | println!("{foo}{bar}", bar = "bar", foo = "foo".to_string());
+ | ^^^^^^^^^^^^ help: remove this
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui/format_args_unfixable.rs b/src/tools/clippy/tests/ui/format_args_unfixable.rs
new file mode 100644
index 000000000..b24ddf732
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_args_unfixable.rs
@@ -0,0 +1,61 @@
+#![allow(clippy::assertions_on_constants)]
+#![allow(clippy::eq_op)]
+#![warn(clippy::format_in_format_args)]
+#![warn(clippy::to_string_in_format_args)]
+
+use std::io::{stdout, Error, ErrorKind, Write};
+use std::ops::Deref;
+use std::panic::Location;
+
+macro_rules! my_macro {
+ () => {
+ // here be dragons, do not enter (or lint)
+ println!("error: {}", format!("something failed at {}", Location::caller()));
+ };
+}
+
+macro_rules! my_other_macro {
+ () => {
+ format!("something failed at {}", Location::caller())
+ };
+}
+
+fn main() {
+ let error = Error::new(ErrorKind::Other, "bad thing");
+ let x = 'x';
+
+ println!("error: {}", format!("something failed at {}", Location::caller()));
+ println!("{}: {}", error, format!("something failed at {}", Location::caller()));
+ println!("{:?}: {}", error, format!("something failed at {}", Location::caller()));
+ println!("{{}}: {}", format!("something failed at {}", Location::caller()));
+ println!(r#"error: "{}""#, format!("something failed at {}", Location::caller()));
+ println!("error: {}", format!(r#"something failed at "{}""#, Location::caller()));
+ println!("error: {}", format!("something failed at {} {0}", Location::caller()));
+ let _ = format!("error: {}", format!("something failed at {}", Location::caller()));
+ let _ = write!(
+ stdout(),
+ "error: {}",
+ format!("something failed at {}", Location::caller())
+ );
+ let _ = writeln!(
+ stdout(),
+ "error: {}",
+ format!("something failed at {}", Location::caller())
+ );
+ print!("error: {}", format!("something failed at {}", Location::caller()));
+ eprint!("error: {}", format!("something failed at {}", Location::caller()));
+ eprintln!("error: {}", format!("something failed at {}", Location::caller()));
+ let _ = format_args!("error: {}", format!("something failed at {}", Location::caller()));
+ assert!(true, "error: {}", format!("something failed at {}", Location::caller()));
+ assert_eq!(0, 0, "error: {}", format!("something failed at {}", Location::caller()));
+ assert_ne!(0, 0, "error: {}", format!("something failed at {}", Location::caller()));
+ panic!("error: {}", format!("something failed at {}", Location::caller()));
+
+ // negative tests
+ println!("error: {}", format_args!("something failed at {}", Location::caller()));
+ println!("error: {:>70}", format!("something failed at {}", Location::caller()));
+ println!("error: {} {0}", format!("something failed at {}", Location::caller()));
+ println!("{} and again {0}", format!("hi {}", x));
+ my_macro!();
+ println!("error: {}", my_other_macro!());
+}
diff --git a/src/tools/clippy/tests/ui/format_args_unfixable.stderr b/src/tools/clippy/tests/ui/format_args_unfixable.stderr
new file mode 100644
index 000000000..4476218ad
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_args_unfixable.stderr
@@ -0,0 +1,175 @@
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:27:5
+ |
+LL | println!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::format-in-format-args` implied by `-D warnings`
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:28:5
+ |
+LL | println!("{}: {}", error, format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:29:5
+ |
+LL | println!("{:?}: {}", error, format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:30:5
+ |
+LL | println!("{{}}: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:31:5
+ |
+LL | println!(r#"error: "{}""#, format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:32:5
+ |
+LL | println!("error: {}", format!(r#"something failed at "{}""#, Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `println!` args
+ --> $DIR/format_args_unfixable.rs:33:5
+ |
+LL | println!("error: {}", format!("something failed at {} {0}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `println!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `format!` args
+ --> $DIR/format_args_unfixable.rs:34:13
+ |
+LL | let _ = format!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `format!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `write!` args
+ --> $DIR/format_args_unfixable.rs:35:13
+ |
+LL | let _ = write!(
+ | _____________^
+LL | | stdout(),
+LL | | "error: {}",
+LL | | format!("something failed at {}", Location::caller())
+LL | | );
+ | |_____^
+ |
+ = help: combine the `format!(..)` arguments with the outer `write!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `writeln!` args
+ --> $DIR/format_args_unfixable.rs:40:13
+ |
+LL | let _ = writeln!(
+ | _____________^
+LL | | stdout(),
+LL | | "error: {}",
+LL | | format!("something failed at {}", Location::caller())
+LL | | );
+ | |_____^
+ |
+ = help: combine the `format!(..)` arguments with the outer `writeln!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `print!` args
+ --> $DIR/format_args_unfixable.rs:45:5
+ |
+LL | print!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `print!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `eprint!` args
+ --> $DIR/format_args_unfixable.rs:46:5
+ |
+LL | eprint!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `eprint!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `eprintln!` args
+ --> $DIR/format_args_unfixable.rs:47:5
+ |
+LL | eprintln!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `eprintln!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `format_args!` args
+ --> $DIR/format_args_unfixable.rs:48:13
+ |
+LL | let _ = format_args!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `format_args!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `assert!` args
+ --> $DIR/format_args_unfixable.rs:49:5
+ |
+LL | assert!(true, "error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `assert!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `assert_eq!` args
+ --> $DIR/format_args_unfixable.rs:50:5
+ |
+LL | assert_eq!(0, 0, "error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `assert_eq!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `assert_ne!` args
+ --> $DIR/format_args_unfixable.rs:51:5
+ |
+LL | assert_ne!(0, 0, "error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `assert_ne!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: `format!` in `panic!` args
+ --> $DIR/format_args_unfixable.rs:52:5
+ |
+LL | panic!("error: {}", format!("something failed at {}", Location::caller()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: combine the `format!(..)` arguments with the outer `panic!(..)` call
+ = help: or consider changing `format!` to `format_args!`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/format_push_string.rs b/src/tools/clippy/tests/ui/format_push_string.rs
new file mode 100644
index 000000000..4db13d650
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_push_string.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::format_push_string)]
+
+fn main() {
+ let mut string = String::new();
+ string += &format!("{:?}", 1234);
+ string.push_str(&format!("{:?}", 5678));
+}
diff --git a/src/tools/clippy/tests/ui/format_push_string.stderr b/src/tools/clippy/tests/ui/format_push_string.stderr
new file mode 100644
index 000000000..953784bcc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_push_string.stderr
@@ -0,0 +1,19 @@
+error: `format!(..)` appended to existing `String`
+ --> $DIR/format_push_string.rs:5:5
+ |
+LL | string += &format!("{:?}", 1234);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::format-push-string` implied by `-D warnings`
+ = help: consider using `write!` to avoid the extra allocation
+
+error: `format!(..)` appended to existing `String`
+ --> $DIR/format_push_string.rs:6:5
+ |
+LL | string.push_str(&format!("{:?}", 5678));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `write!` to avoid the extra allocation
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/formatting.rs b/src/tools/clippy/tests/ui/formatting.rs
new file mode 100644
index 000000000..471a8e0de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/formatting.rs
@@ -0,0 +1,73 @@
+#![warn(clippy::all)]
+#![allow(unused_variables)]
+#![allow(unused_assignments)]
+#![allow(clippy::if_same_then_else)]
+#![allow(clippy::deref_addrof)]
+#![allow(clippy::nonminimal_bool)]
+
+fn foo() -> bool {
+ true
+}
+
+#[rustfmt::skip]
+fn main() {
+ // weird op_eq formatting:
+ let mut a = 42;
+ a =- 35;
+ a =* &191;
+
+ let mut b = true;
+ b =! false;
+
+ // those are ok:
+ a = -35;
+ a = *&191;
+ b = !false;
+
+ // possible missing comma in an array
+ let _ = &[
+ -1, -2, -3 // <= no comma here
+ -4, -5, -6
+ ];
+ let _ = &[
+ -1, -2, -3 // <= no comma here
+ *4, -5, -6
+ ];
+
+ // those are ok:
+ let _ = &[
+ -1, -2, -3,
+ -4, -5, -6
+ ];
+ let _ = &[
+ -1, -2, -3,
+ -4, -5, -6,
+ ];
+ let _ = &[
+ 1 + 2, 3 +
+ 4, 5 + 6,
+ ];
+
+ // don't lint for bin op without unary equiv
+ // issue 3244
+ vec![
+ 1
+ / 2,
+ ];
+ // issue 3396
+ vec![
+ true
+ | false,
+ ];
+
+ // don't lint if the indentation suggests not to
+ let _ = &[
+ 1 + 2, 3
+ - 4, 5
+ ];
+ // lint if it doesn't
+ let _ = &[
+ -1
+ -4,
+ ];
+}
diff --git a/src/tools/clippy/tests/ui/formatting.stderr b/src/tools/clippy/tests/ui/formatting.stderr
new file mode 100644
index 000000000..9272cd604
--- /dev/null
+++ b/src/tools/clippy/tests/ui/formatting.stderr
@@ -0,0 +1,52 @@
+error: this looks like you are trying to use `.. -= ..`, but you really are doing `.. = (- ..)`
+ --> $DIR/formatting.rs:16:6
+ |
+LL | a =- 35;
+ | ^^^^
+ |
+ = note: `-D clippy::suspicious-assignment-formatting` implied by `-D warnings`
+ = note: to remove this lint, use either `-=` or `= -`
+
+error: this looks like you are trying to use `.. *= ..`, but you really are doing `.. = (* ..)`
+ --> $DIR/formatting.rs:17:6
+ |
+LL | a =* &191;
+ | ^^^^
+ |
+ = note: to remove this lint, use either `*=` or `= *`
+
+error: this looks like you are trying to use `.. != ..`, but you really are doing `.. = (! ..)`
+ --> $DIR/formatting.rs:20:6
+ |
+LL | b =! false;
+ | ^^^^
+ |
+ = note: to remove this lint, use either `!=` or `= !`
+
+error: possibly missing a comma here
+ --> $DIR/formatting.rs:29:19
+ |
+LL | -1, -2, -3 // <= no comma here
+ | ^
+ |
+ = note: `-D clippy::possible-missing-comma` implied by `-D warnings`
+ = note: to remove this lint, add a comma or write the expr in a single line
+
+error: possibly missing a comma here
+ --> $DIR/formatting.rs:33:19
+ |
+LL | -1, -2, -3 // <= no comma here
+ | ^
+ |
+ = note: to remove this lint, add a comma or write the expr in a single line
+
+error: possibly missing a comma here
+ --> $DIR/formatting.rs:70:11
+ |
+LL | -1
+ | ^
+ |
+ = note: to remove this lint, add a comma or write the expr in a single line
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed
new file mode 100644
index 000000000..48f809331
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed
@@ -0,0 +1,61 @@
+// run-rustfix
+
+#![warn(clippy::from_iter_instead_of_collect)]
+#![allow(unused_imports, unused_tuple_struct_fields)]
+
+use std::collections::{BTreeMap, BTreeSet, HashMap, VecDeque};
+
+struct Foo(Vec<bool>);
+
+impl FromIterator<bool> for Foo {
+ fn from_iter<T: IntoIterator<Item = bool>>(_: T) -> Self {
+ todo!()
+ }
+}
+
+impl<'a> FromIterator<&'a bool> for Foo {
+ fn from_iter<T: IntoIterator<Item = &'a bool>>(iter: T) -> Self {
+ iter.into_iter().copied().collect::<Self>()
+ }
+}
+
+fn main() {
+ let iter_expr = std::iter::repeat(5).take(5);
+ let _ = iter_expr.collect::<Vec<_>>();
+
+ let _ = vec![5, 5, 5, 5].iter().enumerate().collect::<HashMap<usize, &i8>>();
+
+ Vec::from_iter(vec![42u32]);
+
+ let a = vec![0, 1, 2];
+ assert_eq!(a, (0..3).collect::<Vec<_>>());
+ assert_eq!(a, (0..3).collect::<Vec<i32>>());
+
+ let mut b = (0..3).collect::<VecDeque<_>>();
+ b.push_back(4);
+
+ let mut b = (0..3).collect::<VecDeque<i32>>();
+ b.push_back(4);
+
+ {
+ use std::collections;
+ let mut b = (0..3).collect::<collections::VecDeque<i32>>();
+ b.push_back(4);
+ }
+
+ let values = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd')];
+ let bm = values.iter().cloned().collect::<BTreeMap<_, _>>();
+ let mut bar = bm.range(0..2).collect::<BTreeMap<_, _>>();
+ bar.insert(&4, &'e');
+
+ let mut bts = (0..3).collect::<BTreeSet<_>>();
+ bts.insert(2);
+ {
+ use std::collections;
+ let _ = (0..3).collect::<collections::BTreeSet<_>>();
+ let _ = (0..3).collect::<collections::BTreeSet<u32>>();
+ }
+
+ for _i in [1, 2, 3].iter().collect::<Vec<_>>() {}
+ for _i in [1, 2, 3].iter().collect::<Vec<&i32>>() {}
+}
diff --git a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs
new file mode 100644
index 000000000..ebe0ad278
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs
@@ -0,0 +1,61 @@
+// run-rustfix
+
+#![warn(clippy::from_iter_instead_of_collect)]
+#![allow(unused_imports, unused_tuple_struct_fields)]
+
+use std::collections::{BTreeMap, BTreeSet, HashMap, VecDeque};
+
+struct Foo(Vec<bool>);
+
+impl FromIterator<bool> for Foo {
+ fn from_iter<T: IntoIterator<Item = bool>>(_: T) -> Self {
+ todo!()
+ }
+}
+
+impl<'a> FromIterator<&'a bool> for Foo {
+ fn from_iter<T: IntoIterator<Item = &'a bool>>(iter: T) -> Self {
+ <Self as FromIterator<bool>>::from_iter(iter.into_iter().copied())
+ }
+}
+
+fn main() {
+ let iter_expr = std::iter::repeat(5).take(5);
+ let _ = Vec::from_iter(iter_expr);
+
+ let _ = HashMap::<usize, &i8>::from_iter(vec![5, 5, 5, 5].iter().enumerate());
+
+ Vec::from_iter(vec![42u32]);
+
+ let a = vec![0, 1, 2];
+ assert_eq!(a, Vec::from_iter(0..3));
+ assert_eq!(a, Vec::<i32>::from_iter(0..3));
+
+ let mut b = VecDeque::from_iter(0..3);
+ b.push_back(4);
+
+ let mut b = VecDeque::<i32>::from_iter(0..3);
+ b.push_back(4);
+
+ {
+ use std::collections;
+ let mut b = collections::VecDeque::<i32>::from_iter(0..3);
+ b.push_back(4);
+ }
+
+ let values = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd')];
+ let bm = BTreeMap::from_iter(values.iter().cloned());
+ let mut bar = BTreeMap::from_iter(bm.range(0..2));
+ bar.insert(&4, &'e');
+
+ let mut bts = BTreeSet::from_iter(0..3);
+ bts.insert(2);
+ {
+ use std::collections;
+ let _ = collections::BTreeSet::from_iter(0..3);
+ let _ = collections::BTreeSet::<u32>::from_iter(0..3);
+ }
+
+ for _i in Vec::from_iter([1, 2, 3].iter()) {}
+ for _i in Vec::<&i32>::from_iter([1, 2, 3].iter()) {}
+}
diff --git a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr
new file mode 100644
index 000000000..8aa3c3c01
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr
@@ -0,0 +1,94 @@
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:18:9
+ |
+LL | <Self as FromIterator<bool>>::from_iter(iter.into_iter().copied())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.into_iter().copied().collect::<Self>()`
+ |
+ = note: `-D clippy::from-iter-instead-of-collect` implied by `-D warnings`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:24:13
+ |
+LL | let _ = Vec::from_iter(iter_expr);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter_expr.collect::<Vec<_>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:26:13
+ |
+LL | let _ = HashMap::<usize, &i8>::from_iter(vec![5, 5, 5, 5].iter().enumerate());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `vec![5, 5, 5, 5].iter().enumerate().collect::<HashMap<usize, &i8>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:31:19
+ |
+LL | assert_eq!(a, Vec::from_iter(0..3));
+ | ^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<Vec<_>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:32:19
+ |
+LL | assert_eq!(a, Vec::<i32>::from_iter(0..3));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<Vec<i32>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:34:17
+ |
+LL | let mut b = VecDeque::from_iter(0..3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<VecDeque<_>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:37:17
+ |
+LL | let mut b = VecDeque::<i32>::from_iter(0..3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<VecDeque<i32>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:42:21
+ |
+LL | let mut b = collections::VecDeque::<i32>::from_iter(0..3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<collections::VecDeque<i32>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:47:14
+ |
+LL | let bm = BTreeMap::from_iter(values.iter().cloned());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `values.iter().cloned().collect::<BTreeMap<_, _>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:48:19
+ |
+LL | let mut bar = BTreeMap::from_iter(bm.range(0..2));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `bm.range(0..2).collect::<BTreeMap<_, _>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:51:19
+ |
+LL | let mut bts = BTreeSet::from_iter(0..3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<BTreeSet<_>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:55:17
+ |
+LL | let _ = collections::BTreeSet::from_iter(0..3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<collections::BTreeSet<_>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:56:17
+ |
+LL | let _ = collections::BTreeSet::<u32>::from_iter(0..3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `(0..3).collect::<collections::BTreeSet<u32>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:59:15
+ |
+LL | for _i in Vec::from_iter([1, 2, 3].iter()) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `[1, 2, 3].iter().collect::<Vec<_>>()`
+
+error: usage of `FromIterator::from_iter`
+ --> $DIR/from_iter_instead_of_collect.rs:60:15
+ |
+LL | for _i in Vec::<&i32>::from_iter([1, 2, 3].iter()) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `[1, 2, 3].iter().collect::<Vec<&i32>>()`
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/from_over_into.rs b/src/tools/clippy/tests/ui/from_over_into.rs
new file mode 100644
index 000000000..292d0924f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_over_into.rs
@@ -0,0 +1,21 @@
+#![warn(clippy::from_over_into)]
+
+// this should throw an error
+struct StringWrapper(String);
+
+impl Into<StringWrapper> for String {
+ fn into(self) -> StringWrapper {
+ StringWrapper(self)
+ }
+}
+
+// this is fine
+struct A(String);
+
+impl From<String> for A {
+ fn from(s: String) -> A {
+ A(s)
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/from_over_into.stderr b/src/tools/clippy/tests/ui/from_over_into.stderr
new file mode 100644
index 000000000..2951e6bda
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_over_into.stderr
@@ -0,0 +1,11 @@
+error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true
+ --> $DIR/from_over_into.rs:6:1
+ |
+LL | impl Into<StringWrapper> for String {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::from-over-into` implied by `-D warnings`
+ = help: consider to implement `From<std::string::String>` instead
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/from_str_radix_10.rs b/src/tools/clippy/tests/ui/from_str_radix_10.rs
new file mode 100644
index 000000000..2f2ea0484
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_str_radix_10.rs
@@ -0,0 +1,52 @@
+#![warn(clippy::from_str_radix_10)]
+
+mod some_mod {
+ // fake function that shouldn't trigger the lint
+ pub fn from_str_radix(_: &str, _: u32) -> Result<(), std::num::ParseIntError> {
+ unimplemented!()
+ }
+}
+
+// fake function that shouldn't trigger the lint
+fn from_str_radix(_: &str, _: u32) -> Result<(), std::num::ParseIntError> {
+ unimplemented!()
+}
+
+// to test parenthesis addition
+struct Test;
+
+impl std::ops::Add<Test> for Test {
+ type Output = &'static str;
+
+ fn add(self, _: Self) -> Self::Output {
+ "304"
+ }
+}
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ // all of these should trigger the lint
+ u32::from_str_radix("30", 10)?;
+ i64::from_str_radix("24", 10)?;
+ isize::from_str_radix("100", 10)?;
+ u8::from_str_radix("7", 10)?;
+ u16::from_str_radix(&("10".to_owned() + "5"), 10)?;
+ i128::from_str_radix(Test + Test, 10)?;
+
+ let string = "300";
+ i32::from_str_radix(string, 10)?;
+
+ let stringier = "400".to_string();
+ i32::from_str_radix(&stringier, 10)?;
+
+ // none of these should trigger the lint
+ u16::from_str_radix("20", 3)?;
+ i32::from_str_radix("45", 12)?;
+ usize::from_str_radix("10", 16)?;
+ i128::from_str_radix("10", 13)?;
+ some_mod::from_str_radix("50", 10)?;
+ some_mod::from_str_radix("50", 6)?;
+ from_str_radix("50", 10)?;
+ from_str_radix("50", 6)?;
+
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/from_str_radix_10.stderr b/src/tools/clippy/tests/ui/from_str_radix_10.stderr
new file mode 100644
index 000000000..da5c16f8d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/from_str_radix_10.stderr
@@ -0,0 +1,52 @@
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:28:5
+ |
+LL | u32::from_str_radix("30", 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"30".parse::<u32>()`
+ |
+ = note: `-D clippy::from-str-radix-10` implied by `-D warnings`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:29:5
+ |
+LL | i64::from_str_radix("24", 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"24".parse::<i64>()`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:30:5
+ |
+LL | isize::from_str_radix("100", 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"100".parse::<isize>()`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:31:5
+ |
+LL | u8::from_str_radix("7", 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"7".parse::<u8>()`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:32:5
+ |
+LL | u16::from_str_radix(&("10".to_owned() + "5"), 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `("10".to_owned() + "5").parse::<u16>()`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:33:5
+ |
+LL | i128::from_str_radix(Test + Test, 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(Test + Test).parse::<i128>()`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:36:5
+ |
+LL | i32::from_str_radix(string, 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `string.parse::<i32>()`
+
+error: this call to `from_str_radix` can be replaced with a call to `str::parse`
+ --> $DIR/from_str_radix_10.rs:39:5
+ |
+LL | i32::from_str_radix(&stringier, 10)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `stringier.parse::<i32>()`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/functions.rs b/src/tools/clippy/tests/ui/functions.rs
new file mode 100644
index 000000000..5521870ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/functions.rs
@@ -0,0 +1,112 @@
+#![warn(clippy::all)]
+#![allow(dead_code)]
+#![allow(unused_unsafe, clippy::missing_safety_doc)]
+
+// TOO_MANY_ARGUMENTS
+fn good(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool) {}
+
+fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {}
+
+#[rustfmt::skip]
+fn bad_multiline(
+ one: u32,
+ two: u32,
+ three: &str,
+ four: bool,
+ five: f32,
+ six: f32,
+ seven: bool,
+ eight: ()
+) {
+ let _one = one;
+ let _two = two;
+ let _three = three;
+ let _four = four;
+ let _five = five;
+ let _six = six;
+ let _seven = seven;
+}
+
+// don't lint extern fns
+extern "C" fn extern_fn(
+ _one: u32,
+ _two: u32,
+ _three: *const u8,
+ _four: bool,
+ _five: f32,
+ _six: f32,
+ _seven: bool,
+ _eight: *const std::ffi::c_void,
+) {
+}
+
+pub trait Foo {
+ fn good(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool);
+ fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ());
+
+ fn ptr(p: *const u8);
+}
+
+pub struct Bar;
+
+impl Bar {
+ fn good_method(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool) {}
+ fn bad_method(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {}
+}
+
+// ok, we don’t want to warn on implementations
+impl Foo for Bar {
+ fn good(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool) {}
+ fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {}
+
+ fn ptr(p: *const u8) {
+ println!("{}", unsafe { *p });
+ println!("{:?}", unsafe { p.as_ref() });
+ unsafe { std::ptr::read(p) };
+ }
+}
+
+// NOT_UNSAFE_PTR_ARG_DEREF
+
+fn private(p: *const u8) {
+ println!("{}", unsafe { *p });
+}
+
+pub fn public(p: *const u8) {
+ println!("{}", unsafe { *p });
+ println!("{:?}", unsafe { p.as_ref() });
+ unsafe { std::ptr::read(p) };
+}
+
+type Alias = *const u8;
+
+pub fn type_alias(p: Alias) {
+ println!("{}", unsafe { *p });
+ println!("{:?}", unsafe { p.as_ref() });
+ unsafe { std::ptr::read(p) };
+}
+
+impl Bar {
+ fn private(self, p: *const u8) {
+ println!("{}", unsafe { *p });
+ }
+
+ pub fn public(self, p: *const u8) {
+ println!("{}", unsafe { *p });
+ println!("{:?}", unsafe { p.as_ref() });
+ unsafe { std::ptr::read(p) };
+ }
+
+ pub fn public_ok(self, p: *const u8) {
+ if !p.is_null() {
+ println!("{:p}", p);
+ }
+ }
+
+ pub unsafe fn public_unsafe(self, p: *const u8) {
+ println!("{}", unsafe { *p });
+ println!("{:?}", unsafe { p.as_ref() });
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/functions.stderr b/src/tools/clippy/tests/ui/functions.stderr
new file mode 100644
index 000000000..8ebd4997f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/functions.stderr
@@ -0,0 +1,108 @@
+error: this function has too many arguments (8/7)
+ --> $DIR/functions.rs:8:1
+ |
+LL | fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::too-many-arguments` implied by `-D warnings`
+
+error: this function has too many arguments (8/7)
+ --> $DIR/functions.rs:11:1
+ |
+LL | / fn bad_multiline(
+LL | | one: u32,
+LL | | two: u32,
+LL | | three: &str,
+... |
+LL | | eight: ()
+LL | | ) {
+ | |__^
+
+error: this function has too many arguments (8/7)
+ --> $DIR/functions.rs:45:5
+ |
+LL | fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this function has too many arguments (8/7)
+ --> $DIR/functions.rs:54:5
+ |
+LL | fn bad_method(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:63:34
+ |
+LL | println!("{}", unsafe { *p });
+ | ^
+ |
+ = note: `-D clippy::not-unsafe-ptr-arg-deref` implied by `-D warnings`
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:64:35
+ |
+LL | println!("{:?}", unsafe { p.as_ref() });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:65:33
+ |
+LL | unsafe { std::ptr::read(p) };
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:76:30
+ |
+LL | println!("{}", unsafe { *p });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:77:31
+ |
+LL | println!("{:?}", unsafe { p.as_ref() });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:78:29
+ |
+LL | unsafe { std::ptr::read(p) };
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:84:30
+ |
+LL | println!("{}", unsafe { *p });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:85:31
+ |
+LL | println!("{:?}", unsafe { p.as_ref() });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:86:29
+ |
+LL | unsafe { std::ptr::read(p) };
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:95:34
+ |
+LL | println!("{}", unsafe { *p });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:96:35
+ |
+LL | println!("{:?}", unsafe { p.as_ref() });
+ | ^
+
+error: this public function might dereference a raw pointer but is not marked `unsafe`
+ --> $DIR/functions.rs:97:33
+ |
+LL | unsafe { std::ptr::read(p) };
+ | ^
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/functions_maxlines.rs b/src/tools/clippy/tests/ui/functions_maxlines.rs
new file mode 100644
index 000000000..5e1ee55e0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/functions_maxlines.rs
@@ -0,0 +1,163 @@
+#![warn(clippy::too_many_lines)]
+
+fn good_lines() {
+ /* println!("This is good."); */
+ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* */ // println!("This is good.");
+ /* println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good."); */
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+ println!("This is good.");
+}
+
+fn bad_lines() {
+ println!("Dont get confused by braces: {{}}");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+ println!("This is bad.");
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/functions_maxlines.stderr b/src/tools/clippy/tests/ui/functions_maxlines.stderr
new file mode 100644
index 000000000..dc6c8ba2f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/functions_maxlines.stderr
@@ -0,0 +1,16 @@
+error: this function has too many lines (102/100)
+ --> $DIR/functions_maxlines.rs:58:1
+ |
+LL | / fn bad_lines() {
+LL | | println!("Dont get confused by braces: {{}}");
+LL | | println!("This is bad.");
+LL | | println!("This is bad.");
+... |
+LL | | println!("This is bad.");
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::too-many-lines` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/future_not_send.rs b/src/tools/clippy/tests/ui/future_not_send.rs
new file mode 100644
index 000000000..858036692
--- /dev/null
+++ b/src/tools/clippy/tests/ui/future_not_send.rs
@@ -0,0 +1,79 @@
+#![warn(clippy::future_not_send)]
+
+use std::cell::Cell;
+use std::rc::Rc;
+use std::sync::Arc;
+
+async fn private_future(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ async { true }.await
+}
+
+pub async fn public_future(rc: Rc<[u8]>) {
+ async { true }.await;
+}
+
+pub async fn public_send(arc: Arc<[u8]>) -> bool {
+ async { false }.await
+}
+
+async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ true
+}
+
+pub async fn public_future2(rc: Rc<[u8]>) {}
+
+pub async fn public_send2(arc: Arc<[u8]>) -> bool {
+ false
+}
+
+struct Dummy {
+ rc: Rc<[u8]>,
+}
+
+impl Dummy {
+ async fn private_future(&self) -> usize {
+ async { true }.await;
+ self.rc.len()
+ }
+
+ pub async fn public_future(&self) {
+ self.private_future().await;
+ }
+
+ #[allow(clippy::manual_async_fn)]
+ pub fn public_send(&self) -> impl std::future::Future<Output = bool> {
+ async { false }
+ }
+}
+
+async fn generic_future<T>(t: T) -> T
+where
+ T: Send,
+{
+ let rt = &t;
+ async { true }.await;
+ t
+}
+
+async fn generic_future_send<T>(t: T)
+where
+ T: Send,
+{
+ async { true }.await;
+}
+
+async fn unclear_future<T>(t: T) {}
+
+fn main() {
+ let rc = Rc::new([1, 2, 3]);
+ private_future(rc.clone(), &Cell::new(42));
+ public_future(rc.clone());
+ let arc = Arc::new([4, 5, 6]);
+ public_send(arc);
+ generic_future(42);
+ generic_future_send(42);
+
+ let dummy = Dummy { rc };
+ dummy.public_future();
+ dummy.public_send();
+}
diff --git a/src/tools/clippy/tests/ui/future_not_send.stderr b/src/tools/clippy/tests/ui/future_not_send.stderr
new file mode 100644
index 000000000..a9f2ad36d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/future_not_send.stderr
@@ -0,0 +1,145 @@
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:7:62
+ |
+LL | async fn private_future(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ | ^^^^ future returned by `private_future` is not `Send`
+ |
+ = note: `-D clippy::future-not-send` implied by `-D warnings`
+note: future is not `Send` as this value is used across an await
+ --> $DIR/future_not_send.rs:8:19
+ |
+LL | async fn private_future(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ | -- has type `std::rc::Rc<[u8]>` which is not `Send`
+LL | async { true }.await
+ | ^^^^^^ await occurs here, with `rc` maybe used later
+LL | }
+ | - `rc` is later dropped here
+ = note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Send`
+note: future is not `Send` as this value is used across an await
+ --> $DIR/future_not_send.rs:8:19
+ |
+LL | async fn private_future(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ | ---- has type `&std::cell::Cell<usize>` which is not `Send`
+LL | async { true }.await
+ | ^^^^^^ await occurs here, with `cell` maybe used later
+LL | }
+ | - `cell` is later dropped here
+ = note: `std::cell::Cell<usize>` doesn't implement `std::marker::Sync`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:11:42
+ |
+LL | pub async fn public_future(rc: Rc<[u8]>) {
+ | ^ future returned by `public_future` is not `Send`
+ |
+note: future is not `Send` as this value is used across an await
+ --> $DIR/future_not_send.rs:12:19
+ |
+LL | pub async fn public_future(rc: Rc<[u8]>) {
+ | -- has type `std::rc::Rc<[u8]>` which is not `Send`
+LL | async { true }.await;
+ | ^^^^^^ await occurs here, with `rc` maybe used later
+LL | }
+ | - `rc` is later dropped here
+ = note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Send`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:19:63
+ |
+LL | async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ | ^^^^ future returned by `private_future2` is not `Send`
+ |
+note: captured value is not `Send`
+ --> $DIR/future_not_send.rs:19:26
+ |
+LL | async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ | ^^ has type `std::rc::Rc<[u8]>` which is not `Send`
+ = note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Send`
+note: captured value is not `Send` because `&` references cannot be sent unless their referent is `Sync`
+ --> $DIR/future_not_send.rs:19:40
+ |
+LL | async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
+ | ^^^^ has type `&std::cell::Cell<usize>` which is not `Send`, because `std::cell::Cell<usize>` is not `Sync`
+ = note: `std::cell::Cell<usize>` doesn't implement `std::marker::Sync`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:23:43
+ |
+LL | pub async fn public_future2(rc: Rc<[u8]>) {}
+ | ^ future returned by `public_future2` is not `Send`
+ |
+note: captured value is not `Send`
+ --> $DIR/future_not_send.rs:23:29
+ |
+LL | pub async fn public_future2(rc: Rc<[u8]>) {}
+ | ^^ has type `std::rc::Rc<[u8]>` which is not `Send`
+ = note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Send`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:34:39
+ |
+LL | async fn private_future(&self) -> usize {
+ | ^^^^^ future returned by `private_future` is not `Send`
+ |
+note: future is not `Send` as this value is used across an await
+ --> $DIR/future_not_send.rs:35:23
+ |
+LL | async fn private_future(&self) -> usize {
+ | ----- has type `&Dummy` which is not `Send`
+LL | async { true }.await;
+ | ^^^^^^ await occurs here, with `&self` maybe used later
+LL | self.rc.len()
+LL | }
+ | - `&self` is later dropped here
+ = note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Sync`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:39:39
+ |
+LL | pub async fn public_future(&self) {
+ | ^ future returned by `public_future` is not `Send`
+ |
+note: future is not `Send` as this value is used across an await
+ --> $DIR/future_not_send.rs:40:30
+ |
+LL | pub async fn public_future(&self) {
+ | ----- has type `&Dummy` which is not `Send`
+LL | self.private_future().await;
+ | ^^^^^^ await occurs here, with `&self` maybe used later
+LL | }
+ | - `&self` is later dropped here
+ = note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Sync`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:49:37
+ |
+LL | async fn generic_future<T>(t: T) -> T
+ | ^ future returned by `generic_future` is not `Send`
+ |
+note: future is not `Send` as this value is used across an await
+ --> $DIR/future_not_send.rs:54:19
+ |
+LL | let rt = &t;
+ | -- has type `&T` which is not `Send`
+LL | async { true }.await;
+ | ^^^^^^ await occurs here, with `rt` maybe used later
+LL | t
+LL | }
+ | - `rt` is later dropped here
+ = note: `T` doesn't implement `std::marker::Sync`
+
+error: future cannot be sent between threads safely
+ --> $DIR/future_not_send.rs:65:34
+ |
+LL | async fn unclear_future<T>(t: T) {}
+ | ^ future returned by `unclear_future` is not `Send`
+ |
+note: captured value is not `Send`
+ --> $DIR/future_not_send.rs:65:28
+ |
+LL | async fn unclear_future<T>(t: T) {}
+ | ^ has type `T` which is not `Send`
+ = note: `T` doesn't implement `std::marker::Send`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/get_first.fixed b/src/tools/clippy/tests/ui/get_first.fixed
new file mode 100644
index 000000000..def58afa4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_first.fixed
@@ -0,0 +1,42 @@
+// run-rustfix
+#![warn(clippy::get_first)]
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+
+struct Bar {
+ arr: [u32; 3],
+}
+
+impl Bar {
+ fn get(&self, pos: usize) -> Option<&u32> {
+ self.arr.get(pos)
+ }
+}
+
+fn main() {
+ let x = vec![2, 3, 5];
+ let _ = x.first(); // Use x.first()
+ let _ = x.get(1);
+ let _ = x[0];
+
+ let y = [2, 3, 5];
+ let _ = y.first(); // Use y.first()
+ let _ = y.get(1);
+ let _ = y[0];
+
+ let z = &[2, 3, 5];
+ let _ = z.first(); // Use z.first()
+ let _ = z.get(1);
+ let _ = z[0];
+
+ let vecdeque: VecDeque<_> = x.iter().cloned().collect();
+ let hashmap: HashMap<u8, char> = HashMap::from_iter(vec![(0, 'a'), (1, 'b')]);
+ let btreemap: BTreeMap<u8, char> = BTreeMap::from_iter(vec![(0, 'a'), (1, 'b')]);
+ let _ = vecdeque.get(0); // Do not lint, because VecDeque is not slice.
+ let _ = hashmap.get(&0); // Do not lint, because HashMap is not slice.
+ let _ = btreemap.get(&0); // Do not lint, because BTreeMap is not slice.
+
+ let bar = Bar { arr: [0, 1, 2] };
+ let _ = bar.get(0); // Do not lint, because Bar is struct.
+}
diff --git a/src/tools/clippy/tests/ui/get_first.rs b/src/tools/clippy/tests/ui/get_first.rs
new file mode 100644
index 000000000..85a381854
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_first.rs
@@ -0,0 +1,42 @@
+// run-rustfix
+#![warn(clippy::get_first)]
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+
+struct Bar {
+ arr: [u32; 3],
+}
+
+impl Bar {
+ fn get(&self, pos: usize) -> Option<&u32> {
+ self.arr.get(pos)
+ }
+}
+
+fn main() {
+ let x = vec![2, 3, 5];
+ let _ = x.get(0); // Use x.first()
+ let _ = x.get(1);
+ let _ = x[0];
+
+ let y = [2, 3, 5];
+ let _ = y.get(0); // Use y.first()
+ let _ = y.get(1);
+ let _ = y[0];
+
+ let z = &[2, 3, 5];
+ let _ = z.get(0); // Use z.first()
+ let _ = z.get(1);
+ let _ = z[0];
+
+ let vecdeque: VecDeque<_> = x.iter().cloned().collect();
+ let hashmap: HashMap<u8, char> = HashMap::from_iter(vec![(0, 'a'), (1, 'b')]);
+ let btreemap: BTreeMap<u8, char> = BTreeMap::from_iter(vec![(0, 'a'), (1, 'b')]);
+ let _ = vecdeque.get(0); // Do not lint, because VecDeque is not slice.
+ let _ = hashmap.get(&0); // Do not lint, because HashMap is not slice.
+ let _ = btreemap.get(&0); // Do not lint, because BTreeMap is not slice.
+
+ let bar = Bar { arr: [0, 1, 2] };
+ let _ = bar.get(0); // Do not lint, because Bar is struct.
+}
diff --git a/src/tools/clippy/tests/ui/get_first.stderr b/src/tools/clippy/tests/ui/get_first.stderr
new file mode 100644
index 000000000..466beff9c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_first.stderr
@@ -0,0 +1,22 @@
+error: accessing first element with `x.get(0)`
+ --> $DIR/get_first.rs:19:13
+ |
+LL | let _ = x.get(0); // Use x.first()
+ | ^^^^^^^^ help: try: `x.first()`
+ |
+ = note: `-D clippy::get-first` implied by `-D warnings`
+
+error: accessing first element with `y.get(0)`
+ --> $DIR/get_first.rs:24:13
+ |
+LL | let _ = y.get(0); // Use y.first()
+ | ^^^^^^^^ help: try: `y.first()`
+
+error: accessing first element with `z.get(0)`
+ --> $DIR/get_first.rs:29:13
+ |
+LL | let _ = z.get(0); // Use z.first()
+ | ^^^^^^^^ help: try: `z.first()`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/get_last_with_len.fixed b/src/tools/clippy/tests/ui/get_last_with_len.fixed
new file mode 100644
index 000000000..1e90b3768
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_last_with_len.fixed
@@ -0,0 +1,49 @@
+// run-rustfix
+
+#![warn(clippy::get_last_with_len)]
+#![allow(unused)]
+
+use std::collections::VecDeque;
+
+fn dont_use_last() {
+ let x = vec![2, 3, 5];
+ let _ = x.last();
+}
+
+fn indexing_two_from_end() {
+ let x = vec![2, 3, 5];
+ let _ = x.get(x.len() - 2);
+}
+
+fn index_into_last() {
+ let x = vec![2, 3, 5];
+ let _ = x[x.len() - 1];
+}
+
+fn use_last_with_different_vec_length() {
+ let x = vec![2, 3, 5];
+ let y = vec!['a', 'b', 'c'];
+ let _ = x.get(y.len() - 1);
+}
+
+struct S {
+ field: Vec<usize>,
+}
+
+fn in_field(s: &S) {
+ let _ = s.field.last();
+}
+
+fn main() {
+ let slice = &[1, 2, 3];
+ let _ = slice.last();
+
+ let array = [4, 5, 6];
+ let _ = array.last();
+
+ let deq = VecDeque::from([7, 8, 9]);
+ let _ = deq.back();
+
+ let nested = [[1]];
+ let _ = nested[0].last();
+}
diff --git a/src/tools/clippy/tests/ui/get_last_with_len.rs b/src/tools/clippy/tests/ui/get_last_with_len.rs
new file mode 100644
index 000000000..d63a731bd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_last_with_len.rs
@@ -0,0 +1,49 @@
+// run-rustfix
+
+#![warn(clippy::get_last_with_len)]
+#![allow(unused)]
+
+use std::collections::VecDeque;
+
+fn dont_use_last() {
+ let x = vec![2, 3, 5];
+ let _ = x.get(x.len() - 1);
+}
+
+fn indexing_two_from_end() {
+ let x = vec![2, 3, 5];
+ let _ = x.get(x.len() - 2);
+}
+
+fn index_into_last() {
+ let x = vec![2, 3, 5];
+ let _ = x[x.len() - 1];
+}
+
+fn use_last_with_different_vec_length() {
+ let x = vec![2, 3, 5];
+ let y = vec!['a', 'b', 'c'];
+ let _ = x.get(y.len() - 1);
+}
+
+struct S {
+ field: Vec<usize>,
+}
+
+fn in_field(s: &S) {
+ let _ = s.field.get(s.field.len() - 1);
+}
+
+fn main() {
+ let slice = &[1, 2, 3];
+ let _ = slice.get(slice.len() - 1);
+
+ let array = [4, 5, 6];
+ let _ = array.get(array.len() - 1);
+
+ let deq = VecDeque::from([7, 8, 9]);
+ let _ = deq.get(deq.len() - 1);
+
+ let nested = [[1]];
+ let _ = nested[0].get(nested[0].len() - 1);
+}
diff --git a/src/tools/clippy/tests/ui/get_last_with_len.stderr b/src/tools/clippy/tests/ui/get_last_with_len.stderr
new file mode 100644
index 000000000..ac8dd6c2e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_last_with_len.stderr
@@ -0,0 +1,40 @@
+error: accessing last element with `x.get(x.len() - 1)`
+ --> $DIR/get_last_with_len.rs:10:13
+ |
+LL | let _ = x.get(x.len() - 1);
+ | ^^^^^^^^^^^^^^^^^^ help: try: `x.last()`
+ |
+ = note: `-D clippy::get-last-with-len` implied by `-D warnings`
+
+error: accessing last element with `s.field.get(s.field.len() - 1)`
+ --> $DIR/get_last_with_len.rs:34:13
+ |
+LL | let _ = s.field.get(s.field.len() - 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.field.last()`
+
+error: accessing last element with `slice.get(slice.len() - 1)`
+ --> $DIR/get_last_with_len.rs:39:13
+ |
+LL | let _ = slice.get(slice.len() - 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `slice.last()`
+
+error: accessing last element with `array.get(array.len() - 1)`
+ --> $DIR/get_last_with_len.rs:42:13
+ |
+LL | let _ = array.get(array.len() - 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `array.last()`
+
+error: accessing last element with `deq.get(deq.len() - 1)`
+ --> $DIR/get_last_with_len.rs:45:13
+ |
+LL | let _ = deq.get(deq.len() - 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `deq.back()`
+
+error: accessing last element with `nested[0].get(nested[0].len() - 1)`
+ --> $DIR/get_last_with_len.rs:48:13
+ |
+LL | let _ = nested[0].get(nested[0].len() - 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `nested[0].last()`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/get_unwrap.fixed b/src/tools/clippy/tests/ui/get_unwrap.fixed
new file mode 100644
index 000000000..5827fc7d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_unwrap.fixed
@@ -0,0 +1,67 @@
+// run-rustfix
+
+#![allow(unused_mut, clippy::from_iter_instead_of_collect, clippy::get_first)]
+#![warn(clippy::unwrap_used)]
+#![deny(clippy::get_unwrap)]
+
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+
+struct GetFalsePositive {
+ arr: [u32; 3],
+}
+
+impl GetFalsePositive {
+ fn get(&self, pos: usize) -> Option<&u32> {
+ self.arr.get(pos)
+ }
+ fn get_mut(&mut self, pos: usize) -> Option<&mut u32> {
+ self.arr.get_mut(pos)
+ }
+}
+
+fn main() {
+ let mut boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let mut some_slice = &mut [0, 1, 2, 3];
+ let mut some_vec = vec![0, 1, 2, 3];
+ let mut some_vecdeque: VecDeque<_> = some_vec.iter().cloned().collect();
+ let mut some_hashmap: HashMap<u8, char> = HashMap::from_iter(vec![(1, 'a'), (2, 'b')]);
+ let mut some_btreemap: BTreeMap<u8, char> = BTreeMap::from_iter(vec![(1, 'a'), (2, 'b')]);
+ let mut false_positive = GetFalsePositive { arr: [0, 1, 2] };
+
+ {
+ // Test `get().unwrap()`
+ let _ = &boxed_slice[1];
+ let _ = &some_slice[0];
+ let _ = &some_vec[0];
+ let _ = &some_vecdeque[0];
+ let _ = &some_hashmap[&1];
+ let _ = &some_btreemap[&1];
+ #[allow(clippy::unwrap_used)]
+ let _ = false_positive.get(0).unwrap();
+ // Test with deref
+ let _: u8 = boxed_slice[1];
+ }
+
+ {
+ // Test `get_mut().unwrap()`
+ boxed_slice[0] = 1;
+ some_slice[0] = 1;
+ some_vec[0] = 1;
+ some_vecdeque[0] = 1;
+ // Check false positives
+ #[allow(clippy::unwrap_used)]
+ {
+ *some_hashmap.get_mut(&1).unwrap() = 'b';
+ *some_btreemap.get_mut(&1).unwrap() = 'b';
+ *false_positive.get_mut(0).unwrap() = 1;
+ }
+ }
+
+ {
+ // Test `get().unwrap().foo()` and `get_mut().unwrap().bar()`
+ let _ = some_vec[0..1].to_vec();
+ let _ = some_vec[0..1].to_vec();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/get_unwrap.rs b/src/tools/clippy/tests/ui/get_unwrap.rs
new file mode 100644
index 000000000..a2a323c14
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_unwrap.rs
@@ -0,0 +1,67 @@
+// run-rustfix
+
+#![allow(unused_mut, clippy::from_iter_instead_of_collect, clippy::get_first)]
+#![warn(clippy::unwrap_used)]
+#![deny(clippy::get_unwrap)]
+
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+
+struct GetFalsePositive {
+ arr: [u32; 3],
+}
+
+impl GetFalsePositive {
+ fn get(&self, pos: usize) -> Option<&u32> {
+ self.arr.get(pos)
+ }
+ fn get_mut(&mut self, pos: usize) -> Option<&mut u32> {
+ self.arr.get_mut(pos)
+ }
+}
+
+fn main() {
+ let mut boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let mut some_slice = &mut [0, 1, 2, 3];
+ let mut some_vec = vec![0, 1, 2, 3];
+ let mut some_vecdeque: VecDeque<_> = some_vec.iter().cloned().collect();
+ let mut some_hashmap: HashMap<u8, char> = HashMap::from_iter(vec![(1, 'a'), (2, 'b')]);
+ let mut some_btreemap: BTreeMap<u8, char> = BTreeMap::from_iter(vec![(1, 'a'), (2, 'b')]);
+ let mut false_positive = GetFalsePositive { arr: [0, 1, 2] };
+
+ {
+ // Test `get().unwrap()`
+ let _ = boxed_slice.get(1).unwrap();
+ let _ = some_slice.get(0).unwrap();
+ let _ = some_vec.get(0).unwrap();
+ let _ = some_vecdeque.get(0).unwrap();
+ let _ = some_hashmap.get(&1).unwrap();
+ let _ = some_btreemap.get(&1).unwrap();
+ #[allow(clippy::unwrap_used)]
+ let _ = false_positive.get(0).unwrap();
+ // Test with deref
+ let _: u8 = *boxed_slice.get(1).unwrap();
+ }
+
+ {
+ // Test `get_mut().unwrap()`
+ *boxed_slice.get_mut(0).unwrap() = 1;
+ *some_slice.get_mut(0).unwrap() = 1;
+ *some_vec.get_mut(0).unwrap() = 1;
+ *some_vecdeque.get_mut(0).unwrap() = 1;
+ // Check false positives
+ #[allow(clippy::unwrap_used)]
+ {
+ *some_hashmap.get_mut(&1).unwrap() = 'b';
+ *some_btreemap.get_mut(&1).unwrap() = 'b';
+ *false_positive.get_mut(0).unwrap() = 1;
+ }
+ }
+
+ {
+ // Test `get().unwrap().foo()` and `get_mut().unwrap().bar()`
+ let _ = some_vec.get(0..1).unwrap().to_vec();
+ let _ = some_vec.get_mut(0..1).unwrap().to_vec();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/get_unwrap.stderr b/src/tools/clippy/tests/ui/get_unwrap.stderr
new file mode 100644
index 000000000..ea8fec527
--- /dev/null
+++ b/src/tools/clippy/tests/ui/get_unwrap.stderr
@@ -0,0 +1,191 @@
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:35:17
+ |
+LL | let _ = boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]`
+ |
+note: the lint level is defined here
+ --> $DIR/get_unwrap.rs:5:9
+ |
+LL | #![deny(clippy::get_unwrap)]
+ | ^^^^^^^^^^^^^^^^^^
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:35:17
+ |
+LL | let _ = boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unwrap-used` implied by `-D warnings`
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:36:17
+ |
+LL | let _ = some_slice.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_slice[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:36:17
+ |
+LL | let _ = some_slice.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:37:17
+ |
+LL | let _ = some_vec.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vec[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:37:17
+ |
+LL | let _ = some_vec.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:38:17
+ |
+LL | let _ = some_vecdeque.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vecdeque[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:38:17
+ |
+LL | let _ = some_vecdeque.get(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a HashMap. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:39:17
+ |
+LL | let _ = some_hashmap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_hashmap[&1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:39:17
+ |
+LL | let _ = some_hashmap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a BTreeMap. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:40:17
+ |
+LL | let _ = some_btreemap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_btreemap[&1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:40:17
+ |
+LL | let _ = some_btreemap.get(&1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:44:21
+ |
+LL | let _: u8 = *boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:44:22
+ |
+LL | let _: u8 = *boxed_slice.get(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:49:9
+ |
+LL | *boxed_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:49:10
+ |
+LL | *boxed_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:50:9
+ |
+LL | *some_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_slice[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:50:10
+ |
+LL | *some_slice.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:51:9
+ |
+LL | *some_vec.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:51:10
+ |
+LL | *some_vec.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:52:9
+ |
+LL | *some_vecdeque.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vecdeque[0]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:52:10
+ |
+LL | *some_vecdeque.get_mut(0).unwrap() = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:64:17
+ |
+LL | let _ = some_vec.get(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:64:17
+ |
+LL | let _ = some_vec.get(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
+ --> $DIR/get_unwrap.rs:65:17
+ |
+LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+
+error: used `unwrap()` on `an Option` value
+ --> $DIR/get_unwrap.rs:65:17
+ |
+LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: aborting due to 26 previous errors
+
diff --git a/src/tools/clippy/tests/ui/identity_op.fixed b/src/tools/clippy/tests/ui/identity_op.fixed
new file mode 100644
index 000000000..5f9cebe21
--- /dev/null
+++ b/src/tools/clippy/tests/ui/identity_op.fixed
@@ -0,0 +1,119 @@
+// run-rustfix
+
+#![warn(clippy::identity_op)]
+#![allow(
+ clippy::eq_op,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::op_ref,
+ clippy::double_parens,
+ unused
+)]
+
+use std::fmt::Write as _;
+
+const ONE: i64 = 1;
+const NEG_ONE: i64 = -1;
+const ZERO: i64 = 0;
+
+struct A(String);
+
+impl std::ops::Shl<i32> for A {
+ type Output = A;
+ fn shl(mut self, other: i32) -> Self {
+ let _ = write!(self.0, "{}", other);
+ self
+ }
+}
+
+struct Length(u8);
+struct Meter;
+
+impl core::ops::Mul<Meter> for u8 {
+ type Output = Length;
+ fn mul(self, _: Meter) -> Length {
+ Length(self)
+ }
+}
+
+#[rustfmt::skip]
+fn main() {
+ let x = 0;
+
+ x;
+ x;
+ x + 1;
+ x;
+ 1 + x;
+ x - ZERO; //no error, as we skip lookups (for now)
+ x;
+ ((ZERO)) | x; //no error, as we skip lookups (for now)
+
+ x;
+ x;
+ x / ONE; //no error, as we skip lookups (for now)
+
+ x / 2; //no false positive
+
+ x & NEG_ONE; //no error, as we skip lookups (for now)
+ x;
+
+ let u: u8 = 0;
+ u;
+
+ 1 << 0; // no error, this case is allowed, see issue 3430
+ 42;
+ 1;
+ 42;
+ &x;
+ x;
+
+ let mut a = A("".into());
+ let b = a << 0; // no error: non-integer
+
+ 1 * Meter; // no error: non-integer
+
+ 2;
+ -2;
+ 2 + x;
+ -2 + x;
+ x + 1;
+ (x + 1) % 3; // no error
+ 4 % 3; // no error
+ 4 % -3; // no error
+
+ // See #8724
+ let a = 0;
+ let b = true;
+ (if b { 1 } else { 2 });
+ (if b { 1 } else { 2 }) + if b { 3 } else { 4 };
+ (match a { 0 => 10, _ => 20 });
+ (match a { 0 => 10, _ => 20 }) + match a { 0 => 30, _ => 40 };
+ (if b { 1 } else { 2 }) + match a { 0 => 30, _ => 40 };
+ (match a { 0 => 10, _ => 20 }) + if b { 3 } else { 4 };
+ (if b { 1 } else { 2 });
+
+ ({ a }) + 3;
+ ({ a } * 2);
+ (loop { let mut c = 0; if c == 10 { break c; } c += 1; }) + { a * 2 };
+
+ fn f(_: i32) {
+ todo!();
+ }
+ f(a + { 8 * 5 });
+ f(if b { 1 } else { 2 } + 3);
+ const _: i32 = { 2 * 4 } + 3;
+ const _: i32 = { 1 + 2 * 3 } + 3;
+
+ a as usize;
+ let _ = a as usize;
+ ({ a } as usize);
+
+ 2 * { a };
+ (({ a } + 4));
+ 1;
+}
+
+pub fn decide(a: bool, b: bool) -> u32 {
+ (if a { 1 } else { 2 }) + if b { 3 } else { 5 }
+}
diff --git a/src/tools/clippy/tests/ui/identity_op.rs b/src/tools/clippy/tests/ui/identity_op.rs
new file mode 100644
index 000000000..ca799c9cf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/identity_op.rs
@@ -0,0 +1,119 @@
+// run-rustfix
+
+#![warn(clippy::identity_op)]
+#![allow(
+ clippy::eq_op,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::op_ref,
+ clippy::double_parens,
+ unused
+)]
+
+use std::fmt::Write as _;
+
+const ONE: i64 = 1;
+const NEG_ONE: i64 = -1;
+const ZERO: i64 = 0;
+
+struct A(String);
+
+impl std::ops::Shl<i32> for A {
+ type Output = A;
+ fn shl(mut self, other: i32) -> Self {
+ let _ = write!(self.0, "{}", other);
+ self
+ }
+}
+
+struct Length(u8);
+struct Meter;
+
+impl core::ops::Mul<Meter> for u8 {
+ type Output = Length;
+ fn mul(self, _: Meter) -> Length {
+ Length(self)
+ }
+}
+
+#[rustfmt::skip]
+fn main() {
+ let x = 0;
+
+ x + 0;
+ x + (1 - 1);
+ x + 1;
+ 0 + x;
+ 1 + x;
+ x - ZERO; //no error, as we skip lookups (for now)
+ x | (0);
+ ((ZERO)) | x; //no error, as we skip lookups (for now)
+
+ x * 1;
+ 1 * x;
+ x / ONE; //no error, as we skip lookups (for now)
+
+ x / 2; //no false positive
+
+ x & NEG_ONE; //no error, as we skip lookups (for now)
+ -1 & x;
+
+ let u: u8 = 0;
+ u & 255;
+
+ 1 << 0; // no error, this case is allowed, see issue 3430
+ 42 << 0;
+ 1 >> 0;
+ 42 >> 0;
+ &x >> 0;
+ x >> &0;
+
+ let mut a = A("".into());
+ let b = a << 0; // no error: non-integer
+
+ 1 * Meter; // no error: non-integer
+
+ 2 % 3;
+ -2 % 3;
+ 2 % -3 + x;
+ -2 % -3 + x;
+ x + 1 % 3;
+ (x + 1) % 3; // no error
+ 4 % 3; // no error
+ 4 % -3; // no error
+
+ // See #8724
+ let a = 0;
+ let b = true;
+ 0 + if b { 1 } else { 2 };
+ 0 + if b { 1 } else { 2 } + if b { 3 } else { 4 };
+ 0 + match a { 0 => 10, _ => 20 };
+ 0 + match a { 0 => 10, _ => 20 } + match a { 0 => 30, _ => 40 };
+ 0 + if b { 1 } else { 2 } + match a { 0 => 30, _ => 40 };
+ 0 + match a { 0 => 10, _ => 20 } + if b { 3 } else { 4 };
+ (if b { 1 } else { 2 }) + 0;
+
+ 0 + { a } + 3;
+ 0 + { a } * 2;
+ 0 + loop { let mut c = 0; if c == 10 { break c; } c += 1; } + { a * 2 };
+
+ fn f(_: i32) {
+ todo!();
+ }
+ f(1 * a + { 8 * 5 });
+ f(0 + if b { 1 } else { 2 } + 3);
+ const _: i32 = { 2 * 4 } + 0 + 3;
+ const _: i32 = 0 + { 1 + 2 * 3 } + 3;
+
+ 0 + a as usize;
+ let _ = 0 + a as usize;
+ 0 + { a } as usize;
+
+ 2 * (0 + { a });
+ 1 * ({ a } + 4);
+ 1 * 1;
+}
+
+pub fn decide(a: bool, b: bool) -> u32 {
+ 0 + if a { 1 } else { 2 } + if b { 3 } else { 5 }
+}
diff --git a/src/tools/clippy/tests/ui/identity_op.stderr b/src/tools/clippy/tests/ui/identity_op.stderr
new file mode 100644
index 000000000..1a104a20b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/identity_op.stderr
@@ -0,0 +1,238 @@
+error: this operation has no effect
+ --> $DIR/identity_op.rs:43:5
+ |
+LL | x + 0;
+ | ^^^^^ help: consider reducing it to: `x`
+ |
+ = note: `-D clippy::identity-op` implied by `-D warnings`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:44:5
+ |
+LL | x + (1 - 1);
+ | ^^^^^^^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:46:5
+ |
+LL | 0 + x;
+ | ^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:49:5
+ |
+LL | x | (0);
+ | ^^^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:52:5
+ |
+LL | x * 1;
+ | ^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:53:5
+ |
+LL | 1 * x;
+ | ^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:59:5
+ |
+LL | -1 & x;
+ | ^^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:62:5
+ |
+LL | u & 255;
+ | ^^^^^^^ help: consider reducing it to: `u`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:65:5
+ |
+LL | 42 << 0;
+ | ^^^^^^^ help: consider reducing it to: `42`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:66:5
+ |
+LL | 1 >> 0;
+ | ^^^^^^ help: consider reducing it to: `1`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:67:5
+ |
+LL | 42 >> 0;
+ | ^^^^^^^ help: consider reducing it to: `42`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:68:5
+ |
+LL | &x >> 0;
+ | ^^^^^^^ help: consider reducing it to: `&x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:69:5
+ |
+LL | x >> &0;
+ | ^^^^^^^ help: consider reducing it to: `x`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:76:5
+ |
+LL | 2 % 3;
+ | ^^^^^ help: consider reducing it to: `2`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:77:5
+ |
+LL | -2 % 3;
+ | ^^^^^^ help: consider reducing it to: `-2`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:78:5
+ |
+LL | 2 % -3 + x;
+ | ^^^^^^ help: consider reducing it to: `2`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:79:5
+ |
+LL | -2 % -3 + x;
+ | ^^^^^^^ help: consider reducing it to: `-2`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:80:9
+ |
+LL | x + 1 % 3;
+ | ^^^^^ help: consider reducing it to: `1`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:88:5
+ |
+LL | 0 + if b { 1 } else { 2 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(if b { 1 } else { 2 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:89:5
+ |
+LL | 0 + if b { 1 } else { 2 } + if b { 3 } else { 4 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(if b { 1 } else { 2 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:90:5
+ |
+LL | 0 + match a { 0 => 10, _ => 20 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(match a { 0 => 10, _ => 20 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:91:5
+ |
+LL | 0 + match a { 0 => 10, _ => 20 } + match a { 0 => 30, _ => 40 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(match a { 0 => 10, _ => 20 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:92:5
+ |
+LL | 0 + if b { 1 } else { 2 } + match a { 0 => 30, _ => 40 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(if b { 1 } else { 2 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:93:5
+ |
+LL | 0 + match a { 0 => 10, _ => 20 } + if b { 3 } else { 4 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(match a { 0 => 10, _ => 20 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:94:5
+ |
+LL | (if b { 1 } else { 2 }) + 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(if b { 1 } else { 2 })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:96:5
+ |
+LL | 0 + { a } + 3;
+ | ^^^^^^^^^ help: consider reducing it to: `({ a })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:97:5
+ |
+LL | 0 + { a } * 2;
+ | ^^^^^^^^^^^^^ help: consider reducing it to: `({ a } * 2)`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:98:5
+ |
+LL | 0 + loop { let mut c = 0; if c == 10 { break c; } c += 1; } + { a * 2 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(loop { let mut c = 0; if c == 10 { break c; } c += 1; })`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:103:7
+ |
+LL | f(1 * a + { 8 * 5 });
+ | ^^^^^ help: consider reducing it to: `a`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:104:7
+ |
+LL | f(0 + if b { 1 } else { 2 } + 3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `if b { 1 } else { 2 }`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:105:20
+ |
+LL | const _: i32 = { 2 * 4 } + 0 + 3;
+ | ^^^^^^^^^^^^^ help: consider reducing it to: `{ 2 * 4 }`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:106:20
+ |
+LL | const _: i32 = 0 + { 1 + 2 * 3 } + 3;
+ | ^^^^^^^^^^^^^^^^^ help: consider reducing it to: `{ 1 + 2 * 3 }`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:108:5
+ |
+LL | 0 + a as usize;
+ | ^^^^^^^^^^^^^^ help: consider reducing it to: `a as usize`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:109:13
+ |
+LL | let _ = 0 + a as usize;
+ | ^^^^^^^^^^^^^^ help: consider reducing it to: `a as usize`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:110:5
+ |
+LL | 0 + { a } as usize;
+ | ^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `({ a } as usize)`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:112:9
+ |
+LL | 2 * (0 + { a });
+ | ^^^^^^^^^^^ help: consider reducing it to: `{ a }`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:113:5
+ |
+LL | 1 * ({ a } + 4);
+ | ^^^^^^^^^^^^^^^ help: consider reducing it to: `(({ a } + 4))`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:114:5
+ |
+LL | 1 * 1;
+ | ^^^^^ help: consider reducing it to: `1`
+
+error: this operation has no effect
+ --> $DIR/identity_op.rs:118:5
+ |
+LL | 0 + if a { 1 } else { 2 } + if b { 3 } else { 5 }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider reducing it to: `(if a { 1 } else { 2 })`
+
+error: aborting due to 39 previous errors
+
diff --git a/src/tools/clippy/tests/ui/if_let_mutex.rs b/src/tools/clippy/tests/ui/if_let_mutex.rs
new file mode 100644
index 000000000..6cbfafbb3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_let_mutex.rs
@@ -0,0 +1,42 @@
+#![warn(clippy::if_let_mutex)]
+
+use std::ops::Deref;
+use std::sync::Mutex;
+
+fn do_stuff<T>(_: T) {}
+
+fn if_let() {
+ let m = Mutex::new(1_u8);
+ if let Err(locked) = m.lock() {
+ do_stuff(locked);
+ } else {
+ let lock = m.lock().unwrap();
+ do_stuff(lock);
+ };
+}
+
+// This is the most common case as the above case is pretty
+// contrived.
+fn if_let_option() {
+ let m = Mutex::new(Some(0_u8));
+ if let Some(locked) = m.lock().unwrap().deref() {
+ do_stuff(locked);
+ } else {
+ let lock = m.lock().unwrap();
+ do_stuff(lock);
+ };
+}
+
+// When mutexes are different don't warn
+fn if_let_different_mutex() {
+ let m = Mutex::new(Some(0_u8));
+ let other = Mutex::new(None::<u8>);
+ if let Some(locked) = m.lock().unwrap().deref() {
+ do_stuff(locked);
+ } else {
+ let lock = other.lock().unwrap();
+ do_stuff(lock);
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/if_let_mutex.stderr b/src/tools/clippy/tests/ui/if_let_mutex.stderr
new file mode 100644
index 000000000..e9c4d9163
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_let_mutex.stderr
@@ -0,0 +1,29 @@
+error: calling `Mutex::lock` inside the scope of another `Mutex::lock` causes a deadlock
+ --> $DIR/if_let_mutex.rs:10:5
+ |
+LL | / if let Err(locked) = m.lock() {
+LL | | do_stuff(locked);
+LL | | } else {
+LL | | let lock = m.lock().unwrap();
+LL | | do_stuff(lock);
+LL | | };
+ | |_____^
+ |
+ = note: `-D clippy::if-let-mutex` implied by `-D warnings`
+ = help: move the lock call outside of the `if let ...` expression
+
+error: calling `Mutex::lock` inside the scope of another `Mutex::lock` causes a deadlock
+ --> $DIR/if_let_mutex.rs:22:5
+ |
+LL | / if let Some(locked) = m.lock().unwrap().deref() {
+LL | | do_stuff(locked);
+LL | | } else {
+LL | | let lock = m.lock().unwrap();
+LL | | do_stuff(lock);
+LL | | };
+ | |_____^
+ |
+ = help: move the lock call outside of the `if let ...` expression
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/if_not_else.rs b/src/tools/clippy/tests/ui/if_not_else.rs
new file mode 100644
index 000000000..b7012b43d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_not_else.rs
@@ -0,0 +1,29 @@
+#![warn(clippy::all)]
+#![warn(clippy::if_not_else)]
+
+fn foo() -> bool {
+ unimplemented!()
+}
+fn bla() -> bool {
+ unimplemented!()
+}
+
+fn main() {
+ if !bla() {
+ println!("Bugs");
+ } else {
+ println!("Bunny");
+ }
+ if 4 != 5 {
+ println!("Bugs");
+ } else {
+ println!("Bunny");
+ }
+ if !foo() {
+ println!("Foo");
+ } else if !bla() {
+ println!("Bugs");
+ } else {
+ println!("Bunny");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/if_not_else.stderr b/src/tools/clippy/tests/ui/if_not_else.stderr
new file mode 100644
index 000000000..8c8cc44bb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_not_else.stderr
@@ -0,0 +1,27 @@
+error: unnecessary boolean `not` operation
+ --> $DIR/if_not_else.rs:12:5
+ |
+LL | / if !bla() {
+LL | | println!("Bugs");
+LL | | } else {
+LL | | println!("Bunny");
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::if-not-else` implied by `-D warnings`
+ = help: remove the `!` and swap the blocks of the `if`/`else`
+
+error: unnecessary `!=` operation
+ --> $DIR/if_not_else.rs:17:5
+ |
+LL | / if 4 != 5 {
+LL | | println!("Bugs");
+LL | | } else {
+LL | | println!("Bunny");
+LL | | }
+ | |_____^
+ |
+ = help: change to `==` and swap the blocks of the `if`/`else`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/if_same_then_else.rs b/src/tools/clippy/tests/ui/if_same_then_else.rs
new file mode 100644
index 000000000..2598c2ab4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_same_then_else.rs
@@ -0,0 +1,217 @@
+#![warn(clippy::if_same_then_else)]
+#![allow(
+ clippy::blacklisted_name,
+ clippy::eq_op,
+ clippy::never_loop,
+ clippy::no_effect,
+ clippy::unused_unit,
+ clippy::zero_divided_by_zero,
+ clippy::branches_sharing_code,
+ dead_code,
+ unreachable_code
+)]
+
+struct Foo {
+ bar: u8,
+}
+
+fn foo() -> bool {
+ unimplemented!()
+}
+
+fn if_same_then_else() {
+ if true {
+ Foo { bar: 42 };
+ 0..10;
+ ..;
+ 0..;
+ ..10;
+ 0..=10;
+ foo();
+ } else {
+ //~ ERROR same body as `if` block
+ Foo { bar: 42 };
+ 0..10;
+ ..;
+ 0..;
+ ..10;
+ 0..=10;
+ foo();
+ }
+
+ if true {
+ Foo { bar: 42 };
+ } else {
+ Foo { bar: 43 };
+ }
+
+ if true {
+ ();
+ } else {
+ ()
+ }
+
+ if true {
+ 0..10;
+ } else {
+ 0..=10;
+ }
+
+ if true {
+ foo();
+ foo();
+ } else {
+ foo();
+ }
+
+ let _ = if true {
+ 0.0
+ } else {
+ //~ ERROR same body as `if` block
+ 0.0
+ };
+
+ let _ = if true {
+ -0.0
+ } else {
+ //~ ERROR same body as `if` block
+ -0.0
+ };
+
+ let _ = if true { 0.0 } else { -0.0 };
+
+ // Different NaNs
+ let _ = if true { 0.0 / 0.0 } else { f32::NAN };
+
+ if true {
+ foo();
+ }
+
+ let _ = if true {
+ 42
+ } else {
+ //~ ERROR same body as `if` block
+ 42
+ };
+
+ if true {
+ let bar = if true { 42 } else { 43 };
+
+ while foo() {
+ break;
+ }
+ bar + 1;
+ } else {
+ //~ ERROR same body as `if` block
+ let bar = if true { 42 } else { 43 };
+
+ while foo() {
+ break;
+ }
+ bar + 1;
+ }
+
+ if true {
+ let _ = match 42 {
+ 42 => 1,
+ a if a > 0 => 2,
+ 10..=15 => 3,
+ _ => 4,
+ };
+ } else if false {
+ foo();
+ } else if foo() {
+ let _ = match 42 {
+ 42 => 1,
+ a if a > 0 => 2,
+ 10..=15 => 3,
+ _ => 4,
+ };
+ }
+}
+
+// Issue #2423. This was causing an ICE.
+fn func() {
+ if true {
+ f(&[0; 62]);
+ f(&[0; 4]);
+ f(&[0; 3]);
+ } else {
+ f(&[0; 62]);
+ f(&[0; 6]);
+ f(&[0; 6]);
+ }
+}
+
+fn f(val: &[u8]) {}
+
+mod issue_5698 {
+ fn mul_not_always_commutative(x: i32, y: i32) -> i32 {
+ if x == 42 {
+ x * y
+ } else if x == 21 {
+ y * x
+ } else {
+ 0
+ }
+ }
+}
+
+mod issue_8836 {
+ fn do_not_lint() {
+ if true {
+ todo!()
+ } else {
+ todo!()
+ }
+ if true {
+ todo!();
+ } else {
+ todo!();
+ }
+ if true {
+ unimplemented!()
+ } else {
+ unimplemented!()
+ }
+ if true {
+ unimplemented!();
+ } else {
+ unimplemented!();
+ }
+
+ if true {
+ println!("FOO");
+ todo!();
+ } else {
+ println!("FOO");
+ todo!();
+ }
+
+ if true {
+ println!("FOO");
+ unimplemented!();
+ } else {
+ println!("FOO");
+ unimplemented!();
+ }
+
+ if true {
+ println!("FOO");
+ todo!()
+ } else {
+ println!("FOO");
+ todo!()
+ }
+
+ if true {
+ println!("FOO");
+ unimplemented!()
+ } else {
+ println!("FOO");
+ unimplemented!()
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/if_same_then_else.stderr b/src/tools/clippy/tests/ui/if_same_then_else.stderr
new file mode 100644
index 000000000..2cdf44248
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_same_then_else.stderr
@@ -0,0 +1,112 @@
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else.rs:23:13
+ |
+LL | if true {
+ | _____________^
+LL | | Foo { bar: 42 };
+LL | | 0..10;
+LL | | ..;
+... |
+LL | | foo();
+LL | | } else {
+ | |_____^
+ |
+ = note: `-D clippy::if-same-then-else` implied by `-D warnings`
+note: same as this
+ --> $DIR/if_same_then_else.rs:31:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | Foo { bar: 42 };
+LL | | 0..10;
+... |
+LL | | foo();
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else.rs:67:21
+ |
+LL | let _ = if true {
+ | _____________________^
+LL | | 0.0
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else.rs:69:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | 0.0
+LL | | };
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else.rs:74:21
+ |
+LL | let _ = if true {
+ | _____________________^
+LL | | -0.0
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else.rs:76:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | -0.0
+LL | | };
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else.rs:90:21
+ |
+LL | let _ = if true {
+ | _____________________^
+LL | | 42
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else.rs:92:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | 42
+LL | | };
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else.rs:97:13
+ |
+LL | if true {
+ | _____________^
+LL | | let bar = if true { 42 } else { 43 };
+LL | |
+LL | | while foo() {
+... |
+LL | | bar + 1;
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else.rs:104:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | let bar = if true { 42 } else { 43 };
+LL | |
+... |
+LL | | bar + 1;
+LL | | }
+ | |_____^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/if_same_then_else2.rs b/src/tools/clippy/tests/ui/if_same_then_else2.rs
new file mode 100644
index 000000000..0016009a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_same_then_else2.rs
@@ -0,0 +1,160 @@
+#![warn(clippy::if_same_then_else)]
+#![allow(
+ clippy::blacklisted_name,
+ clippy::collapsible_else_if,
+ clippy::equatable_if_let,
+ clippy::collapsible_if,
+ clippy::ifs_same_cond,
+ clippy::needless_return,
+ clippy::single_element_loop,
+ clippy::branches_sharing_code
+)]
+
+fn if_same_then_else2() -> Result<&'static str, ()> {
+ if true {
+ for _ in &[42] {
+ let foo: &Option<_> = &Some::<u8>(42);
+ if foo.is_some() {
+ break;
+ } else {
+ continue;
+ }
+ }
+ } else {
+ //~ ERROR same body as `if` block
+ for _ in &[42] {
+ let bar: &Option<_> = &Some::<u8>(42);
+ if bar.is_some() {
+ break;
+ } else {
+ continue;
+ }
+ }
+ }
+
+ if true {
+ if let Some(a) = Some(42) {}
+ } else {
+ //~ ERROR same body as `if` block
+ if let Some(a) = Some(42) {}
+ }
+
+ if true {
+ if let (1, .., 3) = (1, 2, 3) {}
+ } else {
+ //~ ERROR same body as `if` block
+ if let (1, .., 3) = (1, 2, 3) {}
+ }
+
+ if true {
+ if let (1, .., 3) = (1, 2, 3) {}
+ } else {
+ if let (.., 3) = (1, 2, 3) {}
+ }
+
+ if true {
+ if let (1, .., 3) = (1, 2, 3) {}
+ } else {
+ if let (.., 4) = (1, 2, 3) {}
+ }
+
+ if true {
+ if let (1, .., 3) = (1, 2, 3) {}
+ } else {
+ if let (.., 1, 3) = (1, 2, 3) {}
+ }
+
+ if true {
+ if let Some(42) = None {}
+ } else {
+ if let Option::Some(42) = None {}
+ }
+
+ if true {
+ if let Some(42) = None::<u8> {}
+ } else {
+ if let Some(42) = None {}
+ }
+
+ if true {
+ if let Some(42) = None::<u8> {}
+ } else {
+ if let Some(42) = None::<u32> {}
+ }
+
+ if true {
+ if let Some(a) = Some(42) {}
+ } else {
+ if let Some(a) = Some(43) {}
+ }
+
+ // Same NaNs
+ let _ = if true {
+ f32::NAN
+ } else {
+ //~ ERROR same body as `if` block
+ f32::NAN
+ };
+
+ if true {
+ Ok("foo")?;
+ } else {
+ //~ ERROR same body as `if` block
+ Ok("foo")?;
+ }
+
+ if true {
+ let foo = "";
+ return Ok(&foo[0..]);
+ } else if false {
+ let foo = "bar";
+ return Ok(&foo[0..]);
+ } else {
+ let foo = "";
+ return Ok(&foo[0..]);
+ }
+
+ if true {
+ let foo = "";
+ return Ok(&foo[0..]);
+ } else if false {
+ let foo = "bar";
+ return Ok(&foo[0..]);
+ } else if true {
+ let foo = "";
+ return Ok(&foo[0..]);
+ } else {
+ let foo = "";
+ return Ok(&foo[0..]);
+ }
+
+ // False positive `if_same_then_else`: `let (x, y)` vs. `let (y, x)`; see issue #3559.
+ if true {
+ let foo = "";
+ let (x, y) = (1, 2);
+ return Ok(&foo[x..y]);
+ } else {
+ let foo = "";
+ let (y, x) = (1, 2);
+ return Ok(&foo[x..y]);
+ }
+
+ // Issue #7579
+ let _ = if let Some(0) = None { 0 } else { 0 };
+
+ if true {
+ return Err(());
+ } else if let Some(0) = None {
+ return Err(());
+ }
+
+ let _ = if let Some(0) = None {
+ 0
+ } else if let Some(1) = None {
+ 0
+ } else {
+ 0
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/if_same_then_else2.stderr b/src/tools/clippy/tests/ui/if_same_then_else2.stderr
new file mode 100644
index 000000000..cac788f85
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_same_then_else2.stderr
@@ -0,0 +1,125 @@
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else2.rs:14:13
+ |
+LL | if true {
+ | _____________^
+LL | | for _ in &[42] {
+LL | | let foo: &Option<_> = &Some::<u8>(42);
+LL | | if foo.is_some() {
+... |
+LL | | }
+LL | | } else {
+ | |_____^
+ |
+ = note: `-D clippy::if-same-then-else` implied by `-D warnings`
+note: same as this
+ --> $DIR/if_same_then_else2.rs:23:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | for _ in &[42] {
+LL | | let bar: &Option<_> = &Some::<u8>(42);
+... |
+LL | | }
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else2.rs:35:13
+ |
+LL | if true {
+ | _____________^
+LL | | if let Some(a) = Some(42) {}
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else2.rs:37:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | if let Some(a) = Some(42) {}
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else2.rs:42:13
+ |
+LL | if true {
+ | _____________^
+LL | | if let (1, .., 3) = (1, 2, 3) {}
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else2.rs:44:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | if let (1, .., 3) = (1, 2, 3) {}
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else2.rs:92:21
+ |
+LL | let _ = if true {
+ | _____________________^
+LL | | f32::NAN
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else2.rs:94:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | f32::NAN
+LL | | };
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else2.rs:99:13
+ |
+LL | if true {
+ | _____________^
+LL | | Ok("foo")?;
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else2.rs:101:12
+ |
+LL | } else {
+ | ____________^
+LL | | //~ ERROR same body as `if` block
+LL | | Ok("foo")?;
+LL | | }
+ | |_____^
+
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else2.rs:123:20
+ |
+LL | } else if true {
+ | ____________________^
+LL | | let foo = "";
+LL | | return Ok(&foo[0..]);
+LL | | } else {
+ | |_____^
+ |
+note: same as this
+ --> $DIR/if_same_then_else2.rs:126:12
+ |
+LL | } else {
+ | ____________^
+LL | | let foo = "";
+LL | | return Ok(&foo[0..]);
+LL | | }
+ | |_____^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none.rs b/src/tools/clippy/tests/ui/if_then_some_else_none.rs
new file mode 100644
index 000000000..3bc3a0395
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_then_some_else_none.rs
@@ -0,0 +1,115 @@
+#![warn(clippy::if_then_some_else_none)]
+#![feature(custom_inner_attributes)]
+
+fn main() {
+ // Should issue an error.
+ let _ = if foo() {
+ println!("true!");
+ Some("foo")
+ } else {
+ None
+ };
+
+ // Should issue an error when macros are used.
+ let _ = if matches!(true, true) {
+ println!("true!");
+ Some(matches!(true, false))
+ } else {
+ None
+ };
+
+ // Should issue an error. Binary expression `o < 32` should be parenthesized.
+ let x = Some(5);
+ let _ = x.and_then(|o| if o < 32 { Some(o) } else { None });
+
+ // Should issue an error. Unary expression `!x` should be parenthesized.
+ let x = true;
+ let _ = if !x { Some(0) } else { None };
+
+ // Should not issue an error since the `else` block has a statement besides `None`.
+ let _ = if foo() {
+ println!("true!");
+ Some("foo")
+ } else {
+ eprintln!("false...");
+ None
+ };
+
+ // Should not issue an error since there are more than 2 blocks in the if-else chain.
+ let _ = if foo() {
+ println!("foo true!");
+ Some("foo")
+ } else if bar() {
+ println!("bar true!");
+ Some("bar")
+ } else {
+ None
+ };
+
+ let _ = if foo() {
+ println!("foo true!");
+ Some("foo")
+ } else {
+ bar().then(|| {
+ println!("bar true!");
+ "bar"
+ })
+ };
+
+ // Should not issue an error since the `then` block has `None`, not `Some`.
+ let _ = if foo() { None } else { Some("foo is false") };
+
+ // Should not issue an error since the `else` block doesn't use `None` directly.
+ let _ = if foo() { Some("foo is true") } else { into_none() };
+
+ // Should not issue an error since the `then` block doesn't use `Some` directly.
+ let _ = if foo() { into_some("foo") } else { None };
+}
+
+fn _msrv_1_49() {
+ #![clippy::msrv = "1.49"]
+ // `bool::then` was stabilized in 1.50. Do not lint this
+ let _ = if foo() {
+ println!("true!");
+ Some(149)
+ } else {
+ None
+ };
+}
+
+fn _msrv_1_50() {
+ #![clippy::msrv = "1.50"]
+ let _ = if foo() {
+ println!("true!");
+ Some(150)
+ } else {
+ None
+ };
+}
+
+fn foo() -> bool {
+ unimplemented!()
+}
+
+fn bar() -> bool {
+ unimplemented!()
+}
+
+fn into_some<T>(v: T) -> Option<T> {
+ Some(v)
+}
+
+fn into_none<T>() -> Option<T> {
+ None
+}
+
+// Should not warn
+fn f(b: bool, v: Option<()>) -> Option<()> {
+ if b {
+ v?; // This is a potential early return, is not equivalent with `bool::then`
+
+ Some(())
+ } else {
+ None
+ }
+}
diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none.stderr b/src/tools/clippy/tests/ui/if_then_some_else_none.stderr
new file mode 100644
index 000000000..8cb22d569
--- /dev/null
+++ b/src/tools/clippy/tests/ui/if_then_some_else_none.stderr
@@ -0,0 +1,61 @@
+error: this could be simplified with `bool::then`
+ --> $DIR/if_then_some_else_none.rs:6:13
+ |
+LL | let _ = if foo() {
+ | _____________^
+LL | | println!("true!");
+LL | | Some("foo")
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^
+ |
+ = note: `-D clippy::if-then-some-else-none` implied by `-D warnings`
+ = help: consider using `bool::then` like: `foo().then(|| { /* snippet */ "foo" })`
+
+error: this could be simplified with `bool::then`
+ --> $DIR/if_then_some_else_none.rs:14:13
+ |
+LL | let _ = if matches!(true, true) {
+ | _____________^
+LL | | println!("true!");
+LL | | Some(matches!(true, false))
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^
+ |
+ = help: consider using `bool::then` like: `matches!(true, true).then(|| { /* snippet */ matches!(true, false) })`
+
+error: this could be simplified with `bool::then`
+ --> $DIR/if_then_some_else_none.rs:23:28
+ |
+LL | let _ = x.and_then(|o| if o < 32 { Some(o) } else { None });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `bool::then` like: `(o < 32).then(|| o)`
+
+error: this could be simplified with `bool::then`
+ --> $DIR/if_then_some_else_none.rs:27:13
+ |
+LL | let _ = if !x { Some(0) } else { None };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `bool::then` like: `(!x).then(|| 0)`
+
+error: this could be simplified with `bool::then`
+ --> $DIR/if_then_some_else_none.rs:82:13
+ |
+LL | let _ = if foo() {
+ | _____________^
+LL | | println!("true!");
+LL | | Some(150)
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^
+ |
+ = help: consider using `bool::then` like: `foo().then(|| { /* snippet */ 150 })`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ifs_same_cond.rs b/src/tools/clippy/tests/ui/ifs_same_cond.rs
new file mode 100644
index 000000000..80e9839ff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ifs_same_cond.rs
@@ -0,0 +1,46 @@
+#![warn(clippy::ifs_same_cond)]
+#![allow(clippy::if_same_then_else, clippy::comparison_chain)] // all empty blocks
+
+fn ifs_same_cond() {
+ let a = 0;
+ let b = false;
+
+ if b {
+ } else if b {
+ //~ ERROR ifs same condition
+ }
+
+ if a == 1 {
+ } else if a == 1 {
+ //~ ERROR ifs same condition
+ }
+
+ if 2 * a == 1 {
+ } else if 2 * a == 2 {
+ } else if 2 * a == 1 {
+ //~ ERROR ifs same condition
+ } else if a == 1 {
+ }
+
+ // See #659
+ if cfg!(feature = "feature1-659") {
+ 1
+ } else if cfg!(feature = "feature2-659") {
+ 2
+ } else {
+ 3
+ };
+
+ let mut v = vec![1];
+ if v.pop() == None {
+ // ok, functions
+ } else if v.pop() == None {
+ }
+
+ if v.len() == 42 {
+ // ok, functions
+ } else if v.len() == 42 {
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/ifs_same_cond.stderr b/src/tools/clippy/tests/ui/ifs_same_cond.stderr
new file mode 100644
index 000000000..0c8f49b86
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ifs_same_cond.stderr
@@ -0,0 +1,39 @@
+error: this `if` has the same condition as a previous `if`
+ --> $DIR/ifs_same_cond.rs:9:15
+ |
+LL | } else if b {
+ | ^
+ |
+ = note: `-D clippy::ifs-same-cond` implied by `-D warnings`
+note: same as this
+ --> $DIR/ifs_same_cond.rs:8:8
+ |
+LL | if b {
+ | ^
+
+error: this `if` has the same condition as a previous `if`
+ --> $DIR/ifs_same_cond.rs:14:15
+ |
+LL | } else if a == 1 {
+ | ^^^^^^
+ |
+note: same as this
+ --> $DIR/ifs_same_cond.rs:13:8
+ |
+LL | if a == 1 {
+ | ^^^^^^
+
+error: this `if` has the same condition as a previous `if`
+ --> $DIR/ifs_same_cond.rs:20:15
+ |
+LL | } else if 2 * a == 1 {
+ | ^^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/ifs_same_cond.rs:18:8
+ |
+LL | if 2 * a == 1 {
+ | ^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/impl.rs b/src/tools/clippy/tests/ui/impl.rs
new file mode 100644
index 000000000..aea52a852
--- /dev/null
+++ b/src/tools/clippy/tests/ui/impl.rs
@@ -0,0 +1,67 @@
+#![allow(dead_code, clippy::extra_unused_lifetimes)]
+#![warn(clippy::multiple_inherent_impl)]
+
+struct MyStruct;
+
+impl MyStruct {
+ fn first() {}
+}
+
+impl MyStruct {
+ fn second() {}
+}
+
+impl<'a> MyStruct {
+ fn lifetimed() {}
+}
+
+mod submod {
+ struct MyStruct;
+ impl MyStruct {
+ fn other() {}
+ }
+
+ impl super::MyStruct {
+ fn third() {}
+ }
+}
+
+use std::fmt;
+impl fmt::Debug for MyStruct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "MyStruct {{ }}")
+ }
+}
+
+// issue #5772
+struct WithArgs<T>(T);
+impl WithArgs<u32> {
+ fn f1() {}
+}
+impl WithArgs<u64> {
+ fn f2() {}
+}
+impl WithArgs<u64> {
+ fn f3() {}
+}
+
+// Ok, the struct is allowed to have multiple impls.
+#[allow(clippy::multiple_inherent_impl)]
+struct Allowed;
+impl Allowed {}
+impl Allowed {}
+impl Allowed {}
+
+struct AllowedImpl;
+#[allow(clippy::multiple_inherent_impl)]
+impl AllowedImpl {}
+// Ok, the first block is skipped by this lint.
+impl AllowedImpl {}
+
+struct OneAllowedImpl;
+impl OneAllowedImpl {}
+#[allow(clippy::multiple_inherent_impl)]
+impl OneAllowedImpl {}
+impl OneAllowedImpl {} // Lint, only one of the three blocks is allowed.
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/impl.stderr b/src/tools/clippy/tests/ui/impl.stderr
new file mode 100644
index 000000000..8703ecac9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/impl.stderr
@@ -0,0 +1,63 @@
+error: multiple implementations of this structure
+ --> $DIR/impl.rs:10:1
+ |
+LL | / impl MyStruct {
+LL | | fn second() {}
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::multiple-inherent-impl` implied by `-D warnings`
+note: first implementation here
+ --> $DIR/impl.rs:6:1
+ |
+LL | / impl MyStruct {
+LL | | fn first() {}
+LL | | }
+ | |_^
+
+error: multiple implementations of this structure
+ --> $DIR/impl.rs:24:5
+ |
+LL | / impl super::MyStruct {
+LL | | fn third() {}
+LL | | }
+ | |_____^
+ |
+note: first implementation here
+ --> $DIR/impl.rs:6:1
+ |
+LL | / impl MyStruct {
+LL | | fn first() {}
+LL | | }
+ | |_^
+
+error: multiple implementations of this structure
+ --> $DIR/impl.rs:44:1
+ |
+LL | / impl WithArgs<u64> {
+LL | | fn f3() {}
+LL | | }
+ | |_^
+ |
+note: first implementation here
+ --> $DIR/impl.rs:41:1
+ |
+LL | / impl WithArgs<u64> {
+LL | | fn f2() {}
+LL | | }
+ | |_^
+
+error: multiple implementations of this structure
+ --> $DIR/impl.rs:65:1
+ |
+LL | impl OneAllowedImpl {} // Lint, only one of the three blocks is allowed.
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: first implementation here
+ --> $DIR/impl.rs:62:1
+ |
+LL | impl OneAllowedImpl {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/implicit_clone.fixed b/src/tools/clippy/tests/ui/implicit_clone.fixed
new file mode 100644
index 000000000..33770fc2a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_clone.fixed
@@ -0,0 +1,118 @@
+// run-rustfix
+#![warn(clippy::implicit_clone)]
+#![allow(clippy::clone_on_copy, clippy::redundant_clone)]
+use std::borrow::Borrow;
+use std::ffi::{OsStr, OsString};
+use std::path::PathBuf;
+
+fn return_owned_from_slice(slice: &[u32]) -> Vec<u32> {
+ slice.to_owned()
+}
+
+pub fn own_same<T>(v: T) -> T
+where
+ T: ToOwned<Owned = T>,
+{
+ v.to_owned()
+}
+
+pub fn own_same_from_ref<T>(v: &T) -> T
+where
+ T: ToOwned<Owned = T>,
+{
+ v.to_owned()
+}
+
+pub fn own_different<T, U>(v: T) -> U
+where
+ T: ToOwned<Owned = U>,
+{
+ v.to_owned()
+}
+
+#[derive(Copy, Clone)]
+struct Kitten;
+impl Kitten {
+ // badly named method
+ fn to_vec(self) -> Kitten {
+ Kitten {}
+ }
+}
+impl Borrow<BorrowedKitten> for Kitten {
+ fn borrow(&self) -> &BorrowedKitten {
+ static VALUE: BorrowedKitten = BorrowedKitten {};
+ &VALUE
+ }
+}
+
+struct BorrowedKitten;
+impl ToOwned for BorrowedKitten {
+ type Owned = Kitten;
+ fn to_owned(&self) -> Kitten {
+ Kitten {}
+ }
+}
+
+mod weird {
+ #[allow(clippy::ptr_arg)]
+ pub fn to_vec(v: &Vec<u32>) -> Vec<u32> {
+ v.clone()
+ }
+}
+
+fn main() {
+ let vec = vec![5];
+ let _ = return_owned_from_slice(&vec);
+ let _ = vec.clone();
+ let _ = vec.clone();
+
+ let vec_ref = &vec;
+ let _ = return_owned_from_slice(vec_ref);
+ let _ = vec_ref.clone();
+ let _ = vec_ref.clone();
+
+ // we expect no lint for this
+ let _ = weird::to_vec(&vec);
+
+ // we expect no lints for this
+ let slice: &[u32] = &[1, 2, 3, 4, 5];
+ let _ = return_owned_from_slice(slice);
+ let _ = slice.to_owned();
+ let _ = slice.to_vec();
+
+ let str = "hello world".to_string();
+ let _ = str.clone();
+
+ // testing w/ an arbitrary type
+ let kitten = Kitten {};
+ let _ = kitten.clone();
+ let _ = own_same_from_ref(&kitten);
+ // this shouldn't lint
+ let _ = kitten.to_vec();
+
+ // we expect no lints for this
+ let borrowed = BorrowedKitten {};
+ let _ = borrowed.to_owned();
+
+ let pathbuf = PathBuf::new();
+ let _ = pathbuf.clone();
+ let _ = pathbuf.clone();
+
+ let os_string = OsString::from("foo");
+ let _ = os_string.clone();
+ let _ = os_string.clone();
+
+ // we expect no lints for this
+ let os_str = OsStr::new("foo");
+ let _ = os_str.to_owned();
+ let _ = os_str.to_os_string();
+
+ // issue #8227
+ let pathbuf_ref = &pathbuf;
+ let pathbuf_ref = &pathbuf_ref;
+ let _ = pathbuf_ref.to_owned(); // Don't lint. Returns `&PathBuf`
+ let _ = (*pathbuf_ref).clone();
+ let pathbuf_ref = &pathbuf_ref;
+ let _ = pathbuf_ref.to_owned(); // Don't lint. Returns `&&PathBuf`
+ let _ = (**pathbuf_ref).clone();
+}
diff --git a/src/tools/clippy/tests/ui/implicit_clone.rs b/src/tools/clippy/tests/ui/implicit_clone.rs
new file mode 100644
index 000000000..fc896525b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_clone.rs
@@ -0,0 +1,118 @@
+// run-rustfix
+#![warn(clippy::implicit_clone)]
+#![allow(clippy::clone_on_copy, clippy::redundant_clone)]
+use std::borrow::Borrow;
+use std::ffi::{OsStr, OsString};
+use std::path::PathBuf;
+
+fn return_owned_from_slice(slice: &[u32]) -> Vec<u32> {
+ slice.to_owned()
+}
+
+pub fn own_same<T>(v: T) -> T
+where
+ T: ToOwned<Owned = T>,
+{
+ v.to_owned()
+}
+
+pub fn own_same_from_ref<T>(v: &T) -> T
+where
+ T: ToOwned<Owned = T>,
+{
+ v.to_owned()
+}
+
+pub fn own_different<T, U>(v: T) -> U
+where
+ T: ToOwned<Owned = U>,
+{
+ v.to_owned()
+}
+
+#[derive(Copy, Clone)]
+struct Kitten;
+impl Kitten {
+ // badly named method
+ fn to_vec(self) -> Kitten {
+ Kitten {}
+ }
+}
+impl Borrow<BorrowedKitten> for Kitten {
+ fn borrow(&self) -> &BorrowedKitten {
+ static VALUE: BorrowedKitten = BorrowedKitten {};
+ &VALUE
+ }
+}
+
+struct BorrowedKitten;
+impl ToOwned for BorrowedKitten {
+ type Owned = Kitten;
+ fn to_owned(&self) -> Kitten {
+ Kitten {}
+ }
+}
+
+mod weird {
+ #[allow(clippy::ptr_arg)]
+ pub fn to_vec(v: &Vec<u32>) -> Vec<u32> {
+ v.clone()
+ }
+}
+
+fn main() {
+ let vec = vec![5];
+ let _ = return_owned_from_slice(&vec);
+ let _ = vec.to_owned();
+ let _ = vec.to_vec();
+
+ let vec_ref = &vec;
+ let _ = return_owned_from_slice(vec_ref);
+ let _ = vec_ref.to_owned();
+ let _ = vec_ref.to_vec();
+
+ // we expect no lint for this
+ let _ = weird::to_vec(&vec);
+
+ // we expect no lints for this
+ let slice: &[u32] = &[1, 2, 3, 4, 5];
+ let _ = return_owned_from_slice(slice);
+ let _ = slice.to_owned();
+ let _ = slice.to_vec();
+
+ let str = "hello world".to_string();
+ let _ = str.to_owned();
+
+ // testing w/ an arbitrary type
+ let kitten = Kitten {};
+ let _ = kitten.to_owned();
+ let _ = own_same_from_ref(&kitten);
+ // this shouldn't lint
+ let _ = kitten.to_vec();
+
+ // we expect no lints for this
+ let borrowed = BorrowedKitten {};
+ let _ = borrowed.to_owned();
+
+ let pathbuf = PathBuf::new();
+ let _ = pathbuf.to_owned();
+ let _ = pathbuf.to_path_buf();
+
+ let os_string = OsString::from("foo");
+ let _ = os_string.to_owned();
+ let _ = os_string.to_os_string();
+
+ // we expect no lints for this
+ let os_str = OsStr::new("foo");
+ let _ = os_str.to_owned();
+ let _ = os_str.to_os_string();
+
+ // issue #8227
+ let pathbuf_ref = &pathbuf;
+ let pathbuf_ref = &pathbuf_ref;
+ let _ = pathbuf_ref.to_owned(); // Don't lint. Returns `&PathBuf`
+ let _ = pathbuf_ref.to_path_buf();
+ let pathbuf_ref = &pathbuf_ref;
+ let _ = pathbuf_ref.to_owned(); // Don't lint. Returns `&&PathBuf`
+ let _ = pathbuf_ref.to_path_buf();
+}
diff --git a/src/tools/clippy/tests/ui/implicit_clone.stderr b/src/tools/clippy/tests/ui/implicit_clone.stderr
new file mode 100644
index 000000000..92c1aa58a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_clone.stderr
@@ -0,0 +1,76 @@
+error: implicitly cloning a `Vec` by calling `to_owned` on its dereferenced type
+ --> $DIR/implicit_clone.rs:66:13
+ |
+LL | let _ = vec.to_owned();
+ | ^^^^^^^^^^^^^^ help: consider using: `vec.clone()`
+ |
+ = note: `-D clippy::implicit-clone` implied by `-D warnings`
+
+error: implicitly cloning a `Vec` by calling `to_vec` on its dereferenced type
+ --> $DIR/implicit_clone.rs:67:13
+ |
+LL | let _ = vec.to_vec();
+ | ^^^^^^^^^^^^ help: consider using: `vec.clone()`
+
+error: implicitly cloning a `Vec` by calling `to_owned` on its dereferenced type
+ --> $DIR/implicit_clone.rs:71:13
+ |
+LL | let _ = vec_ref.to_owned();
+ | ^^^^^^^^^^^^^^^^^^ help: consider using: `vec_ref.clone()`
+
+error: implicitly cloning a `Vec` by calling `to_vec` on its dereferenced type
+ --> $DIR/implicit_clone.rs:72:13
+ |
+LL | let _ = vec_ref.to_vec();
+ | ^^^^^^^^^^^^^^^^ help: consider using: `vec_ref.clone()`
+
+error: implicitly cloning a `String` by calling `to_owned` on its dereferenced type
+ --> $DIR/implicit_clone.rs:84:13
+ |
+LL | let _ = str.to_owned();
+ | ^^^^^^^^^^^^^^ help: consider using: `str.clone()`
+
+error: implicitly cloning a `Kitten` by calling `to_owned` on its dereferenced type
+ --> $DIR/implicit_clone.rs:88:13
+ |
+LL | let _ = kitten.to_owned();
+ | ^^^^^^^^^^^^^^^^^ help: consider using: `kitten.clone()`
+
+error: implicitly cloning a `PathBuf` by calling `to_owned` on its dereferenced type
+ --> $DIR/implicit_clone.rs:98:13
+ |
+LL | let _ = pathbuf.to_owned();
+ | ^^^^^^^^^^^^^^^^^^ help: consider using: `pathbuf.clone()`
+
+error: implicitly cloning a `PathBuf` by calling `to_path_buf` on its dereferenced type
+ --> $DIR/implicit_clone.rs:99:13
+ |
+LL | let _ = pathbuf.to_path_buf();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `pathbuf.clone()`
+
+error: implicitly cloning a `OsString` by calling `to_owned` on its dereferenced type
+ --> $DIR/implicit_clone.rs:102:13
+ |
+LL | let _ = os_string.to_owned();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `os_string.clone()`
+
+error: implicitly cloning a `OsString` by calling `to_os_string` on its dereferenced type
+ --> $DIR/implicit_clone.rs:103:13
+ |
+LL | let _ = os_string.to_os_string();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `os_string.clone()`
+
+error: implicitly cloning a `PathBuf` by calling `to_path_buf` on its dereferenced type
+ --> $DIR/implicit_clone.rs:114:13
+ |
+LL | let _ = pathbuf_ref.to_path_buf();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(*pathbuf_ref).clone()`
+
+error: implicitly cloning a `PathBuf` by calling `to_path_buf` on its dereferenced type
+ --> $DIR/implicit_clone.rs:117:13
+ |
+LL | let _ = pathbuf_ref.to_path_buf();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(**pathbuf_ref).clone()`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/implicit_hasher.rs b/src/tools/clippy/tests/ui/implicit_hasher.rs
new file mode 100644
index 000000000..fd96ca3f4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_hasher.rs
@@ -0,0 +1,102 @@
+// aux-build:implicit_hasher_macros.rs
+#![deny(clippy::implicit_hasher)]
+#![allow(unused)]
+
+#[macro_use]
+extern crate implicit_hasher_macros;
+
+use std::cmp::Eq;
+use std::collections::{HashMap, HashSet};
+use std::hash::{BuildHasher, Hash};
+
+pub trait Foo<T>: Sized {
+ fn make() -> (Self, Self);
+}
+
+impl<K: Hash + Eq, V> Foo<i8> for HashMap<K, V> {
+ fn make() -> (Self, Self) {
+ // OK, don't suggest modifying these
+ let _: HashMap<i32, i32> = HashMap::new();
+ let _: HashSet<i32> = HashSet::new();
+
+ (HashMap::new(), HashMap::with_capacity(10))
+ }
+}
+impl<K: Hash + Eq, V> Foo<i8> for (HashMap<K, V>,) {
+ fn make() -> (Self, Self) {
+ ((HashMap::new(),), (HashMap::with_capacity(10),))
+ }
+}
+impl Foo<i16> for HashMap<String, String> {
+ fn make() -> (Self, Self) {
+ (HashMap::new(), HashMap::with_capacity(10))
+ }
+}
+
+impl<K: Hash + Eq, V, S: BuildHasher + Default> Foo<i32> for HashMap<K, V, S> {
+ fn make() -> (Self, Self) {
+ (HashMap::default(), HashMap::with_capacity_and_hasher(10, S::default()))
+ }
+}
+impl<S: BuildHasher + Default> Foo<i64> for HashMap<String, String, S> {
+ fn make() -> (Self, Self) {
+ (HashMap::default(), HashMap::with_capacity_and_hasher(10, S::default()))
+ }
+}
+
+impl<T: Hash + Eq> Foo<i8> for HashSet<T> {
+ fn make() -> (Self, Self) {
+ (HashSet::new(), HashSet::with_capacity(10))
+ }
+}
+impl Foo<i16> for HashSet<String> {
+ fn make() -> (Self, Self) {
+ (HashSet::new(), HashSet::with_capacity(10))
+ }
+}
+
+impl<T: Hash + Eq, S: BuildHasher + Default> Foo<i32> for HashSet<T, S> {
+ fn make() -> (Self, Self) {
+ (HashSet::default(), HashSet::with_capacity_and_hasher(10, S::default()))
+ }
+}
+impl<S: BuildHasher + Default> Foo<i64> for HashSet<String, S> {
+ fn make() -> (Self, Self) {
+ (HashSet::default(), HashSet::with_capacity_and_hasher(10, S::default()))
+ }
+}
+
+pub fn foo(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+
+macro_rules! gen {
+ (impl) => {
+ impl<K: Hash + Eq, V> Foo<u8> for HashMap<K, V> {
+ fn make() -> (Self, Self) {
+ (HashMap::new(), HashMap::with_capacity(10))
+ }
+ }
+ };
+
+ (fn $name:ident) => {
+ pub fn $name(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ };
+}
+#[rustfmt::skip]
+gen!(impl);
+gen!(fn bar);
+
+// When the macro is in a different file, the suggestion spans can't be combined properly
+// and should not cause an ICE
+// See #2707
+#[macro_use]
+#[path = "auxiliary/test_macro.rs"]
+pub mod test_macro;
+__implicit_hasher_test_macro!(impl<K, V> for HashMap<K, V> where V: test_macro::A);
+
+// #4260
+implicit_hasher_fn!();
+
+// #7712
+pub async fn election_vote(_data: HashMap<i32, i32>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/implicit_hasher.stderr b/src/tools/clippy/tests/ui/implicit_hasher.stderr
new file mode 100644
index 000000000..59b0fba2a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_hasher.stderr
@@ -0,0 +1,164 @@
+error: impl for `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:16:35
+ |
+LL | impl<K: Hash + Eq, V> Foo<i8> for HashMap<K, V> {
+ | ^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/implicit_hasher.rs:2:9
+ |
+LL | #![deny(clippy::implicit_hasher)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+help: consider adding a type parameter
+ |
+LL | impl<K: Hash + Eq, V, S: ::std::hash::BuildHasher + Default> Foo<i8> for HashMap<K, V, S> {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | (HashMap::default(), HashMap::with_capacity_and_hasher(10, Default::default()))
+ | ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: impl for `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:25:36
+ |
+LL | impl<K: Hash + Eq, V> Foo<i8> for (HashMap<K, V>,) {
+ | ^^^^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | impl<K: Hash + Eq, V, S: ::std::hash::BuildHasher + Default> Foo<i8> for (HashMap<K, V, S>,) {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | ((HashMap::default(),), (HashMap::with_capacity_and_hasher(10, Default::default()),))
+ | ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: impl for `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:30:19
+ |
+LL | impl Foo<i16> for HashMap<String, String> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | impl<S: ::std::hash::BuildHasher + Default> Foo<i16> for HashMap<String, String, S> {
+ | +++++++++++++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | (HashMap::default(), HashMap::with_capacity_and_hasher(10, Default::default()))
+ | ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: impl for `HashSet` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:47:32
+ |
+LL | impl<T: Hash + Eq> Foo<i8> for HashSet<T> {
+ | ^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | impl<T: Hash + Eq, S: ::std::hash::BuildHasher + Default> Foo<i8> for HashSet<T, S> {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | (HashSet::default(), HashSet::with_capacity_and_hasher(10, Default::default()))
+ | ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: impl for `HashSet` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:52:19
+ |
+LL | impl Foo<i16> for HashSet<String> {
+ | ^^^^^^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | impl<S: ::std::hash::BuildHasher + Default> Foo<i16> for HashSet<String, S> {
+ | +++++++++++++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | (HashSet::default(), HashSet::with_capacity_and_hasher(10, Default::default()))
+ | ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: parameter of type `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:69:23
+ |
+LL | pub fn foo(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | pub fn foo<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32, S>, _set: &mut HashSet<i32>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~
+
+error: parameter of type `HashSet` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:69:53
+ |
+LL | pub fn foo(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ | ^^^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | pub fn foo<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32, S>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~
+
+error: impl for `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:73:43
+ |
+LL | impl<K: Hash + Eq, V> Foo<u8> for HashMap<K, V> {
+ | ^^^^^^^^^^^^^
+...
+LL | gen!(impl);
+ | ---------- in this macro invocation
+ |
+ = note: this error originates in the macro `gen` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: consider adding a type parameter
+ |
+LL | impl<K: Hash + Eq, V, S: ::std::hash::BuildHasher + Default> Foo<u8> for HashMap<K, V, S> {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~
+help: ...and use generic constructor
+ |
+LL | (HashMap::default(), HashMap::with_capacity_and_hasher(10, Default::default()))
+ | ~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: parameter of type `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:81:33
+ |
+LL | pub fn $name(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ | ^^^^^^^^^^^^^^^^^
+...
+LL | gen!(fn bar);
+ | ------------ in this macro invocation
+ |
+ = note: this error originates in the macro `gen` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: consider adding a type parameter
+ |
+LL | pub fn $name<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32, S>, _set: &mut HashSet<i32>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~
+
+error: parameter of type `HashSet` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:81:63
+ |
+LL | pub fn $name(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32>) {}
+ | ^^^^^^^^^^^^
+...
+LL | gen!(fn bar);
+ | ------------ in this macro invocation
+ |
+ = note: this error originates in the macro `gen` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: consider adding a type parameter
+ |
+LL | pub fn $name<S: ::std::hash::BuildHasher>(_map: &mut HashMap<i32, i32>, _set: &mut HashSet<i32, S>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~
+
+error: parameter of type `HashMap` should be generalized over different hashers
+ --> $DIR/implicit_hasher.rs:100:35
+ |
+LL | pub async fn election_vote(_data: HashMap<i32, i32>) {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+help: consider adding a type parameter
+ |
+LL | pub async fn election_vote<S: ::std::hash::BuildHasher>(_data: HashMap<i32, i32, S>) {}
+ | +++++++++++++++++++++++++++++ ~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/implicit_return.fixed b/src/tools/clippy/tests/ui/implicit_return.fixed
new file mode 100644
index 000000000..5e55b8b67
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_return.fixed
@@ -0,0 +1,140 @@
+// run-rustfix
+#![feature(lint_reasons)]
+#![warn(clippy::implicit_return)]
+#![allow(clippy::needless_return, clippy::needless_bool, unused, clippy::never_loop)]
+
+fn test_end_of_fn() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+
+ return true
+}
+
+fn test_if_block() -> bool {
+ if true { return true } else { return false }
+}
+
+#[rustfmt::skip]
+fn test_match(x: bool) -> bool {
+ match x {
+ true => return false,
+ false => { return true },
+ }
+}
+
+fn test_match_with_unreachable(x: bool) -> bool {
+ match x {
+ true => return false,
+ false => unreachable!(),
+ }
+}
+
+fn test_loop() -> bool {
+ loop {
+ return true;
+ }
+}
+
+fn test_loop_with_block() -> bool {
+ loop {
+ {
+ return true;
+ }
+ }
+}
+
+fn test_loop_with_nests() -> bool {
+ loop {
+ if true {
+ return true;
+ } else {
+ let _ = true;
+ }
+ }
+}
+
+#[allow(clippy::redundant_pattern_matching)]
+fn test_loop_with_if_let() -> bool {
+ loop {
+ if let Some(x) = Some(true) {
+ return x;
+ }
+ }
+}
+
+fn test_closure() {
+ #[rustfmt::skip]
+ let _ = || { return true };
+ let _ = || return true;
+}
+
+fn test_panic() -> bool {
+ panic!()
+}
+
+fn test_return_macro() -> String {
+ return format!("test {}", "test")
+}
+
+fn macro_branch_test() -> bool {
+ macro_rules! m {
+ ($t:expr, $f:expr) => {
+ if true { $t } else { $f }
+ };
+ }
+ return m!(true, false)
+}
+
+fn loop_test() -> bool {
+ 'outer: loop {
+ if true {
+ return true;
+ }
+
+ let _ = loop {
+ if false {
+ return false;
+ }
+ if true {
+ break true;
+ }
+ };
+ }
+}
+
+fn loop_macro_test() -> bool {
+ macro_rules! m {
+ ($e:expr) => {
+ break $e
+ };
+ }
+ return loop {
+ m!(true);
+ }
+}
+
+fn divergent_test() -> bool {
+ fn diverge() -> ! {
+ panic!()
+ }
+ diverge()
+}
+
+// issue #6940
+async fn foo() -> bool {
+ return true
+}
+
+fn main() {}
+
+fn check_expect() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+
+ #[expect(clippy::implicit_return)]
+ true
+}
diff --git a/src/tools/clippy/tests/ui/implicit_return.rs b/src/tools/clippy/tests/ui/implicit_return.rs
new file mode 100644
index 000000000..76f0a9803
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_return.rs
@@ -0,0 +1,140 @@
+// run-rustfix
+#![feature(lint_reasons)]
+#![warn(clippy::implicit_return)]
+#![allow(clippy::needless_return, clippy::needless_bool, unused, clippy::never_loop)]
+
+fn test_end_of_fn() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+
+ true
+}
+
+fn test_if_block() -> bool {
+ if true { true } else { false }
+}
+
+#[rustfmt::skip]
+fn test_match(x: bool) -> bool {
+ match x {
+ true => false,
+ false => { true },
+ }
+}
+
+fn test_match_with_unreachable(x: bool) -> bool {
+ match x {
+ true => return false,
+ false => unreachable!(),
+ }
+}
+
+fn test_loop() -> bool {
+ loop {
+ break true;
+ }
+}
+
+fn test_loop_with_block() -> bool {
+ loop {
+ {
+ break true;
+ }
+ }
+}
+
+fn test_loop_with_nests() -> bool {
+ loop {
+ if true {
+ break true;
+ } else {
+ let _ = true;
+ }
+ }
+}
+
+#[allow(clippy::redundant_pattern_matching)]
+fn test_loop_with_if_let() -> bool {
+ loop {
+ if let Some(x) = Some(true) {
+ return x;
+ }
+ }
+}
+
+fn test_closure() {
+ #[rustfmt::skip]
+ let _ = || { true };
+ let _ = || true;
+}
+
+fn test_panic() -> bool {
+ panic!()
+}
+
+fn test_return_macro() -> String {
+ format!("test {}", "test")
+}
+
+fn macro_branch_test() -> bool {
+ macro_rules! m {
+ ($t:expr, $f:expr) => {
+ if true { $t } else { $f }
+ };
+ }
+ m!(true, false)
+}
+
+fn loop_test() -> bool {
+ 'outer: loop {
+ if true {
+ break true;
+ }
+
+ let _ = loop {
+ if false {
+ break 'outer false;
+ }
+ if true {
+ break true;
+ }
+ };
+ }
+}
+
+fn loop_macro_test() -> bool {
+ macro_rules! m {
+ ($e:expr) => {
+ break $e
+ };
+ }
+ loop {
+ m!(true);
+ }
+}
+
+fn divergent_test() -> bool {
+ fn diverge() -> ! {
+ panic!()
+ }
+ diverge()
+}
+
+// issue #6940
+async fn foo() -> bool {
+ true
+}
+
+fn main() {}
+
+fn check_expect() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+
+ #[expect(clippy::implicit_return)]
+ true
+}
diff --git a/src/tools/clippy/tests/ui/implicit_return.stderr b/src/tools/clippy/tests/ui/implicit_return.stderr
new file mode 100644
index 000000000..522bc3bf8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_return.stderr
@@ -0,0 +1,109 @@
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:12:5
+ |
+LL | true
+ | ^^^^ help: add `return` as shown: `return true`
+ |
+ = note: `-D clippy::implicit-return` implied by `-D warnings`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:16:15
+ |
+LL | if true { true } else { false }
+ | ^^^^ help: add `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:16:29
+ |
+LL | if true { true } else { false }
+ | ^^^^^ help: add `return` as shown: `return false`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:22:17
+ |
+LL | true => false,
+ | ^^^^^ help: add `return` as shown: `return false`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:23:20
+ |
+LL | false => { true },
+ | ^^^^ help: add `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:36:9
+ |
+LL | break true;
+ | ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:43:13
+ |
+LL | break true;
+ | ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:51:13
+ |
+LL | break true;
+ | ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:69:18
+ |
+LL | let _ = || { true };
+ | ^^^^ help: add `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:70:16
+ |
+LL | let _ = || true;
+ | ^^^^ help: add `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:78:5
+ |
+LL | format!("test {}", "test")
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add `return` as shown: `return format!("test {}", "test")`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:87:5
+ |
+LL | m!(true, false)
+ | ^^^^^^^^^^^^^^^ help: add `return` as shown: `return m!(true, false)`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:93:13
+ |
+LL | break true;
+ | ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:98:17
+ |
+LL | break 'outer false;
+ | ^^^^^^^^^^^^^^^^^^ help: change `break` to `return` as shown: `return false`
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:113:5
+ |
+LL | / loop {
+LL | | m!(true);
+LL | | }
+ | |_____^
+ |
+help: add `return` as shown
+ |
+LL ~ return loop {
+LL + m!(true);
+LL + }
+ |
+
+error: missing `return` statement
+ --> $DIR/implicit_return.rs:127:5
+ |
+LL | true
+ | ^^^^ help: add `return` as shown: `return true`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/implicit_saturating_sub.fixed b/src/tools/clippy/tests/ui/implicit_saturating_sub.fixed
new file mode 100644
index 000000000..e6f57e926
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_saturating_sub.fixed
@@ -0,0 +1,168 @@
+// run-rustfix
+#![allow(unused_assignments, unused_mut, clippy::assign_op_pattern)]
+#![warn(clippy::implicit_saturating_sub)]
+
+fn main() {
+ // Tests for unsigned integers
+
+ let end_8: u8 = 10;
+ let start_8: u8 = 5;
+ let mut u_8: u8 = end_8 - start_8;
+
+ // Lint
+ u_8 = u_8.saturating_sub(1);
+
+ match end_8 {
+ 10 => {
+ // Lint
+ u_8 = u_8.saturating_sub(1);
+ },
+ 11 => u_8 += 1,
+ _ => u_8 = 0,
+ }
+
+ let end_16: u16 = 40;
+ let start_16: u16 = 35;
+
+ let mut u_16: u16 = end_16 - start_16;
+
+ // Lint
+ u_16 = u_16.saturating_sub(1);
+
+ let mut end_32: u32 = 7010;
+ let mut start_32: u32 = 7000;
+
+ let mut u_32: u32 = end_32 - start_32;
+
+ // Lint
+ u_32 = u_32.saturating_sub(1);
+
+ // No Lint
+ if u_32 > 0 {
+ u_16 += 1;
+ }
+
+ // No Lint
+ if u_32 != 0 {
+ end_32 -= 1;
+ start_32 += 1;
+ }
+
+ let mut end_64: u64 = 75001;
+ let mut start_64: u64 = 75000;
+
+ let mut u_64: u64 = end_64 - start_64;
+
+ // Lint
+ u_64 = u_64.saturating_sub(1);
+
+ // Lint
+ u_64 = u_64.saturating_sub(1);
+
+ // Lint
+ u_64 = u_64.saturating_sub(1);
+
+ // No Lint
+ if u_64 >= 1 {
+ u_64 -= 1;
+ }
+
+ // No Lint
+ if u_64 > 0 {
+ end_64 -= 1;
+ }
+
+ // Tests for usize
+ let end_usize: usize = 8054;
+ let start_usize: usize = 8050;
+
+ let mut u_usize: usize = end_usize - start_usize;
+
+ // Lint
+ u_usize = u_usize.saturating_sub(1);
+
+ // Tests for signed integers
+
+ let endi_8: i8 = 10;
+ let starti_8: i8 = 50;
+
+ let mut i_8: i8 = endi_8 - starti_8;
+
+ // Lint
+ i_8 = i_8.saturating_sub(1);
+
+ // Lint
+ i_8 = i_8.saturating_sub(1);
+
+ // Lint
+ i_8 = i_8.saturating_sub(1);
+
+ // Lint
+ i_8 = i_8.saturating_sub(1);
+
+ let endi_16: i16 = 45;
+ let starti_16: i16 = 44;
+
+ let mut i_16: i16 = endi_16 - starti_16;
+
+ // Lint
+ i_16 = i_16.saturating_sub(1);
+
+ // Lint
+ i_16 = i_16.saturating_sub(1);
+
+ // Lint
+ i_16 = i_16.saturating_sub(1);
+
+ // Lint
+ i_16 = i_16.saturating_sub(1);
+
+ let endi_32: i32 = 45;
+ let starti_32: i32 = 44;
+
+ let mut i_32: i32 = endi_32 - starti_32;
+
+ // Lint
+ i_32 = i_32.saturating_sub(1);
+
+ // Lint
+ i_32 = i_32.saturating_sub(1);
+
+ // Lint
+ i_32 = i_32.saturating_sub(1);
+
+ // Lint
+ i_32 = i_32.saturating_sub(1);
+
+ let endi_64: i64 = 45;
+ let starti_64: i64 = 44;
+
+ let mut i_64: i64 = endi_64 - starti_64;
+
+ // Lint
+ i_64 = i_64.saturating_sub(1);
+
+ // Lint
+ i_64 = i_64.saturating_sub(1);
+
+ // Lint
+ i_64 = i_64.saturating_sub(1);
+
+ // No Lint
+ if i_64 > 0 {
+ i_64 -= 1;
+ }
+
+ // No Lint
+ if i_64 != 0 {
+ i_64 -= 1;
+ }
+
+ // issue #7831
+ // No Lint
+ if u_32 > 0 {
+ u_32 -= 1;
+ } else {
+ println!("side effect");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/implicit_saturating_sub.rs b/src/tools/clippy/tests/ui/implicit_saturating_sub.rs
new file mode 100644
index 000000000..8bb28d149
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_saturating_sub.rs
@@ -0,0 +1,214 @@
+// run-rustfix
+#![allow(unused_assignments, unused_mut, clippy::assign_op_pattern)]
+#![warn(clippy::implicit_saturating_sub)]
+
+fn main() {
+ // Tests for unsigned integers
+
+ let end_8: u8 = 10;
+ let start_8: u8 = 5;
+ let mut u_8: u8 = end_8 - start_8;
+
+ // Lint
+ if u_8 > 0 {
+ u_8 = u_8 - 1;
+ }
+
+ match end_8 {
+ 10 => {
+ // Lint
+ if u_8 > 0 {
+ u_8 -= 1;
+ }
+ },
+ 11 => u_8 += 1,
+ _ => u_8 = 0,
+ }
+
+ let end_16: u16 = 40;
+ let start_16: u16 = 35;
+
+ let mut u_16: u16 = end_16 - start_16;
+
+ // Lint
+ if u_16 > 0 {
+ u_16 -= 1;
+ }
+
+ let mut end_32: u32 = 7010;
+ let mut start_32: u32 = 7000;
+
+ let mut u_32: u32 = end_32 - start_32;
+
+ // Lint
+ if u_32 != 0 {
+ u_32 -= 1;
+ }
+
+ // No Lint
+ if u_32 > 0 {
+ u_16 += 1;
+ }
+
+ // No Lint
+ if u_32 != 0 {
+ end_32 -= 1;
+ start_32 += 1;
+ }
+
+ let mut end_64: u64 = 75001;
+ let mut start_64: u64 = 75000;
+
+ let mut u_64: u64 = end_64 - start_64;
+
+ // Lint
+ if u_64 > 0 {
+ u_64 -= 1;
+ }
+
+ // Lint
+ if 0 < u_64 {
+ u_64 -= 1;
+ }
+
+ // Lint
+ if 0 != u_64 {
+ u_64 -= 1;
+ }
+
+ // No Lint
+ if u_64 >= 1 {
+ u_64 -= 1;
+ }
+
+ // No Lint
+ if u_64 > 0 {
+ end_64 -= 1;
+ }
+
+ // Tests for usize
+ let end_usize: usize = 8054;
+ let start_usize: usize = 8050;
+
+ let mut u_usize: usize = end_usize - start_usize;
+
+ // Lint
+ if u_usize > 0 {
+ u_usize -= 1;
+ }
+
+ // Tests for signed integers
+
+ let endi_8: i8 = 10;
+ let starti_8: i8 = 50;
+
+ let mut i_8: i8 = endi_8 - starti_8;
+
+ // Lint
+ if i_8 > i8::MIN {
+ i_8 -= 1;
+ }
+
+ // Lint
+ if i_8 > i8::MIN {
+ i_8 -= 1;
+ }
+
+ // Lint
+ if i_8 != i8::MIN {
+ i_8 -= 1;
+ }
+
+ // Lint
+ if i_8 != i8::MIN {
+ i_8 -= 1;
+ }
+
+ let endi_16: i16 = 45;
+ let starti_16: i16 = 44;
+
+ let mut i_16: i16 = endi_16 - starti_16;
+
+ // Lint
+ if i_16 > i16::MIN {
+ i_16 -= 1;
+ }
+
+ // Lint
+ if i_16 > i16::MIN {
+ i_16 -= 1;
+ }
+
+ // Lint
+ if i_16 != i16::MIN {
+ i_16 -= 1;
+ }
+
+ // Lint
+ if i_16 != i16::MIN {
+ i_16 -= 1;
+ }
+
+ let endi_32: i32 = 45;
+ let starti_32: i32 = 44;
+
+ let mut i_32: i32 = endi_32 - starti_32;
+
+ // Lint
+ if i_32 > i32::MIN {
+ i_32 -= 1;
+ }
+
+ // Lint
+ if i_32 > i32::MIN {
+ i_32 -= 1;
+ }
+
+ // Lint
+ if i_32 != i32::MIN {
+ i_32 -= 1;
+ }
+
+ // Lint
+ if i_32 != i32::MIN {
+ i_32 -= 1;
+ }
+
+ let endi_64: i64 = 45;
+ let starti_64: i64 = 44;
+
+ let mut i_64: i64 = endi_64 - starti_64;
+
+ // Lint
+ if i64::MIN < i_64 {
+ i_64 -= 1;
+ }
+
+ // Lint
+ if i64::MIN != i_64 {
+ i_64 -= 1;
+ }
+
+ // Lint
+ if i64::MIN < i_64 {
+ i_64 -= 1;
+ }
+
+ // No Lint
+ if i_64 > 0 {
+ i_64 -= 1;
+ }
+
+ // No Lint
+ if i_64 != 0 {
+ i_64 -= 1;
+ }
+
+ // issue #7831
+ // No Lint
+ if u_32 > 0 {
+ u_32 -= 1;
+ } else {
+ println!("side effect");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/implicit_saturating_sub.stderr b/src/tools/clippy/tests/ui/implicit_saturating_sub.stderr
new file mode 100644
index 000000000..5bb9a6064
--- /dev/null
+++ b/src/tools/clippy/tests/ui/implicit_saturating_sub.stderr
@@ -0,0 +1,188 @@
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:13:5
+ |
+LL | / if u_8 > 0 {
+LL | | u_8 = u_8 - 1;
+LL | | }
+ | |_____^ help: try: `u_8 = u_8.saturating_sub(1);`
+ |
+ = note: `-D clippy::implicit-saturating-sub` implied by `-D warnings`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:20:13
+ |
+LL | / if u_8 > 0 {
+LL | | u_8 -= 1;
+LL | | }
+ | |_____________^ help: try: `u_8 = u_8.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:34:5
+ |
+LL | / if u_16 > 0 {
+LL | | u_16 -= 1;
+LL | | }
+ | |_____^ help: try: `u_16 = u_16.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:44:5
+ |
+LL | / if u_32 != 0 {
+LL | | u_32 -= 1;
+LL | | }
+ | |_____^ help: try: `u_32 = u_32.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:65:5
+ |
+LL | / if u_64 > 0 {
+LL | | u_64 -= 1;
+LL | | }
+ | |_____^ help: try: `u_64 = u_64.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:70:5
+ |
+LL | / if 0 < u_64 {
+LL | | u_64 -= 1;
+LL | | }
+ | |_____^ help: try: `u_64 = u_64.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:75:5
+ |
+LL | / if 0 != u_64 {
+LL | | u_64 -= 1;
+LL | | }
+ | |_____^ help: try: `u_64 = u_64.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:96:5
+ |
+LL | / if u_usize > 0 {
+LL | | u_usize -= 1;
+LL | | }
+ | |_____^ help: try: `u_usize = u_usize.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:108:5
+ |
+LL | / if i_8 > i8::MIN {
+LL | | i_8 -= 1;
+LL | | }
+ | |_____^ help: try: `i_8 = i_8.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:113:5
+ |
+LL | / if i_8 > i8::MIN {
+LL | | i_8 -= 1;
+LL | | }
+ | |_____^ help: try: `i_8 = i_8.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:118:5
+ |
+LL | / if i_8 != i8::MIN {
+LL | | i_8 -= 1;
+LL | | }
+ | |_____^ help: try: `i_8 = i_8.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:123:5
+ |
+LL | / if i_8 != i8::MIN {
+LL | | i_8 -= 1;
+LL | | }
+ | |_____^ help: try: `i_8 = i_8.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:133:5
+ |
+LL | / if i_16 > i16::MIN {
+LL | | i_16 -= 1;
+LL | | }
+ | |_____^ help: try: `i_16 = i_16.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:138:5
+ |
+LL | / if i_16 > i16::MIN {
+LL | | i_16 -= 1;
+LL | | }
+ | |_____^ help: try: `i_16 = i_16.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:143:5
+ |
+LL | / if i_16 != i16::MIN {
+LL | | i_16 -= 1;
+LL | | }
+ | |_____^ help: try: `i_16 = i_16.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:148:5
+ |
+LL | / if i_16 != i16::MIN {
+LL | | i_16 -= 1;
+LL | | }
+ | |_____^ help: try: `i_16 = i_16.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:158:5
+ |
+LL | / if i_32 > i32::MIN {
+LL | | i_32 -= 1;
+LL | | }
+ | |_____^ help: try: `i_32 = i_32.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:163:5
+ |
+LL | / if i_32 > i32::MIN {
+LL | | i_32 -= 1;
+LL | | }
+ | |_____^ help: try: `i_32 = i_32.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:168:5
+ |
+LL | / if i_32 != i32::MIN {
+LL | | i_32 -= 1;
+LL | | }
+ | |_____^ help: try: `i_32 = i_32.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:173:5
+ |
+LL | / if i_32 != i32::MIN {
+LL | | i_32 -= 1;
+LL | | }
+ | |_____^ help: try: `i_32 = i_32.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:183:5
+ |
+LL | / if i64::MIN < i_64 {
+LL | | i_64 -= 1;
+LL | | }
+ | |_____^ help: try: `i_64 = i_64.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:188:5
+ |
+LL | / if i64::MIN != i_64 {
+LL | | i_64 -= 1;
+LL | | }
+ | |_____^ help: try: `i_64 = i_64.saturating_sub(1);`
+
+error: implicitly performing saturating subtraction
+ --> $DIR/implicit_saturating_sub.rs:193:5
+ |
+LL | / if i64::MIN < i_64 {
+LL | | i_64 -= 1;
+LL | | }
+ | |_____^ help: try: `i_64 = i_64.saturating_sub(1);`
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inconsistent_digit_grouping.fixed b/src/tools/clippy/tests/ui/inconsistent_digit_grouping.fixed
new file mode 100644
index 000000000..dd683e7f7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inconsistent_digit_grouping.fixed
@@ -0,0 +1,47 @@
+// run-rustfix
+#[warn(clippy::inconsistent_digit_grouping)]
+#[deny(clippy::unreadable_literal)]
+#[allow(unused_variables, clippy::excessive_precision)]
+fn main() {
+ macro_rules! mac1 {
+ () => {
+ 1_23_456
+ };
+ }
+ macro_rules! mac2 {
+ () => {
+ 1_234.5678_f32
+ };
+ }
+
+ let good = (
+ 123,
+ 1_234,
+ 1_2345_6789,
+ 123_f32,
+ 1_234.12_f32,
+ 1_234.123_4_f32,
+ 1.123_456_7_f32,
+ );
+ let bad = (123_456, 12_345_678, 1_234_567, 1_234.567_8_f32, 1.234_567_8_f32);
+
+ // Test padding
+ let _ = 0x0010_0000;
+ let _ = 0x0100_0000;
+ let _ = 0x1000_0000;
+ let _ = 0x0001_0000_0000_u64;
+
+ // Test suggestion when fraction has no digits
+ let _: f32 = 123_456.;
+
+ // Test UUID formatted literal
+ let _: u128 = 0x12345678_1234_1234_1234_123456789012;
+
+ // Ignore literals in macros
+ let _ = mac1!();
+ let _ = mac2!();
+
+ // Issue #6096
+ // Allow separating exponent with '_'
+ let _ = 1.025_011_10_E0;
+}
diff --git a/src/tools/clippy/tests/ui/inconsistent_digit_grouping.rs b/src/tools/clippy/tests/ui/inconsistent_digit_grouping.rs
new file mode 100644
index 000000000..d5d27c853
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inconsistent_digit_grouping.rs
@@ -0,0 +1,47 @@
+// run-rustfix
+#[warn(clippy::inconsistent_digit_grouping)]
+#[deny(clippy::unreadable_literal)]
+#[allow(unused_variables, clippy::excessive_precision)]
+fn main() {
+ macro_rules! mac1 {
+ () => {
+ 1_23_456
+ };
+ }
+ macro_rules! mac2 {
+ () => {
+ 1_234.5678_f32
+ };
+ }
+
+ let good = (
+ 123,
+ 1_234,
+ 1_2345_6789,
+ 123_f32,
+ 1_234.12_f32,
+ 1_234.123_4_f32,
+ 1.123_456_7_f32,
+ );
+ let bad = (1_23_456, 1_234_5678, 1234_567, 1_234.5678_f32, 1.234_5678_f32);
+
+ // Test padding
+ let _ = 0x100000;
+ let _ = 0x1000000;
+ let _ = 0x10000000;
+ let _ = 0x100000000_u64;
+
+ // Test suggestion when fraction has no digits
+ let _: f32 = 1_23_456.;
+
+ // Test UUID formatted literal
+ let _: u128 = 0x12345678_1234_1234_1234_123456789012;
+
+ // Ignore literals in macros
+ let _ = mac1!();
+ let _ = mac2!();
+
+ // Issue #6096
+ // Allow separating exponent with '_'
+ let _ = 1.025_011_10_E0;
+}
diff --git a/src/tools/clippy/tests/ui/inconsistent_digit_grouping.stderr b/src/tools/clippy/tests/ui/inconsistent_digit_grouping.stderr
new file mode 100644
index 000000000..b8ac91554
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inconsistent_digit_grouping.stderr
@@ -0,0 +1,70 @@
+error: digits grouped inconsistently by underscores
+ --> $DIR/inconsistent_digit_grouping.rs:26:16
+ |
+LL | let bad = (1_23_456, 1_234_5678, 1234_567, 1_234.5678_f32, 1.234_5678_f32);
+ | ^^^^^^^^ help: consider: `123_456`
+ |
+ = note: `-D clippy::inconsistent-digit-grouping` implied by `-D warnings`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/inconsistent_digit_grouping.rs:26:26
+ |
+LL | let bad = (1_23_456, 1_234_5678, 1234_567, 1_234.5678_f32, 1.234_5678_f32);
+ | ^^^^^^^^^^ help: consider: `12_345_678`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/inconsistent_digit_grouping.rs:26:38
+ |
+LL | let bad = (1_23_456, 1_234_5678, 1234_567, 1_234.5678_f32, 1.234_5678_f32);
+ | ^^^^^^^^ help: consider: `1_234_567`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/inconsistent_digit_grouping.rs:26:48
+ |
+LL | let bad = (1_23_456, 1_234_5678, 1234_567, 1_234.5678_f32, 1.234_5678_f32);
+ | ^^^^^^^^^^^^^^ help: consider: `1_234.567_8_f32`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/inconsistent_digit_grouping.rs:26:64
+ |
+LL | let bad = (1_23_456, 1_234_5678, 1234_567, 1_234.5678_f32, 1.234_5678_f32);
+ | ^^^^^^^^^^^^^^ help: consider: `1.234_567_8_f32`
+
+error: long literal lacking separators
+ --> $DIR/inconsistent_digit_grouping.rs:29:13
+ |
+LL | let _ = 0x100000;
+ | ^^^^^^^^ help: consider: `0x0010_0000`
+ |
+note: the lint level is defined here
+ --> $DIR/inconsistent_digit_grouping.rs:3:8
+ |
+LL | #[deny(clippy::unreadable_literal)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: long literal lacking separators
+ --> $DIR/inconsistent_digit_grouping.rs:30:13
+ |
+LL | let _ = 0x1000000;
+ | ^^^^^^^^^ help: consider: `0x0100_0000`
+
+error: long literal lacking separators
+ --> $DIR/inconsistent_digit_grouping.rs:31:13
+ |
+LL | let _ = 0x10000000;
+ | ^^^^^^^^^^ help: consider: `0x1000_0000`
+
+error: long literal lacking separators
+ --> $DIR/inconsistent_digit_grouping.rs:32:13
+ |
+LL | let _ = 0x100000000_u64;
+ | ^^^^^^^^^^^^^^^ help: consider: `0x0001_0000_0000_u64`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/inconsistent_digit_grouping.rs:35:18
+ |
+LL | let _: f32 = 1_23_456.;
+ | ^^^^^^^^^ help: consider: `123_456.`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed
new file mode 100644
index 000000000..74ba2f1c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.fixed
@@ -0,0 +1,73 @@
+// run-rustfix
+#![warn(clippy::inconsistent_struct_constructor)]
+#![allow(clippy::redundant_field_names)]
+#![allow(clippy::unnecessary_operation)]
+#![allow(clippy::no_effect)]
+#![allow(dead_code)]
+
+#[derive(Default)]
+struct Foo {
+ x: i32,
+ y: i32,
+ z: i32,
+}
+
+macro_rules! new_foo {
+ () => {
+ let x = 1;
+ let y = 1;
+ let z = 1;
+ Foo { y, x, z }
+ };
+}
+
+mod without_base {
+ use super::Foo;
+
+ fn test() {
+ let x = 1;
+ let y = 1;
+ let z = 1;
+
+ // Should lint.
+ Foo { x, y, z };
+
+ // Should NOT lint.
+ // issue #7069.
+ new_foo!();
+
+ // Should NOT lint because the order is the same as in the definition.
+ Foo { x, y, z };
+
+ // Should NOT lint because z is not a shorthand init.
+ Foo { y, x, z: z };
+ }
+}
+
+mod with_base {
+ use super::Foo;
+
+ fn test() {
+ let x = 1;
+ let z = 1;
+
+ // Should lint.
+ Foo { x, z, ..Default::default() };
+
+ // Should NOT lint because the order is consistent with the definition.
+ Foo {
+ x,
+ z,
+ ..Default::default()
+ };
+
+ // Should NOT lint because z is not a shorthand init.
+ Foo {
+ z: z,
+ x,
+ ..Default::default()
+ };
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs
new file mode 100644
index 000000000..ba96e1e33
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.rs
@@ -0,0 +1,77 @@
+// run-rustfix
+#![warn(clippy::inconsistent_struct_constructor)]
+#![allow(clippy::redundant_field_names)]
+#![allow(clippy::unnecessary_operation)]
+#![allow(clippy::no_effect)]
+#![allow(dead_code)]
+
+#[derive(Default)]
+struct Foo {
+ x: i32,
+ y: i32,
+ z: i32,
+}
+
+macro_rules! new_foo {
+ () => {
+ let x = 1;
+ let y = 1;
+ let z = 1;
+ Foo { y, x, z }
+ };
+}
+
+mod without_base {
+ use super::Foo;
+
+ fn test() {
+ let x = 1;
+ let y = 1;
+ let z = 1;
+
+ // Should lint.
+ Foo { y, x, z };
+
+ // Should NOT lint.
+ // issue #7069.
+ new_foo!();
+
+ // Should NOT lint because the order is the same as in the definition.
+ Foo { x, y, z };
+
+ // Should NOT lint because z is not a shorthand init.
+ Foo { y, x, z: z };
+ }
+}
+
+mod with_base {
+ use super::Foo;
+
+ fn test() {
+ let x = 1;
+ let z = 1;
+
+ // Should lint.
+ Foo {
+ z,
+ x,
+ ..Default::default()
+ };
+
+ // Should NOT lint because the order is consistent with the definition.
+ Foo {
+ x,
+ z,
+ ..Default::default()
+ };
+
+ // Should NOT lint because z is not a shorthand init.
+ Foo {
+ z: z,
+ x,
+ ..Default::default()
+ };
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr
new file mode 100644
index 000000000..c90189e96
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inconsistent_struct_constructor.stderr
@@ -0,0 +1,20 @@
+error: struct constructor field order is inconsistent with struct definition field order
+ --> $DIR/inconsistent_struct_constructor.rs:33:9
+ |
+LL | Foo { y, x, z };
+ | ^^^^^^^^^^^^^^^ help: try: `Foo { x, y, z }`
+ |
+ = note: `-D clippy::inconsistent-struct-constructor` implied by `-D warnings`
+
+error: struct constructor field order is inconsistent with struct definition field order
+ --> $DIR/inconsistent_struct_constructor.rs:55:9
+ |
+LL | / Foo {
+LL | | z,
+LL | | x,
+LL | | ..Default::default()
+LL | | };
+ | |_________^ help: try: `Foo { x, z, ..Default::default() }`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs
new file mode 100644
index 000000000..c2c0c520d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.rs
@@ -0,0 +1,166 @@
+#![deny(clippy::index_refutable_slice)]
+
+enum SomeEnum<T> {
+ One(T),
+ Two(T),
+ Three(T),
+ Four(T),
+}
+
+fn lintable_examples() {
+ // Try with reference
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice {
+ println!("{}", slice[0]);
+ }
+
+ // Try with copy
+ let slice: Option<[u32; 3]> = Some([1, 2, 3]);
+ if let Some(slice) = slice {
+ println!("{}", slice[0]);
+ }
+
+ // Try with long slice and small indices
+ let slice: Option<[u32; 9]> = Some([1, 2, 3, 4, 5, 6, 7, 8, 9]);
+ if let Some(slice) = slice {
+ println!("{}", slice[2]);
+ println!("{}", slice[0]);
+ }
+
+ // Multiple bindings
+ let slice_wrapped: SomeEnum<[u32; 3]> = SomeEnum::One([5, 6, 7]);
+ if let SomeEnum::One(slice) | SomeEnum::Three(slice) = slice_wrapped {
+ println!("{}", slice[0]);
+ }
+
+ // Two lintable slices in one if let
+ let a_wrapped: SomeEnum<[u32; 3]> = SomeEnum::One([9, 5, 1]);
+ let b_wrapped: Option<[u32; 2]> = Some([4, 6]);
+ if let (SomeEnum::Three(a), Some(b)) = (a_wrapped, b_wrapped) {
+ println!("{} -> {}", a[2], b[1]);
+ }
+
+ // This requires the slice values to be borrowed as the slice values can only be
+ // borrowed and `String` doesn't implement `Copy`
+ let slice: Option<[String; 2]> = Some([String::from("1"), String::from("2")]);
+ if let Some(ref slice) = slice {
+ println!("{:?}", slice[1]);
+ }
+ println!("{:?}", slice);
+
+ // This should not suggest using the `ref` keyword as the scrutinee is already
+ // a reference
+ let slice: Option<[String; 2]> = Some([String::from("1"), String::from("2")]);
+ if let Some(slice) = &slice {
+ println!("{:?}", slice[0]);
+ }
+ println!("{:?}", slice);
+}
+
+fn slice_index_above_limit() {
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+
+ if let Some(slice) = slice {
+ // Would cause a panic, IDK
+ println!("{}", slice[7]);
+ }
+}
+
+fn slice_is_used() {
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice {
+ println!("{:?}", slice.len());
+ }
+
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice {
+ println!("{:?}", slice.to_vec());
+ }
+
+ let opt: Option<[String; 2]> = Some([String::from("Hello"), String::from("world")]);
+ if let Some(slice) = opt {
+ if !slice.is_empty() {
+ println!("first: {}", slice[0]);
+ }
+ }
+}
+
+/// The slice is used by an external function and should therefore not be linted
+fn check_slice_as_arg() {
+ fn is_interesting<T>(slice: &[T; 2]) -> bool {
+ !slice.is_empty()
+ }
+
+ let slice_wrapped: Option<[String; 2]> = Some([String::from("Hello"), String::from("world")]);
+ if let Some(slice) = &slice_wrapped {
+ if is_interesting(slice) {
+ println!("This is interesting {}", slice[0]);
+ }
+ }
+ println!("{:?}", slice_wrapped);
+}
+
+fn check_slice_in_struct() {
+ #[derive(Debug)]
+ struct Wrapper<'a> {
+ inner: Option<&'a [String]>,
+ is_awesome: bool,
+ }
+
+ impl<'a> Wrapper<'a> {
+ fn is_super_awesome(&self) -> bool {
+ self.is_awesome
+ }
+ }
+
+ let inner = &[String::from("New"), String::from("World")];
+ let wrap = Wrapper {
+ inner: Some(inner),
+ is_awesome: true,
+ };
+
+ // Test 1: Field access
+ if let Some(slice) = wrap.inner {
+ if wrap.is_awesome {
+ println!("This is awesome! {}", slice[0]);
+ }
+ }
+
+ // Test 2: function access
+ if let Some(slice) = wrap.inner {
+ if wrap.is_super_awesome() {
+ println!("This is super awesome! {}", slice[0]);
+ }
+ }
+ println!("Complete wrap: {:?}", wrap);
+}
+
+/// This would be a nice additional feature to have in the future, but adding it
+/// now would make the PR too large. This is therefore only a test that we don't
+/// lint cases we can't make a reasonable suggestion for
+fn mutable_slice_index() {
+ // Mut access
+ let mut slice: Option<[String; 1]> = Some([String::from("Penguin")]);
+ if let Some(ref mut slice) = slice {
+ slice[0] = String::from("Mr. Penguin");
+ }
+ println!("Use after modification: {:?}", slice);
+
+ // Mut access on reference
+ let mut slice: Option<[String; 1]> = Some([String::from("Cat")]);
+ if let Some(slice) = &mut slice {
+ slice[0] = String::from("Lord Meow Meow");
+ }
+ println!("Use after modification: {:?}", slice);
+}
+
+/// The lint will ignore bindings with sub patterns as it would be hard
+/// to build correct suggestions for these instances :)
+fn binding_with_sub_pattern() {
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice @ [_, _, _]) = slice {
+ println!("{:?}", slice[2]);
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr
new file mode 100644
index 000000000..a607df9b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/index_refutable_slice/if_let_slice_binding.stderr
@@ -0,0 +1,158 @@
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:13:17
+ |
+LL | if let Some(slice) = slice {
+ | ^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/if_let_slice_binding.rs:1:9
+ |
+LL | #![deny(clippy::index_refutable_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, ..]) = slice {
+ | ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{}", slice_0);
+ | ~~~~~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:19:17
+ |
+LL | if let Some(slice) = slice {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, ..]) = slice {
+ | ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{}", slice_0);
+ | ~~~~~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:25:17
+ |
+LL | if let Some(slice) = slice {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, _, slice_2, ..]) = slice {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL ~ println!("{}", slice_2);
+LL ~ println!("{}", slice_0);
+ |
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:32:26
+ |
+LL | if let SomeEnum::One(slice) | SomeEnum::Three(slice) = slice_wrapped {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let SomeEnum::One([slice_0, ..]) | SomeEnum::Three([slice_0, ..]) = slice_wrapped {
+ | ~~~~~~~~~~~~~ ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{}", slice_0);
+ | ~~~~~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:39:29
+ |
+LL | if let (SomeEnum::Three(a), Some(b)) = (a_wrapped, b_wrapped) {
+ | ^
+ |
+help: try using a slice pattern here
+ |
+LL | if let (SomeEnum::Three([_, _, a_2, ..]), Some(b)) = (a_wrapped, b_wrapped) {
+ | ~~~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{} -> {}", a_2, b[1]);
+ | ~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:39:38
+ |
+LL | if let (SomeEnum::Three(a), Some(b)) = (a_wrapped, b_wrapped) {
+ | ^
+ |
+help: try using a slice pattern here
+ |
+LL | if let (SomeEnum::Three(a), Some([_, b_1, ..])) = (a_wrapped, b_wrapped) {
+ | ~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{} -> {}", a[2], b_1);
+ | ~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:46:21
+ |
+LL | if let Some(ref slice) = slice {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let Some([_, ref slice_1, ..]) = slice {
+ | ~~~~~~~~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{:?}", slice_1);
+ | ~~~~~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:54:17
+ |
+LL | if let Some(slice) = &slice {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, ..]) = &slice {
+ | ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{:?}", slice_0);
+ | ~~~~~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:123:17
+ |
+LL | if let Some(slice) = wrap.inner {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, ..]) = wrap.inner {
+ | ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("This is awesome! {}", slice_0);
+ | ~~~~~~~
+
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/if_let_slice_binding.rs:130:17
+ |
+LL | if let Some(slice) = wrap.inner {
+ | ^^^^^
+ |
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, ..]) = wrap.inner {
+ | ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("This is super awesome! {}", slice_0);
+ | ~~~~~~~
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs
new file mode 100644
index 000000000..406e82083
--- /dev/null
+++ b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.rs
@@ -0,0 +1,28 @@
+#![deny(clippy::index_refutable_slice)]
+
+extern crate if_chain;
+use if_chain::if_chain;
+
+macro_rules! if_let_slice_macro {
+ () => {
+ // This would normally be linted
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice {
+ println!("{}", slice[0]);
+ }
+ };
+}
+
+fn main() {
+ // Don't lint this
+ if_let_slice_macro!();
+
+ // Do lint this
+ if_chain! {
+ let slice: Option<&[u32]> = Some(&[1, 2, 3]);
+ if let Some(slice) = slice;
+ then {
+ println!("{}", slice[0]);
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr
new file mode 100644
index 000000000..11b19428b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/index_refutable_slice/slice_indexing_in_macro.stderr
@@ -0,0 +1,22 @@
+error: this binding can be a slice pattern to avoid indexing
+ --> $DIR/slice_indexing_in_macro.rs:23:21
+ |
+LL | if let Some(slice) = slice;
+ | ^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/slice_indexing_in_macro.rs:1:9
+ |
+LL | #![deny(clippy::index_refutable_slice)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using a slice pattern here
+ |
+LL | if let Some([slice_0, ..]) = slice;
+ | ~~~~~~~~~~~~~
+help: and replace the index expressions here
+ |
+LL | println!("{}", slice_0);
+ | ~~~~~~~
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/indexing_slicing_index.rs b/src/tools/clippy/tests/ui/indexing_slicing_index.rs
new file mode 100644
index 000000000..45a430edc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/indexing_slicing_index.rs
@@ -0,0 +1,48 @@
+#![feature(inline_const)]
+#![warn(clippy::indexing_slicing)]
+// We also check the out_of_bounds_indexing lint here, because it lints similar things and
+// we want to avoid false positives.
+#![warn(clippy::out_of_bounds_indexing)]
+#![allow(const_err, clippy::no_effect, clippy::unnecessary_operation)]
+
+const ARR: [i32; 2] = [1, 2];
+const REF: &i32 = &ARR[idx()]; // Ok, should not produce stderr.
+const REF_ERR: &i32 = &ARR[idx4()]; // Ok, let rustc handle const contexts.
+
+const fn idx() -> usize {
+ 1
+}
+const fn idx4() -> usize {
+ 4
+}
+
+fn main() {
+ let x = [1, 2, 3, 4];
+ let index: usize = 1;
+ x[index];
+ x[4]; // Ok, let rustc's `unconditional_panic` lint handle `usize` indexing on arrays.
+ x[1 << 3]; // Ok, let rustc's `unconditional_panic` lint handle `usize` indexing on arrays.
+
+ x[0]; // Ok, should not produce stderr.
+ x[3]; // Ok, should not produce stderr.
+ x[const { idx() }]; // Ok, should not produce stderr.
+ x[const { idx4() }]; // Ok, let rustc's `unconditional_panic` lint handle `usize` indexing on arrays.
+ const { &ARR[idx()] }; // Ok, should not produce stderr.
+ const { &ARR[idx4()] }; // Ok, let rustc handle const contexts.
+
+ let y = &x;
+ y[0]; // Ok, referencing shouldn't affect this lint. See the issue 6021
+ y[4]; // Ok, rustc will handle references too.
+
+ let v = vec![0; 5];
+ v[0];
+ v[10];
+ v[1 << 3];
+
+ const N: usize = 15; // Out of bounds
+ const M: usize = 3; // In bounds
+ x[N]; // Ok, let rustc's `unconditional_panic` lint handle `usize` indexing on arrays.
+ x[M]; // Ok, should not produce stderr.
+ v[N];
+ v[M];
+}
diff --git a/src/tools/clippy/tests/ui/indexing_slicing_index.stderr b/src/tools/clippy/tests/ui/indexing_slicing_index.stderr
new file mode 100644
index 000000000..6ae700753
--- /dev/null
+++ b/src/tools/clippy/tests/ui/indexing_slicing_index.stderr
@@ -0,0 +1,64 @@
+error[E0080]: evaluation of `main::{constant#3}` failed
+ --> $DIR/indexing_slicing_index.rs:31:14
+ |
+LL | const { &ARR[idx4()] }; // Ok, let rustc handle const contexts.
+ | ^^^^^^^^^^^ index out of bounds: the length is 2 but the index is 4
+
+error[E0080]: erroneous constant used
+ --> $DIR/indexing_slicing_index.rs:31:5
+ |
+LL | const { &ARR[idx4()] }; // Ok, let rustc handle const contexts.
+ | ^^^^^^^^^^^^^^^^^^^^^^ referenced constant has errors
+
+error: indexing may panic
+ --> $DIR/indexing_slicing_index.rs:22:5
+ |
+LL | x[index];
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::indexing-slicing` implied by `-D warnings`
+ = help: consider using `.get(n)` or `.get_mut(n)` instead
+
+error: indexing may panic
+ --> $DIR/indexing_slicing_index.rs:38:5
+ |
+LL | v[0];
+ | ^^^^
+ |
+ = help: consider using `.get(n)` or `.get_mut(n)` instead
+
+error: indexing may panic
+ --> $DIR/indexing_slicing_index.rs:39:5
+ |
+LL | v[10];
+ | ^^^^^
+ |
+ = help: consider using `.get(n)` or `.get_mut(n)` instead
+
+error: indexing may panic
+ --> $DIR/indexing_slicing_index.rs:40:5
+ |
+LL | v[1 << 3];
+ | ^^^^^^^^^
+ |
+ = help: consider using `.get(n)` or `.get_mut(n)` instead
+
+error: indexing may panic
+ --> $DIR/indexing_slicing_index.rs:46:5
+ |
+LL | v[N];
+ | ^^^^
+ |
+ = help: consider using `.get(n)` or `.get_mut(n)` instead
+
+error: indexing may panic
+ --> $DIR/indexing_slicing_index.rs:47:5
+ |
+LL | v[M];
+ | ^^^^
+ |
+ = help: consider using `.get(n)` or `.get_mut(n)` instead
+
+error: aborting due to 8 previous errors
+
+For more information about this error, try `rustc --explain E0080`.
diff --git a/src/tools/clippy/tests/ui/indexing_slicing_slice.rs b/src/tools/clippy/tests/ui/indexing_slicing_slice.rs
new file mode 100644
index 000000000..7b107db39
--- /dev/null
+++ b/src/tools/clippy/tests/ui/indexing_slicing_slice.rs
@@ -0,0 +1,37 @@
+#![warn(clippy::indexing_slicing)]
+// We also check the out_of_bounds_indexing lint here, because it lints similar things and
+// we want to avoid false positives.
+#![warn(clippy::out_of_bounds_indexing)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation)]
+
+fn main() {
+ let x = [1, 2, 3, 4];
+ let index: usize = 1;
+ let index_from: usize = 2;
+ let index_to: usize = 3;
+ &x[index..];
+ &x[..index];
+ &x[index_from..index_to];
+ &x[index_from..][..index_to]; // Two lint reports, one for [index_from..] and another for [..index_to].
+ &x[5..][..10]; // Two lint reports, one for out of bounds [5..] and another for slicing [..10].
+ &x[0..][..3];
+ &x[1..][..5];
+
+ &x[0..].get(..3); // Ok, should not produce stderr.
+ &x[0..3]; // Ok, should not produce stderr.
+
+ let y = &x;
+ &y[1..2];
+ &y[0..=4];
+ &y[..=4];
+
+ &y[..]; // Ok, should not produce stderr.
+
+ let v = vec![0; 5];
+ &v[10..100];
+ &x[10..][..100]; // Two lint reports, one for [10..] and another for [..100].
+ &v[10..];
+ &v[..100];
+
+ &v[..]; // Ok, should not produce stderr.
+}
diff --git a/src/tools/clippy/tests/ui/indexing_slicing_slice.stderr b/src/tools/clippy/tests/ui/indexing_slicing_slice.stderr
new file mode 100644
index 000000000..f70722b92
--- /dev/null
+++ b/src/tools/clippy/tests/ui/indexing_slicing_slice.stderr
@@ -0,0 +1,125 @@
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:12:6
+ |
+LL | &x[index..];
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::indexing-slicing` implied by `-D warnings`
+ = help: consider using `.get(n..)` or .get_mut(n..)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:13:6
+ |
+LL | &x[..index];
+ | ^^^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:14:6
+ |
+LL | &x[index_from..index_to];
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `.get(n..m)` or `.get_mut(n..m)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:15:6
+ |
+LL | &x[index_from..][..index_to]; // Two lint reports, one for [index_from..] and another for [..index_to].
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:15:6
+ |
+LL | &x[index_from..][..index_to]; // Two lint reports, one for [index_from..] and another for [..index_to].
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider using `.get(n..)` or .get_mut(n..)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:16:6
+ |
+LL | &x[5..][..10]; // Two lint reports, one for out of bounds [5..] and another for slicing [..10].
+ | ^^^^^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: range is out of bounds
+ --> $DIR/indexing_slicing_slice.rs:16:8
+ |
+LL | &x[5..][..10]; // Two lint reports, one for out of bounds [5..] and another for slicing [..10].
+ | ^
+ |
+ = note: `-D clippy::out-of-bounds-indexing` implied by `-D warnings`
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:17:6
+ |
+LL | &x[0..][..3];
+ | ^^^^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:18:6
+ |
+LL | &x[1..][..5];
+ | ^^^^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: range is out of bounds
+ --> $DIR/indexing_slicing_slice.rs:25:12
+ |
+LL | &y[0..=4];
+ | ^
+
+error: range is out of bounds
+ --> $DIR/indexing_slicing_slice.rs:26:11
+ |
+LL | &y[..=4];
+ | ^
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:31:6
+ |
+LL | &v[10..100];
+ | ^^^^^^^^^^
+ |
+ = help: consider using `.get(n..m)` or `.get_mut(n..m)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:32:6
+ |
+LL | &x[10..][..100]; // Two lint reports, one for [10..] and another for [..100].
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: range is out of bounds
+ --> $DIR/indexing_slicing_slice.rs:32:8
+ |
+LL | &x[10..][..100]; // Two lint reports, one for [10..] and another for [..100].
+ | ^^
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:33:6
+ |
+LL | &v[10..];
+ | ^^^^^^^
+ |
+ = help: consider using `.get(n..)` or .get_mut(n..)` instead
+
+error: slicing may panic
+ --> $DIR/indexing_slicing_slice.rs:34:6
+ |
+LL | &v[..100];
+ | ^^^^^^^^
+ |
+ = help: consider using `.get(..n)`or `.get_mut(..n)` instead
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inefficient_to_string.fixed b/src/tools/clippy/tests/ui/inefficient_to_string.fixed
new file mode 100644
index 000000000..c972b9419
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inefficient_to_string.fixed
@@ -0,0 +1,31 @@
+// run-rustfix
+#![deny(clippy::inefficient_to_string)]
+
+use std::borrow::Cow;
+
+fn main() {
+ let rstr: &str = "hello";
+ let rrstr: &&str = &rstr;
+ let rrrstr: &&&str = &rrstr;
+ let _: String = rstr.to_string();
+ let _: String = (*rrstr).to_string();
+ let _: String = (**rrrstr).to_string();
+
+ let string: String = String::from("hello");
+ let rstring: &String = &string;
+ let rrstring: &&String = &rstring;
+ let rrrstring: &&&String = &rrstring;
+ let _: String = string.to_string();
+ let _: String = rstring.to_string();
+ let _: String = (*rrstring).to_string();
+ let _: String = (**rrrstring).to_string();
+
+ let cow: Cow<'_, str> = Cow::Borrowed("hello");
+ let rcow: &Cow<'_, str> = &cow;
+ let rrcow: &&Cow<'_, str> = &rcow;
+ let rrrcow: &&&Cow<'_, str> = &rrcow;
+ let _: String = cow.to_string();
+ let _: String = rcow.to_string();
+ let _: String = (*rrcow).to_string();
+ let _: String = (**rrrcow).to_string();
+}
diff --git a/src/tools/clippy/tests/ui/inefficient_to_string.rs b/src/tools/clippy/tests/ui/inefficient_to_string.rs
new file mode 100644
index 000000000..acdc55aa0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inefficient_to_string.rs
@@ -0,0 +1,31 @@
+// run-rustfix
+#![deny(clippy::inefficient_to_string)]
+
+use std::borrow::Cow;
+
+fn main() {
+ let rstr: &str = "hello";
+ let rrstr: &&str = &rstr;
+ let rrrstr: &&&str = &rrstr;
+ let _: String = rstr.to_string();
+ let _: String = rrstr.to_string();
+ let _: String = rrrstr.to_string();
+
+ let string: String = String::from("hello");
+ let rstring: &String = &string;
+ let rrstring: &&String = &rstring;
+ let rrrstring: &&&String = &rrstring;
+ let _: String = string.to_string();
+ let _: String = rstring.to_string();
+ let _: String = rrstring.to_string();
+ let _: String = rrrstring.to_string();
+
+ let cow: Cow<'_, str> = Cow::Borrowed("hello");
+ let rcow: &Cow<'_, str> = &cow;
+ let rrcow: &&Cow<'_, str> = &rcow;
+ let rrrcow: &&&Cow<'_, str> = &rrcow;
+ let _: String = cow.to_string();
+ let _: String = rcow.to_string();
+ let _: String = rrcow.to_string();
+ let _: String = rrrcow.to_string();
+}
diff --git a/src/tools/clippy/tests/ui/inefficient_to_string.stderr b/src/tools/clippy/tests/ui/inefficient_to_string.stderr
new file mode 100644
index 000000000..4be46161e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inefficient_to_string.stderr
@@ -0,0 +1,55 @@
+error: calling `to_string` on `&&str`
+ --> $DIR/inefficient_to_string.rs:11:21
+ |
+LL | let _: String = rrstr.to_string();
+ | ^^^^^^^^^^^^^^^^^ help: try dereferencing the receiver: `(*rrstr).to_string()`
+ |
+note: the lint level is defined here
+ --> $DIR/inefficient_to_string.rs:2:9
+ |
+LL | #![deny(clippy::inefficient_to_string)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: `&str` implements `ToString` through a slower blanket impl, but `str` has a fast specialization of `ToString`
+
+error: calling `to_string` on `&&&str`
+ --> $DIR/inefficient_to_string.rs:12:21
+ |
+LL | let _: String = rrrstr.to_string();
+ | ^^^^^^^^^^^^^^^^^^ help: try dereferencing the receiver: `(**rrrstr).to_string()`
+ |
+ = help: `&&str` implements `ToString` through a slower blanket impl, but `str` has a fast specialization of `ToString`
+
+error: calling `to_string` on `&&std::string::String`
+ --> $DIR/inefficient_to_string.rs:20:21
+ |
+LL | let _: String = rrstring.to_string();
+ | ^^^^^^^^^^^^^^^^^^^^ help: try dereferencing the receiver: `(*rrstring).to_string()`
+ |
+ = help: `&std::string::String` implements `ToString` through a slower blanket impl, but `std::string::String` has a fast specialization of `ToString`
+
+error: calling `to_string` on `&&&std::string::String`
+ --> $DIR/inefficient_to_string.rs:21:21
+ |
+LL | let _: String = rrrstring.to_string();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try dereferencing the receiver: `(**rrrstring).to_string()`
+ |
+ = help: `&&std::string::String` implements `ToString` through a slower blanket impl, but `std::string::String` has a fast specialization of `ToString`
+
+error: calling `to_string` on `&&std::borrow::Cow<str>`
+ --> $DIR/inefficient_to_string.rs:29:21
+ |
+LL | let _: String = rrcow.to_string();
+ | ^^^^^^^^^^^^^^^^^ help: try dereferencing the receiver: `(*rrcow).to_string()`
+ |
+ = help: `&std::borrow::Cow<str>` implements `ToString` through a slower blanket impl, but `std::borrow::Cow<str>` has a fast specialization of `ToString`
+
+error: calling `to_string` on `&&&std::borrow::Cow<str>`
+ --> $DIR/inefficient_to_string.rs:30:21
+ |
+LL | let _: String = rrrcow.to_string();
+ | ^^^^^^^^^^^^^^^^^^ help: try dereferencing the receiver: `(**rrrcow).to_string()`
+ |
+ = help: `&&std::borrow::Cow<str>` implements `ToString` through a slower blanket impl, but `std::borrow::Cow<str>` has a fast specialization of `ToString`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed b/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed
new file mode 100644
index 000000000..b8e40d995
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.fixed
@@ -0,0 +1,112 @@
+// run-rustfix
+#![feature(exhaustive_patterns, never_type)]
+#![allow(dead_code, unreachable_code, unused_variables)]
+#![allow(clippy::let_and_return)]
+
+enum SingleVariantEnum {
+ Variant(i32),
+}
+
+struct TupleStruct(i32);
+
+enum EmptyEnum {}
+
+macro_rules! match_enum {
+ ($param:expr) => {
+ let data = match $param {
+ SingleVariantEnum::Variant(i) => i,
+ };
+ };
+}
+
+fn infallible_destructuring_match_enum() {
+ let wrapper = SingleVariantEnum::Variant(0);
+
+ // This should lint!
+ let SingleVariantEnum::Variant(data) = wrapper;
+
+ // This shouldn't (inside macro)
+ match_enum!(wrapper);
+
+ // This shouldn't!
+ let data = match wrapper {
+ SingleVariantEnum::Variant(_) => -1,
+ };
+
+ // Neither should this!
+ let data = match wrapper {
+ SingleVariantEnum::Variant(i) => -1,
+ };
+
+ let SingleVariantEnum::Variant(data) = wrapper;
+}
+
+macro_rules! match_struct {
+ ($param:expr) => {
+ let data = match $param {
+ TupleStruct(i) => i,
+ };
+ };
+}
+
+fn infallible_destructuring_match_struct() {
+ let wrapper = TupleStruct(0);
+
+ // This should lint!
+ let TupleStruct(data) = wrapper;
+
+ // This shouldn't (inside macro)
+ match_struct!(wrapper);
+
+ // This shouldn't!
+ let data = match wrapper {
+ TupleStruct(_) => -1,
+ };
+
+ // Neither should this!
+ let data = match wrapper {
+ TupleStruct(i) => -1,
+ };
+
+ let TupleStruct(data) = wrapper;
+}
+
+macro_rules! match_never_enum {
+ ($param:expr) => {
+ let data = match $param {
+ Ok(i) => i,
+ };
+ };
+}
+
+fn never_enum() {
+ let wrapper: Result<i32, !> = Ok(23);
+
+ // This should lint!
+ let Ok(data) = wrapper;
+
+ // This shouldn't (inside macro)
+ match_never_enum!(wrapper);
+
+ // This shouldn't!
+ let data = match wrapper {
+ Ok(_) => -1,
+ };
+
+ // Neither should this!
+ let data = match wrapper {
+ Ok(i) => -1,
+ };
+
+ let Ok(data) = wrapper;
+}
+
+impl EmptyEnum {
+ fn match_on(&self) -> ! {
+ // The lint shouldn't pick this up, as `let` won't work here!
+ let data = match *self {};
+ data
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.rs b/src/tools/clippy/tests/ui/infallible_destructuring_match.rs
new file mode 100644
index 000000000..106cd438b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.rs
@@ -0,0 +1,118 @@
+// run-rustfix
+#![feature(exhaustive_patterns, never_type)]
+#![allow(dead_code, unreachable_code, unused_variables)]
+#![allow(clippy::let_and_return)]
+
+enum SingleVariantEnum {
+ Variant(i32),
+}
+
+struct TupleStruct(i32);
+
+enum EmptyEnum {}
+
+macro_rules! match_enum {
+ ($param:expr) => {
+ let data = match $param {
+ SingleVariantEnum::Variant(i) => i,
+ };
+ };
+}
+
+fn infallible_destructuring_match_enum() {
+ let wrapper = SingleVariantEnum::Variant(0);
+
+ // This should lint!
+ let data = match wrapper {
+ SingleVariantEnum::Variant(i) => i,
+ };
+
+ // This shouldn't (inside macro)
+ match_enum!(wrapper);
+
+ // This shouldn't!
+ let data = match wrapper {
+ SingleVariantEnum::Variant(_) => -1,
+ };
+
+ // Neither should this!
+ let data = match wrapper {
+ SingleVariantEnum::Variant(i) => -1,
+ };
+
+ let SingleVariantEnum::Variant(data) = wrapper;
+}
+
+macro_rules! match_struct {
+ ($param:expr) => {
+ let data = match $param {
+ TupleStruct(i) => i,
+ };
+ };
+}
+
+fn infallible_destructuring_match_struct() {
+ let wrapper = TupleStruct(0);
+
+ // This should lint!
+ let data = match wrapper {
+ TupleStruct(i) => i,
+ };
+
+ // This shouldn't (inside macro)
+ match_struct!(wrapper);
+
+ // This shouldn't!
+ let data = match wrapper {
+ TupleStruct(_) => -1,
+ };
+
+ // Neither should this!
+ let data = match wrapper {
+ TupleStruct(i) => -1,
+ };
+
+ let TupleStruct(data) = wrapper;
+}
+
+macro_rules! match_never_enum {
+ ($param:expr) => {
+ let data = match $param {
+ Ok(i) => i,
+ };
+ };
+}
+
+fn never_enum() {
+ let wrapper: Result<i32, !> = Ok(23);
+
+ // This should lint!
+ let data = match wrapper {
+ Ok(i) => i,
+ };
+
+ // This shouldn't (inside macro)
+ match_never_enum!(wrapper);
+
+ // This shouldn't!
+ let data = match wrapper {
+ Ok(_) => -1,
+ };
+
+ // Neither should this!
+ let data = match wrapper {
+ Ok(i) => -1,
+ };
+
+ let Ok(data) = wrapper;
+}
+
+impl EmptyEnum {
+ fn match_on(&self) -> ! {
+ // The lint shouldn't pick this up, as `let` won't work here!
+ let data = match *self {};
+ data
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr b/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr
new file mode 100644
index 000000000..1b78db420
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr
@@ -0,0 +1,28 @@
+error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let`
+ --> $DIR/infallible_destructuring_match.rs:26:5
+ |
+LL | / let data = match wrapper {
+LL | | SingleVariantEnum::Variant(i) => i,
+LL | | };
+ | |______^ help: try this: `let SingleVariantEnum::Variant(data) = wrapper;`
+ |
+ = note: `-D clippy::infallible-destructuring-match` implied by `-D warnings`
+
+error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let`
+ --> $DIR/infallible_destructuring_match.rs:58:5
+ |
+LL | / let data = match wrapper {
+LL | | TupleStruct(i) => i,
+LL | | };
+ | |______^ help: try this: `let TupleStruct(data) = wrapper;`
+
+error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let`
+ --> $DIR/infallible_destructuring_match.rs:90:5
+ |
+LL | / let data = match wrapper {
+LL | | Ok(i) => i,
+LL | | };
+ | |______^ help: try this: `let Ok(data) = wrapper;`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/infinite_iter.rs b/src/tools/clippy/tests/ui/infinite_iter.rs
new file mode 100644
index 000000000..a1e5fad0c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infinite_iter.rs
@@ -0,0 +1,68 @@
+use std::iter::repeat;
+fn square_is_lower_64(x: &u32) -> bool {
+ x * x < 64
+}
+
+#[allow(clippy::maybe_infinite_iter)]
+#[deny(clippy::infinite_iter)]
+fn infinite_iters() {
+ repeat(0_u8).collect::<Vec<_>>(); // infinite iter
+ (0..8_u32).take_while(square_is_lower_64).cycle().count(); // infinite iter
+ (0..8_u64).chain(0..).max(); // infinite iter
+ (0_usize..)
+ .chain([0usize, 1, 2].iter().cloned())
+ .skip_while(|x| *x != 42)
+ .min(); // infinite iter
+ (0..8_u32)
+ .rev()
+ .cycle()
+ .map(|x| x + 1_u32)
+ .for_each(|x| println!("{}", x)); // infinite iter
+ (0..3_u32).flat_map(|x| x..).sum::<u32>(); // infinite iter
+ (0_usize..).flat_map(|x| 0..x).product::<usize>(); // infinite iter
+ (0_u64..).filter(|x| x % 2 == 0).last(); // infinite iter
+ (0..42_u64).by_ref().last(); // not an infinite, because ranges are double-ended
+ (0..).next(); // iterator is not exhausted
+}
+
+#[deny(clippy::maybe_infinite_iter)]
+fn potential_infinite_iters() {
+ (0..).zip((0..).take_while(square_is_lower_64)).count(); // maybe infinite iter
+ repeat(42).take_while(|x| *x == 42).chain(0..42).max(); // maybe infinite iter
+ (1..)
+ .scan(0, |state, x| {
+ *state += x;
+ Some(*state)
+ })
+ .min(); // maybe infinite iter
+ (0..).find(|x| *x == 24); // maybe infinite iter
+ (0..).position(|x| x == 24); // maybe infinite iter
+ (0..).any(|x| x == 24); // maybe infinite iter
+ (0..).all(|x| x == 24); // maybe infinite iter
+
+ (0..).zip(0..42).take_while(|&(x, _)| x != 42).count(); // not infinite
+ repeat(42).take_while(|x| *x == 42).next(); // iterator is not exhausted
+}
+
+fn main() {
+ infinite_iters();
+ potential_infinite_iters();
+}
+
+mod finite_collect {
+ use std::collections::HashSet;
+
+ struct C;
+ impl FromIterator<i32> for C {
+ fn from_iter<I: IntoIterator<Item = i32>>(iter: I) -> Self {
+ C
+ }
+ }
+
+ fn check_collect() {
+ let _: HashSet<i32> = (0..).collect(); // Infinite iter
+
+ // Some data structures don't collect infinitely, such as `ArrayVec`
+ let _: C = (0..).collect();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/infinite_iter.stderr b/src/tools/clippy/tests/ui/infinite_iter.stderr
new file mode 100644
index 000000000..ba277e363
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infinite_iter.stderr
@@ -0,0 +1,109 @@
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:9:5
+ |
+LL | repeat(0_u8).collect::<Vec<_>>(); // infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/infinite_iter.rs:7:8
+ |
+LL | #[deny(clippy::infinite_iter)]
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:10:5
+ |
+LL | (0..8_u32).take_while(square_is_lower_64).cycle().count(); // infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:11:5
+ |
+LL | (0..8_u64).chain(0..).max(); // infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:16:5
+ |
+LL | / (0..8_u32)
+LL | | .rev()
+LL | | .cycle()
+LL | | .map(|x| x + 1_u32)
+LL | | .for_each(|x| println!("{}", x)); // infinite iter
+ | |________________________________________^
+
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:22:5
+ |
+LL | (0_usize..).flat_map(|x| 0..x).product::<usize>(); // infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:23:5
+ |
+LL | (0_u64..).filter(|x| x % 2 == 0).last(); // infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:30:5
+ |
+LL | (0..).zip((0..).take_while(square_is_lower_64)).count(); // maybe infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/infinite_iter.rs:28:8
+ |
+LL | #[deny(clippy::maybe_infinite_iter)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:31:5
+ |
+LL | repeat(42).take_while(|x| *x == 42).chain(0..42).max(); // maybe infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:32:5
+ |
+LL | / (1..)
+LL | | .scan(0, |state, x| {
+LL | | *state += x;
+LL | | Some(*state)
+LL | | })
+LL | | .min(); // maybe infinite iter
+ | |______________^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:38:5
+ |
+LL | (0..).find(|x| *x == 24); // maybe infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:39:5
+ |
+LL | (0..).position(|x| x == 24); // maybe infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:40:5
+ |
+LL | (0..).any(|x| x == 24); // maybe infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: possible infinite iteration detected
+ --> $DIR/infinite_iter.rs:41:5
+ |
+LL | (0..).all(|x| x == 24); // maybe infinite iter
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: infinite iteration detected
+ --> $DIR/infinite_iter.rs:63:31
+ |
+LL | let _: HashSet<i32> = (0..).collect(); // Infinite iter
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: `#[deny(clippy::infinite_iter)]` on by default
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/infinite_loop.rs b/src/tools/clippy/tests/ui/infinite_loop.rs
new file mode 100644
index 000000000..38e64b9ac
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infinite_loop.rs
@@ -0,0 +1,217 @@
+fn fn_val(i: i32) -> i32 {
+ unimplemented!()
+}
+fn fn_constref(i: &i32) -> i32 {
+ unimplemented!()
+}
+fn fn_mutref(i: &mut i32) {
+ unimplemented!()
+}
+fn fooi() -> i32 {
+ unimplemented!()
+}
+fn foob() -> bool {
+ unimplemented!()
+}
+
+fn immutable_condition() {
+ // Should warn when all vars mentioned are immutable
+ let y = 0;
+ while y < 10 {
+ println!("KO - y is immutable");
+ }
+
+ let x = 0;
+ while y < 10 && x < 3 {
+ let mut k = 1;
+ k += 2;
+ println!("KO - x and y immutable");
+ }
+
+ let cond = false;
+ while !cond {
+ println!("KO - cond immutable");
+ }
+
+ let mut i = 0;
+ while y < 10 && i < 3 {
+ i += 1;
+ println!("OK - i is mutable");
+ }
+
+ let mut mut_cond = false;
+ while !mut_cond || cond {
+ mut_cond = true;
+ println!("OK - mut_cond is mutable");
+ }
+
+ while fooi() < x {
+ println!("OK - Fn call results may vary");
+ }
+
+ while foob() {
+ println!("OK - Fn call results may vary");
+ }
+
+ let mut a = 0;
+ let mut c = move || {
+ while a < 5 {
+ a += 1;
+ println!("OK - a is mutable");
+ }
+ };
+ c();
+
+ let mut tup = (0, 0);
+ while tup.0 < 5 {
+ tup.0 += 1;
+ println!("OK - tup.0 gets mutated")
+ }
+}
+
+fn unused_var() {
+ // Should warn when a (mutable) var is not used in while body
+ let (mut i, mut j) = (0, 0);
+
+ while i < 3 {
+ j = 3;
+ println!("KO - i not mentioned");
+ }
+
+ while i < 3 && j > 0 {
+ println!("KO - i and j not mentioned");
+ }
+
+ while i < 3 {
+ let mut i = 5;
+ fn_mutref(&mut i);
+ println!("KO - shadowed");
+ }
+
+ while i < 3 && j > 0 {
+ i = 5;
+ println!("OK - i in cond and mentioned");
+ }
+}
+
+fn used_immutable() {
+ let mut i = 0;
+
+ while i < 3 {
+ fn_constref(&i);
+ println!("KO - const reference");
+ }
+
+ while i < 3 {
+ fn_val(i);
+ println!("KO - passed by value");
+ }
+
+ while i < 3 {
+ println!("OK - passed by mutable reference");
+ fn_mutref(&mut i)
+ }
+
+ while i < 3 {
+ fn_mutref(&mut i);
+ println!("OK - passed by mutable reference");
+ }
+}
+
+const N: i32 = 5;
+const B: bool = false;
+
+fn consts() {
+ while false {
+ println!("Constants are not linted");
+ }
+
+ while B {
+ println!("Constants are not linted");
+ }
+
+ while N > 0 {
+ println!("Constants are not linted");
+ }
+}
+
+use std::cell::Cell;
+
+fn maybe_i_mutate(i: &Cell<bool>) {
+ unimplemented!()
+}
+
+fn internally_mutable() {
+ let b = Cell::new(true);
+
+ while b.get() {
+ // b cannot be silently coerced to `bool`
+ maybe_i_mutate(&b);
+ println!("OK - Method call within condition");
+ }
+}
+
+struct Counter {
+ count: usize,
+}
+
+impl Counter {
+ fn inc(&mut self) {
+ self.count += 1;
+ }
+
+ fn inc_n(&mut self, n: usize) {
+ while self.count < n {
+ self.inc();
+ }
+ println!("OK - self borrowed mutably");
+ }
+
+ fn print_n(&self, n: usize) {
+ while self.count < n {
+ println!("KO - {} is not mutated", self.count);
+ }
+ }
+}
+
+fn while_loop_with_break_and_return() {
+ let y = 0;
+ while y < 10 {
+ if y == 0 {
+ break;
+ }
+ println!("KO - loop contains break");
+ }
+
+ while y < 10 {
+ if y == 0 {
+ return;
+ }
+ println!("KO - loop contains return");
+ }
+}
+
+fn immutable_condition_false_positive(mut n: u64) -> u32 {
+ let mut count = 0;
+ while {
+ n >>= 1;
+ n != 0
+ } {
+ count += 1;
+ }
+ count
+}
+
+fn main() {
+ immutable_condition();
+ unused_var();
+ used_immutable();
+ internally_mutable();
+ immutable_condition_false_positive(5);
+
+ let mut c = Counter { count: 0 };
+ c.inc_n(5);
+ c.print_n(2);
+
+ while_loop_with_break_and_return();
+}
diff --git a/src/tools/clippy/tests/ui/infinite_loop.stderr b/src/tools/clippy/tests/ui/infinite_loop.stderr
new file mode 100644
index 000000000..4ec7d900a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/infinite_loop.stderr
@@ -0,0 +1,95 @@
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:20:11
+ |
+LL | while y < 10 {
+ | ^^^^^^
+ |
+ = note: `#[deny(clippy::while_immutable_condition)]` on by default
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:25:11
+ |
+LL | while y < 10 && x < 3 {
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:32:11
+ |
+LL | while !cond {
+ | ^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:76:11
+ |
+LL | while i < 3 {
+ | ^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:81:11
+ |
+LL | while i < 3 && j > 0 {
+ | ^^^^^^^^^^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:85:11
+ |
+LL | while i < 3 {
+ | ^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:100:11
+ |
+LL | while i < 3 {
+ | ^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:105:11
+ |
+LL | while i < 3 {
+ | ^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:171:15
+ |
+LL | while self.count < n {
+ | ^^^^^^^^^^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:179:11
+ |
+LL | while y < 10 {
+ | ^^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+ = note: this loop contains `return`s or `break`s
+ = help: rewrite it as `if cond { loop { } }`
+
+error: variables in the condition are not mutated in the loop body
+ --> $DIR/infinite_loop.rs:186:11
+ |
+LL | while y < 10 {
+ | ^^^^^^
+ |
+ = note: this may lead to an infinite or to a never running loop
+ = note: this loop contains `return`s or `break`s
+ = help: rewrite it as `if cond { loop { } }`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inherent_to_string.rs b/src/tools/clippy/tests/ui/inherent_to_string.rs
new file mode 100644
index 000000000..aeb0a0c1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inherent_to_string.rs
@@ -0,0 +1,106 @@
+#![warn(clippy::inherent_to_string)]
+#![deny(clippy::inherent_to_string_shadow_display)]
+
+use std::fmt;
+
+trait FalsePositive {
+ fn to_string(&self) -> String;
+}
+
+struct A;
+struct B;
+struct C;
+struct D;
+struct E;
+struct F;
+struct G;
+
+impl A {
+ // Should be detected; emit warning
+ fn to_string(&self) -> String {
+ "A.to_string()".to_string()
+ }
+
+ // Should not be detected as it does not match the function signature
+ fn to_str(&self) -> String {
+ "A.to_str()".to_string()
+ }
+}
+
+// Should not be detected as it is a free function
+fn to_string() -> String {
+ "free to_string()".to_string()
+}
+
+impl B {
+ // Should not be detected, wrong return type
+ fn to_string(&self) -> i32 {
+ 42
+ }
+}
+
+impl C {
+ // Should be detected and emit error as C also implements Display
+ fn to_string(&self) -> String {
+ "C.to_string()".to_string()
+ }
+}
+
+impl fmt::Display for C {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "impl Display for C")
+ }
+}
+
+impl FalsePositive for D {
+ // Should not be detected, as it is a trait function
+ fn to_string(&self) -> String {
+ "impl FalsePositive for D".to_string()
+ }
+}
+
+impl E {
+ // Should not be detected, as it is not bound to an instance
+ fn to_string() -> String {
+ "E::to_string()".to_string()
+ }
+}
+
+impl F {
+ // Should not be detected, as it does not match the function signature
+ fn to_string(&self, _i: i32) -> String {
+ "F.to_string()".to_string()
+ }
+}
+
+impl G {
+ // Should not be detected, as it does not match the function signature
+ fn to_string<const _N: usize>(&self) -> String {
+ "G.to_string()".to_string()
+ }
+}
+
+fn main() {
+ let a = A;
+ a.to_string();
+ a.to_str();
+
+ to_string();
+
+ let b = B;
+ b.to_string();
+
+ let c = C;
+ C.to_string();
+
+ let d = D;
+ d.to_string();
+
+ E::to_string();
+
+ let f = F;
+ f.to_string(1);
+
+ let g = G;
+ g.to_string::<1>();
+}
diff --git a/src/tools/clippy/tests/ui/inherent_to_string.stderr b/src/tools/clippy/tests/ui/inherent_to_string.stderr
new file mode 100644
index 000000000..4f331f5be
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inherent_to_string.stderr
@@ -0,0 +1,28 @@
+error: implementation of inherent method `to_string(&self) -> String` for type `A`
+ --> $DIR/inherent_to_string.rs:20:5
+ |
+LL | / fn to_string(&self) -> String {
+LL | | "A.to_string()".to_string()
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::inherent-to-string` implied by `-D warnings`
+ = help: implement trait `Display` for type `A` instead
+
+error: type `C` implements inherent method `to_string(&self) -> String` which shadows the implementation of `Display`
+ --> $DIR/inherent_to_string.rs:44:5
+ |
+LL | / fn to_string(&self) -> String {
+LL | | "C.to_string()".to_string()
+LL | | }
+ | |_____^
+ |
+note: the lint level is defined here
+ --> $DIR/inherent_to_string.rs:2:9
+ |
+LL | #![deny(clippy::inherent_to_string_shadow_display)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: remove the inherent method from type `C`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inline_fn_without_body.fixed b/src/tools/clippy/tests/ui/inline_fn_without_body.fixed
new file mode 100644
index 000000000..fe21a71a4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inline_fn_without_body.fixed
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![warn(clippy::inline_fn_without_body)]
+#![allow(clippy::inline_always)]
+
+trait Foo {
+ fn default_inline();
+
+ fn always_inline();
+
+ fn never_inline();
+
+ #[inline]
+ fn has_body() {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/inline_fn_without_body.rs b/src/tools/clippy/tests/ui/inline_fn_without_body.rs
new file mode 100644
index 000000000..507469894
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inline_fn_without_body.rs
@@ -0,0 +1,20 @@
+// run-rustfix
+
+#![warn(clippy::inline_fn_without_body)]
+#![allow(clippy::inline_always)]
+
+trait Foo {
+ #[inline]
+ fn default_inline();
+
+ #[inline(always)]
+ fn always_inline();
+
+ #[inline(never)]
+ fn never_inline();
+
+ #[inline]
+ fn has_body() {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/inline_fn_without_body.stderr b/src/tools/clippy/tests/ui/inline_fn_without_body.stderr
new file mode 100644
index 000000000..32d35e209
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inline_fn_without_body.stderr
@@ -0,0 +1,28 @@
+error: use of `#[inline]` on trait method `default_inline` which has no body
+ --> $DIR/inline_fn_without_body.rs:7:5
+ |
+LL | #[inline]
+ | _____-^^^^^^^^
+LL | | fn default_inline();
+ | |____- help: remove
+ |
+ = note: `-D clippy::inline-fn-without-body` implied by `-D warnings`
+
+error: use of `#[inline]` on trait method `always_inline` which has no body
+ --> $DIR/inline_fn_without_body.rs:10:5
+ |
+LL | #[inline(always)]
+ | _____-^^^^^^^^^^^^^^^^
+LL | | fn always_inline();
+ | |____- help: remove
+
+error: use of `#[inline]` on trait method `never_inline` which has no body
+ --> $DIR/inline_fn_without_body.rs:13:5
+ |
+LL | #[inline(never)]
+ | _____-^^^^^^^^^^^^^^^
+LL | | fn never_inline();
+ | |____- help: remove
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/inspect_for_each.rs b/src/tools/clippy/tests/ui/inspect_for_each.rs
new file mode 100644
index 000000000..7fe45c83b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inspect_for_each.rs
@@ -0,0 +1,22 @@
+#![warn(clippy::inspect_for_each)]
+
+fn main() {
+ let a: Vec<usize> = vec![1, 2, 3, 4, 5];
+
+ let mut b: Vec<usize> = Vec::new();
+ a.into_iter().inspect(|x| assert!(*x > 0)).for_each(|x| {
+ let y = do_some(x);
+ let z = do_more(y);
+ b.push(z);
+ });
+
+ assert_eq!(b, vec![4, 5, 6, 7, 8]);
+}
+
+fn do_some(a: usize) -> usize {
+ a + 1
+}
+
+fn do_more(a: usize) -> usize {
+ a + 2
+}
diff --git a/src/tools/clippy/tests/ui/inspect_for_each.stderr b/src/tools/clippy/tests/ui/inspect_for_each.stderr
new file mode 100644
index 000000000..9f976bb74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/inspect_for_each.stderr
@@ -0,0 +1,16 @@
+error: called `inspect(..).for_each(..)` on an `Iterator`
+ --> $DIR/inspect_for_each.rs:7:19
+ |
+LL | a.into_iter().inspect(|x| assert!(*x > 0)).for_each(|x| {
+ | ___________________^
+LL | | let y = do_some(x);
+LL | | let z = do_more(y);
+LL | | b.push(z);
+LL | | });
+ | |______^
+ |
+ = note: `-D clippy::inspect-for-each` implied by `-D warnings`
+ = help: move the code from `inspect(..)` to `for_each(..)` and remove the `inspect(..)`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/int_plus_one.fixed b/src/tools/clippy/tests/ui/int_plus_one.fixed
new file mode 100644
index 000000000..642830f24
--- /dev/null
+++ b/src/tools/clippy/tests/ui/int_plus_one.fixed
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#[allow(clippy::no_effect, clippy::unnecessary_operation)]
+#[warn(clippy::int_plus_one)]
+fn main() {
+ let x = 1i32;
+ let y = 0i32;
+
+ let _ = x > y;
+ let _ = y < x;
+
+ let _ = x > y;
+ let _ = y < x;
+
+ let _ = x > y; // should be ok
+ let _ = y < x; // should be ok
+}
diff --git a/src/tools/clippy/tests/ui/int_plus_one.rs b/src/tools/clippy/tests/ui/int_plus_one.rs
new file mode 100644
index 000000000..0755a0c79
--- /dev/null
+++ b/src/tools/clippy/tests/ui/int_plus_one.rs
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#[allow(clippy::no_effect, clippy::unnecessary_operation)]
+#[warn(clippy::int_plus_one)]
+fn main() {
+ let x = 1i32;
+ let y = 0i32;
+
+ let _ = x >= y + 1;
+ let _ = y + 1 <= x;
+
+ let _ = x - 1 >= y;
+ let _ = y <= x - 1;
+
+ let _ = x > y; // should be ok
+ let _ = y < x; // should be ok
+}
diff --git a/src/tools/clippy/tests/ui/int_plus_one.stderr b/src/tools/clippy/tests/ui/int_plus_one.stderr
new file mode 100644
index 000000000..c5b020ba8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/int_plus_one.stderr
@@ -0,0 +1,28 @@
+error: unnecessary `>= y + 1` or `x - 1 >=`
+ --> $DIR/int_plus_one.rs:9:13
+ |
+LL | let _ = x >= y + 1;
+ | ^^^^^^^^^^ help: change it to: `x > y`
+ |
+ = note: `-D clippy::int-plus-one` implied by `-D warnings`
+
+error: unnecessary `>= y + 1` or `x - 1 >=`
+ --> $DIR/int_plus_one.rs:10:13
+ |
+LL | let _ = y + 1 <= x;
+ | ^^^^^^^^^^ help: change it to: `y < x`
+
+error: unnecessary `>= y + 1` or `x - 1 >=`
+ --> $DIR/int_plus_one.rs:12:13
+ |
+LL | let _ = x - 1 >= y;
+ | ^^^^^^^^^^ help: change it to: `x > y`
+
+error: unnecessary `>= y + 1` or `x - 1 >=`
+ --> $DIR/int_plus_one.rs:13:13
+ |
+LL | let _ = y <= x - 1;
+ | ^^^^^^^^^^ help: change it to: `y < x`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/integer_arithmetic.rs b/src/tools/clippy/tests/ui/integer_arithmetic.rs
new file mode 100644
index 000000000..67f24b454
--- /dev/null
+++ b/src/tools/clippy/tests/ui/integer_arithmetic.rs
@@ -0,0 +1,102 @@
+#![warn(clippy::integer_arithmetic, clippy::float_arithmetic)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, clippy::op_ref)]
+
+#[rustfmt::skip]
+fn main() {
+ let mut i = 1i32;
+ let mut var1 = 0i32;
+ let mut var2 = -1i32;
+ 1 + i;
+ i * 2;
+ 1 %
+ i / 2; // no error, this is part of the expression in the preceding line
+ i - 2 + 2 - i;
+ -i;
+ i >> 1;
+ i << 1;
+
+ // no error, overflows are checked by `overflowing_literals`
+ -1;
+ -(-1);
+
+ i & 1; // no wrapping
+ i | 1;
+ i ^ 1;
+
+ i += 1;
+ i -= 1;
+ i *= 2;
+ i /= 2;
+ i /= 0;
+ i /= -1;
+ i /= var1;
+ i /= var2;
+ i %= 2;
+ i %= 0;
+ i %= -1;
+ i %= var1;
+ i %= var2;
+ i <<= 3;
+ i >>= 2;
+
+ // no errors
+ i |= 1;
+ i &= 1;
+ i ^= i;
+
+ // No errors for the following items because they are constant expressions
+ enum Foo {
+ Bar = -2,
+ }
+ struct Baz([i32; 1 + 1]);
+ union Qux {
+ field: [i32; 1 + 1],
+ }
+ type Alias = [i32; 1 + 1];
+
+ const FOO: i32 = -2;
+ static BAR: i32 = -2;
+
+ let _: [i32; 1 + 1] = [0, 0];
+
+ let _: [i32; 1 + 1] = {
+ let a: [i32; 1 + 1] = [0, 0];
+ a
+ };
+
+ trait Trait {
+ const ASSOC: i32 = 1 + 1;
+ }
+
+ impl Trait for Foo {
+ const ASSOC: i32 = {
+ let _: [i32; 1 + 1];
+ fn foo() {}
+ 1 + 1
+ };
+ }
+}
+
+// warn on references as well! (#5328)
+pub fn int_arith_ref() {
+ 3 + &1;
+ &3 + 1;
+ &3 + &1;
+}
+
+pub fn foo(x: &i32) -> i32 {
+ let a = 5;
+ a + x
+}
+
+pub fn bar(x: &i32, y: &i32) -> i32 {
+ x + y
+}
+
+pub fn baz(x: i32, y: &i32) -> i32 {
+ x + y
+}
+
+pub fn qux(x: i32, y: i32) -> i32 {
+ (&x + &y)
+}
diff --git a/src/tools/clippy/tests/ui/integer_arithmetic.stderr b/src/tools/clippy/tests/ui/integer_arithmetic.stderr
new file mode 100644
index 000000000..9a795b1f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/integer_arithmetic.stderr
@@ -0,0 +1,169 @@
+error: this operation will panic at runtime
+ --> $DIR/integer_arithmetic.rs:30:5
+ |
+LL | i /= 0;
+ | ^^^^^^ attempt to divide `_` by zero
+ |
+ = note: `#[deny(unconditional_panic)]` on by default
+
+error: this operation will panic at runtime
+ --> $DIR/integer_arithmetic.rs:35:5
+ |
+LL | i %= 0;
+ | ^^^^^^ attempt to calculate the remainder of `_` with a divisor of zero
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:9:5
+ |
+LL | 1 + i;
+ | ^^^^^
+ |
+ = note: `-D clippy::integer-arithmetic` implied by `-D warnings`
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:10:5
+ |
+LL | i * 2;
+ | ^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:11:5
+ |
+LL | / 1 %
+LL | | i / 2; // no error, this is part of the expression in the preceding line
+ | |_____^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:13:5
+ |
+LL | i - 2 + 2 - i;
+ | ^^^^^^^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:14:5
+ |
+LL | -i;
+ | ^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:15:5
+ |
+LL | i >> 1;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:16:5
+ |
+LL | i << 1;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:26:5
+ |
+LL | i += 1;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:27:5
+ |
+LL | i -= 1;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:28:5
+ |
+LL | i *= 2;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:31:11
+ |
+LL | i /= -1;
+ | ^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:32:5
+ |
+LL | i /= var1;
+ | ^^^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:33:5
+ |
+LL | i /= var2;
+ | ^^^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:36:11
+ |
+LL | i %= -1;
+ | ^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:37:5
+ |
+LL | i %= var1;
+ | ^^^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:38:5
+ |
+LL | i %= var2;
+ | ^^^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:39:5
+ |
+LL | i <<= 3;
+ | ^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:40:5
+ |
+LL | i >>= 2;
+ | ^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:82:5
+ |
+LL | 3 + &1;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:83:5
+ |
+LL | &3 + 1;
+ | ^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:84:5
+ |
+LL | &3 + &1;
+ | ^^^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:89:5
+ |
+LL | a + x
+ | ^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:93:5
+ |
+LL | x + y
+ | ^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:97:5
+ |
+LL | x + y
+ | ^^^^^
+
+error: integer arithmetic detected
+ --> $DIR/integer_arithmetic.rs:101:5
+ |
+LL | (&x + &y)
+ | ^^^^^^^^^
+
+error: aborting due to 27 previous errors
+
diff --git a/src/tools/clippy/tests/ui/integer_division.rs b/src/tools/clippy/tests/ui/integer_division.rs
new file mode 100644
index 000000000..800c75257
--- /dev/null
+++ b/src/tools/clippy/tests/ui/integer_division.rs
@@ -0,0 +1,9 @@
+#![warn(clippy::integer_division)]
+
+fn main() {
+ let two = 2;
+ let n = 1 / 2;
+ let o = 1 / two;
+ let p = two / 4;
+ let x = 1. / 2.0;
+}
diff --git a/src/tools/clippy/tests/ui/integer_division.stderr b/src/tools/clippy/tests/ui/integer_division.stderr
new file mode 100644
index 000000000..cbb7f8814
--- /dev/null
+++ b/src/tools/clippy/tests/ui/integer_division.stderr
@@ -0,0 +1,27 @@
+error: integer division
+ --> $DIR/integer_division.rs:5:13
+ |
+LL | let n = 1 / 2;
+ | ^^^^^
+ |
+ = note: `-D clippy::integer-division` implied by `-D warnings`
+ = help: division of integers may cause loss of precision. consider using floats
+
+error: integer division
+ --> $DIR/integer_division.rs:6:13
+ |
+LL | let o = 1 / two;
+ | ^^^^^^^
+ |
+ = help: division of integers may cause loss of precision. consider using floats
+
+error: integer division
+ --> $DIR/integer_division.rs:7:13
+ |
+LL | let p = two / 4;
+ | ^^^^^^^
+ |
+ = help: division of integers may cause loss of precision. consider using floats
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/into_iter_on_ref.fixed b/src/tools/clippy/tests/ui/into_iter_on_ref.fixed
new file mode 100644
index 000000000..b77f17944
--- /dev/null
+++ b/src/tools/clippy/tests/ui/into_iter_on_ref.fixed
@@ -0,0 +1,45 @@
+// run-rustfix
+#![allow(clippy::useless_vec, clippy::needless_borrow)]
+#![warn(clippy::into_iter_on_ref)]
+
+struct X;
+use std::collections::*;
+
+fn main() {
+ for _ in &[1, 2, 3] {}
+ for _ in vec![X, X] {}
+ for _ in &vec![X, X] {}
+
+ let _ = vec![1, 2, 3].into_iter();
+ let _ = (&vec![1, 2, 3]).iter(); //~ WARN equivalent to .iter()
+ let _ = vec![1, 2, 3].into_boxed_slice().iter(); //~ WARN equivalent to .iter()
+ let _ = std::rc::Rc::from(&[X][..]).iter(); //~ WARN equivalent to .iter()
+ let _ = std::sync::Arc::from(&[X][..]).iter(); //~ WARN equivalent to .iter()
+
+ let _ = (&&&&&&&[1, 2, 3]).iter(); //~ ERROR equivalent to .iter()
+ let _ = (&&&&mut &&&[1, 2, 3]).iter(); //~ ERROR equivalent to .iter()
+ let _ = (&mut &mut &mut [1, 2, 3]).iter_mut(); //~ ERROR equivalent to .iter_mut()
+
+ let _ = (&Some(4)).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut Some(5)).iter_mut(); //~ WARN equivalent to .iter_mut()
+ let _ = (&Ok::<_, i32>(6)).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut Err::<i32, _>(7)).iter_mut(); //~ WARN equivalent to .iter_mut()
+ let _ = (&Vec::<i32>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut Vec::<i32>::new()).iter_mut(); //~ WARN equivalent to .iter_mut()
+ let _ = (&BTreeMap::<i32, u64>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut BTreeMap::<i32, u64>::new()).iter_mut(); //~ WARN equivalent to .iter_mut()
+ let _ = (&VecDeque::<i32>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut VecDeque::<i32>::new()).iter_mut(); //~ WARN equivalent to .iter_mut()
+ let _ = (&LinkedList::<i32>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut LinkedList::<i32>::new()).iter_mut(); //~ WARN equivalent to .iter_mut()
+ let _ = (&HashMap::<i32, u64>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut HashMap::<i32, u64>::new()).iter_mut(); //~ WARN equivalent to .iter_mut()
+
+ let _ = (&BTreeSet::<i32>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&BinaryHeap::<i32>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = (&HashSet::<i32>::new()).iter(); //~ WARN equivalent to .iter()
+ let _ = std::path::Path::new("12/34").iter(); //~ WARN equivalent to .iter()
+ let _ = std::path::PathBuf::from("12/34").iter(); //~ ERROR equivalent to .iter()
+
+ let _ = (&[1, 2, 3]).iter().next(); //~ WARN equivalent to .iter()
+}
diff --git a/src/tools/clippy/tests/ui/into_iter_on_ref.rs b/src/tools/clippy/tests/ui/into_iter_on_ref.rs
new file mode 100644
index 000000000..3854bb05a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/into_iter_on_ref.rs
@@ -0,0 +1,45 @@
+// run-rustfix
+#![allow(clippy::useless_vec, clippy::needless_borrow)]
+#![warn(clippy::into_iter_on_ref)]
+
+struct X;
+use std::collections::*;
+
+fn main() {
+ for _ in &[1, 2, 3] {}
+ for _ in vec![X, X] {}
+ for _ in &vec![X, X] {}
+
+ let _ = vec![1, 2, 3].into_iter();
+ let _ = (&vec![1, 2, 3]).into_iter(); //~ WARN equivalent to .iter()
+ let _ = vec![1, 2, 3].into_boxed_slice().into_iter(); //~ WARN equivalent to .iter()
+ let _ = std::rc::Rc::from(&[X][..]).into_iter(); //~ WARN equivalent to .iter()
+ let _ = std::sync::Arc::from(&[X][..]).into_iter(); //~ WARN equivalent to .iter()
+
+ let _ = (&&&&&&&[1, 2, 3]).into_iter(); //~ ERROR equivalent to .iter()
+ let _ = (&&&&mut &&&[1, 2, 3]).into_iter(); //~ ERROR equivalent to .iter()
+ let _ = (&mut &mut &mut [1, 2, 3]).into_iter(); //~ ERROR equivalent to .iter_mut()
+
+ let _ = (&Some(4)).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut Some(5)).into_iter(); //~ WARN equivalent to .iter_mut()
+ let _ = (&Ok::<_, i32>(6)).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut Err::<i32, _>(7)).into_iter(); //~ WARN equivalent to .iter_mut()
+ let _ = (&Vec::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut Vec::<i32>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ let _ = (&BTreeMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut BTreeMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ let _ = (&VecDeque::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut VecDeque::<i32>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ let _ = (&LinkedList::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut LinkedList::<i32>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ let _ = (&HashMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&mut HashMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+
+ let _ = (&BTreeSet::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&BinaryHeap::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = (&HashSet::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ let _ = std::path::Path::new("12/34").into_iter(); //~ WARN equivalent to .iter()
+ let _ = std::path::PathBuf::from("12/34").into_iter(); //~ ERROR equivalent to .iter()
+
+ let _ = (&[1, 2, 3]).into_iter().next(); //~ WARN equivalent to .iter()
+}
diff --git a/src/tools/clippy/tests/ui/into_iter_on_ref.stderr b/src/tools/clippy/tests/ui/into_iter_on_ref.stderr
new file mode 100644
index 000000000..28003b365
--- /dev/null
+++ b/src/tools/clippy/tests/ui/into_iter_on_ref.stderr
@@ -0,0 +1,166 @@
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `Vec`
+ --> $DIR/into_iter_on_ref.rs:14:30
+ |
+LL | let _ = (&vec![1, 2, 3]).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+ |
+ = note: `-D clippy::into-iter-on-ref` implied by `-D warnings`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `slice`
+ --> $DIR/into_iter_on_ref.rs:15:46
+ |
+LL | let _ = vec![1, 2, 3].into_boxed_slice().into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `slice`
+ --> $DIR/into_iter_on_ref.rs:16:41
+ |
+LL | let _ = std::rc::Rc::from(&[X][..]).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `slice`
+ --> $DIR/into_iter_on_ref.rs:17:44
+ |
+LL | let _ = std::sync::Arc::from(&[X][..]).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `array`
+ --> $DIR/into_iter_on_ref.rs:19:32
+ |
+LL | let _ = (&&&&&&&[1, 2, 3]).into_iter(); //~ ERROR equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `array`
+ --> $DIR/into_iter_on_ref.rs:20:36
+ |
+LL | let _ = (&&&&mut &&&[1, 2, 3]).into_iter(); //~ ERROR equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `array`
+ --> $DIR/into_iter_on_ref.rs:21:40
+ |
+LL | let _ = (&mut &mut &mut [1, 2, 3]).into_iter(); //~ ERROR equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `Option`
+ --> $DIR/into_iter_on_ref.rs:23:24
+ |
+LL | let _ = (&Some(4)).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `Option`
+ --> $DIR/into_iter_on_ref.rs:24:28
+ |
+LL | let _ = (&mut Some(5)).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `Result`
+ --> $DIR/into_iter_on_ref.rs:25:32
+ |
+LL | let _ = (&Ok::<_, i32>(6)).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `Result`
+ --> $DIR/into_iter_on_ref.rs:26:37
+ |
+LL | let _ = (&mut Err::<i32, _>(7)).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `Vec`
+ --> $DIR/into_iter_on_ref.rs:27:34
+ |
+LL | let _ = (&Vec::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `Vec`
+ --> $DIR/into_iter_on_ref.rs:28:38
+ |
+LL | let _ = (&mut Vec::<i32>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `BTreeMap`
+ --> $DIR/into_iter_on_ref.rs:29:44
+ |
+LL | let _ = (&BTreeMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `BTreeMap`
+ --> $DIR/into_iter_on_ref.rs:30:48
+ |
+LL | let _ = (&mut BTreeMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `VecDeque`
+ --> $DIR/into_iter_on_ref.rs:31:39
+ |
+LL | let _ = (&VecDeque::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `VecDeque`
+ --> $DIR/into_iter_on_ref.rs:32:43
+ |
+LL | let _ = (&mut VecDeque::<i32>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `LinkedList`
+ --> $DIR/into_iter_on_ref.rs:33:41
+ |
+LL | let _ = (&LinkedList::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `LinkedList`
+ --> $DIR/into_iter_on_ref.rs:34:45
+ |
+LL | let _ = (&mut LinkedList::<i32>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `HashMap`
+ --> $DIR/into_iter_on_ref.rs:35:43
+ |
+LL | let _ = (&HashMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter_mut()` and will not consume the `HashMap`
+ --> $DIR/into_iter_on_ref.rs:36:47
+ |
+LL | let _ = (&mut HashMap::<i32, u64>::new()).into_iter(); //~ WARN equivalent to .iter_mut()
+ | ^^^^^^^^^ help: call directly: `iter_mut`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `BTreeSet`
+ --> $DIR/into_iter_on_ref.rs:38:39
+ |
+LL | let _ = (&BTreeSet::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `BinaryHeap`
+ --> $DIR/into_iter_on_ref.rs:39:41
+ |
+LL | let _ = (&BinaryHeap::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `HashSet`
+ --> $DIR/into_iter_on_ref.rs:40:38
+ |
+LL | let _ = (&HashSet::<i32>::new()).into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `Path`
+ --> $DIR/into_iter_on_ref.rs:41:43
+ |
+LL | let _ = std::path::Path::new("12/34").into_iter(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `PathBuf`
+ --> $DIR/into_iter_on_ref.rs:42:47
+ |
+LL | let _ = std::path::PathBuf::from("12/34").into_iter(); //~ ERROR equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: this `.into_iter()` call is equivalent to `.iter()` and will not consume the `array`
+ --> $DIR/into_iter_on_ref.rs:44:26
+ |
+LL | let _ = (&[1, 2, 3]).into_iter().next(); //~ WARN equivalent to .iter()
+ | ^^^^^^^^^ help: call directly: `iter`
+
+error: aborting due to 27 previous errors
+
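The into_iter_on_ref test above pins down one equivalence: calling `.into_iter()` on a reference only borrows, so `.iter()`/`.iter_mut()` say the same thing more directly. A minimal standalone sketch of that pattern (variable names here are illustrative):

    fn main() {
        let v = vec![1, 2, 3];
        // On `&Vec<i32>`, `.into_iter()` uses the IntoIterator impl for the reference,
        // yields `&i32`, and does not consume the Vec, so `.iter()` is the clearer call.
        let a: Vec<&i32> = (&v).into_iter().collect(); // the pattern the lint flags
        let b: Vec<&i32> = v.iter().collect();         // the suggested replacement
        assert_eq!(a, b);
    }
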
diff --git a/src/tools/clippy/tests/ui/invalid_null_ptr_usage.fixed b/src/tools/clippy/tests/ui/invalid_null_ptr_usage.fixed
new file mode 100644
index 000000000..4f5322ebf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_null_ptr_usage.fixed
@@ -0,0 +1,49 @@
+// run-rustfix
+
+fn main() {
+ unsafe {
+ let _slice: &[usize] = std::slice::from_raw_parts(core::ptr::NonNull::dangling().as_ptr(), 0);
+ let _slice: &[usize] = std::slice::from_raw_parts(core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ let _slice: &[usize] = std::slice::from_raw_parts_mut(core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ std::ptr::copy::<usize>(core::ptr::NonNull::dangling().as_ptr(), std::ptr::NonNull::dangling().as_ptr(), 0);
+ std::ptr::copy::<usize>(std::ptr::NonNull::dangling().as_ptr(), core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ std::ptr::copy_nonoverlapping::<usize>(core::ptr::NonNull::dangling().as_ptr(), std::ptr::NonNull::dangling().as_ptr(), 0);
+ std::ptr::copy_nonoverlapping::<usize>(std::ptr::NonNull::dangling().as_ptr(), core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ struct A; // zero sized struct
+ assert_eq!(std::mem::size_of::<A>(), 0);
+
+ let _a: A = std::ptr::read(core::ptr::NonNull::dangling().as_ptr());
+ let _a: A = std::ptr::read(core::ptr::NonNull::dangling().as_ptr());
+
+ let _a: A = std::ptr::read_unaligned(core::ptr::NonNull::dangling().as_ptr());
+ let _a: A = std::ptr::read_unaligned(core::ptr::NonNull::dangling().as_ptr());
+
+ let _a: A = std::ptr::read_volatile(core::ptr::NonNull::dangling().as_ptr());
+ let _a: A = std::ptr::read_volatile(core::ptr::NonNull::dangling().as_ptr());
+
+ let _a: A = std::ptr::replace(core::ptr::NonNull::dangling().as_ptr(), A);
+
+ let _slice: *const [usize] = std::ptr::slice_from_raw_parts(core::ptr::NonNull::dangling().as_ptr(), 0);
+ let _slice: *const [usize] = std::ptr::slice_from_raw_parts(core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ let _slice: *const [usize] = std::ptr::slice_from_raw_parts_mut(core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ std::ptr::swap::<A>(core::ptr::NonNull::dangling().as_ptr(), &mut A);
+ std::ptr::swap::<A>(&mut A, core::ptr::NonNull::dangling().as_ptr());
+
+ std::ptr::swap_nonoverlapping::<A>(core::ptr::NonNull::dangling().as_ptr(), &mut A, 0);
+ std::ptr::swap_nonoverlapping::<A>(&mut A, core::ptr::NonNull::dangling().as_ptr(), 0);
+
+ std::ptr::write(core::ptr::NonNull::dangling().as_ptr(), A);
+
+ std::ptr::write_unaligned(core::ptr::NonNull::dangling().as_ptr(), A);
+
+ std::ptr::write_volatile(core::ptr::NonNull::dangling().as_ptr(), A);
+
+ std::ptr::write_bytes::<usize>(core::ptr::NonNull::dangling().as_ptr(), 42, 0);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/invalid_null_ptr_usage.rs b/src/tools/clippy/tests/ui/invalid_null_ptr_usage.rs
new file mode 100644
index 000000000..ae51c52d8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_null_ptr_usage.rs
@@ -0,0 +1,49 @@
+// run-rustfix
+
+fn main() {
+ unsafe {
+ let _slice: &[usize] = std::slice::from_raw_parts(std::ptr::null(), 0);
+ let _slice: &[usize] = std::slice::from_raw_parts(std::ptr::null_mut(), 0);
+
+ let _slice: &[usize] = std::slice::from_raw_parts_mut(std::ptr::null_mut(), 0);
+
+ std::ptr::copy::<usize>(std::ptr::null(), std::ptr::NonNull::dangling().as_ptr(), 0);
+ std::ptr::copy::<usize>(std::ptr::NonNull::dangling().as_ptr(), std::ptr::null_mut(), 0);
+
+ std::ptr::copy_nonoverlapping::<usize>(std::ptr::null(), std::ptr::NonNull::dangling().as_ptr(), 0);
+ std::ptr::copy_nonoverlapping::<usize>(std::ptr::NonNull::dangling().as_ptr(), std::ptr::null_mut(), 0);
+
+ struct A; // zero sized struct
+ assert_eq!(std::mem::size_of::<A>(), 0);
+
+ let _a: A = std::ptr::read(std::ptr::null());
+ let _a: A = std::ptr::read(std::ptr::null_mut());
+
+ let _a: A = std::ptr::read_unaligned(std::ptr::null());
+ let _a: A = std::ptr::read_unaligned(std::ptr::null_mut());
+
+ let _a: A = std::ptr::read_volatile(std::ptr::null());
+ let _a: A = std::ptr::read_volatile(std::ptr::null_mut());
+
+ let _a: A = std::ptr::replace(std::ptr::null_mut(), A);
+
+ let _slice: *const [usize] = std::ptr::slice_from_raw_parts(std::ptr::null(), 0);
+ let _slice: *const [usize] = std::ptr::slice_from_raw_parts(std::ptr::null_mut(), 0);
+
+ let _slice: *const [usize] = std::ptr::slice_from_raw_parts_mut(std::ptr::null_mut(), 0);
+
+ std::ptr::swap::<A>(std::ptr::null_mut(), &mut A);
+ std::ptr::swap::<A>(&mut A, std::ptr::null_mut());
+
+ std::ptr::swap_nonoverlapping::<A>(std::ptr::null_mut(), &mut A, 0);
+ std::ptr::swap_nonoverlapping::<A>(&mut A, std::ptr::null_mut(), 0);
+
+ std::ptr::write(std::ptr::null_mut(), A);
+
+ std::ptr::write_unaligned(std::ptr::null_mut(), A);
+
+ std::ptr::write_volatile(std::ptr::null_mut(), A);
+
+ std::ptr::write_bytes::<usize>(std::ptr::null_mut(), 42, 0);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/invalid_null_ptr_usage.stderr b/src/tools/clippy/tests/ui/invalid_null_ptr_usage.stderr
new file mode 100644
index 000000000..532c36abe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_null_ptr_usage.stderr
@@ -0,0 +1,154 @@
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:5:59
+ |
+LL | let _slice: &[usize] = std::slice::from_raw_parts(std::ptr::null(), 0);
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+ |
+ = note: `#[deny(clippy::invalid_null_ptr_usage)]` on by default
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:6:59
+ |
+LL | let _slice: &[usize] = std::slice::from_raw_parts(std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:8:63
+ |
+LL | let _slice: &[usize] = std::slice::from_raw_parts_mut(std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:10:33
+ |
+LL | std::ptr::copy::<usize>(std::ptr::null(), std::ptr::NonNull::dangling().as_ptr(), 0);
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:11:73
+ |
+LL | std::ptr::copy::<usize>(std::ptr::NonNull::dangling().as_ptr(), std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:13:48
+ |
+LL | std::ptr::copy_nonoverlapping::<usize>(std::ptr::null(), std::ptr::NonNull::dangling().as_ptr(), 0);
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:14:88
+ |
+LL | std::ptr::copy_nonoverlapping::<usize>(std::ptr::NonNull::dangling().as_ptr(), std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:19:36
+ |
+LL | let _a: A = std::ptr::read(std::ptr::null());
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:20:36
+ |
+LL | let _a: A = std::ptr::read(std::ptr::null_mut());
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:22:46
+ |
+LL | let _a: A = std::ptr::read_unaligned(std::ptr::null());
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:23:46
+ |
+LL | let _a: A = std::ptr::read_unaligned(std::ptr::null_mut());
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:25:45
+ |
+LL | let _a: A = std::ptr::read_volatile(std::ptr::null());
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:26:45
+ |
+LL | let _a: A = std::ptr::read_volatile(std::ptr::null_mut());
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:28:39
+ |
+LL | let _a: A = std::ptr::replace(std::ptr::null_mut(), A);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:30:69
+ |
+LL | let _slice: *const [usize] = std::ptr::slice_from_raw_parts(std::ptr::null(), 0);
+ | ^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:31:69
+ |
+LL | let _slice: *const [usize] = std::ptr::slice_from_raw_parts(std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:33:73
+ |
+LL | let _slice: *const [usize] = std::ptr::slice_from_raw_parts_mut(std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:35:29
+ |
+LL | std::ptr::swap::<A>(std::ptr::null_mut(), &mut A);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:36:37
+ |
+LL | std::ptr::swap::<A>(&mut A, std::ptr::null_mut());
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:38:44
+ |
+LL | std::ptr::swap_nonoverlapping::<A>(std::ptr::null_mut(), &mut A, 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:39:52
+ |
+LL | std::ptr::swap_nonoverlapping::<A>(&mut A, std::ptr::null_mut(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:41:25
+ |
+LL | std::ptr::write(std::ptr::null_mut(), A);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:43:35
+ |
+LL | std::ptr::write_unaligned(std::ptr::null_mut(), A);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:45:34
+ |
+LL | std::ptr::write_volatile(std::ptr::null_mut(), A);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: pointer must be non-null
+ --> $DIR/invalid_null_ptr_usage.rs:47:40
+ |
+LL | std::ptr::write_bytes::<usize>(std::ptr::null_mut(), 42, 0);
+ | ^^^^^^^^^^^^^^^^^^^^ help: change this to: `core::ptr::NonNull::dangling().as_ptr()`
+
+error: aborting due to 25 previous errors
+
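The invalid_null_ptr_usage pair above replaces every null pointer passed to a std function that requires non-null with `NonNull::dangling().as_ptr()`. A standalone sketch of why that substitution works for the zero-length case:

    use std::ptr::NonNull;

    fn main() {
        unsafe {
            // Even for a zero-length slice, `from_raw_parts` requires a non-null,
            // aligned pointer; a dangling NonNull pointer satisfies that, while
            // `std::ptr::null()` does not, which is what the lint enforces.
            let empty: &[usize] = std::slice::from_raw_parts(NonNull::<usize>::dangling().as_ptr(), 0);
            assert!(empty.is_empty());
        }
    }
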
diff --git a/src/tools/clippy/tests/ui/invalid_upcast_comparisons.rs b/src/tools/clippy/tests/ui/invalid_upcast_comparisons.rs
new file mode 100644
index 000000000..697416dce
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_upcast_comparisons.rs
@@ -0,0 +1,85 @@
+#![warn(clippy::invalid_upcast_comparisons)]
+#![allow(
+ unused,
+ clippy::eq_op,
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::cast_lossless
+)]
+
+fn mk_value<T>() -> T {
+ unimplemented!()
+}
+
+fn main() {
+ let u32: u32 = mk_value();
+ let u8: u8 = mk_value();
+ let i32: i32 = mk_value();
+ let i8: i8 = mk_value();
+
+ // always false, since no u8 can be > 300
+ (u8 as u32) > 300;
+ (u8 as i32) > 300;
+ (u8 as u32) == 300;
+ (u8 as i32) == 300;
+ 300 < (u8 as u32);
+ 300 < (u8 as i32);
+ 300 == (u8 as u32);
+ 300 == (u8 as i32);
+ // inverted of the above
+ // inverse of the above
+ (u8 as u32) <= 300;
+ (u8 as i32) <= 300;
+ (u8 as u32) != 300;
+ (u8 as i32) != 300;
+ 300 >= (u8 as u32);
+ 300 >= (u8 as i32);
+ 300 != (u8 as u32);
+ 300 != (u8 as i32);
+
+ // always false, since u8 -> i32 doesn't wrap
+ (u8 as i32) < 0;
+ -5 != (u8 as i32);
+ // inverse of the above
+ (u8 as i32) >= 0;
+ -5 == (u8 as i32);
+
+ // always false, since no u8 can be 1337
+ 1337 == (u8 as i32);
+ 1337 == (u8 as u32);
+ // inverse of the above
+ 1337 != (u8 as i32);
+ 1337 != (u8 as u32);
+
+ // Those are Ok:
+ (u8 as u32) > 20;
+ 42 == (u8 as i32);
+ 42 != (u8 as i32);
+ 42 > (u8 as i32);
+ (u8 as i32) == 42;
+ (u8 as i32) != 42;
+ (u8 as i32) > 42;
+ (u8 as i32) < 42;
+
+ (u8 as i8) == -1;
+ (u8 as i8) != -1;
+ (u8 as i32) > -1;
+ (u8 as i32) < -1;
+ (u32 as i32) < -5;
+ (u32 as i32) < 10;
+
+ (i8 as u8) == 1;
+ (i8 as u8) != 1;
+ (i8 as u8) < 1;
+ (i8 as u8) > 1;
+ (i32 as u32) < 5;
+ (i32 as u32) < 10;
+
+ -5 < (u32 as i32);
+ 0 <= (u32 as i32);
+ 0 < (u32 as i32);
+
+ -5 > (u32 as i32);
+ -5 >= (u8 as i32);
+
+ -5 == (u32 as i32);
+}
diff --git a/src/tools/clippy/tests/ui/invalid_upcast_comparisons.stderr b/src/tools/clippy/tests/ui/invalid_upcast_comparisons.stderr
new file mode 100644
index 000000000..03c3fb80a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_upcast_comparisons.stderr
@@ -0,0 +1,166 @@
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:21:5
+ |
+LL | (u8 as u32) > 300;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::invalid-upcast-comparisons` implied by `-D warnings`
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:22:5
+ |
+LL | (u8 as i32) > 300;
+ | ^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:23:5
+ |
+LL | (u8 as u32) == 300;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:24:5
+ |
+LL | (u8 as i32) == 300;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:25:5
+ |
+LL | 300 < (u8 as u32);
+ | ^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:26:5
+ |
+LL | 300 < (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:27:5
+ |
+LL | 300 == (u8 as u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:28:5
+ |
+LL | 300 == (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:30:5
+ |
+LL | (u8 as u32) <= 300;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:31:5
+ |
+LL | (u8 as i32) <= 300;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:32:5
+ |
+LL | (u8 as u32) != 300;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:33:5
+ |
+LL | (u8 as i32) != 300;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:34:5
+ |
+LL | 300 >= (u8 as u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:35:5
+ |
+LL | 300 >= (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:36:5
+ |
+LL | 300 != (u8 as u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:37:5
+ |
+LL | 300 != (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:40:5
+ |
+LL | (u8 as i32) < 0;
+ | ^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:41:5
+ |
+LL | -5 != (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:43:5
+ |
+LL | (u8 as i32) >= 0;
+ | ^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:44:5
+ |
+LL | -5 == (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:47:5
+ |
+LL | 1337 == (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:48:5
+ |
+LL | 1337 == (u8 as u32);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:50:5
+ |
+LL | 1337 != (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:51:5
+ |
+LL | 1337 != (u8 as u32);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always true
+ --> $DIR/invalid_upcast_comparisons.rs:65:5
+ |
+LL | (u8 as i32) > -1;
+ | ^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:66:5
+ |
+LL | (u8 as i32) < -1;
+ | ^^^^^^^^^^^^^^^^
+
+error: because of the numeric bounds on `u8` prior to casting, this expression is always false
+ --> $DIR/invalid_upcast_comparisons.rs:82:5
+ |
+LL | -5 >= (u8 as i32);
+ | ^^^^^^^^^^^^^^^^^
+
+error: aborting due to 27 previous errors
+
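The invalid_upcast_comparisons test rests on one fact: widening a value cannot move it outside its original range, so some comparisons are decided before they run. A tiny sketch of the always-false case (values chosen only for illustration):

    fn main() {
        let x: u8 = 200;
        // A u8 widened to u32 is still at most 255, so comparing against 300 can
        // only ever be false; this is the kind of expression the lint reports.
        let always_false = (x as u32) > 300;
        let sometimes_true = (x as u32) > 100;
        println!("{} {}", always_false, sometimes_true);
    }
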
diff --git a/src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.rs b/src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.rs
new file mode 100644
index 000000000..3dc096d31
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.rs
@@ -0,0 +1,20 @@
+#![warn(clippy::invalid_utf8_in_unchecked)]
+
+fn main() {
+ // Valid
+ unsafe {
+ std::str::from_utf8_unchecked(&[99, 108, 105, 112, 112, 121]);
+ std::str::from_utf8_unchecked(&[b'c', b'l', b'i', b'p', b'p', b'y']);
+ std::str::from_utf8_unchecked(b"clippy");
+
+ let x = 0xA0;
+ std::str::from_utf8_unchecked(&[0xC0, x]);
+ }
+
+ // Invalid
+ unsafe {
+ std::str::from_utf8_unchecked(&[99, 108, 130, 105, 112, 112, 121]);
+ std::str::from_utf8_unchecked(&[b'c', b'l', b'\x82', b'i', b'p', b'p', b'y']);
+ std::str::from_utf8_unchecked(b"cl\x82ippy");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.stderr b/src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.stderr
new file mode 100644
index 000000000..c89cd2758
--- /dev/null
+++ b/src/tools/clippy/tests/ui/invalid_utf8_in_unchecked.stderr
@@ -0,0 +1,22 @@
+error: non UTF-8 literal in `std::str::from_utf8_unchecked`
+ --> $DIR/invalid_utf8_in_unchecked.rs:16:9
+ |
+LL | std::str::from_utf8_unchecked(&[99, 108, 130, 105, 112, 112, 121]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::invalid-utf8-in-unchecked` implied by `-D warnings`
+
+error: non UTF-8 literal in `std::str::from_utf8_unchecked`
+ --> $DIR/invalid_utf8_in_unchecked.rs:17:9
+ |
+LL | std::str::from_utf8_unchecked(&[b'c', b'l', b'/x82', b'i', b'p', b'p', b'y']);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: non UTF-8 literal in `std::str::from_utf8_unchecked`
+ --> $DIR/invalid_utf8_in_unchecked.rs:18:9
+ |
+LL | std::str::from_utf8_unchecked(b"cl/x82ippy");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
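The invalid_utf8_in_unchecked cases hinge on byte 0x82 (130), a UTF-8 continuation byte that cannot start a character. A standalone check of that fact using the safe constructor (illustrative only):

    fn main() {
        // The same bytes as the test's "cl\x82ippy": not valid UTF-8, so the
        // unchecked constructor would be undefined behaviour here, while the
        // checked one simply reports the error.
        let bytes = [99u8, 108, 130, 105, 112, 112, 121];
        assert!(std::str::from_utf8(&bytes).is_err());
    }
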
diff --git a/src/tools/clippy/tests/ui/is_digit_ascii_radix.fixed b/src/tools/clippy/tests/ui/is_digit_ascii_radix.fixed
new file mode 100644
index 000000000..c0ba647d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/is_digit_ascii_radix.fixed
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::is_digit_ascii_radix)]
+
+const TEN: u32 = 10;
+
+fn main() {
+ let c: char = '6';
+
+ // Should trigger the lint.
+ let _ = c.is_ascii_digit();
+ let _ = c.is_ascii_hexdigit();
+ let _ = c.is_ascii_hexdigit();
+
+ // Should not trigger the lint.
+ let _ = c.is_digit(11);
+ let _ = c.is_digit(TEN);
+}
diff --git a/src/tools/clippy/tests/ui/is_digit_ascii_radix.rs b/src/tools/clippy/tests/ui/is_digit_ascii_radix.rs
new file mode 100644
index 000000000..68e3f3243
--- /dev/null
+++ b/src/tools/clippy/tests/ui/is_digit_ascii_radix.rs
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::is_digit_ascii_radix)]
+
+const TEN: u32 = 10;
+
+fn main() {
+ let c: char = '6';
+
+ // Should trigger the lint.
+ let _ = c.is_digit(10);
+ let _ = c.is_digit(16);
+ let _ = c.is_digit(0x10);
+
+ // Should not trigger the lint.
+ let _ = c.is_digit(11);
+ let _ = c.is_digit(TEN);
+}
diff --git a/src/tools/clippy/tests/ui/is_digit_ascii_radix.stderr b/src/tools/clippy/tests/ui/is_digit_ascii_radix.stderr
new file mode 100644
index 000000000..dc5cb2913
--- /dev/null
+++ b/src/tools/clippy/tests/ui/is_digit_ascii_radix.stderr
@@ -0,0 +1,22 @@
+error: use of `char::is_digit` with literal radix of 10
+ --> $DIR/is_digit_ascii_radix.rs:11:13
+ |
+LL | let _ = c.is_digit(10);
+ | ^^^^^^^^^^^^^^ help: try: `c.is_ascii_digit()`
+ |
+ = note: `-D clippy::is-digit-ascii-radix` implied by `-D warnings`
+
+error: use of `char::is_digit` with literal radix of 16
+ --> $DIR/is_digit_ascii_radix.rs:12:13
+ |
+LL | let _ = c.is_digit(16);
+ | ^^^^^^^^^^^^^^ help: try: `c.is_ascii_hexdigit()`
+
+error: use of `char::is_digit` with literal radix of 16
+ --> $DIR/is_digit_ascii_radix.rs:13:13
+ |
+LL | let _ = c.is_digit(0x10);
+ | ^^^^^^^^^^^^^^^^ help: try: `c.is_ascii_hexdigit()`
+
+error: aborting due to 3 previous errors
+
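The is_digit_ascii_radix fix is a one-to-one rewrite: a literal radix of 10 maps to `is_ascii_digit` and 16 (or 0x10) to `is_ascii_hexdigit`. A minimal sketch confirming the equivalence:

    fn main() {
        let c = '6';
        // Same results, but the ASCII helpers state the intent without a magic radix.
        assert_eq!(c.is_digit(10), c.is_ascii_digit());
        assert_eq!(c.is_digit(16), c.is_ascii_hexdigit());
    }
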
diff --git a/src/tools/clippy/tests/ui/issue-3145.rs b/src/tools/clippy/tests/ui/issue-3145.rs
new file mode 100644
index 000000000..586d13647
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue-3145.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("{}" a); //~ERROR expected `,`, found `a`
+}
diff --git a/src/tools/clippy/tests/ui/issue-3145.stderr b/src/tools/clippy/tests/ui/issue-3145.stderr
new file mode 100644
index 000000000..a35032aa1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue-3145.stderr
@@ -0,0 +1,8 @@
+error: expected `,`, found `a`
+ --> $DIR/issue-3145.rs:2:19
+ |
+LL | println!("{}" a); //~ERROR expected `,`, found `a`
+ | ^ expected `,`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/issue-7447.rs b/src/tools/clippy/tests/ui/issue-7447.rs
new file mode 100644
index 000000000..fdb77f322
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue-7447.rs
@@ -0,0 +1,25 @@
+use std::{borrow::Cow, collections::BTreeMap, marker::PhantomData, sync::Arc};
+
+fn byte_view<'a>(s: &'a ByteView<'_>) -> BTreeMap<&'a str, ByteView<'a>> {
+ panic!()
+}
+
+fn group_entries(s: &()) -> BTreeMap<Cow<'_, str>, Vec<Cow<'_, str>>> {
+ todo!()
+}
+
+struct Mmap;
+
+enum ByteViewBacking<'a> {
+ Buf(Cow<'a, [u8]>),
+ Mmap(Mmap),
+}
+
+pub struct ByteView<'a> {
+ backing: Arc<ByteViewBacking<'a>>,
+}
+
+fn main() {
+ byte_view(panic!());
+ group_entries(panic!());
+}
diff --git a/src/tools/clippy/tests/ui/issue-7447.stderr b/src/tools/clippy/tests/ui/issue-7447.stderr
new file mode 100644
index 000000000..8d8c29f13
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue-7447.stderr
@@ -0,0 +1,19 @@
+error: sub-expression diverges
+ --> $DIR/issue-7447.rs:23:15
+ |
+LL | byte_view(panic!());
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::diverging-sub-expression` implied by `-D warnings`
+ = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: sub-expression diverges
+ --> $DIR/issue-7447.rs:24:19
+ |
+LL | group_entries(panic!());
+ | ^^^^^^^^
+ |
+ = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/issue_2356.fixed b/src/tools/clippy/tests/ui/issue_2356.fixed
new file mode 100644
index 000000000..942e99fa8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue_2356.fixed
@@ -0,0 +1,26 @@
+// run-rustfix
+#![deny(clippy::while_let_on_iterator)]
+#![allow(unused_mut)]
+
+use std::iter::Iterator;
+
+struct Foo;
+
+impl Foo {
+ fn foo1<I: Iterator<Item = usize>>(mut it: I) {
+ while let Some(_) = it.next() {
+ println!("{:?}", it.size_hint());
+ }
+ }
+
+ fn foo2<I: Iterator<Item = usize>>(mut it: I) {
+ for e in it {
+ println!("{:?}", e);
+ }
+ }
+}
+
+fn main() {
+ Foo::foo1(vec![].into_iter());
+ Foo::foo2(vec![].into_iter());
+}
diff --git a/src/tools/clippy/tests/ui/issue_2356.rs b/src/tools/clippy/tests/ui/issue_2356.rs
new file mode 100644
index 000000000..b000234ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue_2356.rs
@@ -0,0 +1,26 @@
+// run-rustfix
+#![deny(clippy::while_let_on_iterator)]
+#![allow(unused_mut)]
+
+use std::iter::Iterator;
+
+struct Foo;
+
+impl Foo {
+ fn foo1<I: Iterator<Item = usize>>(mut it: I) {
+ while let Some(_) = it.next() {
+ println!("{:?}", it.size_hint());
+ }
+ }
+
+ fn foo2<I: Iterator<Item = usize>>(mut it: I) {
+ while let Some(e) = it.next() {
+ println!("{:?}", e);
+ }
+ }
+}
+
+fn main() {
+ Foo::foo1(vec![].into_iter());
+ Foo::foo2(vec![].into_iter());
+}
diff --git a/src/tools/clippy/tests/ui/issue_2356.stderr b/src/tools/clippy/tests/ui/issue_2356.stderr
new file mode 100644
index 000000000..4e3ff7522
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue_2356.stderr
@@ -0,0 +1,14 @@
+error: this loop could be written as a `for` loop
+ --> $DIR/issue_2356.rs:17:9
+ |
+LL | while let Some(e) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for e in it`
+ |
+note: the lint level is defined here
+ --> $DIR/issue_2356.rs:2:9
+ |
+LL | #![deny(clippy::while_let_on_iterator)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
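issue_2356 covers while_let_on_iterator: when the loop head only calls `.next()`, the loop is a `for` loop in disguise. A standalone sketch of the rewritten form, mirroring the .fixed file (data is illustrative):

    fn main() {
        let it = vec![1, 2, 3].into_iter();
        // Instead of `while let Some(e) = it.next() { ... }`, consume the iterator
        // directly; behaviour is identical here.
        for e in it {
            println!("{}", e);
        }
    }
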
diff --git a/src/tools/clippy/tests/ui/issue_4266.rs b/src/tools/clippy/tests/ui/issue_4266.rs
new file mode 100644
index 000000000..d9d48189b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue_4266.rs
@@ -0,0 +1,38 @@
+#![allow(dead_code)]
+
+async fn sink1<'a>(_: &'a str) {} // lint
+async fn sink1_elided(_: &str) {} // ok
+
+// lint
+async fn one_to_one<'a>(s: &'a str) -> &'a str {
+ s
+}
+
+// ok
+async fn one_to_one_elided(s: &str) -> &str {
+ s
+}
+
+// ok
+async fn all_to_one<'a>(a: &'a str, _b: &'a str) -> &'a str {
+ a
+}
+
+// async fn unrelated(_: &str, _: &str) {} // Not allowed in async fn
+
+// #3988
+struct Foo;
+impl Foo {
+ // ok
+ pub async fn new(&mut self) -> Self {
+ Foo {}
+ }
+}
+
+// rust-lang/rust#61115
+// ok
+async fn print(s: &str) {
+ println!("{}", s);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/issue_4266.stderr b/src/tools/clippy/tests/ui/issue_4266.stderr
new file mode 100644
index 000000000..e5042aaa7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/issue_4266.stderr
@@ -0,0 +1,25 @@
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/issue_4266.rs:3:1
+ |
+LL | async fn sink1<'a>(_: &'a str) {} // lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::needless-lifetimes` implied by `-D warnings`
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/issue_4266.rs:7:1
+ |
+LL | async fn one_to_one<'a>(s: &'a str) -> &'a str {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: methods called `new` usually take no `self`
+ --> $DIR/issue_4266.rs:27:22
+ |
+LL | pub async fn new(&mut self) -> Self {
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::wrong-self-convention` implied by `-D warnings`
+ = help: consider choosing a less ambiguous name
+
+error: aborting due to 3 previous errors
+
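issue_4266 exercises needless_lifetimes on async fns: the named lifetime repeats exactly what elision would infer. A small sketch of both spellings (function names are made up for illustration):

    #![allow(dead_code)]

    // The explicit and elided versions have the same signature after elision,
    // which is why the lint flags the first form.
    async fn explicit<'a>(s: &'a str) -> &'a str {
        s
    }

    async fn elided(s: &str) -> &str {
        s
    }

    fn main() {}
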
diff --git a/src/tools/clippy/tests/ui/item_after_statement.rs b/src/tools/clippy/tests/ui/item_after_statement.rs
new file mode 100644
index 000000000..d439ca1e4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/item_after_statement.rs
@@ -0,0 +1,52 @@
+#![warn(clippy::items_after_statements)]
+
+fn ok() {
+ fn foo() {
+ println!("foo");
+ }
+ foo();
+}
+
+fn last() {
+ foo();
+ fn foo() {
+ println!("foo");
+ }
+}
+
+fn main() {
+ foo();
+ fn foo() {
+ println!("foo");
+ }
+ foo();
+}
+
+fn mac() {
+ let mut a = 5;
+ println!("{}", a);
+ // do not lint this, because it needs to be after `a`
+ macro_rules! b {
+ () => {{
+ a = 6;
+ fn say_something() {
+ println!("something");
+ }
+ }};
+ }
+ b!();
+ println!("{}", a);
+}
+
+fn semicolon() {
+ struct S {
+ a: u32,
+ };
+ impl S {
+ fn new(a: u32) -> Self {
+ Self { a }
+ }
+ }
+
+ let _ = S::new(3);
+}
diff --git a/src/tools/clippy/tests/ui/item_after_statement.stderr b/src/tools/clippy/tests/ui/item_after_statement.stderr
new file mode 100644
index 000000000..ab4a6374c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/item_after_statement.stderr
@@ -0,0 +1,33 @@
+error: adding items after statements is confusing, since items exist from the start of the scope
+ --> $DIR/item_after_statement.rs:12:5
+ |
+LL | / fn foo() {
+LL | | println!("foo");
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::items-after-statements` implied by `-D warnings`
+
+error: adding items after statements is confusing, since items exist from the start of the scope
+ --> $DIR/item_after_statement.rs:19:5
+ |
+LL | / fn foo() {
+LL | | println!("foo");
+LL | | }
+ | |_____^
+
+error: adding items after statements is confusing, since items exist from the start of the scope
+ --> $DIR/item_after_statement.rs:32:13
+ |
+LL | / fn say_something() {
+LL | | println!("something");
+LL | | }
+ | |_____________^
+...
+LL | b!();
+ | ---- in this macro invocation
+ |
+ = note: this error originates in the macro `b` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 3 previous errors
+
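item_after_statement demonstrates items_after_statements: an item declared mid-function is in scope for the whole block, so it is callable before its textual position, which the lint treats as confusing. A compact sketch of that hoisting (names illustrative):

    fn main() {
        // `greet` is defined below this call yet is already in scope, because items
        // exist for the whole block; the lint asks for the item to be moved above
        // the statements instead.
        greet();
        fn greet() {
            println!("hello");
        }
    }
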
diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed
new file mode 100644
index 000000000..9b8621335
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+
+#![allow(unused)]
+
+use std::collections::HashSet;
+use std::collections::VecDeque;
+
+fn main() {
+ let v = [1, 2, 3, 4, 5];
+ let v2: Vec<isize> = v.to_vec();
+ let v3: HashSet<isize> = v.iter().cloned().collect();
+ let v4: VecDeque<isize> = v.iter().cloned().collect();
+
+ // Handle macro expansion in suggestion
+ let _: Vec<isize> = vec![1, 2, 3].to_vec();
+
+ // Issue #3704
+ unsafe {
+ let _: Vec<u8> = std::ffi::CStr::from_ptr(std::ptr::null())
+ .to_bytes().to_vec();
+ }
+
+ // Issue #6808
+ let arr: [u8; 64] = [0; 64];
+ let _: Vec<_> = arr.to_vec();
+
+ // Issue #6703
+ let _: Vec<isize> = v.to_vec();
+}
diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.rs b/src/tools/clippy/tests/ui/iter_cloned_collect.rs
new file mode 100644
index 000000000..639f50665
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_cloned_collect.rs
@@ -0,0 +1,32 @@
+// run-rustfix
+
+#![allow(unused)]
+
+use std::collections::HashSet;
+use std::collections::VecDeque;
+
+fn main() {
+ let v = [1, 2, 3, 4, 5];
+ let v2: Vec<isize> = v.iter().cloned().collect();
+ let v3: HashSet<isize> = v.iter().cloned().collect();
+ let v4: VecDeque<isize> = v.iter().cloned().collect();
+
+ // Handle macro expansion in suggestion
+ let _: Vec<isize> = vec![1, 2, 3].iter().cloned().collect();
+
+ // Issue #3704
+ unsafe {
+ let _: Vec<u8> = std::ffi::CStr::from_ptr(std::ptr::null())
+ .to_bytes()
+ .iter()
+ .cloned()
+ .collect();
+ }
+
+ // Issue #6808
+ let arr: [u8; 64] = [0; 64];
+ let _: Vec<_> = arr.iter().cloned().collect();
+
+ // Issue #6703
+ let _: Vec<isize> = v.iter().copied().collect();
+}
diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr
new file mode 100644
index 000000000..b2cc497bf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr
@@ -0,0 +1,38 @@
+error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
+ --> $DIR/iter_cloned_collect.rs:10:27
+ |
+LL | let v2: Vec<isize> = v.iter().cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
+ |
+ = note: `-D clippy::iter-cloned-collect` implied by `-D warnings`
+
+error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
+ --> $DIR/iter_cloned_collect.rs:15:38
+ |
+LL | let _: Vec<isize> = vec![1, 2, 3].iter().cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
+
+error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
+ --> $DIR/iter_cloned_collect.rs:20:24
+ |
+LL | .to_bytes()
+ | ________________________^
+LL | | .iter()
+LL | | .cloned()
+LL | | .collect();
+ | |______________________^ help: try: `.to_vec()`
+
+error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
+ --> $DIR/iter_cloned_collect.rs:28:24
+ |
+LL | let _: Vec<_> = arr.iter().cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
+
+error: called `iter().copied().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
+ --> $DIR/iter_cloned_collect.rs:31:26
+ |
+LL | let _: Vec<isize> = v.iter().copied().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
+
+error: aborting due to 5 previous errors
+
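iter_cloned_collect boils down to this: cloning each element of a slice into a new Vec is what `.to_vec()` already does. A standalone sketch of the equivalence:

    fn main() {
        let v = [1, 2, 3];
        let a: Vec<i32> = v.iter().cloned().collect(); // the pattern the lint flags
        let b: Vec<i32> = v.to_vec();                  // the suggested form
        assert_eq!(a, b);
    }
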
diff --git a/src/tools/clippy/tests/ui/iter_count.fixed b/src/tools/clippy/tests/ui/iter_count.fixed
new file mode 100644
index 000000000..90a6eef75
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_count.fixed
@@ -0,0 +1,87 @@
+// run-rustfix
+// aux-build:option_helpers.rs
+
+#![warn(clippy::iter_count)]
+#![allow(
+ unused_variables,
+ array_into_iter,
+ unused_mut,
+ clippy::into_iter_on_ref,
+ clippy::unnecessary_operation
+)]
+
+extern crate option_helpers;
+
+use option_helpers::IteratorFalsePositives;
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
+
+/// Struct to generate false positives for things with `.iter()`.
+#[derive(Copy, Clone)]
+struct HasIter;
+
+impl HasIter {
+ fn iter(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+
+ fn iter_mut(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+
+ fn into_iter(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+}
+
+#[allow(unused_must_use)]
+fn main() {
+ let mut vec = vec![0, 1, 2, 3];
+ let mut boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let mut vec_deque: VecDeque<_> = vec.iter().cloned().collect();
+ let mut hash_set = HashSet::new();
+ let mut hash_map = HashMap::new();
+ let mut b_tree_map = BTreeMap::new();
+ let mut b_tree_set = BTreeSet::new();
+ let mut linked_list = LinkedList::new();
+ let mut binary_heap = BinaryHeap::new();
+ hash_set.insert(1);
+ hash_map.insert(1, 2);
+ b_tree_map.insert(1, 2);
+ b_tree_set.insert(1);
+ linked_list.push_back(1);
+ binary_heap.push(1);
+
+ &vec[..].len();
+ vec.len();
+ boxed_slice.len();
+ vec_deque.len();
+ hash_set.len();
+ hash_map.len();
+ b_tree_map.len();
+ b_tree_set.len();
+ linked_list.len();
+ binary_heap.len();
+
+ vec.len();
+ &vec[..].len();
+ vec_deque.len();
+ hash_map.len();
+ b_tree_map.len();
+ linked_list.len();
+
+ &vec[..].len();
+ vec.len();
+ vec_deque.len();
+ hash_set.len();
+ hash_map.len();
+ b_tree_map.len();
+ b_tree_set.len();
+ linked_list.len();
+ binary_heap.len();
+
+ // Make sure we don't lint for non-relevant types.
+ let false_positive = HasIter;
+ false_positive.iter().count();
+ false_positive.iter_mut().count();
+ false_positive.into_iter().count();
+}
diff --git a/src/tools/clippy/tests/ui/iter_count.rs b/src/tools/clippy/tests/ui/iter_count.rs
new file mode 100644
index 000000000..6681a480a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_count.rs
@@ -0,0 +1,87 @@
+// run-rustfix
+// aux-build:option_helpers.rs
+
+#![warn(clippy::iter_count)]
+#![allow(
+ unused_variables,
+ array_into_iter,
+ unused_mut,
+ clippy::into_iter_on_ref,
+ clippy::unnecessary_operation
+)]
+
+extern crate option_helpers;
+
+use option_helpers::IteratorFalsePositives;
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
+
+/// Struct to generate false positives for things with `.iter()`.
+#[derive(Copy, Clone)]
+struct HasIter;
+
+impl HasIter {
+ fn iter(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+
+ fn iter_mut(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+
+ fn into_iter(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+}
+
+#[allow(unused_must_use)]
+fn main() {
+ let mut vec = vec![0, 1, 2, 3];
+ let mut boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let mut vec_deque: VecDeque<_> = vec.iter().cloned().collect();
+ let mut hash_set = HashSet::new();
+ let mut hash_map = HashMap::new();
+ let mut b_tree_map = BTreeMap::new();
+ let mut b_tree_set = BTreeSet::new();
+ let mut linked_list = LinkedList::new();
+ let mut binary_heap = BinaryHeap::new();
+ hash_set.insert(1);
+ hash_map.insert(1, 2);
+ b_tree_map.insert(1, 2);
+ b_tree_set.insert(1);
+ linked_list.push_back(1);
+ binary_heap.push(1);
+
+ &vec[..].iter().count();
+ vec.iter().count();
+ boxed_slice.iter().count();
+ vec_deque.iter().count();
+ hash_set.iter().count();
+ hash_map.iter().count();
+ b_tree_map.iter().count();
+ b_tree_set.iter().count();
+ linked_list.iter().count();
+ binary_heap.iter().count();
+
+ vec.iter_mut().count();
+ &vec[..].iter_mut().count();
+ vec_deque.iter_mut().count();
+ hash_map.iter_mut().count();
+ b_tree_map.iter_mut().count();
+ linked_list.iter_mut().count();
+
+ &vec[..].into_iter().count();
+ vec.into_iter().count();
+ vec_deque.into_iter().count();
+ hash_set.into_iter().count();
+ hash_map.into_iter().count();
+ b_tree_map.into_iter().count();
+ b_tree_set.into_iter().count();
+ linked_list.into_iter().count();
+ binary_heap.into_iter().count();
+
+ // Make sure we don't lint for non-relevant types.
+ let false_positive = HasIter;
+ false_positive.iter().count();
+ false_positive.iter_mut().count();
+ false_positive.into_iter().count();
+}
diff --git a/src/tools/clippy/tests/ui/iter_count.stderr b/src/tools/clippy/tests/ui/iter_count.stderr
new file mode 100644
index 000000000..2e3d7fc35
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_count.stderr
@@ -0,0 +1,154 @@
+error: called `.iter().count()` on a `slice`
+ --> $DIR/iter_count.rs:54:6
+ |
+LL | &vec[..].iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec[..].len()`
+ |
+ = note: `-D clippy::iter-count` implied by `-D warnings`
+
+error: called `.iter().count()` on a `Vec`
+ --> $DIR/iter_count.rs:55:5
+ |
+LL | vec.iter().count();
+ | ^^^^^^^^^^^^^^^^^^ help: try: `vec.len()`
+
+error: called `.iter().count()` on a `slice`
+ --> $DIR/iter_count.rs:56:5
+ |
+LL | boxed_slice.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice.len()`
+
+error: called `.iter().count()` on a `VecDeque`
+ --> $DIR/iter_count.rs:57:5
+ |
+LL | vec_deque.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec_deque.len()`
+
+error: called `.iter().count()` on a `HashSet`
+ --> $DIR/iter_count.rs:58:5
+ |
+LL | hash_set.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `hash_set.len()`
+
+error: called `.iter().count()` on a `HashMap`
+ --> $DIR/iter_count.rs:59:5
+ |
+LL | hash_map.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `hash_map.len()`
+
+error: called `.iter().count()` on a `BTreeMap`
+ --> $DIR/iter_count.rs:60:5
+ |
+LL | b_tree_map.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `b_tree_map.len()`
+
+error: called `.iter().count()` on a `BTreeSet`
+ --> $DIR/iter_count.rs:61:5
+ |
+LL | b_tree_set.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `b_tree_set.len()`
+
+error: called `.iter().count()` on a `LinkedList`
+ --> $DIR/iter_count.rs:62:5
+ |
+LL | linked_list.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `linked_list.len()`
+
+error: called `.iter().count()` on a `BinaryHeap`
+ --> $DIR/iter_count.rs:63:5
+ |
+LL | binary_heap.iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `binary_heap.len()`
+
+error: called `.iter_mut().count()` on a `Vec`
+ --> $DIR/iter_count.rs:65:5
+ |
+LL | vec.iter_mut().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.len()`
+
+error: called `.iter_mut().count()` on a `slice`
+ --> $DIR/iter_count.rs:66:6
+ |
+LL | &vec[..].iter_mut().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec[..].len()`
+
+error: called `.iter_mut().count()` on a `VecDeque`
+ --> $DIR/iter_count.rs:67:5
+ |
+LL | vec_deque.iter_mut().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec_deque.len()`
+
+error: called `.iter_mut().count()` on a `HashMap`
+ --> $DIR/iter_count.rs:68:5
+ |
+LL | hash_map.iter_mut().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `hash_map.len()`
+
+error: called `.iter_mut().count()` on a `BTreeMap`
+ --> $DIR/iter_count.rs:69:5
+ |
+LL | b_tree_map.iter_mut().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `b_tree_map.len()`
+
+error: called `.iter_mut().count()` on a `LinkedList`
+ --> $DIR/iter_count.rs:70:5
+ |
+LL | linked_list.iter_mut().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `linked_list.len()`
+
+error: called `.into_iter().count()` on a `slice`
+ --> $DIR/iter_count.rs:72:6
+ |
+LL | &vec[..].into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec[..].len()`
+
+error: called `.into_iter().count()` on a `Vec`
+ --> $DIR/iter_count.rs:73:5
+ |
+LL | vec.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.len()`
+
+error: called `.into_iter().count()` on a `VecDeque`
+ --> $DIR/iter_count.rs:74:5
+ |
+LL | vec_deque.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec_deque.len()`
+
+error: called `.into_iter().count()` on a `HashSet`
+ --> $DIR/iter_count.rs:75:5
+ |
+LL | hash_set.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `hash_set.len()`
+
+error: called `.into_iter().count()` on a `HashMap`
+ --> $DIR/iter_count.rs:76:5
+ |
+LL | hash_map.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `hash_map.len()`
+
+error: called `.into_iter().count()` on a `BTreeMap`
+ --> $DIR/iter_count.rs:77:5
+ |
+LL | b_tree_map.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `b_tree_map.len()`
+
+error: called `.into_iter().count()` on a `BTreeSet`
+ --> $DIR/iter_count.rs:78:5
+ |
+LL | b_tree_set.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `b_tree_set.len()`
+
+error: called `.into_iter().count()` on a `LinkedList`
+ --> $DIR/iter_count.rs:79:5
+ |
+LL | linked_list.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `linked_list.len()`
+
+error: called `.into_iter().count()` on a `BinaryHeap`
+ --> $DIR/iter_count.rs:80:5
+ |
+LL | binary_heap.into_iter().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `binary_heap.len()`
+
+error: aborting due to 25 previous errors
+
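iter_count applies the same replacement to every std collection above: `.iter().count()` walks all elements, while `.len()` reads a stored length. A one-line sketch of the equivalence:

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(v.iter().count(), v.len()); // same value, but `.len()` is O(1)
    }
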
diff --git a/src/tools/clippy/tests/ui/iter_next_slice.fixed b/src/tools/clippy/tests/ui/iter_next_slice.fixed
new file mode 100644
index 000000000..f612d26aa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_next_slice.fixed
@@ -0,0 +1,24 @@
+// run-rustfix
+#![warn(clippy::iter_next_slice)]
+
+fn main() {
+ // test code goes here
+ let s = [1, 2, 3];
+ let v = vec![1, 2, 3];
+
+ let _ = s.first();
+ // Should be replaced by s.first()
+
+ let _ = s.get(2);
+ // Should be replaced by s.get(2)
+
+ let _ = v.get(5);
+ // Should be replaced by v.get(5)
+
+ let _ = v.first();
+ // Should be replaced by v.first()
+
+ let o = Some(5);
+ o.iter().next();
+ // Shouldn't be linted since this is not a Slice or an Array
+}
diff --git a/src/tools/clippy/tests/ui/iter_next_slice.rs b/src/tools/clippy/tests/ui/iter_next_slice.rs
new file mode 100644
index 000000000..5195f1c86
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_next_slice.rs
@@ -0,0 +1,24 @@
+// run-rustfix
+#![warn(clippy::iter_next_slice)]
+
+fn main() {
+ // test code goes here
+ let s = [1, 2, 3];
+ let v = vec![1, 2, 3];
+
+ let _ = s.iter().next();
+ // Should be replaced by s.first()
+
+ let _ = s[2..].iter().next();
+ // Should be replaced by s.get(2)
+
+ let _ = v[5..].iter().next();
+ // Should be replaced by v.get(5)
+
+ let _ = v.iter().next();
+ // Should be replaced by v.first()
+
+ let o = Some(5);
+ o.iter().next();
+ // Shouldn't be linted since this is not a Slice or an Array
+}
diff --git a/src/tools/clippy/tests/ui/iter_next_slice.stderr b/src/tools/clippy/tests/ui/iter_next_slice.stderr
new file mode 100644
index 000000000..d8b89061f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_next_slice.stderr
@@ -0,0 +1,28 @@
+error: using `.iter().next()` on an array
+ --> $DIR/iter_next_slice.rs:9:13
+ |
+LL | let _ = s.iter().next();
+ | ^^^^^^^^^^^^^^^ help: try calling: `s.first()`
+ |
+ = note: `-D clippy::iter-next-slice` implied by `-D warnings`
+
+error: using `.iter().next()` on a Slice without end index
+ --> $DIR/iter_next_slice.rs:12:13
+ |
+LL | let _ = s[2..].iter().next();
+ | ^^^^^^^^^^^^^^^^^^^^ help: try calling: `s.get(2)`
+
+error: using `.iter().next()` on a Slice without end index
+ --> $DIR/iter_next_slice.rs:15:13
+ |
+LL | let _ = v[5..].iter().next();
+ | ^^^^^^^^^^^^^^^^^^^^ help: try calling: `v.get(5)`
+
+error: using `.iter().next()` on an array
+ --> $DIR/iter_next_slice.rs:18:13
+ |
+LL | let _ = v.iter().next();
+ | ^^^^^^^^^^^^^^^ help: try calling: `v.first()`
+
+error: aborting due to 4 previous errors
+
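iter_next_slice maps `.iter().next()` on a whole slice to `.first()`, and on a sliced tail `s[n..]` to `s.get(n)`. A small sketch of both suggested forms (data chosen for illustration):

    fn main() {
        let s = [1, 2, 3];
        assert_eq!(s.iter().next(), s.first());      // whole slice: first element
        assert_eq!(s[2..].iter().next(), s.get(2));  // tail slice: indexed lookup
    }
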
diff --git a/src/tools/clippy/tests/ui/iter_not_returning_iterator.rs b/src/tools/clippy/tests/ui/iter_not_returning_iterator.rs
new file mode 100644
index 000000000..cce216fc6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_not_returning_iterator.rs
@@ -0,0 +1,74 @@
+#![warn(clippy::iter_not_returning_iterator)]
+
+struct Data {
+ begin: u32,
+}
+
+struct Counter {
+ count: u32,
+}
+
+impl Data {
+ fn iter(&self) -> Counter {
+ todo!()
+ }
+
+ fn iter_mut(&self) -> Counter {
+ todo!()
+ }
+}
+
+struct Data2 {
+ begin: u32,
+}
+
+struct Counter2 {
+ count: u32,
+}
+
+impl Data2 {
+ fn iter(&self) -> Counter2 {
+ todo!()
+ }
+
+ fn iter_mut(&self) -> Counter2 {
+ todo!()
+ }
+}
+
+impl Iterator for Counter {
+ type Item = u32;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ todo!()
+ }
+}
+
+// Issue #8225
+trait Iter {
+ type I;
+ fn iter(&self) -> Self::I;
+}
+
+impl Iter for () {
+ type I = core::slice::Iter<'static, ()>;
+ fn iter(&self) -> Self::I {
+ [].iter()
+ }
+}
+
+struct S;
+impl S {
+ fn iter(&self) -> <() as Iter>::I {
+ ().iter()
+ }
+}
+
+struct S2([u8]);
+impl S2 {
+ fn iter(&self) -> core::slice::Iter<u8> {
+ self.0.iter()
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/iter_not_returning_iterator.stderr b/src/tools/clippy/tests/ui/iter_not_returning_iterator.stderr
new file mode 100644
index 000000000..44f029558
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_not_returning_iterator.stderr
@@ -0,0 +1,22 @@
+error: this method is named `iter` but its return type does not implement `Iterator`
+ --> $DIR/iter_not_returning_iterator.rs:30:5
+ |
+LL | fn iter(&self) -> Counter2 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::iter-not-returning-iterator` implied by `-D warnings`
+
+error: this method is named `iter_mut` but its return type does not implement `Iterator`
+ --> $DIR/iter_not_returning_iterator.rs:34:5
+ |
+LL | fn iter_mut(&self) -> Counter2 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this method is named `iter` but its return type does not implement `Iterator`
+ --> $DIR/iter_not_returning_iterator.rs:50:5
+ |
+LL | fn iter(&self) -> Self::I;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
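iter_not_returning_iterator checks a naming convention: a method called `iter` or `iter_mut` should hand back something that implements `Iterator`. A minimal conforming sketch (the wrapper type is made up for illustration):

    struct Wrapper(Vec<u32>);

    impl Wrapper {
        // Returning the slice iterator satisfies the convention the lint checks.
        fn iter(&self) -> std::slice::Iter<'_, u32> {
            self.0.iter()
        }
    }

    fn main() {
        let w = Wrapper(vec![1, 2, 3]);
        assert_eq!(w.iter().count(), 3);
    }
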
diff --git a/src/tools/clippy/tests/ui/iter_nth.rs b/src/tools/clippy/tests/ui/iter_nth.rs
new file mode 100644
index 000000000..9c21dd82e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_nth.rs
@@ -0,0 +1,56 @@
+// aux-build:option_helpers.rs
+
+#![warn(clippy::iter_nth)]
+
+#[macro_use]
+extern crate option_helpers;
+
+use option_helpers::IteratorFalsePositives;
+use std::collections::VecDeque;
+
+/// Struct to generate false positives for things with `.iter()`.
+#[derive(Copy, Clone)]
+struct HasIter;
+
+impl HasIter {
+ fn iter(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+
+ fn iter_mut(self) -> IteratorFalsePositives {
+ IteratorFalsePositives { foo: 0 }
+ }
+}
+
+/// Checks implementation of `ITER_NTH` lint.
+fn iter_nth() {
+ let mut some_vec = vec![0, 1, 2, 3];
+ let mut boxed_slice: Box<[u8]> = Box::new([0, 1, 2, 3]);
+ let mut some_vec_deque: VecDeque<_> = some_vec.iter().cloned().collect();
+
+ {
+ // Make sure we lint `.iter()` for relevant types.
+ let bad_vec = some_vec.iter().nth(3);
+ let bad_slice = &some_vec[..].iter().nth(3);
+ let bad_boxed_slice = boxed_slice.iter().nth(3);
+ let bad_vec_deque = some_vec_deque.iter().nth(3);
+ }
+
+ {
+ // Make sure we lint `.iter_mut()` for relevant types.
+ let bad_vec = some_vec.iter_mut().nth(3);
+ }
+ {
+ let bad_slice = &some_vec[..].iter_mut().nth(3);
+ }
+ {
+ let bad_vec_deque = some_vec_deque.iter_mut().nth(3);
+ }
+
+ // Make sure we don't lint for non-relevant types.
+ let false_positive = HasIter;
+ let ok = false_positive.iter().nth(3);
+ let ok_mut = false_positive.iter_mut().nth(3);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/iter_nth.stderr b/src/tools/clippy/tests/ui/iter_nth.stderr
new file mode 100644
index 000000000..d00b2fb67
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_nth.stderr
@@ -0,0 +1,59 @@
+error: called `.iter().nth()` on a Vec
+ --> $DIR/iter_nth.rs:33:23
+ |
+LL | let bad_vec = some_vec.iter().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::iter-nth` implied by `-D warnings`
+ = help: calling `.get()` is both faster and more readable
+
+error: called `.iter().nth()` on a slice
+ --> $DIR/iter_nth.rs:34:26
+ |
+LL | let bad_slice = &some_vec[..].iter().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: calling `.get()` is both faster and more readable
+
+error: called `.iter().nth()` on a slice
+ --> $DIR/iter_nth.rs:35:31
+ |
+LL | let bad_boxed_slice = boxed_slice.iter().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: calling `.get()` is both faster and more readable
+
+error: called `.iter().nth()` on a VecDeque
+ --> $DIR/iter_nth.rs:36:29
+ |
+LL | let bad_vec_deque = some_vec_deque.iter().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: calling `.get()` is both faster and more readable
+
+error: called `.iter_mut().nth()` on a Vec
+ --> $DIR/iter_nth.rs:41:23
+ |
+LL | let bad_vec = some_vec.iter_mut().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: calling `.get_mut()` is both faster and more readable
+
+error: called `.iter_mut().nth()` on a slice
+ --> $DIR/iter_nth.rs:44:26
+ |
+LL | let bad_slice = &some_vec[..].iter_mut().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: calling `.get_mut()` is both faster and more readable
+
+error: called `.iter_mut().nth()` on a VecDeque
+ --> $DIR/iter_nth.rs:47:29
+ |
+LL | let bad_vec_deque = some_vec_deque.iter_mut().nth(3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: calling `.get_mut()` is both faster and more readable
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iter_nth_zero.fixed b/src/tools/clippy/tests/ui/iter_nth_zero.fixed
new file mode 100644
index 000000000..f23671c26
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_nth_zero.fixed
@@ -0,0 +1,31 @@
+// run-rustfix
+
+#![warn(clippy::iter_nth_zero)]
+use std::collections::HashSet;
+
+struct Foo;
+
+impl Foo {
+ fn nth(&self, index: usize) -> usize {
+ index + 1
+ }
+}
+
+fn main() {
+ let f = Foo {};
+ f.nth(0); // lint does not apply here
+
+ let mut s = HashSet::new();
+ s.insert(1);
+ let _x = s.iter().next();
+
+ let mut s2 = HashSet::new();
+ s2.insert(2);
+ let mut iter = s2.iter();
+ let _y = iter.next();
+
+ let mut s3 = HashSet::new();
+ s3.insert(3);
+ let mut iter2 = s3.iter();
+ let _unwrapped = iter2.next().unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/iter_nth_zero.rs b/src/tools/clippy/tests/ui/iter_nth_zero.rs
new file mode 100644
index 000000000..7c968d498
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_nth_zero.rs
@@ -0,0 +1,31 @@
+// run-rustfix
+
+#![warn(clippy::iter_nth_zero)]
+use std::collections::HashSet;
+
+struct Foo;
+
+impl Foo {
+ fn nth(&self, index: usize) -> usize {
+ index + 1
+ }
+}
+
+fn main() {
+ let f = Foo {};
+ f.nth(0); // lint does not apply here
+
+ let mut s = HashSet::new();
+ s.insert(1);
+ let _x = s.iter().nth(0);
+
+ let mut s2 = HashSet::new();
+ s2.insert(2);
+ let mut iter = s2.iter();
+ let _y = iter.nth(0);
+
+ let mut s3 = HashSet::new();
+ s3.insert(3);
+ let mut iter2 = s3.iter();
+ let _unwrapped = iter2.nth(0).unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/iter_nth_zero.stderr b/src/tools/clippy/tests/ui/iter_nth_zero.stderr
new file mode 100644
index 000000000..29c56f3a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_nth_zero.stderr
@@ -0,0 +1,22 @@
+error: called `.nth(0)` on a `std::iter::Iterator`, when `.next()` is equivalent
+ --> $DIR/iter_nth_zero.rs:20:14
+ |
+LL | let _x = s.iter().nth(0);
+ | ^^^^^^^^^^^^^^^ help: try calling `.next()` instead of `.nth(0)`: `s.iter().next()`
+ |
+ = note: `-D clippy::iter-nth-zero` implied by `-D warnings`
+
+error: called `.nth(0)` on a `std::iter::Iterator`, when `.next()` is equivalent
+ --> $DIR/iter_nth_zero.rs:25:14
+ |
+LL | let _y = iter.nth(0);
+ | ^^^^^^^^^^^ help: try calling `.next()` instead of `.nth(0)`: `iter.next()`
+
+error: called `.nth(0)` on a `std::iter::Iterator`, when `.next()` is equivalent
+ --> $DIR/iter_nth_zero.rs:30:22
+ |
+LL | let _unwrapped = iter2.nth(0).unwrap();
+ | ^^^^^^^^^^^^ help: try calling `.next()` instead of `.nth(0)`: `iter2.next()`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed b/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed
new file mode 100644
index 000000000..c100705d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed
@@ -0,0 +1,55 @@
+// run-rustfix
+#![warn(clippy::iter_overeager_cloned, clippy::redundant_clone, clippy::filter_next)]
+#![allow(dead_code, clippy::let_unit_value)]
+
+fn main() {
+ let vec = vec!["1".to_string(), "2".to_string(), "3".to_string()];
+
+ let _: Option<String> = vec.iter().last().cloned();
+
+ let _: Option<String> = vec.iter().chain(vec.iter()).next().cloned();
+
+ let _: usize = vec.iter().filter(|x| x == &"2").count();
+
+ let _: Vec<_> = vec.iter().take(2).cloned().collect();
+
+ let _: Vec<_> = vec.iter().skip(2).cloned().collect();
+
+ let _ = vec.iter().filter(|x| x == &"2").nth(2).cloned();
+
+ let _ = [Some(Some("str".to_string())), Some(Some("str".to_string()))]
+ .iter()
+ .flatten().cloned();
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().filter(|x| x.starts_with('2'));
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().map(|x| x.len());
+
+ // This would fail if changed.
+ let _ = vec.iter().cloned().map(|x| x + "2");
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().find(|x| x == "2");
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().for_each(|x| assert!(!x.is_empty()));
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().all(|x| x.len() == 1);
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().any(|x| x.len() == 1);
+
+ // Should probably stay as it is.
+ let _ = [0, 1, 2, 3, 4].iter().cloned().take(10);
+
+ // `&Range<_>` doesn't implement `IntoIterator`
+ let _ = [0..1, 2..5].iter().cloned().flatten();
+}
+
+// #8527
+fn cloned_flatten(x: Option<&Option<String>>) -> Option<String> {
+ x.cloned().flatten()
+}
diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.rs b/src/tools/clippy/tests/ui/iter_overeager_cloned.rs
new file mode 100644
index 000000000..2caa88020
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.rs
@@ -0,0 +1,56 @@
+// run-rustfix
+#![warn(clippy::iter_overeager_cloned, clippy::redundant_clone, clippy::filter_next)]
+#![allow(dead_code, clippy::let_unit_value)]
+
+fn main() {
+ let vec = vec!["1".to_string(), "2".to_string(), "3".to_string()];
+
+ let _: Option<String> = vec.iter().cloned().last();
+
+ let _: Option<String> = vec.iter().chain(vec.iter()).cloned().next();
+
+ let _: usize = vec.iter().filter(|x| x == &"2").cloned().count();
+
+ let _: Vec<_> = vec.iter().cloned().take(2).collect();
+
+ let _: Vec<_> = vec.iter().cloned().skip(2).collect();
+
+ let _ = vec.iter().filter(|x| x == &"2").cloned().nth(2);
+
+ let _ = [Some(Some("str".to_string())), Some(Some("str".to_string()))]
+ .iter()
+ .cloned()
+ .flatten();
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().filter(|x| x.starts_with('2'));
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().map(|x| x.len());
+
+ // This would fail if changed.
+ let _ = vec.iter().cloned().map(|x| x + "2");
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().find(|x| x == "2");
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().for_each(|x| assert!(!x.is_empty()));
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().all(|x| x.len() == 1);
+
+ // Not implemented yet
+ let _ = vec.iter().cloned().any(|x| x.len() == 1);
+
+ // Should probably stay as it is.
+ let _ = [0, 1, 2, 3, 4].iter().cloned().take(10);
+
+ // `&Range<_>` doesn't implement `IntoIterator`
+ let _ = [0..1, 2..5].iter().cloned().flatten();
+}
+
+// #8527
+fn cloned_flatten(x: Option<&Option<String>>) -> Option<String> {
+ x.cloned().flatten()
+}
diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr
new file mode 100644
index 000000000..dcae7cecd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr
@@ -0,0 +1,70 @@
+error: unnecessarily eager cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:8:29
+ |
+LL | let _: Option<String> = vec.iter().cloned().last();
+ | ^^^^^^^^^^----------------
+ | |
+ | help: try this: `.last().cloned()`
+ |
+ = note: `-D clippy::iter-overeager-cloned` implied by `-D warnings`
+
+error: unnecessarily eager cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:10:29
+ |
+LL | let _: Option<String> = vec.iter().chain(vec.iter()).cloned().next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------
+ | |
+ | help: try this: `.next().cloned()`
+
+error: unneeded cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:12:20
+ |
+LL | let _: usize = vec.iter().filter(|x| x == &"2").cloned().count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-----------------
+ | |
+ | help: try this: `.count()`
+ |
+ = note: `-D clippy::redundant-clone` implied by `-D warnings`
+
+error: unnecessarily eager cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:14:21
+ |
+LL | let _: Vec<_> = vec.iter().cloned().take(2).collect();
+ | ^^^^^^^^^^-----------------
+ | |
+ | help: try this: `.take(2).cloned()`
+
+error: unnecessarily eager cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:16:21
+ |
+LL | let _: Vec<_> = vec.iter().cloned().skip(2).collect();
+ | ^^^^^^^^^^-----------------
+ | |
+ | help: try this: `.skip(2).cloned()`
+
+error: unnecessarily eager cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:18:13
+ |
+LL | let _ = vec.iter().filter(|x| x == &"2").cloned().nth(2);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------
+ | |
+ | help: try this: `.nth(2).cloned()`
+
+error: unnecessarily eager cloning of iterator items
+ --> $DIR/iter_overeager_cloned.rs:20:13
+ |
+LL | let _ = [Some(Some("str".to_string())), Some(Some("str".to_string()))]
+ | _____________^
+LL | | .iter()
+LL | | .cloned()
+LL | | .flatten();
+ | |__________________^
+ |
+help: try this
+ |
+LL ~ .iter()
+LL ~ .flatten().cloned();
+ |
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iter_skip_next.fixed b/src/tools/clippy/tests/ui/iter_skip_next.fixed
new file mode 100644
index 000000000..2db4c2bee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_next.fixed
@@ -0,0 +1,37 @@
+// run-rustfix
+// aux-build:option_helpers.rs
+
+#![warn(clippy::iter_skip_next)]
+#![allow(clippy::blacklisted_name)]
+#![allow(clippy::iter_nth)]
+#![allow(unused_mut, dead_code)]
+
+extern crate option_helpers;
+
+use option_helpers::IteratorFalsePositives;
+
+/// Checks implementation of `ITER_SKIP_NEXT` lint
+fn main() {
+ let some_vec = vec![0, 1, 2, 3];
+ let _ = some_vec.iter().nth(42);
+ let _ = some_vec.iter().cycle().nth(42);
+ let _ = (1..10).nth(10);
+ let _ = &some_vec[..].iter().nth(3);
+ let foo = IteratorFalsePositives { foo: 0 };
+ let _ = foo.skip(42).next();
+ let _ = foo.filter().skip(42).next();
+
+ // fix #8128
+ let test_string = "1|1 2";
+ let mut sp = test_string.split('|').map(|s| s.trim());
+ let _: Vec<&str> = sp.nth(1).unwrap().split(' ').collect();
+ if let Some(mut s) = Some(test_string.split('|').map(|s| s.trim())) {
+ let _: Vec<&str> = s.nth(1).unwrap().split(' ').collect();
+ };
+ fn check<T>(mut s: T)
+ where
+ T: Iterator<Item = String>,
+ {
+ let _: Vec<&str> = s.nth(1).unwrap().split(' ').collect();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/iter_skip_next.rs b/src/tools/clippy/tests/ui/iter_skip_next.rs
new file mode 100644
index 000000000..692edb9ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_next.rs
@@ -0,0 +1,37 @@
+// run-rustfix
+// aux-build:option_helpers.rs
+
+#![warn(clippy::iter_skip_next)]
+#![allow(clippy::blacklisted_name)]
+#![allow(clippy::iter_nth)]
+#![allow(unused_mut, dead_code)]
+
+extern crate option_helpers;
+
+use option_helpers::IteratorFalsePositives;
+
+/// Checks implementation of `ITER_SKIP_NEXT` lint
+fn main() {
+ let some_vec = vec![0, 1, 2, 3];
+ let _ = some_vec.iter().skip(42).next();
+ let _ = some_vec.iter().cycle().skip(42).next();
+ let _ = (1..10).skip(10).next();
+ let _ = &some_vec[..].iter().skip(3).next();
+ let foo = IteratorFalsePositives { foo: 0 };
+ let _ = foo.skip(42).next();
+ let _ = foo.filter().skip(42).next();
+
+ // fix #8128
+ let test_string = "1|1 2";
+ let mut sp = test_string.split('|').map(|s| s.trim());
+ let _: Vec<&str> = sp.skip(1).next().unwrap().split(' ').collect();
+ if let Some(mut s) = Some(test_string.split('|').map(|s| s.trim())) {
+ let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ };
+ fn check<T>(mut s: T)
+ where
+ T: Iterator<Item = String>,
+ {
+ let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/iter_skip_next.stderr b/src/tools/clippy/tests/ui/iter_skip_next.stderr
new file mode 100644
index 000000000..ca6970b27
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_next.stderr
@@ -0,0 +1,46 @@
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:16:28
+ |
+LL | let _ = some_vec.iter().skip(42).next();
+ | ^^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(42)`
+ |
+ = note: `-D clippy::iter-skip-next` implied by `-D warnings`
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:17:36
+ |
+LL | let _ = some_vec.iter().cycle().skip(42).next();
+ | ^^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(42)`
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:18:20
+ |
+LL | let _ = (1..10).skip(10).next();
+ | ^^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(10)`
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:19:33
+ |
+LL | let _ = &some_vec[..].iter().skip(3).next();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(3)`
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:27:26
+ |
+LL | let _: Vec<&str> = sp.skip(1).next().unwrap().split(' ').collect();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(1)`
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:29:29
+ |
+LL | let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(1)`
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next.rs:35:29
+ |
+LL | let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(1)`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iter_skip_next_unfixable.rs b/src/tools/clippy/tests/ui/iter_skip_next_unfixable.rs
new file mode 100644
index 000000000..3607330cf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_next_unfixable.rs
@@ -0,0 +1,19 @@
+#![warn(clippy::iter_skip_next)]
+#![allow(dead_code)]
+
+/// Checks implementation of `ITER_SKIP_NEXT` lint
+fn main() {
+ // fix #8128
+ let test_string = "1|1 2";
+ let sp = test_string.split('|').map(|s| s.trim());
+ let _: Vec<&str> = sp.skip(1).next().unwrap().split(' ').collect();
+ if let Some(s) = Some(test_string.split('|').map(|s| s.trim())) {
+ let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ };
+ fn check<T>(s: T)
+ where
+ T: Iterator<Item = String>,
+ {
+ let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/iter_skip_next_unfixable.stderr b/src/tools/clippy/tests/ui/iter_skip_next_unfixable.stderr
new file mode 100644
index 000000000..74c327c74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_next_unfixable.stderr
@@ -0,0 +1,39 @@
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next_unfixable.rs:9:26
+ |
+LL | let _: Vec<&str> = sp.skip(1).next().unwrap().split(' ').collect();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(1)`
+ |
+ = note: `-D clippy::iter-skip-next` implied by `-D warnings`
+help: for this change `sp` has to be mutable
+ --> $DIR/iter_skip_next_unfixable.rs:8:9
+ |
+LL | let sp = test_string.split('|').map(|s| s.trim());
+ | ^^
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next_unfixable.rs:11:29
+ |
+LL | let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(1)`
+ |
+help: for this change `s` has to be mutable
+ --> $DIR/iter_skip_next_unfixable.rs:10:17
+ |
+LL | if let Some(s) = Some(test_string.split('|').map(|s| s.trim())) {
+ | ^
+
+error: called `skip(..).next()` on an iterator
+ --> $DIR/iter_skip_next_unfixable.rs:17:29
+ |
+LL | let _: Vec<&str> = s.skip(1).next().unwrap().split(' ').collect();
+ | ^^^^^^^^^^^^^^^ help: use `nth` instead: `.nth(1)`
+ |
+help: for this change `s` has to be mutable
+ --> $DIR/iter_skip_next_unfixable.rs:13:17
+ |
+LL | fn check<T>(s: T)
+ | ^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iter_with_drain.fixed b/src/tools/clippy/tests/ui/iter_with_drain.fixed
new file mode 100644
index 000000000..0330d5549
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_with_drain.fixed
@@ -0,0 +1,65 @@
+// run-rustfix
+// will emit unused mut warnings after fixing
+#![allow(unused_mut)]
+// will emit needless collect warnings after fixing
+#![allow(clippy::needless_collect)]
+#![warn(clippy::iter_with_drain)]
+use std::collections::{BinaryHeap, HashMap, HashSet, VecDeque};
+
+fn full() {
+ let mut a = vec!["aaa".to_string(), "bbb".to_string()];
+ let mut a: BinaryHeap<_> = a.into_iter().collect();
+ let mut a: HashSet<_> = a.drain().collect();
+ let mut a: VecDeque<_> = a.drain().collect();
+ let mut a: Vec<_> = a.into_iter().collect();
+ let mut a: HashMap<_, _> = a.into_iter().map(|x| (x.clone(), x)).collect();
+ let _: Vec<(String, String)> = a.drain().collect();
+}
+
+fn closed() {
+ let mut a = vec!["aaa".to_string(), "bbb".to_string()];
+ let mut a: BinaryHeap<_> = a.into_iter().collect();
+ let mut a: HashSet<_> = a.drain().collect();
+ let mut a: VecDeque<_> = a.drain().collect();
+ let mut a: Vec<_> = a.into_iter().collect();
+ let mut a: HashMap<_, _> = a.into_iter().map(|x| (x.clone(), x)).collect();
+ let _: Vec<(String, String)> = a.drain().collect();
+}
+
+fn should_not_help() {
+ let mut a = vec!["aaa".to_string(), "bbb".to_string()];
+ let mut a: BinaryHeap<_> = a.drain(1..).collect();
+ let mut a: HashSet<_> = a.drain().collect();
+ let mut a: VecDeque<_> = a.drain().collect();
+ let mut a: Vec<_> = a.drain(..a.len() - 1).collect();
+ let mut a: HashMap<_, _> = a.drain(1..a.len() - 1).map(|x| (x.clone(), x)).collect();
+ let _: Vec<(String, String)> = a.drain().collect();
+
+ let mut b = vec!["aaa".to_string(), "bbb".to_string()];
+ let _: Vec<_> = b.drain(0..a.len()).collect();
+}
+
+fn _closed_range(mut x: Vec<String>) {
+ let _: Vec<String> = x.drain(0..=x.len()).collect();
+}
+
+fn _with_mut(x: &mut Vec<String>, y: &mut VecDeque<String>) {
+ let _: Vec<String> = x.drain(..).collect();
+ let _: Vec<String> = y.drain(..).collect();
+}
+
+#[derive(Default)]
+struct Bomb {
+ fire: Vec<u8>,
+}
+
+fn should_not_help_0(bomb: &mut Bomb) {
+ let _: Vec<u8> = bomb.fire.drain(..).collect();
+}
+
+fn main() {
+ full();
+ closed();
+ should_not_help();
+ should_not_help_0(&mut Bomb::default());
+}
diff --git a/src/tools/clippy/tests/ui/iter_with_drain.rs b/src/tools/clippy/tests/ui/iter_with_drain.rs
new file mode 100644
index 000000000..993936fb8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_with_drain.rs
@@ -0,0 +1,65 @@
+// run-rustfix
+// will emit unused mut warnings after fixing
+#![allow(unused_mut)]
+// will emit needless collect warnings after fixing
+#![allow(clippy::needless_collect)]
+#![warn(clippy::iter_with_drain)]
+use std::collections::{BinaryHeap, HashMap, HashSet, VecDeque};
+
+fn full() {
+ let mut a = vec!["aaa".to_string(), "bbb".to_string()];
+ let mut a: BinaryHeap<_> = a.drain(..).collect();
+ let mut a: HashSet<_> = a.drain().collect();
+ let mut a: VecDeque<_> = a.drain().collect();
+ let mut a: Vec<_> = a.drain(..).collect();
+ let mut a: HashMap<_, _> = a.drain(..).map(|x| (x.clone(), x)).collect();
+ let _: Vec<(String, String)> = a.drain().collect();
+}
+
+fn closed() {
+ let mut a = vec!["aaa".to_string(), "bbb".to_string()];
+ let mut a: BinaryHeap<_> = a.drain(0..).collect();
+ let mut a: HashSet<_> = a.drain().collect();
+ let mut a: VecDeque<_> = a.drain().collect();
+ let mut a: Vec<_> = a.drain(..a.len()).collect();
+ let mut a: HashMap<_, _> = a.drain(0..a.len()).map(|x| (x.clone(), x)).collect();
+ let _: Vec<(String, String)> = a.drain().collect();
+}
+
+fn should_not_help() {
+ let mut a = vec!["aaa".to_string(), "bbb".to_string()];
+ let mut a: BinaryHeap<_> = a.drain(1..).collect();
+ let mut a: HashSet<_> = a.drain().collect();
+ let mut a: VecDeque<_> = a.drain().collect();
+ let mut a: Vec<_> = a.drain(..a.len() - 1).collect();
+ let mut a: HashMap<_, _> = a.drain(1..a.len() - 1).map(|x| (x.clone(), x)).collect();
+ let _: Vec<(String, String)> = a.drain().collect();
+
+ let mut b = vec!["aaa".to_string(), "bbb".to_string()];
+ let _: Vec<_> = b.drain(0..a.len()).collect();
+}
+
+fn _closed_range(mut x: Vec<String>) {
+ let _: Vec<String> = x.drain(0..=x.len()).collect();
+}
+
+fn _with_mut(x: &mut Vec<String>, y: &mut VecDeque<String>) {
+ let _: Vec<String> = x.drain(..).collect();
+ let _: Vec<String> = y.drain(..).collect();
+}
+
+#[derive(Default)]
+struct Bomb {
+ fire: Vec<u8>,
+}
+
+fn should_not_help_0(bomb: &mut Bomb) {
+ let _: Vec<u8> = bomb.fire.drain(..).collect();
+}
+
+fn main() {
+ full();
+ closed();
+ should_not_help();
+ should_not_help_0(&mut Bomb::default());
+}
diff --git a/src/tools/clippy/tests/ui/iter_with_drain.stderr b/src/tools/clippy/tests/ui/iter_with_drain.stderr
new file mode 100644
index 000000000..aa394439f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_with_drain.stderr
@@ -0,0 +1,40 @@
+error: `drain(..)` used on a `Vec`
+ --> $DIR/iter_with_drain.rs:11:34
+ |
+LL | let mut a: BinaryHeap<_> = a.drain(..).collect();
+ | ^^^^^^^^^ help: try this: `into_iter()`
+ |
+ = note: `-D clippy::iter-with-drain` implied by `-D warnings`
+
+error: `drain(..)` used on a `VecDeque`
+ --> $DIR/iter_with_drain.rs:14:27
+ |
+LL | let mut a: Vec<_> = a.drain(..).collect();
+ | ^^^^^^^^^ help: try this: `into_iter()`
+
+error: `drain(..)` used on a `Vec`
+ --> $DIR/iter_with_drain.rs:15:34
+ |
+LL | let mut a: HashMap<_, _> = a.drain(..).map(|x| (x.clone(), x)).collect();
+ | ^^^^^^^^^ help: try this: `into_iter()`
+
+error: `drain(..)` used on a `Vec`
+ --> $DIR/iter_with_drain.rs:21:34
+ |
+LL | let mut a: BinaryHeap<_> = a.drain(0..).collect();
+ | ^^^^^^^^^^ help: try this: `into_iter()`
+
+error: `drain(..)` used on a `VecDeque`
+ --> $DIR/iter_with_drain.rs:24:27
+ |
+LL | let mut a: Vec<_> = a.drain(..a.len()).collect();
+ | ^^^^^^^^^^^^^^^^ help: try this: `into_iter()`
+
+error: `drain(..)` used on a `Vec`
+ --> $DIR/iter_with_drain.rs:25:34
+ |
+LL | let mut a: HashMap<_, _> = a.drain(0..a.len()).map(|x| (x.clone(), x)).collect();
+ | ^^^^^^^^^^^^^^^^^ help: try this: `into_iter()`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iterator_step_by_zero.rs b/src/tools/clippy/tests/ui/iterator_step_by_zero.rs
new file mode 100644
index 000000000..13d1cfd42
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iterator_step_by_zero.rs
@@ -0,0 +1,28 @@
+#[warn(clippy::iterator_step_by_zero)]
+fn main() {
+ let _ = vec!["A", "B", "B"].iter().step_by(0);
+ let _ = "XXX".chars().step_by(0);
+ let _ = (0..1).step_by(0);
+
+ // No error, not an iterator.
+ let y = NotIterator;
+ y.step_by(0);
+
+ // No warning for non-zero step
+ let _ = (0..1).step_by(1);
+
+ let _ = (1..).step_by(0);
+ let _ = (1..=2).step_by(0);
+
+ let x = 0..1;
+ let _ = x.step_by(0);
+
+ // check const eval
+ let v1 = vec![1, 2, 3];
+ let _ = v1.iter().step_by(2 / 3);
+}
+
+struct NotIterator;
+impl NotIterator {
+ fn step_by(&self, _: u32) {}
+}
diff --git a/src/tools/clippy/tests/ui/iterator_step_by_zero.stderr b/src/tools/clippy/tests/ui/iterator_step_by_zero.stderr
new file mode 100644
index 000000000..d792aea11
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iterator_step_by_zero.stderr
@@ -0,0 +1,46 @@
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:3:13
+ |
+LL | let _ = vec!["A", "B", "B"].iter().step_by(0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::iterator-step-by-zero` implied by `-D warnings`
+
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:4:13
+ |
+LL | let _ = "XXX".chars().step_by(0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:5:13
+ |
+LL | let _ = (0..1).step_by(0);
+ | ^^^^^^^^^^^^^^^^^
+
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:14:13
+ |
+LL | let _ = (1..).step_by(0);
+ | ^^^^^^^^^^^^^^^^
+
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:15:13
+ |
+LL | let _ = (1..=2).step_by(0);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:18:13
+ |
+LL | let _ = x.step_by(0);
+ | ^^^^^^^^^^^^
+
+error: `Iterator::step_by(0)` will panic at runtime
+ --> $DIR/iterator_step_by_zero.rs:22:13
+ |
+LL | let _ = v1.iter().step_by(2 / 3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/large_const_arrays.fixed b/src/tools/clippy/tests/ui/large_const_arrays.fixed
new file mode 100644
index 000000000..c5af07c8a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_const_arrays.fixed
@@ -0,0 +1,37 @@
+// run-rustfix
+
+#![warn(clippy::large_const_arrays)]
+#![allow(dead_code)]
+
+#[derive(Clone, Copy)]
+pub struct S {
+ pub data: [u64; 32],
+}
+
+// Should lint
+pub(crate) static FOO_PUB_CRATE: [u32; 1_000_000] = [0u32; 1_000_000];
+pub static FOO_PUB: [u32; 1_000_000] = [0u32; 1_000_000];
+static FOO: [u32; 1_000_000] = [0u32; 1_000_000];
+
+// Good
+pub(crate) const G_FOO_PUB_CRATE: [u32; 1_000] = [0u32; 1_000];
+pub const G_FOO_PUB: [u32; 1_000] = [0u32; 1_000];
+const G_FOO: [u32; 1_000] = [0u32; 1_000];
+
+fn main() {
+ // Should lint
+ pub static BAR_PUB: [u32; 1_000_000] = [0u32; 1_000_000];
+ static BAR: [u32; 1_000_000] = [0u32; 1_000_000];
+ pub static BAR_STRUCT_PUB: [S; 5_000] = [S { data: [0; 32] }; 5_000];
+ static BAR_STRUCT: [S; 5_000] = [S { data: [0; 32] }; 5_000];
+ pub static BAR_S_PUB: [Option<&str>; 200_000] = [Some("str"); 200_000];
+ static BAR_S: [Option<&str>; 200_000] = [Some("str"); 200_000];
+
+ // Good
+ pub const G_BAR_PUB: [u32; 1_000] = [0u32; 1_000];
+ const G_BAR: [u32; 1_000] = [0u32; 1_000];
+ pub const G_BAR_STRUCT_PUB: [S; 500] = [S { data: [0; 32] }; 500];
+ const G_BAR_STRUCT: [S; 500] = [S { data: [0; 32] }; 500];
+ pub const G_BAR_S_PUB: [Option<&str>; 200] = [Some("str"); 200];
+ const G_BAR_S: [Option<&str>; 200] = [Some("str"); 200];
+}
diff --git a/src/tools/clippy/tests/ui/large_const_arrays.rs b/src/tools/clippy/tests/ui/large_const_arrays.rs
new file mode 100644
index 000000000..a160b9f8a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_const_arrays.rs
@@ -0,0 +1,37 @@
+// run-rustfix
+
+#![warn(clippy::large_const_arrays)]
+#![allow(dead_code)]
+
+#[derive(Clone, Copy)]
+pub struct S {
+ pub data: [u64; 32],
+}
+
+// Should lint
+pub(crate) const FOO_PUB_CRATE: [u32; 1_000_000] = [0u32; 1_000_000];
+pub const FOO_PUB: [u32; 1_000_000] = [0u32; 1_000_000];
+const FOO: [u32; 1_000_000] = [0u32; 1_000_000];
+
+// Good
+pub(crate) const G_FOO_PUB_CRATE: [u32; 1_000] = [0u32; 1_000];
+pub const G_FOO_PUB: [u32; 1_000] = [0u32; 1_000];
+const G_FOO: [u32; 1_000] = [0u32; 1_000];
+
+fn main() {
+ // Should lint
+ pub const BAR_PUB: [u32; 1_000_000] = [0u32; 1_000_000];
+ const BAR: [u32; 1_000_000] = [0u32; 1_000_000];
+ pub const BAR_STRUCT_PUB: [S; 5_000] = [S { data: [0; 32] }; 5_000];
+ const BAR_STRUCT: [S; 5_000] = [S { data: [0; 32] }; 5_000];
+ pub const BAR_S_PUB: [Option<&str>; 200_000] = [Some("str"); 200_000];
+ const BAR_S: [Option<&str>; 200_000] = [Some("str"); 200_000];
+
+ // Good
+ pub const G_BAR_PUB: [u32; 1_000] = [0u32; 1_000];
+ const G_BAR: [u32; 1_000] = [0u32; 1_000];
+ pub const G_BAR_STRUCT_PUB: [S; 500] = [S { data: [0; 32] }; 500];
+ const G_BAR_STRUCT: [S; 500] = [S { data: [0; 32] }; 500];
+ pub const G_BAR_S_PUB: [Option<&str>; 200] = [Some("str"); 200];
+ const G_BAR_S: [Option<&str>; 200] = [Some("str"); 200];
+}
diff --git a/src/tools/clippy/tests/ui/large_const_arrays.stderr b/src/tools/clippy/tests/ui/large_const_arrays.stderr
new file mode 100644
index 000000000..3fb0acbca
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_const_arrays.stderr
@@ -0,0 +1,76 @@
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:12:1
+ |
+LL | pub(crate) const FOO_PUB_CRATE: [u32; 1_000_000] = [0u32; 1_000_000];
+ | ^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+ |
+ = note: `-D clippy::large-const-arrays` implied by `-D warnings`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:13:1
+ |
+LL | pub const FOO_PUB: [u32; 1_000_000] = [0u32; 1_000_000];
+ | ^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:14:1
+ |
+LL | const FOO: [u32; 1_000_000] = [0u32; 1_000_000];
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:23:5
+ |
+LL | pub const BAR_PUB: [u32; 1_000_000] = [0u32; 1_000_000];
+ | ^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:24:5
+ |
+LL | const BAR: [u32; 1_000_000] = [0u32; 1_000_000];
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:25:5
+ |
+LL | pub const BAR_STRUCT_PUB: [S; 5_000] = [S { data: [0; 32] }; 5_000];
+ | ^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:26:5
+ |
+LL | const BAR_STRUCT: [S; 5_000] = [S { data: [0; 32] }; 5_000];
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:27:5
+ |
+LL | pub const BAR_S_PUB: [Option<&str>; 200_000] = [Some("str"); 200_000];
+ | ^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: large array defined as const
+ --> $DIR/large_const_arrays.rs:28:5
+ |
+LL | const BAR_S: [Option<&str>; 200_000] = [Some("str"); 200_000];
+ | -----^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: make this a static item: `static`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/large_digit_groups.fixed b/src/tools/clippy/tests/ui/large_digit_groups.fixed
new file mode 100644
index 000000000..3430c137e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_digit_groups.fixed
@@ -0,0 +1,31 @@
+// run-rustfix
+#![warn(clippy::large_digit_groups)]
+
+fn main() {
+ macro_rules! mac {
+ () => {
+ 0b1_10110_i64
+ };
+ }
+
+ let _good = (
+ 0b1011_i64,
+ 0o1_234_u32,
+ 0x0123_4567,
+ 1_2345_6789,
+ 1234_f32,
+ 1_234.12_f32,
+ 1_234.123_f32,
+ 1.123_4_f32,
+ );
+ let _bad = (
+ 0b11_0110_i64,
+ 0xdead_beef_usize,
+ 123_456_f32,
+ 123_456.12_f32,
+ 123_456.123_45_f64,
+ 123_456.123_456_f64,
+ );
+ // Ignore literals in macros
+ let _ = mac!();
+}
diff --git a/src/tools/clippy/tests/ui/large_digit_groups.rs b/src/tools/clippy/tests/ui/large_digit_groups.rs
new file mode 100644
index 000000000..ac116d5db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_digit_groups.rs
@@ -0,0 +1,31 @@
+// run-rustfix
+#![warn(clippy::large_digit_groups)]
+
+fn main() {
+ macro_rules! mac {
+ () => {
+ 0b1_10110_i64
+ };
+ }
+
+ let _good = (
+ 0b1011_i64,
+ 0o1_234_u32,
+ 0x1_234_567,
+ 1_2345_6789,
+ 1234_f32,
+ 1_234.12_f32,
+ 1_234.123_f32,
+ 1.123_4_f32,
+ );
+ let _bad = (
+ 0b1_10110_i64,
+ 0xd_e_adbee_f_usize,
+ 1_23456_f32,
+ 1_23456.12_f32,
+ 1_23456.12345_f64,
+ 1_23456.12345_6_f64,
+ );
+ // Ignore literals in macros
+ let _ = mac!();
+}
diff --git a/src/tools/clippy/tests/ui/large_digit_groups.stderr b/src/tools/clippy/tests/ui/large_digit_groups.stderr
new file mode 100644
index 000000000..13d108b56
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_digit_groups.stderr
@@ -0,0 +1,48 @@
+error: digits of hex or binary literal not grouped by four
+ --> $DIR/large_digit_groups.rs:14:9
+ |
+LL | 0x1_234_567,
+ | ^^^^^^^^^^^ help: consider: `0x0123_4567`
+ |
+ = note: `-D clippy::unusual-byte-groupings` implied by `-D warnings`
+
+error: digits of hex or binary literal not grouped by four
+ --> $DIR/large_digit_groups.rs:22:9
+ |
+LL | 0b1_10110_i64,
+ | ^^^^^^^^^^^^^ help: consider: `0b11_0110_i64`
+
+error: digits of hex or binary literal not grouped by four
+ --> $DIR/large_digit_groups.rs:23:9
+ |
+LL | 0xd_e_adbee_f_usize,
+ | ^^^^^^^^^^^^^^^^^^^ help: consider: `0xdead_beef_usize`
+
+error: digit groups should be smaller
+ --> $DIR/large_digit_groups.rs:24:9
+ |
+LL | 1_23456_f32,
+ | ^^^^^^^^^^^ help: consider: `123_456_f32`
+ |
+ = note: `-D clippy::large-digit-groups` implied by `-D warnings`
+
+error: digit groups should be smaller
+ --> $DIR/large_digit_groups.rs:25:9
+ |
+LL | 1_23456.12_f32,
+ | ^^^^^^^^^^^^^^ help: consider: `123_456.12_f32`
+
+error: digit groups should be smaller
+ --> $DIR/large_digit_groups.rs:26:9
+ |
+LL | 1_23456.12345_f64,
+ | ^^^^^^^^^^^^^^^^^ help: consider: `123_456.123_45_f64`
+
+error: digit groups should be smaller
+ --> $DIR/large_digit_groups.rs:27:9
+ |
+LL | 1_23456.12345_6_f64,
+ | ^^^^^^^^^^^^^^^^^^^ help: consider: `123_456.123_456_f64`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/large_enum_variant.rs b/src/tools/clippy/tests/ui/large_enum_variant.rs
new file mode 100644
index 000000000..23152a133
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_enum_variant.rs
@@ -0,0 +1,135 @@
+// aux-build:macro_rules.rs
+
+#![allow(dead_code)]
+#![allow(unused_variables)]
+#![warn(clippy::large_enum_variant)]
+
+#[macro_use]
+extern crate macro_rules;
+
+enum LargeEnum {
+ A(i32),
+ B([i32; 8000]),
+}
+
+enum GenericEnumOk<T> {
+ A(i32),
+ B([T; 8000]),
+}
+
+enum GenericEnum2<T> {
+ A(i32),
+ B([i32; 8000]),
+ C(T, [i32; 8000]),
+}
+
+trait SomeTrait {
+ type Item;
+}
+
+enum LargeEnumGeneric<A: SomeTrait> {
+ Var(A::Item),
+}
+
+enum LargeEnum2 {
+ VariantOk(i32, u32),
+ ContainingLargeEnum(LargeEnum),
+}
+
+enum LargeEnum3 {
+ ContainingMoreThanOneField(i32, [i32; 8000], [i32; 9500]),
+ VoidVariant,
+ StructLikeLittle { x: i32, y: i32 },
+}
+
+enum LargeEnum4 {
+ VariantOk(i32, u32),
+ StructLikeLarge { x: [i32; 8000], y: i32 },
+}
+
+enum LargeEnum5 {
+ VariantOk(i32, u32),
+ StructLikeLarge2 { x: [i32; 8000] },
+}
+
+enum LargeEnumOk {
+ LargeA([i32; 8000]),
+ LargeB([i32; 8001]),
+}
+
+enum LargeEnum6 {
+ A,
+ B([u8; 255]),
+ C([u8; 200]),
+}
+
+enum LargeEnum7 {
+ A,
+ B([u8; 1255]),
+ C([u8; 200]),
+}
+
+enum LargeEnum8 {
+ VariantOk(i32, u32),
+ ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; 30]),
+}
+
+enum LargeEnum9 {
+ A(Struct<()>),
+ B(Struct2),
+}
+
+enum LargeEnumOk2<T> {
+ A(T),
+ B(Struct2),
+}
+
+enum LargeEnumOk3<T> {
+ A(Struct<T>),
+ B(Struct2),
+}
+
+struct Struct<T> {
+ a: i32,
+ t: T,
+}
+
+struct Struct2 {
+ a: [i32; 8000],
+}
+
+#[derive(Copy, Clone)]
+enum CopyableLargeEnum {
+ A(bool),
+ B([u128; 4000]),
+}
+
+enum ManuallyCopyLargeEnum {
+ A(bool),
+ B([u128; 4000]),
+}
+
+impl Clone for ManuallyCopyLargeEnum {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl Copy for ManuallyCopyLargeEnum {}
+
+enum SomeGenericPossiblyCopyEnum<T> {
+ A(bool, std::marker::PhantomData<T>),
+ B([u64; 4000]),
+}
+
+impl<T: Copy> Clone for SomeGenericPossiblyCopyEnum<T> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<T: Copy> Copy for SomeGenericPossiblyCopyEnum<T> {}
+
+fn main() {
+ large_enum_variant!();
+}
diff --git a/src/tools/clippy/tests/ui/large_enum_variant.stderr b/src/tools/clippy/tests/ui/large_enum_variant.stderr
new file mode 100644
index 000000000..024832726
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_enum_variant.stderr
@@ -0,0 +1,197 @@
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:12:5
+ |
+LL | B([i32; 8000]),
+ | ^^^^^^^^^^^^^^ this variant is 32000 bytes
+ |
+ = note: `-D clippy::large-enum-variant` implied by `-D warnings`
+note: and the second-largest variant is 4 bytes:
+ --> $DIR/large_enum_variant.rs:11:5
+ |
+LL | A(i32),
+ | ^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | B(Box<[i32; 8000]>),
+ | ~~~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:36:5
+ |
+LL | ContainingLargeEnum(LargeEnum),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this variant is 32004 bytes
+ |
+note: and the second-largest variant is 8 bytes:
+ --> $DIR/large_enum_variant.rs:35:5
+ |
+LL | VariantOk(i32, u32),
+ | ^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | ContainingLargeEnum(Box<LargeEnum>),
+ | ~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:40:5
+ |
+LL | ContainingMoreThanOneField(i32, [i32; 8000], [i32; 9500]),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this variant is 70004 bytes
+ |
+note: and the second-largest variant is 8 bytes:
+ --> $DIR/large_enum_variant.rs:42:5
+ |
+LL | StructLikeLittle { x: i32, y: i32 },
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | ContainingMoreThanOneField(i32, Box<[i32; 8000]>, Box<[i32; 9500]>),
+ | ~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:47:5
+ |
+LL | StructLikeLarge { x: [i32; 8000], y: i32 },
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this variant is 32004 bytes
+ |
+note: and the second-largest variant is 8 bytes:
+ --> $DIR/large_enum_variant.rs:46:5
+ |
+LL | VariantOk(i32, u32),
+ | ^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | StructLikeLarge { x: Box<[i32; 8000]>, y: i32 },
+ | ~~~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:52:5
+ |
+LL | StructLikeLarge2 { x: [i32; 8000] },
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this variant is 32000 bytes
+ |
+note: and the second-largest variant is 8 bytes:
+ --> $DIR/large_enum_variant.rs:51:5
+ |
+LL | VariantOk(i32, u32),
+ | ^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | StructLikeLarge2 { x: Box<[i32; 8000]> },
+ | ~~~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:68:5
+ |
+LL | B([u8; 1255]),
+ | ^^^^^^^^^^^^^ this variant is 1255 bytes
+ |
+note: and the second-largest variant is 200 bytes:
+ --> $DIR/large_enum_variant.rs:69:5
+ |
+LL | C([u8; 200]),
+ | ^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | B(Box<[u8; 1255]>),
+ | ~~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:74:5
+ |
+LL | ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; 30]),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this variant is 70128 bytes
+ |
+note: and the second-largest variant is 8 bytes:
+ --> $DIR/large_enum_variant.rs:73:5
+ |
+LL | VariantOk(i32, u32),
+ | ^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | ContainingMoreThanOneField(Box<[i32; 8000]>, [i32; 2], Box<[i32; 9500]>, [i32; 30]),
+ | ~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:79:5
+ |
+LL | B(Struct2),
+ | ^^^^^^^^^^ this variant is 32000 bytes
+ |
+note: and the second-largest variant is 4 bytes:
+ --> $DIR/large_enum_variant.rs:78:5
+ |
+LL | A(Struct<()>),
+ | ^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ |
+LL | B(Box<Struct2>),
+ | ~~~~~~~~~~~~
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:104:5
+ |
+LL | B([u128; 4000]),
+ | ^^^^^^^^^^^^^^^ this variant is 64000 bytes
+ |
+note: and the second-largest variant is 1 bytes:
+ --> $DIR/large_enum_variant.rs:103:5
+ |
+LL | A(bool),
+ | ^^^^^^^
+note: boxing a variant would require the type no longer be `Copy`
+ --> $DIR/large_enum_variant.rs:102:6
+ |
+LL | enum CopyableLargeEnum {
+ | ^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ --> $DIR/large_enum_variant.rs:104:5
+ |
+LL | B([u128; 4000]),
+ | ^^^^^^^^^^^^^^^
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:109:5
+ |
+LL | B([u128; 4000]),
+ | ^^^^^^^^^^^^^^^ this variant is 64000 bytes
+ |
+note: and the second-largest variant is 1 bytes:
+ --> $DIR/large_enum_variant.rs:108:5
+ |
+LL | A(bool),
+ | ^^^^^^^
+note: boxing a variant would require the type no longer be `Copy`
+ --> $DIR/large_enum_variant.rs:107:6
+ |
+LL | enum ManuallyCopyLargeEnum {
+ | ^^^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ --> $DIR/large_enum_variant.rs:109:5
+ |
+LL | B([u128; 4000]),
+ | ^^^^^^^^^^^^^^^
+
+error: large size difference between variants
+ --> $DIR/large_enum_variant.rs:122:5
+ |
+LL | B([u64; 4000]),
+ | ^^^^^^^^^^^^^^ this variant is 32000 bytes
+ |
+note: and the second-largest variant is 1 bytes:
+ --> $DIR/large_enum_variant.rs:121:5
+ |
+LL | A(bool, std::marker::PhantomData<T>),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+note: boxing a variant would require the type no longer be `Copy`
+ --> $DIR/large_enum_variant.rs:120:6
+ |
+LL | enum SomeGenericPossiblyCopyEnum<T> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: consider boxing the large fields to reduce the total size of the enum
+ --> $DIR/large_enum_variant.rs:122:5
+ |
+LL | B([u64; 4000]),
+ | ^^^^^^^^^^^^^^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/large_stack_arrays.rs b/src/tools/clippy/tests/ui/large_stack_arrays.rs
new file mode 100644
index 000000000..d9161bfcf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_stack_arrays.rs
@@ -0,0 +1,30 @@
+#![warn(clippy::large_stack_arrays)]
+#![allow(clippy::large_enum_variant)]
+
+#[derive(Clone, Copy)]
+struct S {
+ pub data: [u64; 32],
+}
+
+#[derive(Clone, Copy)]
+enum E {
+ S(S),
+ T(u32),
+}
+
+fn main() {
+ let bad = (
+ [0u32; 20_000_000],
+ [S { data: [0; 32] }; 5000],
+ [Some(""); 20_000_000],
+ [E::T(0); 5000],
+ );
+
+ let good = (
+ [0u32; 1000],
+ [S { data: [0; 32] }; 1000],
+ [Some(""); 1000],
+ [E::T(0); 1000],
+ [(); 20_000_000],
+ );
+}
diff --git a/src/tools/clippy/tests/ui/large_stack_arrays.stderr b/src/tools/clippy/tests/ui/large_stack_arrays.stderr
new file mode 100644
index 000000000..58c0a77c1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_stack_arrays.stderr
@@ -0,0 +1,35 @@
+error: allocating a local array larger than 512000 bytes
+ --> $DIR/large_stack_arrays.rs:17:9
+ |
+LL | [0u32; 20_000_000],
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::large-stack-arrays` implied by `-D warnings`
+ = help: consider allocating on the heap with `vec![0u32; 20_000_000].into_boxed_slice()`
+
+error: allocating a local array larger than 512000 bytes
+ --> $DIR/large_stack_arrays.rs:18:9
+ |
+LL | [S { data: [0; 32] }; 5000],
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider allocating on the heap with `vec![S { data: [0; 32] }; 5000].into_boxed_slice()`
+
+error: allocating a local array larger than 512000 bytes
+ --> $DIR/large_stack_arrays.rs:19:9
+ |
+LL | [Some(""); 20_000_000],
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider allocating on the heap with `vec![Some(""); 20_000_000].into_boxed_slice()`
+
+error: allocating a local array larger than 512000 bytes
+ --> $DIR/large_stack_arrays.rs:20:9
+ |
+LL | [E::T(0); 5000],
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider allocating on the heap with `vec![E::T(0); 5000].into_boxed_slice()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/large_types_passed_by_value.rs b/src/tools/clippy/tests/ui/large_types_passed_by_value.rs
new file mode 100644
index 000000000..7601b5c66
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_types_passed_by_value.rs
@@ -0,0 +1,66 @@
+// normalize-stderr-test "\(\d+ byte\)" -> "(N byte)"
+// normalize-stderr-test "\(limit: \d+ byte\)" -> "(limit: N byte)"
+
+#![warn(clippy::large_types_passed_by_value)]
+
+pub struct Large([u8; 2048]);
+
+#[derive(Clone, Copy)]
+pub struct LargeAndCopy([u8; 2048]);
+
+pub struct Small([u8; 4]);
+
+#[derive(Clone, Copy)]
+pub struct SmallAndCopy([u8; 4]);
+
+fn small(a: Small, b: SmallAndCopy) {}
+fn not_copy(a: Large) {}
+fn by_ref(a: &Large, b: &LargeAndCopy) {}
+fn mutable(mut a: LargeAndCopy) {}
+fn bad(a: LargeAndCopy) {}
+pub fn bad_but_pub(a: LargeAndCopy) {}
+
+impl LargeAndCopy {
+ fn self_is_ok(self) {}
+ fn other_is_not_ok(self, other: LargeAndCopy) {}
+ fn unless_other_can_change(self, mut other: LargeAndCopy) {}
+ pub fn or_were_in_public(self, other: LargeAndCopy) {}
+}
+
+trait LargeTypeDevourer {
+ fn devoure_array(&self, array: [u8; 6666]);
+ fn devoure_tuple(&self, tup: (LargeAndCopy, LargeAndCopy));
+ fn devoure_array_and_tuple_wow(&self, array: [u8; 6666], tup: (LargeAndCopy, LargeAndCopy));
+}
+
+pub trait PubLargeTypeDevourer {
+ fn devoure_array_in_public(&self, array: [u8; 6666]);
+}
+
+struct S;
+impl LargeTypeDevourer for S {
+ fn devoure_array(&self, array: [u8; 6666]) {
+ todo!();
+ }
+ fn devoure_tuple(&self, tup: (LargeAndCopy, LargeAndCopy)) {
+ todo!();
+ }
+ fn devoure_array_and_tuple_wow(&self, array: [u8; 6666], tup: (LargeAndCopy, LargeAndCopy)) {
+ todo!();
+ }
+}
+
+#[inline(always)]
+fn foo_always(x: LargeAndCopy) {
+ todo!();
+}
+#[inline(never)]
+fn foo_never(x: LargeAndCopy) {
+ todo!();
+}
+#[inline]
+fn foo(x: LargeAndCopy) {
+ todo!();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/large_types_passed_by_value.stderr b/src/tools/clippy/tests/ui/large_types_passed_by_value.stderr
new file mode 100644
index 000000000..5f42dcfb9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/large_types_passed_by_value.stderr
@@ -0,0 +1,52 @@
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:20:11
+ |
+LL | fn bad(a: LargeAndCopy) {}
+ | ^^^^^^^^^^^^ help: consider passing by reference instead: `&LargeAndCopy`
+ |
+ = note: `-D clippy::large-types-passed-by-value` implied by `-D warnings`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:25:37
+ |
+LL | fn other_is_not_ok(self, other: LargeAndCopy) {}
+ | ^^^^^^^^^^^^ help: consider passing by reference instead: `&LargeAndCopy`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:31:36
+ |
+LL | fn devoure_array(&self, array: [u8; 6666]);
+ | ^^^^^^^^^^ help: consider passing by reference instead: `&[u8; 6666]`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:32:34
+ |
+LL | fn devoure_tuple(&self, tup: (LargeAndCopy, LargeAndCopy));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider passing by reference instead: `&(LargeAndCopy, LargeAndCopy)`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:33:50
+ |
+LL | fn devoure_array_and_tuple_wow(&self, array: [u8; 6666], tup: (LargeAndCopy, LargeAndCopy));
+ | ^^^^^^^^^^ help: consider passing by reference instead: `&[u8; 6666]`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:33:67
+ |
+LL | fn devoure_array_and_tuple_wow(&self, array: [u8; 6666], tup: (LargeAndCopy, LargeAndCopy));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider passing by reference instead: `&(LargeAndCopy, LargeAndCopy)`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:58:17
+ |
+LL | fn foo_never(x: LargeAndCopy) {
+ | ^^^^^^^^^^^^ help: consider passing by reference instead: `&LargeAndCopy`
+
+error: this argument (N byte) is passed by value, but might be more efficient if passed by reference (limit: N byte)
+ --> $DIR/large_types_passed_by_value.rs:62:11
+ |
+LL | fn foo(x: LargeAndCopy) {
+ | ^^^^^^^^^^^^ help: consider passing by reference instead: `&LargeAndCopy`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/len_without_is_empty.rs b/src/tools/clippy/tests/ui/len_without_is_empty.rs
new file mode 100644
index 000000000..1e938e72b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_without_is_empty.rs
@@ -0,0 +1,285 @@
+#![warn(clippy::len_without_is_empty)]
+#![allow(dead_code, unused)]
+
+pub struct PubOne;
+
+impl PubOne {
+ pub fn len(&self) -> isize {
+ 1
+ }
+}
+
+impl PubOne {
+ // A second impl for this struct -- the error span shouldn't mention this.
+ pub fn irrelevant(&self) -> bool {
+ false
+ }
+}
+
+// Identical to `PubOne`, but with an `allow` attribute on the impl containing `len`.
+pub struct PubAllowed;
+
+#[allow(clippy::len_without_is_empty)]
+impl PubAllowed {
+ pub fn len(&self) -> isize {
+ 1
+ }
+}
+
+// No `allow` attribute on this impl block, but that doesn't matter -- we only require one on the
+// impl containing `len`.
+impl PubAllowed {
+ pub fn irrelevant(&self) -> bool {
+ false
+ }
+}
+
+pub struct PubAllowedFn;
+
+impl PubAllowedFn {
+ #[allow(clippy::len_without_is_empty)]
+ pub fn len(&self) -> isize {
+ 1
+ }
+}
+
+#[allow(clippy::len_without_is_empty)]
+pub struct PubAllowedStruct;
+
+impl PubAllowedStruct {
+ pub fn len(&self) -> isize {
+ 1
+ }
+}
+
+pub trait PubTraitsToo {
+ fn len(&self) -> isize;
+}
+
+impl PubTraitsToo for One {
+ fn len(&self) -> isize {
+ 0
+ }
+}
+
+pub struct HasIsEmpty;
+
+impl HasIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+pub struct HasWrongIsEmpty;
+
+impl HasWrongIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ pub fn is_empty(&self, x: u32) -> bool {
+ false
+ }
+}
+
+pub struct MismatchedSelf;
+
+impl MismatchedSelf {
+ pub fn len(self) -> isize {
+ 1
+ }
+
+ pub fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+struct NotPubOne;
+
+impl NotPubOne {
+ pub fn len(&self) -> isize {
+ // No error; `len` is pub but `NotPubOne` is not exported anyway.
+ 1
+ }
+}
+
+struct One;
+
+impl One {
+ fn len(&self) -> isize {
+ // No error; `len` is private; see issue #1085.
+ 1
+ }
+}
+
+trait TraitsToo {
+ fn len(&self) -> isize;
+ // No error; `len` is private; see issue #1085.
+}
+
+impl TraitsToo for One {
+ fn len(&self) -> isize {
+ 0
+ }
+}
+
+struct HasPrivateIsEmpty;
+
+impl HasPrivateIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+struct Wither;
+
+pub trait WithIsEmpty {
+ fn len(&self) -> isize;
+ fn is_empty(&self) -> bool;
+}
+
+impl WithIsEmpty for Wither {
+ fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+pub trait Empty {
+ fn is_empty(&self) -> bool;
+}
+
+pub trait InheritingEmpty: Empty {
+ // Must not trigger `LEN_WITHOUT_IS_EMPTY`.
+ fn len(&self) -> isize;
+}
+
+// This used to ICE.
+pub trait Foo: Sized {}
+
+pub trait DependsOnFoo: Foo {
+ fn len(&mut self) -> usize;
+}
+
+// issue #1562
+pub struct MultipleImpls;
+
+impl MultipleImpls {
+ pub fn len(&self) -> usize {
+ 1
+ }
+}
+
+impl MultipleImpls {
+ pub fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+// issue #6958
+pub struct OptionalLen;
+
+impl OptionalLen {
+ pub fn len(&self) -> Option<usize> {
+ Some(0)
+ }
+
+ pub fn is_empty(&self) -> Option<bool> {
+ Some(true)
+ }
+}
+
+pub struct OptionalLen2;
+impl OptionalLen2 {
+ pub fn len(&self) -> Option<usize> {
+ Some(0)
+ }
+
+ pub fn is_empty(&self) -> bool {
+ true
+ }
+}
+
+pub struct OptionalLen3;
+impl OptionalLen3 {
+ pub fn len(&self) -> usize {
+ 0
+ }
+
+ // should lint, len is not an option
+ pub fn is_empty(&self) -> Option<bool> {
+ None
+ }
+}
+
+pub struct ResultLen;
+impl ResultLen {
+ pub fn len(&self) -> Result<usize, ()> {
+ Ok(0)
+ }
+
+ // Differing result types
+ pub fn is_empty(&self) -> Option<bool> {
+ Some(true)
+ }
+}
+
+pub struct ResultLen2;
+impl ResultLen2 {
+ pub fn len(&self) -> Result<usize, ()> {
+ Ok(0)
+ }
+
+ pub fn is_empty(&self) -> Result<bool, ()> {
+ Ok(true)
+ }
+}
+
+pub struct ResultLen3;
+impl ResultLen3 {
+ pub fn len(&self) -> Result<usize, ()> {
+ Ok(0)
+ }
+
+ // Non-fallible result is ok.
+ pub fn is_empty(&self) -> bool {
+ true
+ }
+}
+
+pub struct OddLenSig;
+impl OddLenSig {
+ // don't lint
+ pub fn len(&self) -> bool {
+ true
+ }
+}
+
+// issue #6958
+pub struct AsyncLen;
+impl AsyncLen {
+ async fn async_task(&self) -> bool {
+ true
+ }
+
+ pub async fn len(&self) -> usize {
+ if self.async_task().await { 0 } else { 1 }
+ }
+
+ pub async fn is_empty(&self) -> bool {
+ self.len().await == 0
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/len_without_is_empty.stderr b/src/tools/clippy/tests/ui/len_without_is_empty.stderr
new file mode 100644
index 000000000..a1f48f761
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_without_is_empty.stderr
@@ -0,0 +1,123 @@
+error: struct `PubOne` has a public `len` method, but no `is_empty` method
+ --> $DIR/len_without_is_empty.rs:7:5
+ |
+LL | pub fn len(&self) -> isize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::len-without-is-empty` implied by `-D warnings`
+
+error: trait `PubTraitsToo` has a `len` method but no (possibly inherited) `is_empty` method
+ --> $DIR/len_without_is_empty.rs:55:1
+ |
+LL | / pub trait PubTraitsToo {
+LL | | fn len(&self) -> isize;
+LL | | }
+ | |_^
+
+error: struct `HasIsEmpty` has a public `len` method, but a private `is_empty` method
+ --> $DIR/len_without_is_empty.rs:68:5
+ |
+LL | pub fn len(&self) -> isize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `is_empty` defined here
+ --> $DIR/len_without_is_empty.rs:72:5
+ |
+LL | fn is_empty(&self) -> bool {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: struct `HasWrongIsEmpty` has a public `len` method, but the `is_empty` method has an unexpected signature
+ --> $DIR/len_without_is_empty.rs:80:5
+ |
+LL | pub fn len(&self) -> isize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `is_empty` defined here
+ --> $DIR/len_without_is_empty.rs:84:5
+ |
+LL | pub fn is_empty(&self, x: u32) -> bool {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: expected signature: `(&self) -> bool`
+
+error: struct `MismatchedSelf` has a public `len` method, but the `is_empty` method has an unexpected signature
+ --> $DIR/len_without_is_empty.rs:92:5
+ |
+LL | pub fn len(self) -> isize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `is_empty` defined here
+ --> $DIR/len_without_is_empty.rs:96:5
+ |
+LL | pub fn is_empty(&self) -> bool {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: expected signature: `(self) -> bool`
+
+error: trait `DependsOnFoo` has a `len` method but no (possibly inherited) `is_empty` method
+ --> $DIR/len_without_is_empty.rs:171:1
+ |
+LL | / pub trait DependsOnFoo: Foo {
+LL | | fn len(&mut self) -> usize;
+LL | | }
+ | |_^
+
+error: struct `OptionalLen3` has a public `len` method, but the `is_empty` method has an unexpected signature
+ --> $DIR/len_without_is_empty.rs:216:5
+ |
+LL | pub fn len(&self) -> usize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `is_empty` defined here
+ --> $DIR/len_without_is_empty.rs:221:5
+ |
+LL | pub fn is_empty(&self) -> Option<bool> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: expected signature: `(&self) -> bool`
+
+error: struct `ResultLen` has a public `len` method, but the `is_empty` method has an unexpected signature
+ --> $DIR/len_without_is_empty.rs:228:5
+ |
+LL | pub fn len(&self) -> Result<usize, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `is_empty` defined here
+ --> $DIR/len_without_is_empty.rs:233:5
+ |
+LL | pub fn is_empty(&self) -> Option<bool> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: expected signature: `(&self) -> bool` or `(&self) -> Result<bool>
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/len_without_is_empty.rs:228:5
+ |
+LL | pub fn len(&self) -> Result<usize, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::result-unit-err` implied by `-D warnings`
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/len_without_is_empty.rs:240:5
+ |
+LL | pub fn len(&self) -> Result<usize, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/len_without_is_empty.rs:244:5
+ |
+LL | pub fn is_empty(&self) -> Result<bool, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/len_without_is_empty.rs:251:5
+ |
+LL | pub fn len(&self) -> Result<usize, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/len_zero.fixed b/src/tools/clippy/tests/ui/len_zero.fixed
new file mode 100644
index 000000000..1f3b8ac99
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_zero.fixed
@@ -0,0 +1,143 @@
+// run-rustfix
+
+#![warn(clippy::len_zero)]
+#![allow(dead_code, unused, clippy::len_without_is_empty)]
+
+pub struct One;
+struct Wither;
+
+trait TraitsToo {
+ fn len(&self) -> isize;
+ // No error; `len` is private; see issue #1085.
+}
+
+impl TraitsToo for One {
+ fn len(&self) -> isize {
+ 0
+ }
+}
+
+pub struct HasIsEmpty;
+
+impl HasIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+pub struct HasWrongIsEmpty;
+
+impl HasWrongIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ pub fn is_empty(&self, x: u32) -> bool {
+ false
+ }
+}
+
+pub trait WithIsEmpty {
+ fn len(&self) -> isize;
+ fn is_empty(&self) -> bool;
+}
+
+impl WithIsEmpty for Wither {
+ fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+fn main() {
+ let x = [1, 2];
+ if x.is_empty() {
+ println!("This should not happen!");
+ }
+
+ if "".is_empty() {}
+
+ let y = One;
+ if y.len() == 0 {
+ // No error; `One` does not have `.is_empty()`.
+ println!("This should not happen either!");
+ }
+
+ let z: &dyn TraitsToo = &y;
+ if z.len() > 0 {
+ // No error; `TraitsToo` has no `.is_empty()` method.
+ println!("Nor should this!");
+ }
+
+ let has_is_empty = HasIsEmpty;
+ if has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if !has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if !has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if !has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if has_is_empty.len() > 1 {
+ // No error.
+ println!("This can happen.");
+ }
+ if has_is_empty.len() <= 1 {
+ // No error.
+ println!("This can happen.");
+ }
+ if has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if !has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if !has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if !has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if has_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ if 1 < has_is_empty.len() {
+ // No error.
+ println!("This can happen.");
+ }
+ if 1 >= has_is_empty.len() {
+ // No error.
+ println!("This can happen.");
+ }
+ assert!(!has_is_empty.is_empty());
+
+ let with_is_empty: &dyn WithIsEmpty = &Wither;
+ if with_is_empty.is_empty() {
+ println!("Or this!");
+ }
+ assert!(!with_is_empty.is_empty());
+
+ let has_wrong_is_empty = HasWrongIsEmpty;
+ if has_wrong_is_empty.len() == 0 {
+ // No error; `HasWrongIsEmpty` does not have `.is_empty()`.
+ println!("Or this!");
+ }
+}
+
+fn test_slice(b: &[u8]) {
+ if !b.is_empty() {}
+}
diff --git a/src/tools/clippy/tests/ui/len_zero.rs b/src/tools/clippy/tests/ui/len_zero.rs
new file mode 100644
index 000000000..dc21de000
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_zero.rs
@@ -0,0 +1,143 @@
+// run-rustfix
+
+#![warn(clippy::len_zero)]
+#![allow(dead_code, unused, clippy::len_without_is_empty)]
+
+pub struct One;
+struct Wither;
+
+trait TraitsToo {
+ fn len(&self) -> isize;
+ // No error; `len` is private; see issue #1085.
+}
+
+impl TraitsToo for One {
+ fn len(&self) -> isize {
+ 0
+ }
+}
+
+pub struct HasIsEmpty;
+
+impl HasIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+pub struct HasWrongIsEmpty;
+
+impl HasWrongIsEmpty {
+ pub fn len(&self) -> isize {
+ 1
+ }
+
+ pub fn is_empty(&self, x: u32) -> bool {
+ false
+ }
+}
+
+pub trait WithIsEmpty {
+ fn len(&self) -> isize;
+ fn is_empty(&self) -> bool;
+}
+
+impl WithIsEmpty for Wither {
+ fn len(&self) -> isize {
+ 1
+ }
+
+ fn is_empty(&self) -> bool {
+ false
+ }
+}
+
+fn main() {
+ let x = [1, 2];
+ if x.len() == 0 {
+ println!("This should not happen!");
+ }
+
+ if "".len() == 0 {}
+
+ let y = One;
+ if y.len() == 0 {
+ // No error; `One` does not have `.is_empty()`.
+ println!("This should not happen either!");
+ }
+
+ let z: &dyn TraitsToo = &y;
+ if z.len() > 0 {
+ // No error; `TraitsToo` has no `.is_empty()` method.
+ println!("Nor should this!");
+ }
+
+ let has_is_empty = HasIsEmpty;
+ if has_is_empty.len() == 0 {
+ println!("Or this!");
+ }
+ if has_is_empty.len() != 0 {
+ println!("Or this!");
+ }
+ if has_is_empty.len() > 0 {
+ println!("Or this!");
+ }
+ if has_is_empty.len() < 1 {
+ println!("Or this!");
+ }
+ if has_is_empty.len() >= 1 {
+ println!("Or this!");
+ }
+ if has_is_empty.len() > 1 {
+ // No error.
+ println!("This can happen.");
+ }
+ if has_is_empty.len() <= 1 {
+ // No error.
+ println!("This can happen.");
+ }
+ if 0 == has_is_empty.len() {
+ println!("Or this!");
+ }
+ if 0 != has_is_empty.len() {
+ println!("Or this!");
+ }
+ if 0 < has_is_empty.len() {
+ println!("Or this!");
+ }
+ if 1 <= has_is_empty.len() {
+ println!("Or this!");
+ }
+ if 1 > has_is_empty.len() {
+ println!("Or this!");
+ }
+ if 1 < has_is_empty.len() {
+ // No error.
+ println!("This can happen.");
+ }
+ if 1 >= has_is_empty.len() {
+ // No error.
+ println!("This can happen.");
+ }
+ assert!(!has_is_empty.is_empty());
+
+ let with_is_empty: &dyn WithIsEmpty = &Wither;
+ if with_is_empty.len() == 0 {
+ println!("Or this!");
+ }
+ assert!(!with_is_empty.is_empty());
+
+ let has_wrong_is_empty = HasWrongIsEmpty;
+ if has_wrong_is_empty.len() == 0 {
+ // No error; `HasWrongIsEmpty` does not have `.is_empty()`.
+ println!("Or this!");
+ }
+}
+
+fn test_slice(b: &[u8]) {
+ if b.len() != 0 {}
+}
diff --git a/src/tools/clippy/tests/ui/len_zero.stderr b/src/tools/clippy/tests/ui/len_zero.stderr
new file mode 100644
index 000000000..6c71f1bee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_zero.stderr
@@ -0,0 +1,88 @@
+error: length comparison to zero
+ --> $DIR/len_zero.rs:61:8
+ |
+LL | if x.len() == 0 {
+ | ^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `x.is_empty()`
+ |
+ = note: `-D clippy::len-zero` implied by `-D warnings`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:65:8
+ |
+LL | if "".len() == 0 {}
+ | ^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `"".is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:80:8
+ |
+LL | if has_is_empty.len() == 0 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `has_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:83:8
+ |
+LL | if has_is_empty.len() != 0 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!has_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:86:8
+ |
+LL | if has_is_empty.len() > 0 {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!has_is_empty.is_empty()`
+
+error: length comparison to one
+ --> $DIR/len_zero.rs:89:8
+ |
+LL | if has_is_empty.len() < 1 {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `has_is_empty.is_empty()`
+
+error: length comparison to one
+ --> $DIR/len_zero.rs:92:8
+ |
+LL | if has_is_empty.len() >= 1 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!has_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:103:8
+ |
+LL | if 0 == has_is_empty.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `has_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:106:8
+ |
+LL | if 0 != has_is_empty.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!has_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:109:8
+ |
+LL | if 0 < has_is_empty.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!has_is_empty.is_empty()`
+
+error: length comparison to one
+ --> $DIR/len_zero.rs:112:8
+ |
+LL | if 1 <= has_is_empty.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!has_is_empty.is_empty()`
+
+error: length comparison to one
+ --> $DIR/len_zero.rs:115:8
+ |
+LL | if 1 > has_is_empty.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `has_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:129:8
+ |
+LL | if with_is_empty.len() == 0 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `with_is_empty.is_empty()`
+
+error: length comparison to zero
+ --> $DIR/len_zero.rs:142:8
+ |
+LL | if b.len() != 0 {}
+ | ^^^^^^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!b.is_empty()`
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/len_zero_ranges.fixed b/src/tools/clippy/tests/ui/len_zero_ranges.fixed
new file mode 100644
index 000000000..797817662
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_zero_ranges.fixed
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![warn(clippy::len_zero)]
+#![allow(unused)]
+
+// Now that `Range(Inclusive)::is_empty` is stable (1.47), we can always suggest this
+mod issue_3807 {
+ fn suggestion_is_fine_range() {
+ let _ = (0..42).is_empty();
+ }
+
+ fn suggestion_is_fine_range_inclusive() {
+ let _ = (0_u8..=42).is_empty();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/len_zero_ranges.rs b/src/tools/clippy/tests/ui/len_zero_ranges.rs
new file mode 100644
index 000000000..a0eb51cc9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_zero_ranges.rs
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![warn(clippy::len_zero)]
+#![allow(unused)]
+
+// Now that `Range(Inclusive)::is_empty` is stable (1.47), we can always suggest this
+mod issue_3807 {
+ fn suggestion_is_fine_range() {
+ let _ = (0..42).len() == 0;
+ }
+
+ fn suggestion_is_fine_range_inclusive() {
+ let _ = (0_u8..=42).len() == 0;
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/len_zero_ranges.stderr b/src/tools/clippy/tests/ui/len_zero_ranges.stderr
new file mode 100644
index 000000000..d0defb5a7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/len_zero_ranges.stderr
@@ -0,0 +1,16 @@
+error: length comparison to zero
+ --> $DIR/len_zero_ranges.rs:9:17
+ |
+LL | let _ = (0..42).len() == 0;
+ | ^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `(0..42).is_empty()`
+ |
+ = note: `-D clippy::len-zero` implied by `-D warnings`
+
+error: length comparison to zero
+ --> $DIR/len_zero_ranges.rs:13:17
+ |
+LL | let _ = (0_u8..=42).len() == 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `(0_u8..=42).is_empty()`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/let_and_return.rs b/src/tools/clippy/tests/ui/let_and_return.rs
new file mode 100644
index 000000000..bb162adc9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_and_return.rs
@@ -0,0 +1,169 @@
+#![allow(unused)]
+#![warn(clippy::let_and_return)]
+
+fn test() -> i32 {
+ let _y = 0; // no warning
+ let x = 5;
+ x
+}
+
+fn test_inner() -> i32 {
+ if true {
+ let x = 5;
+ x
+ } else {
+ 0
+ }
+}
+
+fn test_nowarn_1() -> i32 {
+ let mut x = 5;
+ x += 1;
+ x
+}
+
+fn test_nowarn_2() -> i32 {
+ let x = 5;
+ x + 1
+}
+
+fn test_nowarn_3() -> (i32, i32) {
+ // this should technically warn, but we do not compare complex patterns
+ let (x, y) = (5, 9);
+ (x, y)
+}
+
+fn test_nowarn_4() -> i32 {
+ // this should technically warn, but because of the useless type annotation, not because of clippy::let_and_return
+ let x: i32 = 5;
+ x
+}
+
+fn test_nowarn_5(x: i16) -> u16 {
+ #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
+ let x = x as u16;
+ x
+}
+
+// False positive example
+trait Decode {
+ fn decode<D: std::io::Read>(d: D) -> Result<Self, ()>
+ where
+ Self: Sized;
+}
+
+macro_rules! tuple_encode {
+ ($($x:ident),*) => (
+ impl<$($x: Decode),*> Decode for ($($x),*) {
+ #[inline]
+ #[allow(non_snake_case)]
+ fn decode<D: std::io::Read>(mut d: D) -> Result<Self, ()> {
+ // Shouldn't trigger lint
+ Ok(($({let $x = Decode::decode(&mut d)?; $x }),*))
+ }
+ }
+ );
+}
+
+tuple_encode!(T0, T1, T2, T3, T4, T5, T6, T7);
+
+mod no_lint_if_stmt_borrows {
+ mod issue_3792 {
+ use std::io::{self, BufRead, Stdin};
+
+ fn read_line() -> String {
+ let stdin = io::stdin();
+ let line = stdin.lock().lines().next().unwrap().unwrap();
+ line
+ }
+ }
+
+ mod issue_3324 {
+ use std::cell::RefCell;
+ use std::rc::{Rc, Weak};
+
+ fn test(value: Weak<RefCell<Bar>>) -> u32 {
+ let value = value.upgrade().unwrap();
+ let ret = value.borrow().baz();
+ ret
+ }
+
+ struct Bar;
+
+ impl Bar {
+ fn new() -> Self {
+ Bar {}
+ }
+ fn baz(&self) -> u32 {
+ 0
+ }
+ }
+
+ fn main() {
+ let a = Rc::new(RefCell::new(Bar::new()));
+ let b = Rc::downgrade(&a);
+ test(b);
+ }
+ }
+
+ mod free_function {
+ struct Inner;
+
+ struct Foo<'a> {
+ inner: &'a Inner,
+ }
+
+ impl Drop for Foo<'_> {
+ fn drop(&mut self) {}
+ }
+
+ impl<'a> Foo<'a> {
+ fn new(inner: &'a Inner) -> Self {
+ Self { inner }
+ }
+
+ fn value(&self) -> i32 {
+ 42
+ }
+ }
+
+ fn some_foo(inner: &Inner) -> Foo<'_> {
+ Foo { inner }
+ }
+
+ fn test() -> i32 {
+ let x = Inner {};
+ let value = some_foo(&x).value();
+ value
+ }
+
+ fn test2() -> i32 {
+ let x = Inner {};
+ let value = Foo::new(&x).value();
+ value
+ }
+ }
+}
+
+mod issue_5729 {
+ use std::sync::Arc;
+
+ trait Foo {}
+
+ trait FooStorage {
+ fn foo_cloned(&self) -> Arc<dyn Foo>;
+ }
+
+ struct FooStorageImpl<T: Foo> {
+ foo: Arc<T>,
+ }
+
+ impl<T: Foo + 'static> FooStorage for FooStorageImpl<T> {
+ fn foo_cloned(&self) -> Arc<dyn Foo> {
+ let clone = Arc::clone(&self.foo);
+ clone
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/let_and_return.stderr b/src/tools/clippy/tests/ui/let_and_return.stderr
new file mode 100644
index 000000000..17fd694bf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_and_return.stderr
@@ -0,0 +1,45 @@
+error: returning the result of a `let` binding from a block
+ --> $DIR/let_and_return.rs:7:5
+ |
+LL | let x = 5;
+ | ---------- unnecessary `let` binding
+LL | x
+ | ^
+ |
+ = note: `-D clippy::let-and-return` implied by `-D warnings`
+help: return the expression directly
+ |
+LL ~
+LL ~ 5
+ |
+
+error: returning the result of a `let` binding from a block
+ --> $DIR/let_and_return.rs:13:9
+ |
+LL | let x = 5;
+ | ---------- unnecessary `let` binding
+LL | x
+ | ^
+ |
+help: return the expression directly
+ |
+LL ~
+LL ~ 5
+ |
+
+error: returning the result of a `let` binding from a block
+ --> $DIR/let_and_return.rs:164:13
+ |
+LL | let clone = Arc::clone(&self.foo);
+ | ---------------------------------- unnecessary `let` binding
+LL | clone
+ | ^^^^^
+ |
+help: return the expression directly
+ |
+LL ~
+LL ~ Arc::clone(&self.foo) as _
+ |
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/let_if_seq.rs b/src/tools/clippy/tests/ui/let_if_seq.rs
new file mode 100644
index 000000000..c5cb2eb1f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_if_seq.rs
@@ -0,0 +1,122 @@
+#![allow(
+ unused_variables,
+ unused_assignments,
+ clippy::similar_names,
+ clippy::blacklisted_name,
+ clippy::branches_sharing_code,
+ clippy::needless_late_init
+)]
+#![warn(clippy::useless_let_if_seq)]
+
+fn f() -> bool {
+ true
+}
+fn g(x: i32) -> i32 {
+ x + 1
+}
+
+fn issue985() -> i32 {
+ let mut x = 42;
+ if f() {
+ x = g(x);
+ }
+
+ x
+}
+
+fn issue985_alt() -> i32 {
+ let mut x = 42;
+ if f() {
+ f();
+ } else {
+ x = g(x);
+ }
+
+ x
+}
+
+#[allow(clippy::manual_strip)]
+fn issue975() -> String {
+ let mut udn = "dummy".to_string();
+ if udn.starts_with("uuid:") {
+ udn = String::from(&udn[5..]);
+ }
+ udn
+}
+
+fn early_return() -> u8 {
+ // FIXME: we could extend the lint to include such cases:
+ let foo;
+
+ if f() {
+ return 42;
+ } else {
+ foo = 0;
+ }
+
+ foo
+}
+
+fn main() {
+ early_return();
+ issue975();
+ issue985();
+ issue985_alt();
+
+ let mut foo = 0;
+ if f() {
+ foo = 42;
+ }
+
+ let mut bar = 0;
+ if f() {
+ f();
+ bar = 42;
+ } else {
+ f();
+ }
+
+ let quz;
+ if f() {
+ quz = 42;
+ } else {
+ quz = 0;
+ }
+
+ // `toto` is used several times
+ let mut toto;
+ if f() {
+ toto = 42;
+ } else {
+ for i in &[1, 2] {
+ toto = *i;
+ }
+
+ toto = 2;
+ }
+
+ // Found in libcore: the inner `if` is not a statement but the block's expression.
+ let mut ch = b'x';
+ if f() {
+ ch = b'*';
+ if f() {
+ ch = b'?';
+ }
+ }
+
+ // baz needs to be mut
+ let mut baz = 0;
+ if f() {
+ baz = 42;
+ }
+
+ baz = 1337;
+
+ // issue 3043 - types with interior mutability should not trigger this lint
+ use std::cell::Cell;
+ let mut val = Cell::new(1);
+ if true {
+ val = Cell::new(2);
+ }
+ println!("{}", val.get());
+}
diff --git a/src/tools/clippy/tests/ui/let_if_seq.stderr b/src/tools/clippy/tests/ui/let_if_seq.stderr
new file mode 100644
index 000000000..271ccce68
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_if_seq.stderr
@@ -0,0 +1,50 @@
+error: `if _ { .. } else { .. }` is an expression
+ --> $DIR/let_if_seq.rs:66:5
+ |
+LL | / let mut foo = 0;
+LL | | if f() {
+LL | | foo = 42;
+LL | | }
+ | |_____^ help: it is more idiomatic to write: `let <mut> foo = if f() { 42 } else { 0 };`
+ |
+ = note: `-D clippy::useless-let-if-seq` implied by `-D warnings`
+ = note: you might not need `mut` at all
+
+error: `if _ { .. } else { .. }` is an expression
+ --> $DIR/let_if_seq.rs:71:5
+ |
+LL | / let mut bar = 0;
+LL | | if f() {
+LL | | f();
+LL | | bar = 42;
+LL | | } else {
+LL | | f();
+LL | | }
+ | |_____^ help: it is more idiomatic to write: `let <mut> bar = if f() { ..; 42 } else { ..; 0 };`
+ |
+ = note: you might not need `mut` at all
+
+error: `if _ { .. } else { .. }` is an expression
+ --> $DIR/let_if_seq.rs:79:5
+ |
+LL | / let quz;
+LL | | if f() {
+LL | | quz = 42;
+LL | | } else {
+LL | | quz = 0;
+LL | | }
+ | |_____^ help: it is more idiomatic to write: `let quz = if f() { 42 } else { 0 };`
+
+error: `if _ { .. } else { .. }` is an expression
+ --> $DIR/let_if_seq.rs:108:5
+ |
+LL | / let mut baz = 0;
+LL | | if f() {
+LL | | baz = 42;
+LL | | }
+ | |_____^ help: it is more idiomatic to write: `let <mut> baz = if f() { 42 } else { 0 };`
+ |
+ = note: you might not need `mut` at all
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/let_underscore_drop.rs b/src/tools/clippy/tests/ui/let_underscore_drop.rs
new file mode 100644
index 000000000..11b50492a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_underscore_drop.rs
@@ -0,0 +1,28 @@
+#![warn(clippy::let_underscore_drop)]
+#![allow(clippy::let_unit_value)]
+
+struct Droppable;
+
+impl Drop for Droppable {
+ fn drop(&mut self) {}
+}
+
+fn main() {
+ let unit = ();
+ let boxed = Box::new(());
+ let droppable = Droppable;
+ let optional = Some(Droppable);
+
+ let _ = ();
+ let _ = Box::new(());
+ let _ = Droppable;
+ let _ = Some(Droppable);
+
+ // no lint for reference
+ let _ = droppable_ref();
+}
+
+#[must_use]
+fn droppable_ref() -> &'static mut Droppable {
+ unimplemented!()
+}
diff --git a/src/tools/clippy/tests/ui/let_underscore_drop.stderr b/src/tools/clippy/tests/ui/let_underscore_drop.stderr
new file mode 100644
index 000000000..ee7bbe995
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_underscore_drop.stderr
@@ -0,0 +1,27 @@
+error: non-binding `let` on a type that implements `Drop`
+ --> $DIR/let_underscore_drop.rs:17:5
+ |
+LL | let _ = Box::new(());
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::let-underscore-drop` implied by `-D warnings`
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding `let` on a type that implements `Drop`
+ --> $DIR/let_underscore_drop.rs:18:5
+ |
+LL | let _ = Droppable;
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding `let` on a type that implements `Drop`
+ --> $DIR/let_underscore_drop.rs:19:5
+ |
+LL | let _ = Some(Droppable);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/let_underscore_lock.rs b/src/tools/clippy/tests/ui/let_underscore_lock.rs
new file mode 100644
index 000000000..7a7c4e924
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_underscore_lock.rs
@@ -0,0 +1,36 @@
+#![warn(clippy::let_underscore_lock)]
+
+extern crate parking_lot;
+
+fn main() {
+ let m = std::sync::Mutex::new(());
+ let rw = std::sync::RwLock::new(());
+
+ let _ = m.lock();
+ let _ = rw.read();
+ let _ = rw.write();
+ let _ = m.try_lock();
+ let _ = rw.try_read();
+ let _ = rw.try_write();
+
+ // These shouldn't throw an error.
+ let _ = m;
+ let _ = rw;
+
+ use parking_lot::{lock_api::RawMutex, Mutex, RwLock};
+
+ let p_m: Mutex<()> = Mutex::const_new(RawMutex::INIT, ());
+ let _ = p_m.lock();
+
+ let p_m1 = Mutex::new(0);
+ let _ = p_m1.lock();
+
+ let p_rw = RwLock::new(0);
+ let _ = p_rw.read();
+ let _ = p_rw.write();
+
+ // These shouldn't throw an error.
+ let _ = p_m;
+ let _ = p_m1;
+ let _ = p_rw;
+}
diff --git a/src/tools/clippy/tests/ui/let_underscore_lock.stderr b/src/tools/clippy/tests/ui/let_underscore_lock.stderr
new file mode 100644
index 000000000..4365b48fa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_underscore_lock.stderr
@@ -0,0 +1,83 @@
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:9:5
+ |
+LL | let _ = m.lock();
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::let-underscore-lock` implied by `-D warnings`
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:10:5
+ |
+LL | let _ = rw.read();
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:11:5
+ |
+LL | let _ = rw.write();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:12:5
+ |
+LL | let _ = m.try_lock();
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:13:5
+ |
+LL | let _ = rw.try_read();
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:14:5
+ |
+LL | let _ = rw.try_write();
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:23:5
+ |
+LL | let _ = p_m.lock();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:26:5
+ |
+LL | let _ = p_m1.lock();
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:29:5
+ |
+LL | let _ = p_rw.read();
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: non-binding let on a synchronization lock
+ --> $DIR/let_underscore_lock.rs:30:5
+ |
+LL | let _ = p_rw.write();
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/let_underscore_must_use.rs b/src/tools/clippy/tests/ui/let_underscore_must_use.rs
new file mode 100644
index 000000000..1edb77c74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_underscore_must_use.rs
@@ -0,0 +1,95 @@
+#![warn(clippy::let_underscore_must_use)]
+#![allow(clippy::unnecessary_wraps)]
+
+// Debug implementations can fire this lint,
+// so we shouldn't lint external macros
+#[derive(Debug)]
+struct Foo {
+ field: i32,
+}
+
+#[must_use]
+fn f() -> u32 {
+ 0
+}
+
+fn g() -> Result<u32, u32> {
+ Ok(0)
+}
+
+#[must_use]
+fn l<T>(x: T) -> T {
+ x
+}
+
+fn h() -> u32 {
+ 0
+}
+
+struct S;
+
+impl S {
+ #[must_use]
+ pub fn f(&self) -> u32 {
+ 0
+ }
+
+ pub fn g(&self) -> Result<u32, u32> {
+ Ok(0)
+ }
+
+ fn k(&self) -> u32 {
+ 0
+ }
+
+ #[must_use]
+ fn h() -> u32 {
+ 0
+ }
+
+ fn p() -> Result<u32, u32> {
+ Ok(0)
+ }
+}
+
+trait Trait {
+ #[must_use]
+ fn a() -> u32;
+}
+
+impl Trait for S {
+ fn a() -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let _ = f();
+ let _ = g();
+ let _ = h();
+ let _ = l(0_u32);
+
+ let s = S {};
+
+ let _ = s.f();
+ let _ = s.g();
+ let _ = s.k();
+
+ let _ = S::h();
+ let _ = S::p();
+
+ let _ = S::a();
+
+ let _ = if true { Ok(()) } else { Err(()) };
+
+ let a = Result::<(), ()>::Ok(());
+
+ let _ = a.is_ok();
+
+ let _ = a.map(|_| ());
+
+ let _ = a;
+
+ #[allow(clippy::let_underscore_must_use)]
+ let _ = a;
+}
diff --git a/src/tools/clippy/tests/ui/let_underscore_must_use.stderr b/src/tools/clippy/tests/ui/let_underscore_must_use.stderr
new file mode 100644
index 000000000..5b751ea56
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_underscore_must_use.stderr
@@ -0,0 +1,99 @@
+error: non-binding let on a result of a `#[must_use]` function
+ --> $DIR/let_underscore_must_use.rs:67:5
+ |
+LL | let _ = f();
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::let-underscore-must-use` implied by `-D warnings`
+ = help: consider explicitly using function result
+
+error: non-binding let on an expression with `#[must_use]` type
+ --> $DIR/let_underscore_must_use.rs:68:5
+ |
+LL | let _ = g();
+ | ^^^^^^^^^^^^
+ |
+ = help: consider explicitly using expression value
+
+error: non-binding let on a result of a `#[must_use]` function
+ --> $DIR/let_underscore_must_use.rs:70:5
+ |
+LL | let _ = l(0_u32);
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using function result
+
+error: non-binding let on a result of a `#[must_use]` function
+ --> $DIR/let_underscore_must_use.rs:74:5
+ |
+LL | let _ = s.f();
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using function result
+
+error: non-binding let on an expression with `#[must_use]` type
+ --> $DIR/let_underscore_must_use.rs:75:5
+ |
+LL | let _ = s.g();
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using expression value
+
+error: non-binding let on a result of a `#[must_use]` function
+ --> $DIR/let_underscore_must_use.rs:78:5
+ |
+LL | let _ = S::h();
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using function result
+
+error: non-binding let on an expression with `#[must_use]` type
+ --> $DIR/let_underscore_must_use.rs:79:5
+ |
+LL | let _ = S::p();
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using expression value
+
+error: non-binding let on a result of a `#[must_use]` function
+ --> $DIR/let_underscore_must_use.rs:81:5
+ |
+LL | let _ = S::a();
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using function result
+
+error: non-binding let on an expression with `#[must_use]` type
+ --> $DIR/let_underscore_must_use.rs:83:5
+ |
+LL | let _ = if true { Ok(()) } else { Err(()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using expression value
+
+error: non-binding let on a result of a `#[must_use]` function
+ --> $DIR/let_underscore_must_use.rs:87:5
+ |
+LL | let _ = a.is_ok();
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using function result
+
+error: non-binding let on an expression with `#[must_use]` type
+ --> $DIR/let_underscore_must_use.rs:89:5
+ |
+LL | let _ = a.map(|_| ());
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider explicitly using expression value
+
+error: non-binding let on an expression with `#[must_use]` type
+ --> $DIR/let_underscore_must_use.rs:91:5
+ |
+LL | let _ = a;
+ | ^^^^^^^^^^
+ |
+ = help: consider explicitly using expression value
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/let_unit.fixed b/src/tools/clippy/tests/ui/let_unit.fixed
new file mode 100644
index 000000000..6343cff0f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_unit.fixed
@@ -0,0 +1,177 @@
+// run-rustfix
+
+#![feature(lint_reasons)]
+#![warn(clippy::let_unit_value)]
+#![allow(unused, clippy::no_effect, clippy::needless_late_init, path_statements)]
+
+macro_rules! let_and_return {
+ ($n:expr) => {{
+ let ret = $n;
+ }};
+}
+
+fn main() {
+ println!("x");
+ let _y = 1; // this is fine
+ let _z = ((), 1); // this as well
+ if true {
+ ();
+ }
+
+ consume_units_with_for_loop(); // should be fine as well
+
+ multiline_sugg();
+
+ let_and_return!(()) // should be fine
+}
+
+// Related to issue #1964
+fn consume_units_with_for_loop() {
+ // `for_let_unit` lint should not be triggered by consuming them in a `for` loop.
+ let v = vec![(), (), ()];
+ let mut count = 0;
+ for _ in v {
+ count += 1;
+ }
+ assert_eq!(count, 3);
+
+ // Same for consuming from some other Iterator<Item = ()>.
+ let (tx, rx) = ::std::sync::mpsc::channel();
+ tx.send(()).unwrap();
+ drop(tx);
+
+ count = 0;
+ for _ in rx.iter() {
+ count += 1;
+ }
+ assert_eq!(count, 1);
+}
+
+fn multiline_sugg() {
+ let v: Vec<u8> = vec![2];
+
+ v
+ .into_iter()
+ .map(|i| i * 2)
+ .filter(|i| i % 2 == 0)
+ .map(|_| ())
+ .next()
+ .unwrap();
+}
+
+#[derive(Copy, Clone)]
+pub struct ContainsUnit(()); // should be fine
+
+fn _returns_generic() {
+ fn f<T>() -> T {
+ unimplemented!()
+ }
+ fn f2<T, U>(_: T) -> U {
+ unimplemented!()
+ }
+ fn f3<T>(x: T) -> T {
+ x
+ }
+ fn f5<T: Default>(x: bool) -> Option<T> {
+ x.then(|| T::default())
+ }
+
+ let _: () = f(); // Ok
+ let _: () = f(); // Lint.
+
+ let _: () = f2(0i32); // Ok
+ let _: () = f2(0i32); // Lint.
+
+ f3(()); // Lint
+ f3(()); // Lint
+
+ // Should lint:
+ // fn f4<T>(mut x: Vec<T>) -> T {
+ // x.pop().unwrap()
+ // }
+ // let _: () = f4(vec![()]);
+ // let x: () = f4(vec![()]);
+
+ // Ok
+ let _: () = {
+ let x = 5;
+ f2(x)
+ };
+
+ let _: () = if true { f() } else { f2(0) }; // Ok
+ let _: () = if true { f() } else { f2(0) }; // Lint
+
+ // Ok
+ let _: () = match Some(0) {
+ None => f2(1),
+ Some(0) => f(),
+ Some(1) => f2(3),
+ Some(_) => f2('x'),
+ };
+
+ // Lint
+ match Some(0) {
+ None => f2(1),
+ Some(0) => f(),
+ Some(1) => f2(3),
+ Some(_) => (),
+ };
+
+ let _: () = f5(true).unwrap();
+
+ #[allow(clippy::let_unit_value)]
+ {
+ let x = f();
+ let y;
+ let z;
+ match 0 {
+ 0 => {
+ y = f();
+ z = f();
+ },
+ 1 => {
+ println!("test");
+ y = f();
+ z = f3(());
+ },
+ _ => panic!(),
+ }
+
+ let x1;
+ let x2;
+ if true {
+ x1 = f();
+ x2 = x1;
+ } else {
+ x2 = f();
+ x1 = x2;
+ }
+
+ let opt;
+ match f5(true) {
+ Some(x) => opt = x,
+ None => panic!(),
+ };
+
+ #[warn(clippy::let_unit_value)]
+ {
+ let _: () = x;
+ let _: () = y;
+ z;
+ let _: () = x1;
+ let _: () = x2;
+ let _: () = opt;
+ }
+ }
+
+ let () = f();
+}
+
+fn attributes() {
+ fn f() {}
+
+ #[allow(clippy::let_unit_value)]
+ let _ = f();
+ #[expect(clippy::let_unit_value)]
+ let _ = f();
+}
diff --git a/src/tools/clippy/tests/ui/let_unit.rs b/src/tools/clippy/tests/ui/let_unit.rs
new file mode 100644
index 000000000..c9bb2849f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_unit.rs
@@ -0,0 +1,177 @@
+// run-rustfix
+
+#![feature(lint_reasons)]
+#![warn(clippy::let_unit_value)]
+#![allow(unused, clippy::no_effect, clippy::needless_late_init, path_statements)]
+
+macro_rules! let_and_return {
+ ($n:expr) => {{
+ let ret = $n;
+ }};
+}
+
+fn main() {
+ let _x = println!("x");
+ let _y = 1; // this is fine
+ let _z = ((), 1); // this as well
+ if true {
+ let _a = ();
+ }
+
+ consume_units_with_for_loop(); // should be fine as well
+
+ multiline_sugg();
+
+ let_and_return!(()) // should be fine
+}
+
+// Related to issue #1964
+fn consume_units_with_for_loop() {
+ // `for_let_unit` lint should not be triggered by consuming them in a `for` loop.
+ let v = vec![(), (), ()];
+ let mut count = 0;
+ for _ in v {
+ count += 1;
+ }
+ assert_eq!(count, 3);
+
+ // Same for consuming from some other Iterator<Item = ()>.
+ let (tx, rx) = ::std::sync::mpsc::channel();
+ tx.send(()).unwrap();
+ drop(tx);
+
+ count = 0;
+ for _ in rx.iter() {
+ count += 1;
+ }
+ assert_eq!(count, 1);
+}
+
+fn multiline_sugg() {
+ let v: Vec<u8> = vec![2];
+
+ let _ = v
+ .into_iter()
+ .map(|i| i * 2)
+ .filter(|i| i % 2 == 0)
+ .map(|_| ())
+ .next()
+ .unwrap();
+}
+
+#[derive(Copy, Clone)]
+pub struct ContainsUnit(()); // should be fine
+
+fn _returns_generic() {
+ fn f<T>() -> T {
+ unimplemented!()
+ }
+ fn f2<T, U>(_: T) -> U {
+ unimplemented!()
+ }
+ fn f3<T>(x: T) -> T {
+ x
+ }
+ fn f5<T: Default>(x: bool) -> Option<T> {
+ x.then(|| T::default())
+ }
+
+ let _: () = f(); // Ok
+ let x: () = f(); // Lint.
+
+ let _: () = f2(0i32); // Ok
+ let x: () = f2(0i32); // Lint.
+
+ let _: () = f3(()); // Lint
+ let x: () = f3(()); // Lint
+
+ // Should lint:
+ // fn f4<T>(mut x: Vec<T>) -> T {
+ // x.pop().unwrap()
+ // }
+ // let _: () = f4(vec![()]);
+ // let x: () = f4(vec![()]);
+
+ // Ok
+ let _: () = {
+ let x = 5;
+ f2(x)
+ };
+
+ let _: () = if true { f() } else { f2(0) }; // Ok
+ let x: () = if true { f() } else { f2(0) }; // Lint
+
+ // Ok
+ let _: () = match Some(0) {
+ None => f2(1),
+ Some(0) => f(),
+ Some(1) => f2(3),
+ Some(_) => f2('x'),
+ };
+
+ // Lint
+ let _: () = match Some(0) {
+ None => f2(1),
+ Some(0) => f(),
+ Some(1) => f2(3),
+ Some(_) => (),
+ };
+
+ let _: () = f5(true).unwrap();
+
+ #[allow(clippy::let_unit_value)]
+ {
+ let x = f();
+ let y;
+ let z;
+ match 0 {
+ 0 => {
+ y = f();
+ z = f();
+ },
+ 1 => {
+ println!("test");
+ y = f();
+ z = f3(());
+ },
+ _ => panic!(),
+ }
+
+ let x1;
+ let x2;
+ if true {
+ x1 = f();
+ x2 = x1;
+ } else {
+ x2 = f();
+ x1 = x2;
+ }
+
+ let opt;
+ match f5(true) {
+ Some(x) => opt = x,
+ None => panic!(),
+ };
+
+ #[warn(clippy::let_unit_value)]
+ {
+ let _: () = x;
+ let _: () = y;
+ let _: () = z;
+ let _: () = x1;
+ let _: () = x2;
+ let _: () = opt;
+ }
+ }
+
+ let () = f();
+}
+
+fn attributes() {
+ fn f() {}
+
+ #[allow(clippy::let_unit_value)]
+ let _ = f();
+ #[expect(clippy::let_unit_value)]
+ let _ = f();
+}
diff --git a/src/tools/clippy/tests/ui/let_unit.stderr b/src/tools/clippy/tests/ui/let_unit.stderr
new file mode 100644
index 000000000..49da74ca7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/let_unit.stderr
@@ -0,0 +1,102 @@
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:14:5
+ |
+LL | let _x = println!("x");
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: omit the `let` binding: `println!("x");`
+ |
+ = note: `-D clippy::let-unit-value` implied by `-D warnings`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:18:9
+ |
+LL | let _a = ();
+ | ^^^^^^^^^^^^ help: omit the `let` binding: `();`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:53:5
+ |
+LL | / let _ = v
+LL | | .into_iter()
+LL | | .map(|i| i * 2)
+LL | | .filter(|i| i % 2 == 0)
+LL | | .map(|_| ())
+LL | | .next()
+LL | | .unwrap();
+ | |__________________^
+ |
+help: omit the `let` binding
+ |
+LL ~ v
+LL + .into_iter()
+LL + .map(|i| i * 2)
+LL + .filter(|i| i % 2 == 0)
+LL + .map(|_| ())
+LL + .next()
+LL + .unwrap();
+ |
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:80:5
+ |
+LL | let x: () = f(); // Lint.
+ | ^^^^-^^^^^^^^^^^
+ | |
+ | help: use a wild (`_`) binding: `_`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:83:5
+ |
+LL | let x: () = f2(0i32); // Lint.
+ | ^^^^-^^^^^^^^^^^^^^^^
+ | |
+ | help: use a wild (`_`) binding: `_`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:85:5
+ |
+LL | let _: () = f3(()); // Lint
+ | ^^^^^^^^^^^^^^^^^^^ help: omit the `let` binding: `f3(());`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:86:5
+ |
+LL | let x: () = f3(()); // Lint
+ | ^^^^^^^^^^^^^^^^^^^ help: omit the `let` binding: `f3(());`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:102:5
+ |
+LL | let x: () = if true { f() } else { f2(0) }; // Lint
+ | ^^^^-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: use a wild (`_`) binding: `_`
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:113:5
+ |
+LL | / let _: () = match Some(0) {
+LL | | None => f2(1),
+LL | | Some(0) => f(),
+LL | | Some(1) => f2(3),
+LL | | Some(_) => (),
+LL | | };
+ | |______^
+ |
+help: omit the `let` binding
+ |
+LL ~ match Some(0) {
+LL + None => f2(1),
+LL + Some(0) => f(),
+LL + Some(1) => f2(3),
+LL + Some(_) => (),
+LL + };
+ |
+
+error: this let-binding has unit value
+ --> $DIR/let_unit.rs:160:13
+ |
+LL | let _: () = z;
+ | ^^^^^^^^^^^^^^ help: omit the `let` binding: `z;`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/linkedlist.rs b/src/tools/clippy/tests/ui/linkedlist.rs
new file mode 100644
index 000000000..690ea810a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/linkedlist.rs
@@ -0,0 +1,48 @@
+#![feature(associated_type_defaults)]
+#![warn(clippy::linkedlist)]
+#![allow(unused, dead_code, clippy::needless_pass_by_value)]
+
+extern crate alloc;
+use alloc::collections::linked_list::LinkedList;
+
+const C: LinkedList<i32> = LinkedList::new();
+static S: LinkedList<i32> = LinkedList::new();
+
+trait Foo {
+ type Baz = LinkedList<u8>;
+ fn foo(_: LinkedList<u8>);
+ const BAR: Option<LinkedList<u8>>;
+}
+
+// Ok, we don’t want to warn for implementations; see issue #605.
+impl Foo for LinkedList<u8> {
+ fn foo(_: LinkedList<u8>) {}
+ const BAR: Option<LinkedList<u8>> = None;
+}
+
+pub struct Bar {
+ priv_linked_list_field: LinkedList<u8>,
+ pub pub_linked_list_field: LinkedList<u8>,
+}
+impl Bar {
+ fn foo(_: LinkedList<u8>) {}
+}
+
+// All of these tests should trigger the lint because they are not
+// part of the public API.
+fn test(my_favorite_linked_list: LinkedList<u8>) {}
+fn test_ret() -> Option<LinkedList<u8>> {
+ None
+}
+fn test_local_not_linted() {
+ let _: LinkedList<u8>;
+}
+
+// All of these tests should be allowed because they are part of the
+// public API and `avoid_breaking_exported_api` is `false` by default.
+pub fn pub_test(the_most_awesome_linked_list: LinkedList<u8>) {}
+pub fn pub_test_ret() -> Option<LinkedList<u8>> {
+ None
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/linkedlist.stderr b/src/tools/clippy/tests/ui/linkedlist.stderr
new file mode 100644
index 000000000..51327df13
--- /dev/null
+++ b/src/tools/clippy/tests/ui/linkedlist.stderr
@@ -0,0 +1,75 @@
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:8:10
+ |
+LL | const C: LinkedList<i32> = LinkedList::new();
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::linkedlist` implied by `-D warnings`
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:9:11
+ |
+LL | static S: LinkedList<i32> = LinkedList::new();
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:12:16
+ |
+LL | type Baz = LinkedList<u8>;
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:13:15
+ |
+LL | fn foo(_: LinkedList<u8>);
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:14:23
+ |
+LL | const BAR: Option<LinkedList<u8>>;
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:24:29
+ |
+LL | priv_linked_list_field: LinkedList<u8>,
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:28:15
+ |
+LL | fn foo(_: LinkedList<u8>) {}
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:33:34
+ |
+LL | fn test(my_favorite_linked_list: LinkedList<u8>) {}
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: you seem to be using a `LinkedList`! Perhaps you meant some other data structure?
+ --> $DIR/linkedlist.rs:34:25
+ |
+LL | fn test_ret() -> Option<LinkedList<u8>> {
+ | ^^^^^^^^^^^^^^
+ |
+ = help: a `VecDeque` might work
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/literals.rs b/src/tools/clippy/tests/ui/literals.rs
new file mode 100644
index 000000000..0cadd5a3d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/literals.rs
@@ -0,0 +1,42 @@
+// does not test any rustfixable lints
+
+#![warn(clippy::mixed_case_hex_literals)]
+#![warn(clippy::zero_prefixed_literal)]
+#![warn(clippy::unseparated_literal_suffix)]
+#![warn(clippy::separated_literal_suffix)]
+#![allow(dead_code, overflowing_literals)]
+
+fn main() {
+ let ok1 = 0xABCD;
+ let ok3 = 0xab_cd;
+ let ok4 = 0xab_cd_i32;
+ let ok5 = 0xAB_CD_u32;
+ let ok5 = 0xAB_CD_isize;
+ let fail1 = 0xabCD;
+ let fail2 = 0xabCD_u32;
+ let fail2 = 0xabCD_isize;
+ let fail_multi_zero = 000_123usize;
+
+ let ok9 = 0;
+ let ok10 = 0_i64;
+ let fail8 = 0123;
+
+ let ok11 = 0o123;
+ let ok12 = 0b10_1010;
+
+ let ok13 = 0xab_abcd;
+ let ok14 = 0xBAFE_BAFE;
+ let ok15 = 0xab_cabc_abca_bcab_cabc;
+ let ok16 = 0xFE_BAFE_ABAB_ABCD;
+ let ok17 = 0x123_4567_8901_usize;
+ let ok18 = 0xF;
+
+ let fail19 = 12_3456_21;
+ let fail22 = 3__4___23;
+ let fail23 = 3__16___23;
+
+ let fail24 = 0xAB_ABC_AB;
+ let fail25 = 0b01_100_101;
+ let ok26 = 0x6_A0_BF;
+ let ok27 = 0b1_0010_0101;
+}
diff --git a/src/tools/clippy/tests/ui/literals.stderr b/src/tools/clippy/tests/ui/literals.stderr
new file mode 100644
index 000000000..365b24074
--- /dev/null
+++ b/src/tools/clippy/tests/ui/literals.stderr
@@ -0,0 +1,139 @@
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:12:15
+ |
+LL | let ok4 = 0xab_cd_i32;
+ | ^^^^^^^^^^^ help: remove the underscore: `0xab_cdi32`
+ |
+ = note: `-D clippy::separated-literal-suffix` implied by `-D warnings`
+
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:13:15
+ |
+LL | let ok5 = 0xAB_CD_u32;
+ | ^^^^^^^^^^^ help: remove the underscore: `0xAB_CDu32`
+
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:14:15
+ |
+LL | let ok5 = 0xAB_CD_isize;
+ | ^^^^^^^^^^^^^ help: remove the underscore: `0xAB_CDisize`
+
+error: inconsistent casing in hexadecimal literal
+ --> $DIR/literals.rs:15:17
+ |
+LL | let fail1 = 0xabCD;
+ | ^^^^^^
+ |
+ = note: `-D clippy::mixed-case-hex-literals` implied by `-D warnings`
+
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:16:17
+ |
+LL | let fail2 = 0xabCD_u32;
+ | ^^^^^^^^^^ help: remove the underscore: `0xabCDu32`
+
+error: inconsistent casing in hexadecimal literal
+ --> $DIR/literals.rs:16:17
+ |
+LL | let fail2 = 0xabCD_u32;
+ | ^^^^^^^^^^
+
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:17:17
+ |
+LL | let fail2 = 0xabCD_isize;
+ | ^^^^^^^^^^^^ help: remove the underscore: `0xabCDisize`
+
+error: inconsistent casing in hexadecimal literal
+ --> $DIR/literals.rs:17:17
+ |
+LL | let fail2 = 0xabCD_isize;
+ | ^^^^^^^^^^^^
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/literals.rs:18:27
+ |
+LL | let fail_multi_zero = 000_123usize;
+ | ^^^^^^^^^^^^ help: add an underscore: `000_123_usize`
+ |
+ = note: `-D clippy::unseparated-literal-suffix` implied by `-D warnings`
+
+error: this is a decimal constant
+ --> $DIR/literals.rs:18:27
+ |
+LL | let fail_multi_zero = 000_123usize;
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::zero-prefixed-literal` implied by `-D warnings`
+help: if you mean to use a decimal constant, remove the `0` to avoid confusion
+ |
+LL | let fail_multi_zero = 123usize;
+ | ~~~~~~~~
+help: if you mean to use an octal constant, use `0o`
+ |
+LL | let fail_multi_zero = 0o123usize;
+ | ~~~~~~~~~~
+
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:21:16
+ |
+LL | let ok10 = 0_i64;
+ | ^^^^^ help: remove the underscore: `0i64`
+
+error: this is a decimal constant
+ --> $DIR/literals.rs:22:17
+ |
+LL | let fail8 = 0123;
+ | ^^^^
+ |
+help: if you mean to use a decimal constant, remove the `0` to avoid confusion
+ |
+LL | let fail8 = 123;
+ | ~~~
+help: if you mean to use an octal constant, use `0o`
+ |
+LL | let fail8 = 0o123;
+ | ~~~~~
+
+error: integer type suffix should not be separated by an underscore
+ --> $DIR/literals.rs:31:16
+ |
+LL | let ok17 = 0x123_4567_8901_usize;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove the underscore: `0x123_4567_8901usize`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/literals.rs:34:18
+ |
+LL | let fail19 = 12_3456_21;
+ | ^^^^^^^^^^ help: consider: `12_345_621`
+ |
+ = note: `-D clippy::inconsistent-digit-grouping` implied by `-D warnings`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/literals.rs:35:18
+ |
+LL | let fail22 = 3__4___23;
+ | ^^^^^^^^^ help: consider: `3_423`
+
+error: digits grouped inconsistently by underscores
+ --> $DIR/literals.rs:36:18
+ |
+LL | let fail23 = 3__16___23;
+ | ^^^^^^^^^^ help: consider: `31_623`
+
+error: digits of hex or binary literal not grouped by four
+ --> $DIR/literals.rs:38:18
+ |
+LL | let fail24 = 0xAB_ABC_AB;
+ | ^^^^^^^^^^^ help: consider: `0x0ABA_BCAB`
+ |
+ = note: `-D clippy::unusual-byte-groupings` implied by `-D warnings`
+
+error: digits of hex or binary literal not grouped by four
+ --> $DIR/literals.rs:39:18
+ |
+LL | let fail25 = 0b01_100_101;
+ | ^^^^^^^^^^^^ help: consider: `0b0110_0101`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/logic_bug.rs b/src/tools/clippy/tests/ui/logic_bug.rs
new file mode 100644
index 000000000..dd6b1db5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/logic_bug.rs
@@ -0,0 +1,34 @@
+#![feature(lint_reasons)]
+#![allow(unused, clippy::diverging_sub_expression)]
+#![warn(clippy::logic_bug)]
+
+fn main() {
+ let a: bool = unimplemented!();
+ let b: bool = unimplemented!();
+ let c: bool = unimplemented!();
+ let d: bool = unimplemented!();
+ let e: bool = unimplemented!();
+ let _ = a && b || a;
+ let _ = !(a && b);
+ let _ = false && a;
+ // don't lint on cfgs
+ let _ = cfg!(you_shall_not_not_pass) && a;
+ let _ = a || !b || !c || !d || !e;
+ let _ = !(a && b || c);
+}
+
+fn equality_stuff() {
+ let a: i32 = unimplemented!();
+ let b: i32 = unimplemented!();
+ let _ = a == b && a != b;
+ let _ = a < b && a >= b;
+ let _ = a > b && a <= b;
+ let _ = a > b && a == b;
+}
+
+fn check_expect() {
+ let a: i32 = unimplemented!();
+ let b: i32 = unimplemented!();
+ #[expect(clippy::logic_bug)]
+ let _ = a < b && a >= b;
+}
diff --git a/src/tools/clippy/tests/ui/logic_bug.stderr b/src/tools/clippy/tests/ui/logic_bug.stderr
new file mode 100644
index 000000000..4021fbf45
--- /dev/null
+++ b/src/tools/clippy/tests/ui/logic_bug.stderr
@@ -0,0 +1,63 @@
+error: this boolean expression contains a logic bug
+ --> $DIR/logic_bug.rs:11:13
+ |
+LL | let _ = a && b || a;
+ | ^^^^^^^^^^^ help: it would look like the following: `a`
+ |
+ = note: `-D clippy::logic-bug` implied by `-D warnings`
+help: this expression can be optimized out by applying boolean operations to the outer expression
+ --> $DIR/logic_bug.rs:11:18
+ |
+LL | let _ = a && b || a;
+ | ^
+
+error: this boolean expression contains a logic bug
+ --> $DIR/logic_bug.rs:13:13
+ |
+LL | let _ = false && a;
+ | ^^^^^^^^^^ help: it would look like the following: `false`
+ |
+help: this expression can be optimized out by applying boolean operations to the outer expression
+ --> $DIR/logic_bug.rs:13:22
+ |
+LL | let _ = false && a;
+ | ^
+
+error: this boolean expression contains a logic bug
+ --> $DIR/logic_bug.rs:23:13
+ |
+LL | let _ = a == b && a != b;
+ | ^^^^^^^^^^^^^^^^ help: it would look like the following: `false`
+ |
+help: this expression can be optimized out by applying boolean operations to the outer expression
+ --> $DIR/logic_bug.rs:23:13
+ |
+LL | let _ = a == b && a != b;
+ | ^^^^^^
+
+error: this boolean expression contains a logic bug
+ --> $DIR/logic_bug.rs:24:13
+ |
+LL | let _ = a < b && a >= b;
+ | ^^^^^^^^^^^^^^^ help: it would look like the following: `false`
+ |
+help: this expression can be optimized out by applying boolean operations to the outer expression
+ --> $DIR/logic_bug.rs:24:13
+ |
+LL | let _ = a < b && a >= b;
+ | ^^^^^
+
+error: this boolean expression contains a logic bug
+ --> $DIR/logic_bug.rs:25:13
+ |
+LL | let _ = a > b && a <= b;
+ | ^^^^^^^^^^^^^^^ help: it would look like the following: `false`
+ |
+help: this expression can be optimized out by applying boolean operations to the outer expression
+ --> $DIR/logic_bug.rs:25:13
+ |
+LL | let _ = a > b && a <= b;
+ | ^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/lossy_float_literal.fixed b/src/tools/clippy/tests/ui/lossy_float_literal.fixed
new file mode 100644
index 000000000..24e372354
--- /dev/null
+++ b/src/tools/clippy/tests/ui/lossy_float_literal.fixed
@@ -0,0 +1,35 @@
+// run-rustfix
+#![warn(clippy::lossy_float_literal)]
+
+fn main() {
+ // Lossy whole-number float literals
+ let _: f32 = 16_777_216.0;
+ let _: f32 = 16_777_220.0;
+ let _: f32 = 16_777_220.0;
+ let _: f32 = 16_777_220.0;
+ let _ = 16_777_220_f32;
+ let _: f32 = -16_777_220.0;
+ let _: f64 = 9_007_199_254_740_992.0;
+ let _: f64 = 9_007_199_254_740_992.0;
+ let _: f64 = 9_007_199_254_740_992.0;
+ let _ = 9_007_199_254_740_992_f64;
+ let _: f64 = -9_007_199_254_740_992.0;
+
+ // Lossless whole number float literals
+ let _: f32 = 16_777_216.0;
+ let _: f32 = 16_777_218.0;
+ let _: f32 = 16_777_220.0;
+ let _: f32 = -16_777_216.0;
+ let _: f32 = -16_777_220.0;
+ let _: f64 = 16_777_217.0;
+ let _: f64 = -16_777_217.0;
+ let _: f64 = 9_007_199_254_740_992.0;
+ let _: f64 = -9_007_199_254_740_992.0;
+
+ // Ignored whole number float literals
+ let _: f32 = 1e25;
+ let _: f32 = 1E25;
+ let _: f64 = 1e99;
+ let _: f64 = 1E99;
+ let _: f32 = 0.1;
+}
diff --git a/src/tools/clippy/tests/ui/lossy_float_literal.rs b/src/tools/clippy/tests/ui/lossy_float_literal.rs
new file mode 100644
index 000000000..3dcf98fa0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/lossy_float_literal.rs
@@ -0,0 +1,35 @@
+// run-rustfix
+#![warn(clippy::lossy_float_literal)]
+
+fn main() {
+ // Lossy whole-number float literals
+ let _: f32 = 16_777_217.0;
+ let _: f32 = 16_777_219.0;
+ let _: f32 = 16_777_219.;
+ let _: f32 = 16_777_219.000;
+ let _ = 16_777_219f32;
+ let _: f32 = -16_777_219.0;
+ let _: f64 = 9_007_199_254_740_993.0;
+ let _: f64 = 9_007_199_254_740_993.;
+ let _: f64 = 9_007_199_254_740_993.00;
+ let _ = 9_007_199_254_740_993f64;
+ let _: f64 = -9_007_199_254_740_993.0;
+
+ // Lossless whole number float literals
+ let _: f32 = 16_777_216.0;
+ let _: f32 = 16_777_218.0;
+ let _: f32 = 16_777_220.0;
+ let _: f32 = -16_777_216.0;
+ let _: f32 = -16_777_220.0;
+ let _: f64 = 16_777_217.0;
+ let _: f64 = -16_777_217.0;
+ let _: f64 = 9_007_199_254_740_992.0;
+ let _: f64 = -9_007_199_254_740_992.0;
+
+ // Ignored whole number float literals
+ let _: f32 = 1e25;
+ let _: f32 = 1E25;
+ let _: f64 = 1e99;
+ let _: f64 = 1E99;
+ let _: f32 = 0.1;
+}
diff --git a/src/tools/clippy/tests/ui/lossy_float_literal.stderr b/src/tools/clippy/tests/ui/lossy_float_literal.stderr
new file mode 100644
index 000000000..d2193c0c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/lossy_float_literal.stderr
@@ -0,0 +1,70 @@
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:6:18
+ |
+LL | let _: f32 = 16_777_217.0;
+ | ^^^^^^^^^^^^ help: consider changing the type or replacing it with: `16_777_216.0`
+ |
+ = note: `-D clippy::lossy-float-literal` implied by `-D warnings`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:7:18
+ |
+LL | let _: f32 = 16_777_219.0;
+ | ^^^^^^^^^^^^ help: consider changing the type or replacing it with: `16_777_220.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:8:18
+ |
+LL | let _: f32 = 16_777_219.;
+ | ^^^^^^^^^^^ help: consider changing the type or replacing it with: `16_777_220.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:9:18
+ |
+LL | let _: f32 = 16_777_219.000;
+ | ^^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `16_777_220.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:10:13
+ |
+LL | let _ = 16_777_219f32;
+ | ^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `16_777_220_f32`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:11:19
+ |
+LL | let _: f32 = -16_777_219.0;
+ | ^^^^^^^^^^^^ help: consider changing the type or replacing it with: `16_777_220.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:12:18
+ |
+LL | let _: f64 = 9_007_199_254_740_993.0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `9_007_199_254_740_992.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:13:18
+ |
+LL | let _: f64 = 9_007_199_254_740_993.;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `9_007_199_254_740_992.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:14:18
+ |
+LL | let _: f64 = 9_007_199_254_740_993.00;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `9_007_199_254_740_992.0`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:15:13
+ |
+LL | let _ = 9_007_199_254_740_993f64;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `9_007_199_254_740_992_f64`
+
+error: literal cannot be represented as the underlying type without loss of precision
+ --> $DIR/lossy_float_literal.rs:16:19
+ |
+LL | let _: f64 = -9_007_199_254_740_993.0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing the type or replacing it with: `9_007_199_254_740_992.0`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.fixed b/src/tools/clippy/tests/ui/macro_use_imports.fixed
new file mode 100644
index 000000000..e612480d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/macro_use_imports.fixed
@@ -0,0 +1,48 @@
+// aux-build:macro_rules.rs
+// aux-build:macro_use_helper.rs
+// aux-build:proc_macro_derive.rs
+// run-rustfix
+// ignore-32bit
+
+#![feature(lint_reasons)]
+#![allow(unused_imports, unreachable_code, unused_variables, dead_code, unused_attributes)]
+#![allow(clippy::single_component_path_imports)]
+#![warn(clippy::macro_use_imports)]
+
+#[macro_use]
+extern crate macro_use_helper as mac;
+
+#[macro_use]
+extern crate proc_macro_derive as mini_mac;
+
+mod a {
+ use mac::{pub_macro, function_macro, ty_macro, inner_mod_macro, pub_in_private_macro};
+ use mac;
+ use mini_mac::ClippyMiniMacroTest;
+ use mini_mac;
+ use mac::{inner::foofoo, inner::try_err};
+ use mac::inner;
+ use mac::inner::nested::string_add;
+ use mac::inner::nested;
+
+ #[derive(ClippyMiniMacroTest)]
+ struct Test;
+
+ fn test() {
+ pub_macro!();
+ inner_mod_macro!();
+ pub_in_private_macro!(_var);
+ function_macro!();
+ let v: ty_macro!() = Vec::default();
+
+ inner::try_err!();
+ inner::foofoo!();
+ nested::string_add!();
+ }
+}
+
+// issue #7015, ICE due to calling `module_children` with local `DefId`
+#[macro_use]
+use a as b;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.rs b/src/tools/clippy/tests/ui/macro_use_imports.rs
new file mode 100644
index 000000000..b34817cc3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/macro_use_imports.rs
@@ -0,0 +1,48 @@
+// aux-build:macro_rules.rs
+// aux-build:macro_use_helper.rs
+// aux-build:proc_macro_derive.rs
+// run-rustfix
+// ignore-32bit
+
+#![feature(lint_reasons)]
+#![allow(unused_imports, unreachable_code, unused_variables, dead_code, unused_attributes)]
+#![allow(clippy::single_component_path_imports)]
+#![warn(clippy::macro_use_imports)]
+
+#[macro_use]
+extern crate macro_use_helper as mac;
+
+#[macro_use]
+extern crate proc_macro_derive as mini_mac;
+
+mod a {
+ #[macro_use]
+ use mac;
+ #[macro_use]
+ use mini_mac;
+ #[macro_use]
+ use mac::inner;
+ #[macro_use]
+ use mac::inner::nested;
+
+ #[derive(ClippyMiniMacroTest)]
+ struct Test;
+
+ fn test() {
+ pub_macro!();
+ inner_mod_macro!();
+ pub_in_private_macro!(_var);
+ function_macro!();
+ let v: ty_macro!() = Vec::default();
+
+ inner::try_err!();
+ inner::foofoo!();
+ nested::string_add!();
+ }
+}
+
+// issue #7015, ICE due to calling `module_children` with local `DefId`
+#[macro_use]
+use a as b;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/macro_use_imports.stderr b/src/tools/clippy/tests/ui/macro_use_imports.stderr
new file mode 100644
index 000000000..bf7b6edd0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/macro_use_imports.stderr
@@ -0,0 +1,28 @@
+error: `macro_use` attributes are no longer needed in the Rust 2018 edition
+ --> $DIR/macro_use_imports.rs:23:5
+ |
+LL | #[macro_use]
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{inner::foofoo, inner::try_err};`
+ |
+ = note: `-D clippy::macro-use-imports` implied by `-D warnings`
+
+error: `macro_use` attributes are no longer needed in the Rust 2018 edition
+ --> $DIR/macro_use_imports.rs:21:5
+ |
+LL | #[macro_use]
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mini_mac::ClippyMiniMacroTest;`
+
+error: `macro_use` attributes are no longer needed in the Rust 2018 edition
+ --> $DIR/macro_use_imports.rs:25:5
+ |
+LL | #[macro_use]
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::inner::nested::string_add;`
+
+error: `macro_use` attributes are no longer needed in the Rust 2018 edition
+ --> $DIR/macro_use_imports.rs:19:5
+ |
+LL | #[macro_use]
+ | ^^^^^^^^^^^^ help: remove the attribute and import the macro directly, try: `use mac::{pub_macro, function_macro, ty_macro, inner_mod_macro, pub_in_private_macro};`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/macro_use_imports_expect.rs b/src/tools/clippy/tests/ui/macro_use_imports_expect.rs
new file mode 100644
index 000000000..8a1b05da9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/macro_use_imports_expect.rs
@@ -0,0 +1,51 @@
+// aux-build:macro_rules.rs
+// aux-build:macro_use_helper.rs
+// aux-build:proc_macro_derive.rs
+// ignore-32bit
+
+#![feature(lint_reasons)]
+#![allow(unused_imports, unreachable_code, unused_variables, dead_code, unused_attributes)]
+#![allow(clippy::single_component_path_imports)]
+#![warn(clippy::macro_use_imports)]
+
+#[macro_use]
+extern crate macro_use_helper as mac;
+
+#[macro_use]
+extern crate proc_macro_derive as mini_mac;
+
+mod a {
+ #[expect(clippy::macro_use_imports)]
+ #[macro_use]
+ use mac;
+ #[expect(clippy::macro_use_imports)]
+ #[macro_use]
+ use mini_mac;
+ #[expect(clippy::macro_use_imports)]
+ #[macro_use]
+ use mac::inner;
+ #[expect(clippy::macro_use_imports)]
+ #[macro_use]
+ use mac::inner::nested;
+
+ #[derive(ClippyMiniMacroTest)]
+ struct Test;
+
+ fn test() {
+ pub_macro!();
+ inner_mod_macro!();
+ pub_in_private_macro!(_var);
+ function_macro!();
+ let v: ty_macro!() = Vec::default();
+
+ inner::try_err!();
+ inner::foofoo!();
+ nested::string_add!();
+ }
+}
+
+// issue #7015, ICE due to calling `module_children` with local `DefId`
+#[macro_use]
+use a as b;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2018.fixed b/src/tools/clippy/tests/ui/manual_assert.edition2018.fixed
new file mode 100644
index 000000000..d0bc640db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_assert.edition2018.fixed
@@ -0,0 +1,52 @@
+// revisions: edition2018 edition2021
+// [edition2018] edition:2018
+// [edition2021] edition:2021
+// run-rustfix
+
+#![warn(clippy::manual_assert)]
+#![allow(clippy::nonminimal_bool)]
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ let a = vec![1, 2, 3];
+ let c = Some(2);
+ if !a.is_empty()
+ && a.len() == 3
+ && c != None
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ {
+ panic!("qaqaq{:?}", a);
+ }
+ assert!(a.is_empty(), "qaqaq{:?}", a);
+ assert!(a.is_empty(), "qwqwq");
+ if a.len() == 3 {
+ println!("qwq");
+ println!("qwq");
+ println!("qwq");
+ }
+ if let Some(b) = c {
+ panic!("orz {}", b);
+ }
+ if a.len() == 3 {
+ panic!("qaqaq");
+ } else {
+ println!("qwq");
+ }
+ let b = vec![1, 2, 3];
+ assert!(!b.is_empty(), "panic1");
+ assert!(!(b.is_empty() && a.is_empty()), "panic2");
+ assert!(!(a.is_empty() && !b.is_empty()), "panic3");
+ assert!(!(b.is_empty() || a.is_empty()), "panic4");
+ assert!(!(a.is_empty() || !b.is_empty()), "panic5");
+ assert!(!a.is_empty(), "with expansion {}", one!());
+}
diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr b/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr
new file mode 100644
index 000000000..a0f31afd6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr
@@ -0,0 +1,68 @@
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:30:5
+ |
+LL | / if !a.is_empty() {
+LL | | panic!("qaqaq{:?}", a);
+LL | | }
+ | |_____^ help: try: `assert!(a.is_empty(), "qaqaq{:?}", a);`
+ |
+ = note: `-D clippy::manual-assert` implied by `-D warnings`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:33:5
+ |
+LL | / if !a.is_empty() {
+LL | | panic!("qwqwq");
+LL | | }
+ | |_____^ help: try: `assert!(a.is_empty(), "qwqwq");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:50:5
+ |
+LL | / if b.is_empty() {
+LL | | panic!("panic1");
+LL | | }
+ | |_____^ help: try: `assert!(!b.is_empty(), "panic1");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:53:5
+ |
+LL | / if b.is_empty() && a.is_empty() {
+LL | | panic!("panic2");
+LL | | }
+ | |_____^ help: try: `assert!(!(b.is_empty() && a.is_empty()), "panic2");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:56:5
+ |
+LL | / if a.is_empty() && !b.is_empty() {
+LL | | panic!("panic3");
+LL | | }
+ | |_____^ help: try: `assert!(!(a.is_empty() && !b.is_empty()), "panic3");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:59:5
+ |
+LL | / if b.is_empty() || a.is_empty() {
+LL | | panic!("panic4");
+LL | | }
+ | |_____^ help: try: `assert!(!(b.is_empty() || a.is_empty()), "panic4");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:62:5
+ |
+LL | / if a.is_empty() || !b.is_empty() {
+LL | | panic!("panic5");
+LL | | }
+ | |_____^ help: try: `assert!(!(a.is_empty() || !b.is_empty()), "panic5");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:65:5
+ |
+LL | / if a.is_empty() {
+LL | | panic!("with expansion {}", one!())
+LL | | }
+ | |_____^ help: try: `assert!(!a.is_empty(), "with expansion {}", one!());`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2021.fixed b/src/tools/clippy/tests/ui/manual_assert.edition2021.fixed
new file mode 100644
index 000000000..d0bc640db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_assert.edition2021.fixed
@@ -0,0 +1,52 @@
+// revisions: edition2018 edition2021
+// [edition2018] edition:2018
+// [edition2021] edition:2021
+// run-rustfix
+
+#![warn(clippy::manual_assert)]
+#![allow(clippy::nonminimal_bool)]
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ let a = vec![1, 2, 3];
+ let c = Some(2);
+ if !a.is_empty()
+ && a.len() == 3
+ && c != None
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ {
+ panic!("qaqaq{:?}", a);
+ }
+ assert!(a.is_empty(), "qaqaq{:?}", a);
+ assert!(a.is_empty(), "qwqwq");
+ if a.len() == 3 {
+ println!("qwq");
+ println!("qwq");
+ println!("qwq");
+ }
+ if let Some(b) = c {
+ panic!("orz {}", b);
+ }
+ if a.len() == 3 {
+ panic!("qaqaq");
+ } else {
+ println!("qwq");
+ }
+ let b = vec![1, 2, 3];
+ assert!(!b.is_empty(), "panic1");
+ assert!(!(b.is_empty() && a.is_empty()), "panic2");
+ assert!(!(a.is_empty() && !b.is_empty()), "panic3");
+ assert!(!(b.is_empty() || a.is_empty()), "panic4");
+ assert!(!(a.is_empty() || !b.is_empty()), "panic5");
+ assert!(!a.is_empty(), "with expansion {}", one!());
+}
diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr b/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr
new file mode 100644
index 000000000..a0f31afd6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr
@@ -0,0 +1,68 @@
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:30:5
+ |
+LL | / if !a.is_empty() {
+LL | | panic!("qaqaq{:?}", a);
+LL | | }
+ | |_____^ help: try: `assert!(a.is_empty(), "qaqaq{:?}", a);`
+ |
+ = note: `-D clippy::manual-assert` implied by `-D warnings`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:33:5
+ |
+LL | / if !a.is_empty() {
+LL | | panic!("qwqwq");
+LL | | }
+ | |_____^ help: try: `assert!(a.is_empty(), "qwqwq");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:50:5
+ |
+LL | / if b.is_empty() {
+LL | | panic!("panic1");
+LL | | }
+ | |_____^ help: try: `assert!(!b.is_empty(), "panic1");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:53:5
+ |
+LL | / if b.is_empty() && a.is_empty() {
+LL | | panic!("panic2");
+LL | | }
+ | |_____^ help: try: `assert!(!(b.is_empty() && a.is_empty()), "panic2");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:56:5
+ |
+LL | / if a.is_empty() && !b.is_empty() {
+LL | | panic!("panic3");
+LL | | }
+ | |_____^ help: try: `assert!(!(a.is_empty() && !b.is_empty()), "panic3");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:59:5
+ |
+LL | / if b.is_empty() || a.is_empty() {
+LL | | panic!("panic4");
+LL | | }
+ | |_____^ help: try: `assert!(!(b.is_empty() || a.is_empty()), "panic4");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:62:5
+ |
+LL | / if a.is_empty() || !b.is_empty() {
+LL | | panic!("panic5");
+LL | | }
+ | |_____^ help: try: `assert!(!(a.is_empty() || !b.is_empty()), "panic5");`
+
+error: only a `panic!` in `if`-then statement
+ --> $DIR/manual_assert.rs:65:5
+ |
+LL | / if a.is_empty() {
+LL | | panic!("with expansion {}", one!())
+LL | | }
+ | |_____^ help: try: `assert!(!a.is_empty(), "with expansion {}", one!());`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_assert.fixed b/src/tools/clippy/tests/ui/manual_assert.fixed
new file mode 100644
index 000000000..6c2a25c37
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_assert.fixed
@@ -0,0 +1,45 @@
+// revisions: edition2018 edition2021
+// [edition2018] edition:2018
+// [edition2021] edition:2021
+// run-rustfix
+
+#![warn(clippy::manual_assert)]
+#![allow(clippy::nonminimal_bool)]
+
+fn main() {
+ let a = vec![1, 2, 3];
+ let c = Some(2);
+ if !a.is_empty()
+ && a.len() == 3
+ && c != None
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ {
+ panic!("qaqaq{:?}", a);
+ }
+ assert!(a.is_empty(), "qaqaq{:?}", a);
+ assert!(a.is_empty(), "qwqwq");
+ if a.len() == 3 {
+ println!("qwq");
+ println!("qwq");
+ println!("qwq");
+ }
+ if let Some(b) = c {
+ panic!("orz {}", b);
+ }
+ if a.len() == 3 {
+ panic!("qaqaq");
+ } else {
+ println!("qwq");
+ }
+ let b = vec![1, 2, 3];
+ assert!(!b.is_empty(), "panic1");
+ assert!(!(b.is_empty() && a.is_empty()), "panic2");
+ assert!(!(a.is_empty() && !b.is_empty()), "panic3");
+ assert!(!(b.is_empty() || a.is_empty()), "panic4");
+ assert!(!(a.is_empty() || !b.is_empty()), "panic5");
+}
diff --git a/src/tools/clippy/tests/ui/manual_assert.rs b/src/tools/clippy/tests/ui/manual_assert.rs
new file mode 100644
index 000000000..027747d83
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_assert.rs
@@ -0,0 +1,68 @@
+// revisions: edition2018 edition2021
+// [edition2018] edition:2018
+// [edition2021] edition:2021
+// run-rustfix
+
+#![warn(clippy::manual_assert)]
+#![allow(clippy::nonminimal_bool)]
+
+macro_rules! one {
+ () => {
+ 1
+ };
+}
+
+fn main() {
+ let a = vec![1, 2, 3];
+ let c = Some(2);
+ if !a.is_empty()
+ && a.len() == 3
+ && c != None
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ && !a.is_empty()
+ && a.len() == 3
+ {
+ panic!("qaqaq{:?}", a);
+ }
+ if !a.is_empty() {
+ panic!("qaqaq{:?}", a);
+ }
+ if !a.is_empty() {
+ panic!("qwqwq");
+ }
+ if a.len() == 3 {
+ println!("qwq");
+ println!("qwq");
+ println!("qwq");
+ }
+ if let Some(b) = c {
+ panic!("orz {}", b);
+ }
+ if a.len() == 3 {
+ panic!("qaqaq");
+ } else {
+ println!("qwq");
+ }
+ let b = vec![1, 2, 3];
+ if b.is_empty() {
+ panic!("panic1");
+ }
+ if b.is_empty() && a.is_empty() {
+ panic!("panic2");
+ }
+ if a.is_empty() && !b.is_empty() {
+ panic!("panic3");
+ }
+ if b.is_empty() || a.is_empty() {
+ panic!("panic4");
+ }
+ if a.is_empty() || !b.is_empty() {
+ panic!("panic5");
+ }
+ if a.is_empty() {
+ panic!("with expansion {}", one!())
+ }
+}
diff --git a/src/tools/clippy/tests/ui/manual_async_fn.fixed b/src/tools/clippy/tests/ui/manual_async_fn.fixed
new file mode 100644
index 000000000..b7e46a4a8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_async_fn.fixed
@@ -0,0 +1,110 @@
+// run-rustfix
+#![warn(clippy::manual_async_fn)]
+#![allow(unused)]
+
+use std::future::Future;
+
+async fn fut() -> i32 { 42 }
+
+#[rustfmt::skip]
+async fn fut2() -> i32 { 42 }
+
+#[rustfmt::skip]
+async fn fut3() -> i32 { 42 }
+
+async fn empty_fut() {}
+
+#[rustfmt::skip]
+async fn empty_fut2() {}
+
+#[rustfmt::skip]
+async fn empty_fut3() {}
+
+async fn core_fut() -> i32 { 42 }
+
+// should be ignored
+fn has_other_stmts() -> impl core::future::Future<Output = i32> {
+ let _ = 42;
+ async move { 42 }
+}
+
+// should be ignored
+fn not_fut() -> i32 {
+ 42
+}
+
+// should be ignored
+async fn already_async() -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+struct S;
+impl S {
+ async fn inh_fut() -> i32 {
+ // NOTE: this code is here just to check that the indentation is correct in the suggested fix
+ let a = 42;
+ let b = 21;
+ if a < b {
+ let c = 21;
+ let d = 42;
+ if c < d {
+ let _ = 42;
+ }
+ }
+ 42
+ }
+
+ // should be ignored
+ fn not_fut(&self) -> i32 {
+ 42
+ }
+
+ // should be ignored
+ fn has_other_stmts() -> impl core::future::Future<Output = i32> {
+ let _ = 42;
+ async move { 42 }
+ }
+
+ // should be ignored
+ async fn already_async(&self) -> impl Future<Output = i32> {
+ async { 42 }
+ }
+}
+
+// Tests related to lifetime capture
+
+async fn elided(_: &i32) -> i32 { 42 }
+
+// should be ignored
+fn elided_not_bound(_: &i32) -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+async fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> i32 { 42 }
+
+// should be ignored
+#[allow(clippy::needless_lifetimes)]
+fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+// should be ignored
+mod issue_5765 {
+ use std::future::Future;
+
+ struct A;
+ impl A {
+ fn f(&self) -> impl Future<Output = ()> {
+ async {}
+ }
+ }
+
+ fn test() {
+ let _future = {
+ let a = A;
+ a.f()
+ };
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_async_fn.rs b/src/tools/clippy/tests/ui/manual_async_fn.rs
new file mode 100644
index 000000000..b05429da6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_async_fn.rs
@@ -0,0 +1,130 @@
+// run-rustfix
+#![warn(clippy::manual_async_fn)]
+#![allow(unused)]
+
+use std::future::Future;
+
+fn fut() -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+#[rustfmt::skip]
+fn fut2() ->impl Future<Output = i32> {
+ async { 42 }
+}
+
+#[rustfmt::skip]
+fn fut3()-> impl Future<Output = i32> {
+ async { 42 }
+}
+
+fn empty_fut() -> impl Future<Output = ()> {
+ async {}
+}
+
+#[rustfmt::skip]
+fn empty_fut2() ->impl Future<Output = ()> {
+ async {}
+}
+
+#[rustfmt::skip]
+fn empty_fut3()-> impl Future<Output = ()> {
+ async {}
+}
+
+fn core_fut() -> impl core::future::Future<Output = i32> {
+ async move { 42 }
+}
+
+// should be ignored
+fn has_other_stmts() -> impl core::future::Future<Output = i32> {
+ let _ = 42;
+ async move { 42 }
+}
+
+// should be ignored
+fn not_fut() -> i32 {
+ 42
+}
+
+// should be ignored
+async fn already_async() -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+struct S;
+impl S {
+ fn inh_fut() -> impl Future<Output = i32> {
+ async {
+ // NOTE: this code is here just to check that the indentation is correct in the suggested fix
+ let a = 42;
+ let b = 21;
+ if a < b {
+ let c = 21;
+ let d = 42;
+ if c < d {
+ let _ = 42;
+ }
+ }
+ 42
+ }
+ }
+
+ // should be ignored
+ fn not_fut(&self) -> i32 {
+ 42
+ }
+
+ // should be ignored
+ fn has_other_stmts() -> impl core::future::Future<Output = i32> {
+ let _ = 42;
+ async move { 42 }
+ }
+
+ // should be ignored
+ async fn already_async(&self) -> impl Future<Output = i32> {
+ async { 42 }
+ }
+}
+
+// Tests related to lifetime capture
+
+fn elided(_: &i32) -> impl Future<Output = i32> + '_ {
+ async { 42 }
+}
+
+// should be ignored
+fn elided_not_bound(_: &i32) -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b {
+ async { 42 }
+}
+
+// should be ignored
+#[allow(clippy::needless_lifetimes)]
+fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> {
+ async { 42 }
+}
+
+// should be ignored
+mod issue_5765 {
+ use std::future::Future;
+
+ struct A;
+ impl A {
+ fn f(&self) -> impl Future<Output = ()> {
+ async {}
+ }
+ }
+
+ fn test() {
+ let _future = {
+ let a = A;
+ a.f()
+ };
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_async_fn.stderr b/src/tools/clippy/tests/ui/manual_async_fn.stderr
new file mode 100644
index 000000000..0a903ed6f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_async_fn.stderr
@@ -0,0 +1,165 @@
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:7:1
+ |
+LL | fn fut() -> impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::manual-async-fn` implied by `-D warnings`
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn fut() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn fut() -> impl Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:12:1
+ |
+LL | fn fut2() ->impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn fut2() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn fut2() ->impl Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:17:1
+ |
+LL | fn fut3()-> impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn fut3() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn fut3()-> impl Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:21:1
+ |
+LL | fn empty_fut() -> impl Future<Output = ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and remove the return type
+ |
+LL | async fn empty_fut() {
+ | ~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn empty_fut() -> impl Future<Output = ()> {}
+ | ~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:26:1
+ |
+LL | fn empty_fut2() ->impl Future<Output = ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and remove the return type
+ |
+LL | async fn empty_fut2() {
+ | ~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn empty_fut2() ->impl Future<Output = ()> {}
+ | ~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:31:1
+ |
+LL | fn empty_fut3()-> impl Future<Output = ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and remove the return type
+ |
+LL | async fn empty_fut3() {
+ | ~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn empty_fut3()-> impl Future<Output = ()> {}
+ | ~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:35:1
+ |
+LL | fn core_fut() -> impl core::future::Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn core_fut() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn core_fut() -> impl core::future::Future<Output = i32> { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:57:5
+ |
+LL | fn inh_fut() -> impl Future<Output = i32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn inh_fut() -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL ~ fn inh_fut() -> impl Future<Output = i32> {
+LL + // NOTE: this code is here just to check that the indentation is correct in the suggested fix
+LL + let a = 42;
+LL + let b = 21;
+LL + if a < b {
+LL + let c = 21;
+LL + let d = 42;
+LL + if c < d {
+LL + let _ = 42;
+LL + }
+LL + }
+LL + 42
+LL + }
+ |
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:92:1
+ |
+LL | fn elided(_: &i32) -> impl Future<Output = i32> + '_ {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn elided(_: &i32) -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn elided(_: &i32) -> impl Future<Output = i32> + '_ { 42 }
+ | ~~~~~~
+
+error: this function can be simplified using the `async fn` syntax
+ --> $DIR/manual_async_fn.rs:101:1
+ |
+LL | fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: make the function `async` and return the output of the future directly
+ |
+LL | async fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> i32 {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: move the body of the async block to the enclosing function
+ |
+LL | fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future<Output = i32> + 'a + 'b { 42 }
+ | ~~~~~~
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_bits.fixed b/src/tools/clippy/tests/ui/manual_bits.fixed
new file mode 100644
index 000000000..386360dbd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_bits.fixed
@@ -0,0 +1,59 @@
+// run-rustfix
+
+#![warn(clippy::manual_bits)]
+#![allow(
+ clippy::no_effect,
+ clippy::useless_conversion,
+ path_statements,
+ unused_must_use,
+ clippy::unnecessary_operation
+)]
+
+use std::mem::{size_of, size_of_val};
+
+fn main() {
+ i8::BITS as usize;
+ i16::BITS as usize;
+ i32::BITS as usize;
+ i64::BITS as usize;
+ i128::BITS as usize;
+ isize::BITS as usize;
+
+ u8::BITS as usize;
+ u16::BITS as usize;
+ u32::BITS as usize;
+ u64::BITS as usize;
+ u128::BITS as usize;
+ usize::BITS as usize;
+
+ i8::BITS as usize;
+ i16::BITS as usize;
+ i32::BITS as usize;
+ i64::BITS as usize;
+ i128::BITS as usize;
+ isize::BITS as usize;
+
+ u8::BITS as usize;
+ u16::BITS as usize;
+ u32::BITS as usize;
+ u64::BITS as usize;
+ u128::BITS as usize;
+ usize::BITS as usize;
+
+ size_of::<usize>() * 4;
+ 4 * size_of::<usize>();
+ size_of::<bool>() * 8;
+ 8 * size_of::<bool>();
+
+ size_of_val(&0u32) * 8;
+
+ type Word = u32;
+ Word::BITS as usize;
+ type Bool = bool;
+ size_of::<Bool>() * 8;
+
+ let _: u32 = u128::BITS as u32;
+ let _: u32 = u128::BITS.try_into().unwrap();
+ let _ = (u128::BITS as usize).pow(5);
+ let _ = &(u128::BITS as usize);
+}
diff --git a/src/tools/clippy/tests/ui/manual_bits.rs b/src/tools/clippy/tests/ui/manual_bits.rs
new file mode 100644
index 000000000..62638f047
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_bits.rs
@@ -0,0 +1,59 @@
+// run-rustfix
+
+#![warn(clippy::manual_bits)]
+#![allow(
+ clippy::no_effect,
+ clippy::useless_conversion,
+ path_statements,
+ unused_must_use,
+ clippy::unnecessary_operation
+)]
+
+use std::mem::{size_of, size_of_val};
+
+fn main() {
+ size_of::<i8>() * 8;
+ size_of::<i16>() * 8;
+ size_of::<i32>() * 8;
+ size_of::<i64>() * 8;
+ size_of::<i128>() * 8;
+ size_of::<isize>() * 8;
+
+ size_of::<u8>() * 8;
+ size_of::<u16>() * 8;
+ size_of::<u32>() * 8;
+ size_of::<u64>() * 8;
+ size_of::<u128>() * 8;
+ size_of::<usize>() * 8;
+
+ 8 * size_of::<i8>();
+ 8 * size_of::<i16>();
+ 8 * size_of::<i32>();
+ 8 * size_of::<i64>();
+ 8 * size_of::<i128>();
+ 8 * size_of::<isize>();
+
+ 8 * size_of::<u8>();
+ 8 * size_of::<u16>();
+ 8 * size_of::<u32>();
+ 8 * size_of::<u64>();
+ 8 * size_of::<u128>();
+ 8 * size_of::<usize>();
+
+ size_of::<usize>() * 4;
+ 4 * size_of::<usize>();
+ size_of::<bool>() * 8;
+ 8 * size_of::<bool>();
+
+ size_of_val(&0u32) * 8;
+
+ type Word = u32;
+ size_of::<Word>() * 8;
+ type Bool = bool;
+ size_of::<Bool>() * 8;
+
+ let _: u32 = (size_of::<u128>() * 8) as u32;
+ let _: u32 = (size_of::<u128>() * 8).try_into().unwrap();
+ let _ = (size_of::<u128>() * 8).pow(5);
+ let _ = &(size_of::<u128>() * 8);
+}
diff --git a/src/tools/clippy/tests/ui/manual_bits.stderr b/src/tools/clippy/tests/ui/manual_bits.stderr
new file mode 100644
index 000000000..69c591a20
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_bits.stderr
@@ -0,0 +1,178 @@
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:15:5
+ |
+LL | size_of::<i8>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `i8::BITS as usize`
+ |
+ = note: `-D clippy::manual-bits` implied by `-D warnings`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:16:5
+ |
+LL | size_of::<i16>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `i16::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:17:5
+ |
+LL | size_of::<i32>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `i32::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:18:5
+ |
+LL | size_of::<i64>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `i64::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:19:5
+ |
+LL | size_of::<i128>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `i128::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:20:5
+ |
+LL | size_of::<isize>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `isize::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:22:5
+ |
+LL | size_of::<u8>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `u8::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:23:5
+ |
+LL | size_of::<u16>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `u16::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:24:5
+ |
+LL | size_of::<u32>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `u32::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:25:5
+ |
+LL | size_of::<u64>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `u64::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:26:5
+ |
+LL | size_of::<u128>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `u128::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:27:5
+ |
+LL | size_of::<usize>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `usize::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:29:5
+ |
+LL | 8 * size_of::<i8>();
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `i8::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:30:5
+ |
+LL | 8 * size_of::<i16>();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `i16::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:31:5
+ |
+LL | 8 * size_of::<i32>();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `i32::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:32:5
+ |
+LL | 8 * size_of::<i64>();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `i64::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:33:5
+ |
+LL | 8 * size_of::<i128>();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `i128::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:34:5
+ |
+LL | 8 * size_of::<isize>();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `isize::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:36:5
+ |
+LL | 8 * size_of::<u8>();
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `u8::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:37:5
+ |
+LL | 8 * size_of::<u16>();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `u16::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:38:5
+ |
+LL | 8 * size_of::<u32>();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `u32::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:39:5
+ |
+LL | 8 * size_of::<u64>();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using: `u64::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:40:5
+ |
+LL | 8 * size_of::<u128>();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `u128::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:41:5
+ |
+LL | 8 * size_of::<usize>();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `usize::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:51:5
+ |
+LL | size_of::<Word>() * 8;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `Word::BITS as usize`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:55:18
+ |
+LL | let _: u32 = (size_of::<u128>() * 8) as u32;
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `u128::BITS`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:56:18
+ |
+LL | let _: u32 = (size_of::<u128>() * 8).try_into().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `u128::BITS`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:57:13
+ |
+LL | let _ = (size_of::<u128>() * 8).pow(5);
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(u128::BITS as usize)`
+
+error: usage of `mem::size_of::<T>()` to obtain the size of `T` in bits
+ --> $DIR/manual_bits.rs:58:14
+ |
+LL | let _ = &(size_of::<u128>() * 8);
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(u128::BITS as usize)`
+
+error: aborting due to 29 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_filter_map.fixed b/src/tools/clippy/tests/ui/manual_filter_map.fixed
new file mode 100644
index 000000000..4936dc9b2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_filter_map.fixed
@@ -0,0 +1,121 @@
+// run-rustfix
+#![allow(dead_code)]
+#![warn(clippy::manual_filter_map)]
+#![allow(clippy::redundant_closure)] // FIXME suggestion may have redundant closure
+
+fn main() {
+ // is_some(), unwrap()
+ let _ = (0..).filter_map(|a| to_opt(a));
+
+ // ref pattern, expect()
+ let _ = (0..).filter_map(|a| to_opt(a));
+
+ // is_ok(), unwrap_or()
+ let _ = (0..).filter_map(|a| to_res(a).ok());
+
+ let _ = (1..5)
+ .filter_map(|y| *to_ref(to_opt(y)));
+ let _ = (1..5)
+ .filter_map(|y| *to_ref(to_opt(y)));
+
+ let _ = (1..5)
+ .filter_map(|y| to_ref(to_res(y)).ok());
+ let _ = (1..5)
+ .filter_map(|y| to_ref(to_res(y)).ok());
+}
+
+#[rustfmt::skip]
+fn simple_equal() {
+ iter::<Option<&u8>>().find_map(|x| x.cloned());
+ iter::<&Option<&u8>>().find_map(|x| x.cloned());
+ iter::<&Option<String>>().find_map(|x| x.as_deref());
+ iter::<Option<&String>>().find_map(|y| to_ref(y).cloned());
+
+ iter::<Result<u8, ()>>().find_map(|x| x.ok());
+ iter::<&Result<u8, ()>>().find_map(|x| x.ok());
+ iter::<&&Result<u8, ()>>().find_map(|x| x.ok());
+ iter::<Result<&u8, ()>>().find_map(|x| x.cloned().ok());
+ iter::<&Result<&u8, ()>>().find_map(|x| x.cloned().ok());
+ iter::<&Result<String, ()>>().find_map(|x| x.as_deref().ok());
+ iter::<Result<&String, ()>>().find_map(|y| to_ref(y).cloned().ok());
+}
+
+fn no_lint() {
+ // no shared code
+ let _ = (0..).filter(|n| *n > 1).map(|n| n + 1);
+
+ // very close but different since filter() provides a reference
+ let _ = (0..).filter(|n| to_opt(n).is_some()).map(|a| to_opt(a).unwrap());
+
+ // similar but different
+ let _ = (0..).filter(|n| to_opt(n).is_some()).map(|n| to_res(n).unwrap());
+ let _ = (0..)
+ .filter(|n| to_opt(n).map(|n| n + 1).is_some())
+ .map(|a| to_opt(a).unwrap());
+}
+
+fn iter<T>() -> impl Iterator<Item = T> {
+ std::iter::empty()
+}
+
+fn to_opt<T>(_: T) -> Option<T> {
+ unimplemented!()
+}
+
+fn to_res<T>(_: T) -> Result<T, ()> {
+ unimplemented!()
+}
+
+fn to_ref<'a, T>(_: T) -> &'a T {
+ unimplemented!()
+}
+
+struct Issue8920<'a> {
+ option_field: Option<String>,
+ result_field: Result<String, ()>,
+ ref_field: Option<&'a usize>,
+}
+
+fn issue_8920() {
+ let mut vec = vec![Issue8920 {
+ option_field: Some(String::from("str")),
+ result_field: Ok(String::from("str")),
+ ref_field: Some(&1),
+ }];
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.option_field.clone());
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.ref_field.cloned());
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.ref_field.copied());
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.result_field.clone().ok());
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.result_field.as_ref().ok());
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.result_field.as_deref().ok());
+
+ let _ = vec
+ .iter_mut()
+ .filter_map(|f| f.result_field.as_mut().ok());
+
+ let _ = vec
+ .iter_mut()
+ .filter_map(|f| f.result_field.as_deref_mut().ok());
+
+ let _ = vec
+ .iter()
+ .filter_map(|f| f.result_field.to_owned().ok());
+}
diff --git a/src/tools/clippy/tests/ui/manual_filter_map.rs b/src/tools/clippy/tests/ui/manual_filter_map.rs
new file mode 100644
index 000000000..8c67e827b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_filter_map.rs
@@ -0,0 +1,134 @@
+// run-rustfix
+#![allow(dead_code)]
+#![warn(clippy::manual_filter_map)]
+#![allow(clippy::redundant_closure)] // FIXME suggestion may have redundant closure
+
+fn main() {
+ // is_some(), unwrap()
+ let _ = (0..).filter(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
+
+ // ref pattern, expect()
+ let _ = (0..).filter(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
+
+ // is_ok(), unwrap_or()
+ let _ = (0..).filter(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
+
+ let _ = (1..5)
+ .filter(|&x| to_ref(to_opt(x)).is_some())
+ .map(|y| to_ref(to_opt(y)).unwrap());
+ let _ = (1..5)
+ .filter(|x| to_ref(to_opt(*x)).is_some())
+ .map(|y| to_ref(to_opt(y)).unwrap());
+
+ let _ = (1..5)
+ .filter(|&x| to_ref(to_res(x)).is_ok())
+ .map(|y| to_ref(to_res(y)).unwrap());
+ let _ = (1..5)
+ .filter(|x| to_ref(to_res(*x)).is_ok())
+ .map(|y| to_ref(to_res(y)).unwrap());
+}
+
+#[rustfmt::skip]
+fn simple_equal() {
+ iter::<Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ iter::<&Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ iter::<&Option<String>>().find(|x| x.is_some()).map(|x| x.as_deref().unwrap());
+ iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
+
+ iter::<Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ iter::<&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ iter::<&&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ iter::<Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ iter::<&Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ iter::<&Result<String, ()>>().find(|x| x.is_ok()).map(|x| x.as_deref().unwrap());
+ iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
+}
+
+fn no_lint() {
+ // no shared code
+ let _ = (0..).filter(|n| *n > 1).map(|n| n + 1);
+
+ // very close but different since filter() provides a reference
+ let _ = (0..).filter(|n| to_opt(n).is_some()).map(|a| to_opt(a).unwrap());
+
+ // similar but different
+ let _ = (0..).filter(|n| to_opt(n).is_some()).map(|n| to_res(n).unwrap());
+ let _ = (0..)
+ .filter(|n| to_opt(n).map(|n| n + 1).is_some())
+ .map(|a| to_opt(a).unwrap());
+}
+
+fn iter<T>() -> impl Iterator<Item = T> {
+ std::iter::empty()
+}
+
+fn to_opt<T>(_: T) -> Option<T> {
+ unimplemented!()
+}
+
+fn to_res<T>(_: T) -> Result<T, ()> {
+ unimplemented!()
+}
+
+fn to_ref<'a, T>(_: T) -> &'a T {
+ unimplemented!()
+}
+
+struct Issue8920<'a> {
+ option_field: Option<String>,
+ result_field: Result<String, ()>,
+ ref_field: Option<&'a usize>,
+}
+
+fn issue_8920() {
+ let mut vec = vec![Issue8920 {
+ option_field: Some(String::from("str")),
+ result_field: Ok(String::from("str")),
+ ref_field: Some(&1),
+ }];
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.option_field.is_some())
+ .map(|f| f.option_field.clone().unwrap());
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.ref_field.is_some())
+ .map(|f| f.ref_field.cloned().unwrap());
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.ref_field.is_some())
+ .map(|f| f.ref_field.copied().unwrap());
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.clone().unwrap());
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_ref().unwrap());
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_deref().unwrap());
+
+ let _ = vec
+ .iter_mut()
+ .filter(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_mut().unwrap());
+
+ let _ = vec
+ .iter_mut()
+ .filter(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_deref_mut().unwrap());
+
+ let _ = vec
+ .iter()
+ .filter(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.to_owned().unwrap());
+}
diff --git a/src/tools/clippy/tests/ui/manual_filter_map.stderr b/src/tools/clippy/tests/ui/manual_filter_map.stderr
new file mode 100644
index 000000000..6e5bbe8f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_filter_map.stderr
@@ -0,0 +1,194 @@
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:8:19
+ |
+LL | let _ = (0..).filter(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `filter_map(|a| to_opt(a))`
+ |
+ = note: `-D clippy::manual-filter-map` implied by `-D warnings`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:11:19
+ |
+LL | let _ = (0..).filter(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `filter_map(|a| to_opt(a))`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:14:19
+ |
+LL | let _ = (0..).filter(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `filter_map(|a| to_res(a).ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:17:10
+ |
+LL | .filter(|&x| to_ref(to_opt(x)).is_some())
+ | __________^
+LL | | .map(|y| to_ref(to_opt(y)).unwrap());
+ | |____________________________________________^ help: try: `filter_map(|y| *to_ref(to_opt(y)))`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:20:10
+ |
+LL | .filter(|x| to_ref(to_opt(*x)).is_some())
+ | __________^
+LL | | .map(|y| to_ref(to_opt(y)).unwrap());
+ | |____________________________________________^ help: try: `filter_map(|y| *to_ref(to_opt(y)))`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:24:10
+ |
+LL | .filter(|&x| to_ref(to_res(x)).is_ok())
+ | __________^
+LL | | .map(|y| to_ref(to_res(y)).unwrap());
+ | |____________________________________________^ help: try: `filter_map(|y| to_ref(to_res(y)).ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:27:10
+ |
+LL | .filter(|x| to_ref(to_res(*x)).is_ok())
+ | __________^
+LL | | .map(|y| to_ref(to_res(y)).unwrap());
+ | |____________________________________________^ help: try: `filter_map(|y| to_ref(to_res(y)).ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:33:27
+ |
+LL | iter::<Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned())`
+ |
+ = note: `-D clippy::manual-find-map` implied by `-D warnings`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:34:28
+ |
+LL | iter::<&Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:35:31
+ |
+LL | iter::<&Option<String>>().find(|x| x.is_some()).map(|x| x.as_deref().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.as_deref())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:36:31
+ |
+LL | iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:38:30
+ |
+LL | iter::<Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:39:31
+ |
+LL | iter::<&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:40:32
+ |
+LL | iter::<&&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:41:31
+ |
+LL | iter::<Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:42:32
+ |
+LL | iter::<&Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:43:35
+ |
+LL | iter::<&Result<String, ()>>().find(|x| x.is_ok()).map(|x| x.as_deref().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.as_deref().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_filter_map.rs:44:35
+ |
+LL | iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned().ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:92:10
+ |
+LL | .filter(|f| f.option_field.is_some())
+ | __________^
+LL | | .map(|f| f.option_field.clone().unwrap());
+ | |_________________________________________________^ help: try: `filter_map(|f| f.option_field.clone())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:97:10
+ |
+LL | .filter(|f| f.ref_field.is_some())
+ | __________^
+LL | | .map(|f| f.ref_field.cloned().unwrap());
+ | |_______________________________________________^ help: try: `filter_map(|f| f.ref_field.cloned())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:102:10
+ |
+LL | .filter(|f| f.ref_field.is_some())
+ | __________^
+LL | | .map(|f| f.ref_field.copied().unwrap());
+ | |_______________________________________________^ help: try: `filter_map(|f| f.ref_field.copied())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:107:10
+ |
+LL | .filter(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.clone().unwrap());
+ | |_________________________________________________^ help: try: `filter_map(|f| f.result_field.clone().ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:112:10
+ |
+LL | .filter(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_ref().unwrap());
+ | |__________________________________________________^ help: try: `filter_map(|f| f.result_field.as_ref().ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:117:10
+ |
+LL | .filter(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_deref().unwrap());
+ | |____________________________________________________^ help: try: `filter_map(|f| f.result_field.as_deref().ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:122:10
+ |
+LL | .filter(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_mut().unwrap());
+ | |__________________________________________________^ help: try: `filter_map(|f| f.result_field.as_mut().ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:127:10
+ |
+LL | .filter(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_deref_mut().unwrap());
+ | |________________________________________________________^ help: try: `filter_map(|f| f.result_field.as_deref_mut().ok())`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:132:10
+ |
+LL | .filter(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.to_owned().unwrap());
+ | |____________________________________________________^ help: try: `filter_map(|f| f.result_field.to_owned().ok())`
+
+error: aborting due to 27 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_find.rs b/src/tools/clippy/tests/ui/manual_find.rs
new file mode 100644
index 000000000..257fe045f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find.rs
@@ -0,0 +1,22 @@
+#![allow(unused)]
+#![warn(clippy::manual_find)]
+
+fn vec_string(strings: Vec<String>) -> Option<String> {
+ for s in strings {
+ if s == String::new() {
+ return Some(s);
+ }
+ }
+ None
+}
+
+fn tuple(arr: Vec<(String, i32)>) -> Option<String> {
+ for (s, _) in arr {
+ if s == String::new() {
+ return Some(s);
+ }
+ }
+ None
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_find.stderr b/src/tools/clippy/tests/ui/manual_find.stderr
new file mode 100644
index 000000000..da0fd4aae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find.stderr
@@ -0,0 +1,29 @@
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find.rs:5:5
+ |
+LL | / for s in strings {
+LL | | if s == String::new() {
+LL | | return Some(s);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `strings.into_iter().find(|s| s == String::new())`
+ |
+ = note: `-D clippy::manual-find` implied by `-D warnings`
+ = note: you may need to dereference some variables
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find.rs:14:5
+ |
+LL | / for (s, _) in arr {
+LL | | if s == String::new() {
+LL | | return Some(s);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().map(|(s, _)| s).find(|s| s == String::new())`
+ |
+ = note: you may need to dereference some variables
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_find_fixable.fixed b/src/tools/clippy/tests/ui/manual_find_fixable.fixed
new file mode 100644
index 000000000..36d1644c2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find_fixable.fixed
@@ -0,0 +1,182 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_return)]
+#![warn(clippy::manual_find)]
+
+use std::collections::HashMap;
+
+const ARRAY: &[u32; 5] = &[2, 7, 1, 9, 3];
+
+fn lookup(n: u32) -> Option<u32> {
+ ARRAY.iter().find(|&&v| v == n).copied()
+}
+
+fn with_pat(arr: Vec<(u32, u32)>) -> Option<u32> {
+ arr.into_iter().map(|(a, _)| a).find(|&a| a % 2 == 0)
+}
+
+struct Data {
+ name: String,
+ is_true: bool,
+}
+fn with_struct(arr: Vec<Data>) -> Option<Data> {
+ arr.into_iter().find(|el| el.name.len() == 10)
+}
+
+struct Tuple(usize, usize);
+fn with_tuple_struct(arr: Vec<Tuple>) -> Option<usize> {
+ arr.into_iter().map(|Tuple(a, _)| a).find(|&a| a >= 3)
+}
+
+struct A;
+impl A {
+ fn should_keep(&self) -> bool {
+ true
+ }
+}
+fn with_method_call(arr: Vec<A>) -> Option<A> {
+ arr.into_iter().find(|el| el.should_keep())
+}
+
+fn with_closure(arr: Vec<u32>) -> Option<u32> {
+ let f = |el: u32| -> u32 { el + 10 };
+ arr.into_iter().find(|&el| f(el) == 20)
+}
+
+fn with_closure2(arr: HashMap<String, i32>) -> Option<i32> {
+ let f = |el: i32| -> bool { el == 10 };
+ arr.values().find(|&&el| f(el)).copied()
+}
+
+fn with_bool(arr: Vec<Data>) -> Option<Data> {
+ arr.into_iter().find(|el| el.is_true)
+}
+
+fn with_side_effects(arr: Vec<u32>) -> Option<u32> {
+ for v in arr {
+ if v == 1 {
+ println!("side effect");
+ return Some(v);
+ }
+ }
+ None
+}
+
+fn with_else(arr: Vec<u32>) -> Option<u32> {
+ for el in arr {
+ if el % 2 == 0 {
+ return Some(el);
+ } else {
+ println!("{}", el);
+ }
+ }
+ None
+}
+
+fn tuple_with_ref(v: [(u8, &u8); 3]) -> Option<u8> {
+ v.into_iter().map(|(_, &x)| x).find(|&x| x > 10)
+}
+
+fn ref_to_tuple_with_ref(v: &[(u8, &u8)]) -> Option<u8> {
+ v.iter().map(|&(_, &x)| x).find(|&x| x > 10)
+}
+
+fn explicit_ret(arr: Vec<i32>) -> Option<i32> {
+ arr.into_iter().find(|&x| x >= 5)
+}
+
+fn plus_one(a: i32) -> Option<i32> {
+ Some(a + 1)
+}
+fn fn_instead_of_some(a: &[i32]) -> Option<i32> {
+ for &x in a {
+ if x == 1 {
+ return plus_one(x);
+ }
+ }
+ None
+}
+
+fn for_in_condition(a: &[i32], b: bool) -> Option<i32> {
+ if b {
+ for &x in a {
+ if x == 1 {
+ return Some(x);
+ }
+ }
+ }
+ None
+}
+
+fn intermediate_statements(a: &[i32]) -> Option<i32> {
+ for &x in a {
+ if x == 1 {
+ return Some(x);
+ }
+ }
+
+ println!("side effect");
+
+ None
+}
+
+fn mixed_binding_modes(arr: Vec<(i32, String)>) -> Option<i32> {
+ for (x, mut s) in arr {
+ if x == 1 && s.as_mut_str().len() == 2 {
+ return Some(x);
+ }
+ }
+ None
+}
+
+fn as_closure() {
+ #[rustfmt::skip]
+ let f = |arr: Vec<i32>| -> Option<i32> {
+ arr.into_iter().find(|&x| x < 1)
+ };
+}
+
+fn in_block(a: &[i32]) -> Option<i32> {
+ let should_be_none = {
+ for &x in a {
+ if x == 1 {
+ return Some(x);
+ }
+ }
+ None
+ };
+
+ assert!(should_be_none.is_none());
+
+ should_be_none
+}
+
+// Not handled yet
+fn mut_binding(v: Vec<String>) -> Option<String> {
+ for mut s in v {
+ if s.as_mut_str().len() > 1 {
+ return Some(s);
+ }
+ }
+ None
+}
+
+fn subpattern(v: Vec<[u32; 32]>) -> Option<[u32; 32]> {
+ for a @ [first, ..] in v {
+ if a[12] == first {
+ return Some(a);
+ }
+ }
+ None
+}
+
+fn two_bindings(v: Vec<(u8, u8)>) -> Option<u8> {
+ for (a, n) in v {
+ if a == n {
+ return Some(a);
+ }
+ }
+ None
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_find_fixable.rs b/src/tools/clippy/tests/ui/manual_find_fixable.rs
new file mode 100644
index 000000000..ed277ddaa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find_fixable.rs
@@ -0,0 +1,242 @@
+// run-rustfix
+
+#![allow(unused, clippy::needless_return)]
+#![warn(clippy::manual_find)]
+
+use std::collections::HashMap;
+
+const ARRAY: &[u32; 5] = &[2, 7, 1, 9, 3];
+
+fn lookup(n: u32) -> Option<u32> {
+ for &v in ARRAY {
+ if v == n {
+ return Some(v);
+ }
+ }
+ None
+}
+
+fn with_pat(arr: Vec<(u32, u32)>) -> Option<u32> {
+ for (a, _) in arr {
+ if a % 2 == 0 {
+ return Some(a);
+ }
+ }
+ None
+}
+
+struct Data {
+ name: String,
+ is_true: bool,
+}
+fn with_struct(arr: Vec<Data>) -> Option<Data> {
+ for el in arr {
+ if el.name.len() == 10 {
+ return Some(el);
+ }
+ }
+ None
+}
+
+struct Tuple(usize, usize);
+fn with_tuple_struct(arr: Vec<Tuple>) -> Option<usize> {
+ for Tuple(a, _) in arr {
+ if a >= 3 {
+ return Some(a);
+ }
+ }
+ None
+}
+
+struct A;
+impl A {
+ fn should_keep(&self) -> bool {
+ true
+ }
+}
+fn with_method_call(arr: Vec<A>) -> Option<A> {
+ for el in arr {
+ if el.should_keep() {
+ return Some(el);
+ }
+ }
+ None
+}
+
+fn with_closure(arr: Vec<u32>) -> Option<u32> {
+ let f = |el: u32| -> u32 { el + 10 };
+ for el in arr {
+ if f(el) == 20 {
+ return Some(el);
+ }
+ }
+ None
+}
+
+fn with_closure2(arr: HashMap<String, i32>) -> Option<i32> {
+ let f = |el: i32| -> bool { el == 10 };
+ for &el in arr.values() {
+ if f(el) {
+ return Some(el);
+ }
+ }
+ None
+}
+
+fn with_bool(arr: Vec<Data>) -> Option<Data> {
+ for el in arr {
+ if el.is_true {
+ return Some(el);
+ }
+ }
+ None
+}
+
+fn with_side_effects(arr: Vec<u32>) -> Option<u32> {
+ for v in arr {
+ if v == 1 {
+ println!("side effect");
+ return Some(v);
+ }
+ }
+ None
+}
+
+fn with_else(arr: Vec<u32>) -> Option<u32> {
+ for el in arr {
+ if el % 2 == 0 {
+ return Some(el);
+ } else {
+ println!("{}", el);
+ }
+ }
+ None
+}
+
+fn tuple_with_ref(v: [(u8, &u8); 3]) -> Option<u8> {
+ for (_, &x) in v {
+ if x > 10 {
+ return Some(x);
+ }
+ }
+ None
+}
+
+fn ref_to_tuple_with_ref(v: &[(u8, &u8)]) -> Option<u8> {
+ for &(_, &x) in v {
+ if x > 10 {
+ return Some(x);
+ }
+ }
+ None
+}
+
+fn explicit_ret(arr: Vec<i32>) -> Option<i32> {
+ for x in arr {
+ if x >= 5 {
+ return Some(x);
+ }
+ }
+ return None;
+}
+
+fn plus_one(a: i32) -> Option<i32> {
+ Some(a + 1)
+}
+fn fn_instead_of_some(a: &[i32]) -> Option<i32> {
+ for &x in a {
+ if x == 1 {
+ return plus_one(x);
+ }
+ }
+ None
+}
+
+fn for_in_condition(a: &[i32], b: bool) -> Option<i32> {
+ if b {
+ for &x in a {
+ if x == 1 {
+ return Some(x);
+ }
+ }
+ }
+ None
+}
+
+fn intermediate_statements(a: &[i32]) -> Option<i32> {
+ for &x in a {
+ if x == 1 {
+ return Some(x);
+ }
+ }
+
+ println!("side effect");
+
+ None
+}
+
+fn mixed_binding_modes(arr: Vec<(i32, String)>) -> Option<i32> {
+ for (x, mut s) in arr {
+ if x == 1 && s.as_mut_str().len() == 2 {
+ return Some(x);
+ }
+ }
+ None
+}
+
+fn as_closure() {
+ #[rustfmt::skip]
+ let f = |arr: Vec<i32>| -> Option<i32> {
+ for x in arr {
+ if x < 1 {
+ return Some(x);
+ }
+ }
+ None
+ };
+}
+
+fn in_block(a: &[i32]) -> Option<i32> {
+ let should_be_none = {
+ for &x in a {
+ if x == 1 {
+ return Some(x);
+ }
+ }
+ None
+ };
+
+ assert!(should_be_none.is_none());
+
+ should_be_none
+}
+
+// Not handled yet
+fn mut_binding(v: Vec<String>) -> Option<String> {
+ for mut s in v {
+ if s.as_mut_str().len() > 1 {
+ return Some(s);
+ }
+ }
+ None
+}
+
+fn subpattern(v: Vec<[u32; 32]>) -> Option<[u32; 32]> {
+ for a @ [first, ..] in v {
+ if a[12] == first {
+ return Some(a);
+ }
+ }
+ None
+}
+
+fn two_bindings(v: Vec<(u8, u8)>) -> Option<u8> {
+ for (a, n) in v {
+ if a == n {
+ return Some(a);
+ }
+ }
+ None
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_find_fixable.stderr b/src/tools/clippy/tests/ui/manual_find_fixable.stderr
new file mode 100644
index 000000000..dbc4ff69a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find_fixable.stderr
@@ -0,0 +1,142 @@
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:11:5
+ |
+LL | / for &v in ARRAY {
+LL | | if v == n {
+LL | | return Some(v);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `ARRAY.iter().find(|&&v| v == n).copied()`
+ |
+ = note: `-D clippy::manual-find` implied by `-D warnings`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:20:5
+ |
+LL | / for (a, _) in arr {
+LL | | if a % 2 == 0 {
+LL | | return Some(a);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().map(|(a, _)| a).find(|&a| a % 2 == 0)`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:33:5
+ |
+LL | / for el in arr {
+LL | | if el.name.len() == 10 {
+LL | | return Some(el);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().find(|el| el.name.len() == 10)`
+ |
+ = note: you may need to dereference some variables
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:43:5
+ |
+LL | / for Tuple(a, _) in arr {
+LL | | if a >= 3 {
+LL | | return Some(a);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().map(|Tuple(a, _)| a).find(|&a| a >= 3)`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:58:5
+ |
+LL | / for el in arr {
+LL | | if el.should_keep() {
+LL | | return Some(el);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().find(|el| el.should_keep())`
+ |
+ = note: you may need to dereference some variables
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:68:5
+ |
+LL | / for el in arr {
+LL | | if f(el) == 20 {
+LL | | return Some(el);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().find(|&el| f(el) == 20)`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:78:5
+ |
+LL | / for &el in arr.values() {
+LL | | if f(el) {
+LL | | return Some(el);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.values().find(|&&el| f(el)).copied()`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:87:5
+ |
+LL | / for el in arr {
+LL | | if el.is_true {
+LL | | return Some(el);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `arr.into_iter().find(|el| el.is_true)`
+ |
+ = note: you may need to dereference some variables
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:117:5
+ |
+LL | / for (_, &x) in v {
+LL | | if x > 10 {
+LL | | return Some(x);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `v.into_iter().map(|(_, &x)| x).find(|&x| x > 10)`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:126:5
+ |
+LL | / for &(_, &x) in v {
+LL | | if x > 10 {
+LL | | return Some(x);
+LL | | }
+LL | | }
+LL | | None
+ | |________^ help: replace with an iterator: `v.iter().map(|&(_, &x)| x).find(|&x| x > 10)`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:135:5
+ |
+LL | / for x in arr {
+LL | | if x >= 5 {
+LL | | return Some(x);
+LL | | }
+LL | | }
+LL | | return None;
+ | |________________^ help: replace with an iterator: `arr.into_iter().find(|&x| x >= 5)`
+
+error: manual implementation of `Iterator::find`
+ --> $DIR/manual_find_fixable.rs:190:9
+ |
+LL | / for x in arr {
+LL | | if x < 1 {
+LL | | return Some(x);
+LL | | }
+LL | | }
+LL | | None
+ | |____________^ help: replace with an iterator: `arr.into_iter().find(|&x| x < 1)`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_find_map.fixed b/src/tools/clippy/tests/ui/manual_find_map.fixed
new file mode 100644
index 000000000..54302bece
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find_map.fixed
@@ -0,0 +1,124 @@
+// run-rustfix
+#![allow(dead_code)]
+#![warn(clippy::manual_find_map)]
+#![allow(clippy::redundant_closure)] // FIXME suggestion may have redundant closure
+
+fn main() {
+ // is_some(), unwrap()
+ let _ = (0..).find_map(|a| to_opt(a));
+
+ // ref pattern, expect()
+ let _ = (0..).find_map(|a| to_opt(a));
+
+ // is_ok(), unwrap_or()
+ let _ = (0..).find_map(|a| to_res(a).ok());
+
+ let _ = (1..5)
+ .find_map(|y| *to_ref(to_opt(y)));
+ let _ = (1..5)
+ .find_map(|y| *to_ref(to_opt(y)));
+
+ let _ = (1..5)
+ .find_map(|y| to_ref(to_res(y)).ok());
+ let _ = (1..5)
+ .find_map(|y| to_ref(to_res(y)).ok());
+}
+
+#[rustfmt::skip]
+fn simple_equal() {
+ iter::<Option<u8>>().find_map(|x| x);
+ iter::<&Option<u8>>().find_map(|x| *x);
+ iter::<&&Option<u8>>().find_map(|x| **x);
+ iter::<Option<&u8>>().find_map(|x| x.cloned());
+ iter::<&Option<&u8>>().find_map(|x| x.cloned());
+ iter::<&Option<String>>().find_map(|x| x.as_deref());
+ iter::<Option<&String>>().find_map(|y| to_ref(y).cloned());
+
+ iter::<Result<u8, ()>>().find_map(|x| x.ok());
+ iter::<&Result<u8, ()>>().find_map(|x| x.ok());
+ iter::<&&Result<u8, ()>>().find_map(|x| x.ok());
+ iter::<Result<&u8, ()>>().find_map(|x| x.cloned().ok());
+ iter::<&Result<&u8, ()>>().find_map(|x| x.cloned().ok());
+ iter::<&Result<String, ()>>().find_map(|x| x.as_deref().ok());
+ iter::<Result<&String, ()>>().find_map(|y| to_ref(y).cloned().ok());
+}
+
+fn no_lint() {
+ // no shared code
+ let _ = (0..).filter(|n| *n > 1).map(|n| n + 1);
+
+ // very close but different since filter() provides a reference
+ let _ = (0..).find(|n| to_opt(n).is_some()).map(|a| to_opt(a).unwrap());
+
+ // similar but different
+ let _ = (0..).find(|n| to_opt(n).is_some()).map(|n| to_res(n).unwrap());
+ let _ = (0..)
+ .find(|n| to_opt(n).map(|n| n + 1).is_some())
+ .map(|a| to_opt(a).unwrap());
+}
+
+fn iter<T>() -> impl Iterator<Item = T> {
+ std::iter::empty()
+}
+
+fn to_opt<T>(_: T) -> Option<T> {
+ unimplemented!()
+}
+
+fn to_res<T>(_: T) -> Result<T, ()> {
+ unimplemented!()
+}
+
+fn to_ref<'a, T>(_: T) -> &'a T {
+ unimplemented!()
+}
+
+struct Issue8920<'a> {
+ option_field: Option<String>,
+ result_field: Result<String, ()>,
+ ref_field: Option<&'a usize>,
+}
+
+fn issue_8920() {
+ let mut vec = vec![Issue8920 {
+ option_field: Some(String::from("str")),
+ result_field: Ok(String::from("str")),
+ ref_field: Some(&1),
+ }];
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.option_field.clone());
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.ref_field.cloned());
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.ref_field.copied());
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.result_field.clone().ok());
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.result_field.as_ref().ok());
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.result_field.as_deref().ok());
+
+ let _ = vec
+ .iter_mut()
+ .find_map(|f| f.result_field.as_mut().ok());
+
+ let _ = vec
+ .iter_mut()
+ .find_map(|f| f.result_field.as_deref_mut().ok());
+
+ let _ = vec
+ .iter()
+ .find_map(|f| f.result_field.to_owned().ok());
+}
diff --git a/src/tools/clippy/tests/ui/manual_find_map.rs b/src/tools/clippy/tests/ui/manual_find_map.rs
new file mode 100644
index 000000000..afcc1825a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find_map.rs
@@ -0,0 +1,137 @@
+// run-rustfix
+#![allow(dead_code)]
+#![warn(clippy::manual_find_map)]
+#![allow(clippy::redundant_closure)] // FIXME suggestion may have redundant closure
+
+fn main() {
+ // is_some(), unwrap()
+ let _ = (0..).find(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
+
+ // ref pattern, expect()
+ let _ = (0..).find(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
+
+ // is_ok(), unwrap_or()
+ let _ = (0..).find(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
+
+ let _ = (1..5)
+ .find(|&x| to_ref(to_opt(x)).is_some())
+ .map(|y| to_ref(to_opt(y)).unwrap());
+ let _ = (1..5)
+ .find(|x| to_ref(to_opt(*x)).is_some())
+ .map(|y| to_ref(to_opt(y)).unwrap());
+
+ let _ = (1..5)
+ .find(|&x| to_ref(to_res(x)).is_ok())
+ .map(|y| to_ref(to_res(y)).unwrap());
+ let _ = (1..5)
+ .find(|x| to_ref(to_res(*x)).is_ok())
+ .map(|y| to_ref(to_res(y)).unwrap());
+}
+
+#[rustfmt::skip]
+fn simple_equal() {
+ iter::<Option<u8>>().find(|x| x.is_some()).map(|x| x.unwrap());
+ iter::<&Option<u8>>().find(|x| x.is_some()).map(|x| x.unwrap());
+ iter::<&&Option<u8>>().find(|x| x.is_some()).map(|x| x.unwrap());
+ iter::<Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ iter::<&Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ iter::<&Option<String>>().find(|x| x.is_some()).map(|x| x.as_deref().unwrap());
+ iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
+
+ iter::<Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ iter::<&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ iter::<&&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ iter::<Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ iter::<&Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ iter::<&Result<String, ()>>().find(|x| x.is_ok()).map(|x| x.as_deref().unwrap());
+ iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
+}
+
+fn no_lint() {
+ // no shared code
+ let _ = (0..).filter(|n| *n > 1).map(|n| n + 1);
+
+ // very close but different since filter() provides a reference
+ let _ = (0..).find(|n| to_opt(n).is_some()).map(|a| to_opt(a).unwrap());
+
+ // similar but different
+ let _ = (0..).find(|n| to_opt(n).is_some()).map(|n| to_res(n).unwrap());
+ let _ = (0..)
+ .find(|n| to_opt(n).map(|n| n + 1).is_some())
+ .map(|a| to_opt(a).unwrap());
+}
+
+fn iter<T>() -> impl Iterator<Item = T> {
+ std::iter::empty()
+}
+
+fn to_opt<T>(_: T) -> Option<T> {
+ unimplemented!()
+}
+
+fn to_res<T>(_: T) -> Result<T, ()> {
+ unimplemented!()
+}
+
+fn to_ref<'a, T>(_: T) -> &'a T {
+ unimplemented!()
+}
+
+struct Issue8920<'a> {
+ option_field: Option<String>,
+ result_field: Result<String, ()>,
+ ref_field: Option<&'a usize>,
+}
+
+fn issue_8920() {
+ let mut vec = vec![Issue8920 {
+ option_field: Some(String::from("str")),
+ result_field: Ok(String::from("str")),
+ ref_field: Some(&1),
+ }];
+
+ let _ = vec
+ .iter()
+ .find(|f| f.option_field.is_some())
+ .map(|f| f.option_field.clone().unwrap());
+
+ let _ = vec
+ .iter()
+ .find(|f| f.ref_field.is_some())
+ .map(|f| f.ref_field.cloned().unwrap());
+
+ let _ = vec
+ .iter()
+ .find(|f| f.ref_field.is_some())
+ .map(|f| f.ref_field.copied().unwrap());
+
+ let _ = vec
+ .iter()
+ .find(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.clone().unwrap());
+
+ let _ = vec
+ .iter()
+ .find(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_ref().unwrap());
+
+ let _ = vec
+ .iter()
+ .find(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_deref().unwrap());
+
+ let _ = vec
+ .iter_mut()
+ .find(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_mut().unwrap());
+
+ let _ = vec
+ .iter_mut()
+ .find(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.as_deref_mut().unwrap());
+
+ let _ = vec
+ .iter()
+ .find(|f| f.result_field.is_ok())
+ .map(|f| f.result_field.to_owned().unwrap());
+}
diff --git a/src/tools/clippy/tests/ui/manual_find_map.stderr b/src/tools/clippy/tests/ui/manual_find_map.stderr
new file mode 100644
index 000000000..c1ac499f7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_find_map.stderr
@@ -0,0 +1,210 @@
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:8:19
+ |
+LL | let _ = (0..).find(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|a| to_opt(a))`
+ |
+ = note: `-D clippy::manual-find-map` implied by `-D warnings`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:11:19
+ |
+LL | let _ = (0..).find(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|a| to_opt(a))`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:14:19
+ |
+LL | let _ = (0..).find(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|a| to_res(a).ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:17:10
+ |
+LL | .find(|&x| to_ref(to_opt(x)).is_some())
+ | __________^
+LL | | .map(|y| to_ref(to_opt(y)).unwrap());
+ | |____________________________________________^ help: try: `find_map(|y| *to_ref(to_opt(y)))`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:20:10
+ |
+LL | .find(|x| to_ref(to_opt(*x)).is_some())
+ | __________^
+LL | | .map(|y| to_ref(to_opt(y)).unwrap());
+ | |____________________________________________^ help: try: `find_map(|y| *to_ref(to_opt(y)))`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:24:10
+ |
+LL | .find(|&x| to_ref(to_res(x)).is_ok())
+ | __________^
+LL | | .map(|y| to_ref(to_res(y)).unwrap());
+ | |____________________________________________^ help: try: `find_map(|y| to_ref(to_res(y)).ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:27:10
+ |
+LL | .find(|x| to_ref(to_res(*x)).is_ok())
+ | __________^
+LL | | .map(|y| to_ref(to_res(y)).unwrap());
+ | |____________________________________________^ help: try: `find_map(|y| to_ref(to_res(y)).ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:33:26
+ |
+LL | iter::<Option<u8>>().find(|x| x.is_some()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x)`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:34:27
+ |
+LL | iter::<&Option<u8>>().find(|x| x.is_some()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| *x)`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:35:28
+ |
+LL | iter::<&&Option<u8>>().find(|x| x.is_some()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| **x)`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:36:27
+ |
+LL | iter::<Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:37:28
+ |
+LL | iter::<&Option<&u8>>().find(|x| x.is_some()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:38:31
+ |
+LL | iter::<&Option<String>>().find(|x| x.is_some()).map(|x| x.as_deref().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.as_deref())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:39:31
+ |
+LL | iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:41:30
+ |
+LL | iter::<Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:42:31
+ |
+LL | iter::<&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:43:32
+ |
+LL | iter::<&&Result<u8, ()>>().find(|x| x.is_ok()).map(|x| x.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:44:31
+ |
+LL | iter::<Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:45:32
+ |
+LL | iter::<&Result<&u8, ()>>().find(|x| x.is_ok()).map(|x| x.cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.cloned().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:46:35
+ |
+LL | iter::<&Result<String, ()>>().find(|x| x.is_ok()).map(|x| x.as_deref().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|x| x.as_deref().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:47:35
+ |
+LL | iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:95:10
+ |
+LL | .find(|f| f.option_field.is_some())
+ | __________^
+LL | | .map(|f| f.option_field.clone().unwrap());
+ | |_________________________________________________^ help: try: `find_map(|f| f.option_field.clone())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:100:10
+ |
+LL | .find(|f| f.ref_field.is_some())
+ | __________^
+LL | | .map(|f| f.ref_field.cloned().unwrap());
+ | |_______________________________________________^ help: try: `find_map(|f| f.ref_field.cloned())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:105:10
+ |
+LL | .find(|f| f.ref_field.is_some())
+ | __________^
+LL | | .map(|f| f.ref_field.copied().unwrap());
+ | |_______________________________________________^ help: try: `find_map(|f| f.ref_field.copied())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:110:10
+ |
+LL | .find(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.clone().unwrap());
+ | |_________________________________________________^ help: try: `find_map(|f| f.result_field.clone().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:115:10
+ |
+LL | .find(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_ref().unwrap());
+ | |__________________________________________________^ help: try: `find_map(|f| f.result_field.as_ref().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:120:10
+ |
+LL | .find(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_deref().unwrap());
+ | |____________________________________________________^ help: try: `find_map(|f| f.result_field.as_deref().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:125:10
+ |
+LL | .find(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_mut().unwrap());
+ | |__________________________________________________^ help: try: `find_map(|f| f.result_field.as_mut().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:130:10
+ |
+LL | .find(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.as_deref_mut().unwrap());
+ | |________________________________________________________^ help: try: `find_map(|f| f.result_field.as_deref_mut().ok())`
+
+error: `find(..).map(..)` can be simplified as `find_map(..)`
+ --> $DIR/manual_find_map.rs:135:10
+ |
+LL | .find(|f| f.result_field.is_ok())
+ | __________^
+LL | | .map(|f| f.result_field.to_owned().unwrap());
+ | |____________________________________________________^ help: try: `find_map(|f| f.result_field.to_owned().ok())`
+
+error: aborting due to 30 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_flatten.rs b/src/tools/clippy/tests/ui/manual_flatten.rs
new file mode 100644
index 000000000..d922593bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_flatten.rs
@@ -0,0 +1,125 @@
+#![warn(clippy::manual_flatten)]
+#![allow(clippy::useless_vec)]
+
+fn main() {
+ // Test for loop over implicitly adjusted `Iterator` with `if let` expression
+ let x = vec![Some(1), Some(2), Some(3)];
+ for n in x {
+ if let Some(y) = n {
+ println!("{}", y);
+ }
+ }
+
+    // Test for loop over implicitly adjusted `Iterator` with `if let` statement
+ let y: Vec<Result<i32, i32>> = vec![];
+ for n in y.clone() {
+ if let Ok(n) = n {
+ println!("{}", n);
+ };
+ }
+
+ // Test for loop over by reference
+ for n in &y {
+ if let Ok(n) = n {
+ println!("{}", n);
+ }
+ }
+
+ // Test for loop over an implicit reference
+ let z = &y;
+ for n in z {
+ if let Ok(n) = n {
+ println!("{}", n);
+ }
+ }
+
+ // Test for loop over `Iterator` with `if let` expression
+ let z = vec![Some(1), Some(2), Some(3)];
+ let z = z.iter();
+ for n in z {
+ if let Some(m) = n {
+ println!("{}", m);
+ }
+ }
+
+ // Using the `None` variant should not trigger the lint
+ // Note: for an autofixable suggestion, the binding in the for loop has to take the
+ // name of the binding in the `if let`
+ let z = vec![Some(1), Some(2), Some(3)];
+ for n in z {
+ if n.is_none() {
+ println!("Nada.");
+ }
+ }
+
+ // Using the `Err` variant should not trigger the lint
+ for n in y.clone() {
+ if let Err(e) = n {
+ println!("Oops: {}!", e);
+ }
+ }
+
+ // Having an else clause should not trigger the lint
+ for n in y.clone() {
+ if let Ok(n) = n {
+ println!("{}", n);
+ } else {
+ println!("Oops!");
+ }
+ }
+
+ let vec_of_ref = vec![&Some(1)];
+ for n in &vec_of_ref {
+ if let Some(n) = n {
+ println!("{:?}", n);
+ }
+ }
+
+ let vec_of_ref = &vec_of_ref;
+ for n in vec_of_ref {
+ if let Some(n) = n {
+ println!("{:?}", n);
+ }
+ }
+
+ let slice_of_ref = &[&Some(1)];
+ for n in slice_of_ref {
+ if let Some(n) = n {
+ println!("{:?}", n);
+ }
+ }
+
+ struct Test {
+ a: usize,
+ }
+
+ let mut vec_of_struct = [Some(Test { a: 1 }), None];
+
+ // Usage of `if let` expression should not trigger lint
+ for n in vec_of_struct.iter_mut() {
+ if let Some(z) = n {
+ *n = None;
+ }
+ }
+
+    // Iterators that already use `.flatten()` should not trigger the lint
+ for n in vec![Some(1), Some(2), Some(3)].iter().flatten() {
+ println!("{}", n);
+ }
+
+ run_unformatted_tests();
+}
+
+#[rustfmt::skip]
+fn run_unformatted_tests() {
+ // Skip rustfmt here on purpose so the suggestion does not fit in one line
+ for n in vec![
+ Some(1),
+ Some(2),
+ Some(3)
+ ].iter() {
+ if let Some(n) = n {
+ println!("{:?}", n);
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/manual_flatten.stderr b/src/tools/clippy/tests/ui/manual_flatten.stderr
new file mode 100644
index 000000000..da053c056
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_flatten.stderr
@@ -0,0 +1,199 @@
+error: unnecessary `if let` since only the `Some` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:7:5
+ |
+LL | for n in x {
+ | ^ - help: try: `x.into_iter().flatten()`
+ | _____|
+ | |
+LL | | if let Some(y) = n {
+LL | | println!("{}", y);
+LL | | }
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::manual-flatten` implied by `-D warnings`
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:8:9
+ |
+LL | / if let Some(y) = n {
+LL | | println!("{}", y);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Ok` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:15:5
+ |
+LL | for n in y.clone() {
+ | ^ --------- help: try: `y.clone().into_iter().flatten()`
+ | _____|
+ | |
+LL | | if let Ok(n) = n {
+LL | | println!("{}", n);
+LL | | };
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:16:9
+ |
+LL | / if let Ok(n) = n {
+LL | | println!("{}", n);
+LL | | };
+ | |_________^
+
+error: unnecessary `if let` since only the `Ok` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:22:5
+ |
+LL | for n in &y {
+ | ^ -- help: try: `y.iter().flatten()`
+ | _____|
+ | |
+LL | | if let Ok(n) = n {
+LL | | println!("{}", n);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:23:9
+ |
+LL | / if let Ok(n) = n {
+LL | | println!("{}", n);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Ok` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:30:5
+ |
+LL | for n in z {
+ | ^ - help: try: `z.iter().flatten()`
+ | _____|
+ | |
+LL | | if let Ok(n) = n {
+LL | | println!("{}", n);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:31:9
+ |
+LL | / if let Ok(n) = n {
+LL | | println!("{}", n);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Some` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:39:5
+ |
+LL | for n in z {
+ | ^ - help: try: `z.flatten()`
+ | _____|
+ | |
+LL | | if let Some(m) = n {
+LL | | println!("{}", m);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:40:9
+ |
+LL | / if let Some(m) = n {
+LL | | println!("{}", m);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Some` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:72:5
+ |
+LL | for n in &vec_of_ref {
+ | ^ ----------- help: try: `vec_of_ref.iter().copied().flatten()`
+ | _____|
+ | |
+LL | | if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:73:9
+ |
+LL | / if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Some` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:79:5
+ |
+LL | for n in vec_of_ref {
+ | ^ ---------- help: try: `vec_of_ref.iter().copied().flatten()`
+ | _____|
+ | |
+LL | | if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:80:9
+ |
+LL | / if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Some` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:86:5
+ |
+LL | for n in slice_of_ref {
+ | ^ ------------ help: try: `slice_of_ref.iter().copied().flatten()`
+ | _____|
+ | |
+LL | | if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: ...and remove the `if let` statement in the for loop
+ --> $DIR/manual_flatten.rs:87:9
+ |
+LL | / if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+ | |_________^
+
+error: unnecessary `if let` since only the `Some` variant of the iterator element is used
+ --> $DIR/manual_flatten.rs:116:5
+ |
+LL | / for n in vec![
+LL | | Some(1),
+LL | | Some(2),
+LL | | Some(3)
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: remove the `if let` statement in the for loop and then...
+ --> $DIR/manual_flatten.rs:121:9
+ |
+LL | / if let Some(n) = n {
+LL | | println!("{:?}", n);
+LL | | }
+ | |_________^
+help: try
+ |
+LL ~ for n in vec![
+LL + Some(1),
+LL + Some(2),
+LL + Some(3)
+LL ~ ].iter().flatten() {
+ |
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_map_option.fixed b/src/tools/clippy/tests/ui/manual_map_option.fixed
new file mode 100644
index 000000000..a59da4ae1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_map_option.fixed
@@ -0,0 +1,157 @@
+// run-rustfix
+
+#![warn(clippy::manual_map)]
+#![allow(
+ clippy::no_effect,
+ clippy::map_identity,
+ clippy::unit_arg,
+ clippy::match_ref_pats,
+ clippy::redundant_pattern_matching,
+ clippy::for_loops_over_fallibles,
+ dead_code
+)]
+
+fn main() {
+ Some(0).map(|_| 2);
+
+ Some(0).map(|x| x + 1);
+
+ Some("").map(|x| x.is_empty());
+
+ Some(0).map(|x| !x);
+
+ #[rustfmt::skip]
+ Some(0).map(std::convert::identity);
+
+ Some(&String::new()).map(|x| str::len(x));
+
+ match Some(0) {
+ Some(x) if false => Some(x + 1),
+ _ => None,
+ };
+
+ Some([0, 1]).as_ref().map(|x| x[0]);
+
+ Some(0).map(|x| x * 2);
+
+ Some(String::new()).as_ref().map(|x| x.is_empty());
+
+ Some(String::new()).as_ref().map(|x| x.len());
+
+ Some(0).map(|x| x + x);
+
+ #[warn(clippy::option_map_unit_fn)]
+ match &mut Some(String::new()) {
+ Some(x) => Some(x.push_str("")),
+ None => None,
+ };
+
+ #[allow(clippy::option_map_unit_fn)]
+ {
+ Some(String::new()).as_mut().map(|x| x.push_str(""));
+ }
+
+ Some(String::new()).as_ref().map(|x| x.len());
+
+ Some(String::new()).as_ref().map(|x| x.is_empty());
+
+ Some((0, 1, 2)).map(|(x, y, z)| x + y + z);
+
+ Some([1, 2, 3]).map(|[first, ..]| first);
+
+ Some((String::new(), "test")).as_ref().map(|(x, y)| (y, x));
+
+ match Some((String::new(), 0)) {
+ Some((ref x, y)) => Some((y, x)),
+ None => None,
+ };
+
+ match Some(Some(0)) {
+ Some(Some(_)) | Some(None) => Some(0),
+ None => None,
+ };
+
+ match Some(Some((0, 1))) {
+ Some(Some((x, 1))) => Some(x),
+ _ => None,
+ };
+
+ // #6795
+ fn f1() -> Result<(), ()> {
+ let _ = match Some(Ok(())) {
+ Some(x) => Some(x?),
+ None => None,
+ };
+ Ok(())
+ }
+
+ for &x in Some(Some(true)).iter() {
+ let _ = match x {
+ Some(x) => Some(if x { continue } else { x }),
+ None => None,
+ };
+ }
+
+ // #6797
+ let x1 = (Some(String::new()), 0);
+ let x2 = x1.0;
+ match x2 {
+ Some(x) => Some((x, x1.1)),
+ None => None,
+ };
+
+ struct S1 {
+ x: Option<String>,
+ y: u32,
+ }
+ impl S1 {
+ fn f(self) -> Option<(String, u32)> {
+ match self.x {
+ Some(x) => Some((x, self.y)),
+ None => None,
+ }
+ }
+ }
+
+ // #6811
+ Some(0).map(|x| vec![x]);
+
+ option_env!("").map(String::from);
+
+ // #6819
+ async fn f2(x: u32) -> u32 {
+ x
+ }
+
+ async fn f3() {
+ match Some(0) {
+ Some(x) => Some(f2(x).await),
+ None => None,
+ };
+ }
+
+ // #6847
+ if let Some(_) = Some(0) {
+ Some(0)
+ } else { Some(0).map(|x| x + 1) };
+
+ if true {
+ Some(0)
+ } else { Some(0).map(|x| x + 1) };
+
+ // #6967
+ const fn f4() {
+ match Some(0) {
+ Some(x) => Some(x + 1),
+ None => None,
+ };
+ }
+
+ // #7077
+ let s = &String::new();
+ #[allow(clippy::needless_match)]
+ let _: Option<&str> = match Some(s) {
+ Some(s) => Some(s),
+ None => None,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/manual_map_option.rs b/src/tools/clippy/tests/ui/manual_map_option.rs
new file mode 100644
index 000000000..0bdbefa51
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_map_option.rs
@@ -0,0 +1,223 @@
+// run-rustfix
+
+#![warn(clippy::manual_map)]
+#![allow(
+ clippy::no_effect,
+ clippy::map_identity,
+ clippy::unit_arg,
+ clippy::match_ref_pats,
+ clippy::redundant_pattern_matching,
+ clippy::for_loops_over_fallibles,
+ dead_code
+)]
+
+fn main() {
+ match Some(0) {
+ Some(_) => Some(2),
+ None::<u32> => None,
+ };
+
+ match Some(0) {
+ Some(x) => Some(x + 1),
+ _ => None,
+ };
+
+ match Some("") {
+ Some(x) => Some(x.is_empty()),
+ None => None,
+ };
+
+ if let Some(x) = Some(0) {
+ Some(!x)
+ } else {
+ None
+ };
+
+ #[rustfmt::skip]
+ match Some(0) {
+ Some(x) => { Some(std::convert::identity(x)) }
+ None => { None }
+ };
+
+ match Some(&String::new()) {
+ Some(x) => Some(str::len(x)),
+ None => None,
+ };
+
+ match Some(0) {
+ Some(x) if false => Some(x + 1),
+ _ => None,
+ };
+
+ match &Some([0, 1]) {
+ Some(x) => Some(x[0]),
+ &None => None,
+ };
+
+ match &Some(0) {
+ &Some(x) => Some(x * 2),
+ None => None,
+ };
+
+ match Some(String::new()) {
+ Some(ref x) => Some(x.is_empty()),
+ _ => None,
+ };
+
+ match &&Some(String::new()) {
+ Some(x) => Some(x.len()),
+ _ => None,
+ };
+
+ match &&Some(0) {
+ &&Some(x) => Some(x + x),
+ &&_ => None,
+ };
+
+ #[warn(clippy::option_map_unit_fn)]
+ match &mut Some(String::new()) {
+ Some(x) => Some(x.push_str("")),
+ None => None,
+ };
+
+ #[allow(clippy::option_map_unit_fn)]
+ {
+ match &mut Some(String::new()) {
+ Some(x) => Some(x.push_str("")),
+ None => None,
+ };
+ }
+
+ match &mut Some(String::new()) {
+ Some(ref x) => Some(x.len()),
+ None => None,
+ };
+
+ match &mut &Some(String::new()) {
+ Some(x) => Some(x.is_empty()),
+ &mut _ => None,
+ };
+
+ match Some((0, 1, 2)) {
+ Some((x, y, z)) => Some(x + y + z),
+ None => None,
+ };
+
+ match Some([1, 2, 3]) {
+ Some([first, ..]) => Some(first),
+ None => None,
+ };
+
+ match &Some((String::new(), "test")) {
+ Some((x, y)) => Some((y, x)),
+ None => None,
+ };
+
+ match Some((String::new(), 0)) {
+ Some((ref x, y)) => Some((y, x)),
+ None => None,
+ };
+
+ match Some(Some(0)) {
+ Some(Some(_)) | Some(None) => Some(0),
+ None => None,
+ };
+
+ match Some(Some((0, 1))) {
+ Some(Some((x, 1))) => Some(x),
+ _ => None,
+ };
+
+ // #6795
+ fn f1() -> Result<(), ()> {
+ let _ = match Some(Ok(())) {
+ Some(x) => Some(x?),
+ None => None,
+ };
+ Ok(())
+ }
+
+ for &x in Some(Some(true)).iter() {
+ let _ = match x {
+ Some(x) => Some(if x { continue } else { x }),
+ None => None,
+ };
+ }
+
+ // #6797
+ let x1 = (Some(String::new()), 0);
+ let x2 = x1.0;
+ match x2 {
+ Some(x) => Some((x, x1.1)),
+ None => None,
+ };
+
+ struct S1 {
+ x: Option<String>,
+ y: u32,
+ }
+ impl S1 {
+ fn f(self) -> Option<(String, u32)> {
+ match self.x {
+ Some(x) => Some((x, self.y)),
+ None => None,
+ }
+ }
+ }
+
+ // #6811
+ match Some(0) {
+ Some(x) => Some(vec![x]),
+ None => None,
+ };
+
+ match option_env!("") {
+ Some(x) => Some(String::from(x)),
+ None => None,
+ };
+
+ // #6819
+ async fn f2(x: u32) -> u32 {
+ x
+ }
+
+ async fn f3() {
+ match Some(0) {
+ Some(x) => Some(f2(x).await),
+ None => None,
+ };
+ }
+
+ // #6847
+ if let Some(_) = Some(0) {
+ Some(0)
+ } else if let Some(x) = Some(0) {
+ Some(x + 1)
+ } else {
+ None
+ };
+
+ if true {
+ Some(0)
+ } else if let Some(x) = Some(0) {
+ Some(x + 1)
+ } else {
+ None
+ };
+
+ // #6967
+ const fn f4() {
+ match Some(0) {
+ Some(x) => Some(x + 1),
+ None => None,
+ };
+ }
+
+ // #7077
+ let s = &String::new();
+ #[allow(clippy::needless_match)]
+ let _: Option<&str> = match Some(s) {
+ Some(s) => Some(s),
+ None => None,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/manual_map_option.stderr b/src/tools/clippy/tests/ui/manual_map_option.stderr
new file mode 100644
index 000000000..cdc2c0e62
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_map_option.stderr
@@ -0,0 +1,198 @@
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:15:5
+ |
+LL | / match Some(0) {
+LL | | Some(_) => Some(2),
+LL | | None::<u32> => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|_| 2)`
+ |
+ = note: `-D clippy::manual-map` implied by `-D warnings`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:20:5
+ |
+LL | / match Some(0) {
+LL | | Some(x) => Some(x + 1),
+LL | | _ => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| x + 1)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:25:5
+ |
+LL | / match Some("") {
+LL | | Some(x) => Some(x.is_empty()),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some("").map(|x| x.is_empty())`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:30:5
+ |
+LL | / if let Some(x) = Some(0) {
+LL | | Some(!x)
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| !x)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:37:5
+ |
+LL | / match Some(0) {
+LL | | Some(x) => { Some(std::convert::identity(x)) }
+LL | | None => { None }
+LL | | };
+ | |_____^ help: try this: `Some(0).map(std::convert::identity)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:42:5
+ |
+LL | / match Some(&String::new()) {
+LL | | Some(x) => Some(str::len(x)),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some(&String::new()).map(|x| str::len(x))`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:52:5
+ |
+LL | / match &Some([0, 1]) {
+LL | | Some(x) => Some(x[0]),
+LL | | &None => None,
+LL | | };
+ | |_____^ help: try this: `Some([0, 1]).as_ref().map(|x| x[0])`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:57:5
+ |
+LL | / match &Some(0) {
+LL | | &Some(x) => Some(x * 2),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| x * 2)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:62:5
+ |
+LL | / match Some(String::new()) {
+LL | | Some(ref x) => Some(x.is_empty()),
+LL | | _ => None,
+LL | | };
+ | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.is_empty())`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:67:5
+ |
+LL | / match &&Some(String::new()) {
+LL | | Some(x) => Some(x.len()),
+LL | | _ => None,
+LL | | };
+ | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.len())`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:72:5
+ |
+LL | / match &&Some(0) {
+LL | | &&Some(x) => Some(x + x),
+LL | | &&_ => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| x + x)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:85:9
+ |
+LL | / match &mut Some(String::new()) {
+LL | | Some(x) => Some(x.push_str("")),
+LL | | None => None,
+LL | | };
+ | |_________^ help: try this: `Some(String::new()).as_mut().map(|x| x.push_str(""))`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:91:5
+ |
+LL | / match &mut Some(String::new()) {
+LL | | Some(ref x) => Some(x.len()),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.len())`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:96:5
+ |
+LL | / match &mut &Some(String::new()) {
+LL | | Some(x) => Some(x.is_empty()),
+LL | | &mut _ => None,
+LL | | };
+ | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.is_empty())`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:101:5
+ |
+LL | / match Some((0, 1, 2)) {
+LL | | Some((x, y, z)) => Some(x + y + z),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some((0, 1, 2)).map(|(x, y, z)| x + y + z)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:106:5
+ |
+LL | / match Some([1, 2, 3]) {
+LL | | Some([first, ..]) => Some(first),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some([1, 2, 3]).map(|[first, ..]| first)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:111:5
+ |
+LL | / match &Some((String::new(), "test")) {
+LL | | Some((x, y)) => Some((y, x)),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some((String::new(), "test")).as_ref().map(|(x, y)| (y, x))`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:169:5
+ |
+LL | / match Some(0) {
+LL | | Some(x) => Some(vec![x]),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| vec![x])`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:174:5
+ |
+LL | / match option_env!("") {
+LL | | Some(x) => Some(String::from(x)),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `option_env!("").map(String::from)`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:194:12
+ |
+LL | } else if let Some(x) = Some(0) {
+ | ____________^
+LL | | Some(x + 1)
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^ help: try this: `{ Some(0).map(|x| x + 1) }`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option.rs:202:12
+ |
+LL | } else if let Some(x) = Some(0) {
+ | ____________^
+LL | | Some(x + 1)
+LL | | } else {
+LL | | None
+LL | | };
+ | |_____^ help: try this: `{ Some(0).map(|x| x + 1) }`
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.fixed b/src/tools/clippy/tests/ui/manual_map_option_2.fixed
new file mode 100644
index 000000000..ebf3f8cab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_map_option_2.fixed
@@ -0,0 +1,60 @@
+// run-rustfix
+
+#![warn(clippy::manual_map)]
+#![allow(clippy::toplevel_ref_arg)]
+
+fn main() {
+ // Lint. `y` is declared within the arm, so it isn't captured by the map closure
+ let _ = Some(0).map(|x| {
+ let y = (String::new(), String::new());
+ (x, y.0)
+ });
+
+ // Don't lint. `s` is borrowed until partway through the arm, but needs to be captured by the map
+ // closure
+ let s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some((x.clone(), s)),
+ None => None,
+ };
+
+ // Don't lint. `s` is borrowed until partway through the arm, but needs to be captured by the map
+ // closure
+ let s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some({
+ let clone = x.clone();
+ let s = || s;
+ (clone, s())
+ }),
+ None => None,
+ };
+
+ // Don't lint. `s` is borrowed until partway through the arm, but needs to be captured as a mutable
+ // reference by the map closure
+ let mut s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some({
+ let clone = x.clone();
+ let ref mut s = s;
+ (clone, s)
+ }),
+ None => None,
+ };
+
+ // Lint. `s` is captured by reference, so no lifetime issues.
+ let s = Some(String::new());
+ let _ = s.as_ref().map(|x| {
+ if let Some(ref s) = s { (x.clone(), s) } else { panic!() }
+ });
+
+ // Issue #7820
+ unsafe fn f(x: u32) -> u32 {
+ x
+ }
+ unsafe {
+ let _ = Some(0).map(|x| f(x));
+ }
+ let _ = Some(0).map(|x| unsafe { f(x) });
+ let _ = Some(0).map(|x| unsafe { f(x) });
+}
diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.rs b/src/tools/clippy/tests/ui/manual_map_option_2.rs
new file mode 100644
index 000000000..1382d9af0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_map_option_2.rs
@@ -0,0 +1,75 @@
+// run-rustfix
+
+#![warn(clippy::manual_map)]
+#![allow(clippy::toplevel_ref_arg)]
+
+fn main() {
+ // Lint. `y` is declared within the arm, so it isn't captured by the map closure
+ let _ = match Some(0) {
+ Some(x) => Some({
+ let y = (String::new(), String::new());
+ (x, y.0)
+ }),
+ None => None,
+ };
+
+ // Don't lint. `s` is borrowed until partway through the arm, but needs to be captured by the map
+ // closure
+ let s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some((x.clone(), s)),
+ None => None,
+ };
+
+ // Don't lint. `s` is borrowed until partway through the arm, but needs to be captured by the map
+ // closure
+ let s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some({
+ let clone = x.clone();
+ let s = || s;
+ (clone, s())
+ }),
+ None => None,
+ };
+
+ // Don't lint. `s` is borrowed until partway through the arm, but needs to be captured as a mutable
+ // reference by the map closure
+ let mut s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some({
+ let clone = x.clone();
+ let ref mut s = s;
+ (clone, s)
+ }),
+ None => None,
+ };
+
+ // Lint. `s` is captured by reference, so no lifetime issues.
+ let s = Some(String::new());
+ let _ = match &s {
+ Some(x) => Some({
+ if let Some(ref s) = s { (x.clone(), s) } else { panic!() }
+ }),
+ None => None,
+ };
+
+ // Issue #7820
+ unsafe fn f(x: u32) -> u32 {
+ x
+ }
+ unsafe {
+ let _ = match Some(0) {
+ Some(x) => Some(f(x)),
+ None => None,
+ };
+ }
+ let _ = match Some(0) {
+ Some(x) => unsafe { Some(f(x)) },
+ None => None,
+ };
+ let _ = match Some(0) {
+ Some(x) => Some(unsafe { f(x) }),
+ None => None,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.stderr b/src/tools/clippy/tests/ui/manual_map_option_2.stderr
new file mode 100644
index 000000000..d35b6252f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_map_option_2.stderr
@@ -0,0 +1,73 @@
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option_2.rs:8:13
+ |
+LL | let _ = match Some(0) {
+ | _____________^
+LL | | Some(x) => Some({
+LL | | let y = (String::new(), String::new());
+LL | | (x, y.0)
+LL | | }),
+LL | | None => None,
+LL | | };
+ | |_____^
+ |
+ = note: `-D clippy::manual-map` implied by `-D warnings`
+help: try this
+ |
+LL ~ let _ = Some(0).map(|x| {
+LL + let y = (String::new(), String::new());
+LL + (x, y.0)
+LL ~ });
+ |
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option_2.rs:50:13
+ |
+LL | let _ = match &s {
+ | _____________^
+LL | | Some(x) => Some({
+LL | | if let Some(ref s) = s { (x.clone(), s) } else { panic!() }
+LL | | }),
+LL | | None => None,
+LL | | };
+ | |_____^
+ |
+help: try this
+ |
+LL ~ let _ = s.as_ref().map(|x| {
+LL + if let Some(ref s) = s { (x.clone(), s) } else { panic!() }
+LL ~ });
+ |
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option_2.rs:62:17
+ |
+LL | let _ = match Some(0) {
+ | _________________^
+LL | | Some(x) => Some(f(x)),
+LL | | None => None,
+LL | | };
+ | |_________^ help: try this: `Some(0).map(|x| f(x))`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option_2.rs:67:13
+ |
+LL | let _ = match Some(0) {
+ | _____________^
+LL | | Some(x) => unsafe { Some(f(x)) },
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| unsafe { f(x) })`
+
+error: manual implementation of `Option::map`
+ --> $DIR/manual_map_option_2.rs:71:13
+ |
+LL | let _ = match Some(0) {
+ | _____________^
+LL | | Some(x) => Some(unsafe { f(x) }),
+LL | | None => None,
+LL | | };
+ | |_____^ help: try this: `Some(0).map(|x| unsafe { f(x) })`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.rs b/src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.rs
new file mode 100644
index 000000000..c826b082a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.rs
@@ -0,0 +1,88 @@
+#![warn(clippy::needless_range_loop, clippy::manual_memcpy)]
+
+pub fn manual_copy_with_counters(src: &[i32], dst: &mut [i32], dst2: &mut [i32]) {
+ let mut count = 0;
+ for i in 3..src.len() {
+ dst[i] = src[count];
+ count += 1;
+ }
+
+ let mut count = 0;
+ for i in 3..src.len() {
+ dst[count] = src[i];
+ count += 1;
+ }
+
+ let mut count = 3;
+ for i in 0..src.len() {
+ dst[count] = src[i];
+ count += 1;
+ }
+
+ let mut count = 3;
+ for i in 0..src.len() {
+ dst[i] = src[count];
+ count += 1;
+ }
+
+ let mut count = 0;
+ for i in 3..(3 + src.len()) {
+ dst[i] = src[count];
+ count += 1;
+ }
+
+ let mut count = 3;
+ for i in 5..src.len() {
+ dst[i] = src[count - 2];
+ count += 1;
+ }
+
+ let mut count = 2;
+ for i in 0..dst.len() {
+ dst[i] = src[count];
+ count += 1;
+ }
+
+ let mut count = 5;
+ for i in 3..10 {
+ dst[i] = src[count];
+ count += 1;
+ }
+
+ let mut count = 3;
+ let mut count2 = 30;
+ for i in 0..src.len() {
+ dst[count] = src[i];
+ dst2[count2] = src[i];
+ count += 1;
+ count2 += 1;
+ }
+
+ // make sure parentheses are added properly to bitwise operators, which have lower precedence than
+ // arithmetic ones
+ let mut count = 0 << 1;
+ for i in 0..1 << 1 {
+ dst[count] = src[i + 2];
+ count += 1;
+ }
+
+ // make sure incrementing expressions without semicolons at the end of loops are handled correctly.
+ let mut count = 0;
+ for i in 3..src.len() {
+ dst[i] = src[count];
+ count += 1
+ }
+
+    // make sure loops where the increment is not at the end of the body are handled correctly.
+    // As a possible enhancement, one could adjust the offset in the suggestion according to
+    // the position: for example, if the increment is at the top of the loop,
+    // treat the loop counter as if it were initialized 1 greater than the original value.
+ let mut count = 0;
+ #[allow(clippy::needless_range_loop)]
+ for i in 0..src.len() {
+ count += 1;
+ dst[i] = src[count];
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.stderr b/src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.stderr
new file mode 100644
index 000000000..79d40c0bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_memcpy/with_loop_counters.stderr
@@ -0,0 +1,111 @@
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:5:5
+ |
+LL | / for i in 3..src.len() {
+LL | | dst[i] = src[count];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[3..src.len()].copy_from_slice(&src[..(src.len() - 3)]);`
+ |
+ = note: `-D clippy::manual-memcpy` implied by `-D warnings`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:11:5
+ |
+LL | / for i in 3..src.len() {
+LL | | dst[count] = src[i];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..(src.len() - 3)].copy_from_slice(&src[3..]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:17:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[count] = src[i];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[3..(src.len() + 3)].copy_from_slice(&src[..]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:23:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[i] = src[count];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..src.len()].copy_from_slice(&src[3..(src.len() + 3)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:29:5
+ |
+LL | / for i in 3..(3 + src.len()) {
+LL | | dst[i] = src[count];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[3..(3 + src.len())].copy_from_slice(&src[..(3 + src.len() - 3)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:35:5
+ |
+LL | / for i in 5..src.len() {
+LL | | dst[i] = src[count - 2];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[5..src.len()].copy_from_slice(&src[(3 - 2)..((src.len() - 2) + 3 - 5)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:41:5
+ |
+LL | / for i in 0..dst.len() {
+LL | | dst[i] = src[count];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst.copy_from_slice(&src[2..(dst.len() + 2)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:47:5
+ |
+LL | / for i in 3..10 {
+LL | | dst[i] = src[count];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[3..10].copy_from_slice(&src[5..(10 + 5 - 3)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:54:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[count] = src[i];
+LL | | dst2[count2] = src[i];
+LL | | count += 1;
+LL | | count2 += 1;
+LL | | }
+ | |_____^
+ |
+help: try replacing the loop by
+ |
+LL ~ dst[3..(src.len() + 3)].copy_from_slice(&src[..]);
+LL + dst2[30..(src.len() + 30)].copy_from_slice(&src[..]);
+ |
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:64:5
+ |
+LL | / for i in 0..1 << 1 {
+LL | | dst[count] = src[i + 2];
+LL | | count += 1;
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[(0 << 1)..((1 << 1) + (0 << 1))].copy_from_slice(&src[2..((1 << 1) + 2)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/with_loop_counters.rs:71:5
+ |
+LL | / for i in 3..src.len() {
+LL | | dst[i] = src[count];
+LL | | count += 1
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[3..src.len()].copy_from_slice(&src[..(src.len() - 3)]);`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs
new file mode 100644
index 000000000..ea0535d07
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.rs
@@ -0,0 +1,136 @@
+#![warn(clippy::needless_range_loop, clippy::manual_memcpy)]
+
+const LOOP_OFFSET: usize = 5000;
+
+pub fn manual_copy(src: &[i32], dst: &mut [i32], dst2: &mut [i32]) {
+ // plain manual memcpy
+ for i in 0..src.len() {
+ dst[i] = src[i];
+ }
+
+ // dst offset memcpy
+ for i in 0..src.len() {
+ dst[i + 10] = src[i];
+ }
+
+ // src offset memcpy
+ for i in 0..src.len() {
+ dst[i] = src[i + 10];
+ }
+
+ // src offset memcpy
+ for i in 11..src.len() {
+ dst[i] = src[i - 10];
+ }
+
+ // overwrite entire dst
+ for i in 0..dst.len() {
+ dst[i] = src[i];
+ }
+
+ // manual copy with branch - can't easily convert to memcpy!
+ for i in 0..src.len() {
+ dst[i] = src[i];
+ if dst[i] > 5 {
+ break;
+ }
+ }
+
+ // multiple copies - suggest two memcpy statements
+ for i in 10..256 {
+ dst[i] = src[i - 5];
+ dst2[i + 500] = src[i]
+ }
+
+ // this is a reversal - the copy lint shouldn't be triggered
+ for i in 10..LOOP_OFFSET {
+ dst[i + LOOP_OFFSET] = src[LOOP_OFFSET - i];
+ }
+
+ let some_var = 5;
+ // Offset in variable
+ for i in 10..LOOP_OFFSET {
+ dst[i + LOOP_OFFSET] = src[i - some_var];
+ }
+
+    // Non-continuous copy - don't trigger lint
+ for i in 0..10 {
+ dst[i + i] = src[i];
+ }
+
+ let src_vec = vec![1, 2, 3, 4, 5];
+ let mut dst_vec = vec![0, 0, 0, 0, 0];
+
+ // make sure vectors are supported
+ for i in 0..src_vec.len() {
+ dst_vec[i] = src_vec[i];
+ }
+
+ // lint should not trigger when either
+ // source or destination type is not
+ // slice-like, like DummyStruct
+ struct DummyStruct(i32);
+
+ impl ::std::ops::Index<usize> for DummyStruct {
+ type Output = i32;
+
+ fn index(&self, _: usize) -> &i32 {
+ &self.0
+ }
+ }
+
+ let src = DummyStruct(5);
+ let mut dst_vec = vec![0; 10];
+
+ for i in 0..10 {
+ dst_vec[i] = src[i];
+ }
+
+ // Simplify suggestion (issue #3004)
+ let src = [0, 1, 2, 3, 4];
+ let mut dst = [0, 0, 0, 0, 0, 0];
+ let from = 1;
+
+ for i in from..from + src.len() {
+ dst[i] = src[i - from];
+ }
+
+ for i in from..from + 3 {
+ dst[i] = src[i - from];
+ }
+
+ #[allow(clippy::identity_op)]
+ for i in 0..5 {
+ dst[i - 0] = src[i];
+ }
+
+ #[allow(clippy::reversed_empty_ranges)]
+ for i in 0..0 {
+ dst[i] = src[i];
+ }
+
+ // `RangeTo` `for` loop - don't trigger lint
+ for i in 0.. {
+ dst[i] = src[i];
+ }
+
+ // VecDeque - ideally this would work, but would require something like `range_as_slices`
+ let mut dst = std::collections::VecDeque::from_iter([0; 5]);
+ let src = std::collections::VecDeque::from_iter([0, 1, 2, 3, 4]);
+ for i in 0..dst.len() {
+ dst[i] = src[i];
+ }
+ let src = vec![0, 1, 2, 3, 4];
+ for i in 0..dst.len() {
+ dst[i] = src[i];
+ }
+}
+
+#[warn(clippy::needless_range_loop, clippy::manual_memcpy)]
+pub fn manual_clone(src: &[String], dst: &mut [String]) {
+ for i in 0..src.len() {
+ dst[i] = src[i].clone();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr
new file mode 100644
index 000000000..c163ae061
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_memcpy/without_loop_counters.stderr
@@ -0,0 +1,115 @@
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:7:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[i] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..src.len()].copy_from_slice(&src[..]);`
+ |
+ = note: `-D clippy::manual-memcpy` implied by `-D warnings`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:12:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[i + 10] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[10..(src.len() + 10)].copy_from_slice(&src[..]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:17:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[i] = src[i + 10];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..src.len()].copy_from_slice(&src[10..(src.len() + 10)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:22:5
+ |
+LL | / for i in 11..src.len() {
+LL | | dst[i] = src[i - 10];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[11..src.len()].copy_from_slice(&src[(11 - 10)..(src.len() - 10)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:27:5
+ |
+LL | / for i in 0..dst.len() {
+LL | | dst[i] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst.copy_from_slice(&src[..dst.len()]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:40:5
+ |
+LL | / for i in 10..256 {
+LL | | dst[i] = src[i - 5];
+LL | | dst2[i + 500] = src[i]
+LL | | }
+ | |_____^
+ |
+help: try replacing the loop by
+ |
+LL ~ dst[10..256].copy_from_slice(&src[(10 - 5)..(256 - 5)]);
+LL + dst2[(10 + 500)..(256 + 500)].copy_from_slice(&src[10..256]);
+ |
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:52:5
+ |
+LL | / for i in 10..LOOP_OFFSET {
+LL | | dst[i + LOOP_OFFSET] = src[i - some_var];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[(10 + LOOP_OFFSET)..(LOOP_OFFSET + LOOP_OFFSET)].copy_from_slice(&src[(10 - some_var)..(LOOP_OFFSET - some_var)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:65:5
+ |
+LL | / for i in 0..src_vec.len() {
+LL | | dst_vec[i] = src_vec[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst_vec[..src_vec.len()].copy_from_slice(&src_vec[..]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:94:5
+ |
+LL | / for i in from..from + src.len() {
+LL | | dst[i] = src[i - from];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[from..(from + src.len())].copy_from_slice(&src[..(from + src.len() - from)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:98:5
+ |
+LL | / for i in from..from + 3 {
+LL | | dst[i] = src[i - from];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[from..(from + 3)].copy_from_slice(&src[..(from + 3 - from)]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:103:5
+ |
+LL | / for i in 0..5 {
+LL | | dst[i - 0] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..5].copy_from_slice(&src[..5]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:108:5
+ |
+LL | / for i in 0..0 {
+LL | | dst[i] = src[i];
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..0].copy_from_slice(&src[..0]);`
+
+error: it looks like you're manually copying between slices
+ --> $DIR/without_loop_counters.rs:131:5
+ |
+LL | / for i in 0..src.len() {
+LL | | dst[i] = src[i].clone();
+LL | | }
+ | |_____^ help: try replacing the loop by: `dst[..src.len()].clone_from_slice(&src[..]);`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs
new file mode 100644
index 000000000..03b2433f6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.rs
@@ -0,0 +1,87 @@
+#![feature(lint_reasons)]
+#![warn(clippy::manual_non_exhaustive)]
+#![allow(unused)]
+
+enum E {
+ A,
+ B,
+ #[doc(hidden)]
+ _C,
+}
+
+// user forgot to remove the marker
+#[non_exhaustive]
+enum Ep {
+ A,
+ B,
+ #[doc(hidden)]
+ _C,
+}
+
+// marker variant does not have doc hidden attribute, should be ignored
+enum NoDocHidden {
+ A,
+ B,
+ _C,
+}
+
+// name of variant with doc hidden does not start with underscore, should be ignored
+enum NoUnderscore {
+ A,
+ B,
+ #[doc(hidden)]
+ C,
+}
+
+// variant with doc hidden is not unit, should be ignored
+enum NotUnit {
+ A,
+ B,
+ #[doc(hidden)]
+ _C(bool),
+}
+
+// variant with doc hidden is the only one, should be ignored
+enum OnlyMarker {
+ #[doc(hidden)]
+ _A,
+}
+
+// variant with multiple markers, should be ignored
+enum MultipleMarkers {
+ A,
+ #[doc(hidden)]
+ _B,
+ #[doc(hidden)]
+ _C,
+}
+
+// already non_exhaustive and no markers, should be ignored
+#[non_exhaustive]
+enum NonExhaustive {
+ A,
+ B,
+}
+
+// marker variant is used, don't lint
+enum UsedHidden {
+ #[doc(hidden)]
+ _A,
+ B,
+ C,
+}
+fn foo(x: &mut UsedHidden) {
+ if matches!(*x, UsedHidden::B) {
+ *x = UsedHidden::_A;
+ }
+}
+
+#[expect(clippy::manual_non_exhaustive)]
+enum ExpectLint {
+ A,
+ B,
+ #[doc(hidden)]
+ _C,
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr
new file mode 100644
index 000000000..144fe86df
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_enum.stderr
@@ -0,0 +1,41 @@
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_enum.rs:5:1
+ |
+LL | enum E {
+ | ^-----
+ | |
+ | _help: add the attribute: `#[non_exhaustive] enum E`
+ | |
+LL | | A,
+LL | | B,
+LL | | #[doc(hidden)]
+LL | | _C,
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::manual-non-exhaustive` implied by `-D warnings`
+help: remove this variant
+ --> $DIR/manual_non_exhaustive_enum.rs:9:5
+ |
+LL | _C,
+ | ^^
+
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_enum.rs:14:1
+ |
+LL | / enum Ep {
+LL | | A,
+LL | | B,
+LL | | #[doc(hidden)]
+LL | | _C,
+LL | | }
+ | |_^
+ |
+help: remove this variant
+ --> $DIR/manual_non_exhaustive_enum.rs:18:5
+ |
+LL | _C,
+ | ^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.rs b/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.rs
new file mode 100644
index 000000000..498eee444
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.rs
@@ -0,0 +1,74 @@
+#![warn(clippy::manual_non_exhaustive)]
+#![allow(unused)]
+
+mod structs {
+ struct S {
+ pub a: i32,
+ pub b: i32,
+ _c: (),
+ }
+
+ // user forgot to remove the private field
+ #[non_exhaustive]
+ struct Sp {
+ pub a: i32,
+ pub b: i32,
+ _c: (),
+ }
+
+ // some other fields are private, should be ignored
+ struct PrivateFields {
+ a: i32,
+ pub b: i32,
+ _c: (),
+ }
+
+ // private field name does not start with underscore, should be ignored
+ struct NoUnderscore {
+ pub a: i32,
+ pub b: i32,
+ c: (),
+ }
+
+ // private field is not unit type, should be ignored
+ struct NotUnit {
+ pub a: i32,
+ pub b: i32,
+ _c: i32,
+ }
+
+ // private field is the only field, should be ignored
+ struct OnlyMarker {
+ _a: (),
+ }
+
+ // already non exhaustive and no private fields, should be ignored
+ #[non_exhaustive]
+ struct NonExhaustive {
+ pub a: i32,
+ pub b: i32,
+ }
+}
+
+mod tuple_structs {
+ struct T(pub i32, pub i32, ());
+
+ // user forgot to remove the private field
+ #[non_exhaustive]
+ struct Tp(pub i32, pub i32, ());
+
+ // some other fields are private, should be ignored
+ struct PrivateFields(pub i32, i32, ());
+
+ // private field is not unit type, should be ignored
+ struct NotUnit(pub i32, pub i32, i32);
+
+ // private field is the only field, should be ignored
+ struct OnlyMarker(());
+
+ // already non exhaustive and no private fields, should be ignored
+ #[non_exhaustive]
+ struct NonExhaustive(pub i32, pub i32);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr b/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr
new file mode 100644
index 000000000..e0766c17b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_non_exhaustive_struct.stderr
@@ -0,0 +1,65 @@
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_struct.rs:5:5
+ |
+LL | struct S {
+ | ^-------
+ | |
+ | _____help: add the attribute: `#[non_exhaustive] struct S`
+ | |
+LL | | pub a: i32,
+LL | | pub b: i32,
+LL | | _c: (),
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::manual-non-exhaustive` implied by `-D warnings`
+help: remove this field
+ --> $DIR/manual_non_exhaustive_struct.rs:8:9
+ |
+LL | _c: (),
+ | ^^^^^^
+
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_struct.rs:13:5
+ |
+LL | / struct Sp {
+LL | | pub a: i32,
+LL | | pub b: i32,
+LL | | _c: (),
+LL | | }
+ | |_____^
+ |
+help: remove this field
+ --> $DIR/manual_non_exhaustive_struct.rs:16:9
+ |
+LL | _c: (),
+ | ^^^^^^
+
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_struct.rs:54:5
+ |
+LL | struct T(pub i32, pub i32, ());
+ | --------^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: add the attribute: `#[non_exhaustive] struct T`
+ |
+help: remove this field
+ --> $DIR/manual_non_exhaustive_struct.rs:54:32
+ |
+LL | struct T(pub i32, pub i32, ());
+ | ^^
+
+error: this seems like a manual implementation of the non-exhaustive pattern
+ --> $DIR/manual_non_exhaustive_struct.rs:58:5
+ |
+LL | struct Tp(pub i32, pub i32, ());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove this field
+ --> $DIR/manual_non_exhaustive_struct.rs:58:33
+ |
+LL | struct Tp(pub i32, pub i32, ());
+ | ^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_ok_or.fixed b/src/tools/clippy/tests/ui/manual_ok_or.fixed
new file mode 100644
index 000000000..887a97d7a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_ok_or.fixed
@@ -0,0 +1,40 @@
+// run-rustfix
+#![warn(clippy::manual_ok_or)]
+#![allow(clippy::blacklisted_name)]
+#![allow(clippy::redundant_closure)]
+#![allow(dead_code)]
+#![allow(unused_must_use)]
+
+fn main() {
+ // basic case
+ let foo: Option<i32> = None;
+ foo.ok_or("error");
+
+ // eta expansion case
+ foo.ok_or("error");
+
+ // turbo fish syntax
+ None::<i32>.ok_or("error");
+
+ // multiline case
+ #[rustfmt::skip]
+ foo.ok_or(&format!(
+ "{}{}{}{}{}{}{}",
+ "Alice", "Bob", "Sarah", "Marc", "Sandra", "Eric", "Jenifer"));
+
+ // not applicable, closure isn't direct `Ok` wrapping
+ foo.map_or(Err("error"), |v| Ok(v + 1));
+
+ // not applicable, or side isn't `Result::Err`
+ foo.map_or(Ok::<i32, &str>(1), |v| Ok(v));
+
+ // not applicable, expr is not a `Result` value
+ foo.map_or(42, |v| v);
+
+ // TODO patterns not covered yet
+ match foo {
+ Some(v) => Ok(v),
+ None => Err("error"),
+ };
+ foo.map_or_else(|| Err("error"), |v| Ok(v));
+}
diff --git a/src/tools/clippy/tests/ui/manual_ok_or.rs b/src/tools/clippy/tests/ui/manual_ok_or.rs
new file mode 100644
index 000000000..3c99872f5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_ok_or.rs
@@ -0,0 +1,44 @@
+// run-rustfix
+#![warn(clippy::manual_ok_or)]
+#![allow(clippy::blacklisted_name)]
+#![allow(clippy::redundant_closure)]
+#![allow(dead_code)]
+#![allow(unused_must_use)]
+
+fn main() {
+ // basic case
+ let foo: Option<i32> = None;
+ foo.map_or(Err("error"), |v| Ok(v));
+
+ // eta expansion case
+ foo.map_or(Err("error"), Ok);
+
+ // turbo fish syntax
+ None::<i32>.map_or(Err("error"), |v| Ok(v));
+
+ // multiline case
+ #[rustfmt::skip]
+ foo.map_or(Err::<i32, &str>(
+ &format!(
+ "{}{}{}{}{}{}{}",
+ "Alice", "Bob", "Sarah", "Marc", "Sandra", "Eric", "Jenifer")
+ ),
+ |v| Ok(v),
+ );
+
+ // not applicable, closure isn't direct `Ok` wrapping
+ foo.map_or(Err("error"), |v| Ok(v + 1));
+
+ // not applicable, or side isn't `Result::Err`
+ foo.map_or(Ok::<i32, &str>(1), |v| Ok(v));
+
+ // not applicable, expr is not a `Result` value
+ foo.map_or(42, |v| v);
+
+ // TODO patterns not covered yet
+ match foo {
+ Some(v) => Ok(v),
+ None => Err("error"),
+ };
+ foo.map_or_else(|| Err("error"), |v| Ok(v));
+}
diff --git a/src/tools/clippy/tests/ui/manual_ok_or.stderr b/src/tools/clippy/tests/ui/manual_ok_or.stderr
new file mode 100644
index 000000000..65459a097
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_ok_or.stderr
@@ -0,0 +1,41 @@
+error: this pattern reimplements `Option::ok_or`
+ --> $DIR/manual_ok_or.rs:11:5
+ |
+LL | foo.map_or(Err("error"), |v| Ok(v));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `foo.ok_or("error")`
+ |
+ = note: `-D clippy::manual-ok-or` implied by `-D warnings`
+
+error: this pattern reimplements `Option::ok_or`
+ --> $DIR/manual_ok_or.rs:14:5
+ |
+LL | foo.map_or(Err("error"), Ok);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `foo.ok_or("error")`
+
+error: this pattern reimplements `Option::ok_or`
+ --> $DIR/manual_ok_or.rs:17:5
+ |
+LL | None::<i32>.map_or(Err("error"), |v| Ok(v));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `None::<i32>.ok_or("error")`
+
+error: this pattern reimplements `Option::ok_or`
+ --> $DIR/manual_ok_or.rs:21:5
+ |
+LL | / foo.map_or(Err::<i32, &str>(
+LL | | &format!(
+LL | | "{}{}{}{}{}{}{}",
+LL | | "Alice", "Bob", "Sarah", "Marc", "Sandra", "Eric", "Jenifer")
+LL | | ),
+LL | | |v| Ok(v),
+LL | | );
+ | |_____^
+ |
+help: replace with
+ |
+LL ~ foo.ok_or(&format!(
+LL + "{}{}{}{}{}{}{}",
+LL ~ "Alice", "Bob", "Sarah", "Marc", "Sandra", "Eric", "Jenifer"));
+ |
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_rem_euclid.fixed b/src/tools/clippy/tests/ui/manual_rem_euclid.fixed
new file mode 100644
index 000000000..5601c96c1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_rem_euclid.fixed
@@ -0,0 +1,55 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::manual_rem_euclid)]
+
+#[macro_use]
+extern crate macro_rules;
+
+macro_rules! internal_rem_euclid {
+ () => {
+ let value: i32 = 5;
+ let _: i32 = value.rem_euclid(4);
+ };
+}
+
+fn main() {
+ let value: i32 = 5;
+
+ let _: i32 = value.rem_euclid(4);
+ let _: i32 = value.rem_euclid(4);
+ let _: i32 = value.rem_euclid(4);
+ let _: i32 = value.rem_euclid(4);
+ let _: i32 = 1 + value.rem_euclid(4);
+
+ let _: i32 = (3 + value % 4) % 4;
+ let _: i32 = (-4 + value % -4) % -4;
+ let _: i32 = ((5 % 4) + 4) % 4;
+
+ // Make sure the lint does not trigger if it would cause an error, like with an ambiguous
+ // integer type
+ let not_annotated = 24;
+ let _ = ((not_annotated % 4) + 4) % 4;
+ let inferred: _ = 24;
+ let _ = ((inferred % 4) + 4) % 4;
+
+    // For the lint to apply, the constant must always be on the RHS of the previous value for %
+ let _: i32 = 4 % ((value % 4) + 4);
+ let _: i32 = ((4 % value) + 4) % 4;
+
+ // Lint in internal macros
+ internal_rem_euclid!();
+
+ // Do not lint in external macros
+ manual_rem_euclid!();
+}
+
+// Should lint for params too
+pub fn rem_euclid_4(num: i32) -> i32 {
+ num.rem_euclid(4)
+}
+
+// Constant version came later, should still lint
+pub const fn const_rem_euclid_4(num: i32) -> i32 {
+ num.rem_euclid(4)
+}
diff --git a/src/tools/clippy/tests/ui/manual_rem_euclid.rs b/src/tools/clippy/tests/ui/manual_rem_euclid.rs
new file mode 100644
index 000000000..52135be26
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_rem_euclid.rs
@@ -0,0 +1,55 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::manual_rem_euclid)]
+
+#[macro_use]
+extern crate macro_rules;
+
+macro_rules! internal_rem_euclid {
+ () => {
+ let value: i32 = 5;
+ let _: i32 = ((value % 4) + 4) % 4;
+ };
+}
+
+fn main() {
+ let value: i32 = 5;
+
+ let _: i32 = ((value % 4) + 4) % 4;
+ let _: i32 = (4 + (value % 4)) % 4;
+ let _: i32 = (value % 4 + 4) % 4;
+ let _: i32 = (4 + value % 4) % 4;
+ let _: i32 = 1 + (4 + value % 4) % 4;
+
+ let _: i32 = (3 + value % 4) % 4;
+ let _: i32 = (-4 + value % -4) % -4;
+ let _: i32 = ((5 % 4) + 4) % 4;
+
+ // Make sure the lint does not trigger if it would cause an error, like with an ambiguous
+ // integer type
+ let not_annotated = 24;
+ let _ = ((not_annotated % 4) + 4) % 4;
+ let inferred: _ = 24;
+ let _ = ((inferred % 4) + 4) % 4;
+
+    // For the lint to apply, the constant must always be on the RHS of the previous value for %
+ let _: i32 = 4 % ((value % 4) + 4);
+ let _: i32 = ((4 % value) + 4) % 4;
+
+ // Lint in internal macros
+ internal_rem_euclid!();
+
+ // Do not lint in external macros
+ manual_rem_euclid!();
+}
+
+// Should lint for params too
+pub fn rem_euclid_4(num: i32) -> i32 {
+ ((num % 4) + 4) % 4
+}
+
+// Constant version came later, should still lint
+pub const fn const_rem_euclid_4(num: i32) -> i32 {
+ ((num % 4) + 4) % 4
+}
diff --git a/src/tools/clippy/tests/ui/manual_rem_euclid.stderr b/src/tools/clippy/tests/ui/manual_rem_euclid.stderr
new file mode 100644
index 000000000..a237fd021
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_rem_euclid.stderr
@@ -0,0 +1,57 @@
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:19:18
+ |
+LL | let _: i32 = ((value % 4) + 4) % 4;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
+ |
+ = note: `-D clippy::manual-rem-euclid` implied by `-D warnings`
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:20:18
+ |
+LL | let _: i32 = (4 + (value % 4)) % 4;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:21:18
+ |
+LL | let _: i32 = (value % 4 + 4) % 4;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:22:18
+ |
+LL | let _: i32 = (4 + value % 4) % 4;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:23:22
+ |
+LL | let _: i32 = 1 + (4 + value % 4) % 4;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:12:22
+ |
+LL | let _: i32 = ((value % 4) + 4) % 4;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `value.rem_euclid(4)`
+...
+LL | internal_rem_euclid!();
+ | ---------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `internal_rem_euclid` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:49:5
+ |
+LL | ((num % 4) + 4) % 4
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `num.rem_euclid(4)`
+
+error: manual `rem_euclid` implementation
+ --> $DIR/manual_rem_euclid.rs:54:5
+ |
+LL | ((num % 4) + 4) % 4
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `num.rem_euclid(4)`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_retain.fixed b/src/tools/clippy/tests/ui/manual_retain.fixed
new file mode 100644
index 000000000..fba503a20
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_retain.fixed
@@ -0,0 +1,240 @@
+// run-rustfix
+#![feature(custom_inner_attributes)]
+#![warn(clippy::manual_retain)]
+#![allow(unused)]
+use std::collections::BTreeMap;
+use std::collections::BTreeSet;
+use std::collections::BinaryHeap;
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::collections::VecDeque;
+
+fn main() {
+ binary_heap_retain();
+ btree_set_retain();
+ btree_map_retain();
+ hash_set_retain();
+ hash_map_retain();
+ string_retain();
+ vec_deque_retain();
+ vec_retain();
+ _msrv_153();
+ _msrv_126();
+ _msrv_118();
+}
+
+fn binary_heap_retain() {
+    // NOTE: Do not lint now, because binary_heap_retain is a nightly API.
+    // And we need to add a test case for msrv if we update this implementation.
+ // https://github.com/rust-lang/rust/issues/71503
+ let mut heap = BinaryHeap::from([1, 2, 3]);
+ heap = heap.into_iter().filter(|x| x % 2 == 0).collect();
+ heap = heap.iter().filter(|&x| x % 2 == 0).copied().collect();
+ heap = heap.iter().filter(|&x| x % 2 == 0).cloned().collect();
+
+ // Do not lint, because type conversion is performed
+ heap = heap.into_iter().filter(|x| x % 2 == 0).collect::<BinaryHeap<i8>>();
+ heap = heap.iter().filter(|&x| x % 2 == 0).copied().collect::<BinaryHeap<i8>>();
+ heap = heap.iter().filter(|&x| x % 2 == 0).cloned().collect::<BinaryHeap<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: BinaryHeap<i8> = heap.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: BinaryHeap<i8> = heap.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn btree_map_retain() {
+ let mut btree_map: BTreeMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ // Do lint.
+ btree_map.retain(|k, _| k % 2 == 0);
+ btree_map.retain(|_, &mut v| v % 2 == 0);
+ btree_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0));
+
+ // Do not lint.
+ btree_map = btree_map
+ .into_iter()
+ .filter(|(x, _)| x % 2 == 0)
+ .collect::<BTreeMap<i8, i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut foobar: BTreeMap<i8, i8> = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ btree_map = foobar.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+}
+
+fn btree_set_retain() {
+ let mut btree_set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
+
+ // Do lint.
+ btree_set.retain(|x| x % 2 == 0);
+ btree_set.retain(|x| x % 2 == 0);
+ btree_set.retain(|x| x % 2 == 0);
+
+ // Do not lint, because type conversion is performed
+ btree_set = btree_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .copied()
+ .collect::<BTreeSet<i8>>();
+
+ btree_set = btree_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .cloned()
+ .collect::<BTreeSet<i8>>();
+
+ btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect::<BTreeSet<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut foobar: BTreeSet<i8> = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut bar: BTreeSet<i8> = btree_set.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn hash_map_retain() {
+ let mut hash_map: HashMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ // Do lint.
+ hash_map.retain(|k, _| k % 2 == 0);
+ hash_map.retain(|_, &mut v| v % 2 == 0);
+ hash_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0));
+
+ // Do not lint.
+ hash_map = hash_map
+ .into_iter()
+ .filter(|(x, _)| x % 2 == 0)
+ .collect::<HashMap<i8, i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut foobar: HashMap<i8, i8> = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ hash_map = foobar.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+}
+
+fn hash_set_retain() {
+ let mut hash_set = HashSet::from([1, 2, 3, 4, 5, 6]);
+ // Do lint.
+ hash_set.retain(|x| x % 2 == 0);
+ hash_set.retain(|x| x % 2 == 0);
+ hash_set.retain(|x| x % 2 == 0);
+
+ // Do not lint, because type conversion is performed
+ hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect::<HashSet<i8>>();
+ hash_set = hash_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .copied()
+ .collect::<HashSet<i8>>();
+
+ hash_set = hash_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .cloned()
+ .collect::<HashSet<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: HashSet<i8> = hash_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: HashSet<i8> = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|&x| x % 2 == 0).collect();
+}
+
+fn string_retain() {
+ let mut s = String::from("foobar");
+ // Do lint.
+ s.retain(|c| c != 'o');
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: String = s.chars().filter(|&c| c != 'o').to_owned().collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ s = bar.chars().filter(|&c| c != 'o').to_owned().collect();
+}
+
+fn vec_retain() {
+ let mut vec = vec![0, 1, 2];
+ // Do lint.
+ vec.retain(|x| x % 2 == 0);
+ vec.retain(|x| x % 2 == 0);
+ vec.retain(|x| x % 2 == 0);
+
+ // Do not lint, because type conversion is performed
+ vec = vec.into_iter().filter(|x| x % 2 == 0).collect::<Vec<i8>>();
+ vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect::<Vec<i8>>();
+ vec = vec.iter().filter(|&x| x % 2 == 0).cloned().collect::<Vec<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: Vec<i8> = vec.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: Vec<i8> = vec.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn vec_deque_retain() {
+ let mut vec_deque = VecDeque::new();
+ vec_deque.extend(1..5);
+
+ // Do lint.
+ vec_deque.retain(|x| x % 2 == 0);
+ vec_deque.retain(|x| x % 2 == 0);
+ vec_deque.retain(|x| x % 2 == 0);
+
+ // Do not lint, because type conversion is performed
+ vec_deque = vec_deque
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .copied()
+ .collect::<VecDeque<i8>>();
+ vec_deque = vec_deque
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .cloned()
+ .collect::<VecDeque<i8>>();
+ vec_deque = vec_deque.into_iter().filter(|x| x % 2 == 0).collect::<VecDeque<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: VecDeque<i8> = vec_deque.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: VecDeque<i8> = vec_deque.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn _msrv_153() {
+ #![clippy::msrv = "1.52"]
+ let mut btree_map: BTreeMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+
+ let mut btree_set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
+ btree_set = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+}
+
+fn _msrv_126() {
+ #![clippy::msrv = "1.25"]
+ let mut s = String::from("foobar");
+ s = s.chars().filter(|&c| c != 'o').to_owned().collect();
+}
+
+fn _msrv_118() {
+ #![clippy::msrv = "1.17"]
+ let mut hash_set = HashSet::from([1, 2, 3, 4, 5, 6]);
+ hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
+ let mut hash_map: HashMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ hash_map = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+}
diff --git a/src/tools/clippy/tests/ui/manual_retain.rs b/src/tools/clippy/tests/ui/manual_retain.rs
new file mode 100644
index 000000000..81a849fe7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_retain.rs
@@ -0,0 +1,246 @@
+// run-rustfix
+#![feature(custom_inner_attributes)]
+#![warn(clippy::manual_retain)]
+#![allow(unused)]
+use std::collections::BTreeMap;
+use std::collections::BTreeSet;
+use std::collections::BinaryHeap;
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::collections::VecDeque;
+
+fn main() {
+ binary_heap_retain();
+ btree_set_retain();
+ btree_map_retain();
+ hash_set_retain();
+ hash_map_retain();
+ string_retain();
+ vec_deque_retain();
+ vec_retain();
+ _msrv_153();
+ _msrv_126();
+ _msrv_118();
+}
+
+fn binary_heap_retain() {
+    // NOTE: Do not lint now, because binary_heap_retain is a nightly API.
+    // And we need to add a test case for msrv if we update this implementation.
+ // https://github.com/rust-lang/rust/issues/71503
+ let mut heap = BinaryHeap::from([1, 2, 3]);
+ heap = heap.into_iter().filter(|x| x % 2 == 0).collect();
+ heap = heap.iter().filter(|&x| x % 2 == 0).copied().collect();
+ heap = heap.iter().filter(|&x| x % 2 == 0).cloned().collect();
+
+ // Do not lint, because type conversion is performed
+ heap = heap.into_iter().filter(|x| x % 2 == 0).collect::<BinaryHeap<i8>>();
+ heap = heap.iter().filter(|&x| x % 2 == 0).copied().collect::<BinaryHeap<i8>>();
+ heap = heap.iter().filter(|&x| x % 2 == 0).cloned().collect::<BinaryHeap<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: BinaryHeap<i8> = heap.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: BinaryHeap<i8> = heap.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn btree_map_retain() {
+ let mut btree_map: BTreeMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ // Do lint.
+ btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+ btree_map = btree_map.into_iter().filter(|(_, v)| v % 2 == 0).collect();
+ btree_map = btree_map
+ .into_iter()
+ .filter(|(k, v)| (k % 2 == 0) && (v % 2 == 0))
+ .collect();
+
+ // Do not lint.
+ btree_map = btree_map
+ .into_iter()
+ .filter(|(x, _)| x % 2 == 0)
+ .collect::<BTreeMap<i8, i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut foobar: BTreeMap<i8, i8> = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ btree_map = foobar.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+}
+
+fn btree_set_retain() {
+ let mut btree_set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
+
+ // Do lint.
+ btree_set = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ btree_set = btree_set.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because type conversion is performed
+ btree_set = btree_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .copied()
+ .collect::<BTreeSet<i8>>();
+
+ btree_set = btree_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .cloned()
+ .collect::<BTreeSet<i8>>();
+
+ btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect::<BTreeSet<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut foobar: BTreeSet<i8> = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut bar: BTreeSet<i8> = btree_set.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn hash_map_retain() {
+ let mut hash_map: HashMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ // Do lint.
+ hash_map = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+ hash_map = hash_map.into_iter().filter(|(_, v)| v % 2 == 0).collect();
+ hash_map = hash_map
+ .into_iter()
+ .filter(|(k, v)| (k % 2 == 0) && (v % 2 == 0))
+ .collect();
+
+ // Do not lint.
+ hash_map = hash_map
+ .into_iter()
+ .filter(|(x, _)| x % 2 == 0)
+ .collect::<HashMap<i8, i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut foobar: HashMap<i8, i8> = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ hash_map = foobar.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+}
+
+fn hash_set_retain() {
+ let mut hash_set = HashSet::from([1, 2, 3, 4, 5, 6]);
+ // Do lint.
+ hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
+ hash_set = hash_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ hash_set = hash_set.iter().filter(|&x| x % 2 == 0).cloned().collect();
+
+ // Do not lint, because type conversion is performed
+ hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect::<HashSet<i8>>();
+ hash_set = hash_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .copied()
+ .collect::<HashSet<i8>>();
+
+ hash_set = hash_set
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .cloned()
+ .collect::<HashSet<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: HashSet<i8> = hash_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: HashSet<i8> = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|&x| x % 2 == 0).collect();
+}
+
+fn string_retain() {
+ let mut s = String::from("foobar");
+ // Do lint.
+ s = s.chars().filter(|&c| c != 'o').to_owned().collect();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: String = s.chars().filter(|&c| c != 'o').to_owned().collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ s = bar.chars().filter(|&c| c != 'o').to_owned().collect();
+}
+
+fn vec_retain() {
+ let mut vec = vec![0, 1, 2];
+ // Do lint.
+ vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect();
+ vec = vec.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ vec = vec.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because type conversion is performed
+ vec = vec.into_iter().filter(|x| x % 2 == 0).collect::<Vec<i8>>();
+ vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect::<Vec<i8>>();
+ vec = vec.iter().filter(|&x| x % 2 == 0).cloned().collect::<Vec<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: Vec<i8> = vec.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: Vec<i8> = vec.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn vec_deque_retain() {
+ let mut vec_deque = VecDeque::new();
+ vec_deque.extend(1..5);
+
+ // Do lint.
+ vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).copied().collect();
+ vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ vec_deque = vec_deque.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because type conversion is performed
+ vec_deque = vec_deque
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .copied()
+ .collect::<VecDeque<i8>>();
+ vec_deque = vec_deque
+ .iter()
+ .filter(|&x| x % 2 == 0)
+ .cloned()
+ .collect::<VecDeque<i8>>();
+ vec_deque = vec_deque.into_iter().filter(|x| x % 2 == 0).collect::<VecDeque<i8>>();
+
+ // Do not lint, because this expression is not assign.
+ let mut bar: VecDeque<i8> = vec_deque.iter().filter(|&x| x % 2 == 0).copied().collect();
+ let mut foobar: VecDeque<i8> = vec_deque.into_iter().filter(|x| x % 2 == 0).collect();
+
+ // Do not lint, because it is an assignment to a different variable.
+ bar = foobar.iter().filter(|&x| x % 2 == 0).copied().collect();
+ bar = foobar.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ bar = foobar.into_iter().filter(|x| x % 2 == 0).collect();
+}
+
+fn _msrv_153() {
+ #![clippy::msrv = "1.52"]
+ let mut btree_map: BTreeMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+
+ let mut btree_set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
+ btree_set = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+}
+
+fn _msrv_126() {
+ #![clippy::msrv = "1.25"]
+ let mut s = String::from("foobar");
+ s = s.chars().filter(|&c| c != 'o').to_owned().collect();
+}
+
+fn _msrv_118() {
+ #![clippy::msrv = "1.17"]
+ let mut hash_set = HashSet::from([1, 2, 3, 4, 5, 6]);
+ hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
+ let mut hash_map: HashMap<i8, i8> = (0..8).map(|x| (x, x * 10)).collect();
+ hash_map = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+}
diff --git a/src/tools/clippy/tests/ui/manual_retain.stderr b/src/tools/clippy/tests/ui/manual_retain.stderr
new file mode 100644
index 000000000..ec635919b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_retain.stderr
@@ -0,0 +1,124 @@
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:52:5
+ |
+LL | btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_map.retain(|k, _| k % 2 == 0)`
+ |
+ = note: `-D clippy::manual-retain` implied by `-D warnings`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:53:5
+ |
+LL | btree_map = btree_map.into_iter().filter(|(_, v)| v % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_map.retain(|_, &mut v| v % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:54:5
+ |
+LL | / btree_map = btree_map
+LL | | .into_iter()
+LL | | .filter(|(k, v)| (k % 2 == 0) && (v % 2 == 0))
+LL | | .collect();
+ | |__________________^ help: consider calling `.retain()` instead: `btree_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0))`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:76:5
+ |
+LL | btree_set = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:77:5
+ |
+LL | btree_set = btree_set.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:78:5
+ |
+LL | btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:108:5
+ |
+LL | hash_map = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_map.retain(|k, _| k % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:109:5
+ |
+LL | hash_map = hash_map.into_iter().filter(|(_, v)| v % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_map.retain(|_, &mut v| v % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:110:5
+ |
+LL | / hash_map = hash_map
+LL | | .into_iter()
+LL | | .filter(|(k, v)| (k % 2 == 0) && (v % 2 == 0))
+LL | | .collect();
+ | |__________________^ help: consider calling `.retain()` instead: `hash_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0))`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:131:5
+ |
+LL | hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:132:5
+ |
+LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).copied().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:133:5
+ |
+LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:162:5
+ |
+LL | s = s.chars().filter(|&c| c != 'o').to_owned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `s.retain(|c| c != 'o')`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:174:5
+ |
+LL | vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:175:5
+ |
+LL | vec = vec.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:176:5
+ |
+LL | vec = vec.into_iter().filter(|x| x % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:198:5
+ |
+LL | vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).copied().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:199:5
+ |
+LL | vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).cloned().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)`
+
+error: this expression can be written more simply using `.retain()`
+ --> $DIR/manual_retain.rs:200:5
+ |
+LL | vec_deque = vec_deque.into_iter().filter(|x| x % 2 == 0).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_saturating_arithmetic.fixed b/src/tools/clippy/tests/ui/manual_saturating_arithmetic.fixed
new file mode 100644
index 000000000..c4f53c446
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_saturating_arithmetic.fixed
@@ -0,0 +1,45 @@
+// run-rustfix
+
+#![allow(unused_imports)]
+
+use std::{i128, i32, u128, u32};
+
+fn main() {
+ let _ = 1u32.saturating_add(1);
+ let _ = 1u32.saturating_add(1);
+ let _ = 1u8.saturating_add(1);
+ let _ = 1u128.saturating_add(1);
+ let _ = 1u32.checked_add(1).unwrap_or(1234); // ok
+ let _ = 1u8.checked_add(1).unwrap_or(0); // ok
+ let _ = 1u32.saturating_mul(1);
+
+ let _ = 1u32.saturating_sub(1);
+ let _ = 1u32.saturating_sub(1);
+ let _ = 1u8.saturating_sub(1);
+ let _ = 1u32.checked_sub(1).unwrap_or(1234); // ok
+ let _ = 1u8.checked_sub(1).unwrap_or(255); // ok
+
+ let _ = 1i32.saturating_add(1);
+ let _ = 1i32.saturating_add(1);
+ let _ = 1i8.saturating_add(1);
+ let _ = 1i128.saturating_add(1);
+ let _ = 1i32.saturating_add(-1);
+ let _ = 1i32.saturating_add(-1);
+ let _ = 1i8.saturating_add(-1);
+ let _ = 1i128.saturating_add(-1);
+ let _ = 1i32.checked_add(1).unwrap_or(1234); // ok
+ let _ = 1i8.checked_add(1).unwrap_or(-128); // ok
+ let _ = 1i8.checked_add(-1).unwrap_or(127); // ok
+
+ let _ = 1i32.saturating_sub(1);
+ let _ = 1i32.saturating_sub(1);
+ let _ = 1i8.saturating_sub(1);
+ let _ = 1i128.saturating_sub(1);
+ let _ = 1i32.saturating_sub(-1);
+ let _ = 1i32.saturating_sub(-1);
+ let _ = 1i8.saturating_sub(-1);
+ let _ = 1i128.saturating_sub(-1);
+ let _ = 1i32.checked_sub(1).unwrap_or(1234); // ok
+ let _ = 1i8.checked_sub(1).unwrap_or(127); // ok
+ let _ = 1i8.checked_sub(-1).unwrap_or(-128); // ok
+}
diff --git a/src/tools/clippy/tests/ui/manual_saturating_arithmetic.rs b/src/tools/clippy/tests/ui/manual_saturating_arithmetic.rs
new file mode 100644
index 000000000..cd83cf6e6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_saturating_arithmetic.rs
@@ -0,0 +1,55 @@
+// run-rustfix
+
+#![allow(unused_imports)]
+
+use std::{i128, i32, u128, u32};
+
+fn main() {
+ let _ = 1u32.checked_add(1).unwrap_or(u32::max_value());
+ let _ = 1u32.checked_add(1).unwrap_or(u32::MAX);
+ let _ = 1u8.checked_add(1).unwrap_or(255);
+ let _ = 1u128
+ .checked_add(1)
+ .unwrap_or(340_282_366_920_938_463_463_374_607_431_768_211_455);
+ let _ = 1u32.checked_add(1).unwrap_or(1234); // ok
+ let _ = 1u8.checked_add(1).unwrap_or(0); // ok
+ let _ = 1u32.checked_mul(1).unwrap_or(u32::MAX);
+
+ let _ = 1u32.checked_sub(1).unwrap_or(u32::min_value());
+ let _ = 1u32.checked_sub(1).unwrap_or(u32::MIN);
+ let _ = 1u8.checked_sub(1).unwrap_or(0);
+ let _ = 1u32.checked_sub(1).unwrap_or(1234); // ok
+ let _ = 1u8.checked_sub(1).unwrap_or(255); // ok
+
+ let _ = 1i32.checked_add(1).unwrap_or(i32::max_value());
+ let _ = 1i32.checked_add(1).unwrap_or(i32::MAX);
+ let _ = 1i8.checked_add(1).unwrap_or(127);
+ let _ = 1i128
+ .checked_add(1)
+ .unwrap_or(170_141_183_460_469_231_731_687_303_715_884_105_727);
+ let _ = 1i32.checked_add(-1).unwrap_or(i32::min_value());
+ let _ = 1i32.checked_add(-1).unwrap_or(i32::MIN);
+ let _ = 1i8.checked_add(-1).unwrap_or(-128);
+ let _ = 1i128
+ .checked_add(-1)
+ .unwrap_or(-170_141_183_460_469_231_731_687_303_715_884_105_728);
+ let _ = 1i32.checked_add(1).unwrap_or(1234); // ok
+ let _ = 1i8.checked_add(1).unwrap_or(-128); // ok
+ let _ = 1i8.checked_add(-1).unwrap_or(127); // ok
+
+ let _ = 1i32.checked_sub(1).unwrap_or(i32::min_value());
+ let _ = 1i32.checked_sub(1).unwrap_or(i32::MIN);
+ let _ = 1i8.checked_sub(1).unwrap_or(-128);
+ let _ = 1i128
+ .checked_sub(1)
+ .unwrap_or(-170_141_183_460_469_231_731_687_303_715_884_105_728);
+ let _ = 1i32.checked_sub(-1).unwrap_or(i32::max_value());
+ let _ = 1i32.checked_sub(-1).unwrap_or(i32::MAX);
+ let _ = 1i8.checked_sub(-1).unwrap_or(127);
+ let _ = 1i128
+ .checked_sub(-1)
+ .unwrap_or(170_141_183_460_469_231_731_687_303_715_884_105_727);
+ let _ = 1i32.checked_sub(1).unwrap_or(1234); // ok
+ let _ = 1i8.checked_sub(1).unwrap_or(127); // ok
+ let _ = 1i8.checked_sub(-1).unwrap_or(-128); // ok
+}
diff --git a/src/tools/clippy/tests/ui/manual_saturating_arithmetic.stderr b/src/tools/clippy/tests/ui/manual_saturating_arithmetic.stderr
new file mode 100644
index 000000000..d985f2e75
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_saturating_arithmetic.stderr
@@ -0,0 +1,163 @@
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:8:13
+ |
+LL | let _ = 1u32.checked_add(1).unwrap_or(u32::max_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1u32.saturating_add(1)`
+ |
+ = note: `-D clippy::manual-saturating-arithmetic` implied by `-D warnings`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:9:13
+ |
+LL | let _ = 1u32.checked_add(1).unwrap_or(u32::MAX);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1u32.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:10:13
+ |
+LL | let _ = 1u8.checked_add(1).unwrap_or(255);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1u8.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:11:13
+ |
+LL | let _ = 1u128
+ | _____________^
+LL | | .checked_add(1)
+LL | | .unwrap_or(340_282_366_920_938_463_463_374_607_431_768_211_455);
+ | |_______________________________________________________________________^ help: try using `saturating_add`: `1u128.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:16:13
+ |
+LL | let _ = 1u32.checked_mul(1).unwrap_or(u32::MAX);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_mul`: `1u32.saturating_mul(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:18:13
+ |
+LL | let _ = 1u32.checked_sub(1).unwrap_or(u32::min_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1u32.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:19:13
+ |
+LL | let _ = 1u32.checked_sub(1).unwrap_or(u32::MIN);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1u32.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:20:13
+ |
+LL | let _ = 1u8.checked_sub(1).unwrap_or(0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1u8.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:24:13
+ |
+LL | let _ = 1i32.checked_add(1).unwrap_or(i32::max_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1i32.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:25:13
+ |
+LL | let _ = 1i32.checked_add(1).unwrap_or(i32::MAX);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1i32.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:26:13
+ |
+LL | let _ = 1i8.checked_add(1).unwrap_or(127);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1i8.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:27:13
+ |
+LL | let _ = 1i128
+ | _____________^
+LL | | .checked_add(1)
+LL | | .unwrap_or(170_141_183_460_469_231_731_687_303_715_884_105_727);
+ | |_______________________________________________________________________^ help: try using `saturating_add`: `1i128.saturating_add(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:30:13
+ |
+LL | let _ = 1i32.checked_add(-1).unwrap_or(i32::min_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1i32.saturating_add(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:31:13
+ |
+LL | let _ = 1i32.checked_add(-1).unwrap_or(i32::MIN);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1i32.saturating_add(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:32:13
+ |
+LL | let _ = 1i8.checked_add(-1).unwrap_or(-128);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_add`: `1i8.saturating_add(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:33:13
+ |
+LL | let _ = 1i128
+ | _____________^
+LL | | .checked_add(-1)
+LL | | .unwrap_or(-170_141_183_460_469_231_731_687_303_715_884_105_728);
+ | |________________________________________________________________________^ help: try using `saturating_add`: `1i128.saturating_add(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:40:13
+ |
+LL | let _ = 1i32.checked_sub(1).unwrap_or(i32::min_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1i32.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:41:13
+ |
+LL | let _ = 1i32.checked_sub(1).unwrap_or(i32::MIN);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1i32.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:42:13
+ |
+LL | let _ = 1i8.checked_sub(1).unwrap_or(-128);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1i8.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:43:13
+ |
+LL | let _ = 1i128
+ | _____________^
+LL | | .checked_sub(1)
+LL | | .unwrap_or(-170_141_183_460_469_231_731_687_303_715_884_105_728);
+ | |________________________________________________________________________^ help: try using `saturating_sub`: `1i128.saturating_sub(1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:46:13
+ |
+LL | let _ = 1i32.checked_sub(-1).unwrap_or(i32::max_value());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1i32.saturating_sub(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:47:13
+ |
+LL | let _ = 1i32.checked_sub(-1).unwrap_or(i32::MAX);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1i32.saturating_sub(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:48:13
+ |
+LL | let _ = 1i8.checked_sub(-1).unwrap_or(127);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `saturating_sub`: `1i8.saturating_sub(-1)`
+
+error: manual saturating arithmetic
+ --> $DIR/manual_saturating_arithmetic.rs:49:13
+ |
+LL | let _ = 1i128
+ | _____________^
+LL | | .checked_sub(-1)
+LL | | .unwrap_or(170_141_183_460_469_231_731_687_303_715_884_105_727);
+ | |_______________________________________________________________________^ help: try using `saturating_sub`: `1i128.saturating_sub(-1)`
+
+error: aborting due to 24 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_split_once.fixed b/src/tools/clippy/tests/ui/manual_split_once.fixed
new file mode 100644
index 000000000..c7ca77043
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_split_once.fixed
@@ -0,0 +1,147 @@
+// run-rustfix
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::manual_split_once)]
+#![allow(unused, clippy::iter_skip_next, clippy::iter_nth_zero)]
+
+extern crate itertools;
+
+#[allow(unused_imports)]
+use itertools::Itertools;
+
+fn main() {
+ let _ = "key=value".splitn(2, '=').nth(2);
+ let _ = "key=value".split_once('=').unwrap().1;
+ let _ = "key=value".split_once('=').unwrap().1;
+ let (_, _) = "key=value".split_once('=').unwrap();
+
+ let s = String::from("key=value");
+ let _ = s.split_once('=').unwrap().1;
+
+ let s = Box::<str>::from("key=value");
+ let _ = s.split_once('=').unwrap().1;
+
+ let s = &"key=value";
+ let _ = s.split_once('=').unwrap().1;
+
+ fn _f(s: &str) -> Option<&str> {
+ let _ = s.split_once('=')?.1;
+ let _ = s.split_once('=')?.1;
+ let _ = s.rsplit_once('=')?.0;
+ let _ = s.rsplit_once('=')?.0;
+ None
+ }
+
+ // Don't lint, slices don't have `split_once`
+ let _ = [0, 1, 2].splitn(2, |&x| x == 1).nth(1).unwrap();
+
+ // `rsplitn` gives the results in the reverse order of `rsplit_once`
+ let _ = "key=value".rsplit_once('=').unwrap().0;
+ let (_, _) = "key=value".rsplit_once('=').map(|(x, y)| (y, x)).unwrap();
+ let _ = s.rsplit_once('=').map(|x| x.0);
+}
+
+fn indirect() -> Option<()> {
+ let (l, r) = "a.b.c".split_once('.').unwrap();
+
+
+
+ let (l, r) = "a.b.c".split_once('.')?;
+
+
+
+ let (l, r) = "a.b.c".rsplit_once('.').unwrap();
+
+
+
+ let (l, r) = "a.b.c".rsplit_once('.')?;
+
+
+
+ // could lint, currently doesn't
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let other = 1;
+ let l = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let mut mut_binding = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let tuple = (iter.next()?, iter.next()?);
+
+ // should not lint
+
+ let mut missing_unwrap = "a.b.c".splitn(2, '.');
+ let l = missing_unwrap.next();
+ let r = missing_unwrap.next();
+
+ let mut mixed_unrap = "a.b.c".splitn(2, '.');
+ let unwrap = mixed_unrap.next().unwrap();
+ let question_mark = mixed_unrap.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let same_name = iter.next()?;
+ let same_name = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let shadows_existing = "d";
+ let shadows_existing = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let becomes_shadowed = iter.next()?;
+ let becomes_shadowed = "d";
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let l = iter.next()?;
+ let r = iter.next()?;
+ let third_usage = iter.next()?;
+
+ let mut n_three = "a.b.c".splitn(3, '.');
+ let l = n_three.next()?;
+ let r = n_three.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ {
+ let in_block = iter.next()?;
+ }
+ let r = iter.next()?;
+
+ let mut lacks_binding = "a.b.c".splitn(2, '.');
+ let _ = lacks_binding.next()?;
+ let r = lacks_binding.next()?;
+
+ let mut mapped = "a.b.c".splitn(2, '.').map(|_| "~");
+ let l = iter.next()?;
+ let r = iter.next()?;
+
+ let mut assigned = "";
+ let mut iter = "a.b.c".splitn(2, '.');
+ let l = iter.next()?;
+ assigned = iter.next()?;
+
+ None
+}
+
+fn _msrv_1_51() {
+ #![clippy::msrv = "1.51"]
+ // `str::split_once` was stabilized in 1.52. Do not lint this
+ let _ = "key=value".splitn(2, '=').nth(1).unwrap();
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let a = iter.next().unwrap();
+ let b = iter.next().unwrap();
+}
+
+fn _msrv_1_52() {
+ #![clippy::msrv = "1.52"]
+ let _ = "key=value".split_once('=').unwrap().1;
+
+ let (a, b) = "a.b.c".split_once('.').unwrap();
+
+
+}
diff --git a/src/tools/clippy/tests/ui/manual_split_once.rs b/src/tools/clippy/tests/ui/manual_split_once.rs
new file mode 100644
index 000000000..ee2848a25
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_split_once.rs
@@ -0,0 +1,147 @@
+// run-rustfix
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::manual_split_once)]
+#![allow(unused, clippy::iter_skip_next, clippy::iter_nth_zero)]
+
+extern crate itertools;
+
+#[allow(unused_imports)]
+use itertools::Itertools;
+
+fn main() {
+ let _ = "key=value".splitn(2, '=').nth(2);
+ let _ = "key=value".splitn(2, '=').nth(1).unwrap();
+ let _ = "key=value".splitn(2, '=').skip(1).next().unwrap();
+ let (_, _) = "key=value".splitn(2, '=').next_tuple().unwrap();
+
+ let s = String::from("key=value");
+ let _ = s.splitn(2, '=').nth(1).unwrap();
+
+ let s = Box::<str>::from("key=value");
+ let _ = s.splitn(2, '=').nth(1).unwrap();
+
+ let s = &"key=value";
+ let _ = s.splitn(2, '=').skip(1).next().unwrap();
+
+ fn _f(s: &str) -> Option<&str> {
+ let _ = s.splitn(2, '=').nth(1)?;
+ let _ = s.splitn(2, '=').skip(1).next()?;
+ let _ = s.rsplitn(2, '=').nth(1)?;
+ let _ = s.rsplitn(2, '=').skip(1).next()?;
+ None
+ }
+
+ // Don't lint, slices don't have `split_once`
+ let _ = [0, 1, 2].splitn(2, |&x| x == 1).nth(1).unwrap();
+
+ // `rsplitn` gives the results in the reverse order of `rsplit_once`
+ let _ = "key=value".rsplitn(2, '=').nth(1).unwrap();
+ let (_, _) = "key=value".rsplitn(2, '=').next_tuple().unwrap();
+ let _ = s.rsplitn(2, '=').nth(1);
+}
+
+fn indirect() -> Option<()> {
+ let mut iter = "a.b.c".splitn(2, '.');
+ let l = iter.next().unwrap();
+ let r = iter.next().unwrap();
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let l = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".rsplitn(2, '.');
+ let r = iter.next().unwrap();
+ let l = iter.next().unwrap();
+
+ let mut iter = "a.b.c".rsplitn(2, '.');
+ let r = iter.next()?;
+ let l = iter.next()?;
+
+ // could lint, currently doesn't
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let other = 1;
+ let l = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let mut mut_binding = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let tuple = (iter.next()?, iter.next()?);
+
+ // should not lint
+
+ let mut missing_unwrap = "a.b.c".splitn(2, '.');
+ let l = missing_unwrap.next();
+ let r = missing_unwrap.next();
+
+ let mut mixed_unrap = "a.b.c".splitn(2, '.');
+ let unwrap = mixed_unrap.next().unwrap();
+ let question_mark = mixed_unrap.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let same_name = iter.next()?;
+ let same_name = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let shadows_existing = "d";
+ let shadows_existing = iter.next()?;
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let becomes_shadowed = iter.next()?;
+ let becomes_shadowed = "d";
+ let r = iter.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let l = iter.next()?;
+ let r = iter.next()?;
+ let third_usage = iter.next()?;
+
+ let mut n_three = "a.b.c".splitn(3, '.');
+ let l = n_three.next()?;
+ let r = n_three.next()?;
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ {
+ let in_block = iter.next()?;
+ }
+ let r = iter.next()?;
+
+ let mut lacks_binding = "a.b.c".splitn(2, '.');
+ let _ = lacks_binding.next()?;
+ let r = lacks_binding.next()?;
+
+ let mut mapped = "a.b.c".splitn(2, '.').map(|_| "~");
+ let l = iter.next()?;
+ let r = iter.next()?;
+
+ let mut assigned = "";
+ let mut iter = "a.b.c".splitn(2, '.');
+ let l = iter.next()?;
+ assigned = iter.next()?;
+
+ None
+}
+
+fn _msrv_1_51() {
+ #![clippy::msrv = "1.51"]
+ // `str::split_once` was stabilized in 1.52. Do not lint this
+ let _ = "key=value".splitn(2, '=').nth(1).unwrap();
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let a = iter.next().unwrap();
+ let b = iter.next().unwrap();
+}
+
+fn _msrv_1_52() {
+ #![clippy::msrv = "1.52"]
+ let _ = "key=value".splitn(2, '=').nth(1).unwrap();
+
+ let mut iter = "a.b.c".splitn(2, '.');
+ let a = iter.next().unwrap();
+ let b = iter.next().unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/manual_split_once.stderr b/src/tools/clippy/tests/ui/manual_split_once.stderr
new file mode 100644
index 000000000..269669468
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_split_once.stderr
@@ -0,0 +1,213 @@
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:14:13
+ |
+LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1`
+ |
+ = note: `-D clippy::manual-split-once` implied by `-D warnings`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:15:13
+ |
+LL | let _ = "key=value".splitn(2, '=').skip(1).next().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:16:18
+ |
+LL | let (_, _) = "key=value".splitn(2, '=').next_tuple().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=')`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:19:13
+ |
+LL | let _ = s.splitn(2, '=').nth(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:22:13
+ |
+LL | let _ = s.splitn(2, '=').nth(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:25:13
+ |
+LL | let _ = s.splitn(2, '=').skip(1).next().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:28:17
+ |
+LL | let _ = s.splitn(2, '=').nth(1)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=')?.1`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:29:17
+ |
+LL | let _ = s.splitn(2, '=').skip(1).next()?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=')?.1`
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:30:17
+ |
+LL | let _ = s.rsplitn(2, '=').nth(1)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=')?.0`
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:31:17
+ |
+LL | let _ = s.rsplitn(2, '=').skip(1).next()?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=')?.0`
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:39:13
+ |
+LL | let _ = "key=value".rsplitn(2, '=').nth(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".rsplit_once('=').unwrap().0`
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:40:18
+ |
+LL | let (_, _) = "key=value".rsplitn(2, '=').next_tuple().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".rsplit_once('=').map(|(x, y)| (y, x))`
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:41:13
+ |
+LL | let _ = s.rsplitn(2, '=').nth(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=').map(|x| x.0)`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:45:5
+ |
+LL | let mut iter = "a.b.c".splitn(2, '.');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let l = iter.next().unwrap();
+ | ----------------------------- first usage here
+LL | let r = iter.next().unwrap();
+ | ----------------------------- second usage here
+ |
+help: try `split_once`
+ |
+LL | let (l, r) = "a.b.c".split_once('.').unwrap();
+ |
+help: remove the `iter` usages
+ |
+LL - let l = iter.next().unwrap();
+LL +
+ |
+help: remove the `iter` usages
+ |
+LL - let r = iter.next().unwrap();
+LL +
+ |
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:49:5
+ |
+LL | let mut iter = "a.b.c".splitn(2, '.');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let l = iter.next()?;
+ | --------------------- first usage here
+LL | let r = iter.next()?;
+ | --------------------- second usage here
+ |
+help: try `split_once`
+ |
+LL | let (l, r) = "a.b.c".split_once('.')?;
+ |
+help: remove the `iter` usages
+ |
+LL - let l = iter.next()?;
+LL +
+ |
+help: remove the `iter` usages
+ |
+LL - let r = iter.next()?;
+LL +
+ |
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:53:5
+ |
+LL | let mut iter = "a.b.c".rsplitn(2, '.');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let r = iter.next().unwrap();
+ | ----------------------------- first usage here
+LL | let l = iter.next().unwrap();
+ | ----------------------------- second usage here
+ |
+help: try `rsplit_once`
+ |
+LL | let (l, r) = "a.b.c".rsplit_once('.').unwrap();
+ |
+help: remove the `iter` usages
+ |
+LL - let r = iter.next().unwrap();
+LL +
+ |
+help: remove the `iter` usages
+ |
+LL - let l = iter.next().unwrap();
+LL +
+ |
+
+error: manual implementation of `rsplit_once`
+ --> $DIR/manual_split_once.rs:57:5
+ |
+LL | let mut iter = "a.b.c".rsplitn(2, '.');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let r = iter.next()?;
+ | --------------------- first usage here
+LL | let l = iter.next()?;
+ | --------------------- second usage here
+ |
+help: try `rsplit_once`
+ |
+LL | let (l, r) = "a.b.c".rsplit_once('.')?;
+ |
+help: remove the `iter` usages
+ |
+LL - let r = iter.next()?;
+LL +
+ |
+help: remove the `iter` usages
+ |
+LL - let l = iter.next()?;
+LL +
+ |
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:142:13
+ |
+LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1`
+
+error: manual implementation of `split_once`
+ --> $DIR/manual_split_once.rs:144:5
+ |
+LL | let mut iter = "a.b.c".splitn(2, '.');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | let a = iter.next().unwrap();
+ | ----------------------------- first usage here
+LL | let b = iter.next().unwrap();
+ | ----------------------------- second usage here
+ |
+help: try `split_once`
+ |
+LL | let (a, b) = "a.b.c".split_once('.').unwrap();
+ |
+help: remove the `iter` usages
+ |
+LL - let a = iter.next().unwrap();
+LL +
+ |
+help: remove the `iter` usages
+ |
+LL - let b = iter.next().unwrap();
+LL +
+ |
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.fixed b/src/tools/clippy/tests/ui/manual_str_repeat.fixed
new file mode 100644
index 000000000..0704ba2f9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_str_repeat.fixed
@@ -0,0 +1,66 @@
+// run-rustfix
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::manual_str_repeat)]
+
+use std::borrow::Cow;
+use std::iter::repeat;
+
+fn main() {
+ let _: String = "test".repeat(10);
+ let _: String = "x".repeat(10);
+ let _: String = "'".repeat(10);
+ let _: String = "\"".repeat(10);
+
+ let x = "test";
+ let count = 10;
+ let _ = x.repeat(count + 2);
+
+ macro_rules! m {
+ ($e:expr) => {{ $e }};
+ }
+ // FIXME: macro args are fine
+ let _: String = repeat(m!("test")).take(m!(count)).collect();
+
+ let x = &x;
+ let _: String = (*x).repeat(count);
+
+ macro_rules! repeat_m {
+ ($e:expr) => {{ repeat($e) }};
+ }
+ // Don't lint, repeat is from a macro.
+ let _: String = repeat_m!("test").take(count).collect();
+
+ let x: Box<str> = Box::from("test");
+ let _: String = x.repeat(count);
+
+ #[derive(Clone)]
+ struct S;
+ impl FromIterator<Box<S>> for String {
+ fn from_iter<T: IntoIterator<Item = Box<S>>>(_: T) -> Self {
+ Self::new()
+ }
+ }
+ // Don't lint, wrong box type
+ let _: String = repeat(Box::new(S)).take(count).collect();
+
+ let _: String = Cow::Borrowed("test").repeat(count);
+
+ let x = "x".to_owned();
+ let _: String = x.repeat(count);
+
+ let x = 'x';
+ // Don't lint, not char literal
+ let _: String = repeat(x).take(count).collect();
+}
+
+fn _msrv_1_15() {
+ #![clippy::msrv = "1.15"]
+ // `str::repeat` was stabilized in 1.16. Do not lint this
+ let _: String = std::iter::repeat("test").take(10).collect();
+}
+
+fn _msrv_1_16() {
+ #![clippy::msrv = "1.16"]
+ let _: String = "test".repeat(10);
+}
diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.rs b/src/tools/clippy/tests/ui/manual_str_repeat.rs
new file mode 100644
index 000000000..f522be439
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_str_repeat.rs
@@ -0,0 +1,66 @@
+// run-rustfix
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::manual_str_repeat)]
+
+use std::borrow::Cow;
+use std::iter::repeat;
+
+fn main() {
+ let _: String = std::iter::repeat("test").take(10).collect();
+ let _: String = std::iter::repeat('x').take(10).collect();
+ let _: String = std::iter::repeat('\'').take(10).collect();
+ let _: String = std::iter::repeat('"').take(10).collect();
+
+ let x = "test";
+ let count = 10;
+ let _ = repeat(x).take(count + 2).collect::<String>();
+
+ macro_rules! m {
+ ($e:expr) => {{ $e }};
+ }
+ // FIXME: macro args are fine
+ let _: String = repeat(m!("test")).take(m!(count)).collect();
+
+ let x = &x;
+ let _: String = repeat(*x).take(count).collect();
+
+ macro_rules! repeat_m {
+ ($e:expr) => {{ repeat($e) }};
+ }
+ // Don't lint, repeat is from a macro.
+ let _: String = repeat_m!("test").take(count).collect();
+
+ let x: Box<str> = Box::from("test");
+ let _: String = repeat(x).take(count).collect();
+
+ #[derive(Clone)]
+ struct S;
+ impl FromIterator<Box<S>> for String {
+ fn from_iter<T: IntoIterator<Item = Box<S>>>(_: T) -> Self {
+ Self::new()
+ }
+ }
+ // Don't lint, wrong box type
+ let _: String = repeat(Box::new(S)).take(count).collect();
+
+ let _: String = repeat(Cow::Borrowed("test")).take(count).collect();
+
+ let x = "x".to_owned();
+ let _: String = repeat(x).take(count).collect();
+
+ let x = 'x';
+ // Don't lint, not char literal
+ let _: String = repeat(x).take(count).collect();
+}
+
+fn _msrv_1_15() {
+ #![clippy::msrv = "1.15"]
+ // `str::repeat` was stabilized in 1.16. Do not lint this
+ let _: String = std::iter::repeat("test").take(10).collect();
+}
+
+fn _msrv_1_16() {
+ #![clippy::msrv = "1.16"]
+ let _: String = std::iter::repeat("test").take(10).collect();
+}
diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.stderr b/src/tools/clippy/tests/ui/manual_str_repeat.stderr
new file mode 100644
index 000000000..c65116897
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_str_repeat.stderr
@@ -0,0 +1,64 @@
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:10:21
+ |
+LL | let _: String = std::iter::repeat("test").take(10).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"test".repeat(10)`
+ |
+ = note: `-D clippy::manual-str-repeat` implied by `-D warnings`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:11:21
+ |
+LL | let _: String = std::iter::repeat('x').take(10).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"x".repeat(10)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:12:21
+ |
+LL |     let _: String = std::iter::repeat('\'').take(10).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"'".repeat(10)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:13:21
+ |
+LL | let _: String = std::iter::repeat('"').take(10).collect();
+   |                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"\"".repeat(10)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:17:13
+ |
+LL | let _ = repeat(x).take(count + 2).collect::<String>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count + 2)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:26:21
+ |
+LL | let _: String = repeat(*x).take(count).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(*x).repeat(count)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:35:21
+ |
+LL | let _: String = repeat(x).take(count).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:47:21
+ |
+LL | let _: String = repeat(Cow::Borrowed("test")).take(count).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Cow::Borrowed("test").repeat(count)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:50:21
+ |
+LL | let _: String = repeat(x).take(count).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count)`
+
+error: manual implementation of `str::repeat` using iterators
+ --> $DIR/manual_str_repeat.rs:65:21
+ |
+LL | let _: String = std::iter::repeat("test").take(10).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"test".repeat(10)`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_strip.rs b/src/tools/clippy/tests/ui/manual_strip.rs
new file mode 100644
index 000000000..cbb84eb5c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_strip.rs
@@ -0,0 +1,66 @@
+#![warn(clippy::manual_strip)]
+
+fn main() {
+ let s = "abc";
+
+ if s.starts_with("ab") {
+ str::to_string(&s["ab".len()..]);
+ s["ab".len()..].to_string();
+
+ str::to_string(&s[2..]);
+ s[2..].to_string();
+ }
+
+ if s.ends_with("bc") {
+ str::to_string(&s[..s.len() - "bc".len()]);
+ s[..s.len() - "bc".len()].to_string();
+
+ str::to_string(&s[..s.len() - 2]);
+ s[..s.len() - 2].to_string();
+ }
+
+ // Character patterns
+ if s.starts_with('a') {
+ str::to_string(&s[1..]);
+ s[1..].to_string();
+ }
+
+ // Variable prefix
+ let prefix = "ab";
+ if s.starts_with(prefix) {
+ str::to_string(&s[prefix.len()..]);
+ }
+
+ // Constant prefix
+ const PREFIX: &str = "ab";
+ if s.starts_with(PREFIX) {
+ str::to_string(&s[PREFIX.len()..]);
+ str::to_string(&s[2..]);
+ }
+
+ // Constant target
+ const TARGET: &str = "abc";
+ if TARGET.starts_with(prefix) {
+ str::to_string(&TARGET[prefix.len()..]);
+ }
+
+ // String target - not mutated.
+ let s1: String = "abc".into();
+ if s1.starts_with("ab") {
+ s1[2..].to_uppercase();
+ }
+
+ // String target - mutated. (Don't lint.)
+ let mut s2: String = "abc".into();
+ if s2.starts_with("ab") {
+ s2.push('d');
+ s2[2..].to_uppercase();
+ }
+
+ // Target not stripped. (Don't lint.)
+ let s3 = String::from("abcd");
+ let s4 = String::from("efgh");
+ if s3.starts_with("ab") {
+ s4[2..].to_string();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/manual_strip.stderr b/src/tools/clippy/tests/ui/manual_strip.stderr
new file mode 100644
index 000000000..896edf2ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_strip.stderr
@@ -0,0 +1,132 @@
+error: stripping a prefix manually
+ --> $DIR/manual_strip.rs:7:24
+ |
+LL | str::to_string(&s["ab".len()..]);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::manual-strip` implied by `-D warnings`
+note: the prefix was tested here
+ --> $DIR/manual_strip.rs:6:5
+ |
+LL | if s.starts_with("ab") {
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_prefix("ab") {
+LL ~ str::to_string(<stripped>);
+LL ~ <stripped>.to_string();
+LL |
+LL ~ str::to_string(<stripped>);
+LL ~ <stripped>.to_string();
+ |
+
+error: stripping a suffix manually
+ --> $DIR/manual_strip.rs:15:24
+ |
+LL | str::to_string(&s[..s.len() - "bc".len()]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the suffix was tested here
+ --> $DIR/manual_strip.rs:14:5
+ |
+LL | if s.ends_with("bc") {
+ | ^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_suffix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_suffix("bc") {
+LL ~ str::to_string(<stripped>);
+LL ~ <stripped>.to_string();
+LL |
+LL ~ str::to_string(<stripped>);
+LL ~ <stripped>.to_string();
+ |
+
+error: stripping a prefix manually
+ --> $DIR/manual_strip.rs:24:24
+ |
+LL | str::to_string(&s[1..]);
+ | ^^^^^^^
+ |
+note: the prefix was tested here
+ --> $DIR/manual_strip.rs:23:5
+ |
+LL | if s.starts_with('a') {
+ | ^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_prefix('a') {
+LL ~ str::to_string(<stripped>);
+LL ~ <stripped>.to_string();
+ |
+
+error: stripping a prefix manually
+ --> $DIR/manual_strip.rs:31:24
+ |
+LL | str::to_string(&s[prefix.len()..]);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+note: the prefix was tested here
+ --> $DIR/manual_strip.rs:30:5
+ |
+LL | if s.starts_with(prefix) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_prefix(prefix) {
+LL ~ str::to_string(<stripped>);
+ |
+
+error: stripping a prefix manually
+ --> $DIR/manual_strip.rs:37:24
+ |
+LL | str::to_string(&s[PREFIX.len()..]);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+note: the prefix was tested here
+ --> $DIR/manual_strip.rs:36:5
+ |
+LL | if s.starts_with(PREFIX) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_prefix(PREFIX) {
+LL ~ str::to_string(<stripped>);
+LL ~ str::to_string(<stripped>);
+ |
+
+error: stripping a prefix manually
+ --> $DIR/manual_strip.rs:44:24
+ |
+LL | str::to_string(&TARGET[prefix.len()..]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: the prefix was tested here
+ --> $DIR/manual_strip.rs:43:5
+ |
+LL | if TARGET.starts_with(prefix) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = TARGET.strip_prefix(prefix) {
+LL ~ str::to_string(<stripped>);
+ |
+
+error: stripping a prefix manually
+ --> $DIR/manual_strip.rs:50:9
+ |
+LL | s1[2..].to_uppercase();
+ | ^^^^^^^
+ |
+note: the prefix was tested here
+ --> $DIR/manual_strip.rs:49:5
+ |
+LL | if s1.starts_with("ab") {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s1.strip_prefix("ab") {
+LL ~ <stripped>.to_uppercase();
+ |
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or.fixed b/src/tools/clippy/tests/ui/manual_unwrap_or.fixed
new file mode 100644
index 000000000..7d6897821
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_unwrap_or.fixed
@@ -0,0 +1,181 @@
+// run-rustfix
+#![allow(dead_code)]
+#![allow(unused_variables, clippy::unnecessary_wraps)]
+
+fn option_unwrap_or() {
+ // int case
+ Some(1).unwrap_or(42);
+
+ // int case reversed
+ Some(1).unwrap_or(42);
+
+ // richer none expr
+ Some(1).unwrap_or(1 + 42);
+
+ // multiline case
+ #[rustfmt::skip]
+ Some(1).unwrap_or({
+ 42 + 42
+ + 42 + 42 + 42
+ + 42 + 42 + 42
+ });
+
+ // string case
+ Some("Bob").unwrap_or("Alice");
+
+ // don't lint
+ match Some(1) {
+ Some(i) => i + 2,
+ None => 42,
+ };
+ match Some(1) {
+ Some(i) => i,
+ None => return,
+ };
+ for j in 0..4 {
+ match Some(j) {
+ Some(i) => i,
+ None => continue,
+ };
+ match Some(j) {
+ Some(i) => i,
+ None => break,
+ };
+ }
+
+ // cases where the none arm isn't a constant expression
+ // are not linted due to potential ownership issues
+
+ // ownership issue example, don't lint
+ struct NonCopyable;
+ let mut option: Option<NonCopyable> = None;
+ match option {
+ Some(x) => x,
+ None => {
+ option = Some(NonCopyable);
+ // some more code ...
+ option.unwrap()
+ },
+ };
+
+ // ownership issue example, don't lint
+ let option: Option<&str> = None;
+ match option {
+ Some(s) => s,
+ None => &format!("{} {}!", "hello", "world"),
+ };
+}
+
+fn result_unwrap_or() {
+ // int case
+ Ok::<i32, &str>(1).unwrap_or(42);
+
+ // int case, scrutinee is a binding
+ let a = Ok::<i32, &str>(1);
+ a.unwrap_or(42);
+
+ // int case, suggestion must surround Result expr with parentheses
+ (Ok(1) as Result<i32, &str>).unwrap_or(42);
+
+ // method call case, suggestion must not surround Result expr `s.method()` with parentheses
+ struct S;
+ impl S {
+ fn method(self) -> Option<i32> {
+ Some(42)
+ }
+ }
+ let s = S {};
+ s.method().unwrap_or(42);
+
+ // int case reversed
+ Ok::<i32, &str>(1).unwrap_or(42);
+
+ // richer none expr
+ Ok::<i32, &str>(1).unwrap_or(1 + 42);
+
+ // multiline case
+ #[rustfmt::skip]
+ Ok::<i32, &str>(1).unwrap_or({
+ 42 + 42
+ + 42 + 42 + 42
+ + 42 + 42 + 42
+ });
+
+ // string case
+ Ok::<&str, &str>("Bob").unwrap_or("Alice");
+
+ // don't lint
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i + 2,
+ Err(_) => 42,
+ };
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i,
+ Err(_) => return,
+ };
+ for j in 0..4 {
+ match Ok::<i32, &str>(j) {
+ Ok(i) => i,
+ Err(_) => continue,
+ };
+ match Ok::<i32, &str>(j) {
+ Ok(i) => i,
+ Err(_) => break,
+ };
+ }
+
+ // don't lint, Err value is used
+ match Ok::<&str, &str>("Alice") {
+ Ok(s) => s,
+ Err(s) => s,
+ };
+ // could lint, but unused_variables takes care of it
+ match Ok::<&str, &str>("Alice") {
+ Ok(s) => s,
+ Err(s) => "Bob",
+ };
+}
+
+// don't lint in const fn
+const fn const_fn_option_unwrap_or() {
+ match Some(1) {
+ Some(s) => s,
+ None => 0,
+ };
+}
+
+const fn const_fn_result_unwrap_or() {
+ match Ok::<&str, &str>("Alice") {
+ Ok(s) => s,
+ Err(_) => "Bob",
+ };
+}
+
+mod issue6965 {
+ macro_rules! some_macro {
+ () => {
+ if 1 > 2 { Some(1) } else { None }
+ };
+ }
+
+ fn test() {
+ let _ = some_macro!().unwrap_or(0);
+ }
+}
+
+use std::rc::Rc;
+fn format_name(name: Option<&Rc<str>>) -> &str {
+ match name {
+ None => "<anon>",
+ Some(name) => name,
+ }
+}
+
+fn implicit_deref_ref() {
+ let _: &str = match Some(&"bye") {
+ None => "hi",
+ Some(s) => s,
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or.rs b/src/tools/clippy/tests/ui/manual_unwrap_or.rs
new file mode 100644
index 000000000..b937fe6f9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_unwrap_or.rs
@@ -0,0 +1,223 @@
+// run-rustfix
+#![allow(dead_code)]
+#![allow(unused_variables, clippy::unnecessary_wraps)]
+
+fn option_unwrap_or() {
+ // int case
+ match Some(1) {
+ Some(i) => i,
+ None => 42,
+ };
+
+ // int case reversed
+ match Some(1) {
+ None => 42,
+ Some(i) => i,
+ };
+
+ // richer none expr
+ match Some(1) {
+ Some(i) => i,
+ None => 1 + 42,
+ };
+
+ // multiline case
+ #[rustfmt::skip]
+ match Some(1) {
+ Some(i) => i,
+ None => {
+ 42 + 42
+ + 42 + 42 + 42
+ + 42 + 42 + 42
+ }
+ };
+
+ // string case
+ match Some("Bob") {
+ Some(i) => i,
+ None => "Alice",
+ };
+
+ // don't lint
+ match Some(1) {
+ Some(i) => i + 2,
+ None => 42,
+ };
+ match Some(1) {
+ Some(i) => i,
+ None => return,
+ };
+ for j in 0..4 {
+ match Some(j) {
+ Some(i) => i,
+ None => continue,
+ };
+ match Some(j) {
+ Some(i) => i,
+ None => break,
+ };
+ }
+
+ // cases where the none arm isn't a constant expression
+ // are not linted due to potential ownership issues
+
+ // ownership issue example, don't lint
+ struct NonCopyable;
+ let mut option: Option<NonCopyable> = None;
+ match option {
+ Some(x) => x,
+ None => {
+ option = Some(NonCopyable);
+ // some more code ...
+ option.unwrap()
+ },
+ };
+
+ // ownership issue example, don't lint
+ let option: Option<&str> = None;
+ match option {
+ Some(s) => s,
+ None => &format!("{} {}!", "hello", "world"),
+ };
+}
+
+fn result_unwrap_or() {
+ // int case
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i,
+ Err(_) => 42,
+ };
+
+ // int case, scrutinee is a binding
+ let a = Ok::<i32, &str>(1);
+ match a {
+ Ok(i) => i,
+ Err(_) => 42,
+ };
+
+ // int case, suggestion must surround Result expr with parentheses
+ match Ok(1) as Result<i32, &str> {
+ Ok(i) => i,
+ Err(_) => 42,
+ };
+
+ // method call case, suggestion must not surround Result expr `s.method()` with parentheses
+ struct S;
+ impl S {
+ fn method(self) -> Option<i32> {
+ Some(42)
+ }
+ }
+ let s = S {};
+ match s.method() {
+ Some(i) => i,
+ None => 42,
+ };
+
+ // int case reversed
+ match Ok::<i32, &str>(1) {
+ Err(_) => 42,
+ Ok(i) => i,
+ };
+
+ // richer none expr
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i,
+ Err(_) => 1 + 42,
+ };
+
+ // multiline case
+ #[rustfmt::skip]
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i,
+ Err(_) => {
+ 42 + 42
+ + 42 + 42 + 42
+ + 42 + 42 + 42
+ }
+ };
+
+ // string case
+ match Ok::<&str, &str>("Bob") {
+ Ok(i) => i,
+ Err(_) => "Alice",
+ };
+
+ // don't lint
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i + 2,
+ Err(_) => 42,
+ };
+ match Ok::<i32, &str>(1) {
+ Ok(i) => i,
+ Err(_) => return,
+ };
+ for j in 0..4 {
+ match Ok::<i32, &str>(j) {
+ Ok(i) => i,
+ Err(_) => continue,
+ };
+ match Ok::<i32, &str>(j) {
+ Ok(i) => i,
+ Err(_) => break,
+ };
+ }
+
+ // don't lint, Err value is used
+ match Ok::<&str, &str>("Alice") {
+ Ok(s) => s,
+ Err(s) => s,
+ };
+ // could lint, but unused_variables takes care of it
+ match Ok::<&str, &str>("Alice") {
+ Ok(s) => s,
+ Err(s) => "Bob",
+ };
+}
+
+// don't lint in const fn
+const fn const_fn_option_unwrap_or() {
+ match Some(1) {
+ Some(s) => s,
+ None => 0,
+ };
+}
+
+const fn const_fn_result_unwrap_or() {
+ match Ok::<&str, &str>("Alice") {
+ Ok(s) => s,
+ Err(_) => "Bob",
+ };
+}
+
+mod issue6965 {
+ macro_rules! some_macro {
+ () => {
+ if 1 > 2 { Some(1) } else { None }
+ };
+ }
+
+ fn test() {
+ let _ = match some_macro!() {
+ Some(val) => val,
+ None => 0,
+ };
+ }
+}
+
+use std::rc::Rc;
+fn format_name(name: Option<&Rc<str>>) -> &str {
+ match name {
+ None => "<anon>",
+ Some(name) => name,
+ }
+}
+
+fn implicit_deref_ref() {
+ let _: &str = match Some(&"bye") {
+ None => "hi",
+ Some(s) => s,
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or.stderr b/src/tools/clippy/tests/ui/manual_unwrap_or.stderr
new file mode 100644
index 000000000..0e4cb798d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_unwrap_or.stderr
@@ -0,0 +1,155 @@
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:7:5
+ |
+LL | / match Some(1) {
+LL | | Some(i) => i,
+LL | | None => 42,
+LL | | };
+ | |_____^ help: replace with: `Some(1).unwrap_or(42)`
+ |
+ = note: `-D clippy::manual-unwrap-or` implied by `-D warnings`
+
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:13:5
+ |
+LL | / match Some(1) {
+LL | | None => 42,
+LL | | Some(i) => i,
+LL | | };
+ | |_____^ help: replace with: `Some(1).unwrap_or(42)`
+
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:19:5
+ |
+LL | / match Some(1) {
+LL | | Some(i) => i,
+LL | | None => 1 + 42,
+LL | | };
+ | |_____^ help: replace with: `Some(1).unwrap_or(1 + 42)`
+
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:26:5
+ |
+LL | / match Some(1) {
+LL | | Some(i) => i,
+LL | | None => {
+LL | | 42 + 42
+... |
+LL | | }
+LL | | };
+ | |_____^
+ |
+help: replace with
+ |
+LL ~ Some(1).unwrap_or({
+LL + 42 + 42
+LL + + 42 + 42 + 42
+LL + + 42 + 42 + 42
+LL ~ });
+ |
+
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:36:5
+ |
+LL | / match Some("Bob") {
+LL | | Some(i) => i,
+LL | | None => "Alice",
+LL | | };
+ | |_____^ help: replace with: `Some("Bob").unwrap_or("Alice")`
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:86:5
+ |
+LL | / match Ok::<i32, &str>(1) {
+LL | | Ok(i) => i,
+LL | | Err(_) => 42,
+LL | | };
+ | |_____^ help: replace with: `Ok::<i32, &str>(1).unwrap_or(42)`
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:93:5
+ |
+LL | / match a {
+LL | | Ok(i) => i,
+LL | | Err(_) => 42,
+LL | | };
+ | |_____^ help: replace with: `a.unwrap_or(42)`
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:99:5
+ |
+LL | / match Ok(1) as Result<i32, &str> {
+LL | | Ok(i) => i,
+LL | | Err(_) => 42,
+LL | | };
+ | |_____^ help: replace with: `(Ok(1) as Result<i32, &str>).unwrap_or(42)`
+
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:112:5
+ |
+LL | / match s.method() {
+LL | | Some(i) => i,
+LL | | None => 42,
+LL | | };
+ | |_____^ help: replace with: `s.method().unwrap_or(42)`
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:118:5
+ |
+LL | / match Ok::<i32, &str>(1) {
+LL | | Err(_) => 42,
+LL | | Ok(i) => i,
+LL | | };
+ | |_____^ help: replace with: `Ok::<i32, &str>(1).unwrap_or(42)`
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:124:5
+ |
+LL | / match Ok::<i32, &str>(1) {
+LL | | Ok(i) => i,
+LL | | Err(_) => 1 + 42,
+LL | | };
+ | |_____^ help: replace with: `Ok::<i32, &str>(1).unwrap_or(1 + 42)`
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:131:5
+ |
+LL | / match Ok::<i32, &str>(1) {
+LL | | Ok(i) => i,
+LL | | Err(_) => {
+LL | | 42 + 42
+... |
+LL | | }
+LL | | };
+ | |_____^
+ |
+help: replace with
+ |
+LL ~ Ok::<i32, &str>(1).unwrap_or({
+LL + 42 + 42
+LL + + 42 + 42 + 42
+LL + + 42 + 42 + 42
+LL ~ });
+ |
+
+error: this pattern reimplements `Result::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:141:5
+ |
+LL | / match Ok::<&str, &str>("Bob") {
+LL | | Ok(i) => i,
+LL | | Err(_) => "Alice",
+LL | | };
+ | |_____^ help: replace with: `Ok::<&str, &str>("Bob").unwrap_or("Alice")`
+
+error: this pattern reimplements `Option::unwrap_or`
+ --> $DIR/manual_unwrap_or.rs:201:17
+ |
+LL | let _ = match some_macro!() {
+ | _________________^
+LL | | Some(val) => val,
+LL | | None => 0,
+LL | | };
+ | |_________^ help: replace with: `some_macro!().unwrap_or(0)`
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/many_single_char_names.rs b/src/tools/clippy/tests/ui/many_single_char_names.rs
new file mode 100644
index 000000000..88fcce668
--- /dev/null
+++ b/src/tools/clippy/tests/ui/many_single_char_names.rs
@@ -0,0 +1,74 @@
+#![allow(clippy::too_many_arguments, clippy::diverging_sub_expression)]
+#![warn(clippy::many_single_char_names)]
+
+fn bla() {
+ let a: i32;
+ let (b, c, d): (i32, i64, i16);
+ {
+ {
+ let cdefg: i32;
+ let blar: i32;
+ }
+ {
+ let e: i32;
+ }
+ {
+ let e: i32;
+ let f: i32;
+ }
+ match 5 {
+ 1 => println!(),
+ e => panic!(),
+ }
+ match 5 {
+ 1 => println!(),
+ _ => panic!(),
+ }
+ }
+}
+
+fn bindings(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, g: i32, h: i32) {}
+
+fn bindings2() {
+ let (a, b, c, d, e, f, g, h): (bool, bool, bool, bool, bool, bool, bool, bool) = unimplemented!();
+}
+
+fn shadowing() {
+ let a = 0i32;
+ let a = 0i32;
+ let a = 0i32;
+ let a = 0i32;
+ let a = 0i32;
+ let a = 0i32;
+ {
+ let a = 0i32;
+ }
+}
+
+fn patterns() {
+ enum Z {
+ A(i32),
+ B(i32),
+ C(i32),
+ D(i32),
+ E(i32),
+ F(i32),
+ }
+
+ // These should not trigger a warning, since the pattern bindings are a new scope.
+ match Z::A(0) {
+ Z::A(a) => {},
+ Z::B(b) => {},
+ Z::C(c) => {},
+ Z::D(d) => {},
+ Z::E(e) => {},
+ Z::F(f) => {},
+ }
+}
+
+#[allow(clippy::many_single_char_names)]
+fn issue_3198_allow_works() {
+ let (a, b, c, d, e) = (0, 0, 0, 0, 0);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/many_single_char_names.stderr b/src/tools/clippy/tests/ui/many_single_char_names.stderr
new file mode 100644
index 000000000..ade0f84bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/many_single_char_names.stderr
@@ -0,0 +1,51 @@
+error: 5 bindings with single-character names in scope
+ --> $DIR/many_single_char_names.rs:5:9
+ |
+LL | let a: i32;
+ | ^
+LL | let (b, c, d): (i32, i64, i16);
+ | ^ ^ ^
+...
+LL | let e: i32;
+ | ^
+ |
+ = note: `-D clippy::many-single-char-names` implied by `-D warnings`
+
+error: 6 bindings with single-character names in scope
+ --> $DIR/many_single_char_names.rs:5:9
+ |
+LL | let a: i32;
+ | ^
+LL | let (b, c, d): (i32, i64, i16);
+ | ^ ^ ^
+...
+LL | let e: i32;
+ | ^
+LL | let f: i32;
+ | ^
+
+error: 5 bindings with single-character names in scope
+ --> $DIR/many_single_char_names.rs:5:9
+ |
+LL | let a: i32;
+ | ^
+LL | let (b, c, d): (i32, i64, i16);
+ | ^ ^ ^
+...
+LL | e => panic!(),
+ | ^
+
+error: 8 bindings with single-character names in scope
+ --> $DIR/many_single_char_names.rs:30:13
+ |
+LL | fn bindings(a: i32, b: i32, c: i32, d: i32, e: i32, f: i32, g: i32, h: i32) {}
+ | ^ ^ ^ ^ ^ ^ ^ ^
+
+error: 8 bindings with single-character names in scope
+ --> $DIR/many_single_char_names.rs:33:10
+ |
+LL | let (a, b, c, d, e, f, g, h): (bool, bool, bool, bool, bool, bool, bool, bool) = unimplemented!();
+ | ^ ^ ^ ^ ^ ^ ^ ^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_clone.fixed b/src/tools/clippy/tests/ui/map_clone.fixed
new file mode 100644
index 000000000..0860dcf8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_clone.fixed
@@ -0,0 +1,63 @@
+// run-rustfix
+#![warn(clippy::map_clone)]
+#![allow(
+ clippy::clone_on_copy,
+ clippy::iter_cloned_collect,
+ clippy::many_single_char_names,
+ clippy::redundant_clone
+)]
+
+fn main() {
+ let _: Vec<i8> = vec![5_i8; 6].iter().copied().collect();
+ let _: Vec<String> = vec![String::new()].iter().cloned().collect();
+ let _: Vec<u32> = vec![42, 43].iter().copied().collect();
+ let _: Option<u64> = Some(Box::new(16)).map(|b| *b);
+ let _: Option<u64> = Some(&16).copied();
+ let _: Option<u8> = Some(&1).copied();
+
+ // Don't lint these
+ let v = vec![5_i8; 6];
+ let a = 0;
+ let b = &a;
+ let _ = v.iter().map(|_x| *b);
+ let _ = v.iter().map(|_x| a.clone());
+ let _ = v.iter().map(|&_x| a);
+
+ // Issue #498
+ let _ = std::env::args();
+
+ // Issue #4824 item types that aren't references
+ {
+ use std::rc::Rc;
+
+ let o: Option<Rc<u32>> = Some(Rc::new(0_u32));
+ let _: Option<u32> = o.map(|x| *x);
+ let v: Vec<Rc<u32>> = vec![Rc::new(0_u32)];
+ let _: Vec<u32> = v.into_iter().map(|x| *x).collect();
+ }
+
+ // Issue #5524 mutable references
+ {
+ let mut c = 42;
+ let v = vec![&mut c];
+ let _: Vec<u32> = v.into_iter().map(|x| *x).collect();
+ let mut d = 21;
+ let v = vec![&mut d];
+ let _: Vec<u32> = v.into_iter().map(|&mut x| x).collect();
+ }
+
+ // Issue #6299
+ {
+ let mut aa = 5;
+ let mut bb = 3;
+ let items = vec![&mut aa, &mut bb];
+ let _: Vec<_> = items.into_iter().map(|x| x.clone()).collect();
+ }
+
+ // Issue #6239 deref coercion and clone deref
+ {
+ use std::cell::RefCell;
+
+ let _ = Some(RefCell::new(String::new()).borrow()).map(|s| s.clone());
+ }
+}
diff --git a/src/tools/clippy/tests/ui/map_clone.rs b/src/tools/clippy/tests/ui/map_clone.rs
new file mode 100644
index 000000000..b69873368
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_clone.rs
@@ -0,0 +1,63 @@
+// run-rustfix
+#![warn(clippy::map_clone)]
+#![allow(
+ clippy::clone_on_copy,
+ clippy::iter_cloned_collect,
+ clippy::many_single_char_names,
+ clippy::redundant_clone
+)]
+
+fn main() {
+ let _: Vec<i8> = vec![5_i8; 6].iter().map(|x| *x).collect();
+ let _: Vec<String> = vec![String::new()].iter().map(|x| x.clone()).collect();
+ let _: Vec<u32> = vec![42, 43].iter().map(|&x| x).collect();
+ let _: Option<u64> = Some(Box::new(16)).map(|b| *b);
+ let _: Option<u64> = Some(&16).map(|b| *b);
+ let _: Option<u8> = Some(&1).map(|x| x.clone());
+
+ // Don't lint these
+ let v = vec![5_i8; 6];
+ let a = 0;
+ let b = &a;
+ let _ = v.iter().map(|_x| *b);
+ let _ = v.iter().map(|_x| a.clone());
+ let _ = v.iter().map(|&_x| a);
+
+ // Issue #498
+ let _ = std::env::args().map(|v| v.clone());
+
+ // Issue #4824 item types that aren't references
+ {
+ use std::rc::Rc;
+
+ let o: Option<Rc<u32>> = Some(Rc::new(0_u32));
+ let _: Option<u32> = o.map(|x| *x);
+ let v: Vec<Rc<u32>> = vec![Rc::new(0_u32)];
+ let _: Vec<u32> = v.into_iter().map(|x| *x).collect();
+ }
+
+ // Issue #5524 mutable references
+ {
+ let mut c = 42;
+ let v = vec![&mut c];
+ let _: Vec<u32> = v.into_iter().map(|x| *x).collect();
+ let mut d = 21;
+ let v = vec![&mut d];
+ let _: Vec<u32> = v.into_iter().map(|&mut x| x).collect();
+ }
+
+ // Issue #6299
+ {
+ let mut aa = 5;
+ let mut bb = 3;
+ let items = vec![&mut aa, &mut bb];
+ let _: Vec<_> = items.into_iter().map(|x| x.clone()).collect();
+ }
+
+ // Issue #6239 deref coercion and clone deref
+ {
+ use std::cell::RefCell;
+
+ let _ = Some(RefCell::new(String::new()).borrow()).map(|s| s.clone());
+ }
+}
diff --git a/src/tools/clippy/tests/ui/map_clone.stderr b/src/tools/clippy/tests/ui/map_clone.stderr
new file mode 100644
index 000000000..d84a5bf8d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_clone.stderr
@@ -0,0 +1,40 @@
+error: you are using an explicit closure for copying elements
+ --> $DIR/map_clone.rs:11:22
+ |
+LL | let _: Vec<i8> = vec![5_i8; 6].iter().map(|x| *x).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling the dedicated `copied` method: `vec![5_i8; 6].iter().copied()`
+ |
+ = note: `-D clippy::map-clone` implied by `-D warnings`
+
+error: you are using an explicit closure for cloning elements
+ --> $DIR/map_clone.rs:12:26
+ |
+LL | let _: Vec<String> = vec![String::new()].iter().map(|x| x.clone()).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling the dedicated `cloned` method: `vec![String::new()].iter().cloned()`
+
+error: you are using an explicit closure for copying elements
+ --> $DIR/map_clone.rs:13:23
+ |
+LL | let _: Vec<u32> = vec![42, 43].iter().map(|&x| x).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling the dedicated `copied` method: `vec![42, 43].iter().copied()`
+
+error: you are using an explicit closure for copying elements
+ --> $DIR/map_clone.rs:15:26
+ |
+LL | let _: Option<u64> = Some(&16).map(|b| *b);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider calling the dedicated `copied` method: `Some(&16).copied()`
+
+error: you are using an explicit closure for copying elements
+ --> $DIR/map_clone.rs:16:25
+ |
+LL | let _: Option<u8> = Some(&1).map(|x| x.clone());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling the dedicated `copied` method: `Some(&1).copied()`
+
+error: you are needlessly cloning iterator elements
+ --> $DIR/map_clone.rs:27:29
+ |
+LL | let _ = std::env::args().map(|v| v.clone());
+ | ^^^^^^^^^^^^^^^^^^^ help: remove the `map` call
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_collect_result_unit.fixed b/src/tools/clippy/tests/ui/map_collect_result_unit.fixed
new file mode 100644
index 000000000..e66c9cc24
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_collect_result_unit.fixed
@@ -0,0 +1,16 @@
+// run-rustfix
+#![warn(clippy::map_collect_result_unit)]
+
+fn main() {
+ {
+ let _ = (0..3).try_for_each(|t| Err(t + 1));
+ let _: Result<(), _> = (0..3).try_for_each(|t| Err(t + 1));
+
+ let _ = (0..3).try_for_each(|t| Err(t + 1));
+ }
+}
+
+fn _ignore() {
+ let _ = (0..3).map(|t| Err(t + 1)).collect::<Result<Vec<i32>, _>>();
+ let _ = (0..3).map(|t| Err(t + 1)).collect::<Vec<Result<(), _>>>();
+}
diff --git a/src/tools/clippy/tests/ui/map_collect_result_unit.rs b/src/tools/clippy/tests/ui/map_collect_result_unit.rs
new file mode 100644
index 000000000..6f08f4c3c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_collect_result_unit.rs
@@ -0,0 +1,16 @@
+// run-rustfix
+#![warn(clippy::map_collect_result_unit)]
+
+fn main() {
+ {
+ let _ = (0..3).map(|t| Err(t + 1)).collect::<Result<(), _>>();
+ let _: Result<(), _> = (0..3).map(|t| Err(t + 1)).collect();
+
+ let _ = (0..3).try_for_each(|t| Err(t + 1));
+ }
+}
+
+fn _ignore() {
+ let _ = (0..3).map(|t| Err(t + 1)).collect::<Result<Vec<i32>, _>>();
+ let _ = (0..3).map(|t| Err(t + 1)).collect::<Vec<Result<(), _>>>();
+}
diff --git a/src/tools/clippy/tests/ui/map_collect_result_unit.stderr b/src/tools/clippy/tests/ui/map_collect_result_unit.stderr
new file mode 100644
index 000000000..8b06e13ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_collect_result_unit.stderr
@@ -0,0 +1,16 @@
+error: `.map().collect()` can be replaced with `.try_for_each()`
+ --> $DIR/map_collect_result_unit.rs:6:17
+ |
+LL | let _ = (0..3).map(|t| Err(t + 1)).collect::<Result<(), _>>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(0..3).try_for_each(|t| Err(t + 1))`
+ |
+ = note: `-D clippy::map-collect-result-unit` implied by `-D warnings`
+
+error: `.map().collect()` can be replaced with `.try_for_each()`
+ --> $DIR/map_collect_result_unit.rs:7:32
+ |
+LL | let _: Result<(), _> = (0..3).map(|t| Err(t + 1)).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(0..3).try_for_each(|t| Err(t + 1))`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_err.rs b/src/tools/clippy/tests/ui/map_err.rs
new file mode 100644
index 000000000..bb35ab1a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_err.rs
@@ -0,0 +1,29 @@
+#![warn(clippy::map_err_ignore)]
+#![allow(clippy::unnecessary_wraps)]
+use std::error::Error;
+use std::fmt;
+
+#[derive(Debug)]
+enum Errors {
+ Ignored,
+}
+
+impl Error for Errors {}
+
+impl fmt::Display for Errors {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Error")
+ }
+}
+
+fn main() -> Result<(), Errors> {
+ let x = u32::try_from(-123_i32);
+
+ println!("{:?}", x.map_err(|_| Errors::Ignored));
+
+ // Should not warn you because you explicitly ignore the parameter
+ // using a named wildcard value
+ println!("{:?}", x.map_err(|_foo| Errors::Ignored));
+
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/map_err.stderr b/src/tools/clippy/tests/ui/map_err.stderr
new file mode 100644
index 000000000..c03584052
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_err.stderr
@@ -0,0 +1,11 @@
+error: `map_err(|_|...` wildcard pattern discards the original error
+ --> $DIR/map_err.rs:22:32
+ |
+LL | println!("{:?}", x.map_err(|_| Errors::Ignored));
+ | ^^^
+ |
+ = note: `-D clippy::map-err-ignore` implied by `-D warnings`
+ = help: consider storing the original error as a source in the new error, or silence this warning using an ignored identifier (`.map_err(|_foo| ...`)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/map_flatten.rs b/src/tools/clippy/tests/ui/map_flatten.rs
new file mode 100644
index 000000000..7d47ee09d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_flatten.rs
@@ -0,0 +1,55 @@
+#![warn(clippy::map_flatten)]
+#![feature(result_flattening)]
+
+// issue #8506, multi-line
+#[rustfmt::skip]
+fn long_span() {
+ let _: Option<i32> = Some(1)
+ .map(|x| {
+ if x <= 5 {
+ Some(x)
+ } else {
+ None
+ }
+ })
+ .flatten();
+
+ let _: Result<i32, i32> = Ok(1)
+ .map(|x| {
+ if x == 1 {
+ Ok(x)
+ } else {
+ Err(0)
+ }
+ })
+ .flatten();
+
+ let result: Result<i32, i32> = Ok(2);
+ fn do_something() { }
+ let _: Result<i32, i32> = result
+ .map(|res| {
+ if res > 0 {
+ do_something();
+ Ok(res)
+ } else {
+ Err(0)
+ }
+ })
+ .flatten();
+
+ let _: Vec<_> = vec![5_i8; 6]
+ .into_iter()
+ .map(|some_value| {
+ if some_value > 3 {
+ Some(some_value)
+ } else {
+ None
+ }
+ })
+ .flatten()
+ .collect();
+}
+
+fn main() {
+ long_span();
+}
diff --git a/src/tools/clippy/tests/ui/map_flatten.stderr b/src/tools/clippy/tests/ui/map_flatten.stderr
new file mode 100644
index 000000000..4b2630d68
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_flatten.stderr
@@ -0,0 +1,100 @@
+error: called `map(..).flatten()` on `Option`
+ --> $DIR/map_flatten.rs:8:10
+ |
+LL | .map(|x| {
+ | __________^
+LL | | if x <= 5 {
+LL | | Some(x)
+LL | | } else {
+... |
+LL | | })
+LL | | .flatten();
+ | |__________________^
+ |
+ = note: `-D clippy::map-flatten` implied by `-D warnings`
+help: try replacing `map` with `and_then` and remove the `.flatten()`
+ |
+LL ~ .and_then(|x| {
+LL + if x <= 5 {
+LL + Some(x)
+LL + } else {
+LL + None
+LL + }
+LL ~ });
+ |
+
+error: called `map(..).flatten()` on `Result`
+ --> $DIR/map_flatten.rs:18:10
+ |
+LL | .map(|x| {
+ | __________^
+LL | | if x == 1 {
+LL | | Ok(x)
+LL | | } else {
+... |
+LL | | })
+LL | | .flatten();
+ | |__________________^
+ |
+help: try replacing `map` with `and_then` and remove the `.flatten()`
+ |
+LL ~ .and_then(|x| {
+LL + if x == 1 {
+LL + Ok(x)
+LL + } else {
+LL + Err(0)
+LL + }
+LL ~ });
+ |
+
+error: called `map(..).flatten()` on `Result`
+ --> $DIR/map_flatten.rs:30:10
+ |
+LL | .map(|res| {
+ | __________^
+LL | | if res > 0 {
+LL | | do_something();
+LL | | Ok(res)
+... |
+LL | | })
+LL | | .flatten();
+ | |__________________^
+ |
+help: try replacing `map` with `and_then` and remove the `.flatten()`
+ |
+LL ~ .and_then(|res| {
+LL + if res > 0 {
+LL + do_something();
+LL + Ok(res)
+LL + } else {
+LL + Err(0)
+LL + }
+LL ~ });
+ |
+
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten.rs:42:10
+ |
+LL | .map(|some_value| {
+ | __________^
+LL | | if some_value > 3 {
+LL | | Some(some_value)
+LL | | } else {
+... |
+LL | | })
+LL | | .flatten()
+ | |__________________^
+ |
+help: try replacing `map` with `filter_map` and remove the `.flatten()`
+ |
+LL ~ .filter_map(|some_value| {
+LL + if some_value > 3 {
+LL + Some(some_value)
+LL + } else {
+LL + None
+LL + }
+LL + })
+ |
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_flatten_fixable.fixed b/src/tools/clippy/tests/ui/map_flatten_fixable.fixed
new file mode 100644
index 000000000..312819a0a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_flatten_fixable.fixed
@@ -0,0 +1,65 @@
+// run-rustfix
+
+#![warn(clippy::all, clippy::pedantic)]
+#![allow(clippy::let_underscore_drop)]
+#![allow(clippy::missing_docs_in_private_items)]
+#![allow(clippy::map_identity)]
+#![allow(clippy::redundant_closure)]
+#![allow(clippy::unnecessary_wraps)]
+#![feature(result_flattening)]
+
+fn main() {
+ // mapping to Option on Iterator
+ fn option_id(x: i8) -> Option<i8> {
+ Some(x)
+ }
+ let option_id_ref: fn(i8) -> Option<i8> = option_id;
+ let option_id_closure = |x| Some(x);
+ let _: Vec<_> = vec![5_i8; 6].into_iter().filter_map(option_id).collect();
+ let _: Vec<_> = vec![5_i8; 6].into_iter().filter_map(option_id_ref).collect();
+ let _: Vec<_> = vec![5_i8; 6].into_iter().filter_map(option_id_closure).collect();
+ let _: Vec<_> = vec![5_i8; 6].into_iter().filter_map(|x| x.checked_add(1)).collect();
+
+ // mapping to Iterator on Iterator
+ let _: Vec<_> = vec![5_i8; 6].into_iter().flat_map(|x| 0..x).collect();
+
+ // mapping to Option on Option
+ let _: Option<_> = (Some(Some(1))).and_then(|x| x);
+
+ // mapping to Result on Result
+ let _: Result<_, &str> = (Ok(Ok(1))).and_then(|x| x);
+
+ issue8734();
+ issue8878();
+}
+
+fn issue8734() {
+ let _ = [0u8, 1, 2, 3]
+ .into_iter()
+ .flat_map(|n| match n {
+ 1 => [n
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)],
+ n => [n],
+ });
+}
+
+#[allow(clippy::bind_instead_of_map)] // map + flatten will be suggested to `and_then`, but afterwards `map` is suggested again
+#[rustfmt::skip] // whitespace is important for this one
+fn issue8878() {
+ std::collections::HashMap::<u32, u32>::new()
+ .get(&0)
+ .and_then(|_| {
+// we need some newlines
+// so that the span is big enough
+// for a split output of the diagnostic
+ Some("")
+ // whitespace beforehand is important as well
+ });
+}
diff --git a/src/tools/clippy/tests/ui/map_flatten_fixable.rs b/src/tools/clippy/tests/ui/map_flatten_fixable.rs
new file mode 100644
index 000000000..3fbf4f9a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_flatten_fixable.rs
@@ -0,0 +1,67 @@
+// run-rustfix
+
+#![warn(clippy::all, clippy::pedantic)]
+#![allow(clippy::let_underscore_drop)]
+#![allow(clippy::missing_docs_in_private_items)]
+#![allow(clippy::map_identity)]
+#![allow(clippy::redundant_closure)]
+#![allow(clippy::unnecessary_wraps)]
+#![feature(result_flattening)]
+
+fn main() {
+ // mapping to Option on Iterator
+ fn option_id(x: i8) -> Option<i8> {
+ Some(x)
+ }
+ let option_id_ref: fn(i8) -> Option<i8> = option_id;
+ let option_id_closure = |x| Some(x);
+ let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id).flatten().collect();
+ let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id_ref).flatten().collect();
+ let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id_closure).flatten().collect();
+ let _: Vec<_> = vec![5_i8; 6].into_iter().map(|x| x.checked_add(1)).flatten().collect();
+
+ // mapping to Iterator on Iterator
+ let _: Vec<_> = vec![5_i8; 6].into_iter().map(|x| 0..x).flatten().collect();
+
+ // mapping to Option on Option
+ let _: Option<_> = (Some(Some(1))).map(|x| x).flatten();
+
+ // mapping to Result on Result
+ let _: Result<_, &str> = (Ok(Ok(1))).map(|x| x).flatten();
+
+ issue8734();
+ issue8878();
+}
+
+fn issue8734() {
+ let _ = [0u8, 1, 2, 3]
+ .into_iter()
+ .map(|n| match n {
+ 1 => [n
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)
+ .saturating_add(1)],
+ n => [n],
+ })
+ .flatten();
+}
+
+#[allow(clippy::bind_instead_of_map)] // map + flatten will be suggested to `and_then`, but afterwards `map` is suggested again
+#[rustfmt::skip] // whitespace is important for this one
+fn issue8878() {
+ std::collections::HashMap::<u32, u32>::new()
+ .get(&0)
+ .map(|_| {
+// we need some newlines
+// so that the span is big enough
+// for a split output of the diagnostic
+ Some("")
+ // whitespace beforehand is important as well
+ })
+ .flatten();
+}
diff --git a/src/tools/clippy/tests/ui/map_flatten_fixable.stderr b/src/tools/clippy/tests/ui/map_flatten_fixable.stderr
new file mode 100644
index 000000000..c91f0b9ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_flatten_fixable.stderr
@@ -0,0 +1,99 @@
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten_fixable.rs:18:47
+ |
+LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id).flatten().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(option_id)`
+ |
+ = note: `-D clippy::map-flatten` implied by `-D warnings`
+
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten_fixable.rs:19:47
+ |
+LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id_ref).flatten().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(option_id_ref)`
+
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten_fixable.rs:20:47
+ |
+LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id_closure).flatten().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(option_id_closure)`
+
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten_fixable.rs:21:47
+ |
+LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(|x| x.checked_add(1)).flatten().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(|x| x.checked_add(1))`
+
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten_fixable.rs:24:47
+ |
+LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(|x| 0..x).flatten().collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `flat_map` and remove the `.flatten()`: `flat_map(|x| 0..x)`
+
+error: called `map(..).flatten()` on `Option`
+ --> $DIR/map_flatten_fixable.rs:27:40
+ |
+LL | let _: Option<_> = (Some(Some(1))).map(|x| x).flatten();
+ | ^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `and_then` and remove the `.flatten()`: `and_then(|x| x)`
+
+error: called `map(..).flatten()` on `Result`
+ --> $DIR/map_flatten_fixable.rs:30:42
+ |
+LL | let _: Result<_, &str> = (Ok(Ok(1))).map(|x| x).flatten();
+ | ^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `and_then` and remove the `.flatten()`: `and_then(|x| x)`
+
+error: called `map(..).flatten()` on `Iterator`
+ --> $DIR/map_flatten_fixable.rs:39:10
+ |
+LL | .map(|n| match n {
+ | __________^
+LL | | 1 => [n
+LL | | .saturating_add(1)
+LL | | .saturating_add(1)
+... |
+LL | | })
+LL | | .flatten();
+ | |__________________^
+ |
+help: try replacing `map` with `flat_map` and remove the `.flatten()`
+ |
+LL ~ .flat_map(|n| match n {
+LL + 1 => [n
+LL + .saturating_add(1)
+LL + .saturating_add(1)
+LL + .saturating_add(1)
+LL + .saturating_add(1)
+LL + .saturating_add(1)
+LL + .saturating_add(1)
+LL + .saturating_add(1)
+LL + .saturating_add(1)],
+LL + n => [n],
+LL ~ });
+ |
+
+error: called `map(..).flatten()` on `Option`
+ --> $DIR/map_flatten_fixable.rs:59:10
+ |
+LL | .map(|_| {
+ | __________^
+LL | | // we need some newlines
+LL | | // so that the span is big enough
+LL | | // for a split output of the diagnostic
+... |
+LL | | })
+LL | | .flatten();
+ | |__________________^
+ |
+help: try replacing `map` with `and_then` and remove the `.flatten()`
+ |
+LL ~ .and_then(|_| {
+LL + // we need some newlines
+LL + // so that the span is big enough
+LL + // for a split output of the diagnostic
+LL + Some("")
+LL + // whitespace beforehand is important as well
+LL ~ });
+ |
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_identity.fixed b/src/tools/clippy/tests/ui/map_identity.fixed
new file mode 100644
index 000000000..2256e51f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_identity.fixed
@@ -0,0 +1,25 @@
+// run-rustfix
+#![warn(clippy::map_identity)]
+#![allow(clippy::needless_return)]
+
+fn main() {
+ let x: [u16; 3] = [1, 2, 3];
+ // should lint
+ let _: Vec<_> = x.iter().map(not_identity).collect();
+ let _: Vec<_> = x.iter().collect();
+ let _: Option<u8> = Some(3);
+ let _: Result<i8, f32> = Ok(-3);
+ // should not lint
+ let _: Vec<_> = x.iter().map(|x| 2 * x).collect();
+ let _: Vec<_> = x.iter().map(not_identity).map(|x| return x - 4).collect();
+ let _: Option<u8> = None.map(|x: u8| x - 1);
+ let _: Result<i8, f32> = Err(2.3).map(|x: i8| {
+ return x + 3;
+ });
+ let _: Result<u32, u32> = Ok(1);
+ let _: Result<u32, u32> = Ok(1).map_err(|a: u32| a * 42);
+}
+
+fn not_identity(x: &u16) -> u16 {
+ *x
+}
diff --git a/src/tools/clippy/tests/ui/map_identity.rs b/src/tools/clippy/tests/ui/map_identity.rs
new file mode 100644
index 000000000..ccfdc9ea7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_identity.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+#![warn(clippy::map_identity)]
+#![allow(clippy::needless_return)]
+
+fn main() {
+ let x: [u16; 3] = [1, 2, 3];
+ // should lint
+ let _: Vec<_> = x.iter().map(not_identity).map(|x| return x).collect();
+ let _: Vec<_> = x.iter().map(std::convert::identity).map(|y| y).collect();
+ let _: Option<u8> = Some(3).map(|x| x);
+ let _: Result<i8, f32> = Ok(-3).map(|x| {
+ return x;
+ });
+ // should not lint
+ let _: Vec<_> = x.iter().map(|x| 2 * x).collect();
+ let _: Vec<_> = x.iter().map(not_identity).map(|x| return x - 4).collect();
+ let _: Option<u8> = None.map(|x: u8| x - 1);
+ let _: Result<i8, f32> = Err(2.3).map(|x: i8| {
+ return x + 3;
+ });
+ let _: Result<u32, u32> = Ok(1).map_err(|a| a);
+ let _: Result<u32, u32> = Ok(1).map_err(|a: u32| a * 42);
+}
+
+fn not_identity(x: &u16) -> u16 {
+ *x
+}
diff --git a/src/tools/clippy/tests/ui/map_identity.stderr b/src/tools/clippy/tests/ui/map_identity.stderr
new file mode 100644
index 000000000..b6a77281f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_identity.stderr
@@ -0,0 +1,43 @@
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:8:47
+ |
+LL | let _: Vec<_> = x.iter().map(not_identity).map(|x| return x).collect();
+ | ^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
+ |
+ = note: `-D clippy::map-identity` implied by `-D warnings`
+
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:9:57
+ |
+LL | let _: Vec<_> = x.iter().map(std::convert::identity).map(|y| y).collect();
+ | ^^^^^^^^^^^ help: remove the call to `map`
+
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:9:29
+ |
+LL | let _: Vec<_> = x.iter().map(std::convert::identity).map(|y| y).collect();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map`
+
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:10:32
+ |
+LL | let _: Option<u8> = Some(3).map(|x| x);
+ | ^^^^^^^^^^^ help: remove the call to `map`
+
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:11:36
+ |
+LL | let _: Result<i8, f32> = Ok(-3).map(|x| {
+ | ____________________________________^
+LL | | return x;
+LL | | });
+ | |______^ help: remove the call to `map`
+
+error: unnecessary map of the identity function
+ --> $DIR/map_identity.rs:21:36
+ |
+LL | let _: Result<u32, u32> = Ok(1).map_err(|a| a);
+ | ^^^^^^^^^^^^^^^ help: remove the call to `map_err`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_unit_fn.rs b/src/tools/clippy/tests/ui/map_unit_fn.rs
new file mode 100644
index 000000000..e7f07b50f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_unit_fn.rs
@@ -0,0 +1,11 @@
+#![allow(unused)]
+struct Mappable;
+
+impl Mappable {
+ pub fn map(&self) {}
+}
+
+fn main() {
+ let m = Mappable {};
+ m.map();
+}
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or.rs b/src/tools/clippy/tests/ui/map_unwrap_or.rs
new file mode 100644
index 000000000..87e16f5d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_unwrap_or.rs
@@ -0,0 +1,81 @@
+// aux-build:option_helpers.rs
+
+#![warn(clippy::map_unwrap_or)]
+
+#[macro_use]
+extern crate option_helpers;
+
+use std::collections::HashMap;
+
+#[rustfmt::skip]
+fn option_methods() {
+ let opt = Some(1);
+
+ // Check for `option.map(_).unwrap_or(_)` use.
+ // Single line case.
+ let _ = opt.map(|x| x + 1)
+ // Should lint even though this call is on a separate line.
+ .unwrap_or(0);
+ // Multi-line cases.
+ let _ = opt.map(|x| {
+ x + 1
+ }
+ ).unwrap_or(0);
+ let _ = opt.map(|x| x + 1)
+ .unwrap_or({
+ 0
+ });
+ // Single line `map(f).unwrap_or(None)` case.
+ let _ = opt.map(|x| Some(x + 1)).unwrap_or(None);
+ // Multi-line `map(f).unwrap_or(None)` cases.
+ let _ = opt.map(|x| {
+ Some(x + 1)
+ }
+ ).unwrap_or(None);
+ let _ = opt
+ .map(|x| Some(x + 1))
+ .unwrap_or(None);
+ // macro case
+ let _ = opt_map!(opt, |x| x + 1).unwrap_or(0); // should not lint
+
+ // Should not lint if not copyable
+ let id: String = "identifier".to_string();
+ let _ = Some("prefix").map(|p| format!("{}.{}", p, id)).unwrap_or(id);
+ // ...but DO lint if the `unwrap_or` argument is not used in the `map`
+ let id: String = "identifier".to_string();
+ let _ = Some("prefix").map(|p| format!("{}.", p)).unwrap_or(id);
+
+ // Check for `option.map(_).unwrap_or_else(_)` use.
+ // Multi-line cases.
+ let _ = opt.map(|x| {
+ x + 1
+ }
+ ).unwrap_or_else(|| 0);
+ let _ = opt.map(|x| x + 1)
+ .unwrap_or_else(||
+ 0
+ );
+}
+
+#[rustfmt::skip]
+fn result_methods() {
+ let res: Result<i32, ()> = Ok(1);
+
+ // Check for `result.map(_).unwrap_or_else(_)` use.
+ // multi line cases
+ let _ = res.map(|x| {
+ x + 1
+ }
+ ).unwrap_or_else(|_e| 0);
+ let _ = res.map(|x| x + 1)
+ .unwrap_or_else(|_e| {
+ 0
+ });
+ // macro case
+ let _ = opt_map!(res, |x| x + 1).unwrap_or_else(|_e| 0); // should not lint
+}
+
+fn main() {
+ option_methods();
+ result_methods();
+}
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or.stderr b/src/tools/clippy/tests/ui/map_unwrap_or.stderr
new file mode 100644
index 000000000..abc9c1ece
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_unwrap_or.stderr
@@ -0,0 +1,150 @@
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:16:13
+ |
+LL | let _ = opt.map(|x| x + 1)
+ | _____________^
+LL | | // Should lint even though this call is on a separate line.
+LL | | .unwrap_or(0);
+ | |_____________________^
+ |
+ = note: `-D clippy::map-unwrap-or` implied by `-D warnings`
+help: use `map_or(<a>, <f>)` instead
+ |
+LL - let _ = opt.map(|x| x + 1)
+LL + let _ = opt.map_or(0, |x| x + 1);
+ |
+
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:20:13
+ |
+LL | let _ = opt.map(|x| {
+ | _____________^
+LL | | x + 1
+LL | | }
+LL | | ).unwrap_or(0);
+ | |__________________^
+ |
+help: use `map_or(<a>, <f>)` instead
+ |
+LL ~ let _ = opt.map_or(0, |x| {
+LL | x + 1
+LL | }
+LL ~ );
+ |
+
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:24:13
+ |
+LL | let _ = opt.map(|x| x + 1)
+ | _____________^
+LL | | .unwrap_or({
+LL | | 0
+LL | | });
+ | |__________^
+ |
+help: use `map_or(<a>, <f>)` instead
+ |
+LL ~ let _ = opt.map_or({
+LL + 0
+LL ~ }, |x| x + 1);
+ |
+
+error: called `map(<f>).unwrap_or(None)` on an `Option` value. This can be done more directly by calling `and_then(<f>)` instead
+ --> $DIR/map_unwrap_or.rs:29:13
+ |
+LL | let _ = opt.map(|x| Some(x + 1)).unwrap_or(None);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `and_then(<f>)` instead
+ |
+LL - let _ = opt.map(|x| Some(x + 1)).unwrap_or(None);
+LL + let _ = opt.and_then(|x| Some(x + 1));
+ |
+
+error: called `map(<f>).unwrap_or(None)` on an `Option` value. This can be done more directly by calling `and_then(<f>)` instead
+ --> $DIR/map_unwrap_or.rs:31:13
+ |
+LL | let _ = opt.map(|x| {
+ | _____________^
+LL | | Some(x + 1)
+LL | | }
+LL | | ).unwrap_or(None);
+ | |_____________________^
+ |
+help: use `and_then(<f>)` instead
+ |
+LL ~ let _ = opt.and_then(|x| {
+LL | Some(x + 1)
+LL | }
+LL ~ );
+ |
+
+error: called `map(<f>).unwrap_or(None)` on an `Option` value. This can be done more directly by calling `and_then(<f>)` instead
+ --> $DIR/map_unwrap_or.rs:35:13
+ |
+LL | let _ = opt
+ | _____________^
+LL | | .map(|x| Some(x + 1))
+LL | | .unwrap_or(None);
+ | |________________________^
+ |
+help: use `and_then(<f>)` instead
+ |
+LL - .map(|x| Some(x + 1))
+LL + .and_then(|x| Some(x + 1));
+ |
+
+error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:46:13
+ |
+LL | let _ = Some("prefix").map(|p| format!("{}.", p)).unwrap_or(id);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `map_or(<a>, <f>)` instead
+ |
+LL - let _ = Some("prefix").map(|p| format!("{}.", p)).unwrap_or(id);
+LL + let _ = Some("prefix").map_or(id, |p| format!("{}.", p));
+ |
+
+error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling `map_or_else(<g>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:50:13
+ |
+LL | let _ = opt.map(|x| {
+ | _____________^
+LL | | x + 1
+LL | | }
+LL | | ).unwrap_or_else(|| 0);
+ | |__________________________^
+
+error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling `map_or_else(<g>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:54:13
+ |
+LL | let _ = opt.map(|x| x + 1)
+ | _____________^
+LL | | .unwrap_or_else(||
+LL | | 0
+LL | | );
+ | |_________^
+
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:66:13
+ |
+LL | let _ = res.map(|x| {
+ | _____________^
+LL | | x + 1
+LL | | }
+LL | | ).unwrap_or_else(|_e| 0);
+ | |____________________________^
+
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+ --> $DIR/map_unwrap_or.rs:70:13
+ |
+LL | let _ = res.map(|x| x + 1)
+ | _____________^
+LL | | .unwrap_or_else(|_e| {
+LL | | 0
+LL | | });
+ | |__________^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.fixed b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.fixed
new file mode 100644
index 000000000..bd5b4f716
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.fixed
@@ -0,0 +1,54 @@
+// run-rustfix
+// aux-build:option_helpers.rs
+
+#![warn(clippy::map_unwrap_or)]
+
+#[macro_use]
+extern crate option_helpers;
+
+use std::collections::HashMap;
+
+#[rustfmt::skip]
+fn option_methods() {
+ let opt = Some(1);
+
+ // Check for `option.map(_).unwrap_or_else(_)` use.
+ // single line case
+ let _ = opt.map_or_else(|| 0, |x| x + 1);
+
+ // Macro case.
+ // Should not lint.
+ let _ = opt_map!(opt, |x| x + 1).unwrap_or_else(|| 0);
+
+ // Issue #4144
+ {
+ let mut frequencies = HashMap::new();
+ let word = "foo";
+
+ frequencies
+ .get_mut(word)
+ .map(|count| {
+ *count += 1;
+ })
+ .unwrap_or_else(|| {
+ frequencies.insert(word.to_owned(), 1);
+ });
+ }
+}
+
+#[rustfmt::skip]
+fn result_methods() {
+ let res: Result<i32, ()> = Ok(1);
+
+ // Check for `result.map(_).unwrap_or_else(_)` use.
+ // single line case
+ let _ = res.map_or_else(|_e| 0, |x| x + 1);
+
+ // macro case
+ let _ = opt_map!(res, |x| x + 1).unwrap_or_else(|_e| 0); // should not lint
+}
+
+fn main() {
+ option_methods();
+ result_methods();
+}
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.rs b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.rs
new file mode 100644
index 000000000..0b892caf2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.rs
@@ -0,0 +1,58 @@
+// run-rustfix
+// aux-build:option_helpers.rs
+
+#![warn(clippy::map_unwrap_or)]
+
+#[macro_use]
+extern crate option_helpers;
+
+use std::collections::HashMap;
+
+#[rustfmt::skip]
+fn option_methods() {
+ let opt = Some(1);
+
+ // Check for `option.map(_).unwrap_or_else(_)` use.
+ // single line case
+ let _ = opt.map(|x| x + 1)
+ // Should lint even though this call is on a separate line.
+ .unwrap_or_else(|| 0);
+
+ // Macro case.
+ // Should not lint.
+ let _ = opt_map!(opt, |x| x + 1).unwrap_or_else(|| 0);
+
+ // Issue #4144
+ {
+ let mut frequencies = HashMap::new();
+ let word = "foo";
+
+ frequencies
+ .get_mut(word)
+ .map(|count| {
+ *count += 1;
+ })
+ .unwrap_or_else(|| {
+ frequencies.insert(word.to_owned(), 1);
+ });
+ }
+}
+
+#[rustfmt::skip]
+fn result_methods() {
+ let res: Result<i32, ()> = Ok(1);
+
+ // Check for `result.map(_).unwrap_or_else(_)` use.
+ // single line case
+ let _ = res.map(|x| x + 1)
+ // should lint even though this call is on a separate line
+ .unwrap_or_else(|_e| 0);
+
+ // macro case
+ let _ = opt_map!(res, |x| x + 1).unwrap_or_else(|_e| 0); // should not lint
+}
+
+fn main() {
+ option_methods();
+ result_methods();
+}
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
new file mode 100644
index 000000000..1837bc2ca
--- /dev/null
+++ b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
@@ -0,0 +1,22 @@
+error: called `map(<f>).unwrap_or_else(<g>)` on an `Option` value. This can be done more directly by calling `map_or_else(<g>, <f>)` instead
+ --> $DIR/map_unwrap_or_fixable.rs:17:13
+ |
+LL | let _ = opt.map(|x| x + 1)
+ | _____________^
+LL | | // Should lint even though this call is on a separate line.
+LL | | .unwrap_or_else(|| 0);
+ | |_____________________________^ help: try this: `opt.map_or_else(|| 0, |x| x + 1)`
+ |
+ = note: `-D clippy::map-unwrap-or` implied by `-D warnings`
+
+error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be done more directly by calling `.map_or_else(<g>, <f>)` instead
+ --> $DIR/map_unwrap_or_fixable.rs:47:13
+ |
+LL | let _ = res.map(|x| x + 1)
+ | _____________^
+LL | | // should lint even though this call is on a separate line
+LL | | .unwrap_or_else(|_e| 0);
+ | |_______________________________^ help: try this: `res.map_or_else(|_e| 0, |x| x + 1)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_as_ref.fixed b/src/tools/clippy/tests/ui/match_as_ref.fixed
new file mode 100644
index 000000000..ddfa1e741
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_as_ref.fixed
@@ -0,0 +1,43 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::match_as_ref)]
+
+fn match_as_ref() {
+ let owned: Option<()> = None;
+ let borrowed: Option<&()> = owned.as_ref();
+
+ let mut mut_owned: Option<()> = None;
+ let borrow_mut: Option<&mut ()> = mut_owned.as_mut();
+}
+
+mod issue4437 {
+ use std::{error::Error, fmt, num::ParseIntError};
+
+ #[derive(Debug)]
+ struct E {
+ source: Option<ParseIntError>,
+ }
+
+ impl Error for E {
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
+ self.source.as_ref().map(|x| x as _)
+ }
+ }
+
+ impl fmt::Display for E {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ unimplemented!()
+ }
+ }
+}
+
+fn main() {
+ // Don't lint
+ let _ = match Some(0) {
+ #[cfg(feature = "foo")]
+ Some(ref x) if *x > 50 => None,
+ Some(ref x) => Some(x),
+ None => None,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/match_as_ref.rs b/src/tools/clippy/tests/ui/match_as_ref.rs
new file mode 100644
index 000000000..025d475ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_as_ref.rs
@@ -0,0 +1,52 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::match_as_ref)]
+
+fn match_as_ref() {
+ let owned: Option<()> = None;
+ let borrowed: Option<&()> = match owned {
+ None => None,
+ Some(ref v) => Some(v),
+ };
+
+ let mut mut_owned: Option<()> = None;
+ let borrow_mut: Option<&mut ()> = match mut_owned {
+ None => None,
+ Some(ref mut v) => Some(v),
+ };
+}
+
+mod issue4437 {
+ use std::{error::Error, fmt, num::ParseIntError};
+
+ #[derive(Debug)]
+ struct E {
+ source: Option<ParseIntError>,
+ }
+
+ impl Error for E {
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
+ match self.source {
+ Some(ref s) => Some(s),
+ None => None,
+ }
+ }
+ }
+
+ impl fmt::Display for E {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ unimplemented!()
+ }
+ }
+}
+
+fn main() {
+ // Don't lint
+ let _ = match Some(0) {
+ #[cfg(feature = "foo")]
+ Some(ref x) if *x > 50 => None,
+ Some(ref x) => Some(x),
+ None => None,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/match_as_ref.stderr b/src/tools/clippy/tests/ui/match_as_ref.stderr
new file mode 100644
index 000000000..c3b62849c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_as_ref.stderr
@@ -0,0 +1,33 @@
+error: use `as_ref()` instead
+ --> $DIR/match_as_ref.rs:8:33
+ |
+LL | let borrowed: Option<&()> = match owned {
+ | _________________________________^
+LL | | None => None,
+LL | | Some(ref v) => Some(v),
+LL | | };
+ | |_____^ help: try this: `owned.as_ref()`
+ |
+ = note: `-D clippy::match-as-ref` implied by `-D warnings`
+
+error: use `as_mut()` instead
+ --> $DIR/match_as_ref.rs:14:39
+ |
+LL | let borrow_mut: Option<&mut ()> = match mut_owned {
+ | _______________________________________^
+LL | | None => None,
+LL | | Some(ref mut v) => Some(v),
+LL | | };
+ | |_____^ help: try this: `mut_owned.as_mut()`
+
+error: use `as_ref()` instead
+ --> $DIR/match_as_ref.rs:30:13
+ |
+LL | / match self.source {
+LL | | Some(ref s) => Some(s),
+LL | | None => None,
+LL | | }
+ | |_____________^ help: try this: `self.source.as_ref().map(|x| x as _)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_bool.rs b/src/tools/clippy/tests/ui/match_bool.rs
new file mode 100644
index 000000000..bcc999a49
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_bool.rs
@@ -0,0 +1,63 @@
+#![deny(clippy::match_bool)]
+
+fn match_bool() {
+ let test: bool = true;
+
+ match test {
+ true => 0,
+ false => 42,
+ };
+
+ let option = 1;
+ match option == 1 {
+ true => 1,
+ false => 0,
+ };
+
+ match test {
+ true => (),
+ false => {
+ println!("Noooo!");
+ },
+ };
+
+ match test {
+ false => {
+ println!("Noooo!");
+ },
+ _ => (),
+ };
+
+ match test && test {
+ false => {
+ println!("Noooo!");
+ },
+ _ => (),
+ };
+
+ match test {
+ false => {
+ println!("Noooo!");
+ },
+ true => {
+ println!("Yes!");
+ },
+ };
+
+ // Not linted
+ match option {
+ 1..=10 => 1,
+ 11..=20 => 2,
+ _ => 3,
+ };
+
+ // Don't lint
+ let _ = match test {
+ #[cfg(feature = "foo")]
+ true if option == 5 => 10,
+ true => 0,
+ false => 1,
+ };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_bool.stderr b/src/tools/clippy/tests/ui/match_bool.stderr
new file mode 100644
index 000000000..3fd0468e5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_bool.stderr
@@ -0,0 +1,117 @@
+error: this boolean expression can be simplified
+ --> $DIR/match_bool.rs:31:11
+ |
+LL | match test && test {
+ | ^^^^^^^^^^^^ help: try: `test`
+ |
+ = note: `-D clippy::nonminimal-bool` implied by `-D warnings`
+
+error: you seem to be trying to match on a boolean expression
+ --> $DIR/match_bool.rs:6:5
+ |
+LL | / match test {
+LL | | true => 0,
+LL | | false => 42,
+LL | | };
+ | |_____^ help: consider using an `if`/`else` expression: `if test { 0 } else { 42 }`
+ |
+note: the lint level is defined here
+ --> $DIR/match_bool.rs:1:9
+ |
+LL | #![deny(clippy::match_bool)]
+ | ^^^^^^^^^^^^^^^^^^
+
+error: you seem to be trying to match on a boolean expression
+ --> $DIR/match_bool.rs:12:5
+ |
+LL | / match option == 1 {
+LL | | true => 1,
+LL | | false => 0,
+LL | | };
+ | |_____^ help: consider using an `if`/`else` expression: `if option == 1 { 1 } else { 0 }`
+
+error: you seem to be trying to match on a boolean expression
+ --> $DIR/match_bool.rs:17:5
+ |
+LL | / match test {
+LL | | true => (),
+LL | | false => {
+LL | | println!("Noooo!");
+LL | | },
+LL | | };
+ | |_____^
+ |
+help: consider using an `if`/`else` expression
+ |
+LL ~ if !test {
+LL + println!("Noooo!");
+LL ~ };
+ |
+
+error: you seem to be trying to match on a boolean expression
+ --> $DIR/match_bool.rs:24:5
+ |
+LL | / match test {
+LL | | false => {
+LL | | println!("Noooo!");
+LL | | },
+LL | | _ => (),
+LL | | };
+ | |_____^
+ |
+help: consider using an `if`/`else` expression
+ |
+LL ~ if !test {
+LL + println!("Noooo!");
+LL ~ };
+ |
+
+error: you seem to be trying to match on a boolean expression
+ --> $DIR/match_bool.rs:31:5
+ |
+LL | / match test && test {
+LL | | false => {
+LL | | println!("Noooo!");
+LL | | },
+LL | | _ => (),
+LL | | };
+ | |_____^
+ |
+help: consider using an `if`/`else` expression
+ |
+LL ~ if !(test && test) {
+LL + println!("Noooo!");
+LL ~ };
+ |
+
+error: equal expressions as operands to `&&`
+ --> $DIR/match_bool.rs:31:11
+ |
+LL | match test && test {
+ | ^^^^^^^^^^^^
+ |
+ = note: `#[deny(clippy::eq_op)]` on by default
+
+error: you seem to be trying to match on a boolean expression
+ --> $DIR/match_bool.rs:38:5
+ |
+LL | / match test {
+LL | | false => {
+LL | | println!("Noooo!");
+LL | | },
+... |
+LL | | },
+LL | | };
+ | |_____^
+ |
+help: consider using an `if`/`else` expression
+ |
+LL ~ if test {
+LL + println!("Yes!");
+LL + } else {
+LL + println!("Noooo!");
+LL ~ };
+ |
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed
new file mode 100644
index 000000000..1ccbfda64
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed
@@ -0,0 +1,170 @@
+// run-rustfix
+
+#![warn(clippy::match_like_matches_macro)]
+#![allow(unreachable_patterns, dead_code, clippy::equatable_if_let)]
+
+fn main() {
+ let x = Some(5);
+
+ // Lint
+ let _y = matches!(x, Some(0));
+
+ // Lint
+ let _w = matches!(x, Some(_));
+
+ // Turn into is_none
+ let _z = x.is_none();
+
+ // Lint
+ let _zz = !matches!(x, Some(r) if r == 0);
+
+ // Lint
+ let _zzz = matches!(x, Some(5));
+
+ // No lint
+ let _a = match x {
+ Some(_) => false,
+ _ => false,
+ };
+
+ // No lint
+ let _ab = match x {
+ Some(0) => false,
+ _ => true,
+ None => false,
+ };
+
+ enum E {
+ A(u32),
+ B(i32),
+ C,
+ D,
+ }
+ let x = E::A(2);
+ {
+ // lint
+ let _ans = matches!(x, E::A(_) | E::B(_));
+ }
+ {
+ // lint
+ // skip rustfmt to prevent removing block for first pattern
+ #[rustfmt::skip]
+ let _ans = matches!(x, E::A(_) | E::B(_));
+ }
+ {
+ // lint
+ let _ans = !matches!(x, E::B(_) | E::C);
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => false,
+ E::B(_) => false,
+ E::C => true,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => true,
+ E::B(_) => false,
+ E::C => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(a) if a < 10 => false,
+ E::B(a) if a < 10 => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => false,
+ E::B(a) if a < 10 => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(a) => a == 10,
+ E::B(_) => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => false,
+ E::B(_) => true,
+ _ => false,
+ };
+ }
+
+ {
+ // should print "z" in suggestion (#6503)
+ let z = &Some(3);
+ let _z = matches!(z, Some(3));
+ }
+
+ {
+ // this could also print "z" in suggestion..?
+ let z = Some(3);
+ let _z = matches!(&z, Some(3));
+ }
+
+ {
+ enum AnEnum {
+ X,
+ Y,
+ }
+
+ fn foo(_x: AnEnum) {}
+
+ fn main() {
+ let z = AnEnum::X;
+ // we can't remove the reference here!
+ let _ = matches!(&z, AnEnum::X);
+ foo(z);
+ }
+ }
+
+ {
+ struct S(i32);
+
+ fn fun(_val: Option<S>) {}
+ let val = Some(S(42));
+ // we need the reference here because later val is consumed by fun()
+ let _res = matches!(&val, &Some(ref _a));
+ fun(val);
+ }
+
+ {
+ struct S(i32);
+
+ fn fun(_val: Option<S>) {}
+ let val = Some(S(42));
+ let _res = matches!(&val, &Some(ref _a));
+ fun(val);
+ }
+
+ {
+ enum E {
+ A,
+ B,
+ C,
+ }
+
+ let _ = match E::A {
+ E::B => true,
+ #[cfg(feature = "foo")]
+ E::A => true,
+ _ => false,
+ };
+ }
+}
diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs
new file mode 100644
index 000000000..a49991f59
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs
@@ -0,0 +1,211 @@
+// run-rustfix
+
+#![warn(clippy::match_like_matches_macro)]
+#![allow(unreachable_patterns, dead_code, clippy::equatable_if_let)]
+
+fn main() {
+ let x = Some(5);
+
+ // Lint
+ let _y = match x {
+ Some(0) => true,
+ _ => false,
+ };
+
+ // Lint
+ let _w = match x {
+ Some(_) => true,
+ _ => false,
+ };
+
+ // Turn into is_none
+ let _z = match x {
+ Some(_) => false,
+ None => true,
+ };
+
+ // Lint
+ let _zz = match x {
+ Some(r) if r == 0 => false,
+ _ => true,
+ };
+
+ // Lint
+ let _zzz = if let Some(5) = x { true } else { false };
+
+ // No lint
+ let _a = match x {
+ Some(_) => false,
+ _ => false,
+ };
+
+ // No lint
+ let _ab = match x {
+ Some(0) => false,
+ _ => true,
+ None => false,
+ };
+
+ enum E {
+ A(u32),
+ B(i32),
+ C,
+ D,
+ }
+ let x = E::A(2);
+ {
+ // lint
+ let _ans = match x {
+ E::A(_) => true,
+ E::B(_) => true,
+ _ => false,
+ };
+ }
+ {
+ // lint
+ // skip rustfmt to prevent removing block for first pattern
+ #[rustfmt::skip]
+ let _ans = match x {
+ E::A(_) => {
+ true
+ }
+ E::B(_) => true,
+ _ => false,
+ };
+ }
+ {
+ // lint
+ let _ans = match x {
+ E::B(_) => false,
+ E::C => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => false,
+ E::B(_) => false,
+ E::C => true,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => true,
+ E::B(_) => false,
+ E::C => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(a) if a < 10 => false,
+ E::B(a) if a < 10 => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => false,
+ E::B(a) if a < 10 => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(a) => a == 10,
+ E::B(_) => false,
+ _ => true,
+ };
+ }
+ {
+ // no lint
+ let _ans = match x {
+ E::A(_) => false,
+ E::B(_) => true,
+ _ => false,
+ };
+ }
+
+ {
+ // should print "z" in suggestion (#6503)
+ let z = &Some(3);
+ let _z = match &z {
+ Some(3) => true,
+ _ => false,
+ };
+ }
+
+ {
+ // this could also print "z" in suggestion..?
+ let z = Some(3);
+ let _z = match &z {
+ Some(3) => true,
+ _ => false,
+ };
+ }
+
+ {
+ enum AnEnum {
+ X,
+ Y,
+ }
+
+ fn foo(_x: AnEnum) {}
+
+ fn main() {
+ let z = AnEnum::X;
+ // we can't remove the reference here!
+ let _ = match &z {
+ AnEnum::X => true,
+ _ => false,
+ };
+ foo(z);
+ }
+ }
+
+ {
+ struct S(i32);
+
+ fn fun(_val: Option<S>) {}
+ let val = Some(S(42));
+ // we need the reference here because later val is consumed by fun()
+ let _res = match &val {
+ &Some(ref _a) => true,
+ _ => false,
+ };
+ fun(val);
+ }
+
+ {
+ struct S(i32);
+
+ fn fun(_val: Option<S>) {}
+ let val = Some(S(42));
+ let _res = match &val {
+ &Some(ref _a) => true,
+ _ => false,
+ };
+ fun(val);
+ }
+
+ {
+ enum E {
+ A,
+ B,
+ C,
+ }
+
+ let _ = match E::A {
+ E::B => true,
+ #[cfg(feature = "foo")]
+ E::A => true,
+ _ => false,
+ };
+ }
+}
diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr
new file mode 100644
index 000000000..e94555e27
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr
@@ -0,0 +1,137 @@
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:10:14
+ |
+LL | let _y = match x {
+ | ______________^
+LL | | Some(0) => true,
+LL | | _ => false,
+LL | | };
+ | |_____^ help: try this: `matches!(x, Some(0))`
+ |
+ = note: `-D clippy::match-like-matches-macro` implied by `-D warnings`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:16:14
+ |
+LL | let _w = match x {
+ | ______________^
+LL | | Some(_) => true,
+LL | | _ => false,
+LL | | };
+ | |_____^ help: try this: `matches!(x, Some(_))`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/match_expr_like_matches_macro.rs:22:14
+ |
+LL | let _z = match x {
+ | ______________^
+LL | | Some(_) => false,
+LL | | None => true,
+LL | | };
+ | |_____^ help: try this: `x.is_none()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:28:15
+ |
+LL | let _zz = match x {
+ | _______________^
+LL | | Some(r) if r == 0 => false,
+LL | | _ => true,
+LL | | };
+ | |_____^ help: try this: `!matches!(x, Some(r) if r == 0)`
+
+error: if let .. else expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:34:16
+ |
+LL | let _zzz = if let Some(5) = x { true } else { false };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `matches!(x, Some(5))`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:58:20
+ |
+LL | let _ans = match x {
+ | ____________________^
+LL | | E::A(_) => true,
+LL | | E::B(_) => true,
+LL | | _ => false,
+LL | | };
+ | |_________^ help: try this: `matches!(x, E::A(_) | E::B(_))`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:68:20
+ |
+LL | let _ans = match x {
+ | ____________________^
+LL | | E::A(_) => {
+LL | | true
+LL | | }
+LL | | E::B(_) => true,
+LL | | _ => false,
+LL | | };
+ | |_________^ help: try this: `matches!(x, E::A(_) | E::B(_))`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:78:20
+ |
+LL | let _ans = match x {
+ | ____________________^
+LL | | E::B(_) => false,
+LL | | E::C => false,
+LL | | _ => true,
+LL | | };
+ | |_________^ help: try this: `!matches!(x, E::B(_) | E::C)`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:138:18
+ |
+LL | let _z = match &z {
+ | __________________^
+LL | | Some(3) => true,
+LL | | _ => false,
+LL | | };
+ | |_________^ help: try this: `matches!(z, Some(3))`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:147:18
+ |
+LL | let _z = match &z {
+ | __________________^
+LL | | Some(3) => true,
+LL | | _ => false,
+LL | | };
+ | |_________^ help: try this: `matches!(&z, Some(3))`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:164:21
+ |
+LL | let _ = match &z {
+ | _____________________^
+LL | | AnEnum::X => true,
+LL | | _ => false,
+LL | | };
+ | |_____________^ help: try this: `matches!(&z, AnEnum::X)`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:178:20
+ |
+LL | let _res = match &val {
+ | ____________________^
+LL | | &Some(ref _a) => true,
+LL | | _ => false,
+LL | | };
+ | |_________^ help: try this: `matches!(&val, &Some(ref _a))`
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_expr_like_matches_macro.rs:190:20
+ |
+LL | let _res = match &val {
+ | ____________________^
+LL | | &Some(ref _a) => true,
+LL | | _ => false,
+LL | | };
+ | |_________^ help: try this: `matches!(&val, &Some(ref _a))`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_on_vec_items.rs b/src/tools/clippy/tests/ui/match_on_vec_items.rs
new file mode 100644
index 000000000..30415e3b9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_on_vec_items.rs
@@ -0,0 +1,152 @@
+#![warn(clippy::match_on_vec_items)]
+
+fn match_with_wildcard() {
+ let arr = vec![0, 1, 2, 3];
+ let range = 1..3;
+ let idx = 1;
+
+ // Lint, may panic
+ match arr[idx] {
+ 0 => println!("0"),
+ 1 => println!("1"),
+ _ => {},
+ }
+
+ // Lint, may panic
+ match arr[range] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ _ => {},
+ }
+}
+
+fn match_without_wildcard() {
+ let arr = vec![0, 1, 2, 3];
+ let range = 1..3;
+ let idx = 2;
+
+ // Lint, may panic
+ match arr[idx] {
+ 0 => println!("0"),
+ 1 => println!("1"),
+ num => {},
+ }
+
+ // Lint, may panic
+ match arr[range] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ [ref sub @ ..] => {},
+ }
+}
+
+fn match_wildcard_and_action() {
+ let arr = vec![0, 1, 2, 3];
+ let range = 1..3;
+ let idx = 3;
+
+ // Lint, may panic
+ match arr[idx] {
+ 0 => println!("0"),
+ 1 => println!("1"),
+ _ => println!("Hello, World!"),
+ }
+
+ // Lint, may panic
+ match arr[range] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ _ => println!("Hello, World!"),
+ }
+}
+
+fn match_vec_ref() {
+ let arr = &vec![0, 1, 2, 3];
+ let range = 1..3;
+ let idx = 3;
+
+ // Lint, may panic
+ match arr[idx] {
+ 0 => println!("0"),
+ 1 => println!("1"),
+ _ => {},
+ }
+
+ // Lint, may panic
+ match arr[range] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ _ => {},
+ }
+}
+
+fn match_with_get() {
+ let arr = vec![0, 1, 2, 3];
+ let range = 1..3;
+ let idx = 3;
+
+ // Ok
+ match arr.get(idx) {
+ Some(0) => println!("0"),
+ Some(1) => println!("1"),
+ _ => {},
+ }
+
+ // Ok
+ match arr.get(range) {
+ Some(&[0, 1]) => println!("0 1"),
+ Some(&[1, 2]) => println!("1 2"),
+ _ => {},
+ }
+}
+
+fn match_with_array() {
+ let arr = [0, 1, 2, 3];
+ let range = 1..3;
+ let idx = 3;
+
+ // Ok
+ match arr[idx] {
+ 0 => println!("0"),
+ 1 => println!("1"),
+ _ => {},
+ }
+
+ // Ok
+ match arr[range] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ _ => {},
+ }
+}
+
+fn match_with_endless_range() {
+ let arr = vec![0, 1, 2, 3];
+ let range = ..;
+
+ // Ok
+ match arr[range] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ [0, 1, 2, 3] => println!("0, 1, 2, 3"),
+ _ => {},
+ }
+
+ // Ok
+ match arr[..] {
+ [0, 1] => println!("0 1"),
+ [1, 2] => println!("1 2"),
+ [0, 1, 2, 3] => println!("0, 1, 2, 3"),
+ _ => {},
+ }
+}
+
+fn main() {
+ match_with_wildcard();
+ match_without_wildcard();
+ match_wildcard_and_action();
+ match_vec_ref();
+ match_with_get();
+ match_with_array();
+ match_with_endless_range();
+}
diff --git a/src/tools/clippy/tests/ui/match_on_vec_items.stderr b/src/tools/clippy/tests/ui/match_on_vec_items.stderr
new file mode 100644
index 000000000..49446d715
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_on_vec_items.stderr
@@ -0,0 +1,52 @@
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:9:11
+ |
+LL | match arr[idx] {
+ | ^^^^^^^^ help: try this: `arr.get(idx)`
+ |
+ = note: `-D clippy::match-on-vec-items` implied by `-D warnings`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:16:11
+ |
+LL | match arr[range] {
+ | ^^^^^^^^^^ help: try this: `arr.get(range)`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:29:11
+ |
+LL | match arr[idx] {
+ | ^^^^^^^^ help: try this: `arr.get(idx)`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:36:11
+ |
+LL | match arr[range] {
+ | ^^^^^^^^^^ help: try this: `arr.get(range)`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:49:11
+ |
+LL | match arr[idx] {
+ | ^^^^^^^^ help: try this: `arr.get(idx)`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:56:11
+ |
+LL | match arr[range] {
+ | ^^^^^^^^^^ help: try this: `arr.get(range)`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:69:11
+ |
+LL | match arr[idx] {
+ | ^^^^^^^^ help: try this: `arr.get(idx)`
+
+error: indexing into a vector may panic
+ --> $DIR/match_on_vec_items.rs:76:11
+ |
+LL | match arr[range] {
+ | ^^^^^^^^^^ help: try this: `arr.get(range)`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_overlapping_arm.rs b/src/tools/clippy/tests/ui/match_overlapping_arm.rs
new file mode 100644
index 000000000..2f85e6357
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_overlapping_arm.rs
@@ -0,0 +1,135 @@
+#![feature(exclusive_range_pattern)]
+#![feature(half_open_range_patterns)]
+#![warn(clippy::match_overlapping_arm)]
+#![allow(clippy::redundant_pattern_matching)]
+#![allow(clippy::if_same_then_else, clippy::equatable_if_let)]
+
+/// Tests for match_overlapping_arm
+
+fn overlapping() {
+ const FOO: u64 = 2;
+
+ match 42 {
+ 0..=10 => println!("0..=10"),
+ 0..=11 => println!("0..=11"),
+ _ => (),
+ }
+
+ match 42 {
+ 0..=5 => println!("0..=5"),
+ 6..=7 => println!("6..=7"),
+ FOO..=11 => println!("FOO..=11"),
+ _ => (),
+ }
+
+ match 42 {
+ 2 => println!("2"),
+ 0..=5 => println!("0..=5"),
+ _ => (),
+ }
+
+ match 42 {
+ 2 => println!("2"),
+ 0..=2 => println!("0..=2"),
+ _ => (),
+ }
+
+ match 42 {
+ 0..=10 => println!("0..=10"),
+ 11..=50 => println!("11..=50"),
+ _ => (),
+ }
+
+ match 42 {
+ 2 => println!("2"),
+ 0..2 => println!("0..2"),
+ _ => (),
+ }
+
+ match 42 {
+ 0..10 => println!("0..10"),
+ 10..50 => println!("10..50"),
+ _ => (),
+ }
+
+ match 42 {
+ 0..11 => println!("0..11"),
+ 0..=11 => println!("0..=11"),
+ _ => (),
+ }
+
+ match 42 {
+ 5..7 => println!("5..7"),
+ 0..10 => println!("0..10"),
+ _ => (),
+ }
+
+ match 42 {
+ 5..10 => println!("5..10"),
+ 0..=10 => println!("0..=10"),
+ _ => (),
+ }
+
+ match 42 {
+ 0..14 => println!("0..14"),
+ 5..10 => println!("5..10"),
+ _ => (),
+ }
+
+ match 42 {
+ 5..14 => println!("5..14"),
+ 0..=10 => println!("0..=10"),
+ _ => (),
+ }
+
+ match 42 {
+ 0..7 => println!("0..7"),
+ 0..=10 => println!("0..=10"),
+ _ => (),
+ }
+
+ match 42 {
+ 3.. => println!("3.."),
+ 0.. => println!("0.."),
+ _ => (),
+ }
+
+ match 42 {
+ ..=23 => println!("..=23"),
+ ..26 => println!("..26"),
+ _ => (),
+ }
+
+ // Issue #7816 - overlap after included range
+ match 42 {
+ 5..=10 => (),
+ 0..=20 => (),
+ 21..=30 => (),
+ 21..=40 => (),
+ _ => (),
+ }
+
+ // Issue #7829
+ match 0 {
+ -1..=1 => (),
+ -2..=2 => (),
+ _ => (),
+ }
+
+ // Only warn about the first if there are multiple overlaps
+ match 42u128 {
+ 0..=0x0000_0000_0000_00ff => (),
+ 0..=0x0000_0000_0000_ffff => (),
+ 0..=0x0000_0000_ffff_ffff => (),
+ 0..=0xffff_ffff_ffff_ffff => (),
+ _ => (),
+ }
+
+ if let None = Some(42) {
+ // nothing
+ } else if let None = Some(42) {
+ // another nothing :-)
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_overlapping_arm.stderr b/src/tools/clippy/tests/ui/match_overlapping_arm.stderr
new file mode 100644
index 000000000..b81bb1ecf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_overlapping_arm.stderr
@@ -0,0 +1,99 @@
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:13:9
+ |
+LL | 0..=10 => println!("0..=10"),
+ | ^^^^^^
+ |
+ = note: `-D clippy::match-overlapping-arm` implied by `-D warnings`
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:14:9
+ |
+LL | 0..=11 => println!("0..=11"),
+ | ^^^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:19:9
+ |
+LL | 0..=5 => println!("0..=5"),
+ | ^^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:21:9
+ |
+LL | FOO..=11 => println!("FOO..=11"),
+ | ^^^^^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:56:9
+ |
+LL | 0..11 => println!("0..11"),
+ | ^^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:57:9
+ |
+LL | 0..=11 => println!("0..=11"),
+ | ^^^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:81:9
+ |
+LL | 0..=10 => println!("0..=10"),
+ | ^^^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:80:9
+ |
+LL | 5..14 => println!("5..14"),
+ | ^^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:86:9
+ |
+LL | 0..7 => println!("0..7"),
+ | ^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:87:9
+ |
+LL | 0..=10 => println!("0..=10"),
+ | ^^^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:98:9
+ |
+LL | ..=23 => println!("..=23"),
+ | ^^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:99:9
+ |
+LL | ..26 => println!("..26"),
+ | ^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:107:9
+ |
+LL | 21..=30 => (),
+ | ^^^^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:108:9
+ |
+LL | 21..=40 => (),
+ | ^^^^^^^
+
+error: some ranges overlap
+ --> $DIR/match_overlapping_arm.rs:121:9
+ |
+LL | 0..=0x0000_0000_0000_00ff => (),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: overlaps with this
+ --> $DIR/match_overlapping_arm.rs:122:9
+ |
+LL | 0..=0x0000_0000_0000_ffff => (),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_ref_pats.fixed b/src/tools/clippy/tests/ui/match_ref_pats.fixed
new file mode 100644
index 000000000..1b6c2d924
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_ref_pats.fixed
@@ -0,0 +1,118 @@
+// run-rustfix
+#![warn(clippy::match_ref_pats)]
+#![allow(dead_code, unused_variables, clippy::equatable_if_let, clippy::enum_variant_names)]
+
+fn ref_pats() {
+ {
+ let v = &Some(0);
+ match *v {
+ Some(v) => println!("{:?}", v),
+ None => println!("none"),
+ }
+ match v {
+ // This doesn't trigger; we have a different pattern.
+ &Some(v) => println!("some"),
+ other => println!("other"),
+ }
+ }
+ let tup = &(1, 2);
+ match tup {
+ &(v, 1) => println!("{}", v),
+ _ => println!("none"),
+ }
+ // Special case: using `&` both in expr and pats.
+ let w = Some(0);
+ match w {
+ Some(v) => println!("{:?}", v),
+ None => println!("none"),
+ }
+ // False positive: only wildcard pattern.
+ let w = Some(0);
+ #[allow(clippy::match_single_binding)]
+ match w {
+ _ => println!("none"),
+ }
+
+ let a = &Some(0);
+ if a.is_none() {
+ println!("none");
+ }
+
+ let b = Some(0);
+ if b.is_none() {
+ println!("none");
+ }
+}
+
+mod ice_3719 {
+ macro_rules! foo_variant(
+ ($idx:expr) => (Foo::get($idx).unwrap())
+ );
+
+ enum Foo {
+ A,
+ B,
+ }
+
+ impl Foo {
+ fn get(idx: u8) -> Option<&'static Self> {
+ match idx {
+ 0 => Some(&Foo::A),
+ 1 => Some(&Foo::B),
+ _ => None,
+ }
+ }
+ }
+
+ fn ice_3719() {
+ // ICE #3719
+ match foo_variant!(0) {
+ &Foo::A => println!("A"),
+ _ => println!("Wild"),
+ }
+ }
+}
+
+mod issue_7740 {
+ macro_rules! foobar_variant(
+ ($idx:expr) => (FooBar::get($idx).unwrap())
+ );
+
+ enum FooBar {
+ Foo,
+ Bar,
+ FooBar,
+ BarFoo,
+ }
+
+ impl FooBar {
+ fn get(idx: u8) -> Option<&'static Self> {
+ match idx {
+ 0 => Some(&FooBar::Foo),
+ 1 => Some(&FooBar::Bar),
+ 2 => Some(&FooBar::FooBar),
+ 3 => Some(&FooBar::BarFoo),
+ _ => None,
+ }
+ }
+ }
+
+ fn issue_7740() {
+ // Issue #7740
+ match *foobar_variant!(0) {
+ FooBar::Foo => println!("Foo"),
+ FooBar::Bar => println!("Bar"),
+ FooBar::FooBar => println!("FooBar"),
+ _ => println!("Wild"),
+ }
+
+ // This shouldn't trigger
+ if let &FooBar::BarFoo = foobar_variant!(3) {
+ println!("BarFoo");
+ } else {
+ println!("Wild");
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_ref_pats.rs b/src/tools/clippy/tests/ui/match_ref_pats.rs
new file mode 100644
index 000000000..68dfac4e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_ref_pats.rs
@@ -0,0 +1,118 @@
+// run-rustfix
+#![warn(clippy::match_ref_pats)]
+#![allow(dead_code, unused_variables, clippy::equatable_if_let, clippy::enum_variant_names)]
+
+fn ref_pats() {
+ {
+ let v = &Some(0);
+ match v {
+ &Some(v) => println!("{:?}", v),
+ &None => println!("none"),
+ }
+ match v {
+ // This doesn't trigger; we have a different pattern.
+ &Some(v) => println!("some"),
+ other => println!("other"),
+ }
+ }
+ let tup = &(1, 2);
+ match tup {
+ &(v, 1) => println!("{}", v),
+ _ => println!("none"),
+ }
+ // Special case: using `&` both in expr and pats.
+ let w = Some(0);
+ match &w {
+ &Some(v) => println!("{:?}", v),
+ &None => println!("none"),
+ }
+ // False positive: only wildcard pattern.
+ let w = Some(0);
+ #[allow(clippy::match_single_binding)]
+ match w {
+ _ => println!("none"),
+ }
+
+ let a = &Some(0);
+ if let &None = a {
+ println!("none");
+ }
+
+ let b = Some(0);
+ if let &None = &b {
+ println!("none");
+ }
+}
+
+mod ice_3719 {
+ macro_rules! foo_variant(
+ ($idx:expr) => (Foo::get($idx).unwrap())
+ );
+
+ enum Foo {
+ A,
+ B,
+ }
+
+ impl Foo {
+ fn get(idx: u8) -> Option<&'static Self> {
+ match idx {
+ 0 => Some(&Foo::A),
+ 1 => Some(&Foo::B),
+ _ => None,
+ }
+ }
+ }
+
+ fn ice_3719() {
+ // ICE #3719
+ match foo_variant!(0) {
+ &Foo::A => println!("A"),
+ _ => println!("Wild"),
+ }
+ }
+}
+
+mod issue_7740 {
+ macro_rules! foobar_variant(
+ ($idx:expr) => (FooBar::get($idx).unwrap())
+ );
+
+ enum FooBar {
+ Foo,
+ Bar,
+ FooBar,
+ BarFoo,
+ }
+
+ impl FooBar {
+ fn get(idx: u8) -> Option<&'static Self> {
+ match idx {
+ 0 => Some(&FooBar::Foo),
+ 1 => Some(&FooBar::Bar),
+ 2 => Some(&FooBar::FooBar),
+ 3 => Some(&FooBar::BarFoo),
+ _ => None,
+ }
+ }
+ }
+
+ fn issue_7740() {
+ // Issue #7740
+ match foobar_variant!(0) {
+ &FooBar::Foo => println!("Foo"),
+ &FooBar::Bar => println!("Bar"),
+ &FooBar::FooBar => println!("FooBar"),
+ _ => println!("Wild"),
+ }
+
+ // This shouldn't trigger
+ if let &FooBar::BarFoo = foobar_variant!(3) {
+ println!("BarFoo");
+ } else {
+ println!("Wild");
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_ref_pats.stderr b/src/tools/clippy/tests/ui/match_ref_pats.stderr
new file mode 100644
index 000000000..353f7399d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_ref_pats.stderr
@@ -0,0 +1,68 @@
+error: you don't need to add `&` to all patterns
+ --> $DIR/match_ref_pats.rs:8:9
+ |
+LL | / match v {
+LL | | &Some(v) => println!("{:?}", v),
+LL | | &None => println!("none"),
+LL | | }
+ | |_________^
+ |
+ = note: `-D clippy::match-ref-pats` implied by `-D warnings`
+help: instead of prefixing all patterns with `&`, you can dereference the expression
+ |
+LL ~ match *v {
+LL ~ Some(v) => println!("{:?}", v),
+LL ~ None => println!("none"),
+ |
+
+error: you don't need to add `&` to both the expression and the patterns
+ --> $DIR/match_ref_pats.rs:25:5
+ |
+LL | / match &w {
+LL | | &Some(v) => println!("{:?}", v),
+LL | | &None => println!("none"),
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ match w {
+LL ~ Some(v) => println!("{:?}", v),
+LL ~ None => println!("none"),
+ |
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/match_ref_pats.rs:37:12
+ |
+LL | if let &None = a {
+ | -------^^^^^---- help: try this: `if a.is_none()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/match_ref_pats.rs:42:12
+ |
+LL | if let &None = &b {
+ | -------^^^^^----- help: try this: `if b.is_none()`
+
+error: you don't need to add `&` to all patterns
+ --> $DIR/match_ref_pats.rs:102:9
+ |
+LL | / match foobar_variant!(0) {
+LL | | &FooBar::Foo => println!("Foo"),
+LL | | &FooBar::Bar => println!("Bar"),
+LL | | &FooBar::FooBar => println!("FooBar"),
+LL | | _ => println!("Wild"),
+LL | | }
+ | |_________^
+ |
+help: instead of prefixing all patterns with `&`, you can dereference the expression
+ |
+LL ~ match *foobar_variant!(0) {
+LL ~ FooBar::Foo => println!("Foo"),
+LL ~ FooBar::Bar => println!("Bar"),
+LL ~ FooBar::FooBar => println!("FooBar"),
+ |
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_result_ok.fixed b/src/tools/clippy/tests/ui/match_result_ok.fixed
new file mode 100644
index 000000000..d4760a975
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_result_ok.fixed
@@ -0,0 +1,63 @@
+// run-rustfix
+
+#![warn(clippy::match_result_ok)]
+#![allow(clippy::boxed_local)]
+#![allow(dead_code)]
+
+// Checking `if` cases
+
+fn str_to_int(x: &str) -> i32 {
+ if let Ok(y) = x.parse() { y } else { 0 }
+}
+
+fn str_to_int_ok(x: &str) -> i32 {
+ if let Ok(y) = x.parse() { y } else { 0 }
+}
+
+#[rustfmt::skip]
+fn strange_some_no_else(x: &str) -> i32 {
+ {
+ if let Ok(y) = x . parse() {
+ return y;
+ };
+ 0
+ }
+}
+
+// Checking `while` cases
+
+struct Wat {
+ counter: i32,
+}
+
+impl Wat {
+ fn next(&mut self) -> Result<i32, &str> {
+ self.counter += 1;
+ if self.counter < 5 {
+ Ok(self.counter)
+ } else {
+ Err("Oh no")
+ }
+ }
+}
+
+fn base_1(x: i32) {
+ let mut wat = Wat { counter: x };
+ while let Ok(a) = wat.next() {
+ println!("{}", a);
+ }
+}
+
+fn base_2(x: i32) {
+ let mut wat = Wat { counter: x };
+ while let Ok(a) = wat.next() {
+ println!("{}", a);
+ }
+}
+
+fn base_3(test_func: Box<Result<i32, &str>>) {
+ // Expected to stay as is
+ while let Some(_b) = test_func.ok() {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_result_ok.rs b/src/tools/clippy/tests/ui/match_result_ok.rs
new file mode 100644
index 000000000..0b818723d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_result_ok.rs
@@ -0,0 +1,63 @@
+// run-rustfix
+
+#![warn(clippy::match_result_ok)]
+#![allow(clippy::boxed_local)]
+#![allow(dead_code)]
+
+// Checking `if` cases
+
+fn str_to_int(x: &str) -> i32 {
+ if let Some(y) = x.parse().ok() { y } else { 0 }
+}
+
+fn str_to_int_ok(x: &str) -> i32 {
+ if let Ok(y) = x.parse() { y } else { 0 }
+}
+
+#[rustfmt::skip]
+fn strange_some_no_else(x: &str) -> i32 {
+ {
+ if let Some(y) = x . parse() . ok () {
+ return y;
+ };
+ 0
+ }
+}
+
+// Checking `while` cases
+
+struct Wat {
+ counter: i32,
+}
+
+impl Wat {
+ fn next(&mut self) -> Result<i32, &str> {
+ self.counter += 1;
+ if self.counter < 5 {
+ Ok(self.counter)
+ } else {
+ Err("Oh no")
+ }
+ }
+}
+
+fn base_1(x: i32) {
+ let mut wat = Wat { counter: x };
+ while let Some(a) = wat.next().ok() {
+ println!("{}", a);
+ }
+}
+
+fn base_2(x: i32) {
+ let mut wat = Wat { counter: x };
+ while let Ok(a) = wat.next() {
+ println!("{}", a);
+ }
+}
+
+fn base_3(test_func: Box<Result<i32, &str>>) {
+ // Expected to stay as is
+ while let Some(_b) = test_func.ok() {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_result_ok.stderr b/src/tools/clippy/tests/ui/match_result_ok.stderr
new file mode 100644
index 000000000..cc3bc8c76
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_result_ok.stderr
@@ -0,0 +1,36 @@
+error: matching on `Some` with `ok()` is redundant
+ --> $DIR/match_result_ok.rs:10:5
+ |
+LL | if let Some(y) = x.parse().ok() { y } else { 0 }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::match-result-ok` implied by `-D warnings`
+help: consider matching on `Ok(y)` and removing the call to `ok` instead
+ |
+LL | if let Ok(y) = x.parse() { y } else { 0 }
+ | ~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: matching on `Some` with `ok()` is redundant
+ --> $DIR/match_result_ok.rs:20:9
+ |
+LL | if let Some(y) = x . parse() . ok () {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider matching on `Ok(y)` and removing the call to `ok` instead
+ |
+LL | if let Ok(y) = x . parse() {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: matching on `Some` with `ok()` is redundant
+ --> $DIR/match_result_ok.rs:46:5
+ |
+LL | while let Some(a) = wat.next().ok() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider matching on `Ok(a)` and removing the call to `ok` instead
+ |
+LL | while let Ok(a) = wat.next() {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_same_arms.rs b/src/tools/clippy/tests/ui/match_same_arms.rs
new file mode 100644
index 000000000..0b9342c9c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_same_arms.rs
@@ -0,0 +1,56 @@
+#![warn(clippy::match_same_arms)]
+
+pub enum Abc {
+ A,
+ B,
+ C,
+}
+
+fn match_same_arms() {
+ let _ = match Abc::A {
+ Abc::A => 0,
+ Abc::B => 1,
+ _ => 0, //~ ERROR match arms have same body
+ };
+
+ match (1, 2, 3) {
+ (1, .., 3) => 42,
+ (.., 3) => 42, //~ ERROR match arms have same body
+ _ => 0,
+ };
+
+ let _ = match 42 {
+ 42 => 1,
+ 51 => 1, //~ ERROR match arms have same body
+ 41 => 2,
+ 52 => 2, //~ ERROR match arms have same body
+ _ => 0,
+ };
+
+ let _ = match 42 {
+ 1 => 2,
+ 2 => 2, //~ ERROR 2nd matched arms have same body
+ 3 => 2, //~ ERROR 3rd matched arms have same body
+ 4 => 3,
+ _ => 0,
+ };
+}
+
+mod issue4244 {
+ #[derive(PartialEq, PartialOrd, Eq, Ord)]
+ pub enum CommandInfo {
+ BuiltIn { name: String, about: Option<String> },
+ External { name: String, path: std::path::PathBuf },
+ }
+
+ impl CommandInfo {
+ pub fn name(&self) -> String {
+ match self {
+ CommandInfo::BuiltIn { name, .. } => name.to_string(),
+ CommandInfo::External { name, .. } => name.to_string(),
+ }
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_same_arms.stderr b/src/tools/clippy/tests/ui/match_same_arms.stderr
new file mode 100644
index 000000000..b6d04263b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_same_arms.stderr
@@ -0,0 +1,121 @@
+error: this match arm has an identical body to the `_` wildcard arm
+ --> $DIR/match_same_arms.rs:11:9
+ |
+LL | Abc::A => 0,
+ | ^^^^^^^^^^^ help: try removing the arm
+ |
+ = note: `-D clippy::match-same-arms` implied by `-D warnings`
+ = help: or try changing either arm body
+note: `_` wildcard arm here
+ --> $DIR/match_same_arms.rs:13:9
+ |
+LL | _ => 0, //~ ERROR match arms have same body
+ | ^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:17:9
+ |
+LL | (1, .., 3) => 42,
+ | ----------^^^^^^
+ | |
+ | help: try merging the arm patterns: `(1, .., 3) | (.., 3)`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:18:9
+ |
+LL | (.., 3) => 42, //~ ERROR match arms have same body
+ | ^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:24:9
+ |
+LL | 51 => 1, //~ ERROR match arms have same body
+ | --^^^^^
+ | |
+ | help: try merging the arm patterns: `51 | 42`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:23:9
+ |
+LL | 42 => 1,
+ | ^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:25:9
+ |
+LL | 41 => 2,
+ | --^^^^^
+ | |
+ | help: try merging the arm patterns: `41 | 52`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:26:9
+ |
+LL | 52 => 2, //~ ERROR match arms have same body
+ | ^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:32:9
+ |
+LL | 2 => 2, //~ ERROR 2nd matched arms have same body
+ | -^^^^^
+ | |
+ | help: try merging the arm patterns: `2 | 1`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:31:9
+ |
+LL | 1 => 2,
+ | ^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:33:9
+ |
+LL | 3 => 2, //~ ERROR 3rd matched arms have same body
+ | -^^^^^
+ | |
+ | help: try merging the arm patterns: `3 | 1`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:31:9
+ |
+LL | 1 => 2,
+ | ^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:32:9
+ |
+LL | 2 => 2, //~ ERROR 2nd matched arms have same body
+ | -^^^^^
+ | |
+ | help: try merging the arm patterns: `2 | 3`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:33:9
+ |
+LL | 3 => 2, //~ ERROR 3rd matched arms have same body
+ | ^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms.rs:50:17
+ |
+LL | CommandInfo::External { name, .. } => name.to_string(),
+ | ----------------------------------^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: try merging the arm patterns: `CommandInfo::External { name, .. } | CommandInfo::BuiltIn { name, .. }`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms.rs:49:17
+ |
+LL | CommandInfo::BuiltIn { name, .. } => name.to_string(),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_same_arms2.rs b/src/tools/clippy/tests/ui/match_same_arms2.rs
new file mode 100644
index 000000000..7aba5b447
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_same_arms2.rs
@@ -0,0 +1,238 @@
+#![warn(clippy::match_same_arms)]
+#![allow(clippy::blacklisted_name, clippy::diverging_sub_expression)]
+
+fn bar<T>(_: T) {}
+fn foo() -> bool {
+ unimplemented!()
+}
+
+fn match_same_arms() {
+ let _ = match 42 {
+ 42 => {
+ foo();
+ let mut a = 42 + [23].len() as i32;
+ if true {
+ a += 7;
+ }
+ a = -31 - a;
+ a
+ },
+ _ => {
+ //~ ERROR match arms have same body
+ foo();
+ let mut a = 42 + [23].len() as i32;
+ if true {
+ a += 7;
+ }
+ a = -31 - a;
+ a
+ },
+ };
+
+ let _ = match 42 {
+ 42 => foo(),
+ 51 => foo(), //~ ERROR match arms have same body
+ _ => true,
+ };
+
+ let _ = match Some(42) {
+ Some(_) => 24,
+ None => 24, //~ ERROR match arms have same body
+ };
+
+ let _ = match Some(42) {
+ Some(foo) => 24,
+ None => 24,
+ };
+
+ let _ = match Some(42) {
+ Some(42) => 24,
+ Some(a) => 24, // bindings are different
+ None => 0,
+ };
+
+ let _ = match Some(42) {
+ Some(a) if a > 0 => 24,
+ Some(a) => 24, // one arm has a guard
+ None => 0,
+ };
+
+ match (Some(42), Some(42)) {
+ (Some(a), None) => bar(a),
+ (None, Some(a)) => bar(a), //~ ERROR match arms have same body
+ _ => (),
+ }
+
+ match (Some(42), Some(42)) {
+ (Some(a), ..) => bar(a),
+ (.., Some(a)) => bar(a), //~ ERROR match arms have same body
+ _ => (),
+ }
+
+ let _ = match Some(()) {
+ Some(()) => 0.0,
+ None => -0.0,
+ };
+
+ match (Some(42), Some("")) {
+ (Some(a), None) => bar(a),
+ (None, Some(a)) => bar(a), // bindings have different types
+ _ => (),
+ }
+
+ let x: Result<i32, &str> = Ok(3);
+
+ // No warning because of the guard.
+ match x {
+ Ok(x) if x * x == 64 => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => println!("err"),
+ }
+
+ // This used to be a false positive; see issue #1996.
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(x) if x * x == 64 => println!("ok 64"),
+ Ok(_) => println!("ok"),
+ Err(_) => println!("err"),
+ }
+
+ match (x, Some(1i32)) {
+ (Ok(x), Some(_)) => println!("ok {}", x),
+ (Ok(_), Some(x)) => println!("ok {}", x),
+ _ => println!("err"),
+ }
+
+ // No warning; different types for `x`.
+ match (x, Some(1.0f64)) {
+ (Ok(x), Some(_)) => println!("ok {}", x),
+ (Ok(_), Some(x)) => println!("ok {}", x),
+ _ => println!("err"),
+ }
+
+ // False negative #2251.
+ match x {
+ Ok(_tmp) => println!("ok"),
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => {
+ unreachable!();
+ },
+ }
+
+ // False positive #1390
+ macro_rules! empty {
+ ($e:expr) => {};
+ }
+ match 0 {
+ 0 => {
+ empty!(0);
+ },
+ 1 => {
+ empty!(1);
+ },
+ x => {
+ empty!(x);
+ },
+ };
+
+ // still lint if the tokens are the same
+ match 0 {
+ 0 => {
+ empty!(0);
+ },
+ 1 => {
+ empty!(0);
+ },
+ x => {
+ empty!(x);
+ },
+ }
+
+ match_expr_like_matches_macro_priority();
+}
+
+fn match_expr_like_matches_macro_priority() {
+ enum E {
+ A,
+ B,
+ C,
+ }
+ let x = E::A;
+ let _ans = match x {
+ E::A => false,
+ E::B => false,
+ _ => true,
+ };
+}
+
+fn main() {
+ let _ = match Some(0) {
+ Some(0) => 0,
+ Some(1) => 1,
+ #[cfg(feature = "foo")]
+ Some(2) => 2,
+ _ => 1,
+ };
+
+ enum Foo {
+ X(u32),
+ Y(u32),
+ Z(u32),
+ }
+
+ // Don't lint. `Foo::X(0)` and `Foo::Z(_)` overlap with the arm in between.
+ let _ = match Foo::X(0) {
+ Foo::X(0) => 1,
+ Foo::X(_) | Foo::Y(_) | Foo::Z(0) => 2,
+ Foo::Z(_) => 1,
+ _ => 0,
+ };
+
+ // Suggest moving `Foo::Z(_)` up.
+ let _ = match Foo::X(0) {
+ Foo::X(0) => 1,
+ Foo::X(_) | Foo::Y(_) => 2,
+ Foo::Z(_) => 1,
+ _ => 0,
+ };
+
+ // Suggest moving `Foo::X(0)` down.
+ let _ = match Foo::X(0) {
+ Foo::X(0) => 1,
+ Foo::Y(_) | Foo::Z(0) => 2,
+ Foo::Z(_) => 1,
+ _ => 0,
+ };
+
+ // Don't lint.
+ let _ = match 0 {
+ -2 => 1,
+ -5..=50 => 2,
+ -150..=88 => 1,
+ _ => 3,
+ };
+
+ struct Bar {
+ x: u32,
+ y: u32,
+ z: u32,
+ }
+
+ // Lint.
+ let _ = match None {
+ Some(Bar { x: 0, y: 5, .. }) => 1,
+ Some(Bar { y: 10, z: 0, .. }) => 2,
+ None => 50,
+ Some(Bar { y: 0, x: 5, .. }) => 1,
+ _ => 200,
+ };
+
+ let _ = match 0 {
+ 0 => todo!(),
+ 1 => todo!(),
+ 2 => core::convert::identity::<u32>(todo!()),
+ 3 => core::convert::identity::<u32>(todo!()),
+ _ => 5,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/match_same_arms2.stderr b/src/tools/clippy/tests/ui/match_same_arms2.stderr
new file mode 100644
index 000000000..14a672ba2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_same_arms2.stderr
@@ -0,0 +1,196 @@
+error: this match arm has an identical body to the `_` wildcard arm
+ --> $DIR/match_same_arms2.rs:11:9
+ |
+LL | / 42 => {
+LL | | foo();
+LL | | let mut a = 42 + [23].len() as i32;
+LL | | if true {
+... |
+LL | | a
+LL | | },
+ | |_________^ help: try removing the arm
+ |
+ = note: `-D clippy::match-same-arms` implied by `-D warnings`
+ = help: or try changing either arm body
+note: `_` wildcard arm here
+ --> $DIR/match_same_arms2.rs:20:9
+ |
+LL | / _ => {
+LL | | //~ ERROR match arms have same body
+LL | | foo();
+LL | | let mut a = 42 + [23].len() as i32;
+... |
+LL | | a
+LL | | },
+ | |_________^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:34:9
+ |
+LL | 51 => foo(), //~ ERROR match arms have same body
+ | --^^^^^^^^^
+ | |
+ | help: try merging the arm patterns: `51 | 42`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:33:9
+ |
+LL | 42 => foo(),
+ | ^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:40:9
+ |
+LL | None => 24, //~ ERROR match arms have same body
+ | ----^^^^^^
+ | |
+ | help: try merging the arm patterns: `None | Some(_)`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:39:9
+ |
+LL | Some(_) => 24,
+ | ^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:62:9
+ |
+LL | (None, Some(a)) => bar(a), //~ ERROR match arms have same body
+ | ---------------^^^^^^^^^^
+ | |
+ | help: try merging the arm patterns: `(None, Some(a)) | (Some(a), None)`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:61:9
+ |
+LL | (Some(a), None) => bar(a),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:67:9
+ |
+LL | (Some(a), ..) => bar(a),
+ | -------------^^^^^^^^^^
+ | |
+ | help: try merging the arm patterns: `(Some(a), ..) | (.., Some(a))`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:68:9
+ |
+LL | (.., Some(a)) => bar(a), //~ ERROR match arms have same body
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:101:9
+ |
+LL | (Ok(x), Some(_)) => println!("ok {}", x),
+ | ----------------^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: try merging the arm patterns: `(Ok(x), Some(_)) | (Ok(_), Some(x))`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:102:9
+ |
+LL | (Ok(_), Some(x)) => println!("ok {}", x),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:117:9
+ |
+LL | Ok(_) => println!("ok"),
+ | -----^^^^^^^^^^^^^^^^^^
+ | |
+ | help: try merging the arm patterns: `Ok(_) | Ok(3)`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:116:9
+ |
+LL | Ok(3) => println!("ok"),
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:144:9
+ |
+LL | 1 => {
+ | ^ help: try merging the arm patterns: `1 | 0`
+ | _________|
+ | |
+LL | | empty!(0);
+LL | | },
+ | |_________^
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:141:9
+ |
+LL | / 0 => {
+LL | | empty!(0);
+LL | | },
+ | |_________^
+
+error: match expression looks like `matches!` macro
+ --> $DIR/match_same_arms2.rs:162:16
+ |
+LL | let _ans = match x {
+ | ________________^
+LL | | E::A => false,
+LL | | E::B => false,
+LL | | _ => true,
+LL | | };
+ | |_____^ help: try this: `!matches!(x, E::A | E::B)`
+ |
+ = note: `-D clippy::match-like-matches-macro` implied by `-D warnings`
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:194:9
+ |
+LL | Foo::X(0) => 1,
+ | ---------^^^^^
+ | |
+ | help: try merging the arm patterns: `Foo::X(0) | Foo::Z(_)`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:196:9
+ |
+LL | Foo::Z(_) => 1,
+ | ^^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:204:9
+ |
+LL | Foo::Z(_) => 1,
+ | ---------^^^^^
+ | |
+ | help: try merging the arm patterns: `Foo::Z(_) | Foo::X(0)`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:202:9
+ |
+LL | Foo::X(0) => 1,
+ | ^^^^^^^^^^^^^^
+
+error: this match arm has an identical body to another arm
+ --> $DIR/match_same_arms2.rs:227:9
+ |
+LL | Some(Bar { y: 0, x: 5, .. }) => 1,
+ | ----------------------------^^^^^
+ | |
+ | help: try merging the arm patterns: `Some(Bar { y: 0, x: 5, .. }) | Some(Bar { x: 0, y: 5, .. })`
+ |
+ = help: or try changing either arm body
+note: other arm here
+ --> $DIR/match_same_arms2.rs:224:9
+ |
+LL | Some(Bar { x: 0, y: 5, .. }) => 1,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_single_binding.fixed b/src/tools/clippy/tests/ui/match_single_binding.fixed
new file mode 100644
index 000000000..de46e6cff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_single_binding.fixed
@@ -0,0 +1,126 @@
+// run-rustfix
+
+#![warn(clippy::match_single_binding)]
+#![allow(unused_variables, clippy::toplevel_ref_arg)]
+
+struct Point {
+ x: i32,
+ y: i32,
+}
+
+fn coords() -> Point {
+ Point { x: 1, y: 2 }
+}
+
+macro_rules! foo {
+ ($param:expr) => {
+ match $param {
+ _ => println!("whatever"),
+ }
+ };
+}
+
+fn main() {
+ let a = 1;
+ let b = 2;
+ let c = 3;
+ // Lint
+ let (x, y, z) = (a, b, c);
+ {
+ println!("{} {} {}", x, y, z);
+ }
+ // Lint
+ let (x, y, z) = (a, b, c);
+ println!("{} {} {}", x, y, z);
+ // Ok
+ foo!(a);
+ // Ok
+ match a {
+ 2 => println!("2"),
+ _ => println!("Not 2"),
+ }
+ // Ok
+ let d = Some(5);
+ match d {
+ Some(d) => println!("{}", d),
+ _ => println!("None"),
+ }
+ // Lint
+ println!("whatever");
+ // Lint
+ {
+ let x = 29;
+ println!("x has a value of {}", x);
+ }
+ // Lint
+ {
+ let e = 5 * a;
+ if e >= 5 {
+ println!("e is superior to 5");
+ }
+ }
+ // Lint
+ let p = Point { x: 0, y: 7 };
+ let Point { x, y } = p;
+ println!("Coords: ({}, {})", x, y);
+ // Lint
+ let Point { x: x1, y: y1 } = p;
+ println!("Coords: ({}, {})", x1, y1);
+ // Lint
+ let x = 5;
+ let ref r = x;
+ println!("Got a reference to {}", r);
+ // Lint
+ let mut x = 5;
+ let ref mut mr = x;
+ println!("Got a mutable reference to {}", mr);
+ // Lint
+ let Point { x, y } = coords();
+ let product = x * y;
+ // Lint
+ let v = vec![Some(1), Some(2), Some(3), Some(4)];
+ #[allow(clippy::let_and_return)]
+ let _ = v
+ .iter()
+ .map(|i| {
+ let unwrapped = i.unwrap();
+ unwrapped
+ })
+ .collect::<Vec<u8>>();
+ // Ok
+ let x = 1;
+ match x {
+ #[cfg(disabled_feature)]
+ 0 => println!("Disabled branch"),
+ _ => println!("Enabled branch"),
+ }
+
+ // Ok
+ let x = 1;
+ let y = 1;
+ match match y {
+ 0 => 1,
+ _ => 2,
+ } {
+ #[cfg(disabled_feature)]
+ 0 => println!("Array index start"),
+ _ => println!("Not an array index start"),
+ }
+
+ // Lint
+ let x = 1;
+ println!("Not an array index start");
+}
+
+#[allow(dead_code)]
+fn issue_8723() {
+ let (mut val, idx) = ("a b", 1);
+
+ let (pre, suf) = val.split_at(idx);
+ val = {
+ println!("{}", pre);
+ suf
+ };
+
+ let _ = val;
+}
diff --git a/src/tools/clippy/tests/ui/match_single_binding.rs b/src/tools/clippy/tests/ui/match_single_binding.rs
new file mode 100644
index 000000000..eea64fcb2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_single_binding.rs
@@ -0,0 +1,142 @@
+// run-rustfix
+
+#![warn(clippy::match_single_binding)]
+#![allow(unused_variables, clippy::toplevel_ref_arg)]
+
+struct Point {
+ x: i32,
+ y: i32,
+}
+
+fn coords() -> Point {
+ Point { x: 1, y: 2 }
+}
+
+macro_rules! foo {
+ ($param:expr) => {
+ match $param {
+ _ => println!("whatever"),
+ }
+ };
+}
+
+fn main() {
+ let a = 1;
+ let b = 2;
+ let c = 3;
+ // Lint
+ match (a, b, c) {
+ (x, y, z) => {
+ println!("{} {} {}", x, y, z);
+ },
+ }
+ // Lint
+ match (a, b, c) {
+ (x, y, z) => println!("{} {} {}", x, y, z),
+ }
+ // Ok
+ foo!(a);
+ // Ok
+ match a {
+ 2 => println!("2"),
+ _ => println!("Not 2"),
+ }
+ // Ok
+ let d = Some(5);
+ match d {
+ Some(d) => println!("{}", d),
+ _ => println!("None"),
+ }
+ // Lint
+ match a {
+ _ => println!("whatever"),
+ }
+ // Lint
+ match a {
+ _ => {
+ let x = 29;
+ println!("x has a value of {}", x);
+ },
+ }
+ // Lint
+ match a {
+ _ => {
+ let e = 5 * a;
+ if e >= 5 {
+ println!("e is superior to 5");
+ }
+ },
+ }
+ // Lint
+ let p = Point { x: 0, y: 7 };
+ match p {
+ Point { x, y } => println!("Coords: ({}, {})", x, y),
+ }
+ // Lint
+ match p {
+ Point { x: x1, y: y1 } => println!("Coords: ({}, {})", x1, y1),
+ }
+ // Lint
+ let x = 5;
+ match x {
+ ref r => println!("Got a reference to {}", r),
+ }
+ // Lint
+ let mut x = 5;
+ match x {
+ ref mut mr => println!("Got a mutable reference to {}", mr),
+ }
+ // Lint
+ let product = match coords() {
+ Point { x, y } => x * y,
+ };
+ // Lint
+ let v = vec![Some(1), Some(2), Some(3), Some(4)];
+ #[allow(clippy::let_and_return)]
+ let _ = v
+ .iter()
+ .map(|i| match i.unwrap() {
+ unwrapped => unwrapped,
+ })
+ .collect::<Vec<u8>>();
+ // Ok
+ let x = 1;
+ match x {
+ #[cfg(disabled_feature)]
+ 0 => println!("Disabled branch"),
+ _ => println!("Enabled branch"),
+ }
+
+ // Ok
+ let x = 1;
+ let y = 1;
+ match match y {
+ 0 => 1,
+ _ => 2,
+ } {
+ #[cfg(disabled_feature)]
+ 0 => println!("Array index start"),
+ _ => println!("Not an array index start"),
+ }
+
+ // Lint
+ let x = 1;
+ match x {
+ // =>
+ _ => println!("Not an array index start"),
+ }
+}
+
+#[allow(dead_code)]
+fn issue_8723() {
+ let (mut val, idx) = ("a b", 1);
+
+ val = match val.split_at(idx) {
+ (pre, suf) => {
+ println!("{}", pre);
+ suf
+ },
+ };
+
+ let _ = val;
+}
diff --git a/src/tools/clippy/tests/ui/match_single_binding.stderr b/src/tools/clippy/tests/ui/match_single_binding.stderr
new file mode 100644
index 000000000..5d4e7314b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_single_binding.stderr
@@ -0,0 +1,200 @@
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:28:5
+ |
+LL | / match (a, b, c) {
+LL | | (x, y, z) => {
+LL | | println!("{} {} {}", x, y, z);
+LL | | },
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::match-single-binding` implied by `-D warnings`
+help: consider using a `let` statement
+ |
+LL ~ let (x, y, z) = (a, b, c);
+LL + {
+LL + println!("{} {} {}", x, y, z);
+LL + }
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:34:5
+ |
+LL | / match (a, b, c) {
+LL | | (x, y, z) => println!("{} {} {}", x, y, z),
+LL | | }
+ | |_____^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let (x, y, z) = (a, b, c);
+LL + println!("{} {} {}", x, y, z);
+ |
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:51:5
+ |
+LL | / match a {
+LL | | _ => println!("whatever"),
+LL | | }
+ | |_____^ help: consider using the match body instead: `println!("whatever");`
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:55:5
+ |
+LL | / match a {
+LL | | _ => {
+LL | | let x = 29;
+LL | | println!("x has a value of {}", x);
+LL | | },
+LL | | }
+ | |_____^
+ |
+help: consider using the match body instead
+ |
+LL ~ {
+LL + let x = 29;
+LL + println!("x has a value of {}", x);
+LL + }
+ |
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:62:5
+ |
+LL | / match a {
+LL | | _ => {
+LL | | let e = 5 * a;
+LL | | if e >= 5 {
+... |
+LL | | },
+LL | | }
+ | |_____^
+ |
+help: consider using the match body instead
+ |
+LL ~ {
+LL + let e = 5 * a;
+LL + if e >= 5 {
+LL + println!("e is superior to 5");
+LL + }
+LL + }
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:72:5
+ |
+LL | / match p {
+LL | | Point { x, y } => println!("Coords: ({}, {})", x, y),
+LL | | }
+ | |_____^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let Point { x, y } = p;
+LL + println!("Coords: ({}, {})", x, y);
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:76:5
+ |
+LL | / match p {
+LL | | Point { x: x1, y: y1 } => println!("Coords: ({}, {})", x1, y1),
+LL | | }
+ | |_____^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let Point { x: x1, y: y1 } = p;
+LL + println!("Coords: ({}, {})", x1, y1);
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:81:5
+ |
+LL | / match x {
+LL | | ref r => println!("Got a reference to {}", r),
+LL | | }
+ | |_____^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let ref r = x;
+LL + println!("Got a reference to {}", r);
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:86:5
+ |
+LL | / match x {
+LL | | ref mut mr => println!("Got a mutable reference to {}", mr),
+LL | | }
+ | |_____^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let ref mut mr = x;
+LL + println!("Got a mutable reference to {}", mr);
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:90:5
+ |
+LL | / let product = match coords() {
+LL | | Point { x, y } => x * y,
+LL | | };
+ | |______^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let Point { x, y } = coords();
+LL + let product = x * y;
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding.rs:98:18
+ |
+LL | .map(|i| match i.unwrap() {
+ | __________________^
+LL | | unwrapped => unwrapped,
+LL | | })
+ | |_________^
+ |
+help: consider using a `let` statement
+ |
+LL ~ .map(|i| {
+LL + let unwrapped = i.unwrap();
+LL + unwrapped
+LL ~ })
+ |
+
+error: this match could be replaced by its body itself
+ --> $DIR/match_single_binding.rs:124:5
+ |
+LL | / match x {
+LL | | // =>
+LL | | _ => println!("Not an array index start"),
+LL | | }
+ | |_____^ help: consider using the match body instead: `println!("Not an array index start");`
+
+error: this assignment could be simplified
+ --> $DIR/match_single_binding.rs:134:5
+ |
+LL | / val = match val.split_at(idx) {
+LL | | (pre, suf) => {
+LL | | println!("{}", pre);
+LL | | suf
+LL | | },
+LL | | };
+ | |_____^
+ |
+help: consider removing the `match` expression
+ |
+LL ~ let (pre, suf) = val.split_at(idx);
+LL + val = {
+LL + println!("{}", pre);
+LL + suf
+LL ~ };
+ |
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_single_binding2.fixed b/src/tools/clippy/tests/ui/match_single_binding2.fixed
new file mode 100644
index 000000000..a91fcc212
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_single_binding2.fixed
@@ -0,0 +1,53 @@
+// run-rustfix
+
+#![warn(clippy::match_single_binding)]
+#![allow(unused_variables)]
+
+fn main() {
+ // Lint (additional curly braces needed, see #6572)
+ struct AppendIter<I>
+ where
+ I: Iterator,
+ {
+ inner: Option<(I, <I as Iterator>::Item)>,
+ }
+
+ #[allow(dead_code)]
+ fn size_hint<I: Iterator>(iter: &AppendIter<I>) -> (usize, Option<usize>) {
+ match &iter.inner {
+ Some((iter, _item)) => {
+ let (min, max) = iter.size_hint();
+ (min.saturating_add(1), max.and_then(|max| max.checked_add(1)))
+ },
+ None => (0, Some(0)),
+ }
+ }
+
+ // Lint (no additional curly braces needed)
+ let opt = Some((5, 2));
+ let get_tup = || -> (i32, i32) { (1, 2) };
+ match opt {
+ #[rustfmt::skip]
+ Some((first, _second)) => {
+ let (a, b) = get_tup();
+ println!("a {:?} and b {:?}", a, b);
+ },
+ None => println!("nothing"),
+ }
+
+ fn side_effects() {}
+
+ // Lint (scrutinee has side effects)
+ // issue #7094
+ side_effects();
+ println!("Side effects");
+
+ // Lint (scrutinee has side effects)
+ // issue #7094
+ let x = 1;
+ match x {
+ 0 => 1,
+ _ => 2,
+ };
+ println!("Single branch");
+}
diff --git a/src/tools/clippy/tests/ui/match_single_binding2.rs b/src/tools/clippy/tests/ui/match_single_binding2.rs
new file mode 100644
index 000000000..476386eba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_single_binding2.rs
@@ -0,0 +1,55 @@
+// run-rustfix
+
+#![warn(clippy::match_single_binding)]
+#![allow(unused_variables)]
+
+fn main() {
+ // Lint (additional curly braces needed, see #6572)
+ struct AppendIter<I>
+ where
+ I: Iterator,
+ {
+ inner: Option<(I, <I as Iterator>::Item)>,
+ }
+
+ #[allow(dead_code)]
+ fn size_hint<I: Iterator>(iter: &AppendIter<I>) -> (usize, Option<usize>) {
+ match &iter.inner {
+ Some((iter, _item)) => match iter.size_hint() {
+ (min, max) => (min.saturating_add(1), max.and_then(|max| max.checked_add(1))),
+ },
+ None => (0, Some(0)),
+ }
+ }
+
+ // Lint (no additional curly braces needed)
+ let opt = Some((5, 2));
+ let get_tup = || -> (i32, i32) { (1, 2) };
+ match opt {
+ #[rustfmt::skip]
+ Some((first, _second)) => {
+ match get_tup() {
+ (a, b) => println!("a {:?} and b {:?}", a, b),
+ }
+ },
+ None => println!("nothing"),
+ }
+
+ fn side_effects() {}
+
+ // Lint (scrutinee has side effects)
+ // issue #7094
+ match side_effects() {
+ _ => println!("Side effects"),
+ }
+
+ // Lint (scrutinee has side effects)
+ // issue #7094
+ let x = 1;
+ match match x {
+ 0 => 1,
+ _ => 2,
+ } {
+ _ => println!("Single branch"),
+ }
+}
diff --git a/src/tools/clippy/tests/ui/match_single_binding2.stderr b/src/tools/clippy/tests/ui/match_single_binding2.stderr
new file mode 100644
index 000000000..22bf7d8be
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_single_binding2.stderr
@@ -0,0 +1,68 @@
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding2.rs:18:36
+ |
+LL | Some((iter, _item)) => match iter.size_hint() {
+ | ____________________________________^
+LL | | (min, max) => (min.saturating_add(1), max.and_then(|max| max.checked_add(1))),
+LL | | },
+ | |_____________^
+ |
+ = note: `-D clippy::match-single-binding` implied by `-D warnings`
+help: consider using a `let` statement
+ |
+LL ~ Some((iter, _item)) => {
+LL + let (min, max) = iter.size_hint();
+LL + (min.saturating_add(1), max.and_then(|max| max.checked_add(1)))
+LL ~ },
+ |
+
+error: this match could be written as a `let` statement
+ --> $DIR/match_single_binding2.rs:31:13
+ |
+LL | / match get_tup() {
+LL | | (a, b) => println!("a {:?} and b {:?}", a, b),
+LL | | }
+ | |_____________^
+ |
+help: consider using a `let` statement
+ |
+LL ~ let (a, b) = get_tup();
+LL + println!("a {:?} and b {:?}", a, b);
+ |
+
+error: this match could be replaced by its scrutinee and body
+ --> $DIR/match_single_binding2.rs:42:5
+ |
+LL | / match side_effects() {
+LL | | _ => println!("Side effects"),
+LL | | }
+ | |_____^
+ |
+help: consider using the scrutinee and body instead
+ |
+LL ~ side_effects();
+LL + println!("Side effects");
+ |
+
+error: this match could be replaced by its scrutinee and body
+ --> $DIR/match_single_binding2.rs:49:5
+ |
+LL | / match match x {
+LL | | 0 => 1,
+LL | | _ => 2,
+LL | | } {
+LL | | _ => println!("Single branch"),
+LL | | }
+ | |_____^
+ |
+help: consider using the scrutinee and body instead
+ |
+LL ~ match x {
+LL + 0 => 1,
+LL + _ => 2,
+LL + };
+LL + println!("Single branch");
+ |
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_str_case_mismatch.fixed b/src/tools/clippy/tests/ui/match_str_case_mismatch.fixed
new file mode 100644
index 000000000..e436bcf49
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_str_case_mismatch.fixed
@@ -0,0 +1,186 @@
+// run-rustfix
+#![warn(clippy::match_str_case_mismatch)]
+#![allow(dead_code)]
+
+// Valid
+
+fn as_str_match() {
+ let var = "BAR";
+
+ match var.to_ascii_lowercase().as_str() {
+ "foo" => {},
+ "bar" => {},
+ _ => {},
+ }
+}
+
+fn non_alphabetic() {
+ let var = "~!@#$%^&*()-_=+FOO";
+
+ match var.to_ascii_lowercase().as_str() {
+ "1234567890" => {},
+ "~!@#$%^&*()-_=+foo" => {},
+ "\n\r\t\x7F" => {},
+ _ => {},
+ }
+}
+
+fn unicode_cased() {
+ let var = "ВОДЫ";
+
+ match var.to_lowercase().as_str() {
+ "水" => {},
+ "νερό" => {},
+ "воды" => {},
+ "물" => {},
+ _ => {},
+ }
+}
+
+fn titlecase() {
+ let var = "BarDz";
+
+ match var.to_lowercase().as_str() {
+ "foolj" => {},
+ "bardz" => {},
+ _ => {},
+ }
+}
+
+fn no_case_equivalent() {
+ let var = "barʁ";
+
+ match var.to_uppercase().as_str() {
+ "FOOɕ" => {},
+ "BARʁ" => {},
+ _ => {},
+ }
+}
+
+fn addrof_unary_match() {
+ let var = "BAR";
+
+ match &*var.to_ascii_lowercase() {
+ "foo" => {},
+ "bar" => {},
+ _ => {},
+ }
+}
+
+fn alternating_chain() {
+ let var = "BAR";
+
+ match &*var
+ .to_ascii_lowercase()
+ .to_uppercase()
+ .to_lowercase()
+ .to_ascii_uppercase()
+ {
+ "FOO" => {},
+ "BAR" => {},
+ _ => {},
+ }
+}
+
+fn unrelated_method() {
+ struct Item {
+ a: String,
+ }
+
+ impl Item {
+ #[allow(clippy::wrong_self_convention)]
+ fn to_lowercase(self) -> String {
+ self.a
+ }
+ }
+
+ let item = Item { a: String::from("BAR") };
+
+ match &*item.to_lowercase() {
+ "FOO" => {},
+ "BAR" => {},
+ _ => {},
+ }
+}
+
+// Invalid
+
+fn as_str_match_mismatch() {
+ let var = "BAR";
+
+ match var.to_ascii_lowercase().as_str() {
+ "foo" => {},
+ "bar" => {},
+ _ => {},
+ }
+}
+
+fn non_alphabetic_mismatch() {
+ let var = "~!@#$%^&*()-_=+FOO";
+
+ match var.to_ascii_lowercase().as_str() {
+ "1234567890" => {},
+ "~!@#$%^&*()-_=+foo" => {},
+ "\n\r\t\x7F" => {},
+ _ => {},
+ }
+}
+
+fn unicode_cased_mismatch() {
+ let var = "ВОДЫ";
+
+ match var.to_lowercase().as_str() {
+ "水" => {},
+ "νερό" => {},
+ "воды" => {},
+ "물" => {},
+ _ => {},
+ }
+}
+
+fn titlecase_mismatch() {
+ let var = "BarDz";
+
+ match var.to_lowercase().as_str() {
+ "foolj" => {},
+ "bardz" => {},
+ _ => {},
+ }
+}
+
+fn no_case_equivalent_mismatch() {
+ let var = "barʁ";
+
+ match var.to_uppercase().as_str() {
+ "FOOɕ" => {},
+ "BARʁ" => {},
+ _ => {},
+ }
+}
+
+fn addrof_unary_match_mismatch() {
+ let var = "BAR";
+
+ match &*var.to_ascii_lowercase() {
+ "foo" => {},
+ "bar" => {},
+ _ => {},
+ }
+}
+
+fn alternating_chain_mismatch() {
+ let var = "BAR";
+
+ match &*var
+ .to_ascii_lowercase()
+ .to_uppercase()
+ .to_lowercase()
+ .to_ascii_uppercase()
+ {
+ "FOO" => {},
+ "BAR" => {},
+ _ => {},
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_str_case_mismatch.rs b/src/tools/clippy/tests/ui/match_str_case_mismatch.rs
new file mode 100644
index 000000000..92e2a000a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_str_case_mismatch.rs
@@ -0,0 +1,186 @@
+// run-rustfix
+#![warn(clippy::match_str_case_mismatch)]
+#![allow(dead_code)]
+
+// Valid
+
+fn as_str_match() {
+ let var = "BAR";
+
+ match var.to_ascii_lowercase().as_str() {
+ "foo" => {},
+ "bar" => {},
+ _ => {},
+ }
+}
+
+fn non_alphabetic() {
+ let var = "~!@#$%^&*()-_=+FOO";
+
+ match var.to_ascii_lowercase().as_str() {
+ "1234567890" => {},
+ "~!@#$%^&*()-_=+foo" => {},
+ "\n\r\t\x7F" => {},
+ _ => {},
+ }
+}
+
+fn unicode_cased() {
+ let var = "ВОДЫ";
+
+ match var.to_lowercase().as_str() {
+ "水" => {},
+ "νερό" => {},
+ "воды" => {},
+ "물" => {},
+ _ => {},
+ }
+}
+
+fn titlecase() {
+ let var = "BarDz";
+
+ match var.to_lowercase().as_str() {
+ "foolj" => {},
+ "bardz" => {},
+ _ => {},
+ }
+}
+
+fn no_case_equivalent() {
+ let var = "barʁ";
+
+ match var.to_uppercase().as_str() {
+ "FOOɕ" => {},
+ "BARʁ" => {},
+ _ => {},
+ }
+}
+
+fn addrof_unary_match() {
+ let var = "BAR";
+
+ match &*var.to_ascii_lowercase() {
+ "foo" => {},
+ "bar" => {},
+ _ => {},
+ }
+}
+
+fn alternating_chain() {
+ let var = "BAR";
+
+ match &*var
+ .to_ascii_lowercase()
+ .to_uppercase()
+ .to_lowercase()
+ .to_ascii_uppercase()
+ {
+ "FOO" => {},
+ "BAR" => {},
+ _ => {},
+ }
+}
+
+fn unrelated_method() {
+ struct Item {
+ a: String,
+ }
+
+ impl Item {
+ #[allow(clippy::wrong_self_convention)]
+ fn to_lowercase(self) -> String {
+ self.a
+ }
+ }
+
+ let item = Item { a: String::from("BAR") };
+
+ match &*item.to_lowercase() {
+ "FOO" => {},
+ "BAR" => {},
+ _ => {},
+ }
+}
+
+// Invalid
+
+fn as_str_match_mismatch() {
+ let var = "BAR";
+
+ match var.to_ascii_lowercase().as_str() {
+ "foo" => {},
+ "Bar" => {},
+ _ => {},
+ }
+}
+
+fn non_alphabetic_mismatch() {
+ let var = "~!@#$%^&*()-_=+FOO";
+
+ match var.to_ascii_lowercase().as_str() {
+ "1234567890" => {},
+ "~!@#$%^&*()-_=+Foo" => {},
+ "\n\r\t\x7F" => {},
+ _ => {},
+ }
+}
+
+fn unicode_cased_mismatch() {
+ let var = "ВОДЫ";
+
+ match var.to_lowercase().as_str() {
+ "水" => {},
+ "νερό" => {},
+ "Воды" => {},
+ "물" => {},
+ _ => {},
+ }
+}
+
+fn titlecase_mismatch() {
+ let var = "BarDz";
+
+ match var.to_lowercase().as_str() {
+ "foolj" => {},
+ "barDz" => {},
+ _ => {},
+ }
+}
+
+fn no_case_equivalent_mismatch() {
+ let var = "barʁ";
+
+ match var.to_uppercase().as_str() {
+ "FOOɕ" => {},
+ "bARʁ" => {},
+ _ => {},
+ }
+}
+
+fn addrof_unary_match_mismatch() {
+ let var = "BAR";
+
+ match &*var.to_ascii_lowercase() {
+ "foo" => {},
+ "Bar" => {},
+ _ => {},
+ }
+}
+
+fn alternating_chain_mismatch() {
+ let var = "BAR";
+
+ match &*var
+ .to_ascii_lowercase()
+ .to_uppercase()
+ .to_lowercase()
+ .to_ascii_uppercase()
+ {
+ "FOO" => {},
+ "bAR" => {},
+ _ => {},
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr b/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr
new file mode 100644
index 000000000..197520a3d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr
@@ -0,0 +1,80 @@
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:113:9
+ |
+LL | "Bar" => {},
+ | ^^^^^
+ |
+ = note: `-D clippy::match-str-case-mismatch` implied by `-D warnings`
+help: consider changing the case of this arm to respect `to_ascii_lowercase`
+ |
+LL | "bar" => {},
+ | ~~~~~
+
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:123:9
+ |
+LL | "~!@#$%^&*()-_=+Foo" => {},
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: consider changing the case of this arm to respect `to_ascii_lowercase`
+ |
+LL | "~!@#$%^&*()-_=+foo" => {},
+ | ~~~~~~~~~~~~~~~~~~~~
+
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:135:9
+ |
+LL | "Воды" => {},
+ | ^^^^^^
+ |
+help: consider changing the case of this arm to respect `to_lowercase`
+ |
+LL | "воды" => {},
+ | ~~~~~~
+
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:146:9
+ |
+LL | "barDz" => {},
+ | ^^^^^^
+ |
+help: consider changing the case of this arm to respect `to_lowercase`
+ |
+LL | "bardz" => {},
+ | ~~~~~~
+
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:156:9
+ |
+LL | "bARʁ" => {},
+ | ^^^^^^
+ |
+help: consider changing the case of this arm to respect `to_uppercase`
+ |
+LL | "BARʁ" => {},
+ | ~~~~~~
+
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:166:9
+ |
+LL | "Bar" => {},
+ | ^^^^^
+ |
+help: consider changing the case of this arm to respect `to_ascii_lowercase`
+ |
+LL | "bar" => {},
+ | ~~~~~
+
+error: this `match` arm has a differing case than its expression
+ --> $DIR/match_str_case_mismatch.rs:181:9
+ |
+LL | "bAR" => {},
+ | ^^^^^
+ |
+help: consider changing the case of this arm to respect `to_ascii_uppercase`
+ |
+LL | "BAR" => {},
+ | ~~~~~
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_wild_err_arm.edition2018.stderr b/src/tools/clippy/tests/ui/match_wild_err_arm.edition2018.stderr
new file mode 100644
index 000000000..2a4012039
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_wild_err_arm.edition2018.stderr
@@ -0,0 +1,35 @@
+error: `Err(_)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:14:9
+ |
+LL | Err(_) => panic!("err"),
+ | ^^^^^^
+ |
+ = note: `-D clippy::match-wild-err-arm` implied by `-D warnings`
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: `Err(_)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:20:9
+ |
+LL | Err(_) => panic!(),
+ | ^^^^^^
+ |
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: `Err(_)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:26:9
+ |
+LL | Err(_) => {
+ | ^^^^^^
+ |
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: `Err(_e)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:34:9
+ |
+LL | Err(_e) => panic!(),
+ | ^^^^^^^
+ |
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_wild_err_arm.edition2021.stderr b/src/tools/clippy/tests/ui/match_wild_err_arm.edition2021.stderr
new file mode 100644
index 000000000..2a4012039
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_wild_err_arm.edition2021.stderr
@@ -0,0 +1,35 @@
+error: `Err(_)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:14:9
+ |
+LL | Err(_) => panic!("err"),
+ | ^^^^^^
+ |
+ = note: `-D clippy::match-wild-err-arm` implied by `-D warnings`
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: `Err(_)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:20:9
+ |
+LL | Err(_) => panic!(),
+ | ^^^^^^
+ |
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: `Err(_)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:26:9
+ |
+LL | Err(_) => {
+ | ^^^^^^
+ |
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: `Err(_e)` matches all errors
+ --> $DIR/match_wild_err_arm.rs:34:9
+ |
+LL | Err(_e) => panic!(),
+ | ^^^^^^^
+ |
+ = note: match each error separately or use the error output, or use `.expect(msg)` if the error case is unreachable
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/match_wild_err_arm.rs b/src/tools/clippy/tests/ui/match_wild_err_arm.rs
new file mode 100644
index 000000000..0a86144b9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_wild_err_arm.rs
@@ -0,0 +1,68 @@
+// revisions: edition2018 edition2021
+// [edition2018] edition:2018
+// [edition2021] edition:2021
+#![feature(exclusive_range_pattern)]
+#![allow(clippy::match_same_arms)]
+#![warn(clippy::match_wild_err_arm)]
+
+fn match_wild_err_arm() {
+ let x: Result<i32, &str> = Ok(3);
+
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => panic!("err"),
+ }
+
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => panic!(),
+ }
+
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => {
+ panic!();
+ },
+ }
+
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_e) => panic!(),
+ }
+
+ // Allowed when used in `panic!`.
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_e) => panic!("{}", _e),
+ }
+
+ // Allowed when not with `panic!` block.
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => println!("err"),
+ }
+
+ // Allowed when used with `unreachable!`.
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => unreachable!(),
+ }
+
+ // Allowed when used with `unreachable!`.
+ match x {
+ Ok(3) => println!("ok"),
+ Ok(_) => println!("ok"),
+ Err(_) => {
+ unreachable!();
+ },
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.fixed b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.fixed
new file mode 100644
index 000000000..e675c183e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.fixed
@@ -0,0 +1,134 @@
+// run-rustfix
+
+#![warn(clippy::match_wildcard_for_single_variants)]
+#![allow(dead_code)]
+
+enum Foo {
+ A,
+ B,
+ C,
+}
+
+enum Color {
+ Red,
+ Green,
+ Blue,
+ Rgb(u8, u8, u8),
+}
+impl Color {
+ fn f(self) {
+ match self {
+ Self::Red => (),
+ Self::Green => (),
+ Self::Blue => (),
+ Self::Rgb(..) => (),
+ };
+ }
+}
+
+fn main() {
+ let f = Foo::A;
+ match f {
+ Foo::A => {},
+ Foo::B => {},
+ Foo::C => {},
+ }
+
+ let color = Color::Red;
+
+ // check exhaustive bindings
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(_r, _g, _b) => {},
+ Color::Blue => {},
+ }
+
+ // check exhaustive wild
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(..) => {},
+ Color::Blue => {},
+ }
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(_, _, _) => {},
+ Color::Blue => {},
+ }
+
+ // shouldn't lint as there is one missing variant
+ // and one that isn't exhaustively covered
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(255, _, _) => {},
+ _ => {},
+ }
+
+ // References shouldn't change anything
+ match &color {
+ &Color::Red => (),
+ Color::Green => (),
+ &Color::Rgb(..) => (),
+ Color::Blue => (),
+ }
+
+ use self::Color as C;
+
+ match color {
+ C::Red => (),
+ C::Green => (),
+ C::Rgb(..) => (),
+ C::Blue => (),
+ }
+
+ match color {
+ C::Red => (),
+ Color::Green => (),
+ Color::Rgb(..) => (),
+ Color::Blue => (),
+ }
+
+ match Some(0) {
+ Some(0) => 0,
+ Some(_) => 1,
+ _ => 2,
+ };
+
+ #[non_exhaustive]
+ enum Bar {
+ A,
+ B,
+ C,
+ }
+ match Bar::A {
+ Bar::A => (),
+ Bar::B => (),
+ _ => (),
+ };
+
+ //#6984
+ {
+ #![allow(clippy::manual_non_exhaustive)]
+ pub enum Enum {
+ A,
+ B,
+ C,
+ #[doc(hidden)]
+ __Private,
+ }
+ match Enum::A {
+ Enum::A => (),
+ Enum::B => (),
+ Enum::C => (),
+ _ => (),
+ }
+ match Enum::A {
+ Enum::A => (),
+ Enum::B => (),
+ _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.rs b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.rs
new file mode 100644
index 000000000..38c3ffc00
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.rs
@@ -0,0 +1,134 @@
+// run-rustfix
+
+#![warn(clippy::match_wildcard_for_single_variants)]
+#![allow(dead_code)]
+
+enum Foo {
+ A,
+ B,
+ C,
+}
+
+enum Color {
+ Red,
+ Green,
+ Blue,
+ Rgb(u8, u8, u8),
+}
+impl Color {
+ fn f(self) {
+ match self {
+ Self::Red => (),
+ Self::Green => (),
+ Self::Blue => (),
+ _ => (),
+ };
+ }
+}
+
+fn main() {
+ let f = Foo::A;
+ match f {
+ Foo::A => {},
+ Foo::B => {},
+ _ => {},
+ }
+
+ let color = Color::Red;
+
+ // check exhaustive bindings
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(_r, _g, _b) => {},
+ _ => {},
+ }
+
+ // check exhaustive wild
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(..) => {},
+ _ => {},
+ }
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(_, _, _) => {},
+ _ => {},
+ }
+
+ // shouldn't lint as there is one missing variant
+ // and one that isn't exhaustively covered
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Rgb(255, _, _) => {},
+ _ => {},
+ }
+
+ // References shouldn't change anything
+ match &color {
+ &Color::Red => (),
+ Color::Green => (),
+ &Color::Rgb(..) => (),
+ &_ => (),
+ }
+
+ use self::Color as C;
+
+ match color {
+ C::Red => (),
+ C::Green => (),
+ C::Rgb(..) => (),
+ _ => (),
+ }
+
+ match color {
+ C::Red => (),
+ Color::Green => (),
+ Color::Rgb(..) => (),
+ _ => (),
+ }
+
+ match Some(0) {
+ Some(0) => 0,
+ Some(_) => 1,
+ _ => 2,
+ };
+
+ #[non_exhaustive]
+ enum Bar {
+ A,
+ B,
+ C,
+ }
+ match Bar::A {
+ Bar::A => (),
+ Bar::B => (),
+ _ => (),
+ };
+
+ //#6984
+ {
+ #![allow(clippy::manual_non_exhaustive)]
+ pub enum Enum {
+ A,
+ B,
+ C,
+ #[doc(hidden)]
+ __Private,
+ }
+ match Enum::A {
+ Enum::A => (),
+ Enum::B => (),
+ Enum::C => (),
+ _ => (),
+ }
+ match Enum::A {
+ Enum::A => (),
+ Enum::B => (),
+ _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr
new file mode 100644
index 000000000..34538dea8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr
@@ -0,0 +1,52 @@
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:24:13
+ |
+LL | _ => (),
+ | ^ help: try this: `Self::Rgb(..)`
+ |
+ = note: `-D clippy::match-wildcard-for-single-variants` implied by `-D warnings`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:34:9
+ |
+LL | _ => {},
+ | ^ help: try this: `Foo::C`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:44:9
+ |
+LL | _ => {},
+ | ^ help: try this: `Color::Blue`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:52:9
+ |
+LL | _ => {},
+ | ^ help: try this: `Color::Blue`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:58:9
+ |
+LL | _ => {},
+ | ^ help: try this: `Color::Blue`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:75:9
+ |
+LL | &_ => (),
+ | ^^ help: try this: `Color::Blue`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:84:9
+ |
+LL | _ => (),
+ | ^ help: try this: `C::Blue`
+
+error: wildcard matches only a single variant and will also match any future added variants
+ --> $DIR/match_wildcard_for_single_variants.rs:91:9
+ |
+LL | _ => (),
+ | ^ help: try this: `Color::Blue`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mem_forget.rs b/src/tools/clippy/tests/ui/mem_forget.rs
new file mode 100644
index 000000000..e5b35c098
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_forget.rs
@@ -0,0 +1,23 @@
+use std::rc::Rc;
+use std::sync::Arc;
+
+use std::mem as memstuff;
+use std::mem::forget as forgetSomething;
+
+#[warn(clippy::mem_forget)]
+#[allow(clippy::forget_copy)]
+fn main() {
+ let five: i32 = 5;
+ forgetSomething(five);
+
+ let six: Arc<i32> = Arc::new(6);
+ memstuff::forget(six);
+
+ let seven: Rc<i32> = Rc::new(7);
+ std::mem::forget(seven);
+
+ let eight: Vec<i32> = vec![8];
+ forgetSomething(eight);
+
+ std::mem::forget(7);
+}
diff --git a/src/tools/clippy/tests/ui/mem_forget.stderr b/src/tools/clippy/tests/ui/mem_forget.stderr
new file mode 100644
index 000000000..a90d8b165
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_forget.stderr
@@ -0,0 +1,22 @@
+error: usage of `mem::forget` on `Drop` type
+ --> $DIR/mem_forget.rs:14:5
+ |
+LL | memstuff::forget(six);
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::mem-forget` implied by `-D warnings`
+
+error: usage of `mem::forget` on `Drop` type
+ --> $DIR/mem_forget.rs:17:5
+ |
+LL | std::mem::forget(seven);
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: usage of `mem::forget` on `Drop` type
+ --> $DIR/mem_forget.rs:20:5
+ |
+LL | forgetSomething(eight);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mem_replace.fixed b/src/tools/clippy/tests/ui/mem_replace.fixed
new file mode 100644
index 000000000..b609ba659
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_replace.fixed
@@ -0,0 +1,79 @@
+// run-rustfix
+#![allow(unused_imports)]
+#![warn(
+ clippy::all,
+ clippy::style,
+ clippy::mem_replace_option_with_none,
+ clippy::mem_replace_with_default
+)]
+
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
+use std::mem;
+
+fn replace_option_with_none() {
+ let mut an_option = Some(1);
+ let _ = an_option.take();
+ let an_option = &mut Some(1);
+ let _ = an_option.take();
+}
+
+fn replace_with_default() {
+ let mut s = String::from("foo");
+ let _ = std::mem::take(&mut s);
+
+ let s = &mut String::from("foo");
+ let _ = std::mem::take(s);
+ let _ = std::mem::take(s);
+
+ let mut v = vec![123];
+ let _ = std::mem::take(&mut v);
+ let _ = std::mem::take(&mut v);
+ let _ = std::mem::take(&mut v);
+ let _ = std::mem::take(&mut v);
+
+ let mut hash_map: HashMap<i32, i32> = HashMap::new();
+ let _ = std::mem::take(&mut hash_map);
+
+ let mut btree_map: BTreeMap<i32, i32> = BTreeMap::new();
+ let _ = std::mem::take(&mut btree_map);
+
+ let mut vd: VecDeque<i32> = VecDeque::new();
+ let _ = std::mem::take(&mut vd);
+
+ let mut hash_set: HashSet<&str> = HashSet::new();
+ let _ = std::mem::take(&mut hash_set);
+
+ let mut btree_set: BTreeSet<&str> = BTreeSet::new();
+ let _ = std::mem::take(&mut btree_set);
+
+ let mut list: LinkedList<i32> = LinkedList::new();
+ let _ = std::mem::take(&mut list);
+
+ let mut binary_heap: BinaryHeap<i32> = BinaryHeap::new();
+ let _ = std::mem::take(&mut binary_heap);
+
+ let mut tuple = (vec![1, 2], BinaryHeap::<i32>::new());
+ let _ = std::mem::take(&mut tuple);
+
+ let mut refstr = "hello";
+ let _ = std::mem::take(&mut refstr);
+
+ let mut slice: &[i32] = &[1, 2, 3];
+ let _ = std::mem::take(&mut slice);
+}
+
+// lint is disabled for primitives because in this case `take`
+// has no clear benefit over `replace` and sometimes is harder to read
+fn dont_lint_primitive() {
+ let mut pbool = true;
+ let _ = std::mem::replace(&mut pbool, false);
+
+ let mut pint = 5;
+ let _ = std::mem::replace(&mut pint, 0);
+}
+
+fn main() {
+ replace_option_with_none();
+ replace_with_default();
+ dont_lint_primitive();
+}
diff --git a/src/tools/clippy/tests/ui/mem_replace.rs b/src/tools/clippy/tests/ui/mem_replace.rs
new file mode 100644
index 000000000..93f6dcdec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_replace.rs
@@ -0,0 +1,79 @@
+// run-rustfix
+#![allow(unused_imports)]
+#![warn(
+ clippy::all,
+ clippy::style,
+ clippy::mem_replace_option_with_none,
+ clippy::mem_replace_with_default
+)]
+
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
+use std::mem;
+
+fn replace_option_with_none() {
+ let mut an_option = Some(1);
+ let _ = mem::replace(&mut an_option, None);
+ let an_option = &mut Some(1);
+ let _ = mem::replace(an_option, None);
+}
+
+fn replace_with_default() {
+ let mut s = String::from("foo");
+ let _ = std::mem::replace(&mut s, String::default());
+
+ let s = &mut String::from("foo");
+ let _ = std::mem::replace(s, String::default());
+ let _ = std::mem::replace(s, Default::default());
+
+ let mut v = vec![123];
+ let _ = std::mem::replace(&mut v, Vec::default());
+ let _ = std::mem::replace(&mut v, Default::default());
+ let _ = std::mem::replace(&mut v, Vec::new());
+ let _ = std::mem::replace(&mut v, vec![]);
+
+ let mut hash_map: HashMap<i32, i32> = HashMap::new();
+ let _ = std::mem::replace(&mut hash_map, HashMap::new());
+
+ let mut btree_map: BTreeMap<i32, i32> = BTreeMap::new();
+ let _ = std::mem::replace(&mut btree_map, BTreeMap::new());
+
+ let mut vd: VecDeque<i32> = VecDeque::new();
+ let _ = std::mem::replace(&mut vd, VecDeque::new());
+
+ let mut hash_set: HashSet<&str> = HashSet::new();
+ let _ = std::mem::replace(&mut hash_set, HashSet::new());
+
+ let mut btree_set: BTreeSet<&str> = BTreeSet::new();
+ let _ = std::mem::replace(&mut btree_set, BTreeSet::new());
+
+ let mut list: LinkedList<i32> = LinkedList::new();
+ let _ = std::mem::replace(&mut list, LinkedList::new());
+
+ let mut binary_heap: BinaryHeap<i32> = BinaryHeap::new();
+ let _ = std::mem::replace(&mut binary_heap, BinaryHeap::new());
+
+ let mut tuple = (vec![1, 2], BinaryHeap::<i32>::new());
+ let _ = std::mem::replace(&mut tuple, (vec![], BinaryHeap::new()));
+
+ let mut refstr = "hello";
+ let _ = std::mem::replace(&mut refstr, "");
+
+ let mut slice: &[i32] = &[1, 2, 3];
+ let _ = std::mem::replace(&mut slice, &[]);
+}
+
+// lint is disabled for primitives because in this case `take`
+// has no clear benefit over `replace` and sometimes is harder to read
+fn dont_lint_primitive() {
+ let mut pbool = true;
+ let _ = std::mem::replace(&mut pbool, false);
+
+ let mut pint = 5;
+ let _ = std::mem::replace(&mut pint, 0);
+}
+
+fn main() {
+ replace_option_with_none();
+ replace_with_default();
+ dont_lint_primitive();
+}
diff --git a/src/tools/clippy/tests/ui/mem_replace.stderr b/src/tools/clippy/tests/ui/mem_replace.stderr
new file mode 100644
index 000000000..90dc6c95f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_replace.stderr
@@ -0,0 +1,120 @@
+error: replacing an `Option` with `None`
+ --> $DIR/mem_replace.rs:15:13
+ |
+LL | let _ = mem::replace(&mut an_option, None);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Option::take()` instead: `an_option.take()`
+ |
+ = note: `-D clippy::mem-replace-option-with-none` implied by `-D warnings`
+
+error: replacing an `Option` with `None`
+ --> $DIR/mem_replace.rs:17:13
+ |
+LL | let _ = mem::replace(an_option, None);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Option::take()` instead: `an_option.take()`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:22:13
+ |
+LL | let _ = std::mem::replace(&mut s, String::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut s)`
+ |
+ = note: `-D clippy::mem-replace-with-default` implied by `-D warnings`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:25:13
+ |
+LL | let _ = std::mem::replace(s, String::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(s)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:26:13
+ |
+LL | let _ = std::mem::replace(s, Default::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(s)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:29:13
+ |
+LL | let _ = std::mem::replace(&mut v, Vec::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut v)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:30:13
+ |
+LL | let _ = std::mem::replace(&mut v, Default::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut v)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:31:13
+ |
+LL | let _ = std::mem::replace(&mut v, Vec::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut v)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:32:13
+ |
+LL | let _ = std::mem::replace(&mut v, vec![]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut v)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:35:13
+ |
+LL | let _ = std::mem::replace(&mut hash_map, HashMap::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut hash_map)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:38:13
+ |
+LL | let _ = std::mem::replace(&mut btree_map, BTreeMap::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut btree_map)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:41:13
+ |
+LL | let _ = std::mem::replace(&mut vd, VecDeque::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut vd)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:44:13
+ |
+LL | let _ = std::mem::replace(&mut hash_set, HashSet::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut hash_set)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:47:13
+ |
+LL | let _ = std::mem::replace(&mut btree_set, BTreeSet::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut btree_set)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:50:13
+ |
+LL | let _ = std::mem::replace(&mut list, LinkedList::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut list)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:53:13
+ |
+LL | let _ = std::mem::replace(&mut binary_heap, BinaryHeap::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut binary_heap)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:56:13
+ |
+LL | let _ = std::mem::replace(&mut tuple, (vec![], BinaryHeap::new()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut tuple)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:59:13
+ |
+LL | let _ = std::mem::replace(&mut refstr, "");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut refstr)`
+
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace.rs:62:13
+ |
+LL | let _ = std::mem::replace(&mut slice, &[]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::mem::take(&mut slice)`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mem_replace_macro.rs b/src/tools/clippy/tests/ui/mem_replace_macro.rs
new file mode 100644
index 000000000..0c09344b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_replace_macro.rs
@@ -0,0 +1,21 @@
+// aux-build:macro_rules.rs
+#![warn(clippy::mem_replace_with_default)]
+
+#[macro_use]
+extern crate macro_rules;
+
+macro_rules! take {
+ ($s:expr) => {
+ std::mem::replace($s, Default::default())
+ };
+}
+
+fn replace_with_default() {
+ let s = &mut String::from("foo");
+ take!(s);
+ take_external!(s);
+}
+
+fn main() {
+ replace_with_default();
+}
diff --git a/src/tools/clippy/tests/ui/mem_replace_macro.stderr b/src/tools/clippy/tests/ui/mem_replace_macro.stderr
new file mode 100644
index 000000000..dd69ab8b5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mem_replace_macro.stderr
@@ -0,0 +1,14 @@
+error: replacing a value of type `T` with `T::default()` is better expressed using `std::mem::take`
+ --> $DIR/mem_replace_macro.rs:9:9
+ |
+LL | std::mem::replace($s, Default::default())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | take!(s);
+ | -------- in this macro invocation
+ |
+ = note: `-D clippy::mem-replace-with-default` implied by `-D warnings`
+ = note: this error originates in the macro `take` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/methods.rs b/src/tools/clippy/tests/ui/methods.rs
new file mode 100644
index 000000000..1970c2eae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods.rs
@@ -0,0 +1,140 @@
+// aux-build:option_helpers.rs
+
+#![warn(clippy::all, clippy::pedantic)]
+#![allow(
+ clippy::blacklisted_name,
+ clippy::default_trait_access,
+ clippy::missing_docs_in_private_items,
+ clippy::missing_safety_doc,
+ clippy::non_ascii_literal,
+ clippy::new_without_default,
+ clippy::needless_pass_by_value,
+ clippy::needless_lifetimes,
+ clippy::print_stdout,
+ clippy::must_use_candidate,
+ clippy::use_self,
+ clippy::useless_format,
+ clippy::wrong_self_convention,
+ clippy::unused_async,
+ clippy::unused_self,
+ unused
+)]
+
+#[macro_use]
+extern crate option_helpers;
+
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::collections::VecDeque;
+use std::ops::Mul;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+use option_helpers::{IteratorFalsePositives, IteratorMethodFalsePositives};
+
+struct Lt<'a> {
+ foo: &'a u32,
+}
+
+impl<'a> Lt<'a> {
+ // The lifetime is different, but that’s irrelevant; see issue #734.
+ #[allow(clippy::needless_lifetimes)]
+ pub fn new<'b>(s: &'b str) -> Lt<'b> {
+ unimplemented!()
+ }
+}
+
+struct Lt2<'a> {
+ foo: &'a u32,
+}
+
+impl<'a> Lt2<'a> {
+ // The lifetime is different, but that’s irrelevant; see issue #734.
+ pub fn new(s: &str) -> Lt2 {
+ unimplemented!()
+ }
+}
+
+struct Lt3<'a> {
+ foo: &'a u32,
+}
+
+impl<'a> Lt3<'a> {
+ // The lifetime is different, but that’s irrelevant; see issue #734.
+ pub fn new() -> Lt3<'static> {
+ unimplemented!()
+ }
+}
+
+#[derive(Clone, Copy)]
+struct U;
+
+impl U {
+ fn new() -> Self {
+ U
+ }
+ // Ok because `U` is `Copy`.
+ fn to_something(self) -> u32 {
+ 0
+ }
+}
+
+struct V<T> {
+ _dummy: T,
+}
+
+impl<T> V<T> {
+ fn new() -> Option<V<T>> {
+ None
+ }
+}
+
+struct AsyncNew;
+
+impl AsyncNew {
+ async fn new() -> Option<Self> {
+ None
+ }
+}
+
+struct BadNew;
+
+impl BadNew {
+ fn new() -> i32 {
+ 0
+ }
+}
+
+struct T;
+
+impl Mul<T> for T {
+ type Output = T;
+ // No error, obviously.
+ fn mul(self, other: T) -> T {
+ self
+ }
+}
+
+/// Checks implementation of `FILTER_NEXT` lint.
+#[rustfmt::skip]
+fn filter_next() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+
+ // Multi-line case.
+ let _ = v.iter().filter(|&x| {
+ *x < 0
+ }
+ ).next();
+
+ // Check that we don't lint if the caller is not an `Iterator`.
+ let foo = IteratorFalsePositives { foo: 0 };
+ let _ = foo.filter().next();
+
+ let foo = IteratorMethodFalsePositives {};
+ let _ = foo.filter(42).next();
+}
+
+fn main() {
+ filter_next();
+}
diff --git a/src/tools/clippy/tests/ui/methods.stderr b/src/tools/clippy/tests/ui/methods.stderr
new file mode 100644
index 000000000..b63672dd6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods.stderr
@@ -0,0 +1,24 @@
+error: methods called `new` usually return `Self`
+ --> $DIR/methods.rs:104:5
+ |
+LL | / fn new() -> i32 {
+LL | | 0
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::new-ret-no-self` implied by `-D warnings`
+
+error: called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find(..)` instead
+ --> $DIR/methods.rs:125:13
+ |
+LL | let _ = v.iter().filter(|&x| {
+ | _____________^
+LL | | *x < 0
+LL | | }
+LL | | ).next();
+ | |___________________________^
+ |
+ = note: `-D clippy::filter-next` implied by `-D warnings`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/methods_fixable.fixed b/src/tools/clippy/tests/ui/methods_fixable.fixed
new file mode 100644
index 000000000..ee7c1b0da
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods_fixable.fixed
@@ -0,0 +1,11 @@
+// run-rustfix
+
+#![warn(clippy::filter_next)]
+
+/// Checks implementation of `FILTER_NEXT` lint.
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+
+ // Single-line case.
+ let _ = v.iter().find(|&x| *x < 0);
+}
diff --git a/src/tools/clippy/tests/ui/methods_fixable.rs b/src/tools/clippy/tests/ui/methods_fixable.rs
new file mode 100644
index 000000000..6d0f1b7bd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods_fixable.rs
@@ -0,0 +1,11 @@
+// run-rustfix
+
+#![warn(clippy::filter_next)]
+
+/// Checks implementation of `FILTER_NEXT` lint.
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+
+ // Single-line case.
+ let _ = v.iter().filter(|&x| *x < 0).next();
+}
diff --git a/src/tools/clippy/tests/ui/methods_fixable.stderr b/src/tools/clippy/tests/ui/methods_fixable.stderr
new file mode 100644
index 000000000..852f48e32
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods_fixable.stderr
@@ -0,0 +1,10 @@
+error: called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find(..)` instead
+ --> $DIR/methods_fixable.rs:10:13
+ |
+LL | let _ = v.iter().filter(|&x| *x < 0).next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `v.iter().find(|&x| *x < 0)`
+ |
+ = note: `-D clippy::filter-next` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/min_max.rs b/src/tools/clippy/tests/ui/min_max.rs
new file mode 100644
index 000000000..b2bc97f47
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_max.rs
@@ -0,0 +1,62 @@
+#![warn(clippy::all)]
+
+use std::cmp::max as my_max;
+use std::cmp::min as my_min;
+use std::cmp::{max, min};
+
+const LARGE: usize = 3;
+
+struct NotOrd(u64);
+
+impl NotOrd {
+ fn min(self, x: u64) -> NotOrd {
+ NotOrd(x)
+ }
+
+ fn max(self, x: u64) -> NotOrd {
+ NotOrd(x)
+ }
+}
+
+fn main() {
+ let x = 2usize;
+ min(1, max(3, x));
+ min(max(3, x), 1);
+ max(min(x, 1), 3);
+ max(3, min(x, 1));
+
+ my_max(3, my_min(x, 1));
+
+ min(3, max(1, x)); // ok, could be 1, 2 or 3 depending on x
+
+ min(1, max(LARGE, x)); // no error, we don't look up consts here
+
+ let y = 2isize;
+ min(max(y, -1), 3);
+
+ let s = "Hello";
+ min("Apple", max("Zoo", s));
+ max(min(s, "Apple"), "Zoo");
+
+ max("Apple", min(s, "Zoo")); // ok
+
+ let f = 3f32;
+ x.min(1).max(3);
+ x.max(3).min(1);
+ f.max(3f32).min(1f32);
+
+ x.max(1).min(3); // ok
+ x.min(3).max(1); // ok
+ f.min(3f32).max(1f32); // ok
+
+ max(x.min(1), 3);
+ min(x.max(1), 3); // ok
+
+ s.max("Zoo").min("Apple");
+ s.min("Apple").max("Zoo");
+
+ s.min("Zoo").max("Apple"); // ok
+
+ let not_ord = NotOrd(1);
+ not_ord.min(1).max(3); // ok
+}
diff --git a/src/tools/clippy/tests/ui/min_max.stderr b/src/tools/clippy/tests/ui/min_max.stderr
new file mode 100644
index 000000000..c70b77eab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_max.stderr
@@ -0,0 +1,82 @@
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:23:5
+ |
+LL | min(1, max(3, x));
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::min-max` implied by `-D warnings`
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:24:5
+ |
+LL | min(max(3, x), 1);
+ | ^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:25:5
+ |
+LL | max(min(x, 1), 3);
+ | ^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:26:5
+ |
+LL | max(3, min(x, 1));
+ | ^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:28:5
+ |
+LL | my_max(3, my_min(x, 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:38:5
+ |
+LL | min("Apple", max("Zoo", s));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:39:5
+ |
+LL | max(min(s, "Apple"), "Zoo");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:44:5
+ |
+LL | x.min(1).max(3);
+ | ^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:45:5
+ |
+LL | x.max(3).min(1);
+ | ^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:46:5
+ |
+LL | f.max(3f32).min(1f32);
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:52:5
+ |
+LL | max(x.min(1), 3);
+ | ^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:55:5
+ |
+LL | s.max("Zoo").min("Apple");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this `min`/`max` combination leads to constant result
+ --> $DIR/min_max.rs:56:5
+ |
+LL | s.min("Apple").max("Zoo");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/min_rust_version_attr.rs b/src/tools/clippy/tests/ui/min_rust_version_attr.rs
new file mode 100644
index 000000000..44e407bd1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_attr.rs
@@ -0,0 +1,228 @@
+#![allow(clippy::redundant_clone)]
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.0.0"]
+
+use std::ops::{Deref, RangeFrom};
+
+fn approx_const() {
+ let log2_10 = 3.321928094887362;
+ let log10_2 = 0.301029995663981;
+}
+
+fn cloned_instead_of_copied() {
+ let _ = [1].iter().cloned();
+}
+
+fn option_as_ref_deref() {
+ let mut opt = Some(String::from("123"));
+
+ let _ = opt.as_ref().map(String::as_str);
+ let _ = opt.as_ref().map(|x| x.as_str());
+ let _ = opt.as_mut().map(String::as_mut_str);
+ let _ = opt.as_mut().map(|x| x.as_mut_str());
+}
+
+fn match_like_matches() {
+ let _y = match Some(5) {
+ Some(0) => true,
+ _ => false,
+ };
+}
+
+fn match_same_arms() {
+ match (1, 2, 3) {
+ (1, .., 3) => 42,
+ (.., 3) => 42, //~ ERROR match arms have same body
+ _ => 0,
+ };
+}
+
+fn match_same_arms2() {
+ let _ = match Some(42) {
+ Some(_) => 24,
+ None => 24, //~ ERROR match arms have same body
+ };
+}
+
+pub fn manual_strip_msrv() {
+ let s = "hello, world!";
+ if s.starts_with("hello, ") {
+ assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ }
+}
+
+pub fn redundant_fieldnames() {
+ let start = 0;
+ let _ = RangeFrom { start: start };
+}
+
+pub fn redundant_static_lifetime() {
+ const VAR_ONE: &'static str = "Test constant #1";
+}
+
+pub fn checked_conversion() {
+ let value: i64 = 42;
+ let _ = value <= (u32::max_value() as i64) && value >= 0;
+ let _ = value <= (u32::MAX as i64) && value >= 0;
+}
+
+pub struct FromOverInto(String);
+
+impl Into<FromOverInto> for String {
+ fn into(self) -> FromOverInto {
+ FromOverInto(self)
+ }
+}
+
+pub fn filter_map_next() {
+ let a = ["1", "lol", "3", "NaN", "5"];
+
+ #[rustfmt::skip]
+ let _: Option<u32> = vec![1, 2, 3, 4, 5, 6]
+ .into_iter()
+ .filter_map(|x| {
+ if x == 2 {
+ Some(x * 2)
+ } else {
+ None
+ }
+ })
+ .next();
+}
+
+#[allow(clippy::no_effect)]
+#[allow(clippy::short_circuit_statement)]
+#[allow(clippy::unnecessary_operation)]
+pub fn manual_range_contains() {
+ let x = 5;
+ x >= 8 && x < 12;
+}
+
+pub fn use_self() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo {}
+ }
+ fn test() -> Foo {
+ Foo::new()
+ }
+ }
+}
+
+fn replace_with_default() {
+ let mut s = String::from("foo");
+ let _ = std::mem::replace(&mut s, String::default());
+}
+
+fn map_unwrap_or() {
+ let opt = Some(1);
+
+ // Check for `option.map(_).unwrap_or(_)` use.
+ // Single-line case.
+ let _ = opt
+ .map(|x| x + 1)
+ // Should lint even though this call is on a separate line.
+ .unwrap_or(0);
+}
+
+// Could be const
+fn missing_const_for_fn() -> i32 {
+ 1
+}
+
+fn unnest_or_patterns() {
+ struct TS(u8, u8);
+ if let TS(0, x) | TS(1, x) = TS(0, 0) {}
+}
+
+#[cfg_attr(rustfmt, rustfmt_skip)]
+fn deprecated_cfg_attr() {}
+
+#[warn(clippy::cast_lossless)]
+fn int_from_bool() -> u8 {
+ true as u8
+}
+
+fn err_expect() {
+ let x: Result<u32, &str> = Ok(10);
+ x.err().expect("Testing expect_err");
+}
+
+fn cast_abs_to_unsigned() {
+ let x: i32 = 10;
+ assert_eq!(10u32, x.abs() as u32);
+}
+
+fn manual_rem_euclid() {
+ let x: i32 = 10;
+ let _: i32 = ((x % 4) + 4) % 4;
+}
+
+fn main() {
+ filter_map_next();
+ checked_conversion();
+ redundant_fieldnames();
+ redundant_static_lifetime();
+ option_as_ref_deref();
+ match_like_matches();
+ match_same_arms();
+ match_same_arms2();
+ manual_strip_msrv();
+ manual_range_contains();
+ use_self();
+ replace_with_default();
+ map_unwrap_or();
+ missing_const_for_fn();
+ unnest_or_patterns();
+ int_from_bool();
+ err_expect();
+ cast_abs_to_unsigned();
+ manual_rem_euclid();
+}
+
+mod just_under_msrv {
+ #![feature(custom_inner_attributes)]
+ #![clippy::msrv = "1.44.0"]
+
+ fn main() {
+ let s = "hello, world!";
+ if s.starts_with("hello, ") {
+ assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ }
+ }
+}
+
+mod meets_msrv {
+ #![feature(custom_inner_attributes)]
+ #![clippy::msrv = "1.45.0"]
+
+ fn main() {
+ let s = "hello, world!";
+ if s.starts_with("hello, ") {
+ assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ }
+ }
+}
+
+mod just_above_msrv {
+ #![feature(custom_inner_attributes)]
+ #![clippy::msrv = "1.46.0"]
+
+ fn main() {
+ let s = "hello, world!";
+ if s.starts_with("hello, ") {
+ assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ }
+ }
+}
+
+mod const_rem_euclid {
+ #![feature(custom_inner_attributes)]
+ #![clippy::msrv = "1.50.0"]
+
+ pub const fn const_rem_euclid_4(num: i32) -> i32 {
+ ((num % 4) + 4) % 4
+ }
+}
diff --git a/src/tools/clippy/tests/ui/min_rust_version_attr.stderr b/src/tools/clippy/tests/ui/min_rust_version_attr.stderr
new file mode 100644
index 000000000..b1c23b539
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_attr.stderr
@@ -0,0 +1,37 @@
+error: stripping a prefix manually
+ --> $DIR/min_rust_version_attr.rs:204:24
+ |
+LL | assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::manual-strip` implied by `-D warnings`
+note: the prefix was tested here
+ --> $DIR/min_rust_version_attr.rs:203:9
+ |
+LL | if s.starts_with("hello, ") {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_prefix("hello, ") {
+LL ~ assert_eq!(<stripped>.to_uppercase(), "WORLD!");
+ |
+
+error: stripping a prefix manually
+ --> $DIR/min_rust_version_attr.rs:216:24
+ |
+LL | assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+note: the prefix was tested here
+ --> $DIR/min_rust_version_attr.rs:215:9
+ |
+LL | if s.starts_with("hello, ") {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: try using the `strip_prefix` method
+ |
+LL ~ if let Some(<stripped>) = s.strip_prefix("hello, ") {
+LL ~ assert_eq!(<stripped>.to_uppercase(), "WORLD!");
+ |
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/min_rust_version_invalid_attr.rs b/src/tools/clippy/tests/ui/min_rust_version_invalid_attr.rs
new file mode 100644
index 000000000..f20841891
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_invalid_attr.rs
@@ -0,0 +1,4 @@
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "invalid.version"]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/min_rust_version_invalid_attr.stderr b/src/tools/clippy/tests/ui/min_rust_version_invalid_attr.stderr
new file mode 100644
index 000000000..6ff88ca56
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_invalid_attr.stderr
@@ -0,0 +1,8 @@
+error: `invalid.version` is not a valid Rust version
+ --> $DIR/min_rust_version_invalid_attr.rs:2:1
+ |
+LL | #![clippy::msrv = "invalid.version"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.rs b/src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.rs
new file mode 100644
index 000000000..e882d5ccf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.rs
@@ -0,0 +1,11 @@
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.40"]
+#![clippy::msrv = "=1.35.0"]
+#![clippy::msrv = "1.10.1"]
+
+mod foo {
+ #![clippy::msrv = "1"]
+ #![clippy::msrv = "1.0.0"]
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.stderr b/src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.stderr
new file mode 100644
index 000000000..e3ff6605c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_multiple_inner_attr.stderr
@@ -0,0 +1,38 @@
+error: `msrv` is defined multiple times
+ --> $DIR/min_rust_version_multiple_inner_attr.rs:3:1
+ |
+LL | #![clippy::msrv = "=1.35.0"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: first definition found here
+ --> $DIR/min_rust_version_multiple_inner_attr.rs:2:1
+ |
+LL | #![clippy::msrv = "1.40"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `msrv` is defined multiple times
+ --> $DIR/min_rust_version_multiple_inner_attr.rs:4:1
+ |
+LL | #![clippy::msrv = "1.10.1"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: first definition found here
+ --> $DIR/min_rust_version_multiple_inner_attr.rs:2:1
+ |
+LL | #![clippy::msrv = "1.40"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `msrv` is defined multiple times
+ --> $DIR/min_rust_version_multiple_inner_attr.rs:8:5
+ |
+LL | #![clippy::msrv = "1.0.0"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: first definition found here
+ --> $DIR/min_rust_version_multiple_inner_attr.rs:7:5
+ |
+LL | #![clippy::msrv = "1"]
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/min_rust_version_no_patch.rs b/src/tools/clippy/tests/ui/min_rust_version_no_patch.rs
new file mode 100644
index 000000000..98fffe1e3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_no_patch.rs
@@ -0,0 +1,14 @@
+#![allow(clippy::redundant_clone)]
+#![feature(custom_inner_attributes)]
+#![clippy::msrv = "1.0"]
+
+fn manual_strip_msrv() {
+ let s = "hello, world!";
+ if s.starts_with("hello, ") {
+ assert_eq!(s["hello, ".len()..].to_uppercase(), "WORLD!");
+ }
+}
+
+fn main() {
+ manual_strip_msrv()
+}
diff --git a/src/tools/clippy/tests/ui/min_rust_version_outer_attr.rs b/src/tools/clippy/tests/ui/min_rust_version_outer_attr.rs
new file mode 100644
index 000000000..551948bd7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_outer_attr.rs
@@ -0,0 +1,4 @@
+#![feature(custom_inner_attributes)]
+
+#[clippy::msrv = "invalid.version"]
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/min_rust_version_outer_attr.stderr b/src/tools/clippy/tests/ui/min_rust_version_outer_attr.stderr
new file mode 100644
index 000000000..579ee7a87
--- /dev/null
+++ b/src/tools/clippy/tests/ui/min_rust_version_outer_attr.stderr
@@ -0,0 +1,8 @@
+error: `msrv` cannot be an outer attribute
+ --> $DIR/min_rust_version_outer_attr.rs:3:1
+ |
+LL | #[clippy::msrv = "invalid.version"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.fixed b/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.fixed
new file mode 100644
index 000000000..f219a570e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.fixed
@@ -0,0 +1,27 @@
+// run-rustfix
+
+#![warn(clippy::mismatched_target_os)]
+#![allow(unused)]
+
+#[cfg(target_os = "hermit")]
+fn hermit() {}
+
+#[cfg(target_os = "wasi")]
+fn wasi() {}
+
+#[cfg(target_os = "none")]
+fn none() {}
+
+// list with conditions
+#[cfg(all(not(windows), target_os = "wasi"))]
+fn list() {}
+
+// windows is a valid target family, should be ignored
+#[cfg(windows)]
+fn windows() {}
+
+// correct use, should be ignored
+#[cfg(target_os = "hermit")]
+fn correct() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.rs b/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.rs
new file mode 100644
index 000000000..8a8ae756a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+
+#![warn(clippy::mismatched_target_os)]
+#![allow(unused)]
+
+#[cfg(hermit)]
+fn hermit() {}
+
+#[cfg(wasi)]
+fn wasi() {}
+
+#[cfg(none)]
+fn none() {}
+
+// list with conditions
+#[cfg(all(not(windows), wasi))]
+fn list() {}
+
+// windows is a valid target family, should be ignored
+#[cfg(windows)]
+fn windows() {}
+
+// correct use, should be ignored
+#[cfg(target_os = "hermit")]
+fn correct() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.stderr b/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.stderr
new file mode 100644
index 000000000..5f1b09083
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatched_target_os_non_unix.stderr
@@ -0,0 +1,36 @@
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_non_unix.rs:6:1
+ |
+LL | #[cfg(hermit)]
+ | ^^^^^^------^^
+ | |
+ | help: try: `target_os = "hermit"`
+ |
+ = note: `-D clippy::mismatched-target-os` implied by `-D warnings`
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_non_unix.rs:9:1
+ |
+LL | #[cfg(wasi)]
+ | ^^^^^^----^^
+ | |
+ | help: try: `target_os = "wasi"`
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_non_unix.rs:12:1
+ |
+LL | #[cfg(none)]
+ | ^^^^^^----^^
+ | |
+ | help: try: `target_os = "none"`
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_non_unix.rs:16:1
+ |
+LL | #[cfg(all(not(windows), wasi))]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^----^^^
+ | |
+ | help: try: `target_os = "wasi"`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mismatched_target_os_unix.fixed b/src/tools/clippy/tests/ui/mismatched_target_os_unix.fixed
new file mode 100644
index 000000000..7d9d406d9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatched_target_os_unix.fixed
@@ -0,0 +1,62 @@
+// run-rustfix
+
+#![warn(clippy::mismatched_target_os)]
+#![allow(unused)]
+
+#[cfg(target_os = "linux")]
+fn linux() {}
+
+#[cfg(target_os = "freebsd")]
+fn freebsd() {}
+
+#[cfg(target_os = "dragonfly")]
+fn dragonfly() {}
+
+#[cfg(target_os = "openbsd")]
+fn openbsd() {}
+
+#[cfg(target_os = "netbsd")]
+fn netbsd() {}
+
+#[cfg(target_os = "macos")]
+fn macos() {}
+
+#[cfg(target_os = "ios")]
+fn ios() {}
+
+#[cfg(target_os = "android")]
+fn android() {}
+
+#[cfg(target_os = "emscripten")]
+fn emscripten() {}
+
+#[cfg(target_os = "fuchsia")]
+fn fuchsia() {}
+
+#[cfg(target_os = "haiku")]
+fn haiku() {}
+
+#[cfg(target_os = "illumos")]
+fn illumos() {}
+
+#[cfg(target_os = "l4re")]
+fn l4re() {}
+
+#[cfg(target_os = "redox")]
+fn redox() {}
+
+#[cfg(target_os = "solaris")]
+fn solaris() {}
+
+#[cfg(target_os = "vxworks")]
+fn vxworks() {}
+
+// list with conditions
+#[cfg(all(not(any(target_os = "solaris", target_os = "linux")), target_os = "freebsd"))]
+fn list() {}
+
+// correct use, should be ignored
+#[cfg(target_os = "freebsd")]
+fn correct() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/mismatched_target_os_unix.rs b/src/tools/clippy/tests/ui/mismatched_target_os_unix.rs
new file mode 100644
index 000000000..c1177f1ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatched_target_os_unix.rs
@@ -0,0 +1,62 @@
+// run-rustfix
+
+#![warn(clippy::mismatched_target_os)]
+#![allow(unused)]
+
+#[cfg(linux)]
+fn linux() {}
+
+#[cfg(freebsd)]
+fn freebsd() {}
+
+#[cfg(dragonfly)]
+fn dragonfly() {}
+
+#[cfg(openbsd)]
+fn openbsd() {}
+
+#[cfg(netbsd)]
+fn netbsd() {}
+
+#[cfg(macos)]
+fn macos() {}
+
+#[cfg(ios)]
+fn ios() {}
+
+#[cfg(android)]
+fn android() {}
+
+#[cfg(emscripten)]
+fn emscripten() {}
+
+#[cfg(fuchsia)]
+fn fuchsia() {}
+
+#[cfg(haiku)]
+fn haiku() {}
+
+#[cfg(illumos)]
+fn illumos() {}
+
+#[cfg(l4re)]
+fn l4re() {}
+
+#[cfg(redox)]
+fn redox() {}
+
+#[cfg(solaris)]
+fn solaris() {}
+
+#[cfg(vxworks)]
+fn vxworks() {}
+
+// list with conditions
+#[cfg(all(not(any(solaris, linux)), freebsd))]
+fn list() {}
+
+// correct use, should be ignored
+#[cfg(target_os = "freebsd")]
+fn correct() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/mismatched_target_os_unix.stderr b/src/tools/clippy/tests/ui/mismatched_target_os_unix.stderr
new file mode 100644
index 000000000..3534b5328
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatched_target_os_unix.stderr
@@ -0,0 +1,183 @@
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:6:1
+ |
+LL | #[cfg(linux)]
+ | ^^^^^^-----^^
+ | |
+ | help: try: `target_os = "linux"`
+ |
+ = note: `-D clippy::mismatched-target-os` implied by `-D warnings`
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:9:1
+ |
+LL | #[cfg(freebsd)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "freebsd"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:12:1
+ |
+LL | #[cfg(dragonfly)]
+ | ^^^^^^---------^^
+ | |
+ | help: try: `target_os = "dragonfly"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:15:1
+ |
+LL | #[cfg(openbsd)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "openbsd"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:18:1
+ |
+LL | #[cfg(netbsd)]
+ | ^^^^^^------^^
+ | |
+ | help: try: `target_os = "netbsd"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:21:1
+ |
+LL | #[cfg(macos)]
+ | ^^^^^^-----^^
+ | |
+ | help: try: `target_os = "macos"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:24:1
+ |
+LL | #[cfg(ios)]
+ | ^^^^^^---^^
+ | |
+ | help: try: `target_os = "ios"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:27:1
+ |
+LL | #[cfg(android)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "android"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:30:1
+ |
+LL | #[cfg(emscripten)]
+ | ^^^^^^----------^^
+ | |
+ | help: try: `target_os = "emscripten"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:33:1
+ |
+LL | #[cfg(fuchsia)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "fuchsia"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:36:1
+ |
+LL | #[cfg(haiku)]
+ | ^^^^^^-----^^
+ | |
+ | help: try: `target_os = "haiku"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:39:1
+ |
+LL | #[cfg(illumos)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "illumos"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:42:1
+ |
+LL | #[cfg(l4re)]
+ | ^^^^^^----^^
+ | |
+ | help: try: `target_os = "l4re"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:45:1
+ |
+LL | #[cfg(redox)]
+ | ^^^^^^-----^^
+ | |
+ | help: try: `target_os = "redox"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:48:1
+ |
+LL | #[cfg(solaris)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "solaris"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:51:1
+ |
+LL | #[cfg(vxworks)]
+ | ^^^^^^-------^^
+ | |
+ | help: try: `target_os = "vxworks"`
+ |
+ = help: did you mean `unix`?
+
+error: operating system used in target family position
+ --> $DIR/mismatched_target_os_unix.rs:55:1
+ |
+LL | #[cfg(all(not(any(solaris, linux)), freebsd))]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: did you mean `unix`?
+help: try
+ |
+LL | #[cfg(all(not(any(target_os = "solaris", linux)), freebsd))]
+ | ~~~~~~~~~~~~~~~~~~~~~
+help: try
+ |
+LL | #[cfg(all(not(any(solaris, target_os = "linux")), freebsd))]
+ | ~~~~~~~~~~~~~~~~~~~
+help: try
+ |
+LL | #[cfg(all(not(any(solaris, linux)), target_os = "freebsd"))]
+ | ~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mismatching_type_param_order.rs b/src/tools/clippy/tests/ui/mismatching_type_param_order.rs
new file mode 100644
index 000000000..8c0da84d8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatching_type_param_order.rs
@@ -0,0 +1,64 @@
+#![warn(clippy::mismatching_type_param_order)]
+#![allow(clippy::blacklisted_name)]
+
+fn main() {
+ struct Foo<A, B> {
+ x: A,
+ y: B,
+ }
+
+ // lint on both params
+ impl<B, A> Foo<B, A> {}
+
+ // lint on the 2nd param
+ impl<C, A> Foo<C, A> {}
+
+ // should not lint
+ impl<A, B> Foo<A, B> {}
+
+ struct FooLifetime<'l, 'm, A, B> {
+ x: &'l A,
+ y: &'m B,
+ }
+
+ // should not lint on lifetimes
+ impl<'m, 'l, B, A> FooLifetime<'m, 'l, B, A> {}
+
+ struct Bar {
+ x: i32,
+ }
+
+ // should not lint
+ impl Bar {}
+
+ // also works for enums
+ enum FooEnum<A, B, C> {
+ X(A),
+ Y(B),
+ Z(C),
+ }
+
+ impl<C, A, B> FooEnum<C, A, B> {}
+
+ // also works for unions
+ union FooUnion<A: Copy, B>
+ where
+ B: Copy,
+ {
+ x: A,
+ y: B,
+ }
+
+ impl<B: Copy, A> FooUnion<B, A> where A: Copy {}
+
+ impl<A, B> FooUnion<A, B>
+ where
+ A: Copy,
+ B: Copy,
+ {
+ }
+
+ // if the types are complicated, do not lint
+ impl<K, V, B> Foo<(K, V), B> {}
+ impl<K, V, A> Foo<(K, V), A> {}
+}
diff --git a/src/tools/clippy/tests/ui/mismatching_type_param_order.stderr b/src/tools/clippy/tests/ui/mismatching_type_param_order.stderr
new file mode 100644
index 000000000..cb720256c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mismatching_type_param_order.stderr
@@ -0,0 +1,83 @@
+error: `Foo` has a similarly named generic type parameter `B` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:11:20
+ |
+LL | impl<B, A> Foo<B, A> {}
+ | ^
+ |
+ = note: `-D clippy::mismatching-type-param-order` implied by `-D warnings`
+ = help: try `A`, or a name that does not conflict with `Foo`'s generic params
+
+error: `Foo` has a similarly named generic type parameter `A` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:11:23
+ |
+LL | impl<B, A> Foo<B, A> {}
+ | ^
+ |
+ = help: try `B`, or a name that does not conflict with `Foo`'s generic params
+
+error: `Foo` has a similarly named generic type parameter `A` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:14:23
+ |
+LL | impl<C, A> Foo<C, A> {}
+ | ^
+ |
+ = help: try `B`, or a name that does not conflict with `Foo`'s generic params
+
+error: `FooLifetime` has a similarly named generic type parameter `B` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:25:44
+ |
+LL | impl<'m, 'l, B, A> FooLifetime<'m, 'l, B, A> {}
+ | ^
+ |
+ = help: try `A`, or a name that does not conflict with `FooLifetime`'s generic params
+
+error: `FooLifetime` has a similarly named generic type parameter `A` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:25:47
+ |
+LL | impl<'m, 'l, B, A> FooLifetime<'m, 'l, B, A> {}
+ | ^
+ |
+ = help: try `B`, or a name that does not conflict with `FooLifetime`'s generic params
+
+error: `FooEnum` has a similarly named generic type parameter `C` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:41:27
+ |
+LL | impl<C, A, B> FooEnum<C, A, B> {}
+ | ^
+ |
+ = help: try `A`, or a name that does not conflict with `FooEnum`'s generic params
+
+error: `FooEnum` has a similarly named generic type parameter `A` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:41:30
+ |
+LL | impl<C, A, B> FooEnum<C, A, B> {}
+ | ^
+ |
+ = help: try `B`, or a name that does not conflict with `FooEnum`'s generic params
+
+error: `FooEnum` has a similarly named generic type parameter `B` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:41:33
+ |
+LL | impl<C, A, B> FooEnum<C, A, B> {}
+ | ^
+ |
+ = help: try `C`, or a name that does not conflict with `FooEnum`'s generic params
+
+error: `FooUnion` has a similarly named generic type parameter `B` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:52:31
+ |
+LL | impl<B: Copy, A> FooUnion<B, A> where A: Copy {}
+ | ^
+ |
+ = help: try `A`, or a name that does not conflict with `FooUnion`'s generic params
+
+error: `FooUnion` has a similarly named generic type parameter `A` in its declaration, but in a different order
+ --> $DIR/mismatching_type_param_order.rs:52:34
+ |
+LL | impl<B: Copy, A> FooUnion<B, A> where A: Copy {}
+ | ^
+ |
+ = help: try `B`, or a name that does not conflict with `FooUnion`'s generic params
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing-doc-crate-missing.rs b/src/tools/clippy/tests/ui/missing-doc-crate-missing.rs
new file mode 100644
index 000000000..51fd57df8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc-crate-missing.rs
@@ -0,0 +1,3 @@
+#![warn(clippy::missing_docs_in_private_items)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing-doc-crate-missing.stderr b/src/tools/clippy/tests/ui/missing-doc-crate-missing.stderr
new file mode 100644
index 000000000..d56c5cc4c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc-crate-missing.stderr
@@ -0,0 +1,12 @@
+error: missing documentation for the crate
+ --> $DIR/missing-doc-crate-missing.rs:1:1
+ |
+LL | / #![warn(clippy::missing_docs_in_private_items)]
+LL | |
+LL | | fn main() {}
+ | |____________^
+ |
+ = note: `-D clippy::missing-docs-in-private-items` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/missing-doc-crate.rs b/src/tools/clippy/tests/ui/missing-doc-crate.rs
new file mode 100644
index 000000000..e00c7fbfe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc-crate.rs
@@ -0,0 +1,4 @@
+#![warn(clippy::missing_docs_in_private_items)]
+#![doc = include_str!("../../README.md")]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing-doc-impl.rs b/src/tools/clippy/tests/ui/missing-doc-impl.rs
new file mode 100644
index 000000000..d5724bf66
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc-impl.rs
@@ -0,0 +1,92 @@
+#![warn(clippy::missing_docs_in_private_items)]
+#![allow(dead_code)]
+#![feature(associated_type_defaults)]
+
+//! Some garbage docs for the crate here
+#![doc = "More garbage"]
+
+struct Foo {
+ a: isize,
+ b: isize,
+}
+
+pub struct PubFoo {
+ pub a: isize,
+ b: isize,
+}
+
+#[allow(clippy::missing_docs_in_private_items)]
+pub struct PubFoo2 {
+ pub a: isize,
+ pub c: isize,
+}
+
+/// dox
+pub trait A {
+ /// dox
+ fn foo(&self);
+ /// dox
+ fn foo_with_impl(&self) {}
+}
+
+#[allow(clippy::missing_docs_in_private_items)]
+trait B {
+ fn foo(&self);
+ fn foo_with_impl(&self) {}
+}
+
+pub trait C {
+ fn foo(&self);
+ fn foo_with_impl(&self) {}
+}
+
+#[allow(clippy::missing_docs_in_private_items)]
+pub trait D {
+ fn dummy(&self) {}
+}
+
+/// dox
+pub trait E: Sized {
+ type AssociatedType;
+ type AssociatedTypeDef = Self;
+
+ /// dox
+ type DocumentedType;
+ /// dox
+ type DocumentedTypeDef = Self;
+ /// dox
+ fn dummy(&self) {}
+}
+
+impl Foo {
+ pub fn new() -> Self {
+ Foo { a: 0, b: 0 }
+ }
+ fn bar() {}
+}
+
+impl PubFoo {
+ pub fn foo() {}
+ /// dox
+ pub fn foo1() {}
+ #[must_use = "yep"]
+ fn foo2() -> u32 {
+ 1
+ }
+ #[allow(clippy::missing_docs_in_private_items)]
+ pub fn foo3() {}
+}
+
+#[allow(clippy::missing_docs_in_private_items)]
+trait F {
+ fn a();
+ fn b(&self);
+}
+
+// shouldn't need to redefine documentation for implementations of traits
+impl F for Foo {
+ fn a() {}
+ fn b(&self) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing-doc-impl.stderr b/src/tools/clippy/tests/ui/missing-doc-impl.stderr
new file mode 100644
index 000000000..bda63d66a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc-impl.stderr
@@ -0,0 +1,107 @@
+error: missing documentation for a struct
+ --> $DIR/missing-doc-impl.rs:8:1
+ |
+LL | / struct Foo {
+LL | | a: isize,
+LL | | b: isize,
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::missing-docs-in-private-items` implied by `-D warnings`
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc-impl.rs:9:5
+ |
+LL | a: isize,
+ | ^^^^^^^^
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc-impl.rs:10:5
+ |
+LL | b: isize,
+ | ^^^^^^^^
+
+error: missing documentation for a struct
+ --> $DIR/missing-doc-impl.rs:13:1
+ |
+LL | / pub struct PubFoo {
+LL | | pub a: isize,
+LL | | b: isize,
+LL | | }
+ | |_^
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc-impl.rs:14:5
+ |
+LL | pub a: isize,
+ | ^^^^^^^^^^^^
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc-impl.rs:15:5
+ |
+LL | b: isize,
+ | ^^^^^^^^
+
+error: missing documentation for a trait
+ --> $DIR/missing-doc-impl.rs:38:1
+ |
+LL | / pub trait C {
+LL | | fn foo(&self);
+LL | | fn foo_with_impl(&self) {}
+LL | | }
+ | |_^
+
+error: missing documentation for an associated function
+ --> $DIR/missing-doc-impl.rs:39:5
+ |
+LL | fn foo(&self);
+ | ^^^^^^^^^^^^^^
+
+error: missing documentation for an associated function
+ --> $DIR/missing-doc-impl.rs:40:5
+ |
+LL | fn foo_with_impl(&self) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for an associated type
+ --> $DIR/missing-doc-impl.rs:50:5
+ |
+LL | type AssociatedType;
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for an associated type
+ --> $DIR/missing-doc-impl.rs:51:5
+ |
+LL | type AssociatedTypeDef = Self;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for an associated function
+ --> $DIR/missing-doc-impl.rs:62:5
+ |
+LL | / pub fn new() -> Self {
+LL | | Foo { a: 0, b: 0 }
+LL | | }
+ | |_____^
+
+error: missing documentation for an associated function
+ --> $DIR/missing-doc-impl.rs:65:5
+ |
+LL | fn bar() {}
+ | ^^^^^^^^^^^
+
+error: missing documentation for an associated function
+ --> $DIR/missing-doc-impl.rs:69:5
+ |
+LL | pub fn foo() {}
+ | ^^^^^^^^^^^^^^^
+
+error: missing documentation for an associated function
+ --> $DIR/missing-doc-impl.rs:73:5
+ |
+LL | / fn foo2() -> u32 {
+LL | | 1
+LL | | }
+ | |_____^
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing-doc.rs b/src/tools/clippy/tests/ui/missing-doc.rs
new file mode 100644
index 000000000..6e2e710e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc.rs
@@ -0,0 +1,102 @@
+#![warn(clippy::missing_docs_in_private_items)]
+// When denying at the crate level, be sure not to get random warnings from the
+// intrinsics injected by the compiler.
+#![allow(dead_code)]
+//! Some garbage docs for the crate here
+#![doc = "More garbage"]
+
+use std::arch::global_asm;
+
+type Typedef = String;
+pub type PubTypedef = String;
+
+mod module_no_dox {}
+pub mod pub_module_no_dox {}
+
+/// dox
+pub fn foo() {}
+pub fn foo2() {}
+fn foo3() {}
+#[allow(clippy::missing_docs_in_private_items)]
+pub fn foo4() {}
+
+// It is nice that doc(hidden) implies allow(missing_docs), and that it
+// applies recursively
+#[doc(hidden)]
+mod a {
+ pub fn baz() {}
+ pub mod b {
+ pub fn baz() {}
+ }
+}
+
+enum Baz {
+ BazA { a: isize, b: isize },
+ BarB,
+}
+
+pub enum PubBaz {
+ PubBazA { a: isize },
+}
+
+/// dox
+pub enum PubBaz2 {
+ /// dox
+ PubBaz2A {
+ /// dox
+ a: isize,
+ },
+}
+
+#[allow(clippy::missing_docs_in_private_items)]
+pub enum PubBaz3 {
+ PubBaz3A { b: isize },
+}
+
+#[doc(hidden)]
+pub fn baz() {}
+
+const FOO: u32 = 0;
+/// dox
+pub const FOO1: u32 = 0;
+#[allow(clippy::missing_docs_in_private_items)]
+pub const FOO2: u32 = 0;
+#[doc(hidden)]
+pub const FOO3: u32 = 0;
+pub const FOO4: u32 = 0;
+
+static BAR: u32 = 0;
+/// dox
+pub static BAR1: u32 = 0;
+#[allow(clippy::missing_docs_in_private_items)]
+pub static BAR2: u32 = 0;
+#[doc(hidden)]
+pub static BAR3: u32 = 0;
+pub static BAR4: u32 = 0;
+
+mod internal_impl {
+ /// dox
+ pub fn documented() {}
+ pub fn undocumented1() {}
+ pub fn undocumented2() {}
+ fn undocumented3() {}
+ /// dox
+ pub mod globbed {
+ /// dox
+ pub fn also_documented() {}
+ pub fn also_undocumented1() {}
+ fn also_undocumented2() {}
+ }
+}
+/// dox
+pub mod public_interface {
+ pub use crate::internal_impl::documented as foo;
+ pub use crate::internal_impl::globbed::*;
+ pub use crate::internal_impl::undocumented1 as bar;
+ pub use crate::internal_impl::{documented, undocumented2};
+}
+
+fn main() {}
+
+// Ensure global asm doesn't require documentation.
+global_asm! { "" }
diff --git a/src/tools/clippy/tests/ui/missing-doc.stderr b/src/tools/clippy/tests/ui/missing-doc.stderr
new file mode 100644
index 000000000..a876dc078
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing-doc.stderr
@@ -0,0 +1,159 @@
+error: missing documentation for a type alias
+ --> $DIR/missing-doc.rs:10:1
+ |
+LL | type Typedef = String;
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::missing-docs-in-private-items` implied by `-D warnings`
+
+error: missing documentation for a type alias
+ --> $DIR/missing-doc.rs:11:1
+ |
+LL | pub type PubTypedef = String;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a module
+ --> $DIR/missing-doc.rs:13:1
+ |
+LL | mod module_no_dox {}
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a module
+ --> $DIR/missing-doc.rs:14:1
+ |
+LL | pub mod pub_module_no_dox {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:18:1
+ |
+LL | pub fn foo2() {}
+ | ^^^^^^^^^^^^^^^^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:19:1
+ |
+LL | fn foo3() {}
+ | ^^^^^^^^^^^^
+
+error: missing documentation for an enum
+ --> $DIR/missing-doc.rs:33:1
+ |
+LL | / enum Baz {
+LL | | BazA { a: isize, b: isize },
+LL | | BarB,
+LL | | }
+ | |_^
+
+error: missing documentation for a variant
+ --> $DIR/missing-doc.rs:34:5
+ |
+LL | BazA { a: isize, b: isize },
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc.rs:34:12
+ |
+LL | BazA { a: isize, b: isize },
+ | ^^^^^^^^
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc.rs:34:22
+ |
+LL | BazA { a: isize, b: isize },
+ | ^^^^^^^^
+
+error: missing documentation for a variant
+ --> $DIR/missing-doc.rs:35:5
+ |
+LL | BarB,
+ | ^^^^
+
+error: missing documentation for an enum
+ --> $DIR/missing-doc.rs:38:1
+ |
+LL | / pub enum PubBaz {
+LL | | PubBazA { a: isize },
+LL | | }
+ | |_^
+
+error: missing documentation for a variant
+ --> $DIR/missing-doc.rs:39:5
+ |
+LL | PubBazA { a: isize },
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a struct field
+ --> $DIR/missing-doc.rs:39:15
+ |
+LL | PubBazA { a: isize },
+ | ^^^^^^^^
+
+error: missing documentation for a constant
+ --> $DIR/missing-doc.rs:59:1
+ |
+LL | const FOO: u32 = 0;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a constant
+ --> $DIR/missing-doc.rs:66:1
+ |
+LL | pub const FOO4: u32 = 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a static
+ --> $DIR/missing-doc.rs:68:1
+ |
+LL | static BAR: u32 = 0;
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a static
+ --> $DIR/missing-doc.rs:75:1
+ |
+LL | pub static BAR4: u32 = 0;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a module
+ --> $DIR/missing-doc.rs:77:1
+ |
+LL | / mod internal_impl {
+LL | | /// dox
+LL | | pub fn documented() {}
+LL | | pub fn undocumented1() {}
+... |
+LL | | }
+LL | | }
+ | |_^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:80:5
+ |
+LL | pub fn undocumented1() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:81:5
+ |
+LL | pub fn undocumented2() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:82:5
+ |
+LL | fn undocumented3() {}
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:87:9
+ |
+LL | pub fn also_undocumented1() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: missing documentation for a function
+ --> $DIR/missing-doc.rs:88:9
+ |
+LL | fn also_undocumented2() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 24 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/auxiliary/helper.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/auxiliary/helper.rs
new file mode 100644
index 000000000..7b9dc76b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/auxiliary/helper.rs
@@ -0,0 +1,8 @@
+// This file provides a const function that is unstably const forever.
+
+#![feature(staged_api)]
+#![stable(feature = "1", since = "1.0.0")]
+
+#[stable(feature = "1", since = "1.0.0")]
+#[rustc_const_unstable(feature = "foo", issue = "none")]
+pub const fn unstably_const_fn() {}
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs
new file mode 100644
index 000000000..aa60d0504
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/cant_be_const.rs
@@ -0,0 +1,121 @@
+//! False-positive tests to ensure we don't suggest `const` for things where it would cause a
+//! compilation error.
+//! The .stderr output of this test should be empty. Otherwise it's a bug somewhere.
+
+// aux-build:helper.rs
+
+#![warn(clippy::missing_const_for_fn)]
+#![feature(start)]
+#![feature(custom_inner_attributes)]
+
+extern crate helper;
+
+struct Game;
+
+// This should not be linted because it's already const
+const fn already_const() -> i32 {
+ 32
+}
+
+impl Game {
+ // This should not be linted because it's already const
+ pub const fn already_const() -> i32 {
+ 32
+ }
+}
+
+// The lint is allowed on this function because it would otherwise trigger, which we don't want in this case.
+#[allow(clippy::missing_const_for_fn)]
+fn random() -> u32 {
+ 42
+}
+
+// We should not suggest making this function `const` because `random()` is non-const
+fn random_caller() -> u32 {
+ random()
+}
+
+static Y: u32 = 0;
+
+// We should not suggest making this function `const` because const functions are not allowed to
+// refer to a static variable
+fn get_y() -> u32 {
+ Y
+ //~^ ERROR E0013
+}
+
+// Don't lint entrypoint functions
+#[start]
+fn init(num: isize, something: *const *const u8) -> isize {
+ 1
+}
+
+trait Foo {
+ // This should not be suggested to be made const
+ // (rustc doesn't allow const trait methods)
+ fn f() -> u32;
+
+ // This should not be suggested to be made const either
+ fn g() -> u32 {
+ 33
+ }
+}
+
+// Don't lint in external macros (derive)
+#[derive(PartialEq, Eq)]
+struct Point(isize, isize);
+
+impl std::ops::Add for Point {
+ type Output = Self;
+
+ // Don't lint in trait impls of derived methods
+ fn add(self, other: Self) -> Self {
+ Point(self.0 + other.0, self.1 + other.1)
+ }
+}
+
+mod with_drop {
+ pub struct A;
+ pub struct B;
+ impl Drop for A {
+ fn drop(&mut self) {}
+ }
+
+ impl A {
+ // This cannot be const because the type implements `Drop`.
+ pub fn b(self) -> B {
+ B
+ }
+ }
+
+ impl B {
+ // This cannot be const because `a` implements `Drop`.
+ pub fn a(self, a: A) -> B {
+ B
+ }
+ }
+}
+
+fn const_generic_params<T, const N: usize>(t: &[T; N]) -> &[T; N] {
+ t
+}
+
+fn const_generic_return<T, const N: usize>(t: &[T]) -> &[T; N] {
+ let p = t.as_ptr() as *const [T; N];
+
+ unsafe { &*p }
+}
+
+// Do not lint this because it calls a function whose constness is unstable.
+fn unstably_const_fn() {
+ helper::unstably_const_fn()
+}
+
+mod const_fn_stabilized_after_msrv {
+ #![clippy::msrv = "1.46.0"]
+
+ // Do not lint this because `u8::is_ascii_digit` is stabilized as a const function in 1.47.0.
+ fn const_fn_stabilized_after_msrv(byte: u8) {
+ byte.is_ascii_digit();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs
new file mode 100644
index 000000000..88f6935d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs
@@ -0,0 +1,81 @@
+#![warn(clippy::missing_const_for_fn)]
+#![allow(incomplete_features, clippy::let_and_return)]
+#![feature(custom_inner_attributes)]
+
+use std::mem::transmute;
+
+struct Game {
+ guess: i32,
+}
+
+impl Game {
+ // Could be const
+ pub fn new() -> Self {
+ Self { guess: 42 }
+ }
+
+ fn const_generic_params<'a, T, const N: usize>(&self, b: &'a [T; N]) -> &'a [T; N] {
+ b
+ }
+}
+
+// Could be const
+fn one() -> i32 {
+ 1
+}
+
+// Could also be const
+fn two() -> i32 {
+ let abc = 2;
+ abc
+}
+
+// Could be const (since Rust 1.39)
+fn string() -> String {
+ String::new()
+}
+
+// Could be const
+unsafe fn four() -> i32 {
+ 4
+}
+
+// Could also be const
+fn generic<T>(t: T) -> T {
+ t
+}
+
+fn sub(x: u32) -> usize {
+ unsafe { transmute(&x) }
+}
+
+fn generic_arr<T: Copy>(t: [T; 1]) -> T {
+ t[0]
+}
+
+mod with_drop {
+ pub struct A;
+ pub struct B;
+ impl Drop for A {
+ fn drop(&mut self) {}
+ }
+
+ impl B {
+ // This can be const because `a` is passed by reference
+ pub fn b(self, a: &A) -> B {
+ B
+ }
+ }
+}
+
+mod const_fn_stabilized_before_msrv {
+ #![clippy::msrv = "1.47.0"]
+
+ // This could be const because `u8::is_ascii_digit` is a stable const function in 1.47.
+ fn const_fn_stabilized_before_msrv(byte: u8) {
+ byte.is_ascii_digit();
+ }
+}
+
+// Should not be const
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr
new file mode 100644
index 000000000..3eb52b682
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr
@@ -0,0 +1,85 @@
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:13:5
+ |
+LL | / pub fn new() -> Self {
+LL | | Self { guess: 42 }
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::missing-const-for-fn` implied by `-D warnings`
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:17:5
+ |
+LL | / fn const_generic_params<'a, T, const N: usize>(&self, b: &'a [T; N]) -> &'a [T; N] {
+LL | | b
+LL | | }
+ | |_____^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:23:1
+ |
+LL | / fn one() -> i32 {
+LL | | 1
+LL | | }
+ | |_^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:28:1
+ |
+LL | / fn two() -> i32 {
+LL | | let abc = 2;
+LL | | abc
+LL | | }
+ | |_^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:34:1
+ |
+LL | / fn string() -> String {
+LL | | String::new()
+LL | | }
+ | |_^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:39:1
+ |
+LL | / unsafe fn four() -> i32 {
+LL | | 4
+LL | | }
+ | |_^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:44:1
+ |
+LL | / fn generic<T>(t: T) -> T {
+LL | | t
+LL | | }
+ | |_^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:52:1
+ |
+LL | / fn generic_arr<T: Copy>(t: [T; 1]) -> T {
+LL | | t[0]
+LL | | }
+ | |_^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:65:9
+ |
+LL | / pub fn b(self, a: &A) -> B {
+LL | | B
+LL | | }
+ | |_________^
+
+error: this could be a `const fn`
+ --> $DIR/could_be_const.rs:75:5
+ |
+LL | / fn const_fn_stabilized_before_msrv(byte: u8) {
+LL | | byte.is_ascii_digit();
+LL | | }
+ | |_____^
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing_inline.rs b/src/tools/clippy/tests/ui/missing_inline.rs
new file mode 100644
index 000000000..07f8e3888
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_inline.rs
@@ -0,0 +1,66 @@
+#![warn(clippy::missing_inline_in_public_items)]
+#![crate_type = "dylib"]
+// When denying at the crate level, be sure not to get random warnings from
+// the intrinsics injected by the compiler.
+#![allow(dead_code, non_snake_case)]
+
+type Typedef = String;
+pub type PubTypedef = String;
+
+struct Foo; // ok
+pub struct PubFoo; // ok
+enum FooE {} // ok
+pub enum PubFooE {} // ok
+
+mod module {} // ok
+pub mod pub_module {} // ok
+
+fn foo() {}
+pub fn pub_foo() {} // missing #[inline]
+#[inline]
+pub fn pub_foo_inline() {} // ok
+#[inline(always)]
+pub fn pub_foo_inline_always() {} // ok
+
+#[allow(clippy::missing_inline_in_public_items)]
+pub fn pub_foo_no_inline() {}
+
+trait Bar {
+ fn Bar_a(); // ok
+ fn Bar_b() {} // ok
+}
+
+pub trait PubBar {
+ fn PubBar_a(); // ok
+ fn PubBar_b() {} // missing #[inline]
+ #[inline]
+ fn PubBar_c() {} // ok
+}
+
+// none of these need inline because Foo is not exported
+impl PubBar for Foo {
+ fn PubBar_a() {} // ok
+ fn PubBar_b() {} // ok
+ fn PubBar_c() {} // ok
+}
+
+// all of these need inline because PubFoo is exported
+impl PubBar for PubFoo {
+ fn PubBar_a() {} // missing #[inline]
+ fn PubBar_b() {} // missing #[inline]
+ fn PubBar_c() {} // missing #[inline]
+}
+
+// do not need inline because Foo is not exported
+impl Foo {
+ fn FooImpl() {} // ok
+}
+
+// need inline because PubFoo is exported
+impl PubFoo {
+ pub fn PubFooImpl() {} // missing #[inline]
+}
+
+// do not lint this since users cannot control the external code
+#[derive(Debug)]
+pub struct S;
diff --git a/src/tools/clippy/tests/ui/missing_inline.stderr b/src/tools/clippy/tests/ui/missing_inline.stderr
new file mode 100644
index 000000000..40b92b764
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_inline.stderr
@@ -0,0 +1,40 @@
+error: missing `#[inline]` for a function
+ --> $DIR/missing_inline.rs:19:1
+ |
+LL | pub fn pub_foo() {} // missing #[inline]
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::missing-inline-in-public-items` implied by `-D warnings`
+
+error: missing `#[inline]` for a default trait method
+ --> $DIR/missing_inline.rs:35:5
+ |
+LL | fn PubBar_b() {} // missing #[inline]
+ | ^^^^^^^^^^^^^^^^
+
+error: missing `#[inline]` for a method
+ --> $DIR/missing_inline.rs:49:5
+ |
+LL | fn PubBar_a() {} // missing #[inline]
+ | ^^^^^^^^^^^^^^^^
+
+error: missing `#[inline]` for a method
+ --> $DIR/missing_inline.rs:50:5
+ |
+LL | fn PubBar_b() {} // missing #[inline]
+ | ^^^^^^^^^^^^^^^^
+
+error: missing `#[inline]` for a method
+ --> $DIR/missing_inline.rs:51:5
+ |
+LL | fn PubBar_c() {} // missing #[inline]
+ | ^^^^^^^^^^^^^^^^
+
+error: missing `#[inline]` for a method
+ --> $DIR/missing_inline.rs:61:5
+ |
+LL | pub fn PubFooImpl() {} // missing #[inline]
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing_inline_executable.rs b/src/tools/clippy/tests/ui/missing_inline_executable.rs
new file mode 100644
index 000000000..6e0400ac9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_inline_executable.rs
@@ -0,0 +1,5 @@
+#![warn(clippy::missing_inline_in_public_items)]
+
+pub fn foo() {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing_inline_proc_macro.rs b/src/tools/clippy/tests/ui/missing_inline_proc_macro.rs
new file mode 100644
index 000000000..3c68fb905
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_inline_proc_macro.rs
@@ -0,0 +1,23 @@
+#![warn(clippy::missing_inline_in_public_items)]
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+fn _foo() {}
+
+#[proc_macro]
+pub fn function_like(_: TokenStream) -> TokenStream {
+ TokenStream::new()
+}
+
+#[proc_macro_attribute]
+pub fn attribute(_: TokenStream, _: TokenStream) -> TokenStream {
+ TokenStream::new()
+}
+
+#[proc_macro_derive(Derive)]
+pub fn derive(_: TokenStream) -> TokenStream {
+ TokenStream::new()
+}
diff --git a/src/tools/clippy/tests/ui/missing_panics_doc.rs b/src/tools/clippy/tests/ui/missing_panics_doc.rs
new file mode 100644
index 000000000..7dc445292
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_panics_doc.rs
@@ -0,0 +1,153 @@
+#![warn(clippy::missing_panics_doc)]
+#![allow(clippy::option_map_unit_fn)]
+fn main() {}
+
+/// This needs to be documented
+pub fn unwrap() {
+ let result = Err("Hi");
+ result.unwrap()
+}
+
+/// This needs to be documented
+pub fn panic() {
+ panic!("This function panics")
+}
+
+/// This needs to be documented
+pub fn todo() {
+ todo!()
+}
+
+/// This needs to be documented
+pub fn inner_body(opt: Option<u32>) {
+ opt.map(|x| {
+ if x == 10 {
+ panic!()
+ }
+ });
+}
+
+/// This needs to be documented
+pub fn unreachable_and_panic() {
+ if true { unreachable!() } else { panic!() }
+}
+
+/// This needs to be documented
+pub fn assert_eq() {
+ let x = 0;
+ assert_eq!(x, 0);
+}
+
+/// This needs to be documented
+pub fn assert_ne() {
+ let x = 0;
+ assert_ne!(x, 0);
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// Panics if `result` is an error
+pub fn unwrap_documented() {
+ let result = Err("Hi");
+ result.unwrap()
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// Panics just because
+pub fn panic_documented() {
+ panic!("This function panics")
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// Panics if `opt` is Some(10)
+pub fn inner_body_documented(opt: Option<u32>) {
+ opt.map(|x| {
+ if x == 10 {
+ panic!()
+ }
+ });
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// We still need to do this part
+pub fn todo_documented() {
+ todo!()
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// We still need to do this part
+pub fn unreachable_and_panic_documented() {
+ if true { unreachable!() } else { panic!() }
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// Panics if `x` is not 0.
+pub fn assert_eq_documented() {
+ let x = 0;
+ assert_eq!(x, 0);
+}
+
+/// This is documented
+///
+/// # Panics
+///
+/// Panics if `x` is 0.
+pub fn assert_ne_documented() {
+ let x = 0;
+ assert_ne!(x, 0);
+}
+
+/// This is okay because it is private
+fn unwrap_private() {
+ let result = Err("Hi");
+ result.unwrap()
+}
+
+/// This is okay because it is private
+fn panic_private() {
+ panic!("This function panics")
+}
+
+/// This is okay because it is private
+fn todo_private() {
+ todo!()
+}
+
+/// This is okay because it is private
+fn inner_body_private(opt: Option<u32>) {
+ opt.map(|x| {
+ if x == 10 {
+ panic!()
+ }
+ });
+}
+
+/// This is okay because unreachable
+pub fn unreachable() {
+ unreachable!("This function panics")
+}
+
+/// Issue #6970.
+/// This is okay because it is an expansion of the `debug_assert` family.
+pub fn debug_assertions() {
+ debug_assert!(false);
+ debug_assert_eq!(1, 2);
+ debug_assert_ne!(1, 2);
+}
diff --git a/src/tools/clippy/tests/ui/missing_panics_doc.stderr b/src/tools/clippy/tests/ui/missing_panics_doc.stderr
new file mode 100644
index 000000000..91ebd6952
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_panics_doc.stderr
@@ -0,0 +1,108 @@
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:6:1
+ |
+LL | / pub fn unwrap() {
+LL | | let result = Err("Hi");
+LL | | result.unwrap()
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::missing-panics-doc` implied by `-D warnings`
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:8:5
+ |
+LL | result.unwrap()
+ | ^^^^^^^^^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:12:1
+ |
+LL | / pub fn panic() {
+LL | | panic!("This function panics")
+LL | | }
+ | |_^
+ |
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:13:5
+ |
+LL | panic!("This function panics")
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:17:1
+ |
+LL | / pub fn todo() {
+LL | | todo!()
+LL | | }
+ | |_^
+ |
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:18:5
+ |
+LL | todo!()
+ | ^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:22:1
+ |
+LL | / pub fn inner_body(opt: Option<u32>) {
+LL | | opt.map(|x| {
+LL | | if x == 10 {
+LL | | panic!()
+LL | | }
+LL | | });
+LL | | }
+ | |_^
+ |
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:25:13
+ |
+LL | panic!()
+ | ^^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:31:1
+ |
+LL | / pub fn unreachable_and_panic() {
+LL | | if true { unreachable!() } else { panic!() }
+LL | | }
+ | |_^
+ |
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:32:39
+ |
+LL | if true { unreachable!() } else { panic!() }
+ | ^^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:36:1
+ |
+LL | / pub fn assert_eq() {
+LL | | let x = 0;
+LL | | assert_eq!(x, 0);
+LL | | }
+ | |_^
+ |
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:38:5
+ |
+LL | assert_eq!(x, 0);
+ | ^^^^^^^^^^^^^^^^
+
+error: docs for function which may panic missing `# Panics` section
+ --> $DIR/missing_panics_doc.rs:42:1
+ |
+LL | / pub fn assert_ne() {
+LL | | let x = 0;
+LL | | assert_ne!(x, 0);
+LL | | }
+ | |_^
+ |
+note: first possible panic found here
+ --> $DIR/missing_panics_doc.rs:44:5
+ |
+LL | assert_ne!(x, 0);
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop.fixed b/src/tools/clippy/tests/ui/missing_spin_loop.fixed
new file mode 100644
index 000000000..aa89e04d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_spin_loop.fixed
@@ -0,0 +1,28 @@
+// run-rustfix
+#![warn(clippy::missing_spin_loop)]
+#![allow(clippy::bool_comparison)]
+#![allow(unused_braces)]
+
+use core::sync::atomic::{AtomicBool, Ordering};
+
+fn main() {
+ let b = AtomicBool::new(true);
+ // These should lint
+ while b.load(Ordering::Acquire) { std::hint::spin_loop() }
+
+ while !b.load(Ordering::SeqCst) { std::hint::spin_loop() }
+
+ while b.load(Ordering::Acquire) == false { std::hint::spin_loop() }
+
+ while { true == b.load(Ordering::Acquire) } { std::hint::spin_loop() }
+
+ while b.compare_exchange(true, false, Ordering::Acquire, Ordering::Relaxed) != Ok(true) { std::hint::spin_loop() }
+
+ while Ok(false) != b.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) { std::hint::spin_loop() }
+
+ // This is OK, as the body is not empty
+ while b.load(Ordering::Acquire) {
+ std::hint::spin_loop()
+ }
+ // TODO: also match on loop+match or while let
+}
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop.rs b/src/tools/clippy/tests/ui/missing_spin_loop.rs
new file mode 100644
index 000000000..88745e477
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_spin_loop.rs
@@ -0,0 +1,28 @@
+// run-rustfix
+#![warn(clippy::missing_spin_loop)]
+#![allow(clippy::bool_comparison)]
+#![allow(unused_braces)]
+
+use core::sync::atomic::{AtomicBool, Ordering};
+
+fn main() {
+ let b = AtomicBool::new(true);
+ // These should lint
+ while b.load(Ordering::Acquire) {}
+
+ while !b.load(Ordering::SeqCst) {}
+
+ while b.load(Ordering::Acquire) == false {}
+
+ while { true == b.load(Ordering::Acquire) } {}
+
+ while b.compare_exchange(true, false, Ordering::Acquire, Ordering::Relaxed) != Ok(true) {}
+
+ while Ok(false) != b.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) {}
+
+ // This is OK, as the body is not empty
+ while b.load(Ordering::Acquire) {
+ std::hint::spin_loop()
+ }
+ // TODO: also match on loop+match or while let
+}
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop.stderr b/src/tools/clippy/tests/ui/missing_spin_loop.stderr
new file mode 100644
index 000000000..485da00dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_spin_loop.stderr
@@ -0,0 +1,40 @@
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop.rs:11:37
+ |
+LL | while b.load(Ordering::Acquire) {}
+ | ^^ help: try this: `{ std::hint::spin_loop() }`
+ |
+ = note: `-D clippy::missing-spin-loop` implied by `-D warnings`
+
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop.rs:13:37
+ |
+LL | while !b.load(Ordering::SeqCst) {}
+ | ^^ help: try this: `{ std::hint::spin_loop() }`
+
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop.rs:15:46
+ |
+LL | while b.load(Ordering::Acquire) == false {}
+ | ^^ help: try this: `{ std::hint::spin_loop() }`
+
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop.rs:17:49
+ |
+LL | while { true == b.load(Ordering::Acquire) } {}
+ | ^^ help: try this: `{ std::hint::spin_loop() }`
+
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop.rs:19:93
+ |
+LL | while b.compare_exchange(true, false, Ordering::Acquire, Ordering::Relaxed) != Ok(true) {}
+ | ^^ help: try this: `{ std::hint::spin_loop() }`
+
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop.rs:21:94
+ |
+LL | while Ok(false) != b.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) {}
+ | ^^ help: try this: `{ std::hint::spin_loop() }`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.fixed b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.fixed
new file mode 100644
index 000000000..bb4b47955
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.fixed
@@ -0,0 +1,23 @@
+// run-rustfix
+#![warn(clippy::missing_spin_loop)]
+#![feature(lang_items, start, libc)]
+#![no_std]
+
+use core::sync::atomic::{AtomicBool, Ordering};
+
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ // This should trigger the lint
+ let b = AtomicBool::new(true);
+ // This should lint with `core::hint::spin_loop()`
+ while b.load(Ordering::Acquire) { core::hint::spin_loop() }
+ 0
+}
+
+#[panic_handler]
+fn panic(_info: &core::panic::PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {}
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.rs b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.rs
new file mode 100644
index 000000000..a19bc72ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.rs
@@ -0,0 +1,23 @@
+// run-rustfix
+#![warn(clippy::missing_spin_loop)]
+#![feature(lang_items, start, libc)]
+#![no_std]
+
+use core::sync::atomic::{AtomicBool, Ordering};
+
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ // This should trigger the lint
+ let b = AtomicBool::new(true);
+ // This should lint with `core::hint::spin_loop()`
+ while b.load(Ordering::Acquire) {}
+ 0
+}
+
+#[panic_handler]
+fn panic(_info: &core::panic::PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+extern "C" fn eh_personality() {}
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
new file mode 100644
index 000000000..2b3b6873c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
@@ -0,0 +1,10 @@
+error: busy-waiting loop should at least have a spin loop hint
+ --> $DIR/missing_spin_loop_no_std.rs:13:37
+ |
+LL | while b.load(Ordering::Acquire) {}
+ | ^^ help: try this: `{ core::hint::spin_loop() }`
+ |
+ = note: `-D clippy::missing-spin-loop` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed b/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed
new file mode 100644
index 000000000..a7b36d53c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mistyped_literal_suffix.fixed
@@ -0,0 +1,43 @@
+// run-rustfix
+
+#![allow(
+ dead_code,
+ unused_variables,
+ overflowing_literals,
+ clippy::excessive_precision,
+ clippy::inconsistent_digit_grouping,
+ clippy::unusual_byte_groupings
+)]
+
+fn main() {
+ let fail14 = 2_i32;
+ let fail15 = 4_i64;
+ let fail16 = 7_i8; //
+ let fail17 = 23_i16; //
+ let ok18 = 23_128;
+
+ let fail20 = 2_i8; //
+ let fail21 = 4_i16; //
+
+ let ok24 = 12.34_64;
+ let fail25 = 1E2_f32;
+ let fail26 = 43E7_f64;
+ let fail27 = 243E17_f32;
+ let fail28 = 241_251_235E723_f64;
+ let ok29 = 42279.911_32;
+
+ // testing that the suggestion actually fits in its type
+ let fail30 = 127_i8; // should be i8
+ let fail31 = 240_u8; // should be u8
+ let ok32 = 360_8; // doesn't fit in either, should be ignored
+ let fail33 = 0x1234_i16;
+ let fail34 = 0xABCD_u16;
+ let ok35 = 0x12345_16;
+ let fail36 = 0xFFFF_FFFF_FFFF_FFFF_u64; // u64
+
+ // issue #6129
+ let ok37 = 123_32.123;
+ let ok38 = 124_64.0;
+
+ let _ = 1.123_45E1_f32;
+}
diff --git a/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs b/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs
new file mode 100644
index 000000000..c97b31965
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mistyped_literal_suffix.rs
@@ -0,0 +1,43 @@
+// run-rustfix
+
+#![allow(
+ dead_code,
+ unused_variables,
+ overflowing_literals,
+ clippy::excessive_precision,
+ clippy::inconsistent_digit_grouping,
+ clippy::unusual_byte_groupings
+)]
+
+fn main() {
+ let fail14 = 2_32;
+ let fail15 = 4_64;
+ let fail16 = 7_8; //
+ let fail17 = 23_16; //
+ let ok18 = 23_128;
+
+ let fail20 = 2__8; //
+ let fail21 = 4___16; //
+
+ let ok24 = 12.34_64;
+ let fail25 = 1E2_32;
+ let fail26 = 43E7_64;
+ let fail27 = 243E17_32;
+ let fail28 = 241251235E723_64;
+ let ok29 = 42279.911_32;
+
+ // testing that the suggestion actually fits in its type
+ let fail30 = 127_8; // should be i8
+ let fail31 = 240_8; // should be u8
+ let ok32 = 360_8; // doesn't fit in either, should be ignored
+ let fail33 = 0x1234_16;
+ let fail34 = 0xABCD_16;
+ let ok35 = 0x12345_16;
+ let fail36 = 0xFFFF_FFFF_FFFF_FFFF_64; // u64
+
+ // issue #6129
+ let ok37 = 123_32.123;
+ let ok38 = 124_64.0;
+
+ let _ = 1.12345E1_32;
+}
diff --git a/src/tools/clippy/tests/ui/mistyped_literal_suffix.stderr b/src/tools/clippy/tests/ui/mistyped_literal_suffix.stderr
new file mode 100644
index 000000000..fb761d9bd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mistyped_literal_suffix.stderr
@@ -0,0 +1,100 @@
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:13:18
+ |
+LL | let fail14 = 2_32;
+ | ^^^^ help: did you mean to write: `2_i32`
+ |
+ = note: `#[deny(clippy::mistyped_literal_suffixes)]` on by default
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:14:18
+ |
+LL | let fail15 = 4_64;
+ | ^^^^ help: did you mean to write: `4_i64`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:15:18
+ |
+LL | let fail16 = 7_8; //
+ | ^^^ help: did you mean to write: `7_i8`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:16:18
+ |
+LL | let fail17 = 23_16; //
+ | ^^^^^ help: did you mean to write: `23_i16`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:19:18
+ |
+LL | let fail20 = 2__8; //
+ | ^^^^ help: did you mean to write: `2_i8`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:20:18
+ |
+LL | let fail21 = 4___16; //
+ | ^^^^^^ help: did you mean to write: `4_i16`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:23:18
+ |
+LL | let fail25 = 1E2_32;
+ | ^^^^^^ help: did you mean to write: `1E2_f32`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:24:18
+ |
+LL | let fail26 = 43E7_64;
+ | ^^^^^^^ help: did you mean to write: `43E7_f64`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:25:18
+ |
+LL | let fail27 = 243E17_32;
+ | ^^^^^^^^^ help: did you mean to write: `243E17_f32`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:26:18
+ |
+LL | let fail28 = 241251235E723_64;
+ | ^^^^^^^^^^^^^^^^ help: did you mean to write: `241_251_235E723_f64`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:30:18
+ |
+LL | let fail30 = 127_8; // should be i8
+ | ^^^^^ help: did you mean to write: `127_i8`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:31:18
+ |
+LL | let fail31 = 240_8; // should be u8
+ | ^^^^^ help: did you mean to write: `240_u8`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:33:18
+ |
+LL | let fail33 = 0x1234_16;
+ | ^^^^^^^^^ help: did you mean to write: `0x1234_i16`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:34:18
+ |
+LL | let fail34 = 0xABCD_16;
+ | ^^^^^^^^^ help: did you mean to write: `0xABCD_u16`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:36:18
+ |
+LL | let fail36 = 0xFFFF_FFFF_FFFF_FFFF_64; // u64
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean to write: `0xFFFF_FFFF_FFFF_FFFF_u64`
+
+error: mistyped literal suffix
+ --> $DIR/mistyped_literal_suffix.rs:42:13
+ |
+LL | let _ = 1.12345E1_32;
+ | ^^^^^^^^^^^^ help: did you mean to write: `1.123_45E1_f32`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mixed_read_write_in_expression.rs b/src/tools/clippy/tests/ui/mixed_read_write_in_expression.rs
new file mode 100644
index 000000000..7640057ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mixed_read_write_in_expression.rs
@@ -0,0 +1,112 @@
+#[warn(clippy::mixed_read_write_in_expression)]
+#[allow(
+ unused_assignments,
+ unused_variables,
+ clippy::no_effect,
+ dead_code,
+ clippy::blacklisted_name
+)]
+fn main() {
+ let mut x = 0;
+ let a = {
+ x = 1;
+ 1
+ } + x;
+
+ // Example from issue #277
+ x += {
+ x = 20;
+ 2
+ };
+
+ // Does it work in weird places?
+ // ...in the base for a struct expression?
+ struct Foo {
+ a: i32,
+ b: i32,
+ };
+ let base = Foo { a: 4, b: 5 };
+ let foo = Foo {
+ a: x,
+ ..{
+ x = 6;
+ base
+ }
+ };
+ // ...inside a closure?
+ let closure = || {
+ let mut x = 0;
+ x += {
+ x = 20;
+ 2
+ };
+ };
+ // ...not across a closure?
+ let mut y = 0;
+ let b = (y, || y = 1);
+
+ // && and || evaluate left-to-right.
+ let a = {
+ x = 1;
+ true
+ } && (x == 3);
+ let a = {
+ x = 1;
+ true
+ } || (x == 3);
+
+ // Make sure we don't get confused by alpha conversion.
+ let a = {
+ let mut x = 1;
+ x = 2;
+ 1
+ } + x;
+
+ // No warning if we don't read the variable...
+ x = {
+ x = 20;
+ 2
+ };
+ // ...if the assignment is in a closure...
+ let b = {
+ || {
+ x = 1;
+ };
+ 1
+ } + x;
+ // ... or the access is under an address.
+ let b = (
+ {
+ let p = &x;
+ 1
+ },
+ {
+ x = 1;
+ x
+ },
+ );
+
+ // Limitation: l-values other than simple variables don't trigger
+ // the warning.
+ let mut tup = (0, 0);
+ let c = {
+ tup.0 = 1;
+ 1
+ } + tup.0;
+ // Limitation: you can get away with a read under address-of.
+ let mut z = 0;
+ let b = (
+ &{
+ z = x;
+ x
+ },
+ {
+ x = 3;
+ x
+ },
+ );
+}
+
+async fn issue_6925() {
+ let _ = vec![async { true }.await, async { false }.await];
+}
diff --git a/src/tools/clippy/tests/ui/mixed_read_write_in_expression.stderr b/src/tools/clippy/tests/ui/mixed_read_write_in_expression.stderr
new file mode 100644
index 000000000..2e951cdbc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mixed_read_write_in_expression.stderr
@@ -0,0 +1,51 @@
+error: unsequenced read of `x`
+ --> $DIR/mixed_read_write_in_expression.rs:14:9
+ |
+LL | } + x;
+ | ^
+ |
+ = note: `-D clippy::mixed-read-write-in-expression` implied by `-D warnings`
+note: whether read occurs before this write depends on evaluation order
+ --> $DIR/mixed_read_write_in_expression.rs:12:9
+ |
+LL | x = 1;
+ | ^^^^^
+
+error: unsequenced read of `x`
+ --> $DIR/mixed_read_write_in_expression.rs:17:5
+ |
+LL | x += {
+ | ^
+ |
+note: whether read occurs before this write depends on evaluation order
+ --> $DIR/mixed_read_write_in_expression.rs:18:9
+ |
+LL | x = 20;
+ | ^^^^^^
+
+error: unsequenced read of `x`
+ --> $DIR/mixed_read_write_in_expression.rs:30:12
+ |
+LL | a: x,
+ | ^
+ |
+note: whether read occurs before this write depends on evaluation order
+ --> $DIR/mixed_read_write_in_expression.rs:32:13
+ |
+LL | x = 6;
+ | ^^^^^
+
+error: unsequenced read of `x`
+ --> $DIR/mixed_read_write_in_expression.rs:39:9
+ |
+LL | x += {
+ | ^
+ |
+note: whether read occurs before this write depends on evaluation order
+ --> $DIR/mixed_read_write_in_expression.rs:40:13
+ |
+LL | x = 20;
+ | ^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/module_inception.rs b/src/tools/clippy/tests/ui/module_inception.rs
new file mode 100644
index 000000000..a23aba916
--- /dev/null
+++ b/src/tools/clippy/tests/ui/module_inception.rs
@@ -0,0 +1,21 @@
+#![warn(clippy::module_inception)]
+
+mod foo {
+ mod bar {
+ mod bar {
+ mod foo {}
+ }
+ mod foo {}
+ }
+ mod foo {
+ mod bar {}
+ }
+}
+
+// No warning. See <https://github.com/rust-lang/rust-clippy/issues/1220>.
+mod bar {
+ #[allow(clippy::module_inception)]
+ mod bar {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/module_inception.stderr b/src/tools/clippy/tests/ui/module_inception.stderr
new file mode 100644
index 000000000..77564dce9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/module_inception.stderr
@@ -0,0 +1,20 @@
+error: module has the same name as its containing module
+ --> $DIR/module_inception.rs:5:9
+ |
+LL | / mod bar {
+LL | | mod foo {}
+LL | | }
+ | |_________^
+ |
+ = note: `-D clippy::module-inception` implied by `-D warnings`
+
+error: module has the same name as its containing module
+ --> $DIR/module_inception.rs:10:5
+ |
+LL | / mod foo {
+LL | | mod bar {}
+LL | | }
+ | |_____^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/module_name_repetitions.rs b/src/tools/clippy/tests/ui/module_name_repetitions.rs
new file mode 100644
index 000000000..ebaa77cc2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/module_name_repetitions.rs
@@ -0,0 +1,18 @@
+// compile-flags: --test
+
+#![warn(clippy::module_name_repetitions)]
+#![allow(dead_code)]
+
+mod foo {
+ pub fn foo() {}
+ pub fn foo_bar() {}
+ pub fn bar_foo() {}
+ pub struct FooCake;
+ pub enum CakeFoo {}
+ pub struct Foo7Bar;
+
+ // Should not warn
+ pub struct Foobar;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/module_name_repetitions.stderr b/src/tools/clippy/tests/ui/module_name_repetitions.stderr
new file mode 100644
index 000000000..3f343a3e4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/module_name_repetitions.stderr
@@ -0,0 +1,34 @@
+error: item name starts with its containing module's name
+ --> $DIR/module_name_repetitions.rs:8:5
+ |
+LL | pub fn foo_bar() {}
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::module-name-repetitions` implied by `-D warnings`
+
+error: item name ends with its containing module's name
+ --> $DIR/module_name_repetitions.rs:9:5
+ |
+LL | pub fn bar_foo() {}
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: item name starts with its containing module's name
+ --> $DIR/module_name_repetitions.rs:10:5
+ |
+LL | pub struct FooCake;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: item name ends with its containing module's name
+ --> $DIR/module_name_repetitions.rs:11:5
+ |
+LL | pub enum CakeFoo {}
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: item name starts with its containing module's name
+ --> $DIR/module_name_repetitions.rs:12:5
+ |
+LL | pub struct Foo7Bar;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/modulo_arithmetic_float.rs b/src/tools/clippy/tests/ui/modulo_arithmetic_float.rs
new file mode 100644
index 000000000..b1861f07c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_arithmetic_float.rs
@@ -0,0 +1,29 @@
+#![warn(clippy::modulo_arithmetic)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, clippy::modulo_one)]
+
+fn main() {
+ // Lint when both sides are const and of the opposite sign
+ -1.6 % 2.1;
+ 1.6 % -2.1;
+ (1.1 - 2.3) % (1.1 + 2.3);
+ (1.1 + 2.3) % (1.1 - 2.3);
+
+ // Lint on floating point numbers
+ let a_f32: f32 = -1.6;
+ let mut b_f32: f32 = 2.1;
+ a_f32 % b_f32;
+ b_f32 % a_f32;
+ b_f32 %= a_f32;
+
+ let a_f64: f64 = -1.6;
+ let mut b_f64: f64 = 2.1;
+ a_f64 % b_f64;
+ b_f64 % a_f64;
+ b_f64 %= a_f64;
+
+ // No lint when both sides are const and of the same sign
+ 1.6 % 2.1;
+ -1.6 % -2.1;
+ (1.1 + 2.3) % (-1.1 + 2.3);
+ (-1.1 - 2.3) % (1.1 - 2.3);
+}
diff --git a/src/tools/clippy/tests/ui/modulo_arithmetic_float.stderr b/src/tools/clippy/tests/ui/modulo_arithmetic_float.stderr
new file mode 100644
index 000000000..97844aaaa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_arithmetic_float.stderr
@@ -0,0 +1,83 @@
+error: you are using modulo operator on constants with different signs: `-1.600 % 2.100`
+ --> $DIR/modulo_arithmetic_float.rs:6:5
+ |
+LL | -1.6 % 2.1;
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::modulo-arithmetic` implied by `-D warnings`
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on constants with different signs: `1.600 % -2.100`
+ --> $DIR/modulo_arithmetic_float.rs:7:5
+ |
+LL | 1.6 % -2.1;
+ | ^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on constants with different signs: `-1.200 % 3.400`
+ --> $DIR/modulo_arithmetic_float.rs:8:5
+ |
+LL | (1.1 - 2.3) % (1.1 + 2.3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on constants with different signs: `3.400 % -1.200`
+ --> $DIR/modulo_arithmetic_float.rs:9:5
+ |
+LL | (1.1 + 2.3) % (1.1 - 2.3);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_float.rs:14:5
+ |
+LL | a_f32 % b_f32;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_float.rs:15:5
+ |
+LL | b_f32 % a_f32;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_float.rs:16:5
+ |
+LL | b_f32 %= a_f32;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_float.rs:20:5
+ |
+LL | a_f64 % b_f64;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_float.rs:21:5
+ |
+LL | b_f64 % a_f64;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_float.rs:22:5
+ |
+LL | b_f64 %= a_f64;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/modulo_arithmetic_integral.rs b/src/tools/clippy/tests/ui/modulo_arithmetic_integral.rs
new file mode 100644
index 000000000..fc1acc39e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_arithmetic_integral.rs
@@ -0,0 +1,83 @@
+#![warn(clippy::modulo_arithmetic)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, clippy::modulo_one)]
+
+fn main() {
+ // Lint on signed integral numbers
+ let a = -1;
+ let mut b = 2;
+ a % b;
+ b % a;
+ b %= a;
+
+ let a_i8: i8 = 1;
+ let mut b_i8: i8 = 2;
+ a_i8 % b_i8;
+ b_i8 %= a_i8;
+
+ let a_i16: i16 = 1;
+ let mut b_i16: i16 = 2;
+ a_i16 % b_i16;
+ b_i16 %= a_i16;
+
+ let a_i32: i32 = 1;
+ let mut b_i32: i32 = 2;
+ a_i32 % b_i32;
+ b_i32 %= a_i32;
+
+ let a_i64: i64 = 1;
+ let mut b_i64: i64 = 2;
+ a_i64 % b_i64;
+ b_i64 %= a_i64;
+
+ let a_i128: i128 = 1;
+ let mut b_i128: i128 = 2;
+ a_i128 % b_i128;
+ b_i128 %= a_i128;
+
+ let a_isize: isize = 1;
+ let mut b_isize: isize = 2;
+ a_isize % b_isize;
+ b_isize %= a_isize;
+
+ let a = 1;
+ let mut b = 2;
+ a % b;
+ b %= a;
+
+ // No lint on unsigned integral value
+ let a_u8: u8 = 17;
+ let b_u8: u8 = 3;
+ a_u8 % b_u8;
+ let mut a_u8: u8 = 1;
+ a_u8 %= 2;
+
+ let a_u16: u16 = 17;
+ let b_u16: u16 = 3;
+ a_u16 % b_u16;
+ let mut a_u16: u16 = 1;
+ a_u16 %= 2;
+
+ let a_u32: u32 = 17;
+ let b_u32: u32 = 3;
+ a_u32 % b_u32;
+ let mut a_u32: u32 = 1;
+ a_u32 %= 2;
+
+ let a_u64: u64 = 17;
+ let b_u64: u64 = 3;
+ a_u64 % b_u64;
+ let mut a_u64: u64 = 1;
+ a_u64 %= 2;
+
+ let a_u128: u128 = 17;
+ let b_u128: u128 = 3;
+ a_u128 % b_u128;
+ let mut a_u128: u128 = 1;
+ a_u128 %= 2;
+
+ let a_usize: usize = 17;
+ let b_usize: usize = 3;
+ a_usize % b_usize;
+ let mut a_usize: usize = 1;
+ a_usize %= 2;
+}
diff --git a/src/tools/clippy/tests/ui/modulo_arithmetic_integral.stderr b/src/tools/clippy/tests/ui/modulo_arithmetic_integral.stderr
new file mode 100644
index 000000000..f71adf5b0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_arithmetic_integral.stderr
@@ -0,0 +1,156 @@
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:8:5
+ |
+LL | a % b;
+ | ^^^^^
+ |
+ = note: `-D clippy::modulo-arithmetic` implied by `-D warnings`
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:9:5
+ |
+LL | b % a;
+ | ^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:10:5
+ |
+LL | b %= a;
+ | ^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:14:5
+ |
+LL | a_i8 % b_i8;
+ | ^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:15:5
+ |
+LL | b_i8 %= a_i8;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:19:5
+ |
+LL | a_i16 % b_i16;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:20:5
+ |
+LL | b_i16 %= a_i16;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:24:5
+ |
+LL | a_i32 % b_i32;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:25:5
+ |
+LL | b_i32 %= a_i32;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:29:5
+ |
+LL | a_i64 % b_i64;
+ | ^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:30:5
+ |
+LL | b_i64 %= a_i64;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:34:5
+ |
+LL | a_i128 % b_i128;
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:35:5
+ |
+LL | b_i128 %= a_i128;
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:39:5
+ |
+LL | a_isize % b_isize;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:40:5
+ |
+LL | b_isize %= a_isize;
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:44:5
+ |
+LL | a % b;
+ | ^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on types that might have different signs
+ --> $DIR/modulo_arithmetic_integral.rs:45:5
+ |
+LL | b %= a;
+ | ^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.rs b/src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.rs
new file mode 100644
index 000000000..3ebe46bc5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.rs
@@ -0,0 +1,42 @@
+#![warn(clippy::modulo_arithmetic)]
+#![allow(
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::modulo_one,
+ clippy::identity_op
+)]
+
+fn main() {
+ // Lint when both sides are const and of the opposite sign
+ -1 % 2;
+ 1 % -2;
+ (1 - 2) % (1 + 2);
+ (1 + 2) % (1 - 2);
+ 35 * (7 - 4 * 2) % (-500 * -600);
+
+ -1i8 % 2i8;
+ 1i8 % -2i8;
+ -1i16 % 2i16;
+ 1i16 % -2i16;
+ -1i32 % 2i32;
+ 1i32 % -2i32;
+ -1i64 % 2i64;
+ 1i64 % -2i64;
+ -1i128 % 2i128;
+ 1i128 % -2i128;
+ -1isize % 2isize;
+ 1isize % -2isize;
+
+ // No lint when both sides are const and of the same sign
+ 1 % 2;
+ -1 % -2;
+ (1 + 2) % (-1 + 2);
+ (-1 - 2) % (1 - 2);
+
+ 1u8 % 2u8;
+ 1u16 % 2u16;
+ 1u32 % 2u32;
+ 1u64 % 2u64;
+ 1u128 % 2u128;
+ 1usize % 2usize;
+}
diff --git a/src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.stderr b/src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.stderr
new file mode 100644
index 000000000..11b5f7746
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_arithmetic_integral_const.stderr
@@ -0,0 +1,156 @@
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:11:5
+ |
+LL | -1 % 2;
+ | ^^^^^^
+ |
+ = note: `-D clippy::modulo-arithmetic` implied by `-D warnings`
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:12:5
+ |
+LL | 1 % -2;
+ | ^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 3`
+ --> $DIR/modulo_arithmetic_integral_const.rs:13:5
+ |
+LL | (1 - 2) % (1 + 2);
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `3 % -1`
+ --> $DIR/modulo_arithmetic_integral_const.rs:14:5
+ |
+LL | (1 + 2) % (1 - 2);
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-35 % 300000`
+ --> $DIR/modulo_arithmetic_integral_const.rs:15:5
+ |
+LL | 35 * (7 - 4 * 2) % (-500 * -600);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:17:5
+ |
+LL | -1i8 % 2i8;
+ | ^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:18:5
+ |
+LL | 1i8 % -2i8;
+ | ^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:19:5
+ |
+LL | -1i16 % 2i16;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:20:5
+ |
+LL | 1i16 % -2i16;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:21:5
+ |
+LL | -1i32 % 2i32;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:22:5
+ |
+LL | 1i32 % -2i32;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:23:5
+ |
+LL | -1i64 % 2i64;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:24:5
+ |
+LL | 1i64 % -2i64;
+ | ^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:25:5
+ |
+LL | -1i128 % 2i128;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:26:5
+ |
+LL | 1i128 % -2i128;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `-1 % 2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:27:5
+ |
+LL | -1isize % 2isize;
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: you are using modulo operator on constants with different signs: `1 % -2`
+ --> $DIR/modulo_arithmetic_integral_const.rs:28:5
+ |
+LL | 1isize % -2isize;
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: double check for expected result especially when interoperating with different languages
+ = note: or consider using `rem_euclid` or similar function
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/modulo_one.rs b/src/tools/clippy/tests/ui/modulo_one.rs
new file mode 100644
index 000000000..adff08e5d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_one.rs
@@ -0,0 +1,23 @@
+#![warn(clippy::modulo_one)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, clippy::identity_op)]
+
+static STATIC_ONE: usize = 2 - 1;
+static STATIC_NEG_ONE: i64 = 1 - 2;
+
+fn main() {
+ 10 % 1;
+ 10 % -1;
+ 10 % 2;
+ i32::MIN % (-1); // also caught by rustc
+
+ const ONE: u32 = 1 * 1;
+ const NEG_ONE: i64 = 1 - 2;
+ const INT_MIN: i64 = i64::MIN;
+
+ 2 % ONE;
+ 5 % STATIC_ONE; // NOT caught by lint
+ 2 % NEG_ONE;
+ 5 % STATIC_NEG_ONE; // NOT caught by lint
+ INT_MIN % NEG_ONE; // also caught by rustc
+ INT_MIN % STATIC_NEG_ONE; // ONLY caught by rustc
+}
diff --git a/src/tools/clippy/tests/ui/modulo_one.stderr b/src/tools/clippy/tests/ui/modulo_one.stderr
new file mode 100644
index 000000000..04ecdef5e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/modulo_one.stderr
@@ -0,0 +1,60 @@
+error: this operation will panic at runtime
+ --> $DIR/modulo_one.rs:11:5
+ |
+LL | i32::MIN % (-1); // also caught by rustc
+ | ^^^^^^^^^^^^^^^ attempt to compute the remainder of `i32::MIN % -1_i32`, which would overflow
+ |
+ = note: `#[deny(unconditional_panic)]` on by default
+
+error: this operation will panic at runtime
+ --> $DIR/modulo_one.rs:21:5
+ |
+LL | INT_MIN % NEG_ONE; // also caught by rustc
+ | ^^^^^^^^^^^^^^^^^ attempt to compute the remainder of `i64::MIN % -1_i64`, which would overflow
+
+error: this operation will panic at runtime
+ --> $DIR/modulo_one.rs:22:5
+ |
+LL | INT_MIN % STATIC_NEG_ONE; // ONLY caught by rustc
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ attempt to compute the remainder of `i64::MIN % -1_i64`, which would overflow
+
+error: any number modulo 1 will be 0
+ --> $DIR/modulo_one.rs:8:5
+ |
+LL | 10 % 1;
+ | ^^^^^^
+ |
+ = note: `-D clippy::modulo-one` implied by `-D warnings`
+
+error: any number modulo -1 will panic/overflow or result in 0
+ --> $DIR/modulo_one.rs:9:5
+ |
+LL | 10 % -1;
+ | ^^^^^^^
+
+error: any number modulo -1 will panic/overflow or result in 0
+ --> $DIR/modulo_one.rs:11:5
+ |
+LL | i32::MIN % (-1); // also caught by rustc
+ | ^^^^^^^^^^^^^^^
+
+error: any number modulo 1 will be 0
+ --> $DIR/modulo_one.rs:17:5
+ |
+LL | 2 % ONE;
+ | ^^^^^^^
+
+error: any number modulo -1 will panic/overflow or result in 0
+ --> $DIR/modulo_one.rs:19:5
+ |
+LL | 2 % NEG_ONE;
+ | ^^^^^^^^^^^
+
+error: any number modulo -1 will panic/overflow or result in 0
+ --> $DIR/modulo_one.rs:21:5
+ |
+LL | INT_MIN % NEG_ONE; // also caught by rustc
+ | ^^^^^^^^^^^^^^^^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/must_use_candidates.fixed b/src/tools/clippy/tests/ui/must_use_candidates.fixed
new file mode 100644
index 000000000..04a74a009
--- /dev/null
+++ b/src/tools/clippy/tests/ui/must_use_candidates.fixed
@@ -0,0 +1,93 @@
+// run-rustfix
+#![feature(never_type)]
+#![allow(unused_mut, unused_tuple_struct_fields, clippy::redundant_allocation)]
+#![warn(clippy::must_use_candidate)]
+use std::rc::Rc;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Arc;
+
+pub struct MyAtomic(AtomicBool);
+pub struct MyPure;
+
+#[must_use] pub fn pure(i: u8) -> u8 {
+ i
+}
+
+impl MyPure {
+ #[must_use] pub fn inherent_pure(&self) -> u8 {
+ 0
+ }
+}
+
+pub trait MyPureTrait {
+ fn trait_pure(&self, i: u32) -> u32 {
+ self.trait_impl_pure(i) + 1
+ }
+
+ fn trait_impl_pure(&self, i: u32) -> u32;
+}
+
+impl MyPureTrait for MyPure {
+ fn trait_impl_pure(&self, i: u32) -> u32 {
+ i
+ }
+}
+
+pub fn without_result() {
+ // OK
+}
+
+pub fn impure_primitive(i: &mut u8) -> u8 {
+ *i
+}
+
+pub fn with_callback<F: Fn(u32) -> bool>(f: &F) -> bool {
+ f(0)
+}
+
+#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool {
+ true
+}
+
+pub fn quoth_the_raven(_more: !) -> u32 {
+ unimplemented!();
+}
+
+pub fn atomics(b: &AtomicBool) -> bool {
+ b.load(Ordering::SeqCst)
+}
+
+#[must_use] pub fn rcd(_x: Rc<u32>) -> bool {
+ true
+}
+
+pub fn rcmut(_x: Rc<&mut u32>) -> bool {
+ true
+}
+
+#[must_use] pub fn arcd(_x: Arc<u32>) -> bool {
+ false
+}
+
+pub fn inner_types(_m: &MyAtomic) -> bool {
+ true
+}
+
+static mut COUNTER: usize = 0;
+
+/// # Safety
+///
+/// Don't ever call this from multiple threads
+pub unsafe fn mutates_static() -> usize {
+ COUNTER += 1;
+ COUNTER
+}
+
+#[no_mangle]
+pub fn unmangled(i: bool) -> bool {
+ !i
+}
+
+fn main() {
+ assert_eq!(1, pure(1));
+}
diff --git a/src/tools/clippy/tests/ui/must_use_candidates.rs b/src/tools/clippy/tests/ui/must_use_candidates.rs
new file mode 100644
index 000000000..f04122f4e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/must_use_candidates.rs
@@ -0,0 +1,93 @@
+// run-rustfix
+#![feature(never_type)]
+#![allow(unused_mut, unused_tuple_struct_fields, clippy::redundant_allocation)]
+#![warn(clippy::must_use_candidate)]
+use std::rc::Rc;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Arc;
+
+pub struct MyAtomic(AtomicBool);
+pub struct MyPure;
+
+pub fn pure(i: u8) -> u8 {
+ i
+}
+
+impl MyPure {
+ pub fn inherent_pure(&self) -> u8 {
+ 0
+ }
+}
+
+pub trait MyPureTrait {
+ fn trait_pure(&self, i: u32) -> u32 {
+ self.trait_impl_pure(i) + 1
+ }
+
+ fn trait_impl_pure(&self, i: u32) -> u32;
+}
+
+impl MyPureTrait for MyPure {
+ fn trait_impl_pure(&self, i: u32) -> u32 {
+ i
+ }
+}
+
+pub fn without_result() {
+ // OK
+}
+
+pub fn impure_primitive(i: &mut u8) -> u8 {
+ *i
+}
+
+pub fn with_callback<F: Fn(u32) -> bool>(f: &F) -> bool {
+ f(0)
+}
+
+pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool {
+ true
+}
+
+pub fn quoth_the_raven(_more: !) -> u32 {
+ unimplemented!();
+}
+
+pub fn atomics(b: &AtomicBool) -> bool {
+ b.load(Ordering::SeqCst)
+}
+
+pub fn rcd(_x: Rc<u32>) -> bool {
+ true
+}
+
+pub fn rcmut(_x: Rc<&mut u32>) -> bool {
+ true
+}
+
+pub fn arcd(_x: Arc<u32>) -> bool {
+ false
+}
+
+pub fn inner_types(_m: &MyAtomic) -> bool {
+ true
+}
+
+static mut COUNTER: usize = 0;
+
+/// # Safety
+///
+/// Don't ever call this from multiple threads
+pub unsafe fn mutates_static() -> usize {
+ COUNTER += 1;
+ COUNTER
+}
+
+#[no_mangle]
+pub fn unmangled(i: bool) -> bool {
+ !i
+}
+
+fn main() {
+ assert_eq!(1, pure(1));
+}
diff --git a/src/tools/clippy/tests/ui/must_use_candidates.stderr b/src/tools/clippy/tests/ui/must_use_candidates.stderr
new file mode 100644
index 000000000..0fa3849d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/must_use_candidates.stderr
@@ -0,0 +1,34 @@
+error: this function could have a `#[must_use]` attribute
+ --> $DIR/must_use_candidates.rs:12:1
+ |
+LL | pub fn pure(i: u8) -> u8 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn pure(i: u8) -> u8`
+ |
+ = note: `-D clippy::must-use-candidate` implied by `-D warnings`
+
+error: this method could have a `#[must_use]` attribute
+ --> $DIR/must_use_candidates.rs:17:5
+ |
+LL | pub fn inherent_pure(&self) -> u8 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn inherent_pure(&self) -> u8`
+
+error: this function could have a `#[must_use]` attribute
+ --> $DIR/must_use_candidates.rs:48:1
+ |
+LL | pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool`
+
+error: this function could have a `#[must_use]` attribute
+ --> $DIR/must_use_candidates.rs:60:1
+ |
+LL | pub fn rcd(_x: Rc<u32>) -> bool {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn rcd(_x: Rc<u32>) -> bool`
+
+error: this function could have a `#[must_use]` attribute
+ --> $DIR/must_use_candidates.rs:68:1
+ |
+LL | pub fn arcd(_x: Arc<u32>) -> bool {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn arcd(_x: Arc<u32>) -> bool`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/must_use_unit.fixed b/src/tools/clippy/tests/ui/must_use_unit.fixed
new file mode 100644
index 000000000..6c9aa434a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/must_use_unit.fixed
@@ -0,0 +1,26 @@
+//run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::must_use_unit)]
+#![allow(clippy::unused_unit)]
+
+#[macro_use]
+extern crate macro_rules;
+
+
+pub fn must_use_default() {}
+
+
+pub fn must_use_unit() -> () {}
+
+
+pub fn must_use_with_note() {}
+
+fn main() {
+ must_use_default();
+ must_use_unit();
+ must_use_with_note();
+
+ // We should not lint in external macros
+ must_use_unit!();
+}
diff --git a/src/tools/clippy/tests/ui/must_use_unit.rs b/src/tools/clippy/tests/ui/must_use_unit.rs
new file mode 100644
index 000000000..8a395dc28
--- /dev/null
+++ b/src/tools/clippy/tests/ui/must_use_unit.rs
@@ -0,0 +1,26 @@
+//run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::must_use_unit)]
+#![allow(clippy::unused_unit)]
+
+#[macro_use]
+extern crate macro_rules;
+
+#[must_use]
+pub fn must_use_default() {}
+
+#[must_use]
+pub fn must_use_unit() -> () {}
+
+#[must_use = "With note"]
+pub fn must_use_with_note() {}
+
+fn main() {
+ must_use_default();
+ must_use_unit();
+ must_use_with_note();
+
+ // We should not lint in external macros
+ must_use_unit!();
+}
diff --git a/src/tools/clippy/tests/ui/must_use_unit.stderr b/src/tools/clippy/tests/ui/must_use_unit.stderr
new file mode 100644
index 000000000..15e0906b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/must_use_unit.stderr
@@ -0,0 +1,28 @@
+error: this unit-returning function has a `#[must_use]` attribute
+ --> $DIR/must_use_unit.rs:11:1
+ |
+LL | #[must_use]
+ | ----------- help: remove the attribute
+LL | pub fn must_use_default() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::must-use-unit` implied by `-D warnings`
+
+error: this unit-returning function has a `#[must_use]` attribute
+ --> $DIR/must_use_unit.rs:14:1
+ |
+LL | #[must_use]
+ | ----------- help: remove the attribute
+LL | pub fn must_use_unit() -> () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this unit-returning function has a `#[must_use]` attribute
+ --> $DIR/must_use_unit.rs:17:1
+ |
+LL | #[must_use = "With note"]
+ | ------------------------- help: remove the attribute
+LL | pub fn must_use_with_note() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mut_from_ref.rs b/src/tools/clippy/tests/ui/mut_from_ref.rs
new file mode 100644
index 000000000..370dbd588
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_from_ref.rs
@@ -0,0 +1,54 @@
+#![allow(unused)]
+#![warn(clippy::mut_from_ref)]
+
+struct Foo;
+
+impl Foo {
+ fn this_wont_hurt_a_bit(&self) -> &mut Foo {
+ unsafe { unimplemented!() }
+ }
+}
+
+trait Ouch {
+ fn ouch(x: &Foo) -> &mut Foo;
+}
+
+impl Ouch for Foo {
+ fn ouch(x: &Foo) -> &mut Foo {
+ unsafe { unimplemented!() }
+ }
+}
+
+fn fail(x: &u32) -> &mut u16 {
+ unsafe { unimplemented!() }
+}
+
+fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 {
+ unsafe { unimplemented!() }
+}
+
+fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 {
+ unsafe { unimplemented!() }
+}
+
+// this is OK, because the result borrows y
+fn works<'a>(x: &u32, y: &'a mut u32) -> &'a mut u32 {
+ unsafe { unimplemented!() }
+}
+
+// this is also OK, because the result could borrow y
+fn also_works<'a>(x: &'a u32, y: &'a mut u32) -> &'a mut u32 {
+ unsafe { unimplemented!() }
+}
+
+unsafe fn also_broken(x: &u32) -> &mut u32 {
+ unimplemented!()
+}
+
+fn without_unsafe(x: &u32) -> &mut u32 {
+ unimplemented!()
+}
+
+fn main() {
+ //TODO
+}
diff --git a/src/tools/clippy/tests/ui/mut_from_ref.stderr b/src/tools/clippy/tests/ui/mut_from_ref.stderr
new file mode 100644
index 000000000..b76d6a13f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_from_ref.stderr
@@ -0,0 +1,75 @@
+error: mutable borrow from immutable input(s)
+ --> $DIR/mut_from_ref.rs:7:39
+ |
+LL | fn this_wont_hurt_a_bit(&self) -> &mut Foo {
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::mut-from-ref` implied by `-D warnings`
+note: immutable borrow here
+ --> $DIR/mut_from_ref.rs:7:29
+ |
+LL | fn this_wont_hurt_a_bit(&self) -> &mut Foo {
+ | ^^^^^
+
+error: mutable borrow from immutable input(s)
+ --> $DIR/mut_from_ref.rs:13:25
+ |
+LL | fn ouch(x: &Foo) -> &mut Foo;
+ | ^^^^^^^^
+ |
+note: immutable borrow here
+ --> $DIR/mut_from_ref.rs:13:16
+ |
+LL | fn ouch(x: &Foo) -> &mut Foo;
+ | ^^^^
+
+error: mutable borrow from immutable input(s)
+ --> $DIR/mut_from_ref.rs:22:21
+ |
+LL | fn fail(x: &u32) -> &mut u16 {
+ | ^^^^^^^^
+ |
+note: immutable borrow here
+ --> $DIR/mut_from_ref.rs:22:12
+ |
+LL | fn fail(x: &u32) -> &mut u16 {
+ | ^^^^
+
+error: mutable borrow from immutable input(s)
+ --> $DIR/mut_from_ref.rs:26:50
+ |
+LL | fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 {
+ | ^^^^^^^^^^^
+ |
+note: immutable borrow here
+ --> $DIR/mut_from_ref.rs:26:25
+ |
+LL | fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 {
+ | ^^^^^^^
+
+error: mutable borrow from immutable input(s)
+ --> $DIR/mut_from_ref.rs:30:67
+ |
+LL | fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 {
+ | ^^^^^^^^^^^
+ |
+note: immutable borrow here
+ --> $DIR/mut_from_ref.rs:30:27
+ |
+LL | fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 {
+ | ^^^^^^^ ^^^^^^^
+
+error: mutable borrow from immutable input(s)
+ --> $DIR/mut_from_ref.rs:44:35
+ |
+LL | unsafe fn also_broken(x: &u32) -> &mut u32 {
+ | ^^^^^^^^
+ |
+note: immutable borrow here
+ --> $DIR/mut_from_ref.rs:44:26
+ |
+LL | unsafe fn also_broken(x: &u32) -> &mut u32 {
+ | ^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mut_key.rs b/src/tools/clippy/tests/ui/mut_key.rs
new file mode 100644
index 000000000..1c0ba6645
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_key.rs
@@ -0,0 +1,85 @@
+use std::cell::Cell;
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::hash::{Hash, Hasher};
+use std::rc::Rc;
+use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
+use std::sync::Arc;
+
+struct Key(AtomicUsize);
+
+impl Clone for Key {
+ fn clone(&self) -> Self {
+ Key(AtomicUsize::new(self.0.load(Relaxed)))
+ }
+}
+
+impl PartialEq for Key {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.load(Relaxed) == other.0.load(Relaxed)
+ }
+}
+
+impl Eq for Key {}
+
+impl Hash for Key {
+ fn hash<H: Hasher>(&self, h: &mut H) {
+ self.0.load(Relaxed).hash(h);
+ }
+}
+
+fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> {
+ let _other: HashMap<Key, bool> = HashMap::new();
+ m.keys().cloned().collect()
+}
+
+fn this_is_ok(_m: &mut HashMap<usize, Key>) {}
+
+// Raw pointers are hashed by the address they point to, so it doesn't matter if they point to a
+// type with interior mutability. See:
+// - clippy issue: https://github.com/rust-lang/rust-clippy/issues/6745
+// - std lib: https://github.com/rust-lang/rust/blob/1.54.0/library/core/src/hash/mod.rs#L717-L736
+// So these are OK:
+fn raw_ptr_is_ok(_m: &mut HashMap<*const Key, ()>) {}
+fn raw_mut_ptr_is_ok(_m: &mut HashMap<*mut Key, ()>) {}
+
+#[allow(unused)]
+trait Trait {
+ type AssociatedType;
+
+ fn trait_fn(&self, set: HashSet<Self::AssociatedType>);
+}
+
+fn generics_are_ok_too<K>(_m: &mut HashSet<K>) {
+ // nothing to see here, move along
+}
+
+fn tuples<U>(_m: &mut HashMap<((), U), ()>) {}
+
+fn tuples_bad<U>(_m: &mut HashMap<(Key, U), bool>) {}
+
+fn main() {
+ let _ = should_not_take_this_arg(&mut HashMap::new(), 1);
+ this_is_ok(&mut HashMap::new());
+ tuples::<Key>(&mut HashMap::new());
+ tuples::<()>(&mut HashMap::new());
+ tuples_bad::<()>(&mut HashMap::new());
+
+ raw_ptr_is_ok(&mut HashMap::new());
+ raw_mut_ptr_is_ok(&mut HashMap::new());
+
+ let _map = HashMap::<Cell<usize>, usize>::new();
+ let _map = HashMap::<&mut Cell<usize>, usize>::new();
+ let _map = HashMap::<&mut usize, usize>::new();
+ // Collection types from `std` whose impl of `Hash` or `Ord` delegates to their type parameters
+ let _map = HashMap::<Vec<Cell<usize>>, usize>::new();
+ let _map = HashMap::<BTreeMap<Cell<usize>, ()>, usize>::new();
+ let _map = HashMap::<BTreeMap<(), Cell<usize>>, usize>::new();
+ let _map = HashMap::<BTreeSet<Cell<usize>>, usize>::new();
+ let _map = HashMap::<Option<Cell<usize>>, usize>::new();
+ let _map = HashMap::<Option<Vec<Cell<usize>>>, usize>::new();
+ let _map = HashMap::<Result<&mut usize, ()>, usize>::new();
+ // Smart pointers from `std` whose impl of `Hash` or `Ord` delegates to their type parameters
+ let _map = HashMap::<Box<Cell<usize>>, usize>::new();
+ let _map = HashMap::<Rc<Cell<usize>>, usize>::new();
+ let _map = HashMap::<Arc<Cell<usize>>, usize>::new();
+}
diff --git a/src/tools/clippy/tests/ui/mut_key.stderr b/src/tools/clippy/tests/ui/mut_key.stderr
new file mode 100644
index 000000000..25dd029b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_key.stderr
@@ -0,0 +1,106 @@
+error: mutable key type
+ --> $DIR/mut_key.rs:30:32
+ |
+LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::mutable-key-type` implied by `-D warnings`
+
+error: mutable key type
+ --> $DIR/mut_key.rs:30:72
+ |
+LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> {
+ | ^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:31:5
+ |
+LL | let _other: HashMap<Key, bool> = HashMap::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:58:22
+ |
+LL | fn tuples_bad<U>(_m: &mut HashMap<(Key, U), bool>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:70:5
+ |
+LL | let _map = HashMap::<Cell<usize>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:71:5
+ |
+LL | let _map = HashMap::<&mut Cell<usize>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:72:5
+ |
+LL | let _map = HashMap::<&mut usize, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:74:5
+ |
+LL | let _map = HashMap::<Vec<Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:75:5
+ |
+LL | let _map = HashMap::<BTreeMap<Cell<usize>, ()>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:76:5
+ |
+LL | let _map = HashMap::<BTreeMap<(), Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:77:5
+ |
+LL | let _map = HashMap::<BTreeSet<Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:78:5
+ |
+LL | let _map = HashMap::<Option<Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:79:5
+ |
+LL | let _map = HashMap::<Option<Vec<Cell<usize>>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:80:5
+ |
+LL | let _map = HashMap::<Result<&mut usize, ()>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:82:5
+ |
+LL | let _map = HashMap::<Box<Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:83:5
+ |
+LL | let _map = HashMap::<Rc<Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: mutable key type
+ --> $DIR/mut_key.rs:84:5
+ |
+LL | let _map = HashMap::<Arc<Cell<usize>>, usize>::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mut_mut.rs b/src/tools/clippy/tests/ui/mut_mut.rs
new file mode 100644
index 000000000..be854d941
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_mut.rs
@@ -0,0 +1,59 @@
+// aux-build:macro_rules.rs
+
+#![allow(unused, clippy::no_effect, clippy::unnecessary_operation)]
+#![warn(clippy::mut_mut)]
+
+#[macro_use]
+extern crate macro_rules;
+
+fn fun(x: &mut &mut u32) -> bool {
+ **x > 0
+}
+
+fn less_fun(x: *mut *mut u32) {
+ let y = x;
+}
+
+macro_rules! mut_ptr {
+ ($p:expr) => {
+ &mut $p
+ };
+}
+
+#[allow(unused_mut, unused_variables)]
+fn main() {
+ let mut x = &mut &mut 1u32;
+ {
+ let mut y = &mut x;
+ }
+
+ if fun(x) {
+ let y: &mut &mut u32 = &mut &mut 2;
+ **y + **x;
+ }
+
+ if fun(x) {
+ let y: &mut &mut &mut u32 = &mut &mut &mut 2;
+ ***y + **x;
+ }
+
+ let mut z = mut_ptr!(&mut 3u32);
+}
+
+fn issue939() {
+ let array = [5, 6, 7, 8, 9];
+ let mut args = array.iter().skip(2);
+ for &arg in &mut args {
+ println!("{}", arg);
+ }
+
+ let args = &mut args;
+ for arg in args {
+ println!(":{}", arg);
+ }
+}
+
+fn issue6922() {
+ // do not lint from an external macro
+ mut_mut!();
+}
diff --git a/src/tools/clippy/tests/ui/mut_mut.stderr b/src/tools/clippy/tests/ui/mut_mut.stderr
new file mode 100644
index 000000000..6820a85aa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_mut.stderr
@@ -0,0 +1,63 @@
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:9:11
+ |
+LL | fn fun(x: &mut &mut u32) -> bool {
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::mut-mut` implied by `-D warnings`
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:25:17
+ |
+LL | let mut x = &mut &mut 1u32;
+ | ^^^^^^^^^^^^^^
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:19:9
+ |
+LL | &mut $p
+ | ^^^^^^^
+...
+LL | let mut z = mut_ptr!(&mut 3u32);
+ | ------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `mut_ptr` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this expression mutably borrows a mutable reference. Consider reborrowing
+ --> $DIR/mut_mut.rs:27:21
+ |
+LL | let mut y = &mut x;
+ | ^^^^^^
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:31:32
+ |
+LL | let y: &mut &mut u32 = &mut &mut 2;
+ | ^^^^^^^^^^^
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:31:16
+ |
+LL | let y: &mut &mut u32 = &mut &mut 2;
+ | ^^^^^^^^^^^^^
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:36:37
+ |
+LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
+ | ^^^^^^^^^^^^^^^^
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:36:16
+ |
+LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: generally you want to avoid `&mut &mut _` if possible
+ --> $DIR/mut_mut.rs:36:21
+ |
+LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
+ | ^^^^^^^^^^^^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mut_mutex_lock.fixed b/src/tools/clippy/tests/ui/mut_mutex_lock.fixed
new file mode 100644
index 000000000..36bc52e33
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_mutex_lock.fixed
@@ -0,0 +1,21 @@
+// run-rustfix
+#![allow(dead_code, unused_mut)]
+#![warn(clippy::mut_mutex_lock)]
+
+use std::sync::{Arc, Mutex};
+
+fn mut_mutex_lock() {
+ let mut value_rc = Arc::new(Mutex::new(42_u8));
+ let value_mutex = Arc::get_mut(&mut value_rc).unwrap();
+
+ let mut value = value_mutex.get_mut().unwrap();
+ *value += 1;
+}
+
+fn no_owned_mutex_lock() {
+ let mut value_rc = Arc::new(Mutex::new(42_u8));
+ let mut value = value_rc.lock().unwrap();
+ *value += 1;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/mut_mutex_lock.rs b/src/tools/clippy/tests/ui/mut_mutex_lock.rs
new file mode 100644
index 000000000..ea60df5ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_mutex_lock.rs
@@ -0,0 +1,21 @@
+// run-rustfix
+#![allow(dead_code, unused_mut)]
+#![warn(clippy::mut_mutex_lock)]
+
+use std::sync::{Arc, Mutex};
+
+fn mut_mutex_lock() {
+ let mut value_rc = Arc::new(Mutex::new(42_u8));
+ let value_mutex = Arc::get_mut(&mut value_rc).unwrap();
+
+ let mut value = value_mutex.lock().unwrap();
+ *value += 1;
+}
+
+fn no_owned_mutex_lock() {
+ let mut value_rc = Arc::new(Mutex::new(42_u8));
+ let mut value = value_rc.lock().unwrap();
+ *value += 1;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/mut_mutex_lock.stderr b/src/tools/clippy/tests/ui/mut_mutex_lock.stderr
new file mode 100644
index 000000000..21c1b3486
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_mutex_lock.stderr
@@ -0,0 +1,10 @@
+error: calling `&mut Mutex::lock` unnecessarily locks an exclusive (mutable) reference
+ --> $DIR/mut_mutex_lock.rs:11:33
+ |
+LL | let mut value = value_mutex.lock().unwrap();
+ | ^^^^ help: change this to: `get_mut`
+ |
+ = note: `-D clippy::mut-mutex-lock` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/mut_range_bound.rs b/src/tools/clippy/tests/ui/mut_range_bound.rs
new file mode 100644
index 000000000..e1ae1ef92
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_range_bound.rs
@@ -0,0 +1,84 @@
+#![allow(unused)]
+
+fn main() {}
+
+fn mut_range_bound_upper() {
+ let mut m = 4;
+ for i in 0..m {
+ m = 5;
+ } // warning
+}
+
+fn mut_range_bound_lower() {
+ let mut m = 4;
+ for i in m..10 {
+ m *= 2;
+ } // warning
+}
+
+fn mut_range_bound_both() {
+ let mut m = 4;
+ let mut n = 6;
+ for i in m..n {
+ m = 5;
+ n = 7;
+ } // warning (1 for each mutated bound)
+}
+
+fn mut_range_bound_no_mutation() {
+ let mut m = 4;
+ for i in 0..m {
+ continue;
+ } // no warning
+}
+
+fn mut_borrow_range_bound() {
+ let mut m = 4;
+ for i in 0..m {
+ let n = &mut m; // warning
+ *n += 1;
+ }
+}
+
+fn immut_borrow_range_bound() {
+ let mut m = 4;
+ for i in 0..m {
+ let n = &m; // should be no warning?
+ }
+}
+
+fn immut_range_bound() {
+ let m = 4;
+ for i in 0..m {
+ continue;
+ } // no warning
+}
+
+fn mut_range_bound_break() {
+ let mut m = 4;
+ for i in 0..m {
+ if m == 4 {
+ m = 5; // no warning because of immediate break
+ break;
+ }
+ }
+}
+
+fn mut_range_bound_no_immediate_break() {
+ let mut m = 4;
+ for i in 0..m {
+ m = 2; // warning because it is not immediately followed by break
+ if m == 4 {
+ break;
+ }
+ }
+
+ let mut n = 3;
+ for i in n..10 {
+ if n == 4 {
+ n = 1; // FIXME: warning because it is not immediately followed by break
+ let _ = 2;
+ break;
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/mut_range_bound.stderr b/src/tools/clippy/tests/ui/mut_range_bound.stderr
new file mode 100644
index 000000000..4b5a3fc1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_range_bound.stderr
@@ -0,0 +1,59 @@
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:8:9
+ |
+LL | m = 5;
+ | ^
+ |
+ = note: `-D clippy::mut-range-bound` implied by `-D warnings`
+ = note: the range of the loop is unchanged
+
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:15:9
+ |
+LL | m *= 2;
+ | ^
+ |
+ = note: the range of the loop is unchanged
+
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:23:9
+ |
+LL | m = 5;
+ | ^
+ |
+ = note: the range of the loop is unchanged
+
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:24:9
+ |
+LL | n = 7;
+ | ^
+ |
+ = note: the range of the loop is unchanged
+
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:38:22
+ |
+LL | let n = &mut m; // warning
+ | ^
+ |
+ = note: the range of the loop is unchanged
+
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:70:9
+ |
+LL | m = 2; // warning because it is not immediately followed by break
+ | ^
+ |
+ = note: the range of the loop is unchanged
+
+error: attempt to mutate range bound within loop
+ --> $DIR/mut_range_bound.rs:79:13
+ |
+LL | n = 1; // FIXME: warning because it is not immediately followed by break
+ | ^
+ |
+ = note: the range of the loop is unchanged
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mut_reference.rs b/src/tools/clippy/tests/ui/mut_reference.rs
new file mode 100644
index 000000000..73906121c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_reference.rs
@@ -0,0 +1,43 @@
+#![allow(unused_variables)]
+
+fn takes_an_immutable_reference(a: &i32) {}
+fn takes_a_mutable_reference(a: &mut i32) {}
+
+struct MyStruct;
+
+impl MyStruct {
+ fn takes_an_immutable_reference(&self, a: &i32) {}
+
+ fn takes_a_mutable_reference(&self, a: &mut i32) {}
+}
+
+#[warn(clippy::unnecessary_mut_passed)]
+fn main() {
+ // Functions
+ takes_an_immutable_reference(&mut 42);
+ let as_ptr: fn(&i32) = takes_an_immutable_reference;
+ as_ptr(&mut 42);
+
+ // Methods
+ let my_struct = MyStruct;
+ my_struct.takes_an_immutable_reference(&mut 42);
+
+ // No error
+
+ // Functions
+ takes_an_immutable_reference(&42);
+ let as_ptr: fn(&i32) = takes_an_immutable_reference;
+ as_ptr(&42);
+
+ takes_a_mutable_reference(&mut 42);
+ let as_ptr: fn(&mut i32) = takes_a_mutable_reference;
+ as_ptr(&mut 42);
+
+ let a = &mut 42;
+ takes_an_immutable_reference(a);
+
+ // Methods
+ my_struct.takes_an_immutable_reference(&42);
+ my_struct.takes_a_mutable_reference(&mut 42);
+ my_struct.takes_an_immutable_reference(a);
+}
diff --git a/src/tools/clippy/tests/ui/mut_reference.stderr b/src/tools/clippy/tests/ui/mut_reference.stderr
new file mode 100644
index 000000000..062d30b26
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mut_reference.stderr
@@ -0,0 +1,22 @@
+error: the function `takes_an_immutable_reference` doesn't need a mutable reference
+ --> $DIR/mut_reference.rs:17:34
+ |
+LL | takes_an_immutable_reference(&mut 42);
+ | ^^^^^^^
+ |
+ = note: `-D clippy::unnecessary-mut-passed` implied by `-D warnings`
+
+error: the function `as_ptr` doesn't need a mutable reference
+ --> $DIR/mut_reference.rs:19:12
+ |
+LL | as_ptr(&mut 42);
+ | ^^^^^^^
+
+error: the method `takes_an_immutable_reference` doesn't need a mutable reference
+ --> $DIR/mut_reference.rs:23:44
+ |
+LL | my_struct.takes_an_immutable_reference(&mut 42);
+ | ^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/mutex_atomic.rs b/src/tools/clippy/tests/ui/mutex_atomic.rs
new file mode 100644
index 000000000..47b3dad39
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mutex_atomic.rs
@@ -0,0 +1,17 @@
+#![warn(clippy::all)]
+#![warn(clippy::mutex_integer)]
+#![warn(clippy::mutex_atomic)]
+#![allow(clippy::borrow_as_ptr)]
+
+fn main() {
+ use std::sync::Mutex;
+ Mutex::new(true);
+ Mutex::new(5usize);
+ Mutex::new(9isize);
+ let mut x = 4u32;
+ Mutex::new(&x as *const u32);
+ Mutex::new(&mut x as *mut u32);
+ Mutex::new(0u32);
+ Mutex::new(0i32);
+ Mutex::new(0f32); // there are no float atomics, so this should not lint
+}
diff --git a/src/tools/clippy/tests/ui/mutex_atomic.stderr b/src/tools/clippy/tests/ui/mutex_atomic.stderr
new file mode 100644
index 000000000..262028a87
--- /dev/null
+++ b/src/tools/clippy/tests/ui/mutex_atomic.stderr
@@ -0,0 +1,48 @@
+error: consider using an `AtomicBool` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:8:5
+ |
+LL | Mutex::new(true);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::mutex-atomic` implied by `-D warnings`
+
+error: consider using an `AtomicUsize` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:9:5
+ |
+LL | Mutex::new(5usize);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using an `AtomicIsize` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:10:5
+ |
+LL | Mutex::new(9isize);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using an `AtomicPtr` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:12:5
+ |
+LL | Mutex::new(&x as *const u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: consider using an `AtomicPtr` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:13:5
+ |
+LL | Mutex::new(&mut x as *mut u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: consider using an `AtomicUsize` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:14:5
+ |
+LL | Mutex::new(0u32);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::mutex-integer` implied by `-D warnings`
+
+error: consider using an `AtomicIsize` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>`
+ --> $DIR/mutex_atomic.rs:15:5
+ |
+LL | Mutex::new(0i32);
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_arbitrary_self_type.fixed b/src/tools/clippy/tests/ui/needless_arbitrary_self_type.fixed
new file mode 100644
index 000000000..9da21eb6b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_arbitrary_self_type.fixed
@@ -0,0 +1,69 @@
+// run-rustfix
+
+#![warn(clippy::needless_arbitrary_self_type)]
+#![allow(unused_mut, clippy::needless_lifetimes)]
+
+pub enum ValType {
+ A,
+ B,
+}
+
+impl ValType {
+ pub fn bad(self) {
+ unimplemented!();
+ }
+
+ pub fn good(self) {
+ unimplemented!();
+ }
+
+ pub fn mut_bad(mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_good(mut self) {
+ unimplemented!();
+ }
+
+ pub fn ref_bad(&self) {
+ unimplemented!();
+ }
+
+ pub fn ref_good(&self) {
+ unimplemented!();
+ }
+
+ pub fn ref_bad_with_lifetime<'a>(&'a self) {
+ unimplemented!();
+ }
+
+ pub fn ref_good_with_lifetime<'a>(&'a self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_bad(&mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_good(&mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_bad_with_lifetime<'a>(&'a mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_good_with_lifetime<'a>(&'a mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_mut_good(mut self: &mut Self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_mut_ref_good(self: &&mut &mut Self) {
+ unimplemented!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_arbitrary_self_type.rs b/src/tools/clippy/tests/ui/needless_arbitrary_self_type.rs
new file mode 100644
index 000000000..17aeaaf97
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_arbitrary_self_type.rs
@@ -0,0 +1,69 @@
+// run-rustfix
+
+#![warn(clippy::needless_arbitrary_self_type)]
+#![allow(unused_mut, clippy::needless_lifetimes)]
+
+pub enum ValType {
+ A,
+ B,
+}
+
+impl ValType {
+ pub fn bad(self: Self) {
+ unimplemented!();
+ }
+
+ pub fn good(self) {
+ unimplemented!();
+ }
+
+ pub fn mut_bad(mut self: Self) {
+ unimplemented!();
+ }
+
+ pub fn mut_good(mut self) {
+ unimplemented!();
+ }
+
+ pub fn ref_bad(self: &Self) {
+ unimplemented!();
+ }
+
+ pub fn ref_good(&self) {
+ unimplemented!();
+ }
+
+ pub fn ref_bad_with_lifetime<'a>(self: &'a Self) {
+ unimplemented!();
+ }
+
+ pub fn ref_good_with_lifetime<'a>(&'a self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_bad(self: &mut Self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_good(&mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_bad_with_lifetime<'a>(self: &'a mut Self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_good_with_lifetime<'a>(&'a mut self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_mut_good(mut self: &mut Self) {
+ unimplemented!();
+ }
+
+ pub fn mut_ref_mut_ref_good(self: &&mut &mut Self) {
+ unimplemented!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_arbitrary_self_type.stderr b/src/tools/clippy/tests/ui/needless_arbitrary_self_type.stderr
new file mode 100644
index 000000000..f4c645d35
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_arbitrary_self_type.stderr
@@ -0,0 +1,40 @@
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type.rs:12:16
+ |
+LL | pub fn bad(self: Self) {
+ | ^^^^^^^^^^ help: consider to change this parameter to: `self`
+ |
+ = note: `-D clippy::needless-arbitrary-self-type` implied by `-D warnings`
+
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type.rs:20:20
+ |
+LL | pub fn mut_bad(mut self: Self) {
+ | ^^^^^^^^^^^^^^ help: consider to change this parameter to: `mut self`
+
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type.rs:28:20
+ |
+LL | pub fn ref_bad(self: &Self) {
+ | ^^^^^^^^^^^ help: consider to change this parameter to: `&self`
+
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type.rs:36:38
+ |
+LL | pub fn ref_bad_with_lifetime<'a>(self: &'a Self) {
+ | ^^^^^^^^^^^^^^ help: consider to change this parameter to: `&'a self`
+
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type.rs:44:24
+ |
+LL | pub fn mut_ref_bad(self: &mut Self) {
+ | ^^^^^^^^^^^^^^^ help: consider to change this parameter to: `&mut self`
+
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type.rs:52:42
+ |
+LL | pub fn mut_ref_bad_with_lifetime<'a>(self: &'a mut Self) {
+ | ^^^^^^^^^^^^^^^^^^ help: consider to change this parameter to: `&'a mut self`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.rs b/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.rs
new file mode 100644
index 000000000..02b43cce2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.rs
@@ -0,0 +1,46 @@
+// aux-build:proc_macro_attr.rs
+
+#![warn(clippy::needless_arbitrary_self_type)]
+
+#[macro_use]
+extern crate proc_macro_attr;
+
+mod issue_6089 {
+ // Check that we don't lint if the `self` parameter comes from expansion
+
+ macro_rules! test_from_expansion {
+ () => {
+ trait T1 {
+ fn test(self: &Self);
+ }
+
+ struct S1;
+
+ impl T1 for S1 {
+ fn test(self: &Self) {}
+ }
+ };
+ }
+
+ test_from_expansion!();
+
+ // If only the lifetime name comes from expansion we will lint, but the suggestion will have
+ // placeholders and will not be applied automatically, as we can't reliably know the original name.
+ // This specific case happened with async_trait.
+
+ trait T2 {
+ fn call_with_mut_self(&mut self);
+ }
+
+ struct S2;
+
+ // The method's signature will be expanded to:
+ // fn call_with_mut_self<'life0>(self: &'life0 mut Self) {}
+ #[rename_my_lifetimes]
+ impl T2 for S2 {
+ #[allow(clippy::needless_lifetimes)]
+ fn call_with_mut_self(self: &mut Self) {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr b/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr
new file mode 100644
index 000000000..b2edbfe43
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_arbitrary_self_type_unfixable.stderr
@@ -0,0 +1,10 @@
+error: the type of the `self` parameter does not need to be arbitrary
+ --> $DIR/needless_arbitrary_self_type_unfixable.rs:42:31
+ |
+LL | fn call_with_mut_self(self: &mut Self) {}
+ | ^^^^^^^^^^^^^^^ help: consider to change this parameter to: `&'_ mut self`
+ |
+ = note: `-D clippy::needless-arbitrary-self-type` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/needless_bitwise_bool.fixed b/src/tools/clippy/tests/ui/needless_bitwise_bool.fixed
new file mode 100644
index 000000000..5e1ea663a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bitwise_bool.fixed
@@ -0,0 +1,40 @@
+// run-rustfix
+
+#![warn(clippy::needless_bitwise_bool)]
+
+fn returns_bool() -> bool {
+ true
+}
+
+const fn const_returns_bool() -> bool {
+ false
+}
+
+fn main() {
+ let (x, y) = (false, true);
+ if x & y {
+ println!("true")
+ }
+ if returns_bool() & x {
+ println!("true")
+ }
+ if !returns_bool() & returns_bool() {
+ println!("true")
+ }
+ if y && !x {
+ println!("true")
+ }
+
+ // BELOW: lints we hope to catch as `Expr::can_have_side_effects` improves.
+ if y & !const_returns_bool() {
+ println!("true") // This is a const function, in an UnOp
+ }
+
+ if y & "abcD".is_empty() {
+ println!("true") // This is a const method call
+ }
+
+ if y & (0 < 1) {
+ println!("true") // This is a BinOp with no side effects
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_bitwise_bool.rs b/src/tools/clippy/tests/ui/needless_bitwise_bool.rs
new file mode 100644
index 000000000..f3075fba0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bitwise_bool.rs
@@ -0,0 +1,40 @@
+// run-rustfix
+
+#![warn(clippy::needless_bitwise_bool)]
+
+fn returns_bool() -> bool {
+ true
+}
+
+const fn const_returns_bool() -> bool {
+ false
+}
+
+fn main() {
+ let (x, y) = (false, true);
+ if x & y {
+ println!("true")
+ }
+ if returns_bool() & x {
+ println!("true")
+ }
+ if !returns_bool() & returns_bool() {
+ println!("true")
+ }
+ if y & !x {
+ println!("true")
+ }
+
+ // BELOW: lints we hope to catch as `Expr::can_have_side_effects` improves.
+ if y & !const_returns_bool() {
+ println!("true") // This is a const function, in an UnOp
+ }
+
+ if y & "abcD".is_empty() {
+ println!("true") // This is a const method call
+ }
+
+ if y & (0 < 1) {
+ println!("true") // This is a BinOp with no side effects
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr b/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr
new file mode 100644
index 000000000..63c88ef63
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bitwise_bool.stderr
@@ -0,0 +1,10 @@
+error: use of bitwise operator instead of lazy operator between booleans
+ --> $DIR/needless_bitwise_bool.rs:24:8
+ |
+LL | if y & !x {
+ | ^^^^^^ help: try: `y && !x`
+ |
+ = note: `-D clippy::needless-bitwise-bool` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/needless_bool/fixable.fixed b/src/tools/clippy/tests/ui/needless_bool/fixable.fixed
new file mode 100644
index 000000000..89dc13fd5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bool/fixable.fixed
@@ -0,0 +1,126 @@
+// run-rustfix
+
+#![warn(clippy::needless_bool)]
+#![allow(
+ unused,
+ dead_code,
+ clippy::no_effect,
+ clippy::if_same_then_else,
+ clippy::equatable_if_let,
+ clippy::needless_return,
+ clippy::self_named_constructors
+)]
+
+use std::cell::Cell;
+
+macro_rules! bool_comparison_trigger {
+ ($($i:ident: $def:expr, $stb:expr );+ $(;)*) => (
+
+ #[derive(Clone)]
+ pub struct Trigger {
+ $($i: (Cell<bool>, bool, bool)),+
+ }
+
+ #[allow(dead_code)]
+ impl Trigger {
+ pub fn trigger(&self, key: &str) -> bool {
+ $(
+ if let stringify!($i) = key {
+ return self.$i.1 && self.$i.2 == $def;
+ }
+ )+
+ false
+ }
+ }
+ )
+}
+
+fn main() {
+ let x = true;
+ let y = false;
+ x;
+ !x;
+ !(x && y);
+ let a = 0;
+ let b = 1;
+
+ a != b;
+ a == b;
+ a >= b;
+ a > b;
+ a <= b;
+ a < b;
+ if x {
+ x
+ } else {
+ false
+ }; // would also be questionable, but we don't catch this yet
+ bool_ret3(x);
+ bool_ret4(x);
+ bool_ret5(x, x);
+ bool_ret6(x, x);
+ needless_bool(x);
+ needless_bool2(x);
+ needless_bool3(x);
+ needless_bool_condition();
+}
+
+fn bool_ret3(x: bool) -> bool {
+ return x;
+}
+
+fn bool_ret4(x: bool) -> bool {
+ return !x;
+}
+
+fn bool_ret5(x: bool, y: bool) -> bool {
+ return x && y;
+}
+
+fn bool_ret6(x: bool, y: bool) -> bool {
+ return !(x && y);
+}
+
+fn needless_bool(x: bool) {
+ if x {};
+}
+
+fn needless_bool2(x: bool) {
+ if !x {};
+}
+
+fn needless_bool3(x: bool) {
+ bool_comparison_trigger! {
+ test_one: false, false;
+ test_three: false, false;
+ test_two: true, true;
+ }
+
+ if x {};
+ if !x {};
+}
+
+fn needless_bool_in_the_suggestion_wraps_the_predicate_of_if_else_statement_in_brackets() {
+ let b = false;
+ let returns_bool = || false;
+
+ let x = if b {
+ true
+ } else { !returns_bool() };
+}
+
+unsafe fn no(v: u8) -> u8 {
+ v
+}
+
+#[allow(clippy::unnecessary_operation)]
+fn needless_bool_condition() -> bool {
+ (unsafe { no(4) } & 1 != 0);
+ let _brackets_unneeded = unsafe { no(4) } & 1 != 0;
+ fn foo() -> bool {
+ // parentheses are needed here
+ (unsafe { no(4) } & 1 != 0)
+ }
+
+ foo()
+}
diff --git a/src/tools/clippy/tests/ui/needless_bool/fixable.rs b/src/tools/clippy/tests/ui/needless_bool/fixable.rs
new file mode 100644
index 000000000..c11d9472e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bool/fixable.rs
@@ -0,0 +1,186 @@
+// run-rustfix
+
+#![warn(clippy::needless_bool)]
+#![allow(
+ unused,
+ dead_code,
+ clippy::no_effect,
+ clippy::if_same_then_else,
+ clippy::equatable_if_let,
+ clippy::needless_return,
+ clippy::self_named_constructors
+)]
+
+use std::cell::Cell;
+
+macro_rules! bool_comparison_trigger {
+ ($($i:ident: $def:expr, $stb:expr );+ $(;)*) => (
+
+ #[derive(Clone)]
+ pub struct Trigger {
+ $($i: (Cell<bool>, bool, bool)),+
+ }
+
+ #[allow(dead_code)]
+ impl Trigger {
+ pub fn trigger(&self, key: &str) -> bool {
+ $(
+ if let stringify!($i) = key {
+ return self.$i.1 && self.$i.2 == $def;
+ }
+ )+
+ false
+ }
+ }
+ )
+}
+
+fn main() {
+ let x = true;
+ let y = false;
+ if x {
+ true
+ } else {
+ false
+ };
+ if x {
+ false
+ } else {
+ true
+ };
+ if x && y {
+ false
+ } else {
+ true
+ };
+ let a = 0;
+ let b = 1;
+
+ if a == b {
+ false
+ } else {
+ true
+ };
+ if a != b {
+ false
+ } else {
+ true
+ };
+ if a < b {
+ false
+ } else {
+ true
+ };
+ if a <= b {
+ false
+ } else {
+ true
+ };
+ if a > b {
+ false
+ } else {
+ true
+ };
+ if a >= b {
+ false
+ } else {
+ true
+ };
+ if x {
+ x
+ } else {
+ false
+ }; // would also be questionable, but we don't catch this yet
+ bool_ret3(x);
+ bool_ret4(x);
+ bool_ret5(x, x);
+ bool_ret6(x, x);
+ needless_bool(x);
+ needless_bool2(x);
+ needless_bool3(x);
+ needless_bool_condition();
+}
+
+fn bool_ret3(x: bool) -> bool {
+ if x {
+ return true;
+ } else {
+ return false;
+ };
+}
+
+fn bool_ret4(x: bool) -> bool {
+ if x {
+ return false;
+ } else {
+ return true;
+ };
+}
+
+fn bool_ret5(x: bool, y: bool) -> bool {
+ if x && y {
+ return true;
+ } else {
+ return false;
+ };
+}
+
+fn bool_ret6(x: bool, y: bool) -> bool {
+ if x && y {
+ return false;
+ } else {
+ return true;
+ };
+}
+
+fn needless_bool(x: bool) {
+ if x == true {};
+}
+
+fn needless_bool2(x: bool) {
+ if x == false {};
+}
+
+fn needless_bool3(x: bool) {
+ bool_comparison_trigger! {
+ test_one: false, false;
+ test_three: false, false;
+ test_two: true, true;
+ }
+
+ if x == true {};
+ if x == false {};
+}
+
+fn needless_bool_in_the_suggestion_wraps_the_predicate_of_if_else_statement_in_brackets() {
+ let b = false;
+ let returns_bool = || false;
+
+ let x = if b {
+ true
+ } else if returns_bool() {
+ false
+ } else {
+ true
+ };
+}
+
+unsafe fn no(v: u8) -> u8 {
+ v
+}
+
+#[allow(clippy::unnecessary_operation)]
+fn needless_bool_condition() -> bool {
+ if unsafe { no(4) } & 1 != 0 {
+ true
+ } else {
+ false
+ };
+ let _brackets_unneeded = if unsafe { no(4) } & 1 != 0 { true } else { false };
+ fn foo() -> bool {
+ // parentheses are needed here
+ if unsafe { no(4) } & 1 != 0 { true } else { false }
+ }
+
+ foo()
+}
diff --git a/src/tools/clippy/tests/ui/needless_bool/fixable.stderr b/src/tools/clippy/tests/ui/needless_bool/fixable.stderr
new file mode 100644
index 000000000..d2c48376f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bool/fixable.stderr
@@ -0,0 +1,193 @@
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:41:5
+ |
+LL | / if x {
+LL | | true
+LL | | } else {
+LL | | false
+LL | | };
+ | |_____^ help: you can reduce it to: `x`
+ |
+ = note: `-D clippy::needless-bool` implied by `-D warnings`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:46:5
+ |
+LL | / if x {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `!x`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:51:5
+ |
+LL | / if x && y {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `!(x && y)`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:59:5
+ |
+LL | / if a == b {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `a != b`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:64:5
+ |
+LL | / if a != b {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `a == b`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:69:5
+ |
+LL | / if a < b {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `a >= b`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:74:5
+ |
+LL | / if a <= b {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `a > b`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:79:5
+ |
+LL | / if a > b {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `a <= b`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:84:5
+ |
+LL | / if a >= b {
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `a < b`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:105:5
+ |
+LL | / if x {
+LL | | return true;
+LL | | } else {
+LL | | return false;
+LL | | };
+ | |_____^ help: you can reduce it to: `return x`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:113:5
+ |
+LL | / if x {
+LL | | return false;
+LL | | } else {
+LL | | return true;
+LL | | };
+ | |_____^ help: you can reduce it to: `return !x`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:121:5
+ |
+LL | / if x && y {
+LL | | return true;
+LL | | } else {
+LL | | return false;
+LL | | };
+ | |_____^ help: you can reduce it to: `return x && y`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:129:5
+ |
+LL | / if x && y {
+LL | | return false;
+LL | | } else {
+LL | | return true;
+LL | | };
+ | |_____^ help: you can reduce it to: `return !(x && y)`
+
+error: equality checks against true are unnecessary
+ --> $DIR/fixable.rs:137:8
+ |
+LL | if x == true {};
+ | ^^^^^^^^^ help: try simplifying it as shown: `x`
+ |
+ = note: `-D clippy::bool-comparison` implied by `-D warnings`
+
+error: equality checks against false can be replaced by a negation
+ --> $DIR/fixable.rs:141:8
+ |
+LL | if x == false {};
+ | ^^^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: equality checks against true are unnecessary
+ --> $DIR/fixable.rs:151:8
+ |
+LL | if x == true {};
+ | ^^^^^^^^^ help: try simplifying it as shown: `x`
+
+error: equality checks against false can be replaced by a negation
+ --> $DIR/fixable.rs:152:8
+ |
+LL | if x == false {};
+ | ^^^^^^^^^^ help: try simplifying it as shown: `!x`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:161:12
+ |
+LL | } else if returns_bool() {
+ | ____________^
+LL | | false
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^ help: you can reduce it to: `{ !returns_bool() }`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:174:5
+ |
+LL | / if unsafe { no(4) } & 1 != 0 {
+LL | | true
+LL | | } else {
+LL | | false
+LL | | };
+ | |_____^ help: you can reduce it to: `(unsafe { no(4) } & 1 != 0)`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:179:30
+ |
+LL | let _brackets_unneeded = if unsafe { no(4) } & 1 != 0 { true } else { false };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `unsafe { no(4) } & 1 != 0`
+
+error: this if-then-else expression returns a bool literal
+ --> $DIR/fixable.rs:182:9
+ |
+LL | if unsafe { no(4) } & 1 != 0 { true } else { false }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can reduce it to: `(unsafe { no(4) } & 1 != 0)`
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_bool/simple.rs b/src/tools/clippy/tests/ui/needless_bool/simple.rs
new file mode 100644
index 000000000..588bb88f4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bool/simple.rs
@@ -0,0 +1,47 @@
+#![warn(clippy::needless_bool)]
+#![allow(
+ unused,
+ dead_code,
+ clippy::no_effect,
+ clippy::if_same_then_else,
+ clippy::needless_return,
+ clippy::branches_sharing_code
+)]
+
+fn main() {
+ let x = true;
+ let y = false;
+ if x {
+ true
+ } else {
+ true
+ };
+ if x {
+ false
+ } else {
+ false
+ };
+ if x {
+ x
+ } else {
+ false
+ }; // would also be questionable, but we don't catch this yet
+ bool_ret(x);
+ bool_ret2(x);
+}
+
+fn bool_ret(x: bool) -> bool {
+ if x {
+ return true;
+ } else {
+ return true;
+ };
+}
+
+fn bool_ret2(x: bool) -> bool {
+ if x {
+ return false;
+ } else {
+ return false;
+ };
+}
diff --git a/src/tools/clippy/tests/ui/needless_bool/simple.stderr b/src/tools/clippy/tests/ui/needless_bool/simple.stderr
new file mode 100644
index 000000000..0ccc9416b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_bool/simple.stderr
@@ -0,0 +1,44 @@
+error: this if-then-else expression will always return true
+ --> $DIR/simple.rs:14:5
+ |
+LL | / if x {
+LL | | true
+LL | | } else {
+LL | | true
+LL | | };
+ | |_____^
+ |
+ = note: `-D clippy::needless-bool` implied by `-D warnings`
+
+error: this if-then-else expression will always return false
+ --> $DIR/simple.rs:19:5
+ |
+LL | / if x {
+LL | | false
+LL | | } else {
+LL | | false
+LL | | };
+ | |_____^
+
+error: this if-then-else expression will always return true
+ --> $DIR/simple.rs:34:5
+ |
+LL | / if x {
+LL | | return true;
+LL | | } else {
+LL | | return true;
+LL | | };
+ | |_____^
+
+error: this if-then-else expression will always return false
+ --> $DIR/simple.rs:42:5
+ |
+LL | / if x {
+LL | | return false;
+LL | | } else {
+LL | | return false;
+LL | | };
+ | |_____^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_borrow.fixed b/src/tools/clippy/tests/ui/needless_borrow.fixed
new file mode 100644
index 000000000..bfd2725ec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrow.fixed
@@ -0,0 +1,185 @@
+// run-rustfix
+
+#![feature(lint_reasons)]
+
+#[warn(clippy::all, clippy::needless_borrow)]
+#[allow(unused_variables, clippy::unnecessary_mut_passed)]
+fn main() {
+ let a = 5;
+ let ref_a = &a;
+ let _ = x(&a); // no warning
+ let _ = x(&a); // warn
+
+ let mut b = 5;
+ mut_ref(&mut b); // no warning
+ mut_ref(&mut b); // warn
+
+ let s = &String::from("hi");
+ let s_ident = f(&s); // should not error, because `&String` implements Copy, but `String` does not
+ let g_val = g(&Vec::new()); // should not error, because `&Vec<T>` derefs to `&[T]`
+ let vec = Vec::new();
+ let vec_val = g(&vec); // should not error, because `&Vec<T>` derefs to `&[T]`
+ h(&"foo"); // should not error, because the `&&str` is required, due to `&Trait`
+ let garbl = match 42 {
+ 44 => &a,
+ 45 => {
+ println!("foo");
+ &a
+ },
+ 46 => &a,
+ 47 => {
+ println!("foo");
+ loop {
+ println!("{}", a);
+ if a == 25 {
+ break ref_a;
+ }
+ }
+ },
+ _ => panic!(),
+ };
+
+ let _ = x(&a);
+ let _ = x(&a);
+ let _ = x(&mut b);
+ let _ = x(ref_a);
+ {
+ let b = &mut b;
+ x(b);
+ }
+
+ // Issue #8191
+ let mut x = 5;
+ let mut x = &mut x;
+
+ mut_ref(x);
+ mut_ref(x);
+ let y: &mut i32 = x;
+ let y: &mut i32 = x;
+
+ let y = match 0 {
+ // Don't lint. Removing the borrow would move 'x'
+ 0 => &mut x,
+ _ => &mut *x,
+ };
+ let y: &mut i32 = match 0 {
+ // Lint here. The type given above triggers auto-borrow.
+ 0 => x,
+ _ => &mut *x,
+ };
+ fn ref_mut_i32(_: &mut i32) {}
+ ref_mut_i32(match 0 {
+ // Lint here. The type given above triggers auto-borrow.
+ 0 => x,
+ _ => &mut *x,
+ });
+ // use 'x' after to make sure it's still usable in the fixed code.
+ *x = 5;
+
+ let s = String::new();
+ // let _ = (&s).len();
+ // let _ = (&s).capacity();
+ // let _ = (&&s).capacity();
+
+ let x = (1, 2);
+ let _ = x.0;
+ let x = &x as *const (i32, i32);
+ let _ = unsafe { (*x).0 };
+
+ // Issue #8367
+ trait Foo {
+ fn foo(self);
+ }
+ impl Foo for &'_ () {
+ fn foo(self) {}
+ }
+ (&()).foo(); // Don't lint. `()` doesn't implement `Foo`
+ (&()).foo();
+
+ impl Foo for i32 {
+ fn foo(self) {}
+ }
+ impl Foo for &'_ i32 {
+ fn foo(self) {}
+ }
+ (&5).foo(); // Don't lint. `5` will call `<i32 as Foo>::foo`
+ (&5).foo();
+
+ trait FooRef {
+ fn foo_ref(&self);
+ }
+ impl FooRef for () {
+ fn foo_ref(&self) {}
+ }
+ impl FooRef for &'_ () {
+ fn foo_ref(&self) {}
+ }
+ (&&()).foo_ref(); // Don't lint. `&()` will call `<() as FooRef>::foo_ref`
+
+ struct S;
+ impl From<S> for u32 {
+ fn from(s: S) -> Self {
+ (&s).into()
+ }
+ }
+ impl From<&S> for u32 {
+ fn from(s: &S) -> Self {
+ 0
+ }
+ }
+}
+
+#[allow(clippy::needless_borrowed_reference)]
+fn x(y: &i32) -> i32 {
+ *y
+}
+
+fn mut_ref(y: &mut i32) {
+ *y = 5;
+}
+
+fn f<T: Copy>(y: &T) -> T {
+ *y
+}
+
+fn g(y: &[u8]) -> u8 {
+ y[0]
+}
+
+trait Trait {}
+
+impl<'a> Trait for &'a str {}
+
+fn h(_: &dyn Trait) {}
+
+#[allow(dead_code)]
+fn check_expect_suppression() {
+ let a = 5;
+ #[expect(clippy::needless_borrow)]
+ let _ = x(&&a);
+}
+
+#[allow(dead_code)]
+mod issue9160 {
+ pub struct S<F> {
+ f: F,
+ }
+
+ impl<T, F> S<F>
+ where
+ F: Fn() -> T,
+ {
+ fn calls_field(&self) -> T {
+ (self.f)()
+ }
+ }
+
+ impl<T, F> S<F>
+ where
+ F: FnMut() -> T,
+ {
+ fn calls_mut_field(&mut self) -> T {
+ (self.f)()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_borrow.rs b/src/tools/clippy/tests/ui/needless_borrow.rs
new file mode 100644
index 000000000..c457d8c54
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrow.rs
@@ -0,0 +1,185 @@
+// run-rustfix
+
+#![feature(lint_reasons)]
+
+#[warn(clippy::all, clippy::needless_borrow)]
+#[allow(unused_variables, clippy::unnecessary_mut_passed)]
+fn main() {
+ let a = 5;
+ let ref_a = &a;
+ let _ = x(&a); // no warning
+ let _ = x(&&a); // warn
+
+ let mut b = 5;
+ mut_ref(&mut b); // no warning
+ mut_ref(&mut &mut b); // warn
+
+ let s = &String::from("hi");
+ let s_ident = f(&s); // should not error, because `&String` implements Copy, but `String` does not
+ let g_val = g(&Vec::new()); // should not error, because `&Vec<T>` derefs to `&[T]`
+ let vec = Vec::new();
+ let vec_val = g(&vec); // should not error, because `&Vec<T>` derefs to `&[T]`
+ h(&"foo"); // should not error, because the `&&str` is required, due to `&Trait`
+ let garbl = match 42 {
+ 44 => &a,
+ 45 => {
+ println!("foo");
+ &&a
+ },
+ 46 => &&a,
+ 47 => {
+ println!("foo");
+ loop {
+ println!("{}", a);
+ if a == 25 {
+ break &ref_a;
+ }
+ }
+ },
+ _ => panic!(),
+ };
+
+ let _ = x(&&&a);
+ let _ = x(&mut &&a);
+ let _ = x(&&&mut b);
+ let _ = x(&&ref_a);
+ {
+ let b = &mut b;
+ x(&b);
+ }
+
+ // Issue #8191
+ let mut x = 5;
+ let mut x = &mut x;
+
+ mut_ref(&mut x);
+ mut_ref(&mut &mut x);
+ let y: &mut i32 = &mut x;
+ let y: &mut i32 = &mut &mut x;
+
+ let y = match 0 {
+ // Don't lint. Removing the borrow would move 'x'
+ 0 => &mut x,
+ _ => &mut *x,
+ };
+ let y: &mut i32 = match 0 {
+ // Lint here. The type given above triggers auto-borrow.
+ 0 => &mut x,
+ _ => &mut *x,
+ };
+ fn ref_mut_i32(_: &mut i32) {}
+ ref_mut_i32(match 0 {
+ // Lint here. The type given above triggers auto-borrow.
+ 0 => &mut x,
+ _ => &mut *x,
+ });
+ // use 'x' after to make sure it's still usable in the fixed code.
+ *x = 5;
+
+ let s = String::new();
+ // let _ = (&s).len();
+ // let _ = (&s).capacity();
+ // let _ = (&&s).capacity();
+
+ let x = (1, 2);
+ let _ = (&x).0;
+ let x = &x as *const (i32, i32);
+ let _ = unsafe { (&*x).0 };
+
+ // Issue #8367
+ trait Foo {
+ fn foo(self);
+ }
+ impl Foo for &'_ () {
+ fn foo(self) {}
+ }
+ (&()).foo(); // Don't lint. `()` doesn't implement `Foo`
+ (&&()).foo();
+
+ impl Foo for i32 {
+ fn foo(self) {}
+ }
+ impl Foo for &'_ i32 {
+ fn foo(self) {}
+ }
+ (&5).foo(); // Don't lint. `5` will call `<i32 as Foo>::foo`
+ (&&5).foo();
+
+ trait FooRef {
+ fn foo_ref(&self);
+ }
+ impl FooRef for () {
+ fn foo_ref(&self) {}
+ }
+ impl FooRef for &'_ () {
+ fn foo_ref(&self) {}
+ }
+ (&&()).foo_ref(); // Don't lint. `&()` will call `<() as FooRef>::foo_ref`
+
+ struct S;
+ impl From<S> for u32 {
+ fn from(s: S) -> Self {
+ (&s).into()
+ }
+ }
+ impl From<&S> for u32 {
+ fn from(s: &S) -> Self {
+ 0
+ }
+ }
+}
+
+#[allow(clippy::needless_borrowed_reference)]
+fn x(y: &i32) -> i32 {
+ *y
+}
+
+fn mut_ref(y: &mut i32) {
+ *y = 5;
+}
+
+fn f<T: Copy>(y: &T) -> T {
+ *y
+}
+
+fn g(y: &[u8]) -> u8 {
+ y[0]
+}
+
+trait Trait {}
+
+impl<'a> Trait for &'a str {}
+
+fn h(_: &dyn Trait) {}
+
+#[allow(dead_code)]
+fn check_expect_suppression() {
+ let a = 5;
+ #[expect(clippy::needless_borrow)]
+ let _ = x(&&a);
+}
+
+#[allow(dead_code)]
+mod issue9160 {
+ pub struct S<F> {
+ f: F,
+ }
+
+ impl<T, F> S<F>
+ where
+ F: Fn() -> T,
+ {
+ fn calls_field(&self) -> T {
+ (&self.f)()
+ }
+ }
+
+ impl<T, F> S<F>
+ where
+ F: FnMut() -> T,
+ {
+ fn calls_mut_field(&mut self) -> T {
+ (&mut self.f)()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_borrow.stderr b/src/tools/clippy/tests/ui/needless_borrow.stderr
new file mode 100644
index 000000000..66588689d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrow.stderr
@@ -0,0 +1,136 @@
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:11:15
+ |
+LL | let _ = x(&&a); // warn
+ | ^^^ help: change this to: `&a`
+ |
+ = note: `-D clippy::needless-borrow` implied by `-D warnings`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:15:13
+ |
+LL | mut_ref(&mut &mut b); // warn
+ | ^^^^^^^^^^^ help: change this to: `&mut b`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:27:13
+ |
+LL | &&a
+ | ^^^ help: change this to: `&a`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:29:15
+ |
+LL | 46 => &&a,
+ | ^^^ help: change this to: `&a`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:35:27
+ |
+LL | break &ref_a;
+ | ^^^^^^ help: change this to: `ref_a`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:42:15
+ |
+LL | let _ = x(&&&a);
+ | ^^^^ help: change this to: `&a`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:43:15
+ |
+LL | let _ = x(&mut &&a);
+ | ^^^^^^^^ help: change this to: `&a`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:44:15
+ |
+LL | let _ = x(&&&mut b);
+ | ^^^^^^^^ help: change this to: `&mut b`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:45:15
+ |
+LL | let _ = x(&&ref_a);
+ | ^^^^^^^ help: change this to: `ref_a`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:48:11
+ |
+LL | x(&b);
+ | ^^ help: change this to: `b`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:55:13
+ |
+LL | mut_ref(&mut x);
+ | ^^^^^^ help: change this to: `x`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:56:13
+ |
+LL | mut_ref(&mut &mut x);
+ | ^^^^^^^^^^^ help: change this to: `x`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:57:23
+ |
+LL | let y: &mut i32 = &mut x;
+ | ^^^^^^ help: change this to: `x`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:58:23
+ |
+LL | let y: &mut i32 = &mut &mut x;
+ | ^^^^^^^^^^^ help: change this to: `x`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:67:14
+ |
+LL | 0 => &mut x,
+ | ^^^^^^ help: change this to: `x`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:73:14
+ |
+LL | 0 => &mut x,
+ | ^^^^^^ help: change this to: `x`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:85:13
+ |
+LL | let _ = (&x).0;
+ | ^^^^ help: change this to: `x`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:87:22
+ |
+LL | let _ = unsafe { (&*x).0 };
+ | ^^^^^ help: change this to: `(*x)`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:97:5
+ |
+LL | (&&()).foo();
+ | ^^^^^^ help: change this to: `(&())`
+
+error: this expression creates a reference which is immediately dereferenced by the compiler
+ --> $DIR/needless_borrow.rs:106:5
+ |
+LL | (&&5).foo();
+ | ^^^^^ help: change this to: `(&5)`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:173:13
+ |
+LL | (&self.f)()
+ | ^^^^^^^^^ help: change this to: `(self.f)`
+
+error: this expression borrows a value the compiler would automatically borrow
+ --> $DIR/needless_borrow.rs:182:13
+ |
+LL | (&mut self.f)()
+ | ^^^^^^^^^^^^^ help: change this to: `(self.f)`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_borrow_pat.rs b/src/tools/clippy/tests/ui/needless_borrow_pat.rs
new file mode 100644
index 000000000..222e8e617
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrow_pat.rs
@@ -0,0 +1,150 @@
+// FIXME: run-rustfix waiting on multi-span suggestions
+
+#![warn(clippy::needless_borrow)]
+#![allow(clippy::needless_borrowed_reference, clippy::explicit_auto_deref)]
+
+fn f1(_: &str) {}
+macro_rules! m1 {
+ ($e:expr) => {
+ f1($e)
+ };
+}
+macro_rules! m3 {
+ ($i:ident) => {
+ Some(ref $i)
+ };
+}
+macro_rules! if_chain {
+ (if $e:expr; $($rest:tt)*) => {
+ if $e {
+ if_chain!($($rest)*)
+ }
+ };
+
+ (if let $p:pat = $e:expr; $($rest:tt)*) => {
+ if let $p = $e {
+ if_chain!($($rest)*)
+ }
+ };
+
+ (then $b:block) => {
+ $b
+ };
+}
+
+#[allow(dead_code)]
+fn main() {
+ let x = String::new();
+
+ // Ok, reference to a String.
+ let _: &String = match Some(x.clone()) {
+ Some(ref x) => x,
+ None => return,
+ };
+
+ // Ok, reference to a &mut String
+ let _: &&mut String = match Some(&mut x.clone()) {
+ Some(ref x) => x,
+ None => return,
+ };
+
+ // Ok, the pattern is from a macro
+ let _: &String = match Some(&x) {
+ m3!(x) => x,
+ None => return,
+ };
+
+ // Err, reference to a &String
+ let _: &String = match Some(&x) {
+ Some(ref x) => x,
+ None => return,
+ };
+
+ // Err, reference to a &String.
+ let _: &String = match Some(&x) {
+ Some(ref x) => *x,
+ None => return,
+ };
+
+ // Err, reference to a &String
+ let _: &String = match Some(&x) {
+ Some(ref x) => {
+ f1(x);
+ f1(*x);
+ x
+ },
+ None => return,
+ };
+
+ // Err, reference to a &String
+ match Some(&x) {
+ Some(ref x) => m1!(x),
+ None => return,
+ };
+
+ // Err, reference to a &String
+ let _ = |&ref x: &&String| {
+ let _: &String = x;
+ };
+
+ // Err, reference to a &String
+ let (ref y,) = (&x,);
+ let _: &String = *y;
+
+ let y = &&x;
+ // Ok, different y
+ let _: &String = *y;
+
+ let x = (0, 0);
+ // Err, reference to a &u32. Don't suggest adding a reference to the field access.
+ let _: u32 = match Some(&x) {
+ Some(ref x) => x.0,
+ None => return,
+ };
+
+ enum E {
+ A(&'static u32),
+ B(&'static u32),
+ }
+ // Err, reference to &u32.
+ let _: &u32 = match E::A(&0) {
+ E::A(ref x) | E::B(ref x) => *x,
+ };
+
+ // Err, reference to &String.
+ if_chain! {
+ if true;
+ if let Some(ref x) = Some(&String::new());
+ then {
+ f1(x);
+ }
+ }
+}
+
+// Err, reference to a &String
+fn f2<'a>(&ref x: &&'a String) -> &'a String {
+ let _: &String = x;
+ *x
+}
+
+trait T1 {
+ // Err, reference to a &String
+ fn f(&ref x: &&String) {
+ let _: &String = x;
+ }
+}
+
+struct S;
+impl T1 for S {
+ // Err, reference to a &String
+ fn f(&ref x: &&String) {
+ let _: &String = *x;
+ }
+}
+
+// Ok - used to error due to rustc bug
+#[allow(dead_code)]
+#[derive(Debug)]
+enum Foo<'a> {
+ Str(&'a str),
+}
diff --git a/src/tools/clippy/tests/ui/needless_borrow_pat.stderr b/src/tools/clippy/tests/ui/needless_borrow_pat.stderr
new file mode 100644
index 000000000..db3b52b88
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrow_pat.stderr
@@ -0,0 +1,112 @@
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:59:14
+ |
+LL | Some(ref x) => x,
+ | ^^^^^ help: try this: `x`
+ |
+ = note: `-D clippy::needless-borrow` implied by `-D warnings`
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:65:14
+ |
+LL | Some(ref x) => *x,
+ | ^^^^^
+ |
+help: try this
+ |
+LL | Some(x) => x,
+ | ~ ~
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:71:14
+ |
+LL | Some(ref x) => {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ Some(x) => {
+LL | f1(x);
+LL ~ f1(x);
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:81:14
+ |
+LL | Some(ref x) => m1!(x),
+ | ^^^^^ help: try this: `x`
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:86:15
+ |
+LL | let _ = |&ref x: &&String| {
+ | ^^^^^ help: try this: `x`
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:91:10
+ |
+LL | let (ref y,) = (&x,);
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ let (y,) = (&x,);
+LL ~ let _: &String = y;
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:101:14
+ |
+LL | Some(ref x) => x.0,
+ | ^^^^^ help: try this: `x`
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:111:14
+ |
+LL | E::A(ref x) | E::B(ref x) => *x,
+ | ^^^^^ ^^^^^
+ |
+help: try this
+ |
+LL | E::A(x) | E::B(x) => x,
+ | ~ ~ ~
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:117:21
+ |
+LL | if let Some(ref x) = Some(&String::new());
+ | ^^^^^ help: try this: `x`
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:125:12
+ |
+LL | fn f2<'a>(&ref x: &&'a String) -> &'a String {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ fn f2<'a>(&x: &&'a String) -> &'a String {
+LL | let _: &String = x;
+LL ~ x
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:132:11
+ |
+LL | fn f(&ref x: &&String) {
+ | ^^^^^ help: try this: `x`
+
+error: this pattern creates a reference to a reference
+ --> $DIR/needless_borrow_pat.rs:140:11
+ |
+LL | fn f(&ref x: &&String) {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ fn f(&x: &&String) {
+LL ~ let _: &String = x;
+ |
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed b/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed
new file mode 100644
index 000000000..a0937a2c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed
@@ -0,0 +1,45 @@
+// run-rustfix
+
+#[warn(clippy::needless_borrowed_reference)]
+#[allow(unused_variables)]
+fn main() {
+ let mut v = Vec::<String>::new();
+ let _ = v.iter_mut().filter(|a| a.is_empty());
+ // ^ should be linted
+
+ let var = 3;
+ let thingy = Some(&var);
+ if let Some(&ref v) = thingy {
+ // ^ should be linted
+ }
+
+ let mut var2 = 5;
+ let thingy2 = Some(&mut var2);
+ if let Some(&mut ref mut v) = thingy2 {
+ // ^ should **not** be linted
+ // v is borrowed as mutable.
+ *v = 10;
+ }
+ if let Some(&mut ref v) = thingy2 {
+ // ^ should **not** be linted
+ // here, v is borrowed as immutable.
+ // can't do that:
+ //*v = 15;
+ }
+}
+
+#[allow(dead_code)]
+enum Animal {
+ Cat(u64),
+ Dog(u64),
+}
+
+#[allow(unused_variables)]
+#[allow(dead_code)]
+fn foo(a: &Animal, b: &Animal) {
+ match (a, b) {
+ (&Animal::Cat(v), &ref k) | (&ref k, &Animal::Cat(v)) => (), // lifetime mismatch error if there is no '&ref'
+ // ^ and ^ should **not** be linted
+ (&Animal::Dog(ref a), &Animal::Dog(_)) => (), // ^ should **not** be linted
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_borrowed_ref.rs b/src/tools/clippy/tests/ui/needless_borrowed_ref.rs
new file mode 100644
index 000000000..500ac448f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrowed_ref.rs
@@ -0,0 +1,45 @@
+// run-rustfix
+
+#[warn(clippy::needless_borrowed_reference)]
+#[allow(unused_variables)]
+fn main() {
+ let mut v = Vec::<String>::new();
+ let _ = v.iter_mut().filter(|&ref a| a.is_empty());
+ // ^ should be linted
+
+ let var = 3;
+ let thingy = Some(&var);
+ if let Some(&ref v) = thingy {
+ // ^ should be linted
+ }
+
+ let mut var2 = 5;
+ let thingy2 = Some(&mut var2);
+ if let Some(&mut ref mut v) = thingy2 {
+ // ^ should **not** be linted
+ // v is borrowed as mutable.
+ *v = 10;
+ }
+ if let Some(&mut ref v) = thingy2 {
+ // ^ should **not** be linted
+ // here, v is borrowed as immutable.
+ // can't do that:
+ //*v = 15;
+ }
+}
+
+#[allow(dead_code)]
+enum Animal {
+ Cat(u64),
+ Dog(u64),
+}
+
+#[allow(unused_variables)]
+#[allow(dead_code)]
+fn foo(a: &Animal, b: &Animal) {
+ match (a, b) {
+ (&Animal::Cat(v), &ref k) | (&ref k, &Animal::Cat(v)) => (), // lifetime mismatch error if there is no '&ref'
+ // ^ and ^ should **not** be linted
+ (&Animal::Dog(ref a), &Animal::Dog(_)) => (), // ^ should **not** be linted
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_borrowed_ref.stderr b/src/tools/clippy/tests/ui/needless_borrowed_ref.stderr
new file mode 100644
index 000000000..0a5cfb3db
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_borrowed_ref.stderr
@@ -0,0 +1,10 @@
+error: this pattern takes a reference on something that is being de-referenced
+ --> $DIR/needless_borrowed_ref.rs:7:34
+ |
+LL | let _ = v.iter_mut().filter(|&ref a| a.is_empty());
+ | ^^^^^^ help: try removing the `&ref` part and just keep: `a`
+ |
+ = note: `-D clippy::needless-borrowed-reference` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/needless_collect.fixed b/src/tools/clippy/tests/ui/needless_collect.fixed
new file mode 100644
index 000000000..6ecbbcb62
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_collect.fixed
@@ -0,0 +1,36 @@
+// run-rustfix
+
+#![allow(unused, clippy::suspicious_map, clippy::iter_count)]
+
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList};
+
+#[warn(clippy::needless_collect)]
+#[allow(unused_variables, clippy::iter_cloned_collect, clippy::iter_next_slice)]
+fn main() {
+ let sample = [1; 5];
+ let len = sample.iter().count();
+ if sample.iter().next().is_none() {
+ // Empty
+ }
+ sample.iter().cloned().any(|x| x == 1);
+ // #7164 HashMap's and BTreeMap's `len` usage should not be linted
+ sample.iter().map(|x| (x, x)).collect::<HashMap<_, _>>().len();
+ sample.iter().map(|x| (x, x)).collect::<BTreeMap<_, _>>().len();
+
+ sample.iter().map(|x| (x, x)).next().is_none();
+ sample.iter().map(|x| (x, x)).next().is_none();
+
+ // Notice the `HashSet`--this should not be linted
+ sample.iter().collect::<HashSet<_>>().len();
+ // Neither should this
+ sample.iter().collect::<BTreeSet<_>>().len();
+
+ sample.iter().count();
+ sample.iter().next().is_none();
+ sample.iter().cloned().any(|x| x == 1);
+ sample.iter().any(|x| x == &1);
+
+ // `BinaryHeap` doesn't have `contains` method
+ sample.iter().count();
+ sample.iter().next().is_none();
+}
diff --git a/src/tools/clippy/tests/ui/needless_collect.rs b/src/tools/clippy/tests/ui/needless_collect.rs
new file mode 100644
index 000000000..8dc69bcf5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_collect.rs
@@ -0,0 +1,36 @@
+// run-rustfix
+
+#![allow(unused, clippy::suspicious_map, clippy::iter_count)]
+
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList};
+
+#[warn(clippy::needless_collect)]
+#[allow(unused_variables, clippy::iter_cloned_collect, clippy::iter_next_slice)]
+fn main() {
+ let sample = [1; 5];
+ let len = sample.iter().collect::<Vec<_>>().len();
+ if sample.iter().collect::<Vec<_>>().is_empty() {
+ // Empty
+ }
+ sample.iter().cloned().collect::<Vec<_>>().contains(&1);
+ // #7164 HashMap's and BTreeMap's `len` usage should not be linted
+ sample.iter().map(|x| (x, x)).collect::<HashMap<_, _>>().len();
+ sample.iter().map(|x| (x, x)).collect::<BTreeMap<_, _>>().len();
+
+ sample.iter().map(|x| (x, x)).collect::<HashMap<_, _>>().is_empty();
+ sample.iter().map(|x| (x, x)).collect::<BTreeMap<_, _>>().is_empty();
+
+ // Notice the `HashSet`--this should not be linted
+ sample.iter().collect::<HashSet<_>>().len();
+ // Neither should this
+ sample.iter().collect::<BTreeSet<_>>().len();
+
+ sample.iter().collect::<LinkedList<_>>().len();
+ sample.iter().collect::<LinkedList<_>>().is_empty();
+ sample.iter().cloned().collect::<LinkedList<_>>().contains(&1);
+ sample.iter().collect::<LinkedList<_>>().contains(&&1);
+
+ // `BinaryHeap` doesn't have `contains` method
+ sample.iter().collect::<BinaryHeap<_>>().len();
+ sample.iter().collect::<BinaryHeap<_>>().is_empty();
+}
diff --git a/src/tools/clippy/tests/ui/needless_collect.stderr b/src/tools/clippy/tests/ui/needless_collect.stderr
new file mode 100644
index 000000000..039091627
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_collect.stderr
@@ -0,0 +1,70 @@
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:11:29
+ |
+LL | let len = sample.iter().collect::<Vec<_>>().len();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `count()`
+ |
+ = note: `-D clippy::needless-collect` implied by `-D warnings`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:12:22
+ |
+LL | if sample.iter().collect::<Vec<_>>().is_empty() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:15:28
+ |
+LL | sample.iter().cloned().collect::<Vec<_>>().contains(&1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `any(|x| x == 1)`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:20:35
+ |
+LL | sample.iter().map(|x| (x, x)).collect::<HashMap<_, _>>().is_empty();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:21:35
+ |
+LL | sample.iter().map(|x| (x, x)).collect::<BTreeMap<_, _>>().is_empty();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:28:19
+ |
+LL | sample.iter().collect::<LinkedList<_>>().len();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `count()`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:29:19
+ |
+LL | sample.iter().collect::<LinkedList<_>>().is_empty();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:30:28
+ |
+LL | sample.iter().cloned().collect::<LinkedList<_>>().contains(&1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `any(|x| x == 1)`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:31:19
+ |
+LL | sample.iter().collect::<LinkedList<_>>().contains(&&1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `any(|x| x == &1)`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:34:19
+ |
+LL | sample.iter().collect::<BinaryHeap<_>>().len();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `count()`
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect.rs:35:19
+ |
+LL | sample.iter().collect::<BinaryHeap<_>>().is_empty();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_collect_indirect.rs b/src/tools/clippy/tests/ui/needless_collect_indirect.rs
new file mode 100644
index 000000000..1f11d1f8d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_collect_indirect.rs
@@ -0,0 +1,114 @@
+use std::collections::{BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
+
+fn main() {
+ let sample = [1; 5];
+ let indirect_iter = sample.iter().collect::<Vec<_>>();
+ indirect_iter.into_iter().map(|x| (x, x + 1)).collect::<HashMap<_, _>>();
+ let indirect_len = sample.iter().collect::<VecDeque<_>>();
+ indirect_len.len();
+ let indirect_empty = sample.iter().collect::<VecDeque<_>>();
+ indirect_empty.is_empty();
+ let indirect_contains = sample.iter().collect::<VecDeque<_>>();
+ indirect_contains.contains(&&5);
+ let indirect_negative = sample.iter().collect::<Vec<_>>();
+ indirect_negative.len();
+ indirect_negative
+ .into_iter()
+ .map(|x| (*x, *x + 1))
+ .collect::<HashMap<_, _>>();
+
+ // #6202
+ let a = "a".to_string();
+ let sample = vec![a.clone(), "b".to_string(), "c".to_string()];
+ let non_copy_contains = sample.into_iter().collect::<Vec<_>>();
+ non_copy_contains.contains(&a);
+
+ // Fix #5991
+ let vec_a = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ let vec_b = vec_a.iter().collect::<Vec<_>>();
+ if vec_b.len() > 3 {}
+ let other_vec = vec![1, 3, 12, 4, 16, 2];
+ let we_got_the_same_numbers = other_vec.iter().filter(|item| vec_b.contains(item)).collect::<Vec<_>>();
+
+ // Fix #6297
+ let sample = [1; 5];
+ let multiple_indirect = sample.iter().collect::<Vec<_>>();
+ let sample2 = vec![2, 3];
+ if multiple_indirect.is_empty() {
+ // do something
+ } else {
+ let found = sample2
+ .iter()
+ .filter(|i| multiple_indirect.iter().any(|s| **s % **i == 0))
+ .collect::<Vec<_>>();
+ }
+}
+
+mod issue7110 {
+ // #7110 - lint for type annotation cases
+ use super::*;
+
+ fn lint_vec(string: &str) -> usize {
+ let buffer: Vec<&str> = string.split('/').collect();
+ buffer.len()
+ }
+ fn lint_vec_deque() -> usize {
+ let sample = [1; 5];
+ let indirect_len: VecDeque<_> = sample.iter().collect();
+ indirect_len.len()
+ }
+ fn lint_linked_list() -> usize {
+ let sample = [1; 5];
+ let indirect_len: LinkedList<_> = sample.iter().collect();
+ indirect_len.len()
+ }
+ fn lint_binary_heap() -> usize {
+ let sample = [1; 5];
+ let indirect_len: BinaryHeap<_> = sample.iter().collect();
+ indirect_len.len()
+ }
+ fn dont_lint(string: &str) -> usize {
+ let buffer: Vec<&str> = string.split('/').collect();
+ for buff in &buffer {
+ println!("{}", buff);
+ }
+ buffer.len()
+ }
+}
+
+mod issue7975 {
+ use super::*;
+
+ fn direct_mapping_with_used_mutable_reference() -> Vec<()> {
+ let test_vec: Vec<()> = vec![];
+ let mut vec_2: Vec<()> = vec![];
+ let mut_ref = &mut vec_2;
+ let collected_vec: Vec<_> = test_vec.into_iter().map(|_| mut_ref.push(())).collect();
+ collected_vec.into_iter().map(|_| mut_ref.push(())).collect()
+ }
+
+ fn indirectly_mapping_with_used_mutable_reference() -> Vec<()> {
+ let test_vec: Vec<()> = vec![];
+ let mut vec_2: Vec<()> = vec![];
+ let mut_ref = &mut vec_2;
+ let collected_vec: Vec<_> = test_vec.into_iter().map(|_| mut_ref.push(())).collect();
+ let iter = collected_vec.into_iter();
+ iter.map(|_| mut_ref.push(())).collect()
+ }
+
+ fn indirect_collect_after_indirect_mapping_with_used_mutable_reference() -> Vec<()> {
+ let test_vec: Vec<()> = vec![];
+ let mut vec_2: Vec<()> = vec![];
+ let mut_ref = &mut vec_2;
+ let collected_vec: Vec<_> = test_vec.into_iter().map(|_| mut_ref.push(())).collect();
+ let iter = collected_vec.into_iter();
+ let mapped_iter = iter.map(|_| mut_ref.push(()));
+ mapped_iter.collect()
+ }
+}
+
+fn allow_test() {
+ #[allow(clippy::needless_collect)]
+ let v = [1].iter().collect::<Vec<_>>();
+ v.into_iter().collect::<HashSet<_>>();
+}
diff --git a/src/tools/clippy/tests/ui/needless_collect_indirect.stderr b/src/tools/clippy/tests/ui/needless_collect_indirect.stderr
new file mode 100644
index 000000000..0f5e78f91
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_collect_indirect.stderr
@@ -0,0 +1,129 @@
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:5:39
+ |
+LL | let indirect_iter = sample.iter().collect::<Vec<_>>();
+ | ^^^^^^^
+LL | indirect_iter.into_iter().map(|x| (x, x + 1)).collect::<HashMap<_, _>>();
+ | ------------------------- the iterator could be used here instead
+ |
+ = note: `-D clippy::needless-collect` implied by `-D warnings`
+help: use the original Iterator instead of collecting it and then producing a new one
+ |
+LL ~
+LL ~ sample.iter().map(|x| (x, x + 1)).collect::<HashMap<_, _>>();
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:7:38
+ |
+LL | let indirect_len = sample.iter().collect::<VecDeque<_>>();
+ | ^^^^^^^
+LL | indirect_len.len();
+ | ------------------ the iterator could be used here instead
+ |
+help: take the original Iterator's count instead of collecting it and finding the length
+ |
+LL ~
+LL ~ sample.iter().count();
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:9:40
+ |
+LL | let indirect_empty = sample.iter().collect::<VecDeque<_>>();
+ | ^^^^^^^
+LL | indirect_empty.is_empty();
+ | ------------------------- the iterator could be used here instead
+ |
+help: check if the original Iterator has anything instead of collecting it and seeing if it's empty
+ |
+LL ~
+LL ~ sample.iter().next().is_none();
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:11:43
+ |
+LL | let indirect_contains = sample.iter().collect::<VecDeque<_>>();
+ | ^^^^^^^
+LL | indirect_contains.contains(&&5);
+ | ------------------------------- the iterator could be used here instead
+ |
+help: check if the original Iterator contains an element instead of collecting then checking
+ |
+LL ~
+LL ~ sample.iter().any(|x| x == &5);
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:23:48
+ |
+LL | let non_copy_contains = sample.into_iter().collect::<Vec<_>>();
+ | ^^^^^^^
+LL | non_copy_contains.contains(&a);
+ | ------------------------------ the iterator could be used here instead
+ |
+help: check if the original Iterator contains an element instead of collecting then checking
+ |
+LL ~
+LL ~ sample.into_iter().any(|x| x == a);
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:52:51
+ |
+LL | let buffer: Vec<&str> = string.split('/').collect();
+ | ^^^^^^^
+LL | buffer.len()
+ | ------------ the iterator could be used here instead
+ |
+help: take the original Iterator's count instead of collecting it and finding the length
+ |
+LL ~
+LL ~ string.split('/').count()
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:57:55
+ |
+LL | let indirect_len: VecDeque<_> = sample.iter().collect();
+ | ^^^^^^^
+LL | indirect_len.len()
+ | ------------------ the iterator could be used here instead
+ |
+help: take the original Iterator's count instead of collecting it and finding the length
+ |
+LL ~
+LL ~ sample.iter().count()
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:62:57
+ |
+LL | let indirect_len: LinkedList<_> = sample.iter().collect();
+ | ^^^^^^^
+LL | indirect_len.len()
+ | ------------------ the iterator could be used here instead
+ |
+help: take the original Iterator's count instead of collecting it and finding the length
+ |
+LL ~
+LL ~ sample.iter().count()
+ |
+
+error: avoid using `collect()` when not needed
+ --> $DIR/needless_collect_indirect.rs:67:57
+ |
+LL | let indirect_len: BinaryHeap<_> = sample.iter().collect();
+ | ^^^^^^^
+LL | indirect_len.len()
+ | ------------------ the iterator could be used here instead
+ |
+help: take the original Iterator's count instead of collecting it and finding the length
+ |
+LL ~
+LL ~ sample.iter().count()
+ |
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_continue.rs b/src/tools/clippy/tests/ui/needless_continue.rs
new file mode 100644
index 000000000..f105d3d65
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_continue.rs
@@ -0,0 +1,144 @@
+#![warn(clippy::needless_continue)]
+
+macro_rules! zero {
+ ($x:expr) => {
+ $x == 0
+ };
+}
+
+macro_rules! nonzero {
+ ($x:expr) => {
+ !zero!($x)
+ };
+}
+
+#[allow(clippy::nonminimal_bool)]
+fn main() {
+ let mut i = 1;
+ while i < 10 {
+ i += 1;
+
+ if i % 2 == 0 && i % 3 == 0 {
+ println!("{}", i);
+ println!("{}", i + 1);
+ if i % 5 == 0 {
+ println!("{}", i + 2);
+ }
+ let i = 0;
+ println!("bar {} ", i);
+ } else {
+ continue;
+ }
+
+ println!("bleh");
+ {
+ println!("blah");
+ }
+
+ // some comments that also should ideally be included in the
+ // output of the lint suggestion if possible.
+ if !(!(i == 2) || !(i == 5)) {
+ println!("lama");
+ }
+
+ if (zero!(i % 2) || nonzero!(i % 5)) && i % 3 != 0 {
+ continue;
+ } else {
+ println!("Blabber");
+ println!("Jabber");
+ }
+
+ println!("bleh");
+ }
+}
+
+fn simple_loop() {
+ loop {
+ continue; // should lint here
+ }
+}
+
+fn simple_loop2() {
+ loop {
+ println!("bleh");
+ continue; // should lint here
+ }
+}
+
+#[rustfmt::skip]
+fn simple_loop3() {
+ loop {
+ continue // should lint here
+ }
+}
+
+#[rustfmt::skip]
+fn simple_loop4() {
+ loop {
+ println!("bleh");
+ continue // should lint here
+ }
+}
+
+mod issue_2329 {
+ fn condition() -> bool {
+ unimplemented!()
+ }
+ fn update_condition() {}
+
+ // only the outer loop has a label
+ fn foo() {
+ 'outer: loop {
+ println!("Entry");
+ while condition() {
+ update_condition();
+ if condition() {
+ println!("foo-1");
+ } else {
+ continue 'outer; // should not lint here
+ }
+ println!("foo-2");
+
+ update_condition();
+ if condition() {
+ continue 'outer; // should not lint here
+ } else {
+ println!("foo-3");
+ }
+ println!("foo-4");
+ }
+ }
+ }
+
+ // both loops have labels
+ fn bar() {
+ 'outer: loop {
+ println!("Entry");
+ 'inner: while condition() {
+ update_condition();
+ if condition() {
+ println!("bar-1");
+ } else {
+ continue 'outer; // should not lint here
+ }
+ println!("bar-2");
+
+ update_condition();
+ if condition() {
+ println!("bar-3");
+ } else {
+ continue 'inner; // should lint here
+ }
+ println!("bar-4");
+
+ update_condition();
+ if condition() {
+ continue; // should lint here
+ } else {
+ println!("bar-5");
+ }
+ println!("bar-6");
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_continue.stderr b/src/tools/clippy/tests/ui/needless_continue.stderr
new file mode 100644
index 000000000..b8657c74c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_continue.stderr
@@ -0,0 +1,131 @@
+error: this `else` block is redundant
+ --> $DIR/needless_continue.rs:29:16
+ |
+LL | } else {
+ | ________________^
+LL | | continue;
+LL | | }
+ | |_________^
+ |
+ = note: `-D clippy::needless-continue` implied by `-D warnings`
+ = help: consider dropping the `else` clause and merging the code that follows (in the loop) with the `if` block
+ if i % 2 == 0 && i % 3 == 0 {
+ println!("{}", i);
+ println!("{}", i + 1);
+ if i % 5 == 0 {
+ println!("{}", i + 2);
+ }
+ let i = 0;
+ println!("bar {} ", i);
+ // merged code follows:
+ println!("bleh");
+ {
+ println!("blah");
+ }
+ if !(!(i == 2) || !(i == 5)) {
+ println!("lama");
+ }
+ if (zero!(i % 2) || nonzero!(i % 5)) && i % 3 != 0 {
+ continue;
+ } else {
+ println!("Blabber");
+ println!("Jabber");
+ }
+ println!("bleh");
+ }
+
+error: there is no need for an explicit `else` block for this `if` expression
+ --> $DIR/needless_continue.rs:44:9
+ |
+LL | / if (zero!(i % 2) || nonzero!(i % 5)) && i % 3 != 0 {
+LL | | continue;
+LL | | } else {
+LL | | println!("Blabber");
+LL | | println!("Jabber");
+LL | | }
+ | |_________^
+ |
+ = help: consider dropping the `else` clause
+ if (zero!(i % 2) || nonzero!(i % 5)) && i % 3 != 0 {
+ continue;
+ }
+ {
+ println!("Blabber");
+ println!("Jabber");
+ }
+
+error: this `continue` expression is redundant
+ --> $DIR/needless_continue.rs:57:9
+ |
+LL | continue; // should lint here
+ | ^^^^^^^^^
+ |
+ = help: consider dropping the `continue` expression
+
+error: this `continue` expression is redundant
+ --> $DIR/needless_continue.rs:64:9
+ |
+LL | continue; // should lint here
+ | ^^^^^^^^^
+ |
+ = help: consider dropping the `continue` expression
+
+error: this `continue` expression is redundant
+ --> $DIR/needless_continue.rs:71:9
+ |
+LL | continue // should lint here
+ | ^^^^^^^^
+ |
+ = help: consider dropping the `continue` expression
+
+error: this `continue` expression is redundant
+ --> $DIR/needless_continue.rs:79:9
+ |
+LL | continue // should lint here
+ | ^^^^^^^^
+ |
+ = help: consider dropping the `continue` expression
+
+error: this `else` block is redundant
+ --> $DIR/needless_continue.rs:129:24
+ |
+LL | } else {
+ | ________________________^
+LL | | continue 'inner; // should lint here
+LL | | }
+ | |_________________^
+ |
+ = help: consider dropping the `else` clause and merging the code that follows (in the loop) with the `if` block
+ if condition() {
+ println!("bar-3");
+ // merged code follows:
+ println!("bar-4");
+ update_condition();
+ if condition() {
+ continue; // should lint here
+ } else {
+ println!("bar-5");
+ }
+ println!("bar-6");
+ }
+
+error: there is no need for an explicit `else` block for this `if` expression
+ --> $DIR/needless_continue.rs:135:17
+ |
+LL | / if condition() {
+LL | | continue; // should lint here
+LL | | } else {
+LL | | println!("bar-5");
+LL | | }
+ | |_________________^
+ |
+ = help: consider dropping the `else` clause
+ if condition() {
+ continue; // should lint here
+ }
+ {
+ println!("bar-5");
+ }
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_doc_main.rs b/src/tools/clippy/tests/ui/needless_doc_main.rs
new file mode 100644
index 000000000..83e9bbaa3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_doc_main.rs
@@ -0,0 +1,140 @@
+/// This is a test for needless `fn main()` in doctests.
+///
+/// # Examples
+///
+/// This should lint
+/// ```
+/// fn main() {
+/// unimplemented!();
+/// }
+/// ```
+///
+/// With an explicit return type it should lint too
+/// ```edition2015
+/// fn main() -> () {
+/// unimplemented!();
+/// }
+/// ```
+///
+/// This should, too.
+/// ```rust
+/// fn main() {
+/// unimplemented!();
+/// }
+/// ```
+///
+/// This one too.
+/// ```no_run
+/// fn main() {
+/// unimplemented!();
+/// }
+/// ```
+fn bad_doctests() {}
+
+/// # Examples
+///
+/// This shouldn't lint, because the `main` is empty:
+/// ```
+/// fn main(){}
+/// ```
+///
+/// This shouldn't lint either, because main is async:
+/// ```edition2018
+/// async fn main() {
+/// assert_eq!(42, ANSWER);
+/// }
+/// ```
+///
+/// Same here, because the return type is not the unit type:
+/// ```
+/// fn main() -> Result<()> {
+/// Ok(())
+/// }
+/// ```
+///
+/// This shouldn't lint either, because there's a `static`:
+/// ```
+/// static ANSWER: i32 = 42;
+///
+/// fn main() {
+/// assert_eq!(42, ANSWER);
+/// }
+/// ```
+///
+/// This shouldn't lint either, because there's a `const`:
+/// ```
+/// fn main() {
+/// assert_eq!(42, ANSWER);
+/// }
+///
+/// const ANSWER: i32 = 42;
+/// ```
+///
+/// Neither should this lint because of `extern crate`:
+/// ```
+/// #![feature(test)]
+/// extern crate test;
+/// fn main() {
+/// assert_eq(1u8, test::black_box(1));
+/// }
+/// ```
+///
+/// Neither should this lint because it has an extern block:
+/// ```
+/// extern {}
+/// fn main() {
+/// unimplemented!();
+/// }
+/// ```
+///
+/// This should not lint because there is another function defined:
+/// ```
+/// fn fun() {}
+///
+/// fn main() {
+/// unimplemented!();
+/// }
+/// ```
+///
+/// We should not lint inside raw strings ...
+/// ```
+/// let string = r#"
+/// fn main() {
+/// unimplemented!();
+/// }
+/// "#;
+/// ```
+///
+/// ... or comments
+/// ```
+/// // fn main() {
+/// // let _inception = 42;
+/// // }
+/// let _inception = 42;
+/// ```
+///
+/// We should not lint ignored examples:
+/// ```rust,ignore
+/// fn main() {
+/// unimplemented!();
+/// }
+/// ```
+///
+/// Or even non-rust examples:
+/// ```text
+/// fn main() {
+/// is what starts the program
+/// }
+/// ```
+fn no_false_positives() {}
+
+/// Yields a parse error when interpreted as rust code:
+/// ```
+/// r#"hi"
+/// ```
+fn issue_6022() {}
+
+fn main() {
+ bad_doctests();
+ no_false_positives();
+}
diff --git a/src/tools/clippy/tests/ui/needless_doc_main.stderr b/src/tools/clippy/tests/ui/needless_doc_main.stderr
new file mode 100644
index 000000000..05c7f9d33
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_doc_main.stderr
@@ -0,0 +1,28 @@
+error: needless `fn main` in doctest
+ --> $DIR/needless_doc_main.rs:7:4
+ |
+LL | /// fn main() {
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::needless-doctest-main` implied by `-D warnings`
+
+error: needless `fn main` in doctest
+ --> $DIR/needless_doc_main.rs:14:4
+ |
+LL | /// fn main() -> () {
+ | ^^^^^^^^^^^^^^^^^^
+
+error: needless `fn main` in doctest
+ --> $DIR/needless_doc_main.rs:21:4
+ |
+LL | /// fn main() {
+ | ^^^^^^^^^^^^
+
+error: needless `fn main` in doctest
+ --> $DIR/needless_doc_main.rs:28:4
+ |
+LL | /// fn main() {
+ | ^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed b/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed
new file mode 100644
index 000000000..c1685f7b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed
@@ -0,0 +1,118 @@
+// run-rustfix
+#![warn(clippy::needless_for_each)]
+#![allow(
+ unused,
+ clippy::needless_return,
+ clippy::match_single_binding,
+ clippy::let_unit_value
+)]
+
+use std::collections::HashMap;
+
+fn should_lint() {
+ let v: Vec<i32> = Vec::new();
+ let mut acc = 0;
+ for elem in v.iter() {
+ acc += elem;
+ }
+ for elem in v.into_iter() {
+ acc += elem;
+ }
+
+ for elem in [1, 2, 3].iter() {
+ acc += elem;
+ }
+
+ let mut hash_map: HashMap<i32, i32> = HashMap::new();
+ for (k, v) in hash_map.iter() {
+ acc += k + v;
+ }
+ for (k, v) in hash_map.iter_mut() {
+ acc += *k + *v;
+ }
+ for k in hash_map.keys() {
+ acc += k;
+ }
+ for v in hash_map.values() {
+ acc += v;
+ }
+
+ fn my_vec() -> Vec<i32> {
+ Vec::new()
+ }
+ for elem in my_vec().iter() {
+ acc += elem;
+ }
+}
+
+fn should_not_lint() {
+ let v: Vec<i32> = Vec::new();
+ let mut acc = 0;
+
+ // `for_each` argument is not closure.
+ fn print(x: &i32) {
+ println!("{}", x);
+ }
+ v.iter().for_each(print);
+
+ // User defined type.
+ struct MyStruct {
+ v: Vec<i32>,
+ }
+ impl MyStruct {
+ fn iter(&self) -> impl Iterator<Item = &i32> {
+ self.v.iter()
+ }
+ }
+ let s = MyStruct { v: Vec::new() };
+ s.iter().for_each(|elem| {
+ acc += elem;
+ });
+
+ // `for_each` follows long iterator chain.
+ v.iter().chain(v.iter()).for_each(|v| {
+ acc += v;
+ });
+ v.as_slice().iter().for_each(|v| {
+ acc += v;
+ });
+ s.v.iter().for_each(|v| {
+ acc += v;
+ });
+
+ // `return` is used in `Loop` of the closure.
+ v.iter().for_each(|v| {
+ for i in 0..*v {
+ if i == 10 {
+ return;
+ } else {
+ println!("{}", v);
+ }
+ }
+ if *v == 20 {
+ return;
+ } else {
+ println!("{}", v);
+ }
+ });
+
+ // Previously transformed iterator variable.
+ let it = v.iter();
+ it.chain(v.iter()).for_each(|elem| {
+ acc += elem;
+ });
+
+ // `for_each` is not directly in a statement.
+ match 1 {
+ _ => v.iter().for_each(|elem| {
+ acc += elem;
+ }),
+ }
+
+    // `for_each` is in a let binding.
+ let _ = v.iter().for_each(|elem| {
+ acc += elem;
+ });
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.rs b/src/tools/clippy/tests/ui/needless_for_each_fixable.rs
new file mode 100644
index 000000000..ad17b0956
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.rs
@@ -0,0 +1,118 @@
+// run-rustfix
+#![warn(clippy::needless_for_each)]
+#![allow(
+ unused,
+ clippy::needless_return,
+ clippy::match_single_binding,
+ clippy::let_unit_value
+)]
+
+use std::collections::HashMap;
+
+fn should_lint() {
+ let v: Vec<i32> = Vec::new();
+ let mut acc = 0;
+ v.iter().for_each(|elem| {
+ acc += elem;
+ });
+ v.into_iter().for_each(|elem| {
+ acc += elem;
+ });
+
+ [1, 2, 3].iter().for_each(|elem| {
+ acc += elem;
+ });
+
+ let mut hash_map: HashMap<i32, i32> = HashMap::new();
+ hash_map.iter().for_each(|(k, v)| {
+ acc += k + v;
+ });
+ hash_map.iter_mut().for_each(|(k, v)| {
+ acc += *k + *v;
+ });
+ hash_map.keys().for_each(|k| {
+ acc += k;
+ });
+ hash_map.values().for_each(|v| {
+ acc += v;
+ });
+
+ fn my_vec() -> Vec<i32> {
+ Vec::new()
+ }
+ my_vec().iter().for_each(|elem| {
+ acc += elem;
+ });
+}
+
+fn should_not_lint() {
+ let v: Vec<i32> = Vec::new();
+ let mut acc = 0;
+
+ // `for_each` argument is not closure.
+ fn print(x: &i32) {
+ println!("{}", x);
+ }
+ v.iter().for_each(print);
+
+ // User defined type.
+ struct MyStruct {
+ v: Vec<i32>,
+ }
+ impl MyStruct {
+ fn iter(&self) -> impl Iterator<Item = &i32> {
+ self.v.iter()
+ }
+ }
+ let s = MyStruct { v: Vec::new() };
+ s.iter().for_each(|elem| {
+ acc += elem;
+ });
+
+ // `for_each` follows long iterator chain.
+ v.iter().chain(v.iter()).for_each(|v| {
+ acc += v;
+ });
+ v.as_slice().iter().for_each(|v| {
+ acc += v;
+ });
+ s.v.iter().for_each(|v| {
+ acc += v;
+ });
+
+ // `return` is used in `Loop` of the closure.
+ v.iter().for_each(|v| {
+ for i in 0..*v {
+ if i == 10 {
+ return;
+ } else {
+ println!("{}", v);
+ }
+ }
+ if *v == 20 {
+ return;
+ } else {
+ println!("{}", v);
+ }
+ });
+
+ // Previously transformed iterator variable.
+ let it = v.iter();
+ it.chain(v.iter()).for_each(|elem| {
+ acc += elem;
+ });
+
+ // `for_each` is not directly in a statement.
+ match 1 {
+ _ => v.iter().for_each(|elem| {
+ acc += elem;
+ }),
+ }
+
+    // `for_each` is in a let binding.
+ let _ = v.iter().for_each(|elem| {
+ acc += elem;
+ });
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr b/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr
new file mode 100644
index 000000000..08e995851
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr
@@ -0,0 +1,123 @@
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:15:5
+ |
+LL | / v.iter().for_each(|elem| {
+LL | | acc += elem;
+LL | | });
+ | |_______^
+ |
+ = note: `-D clippy::needless-for-each` implied by `-D warnings`
+help: try
+ |
+LL ~ for elem in v.iter() {
+LL + acc += elem;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:18:5
+ |
+LL | / v.into_iter().for_each(|elem| {
+LL | | acc += elem;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for elem in v.into_iter() {
+LL + acc += elem;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:22:5
+ |
+LL | / [1, 2, 3].iter().for_each(|elem| {
+LL | | acc += elem;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for elem in [1, 2, 3].iter() {
+LL + acc += elem;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:27:5
+ |
+LL | / hash_map.iter().for_each(|(k, v)| {
+LL | | acc += k + v;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for (k, v) in hash_map.iter() {
+LL + acc += k + v;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:30:5
+ |
+LL | / hash_map.iter_mut().for_each(|(k, v)| {
+LL | | acc += *k + *v;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for (k, v) in hash_map.iter_mut() {
+LL + acc += *k + *v;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:33:5
+ |
+LL | / hash_map.keys().for_each(|k| {
+LL | | acc += k;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for k in hash_map.keys() {
+LL + acc += k;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:36:5
+ |
+LL | / hash_map.values().for_each(|v| {
+LL | | acc += v;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for v in hash_map.values() {
+LL + acc += v;
+LL + }
+ |
+
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_fixable.rs:43:5
+ |
+LL | / my_vec().iter().for_each(|elem| {
+LL | | acc += elem;
+LL | | });
+ | |_______^
+ |
+help: try
+ |
+LL ~ for elem in my_vec().iter() {
+LL + acc += elem;
+LL + }
+ |
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_for_each_unfixable.rs b/src/tools/clippy/tests/ui/needless_for_each_unfixable.rs
new file mode 100644
index 000000000..d765d7dab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_for_each_unfixable.rs
@@ -0,0 +1,14 @@
+#![warn(clippy::needless_for_each)]
+#![allow(clippy::needless_return)]
+
+fn main() {
+ let v: Vec<i32> = Vec::new();
+ // This is unfixable because the closure includes `return`.
+ v.iter().for_each(|v| {
+ if *v == 10 {
+ return;
+ } else {
+ println!("{}", v);
+ }
+ });
+}
diff --git a/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr b/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr
new file mode 100644
index 000000000..7893ff31a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_for_each_unfixable.stderr
@@ -0,0 +1,30 @@
+error: needless use of `for_each`
+ --> $DIR/needless_for_each_unfixable.rs:7:5
+ |
+LL | / v.iter().for_each(|v| {
+LL | | if *v == 10 {
+LL | | return;
+LL | | } else {
+LL | | println!("{}", v);
+LL | | }
+LL | | });
+ | |_______^
+ |
+ = note: `-D clippy::needless-for-each` implied by `-D warnings`
+help: try
+ |
+LL ~ for v in v.iter() {
+LL + if *v == 10 {
+LL + return;
+LL + } else {
+LL + println!("{}", v);
+LL + }
+LL + }
+ |
+help: ...and replace `return` with `continue`
+ |
+LL | continue;
+ | ~~~~~~~~
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/needless_late_init.fixed b/src/tools/clippy/tests/ui/needless_late_init.fixed
new file mode 100644
index 000000000..fee8e3030
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_late_init.fixed
@@ -0,0 +1,273 @@
+// run-rustfix
+#![feature(let_chains)]
+#![allow(
+ unused,
+ clippy::assign_op_pattern,
+ clippy::blocks_in_if_conditions,
+ clippy::let_and_return,
+ clippy::let_unit_value,
+ clippy::nonminimal_bool
+)]
+
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::rc::Rc;
+
+struct SignificantDrop;
+impl std::ops::Drop for SignificantDrop {
+ fn drop(&mut self) {
+ println!("dropped");
+ }
+}
+
+fn simple() {
+
+ let a = "zero";
+
+
+
+ let b = 1;
+ let c = 2;
+
+
+ let d: usize = 1;
+
+
+ let e = format!("{}", d);
+}
+
+fn main() {
+
+ let n = 1;
+ let a = match n {
+ 1 => "one",
+ _ => {
+ "two"
+ },
+ };
+
+
+ let b = if n == 3 {
+ "four"
+ } else {
+ "five"
+ };
+
+
+ let d = if true {
+ let temp = 5;
+ temp
+ } else {
+ 15
+ };
+
+
+ let e = if true {
+ format!("{} {}", a, b)
+ } else {
+ format!("{}", n)
+ };
+
+
+ let f = match 1 {
+ 1 => "three",
+ _ => return,
+ }; // has semi
+
+
+ let g: usize = if true {
+ 5
+ } else {
+ panic!();
+ };
+
+ // Drop order only matters if both are significant
+
+ let y = SignificantDrop;
+ let x = 1;
+
+
+ let y = 1;
+ let x = SignificantDrop;
+
+
+ // types that should be considered insignificant
+ let y = 1;
+ let y = "2";
+ let y = String::new();
+ let y = vec![3.0];
+ let y = HashMap::<usize, usize>::new();
+ let y = BTreeMap::<usize, usize>::new();
+ let y = HashSet::<usize>::new();
+ let y = BTreeSet::<usize>::new();
+ let y = Box::new(4);
+ let x = SignificantDrop;
+}
+
+async fn in_async() -> &'static str {
+ async fn f() -> &'static str {
+ "one"
+ }
+
+
+ let n = 1;
+ let a = match n {
+ 1 => f().await,
+ _ => {
+ "two"
+ },
+ };
+
+ a
+}
+
+const fn in_const() -> &'static str {
+ const fn f() -> &'static str {
+ "one"
+ }
+
+
+ let n = 1;
+ let a = match n {
+ 1 => f(),
+ _ => {
+ "two"
+ },
+ };
+
+ a
+}
+
+fn does_not_lint() {
+ let z;
+ if false {
+ z = 1;
+ }
+
+ let x;
+ let y;
+ if true {
+ x = 1;
+ } else {
+ y = 1;
+ }
+
+ let mut x;
+ if true {
+ x = 5;
+ x = 10 / x;
+ } else {
+ x = 2;
+ }
+
+ let x;
+ let _ = match 1 {
+ 1 => x = 10,
+ _ => x = 20,
+ };
+
+ // using tuples would be possible, but not always preferable
+ let x;
+ let y;
+ if true {
+ x = 1;
+ y = 2;
+ } else {
+ x = 3;
+ y = 4;
+ }
+
+ // could match with a smarter heuristic to avoid multiple assignments
+ let x;
+ if true {
+ let mut y = 5;
+ y = 6;
+ x = y;
+ } else {
+ x = 2;
+ }
+
+ let (x, y);
+ if true {
+ x = 1;
+ } else {
+ x = 2;
+ }
+ y = 3;
+
+ macro_rules! assign {
+ ($i:ident) => {
+ $i = 1;
+ };
+ }
+ let x;
+ assign!(x);
+
+ let x;
+ if true {
+ assign!(x);
+ } else {
+ x = 2;
+ }
+
+ macro_rules! in_macro {
+ () => {
+ let x;
+ x = 1;
+
+ let x;
+ if true {
+ x = 1;
+ } else {
+ x = 2;
+ }
+ };
+ }
+ in_macro!();
+
+ // ignore if-lets - https://github.com/rust-lang/rust-clippy/issues/8613
+ let x;
+ if let Some(n) = Some("v") {
+ x = 1;
+ } else {
+ x = 2;
+ }
+
+ let x;
+ if true && let Some(n) = Some("let chains too") {
+ x = 1;
+ } else {
+ x = 2;
+ }
+
+ // ignore mut bindings
+ // https://github.com/shepmaster/twox-hash/blob/b169c16d86eb8ea4a296b0acb9d00ca7e3c3005f/src/sixty_four.rs#L88-L93
+ // https://github.com/dtolnay/thiserror/blob/21c26903e29cb92ba1a7ff11e82ae2001646b60d/tests/test_generics.rs#L91-L100
+ let mut x: usize;
+ x = 1;
+ x = 2;
+ x = 3;
+
+ // should not move the declaration if `x` has a significant drop, and there
+ // is another binding with a significant drop between it and the first usage
+ let x;
+ let y = SignificantDrop;
+ x = SignificantDrop;
+}
+
+#[rustfmt::skip]
+fn issue8911() -> u32 {
+ let x;
+ match 1 {
+ _ if { x = 1; false } => return 1,
+ _ => return 2,
+ }
+
+ let x;
+ if { x = 1; true } {
+ return 1;
+ } else {
+ return 2;
+ }
+
+ 3
+}
diff --git a/src/tools/clippy/tests/ui/needless_late_init.rs b/src/tools/clippy/tests/ui/needless_late_init.rs
new file mode 100644
index 000000000..402d9f9ef
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_late_init.rs
@@ -0,0 +1,273 @@
+// run-rustfix
+#![feature(let_chains)]
+#![allow(
+ unused,
+ clippy::assign_op_pattern,
+ clippy::blocks_in_if_conditions,
+ clippy::let_and_return,
+ clippy::let_unit_value,
+ clippy::nonminimal_bool
+)]
+
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::rc::Rc;
+
+struct SignificantDrop;
+impl std::ops::Drop for SignificantDrop {
+ fn drop(&mut self) {
+ println!("dropped");
+ }
+}
+
+fn simple() {
+ let a;
+ a = "zero";
+
+ let b;
+ let c;
+ b = 1;
+ c = 2;
+
+ let d: usize;
+ d = 1;
+
+ let e;
+ e = format!("{}", d);
+}
+
+fn main() {
+ let a;
+ let n = 1;
+ match n {
+ 1 => a = "one",
+ _ => {
+ a = "two";
+ },
+ }
+
+ let b;
+ if n == 3 {
+ b = "four";
+ } else {
+ b = "five"
+ }
+
+ let d;
+ if true {
+ let temp = 5;
+ d = temp;
+ } else {
+ d = 15;
+ }
+
+ let e;
+ if true {
+ e = format!("{} {}", a, b);
+ } else {
+ e = format!("{}", n);
+ }
+
+ let f;
+ match 1 {
+ 1 => f = "three",
+ _ => return,
+ }; // has semi
+
+ let g: usize;
+ if true {
+ g = 5;
+ } else {
+ panic!();
+ }
+
+ // Drop order only matters if both are significant
+ let x;
+ let y = SignificantDrop;
+ x = 1;
+
+ let x;
+ let y = 1;
+ x = SignificantDrop;
+
+ let x;
+ // types that should be considered insignificant
+ let y = 1;
+ let y = "2";
+ let y = String::new();
+ let y = vec![3.0];
+ let y = HashMap::<usize, usize>::new();
+ let y = BTreeMap::<usize, usize>::new();
+ let y = HashSet::<usize>::new();
+ let y = BTreeSet::<usize>::new();
+ let y = Box::new(4);
+ x = SignificantDrop;
+}
+
+async fn in_async() -> &'static str {
+ async fn f() -> &'static str {
+ "one"
+ }
+
+ let a;
+ let n = 1;
+ match n {
+ 1 => a = f().await,
+ _ => {
+ a = "two";
+ },
+ }
+
+ a
+}
+
+const fn in_const() -> &'static str {
+ const fn f() -> &'static str {
+ "one"
+ }
+
+ let a;
+ let n = 1;
+ match n {
+ 1 => a = f(),
+ _ => {
+ a = "two";
+ },
+ }
+
+ a
+}
+
+fn does_not_lint() {
+ let z;
+ if false {
+ z = 1;
+ }
+
+ let x;
+ let y;
+ if true {
+ x = 1;
+ } else {
+ y = 1;
+ }
+
+ let mut x;
+ if true {
+ x = 5;
+ x = 10 / x;
+ } else {
+ x = 2;
+ }
+
+ let x;
+ let _ = match 1 {
+ 1 => x = 10,
+ _ => x = 20,
+ };
+
+ // using tuples would be possible, but not always preferable
+ let x;
+ let y;
+ if true {
+ x = 1;
+ y = 2;
+ } else {
+ x = 3;
+ y = 4;
+ }
+
+ // could match with a smarter heuristic to avoid multiple assignments
+ let x;
+ if true {
+ let mut y = 5;
+ y = 6;
+ x = y;
+ } else {
+ x = 2;
+ }
+
+ let (x, y);
+ if true {
+ x = 1;
+ } else {
+ x = 2;
+ }
+ y = 3;
+
+ macro_rules! assign {
+ ($i:ident) => {
+ $i = 1;
+ };
+ }
+ let x;
+ assign!(x);
+
+ let x;
+ if true {
+ assign!(x);
+ } else {
+ x = 2;
+ }
+
+ macro_rules! in_macro {
+ () => {
+ let x;
+ x = 1;
+
+ let x;
+ if true {
+ x = 1;
+ } else {
+ x = 2;
+ }
+ };
+ }
+ in_macro!();
+
+ // ignore if-lets - https://github.com/rust-lang/rust-clippy/issues/8613
+ let x;
+ if let Some(n) = Some("v") {
+ x = 1;
+ } else {
+ x = 2;
+ }
+
+ let x;
+ if true && let Some(n) = Some("let chains too") {
+ x = 1;
+ } else {
+ x = 2;
+ }
+
+ // ignore mut bindings
+ // https://github.com/shepmaster/twox-hash/blob/b169c16d86eb8ea4a296b0acb9d00ca7e3c3005f/src/sixty_four.rs#L88-L93
+ // https://github.com/dtolnay/thiserror/blob/21c26903e29cb92ba1a7ff11e82ae2001646b60d/tests/test_generics.rs#L91-L100
+ let mut x: usize;
+ x = 1;
+ x = 2;
+ x = 3;
+
+ // should not move the declaration if `x` has a significant drop, and there
+ // is another binding with a significant drop between it and the first usage
+ let x;
+ let y = SignificantDrop;
+ x = SignificantDrop;
+}
+
+#[rustfmt::skip]
+fn issue8911() -> u32 {
+ let x;
+ match 1 {
+ _ if { x = 1; false } => return 1,
+ _ => return 2,
+ }
+
+ let x;
+ if { x = 1; true } {
+ return 1;
+ } else {
+ return 2;
+ }
+
+ 3
+}
diff --git a/src/tools/clippy/tests/ui/needless_late_init.stderr b/src/tools/clippy/tests/ui/needless_late_init.stderr
new file mode 100644
index 000000000..313cdbbeb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_late_init.stderr
@@ -0,0 +1,274 @@
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:23:5
+ |
+LL | let a;
+ | ^^^^^^ created here
+LL | a = "zero";
+ | ^^^^^^^^^^ initialised here
+ |
+ = note: `-D clippy::needless-late-init` implied by `-D warnings`
+help: declare `a` here
+ |
+LL | let a = "zero";
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:26:5
+ |
+LL | let b;
+ | ^^^^^^ created here
+LL | let c;
+LL | b = 1;
+ | ^^^^^ initialised here
+ |
+help: declare `b` here
+ |
+LL | let b = 1;
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:27:5
+ |
+LL | let c;
+ | ^^^^^^ created here
+LL | b = 1;
+LL | c = 2;
+ | ^^^^^ initialised here
+ |
+help: declare `c` here
+ |
+LL | let c = 2;
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:31:5
+ |
+LL | let d: usize;
+ | ^^^^^^^^^^^^^ created here
+LL | d = 1;
+ | ^^^^^ initialised here
+ |
+help: declare `d` here
+ |
+LL | let d: usize = 1;
+ | ~~~~~~~~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:34:5
+ |
+LL | let e;
+ | ^^^^^^ created here
+LL | e = format!("{}", d);
+ | ^^^^^^^^^^^^^^^^^^^^ initialised here
+ |
+help: declare `e` here
+ |
+LL | let e = format!("{}", d);
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:39:5
+ |
+LL | let a;
+ | ^^^^^^
+ |
+help: declare `a` here
+ |
+LL | let a = match n {
+ | +++++++
+help: remove the assignments from the `match` arms
+ |
+LL ~ 1 => "one",
+LL | _ => {
+LL ~ "two"
+ |
+help: add a semicolon after the `match` expression
+ |
+LL | };
+ | +
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:48:5
+ |
+LL | let b;
+ | ^^^^^^
+ |
+help: declare `b` here
+ |
+LL | let b = if n == 3 {
+ | +++++++
+help: remove the assignments from the branches
+ |
+LL ~ "four"
+LL | } else {
+LL ~ "five"
+ |
+help: add a semicolon after the `if` expression
+ |
+LL | };
+ | +
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:55:5
+ |
+LL | let d;
+ | ^^^^^^
+ |
+help: declare `d` here
+ |
+LL | let d = if true {
+ | +++++++
+help: remove the assignments from the branches
+ |
+LL ~ temp
+LL | } else {
+LL ~ 15
+ |
+help: add a semicolon after the `if` expression
+ |
+LL | };
+ | +
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:63:5
+ |
+LL | let e;
+ | ^^^^^^
+ |
+help: declare `e` here
+ |
+LL | let e = if true {
+ | +++++++
+help: remove the assignments from the branches
+ |
+LL ~ format!("{} {}", a, b)
+LL | } else {
+LL ~ format!("{}", n)
+ |
+help: add a semicolon after the `if` expression
+ |
+LL | };
+ | +
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:70:5
+ |
+LL | let f;
+ | ^^^^^^
+ |
+help: declare `f` here
+ |
+LL | let f = match 1 {
+ | +++++++
+help: remove the assignments from the `match` arms
+ |
+LL - 1 => f = "three",
+LL + 1 => "three",
+ |
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:76:5
+ |
+LL | let g: usize;
+ | ^^^^^^^^^^^^^
+ |
+help: declare `g` here
+ |
+LL | let g: usize = if true {
+ | ++++++++++++++
+help: remove the assignments from the branches
+ |
+LL - g = 5;
+LL + 5
+ |
+help: add a semicolon after the `if` expression
+ |
+LL | };
+ | +
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:84:5
+ |
+LL | let x;
+ | ^^^^^^ created here
+LL | let y = SignificantDrop;
+LL | x = 1;
+ | ^^^^^ initialised here
+ |
+help: declare `x` here
+ |
+LL | let x = 1;
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:88:5
+ |
+LL | let x;
+ | ^^^^^^ created here
+LL | let y = 1;
+LL | x = SignificantDrop;
+ | ^^^^^^^^^^^^^^^^^^^ initialised here
+ |
+help: declare `x` here
+ |
+LL | let x = SignificantDrop;
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:92:5
+ |
+LL | let x;
+ | ^^^^^^ created here
+...
+LL | x = SignificantDrop;
+ | ^^^^^^^^^^^^^^^^^^^ initialised here
+ |
+help: declare `x` here
+ |
+LL | let x = SignificantDrop;
+ | ~~~~~
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:111:5
+ |
+LL | let a;
+ | ^^^^^^
+ |
+help: declare `a` here
+ |
+LL | let a = match n {
+ | +++++++
+help: remove the assignments from the `match` arms
+ |
+LL ~ 1 => f().await,
+LL | _ => {
+LL ~ "two"
+ |
+help: add a semicolon after the `match` expression
+ |
+LL | };
+ | +
+
+error: unneeded late initialization
+ --> $DIR/needless_late_init.rs:128:5
+ |
+LL | let a;
+ | ^^^^^^
+ |
+help: declare `a` here
+ |
+LL | let a = match n {
+ | +++++++
+help: remove the assignments from the `match` arms
+ |
+LL ~ 1 => f(),
+LL | _ => {
+LL ~ "two"
+ |
+help: add a semicolon after the `match` expression
+ |
+LL | };
+ | +
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.rs b/src/tools/clippy/tests/ui/needless_lifetimes.rs
new file mode 100644
index 000000000..fc686b1da
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_lifetimes.rs
@@ -0,0 +1,422 @@
+#![warn(clippy::needless_lifetimes)]
+#![allow(
+ dead_code,
+ clippy::boxed_local,
+ clippy::needless_pass_by_value,
+ clippy::unnecessary_wraps,
+ dyn_drop,
+ clippy::get_first
+)]
+
+fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {}
+
+fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {}
+
+// No error; same lifetime on two params.
+fn same_lifetime_on_input<'a>(_x: &'a u8, _y: &'a u8) {}
+
+// No error; static involved.
+fn only_static_on_input(_x: &u8, _y: &u8, _z: &'static u8) {}
+
+fn mut_and_static_input(_x: &mut u8, _y: &'static str) {}
+
+fn in_and_out<'a>(x: &'a u8, _y: u8) -> &'a u8 {
+ x
+}
+
+// No error; multiple input refs.
+fn multiple_in_and_out_1<'a>(x: &'a u8, _y: &'a u8) -> &'a u8 {
+ x
+}
+
+// No error; multiple input refs.
+fn multiple_in_and_out_2<'a, 'b>(x: &'a u8, _y: &'b u8) -> &'a u8 {
+ x
+}
+
+// No error; multiple input refs
+async fn func<'a>(args: &[&'a str]) -> Option<&'a str> {
+ args.get(0).cloned()
+}
+
+// No error; static involved.
+fn in_static_and_out<'a>(x: &'a u8, _y: &'static u8) -> &'a u8 {
+ x
+}
+
+// No error.
+fn deep_reference_1<'a, 'b>(x: &'a u8, _y: &'b u8) -> Result<&'a u8, ()> {
+ Ok(x)
+}
+
+// No error; two input refs.
+fn deep_reference_2<'a>(x: Result<&'a u8, &'a u8>) -> &'a u8 {
+ x.unwrap()
+}
+
+fn deep_reference_3<'a>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> {
+ Ok(x)
+}
+
+// Where-clause, but without lifetimes.
+fn where_clause_without_lt<'a, T>(x: &'a u8, _y: u8) -> Result<&'a u8, ()>
+where
+ T: Copy,
+{
+ Ok(x)
+}
+
+type Ref<'r> = &'r u8;
+
+// No error; same lifetime on two params.
+fn lifetime_param_1<'a>(_x: Ref<'a>, _y: &'a u8) {}
+
+fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {}
+
+// No error; bounded lifetime.
+fn lifetime_param_3<'a, 'b: 'a>(_x: Ref<'a>, _y: &'b u8) {}
+
+// No error; bounded lifetime.
+fn lifetime_param_4<'a, 'b>(_x: Ref<'a>, _y: &'b u8)
+where
+ 'b: 'a,
+{
+}
+
+struct Lt<'a, I: 'static> {
+ x: &'a I,
+}
+
+// No error; fn bound references `'a`.
+fn fn_bound<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I>
+where
+ F: Fn(Lt<'a, I>) -> Lt<'a, I>,
+{
+ unreachable!()
+}
+
+fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I>
+where
+ for<'x> F: Fn(Lt<'x, I>) -> Lt<'x, I>,
+{
+ unreachable!()
+}
+
+// No error; see below.
+fn fn_bound_3<'a, F: FnOnce(&'a i32)>(x: &'a i32, f: F) {
+ f(x);
+}
+
+fn fn_bound_3_cannot_elide() {
+ let x = 42;
+ let p = &x;
+ let mut q = &x;
+ // This will fail if we elide lifetimes of `fn_bound_3`.
+ fn_bound_3(p, |y| q = y);
+}
+
+// No error; multiple input refs.
+fn fn_bound_4<'a, F: FnOnce() -> &'a ()>(cond: bool, x: &'a (), f: F) -> &'a () {
+ if cond { x } else { f() }
+}
+
+struct X {
+ x: u8,
+}
+
+impl X {
+ fn self_and_out<'s>(&'s self) -> &'s u8 {
+ &self.x
+ }
+
+ // No error; multiple input refs.
+ fn self_and_in_out<'s, 't>(&'s self, _x: &'t u8) -> &'s u8 {
+ &self.x
+ }
+
+ fn distinct_self_and_in<'s, 't>(&'s self, _x: &'t u8) {}
+
+ // No error; same lifetimes on two params.
+ fn self_and_same_in<'s>(&'s self, _x: &'s u8) {}
+}
+
+struct Foo<'a>(&'a u8);
+
+impl<'a> Foo<'a> {
+ // No error; lifetime `'a` not defined in method.
+ fn self_shared_lifetime(&self, _: &'a u8) {}
+ // No error; bounds exist.
+ fn self_bound_lifetime<'b: 'a>(&self, _: &'b u8) {}
+}
+
+fn already_elided<'a>(_: &u8, _: &'a u8) -> &'a u8 {
+ unimplemented!()
+}
+
+fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str {
+ unimplemented!()
+}
+
+// No warning; two input lifetimes (named on the reference, anonymous on `Foo`).
+fn struct_with_lt2<'a>(_foo: &'a Foo) -> &'a str {
+ unimplemented!()
+}
+
+// No warning; two input lifetimes (anonymous on the reference, named on `Foo`).
+fn struct_with_lt3<'a>(_foo: &Foo<'a>) -> &'a str {
+ unimplemented!()
+}
+
+// No warning; two input lifetimes.
+fn struct_with_lt4<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str {
+ unimplemented!()
+}
+
+trait WithLifetime<'a> {}
+
+type WithLifetimeAlias<'a> = dyn WithLifetime<'a>;
+
+// Should not warn because it won't build without the lifetime.
+fn trait_obj_elided<'a>(_arg: &'a dyn WithLifetime) -> &'a str {
+ unimplemented!()
+}
+
+// Should warn because there is no lifetime on `Drop`, so this would be
+// unambiguous if we elided the lifetime.
+fn trait_obj_elided2<'a>(_arg: &'a dyn Drop) -> &'a str {
+ unimplemented!()
+}
+
+type FooAlias<'a> = Foo<'a>;
+
+fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str {
+ unimplemented!()
+}
+
+// No warning; two input lifetimes (named on the reference, anonymous on `FooAlias`).
+fn alias_with_lt2<'a>(_foo: &'a FooAlias) -> &'a str {
+ unimplemented!()
+}
+
+// No warning; two input lifetimes (anonymous on the reference, named on `FooAlias`).
+fn alias_with_lt3<'a>(_foo: &FooAlias<'a>) -> &'a str {
+ unimplemented!()
+}
+
+// No warning; two input lifetimes.
+fn alias_with_lt4<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str {
+ unimplemented!()
+}
+
+fn named_input_elided_output<'a>(_arg: &'a str) -> &str {
+ unimplemented!()
+}
+
+fn elided_input_named_output<'a>(_arg: &str) -> &'a str {
+ unimplemented!()
+}
+
+fn trait_bound_ok<'a, T: WithLifetime<'static>>(_: &'a u8, _: T) {
+ unimplemented!()
+}
+fn trait_bound<'a, T: WithLifetime<'a>>(_: &'a u8, _: T) {
+ unimplemented!()
+}
+
+// Don't warn on these; see issue #292.
+fn trait_bound_bug<'a, T: WithLifetime<'a>>() {
+ unimplemented!()
+}
+
+// See issue #740.
+struct Test {
+ vec: Vec<usize>,
+}
+
+impl Test {
+ fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = usize> + 'a> {
+ unimplemented!()
+ }
+}
+
+trait LintContext<'a> {}
+
+fn f<'a, T: LintContext<'a>>(_: &T) {}
+
+fn test<'a>(x: &'a [u8]) -> u8 {
+ let y: &'a u8 = &x[5];
+ *y
+}
+
+// Issue #3284: give hint regarding lifetime in return type.
+struct Cow<'a> {
+ x: &'a str,
+}
+fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> {
+ unimplemented!()
+}
+
+// Make sure we still warn on implementations
+mod issue4291 {
+ trait BadTrait {
+ fn needless_lt<'a>(x: &'a u8) {}
+ }
+
+ impl BadTrait for () {
+ fn needless_lt<'a>(_x: &'a u8) {}
+ }
+}
+
+mod issue2944 {
+ trait Foo {}
+ struct Bar;
+ struct Baz<'a> {
+ bar: &'a Bar,
+ }
+
+ impl<'a> Foo for Baz<'a> {}
+ impl Bar {
+ fn baz<'a>(&'a self) -> impl Foo + 'a {
+ Baz { bar: self }
+ }
+ }
+}
+
+mod nested_elision_sites {
+    // issue #2944
+
+ // closure trait bounds subject to nested elision
+ // don't lint because they refer to outer lifetimes
+ fn trait_fn<'a>(i: &'a i32) -> impl Fn() -> &'a i32 {
+ move || i
+ }
+ fn trait_fn_mut<'a>(i: &'a i32) -> impl FnMut() -> &'a i32 {
+ move || i
+ }
+ fn trait_fn_once<'a>(i: &'a i32) -> impl FnOnce() -> &'a i32 {
+ move || i
+ }
+
+ // don't lint
+ fn impl_trait_in_input_position<'a>(f: impl Fn() -> &'a i32) -> &'a i32 {
+ f()
+ }
+ fn impl_trait_in_output_position<'a>(i: &'a i32) -> impl Fn() -> &'a i32 {
+ move || i
+ }
+ // lint
+ fn impl_trait_elidable_nested_named_lifetimes<'a>(i: &'a i32, f: impl for<'b> Fn(&'b i32) -> &'b i32) -> &'a i32 {
+ f(i)
+ }
+ fn impl_trait_elidable_nested_anonymous_lifetimes<'a>(i: &'a i32, f: impl Fn(&i32) -> &i32) -> &'a i32 {
+ f(i)
+ }
+
+ // don't lint
+ fn generics_not_elidable<'a, T: Fn() -> &'a i32>(f: T) -> &'a i32 {
+ f()
+ }
+ // lint
+ fn generics_elidable<'a, T: Fn(&i32) -> &i32>(i: &'a i32, f: T) -> &'a i32 {
+ f(i)
+ }
+
+ // don't lint
+ fn where_clause_not_elidable<'a, T>(f: T) -> &'a i32
+ where
+ T: Fn() -> &'a i32,
+ {
+ f()
+ }
+ // lint
+ fn where_clause_elidadable<'a, T>(i: &'a i32, f: T) -> &'a i32
+ where
+ T: Fn(&i32) -> &i32,
+ {
+ f(i)
+ }
+
+ // don't lint
+ fn pointer_fn_in_input_position<'a>(f: fn(&'a i32) -> &'a i32, i: &'a i32) -> &'a i32 {
+ f(i)
+ }
+ fn pointer_fn_in_output_position<'a>(_: &'a i32) -> fn(&'a i32) -> &'a i32 {
+ |i| i
+ }
+ // lint
+ fn pointer_fn_elidable<'a>(i: &'a i32, f: fn(&i32) -> &i32) -> &'a i32 {
+ f(i)
+ }
+
+ // don't lint
+ fn nested_fn_pointer_1<'a>(_: &'a i32) -> fn(fn(&'a i32) -> &'a i32) -> i32 {
+ |f| 42
+ }
+ fn nested_fn_pointer_2<'a>(_: &'a i32) -> impl Fn(fn(&'a i32)) {
+ |f| ()
+ }
+
+ // lint
+ fn nested_fn_pointer_3<'a>(_: &'a i32) -> fn(fn(&i32) -> &i32) -> i32 {
+ |f| 42
+ }
+ fn nested_fn_pointer_4<'a>(_: &'a i32) -> impl Fn(fn(&i32)) {
+ |f| ()
+ }
+}
+
+mod issue6159 {
+ use std::ops::Deref;
+ pub fn apply_deref<'a, T, F, R>(x: &'a T, f: F) -> R
+ where
+ T: Deref,
+ F: FnOnce(&'a T::Target) -> R,
+ {
+ f(x.deref())
+ }
+}
+
+mod issue7296 {
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ struct Foo;
+ impl Foo {
+ fn implicit<'a>(&'a self) -> &'a () {
+ &()
+ }
+ fn implicit_mut<'a>(&'a mut self) -> &'a () {
+ &()
+ }
+
+ fn explicit<'a>(self: &'a Arc<Self>) -> &'a () {
+ &()
+ }
+ fn explicit_mut<'a>(self: &'a mut Rc<Self>) -> &'a () {
+ &()
+ }
+
+ fn lifetime_elsewhere<'a>(self: Box<Self>, here: &'a ()) -> &'a () {
+ &()
+ }
+ }
+
+ trait Bar {
+ fn implicit<'a>(&'a self) -> &'a ();
+ fn implicit_provided<'a>(&'a self) -> &'a () {
+ &()
+ }
+
+ fn explicit<'a>(self: &'a Arc<Self>) -> &'a ();
+ fn explicit_provided<'a>(self: &'a Arc<Self>) -> &'a () {
+ &()
+ }
+
+ fn lifetime_elsewhere<'a>(self: Box<Self>, here: &'a ()) -> &'a ();
+ fn lifetime_elsewhere_provided<'a>(self: Box<Self>, here: &'a ()) -> &'a () {
+ &()
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.stderr b/src/tools/clippy/tests/ui/needless_lifetimes.stderr
new file mode 100644
index 000000000..3c428fd46
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_lifetimes.stderr
@@ -0,0 +1,190 @@
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:11:1
+ |
+LL | fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::needless-lifetimes` implied by `-D warnings`
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:13:1
+ |
+LL | fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:23:1
+ |
+LL | fn in_and_out<'a>(x: &'a u8, _y: u8) -> &'a u8 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:57:1
+ |
+LL | fn deep_reference_3<'a>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:62:1
+ |
+LL | fn where_clause_without_lt<'a, T>(x: &'a u8, _y: u8) -> Result<&'a u8, ()>
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:74:1
+ |
+LL | fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:98:1
+ |
+LL | fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I>
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:128:5
+ |
+LL | fn self_and_out<'s>(&'s self) -> &'s u8 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:137:5
+ |
+LL | fn distinct_self_and_in<'s, 't>(&'s self, _x: &'t u8) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:156:1
+ |
+LL | fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:186:1
+ |
+LL | fn trait_obj_elided2<'a>(_arg: &'a dyn Drop) -> &'a str {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:192:1
+ |
+LL | fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:211:1
+ |
+LL | fn named_input_elided_output<'a>(_arg: &'a str) -> &str {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:219:1
+ |
+LL | fn trait_bound_ok<'a, T: WithLifetime<'static>>(_: &'a u8, _: T) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:255:1
+ |
+LL | fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:262:9
+ |
+LL | fn needless_lt<'a>(x: &'a u8) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:266:9
+ |
+LL | fn needless_lt<'a>(_x: &'a u8) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:279:9
+ |
+LL | fn baz<'a>(&'a self) -> impl Foo + 'a {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:311:5
+ |
+LL | fn impl_trait_elidable_nested_anonymous_lifetimes<'a>(i: &'a i32, f: impl Fn(&i32) -> &i32) -> &'a i32 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:320:5
+ |
+LL | fn generics_elidable<'a, T: Fn(&i32) -> &i32>(i: &'a i32, f: T) -> &'a i32 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:332:5
+ |
+LL | fn where_clause_elidadable<'a, T>(i: &'a i32, f: T) -> &'a i32
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:347:5
+ |
+LL | fn pointer_fn_elidable<'a>(i: &'a i32, f: fn(&i32) -> &i32) -> &'a i32 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:360:5
+ |
+LL | fn nested_fn_pointer_3<'a>(_: &'a i32) -> fn(fn(&i32) -> &i32) -> i32 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:363:5
+ |
+LL | fn nested_fn_pointer_4<'a>(_: &'a i32) -> impl Fn(fn(&i32)) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:385:9
+ |
+LL | fn implicit<'a>(&'a self) -> &'a () {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:388:9
+ |
+LL | fn implicit_mut<'a>(&'a mut self) -> &'a () {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:399:9
+ |
+LL | fn lifetime_elsewhere<'a>(self: Box<Self>, here: &'a ()) -> &'a () {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:405:9
+ |
+LL | fn implicit<'a>(&'a self) -> &'a ();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:406:9
+ |
+LL | fn implicit_provided<'a>(&'a self) -> &'a () {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:415:9
+ |
+LL | fn lifetime_elsewhere<'a>(self: Box<Self>, here: &'a ()) -> &'a ();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: explicit lifetimes given in parameter types where they could be elided (or replaced with `'_` if needed by type declaration)
+ --> $DIR/needless_lifetimes.rs:416:9
+ |
+LL | fn lifetime_elsewhere_provided<'a>(self: Box<Self>, here: &'a ()) -> &'a () {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 31 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_match.fixed b/src/tools/clippy/tests/ui/needless_match.fixed
new file mode 100644
index 000000000..0c9178fb8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_match.fixed
@@ -0,0 +1,210 @@
+// run-rustfix
+#![warn(clippy::needless_match)]
+#![allow(clippy::manual_map)]
+#![allow(dead_code)]
+
+#[derive(Clone, Copy)]
+enum Simple {
+ A,
+ B,
+ C,
+ D,
+}
+
+fn useless_match() {
+ let i = 10;
+ let _: i32 = i;
+ let s = "test";
+ let _: &str = s;
+}
+
+fn custom_type_match() {
+ let se = Simple::A;
+ let _: Simple = se;
+ // Don't trigger
+ let _: Simple = match se {
+ Simple::A => Simple::A,
+ Simple::B => Simple::B,
+ _ => Simple::C,
+ };
+ // Mingled, don't trigger
+ let _: Simple = match se {
+ Simple::A => Simple::B,
+ Simple::B => Simple::C,
+ Simple::C => Simple::D,
+ Simple::D => Simple::A,
+ };
+}
+
+fn option_match(x: Option<i32>) {
+ let _: Option<i32> = x;
+ // Don't trigger, this is the case for manual_map_option
+ let _: Option<i32> = match x {
+ Some(a) => Some(-a),
+ None => None,
+ };
+}
+
+fn func_ret_err<T>(err: T) -> Result<i32, T> {
+ Err(err)
+}
+
+fn result_match() {
+ let _: Result<i32, i32> = Ok(1);
+ let _: Result<i32, i32> = func_ret_err(0_i32);
+ // as ref, don't trigger
+ let res = &func_ret_err(0_i32);
+ let _: Result<&i32, &i32> = match *res {
+ Ok(ref x) => Ok(x),
+ Err(ref x) => Err(x),
+ };
+}
+
+fn if_let_option() {
+ let _ = Some(1);
+
+ fn do_something() {}
+
+ // Don't trigger
+ let _ = if let Some(a) = Some(1) {
+ Some(a)
+ } else {
+ do_something();
+ None
+ };
+
+ // Don't trigger
+ let _ = if let Some(a) = Some(1) {
+ do_something();
+ Some(a)
+ } else {
+ None
+ };
+
+ // Don't trigger
+ let _ = if let Some(a) = Some(1) { Some(a) } else { Some(2) };
+}
+
+fn if_let_option_result() -> Result<(), ()> {
+ fn f(x: i32) -> Result<Option<i32>, ()> {
+ Ok(Some(x))
+ }
+ // Don't trigger
+ let _ = if let Some(v) = f(1)? { Some(v) } else { f(2)? };
+ Ok(())
+}
+
+fn if_let_result() {
+ let x: Result<i32, i32> = Ok(1);
+ let _: Result<i32, i32> = x;
+ let _: Result<i32, i32> = x;
+ // Input type mismatch, don't trigger
+ #[allow(clippy::question_mark)]
+ let _: Result<i32, i32> = if let Err(e) = Ok(1) { Err(e) } else { x };
+}
+
+fn if_let_custom_enum(x: Simple) {
+ let _: Simple = x;
+
+ // Don't trigger
+ let _: Simple = if let Simple::A = x {
+ Simple::A
+ } else if true {
+ Simple::B
+ } else {
+ x
+ };
+}
+
+mod issue8542 {
+ #[derive(Clone, Copy)]
+ enum E {
+ VariantA(u8, u8),
+ VariantB(u8, bool),
+ }
+
+ enum Complex {
+ A(u8),
+ B(u8, bool),
+ C(u8, i32, f64),
+ D(E, bool),
+ }
+
+ fn match_test() {
+ let ce = Complex::B(8, false);
+ let aa = 0_u8;
+ let bb = false;
+
+ let _: Complex = ce;
+
+ // Don't trigger
+ let _: Complex = match ce {
+ Complex::A(_) => Complex::A(aa),
+ Complex::B(_, b) => Complex::B(aa, b),
+ Complex::C(_, b, _) => Complex::C(aa, b, 64_f64),
+ Complex::D(e, b) => Complex::D(e, b),
+ };
+
+ // Don't trigger
+ let _: Complex = match ce {
+ Complex::A(a) => Complex::A(a),
+ Complex::B(a, _) => Complex::B(a, bb),
+ Complex::C(a, _, _) => Complex::C(a, 32_i32, 64_f64),
+ _ => ce,
+ };
+ }
+}
+
+/// Lint triggered when type coercions happen.
+/// Do NOT trigger on any of these.
+mod issue8551 {
+ trait Trait {}
+ struct Struct;
+ impl Trait for Struct {}
+
+ fn optmap(s: Option<&Struct>) -> Option<&dyn Trait> {
+ match s {
+ Some(s) => Some(s),
+ None => None,
+ }
+ }
+
+ fn lint_tests() {
+ let option: Option<&Struct> = None;
+ let _: Option<&dyn Trait> = match option {
+ Some(s) => Some(s),
+ None => None,
+ };
+
+ let _: Option<&dyn Trait> = if true {
+ match option {
+ Some(s) => Some(s),
+ None => None,
+ }
+ } else {
+ None
+ };
+
+ let result: Result<&Struct, i32> = Err(0);
+ let _: Result<&dyn Trait, i32> = match result {
+ Ok(s) => Ok(s),
+ Err(e) => Err(e),
+ };
+
+ let _: Option<&dyn Trait> = if let Some(s) = option { Some(s) } else { None };
+ }
+}
+
+trait Tr {
+ fn as_mut(&mut self) -> Result<&mut i32, &mut i32>;
+}
+impl Tr for Result<i32, i32> {
+ fn as_mut(&mut self) -> Result<&mut i32, &mut i32> {
+ match self {
+ Ok(x) => Ok(x),
+ Err(e) => Err(e),
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_match.rs b/src/tools/clippy/tests/ui/needless_match.rs
new file mode 100644
index 000000000..f66f01d7c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_match.rs
@@ -0,0 +1,247 @@
+// run-rustfix
+#![warn(clippy::needless_match)]
+#![allow(clippy::manual_map)]
+#![allow(dead_code)]
+
+#[derive(Clone, Copy)]
+enum Simple {
+ A,
+ B,
+ C,
+ D,
+}
+
+fn useless_match() {
+ let i = 10;
+ let _: i32 = match i {
+ 0 => 0,
+ 1 => 1,
+ 2 => 2,
+ _ => i,
+ };
+ let s = "test";
+ let _: &str = match s {
+ "a" => "a",
+ "b" => "b",
+ s => s,
+ };
+}
+
+fn custom_type_match() {
+ let se = Simple::A;
+ let _: Simple = match se {
+ Simple::A => Simple::A,
+ Simple::B => Simple::B,
+ Simple::C => Simple::C,
+ Simple::D => Simple::D,
+ };
+ // Don't trigger
+ let _: Simple = match se {
+ Simple::A => Simple::A,
+ Simple::B => Simple::B,
+ _ => Simple::C,
+ };
+ // Mingled, don't trigger
+ let _: Simple = match se {
+ Simple::A => Simple::B,
+ Simple::B => Simple::C,
+ Simple::C => Simple::D,
+ Simple::D => Simple::A,
+ };
+}
+
+fn option_match(x: Option<i32>) {
+ let _: Option<i32> = match x {
+ Some(a) => Some(a),
+ None => None,
+ };
+ // Don't trigger, this is the case for manual_map_option
+ let _: Option<i32> = match x {
+ Some(a) => Some(-a),
+ None => None,
+ };
+}
+
+fn func_ret_err<T>(err: T) -> Result<i32, T> {
+ Err(err)
+}
+
+fn result_match() {
+ let _: Result<i32, i32> = match Ok(1) {
+ Ok(a) => Ok(a),
+ Err(err) => Err(err),
+ };
+ let _: Result<i32, i32> = match func_ret_err(0_i32) {
+ Err(err) => Err(err),
+ Ok(a) => Ok(a),
+ };
+ // as ref, don't trigger
+ let res = &func_ret_err(0_i32);
+ let _: Result<&i32, &i32> = match *res {
+ Ok(ref x) => Ok(x),
+ Err(ref x) => Err(x),
+ };
+}
+
+fn if_let_option() {
+ let _ = if let Some(a) = Some(1) { Some(a) } else { None };
+
+ fn do_something() {}
+
+ // Don't trigger
+ let _ = if let Some(a) = Some(1) {
+ Some(a)
+ } else {
+ do_something();
+ None
+ };
+
+ // Don't trigger
+ let _ = if let Some(a) = Some(1) {
+ do_something();
+ Some(a)
+ } else {
+ None
+ };
+
+ // Don't trigger
+ let _ = if let Some(a) = Some(1) { Some(a) } else { Some(2) };
+}
+
+fn if_let_option_result() -> Result<(), ()> {
+ fn f(x: i32) -> Result<Option<i32>, ()> {
+ Ok(Some(x))
+ }
+ // Don't trigger
+ let _ = if let Some(v) = f(1)? { Some(v) } else { f(2)? };
+ Ok(())
+}
+
+fn if_let_result() {
+ let x: Result<i32, i32> = Ok(1);
+ let _: Result<i32, i32> = if let Err(e) = x { Err(e) } else { x };
+ let _: Result<i32, i32> = if let Ok(val) = x { Ok(val) } else { x };
+ // Input type mismatch, don't trigger
+ #[allow(clippy::question_mark)]
+ let _: Result<i32, i32> = if let Err(e) = Ok(1) { Err(e) } else { x };
+}
+
+fn if_let_custom_enum(x: Simple) {
+ let _: Simple = if let Simple::A = x {
+ Simple::A
+ } else if let Simple::B = x {
+ Simple::B
+ } else if let Simple::C = x {
+ Simple::C
+ } else {
+ x
+ };
+
+ // Don't trigger
+ let _: Simple = if let Simple::A = x {
+ Simple::A
+ } else if true {
+ Simple::B
+ } else {
+ x
+ };
+}
+
+mod issue8542 {
+ #[derive(Clone, Copy)]
+ enum E {
+ VariantA(u8, u8),
+ VariantB(u8, bool),
+ }
+
+ enum Complex {
+ A(u8),
+ B(u8, bool),
+ C(u8, i32, f64),
+ D(E, bool),
+ }
+
+ fn match_test() {
+ let ce = Complex::B(8, false);
+ let aa = 0_u8;
+ let bb = false;
+
+ let _: Complex = match ce {
+ Complex::A(a) => Complex::A(a),
+ Complex::B(a, b) => Complex::B(a, b),
+ Complex::C(a, b, c) => Complex::C(a, b, c),
+ Complex::D(E::VariantA(ea, eb), b) => Complex::D(E::VariantA(ea, eb), b),
+ Complex::D(E::VariantB(ea, eb), b) => Complex::D(E::VariantB(ea, eb), b),
+ };
+
+ // Don't trigger
+ let _: Complex = match ce {
+ Complex::A(_) => Complex::A(aa),
+ Complex::B(_, b) => Complex::B(aa, b),
+ Complex::C(_, b, _) => Complex::C(aa, b, 64_f64),
+ Complex::D(e, b) => Complex::D(e, b),
+ };
+
+ // Don't trigger
+ let _: Complex = match ce {
+ Complex::A(a) => Complex::A(a),
+ Complex::B(a, _) => Complex::B(a, bb),
+ Complex::C(a, _, _) => Complex::C(a, 32_i32, 64_f64),
+ _ => ce,
+ };
+ }
+}
+
+/// Lint triggered when type coercions happen.
+/// Do NOT trigger on any of these.
+mod issue8551 {
+ trait Trait {}
+ struct Struct;
+ impl Trait for Struct {}
+
+ fn optmap(s: Option<&Struct>) -> Option<&dyn Trait> {
+ match s {
+ Some(s) => Some(s),
+ None => None,
+ }
+ }
+
+ fn lint_tests() {
+ let option: Option<&Struct> = None;
+ let _: Option<&dyn Trait> = match option {
+ Some(s) => Some(s),
+ None => None,
+ };
+
+ let _: Option<&dyn Trait> = if true {
+ match option {
+ Some(s) => Some(s),
+ None => None,
+ }
+ } else {
+ None
+ };
+
+ let result: Result<&Struct, i32> = Err(0);
+ let _: Result<&dyn Trait, i32> = match result {
+ Ok(s) => Ok(s),
+ Err(e) => Err(e),
+ };
+
+ let _: Option<&dyn Trait> = if let Some(s) = option { Some(s) } else { None };
+ }
+}
+
+trait Tr {
+ fn as_mut(&mut self) -> Result<&mut i32, &mut i32>;
+}
+impl Tr for Result<i32, i32> {
+ fn as_mut(&mut self) -> Result<&mut i32, &mut i32> {
+ match self {
+ Ok(x) => Ok(x),
+ Err(e) => Err(e),
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_match.stderr b/src/tools/clippy/tests/ui/needless_match.stderr
new file mode 100644
index 000000000..5bc79800a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_match.stderr
@@ -0,0 +1,113 @@
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:16:18
+ |
+LL | let _: i32 = match i {
+ | __________________^
+LL | | 0 => 0,
+LL | | 1 => 1,
+LL | | 2 => 2,
+LL | | _ => i,
+LL | | };
+ | |_____^ help: replace it with: `i`
+ |
+ = note: `-D clippy::needless-match` implied by `-D warnings`
+
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:23:19
+ |
+LL | let _: &str = match s {
+ | ___________________^
+LL | | "a" => "a",
+LL | | "b" => "b",
+LL | | s => s,
+LL | | };
+ | |_____^ help: replace it with: `s`
+
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:32:21
+ |
+LL | let _: Simple = match se {
+ | _____________________^
+LL | | Simple::A => Simple::A,
+LL | | Simple::B => Simple::B,
+LL | | Simple::C => Simple::C,
+LL | | Simple::D => Simple::D,
+LL | | };
+ | |_____^ help: replace it with: `se`
+
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:54:26
+ |
+LL | let _: Option<i32> = match x {
+ | __________________________^
+LL | | Some(a) => Some(a),
+LL | | None => None,
+LL | | };
+ | |_____^ help: replace it with: `x`
+
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:70:31
+ |
+LL | let _: Result<i32, i32> = match Ok(1) {
+ | _______________________________^
+LL | | Ok(a) => Ok(a),
+LL | | Err(err) => Err(err),
+LL | | };
+ | |_____^ help: replace it with: `Ok(1)`
+
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:74:31
+ |
+LL | let _: Result<i32, i32> = match func_ret_err(0_i32) {
+ | _______________________________^
+LL | | Err(err) => Err(err),
+LL | | Ok(a) => Ok(a),
+LL | | };
+ | |_____^ help: replace it with: `func_ret_err(0_i32)`
+
+error: this if-let expression is unnecessary
+ --> $DIR/needless_match.rs:87:13
+ |
+LL | let _ = if let Some(a) = Some(1) { Some(a) } else { None };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `Some(1)`
+
+error: this if-let expression is unnecessary
+ --> $DIR/needless_match.rs:122:31
+ |
+LL | let _: Result<i32, i32> = if let Err(e) = x { Err(e) } else { x };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `x`
+
+error: this if-let expression is unnecessary
+ --> $DIR/needless_match.rs:123:31
+ |
+LL | let _: Result<i32, i32> = if let Ok(val) = x { Ok(val) } else { x };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `x`
+
+error: this if-let expression is unnecessary
+ --> $DIR/needless_match.rs:130:21
+ |
+LL | let _: Simple = if let Simple::A = x {
+ | _____________________^
+LL | | Simple::A
+LL | | } else if let Simple::B = x {
+LL | | Simple::B
+... |
+LL | | x
+LL | | };
+ | |_____^ help: replace it with: `x`
+
+error: this match expression is unnecessary
+ --> $DIR/needless_match.rs:169:26
+ |
+LL | let _: Complex = match ce {
+ | __________________________^
+LL | | Complex::A(a) => Complex::A(a),
+LL | | Complex::B(a, b) => Complex::B(a, b),
+LL | | Complex::C(a, b, c) => Complex::C(a, b, c),
+LL | | Complex::D(E::VariantA(ea, eb), b) => Complex::D(E::VariantA(ea, eb), b),
+LL | | Complex::D(E::VariantB(ea, eb), b) => Complex::D(E::VariantB(ea, eb), b),
+LL | | };
+ | |_________^ help: replace it with: `ce`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_option_as_deref.fixed b/src/tools/clippy/tests/ui/needless_option_as_deref.fixed
new file mode 100644
index 000000000..acd22c6bb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_option_as_deref.fixed
@@ -0,0 +1,55 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::needless_option_as_deref)]
+
+fn main() {
+ // should lint
+ let _: Option<&usize> = Some(&1);
+ let _: Option<&mut usize> = Some(&mut 1);
+
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ let _ = x;
+
+ // should not lint
+ let _ = Some(Box::new(1)).as_deref();
+ let _ = Some(Box::new(1)).as_deref_mut();
+
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ for _ in 0..3 {
+ let _ = x.as_deref_mut();
+ }
+
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ let mut closure = || {
+ let _ = x.as_deref_mut();
+ };
+ closure();
+ closure();
+
+ // #7846
+ let mut i = 0;
+ let mut opt_vec = vec![Some(&mut i)];
+ opt_vec[0].as_deref_mut().unwrap();
+
+ let mut i = 0;
+ let x = &mut Some(&mut i);
+ (*x).as_deref_mut();
+
+ // #8047
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ x.as_deref_mut();
+ dbg!(x);
+}
+
+struct S<'a> {
+ opt: Option<&'a mut usize>,
+}
+
+fn from_field<'a>(s: &'a mut S<'a>) -> Option<&'a mut usize> {
+ s.opt.as_deref_mut()
+}
diff --git a/src/tools/clippy/tests/ui/needless_option_as_deref.rs b/src/tools/clippy/tests/ui/needless_option_as_deref.rs
new file mode 100644
index 000000000..61eda5052
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_option_as_deref.rs
@@ -0,0 +1,55 @@
+// run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::needless_option_as_deref)]
+
+fn main() {
+ // should lint
+ let _: Option<&usize> = Some(&1).as_deref();
+ let _: Option<&mut usize> = Some(&mut 1).as_deref_mut();
+
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ let _ = x.as_deref_mut();
+
+ // should not lint
+ let _ = Some(Box::new(1)).as_deref();
+ let _ = Some(Box::new(1)).as_deref_mut();
+
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ for _ in 0..3 {
+ let _ = x.as_deref_mut();
+ }
+
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ let mut closure = || {
+ let _ = x.as_deref_mut();
+ };
+ closure();
+ closure();
+
+ // #7846
+ let mut i = 0;
+ let mut opt_vec = vec![Some(&mut i)];
+ opt_vec[0].as_deref_mut().unwrap();
+
+ let mut i = 0;
+ let x = &mut Some(&mut i);
+ (*x).as_deref_mut();
+
+ // #8047
+ let mut y = 0;
+ let mut x = Some(&mut y);
+ x.as_deref_mut();
+ dbg!(x);
+}
+
+struct S<'a> {
+ opt: Option<&'a mut usize>,
+}
+
+fn from_field<'a>(s: &'a mut S<'a>) -> Option<&'a mut usize> {
+ s.opt.as_deref_mut()
+}
diff --git a/src/tools/clippy/tests/ui/needless_option_as_deref.stderr b/src/tools/clippy/tests/ui/needless_option_as_deref.stderr
new file mode 100644
index 000000000..bc07db5b3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_option_as_deref.stderr
@@ -0,0 +1,22 @@
+error: derefed type is same as origin
+ --> $DIR/needless_option_as_deref.rs:8:29
+ |
+LL | let _: Option<&usize> = Some(&1).as_deref();
+ | ^^^^^^^^^^^^^^^^^^^ help: try this: `Some(&1)`
+ |
+ = note: `-D clippy::needless-option-as-deref` implied by `-D warnings`
+
+error: derefed type is same as origin
+ --> $DIR/needless_option_as_deref.rs:9:33
+ |
+LL | let _: Option<&mut usize> = Some(&mut 1).as_deref_mut();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Some(&mut 1)`
+
+error: derefed type is same as origin
+ --> $DIR/needless_option_as_deref.rs:13:13
+ |
+LL | let _ = x.as_deref_mut();
+ | ^^^^^^^^^^^^^^^^ help: try this: `x`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_option_take.fixed b/src/tools/clippy/tests/ui/needless_option_take.fixed
new file mode 100644
index 000000000..29691e816
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_option_take.fixed
@@ -0,0 +1,15 @@
+// run-rustfix
+
+fn main() {
+ println!("Testing non erroneous option_take_on_temporary");
+ let mut option = Some(1);
+ let _ = Box::new(move || option.take().unwrap());
+
+ println!("Testing non erroneous option_take_on_temporary");
+ let x = Some(3);
+ x.as_ref();
+
+ println!("Testing erroneous option_take_on_temporary");
+ let x = Some(3);
+ x.as_ref();
+}
diff --git a/src/tools/clippy/tests/ui/needless_option_take.rs b/src/tools/clippy/tests/ui/needless_option_take.rs
new file mode 100644
index 000000000..9f4109eb4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_option_take.rs
@@ -0,0 +1,15 @@
+// run-rustfix
+
+fn main() {
+ println!("Testing non erroneous option_take_on_temporary");
+ let mut option = Some(1);
+ let _ = Box::new(move || option.take().unwrap());
+
+ println!("Testing non erroneous option_take_on_temporary");
+ let x = Some(3);
+ x.as_ref();
+
+ println!("Testing erroneous option_take_on_temporary");
+ let x = Some(3);
+ x.as_ref().take();
+}
diff --git a/src/tools/clippy/tests/ui/needless_option_take.stderr b/src/tools/clippy/tests/ui/needless_option_take.stderr
new file mode 100644
index 000000000..cb3bf015b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_option_take.stderr
@@ -0,0 +1,10 @@
+error: called `Option::take()` on a temporary value
+ --> $DIR/needless_option_take.rs:14:5
+ |
+LL | x.as_ref().take();
+ | ^^^^^^^^^^^^^^^^^ help: try: `x.as_ref()`
+ |
+ = note: `-D clippy::needless-option-take` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/needless_parens_on_range_literals.fixed b/src/tools/clippy/tests/ui/needless_parens_on_range_literals.fixed
new file mode 100644
index 000000000..1bd75c806
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_parens_on_range_literals.fixed
@@ -0,0 +1,14 @@
+// run-rustfix
+// edition:2018
+
+#![warn(clippy::needless_parens_on_range_literals)]
+#![allow(clippy::almost_complete_letter_range)]
+
+fn main() {
+ let _ = 'a'..='z';
+ let _ = 'a'..'z';
+ let _ = (1.)..2.;
+ let _ = (1.)..2.;
+ let _ = 'a'..;
+ let _ = ..'z';
+}
diff --git a/src/tools/clippy/tests/ui/needless_parens_on_range_literals.rs b/src/tools/clippy/tests/ui/needless_parens_on_range_literals.rs
new file mode 100644
index 000000000..7abb8a1ad
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_parens_on_range_literals.rs
@@ -0,0 +1,14 @@
+// run-rustfix
+// edition:2018
+
+#![warn(clippy::needless_parens_on_range_literals)]
+#![allow(clippy::almost_complete_letter_range)]
+
+fn main() {
+ let _ = ('a')..=('z');
+ let _ = 'a'..('z');
+ let _ = (1.)..2.;
+ let _ = (1.)..(2.);
+ let _ = ('a')..;
+ let _ = ..('z');
+}
diff --git a/src/tools/clippy/tests/ui/needless_parens_on_range_literals.stderr b/src/tools/clippy/tests/ui/needless_parens_on_range_literals.stderr
new file mode 100644
index 000000000..505f7ac91
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_parens_on_range_literals.stderr
@@ -0,0 +1,40 @@
+error: needless parenthesis on range literals can be removed
+ --> $DIR/needless_parens_on_range_literals.rs:8:13
+ |
+LL | let _ = ('a')..=('z');
+ | ^^^^^ help: try: `'a'`
+ |
+ = note: `-D clippy::needless-parens-on-range-literals` implied by `-D warnings`
+
+error: needless parenthesis on range literals can be removed
+ --> $DIR/needless_parens_on_range_literals.rs:8:21
+ |
+LL | let _ = ('a')..=('z');
+ | ^^^^^ help: try: `'z'`
+
+error: needless parenthesis on range literals can be removed
+ --> $DIR/needless_parens_on_range_literals.rs:9:18
+ |
+LL | let _ = 'a'..('z');
+ | ^^^^^ help: try: `'z'`
+
+error: needless parenthesis on range literals can be removed
+ --> $DIR/needless_parens_on_range_literals.rs:11:19
+ |
+LL | let _ = (1.)..(2.);
+ | ^^^^ help: try: `2.`
+
+error: needless parenthesis on range literals can be removed
+ --> $DIR/needless_parens_on_range_literals.rs:12:13
+ |
+LL | let _ = ('a')..;
+ | ^^^^^ help: try: `'a'`
+
+error: needless parenthesis on range literals can be removed
+ --> $DIR/needless_parens_on_range_literals.rs:13:15
+ |
+LL | let _ = ..('z');
+ | ^^^^^ help: try: `'z'`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_value.rs b/src/tools/clippy/tests/ui/needless_pass_by_value.rs
new file mode 100644
index 000000000..5a35b100a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_pass_by_value.rs
@@ -0,0 +1,160 @@
+#![warn(clippy::needless_pass_by_value)]
+#![allow(
+ dead_code,
+ clippy::single_match,
+ clippy::redundant_pattern_matching,
+ clippy::option_option,
+ clippy::redundant_clone
+)]
+
+use std::borrow::Borrow;
+use std::collections::HashSet;
+use std::convert::AsRef;
+use std::mem::MaybeUninit;
+
+// `v` should be warned
+// `w`, `x` and `y` are allowed (moved or mutated)
+fn foo<T: Default>(v: Vec<T>, w: Vec<T>, mut x: Vec<T>, y: Vec<T>) -> Vec<T> {
+ assert_eq!(v.len(), 42);
+
+ consume(w);
+
+ x.push(T::default());
+
+ y
+}
+
+fn consume<T>(_: T) {}
+
+struct Wrapper(String);
+
+fn bar(x: String, y: Wrapper) {
+ assert_eq!(x.len(), 42);
+ assert_eq!(y.0.len(), 42);
+}
+
+// V implements `Borrow<V>`, but should be warned correctly
+fn test_borrow_trait<T: Borrow<str>, U: AsRef<str>, V>(t: T, u: U, v: V) {
+ println!("{}", t.borrow());
+ println!("{}", u.as_ref());
+ consume(&v);
+}
+
+// ok
+fn test_fn<F: Fn(i32) -> i32>(f: F) {
+ f(1);
+}
+
+// x should be warned, but y is ok
+fn test_match(x: Option<Option<String>>, y: Option<Option<String>>) {
+ match x {
+ Some(Some(_)) => 1, // not moved
+ _ => 0,
+ };
+
+ match y {
+ Some(Some(s)) => consume(s), // moved
+ _ => (),
+ };
+}
+
+// x and y should be warned, but z is ok
+fn test_destructure(x: Wrapper, y: Wrapper, z: Wrapper) {
+ let Wrapper(s) = z; // moved
+ let Wrapper(ref t) = y; // not moved
+ let Wrapper(_) = y; // still not moved
+
+ assert_eq!(x.0.len(), s.len());
+ println!("{}", t);
+}
+
+trait Foo {}
+
+// `S: Serialize` is allowed to be passed by value, since a caller can pass `&S` instead
+trait Serialize {}
+impl<'a, T> Serialize for &'a T where T: Serialize {}
+impl Serialize for i32 {}
+
+fn test_blanket_ref<T: Foo, S: Serialize>(_foo: T, _serializable: S) {}
+
+fn issue_2114(s: String, t: String, u: Vec<i32>, v: Vec<i32>) {
+ s.capacity();
+ let _ = t.clone();
+ u.capacity();
+ let _ = v.clone();
+}
+
+struct S<T, U>(T, U);
+
+impl<T: Serialize, U> S<T, U> {
+ fn foo(
+ self,
+ // taking `self` by value is always allowed
+ s: String,
+ t: String,
+ ) -> usize {
+ s.len() + t.capacity()
+ }
+
+ fn bar(_t: T, // Ok, since `&T: Serialize` too
+ ) {
+ }
+
+ fn baz(&self, _u: U, _s: Self) {}
+}
+
+trait FalsePositive {
+ fn visit_str(s: &str);
+ fn visit_string(s: String) {
+ Self::visit_str(&s);
+ }
+}
+
+// shouldn't warn on extern funcs
+extern "C" fn ext(x: MaybeUninit<usize>) -> usize {
+ unsafe { x.assume_init() }
+}
+
+// exempt RangeArgument
+fn range<T: ::std::ops::RangeBounds<usize>>(range: T) {
+ let _ = range.start_bound();
+}
+
+struct CopyWrapper(u32);
+
+fn bar_copy(x: u32, y: CopyWrapper) {
+ assert_eq!(x, 42);
+ assert_eq!(y.0, 42);
+}
+
+// x and y should be warned, but z is ok
+fn test_destructure_copy(x: CopyWrapper, y: CopyWrapper, z: CopyWrapper) {
+ let CopyWrapper(s) = z; // moved
+ let CopyWrapper(ref t) = y; // not moved
+ let CopyWrapper(_) = y; // still not moved
+
+ assert_eq!(x.0, s);
+ println!("{}", t);
+}
+
+// The following 3 lines should not cause an ICE. See #2831
+trait Bar<'a, A> {}
+impl<'b, T> Bar<'b, T> for T {}
+fn some_fun<'b, S: Bar<'b, ()>>(_item: S) {}
+
+// Also this should not cause an ICE. See #2831
+trait Club<'a, A> {}
+impl<T> Club<'static, T> for T {}
+fn more_fun(_item: impl Club<'static, i32>) {}
+
+fn is_sync<T>(_: T)
+where
+ T: Sync,
+{
+}
+
+fn main() {
+ // This should not cause an ICE either
+ // https://github.com/rust-lang/rust-clippy/issues/3144
+ is_sync(HashSet::<usize>::new());
+}
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_value.stderr b/src/tools/clippy/tests/ui/needless_pass_by_value.stderr
new file mode 100644
index 000000000..38f33c53f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_pass_by_value.stderr
@@ -0,0 +1,178 @@
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:17:23
+ |
+LL | fn foo<T: Default>(v: Vec<T>, w: Vec<T>, mut x: Vec<T>, y: Vec<T>) -> Vec<T> {
+ | ^^^^^^ help: consider changing the type to: `&[T]`
+ |
+ = note: `-D clippy::needless-pass-by-value` implied by `-D warnings`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:31:11
+ |
+LL | fn bar(x: String, y: Wrapper) {
+ | ^^^^^^ help: consider changing the type to: `&str`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:31:22
+ |
+LL | fn bar(x: String, y: Wrapper) {
+ | ^^^^^^^ help: consider taking a reference instead: `&Wrapper`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:37:71
+ |
+LL | fn test_borrow_trait<T: Borrow<str>, U: AsRef<str>, V>(t: T, u: U, v: V) {
+ | ^ help: consider taking a reference instead: `&V`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:49:18
+ |
+LL | fn test_match(x: Option<Option<String>>, y: Option<Option<String>>) {
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider taking a reference instead: `&Option<Option<String>>`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:62:24
+ |
+LL | fn test_destructure(x: Wrapper, y: Wrapper, z: Wrapper) {
+ | ^^^^^^^ help: consider taking a reference instead: `&Wrapper`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:62:36
+ |
+LL | fn test_destructure(x: Wrapper, y: Wrapper, z: Wrapper) {
+ | ^^^^^^^ help: consider taking a reference instead: `&Wrapper`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:78:49
+ |
+LL | fn test_blanket_ref<T: Foo, S: Serialize>(_foo: T, _serializable: S) {}
+ | ^ help: consider taking a reference instead: `&T`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:80:18
+ |
+LL | fn issue_2114(s: String, t: String, u: Vec<i32>, v: Vec<i32>) {
+ | ^^^^^^ help: consider taking a reference instead: `&String`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:80:29
+ |
+LL | fn issue_2114(s: String, t: String, u: Vec<i32>, v: Vec<i32>) {
+ | ^^^^^^
+ |
+help: consider changing the type to
+ |
+LL | fn issue_2114(s: String, t: &str, u: Vec<i32>, v: Vec<i32>) {
+ | ~~~~
+help: change `t.clone()` to
+ |
+LL | let _ = t.to_string();
+ | ~~~~~~~~~~~~~
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:80:40
+ |
+LL | fn issue_2114(s: String, t: String, u: Vec<i32>, v: Vec<i32>) {
+ | ^^^^^^^^ help: consider taking a reference instead: `&Vec<i32>`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:80:53
+ |
+LL | fn issue_2114(s: String, t: String, u: Vec<i32>, v: Vec<i32>) {
+ | ^^^^^^^^
+ |
+help: consider changing the type to
+ |
+LL | fn issue_2114(s: String, t: String, u: Vec<i32>, v: &[i32]) {
+ | ~~~~~~
+help: change `v.clone()` to
+ |
+LL | let _ = v.to_owned();
+ | ~~~~~~~~~~~~
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:93:12
+ |
+LL | s: String,
+ | ^^^^^^ help: consider changing the type to: `&str`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:94:12
+ |
+LL | t: String,
+ | ^^^^^^ help: consider taking a reference instead: `&String`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:103:23
+ |
+LL | fn baz(&self, _u: U, _s: Self) {}
+ | ^ help: consider taking a reference instead: `&U`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:103:30
+ |
+LL | fn baz(&self, _u: U, _s: Self) {}
+ | ^^^^ help: consider taking a reference instead: `&Self`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:125:24
+ |
+LL | fn bar_copy(x: u32, y: CopyWrapper) {
+ | ^^^^^^^^^^^ help: consider taking a reference instead: `&CopyWrapper`
+ |
+help: consider marking this type as `Copy`
+ --> $DIR/needless_pass_by_value.rs:123:1
+ |
+LL | struct CopyWrapper(u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:131:29
+ |
+LL | fn test_destructure_copy(x: CopyWrapper, y: CopyWrapper, z: CopyWrapper) {
+ | ^^^^^^^^^^^ help: consider taking a reference instead: `&CopyWrapper`
+ |
+help: consider marking this type as `Copy`
+ --> $DIR/needless_pass_by_value.rs:123:1
+ |
+LL | struct CopyWrapper(u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:131:45
+ |
+LL | fn test_destructure_copy(x: CopyWrapper, y: CopyWrapper, z: CopyWrapper) {
+ | ^^^^^^^^^^^ help: consider taking a reference instead: `&CopyWrapper`
+ |
+help: consider marking this type as `Copy`
+ --> $DIR/needless_pass_by_value.rs:123:1
+ |
+LL | struct CopyWrapper(u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:131:61
+ |
+LL | fn test_destructure_copy(x: CopyWrapper, y: CopyWrapper, z: CopyWrapper) {
+ | ^^^^^^^^^^^ help: consider taking a reference instead: `&CopyWrapper`
+ |
+help: consider marking this type as `Copy`
+ --> $DIR/needless_pass_by_value.rs:123:1
+ |
+LL | struct CopyWrapper(u32);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:143:40
+ |
+LL | fn some_fun<'b, S: Bar<'b, ()>>(_item: S) {}
+ | ^ help: consider taking a reference instead: `&S`
+
+error: this argument is passed by value, but not consumed in the function body
+ --> $DIR/needless_pass_by_value.rs:148:20
+ |
+LL | fn more_fun(_item: impl Club<'static, i32>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider taking a reference instead: `&impl Club<'static, i32>`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_value_proc_macro.rs b/src/tools/clippy/tests/ui/needless_pass_by_value_proc_macro.rs
new file mode 100644
index 000000000..78a0e92d1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_pass_by_value_proc_macro.rs
@@ -0,0 +1,21 @@
+#![crate_type = "proc-macro"]
+#![warn(clippy::needless_pass_by_value)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_derive(Foo)]
+pub fn foo(_input: TokenStream) -> TokenStream {
+ unimplemented!()
+}
+
+#[proc_macro]
+pub fn bar(_input: TokenStream) -> TokenStream {
+ unimplemented!()
+}
+
+#[proc_macro_attribute]
+pub fn baz(_args: TokenStream, _input: TokenStream) -> TokenStream {
+ unimplemented!()
+}
diff --git a/src/tools/clippy/tests/ui/needless_question_mark.fixed b/src/tools/clippy/tests/ui/needless_question_mark.fixed
new file mode 100644
index 000000000..ba9d15e59
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_question_mark.fixed
@@ -0,0 +1,140 @@
+// run-rustfix
+
+#![warn(clippy::needless_question_mark)]
+#![allow(
+ clippy::needless_return,
+ clippy::unnecessary_unwrap,
+ clippy::upper_case_acronyms,
+ dead_code,
+ unused_must_use
+)]
+#![feature(custom_inner_attributes)]
+
+struct TO {
+ magic: Option<usize>,
+}
+
+struct TR {
+ magic: Result<usize, bool>,
+}
+
+fn simple_option_bad1(to: TO) -> Option<usize> {
+ // return as a statement
+ return to.magic;
+}
+
+// formatting will add a semi-colon, which would make
+// this identical to the test case above
+#[rustfmt::skip]
+fn simple_option_bad2(to: TO) -> Option<usize> {
+ // return as an expression
+ return to.magic
+}
+
+fn simple_option_bad3(to: TO) -> Option<usize> {
+ // block value "return"
+ to.magic
+}
+
+fn simple_option_bad4(to: Option<TO>) -> Option<usize> {
+ // single line closure
+ to.and_then(|t| t.magic)
+}
+
+// formatting this will remove the block brackets, making
+// this test identical to the one above
+#[rustfmt::skip]
+fn simple_option_bad5(to: Option<TO>) -> Option<usize> {
+ // closure with body
+ to.and_then(|t| {
+ t.magic
+ })
+}
+
+fn simple_result_bad1(tr: TR) -> Result<usize, bool> {
+ return tr.magic;
+}
+
+// formatting will add a semi-colon, which would make
+// this identical to the test case above
+#[rustfmt::skip]
+fn simple_result_bad2(tr: TR) -> Result<usize, bool> {
+ return tr.magic
+}
+
+fn simple_result_bad3(tr: TR) -> Result<usize, bool> {
+ tr.magic
+}
+
+fn simple_result_bad4(tr: Result<TR, bool>) -> Result<usize, bool> {
+ tr.and_then(|t| t.magic)
+}
+
+// formatting this will remove the block brackets, making
+// this test identical to the one above
+#[rustfmt::skip]
+fn simple_result_bad5(tr: Result<TR, bool>) -> Result<usize, bool> {
+ tr.and_then(|t| {
+ t.magic
+ })
+}
+
+fn also_bad(tr: Result<TR, bool>) -> Result<usize, bool> {
+ if tr.is_ok() {
+ let t = tr.unwrap();
+ return t.magic;
+ }
+ Err(false)
+}
+
+fn false_positive_test<U, T>(x: Result<(), U>) -> Result<(), T>
+where
+ T: From<U>,
+{
+ Ok(x?)
+}
+
+// not quite needless
+fn deref_ref(s: Option<&String>) -> Option<&str> {
+ Some(s?)
+}
+
+fn main() {}
+
+// #6921 if a macro wraps an expr in Some( ) and the ? is in the macro use,
+// the suggestion fails to apply; do not lint
+macro_rules! some_in_macro {
+ ($expr:expr) => {
+ || -> _ { Some($expr) }()
+ };
+}
+
+pub fn test1() {
+ let x = Some(3);
+ let _x = some_in_macro!(x?);
+}
+
+// this one is ok because both the ? and the Some are both inside the macro def
+macro_rules! some_and_qmark_in_macro {
+ ($expr:expr) => {
+ || -> Option<_> { Some($expr) }()
+ };
+}
+
+pub fn test2() {
+ let x = Some(3);
+ let _x = some_and_qmark_in_macro!(x?);
+}
+
+async fn async_option_bad(to: TO) -> Option<usize> {
+ let _ = Some(3);
+ to.magic
+}
+
+async fn async_deref_ref(s: Option<&String>) -> Option<&str> {
+ Some(s?)
+}
+
+async fn async_result_bad(s: TR) -> Result<usize, bool> {
+ s.magic
+}
diff --git a/src/tools/clippy/tests/ui/needless_question_mark.rs b/src/tools/clippy/tests/ui/needless_question_mark.rs
new file mode 100644
index 000000000..3a6523e8f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_question_mark.rs
@@ -0,0 +1,140 @@
+// run-rustfix
+
+#![warn(clippy::needless_question_mark)]
+#![allow(
+ clippy::needless_return,
+ clippy::unnecessary_unwrap,
+ clippy::upper_case_acronyms,
+ dead_code,
+ unused_must_use
+)]
+#![feature(custom_inner_attributes)]
+
+struct TO {
+ magic: Option<usize>,
+}
+
+struct TR {
+ magic: Result<usize, bool>,
+}
+
+fn simple_option_bad1(to: TO) -> Option<usize> {
+ // return as a statement
+ return Some(to.magic?);
+}
+
+// formatting will add a semi-colon, which would make
+// this identical to the test case above
+#[rustfmt::skip]
+fn simple_option_bad2(to: TO) -> Option<usize> {
+ // return as an expression
+ return Some(to.magic?)
+}
+
+fn simple_option_bad3(to: TO) -> Option<usize> {
+ // block value "return"
+ Some(to.magic?)
+}
+
+fn simple_option_bad4(to: Option<TO>) -> Option<usize> {
+ // single line closure
+ to.and_then(|t| Some(t.magic?))
+}
+
+// formatting this will remove the block brackets, making
+// this test identical to the one above
+#[rustfmt::skip]
+fn simple_option_bad5(to: Option<TO>) -> Option<usize> {
+ // closure with body
+ to.and_then(|t| {
+ Some(t.magic?)
+ })
+}
+
+fn simple_result_bad1(tr: TR) -> Result<usize, bool> {
+ return Ok(tr.magic?);
+}
+
+// formatting will add a semi-colon, which would make
+// this identical to the test case above
+#[rustfmt::skip]
+fn simple_result_bad2(tr: TR) -> Result<usize, bool> {
+ return Ok(tr.magic?)
+}
+
+fn simple_result_bad3(tr: TR) -> Result<usize, bool> {
+ Ok(tr.magic?)
+}
+
+fn simple_result_bad4(tr: Result<TR, bool>) -> Result<usize, bool> {
+ tr.and_then(|t| Ok(t.magic?))
+}
+
+// formatting this will remove the block brackets, making
+// this test identical to the one above
+#[rustfmt::skip]
+fn simple_result_bad5(tr: Result<TR, bool>) -> Result<usize, bool> {
+ tr.and_then(|t| {
+ Ok(t.magic?)
+ })
+}
+
+fn also_bad(tr: Result<TR, bool>) -> Result<usize, bool> {
+ if tr.is_ok() {
+ let t = tr.unwrap();
+ return Ok(t.magic?);
+ }
+ Err(false)
+}
+
+fn false_positive_test<U, T>(x: Result<(), U>) -> Result<(), T>
+where
+ T: From<U>,
+{
+ Ok(x?)
+}
+
+// not quite needless
+fn deref_ref(s: Option<&String>) -> Option<&str> {
+ Some(s?)
+}
+
+fn main() {}
+
+// #6921 if a macro wraps an expr in Some( ) and the ? is in the macro use,
+// the suggestion fails to apply; do not lint
+macro_rules! some_in_macro {
+ ($expr:expr) => {
+ || -> _ { Some($expr) }()
+ };
+}
+
+pub fn test1() {
+ let x = Some(3);
+ let _x = some_in_macro!(x?);
+}
+
+// this one is ok because both the ? and the Some are both inside the macro def
+macro_rules! some_and_qmark_in_macro {
+ ($expr:expr) => {
+ || -> Option<_> { Some(Some($expr)?) }()
+ };
+}
+
+pub fn test2() {
+ let x = Some(3);
+ let _x = some_and_qmark_in_macro!(x?);
+}
+
+async fn async_option_bad(to: TO) -> Option<usize> {
+ let _ = Some(3);
+ Some(to.magic?)
+}
+
+async fn async_deref_ref(s: Option<&String>) -> Option<&str> {
+ Some(s?)
+}
+
+async fn async_result_bad(s: TR) -> Result<usize, bool> {
+ Ok(s.magic?)
+}
diff --git a/src/tools/clippy/tests/ui/needless_question_mark.stderr b/src/tools/clippy/tests/ui/needless_question_mark.stderr
new file mode 100644
index 000000000..f8308e24e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_question_mark.stderr
@@ -0,0 +1,93 @@
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:23:12
+ |
+LL | return Some(to.magic?);
+ | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic`
+ |
+ = note: `-D clippy::needless-question-mark` implied by `-D warnings`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:31:12
+ |
+LL | return Some(to.magic?)
+ | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:36:5
+ |
+LL | Some(to.magic?)
+ | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:41:21
+ |
+LL | to.and_then(|t| Some(t.magic?))
+ | ^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `t.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:50:9
+ |
+LL | Some(t.magic?)
+ | ^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `t.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:55:12
+ |
+LL | return Ok(tr.magic?);
+ | ^^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `tr.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:62:12
+ |
+LL | return Ok(tr.magic?)
+ | ^^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `tr.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:66:5
+ |
+LL | Ok(tr.magic?)
+ | ^^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `tr.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:70:21
+ |
+LL | tr.and_then(|t| Ok(t.magic?))
+ | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `t.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:78:9
+ |
+LL | Ok(t.magic?)
+ | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `t.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:85:16
+ |
+LL | return Ok(t.magic?);
+ | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `t.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:120:27
+ |
+LL | || -> Option<_> { Some(Some($expr)?) }()
+ | ^^^^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `Some($expr)`
+...
+LL | let _x = some_and_qmark_in_macro!(x?);
+ | ---------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `some_and_qmark_in_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:131:5
+ |
+LL | Some(to.magic?)
+ | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic`
+
+error: question mark operator is useless here
+ --> $DIR/needless_question_mark.rs:139:5
+ |
+LL | Ok(s.magic?)
+ | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `s.magic`
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_range_loop.rs b/src/tools/clippy/tests/ui/needless_range_loop.rs
new file mode 100644
index 000000000..3fce34367
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_range_loop.rs
@@ -0,0 +1,95 @@
+#![warn(clippy::needless_range_loop)]
+
+static STATIC: [usize; 4] = [0, 1, 8, 16];
+const CONST: [usize; 4] = [0, 1, 8, 16];
+const MAX_LEN: usize = 42;
+
+fn main() {
+ let mut vec = vec![1, 2, 3, 4];
+ let vec2 = vec![1, 2, 3, 4];
+ for i in 0..vec.len() {
+ println!("{}", vec[i]);
+ }
+
+ for i in 0..vec.len() {
+ let i = 42; // make a different `i`
+ println!("{}", vec[i]); // ok, not the `i` of the for-loop
+ }
+
+ for i in 0..vec.len() {
+ let _ = vec[i];
+ }
+
+ // ICE #746
+ for j in 0..4 {
+ println!("{:?}", STATIC[j]);
+ }
+
+ for j in 0..4 {
+ println!("{:?}", CONST[j]);
+ }
+
+ for i in 0..vec.len() {
+ println!("{} {}", vec[i], i);
+ }
+ for i in 0..vec.len() {
+ // not an error, indexing more than one variable
+ println!("{} {}", vec[i], vec2[i]);
+ }
+
+ for i in 0..vec.len() {
+ println!("{}", vec2[i]);
+ }
+
+ for i in 5..vec.len() {
+ println!("{}", vec[i]);
+ }
+
+ for i in 0..MAX_LEN {
+ println!("{}", vec[i]);
+ }
+
+ for i in 0..=MAX_LEN {
+ println!("{}", vec[i]);
+ }
+
+ for i in 5..10 {
+ println!("{}", vec[i]);
+ }
+
+ for i in 5..=10 {
+ println!("{}", vec[i]);
+ }
+
+ for i in 5..vec.len() {
+ println!("{} {}", vec[i], i);
+ }
+
+ for i in 5..10 {
+ println!("{} {}", vec[i], i);
+ }
+
+ // #2542
+ for i in 0..vec.len() {
+ vec[i] = Some(1).unwrap_or_else(|| panic!("error on {}", i));
+ }
+
+ // #3788
+ let test = Test {
+ inner: vec![1, 2, 3, 4],
+ };
+ for i in 0..2 {
+ println!("{}", test[i]);
+ }
+}
+
+struct Test {
+ inner: Vec<usize>,
+}
+
+impl std::ops::Index<usize> for Test {
+ type Output = usize;
+ fn index(&self, index: usize) -> &Self::Output {
+ &self.inner[index]
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_range_loop.stderr b/src/tools/clippy/tests/ui/needless_range_loop.stderr
new file mode 100644
index 000000000..a86cc69df
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_range_loop.stderr
@@ -0,0 +1,157 @@
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:10:14
+ |
+LL | for i in 0..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::needless-range-loop` implied by `-D warnings`
+help: consider using an iterator
+ |
+LL | for <item> in &vec {
+ | ~~~~~~ ~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:19:14
+ |
+LL | for i in 0..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in &vec {
+ | ~~~~~~ ~~~~
+
+error: the loop variable `j` is only used to index `STATIC`
+ --> $DIR/needless_range_loop.rs:24:14
+ |
+LL | for j in 0..4 {
+ | ^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in &STATIC {
+ | ~~~~~~ ~~~~~~~
+
+error: the loop variable `j` is only used to index `CONST`
+ --> $DIR/needless_range_loop.rs:28:14
+ |
+LL | for j in 0..4 {
+ | ^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in &CONST {
+ | ~~~~~~ ~~~~~~
+
+error: the loop variable `i` is used to index `vec`
+ --> $DIR/needless_range_loop.rs:32:14
+ |
+LL | for i in 0..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for (i, <item>) in vec.iter().enumerate() {
+ | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec2`
+ --> $DIR/needless_range_loop.rs:40:14
+ |
+LL | for i in 0..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec2.iter().take(vec.len()) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:44:14
+ |
+LL | for i in 5..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter().skip(5) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:48:14
+ |
+LL | for i in 0..MAX_LEN {
+ | ^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter().take(MAX_LEN) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:52:14
+ |
+LL | for i in 0..=MAX_LEN {
+ | ^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter().take(MAX_LEN + 1) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:56:14
+ |
+LL | for i in 5..10 {
+ | ^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter().take(10).skip(5) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop.rs:60:14
+ |
+LL | for i in 5..=10 {
+ | ^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter().take(10 + 1).skip(5) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is used to index `vec`
+ --> $DIR/needless_range_loop.rs:64:14
+ |
+LL | for i in 5..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for (i, <item>) in vec.iter().enumerate().skip(5) {
+ | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is used to index `vec`
+ --> $DIR/needless_range_loop.rs:68:14
+ |
+LL | for i in 5..10 {
+ | ^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for (i, <item>) in vec.iter().enumerate().take(10).skip(5) {
+ | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is used to index `vec`
+ --> $DIR/needless_range_loop.rs:73:14
+ |
+LL | for i in 0..vec.len() {
+ | ^^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for (i, <item>) in vec.iter_mut().enumerate() {
+ | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_range_loop2.rs b/src/tools/clippy/tests/ui/needless_range_loop2.rs
new file mode 100644
index 000000000..7633316e0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_range_loop2.rs
@@ -0,0 +1,109 @@
+#![warn(clippy::needless_range_loop)]
+
+fn calc_idx(i: usize) -> usize {
+ (i + i + 20) % 4
+}
+
+fn main() {
+ let ns = vec![2, 3, 5, 7];
+
+ for i in 3..10 {
+ println!("{}", ns[i]);
+ }
+
+ for i in 3..10 {
+ println!("{}", ns[i % 4]);
+ }
+
+ for i in 3..10 {
+ println!("{}", ns[i % ns.len()]);
+ }
+
+ for i in 3..10 {
+ println!("{}", ns[calc_idx(i)]);
+ }
+
+ for i in 3..10 {
+ println!("{}", ns[calc_idx(i) % 4]);
+ }
+
+ let mut ms = vec![1, 2, 3, 4, 5, 6];
+ for i in 0..ms.len() {
+ ms[i] *= 2;
+ }
+ assert_eq!(ms, vec![2, 4, 6, 8, 10, 12]);
+
+ let mut ms = vec![1, 2, 3, 4, 5, 6];
+ for i in 0..ms.len() {
+ let x = &mut ms[i];
+ *x *= 2;
+ }
+ assert_eq!(ms, vec![2, 4, 6, 8, 10, 12]);
+
+ let g = vec![1, 2, 3, 4, 5, 6];
+ let glen = g.len();
+ for i in 0..glen {
+ let x: u32 = g[i + 1..].iter().sum();
+ println!("{}", g[i] + x);
+ }
+ assert_eq!(g, vec![20, 18, 15, 11, 6, 0]);
+
+ let mut g = vec![1, 2, 3, 4, 5, 6];
+ let glen = g.len();
+ for i in 0..glen {
+ g[i] = g[i + 1..].iter().sum();
+ }
+ assert_eq!(g, vec![20, 18, 15, 11, 6, 0]);
+
+ let x = 5;
+ let mut vec = vec![0; 9];
+
+ for i in x..x + 4 {
+ vec[i] += 1;
+ }
+
+ let x = 5;
+ let mut vec = vec![0; 10];
+
+ for i in x..=x + 4 {
+ vec[i] += 1;
+ }
+
+ let arr = [1, 2, 3];
+
+ for i in 0..3 {
+ println!("{}", arr[i]);
+ }
+
+ for i in 0..2 {
+ println!("{}", arr[i]);
+ }
+
+ for i in 1..3 {
+ println!("{}", arr[i]);
+ }
+
+ // Fix #5945
+ let mut vec = vec![1, 2, 3, 4];
+ for i in 0..vec.len() - 1 {
+ vec[i] += 1;
+ }
+ let mut vec = vec![1, 2, 3, 4];
+ for i in vec.len() - 3..vec.len() {
+ vec[i] += 1;
+ }
+ let mut vec = vec![1, 2, 3, 4];
+ for i in vec.len() - 3..vec.len() - 1 {
+ vec[i] += 1;
+ }
+}
+
+mod issue2277 {
+ pub fn example(list: &[[f64; 3]]) {
+ let mut x: [f64; 3] = [10.; 3];
+
+ for i in 0..3 {
+ x[i] = list.iter().map(|item| item[i]).sum::<f64>();
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/needless_range_loop2.stderr b/src/tools/clippy/tests/ui/needless_range_loop2.stderr
new file mode 100644
index 000000000..1e6ec5e66
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_range_loop2.stderr
@@ -0,0 +1,91 @@
+error: the loop variable `i` is only used to index `ns`
+ --> $DIR/needless_range_loop2.rs:10:14
+ |
+LL | for i in 3..10 {
+ | ^^^^^
+ |
+ = note: `-D clippy::needless-range-loop` implied by `-D warnings`
+help: consider using an iterator
+ |
+LL | for <item> in ns.iter().take(10).skip(3) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `ms`
+ --> $DIR/needless_range_loop2.rs:31:14
+ |
+LL | for i in 0..ms.len() {
+ | ^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in &mut ms {
+ | ~~~~~~ ~~~~~~~
+
+error: the loop variable `i` is only used to index `ms`
+ --> $DIR/needless_range_loop2.rs:37:14
+ |
+LL | for i in 0..ms.len() {
+ | ^^^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in &mut ms {
+ | ~~~~~~ ~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop2.rs:61:14
+ |
+LL | for i in x..x + 4 {
+ | ^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter_mut().skip(x).take(4) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `vec`
+ --> $DIR/needless_range_loop2.rs:68:14
+ |
+LL | for i in x..=x + 4 {
+ | ^^^^^^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in vec.iter_mut().skip(x).take(4 + 1) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `arr`
+ --> $DIR/needless_range_loop2.rs:74:14
+ |
+LL | for i in 0..3 {
+ | ^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in &arr {
+ | ~~~~~~ ~~~~
+
+error: the loop variable `i` is only used to index `arr`
+ --> $DIR/needless_range_loop2.rs:78:14
+ |
+LL | for i in 0..2 {
+ | ^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in arr.iter().take(2) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~
+
+error: the loop variable `i` is only used to index `arr`
+ --> $DIR/needless_range_loop2.rs:82:14
+ |
+LL | for i in 1..3 {
+ | ^^^^
+ |
+help: consider using an iterator
+ |
+LL | for <item> in arr.iter().skip(1) {
+ | ~~~~~~ ~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_return.fixed b/src/tools/clippy/tests/ui/needless_return.fixed
new file mode 100644
index 000000000..0bc0d0011
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_return.fixed
@@ -0,0 +1,240 @@
+// run-rustfix
+
+#![feature(lint_reasons)]
+#![feature(let_else)]
+#![allow(unused)]
+#![allow(
+ clippy::if_same_then_else,
+ clippy::single_match,
+ clippy::needless_bool,
+ clippy::equatable_if_let
+)]
+#![warn(clippy::needless_return)]
+
+use std::cell::RefCell;
+
+macro_rules! the_answer {
+ () => {
+ 42
+ };
+}
+
+fn test_end_of_fn() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+ true
+}
+
+fn test_no_semicolon() -> bool {
+ true
+}
+
+fn test_if_block() -> bool {
+ if true {
+ true
+ } else {
+ false
+ }
+}
+
+fn test_match(x: bool) -> bool {
+ match x {
+ true => false,
+ false => {
+ true
+ },
+ }
+}
+
+fn test_closure() {
+ let _ = || {
+ true
+ };
+ let _ = || true;
+}
+
+fn test_macro_call() -> i32 {
+ the_answer!()
+}
+
+fn test_void_fun() {
+
+}
+
+fn test_void_if_fun(b: bool) {
+ if b {
+
+ } else {
+
+ }
+}
+
+fn test_void_match(x: u32) {
+ match x {
+ 0 => (),
+ _ => (),
+ }
+}
+
+fn test_nested_match(x: u32) {
+ match x {
+ 0 => (),
+ 1 => {
+ let _ = 42;
+
+ },
+ _ => (),
+ }
+}
+
+fn temporary_outlives_local() -> String {
+ let x = RefCell::<String>::default();
+ return x.borrow().clone();
+}
+
+fn borrows_but_not_last(value: bool) -> String {
+ if value {
+ let x = RefCell::<String>::default();
+ let _a = x.borrow().clone();
+ String::from("test")
+ } else {
+ String::new()
+ }
+}
+
+macro_rules! needed_return {
+ ($e:expr) => {
+ if $e > 3 {
+ return;
+ }
+ };
+}
+
+fn test_return_in_macro() {
+ // This will return and the macro below won't be executed. Removing the `return` from the macro
+ // will change semantics.
+ needed_return!(10);
+ needed_return!(0);
+}
+
+mod issue6501 {
+ #[allow(clippy::unnecessary_lazy_evaluations)]
+ fn foo(bar: Result<(), ()>) {
+ bar.unwrap_or_else(|_| {})
+ }
+
+ fn test_closure() {
+ let _ = || {
+
+ };
+ let _ = || {};
+ }
+
+ struct Foo;
+ #[allow(clippy::unnecessary_lazy_evaluations)]
+ fn bar(res: Result<Foo, u8>) -> Foo {
+ res.unwrap_or_else(|_| Foo)
+ }
+}
+
+async fn async_test_end_of_fn() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+ true
+}
+
+async fn async_test_no_semicolon() -> bool {
+ true
+}
+
+async fn async_test_if_block() -> bool {
+ if true {
+ true
+ } else {
+ false
+ }
+}
+
+async fn async_test_match(x: bool) -> bool {
+ match x {
+ true => false,
+ false => {
+ true
+ },
+ }
+}
+
+async fn async_test_closure() {
+ let _ = || {
+ true
+ };
+ let _ = || true;
+}
+
+async fn async_test_macro_call() -> i32 {
+ the_answer!()
+}
+
+async fn async_test_void_fun() {
+
+}
+
+async fn async_test_void_if_fun(b: bool) {
+ if b {
+
+ } else {
+
+ }
+}
+
+async fn async_test_void_match(x: u32) {
+ match x {
+ 0 => (),
+ _ => (),
+ }
+}
+
+async fn async_temporary_outlives_local() -> String {
+ let x = RefCell::<String>::default();
+ return x.borrow().clone();
+}
+
+async fn async_borrows_but_not_last(value: bool) -> String {
+ if value {
+ let x = RefCell::<String>::default();
+ let _a = x.borrow().clone();
+ String::from("test")
+ } else {
+ String::new()
+ }
+}
+
+async fn async_test_return_in_macro() {
+ needed_return!(10);
+ needed_return!(0);
+}
+
+fn let_else() {
+ let Some(1) = Some(1) else { return };
+}
+
+fn needless_return_macro() -> String {
+ let _ = "foo";
+ let _ = "bar";
+ format!("Hello {}", "world!")
+}
+
+fn check_expect() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+ #[expect(clippy::needless_return)]
+ return true;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_return.rs b/src/tools/clippy/tests/ui/needless_return.rs
new file mode 100644
index 000000000..eb9f72e8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_return.rs
@@ -0,0 +1,240 @@
+// run-rustfix
+
+#![feature(lint_reasons)]
+#![feature(let_else)]
+#![allow(unused)]
+#![allow(
+ clippy::if_same_then_else,
+ clippy::single_match,
+ clippy::needless_bool,
+ clippy::equatable_if_let
+)]
+#![warn(clippy::needless_return)]
+
+use std::cell::RefCell;
+
+macro_rules! the_answer {
+ () => {
+ 42
+ };
+}
+
+fn test_end_of_fn() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+ return true;
+}
+
+fn test_no_semicolon() -> bool {
+ return true;
+}
+
+fn test_if_block() -> bool {
+ if true {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+fn test_match(x: bool) -> bool {
+ match x {
+ true => return false,
+ false => {
+ return true;
+ },
+ }
+}
+
+fn test_closure() {
+ let _ = || {
+ return true;
+ };
+ let _ = || return true;
+}
+
+fn test_macro_call() -> i32 {
+ return the_answer!();
+}
+
+fn test_void_fun() {
+ return;
+}
+
+fn test_void_if_fun(b: bool) {
+ if b {
+ return;
+ } else {
+ return;
+ }
+}
+
+fn test_void_match(x: u32) {
+ match x {
+ 0 => (),
+ _ => return,
+ }
+}
+
+fn test_nested_match(x: u32) {
+ match x {
+ 0 => (),
+ 1 => {
+ let _ = 42;
+ return;
+ },
+ _ => return,
+ }
+}
+
+fn temporary_outlives_local() -> String {
+ let x = RefCell::<String>::default();
+ return x.borrow().clone();
+}
+
+fn borrows_but_not_last(value: bool) -> String {
+ if value {
+ let x = RefCell::<String>::default();
+ let _a = x.borrow().clone();
+ return String::from("test");
+ } else {
+ return String::new();
+ }
+}
+
+macro_rules! needed_return {
+ ($e:expr) => {
+ if $e > 3 {
+ return;
+ }
+ };
+}
+
+fn test_return_in_macro() {
+ // This will return and the macro below won't be executed. Removing the `return` from the macro
+ // will change semantics.
+ needed_return!(10);
+ needed_return!(0);
+}
+
+mod issue6501 {
+ #[allow(clippy::unnecessary_lazy_evaluations)]
+ fn foo(bar: Result<(), ()>) {
+ bar.unwrap_or_else(|_| return)
+ }
+
+ fn test_closure() {
+ let _ = || {
+ return;
+ };
+ let _ = || return;
+ }
+
+ struct Foo;
+ #[allow(clippy::unnecessary_lazy_evaluations)]
+ fn bar(res: Result<Foo, u8>) -> Foo {
+ res.unwrap_or_else(|_| return Foo)
+ }
+}
+
+async fn async_test_end_of_fn() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+ return true;
+}
+
+async fn async_test_no_semicolon() -> bool {
+ return true;
+}
+
+async fn async_test_if_block() -> bool {
+ if true {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+async fn async_test_match(x: bool) -> bool {
+ match x {
+ true => return false,
+ false => {
+ return true;
+ },
+ }
+}
+
+async fn async_test_closure() {
+ let _ = || {
+ return true;
+ };
+ let _ = || return true;
+}
+
+async fn async_test_macro_call() -> i32 {
+ return the_answer!();
+}
+
+async fn async_test_void_fun() {
+ return;
+}
+
+async fn async_test_void_if_fun(b: bool) {
+ if b {
+ return;
+ } else {
+ return;
+ }
+}
+
+async fn async_test_void_match(x: u32) {
+ match x {
+ 0 => (),
+ _ => return,
+ }
+}
+
+async fn async_temporary_outlives_local() -> String {
+ let x = RefCell::<String>::default();
+ return x.borrow().clone();
+}
+
+async fn async_borrows_but_not_last(value: bool) -> String {
+ if value {
+ let x = RefCell::<String>::default();
+ let _a = x.borrow().clone();
+ return String::from("test");
+ } else {
+ return String::new();
+ }
+}
+
+async fn async_test_return_in_macro() {
+ needed_return!(10);
+ needed_return!(0);
+}
+
+fn let_else() {
+ let Some(1) = Some(1) else { return };
+}
+
+fn needless_return_macro() -> String {
+ let _ = "foo";
+ let _ = "bar";
+ return format!("Hello {}", "world!");
+}
+
+fn check_expect() -> bool {
+ if true {
+ // no error!
+ return true;
+ }
+ #[expect(clippy::needless_return)]
+ return true;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/needless_return.stderr b/src/tools/clippy/tests/ui/needless_return.stderr
new file mode 100644
index 000000000..83ff07638
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_return.stderr
@@ -0,0 +1,226 @@
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:27:5
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+ |
+ = note: `-D clippy::needless-return` implied by `-D warnings`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:31:5
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:36:9
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:38:9
+ |
+LL | return false;
+ | ^^^^^^^^^^^^^ help: remove `return`: `false`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:44:17
+ |
+LL | true => return false,
+ | ^^^^^^^^^^^^ help: remove `return`: `false`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:46:13
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:53:9
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:55:16
+ |
+LL | let _ = || return true;
+ | ^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:59:5
+ |
+LL | return the_answer!();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `the_answer!()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:63:5
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:68:9
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:70:9
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:77:14
+ |
+LL | _ => return,
+ | ^^^^^^ help: replace `return` with a unit value: `()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:86:13
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:88:14
+ |
+LL | _ => return,
+ | ^^^^^^ help: replace `return` with a unit value: `()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:101:9
+ |
+LL | return String::from("test");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `String::from("test")`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:103:9
+ |
+LL | return String::new();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `String::new()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:125:32
+ |
+LL | bar.unwrap_or_else(|_| return)
+ | ^^^^^^ help: replace `return` with an empty block: `{}`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:130:13
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:132:20
+ |
+LL | let _ = || return;
+ | ^^^^^^ help: replace `return` with an empty block: `{}`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:138:32
+ |
+LL | res.unwrap_or_else(|_| return Foo)
+ | ^^^^^^^^^^ help: remove `return`: `Foo`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:147:5
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:151:5
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:156:9
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:158:9
+ |
+LL | return false;
+ | ^^^^^^^^^^^^^ help: remove `return`: `false`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:164:17
+ |
+LL | true => return false,
+ | ^^^^^^^^^^^^ help: remove `return`: `false`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:166:13
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:173:9
+ |
+LL | return true;
+ | ^^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:175:16
+ |
+LL | let _ = || return true;
+ | ^^^^^^^^^^^ help: remove `return`: `true`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:179:5
+ |
+LL | return the_answer!();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `the_answer!()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:183:5
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:188:9
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:190:9
+ |
+LL | return;
+ | ^^^^^^^ help: remove `return`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:197:14
+ |
+LL | _ => return,
+ | ^^^^^^ help: replace `return` with a unit value: `()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:210:9
+ |
+LL | return String::from("test");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `String::from("test")`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:212:9
+ |
+LL | return String::new();
+ | ^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `String::new()`
+
+error: unneeded `return` statement
+ --> $DIR/needless_return.rs:228:5
+ |
+LL | return format!("Hello {}", "world!");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove `return`: `format!("Hello {}", "world!")`
+
+error: aborting due to 37 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_splitn.fixed b/src/tools/clippy/tests/ui/needless_splitn.fixed
new file mode 100644
index 000000000..61f5fc4e6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_splitn.fixed
@@ -0,0 +1,47 @@
+// run-rustfix
+// edition:2018
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::needless_splitn)]
+#![allow(clippy::iter_skip_next, clippy::iter_nth_zero, clippy::manual_split_once)]
+
+extern crate itertools;
+
+#[allow(unused_imports)]
+use itertools::Itertools;
+
+fn main() {
+ let str = "key=value=end";
+ let _ = str.split('=').next();
+ let _ = str.split('=').nth(0);
+ let _ = str.splitn(2, '=').nth(1);
+ let (_, _) = str.splitn(2, '=').next_tuple().unwrap();
+ let (_, _) = str.split('=').next_tuple().unwrap();
+ let _: Vec<&str> = str.splitn(3, '=').collect();
+
+ let _ = str.rsplit('=').next();
+ let _ = str.rsplit('=').nth(0);
+ let _ = str.rsplitn(2, '=').nth(1);
+ let (_, _) = str.rsplitn(2, '=').next_tuple().unwrap();
+ let (_, _) = str.rsplit('=').next_tuple().unwrap();
+
+ let _ = str.split('=').next();
+ let _ = str.split('=').nth(3);
+ let _ = str.splitn(5, '=').nth(4);
+ let _ = str.splitn(5, '=').nth(5);
+}
+
+fn _question_mark(s: &str) -> Option<()> {
+ let _ = s.split('=').next()?;
+ let _ = s.split('=').nth(0)?;
+ let _ = s.rsplit('=').next()?;
+ let _ = s.rsplit('=').nth(0)?;
+
+ Some(())
+}
+
+fn _test_msrv() {
+ #![clippy::msrv = "1.51"]
+ // `manual_split_once` MSRV shouldn't apply to `needless_splitn`
+ let _ = "key=value".split('=').nth(0).unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/needless_splitn.rs b/src/tools/clippy/tests/ui/needless_splitn.rs
new file mode 100644
index 000000000..71d9a7077
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_splitn.rs
@@ -0,0 +1,47 @@
+// run-rustfix
+// edition:2018
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::needless_splitn)]
+#![allow(clippy::iter_skip_next, clippy::iter_nth_zero, clippy::manual_split_once)]
+
+extern crate itertools;
+
+#[allow(unused_imports)]
+use itertools::Itertools;
+
+fn main() {
+ let str = "key=value=end";
+ let _ = str.splitn(2, '=').next();
+ let _ = str.splitn(2, '=').nth(0);
+ let _ = str.splitn(2, '=').nth(1);
+ let (_, _) = str.splitn(2, '=').next_tuple().unwrap();
+ let (_, _) = str.splitn(3, '=').next_tuple().unwrap();
+ let _: Vec<&str> = str.splitn(3, '=').collect();
+
+ let _ = str.rsplitn(2, '=').next();
+ let _ = str.rsplitn(2, '=').nth(0);
+ let _ = str.rsplitn(2, '=').nth(1);
+ let (_, _) = str.rsplitn(2, '=').next_tuple().unwrap();
+ let (_, _) = str.rsplitn(3, '=').next_tuple().unwrap();
+
+ let _ = str.splitn(5, '=').next();
+ let _ = str.splitn(5, '=').nth(3);
+ let _ = str.splitn(5, '=').nth(4);
+ let _ = str.splitn(5, '=').nth(5);
+}
+
+fn _question_mark(s: &str) -> Option<()> {
+ let _ = s.splitn(2, '=').next()?;
+ let _ = s.splitn(2, '=').nth(0)?;
+ let _ = s.rsplitn(2, '=').next()?;
+ let _ = s.rsplitn(2, '=').nth(0)?;
+
+ Some(())
+}
+
+fn _test_msrv() {
+ #![clippy::msrv = "1.51"]
+ // `manual_split_once` MSRV shouldn't apply to `needless_splitn`
+ let _ = "key=value".splitn(2, '=').nth(0).unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/needless_splitn.stderr b/src/tools/clippy/tests/ui/needless_splitn.stderr
new file mode 100644
index 000000000..f112b29e7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_splitn.stderr
@@ -0,0 +1,82 @@
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:15:13
+ |
+LL | let _ = str.splitn(2, '=').next();
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+ |
+ = note: `-D clippy::needless-splitn` implied by `-D warnings`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:16:13
+ |
+LL | let _ = str.splitn(2, '=').nth(0);
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:19:18
+ |
+LL | let (_, _) = str.splitn(3, '=').next_tuple().unwrap();
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+
+error: unnecessary use of `rsplitn`
+ --> $DIR/needless_splitn.rs:22:13
+ |
+LL | let _ = str.rsplitn(2, '=').next();
+ | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')`
+
+error: unnecessary use of `rsplitn`
+ --> $DIR/needless_splitn.rs:23:13
+ |
+LL | let _ = str.rsplitn(2, '=').nth(0);
+ | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')`
+
+error: unnecessary use of `rsplitn`
+ --> $DIR/needless_splitn.rs:26:18
+ |
+LL | let (_, _) = str.rsplitn(3, '=').next_tuple().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:28:13
+ |
+LL | let _ = str.splitn(5, '=').next();
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:29:13
+ |
+LL | let _ = str.splitn(5, '=').nth(3);
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:35:13
+ |
+LL | let _ = s.splitn(2, '=').next()?;
+ | ^^^^^^^^^^^^^^^^ help: try this: `s.split('=')`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:36:13
+ |
+LL | let _ = s.splitn(2, '=').nth(0)?;
+ | ^^^^^^^^^^^^^^^^ help: try this: `s.split('=')`
+
+error: unnecessary use of `rsplitn`
+ --> $DIR/needless_splitn.rs:37:13
+ |
+LL | let _ = s.rsplitn(2, '=').next()?;
+ | ^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit('=')`
+
+error: unnecessary use of `rsplitn`
+ --> $DIR/needless_splitn.rs:38:13
+ |
+LL | let _ = s.rsplitn(2, '=').nth(0)?;
+ | ^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit('=')`
+
+error: unnecessary use of `splitn`
+ --> $DIR/needless_splitn.rs:46:13
+ |
+LL | let _ = "key=value".splitn(2, '=').nth(0).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split('=')`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_update.rs b/src/tools/clippy/tests/ui/needless_update.rs
new file mode 100644
index 000000000..b93ff048a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_update.rs
@@ -0,0 +1,25 @@
+#![warn(clippy::needless_update)]
+#![allow(clippy::no_effect)]
+
+struct S {
+ pub a: i32,
+ pub b: i32,
+}
+
+#[non_exhaustive]
+struct T {
+ pub x: i32,
+ pub y: i32,
+}
+
+fn main() {
+ let base = S { a: 0, b: 0 };
+ S { ..base }; // no error
+ S { a: 1, ..base }; // no error
+ S { a: 1, b: 1, ..base };
+
+ let base = T { x: 0, y: 0 };
+ T { ..base }; // no error
+ T { x: 1, ..base }; // no error
+ T { x: 1, y: 1, ..base }; // no error
+}
diff --git a/src/tools/clippy/tests/ui/needless_update.stderr b/src/tools/clippy/tests/ui/needless_update.stderr
new file mode 100644
index 000000000..b154b3b30
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_update.stderr
@@ -0,0 +1,10 @@
+error: struct update has no effect, all the fields in the struct have already been specified
+ --> $DIR/needless_update.rs:19:23
+ |
+LL | S { a: 1, b: 1, ..base };
+ | ^^^^
+ |
+ = note: `-D clippy::needless-update` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.rs b/src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.rs
new file mode 100644
index 000000000..2d392c593
--- /dev/null
+++ b/src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.rs
@@ -0,0 +1,62 @@
+//! This test case utilizes `f64` as an easy example for `PartialOrd`-only types,
+//! but the lint itself actually validates any expression where the left
+//! operand implements `PartialOrd` but not `Ord`.
+
+use std::cmp::Ordering;
+
+#[allow(clippy::unnested_or_patterns, clippy::match_like_matches_macro)]
+#[warn(clippy::neg_cmp_op_on_partial_ord)]
+fn main() {
+ let a_value = 1.0;
+ let another_value = 7.0;
+
+ // --- Bad ---
+
+ // Not Less but potentially Greater, Equal or Uncomparable.
+ let _not_less = !(a_value < another_value);
+
+ // Not Less or Equal but potentially Greater or Uncomparable.
+ let _not_less_or_equal = !(a_value <= another_value);
+
+ // Not Greater but potentially Less, Equal or Uncomparable.
+ let _not_greater = !(a_value > another_value);
+
+ // Not Greater or Equal but potentially Less or Uncomparable.
+ let _not_greater_or_equal = !(a_value >= another_value);
+
+ // --- Good ---
+
+ let _not_less = match a_value.partial_cmp(&another_value) {
+ None | Some(Ordering::Greater) | Some(Ordering::Equal) => true,
+ _ => false,
+ };
+ let _not_less_or_equal = match a_value.partial_cmp(&another_value) {
+ None | Some(Ordering::Greater) => true,
+ _ => false,
+ };
+ let _not_greater = match a_value.partial_cmp(&another_value) {
+ None | Some(Ordering::Less) | Some(Ordering::Equal) => true,
+ _ => false,
+ };
+ let _not_greater_or_equal = match a_value.partial_cmp(&another_value) {
+ None | Some(Ordering::Less) => true,
+ _ => false,
+ };
+
+ // --- Should not trigger ---
+
+ let _ = a_value < another_value;
+ let _ = a_value <= another_value;
+ let _ = a_value > another_value;
+ let _ = a_value >= another_value;
+
+ // --- regression tests ---
+
+ // Issue 2856: False positive on assert!()
+ //
+ // The macro always negates the result of the given comparison in its
+ // internal check, which automatically triggered the lint. As it's an
+ // external macro, there was no chance to do anything about it, which led
+ // to exempting all external macros.
+ assert!(a_value < another_value);
+}
diff --git a/src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.stderr b/src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.stderr
new file mode 100644
index 000000000..c78560007
--- /dev/null
+++ b/src/tools/clippy/tests/ui/neg_cmp_op_on_partial_ord.stderr
@@ -0,0 +1,28 @@
+error: the use of negated comparison operators on partially ordered types produces code that is hard to read and refactor, please consider using the `partial_cmp` method instead, to make it clear that the two values could be incomparable
+ --> $DIR/neg_cmp_op_on_partial_ord.rs:16:21
+ |
+LL | let _not_less = !(a_value < another_value);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::neg-cmp-op-on-partial-ord` implied by `-D warnings`
+
+error: the use of negated comparison operators on partially ordered types produces code that is hard to read and refactor, please consider using the `partial_cmp` method instead, to make it clear that the two values could be incomparable
+ --> $DIR/neg_cmp_op_on_partial_ord.rs:19:30
+ |
+LL | let _not_less_or_equal = !(a_value <= another_value);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the use of negated comparison operators on partially ordered types produces code that is hard to read and refactor, please consider using the `partial_cmp` method instead, to make it clear that the two values could be incomparable
+ --> $DIR/neg_cmp_op_on_partial_ord.rs:22:24
+ |
+LL | let _not_greater = !(a_value > another_value);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the use of negated comparison operators on partially ordered types produces code that is hard to read and refactor, please consider using the `partial_cmp` method instead, to make it clear that the two values could be incomparable
+ --> $DIR/neg_cmp_op_on_partial_ord.rs:25:33
+ |
+LL | let _not_greater_or_equal = !(a_value >= another_value);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/neg_multiply.fixed b/src/tools/clippy/tests/ui/neg_multiply.fixed
new file mode 100644
index 000000000..58ab9e856
--- /dev/null
+++ b/src/tools/clippy/tests/ui/neg_multiply.fixed
@@ -0,0 +1,48 @@
+// run-rustfix
+#![warn(clippy::neg_multiply)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, clippy::precedence)]
+#![allow(unused)]
+
+use std::ops::Mul;
+
+struct X;
+
+impl Mul<isize> for X {
+ type Output = X;
+
+ fn mul(self, _r: isize) -> Self {
+ self
+ }
+}
+
+impl Mul<X> for isize {
+ type Output = X;
+
+ fn mul(self, _r: X) -> X {
+ X
+ }
+}
+
+fn main() {
+ let x = 0;
+
+ -x;
+
+ -x;
+
+ 100 + -x;
+
+ -(100 + x);
+
+ -17;
+
+ 0xcafe | -0xff00;
+
+ -(3_usize as i32);
+ -(3_usize as i32);
+
+ -1 * -1; // should be ok
+
+ X * -1; // should be ok
+ -1 * X; // should also be ok
+}
diff --git a/src/tools/clippy/tests/ui/neg_multiply.rs b/src/tools/clippy/tests/ui/neg_multiply.rs
new file mode 100644
index 000000000..581290dc7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/neg_multiply.rs
@@ -0,0 +1,48 @@
+// run-rustfix
+#![warn(clippy::neg_multiply)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, clippy::precedence)]
+#![allow(unused)]
+
+use std::ops::Mul;
+
+struct X;
+
+impl Mul<isize> for X {
+ type Output = X;
+
+ fn mul(self, _r: isize) -> Self {
+ self
+ }
+}
+
+impl Mul<X> for isize {
+ type Output = X;
+
+ fn mul(self, _r: X) -> X {
+ X
+ }
+}
+
+fn main() {
+ let x = 0;
+
+ x * -1;
+
+ -1 * x;
+
+ 100 + x * -1;
+
+ (100 + x) * -1;
+
+ -1 * 17;
+
+ 0xcafe | 0xff00 * -1;
+
+ 3_usize as i32 * -1;
+ (3_usize as i32) * -1;
+
+ -1 * -1; // should be ok
+
+ X * -1; // should be ok
+ -1 * X; // should also be ok
+}
diff --git a/src/tools/clippy/tests/ui/neg_multiply.stderr b/src/tools/clippy/tests/ui/neg_multiply.stderr
new file mode 100644
index 000000000..388ef29eb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/neg_multiply.stderr
@@ -0,0 +1,52 @@
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:29:5
+ |
+LL | x * -1;
+ | ^^^^^^ help: consider using: `-x`
+ |
+ = note: `-D clippy::neg-multiply` implied by `-D warnings`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:31:5
+ |
+LL | -1 * x;
+ | ^^^^^^ help: consider using: `-x`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:33:11
+ |
+LL | 100 + x * -1;
+ | ^^^^^^ help: consider using: `-x`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:35:5
+ |
+LL | (100 + x) * -1;
+ | ^^^^^^^^^^^^^^ help: consider using: `-(100 + x)`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:37:5
+ |
+LL | -1 * 17;
+ | ^^^^^^^ help: consider using: `-17`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:39:14
+ |
+LL | 0xcafe | 0xff00 * -1;
+ | ^^^^^^^^^^^ help: consider using: `-0xff00`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:41:5
+ |
+LL | 3_usize as i32 * -1;
+ | ^^^^^^^^^^^^^^^^^^^ help: consider using: `-(3_usize as i32)`
+
+error: this multiplication by -1 can be written more succinctly
+ --> $DIR/neg_multiply.rs:42:5
+ |
+LL | (3_usize as i32) * -1;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `-(3_usize as i32)`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/never_loop.rs b/src/tools/clippy/tests/ui/never_loop.rs
new file mode 100644
index 000000000..0a21589dd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/never_loop.rs
@@ -0,0 +1,221 @@
+#![allow(
+ clippy::single_match,
+ unused_assignments,
+ unused_variables,
+ clippy::while_immutable_condition
+)]
+
+fn test1() {
+ let mut x = 0;
+ loop {
+ // clippy::never_loop
+ x += 1;
+ if x == 1 {
+ return;
+ }
+ break;
+ }
+}
+
+fn test2() {
+ let mut x = 0;
+ loop {
+ x += 1;
+ if x == 1 {
+ break;
+ }
+ }
+}
+
+fn test3() {
+ let mut x = 0;
+ loop {
+ // never loops
+ x += 1;
+ break;
+ }
+}
+
+fn test4() {
+ let mut x = 1;
+ loop {
+ x += 1;
+ match x {
+ 5 => return,
+ _ => (),
+ }
+ }
+}
+
+fn test5() {
+ let i = 0;
+ loop {
+ // never loops
+ while i == 0 {
+ // never loops
+ break;
+ }
+ return;
+ }
+}
+
+fn test6() {
+ let mut x = 0;
+ 'outer: loop {
+ x += 1;
+ loop {
+ // never loops
+ if x == 5 {
+ break;
+ }
+ continue 'outer;
+ }
+ return;
+ }
+}
+
+fn test7() {
+ let mut x = 0;
+ loop {
+ x += 1;
+ match x {
+ 1 => continue,
+ _ => (),
+ }
+ return;
+ }
+}
+
+fn test8() {
+ let mut x = 0;
+ loop {
+ x += 1;
+ match x {
+ 5 => return,
+ _ => continue,
+ }
+ }
+}
+
+fn test9() {
+ let x = Some(1);
+ while let Some(y) = x {
+ // never loops
+ return;
+ }
+}
+
+fn test10() {
+ for x in 0..10 {
+ // never loops
+ match x {
+ 1 => break,
+ _ => return,
+ }
+ }
+}
+
+fn test11<F: FnMut() -> i32>(mut f: F) {
+ loop {
+ return match f() {
+ 1 => continue,
+ _ => (),
+ };
+ }
+}
+
+pub fn test12(a: bool, b: bool) {
+ 'label: loop {
+ loop {
+ if a {
+ continue 'label;
+ }
+ if b {
+ break;
+ }
+ }
+ break;
+ }
+}
+
+pub fn test13() {
+ let mut a = true;
+ loop {
+ // infinite loop
+ while a {
+ if true {
+ a = false;
+ continue;
+ }
+ return;
+ }
+ }
+}
+
+pub fn test14() {
+ let mut a = true;
+ 'outer: while a {
+ // never loops
+ while a {
+ if a {
+ a = false;
+ continue;
+ }
+ }
+ break 'outer;
+ }
+}
+
+// Issue #1991: the outer loop should not warn.
+pub fn test15() {
+ 'label: loop {
+ while false {
+ break 'label;
+ }
+ }
+}
+
+// Issue #4058: `continue` in `break` expression
+pub fn test16() {
+ let mut n = 1;
+ loop {
+ break if n != 5 {
+ n += 1;
+ continue;
+ };
+ }
+}
+
+// Issue #9001: `continue` in struct expression fields
+pub fn test17() {
+ struct Foo {
+ f: (),
+ }
+
+ let mut n = 0;
+ let _ = loop {
+ break Foo {
+ f: if n < 5 {
+ n += 1;
+ continue;
+ },
+ };
+ };
+}
+
+fn main() {
+ test1();
+ test2();
+ test3();
+ test4();
+ test5();
+ test6();
+ test7();
+ test8();
+ test9();
+ test10();
+ test11(|| 0);
+ test12(true, false);
+ test13();
+ test14();
+}
diff --git a/src/tools/clippy/tests/ui/never_loop.stderr b/src/tools/clippy/tests/ui/never_loop.stderr
new file mode 100644
index 000000000..f49b23924
--- /dev/null
+++ b/src/tools/clippy/tests/ui/never_loop.stderr
@@ -0,0 +1,105 @@
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:10:5
+ |
+LL | / loop {
+LL | | // clippy::never_loop
+LL | | x += 1;
+LL | | if x == 1 {
+... |
+LL | | break;
+LL | | }
+ | |_____^
+ |
+ = note: `#[deny(clippy::never_loop)]` on by default
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:32:5
+ |
+LL | / loop {
+LL | | // never loops
+LL | | x += 1;
+LL | | break;
+LL | | }
+ | |_____^
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:52:5
+ |
+LL | / loop {
+LL | | // never loops
+LL | | while i == 0 {
+LL | | // never loops
+... |
+LL | | return;
+LL | | }
+ | |_____^
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:54:9
+ |
+LL | / while i == 0 {
+LL | | // never loops
+LL | | break;
+LL | | }
+ | |_________^
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:66:9
+ |
+LL | / loop {
+LL | | // never loops
+LL | | if x == 5 {
+LL | | break;
+LL | | }
+LL | | continue 'outer;
+LL | | }
+ | |_________^
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:102:5
+ |
+LL | / while let Some(y) = x {
+LL | | // never loops
+LL | | return;
+LL | | }
+ | |_____^
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:109:5
+ |
+LL | / for x in 0..10 {
+LL | | // never loops
+LL | | match x {
+LL | | 1 => break,
+LL | | _ => return,
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: if you need the first element of the iterator, try writing
+ |
+LL | if let Some(x) = (0..10).next() {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:157:5
+ |
+LL | / 'outer: while a {
+LL | | // never loops
+LL | | while a {
+LL | | if a {
+... |
+LL | | break 'outer;
+LL | | }
+ | |_____^
+
+error: this loop never actually loops
+ --> $DIR/never_loop.rs:172:9
+ |
+LL | / while false {
+LL | | break 'label;
+LL | | }
+ | |_________^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/new_ret_no_self.rs b/src/tools/clippy/tests/ui/new_ret_no_self.rs
new file mode 100644
index 000000000..2f315ffe2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/new_ret_no_self.rs
@@ -0,0 +1,352 @@
+#![warn(clippy::new_ret_no_self)]
+#![allow(dead_code)]
+
+fn main() {}
+
+trait R {
+ type Item;
+}
+
+trait Q {
+ type Item;
+ type Item2;
+}
+
+struct S;
+
+impl R for S {
+ type Item = Self;
+}
+
+impl S {
+ // should not trigger the lint
+ pub fn new() -> impl R<Item = Self> {
+ S
+ }
+}
+
+struct S2;
+
+impl R for S2 {
+ type Item = Self;
+}
+
+impl S2 {
+ // should not trigger the lint
+ pub fn new(_: String) -> impl R<Item = Self> {
+ S2
+ }
+}
+
+struct S3;
+
+impl R for S3 {
+ type Item = u32;
+}
+
+impl S3 {
+ // should trigger the lint
+ pub fn new(_: String) -> impl R<Item = u32> {
+ S3
+ }
+}
+
+struct S4;
+
+impl Q for S4 {
+ type Item = u32;
+ type Item2 = Self;
+}
+
+impl S4 {
+ // should not trigger the lint
+ pub fn new(_: String) -> impl Q<Item = u32, Item2 = Self> {
+ S4
+ }
+}
+
+struct T;
+
+impl T {
+ // should not trigger lint
+ pub fn new() -> Self {
+ unimplemented!();
+ }
+}
+
+struct U;
+
+impl U {
+ // should trigger lint
+ pub fn new() -> u32 {
+ unimplemented!();
+ }
+}
+
+struct V;
+
+impl V {
+ // should trigger lint
+ pub fn new(_: String) -> u32 {
+ unimplemented!();
+ }
+}
+
+struct TupleReturnerOk;
+
+impl TupleReturnerOk {
+ // should not trigger lint
+ pub fn new() -> (Self, u32) {
+ unimplemented!();
+ }
+}
+
+struct TupleReturnerOk2;
+
+impl TupleReturnerOk2 {
+ // should not trigger lint (it doesn't matter which element in the tuple is Self)
+ pub fn new() -> (u32, Self) {
+ unimplemented!();
+ }
+}
+
+struct TupleReturnerOk3;
+
+impl TupleReturnerOk3 {
+ // should not trigger lint (tuple can contain multiple Self)
+ pub fn new() -> (Self, Self) {
+ unimplemented!();
+ }
+}
+
+struct TupleReturnerBad;
+
+impl TupleReturnerBad {
+ // should trigger lint
+ pub fn new() -> (u32, u32) {
+ unimplemented!();
+ }
+}
+
+struct MutPointerReturnerOk;
+
+impl MutPointerReturnerOk {
+ // should not trigger lint
+ pub fn new() -> *mut Self {
+ unimplemented!();
+ }
+}
+
+struct ConstPointerReturnerOk2;
+
+impl ConstPointerReturnerOk2 {
+ // should not trigger lint
+ pub fn new() -> *const Self {
+ unimplemented!();
+ }
+}
+
+struct MutPointerReturnerBad;
+
+impl MutPointerReturnerBad {
+ // should trigger lint
+ pub fn new() -> *mut V {
+ unimplemented!();
+ }
+}
+
+struct GenericReturnerOk;
+
+impl GenericReturnerOk {
+ // should not trigger lint
+ pub fn new() -> Option<Self> {
+ unimplemented!();
+ }
+}
+
+struct GenericReturnerBad;
+
+impl GenericReturnerBad {
+ // should trigger lint
+ pub fn new() -> Option<u32> {
+ unimplemented!();
+ }
+}
+
+struct NestedReturnerOk;
+
+impl NestedReturnerOk {
+ // should not trigger lint
+ pub fn new() -> (Option<Self>, u32) {
+ unimplemented!();
+ }
+}
+
+struct NestedReturnerOk2;
+
+impl NestedReturnerOk2 {
+ // should not trigger lint
+ pub fn new() -> ((Self, u32), u32) {
+ unimplemented!();
+ }
+}
+
+struct NestedReturnerOk3;
+
+impl NestedReturnerOk3 {
+ // should not trigger lint
+ pub fn new() -> Option<(Self, u32)> {
+ unimplemented!();
+ }
+}
+
+struct WithLifetime<'a> {
+ cat: &'a str,
+}
+
+impl<'a> WithLifetime<'a> {
+ // should not trigger the lint, because the lifetimes are different
+ pub fn new<'b: 'a>(s: &'b str) -> WithLifetime<'b> {
+ unimplemented!();
+ }
+}
+
+mod issue5435 {
+ struct V;
+
+ pub trait TraitRetSelf {
+ // should not trigger lint
+ fn new() -> Self;
+ }
+
+ pub trait TraitRet {
+ // should trigger lint as we are in trait definition
+ fn new() -> String;
+ }
+ pub struct StructRet;
+ impl TraitRet for StructRet {
+ // should not trigger lint as we are in the impl block
+ fn new() -> String {
+ unimplemented!();
+ }
+ }
+
+ pub trait TraitRet2 {
+ // should trigger lint
+ fn new(_: String) -> String;
+ }
+
+ trait TupleReturnerOk {
+ // should not trigger lint
+ fn new() -> (Self, u32)
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait TupleReturnerOk2 {
+ // should not trigger lint (it doesn't matter which element in the tuple is Self)
+ fn new() -> (u32, Self)
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait TupleReturnerOk3 {
+ // should not trigger lint (tuple can contain multiple Self)
+ fn new() -> (Self, Self)
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait TupleReturnerBad {
+ // should trigger lint
+ fn new() -> (u32, u32) {
+ unimplemented!();
+ }
+ }
+
+ trait MutPointerReturnerOk {
+ // should not trigger lint
+ fn new() -> *mut Self
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait ConstPointerReturnerOk2 {
+ // should not trigger lint
+ fn new() -> *const Self
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait MutPointerReturnerBad {
+ // should trigger lint
+ fn new() -> *mut V {
+ unimplemented!();
+ }
+ }
+
+ trait GenericReturnerOk {
+ // should not trigger lint
+ fn new() -> Option<Self>
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait NestedReturnerOk {
+ // should not trigger lint
+ fn new() -> (Option<Self>, u32)
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait NestedReturnerOk2 {
+ // should not trigger lint
+ fn new() -> ((Self, u32), u32)
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+
+ trait NestedReturnerOk3 {
+ // should not trigger lint
+ fn new() -> Option<(Self, u32)>
+ where
+ Self: Sized,
+ {
+ unimplemented!();
+ }
+ }
+}
+
+// issue #1724
+struct RetOtherSelf<T>(T);
+struct RetOtherSelfWrapper<T>(T);
+
+impl<T> RetOtherSelf<T> {
+ fn new(t: T) -> RetOtherSelf<RetOtherSelfWrapper<T>> {
+ RetOtherSelf(RetOtherSelfWrapper(t))
+ }
+}
diff --git a/src/tools/clippy/tests/ui/new_ret_no_self.stderr b/src/tools/clippy/tests/ui/new_ret_no_self.stderr
new file mode 100644
index 000000000..8217bc618
--- /dev/null
+++ b/src/tools/clippy/tests/ui/new_ret_no_self.stderr
@@ -0,0 +1,80 @@
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:49:5
+ |
+LL | / pub fn new(_: String) -> impl R<Item = u32> {
+LL | | S3
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::new-ret-no-self` implied by `-D warnings`
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:81:5
+ |
+LL | / pub fn new() -> u32 {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:90:5
+ |
+LL | / pub fn new(_: String) -> u32 {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:126:5
+ |
+LL | / pub fn new() -> (u32, u32) {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:153:5
+ |
+LL | / pub fn new() -> *mut V {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:171:5
+ |
+LL | / pub fn new() -> Option<u32> {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:224:9
+ |
+LL | fn new() -> String;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:236:9
+ |
+LL | fn new(_: String) -> String;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:271:9
+ |
+LL | / fn new() -> (u32, u32) {
+LL | | unimplemented!();
+LL | | }
+ | |_________^
+
+error: methods called `new` usually return `Self`
+ --> $DIR/new_ret_no_self.rs:298:9
+ |
+LL | / fn new() -> *mut V {
+LL | | unimplemented!();
+LL | | }
+ | |_________^
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/new_without_default.rs b/src/tools/clippy/tests/ui/new_without_default.rs
new file mode 100644
index 000000000..65809023f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/new_without_default.rs
@@ -0,0 +1,228 @@
+#![allow(dead_code, clippy::missing_safety_doc, clippy::extra_unused_lifetimes)]
+#![warn(clippy::new_without_default)]
+
+pub struct Foo;
+
+impl Foo {
+ pub fn new() -> Foo {
+ Foo
+ }
+}
+
+pub struct Bar;
+
+impl Bar {
+ pub fn new() -> Self {
+ Bar
+ }
+}
+
+pub struct Ok;
+
+impl Ok {
+ pub fn new() -> Self {
+ Ok
+ }
+}
+
+impl Default for Ok {
+ fn default() -> Self {
+ Ok
+ }
+}
+
+pub struct Params;
+
+impl Params {
+ pub fn new(_: u32) -> Self {
+ Params
+ }
+}
+
+pub struct GenericsOk<T> {
+ bar: T,
+}
+
+impl<U> Default for GenericsOk<U> {
+ fn default() -> Self {
+ unimplemented!();
+ }
+}
+
+impl<'c, V> GenericsOk<V> {
+ pub fn new() -> GenericsOk<V> {
+ unimplemented!()
+ }
+}
+
+pub struct LtOk<'a> {
+ foo: &'a bool,
+}
+
+impl<'b> Default for LtOk<'b> {
+ fn default() -> Self {
+ unimplemented!();
+ }
+}
+
+impl<'c> LtOk<'c> {
+ pub fn new() -> LtOk<'c> {
+ unimplemented!()
+ }
+}
+
+pub struct LtKo<'a> {
+ foo: &'a bool,
+}
+
+impl<'c> LtKo<'c> {
+ pub fn new() -> LtKo<'c> {
+ unimplemented!()
+ }
+ // FIXME: that suggestion is missing lifetimes
+}
+
+struct Private;
+
+impl Private {
+ fn new() -> Private {
+ unimplemented!()
+ } // We don't lint private items
+}
+
+struct PrivateStruct;
+
+impl PrivateStruct {
+ pub fn new() -> PrivateStruct {
+ unimplemented!()
+ } // We don't lint public items on private structs
+}
+
+pub struct PrivateItem;
+
+impl PrivateItem {
+ fn new() -> PrivateItem {
+ unimplemented!()
+ } // We don't lint private items on public structs
+}
+
+struct Const;
+
+impl Const {
+ pub const fn new() -> Const {
+ Const
+ } // const fns can't be implemented via Default
+}
+
+pub struct IgnoreGenericNew;
+
+impl IgnoreGenericNew {
+ pub fn new<T>() -> Self {
+ IgnoreGenericNew
+ } // the derived Default does not make sense here as the result depends on T
+}
+
+pub trait TraitWithNew: Sized {
+ fn new() -> Self {
+ panic!()
+ }
+}
+
+pub struct IgnoreUnsafeNew;
+
+impl IgnoreUnsafeNew {
+ pub unsafe fn new() -> Self {
+ IgnoreUnsafeNew
+ }
+}
+
+#[derive(Default)]
+pub struct OptionRefWrapper<'a, T>(Option<&'a T>);
+
+impl<'a, T> OptionRefWrapper<'a, T> {
+ pub fn new() -> Self {
+ OptionRefWrapper(None)
+ }
+}
+
+pub struct Allow(Foo);
+
+impl Allow {
+ #[allow(clippy::new_without_default)]
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+pub struct AllowDerive;
+
+impl AllowDerive {
+ #[allow(clippy::new_without_default)]
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+pub struct NewNotEqualToDerive {
+ foo: i32,
+}
+
+impl NewNotEqualToDerive {
+ // This `new` implementation is not equal to a derived `Default`, so do not suggest deriving.
+ pub fn new() -> Self {
+ NewNotEqualToDerive { foo: 1 }
+ }
+}
+
+// see #6933
+pub struct FooGenerics<T>(std::marker::PhantomData<T>);
+impl<T> FooGenerics<T> {
+ pub fn new() -> Self {
+ Self(Default::default())
+ }
+}
+
+pub struct BarGenerics<T>(std::marker::PhantomData<T>);
+impl<T: Copy> BarGenerics<T> {
+ pub fn new() -> Self {
+ Self(Default::default())
+ }
+}
+
+pub mod issue7220 {
+ pub struct Foo<T> {
+ _bar: *mut T,
+ }
+
+ impl<T> Foo<T> {
+ pub fn new() -> Self {
+ todo!()
+ }
+ }
+}
+
+// see issue #8152
+// This should not create any lints
+pub struct DocHidden;
+impl DocHidden {
+ #[doc(hidden)]
+ pub fn new() -> Self {
+ DocHidden
+ }
+}
+
+fn main() {}
+
+pub struct IgnoreConstGenericNew(usize);
+impl IgnoreConstGenericNew {
+ pub fn new<const N: usize>() -> Self {
+ Self(N)
+ }
+}
+
+pub struct IgnoreLifetimeNew;
+impl IgnoreLifetimeNew {
+ pub fn new<'a>() -> Self {
+ Self
+ }
+}
diff --git a/src/tools/clippy/tests/ui/new_without_default.stderr b/src/tools/clippy/tests/ui/new_without_default.stderr
new file mode 100644
index 000000000..212a69ab9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/new_without_default.stderr
@@ -0,0 +1,124 @@
+error: you should consider adding a `Default` implementation for `Foo`
+ --> $DIR/new_without_default.rs:7:5
+ |
+LL | / pub fn new() -> Foo {
+LL | | Foo
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::new-without-default` implied by `-D warnings`
+help: try adding this
+ |
+LL + impl Default for Foo {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+ |
+
+error: you should consider adding a `Default` implementation for `Bar`
+ --> $DIR/new_without_default.rs:15:5
+ |
+LL | / pub fn new() -> Self {
+LL | | Bar
+LL | | }
+ | |_____^
+ |
+help: try adding this
+ |
+LL + impl Default for Bar {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+ |
+
+error: you should consider adding a `Default` implementation for `LtKo<'c>`
+ --> $DIR/new_without_default.rs:79:5
+ |
+LL | / pub fn new() -> LtKo<'c> {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+help: try adding this
+ |
+LL + impl<'c> Default for LtKo<'c> {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+ |
+
+error: you should consider adding a `Default` implementation for `NewNotEqualToDerive`
+ --> $DIR/new_without_default.rs:172:5
+ |
+LL | / pub fn new() -> Self {
+LL | | NewNotEqualToDerive { foo: 1 }
+LL | | }
+ | |_____^
+ |
+help: try adding this
+ |
+LL + impl Default for NewNotEqualToDerive {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+ |
+
+error: you should consider adding a `Default` implementation for `FooGenerics<T>`
+ --> $DIR/new_without_default.rs:180:5
+ |
+LL | / pub fn new() -> Self {
+LL | | Self(Default::default())
+LL | | }
+ | |_____^
+ |
+help: try adding this
+ |
+LL + impl<T> Default for FooGenerics<T> {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+ |
+
+error: you should consider adding a `Default` implementation for `BarGenerics<T>`
+ --> $DIR/new_without_default.rs:187:5
+ |
+LL | / pub fn new() -> Self {
+LL | | Self(Default::default())
+LL | | }
+ | |_____^
+ |
+help: try adding this
+ |
+LL + impl<T: Copy> Default for BarGenerics<T> {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+ |
+
+error: you should consider adding a `Default` implementation for `Foo<T>`
+ --> $DIR/new_without_default.rs:198:9
+ |
+LL | / pub fn new() -> Self {
+LL | | todo!()
+LL | | }
+ | |_________^
+ |
+help: try adding this
+ |
+LL ~ impl<T> Default for Foo<T> {
+LL + fn default() -> Self {
+LL + Self::new()
+LL + }
+LL + }
+LL +
+LL ~ impl<T> Foo<T> {
+ |
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/no_effect.rs b/src/tools/clippy/tests/ui/no_effect.rs
new file mode 100644
index 000000000..fdefb11ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/no_effect.rs
@@ -0,0 +1,143 @@
+#![feature(box_syntax, fn_traits, unboxed_closures)]
+#![warn(clippy::no_effect_underscore_binding)]
+#![allow(dead_code)]
+#![allow(path_statements)]
+#![allow(clippy::deref_addrof)]
+#![allow(clippy::redundant_field_names)]
+
+struct Unit;
+struct Tuple(i32);
+struct Struct {
+ field: i32,
+}
+enum Enum {
+ Tuple(i32),
+ Struct { field: i32 },
+}
+struct DropUnit;
+impl Drop for DropUnit {
+ fn drop(&mut self) {}
+}
+struct DropStruct {
+ field: i32,
+}
+impl Drop for DropStruct {
+ fn drop(&mut self) {}
+}
+struct DropTuple(i32);
+impl Drop for DropTuple {
+ fn drop(&mut self) {}
+}
+enum DropEnum {
+ Tuple(i32),
+ Struct { field: i32 },
+}
+impl Drop for DropEnum {
+ fn drop(&mut self) {}
+}
+struct FooString {
+ s: String,
+}
+union Union {
+ a: u8,
+ b: f64,
+}
+
+fn get_number() -> i32 {
+ 0
+}
+fn get_struct() -> Struct {
+ Struct { field: 0 }
+}
+fn get_drop_struct() -> DropStruct {
+ DropStruct { field: 0 }
+}
+
+unsafe fn unsafe_fn() -> i32 {
+ 0
+}
+
+struct GreetStruct1;
+
+impl FnOnce<(&str,)> for GreetStruct1 {
+ type Output = ();
+
+ extern "rust-call" fn call_once(self, (who,): (&str,)) -> Self::Output {
+ println!("hello {}", who);
+ }
+}
+
+struct GreetStruct2();
+
+impl FnOnce<(&str,)> for GreetStruct2 {
+ type Output = ();
+
+ extern "rust-call" fn call_once(self, (who,): (&str,)) -> Self::Output {
+ println!("hello {}", who);
+ }
+}
+
+struct GreetStruct3;
+
+impl FnOnce<(&str,)> for GreetStruct3 {
+ type Output = ();
+
+ extern "rust-call" fn call_once(self, (who,): (&str,)) -> Self::Output {
+ println!("hello {}", who);
+ }
+}
+
+fn main() {
+ let s = get_struct();
+ let s2 = get_struct();
+
+ 0;
+ s2;
+ Unit;
+ Tuple(0);
+ Struct { field: 0 };
+ Struct { ..s };
+ Union { a: 0 };
+ Enum::Tuple(0);
+ Enum::Struct { field: 0 };
+ 5 + 6;
+ *&42;
+ &6;
+ (5, 6, 7);
+ box 42;
+ ..;
+ 5..;
+ ..5;
+ 5..6;
+ 5..=6;
+ [42, 55];
+ [42, 55][1];
+ (42, 55).1;
+ [42; 55];
+ [42; 55][13];
+ let mut x = 0;
+ || x += 5;
+ let s: String = "foo".into();
+ FooString { s: s };
+ let _unused = 1;
+ let _penguin = || println!("Some helpful closure");
+ let _duck = Struct { field: 0 };
+ let _cat = [2, 4, 6, 8][2];
+
+ #[allow(clippy::no_effect)]
+ 0;
+
+ // Do not warn
+ get_number();
+ unsafe { unsafe_fn() };
+ let _used = get_struct();
+ let _x = vec![1];
+ DropUnit;
+ DropStruct { field: 0 };
+ DropTuple(0);
+ DropEnum::Tuple(0);
+ DropEnum::Struct { field: 0 };
+ GreetStruct1("world");
+ GreetStruct2()("world");
+ GreetStruct3 {}("world");
+}
diff --git a/src/tools/clippy/tests/ui/no_effect.stderr b/src/tools/clippy/tests/ui/no_effect.stderr
new file mode 100644
index 000000000..328d2555c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/no_effect.stderr
@@ -0,0 +1,186 @@
+error: statement with no effect
+ --> $DIR/no_effect.rs:94:5
+ |
+LL | 0;
+ | ^^
+ |
+ = note: `-D clippy::no-effect` implied by `-D warnings`
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:95:5
+ |
+LL | s2;
+ | ^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:96:5
+ |
+LL | Unit;
+ | ^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:97:5
+ |
+LL | Tuple(0);
+ | ^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:98:5
+ |
+LL | Struct { field: 0 };
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:99:5
+ |
+LL | Struct { ..s };
+ | ^^^^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:100:5
+ |
+LL | Union { a: 0 };
+ | ^^^^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:101:5
+ |
+LL | Enum::Tuple(0);
+ | ^^^^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:102:5
+ |
+LL | Enum::Struct { field: 0 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:103:5
+ |
+LL | 5 + 6;
+ | ^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:104:5
+ |
+LL | *&42;
+ | ^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:105:5
+ |
+LL | &6;
+ | ^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:106:5
+ |
+LL | (5, 6, 7);
+ | ^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:107:5
+ |
+LL | box 42;
+ | ^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:108:5
+ |
+LL | ..;
+ | ^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:109:5
+ |
+LL | 5..;
+ | ^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:110:5
+ |
+LL | ..5;
+ | ^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:111:5
+ |
+LL | 5..6;
+ | ^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:112:5
+ |
+LL | 5..=6;
+ | ^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:113:5
+ |
+LL | [42, 55];
+ | ^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:114:5
+ |
+LL | [42, 55][1];
+ | ^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:115:5
+ |
+LL | (42, 55).1;
+ | ^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:116:5
+ |
+LL | [42; 55];
+ | ^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:117:5
+ |
+LL | [42; 55][13];
+ | ^^^^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:119:5
+ |
+LL | || x += 5;
+ | ^^^^^^^^^^
+
+error: statement with no effect
+ --> $DIR/no_effect.rs:121:5
+ |
+LL | FooString { s: s };
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: binding to `_` prefixed variable with no side-effect
+ --> $DIR/no_effect.rs:122:5
+ |
+LL | let _unused = 1;
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::no-effect-underscore-binding` implied by `-D warnings`
+
+error: binding to `_` prefixed variable with no side-effect
+ --> $DIR/no_effect.rs:123:5
+ |
+LL | let _penguin = || println!("Some helpful closure");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: binding to `_` prefixed variable with no side-effect
+ --> $DIR/no_effect.rs:124:5
+ |
+LL | let _duck = Struct { field: 0 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: binding to `_` prefixed variable with no side-effect
+ --> $DIR/no_effect.rs:125:5
+ |
+LL | let _cat = [2, 4, 6, 8][2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 30 previous errors
+
diff --git a/src/tools/clippy/tests/ui/no_effect_replace.rs b/src/tools/clippy/tests/ui/no_effect_replace.rs
new file mode 100644
index 000000000..ad17d53f7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/no_effect_replace.rs
@@ -0,0 +1,51 @@
+#![warn(clippy::no_effect_replace)]
+
+fn main() {
+ let _ = "12345".replace('1', "1");
+ let _ = "12345".replace("12", "12");
+ let _ = String::new().replace("12", "12");
+
+ let _ = "12345".replacen('1', "1", 1);
+ let _ = "12345".replacen("12", "12", 1);
+ let _ = String::new().replacen("12", "12", 1);
+
+ let _ = "12345".replace("12", "22");
+ let _ = "12345".replacen("12", "22", 1);
+
+ let mut x = X::default();
+ let _ = "hello".replace(&x.f(), &x.f());
+ let _ = "hello".replace(&x.f(), &x.ff());
+
+ let _ = "hello".replace(&y(), &y());
+ let _ = "hello".replace(&y(), &z());
+
+ let _ = Replaceme.replace("a", "a");
+}
+
+#[derive(Default)]
+struct X {}
+
+impl X {
+ fn f(&mut self) -> String {
+ "he".to_string()
+ }
+
+ fn ff(&mut self) -> String {
+ "hh".to_string()
+ }
+}
+
+fn y() -> String {
+ "he".to_string()
+}
+
+fn z() -> String {
+ "hh".to_string()
+}
+
+struct Replaceme;
+impl Replaceme {
+ pub fn replace(&mut self, a: &str, b: &str) -> Self {
+ Self
+ }
+}
diff --git a/src/tools/clippy/tests/ui/no_effect_replace.stderr b/src/tools/clippy/tests/ui/no_effect_replace.stderr
new file mode 100644
index 000000000..53a28aa73
--- /dev/null
+++ b/src/tools/clippy/tests/ui/no_effect_replace.stderr
@@ -0,0 +1,52 @@
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:4:13
+ |
+LL | let _ = "12345".replace('1', "1");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::no-effect-replace` implied by `-D warnings`
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:5:13
+ |
+LL | let _ = "12345".replace("12", "12");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:6:13
+ |
+LL | let _ = String::new().replace("12", "12");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:8:13
+ |
+LL | let _ = "12345".replacen('1', "1", 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:9:13
+ |
+LL | let _ = "12345".replacen("12", "12", 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:10:13
+ |
+LL | let _ = String::new().replacen("12", "12", 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:16:13
+ |
+LL | let _ = "hello".replace(&x.f(), &x.f());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: replacing text with itself
+ --> $DIR/no_effect_replace.rs:19:13
+ |
+LL | let _ = "hello".replace(&y(), &y());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/non_expressive_names.rs b/src/tools/clippy/tests/ui/non_expressive_names.rs
new file mode 100644
index 000000000..583096ac0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_expressive_names.rs
@@ -0,0 +1,58 @@
+#![warn(clippy::all)]
+#![allow(unused, clippy::println_empty_string, non_snake_case, clippy::let_unit_value)]
+
+#[derive(Clone, Debug)]
+enum MaybeInst {
+ Split,
+ Split1(usize),
+ Split2(usize),
+}
+
+struct InstSplit {
+ uiae: usize,
+}
+
+impl MaybeInst {
+ fn fill(&mut self) {
+ #[allow(non_fmt_panics)]
+ let filled = match *self {
+ MaybeInst::Split1(goto1) => panic!("1"),
+ MaybeInst::Split2(goto2) => panic!("2"),
+ _ => unimplemented!(),
+ };
+ unimplemented!()
+ }
+}
+
+fn underscores_and_numbers() {
+ let _1 = 1; //~ERROR Consider a more descriptive name
+ let ____1 = 1; //~ERROR Consider a more descriptive name
+ let __1___2 = 12; //~ERROR Consider a more descriptive name
+ let _1_ok = 1;
+}
+
+fn issue2927() {
+ let args = 1;
+ format!("{:?}", 2);
+}
+
+fn issue3078() {
+ #[allow(clippy::single_match)]
+ match "a" {
+ stringify!(a) => {},
+ _ => {},
+ }
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar() {
+ let _1 = 1;
+ let ____1 = 1;
+ let __1___2 = 12;
+ let _1_ok = 1;
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/non_expressive_names.stderr b/src/tools/clippy/tests/ui/non_expressive_names.stderr
new file mode 100644
index 000000000..116d5da87
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_expressive_names.stderr
@@ -0,0 +1,40 @@
+error: consider choosing a more descriptive name
+ --> $DIR/non_expressive_names.rs:28:9
+ |
+LL | let _1 = 1; //~ERROR Consider a more descriptive name
+ | ^^
+ |
+ = note: `-D clippy::just-underscores-and-digits` implied by `-D warnings`
+
+error: consider choosing a more descriptive name
+ --> $DIR/non_expressive_names.rs:29:9
+ |
+LL | let ____1 = 1; //~ERROR Consider a more descriptive name
+ | ^^^^^
+
+error: consider choosing a more descriptive name
+ --> $DIR/non_expressive_names.rs:30:9
+ |
+LL | let __1___2 = 12; //~ERROR Consider a more descriptive name
+ | ^^^^^^^
+
+error: consider choosing a more descriptive name
+ --> $DIR/non_expressive_names.rs:51:13
+ |
+LL | let _1 = 1;
+ | ^^
+
+error: consider choosing a more descriptive name
+ --> $DIR/non_expressive_names.rs:52:13
+ |
+LL | let ____1 = 1;
+ | ^^^^^
+
+error: consider choosing a more descriptive name
+ --> $DIR/non_expressive_names.rs:53:13
+ |
+LL | let __1___2 = 12;
+ | ^^^^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/non_octal_unix_permissions.fixed b/src/tools/clippy/tests/ui/non_octal_unix_permissions.fixed
new file mode 100644
index 000000000..a9b2dcfb0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_octal_unix_permissions.fixed
@@ -0,0 +1,33 @@
+// ignore-windows
+// run-rustfix
+#![warn(clippy::non_octal_unix_permissions)]
+use std::fs::{DirBuilder, File, OpenOptions, Permissions};
+use std::os::unix::fs::{DirBuilderExt, OpenOptionsExt, PermissionsExt};
+
+fn main() {
+ let permissions = 0o760;
+
+ // OpenOptionsExt::mode
+ let mut options = OpenOptions::new();
+ options.mode(0o440);
+ options.mode(0o400);
+ options.mode(permissions);
+
+ // PermissionsExt::from_mode
+ let _permissions = Permissions::from_mode(0o647);
+ let _permissions = Permissions::from_mode(0o000);
+ let _permissions = Permissions::from_mode(permissions);
+
+ // PermissionsExt::set_mode
+ let f = File::create("foo.txt").unwrap();
+ let metadata = f.metadata().unwrap();
+ let mut permissions = metadata.permissions();
+
+ permissions.set_mode(0o644);
+ permissions.set_mode(0o704);
+
+ // DirBuilderExt::mode
+ let mut builder = DirBuilder::new();
+ builder.mode(0o755);
+ builder.mode(0o406);
+}
diff --git a/src/tools/clippy/tests/ui/non_octal_unix_permissions.rs b/src/tools/clippy/tests/ui/non_octal_unix_permissions.rs
new file mode 100644
index 000000000..7d2922f49
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_octal_unix_permissions.rs
@@ -0,0 +1,33 @@
+// ignore-windows
+// run-rustfix
+#![warn(clippy::non_octal_unix_permissions)]
+use std::fs::{DirBuilder, File, OpenOptions, Permissions};
+use std::os::unix::fs::{DirBuilderExt, OpenOptionsExt, PermissionsExt};
+
+fn main() {
+ let permissions = 0o760;
+
+ // OpenOptionsExt::mode
+ let mut options = OpenOptions::new();
+ options.mode(440);
+ options.mode(0o400);
+ options.mode(permissions);
+
+ // PermissionsExt::from_mode
+ let _permissions = Permissions::from_mode(647);
+ let _permissions = Permissions::from_mode(0o000);
+ let _permissions = Permissions::from_mode(permissions);
+
+ // PermissionsExt::set_mode
+ let f = File::create("foo.txt").unwrap();
+ let metadata = f.metadata().unwrap();
+ let mut permissions = metadata.permissions();
+
+ permissions.set_mode(644);
+ permissions.set_mode(0o704);
+
+ // DirBuilderExt::mode
+ let mut builder = DirBuilder::new();
+ builder.mode(755);
+ builder.mode(0o406);
+}
diff --git a/src/tools/clippy/tests/ui/non_octal_unix_permissions.stderr b/src/tools/clippy/tests/ui/non_octal_unix_permissions.stderr
new file mode 100644
index 000000000..32845d065
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_octal_unix_permissions.stderr
@@ -0,0 +1,28 @@
+error: using a non-octal value to set unix file permissions
+ --> $DIR/non_octal_unix_permissions.rs:12:18
+ |
+LL | options.mode(440);
+ | ^^^ help: consider using an octal literal instead: `0o440`
+ |
+ = note: `-D clippy::non-octal-unix-permissions` implied by `-D warnings`
+
+error: using a non-octal value to set unix file permissions
+ --> $DIR/non_octal_unix_permissions.rs:17:47
+ |
+LL | let _permissions = Permissions::from_mode(647);
+ | ^^^ help: consider using an octal literal instead: `0o647`
+
+error: using a non-octal value to set unix file permissions
+ --> $DIR/non_octal_unix_permissions.rs:26:26
+ |
+LL | permissions.set_mode(644);
+ | ^^^ help: consider using an octal literal instead: `0o644`
+
+error: using a non-octal value to set unix file permissions
+ --> $DIR/non_octal_unix_permissions.rs:31:18
+ |
+LL | builder.mode(755);
+ | ^^^ help: consider using an octal literal instead: `0o755`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs b/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs
new file mode 100644
index 000000000..514fb25c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs
@@ -0,0 +1,133 @@
+#![warn(clippy::non_send_fields_in_send_ty)]
+#![allow(suspicious_auto_trait_impls)]
+#![feature(extern_types)]
+
+use std::cell::UnsafeCell;
+use std::ptr::NonNull;
+use std::rc::Rc;
+use std::sync::{Arc, Mutex, MutexGuard};
+
+// disrustor / RUSTSEC-2020-0150
+pub struct RingBuffer<T> {
+ data: Vec<UnsafeCell<T>>,
+ capacity: usize,
+ mask: usize,
+}
+
+unsafe impl<T> Send for RingBuffer<T> {}
+
+// noise_search / RUSTSEC-2020-0141
+pub struct MvccRwLock<T> {
+ raw: *const T,
+ lock: Mutex<Box<T>>,
+}
+
+unsafe impl<T> Send for MvccRwLock<T> {}
+
+// async-coap / RUSTSEC-2020-0124
+pub struct ArcGuard<RC, T> {
+ inner: T,
+ head: Arc<RC>,
+}
+
+unsafe impl<RC, T: Send> Send for ArcGuard<RC, T> {}
+
+// rusb / RUSTSEC-2020-0098
+extern "C" {
+ type libusb_device_handle;
+}
+
+pub trait UsbContext {
+ // some user trait that does not guarantee `Send`
+}
+
+pub struct DeviceHandle<T: UsbContext> {
+ context: T,
+ handle: NonNull<libusb_device_handle>,
+}
+
+unsafe impl<T: UsbContext> Send for DeviceHandle<T> {}
+
+// Other basic tests
+pub struct NoGeneric {
+ rc_is_not_send: Rc<String>,
+}
+
+unsafe impl Send for NoGeneric {}
+
+pub struct MultiField<T> {
+ field1: T,
+ field2: T,
+ field3: T,
+}
+
+unsafe impl<T> Send for MultiField<T> {}
+
+pub enum MyOption<T> {
+ MySome(T),
+ MyNone,
+}
+
+unsafe impl<T> Send for MyOption<T> {}
+
+// Test types that contain `NonNull` instead of raw pointers (#8045)
+pub struct WrappedNonNull(UnsafeCell<NonNull<()>>);
+
+unsafe impl Send for WrappedNonNull {}
+
+// Multiple type parameters
+pub struct MultiParam<A, B> {
+ vec: Vec<(A, B)>,
+}
+
+unsafe impl<A, B> Send for MultiParam<A, B> {}
+
+// Tests for raw pointer heuristic
+extern "C" {
+ type NonSend;
+}
+
+pub struct HeuristicTest {
+ // raw pointers are allowed
+ field1: Vec<*const NonSend>,
+ field2: [*const NonSend; 3],
+ field3: (*const NonSend, *const NonSend, *const NonSend),
+ // not allowed when it contains concrete `!Send` field
+ field4: (*const NonSend, Rc<u8>),
+ // nested raw pointer is also allowed
+ field5: Vec<Vec<*const NonSend>>,
+}
+
+unsafe impl Send for HeuristicTest {}
+
+// Test attributes
+#[allow(clippy::non_send_fields_in_send_ty)]
+pub struct AttrTest1<T>(T);
+
+pub struct AttrTest2<T> {
+ #[allow(clippy::non_send_fields_in_send_ty)]
+ field: T,
+}
+
+pub enum AttrTest3<T> {
+ #[allow(clippy::non_send_fields_in_send_ty)]
+ Enum1(T),
+ Enum2(T),
+}
+
+unsafe impl<T> Send for AttrTest1<T> {}
+unsafe impl<T> Send for AttrTest2<T> {}
+unsafe impl<T> Send for AttrTest3<T> {}
+
+// Multiple non-overlapping `Send` for a single type
+pub struct Complex<A, B> {
+ field1: A,
+ field2: B,
+}
+
+unsafe impl<P> Send for Complex<P, u32> {}
+
+// `MutexGuard` is non-Send
+unsafe impl<Q: Send> Send for Complex<Q, MutexGuard<'static, bool>> {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.stderr b/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.stderr
new file mode 100644
index 000000000..b6c904a14
--- /dev/null
+++ b/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.stderr
@@ -0,0 +1,171 @@
+error: some fields in `RingBuffer<T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:17:1
+ |
+LL | unsafe impl<T> Send for RingBuffer<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::non-send-fields-in-send-ty` implied by `-D warnings`
+note: it is not safe to send field `data` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:12:5
+ |
+LL | data: Vec<UnsafeCell<T>>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: add bounds on type parameter `T` that satisfy `Vec<UnsafeCell<T>>: Send`
+
+error: some fields in `MvccRwLock<T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:25:1
+ |
+LL | unsafe impl<T> Send for MvccRwLock<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `lock` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:22:5
+ |
+LL | lock: Mutex<Box<T>>,
+ | ^^^^^^^^^^^^^^^^^^^
+ = help: add bounds on type parameter `T` that satisfy `Mutex<Box<T>>: Send`
+
+error: some fields in `ArcGuard<RC, T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:33:1
+ |
+LL | unsafe impl<RC, T: Send> Send for ArcGuard<RC, T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `head` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:30:5
+ |
+LL | head: Arc<RC>,
+ | ^^^^^^^^^^^^^
+ = help: add bounds on type parameter `RC` that satisfy `Arc<RC>: Send`
+
+error: some fields in `DeviceHandle<T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:49:1
+ |
+LL | unsafe impl<T: UsbContext> Send for DeviceHandle<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `context` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:45:5
+ |
+LL | context: T,
+ | ^^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+
+error: some fields in `NoGeneric` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:56:1
+ |
+LL | unsafe impl Send for NoGeneric {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `rc_is_not_send` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:53:5
+ |
+LL | rc_is_not_send: Rc<String>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+
+error: some fields in `MultiField<T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:64:1
+ |
+LL | unsafe impl<T> Send for MultiField<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `field1` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:59:5
+ |
+LL | field1: T,
+ | ^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+note: it is not safe to send field `field2` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:60:5
+ |
+LL | field2: T,
+ | ^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+note: it is not safe to send field `field3` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:61:5
+ |
+LL | field3: T,
+ | ^^^^^^^^^
+ = help: add `T: Send` bound in `Send` impl
+
+error: some fields in `MyOption<T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:71:1
+ |
+LL | unsafe impl<T> Send for MyOption<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `0` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:67:12
+ |
+LL | MySome(T),
+ | ^
+ = help: add `T: Send` bound in `Send` impl
+
+error: some fields in `MultiParam<A, B>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:83:1
+ |
+LL | unsafe impl<A, B> Send for MultiParam<A, B> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `vec` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:80:5
+ |
+LL | vec: Vec<(A, B)>,
+ | ^^^^^^^^^^^^^^^^
+ = help: add bounds on type parameters `A, B` that satisfy `Vec<(A, B)>: Send`
+
+error: some fields in `HeuristicTest` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:101:1
+ |
+LL | unsafe impl Send for HeuristicTest {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `field4` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:96:5
+ |
+LL | field4: (*const NonSend, Rc<u8>),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+
+error: some fields in `AttrTest3<T>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:120:1
+ |
+LL | unsafe impl<T> Send for AttrTest3<T> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `0` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:115:11
+ |
+LL | Enum2(T),
+ | ^
+ = help: add `T: Send` bound in `Send` impl
+
+error: some fields in `Complex<P, u32>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:128:1
+ |
+LL | unsafe impl<P> Send for Complex<P, u32> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `field1` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:124:5
+ |
+LL | field1: A,
+ | ^^^^^^^^^
+ = help: add `P: Send` bound in `Send` impl
+
+error: some fields in `Complex<Q, MutexGuard<'static, bool>>` are not safe to be sent to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:131:1
+ |
+LL | unsafe impl<Q: Send> Send for Complex<Q, MutexGuard<'static, bool>> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: it is not safe to send field `field2` to another thread
+ --> $DIR/non_send_fields_in_send_ty.rs:125:5
+ |
+LL | field2: B,
+ | ^^^^^^^^^
+ = help: use a thread-safe type that implements `Send`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/nonminimal_bool.rs b/src/tools/clippy/tests/ui/nonminimal_bool.rs
new file mode 100644
index 000000000..24ae62bb0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/nonminimal_bool.rs
@@ -0,0 +1,59 @@
+#![feature(lint_reasons)]
+#![allow(unused, clippy::diverging_sub_expression)]
+#![warn(clippy::nonminimal_bool)]
+
+fn main() {
+ let a: bool = unimplemented!();
+ let b: bool = unimplemented!();
+ let c: bool = unimplemented!();
+ let d: bool = unimplemented!();
+ let e: bool = unimplemented!();
+ let _ = !true;
+ let _ = !false;
+ let _ = !!a;
+ let _ = false || a;
+ // don't lint on cfgs
+ let _ = cfg!(you_shall_not_not_pass) && a;
+ let _ = a || !b || !c || !d || !e;
+ let _ = !(!a && b);
+ let _ = !(!a || b);
+ let _ = !a && !(b && c);
+}
+
+fn equality_stuff() {
+ let a: i32 = unimplemented!();
+ let b: i32 = unimplemented!();
+ let c: i32 = unimplemented!();
+ let d: i32 = unimplemented!();
+ let _ = a == b && c == 5 && a == b;
+ let _ = a == b || c == 5 || a == b;
+ let _ = a == b && c == 5 && b == a;
+ let _ = a != b || !(a != b || c == d);
+ let _ = a != b && !(a != b && c == d);
+}
+
+fn issue3847(a: u32, b: u32) -> bool {
+ const THRESHOLD: u32 = 1_000;
+
+ if a < THRESHOLD && b >= THRESHOLD || a >= THRESHOLD && b < THRESHOLD {
+ return false;
+ }
+ true
+}
+
+fn issue4548() {
+ fn f(_i: u32, _j: u32) -> u32 {
+ unimplemented!();
+ }
+
+ let i = 0;
+ let j = 0;
+
+ if i != j && f(i, j) != 0 || i == j && f(i, j) != 1 {}
+}
+
+fn check_expect() {
+ let a: bool = unimplemented!();
+ #[expect(clippy::nonminimal_bool)]
+ let _ = !!a;
+}
diff --git a/src/tools/clippy/tests/ui/nonminimal_bool.stderr b/src/tools/clippy/tests/ui/nonminimal_bool.stderr
new file mode 100644
index 000000000..fc6a5ce1d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/nonminimal_bool.stderr
@@ -0,0 +1,111 @@
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:11:13
+ |
+LL | let _ = !true;
+ | ^^^^^ help: try: `false`
+ |
+ = note: `-D clippy::nonminimal-bool` implied by `-D warnings`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:12:13
+ |
+LL | let _ = !false;
+ | ^^^^^^ help: try: `true`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:13:13
+ |
+LL | let _ = !!a;
+ | ^^^ help: try: `a`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:14:13
+ |
+LL | let _ = false || a;
+ | ^^^^^^^^^^ help: try: `a`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:18:13
+ |
+LL | let _ = !(!a && b);
+ | ^^^^^^^^^^ help: try: `a || !b`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:19:13
+ |
+LL | let _ = !(!a || b);
+ | ^^^^^^^^^^ help: try: `a && !b`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:20:13
+ |
+LL | let _ = !a && !(b && c);
+ | ^^^^^^^^^^^^^^^ help: try: `!(a || b && c)`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:28:13
+ |
+LL | let _ = a == b && c == 5 && a == b;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | let _ = !(a != b || c != 5);
+ | ~~~~~~~~~~~~~~~~~~~
+LL | let _ = a == b && c == 5;
+ | ~~~~~~~~~~~~~~~~
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:29:13
+ |
+LL | let _ = a == b || c == 5 || a == b;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | let _ = !(a != b && c != 5);
+ | ~~~~~~~~~~~~~~~~~~~
+LL | let _ = a == b || c == 5;
+ | ~~~~~~~~~~~~~~~~
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:30:13
+ |
+LL | let _ = a == b && c == 5 && b == a;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | let _ = !(a != b || c != 5);
+ | ~~~~~~~~~~~~~~~~~~~
+LL | let _ = a == b && c == 5;
+ | ~~~~~~~~~~~~~~~~
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:31:13
+ |
+LL | let _ = a != b || !(a != b || c == d);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | let _ = !(a == b && c == d);
+ | ~~~~~~~~~~~~~~~~~~~
+LL | let _ = a != b || c != d;
+ | ~~~~~~~~~~~~~~~~
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool.rs:32:13
+ |
+LL | let _ = a != b && !(a != b && c == d);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | let _ = !(a == b || c == d);
+ | ~~~~~~~~~~~~~~~~~~~
+LL | let _ = a != b && c != d;
+ | ~~~~~~~~~~~~~~~~
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/nonminimal_bool_methods.fixed b/src/tools/clippy/tests/ui/nonminimal_bool_methods.fixed
new file mode 100644
index 000000000..aad44089d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/nonminimal_bool_methods.fixed
@@ -0,0 +1,111 @@
+// run-rustfix
+#![allow(unused, clippy::diverging_sub_expression)]
+#![warn(clippy::nonminimal_bool)]
+
+fn methods_with_negation() {
+ let a: Option<i32> = unimplemented!();
+ let b: Result<i32, i32> = unimplemented!();
+ let _ = a.is_some();
+ let _ = a.is_none();
+ let _ = a.is_none();
+ let _ = a.is_some();
+ let _ = b.is_err();
+ let _ = b.is_ok();
+ let _ = b.is_ok();
+ let _ = b.is_err();
+ let c = false;
+ let _ = a.is_none() || c;
+ let _ = a.is_none() && c;
+ let _ = !(!c ^ c) || a.is_none();
+ let _ = (!c ^ c) || a.is_none();
+ let _ = !c ^ c || a.is_none();
+}
+
+// Simplified versions of https://github.com/rust-lang/rust-clippy/issues/2638
+// clippy::nonminimal_bool should only check the built-in Result and Some type, not
+// any other types like the following.
+enum CustomResultOk<E> {
+ Ok,
+ Err(E),
+}
+enum CustomResultErr<E> {
+ Ok,
+ Err(E),
+}
+enum CustomSomeSome<T> {
+ Some(T),
+ None,
+}
+enum CustomSomeNone<T> {
+ Some(T),
+ None,
+}
+
+impl<E> CustomResultOk<E> {
+ pub fn is_ok(&self) -> bool {
+ true
+ }
+}
+
+impl<E> CustomResultErr<E> {
+ pub fn is_err(&self) -> bool {
+ true
+ }
+}
+
+impl<T> CustomSomeSome<T> {
+ pub fn is_some(&self) -> bool {
+ true
+ }
+}
+
+impl<T> CustomSomeNone<T> {
+ pub fn is_none(&self) -> bool {
+ true
+ }
+}
+
+fn dont_warn_for_custom_methods_with_negation() {
+ let res = CustomResultOk::Err("Error");
+ // Should not warn and suggest 'is_err()' because the type does not
+ // implement is_err().
+ if !res.is_ok() {}
+
+ let res = CustomResultErr::Err("Error");
+ // Should not warn and suggest 'is_ok()' because the type does not
+ // implement is_ok().
+ if !res.is_err() {}
+
+ let res = CustomSomeSome::Some("thing");
+ // Should not warn and suggest 'is_none()' because the type does not
+ // implement is_none().
+ if !res.is_some() {}
+
+ let res = CustomSomeNone::Some("thing");
+ // Should not warn and suggest 'is_some()' because the type does not
+ // implement is_some().
+ if !res.is_none() {}
+}
+
+// Only Built-in Result and Some types should suggest the negated alternative
+fn warn_for_built_in_methods_with_negation() {
+ let res: Result<usize, usize> = Ok(1);
+ if res.is_err() {}
+ if res.is_ok() {}
+
+ let res = Some(1);
+ if res.is_none() {}
+ if res.is_some() {}
+}
+
+#[allow(clippy::neg_cmp_op_on_partial_ord)]
+fn dont_warn_for_negated_partial_ord_comparison() {
+ let a: f64 = unimplemented!();
+ let b: f64 = unimplemented!();
+ let _ = !(a < b);
+ let _ = !(a <= b);
+ let _ = !(a > b);
+ let _ = !(a >= b);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/nonminimal_bool_methods.rs b/src/tools/clippy/tests/ui/nonminimal_bool_methods.rs
new file mode 100644
index 000000000..b9074da84
--- /dev/null
+++ b/src/tools/clippy/tests/ui/nonminimal_bool_methods.rs
@@ -0,0 +1,111 @@
+// run-rustfix
+#![allow(unused, clippy::diverging_sub_expression)]
+#![warn(clippy::nonminimal_bool)]
+
+fn methods_with_negation() {
+ let a: Option<i32> = unimplemented!();
+ let b: Result<i32, i32> = unimplemented!();
+ let _ = a.is_some();
+ let _ = !a.is_some();
+ let _ = a.is_none();
+ let _ = !a.is_none();
+ let _ = b.is_err();
+ let _ = !b.is_err();
+ let _ = b.is_ok();
+ let _ = !b.is_ok();
+ let c = false;
+ let _ = !(a.is_some() && !c);
+ let _ = !(a.is_some() || !c);
+ let _ = !(!c ^ c) || !a.is_some();
+ let _ = (!c ^ c) || !a.is_some();
+ let _ = !c ^ c || !a.is_some();
+}
+
+// Simplified versions of https://github.com/rust-lang/rust-clippy/issues/2638
+// clippy::nonminimal_bool should only check the built-in Result and Some type, not
+// any other types like the following.
+enum CustomResultOk<E> {
+ Ok,
+ Err(E),
+}
+enum CustomResultErr<E> {
+ Ok,
+ Err(E),
+}
+enum CustomSomeSome<T> {
+ Some(T),
+ None,
+}
+enum CustomSomeNone<T> {
+ Some(T),
+ None,
+}
+
+impl<E> CustomResultOk<E> {
+ pub fn is_ok(&self) -> bool {
+ true
+ }
+}
+
+impl<E> CustomResultErr<E> {
+ pub fn is_err(&self) -> bool {
+ true
+ }
+}
+
+impl<T> CustomSomeSome<T> {
+ pub fn is_some(&self) -> bool {
+ true
+ }
+}
+
+impl<T> CustomSomeNone<T> {
+ pub fn is_none(&self) -> bool {
+ true
+ }
+}
+
+fn dont_warn_for_custom_methods_with_negation() {
+ let res = CustomResultOk::Err("Error");
+ // Should not warn and suggest 'is_err()' because the type does not
+ // implement is_err().
+ if !res.is_ok() {}
+
+ let res = CustomResultErr::Err("Error");
+ // Should not warn and suggest 'is_ok()' because the type does not
+ // implement is_ok().
+ if !res.is_err() {}
+
+ let res = CustomSomeSome::Some("thing");
+ // Should not warn and suggest 'is_none()' because the type does not
+ // implement is_none().
+ if !res.is_some() {}
+
+ let res = CustomSomeNone::Some("thing");
+ // Should not warn and suggest 'is_some()' because the type does not
+ // implement is_some().
+ if !res.is_none() {}
+}
+
+// Only Built-in Result and Some types should suggest the negated alternative
+fn warn_for_built_in_methods_with_negation() {
+ let res: Result<usize, usize> = Ok(1);
+ if !res.is_ok() {}
+ if !res.is_err() {}
+
+ let res = Some(1);
+ if !res.is_some() {}
+ if !res.is_none() {}
+}
+
+#[allow(clippy::neg_cmp_op_on_partial_ord)]
+fn dont_warn_for_negated_partial_ord_comparison() {
+ let a: f64 = unimplemented!();
+ let b: f64 = unimplemented!();
+ let _ = !(a < b);
+ let _ = !(a <= b);
+ let _ = !(a > b);
+ let _ = !(a >= b);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/nonminimal_bool_methods.stderr b/src/tools/clippy/tests/ui/nonminimal_bool_methods.stderr
new file mode 100644
index 000000000..21b84db85
--- /dev/null
+++ b/src/tools/clippy/tests/ui/nonminimal_bool_methods.stderr
@@ -0,0 +1,82 @@
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:9:13
+ |
+LL | let _ = !a.is_some();
+ | ^^^^^^^^^^^^ help: try: `a.is_none()`
+ |
+ = note: `-D clippy::nonminimal-bool` implied by `-D warnings`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:11:13
+ |
+LL | let _ = !a.is_none();
+ | ^^^^^^^^^^^^ help: try: `a.is_some()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:13:13
+ |
+LL | let _ = !b.is_err();
+ | ^^^^^^^^^^^ help: try: `b.is_ok()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:15:13
+ |
+LL | let _ = !b.is_ok();
+ | ^^^^^^^^^^ help: try: `b.is_err()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:17:13
+ |
+LL | let _ = !(a.is_some() && !c);
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `a.is_none() || c`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:18:13
+ |
+LL | let _ = !(a.is_some() || !c);
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `a.is_none() && c`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:19:26
+ |
+LL | let _ = !(!c ^ c) || !a.is_some();
+ | ^^^^^^^^^^^^ help: try: `a.is_none()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:20:25
+ |
+LL | let _ = (!c ^ c) || !a.is_some();
+ | ^^^^^^^^^^^^ help: try: `a.is_none()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:21:23
+ |
+LL | let _ = !c ^ c || !a.is_some();
+ | ^^^^^^^^^^^^ help: try: `a.is_none()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:93:8
+ |
+LL | if !res.is_ok() {}
+ | ^^^^^^^^^^^^ help: try: `res.is_err()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:94:8
+ |
+LL | if !res.is_err() {}
+ | ^^^^^^^^^^^^^ help: try: `res.is_ok()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:97:8
+ |
+LL | if !res.is_some() {}
+ | ^^^^^^^^^^^^^^ help: try: `res.is_none()`
+
+error: this boolean expression can be simplified
+ --> $DIR/nonminimal_bool_methods.rs:98:8
+ |
+LL | if !res.is_none() {}
+ | ^^^^^^^^^^^^^^ help: try: `res.is_some()`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/numbered_fields.fixed b/src/tools/clippy/tests/ui/numbered_fields.fixed
new file mode 100644
index 000000000..68c987eb4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/numbered_fields.fixed
@@ -0,0 +1,39 @@
+//run-rustfix
+#![warn(clippy::init_numbered_fields)]
+#![allow(unused_tuple_struct_fields)]
+
+#[derive(Default)]
+struct TupleStruct(u32, u32, u8);
+
+// This shouldn't lint because it's in a macro
+macro_rules! tuple_struct_init {
+ () => {
+ TupleStruct { 0: 0, 1: 1, 2: 2 }
+ };
+}
+
+fn main() {
+ let tuple_struct = TupleStruct::default();
+
+ // This should lint
+ let _ = TupleStruct(1u32, 42, 23u8);
+
+ // This should also lint and order the fields correctly
+ let _ = TupleStruct(1u32, 3u32, 2u8);
+
+ // Ok because of default initializer
+ let _ = TupleStruct { 0: 42, ..tuple_struct };
+
+ let _ = TupleStruct {
+ 1: 23,
+ ..TupleStruct::default()
+ };
+
+ // Ok because it's in macro
+ let _ = tuple_struct_init!();
+
+ type Alias = TupleStruct;
+
+ // Aliases can't be tuple constructed #8638
+ let _ = Alias { 0: 0, 1: 1, 2: 2 };
+}
diff --git a/src/tools/clippy/tests/ui/numbered_fields.rs b/src/tools/clippy/tests/ui/numbered_fields.rs
new file mode 100644
index 000000000..2ef4fb4de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/numbered_fields.rs
@@ -0,0 +1,47 @@
+//run-rustfix
+#![warn(clippy::init_numbered_fields)]
+#![allow(unused_tuple_struct_fields)]
+
+#[derive(Default)]
+struct TupleStruct(u32, u32, u8);
+
+// This shouldn't lint because it's in a macro
+macro_rules! tuple_struct_init {
+ () => {
+ TupleStruct { 0: 0, 1: 1, 2: 2 }
+ };
+}
+
+fn main() {
+ let tuple_struct = TupleStruct::default();
+
+ // This should lint
+ let _ = TupleStruct {
+ 0: 1u32,
+ 1: 42,
+ 2: 23u8,
+ };
+
+ // This should also lint and order the fields correctly
+ let _ = TupleStruct {
+ 0: 1u32,
+ 2: 2u8,
+ 1: 3u32,
+ };
+
+ // Ok because of default initializer
+ let _ = TupleStruct { 0: 42, ..tuple_struct };
+
+ let _ = TupleStruct {
+ 1: 23,
+ ..TupleStruct::default()
+ };
+
+ // Ok because it's in macro
+ let _ = tuple_struct_init!();
+
+ type Alias = TupleStruct;
+
+ // Aliases can't be tuple constructed #8638
+ let _ = Alias { 0: 0, 1: 1, 2: 2 };
+}
diff --git a/src/tools/clippy/tests/ui/numbered_fields.stderr b/src/tools/clippy/tests/ui/numbered_fields.stderr
new file mode 100644
index 000000000..60c0d7898
--- /dev/null
+++ b/src/tools/clippy/tests/ui/numbered_fields.stderr
@@ -0,0 +1,26 @@
+error: used a field initializer for a tuple struct
+ --> $DIR/numbered_fields.rs:19:13
+ |
+LL | let _ = TupleStruct {
+ | _____________^
+LL | | 0: 1u32,
+LL | | 1: 42,
+LL | | 2: 23u8,
+LL | | };
+ | |_____^ help: try this instead: `TupleStruct(1u32, 42, 23u8)`
+ |
+ = note: `-D clippy::init-numbered-fields` implied by `-D warnings`
+
+error: used a field initializer for a tuple struct
+ --> $DIR/numbered_fields.rs:26:13
+ |
+LL | let _ = TupleStruct {
+ | _____________^
+LL | | 0: 1u32,
+LL | | 2: 2u8,
+LL | | 1: 3u32,
+LL | | };
+ | |_____^ help: try this instead: `TupleStruct(1u32, 3u32, 2u8)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.fixed b/src/tools/clippy/tests/ui/obfuscated_if_else.fixed
new file mode 100644
index 000000000..62d932c2c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/obfuscated_if_else.fixed
@@ -0,0 +1,7 @@
+// run-rustfix
+
+#![warn(clippy::obfuscated_if_else)]
+
+fn main() {
+ if true { "a" } else { "b" };
+}
diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.rs b/src/tools/clippy/tests/ui/obfuscated_if_else.rs
new file mode 100644
index 000000000..273be9092
--- /dev/null
+++ b/src/tools/clippy/tests/ui/obfuscated_if_else.rs
@@ -0,0 +1,7 @@
+// run-rustfix
+
+#![warn(clippy::obfuscated_if_else)]
+
+fn main() {
+ true.then_some("a").unwrap_or("b");
+}
diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.stderr b/src/tools/clippy/tests/ui/obfuscated_if_else.stderr
new file mode 100644
index 000000000..e4180c288
--- /dev/null
+++ b/src/tools/clippy/tests/ui/obfuscated_if_else.stderr
@@ -0,0 +1,10 @@
+error: use of `.then_some(..).unwrap_or(..)` can be written more clearly with `if .. else ..`
+ --> $DIR/obfuscated_if_else.rs:6:5
+ |
+LL | true.then_some("a").unwrap_or("b");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { "a" } else { "b" }`
+ |
+ = note: `-D clippy::obfuscated-if-else` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/octal_escapes.rs b/src/tools/clippy/tests/ui/octal_escapes.rs
new file mode 100644
index 000000000..53145ef0f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/octal_escapes.rs
@@ -0,0 +1,20 @@
+#![warn(clippy::octal_escapes)]
+
+fn main() {
+ let _bad1 = "\033[0m";
+ let _bad2 = b"\033[0m";
+ let _bad3 = "\\\033[0m";
+ // maximum 3 digits (\012 is the escape)
+ let _bad4 = "\01234567";
+ let _bad5 = "\0\03";
+ let _bad6 = "Text-\055\077-MoreText";
+ let _bad7 = "EvenMoreText-\01\02-ShortEscapes";
+ let _bad8 = "锈\01锈";
+ let _bad9 = "锈\011锈";
+
+ let _good1 = "\\033[0m";
+ let _good2 = "\0\\0";
+ let _good3 = "\0\0";
+ let _good4 = "X\0\0X";
+ let _good5 = "锈\0锈";
+}
diff --git a/src/tools/clippy/tests/ui/octal_escapes.stderr b/src/tools/clippy/tests/ui/octal_escapes.stderr
new file mode 100644
index 000000000..54f5bbb0f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/octal_escapes.stderr
@@ -0,0 +1,131 @@
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:4:17
+ |
+LL | let _bad1 = "\033[0m";
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::octal-escapes` implied by `-D warnings`
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad1 = "\x1b[0m";
+ | ~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad1 = "\x0033[0m";
+ | ~~~~~~~~~~~
+
+error: octal-looking escape in byte string literal
+ --> $DIR/octal_escapes.rs:5:17
+ |
+LL | let _bad2 = b"\033[0m";
+ | ^^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null byte
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad2 = b"\x1b[0m";
+ | ~~~~~~~~~~
+help: if the null byte is intended, disambiguate using
+ |
+LL | let _bad2 = b"\x0033[0m";
+ | ~~~~~~~~~~~~
+
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:6:17
+ |
+LL | let _bad3 = "\\\033[0m";
+ | ^^^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad3 = "\\\x1b[0m";
+ | ~~~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad3 = "\\\x0033[0m";
+ | ~~~~~~~~~~~~~
+
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:8:17
+ |
+LL | let _bad4 = "\01234567";
+ | ^^^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad4 = "\x0a34567";
+ | ~~~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad4 = "\x001234567";
+ | ~~~~~~~~~~~~~
+
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:10:17
+ |
+LL | let _bad6 = "Text-\055\077-MoreText";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad6 = "Text-\x2d\x3f-MoreText";
+ | ~~~~~~~~~~~~~~~~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad6 = "Text-\x0055\x0077-MoreText";
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:11:17
+ |
+LL | let _bad7 = "EvenMoreText-\01\02-ShortEscapes";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad7 = "EvenMoreText-\x01\x02-ShortEscapes";
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad7 = "EvenMoreText-\x001\x002-ShortEscapes";
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:12:17
+ |
+LL | let _bad8 = "锈\01锈";
+ | ^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad8 = "锈\x01锈";
+ | ~~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad8 = "锈\x001锈";
+ | ~~~~~~~~~~~
+
+error: octal-looking escape in string literal
+ --> $DIR/octal_escapes.rs:13:17
+ |
+LL | let _bad9 = "锈\011锈";
+ | ^^^^^^^^^^
+ |
+ = help: octal escapes are not supported, `\0` is always a null character
+help: if an octal escape was intended, use the hexadecimal representation instead
+ |
+LL | let _bad9 = "锈\x09锈";
+ | ~~~~~~~~~~
+help: if the null character is intended, disambiguate using
+ |
+LL | let _bad9 = "锈\x0011锈";
+ | ~~~~~~~~~~~~
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ok_expect.rs b/src/tools/clippy/tests/ui/ok_expect.rs
new file mode 100644
index 000000000..ff68d38c7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ok_expect.rs
@@ -0,0 +1,27 @@
+use std::io;
+
+struct MyError(()); // doesn't implement Debug
+
+#[derive(Debug)]
+struct MyErrorWithParam<T> {
+ x: T,
+}
+
+fn main() {
+ let res: Result<i32, ()> = Ok(0);
+ let _ = res.unwrap();
+
+ res.ok().expect("disaster!");
+ // the following should not warn, since `expect` isn't implemented unless
+ // the error type implements `Debug`
+ let res2: Result<i32, MyError> = Ok(0);
+ res2.ok().expect("oh noes!");
+ let res3: Result<u32, MyErrorWithParam<u8>> = Ok(0);
+ res3.ok().expect("whoof");
+ let res4: Result<u32, io::Error> = Ok(0);
+ res4.ok().expect("argh");
+ let res5: io::Result<u32> = Ok(0);
+ res5.ok().expect("oops");
+ let res6: Result<u32, &str> = Ok(0);
+ res6.ok().expect("meh");
+}
diff --git a/src/tools/clippy/tests/ui/ok_expect.stderr b/src/tools/clippy/tests/ui/ok_expect.stderr
new file mode 100644
index 000000000..b02b28e7f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ok_expect.stderr
@@ -0,0 +1,43 @@
+error: called `ok().expect()` on a `Result` value
+ --> $DIR/ok_expect.rs:14:5
+ |
+LL | res.ok().expect("disaster!");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::ok-expect` implied by `-D warnings`
+ = help: you can call `expect()` directly on the `Result`
+
+error: called `ok().expect()` on a `Result` value
+ --> $DIR/ok_expect.rs:20:5
+ |
+LL | res3.ok().expect("whoof");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: you can call `expect()` directly on the `Result`
+
+error: called `ok().expect()` on a `Result` value
+ --> $DIR/ok_expect.rs:22:5
+ |
+LL | res4.ok().expect("argh");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: you can call `expect()` directly on the `Result`
+
+error: called `ok().expect()` on a `Result` value
+ --> $DIR/ok_expect.rs:24:5
+ |
+LL | res5.ok().expect("oops");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: you can call `expect()` directly on the `Result`
+
+error: called `ok().expect()` on a `Result` value
+ --> $DIR/ok_expect.rs:26:5
+ |
+LL | res6.ok().expect("meh");
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: you can call `expect()` directly on the `Result`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/only_used_in_recursion.rs b/src/tools/clippy/tests/ui/only_used_in_recursion.rs
new file mode 100644
index 000000000..5768434f9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/only_used_in_recursion.rs
@@ -0,0 +1,122 @@
+#![warn(clippy::only_used_in_recursion)]
+
+fn simple(a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { simple(a - 1, b) }
+}
+
+fn with_calc(a: usize, b: isize) -> usize {
+ if a == 0 { 1 } else { with_calc(a - 1, -b + 1) }
+}
+
+fn tuple((a, b): (usize, usize)) -> usize {
+ if a == 0 { 1 } else { tuple((a - 1, b + 1)) }
+}
+
+fn let_tuple(a: usize, b: usize) -> usize {
+ let (c, d) = (a, b);
+ if c == 0 { 1 } else { let_tuple(c - 1, d + 1) }
+}
+
+fn array([a, b]: [usize; 2]) -> usize {
+ if a == 0 { 1 } else { array([a - 1, b + 1]) }
+}
+
+fn index(a: usize, mut b: &[usize], c: usize) -> usize {
+ if a == 0 { 1 } else { index(a - 1, b, c + b[0]) }
+}
+
+fn break_(a: usize, mut b: usize, mut c: usize) -> usize {
+ let c = loop {
+ b += 1;
+ c += 1;
+ if c == 10 {
+ break b;
+ }
+ };
+
+ if a == 0 { 1 } else { break_(a - 1, c, c) }
+}
+
+// this has a side effect
+fn mut_ref(a: usize, b: &mut usize) -> usize {
+ *b = 1;
+ if a == 0 { 1 } else { mut_ref(a - 1, b) }
+}
+
+fn mut_ref2(a: usize, b: &mut usize) -> usize {
+ let mut c = *b;
+ if a == 0 { 1 } else { mut_ref2(a - 1, &mut c) }
+}
+
+fn not_primitive(a: usize, b: String) -> usize {
+ if a == 0 { 1 } else { not_primitive(a - 1, b) }
+}
+
+// this doesn't have a side effect,
+// but `String` is not primitive.
+fn not_primitive_op(a: usize, b: String, c: &str) -> usize {
+ if a == 1 { 1 } else { not_primitive_op(a, b + c, c) }
+}
+
+struct A;
+
+impl A {
+ fn method(a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { A::method(a - 1, b - 1) }
+ }
+
+ fn method2(&self, a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { self.method2(a - 1, b + 1) }
+ }
+}
+
+trait B {
+ fn hello(a: usize, b: usize) -> usize;
+
+ fn hello2(&self, a: usize, b: usize) -> usize;
+}
+
+impl B for A {
+ fn hello(a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { A::hello(a - 1, b + 1) }
+ }
+
+ fn hello2(&self, a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { self.hello2(a - 1, b + 1) }
+ }
+}
+
+trait C {
+ fn hello(a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { Self::hello(a - 1, b + 1) }
+ }
+
+ fn hello2(&self, a: usize, b: usize) -> usize {
+ if a == 0 { 1 } else { self.hello2(a - 1, b + 1) }
+ }
+}
+
+fn ignore(a: usize, _: usize) -> usize {
+ if a == 1 { 1 } else { ignore(a - 1, 0) }
+}
+
+fn ignore2(a: usize, _b: usize) -> usize {
+ if a == 1 { 1 } else { ignore2(a - 1, _b) }
+}
+
+fn f1(a: u32) -> u32 {
+ a
+}
+
+fn f2(a: u32) -> u32 {
+ f1(a)
+}
+
+fn inner_fn(a: u32) -> u32 {
+ fn inner_fn(a: u32) -> u32 {
+ a
+ }
+ inner_fn(a)
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/only_used_in_recursion.stderr b/src/tools/clippy/tests/ui/only_used_in_recursion.stderr
new file mode 100644
index 000000000..6fe9361bf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/only_used_in_recursion.stderr
@@ -0,0 +1,82 @@
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:3:21
+ |
+LL | fn simple(a: usize, b: usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+ |
+ = note: `-D clippy::only-used-in-recursion` implied by `-D warnings`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:7:24
+ |
+LL | fn with_calc(a: usize, b: isize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:11:14
+ |
+LL | fn tuple((a, b): (usize, usize)) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:15:24
+ |
+LL | fn let_tuple(a: usize, b: usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:20:14
+ |
+LL | fn array([a, b]: [usize; 2]) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:24:20
+ |
+LL | fn index(a: usize, mut b: &[usize], c: usize) -> usize {
+ | ^^^^^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:24:37
+ |
+LL | fn index(a: usize, mut b: &[usize], c: usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_c`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:28:21
+ |
+LL | fn break_(a: usize, mut b: usize, mut c: usize) -> usize {
+ | ^^^^^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:46:23
+ |
+LL | fn mut_ref2(a: usize, b: &mut usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:51:28
+ |
+LL | fn not_primitive(a: usize, b: String) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:68:33
+ |
+LL | fn method2(&self, a: usize, b: usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:90:24
+ |
+LL | fn hello(a: usize, b: usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: parameter is only used in recursion
+ --> $DIR/only_used_in_recursion.rs:94:32
+ |
+LL | fn hello2(&self, a: usize, b: usize) -> usize {
+ | ^ help: if this is intentional, prefix with an underscore: `_b`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/op_ref.rs b/src/tools/clippy/tests/ui/op_ref.rs
new file mode 100644
index 000000000..d8bf66603
--- /dev/null
+++ b/src/tools/clippy/tests/ui/op_ref.rs
@@ -0,0 +1,94 @@
+#![allow(unused_variables, clippy::blacklisted_name)]
+#![warn(clippy::op_ref)]
+use std::collections::HashSet;
+use std::ops::{BitAnd, Mul};
+
+fn main() {
+ let tracked_fds: HashSet<i32> = HashSet::new();
+ let new_fds = HashSet::new();
+ let unwanted = &tracked_fds - &new_fds;
+
+ let foo = &5 - &6;
+
+ let bar = String::new();
+ let bar = "foo" == &bar;
+
+ let a = "a".to_string();
+ let b = "a";
+
+ if b < &a {
+ println!("OK");
+ }
+
+ struct X(i32);
+ impl BitAnd for X {
+ type Output = X;
+ fn bitand(self, rhs: X) -> X {
+ X(self.0 & rhs.0)
+ }
+ }
+ impl<'a> BitAnd<&'a X> for X {
+ type Output = X;
+ fn bitand(self, rhs: &'a X) -> X {
+ X(self.0 & rhs.0)
+ }
+ }
+ let x = X(1);
+ let y = X(2);
+ let z = x & &y;
+
+ #[derive(Copy, Clone)]
+ struct Y(i32);
+ impl BitAnd for Y {
+ type Output = Y;
+ fn bitand(self, rhs: Y) -> Y {
+ Y(self.0 & rhs.0)
+ }
+ }
+ impl<'a> BitAnd<&'a Y> for Y {
+ type Output = Y;
+ fn bitand(self, rhs: &'a Y) -> Y {
+ Y(self.0 & rhs.0)
+ }
+ }
+ let x = Y(1);
+ let y = Y(2);
+ let z = x & &y;
+}
+
+#[derive(Clone, Copy)]
+struct A(i32);
+#[derive(Clone, Copy)]
+struct B(i32);
+
+impl Mul<&A> for B {
+ type Output = i32;
+ fn mul(self, rhs: &A) -> Self::Output {
+ self.0 * rhs.0
+ }
+}
+impl Mul<A> for B {
+ type Output = i32;
+ fn mul(self, rhs: A) -> Self::Output {
+ // Should not lint because removing the reference would lead to unconditional recursion
+ self * &rhs
+ }
+}
+impl Mul<&A> for A {
+ type Output = i32;
+ fn mul(self, rhs: &A) -> Self::Output {
+ self.0 * rhs.0
+ }
+}
+impl Mul<A> for A {
+ type Output = i32;
+ fn mul(self, rhs: A) -> Self::Output {
+ let one = B(1);
+ let two = 2;
+ let three = 3;
+ let _ = one * &self;
+ let _ = two + &three;
+ // Removing the reference would lead to unconditional recursion
+ self * &rhs
+ }
+}
diff --git a/src/tools/clippy/tests/ui/op_ref.stderr b/src/tools/clippy/tests/ui/op_ref.stderr
new file mode 100644
index 000000000..fe36c0116
--- /dev/null
+++ b/src/tools/clippy/tests/ui/op_ref.stderr
@@ -0,0 +1,38 @@
+error: needlessly taken reference of both operands
+ --> $DIR/op_ref.rs:11:15
+ |
+LL | let foo = &5 - &6;
+ | ^^^^^^^
+ |
+ = note: `-D clippy::op-ref` implied by `-D warnings`
+help: use the values directly
+ |
+LL | let foo = 5 - 6;
+ | ~ ~
+
+error: taken reference of right operand
+ --> $DIR/op_ref.rs:56:13
+ |
+LL | let z = x & &y;
+ | ^^^^--
+ | |
+ | help: use the right value directly: `y`
+
+error: taken reference of right operand
+ --> $DIR/op_ref.rs:89:17
+ |
+LL | let _ = one * &self;
+ | ^^^^^^-----
+ | |
+ | help: use the right value directly: `self`
+
+error: taken reference of right operand
+ --> $DIR/op_ref.rs:90:17
+ |
+LL | let _ = two + &three;
+ | ^^^^^^------
+ | |
+ | help: use the right value directly: `three`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/open_options.rs b/src/tools/clippy/tests/ui/open_options.rs
new file mode 100644
index 000000000..9063fafbc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/open_options.rs
@@ -0,0 +1,14 @@
+use std::fs::OpenOptions;
+
+#[allow(unused_must_use)]
+#[warn(clippy::nonsensical_open_options)]
+fn main() {
+ OpenOptions::new().read(true).truncate(true).open("foo.txt");
+ OpenOptions::new().append(true).truncate(true).open("foo.txt");
+
+ OpenOptions::new().read(true).read(false).open("foo.txt");
+ OpenOptions::new().create(true).create(false).open("foo.txt");
+ OpenOptions::new().write(true).write(false).open("foo.txt");
+ OpenOptions::new().append(true).append(false).open("foo.txt");
+ OpenOptions::new().truncate(true).truncate(false).open("foo.txt");
+}
diff --git a/src/tools/clippy/tests/ui/open_options.stderr b/src/tools/clippy/tests/ui/open_options.stderr
new file mode 100644
index 000000000..26fe9f6fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/open_options.stderr
@@ -0,0 +1,46 @@
+error: file opened with `truncate` and `read`
+ --> $DIR/open_options.rs:6:5
+ |
+LL | OpenOptions::new().read(true).truncate(true).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::nonsensical-open-options` implied by `-D warnings`
+
+error: file opened with `append` and `truncate`
+ --> $DIR/open_options.rs:7:5
+ |
+LL | OpenOptions::new().append(true).truncate(true).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the method `read` is called more than once
+ --> $DIR/open_options.rs:9:5
+ |
+LL | OpenOptions::new().read(true).read(false).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the method `create` is called more than once
+ --> $DIR/open_options.rs:10:5
+ |
+LL | OpenOptions::new().create(true).create(false).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the method `write` is called more than once
+ --> $DIR/open_options.rs:11:5
+ |
+LL | OpenOptions::new().write(true).write(false).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the method `append` is called more than once
+ --> $DIR/open_options.rs:12:5
+ |
+LL | OpenOptions::new().append(true).append(false).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: the method `truncate` is called more than once
+ --> $DIR/open_options.rs:13:5
+ |
+LL | OpenOptions::new().truncate(true).truncate(false).open("foo.txt");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_as_ref_deref.fixed b/src/tools/clippy/tests/ui/option_as_ref_deref.fixed
new file mode 100644
index 000000000..07d7f0b45
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_as_ref_deref.fixed
@@ -0,0 +1,44 @@
+// run-rustfix
+
+#![allow(unused_imports, clippy::redundant_clone)]
+#![warn(clippy::option_as_ref_deref)]
+
+use std::ffi::{CString, OsString};
+use std::ops::{Deref, DerefMut};
+use std::path::PathBuf;
+
+fn main() {
+ let mut opt = Some(String::from("123"));
+
+ let _ = opt.clone().as_deref().map(str::len);
+
+ #[rustfmt::skip]
+ let _ = opt.clone().as_deref()
+ .map(str::len);
+
+ let _ = opt.as_deref_mut();
+
+ let _ = opt.as_deref();
+ let _ = opt.as_deref();
+ let _ = opt.as_deref_mut();
+ let _ = opt.as_deref_mut();
+ let _ = Some(CString::new(vec![]).unwrap()).as_deref();
+ let _ = Some(OsString::new()).as_deref();
+ let _ = Some(PathBuf::new()).as_deref();
+ let _ = Some(Vec::<()>::new()).as_deref();
+ let _ = Some(Vec::<()>::new()).as_deref_mut();
+
+ let _ = opt.as_deref();
+ let _ = opt.clone().as_deref_mut().map(|x| x.len());
+
+ let vc = vec![String::new()];
+ let _ = Some(1_usize).as_ref().map(|x| vc[*x].as_str()); // should not be linted
+
+ let _: Option<&str> = Some(&String::new()).as_ref().map(|x| x.as_str()); // should not be linted
+
+ let _ = opt.as_deref();
+ let _ = opt.as_deref_mut();
+
+ // Issue #5927
+ let _ = opt.as_deref();
+}
diff --git a/src/tools/clippy/tests/ui/option_as_ref_deref.rs b/src/tools/clippy/tests/ui/option_as_ref_deref.rs
new file mode 100644
index 000000000..6ae059c94
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_as_ref_deref.rs
@@ -0,0 +1,47 @@
+// run-rustfix
+
+#![allow(unused_imports, clippy::redundant_clone)]
+#![warn(clippy::option_as_ref_deref)]
+
+use std::ffi::{CString, OsString};
+use std::ops::{Deref, DerefMut};
+use std::path::PathBuf;
+
+fn main() {
+ let mut opt = Some(String::from("123"));
+
+ let _ = opt.clone().as_ref().map(Deref::deref).map(str::len);
+
+ #[rustfmt::skip]
+ let _ = opt.clone()
+ .as_ref().map(
+ Deref::deref
+ )
+ .map(str::len);
+
+ let _ = opt.as_mut().map(DerefMut::deref_mut);
+
+ let _ = opt.as_ref().map(String::as_str);
+ let _ = opt.as_ref().map(|x| x.as_str());
+ let _ = opt.as_mut().map(String::as_mut_str);
+ let _ = opt.as_mut().map(|x| x.as_mut_str());
+ let _ = Some(CString::new(vec![]).unwrap()).as_ref().map(CString::as_c_str);
+ let _ = Some(OsString::new()).as_ref().map(OsString::as_os_str);
+ let _ = Some(PathBuf::new()).as_ref().map(PathBuf::as_path);
+ let _ = Some(Vec::<()>::new()).as_ref().map(Vec::as_slice);
+ let _ = Some(Vec::<()>::new()).as_mut().map(Vec::as_mut_slice);
+
+ let _ = opt.as_ref().map(|x| x.deref());
+ let _ = opt.clone().as_mut().map(|x| x.deref_mut()).map(|x| x.len());
+
+ let vc = vec![String::new()];
+ let _ = Some(1_usize).as_ref().map(|x| vc[*x].as_str()); // should not be linted
+
+ let _: Option<&str> = Some(&String::new()).as_ref().map(|x| x.as_str()); // should not be linted
+
+ let _ = opt.as_ref().map(|x| &**x);
+ let _ = opt.as_mut().map(|x| &mut **x);
+
+ // Issue #5927
+ let _ = opt.as_ref().map(std::ops::Deref::deref);
+}
diff --git a/src/tools/clippy/tests/ui/option_as_ref_deref.stderr b/src/tools/clippy/tests/ui/option_as_ref_deref.stderr
new file mode 100644
index 000000000..62f282324
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_as_ref_deref.stderr
@@ -0,0 +1,110 @@
+error: called `.as_ref().map(Deref::deref)` on an Option value. This can be done more directly by calling `opt.clone().as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:13:13
+ |
+LL | let _ = opt.clone().as_ref().map(Deref::deref).map(str::len);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.clone().as_deref()`
+ |
+ = note: `-D clippy::option-as-ref-deref` implied by `-D warnings`
+
+error: called `.as_ref().map(Deref::deref)` on an Option value. This can be done more directly by calling `opt.clone().as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:16:13
+ |
+LL | let _ = opt.clone()
+ | _____________^
+LL | | .as_ref().map(
+LL | | Deref::deref
+LL | | )
+ | |_________^ help: try using as_deref instead: `opt.clone().as_deref()`
+
+error: called `.as_mut().map(DerefMut::deref_mut)` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+ --> $DIR/option_as_ref_deref.rs:22:13
+ |
+LL | let _ = opt.as_mut().map(DerefMut::deref_mut);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
+
+error: called `.as_ref().map(String::as_str)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:24:13
+ |
+LL | let _ = opt.as_ref().map(String::as_str);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
+
+error: called `.as_ref().map(|x| x.as_str())` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:25:13
+ |
+LL | let _ = opt.as_ref().map(|x| x.as_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
+
+error: called `.as_mut().map(String::as_mut_str)` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+ --> $DIR/option_as_ref_deref.rs:26:13
+ |
+LL | let _ = opt.as_mut().map(String::as_mut_str);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
+
+error: called `.as_mut().map(|x| x.as_mut_str())` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+ --> $DIR/option_as_ref_deref.rs:27:13
+ |
+LL | let _ = opt.as_mut().map(|x| x.as_mut_str());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
+
+error: called `.as_ref().map(CString::as_c_str)` on an Option value. This can be done more directly by calling `Some(CString::new(vec![]).unwrap()).as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:28:13
+ |
+LL | let _ = Some(CString::new(vec![]).unwrap()).as_ref().map(CString::as_c_str);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(CString::new(vec![]).unwrap()).as_deref()`
+
+error: called `.as_ref().map(OsString::as_os_str)` on an Option value. This can be done more directly by calling `Some(OsString::new()).as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:29:13
+ |
+LL | let _ = Some(OsString::new()).as_ref().map(OsString::as_os_str);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(OsString::new()).as_deref()`
+
+error: called `.as_ref().map(PathBuf::as_path)` on an Option value. This can be done more directly by calling `Some(PathBuf::new()).as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:30:13
+ |
+LL | let _ = Some(PathBuf::new()).as_ref().map(PathBuf::as_path);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(PathBuf::new()).as_deref()`
+
+error: called `.as_ref().map(Vec::as_slice)` on an Option value. This can be done more directly by calling `Some(Vec::<()>::new()).as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:31:13
+ |
+LL | let _ = Some(Vec::<()>::new()).as_ref().map(Vec::as_slice);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `Some(Vec::<()>::new()).as_deref()`
+
+error: called `.as_mut().map(Vec::as_mut_slice)` on an Option value. This can be done more directly by calling `Some(Vec::<()>::new()).as_deref_mut()` instead
+ --> $DIR/option_as_ref_deref.rs:32:13
+ |
+LL | let _ = Some(Vec::<()>::new()).as_mut().map(Vec::as_mut_slice);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `Some(Vec::<()>::new()).as_deref_mut()`
+
+error: called `.as_ref().map(|x| x.deref())` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:34:13
+ |
+LL | let _ = opt.as_ref().map(|x| x.deref());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
+
+error: called `.as_mut().map(|x| x.deref_mut())` on an Option value. This can be done more directly by calling `opt.clone().as_deref_mut()` instead
+ --> $DIR/option_as_ref_deref.rs:35:13
+ |
+LL | let _ = opt.clone().as_mut().map(|x| x.deref_mut()).map(|x| x.len());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.clone().as_deref_mut()`
+
+error: called `.as_ref().map(|x| &**x)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:42:13
+ |
+LL | let _ = opt.as_ref().map(|x| &**x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
+
+error: called `.as_mut().map(|x| &mut **x)` on an Option value. This can be done more directly by calling `opt.as_deref_mut()` instead
+ --> $DIR/option_as_ref_deref.rs:43:13
+ |
+LL | let _ = opt.as_mut().map(|x| &mut **x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref_mut instead: `opt.as_deref_mut()`
+
+error: called `.as_ref().map(std::ops::Deref::deref)` on an Option value. This can be done more directly by calling `opt.as_deref()` instead
+ --> $DIR/option_as_ref_deref.rs:46:13
+ |
+LL | let _ = opt.as_ref().map(std::ops::Deref::deref);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using as_deref instead: `opt.as_deref()`
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_env_unwrap.rs b/src/tools/clippy/tests/ui/option_env_unwrap.rs
new file mode 100644
index 000000000..0141fb785
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_env_unwrap.rs
@@ -0,0 +1,24 @@
+// aux-build:macro_rules.rs
+#![warn(clippy::option_env_unwrap)]
+#![allow(clippy::map_flatten)]
+
+#[macro_use]
+extern crate macro_rules;
+
+macro_rules! option_env_unwrap {
+ ($env: expr) => {
+ option_env!($env).unwrap()
+ };
+ ($env: expr, $message: expr) => {
+ option_env!($env).expect($message)
+ };
+}
+
+fn main() {
+ let _ = option_env!("PATH").unwrap();
+ let _ = option_env!("PATH").expect("environment variable PATH isn't set");
+ let _ = option_env_unwrap!("PATH");
+ let _ = option_env_unwrap!("PATH", "environment variable PATH isn't set");
+ let _ = option_env_unwrap_external!("PATH");
+ let _ = option_env_unwrap_external!("PATH", "environment variable PATH isn't set");
+}
diff --git a/src/tools/clippy/tests/ui/option_env_unwrap.stderr b/src/tools/clippy/tests/ui/option_env_unwrap.stderr
new file mode 100644
index 000000000..885ac096c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_env_unwrap.stderr
@@ -0,0 +1,61 @@
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:18:13
+ |
+LL | let _ = option_env!("PATH").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::option-env-unwrap` implied by `-D warnings`
+ = help: consider using the `env!` macro instead
+
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:19:13
+ |
+LL | let _ = option_env!("PATH").expect("environment variable PATH isn't set");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the `env!` macro instead
+
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:10:9
+ |
+LL | option_env!($env).unwrap()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | let _ = option_env_unwrap!("PATH");
+ | -------------------------- in this macro invocation
+ |
+ = help: consider using the `env!` macro instead
+ = note: this error originates in the macro `option_env_unwrap` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:13:9
+ |
+LL | option_env!($env).expect($message)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | let _ = option_env_unwrap!("PATH", "environment variable PATH isn't set");
+ | ----------------------------------------------------------------- in this macro invocation
+ |
+ = help: consider using the `env!` macro instead
+ = note: this error originates in the macro `option_env_unwrap` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:22:13
+ |
+LL | let _ = option_env_unwrap_external!("PATH");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the `env!` macro instead
+ = note: this error originates in the macro `option_env_unwrap_external` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:23:13
+ |
+LL | let _ = option_env_unwrap_external!("PATH", "environment variable PATH isn't set");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the `env!` macro instead
+ = note: this error originates in the macro `option_env_unwrap_external` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_filter_map.fixed b/src/tools/clippy/tests/ui/option_filter_map.fixed
new file mode 100644
index 000000000..b20f73f31
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_filter_map.fixed
@@ -0,0 +1,25 @@
+// run-rustfix
+#![warn(clippy::option_filter_map)]
+#![allow(clippy::map_flatten)]
+
+fn main() {
+ let _ = Some(Some(1)).flatten();
+ let _ = Some(Some(1)).flatten();
+ let _ = Some(1).map(odds_out).flatten();
+ let _ = Some(1).map(odds_out).flatten();
+
+ let _ = vec![Some(1)].into_iter().flatten();
+ let _ = vec![Some(1)].into_iter().flatten();
+ let _ = vec![1]
+ .into_iter()
+ .map(odds_out)
+ .flatten();
+ let _ = vec![1]
+ .into_iter()
+ .map(odds_out)
+ .flatten();
+}
+
+fn odds_out(x: i32) -> Option<i32> {
+ if x % 2 == 0 { Some(x) } else { None }
+}
diff --git a/src/tools/clippy/tests/ui/option_filter_map.rs b/src/tools/clippy/tests/ui/option_filter_map.rs
new file mode 100644
index 000000000..7abaaa0fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_filter_map.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+#![warn(clippy::option_filter_map)]
+#![allow(clippy::map_flatten)]
+
+fn main() {
+ let _ = Some(Some(1)).filter(Option::is_some).map(Option::unwrap);
+ let _ = Some(Some(1)).filter(|o| o.is_some()).map(|o| o.unwrap());
+ let _ = Some(1).map(odds_out).filter(Option::is_some).map(Option::unwrap);
+ let _ = Some(1).map(odds_out).filter(|o| o.is_some()).map(|o| o.unwrap());
+
+ let _ = vec![Some(1)].into_iter().filter(Option::is_some).map(Option::unwrap);
+ let _ = vec![Some(1)].into_iter().filter(|o| o.is_some()).map(|o| o.unwrap());
+ let _ = vec![1]
+ .into_iter()
+ .map(odds_out)
+ .filter(Option::is_some)
+ .map(Option::unwrap);
+ let _ = vec![1]
+ .into_iter()
+ .map(odds_out)
+ .filter(|o| o.is_some())
+ .map(|o| o.unwrap());
+}
+
+fn odds_out(x: i32) -> Option<i32> {
+ if x % 2 == 0 { Some(x) } else { None }
+}
diff --git a/src/tools/clippy/tests/ui/option_filter_map.stderr b/src/tools/clippy/tests/ui/option_filter_map.stderr
new file mode 100644
index 000000000..4a030ac9a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_filter_map.stderr
@@ -0,0 +1,56 @@
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:6:27
+ |
+LL | let _ = Some(Some(1)).filter(Option::is_some).map(Option::unwrap);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `flatten` instead: `flatten()`
+ |
+ = note: `-D clippy::option-filter-map` implied by `-D warnings`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:7:27
+ |
+LL | let _ = Some(Some(1)).filter(|o| o.is_some()).map(|o| o.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `flatten` instead: `flatten()`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:8:35
+ |
+LL | let _ = Some(1).map(odds_out).filter(Option::is_some).map(Option::unwrap);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `flatten` instead: `flatten()`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:9:35
+ |
+LL | let _ = Some(1).map(odds_out).filter(|o| o.is_some()).map(|o| o.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `flatten` instead: `flatten()`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:11:39
+ |
+LL | let _ = vec![Some(1)].into_iter().filter(Option::is_some).map(Option::unwrap);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `flatten` instead: `flatten()`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:12:39
+ |
+LL | let _ = vec![Some(1)].into_iter().filter(|o| o.is_some()).map(|o| o.unwrap());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `flatten` instead: `flatten()`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:16:10
+ |
+LL | .filter(Option::is_some)
+ | __________^
+LL | | .map(Option::unwrap);
+ | |____________________________^ help: consider using `flatten` instead: `flatten()`
+
+error: `filter` for `Some` followed by `unwrap`
+ --> $DIR/option_filter_map.rs:21:10
+ |
+LL | .filter(|o| o.is_some())
+ | __________^
+LL | | .map(|o| o.unwrap());
+ | |____________________________^ help: consider using `flatten` instead: `flatten()`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.fixed b/src/tools/clippy/tests/ui/option_if_let_else.fixed
new file mode 100644
index 000000000..b6d5e106f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_if_let_else.fixed
@@ -0,0 +1,182 @@
+// run-rustfix
+#![warn(clippy::option_if_let_else)]
+#![allow(
+ unused_tuple_struct_fields,
+ clippy::redundant_closure,
+ clippy::ref_option_ref,
+ clippy::equatable_if_let,
+ clippy::let_unit_value
+)]
+
+fn bad1(string: Option<&str>) -> (bool, &str) {
+ string.map_or((false, "hello"), |x| (true, x))
+}
+
+fn else_if_option(string: Option<&str>) -> Option<(bool, &str)> {
+ if string.is_none() {
+ None
+ } else if let Some(x) = string {
+ Some((true, x))
+ } else {
+ Some((false, ""))
+ }
+}
+
+fn unop_bad(string: &Option<&str>, mut num: Option<i32>) {
+ let _ = string.map_or(0, |s| s.len());
+ let _ = num.as_ref().map_or(&0, |s| s);
+ let _ = num.as_mut().map_or(&mut 0, |s| {
+ *s += 1;
+ s
+ });
+ let _ = num.as_ref().map_or(&0, |s| s);
+ let _ = num.map_or(0, |mut s| {
+ s += 1;
+ s
+ });
+ let _ = num.as_mut().map_or(&mut 0, |s| {
+ *s += 1;
+ s
+ });
+}
+
+fn longer_body(arg: Option<u32>) -> u32 {
+ arg.map_or(13, |x| {
+ let y = x * x;
+ y * y
+ })
+}
+
+fn impure_else(arg: Option<i32>) {
+ let side_effect = || {
+ println!("return 1");
+ 1
+ };
+ let _ = arg.map_or_else(|| side_effect(), |x| x);
+}
+
+fn test_map_or_else(arg: Option<u32>) {
+ let _ = arg.map_or_else(|| {
+ let mut y = 1;
+ y = (y + 2 / y) / 2;
+ y = (y + 2 / y) / 2;
+ y
+ }, |x| x * x * x * x);
+}
+
+fn negative_tests(arg: Option<u32>) -> u32 {
+ let _ = if let Some(13) = arg { "unlucky" } else { "lucky" };
+ for _ in 0..10 {
+ let _ = if let Some(x) = arg {
+ x
+ } else {
+ continue;
+ };
+ }
+ let _ = if let Some(x) = arg {
+ return x;
+ } else {
+ 5
+ };
+ 7
+}
+
+// #7973
+fn pattern_to_vec(pattern: &str) -> Vec<String> {
+ pattern
+ .trim_matches('/')
+ .split('/')
+ .flat_map(|s| {
+ s.find('.').map_or_else(|| vec![s.to_string()], |idx| vec![s[..idx].to_string(), s[idx..].to_string()])
+ })
+ .collect::<Vec<_>>()
+}
+
+enum DummyEnum {
+ One(u8),
+ Two,
+}
+
+// should not warn since there is a complex subpat
+// see #7991
+fn complex_subpat() -> DummyEnum {
+ let x = Some(DummyEnum::One(1));
+ let _ = if let Some(_one @ DummyEnum::One(..)) = x { 1 } else { 2 };
+ DummyEnum::Two
+}
+
+fn main() {
+ let optional = Some(5);
+ let _ = optional.map_or(5, |x| x + 2);
+ let _ = bad1(None);
+ let _ = else_if_option(None);
+ unop_bad(&None, None);
+ let _ = longer_body(None);
+ test_map_or_else(None);
+ let _ = negative_tests(None);
+ let _ = impure_else(None);
+
+ let _ = Some(0).map_or(0, |x| loop {
+ if x == 0 {
+ break x;
+ }
+ });
+
+ // #7576
+ const fn _f(x: Option<u32>) -> u32 {
+ // Don't lint, `map_or` isn't const
+ if let Some(x) = x { x } else { 10 }
+ }
+
+ // #5822
+ let s = String::new();
+ // Don't lint, `Some` branch consumes `s`, but else branch uses `s`
+ let _ = if let Some(x) = Some(0) {
+ let s = s;
+ s.len() + x
+ } else {
+ s.len()
+ };
+
+ let s = String::new();
+ // Lint, both branches immutably borrow `s`.
+ let _ = Some(0).map_or(s.len(), |x| s.len() + x);
+
+ let s = String::new();
+ // Lint, `Some` branch consumes `s`, but else branch doesn't use `s`.
+ let _ = Some(0).map_or(1, |x| {
+ let s = s;
+ s.len() + x
+ });
+
+ let s = Some(String::new());
+ // Don't lint, `Some` branch borrows `s`, but else branch consumes `s`
+ let _ = if let Some(x) = &s {
+ x.len()
+ } else {
+ let _s = s;
+ 10
+ };
+
+ let mut s = Some(String::new());
+ // Don't lint, `Some` branch mutably borrows `s`, but else branch also borrows `s`
+ let _ = if let Some(x) = &mut s {
+ x.push_str("test");
+ x.len()
+ } else {
+ let _s = &s;
+ 10
+ };
+
+ async fn _f1(x: u32) -> u32 {
+ x
+ }
+
+ async fn _f2() {
+ // Don't lint. `await` can't be moved into a closure.
+ let _ = if let Some(x) = Some(0) { _f1(x).await } else { 0 };
+ }
+
+ let _ = pattern_to_vec("hello world");
+ let _ = complex_subpat();
+}
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.rs b/src/tools/clippy/tests/ui/option_if_let_else.rs
new file mode 100644
index 000000000..35bae1593
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_if_let_else.rs
@@ -0,0 +1,211 @@
+// run-rustfix
+#![warn(clippy::option_if_let_else)]
+#![allow(
+ unused_tuple_struct_fields,
+ clippy::redundant_closure,
+ clippy::ref_option_ref,
+ clippy::equatable_if_let,
+ clippy::let_unit_value
+)]
+
+fn bad1(string: Option<&str>) -> (bool, &str) {
+ if let Some(x) = string {
+ (true, x)
+ } else {
+ (false, "hello")
+ }
+}
+
+fn else_if_option(string: Option<&str>) -> Option<(bool, &str)> {
+ if string.is_none() {
+ None
+ } else if let Some(x) = string {
+ Some((true, x))
+ } else {
+ Some((false, ""))
+ }
+}
+
+fn unop_bad(string: &Option<&str>, mut num: Option<i32>) {
+ let _ = if let Some(s) = *string { s.len() } else { 0 };
+ let _ = if let Some(s) = &num { s } else { &0 };
+ let _ = if let Some(s) = &mut num {
+ *s += 1;
+ s
+ } else {
+ &mut 0
+ };
+ let _ = if let Some(ref s) = num { s } else { &0 };
+ let _ = if let Some(mut s) = num {
+ s += 1;
+ s
+ } else {
+ 0
+ };
+ let _ = if let Some(ref mut s) = num {
+ *s += 1;
+ s
+ } else {
+ &mut 0
+ };
+}
+
+fn longer_body(arg: Option<u32>) -> u32 {
+ if let Some(x) = arg {
+ let y = x * x;
+ y * y
+ } else {
+ 13
+ }
+}
+
+fn impure_else(arg: Option<i32>) {
+ let side_effect = || {
+ println!("return 1");
+ 1
+ };
+ let _ = if let Some(x) = arg {
+ x
+ } else {
+ // map_or_else must be suggested
+ side_effect()
+ };
+}
+
+fn test_map_or_else(arg: Option<u32>) {
+ let _ = if let Some(x) = arg {
+ x * x * x * x
+ } else {
+ let mut y = 1;
+ y = (y + 2 / y) / 2;
+ y = (y + 2 / y) / 2;
+ y
+ };
+}
+
+fn negative_tests(arg: Option<u32>) -> u32 {
+ let _ = if let Some(13) = arg { "unlucky" } else { "lucky" };
+ for _ in 0..10 {
+ let _ = if let Some(x) = arg {
+ x
+ } else {
+ continue;
+ };
+ }
+ let _ = if let Some(x) = arg {
+ return x;
+ } else {
+ 5
+ };
+ 7
+}
+
+// #7973
+fn pattern_to_vec(pattern: &str) -> Vec<String> {
+ pattern
+ .trim_matches('/')
+ .split('/')
+ .flat_map(|s| {
+ if let Some(idx) = s.find('.') {
+ vec![s[..idx].to_string(), s[idx..].to_string()]
+ } else {
+ vec![s.to_string()]
+ }
+ })
+ .collect::<Vec<_>>()
+}
+
+enum DummyEnum {
+ One(u8),
+ Two,
+}
+
+// should not warn since there is a complex subpat
+// see #7991
+fn complex_subpat() -> DummyEnum {
+ let x = Some(DummyEnum::One(1));
+ let _ = if let Some(_one @ DummyEnum::One(..)) = x { 1 } else { 2 };
+ DummyEnum::Two
+}
+
+fn main() {
+ let optional = Some(5);
+ let _ = if let Some(x) = optional { x + 2 } else { 5 };
+ let _ = bad1(None);
+ let _ = else_if_option(None);
+ unop_bad(&None, None);
+ let _ = longer_body(None);
+ test_map_or_else(None);
+ let _ = negative_tests(None);
+ let _ = impure_else(None);
+
+ let _ = if let Some(x) = Some(0) {
+ loop {
+ if x == 0 {
+ break x;
+ }
+ }
+ } else {
+ 0
+ };
+
+ // #7576
+ const fn _f(x: Option<u32>) -> u32 {
+ // Don't lint, `map_or` isn't const
+ if let Some(x) = x { x } else { 10 }
+ }
+
+ // #5822
+ let s = String::new();
+ // Don't lint, `Some` branch consumes `s`, but else branch uses `s`
+ let _ = if let Some(x) = Some(0) {
+ let s = s;
+ s.len() + x
+ } else {
+ s.len()
+ };
+
+ let s = String::new();
+ // Lint, both branches immutably borrow `s`.
+ let _ = if let Some(x) = Some(0) { s.len() + x } else { s.len() };
+
+ let s = String::new();
+ // Lint, `Some` branch consumes `s`, but else branch doesn't use `s`.
+ let _ = if let Some(x) = Some(0) {
+ let s = s;
+ s.len() + x
+ } else {
+ 1
+ };
+
+ let s = Some(String::new());
+ // Don't lint, `Some` branch borrows `s`, but else branch consumes `s`
+ let _ = if let Some(x) = &s {
+ x.len()
+ } else {
+ let _s = s;
+ 10
+ };
+
+ let mut s = Some(String::new());
+ // Don't lint, `Some` branch mutably borrows `s`, but else branch also borrows `s`
+ let _ = if let Some(x) = &mut s {
+ x.push_str("test");
+ x.len()
+ } else {
+ let _s = &s;
+ 10
+ };
+
+ async fn _f1(x: u32) -> u32 {
+ x
+ }
+
+ async fn _f2() {
+ // Don't lint. `await` can't be moved into a closure.
+ let _ = if let Some(x) = Some(0) { _f1(x).await } else { 0 };
+ }
+
+ let _ = pattern_to_vec("hello world");
+ let _ = complex_subpat();
+}
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.stderr b/src/tools/clippy/tests/ui/option_if_let_else.stderr
new file mode 100644
index 000000000..daba60600
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_if_let_else.stderr
@@ -0,0 +1,210 @@
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:12:5
+ |
+LL | / if let Some(x) = string {
+LL | | (true, x)
+LL | | } else {
+LL | | (false, "hello")
+LL | | }
+ | |_____^ help: try: `string.map_or((false, "hello"), |x| (true, x))`
+ |
+ = note: `-D clippy::option-if-let-else` implied by `-D warnings`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:30:13
+ |
+LL | let _ = if let Some(s) = *string { s.len() } else { 0 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `string.map_or(0, |s| s.len())`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:31:13
+ |
+LL | let _ = if let Some(s) = &num { s } else { &0 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.as_ref().map_or(&0, |s| s)`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:32:13
+ |
+LL | let _ = if let Some(s) = &mut num {
+ | _____________^
+LL | | *s += 1;
+LL | | s
+LL | | } else {
+LL | | &mut 0
+LL | | };
+ | |_____^
+ |
+help: try
+ |
+LL ~ let _ = num.as_mut().map_or(&mut 0, |s| {
+LL + *s += 1;
+LL + s
+LL ~ });
+ |
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:38:13
+ |
+LL | let _ = if let Some(ref s) = num { s } else { &0 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.as_ref().map_or(&0, |s| s)`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:39:13
+ |
+LL | let _ = if let Some(mut s) = num {
+ | _____________^
+LL | | s += 1;
+LL | | s
+LL | | } else {
+LL | | 0
+LL | | };
+ | |_____^
+ |
+help: try
+ |
+LL ~ let _ = num.map_or(0, |mut s| {
+LL + s += 1;
+LL + s
+LL ~ });
+ |
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:45:13
+ |
+LL | let _ = if let Some(ref mut s) = num {
+ | _____________^
+LL | | *s += 1;
+LL | | s
+LL | | } else {
+LL | | &mut 0
+LL | | };
+ | |_____^
+ |
+help: try
+ |
+LL ~ let _ = num.as_mut().map_or(&mut 0, |s| {
+LL + *s += 1;
+LL + s
+LL ~ });
+ |
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:54:5
+ |
+LL | / if let Some(x) = arg {
+LL | | let y = x * x;
+LL | | y * y
+LL | | } else {
+LL | | 13
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ arg.map_or(13, |x| {
+LL + let y = x * x;
+LL + y * y
+LL + })
+ |
+
+error: use Option::map_or_else instead of an if let/else
+ --> $DIR/option_if_let_else.rs:67:13
+ |
+LL | let _ = if let Some(x) = arg {
+ | _____________^
+LL | | x
+LL | | } else {
+LL | | // map_or_else must be suggested
+LL | | side_effect()
+LL | | };
+ | |_____^ help: try: `arg.map_or_else(|| side_effect(), |x| x)`
+
+error: use Option::map_or_else instead of an if let/else
+ --> $DIR/option_if_let_else.rs:76:13
+ |
+LL | let _ = if let Some(x) = arg {
+ | _____________^
+LL | | x * x * x * x
+LL | | } else {
+LL | | let mut y = 1;
+... |
+LL | | y
+LL | | };
+ | |_____^
+ |
+help: try
+ |
+LL ~ let _ = arg.map_or_else(|| {
+LL + let mut y = 1;
+LL + y = (y + 2 / y) / 2;
+LL + y = (y + 2 / y) / 2;
+LL + y
+LL ~ }, |x| x * x * x * x);
+ |
+
+error: use Option::map_or_else instead of an if let/else
+ --> $DIR/option_if_let_else.rs:109:13
+ |
+LL | / if let Some(idx) = s.find('.') {
+LL | | vec![s[..idx].to_string(), s[idx..].to_string()]
+LL | | } else {
+LL | | vec![s.to_string()]
+LL | | }
+ | |_____________^ help: try: `s.find('.').map_or_else(|| vec![s.to_string()], |idx| vec![s[..idx].to_string(), s[idx..].to_string()])`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:133:13
+ |
+LL | let _ = if let Some(x) = optional { x + 2 } else { 5 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `optional.map_or(5, |x| x + 2)`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:142:13
+ |
+LL | let _ = if let Some(x) = Some(0) {
+ | _____________^
+LL | | loop {
+LL | | if x == 0 {
+LL | | break x;
+... |
+LL | | 0
+LL | | };
+ | |_____^
+ |
+help: try
+ |
+LL ~ let _ = Some(0).map_or(0, |x| loop {
+LL + if x == 0 {
+LL + break x;
+LL + }
+LL ~ });
+ |
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:170:13
+ |
+LL | let _ = if let Some(x) = Some(0) { s.len() + x } else { s.len() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(0).map_or(s.len(), |x| s.len() + x)`
+
+error: use Option::map_or instead of an if let/else
+ --> $DIR/option_if_let_else.rs:174:13
+ |
+LL | let _ = if let Some(x) = Some(0) {
+ | _____________^
+LL | | let s = s;
+LL | | s.len() + x
+LL | | } else {
+LL | | 1
+LL | | };
+ | |_____^
+ |
+help: try
+ |
+LL ~ let _ = Some(0).map_or(1, |x| {
+LL + let s = s;
+LL + s.len() + x
+LL ~ });
+ |
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_map_or_none.fixed b/src/tools/clippy/tests/ui/option_map_or_none.fixed
new file mode 100644
index 000000000..04bfac773
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_or_none.fixed
@@ -0,0 +1,26 @@
+// run-rustfix
+
+#![allow(clippy::bind_instead_of_map)]
+
+fn main() {
+ let opt = Some(1);
+ let r: Result<i32, i32> = Ok(1);
+ let bar = |_| Some(1);
+
+ // Check `OPTION_MAP_OR_NONE`.
+ // Single line case.
+ let _: Option<i32> = opt.map(|x| x + 1);
+ // Multi-line case.
+ #[rustfmt::skip]
+ let _: Option<i32> = opt.map(|x| x + 1);
+ // function returning `Option`
+ let _: Option<i32> = opt.and_then(bar);
+ let _: Option<i32> = opt.and_then(|x| {
+ let offset = 0;
+ let height = x;
+ Some(offset + height)
+ });
+
+ // Check `RESULT_MAP_OR_INTO_OPTION`.
+ let _: Option<i32> = r.ok();
+}
diff --git a/src/tools/clippy/tests/ui/option_map_or_none.rs b/src/tools/clippy/tests/ui/option_map_or_none.rs
new file mode 100644
index 000000000..bb84f8a48
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_or_none.rs
@@ -0,0 +1,28 @@
+// run-rustfix
+
+#![allow(clippy::bind_instead_of_map)]
+
+fn main() {
+ let opt = Some(1);
+ let r: Result<i32, i32> = Ok(1);
+ let bar = |_| Some(1);
+
+ // Check `OPTION_MAP_OR_NONE`.
+ // Single line case.
+ let _: Option<i32> = opt.map_or(None, |x| Some(x + 1));
+ // Multi-line case.
+ #[rustfmt::skip]
+ let _: Option<i32> = opt.map_or(None, |x| {
+ Some(x + 1)
+ });
+ // function returning `Option`
+ let _: Option<i32> = opt.map_or(None, bar);
+ let _: Option<i32> = opt.map_or(None, |x| {
+ let offset = 0;
+ let height = x;
+ Some(offset + height)
+ });
+
+ // Check `RESULT_MAP_OR_INTO_OPTION`.
+ let _: Option<i32> = r.map_or(None, Some);
+}
diff --git a/src/tools/clippy/tests/ui/option_map_or_none.stderr b/src/tools/clippy/tests/ui/option_map_or_none.stderr
new file mode 100644
index 000000000..7befcb890
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_or_none.stderr
@@ -0,0 +1,53 @@
+error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `map(..)` instead
+ --> $DIR/option_map_or_none.rs:12:26
+ |
+LL | let _: Option<i32> = opt.map_or(None, |x| Some(x + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try using `map` instead: `opt.map(|x| x + 1)`
+ |
+ = note: `-D clippy::option-map-or-none` implied by `-D warnings`
+
+error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `map(..)` instead
+ --> $DIR/option_map_or_none.rs:15:26
+ |
+LL | let _: Option<i32> = opt.map_or(None, |x| {
+ | __________________________^
+LL | | Some(x + 1)
+LL | | });
+ | |_________________________^ help: try using `map` instead: `opt.map(|x| x + 1)`
+
+error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `and_then(..)` instead
+ --> $DIR/option_map_or_none.rs:19:26
+ |
+LL | let _: Option<i32> = opt.map_or(None, bar);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try using `and_then` instead: `opt.and_then(bar)`
+
+error: called `map_or(None, ..)` on an `Option` value. This can be done more directly by calling `and_then(..)` instead
+ --> $DIR/option_map_or_none.rs:20:26
+ |
+LL | let _: Option<i32> = opt.map_or(None, |x| {
+ | __________________________^
+LL | | let offset = 0;
+LL | | let height = x;
+LL | | Some(offset + height)
+LL | | });
+ | |______^
+ |
+help: try using `and_then` instead
+ |
+LL ~ let _: Option<i32> = opt.and_then(|x| {
+LL + let offset = 0;
+LL + let height = x;
+LL + Some(offset + height)
+LL ~ });
+ |
+
+error: called `map_or(None, Some)` on a `Result` value. This can be done more directly by calling `ok()` instead
+ --> $DIR/option_map_or_none.rs:27:26
+ |
+LL | let _: Option<i32> = r.map_or(None, Some);
+ | ^^^^^^^^^^^^^^^^^^^^ help: try using `ok` instead: `r.ok()`
+ |
+ = note: `-D clippy::result-map-or-into-option` implied by `-D warnings`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.fixed b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.fixed
new file mode 100644
index 000000000..1290bd8ef
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.fixed
@@ -0,0 +1,88 @@
+// run-rustfix
+
+#![warn(clippy::option_map_unit_fn)]
+#![allow(unused)]
+#![allow(clippy::unnecessary_wraps)]
+
+fn do_nothing<T>(_: T) {}
+
+fn diverge<T>(_: T) -> ! {
+ panic!()
+}
+
+fn plus_one(value: usize) -> usize {
+ value + 1
+}
+
+fn option() -> Option<usize> {
+ Some(10)
+}
+
+struct HasOption {
+ field: Option<usize>,
+}
+
+impl HasOption {
+ fn do_option_nothing(&self, value: usize) {}
+
+ fn do_option_plus_one(&self, value: usize) -> usize {
+ value + 1
+ }
+}
+#[rustfmt::skip]
+fn option_map_unit_fn() {
+ let x = HasOption { field: Some(10) };
+
+ x.field.map(plus_one);
+ let _ : Option<()> = x.field.map(do_nothing);
+
+ if let Some(x_field) = x.field { do_nothing(x_field) }
+
+ if let Some(x_field) = x.field { do_nothing(x_field) }
+
+ if let Some(x_field) = x.field { diverge(x_field) }
+
+ let captured = 10;
+ if let Some(value) = x.field { do_nothing(value + captured) };
+ let _ : Option<()> = x.field.map(|value| do_nothing(value + captured));
+
+ if let Some(value) = x.field { x.do_option_nothing(value + captured) }
+
+ if let Some(value) = x.field { x.do_option_plus_one(value + captured); }
+
+
+ if let Some(value) = x.field { do_nothing(value + captured) }
+
+ if let Some(value) = x.field { do_nothing(value + captured) }
+
+ if let Some(value) = x.field { do_nothing(value + captured); }
+
+ if let Some(value) = x.field { do_nothing(value + captured); }
+
+
+ if let Some(value) = x.field { diverge(value + captured) }
+
+ if let Some(value) = x.field { diverge(value + captured) }
+
+ if let Some(value) = x.field { diverge(value + captured); }
+
+ if let Some(value) = x.field { diverge(value + captured); }
+
+
+ x.field.map(|value| plus_one(value + captured));
+ x.field.map(|value| { plus_one(value + captured) });
+ if let Some(value) = x.field { let y = plus_one(value + captured); }
+
+ if let Some(value) = x.field { plus_one(value + captured); }
+
+ if let Some(value) = x.field { plus_one(value + captured); }
+
+
+ if let Some(ref value) = x.field { do_nothing(value + captured) }
+
+ if let Some(a) = option() { do_nothing(a) }
+
+ if let Some(value) = option() { println!("{:?}", value) }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.rs b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.rs
new file mode 100644
index 000000000..f3e5b62c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.rs
@@ -0,0 +1,88 @@
+// run-rustfix
+
+#![warn(clippy::option_map_unit_fn)]
+#![allow(unused)]
+#![allow(clippy::unnecessary_wraps)]
+
+fn do_nothing<T>(_: T) {}
+
+fn diverge<T>(_: T) -> ! {
+ panic!()
+}
+
+fn plus_one(value: usize) -> usize {
+ value + 1
+}
+
+fn option() -> Option<usize> {
+ Some(10)
+}
+
+struct HasOption {
+ field: Option<usize>,
+}
+
+impl HasOption {
+ fn do_option_nothing(&self, value: usize) {}
+
+ fn do_option_plus_one(&self, value: usize) -> usize {
+ value + 1
+ }
+}
+#[rustfmt::skip]
+fn option_map_unit_fn() {
+ let x = HasOption { field: Some(10) };
+
+ x.field.map(plus_one);
+ let _ : Option<()> = x.field.map(do_nothing);
+
+ x.field.map(do_nothing);
+
+ x.field.map(do_nothing);
+
+ x.field.map(diverge);
+
+ let captured = 10;
+ if let Some(value) = x.field { do_nothing(value + captured) };
+ let _ : Option<()> = x.field.map(|value| do_nothing(value + captured));
+
+ x.field.map(|value| x.do_option_nothing(value + captured));
+
+ x.field.map(|value| { x.do_option_plus_one(value + captured); });
+
+
+ x.field.map(|value| do_nothing(value + captured));
+
+ x.field.map(|value| { do_nothing(value + captured) });
+
+ x.field.map(|value| { do_nothing(value + captured); });
+
+ x.field.map(|value| { { do_nothing(value + captured); } });
+
+
+ x.field.map(|value| diverge(value + captured));
+
+ x.field.map(|value| { diverge(value + captured) });
+
+ x.field.map(|value| { diverge(value + captured); });
+
+ x.field.map(|value| { { diverge(value + captured); } });
+
+
+ x.field.map(|value| plus_one(value + captured));
+ x.field.map(|value| { plus_one(value + captured) });
+ x.field.map(|value| { let y = plus_one(value + captured); });
+
+ x.field.map(|value| { plus_one(value + captured); });
+
+ x.field.map(|value| { { plus_one(value + captured); } });
+
+
+ x.field.map(|ref value| { do_nothing(value + captured) });
+
+ option().map(do_nothing);
+
+ option().map(|value| println!("{:?}", value));
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr
new file mode 100644
index 000000000..ab2a294a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr
@@ -0,0 +1,156 @@
+error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:39:5
+ |
+LL | x.field.map(do_nothing);
+ | ^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(x_field) = x.field { do_nothing(x_field) }`
+ |
+ = note: `-D clippy::option-map-unit-fn` implied by `-D warnings`
+
+error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:41:5
+ |
+LL | x.field.map(do_nothing);
+ | ^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(x_field) = x.field { do_nothing(x_field) }`
+
+error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:43:5
+ |
+LL | x.field.map(diverge);
+ | ^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(x_field) = x.field { diverge(x_field) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:49:5
+ |
+LL | x.field.map(|value| x.do_option_nothing(value + captured));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { x.do_option_nothing(value + captured) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:51:5
+ |
+LL | x.field.map(|value| { x.do_option_plus_one(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { x.do_option_plus_one(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:54:5
+ |
+LL | x.field.map(|value| do_nothing(value + captured));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { do_nothing(value + captured) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:56:5
+ |
+LL | x.field.map(|value| { do_nothing(value + captured) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { do_nothing(value + captured) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:58:5
+ |
+LL | x.field.map(|value| { do_nothing(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { do_nothing(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:60:5
+ |
+LL | x.field.map(|value| { { do_nothing(value + captured); } });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { do_nothing(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:63:5
+ |
+LL | x.field.map(|value| diverge(value + captured));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { diverge(value + captured) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:65:5
+ |
+LL | x.field.map(|value| { diverge(value + captured) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { diverge(value + captured) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:67:5
+ |
+LL | x.field.map(|value| { diverge(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { diverge(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:69:5
+ |
+LL | x.field.map(|value| { { diverge(value + captured); } });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { diverge(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:74:5
+ |
+LL | x.field.map(|value| { let y = plus_one(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { let y = plus_one(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:76:5
+ |
+LL | x.field.map(|value| { plus_one(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { plus_one(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:78:5
+ |
+LL | x.field.map(|value| { { plus_one(value + captured); } });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = x.field { plus_one(value + captured); }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:81:5
+ |
+LL | x.field.map(|ref value| { do_nothing(value + captured) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(ref value) = x.field { do_nothing(value + captured) }`
+
+error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:83:5
+ |
+LL | option().map(do_nothing);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(a) = option() { do_nothing(a) }`
+
+error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/option_map_unit_fn_fixable.rs:85:5
+ |
+LL | option().map(|value| println!("{:?}", value));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Some(value) = option() { println!("{:?}", value) }`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.rs b/src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.rs
new file mode 100644
index 000000000..20e6c15b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.rs
@@ -0,0 +1,39 @@
+#![warn(clippy::option_map_unit_fn)]
+#![allow(unused)]
+
+fn do_nothing<T>(_: T) {}
+
+fn diverge<T>(_: T) -> ! {
+ panic!()
+}
+
+fn plus_one(value: usize) -> usize {
+ value + 1
+}
+
+#[rustfmt::skip]
+fn option_map_unit_fn() {
+
+ x.field.map(|value| { do_nothing(value); do_nothing(value) });
+
+ x.field.map(|value| if value > 0 { do_nothing(value); do_nothing(value) });
+
+ // Suggestion for the let block should be `{ ... }` as it's too difficult to build a
+ // proper suggestion for these cases
+ x.field.map(|value| {
+ do_nothing(value);
+ do_nothing(value)
+ });
+ x.field.map(|value| { do_nothing(value); do_nothing(value); });
+
+ // The following should suggest `if let Some(_X) ...` as it's difficult to generate a proper let variable name for them
+ Some(42).map(diverge);
+ "12".parse::<i32>().ok().map(diverge);
+ Some(plus_one(1)).map(do_nothing);
+
+ // Should suggest `if let Some(_y) ...` to not override the existing `y` variable
+ let y = Some(42);
+ y.map(do_nothing);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.stderr b/src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.stderr
new file mode 100644
index 000000000..a53f5889c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_map_unit_fn_unfixable.stderr
@@ -0,0 +1,27 @@
+error[E0425]: cannot find value `x` in this scope
+ --> $DIR/option_map_unit_fn_unfixable.rs:17:5
+ |
+LL | x.field.map(|value| { do_nothing(value); do_nothing(value) });
+ | ^ not found in this scope
+
+error[E0425]: cannot find value `x` in this scope
+ --> $DIR/option_map_unit_fn_unfixable.rs:19:5
+ |
+LL | x.field.map(|value| if value > 0 { do_nothing(value); do_nothing(value) });
+ | ^ not found in this scope
+
+error[E0425]: cannot find value `x` in this scope
+ --> $DIR/option_map_unit_fn_unfixable.rs:23:5
+ |
+LL | x.field.map(|value| {
+ | ^ not found in this scope
+
+error[E0425]: cannot find value `x` in this scope
+ --> $DIR/option_map_unit_fn_unfixable.rs:27:5
+ |
+LL | x.field.map(|value| { do_nothing(value); do_nothing(value); });
+ | ^ not found in this scope
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0425`.
diff --git a/src/tools/clippy/tests/ui/option_option.rs b/src/tools/clippy/tests/ui/option_option.rs
new file mode 100644
index 000000000..2faab9e03
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_option.rs
@@ -0,0 +1,89 @@
+#![deny(clippy::option_option)]
+#![allow(clippy::unnecessary_wraps)]
+
+const C: Option<Option<i32>> = None;
+static S: Option<Option<i32>> = None;
+
+fn input(_: Option<Option<u8>>) {}
+
+fn output() -> Option<Option<u8>> {
+ None
+}
+
+fn output_nested() -> Vec<Option<Option<u8>>> {
+ vec![None]
+}
+
+// The lint only generates one warning for this
+fn output_nested_nested() -> Option<Option<Option<u8>>> {
+ None
+}
+
+struct Struct {
+ x: Option<Option<u8>>,
+}
+
+impl Struct {
+ fn struct_fn() -> Option<Option<u8>> {
+ None
+ }
+}
+
+trait Trait {
+ fn trait_fn() -> Option<Option<u8>>;
+}
+
+enum Enum {
+ Tuple(Option<Option<u8>>),
+ Struct { x: Option<Option<u8>> },
+}
+
+// The lint allows this
+type OptionOption = Option<Option<u32>>;
+
+// The lint allows this
+fn output_type_alias() -> OptionOption {
+ None
+}
+
+// The lint allows this
+impl Trait for Struct {
+ fn trait_fn() -> Option<Option<u8>> {
+ None
+ }
+}
+
+fn main() {
+ input(None);
+ output();
+ output_nested();
+
+ // The lint allows this
+ let local: Option<Option<u8>> = None;
+
+ // The lint allows this
+ let expr = Some(Some(true));
+}
+
+extern crate serde;
+mod issue_4298 {
+ use serde::{Deserialize, Deserializer, Serialize};
+ use std::borrow::Cow;
+
+ #[derive(Serialize, Deserialize)]
+ struct Foo<'a> {
+ #[serde(deserialize_with = "func")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[serde(default)]
+ #[serde(borrow)]
+ foo: Option<Option<Cow<'a, str>>>,
+ }
+
+ #[allow(clippy::option_option)]
+ fn func<'a, D>(_: D) -> Result<Option<Option<Cow<'a, str>>>, D::Error>
+ where
+ D: Deserializer<'a>,
+ {
+ Ok(Some(Some(Cow::Borrowed("hi"))))
+ }
+}
diff --git a/src/tools/clippy/tests/ui/option_option.stderr b/src/tools/clippy/tests/ui/option_option.stderr
new file mode 100644
index 000000000..a925bb35b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_option.stderr
@@ -0,0 +1,80 @@
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:4:10
+ |
+LL | const C: Option<Option<i32>> = None;
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/option_option.rs:1:9
+ |
+LL | #![deny(clippy::option_option)]
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:5:11
+ |
+LL | static S: Option<Option<i32>> = None;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:7:13
+ |
+LL | fn input(_: Option<Option<u8>>) {}
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:9:16
+ |
+LL | fn output() -> Option<Option<u8>> {
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:13:27
+ |
+LL | fn output_nested() -> Vec<Option<Option<u8>>> {
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:18:30
+ |
+LL | fn output_nested_nested() -> Option<Option<Option<u8>>> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:23:8
+ |
+LL | x: Option<Option<u8>>,
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:27:23
+ |
+LL | fn struct_fn() -> Option<Option<u8>> {
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:33:22
+ |
+LL | fn trait_fn() -> Option<Option<u8>>;
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:37:11
+ |
+LL | Tuple(Option<Option<u8>>),
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:38:17
+ |
+LL | Struct { x: Option<Option<u8>> },
+ | ^^^^^^^^^^^^^^^^^^
+
+error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
+ --> $DIR/option_option.rs:79:14
+ |
+LL | foo: Option<Option<Cow<'a, str>>>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/option_take_on_temporary.fixed b/src/tools/clippy/tests/ui/option_take_on_temporary.fixed
new file mode 100644
index 000000000..29691e816
--- /dev/null
+++ b/src/tools/clippy/tests/ui/option_take_on_temporary.fixed
@@ -0,0 +1,15 @@
+// run-rustfix
+
+fn main() {
+ println!("Testing non erroneous option_take_on_temporary");
+ let mut option = Some(1);
+ let _ = Box::new(move || option.take().unwrap());
+
+ println!("Testing non erroneous option_take_on_temporary");
+ let x = Some(3);
+ x.as_ref();
+
+ println!("Testing erroneous option_take_on_temporary");
+ let x = Some(3);
+ x.as_ref();
+}
diff --git a/src/tools/clippy/tests/ui/or_fun_call.fixed b/src/tools/clippy/tests/ui/or_fun_call.fixed
new file mode 100644
index 000000000..fdb08d953
--- /dev/null
+++ b/src/tools/clippy/tests/ui/or_fun_call.fixed
@@ -0,0 +1,229 @@
+// run-rustfix
+
+#![warn(clippy::or_fun_call)]
+#![allow(dead_code)]
+#![allow(clippy::unnecessary_wraps, clippy::borrow_as_ptr)]
+
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::time::Duration;
+
+/// Checks implementation of the `OR_FUN_CALL` lint.
+fn or_fun_call() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo
+ }
+ }
+
+ struct FakeDefault;
+ impl FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ impl Default for FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ enum Enum {
+ A(i32),
+ }
+
+ fn make<T>() -> T {
+ unimplemented!();
+ }
+
+ let with_enum = Some(Enum::A(1));
+ with_enum.unwrap_or(Enum::A(5));
+
+ let with_const_fn = Some(Duration::from_secs(1));
+ with_const_fn.unwrap_or(Duration::from_secs(5));
+
+ let with_constructor = Some(vec![1]);
+ with_constructor.unwrap_or_else(make);
+
+ let with_new = Some(vec![1]);
+ with_new.unwrap_or_default();
+
+ let with_const_args = Some(vec![1]);
+ with_const_args.unwrap_or_else(|| Vec::with_capacity(12));
+
+ let with_err: Result<_, ()> = Ok(vec![1]);
+ with_err.unwrap_or_else(|_| make());
+
+ let with_err_args: Result<_, ()> = Ok(vec![1]);
+ with_err_args.unwrap_or_else(|_| Vec::with_capacity(12));
+
+ let with_default_trait = Some(1);
+ with_default_trait.unwrap_or_default();
+
+ let with_default_type = Some(1);
+ with_default_type.unwrap_or_default();
+
+ let self_default = None::<FakeDefault>;
+ self_default.unwrap_or_else(<FakeDefault>::default);
+
+ let real_default = None::<FakeDefault>;
+ real_default.unwrap_or_default();
+
+ let with_vec = Some(vec![1]);
+ with_vec.unwrap_or_default();
+
+ let without_default = Some(Foo);
+ without_default.unwrap_or_else(Foo::new);
+
+ let mut map = HashMap::<u64, String>::new();
+ map.entry(42).or_insert(String::new());
+
+ let mut map_vec = HashMap::<u64, Vec<i32>>::new();
+ map_vec.entry(42).or_insert(vec![]);
+
+ let mut btree = BTreeMap::<u64, String>::new();
+ btree.entry(42).or_insert(String::new());
+
+ let mut btree_vec = BTreeMap::<u64, Vec<i32>>::new();
+ btree_vec.entry(42).or_insert(vec![]);
+
+ let stringy = Some(String::from(""));
+ let _ = stringy.unwrap_or_else(|| "".to_owned());
+
+ let opt = Some(1);
+ let hello = "Hello";
+ let _ = opt.ok_or(format!("{} world.", hello));
+
+ // index
+ let map = HashMap::<u64, u64>::new();
+ let _ = Some(1).unwrap_or_else(|| map[&1]);
+ let map = BTreeMap::<u64, u64>::new();
+ let _ = Some(1).unwrap_or_else(|| map[&1]);
+ // don't lint index vec
+ let vec = vec![1];
+ let _ = Some(1).unwrap_or(vec[1]);
+}
+
+struct Foo(u8);
+struct Bar(String, Duration);
+#[rustfmt::skip]
+fn test_or_with_ctors() {
+ let opt = Some(1);
+ let opt_opt = Some(Some(1));
+ // we also test for const promotion; this makes sure we don't hit that
+ let two = 2;
+
+ let _ = opt_opt.unwrap_or(Some(2));
+ let _ = opt_opt.unwrap_or(Some(two));
+ let _ = opt.ok_or(Some(2));
+ let _ = opt.ok_or(Some(two));
+ let _ = opt.ok_or(Foo(2));
+ let _ = opt.ok_or(Foo(two));
+ let _ = opt.or(Some(2));
+ let _ = opt.or(Some(two));
+
+ let _ = Some("a".to_string()).or_else(|| Some("b".to_string()));
+
+ let b = "b".to_string();
+ let _ = Some(Bar("a".to_string(), Duration::from_secs(1)))
+ .or(Some(Bar(b, Duration::from_secs(2))));
+
+ let vec = vec!["foo"];
+ let _ = opt.ok_or(vec.len());
+
+ let array = ["foo"];
+ let _ = opt.ok_or(array.len());
+
+ let slice = &["foo"][..];
+ let _ = opt.ok_or(slice.len());
+
+ let string = "foo";
+ let _ = opt.ok_or(string.len());
+}
+
+// Issue 4514 - early return
+fn f() -> Option<()> {
+ let a = Some(1);
+ let b = 1i32;
+
+ let _ = a.unwrap_or(b.checked_mul(3)?.min(240));
+
+ Some(())
+}
+
+mod issue6675 {
+ unsafe fn ptr_to_ref<'a, T>(p: *const T) -> &'a T {
+ #[allow(unused)]
+ let x = vec![0; 1000]; // future-proofing, make this function expensive.
+ &*p
+ }
+
+ unsafe fn foo() {
+ let s = "test".to_owned();
+ let s = &s as *const _;
+ None.unwrap_or_else(|| ptr_to_ref(s));
+ }
+
+ fn bar() {
+ let s = "test".to_owned();
+ let s = &s as *const _;
+ None.unwrap_or_else(|| unsafe { ptr_to_ref(s) });
+ #[rustfmt::skip]
+ None.unwrap_or_else(|| unsafe { ptr_to_ref(s) });
+ }
+}
+
+mod issue8239 {
+ fn more_than_max_suggestion_highest_lines_0() {
+ let frames = Vec::new();
+ frames
+ .iter()
+ .map(|f: &String| f.to_lowercase())
+ .reduce(|mut acc, f| {
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or_default();
+ }
+
+ fn more_to_max_suggestion_highest_lines_1() {
+ let frames = Vec::new();
+ let iter = frames.iter();
+ iter.map(|f: &String| f.to_lowercase())
+ .reduce(|mut acc, f| {
+ let _ = "";
+ let _ = "";
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or_default();
+ }
+
+ fn equal_to_max_suggestion_highest_lines() {
+ let frames = Vec::new();
+ let iter = frames.iter();
+ iter.map(|f: &String| f.to_lowercase())
+ .reduce(|mut acc, f| {
+ let _ = "";
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or_default();
+ }
+
+ fn less_than_max_suggestion_highest_lines() {
+ let frames = Vec::new();
+ let iter = frames.iter();
+ let map = iter.map(|f: &String| f.to_lowercase());
+ map.reduce(|mut acc, f| {
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or_default();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/or_fun_call.rs b/src/tools/clippy/tests/ui/or_fun_call.rs
new file mode 100644
index 000000000..57ab5f03e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/or_fun_call.rs
@@ -0,0 +1,229 @@
+// run-rustfix
+
+#![warn(clippy::or_fun_call)]
+#![allow(dead_code)]
+#![allow(clippy::unnecessary_wraps, clippy::borrow_as_ptr)]
+
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::time::Duration;
+
+/// Checks implementation of the `OR_FUN_CALL` lint.
+fn or_fun_call() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo
+ }
+ }
+
+ struct FakeDefault;
+ impl FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ impl Default for FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ enum Enum {
+ A(i32),
+ }
+
+ fn make<T>() -> T {
+ unimplemented!();
+ }
+
+ let with_enum = Some(Enum::A(1));
+ with_enum.unwrap_or(Enum::A(5));
+
+ let with_const_fn = Some(Duration::from_secs(1));
+ with_const_fn.unwrap_or(Duration::from_secs(5));
+
+ let with_constructor = Some(vec![1]);
+ with_constructor.unwrap_or(make());
+
+ let with_new = Some(vec![1]);
+ with_new.unwrap_or(Vec::new());
+
+ let with_const_args = Some(vec![1]);
+ with_const_args.unwrap_or(Vec::with_capacity(12));
+
+ let with_err: Result<_, ()> = Ok(vec![1]);
+ with_err.unwrap_or(make());
+
+ let with_err_args: Result<_, ()> = Ok(vec![1]);
+ with_err_args.unwrap_or(Vec::with_capacity(12));
+
+ let with_default_trait = Some(1);
+ with_default_trait.unwrap_or(Default::default());
+
+ let with_default_type = Some(1);
+ with_default_type.unwrap_or(u64::default());
+
+ let self_default = None::<FakeDefault>;
+ self_default.unwrap_or(<FakeDefault>::default());
+
+ let real_default = None::<FakeDefault>;
+ real_default.unwrap_or(<FakeDefault as Default>::default());
+
+ let with_vec = Some(vec![1]);
+ with_vec.unwrap_or(vec![]);
+
+ let without_default = Some(Foo);
+ without_default.unwrap_or(Foo::new());
+
+ let mut map = HashMap::<u64, String>::new();
+ map.entry(42).or_insert(String::new());
+
+ let mut map_vec = HashMap::<u64, Vec<i32>>::new();
+ map_vec.entry(42).or_insert(vec![]);
+
+ let mut btree = BTreeMap::<u64, String>::new();
+ btree.entry(42).or_insert(String::new());
+
+ let mut btree_vec = BTreeMap::<u64, Vec<i32>>::new();
+ btree_vec.entry(42).or_insert(vec![]);
+
+ let stringy = Some(String::from(""));
+ let _ = stringy.unwrap_or("".to_owned());
+
+ let opt = Some(1);
+ let hello = "Hello";
+ let _ = opt.ok_or(format!("{} world.", hello));
+
+ // index
+ let map = HashMap::<u64, u64>::new();
+ let _ = Some(1).unwrap_or(map[&1]);
+ let map = BTreeMap::<u64, u64>::new();
+ let _ = Some(1).unwrap_or(map[&1]);
+ // don't lint index vec
+ let vec = vec![1];
+ let _ = Some(1).unwrap_or(vec[1]);
+}
+
+struct Foo(u8);
+struct Bar(String, Duration);
+#[rustfmt::skip]
+fn test_or_with_ctors() {
+ let opt = Some(1);
+ let opt_opt = Some(Some(1));
+ // we also test for const promotion; this makes sure we don't hit that
+ let two = 2;
+
+ let _ = opt_opt.unwrap_or(Some(2));
+ let _ = opt_opt.unwrap_or(Some(two));
+ let _ = opt.ok_or(Some(2));
+ let _ = opt.ok_or(Some(two));
+ let _ = opt.ok_or(Foo(2));
+ let _ = opt.ok_or(Foo(two));
+ let _ = opt.or(Some(2));
+ let _ = opt.or(Some(two));
+
+ let _ = Some("a".to_string()).or(Some("b".to_string()));
+
+ let b = "b".to_string();
+ let _ = Some(Bar("a".to_string(), Duration::from_secs(1)))
+ .or(Some(Bar(b, Duration::from_secs(2))));
+
+ let vec = vec!["foo"];
+ let _ = opt.ok_or(vec.len());
+
+ let array = ["foo"];
+ let _ = opt.ok_or(array.len());
+
+ let slice = &["foo"][..];
+ let _ = opt.ok_or(slice.len());
+
+ let string = "foo";
+ let _ = opt.ok_or(string.len());
+}
+
+// Issue 4514 - early return
+fn f() -> Option<()> {
+ let a = Some(1);
+ let b = 1i32;
+
+ let _ = a.unwrap_or(b.checked_mul(3)?.min(240));
+
+ Some(())
+}
+
+mod issue6675 {
+ unsafe fn ptr_to_ref<'a, T>(p: *const T) -> &'a T {
+ #[allow(unused)]
+ let x = vec![0; 1000]; // future-proofing, make this function expensive.
+ &*p
+ }
+
+ unsafe fn foo() {
+ let s = "test".to_owned();
+ let s = &s as *const _;
+ None.unwrap_or(ptr_to_ref(s));
+ }
+
+ fn bar() {
+ let s = "test".to_owned();
+ let s = &s as *const _;
+ None.unwrap_or(unsafe { ptr_to_ref(s) });
+ #[rustfmt::skip]
+ None.unwrap_or( unsafe { ptr_to_ref(s) } );
+ }
+}
+
+mod issue8239 {
+ fn more_than_max_suggestion_highest_lines_0() {
+ let frames = Vec::new();
+ frames
+ .iter()
+ .map(|f: &String| f.to_lowercase())
+ .reduce(|mut acc, f| {
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or(String::new());
+ }
+
+ fn more_to_max_suggestion_highest_lines_1() {
+ let frames = Vec::new();
+ let iter = frames.iter();
+ iter.map(|f: &String| f.to_lowercase())
+ .reduce(|mut acc, f| {
+ let _ = "";
+ let _ = "";
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or(String::new());
+ }
+
+ fn equal_to_max_suggestion_highest_lines() {
+ let frames = Vec::new();
+ let iter = frames.iter();
+ iter.map(|f: &String| f.to_lowercase())
+ .reduce(|mut acc, f| {
+ let _ = "";
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or(String::new());
+ }
+
+ fn less_than_max_suggestion_highest_lines() {
+ let frames = Vec::new();
+ let iter = frames.iter();
+ let map = iter.map(|f: &String| f.to_lowercase());
+ map.reduce(|mut acc, f| {
+ acc.push_str(&f);
+ acc
+ })
+ .unwrap_or(String::new());
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/or_fun_call.stderr b/src/tools/clippy/tests/ui/or_fun_call.stderr
new file mode 100644
index 000000000..4c5938ab8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/or_fun_call.stderr
@@ -0,0 +1,136 @@
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:49:22
+ |
+LL | with_constructor.unwrap_or(make());
+ | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(make)`
+ |
+ = note: `-D clippy::or-fun-call` implied by `-D warnings`
+
+error: use of `unwrap_or` followed by a call to `new`
+ --> $DIR/or_fun_call.rs:52:14
+ |
+LL | with_new.unwrap_or(Vec::new());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:55:21
+ |
+LL | with_const_args.unwrap_or(Vec::with_capacity(12));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| Vec::with_capacity(12))`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:58:14
+ |
+LL | with_err.unwrap_or(make());
+ | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| make())`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:61:19
+ |
+LL | with_err_args.unwrap_or(Vec::with_capacity(12));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| Vec::with_capacity(12))`
+
+error: use of `unwrap_or` followed by a call to `default`
+ --> $DIR/or_fun_call.rs:64:24
+ |
+LL | with_default_trait.unwrap_or(Default::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a call to `default`
+ --> $DIR/or_fun_call.rs:67:23
+ |
+LL | with_default_type.unwrap_or(u64::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:70:18
+ |
+LL | self_default.unwrap_or(<FakeDefault>::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(<FakeDefault>::default)`
+
+error: use of `unwrap_or` followed by a call to `default`
+ --> $DIR/or_fun_call.rs:73:18
+ |
+LL | real_default.unwrap_or(<FakeDefault as Default>::default());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a call to `new`
+ --> $DIR/or_fun_call.rs:76:14
+ |
+LL | with_vec.unwrap_or(vec![]);
+ | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:79:21
+ |
+LL | without_default.unwrap_or(Foo::new());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(Foo::new)`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:94:21
+ |
+LL | let _ = stringy.unwrap_or("".to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "".to_owned())`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:102:21
+ |
+LL | let _ = Some(1).unwrap_or(map[&1]);
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| map[&1])`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:104:21
+ |
+LL | let _ = Some(1).unwrap_or(map[&1]);
+ | ^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| map[&1])`
+
+error: use of `or` followed by a function call
+ --> $DIR/or_fun_call.rs:128:35
+ |
+LL | let _ = Some("a".to_string()).or(Some("b".to_string()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_else(|| Some("b".to_string()))`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:167:14
+ |
+LL | None.unwrap_or(ptr_to_ref(s));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| ptr_to_ref(s))`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:173:14
+ |
+LL | None.unwrap_or(unsafe { ptr_to_ref(s) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/or_fun_call.rs:175:14
+ |
+LL | None.unwrap_or( unsafe { ptr_to_ref(s) } );
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })`
+
+error: use of `unwrap_or` followed by a call to `new`
+ --> $DIR/or_fun_call.rs:189:14
+ |
+LL | .unwrap_or(String::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a call to `new`
+ --> $DIR/or_fun_call.rs:202:14
+ |
+LL | .unwrap_or(String::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a call to `new`
+ --> $DIR/or_fun_call.rs:214:14
+ |
+LL | .unwrap_or(String::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: use of `unwrap_or` followed by a call to `new`
+ --> $DIR/or_fun_call.rs:225:10
+ |
+LL | .unwrap_or(String::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/or_then_unwrap.fixed b/src/tools/clippy/tests/ui/or_then_unwrap.fixed
new file mode 100644
index 000000000..844cc4b7a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/or_then_unwrap.fixed
@@ -0,0 +1,52 @@
+// run-rustfix
+
+#![warn(clippy::or_then_unwrap)]
+#![allow(clippy::map_identity, clippy::let_unit_value)]
+
+struct SomeStruct;
+impl SomeStruct {
+ fn or(self, _: Option<Self>) -> Self {
+ self
+ }
+ fn unwrap(&self) {}
+}
+
+struct SomeOtherStruct;
+impl SomeOtherStruct {
+ fn or(self) -> Self {
+ self
+ }
+ fn unwrap(&self) {}
+}
+
+fn main() {
+ let option: Option<&str> = None;
+ let _ = option.unwrap_or("fallback"); // should trigger lint
+
+ let result: Result<&str, &str> = Err("Error");
+ let _ = result.unwrap_or("fallback"); // should trigger lint
+
+ // as part of a method chain
+ let option: Option<&str> = None;
+ let _ = option.map(|v| v).unwrap_or("fallback").to_string().chars(); // should trigger lint
+
+ // Not Option/Result
+ let instance = SomeStruct {};
+ let _ = instance.or(Some(SomeStruct {})).unwrap(); // should not trigger lint
+
+ // or takes no argument
+ let instance = SomeOtherStruct {};
+ let _ = instance.or().unwrap(); // should not trigger lint and should not panic
+
+ // None in or
+ let option: Option<&str> = None;
+ let _ = option.or(None).unwrap(); // should not trigger lint
+
+ // Not Err in or
+ let result: Result<&str, &str> = Err("Error");
+ let _ = result.or::<&str>(Err("Other Error")).unwrap(); // should not trigger lint
+
+ // other function between
+ let option: Option<&str> = None;
+ let _ = option.or(Some("fallback")).map(|v| v).unwrap(); // should not trigger lint
+}
diff --git a/src/tools/clippy/tests/ui/or_then_unwrap.rs b/src/tools/clippy/tests/ui/or_then_unwrap.rs
new file mode 100644
index 000000000..1528ef9be
--- /dev/null
+++ b/src/tools/clippy/tests/ui/or_then_unwrap.rs
@@ -0,0 +1,52 @@
+// run-rustfix
+
+#![warn(clippy::or_then_unwrap)]
+#![allow(clippy::map_identity, clippy::let_unit_value)]
+
+struct SomeStruct;
+impl SomeStruct {
+ fn or(self, _: Option<Self>) -> Self {
+ self
+ }
+ fn unwrap(&self) {}
+}
+
+struct SomeOtherStruct;
+impl SomeOtherStruct {
+ fn or(self) -> Self {
+ self
+ }
+ fn unwrap(&self) {}
+}
+
+fn main() {
+ let option: Option<&str> = None;
+ let _ = option.or(Some("fallback")).unwrap(); // should trigger lint
+
+ let result: Result<&str, &str> = Err("Error");
+ let _ = result.or::<&str>(Ok("fallback")).unwrap(); // should trigger lint
+
+ // as part of a method chain
+ let option: Option<&str> = None;
+ let _ = option.map(|v| v).or(Some("fallback")).unwrap().to_string().chars(); // should trigger lint
+
+ // Not Option/Result
+ let instance = SomeStruct {};
+ let _ = instance.or(Some(SomeStruct {})).unwrap(); // should not trigger lint
+
+ // or takes no argument
+ let instance = SomeOtherStruct {};
+ let _ = instance.or().unwrap(); // should not trigger lint and should not panic
+
+ // None in or
+ let option: Option<&str> = None;
+ let _ = option.or(None).unwrap(); // should not trigger lint
+
+ // Not Err in or
+ let result: Result<&str, &str> = Err("Error");
+ let _ = result.or::<&str>(Err("Other Error")).unwrap(); // should not trigger lint
+
+ // other function between
+ let option: Option<&str> = None;
+ let _ = option.or(Some("fallback")).map(|v| v).unwrap(); // should not trigger lint
+}
diff --git a/src/tools/clippy/tests/ui/or_then_unwrap.stderr b/src/tools/clippy/tests/ui/or_then_unwrap.stderr
new file mode 100644
index 000000000..da88154c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/or_then_unwrap.stderr
@@ -0,0 +1,22 @@
+error: found `.or(Some(…)).unwrap()`
+ --> $DIR/or_then_unwrap.rs:24:20
+ |
+LL | let _ = option.or(Some("fallback")).unwrap(); // should trigger lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")`
+ |
+ = note: `-D clippy::or-then-unwrap` implied by `-D warnings`
+
+error: found `.or(Ok(…)).unwrap()`
+ --> $DIR/or_then_unwrap.rs:27:20
+ |
+LL | let _ = result.or::<&str>(Ok("fallback")).unwrap(); // should trigger lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")`
+
+error: found `.or(Some(…)).unwrap()`
+ --> $DIR/or_then_unwrap.rs:31:31
+ |
+LL | let _ = option.map(|v| v).or(Some("fallback")).unwrap().to_string().chars(); // should trigger lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.rs b/src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.rs
new file mode 100644
index 000000000..f20a0ede1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.rs
@@ -0,0 +1,11 @@
+#![warn(clippy::out_of_bounds_indexing)]
+#![allow(clippy::no_effect, const_err)]
+
+fn main() {
+ let x = [1, 2, 3, 4];
+
+ // issue 3102
+ let num = 1;
+ &x[num..10]; // should trigger out of bounds error
+ &x[10..num]; // should trigger out of bounds error
+}
diff --git a/src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.stderr b/src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.stderr
new file mode 100644
index 000000000..516c1df40
--- /dev/null
+++ b/src/tools/clippy/tests/ui/out_of_bounds_indexing/issue-3102.stderr
@@ -0,0 +1,16 @@
+error: range is out of bounds
+ --> $DIR/issue-3102.rs:9:13
+ |
+LL | &x[num..10]; // should trigger out of bounds error
+ | ^^
+ |
+ = note: `-D clippy::out-of-bounds-indexing` implied by `-D warnings`
+
+error: range is out of bounds
+ --> $DIR/issue-3102.rs:10:8
+ |
+LL | &x[10..num]; // should trigger out of bounds error
+ | ^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.rs b/src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.rs
new file mode 100644
index 000000000..590e578d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.rs
@@ -0,0 +1,22 @@
+#![warn(clippy::out_of_bounds_indexing)]
+#![allow(clippy::no_effect, clippy::unnecessary_operation, const_err)]
+
+fn main() {
+ let x = [1, 2, 3, 4];
+
+ &x[..=4];
+ &x[1..5];
+ &x[5..];
+ &x[..5];
+ &x[5..].iter().map(|x| 2 * x).collect::<Vec<i32>>();
+ &x[0..=4];
+
+ &x[4..]; // Ok, should not produce stderr.
+ &x[..4]; // Ok, should not produce stderr.
+ &x[..]; // Ok, should not produce stderr.
+ &x[1..]; // Ok, should not produce stderr.
+ &x[2..].iter().map(|x| 2 * x).collect::<Vec<i32>>(); // Ok, should not produce stderr.
+
+ &x[0..].get(..3); // Ok, should not produce stderr.
+ &x[0..3]; // Ok, should not produce stderr.
+}
diff --git a/src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.stderr b/src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.stderr
new file mode 100644
index 000000000..3d95afcda
--- /dev/null
+++ b/src/tools/clippy/tests/ui/out_of_bounds_indexing/simple.stderr
@@ -0,0 +1,40 @@
+error: range is out of bounds
+ --> $DIR/simple.rs:7:11
+ |
+LL | &x[..=4];
+ | ^
+ |
+ = note: `-D clippy::out-of-bounds-indexing` implied by `-D warnings`
+
+error: range is out of bounds
+ --> $DIR/simple.rs:8:11
+ |
+LL | &x[1..5];
+ | ^
+
+error: range is out of bounds
+ --> $DIR/simple.rs:9:8
+ |
+LL | &x[5..];
+ | ^
+
+error: range is out of bounds
+ --> $DIR/simple.rs:10:10
+ |
+LL | &x[..5];
+ | ^
+
+error: range is out of bounds
+ --> $DIR/simple.rs:11:8
+ |
+LL | &x[5..].iter().map(|x| 2 * x).collect::<Vec<i32>>();
+ | ^
+
+error: range is out of bounds
+ --> $DIR/simple.rs:12:12
+ |
+LL | &x[0..=4];
+ | ^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/overflow_check_conditional.rs b/src/tools/clippy/tests/ui/overflow_check_conditional.rs
new file mode 100644
index 000000000..5db75f529
--- /dev/null
+++ b/src/tools/clippy/tests/ui/overflow_check_conditional.rs
@@ -0,0 +1,25 @@
+#![warn(clippy::overflow_check_conditional)]
+
+fn main() {
+ let a: u32 = 1;
+ let b: u32 = 2;
+ let c: u32 = 3;
+ if a + b < a {}
+ if a > a + b {}
+ if a + b < b {}
+ if b > a + b {}
+ if a - b > b {}
+ if b < a - b {}
+ if a - b > a {}
+ if a < a - b {}
+ if a + b < c {}
+ if c > a + b {}
+ if a - b < c {}
+ if c > a - b {}
+ let i = 1.1;
+ let j = 2.2;
+ if i + j < i {}
+ if i - j < i {}
+ if i > i + j {}
+ if i - j < i {}
+}
diff --git a/src/tools/clippy/tests/ui/overflow_check_conditional.stderr b/src/tools/clippy/tests/ui/overflow_check_conditional.stderr
new file mode 100644
index 000000000..1b8b146b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/overflow_check_conditional.stderr
@@ -0,0 +1,52 @@
+error: you are trying to use classic C overflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:7:8
+ |
+LL | if a + b < a {}
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::overflow-check-conditional` implied by `-D warnings`
+
+error: you are trying to use classic C overflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:8:8
+ |
+LL | if a > a + b {}
+ | ^^^^^^^^^
+
+error: you are trying to use classic C overflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:9:8
+ |
+LL | if a + b < b {}
+ | ^^^^^^^^^
+
+error: you are trying to use classic C overflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:10:8
+ |
+LL | if b > a + b {}
+ | ^^^^^^^^^
+
+error: you are trying to use classic C underflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:11:8
+ |
+LL | if a - b > b {}
+ | ^^^^^^^^^
+
+error: you are trying to use classic C underflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:12:8
+ |
+LL | if b < a - b {}
+ | ^^^^^^^^^
+
+error: you are trying to use classic C underflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:13:8
+ |
+LL | if a - b > a {}
+ | ^^^^^^^^^
+
+error: you are trying to use classic C underflow conditions that will fail in Rust
+ --> $DIR/overflow_check_conditional.rs:14:8
+ |
+LL | if a < a - b {}
+ | ^^^^^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn.rs b/src/tools/clippy/tests/ui/panic_in_result_fn.rs
new file mode 100644
index 000000000..e75eb1b6e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn.rs
@@ -0,0 +1,70 @@
+#![warn(clippy::panic_in_result_fn)]
+#![allow(clippy::unnecessary_wraps)]
+struct A;
+
+impl A {
+ fn result_with_panic() -> Result<bool, String> // should emit lint
+ {
+ panic!("error");
+ }
+
+ fn result_with_unimplemented() -> Result<bool, String> // should emit lint
+ {
+ unimplemented!();
+ }
+
+ fn result_with_unreachable() -> Result<bool, String> // should emit lint
+ {
+ unreachable!();
+ }
+
+ fn result_with_todo() -> Result<bool, String> // should emit lint
+ {
+ todo!("Finish this");
+ }
+
+ fn other_with_panic() // should not emit lint
+ {
+ panic!("");
+ }
+
+ fn other_with_unreachable() // should not emit lint
+ {
+ unreachable!();
+ }
+
+ fn other_with_unimplemented() // should not emit lint
+ {
+ unimplemented!();
+ }
+
+ fn other_with_todo() // should not emit lint
+ {
+ todo!("finish this")
+ }
+
+ fn result_without_banned_functions() -> Result<bool, String> // should not emit lint
+ {
+ Ok(true)
+ }
+}
+
+fn function_result_with_panic() -> Result<bool, String> // should emit lint
+{
+ panic!("error");
+}
+
+fn todo() {
+ println!("something");
+}
+
+fn function_result_with_custom_todo() -> Result<bool, String> // should not emit lint
+{
+ todo();
+ Ok(true)
+}
+
+fn main() -> Result<(), String> {
+ todo!("finish main method");
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn.stderr b/src/tools/clippy/tests/ui/panic_in_result_fn.stderr
new file mode 100644
index 000000000..561503ae5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn.stderr
@@ -0,0 +1,99 @@
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn.rs:6:5
+ |
+LL | / fn result_with_panic() -> Result<bool, String> // should emit lint
+LL | | {
+LL | | panic!("error");
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::panic-in-result-fn` implied by `-D warnings`
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn.rs:8:9
+ |
+LL | panic!("error");
+ | ^^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn.rs:11:5
+ |
+LL | / fn result_with_unimplemented() -> Result<bool, String> // should emit lint
+LL | | {
+LL | | unimplemented!();
+LL | | }
+ | |_____^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn.rs:13:9
+ |
+LL | unimplemented!();
+ | ^^^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn.rs:16:5
+ |
+LL | / fn result_with_unreachable() -> Result<bool, String> // should emit lint
+LL | | {
+LL | | unreachable!();
+LL | | }
+ | |_____^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn.rs:18:9
+ |
+LL | unreachable!();
+ | ^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn.rs:21:5
+ |
+LL | / fn result_with_todo() -> Result<bool, String> // should emit lint
+LL | | {
+LL | | todo!("Finish this");
+LL | | }
+ | |_____^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn.rs:23:9
+ |
+LL | todo!("Finish this");
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn.rs:52:1
+ |
+LL | / fn function_result_with_panic() -> Result<bool, String> // should emit lint
+LL | | {
+LL | | panic!("error");
+LL | | }
+ | |_^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn.rs:54:5
+ |
+LL | panic!("error");
+ | ^^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn.rs:67:1
+ |
+LL | / fn main() -> Result<(), String> {
+LL | | todo!("finish main method");
+LL | | Ok(())
+LL | | }
+ | |_^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn.rs:68:5
+ |
+LL | todo!("finish main method");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.rs b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.rs
new file mode 100644
index 000000000..ffdf8288a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.rs
@@ -0,0 +1,48 @@
+#![warn(clippy::panic_in_result_fn)]
+#![allow(clippy::unnecessary_wraps)]
+
+struct A;
+
+impl A {
+ fn result_with_assert_with_message(x: i32) -> Result<bool, String> // should emit lint
+ {
+ assert!(x == 5, "wrong argument");
+ Ok(true)
+ }
+
+ fn result_with_assert_eq(x: i32) -> Result<bool, String> // should emit lint
+ {
+ assert_eq!(x, 5);
+ Ok(true)
+ }
+
+ fn result_with_assert_ne(x: i32) -> Result<bool, String> // should emit lint
+ {
+ assert_ne!(x, 1);
+ Ok(true)
+ }
+
+ fn other_with_assert_with_message(x: i32) // should not emit lint
+ {
+ assert!(x == 5, "wrong argument");
+ }
+
+ fn other_with_assert_eq(x: i32) // should not emit lint
+ {
+ assert_eq!(x, 5);
+ }
+
+ fn other_with_assert_ne(x: i32) // should not emit lint
+ {
+ assert_ne!(x, 1);
+ }
+
+ fn result_without_banned_functions() -> Result<bool, String> // should not emit lint
+ {
+ let assert = "assert!";
+ println!("No {}", assert);
+ Ok(true)
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr
new file mode 100644
index 000000000..b6aa005e7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr
@@ -0,0 +1,54 @@
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn_assertions.rs:7:5
+ |
+LL | / fn result_with_assert_with_message(x: i32) -> Result<bool, String> // should emit lint
+LL | | {
+LL | | assert!(x == 5, "wrong argument");
+LL | | Ok(true)
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::panic-in-result-fn` implied by `-D warnings`
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn_assertions.rs:9:9
+ |
+LL | assert!(x == 5, "wrong argument");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn_assertions.rs:13:5
+ |
+LL | / fn result_with_assert_eq(x: i32) -> Result<bool, String> // should emit lint
+LL | | {
+LL | | assert_eq!(x, 5);
+LL | | Ok(true)
+LL | | }
+ | |_____^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn_assertions.rs:15:9
+ |
+LL | assert_eq!(x, 5);
+ | ^^^^^^^^^^^^^^^^
+
+error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+ --> $DIR/panic_in_result_fn_assertions.rs:19:5
+ |
+LL | / fn result_with_assert_ne(x: i32) -> Result<bool, String> // should emit lint
+LL | | {
+LL | | assert_ne!(x, 1);
+LL | | Ok(true)
+LL | | }
+ | |_____^
+ |
+ = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+note: return Err() instead of panicking
+ --> $DIR/panic_in_result_fn_assertions.rs:21:9
+ |
+LL | assert_ne!(x, 1);
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn_debug_assertions.rs b/src/tools/clippy/tests/ui/panic_in_result_fn_debug_assertions.rs
new file mode 100644
index 000000000..c4fcd7e70
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn_debug_assertions.rs
@@ -0,0 +1,43 @@
+#![warn(clippy::panic_in_result_fn)]
+#![allow(clippy::unnecessary_wraps)]
+
+// debug_assert should never trigger the `panic_in_result_fn` lint
+
+struct A;
+
+impl A {
+ fn result_with_debug_assert_with_message(x: i32) -> Result<bool, String> {
+ debug_assert!(x == 5, "wrong argument");
+ Ok(true)
+ }
+
+ fn result_with_debug_assert_eq(x: i32) -> Result<bool, String> {
+ debug_assert_eq!(x, 5);
+ Ok(true)
+ }
+
+ fn result_with_debug_assert_ne(x: i32) -> Result<bool, String> {
+ debug_assert_ne!(x, 1);
+ Ok(true)
+ }
+
+ fn other_with_debug_assert_with_message(x: i32) {
+ debug_assert!(x == 5, "wrong argument");
+ }
+
+ fn other_with_debug_assert_eq(x: i32) {
+ debug_assert_eq!(x, 5);
+ }
+
+ fn other_with_debug_assert_ne(x: i32) {
+ debug_assert_ne!(x, 1);
+ }
+
+ fn result_without_banned_functions() -> Result<bool, String> {
+ let debug_assert = "debug_assert!";
+ println!("No {}", debug_assert);
+ Ok(true)
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/panicking_macros.rs b/src/tools/clippy/tests/ui/panicking_macros.rs
new file mode 100644
index 000000000..041ef17fa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panicking_macros.rs
@@ -0,0 +1,95 @@
+#![allow(clippy::assertions_on_constants, clippy::eq_op, clippy::let_unit_value)]
+#![feature(inline_const)]
+#![warn(clippy::unimplemented, clippy::unreachable, clippy::todo, clippy::panic)]
+
+extern crate core;
+
+const _: () = {
+ if 1 == 0 {
+ panic!("A balanced diet means a cupcake in each hand");
+ }
+};
+
+fn inline_const() {
+ let _ = const {
+ if 1 == 0 {
+ panic!("When nothing goes right, go left")
+ }
+ };
+}
+
+fn panic() {
+ let a = 2;
+ panic!();
+ panic!("message");
+ panic!("{} {}", "panic with", "multiple arguments");
+ let b = a + 2;
+}
+
+fn todo() {
+ let a = 2;
+ todo!();
+ todo!("message");
+ todo!("{} {}", "panic with", "multiple arguments");
+ let b = a + 2;
+}
+
+fn unimplemented() {
+ let a = 2;
+ unimplemented!();
+ unimplemented!("message");
+ unimplemented!("{} {}", "panic with", "multiple arguments");
+ let b = a + 2;
+}
+
+fn unreachable() {
+ let a = 2;
+ unreachable!();
+ unreachable!("message");
+ unreachable!("{} {}", "panic with", "multiple arguments");
+ let b = a + 2;
+}
+
+fn core_versions() {
+ use core::{panic, todo, unimplemented, unreachable};
+ panic!();
+ todo!();
+ unimplemented!();
+ unreachable!();
+}
+
+fn assert() {
+ assert!(true);
+ assert_eq!(true, true);
+ assert_ne!(true, false);
+}
+
+fn assert_msg() {
+ assert!(true, "this should not panic");
+ assert_eq!(true, true, "this should not panic");
+ assert_ne!(true, false, "this should not panic");
+}
+
+fn debug_assert() {
+ debug_assert!(true);
+ debug_assert_eq!(true, true);
+ debug_assert_ne!(true, false);
+}
+
+fn debug_assert_msg() {
+ debug_assert!(true, "test");
+ debug_assert_eq!(true, true, "test");
+ debug_assert_ne!(true, false, "test");
+}
+
+fn main() {
+ panic();
+ todo();
+ unimplemented();
+ unreachable();
+ core_versions();
+ assert();
+ assert_msg();
+ debug_assert();
+ debug_assert_msg();
+}
diff --git a/src/tools/clippy/tests/ui/panicking_macros.stderr b/src/tools/clippy/tests/ui/panicking_macros.stderr
new file mode 100644
index 000000000..4ceb6d144
--- /dev/null
+++ b/src/tools/clippy/tests/ui/panicking_macros.stderr
@@ -0,0 +1,106 @@
+error: `panic` should not be present in production code
+ --> $DIR/panicking_macros.rs:23:5
+ |
+LL | panic!();
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::panic` implied by `-D warnings`
+
+error: `panic` should not be present in production code
+ --> $DIR/panicking_macros.rs:24:5
+ |
+LL | panic!("message");
+ | ^^^^^^^^^^^^^^^^^
+
+error: `panic` should not be present in production code
+ --> $DIR/panicking_macros.rs:25:5
+ |
+LL | panic!("{} {}", "panic with", "multiple arguments");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `todo` should not be present in production code
+ --> $DIR/panicking_macros.rs:31:5
+ |
+LL | todo!();
+ | ^^^^^^^
+ |
+ = note: `-D clippy::todo` implied by `-D warnings`
+
+error: `todo` should not be present in production code
+ --> $DIR/panicking_macros.rs:32:5
+ |
+LL | todo!("message");
+ | ^^^^^^^^^^^^^^^^
+
+error: `todo` should not be present in production code
+ --> $DIR/panicking_macros.rs:33:5
+ |
+LL | todo!("{} {}", "panic with", "multiple arguments");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `unimplemented` should not be present in production code
+ --> $DIR/panicking_macros.rs:39:5
+ |
+LL | unimplemented!();
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unimplemented` implied by `-D warnings`
+
+error: `unimplemented` should not be present in production code
+ --> $DIR/panicking_macros.rs:40:5
+ |
+LL | unimplemented!("message");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `unimplemented` should not be present in production code
+ --> $DIR/panicking_macros.rs:41:5
+ |
+LL | unimplemented!("{} {}", "panic with", "multiple arguments");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: usage of the `unreachable!` macro
+ --> $DIR/panicking_macros.rs:47:5
+ |
+LL | unreachable!();
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unreachable` implied by `-D warnings`
+
+error: usage of the `unreachable!` macro
+ --> $DIR/panicking_macros.rs:48:5
+ |
+LL | unreachable!("message");
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: usage of the `unreachable!` macro
+ --> $DIR/panicking_macros.rs:49:5
+ |
+LL | unreachable!("{} {}", "panic with", "multiple arguments");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `panic` should not be present in production code
+ --> $DIR/panicking_macros.rs:55:5
+ |
+LL | panic!();
+ | ^^^^^^^^
+
+error: `todo` should not be present in production code
+ --> $DIR/panicking_macros.rs:56:5
+ |
+LL | todo!();
+ | ^^^^^^^
+
+error: `unimplemented` should not be present in production code
+ --> $DIR/panicking_macros.rs:57:5
+ |
+LL | unimplemented!();
+ | ^^^^^^^^^^^^^^^^
+
+error: usage of the `unreachable!` macro
+ --> $DIR/panicking_macros.rs:58:5
+ |
+LL | unreachable!();
+ | ^^^^^^^^^^^^^^
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/partialeq_ne_impl.rs b/src/tools/clippy/tests/ui/partialeq_ne_impl.rs
new file mode 100644
index 000000000..1338d3c74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/partialeq_ne_impl.rs
@@ -0,0 +1,26 @@
+#![allow(dead_code)]
+
+struct Foo;
+
+impl PartialEq for Foo {
+ fn eq(&self, _: &Foo) -> bool {
+ true
+ }
+ fn ne(&self, _: &Foo) -> bool {
+ false
+ }
+}
+
+struct Bar;
+
+impl PartialEq for Bar {
+ fn eq(&self, _: &Bar) -> bool {
+ true
+ }
+ #[allow(clippy::partialeq_ne_impl)]
+ fn ne(&self, _: &Bar) -> bool {
+ false
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr b/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr
new file mode 100644
index 000000000..b92da4511
--- /dev/null
+++ b/src/tools/clippy/tests/ui/partialeq_ne_impl.stderr
@@ -0,0 +1,12 @@
+error: re-implementing `PartialEq::ne` is unnecessary
+ --> $DIR/partialeq_ne_impl.rs:9:5
+ |
+LL | / fn ne(&self, _: &Foo) -> bool {
+LL | | false
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::partialeq-ne-impl` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/path_buf_push_overwrite.fixed b/src/tools/clippy/tests/ui/path_buf_push_overwrite.fixed
new file mode 100644
index 000000000..ef8856830
--- /dev/null
+++ b/src/tools/clippy/tests/ui/path_buf_push_overwrite.fixed
@@ -0,0 +1,8 @@
+// run-rustfix
+use std::path::PathBuf;
+
+#[warn(clippy::all, clippy::path_buf_push_overwrite)]
+fn main() {
+ let mut x = PathBuf::from("/foo");
+ x.push("bar");
+}
diff --git a/src/tools/clippy/tests/ui/path_buf_push_overwrite.rs b/src/tools/clippy/tests/ui/path_buf_push_overwrite.rs
new file mode 100644
index 000000000..6e2d483f4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/path_buf_push_overwrite.rs
@@ -0,0 +1,8 @@
+// run-rustfix
+use std::path::PathBuf;
+
+#[warn(clippy::all, clippy::path_buf_push_overwrite)]
+fn main() {
+ let mut x = PathBuf::from("/foo");
+ x.push("/bar");
+}
diff --git a/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr b/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr
new file mode 100644
index 000000000..bb8dce2bb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/path_buf_push_overwrite.stderr
@@ -0,0 +1,10 @@
+error: calling `push` with '/' or '\' (file system root) will overwrite the previous path definition
+ --> $DIR/path_buf_push_overwrite.rs:7:12
+ |
+LL | x.push("/bar");
+ | ^^^^^^ help: try: `"bar"`
+ |
+ = note: `-D clippy::path-buf-push-overwrite` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs
new file mode 100644
index 000000000..55a8c2621
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs
@@ -0,0 +1,49 @@
+#![allow(clippy::all)]
+#![warn(clippy::pattern_type_mismatch)]
+
+fn main() {}
+
+fn should_lint() {
+ let value = &Some(23);
+ match value {
+ Some(_) => (),
+ _ => (),
+ }
+
+ let value = &mut Some(23);
+ match value {
+ Some(_) => (),
+ _ => (),
+ }
+}
+
+fn should_not_lint() {
+ let value = &Some(23);
+ match value {
+ &Some(_) => (),
+ _ => (),
+ }
+ match *value {
+ Some(_) => (),
+ _ => (),
+ }
+
+ let value = &mut Some(23);
+ match value {
+ &mut Some(_) => (),
+ _ => (),
+ }
+ match *value {
+ Some(_) => (),
+ _ => (),
+ }
+
+ const FOO: &str = "foo";
+
+ fn foo(s: &str) -> i32 {
+ match s {
+ FOO => 1,
+ _ => 0,
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.stderr
new file mode 100644
index 000000000..3421d5683
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.stderr
@@ -0,0 +1,19 @@
+error: type of pattern does not match the expression type
+ --> $DIR/mutability.rs:9:9
+ |
+LL | Some(_) => (),
+ | ^^^^^^^
+ |
+ = note: `-D clippy::pattern-type-mismatch` implied by `-D warnings`
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/mutability.rs:15:9
+ |
+LL | Some(_) => (),
+ | ^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&mut _` pattern and adjust the enclosed variable bindings
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs
new file mode 100644
index 000000000..065ea9fb9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs
@@ -0,0 +1,24 @@
+#![allow(clippy::all)]
+#![warn(clippy::pattern_type_mismatch)]
+
+fn main() {}
+
+fn alternatives() {
+ enum Value<'a> {
+ Unused,
+ A(&'a Option<i32>),
+ B,
+ }
+ let ref_value = &Value::A(&Some(23));
+
+ // not ok
+ if let Value::B | Value::A(_) = ref_value {}
+ if let &Value::B | &Value::A(Some(_)) = ref_value {}
+ if let Value::B | Value::A(Some(_)) = *ref_value {}
+
+ // ok
+ if let &Value::B | &Value::A(_) = ref_value {}
+ if let Value::B | Value::A(_) = *ref_value {}
+ if let &Value::B | &Value::A(&Some(_)) = ref_value {}
+ if let Value::B | Value::A(&Some(_)) = *ref_value {}
+}
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr
new file mode 100644
index 000000000..d285c9378
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr
@@ -0,0 +1,27 @@
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_alternatives.rs:15:12
+ |
+LL | if let Value::B | Value::A(_) = ref_value {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::pattern-type-mismatch` implied by `-D warnings`
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_alternatives.rs:16:34
+ |
+LL | if let &Value::B | &Value::A(Some(_)) = ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_alternatives.rs:17:32
+ |
+LL | if let Value::B | Value::A(Some(_)) = *ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs
new file mode 100644
index 000000000..417b1c107
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs
@@ -0,0 +1,45 @@
+#![allow(clippy::all)]
+#![warn(clippy::pattern_type_mismatch)]
+
+fn main() {}
+
+fn struct_types() {
+ struct Struct<'a> {
+ ref_inner: &'a Option<i32>,
+ }
+ let ref_value = &Struct { ref_inner: &Some(42) };
+
+ // not ok
+ let Struct { .. } = ref_value;
+ if let &Struct { ref_inner: Some(_) } = ref_value {}
+ if let Struct { ref_inner: Some(_) } = *ref_value {}
+
+ // ok
+ let &Struct { .. } = ref_value;
+ let Struct { .. } = *ref_value;
+ if let &Struct { ref_inner: &Some(_) } = ref_value {}
+ if let Struct { ref_inner: &Some(_) } = *ref_value {}
+}
+
+fn struct_enum_variants() {
+ enum StructEnum<'a> {
+ Empty,
+ Var { inner_ref: &'a Option<i32> },
+ }
+ let ref_value = &StructEnum::Var { inner_ref: &Some(42) };
+
+ // not ok
+ if let StructEnum::Var { .. } = ref_value {}
+ if let StructEnum::Var { inner_ref: Some(_) } = ref_value {}
+ if let &StructEnum::Var { inner_ref: Some(_) } = ref_value {}
+ if let StructEnum::Var { inner_ref: Some(_) } = *ref_value {}
+ if let StructEnum::Empty = ref_value {}
+
+ // ok
+ if let &StructEnum::Var { .. } = ref_value {}
+ if let StructEnum::Var { .. } = *ref_value {}
+ if let &StructEnum::Var { inner_ref: &Some(_) } = ref_value {}
+ if let StructEnum::Var { inner_ref: &Some(_) } = *ref_value {}
+ if let &StructEnum::Empty = ref_value {}
+ if let StructEnum::Empty = *ref_value {}
+}
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr
new file mode 100644
index 000000000..d428e85b0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr
@@ -0,0 +1,67 @@
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:13:9
+ |
+LL | let Struct { .. } = ref_value;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::pattern-type-mismatch` implied by `-D warnings`
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:14:33
+ |
+LL | if let &Struct { ref_inner: Some(_) } = ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:15:32
+ |
+LL | if let Struct { ref_inner: Some(_) } = *ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:32:12
+ |
+LL | if let StructEnum::Var { .. } = ref_value {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:33:12
+ |
+LL | if let StructEnum::Var { inner_ref: Some(_) } = ref_value {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:34:42
+ |
+LL | if let &StructEnum::Var { inner_ref: Some(_) } = ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:35:41
+ |
+LL | if let StructEnum::Var { inner_ref: Some(_) } = *ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_structs.rs:36:12
+ |
+LL | if let StructEnum::Empty = ref_value {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs
new file mode 100644
index 000000000..19504a051
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs
@@ -0,0 +1,57 @@
+#![allow(clippy::all)]
+#![warn(clippy::pattern_type_mismatch)]
+
+fn main() {}
+
+fn tuple_types() {
+ struct TupleStruct<'a>(&'a Option<i32>);
+ let ref_value = &TupleStruct(&Some(42));
+
+ // not ok
+ let TupleStruct(_) = ref_value;
+ if let &TupleStruct(Some(_)) = ref_value {}
+ if let TupleStruct(Some(_)) = *ref_value {}
+
+ // ok
+ let &TupleStruct(_) = ref_value;
+ let TupleStruct(_) = *ref_value;
+ if let &TupleStruct(&Some(_)) = ref_value {}
+ if let TupleStruct(&Some(_)) = *ref_value {}
+}
+
+fn tuple_enum_variants() {
+ enum TupleEnum<'a> {
+ Empty,
+ Var(&'a Option<i32>),
+ }
+ let ref_value = &TupleEnum::Var(&Some(42));
+
+ // not ok
+ if let TupleEnum::Var(_) = ref_value {}
+ if let &TupleEnum::Var(Some(_)) = ref_value {}
+ if let TupleEnum::Var(Some(_)) = *ref_value {}
+ if let TupleEnum::Empty = ref_value {}
+
+ // ok
+ if let &TupleEnum::Var(_) = ref_value {}
+ if let TupleEnum::Var(_) = *ref_value {}
+ if let &TupleEnum::Var(&Some(_)) = ref_value {}
+ if let TupleEnum::Var(&Some(_)) = *ref_value {}
+ if let &TupleEnum::Empty = ref_value {}
+ if let TupleEnum::Empty = *ref_value {}
+}
+
+fn plain_tuples() {
+ let ref_value = &(&Some(23), &Some(42));
+
+ // not ok
+ let (_a, _b) = ref_value;
+ if let &(_a, Some(_)) = ref_value {}
+ if let (_a, Some(_)) = *ref_value {}
+
+ // ok
+ let &(_a, _b) = ref_value;
+ let (_a, _b) = *ref_value;
+ if let &(_a, &Some(_)) = ref_value {}
+ if let (_a, &Some(_)) = *ref_value {}
+}
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr
new file mode 100644
index 000000000..edd0074d0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr
@@ -0,0 +1,83 @@
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:11:9
+ |
+LL | let TupleStruct(_) = ref_value;
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::pattern-type-mismatch` implied by `-D warnings`
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:12:25
+ |
+LL | if let &TupleStruct(Some(_)) = ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:13:24
+ |
+LL | if let TupleStruct(Some(_)) = *ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:30:12
+ |
+LL | if let TupleEnum::Var(_) = ref_value {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:31:28
+ |
+LL | if let &TupleEnum::Var(Some(_)) = ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:32:27
+ |
+LL | if let TupleEnum::Var(Some(_)) = *ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:33:12
+ |
+LL | if let TupleEnum::Empty = ref_value {}
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:48:9
+ |
+LL | let (_a, _b) = ref_value;
+ | ^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:49:18
+ |
+LL | if let &(_a, Some(_)) = ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/pattern_tuples.rs:50:17
+ |
+LL | if let (_a, Some(_)) = *ref_value {}
+ | ^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs
new file mode 100644
index 000000000..e89917c41
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs
@@ -0,0 +1,146 @@
+#![allow(clippy::all)]
+#![warn(clippy::pattern_type_mismatch)]
+
+fn main() {}
+
+fn syntax_match() {
+ let ref_value = &Some(&Some(42));
+
+ // not ok
+ match ref_value {
+ Some(_) => (),
+ None => (),
+ }
+
+ // ok
+ match ref_value {
+ &Some(_) => (),
+ &None => (),
+ }
+ match *ref_value {
+ Some(_) => (),
+ None => (),
+ }
+}
+
+fn syntax_if_let() {
+ let ref_value = &Some(42);
+
+ // not ok
+ if let Some(_) = ref_value {}
+
+ // ok
+ if let &Some(_) = ref_value {}
+ if let Some(_) = *ref_value {}
+}
+
+fn syntax_while_let() {
+ let ref_value = &Some(42);
+
+ // not ok
+ while let Some(_) = ref_value {
+ break;
+ }
+
+ // ok
+ while let &Some(_) = ref_value {
+ break;
+ }
+ while let Some(_) = *ref_value {
+ break;
+ }
+}
+
+fn syntax_for() {
+ let ref_value = &Some(23);
+ let slice = &[(2, 3), (4, 2)];
+
+ // not ok
+ for (_a, _b) in slice.iter() {}
+
+ // ok
+ for &(_a, _b) in slice.iter() {}
+}
+
+fn syntax_let() {
+ let ref_value = &(2, 3);
+
+ // not ok
+ let (_n, _m) = ref_value;
+
+ // ok
+ let &(_n, _m) = ref_value;
+ let (_n, _m) = *ref_value;
+}
+
+fn syntax_fn() {
+ // not ok
+ fn foo((_a, _b): &(i32, i32)) {}
+
+ // ok
+ fn foo_ok_1(&(_a, _b): &(i32, i32)) {}
+}
+
+fn syntax_closure() {
+ fn foo<F>(f: F)
+ where
+ F: FnOnce(&(i32, i32)),
+ {
+ }
+
+ // not ok
+ foo(|(_a, _b)| ());
+
+ // ok
+ foo(|&(_a, _b)| ());
+}
+
+fn macro_with_expression() {
+ macro_rules! matching_macro {
+ ($e:expr) => {
+ $e
+ };
+ }
+ let value = &Some(23);
+
+ // not ok
+ matching_macro!(match value {
+ Some(_) => (),
+ _ => (),
+ });
+
+ // ok
+ matching_macro!(match value {
+ &Some(_) => (),
+ _ => (),
+ });
+ matching_macro!(match *value {
+ Some(_) => (),
+ _ => (),
+ });
+}
+
+fn macro_expansion() {
+ macro_rules! matching_macro {
+ ($e:expr) => {
+ // not ok
+ match $e {
+ Some(_) => (),
+ _ => (),
+ }
+
+ // ok
+ match $e {
+ &Some(_) => (),
+ _ => (),
+ }
+ match *$e {
+ Some(_) => (),
+ _ => (),
+ }
+ };
+ }
+
+ let value = &Some(23);
+ matching_macro!(value);
+}
diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr
new file mode 100644
index 000000000..12b3d3a8b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr
@@ -0,0 +1,79 @@
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:11:9
+ |
+LL | Some(_) => (),
+ | ^^^^^^^
+ |
+ = note: `-D clippy::pattern-type-mismatch` implied by `-D warnings`
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:30:12
+ |
+LL | if let Some(_) = ref_value {}
+ | ^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:41:15
+ |
+LL | while let Some(_) = ref_value {
+ | ^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:59:9
+ |
+LL | for (_a, _b) in slice.iter() {}
+ | ^^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:69:9
+ |
+LL | let (_n, _m) = ref_value;
+ | ^^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:78:12
+ |
+LL | fn foo((_a, _b): &(i32, i32)) {}
+ | ^^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:92:10
+ |
+LL | foo(|(_a, _b)| ());
+ | ^^^^^^^^
+ |
+ = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:108:9
+ |
+LL | Some(_) => (),
+ | ^^^^^^^
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+
+error: type of pattern does not match the expression type
+ --> $DIR/syntax.rs:128:17
+ |
+LL | Some(_) => (),
+ | ^^^^^^^
+...
+LL | matching_macro!(value);
+ | ---------------------- in this macro invocation
+ |
+ = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings
+ = note: this error originates in the macro `matching_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/patterns.fixed b/src/tools/clippy/tests/ui/patterns.fixed
new file mode 100644
index 000000000..f22388154
--- /dev/null
+++ b/src/tools/clippy/tests/ui/patterns.fixed
@@ -0,0 +1,36 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::all)]
+
+fn main() {
+ let v = Some(true);
+ let s = [0, 1, 2, 3, 4];
+ match v {
+ Some(x) => (),
+ y => (),
+ }
+ match v {
+ Some(x) => (),
+ y @ None => (), // no error
+ }
+ match s {
+ [x, inside @ .., y] => (), // no error
+ [..] => (),
+ }
+
+ let mut mutv = vec![1, 2, 3];
+
+ // required "ref" left out in suggestion: #5271
+ match mutv {
+ ref mut x => {
+ x.push(4);
+ println!("vec: {:?}", x);
+ },
+ ref y if y == &vec![0] => (),
+ }
+
+ match mutv {
+ ref x => println!("vec: {:?}", x),
+ ref y if y == &vec![0] => (),
+ }
+}
diff --git a/src/tools/clippy/tests/ui/patterns.rs b/src/tools/clippy/tests/ui/patterns.rs
new file mode 100644
index 000000000..5848ecd38
--- /dev/null
+++ b/src/tools/clippy/tests/ui/patterns.rs
@@ -0,0 +1,36 @@
+// run-rustfix
+#![allow(unused)]
+#![warn(clippy::all)]
+
+fn main() {
+ let v = Some(true);
+ let s = [0, 1, 2, 3, 4];
+ match v {
+ Some(x) => (),
+ y @ _ => (),
+ }
+ match v {
+ Some(x) => (),
+ y @ None => (), // no error
+ }
+ match s {
+ [x, inside @ .., y] => (), // no error
+ [..] => (),
+ }
+
+ let mut mutv = vec![1, 2, 3];
+
+ // required "ref" left out in suggestion: #5271
+ match mutv {
+ ref mut x @ _ => {
+ x.push(4);
+ println!("vec: {:?}", x);
+ },
+ ref y if y == &vec![0] => (),
+ }
+
+ match mutv {
+ ref x @ _ => println!("vec: {:?}", x),
+ ref y if y == &vec![0] => (),
+ }
+}
diff --git a/src/tools/clippy/tests/ui/patterns.stderr b/src/tools/clippy/tests/ui/patterns.stderr
new file mode 100644
index 000000000..af0675806
--- /dev/null
+++ b/src/tools/clippy/tests/ui/patterns.stderr
@@ -0,0 +1,22 @@
+error: the `y @ _` pattern can be written as just `y`
+ --> $DIR/patterns.rs:10:9
+ |
+LL | y @ _ => (),
+ | ^^^^^ help: try: `y`
+ |
+ = note: `-D clippy::redundant-pattern` implied by `-D warnings`
+
+error: the `x @ _` pattern can be written as just `x`
+ --> $DIR/patterns.rs:25:9
+ |
+LL | ref mut x @ _ => {
+ | ^^^^^^^^^^^^^ help: try: `ref mut x`
+
+error: the `x @ _` pattern can be written as just `x`
+ --> $DIR/patterns.rs:33:9
+ |
+LL | ref x @ _ => println!("vec: {:?}", x),
+ | ^^^^^^^^^ help: try: `ref x`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/precedence.fixed b/src/tools/clippy/tests/ui/precedence.fixed
new file mode 100644
index 000000000..163bd044c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/precedence.fixed
@@ -0,0 +1,61 @@
+// run-rustfix
+#![warn(clippy::precedence)]
+#![allow(unused_must_use, clippy::no_effect, clippy::unnecessary_operation)]
+#![allow(clippy::identity_op)]
+#![allow(clippy::eq_op)]
+
+macro_rules! trip {
+ ($a:expr) => {
+ match $a & 0b1111_1111u8 {
+ 0 => println!("a is zero ({})", $a),
+ _ => println!("a is {}", $a),
+ }
+ };
+}
+
+fn main() {
+ 1 << (2 + 3);
+ (1 + 2) << 3;
+ 4 >> (1 + 1);
+ (1 + 3) >> 2;
+ 1 ^ (1 - 1);
+ 3 | (2 - 1);
+ 3 & (5 - 2);
+ -(1i32.abs());
+ -(1f32.abs());
+
+ // These should not trigger an error
+ let _ = (-1i32).abs();
+ let _ = (-1f32).abs();
+ let _ = -(1i32).abs();
+ let _ = -(1f32).abs();
+ let _ = -(1i32.abs());
+ let _ = -(1f32.abs());
+
+ // Odd functions should not trigger an error
+ let _ = -1f64.asin();
+ let _ = -1f64.asinh();
+ let _ = -1f64.atan();
+ let _ = -1f64.atanh();
+ let _ = -1f64.cbrt();
+ let _ = -1f64.fract();
+ let _ = -1f64.round();
+ let _ = -1f64.signum();
+ let _ = -1f64.sin();
+ let _ = -1f64.sinh();
+ let _ = -1f64.tan();
+ let _ = -1f64.tanh();
+ let _ = -1f64.to_degrees();
+ let _ = -1f64.to_radians();
+
+ // Chains containing any non-odd function should trigger (issue #5924)
+ let _ = -(1.0_f64.cos().cos());
+ let _ = -(1.0_f64.cos().sin());
+ let _ = -(1.0_f64.sin().cos());
+
+ // Chains of odd functions shouldn't trigger
+ let _ = -1f64.sin().sin();
+
+ let b = 3;
+ trip!(b * 8);
+}
diff --git a/src/tools/clippy/tests/ui/precedence.rs b/src/tools/clippy/tests/ui/precedence.rs
new file mode 100644
index 000000000..8c849e320
--- /dev/null
+++ b/src/tools/clippy/tests/ui/precedence.rs
@@ -0,0 +1,61 @@
+// run-rustfix
+#![warn(clippy::precedence)]
+#![allow(unused_must_use, clippy::no_effect, clippy::unnecessary_operation)]
+#![allow(clippy::identity_op)]
+#![allow(clippy::eq_op)]
+
+macro_rules! trip {
+ ($a:expr) => {
+ match $a & 0b1111_1111u8 {
+ 0 => println!("a is zero ({})", $a),
+ _ => println!("a is {}", $a),
+ }
+ };
+}
+
+fn main() {
+ 1 << 2 + 3;
+ 1 + 2 << 3;
+ 4 >> 1 + 1;
+ 1 + 3 >> 2;
+ 1 ^ 1 - 1;
+ 3 | 2 - 1;
+ 3 & 5 - 2;
+ -1i32.abs();
+ -1f32.abs();
+
+ // These should not trigger an error
+ let _ = (-1i32).abs();
+ let _ = (-1f32).abs();
+ let _ = -(1i32).abs();
+ let _ = -(1f32).abs();
+ let _ = -(1i32.abs());
+ let _ = -(1f32.abs());
+
+ // Odd functions should not trigger an error
+ let _ = -1f64.asin();
+ let _ = -1f64.asinh();
+ let _ = -1f64.atan();
+ let _ = -1f64.atanh();
+ let _ = -1f64.cbrt();
+ let _ = -1f64.fract();
+ let _ = -1f64.round();
+ let _ = -1f64.signum();
+ let _ = -1f64.sin();
+ let _ = -1f64.sinh();
+ let _ = -1f64.tan();
+ let _ = -1f64.tanh();
+ let _ = -1f64.to_degrees();
+ let _ = -1f64.to_radians();
+
+ // Chains containing any non-odd function should trigger (issue #5924)
+ let _ = -1.0_f64.cos().cos();
+ let _ = -1.0_f64.cos().sin();
+ let _ = -1.0_f64.sin().cos();
+
+ // Chains of odd functions shouldn't trigger
+ let _ = -1f64.sin().sin();
+
+ let b = 3;
+ trip!(b * 8);
+}
diff --git a/src/tools/clippy/tests/ui/precedence.stderr b/src/tools/clippy/tests/ui/precedence.stderr
new file mode 100644
index 000000000..03d585b39
--- /dev/null
+++ b/src/tools/clippy/tests/ui/precedence.stderr
@@ -0,0 +1,76 @@
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:17:5
+ |
+LL | 1 << 2 + 3;
+ | ^^^^^^^^^^ help: consider parenthesizing your expression: `1 << (2 + 3)`
+ |
+ = note: `-D clippy::precedence` implied by `-D warnings`
+
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:18:5
+ |
+LL | 1 + 2 << 3;
+ | ^^^^^^^^^^ help: consider parenthesizing your expression: `(1 + 2) << 3`
+
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:19:5
+ |
+LL | 4 >> 1 + 1;
+ | ^^^^^^^^^^ help: consider parenthesizing your expression: `4 >> (1 + 1)`
+
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:20:5
+ |
+LL | 1 + 3 >> 2;
+ | ^^^^^^^^^^ help: consider parenthesizing your expression: `(1 + 3) >> 2`
+
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:21:5
+ |
+LL | 1 ^ 1 - 1;
+ | ^^^^^^^^^ help: consider parenthesizing your expression: `1 ^ (1 - 1)`
+
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:22:5
+ |
+LL | 3 | 2 - 1;
+ | ^^^^^^^^^ help: consider parenthesizing your expression: `3 | (2 - 1)`
+
+error: operator precedence can trip the unwary
+ --> $DIR/precedence.rs:23:5
+ |
+LL | 3 & 5 - 2;
+ | ^^^^^^^^^ help: consider parenthesizing your expression: `3 & (5 - 2)`
+
+error: unary minus has lower precedence than method call
+ --> $DIR/precedence.rs:24:5
+ |
+LL | -1i32.abs();
+ | ^^^^^^^^^^^ help: consider adding parentheses to clarify your intent: `-(1i32.abs())`
+
+error: unary minus has lower precedence than method call
+ --> $DIR/precedence.rs:25:5
+ |
+LL | -1f32.abs();
+ | ^^^^^^^^^^^ help: consider adding parentheses to clarify your intent: `-(1f32.abs())`
+
+error: unary minus has lower precedence than method call
+ --> $DIR/precedence.rs:52:13
+ |
+LL | let _ = -1.0_f64.cos().cos();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider adding parentheses to clarify your intent: `-(1.0_f64.cos().cos())`
+
+error: unary minus has lower precedence than method call
+ --> $DIR/precedence.rs:53:13
+ |
+LL | let _ = -1.0_f64.cos().sin();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider adding parentheses to clarify your intent: `-(1.0_f64.cos().sin())`
+
+error: unary minus has lower precedence than method call
+ --> $DIR/precedence.rs:54:13
+ |
+LL | let _ = -1.0_f64.sin().cos();
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider adding parentheses to clarify your intent: `-(1.0_f64.sin().cos())`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/print.rs b/src/tools/clippy/tests/ui/print.rs
new file mode 100644
index 000000000..366ccc2b3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print.rs
@@ -0,0 +1,35 @@
+#![allow(clippy::print_literal, clippy::write_literal)]
+#![warn(clippy::print_stdout, clippy::use_debug)]
+
+use std::fmt::{Debug, Display, Formatter, Result};
+
+#[allow(dead_code)]
+struct Foo;
+
+impl Display for Foo {
+ fn fmt(&self, f: &mut Formatter) -> Result {
+ write!(f, "{:?}", 43.1415)
+ }
+}
+
+impl Debug for Foo {
+ fn fmt(&self, f: &mut Formatter) -> Result {
+ // ok, we can use `Debug` formatting in `Debug` implementations
+ write!(f, "{:?}", 42.718)
+ }
+}
+
+fn main() {
+ println!("Hello");
+ print!("Hello");
+
+ print!("Hello {}", "World");
+
+ print!("Hello {:?}", "World");
+
+ print!("Hello {:#?}", "#orld");
+
+ assert_eq!(42, 1337);
+
+ vec![1, 2];
+}
diff --git a/src/tools/clippy/tests/ui/print.stderr b/src/tools/clippy/tests/ui/print.stderr
new file mode 100644
index 000000000..1754c4183
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print.stderr
@@ -0,0 +1,54 @@
+error: use of `Debug`-based formatting
+ --> $DIR/print.rs:11:20
+ |
+LL | write!(f, "{:?}", 43.1415)
+ | ^^^^
+ |
+ = note: `-D clippy::use-debug` implied by `-D warnings`
+
+error: use of `println!`
+ --> $DIR/print.rs:23:5
+ |
+LL | println!("Hello");
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::print-stdout` implied by `-D warnings`
+
+error: use of `print!`
+ --> $DIR/print.rs:24:5
+ |
+LL | print!("Hello");
+ | ^^^^^^^^^^^^^^^
+
+error: use of `print!`
+ --> $DIR/print.rs:26:5
+ |
+LL | print!("Hello {}", "World");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: use of `print!`
+ --> $DIR/print.rs:28:5
+ |
+LL | print!("Hello {:?}", "World");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: use of `Debug`-based formatting
+ --> $DIR/print.rs:28:19
+ |
+LL | print!("Hello {:?}", "World");
+ | ^^^^
+
+error: use of `print!`
+ --> $DIR/print.rs:30:5
+ |
+LL | print!("Hello {:#?}", "#orld");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: use of `Debug`-based formatting
+ --> $DIR/print.rs:30:19
+ |
+LL | print!("Hello {:#?}", "#orld");
+ | ^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/print_in_format_impl.rs b/src/tools/clippy/tests/ui/print_in_format_impl.rs
new file mode 100644
index 000000000..64e886866
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_in_format_impl.rs
@@ -0,0 +1,58 @@
+#![allow(unused, clippy::print_literal, clippy::write_literal)]
+#![warn(clippy::print_in_format_impl)]
+use std::fmt::{Debug, Display, Error, Formatter};
+
+macro_rules! indirect {
+ () => {{ println!() }};
+}
+
+macro_rules! nested {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+
+struct Foo;
+impl Debug for Foo {
+ fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
+ static WORKS_WITH_NESTED_ITEMS: bool = true;
+
+ print!("{}", 1);
+ println!("{}", 2);
+ eprint!("{}", 3);
+ eprintln!("{}", 4);
+ nested! {
+ println!("nested");
+ };
+
+ write!(f, "{}", 5);
+ writeln!(f, "{}", 6);
+ indirect!();
+
+ Ok(())
+ }
+}
+
+impl Display for Foo {
+ fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
+ print!("Display");
+ write!(f, "Display");
+
+ Ok(())
+ }
+}
+
+struct UnnamedFormatter;
+impl Debug for UnnamedFormatter {
+ fn fmt(&self, _: &mut Formatter) -> Result<(), Error> {
+ println!("UnnamedFormatter");
+ Ok(())
+ }
+}
+
+fn main() {
+ print!("outside fmt");
+ println!("outside fmt");
+ eprint!("outside fmt");
+ eprintln!("outside fmt");
+}
diff --git a/src/tools/clippy/tests/ui/print_in_format_impl.stderr b/src/tools/clippy/tests/ui/print_in_format_impl.stderr
new file mode 100644
index 000000000..63b7179bc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_in_format_impl.stderr
@@ -0,0 +1,46 @@
+error: use of `print!` in `Debug` impl
+ --> $DIR/print_in_format_impl.rs:20:9
+ |
+LL | print!("{}", 1);
+ | ^^^^^^^^^^^^^^^ help: replace with: `write!(f, ..)`
+ |
+ = note: `-D clippy::print-in-format-impl` implied by `-D warnings`
+
+error: use of `println!` in `Debug` impl
+ --> $DIR/print_in_format_impl.rs:21:9
+ |
+LL | println!("{}", 2);
+ | ^^^^^^^^^^^^^^^^^ help: replace with: `writeln!(f, ..)`
+
+error: use of `eprint!` in `Debug` impl
+ --> $DIR/print_in_format_impl.rs:22:9
+ |
+LL | eprint!("{}", 3);
+ | ^^^^^^^^^^^^^^^^ help: replace with: `write!(f, ..)`
+
+error: use of `eprintln!` in `Debug` impl
+ --> $DIR/print_in_format_impl.rs:23:9
+ |
+LL | eprintln!("{}", 4);
+ | ^^^^^^^^^^^^^^^^^^ help: replace with: `writeln!(f, ..)`
+
+error: use of `println!` in `Debug` impl
+ --> $DIR/print_in_format_impl.rs:25:13
+ |
+LL | println!("nested");
+ | ^^^^^^^^^^^^^^^^^^ help: replace with: `writeln!(f, ..)`
+
+error: use of `print!` in `Display` impl
+ --> $DIR/print_in_format_impl.rs:38:9
+ |
+LL | print!("Display");
+ | ^^^^^^^^^^^^^^^^^ help: replace with: `write!(f, ..)`
+
+error: use of `println!` in `Debug` impl
+ --> $DIR/print_in_format_impl.rs:48:9
+ |
+LL | println!("UnnamedFormatter");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `writeln!(..)`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/print_literal.rs b/src/tools/clippy/tests/ui/print_literal.rs
new file mode 100644
index 000000000..8665a3bb2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_literal.rs
@@ -0,0 +1,38 @@
+#![warn(clippy::print_literal)]
+
+fn main() {
+ // these should be fine
+ print!("Hello");
+ println!("Hello");
+ let world = "world";
+ println!("Hello {}", world);
+ println!("Hello {world}", world = world);
+ println!("3 in hex is {:X}", 3);
+ println!("2 + 1 = {:.4}", 3);
+ println!("2 + 1 = {:5.4}", 3);
+ println!("Debug test {:?}", "hello, world");
+ println!("{0:8} {1:>8}", "hello", "world");
+ println!("{1:8} {0:>8}", "hello", "world");
+ println!("{foo:8} {bar:>8}", foo = "hello", bar = "world");
+ println!("{bar:8} {foo:>8}", foo = "hello", bar = "world");
+ println!("{number:>width$}", number = 1, width = 6);
+ println!("{number:>0width$}", number = 1, width = 6);
+ println!("{} of {:b} people know binary, the other half doesn't", 1, 2);
+ println!("10 / 4 is {}", 2.5);
+ println!("2 + 1 = {}", 3);
+
+ // these should throw warnings
+ print!("Hello {}", "world");
+ println!("Hello {} {}", world, "world");
+ println!("Hello {}", "world");
+
+ // positional args don't change the fact
+ // that we're using a literal -- this should
+ // throw a warning
+ println!("{0} {1}", "hello", "world");
+ println!("{1} {0}", "hello", "world");
+
+ // named args shouldn't change anything either
+ println!("{foo} {bar}", foo = "hello", bar = "world");
+ println!("{bar} {foo}", foo = "hello", bar = "world");
+}
diff --git a/src/tools/clippy/tests/ui/print_literal.stderr b/src/tools/clippy/tests/ui/print_literal.stderr
new file mode 100644
index 000000000..72aae0756
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_literal.stderr
@@ -0,0 +1,135 @@
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:25:24
+ |
+LL | print!("Hello {}", "world");
+ | ^^^^^^^
+ |
+ = note: `-D clippy::print-literal` implied by `-D warnings`
+help: try this
+ |
+LL - print!("Hello {}", "world");
+LL + print!("Hello world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:26:36
+ |
+LL | println!("Hello {} {}", world, "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - println!("Hello {} {}", world, "world");
+LL + println!("Hello {} world", world);
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:27:26
+ |
+LL | println!("Hello {}", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - println!("Hello {}", "world");
+LL + println!("Hello world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:32:25
+ |
+LL | println!("{0} {1}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{0} {1}", "hello", "world");
+LL + println!("hello {1}", "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:32:34
+ |
+LL | println!("{0} {1}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{0} {1}", "hello", "world");
+LL + println!("{0} world", "hello");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:33:25
+ |
+LL | println!("{1} {0}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{1} {0}", "hello", "world");
+LL + println!("{1} hello", "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:33:34
+ |
+LL | println!("{1} {0}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{1} {0}", "hello", "world");
+LL + println!("world {0}", "hello");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:36:29
+ |
+LL | println!("{foo} {bar}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{foo} {bar}", foo = "hello", bar = "world");
+LL + println!("hello {bar}", bar = "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:36:44
+ |
+LL | println!("{foo} {bar}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{foo} {bar}", foo = "hello", bar = "world");
+LL + println!("{foo} world", foo = "hello");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:37:29
+ |
+LL | println!("{bar} {foo}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{bar} {foo}", foo = "hello", bar = "world");
+LL + println!("{bar} hello", bar = "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/print_literal.rs:37:44
+ |
+LL | println!("{bar} {foo}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - println!("{bar} {foo}", foo = "hello", bar = "world");
+LL + println!("world {foo}", foo = "hello");
+ |
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/print_stderr.rs b/src/tools/clippy/tests/ui/print_stderr.rs
new file mode 100644
index 000000000..fa07e74a7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_stderr.rs
@@ -0,0 +1,8 @@
+#![warn(clippy::print_stderr)]
+
+fn main() {
+ eprintln!("Hello");
+ println!("This should not do anything");
+ eprint!("World");
+ print!("Nor should this");
+}
diff --git a/src/tools/clippy/tests/ui/print_stderr.stderr b/src/tools/clippy/tests/ui/print_stderr.stderr
new file mode 100644
index 000000000..5af735af6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_stderr.stderr
@@ -0,0 +1,16 @@
+error: use of `eprintln!`
+ --> $DIR/print_stderr.rs:4:5
+ |
+LL | eprintln!("Hello");
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::print-stderr` implied by `-D warnings`
+
+error: use of `eprint!`
+ --> $DIR/print_stderr.rs:6:5
+ |
+LL | eprint!("World");
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/print_stdout_build_script.rs b/src/tools/clippy/tests/ui/print_stdout_build_script.rs
new file mode 100644
index 000000000..997ebef8a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_stdout_build_script.rs
@@ -0,0 +1,12 @@
+// compile-flags: --crate-name=build_script_build
+
+#![warn(clippy::print_stdout)]
+
+fn main() {
+ // Fix #6041
+ //
+ // The `print_stdout` lint shouldn't emit in `build.rs`
+ // as these methods are used for the build script.
+ println!("Hello");
+ print!("Hello");
+}
diff --git a/src/tools/clippy/tests/ui/print_with_newline.rs b/src/tools/clippy/tests/ui/print_with_newline.rs
new file mode 100644
index 000000000..a43a1fc4f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_with_newline.rs
@@ -0,0 +1,52 @@
+// FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
+// // run-rustfix
+
+#![allow(clippy::print_literal)]
+#![warn(clippy::print_with_newline)]
+
+fn main() {
+ print!("Hello\n");
+ print!("Hello {}\n", "world");
+ print!("Hello {} {}\n", "world", "#2");
+ print!("{}\n", 1265);
+ print!("\n");
+
+ // these are all fine
+ print!("");
+ print!("Hello");
+ println!("Hello");
+ println!("Hello\n");
+ println!("Hello {}\n", "world");
+ print!("Issue\n{}", 1265);
+ print!("{}", 1265);
+ print!("\n{}", 1275);
+ print!("\n\n");
+ print!("like eof\n\n");
+ print!("Hello {} {}\n\n", "world", "#2");
+ println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
+ println!("\nbla\n\n"); // #3126
+
+ // Escaping
+ print!("\\n"); // #3514
+ print!("\\\n"); // should fail
+ print!("\\\\n");
+
+ // Raw strings
+ print!(r"\n"); // #3778
+
+ // Literal newlines should also fail
+ print!(
+ "
+"
+ );
+ print!(
+ r"
+"
+ );
+
+ // Don't warn on CRLF (#4208)
+ print!("\r\n");
+ print!("foo\r\n");
+ print!("\\r\n"); //~ ERROR
+ print!("foo\rbar\n") // ~ ERROR
+}
diff --git a/src/tools/clippy/tests/ui/print_with_newline.stderr b/src/tools/clippy/tests/ui/print_with_newline.stderr
new file mode 100644
index 000000000..edbaa1cdf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/print_with_newline.stderr
@@ -0,0 +1,129 @@
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:8:5
+ |
+LL | print!("Hello/n");
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::print-with-newline` implied by `-D warnings`
+help: use `println!` instead
+ |
+LL - print!("Hello/n");
+LL + println!("Hello");
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:9:5
+ |
+LL | print!("Hello {}/n", "world");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("Hello {}/n", "world");
+LL + println!("Hello {}", "world");
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:10:5
+ |
+LL | print!("Hello {} {}/n", "world", "#2");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("Hello {} {}/n", "world", "#2");
+LL + println!("Hello {} {}", "world", "#2");
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:11:5
+ |
+LL | print!("{}/n", 1265);
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("{}/n", 1265);
+LL + println!("{}", 1265);
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:12:5
+ |
+LL | print!("/n");
+ | ^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("/n");
+LL + println!();
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:31:5
+ |
+LL | print!("//n"); // should fail
+ | ^^^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("//n"); // should fail
+LL + println!("/"); // should fail
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:38:5
+ |
+LL | / print!(
+LL | | "
+LL | | "
+LL | | );
+ | |_____^
+ |
+help: use `println!` instead
+ |
+LL ~ println!(
+LL ~ ""
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:42:5
+ |
+LL | / print!(
+LL | | r"
+LL | | "
+LL | | );
+ | |_____^
+ |
+help: use `println!` instead
+ |
+LL ~ println!(
+LL ~ r""
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:50:5
+ |
+LL | print!("/r/n"); //~ ERROR
+ | ^^^^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("/r/n"); //~ ERROR
+LL + println!("/r"); //~ ERROR
+ |
+
+error: using `print!()` with a format string that ends in a single newline
+ --> $DIR/print_with_newline.rs:51:5
+ |
+LL | print!("foo/rbar/n") // ~ ERROR
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `println!` instead
+ |
+LL - print!("foo/rbar/n") // ~ ERROR
+LL + println!("foo/rbar") // ~ ERROR
+ |
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/println_empty_string.fixed b/src/tools/clippy/tests/ui/println_empty_string.fixed
new file mode 100644
index 000000000..976068092
--- /dev/null
+++ b/src/tools/clippy/tests/ui/println_empty_string.fixed
@@ -0,0 +1,18 @@
+// run-rustfix
+#![allow(clippy::match_single_binding)]
+
+fn main() {
+ println!();
+ println!();
+
+ match "a" {
+ _ => println!(),
+ }
+
+ eprintln!();
+ eprintln!();
+
+ match "a" {
+ _ => eprintln!(),
+ }
+}
diff --git a/src/tools/clippy/tests/ui/println_empty_string.rs b/src/tools/clippy/tests/ui/println_empty_string.rs
new file mode 100644
index 000000000..80fdb3e6e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/println_empty_string.rs
@@ -0,0 +1,18 @@
+// run-rustfix
+#![allow(clippy::match_single_binding)]
+
+fn main() {
+ println!();
+ println!("");
+
+ match "a" {
+ _ => println!(""),
+ }
+
+ eprintln!();
+ eprintln!("");
+
+ match "a" {
+ _ => eprintln!(""),
+ }
+}
diff --git a/src/tools/clippy/tests/ui/println_empty_string.stderr b/src/tools/clippy/tests/ui/println_empty_string.stderr
new file mode 100644
index 000000000..17fe4ea74
--- /dev/null
+++ b/src/tools/clippy/tests/ui/println_empty_string.stderr
@@ -0,0 +1,28 @@
+error: using `println!("")`
+ --> $DIR/println_empty_string.rs:6:5
+ |
+LL | println!("");
+ | ^^^^^^^^^^^^ help: replace it with: `println!()`
+ |
+ = note: `-D clippy::println-empty-string` implied by `-D warnings`
+
+error: using `println!("")`
+ --> $DIR/println_empty_string.rs:9:14
+ |
+LL | _ => println!(""),
+ | ^^^^^^^^^^^^ help: replace it with: `println!()`
+
+error: using `eprintln!("")`
+ --> $DIR/println_empty_string.rs:13:5
+ |
+LL | eprintln!("");
+ | ^^^^^^^^^^^^^ help: replace it with: `eprintln!()`
+
+error: using `eprintln!("")`
+ --> $DIR/println_empty_string.rs:16:14
+ |
+LL | _ => eprintln!(""),
+ | ^^^^^^^^^^^^^ help: replace it with: `eprintln!()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/proc_macro.rs b/src/tools/clippy/tests/ui/proc_macro.rs
new file mode 100644
index 000000000..59914b8b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/proc_macro.rs
@@ -0,0 +1,26 @@
+//! Check that we correctly lint procedural macros.
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[allow(dead_code)]
+fn f() {
+ let _x = 3.14;
+}
+
+#[proc_macro]
+pub fn mybangmacro(t: TokenStream) -> TokenStream {
+ t
+}
+
+#[proc_macro_derive(MyDerivedTrait)]
+pub fn myderive(t: TokenStream) -> TokenStream {
+ t
+}
+
+#[proc_macro_attribute]
+pub fn myattribute(t: TokenStream, a: TokenStream) -> TokenStream {
+ t
+}
diff --git a/src/tools/clippy/tests/ui/proc_macro.stderr b/src/tools/clippy/tests/ui/proc_macro.stderr
new file mode 100644
index 000000000..48fd58c9a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/proc_macro.stderr
@@ -0,0 +1,11 @@
+error: approximate value of `f{32, 64}::consts::PI` found
+ --> $DIR/proc_macro.rs:10:14
+ |
+LL | let _x = 3.14;
+ | ^^^^
+ |
+ = note: `#[deny(clippy::approx_constant)]` on by default
+ = help: consider using the constant directly
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs b/src/tools/clippy/tests/ui/ptr_arg.rs
new file mode 100644
index 000000000..fd15001e5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_arg.rs
@@ -0,0 +1,209 @@
+#![feature(lint_reasons)]
+#![allow(unused, clippy::many_single_char_names, clippy::redundant_clone)]
+#![warn(clippy::ptr_arg)]
+
+use std::borrow::Cow;
+use std::path::PathBuf;
+
+fn do_vec(x: &Vec<i64>) {
+ //Nothing here
+}
+
+fn do_vec_mut(x: &mut Vec<i64>) {
+ //Nothing here
+}
+
+fn do_str(x: &String) {
+ //Nothing here either
+}
+
+fn do_str_mut(x: &mut String) {
+ //Nothing here either
+}
+
+fn do_path(x: &PathBuf) {
+ //Nothing here either
+}
+
+fn do_path_mut(x: &mut PathBuf) {
+ //Nothing here either
+}
+
+fn main() {}
+
+trait Foo {
+ type Item;
+ fn do_vec(x: &Vec<i64>);
+ fn do_item(x: &Self::Item);
+}
+
+struct Bar;
+
+// no error, in trait impl (#425)
+impl Foo for Bar {
+ type Item = Vec<u8>;
+ fn do_vec(x: &Vec<i64>) {}
+ fn do_item(x: &Vec<u8>) {}
+}
+
+fn cloned(x: &Vec<u8>) -> Vec<u8> {
+ let e = x.clone();
+ let f = e.clone(); // OK
+ let g = x;
+ let h = g.clone();
+ let i = (e).clone();
+ x.clone()
+}
+
+fn str_cloned(x: &String) -> String {
+ let a = x.clone();
+ let b = x.clone();
+ let c = b.clone();
+ let d = a.clone().clone().clone();
+ x.clone()
+}
+
+fn path_cloned(x: &PathBuf) -> PathBuf {
+ let a = x.clone();
+ let b = x.clone();
+ let c = b.clone();
+ let d = a.clone().clone().clone();
+ x.clone()
+}
+
+fn false_positive_capacity(x: &Vec<u8>, y: &String) {
+ let a = x.capacity();
+ let b = y.clone();
+ let c = y.as_str();
+}
+
+fn false_positive_capacity_too(x: &String) -> String {
+ if x.capacity() > 1024 {
+ panic!("Too large!");
+ }
+ x.clone()
+}
+
+#[allow(dead_code)]
+fn test_cow_with_ref(c: &Cow<[i32]>) {}
+
+fn test_cow(c: Cow<[i32]>) {
+ let _c = c;
+}
+
+trait Foo2 {
+ fn do_string(&self);
+}
+
+// no error for &self references where self is of type String (#2293)
+impl Foo2 for String {
+ fn do_string(&self) {}
+}
+
+// Check that the allow attribute on parameters is honored
+mod issue_5644 {
+ use std::borrow::Cow;
+ use std::path::PathBuf;
+
+ fn allowed(
+ #[allow(clippy::ptr_arg)] _v: &Vec<u32>,
+ #[allow(clippy::ptr_arg)] _s: &String,
+ #[allow(clippy::ptr_arg)] _p: &PathBuf,
+ #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>,
+ #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>,
+ ) {
+ }
+
+ fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {}
+
+ struct S;
+ impl S {
+ fn allowed(
+ #[allow(clippy::ptr_arg)] _v: &Vec<u32>,
+ #[allow(clippy::ptr_arg)] _s: &String,
+ #[allow(clippy::ptr_arg)] _p: &PathBuf,
+ #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>,
+ #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>,
+ ) {
+ }
+ }
+
+ trait T {
+ fn allowed(
+ #[allow(clippy::ptr_arg)] _v: &Vec<u32>,
+ #[allow(clippy::ptr_arg)] _s: &String,
+ #[allow(clippy::ptr_arg)] _p: &PathBuf,
+ #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>,
+ #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>,
+ ) {
+ }
+ }
+}
+
+mod issue6509 {
+ use std::path::PathBuf;
+
+ fn foo_vec(vec: &Vec<u8>) {
+ let _ = vec.clone().pop();
+ let _ = vec.clone().clone();
+ }
+
+ fn foo_path(path: &PathBuf) {
+ let _ = path.clone().pop();
+ let _ = path.clone().clone();
+ }
+
+ fn foo_str(str: &PathBuf) {
+ let _ = str.clone().pop();
+ let _ = str.clone().clone();
+ }
+}
+
+fn mut_vec_slice_methods(v: &mut Vec<u32>) {
+ v.copy_within(1..5, 10);
+}
+
+fn mut_vec_vec_methods(v: &mut Vec<u32>) {
+ v.clear();
+}
+
+fn vec_contains(v: &Vec<u32>) -> bool {
+ [vec![], vec![0]].as_slice().contains(v)
+}
+
+fn fn_requires_vec(v: &Vec<u32>) -> bool {
+ vec_contains(v)
+}
+
+fn impl_fn_requires_vec(v: &Vec<u32>, f: impl Fn(&Vec<u32>)) {
+ f(v);
+}
+
+fn dyn_fn_requires_vec(v: &Vec<u32>, f: &dyn Fn(&Vec<u32>)) {
+ f(v);
+}
+
+// No error for types behind an alias (#7699)
+type A = Vec<u8>;
+fn aliased(a: &A) {}
+
+// Issue #8366
+pub trait Trait {
+ fn f(v: &mut Vec<i32>);
+ fn f2(v: &mut Vec<i32>) {}
+}
+
+// Issue #8463
+fn two_vecs(a: &mut Vec<u32>, b: &mut Vec<u32>) {
+ a.push(0);
+ a.push(0);
+ a.push(0);
+ b.push(1);
+}
+
+// Issue #8495
+fn cow_conditional_to_mut(a: &mut Cow<str>) {
+ if a.is_empty() {
+ a.to_mut().push_str("foo");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr
new file mode 100644
index 000000000..d64b5f454
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_arg.stderr
@@ -0,0 +1,166 @@
+error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:8:14
+ |
+LL | fn do_vec(x: &Vec<i64>) {
+ | ^^^^^^^^^ help: change this to: `&[i64]`
+ |
+ = note: `-D clippy::ptr-arg` implied by `-D warnings`
+
+error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:12:18
+ |
+LL | fn do_vec_mut(x: &mut Vec<i64>) {
+ | ^^^^^^^^^^^^^ help: change this to: `&mut [i64]`
+
+error: writing `&String` instead of `&str` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:16:14
+ |
+LL | fn do_str(x: &String) {
+ | ^^^^^^^ help: change this to: `&str`
+
+error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:20:18
+ |
+LL | fn do_str_mut(x: &mut String) {
+ | ^^^^^^^^^^^ help: change this to: `&mut str`
+
+error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:24:15
+ |
+LL | fn do_path(x: &PathBuf) {
+ | ^^^^^^^^ help: change this to: `&Path`
+
+error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:28:19
+ |
+LL | fn do_path_mut(x: &mut PathBuf) {
+ | ^^^^^^^^^^^^ help: change this to: `&mut Path`
+
+error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:36:18
+ |
+LL | fn do_vec(x: &Vec<i64>);
+ | ^^^^^^^^^ help: change this to: `&[i64]`
+
+error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:49:14
+ |
+LL | fn cloned(x: &Vec<u8>) -> Vec<u8> {
+ | ^^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn cloned(x: &[u8]) -> Vec<u8> {
+LL ~ let e = x.to_owned();
+LL | let f = e.clone(); // OK
+LL | let g = x;
+LL ~ let h = g.to_owned();
+LL | let i = (e).clone();
+LL ~ x.to_owned()
+ |
+
+error: writing `&String` instead of `&str` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:58:18
+ |
+LL | fn str_cloned(x: &String) -> String {
+ | ^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn str_cloned(x: &str) -> String {
+LL ~ let a = x.to_owned();
+LL ~ let b = x.to_owned();
+LL | let c = b.clone();
+LL | let d = a.clone().clone().clone();
+LL ~ x.to_owned()
+ |
+
+error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:66:19
+ |
+LL | fn path_cloned(x: &PathBuf) -> PathBuf {
+ | ^^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn path_cloned(x: &Path) -> PathBuf {
+LL ~ let a = x.to_path_buf();
+LL ~ let b = x.to_path_buf();
+LL | let c = b.clone();
+LL | let d = a.clone().clone().clone();
+LL ~ x.to_path_buf()
+ |
+
+error: writing `&String` instead of `&str` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:74:44
+ |
+LL | fn false_positive_capacity(x: &Vec<u8>, y: &String) {
+ | ^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn false_positive_capacity(x: &Vec<u8>, y: &str) {
+LL | let a = x.capacity();
+LL ~ let b = y.to_owned();
+LL ~ let c = y;
+ |
+
+error: using a reference to `Cow` is not recommended
+ --> $DIR/ptr_arg.rs:88:25
+ |
+LL | fn test_cow_with_ref(c: &Cow<[i32]>) {}
+ | ^^^^^^^^^^^ help: change this to: `&[i32]`
+
+error: writing `&String` instead of `&str` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:117:66
+ |
+LL | fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {}
+ | ^^^^^^^ help: change this to: `&str`
+
+error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:146:21
+ |
+LL | fn foo_vec(vec: &Vec<u8>) {
+ | ^^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn foo_vec(vec: &[u8]) {
+LL ~ let _ = vec.to_owned().pop();
+LL ~ let _ = vec.to_owned().clone();
+ |
+
+error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:151:23
+ |
+LL | fn foo_path(path: &PathBuf) {
+ | ^^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn foo_path(path: &Path) {
+LL ~ let _ = path.to_path_buf().pop();
+LL ~ let _ = path.to_path_buf().clone();
+ |
+
+error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:156:21
+ |
+LL | fn foo_str(str: &PathBuf) {
+ | ^^^^^^^^
+ |
+help: change this to
+ |
+LL ~ fn foo_str(str: &Path) {
+LL ~ let _ = str.to_path_buf().pop();
+LL ~ let _ = str.to_path_buf().clone();
+ |
+
+error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
+ --> $DIR/ptr_arg.rs:162:29
+ |
+LL | fn mut_vec_slice_methods(v: &mut Vec<u32>) {
+ | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]`
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
new file mode 100644
index 000000000..bea6be66a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
@@ -0,0 +1,65 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::ptr_as_ptr)]
+#![feature(custom_inner_attributes)]
+
+extern crate macro_rules;
+
+macro_rules! cast_it {
+ ($ptr: ident) => {
+ $ptr.cast::<i32>()
+ };
+}
+
+fn main() {
+ let ptr: *const u32 = &42_u32;
+ let mut_ptr: *mut u32 = &mut 42_u32;
+
+ let _ = ptr.cast::<i32>();
+ let _ = mut_ptr.cast::<i32>();
+
+ // Make sure the lint can handle the difference in their operator precedences.
+ unsafe {
+ let ptr_ptr: *const *const u32 = &ptr;
+ let _ = (*ptr_ptr).cast::<i32>();
+ }
+
+ // Changes in mutability. Do not lint this.
+ let _ = ptr as *mut i32;
+ let _ = mut_ptr as *const i32;
+
+ // `pointer::cast` cannot perform unsized coercions unlike `as`. Do not lint this.
+ let ptr_of_array: *const [u32; 4] = &[1, 2, 3, 4];
+ let _ = ptr_of_array as *const [u32];
+ let _ = ptr_of_array as *const dyn std::fmt::Debug;
+
+ // Ensure the lint doesn't produce unnecessary turbofish for inferred types.
+ let _: *const i32 = ptr.cast();
+ let _: *mut i32 = mut_ptr.cast();
+
+ // Make sure the lint is triggered inside a macro
+ let _ = cast_it!(ptr);
+
+ // Do not lint inside macros from external crates
+ let _ = macro_rules::ptr_as_ptr_cast!(ptr);
+}
+
+fn _msrv_1_37() {
+ #![clippy::msrv = "1.37"]
+ let ptr: *const u32 = &42_u32;
+ let mut_ptr: *mut u32 = &mut 42_u32;
+
+ // `pointer::cast` was stabilized in 1.38. Do not lint this
+ let _ = ptr as *const i32;
+ let _ = mut_ptr as *mut i32;
+}
+
+fn _msrv_1_38() {
+ #![clippy::msrv = "1.38"]
+ let ptr: *const u32 = &42_u32;
+ let mut_ptr: *mut u32 = &mut 42_u32;
+
+ let _ = ptr.cast::<i32>();
+ let _ = mut_ptr.cast::<i32>();
+}
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.rs b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
new file mode 100644
index 000000000..ca2616b00
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
@@ -0,0 +1,65 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::ptr_as_ptr)]
+#![feature(custom_inner_attributes)]
+
+extern crate macro_rules;
+
+macro_rules! cast_it {
+ ($ptr: ident) => {
+ $ptr as *const i32
+ };
+}
+
+fn main() {
+ let ptr: *const u32 = &42_u32;
+ let mut_ptr: *mut u32 = &mut 42_u32;
+
+ let _ = ptr as *const i32;
+ let _ = mut_ptr as *mut i32;
+
+ // Make sure the lint can handle the difference in their operator precedences.
+ unsafe {
+ let ptr_ptr: *const *const u32 = &ptr;
+ let _ = *ptr_ptr as *const i32;
+ }
+
+ // Changes in mutability. Do not lint this.
+ let _ = ptr as *mut i32;
+ let _ = mut_ptr as *const i32;
+
+ // `pointer::cast` cannot perform unsized coercions unlike `as`. Do not lint this.
+ let ptr_of_array: *const [u32; 4] = &[1, 2, 3, 4];
+ let _ = ptr_of_array as *const [u32];
+ let _ = ptr_of_array as *const dyn std::fmt::Debug;
+
+ // Ensure the lint doesn't produce unnecessary turbofish for inferred types.
+ let _: *const i32 = ptr as *const _;
+ let _: *mut i32 = mut_ptr as _;
+
+ // Make sure the lint is triggered inside a macro
+ let _ = cast_it!(ptr);
+
+ // Do not lint inside macros from external crates
+ let _ = macro_rules::ptr_as_ptr_cast!(ptr);
+}
+
+fn _msrv_1_37() {
+ #![clippy::msrv = "1.37"]
+ let ptr: *const u32 = &42_u32;
+ let mut_ptr: *mut u32 = &mut 42_u32;
+
+ // `pointer::cast` was stabilized in 1.38. Do not lint this
+ let _ = ptr as *const i32;
+ let _ = mut_ptr as *mut i32;
+}
+
+fn _msrv_1_38() {
+ #![clippy::msrv = "1.38"]
+ let ptr: *const u32 = &42_u32;
+ let mut_ptr: *mut u32 = &mut 42_u32;
+
+ let _ = ptr as *const i32;
+ let _ = mut_ptr as *mut i32;
+}
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
new file mode 100644
index 000000000..c58c55cfd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
@@ -0,0 +1,57 @@
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:19:13
+ |
+LL | let _ = ptr as *const i32;
+ | ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
+ |
+ = note: `-D clippy::ptr-as-ptr` implied by `-D warnings`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:20:13
+ |
+LL | let _ = mut_ptr as *mut i32;
+ | ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:25:17
+ |
+LL | let _ = *ptr_ptr as *const i32;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `(*ptr_ptr).cast::<i32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:38:25
+ |
+LL | let _: *const i32 = ptr as *const _;
+ | ^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:39:23
+ |
+LL | let _: *mut i32 = mut_ptr as _;
+ | ^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:11:9
+ |
+LL | $ptr as *const i32
+ | ^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `$ptr.cast::<i32>()`
+...
+LL | let _ = cast_it!(ptr);
+ | ------------- in this macro invocation
+ |
+ = note: this error originates in the macro `cast_it` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:63:13
+ |
+LL | let _ = ptr as *const i32;
+ | ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:64:13
+ |
+LL | let _ = mut_ptr as *mut i32;
+ | ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ptr_eq.fixed b/src/tools/clippy/tests/ui/ptr_eq.fixed
new file mode 100644
index 000000000..209081e6e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_eq.fixed
@@ -0,0 +1,38 @@
+// run-rustfix
+#![warn(clippy::ptr_eq)]
+
+macro_rules! mac {
+ ($a:expr, $b:expr) => {
+ $a as *const _ as usize == $b as *const _ as usize
+ };
+}
+
+macro_rules! another_mac {
+ ($a:expr, $b:expr) => {
+ $a as *const _ == $b as *const _
+ };
+}
+
+fn main() {
+ let a = &[1, 2, 3];
+ let b = &[1, 2, 3];
+
+ let _ = std::ptr::eq(a, b);
+ let _ = std::ptr::eq(a, b);
+ let _ = a.as_ptr() == b as *const _;
+ let _ = a.as_ptr() == b.as_ptr();
+
+ // Do not lint
+
+ let _ = mac!(a, b);
+ let _ = another_mac!(a, b);
+
+ let a = &mut [1, 2, 3];
+ let b = &mut [1, 2, 3];
+
+ let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _;
+ let _ = a.as_mut_ptr() == b.as_mut_ptr();
+
+ let _ = a == b;
+ let _ = core::ptr::eq(a, b);
+}
diff --git a/src/tools/clippy/tests/ui/ptr_eq.rs b/src/tools/clippy/tests/ui/ptr_eq.rs
new file mode 100644
index 000000000..691628708
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_eq.rs
@@ -0,0 +1,38 @@
+// run-rustfix
+#![warn(clippy::ptr_eq)]
+
+macro_rules! mac {
+ ($a:expr, $b:expr) => {
+ $a as *const _ as usize == $b as *const _ as usize
+ };
+}
+
+macro_rules! another_mac {
+ ($a:expr, $b:expr) => {
+ $a as *const _ == $b as *const _
+ };
+}
+
+fn main() {
+ let a = &[1, 2, 3];
+ let b = &[1, 2, 3];
+
+ let _ = a as *const _ as usize == b as *const _ as usize;
+ let _ = a as *const _ == b as *const _;
+ let _ = a.as_ptr() == b as *const _;
+ let _ = a.as_ptr() == b.as_ptr();
+
+ // Do not lint
+
+ let _ = mac!(a, b);
+ let _ = another_mac!(a, b);
+
+ let a = &mut [1, 2, 3];
+ let b = &mut [1, 2, 3];
+
+ let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _;
+ let _ = a.as_mut_ptr() == b.as_mut_ptr();
+
+ let _ = a == b;
+ let _ = core::ptr::eq(a, b);
+}
diff --git a/src/tools/clippy/tests/ui/ptr_eq.stderr b/src/tools/clippy/tests/ui/ptr_eq.stderr
new file mode 100644
index 000000000..45d8c6038
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_eq.stderr
@@ -0,0 +1,16 @@
+error: use `std::ptr::eq` when comparing raw pointers
+ --> $DIR/ptr_eq.rs:20:13
+ |
+LL | let _ = a as *const _ as usize == b as *const _ as usize;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a, b)`
+ |
+ = note: `-D clippy::ptr-eq` implied by `-D warnings`
+
+error: use `std::ptr::eq` when comparing raw pointers
+ --> $DIR/ptr_eq.rs:21:13
+ |
+LL | let _ = a as *const _ == b as *const _;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a, b)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ptr_offset_with_cast.fixed b/src/tools/clippy/tests/ui/ptr_offset_with_cast.fixed
new file mode 100644
index 000000000..718e391e8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_offset_with_cast.fixed
@@ -0,0 +1,20 @@
+// run-rustfix
+
+fn main() {
+ let vec = vec![b'a', b'b', b'c'];
+ let ptr = vec.as_ptr();
+
+ let offset_u8 = 1_u8;
+ let offset_usize = 1_usize;
+ let offset_isize = 1_isize;
+
+ unsafe {
+ let _ = ptr.add(offset_usize);
+ let _ = ptr.offset(offset_isize as isize);
+ let _ = ptr.offset(offset_u8 as isize);
+
+ let _ = ptr.wrapping_add(offset_usize);
+ let _ = ptr.wrapping_offset(offset_isize as isize);
+ let _ = ptr.wrapping_offset(offset_u8 as isize);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/ptr_offset_with_cast.rs b/src/tools/clippy/tests/ui/ptr_offset_with_cast.rs
new file mode 100644
index 000000000..f613742c7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_offset_with_cast.rs
@@ -0,0 +1,20 @@
+// run-rustfix
+
+fn main() {
+ let vec = vec![b'a', b'b', b'c'];
+ let ptr = vec.as_ptr();
+
+ let offset_u8 = 1_u8;
+ let offset_usize = 1_usize;
+ let offset_isize = 1_isize;
+
+ unsafe {
+ let _ = ptr.offset(offset_usize as isize);
+ let _ = ptr.offset(offset_isize as isize);
+ let _ = ptr.offset(offset_u8 as isize);
+
+ let _ = ptr.wrapping_offset(offset_usize as isize);
+ let _ = ptr.wrapping_offset(offset_isize as isize);
+ let _ = ptr.wrapping_offset(offset_u8 as isize);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/ptr_offset_with_cast.stderr b/src/tools/clippy/tests/ui/ptr_offset_with_cast.stderr
new file mode 100644
index 000000000..fd45224ca
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ptr_offset_with_cast.stderr
@@ -0,0 +1,16 @@
+error: use of `offset` with a `usize` casted to an `isize`
+ --> $DIR/ptr_offset_with_cast.rs:12:17
+ |
+LL | let _ = ptr.offset(offset_usize as isize);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr.add(offset_usize)`
+ |
+ = note: `-D clippy::ptr-offset-with-cast` implied by `-D warnings`
+
+error: use of `wrapping_offset` with a `usize` casted to an `isize`
+ --> $DIR/ptr_offset_with_cast.rs:16:17
+ |
+LL | let _ = ptr.wrapping_offset(offset_usize as isize);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr.wrapping_add(offset_usize)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/pub_use.rs b/src/tools/clippy/tests/ui/pub_use.rs
new file mode 100644
index 000000000..65542bede
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pub_use.rs
@@ -0,0 +1,14 @@
+#![warn(clippy::pub_use)]
+#![allow(unused_imports)]
+#![no_main]
+
+pub mod outer {
+ mod inner {
+ pub struct Test {}
+ }
+ // should be linted
+ pub use inner::Test;
+}
+
+// should not be linted
+use std::fmt;
diff --git a/src/tools/clippy/tests/ui/pub_use.stderr b/src/tools/clippy/tests/ui/pub_use.stderr
new file mode 100644
index 000000000..9ab710df8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/pub_use.stderr
@@ -0,0 +1,11 @@
+error: using `pub use`
+ --> $DIR/pub_use.rs:10:5
+ |
+LL | pub use inner::Test;
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::pub-use` implied by `-D warnings`
+ = help: move the exported item to a public module instead
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/question_mark.fixed b/src/tools/clippy/tests/ui/question_mark.fixed
new file mode 100644
index 000000000..c4c9c8214
--- /dev/null
+++ b/src/tools/clippy/tests/ui/question_mark.fixed
@@ -0,0 +1,210 @@
+// run-rustfix
+#![allow(unreachable_code)]
+#![allow(dead_code)]
+#![allow(clippy::unnecessary_wraps)]
+
+fn some_func(a: Option<u32>) -> Option<u32> {
+ a?;
+
+ a
+}
+
+fn some_other_func(a: Option<u32>) -> Option<u32> {
+ if a.is_none() {
+ return None;
+ } else {
+ return Some(0);
+ }
+ unreachable!()
+}
+
+pub enum SeemsOption<T> {
+ Some(T),
+ None,
+}
+
+impl<T> SeemsOption<T> {
+ pub fn is_none(&self) -> bool {
+ match *self {
+ SeemsOption::None => true,
+ SeemsOption::Some(_) => false,
+ }
+ }
+}
+
+fn returns_something_similar_to_option(a: SeemsOption<u32>) -> SeemsOption<u32> {
+ if a.is_none() {
+ return SeemsOption::None;
+ }
+
+ a
+}
+
+pub struct CopyStruct {
+ pub opt: Option<u32>,
+}
+
+impl CopyStruct {
+ #[rustfmt::skip]
+ pub fn func(&self) -> Option<u32> {
+ (self.opt)?;
+
+ self.opt?;
+
+ let _ = Some(self.opt?);
+
+ let _ = self.opt?;
+
+ self.opt
+ }
+}
+
+#[derive(Clone)]
+pub struct MoveStruct {
+ pub opt: Option<Vec<u32>>,
+}
+
+impl MoveStruct {
+ pub fn ref_func(&self) -> Option<Vec<u32>> {
+ self.opt.as_ref()?;
+
+ self.opt.clone()
+ }
+
+ pub fn mov_func_reuse(self) -> Option<Vec<u32>> {
+ self.opt.as_ref()?;
+
+ self.opt
+ }
+
+ pub fn mov_func_no_use(self) -> Option<Vec<u32>> {
+ self.opt.as_ref()?;
+ Some(Vec::new())
+ }
+
+ pub fn if_let_ref_func(self) -> Option<Vec<u32>> {
+ let v: &Vec<_> = self.opt.as_ref()?;
+
+ Some(v.clone())
+ }
+
+ pub fn if_let_mov_func(self) -> Option<Vec<u32>> {
+ let v = self.opt?;
+
+ Some(v)
+ }
+}
+
+fn func() -> Option<i32> {
+ fn f() -> Option<String> {
+ Some(String::new())
+ }
+
+ f()?;
+
+ Some(0)
+}
+
+fn func_returning_result() -> Result<i32, i32> {
+ Ok(1)
+}
+
+fn result_func(x: Result<i32, i32>) -> Result<i32, i32> {
+ let _ = x?;
+
+ x?;
+
+ // No warning
+ let y = if let Ok(x) = x {
+ x
+ } else {
+ return Err(0);
+ };
+
+ // issue #7859
+ // no warning
+ let _ = if let Ok(x) = func_returning_result() {
+ x
+ } else {
+ return Err(0);
+ };
+
+ // no warning
+ if func_returning_result().is_err() {
+ return func_returning_result();
+ }
+
+ Ok(y)
+}
+
+// see issue #8019
+pub enum NotOption {
+ None,
+ First,
+ AfterFirst,
+}
+
+fn obj(_: i32) -> Result<(), NotOption> {
+ Err(NotOption::First)
+}
+
+fn f() -> NotOption {
+ if obj(2).is_err() {
+ return NotOption::None;
+ }
+ NotOption::First
+}
+
+fn do_something() {}
+
+fn err_immediate_return() -> Result<i32, i32> {
+ func_returning_result()?;
+ Ok(1)
+}
+
+fn err_immediate_return_and_do_something() -> Result<i32, i32> {
+ func_returning_result()?;
+ do_something();
+ Ok(1)
+}
+
+// No warning
+fn no_immediate_return() -> Result<i32, i32> {
+ if let Err(err) = func_returning_result() {
+ do_something();
+ return Err(err);
+ }
+ Ok(1)
+}
+
+// No warning
+fn mixed_result_and_option() -> Option<i32> {
+ if let Err(err) = func_returning_result() {
+ return Some(err);
+ }
+ None
+}
+
+// No warning
+fn else_if_check() -> Result<i32, i32> {
+ if true {
+ Ok(1)
+ } else if let Err(e) = func_returning_result() {
+ Err(e)
+ } else {
+ Err(-1)
+ }
+}
+
+// No warning
+#[allow(clippy::manual_map)]
+#[rustfmt::skip]
+fn option_map() -> Option<bool> {
+ if let Some(a) = Some(false) {
+ Some(!a)
+ } else {
+ None
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/question_mark.rs b/src/tools/clippy/tests/ui/question_mark.rs
new file mode 100644
index 000000000..cdbc7b160
--- /dev/null
+++ b/src/tools/clippy/tests/ui/question_mark.rs
@@ -0,0 +1,246 @@
+// run-rustfix
+#![allow(unreachable_code)]
+#![allow(dead_code)]
+#![allow(clippy::unnecessary_wraps)]
+
+fn some_func(a: Option<u32>) -> Option<u32> {
+ if a.is_none() {
+ return None;
+ }
+
+ a
+}
+
+fn some_other_func(a: Option<u32>) -> Option<u32> {
+ if a.is_none() {
+ return None;
+ } else {
+ return Some(0);
+ }
+ unreachable!()
+}
+
+pub enum SeemsOption<T> {
+ Some(T),
+ None,
+}
+
+impl<T> SeemsOption<T> {
+ pub fn is_none(&self) -> bool {
+ match *self {
+ SeemsOption::None => true,
+ SeemsOption::Some(_) => false,
+ }
+ }
+}
+
+fn returns_something_similar_to_option(a: SeemsOption<u32>) -> SeemsOption<u32> {
+ if a.is_none() {
+ return SeemsOption::None;
+ }
+
+ a
+}
+
+pub struct CopyStruct {
+ pub opt: Option<u32>,
+}
+
+impl CopyStruct {
+ #[rustfmt::skip]
+ pub fn func(&self) -> Option<u32> {
+ if (self.opt).is_none() {
+ return None;
+ }
+
+ if self.opt.is_none() {
+ return None
+ }
+
+ let _ = if self.opt.is_none() {
+ return None;
+ } else {
+ self.opt
+ };
+
+ let _ = if let Some(x) = self.opt {
+ x
+ } else {
+ return None;
+ };
+
+ self.opt
+ }
+}
+
+#[derive(Clone)]
+pub struct MoveStruct {
+ pub opt: Option<Vec<u32>>,
+}
+
+impl MoveStruct {
+ pub fn ref_func(&self) -> Option<Vec<u32>> {
+ if self.opt.is_none() {
+ return None;
+ }
+
+ self.opt.clone()
+ }
+
+ pub fn mov_func_reuse(self) -> Option<Vec<u32>> {
+ if self.opt.is_none() {
+ return None;
+ }
+
+ self.opt
+ }
+
+ pub fn mov_func_no_use(self) -> Option<Vec<u32>> {
+ if self.opt.is_none() {
+ return None;
+ }
+ Some(Vec::new())
+ }
+
+ pub fn if_let_ref_func(self) -> Option<Vec<u32>> {
+ let v: &Vec<_> = if let Some(ref v) = self.opt {
+ v
+ } else {
+ return None;
+ };
+
+ Some(v.clone())
+ }
+
+ pub fn if_let_mov_func(self) -> Option<Vec<u32>> {
+ let v = if let Some(v) = self.opt {
+ v
+ } else {
+ return None;
+ };
+
+ Some(v)
+ }
+}
+
+fn func() -> Option<i32> {
+ fn f() -> Option<String> {
+ Some(String::new())
+ }
+
+ if f().is_none() {
+ return None;
+ }
+
+ Some(0)
+}
+
+fn func_returning_result() -> Result<i32, i32> {
+ Ok(1)
+}
+
+fn result_func(x: Result<i32, i32>) -> Result<i32, i32> {
+ let _ = if let Ok(x) = x { x } else { return x };
+
+ if x.is_err() {
+ return x;
+ }
+
+ // No warning
+ let y = if let Ok(x) = x {
+ x
+ } else {
+ return Err(0);
+ };
+
+ // issue #7859
+ // no warning
+ let _ = if let Ok(x) = func_returning_result() {
+ x
+ } else {
+ return Err(0);
+ };
+
+ // no warning
+ if func_returning_result().is_err() {
+ return func_returning_result();
+ }
+
+ Ok(y)
+}
+
+// see issue #8019
+pub enum NotOption {
+ None,
+ First,
+ AfterFirst,
+}
+
+fn obj(_: i32) -> Result<(), NotOption> {
+ Err(NotOption::First)
+}
+
+fn f() -> NotOption {
+ if obj(2).is_err() {
+ return NotOption::None;
+ }
+ NotOption::First
+}
+
+fn do_something() {}
+
+fn err_immediate_return() -> Result<i32, i32> {
+ if let Err(err) = func_returning_result() {
+ return Err(err);
+ }
+ Ok(1)
+}
+
+fn err_immediate_return_and_do_something() -> Result<i32, i32> {
+ if let Err(err) = func_returning_result() {
+ return Err(err);
+ }
+ do_something();
+ Ok(1)
+}
+
+// No warning
+fn no_immediate_return() -> Result<i32, i32> {
+ if let Err(err) = func_returning_result() {
+ do_something();
+ return Err(err);
+ }
+ Ok(1)
+}
+
+// No warning
+fn mixed_result_and_option() -> Option<i32> {
+ if let Err(err) = func_returning_result() {
+ return Some(err);
+ }
+ None
+}
+
+// No warning
+fn else_if_check() -> Result<i32, i32> {
+ if true {
+ Ok(1)
+ } else if let Err(e) = func_returning_result() {
+ Err(e)
+ } else {
+ Err(-1)
+ }
+}
+
+// No warning
+#[allow(clippy::manual_map)]
+#[rustfmt::skip]
+fn option_map() -> Option<bool> {
+ if let Some(a) = Some(false) {
+ Some(!a)
+ } else {
+ None
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/question_mark.stderr b/src/tools/clippy/tests/ui/question_mark.stderr
new file mode 100644
index 000000000..1b6cd524b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/question_mark.stderr
@@ -0,0 +1,134 @@
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:7:5
+ |
+LL | / if a.is_none() {
+LL | | return None;
+LL | | }
+ | |_____^ help: replace it with: `a?;`
+ |
+ = note: `-D clippy::question-mark` implied by `-D warnings`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:52:9
+ |
+LL | / if (self.opt).is_none() {
+LL | | return None;
+LL | | }
+ | |_________^ help: replace it with: `(self.opt)?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:56:9
+ |
+LL | / if self.opt.is_none() {
+LL | | return None
+LL | | }
+ | |_________^ help: replace it with: `self.opt?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:60:17
+ |
+LL | let _ = if self.opt.is_none() {
+ | _________________^
+LL | | return None;
+LL | | } else {
+LL | | self.opt
+LL | | };
+ | |_________^ help: replace it with: `Some(self.opt?)`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:66:17
+ |
+LL | let _ = if let Some(x) = self.opt {
+ | _________________^
+LL | | x
+LL | | } else {
+LL | | return None;
+LL | | };
+ | |_________^ help: replace it with: `self.opt?`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:83:9
+ |
+LL | / if self.opt.is_none() {
+LL | | return None;
+LL | | }
+ | |_________^ help: replace it with: `self.opt.as_ref()?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:91:9
+ |
+LL | / if self.opt.is_none() {
+LL | | return None;
+LL | | }
+ | |_________^ help: replace it with: `self.opt.as_ref()?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:99:9
+ |
+LL | / if self.opt.is_none() {
+LL | | return None;
+LL | | }
+ | |_________^ help: replace it with: `self.opt.as_ref()?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:106:26
+ |
+LL | let v: &Vec<_> = if let Some(ref v) = self.opt {
+ | __________________________^
+LL | | v
+LL | | } else {
+LL | | return None;
+LL | | };
+ | |_________^ help: replace it with: `self.opt.as_ref()?`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:116:17
+ |
+LL | let v = if let Some(v) = self.opt {
+ | _________________^
+LL | | v
+LL | | } else {
+LL | | return None;
+LL | | };
+ | |_________^ help: replace it with: `self.opt?`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:131:5
+ |
+LL | / if f().is_none() {
+LL | | return None;
+LL | | }
+ | |_____^ help: replace it with: `f()?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:143:13
+ |
+LL | let _ = if let Ok(x) = x { x } else { return x };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `x?`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:145:5
+ |
+LL | / if x.is_err() {
+LL | | return x;
+LL | | }
+ | |_____^ help: replace it with: `x?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:193:5
+ |
+LL | / if let Err(err) = func_returning_result() {
+LL | | return Err(err);
+LL | | }
+ | |_____^ help: replace it with: `func_returning_result()?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/question_mark.rs:200:5
+ |
+LL | / if let Err(err) = func_returning_result() {
+LL | | return Err(err);
+LL | | }
+ | |_____^ help: replace it with: `func_returning_result()?;`
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/range.rs b/src/tools/clippy/tests/ui/range.rs
new file mode 100644
index 000000000..628282509
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range.rs
@@ -0,0 +1,16 @@
+#[warn(clippy::range_zip_with_len)]
+fn main() {
+ let v1 = vec![1, 2, 3];
+ let v2 = vec![4, 5];
+ let _x = v1.iter().zip(0..v1.len());
+ let _y = v1.iter().zip(0..v2.len()); // No error
+}
+
+#[allow(unused)]
+fn no_panic_with_fake_range_types() {
+ struct Range {
+ foo: i32,
+ }
+
+ let _ = Range { foo: 0 };
+}
diff --git a/src/tools/clippy/tests/ui/range.stderr b/src/tools/clippy/tests/ui/range.stderr
new file mode 100644
index 000000000..dcb506137
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range.stderr
@@ -0,0 +1,10 @@
+error: it is more idiomatic to use `v1.iter().enumerate()`
+ --> $DIR/range.rs:5:14
+ |
+LL | let _x = v1.iter().zip(0..v1.len());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::range-zip-with-len` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/range_contains.fixed b/src/tools/clippy/tests/ui/range_contains.fixed
new file mode 100644
index 000000000..85d021b2f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range_contains.fixed
@@ -0,0 +1,64 @@
+// run-rustfix
+
+#[warn(clippy::manual_range_contains)]
+#[allow(unused)]
+#[allow(clippy::no_effect)]
+#[allow(clippy::short_circuit_statement)]
+#[allow(clippy::unnecessary_operation)]
+fn main() {
+ let x = 9_i32;
+
+ // order shouldn't matter
+ (8..12).contains(&x);
+ (21..42).contains(&x);
+ (1..100).contains(&x);
+
+ // also with inclusive ranges
+ (9..=99).contains(&x);
+ (1..=33).contains(&x);
+ (1..=999).contains(&x);
+
+ // and the outside
+ !(8..12).contains(&x);
+ !(21..42).contains(&x);
+ !(1..100).contains(&x);
+
+ // also with the outside of inclusive ranges
+ !(9..=99).contains(&x);
+ !(1..=33).contains(&x);
+ !(1..=999).contains(&x);
+
+ // not a range.contains
+ x > 8 && x < 12; // lower bound not inclusive
+ x < 8 && x <= 12; // same direction
+ x >= 12 && 12 >= x; // same bounds
+ x < 8 && x > 12; // wrong direction
+
+ x <= 8 || x >= 12;
+ x >= 8 || x >= 12;
+ x < 12 || 12 < x;
+ x >= 8 || x <= 12;
+
+ // Fix #6315
+ let y = 3.;
+ (0. ..1.).contains(&y);
+ !(0. ..=1.).contains(&y);
+
+ // handle negatives #8721
+ (-10..=10).contains(&x);
+ x >= 10 && x <= -10;
+ (-3. ..=3.).contains(&y);
+ y >= 3. && y <= -3.;
+
+ // Fix #8745
+ let z = 42;
+ (0..=10).contains(&x) && (0..=10).contains(&z);
+ !(0..10).contains(&x) || !(0..10).contains(&z);
+ // Make sure operators in parens don't give a breaking suggestion
+ ((x % 2 == 0) || (x < 0)) || (x >= 10);
+}
+
+// Fix #6373
+pub const fn in_range(a: i32) -> bool {
+ 3 <= a && a <= 20
+}
diff --git a/src/tools/clippy/tests/ui/range_contains.rs b/src/tools/clippy/tests/ui/range_contains.rs
new file mode 100644
index 000000000..9a7a75dc1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range_contains.rs
@@ -0,0 +1,64 @@
+// run-rustfix
+
+#[warn(clippy::manual_range_contains)]
+#[allow(unused)]
+#[allow(clippy::no_effect)]
+#[allow(clippy::short_circuit_statement)]
+#[allow(clippy::unnecessary_operation)]
+fn main() {
+ let x = 9_i32;
+
+ // order shouldn't matter
+ x >= 8 && x < 12;
+ x < 42 && x >= 21;
+ 100 > x && 1 <= x;
+
+ // also with inclusive ranges
+ x >= 9 && x <= 99;
+ x <= 33 && x >= 1;
+ 999 >= x && 1 <= x;
+
+ // and the outside
+ x < 8 || x >= 12;
+ x >= 42 || x < 21;
+ 100 <= x || 1 > x;
+
+ // also with the outside of inclusive ranges
+ x < 9 || x > 99;
+ x > 33 || x < 1;
+ 999 < x || 1 > x;
+
+ // not a range.contains
+ x > 8 && x < 12; // lower bound not inclusive
+ x < 8 && x <= 12; // same direction
+ x >= 12 && 12 >= x; // same bounds
+ x < 8 && x > 12; // wrong direction
+
+ x <= 8 || x >= 12;
+ x >= 8 || x >= 12;
+ x < 12 || 12 < x;
+ x >= 8 || x <= 12;
+
+ // Fix #6315
+ let y = 3.;
+ y >= 0. && y < 1.;
+ y < 0. || y > 1.;
+
+ // handle negatives #8721
+ x >= -10 && x <= 10;
+ x >= 10 && x <= -10;
+ y >= -3. && y <= 3.;
+ y >= 3. && y <= -3.;
+
+ // Fix #8745
+ let z = 42;
+ (x >= 0) && (x <= 10) && (z >= 0) && (z <= 10);
+ (x < 0) || (x >= 10) || (z < 0) || (z >= 10);
+ // Make sure operators in parens don't give a breaking suggestion
+ ((x % 2 == 0) || (x < 0)) || (x >= 10);
+}
+
+// Fix #6373
+pub const fn in_range(a: i32) -> bool {
+ 3 <= a && a <= 20
+}
diff --git a/src/tools/clippy/tests/ui/range_contains.stderr b/src/tools/clippy/tests/ui/range_contains.stderr
new file mode 100644
index 000000000..936859db5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range_contains.stderr
@@ -0,0 +1,124 @@
+error: manual `Range::contains` implementation
+ --> $DIR/range_contains.rs:12:5
+ |
+LL | x >= 8 && x < 12;
+ | ^^^^^^^^^^^^^^^^ help: use: `(8..12).contains(&x)`
+ |
+ = note: `-D clippy::manual-range-contains` implied by `-D warnings`
+
+error: manual `Range::contains` implementation
+ --> $DIR/range_contains.rs:13:5
+ |
+LL | x < 42 && x >= 21;
+ | ^^^^^^^^^^^^^^^^^ help: use: `(21..42).contains(&x)`
+
+error: manual `Range::contains` implementation
+ --> $DIR/range_contains.rs:14:5
+ |
+LL | 100 > x && 1 <= x;
+ | ^^^^^^^^^^^^^^^^^ help: use: `(1..100).contains(&x)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:17:5
+ |
+LL | x >= 9 && x <= 99;
+ | ^^^^^^^^^^^^^^^^^ help: use: `(9..=99).contains(&x)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:18:5
+ |
+LL | x <= 33 && x >= 1;
+ | ^^^^^^^^^^^^^^^^^ help: use: `(1..=33).contains(&x)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:19:5
+ |
+LL | 999 >= x && 1 <= x;
+ | ^^^^^^^^^^^^^^^^^^ help: use: `(1..=999).contains(&x)`
+
+error: manual `!Range::contains` implementation
+ --> $DIR/range_contains.rs:22:5
+ |
+LL | x < 8 || x >= 12;
+ | ^^^^^^^^^^^^^^^^ help: use: `!(8..12).contains(&x)`
+
+error: manual `!Range::contains` implementation
+ --> $DIR/range_contains.rs:23:5
+ |
+LL | x >= 42 || x < 21;
+ | ^^^^^^^^^^^^^^^^^ help: use: `!(21..42).contains(&x)`
+
+error: manual `!Range::contains` implementation
+ --> $DIR/range_contains.rs:24:5
+ |
+LL | 100 <= x || 1 > x;
+ | ^^^^^^^^^^^^^^^^^ help: use: `!(1..100).contains(&x)`
+
+error: manual `!RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:27:5
+ |
+LL | x < 9 || x > 99;
+ | ^^^^^^^^^^^^^^^ help: use: `!(9..=99).contains(&x)`
+
+error: manual `!RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:28:5
+ |
+LL | x > 33 || x < 1;
+ | ^^^^^^^^^^^^^^^ help: use: `!(1..=33).contains(&x)`
+
+error: manual `!RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:29:5
+ |
+LL | 999 < x || 1 > x;
+ | ^^^^^^^^^^^^^^^^ help: use: `!(1..=999).contains(&x)`
+
+error: manual `Range::contains` implementation
+ --> $DIR/range_contains.rs:44:5
+ |
+LL | y >= 0. && y < 1.;
+ | ^^^^^^^^^^^^^^^^^ help: use: `(0. ..1.).contains(&y)`
+
+error: manual `!RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:45:5
+ |
+LL | y < 0. || y > 1.;
+ | ^^^^^^^^^^^^^^^^ help: use: `!(0. ..=1.).contains(&y)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:48:5
+ |
+LL | x >= -10 && x <= 10;
+ | ^^^^^^^^^^^^^^^^^^^ help: use: `(-10..=10).contains(&x)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:50:5
+ |
+LL | y >= -3. && y <= 3.;
+ | ^^^^^^^^^^^^^^^^^^^ help: use: `(-3. ..=3.).contains(&y)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:55:30
+ |
+LL | (x >= 0) && (x <= 10) && (z >= 0) && (z <= 10);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: use: `(0..=10).contains(&z)`
+
+error: manual `RangeInclusive::contains` implementation
+ --> $DIR/range_contains.rs:55:5
+ |
+LL | (x >= 0) && (x <= 10) && (z >= 0) && (z <= 10);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: use: `(0..=10).contains(&x)`
+
+error: manual `!Range::contains` implementation
+ --> $DIR/range_contains.rs:56:29
+ |
+LL | (x < 0) || (x >= 10) || (z < 0) || (z >= 10);
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `!(0..10).contains(&z)`
+
+error: manual `!Range::contains` implementation
+ --> $DIR/range_contains.rs:56:5
+ |
+LL | (x < 0) || (x >= 10) || (z < 0) || (z >= 10);
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `!(0..10).contains(&x)`
+
+error: aborting due to 20 previous errors
+
diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.fixed b/src/tools/clippy/tests/ui/range_plus_minus_one.fixed
new file mode 100644
index 000000000..40d7791df
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range_plus_minus_one.fixed
@@ -0,0 +1,42 @@
+// run-rustfix
+
+#![allow(unused_parens)]
+#![allow(clippy::iter_with_drain)]
+fn f() -> usize {
+ 42
+}
+
+#[warn(clippy::range_plus_one)]
+#[warn(clippy::range_minus_one)]
+fn main() {
+ for _ in 0..2 {}
+ for _ in 0..=2 {}
+
+ for _ in 0..=3 {}
+ for _ in 0..=3 + 1 {}
+
+ for _ in 0..=5 {}
+ for _ in 0..=1 + 5 {}
+
+ for _ in 1..=1 {}
+ for _ in 1..=1 + 1 {}
+
+ for _ in 0..13 + 13 {}
+ for _ in 0..=13 - 7 {}
+
+ for _ in 0..=f() {}
+ for _ in 0..=(1 + f()) {}
+
+ let _ = ..11 - 1;
+ let _ = ..11;
+ let _ = ..11;
+ let _ = (1..=11);
+ let _ = ((f() + 1)..=f());
+
+ const ONE: usize = 1;
+ // integer consts are linted, too
+ for _ in 1..=ONE {}
+
+ let mut vec: Vec<()> = std::vec::Vec::new();
+ vec.drain(..);
+}
diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.rs b/src/tools/clippy/tests/ui/range_plus_minus_one.rs
new file mode 100644
index 000000000..a8ddd9b5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range_plus_minus_one.rs
@@ -0,0 +1,42 @@
+// run-rustfix
+
+#![allow(unused_parens)]
+#![allow(clippy::iter_with_drain)]
+fn f() -> usize {
+ 42
+}
+
+#[warn(clippy::range_plus_one)]
+#[warn(clippy::range_minus_one)]
+fn main() {
+ for _ in 0..2 {}
+ for _ in 0..=2 {}
+
+ for _ in 0..3 + 1 {}
+ for _ in 0..=3 + 1 {}
+
+ for _ in 0..1 + 5 {}
+ for _ in 0..=1 + 5 {}
+
+ for _ in 1..1 + 1 {}
+ for _ in 1..=1 + 1 {}
+
+ for _ in 0..13 + 13 {}
+ for _ in 0..=13 - 7 {}
+
+ for _ in 0..(1 + f()) {}
+ for _ in 0..=(1 + f()) {}
+
+ let _ = ..11 - 1;
+ let _ = ..=11 - 1;
+ let _ = ..=(11 - 1);
+ let _ = (1..11 + 1);
+ let _ = (f() + 1)..(f() + 1);
+
+ const ONE: usize = 1;
+ // integer consts are linted, too
+ for _ in 1..ONE + ONE {}
+
+ let mut vec: Vec<()> = std::vec::Vec::new();
+ vec.drain(..);
+}
diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.stderr b/src/tools/clippy/tests/ui/range_plus_minus_one.stderr
new file mode 100644
index 000000000..fb4f16585
--- /dev/null
+++ b/src/tools/clippy/tests/ui/range_plus_minus_one.stderr
@@ -0,0 +1,60 @@
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:15:14
+ |
+LL | for _ in 0..3 + 1 {}
+ | ^^^^^^^^ help: use: `0..=3`
+ |
+ = note: `-D clippy::range-plus-one` implied by `-D warnings`
+
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:18:14
+ |
+LL | for _ in 0..1 + 5 {}
+ | ^^^^^^^^ help: use: `0..=5`
+
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:21:14
+ |
+LL | for _ in 1..1 + 1 {}
+ | ^^^^^^^^ help: use: `1..=1`
+
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:27:14
+ |
+LL | for _ in 0..(1 + f()) {}
+ | ^^^^^^^^^^^^ help: use: `0..=f()`
+
+error: an exclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:31:13
+ |
+LL | let _ = ..=11 - 1;
+ | ^^^^^^^^^ help: use: `..11`
+ |
+ = note: `-D clippy::range-minus-one` implied by `-D warnings`
+
+error: an exclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:32:13
+ |
+LL | let _ = ..=(11 - 1);
+ | ^^^^^^^^^^^ help: use: `..11`
+
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:33:13
+ |
+LL | let _ = (1..11 + 1);
+ | ^^^^^^^^^^^ help: use: `(1..=11)`
+
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:34:13
+ |
+LL | let _ = (f() + 1)..(f() + 1);
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `((f() + 1)..=f())`
+
+error: an inclusive range would be more readable
+ --> $DIR/range_plus_minus_one.rs:38:14
+ |
+LL | for _ in 1..ONE + ONE {}
+ | ^^^^^^^^^^^^ help: use: `1..=ONE`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rc_buffer.fixed b/src/tools/clippy/tests/ui/rc_buffer.fixed
new file mode 100644
index 000000000..8910c01b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer.fixed
@@ -0,0 +1,28 @@
+// run-rustfix
+#![warn(clippy::rc_buffer)]
+#![allow(dead_code, unused_imports)]
+
+use std::cell::RefCell;
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::rc::Rc;
+
+struct S {
+ // triggers lint
+ bad1: Rc<str>,
+ bad2: Rc<std::path::Path>,
+ bad3: Rc<[u8]>,
+ bad4: Rc<std::ffi::OsStr>,
+ // does not trigger lint
+ good1: Rc<RefCell<String>>,
+}
+
+// triggers lint
+fn func_bad1(_: Rc<str>) {}
+fn func_bad2(_: Rc<std::path::Path>) {}
+fn func_bad3(_: Rc<[u8]>) {}
+fn func_bad4(_: Rc<std::ffi::OsStr>) {}
+// does not trigger lint
+fn func_good1(_: Rc<RefCell<String>>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rc_buffer.rs b/src/tools/clippy/tests/ui/rc_buffer.rs
new file mode 100644
index 000000000..1e63a4326
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer.rs
@@ -0,0 +1,28 @@
+// run-rustfix
+#![warn(clippy::rc_buffer)]
+#![allow(dead_code, unused_imports)]
+
+use std::cell::RefCell;
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::rc::Rc;
+
+struct S {
+ // triggers lint
+ bad1: Rc<String>,
+ bad2: Rc<PathBuf>,
+ bad3: Rc<Vec<u8>>,
+ bad4: Rc<OsString>,
+ // does not trigger lint
+ good1: Rc<RefCell<String>>,
+}
+
+// triggers lint
+fn func_bad1(_: Rc<String>) {}
+fn func_bad2(_: Rc<PathBuf>) {}
+fn func_bad3(_: Rc<Vec<u8>>) {}
+fn func_bad4(_: Rc<OsString>) {}
+// does not trigger lint
+fn func_good1(_: Rc<RefCell<String>>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rc_buffer.stderr b/src/tools/clippy/tests/ui/rc_buffer.stderr
new file mode 100644
index 000000000..9ed028e3d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer.stderr
@@ -0,0 +1,52 @@
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:12:11
+ |
+LL | bad1: Rc<String>,
+ | ^^^^^^^^^^ help: try: `Rc<str>`
+ |
+ = note: `-D clippy::rc-buffer` implied by `-D warnings`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:13:11
+ |
+LL | bad2: Rc<PathBuf>,
+ | ^^^^^^^^^^^ help: try: `Rc<std::path::Path>`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:14:11
+ |
+LL | bad3: Rc<Vec<u8>>,
+ | ^^^^^^^^^^^ help: try: `Rc<[u8]>`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:15:11
+ |
+LL | bad4: Rc<OsString>,
+ | ^^^^^^^^^^^^ help: try: `Rc<std::ffi::OsStr>`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:21:17
+ |
+LL | fn func_bad1(_: Rc<String>) {}
+ | ^^^^^^^^^^ help: try: `Rc<str>`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:22:17
+ |
+LL | fn func_bad2(_: Rc<PathBuf>) {}
+ | ^^^^^^^^^^^ help: try: `Rc<std::path::Path>`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:23:17
+ |
+LL | fn func_bad3(_: Rc<Vec<u8>>) {}
+ | ^^^^^^^^^^^ help: try: `Rc<[u8]>`
+
+error: usage of `Rc<T>` when T is a buffer type
+ --> $DIR/rc_buffer.rs:24:17
+ |
+LL | fn func_bad4(_: Rc<OsString>) {}
+ | ^^^^^^^^^^^^ help: try: `Rc<std::ffi::OsStr>`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rc_buffer_arc.fixed b/src/tools/clippy/tests/ui/rc_buffer_arc.fixed
new file mode 100644
index 000000000..13dd6f5fc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer_arc.fixed
@@ -0,0 +1,27 @@
+// run-rustfix
+#![warn(clippy::rc_buffer)]
+#![allow(dead_code, unused_imports)]
+
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::sync::{Arc, Mutex};
+
+struct S {
+ // triggers lint
+ bad1: Arc<str>,
+ bad2: Arc<std::path::Path>,
+ bad3: Arc<[u8]>,
+ bad4: Arc<std::ffi::OsStr>,
+ // does not trigger lint
+ good1: Arc<Mutex<String>>,
+}
+
+// triggers lint
+fn func_bad1(_: Arc<str>) {}
+fn func_bad2(_: Arc<std::path::Path>) {}
+fn func_bad3(_: Arc<[u8]>) {}
+fn func_bad4(_: Arc<std::ffi::OsStr>) {}
+// does not trigger lint
+fn func_good1(_: Arc<Mutex<String>>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rc_buffer_arc.rs b/src/tools/clippy/tests/ui/rc_buffer_arc.rs
new file mode 100644
index 000000000..1a521bfeb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer_arc.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+#![warn(clippy::rc_buffer)]
+#![allow(dead_code, unused_imports)]
+
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::sync::{Arc, Mutex};
+
+struct S {
+ // triggers lint
+ bad1: Arc<String>,
+ bad2: Arc<PathBuf>,
+ bad3: Arc<Vec<u8>>,
+ bad4: Arc<OsString>,
+ // does not trigger lint
+ good1: Arc<Mutex<String>>,
+}
+
+// triggers lint
+fn func_bad1(_: Arc<String>) {}
+fn func_bad2(_: Arc<PathBuf>) {}
+fn func_bad3(_: Arc<Vec<u8>>) {}
+fn func_bad4(_: Arc<OsString>) {}
+// does not trigger lint
+fn func_good1(_: Arc<Mutex<String>>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rc_buffer_arc.stderr b/src/tools/clippy/tests/ui/rc_buffer_arc.stderr
new file mode 100644
index 000000000..911feea73
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer_arc.stderr
@@ -0,0 +1,52 @@
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:11:11
+ |
+LL | bad1: Arc<String>,
+ | ^^^^^^^^^^^ help: try: `Arc<str>`
+ |
+ = note: `-D clippy::rc-buffer` implied by `-D warnings`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:12:11
+ |
+LL | bad2: Arc<PathBuf>,
+ | ^^^^^^^^^^^^ help: try: `Arc<std::path::Path>`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:13:11
+ |
+LL | bad3: Arc<Vec<u8>>,
+ | ^^^^^^^^^^^^ help: try: `Arc<[u8]>`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:14:11
+ |
+LL | bad4: Arc<OsString>,
+ | ^^^^^^^^^^^^^ help: try: `Arc<std::ffi::OsStr>`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:20:17
+ |
+LL | fn func_bad1(_: Arc<String>) {}
+ | ^^^^^^^^^^^ help: try: `Arc<str>`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:21:17
+ |
+LL | fn func_bad2(_: Arc<PathBuf>) {}
+ | ^^^^^^^^^^^^ help: try: `Arc<std::path::Path>`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:22:17
+ |
+LL | fn func_bad3(_: Arc<Vec<u8>>) {}
+ | ^^^^^^^^^^^^ help: try: `Arc<[u8]>`
+
+error: usage of `Arc<T>` when T is a buffer type
+ --> $DIR/rc_buffer_arc.rs:23:17
+ |
+LL | fn func_bad4(_: Arc<OsString>) {}
+ | ^^^^^^^^^^^^^ help: try: `Arc<std::ffi::OsStr>`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rc_buffer_redefined_string.rs b/src/tools/clippy/tests/ui/rc_buffer_redefined_string.rs
new file mode 100644
index 000000000..5d31a848c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_buffer_redefined_string.rs
@@ -0,0 +1,12 @@
+#![warn(clippy::rc_buffer)]
+
+use std::rc::Rc;
+
+struct String;
+
+struct S {
+ // does not trigger lint
+ good1: Rc<String>,
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.rs b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.rs
new file mode 100644
index 000000000..384060e6e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.rs
@@ -0,0 +1,68 @@
+#![warn(clippy::rc_clone_in_vec_init)]
+use std::sync::{Arc, Mutex};
+
+fn main() {}
+
+fn should_warn_simple_case() {
+ let v = vec![Arc::new("x".to_string()); 2];
+}
+
+fn should_warn_simple_case_with_big_indentation() {
+ if true {
+ let k = 1;
+ dbg!(k);
+ if true {
+ let v = vec![Arc::new("x".to_string()); 2];
+ }
+ }
+}
+
+fn should_warn_complex_case() {
+ let v = vec![
+ std::sync::Arc::new(Mutex::new({
+ let x = 1;
+ dbg!(x);
+ x
+ }));
+ 2
+ ];
+
+ let v1 = vec![
+ Arc::new(Mutex::new({
+ let x = 1;
+ dbg!(x);
+ x
+ }));
+ 2
+ ];
+}
+
+fn should_not_warn_custom_arc() {
+ #[derive(Clone)]
+ struct Arc;
+
+ impl Arc {
+ fn new() -> Self {
+ Arc
+ }
+ }
+
+ let v = vec![Arc::new(); 2];
+}
+
+fn should_not_warn_vec_from_elem_but_not_arc() {
+ let v = vec![String::new(); 2];
+ let v1 = vec![1; 2];
+ let v2 = vec![
+ Box::new(std::sync::Arc::new({
+ let y = 3;
+ dbg!(y);
+ y
+ }));
+ 2
+ ];
+}
+
+fn should_not_warn_vec_macro_but_not_from_elem() {
+ let v = vec![Arc::new("x".to_string())];
+}
diff --git a/src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.stderr b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.stderr
new file mode 100644
index 000000000..cd7d91e12
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/arc.stderr
@@ -0,0 +1,109 @@
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/arc.rs:7:13
+ |
+LL | let v = vec![Arc::new("x".to_string()); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::rc-clone-in-vec-init` implied by `-D warnings`
+ = note: each element will point to the same `Arc` instance
+help: consider initializing each `Arc` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Arc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Arc` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Arc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/arc.rs:15:21
+ |
+LL | let v = vec![Arc::new("x".to_string()); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Arc` instance
+help: consider initializing each `Arc` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Arc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Arc` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Arc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/arc.rs:21:13
+ |
+LL | let v = vec![
+ | _____________^
+LL | | std::sync::Arc::new(Mutex::new({
+LL | | let x = 1;
+LL | | dbg!(x);
+... |
+LL | | 2
+LL | | ];
+ | |_____^
+ |
+ = note: each element will point to the same `Arc` instance
+help: consider initializing each `Arc` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(std::sync::Arc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Arc` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = std::sync::Arc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/arc.rs:30:14
+ |
+LL | let v1 = vec![
+ | ______________^
+LL | | Arc::new(Mutex::new({
+LL | | let x = 1;
+LL | | dbg!(x);
+... |
+LL | | 2
+LL | | ];
+ | |_____^
+ |
+ = note: each element will point to the same `Arc` instance
+help: consider initializing each `Arc` element individually
+ |
+LL ~ let v1 = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Arc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Arc` initialization to a variable
+ |
+LL ~ let v1 = {
+LL + let data = Arc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.rs b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.rs
new file mode 100644
index 000000000..0394457fe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.rs
@@ -0,0 +1,69 @@
+#![warn(clippy::rc_clone_in_vec_init)]
+use std::rc::Rc;
+use std::sync::Mutex;
+
+fn main() {}
+
+fn should_warn_simple_case() {
+ let v = vec![Rc::new("x".to_string()); 2];
+}
+
+fn should_warn_simple_case_with_big_indentation() {
+ if true {
+ let k = 1;
+ dbg!(k);
+ if true {
+ let v = vec![Rc::new("x".to_string()); 2];
+ }
+ }
+}
+
+fn should_warn_complex_case() {
+ let v = vec![
+ std::rc::Rc::new(Mutex::new({
+ let x = 1;
+ dbg!(x);
+ x
+ }));
+ 2
+ ];
+
+ let v1 = vec![
+ Rc::new(Mutex::new({
+ let x = 1;
+ dbg!(x);
+ x
+ }));
+ 2
+ ];
+}
+
+fn should_not_warn_custom_rc() {
+ #[derive(Clone)]
+ struct Rc;
+
+ impl Rc {
+ fn new() -> Self {
+ Rc
+ }
+ }
+
+ let v = vec![Rc::new(); 2];
+}
+
+fn should_not_warn_vec_from_elem_but_not_rc() {
+ let v = vec![String::new(); 2];
+ let v1 = vec![1; 2];
+ let v2 = vec![
+ Box::new(std::rc::Rc::new({
+ let y = 3;
+ dbg!(y);
+ y
+ }));
+ 2
+ ];
+}
+
+fn should_not_warn_vec_macro_but_not_from_elem() {
+ let v = vec![Rc::new("x".to_string())];
+}
diff --git a/src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.stderr b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.stderr
new file mode 100644
index 000000000..fe861afe0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/rc.stderr
@@ -0,0 +1,109 @@
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/rc.rs:8:13
+ |
+LL | let v = vec![Rc::new("x".to_string()); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::rc-clone-in-vec-init` implied by `-D warnings`
+ = note: each element will point to the same `Rc` instance
+help: consider initializing each `Rc` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Rc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Rc` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Rc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/rc.rs:16:21
+ |
+LL | let v = vec![Rc::new("x".to_string()); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Rc` instance
+help: consider initializing each `Rc` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Rc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Rc` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Rc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/rc.rs:22:13
+ |
+LL | let v = vec![
+ | _____________^
+LL | | std::rc::Rc::new(Mutex::new({
+LL | | let x = 1;
+LL | | dbg!(x);
+... |
+LL | | 2
+LL | | ];
+ | |_____^
+ |
+ = note: each element will point to the same `Rc` instance
+help: consider initializing each `Rc` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(std::rc::Rc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Rc` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = std::rc::Rc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/rc.rs:31:14
+ |
+LL | let v1 = vec![
+ | ______________^
+LL | | Rc::new(Mutex::new({
+LL | | let x = 1;
+LL | | dbg!(x);
+... |
+LL | | 2
+LL | | ];
+ | |_____^
+ |
+ = note: each element will point to the same `Rc` instance
+help: consider initializing each `Rc` element individually
+ |
+LL ~ let v1 = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Rc::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Rc` initialization to a variable
+ |
+LL ~ let v1 = {
+LL + let data = Rc::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.rs b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.rs
new file mode 100644
index 000000000..693c9b553
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.rs
@@ -0,0 +1,83 @@
+#![warn(clippy::rc_clone_in_vec_init)]
+use std::rc::{Rc, Weak as UnSyncWeak};
+use std::sync::{Arc, Mutex, Weak as SyncWeak};
+
+fn main() {}
+
+fn should_warn_simple_case() {
+ let v = vec![SyncWeak::<u32>::new(); 2];
+ let v2 = vec![UnSyncWeak::<u32>::new(); 2];
+
+ let v = vec![Rc::downgrade(&Rc::new("x".to_string())); 2];
+ let v = vec![Arc::downgrade(&Arc::new("x".to_string())); 2];
+}
+
+fn should_warn_simple_case_with_big_indentation() {
+ if true {
+ let k = 1;
+ dbg!(k);
+ if true {
+ let v = vec![Arc::downgrade(&Arc::new("x".to_string())); 2];
+ let v2 = vec![Rc::downgrade(&Rc::new("x".to_string())); 2];
+ }
+ }
+}
+
+fn should_warn_complex_case() {
+ let v = vec![
+ Arc::downgrade(&Arc::new(Mutex::new({
+ let x = 1;
+ dbg!(x);
+ x
+ })));
+ 2
+ ];
+
+ let v1 = vec![
+ Rc::downgrade(&Rc::new(Mutex::new({
+ let x = 1;
+ dbg!(x);
+ x
+ })));
+ 2
+ ];
+}
+
+fn should_not_warn_custom_weak() {
+ #[derive(Clone)]
+ struct Weak;
+
+ impl Weak {
+ fn new() -> Self {
+ Weak
+ }
+ }
+
+ let v = vec![Weak::new(); 2];
+}
+
+fn should_not_warn_vec_from_elem_but_not_weak() {
+ let v = vec![String::new(); 2];
+ let v1 = vec![1; 2];
+ let v2 = vec![
+ Box::new(Arc::downgrade(&Arc::new({
+ let y = 3;
+ dbg!(y);
+ y
+ })));
+ 2
+ ];
+ let v3 = vec![
+ Box::new(Rc::downgrade(&Rc::new({
+ let y = 3;
+ dbg!(y);
+ y
+ })));
+ 2
+ ];
+}
+
+fn should_not_warn_vec_macro_but_not_from_elem() {
+ let v = vec![Arc::downgrade(&Arc::new("x".to_string()))];
+ let v = vec![Rc::downgrade(&Rc::new("x".to_string()))];
+}
diff --git a/src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.stderr b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.stderr
new file mode 100644
index 000000000..4a21946cc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_clone_in_vec_init/weak.stderr
@@ -0,0 +1,201 @@
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:8:13
+ |
+LL | let v = vec![SyncWeak::<u32>::new(); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::rc-clone-in-vec-init` implied by `-D warnings`
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(SyncWeak::<u32>::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = SyncWeak::<u32>::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:9:14
+ |
+LL | let v2 = vec![UnSyncWeak::<u32>::new(); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v2 = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(UnSyncWeak::<u32>::new(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v2 = {
+LL + let data = UnSyncWeak::<u32>::new(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:11:13
+ |
+LL | let v = vec![Rc::downgrade(&Rc::new("x".to_string())); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Rc::downgrade(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Rc::downgrade(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:12:13
+ |
+LL | let v = vec![Arc::downgrade(&Arc::new("x".to_string())); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Arc::downgrade(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Arc::downgrade(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:20:21
+ |
+LL | let v = vec![Arc::downgrade(&Arc::new("x".to_string())); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Arc::downgrade(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Arc::downgrade(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:21:22
+ |
+LL | let v2 = vec![Rc::downgrade(&Rc::new("x".to_string())); 2];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v2 = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Rc::downgrade(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v2 = {
+LL + let data = Rc::downgrade(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:27:13
+ |
+LL | let v = vec![
+ | _____________^
+LL | | Arc::downgrade(&Arc::new(Mutex::new({
+LL | | let x = 1;
+LL | | dbg!(x);
+... |
+LL | | 2
+LL | | ];
+ | |_____^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Arc::downgrade(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v = {
+LL + let data = Arc::downgrade(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: initializing a reference-counted pointer in `vec![elem; len]`
+ --> $DIR/weak.rs:36:14
+ |
+LL | let v1 = vec![
+ | ______________^
+LL | | Rc::downgrade(&Rc::new(Mutex::new({
+LL | | let x = 1;
+LL | | dbg!(x);
+... |
+LL | | 2
+LL | | ];
+ | |_____^
+ |
+ = note: each element will point to the same `Weak` instance
+help: consider initializing each `Weak` element individually
+ |
+LL ~ let v1 = {
+LL + let mut v = Vec::with_capacity(2);
+LL + (0..2).for_each(|_| v.push(Rc::downgrade(..)));
+LL + v
+LL ~ };
+ |
+help: or if this is intentional, consider extracting the `Weak` initialization to a variable
+ |
+LL ~ let v1 = {
+LL + let data = Rc::downgrade(..);
+LL + vec![data; 2]
+LL ~ };
+ |
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rc_mutex.rs b/src/tools/clippy/tests/ui/rc_mutex.rs
new file mode 100644
index 000000000..18e8a2e01
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_mutex.rs
@@ -0,0 +1,36 @@
+#![warn(clippy::rc_mutex)]
+#![allow(unused, clippy::blacklisted_name)]
+
+use std::rc::Rc;
+use std::sync::Mutex;
+
+pub struct MyStructWithPrivItem {
+ foo: Rc<Mutex<i32>>,
+}
+
+pub struct MyStructWithPubItem {
+ pub foo: Rc<Mutex<i32>>,
+}
+
+pub struct SubT<T> {
+ foo: T,
+}
+
+pub enum MyEnum {
+ One,
+ Two,
+}
+
+// All of these tests should trigger the lint because they are not
+// part of the public api
+fn test1<T>(foo: Rc<Mutex<T>>) {}
+fn test2(foo: Rc<Mutex<MyEnum>>) {}
+fn test3(foo: Rc<Mutex<SubT<usize>>>) {}
+
+// All of these tests should be allowed because they are part of the
+// public api and `avoid_breaking_exported_api` is `false` by default.
+pub fn pub_test1<T>(foo: Rc<Mutex<T>>) {}
+pub fn pub_test2(foo: Rc<Mutex<MyEnum>>) {}
+pub fn pub_test3(foo: Rc<Mutex<SubT<usize>>>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rc_mutex.stderr b/src/tools/clippy/tests/ui/rc_mutex.stderr
new file mode 100644
index 000000000..fe84361d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rc_mutex.stderr
@@ -0,0 +1,35 @@
+error: usage of `Rc<Mutex<_>>`
+ --> $DIR/rc_mutex.rs:8:10
+ |
+LL | foo: Rc<Mutex<i32>>,
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::rc-mutex` implied by `-D warnings`
+ = help: consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead
+
+error: usage of `Rc<Mutex<_>>`
+ --> $DIR/rc_mutex.rs:26:18
+ |
+LL | fn test1<T>(foo: Rc<Mutex<T>>) {}
+ | ^^^^^^^^^^^^
+ |
+ = help: consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead
+
+error: usage of `Rc<Mutex<_>>`
+ --> $DIR/rc_mutex.rs:27:15
+ |
+LL | fn test2(foo: Rc<Mutex<MyEnum>>) {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead
+
+error: usage of `Rc<Mutex<_>>`
+ --> $DIR/rc_mutex.rs:28:15
+ |
+LL | fn test3(foo: Rc<Mutex<SubT<usize>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `Rc<RefCell<_>>` or `Arc<Mutex<_>>` instead
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/read_zero_byte_vec.rs b/src/tools/clippy/tests/ui/read_zero_byte_vec.rs
new file mode 100644
index 000000000..30807e0f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/read_zero_byte_vec.rs
@@ -0,0 +1,87 @@
+#![warn(clippy::read_zero_byte_vec)]
+#![allow(clippy::unused_io_amount)]
+use std::fs::File;
+use std::io;
+use std::io::prelude::*;
+
+extern crate futures;
+use futures::io::{AsyncRead, AsyncReadExt};
+use tokio::io::{AsyncRead as TokioAsyncRead, AsyncReadExt as _, AsyncWrite as TokioAsyncWrite, AsyncWriteExt as _};
+
+fn test() -> io::Result<()> {
+ let cap = 1000;
+ let mut f = File::open("foo.txt").unwrap();
+
+ // should lint
+ let mut data = Vec::with_capacity(20);
+ f.read_exact(&mut data).unwrap();
+
+ // should lint
+ let mut data2 = Vec::with_capacity(cap);
+ f.read_exact(&mut data2)?;
+
+ // should lint
+ let mut data3 = Vec::new();
+ f.read_exact(&mut data3)?;
+
+ // should lint
+ let mut data4 = vec![];
+ let _ = f.read(&mut data4)?;
+
+ // should lint
+ let _ = {
+ let mut data5 = Vec::new();
+ f.read(&mut data5)
+ };
+
+ // should lint
+ let _ = {
+ let mut data6: Vec<u8> = Default::default();
+ f.read(&mut data6)
+ };
+
+ // should not lint
+ let mut buf = [0u8; 100];
+ f.read(&mut buf)?;
+
+ // should not lint
+ let mut empty = vec![];
+ let mut data7 = vec![];
+ f.read(&mut empty);
+
+ // should not lint
+ f.read(&mut data7);
+
+ // should not lint
+ let mut data8 = Vec::new();
+ data8.resize(100, 0);
+ f.read_exact(&mut data8)?;
+
+ // should not lint
+ let mut data9 = vec![1, 2, 3];
+ f.read_exact(&mut data9)?;
+
+ Ok(())
+}
+
+async fn test_futures<R: AsyncRead + Unpin>(r: &mut R) {
+ // should lint
+ let mut data = Vec::new();
+ r.read(&mut data).await.unwrap();
+
+ // should lint
+ let mut data2 = Vec::new();
+ r.read_exact(&mut data2).await.unwrap();
+}
+
+async fn test_tokio<R: TokioAsyncRead + Unpin>(r: &mut R) {
+ // should lint
+ let mut data = Vec::new();
+ r.read(&mut data).await.unwrap();
+
+ // should lint
+ let mut data2 = Vec::new();
+ r.read_exact(&mut data2).await.unwrap();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr b/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr
new file mode 100644
index 000000000..08ba9753d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr
@@ -0,0 +1,64 @@
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:17:5
+ |
+LL | f.read_exact(&mut data).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `data.resize(20, 0); f.read_exact(&mut data).unwrap();`
+ |
+ = note: `-D clippy::read-zero-byte-vec` implied by `-D warnings`
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:21:5
+ |
+LL | f.read_exact(&mut data2)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `data2.resize(cap, 0); f.read_exact(&mut data2)?;`
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:25:5
+ |
+LL | f.read_exact(&mut data3)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:29:5
+ |
+LL | let _ = f.read(&mut data4)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:34:9
+ |
+LL | f.read(&mut data5)
+ | ^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:40:9
+ |
+LL | f.read(&mut data6)
+ | ^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:70:5
+ |
+LL | r.read(&mut data).await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:74:5
+ |
+LL | r.read_exact(&mut data2).await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:80:5
+ |
+LL | r.read(&mut data).await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: reading zero byte data to `Vec`
+ --> $DIR/read_zero_byte_vec.rs:84:5
+ |
+LL | r.read_exact(&mut data2).await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/recursive_format_impl.rs b/src/tools/clippy/tests/ui/recursive_format_impl.rs
new file mode 100644
index 000000000..cb6ba36b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/recursive_format_impl.rs
@@ -0,0 +1,322 @@
+#![warn(clippy::recursive_format_impl)]
+#![allow(
+ clippy::inherent_to_string_shadow_display,
+ clippy::to_string_in_format_args,
+ clippy::deref_addrof,
+ clippy::borrow_deref_ref
+)]
+
+use std::fmt;
+
+struct A;
+impl A {
+ fn fmt(&self) {
+ self.to_string();
+ }
+}
+
+trait B {
+ fn fmt(&self) {}
+}
+
+impl B for A {
+ fn fmt(&self) {
+ self.to_string();
+ }
+}
+
+impl fmt::Display for A {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.to_string())
+ }
+}
+
+fn fmt(a: A) {
+ a.to_string();
+}
+
+struct C;
+
+impl C {
+ // Doesn't trigger if to_string defined separately
+ // i.e. not using ToString trait (from Display)
+ fn to_string(&self) -> String {
+ String::from("I am C")
+ }
+}
+
+impl fmt::Display for C {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.to_string())
+ }
+}
+
+enum D {
+ E(String),
+ F,
+}
+
+impl std::fmt::Display for D {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match &self {
+ Self::E(string) => write!(f, "E {}", string.to_string()),
+ Self::F => write!(f, "F"),
+ }
+ }
+}
+
+// Check for use of self as Display, in Display impl
+// Triggers on direct use of self
+struct G;
+
+impl std::fmt::Display for G {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self)
+ }
+}
+
+// Triggers on reference to self
+struct H;
+
+impl std::fmt::Display for H {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", &self)
+ }
+}
+
+impl std::fmt::Debug for H {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{:?}", &self)
+ }
+}
+
+// Triggers on multiple reference to self
+struct H2;
+
+impl std::fmt::Display for H2 {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", &&&self)
+ }
+}
+
+// Doesn't trigger on correct deref
+struct I;
+
+impl std::ops::Deref for I {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for I {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", &**self)
+ }
+}
+
+impl std::fmt::Debug for I {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{:?}", &**self)
+ }
+}
+
+// Doesn't trigger on multiple correct deref
+struct I2;
+
+impl std::ops::Deref for I2 {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for I2 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", **&&&**self)
+ }
+}
+
+// Doesn't trigger on multiple correct deref
+struct I3;
+
+impl std::ops::Deref for I3 {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for I3 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", &&**&&&**self)
+ }
+}
+
+// Does trigger when deref resolves to self
+struct J;
+
+impl std::ops::Deref for J {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for J {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", &*self)
+ }
+}
+
+impl std::fmt::Debug for J {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{:?}", &*self)
+ }
+}
+
+struct J2;
+
+impl std::ops::Deref for J2 {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for J2 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", *self)
+ }
+}
+
+struct J3;
+
+impl std::ops::Deref for J3 {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for J3 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", **&&*self)
+ }
+}
+
+struct J4;
+
+impl std::ops::Deref for J4 {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ "test"
+ }
+}
+
+impl std::fmt::Display for J4 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", &&**&&*self)
+ }
+}
+
+// Doesn't trigger on Debug from Display
+struct K;
+
+impl std::fmt::Debug for K {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "test")
+ }
+}
+
+impl std::fmt::Display for K {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
+
+// Doesn't trigger on Display from Debug
+struct K2;
+
+impl std::fmt::Debug for K2 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{}", self)
+ }
+}
+
+impl std::fmt::Display for K2 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "test")
+ }
+}
+
+// Doesn't trigger on struct fields
+struct L {
+ field1: u32,
+ field2: i32,
+}
+
+impl std::fmt::Display for L {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{},{}", self.field1, self.field2)
+ }
+}
+
+impl std::fmt::Debug for L {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ write!(f, "{:?},{:?}", self.field1, self.field2)
+ }
+}
+
+// Doesn't trigger on nested enum matching
+enum Tree {
+ Leaf,
+ Node(Vec<Tree>),
+}
+
+impl std::fmt::Display for Tree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Tree::Leaf => write!(f, "*"),
+ Tree::Node(children) => {
+ write!(f, "(")?;
+ for child in children.iter() {
+ write!(f, "{},", child)?;
+ }
+ write!(f, ")")
+ },
+ }
+ }
+}
+
+impl std::fmt::Debug for Tree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Tree::Leaf => write!(f, "*"),
+ Tree::Node(children) => {
+ write!(f, "(")?;
+ for child in children.iter() {
+ write!(f, "{:?},", child)?;
+ }
+ write!(f, ")")
+ },
+ }
+ }
+}
+
+fn main() {
+ let a = A;
+ a.to_string();
+ a.fmt();
+ fmt(a);
+
+ let c = C;
+ c.to_string();
+}
diff --git a/src/tools/clippy/tests/ui/recursive_format_impl.stderr b/src/tools/clippy/tests/ui/recursive_format_impl.stderr
new file mode 100644
index 000000000..84ce69df5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/recursive_format_impl.stderr
@@ -0,0 +1,82 @@
+error: using `self.to_string` in `fmt::Display` implementation will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:30:25
+ |
+LL | write!(f, "{}", self.to_string())
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::recursive-format-impl` implied by `-D warnings`
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:74:9
+ |
+LL | write!(f, "{}", self)
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:83:9
+ |
+LL | write!(f, "{}", &self)
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Debug` in `impl Debug` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:89:9
+ |
+LL | write!(f, "{:?}", &self)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:98:9
+ |
+LL | write!(f, "{}", &&&self)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:172:9
+ |
+LL | write!(f, "{}", &*self)
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Debug` in `impl Debug` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:178:9
+ |
+LL | write!(f, "{:?}", &*self)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:194:9
+ |
+LL | write!(f, "{}", *self)
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:210:9
+ |
+LL | write!(f, "{}", **&&*self)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: using `self` as `Display` in `impl Display` will cause infinite recursion
+ --> $DIR/recursive_format_impl.rs:226:9
+ |
+LL | write!(f, "{}", &&**&&*self)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in the macro `write` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_allocation.rs b/src/tools/clippy/tests/ui/redundant_allocation.rs
new file mode 100644
index 000000000..cf7d8c6e3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_allocation.rs
@@ -0,0 +1,135 @@
+#![warn(clippy::all)]
+#![allow(clippy::boxed_local, clippy::needless_pass_by_value)]
+#![allow(clippy::blacklisted_name, unused_variables, dead_code)]
+#![allow(unused_imports)]
+
+pub struct MyStruct;
+
+pub struct SubT<T> {
+ foo: T,
+}
+
+pub enum MyEnum {
+ One,
+ Two,
+}
+
+mod outer_box {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn box_test6<T>(foo: Box<Rc<T>>) {}
+
+ pub fn box_test7<T>(foo: Box<Arc<T>>) {}
+
+ pub fn box_test8() -> Box<Rc<SubT<usize>>> {
+ unimplemented!();
+ }
+
+ pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
+ unimplemented!();
+ }
+}
+
+mod outer_rc {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn rc_test5(a: Rc<Box<bool>>) {}
+
+ pub fn rc_test7(a: Rc<Arc<bool>>) {}
+
+ pub fn rc_test8() -> Rc<Box<SubT<usize>>> {
+ unimplemented!();
+ }
+
+ pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
+ unimplemented!();
+ }
+}
+
+mod outer_arc {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn arc_test5(a: Arc<Box<bool>>) {}
+
+ pub fn arc_test6(a: Arc<Rc<bool>>) {}
+
+ pub fn arc_test8() -> Arc<Box<SubT<usize>>> {
+ unimplemented!();
+ }
+
+ pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
+ unimplemented!();
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/7487
+mod box_dyn {
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub trait T {}
+
+ struct S {
+ a: Box<Box<dyn T>>,
+ b: Rc<Box<dyn T>>,
+ c: Arc<Box<dyn T>>,
+ }
+
+ pub fn test_box(_: Box<Box<dyn T>>) {}
+ pub fn test_rc(_: Rc<Box<dyn T>>) {}
+ pub fn test_arc(_: Arc<Box<dyn T>>) {}
+ pub fn test_rc_box(_: Rc<Box<Box<dyn T>>>) {}
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/8604
+mod box_fat_ptr {
+ use std::boxed::Box;
+ use std::path::Path;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub struct DynSized {
+ foo: [usize],
+ }
+
+ struct S {
+ a: Box<Box<str>>,
+ b: Rc<Box<str>>,
+ c: Arc<Box<str>>,
+
+ e: Box<Box<[usize]>>,
+ f: Box<Box<Path>>,
+ g: Box<Box<DynSized>>,
+ }
+
+ pub fn test_box_str(_: Box<Box<str>>) {}
+ pub fn test_rc_str(_: Rc<Box<str>>) {}
+ pub fn test_arc_str(_: Arc<Box<str>>) {}
+
+ pub fn test_box_slice(_: Box<Box<[usize]>>) {}
+ pub fn test_box_path(_: Box<Box<Path>>) {}
+ pub fn test_box_custom(_: Box<Box<DynSized>>) {}
+
+ pub fn test_rc_box_str(_: Rc<Box<Box<str>>>) {}
+ pub fn test_rc_box_slice(_: Rc<Box<Box<[usize]>>>) {}
+ pub fn test_rc_box_path(_: Rc<Box<Box<Path>>>) {}
+ pub fn test_rc_box_custom(_: Rc<Box<Box<DynSized>>>) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/redundant_allocation.stderr b/src/tools/clippy/tests/ui/redundant_allocation.stderr
new file mode 100644
index 000000000..fab1b069f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_allocation.stderr
@@ -0,0 +1,183 @@
+error: usage of `Box<Rc<T>>`
+ --> $DIR/redundant_allocation.rs:25:30
+ |
+LL | pub fn box_test6<T>(foo: Box<Rc<T>>) {}
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::redundant-allocation` implied by `-D warnings`
+ = note: `Rc<T>` is already on the heap, `Box<Rc<T>>` makes an extra allocation
+ = help: consider using just `Box<T>` or `Rc<T>`
+
+error: usage of `Box<Arc<T>>`
+ --> $DIR/redundant_allocation.rs:27:30
+ |
+LL | pub fn box_test7<T>(foo: Box<Arc<T>>) {}
+ | ^^^^^^^^^^^
+ |
+ = note: `Arc<T>` is already on the heap, `Box<Arc<T>>` makes an extra allocation
+ = help: consider using just `Box<T>` or `Arc<T>`
+
+error: usage of `Box<Rc<SubT<usize>>>`
+ --> $DIR/redundant_allocation.rs:29:27
+ |
+LL | pub fn box_test8() -> Box<Rc<SubT<usize>>> {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Rc<SubT<usize>>` is already on the heap, `Box<Rc<SubT<usize>>>` makes an extra allocation
+ = help: consider using just `Box<SubT<usize>>` or `Rc<SubT<usize>>`
+
+error: usage of `Box<Arc<T>>`
+ --> $DIR/redundant_allocation.rs:33:30
+ |
+LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
+ | ^^^^^^^^^^^
+ |
+ = note: `Arc<T>` is already on the heap, `Box<Arc<T>>` makes an extra allocation
+ = help: consider using just `Box<T>` or `Arc<T>`
+
+error: usage of `Box<Arc<SubT<T>>>`
+ --> $DIR/redundant_allocation.rs:33:46
+ |
+LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `Arc<SubT<T>>` is already on the heap, `Box<Arc<SubT<T>>>` makes an extra allocation
+ = help: consider using just `Box<SubT<T>>` or `Arc<SubT<T>>`
+
+error: usage of `Rc<Box<bool>>`
+ --> $DIR/redundant_allocation.rs:46:24
+ |
+LL | pub fn rc_test5(a: Rc<Box<bool>>) {}
+ | ^^^^^^^^^^^^^
+ |
+ = note: `Box<bool>` is already on the heap, `Rc<Box<bool>>` makes an extra allocation
+ = help: consider using just `Rc<bool>` or `Box<bool>`
+
+error: usage of `Rc<Arc<bool>>`
+ --> $DIR/redundant_allocation.rs:48:24
+ |
+LL | pub fn rc_test7(a: Rc<Arc<bool>>) {}
+ | ^^^^^^^^^^^^^
+ |
+ = note: `Arc<bool>` is already on the heap, `Rc<Arc<bool>>` makes an extra allocation
+ = help: consider using just `Rc<bool>` or `Arc<bool>`
+
+error: usage of `Rc<Box<SubT<usize>>>`
+ --> $DIR/redundant_allocation.rs:50:26
+ |
+LL | pub fn rc_test8() -> Rc<Box<SubT<usize>>> {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<SubT<usize>>` is already on the heap, `Rc<Box<SubT<usize>>>` makes an extra allocation
+ = help: consider using just `Rc<SubT<usize>>` or `Box<SubT<usize>>`
+
+error: usage of `Rc<Arc<T>>`
+ --> $DIR/redundant_allocation.rs:54:29
+ |
+LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
+ | ^^^^^^^^^^
+ |
+ = note: `Arc<T>` is already on the heap, `Rc<Arc<T>>` makes an extra allocation
+ = help: consider using just `Rc<T>` or `Arc<T>`
+
+error: usage of `Rc<Arc<SubT<T>>>`
+ --> $DIR/redundant_allocation.rs:54:44
+ |
+LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `Arc<SubT<T>>` is already on the heap, `Rc<Arc<SubT<T>>>` makes an extra allocation
+ = help: consider using just `Rc<SubT<T>>` or `Arc<SubT<T>>`
+
+error: usage of `Arc<Box<bool>>`
+ --> $DIR/redundant_allocation.rs:67:25
+ |
+LL | pub fn arc_test5(a: Arc<Box<bool>>) {}
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `Box<bool>` is already on the heap, `Arc<Box<bool>>` makes an extra allocation
+ = help: consider using just `Arc<bool>` or `Box<bool>`
+
+error: usage of `Arc<Rc<bool>>`
+ --> $DIR/redundant_allocation.rs:69:25
+ |
+LL | pub fn arc_test6(a: Arc<Rc<bool>>) {}
+ | ^^^^^^^^^^^^^
+ |
+ = note: `Rc<bool>` is already on the heap, `Arc<Rc<bool>>` makes an extra allocation
+ = help: consider using just `Arc<bool>` or `Rc<bool>`
+
+error: usage of `Arc<Box<SubT<usize>>>`
+ --> $DIR/redundant_allocation.rs:71:27
+ |
+LL | pub fn arc_test8() -> Arc<Box<SubT<usize>>> {
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<SubT<usize>>` is already on the heap, `Arc<Box<SubT<usize>>>` makes an extra allocation
+ = help: consider using just `Arc<SubT<usize>>` or `Box<SubT<usize>>`
+
+error: usage of `Arc<Rc<T>>`
+ --> $DIR/redundant_allocation.rs:75:30
+ |
+LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
+ | ^^^^^^^^^^
+ |
+ = note: `Rc<T>` is already on the heap, `Arc<Rc<T>>` makes an extra allocation
+ = help: consider using just `Arc<T>` or `Rc<T>`
+
+error: usage of `Arc<Rc<SubT<T>>>`
+ --> $DIR/redundant_allocation.rs:75:45
+ |
+LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `Rc<SubT<T>>` is already on the heap, `Arc<Rc<SubT<T>>>` makes an extra allocation
+ = help: consider using just `Arc<SubT<T>>` or `Rc<SubT<T>>`
+
+error: usage of `Rc<Box<Box<dyn T>>>`
+ --> $DIR/redundant_allocation.rs:97:27
+ |
+LL | pub fn test_rc_box(_: Rc<Box<Box<dyn T>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<Box<dyn T>>` is already on the heap, `Rc<Box<Box<dyn T>>>` makes an extra allocation
+ = help: consider using just `Rc<Box<dyn T>>` or `Box<Box<dyn T>>`
+
+error: usage of `Rc<Box<Box<str>>>`
+ --> $DIR/redundant_allocation.rs:129:31
+ |
+LL | pub fn test_rc_box_str(_: Rc<Box<Box<str>>>) {}
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<Box<str>>` is already on the heap, `Rc<Box<Box<str>>>` makes an extra allocation
+ = help: consider using just `Rc<Box<str>>` or `Box<Box<str>>`
+
+error: usage of `Rc<Box<Box<[usize]>>>`
+ --> $DIR/redundant_allocation.rs:130:33
+ |
+LL | pub fn test_rc_box_slice(_: Rc<Box<Box<[usize]>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<Box<[usize]>>` is already on the heap, `Rc<Box<Box<[usize]>>>` makes an extra allocation
+ = help: consider using just `Rc<Box<[usize]>>` or `Box<Box<[usize]>>`
+
+error: usage of `Rc<Box<Box<Path>>>`
+ --> $DIR/redundant_allocation.rs:131:32
+ |
+LL | pub fn test_rc_box_path(_: Rc<Box<Box<Path>>>) {}
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<Box<Path>>` is already on the heap, `Rc<Box<Box<Path>>>` makes an extra allocation
+ = help: consider using just `Rc<Box<Path>>` or `Box<Box<Path>>`
+
+error: usage of `Rc<Box<Box<DynSized>>>`
+ --> $DIR/redundant_allocation.rs:132:34
+ |
+LL | pub fn test_rc_box_custom(_: Rc<Box<Box<DynSized>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `Box<Box<DynSized>>` is already on the heap, `Rc<Box<Box<DynSized>>>` makes an extra allocation
+ = help: consider using just `Rc<Box<DynSized>>` or `Box<Box<DynSized>>`
+
+error: aborting due to 20 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed
new file mode 100644
index 000000000..e7ed84731
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed
@@ -0,0 +1,75 @@
+// run-rustfix
+#![warn(clippy::all)]
+#![allow(clippy::boxed_local, clippy::needless_pass_by_value)]
+#![allow(clippy::blacklisted_name, unused_variables, dead_code)]
+#![allow(unused_imports)]
+
+pub struct MyStruct;
+
+pub struct SubT<T> {
+ foo: T,
+}
+
+pub enum MyEnum {
+ One,
+ Two,
+}
+
+mod outer_box {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn box_test1<T>(foo: &T) {}
+
+ pub fn box_test2(foo: &MyStruct) {}
+
+ pub fn box_test3(foo: &MyEnum) {}
+
+ pub fn box_test4_neg(foo: Box<SubT<&usize>>) {}
+
+ pub fn box_test5<T>(foo: Box<T>) {}
+}
+
+mod outer_rc {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn rc_test1<T>(foo: &T) {}
+
+ pub fn rc_test2(foo: &MyStruct) {}
+
+ pub fn rc_test3(foo: &MyEnum) {}
+
+ pub fn rc_test4_neg(foo: Rc<SubT<&usize>>) {}
+
+ pub fn rc_test6(a: Rc<bool>) {}
+}
+
+mod outer_arc {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn arc_test1<T>(foo: &T) {}
+
+ pub fn arc_test2(foo: &MyStruct) {}
+
+ pub fn arc_test3(foo: &MyEnum) {}
+
+ pub fn arc_test4_neg(foo: Arc<SubT<&usize>>) {}
+
+ pub fn arc_test7(a: Arc<bool>) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs
new file mode 100644
index 000000000..de763f98b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs
@@ -0,0 +1,75 @@
+// run-rustfix
+#![warn(clippy::all)]
+#![allow(clippy::boxed_local, clippy::needless_pass_by_value)]
+#![allow(clippy::blacklisted_name, unused_variables, dead_code)]
+#![allow(unused_imports)]
+
+pub struct MyStruct;
+
+pub struct SubT<T> {
+ foo: T,
+}
+
+pub enum MyEnum {
+ One,
+ Two,
+}
+
+mod outer_box {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn box_test1<T>(foo: Box<&T>) {}
+
+ pub fn box_test2(foo: Box<&MyStruct>) {}
+
+ pub fn box_test3(foo: Box<&MyEnum>) {}
+
+ pub fn box_test4_neg(foo: Box<SubT<&usize>>) {}
+
+ pub fn box_test5<T>(foo: Box<Box<T>>) {}
+}
+
+mod outer_rc {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn rc_test1<T>(foo: Rc<&T>) {}
+
+ pub fn rc_test2(foo: Rc<&MyStruct>) {}
+
+ pub fn rc_test3(foo: Rc<&MyEnum>) {}
+
+ pub fn rc_test4_neg(foo: Rc<SubT<&usize>>) {}
+
+ pub fn rc_test6(a: Rc<Rc<bool>>) {}
+}
+
+mod outer_arc {
+ use crate::MyEnum;
+ use crate::MyStruct;
+ use crate::SubT;
+ use std::boxed::Box;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ pub fn arc_test1<T>(foo: Arc<&T>) {}
+
+ pub fn arc_test2(foo: Arc<&MyStruct>) {}
+
+ pub fn arc_test3(foo: Arc<&MyEnum>) {}
+
+ pub fn arc_test4_neg(foo: Arc<SubT<&usize>>) {}
+
+ pub fn arc_test7(a: Arc<Arc<bool>>) {}
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr
new file mode 100644
index 000000000..fdd76ef17
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr
@@ -0,0 +1,99 @@
+error: usage of `Box<&T>`
+ --> $DIR/redundant_allocation_fixable.rs:26:30
+ |
+LL | pub fn box_test1<T>(foo: Box<&T>) {}
+ | ^^^^^^^ help: try: `&T`
+ |
+ = note: `-D clippy::redundant-allocation` implied by `-D warnings`
+ = note: `&T` is already a pointer, `Box<&T>` allocates a pointer on the heap
+
+error: usage of `Box<&MyStruct>`
+ --> $DIR/redundant_allocation_fixable.rs:28:27
+ |
+LL | pub fn box_test2(foo: Box<&MyStruct>) {}
+ | ^^^^^^^^^^^^^^ help: try: `&MyStruct`
+ |
+ = note: `&MyStruct` is already a pointer, `Box<&MyStruct>` allocates a pointer on the heap
+
+error: usage of `Box<&MyEnum>`
+ --> $DIR/redundant_allocation_fixable.rs:30:27
+ |
+LL | pub fn box_test3(foo: Box<&MyEnum>) {}
+ | ^^^^^^^^^^^^ help: try: `&MyEnum`
+ |
+ = note: `&MyEnum` is already a pointer, `Box<&MyEnum>` allocates a pointer on the heap
+
+error: usage of `Box<Box<T>>`
+ --> $DIR/redundant_allocation_fixable.rs:34:30
+ |
+LL | pub fn box_test5<T>(foo: Box<Box<T>>) {}
+ | ^^^^^^^^^^^ help: try: `Box<T>`
+ |
+ = note: `Box<T>` is already on the heap, `Box<Box<T>>` makes an extra allocation
+
+error: usage of `Rc<&T>`
+ --> $DIR/redundant_allocation_fixable.rs:45:29
+ |
+LL | pub fn rc_test1<T>(foo: Rc<&T>) {}
+ | ^^^^^^ help: try: `&T`
+ |
+ = note: `&T` is already a pointer, `Rc<&T>` allocates a pointer on the heap
+
+error: usage of `Rc<&MyStruct>`
+ --> $DIR/redundant_allocation_fixable.rs:47:26
+ |
+LL | pub fn rc_test2(foo: Rc<&MyStruct>) {}
+ | ^^^^^^^^^^^^^ help: try: `&MyStruct`
+ |
+ = note: `&MyStruct` is already a pointer, `Rc<&MyStruct>` allocates a pointer on the heap
+
+error: usage of `Rc<&MyEnum>`
+ --> $DIR/redundant_allocation_fixable.rs:49:26
+ |
+LL | pub fn rc_test3(foo: Rc<&MyEnum>) {}
+ | ^^^^^^^^^^^ help: try: `&MyEnum`
+ |
+ = note: `&MyEnum` is already a pointer, `Rc<&MyEnum>` allocates a pointer on the heap
+
+error: usage of `Rc<Rc<bool>>`
+ --> $DIR/redundant_allocation_fixable.rs:53:24
+ |
+LL | pub fn rc_test6(a: Rc<Rc<bool>>) {}
+ | ^^^^^^^^^^^^ help: try: `Rc<bool>`
+ |
+ = note: `Rc<bool>` is already on the heap, `Rc<Rc<bool>>` makes an extra allocation
+
+error: usage of `Arc<&T>`
+ --> $DIR/redundant_allocation_fixable.rs:64:30
+ |
+LL | pub fn arc_test1<T>(foo: Arc<&T>) {}
+ | ^^^^^^^ help: try: `&T`
+ |
+ = note: `&T` is already a pointer, `Arc<&T>` allocates a pointer on the heap
+
+error: usage of `Arc<&MyStruct>`
+ --> $DIR/redundant_allocation_fixable.rs:66:27
+ |
+LL | pub fn arc_test2(foo: Arc<&MyStruct>) {}
+ | ^^^^^^^^^^^^^^ help: try: `&MyStruct`
+ |
+ = note: `&MyStruct` is already a pointer, `Arc<&MyStruct>` allocates a pointer on the heap
+
+error: usage of `Arc<&MyEnum>`
+ --> $DIR/redundant_allocation_fixable.rs:68:27
+ |
+LL | pub fn arc_test3(foo: Arc<&MyEnum>) {}
+ | ^^^^^^^^^^^^ help: try: `&MyEnum`
+ |
+ = note: `&MyEnum` is already a pointer, `Arc<&MyEnum>` allocates a pointer on the heap
+
+error: usage of `Arc<Arc<bool>>`
+ --> $DIR/redundant_allocation_fixable.rs:72:25
+ |
+LL | pub fn arc_test7(a: Arc<Arc<bool>>) {}
+ | ^^^^^^^^^^^^^^ help: try: `Arc<bool>`
+ |
+ = note: `Arc<bool>` is already on the heap, `Arc<Arc<bool>>` makes an extra allocation
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_clone.fixed b/src/tools/clippy/tests/ui/redundant_clone.fixed
new file mode 100644
index 000000000..da52c0acf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_clone.fixed
@@ -0,0 +1,241 @@
+// run-rustfix
+// rustfix-only-machine-applicable
+
+#![feature(lint_reasons)]
+#![allow(clippy::implicit_clone, clippy::drop_non_drop)]
+use std::ffi::OsString;
+use std::path::Path;
+
+fn main() {
+ let _s = ["lorem", "ipsum"].join(" ");
+
+ let s = String::from("foo");
+ let _s = s;
+
+ let s = String::from("foo");
+ let _s = s;
+
+ let s = String::from("foo");
+ let _s = s;
+
+ let _s = Path::new("/a/b/").join("c");
+
+ let _s = Path::new("/a/b/").join("c");
+
+ let _s = OsString::new();
+
+ let _s = OsString::new();
+
+ // Check that lint level works
+ #[allow(clippy::redundant_clone)]
+ let _s = String::new().to_string();
+
+ // Check that lint level works
+ #[expect(clippy::redundant_clone)]
+ let _s = String::new().to_string();
+
+ let tup = (String::from("foo"),);
+ let _t = tup.0;
+
+ let tup_ref = &(String::from("foo"),);
+ let _s = tup_ref.0.clone(); // this `.clone()` cannot be removed
+
+ {
+ let x = String::new();
+ let y = &x;
+
+ let _x = x.clone(); // ok; `x` is borrowed by `y`
+
+ let _ = y.len();
+ }
+
+ let x = (String::new(),);
+ let _ = Some(String::new()).unwrap_or_else(|| x.0.clone()); // ok; closure borrows `x`
+
+ with_branch(Alpha, true);
+ cannot_double_move(Alpha);
+ cannot_move_from_type_with_drop();
+ borrower_propagation();
+ not_consumed();
+ issue_5405();
+ manually_drop();
+ clone_then_move_cloned();
+ hashmap_neg();
+ false_negative_5707();
+}
+
+#[derive(Clone)]
+struct Alpha;
+fn with_branch(a: Alpha, b: bool) -> (Alpha, Alpha) {
+ if b { (a.clone(), a) } else { (Alpha, a) }
+}
+
+fn cannot_double_move(a: Alpha) -> (Alpha, Alpha) {
+ (a.clone(), a)
+}
+
+struct TypeWithDrop {
+ x: String,
+}
+
+impl Drop for TypeWithDrop {
+ fn drop(&mut self) {}
+}
+
+fn cannot_move_from_type_with_drop() -> String {
+ let s = TypeWithDrop { x: String::new() };
+ s.x.clone() // removing this `clone()` summons E0509
+}
+
+fn borrower_propagation() {
+ let s = String::new();
+ let t = String::new();
+
+ {
+ fn b() -> bool {
+ unimplemented!()
+ }
+ let _u = if b() { &s } else { &t };
+
+ // ok; `s` and `t` are possibly borrowed
+ let _s = s.clone();
+ let _t = t.clone();
+ }
+
+ {
+ let _u = || s.len();
+ let _v = [&t; 32];
+ let _s = s.clone(); // ok
+ let _t = t.clone(); // ok
+ }
+
+ {
+ let _u = {
+ let u = Some(&s);
+ let _ = s.clone(); // ok
+ u
+ };
+ let _s = s.clone(); // ok
+ }
+
+ {
+ use std::convert::identity as id;
+ let _u = id(id(&s));
+ let _s = s.clone(); // ok, `u` borrows `s`
+ }
+
+ let _s = s;
+ let _t = t;
+
+ #[derive(Clone)]
+ struct Foo {
+ x: usize,
+ }
+
+ {
+ let f = Foo { x: 123 };
+ let _x = Some(f.x);
+ let _f = f;
+ }
+
+ {
+ let f = Foo { x: 123 };
+ let _x = &f.x;
+ let _f = f.clone(); // ok
+ }
+}
+
+fn not_consumed() {
+ let x = std::path::PathBuf::from("home");
+ let y = x.join("matthias");
+ // join() returns a new owned PathBuf and takes `x` only by shared reference, so the .clone() is
+ // redundant. (It also does not consume the PathBuf.)
+
+ println!("x: {:?}, y: {:?}", x, y);
+
+ let mut s = String::new();
+ s.clone().push_str("foo"); // OK, removing this `clone()` will change the behavior.
+ s.push_str("bar");
+ assert_eq!(s, "bar");
+
+ let t = Some(s);
+ // OK
+ if let Some(x) = t.clone() {
+ println!("{}", x);
+ }
+ if let Some(x) = t {
+ println!("{}", x);
+ }
+}
+
+#[allow(clippy::clone_on_copy)]
+fn issue_5405() {
+ let a: [String; 1] = [String::from("foo")];
+ let _b: String = a[0].clone();
+
+ let c: [usize; 2] = [2, 3];
+ let _d: usize = c[1].clone();
+}
+
+fn manually_drop() {
+ use std::mem::ManuallyDrop;
+ use std::sync::Arc;
+
+ let a = ManuallyDrop::new(Arc::new("Hello!".to_owned()));
+ let _ = a.clone(); // OK
+
+ let p: *const String = Arc::into_raw(ManuallyDrop::into_inner(a));
+ unsafe {
+ Arc::from_raw(p);
+ Arc::from_raw(p);
+ }
+}
+
+fn clone_then_move_cloned() {
+ // issue #5973
+ let x = Some(String::new());
+ // ok, x is moved while the clone is in use.
+ assert_eq!(x.clone(), None, "not equal {}", x.unwrap());
+
+ // issue #5595
+ fn foo<F: Fn()>(_: &Alpha, _: F) {}
+ let x = Alpha;
+ // ok, data is moved while the clone is in use.
+ foo(&x, move || {
+ let _ = x;
+ });
+
+ // issue #6998
+ struct S(String);
+ impl S {
+ fn m(&mut self) {}
+ }
+ let mut x = S(String::new());
+ x.0.clone().chars().for_each(|_| x.m());
+}
+
+fn hashmap_neg() {
+ // issue 5707
+ use std::collections::HashMap;
+ use std::path::PathBuf;
+
+ let p = PathBuf::from("/");
+
+ let mut h: HashMap<&str, &str> = HashMap::new();
+ h.insert("orig-p", p.to_str().unwrap());
+
+ let mut q = p.clone();
+ q.push("foo");
+
+ println!("{:?} {}", h, q.display());
+}
+
+fn false_negative_5707() {
+ fn foo(_x: &Alpha, _y: &mut Alpha) {}
+
+ let x = Alpha;
+ let mut y = Alpha;
+ foo(&x, &mut y);
+ let _z = x.clone(); // pr 7346 can't lint on `x`
+ drop(y);
+}
diff --git a/src/tools/clippy/tests/ui/redundant_clone.rs b/src/tools/clippy/tests/ui/redundant_clone.rs
new file mode 100644
index 000000000..5867d019d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_clone.rs
@@ -0,0 +1,241 @@
+// run-rustfix
+// rustfix-only-machine-applicable
+
+#![feature(lint_reasons)]
+#![allow(clippy::implicit_clone, clippy::drop_non_drop)]
+use std::ffi::OsString;
+use std::path::Path;
+
+fn main() {
+ let _s = ["lorem", "ipsum"].join(" ").to_string();
+
+ let s = String::from("foo");
+ let _s = s.clone();
+
+ let s = String::from("foo");
+ let _s = s.to_string();
+
+ let s = String::from("foo");
+ let _s = s.to_owned();
+
+ let _s = Path::new("/a/b/").join("c").to_owned();
+
+ let _s = Path::new("/a/b/").join("c").to_path_buf();
+
+ let _s = OsString::new().to_owned();
+
+ let _s = OsString::new().to_os_string();
+
+ // Check that lint level works
+ #[allow(clippy::redundant_clone)]
+ let _s = String::new().to_string();
+
+ // Check that lint level works
+ #[expect(clippy::redundant_clone)]
+ let _s = String::new().to_string();
+
+ let tup = (String::from("foo"),);
+ let _t = tup.0.clone();
+
+ let tup_ref = &(String::from("foo"),);
+ let _s = tup_ref.0.clone(); // this `.clone()` cannot be removed
+
+ {
+ let x = String::new();
+ let y = &x;
+
+ let _x = x.clone(); // ok; `x` is borrowed by `y`
+
+ let _ = y.len();
+ }
+
+ let x = (String::new(),);
+ let _ = Some(String::new()).unwrap_or_else(|| x.0.clone()); // ok; closure borrows `x`
+
+ with_branch(Alpha, true);
+ cannot_double_move(Alpha);
+ cannot_move_from_type_with_drop();
+ borrower_propagation();
+ not_consumed();
+ issue_5405();
+ manually_drop();
+ clone_then_move_cloned();
+ hashmap_neg();
+ false_negative_5707();
+}
+
+#[derive(Clone)]
+struct Alpha;
+fn with_branch(a: Alpha, b: bool) -> (Alpha, Alpha) {
+ if b { (a.clone(), a.clone()) } else { (Alpha, a) }
+}
+
+fn cannot_double_move(a: Alpha) -> (Alpha, Alpha) {
+ (a.clone(), a)
+}
+
+struct TypeWithDrop {
+ x: String,
+}
+
+impl Drop for TypeWithDrop {
+ fn drop(&mut self) {}
+}
+
+fn cannot_move_from_type_with_drop() -> String {
+ let s = TypeWithDrop { x: String::new() };
+ s.x.clone() // removing this `clone()` summons E0509
+}
+
+fn borrower_propagation() {
+ let s = String::new();
+ let t = String::new();
+
+ {
+ fn b() -> bool {
+ unimplemented!()
+ }
+ let _u = if b() { &s } else { &t };
+
+ // ok; `s` and `t` are possibly borrowed
+ let _s = s.clone();
+ let _t = t.clone();
+ }
+
+ {
+ let _u = || s.len();
+ let _v = [&t; 32];
+ let _s = s.clone(); // ok
+ let _t = t.clone(); // ok
+ }
+
+ {
+ let _u = {
+ let u = Some(&s);
+ let _ = s.clone(); // ok
+ u
+ };
+ let _s = s.clone(); // ok
+ }
+
+ {
+ use std::convert::identity as id;
+ let _u = id(id(&s));
+ let _s = s.clone(); // ok, `u` borrows `s`
+ }
+
+ let _s = s.clone();
+ let _t = t.clone();
+
+ #[derive(Clone)]
+ struct Foo {
+ x: usize,
+ }
+
+ {
+ let f = Foo { x: 123 };
+ let _x = Some(f.x);
+ let _f = f.clone();
+ }
+
+ {
+ let f = Foo { x: 123 };
+ let _x = &f.x;
+ let _f = f.clone(); // ok
+ }
+}
+
+fn not_consumed() {
+ let x = std::path::PathBuf::from("home");
+ let y = x.clone().join("matthias");
+ // join() returns a new owned PathBuf and takes `x` only by shared reference, so the .clone() is
+ // redundant. (It also does not consume the PathBuf.)
+
+ println!("x: {:?}, y: {:?}", x, y);
+
+ let mut s = String::new();
+ s.clone().push_str("foo"); // OK, removing this `clone()` will change the behavior.
+ s.push_str("bar");
+ assert_eq!(s, "bar");
+
+ let t = Some(s);
+ // OK
+ if let Some(x) = t.clone() {
+ println!("{}", x);
+ }
+ if let Some(x) = t {
+ println!("{}", x);
+ }
+}
+
+#[allow(clippy::clone_on_copy)]
+fn issue_5405() {
+ let a: [String; 1] = [String::from("foo")];
+ let _b: String = a[0].clone();
+
+ let c: [usize; 2] = [2, 3];
+ let _d: usize = c[1].clone();
+}
+
+fn manually_drop() {
+ use std::mem::ManuallyDrop;
+ use std::sync::Arc;
+
+ let a = ManuallyDrop::new(Arc::new("Hello!".to_owned()));
+ let _ = a.clone(); // OK
+
+ let p: *const String = Arc::into_raw(ManuallyDrop::into_inner(a));
+ unsafe {
+ Arc::from_raw(p);
+ Arc::from_raw(p);
+ }
+}
+
+fn clone_then_move_cloned() {
+ // issue #5973
+ let x = Some(String::new());
+ // ok, x is moved while the clone is in use.
+ assert_eq!(x.clone(), None, "not equal {}", x.unwrap());
+
+ // issue #5595
+ fn foo<F: Fn()>(_: &Alpha, _: F) {}
+ let x = Alpha;
+ // ok, data is moved while the clone is in use.
+ foo(&x.clone(), move || {
+ let _ = x;
+ });
+
+ // issue #6998
+ struct S(String);
+ impl S {
+ fn m(&mut self) {}
+ }
+ let mut x = S(String::new());
+ x.0.clone().chars().for_each(|_| x.m());
+}
+
+fn hashmap_neg() {
+ // issue 5707
+ use std::collections::HashMap;
+ use std::path::PathBuf;
+
+ let p = PathBuf::from("/");
+
+ let mut h: HashMap<&str, &str> = HashMap::new();
+ h.insert("orig-p", p.to_str().unwrap());
+
+ let mut q = p.clone();
+ q.push("foo");
+
+ println!("{:?} {}", h, q.display());
+}
+
+fn false_negative_5707() {
+ fn foo(_x: &Alpha, _y: &mut Alpha) {}
+
+ let x = Alpha;
+ let mut y = Alpha;
+ foo(&x, &mut y);
+ let _z = x.clone(); // pr 7346 can't lint on `x`
+ drop(y);
+}
diff --git a/src/tools/clippy/tests/ui/redundant_clone.stderr b/src/tools/clippy/tests/ui/redundant_clone.stderr
new file mode 100644
index 000000000..aa1dd7cbb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_clone.stderr
@@ -0,0 +1,183 @@
+error: redundant clone
+ --> $DIR/redundant_clone.rs:10:42
+ |
+LL | let _s = ["lorem", "ipsum"].join(" ").to_string();
+ | ^^^^^^^^^^^^ help: remove this
+ |
+ = note: `-D clippy::redundant-clone` implied by `-D warnings`
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:10:14
+ |
+LL | let _s = ["lorem", "ipsum"].join(" ").to_string();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:13:15
+ |
+LL | let _s = s.clone();
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:13:14
+ |
+LL | let _s = s.clone();
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:16:15
+ |
+LL | let _s = s.to_string();
+ | ^^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:16:14
+ |
+LL | let _s = s.to_string();
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:19:15
+ |
+LL | let _s = s.to_owned();
+ | ^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:19:14
+ |
+LL | let _s = s.to_owned();
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:21:42
+ |
+LL | let _s = Path::new("/a/b/").join("c").to_owned();
+ | ^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:21:14
+ |
+LL | let _s = Path::new("/a/b/").join("c").to_owned();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:23:42
+ |
+LL | let _s = Path::new("/a/b/").join("c").to_path_buf();
+ | ^^^^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:23:14
+ |
+LL | let _s = Path::new("/a/b/").join("c").to_path_buf();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:25:29
+ |
+LL | let _s = OsString::new().to_owned();
+ | ^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:25:14
+ |
+LL | let _s = OsString::new().to_owned();
+ | ^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:27:29
+ |
+LL | let _s = OsString::new().to_os_string();
+ | ^^^^^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:27:14
+ |
+LL | let _s = OsString::new().to_os_string();
+ | ^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:38:19
+ |
+LL | let _t = tup.0.clone();
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:38:14
+ |
+LL | let _t = tup.0.clone();
+ | ^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:70:25
+ |
+LL | if b { (a.clone(), a.clone()) } else { (Alpha, a) }
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:70:24
+ |
+LL | if b { (a.clone(), a.clone()) } else { (Alpha, a) }
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:127:15
+ |
+LL | let _s = s.clone();
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:127:14
+ |
+LL | let _s = s.clone();
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:128:15
+ |
+LL | let _t = t.clone();
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:128:14
+ |
+LL | let _t = t.clone();
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:138:19
+ |
+LL | let _f = f.clone();
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:138:18
+ |
+LL | let _f = f.clone();
+ | ^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:150:14
+ |
+LL | let y = x.clone().join("matthias");
+ | ^^^^^^^^ help: remove this
+ |
+note: cloned value is neither consumed nor mutated
+ --> $DIR/redundant_clone.rs:150:13
+ |
+LL | let y = x.clone().join("matthias");
+ | ^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/redundant_clone.rs:204:11
+ |
+LL | foo(&x.clone(), move || {
+ | ^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/redundant_clone.rs:204:10
+ |
+LL | foo(&x.clone(), move || {
+ | ^
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_early.rs b/src/tools/clippy/tests/ui/redundant_closure_call_early.rs
new file mode 100644
index 000000000..5649d8dd1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_early.rs
@@ -0,0 +1,20 @@
+// non-rustfixable, see redundant_closure_call_fixable.rs
+
+#![warn(clippy::redundant_closure_call)]
+
+fn main() {
+ let mut i = 1;
+
+ // lint here
+ let mut k = (|m| m + 1)(i);
+
+ // lint here
+ k = (|a, b| a * b)(1, 5);
+
+ // don't lint these
+ #[allow(clippy::needless_return)]
+ (|| return 2)();
+ (|| -> Option<i32> { None? })();
+ #[allow(clippy::try_err)]
+ (|| -> Result<i32, i32> { Err(2)? })();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_early.stderr b/src/tools/clippy/tests/ui/redundant_closure_call_early.stderr
new file mode 100644
index 000000000..2735e4173
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_early.stderr
@@ -0,0 +1,16 @@
+error: try not to call a closure in the expression where it is declared
+ --> $DIR/redundant_closure_call_early.rs:9:17
+ |
+LL | let mut k = (|m| m + 1)(i);
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::redundant-closure-call` implied by `-D warnings`
+
+error: try not to call a closure in the expression where it is declared
+ --> $DIR/redundant_closure_call_early.rs:12:9
+ |
+LL | k = (|a, b| a * b)(1, 5);
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
new file mode 100644
index 000000000..0abca6fca
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.fixed
@@ -0,0 +1,8 @@
+// run-rustfix
+
+#![warn(clippy::redundant_closure_call)]
+#![allow(unused)]
+
+fn main() {
+ let a = 42;
+}
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
new file mode 100644
index 000000000..f8b9d37a5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.rs
@@ -0,0 +1,8 @@
+// run-rustfix
+
+#![warn(clippy::redundant_closure_call)]
+#![allow(unused)]
+
+fn main() {
+ let a = (|| 42)();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
new file mode 100644
index 000000000..afd704ef1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_fixable.stderr
@@ -0,0 +1,10 @@
+error: try not to call a closure in the expression where it is declared
+ --> $DIR/redundant_closure_call_fixable.rs:7:13
+ |
+LL | let a = (|| 42)();
+ | ^^^^^^^^^ help: try doing something like: `42`
+ |
+ = note: `-D clippy::redundant-closure-call` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_late.rs b/src/tools/clippy/tests/ui/redundant_closure_call_late.rs
new file mode 100644
index 000000000..5612827bd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_late.rs
@@ -0,0 +1,40 @@
+// non-rustfixable, see redundant_closure_call_fixable.rs
+
+#![warn(clippy::redundant_closure_call)]
+#![allow(clippy::needless_late_init)]
+
+fn main() {
+ let mut i = 1;
+
+ // don't lint here, the closure is used more than once
+ let closure = |i| i + 1;
+ i = closure(3);
+ i = closure(4);
+
+ // lint here
+ let redun_closure = || 1;
+ i = redun_closure();
+
+ // shadowed closures are supported, lint here
+ let shadowed_closure = || 1;
+ i = shadowed_closure();
+ let shadowed_closure = || 2;
+ i = shadowed_closure();
+
+ // don't lint here
+ let shadowed_closure = || 2;
+ i = shadowed_closure();
+ i = shadowed_closure();
+
+ // Fix FP in #5916
+ let mut x;
+ let create = || 2 * 2;
+ x = create();
+ fun(move || {
+ x = create();
+ })
+}
+
+fn fun<T: 'static + FnMut()>(mut f: T) {
+ f();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_closure_call_late.stderr b/src/tools/clippy/tests/ui/redundant_closure_call_late.stderr
new file mode 100644
index 000000000..4eca43a2b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_closure_call_late.stderr
@@ -0,0 +1,22 @@
+error: closure called just once immediately after it was declared
+ --> $DIR/redundant_closure_call_late.rs:16:5
+ |
+LL | i = redun_closure();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::redundant-closure-call` implied by `-D warnings`
+
+error: closure called just once immediately after it was declared
+ --> $DIR/redundant_closure_call_late.rs:20:5
+ |
+LL | i = shadowed_closure();
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: closure called just once immediately after it was declared
+ --> $DIR/redundant_closure_call_late.rs:22:5
+ |
+LL | i = shadowed_closure();
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_else.rs b/src/tools/clippy/tests/ui/redundant_else.rs
new file mode 100644
index 000000000..64f566735
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_else.rs
@@ -0,0 +1,154 @@
+#![warn(clippy::redundant_else)]
+#![allow(clippy::needless_return, clippy::if_same_then_else, clippy::needless_late_init)]
+
+fn main() {
+ loop {
+ // break
+ if foo() {
+ println!("Love your neighbor;");
+ break;
+ } else {
+ println!("yet don't pull down your hedge.");
+ }
+ // continue
+ if foo() {
+ println!("He that lies down with Dogs,");
+ continue;
+ } else {
+ println!("shall rise up with fleas.");
+ }
+ // match block
+ if foo() {
+ match foo() {
+ 1 => break,
+ _ => return,
+ }
+ } else {
+ println!("You may delay, but time will not.");
+ }
+ }
+ // else if
+ if foo() {
+ return;
+ } else if foo() {
+ return;
+ } else {
+ println!("A fat kitchen makes a lean will.");
+ }
+ // let binding outside of block
+ let _ = {
+ if foo() {
+ return;
+ } else {
+ 1
+ }
+ };
+ // else if with let binding outside of block
+ let _ = {
+ if foo() {
+ return;
+ } else if foo() {
+ return;
+ } else {
+ 2
+ }
+ };
+ // inside if let
+ let _ = if let Some(1) = foo() {
+ let _ = 1;
+ if foo() {
+ return;
+ } else {
+ 1
+ }
+ } else {
+ 1
+ };
+
+ //
+ // non-lint cases
+ //
+
+ // sanity check
+ if foo() {
+ let _ = 1;
+ } else {
+ println!("Who is wise? He that learns from every one.");
+ }
+ // else if without else
+ if foo() {
+ return;
+ } else if foo() {
+ foo()
+ };
+ // nested if return
+ if foo() {
+ if foo() {
+ return;
+ }
+ } else {
+ foo()
+ };
+ // match with non-breaking branch
+ if foo() {
+ match foo() {
+ 1 => foo(),
+ _ => return,
+ }
+ } else {
+ println!("Three may keep a secret, if two of them are dead.");
+ }
+ // let binding
+ let _ = if foo() {
+ return;
+ } else {
+ 1
+ };
+ // assign
+ let mut a;
+ a = if foo() {
+ return;
+ } else {
+ 1
+ };
+ // assign-op
+ a += if foo() {
+ return;
+ } else {
+ 1
+ };
+ // if return else if else
+ if foo() {
+ return;
+ } else if foo() {
+ 1
+ } else {
+ 2
+ };
+ // if else if return else
+ if foo() {
+ 1
+ } else if foo() {
+ return;
+ } else {
+ 2
+ };
+ // else if with let binding
+ let _ = if foo() {
+ return;
+ } else if foo() {
+ return;
+ } else {
+ 2
+ };
+ // inside function call
+ Box::new(if foo() {
+ return;
+ } else {
+ 1
+ });
+}
+
+fn foo<T>() -> T {
+ unimplemented!("I'm not Santa Claus")
+}
diff --git a/src/tools/clippy/tests/ui/redundant_else.stderr b/src/tools/clippy/tests/ui/redundant_else.stderr
new file mode 100644
index 000000000..9000cdc81
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_else.stderr
@@ -0,0 +1,80 @@
+error: redundant else block
+ --> $DIR/redundant_else.rs:10:16
+ |
+LL | } else {
+ | ________________^
+LL | | println!("yet don't pull down your hedge.");
+LL | | }
+ | |_________^
+ |
+ = note: `-D clippy::redundant-else` implied by `-D warnings`
+ = help: remove the `else` block and move the contents out
+
+error: redundant else block
+ --> $DIR/redundant_else.rs:17:16
+ |
+LL | } else {
+ | ________________^
+LL | | println!("shall rise up with fleas.");
+LL | | }
+ | |_________^
+ |
+ = help: remove the `else` block and move the contents out
+
+error: redundant else block
+ --> $DIR/redundant_else.rs:26:16
+ |
+LL | } else {
+ | ________________^
+LL | | println!("You may delay, but time will not.");
+LL | | }
+ | |_________^
+ |
+ = help: remove the `else` block and move the contents out
+
+error: redundant else block
+ --> $DIR/redundant_else.rs:35:12
+ |
+LL | } else {
+ | ____________^
+LL | | println!("A fat kitchen makes a lean will.");
+LL | | }
+ | |_____^
+ |
+ = help: remove the `else` block and move the contents out
+
+error: redundant else block
+ --> $DIR/redundant_else.rs:42:16
+ |
+LL | } else {
+ | ________________^
+LL | | 1
+LL | | }
+ | |_________^
+ |
+ = help: remove the `else` block and move the contents out
+
+error: redundant else block
+ --> $DIR/redundant_else.rs:52:16
+ |
+LL | } else {
+ | ________________^
+LL | | 2
+LL | | }
+ | |_________^
+ |
+ = help: remove the `else` block and move the contents out
+
+error: redundant else block
+ --> $DIR/redundant_else.rs:61:16
+ |
+LL | } else {
+ | ________________^
+LL | | 1
+LL | | }
+ | |_________^
+ |
+ = help: remove the `else` block and move the contents out
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_field_names.fixed b/src/tools/clippy/tests/ui/redundant_field_names.fixed
new file mode 100644
index 000000000..5b4b8eeed
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_field_names.fixed
@@ -0,0 +1,71 @@
+// run-rustfix
+#![warn(clippy::redundant_field_names)]
+#![allow(clippy::no_effect, dead_code, unused_variables)]
+
+#[macro_use]
+extern crate derive_new;
+
+use std::ops::{Range, RangeFrom, RangeInclusive, RangeTo, RangeToInclusive};
+
+mod foo {
+ pub const BAR: u8 = 0;
+}
+
+struct Person {
+ gender: u8,
+ age: u8,
+ name: u8,
+ buzz: u64,
+ foo: u8,
+}
+
+#[derive(new)]
+pub struct S {
+ v: String,
+}
+
+fn main() {
+ let gender: u8 = 42;
+ let age = 0;
+ let fizz: u64 = 0;
+ let name: u8 = 0;
+
+ let me = Person {
+ gender,
+ age,
+
+ name, // should be ok
+ buzz: fizz, // should be ok
+ foo: foo::BAR, // should be ok
+ };
+
+ // Range expressions
+ let (start, end) = (0, 0);
+
+ let _ = start..;
+ let _ = ..end;
+ let _ = start..end;
+
+ let _ = ..=end;
+ let _ = start..=end;
+
+ // Issue #2799
+ let _: Vec<_> = (start..end).collect();
+
+ // hand-written Range family structs are linted
+ let _ = RangeFrom { start };
+ let _ = RangeTo { end };
+ let _ = Range { start, end };
+ let _ = RangeInclusive::new(start, end);
+ let _ = RangeToInclusive { end };
+}
+
+fn issue_3476() {
+ fn foo<T>() {}
+
+ struct S {
+ foo: fn(),
+ }
+
+ S { foo: foo::<i32> };
+}
diff --git a/src/tools/clippy/tests/ui/redundant_field_names.rs b/src/tools/clippy/tests/ui/redundant_field_names.rs
new file mode 100644
index 000000000..3f97b80c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_field_names.rs
@@ -0,0 +1,71 @@
+// run-rustfix
+#![warn(clippy::redundant_field_names)]
+#![allow(clippy::no_effect, dead_code, unused_variables)]
+
+#[macro_use]
+extern crate derive_new;
+
+use std::ops::{Range, RangeFrom, RangeInclusive, RangeTo, RangeToInclusive};
+
+mod foo {
+ pub const BAR: u8 = 0;
+}
+
+struct Person {
+ gender: u8,
+ age: u8,
+ name: u8,
+ buzz: u64,
+ foo: u8,
+}
+
+#[derive(new)]
+pub struct S {
+ v: String,
+}
+
+fn main() {
+ let gender: u8 = 42;
+ let age = 0;
+ let fizz: u64 = 0;
+ let name: u8 = 0;
+
+ let me = Person {
+ gender: gender,
+ age: age,
+
+ name, // should be ok
+ buzz: fizz, // should be ok
+ foo: foo::BAR, // should be ok
+ };
+
+ // Range expressions
+ let (start, end) = (0, 0);
+
+ let _ = start..;
+ let _ = ..end;
+ let _ = start..end;
+
+ let _ = ..=end;
+ let _ = start..=end;
+
+ // Issue #2799
+ let _: Vec<_> = (start..end).collect();
+
+ // hand-written Range family structs are linted
+ let _ = RangeFrom { start: start };
+ let _ = RangeTo { end: end };
+ let _ = Range { start: start, end: end };
+ let _ = RangeInclusive::new(start, end);
+ let _ = RangeToInclusive { end: end };
+}
+
+fn issue_3476() {
+ fn foo<T>() {}
+
+ struct S {
+ foo: fn(),
+ }
+
+ S { foo: foo::<i32> };
+}
diff --git a/src/tools/clippy/tests/ui/redundant_field_names.stderr b/src/tools/clippy/tests/ui/redundant_field_names.stderr
new file mode 100644
index 000000000..7976292df
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_field_names.stderr
@@ -0,0 +1,46 @@
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:34:9
+ |
+LL | gender: gender,
+ | ^^^^^^^^^^^^^^ help: replace it with: `gender`
+ |
+ = note: `-D clippy::redundant-field-names` implied by `-D warnings`
+
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:35:9
+ |
+LL | age: age,
+ | ^^^^^^^^ help: replace it with: `age`
+
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:56:25
+ |
+LL | let _ = RangeFrom { start: start };
+ | ^^^^^^^^^^^^ help: replace it with: `start`
+
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:57:23
+ |
+LL | let _ = RangeTo { end: end };
+ | ^^^^^^^^ help: replace it with: `end`
+
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:58:21
+ |
+LL | let _ = Range { start: start, end: end };
+ | ^^^^^^^^^^^^ help: replace it with: `start`
+
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:58:35
+ |
+LL | let _ = Range { start: start, end: end };
+ | ^^^^^^^^ help: replace it with: `end`
+
+error: redundant field names in struct initialization
+ --> $DIR/redundant_field_names.rs:60:32
+ |
+LL | let _ = RangeToInclusive { end: end };
+ | ^^^^^^^^ help: replace it with: `end`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.fixed
new file mode 100644
index 000000000..ce3229f17
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.fixed
@@ -0,0 +1,58 @@
+// run-rustfix
+
+// Issue #5746
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(clippy::if_same_then_else, clippy::equatable_if_let)]
+use std::task::Poll::{Pending, Ready};
+
+fn main() {
+ let m = std::sync::Mutex::new((0, 0));
+
+ // Result
+ if m.lock().is_ok() {}
+ if Err::<(), _>(m.lock().unwrap().0).is_err() {}
+
+ {
+ if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok() {}
+ }
+ if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok() {
+ } else {
+ }
+ if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok() {}
+ if Err::<std::sync::MutexGuard<()>, _>(()).is_err() {}
+
+ if Ok::<_, ()>(String::new()).is_ok() {}
+ if Err::<(), _>((String::new(), ())).is_err() {}
+
+ // Option
+ if Some(m.lock()).is_some() {}
+ if Some(m.lock().unwrap().0).is_some() {}
+
+ {
+ if None::<std::sync::MutexGuard<()>>.is_none() {}
+ }
+ if None::<std::sync::MutexGuard<()>>.is_none() {
+ } else {
+ }
+
+ if None::<std::sync::MutexGuard<()>>.is_none() {}
+
+ if Some(String::new()).is_some() {}
+ if Some((String::new(), ())).is_some() {}
+
+ // Poll
+ if Ready(m.lock()).is_ready() {}
+ if Ready(m.lock().unwrap().0).is_ready() {}
+
+ {
+ if Pending::<std::sync::MutexGuard<()>>.is_pending() {}
+ }
+ if Pending::<std::sync::MutexGuard<()>>.is_pending() {
+ } else {
+ }
+
+ if Pending::<std::sync::MutexGuard<()>>.is_pending() {}
+
+ if Ready(String::new()).is_ready() {}
+ if Ready((String::new(), ())).is_ready() {}
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.rs
new file mode 100644
index 000000000..29b8543cf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.rs
@@ -0,0 +1,58 @@
+// run-rustfix
+
+// Issue #5746
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(clippy::if_same_then_else, clippy::equatable_if_let)]
+use std::task::Poll::{Pending, Ready};
+
+fn main() {
+ let m = std::sync::Mutex::new((0, 0));
+
+ // Result
+ if let Ok(_) = m.lock() {}
+ if let Err(_) = Err::<(), _>(m.lock().unwrap().0) {}
+
+ {
+ if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {}
+ }
+ if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {
+ } else {
+ }
+ if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {}
+ if let Err(_) = Err::<std::sync::MutexGuard<()>, _>(()) {}
+
+ if let Ok(_) = Ok::<_, ()>(String::new()) {}
+ if let Err(_) = Err::<(), _>((String::new(), ())) {}
+
+ // Option
+ if let Some(_) = Some(m.lock()) {}
+ if let Some(_) = Some(m.lock().unwrap().0) {}
+
+ {
+ if let None = None::<std::sync::MutexGuard<()>> {}
+ }
+ if let None = None::<std::sync::MutexGuard<()>> {
+ } else {
+ }
+
+ if let None = None::<std::sync::MutexGuard<()>> {}
+
+ if let Some(_) = Some(String::new()) {}
+ if let Some(_) = Some((String::new(), ())) {}
+
+ // Poll
+ if let Ready(_) = Ready(m.lock()) {}
+ if let Ready(_) = Ready(m.lock().unwrap().0) {}
+
+ {
+ if let Pending = Pending::<std::sync::MutexGuard<()>> {}
+ }
+ if let Pending = Pending::<std::sync::MutexGuard<()>> {
+ } else {
+ }
+
+ if let Pending = Pending::<std::sync::MutexGuard<()>> {}
+
+ if let Ready(_) = Ready(String::new()) {}
+ if let Ready(_) = Ready((String::new(), ())) {}
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr
new file mode 100644
index 000000000..eb7aa70ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr
@@ -0,0 +1,171 @@
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:12:12
+ |
+LL | if let Ok(_) = m.lock() {}
+ | -------^^^^^----------- help: try this: `if m.lock().is_ok()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:13:12
+ |
+LL | if let Err(_) = Err::<(), _>(m.lock().unwrap().0) {}
+ | -------^^^^^^------------------------------------ help: try this: `if Err::<(), _>(m.lock().unwrap().0).is_err()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:16:16
+ |
+LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {}
+ | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:18:12
+ |
+LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {
+ | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:21:12
+ |
+LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {}
+ | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:22:12
+ |
+LL | if let Err(_) = Err::<std::sync::MutexGuard<()>, _>(()) {}
+ | -------^^^^^^------------------------------------------ help: try this: `if Err::<std::sync::MutexGuard<()>, _>(()).is_err()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:24:12
+ |
+LL | if let Ok(_) = Ok::<_, ()>(String::new()) {}
+ | -------^^^^^----------------------------- help: try this: `if Ok::<_, ()>(String::new()).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:25:12
+ |
+LL | if let Err(_) = Err::<(), _>((String::new(), ())) {}
+ | -------^^^^^^------------------------------------ help: try this: `if Err::<(), _>((String::new(), ())).is_err()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:28:12
+ |
+LL | if let Some(_) = Some(m.lock()) {}
+ | -------^^^^^^^----------------- help: try this: `if Some(m.lock()).is_some()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:29:12
+ |
+LL | if let Some(_) = Some(m.lock().unwrap().0) {}
+ | -------^^^^^^^---------------------------- help: try this: `if Some(m.lock().unwrap().0).is_some()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:32:16
+ |
+LL | if let None = None::<std::sync::MutexGuard<()>> {}
+ | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:34:12
+ |
+LL | if let None = None::<std::sync::MutexGuard<()>> {
+ | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:38:12
+ |
+LL | if let None = None::<std::sync::MutexGuard<()>> {}
+ | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:40:12
+ |
+LL | if let Some(_) = Some(String::new()) {}
+ | -------^^^^^^^---------------------- help: try this: `if Some(String::new()).is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:41:12
+ |
+LL | if let Some(_) = Some((String::new(), ())) {}
+ | -------^^^^^^^---------------------------- help: try this: `if Some((String::new(), ())).is_some()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:44:12
+ |
+LL | if let Ready(_) = Ready(m.lock()) {}
+ | -------^^^^^^^^------------------ help: try this: `if Ready(m.lock()).is_ready()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:45:12
+ |
+LL | if let Ready(_) = Ready(m.lock().unwrap().0) {}
+ | -------^^^^^^^^----------------------------- help: try this: `if Ready(m.lock().unwrap().0).is_ready()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:48:16
+ |
+LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {}
+ | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:50:12
+ |
+LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {
+ | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
+ |
+ = note: this will change drop order of the result, as well as all temporaries
+ = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:54:12
+ |
+LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {}
+ | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:56:12
+ |
+LL | if let Ready(_) = Ready(String::new()) {}
+ | -------^^^^^^^^----------------------- help: try this: `if Ready(String::new()).is_ready()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_drop_order.rs:57:12
+ |
+LL | if let Ready(_) = Ready((String::new(), ())) {}
+ | -------^^^^^^^^----------------------------- help: try this: `if Ready((String::new(), ())).is_ready()`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed
new file mode 100644
index 000000000..acc8de5f4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed
@@ -0,0 +1,73 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(unused_must_use, clippy::needless_bool, clippy::match_like_matches_macro)]
+
+use std::net::{
+ IpAddr::{self, V4, V6},
+ Ipv4Addr, Ipv6Addr,
+};
+
+fn main() {
+ let ipaddr: IpAddr = V4(Ipv4Addr::LOCALHOST);
+ if ipaddr.is_ipv4() {}
+
+ if V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+
+ if V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+
+ while V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+
+ while V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+
+ if V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+
+ if V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+
+ if let V4(ipaddr) = V4(Ipv4Addr::LOCALHOST) {
+ println!("{}", ipaddr);
+ }
+
+ V4(Ipv4Addr::LOCALHOST).is_ipv4();
+
+ V4(Ipv4Addr::LOCALHOST).is_ipv6();
+
+ V6(Ipv6Addr::LOCALHOST).is_ipv6();
+
+ V6(Ipv6Addr::LOCALHOST).is_ipv4();
+
+ let _ = if V4(Ipv4Addr::LOCALHOST).is_ipv4() {
+ true
+ } else {
+ false
+ };
+
+ ipaddr_const();
+
+ let _ = if gen_ipaddr().is_ipv4() {
+ 1
+ } else if gen_ipaddr().is_ipv6() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_ipaddr() -> IpAddr {
+ V4(Ipv4Addr::LOCALHOST)
+}
+
+const fn ipaddr_const() {
+ if V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+
+ if V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+
+ while V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+
+ while V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+
+ V4(Ipv4Addr::LOCALHOST).is_ipv4();
+
+ V6(Ipv6Addr::LOCALHOST).is_ipv6();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs
new file mode 100644
index 000000000..678d91ce9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs
@@ -0,0 +1,91 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(unused_must_use, clippy::needless_bool, clippy::match_like_matches_macro)]
+
+use std::net::{
+ IpAddr::{self, V4, V6},
+ Ipv4Addr, Ipv6Addr,
+};
+
+fn main() {
+ let ipaddr: IpAddr = V4(Ipv4Addr::LOCALHOST);
+ if let V4(_) = &ipaddr {}
+
+ if let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+
+ if let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+
+ while let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+
+ while let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+
+ if V4(Ipv4Addr::LOCALHOST).is_ipv4() {}
+
+ if V6(Ipv6Addr::LOCALHOST).is_ipv6() {}
+
+ if let V4(ipaddr) = V4(Ipv4Addr::LOCALHOST) {
+ println!("{}", ipaddr);
+ }
+
+ match V4(Ipv4Addr::LOCALHOST) {
+ V4(_) => true,
+ V6(_) => false,
+ };
+
+ match V4(Ipv4Addr::LOCALHOST) {
+ V4(_) => false,
+ V6(_) => true,
+ };
+
+ match V6(Ipv6Addr::LOCALHOST) {
+ V4(_) => false,
+ V6(_) => true,
+ };
+
+ match V6(Ipv6Addr::LOCALHOST) {
+ V4(_) => true,
+ V6(_) => false,
+ };
+
+ let _ = if let V4(_) = V4(Ipv4Addr::LOCALHOST) {
+ true
+ } else {
+ false
+ };
+
+ ipaddr_const();
+
+ let _ = if let V4(_) = gen_ipaddr() {
+ 1
+ } else if let V6(_) = gen_ipaddr() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_ipaddr() -> IpAddr {
+ V4(Ipv4Addr::LOCALHOST)
+}
+
+const fn ipaddr_const() {
+ if let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+
+ if let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+
+ while let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+
+ while let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+
+ match V4(Ipv4Addr::LOCALHOST) {
+ V4(_) => true,
+ V6(_) => false,
+ };
+
+ match V6(Ipv6Addr::LOCALHOST) {
+ V4(_) => false,
+ V6(_) => true,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr
new file mode 100644
index 000000000..caf458cd8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr
@@ -0,0 +1,130 @@
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:14:12
+ |
+LL | if let V4(_) = &ipaddr {}
+ | -------^^^^^---------- help: try this: `if ipaddr.is_ipv4()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:16:12
+ |
+LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+ | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:18:12
+ |
+LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+ | -------^^^^^-------------------------- help: try this: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:20:15
+ |
+LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+ | ----------^^^^^-------------------------- help: try this: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:22:15
+ |
+LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+ | ----------^^^^^-------------------------- help: try this: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:32:5
+ |
+LL | / match V4(Ipv4Addr::LOCALHOST) {
+LL | | V4(_) => true,
+LL | | V6(_) => false,
+LL | | };
+ | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:37:5
+ |
+LL | / match V4(Ipv4Addr::LOCALHOST) {
+LL | | V4(_) => false,
+LL | | V6(_) => true,
+LL | | };
+ | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:42:5
+ |
+LL | / match V6(Ipv6Addr::LOCALHOST) {
+LL | | V4(_) => false,
+LL | | V6(_) => true,
+LL | | };
+ | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:47:5
+ |
+LL | / match V6(Ipv6Addr::LOCALHOST) {
+LL | | V4(_) => true,
+LL | | V6(_) => false,
+LL | | };
+ | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:52:20
+ |
+LL | let _ = if let V4(_) = V4(Ipv4Addr::LOCALHOST) {
+ | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:60:20
+ |
+LL | let _ = if let V4(_) = gen_ipaddr() {
+ | -------^^^^^--------------- help: try this: `if gen_ipaddr().is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:62:19
+ |
+LL | } else if let V6(_) = gen_ipaddr() {
+ | -------^^^^^--------------- help: try this: `if gen_ipaddr().is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:74:12
+ |
+LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+ | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:76:12
+ |
+LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+ | -------^^^^^-------------------------- help: try this: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:78:15
+ |
+LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
+ | ----------^^^^^-------------------------- help: try this: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:80:15
+ |
+LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
+ | ----------^^^^^-------------------------- help: try this: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+
+error: redundant pattern matching, consider using `is_ipv4()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:82:5
+ |
+LL | / match V4(Ipv4Addr::LOCALHOST) {
+LL | | V4(_) => true,
+LL | | V6(_) => false,
+LL | | };
+ | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+
+error: redundant pattern matching, consider using `is_ipv6()`
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:87:5
+ |
+LL | / match V6(Ipv6Addr::LOCALHOST) {
+LL | | V4(_) => false,
+LL | | V6(_) => true,
+LL | | };
+ | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed
new file mode 100644
index 000000000..a89845c1d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed
@@ -0,0 +1,88 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(
+ unused_must_use,
+ clippy::needless_bool,
+ clippy::match_like_matches_macro,
+ clippy::equatable_if_let,
+ clippy::if_same_then_else
+)]
+
+fn main() {
+ if None::<()>.is_none() {}
+
+ if Some(42).is_some() {}
+
+ if Some(42).is_some() {
+ foo();
+ } else {
+ bar();
+ }
+
+ while Some(42).is_some() {}
+
+ while Some(42).is_none() {}
+
+ while None::<()>.is_none() {}
+
+ let mut v = vec![1, 2, 3];
+ while v.pop().is_some() {
+ foo();
+ }
+
+ if None::<i32>.is_none() {}
+
+ if Some(42).is_some() {}
+
+ Some(42).is_some();
+
+ None::<()>.is_none();
+
+ let _ = None::<()>.is_none();
+
+ let opt = Some(false);
+ let _ = if opt.is_some() { true } else { false };
+
+ issue6067();
+
+ let _ = if gen_opt().is_some() {
+ 1
+ } else if gen_opt().is_none() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_opt() -> Option<()> {
+ None
+}
+
+fn foo() {}
+
+fn bar() {}
+
+// Methods that are unstable const should not be suggested within a const context, see issue #5697.
+// However, in Rust 1.48.0 the methods `is_some` and `is_none` of `Option` were stabilized as const,
+// so the following should be linted.
+const fn issue6067() {
+ if Some(42).is_some() {}
+
+ if None::<()>.is_none() {}
+
+ while Some(42).is_some() {}
+
+ while None::<()>.is_none() {}
+
+ Some(42).is_some();
+
+ None::<()>.is_none();
+}
+
+#[allow(clippy::deref_addrof, dead_code, clippy::needless_borrow)]
+fn issue7921() {
+ if (&None::<()>).is_none() {}
+ if (&None::<()>).is_none() {}
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs
new file mode 100644
index 000000000..d6f444034
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs
@@ -0,0 +1,103 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(
+ unused_must_use,
+ clippy::needless_bool,
+ clippy::match_like_matches_macro,
+ clippy::equatable_if_let,
+ clippy::if_same_then_else
+)]
+
+fn main() {
+ if let None = None::<()> {}
+
+ if let Some(_) = Some(42) {}
+
+ if let Some(_) = Some(42) {
+ foo();
+ } else {
+ bar();
+ }
+
+ while let Some(_) = Some(42) {}
+
+ while let None = Some(42) {}
+
+ while let None = None::<()> {}
+
+ let mut v = vec![1, 2, 3];
+ while let Some(_) = v.pop() {
+ foo();
+ }
+
+ if None::<i32>.is_none() {}
+
+ if Some(42).is_some() {}
+
+ match Some(42) {
+ Some(_) => true,
+ None => false,
+ };
+
+ match None::<()> {
+ Some(_) => false,
+ None => true,
+ };
+
+ let _ = match None::<()> {
+ Some(_) => false,
+ None => true,
+ };
+
+ let opt = Some(false);
+ let _ = if let Some(_) = opt { true } else { false };
+
+ issue6067();
+
+ let _ = if let Some(_) = gen_opt() {
+ 1
+ } else if let None = gen_opt() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_opt() -> Option<()> {
+ None
+}
+
+fn foo() {}
+
+fn bar() {}
+
+// Methods that are unstable const should not be suggested within a const context, see issue #5697.
+// However, in Rust 1.48.0 the methods `is_some` and `is_none` of `Option` were stabilized as const,
+// so the following should be linted.
+const fn issue6067() {
+ if let Some(_) = Some(42) {}
+
+ if let None = None::<()> {}
+
+ while let Some(_) = Some(42) {}
+
+ while let None = None::<()> {}
+
+ match Some(42) {
+ Some(_) => true,
+ None => false,
+ };
+
+ match None::<()> {
+ Some(_) => false,
+ None => true,
+ };
+}
+
+#[allow(clippy::deref_addrof, dead_code, clippy::needless_borrow)]
+fn issue7921() {
+ if let None = *(&None::<()>) {}
+ if let None = *&None::<()> {}
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr
new file mode 100644
index 000000000..27ff812ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr
@@ -0,0 +1,146 @@
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:14:12
+ |
+LL | if let None = None::<()> {}
+ | -------^^^^------------- help: try this: `if None::<()>.is_none()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:16:12
+ |
+LL | if let Some(_) = Some(42) {}
+ | -------^^^^^^^----------- help: try this: `if Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:18:12
+ |
+LL | if let Some(_) = Some(42) {
+ | -------^^^^^^^----------- help: try this: `if Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:24:15
+ |
+LL | while let Some(_) = Some(42) {}
+ | ----------^^^^^^^----------- help: try this: `while Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:26:15
+ |
+LL | while let None = Some(42) {}
+ | ----------^^^^----------- help: try this: `while Some(42).is_none()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:28:15
+ |
+LL | while let None = None::<()> {}
+ | ----------^^^^------------- help: try this: `while None::<()>.is_none()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:31:15
+ |
+LL | while let Some(_) = v.pop() {
+ | ----------^^^^^^^---------- help: try this: `while v.pop().is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:39:5
+ |
+LL | / match Some(42) {
+LL | | Some(_) => true,
+LL | | None => false,
+LL | | };
+ | |_____^ help: try this: `Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:44:5
+ |
+LL | / match None::<()> {
+LL | | Some(_) => false,
+LL | | None => true,
+LL | | };
+ | |_____^ help: try this: `None::<()>.is_none()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:49:13
+ |
+LL | let _ = match None::<()> {
+ | _____________^
+LL | | Some(_) => false,
+LL | | None => true,
+LL | | };
+ | |_____^ help: try this: `None::<()>.is_none()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:55:20
+ |
+LL | let _ = if let Some(_) = opt { true } else { false };
+ | -------^^^^^^^------ help: try this: `if opt.is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:59:20
+ |
+LL | let _ = if let Some(_) = gen_opt() {
+ | -------^^^^^^^------------ help: try this: `if gen_opt().is_some()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:61:19
+ |
+LL | } else if let None = gen_opt() {
+ | -------^^^^------------ help: try this: `if gen_opt().is_none()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:80:12
+ |
+LL | if let Some(_) = Some(42) {}
+ | -------^^^^^^^----------- help: try this: `if Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:82:12
+ |
+LL | if let None = None::<()> {}
+ | -------^^^^------------- help: try this: `if None::<()>.is_none()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:84:15
+ |
+LL | while let Some(_) = Some(42) {}
+ | ----------^^^^^^^----------- help: try this: `while Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:86:15
+ |
+LL | while let None = None::<()> {}
+ | ----------^^^^------------- help: try this: `while None::<()>.is_none()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_option.rs:88:5
+ |
+LL | / match Some(42) {
+LL | | Some(_) => true,
+LL | | None => false,
+LL | | };
+ | |_____^ help: try this: `Some(42).is_some()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:93:5
+ |
+LL | / match None::<()> {
+LL | | Some(_) => false,
+LL | | None => true,
+LL | | };
+ | |_____^ help: try this: `None::<()>.is_none()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:101:12
+ |
+LL | if let None = *(&None::<()>) {}
+ | -------^^^^----------------- help: try this: `if (&None::<()>).is_none()`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:102:12
+ |
+LL | if let None = *&None::<()> {}
+ | -------^^^^--------------- help: try this: `if (&None::<()>).is_none()`
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed
new file mode 100644
index 000000000..3645f2c4b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed
@@ -0,0 +1,76 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(
+ unused_must_use,
+ clippy::needless_bool,
+ clippy::match_like_matches_macro,
+ clippy::equatable_if_let,
+ clippy::if_same_then_else
+)]
+
+use std::task::Poll::{self, Pending, Ready};
+
+fn main() {
+ if Pending::<()>.is_pending() {}
+
+ if Ready(42).is_ready() {}
+
+ if Ready(42).is_ready() {
+ foo();
+ } else {
+ bar();
+ }
+
+ while Ready(42).is_ready() {}
+
+ while Ready(42).is_pending() {}
+
+ while Pending::<()>.is_pending() {}
+
+ if Pending::<i32>.is_pending() {}
+
+ if Ready(42).is_ready() {}
+
+ Ready(42).is_ready();
+
+ Pending::<()>.is_pending();
+
+ let _ = Pending::<()>.is_pending();
+
+ let poll = Ready(false);
+ let _ = if poll.is_ready() { true } else { false };
+
+ poll_const();
+
+ let _ = if gen_poll().is_ready() {
+ 1
+ } else if gen_poll().is_pending() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_poll() -> Poll<()> {
+ Pending
+}
+
+fn foo() {}
+
+fn bar() {}
+
+const fn poll_const() {
+ if Ready(42).is_ready() {}
+
+ if Pending::<()>.is_pending() {}
+
+ while Ready(42).is_ready() {}
+
+ while Pending::<()>.is_pending() {}
+
+ Ready(42).is_ready();
+
+ Pending::<()>.is_pending();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs
new file mode 100644
index 000000000..866c71b7c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs
@@ -0,0 +1,91 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(
+ unused_must_use,
+ clippy::needless_bool,
+ clippy::match_like_matches_macro,
+ clippy::equatable_if_let,
+ clippy::if_same_then_else
+)]
+
+use std::task::Poll::{self, Pending, Ready};
+
+fn main() {
+ if let Pending = Pending::<()> {}
+
+ if let Ready(_) = Ready(42) {}
+
+ if let Ready(_) = Ready(42) {
+ foo();
+ } else {
+ bar();
+ }
+
+ while let Ready(_) = Ready(42) {}
+
+ while let Pending = Ready(42) {}
+
+ while let Pending = Pending::<()> {}
+
+ if Pending::<i32>.is_pending() {}
+
+ if Ready(42).is_ready() {}
+
+ match Ready(42) {
+ Ready(_) => true,
+ Pending => false,
+ };
+
+ match Pending::<()> {
+ Ready(_) => false,
+ Pending => true,
+ };
+
+ let _ = match Pending::<()> {
+ Ready(_) => false,
+ Pending => true,
+ };
+
+ let poll = Ready(false);
+ let _ = if let Ready(_) = poll { true } else { false };
+
+ poll_const();
+
+ let _ = if let Ready(_) = gen_poll() {
+ 1
+ } else if let Pending = gen_poll() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_poll() -> Poll<()> {
+ Pending
+}
+
+fn foo() {}
+
+fn bar() {}
+
+const fn poll_const() {
+ if let Ready(_) = Ready(42) {}
+
+ if let Pending = Pending::<()> {}
+
+ while let Ready(_) = Ready(42) {}
+
+ while let Pending = Pending::<()> {}
+
+ match Ready(42) {
+ Ready(_) => true,
+ Pending => false,
+ };
+
+ match Pending::<()> {
+ Ready(_) => false,
+ Pending => true,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr
new file mode 100644
index 000000000..1b480f315
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr
@@ -0,0 +1,128 @@
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:16:12
+ |
+LL | if let Pending = Pending::<()> {}
+ | -------^^^^^^^---------------- help: try this: `if Pending::<()>.is_pending()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:18:12
+ |
+LL | if let Ready(_) = Ready(42) {}
+ | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:20:12
+ |
+LL | if let Ready(_) = Ready(42) {
+ | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:26:15
+ |
+LL | while let Ready(_) = Ready(42) {}
+ | ----------^^^^^^^^------------ help: try this: `while Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:28:15
+ |
+LL | while let Pending = Ready(42) {}
+ | ----------^^^^^^^------------ help: try this: `while Ready(42).is_pending()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:30:15
+ |
+LL | while let Pending = Pending::<()> {}
+ | ----------^^^^^^^---------------- help: try this: `while Pending::<()>.is_pending()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:36:5
+ |
+LL | / match Ready(42) {
+LL | | Ready(_) => true,
+LL | | Pending => false,
+LL | | };
+ | |_____^ help: try this: `Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:41:5
+ |
+LL | / match Pending::<()> {
+LL | | Ready(_) => false,
+LL | | Pending => true,
+LL | | };
+ | |_____^ help: try this: `Pending::<()>.is_pending()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:46:13
+ |
+LL | let _ = match Pending::<()> {
+ | _____________^
+LL | | Ready(_) => false,
+LL | | Pending => true,
+LL | | };
+ | |_____^ help: try this: `Pending::<()>.is_pending()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:52:20
+ |
+LL | let _ = if let Ready(_) = poll { true } else { false };
+ | -------^^^^^^^^------- help: try this: `if poll.is_ready()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:56:20
+ |
+LL | let _ = if let Ready(_) = gen_poll() {
+ | -------^^^^^^^^------------- help: try this: `if gen_poll().is_ready()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:58:19
+ |
+LL | } else if let Pending = gen_poll() {
+ | -------^^^^^^^------------- help: try this: `if gen_poll().is_pending()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:74:12
+ |
+LL | if let Ready(_) = Ready(42) {}
+ | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:76:12
+ |
+LL | if let Pending = Pending::<()> {}
+ | -------^^^^^^^---------------- help: try this: `if Pending::<()>.is_pending()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:78:15
+ |
+LL | while let Ready(_) = Ready(42) {}
+ | ----------^^^^^^^^------------ help: try this: `while Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:80:15
+ |
+LL | while let Pending = Pending::<()> {}
+ | ----------^^^^^^^---------------- help: try this: `while Pending::<()>.is_pending()`
+
+error: redundant pattern matching, consider using `is_ready()`
+ --> $DIR/redundant_pattern_matching_poll.rs:82:5
+ |
+LL | / match Ready(42) {
+LL | | Ready(_) => true,
+LL | | Pending => false,
+LL | | };
+ | |_____^ help: try this: `Ready(42).is_ready()`
+
+error: redundant pattern matching, consider using `is_pending()`
+ --> $DIR/redundant_pattern_matching_poll.rs:87:5
+ |
+LL | / match Pending::<()> {
+LL | | Ready(_) => false,
+LL | | Pending => true,
+LL | | };
+ | |_____^ help: try this: `Pending::<()>.is_pending()`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed
new file mode 100644
index 000000000..83c783385
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed
@@ -0,0 +1,110 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(
+ unused_must_use,
+ clippy::needless_bool,
+ clippy::match_like_matches_macro,
+ clippy::unnecessary_wraps,
+ deprecated,
+ clippy::if_same_then_else
+)]
+
+fn main() {
+ let result: Result<usize, usize> = Err(5);
+ if result.is_ok() {}
+
+ if Ok::<i32, i32>(42).is_ok() {}
+
+ if Err::<i32, i32>(42).is_err() {}
+
+ while Ok::<i32, i32>(10).is_ok() {}
+
+ while Ok::<i32, i32>(10).is_err() {}
+
+ if Ok::<i32, i32>(42).is_ok() {}
+
+ if Err::<i32, i32>(42).is_err() {}
+
+ if let Ok(x) = Ok::<i32, i32>(42) {
+ println!("{}", x);
+ }
+
+ Ok::<i32, i32>(42).is_ok();
+
+ Ok::<i32, i32>(42).is_err();
+
+ Err::<i32, i32>(42).is_err();
+
+ Err::<i32, i32>(42).is_ok();
+
+ let _ = if Ok::<usize, ()>(4).is_ok() { true } else { false };
+
+ issue5504();
+ issue6067();
+ issue6065();
+
+ let _ = if gen_res().is_ok() {
+ 1
+ } else if gen_res().is_err() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_res() -> Result<(), ()> {
+ Ok(())
+}
+
+macro_rules! m {
+ () => {
+ Some(42u32)
+ };
+}
+
+fn issue5504() {
+ fn result_opt() -> Result<Option<i32>, i32> {
+ Err(42)
+ }
+
+ fn try_result_opt() -> Result<i32, i32> {
+ while (r#try!(result_opt())).is_some() {}
+ if (r#try!(result_opt())).is_some() {}
+ Ok(42)
+ }
+
+ try_result_opt();
+
+ if m!().is_some() {}
+ while m!().is_some() {}
+}
+
+fn issue6065() {
+ macro_rules! if_let_in_macro {
+ ($pat:pat, $x:expr) => {
+ if let Some($pat) = $x {}
+ };
+ }
+
+ // shouldn't be linted
+ if_let_in_macro!(_, Some(42));
+}
+
+// Methods that are unstable const should not be suggested within a const context, see issue #5697.
+// However, in Rust 1.48.0 the methods `is_ok` and `is_err` of `Result` were stabilized as const,
+// so the following should be linted.
+const fn issue6067() {
+ if Ok::<i32, i32>(42).is_ok() {}
+
+ if Err::<i32, i32>(42).is_err() {}
+
+ while Ok::<i32, i32>(10).is_ok() {}
+
+ while Ok::<i32, i32>(10).is_err() {}
+
+ Ok::<i32, i32>(42).is_ok();
+
+ Err::<i32, i32>(42).is_err();
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs
new file mode 100644
index 000000000..e06d4485a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs
@@ -0,0 +1,128 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![warn(clippy::redundant_pattern_matching)]
+#![allow(
+ unused_must_use,
+ clippy::needless_bool,
+ clippy::match_like_matches_macro,
+ clippy::unnecessary_wraps,
+ deprecated,
+ clippy::if_same_then_else
+)]
+
+fn main() {
+ let result: Result<usize, usize> = Err(5);
+ if let Ok(_) = &result {}
+
+ if let Ok(_) = Ok::<i32, i32>(42) {}
+
+ if let Err(_) = Err::<i32, i32>(42) {}
+
+ while let Ok(_) = Ok::<i32, i32>(10) {}
+
+ while let Err(_) = Ok::<i32, i32>(10) {}
+
+ if Ok::<i32, i32>(42).is_ok() {}
+
+ if Err::<i32, i32>(42).is_err() {}
+
+ if let Ok(x) = Ok::<i32, i32>(42) {
+ println!("{}", x);
+ }
+
+ match Ok::<i32, i32>(42) {
+ Ok(_) => true,
+ Err(_) => false,
+ };
+
+ match Ok::<i32, i32>(42) {
+ Ok(_) => false,
+ Err(_) => true,
+ };
+
+ match Err::<i32, i32>(42) {
+ Ok(_) => false,
+ Err(_) => true,
+ };
+
+ match Err::<i32, i32>(42) {
+ Ok(_) => true,
+ Err(_) => false,
+ };
+
+ let _ = if let Ok(_) = Ok::<usize, ()>(4) { true } else { false };
+
+ issue5504();
+ issue6067();
+ issue6065();
+
+ let _ = if let Ok(_) = gen_res() {
+ 1
+ } else if let Err(_) = gen_res() {
+ 2
+ } else {
+ 3
+ };
+}
+
+fn gen_res() -> Result<(), ()> {
+ Ok(())
+}
+
+macro_rules! m {
+ () => {
+ Some(42u32)
+ };
+}
+
+fn issue5504() {
+ fn result_opt() -> Result<Option<i32>, i32> {
+ Err(42)
+ }
+
+ fn try_result_opt() -> Result<i32, i32> {
+ while let Some(_) = r#try!(result_opt()) {}
+ if let Some(_) = r#try!(result_opt()) {}
+ Ok(42)
+ }
+
+ try_result_opt();
+
+ if let Some(_) = m!() {}
+ while let Some(_) = m!() {}
+}
+
+fn issue6065() {
+ macro_rules! if_let_in_macro {
+ ($pat:pat, $x:expr) => {
+ if let Some($pat) = $x {}
+ };
+ }
+
+ // shouldn't be linted
+ if_let_in_macro!(_, Some(42));
+}
+
+// Methods that are unstable const should not be suggested within a const context, see issue #5697.
+// However, in Rust 1.48.0 the methods `is_ok` and `is_err` of `Result` were stabilized as const,
+// so the following should be linted.
+const fn issue6067() {
+ if let Ok(_) = Ok::<i32, i32>(42) {}
+
+ if let Err(_) = Err::<i32, i32>(42) {}
+
+ while let Ok(_) = Ok::<i32, i32>(10) {}
+
+ while let Err(_) = Ok::<i32, i32>(10) {}
+
+ match Ok::<i32, i32>(42) {
+ Ok(_) => true,
+ Err(_) => false,
+ };
+
+ match Err::<i32, i32>(42) {
+ Ok(_) => false,
+ Err(_) => true,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
new file mode 100644
index 000000000..d674d061e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
@@ -0,0 +1,154 @@
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:16:12
+ |
+LL | if let Ok(_) = &result {}
+ | -------^^^^^---------- help: try this: `if result.is_ok()`
+ |
+ = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:18:12
+ |
+LL | if let Ok(_) = Ok::<i32, i32>(42) {}
+ | -------^^^^^--------------------- help: try this: `if Ok::<i32, i32>(42).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:20:12
+ |
+LL | if let Err(_) = Err::<i32, i32>(42) {}
+ | -------^^^^^^---------------------- help: try this: `if Err::<i32, i32>(42).is_err()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:22:15
+ |
+LL | while let Ok(_) = Ok::<i32, i32>(10) {}
+ | ----------^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:24:15
+ |
+LL | while let Err(_) = Ok::<i32, i32>(10) {}
+ | ----------^^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_err()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:34:5
+ |
+LL | / match Ok::<i32, i32>(42) {
+LL | | Ok(_) => true,
+LL | | Err(_) => false,
+LL | | };
+ | |_____^ help: try this: `Ok::<i32, i32>(42).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:39:5
+ |
+LL | / match Ok::<i32, i32>(42) {
+LL | | Ok(_) => false,
+LL | | Err(_) => true,
+LL | | };
+ | |_____^ help: try this: `Ok::<i32, i32>(42).is_err()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:44:5
+ |
+LL | / match Err::<i32, i32>(42) {
+LL | | Ok(_) => false,
+LL | | Err(_) => true,
+LL | | };
+ | |_____^ help: try this: `Err::<i32, i32>(42).is_err()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:49:5
+ |
+LL | / match Err::<i32, i32>(42) {
+LL | | Ok(_) => true,
+LL | | Err(_) => false,
+LL | | };
+ | |_____^ help: try this: `Err::<i32, i32>(42).is_ok()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:54:20
+ |
+LL | let _ = if let Ok(_) = Ok::<usize, ()>(4) { true } else { false };
+ | -------^^^^^--------------------- help: try this: `if Ok::<usize, ()>(4).is_ok()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:60:20
+ |
+LL | let _ = if let Ok(_) = gen_res() {
+ | -------^^^^^------------ help: try this: `if gen_res().is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:62:19
+ |
+LL | } else if let Err(_) = gen_res() {
+ | -------^^^^^^------------ help: try this: `if gen_res().is_err()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_result.rs:85:19
+ |
+LL | while let Some(_) = r#try!(result_opt()) {}
+ | ----------^^^^^^^----------------------- help: try this: `while (r#try!(result_opt())).is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_result.rs:86:16
+ |
+LL | if let Some(_) = r#try!(result_opt()) {}
+ | -------^^^^^^^----------------------- help: try this: `if (r#try!(result_opt())).is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_result.rs:92:12
+ |
+LL | if let Some(_) = m!() {}
+ | -------^^^^^^^------- help: try this: `if m!().is_some()`
+
+error: redundant pattern matching, consider using `is_some()`
+ --> $DIR/redundant_pattern_matching_result.rs:93:15
+ |
+LL | while let Some(_) = m!() {}
+ | ----------^^^^^^^------- help: try this: `while m!().is_some()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:111:12
+ |
+LL | if let Ok(_) = Ok::<i32, i32>(42) {}
+ | -------^^^^^--------------------- help: try this: `if Ok::<i32, i32>(42).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:113:12
+ |
+LL | if let Err(_) = Err::<i32, i32>(42) {}
+ | -------^^^^^^---------------------- help: try this: `if Err::<i32, i32>(42).is_err()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:115:15
+ |
+LL | while let Ok(_) = Ok::<i32, i32>(10) {}
+ | ----------^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:117:15
+ |
+LL | while let Err(_) = Ok::<i32, i32>(10) {}
+ | ----------^^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_err()`
+
+error: redundant pattern matching, consider using `is_ok()`
+ --> $DIR/redundant_pattern_matching_result.rs:119:5
+ |
+LL | / match Ok::<i32, i32>(42) {
+LL | | Ok(_) => true,
+LL | | Err(_) => false,
+LL | | };
+ | |_____^ help: try this: `Ok::<i32, i32>(42).is_ok()`
+
+error: redundant pattern matching, consider using `is_err()`
+ --> $DIR/redundant_pattern_matching_result.rs:124:5
+ |
+LL | / match Err::<i32, i32>(42) {
+LL | | Ok(_) => false,
+LL | | Err(_) => true,
+LL | | };
+ | |_____^ help: try this: `Err::<i32, i32>(42).is_err()`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pub_crate.fixed b/src/tools/clippy/tests/ui/redundant_pub_crate.fixed
new file mode 100644
index 000000000..106947de6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pub_crate.fixed
@@ -0,0 +1,117 @@
+// run-rustfix
+#![allow(dead_code)]
+#![warn(clippy::redundant_pub_crate)]
+
+mod m1 {
+ fn f() {}
+ pub fn g() {} // private due to m1
+ pub fn h() {}
+
+ mod m1_1 {
+ fn f() {}
+ pub fn g() {} // private due to m1_1 and m1
+ pub fn h() {}
+ }
+
+ pub mod m1_2 {
+ // ^ private due to m1
+ fn f() {}
+ pub fn g() {} // private due to m1_2 and m1
+ pub fn h() {}
+ }
+
+ pub mod m1_3 {
+ fn f() {}
+ pub fn g() {} // private due to m1
+ pub fn h() {}
+ }
+}
+
+pub(crate) mod m2 {
+ fn f() {}
+ pub fn g() {} // already crate visible due to m2
+ pub fn h() {}
+
+ mod m2_1 {
+ fn f() {}
+ pub fn g() {} // private due to m2_1
+ pub fn h() {}
+ }
+
+ pub mod m2_2 {
+ // ^ already crate visible due to m2
+ fn f() {}
+ pub fn g() {} // already crate visible due to m2_2 and m2
+ pub fn h() {}
+ }
+
+ pub mod m2_3 {
+ fn f() {}
+ pub fn g() {} // already crate visible due to m2
+ pub fn h() {}
+ }
+}
+
+pub mod m3 {
+ fn f() {}
+ pub(crate) fn g() {} // ok: m3 is exported
+ pub fn h() {}
+
+ mod m3_1 {
+ fn f() {}
+ pub fn g() {} // private due to m3_1
+ pub fn h() {}
+ }
+
+ pub(crate) mod m3_2 {
+ // ^ ok
+ fn f() {}
+ pub fn g() {} // already crate visible due to m3_2
+ pub fn h() {}
+ }
+
+ pub mod m3_3 {
+ fn f() {}
+ pub(crate) fn g() {} // ok: m3 and m3_3 are exported
+ pub fn h() {}
+ }
+}
+
+mod m4 {
+ fn f() {}
+ pub fn g() {} // private: not re-exported by `pub use m4::*`
+ pub fn h() {}
+
+ mod m4_1 {
+ fn f() {}
+ pub fn g() {} // private due to m4_1
+ pub fn h() {}
+ }
+
+ pub mod m4_2 {
+ // ^ private: not re-exported by `pub use m4::*`
+ fn f() {}
+ pub fn g() {} // private due to m4_2
+ pub fn h() {}
+ }
+
+ pub mod m4_3 {
+ fn f() {}
+ pub(crate) fn g() {} // ok: m4_3 is re-exported by `pub use m4::*`
+ pub fn h() {}
+ }
+}
+
+pub use m4::*;
+
+mod issue_8732 {
+ #[allow(unused_macros)]
+ macro_rules! some_macro {
+ () => {};
+ }
+
+ #[allow(unused_imports)]
+ pub(crate) use some_macro; // ok: macro exports are exempt
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/redundant_pub_crate.rs b/src/tools/clippy/tests/ui/redundant_pub_crate.rs
new file mode 100644
index 000000000..f96cfd318
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pub_crate.rs
@@ -0,0 +1,117 @@
+// run-rustfix
+#![allow(dead_code)]
+#![warn(clippy::redundant_pub_crate)]
+
+mod m1 {
+ fn f() {}
+ pub(crate) fn g() {} // private due to m1
+ pub fn h() {}
+
+ mod m1_1 {
+ fn f() {}
+ pub(crate) fn g() {} // private due to m1_1 and m1
+ pub fn h() {}
+ }
+
+ pub(crate) mod m1_2 {
+ // ^ private due to m1
+ fn f() {}
+ pub(crate) fn g() {} // private due to m1_2 and m1
+ pub fn h() {}
+ }
+
+ pub mod m1_3 {
+ fn f() {}
+ pub(crate) fn g() {} // private due to m1
+ pub fn h() {}
+ }
+}
+
+pub(crate) mod m2 {
+ fn f() {}
+ pub(crate) fn g() {} // already crate visible due to m2
+ pub fn h() {}
+
+ mod m2_1 {
+ fn f() {}
+ pub(crate) fn g() {} // private due to m2_1
+ pub fn h() {}
+ }
+
+ pub(crate) mod m2_2 {
+ // ^ already crate visible due to m2
+ fn f() {}
+ pub(crate) fn g() {} // already crate visible due to m2_2 and m2
+ pub fn h() {}
+ }
+
+ pub mod m2_3 {
+ fn f() {}
+ pub(crate) fn g() {} // already crate visible due to m2
+ pub fn h() {}
+ }
+}
+
+pub mod m3 {
+ fn f() {}
+ pub(crate) fn g() {} // ok: m3 is exported
+ pub fn h() {}
+
+ mod m3_1 {
+ fn f() {}
+ pub(crate) fn g() {} // private due to m3_1
+ pub fn h() {}
+ }
+
+ pub(crate) mod m3_2 {
+ // ^ ok
+ fn f() {}
+ pub(crate) fn g() {} // already crate visible due to m3_2
+ pub fn h() {}
+ }
+
+ pub mod m3_3 {
+ fn f() {}
+ pub(crate) fn g() {} // ok: m3 and m3_3 are exported
+ pub fn h() {}
+ }
+}
+
+mod m4 {
+ fn f() {}
+ pub(crate) fn g() {} // private: not re-exported by `pub use m4::*`
+ pub fn h() {}
+
+ mod m4_1 {
+ fn f() {}
+ pub(crate) fn g() {} // private due to m4_1
+ pub fn h() {}
+ }
+
+ pub(crate) mod m4_2 {
+ // ^ private: not re-exported by `pub use m4::*`
+ fn f() {}
+ pub(crate) fn g() {} // private due to m4_2
+ pub fn h() {}
+ }
+
+ pub mod m4_3 {
+ fn f() {}
+ pub(crate) fn g() {} // ok: m4_3 is re-exported by `pub use m4::*`
+ pub fn h() {}
+ }
+}
+
+pub use m4::*;
+
+mod issue_8732 {
+ #[allow(unused_macros)]
+ macro_rules! some_macro {
+ () => {};
+ }
+
+ #[allow(unused_imports)]
+ pub(crate) use some_macro; // ok: macro exports are exempt
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/redundant_pub_crate.stderr b/src/tools/clippy/tests/ui/redundant_pub_crate.stderr
new file mode 100644
index 000000000..6fccdaa4e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_pub_crate.stderr
@@ -0,0 +1,132 @@
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:7:5
+ |
+LL | pub(crate) fn g() {} // private due to m1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+ |
+ = note: `-D clippy::redundant-pub-crate` implied by `-D warnings`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:12:9
+ |
+LL | pub(crate) fn g() {} // private due to m1_1 and m1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) module inside private module
+ --> $DIR/redundant_pub_crate.rs:16:5
+ |
+LL | pub(crate) mod m1_2 {
+ | ----------^^^^^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:19:9
+ |
+LL | pub(crate) fn g() {} // private due to m1_2 and m1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:25:9
+ |
+LL | pub(crate) fn g() {} // private due to m1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:32:5
+ |
+LL | pub(crate) fn g() {} // already crate visible due to m2
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:37:9
+ |
+LL | pub(crate) fn g() {} // private due to m2_1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) module inside private module
+ --> $DIR/redundant_pub_crate.rs:41:5
+ |
+LL | pub(crate) mod m2_2 {
+ | ----------^^^^^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:44:9
+ |
+LL | pub(crate) fn g() {} // already crate visible due to m2_2 and m2
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:50:9
+ |
+LL | pub(crate) fn g() {} // already crate visible due to m2
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:62:9
+ |
+LL | pub(crate) fn g() {} // private due to m3_1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:69:9
+ |
+LL | pub(crate) fn g() {} // already crate visible due to m3_2
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:82:5
+ |
+LL | pub(crate) fn g() {} // private: not re-exported by `pub use m4::*`
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:87:9
+ |
+LL | pub(crate) fn g() {} // private due to m4_1
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) module inside private module
+ --> $DIR/redundant_pub_crate.rs:91:5
+ |
+LL | pub(crate) mod m4_2 {
+ | ----------^^^^^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: pub(crate) function inside private module
+ --> $DIR/redundant_pub_crate.rs:94:9
+ |
+LL | pub(crate) fn g() {} // private due to m4_2
+ | ----------^^^^^
+ | |
+ | help: consider using: `pub`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_slicing.fixed b/src/tools/clippy/tests/ui/redundant_slicing.fixed
new file mode 100644
index 000000000..8dd8d3092
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_slicing.fixed
@@ -0,0 +1,46 @@
+// run-rustfix
+
+#![allow(unused, clippy::deref_by_slicing)]
+#![warn(clippy::redundant_slicing)]
+
+use std::io::Read;
+
+fn main() {
+ let slice: &[u32] = &[0];
+ let _ = slice; // Redundant slice
+
+ let v = vec![0];
+ let _ = &v[..]; // Ok, results in `&[_]`
+ let _ = (&*v); // Outer borrow is redundant
+
+ static S: &[u8] = &[0, 1, 2];
+ let _ = &mut &S[..]; // Ok, re-borrows slice
+
+ let mut vec = vec![0];
+ let mut_slice = &mut vec[..]; // Ok, results in `&mut [_]`
+ let _ = &mut mut_slice[..]; // Ok, re-borrows slice
+
+ let ref_vec = &vec;
+ let _ = &ref_vec[..]; // Ok, results in `&[_]`
+
+ macro_rules! m {
+ ($e:expr) => {
+ $e
+ };
+ }
+ let _ = slice;
+
+ macro_rules! m2 {
+ ($e:expr) => {
+ &$e[..]
+ };
+ }
+ let _ = m2!(slice); // Don't lint in a macro
+
+ let slice_ref = &slice;
+ let _ = &slice_ref[..]; // Ok, derefs slice
+
+ // Issue #7972
+ let bytes: &[u8] = &[];
+ let _ = (&bytes[..]).read_to_end(&mut vec![]).unwrap(); // Ok, re-borrows slice
+}
diff --git a/src/tools/clippy/tests/ui/redundant_slicing.rs b/src/tools/clippy/tests/ui/redundant_slicing.rs
new file mode 100644
index 000000000..51c16dd8d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_slicing.rs
@@ -0,0 +1,46 @@
+// run-rustfix
+
+#![allow(unused, clippy::deref_by_slicing)]
+#![warn(clippy::redundant_slicing)]
+
+use std::io::Read;
+
+fn main() {
+ let slice: &[u32] = &[0];
+ let _ = &slice[..]; // Redundant slice
+
+ let v = vec![0];
+ let _ = &v[..]; // Ok, results in `&[_]`
+ let _ = &(&*v)[..]; // Outer borrow is redundant
+
+ static S: &[u8] = &[0, 1, 2];
+ let _ = &mut &S[..]; // Ok, re-borrows slice
+
+ let mut vec = vec![0];
+ let mut_slice = &mut vec[..]; // Ok, results in `&mut [_]`
+ let _ = &mut mut_slice[..]; // Ok, re-borrows slice
+
+ let ref_vec = &vec;
+ let _ = &ref_vec[..]; // Ok, results in `&[_]`
+
+ macro_rules! m {
+ ($e:expr) => {
+ $e
+ };
+ }
+ let _ = &m!(slice)[..];
+
+ macro_rules! m2 {
+ ($e:expr) => {
+ &$e[..]
+ };
+ }
+ let _ = m2!(slice); // Don't lint in a macro
+
+ let slice_ref = &slice;
+ let _ = &slice_ref[..]; // Ok, derefs slice
+
+ // Issue #7972
+ let bytes: &[u8] = &[];
+ let _ = (&bytes[..]).read_to_end(&mut vec![]).unwrap(); // Ok, re-borrows slice
+}
diff --git a/src/tools/clippy/tests/ui/redundant_slicing.stderr b/src/tools/clippy/tests/ui/redundant_slicing.stderr
new file mode 100644
index 000000000..82367143c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_slicing.stderr
@@ -0,0 +1,22 @@
+error: redundant slicing of the whole range
+ --> $DIR/redundant_slicing.rs:10:13
+ |
+LL | let _ = &slice[..]; // Redundant slice
+ | ^^^^^^^^^^ help: use the original value instead: `slice`
+ |
+ = note: `-D clippy::redundant-slicing` implied by `-D warnings`
+
+error: redundant slicing of the whole range
+ --> $DIR/redundant_slicing.rs:14:13
+ |
+LL | let _ = &(&*v)[..]; // Outer borrow is redundant
+ | ^^^^^^^^^^ help: use the original value instead: `(&*v)`
+
+error: redundant slicing of the whole range
+ --> $DIR/redundant_slicing.rs:31:13
+ |
+LL | let _ = &m!(slice)[..];
+ | ^^^^^^^^^^^^^^ help: use the original value instead: `slice`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_static_lifetimes.fixed b/src/tools/clippy/tests/ui/redundant_static_lifetimes.fixed
new file mode 100644
index 000000000..acc8f1e25
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_static_lifetimes.fixed
@@ -0,0 +1,56 @@
+// run-rustfix
+
+#![allow(unused)]
+
+#[derive(Debug)]
+struct Foo;
+
+const VAR_ONE: &str = "Test constant #1"; // ERROR Consider removing 'static.
+
+const VAR_TWO: &str = "Test constant #2"; // This line should not raise a warning.
+
+const VAR_THREE: &[&str] = &["one", "two"]; // ERROR Consider removing 'static
+
+const VAR_FOUR: (&str, (&str, &str), &str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+
+const VAR_SIX: &u8 = &5;
+
+const VAR_HEIGHT: &Foo = &Foo {};
+
+const VAR_SLICE: &[u8] = b"Test constant #1"; // ERROR Consider removing 'static.
+
+const VAR_TUPLE: &(u8, u8) = &(1, 2); // ERROR Consider removing 'static.
+
+const VAR_ARRAY: &[u8; 1] = b"T"; // ERROR Consider removing 'static.
+
+static STATIC_VAR_ONE: &str = "Test static #1"; // ERROR Consider removing 'static.
+
+static STATIC_VAR_TWO: &str = "Test static #2"; // This line should not raise a warning.
+
+static STATIC_VAR_THREE: &[&str] = &["one", "two"]; // ERROR Consider removing 'static
+
+static STATIC_VAR_SIX: &u8 = &5;
+
+static STATIC_VAR_HEIGHT: &Foo = &Foo {};
+
+static STATIC_VAR_SLICE: &[u8] = b"Test static #3"; // ERROR Consider removing 'static.
+
+static STATIC_VAR_TUPLE: &(u8, u8) = &(1, 2); // ERROR Consider removing 'static.
+
+static STATIC_VAR_ARRAY: &[u8; 1] = b"T"; // ERROR Consider removing 'static.
+
+fn main() {
+ let false_positive: &'static str = "test";
+}
+
+trait Bar {
+ const TRAIT_VAR: &'static str;
+}
+
+impl Foo {
+ const IMPL_VAR: &'static str = "var";
+}
+
+impl Bar for Foo {
+ const TRAIT_VAR: &'static str = "foo";
+}
diff --git a/src/tools/clippy/tests/ui/redundant_static_lifetimes.rs b/src/tools/clippy/tests/ui/redundant_static_lifetimes.rs
new file mode 100644
index 000000000..f2f0f7865
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_static_lifetimes.rs
@@ -0,0 +1,56 @@
+// run-rustfix
+
+#![allow(unused)]
+
+#[derive(Debug)]
+struct Foo;
+
+const VAR_ONE: &'static str = "Test constant #1"; // ERROR Consider removing 'static.
+
+const VAR_TWO: &str = "Test constant #2"; // This line should not raise a warning.
+
+const VAR_THREE: &[&'static str] = &["one", "two"]; // ERROR Consider removing 'static
+
+const VAR_FOUR: (&str, (&str, &'static str), &'static str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+
+const VAR_SIX: &'static u8 = &5;
+
+const VAR_HEIGHT: &'static Foo = &Foo {};
+
+const VAR_SLICE: &'static [u8] = b"Test constant #1"; // ERROR Consider removing 'static.
+
+const VAR_TUPLE: &'static (u8, u8) = &(1, 2); // ERROR Consider removing 'static.
+
+const VAR_ARRAY: &'static [u8; 1] = b"T"; // ERROR Consider removing 'static.
+
+static STATIC_VAR_ONE: &'static str = "Test static #1"; // ERROR Consider removing 'static.
+
+static STATIC_VAR_TWO: &str = "Test static #2"; // This line should not raise a warning.
+
+static STATIC_VAR_THREE: &[&'static str] = &["one", "two"]; // ERROR Consider removing 'static
+
+static STATIC_VAR_SIX: &'static u8 = &5;
+
+static STATIC_VAR_HEIGHT: &'static Foo = &Foo {};
+
+static STATIC_VAR_SLICE: &'static [u8] = b"Test static #3"; // ERROR Consider removing 'static.
+
+static STATIC_VAR_TUPLE: &'static (u8, u8) = &(1, 2); // ERROR Consider removing 'static.
+
+static STATIC_VAR_ARRAY: &'static [u8; 1] = b"T"; // ERROR Consider removing 'static.
+
+fn main() {
+ let false_positive: &'static str = "test";
+}
+
+trait Bar {
+ const TRAIT_VAR: &'static str;
+}
+
+impl Foo {
+ const IMPL_VAR: &'static str = "var";
+}
+
+impl Bar for Foo {
+ const TRAIT_VAR: &'static str = "foo";
+}
diff --git a/src/tools/clippy/tests/ui/redundant_static_lifetimes.stderr b/src/tools/clippy/tests/ui/redundant_static_lifetimes.stderr
new file mode 100644
index 000000000..649831f9c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_static_lifetimes.stderr
@@ -0,0 +1,100 @@
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:8:17
+ |
+LL | const VAR_ONE: &'static str = "Test constant #1"; // ERROR Consider removing 'static.
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+ |
+ = note: `-D clippy::redundant-static-lifetimes` implied by `-D warnings`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:12:21
+ |
+LL | const VAR_THREE: &[&'static str] = &["one", "two"]; // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:14:32
+ |
+LL | const VAR_FOUR: (&str, (&str, &'static str), &'static str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:14:47
+ |
+LL | const VAR_FOUR: (&str, (&str, &'static str), &'static str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:16:17
+ |
+LL | const VAR_SIX: &'static u8 = &5;
+ | -^^^^^^^--- help: consider removing `'static`: `&u8`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:18:20
+ |
+LL | const VAR_HEIGHT: &'static Foo = &Foo {};
+ | -^^^^^^^---- help: consider removing `'static`: `&Foo`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:20:19
+ |
+LL | const VAR_SLICE: &'static [u8] = b"Test constant #1"; // ERROR Consider removing 'static.
+ | -^^^^^^^----- help: consider removing `'static`: `&[u8]`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:22:19
+ |
+LL | const VAR_TUPLE: &'static (u8, u8) = &(1, 2); // ERROR Consider removing 'static.
+ | -^^^^^^^--------- help: consider removing `'static`: `&(u8, u8)`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:24:19
+ |
+LL | const VAR_ARRAY: &'static [u8; 1] = b"T"; // ERROR Consider removing 'static.
+ | -^^^^^^^-------- help: consider removing `'static`: `&[u8; 1]`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:26:25
+ |
+LL | static STATIC_VAR_ONE: &'static str = "Test static #1"; // ERROR Consider removing 'static.
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:30:29
+ |
+LL | static STATIC_VAR_THREE: &[&'static str] = &["one", "two"]; // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:32:25
+ |
+LL | static STATIC_VAR_SIX: &'static u8 = &5;
+ | -^^^^^^^--- help: consider removing `'static`: `&u8`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:34:28
+ |
+LL | static STATIC_VAR_HEIGHT: &'static Foo = &Foo {};
+ | -^^^^^^^---- help: consider removing `'static`: `&Foo`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:36:27
+ |
+LL | static STATIC_VAR_SLICE: &'static [u8] = b"Test static #3"; // ERROR Consider removing 'static.
+ | -^^^^^^^----- help: consider removing `'static`: `&[u8]`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:38:27
+ |
+LL | static STATIC_VAR_TUPLE: &'static (u8, u8) = &(1, 2); // ERROR Consider removing 'static.
+ | -^^^^^^^--------- help: consider removing `'static`: `&(u8, u8)`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes.rs:40:27
+ |
+LL | static STATIC_VAR_ARRAY: &'static [u8; 1] = b"T"; // ERROR Consider removing 'static.
+ | -^^^^^^^-------- help: consider removing `'static`: `&[u8; 1]`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.rs b/src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.rs
new file mode 100644
index 000000000..f57dd58e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.rs
@@ -0,0 +1,13 @@
+// these are rustfixable, but run-rustfix tests cannot handle them
+
+const VAR_FIVE: &'static [&[&'static str]] = &[&["test"], &["other one"]]; // ERROR Consider removing 'static
+
+const VAR_SEVEN: &[&(&str, &'static [&'static str])] = &[&("one", &["other one"])];
+
+static STATIC_VAR_FOUR: (&str, (&str, &'static str), &'static str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+
+static STATIC_VAR_FIVE: &'static [&[&'static str]] = &[&["test"], &["other one"]]; // ERROR Consider removing 'static
+
+static STATIC_VAR_SEVEN: &[&(&str, &'static [&'static str])] = &[&("one", &["other one"])];
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.stderr b/src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.stderr
new file mode 100644
index 000000000..cc7e55a75
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_static_lifetimes_multiple.stderr
@@ -0,0 +1,64 @@
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:3:18
+ |
+LL | const VAR_FIVE: &'static [&[&'static str]] = &[&["test"], &["other one"]]; // ERROR Consider removing 'static
+ | -^^^^^^^------------------ help: consider removing `'static`: `&[&[&'static str]]`
+ |
+ = note: `-D clippy::redundant-static-lifetimes` implied by `-D warnings`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:3:30
+ |
+LL | const VAR_FIVE: &'static [&[&'static str]] = &[&["test"], &["other one"]]; // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:5:29
+ |
+LL | const VAR_SEVEN: &[&(&str, &'static [&'static str])] = &[&("one", &["other one"])];
+ | -^^^^^^^--------------- help: consider removing `'static`: `&[&'static str]`
+
+error: constants have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:5:39
+ |
+LL | const VAR_SEVEN: &[&(&str, &'static [&'static str])] = &[&("one", &["other one"])];
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:7:40
+ |
+LL | static STATIC_VAR_FOUR: (&str, (&str, &'static str), &'static str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:7:55
+ |
+LL | static STATIC_VAR_FOUR: (&str, (&str, &'static str), &'static str) = ("on", ("th", "th"), "on"); // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:9:26
+ |
+LL | static STATIC_VAR_FIVE: &'static [&[&'static str]] = &[&["test"], &["other one"]]; // ERROR Consider removing 'static
+ | -^^^^^^^------------------ help: consider removing `'static`: `&[&[&'static str]]`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:9:38
+ |
+LL | static STATIC_VAR_FIVE: &'static [&[&'static str]] = &[&["test"], &["other one"]]; // ERROR Consider removing 'static
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:11:37
+ |
+LL | static STATIC_VAR_SEVEN: &[&(&str, &'static [&'static str])] = &[&("one", &["other one"])];
+ | -^^^^^^^--------------- help: consider removing `'static`: `&[&'static str]`
+
+error: statics have by default a `'static` lifetime
+ --> $DIR/redundant_static_lifetimes_multiple.rs:11:47
+ |
+LL | static STATIC_VAR_SEVEN: &[&(&str, &'static [&'static str])] = &[&("one", &["other one"])];
+ | -^^^^^^^---- help: consider removing `'static`: `&str`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ref_binding_to_reference.rs b/src/tools/clippy/tests/ui/ref_binding_to_reference.rs
new file mode 100644
index 000000000..c8d0e56b1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ref_binding_to_reference.rs
@@ -0,0 +1,85 @@
+// FIXME: run-rustfix waiting on multi-span suggestions
+
+#![feature(lint_reasons)]
+#![warn(clippy::ref_binding_to_reference)]
+#![allow(clippy::needless_borrowed_reference, clippy::explicit_auto_deref)]
+
+fn f1(_: &str) {}
+macro_rules! m2 {
+ ($e:expr) => {
+ f1(*$e)
+ };
+}
+macro_rules! m3 {
+ ($i:ident) => {
+ Some(ref $i)
+ };
+}
+
+#[allow(dead_code)]
+fn main() {
+ let x = String::new();
+
+ // Ok, the pattern is from a macro
+ let _: &&String = match Some(&x) {
+ m3!(x) => x,
+ None => return,
+ };
+
+ // Err, reference to a &String
+ let _: &&String = match Some(&x) {
+ Some(ref x) => x,
+ None => return,
+ };
+
+ // Err, reference to a &String
+ let _: &&String = match Some(&x) {
+ Some(ref x) => {
+ f1(x);
+ f1(*x);
+ x
+ },
+ None => return,
+ };
+
+ // Err, reference to a &String
+ match Some(&x) {
+ Some(ref x) => m2!(x),
+ None => return,
+ }
+
+ // Err, reference to a &String
+ let _ = |&ref x: &&String| {
+ let _: &&String = x;
+ };
+}
+
+// Err, reference to a &String
+fn f2<'a>(&ref x: &&'a String) -> &'a String {
+ let _: &&String = x;
+ *x
+}
+
+trait T1 {
+ // Err, reference to a &String
+ fn f(&ref x: &&String) {
+ let _: &&String = x;
+ }
+}
+
+struct S;
+impl T1 for S {
+ // Err, reference to a &String
+ fn f(&ref x: &&String) {
+ let _: &&String = x;
+ }
+}
+
+fn check_expect_suppression() {
+ let x = String::new();
+ #[expect(clippy::ref_binding_to_reference)]
+ let _: &&String = match Some(&x) {
+ Some(ref x) => x,
+ None => return,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr b/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr
new file mode 100644
index 000000000..eb36cd516
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr
@@ -0,0 +1,88 @@
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:31:14
+ |
+LL | Some(ref x) => x,
+ | ^^^^^
+ |
+ = note: `-D clippy::ref-binding-to-reference` implied by `-D warnings`
+help: try this
+ |
+LL | Some(x) => &x,
+ | ~ ~~
+
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:37:14
+ |
+LL | Some(ref x) => {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ Some(x) => {
+LL | f1(x);
+LL ~ f1(x);
+LL ~ &x
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:47:14
+ |
+LL | Some(ref x) => m2!(x),
+ | ^^^^^
+ |
+help: try this
+ |
+LL | Some(x) => m2!(&x),
+ | ~ ~~
+
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:52:15
+ |
+LL | let _ = |&ref x: &&String| {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ let _ = |&x: &&String| {
+LL ~ let _: &&String = &x;
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:58:12
+ |
+LL | fn f2<'a>(&ref x: &&'a String) -> &'a String {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ fn f2<'a>(&x: &&'a String) -> &'a String {
+LL ~ let _: &&String = &x;
+LL ~ x
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:65:11
+ |
+LL | fn f(&ref x: &&String) {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ fn f(&x: &&String) {
+LL ~ let _: &&String = &x;
+ |
+
+error: this pattern creates a reference to a reference
+ --> $DIR/ref_binding_to_reference.rs:73:11
+ |
+LL | fn f(&ref x: &&String) {
+ | ^^^^^
+ |
+help: try this
+ |
+LL ~ fn f(&x: &&String) {
+LL ~ let _: &&String = &x;
+ |
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ref_option_ref.rs b/src/tools/clippy/tests/ui/ref_option_ref.rs
new file mode 100644
index 000000000..2df45c927
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ref_option_ref.rs
@@ -0,0 +1,47 @@
+#![allow(unused)]
+#![warn(clippy::ref_option_ref)]
+
+// This lint is not tagged as run-rustfix because automatically
+// changing the type of a variable would also mean changing
+// all usages of this variable to match, and this is not handled
+// by this lint.
+
+static THRESHOLD: i32 = 10;
+static REF_THRESHOLD: &Option<&i32> = &Some(&THRESHOLD);
+const CONST_THRESHOLD: &i32 = &10;
+const REF_CONST: &Option<&i32> = &Some(CONST_THRESHOLD);
+
+type RefOptRefU32<'a> = &'a Option<&'a u32>;
+type RefOptRef<'a, T> = &'a Option<&'a T>;
+
+fn foo(data: &Option<&u32>) {}
+
+fn bar(data: &u32) -> &Option<&u32> {
+ &None
+}
+
+struct StructRef<'a> {
+ data: &'a Option<&'a u32>,
+}
+
+struct StructTupleRef<'a>(u32, &'a Option<&'a u32>);
+
+enum EnumRef<'a> {
+ Variant1(u32),
+ Variant2(&'a Option<&'a u32>),
+}
+
+trait RefOptTrait {
+ type A;
+ fn foo(&self, _: Self::A);
+}
+
+impl RefOptTrait for u32 {
+ type A = &'static Option<&'static Self>;
+
+ fn foo(&self, _: Self::A) {}
+}
+
+fn main() {
+ let x: &Option<&u32> = &None;
+}
diff --git a/src/tools/clippy/tests/ui/ref_option_ref.stderr b/src/tools/clippy/tests/ui/ref_option_ref.stderr
new file mode 100644
index 000000000..b61334758
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ref_option_ref.stderr
@@ -0,0 +1,70 @@
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:10:23
+ |
+LL | static REF_THRESHOLD: &Option<&i32> = &Some(&THRESHOLD);
+ | ^^^^^^^^^^^^^ help: try: `Option<&i32>`
+ |
+ = note: `-D clippy::ref-option-ref` implied by `-D warnings`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:12:18
+ |
+LL | const REF_CONST: &Option<&i32> = &Some(CONST_THRESHOLD);
+ | ^^^^^^^^^^^^^ help: try: `Option<&i32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:14:25
+ |
+LL | type RefOptRefU32<'a> = &'a Option<&'a u32>;
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `Option<&'a u32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:15:25
+ |
+LL | type RefOptRef<'a, T> = &'a Option<&'a T>;
+ | ^^^^^^^^^^^^^^^^^ help: try: `Option<&'a T>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:17:14
+ |
+LL | fn foo(data: &Option<&u32>) {}
+ | ^^^^^^^^^^^^^ help: try: `Option<&u32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:19:23
+ |
+LL | fn bar(data: &u32) -> &Option<&u32> {
+ | ^^^^^^^^^^^^^ help: try: `Option<&u32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:24:11
+ |
+LL | data: &'a Option<&'a u32>,
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `Option<&'a u32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:27:32
+ |
+LL | struct StructTupleRef<'a>(u32, &'a Option<&'a u32>);
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `Option<&'a u32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:31:14
+ |
+LL | Variant2(&'a Option<&'a u32>),
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `Option<&'a u32>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:40:14
+ |
+LL | type A = &'static Option<&'static Self>;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Option<&'static Self>`
+
+error: since `&` implements the `Copy` trait, `&Option<&T>` can be simplified to `Option<&T>`
+ --> $DIR/ref_option_ref.rs:46:12
+ |
+LL | let x: &Option<&u32> = &None;
+ | ^^^^^^^^^^^^^ help: try: `Option<&u32>`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/regex.rs b/src/tools/clippy/tests/ui/regex.rs
new file mode 100644
index 000000000..f7f3b195c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/regex.rs
@@ -0,0 +1,82 @@
+#![allow(unused)]
+#![warn(clippy::invalid_regex, clippy::trivial_regex)]
+
+extern crate regex;
+
+use regex::bytes::{Regex as BRegex, RegexBuilder as BRegexBuilder, RegexSet as BRegexSet};
+use regex::{Regex, RegexBuilder, RegexSet};
+
+const OPENING_PAREN: &str = "(";
+const NOT_A_REAL_REGEX: &str = "foobar";
+
+fn syntax_error() {
+ let pipe_in_wrong_position = Regex::new("|");
+ let pipe_in_wrong_position_builder = RegexBuilder::new("|");
+ let wrong_char_ranice = Regex::new("[z-a]");
+ let some_unicode = Regex::new("[é-è]");
+
+ let some_regex = Regex::new(OPENING_PAREN);
+
+ let binary_pipe_in_wrong_position = BRegex::new("|");
+ let some_binary_regex = BRegex::new(OPENING_PAREN);
+ let some_binary_regex_builder = BRegexBuilder::new(OPENING_PAREN);
+
+ let closing_paren = ")";
+ let not_linted = Regex::new(closing_paren);
+
+ let set = RegexSet::new(&[r"[a-z]+@[a-z]+\.(com|org|net)", r"[a-z]+\.(com|org|net)"]);
+ let bset = BRegexSet::new(&[
+ r"[a-z]+@[a-z]+\.(com|org|net)",
+ r"[a-z]+\.(com|org|net)",
+ r".", // regression test
+ ]);
+
+ let set_error = RegexSet::new(&[OPENING_PAREN, r"[a-z]+\.(com|org|net)"]);
+ let bset_error = BRegexSet::new(&[OPENING_PAREN, r"[a-z]+\.(com|org|net)"]);
+
+ let raw_string_error = Regex::new(r"[...\/...]");
+ let raw_string_error = Regex::new(r#"[...\/...]"#);
+}
+
+fn trivial_regex() {
+ let trivial_eq = Regex::new("^foobar$");
+
+ let trivial_eq_builder = RegexBuilder::new("^foobar$");
+
+ let trivial_starts_with = Regex::new("^foobar");
+
+ let trivial_ends_with = Regex::new("foobar$");
+
+ let trivial_contains = Regex::new("foobar");
+
+ let trivial_contains = Regex::new(NOT_A_REAL_REGEX);
+
+ let trivial_backslash = Regex::new("a\\.b");
+
+ // unlikely corner cases
+ let trivial_empty = Regex::new("");
+
+ let trivial_empty = Regex::new("^");
+
+ let trivial_empty = Regex::new("^$");
+
+ let binary_trivial_empty = BRegex::new("^$");
+
+ // non-trivial regexes
+ let non_trivial_dot = Regex::new("a.b");
+ let non_trivial_dot_builder = RegexBuilder::new("a.b");
+ let non_trivial_eq = Regex::new("^foo|bar$");
+ let non_trivial_starts_with = Regex::new("^foo|bar");
+ let non_trivial_ends_with = Regex::new("^foo|bar");
+ let non_trivial_ends_with = Regex::new("foo|bar");
+ let non_trivial_binary = BRegex::new("foo|bar");
+ let non_trivial_binary_builder = BRegexBuilder::new("foo|bar");
+
+ // #6005: unicode classes in bytes::Regex
+ let a_byte_of_unicode = BRegex::new(r"\p{C}");
+}
+
+fn main() {
+ syntax_error();
+ trivial_regex();
+}
diff --git a/src/tools/clippy/tests/ui/regex.stderr b/src/tools/clippy/tests/ui/regex.stderr
new file mode 100644
index 000000000..1394a9b63
--- /dev/null
+++ b/src/tools/clippy/tests/ui/regex.stderr
@@ -0,0 +1,171 @@
+error: trivial regex
+ --> $DIR/regex.rs:13:45
+ |
+LL | let pipe_in_wrong_position = Regex::new("|");
+ | ^^^
+ |
+ = note: `-D clippy::trivial-regex` implied by `-D warnings`
+ = help: the regex is unlikely to be useful as it is
+
+error: trivial regex
+ --> $DIR/regex.rs:14:60
+ |
+LL | let pipe_in_wrong_position_builder = RegexBuilder::new("|");
+ | ^^^
+ |
+ = help: the regex is unlikely to be useful as it is
+
+error: regex syntax error: invalid character class range, the start must be <= the end
+ --> $DIR/regex.rs:15:42
+ |
+LL | let wrong_char_ranice = Regex::new("[z-a]");
+ | ^^^
+ |
+ = note: `-D clippy::invalid-regex` implied by `-D warnings`
+
+error: regex syntax error: invalid character class range, the start must be <= the end
+ --> $DIR/regex.rs:16:37
+ |
+LL | let some_unicode = Regex::new("[é-è]");
+ | ^^^
+
+error: regex syntax error on position 0: unclosed group
+ --> $DIR/regex.rs:18:33
+ |
+LL | let some_regex = Regex::new(OPENING_PAREN);
+ | ^^^^^^^^^^^^^
+
+error: trivial regex
+ --> $DIR/regex.rs:20:53
+ |
+LL | let binary_pipe_in_wrong_position = BRegex::new("|");
+ | ^^^
+ |
+ = help: the regex is unlikely to be useful as it is
+
+error: regex syntax error on position 0: unclosed group
+ --> $DIR/regex.rs:21:41
+ |
+LL | let some_binary_regex = BRegex::new(OPENING_PAREN);
+ | ^^^^^^^^^^^^^
+
+error: regex syntax error on position 0: unclosed group
+ --> $DIR/regex.rs:22:56
+ |
+LL | let some_binary_regex_builder = BRegexBuilder::new(OPENING_PAREN);
+ | ^^^^^^^^^^^^^
+
+error: regex syntax error on position 0: unclosed group
+ --> $DIR/regex.rs:34:37
+ |
+LL | let set_error = RegexSet::new(&[OPENING_PAREN, r"[a-z]+/.(com|org|net)"]);
+ | ^^^^^^^^^^^^^
+
+error: regex syntax error on position 0: unclosed group
+ --> $DIR/regex.rs:35:39
+ |
+LL | let bset_error = BRegexSet::new(&[OPENING_PAREN, r"[a-z]+/.(com|org|net)"]);
+ | ^^^^^^^^^^^^^
+
+error: regex syntax error: unrecognized escape sequence
+ --> $DIR/regex.rs:37:45
+ |
+LL | let raw_string_error = Regex::new(r"[...//...]");
+ | ^^
+
+error: regex syntax error: unrecognized escape sequence
+ --> $DIR/regex.rs:38:46
+ |
+LL | let raw_string_error = Regex::new(r#"[...//...]"#);
+ | ^^
+
+error: trivial regex
+ --> $DIR/regex.rs:42:33
+ |
+LL | let trivial_eq = Regex::new("^foobar$");
+ | ^^^^^^^^^^
+ |
+ = help: consider using `==` on `str`s
+
+error: trivial regex
+ --> $DIR/regex.rs:44:48
+ |
+LL | let trivial_eq_builder = RegexBuilder::new("^foobar$");
+ | ^^^^^^^^^^
+ |
+ = help: consider using `==` on `str`s
+
+error: trivial regex
+ --> $DIR/regex.rs:46:42
+ |
+LL | let trivial_starts_with = Regex::new("^foobar");
+ | ^^^^^^^^^
+ |
+ = help: consider using `str::starts_with`
+
+error: trivial regex
+ --> $DIR/regex.rs:48:40
+ |
+LL | let trivial_ends_with = Regex::new("foobar$");
+ | ^^^^^^^^^
+ |
+ = help: consider using `str::ends_with`
+
+error: trivial regex
+ --> $DIR/regex.rs:50:39
+ |
+LL | let trivial_contains = Regex::new("foobar");
+ | ^^^^^^^^
+ |
+ = help: consider using `str::contains`
+
+error: trivial regex
+ --> $DIR/regex.rs:52:39
+ |
+LL | let trivial_contains = Regex::new(NOT_A_REAL_REGEX);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `str::contains`
+
+error: trivial regex
+ --> $DIR/regex.rs:54:40
+ |
+LL | let trivial_backslash = Regex::new("a/.b");
+ | ^^^^^^^
+ |
+ = help: consider using `str::contains`
+
+error: trivial regex
+ --> $DIR/regex.rs:57:36
+ |
+LL | let trivial_empty = Regex::new("");
+ | ^^
+ |
+ = help: the regex is unlikely to be useful as it is
+
+error: trivial regex
+ --> $DIR/regex.rs:59:36
+ |
+LL | let trivial_empty = Regex::new("^");
+ | ^^^
+ |
+ = help: the regex is unlikely to be useful as it is
+
+error: trivial regex
+ --> $DIR/regex.rs:61:36
+ |
+LL | let trivial_empty = Regex::new("^$");
+ | ^^^^
+ |
+ = help: consider using `str::is_empty`
+
+error: trivial regex
+ --> $DIR/regex.rs:63:44
+ |
+LL | let binary_trivial_empty = BRegex::new("^$");
+ | ^^^^
+ |
+ = help: consider using `str::is_empty`
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rename.fixed b/src/tools/clippy/tests/ui/rename.fixed
new file mode 100644
index 000000000..53288be94
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rename.fixed
@@ -0,0 +1,72 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+// run-rustfix
+
+#![allow(clippy::blocks_in_if_conditions)]
+#![allow(clippy::box_collection)]
+#![allow(clippy::redundant_static_lifetimes)]
+#![allow(clippy::cognitive_complexity)]
+#![allow(clippy::disallowed_methods)]
+#![allow(clippy::disallowed_types)]
+#![allow(clippy::mixed_read_write_in_expression)]
+#![allow(clippy::for_loops_over_fallibles)]
+#![allow(clippy::useless_conversion)]
+#![allow(clippy::match_result_ok)]
+#![allow(clippy::new_without_default)]
+#![allow(clippy::bind_instead_of_map)]
+#![allow(clippy::expect_used)]
+#![allow(clippy::map_unwrap_or)]
+#![allow(clippy::unwrap_used)]
+#![allow(clippy::needless_borrow)]
+#![allow(clippy::single_char_add_str)]
+#![allow(clippy::module_name_repetitions)]
+#![allow(clippy::recursive_format_impl)]
+#![allow(clippy::invisible_characters)]
+#![allow(drop_bounds)]
+#![allow(array_into_iter)]
+#![allow(invalid_atomic_ordering)]
+#![allow(invalid_value)]
+#![allow(enum_intrinsics_non_enums)]
+#![allow(non_fmt_panics)]
+#![allow(temporary_cstring_as_ptr)]
+#![allow(unknown_lints)]
+#![allow(unused_labels)]
+#![warn(clippy::blocks_in_if_conditions)]
+#![warn(clippy::blocks_in_if_conditions)]
+#![warn(clippy::box_collection)]
+#![warn(clippy::redundant_static_lifetimes)]
+#![warn(clippy::cognitive_complexity)]
+#![warn(clippy::disallowed_methods)]
+#![warn(clippy::disallowed_types)]
+#![warn(clippy::mixed_read_write_in_expression)]
+#![warn(clippy::for_loops_over_fallibles)]
+#![warn(clippy::for_loops_over_fallibles)]
+#![warn(clippy::useless_conversion)]
+#![warn(clippy::match_result_ok)]
+#![warn(clippy::new_without_default)]
+#![warn(clippy::bind_instead_of_map)]
+#![warn(clippy::expect_used)]
+#![warn(clippy::map_unwrap_or)]
+#![warn(clippy::map_unwrap_or)]
+#![warn(clippy::unwrap_used)]
+#![warn(clippy::needless_borrow)]
+#![warn(clippy::expect_used)]
+#![warn(clippy::map_unwrap_or)]
+#![warn(clippy::unwrap_used)]
+#![warn(clippy::single_char_add_str)]
+#![warn(clippy::module_name_repetitions)]
+#![warn(clippy::recursive_format_impl)]
+#![warn(clippy::invisible_characters)]
+#![warn(drop_bounds)]
+#![warn(array_into_iter)]
+#![warn(invalid_atomic_ordering)]
+#![warn(invalid_value)]
+#![warn(enum_intrinsics_non_enums)]
+#![warn(non_fmt_panics)]
+#![warn(temporary_cstring_as_ptr)]
+#![warn(unknown_lints)]
+#![warn(unused_labels)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rename.rs b/src/tools/clippy/tests/ui/rename.rs
new file mode 100644
index 000000000..539f34f84
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rename.rs
@@ -0,0 +1,72 @@
+// This file was generated by `cargo dev update_lints`.
+// Use that command to update this file and do not edit by hand.
+// Manual edits will be overwritten.
+
+// run-rustfix
+
+#![allow(clippy::blocks_in_if_conditions)]
+#![allow(clippy::box_collection)]
+#![allow(clippy::redundant_static_lifetimes)]
+#![allow(clippy::cognitive_complexity)]
+#![allow(clippy::disallowed_methods)]
+#![allow(clippy::disallowed_types)]
+#![allow(clippy::mixed_read_write_in_expression)]
+#![allow(clippy::for_loops_over_fallibles)]
+#![allow(clippy::useless_conversion)]
+#![allow(clippy::match_result_ok)]
+#![allow(clippy::new_without_default)]
+#![allow(clippy::bind_instead_of_map)]
+#![allow(clippy::expect_used)]
+#![allow(clippy::map_unwrap_or)]
+#![allow(clippy::unwrap_used)]
+#![allow(clippy::needless_borrow)]
+#![allow(clippy::single_char_add_str)]
+#![allow(clippy::module_name_repetitions)]
+#![allow(clippy::recursive_format_impl)]
+#![allow(clippy::invisible_characters)]
+#![allow(drop_bounds)]
+#![allow(array_into_iter)]
+#![allow(invalid_atomic_ordering)]
+#![allow(invalid_value)]
+#![allow(enum_intrinsics_non_enums)]
+#![allow(non_fmt_panics)]
+#![allow(temporary_cstring_as_ptr)]
+#![allow(unknown_lints)]
+#![allow(unused_labels)]
+#![warn(clippy::block_in_if_condition_expr)]
+#![warn(clippy::block_in_if_condition_stmt)]
+#![warn(clippy::box_vec)]
+#![warn(clippy::const_static_lifetime)]
+#![warn(clippy::cyclomatic_complexity)]
+#![warn(clippy::disallowed_method)]
+#![warn(clippy::disallowed_type)]
+#![warn(clippy::eval_order_dependence)]
+#![warn(clippy::for_loop_over_option)]
+#![warn(clippy::for_loop_over_result)]
+#![warn(clippy::identity_conversion)]
+#![warn(clippy::if_let_some_result)]
+#![warn(clippy::new_without_default_derive)]
+#![warn(clippy::option_and_then_some)]
+#![warn(clippy::option_expect_used)]
+#![warn(clippy::option_map_unwrap_or)]
+#![warn(clippy::option_map_unwrap_or_else)]
+#![warn(clippy::option_unwrap_used)]
+#![warn(clippy::ref_in_deref)]
+#![warn(clippy::result_expect_used)]
+#![warn(clippy::result_map_unwrap_or_else)]
+#![warn(clippy::result_unwrap_used)]
+#![warn(clippy::single_char_push_str)]
+#![warn(clippy::stutter)]
+#![warn(clippy::to_string_in_display)]
+#![warn(clippy::zero_width_space)]
+#![warn(clippy::drop_bounds)]
+#![warn(clippy::into_iter_on_array)]
+#![warn(clippy::invalid_atomic_ordering)]
+#![warn(clippy::invalid_ref)]
+#![warn(clippy::mem_discriminant_non_enum)]
+#![warn(clippy::panic_params)]
+#![warn(clippy::temporary_cstring_as_ptr)]
+#![warn(clippy::unknown_clippy_lints)]
+#![warn(clippy::unused_label)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/rename.stderr b/src/tools/clippy/tests/ui/rename.stderr
new file mode 100644
index 000000000..8ea46b580
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rename.stderr
@@ -0,0 +1,214 @@
+error: lint `clippy::block_in_if_condition_expr` has been renamed to `clippy::blocks_in_if_conditions`
+ --> $DIR/rename.rs:36:9
+ |
+LL | #![warn(clippy::block_in_if_condition_expr)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_if_conditions`
+ |
+ = note: `-D renamed-and-removed-lints` implied by `-D warnings`
+
+error: lint `clippy::block_in_if_condition_stmt` has been renamed to `clippy::blocks_in_if_conditions`
+ --> $DIR/rename.rs:37:9
+ |
+LL | #![warn(clippy::block_in_if_condition_stmt)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_if_conditions`
+
+error: lint `clippy::box_vec` has been renamed to `clippy::box_collection`
+ --> $DIR/rename.rs:38:9
+ |
+LL | #![warn(clippy::box_vec)]
+ | ^^^^^^^^^^^^^^^ help: use the new name: `clippy::box_collection`
+
+error: lint `clippy::const_static_lifetime` has been renamed to `clippy::redundant_static_lifetimes`
+ --> $DIR/rename.rs:39:9
+ |
+LL | #![warn(clippy::const_static_lifetime)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_static_lifetimes`
+
+error: lint `clippy::cyclomatic_complexity` has been renamed to `clippy::cognitive_complexity`
+ --> $DIR/rename.rs:40:9
+ |
+LL | #![warn(clippy::cyclomatic_complexity)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::cognitive_complexity`
+
+error: lint `clippy::disallowed_method` has been renamed to `clippy::disallowed_methods`
+ --> $DIR/rename.rs:41:9
+ |
+LL | #![warn(clippy::disallowed_method)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_methods`
+
+error: lint `clippy::disallowed_type` has been renamed to `clippy::disallowed_types`
+ --> $DIR/rename.rs:42:9
+ |
+LL | #![warn(clippy::disallowed_type)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_types`
+
+error: lint `clippy::eval_order_dependence` has been renamed to `clippy::mixed_read_write_in_expression`
+ --> $DIR/rename.rs:43:9
+ |
+LL | #![warn(clippy::eval_order_dependence)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::mixed_read_write_in_expression`
+
+error: lint `clippy::for_loop_over_option` has been renamed to `clippy::for_loops_over_fallibles`
+ --> $DIR/rename.rs:44:9
+ |
+LL | #![warn(clippy::for_loop_over_option)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::for_loops_over_fallibles`
+
+error: lint `clippy::for_loop_over_result` has been renamed to `clippy::for_loops_over_fallibles`
+ --> $DIR/rename.rs:45:9
+ |
+LL | #![warn(clippy::for_loop_over_result)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::for_loops_over_fallibles`
+
+error: lint `clippy::identity_conversion` has been renamed to `clippy::useless_conversion`
+ --> $DIR/rename.rs:46:9
+ |
+LL | #![warn(clippy::identity_conversion)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::useless_conversion`
+
+error: lint `clippy::if_let_some_result` has been renamed to `clippy::match_result_ok`
+ --> $DIR/rename.rs:47:9
+ |
+LL | #![warn(clippy::if_let_some_result)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::match_result_ok`
+
+error: lint `clippy::new_without_default_derive` has been renamed to `clippy::new_without_default`
+ --> $DIR/rename.rs:48:9
+ |
+LL | #![warn(clippy::new_without_default_derive)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::new_without_default`
+
+error: lint `clippy::option_and_then_some` has been renamed to `clippy::bind_instead_of_map`
+ --> $DIR/rename.rs:49:9
+ |
+LL | #![warn(clippy::option_and_then_some)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::bind_instead_of_map`
+
+error: lint `clippy::option_expect_used` has been renamed to `clippy::expect_used`
+ --> $DIR/rename.rs:50:9
+ |
+LL | #![warn(clippy::option_expect_used)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used`
+
+error: lint `clippy::option_map_unwrap_or` has been renamed to `clippy::map_unwrap_or`
+ --> $DIR/rename.rs:51:9
+ |
+LL | #![warn(clippy::option_map_unwrap_or)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
+
+error: lint `clippy::option_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or`
+ --> $DIR/rename.rs:52:9
+ |
+LL | #![warn(clippy::option_map_unwrap_or_else)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
+
+error: lint `clippy::option_unwrap_used` has been renamed to `clippy::unwrap_used`
+ --> $DIR/rename.rs:53:9
+ |
+LL | #![warn(clippy::option_unwrap_used)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used`
+
+error: lint `clippy::ref_in_deref` has been renamed to `clippy::needless_borrow`
+ --> $DIR/rename.rs:54:9
+ |
+LL | #![warn(clippy::ref_in_deref)]
+ | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::needless_borrow`
+
+error: lint `clippy::result_expect_used` has been renamed to `clippy::expect_used`
+ --> $DIR/rename.rs:55:9
+ |
+LL | #![warn(clippy::result_expect_used)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used`
+
+error: lint `clippy::result_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or`
+ --> $DIR/rename.rs:56:9
+ |
+LL | #![warn(clippy::result_map_unwrap_or_else)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
+
+error: lint `clippy::result_unwrap_used` has been renamed to `clippy::unwrap_used`
+ --> $DIR/rename.rs:57:9
+ |
+LL | #![warn(clippy::result_unwrap_used)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used`
+
+error: lint `clippy::single_char_push_str` has been renamed to `clippy::single_char_add_str`
+ --> $DIR/rename.rs:58:9
+ |
+LL | #![warn(clippy::single_char_push_str)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::single_char_add_str`
+
+error: lint `clippy::stutter` has been renamed to `clippy::module_name_repetitions`
+ --> $DIR/rename.rs:59:9
+ |
+LL | #![warn(clippy::stutter)]
+ | ^^^^^^^^^^^^^^^ help: use the new name: `clippy::module_name_repetitions`
+
+error: lint `clippy::to_string_in_display` has been renamed to `clippy::recursive_format_impl`
+ --> $DIR/rename.rs:60:9
+ |
+LL | #![warn(clippy::to_string_in_display)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::recursive_format_impl`
+
+error: lint `clippy::zero_width_space` has been renamed to `clippy::invisible_characters`
+ --> $DIR/rename.rs:61:9
+ |
+LL | #![warn(clippy::zero_width_space)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::invisible_characters`
+
+error: lint `clippy::drop_bounds` has been renamed to `drop_bounds`
+ --> $DIR/rename.rs:62:9
+ |
+LL | #![warn(clippy::drop_bounds)]
+ | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `drop_bounds`
+
+error: lint `clippy::into_iter_on_array` has been renamed to `array_into_iter`
+ --> $DIR/rename.rs:63:9
+ |
+LL | #![warn(clippy::into_iter_on_array)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `array_into_iter`
+
+error: lint `clippy::invalid_atomic_ordering` has been renamed to `invalid_atomic_ordering`
+ --> $DIR/rename.rs:64:9
+ |
+LL | #![warn(clippy::invalid_atomic_ordering)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_atomic_ordering`
+
+error: lint `clippy::invalid_ref` has been renamed to `invalid_value`
+ --> $DIR/rename.rs:65:9
+ |
+LL | #![warn(clippy::invalid_ref)]
+ | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_value`
+
+error: lint `clippy::mem_discriminant_non_enum` has been renamed to `enum_intrinsics_non_enums`
+ --> $DIR/rename.rs:66:9
+ |
+LL | #![warn(clippy::mem_discriminant_non_enum)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `enum_intrinsics_non_enums`
+
+error: lint `clippy::panic_params` has been renamed to `non_fmt_panics`
+ --> $DIR/rename.rs:67:9
+ |
+LL | #![warn(clippy::panic_params)]
+ | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `non_fmt_panics`
+
+error: lint `clippy::temporary_cstring_as_ptr` has been renamed to `temporary_cstring_as_ptr`
+ --> $DIR/rename.rs:68:9
+ |
+LL | #![warn(clippy::temporary_cstring_as_ptr)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `temporary_cstring_as_ptr`
+
+error: lint `clippy::unknown_clippy_lints` has been renamed to `unknown_lints`
+ --> $DIR/rename.rs:69:9
+ |
+LL | #![warn(clippy::unknown_clippy_lints)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unknown_lints`
+
+error: lint `clippy::unused_label` has been renamed to `unused_labels`
+ --> $DIR/rename.rs:70:9
+ |
+LL | #![warn(clippy::unused_label)]
+ | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unused_labels`
+
+error: aborting due to 35 previous errors
+
diff --git a/src/tools/clippy/tests/ui/renamed_builtin_attr.fixed b/src/tools/clippy/tests/ui/renamed_builtin_attr.fixed
new file mode 100644
index 000000000..cb91b841d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/renamed_builtin_attr.fixed
@@ -0,0 +1,4 @@
+// run-rustfix
+
+#[clippy::cognitive_complexity = "1"]
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/renamed_builtin_attr.rs b/src/tools/clippy/tests/ui/renamed_builtin_attr.rs
new file mode 100644
index 000000000..b3ce27580
--- /dev/null
+++ b/src/tools/clippy/tests/ui/renamed_builtin_attr.rs
@@ -0,0 +1,4 @@
+// run-rustfix
+
+#[clippy::cyclomatic_complexity = "1"]
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr b/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr
new file mode 100644
index 000000000..880467624
--- /dev/null
+++ b/src/tools/clippy/tests/ui/renamed_builtin_attr.stderr
@@ -0,0 +1,8 @@
+error: usage of deprecated attribute
+ --> $DIR/renamed_builtin_attr.rs:3:11
+ |
+LL | #[clippy::cyclomatic_complexity = "1"]
+ | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `cognitive_complexity`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/repeat_once.fixed b/src/tools/clippy/tests/ui/repeat_once.fixed
new file mode 100644
index 000000000..dc197e503
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repeat_once.fixed
@@ -0,0 +1,16 @@
+// run-rustfix
+#![warn(clippy::repeat_once)]
+#[allow(unused, clippy::redundant_clone)]
+fn main() {
+ const N: usize = 1;
+ let s = "str";
+ let string = "String".to_string();
+ let slice = [1; 5];
+
+ let a = [1; 5].to_vec();
+ let b = slice.to_vec();
+ let c = "hello".to_string();
+ let d = "hi".to_string();
+ let e = s.to_string();
+ let f = string.clone();
+}
diff --git a/src/tools/clippy/tests/ui/repeat_once.rs b/src/tools/clippy/tests/ui/repeat_once.rs
new file mode 100644
index 000000000..0ec512711
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repeat_once.rs
@@ -0,0 +1,16 @@
+// run-rustfix
+#![warn(clippy::repeat_once)]
+#[allow(unused, clippy::redundant_clone)]
+fn main() {
+ const N: usize = 1;
+ let s = "str";
+ let string = "String".to_string();
+ let slice = [1; 5];
+
+ let a = [1; 5].repeat(1);
+ let b = slice.repeat(1);
+ let c = "hello".repeat(N);
+ let d = "hi".repeat(1);
+ let e = s.repeat(1);
+ let f = string.repeat(1);
+}
diff --git a/src/tools/clippy/tests/ui/repeat_once.stderr b/src/tools/clippy/tests/ui/repeat_once.stderr
new file mode 100644
index 000000000..915eea3bf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repeat_once.stderr
@@ -0,0 +1,40 @@
+error: calling `repeat(1)` on slice
+ --> $DIR/repeat_once.rs:10:13
+ |
+LL | let a = [1; 5].repeat(1);
+ | ^^^^^^^^^^^^^^^^ help: consider using `.to_vec()` instead: `[1; 5].to_vec()`
+ |
+ = note: `-D clippy::repeat-once` implied by `-D warnings`
+
+error: calling `repeat(1)` on slice
+ --> $DIR/repeat_once.rs:11:13
+ |
+LL | let b = slice.repeat(1);
+ | ^^^^^^^^^^^^^^^ help: consider using `.to_vec()` instead: `slice.to_vec()`
+
+error: calling `repeat(1)` on str
+ --> $DIR/repeat_once.rs:12:13
+ |
+LL | let c = "hello".repeat(N);
+ | ^^^^^^^^^^^^^^^^^ help: consider using `.to_string()` instead: `"hello".to_string()`
+
+error: calling `repeat(1)` on str
+ --> $DIR/repeat_once.rs:13:13
+ |
+LL | let d = "hi".repeat(1);
+ | ^^^^^^^^^^^^^^ help: consider using `.to_string()` instead: `"hi".to_string()`
+
+error: calling `repeat(1)` on str
+ --> $DIR/repeat_once.rs:14:13
+ |
+LL | let e = s.repeat(1);
+ | ^^^^^^^^^^^ help: consider using `.to_string()` instead: `s.to_string()`
+
+error: calling `repeat(1)` on a string literal
+ --> $DIR/repeat_once.rs:15:13
+ |
+LL | let f = string.repeat(1);
+ | ^^^^^^^^^^^^^^^^ help: consider using `.clone()` instead: `string.clone()`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/repl_uninit.rs b/src/tools/clippy/tests/ui/repl_uninit.rs
new file mode 100644
index 000000000..6c7e2b854
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repl_uninit.rs
@@ -0,0 +1,41 @@
+#![allow(deprecated, invalid_value, clippy::uninit_assumed_init)]
+#![warn(clippy::mem_replace_with_uninit)]
+
+use std::mem;
+
+fn might_panic<X>(x: X) -> X {
+ // in practice this would be a possibly-panicky operation
+ x
+}
+
+fn main() {
+ let mut v = vec![0i32; 4];
+ // the following is UB if `might_panic` panics
+ unsafe {
+ let taken_v = mem::replace(&mut v, mem::uninitialized());
+ let new_v = might_panic(taken_v);
+ std::mem::forget(mem::replace(&mut v, new_v));
+ }
+
+ unsafe {
+ let taken_v = mem::replace(&mut v, mem::MaybeUninit::uninit().assume_init());
+ let new_v = might_panic(taken_v);
+ std::mem::forget(mem::replace(&mut v, new_v));
+ }
+
+ unsafe {
+ let taken_v = mem::replace(&mut v, mem::zeroed());
+ let new_v = might_panic(taken_v);
+ std::mem::forget(mem::replace(&mut v, new_v));
+ }
+
+ // this is silly but OK, because usize is a primitive type
+ let mut u: usize = 42;
+ let uref = &mut u;
+ let taken_u = unsafe { mem::replace(uref, mem::zeroed()) };
+ *uref = taken_u + 1;
+
+    // this is still not OK, because it uses `mem::uninitialized()`
+ let taken_u = unsafe { mem::replace(uref, mem::uninitialized()) };
+ *uref = taken_u + 1;
+}
diff --git a/src/tools/clippy/tests/ui/repl_uninit.stderr b/src/tools/clippy/tests/ui/repl_uninit.stderr
new file mode 100644
index 000000000..09468eeae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/repl_uninit.stderr
@@ -0,0 +1,30 @@
+error: replacing with `mem::uninitialized()`
+ --> $DIR/repl_uninit.rs:15:23
+ |
+LL | let taken_v = mem::replace(&mut v, mem::uninitialized());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::ptr::read(&mut v)`
+ |
+ = note: `-D clippy::mem-replace-with-uninit` implied by `-D warnings`
+
+error: replacing with `mem::MaybeUninit::uninit().assume_init()`
+ --> $DIR/repl_uninit.rs:21:23
+ |
+LL | let taken_v = mem::replace(&mut v, mem::MaybeUninit::uninit().assume_init());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::ptr::read(&mut v)`
+
+error: replacing with `mem::zeroed()`
+ --> $DIR/repl_uninit.rs:27:23
+ |
+LL | let taken_v = mem::replace(&mut v, mem::zeroed());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a default value or the `take_mut` crate instead
+
+error: replacing with `mem::uninitialized()`
+ --> $DIR/repl_uninit.rs:39:28
+ |
+LL | let taken_u = unsafe { mem::replace(uref, mem::uninitialized()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::ptr::read(uref)`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.rs b/src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.rs
new file mode 100644
index 000000000..086331af6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.rs
@@ -0,0 +1,57 @@
+#![warn(clippy::rest_pat_in_fully_bound_structs)]
+
+struct A {
+ a: i32,
+ b: i64,
+ c: &'static str,
+}
+
+macro_rules! foo {
+ ($param:expr) => {
+ match $param {
+ A { a: 0, b: 0, c: "", .. } => {},
+ _ => {},
+ }
+ };
+}
+
+fn main() {
+ let a_struct = A { a: 5, b: 42, c: "A" };
+
+ match a_struct {
+ A { a: 5, b: 42, c: "", .. } => {}, // Lint
+ A { a: 0, b: 0, c: "", .. } => {}, // Lint
+ _ => {},
+ }
+
+ match a_struct {
+ A { a: 5, b: 42, .. } => {},
+ A { a: 0, b: 0, c: "", .. } => {}, // Lint
+ _ => {},
+ }
+
+ // No lint
+ match a_struct {
+ A { a: 5, .. } => {},
+ A { a: 0, b: 0, .. } => {},
+ _ => {},
+ }
+
+ // No lint
+ foo!(a_struct);
+
+ #[non_exhaustive]
+ struct B {
+ a: u32,
+ b: u32,
+ c: u64,
+ }
+
+ let b_struct = B { a: 5, b: 42, c: 342 };
+
+ match b_struct {
+ B { a: 5, b: 42, .. } => {},
+ B { a: 0, b: 0, c: 128, .. } => {}, // No Lint
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.stderr b/src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.stderr
new file mode 100644
index 000000000..57ebd47f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/rest_pat_in_fully_bound_structs.stderr
@@ -0,0 +1,27 @@
+error: unnecessary use of `..` pattern in struct binding. All fields were already bound
+ --> $DIR/rest_pat_in_fully_bound_structs.rs:22:9
+ |
+LL | A { a: 5, b: 42, c: "", .. } => {}, // Lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::rest-pat-in-fully-bound-structs` implied by `-D warnings`
+ = help: consider removing `..` from this binding
+
+error: unnecessary use of `..` pattern in struct binding. All fields were already bound
+ --> $DIR/rest_pat_in_fully_bound_structs.rs:23:9
+ |
+LL | A { a: 0, b: 0, c: "", .. } => {}, // Lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `..` from this binding
+
+error: unnecessary use of `..` pattern in struct binding. All fields were already bound
+ --> $DIR/rest_pat_in_fully_bound_structs.rs:29:9
+ |
+LL | A { a: 0, b: 0, c: "", .. } => {}, // Lint
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `..` from this binding
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.fixed b/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
new file mode 100644
index 000000000..331531b51
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
@@ -0,0 +1,19 @@
+// run-rustfix
+
+#![warn(clippy::result_map_or_into_option)]
+
+fn main() {
+ let opt: Result<u32, &str> = Ok(1);
+ let _ = opt.ok();
+
+ let rewrap = |s: u32| -> Option<u32> { Some(s) };
+
+ // A non-Some `f` arg should not emit the lint
+ let opt: Result<u32, &str> = Ok(1);
+ let _ = opt.map_or(None, rewrap);
+
+    // A non-`Some` `f` closure where the argument is not used as the
+    // return value should not emit the lint
+ let opt: Result<u32, &str> = Ok(1);
+ opt.map_or(None, |_x| Some(1));
+}
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.rs b/src/tools/clippy/tests/ui/result_map_or_into_option.rs
new file mode 100644
index 000000000..3058480e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.rs
@@ -0,0 +1,19 @@
+// run-rustfix
+
+#![warn(clippy::result_map_or_into_option)]
+
+fn main() {
+ let opt: Result<u32, &str> = Ok(1);
+ let _ = opt.map_or(None, Some);
+
+ let rewrap = |s: u32| -> Option<u32> { Some(s) };
+
+ // A non-Some `f` arg should not emit the lint
+ let opt: Result<u32, &str> = Ok(1);
+ let _ = opt.map_or(None, rewrap);
+
+    // A non-`Some` `f` closure where the argument is not used as the
+    // return value should not emit the lint
+ let opt: Result<u32, &str> = Ok(1);
+ opt.map_or(None, |_x| Some(1));
+}
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.stderr b/src/tools/clippy/tests/ui/result_map_or_into_option.stderr
new file mode 100644
index 000000000..febf32147
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.stderr
@@ -0,0 +1,10 @@
+error: called `map_or(None, Some)` on a `Result` value. This can be done more directly by calling `ok()` instead
+ --> $DIR/result_map_or_into_option.rs:7:13
+ |
+LL | let _ = opt.map_or(None, Some);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try using `ok` instead: `opt.ok()`
+ |
+ = note: `-D clippy::result-map-or-into-option` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.fixed b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.fixed
new file mode 100644
index 000000000..14c331f67
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.fixed
@@ -0,0 +1,82 @@
+// run-rustfix
+
+#![warn(clippy::result_map_unit_fn)]
+#![allow(unused)]
+
+fn do_nothing<T>(_: T) {}
+
+fn diverge<T>(_: T) -> ! {
+ panic!()
+}
+
+fn plus_one(value: usize) -> usize {
+ value + 1
+}
+
+struct HasResult {
+ field: Result<usize, usize>,
+}
+
+impl HasResult {
+ fn do_result_nothing(&self, value: usize) {}
+
+ fn do_result_plus_one(&self, value: usize) -> usize {
+ value + 1
+ }
+}
+
+#[rustfmt::skip]
+fn result_map_unit_fn() {
+ let x = HasResult { field: Ok(10) };
+
+ x.field.map(plus_one);
+ let _: Result<(), usize> = x.field.map(do_nothing);
+
+ if let Ok(x_field) = x.field { do_nothing(x_field) }
+
+ if let Ok(x_field) = x.field { do_nothing(x_field) }
+
+ if let Ok(x_field) = x.field { diverge(x_field) }
+
+ let captured = 10;
+ if let Ok(value) = x.field { do_nothing(value + captured) };
+ let _: Result<(), usize> = x.field.map(|value| do_nothing(value + captured));
+
+ if let Ok(value) = x.field { x.do_result_nothing(value + captured) }
+
+ if let Ok(value) = x.field { x.do_result_plus_one(value + captured); }
+
+
+ if let Ok(value) = x.field { do_nothing(value + captured) }
+
+ if let Ok(value) = x.field { do_nothing(value + captured) }
+
+ if let Ok(value) = x.field { do_nothing(value + captured); }
+
+ if let Ok(value) = x.field { do_nothing(value + captured); }
+
+
+ if let Ok(value) = x.field { diverge(value + captured) }
+
+ if let Ok(value) = x.field { diverge(value + captured) }
+
+ if let Ok(value) = x.field { diverge(value + captured); }
+
+ if let Ok(value) = x.field { diverge(value + captured); }
+
+
+ x.field.map(|value| plus_one(value + captured));
+ x.field.map(|value| { plus_one(value + captured) });
+ if let Ok(value) = x.field { let y = plus_one(value + captured); }
+
+ if let Ok(value) = x.field { plus_one(value + captured); }
+
+ if let Ok(value) = x.field { plus_one(value + captured); }
+
+
+ if let Ok(ref value) = x.field { do_nothing(value + captured) }
+
+ if let Ok(value) = x.field { println!("{:?}", value) }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.rs b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.rs
new file mode 100644
index 000000000..8b0fca9ec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.rs
@@ -0,0 +1,82 @@
+// run-rustfix
+
+#![warn(clippy::result_map_unit_fn)]
+#![allow(unused)]
+
+fn do_nothing<T>(_: T) {}
+
+fn diverge<T>(_: T) -> ! {
+ panic!()
+}
+
+fn plus_one(value: usize) -> usize {
+ value + 1
+}
+
+struct HasResult {
+ field: Result<usize, usize>,
+}
+
+impl HasResult {
+ fn do_result_nothing(&self, value: usize) {}
+
+ fn do_result_plus_one(&self, value: usize) -> usize {
+ value + 1
+ }
+}
+
+#[rustfmt::skip]
+fn result_map_unit_fn() {
+ let x = HasResult { field: Ok(10) };
+
+ x.field.map(plus_one);
+ let _: Result<(), usize> = x.field.map(do_nothing);
+
+ x.field.map(do_nothing);
+
+ x.field.map(do_nothing);
+
+ x.field.map(diverge);
+
+ let captured = 10;
+ if let Ok(value) = x.field { do_nothing(value + captured) };
+ let _: Result<(), usize> = x.field.map(|value| do_nothing(value + captured));
+
+ x.field.map(|value| x.do_result_nothing(value + captured));
+
+ x.field.map(|value| { x.do_result_plus_one(value + captured); });
+
+
+ x.field.map(|value| do_nothing(value + captured));
+
+ x.field.map(|value| { do_nothing(value + captured) });
+
+ x.field.map(|value| { do_nothing(value + captured); });
+
+ x.field.map(|value| { { do_nothing(value + captured); } });
+
+
+ x.field.map(|value| diverge(value + captured));
+
+ x.field.map(|value| { diverge(value + captured) });
+
+ x.field.map(|value| { diverge(value + captured); });
+
+ x.field.map(|value| { { diverge(value + captured); } });
+
+
+ x.field.map(|value| plus_one(value + captured));
+ x.field.map(|value| { plus_one(value + captured) });
+ x.field.map(|value| { let y = plus_one(value + captured); });
+
+ x.field.map(|value| { plus_one(value + captured); });
+
+ x.field.map(|value| { { plus_one(value + captured); } });
+
+
+ x.field.map(|ref value| { do_nothing(value + captured) });
+
+ x.field.map(|value| println!("{:?}", value));
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr
new file mode 100644
index 000000000..782febd52
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr
@@ -0,0 +1,148 @@
+error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:35:5
+ |
+LL | x.field.map(do_nothing);
+ | ^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(x_field) = x.field { do_nothing(x_field) }`
+ |
+ = note: `-D clippy::result-map-unit-fn` implied by `-D warnings`
+
+error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:37:5
+ |
+LL | x.field.map(do_nothing);
+ | ^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(x_field) = x.field { do_nothing(x_field) }`
+
+error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:39:5
+ |
+LL | x.field.map(diverge);
+ | ^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(x_field) = x.field { diverge(x_field) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:45:5
+ |
+LL | x.field.map(|value| x.do_result_nothing(value + captured));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { x.do_result_nothing(value + captured) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:47:5
+ |
+LL | x.field.map(|value| { x.do_result_plus_one(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { x.do_result_plus_one(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:50:5
+ |
+LL | x.field.map(|value| do_nothing(value + captured));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:52:5
+ |
+LL | x.field.map(|value| { do_nothing(value + captured) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:54:5
+ |
+LL | x.field.map(|value| { do_nothing(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:56:5
+ |
+LL | x.field.map(|value| { { do_nothing(value + captured); } });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:59:5
+ |
+LL | x.field.map(|value| diverge(value + captured));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { diverge(value + captured) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:61:5
+ |
+LL | x.field.map(|value| { diverge(value + captured) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { diverge(value + captured) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:63:5
+ |
+LL | x.field.map(|value| { diverge(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { diverge(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:65:5
+ |
+LL | x.field.map(|value| { { diverge(value + captured); } });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { diverge(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:70:5
+ |
+LL | x.field.map(|value| { let y = plus_one(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { let y = plus_one(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:72:5
+ |
+LL | x.field.map(|value| { plus_one(value + captured); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { plus_one(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:74:5
+ |
+LL | x.field.map(|value| { { plus_one(value + captured); } });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { plus_one(value + captured); }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:77:5
+ |
+LL | x.field.map(|ref value| { do_nothing(value + captured) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(ref value) = x.field { do_nothing(value + captured) }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_fixable.rs:79:5
+ |
+LL | x.field.map(|value| println!("{:?}", value));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { println!("{:?}", value) }`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.rs b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.rs
new file mode 100644
index 000000000..b197c609d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.rs
@@ -0,0 +1,46 @@
+#![warn(clippy::result_map_unit_fn)]
+#![feature(never_type)]
+#![allow(unused)]
+
+struct HasResult {
+ field: Result<usize, usize>,
+}
+
+fn do_nothing<T>(_: T) {}
+
+fn diverge<T>(_: T) -> ! {
+ panic!()
+}
+
+fn plus_one(value: usize) -> usize {
+ value + 1
+}
+
+#[rustfmt::skip]
+fn result_map_unit_fn() {
+ let x = HasResult { field: Ok(10) };
+
+ x.field.map(|value| { do_nothing(value); do_nothing(value) });
+
+ x.field.map(|value| if value > 0 { do_nothing(value); do_nothing(value) });
+
+ // Suggestion for the let block should be `{ ... }` as it's too difficult to build a
+ // proper suggestion for these cases
+ x.field.map(|value| {
+ do_nothing(value);
+ do_nothing(value)
+ });
+ x.field.map(|value| { do_nothing(value); do_nothing(value); });
+
+ // The following should suggest `if let Ok(_X) ...` as it's difficult to generate a proper let variable name for them
+ let res: Result<!, usize> = Ok(42).map(diverge);
+ "12".parse::<i32>().map(diverge);
+
+ let res: Result<(), usize> = Ok(plus_one(1)).map(do_nothing);
+
+ // Should suggest `if let Ok(_y) ...` so as not to shadow the existing `y` variable
+ let y: Result<usize, usize> = Ok(42);
+ y.map(do_nothing);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr
new file mode 100644
index 000000000..88e4efdb0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr
@@ -0,0 +1,58 @@
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_unfixable.rs:23:5
+ |
+LL | x.field.map(|value| { do_nothing(value); do_nothing(value) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { ... }`
+ |
+ = note: `-D clippy::result-map-unit-fn` implied by `-D warnings`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_unfixable.rs:25:5
+ |
+LL | x.field.map(|value| if value > 0 { do_nothing(value); do_nothing(value) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { ... }`
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_unfixable.rs:29:5
+ |
+LL | x.field.map(|value| {
+ | _____^
+ | |_____|
+ | ||
+LL | || do_nothing(value);
+LL | || do_nothing(value)
+LL | || });
+ | ||______^- help: try this: `if let Ok(value) = x.field { ... }`
+ | |_______|
+ |
+
+error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_unfixable.rs:33:5
+ |
+LL | x.field.map(|value| { do_nothing(value); do_nothing(value); });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(value) = x.field { ... }`
+
+error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_unfixable.rs:37:5
+ |
+LL | "12".parse::<i32>().map(diverge);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(a) = "12".parse::<i32>() { diverge(a) }`
+
+error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
+ --> $DIR/result_map_unit_fn_unfixable.rs:43:5
+ |
+LL | y.map(do_nothing);
+ | ^^^^^^^^^^^^^^^^^-
+ | |
+ | help: try this: `if let Ok(_y) = y { do_nothing(_y) }`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/result_unit_error.rs b/src/tools/clippy/tests/ui/result_unit_error.rs
new file mode 100644
index 000000000..a4ec80302
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_unit_error.rs
@@ -0,0 +1,56 @@
+#![warn(clippy::result_unit_err)]
+
+pub fn returns_unit_error() -> Result<u32, ()> {
+ Err(())
+}
+
+fn private_unit_errors() -> Result<String, ()> {
+ Err(())
+}
+
+pub trait HasUnitError {
+ fn get_that_error(&self) -> Result<bool, ()>;
+
+ fn get_this_one_too(&self) -> Result<bool, ()> {
+ Err(())
+ }
+}
+
+impl HasUnitError for () {
+ fn get_that_error(&self) -> Result<bool, ()> {
+ Ok(true)
+ }
+}
+
+trait PrivateUnitError {
+ fn no_problem(&self) -> Result<usize, ()>;
+}
+
+pub struct UnitErrorHolder;
+
+impl UnitErrorHolder {
+ pub fn unit_error(&self) -> Result<usize, ()> {
+ Ok(0)
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/6546
+pub mod issue_6546 {
+ type ResInv<A, B> = Result<B, A>;
+
+ pub fn should_lint() -> ResInv<(), usize> {
+ Ok(0)
+ }
+
+ pub fn should_not_lint() -> ResInv<usize, ()> {
+ Ok(())
+ }
+
+ type MyRes<A, B> = Result<(A, B), Box<dyn std::error::Error>>;
+
+ pub fn should_not_lint2(x: i32) -> MyRes<i32, ()> {
+ Ok((x, ()))
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/result_unit_error.stderr b/src/tools/clippy/tests/ui/result_unit_error.stderr
new file mode 100644
index 000000000..8c7573eab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/result_unit_error.stderr
@@ -0,0 +1,43 @@
+error: this returns a `Result<_, ()>`
+ --> $DIR/result_unit_error.rs:3:1
+ |
+LL | pub fn returns_unit_error() -> Result<u32, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::result-unit-err` implied by `-D warnings`
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/result_unit_error.rs:12:5
+ |
+LL | fn get_that_error(&self) -> Result<bool, ()>;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/result_unit_error.rs:14:5
+ |
+LL | fn get_this_one_too(&self) -> Result<bool, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/result_unit_error.rs:32:5
+ |
+LL | pub fn unit_error(&self) -> Result<usize, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: this returns a `Result<_, ()>`
+ --> $DIR/result_unit_error.rs:41:5
+ |
+LL | pub fn should_lint() -> ResInv<(), usize> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a custom `Error` type instead
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/return_self_not_must_use.rs b/src/tools/clippy/tests/ui/return_self_not_must_use.rs
new file mode 100644
index 000000000..9b33ad6d3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/return_self_not_must_use.rs
@@ -0,0 +1,58 @@
+#![crate_type = "lib"]
+#![warn(clippy::return_self_not_must_use)]
+
+#[derive(Clone)]
+pub struct Bar;
+
+pub trait Whatever {
+ fn what(&self) -> Self;
+ // There should be no warning here! (returns a reference)
+ fn what2(&self) -> &Self;
+}
+
+impl Bar {
+ // There should be no warning here! (not taking a self argument)
+ pub fn not_new() -> Self {
+ Self
+ }
+ pub fn foo(&self) -> Self {
+ Self
+ }
+ pub fn bar(self) -> Self {
+ self
+ }
+ // There should be no warning here! (private method)
+ fn foo2(&self) -> Self {
+ Self
+ }
+ // There should be no warning here! (returns a reference)
+ pub fn foo3(&self) -> &Self {
+ self
+ }
+ // There should be no warning here! (already a `must_use` attribute)
+ #[must_use]
+ pub fn foo4(&self) -> Self {
+ Self
+ }
+}
+
+impl Whatever for Bar {
+ // There should be no warning here! (comes from the trait)
+ fn what(&self) -> Self {
+ self.foo2()
+ }
+ // There should be no warning here! (comes from the trait)
+ fn what2(&self) -> &Self {
+ self
+ }
+}
+
+#[must_use]
+pub struct Foo;
+
+impl Foo {
+ // There should be no warning here! (`Foo` already implements `#[must_use]`)
+ fn foo(&self) -> Self {
+ Self
+ }
+}
diff --git a/src/tools/clippy/tests/ui/return_self_not_must_use.stderr b/src/tools/clippy/tests/ui/return_self_not_must_use.stderr
new file mode 100644
index 000000000..94be87dfa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/return_self_not_must_use.stderr
@@ -0,0 +1,31 @@
+error: missing `#[must_use]` attribute on a method returning `Self`
+ --> $DIR/return_self_not_must_use.rs:8:5
+ |
+LL | fn what(&self) -> Self;
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::return-self-not-must-use` implied by `-D warnings`
+ = help: consider adding the `#[must_use]` attribute to the method or directly to the `Self` type
+
+error: missing `#[must_use]` attribute on a method returning `Self`
+ --> $DIR/return_self_not_must_use.rs:18:5
+ |
+LL | / pub fn foo(&self) -> Self {
+LL | | Self
+LL | | }
+ | |_____^
+ |
+ = help: consider adding the `#[must_use]` attribute to the method or directly to the `Self` type
+
+error: missing `#[must_use]` attribute on a method returning `Self`
+ --> $DIR/return_self_not_must_use.rs:21:5
+ |
+LL | / pub fn bar(self) -> Self {
+LL | | self
+LL | | }
+ | |_____^
+ |
+ = help: consider adding the `#[must_use]` attribute to the method or directly to the `Self` type
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.fixed b/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.fixed
new file mode 100644
index 000000000..79e482eec
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.fixed
@@ -0,0 +1,29 @@
+// run-rustfix
+#![warn(clippy::reversed_empty_ranges)]
+
+const ANSWER: i32 = 42;
+
+fn main() {
+ // These should be linted:
+
+ (21..=42).rev().for_each(|x| println!("{}", x));
+ let _ = (21..ANSWER).rev().filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
+
+ for _ in (-42..=-21).rev() {}
+ for _ in (21u32..42u32).rev() {}
+
+ // These should be ignored as they are not empty ranges:
+
+ (21..=42).for_each(|x| println!("{}", x));
+ (21..42).for_each(|x| println!("{}", x));
+
+ let arr = [1, 2, 3, 4, 5];
+ let _ = &arr[1..=3];
+ let _ = &arr[1..3];
+
+ for _ in 21..=42 {}
+ for _ in 21..42 {}
+
+ // This range is empty but should be ignored, see issue #5689
+ let _ = &arr[0..0];
+}
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.rs b/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.rs
new file mode 100644
index 000000000..b2e8bf337
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.rs
@@ -0,0 +1,29 @@
+// run-rustfix
+#![warn(clippy::reversed_empty_ranges)]
+
+const ANSWER: i32 = 42;
+
+fn main() {
+ // These should be linted:
+
+ (42..=21).for_each(|x| println!("{}", x));
+ let _ = (ANSWER..21).filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
+
+ for _ in -21..=-42 {}
+ for _ in 42u32..21u32 {}
+
+ // These should be ignored as they are not empty ranges:
+
+ (21..=42).for_each(|x| println!("{}", x));
+ (21..42).for_each(|x| println!("{}", x));
+
+ let arr = [1, 2, 3, 4, 5];
+ let _ = &arr[1..=3];
+ let _ = &arr[1..3];
+
+ for _ in 21..=42 {}
+ for _ in 21..42 {}
+
+ // This range is empty but should be ignored, see issue #5689
+ let _ = &arr[0..0];
+}
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.stderr b/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.stderr
new file mode 100644
index 000000000..2d1bfe62c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_fixable.stderr
@@ -0,0 +1,47 @@
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_fixable.rs:9:5
+ |
+LL | (42..=21).for_each(|x| println!("{}", x));
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::reversed-empty-ranges` implied by `-D warnings`
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | (21..=42).rev().for_each(|x| println!("{}", x));
+ | ~~~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_fixable.rs:10:13
+ |
+LL | let _ = (ANSWER..21).filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
+ | ^^^^^^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | let _ = (21..ANSWER).rev().filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
+ | ~~~~~~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_fixable.rs:12:14
+ |
+LL | for _ in -21..=-42 {}
+ | ^^^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for _ in (-42..=-21).rev() {}
+ | ~~~~~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_fixable.rs:13:14
+ |
+LL | for _ in 42u32..21u32 {}
+ | ^^^^^^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for _ in (21u32..42u32).rev() {}
+ | ~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.fixed b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.fixed
new file mode 100644
index 000000000..f1503ed6d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.fixed
@@ -0,0 +1,57 @@
+// run-rustfix
+#![warn(clippy::reversed_empty_ranges)]
+
+fn main() {
+ const MAX_LEN: usize = 42;
+
+ for i in (0..10).rev() {
+ println!("{}", i);
+ }
+
+ for i in (0..=10).rev() {
+ println!("{}", i);
+ }
+
+ for i in (0..MAX_LEN).rev() {
+ println!("{}", i);
+ }
+
+ for i in 5..=5 {
+ // not an error, this is the range with only one element “5”
+ println!("{}", i);
+ }
+
+ for i in 0..10 {
+ // not an error, the start index is less than the end index
+ println!("{}", i);
+ }
+
+ for i in -10..0 {
+ // not an error
+ println!("{}", i);
+ }
+
+ for i in (0..10).rev().map(|x| x * 2) {
+ println!("{}", i);
+ }
+
+ // testing that the empty range lint folds constants
+ for i in (5 + 4..10).rev() {
+ println!("{}", i);
+ }
+
+ for i in ((3 - 1)..(5 + 2)).rev() {
+ println!("{}", i);
+ }
+
+ for i in (2 * 2)..(2 * 3) {
+ // no error, 4..6 is fine
+ println!("{}", i);
+ }
+
+ let x = 42;
+ for i in x..10 {
+ // no error, not constant-foldable
+ println!("{}", i);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.rs b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.rs
new file mode 100644
index 000000000..a733788dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.rs
@@ -0,0 +1,57 @@
+// run-rustfix
+#![warn(clippy::reversed_empty_ranges)]
+
+fn main() {
+ const MAX_LEN: usize = 42;
+
+ for i in 10..0 {
+ println!("{}", i);
+ }
+
+ for i in 10..=0 {
+ println!("{}", i);
+ }
+
+ for i in MAX_LEN..0 {
+ println!("{}", i);
+ }
+
+ for i in 5..=5 {
+ // not an error, this is the range with only one element “5”
+ println!("{}", i);
+ }
+
+ for i in 0..10 {
+ // not an error, the start index is less than the end index
+ println!("{}", i);
+ }
+
+ for i in -10..0 {
+ // not an error
+ println!("{}", i);
+ }
+
+ for i in (10..0).map(|x| x * 2) {
+ println!("{}", i);
+ }
+
+ // testing that the empty range lint folds constants
+ for i in 10..5 + 4 {
+ println!("{}", i);
+ }
+
+ for i in (5 + 2)..(3 - 1) {
+ println!("{}", i);
+ }
+
+ for i in (2 * 2)..(2 * 3) {
+ // no error, 4..6 is fine
+ println!("{}", i);
+ }
+
+ let x = 42;
+ for i in x..10 {
+ // no error, not constant-foldable
+ println!("{}", i);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.stderr b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.stderr
new file mode 100644
index 000000000..a135da488
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_fixable.stderr
@@ -0,0 +1,69 @@
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_fixable.rs:7:14
+ |
+LL | for i in 10..0 {
+ | ^^^^^
+ |
+ = note: `-D clippy::reversed-empty-ranges` implied by `-D warnings`
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for i in (0..10).rev() {
+ | ~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_fixable.rs:11:14
+ |
+LL | for i in 10..=0 {
+ | ^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for i in (0..=10).rev() {
+ | ~~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_fixable.rs:15:14
+ |
+LL | for i in MAX_LEN..0 {
+ | ^^^^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for i in (0..MAX_LEN).rev() {
+ | ~~~~~~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_fixable.rs:34:14
+ |
+LL | for i in (10..0).map(|x| x * 2) {
+ | ^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for i in (0..10).rev().map(|x| x * 2) {
+ | ~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_fixable.rs:39:14
+ |
+LL | for i in 10..5 + 4 {
+ | ^^^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for i in (5 + 4..10).rev() {
+ | ~~~~~~~~~~~~~~~~~
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_fixable.rs:43:14
+ |
+LL | for i in (5 + 2)..(3 - 1) {
+ | ^^^^^^^^^^^^^^^^
+ |
+help: consider using the following if you are attempting to iterate over this range in reverse
+ |
+LL | for i in ((3 - 1)..(5 + 2)).rev() {
+ | ~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.rs b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.rs
new file mode 100644
index 000000000..c4c572244
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.rs
@@ -0,0 +1,11 @@
+#![warn(clippy::reversed_empty_ranges)]
+
+fn main() {
+ for i in 5..5 {
+ println!("{}", i);
+ }
+
+ for i in (5 + 2)..(8 - 1) {
+ println!("{}", i);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.stderr b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.stderr
new file mode 100644
index 000000000..30095d20c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_loops_unfixable.stderr
@@ -0,0 +1,16 @@
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_unfixable.rs:4:14
+ |
+LL | for i in 5..5 {
+ | ^^^^
+ |
+ = note: `-D clippy::reversed-empty-ranges` implied by `-D warnings`
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_loops_unfixable.rs:8:14
+ |
+LL | for i in (5 + 2)..(8 - 1) {
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.rs b/src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.rs
new file mode 100644
index 000000000..264d3d1e9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.rs
@@ -0,0 +1,15 @@
+#![warn(clippy::reversed_empty_ranges)]
+
+const ANSWER: i32 = 42;
+const SOME_NUM: usize = 3;
+
+fn main() {
+ let arr = [1, 2, 3, 4, 5];
+ let _ = &arr[3usize..=1usize];
+ let _ = &arr[SOME_NUM..1];
+
+ for _ in ANSWER..ANSWER {}
+
+ // Should not be linted, see issue #5689
+ let _ = (42 + 10..42 + 10).map(|x| x / 2).find(|&x| x == 21);
+}
diff --git a/src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.stderr b/src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.stderr
new file mode 100644
index 000000000..f23d4eb0f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/reversed_empty_ranges_unfixable.stderr
@@ -0,0 +1,22 @@
+error: this range is reversed and using it to index a slice will panic at run-time
+ --> $DIR/reversed_empty_ranges_unfixable.rs:8:18
+ |
+LL | let _ = &arr[3usize..=1usize];
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::reversed-empty-ranges` implied by `-D warnings`
+
+error: this range is reversed and using it to index a slice will panic at run-time
+ --> $DIR/reversed_empty_ranges_unfixable.rs:9:18
+ |
+LL | let _ = &arr[SOME_NUM..1];
+ | ^^^^^^^^^^^
+
+error: this range is empty so it will yield no values
+ --> $DIR/reversed_empty_ranges_unfixable.rs:11:14
+ |
+LL | for _ in ANSWER..ANSWER {}
+ | ^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/same_functions_in_if_condition.rs b/src/tools/clippy/tests/ui/same_functions_in_if_condition.rs
new file mode 100644
index 000000000..3d2295912
--- /dev/null
+++ b/src/tools/clippy/tests/ui/same_functions_in_if_condition.rs
@@ -0,0 +1,109 @@
+#![feature(adt_const_params)]
+#![allow(incomplete_features)]
+#![warn(clippy::same_functions_in_if_condition)]
+#![allow(clippy::ifs_same_cond)] // This warning is different from `ifs_same_cond`.
+#![allow(clippy::if_same_then_else, clippy::comparison_chain)] // all empty blocks
+
+fn function() -> bool {
+ true
+}
+
+fn fn_arg(_arg: u8) -> bool {
+ true
+}
+
+struct Struct;
+
+impl Struct {
+ fn method(&self) -> bool {
+ true
+ }
+ fn method_arg(&self, _arg: u8) -> bool {
+ true
+ }
+}
+
+fn ifs_same_cond_fn() {
+ let a = 0;
+ let obj = Struct;
+
+ if function() {
+ } else if function() {
+ //~ ERROR ifs same condition
+ }
+
+ if fn_arg(a) {
+ } else if fn_arg(a) {
+ //~ ERROR ifs same condition
+ }
+
+ if obj.method() {
+ } else if obj.method() {
+ //~ ERROR ifs same condition
+ }
+
+ if obj.method_arg(a) {
+ } else if obj.method_arg(a) {
+ //~ ERROR ifs same condition
+ }
+
+ let mut v = vec![1];
+ if v.pop() == None {
+ //~ ERROR ifs same condition
+ } else if v.pop() == None {
+ }
+
+ if v.len() == 42 {
+ //~ ERROR ifs same condition
+ } else if v.len() == 42 {
+ }
+
+ if v.len() == 1 {
+ // ok, different conditions
+ } else if v.len() == 2 {
+ }
+
+ if fn_arg(0) {
+ // ok, different arguments.
+ } else if fn_arg(1) {
+ }
+
+ if obj.method_arg(0) {
+ // ok, different arguments.
+ } else if obj.method_arg(1) {
+ }
+
+ if a == 1 {
+ // ok, a warning for this case is `ifs_same_cond`'s responsibility.
+ } else if a == 1 {
+ }
+}
+
+fn main() {
+ // macro as condition (see #6168)
+ let os = if cfg!(target_os = "macos") {
+ "macos"
+ } else if cfg!(target_os = "windows") {
+ "windows"
+ } else {
+ "linux"
+ };
+ println!("{}", os);
+
+ #[derive(PartialEq, Eq)]
+ enum E {
+ A,
+ B,
+ }
+ fn generic<const P: E>() -> bool {
+ match P {
+ E::A => true,
+ E::B => false,
+ }
+ }
+ if generic::<{ E::A }>() {
+ println!("A");
+ } else if generic::<{ E::B }>() {
+ println!("B");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/same_functions_in_if_condition.stderr b/src/tools/clippy/tests/ui/same_functions_in_if_condition.stderr
new file mode 100644
index 000000000..71e82910e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/same_functions_in_if_condition.stderr
@@ -0,0 +1,75 @@
+error: this `if` has the same function call as a previous `if`
+ --> $DIR/same_functions_in_if_condition.rs:31:15
+ |
+LL | } else if function() {
+ | ^^^^^^^^^^
+ |
+ = note: `-D clippy::same-functions-in-if-condition` implied by `-D warnings`
+note: same as this
+ --> $DIR/same_functions_in_if_condition.rs:30:8
+ |
+LL | if function() {
+ | ^^^^^^^^^^
+
+error: this `if` has the same function call as a previous `if`
+ --> $DIR/same_functions_in_if_condition.rs:36:15
+ |
+LL | } else if fn_arg(a) {
+ | ^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/same_functions_in_if_condition.rs:35:8
+ |
+LL | if fn_arg(a) {
+ | ^^^^^^^^^
+
+error: this `if` has the same function call as a previous `if`
+ --> $DIR/same_functions_in_if_condition.rs:41:15
+ |
+LL | } else if obj.method() {
+ | ^^^^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/same_functions_in_if_condition.rs:40:8
+ |
+LL | if obj.method() {
+ | ^^^^^^^^^^^^
+
+error: this `if` has the same function call as a previous `if`
+ --> $DIR/same_functions_in_if_condition.rs:46:15
+ |
+LL | } else if obj.method_arg(a) {
+ | ^^^^^^^^^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/same_functions_in_if_condition.rs:45:8
+ |
+LL | if obj.method_arg(a) {
+ | ^^^^^^^^^^^^^^^^^
+
+error: this `if` has the same function call as a previous `if`
+ --> $DIR/same_functions_in_if_condition.rs:53:15
+ |
+LL | } else if v.pop() == None {
+ | ^^^^^^^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/same_functions_in_if_condition.rs:51:8
+ |
+LL | if v.pop() == None {
+ | ^^^^^^^^^^^^^^^
+
+error: this `if` has the same function call as a previous `if`
+ --> $DIR/same_functions_in_if_condition.rs:58:15
+ |
+LL | } else if v.len() == 42 {
+ | ^^^^^^^^^^^^^
+ |
+note: same as this
+ --> $DIR/same_functions_in_if_condition.rs:56:8
+ |
+LL | if v.len() == 42 {
+ | ^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/same_item_push.rs b/src/tools/clippy/tests/ui/same_item_push.rs
new file mode 100644
index 000000000..99964f0de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/same_item_push.rs
@@ -0,0 +1,158 @@
+#![warn(clippy::same_item_push)]
+
+const VALUE: u8 = 7;
+
+fn mutate_increment(x: &mut u8) -> u8 {
+ *x += 1;
+ *x
+}
+
+fn increment(x: u8) -> u8 {
+ x + 1
+}
+
+fn fun() -> usize {
+ 42
+}
+
+fn main() {
+ // ** linted cases **
+ let mut vec: Vec<u8> = Vec::new();
+ let item = 2;
+ for _ in 5..=20 {
+ vec.push(item);
+ }
+
+ let mut vec: Vec<u8> = Vec::new();
+ for _ in 0..15 {
+ let item = 2;
+ vec.push(item);
+ }
+
+ let mut vec: Vec<u8> = Vec::new();
+ for _ in 0..15 {
+ vec.push(13);
+ }
+
+ let mut vec = Vec::new();
+ for _ in 0..20 {
+ vec.push(VALUE);
+ }
+
+ let mut vec = Vec::new();
+ let item = VALUE;
+ for _ in 0..20 {
+ vec.push(item);
+ }
+
+ // ** non-linted cases **
+ let mut spaces = Vec::with_capacity(10);
+ for _ in 0..10 {
+ spaces.push(vec![b' ']);
+ }
+
+ // Suggestion should not be given as the pushed variable can mutate
+ let mut vec: Vec<u8> = Vec::new();
+ let mut item: u8 = 2;
+ for _ in 0..30 {
+ vec.push(mutate_increment(&mut item));
+ }
+
+ let mut vec: Vec<u8> = Vec::new();
+ let mut item: u8 = 2;
+ let mut item2 = &mut mutate_increment(&mut item);
+ for _ in 0..30 {
+ vec.push(mutate_increment(item2));
+ }
+
+ let mut vec: Vec<usize> = Vec::new();
+ for (a, b) in [0, 1, 4, 9, 16].iter().enumerate() {
+ vec.push(a);
+ }
+
+ let mut vec: Vec<u8> = Vec::new();
+ for i in 0..30 {
+ vec.push(increment(i));
+ }
+
+ let mut vec: Vec<u8> = Vec::new();
+ for i in 0..30 {
+ vec.push(i + i * i);
+ }
+
+ // Suggestion should not be given as there are multiple pushes that are not the same
+ let mut vec: Vec<u8> = Vec::new();
+ let item: u8 = 2;
+ for _ in 0..30 {
+ vec.push(item);
+ vec.push(item * 2);
+ }
+
+ // Suggestion should not be given as Vec is not involved
+ for _ in 0..5 {
+ println!("Same Item Push");
+ }
+
+ struct A {
+ kind: u32,
+ }
+ let mut vec_a: Vec<A> = Vec::new();
+ for i in 0..30 {
+ vec_a.push(A { kind: i });
+ }
+ let mut vec: Vec<u8> = Vec::new();
+ for a in vec_a {
+ vec.push(2u8.pow(a.kind));
+ }
+
+ // Fix #5902
+ let mut vec: Vec<u8> = Vec::new();
+ let mut item = 0;
+ for _ in 0..10 {
+ vec.push(item);
+ item += 10;
+ }
+
+ // Fix #5979
+ let mut vec: Vec<std::fs::File> = Vec::new();
+ for _ in 0..10 {
+ vec.push(std::fs::File::open("foobar").unwrap());
+ }
+ // Fix #5979
+ #[derive(Clone)]
+ struct S;
+
+ trait T {}
+ impl T for S {}
+
+ let mut vec: Vec<Box<dyn T>> = Vec::new();
+ for _ in 0..10 {
+ vec.push(Box::new(S {}));
+ }
+
+ // Fix #5985
+ let mut vec = Vec::new();
+ let item = 42;
+ let item = fun();
+ for _ in 0..20 {
+ vec.push(item);
+ }
+
+ // Fix #5985
+ let mut vec = Vec::new();
+ let key = 1;
+ for _ in 0..20 {
+ let item = match key {
+ 1 => 10,
+ _ => 0,
+ };
+ vec.push(item);
+ }
+
+ // Fix #6987
+ let mut vec = Vec::new();
+ for _ in 0..10 {
+ vec.push(1);
+ vec.extend(&[2]);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/same_item_push.stderr b/src/tools/clippy/tests/ui/same_item_push.stderr
new file mode 100644
index 000000000..d9ffa1578
--- /dev/null
+++ b/src/tools/clippy/tests/ui/same_item_push.stderr
@@ -0,0 +1,43 @@
+error: it looks like the same item is being pushed into this Vec
+ --> $DIR/same_item_push.rs:23:9
+ |
+LL | vec.push(item);
+ | ^^^
+ |
+ = note: `-D clippy::same-item-push` implied by `-D warnings`
+ = help: try using vec![item;SIZE] or vec.resize(NEW_SIZE, item)
+
+error: it looks like the same item is being pushed into this Vec
+ --> $DIR/same_item_push.rs:29:9
+ |
+LL | vec.push(item);
+ | ^^^
+ |
+ = help: try using vec![item;SIZE] or vec.resize(NEW_SIZE, item)
+
+error: it looks like the same item is being pushed into this Vec
+ --> $DIR/same_item_push.rs:34:9
+ |
+LL | vec.push(13);
+ | ^^^
+ |
+ = help: try using vec![13;SIZE] or vec.resize(NEW_SIZE, 13)
+
+error: it looks like the same item is being pushed into this Vec
+ --> $DIR/same_item_push.rs:39:9
+ |
+LL | vec.push(VALUE);
+ | ^^^
+ |
+ = help: try using vec![VALUE;SIZE] or vec.resize(NEW_SIZE, VALUE)
+
+error: it looks like the same item is being pushed into this Vec
+ --> $DIR/same_item_push.rs:45:9
+ |
+LL | vec.push(item);
+ | ^^^
+ |
+ = help: try using vec![item;SIZE] or vec.resize(NEW_SIZE, item)
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/same_name_method.rs b/src/tools/clippy/tests/ui/same_name_method.rs
new file mode 100644
index 000000000..daef95a42
--- /dev/null
+++ b/src/tools/clippy/tests/ui/same_name_method.rs
@@ -0,0 +1,127 @@
+#![feature(lint_reasons)]
+#![warn(clippy::same_name_method)]
+#![allow(dead_code, non_camel_case_types)]
+
+trait T1 {
+ fn foo() {}
+}
+
+trait T2 {
+ fn foo() {}
+}
+
+mod should_lint {
+
+ mod test_basic_case {
+ use crate::T1;
+
+ struct S;
+
+ impl S {
+ fn foo() {}
+ }
+
+ impl T1 for S {
+ fn foo() {}
+ }
+ }
+
+ mod test_derive {
+
+ #[derive(Clone)]
+ struct S;
+
+ impl S {
+ fn clone() {}
+ }
+ }
+
+ mod with_generic {
+ use crate::T1;
+
+ struct S<U>(U);
+
+ impl<U> S<U> {
+ fn foo() {}
+ }
+
+ impl<U: Copy> T1 for S<U> {
+ fn foo() {}
+ }
+ }
+
+ mod default_method {
+ use crate::T1;
+
+ struct S;
+
+ impl S {
+ fn foo() {}
+ }
+
+ impl T1 for S {}
+ }
+
+ mod multiply_conflicit_trait {
+ use crate::{T1, T2};
+
+ struct S;
+
+ impl S {
+ fn foo() {}
+ }
+
+ impl T1 for S {}
+
+ impl T2 for S {}
+ }
+}
+
+mod should_not_lint {
+
+ mod not_lint_two_trait_method {
+ use crate::{T1, T2};
+
+ struct S;
+
+ impl T1 for S {
+ fn foo() {}
+ }
+
+ impl T2 for S {
+ fn foo() {}
+ }
+ }
+
+ mod only_lint_on_method {
+ trait T3 {
+ type foo;
+ }
+
+ struct S;
+
+ impl S {
+ fn foo() {}
+ }
+ impl T3 for S {
+ type foo = usize;
+ }
+ }
+}
+
+mod check_expect_suppression {
+ use crate::T1;
+
+ struct S;
+
+ impl S {
+ #[expect(clippy::same_name_method)]
+ fn foo() {}
+ }
+
+ impl T1 for S {
+ fn foo() {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/same_name_method.stderr b/src/tools/clippy/tests/ui/same_name_method.stderr
new file mode 100644
index 000000000..f55ec9f3c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/same_name_method.stderr
@@ -0,0 +1,64 @@
+error: method's name is the same as an existing method in a trait
+ --> $DIR/same_name_method.rs:21:13
+ |
+LL | fn foo() {}
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::same-name-method` implied by `-D warnings`
+note: existing `foo` defined here
+ --> $DIR/same_name_method.rs:25:13
+ |
+LL | fn foo() {}
+ | ^^^^^^^^^^^
+
+error: method's name is the same as an existing method in a trait
+ --> $DIR/same_name_method.rs:35:13
+ |
+LL | fn clone() {}
+ | ^^^^^^^^^^^^^
+ |
+note: existing `clone` defined here
+ --> $DIR/same_name_method.rs:31:18
+ |
+LL | #[derive(Clone)]
+ | ^^^^^
+ = note: this error originates in the derive macro `Clone` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: method's name is the same as an existing method in a trait
+ --> $DIR/same_name_method.rs:45:13
+ |
+LL | fn foo() {}
+ | ^^^^^^^^^^^
+ |
+note: existing `foo` defined here
+ --> $DIR/same_name_method.rs:49:13
+ |
+LL | fn foo() {}
+ | ^^^^^^^^^^^
+
+error: method's name is the same as an existing method in a trait
+ --> $DIR/same_name_method.rs:59:13
+ |
+LL | fn foo() {}
+ | ^^^^^^^^^^^
+ |
+note: existing `foo` defined here
+ --> $DIR/same_name_method.rs:62:9
+ |
+LL | impl T1 for S {}
+ | ^^^^^^^^^^^^^^^^
+
+error: method's name is the same as an existing method in a trait
+ --> $DIR/same_name_method.rs:71:13
+ |
+LL | fn foo() {}
+ | ^^^^^^^^^^^
+ |
+note: existing `foo` defined here
+ --> $DIR/same_name_method.rs:74:9
+ |
+LL | impl T1 for S {}
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/search_is_some.rs b/src/tools/clippy/tests/ui/search_is_some.rs
new file mode 100644
index 000000000..72f335153
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some.rs
@@ -0,0 +1,79 @@
+// aux-build:option_helpers.rs
+#![warn(clippy::search_is_some)]
+#![allow(dead_code)]
+extern crate option_helpers;
+use option_helpers::IteratorFalsePositives;
+
+#[rustfmt::skip]
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+ let y = &&42;
+
+
+ // Check `find().is_some()`, multi-line case.
+ let _ = v.iter().find(|&x| {
+ *x < 0
+ }
+ ).is_some();
+
+ // Check `position().is_some()`, multi-line case.
+ let _ = v.iter().position(|&x| {
+ x < 0
+ }
+ ).is_some();
+
+ // Check `rposition().is_some()`, multi-line case.
+ let _ = v.iter().rposition(|&x| {
+ x < 0
+ }
+ ).is_some();
+
+ // Check that we don't lint if the caller is not an `Iterator` or string
+ let falsepos = IteratorFalsePositives { foo: 0 };
+ let _ = falsepos.find().is_some();
+ let _ = falsepos.position().is_some();
+ let _ = falsepos.rposition().is_some();
+ // check that we don't lint if `find()` is called with
+ // `Pattern` that is not a string
+ let _ = "hello world".find(|c: char| c == 'o' || c == 'l').is_some();
+
+ let some_closure = |x: &u32| *x == 0;
+ let _ = (0..1).find(some_closure).is_some();
+}
+
+#[rustfmt::skip]
+fn is_none() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+ let y = &&42;
+
+
+ // Check `find().is_none()`, multi-line case.
+ let _ = v.iter().find(|&x| {
+ *x < 0
+ }
+ ).is_none();
+
+ // Check `position().is_none()`, multi-line case.
+ let _ = v.iter().position(|&x| {
+ x < 0
+ }
+ ).is_none();
+
+ // Check `rposition().is_none()`, multi-line case.
+ let _ = v.iter().rposition(|&x| {
+ x < 0
+ }
+ ).is_none();
+
+ // Check that we don't lint if the caller is not an `Iterator` or string
+ let falsepos = IteratorFalsePositives { foo: 0 };
+ let _ = falsepos.find().is_none();
+ let _ = falsepos.position().is_none();
+ let _ = falsepos.rposition().is_none();
+ // check that we don't lint if `find()` is called with
+ // `Pattern` that is not a string
+ let _ = "hello world".find(|c: char| c == 'o' || c == 'l').is_none();
+
+ let some_closure = |x: &u32| *x == 0;
+ let _ = (0..1).find(some_closure).is_none();
+}
diff --git a/src/tools/clippy/tests/ui/search_is_some.stderr b/src/tools/clippy/tests/ui/search_is_some.stderr
new file mode 100644
index 000000000..54760545b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some.stderr
@@ -0,0 +1,87 @@
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some.rs:14:13
+ |
+LL | let _ = v.iter().find(|&x| {
+ | _____________^
+LL | | *x < 0
+LL | | }
+LL | | ).is_some();
+ | |______________________________^
+ |
+ = note: `-D clippy::search-is-some` implied by `-D warnings`
+ = help: this is more succinctly expressed by calling `any()`
+
+error: called `is_some()` after searching an `Iterator` with `position`
+ --> $DIR/search_is_some.rs:20:13
+ |
+LL | let _ = v.iter().position(|&x| {
+ | _____________^
+LL | | x < 0
+LL | | }
+LL | | ).is_some();
+ | |______________________________^
+ |
+ = help: this is more succinctly expressed by calling `any()`
+
+error: called `is_some()` after searching an `Iterator` with `rposition`
+ --> $DIR/search_is_some.rs:26:13
+ |
+LL | let _ = v.iter().rposition(|&x| {
+ | _____________^
+LL | | x < 0
+LL | | }
+LL | | ).is_some();
+ | |______________________________^
+ |
+ = help: this is more succinctly expressed by calling `any()`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some.rs:41:20
+ |
+LL | let _ = (0..1).find(some_closure).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(some_closure)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some.rs:51:13
+ |
+LL | let _ = v.iter().find(|&x| {
+ | _____________^
+LL | | *x < 0
+LL | | }
+LL | | ).is_none();
+ | |______________________________^
+ |
+ = help: this is more succinctly expressed by calling `any()` with negation
+
+error: called `is_none()` after searching an `Iterator` with `position`
+ --> $DIR/search_is_some.rs:57:13
+ |
+LL | let _ = v.iter().position(|&x| {
+ | _____________^
+LL | | x < 0
+LL | | }
+LL | | ).is_none();
+ | |______________________________^
+ |
+ = help: this is more succinctly expressed by calling `any()` with negation
+
+error: called `is_none()` after searching an `Iterator` with `rposition`
+ --> $DIR/search_is_some.rs:63:13
+ |
+LL | let _ = v.iter().rposition(|&x| {
+ | _____________^
+LL | | x < 0
+LL | | }
+LL | | ).is_none();
+ | |______________________________^
+ |
+ = help: this is more succinctly expressed by calling `any()` with negation
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some.rs:78:13
+ |
+LL | let _ = (0..1).find(some_closure).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(0..1).any(some_closure)`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed b/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed
new file mode 100644
index 000000000..5190c5304
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed
@@ -0,0 +1,216 @@
+// run-rustfix
+#![allow(dead_code, clippy::explicit_auto_deref)]
+#![warn(clippy::search_is_some)]
+
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+ let y = &&42;
+
+ // Check `find().is_none()`, single-line case.
+ let _ = !v.iter().any(|x| *x < 0);
+ let _ = !(0..1).any(|x| **y == x); // one dereference less
+ let _ = !(0..1).any(|x| x == 0);
+ let _ = !v.iter().any(|x| *x == 0);
+ let _ = !(4..5).any(|x| x == 1 || x == 3 || x == 5);
+ let _ = !(1..3).any(|x| [1, 2, 3].contains(&x));
+ let _ = !(1..3).any(|x| x == 0 || [1, 2, 3].contains(&x));
+ let _ = !(1..3).any(|x| [1, 2, 3].contains(&x) || x == 0);
+ let _ = !(1..3).any(|x| [1, 2, 3].contains(&x) || x == 0 || [4, 5, 6].contains(&x) || x == -1);
+
+ // Check `position().is_none()`, single-line case.
+ let _ = !v.iter().any(|&x| x < 0);
+
+ // Check `rposition().is_none()`, single-line case.
+ let _ = !v.iter().any(|&x| x < 0);
+
+ let s1 = String::from("hello world");
+ let s2 = String::from("world");
+
+ // caller of `find()` is a `&'static str`
+ let _ = !"hello world".contains("world");
+ let _ = !"hello world".contains(&s2);
+ let _ = !"hello world".contains(&s2[2..]);
+ // caller of `find()` is a `String`
+ let _ = !s1.contains("world");
+ let _ = !s1.contains(&s2);
+ let _ = !s1.contains(&s2[2..]);
+ // caller of `find()` is slice of `String`
+ let _ = !s1[2..].contains("world");
+ let _ = !s1[2..].contains(&s2);
+ let _ = !s1[2..].contains(&s2[2..]);
+}
+
+#[allow(clippy::clone_on_copy, clippy::map_clone)]
+mod issue7392 {
+ struct Player {
+ hand: Vec<usize>,
+ }
+ fn filter() {
+ let p = Player {
+ hand: vec![1, 2, 3, 4, 5],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|c| !filter_hand.iter().any(|cc| c == &cc))
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ struct PlayerTuple {
+ hand: Vec<(usize, char)>,
+ }
+ fn filter_tuple() {
+ let p = PlayerTuple {
+ hand: vec![(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|(c, _)| !filter_hand.iter().any(|cc| c == cc))
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ fn field_projection() {
+ struct Foo {
+ foo: i32,
+ bar: u32,
+ }
+ let vfoo = vec![Foo { foo: 1, bar: 2 }];
+ let _ = !vfoo.iter().any(|v| v.foo == 1 && v.bar == 2);
+
+ let vfoo = vec![(42, Foo { foo: 1, bar: 2 })];
+ let _ = !vfoo
+ .iter().any(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2);
+ }
+
+ fn index_projection() {
+ let vfoo = vec![[0, 1, 2, 3]];
+ let _ = !vfoo.iter().any(|a| a[0] == 42);
+ }
+
+ #[allow(clippy::match_like_matches_macro)]
+ fn slice_projection() {
+ let vfoo = vec![[0, 1, 2, 3, 0, 1, 2, 3]];
+ let _ = !vfoo.iter().any(|sub| sub[1..4].len() == 3);
+ }
+
+ fn please(x: &u32) -> bool {
+ *x == 9
+ }
+
+ fn deref_enough(x: u32) -> bool {
+ x == 78
+ }
+
+ fn arg_no_deref(x: &&u32) -> bool {
+ **x == 78
+ }
+
+ fn more_projections() {
+ let x = 19;
+ let ppx: &u32 = &x;
+ let _ = ![ppx].iter().any(|ppp_x: &&u32| please(ppp_x));
+ let _ = ![String::from("Hey hey")].iter().any(|s| s.len() == 2);
+
+ let v = vec![3, 2, 1, 0];
+ let _ = !v.iter().any(|x| deref_enough(*x));
+ let _ = !v.iter().any(|x: &u32| deref_enough(*x));
+
+ #[allow(clippy::redundant_closure)]
+ let _ = !v.iter().any(|x| arg_no_deref(&x));
+ #[allow(clippy::redundant_closure)]
+ let _ = !v.iter().any(|x: &u32| arg_no_deref(&x));
+ }
+
+ fn field_index_projection() {
+ struct FooDouble {
+ bar: Vec<Vec<i32>>,
+ }
+ struct Foo {
+ bar: Vec<i32>,
+ }
+ struct FooOuter {
+ inner: Foo,
+ inner_double: FooDouble,
+ }
+ let vfoo = vec![FooOuter {
+ inner: Foo { bar: vec![0, 1, 2, 3] },
+ inner_double: FooDouble {
+ bar: vec![vec![0, 1, 2, 3]],
+ },
+ }];
+ let _ = !vfoo
+ .iter().any(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2);
+ }
+
+ fn index_field_projection() {
+ struct Foo {
+ bar: i32,
+ }
+ struct FooOuter {
+ inner: Vec<Foo>,
+ }
+ let vfoo = vec![FooOuter {
+ inner: vec![Foo { bar: 0 }],
+ }];
+ let _ = !vfoo.iter().any(|v| v.inner[0].bar == 2);
+ }
+
+ fn double_deref_index_projection() {
+ let vfoo = vec![&&[0, 1, 2, 3]];
+ let _ = !vfoo.iter().any(|x| (**x)[0] == 9);
+ }
+
+ fn method_call_by_ref() {
+ struct Foo {
+ bar: u32,
+ }
+ impl Foo {
+ pub fn by_ref(&self, x: &u32) -> bool {
+ *x == self.bar
+ }
+ }
+ let vfoo = vec![Foo { bar: 1 }];
+ let _ = !vfoo.iter().any(|v| v.by_ref(&v.bar));
+ }
+
+ fn ref_bindings() {
+ let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y);
+ let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y);
+ }
+
+ fn test_string_1(s: &str) -> bool {
+ s.is_empty()
+ }
+
+ fn test_u32_1(s: &u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn test_u32_2(s: u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn projection_in_args_test() {
+ // Index projections
+ let lst = &[String::from("Hello"), String::from("world")];
+ let v: Vec<&[String]> = vec![lst];
+ let _ = !v.iter().any(|s| s[0].is_empty());
+ let _ = !v.iter().any(|s| test_string_1(&s[0]));
+
+ // Field projections
+ struct FieldProjection<'a> {
+ field: &'a u32,
+ }
+ let field = 123456789;
+ let instance = FieldProjection { field: &field };
+ let v = vec![instance];
+ let _ = !v.iter().any(|fp| fp.field.is_power_of_two());
+ let _ = !v.iter().any(|fp| test_u32_1(fp.field));
+ let _ = !v.iter().any(|fp| test_u32_2(*fp.field));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs b/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs
new file mode 100644
index 000000000..310d87333
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs
@@ -0,0 +1,222 @@
+// run-rustfix
+#![allow(dead_code, clippy::explicit_auto_deref)]
+#![warn(clippy::search_is_some)]
+
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+ let y = &&42;
+
+ // Check `find().is_none()`, single-line case.
+ let _ = v.iter().find(|&x| *x < 0).is_none();
+ let _ = (0..1).find(|x| **y == *x).is_none(); // one dereference less
+ let _ = (0..1).find(|x| *x == 0).is_none();
+ let _ = v.iter().find(|x| **x == 0).is_none();
+ let _ = (4..5).find(|x| *x == 1 || *x == 3 || *x == 5).is_none();
+ let _ = (1..3).find(|x| [1, 2, 3].contains(x)).is_none();
+ let _ = (1..3).find(|x| *x == 0 || [1, 2, 3].contains(x)).is_none();
+ let _ = (1..3).find(|x| [1, 2, 3].contains(x) || *x == 0).is_none();
+ let _ = (1..3)
+ .find(|x| [1, 2, 3].contains(x) || *x == 0 || [4, 5, 6].contains(x) || *x == -1)
+ .is_none();
+
+ // Check `position().is_none()`, single-line case.
+ let _ = v.iter().position(|&x| x < 0).is_none();
+
+ // Check `rposition().is_none()`, single-line case.
+ let _ = v.iter().rposition(|&x| x < 0).is_none();
+
+ let s1 = String::from("hello world");
+ let s2 = String::from("world");
+
+ // caller of `find()` is a `&'static str`
+ let _ = "hello world".find("world").is_none();
+ let _ = "hello world".find(&s2).is_none();
+ let _ = "hello world".find(&s2[2..]).is_none();
+ // caller of `find()` is a `String`
+ let _ = s1.find("world").is_none();
+ let _ = s1.find(&s2).is_none();
+ let _ = s1.find(&s2[2..]).is_none();
+ // caller of `find()` is slice of `String`
+ let _ = s1[2..].find("world").is_none();
+ let _ = s1[2..].find(&s2).is_none();
+ let _ = s1[2..].find(&s2[2..]).is_none();
+}
+
+#[allow(clippy::clone_on_copy, clippy::map_clone)]
+mod issue7392 {
+ struct Player {
+ hand: Vec<usize>,
+ }
+ fn filter() {
+ let p = Player {
+ hand: vec![1, 2, 3, 4, 5],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|c| filter_hand.iter().find(|cc| c == cc).is_none())
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ struct PlayerTuple {
+ hand: Vec<(usize, char)>,
+ }
+ fn filter_tuple() {
+ let p = PlayerTuple {
+ hand: vec![(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|(c, _)| filter_hand.iter().find(|cc| c == *cc).is_none())
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ fn field_projection() {
+ struct Foo {
+ foo: i32,
+ bar: u32,
+ }
+ let vfoo = vec![Foo { foo: 1, bar: 2 }];
+ let _ = vfoo.iter().find(|v| v.foo == 1 && v.bar == 2).is_none();
+
+ let vfoo = vec![(42, Foo { foo: 1, bar: 2 })];
+ let _ = vfoo
+ .iter()
+ .find(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2)
+ .is_none();
+ }
+
+ fn index_projection() {
+ let vfoo = vec![[0, 1, 2, 3]];
+ let _ = vfoo.iter().find(|a| a[0] == 42).is_none();
+ }
+
+ #[allow(clippy::match_like_matches_macro)]
+ fn slice_projection() {
+ let vfoo = vec![[0, 1, 2, 3, 0, 1, 2, 3]];
+ let _ = vfoo.iter().find(|sub| sub[1..4].len() == 3).is_none();
+ }
+
+ fn please(x: &u32) -> bool {
+ *x == 9
+ }
+
+ fn deref_enough(x: u32) -> bool {
+ x == 78
+ }
+
+ fn arg_no_deref(x: &&u32) -> bool {
+ **x == 78
+ }
+
+ fn more_projections() {
+ let x = 19;
+ let ppx: &u32 = &x;
+ let _ = [ppx].iter().find(|ppp_x: &&&u32| please(**ppp_x)).is_none();
+ let _ = [String::from("Hey hey")].iter().find(|s| s.len() == 2).is_none();
+
+ let v = vec![3, 2, 1, 0];
+ let _ = v.iter().find(|x| deref_enough(**x)).is_none();
+ let _ = v.iter().find(|x: &&u32| deref_enough(**x)).is_none();
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x| arg_no_deref(x)).is_none();
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x: &&u32| arg_no_deref(x)).is_none();
+ }
+
+ fn field_index_projection() {
+ struct FooDouble {
+ bar: Vec<Vec<i32>>,
+ }
+ struct Foo {
+ bar: Vec<i32>,
+ }
+ struct FooOuter {
+ inner: Foo,
+ inner_double: FooDouble,
+ }
+ let vfoo = vec![FooOuter {
+ inner: Foo { bar: vec![0, 1, 2, 3] },
+ inner_double: FooDouble {
+ bar: vec![vec![0, 1, 2, 3]],
+ },
+ }];
+ let _ = vfoo
+ .iter()
+ .find(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2)
+ .is_none();
+ }
+
+ fn index_field_projection() {
+ struct Foo {
+ bar: i32,
+ }
+ struct FooOuter {
+ inner: Vec<Foo>,
+ }
+ let vfoo = vec![FooOuter {
+ inner: vec![Foo { bar: 0 }],
+ }];
+ let _ = vfoo.iter().find(|v| v.inner[0].bar == 2).is_none();
+ }
+
+ fn double_deref_index_projection() {
+ let vfoo = vec![&&[0, 1, 2, 3]];
+ let _ = vfoo.iter().find(|x| (**x)[0] == 9).is_none();
+ }
+
+ fn method_call_by_ref() {
+ struct Foo {
+ bar: u32,
+ }
+ impl Foo {
+ pub fn by_ref(&self, x: &u32) -> bool {
+ *x == self.bar
+ }
+ }
+ let vfoo = vec![Foo { bar: 1 }];
+ let _ = vfoo.iter().find(|v| v.by_ref(&v.bar)).is_none();
+ }
+
+ fn ref_bindings() {
+ let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_none();
+ let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_none();
+ }
+
+ fn test_string_1(s: &str) -> bool {
+ s.is_empty()
+ }
+
+ fn test_u32_1(s: &u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn test_u32_2(s: u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn projection_in_args_test() {
+ // Index projections
+ let lst = &[String::from("Hello"), String::from("world")];
+ let v: Vec<&[String]> = vec![lst];
+ let _ = v.iter().find(|s| s[0].is_empty()).is_none();
+ let _ = v.iter().find(|s| test_string_1(&s[0])).is_none();
+
+ // Field projections
+ struct FieldProjection<'a> {
+ field: &'a u32,
+ }
+ let field = 123456789;
+ let instance = FieldProjection { field: &field };
+ let v = vec![instance];
+ let _ = v.iter().find(|fp| fp.field.is_power_of_two()).is_none();
+ let _ = v.iter().find(|fp| test_u32_1(fp.field)).is_none();
+ let _ = v.iter().find(|fp| test_u32_2(*fp.field)).is_none();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr b/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr
new file mode 100644
index 000000000..933ce5cf4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr
@@ -0,0 +1,285 @@
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:10:13
+ |
+LL | let _ = v.iter().find(|&x| *x < 0).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|x| *x < 0)`
+ |
+ = note: `-D clippy::search-is-some` implied by `-D warnings`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:11:13
+ |
+LL | let _ = (0..1).find(|x| **y == *x).is_none(); // one dereference less
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(0..1).any(|x| **y == x)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:12:13
+ |
+LL | let _ = (0..1).find(|x| *x == 0).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(0..1).any(|x| x == 0)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:13:13
+ |
+LL | let _ = v.iter().find(|x| **x == 0).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|x| *x == 0)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:14:13
+ |
+LL | let _ = (4..5).find(|x| *x == 1 || *x == 3 || *x == 5).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(4..5).any(|x| x == 1 || x == 3 || x == 5)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:15:13
+ |
+LL | let _ = (1..3).find(|x| [1, 2, 3].contains(x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(1..3).any(|x| [1, 2, 3].contains(&x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:16:13
+ |
+LL | let _ = (1..3).find(|x| *x == 0 || [1, 2, 3].contains(x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(1..3).any(|x| x == 0 || [1, 2, 3].contains(&x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:17:13
+ |
+LL | let _ = (1..3).find(|x| [1, 2, 3].contains(x) || *x == 0).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!(1..3).any(|x| [1, 2, 3].contains(&x) || x == 0)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:18:13
+ |
+LL | let _ = (1..3)
+ | _____________^
+LL | | .find(|x| [1, 2, 3].contains(x) || *x == 0 || [4, 5, 6].contains(x) || *x == -1)
+LL | | .is_none();
+ | |__________________^ help: use `!_.any()` instead: `!(1..3).any(|x| [1, 2, 3].contains(&x) || x == 0 || [4, 5, 6].contains(&x) || x == -1)`
+
+error: called `is_none()` after searching an `Iterator` with `position`
+ --> $DIR/search_is_some_fixable_none.rs:23:13
+ |
+LL | let _ = v.iter().position(|&x| x < 0).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|&x| x < 0)`
+
+error: called `is_none()` after searching an `Iterator` with `rposition`
+ --> $DIR/search_is_some_fixable_none.rs:26:13
+ |
+LL | let _ = v.iter().rposition(|&x| x < 0).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|&x| x < 0)`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:32:13
+ |
+LL | let _ = "hello world".find("world").is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!"hello world".contains("world")`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:33:13
+ |
+LL | let _ = "hello world".find(&s2).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!"hello world".contains(&s2)`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:34:13
+ |
+LL | let _ = "hello world".find(&s2[2..]).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!"hello world".contains(&s2[2..])`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:36:13
+ |
+LL | let _ = s1.find("world").is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!s1.contains("world")`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:37:13
+ |
+LL | let _ = s1.find(&s2).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!s1.contains(&s2)`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:38:13
+ |
+LL | let _ = s1.find(&s2[2..]).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!s1.contains(&s2[2..])`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:40:13
+ |
+LL | let _ = s1[2..].find("world").is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!s1[2..].contains("world")`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:41:13
+ |
+LL | let _ = s1[2..].find(&s2).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!s1[2..].contains(&s2)`
+
+error: called `is_none()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_none.rs:42:13
+ |
+LL | let _ = s1[2..].find(&s2[2..]).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.contains()` instead: `!s1[2..].contains(&s2[2..])`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:58:25
+ |
+LL | .filter(|c| filter_hand.iter().find(|cc| c == cc).is_none())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!filter_hand.iter().any(|cc| c == &cc)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:74:30
+ |
+LL | .filter(|(c, _)| filter_hand.iter().find(|cc| c == *cc).is_none())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!filter_hand.iter().any(|cc| c == cc)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:85:17
+ |
+LL | let _ = vfoo.iter().find(|v| v.foo == 1 && v.bar == 2).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!vfoo.iter().any(|v| v.foo == 1 && v.bar == 2)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:88:17
+ |
+LL | let _ = vfoo
+ | _________________^
+LL | | .iter()
+LL | | .find(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2)
+LL | | .is_none();
+ | |______________________^
+ |
+help: use `!_.any()` instead
+ |
+LL ~ let _ = !vfoo
+LL ~ .iter().any(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2);
+ |
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:96:17
+ |
+LL | let _ = vfoo.iter().find(|a| a[0] == 42).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!vfoo.iter().any(|a| a[0] == 42)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:102:17
+ |
+LL | let _ = vfoo.iter().find(|sub| sub[1..4].len() == 3).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!vfoo.iter().any(|sub| sub[1..4].len() == 3)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:120:17
+ |
+LL | let _ = [ppx].iter().find(|ppp_x: &&&u32| please(**ppp_x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `![ppx].iter().any(|ppp_x: &&u32| please(ppp_x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:121:17
+ |
+LL | let _ = [String::from("Hey hey")].iter().find(|s| s.len() == 2).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `![String::from("Hey hey")].iter().any(|s| s.len() == 2)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:124:17
+ |
+LL | let _ = v.iter().find(|x| deref_enough(**x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|x| deref_enough(*x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:125:17
+ |
+LL | let _ = v.iter().find(|x: &&u32| deref_enough(**x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|x: &u32| deref_enough(*x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:128:17
+ |
+LL | let _ = v.iter().find(|x| arg_no_deref(x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|x| arg_no_deref(&x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:130:17
+ |
+LL | let _ = v.iter().find(|x: &&u32| arg_no_deref(x)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|x: &u32| arg_no_deref(&x))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:150:17
+ |
+LL | let _ = vfoo
+ | _________________^
+LL | | .iter()
+LL | | .find(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2)
+LL | | .is_none();
+ | |______________________^
+ |
+help: use `!_.any()` instead
+ |
+LL ~ let _ = !vfoo
+LL ~ .iter().any(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2);
+ |
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:166:17
+ |
+LL | let _ = vfoo.iter().find(|v| v.inner[0].bar == 2).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!vfoo.iter().any(|v| v.inner[0].bar == 2)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:171:17
+ |
+LL | let _ = vfoo.iter().find(|x| (**x)[0] == 9).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!vfoo.iter().any(|x| (**x)[0] == 9)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:184:17
+ |
+LL | let _ = vfoo.iter().find(|v| v.by_ref(&v.bar)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!vfoo.iter().any(|v| v.by_ref(&v.bar))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:188:17
+ |
+LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:189:17
+ |
+LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y)`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:208:17
+ |
+LL | let _ = v.iter().find(|s| s[0].is_empty()).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|s| s[0].is_empty())`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:209:17
+ |
+LL | let _ = v.iter().find(|s| test_string_1(&s[0])).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|s| test_string_1(&s[0]))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:218:17
+ |
+LL | let _ = v.iter().find(|fp| fp.field.is_power_of_two()).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|fp| fp.field.is_power_of_two())`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:219:17
+ |
+LL | let _ = v.iter().find(|fp| test_u32_1(fp.field)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|fp| test_u32_1(fp.field))`
+
+error: called `is_none()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_none.rs:220:17
+ |
+LL | let _ = v.iter().find(|fp| test_u32_2(*fp.field)).is_none();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `!_.any()` instead: `!v.iter().any(|fp| test_u32_2(*fp.field))`
+
+error: aborting due to 43 previous errors
+
diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed b/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed
new file mode 100644
index 000000000..385a9986a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed
@@ -0,0 +1,248 @@
+// run-rustfix
+#![allow(dead_code, clippy::explicit_auto_deref)]
+#![warn(clippy::search_is_some)]
+
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+ let y = &&42;
+
+ // Check `find().is_some()`, single-line case.
+ let _ = v.iter().any(|x| *x < 0);
+ let _ = (0..1).any(|x| **y == x); // one dereference less
+ let _ = (0..1).any(|x| x == 0);
+ let _ = v.iter().any(|x| *x == 0);
+ let _ = (4..5).any(|x| x == 1 || x == 3 || x == 5);
+ let _ = (1..3).any(|x| [1, 2, 3].contains(&x));
+ let _ = (1..3).any(|x| x == 0 || [1, 2, 3].contains(&x));
+ let _ = (1..3).any(|x| [1, 2, 3].contains(&x) || x == 0);
+ let _ = (1..3)
+ .any(|x| [1, 2, 3].contains(&x) || x == 0 || [4, 5, 6].contains(&x) || x == -1);
+
+ // Check `position().is_some()`, single-line case.
+ let _ = v.iter().any(|&x| x < 0);
+
+ // Check `rposition().is_some()`, single-line case.
+ let _ = v.iter().any(|&x| x < 0);
+
+ let s1 = String::from("hello world");
+ let s2 = String::from("world");
+ // caller of `find()` is a `&'static str`
+ let _ = "hello world".contains("world");
+ let _ = "hello world".contains(&s2);
+ let _ = "hello world".contains(&s2[2..]);
+ // caller of `find()` is a `String`
+ let _ = s1.contains("world");
+ let _ = s1.contains(&s2);
+ let _ = s1.contains(&s2[2..]);
+ // caller of `find()` is slice of `String`
+ let _ = s1[2..].contains("world");
+ let _ = s1[2..].contains(&s2);
+ let _ = s1[2..].contains(&s2[2..]);
+}
+
+#[allow(clippy::clone_on_copy, clippy::map_clone)]
+mod issue7392 {
+ struct Player {
+ hand: Vec<usize>,
+ }
+ fn filter() {
+ let p = Player {
+ hand: vec![1, 2, 3, 4, 5],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|c| filter_hand.iter().any(|cc| c == &cc))
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ struct PlayerTuple {
+ hand: Vec<(usize, char)>,
+ }
+ fn filter_tuple() {
+ let p = PlayerTuple {
+ hand: vec![(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|(c, _)| filter_hand.iter().any(|cc| c == cc))
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ fn field_projection() {
+ struct Foo {
+ foo: i32,
+ bar: u32,
+ }
+ let vfoo = vec![Foo { foo: 1, bar: 2 }];
+ let _ = vfoo.iter().any(|v| v.foo == 1 && v.bar == 2);
+
+ let vfoo = vec![(42, Foo { foo: 1, bar: 2 })];
+ let _ = vfoo
+ .iter()
+ .any(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2);
+ }
+
+ fn index_projection() {
+ let vfoo = vec![[0, 1, 2, 3]];
+ let _ = vfoo.iter().any(|a| a[0] == 42);
+ }
+
+ #[allow(clippy::match_like_matches_macro)]
+ fn slice_projection() {
+ let vfoo = vec![[0, 1, 2, 3, 0, 1, 2, 3]];
+ let _ = vfoo.iter().any(|sub| sub[1..4].len() == 3);
+ }
+
+ fn please(x: &u32) -> bool {
+ *x == 9
+ }
+
+ fn deref_enough(x: u32) -> bool {
+ x == 78
+ }
+
+ fn arg_no_deref(x: &&u32) -> bool {
+ **x == 78
+ }
+
+ fn more_projections() {
+ let x = 19;
+ let ppx: &u32 = &x;
+ let _ = [ppx].iter().any(|ppp_x: &&u32| please(ppp_x));
+ let _ = [String::from("Hey hey")].iter().any(|s| s.len() == 2);
+
+ let v = vec![3, 2, 1, 0];
+ let _ = v.iter().any(|x| deref_enough(*x));
+ let _ = v.iter().any(|x: &u32| deref_enough(*x));
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().any(|x| arg_no_deref(&x));
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().any(|x: &u32| arg_no_deref(&x));
+ }
+
+ fn field_index_projection() {
+ struct FooDouble {
+ bar: Vec<Vec<i32>>,
+ }
+ struct Foo {
+ bar: Vec<i32>,
+ }
+ struct FooOuter {
+ inner: Foo,
+ inner_double: FooDouble,
+ }
+ let vfoo = vec![FooOuter {
+ inner: Foo { bar: vec![0, 1, 2, 3] },
+ inner_double: FooDouble {
+ bar: vec![vec![0, 1, 2, 3]],
+ },
+ }];
+ let _ = vfoo
+ .iter()
+ .any(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2);
+ }
+
+ fn index_field_projection() {
+ struct Foo {
+ bar: i32,
+ }
+ struct FooOuter {
+ inner: Vec<Foo>,
+ }
+ let vfoo = vec![FooOuter {
+ inner: vec![Foo { bar: 0 }],
+ }];
+ let _ = vfoo.iter().any(|v| v.inner[0].bar == 2);
+ }
+
+ fn double_deref_index_projection() {
+ let vfoo = vec![&&[0, 1, 2, 3]];
+ let _ = vfoo.iter().any(|x| (**x)[0] == 9);
+ }
+
+ fn method_call_by_ref() {
+ struct Foo {
+ bar: u32,
+ }
+ impl Foo {
+ pub fn by_ref(&self, x: &u32) -> bool {
+ *x == self.bar
+ }
+ }
+ let vfoo = vec![Foo { bar: 1 }];
+ let _ = vfoo.iter().any(|v| v.by_ref(&v.bar));
+ }
+
+ fn ref_bindings() {
+ let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y);
+ let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y);
+ }
+
+ fn test_string_1(s: &str) -> bool {
+ s.is_empty()
+ }
+
+ fn test_u32_1(s: &u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn test_u32_2(s: u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn projection_in_args_test() {
+ // Index projections
+ let lst = &[String::from("Hello"), String::from("world")];
+ let v: Vec<&[String]> = vec![lst];
+ let _ = v.iter().any(|s| s[0].is_empty());
+ let _ = v.iter().any(|s| test_string_1(&s[0]));
+
+ // Field projections
+ struct FieldProjection<'a> {
+ field: &'a u32,
+ }
+ let field = 123456789;
+ let instance = FieldProjection { field: &field };
+ let v = vec![instance];
+ let _ = v.iter().any(|fp| fp.field.is_power_of_two());
+ let _ = v.iter().any(|fp| test_u32_1(fp.field));
+ let _ = v.iter().any(|fp| test_u32_2(*fp.field));
+ }
+}
+
+mod issue9120 {
+ fn make_arg_no_deref_impl() -> impl Fn(&&u32) -> bool {
+ move |x: &&u32| **x == 78
+ }
+
+ fn make_arg_no_deref_dyn() -> Box<dyn Fn(&&u32) -> bool> {
+ Box::new(move |x: &&u32| **x == 78)
+ }
+
+ fn wrapper<T: Fn(&&u32) -> bool>(v: Vec<u32>, func: T) -> bool {
+ #[allow(clippy::redundant_closure)]
+ v.iter().any(|x: &u32| func(&x))
+ }
+
+ fn do_tests() {
+ let v = vec![3, 2, 1, 0];
+ let arg_no_deref_impl = make_arg_no_deref_impl();
+ let arg_no_deref_dyn = make_arg_no_deref_dyn();
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().any(|x: &u32| arg_no_deref_impl(&x));
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().any(|x: &u32| arg_no_deref_dyn(&x));
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().any(|x: &u32| (*arg_no_deref_dyn)(&x));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs b/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs
new file mode 100644
index 000000000..67e190ee3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs
@@ -0,0 +1,251 @@
+// run-rustfix
+#![allow(dead_code, clippy::explicit_auto_deref)]
+#![warn(clippy::search_is_some)]
+
+fn main() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+ let y = &&42;
+
+ // Check `find().is_some()`, single-line case.
+ let _ = v.iter().find(|&x| *x < 0).is_some();
+ let _ = (0..1).find(|x| **y == *x).is_some(); // one dereference less
+ let _ = (0..1).find(|x| *x == 0).is_some();
+ let _ = v.iter().find(|x| **x == 0).is_some();
+ let _ = (4..5).find(|x| *x == 1 || *x == 3 || *x == 5).is_some();
+ let _ = (1..3).find(|x| [1, 2, 3].contains(x)).is_some();
+ let _ = (1..3).find(|x| *x == 0 || [1, 2, 3].contains(x)).is_some();
+ let _ = (1..3).find(|x| [1, 2, 3].contains(x) || *x == 0).is_some();
+ let _ = (1..3)
+ .find(|x| [1, 2, 3].contains(x) || *x == 0 || [4, 5, 6].contains(x) || *x == -1)
+ .is_some();
+
+ // Check `position().is_some()`, single-line case.
+ let _ = v.iter().position(|&x| x < 0).is_some();
+
+ // Check `rposition().is_some()`, single-line case.
+ let _ = v.iter().rposition(|&x| x < 0).is_some();
+
+ let s1 = String::from("hello world");
+ let s2 = String::from("world");
+ // caller of `find()` is a `&'static str`
+ let _ = "hello world".find("world").is_some();
+ let _ = "hello world".find(&s2).is_some();
+ let _ = "hello world".find(&s2[2..]).is_some();
+ // caller of `find()` is a `String`
+ let _ = s1.find("world").is_some();
+ let _ = s1.find(&s2).is_some();
+ let _ = s1.find(&s2[2..]).is_some();
+ // caller of `find()` is slice of `String`
+ let _ = s1[2..].find("world").is_some();
+ let _ = s1[2..].find(&s2).is_some();
+ let _ = s1[2..].find(&s2[2..]).is_some();
+}
+
+#[allow(clippy::clone_on_copy, clippy::map_clone)]
+mod issue7392 {
+ struct Player {
+ hand: Vec<usize>,
+ }
+ fn filter() {
+ let p = Player {
+ hand: vec![1, 2, 3, 4, 5],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|c| filter_hand.iter().find(|cc| c == cc).is_some())
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ struct PlayerTuple {
+ hand: Vec<(usize, char)>,
+ }
+ fn filter_tuple() {
+ let p = PlayerTuple {
+ hand: vec![(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')],
+ };
+ let filter_hand = vec![5];
+ let _ = p
+ .hand
+ .iter()
+ .filter(|(c, _)| filter_hand.iter().find(|cc| c == *cc).is_some())
+ .map(|c| c.clone())
+ .collect::<Vec<_>>();
+ }
+
+ fn field_projection() {
+ struct Foo {
+ foo: i32,
+ bar: u32,
+ }
+ let vfoo = vec![Foo { foo: 1, bar: 2 }];
+ let _ = vfoo.iter().find(|v| v.foo == 1 && v.bar == 2).is_some();
+
+ let vfoo = vec![(42, Foo { foo: 1, bar: 2 })];
+ let _ = vfoo
+ .iter()
+ .find(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2)
+ .is_some();
+ }
+
+ fn index_projection() {
+ let vfoo = vec![[0, 1, 2, 3]];
+ let _ = vfoo.iter().find(|a| a[0] == 42).is_some();
+ }
+
+ #[allow(clippy::match_like_matches_macro)]
+ fn slice_projection() {
+ let vfoo = vec![[0, 1, 2, 3, 0, 1, 2, 3]];
+ let _ = vfoo.iter().find(|sub| sub[1..4].len() == 3).is_some();
+ }
+
+ fn please(x: &u32) -> bool {
+ *x == 9
+ }
+
+ fn deref_enough(x: u32) -> bool {
+ x == 78
+ }
+
+ fn arg_no_deref(x: &&u32) -> bool {
+ **x == 78
+ }
+
+ fn more_projections() {
+ let x = 19;
+ let ppx: &u32 = &x;
+ let _ = [ppx].iter().find(|ppp_x: &&&u32| please(**ppp_x)).is_some();
+ let _ = [String::from("Hey hey")].iter().find(|s| s.len() == 2).is_some();
+
+ let v = vec![3, 2, 1, 0];
+ let _ = v.iter().find(|x| deref_enough(**x)).is_some();
+ let _ = v.iter().find(|x: &&u32| deref_enough(**x)).is_some();
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x| arg_no_deref(x)).is_some();
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x: &&u32| arg_no_deref(x)).is_some();
+ }
+
+ fn field_index_projection() {
+ struct FooDouble {
+ bar: Vec<Vec<i32>>,
+ }
+ struct Foo {
+ bar: Vec<i32>,
+ }
+ struct FooOuter {
+ inner: Foo,
+ inner_double: FooDouble,
+ }
+ let vfoo = vec![FooOuter {
+ inner: Foo { bar: vec![0, 1, 2, 3] },
+ inner_double: FooDouble {
+ bar: vec![vec![0, 1, 2, 3]],
+ },
+ }];
+ let _ = vfoo
+ .iter()
+ .find(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2)
+ .is_some();
+ }
+
+ fn index_field_projection() {
+ struct Foo {
+ bar: i32,
+ }
+ struct FooOuter {
+ inner: Vec<Foo>,
+ }
+ let vfoo = vec![FooOuter {
+ inner: vec![Foo { bar: 0 }],
+ }];
+ let _ = vfoo.iter().find(|v| v.inner[0].bar == 2).is_some();
+ }
+
+ fn double_deref_index_projection() {
+ let vfoo = vec![&&[0, 1, 2, 3]];
+ let _ = vfoo.iter().find(|x| (**x)[0] == 9).is_some();
+ }
+
+ fn method_call_by_ref() {
+ struct Foo {
+ bar: u32,
+ }
+ impl Foo {
+ pub fn by_ref(&self, x: &u32) -> bool {
+ *x == self.bar
+ }
+ }
+ let vfoo = vec![Foo { bar: 1 }];
+ let _ = vfoo.iter().find(|v| v.by_ref(&v.bar)).is_some();
+ }
+
+ fn ref_bindings() {
+ let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_some();
+ let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_some();
+ }
+
+ fn test_string_1(s: &str) -> bool {
+ s.is_empty()
+ }
+
+ fn test_u32_1(s: &u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn test_u32_2(s: u32) -> bool {
+ s.is_power_of_two()
+ }
+
+ fn projection_in_args_test() {
+ // Index projections
+ let lst = &[String::from("Hello"), String::from("world")];
+ let v: Vec<&[String]> = vec![lst];
+ let _ = v.iter().find(|s| s[0].is_empty()).is_some();
+ let _ = v.iter().find(|s| test_string_1(&s[0])).is_some();
+
+ // Field projections
+ struct FieldProjection<'a> {
+ field: &'a u32,
+ }
+ let field = 123456789;
+ let instance = FieldProjection { field: &field };
+ let v = vec![instance];
+ let _ = v.iter().find(|fp| fp.field.is_power_of_two()).is_some();
+ let _ = v.iter().find(|fp| test_u32_1(fp.field)).is_some();
+ let _ = v.iter().find(|fp| test_u32_2(*fp.field)).is_some();
+ }
+}
+
+mod issue9120 {
+ fn make_arg_no_deref_impl() -> impl Fn(&&u32) -> bool {
+ move |x: &&u32| **x == 78
+ }
+
+ fn make_arg_no_deref_dyn() -> Box<dyn Fn(&&u32) -> bool> {
+ Box::new(move |x: &&u32| **x == 78)
+ }
+
+ fn wrapper<T: Fn(&&u32) -> bool>(v: Vec<u32>, func: T) -> bool {
+ #[allow(clippy::redundant_closure)]
+ v.iter().find(|x: &&u32| func(x)).is_some()
+ }
+
+ fn do_tests() {
+ let v = vec![3, 2, 1, 0];
+ let arg_no_deref_impl = make_arg_no_deref_impl();
+ let arg_no_deref_dyn = make_arg_no_deref_dyn();
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x: &&u32| arg_no_deref_impl(x)).is_some();
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x: &&u32| arg_no_deref_dyn(x)).is_some();
+
+ #[allow(clippy::redundant_closure)]
+ let _ = v.iter().find(|x: &&u32| (*arg_no_deref_dyn)(x)).is_some();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr b/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr
new file mode 100644
index 000000000..c5c3c92c9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr
@@ -0,0 +1,292 @@
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:10:22
+ |
+LL | let _ = v.iter().find(|&x| *x < 0).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| *x < 0)`
+ |
+ = note: `-D clippy::search-is-some` implied by `-D warnings`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:11:20
+ |
+LL | let _ = (0..1).find(|x| **y == *x).is_some(); // one dereference less
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| **y == x)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:12:20
+ |
+LL | let _ = (0..1).find(|x| *x == 0).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| x == 0)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:13:22
+ |
+LL | let _ = v.iter().find(|x| **x == 0).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| *x == 0)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:14:20
+ |
+LL | let _ = (4..5).find(|x| *x == 1 || *x == 3 || *x == 5).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| x == 1 || x == 3 || x == 5)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:15:20
+ |
+LL | let _ = (1..3).find(|x| [1, 2, 3].contains(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| [1, 2, 3].contains(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:16:20
+ |
+LL | let _ = (1..3).find(|x| *x == 0 || [1, 2, 3].contains(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| x == 0 || [1, 2, 3].contains(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:17:20
+ |
+LL | let _ = (1..3).find(|x| [1, 2, 3].contains(x) || *x == 0).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| [1, 2, 3].contains(&x) || x == 0)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:19:10
+ |
+LL | .find(|x| [1, 2, 3].contains(x) || *x == 0 || [4, 5, 6].contains(x) || *x == -1)
+ | __________^
+LL | | .is_some();
+ | |__________________^ help: use `any()` instead: `any(|x| [1, 2, 3].contains(&x) || x == 0 || [4, 5, 6].contains(&x) || x == -1)`
+
+error: called `is_some()` after searching an `Iterator` with `position`
+ --> $DIR/search_is_some_fixable_some.rs:23:22
+ |
+LL | let _ = v.iter().position(|&x| x < 0).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|&x| x < 0)`
+
+error: called `is_some()` after searching an `Iterator` with `rposition`
+ --> $DIR/search_is_some_fixable_some.rs:26:22
+ |
+LL | let _ = v.iter().rposition(|&x| x < 0).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|&x| x < 0)`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:31:27
+ |
+LL | let _ = "hello world".find("world").is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains("world")`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:32:27
+ |
+LL | let _ = "hello world".find(&s2).is_some();
+ | ^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains(&s2)`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:33:27
+ |
+LL | let _ = "hello world".find(&s2[2..]).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains(&s2[2..])`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:35:16
+ |
+LL | let _ = s1.find("world").is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains("world")`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:36:16
+ |
+LL | let _ = s1.find(&s2).is_some();
+ | ^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains(&s2)`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:37:16
+ |
+LL | let _ = s1.find(&s2[2..]).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains(&s2[2..])`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:39:21
+ |
+LL | let _ = s1[2..].find("world").is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains("world")`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:40:21
+ |
+LL | let _ = s1[2..].find(&s2).is_some();
+ | ^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains(&s2)`
+
+error: called `is_some()` after calling `find()` on a string
+ --> $DIR/search_is_some_fixable_some.rs:41:21
+ |
+LL | let _ = s1[2..].find(&s2[2..]).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use `contains()` instead: `contains(&s2[2..])`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:57:44
+ |
+LL | .filter(|c| filter_hand.iter().find(|cc| c == cc).is_some())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|cc| c == &cc)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:73:49
+ |
+LL | .filter(|(c, _)| filter_hand.iter().find(|cc| c == *cc).is_some())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|cc| c == cc)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:84:29
+ |
+LL | let _ = vfoo.iter().find(|v| v.foo == 1 && v.bar == 2).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|v| v.foo == 1 && v.bar == 2)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:89:14
+ |
+LL | .find(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2)
+ | ______________^
+LL | | .is_some();
+ | |______________________^ help: use `any()` instead: `any(|(i, v)| *i == 42 && v.foo == 1 && v.bar == 2)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:95:29
+ |
+LL | let _ = vfoo.iter().find(|a| a[0] == 42).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|a| a[0] == 42)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:101:29
+ |
+LL | let _ = vfoo.iter().find(|sub| sub[1..4].len() == 3).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|sub| sub[1..4].len() == 3)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:119:30
+ |
+LL | let _ = [ppx].iter().find(|ppp_x: &&&u32| please(**ppp_x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|ppp_x: &&u32| please(ppp_x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:120:50
+ |
+LL | let _ = [String::from("Hey hey")].iter().find(|s| s.len() == 2).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|s| s.len() == 2)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:123:26
+ |
+LL | let _ = v.iter().find(|x| deref_enough(**x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| deref_enough(*x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:124:26
+ |
+LL | let _ = v.iter().find(|x: &&u32| deref_enough(**x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x: &u32| deref_enough(*x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:127:26
+ |
+LL | let _ = v.iter().find(|x| arg_no_deref(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| arg_no_deref(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:129:26
+ |
+LL | let _ = v.iter().find(|x: &&u32| arg_no_deref(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x: &u32| arg_no_deref(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:151:14
+ |
+LL | .find(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2)
+ | ______________^
+LL | | .is_some();
+ | |______________________^ help: use `any()` instead: `any(|v| v.inner_double.bar[0][0] == 2 && v.inner.bar[0] == 2)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:165:29
+ |
+LL | let _ = vfoo.iter().find(|v| v.inner[0].bar == 2).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|v| v.inner[0].bar == 2)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:170:29
+ |
+LL | let _ = vfoo.iter().find(|x| (**x)[0] == 9).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x| (**x)[0] == 9)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:183:29
+ |
+LL | let _ = vfoo.iter().find(|v| v.by_ref(&v.bar)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|v| v.by_ref(&v.bar))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:187:55
+ |
+LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|(&x, y)| x == *y)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:188:55
+ |
+LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|(&x, y)| x == *y)`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:207:26
+ |
+LL | let _ = v.iter().find(|s| s[0].is_empty()).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|s| s[0].is_empty())`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:208:26
+ |
+LL | let _ = v.iter().find(|s| test_string_1(&s[0])).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|s| test_string_1(&s[0]))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:217:26
+ |
+LL | let _ = v.iter().find(|fp| fp.field.is_power_of_two()).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|fp| fp.field.is_power_of_two())`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:218:26
+ |
+LL | let _ = v.iter().find(|fp| test_u32_1(fp.field)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|fp| test_u32_1(fp.field))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:219:26
+ |
+LL | let _ = v.iter().find(|fp| test_u32_2(*fp.field)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|fp| test_u32_2(*fp.field))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:234:18
+ |
+LL | v.iter().find(|x: &&u32| func(x)).is_some()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x: &u32| func(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:243:26
+ |
+LL | let _ = v.iter().find(|x: &&u32| arg_no_deref_impl(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x: &u32| arg_no_deref_impl(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:246:26
+ |
+LL | let _ = v.iter().find(|x: &&u32| arg_no_deref_dyn(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x: &u32| arg_no_deref_dyn(&x))`
+
+error: called `is_some()` after searching an `Iterator` with `find`
+ --> $DIR/search_is_some_fixable_some.rs:249:26
+ |
+LL | let _ = v.iter().find(|x: &&u32| (*arg_no_deref_dyn)(x)).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `any()` instead: `any(|x: &u32| (*arg_no_deref_dyn)(&x))`
+
+error: aborting due to 47 previous errors
+
diff --git a/src/tools/clippy/tests/ui/self_assignment.rs b/src/tools/clippy/tests/ui/self_assignment.rs
new file mode 100644
index 000000000..ef6476229
--- /dev/null
+++ b/src/tools/clippy/tests/ui/self_assignment.rs
@@ -0,0 +1,67 @@
+#![warn(clippy::self_assignment)]
+
+pub struct S<'a> {
+ a: i32,
+ b: [i32; 10],
+ c: Vec<Vec<i32>>,
+ e: &'a mut i32,
+ f: &'a mut i32,
+}
+
+pub fn positives(mut a: usize, b: &mut u32, mut s: S) {
+ a = a;
+ *b = *b;
+ s = s;
+ s.a = s.a;
+ s.b[10] = s.b[5 + 5];
+ s.c[0][1] = s.c[0][1];
+ s.b[a] = s.b[a];
+ *s.e = *s.e;
+ s.b[a + 10] = s.b[10 + a];
+
+ let mut t = (0, 1);
+ t.1 = t.1;
+ t.0 = (t.0);
+}
+
+pub fn negatives_not_equal(mut a: usize, b: &mut usize, mut s: S) {
+ dbg!(&a);
+ a = *b;
+ dbg!(&a);
+ s.b[1] += s.b[1];
+ s.b[1] = s.b[2];
+ s.c[1][0] = s.c[0][1];
+ s.b[a] = s.b[*b];
+ s.b[a + 10] = s.b[a + 11];
+ *s.e = *s.f;
+
+ let mut t = (0, 1);
+ t.0 = t.1;
+}
+
+#[allow(clippy::mixed_read_write_in_expression)]
+pub fn negatives_side_effects() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let mut i = 0;
+ v[{
+ i += 1;
+ i
+ }] = v[{
+ i += 1;
+ i
+ }];
+
+ fn next(n: &mut usize) -> usize {
+ let v = *n;
+ *n += 1;
+ v
+ }
+
+ let mut w = vec![1, 2, 3, 4, 5];
+ let mut i = 0;
+ let i = &mut i;
+ w[next(i)] = w[next(i)];
+ w[next(i)] = w[next(i)];
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/self_assignment.stderr b/src/tools/clippy/tests/ui/self_assignment.stderr
new file mode 100644
index 000000000..826e0d0ba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/self_assignment.stderr
@@ -0,0 +1,70 @@
+error: self-assignment of `a` to `a`
+ --> $DIR/self_assignment.rs:12:5
+ |
+LL | a = a;
+ | ^^^^^
+ |
+ = note: `-D clippy::self-assignment` implied by `-D warnings`
+
+error: self-assignment of `*b` to `*b`
+ --> $DIR/self_assignment.rs:13:5
+ |
+LL | *b = *b;
+ | ^^^^^^^
+
+error: self-assignment of `s` to `s`
+ --> $DIR/self_assignment.rs:14:5
+ |
+LL | s = s;
+ | ^^^^^
+
+error: self-assignment of `s.a` to `s.a`
+ --> $DIR/self_assignment.rs:15:5
+ |
+LL | s.a = s.a;
+ | ^^^^^^^^^
+
+error: self-assignment of `s.b[5 + 5]` to `s.b[10]`
+ --> $DIR/self_assignment.rs:16:5
+ |
+LL | s.b[10] = s.b[5 + 5];
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: self-assignment of `s.c[0][1]` to `s.c[0][1]`
+ --> $DIR/self_assignment.rs:17:5
+ |
+LL | s.c[0][1] = s.c[0][1];
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: self-assignment of `s.b[a]` to `s.b[a]`
+ --> $DIR/self_assignment.rs:18:5
+ |
+LL | s.b[a] = s.b[a];
+ | ^^^^^^^^^^^^^^^
+
+error: self-assignment of `*s.e` to `*s.e`
+ --> $DIR/self_assignment.rs:19:5
+ |
+LL | *s.e = *s.e;
+ | ^^^^^^^^^^^
+
+error: self-assignment of `s.b[10 + a]` to `s.b[a + 10]`
+ --> $DIR/self_assignment.rs:20:5
+ |
+LL | s.b[a + 10] = s.b[10 + a];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: self-assignment of `t.1` to `t.1`
+ --> $DIR/self_assignment.rs:23:5
+ |
+LL | t.1 = t.1;
+ | ^^^^^^^^^
+
+error: self-assignment of `(t.0)` to `t.0`
+ --> $DIR/self_assignment.rs:24:5
+ |
+LL | t.0 = (t.0);
+ | ^^^^^^^^^^^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/self_named_constructors.rs b/src/tools/clippy/tests/ui/self_named_constructors.rs
new file mode 100644
index 000000000..356f701c9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/self_named_constructors.rs
@@ -0,0 +1,59 @@
+#![warn(clippy::self_named_constructors)]
+
+struct ShouldSpawn;
+struct ShouldNotSpawn;
+
+impl ShouldSpawn {
+ pub fn should_spawn() -> ShouldSpawn {
+ ShouldSpawn
+ }
+
+ fn should_not_spawn() -> ShouldNotSpawn {
+ ShouldNotSpawn
+ }
+}
+
+impl ShouldNotSpawn {
+ pub fn new() -> ShouldNotSpawn {
+ ShouldNotSpawn
+ }
+}
+
+struct ShouldNotSpawnWithTrait;
+
+trait ShouldNotSpawnTrait {
+ type Item;
+}
+
+impl ShouldNotSpawnTrait for ShouldNotSpawnWithTrait {
+ type Item = Self;
+}
+
+impl ShouldNotSpawnWithTrait {
+ pub fn should_not_spawn_with_trait() -> impl ShouldNotSpawnTrait<Item = Self> {
+ ShouldNotSpawnWithTrait
+ }
+}
+
+// Same trait name and same type name should not spawn the lint
+#[derive(Default)]
+pub struct Default;
+
+trait TraitSameTypeName {
+ fn should_not_spawn() -> Self;
+}
+impl TraitSameTypeName for ShouldNotSpawn {
+ fn should_not_spawn() -> Self {
+ ShouldNotSpawn
+ }
+}
+
+struct SelfMethodShouldNotSpawn;
+
+impl SelfMethodShouldNotSpawn {
+ fn self_method_should_not_spawn(self) -> Self {
+ SelfMethodShouldNotSpawn
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/self_named_constructors.stderr b/src/tools/clippy/tests/ui/self_named_constructors.stderr
new file mode 100644
index 000000000..ba989f06d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/self_named_constructors.stderr
@@ -0,0 +1,12 @@
+error: constructor `should_spawn` has the same name as the type
+ --> $DIR/self_named_constructors.rs:7:5
+ |
+LL | / pub fn should_spawn() -> ShouldSpawn {
+LL | | ShouldSpawn
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::self-named-constructors` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs
new file mode 100644
index 000000000..91916e748
--- /dev/null
+++ b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs
@@ -0,0 +1,122 @@
+#![warn(clippy::semicolon_if_nothing_returned)]
+#![allow(clippy::redundant_closure)]
+#![feature(label_break_value)]
+#![feature(let_else)]
+
+fn get_unit() {}
+
+// the functions below trigger the lint
+fn main() {
+ println!("Hello")
+}
+
+fn hello() {
+ get_unit()
+}
+
+fn basic101(x: i32) {
+ let y: i32;
+ y = x + 1
+}
+
+#[rustfmt::skip]
+fn closure_error() {
+ let _d = || {
+ hello()
+ };
+}
+
+#[rustfmt::skip]
+fn unsafe_checks_error() {
+ use std::mem::MaybeUninit;
+ use std::ptr;
+
+ let mut s = MaybeUninit::<String>::uninit();
+ let _d = || unsafe {
+ ptr::drop_in_place(s.as_mut_ptr())
+ };
+}
+
+// this is fine
+fn print_sum(a: i32, b: i32) {
+ println!("{}", a + b);
+ assert_eq!(true, false);
+}
+
+fn foo(x: i32) {
+ let y: i32;
+ if x < 1 {
+ y = 4;
+ } else {
+ y = 5;
+ }
+}
+
+fn bar(x: i32) {
+ let y: i32;
+ match x {
+ 1 => y = 4,
+ _ => y = 32,
+ }
+}
+
+fn foobar(x: i32) {
+ let y: i32;
+ 'label: {
+ y = x + 1;
+ }
+}
+
+fn loop_test(x: i32) {
+ let y: i32;
+ for &ext in &["stdout", "stderr", "fixed"] {
+ println!("{}", ext);
+ }
+}
+
+fn closure() {
+ let _d = || hello();
+}
+
+#[rustfmt::skip]
+fn closure_block() {
+ let _d = || { hello() };
+}
+
+unsafe fn some_unsafe_op() {}
+unsafe fn some_other_unsafe_fn() {}
+
+fn do_something() {
+ unsafe { some_unsafe_op() };
+
+ unsafe { some_other_unsafe_fn() };
+}
+
+fn unsafe_checks() {
+ use std::mem::MaybeUninit;
+ use std::ptr;
+
+ let mut s = MaybeUninit::<String>::uninit();
+ let _d = || unsafe { ptr::drop_in_place(s.as_mut_ptr()) };
+}
+
+// Issue #7768
+#[rustfmt::skip]
+fn macro_with_semicolon() {
+ macro_rules! repro {
+ () => {
+ while false {
+ }
+ };
+ }
+ repro!();
+}
+
+fn function_returning_option() -> Option<i32> {
+ Some(1)
+}
+
+// No warning
+fn let_else_stmts() {
+ let Some(x) = function_returning_option() else { return; };
+}
diff --git a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr
new file mode 100644
index 000000000..41d2c1cfb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr
@@ -0,0 +1,34 @@
+error: consider adding a `;` to the last statement for consistent formatting
+ --> $DIR/semicolon_if_nothing_returned.rs:10:5
+ |
+LL | println!("Hello")
+ | ^^^^^^^^^^^^^^^^^ help: add a `;` here: `println!("Hello");`
+ |
+ = note: `-D clippy::semicolon-if-nothing-returned` implied by `-D warnings`
+
+error: consider adding a `;` to the last statement for consistent formatting
+ --> $DIR/semicolon_if_nothing_returned.rs:14:5
+ |
+LL | get_unit()
+ | ^^^^^^^^^^ help: add a `;` here: `get_unit();`
+
+error: consider adding a `;` to the last statement for consistent formatting
+ --> $DIR/semicolon_if_nothing_returned.rs:19:5
+ |
+LL | y = x + 1
+ | ^^^^^^^^^ help: add a `;` here: `y = x + 1;`
+
+error: consider adding a `;` to the last statement for consistent formatting
+ --> $DIR/semicolon_if_nothing_returned.rs:25:9
+ |
+LL | hello()
+ | ^^^^^^^ help: add a `;` here: `hello();`
+
+error: consider adding a `;` to the last statement for consistent formatting
+ --> $DIR/semicolon_if_nothing_returned.rs:36:9
+ |
+LL | ptr::drop_in_place(s.as_mut_ptr())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add a `;` here: `ptr::drop_in_place(s.as_mut_ptr());`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/serde.rs b/src/tools/clippy/tests/ui/serde.rs
new file mode 100644
index 000000000..5843344eb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/serde.rs
@@ -0,0 +1,47 @@
+#![warn(clippy::serde_api_misuse)]
+#![allow(dead_code)]
+
+extern crate serde;
+
+struct A;
+
+impl<'de> serde::de::Visitor<'de> for A {
+ type Value = ();
+
+ fn expecting(&self, _: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
+ unimplemented!()
+ }
+
+ fn visit_str<E>(self, _v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ unimplemented!()
+ }
+
+ fn visit_string<E>(self, _v: String) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ unimplemented!()
+ }
+}
+
+struct B;
+
+impl<'de> serde::de::Visitor<'de> for B {
+ type Value = ();
+
+ fn expecting(&self, _: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
+ unimplemented!()
+ }
+
+ fn visit_string<E>(self, _v: String) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ unimplemented!()
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/serde.stderr b/src/tools/clippy/tests/ui/serde.stderr
new file mode 100644
index 000000000..760c9c990
--- /dev/null
+++ b/src/tools/clippy/tests/ui/serde.stderr
@@ -0,0 +1,15 @@
+error: you should not implement `visit_string` without also implementing `visit_str`
+ --> $DIR/serde.rs:39:5
+ |
+LL | / fn visit_string<E>(self, _v: String) -> Result<Self::Value, E>
+LL | | where
+LL | | E: serde::de::Error,
+LL | | {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::serde-api-misuse` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/shadow.rs b/src/tools/clippy/tests/ui/shadow.rs
new file mode 100644
index 000000000..1fa9fc749
--- /dev/null
+++ b/src/tools/clippy/tests/ui/shadow.rs
@@ -0,0 +1,98 @@
+#![warn(clippy::shadow_same, clippy::shadow_reuse, clippy::shadow_unrelated)]
+#![allow(clippy::let_unit_value)]
+
+fn shadow_same() {
+ let x = 1;
+ let x = x;
+ let mut x = &x;
+ let x = &mut x;
+ let x = *x;
+}
+
+fn shadow_reuse() -> Option<()> {
+ let x = ([[0]], ());
+ let x = x.0;
+ let x = x[0];
+ let [x] = x;
+ let x = Some(x);
+ let x = foo(x);
+ let x = || x;
+ let x = Some(1).map(|_| x)?;
+ let y = 1;
+ let y = match y {
+ 1 => 2,
+ _ => 3,
+ };
+ None
+}
+
+fn shadow_unrelated() {
+ let x = 1;
+ let x = 2;
+}
+
+fn syntax() {
+ fn f(x: u32) {
+ let x = 1;
+ }
+ let x = 1;
+ match Some(1) {
+ Some(1) => {},
+ Some(x) => {
+ let x = 1;
+ },
+ _ => {},
+ }
+ if let Some(x) = Some(1) {}
+ while let Some(x) = Some(1) {}
+ let _ = |[x]: [u32; 1]| {
+ let x = 1;
+ };
+ let y = Some(1);
+ if let Some(y) = y {}
+}
+
+fn negative() {
+ match Some(1) {
+ Some(x) if x == 1 => {},
+ Some(x) => {},
+ None => {},
+ }
+ match [None, Some(1)] {
+ [Some(x), None] | [None, Some(x)] => {},
+ _ => {},
+ }
+ if let Some(x) = Some(1) {
+ let y = 1;
+ } else {
+ let x = 1;
+ let y = 1;
+ }
+ let x = 1;
+ #[allow(clippy::shadow_unrelated)]
+ let x = 1;
+}
+
+fn foo<T>(_: T) {}
+
+fn question_mark() -> Option<()> {
+ let val = 1;
+ // `?` expands with a `val` binding
+ None?;
+ None
+}
+
+pub async fn foo1(_a: i32) {}
+
+pub async fn foo2(_a: i32, _b: i64) {
+ let _b = _a;
+}
+
+fn ice_8748() {
+ let _ = [0; {
+ let x = 1;
+ if let Some(x) = Some(1) { x } else { 1 }
+ }];
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/shadow.stderr b/src/tools/clippy/tests/ui/shadow.stderr
new file mode 100644
index 000000000..43d76094d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/shadow.stderr
@@ -0,0 +1,281 @@
+error: `x` is shadowed by itself in `x`
+ --> $DIR/shadow.rs:6:9
+ |
+LL | let x = x;
+ | ^
+ |
+ = note: `-D clippy::shadow-same` implied by `-D warnings`
+note: previous binding is here
+ --> $DIR/shadow.rs:5:9
+ |
+LL | let x = 1;
+ | ^
+
+error: `mut x` is shadowed by itself in `&x`
+ --> $DIR/shadow.rs:7:13
+ |
+LL | let mut x = &x;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:6:9
+ |
+LL | let x = x;
+ | ^
+
+error: `x` is shadowed by itself in `&mut x`
+ --> $DIR/shadow.rs:8:9
+ |
+LL | let x = &mut x;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:7:9
+ |
+LL | let mut x = &x;
+ | ^^^^^
+
+error: `x` is shadowed by itself in `*x`
+ --> $DIR/shadow.rs:9:9
+ |
+LL | let x = *x;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:8:9
+ |
+LL | let x = &mut x;
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:14:9
+ |
+LL | let x = x.0;
+ | ^
+ |
+ = note: `-D clippy::shadow-reuse` implied by `-D warnings`
+note: previous binding is here
+ --> $DIR/shadow.rs:13:9
+ |
+LL | let x = ([[0]], ());
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:15:9
+ |
+LL | let x = x[0];
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:14:9
+ |
+LL | let x = x.0;
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:16:10
+ |
+LL | let [x] = x;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:15:9
+ |
+LL | let x = x[0];
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:17:9
+ |
+LL | let x = Some(x);
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:16:10
+ |
+LL | let [x] = x;
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:18:9
+ |
+LL | let x = foo(x);
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:17:9
+ |
+LL | let x = Some(x);
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:19:9
+ |
+LL | let x = || x;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:18:9
+ |
+LL | let x = foo(x);
+ | ^
+
+error: `x` is shadowed
+ --> $DIR/shadow.rs:20:9
+ |
+LL | let x = Some(1).map(|_| x)?;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:19:9
+ |
+LL | let x = || x;
+ | ^
+
+error: `y` is shadowed
+ --> $DIR/shadow.rs:22:9
+ |
+LL | let y = match y {
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:21:9
+ |
+LL | let y = 1;
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:31:9
+ |
+LL | let x = 2;
+ | ^
+ |
+ = note: `-D clippy::shadow-unrelated` implied by `-D warnings`
+note: previous binding is here
+ --> $DIR/shadow.rs:30:9
+ |
+LL | let x = 1;
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:36:13
+ |
+LL | let x = 1;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:35:10
+ |
+LL | fn f(x: u32) {
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:41:14
+ |
+LL | Some(x) => {
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:38:9
+ |
+LL | let x = 1;
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:42:17
+ |
+LL | let x = 1;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:41:14
+ |
+LL | Some(x) => {
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:46:17
+ |
+LL | if let Some(x) = Some(1) {}
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:38:9
+ |
+LL | let x = 1;
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:47:20
+ |
+LL | while let Some(x) = Some(1) {}
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:38:9
+ |
+LL | let x = 1;
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:48:15
+ |
+LL | let _ = |[x]: [u32; 1]| {
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:38:9
+ |
+LL | let x = 1;
+ | ^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:49:13
+ |
+LL | let x = 1;
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:48:15
+ |
+LL | let _ = |[x]: [u32; 1]| {
+ | ^
+
+error: `y` is shadowed
+ --> $DIR/shadow.rs:52:17
+ |
+LL | if let Some(y) = y {}
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:51:9
+ |
+LL | let y = Some(1);
+ | ^
+
+error: `_b` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:88:9
+ |
+LL | let _b = _a;
+ | ^^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:87:28
+ |
+LL | pub async fn foo2(_a: i32, _b: i64) {
+ | ^^
+
+error: `x` shadows a previous, unrelated binding
+ --> $DIR/shadow.rs:94:21
+ |
+LL | if let Some(x) = Some(1) { x } else { 1 }
+ | ^
+ |
+note: previous binding is here
+ --> $DIR/shadow.rs:93:13
+ |
+LL | let x = 1;
+ | ^
+
+error: aborting due to 23 previous errors
+
diff --git a/src/tools/clippy/tests/ui/short_circuit_statement.fixed b/src/tools/clippy/tests/ui/short_circuit_statement.fixed
new file mode 100644
index 000000000..dd22ecab0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/short_circuit_statement.fixed
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::short_circuit_statement)]
+#![allow(clippy::nonminimal_bool)]
+
+fn main() {
+ if f() { g(); }
+ if !f() { g(); }
+ if 1 != 2 { g(); }
+}
+
+fn f() -> bool {
+ true
+}
+
+fn g() -> bool {
+ false
+}
diff --git a/src/tools/clippy/tests/ui/short_circuit_statement.rs b/src/tools/clippy/tests/ui/short_circuit_statement.rs
new file mode 100644
index 000000000..73a55bf1f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/short_circuit_statement.rs
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::short_circuit_statement)]
+#![allow(clippy::nonminimal_bool)]
+
+fn main() {
+ f() && g();
+ f() || g();
+ 1 == 2 || g();
+}
+
+fn f() -> bool {
+ true
+}
+
+fn g() -> bool {
+ false
+}
diff --git a/src/tools/clippy/tests/ui/short_circuit_statement.stderr b/src/tools/clippy/tests/ui/short_circuit_statement.stderr
new file mode 100644
index 000000000..aa84ac3a7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/short_circuit_statement.stderr
@@ -0,0 +1,22 @@
+error: boolean short circuit operator in statement may be clearer using an explicit test
+ --> $DIR/short_circuit_statement.rs:7:5
+ |
+LL | f() && g();
+ | ^^^^^^^^^^^ help: replace it with: `if f() { g(); }`
+ |
+ = note: `-D clippy::short-circuit-statement` implied by `-D warnings`
+
+error: boolean short circuit operator in statement may be clearer using an explicit test
+ --> $DIR/short_circuit_statement.rs:8:5
+ |
+LL | f() || g();
+ | ^^^^^^^^^^^ help: replace it with: `if !f() { g(); }`
+
+error: boolean short circuit operator in statement may be clearer using an explicit test
+ --> $DIR/short_circuit_statement.rs:9:5
+ |
+LL | 1 == 2 || g();
+ | ^^^^^^^^^^^^^^ help: replace it with: `if 1 != 2 { g(); }`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs b/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs
new file mode 100644
index 000000000..50999c6f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs
@@ -0,0 +1,84 @@
+#![warn(clippy::all, clippy::pedantic)]
+#![allow(
+ clippy::missing_errors_doc,
+ clippy::needless_pass_by_value,
+ clippy::must_use_candidate,
+ clippy::unused_self,
+ clippy::needless_lifetimes,
+ clippy::missing_safety_doc,
+ clippy::wrong_self_convention,
+ clippy::missing_panics_doc,
+ clippy::return_self_not_must_use,
+ clippy::unused_async
+)]
+
+use std::ops::Mul;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+fn main() {}
+
+pub struct T1;
+impl T1 {
+ // corner cases: should not lint
+
+ // no error, not public interface
+ pub(crate) fn drop(&mut self) {}
+
+ // no error, private function
+ fn neg(self) -> Self {
+ self
+ }
+
+ // no error, private function
+ fn eq(&self, other: Self) -> bool {
+ true
+ }
+
+ // No error; self is a ref.
+ fn sub(&self, other: Self) -> &Self {
+ self
+ }
+
+ // No error; different number of arguments.
+ fn div(self) -> Self {
+ self
+ }
+
+ // No error; wrong return type.
+ fn rem(self, other: Self) {}
+
+ // Fine
+ fn into_u32(self) -> u32 {
+ 0
+ }
+
+ fn into_u16(&self) -> u16 {
+ 0
+ }
+
+ fn to_something(self) -> u32 {
+ 0
+ }
+
+ fn new(self) -> Self {
+ unimplemented!();
+ }
+
+ pub fn next<'b>(&'b mut self) -> Option<&'b mut T1> {
+ unimplemented!();
+ }
+}
+
+pub struct T2;
+impl T2 {
+ // Shouldn't trigger lint as it is unsafe.
+ pub unsafe fn add(self, rhs: Self) -> Self {
+ self
+ }
+
+ // Should not trigger lint since this is an async function.
+ pub async fn next(&mut self) -> Option<Self> {
+ None
+ }
+}
diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs
new file mode 100644
index 000000000..20d49f5a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs
@@ -0,0 +1,87 @@
+#![warn(clippy::all, clippy::pedantic)]
+#![allow(
+ clippy::missing_errors_doc,
+ clippy::needless_pass_by_value,
+ clippy::must_use_candidate,
+ clippy::unused_self,
+ clippy::needless_lifetimes,
+ clippy::missing_safety_doc,
+ clippy::wrong_self_convention,
+ clippy::missing_panics_doc,
+ clippy::return_self_not_must_use
+)]
+
+use std::ops::Mul;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+fn main() {}
+pub struct T;
+
+impl T {
+ // *****************************************
+ // trait method list part 1, should lint all
+ // *****************************************
+ pub fn add(self, other: T) -> T {
+ unimplemented!()
+ }
+
+ pub fn as_mut(&mut self) -> &mut T {
+ unimplemented!()
+ }
+
+ pub fn as_ref(&self) -> &T {
+ unimplemented!()
+ }
+
+ pub fn bitand(self, rhs: T) -> T {
+ unimplemented!()
+ }
+
+ pub fn bitor(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn bitxor(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn borrow(&self) -> &str {
+ unimplemented!()
+ }
+
+ pub fn borrow_mut(&mut self) -> &mut str {
+ unimplemented!()
+ }
+
+ pub fn clone(&self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn cmp(&self, other: &Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn default() -> Self {
+ unimplemented!()
+ }
+
+ pub fn deref(&self) -> &Self {
+ unimplemented!()
+ }
+
+ pub fn deref_mut(&mut self) -> &mut Self {
+ unimplemented!()
+ }
+
+ pub fn div(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn drop(&mut self) {
+ unimplemented!()
+ }
+ // **********
+ // part 1 end
+ // **********
+}
diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr
new file mode 100644
index 000000000..2b7d4628c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr
@@ -0,0 +1,143 @@
+error: method `add` can be confused for the standard trait method `std::ops::Add::add`
+ --> $DIR/method_list_1.rs:25:5
+ |
+LL | / pub fn add(self, other: T) -> T {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::should-implement-trait` implied by `-D warnings`
+ = help: consider implementing the trait `std::ops::Add` or choosing a less ambiguous method name
+
+error: method `as_mut` can be confused for the standard trait method `std::convert::AsMut::as_mut`
+ --> $DIR/method_list_1.rs:29:5
+ |
+LL | / pub fn as_mut(&mut self) -> &mut T {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::convert::AsMut` or choosing a less ambiguous method name
+
+error: method `as_ref` can be confused for the standard trait method `std::convert::AsRef::as_ref`
+ --> $DIR/method_list_1.rs:33:5
+ |
+LL | / pub fn as_ref(&self) -> &T {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::convert::AsRef` or choosing a less ambiguous method name
+
+error: method `bitand` can be confused for the standard trait method `std::ops::BitAnd::bitand`
+ --> $DIR/method_list_1.rs:37:5
+ |
+LL | / pub fn bitand(self, rhs: T) -> T {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::BitAnd` or choosing a less ambiguous method name
+
+error: method `bitor` can be confused for the standard trait method `std::ops::BitOr::bitor`
+ --> $DIR/method_list_1.rs:41:5
+ |
+LL | / pub fn bitor(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::BitOr` or choosing a less ambiguous method name
+
+error: method `bitxor` can be confused for the standard trait method `std::ops::BitXor::bitxor`
+ --> $DIR/method_list_1.rs:45:5
+ |
+LL | / pub fn bitxor(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::BitXor` or choosing a less ambiguous method name
+
+error: method `borrow` can be confused for the standard trait method `std::borrow::Borrow::borrow`
+ --> $DIR/method_list_1.rs:49:5
+ |
+LL | / pub fn borrow(&self) -> &str {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::borrow::Borrow` or choosing a less ambiguous method name
+
+error: method `borrow_mut` can be confused for the standard trait method `std::borrow::BorrowMut::borrow_mut`
+ --> $DIR/method_list_1.rs:53:5
+ |
+LL | / pub fn borrow_mut(&mut self) -> &mut str {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::borrow::BorrowMut` or choosing a less ambiguous method name
+
+error: method `clone` can be confused for the standard trait method `std::clone::Clone::clone`
+ --> $DIR/method_list_1.rs:57:5
+ |
+LL | / pub fn clone(&self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::clone::Clone` or choosing a less ambiguous method name
+
+error: method `cmp` can be confused for the standard trait method `std::cmp::Ord::cmp`
+ --> $DIR/method_list_1.rs:61:5
+ |
+LL | / pub fn cmp(&self, other: &Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::cmp::Ord` or choosing a less ambiguous method name
+
+error: method `deref` can be confused for the standard trait method `std::ops::Deref::deref`
+ --> $DIR/method_list_1.rs:69:5
+ |
+LL | / pub fn deref(&self) -> &Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Deref` or choosing a less ambiguous method name
+
+error: method `deref_mut` can be confused for the standard trait method `std::ops::DerefMut::deref_mut`
+ --> $DIR/method_list_1.rs:73:5
+ |
+LL | / pub fn deref_mut(&mut self) -> &mut Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::DerefMut` or choosing a less ambiguous method name
+
+error: method `div` can be confused for the standard trait method `std::ops::Div::div`
+ --> $DIR/method_list_1.rs:77:5
+ |
+LL | / pub fn div(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Div` or choosing a less ambiguous method name
+
+error: method `drop` can be confused for the standard trait method `std::ops::Drop::drop`
+ --> $DIR/method_list_1.rs:81:5
+ |
+LL | / pub fn drop(&mut self) {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Drop` or choosing a less ambiguous method name
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs
new file mode 100644
index 000000000..3efec1c52
--- /dev/null
+++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs
@@ -0,0 +1,88 @@
+#![warn(clippy::all, clippy::pedantic)]
+#![allow(
+ clippy::missing_errors_doc,
+ clippy::needless_pass_by_value,
+ clippy::must_use_candidate,
+ clippy::unused_self,
+ clippy::needless_lifetimes,
+ clippy::missing_safety_doc,
+ clippy::wrong_self_convention,
+ clippy::missing_panics_doc,
+ clippy::return_self_not_must_use
+)]
+
+use std::ops::Mul;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+fn main() {}
+pub struct T;
+
+impl T {
+ // *****************************************
+ // trait method list part 2, should lint all
+ // *****************************************
+
+ pub fn eq(&self, other: &Self) -> bool {
+ unimplemented!()
+ }
+
+ pub fn from_iter<T>(iter: T) -> Self {
+ unimplemented!()
+ }
+
+ pub fn from_str(s: &str) -> Result<Self, Self> {
+ unimplemented!()
+ }
+
+ pub fn hash(&self, state: &mut T) {
+ unimplemented!()
+ }
+
+ pub fn index(&self, index: usize) -> &Self {
+ unimplemented!()
+ }
+
+ pub fn index_mut(&mut self, index: usize) -> &mut Self {
+ unimplemented!()
+ }
+
+ pub fn into_iter(self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn mul(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn neg(self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn next(&mut self) -> Option<Self> {
+ unimplemented!()
+ }
+
+ pub fn not(self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn rem(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn shl(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn shr(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+
+ pub fn sub(self, rhs: Self) -> Self {
+ unimplemented!()
+ }
+ // **********
+ // part 2 end
+ // **********
+}
diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr
new file mode 100644
index 000000000..b6fd43569
--- /dev/null
+++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr
@@ -0,0 +1,153 @@
+error: method `eq` can be confused for the standard trait method `std::cmp::PartialEq::eq`
+ --> $DIR/method_list_2.rs:26:5
+ |
+LL | / pub fn eq(&self, other: &Self) -> bool {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::should-implement-trait` implied by `-D warnings`
+ = help: consider implementing the trait `std::cmp::PartialEq` or choosing a less ambiguous method name
+
+error: method `from_iter` can be confused for the standard trait method `std::iter::FromIterator::from_iter`
+ --> $DIR/method_list_2.rs:30:5
+ |
+LL | / pub fn from_iter<T>(iter: T) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::iter::FromIterator` or choosing a less ambiguous method name
+
+error: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str`
+ --> $DIR/method_list_2.rs:34:5
+ |
+LL | / pub fn from_str(s: &str) -> Result<Self, Self> {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name
+
+error: method `hash` can be confused for the standard trait method `std::hash::Hash::hash`
+ --> $DIR/method_list_2.rs:38:5
+ |
+LL | / pub fn hash(&self, state: &mut T) {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::hash::Hash` or choosing a less ambiguous method name
+
+error: method `index` can be confused for the standard trait method `std::ops::Index::index`
+ --> $DIR/method_list_2.rs:42:5
+ |
+LL | / pub fn index(&self, index: usize) -> &Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Index` or choosing a less ambiguous method name
+
+error: method `index_mut` can be confused for the standard trait method `std::ops::IndexMut::index_mut`
+ --> $DIR/method_list_2.rs:46:5
+ |
+LL | / pub fn index_mut(&mut self, index: usize) -> &mut Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::IndexMut` or choosing a less ambiguous method name
+
+error: method `into_iter` can be confused for the standard trait method `std::iter::IntoIterator::into_iter`
+ --> $DIR/method_list_2.rs:50:5
+ |
+LL | / pub fn into_iter(self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::iter::IntoIterator` or choosing a less ambiguous method name
+
+error: method `mul` can be confused for the standard trait method `std::ops::Mul::mul`
+ --> $DIR/method_list_2.rs:54:5
+ |
+LL | / pub fn mul(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Mul` or choosing a less ambiguous method name
+
+error: method `neg` can be confused for the standard trait method `std::ops::Neg::neg`
+ --> $DIR/method_list_2.rs:58:5
+ |
+LL | / pub fn neg(self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Neg` or choosing a less ambiguous method name
+
+error: method `next` can be confused for the standard trait method `std::iter::Iterator::next`
+ --> $DIR/method_list_2.rs:62:5
+ |
+LL | / pub fn next(&mut self) -> Option<Self> {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::iter::Iterator` or choosing a less ambiguous method name
+
+error: method `not` can be confused for the standard trait method `std::ops::Not::not`
+ --> $DIR/method_list_2.rs:66:5
+ |
+LL | / pub fn not(self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Not` or choosing a less ambiguous method name
+
+error: method `rem` can be confused for the standard trait method `std::ops::Rem::rem`
+ --> $DIR/method_list_2.rs:70:5
+ |
+LL | / pub fn rem(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Rem` or choosing a less ambiguous method name
+
+error: method `shl` can be confused for the standard trait method `std::ops::Shl::shl`
+ --> $DIR/method_list_2.rs:74:5
+ |
+LL | / pub fn shl(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Shl` or choosing a less ambiguous method name
+
+error: method `shr` can be confused for the standard trait method `std::ops::Shr::shr`
+ --> $DIR/method_list_2.rs:78:5
+ |
+LL | / pub fn shr(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Shr` or choosing a less ambiguous method name
+
+error: method `sub` can be confused for the standard trait method `std::ops::Sub::sub`
+ --> $DIR/method_list_2.rs:82:5
+ |
+LL | / pub fn sub(self, rhs: Self) -> Self {
+LL | | unimplemented!()
+LL | | }
+ | |_____^
+ |
+ = help: consider implementing the trait `std::ops::Sub` or choosing a less ambiguous method name
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs
new file mode 100644
index 000000000..84ecf1ea5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs
@@ -0,0 +1,630 @@
+// FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
+// // run-rustfix
+
+#![warn(clippy::significant_drop_in_scrutinee)]
+#![allow(clippy::single_match)]
+#![allow(clippy::match_single_binding)]
+#![allow(unused_assignments)]
+#![allow(dead_code)]
+
+use std::num::ParseIntError;
+use std::ops::Deref;
+use std::sync::atomic::{AtomicU64, Ordering};
+use std::sync::RwLock;
+use std::sync::{Mutex, MutexGuard};
+
+struct State {}
+
+impl State {
+ fn foo(&self) -> bool {
+ true
+ }
+
+ fn bar(&self) {}
+}
+
+fn should_not_trigger_lint_with_mutex_guard_outside_match() {
+ let mutex = Mutex::new(State {});
+
+ // Should not trigger lint because the temporary should drop at the `;` on the line before the match
+ let is_foo = mutex.lock().unwrap().foo();
+ match is_foo {
+ true => {
+ mutex.lock().unwrap().bar();
+ },
+ false => {},
+ };
+}
+
+fn should_not_trigger_lint_with_mutex_guard_when_taking_ownership_in_match() {
+ let mutex = Mutex::new(State {});
+
+ // Should not trigger lint because the scrutinee is explicitly returning the MutexGuard,
+ // so its lifetime should not be surprising.
+ match mutex.lock() {
+ Ok(guard) => {
+ guard.foo();
+ mutex.lock().unwrap().bar();
+ },
+ _ => {},
+ };
+}
+
+fn should_trigger_lint_with_mutex_guard_in_match_scrutinee() {
+ let mutex = Mutex::new(State {});
+
+ // Should trigger lint because the lifetime of the temporary MutexGuard is surprising because it
+ // is preserved until the end of the match, but there is no clear indication that this is the
+ // case.
+ match mutex.lock().unwrap().foo() {
+ true => {
+ mutex.lock().unwrap().bar();
+ },
+ false => {},
+ };
+}
+
+fn should_not_trigger_lint_with_mutex_guard_in_match_scrutinee_when_lint_allowed() {
+ let mutex = Mutex::new(State {});
+
+ // Lint should not be triggered because it is "allowed" below.
+ #[allow(clippy::significant_drop_in_scrutinee)]
+ match mutex.lock().unwrap().foo() {
+ true => {
+ mutex.lock().unwrap().bar();
+ },
+ false => {},
+ };
+}
+
+fn should_not_trigger_lint_for_insignificant_drop() {
+ // Should not trigger lint because there are no temporaries whose drops have a significant
+ // side effect.
+ match 1u64.to_string().is_empty() {
+ true => {
+ println!("It was empty")
+ },
+ false => {
+ println!("It was not empty")
+ },
+ }
+}
+
+struct StateWithMutex {
+ m: Mutex<u64>,
+}
+
+struct MutexGuardWrapper<'a> {
+ mg: MutexGuard<'a, u64>,
+}
+
+impl<'a> MutexGuardWrapper<'a> {
+ fn get_the_value(&self) -> u64 {
+ *self.mg.deref()
+ }
+}
+
+struct MutexGuardWrapperWrapper<'a> {
+ mg: MutexGuardWrapper<'a>,
+}
+
+impl<'a> MutexGuardWrapperWrapper<'a> {
+ fn get_the_value(&self) -> u64 {
+ *self.mg.mg.deref()
+ }
+}
+
+impl StateWithMutex {
+ fn lock_m(&self) -> MutexGuardWrapper<'_> {
+ MutexGuardWrapper {
+ mg: self.m.lock().unwrap(),
+ }
+ }
+
+ fn lock_m_m(&self) -> MutexGuardWrapperWrapper<'_> {
+ MutexGuardWrapperWrapper {
+ mg: MutexGuardWrapper {
+ mg: self.m.lock().unwrap(),
+ },
+ }
+ }
+
+ fn foo(&self) -> bool {
+ true
+ }
+
+ fn bar(&self) {}
+}
+
+fn should_trigger_lint_with_wrapped_mutex() {
+ let s = StateWithMutex { m: Mutex::new(1) };
+
+ // Should trigger lint because a temporary contains a type with a significant drop and its
+ // lifetime is not obvious. Additionally, it is not obvious from looking at the scrutinee that
+ // the temporary contains such a type, making it potentially even more surprising.
+ match s.lock_m().get_the_value() {
+ 1 => {
+ println!("Got 1. Is it still 1?");
+ println!("{}", s.lock_m().get_the_value());
+ },
+ 2 => {
+ println!("Got 2. Is it still 2?");
+ println!("{}", s.lock_m().get_the_value());
+ },
+ _ => {},
+ }
+ println!("All done!");
+}
+
+fn should_trigger_lint_with_double_wrapped_mutex() {
+ let s = StateWithMutex { m: Mutex::new(1) };
+
+ // Should trigger lint because a temporary contains a type which further contains a type with a
+ // significant drop and its lifetime is not obvious. Additionally, it is not obvious from
+ // looking at the scrutinee that the temporary contains such a type, making it potentially even
+ // more surprising.
+ match s.lock_m_m().get_the_value() {
+ 1 => {
+ println!("Got 1. Is it still 1?");
+ println!("{}", s.lock_m().get_the_value());
+ },
+ 2 => {
+ println!("Got 2. Is it still 2?");
+ println!("{}", s.lock_m().get_the_value());
+ },
+ _ => {},
+ }
+ println!("All done!");
+}
+
+struct Counter {
+ i: AtomicU64,
+}
+
+#[clippy::has_significant_drop]
+struct CounterWrapper<'a> {
+ counter: &'a Counter,
+}
+
+impl<'a> CounterWrapper<'a> {
+ fn new(counter: &Counter) -> CounterWrapper {
+ counter.i.fetch_add(1, Ordering::Relaxed);
+ CounterWrapper { counter }
+ }
+}
+
+impl<'a> Drop for CounterWrapper<'a> {
+ fn drop(&mut self) {
+ self.counter.i.fetch_sub(1, Ordering::Relaxed);
+ }
+}
+
+impl Counter {
+ fn temp_increment(&self) -> Vec<CounterWrapper> {
+ vec![CounterWrapper::new(self), CounterWrapper::new(self)]
+ }
+}
+
+fn should_trigger_lint_for_vec() {
+ let counter = Counter { i: AtomicU64::new(0) };
+
+ // Should trigger lint because the temporary in the scrutinee is a collection of types
+ // which have significant drops. The types with significant drops are also non-obvious when
+ // reading the expression in the scrutinee.
+ match counter.temp_increment().len() {
+ 2 => {
+ let current_count = counter.i.load(Ordering::Relaxed);
+ println!("Current count {}", current_count);
+ assert_eq!(current_count, 0);
+ },
+ 1 => {},
+ 3 => {},
+ _ => {},
+ };
+}
+
+struct StateWithField {
+ s: String,
+}
+
+// Should trigger lint only on the type in the tuple which is created using a temporary
+// with a significant drop. Additionally, this test ensures that the format of the tuple
+// is preserved correctly in the suggestion.
+fn should_trigger_lint_for_tuple_in_scrutinee() {
+ let mutex1 = Mutex::new(StateWithField { s: "one".to_owned() });
+
+ {
+ match (mutex1.lock().unwrap().s.len(), true) {
+ (3, _) => {
+ println!("started");
+ mutex1.lock().unwrap().s.len();
+ println!("done");
+ },
+ (_, _) => {},
+ };
+
+ match (true, mutex1.lock().unwrap().s.len(), true) {
+ (_, 3, _) => {
+ println!("started");
+ mutex1.lock().unwrap().s.len();
+ println!("done");
+ },
+ (_, _, _) => {},
+ };
+
+ let mutex2 = Mutex::new(StateWithField { s: "two".to_owned() });
+ match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) {
+ (3, _, 3) => {
+ println!("started");
+ mutex1.lock().unwrap().s.len();
+ mutex2.lock().unwrap().s.len();
+ println!("done");
+ },
+ (_, _, _) => {},
+ };
+
+ let mutex3 = Mutex::new(StateWithField { s: "three".to_owned() });
+ match mutex3.lock().unwrap().s.as_str() {
+ "three" => {
+ println!("started");
+ mutex1.lock().unwrap().s.len();
+ mutex2.lock().unwrap().s.len();
+ println!("done");
+ },
+ _ => {},
+ };
+
+ match (true, mutex3.lock().unwrap().s.as_str()) {
+ (_, "three") => {
+ println!("started");
+ mutex1.lock().unwrap().s.len();
+ mutex2.lock().unwrap().s.len();
+ println!("done");
+ },
+ (_, _) => {},
+ };
+ }
+}
+
+// Should trigger lint when either side of a binary operation creates a temporary with a
+// significant drop.
+// To avoid potential unnecessary copies or creating references that would trigger the significant
+// drop problem, the lint recommends moving the entire binary operation.
+fn should_trigger_lint_for_accessing_field_in_mutex_in_one_side_of_binary_op() {
+ let mutex = Mutex::new(StateWithField { s: "state".to_owned() });
+
+ match mutex.lock().unwrap().s.len() > 1 {
+ true => {
+ mutex.lock().unwrap().s.len();
+ },
+ false => {},
+ };
+
+ match 1 < mutex.lock().unwrap().s.len() {
+ true => {
+ mutex.lock().unwrap().s.len();
+ },
+ false => {},
+ };
+}
+
+// Should trigger lint when both sides of a binary operation create a temporary with a
+// significant drop.
+// To avoid potential unnecessary copies or creating references that would trigger the significant
+// drop problem, the lint recommends moving the entire binary operation.
+fn should_trigger_lint_for_accessing_fields_in_mutex_in_both_sides_of_binary_op() {
+ let mutex1 = Mutex::new(StateWithField { s: "state".to_owned() });
+ let mutex2 = Mutex::new(StateWithField {
+ s: "statewithfield".to_owned(),
+ });
+
+ match mutex1.lock().unwrap().s.len() < mutex2.lock().unwrap().s.len() {
+ true => {
+ println!(
+ "{} < {}",
+ mutex1.lock().unwrap().s.len(),
+ mutex2.lock().unwrap().s.len()
+ );
+ },
+ false => {},
+ };
+
+ match mutex1.lock().unwrap().s.len() >= mutex2.lock().unwrap().s.len() {
+ true => {
+ println!(
+ "{} >= {}",
+ mutex1.lock().unwrap().s.len(),
+ mutex2.lock().unwrap().s.len()
+ );
+ },
+ false => {},
+ };
+}
+
+fn should_not_trigger_lint_for_closure_in_scrutinee() {
+ let mutex1 = Mutex::new(StateWithField { s: "one".to_owned() });
+
+ let get_mutex_guard = || mutex1.lock().unwrap().s.len();
+
+ // Should not trigger lint because the temporary with a significant drop will be dropped
+ // at the end of the closure, so the MutexGuard will be unlocked and not have a potentially
+ // surprising lifetime.
+ match get_mutex_guard() > 1 {
+ true => {
+ mutex1.lock().unwrap().s.len();
+ },
+ false => {},
+ };
+}
+
+fn should_trigger_lint_for_return_from_closure_in_scrutinee() {
+ let mutex1 = Mutex::new(StateWithField { s: "one".to_owned() });
+
+ let get_mutex_guard = || mutex1.lock().unwrap();
+
+ // Should trigger lint because the temporary with a significant drop is returned from the
+ // closure but not used directly in any match arms, so it has a potentially surprising lifetime.
+ match get_mutex_guard().s.len() > 1 {
+ true => {
+ mutex1.lock().unwrap().s.len();
+ },
+ false => {},
+ };
+}
+
+fn should_trigger_lint_for_return_from_match_in_scrutinee() {
+ let mutex1 = Mutex::new(StateWithField { s: "one".to_owned() });
+ let mutex2 = Mutex::new(StateWithField { s: "two".to_owned() });
+
+ let i = 100;
+
+ // Should trigger lint because the nested match within the scrutinee returns a temporary with a
+ // significant drop that is not used directly in any match arms, so it has a potentially
+ // surprising lifetime.
+ match match i {
+ 100 => mutex1.lock().unwrap(),
+ _ => mutex2.lock().unwrap(),
+ }
+ .s
+ .len()
+ > 1
+ {
+ true => {
+ mutex1.lock().unwrap().s.len();
+ },
+ false => {
+ println!("nothing to do here");
+ },
+ };
+}
+
+fn should_trigger_lint_for_return_from_if_in_scrutinee() {
+ let mutex1 = Mutex::new(StateWithField { s: "one".to_owned() });
+ let mutex2 = Mutex::new(StateWithField { s: "two".to_owned() });
+
+ let i = 100;
+
+ // Should trigger lint because the nested if-expression within the scrutinee returns a temporary
+ // with a significant drop that is not used directly in any match arms, so it has a potentially
+ // surprising lifetime.
+ match if i > 1 {
+ mutex1.lock().unwrap()
+ } else {
+ mutex2.lock().unwrap()
+ }
+ .s
+ .len()
+ > 1
+ {
+ true => {
+ mutex1.lock().unwrap().s.len();
+ },
+ false => {},
+ };
+}
+
+fn should_not_trigger_lint_for_if_in_scrutinee() {
+ let mutex = Mutex::new(StateWithField { s: "state".to_owned() });
+
+ let i = 100;
+
+ // Should not trigger the lint because the temporary with a significant drop *is* dropped within
+ // the body of the if-expression nested within the match scrutinee, and therefore does not have
+ // a potentially surprising lifetime.
+ match if i > 1 {
+ mutex.lock().unwrap().s.len() > 1
+ } else {
+ false
+ } {
+ true => {
+ mutex.lock().unwrap().s.len();
+ },
+ false => {},
+ };
+}
+
+struct StateWithBoxedMutexGuard {
+ u: Mutex<u64>,
+}
+
+impl StateWithBoxedMutexGuard {
+ fn new() -> StateWithBoxedMutexGuard {
+ StateWithBoxedMutexGuard { u: Mutex::new(42) }
+ }
+ fn lock(&self) -> Box<MutexGuard<u64>> {
+ Box::new(self.u.lock().unwrap())
+ }
+}
+
+fn should_trigger_lint_for_boxed_mutex_guard() {
+ let s = StateWithBoxedMutexGuard::new();
+
+ // Should trigger lint because a temporary Box holding a type with a significant drop in a match
+ // scrutinee may have a potentially surprising lifetime.
+ match s.lock().deref().deref() {
+ 0 | 1 => println!("Value was less than 2"),
+ _ => println!("Value is {}", s.lock().deref()),
+ };
+}
+
+struct StateStringWithBoxedMutexGuard {
+ s: Mutex<String>,
+}
+
+impl StateStringWithBoxedMutexGuard {
+ fn new() -> StateStringWithBoxedMutexGuard {
+ StateStringWithBoxedMutexGuard {
+ s: Mutex::new("A String".to_owned()),
+ }
+ }
+ fn lock(&self) -> Box<MutexGuard<String>> {
+ Box::new(self.s.lock().unwrap())
+ }
+}
+
+fn should_trigger_lint_for_boxed_mutex_guard_holding_string() {
+ let s = StateStringWithBoxedMutexGuard::new();
+
+ let matcher = String::from("A String");
+
+ // Should trigger lint because a temporary Box holding a type with a significant drop in a match
+ // scrutinee may have a potentially surprising lifetime.
+ match s.lock().deref().deref() {
+ matcher => println!("Value is {}", s.lock().deref()),
+ _ => println!("Value was not a match"),
+ };
+}
+
+struct StateWithIntField {
+ i: u64,
+}
+
+// Should trigger lint when either side of an assign expression contains a temporary with a
+// significant drop, because the temporary's lifetime will be extended to the end of the match.
+// To avoid potential unnecessary copies or creating references that would trigger the significant
+// drop problem, the lint recommends moving the entire binary operation.
+fn should_trigger_lint_in_assign_expr() {
+ let mutex = Mutex::new(StateWithIntField { i: 10 });
+
+ let mut i = 100;
+
+ match mutex.lock().unwrap().i = i {
+ _ => {
+ println!("{}", mutex.lock().unwrap().i);
+ },
+ };
+
+ match i = mutex.lock().unwrap().i {
+ _ => {
+ println!("{}", mutex.lock().unwrap().i);
+ },
+ };
+
+ match mutex.lock().unwrap().i += 1 {
+ _ => {
+ println!("{}", mutex.lock().unwrap().i);
+ },
+ };
+
+ match i += mutex.lock().unwrap().i {
+ _ => {
+ println!("{}", mutex.lock().unwrap().i);
+ },
+ };
+}
+
+#[derive(Debug)]
+enum RecursiveEnum {
+ Foo(Option<Box<RecursiveEnum>>),
+}
+
+#[derive(Debug)]
+enum GenericRecursiveEnum<T> {
+ Foo(T, Option<Box<GenericRecursiveEnum<T>>>),
+}
+
+fn should_not_cause_stack_overflow() {
+ // Test that when a type recursively contains itself, a stack overflow does not occur when
+ // checking sub-types for significant drops.
+ let f = RecursiveEnum::Foo(Some(Box::new(RecursiveEnum::Foo(None))));
+ match f {
+ RecursiveEnum::Foo(Some(f)) => {
+ println!("{:?}", f)
+ },
+ RecursiveEnum::Foo(f) => {
+ println!("{:?}", f)
+ },
+ }
+
+ let f = GenericRecursiveEnum::Foo(1u64, Some(Box::new(GenericRecursiveEnum::Foo(2u64, None))));
+ match f {
+ GenericRecursiveEnum::Foo(i, Some(f)) => {
+ println!("{} {:?}", i, f)
+ },
+ GenericRecursiveEnum::Foo(i, f) => {
+ println!("{} {:?}", i, f)
+ },
+ }
+}
+
+fn should_not_produce_lint_for_try_desugar() -> Result<u64, ParseIntError> {
+ // TryDesugar (i.e. using `?` for a Result type) will turn into a match but is out of scope
+ // for this lint
+ let rwlock = RwLock::new("1".to_string());
+ let result = rwlock.read().unwrap().parse::<u64>()?;
+ println!("{}", result);
+ rwlock.write().unwrap().push('2');
+ Ok(result)
+}
+
+struct ResultReturner {
+ s: String,
+}
+
+impl ResultReturner {
+ fn to_number(&self) -> Result<i64, ParseIntError> {
+ self.s.parse::<i64>()
+ }
+}
+
+fn should_trigger_lint_for_non_ref_move_and_clone_suggestion() {
+ let rwlock = RwLock::<ResultReturner>::new(ResultReturner { s: "1".to_string() });
+ match rwlock.read().unwrap().to_number() {
+ Ok(n) => println!("Converted to number: {}", n),
+ Err(e) => println!("Could not convert {} to number", e),
+ };
+}
+
+fn should_trigger_lint_for_read_write_lock_for_loop() {
+ // For-in loops desugar to match expressions and are prone to the type of deadlock this lint is
+ // designed to look for.
+ let rwlock = RwLock::<Vec<String>>::new(vec!["1".to_string()]);
+ for s in rwlock.read().unwrap().iter() {
+ println!("{}", s);
+ }
+}
+
+fn do_bar(mutex: &Mutex<State>) {
+ mutex.lock().unwrap().bar();
+}
+
+fn should_trigger_lint_without_significant_drop_in_arm() {
+ let mutex = Mutex::new(State {});
+
+ // Should trigger lint because the lifetime of the temporary MutexGuard is surprising because it
+ // is preserved until the end of the match, but there is no clear indication that this is the
+ // case.
+ match mutex.lock().unwrap().foo() {
+ true => do_bar(&mutex),
+ false => {},
+ };
+}
+
+fn should_not_trigger_on_significant_iterator_drop() {
+ let lines = std::io::stdin().lines();
+ for line in lines {
+ println!("foo: {}", line.unwrap());
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr
new file mode 100644
index 000000000..88ea6bce2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr
@@ -0,0 +1,497 @@
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:59:11
+ |
+LL | match mutex.lock().unwrap().foo() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | true => {
+LL | mutex.lock().unwrap().bar();
+ | --------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: `-D clippy::significant-drop-in-scrutinee` implied by `-D warnings`
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex.lock().unwrap().foo();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:145:11
+ |
+LL | match s.lock_m().get_the_value() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | println!("{}", s.lock_m().get_the_value());
+ | ---------- another value with significant `Drop` created here
+...
+LL | }
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = s.lock_m().get_the_value();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:166:11
+ |
+LL | match s.lock_m_m().get_the_value() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | println!("{}", s.lock_m().get_the_value());
+ | ---------- another value with significant `Drop` created here
+...
+LL | }
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = s.lock_m_m().get_the_value();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:214:11
+ |
+LL | match counter.temp_increment().len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = counter.temp_increment().len();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:237:16
+ |
+LL | match (mutex1.lock().unwrap().s.len(), true) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex1.lock().unwrap().s.len();
+LL ~ match (value, true) {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:246:22
+ |
+LL | match (true, mutex1.lock().unwrap().s.len(), true) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex1.lock().unwrap().s.len();
+LL ~ match (true, value, true) {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:256:16
+ |
+LL | match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+LL | mutex2.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex1.lock().unwrap().s.len();
+LL ~ match (value, true, mutex2.lock().unwrap().s.len()) {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:256:54
+ |
+LL | match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+LL | mutex2.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex2.lock().unwrap().s.len();
+LL ~ match (mutex1.lock().unwrap().s.len(), true, value) {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:267:15
+ |
+LL | match mutex3.lock().unwrap().s.as_str() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+LL | mutex2.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:277:22
+ |
+LL | match (true, mutex3.lock().unwrap().s.as_str()) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+LL | mutex2.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:296:11
+ |
+LL | match mutex.lock().unwrap().s.len() > 1 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | true => {
+LL | mutex.lock().unwrap().s.len();
+ | --------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex.lock().unwrap().s.len() > 1;
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:303:11
+ |
+LL | match 1 < mutex.lock().unwrap().s.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | true => {
+LL | mutex.lock().unwrap().s.len();
+ | --------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = 1 < mutex.lock().unwrap().s.len();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:321:11
+ |
+LL | match mutex1.lock().unwrap().s.len() < mutex2.lock().unwrap().s.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len(),
+ | ---------------------- another value with significant `Drop` created here
+LL | mutex2.lock().unwrap().s.len()
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex1.lock().unwrap().s.len() < mutex2.lock().unwrap().s.len();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:332:11
+ |
+LL | match mutex1.lock().unwrap().s.len() >= mutex2.lock().unwrap().s.len() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | mutex1.lock().unwrap().s.len(),
+ | ---------------------- another value with significant `Drop` created here
+LL | mutex2.lock().unwrap().s.len()
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex1.lock().unwrap().s.len() >= mutex2.lock().unwrap().s.len();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:367:11
+ |
+LL | match get_mutex_guard().s.len() > 1 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | true => {
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = get_mutex_guard().s.len() > 1;
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:384:11
+ |
+LL | match match i {
+ | ___________^
+LL | | 100 => mutex1.lock().unwrap(),
+LL | | _ => mutex2.lock().unwrap(),
+LL | | }
+LL | | .s
+LL | | .len()
+LL | | > 1
+ | |___________^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = match i {
+LL + 100 => mutex1.lock().unwrap(),
+LL + _ => mutex2.lock().unwrap(),
+LL + }
+LL + .s
+LL + .len()
+LL + > 1;
+LL ~ match value
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:410:11
+ |
+LL | match if i > 1 {
+ | ___________^
+LL | | mutex1.lock().unwrap()
+LL | | } else {
+LL | | mutex2.lock().unwrap()
+... |
+LL | | .len()
+LL | | > 1
+ | |___________^
+...
+LL | mutex1.lock().unwrap().s.len();
+ | ---------------------- another value with significant `Drop` created here
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = if i > 1 {
+LL + mutex1.lock().unwrap()
+LL + } else {
+LL + mutex2.lock().unwrap()
+LL + }
+LL + .s
+LL + .len()
+LL + > 1;
+LL ~ match value
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:464:11
+ |
+LL | match s.lock().deref().deref() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+LL | 0 | 1 => println!("Value was less than 2"),
+LL | _ => println!("Value is {}", s.lock().deref()),
+ | ---------------- another value with significant `Drop` created here
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match and create a copy
+ |
+LL ~ let value = *s.lock().deref().deref();
+LL ~ match value {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:492:11
+ |
+LL | match s.lock().deref().deref() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+LL | matcher => println!("Value is {}", s.lock().deref()),
+ | ---------------- another value with significant `Drop` created here
+LL | _ => println!("Value was not a match"),
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:511:11
+ |
+LL | match mutex.lock().unwrap().i = i {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | _ => {
+LL | println!("{}", mutex.lock().unwrap().i);
+ | --------------------- another value with significant `Drop` created here
+LL | },
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ mutex.lock().unwrap().i = i;
+LL ~ match () {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:517:11
+ |
+LL | match i = mutex.lock().unwrap().i {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | _ => {
+LL | println!("{}", mutex.lock().unwrap().i);
+ | --------------------- another value with significant `Drop` created here
+LL | },
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ i = mutex.lock().unwrap().i;
+LL ~ match () {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:523:11
+ |
+LL | match mutex.lock().unwrap().i += 1 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | _ => {
+LL | println!("{}", mutex.lock().unwrap().i);
+ | --------------------- another value with significant `Drop` created here
+LL | },
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ mutex.lock().unwrap().i += 1;
+LL ~ match () {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:529:11
+ |
+LL | match i += mutex.lock().unwrap().i {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | _ => {
+LL | println!("{}", mutex.lock().unwrap().i);
+ | --------------------- another value with significant `Drop` created here
+LL | },
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ i += mutex.lock().unwrap().i;
+LL ~ match () {
+ |
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:592:11
+ |
+LL | match rwlock.read().unwrap().to_number() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+
+error: temporary with significant `Drop` in `for` loop condition will live until the end of the `for` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:602:14
+ |
+LL | for s in rwlock.read().unwrap().iter() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | println!("{}", s);
+LL | }
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+
+error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
+ --> $DIR/significant_drop_in_scrutinee.rs:617:11
+ |
+LL | match mutex.lock().unwrap().foo() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | };
+ | - temporary lives until here
+ |
+ = note: this might lead to deadlocks or other unexpected behavior
+help: try moving the temporary above the match
+ |
+LL ~ let value = mutex.lock().unwrap().foo();
+LL ~ match value {
+ |
+
+error: aborting due to 26 previous errors
+
diff --git a/src/tools/clippy/tests/ui/similar_names.rs b/src/tools/clippy/tests/ui/similar_names.rs
new file mode 100644
index 000000000..c21225d15
--- /dev/null
+++ b/src/tools/clippy/tests/ui/similar_names.rs
@@ -0,0 +1,121 @@
+#![warn(clippy::similar_names)]
+#![allow(
+ unused,
+ clippy::println_empty_string,
+ clippy::empty_loop,
+ clippy::diverging_sub_expression,
+ clippy::let_unit_value
+)]
+
+struct Foo {
+ apple: i32,
+ bpple: i32,
+}
+
+fn main() {
+ let specter: i32;
+ let spectre: i32;
+
+ let apple: i32;
+
+ let bpple: i32;
+
+ let cpple: i32;
+
+ let a_bar: i32;
+ let b_bar: i32;
+ let c_bar: i32;
+
+ let items = [5];
+ for item in &items {
+ loop {}
+ }
+
+ let foo_x: i32;
+ let foo_y: i32;
+
+ let rhs: i32;
+ let lhs: i32;
+
+ let bla_rhs: i32;
+ let bla_lhs: i32;
+
+ let blubrhs: i32;
+ let blublhs: i32;
+
+ let blubx: i32;
+ let bluby: i32;
+
+ let cake: i32;
+ let cakes: i32;
+ let coke: i32;
+
+ match 5 {
+ cheese @ 1 => {},
+ rabbit => panic!(),
+ }
+ let cheese: i32;
+ match (42, 43) {
+ (cheese1, 1) => {},
+ (cheese2, 2) => panic!(),
+ _ => println!(""),
+ }
+ let ipv4: i32;
+ let ipv6: i32;
+ let abcd1: i32;
+ let abdc2: i32;
+ let xyz1abc: i32;
+ let xyz2abc: i32;
+ let xyzeabc: i32;
+
+ let parser: i32;
+ let parsed: i32;
+ let parsee: i32;
+
+ let setter: i32;
+ let getter: i32;
+ let tx1: i32;
+ let rx1: i32;
+ let tx_cake: i32;
+ let rx_cake: i32;
+
+ // names often used in win32 code (for example WindowProc)
+ let wparam: i32;
+ let lparam: i32;
+
+ let iter: i32;
+ let item: i32;
+}
+
+fn foo() {
+ let Foo { apple, bpple } = unimplemented!();
+ let Foo {
+ apple: spring,
+ bpple: sprang,
+ } = unimplemented!();
+}
+
+// false positive similar_names (#3057, #2651)
+// clippy claimed total_reg_src_size and total_size and
+// numb_reg_src_checkouts and total_bin_size were similar
+#[derive(Debug, Clone)]
+pub(crate) struct DirSizes {
+ pub(crate) total_size: u64,
+ pub(crate) numb_bins: u64,
+ pub(crate) total_bin_size: u64,
+ pub(crate) total_reg_size: u64,
+ pub(crate) total_git_db_size: u64,
+ pub(crate) total_git_repos_bare_size: u64,
+ pub(crate) numb_git_repos_bare_repos: u64,
+ pub(crate) numb_git_checkouts: u64,
+ pub(crate) total_git_chk_size: u64,
+ pub(crate) total_reg_cache_size: u64,
+ pub(crate) total_reg_src_size: u64,
+ pub(crate) numb_reg_cache_entries: u64,
+ pub(crate) numb_reg_src_checkouts: u64,
+}
+
+fn ignore_underscore_prefix() {
+ let hello: ();
+ let _hello: ();
+}
diff --git a/src/tools/clippy/tests/ui/similar_names.stderr b/src/tools/clippy/tests/ui/similar_names.stderr
new file mode 100644
index 000000000..6e7726938
--- /dev/null
+++ b/src/tools/clippy/tests/ui/similar_names.stderr
@@ -0,0 +1,87 @@
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:21:9
+ |
+LL | let bpple: i32;
+ | ^^^^^
+ |
+ = note: `-D clippy::similar-names` implied by `-D warnings`
+note: existing binding defined here
+ --> $DIR/similar_names.rs:19:9
+ |
+LL | let apple: i32;
+ | ^^^^^
+
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:23:9
+ |
+LL | let cpple: i32;
+ | ^^^^^
+ |
+note: existing binding defined here
+ --> $DIR/similar_names.rs:19:9
+ |
+LL | let apple: i32;
+ | ^^^^^
+
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:47:9
+ |
+LL | let bluby: i32;
+ | ^^^^^
+ |
+note: existing binding defined here
+ --> $DIR/similar_names.rs:46:9
+ |
+LL | let blubx: i32;
+ | ^^^^^
+
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:51:9
+ |
+LL | let coke: i32;
+ | ^^^^
+ |
+note: existing binding defined here
+ --> $DIR/similar_names.rs:49:9
+ |
+LL | let cake: i32;
+ | ^^^^
+
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:69:9
+ |
+LL | let xyzeabc: i32;
+ | ^^^^^^^
+ |
+note: existing binding defined here
+ --> $DIR/similar_names.rs:67:9
+ |
+LL | let xyz1abc: i32;
+ | ^^^^^^^
+
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:73:9
+ |
+LL | let parsee: i32;
+ | ^^^^^^
+ |
+note: existing binding defined here
+ --> $DIR/similar_names.rs:71:9
+ |
+LL | let parser: i32;
+ | ^^^^^^
+
+error: binding's name is too similar to existing binding
+ --> $DIR/similar_names.rs:94:16
+ |
+LL | bpple: sprang,
+ | ^^^^^^
+ |
+note: existing binding defined here
+ --> $DIR/similar_names.rs:93:16
+ |
+LL | apple: spring,
+ | ^^^^^^
+
+error: aborting due to 7 previous errors
+
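The notes above pair each flagged binding with the earlier one it resembles; the lint compares every new binding against those already in scope. A tiny illustrative sketch of a flagged pair and a rename that avoids it (not taken from the patch):

fn main() {
    // Flagged: `bpple` differs from `apple` by one character and is easy to mix up.
    let apple = 1;
    let bpple = 2;

    // A clearly distinct name avoids the warning.
    let banana = 3;

    println!("{}", apple + bpple + banana);
}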
diff --git a/src/tools/clippy/tests/ui/single_char_add_str.fixed b/src/tools/clippy/tests/ui/single_char_add_str.fixed
new file mode 100644
index 000000000..63a6d37a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_add_str.fixed
@@ -0,0 +1,45 @@
+// run-rustfix
+#![warn(clippy::single_char_add_str)]
+
+macro_rules! get_string {
+ () => {
+ String::from("Hello world!")
+ };
+}
+
+fn main() {
+ // `push_str` tests
+
+ let mut string = String::new();
+ string.push('R');
+ string.push('\'');
+
+ string.push('u');
+ string.push_str("st");
+ string.push_str("");
+ string.push('\x52');
+ string.push('\u{0052}');
+ string.push('a');
+
+ get_string!().push('ö');
+
+ // `insert_str` tests
+
+ let mut string = String::new();
+ string.insert(0, 'R');
+ string.insert(1, '\'');
+
+ string.insert(0, 'u');
+ string.insert_str(2, "st");
+ string.insert_str(0, "");
+ string.insert(0, '\x52');
+ string.insert(0, '\u{0052}');
+ let x: usize = 2;
+ string.insert(x, 'a');
+ const Y: usize = 1;
+ string.insert(Y, 'a');
+ string.insert(Y, '"');
+ string.insert(Y, '\'');
+
+ get_string!().insert(1, '?');
+}
diff --git a/src/tools/clippy/tests/ui/single_char_add_str.rs b/src/tools/clippy/tests/ui/single_char_add_str.rs
new file mode 100644
index 000000000..a799ea7d8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_add_str.rs
@@ -0,0 +1,45 @@
+// run-rustfix
+#![warn(clippy::single_char_add_str)]
+
+macro_rules! get_string {
+ () => {
+ String::from("Hello world!")
+ };
+}
+
+fn main() {
+ // `push_str` tests
+
+ let mut string = String::new();
+ string.push_str("R");
+ string.push_str("'");
+
+ string.push('u');
+ string.push_str("st");
+ string.push_str("");
+ string.push_str("\x52");
+ string.push_str("\u{0052}");
+ string.push_str(r##"a"##);
+
+ get_string!().push_str("ö");
+
+ // `insert_str` tests
+
+ let mut string = String::new();
+ string.insert_str(0, "R");
+ string.insert_str(1, "'");
+
+ string.insert(0, 'u');
+ string.insert_str(2, "st");
+ string.insert_str(0, "");
+ string.insert_str(0, "\x52");
+ string.insert_str(0, "\u{0052}");
+ let x: usize = 2;
+ string.insert_str(x, r##"a"##);
+ const Y: usize = 1;
+ string.insert_str(Y, r##"a"##);
+ string.insert_str(Y, r##"""##);
+ string.insert_str(Y, r##"'"##);
+
+ get_string!().insert_str(1, "?");
+}
diff --git a/src/tools/clippy/tests/ui/single_char_add_str.stderr b/src/tools/clippy/tests/ui/single_char_add_str.stderr
new file mode 100644
index 000000000..55d91583a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_add_str.stderr
@@ -0,0 +1,94 @@
+error: calling `push_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:14:5
+ |
+LL | string.push_str("R");
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using `push` with a character literal: `string.push('R')`
+ |
+ = note: `-D clippy::single-char-add-str` implied by `-D warnings`
+
+error: calling `push_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:15:5
+ |
+LL | string.push_str("'");
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider using `push` with a character literal: `string.push('/'')`
+
+error: calling `push_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:20:5
+ |
+LL | string.push_str("/x52");
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `push` with a character literal: `string.push('/x52')`
+
+error: calling `push_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:21:5
+ |
+LL | string.push_str("/u{0052}");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `push` with a character literal: `string.push('/u{0052}')`
+
+error: calling `push_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:22:5
+ |
+LL | string.push_str(r##"a"##);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `push` with a character literal: `string.push('a')`
+
+error: calling `push_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:24:5
+ |
+LL | get_string!().push_str("ö");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `push` with a character literal: `get_string!().push('ö')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:29:5
+ |
+LL | string.insert_str(0, "R");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(0, 'R')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:30:5
+ |
+LL | string.insert_str(1, "'");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(1, '/'')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:35:5
+ |
+LL | string.insert_str(0, "/x52");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(0, '/x52')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:36:5
+ |
+LL | string.insert_str(0, "/u{0052}");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(0, '/u{0052}')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:38:5
+ |
+LL | string.insert_str(x, r##"a"##);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(x, 'a')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:40:5
+ |
+LL | string.insert_str(Y, r##"a"##);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(Y, 'a')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:41:5
+ |
+LL | string.insert_str(Y, r##"""##);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(Y, '"')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:42:5
+ |
+LL | string.insert_str(Y, r##"'"##);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `string.insert(Y, '/'')`
+
+error: calling `insert_str()` using a single-character string literal
+ --> $DIR/single_char_add_str.rs:44:5
+ |
+LL | get_string!().insert_str(1, "?");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `insert` with a character literal: `get_string!().insert(1, '?')`
+
+error: aborting due to 15 previous errors
+
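The fixed file above shows the shape of the rewrite: one-character string arguments to `push_str`/`insert_str` become `char` arguments to `push`/`insert`. A minimal illustrative sketch (not taken from the patch):

fn main() {
    let mut s = String::from("Hell");

    // Flagged: a single-character string literal passed to `push_str`.
    s.push_str("o");

    // Suggested: push the character directly; same result without a `&str`.
    s.push('!');

    assert_eq!(s, "Hello!");
}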
diff --git a/src/tools/clippy/tests/ui/single_char_lifetime_names.rs b/src/tools/clippy/tests/ui/single_char_lifetime_names.rs
new file mode 100644
index 000000000..69c5b236f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_lifetime_names.rs
@@ -0,0 +1,44 @@
+#![warn(clippy::single_char_lifetime_names)]
+#![allow(clippy::let_unit_value)]
+
+// Lifetimes should only be linted when they're introduced
+struct DiagnosticCtx<'a, 'b>
+where
+ 'a: 'b,
+{
+ _source: &'a str,
+ _unit: &'b (),
+}
+
+// Only the lifetimes on the `impl`'s generics should be linted
+impl<'a, 'b> DiagnosticCtx<'a, 'b> {
+ fn new(source: &'a str, unit: &'b ()) -> DiagnosticCtx<'a, 'b> {
+ Self {
+ _source: source,
+ _unit: unit,
+ }
+ }
+}
+
+// No lifetimes should be linted here
+impl<'src, 'unit> DiagnosticCtx<'src, 'unit> {
+ fn new_pass(source: &'src str, unit: &'unit ()) -> DiagnosticCtx<'src, 'unit> {
+ Self {
+ _source: source,
+ _unit: unit,
+ }
+ }
+}
+
+// Only 'a should be linted here
+fn split_once<'a>(base: &'a str, other: &'_ str) -> (&'a str, Option<&'a str>) {
+ base.split_once(other)
+ .map(|(left, right)| (left, Some(right)))
+ .unwrap_or((base, None))
+}
+
+fn main() {
+ let src = "loop {}";
+ let unit = ();
+ DiagnosticCtx::new(src, &unit);
+}
diff --git a/src/tools/clippy/tests/ui/single_char_lifetime_names.stderr b/src/tools/clippy/tests/ui/single_char_lifetime_names.stderr
new file mode 100644
index 000000000..1438b3999
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_lifetime_names.stderr
@@ -0,0 +1,43 @@
+error: single-character lifetime names are likely uninformative
+ --> $DIR/single_char_lifetime_names.rs:5:22
+ |
+LL | struct DiagnosticCtx<'a, 'b>
+ | ^^
+ |
+ = note: `-D clippy::single-char-lifetime-names` implied by `-D warnings`
+ = help: use a more informative name
+
+error: single-character lifetime names are likely uninformative
+ --> $DIR/single_char_lifetime_names.rs:5:26
+ |
+LL | struct DiagnosticCtx<'a, 'b>
+ | ^^
+ |
+ = help: use a more informative name
+
+error: single-character lifetime names are likely uninformative
+ --> $DIR/single_char_lifetime_names.rs:14:6
+ |
+LL | impl<'a, 'b> DiagnosticCtx<'a, 'b> {
+ | ^^
+ |
+ = help: use a more informative name
+
+error: single-character lifetime names are likely uninformative
+ --> $DIR/single_char_lifetime_names.rs:14:10
+ |
+LL | impl<'a, 'b> DiagnosticCtx<'a, 'b> {
+ | ^^
+ |
+ = help: use a more informative name
+
+error: single-character lifetime names are likely uninformative
+ --> $DIR/single_char_lifetime_names.rs:34:15
+ |
+LL | fn split_once<'a>(base: &'a str, other: &'_ str) -> (&'a str, Option<&'a str>) {
+ | ^^
+ |
+ = help: use a more informative name
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/single_char_pattern.fixed b/src/tools/clippy/tests/ui/single_char_pattern.fixed
new file mode 100644
index 000000000..68e267267
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_pattern.fixed
@@ -0,0 +1,67 @@
+// run-rustfix
+
+#![allow(unused_must_use)]
+
+use std::collections::HashSet;
+
+fn main() {
+ let x = "foo";
+ x.split('x');
+ x.split("xx");
+ x.split('x');
+
+ let y = "x";
+ x.split(y);
+ x.split('ß');
+ x.split('ℝ');
+ x.split('💣');
+ // Can't use this lint for unicode code points which don't fit in a char
+ x.split("❤️");
+ x.split_inclusive('x');
+ x.contains('x');
+ x.starts_with('x');
+ x.ends_with('x');
+ x.find('x');
+ x.rfind('x');
+ x.rsplit('x');
+ x.split_terminator('x');
+ x.rsplit_terminator('x');
+ x.splitn(2, 'x');
+ x.rsplitn(2, 'x');
+ x.split_once('x');
+ x.rsplit_once('x');
+ x.matches('x');
+ x.rmatches('x');
+ x.match_indices('x');
+ x.rmatch_indices('x');
+ x.trim_start_matches('x');
+ x.trim_end_matches('x');
+ x.strip_prefix('x');
+ x.strip_suffix('x');
+ x.replace('x', "y");
+ x.replacen('x', "y", 3);
+ // Make sure we escape characters correctly.
+ x.split('\n');
+ x.split('\'');
+ x.split('\'');
+
+ let h = HashSet::<String>::new();
+ h.contains("X"); // should not warn
+
+ x.replace(';', ",").split(','); // issue #2978
+ x.starts_with('\x03'); // issue #2996
+
+ // Issue #3204
+ const S: &str = "#";
+ x.find(S);
+
+ // Raw string
+ x.split('a');
+ x.split('a');
+ x.split('a');
+ x.split('\'');
+ x.split('#');
+ // Must escape backslash in raw strings when converting to char #8060
+ x.split('\\');
+ x.split('\\');
+}
diff --git a/src/tools/clippy/tests/ui/single_char_pattern.rs b/src/tools/clippy/tests/ui/single_char_pattern.rs
new file mode 100644
index 000000000..186202d78
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_pattern.rs
@@ -0,0 +1,67 @@
+// run-rustfix
+
+#![allow(unused_must_use)]
+
+use std::collections::HashSet;
+
+fn main() {
+ let x = "foo";
+ x.split("x");
+ x.split("xx");
+ x.split('x');
+
+ let y = "x";
+ x.split(y);
+ x.split("ß");
+ x.split("ℝ");
+ x.split("💣");
+ // Can't use this lint for unicode code points which don't fit in a char
+ x.split("❤️");
+ x.split_inclusive("x");
+ x.contains("x");
+ x.starts_with("x");
+ x.ends_with("x");
+ x.find("x");
+ x.rfind("x");
+ x.rsplit("x");
+ x.split_terminator("x");
+ x.rsplit_terminator("x");
+ x.splitn(2, "x");
+ x.rsplitn(2, "x");
+ x.split_once("x");
+ x.rsplit_once("x");
+ x.matches("x");
+ x.rmatches("x");
+ x.match_indices("x");
+ x.rmatch_indices("x");
+ x.trim_start_matches("x");
+ x.trim_end_matches("x");
+ x.strip_prefix("x");
+ x.strip_suffix("x");
+ x.replace("x", "y");
+ x.replacen("x", "y", 3);
+ // Make sure we escape characters correctly.
+ x.split("\n");
+ x.split("'");
+ x.split("\'");
+
+ let h = HashSet::<String>::new();
+ h.contains("X"); // should not warn
+
+ x.replace(';', ",").split(","); // issue #2978
+ x.starts_with("\x03"); // issue #2996
+
+ // Issue #3204
+ const S: &str = "#";
+ x.find(S);
+
+ // Raw string
+ x.split(r"a");
+ x.split(r#"a"#);
+ x.split(r###"a"###);
+ x.split(r###"'"###);
+ x.split(r###"#"###);
+ // Must escape backslash in raw strings when converting to char #8060
+ x.split(r#"\"#);
+ x.split(r"\");
+}
diff --git a/src/tools/clippy/tests/ui/single_char_pattern.stderr b/src/tools/clippy/tests/ui/single_char_pattern.stderr
new file mode 100644
index 000000000..5564aac67
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_char_pattern.stderr
@@ -0,0 +1,238 @@
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:9:13
+ |
+LL | x.split("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+ |
+ = note: `-D clippy::single-char-pattern` implied by `-D warnings`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:15:13
+ |
+LL | x.split("ß");
+ | ^^^ help: try using a `char` instead: `'ß'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:16:13
+ |
+LL | x.split("ℝ");
+ | ^^^ help: try using a `char` instead: `'ℝ'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:17:13
+ |
+LL | x.split("💣");
+ | ^^^^ help: try using a `char` instead: `'💣'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:20:23
+ |
+LL | x.split_inclusive("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:21:16
+ |
+LL | x.contains("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:22:19
+ |
+LL | x.starts_with("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:23:17
+ |
+LL | x.ends_with("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:24:12
+ |
+LL | x.find("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:25:13
+ |
+LL | x.rfind("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:26:14
+ |
+LL | x.rsplit("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:27:24
+ |
+LL | x.split_terminator("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:28:25
+ |
+LL | x.rsplit_terminator("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:29:17
+ |
+LL | x.splitn(2, "x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:30:18
+ |
+LL | x.rsplitn(2, "x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:31:18
+ |
+LL | x.split_once("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:32:19
+ |
+LL | x.rsplit_once("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:33:15
+ |
+LL | x.matches("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:34:16
+ |
+LL | x.rmatches("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:35:21
+ |
+LL | x.match_indices("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:36:22
+ |
+LL | x.rmatch_indices("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:37:26
+ |
+LL | x.trim_start_matches("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:38:24
+ |
+LL | x.trim_end_matches("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:39:20
+ |
+LL | x.strip_prefix("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:40:20
+ |
+LL | x.strip_suffix("x");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:41:15
+ |
+LL | x.replace("x", "y");
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:42:16
+ |
+LL | x.replacen("x", "y", 3);
+ | ^^^ help: try using a `char` instead: `'x'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:44:13
+ |
+LL | x.split("/n");
+ | ^^^^ help: try using a `char` instead: `'/n'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:45:13
+ |
+LL | x.split("'");
+ | ^^^ help: try using a `char` instead: `'/''`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:46:13
+ |
+LL | x.split("/'");
+ | ^^^^ help: try using a `char` instead: `'/''`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:51:31
+ |
+LL | x.replace(';', ",").split(","); // issue #2978
+ | ^^^ help: try using a `char` instead: `','`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:52:19
+ |
+LL | x.starts_with("/x03"); // issue #2996
+ | ^^^^^^ help: try using a `char` instead: `'/x03'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:59:13
+ |
+LL | x.split(r"a");
+ | ^^^^ help: try using a `char` instead: `'a'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:60:13
+ |
+LL | x.split(r#"a"#);
+ | ^^^^^^ help: try using a `char` instead: `'a'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:61:13
+ |
+LL | x.split(r###"a"###);
+ | ^^^^^^^^^^ help: try using a `char` instead: `'a'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:62:13
+ |
+LL | x.split(r###"'"###);
+ | ^^^^^^^^^^ help: try using a `char` instead: `'/''`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:63:13
+ |
+LL | x.split(r###"#"###);
+ | ^^^^^^^^^^ help: try using a `char` instead: `'#'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:65:13
+ |
+LL | x.split(r#"/"#);
+ | ^^^^^^ help: try using a `char` instead: `'/'`
+
+error: single-character string constant used as pattern
+ --> $DIR/single_char_pattern.rs:66:13
+ |
+LL | x.split(r"/");
+ | ^^^^ help: try using a `char` instead: `'/'`
+
+error: aborting due to 39 previous errors
+
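All of the flagged calls take a pattern argument, and a `char` pattern avoids the substring search that a one-character `&str` implies. A minimal illustrative sketch (not taken from the patch):

fn main() {
    let csv = "a,b,c";

    // Flagged: a single-character string used as the pattern.
    let with_str: Vec<&str> = csv.split(",").collect();

    // Suggested: the equivalent `char` pattern.
    let with_char: Vec<&str> = csv.split(',').collect();

    assert_eq!(with_str, with_char);
}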
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.fixed b/src/tools/clippy/tests/ui/single_component_path_imports.fixed
new file mode 100644
index 000000000..4c40739d6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.fixed
@@ -0,0 +1,33 @@
+// run-rustfix
+#![warn(clippy::single_component_path_imports)]
+#![allow(unused_imports)]
+
+
+use serde as edres;
+pub use serde;
+
+macro_rules! m {
+ () => {
+ use regex;
+ };
+}
+
+fn main() {
+ regex::Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+
+ // False positive #5154, shouldn't trigger lint.
+ m!();
+}
+
+mod hello_mod {
+
+ #[allow(dead_code)]
+ fn hello_mod() {}
+}
+
+mod hi_mod {
+ use self::regex::{Regex, RegexSet};
+ use regex;
+ #[allow(dead_code)]
+ fn hi_mod() {}
+}
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.rs b/src/tools/clippy/tests/ui/single_component_path_imports.rs
new file mode 100644
index 000000000..9280bab3c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.rs
@@ -0,0 +1,33 @@
+// run-rustfix
+#![warn(clippy::single_component_path_imports)]
+#![allow(unused_imports)]
+
+use regex;
+use serde as edres;
+pub use serde;
+
+macro_rules! m {
+ () => {
+ use regex;
+ };
+}
+
+fn main() {
+ regex::Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+
+ // False positive #5154, shouldn't trigger lint.
+ m!();
+}
+
+mod hello_mod {
+ use regex;
+ #[allow(dead_code)]
+ fn hello_mod() {}
+}
+
+mod hi_mod {
+ use self::regex::{Regex, RegexSet};
+ use regex;
+ #[allow(dead_code)]
+ fn hi_mod() {}
+}
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.stderr b/src/tools/clippy/tests/ui/single_component_path_imports.stderr
new file mode 100644
index 000000000..509c88ac2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.stderr
@@ -0,0 +1,16 @@
+error: this import is redundant
+ --> $DIR/single_component_path_imports.rs:23:5
+ |
+LL | use regex;
+ | ^^^^^^^^^^ help: remove it entirely
+ |
+ = note: `-D clippy::single-component-path-imports` implied by `-D warnings`
+
+error: this import is redundant
+ --> $DIR/single_component_path_imports.rs:5:1
+ |
+LL | use regex;
+ | ^^^^^^^^^^ help: remove it entirely
+
+error: aborting due to 2 previous errors
+
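On the 2018 edition and later, a bare `use regex;` is redundant because the dependency is already reachable by its crate name, which is why the fixed file simply deletes it while keeping the renamed and re-exported forms. An illustrative sketch, assuming a `regex` dependency in Cargo.toml (not taken from the patch):

// `use regex;` would be flagged here as redundant.

fn main() {
    // The crate path works without any import.
    let re = regex::Regex::new(r"^\d{4}$").unwrap();
    assert!(re.is_match("2024"));
}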
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_macro.rs b/src/tools/clippy/tests/ui/single_component_path_imports_macro.rs
new file mode 100644
index 000000000..fda294a61
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_macro.rs
@@ -0,0 +1,20 @@
+#![warn(clippy::single_component_path_imports)]
+#![allow(unused_imports)]
+
+// #7106: use statements exporting a macro within a crate should not trigger the lint
+// #7923: normal `use` statements of macros should also not trigger the lint
+
+macro_rules! m1 {
+ () => {};
+}
+pub(crate) use m1; // ok
+
+macro_rules! m2 {
+ () => {};
+}
+use m2; // ok
+
+fn main() {
+ m1!();
+ m2!();
+}
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs
new file mode 100644
index 000000000..c75beb747
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs
@@ -0,0 +1,16 @@
+#![warn(clippy::single_component_path_imports)]
+#![allow(unused_imports)]
+
+use regex;
+use serde as edres;
+pub use serde;
+
+fn main() {
+ regex::Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+}
+
+mod root_nested_use_mod {
+ use {regex, serde};
+ #[allow(dead_code)]
+ fn root_nested_use_mod() {}
+}
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr
new file mode 100644
index 000000000..cf990be1b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr
@@ -0,0 +1,25 @@
+error: this import is redundant
+ --> $DIR/single_component_path_imports_nested_first.rs:13:10
+ |
+LL | use {regex, serde};
+ | ^^^^^
+ |
+ = note: `-D clippy::single-component-path-imports` implied by `-D warnings`
+ = help: remove this import
+
+error: this import is redundant
+ --> $DIR/single_component_path_imports_nested_first.rs:13:17
+ |
+LL | use {regex, serde};
+ | ^^^^^
+ |
+ = help: remove this import
+
+error: this import is redundant
+ --> $DIR/single_component_path_imports_nested_first.rs:4:1
+ |
+LL | use regex;
+ | ^^^^^^^^^^ help: remove it entirely
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs b/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs
new file mode 100644
index 000000000..48e8e5302
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs
@@ -0,0 +1,15 @@
+#![warn(clippy::single_component_path_imports)]
+#![allow(unused_imports)]
+
+use self::regex::{Regex as xeger, RegexSet as tesxeger};
+pub use self::{
+ regex::{Regex, RegexSet},
+ some_mod::SomeType,
+};
+use regex;
+
+mod some_mod {
+ pub struct SomeType;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs b/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs
new file mode 100644
index 000000000..4fb0cf40b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs
@@ -0,0 +1,16 @@
+#![warn(clippy::single_component_path_imports)]
+#![allow(unused_imports)]
+
+use regex;
+
+use self::regex::{Regex as xeger, RegexSet as tesxeger};
+pub use self::{
+ regex::{Regex, RegexSet},
+ some_mod::SomeType,
+};
+
+mod some_mod {
+ pub struct SomeType;
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/single_element_loop.fixed b/src/tools/clippy/tests/ui/single_element_loop.fixed
new file mode 100644
index 000000000..63d31ff83
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_element_loop.fixed
@@ -0,0 +1,36 @@
+// run-rustfix
+// Tests from for_loop.rs that don't have suggestions
+
+#[warn(clippy::single_element_loop)]
+fn main() {
+ let item1 = 2;
+ {
+ let item = &item1;
+ dbg!(item);
+ }
+
+ {
+ let item = &item1;
+ dbg!(item);
+ }
+
+ {
+ let item = &(0..5);
+ dbg!(item);
+ }
+
+ {
+ let item = &mut (0..5);
+ dbg!(item);
+ }
+
+ {
+ let item = 0..5;
+ dbg!(item);
+ }
+
+ {
+ let item = 0..5;
+ dbg!(item);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/single_element_loop.rs b/src/tools/clippy/tests/ui/single_element_loop.rs
new file mode 100644
index 000000000..2cda5a329
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_element_loop.rs
@@ -0,0 +1,30 @@
+// run-rustfix
+// Tests from for_loop.rs that don't have suggestions
+
+#[warn(clippy::single_element_loop)]
+fn main() {
+ let item1 = 2;
+ for item in &[item1] {
+ dbg!(item);
+ }
+
+ for item in [item1].iter() {
+ dbg!(item);
+ }
+
+ for item in &[0..5] {
+ dbg!(item);
+ }
+
+ for item in [0..5].iter_mut() {
+ dbg!(item);
+ }
+
+ for item in [0..5] {
+ dbg!(item);
+ }
+
+ for item in [0..5].into_iter() {
+ dbg!(item);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/single_element_loop.stderr b/src/tools/clippy/tests/ui/single_element_loop.stderr
new file mode 100644
index 000000000..0aeb8da1a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_element_loop.stderr
@@ -0,0 +1,99 @@
+error: for loop over a single element
+ --> $DIR/single_element_loop.rs:7:5
+ |
+LL | / for item in &[item1] {
+LL | | dbg!(item);
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::single-element-loop` implied by `-D warnings`
+help: try
+ |
+LL ~ {
+LL + let item = &item1;
+LL + dbg!(item);
+LL + }
+ |
+
+error: for loop over a single element
+ --> $DIR/single_element_loop.rs:11:5
+ |
+LL | / for item in [item1].iter() {
+LL | | dbg!(item);
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ {
+LL + let item = &item1;
+LL + dbg!(item);
+LL + }
+ |
+
+error: for loop over a single element
+ --> $DIR/single_element_loop.rs:15:5
+ |
+LL | / for item in &[0..5] {
+LL | | dbg!(item);
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ {
+LL + let item = &(0..5);
+LL + dbg!(item);
+LL + }
+ |
+
+error: for loop over a single element
+ --> $DIR/single_element_loop.rs:19:5
+ |
+LL | / for item in [0..5].iter_mut() {
+LL | | dbg!(item);
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ {
+LL + let item = &mut (0..5);
+LL + dbg!(item);
+LL + }
+ |
+
+error: for loop over a single element
+ --> $DIR/single_element_loop.rs:23:5
+ |
+LL | / for item in [0..5] {
+LL | | dbg!(item);
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ {
+LL + let item = 0..5;
+LL + dbg!(item);
+LL + }
+ |
+
+error: for loop over a single element
+ --> $DIR/single_element_loop.rs:27:5
+ |
+LL | / for item in [0..5].into_iter() {
+LL | | dbg!(item);
+LL | | }
+ | |_____^
+ |
+help: try
+ |
+LL ~ {
+LL + let item = 0..5;
+LL + dbg!(item);
+LL + }
+ |
+
+error: aborting due to 6 previous errors
+
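A `for` loop over a one-element array runs its body exactly once, so the suggestion replaces the loop with a block and a `let` that binds the same value the loop would have produced. Illustrative sketch (not taken from the patch):

fn main() {
    let item1 = 2;

    // Flagged: iterating over a single-element array.
    for item in &[item1] {
        dbg!(item);
    }

    // Suggested: a plain block that binds the same value once.
    {
        let item = &item1;
        dbg!(item);
    }
}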
diff --git a/src/tools/clippy/tests/ui/single_match.rs b/src/tools/clippy/tests/ui/single_match.rs
new file mode 100644
index 000000000..dd148edf5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_match.rs
@@ -0,0 +1,245 @@
+#![warn(clippy::single_match)]
+
+fn dummy() {}
+
+fn single_match() {
+ let x = Some(1u8);
+
+ match x {
+ Some(y) => {
+ println!("{:?}", y);
+ },
+ _ => (),
+ };
+
+ let x = Some(1u8);
+ match x {
+ // Note the missing block braces.
+ // We suggest `if let Some(y) = x { .. }` because the macro
+ // is expanded before we can do anything.
+ Some(y) => println!("{:?}", y),
+ _ => (),
+ }
+
+ let z = (1u8, 1u8);
+ match z {
+ (2..=3, 7..=9) => dummy(),
+ _ => {},
+ };
+
+ // Not linted (pattern guards used)
+ match x {
+ Some(y) if y == 0 => println!("{:?}", y),
+ _ => (),
+ }
+
+ // Not linted (no block with statements in the single arm)
+ match z {
+ (2..=3, 7..=9) => println!("{:?}", z),
+ _ => println!("nope"),
+ }
+}
+
+enum Foo {
+ Bar,
+ Baz(u8),
+}
+use std::borrow::Cow;
+use Foo::*;
+
+fn single_match_know_enum() {
+ let x = Some(1u8);
+ let y: Result<_, i8> = Ok(1i8);
+
+ match x {
+ Some(y) => dummy(),
+ None => (),
+ };
+
+ match y {
+ Ok(y) => dummy(),
+ Err(..) => (),
+ };
+
+ let c = Cow::Borrowed("");
+
+ match c {
+ Cow::Borrowed(..) => dummy(),
+ Cow::Owned(..) => (),
+ };
+
+ let z = Foo::Bar;
+ // no warning
+ match z {
+ Bar => println!("42"),
+ Baz(_) => (),
+ }
+
+ match z {
+ Baz(_) => println!("42"),
+ Bar => (),
+ }
+}
+
+// issue #173
+fn if_suggestion() {
+ let x = "test";
+ match x {
+ "test" => println!(),
+ _ => (),
+ }
+
+ #[derive(PartialEq, Eq)]
+ enum Foo {
+ A,
+ B,
+ C(u32),
+ }
+
+ let x = Foo::A;
+ match x {
+ Foo::A => println!(),
+ _ => (),
+ }
+
+ const FOO_C: Foo = Foo::C(0);
+ match x {
+ FOO_C => println!(),
+ _ => (),
+ }
+
+ match &&x {
+ Foo::A => println!(),
+ _ => (),
+ }
+
+ let x = &x;
+ match &x {
+ Foo::A => println!(),
+ _ => (),
+ }
+
+ enum Bar {
+ A,
+ B,
+ }
+ impl PartialEq for Bar {
+ fn eq(&self, rhs: &Self) -> bool {
+ matches!((self, rhs), (Self::A, Self::A) | (Self::B, Self::B))
+ }
+ }
+ impl Eq for Bar {}
+
+ let x = Bar::A;
+ match x {
+ Bar::A => println!(),
+ _ => (),
+ }
+
+ // issue #7038
+ struct X;
+ let x = Some(X);
+ match x {
+ None => println!(),
+ _ => (),
+ };
+}
+
+// See: issue #8282
+fn ranges() {
+ enum E {
+ V,
+ }
+ let x = (Some(E::V), Some(42));
+
+ // Don't lint: the `E` enum may gain additional variants later, so the proposed
+ // replacement with `if let Some(E::V)` could hide the non-exhaustiveness warnings
+ // that the `match` construction would otherwise surface.
+ match x {
+ (Some(E::V), _) => {},
+ (None, _) => {},
+ }
+
+ // lint
+ match x {
+ (Some(_), _) => {},
+ (None, _) => {},
+ }
+
+ // lint
+ match x {
+ (Some(E::V), _) => todo!(),
+ (_, _) => {},
+ }
+
+ // lint
+ match (Some(42), Some(E::V), Some(42)) {
+ (.., Some(E::V), _) => {},
+ (..) => {},
+ }
+
+ // Don't lint, see above.
+ match (Some(E::V), Some(E::V), Some(E::V)) {
+ (.., Some(E::V), _) => {},
+ (.., None, _) => {},
+ }
+
+ // Don't lint, see above.
+ match (Some(E::V), Some(E::V), Some(E::V)) {
+ (Some(E::V), ..) => {},
+ (None, ..) => {},
+ }
+
+ // Don't lint, see above.
+ match (Some(E::V), Some(E::V), Some(E::V)) {
+ (_, Some(E::V), ..) => {},
+ (_, None, ..) => {},
+ }
+}
+
+fn skip_type_aliases() {
+ enum OptionEx {
+ Some(i32),
+ None,
+ }
+ enum ResultEx {
+ Err(i32),
+ Ok(i32),
+ }
+
+ use OptionEx::{None, Some};
+ use ResultEx::{Err, Ok};
+
+ // don't lint
+ match Err(42) {
+ Ok(_) => dummy(),
+ Err(_) => (),
+ };
+
+ // don't lint
+ match Some(1i32) {
+ Some(_) => dummy(),
+ None => (),
+ };
+}
+
+macro_rules! single_match {
+ ($num:literal) => {
+ match $num {
+ 15 => println!("15"),
+ _ => (),
+ }
+ };
+}
+
+fn main() {
+ single_match!(5);
+
+ // Don't lint
+ let _ = match Some(0) {
+ #[cfg(feature = "foo")]
+ Some(10) => 11,
+ Some(x) => x,
+ _ => 0,
+ };
+}
diff --git a/src/tools/clippy/tests/ui/single_match.stderr b/src/tools/clippy/tests/ui/single_match.stderr
new file mode 100644
index 000000000..4d2b9ec5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_match.stderr
@@ -0,0 +1,159 @@
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:8:5
+ |
+LL | / match x {
+LL | | Some(y) => {
+LL | | println!("{:?}", y);
+LL | | },
+LL | | _ => (),
+LL | | };
+ | |_____^
+ |
+ = note: `-D clippy::single-match` implied by `-D warnings`
+help: try this
+ |
+LL ~ if let Some(y) = x {
+LL + println!("{:?}", y);
+LL ~ };
+ |
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:16:5
+ |
+LL | / match x {
+LL | | // Note the missing block braces.
+LL | | // We suggest `if let Some(y) = x { .. }` because the macro
+LL | | // is expanded before we can do anything.
+LL | | Some(y) => println!("{:?}", y),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if let Some(y) = x { println!("{:?}", y) }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:25:5
+ |
+LL | / match z {
+LL | | (2..=3, 7..=9) => dummy(),
+LL | | _ => {},
+LL | | };
+ | |_____^ help: try this: `if let (2..=3, 7..=9) = z { dummy() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:54:5
+ |
+LL | / match x {
+LL | | Some(y) => dummy(),
+LL | | None => (),
+LL | | };
+ | |_____^ help: try this: `if let Some(y) = x { dummy() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:59:5
+ |
+LL | / match y {
+LL | | Ok(y) => dummy(),
+LL | | Err(..) => (),
+LL | | };
+ | |_____^ help: try this: `if let Ok(y) = y { dummy() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:66:5
+ |
+LL | / match c {
+LL | | Cow::Borrowed(..) => dummy(),
+LL | | Cow::Owned(..) => (),
+LL | | };
+ | |_____^ help: try this: `if let Cow::Borrowed(..) = c { dummy() }`
+
+error: you seem to be trying to use `match` for an equality check. Consider using `if`
+ --> $DIR/single_match.rs:87:5
+ |
+LL | / match x {
+LL | | "test" => println!(),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if x == "test" { println!() }`
+
+error: you seem to be trying to use `match` for an equality check. Consider using `if`
+ --> $DIR/single_match.rs:100:5
+ |
+LL | / match x {
+LL | | Foo::A => println!(),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if x == Foo::A { println!() }`
+
+error: you seem to be trying to use `match` for an equality check. Consider using `if`
+ --> $DIR/single_match.rs:106:5
+ |
+LL | / match x {
+LL | | FOO_C => println!(),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if x == FOO_C { println!() }`
+
+error: you seem to be trying to use `match` for an equality check. Consider using `if`
+ --> $DIR/single_match.rs:111:5
+ |
+LL | / match &&x {
+LL | | Foo::A => println!(),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if x == Foo::A { println!() }`
+
+error: you seem to be trying to use `match` for an equality check. Consider using `if`
+ --> $DIR/single_match.rs:117:5
+ |
+LL | / match &x {
+LL | | Foo::A => println!(),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if x == &Foo::A { println!() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:134:5
+ |
+LL | / match x {
+LL | | Bar::A => println!(),
+LL | | _ => (),
+LL | | }
+ | |_____^ help: try this: `if let Bar::A = x { println!() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:142:5
+ |
+LL | / match x {
+LL | | None => println!(),
+LL | | _ => (),
+LL | | };
+ | |_____^ help: try this: `if let None = x { println!() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:164:5
+ |
+LL | / match x {
+LL | | (Some(_), _) => {},
+LL | | (None, _) => {},
+LL | | }
+ | |_____^ help: try this: `if let (Some(_), _) = x {}`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:170:5
+ |
+LL | / match x {
+LL | | (Some(E::V), _) => todo!(),
+LL | | (_, _) => {},
+LL | | }
+ | |_____^ help: try this: `if let (Some(E::V), _) = x { todo!() }`
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match.rs:176:5
+ |
+LL | / match (Some(42), Some(E::V), Some(42)) {
+LL | | (.., Some(E::V), _) => {},
+LL | | (..) => {},
+LL | | }
+ | |_____^ help: try this: `if let (.., Some(E::V), _) = (Some(42), Some(E::V), Some(42)) {}`
+
+error: aborting due to 16 previous errors
+
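The two suggestion shapes above are `if let` for destructuring and a plain `if` for equality checks, each keeping only the arm that does something. Illustrative sketch (not taken from the patch):

fn main() {
    let x = Some(1u8);

    // Flagged: the wildcard arm is a no-op.
    match x {
        Some(y) => println!("{}", y),
        _ => (),
    }

    // Suggested: `if let` expresses the same control flow directly.
    if let Some(y) = x {
        println!("{}", y);
    }

    // For equality checks the suggestion is a plain `if`.
    let s = "test";
    if s == "test" {
        println!("matched");
    }
}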
diff --git a/src/tools/clippy/tests/ui/single_match_else.rs b/src/tools/clippy/tests/ui/single_match_else.rs
new file mode 100644
index 000000000..70d6febb7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_match_else.rs
@@ -0,0 +1,119 @@
+// aux-build: proc_macro_with_span.rs
+
+#![warn(clippy::single_match_else)]
+#![allow(clippy::needless_return)]
+#![allow(clippy::no_effect)]
+
+extern crate proc_macro_with_span;
+use proc_macro_with_span::with_span;
+
+enum ExprNode {
+ ExprAddrOf,
+ Butterflies,
+ Unicorns,
+}
+
+static NODE: ExprNode = ExprNode::Unicorns;
+
+fn unwrap_addr() -> Option<&'static ExprNode> {
+ let _ = match ExprNode::Butterflies {
+ ExprNode::ExprAddrOf => Some(&NODE),
+ _ => {
+ let x = 5;
+ None
+ },
+ };
+
+ // Don't lint
+ with_span!(span match ExprNode::Butterflies {
+ ExprNode::ExprAddrOf => Some(&NODE),
+ _ => {
+ let x = 5;
+ None
+ },
+ })
+}
+
+macro_rules! unwrap_addr {
+ ($expression:expr) => {
+ match $expression {
+ ExprNode::ExprAddrOf => Some(&NODE),
+ _ => {
+ let x = 5;
+ None
+ },
+ }
+ };
+}
+
+#[rustfmt::skip]
+fn main() {
+ unwrap_addr!(ExprNode::Unicorns);
+
+ //
+ // don't lint single exprs/statements
+ //
+
+ // don't lint here
+ match Some(1) {
+ Some(a) => println!("${:?}", a),
+ None => return,
+ }
+
+ // don't lint here
+ match Some(1) {
+ Some(a) => println!("${:?}", a),
+ None => {
+ return
+ },
+ }
+
+ // don't lint here
+ match Some(1) {
+ Some(a) => println!("${:?}", a),
+ None => {
+ return;
+ },
+ }
+
+ //
+ // lint multiple exprs/statements "else" blocks
+ //
+
+ // lint here
+ match Some(1) {
+ Some(a) => println!("${:?}", a),
+ None => {
+ println!("else block");
+ return
+ },
+ }
+
+ // lint here
+ match Some(1) {
+ Some(a) => println!("${:?}", a),
+ None => {
+ println!("else block");
+ return;
+ },
+ }
+
+ // lint here
+ use std::convert::Infallible;
+ match Result::<i32, Infallible>::Ok(1) {
+ Ok(a) => println!("${:?}", a),
+ Err(_) => {
+ println!("else block");
+ return;
+ }
+ }
+
+ use std::borrow::Cow;
+ match Cow::from("moo") {
+ Cow::Owned(a) => println!("${:?}", a),
+ Cow::Borrowed(_) => {
+ println!("else block");
+ return;
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/single_match_else.stderr b/src/tools/clippy/tests/ui/single_match_else.stderr
new file mode 100644
index 000000000..38fd9c6a6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/single_match_else.stderr
@@ -0,0 +1,104 @@
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match_else.rs:19:13
+ |
+LL | let _ = match ExprNode::Butterflies {
+ | _____________^
+LL | | ExprNode::ExprAddrOf => Some(&NODE),
+LL | | _ => {
+LL | | let x = 5;
+LL | | None
+LL | | },
+LL | | };
+ | |_____^
+ |
+ = note: `-D clippy::single-match-else` implied by `-D warnings`
+help: try this
+ |
+LL ~ let _ = if let ExprNode::ExprAddrOf = ExprNode::Butterflies { Some(&NODE) } else {
+LL + let x = 5;
+LL + None
+LL ~ };
+ |
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match_else.rs:84:5
+ |
+LL | / match Some(1) {
+LL | | Some(a) => println!("${:?}", a),
+LL | | None => {
+LL | | println!("else block");
+LL | | return
+LL | | },
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let Some(a) = Some(1) { println!("${:?}", a) } else {
+LL + println!("else block");
+LL + return
+LL + }
+ |
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match_else.rs:93:5
+ |
+LL | / match Some(1) {
+LL | | Some(a) => println!("${:?}", a),
+LL | | None => {
+LL | | println!("else block");
+LL | | return;
+LL | | },
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let Some(a) = Some(1) { println!("${:?}", a) } else {
+LL + println!("else block");
+LL + return;
+LL + }
+ |
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match_else.rs:103:5
+ |
+LL | / match Result::<i32, Infallible>::Ok(1) {
+LL | | Ok(a) => println!("${:?}", a),
+LL | | Err(_) => {
+LL | | println!("else block");
+LL | | return;
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let Ok(a) = Result::<i32, Infallible>::Ok(1) { println!("${:?}", a) } else {
+LL + println!("else block");
+LL + return;
+LL + }
+ |
+
+error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
+ --> $DIR/single_match_else.rs:112:5
+ |
+LL | / match Cow::from("moo") {
+LL | | Cow::Owned(a) => println!("${:?}", a),
+LL | | Cow::Borrowed(_) => {
+LL | | println!("else block");
+LL | | return;
+LL | | }
+LL | | }
+ | |_____^
+ |
+help: try this
+ |
+LL ~ if let Cow::Owned(a) = Cow::from("moo") { println!("${:?}", a) } else {
+LL + println!("else block");
+LL + return;
+LL + }
+ |
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/size_of_in_element_count/expressions.rs b/src/tools/clippy/tests/ui/size_of_in_element_count/expressions.rs
new file mode 100644
index 000000000..2594e8fa6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/size_of_in_element_count/expressions.rs
@@ -0,0 +1,37 @@
+#![warn(clippy::size_of_in_element_count)]
+#![allow(clippy::ptr_offset_with_cast)]
+
+use std::mem::{size_of, size_of_val};
+use std::ptr::{copy, copy_nonoverlapping, write_bytes};
+
+fn main() {
+ const SIZE: usize = 128;
+ const HALF_SIZE: usize = SIZE / 2;
+ const DOUBLE_SIZE: usize = SIZE * 2;
+ let mut x = [2u8; SIZE];
+ let mut y = [2u8; SIZE];
+
+ // Count expression involving multiplication of size_of (Should trigger the lint)
+ unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>() * SIZE) };
+
+ // Count expression involving nested multiplications of size_of (Should trigger the lint)
+ unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), HALF_SIZE * size_of_val(&x[0]) * 2) };
+
+ // Count expression involving divisions of size_of (Should trigger the lint)
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), DOUBLE_SIZE * size_of::<u8>() / 2) };
+
+ // Count expression involving divisions by size_of (Should not trigger the lint)
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), DOUBLE_SIZE / size_of::<u8>()) };
+
+ // Count expression involving divisions by multiple size_of (Should not trigger the lint)
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), DOUBLE_SIZE / (2 * size_of::<u8>())) };
+
+ // Count expression involving recursive divisions by size_of (Should trigger the lint)
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), DOUBLE_SIZE / (2 / size_of::<u8>())) };
+
+ // No size_of calls (Should not trigger the lint)
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), SIZE) };
+
+ // Different types for pointee and size_of (Should not trigger the lint)
+ unsafe { y.as_mut_ptr().write_bytes(0u8, size_of::<u16>() / 2 * SIZE) };
+}
diff --git a/src/tools/clippy/tests/ui/size_of_in_element_count/expressions.stderr b/src/tools/clippy/tests/ui/size_of_in_element_count/expressions.stderr
new file mode 100644
index 000000000..0f0dff57f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/size_of_in_element_count/expressions.stderr
@@ -0,0 +1,35 @@
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/expressions.rs:15:62
+ |
+LL | unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>() * SIZE) };
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::size-of-in-element-count` implied by `-D warnings`
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/expressions.rs:18:62
+ |
+LL | unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), HALF_SIZE * size_of_val(&x[0]) * 2) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/expressions.rs:21:47
+ |
+LL | unsafe { copy(x.as_ptr(), y.as_mut_ptr(), DOUBLE_SIZE * size_of::<u8>() / 2) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/expressions.rs:30:47
+ |
+LL | unsafe { copy(x.as_ptr(), y.as_mut_ptr(), DOUBLE_SIZE / (2 / size_of::<u8>())) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/size_of_in_element_count/functions.rs b/src/tools/clippy/tests/ui/size_of_in_element_count/functions.rs
new file mode 100644
index 000000000..09d08ac37
--- /dev/null
+++ b/src/tools/clippy/tests/ui/size_of_in_element_count/functions.rs
@@ -0,0 +1,46 @@
+#![warn(clippy::size_of_in_element_count)]
+#![allow(clippy::ptr_offset_with_cast)]
+
+use std::mem::{size_of, size_of_val};
+use std::ptr::{
+ copy, copy_nonoverlapping, slice_from_raw_parts, slice_from_raw_parts_mut, swap_nonoverlapping, write_bytes,
+};
+use std::slice::{from_raw_parts, from_raw_parts_mut};
+
+fn main() {
+ const SIZE: usize = 128;
+ const HALF_SIZE: usize = SIZE / 2;
+ const DOUBLE_SIZE: usize = SIZE * 2;
+ let mut x = [2u8; SIZE];
+ let mut y = [2u8; SIZE];
+
+ // Count is size_of (Should trigger the lint)
+ unsafe { copy_nonoverlapping::<u8>(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>()) };
+ unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), size_of_val(&x[0])) };
+
+ unsafe { x.as_ptr().copy_to(y.as_mut_ptr(), size_of::<u8>()) };
+ unsafe { x.as_ptr().copy_to_nonoverlapping(y.as_mut_ptr(), size_of::<u8>()) };
+ unsafe { y.as_mut_ptr().copy_from(x.as_ptr(), size_of::<u8>()) };
+ unsafe { y.as_mut_ptr().copy_from_nonoverlapping(x.as_ptr(), size_of::<u8>()) };
+
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>()) };
+ unsafe { copy(x.as_ptr(), y.as_mut_ptr(), size_of_val(&x[0])) };
+
+ unsafe { y.as_mut_ptr().write_bytes(0u8, size_of::<u8>() * SIZE) };
+ unsafe { write_bytes(y.as_mut_ptr(), 0u8, size_of::<u8>() * SIZE) };
+
+ unsafe { swap_nonoverlapping(y.as_mut_ptr(), x.as_mut_ptr(), size_of::<u8>() * SIZE) };
+
+ slice_from_raw_parts_mut(y.as_mut_ptr(), size_of::<u8>() * SIZE);
+ slice_from_raw_parts(y.as_ptr(), size_of::<u8>() * SIZE);
+
+ unsafe { from_raw_parts_mut(y.as_mut_ptr(), size_of::<u8>() * SIZE) };
+ unsafe { from_raw_parts(y.as_ptr(), size_of::<u8>() * SIZE) };
+
+ unsafe { y.as_mut_ptr().sub(size_of::<u8>()) };
+ y.as_ptr().wrapping_sub(size_of::<u8>());
+ unsafe { y.as_ptr().add(size_of::<u8>()) };
+ y.as_mut_ptr().wrapping_add(size_of::<u8>());
+ unsafe { y.as_ptr().offset(size_of::<u8>() as isize) };
+ y.as_mut_ptr().wrapping_offset(size_of::<u8>() as isize);
+}
diff --git a/src/tools/clippy/tests/ui/size_of_in_element_count/functions.stderr b/src/tools/clippy/tests/ui/size_of_in_element_count/functions.stderr
new file mode 100644
index 000000000..c1e824167
--- /dev/null
+++ b/src/tools/clippy/tests/ui/size_of_in_element_count/functions.stderr
@@ -0,0 +1,171 @@
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:18:68
+ |
+LL | unsafe { copy_nonoverlapping::<u8>(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::size-of-in-element-count` implied by `-D warnings`
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:19:62
+ |
+LL | unsafe { copy_nonoverlapping(x.as_ptr(), y.as_mut_ptr(), size_of_val(&x[0])) };
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:21:49
+ |
+LL | unsafe { x.as_ptr().copy_to(y.as_mut_ptr(), size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:22:64
+ |
+LL | unsafe { x.as_ptr().copy_to_nonoverlapping(y.as_mut_ptr(), size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:23:51
+ |
+LL | unsafe { y.as_mut_ptr().copy_from(x.as_ptr(), size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:24:66
+ |
+LL | unsafe { y.as_mut_ptr().copy_from_nonoverlapping(x.as_ptr(), size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:26:47
+ |
+LL | unsafe { copy(x.as_ptr(), y.as_mut_ptr(), size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:27:47
+ |
+LL | unsafe { copy(x.as_ptr(), y.as_mut_ptr(), size_of_val(&x[0])) };
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:29:46
+ |
+LL | unsafe { y.as_mut_ptr().write_bytes(0u8, size_of::<u8>() * SIZE) };
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:30:47
+ |
+LL | unsafe { write_bytes(y.as_mut_ptr(), 0u8, size_of::<u8>() * SIZE) };
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:32:66
+ |
+LL | unsafe { swap_nonoverlapping(y.as_mut_ptr(), x.as_mut_ptr(), size_of::<u8>() * SIZE) };
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:34:46
+ |
+LL | slice_from_raw_parts_mut(y.as_mut_ptr(), size_of::<u8>() * SIZE);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:35:38
+ |
+LL | slice_from_raw_parts(y.as_ptr(), size_of::<u8>() * SIZE);
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:37:49
+ |
+LL | unsafe { from_raw_parts_mut(y.as_mut_ptr(), size_of::<u8>() * SIZE) };
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:38:41
+ |
+LL | unsafe { from_raw_parts(y.as_ptr(), size_of::<u8>() * SIZE) };
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:40:33
+ |
+LL | unsafe { y.as_mut_ptr().sub(size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:41:29
+ |
+LL | y.as_ptr().wrapping_sub(size_of::<u8>());
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:42:29
+ |
+LL | unsafe { y.as_ptr().add(size_of::<u8>()) };
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:43:33
+ |
+LL | y.as_mut_ptr().wrapping_add(size_of::<u8>());
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:44:32
+ |
+LL | unsafe { y.as_ptr().offset(size_of::<u8>() as isize) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: found a count of bytes instead of a count of elements of `T`
+ --> $DIR/functions.rs:45:36
+ |
+LL | y.as_mut_ptr().wrapping_offset(size_of::<u8>() as isize);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use a count of elements instead of a count of bytes, it already gets multiplied by the size of the type
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui/skip_while_next.rs b/src/tools/clippy/tests/ui/skip_while_next.rs
new file mode 100644
index 000000000..a522c0f08
--- /dev/null
+++ b/src/tools/clippy/tests/ui/skip_while_next.rs
@@ -0,0 +1,29 @@
+// aux-build:option_helpers.rs
+
+#![warn(clippy::skip_while_next)]
+#![allow(clippy::blacklisted_name)]
+
+extern crate option_helpers;
+use option_helpers::IteratorFalsePositives;
+
+#[rustfmt::skip]
+fn skip_while_next() {
+ let v = vec![3, 2, 1, 0, -1, -2, -3];
+
+ // Single-line case.
+ let _ = v.iter().skip_while(|&x| *x < 0).next();
+
+ // Multi-line case.
+ let _ = v.iter().skip_while(|&x| {
+ *x < 0
+ }
+ ).next();
+
+    // Check that we don't lint if the caller is not an `Iterator`.
+ let foo = IteratorFalsePositives { foo: 0 };
+ let _ = foo.skip_while().next();
+}
+
+fn main() {
+ skip_while_next();
+}
diff --git a/src/tools/clippy/tests/ui/skip_while_next.stderr b/src/tools/clippy/tests/ui/skip_while_next.stderr
new file mode 100644
index 000000000..269cc1346
--- /dev/null
+++ b/src/tools/clippy/tests/ui/skip_while_next.stderr
@@ -0,0 +1,23 @@
+error: called `skip_while(<p>).next()` on an `Iterator`
+ --> $DIR/skip_while_next.rs:14:13
+ |
+LL | let _ = v.iter().skip_while(|&x| *x < 0).next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::skip-while-next` implied by `-D warnings`
+ = help: this is more succinctly expressed by calling `.find(!<p>)` instead
+
+error: called `skip_while(<p>).next()` on an `Iterator`
+ --> $DIR/skip_while_next.rs:17:13
+ |
+LL | let _ = v.iter().skip_while(|&x| {
+ | _____________^
+LL | | *x < 0
+LL | | }
+LL | | ).next();
+ | |___________________________^
+ |
+ = help: this is more succinctly expressed by calling `.find(!<p>)` instead
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/slow_vector_initialization.rs b/src/tools/clippy/tests/ui/slow_vector_initialization.rs
new file mode 100644
index 000000000..16be9f6d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/slow_vector_initialization.rs
@@ -0,0 +1,69 @@
+use std::iter::repeat;
+
+fn main() {
+ resize_vector();
+ extend_vector();
+ mixed_extend_resize_vector();
+}
+
+fn extend_vector() {
+ // Extend with constant expression
+ let len = 300;
+ let mut vec1 = Vec::with_capacity(len);
+ vec1.extend(repeat(0).take(len));
+
+ // Extend with len expression
+ let mut vec2 = Vec::with_capacity(len - 10);
+ vec2.extend(repeat(0).take(len - 10));
+
+ // Extend with mismatching expression should not be warned
+ let mut vec3 = Vec::with_capacity(24322);
+ vec3.extend(repeat(0).take(2));
+
+ let mut vec4 = Vec::with_capacity(len);
+ vec4.extend(repeat(0).take(vec4.capacity()));
+}
+
+fn mixed_extend_resize_vector() {
+ // Mismatching len
+ let mut mismatching_len = Vec::with_capacity(30);
+ mismatching_len.extend(repeat(0).take(40));
+
+ // Slow initialization
+ let mut resized_vec = Vec::with_capacity(30);
+ resized_vec.resize(30, 0);
+
+ let mut extend_vec = Vec::with_capacity(30);
+ extend_vec.extend(repeat(0).take(30));
+}
+
+fn resize_vector() {
+ // Resize with constant expression
+ let len = 300;
+ let mut vec1 = Vec::with_capacity(len);
+ vec1.resize(len, 0);
+
+ // Resize mismatch len
+ let mut vec2 = Vec::with_capacity(200);
+ vec2.resize(10, 0);
+
+ // Resize with len expression
+ let mut vec3 = Vec::with_capacity(len - 10);
+ vec3.resize(len - 10, 0);
+
+ let mut vec4 = Vec::with_capacity(len);
+ vec4.resize(vec4.capacity(), 0);
+
+ // Reinitialization should be warned
+ vec1 = Vec::with_capacity(10);
+ vec1.resize(10, 0);
+}
+
+fn do_stuff(vec: &mut [u8]) {}
+
+fn extend_vector_with_manipulations_between() {
+ let len = 300;
+ let mut vec1: Vec<u8> = Vec::with_capacity(len);
+ do_stuff(&mut vec1);
+ vec1.extend(repeat(0).take(len));
+}
diff --git a/src/tools/clippy/tests/ui/slow_vector_initialization.stderr b/src/tools/clippy/tests/ui/slow_vector_initialization.stderr
new file mode 100644
index 000000000..cb3ce3e95
--- /dev/null
+++ b/src/tools/clippy/tests/ui/slow_vector_initialization.stderr
@@ -0,0 +1,76 @@
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:13:5
+ |
+LL | let mut vec1 = Vec::with_capacity(len);
+ | ----------------------- help: consider replace allocation with: `vec![0; len]`
+LL | vec1.extend(repeat(0).take(len));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::slow-vector-initialization` implied by `-D warnings`
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:17:5
+ |
+LL | let mut vec2 = Vec::with_capacity(len - 10);
+ | ---------------------------- help: consider replace allocation with: `vec![0; len - 10]`
+LL | vec2.extend(repeat(0).take(len - 10));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:24:5
+ |
+LL | let mut vec4 = Vec::with_capacity(len);
+ | ----------------------- help: consider replace allocation with: `vec![0; len]`
+LL | vec4.extend(repeat(0).take(vec4.capacity()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:34:5
+ |
+LL | let mut resized_vec = Vec::with_capacity(30);
+ | ---------------------- help: consider replace allocation with: `vec![0; 30]`
+LL | resized_vec.resize(30, 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:37:5
+ |
+LL | let mut extend_vec = Vec::with_capacity(30);
+ | ---------------------- help: consider replace allocation with: `vec![0; 30]`
+LL | extend_vec.extend(repeat(0).take(30));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:44:5
+ |
+LL | let mut vec1 = Vec::with_capacity(len);
+ | ----------------------- help: consider replace allocation with: `vec![0; len]`
+LL | vec1.resize(len, 0);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:52:5
+ |
+LL | let mut vec3 = Vec::with_capacity(len - 10);
+ | ---------------------------- help: consider replace allocation with: `vec![0; len - 10]`
+LL | vec3.resize(len - 10, 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:55:5
+ |
+LL | let mut vec4 = Vec::with_capacity(len);
+ | ----------------------- help: consider replace allocation with: `vec![0; len]`
+LL | vec4.resize(vec4.capacity(), 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:59:5
+ |
+LL | vec1 = Vec::with_capacity(10);
+ | ---------------------- help: consider replace allocation with: `vec![0; 10]`
+LL | vec1.resize(10, 0);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/stable_sort_primitive.fixed b/src/tools/clippy/tests/ui/stable_sort_primitive.fixed
new file mode 100644
index 000000000..f5f18169d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/stable_sort_primitive.fixed
@@ -0,0 +1,32 @@
+// run-rustfix
+#![warn(clippy::stable_sort_primitive)]
+
+fn main() {
+ // positive examples
+ let mut vec = vec![1, 3, 2];
+ vec.sort_unstable();
+ let mut vec = vec![false, false, true];
+ vec.sort_unstable();
+ let mut vec = vec!['a', 'A', 'c'];
+ vec.sort_unstable();
+ let mut vec = vec!["ab", "cd", "ab", "bc"];
+ vec.sort_unstable();
+ let mut vec = vec![(2, 1), (1, 2), (2, 5)];
+ vec.sort_unstable();
+ let mut vec = vec![[2, 1], [1, 2], [2, 5]];
+ vec.sort_unstable();
+ let mut arr = [1, 3, 2];
+ arr.sort_unstable();
+ // Negative examples: behavior changes if made unstable
+ let mut vec = vec![1, 3, 2];
+ vec.sort_by_key(|i| i / 2);
+ vec.sort_by(|&a, &b| (a + b).cmp(&b));
+ // negative examples - Not of a primitive type
+ let mut vec_of_complex = vec![String::from("hello"), String::from("world!")];
+ vec_of_complex.sort();
+ vec_of_complex.sort_by_key(String::len);
+ let mut vec = vec![(String::from("hello"), String::from("world"))];
+ vec.sort();
+ let mut vec = vec![[String::from("hello"), String::from("world")]];
+ vec.sort();
+}
diff --git a/src/tools/clippy/tests/ui/stable_sort_primitive.rs b/src/tools/clippy/tests/ui/stable_sort_primitive.rs
new file mode 100644
index 000000000..8149c5638
--- /dev/null
+++ b/src/tools/clippy/tests/ui/stable_sort_primitive.rs
@@ -0,0 +1,32 @@
+// run-rustfix
+#![warn(clippy::stable_sort_primitive)]
+
+fn main() {
+ // positive examples
+ let mut vec = vec![1, 3, 2];
+ vec.sort();
+ let mut vec = vec![false, false, true];
+ vec.sort();
+ let mut vec = vec!['a', 'A', 'c'];
+ vec.sort();
+ let mut vec = vec!["ab", "cd", "ab", "bc"];
+ vec.sort();
+ let mut vec = vec![(2, 1), (1, 2), (2, 5)];
+ vec.sort();
+ let mut vec = vec![[2, 1], [1, 2], [2, 5]];
+ vec.sort();
+ let mut arr = [1, 3, 2];
+ arr.sort();
+ // Negative examples: behavior changes if made unstable
+ let mut vec = vec![1, 3, 2];
+ vec.sort_by_key(|i| i / 2);
+ vec.sort_by(|&a, &b| (a + b).cmp(&b));
+ // negative examples - Not of a primitive type
+ let mut vec_of_complex = vec![String::from("hello"), String::from("world!")];
+ vec_of_complex.sort();
+ vec_of_complex.sort_by_key(String::len);
+ let mut vec = vec![(String::from("hello"), String::from("world"))];
+ vec.sort();
+ let mut vec = vec![[String::from("hello"), String::from("world")]];
+ vec.sort();
+}
diff --git a/src/tools/clippy/tests/ui/stable_sort_primitive.stderr b/src/tools/clippy/tests/ui/stable_sort_primitive.stderr
new file mode 100644
index 000000000..c35e0c22a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/stable_sort_primitive.stderr
@@ -0,0 +1,59 @@
+error: used `sort` on primitive type `i32`
+ --> $DIR/stable_sort_primitive.rs:7:5
+ |
+LL | vec.sort();
+ | ^^^^^^^^^^ help: try: `vec.sort_unstable()`
+ |
+ = note: `-D clippy::stable-sort-primitive` implied by `-D warnings`
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: used `sort` on primitive type `bool`
+ --> $DIR/stable_sort_primitive.rs:9:5
+ |
+LL | vec.sort();
+ | ^^^^^^^^^^ help: try: `vec.sort_unstable()`
+ |
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: used `sort` on primitive type `char`
+ --> $DIR/stable_sort_primitive.rs:11:5
+ |
+LL | vec.sort();
+ | ^^^^^^^^^^ help: try: `vec.sort_unstable()`
+ |
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: used `sort` on primitive type `str`
+ --> $DIR/stable_sort_primitive.rs:13:5
+ |
+LL | vec.sort();
+ | ^^^^^^^^^^ help: try: `vec.sort_unstable()`
+ |
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: used `sort` on primitive type `tuple`
+ --> $DIR/stable_sort_primitive.rs:15:5
+ |
+LL | vec.sort();
+ | ^^^^^^^^^^ help: try: `vec.sort_unstable()`
+ |
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: used `sort` on primitive type `array`
+ --> $DIR/stable_sort_primitive.rs:17:5
+ |
+LL | vec.sort();
+ | ^^^^^^^^^^ help: try: `vec.sort_unstable()`
+ |
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: used `sort` on primitive type `i32`
+ --> $DIR/stable_sort_primitive.rs:19:5
+ |
+LL | arr.sort();
+ | ^^^^^^^^^^ help: try: `arr.sort_unstable()`
+ |
+ = note: an unstable sort typically performs faster without any observable difference for this data type
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/starts_ends_with.fixed b/src/tools/clippy/tests/ui/starts_ends_with.fixed
new file mode 100644
index 000000000..983fac7af
--- /dev/null
+++ b/src/tools/clippy/tests/ui/starts_ends_with.fixed
@@ -0,0 +1,54 @@
+// run-rustfix
+#![allow(dead_code, unused_must_use)]
+
+fn main() {}
+
+#[allow(clippy::unnecessary_operation)]
+fn starts_with() {
+ "".starts_with(' ');
+ !"".starts_with(' ');
+
+ // Ensure that suggestion is escaped correctly
+ "".starts_with('\n');
+ !"".starts_with('\n');
+}
+
+fn chars_cmp_with_unwrap() {
+ let s = String::from("foo");
+ if s.starts_with('f') {
+ // s.starts_with('f')
+ // Nothing here
+ }
+ if s.ends_with('o') {
+ // s.ends_with('o')
+ // Nothing here
+ }
+ if s.ends_with('o') {
+ // s.ends_with('o')
+ // Nothing here
+ }
+ if !s.starts_with('f') {
+ // !s.starts_with('f')
+ // Nothing here
+ }
+ if !s.ends_with('o') {
+ // !s.ends_with('o')
+ // Nothing here
+ }
+ if !s.ends_with('\n') {
+        // !s.ends_with('\n')
+ // Nothing here
+ }
+}
+
+#[allow(clippy::unnecessary_operation)]
+fn ends_with() {
+ "".ends_with(' ');
+ !"".ends_with(' ');
+ "".ends_with(' ');
+ !"".ends_with(' ');
+
+ // Ensure that suggestion is escaped correctly
+ "".ends_with('\n');
+ !"".ends_with('\n');
+}
diff --git a/src/tools/clippy/tests/ui/starts_ends_with.rs b/src/tools/clippy/tests/ui/starts_ends_with.rs
new file mode 100644
index 000000000..e3335dd2e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/starts_ends_with.rs
@@ -0,0 +1,54 @@
+// run-rustfix
+#![allow(dead_code, unused_must_use)]
+
+fn main() {}
+
+#[allow(clippy::unnecessary_operation)]
+fn starts_with() {
+ "".chars().next() == Some(' ');
+ Some(' ') != "".chars().next();
+
+ // Ensure that suggestion is escaped correctly
+ "".chars().next() == Some('\n');
+ Some('\n') != "".chars().next();
+}
+
+fn chars_cmp_with_unwrap() {
+ let s = String::from("foo");
+ if s.chars().next().unwrap() == 'f' {
+ // s.starts_with('f')
+ // Nothing here
+ }
+ if s.chars().next_back().unwrap() == 'o' {
+ // s.ends_with('o')
+ // Nothing here
+ }
+ if s.chars().last().unwrap() == 'o' {
+ // s.ends_with('o')
+ // Nothing here
+ }
+ if s.chars().next().unwrap() != 'f' {
+ // !s.starts_with('f')
+ // Nothing here
+ }
+ if s.chars().next_back().unwrap() != 'o' {
+ // !s.ends_with('o')
+ // Nothing here
+ }
+ if s.chars().last().unwrap() != '\n' {
+        // !s.ends_with('\n')
+ // Nothing here
+ }
+}
+
+#[allow(clippy::unnecessary_operation)]
+fn ends_with() {
+ "".chars().last() == Some(' ');
+ Some(' ') != "".chars().last();
+ "".chars().next_back() == Some(' ');
+ Some(' ') != "".chars().next_back();
+
+ // Ensure that suggestion is escaped correctly
+ "".chars().last() == Some('\n');
+ Some('\n') != "".chars().last();
+}
diff --git a/src/tools/clippy/tests/ui/starts_ends_with.stderr b/src/tools/clippy/tests/ui/starts_ends_with.stderr
new file mode 100644
index 000000000..2dd9f53b8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/starts_ends_with.stderr
@@ -0,0 +1,102 @@
+error: you should use the `starts_with` method
+ --> $DIR/starts_ends_with.rs:8:5
+ |
+LL | "".chars().next() == Some(' ');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `"".starts_with(' ')`
+ |
+ = note: `-D clippy::chars-next-cmp` implied by `-D warnings`
+
+error: you should use the `starts_with` method
+ --> $DIR/starts_ends_with.rs:9:5
+ |
+LL | Some(' ') != "".chars().next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!"".starts_with(' ')`
+
+error: you should use the `starts_with` method
+ --> $DIR/starts_ends_with.rs:12:5
+ |
+LL | "".chars().next() == Some('/n');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `"".starts_with('/n')`
+
+error: you should use the `starts_with` method
+ --> $DIR/starts_ends_with.rs:13:5
+ |
+LL | Some('/n') != "".chars().next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!"".starts_with('/n')`
+
+error: you should use the `starts_with` method
+ --> $DIR/starts_ends_with.rs:18:8
+ |
+LL | if s.chars().next().unwrap() == 'f' {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `s.starts_with('f')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:22:8
+ |
+LL | if s.chars().next_back().unwrap() == 'o' {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `s.ends_with('o')`
+ |
+ = note: `-D clippy::chars-last-cmp` implied by `-D warnings`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:26:8
+ |
+LL | if s.chars().last().unwrap() == 'o' {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `s.ends_with('o')`
+
+error: you should use the `starts_with` method
+ --> $DIR/starts_ends_with.rs:30:8
+ |
+LL | if s.chars().next().unwrap() != 'f' {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!s.starts_with('f')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:34:8
+ |
+LL | if s.chars().next_back().unwrap() != 'o' {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!s.ends_with('o')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:38:8
+ |
+LL | if s.chars().last().unwrap() != '/n' {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!s.ends_with('/n')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:46:5
+ |
+LL | "".chars().last() == Some(' ');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `"".ends_with(' ')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:47:5
+ |
+LL | Some(' ') != "".chars().last();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!"".ends_with(' ')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:48:5
+ |
+LL | "".chars().next_back() == Some(' ');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `"".ends_with(' ')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:49:5
+ |
+LL | Some(' ') != "".chars().next_back();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!"".ends_with(' ')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:52:5
+ |
+LL | "".chars().last() == Some('/n');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `"".ends_with('/n')`
+
+error: you should use the `ends_with` method
+ --> $DIR/starts_ends_with.rs:53:5
+ |
+LL | Some('/n') != "".chars().last();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: like this: `!"".ends_with('/n')`
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/std_instead_of_core.rs b/src/tools/clippy/tests/ui/std_instead_of_core.rs
new file mode 100644
index 000000000..6b27475de
--- /dev/null
+++ b/src/tools/clippy/tests/ui/std_instead_of_core.rs
@@ -0,0 +1,45 @@
+#![warn(clippy::std_instead_of_core)]
+#![allow(unused_imports)]
+
+extern crate alloc;
+
+#[warn(clippy::std_instead_of_core)]
+fn std_instead_of_core() {
+ // Regular import
+ use std::hash::Hasher;
+ // Absolute path
+ use ::std::hash::Hash;
+ // Don't lint on `env` macro
+ use std::env;
+
+ // Multiple imports
+ use std::fmt::{Debug, Result};
+
+ // Function calls
+ let ptr = std::ptr::null::<u32>();
+ let ptr_mut = ::std::ptr::null_mut::<usize>();
+
+ // Types
+ let cell = std::cell::Cell::new(8u32);
+ let cell_absolute = ::std::cell::Cell::new(8u32);
+
+ let _ = std::env!("PATH");
+}
+
+#[warn(clippy::std_instead_of_alloc)]
+fn std_instead_of_alloc() {
+ // Only lint once.
+ use std::vec;
+ use std::vec::Vec;
+}
+
+#[warn(clippy::alloc_instead_of_core)]
+fn alloc_instead_of_core() {
+ use alloc::slice::from_ref;
+}
+
+fn main() {
+ std_instead_of_core();
+ std_instead_of_alloc();
+ alloc_instead_of_core();
+}
diff --git a/src/tools/clippy/tests/ui/std_instead_of_core.stderr b/src/tools/clippy/tests/ui/std_instead_of_core.stderr
new file mode 100644
index 000000000..bc49dabf5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/std_instead_of_core.stderr
@@ -0,0 +1,93 @@
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:9:9
+ |
+LL | use std::hash::Hasher;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::std-instead-of-core` implied by `-D warnings`
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:11:9
+ |
+LL | use ::std::hash::Hash;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:16:20
+ |
+LL | use std::fmt::{Debug, Result};
+ | ^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:16:27
+ |
+LL | use std::fmt::{Debug, Result};
+ | ^^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:19:15
+ |
+LL | let ptr = std::ptr::null::<u32>();
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:20:19
+ |
+LL | let ptr_mut = ::std::ptr::null_mut::<usize>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:23:16
+ |
+LL | let cell = std::cell::Cell::new(8u32);
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `core`
+ --> $DIR/std_instead_of_core.rs:24:25
+ |
+LL | let cell_absolute = ::std::cell::Cell::new(8u32);
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider importing the item from `core`
+
+error: used import from `std` instead of `alloc`
+ --> $DIR/std_instead_of_core.rs:32:9
+ |
+LL | use std::vec;
+ | ^^^^^^^^
+ |
+ = note: `-D clippy::std-instead-of-alloc` implied by `-D warnings`
+ = help: consider importing the item from `alloc`
+
+error: used import from `std` instead of `alloc`
+ --> $DIR/std_instead_of_core.rs:33:9
+ |
+LL | use std::vec::Vec;
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider importing the item from `alloc`
+
+error: used import from `alloc` instead of `core`
+ --> $DIR/std_instead_of_core.rs:38:9
+ |
+LL | use alloc::slice::from_ref;
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::alloc-instead-of-core` implied by `-D warnings`
+ = help: consider importing the item from `core`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/str_to_string.rs b/src/tools/clippy/tests/ui/str_to_string.rs
new file mode 100644
index 000000000..08f734025
--- /dev/null
+++ b/src/tools/clippy/tests/ui/str_to_string.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::str_to_string)]
+
+fn main() {
+ let hello = "hello world".to_string();
+ let msg = &hello[..];
+ msg.to_string();
+}
diff --git a/src/tools/clippy/tests/ui/str_to_string.stderr b/src/tools/clippy/tests/ui/str_to_string.stderr
new file mode 100644
index 000000000..b1f73eda5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/str_to_string.stderr
@@ -0,0 +1,19 @@
+error: `to_string()` called on a `&str`
+ --> $DIR/str_to_string.rs:4:17
+ |
+LL | let hello = "hello world".to_string();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::str-to-string` implied by `-D warnings`
+ = help: consider using `.to_owned()`
+
+error: `to_string()` called on a `&str`
+ --> $DIR/str_to_string.rs:6:5
+ |
+LL | msg.to_string();
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider using `.to_owned()`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/string_add.rs b/src/tools/clippy/tests/ui/string_add.rs
new file mode 100644
index 000000000..30fd17c59
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_add.rs
@@ -0,0 +1,26 @@
+// aux-build:macro_rules.rs
+
+#[macro_use]
+extern crate macro_rules;
+
+#[warn(clippy::string_add)]
+#[allow(clippy::string_add_assign, unused)]
+fn main() {
+ // ignores assignment distinction
+ let mut x = "".to_owned();
+
+ for _ in 1..3 {
+ x = x + ".";
+ }
+
+ let y = "".to_owned();
+ let z = y + "...";
+
+ assert_eq!(&x, &z);
+
+ let mut x = 1;
+ x = x + 1;
+ assert_eq!(2, x);
+
+ string_add!();
+}
diff --git a/src/tools/clippy/tests/ui/string_add.stderr b/src/tools/clippy/tests/ui/string_add.stderr
new file mode 100644
index 000000000..3987641c7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_add.stderr
@@ -0,0 +1,30 @@
+error: manual implementation of an assign operation
+ --> $DIR/string_add.rs:13:9
+ |
+LL | x = x + ".";
+ | ^^^^^^^^^^^ help: replace it with: `x += "."`
+ |
+ = note: `-D clippy::assign-op-pattern` implied by `-D warnings`
+
+error: you added something to a string. Consider using `String::push_str()` instead
+ --> $DIR/string_add.rs:13:13
+ |
+LL | x = x + ".";
+ | ^^^^^^^
+ |
+ = note: `-D clippy::string-add` implied by `-D warnings`
+
+error: you added something to a string. Consider using `String::push_str()` instead
+ --> $DIR/string_add.rs:17:13
+ |
+LL | let z = y + "...";
+ | ^^^^^^^^^
+
+error: manual implementation of an assign operation
+ --> $DIR/string_add.rs:22:5
+ |
+LL | x = x + 1;
+ | ^^^^^^^^^ help: replace it with: `x += 1`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/string_add_assign.fixed b/src/tools/clippy/tests/ui/string_add_assign.fixed
new file mode 100644
index 000000000..db71bab1e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_add_assign.fixed
@@ -0,0 +1,21 @@
+// run-rustfix
+
+#[allow(clippy::string_add, unused)]
+#[warn(clippy::string_add_assign)]
+fn main() {
+ // ignores assignment distinction
+ let mut x = "".to_owned();
+
+ for _ in 1..3 {
+ x += ".";
+ }
+
+ let y = "".to_owned();
+ let z = y + "...";
+
+ assert_eq!(&x, &z);
+
+ let mut x = 1;
+ x += 1;
+ assert_eq!(2, x);
+}
diff --git a/src/tools/clippy/tests/ui/string_add_assign.rs b/src/tools/clippy/tests/ui/string_add_assign.rs
new file mode 100644
index 000000000..644991945
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_add_assign.rs
@@ -0,0 +1,21 @@
+// run-rustfix
+
+#[allow(clippy::string_add, unused)]
+#[warn(clippy::string_add_assign)]
+fn main() {
+ // ignores assignment distinction
+ let mut x = "".to_owned();
+
+ for _ in 1..3 {
+ x = x + ".";
+ }
+
+ let y = "".to_owned();
+ let z = y + "...";
+
+ assert_eq!(&x, &z);
+
+ let mut x = 1;
+ x = x + 1;
+ assert_eq!(2, x);
+}
diff --git a/src/tools/clippy/tests/ui/string_add_assign.stderr b/src/tools/clippy/tests/ui/string_add_assign.stderr
new file mode 100644
index 000000000..7676175c1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_add_assign.stderr
@@ -0,0 +1,24 @@
+error: you assigned the result of adding something to this string. Consider using `String::push_str()` instead
+ --> $DIR/string_add_assign.rs:10:9
+ |
+LL | x = x + ".";
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::string-add-assign` implied by `-D warnings`
+
+error: manual implementation of an assign operation
+ --> $DIR/string_add_assign.rs:10:9
+ |
+LL | x = x + ".";
+ | ^^^^^^^^^^^ help: replace it with: `x += "."`
+ |
+ = note: `-D clippy::assign-op-pattern` implied by `-D warnings`
+
+error: manual implementation of an assign operation
+ --> $DIR/string_add_assign.rs:19:5
+ |
+LL | x = x + 1;
+ | ^^^^^^^^^ help: replace it with: `x += 1`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/string_extend.fixed b/src/tools/clippy/tests/ui/string_extend.fixed
new file mode 100644
index 000000000..1883a9f83
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_extend.fixed
@@ -0,0 +1,32 @@
+// run-rustfix
+
+#[derive(Copy, Clone)]
+struct HasChars;
+
+impl HasChars {
+ fn chars(self) -> std::str::Chars<'static> {
+ "HasChars".chars()
+ }
+}
+
+fn main() {
+ let abc = "abc";
+ let def = String::from("def");
+ let mut s = String::new();
+
+ s.push_str(abc);
+ s.push_str(abc);
+
+ s.push_str("abc");
+ s.push_str("abc");
+
+ s.push_str(&def);
+ s.push_str(&def);
+
+ s.extend(abc.chars().skip(1));
+ s.extend("abc".chars().skip(1));
+ s.extend(['a', 'b', 'c'].iter());
+
+ let f = HasChars;
+ s.extend(f.chars());
+}
diff --git a/src/tools/clippy/tests/ui/string_extend.rs b/src/tools/clippy/tests/ui/string_extend.rs
new file mode 100644
index 000000000..07d0baa1b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_extend.rs
@@ -0,0 +1,32 @@
+// run-rustfix
+
+#[derive(Copy, Clone)]
+struct HasChars;
+
+impl HasChars {
+ fn chars(self) -> std::str::Chars<'static> {
+ "HasChars".chars()
+ }
+}
+
+fn main() {
+ let abc = "abc";
+ let def = String::from("def");
+ let mut s = String::new();
+
+ s.push_str(abc);
+ s.extend(abc.chars());
+
+ s.push_str("abc");
+ s.extend("abc".chars());
+
+ s.push_str(&def);
+ s.extend(def.chars());
+
+ s.extend(abc.chars().skip(1));
+ s.extend("abc".chars().skip(1));
+ s.extend(['a', 'b', 'c'].iter());
+
+ let f = HasChars;
+ s.extend(f.chars());
+}
diff --git a/src/tools/clippy/tests/ui/string_extend.stderr b/src/tools/clippy/tests/ui/string_extend.stderr
new file mode 100644
index 000000000..6af8c9e16
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_extend.stderr
@@ -0,0 +1,22 @@
+error: calling `.extend(_.chars())`
+ --> $DIR/string_extend.rs:18:5
+ |
+LL | s.extend(abc.chars());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(abc)`
+ |
+ = note: `-D clippy::string-extend-chars` implied by `-D warnings`
+
+error: calling `.extend(_.chars())`
+ --> $DIR/string_extend.rs:21:5
+ |
+LL | s.extend("abc".chars());
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str("abc")`
+
+error: calling `.extend(_.chars())`
+ --> $DIR/string_extend.rs:24:5
+ |
+LL | s.extend(def.chars());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(&def)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.fixed b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.fixed
new file mode 100644
index 000000000..6e665cdd5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.fixed
@@ -0,0 +1,6 @@
+// run-rustfix
+#![warn(clippy::string_from_utf8_as_bytes)]
+
+fn main() {
+ let _ = Some(&"Hello World!"[6..11]);
+}
diff --git a/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.rs b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.rs
new file mode 100644
index 000000000..670d206d3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.rs
@@ -0,0 +1,6 @@
+// run-rustfix
+#![warn(clippy::string_from_utf8_as_bytes)]
+
+fn main() {
+ let _ = std::str::from_utf8(&"Hello World!".as_bytes()[6..11]);
+}
diff --git a/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr
new file mode 100644
index 000000000..bf5e5d33e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_from_utf8_as_bytes.stderr
@@ -0,0 +1,10 @@
+error: calling a slice of `as_bytes()` with `from_utf8` should be not necessary
+ --> $DIR/string_from_utf8_as_bytes.rs:5:13
+ |
+LL | let _ = std::str::from_utf8(&"Hello World!".as_bytes()[6..11]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(&"Hello World!"[6..11])`
+ |
+ = note: `-D clippy::string-from-utf8-as-bytes` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/string_lit_as_bytes.fixed b/src/tools/clippy/tests/ui/string_lit_as_bytes.fixed
new file mode 100644
index 000000000..df2256e4f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_lit_as_bytes.fixed
@@ -0,0 +1,30 @@
+// run-rustfix
+
+#![allow(dead_code, unused_variables)]
+#![warn(clippy::string_lit_as_bytes)]
+
+fn str_lit_as_bytes() {
+ let bs = b"hello there";
+
+ let bs = br###"raw string with 3# plus " ""###;
+
+ let bs = b"lit to string".to_vec();
+ let bs = b"lit to owned".to_vec();
+
+ // no warning, because these cannot be written as byte string literals:
+ let ubs = "☃".as_bytes();
+ let ubs = "hello there! this is a very long string".as_bytes();
+
+ let ubs = "☃".to_string().into_bytes();
+ let ubs = "this is also too long and shouldn't be fixed".to_string().into_bytes();
+
+ let strify = stringify!(foobar).as_bytes();
+
+ let current_version = env!("CARGO_PKG_VERSION").as_bytes();
+
+ let includestr = include_bytes!("string_lit_as_bytes.rs");
+
+ let _ = b"string with newline\t\n";
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/string_lit_as_bytes.rs b/src/tools/clippy/tests/ui/string_lit_as_bytes.rs
new file mode 100644
index 000000000..c6bf8f732
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_lit_as_bytes.rs
@@ -0,0 +1,30 @@
+// run-rustfix
+
+#![allow(dead_code, unused_variables)]
+#![warn(clippy::string_lit_as_bytes)]
+
+fn str_lit_as_bytes() {
+ let bs = "hello there".as_bytes();
+
+ let bs = r###"raw string with 3# plus " ""###.as_bytes();
+
+ let bs = "lit to string".to_string().into_bytes();
+ let bs = "lit to owned".to_owned().into_bytes();
+
+ // no warning, because these cannot be written as byte string literals:
+ let ubs = "☃".as_bytes();
+ let ubs = "hello there! this is a very long string".as_bytes();
+
+ let ubs = "☃".to_string().into_bytes();
+ let ubs = "this is also too long and shouldn't be fixed".to_string().into_bytes();
+
+ let strify = stringify!(foobar).as_bytes();
+
+ let current_version = env!("CARGO_PKG_VERSION").as_bytes();
+
+ let includestr = include_str!("string_lit_as_bytes.rs").as_bytes();
+
+ let _ = "string with newline\t\n".as_bytes();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/string_lit_as_bytes.stderr b/src/tools/clippy/tests/ui/string_lit_as_bytes.stderr
new file mode 100644
index 000000000..f47d6161c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_lit_as_bytes.stderr
@@ -0,0 +1,40 @@
+error: calling `as_bytes()` on a string literal
+ --> $DIR/string_lit_as_bytes.rs:7:14
+ |
+LL | let bs = "hello there".as_bytes();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using a byte string literal instead: `b"hello there"`
+ |
+ = note: `-D clippy::string-lit-as-bytes` implied by `-D warnings`
+
+error: calling `as_bytes()` on a string literal
+ --> $DIR/string_lit_as_bytes.rs:9:14
+ |
+LL | let bs = r###"raw string with 3# plus " ""###.as_bytes();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using a byte string literal instead: `br###"raw string with 3# plus " ""###`
+
+error: calling `into_bytes()` on a string literal
+ --> $DIR/string_lit_as_bytes.rs:11:14
+ |
+LL | let bs = "lit to string".to_string().into_bytes();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using a byte string literal instead: `b"lit to string".to_vec()`
+
+error: calling `into_bytes()` on a string literal
+ --> $DIR/string_lit_as_bytes.rs:12:14
+ |
+LL | let bs = "lit to owned".to_owned().into_bytes();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using a byte string literal instead: `b"lit to owned".to_vec()`
+
+error: calling `as_bytes()` on `include_str!(..)`
+ --> $DIR/string_lit_as_bytes.rs:25:22
+ |
+LL | let includestr = include_str!("string_lit_as_bytes.rs").as_bytes();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `include_bytes!(..)` instead: `include_bytes!("string_lit_as_bytes.rs")`
+
+error: calling `as_bytes()` on a string literal
+ --> $DIR/string_lit_as_bytes.rs:27:13
+ |
+LL | let _ = "string with newline/t/n".as_bytes();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using a byte string literal instead: `b"string with newline/t/n"`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/string_slice.rs b/src/tools/clippy/tests/ui/string_slice.rs
new file mode 100644
index 000000000..be4dfc881
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_slice.rs
@@ -0,0 +1,10 @@
+#[warn(clippy::string_slice)]
+#[allow(clippy::no_effect)]
+
+fn main() {
+ &"Ölkanne"[1..];
+ let m = "Mötörhead";
+ &m[2..5];
+ let s = String::from(m);
+ &s[0..2];
+}
diff --git a/src/tools/clippy/tests/ui/string_slice.stderr b/src/tools/clippy/tests/ui/string_slice.stderr
new file mode 100644
index 000000000..55040bf5d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_slice.stderr
@@ -0,0 +1,22 @@
+error: indexing into a string may panic if the index is within a UTF-8 character
+ --> $DIR/string_slice.rs:5:6
+ |
+LL | &"Ölkanne"[1..];
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::string-slice` implied by `-D warnings`
+
+error: indexing into a string may panic if the index is within a UTF-8 character
+ --> $DIR/string_slice.rs:7:6
+ |
+LL | &m[2..5];
+ | ^^^^^^^
+
+error: indexing into a string may panic if the index is within a UTF-8 character
+ --> $DIR/string_slice.rs:9:6
+ |
+LL | &s[0..2];
+ | ^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/string_to_string.rs b/src/tools/clippy/tests/ui/string_to_string.rs
new file mode 100644
index 000000000..4c66855f7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_to_string.rs
@@ -0,0 +1,7 @@
+#![warn(clippy::string_to_string)]
+#![allow(clippy::redundant_clone)]
+
+fn main() {
+ let mut message = String::from("Hello");
+ let mut v = message.to_string();
+}
diff --git a/src/tools/clippy/tests/ui/string_to_string.stderr b/src/tools/clippy/tests/ui/string_to_string.stderr
new file mode 100644
index 000000000..1ebd17999
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_to_string.stderr
@@ -0,0 +1,11 @@
+error: `to_string()` called on a `String`
+ --> $DIR/string_to_string.rs:6:17
+ |
+LL | let mut v = message.to_string();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::string-to-string` implied by `-D warnings`
+ = help: consider using `.clone()`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed b/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed
new file mode 100644
index 000000000..947a59bcc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed
@@ -0,0 +1,34 @@
+// run-rustfix
+
+#![warn(clippy::strlen_on_c_strings)]
+#![allow(dead_code)]
+#![feature(rustc_private)]
+extern crate libc;
+
+#[allow(unused)]
+use libc::strlen;
+use std::ffi::{CStr, CString};
+
+fn main() {
+ // CString
+ let cstring = CString::new("foo").expect("CString::new failed");
+ let _ = cstring.as_bytes().len();
+
+ // CStr
+ let cstr = CStr::from_bytes_with_nul(b"foo\0").expect("CStr::from_bytes_with_nul failed");
+ let _ = cstr.to_bytes().len();
+
+ let _ = cstr.to_bytes().len();
+
+ let pcstr: *const &CStr = &cstr;
+ let _ = unsafe { (*pcstr).to_bytes().len() };
+
+ unsafe fn unsafe_identity<T>(x: T) -> T {
+ x
+ }
+ let _ = unsafe { unsafe_identity(cstr).to_bytes().len() };
+ let _ = unsafe { unsafe_identity(cstr) }.to_bytes().len();
+
+ let f: unsafe fn(_) -> _ = unsafe_identity;
+ let _ = unsafe { f(cstr).to_bytes().len() };
+}
diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.rs b/src/tools/clippy/tests/ui/strlen_on_c_strings.rs
new file mode 100644
index 000000000..1237f1ab0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.rs
@@ -0,0 +1,34 @@
+// run-rustfix
+
+#![warn(clippy::strlen_on_c_strings)]
+#![allow(dead_code)]
+#![feature(rustc_private)]
+extern crate libc;
+
+#[allow(unused)]
+use libc::strlen;
+use std::ffi::{CStr, CString};
+
+fn main() {
+ // CString
+ let cstring = CString::new("foo").expect("CString::new failed");
+ let _ = unsafe { libc::strlen(cstring.as_ptr()) };
+
+ // CStr
+ let cstr = CStr::from_bytes_with_nul(b"foo\0").expect("CStr::from_bytes_with_nul failed");
+ let _ = unsafe { libc::strlen(cstr.as_ptr()) };
+
+ let _ = unsafe { strlen(cstr.as_ptr()) };
+
+ let pcstr: *const &CStr = &cstr;
+ let _ = unsafe { strlen((*pcstr).as_ptr()) };
+
+ unsafe fn unsafe_identity<T>(x: T) -> T {
+ x
+ }
+ let _ = unsafe { strlen(unsafe_identity(cstr).as_ptr()) };
+ let _ = unsafe { strlen(unsafe { unsafe_identity(cstr) }.as_ptr()) };
+
+ let f: unsafe fn(_) -> _ = unsafe_identity;
+ let _ = unsafe { strlen(f(cstr).as_ptr()) };
+}
diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr
new file mode 100644
index 000000000..296268a5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr
@@ -0,0 +1,46 @@
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:15:13
+ |
+LL | let _ = unsafe { libc::strlen(cstring.as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstring.as_bytes().len()`
+ |
+ = note: `-D clippy::strlen-on-c-strings` implied by `-D warnings`
+
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:19:13
+ |
+LL | let _ = unsafe { libc::strlen(cstr.as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstr.to_bytes().len()`
+
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:21:13
+ |
+LL | let _ = unsafe { strlen(cstr.as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstr.to_bytes().len()`
+
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:24:22
+ |
+LL | let _ = unsafe { strlen((*pcstr).as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(*pcstr).to_bytes().len()`
+
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:29:22
+ |
+LL | let _ = unsafe { strlen(unsafe_identity(cstr).as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unsafe_identity(cstr).to_bytes().len()`
+
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:30:13
+ |
+LL | let _ = unsafe { strlen(unsafe { unsafe_identity(cstr) }.as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unsafe { unsafe_identity(cstr) }.to_bytes().len()`
+
+error: using `libc::strlen` on a `CString` or `CStr` value
+ --> $DIR/strlen_on_c_strings.rs:33:22
+ |
+LL | let _ = unsafe { strlen(f(cstr).as_ptr()) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `f(cstr).to_bytes().len()`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/struct_excessive_bools.rs b/src/tools/clippy/tests/ui/struct_excessive_bools.rs
new file mode 100644
index 000000000..ce4fe830a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/struct_excessive_bools.rs
@@ -0,0 +1,44 @@
+#![warn(clippy::struct_excessive_bools)]
+
+macro_rules! foo {
+ () => {
+ struct MacroFoo {
+ a: bool,
+ b: bool,
+ c: bool,
+ d: bool,
+ }
+ };
+}
+
+foo!();
+
+struct Foo {
+ a: bool,
+ b: bool,
+ c: bool,
+}
+
+struct BadFoo {
+ a: bool,
+ b: bool,
+ c: bool,
+ d: bool,
+}
+
+#[repr(C)]
+struct Bar {
+ a: bool,
+ b: bool,
+ c: bool,
+ d: bool,
+}
+
+fn main() {
+ struct FooFoo {
+ a: bool,
+ b: bool,
+ c: bool,
+ d: bool,
+ }
+}
diff --git a/src/tools/clippy/tests/ui/struct_excessive_bools.stderr b/src/tools/clippy/tests/ui/struct_excessive_bools.stderr
new file mode 100644
index 000000000..2941bf298
--- /dev/null
+++ b/src/tools/clippy/tests/ui/struct_excessive_bools.stderr
@@ -0,0 +1,29 @@
+error: more than 3 bools in a struct
+ --> $DIR/struct_excessive_bools.rs:22:1
+ |
+LL | / struct BadFoo {
+LL | | a: bool,
+LL | | b: bool,
+LL | | c: bool,
+LL | | d: bool,
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::struct-excessive-bools` implied by `-D warnings`
+ = help: consider using a state machine or refactoring bools into two-variant enums
+
+error: more than 3 bools in a struct
+ --> $DIR/struct_excessive_bools.rs:38:5
+ |
+LL | / struct FooFoo {
+LL | | a: bool,
+LL | | b: bool,
+LL | | c: bool,
+LL | | d: bool,
+LL | | }
+ | |_____^
+ |
+ = help: consider using a state machine or refactoring bools into two-variant enums
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_arithmetic_impl.rs b/src/tools/clippy/tests/ui/suspicious_arithmetic_impl.rs
new file mode 100644
index 000000000..ae253a048
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_arithmetic_impl.rs
@@ -0,0 +1,170 @@
+#![warn(clippy::suspicious_arithmetic_impl)]
+use std::ops::{
+ Add, AddAssign, BitAnd, BitOr, BitOrAssign, BitXor, Div, DivAssign, Mul, MulAssign, Rem, Shl, Shr, Sub,
+};
+
+#[derive(Copy, Clone)]
+struct Foo(u32);
+
+impl Add for Foo {
+ type Output = Foo;
+
+ fn add(self, other: Self) -> Self {
+ Foo(self.0 - other.0)
+ }
+}
+
+impl AddAssign for Foo {
+ fn add_assign(&mut self, other: Foo) {
+ *self = *self - other;
+ }
+}
+
+impl BitOrAssign for Foo {
+ fn bitor_assign(&mut self, other: Foo) {
+ let idx = other.0;
+ self.0 |= 1 << idx; // OK: BinOpKind::Shl part of AssignOp as child node
+ }
+}
+
+impl MulAssign for Foo {
+ fn mul_assign(&mut self, other: Foo) {
+ self.0 /= other.0;
+ }
+}
+
+impl DivAssign for Foo {
+ fn div_assign(&mut self, other: Foo) {
+ self.0 /= other.0; // OK: BinOpKind::Div == DivAssign
+ }
+}
+
+impl Mul for Foo {
+ type Output = Foo;
+
+ fn mul(self, other: Foo) -> Foo {
+ Foo(self.0 * other.0 % 42) // OK: BinOpKind::Rem part of BiExpr as parent node
+ }
+}
+
+impl Sub for Foo {
+ type Output = Foo;
+
+ fn sub(self, other: Self) -> Self {
+ Foo(self.0 * other.0 - 42) // OK: BinOpKind::Mul part of BiExpr as child node
+ }
+}
+
+impl Div for Foo {
+ type Output = Foo;
+
+ fn div(self, other: Self) -> Self {
+ Foo(do_nothing(self.0 + other.0) / 42) // OK: BinOpKind::Add part of BiExpr as child node
+ }
+}
+
+impl Rem for Foo {
+ type Output = Foo;
+
+ fn rem(self, other: Self) -> Self {
+ Foo(self.0 / other.0)
+ }
+}
+
+impl BitAnd for Foo {
+ type Output = Foo;
+
+ fn bitand(self, other: Self) -> Self {
+ Foo(self.0 | other.0)
+ }
+}
+
+impl BitOr for Foo {
+ type Output = Foo;
+
+ fn bitor(self, other: Self) -> Self {
+ Foo(self.0 ^ other.0)
+ }
+}
+
+impl BitXor for Foo {
+ type Output = Foo;
+
+ fn bitxor(self, other: Self) -> Self {
+ Foo(self.0 & other.0)
+ }
+}
+
+impl Shl for Foo {
+ type Output = Foo;
+
+ fn shl(self, other: Self) -> Self {
+ Foo(self.0 >> other.0)
+ }
+}
+
+impl Shr for Foo {
+ type Output = Foo;
+
+ fn shr(self, other: Self) -> Self {
+ Foo(self.0 << other.0)
+ }
+}
+
+struct Bar(i32);
+
+impl Add for Bar {
+ type Output = Bar;
+
+ fn add(self, other: Self) -> Self {
+ Bar(self.0 & !other.0) // OK: Not part of BiExpr as child node
+ }
+}
+
+impl Sub for Bar {
+ type Output = Bar;
+
+ fn sub(self, other: Self) -> Self {
+ if self.0 <= other.0 {
+ Bar(-(self.0 & other.0)) // OK: Neg part of BiExpr as parent node
+ } else {
+ Bar(0)
+ }
+ }
+}
+
+fn main() {}
+
+fn do_nothing(x: u32) -> u32 {
+ x
+}
+
+struct MultipleBinops(u32);
+
+impl Add for MultipleBinops {
+ type Output = MultipleBinops;
+
+ // OK: multiple Binops in `add` impl
+ fn add(self, other: Self) -> Self::Output {
+ let mut result = self.0 + other.0;
+ if result >= u32::max_value() {
+ result -= u32::max_value();
+ }
+ MultipleBinops(result)
+ }
+}
+
+impl Mul for MultipleBinops {
+ type Output = MultipleBinops;
+
+ // OK: multiple Binops in `mul` impl
+ fn mul(self, other: Self) -> Self::Output {
+ let mut result: u32 = 0;
+ let size = std::cmp::max(self.0, other.0) as usize;
+ let mut v = vec![0; size + 1];
+ for i in 0..size + 1 {
+ result *= i as u32;
+ }
+ MultipleBinops(result)
+ }
+}
diff --git a/src/tools/clippy/tests/ui/suspicious_arithmetic_impl.stderr b/src/tools/clippy/tests/ui/suspicious_arithmetic_impl.stderr
new file mode 100644
index 000000000..ced130587
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_arithmetic_impl.stderr
@@ -0,0 +1,60 @@
+error: suspicious use of `-` in `Add` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:13:20
+ |
+LL | Foo(self.0 - other.0)
+ | ^
+ |
+ = note: `-D clippy::suspicious-arithmetic-impl` implied by `-D warnings`
+
+error: suspicious use of `-` in `AddAssign` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:19:23
+ |
+LL | *self = *self - other;
+ | ^
+ |
+ = note: `-D clippy::suspicious-op-assign-impl` implied by `-D warnings`
+
+error: suspicious use of `/` in `MulAssign` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:32:16
+ |
+LL | self.0 /= other.0;
+ | ^^
+
+error: suspicious use of `/` in `Rem` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:70:20
+ |
+LL | Foo(self.0 / other.0)
+ | ^
+
+error: suspicious use of `|` in `BitAnd` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:78:20
+ |
+LL | Foo(self.0 | other.0)
+ | ^
+
+error: suspicious use of `^` in `BitOr` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:86:20
+ |
+LL | Foo(self.0 ^ other.0)
+ | ^
+
+error: suspicious use of `&` in `BitXor` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:94:20
+ |
+LL | Foo(self.0 & other.0)
+ | ^
+
+error: suspicious use of `>>` in `Shl` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:102:20
+ |
+LL | Foo(self.0 >> other.0)
+ | ^^
+
+error: suspicious use of `<<` in `Shr` impl
+ --> $DIR/suspicious_arithmetic_impl.rs:110:20
+ |
+LL | Foo(self.0 << other.0)
+ | ^^
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_else_formatting.rs b/src/tools/clippy/tests/ui/suspicious_else_formatting.rs
new file mode 100644
index 000000000..21753e5dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_else_formatting.rs
@@ -0,0 +1,115 @@
+// aux-build:proc_macro_suspicious_else_formatting.rs
+
+#![warn(clippy::suspicious_else_formatting)]
+#![allow(clippy::if_same_then_else, clippy::let_unit_value)]
+
+extern crate proc_macro_suspicious_else_formatting;
+use proc_macro_suspicious_else_formatting::DeriveBadSpan;
+
+fn foo() -> bool {
+ true
+}
+
+#[rustfmt::skip]
+fn main() {
+ // weird `else` formatting:
+ if foo() {
+ } {
+ }
+
+ if foo() {
+ } if foo() {
+ }
+
+ let _ = { // if as the last expression
+ let _ = 0;
+
+ if foo() {
+ } if foo() {
+ }
+ else {
+ }
+ };
+
+ let _ = { // if in the middle of a block
+ if foo() {
+ } if foo() {
+ }
+ else {
+ }
+
+ let _ = 0;
+ };
+
+ if foo() {
+ } else
+ {
+ }
+
+ // This is fine, though weird. Allman style braces on the else.
+ if foo() {
+ }
+ else
+ {
+ }
+
+ if foo() {
+ } else
+ if foo() { // the span of the above error should continue here
+ }
+
+ if foo() {
+ }
+ else
+ if foo() { // the span of the above error should continue here
+ }
+
+ // those are ok:
+ if foo() {
+ }
+ {
+ }
+
+ if foo() {
+ } else {
+ }
+
+ if foo() {
+ }
+ else {
+ }
+
+ if foo() {
+ }
+ if foo() {
+ }
+
+ // Almost Allman style braces. Lint these.
+ if foo() {
+ }
+
+ else
+ {
+
+ }
+
+ if foo() {
+ }
+ else
+
+ {
+
+ }
+
+ // #3864 - Allman style braces
+ if foo()
+ {
+ }
+ else
+ {
+ }
+}
+
+// #7650 - Don't lint. Proc-macro using bad spans for `if` expressions.
+#[derive(DeriveBadSpan)]
+struct _Foo(u32, u32);
diff --git a/src/tools/clippy/tests/ui/suspicious_else_formatting.stderr b/src/tools/clippy/tests/ui/suspicious_else_formatting.stderr
new file mode 100644
index 000000000..ee68eb5a7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_else_formatting.stderr
@@ -0,0 +1,90 @@
+error: this looks like an `else {..}` but the `else` is missing
+ --> $DIR/suspicious_else_formatting.rs:17:6
+ |
+LL | } {
+ | ^
+ |
+ = note: `-D clippy::suspicious-else-formatting` implied by `-D warnings`
+ = note: to remove this lint, add the missing `else` or add a new line before the next block
+
+error: this looks like an `else if` but the `else` is missing
+ --> $DIR/suspicious_else_formatting.rs:21:6
+ |
+LL | } if foo() {
+ | ^
+ |
+ = note: to remove this lint, add the missing `else` or add a new line before the second `if`
+
+error: this looks like an `else if` but the `else` is missing
+ --> $DIR/suspicious_else_formatting.rs:28:10
+ |
+LL | } if foo() {
+ | ^
+ |
+ = note: to remove this lint, add the missing `else` or add a new line before the second `if`
+
+error: this looks like an `else if` but the `else` is missing
+ --> $DIR/suspicious_else_formatting.rs:36:10
+ |
+LL | } if foo() {
+ | ^
+ |
+ = note: to remove this lint, add the missing `else` or add a new line before the second `if`
+
+error: this is an `else {..}` but the formatting might hide it
+ --> $DIR/suspicious_else_formatting.rs:45:6
+ |
+LL | } else
+ | ______^
+LL | | {
+ | |____^
+ |
+ = note: to remove this lint, remove the `else` or remove the new line between `else` and `{..}`
+
+error: this is an `else if` but the formatting might hide it
+ --> $DIR/suspicious_else_formatting.rs:57:6
+ |
+LL | } else
+ | ______^
+LL | | if foo() { // the span of the above error should continue here
+ | |____^
+ |
+ = note: to remove this lint, remove the `else` or remove the new line between `else` and `if`
+
+error: this is an `else if` but the formatting might hide it
+ --> $DIR/suspicious_else_formatting.rs:62:6
+ |
+LL | }
+ | ______^
+LL | | else
+LL | | if foo() { // the span of the above error should continue here
+ | |____^
+ |
+ = note: to remove this lint, remove the `else` or remove the new line between `else` and `if`
+
+error: this is an `else {..}` but the formatting might hide it
+ --> $DIR/suspicious_else_formatting.rs:89:6
+ |
+LL | }
+ | ______^
+LL | |
+LL | | else
+LL | | {
+ | |____^
+ |
+ = note: to remove this lint, remove the `else` or remove the new line between `else` and `{..}`
+
+error: this is an `else {..}` but the formatting might hide it
+ --> $DIR/suspicious_else_formatting.rs:97:6
+ |
+LL | }
+ | ______^
+LL | | else
+LL | |
+LL | | {
+ | |____^
+ |
+ = note: to remove this lint, remove the `else` or remove the new line between `else` and `{..}`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_map.rs b/src/tools/clippy/tests/ui/suspicious_map.rs
new file mode 100644
index 000000000..3a2a10cf0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_map.rs
@@ -0,0 +1,32 @@
+#![warn(clippy::suspicious_map)]
+
+fn main() {
+ let _ = (0..3).map(|x| x + 2).count();
+
+ let f = |x| x + 1;
+ let _ = (0..3).map(f).count();
+}
+
+fn negative() {
+ // closure with side effects
+ let mut sum = 0;
+ let _ = (0..3).map(|x| sum += x).count();
+
+ // closure variable with side effects
+ let ext_closure = |x| sum += x;
+ let _ = (0..3).map(ext_closure).count();
+
+ // closure that returns unit
+ let _ = (0..3)
+ .map(|x| {
+ // do nothing
+ })
+ .count();
+
+ // external function
+ let _ = (0..3).map(do_something).count();
+}
+
+fn do_something<T>(t: T) -> String {
+ unimplemented!()
+}
diff --git a/src/tools/clippy/tests/ui/suspicious_map.stderr b/src/tools/clippy/tests/ui/suspicious_map.stderr
new file mode 100644
index 000000000..3ffcd1a90
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_map.stderr
@@ -0,0 +1,19 @@
+error: this call to `map()` won't have an effect on the call to `count()`
+ --> $DIR/suspicious_map.rs:4:13
+ |
+LL | let _ = (0..3).map(|x| x + 2).count();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::suspicious-map` implied by `-D warnings`
+ = help: make sure you did not confuse `map` with `filter`, `for_each` or `inspect`
+
+error: this call to `map()` won't have an effect on the call to `count()`
+ --> $DIR/suspicious_map.rs:7:13
+ |
+LL | let _ = (0..3).map(f).count();
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: make sure you did not confuse `map` with `filter`, `for_each` or `inspect`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_operation_groupings.fixed b/src/tools/clippy/tests/ui/suspicious_operation_groupings.fixed
new file mode 100644
index 000000000..ede8a39fe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_operation_groupings.fixed
@@ -0,0 +1,209 @@
+// run-rustfix
+#![warn(clippy::suspicious_operation_groupings)]
+#![allow(dead_code, unused_parens, clippy::eq_op)]
+
+struct Vec3 {
+ x: f64,
+ y: f64,
+ z: f64,
+}
+
+impl Eq for Vec3 {}
+
+impl PartialEq for Vec3 {
+ fn eq(&self, other: &Self) -> bool {
+ // This should trigger the lint because `self.x` is compared to `other.y`
+ self.x == other.x && self.y == other.y && self.z == other.z
+ }
+}
+
+struct S {
+ a: i32,
+ b: i32,
+ c: i32,
+ d: i32,
+}
+
+fn buggy_ab_cmp(s1: &S, s2: &S) -> bool {
+ // There's no `s1.b`
+ s1.a < s2.a && s1.b < s2.b
+}
+
+struct SaOnly {
+ a: i32,
+}
+
+impl S {
+ fn a(&self) -> i32 {
+ 0
+ }
+}
+
+fn do_not_give_bad_suggestions_for_this_unusual_expr(s1: &S, s2: &SaOnly) -> bool {
+ // This is superficially similar to `buggy_ab_cmp`, but we should not suggest
+ // `s2.b` since that is invalid.
+ s1.a < s2.a && s1.a() < s1.b
+}
+
+fn do_not_give_bad_suggestions_for_this_macro_expr(s1: &S, s2: &SaOnly) -> bool {
+ macro_rules! s1 {
+ () => {
+ S {
+ a: 1,
+ b: 1,
+ c: 1,
+ d: 1,
+ }
+ };
+ }
+
+ // This is superficially similar to `buggy_ab_cmp`, but we should not suggest
+ // `s2.b` since that is invalid.
+ s1.a < s2.a && s1!().a < s1.b
+}
+
+fn do_not_give_bad_suggestions_for_this_incorrect_expr(s1: &S, s2: &SaOnly) -> bool {
+    // There are two `s1.b`s, but we should not suggest `s2.b` since that would be invalid
+ s1.a < s2.a && s1.b < s1.b
+}
+
+fn permissable(s1: &S, s2: &S) -> bool {
+ // Something like this seems like it might actually be what is desired.
+ s1.a == s2.b
+}
+
+fn non_boolean_operators(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.c + s1.d * s2.d
+}
+
+fn odd_number_of_pairs(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.b`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_middle_change_left(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_middle_change_right(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.b`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_start(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.a`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_end(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.c
+}
+
+fn the_cross_product_should_not_lint(s1: &S, s2: &S) -> (i32, i32, i32) {
+ (
+ s1.b * s2.c - s1.c * s2.b,
+ s1.c * s2.a - s1.a * s2.c,
+ s1.a * s2.b - s1.b * s2.a,
+ )
+}
+
+fn outer_parens_simple(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.b`
+ (s1.a * s2.a + s1.b * s2.b)
+}
+
+fn outer_parens(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (s1.a * s2.a + s1.b * s2.b + s1.c * s2.c + s1.d * s2.d)
+}
+
+fn inner_parens(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.c) + (s1.d * s2.d)
+}
+
+fn outer_and_some_inner_parens(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ ((s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.c) + (s1.d * s2.d))
+}
+
+fn all_parens_balanced_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (((s1.a * s2.a) + (s1.b * s2.b)) + ((s1.c * s2.c) + (s1.d * s2.d)))
+}
+
+fn all_parens_left_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (((s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.c)) + (s1.d * s2.d))
+}
+
+fn all_parens_right_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ ((s1.a * s2.a) + ((s1.b * s2.b) + (s1.c * s2.c) + (s1.d * s2.d)))
+}
+
+fn inside_other_binop_expression(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ (s1.a * s2.a + s1.b * s2.b) / 2
+}
+
+fn inside_function_call(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ i32::swap_bytes(s1.a * s2.a + s1.b * s2.b)
+}
+
+fn inside_larger_boolean_expression(s1: &S, s2: &S) -> bool {
+ // There's no `s1.c`
+ s1.a > 0 && s1.b > 0 && s1.c == s2.c && s1.d == s2.d
+}
+
+fn inside_larger_boolean_expression_with_unsorted_ops(s1: &S, s2: &S) -> bool {
+ // There's no `s1.c`
+ s1.a > 0 && s1.c == s2.c && s1.b > 0 && s1.d == s2.d
+}
+
+struct Nested {
+ inner: ((i32,), (i32,), (i32,)),
+}
+
+fn changed_middle_ident(n1: &Nested, n2: &Nested) -> bool {
+ // There's no `n2.inner.2.0`
+ (n1.inner.0).0 == (n2.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.2).0
+}
+
+// `eq_op` should catch this one.
+fn changed_initial_ident(n1: &Nested, n2: &Nested) -> bool {
+ // There's no `n2.inner.0.0`
+ (n1.inner.0).0 == (n1.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.2).0
+}
+
+fn inside_fn_with_similar_expression(s1: &S, s2: &S, strict: bool) -> bool {
+ if strict {
+ s1.a < s2.a && s1.b < s2.b
+ } else {
+ // There's no `s1.b` in this subexpression
+ s1.a <= s2.a && s1.b <= s2.b
+ }
+}
+
+fn inside_an_if_statement(s1: &mut S, s2: &S) {
+ // There's no `s1.b`
+ if s1.a < s2.a && s1.b < s2.b {
+ s1.c = s2.c;
+ }
+}
+
+fn maximum_unary_minus_right_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ -(-(-s1.a * -s2.a) + (-(-s1.b * -s2.b) + -(-s1.c * -s2.c) + -(-s1.d * -s2.d)))
+}
+
+fn unary_minus_and_an_if_expression(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ -(if -s1.a < -s2.a && -s1.b < -s2.b { s1.c } else { s2.a })
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/suspicious_operation_groupings.rs b/src/tools/clippy/tests/ui/suspicious_operation_groupings.rs
new file mode 100644
index 000000000..26ce97bb3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_operation_groupings.rs
@@ -0,0 +1,209 @@
+// run-rustfix
+#![warn(clippy::suspicious_operation_groupings)]
+#![allow(dead_code, unused_parens, clippy::eq_op)]
+
+struct Vec3 {
+ x: f64,
+ y: f64,
+ z: f64,
+}
+
+impl Eq for Vec3 {}
+
+impl PartialEq for Vec3 {
+ fn eq(&self, other: &Self) -> bool {
+ // This should trigger the lint because `self.x` is compared to `other.y`
+ self.x == other.y && self.y == other.y && self.z == other.z
+ }
+}
+
+struct S {
+ a: i32,
+ b: i32,
+ c: i32,
+ d: i32,
+}
+
+fn buggy_ab_cmp(s1: &S, s2: &S) -> bool {
+ // There's no `s1.b`
+ s1.a < s2.a && s1.a < s2.b
+}
+
+struct SaOnly {
+ a: i32,
+}
+
+impl S {
+ fn a(&self) -> i32 {
+ 0
+ }
+}
+
+fn do_not_give_bad_suggestions_for_this_unusual_expr(s1: &S, s2: &SaOnly) -> bool {
+ // This is superficially similar to `buggy_ab_cmp`, but we should not suggest
+ // `s2.b` since that is invalid.
+ s1.a < s2.a && s1.a() < s1.b
+}
+
+fn do_not_give_bad_suggestions_for_this_macro_expr(s1: &S, s2: &SaOnly) -> bool {
+ macro_rules! s1 {
+ () => {
+ S {
+ a: 1,
+ b: 1,
+ c: 1,
+ d: 1,
+ }
+ };
+ }
+
+ // This is superficially similar to `buggy_ab_cmp`, but we should not suggest
+ // `s2.b` since that is invalid.
+ s1.a < s2.a && s1!().a < s1.b
+}
+
+fn do_not_give_bad_suggestions_for_this_incorrect_expr(s1: &S, s2: &SaOnly) -> bool {
+    // There are two `s1.b`s, but we should not suggest `s2.b` since that would be invalid
+ s1.a < s2.a && s1.b < s1.b
+}
+
+fn permissable(s1: &S, s2: &S) -> bool {
+ // Something like this seems like it might actually be what is desired.
+ s1.a == s2.b
+}
+
+fn non_boolean_operators(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s2.b + s1.d * s2.d
+}
+
+fn odd_number_of_pairs(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.b`
+ s1.a * s2.a + s1.b * s2.c + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_middle_change_left(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ s1.a * s2.a + s2.b * s2.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_middle_change_right(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.b`
+ s1.a * s2.a + s1.b * s1.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_start(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.a`
+ s1.a * s1.a + s1.b * s2.b + s1.c * s2.c
+}
+
+fn not_caught_by_eq_op_end(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ s1.a * s2.a + s1.b * s2.b + s1.c * s1.c
+}
+
+fn the_cross_product_should_not_lint(s1: &S, s2: &S) -> (i32, i32, i32) {
+ (
+ s1.b * s2.c - s1.c * s2.b,
+ s1.c * s2.a - s1.a * s2.c,
+ s1.a * s2.b - s1.b * s2.a,
+ )
+}
+
+fn outer_parens_simple(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.b`
+ (s1.a * s2.a + s1.b * s1.b)
+}
+
+fn outer_parens(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (s1.a * s2.a + s1.b * s2.b + s1.c * s2.b + s1.d * s2.d)
+}
+
+fn inner_parens(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.b) + (s1.d * s2.d)
+}
+
+fn outer_and_some_inner_parens(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ ((s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.b) + (s1.d * s2.d))
+}
+
+fn all_parens_balanced_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (((s1.a * s2.a) + (s1.b * s2.b)) + ((s1.c * s2.b) + (s1.d * s2.d)))
+}
+
+fn all_parens_left_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ (((s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.b)) + (s1.d * s2.d))
+}
+
+fn all_parens_right_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ ((s1.a * s2.a) + ((s1.b * s2.b) + (s1.c * s2.b) + (s1.d * s2.d)))
+}
+
+fn inside_other_binop_expression(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ (s1.a * s2.a + s2.b * s2.b) / 2
+}
+
+fn inside_function_call(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ i32::swap_bytes(s1.a * s2.a + s2.b * s2.b)
+}
+
+fn inside_larger_boolean_expression(s1: &S, s2: &S) -> bool {
+ // There's no `s1.c`
+ s1.a > 0 && s1.b > 0 && s1.d == s2.c && s1.d == s2.d
+}
+
+fn inside_larger_boolean_expression_with_unsorted_ops(s1: &S, s2: &S) -> bool {
+ // There's no `s1.c`
+ s1.a > 0 && s1.d == s2.c && s1.b > 0 && s1.d == s2.d
+}
+
+struct Nested {
+ inner: ((i32,), (i32,), (i32,)),
+}
+
+fn changed_middle_ident(n1: &Nested, n2: &Nested) -> bool {
+ // There's no `n2.inner.2.0`
+ (n1.inner.0).0 == (n2.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.1).0
+}
+
+// `eq_op` should catch this one.
+fn changed_initial_ident(n1: &Nested, n2: &Nested) -> bool {
+ // There's no `n2.inner.0.0`
+ (n1.inner.0).0 == (n1.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.2).0
+}
+
+fn inside_fn_with_similar_expression(s1: &S, s2: &S, strict: bool) -> bool {
+ if strict {
+ s1.a < s2.a && s1.b < s2.b
+ } else {
+ // There's no `s1.b` in this subexpression
+ s1.a <= s2.a && s1.a <= s2.b
+ }
+}
+
+fn inside_an_if_statement(s1: &mut S, s2: &S) {
+ // There's no `s1.b`
+ if s1.a < s2.a && s1.a < s2.b {
+ s1.c = s2.c;
+ }
+}
+
+fn maximum_unary_minus_right_tree(s1: &S, s2: &S) -> i32 {
+ // There's no `s2.c`
+ -(-(-s1.a * -s2.a) + (-(-s1.b * -s2.b) + -(-s1.c * -s2.b) + -(-s1.d * -s2.d)))
+}
+
+fn unary_minus_and_an_if_expression(s1: &S, s2: &S) -> i32 {
+ // There's no `s1.b`
+ -(if -s1.a < -s2.a && -s1.a < -s2.b { s1.c } else { s2.a })
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/suspicious_operation_groupings.stderr b/src/tools/clippy/tests/ui/suspicious_operation_groupings.stderr
new file mode 100644
index 000000000..29f229245
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_operation_groupings.stderr
@@ -0,0 +1,160 @@
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:16:9
+ |
+LL | self.x == other.y && self.y == other.y && self.z == other.z
+ | ^^^^^^^^^^^^^^^^^ help: did you mean: `self.x == other.x`
+ |
+ = note: `-D clippy::suspicious-operation-groupings` implied by `-D warnings`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:29:20
+ |
+LL | s1.a < s2.a && s1.a < s2.b
+ | ^^^^^^^^^^^ help: did you mean: `s1.b < s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:77:33
+ |
+LL | s1.a * s2.a + s1.b * s2.b + s1.c * s2.b + s1.d * s2.d
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:82:19
+ |
+LL | s1.a * s2.a + s1.b * s2.c + s1.c * s2.c
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:82:19
+ |
+LL | s1.a * s2.a + s1.b * s2.c + s1.c * s2.c
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:87:19
+ |
+LL | s1.a * s2.a + s2.b * s2.b + s1.c * s2.c
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:92:19
+ |
+LL | s1.a * s2.a + s1.b * s1.b + s1.c * s2.c
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:97:5
+ |
+LL | s1.a * s1.a + s1.b * s2.b + s1.c * s2.c
+ | ^^^^^^^^^^^ help: did you mean: `s1.a * s2.a`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:102:33
+ |
+LL | s1.a * s2.a + s1.b * s2.b + s1.c * s1.c
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:115:20
+ |
+LL | (s1.a * s2.a + s1.b * s1.b)
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:120:34
+ |
+LL | (s1.a * s2.a + s1.b * s2.b + s1.c * s2.b + s1.d * s2.d)
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:125:38
+ |
+LL | (s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.b) + (s1.d * s2.d)
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:130:39
+ |
+LL | ((s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.b) + (s1.d * s2.d))
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:135:42
+ |
+LL | (((s1.a * s2.a) + (s1.b * s2.b)) + ((s1.c * s2.b) + (s1.d * s2.d)))
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:135:42
+ |
+LL | (((s1.a * s2.a) + (s1.b * s2.b)) + ((s1.c * s2.b) + (s1.d * s2.d)))
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:140:40
+ |
+LL | (((s1.a * s2.a) + (s1.b * s2.b) + (s1.c * s2.b)) + (s1.d * s2.d))
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:145:40
+ |
+LL | ((s1.a * s2.a) + ((s1.b * s2.b) + (s1.c * s2.b) + (s1.d * s2.d)))
+ | ^^^^^^^^^^^ help: did you mean: `s1.c * s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:150:20
+ |
+LL | (s1.a * s2.a + s2.b * s2.b) / 2
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:155:35
+ |
+LL | i32::swap_bytes(s1.a * s2.a + s2.b * s2.b)
+ | ^^^^^^^^^^^ help: did you mean: `s1.b * s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:160:29
+ |
+LL | s1.a > 0 && s1.b > 0 && s1.d == s2.c && s1.d == s2.d
+ | ^^^^^^^^^^^^ help: did you mean: `s1.c == s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:165:17
+ |
+LL | s1.a > 0 && s1.d == s2.c && s1.b > 0 && s1.d == s2.d
+ | ^^^^^^^^^^^^ help: did you mean: `s1.c == s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:174:77
+ |
+LL | (n1.inner.0).0 == (n2.inner.0).0 && (n1.inner.1).0 == (n2.inner.1).0 && (n1.inner.2).0 == (n2.inner.1).0
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean: `(n1.inner.2).0 == (n2.inner.2).0`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:188:25
+ |
+LL | s1.a <= s2.a && s1.a <= s2.b
+ | ^^^^^^^^^^^^ help: did you mean: `s1.b <= s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:194:23
+ |
+LL | if s1.a < s2.a && s1.a < s2.b {
+ | ^^^^^^^^^^^ help: did you mean: `s1.b < s2.b`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:201:48
+ |
+LL | -(-(-s1.a * -s2.a) + (-(-s1.b * -s2.b) + -(-s1.c * -s2.b) + -(-s1.d * -s2.d)))
+ | ^^^^^^^^^^^^^ help: did you mean: `-s1.c * -s2.c`
+
+error: this sequence of operators looks suspiciously like a bug
+ --> $DIR/suspicious_operation_groupings.rs:206:27
+ |
+LL | -(if -s1.a < -s2.a && -s1.a < -s2.b { s1.c } else { s2.a })
+ | ^^^^^^^^^^^^^ help: did you mean: `-s1.b < -s2.b`
+
+error: aborting due to 26 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_splitn.rs b/src/tools/clippy/tests/ui/suspicious_splitn.rs
new file mode 100644
index 000000000..528f2ddcc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_splitn.rs
@@ -0,0 +1,21 @@
+#![warn(clippy::suspicious_splitn)]
+#![allow(clippy::needless_splitn)]
+
+fn main() {
+ let _ = "a,b,c".splitn(3, ',');
+ let _ = [0, 1, 2, 1, 3].splitn(3, |&x| x == 1);
+ let _ = "".splitn(0, ',');
+ let _ = [].splitn(0, |&x: &u32| x == 1);
+
+ let _ = "a,b".splitn(0, ',');
+ let _ = "a,b".rsplitn(0, ',');
+ let _ = "a,b".splitn(1, ',');
+ let _ = [0, 1, 2].splitn(0, |&x| x == 1);
+ let _ = [0, 1, 2].splitn_mut(0, |&x| x == 1);
+ let _ = [0, 1, 2].splitn(1, |&x| x == 1);
+ let _ = [0, 1, 2].rsplitn_mut(1, |&x| x == 1);
+
+ const X: usize = 0;
+ let _ = "a,b".splitn(X + 1, ',');
+ let _ = "a,b".splitn(X, ',');
+}
diff --git a/src/tools/clippy/tests/ui/suspicious_splitn.stderr b/src/tools/clippy/tests/ui/suspicious_splitn.stderr
new file mode 100644
index 000000000..3bcd681fa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_splitn.stderr
@@ -0,0 +1,75 @@
+error: `splitn` called with `0` splits
+ --> $DIR/suspicious_splitn.rs:10:13
+ |
+LL | let _ = "a,b".splitn(0, ',');
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::suspicious-splitn` implied by `-D warnings`
+ = note: the resulting iterator will always return `None`
+
+error: `rsplitn` called with `0` splits
+ --> $DIR/suspicious_splitn.rs:11:13
+ |
+LL | let _ = "a,b".rsplitn(0, ',');
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return `None`
+
+error: `splitn` called with `1` split
+ --> $DIR/suspicious_splitn.rs:12:13
+ |
+LL | let _ = "a,b".splitn(1, ',');
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return the entire string followed by `None`
+
+error: `splitn` called with `0` splits
+ --> $DIR/suspicious_splitn.rs:13:13
+ |
+LL | let _ = [0, 1, 2].splitn(0, |&x| x == 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return `None`
+
+error: `splitn_mut` called with `0` splits
+ --> $DIR/suspicious_splitn.rs:14:13
+ |
+LL | let _ = [0, 1, 2].splitn_mut(0, |&x| x == 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return `None`
+
+error: `splitn` called with `1` split
+ --> $DIR/suspicious_splitn.rs:15:13
+ |
+LL | let _ = [0, 1, 2].splitn(1, |&x| x == 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return the entire slice followed by `None`
+
+error: `rsplitn_mut` called with `1` split
+ --> $DIR/suspicious_splitn.rs:16:13
+ |
+LL | let _ = [0, 1, 2].rsplitn_mut(1, |&x| x == 1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return the entire slice followed by `None`
+
+error: `splitn` called with `1` split
+ --> $DIR/suspicious_splitn.rs:19:13
+ |
+LL | let _ = "a,b".splitn(X + 1, ',');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return the entire string followed by `None`
+
+error: `splitn` called with `0` splits
+ --> $DIR/suspicious_splitn.rs:20:13
+ |
+LL | let _ = "a,b".splitn(X, ',');
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the resulting iterator will always return `None`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/suspicious_unary_op_formatting.rs b/src/tools/clippy/tests/ui/suspicious_unary_op_formatting.rs
new file mode 100644
index 000000000..9564e373c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_unary_op_formatting.rs
@@ -0,0 +1,23 @@
+#![warn(clippy::suspicious_unary_op_formatting)]
+
+#[rustfmt::skip]
+fn main() {
+ // weird binary operator formatting:
+ let a = 42;
+
+ if a >- 30 {}
+ if a >=- 30 {}
+
+ let b = true;
+ let c = false;
+
+ if b &&! c {}
+
+ if a >- 30 {}
+
+ // those are ok:
+ if a >-30 {}
+ if a < -30 {}
+ if b && !c {}
+ if a > - 30 {}
+}
diff --git a/src/tools/clippy/tests/ui/suspicious_unary_op_formatting.stderr b/src/tools/clippy/tests/ui/suspicious_unary_op_formatting.stderr
new file mode 100644
index 000000000..581527dcf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/suspicious_unary_op_formatting.stderr
@@ -0,0 +1,35 @@
+error: by not having a space between `>` and `-` it looks like `>-` is a single operator
+ --> $DIR/suspicious_unary_op_formatting.rs:8:9
+ |
+LL | if a >- 30 {}
+ | ^^^^
+ |
+ = note: `-D clippy::suspicious-unary-op-formatting` implied by `-D warnings`
+ = help: put a space between `>` and `-` and remove the space after `-`
+
+error: by not having a space between `>=` and `-` it looks like `>=-` is a single operator
+ --> $DIR/suspicious_unary_op_formatting.rs:9:9
+ |
+LL | if a >=- 30 {}
+ | ^^^^^
+ |
+ = help: put a space between `>=` and `-` and remove the space after `-`
+
+error: by not having a space between `&&` and `!` it looks like `&&!` is a single operator
+ --> $DIR/suspicious_unary_op_formatting.rs:14:9
+ |
+LL | if b &&! c {}
+ | ^^^^^
+ |
+ = help: put a space between `&&` and `!` and remove the space after `!`
+
+error: by not having a space between `>` and `-` it looks like `>-` is a single operator
+ --> $DIR/suspicious_unary_op_formatting.rs:16:9
+ |
+LL | if a >- 30 {}
+ | ^^^^^^
+ |
+ = help: put a space between `>` and `-` and remove the space after `-`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/swap.fixed b/src/tools/clippy/tests/ui/swap.fixed
new file mode 100644
index 000000000..3329efbd4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap.fixed
@@ -0,0 +1,157 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![allow(
+ clippy::blacklisted_name,
+ clippy::no_effect,
+ clippy::redundant_clone,
+ redundant_semicolons,
+ dead_code,
+ unused_assignments
+)]
+
+struct Foo(u32);
+
+#[derive(Clone)]
+struct Bar {
+ a: u32,
+ b: u32,
+}
+
+fn field() {
+ let mut bar = Bar { a: 1, b: 2 };
+
+ std::mem::swap(&mut bar.a, &mut bar.b);
+
+ let mut baz = vec![bar.clone(), bar.clone()];
+ let temp = baz[0].a;
+ baz[0].a = baz[1].a;
+ baz[1].a = temp;
+}
+
+fn array() {
+ let mut foo = [1, 2];
+ foo.swap(0, 1);
+
+ foo.swap(0, 1);
+}
+
+fn slice() {
+ let foo = &mut [1, 2];
+ foo.swap(0, 1);
+
+ foo.swap(0, 1);
+}
+
+fn unswappable_slice() {
+ let foo = &mut [vec![1, 2], vec![3, 4]];
+ let temp = foo[0][1];
+ foo[0][1] = foo[1][0];
+ foo[1][0] = temp;
+
+ // swap(foo[0][1], foo[1][0]) would fail
+ // this could use split_at_mut and mem::swap, but that is not much simpler.
+}
+
+fn vec() {
+ let mut foo = vec![1, 2];
+ foo.swap(0, 1);
+
+ foo.swap(0, 1);
+}
+
+fn xor_swap_locals() {
+ // This is an xor-based swap of local variables.
+ let mut a = 0;
+ let mut b = 1;
+ std::mem::swap(&mut a, &mut b)
+}
+
+fn xor_field_swap() {
+ // This is an xor-based swap of fields in a struct.
+ let mut bar = Bar { a: 0, b: 1 };
+ std::mem::swap(&mut bar.a, &mut bar.b)
+}
+
+fn xor_slice_swap() {
+ // This is an xor-based swap of a slice
+ let foo = &mut [1, 2];
+ foo.swap(0, 1)
+}
+
+fn xor_no_swap() {
+ // This is a sequence of xor-assignment statements that doesn't result in a swap.
+ let mut a = 0;
+ let mut b = 1;
+ let mut c = 2;
+ a ^= b;
+ b ^= c;
+ a ^= c;
+ c ^= a;
+}
+
+fn xor_unswappable_slice() {
+ let foo = &mut [vec![1, 2], vec![3, 4]];
+ foo[0][1] ^= foo[1][0];
+ foo[1][0] ^= foo[0][0];
+ foo[0][1] ^= foo[1][0];
+
+ // swap(foo[0][1], foo[1][0]) would fail
+ // this could use split_at_mut and mem::swap, but that is not much simpler.
+}
+
+fn distinct_slice() {
+ let foo = &mut [vec![1, 2], vec![3, 4]];
+ let bar = &mut [vec![1, 2], vec![3, 4]];
+ std::mem::swap(&mut foo[0][1], &mut bar[1][0]);
+}
+
+#[rustfmt::skip]
+fn main() {
+
+ let mut a = 42;
+ let mut b = 1337;
+
+ std::mem::swap(&mut a, &mut b);
+
+ ; std::mem::swap(&mut a, &mut b);
+
+ let mut c = Foo(42);
+
+ std::mem::swap(&mut c.0, &mut a);
+
+ ; std::mem::swap(&mut c.0, &mut a);
+}
+
+fn issue_8154() {
+ struct S1 {
+ x: i32,
+ y: i32,
+ }
+ struct S2(S1);
+ struct S3<'a, 'b>(&'a mut &'b mut S1);
+
+ impl std::ops::Deref for S2 {
+ type Target = S1;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ impl std::ops::DerefMut for S2 {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+ }
+
+ // Don't lint. `s.0` is mutably borrowed by `s.x` and `s.y` via the deref impl.
+ let mut s = S2(S1 { x: 0, y: 0 });
+ let t = s.x;
+ s.x = s.y;
+ s.y = t;
+
+ // Accessing through a mutable reference is fine
+ let mut s = S1 { x: 0, y: 0 };
+ let mut s = &mut s;
+ let s = S3(&mut s);
+ std::mem::swap(&mut s.0.x, &mut s.0.y);
+}
diff --git a/src/tools/clippy/tests/ui/swap.rs b/src/tools/clippy/tests/ui/swap.rs
new file mode 100644
index 000000000..8179ac1f2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap.rs
@@ -0,0 +1,181 @@
+// run-rustfix
+
+#![warn(clippy::all)]
+#![allow(
+ clippy::blacklisted_name,
+ clippy::no_effect,
+ clippy::redundant_clone,
+ redundant_semicolons,
+ dead_code,
+ unused_assignments
+)]
+
+struct Foo(u32);
+
+#[derive(Clone)]
+struct Bar {
+ a: u32,
+ b: u32,
+}
+
+fn field() {
+ let mut bar = Bar { a: 1, b: 2 };
+
+ let temp = bar.a;
+ bar.a = bar.b;
+ bar.b = temp;
+
+ let mut baz = vec![bar.clone(), bar.clone()];
+ let temp = baz[0].a;
+ baz[0].a = baz[1].a;
+ baz[1].a = temp;
+}
+
+fn array() {
+ let mut foo = [1, 2];
+ let temp = foo[0];
+ foo[0] = foo[1];
+ foo[1] = temp;
+
+ foo.swap(0, 1);
+}
+
+fn slice() {
+ let foo = &mut [1, 2];
+ let temp = foo[0];
+ foo[0] = foo[1];
+ foo[1] = temp;
+
+ foo.swap(0, 1);
+}
+
+fn unswappable_slice() {
+ let foo = &mut [vec![1, 2], vec![3, 4]];
+ let temp = foo[0][1];
+ foo[0][1] = foo[1][0];
+ foo[1][0] = temp;
+
+ // swap(foo[0][1], foo[1][0]) would fail
+ // this could use split_at_mut and mem::swap, but that is not much simpler.
+}
+
+fn vec() {
+ let mut foo = vec![1, 2];
+ let temp = foo[0];
+ foo[0] = foo[1];
+ foo[1] = temp;
+
+ foo.swap(0, 1);
+}
+
+fn xor_swap_locals() {
+ // This is an xor-based swap of local variables.
+ let mut a = 0;
+ let mut b = 1;
+ a ^= b;
+ b ^= a;
+ a ^= b;
+}
+
+fn xor_field_swap() {
+ // This is an xor-based swap of fields in a struct.
+ let mut bar = Bar { a: 0, b: 1 };
+ bar.a ^= bar.b;
+ bar.b ^= bar.a;
+ bar.a ^= bar.b;
+}
+
+fn xor_slice_swap() {
+ // This is an xor-based swap of a slice
+ let foo = &mut [1, 2];
+ foo[0] ^= foo[1];
+ foo[1] ^= foo[0];
+ foo[0] ^= foo[1];
+}
+
+fn xor_no_swap() {
+ // This is a sequence of xor-assignment statements that doesn't result in a swap.
+ let mut a = 0;
+ let mut b = 1;
+ let mut c = 2;
+ a ^= b;
+ b ^= c;
+ a ^= c;
+ c ^= a;
+}
+
+fn xor_unswappable_slice() {
+ let foo = &mut [vec![1, 2], vec![3, 4]];
+ foo[0][1] ^= foo[1][0];
+ foo[1][0] ^= foo[0][0];
+ foo[0][1] ^= foo[1][0];
+
+ // swap(foo[0][1], foo[1][0]) would fail
+ // this could use split_at_mut and mem::swap, but that is not much simpler.
+}
+
+fn distinct_slice() {
+ let foo = &mut [vec![1, 2], vec![3, 4]];
+ let bar = &mut [vec![1, 2], vec![3, 4]];
+ let temp = foo[0][1];
+ foo[0][1] = bar[1][0];
+ bar[1][0] = temp;
+}
+
+#[rustfmt::skip]
+fn main() {
+
+ let mut a = 42;
+ let mut b = 1337;
+
+ a = b;
+ b = a;
+
+ ; let t = a;
+ a = b;
+ b = t;
+
+ let mut c = Foo(42);
+
+ c.0 = a;
+ a = c.0;
+
+ ; let t = c.0;
+ c.0 = a;
+ a = t;
+}
+
+fn issue_8154() {
+ struct S1 {
+ x: i32,
+ y: i32,
+ }
+ struct S2(S1);
+ struct S3<'a, 'b>(&'a mut &'b mut S1);
+
+ impl std::ops::Deref for S2 {
+ type Target = S1;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ impl std::ops::DerefMut for S2 {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+ }
+
+ // Don't lint. `s.0` is mutably borrowed by `s.x` and `s.y` via the deref impl.
+ let mut s = S2(S1 { x: 0, y: 0 });
+ let t = s.x;
+ s.x = s.y;
+ s.y = t;
+
+ // Accessing through a mutable reference is fine
+ let mut s = S1 { x: 0, y: 0 };
+ let mut s = &mut s;
+ let s = S3(&mut s);
+ let t = s.0.x;
+ s.0.x = s.0.y;
+ s.0.y = t;
+}
diff --git a/src/tools/clippy/tests/ui/swap.stderr b/src/tools/clippy/tests/ui/swap.stderr
new file mode 100644
index 000000000..2b556b475
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap.stderr
@@ -0,0 +1,122 @@
+error: this looks like you are swapping `bar.a` and `bar.b` manually
+ --> $DIR/swap.rs:24:5
+ |
+LL | / let temp = bar.a;
+LL | | bar.a = bar.b;
+LL | | bar.b = temp;
+ | |________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b)`
+ |
+ = note: `-D clippy::manual-swap` implied by `-D warnings`
+ = note: or maybe you should use `std::mem::replace`?
+
+error: this looks like you are swapping elements of `foo` manually
+ --> $DIR/swap.rs:36:5
+ |
+LL | / let temp = foo[0];
+LL | | foo[0] = foo[1];
+LL | | foo[1] = temp;
+ | |_________________^ help: try: `foo.swap(0, 1)`
+
+error: this looks like you are swapping elements of `foo` manually
+ --> $DIR/swap.rs:45:5
+ |
+LL | / let temp = foo[0];
+LL | | foo[0] = foo[1];
+LL | | foo[1] = temp;
+ | |_________________^ help: try: `foo.swap(0, 1)`
+
+error: this looks like you are swapping elements of `foo` manually
+ --> $DIR/swap.rs:64:5
+ |
+LL | / let temp = foo[0];
+LL | | foo[0] = foo[1];
+LL | | foo[1] = temp;
+ | |_________________^ help: try: `foo.swap(0, 1)`
+
+error: this looks like you are swapping `a` and `b` manually
+ --> $DIR/swap.rs:75:5
+ |
+LL | / a ^= b;
+LL | | b ^= a;
+LL | | a ^= b;
+ | |___________^ help: try: `std::mem::swap(&mut a, &mut b)`
+
+error: this looks like you are swapping `bar.a` and `bar.b` manually
+ --> $DIR/swap.rs:83:5
+ |
+LL | / bar.a ^= bar.b;
+LL | | bar.b ^= bar.a;
+LL | | bar.a ^= bar.b;
+ | |___________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b)`
+
+error: this looks like you are swapping elements of `foo` manually
+ --> $DIR/swap.rs:91:5
+ |
+LL | / foo[0] ^= foo[1];
+LL | | foo[1] ^= foo[0];
+LL | | foo[0] ^= foo[1];
+ | |_____________________^ help: try: `foo.swap(0, 1)`
+
+error: this looks like you are swapping `foo[0][1]` and `bar[1][0]` manually
+ --> $DIR/swap.rs:120:5
+ |
+LL | / let temp = foo[0][1];
+LL | | foo[0][1] = bar[1][0];
+LL | | bar[1][0] = temp;
+ | |____________________^ help: try: `std::mem::swap(&mut foo[0][1], &mut bar[1][0])`
+ |
+ = note: or maybe you should use `std::mem::replace`?
+
+error: this looks like you are swapping `a` and `b` manually
+ --> $DIR/swap.rs:134:7
+ |
+LL | ; let t = a;
+ | _______^
+LL | | a = b;
+LL | | b = t;
+ | |_________^ help: try: `std::mem::swap(&mut a, &mut b)`
+ |
+ = note: or maybe you should use `std::mem::replace`?
+
+error: this looks like you are swapping `c.0` and `a` manually
+ --> $DIR/swap.rs:143:7
+ |
+LL | ; let t = c.0;
+ | _______^
+LL | | c.0 = a;
+LL | | a = t;
+ | |_________^ help: try: `std::mem::swap(&mut c.0, &mut a)`
+ |
+ = note: or maybe you should use `std::mem::replace`?
+
+error: this looks like you are trying to swap `a` and `b`
+ --> $DIR/swap.rs:131:5
+ |
+LL | / a = b;
+LL | | b = a;
+ | |_________^ help: try: `std::mem::swap(&mut a, &mut b)`
+ |
+ = note: `-D clippy::almost-swapped` implied by `-D warnings`
+ = note: or maybe you should use `std::mem::replace`?
+
+error: this looks like you are trying to swap `c.0` and `a`
+ --> $DIR/swap.rs:140:5
+ |
+LL | / c.0 = a;
+LL | | a = c.0;
+ | |___________^ help: try: `std::mem::swap(&mut c.0, &mut a)`
+ |
+ = note: or maybe you should use `std::mem::replace`?
+
+error: this looks like you are swapping `s.0.x` and `s.0.y` manually
+ --> $DIR/swap.rs:178:5
+ |
+LL | / let t = s.0.x;
+LL | | s.0.x = s.0.y;
+LL | | s.0.y = t;
+ | |_____________^ help: try: `std::mem::swap(&mut s.0.x, &mut s.0.y)`
+ |
+ = note: or maybe you should use `std::mem::replace`?
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/swap_ptr_to_ref.fixed b/src/tools/clippy/tests/ui/swap_ptr_to_ref.fixed
new file mode 100644
index 000000000..596b6ee91
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap_ptr_to_ref.fixed
@@ -0,0 +1,24 @@
+// run-rustfix
+
+#![warn(clippy::swap_ptr_to_ref)]
+
+use core::ptr::addr_of_mut;
+
+fn main() {
+ let mut x = 0u32;
+ let y: *mut _ = &mut x;
+ let z: *mut _ = &mut x;
+
+ unsafe {
+ core::ptr::swap(y, z);
+ core::ptr::swap(y, &mut x);
+ core::ptr::swap(&mut x, y);
+ core::ptr::swap(addr_of_mut!(x), addr_of_mut!(x));
+ }
+
+ let y = &mut x;
+ let mut z = 0u32;
+ let z = &mut z;
+
+ core::mem::swap(y, z);
+}
diff --git a/src/tools/clippy/tests/ui/swap_ptr_to_ref.rs b/src/tools/clippy/tests/ui/swap_ptr_to_ref.rs
new file mode 100644
index 000000000..282f57121
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap_ptr_to_ref.rs
@@ -0,0 +1,24 @@
+// run-rustfix
+
+#![warn(clippy::swap_ptr_to_ref)]
+
+use core::ptr::addr_of_mut;
+
+fn main() {
+ let mut x = 0u32;
+ let y: *mut _ = &mut x;
+ let z: *mut _ = &mut x;
+
+ unsafe {
+ core::mem::swap(&mut *y, &mut *z);
+ core::mem::swap(&mut *y, &mut x);
+ core::mem::swap(&mut x, &mut *y);
+ core::mem::swap(&mut *addr_of_mut!(x), &mut *addr_of_mut!(x));
+ }
+
+ let y = &mut x;
+ let mut z = 0u32;
+ let z = &mut z;
+
+ core::mem::swap(y, z);
+}
diff --git a/src/tools/clippy/tests/ui/swap_ptr_to_ref.stderr b/src/tools/clippy/tests/ui/swap_ptr_to_ref.stderr
new file mode 100644
index 000000000..401ce0708
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap_ptr_to_ref.stderr
@@ -0,0 +1,28 @@
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref.rs:13:9
+ |
+LL | core::mem::swap(&mut *y, &mut *z);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use ptr::swap: `core::ptr::swap(y, z)`
+ |
+ = note: `-D clippy::swap-ptr-to-ref` implied by `-D warnings`
+
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref.rs:14:9
+ |
+LL | core::mem::swap(&mut *y, &mut x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use ptr::swap: `core::ptr::swap(y, &mut x)`
+
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref.rs:15:9
+ |
+LL | core::mem::swap(&mut x, &mut *y);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use ptr::swap: `core::ptr::swap(&mut x, y)`
+
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref.rs:16:9
+ |
+LL | core::mem::swap(&mut *addr_of_mut!(x), &mut *addr_of_mut!(x));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use ptr::swap: `core::ptr::swap(addr_of_mut!(x), addr_of_mut!(x))`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.rs b/src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.rs
new file mode 100644
index 000000000..66ea7c652
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.rs
@@ -0,0 +1,18 @@
+#![warn(clippy::swap_ptr_to_ref)]
+
+macro_rules! addr_of_mut_to_ref {
+ ($e:expr) => {
+ &mut *core::ptr::addr_of_mut!($e)
+ };
+}
+
+fn main() {
+ let mut x = 0u32;
+ let y: *mut _ = &mut x;
+
+ unsafe {
+ core::mem::swap(addr_of_mut_to_ref!(x), &mut *y);
+ core::mem::swap(&mut *y, addr_of_mut_to_ref!(x));
+ core::mem::swap(addr_of_mut_to_ref!(x), addr_of_mut_to_ref!(x));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.stderr b/src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.stderr
new file mode 100644
index 000000000..c261205d5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/swap_ptr_to_ref_unfixable.stderr
@@ -0,0 +1,22 @@
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref_unfixable.rs:14:9
+ |
+LL | core::mem::swap(addr_of_mut_to_ref!(x), &mut *y);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::swap-ptr-to-ref` implied by `-D warnings`
+
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref_unfixable.rs:15:9
+ |
+LL | core::mem::swap(&mut *y, addr_of_mut_to_ref!(x));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: call to `core::mem::swap` with a parameter derived from a raw pointer
+ --> $DIR/swap_ptr_to_ref_unfixable.rs:16:9
+ |
+LL | core::mem::swap(addr_of_mut_to_ref!(x), addr_of_mut_to_ref!(x));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/tabs_in_doc_comments.fixed b/src/tools/clippy/tests/ui/tabs_in_doc_comments.fixed
new file mode 100644
index 000000000..4bc4bc86c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/tabs_in_doc_comments.fixed
@@ -0,0 +1,22 @@
+// run-rustfix
+
+#![warn(clippy::tabs_in_doc_comments)]
+#[allow(dead_code)]
+
+///
+/// Struct to hold two strings:
+/// - first one
+/// - second one
+pub struct DoubleString {
+ ///
+ /// - First String:
+ /// - needs to be inside here
+ first_string: String,
+ ///
+ /// - Second String:
+ /// - needs to be inside here
+ second_string: String,
+}
+
+/// This is main
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/tabs_in_doc_comments.rs b/src/tools/clippy/tests/ui/tabs_in_doc_comments.rs
new file mode 100644
index 000000000..9db3416e6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/tabs_in_doc_comments.rs
@@ -0,0 +1,22 @@
+// run-rustfix
+
+#![warn(clippy::tabs_in_doc_comments)]
+#[allow(dead_code)]
+
+///
+/// Struct to hold two strings:
+/// - first one
+/// - second one
+pub struct DoubleString {
+ ///
+ /// - First String:
+ /// - needs to be inside here
+ first_string: String,
+ ///
+ /// - Second String:
+ /// - needs to be inside here
+ second_string: String,
+}
+
+/// This is main
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/tabs_in_doc_comments.stderr b/src/tools/clippy/tests/ui/tabs_in_doc_comments.stderr
new file mode 100644
index 000000000..355f2e805
--- /dev/null
+++ b/src/tools/clippy/tests/ui/tabs_in_doc_comments.stderr
@@ -0,0 +1,52 @@
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:12:9
+ |
+LL | /// - First String:
+ | ^^^^ help: consider using four spaces per tab
+ |
+ = note: `-D clippy::tabs-in-doc-comments` implied by `-D warnings`
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:13:9
+ |
+LL | /// - needs to be inside here
+ | ^^^^^^^^ help: consider using four spaces per tab
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:16:9
+ |
+LL | /// - Second String:
+ | ^^^^ help: consider using four spaces per tab
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:17:9
+ |
+LL | /// - needs to be inside here
+ | ^^^^^^^^ help: consider using four spaces per tab
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:8:5
+ |
+LL | /// - first one
+ | ^^^^ help: consider using four spaces per tab
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:8:13
+ |
+LL | /// - first one
+ | ^^^^^^^^ help: consider using four spaces per tab
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:9:5
+ |
+LL | /// - second one
+ | ^^^^ help: consider using four spaces per tab
+
+error: using tabs in doc comments is not recommended
+ --> $DIR/tabs_in_doc_comments.rs:9:14
+ |
+LL | /// - second one
+ | ^^^^ help: consider using four spaces per tab
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/temporary_assignment.rs b/src/tools/clippy/tests/ui/temporary_assignment.rs
new file mode 100644
index 000000000..b4a931043
--- /dev/null
+++ b/src/tools/clippy/tests/ui/temporary_assignment.rs
@@ -0,0 +1,71 @@
+#![warn(clippy::temporary_assignment)]
+#![allow(const_item_mutation)]
+
+use std::ops::{Deref, DerefMut};
+
+struct TupleStruct(i32);
+
+struct Struct {
+ field: i32,
+}
+
+struct MultiStruct {
+ structure: Struct,
+}
+
+struct Wrapper<'a> {
+ inner: &'a mut Struct,
+}
+
+impl<'a> Deref for Wrapper<'a> {
+ type Target = Struct;
+ fn deref(&self) -> &Struct {
+ self.inner
+ }
+}
+
+impl<'a> DerefMut for Wrapper<'a> {
+ fn deref_mut(&mut self) -> &mut Struct {
+ self.inner
+ }
+}
+
+struct ArrayStruct {
+ array: [i32; 1],
+}
+
+const A: TupleStruct = TupleStruct(1);
+const B: Struct = Struct { field: 1 };
+const C: MultiStruct = MultiStruct {
+ structure: Struct { field: 1 },
+};
+const D: ArrayStruct = ArrayStruct { array: [1] };
+
+fn main() {
+ let mut s = Struct { field: 0 };
+ let mut t = (0, 0);
+
+ Struct { field: 0 }.field = 1;
+ MultiStruct {
+ structure: Struct { field: 0 },
+ }
+ .structure
+ .field = 1;
+ ArrayStruct { array: [0] }.array[0] = 1;
+ (0, 0).0 = 1;
+
+ // no error
+ s.field = 1;
+ t.0 = 1;
+ Wrapper { inner: &mut s }.field = 1;
+ let mut a_mut = TupleStruct(1);
+ a_mut.0 = 2;
+ let mut b_mut = Struct { field: 1 };
+ b_mut.field = 2;
+ let mut c_mut = MultiStruct {
+ structure: Struct { field: 1 },
+ };
+ c_mut.structure.field = 2;
+ let mut d_mut = ArrayStruct { array: [1] };
+ d_mut.array[0] = 2;
+}
diff --git a/src/tools/clippy/tests/ui/temporary_assignment.stderr b/src/tools/clippy/tests/ui/temporary_assignment.stderr
new file mode 100644
index 000000000..4cc32c79f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/temporary_assignment.stderr
@@ -0,0 +1,32 @@
+error: assignment to temporary
+ --> $DIR/temporary_assignment.rs:48:5
+ |
+LL | Struct { field: 0 }.field = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::temporary-assignment` implied by `-D warnings`
+
+error: assignment to temporary
+ --> $DIR/temporary_assignment.rs:49:5
+ |
+LL | / MultiStruct {
+LL | | structure: Struct { field: 0 },
+LL | | }
+LL | | .structure
+LL | | .field = 1;
+ | |______________^
+
+error: assignment to temporary
+ --> $DIR/temporary_assignment.rs:54:5
+ |
+LL | ArrayStruct { array: [0] }.array[0] = 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: assignment to temporary
+ --> $DIR/temporary_assignment.rs:55:5
+ |
+LL | (0, 0).0 = 1;
+ | ^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/to_digit_is_some.fixed b/src/tools/clippy/tests/ui/to_digit_is_some.fixed
new file mode 100644
index 000000000..3c5e96427
--- /dev/null
+++ b/src/tools/clippy/tests/ui/to_digit_is_some.fixed
@@ -0,0 +1,11 @@
+// run-rustfix
+
+#![warn(clippy::to_digit_is_some)]
+
+fn main() {
+ let c = 'x';
+ let d = &c;
+
+ let _ = d.is_digit(8);
+ let _ = char::is_digit(c, 8);
+}
diff --git a/src/tools/clippy/tests/ui/to_digit_is_some.rs b/src/tools/clippy/tests/ui/to_digit_is_some.rs
new file mode 100644
index 000000000..4f247c06c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/to_digit_is_some.rs
@@ -0,0 +1,11 @@
+// run-rustfix
+
+#![warn(clippy::to_digit_is_some)]
+
+fn main() {
+ let c = 'x';
+ let d = &c;
+
+ let _ = d.to_digit(8).is_some();
+ let _ = char::to_digit(c, 8).is_some();
+}
diff --git a/src/tools/clippy/tests/ui/to_digit_is_some.stderr b/src/tools/clippy/tests/ui/to_digit_is_some.stderr
new file mode 100644
index 000000000..10a1b393a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/to_digit_is_some.stderr
@@ -0,0 +1,16 @@
+error: use of `.to_digit(..).is_some()`
+ --> $DIR/to_digit_is_some.rs:9:13
+ |
+LL | let _ = d.to_digit(8).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `d.is_digit(8)`
+ |
+ = note: `-D clippy::to-digit-is-some` implied by `-D warnings`
+
+error: use of `.to_digit(..).is_some()`
+ --> $DIR/to_digit_is_some.rs:10:13
+ |
+LL | let _ = char::to_digit(c, 8).is_some();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `char::is_digit(c, 8)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed b/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed
new file mode 100644
index 000000000..b129d95c5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg.fixed
@@ -0,0 +1,50 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::toplevel_ref_arg)]
+
+#[macro_use]
+extern crate macro_rules;
+
+macro_rules! gen_binding {
+ () => {
+ let _y = &42;
+ };
+}
+
+fn main() {
+ // Closures should not warn
+ let y = |ref x| println!("{:?}", x);
+ y(1u8);
+
+ let _x = &1;
+
+ let _y: &(&_, u8) = &(&1, 2);
+
+ let _z = &(1 + 2);
+
+ let _z = &mut (1 + 2);
+
+ let (ref x, _) = (1, 2); // ok, not top level
+ println!("The answer is {}.", x);
+
+ let _x = &vec![1, 2, 3];
+
+ // Make sure that allowing the lint works
+ #[allow(clippy::toplevel_ref_arg)]
+ let ref mut _x = 1_234_543;
+
+ // ok
+ for ref _x in 0..10 {}
+
+ // lint in macro
+ #[allow(unused)]
+ {
+ gen_binding!();
+ }
+
+ // do not lint in external macro
+ {
+ ref_arg_binding!();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg.rs b/src/tools/clippy/tests/ui/toplevel_ref_arg.rs
new file mode 100644
index 000000000..73eb4ff73
--- /dev/null
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg.rs
@@ -0,0 +1,50 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![warn(clippy::toplevel_ref_arg)]
+
+#[macro_use]
+extern crate macro_rules;
+
+macro_rules! gen_binding {
+ () => {
+ let ref _y = 42;
+ };
+}
+
+fn main() {
+ // Closures should not warn
+ let y = |ref x| println!("{:?}", x);
+ y(1u8);
+
+ let ref _x = 1;
+
+ let ref _y: (&_, u8) = (&1, 2);
+
+ let ref _z = 1 + 2;
+
+ let ref mut _z = 1 + 2;
+
+ let (ref x, _) = (1, 2); // ok, not top level
+ println!("The answer is {}.", x);
+
+ let ref _x = vec![1, 2, 3];
+
+ // Make sure that allowing the lint works
+ #[allow(clippy::toplevel_ref_arg)]
+ let ref mut _x = 1_234_543;
+
+ // ok
+ for ref _x in 0..10 {}
+
+ // lint in macro
+ #[allow(unused)]
+ {
+ gen_binding!();
+ }
+
+ // do not lint in external macro
+ {
+ ref_arg_binding!();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr b/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr
new file mode 100644
index 000000000..9c853020a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg.stderr
@@ -0,0 +1,45 @@
+error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
+ --> $DIR/toplevel_ref_arg.rs:20:9
+ |
+LL | let ref _x = 1;
+ | ----^^^^^^----- help: try: `let _x = &1;`
+ |
+ = note: `-D clippy::toplevel-ref-arg` implied by `-D warnings`
+
+error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
+ --> $DIR/toplevel_ref_arg.rs:22:9
+ |
+LL | let ref _y: (&_, u8) = (&1, 2);
+ | ----^^^^^^--------------------- help: try: `let _y: &(&_, u8) = &(&1, 2);`
+
+error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
+ --> $DIR/toplevel_ref_arg.rs:24:9
+ |
+LL | let ref _z = 1 + 2;
+ | ----^^^^^^--------- help: try: `let _z = &(1 + 2);`
+
+error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
+ --> $DIR/toplevel_ref_arg.rs:26:9
+ |
+LL | let ref mut _z = 1 + 2;
+ | ----^^^^^^^^^^--------- help: try: `let _z = &mut (1 + 2);`
+
+error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
+ --> $DIR/toplevel_ref_arg.rs:31:9
+ |
+LL | let ref _x = vec![1, 2, 3];
+ | ----^^^^^^----------------- help: try: `let _x = &vec![1, 2, 3];`
+
+error: `ref` on an entire `let` pattern is discouraged, take a reference with `&` instead
+ --> $DIR/toplevel_ref_arg.rs:11:13
+ |
+LL | let ref _y = 42;
+ | ----^^^^^^------ help: try: `let _y = &42;`
+...
+LL | gen_binding!();
+ | -------------- in this macro invocation
+ |
+ = note: this error originates in the macro `gen_binding` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs
new file mode 100644
index 000000000..1a493fbce
--- /dev/null
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.rs
@@ -0,0 +1,33 @@
+// aux-build:macro_rules.rs
+
+#![warn(clippy::toplevel_ref_arg)]
+#![allow(unused)]
+
+#[macro_use]
+extern crate macro_rules;
+
+fn the_answer(ref mut x: u8) {
+ *x = 42;
+}
+
+macro_rules! gen_function {
+ () => {
+ fn fun_example(ref _x: usize) {}
+ };
+}
+
+fn main() {
+ let mut x = 0;
+ the_answer(x);
+
+ // lint in macro
+ #[allow(unused)]
+ {
+ gen_function!();
+ }
+
+ // do not lint in external macro
+ {
+ ref_arg_function!();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr
new file mode 100644
index 000000000..e97011c7f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/toplevel_ref_arg_non_rustfix.stderr
@@ -0,0 +1,21 @@
+error: `ref` directly on a function argument is ignored. Consider using a reference type instead
+ --> $DIR/toplevel_ref_arg_non_rustfix.rs:9:15
+ |
+LL | fn the_answer(ref mut x: u8) {
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::toplevel-ref-arg` implied by `-D warnings`
+
+error: `ref` directly on a function argument is ignored. Consider using a reference type instead
+ --> $DIR/toplevel_ref_arg_non_rustfix.rs:15:24
+ |
+LL | fn fun_example(ref _x: usize) {}
+ | ^^^^^^
+...
+LL | gen_function!();
+ | --------------- in this macro invocation
+ |
+ = note: this error originates in the macro `gen_function` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/trailing_empty_array.rs b/src/tools/clippy/tests/ui/trailing_empty_array.rs
new file mode 100644
index 000000000..c39b0bcaf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trailing_empty_array.rs
@@ -0,0 +1,185 @@
+#![warn(clippy::trailing_empty_array)]
+
+// Do lint:
+
+struct RarelyUseful {
+ field: i32,
+ last: [usize; 0],
+}
+
+struct OnlyField {
+ first_and_last: [usize; 0],
+}
+
+struct GenericArrayType<T> {
+ field: i32,
+ last: [T; 0],
+}
+
+#[must_use]
+struct OnlyAnotherAttribute {
+ field: i32,
+ last: [usize; 0],
+}
+
+#[derive(Debug)]
+struct OnlyADeriveAttribute {
+ field: i32,
+ last: [usize; 0],
+}
+
+const ZERO: usize = 0;
+struct ZeroSizedWithConst {
+ field: i32,
+ last: [usize; ZERO],
+}
+
+#[allow(clippy::eq_op)]
+const fn compute_zero() -> usize {
+ (4 + 6) - (2 * 5)
+}
+struct ZeroSizedWithConstFunction {
+ field: i32,
+ last: [usize; compute_zero()],
+}
+
+const fn compute_zero_from_arg(x: usize) -> usize {
+ x - 1
+}
+struct ZeroSizedWithConstFunction2 {
+ field: i32,
+ last: [usize; compute_zero_from_arg(1)],
+}
+
+struct ZeroSizedArrayWrapper([usize; 0]);
+
+struct TupleStruct(i32, [usize; 0]);
+
+struct LotsOfFields {
+ f1: u32,
+ f2: u32,
+ f3: u32,
+ f4: u32,
+ f5: u32,
+ f6: u32,
+ f7: u32,
+ f8: u32,
+ f9: u32,
+ f10: u32,
+ f11: u32,
+ f12: u32,
+ f13: u32,
+ f14: u32,
+ f15: u32,
+ f16: u32,
+ last: [usize; 0],
+}
+
+// Don't lint
+
+#[repr(C)]
+struct GoodReason {
+ field: i32,
+ last: [usize; 0],
+}
+
+#[repr(C)]
+struct OnlyFieldWithReprC {
+ first_and_last: [usize; 0],
+}
+
+struct NonZeroSizedArray {
+ field: i32,
+ last: [usize; 1],
+}
+
+struct NotLastField {
+ f1: u32,
+ zero_sized: [usize; 0],
+ last: i32,
+}
+
+const ONE: usize = 1;
+struct NonZeroSizedWithConst {
+ field: i32,
+ last: [usize; ONE],
+}
+
+#[derive(Debug)]
+#[repr(C)]
+struct AlsoADeriveAttribute {
+ field: i32,
+ last: [usize; 0],
+}
+
+#[must_use]
+#[repr(C)]
+struct AlsoAnotherAttribute {
+ field: i32,
+ last: [usize; 0],
+}
+
+#[repr(packed)]
+struct ReprPacked {
+ field: i32,
+ last: [usize; 0],
+}
+
+#[repr(C, packed)]
+struct ReprCPacked {
+ field: i32,
+ last: [usize; 0],
+}
+
+#[repr(align(64))]
+struct ReprAlign {
+ field: i32,
+ last: [usize; 0],
+}
+#[repr(C, align(64))]
+struct ReprCAlign {
+ field: i32,
+ last: [usize; 0],
+}
+
+// NOTE: not linted because of https://doc.rust-lang.org/stable/reference/type-layout.html#primitive-representation-of-enums-with-fields, and it is unclear at which point in the compilation pipeline that desugaring would happen
+#[repr(C)]
+enum DontLintAnonymousStructsFromDesuraging {
+ A(u32),
+ B(f32, [u64; 0]),
+ C { x: u32, y: [u64; 0] },
+}
+
+#[repr(C)]
+struct TupleStructReprC(i32, [usize; 0]);
+
+type NamedTuple = (i32, [usize; 0]);
+
+#[rustfmt::skip] // [rustfmt#4995](https://github.com/rust-lang/rustfmt/issues/4995)
+struct ConstParamZeroDefault<const N: usize = 0> {
+ field: i32,
+ last: [usize; N],
+}
+
+struct ConstParamNoDefault<const N: usize> {
+ field: i32,
+ last: [usize; N],
+}
+
+#[rustfmt::skip]
+struct ConstParamNonZeroDefault<const N: usize = 1> {
+ field: i32,
+ last: [usize; N],
+}
+
+struct TwoGenericParams<T, const N: usize> {
+ field: i32,
+ last: [T; N],
+}
+
+type A = ConstParamZeroDefault;
+type B = ConstParamZeroDefault<0>;
+type C = ConstParamNoDefault<0>;
+type D = ConstParamNonZeroDefault<0>;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/trailing_empty_array.stderr b/src/tools/clippy/tests/ui/trailing_empty_array.stderr
new file mode 100644
index 000000000..9e2bd31d9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trailing_empty_array.stderr
@@ -0,0 +1,120 @@
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:5:1
+ |
+LL | / struct RarelyUseful {
+LL | | field: i32,
+LL | | last: [usize; 0],
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::trailing-empty-array` implied by `-D warnings`
+ = help: consider annotating `RarelyUseful` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:10:1
+ |
+LL | / struct OnlyField {
+LL | | first_and_last: [usize; 0],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `OnlyField` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:14:1
+ |
+LL | / struct GenericArrayType<T> {
+LL | | field: i32,
+LL | | last: [T; 0],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `GenericArrayType` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:20:1
+ |
+LL | / struct OnlyAnotherAttribute {
+LL | | field: i32,
+LL | | last: [usize; 0],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `OnlyAnotherAttribute` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:26:1
+ |
+LL | / struct OnlyADeriveAttribute {
+LL | | field: i32,
+LL | | last: [usize; 0],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `OnlyADeriveAttribute` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:32:1
+ |
+LL | / struct ZeroSizedWithConst {
+LL | | field: i32,
+LL | | last: [usize; ZERO],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `ZeroSizedWithConst` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:41:1
+ |
+LL | / struct ZeroSizedWithConstFunction {
+LL | | field: i32,
+LL | | last: [usize; compute_zero()],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `ZeroSizedWithConstFunction` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:49:1
+ |
+LL | / struct ZeroSizedWithConstFunction2 {
+LL | | field: i32,
+LL | | last: [usize; compute_zero_from_arg(1)],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `ZeroSizedWithConstFunction2` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:54:1
+ |
+LL | struct ZeroSizedArrayWrapper([usize; 0]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider annotating `ZeroSizedArrayWrapper` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:56:1
+ |
+LL | struct TupleStruct(i32, [usize; 0]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider annotating `TupleStruct` with `#[repr(C)]` or another `repr` attribute
+
+error: trailing zero-sized array in a struct which is not marked with a `repr` attribute
+ --> $DIR/trailing_empty_array.rs:58:1
+ |
+LL | / struct LotsOfFields {
+LL | | f1: u32,
+LL | | f2: u32,
+LL | | f3: u32,
+... |
+LL | | last: [usize; 0],
+LL | | }
+ | |_^
+ |
+ = help: consider annotating `LotsOfFields` with `#[repr(C)]` or another `repr` attribute
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/trailing_zeros.rs b/src/tools/clippy/tests/ui/trailing_zeros.rs
new file mode 100644
index 000000000..fbdc977b7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trailing_zeros.rs
@@ -0,0 +1,10 @@
+#![allow(unused_parens)]
+#![warn(clippy::verbose_bit_mask)]
+
+fn main() {
+ let x: i32 = 42;
+ let _ = (x & 0b1111 == 0); // suggest trailing_zeros
+ let _ = x & 0b1_1111 == 0; // suggest trailing_zeros
+ let _ = x & 0b1_1010 == 0; // do not lint
+ let _ = x & 1 == 0; // do not lint
+}
diff --git a/src/tools/clippy/tests/ui/trailing_zeros.stderr b/src/tools/clippy/tests/ui/trailing_zeros.stderr
new file mode 100644
index 000000000..798551118
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trailing_zeros.stderr
@@ -0,0 +1,16 @@
+error: bit mask could be simplified with a call to `trailing_zeros`
+ --> $DIR/trailing_zeros.rs:6:13
+ |
+LL | let _ = (x & 0b1111 == 0); // suggest trailing_zeros
+ | ^^^^^^^^^^^^^^^^^ help: try: `x.trailing_zeros() >= 4`
+ |
+ = note: `-D clippy::verbose-bit-mask` implied by `-D warnings`
+
+error: bit mask could be simplified with a call to `trailing_zeros`
+ --> $DIR/trailing_zeros.rs:7:13
+ |
+LL | let _ = x & 0b1_1111 == 0; // suggest trailing_zeros
+ | ^^^^^^^^^^^^^^^^^ help: try: `x.trailing_zeros() >= 5`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs
new file mode 100644
index 000000000..a5751c58a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs
@@ -0,0 +1,212 @@
+#![deny(clippy::trait_duplication_in_bounds)]
+#![allow(unused)]
+
+use std::collections::BTreeMap;
+use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign};
+
+fn bad_foo<T: Clone + Default, Z: Copy>(arg0: T, arg1: Z)
+where
+ T: Clone,
+ T: Default,
+{
+ unimplemented!();
+}
+
+fn good_bar<T: Clone + Default>(arg: T) {
+ unimplemented!();
+}
+
+fn good_foo<T>(arg: T)
+where
+ T: Clone + Default,
+{
+ unimplemented!();
+}
+
+fn good_foobar<T: Default>(arg: T)
+where
+ T: Clone,
+{
+ unimplemented!();
+}
+
+trait T: Default {
+ fn f()
+ where
+ Self: Default;
+}
+
+trait U: Default {
+ fn f()
+ where
+ Self: Clone;
+}
+
+trait ZZ: Default {
+ fn g();
+ fn h();
+ fn f()
+ where
+ Self: Default + Clone;
+}
+
+trait BadTrait: Default + Clone {
+ fn f()
+ where
+ Self: Default + Clone;
+ fn g()
+ where
+ Self: Default;
+ fn h()
+ where
+ Self: Copy;
+}
+
+#[derive(Default, Clone)]
+struct Life;
+
+impl T for Life {
+ // this should not warn
+ fn f() {}
+}
+
+impl U for Life {
+ // this should not warn
+ fn f() {}
+}
+
+// should not warn
+trait Iter: Iterator {
+ fn into_group_btreemap<K, V>(self) -> BTreeMap<K, Vec<V>>
+ where
+ Self: Iterator<Item = (K, V)> + Sized,
+ K: Ord + Eq,
+ {
+ unimplemented!();
+ }
+}
+
+struct Foo;
+
+trait FooIter: Iterator<Item = Foo> {
+ fn bar()
+ where
+ Self: Iterator<Item = Foo>,
+ {
+ }
+}
+
+// This should not lint
+fn impl_trait(_: impl AsRef<str>, _: impl AsRef<str>) {}
+
+mod repeated_where_clauses_or_trait_bounds {
+ fn bad_foo<T: Clone + Clone + Clone + Copy, U: Clone + Copy>(arg0: T, argo1: U) {
+ unimplemented!();
+ }
+
+ fn bad_bar<T, U>(arg0: T, arg1: U)
+ where
+ T: Clone + Clone + Clone + Copy,
+ U: Clone + Copy,
+ {
+ unimplemented!();
+ }
+
+ fn good_bar<T: Clone + Copy, U: Clone + Copy>(arg0: T, arg1: U) {
+ unimplemented!();
+ }
+
+ fn good_foo<T, U>(arg0: T, arg1: U)
+ where
+ T: Clone + Copy,
+ U: Clone + Copy,
+ {
+ unimplemented!();
+ }
+
+ trait GoodSelfTraitBound: Clone + Copy {
+ fn f();
+ }
+
+ trait GoodSelfWhereClause {
+ fn f()
+ where
+ Self: Clone + Copy;
+ }
+
+ trait BadSelfTraitBound: Clone + Clone + Clone {
+ fn f();
+ }
+
+ trait BadSelfWhereClause {
+ fn f()
+ where
+ Self: Clone + Clone + Clone;
+ }
+
+ trait GoodTraitBound<T: Clone + Copy, U: Clone + Copy> {
+ fn f();
+ }
+
+ trait GoodWhereClause<T, U> {
+ fn f()
+ where
+ T: Clone + Copy,
+ U: Clone + Copy;
+ }
+
+ trait BadTraitBound<T: Clone + Clone + Clone + Copy, U: Clone + Copy> {
+ fn f();
+ }
+
+ trait BadWhereClause<T, U> {
+ fn f()
+ where
+ T: Clone + Clone + Clone + Copy,
+ U: Clone + Copy;
+ }
+
+ struct GoodStructBound<T: Clone + Copy, U: Clone + Copy> {
+ t: T,
+ u: U,
+ }
+
+ impl<T: Clone + Copy, U: Clone + Copy> GoodTraitBound<T, U> for GoodStructBound<T, U> {
+ // this should not warn
+ fn f() {}
+ }
+
+ struct GoodStructWhereClause;
+
+ impl<T, U> GoodTraitBound<T, U> for GoodStructWhereClause
+ where
+ T: Clone + Copy,
+ U: Clone + Copy,
+ {
+ // this should not warn
+ fn f() {}
+ }
+
+ fn no_error_separate_arg_bounds(program: impl AsRef<()>, dir: impl AsRef<()>, args: &[impl AsRef<()>]) {}
+
+ trait GenericTrait<T> {}
+
+    // This should not warn, but currently does; see #8757
+ fn good_generic<T: GenericTrait<u64> + GenericTrait<u32>>(arg0: T) {
+ unimplemented!();
+ }
+
+ fn bad_generic<T: GenericTrait<u64> + GenericTrait<u32> + GenericTrait<u64>>(arg0: T) {
+ unimplemented!();
+ }
+
+ mod foo {
+ pub trait Clone {}
+ }
+
+ fn qualified_path<T: std::clone::Clone + Clone + foo::Clone>(arg0: T) {
+ unimplemented!();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr
new file mode 100644
index 000000000..7ef04e527
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr
@@ -0,0 +1,167 @@
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:7:15
+ |
+LL | fn bad_foo<T: Clone + Default, Z: Copy>(arg0: T, arg1: Z)
+ | ^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/trait_duplication_in_bounds.rs:1:9
+ |
+LL | #![deny(clippy::trait_duplication_in_bounds)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:7:23
+ |
+LL | fn bad_foo<T: Clone + Default, Z: Copy>(arg0: T, arg1: Z)
+ | ^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in trait declaration
+ --> $DIR/trait_duplication_in_bounds.rs:36:15
+ |
+LL | Self: Default;
+ | ^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in trait declaration
+ --> $DIR/trait_duplication_in_bounds.rs:50:15
+ |
+LL | Self: Default + Clone;
+ | ^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in trait declaration
+ --> $DIR/trait_duplication_in_bounds.rs:56:15
+ |
+LL | Self: Default + Clone;
+ | ^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in trait declaration
+ --> $DIR/trait_duplication_in_bounds.rs:56:25
+ |
+LL | Self: Default + Clone;
+ | ^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in trait declaration
+ --> $DIR/trait_duplication_in_bounds.rs:59:15
+ |
+LL | Self: Default;
+ | ^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in trait declaration
+ --> $DIR/trait_duplication_in_bounds.rs:94:15
+ |
+LL | Self: Iterator<Item = Foo>,
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:103:19
+ |
+LL | fn bad_foo<T: Clone + Clone + Clone + Copy, U: Clone + Copy>(arg0: T, argo1: U) {
+ | ^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: these bounds contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:103:19
+ |
+LL | fn bad_foo<T: Clone + Clone + Clone + Copy, U: Clone + Copy>(arg0: T, argo1: U) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone + Copy`
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:109:12
+ |
+LL | T: Clone + Clone + Clone + Copy,
+ | ^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: these where clauses contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:109:12
+ |
+LL | T: Clone + Clone + Clone + Copy,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone + Copy`
+
+error: these bounds contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:137:30
+ |
+LL | trait BadSelfTraitBound: Clone + Clone + Clone {
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone`
+
+error: these where clauses contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:144:19
+ |
+LL | Self: Clone + Clone + Clone;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone`
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:158:28
+ |
+LL | trait BadTraitBound<T: Clone + Clone + Clone + Copy, U: Clone + Copy> {
+ | ^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: these bounds contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:158:28
+ |
+LL | trait BadTraitBound<T: Clone + Clone + Clone + Copy, U: Clone + Copy> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone + Copy`
+
+error: these where clauses contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:165:16
+ |
+LL | T: Clone + Clone + Clone + Copy,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone + Copy`
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:195:24
+ |
+LL | fn good_generic<T: GenericTrait<u64> + GenericTrait<u32>>(arg0: T) {
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:199:23
+ |
+LL | fn bad_generic<T: GenericTrait<u64> + GenericTrait<u32> + GenericTrait<u64>>(arg0: T) {
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: these bounds contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:199:23
+ |
+LL | fn bad_generic<T: GenericTrait<u64> + GenericTrait<u32> + GenericTrait<u64>>(arg0: T) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `GenericTrait<u32> + GenericTrait<u64>`
+
+error: this trait bound is already specified in the where clause
+ --> $DIR/trait_duplication_in_bounds.rs:207:26
+ |
+LL | fn qualified_path<T: std::clone::Clone + Clone + foo::Clone>(arg0: T) {
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing this trait bound
+
+error: these bounds contain repeated elements
+ --> $DIR/trait_duplication_in_bounds.rs:207:26
+ |
+LL | fn qualified_path<T: std::clone::Clone + Clone + foo::Clone>(arg0: T) {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Clone + foo::Clone`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute.rs b/src/tools/clippy/tests/ui/transmute.rs
new file mode 100644
index 000000000..001c91023
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute.rs
@@ -0,0 +1,162 @@
+#![allow(dead_code, clippy::borrow_as_ptr)]
+
+extern crate core;
+
+use std::mem::transmute as my_transmute;
+use std::vec::Vec as MyVec;
+
+fn my_int() -> Usize {
+ Usize(42)
+}
+
+fn my_vec() -> MyVec<i32> {
+ vec![]
+}
+
+#[allow(clippy::needless_lifetimes, clippy::transmute_ptr_to_ptr)]
+#[warn(clippy::useless_transmute)]
+unsafe fn _generic<'a, T, U: 'a>(t: &'a T) {
+ // FIXME: should lint
+ // let _: &'a T = core::intrinsics::transmute(t);
+
+ let _: &'a U = core::intrinsics::transmute(t);
+
+ let _: *const T = core::intrinsics::transmute(t);
+
+ let _: *mut T = core::intrinsics::transmute(t);
+
+ let _: *const U = core::intrinsics::transmute(t);
+}
+
+#[warn(clippy::useless_transmute)]
+fn useless() {
+ unsafe {
+ let _: Vec<i32> = core::intrinsics::transmute(my_vec());
+
+ let _: Vec<i32> = core::mem::transmute(my_vec());
+
+ let _: Vec<i32> = std::intrinsics::transmute(my_vec());
+
+ let _: Vec<i32> = std::mem::transmute(my_vec());
+
+ let _: Vec<i32> = my_transmute(my_vec());
+
+ let _: *const usize = std::mem::transmute(5_isize);
+
+ let _ = 5_isize as *const usize;
+
+ let _: *const usize = std::mem::transmute(1 + 1usize);
+
+ let _ = (1 + 1_usize) as *const usize;
+ }
+
+ unsafe fn _f<'a, 'b>(x: &'a u32) -> &'b u32 {
+ std::mem::transmute(x)
+ }
+
+ unsafe fn _f2<'a, 'b>(x: *const (dyn Iterator<Item = u32> + 'a)) -> *const (dyn Iterator<Item = u32> + 'b) {
+ std::mem::transmute(x)
+ }
+
+ unsafe fn _f3<'a, 'b>(x: fn(&'a u32)) -> fn(&'b u32) {
+ std::mem::transmute(x)
+ }
+
+ unsafe fn _f4<'a, 'b>(x: std::borrow::Cow<'a, str>) -> std::borrow::Cow<'b, str> {
+ std::mem::transmute(x)
+ }
+}
+
+struct Usize(usize);
+
+#[warn(clippy::crosspointer_transmute)]
+fn crosspointer() {
+ let mut int: Usize = Usize(0);
+ let int_const_ptr: *const Usize = &int as *const Usize;
+ let int_mut_ptr: *mut Usize = &mut int as *mut Usize;
+
+ unsafe {
+ let _: Usize = core::intrinsics::transmute(int_const_ptr);
+
+ let _: Usize = core::intrinsics::transmute(int_mut_ptr);
+
+ let _: *const Usize = core::intrinsics::transmute(my_int());
+
+ let _: *mut Usize = core::intrinsics::transmute(my_int());
+ }
+}
+
+#[warn(clippy::transmute_int_to_char)]
+fn int_to_char() {
+ let _: char = unsafe { std::mem::transmute(0_u32) };
+ let _: char = unsafe { std::mem::transmute(0_i32) };
+
+ // These shouldn't warn
+ const _: char = unsafe { std::mem::transmute(0_u32) };
+ const _: char = unsafe { std::mem::transmute(0_i32) };
+}
+
+#[warn(clippy::transmute_int_to_bool)]
+fn int_to_bool() {
+ let _: bool = unsafe { std::mem::transmute(0_u8) };
+}
+
+#[warn(clippy::transmute_int_to_float)]
+mod int_to_float {
+ fn test() {
+ let _: f32 = unsafe { std::mem::transmute(0_u32) };
+ let _: f32 = unsafe { std::mem::transmute(0_i32) };
+ let _: f64 = unsafe { std::mem::transmute(0_u64) };
+ let _: f64 = unsafe { std::mem::transmute(0_i64) };
+ }
+
+ mod issue_5747 {
+ const VALUE32: f32 = unsafe { std::mem::transmute(0_u32) };
+ const VALUE64: f64 = unsafe { std::mem::transmute(0_i64) };
+
+ const fn from_bits_32(v: i32) -> f32 {
+ unsafe { std::mem::transmute(v) }
+ }
+
+ const fn from_bits_64(v: u64) -> f64 {
+ unsafe { std::mem::transmute(v) }
+ }
+ }
+}
+
+mod num_to_bytes {
+ fn test() {
+ unsafe {
+ let _: [u8; 1] = std::mem::transmute(0u8);
+ let _: [u8; 4] = std::mem::transmute(0u32);
+ let _: [u8; 16] = std::mem::transmute(0u128);
+ let _: [u8; 1] = std::mem::transmute(0i8);
+ let _: [u8; 4] = std::mem::transmute(0i32);
+ let _: [u8; 16] = std::mem::transmute(0i128);
+ let _: [u8; 4] = std::mem::transmute(0.0f32);
+ let _: [u8; 8] = std::mem::transmute(0.0f64);
+ }
+ }
+ const fn test_const() {
+ unsafe {
+ let _: [u8; 1] = std::mem::transmute(0u8);
+ let _: [u8; 4] = std::mem::transmute(0u32);
+ let _: [u8; 16] = std::mem::transmute(0u128);
+ let _: [u8; 1] = std::mem::transmute(0i8);
+ let _: [u8; 4] = std::mem::transmute(0i32);
+ let _: [u8; 16] = std::mem::transmute(0i128);
+ let _: [u8; 4] = std::mem::transmute(0.0f32);
+ let _: [u8; 8] = std::mem::transmute(0.0f64);
+ }
+ }
+}
+
+fn bytes_to_str(mb: &mut [u8]) {
+ const B: &[u8] = b"";
+
+ let _: &str = unsafe { std::mem::transmute(B) };
+ let _: &mut str = unsafe { std::mem::transmute(mb) };
+ const _: &str = unsafe { std::mem::transmute(B) };
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/transmute.stderr b/src/tools/clippy/tests/ui/transmute.stderr
new file mode 100644
index 000000000..008b4a981
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute.stderr
@@ -0,0 +1,244 @@
+error: transmute from a reference to a pointer
+ --> $DIR/transmute.rs:24:23
+ |
+LL | let _: *const T = core::intrinsics::transmute(t);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T`
+ |
+ = note: `-D clippy::useless-transmute` implied by `-D warnings`
+
+error: transmute from a reference to a pointer
+ --> $DIR/transmute.rs:26:21
+ |
+LL | let _: *mut T = core::intrinsics::transmute(t);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *mut T`
+
+error: transmute from a reference to a pointer
+ --> $DIR/transmute.rs:28:23
+ |
+LL | let _: *const U = core::intrinsics::transmute(t);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *const U`
+
+error: transmute from a type (`std::vec::Vec<i32>`) to itself
+ --> $DIR/transmute.rs:34:27
+ |
+LL | let _: Vec<i32> = core::intrinsics::transmute(my_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a type (`std::vec::Vec<i32>`) to itself
+ --> $DIR/transmute.rs:36:27
+ |
+LL | let _: Vec<i32> = core::mem::transmute(my_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a type (`std::vec::Vec<i32>`) to itself
+ --> $DIR/transmute.rs:38:27
+ |
+LL | let _: Vec<i32> = std::intrinsics::transmute(my_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a type (`std::vec::Vec<i32>`) to itself
+ --> $DIR/transmute.rs:40:27
+ |
+LL | let _: Vec<i32> = std::mem::transmute(my_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a type (`std::vec::Vec<i32>`) to itself
+ --> $DIR/transmute.rs:42:27
+ |
+LL | let _: Vec<i32> = my_transmute(my_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from an integer to a pointer
+ --> $DIR/transmute.rs:44:31
+ |
+LL | let _: *const usize = std::mem::transmute(5_isize);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `5_isize as *const usize`
+
+error: transmute from an integer to a pointer
+ --> $DIR/transmute.rs:48:31
+ |
+LL | let _: *const usize = std::mem::transmute(1 + 1usize);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(1 + 1usize) as *const usize`
+
+error: transmute from a type (`*const Usize`) to the type that it points to (`Usize`)
+ --> $DIR/transmute.rs:79:24
+ |
+LL | let _: Usize = core::intrinsics::transmute(int_const_ptr);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::crosspointer-transmute` implied by `-D warnings`
+
+error: transmute from a type (`*mut Usize`) to the type that it points to (`Usize`)
+ --> $DIR/transmute.rs:81:24
+ |
+LL | let _: Usize = core::intrinsics::transmute(int_mut_ptr);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a type (`Usize`) to a pointer to that type (`*const Usize`)
+ --> $DIR/transmute.rs:83:31
+ |
+LL | let _: *const Usize = core::intrinsics::transmute(my_int());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a type (`Usize`) to a pointer to that type (`*mut Usize`)
+ --> $DIR/transmute.rs:85:29
+ |
+LL | let _: *mut Usize = core::intrinsics::transmute(my_int());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a `u32` to a `char`
+ --> $DIR/transmute.rs:91:28
+ |
+LL | let _: char = unsafe { std::mem::transmute(0_u32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::char::from_u32(0_u32).unwrap()`
+ |
+ = note: `-D clippy::transmute-int-to-char` implied by `-D warnings`
+
+error: transmute from a `i32` to a `char`
+ --> $DIR/transmute.rs:92:28
+ |
+LL | let _: char = unsafe { std::mem::transmute(0_i32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::char::from_u32(0_i32 as u32).unwrap()`
+
+error: transmute from a `u8` to a `bool`
+ --> $DIR/transmute.rs:101:28
+ |
+LL | let _: bool = unsafe { std::mem::transmute(0_u8) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `0_u8 != 0`
+ |
+ = note: `-D clippy::transmute-int-to-bool` implied by `-D warnings`
+
+error: transmute from a `u32` to a `f32`
+ --> $DIR/transmute.rs:107:31
+ |
+LL | let _: f32 = unsafe { std::mem::transmute(0_u32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f32::from_bits(0_u32)`
+ |
+ = note: `-D clippy::transmute-int-to-float` implied by `-D warnings`
+
+error: transmute from a `i32` to a `f32`
+ --> $DIR/transmute.rs:108:31
+ |
+LL | let _: f32 = unsafe { std::mem::transmute(0_i32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f32::from_bits(0_i32 as u32)`
+
+error: transmute from a `u64` to a `f64`
+ --> $DIR/transmute.rs:109:31
+ |
+LL | let _: f64 = unsafe { std::mem::transmute(0_u64) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f64::from_bits(0_u64)`
+
+error: transmute from a `i64` to a `f64`
+ --> $DIR/transmute.rs:110:31
+ |
+LL | let _: f64 = unsafe { std::mem::transmute(0_i64) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f64::from_bits(0_i64 as u64)`
+
+error: transmute from a `u8` to a `[u8; 1]`
+ --> $DIR/transmute.rs:130:30
+ |
+LL | let _: [u8; 1] = std::mem::transmute(0u8);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u8.to_ne_bytes()`
+ |
+ = note: `-D clippy::transmute-num-to-bytes` implied by `-D warnings`
+
+error: transmute from a `u32` to a `[u8; 4]`
+ --> $DIR/transmute.rs:131:30
+ |
+LL | let _: [u8; 4] = std::mem::transmute(0u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u32.to_ne_bytes()`
+
+error: transmute from a `u128` to a `[u8; 16]`
+ --> $DIR/transmute.rs:132:31
+ |
+LL | let _: [u8; 16] = std::mem::transmute(0u128);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u128.to_ne_bytes()`
+
+error: transmute from a `i8` to a `[u8; 1]`
+ --> $DIR/transmute.rs:133:30
+ |
+LL | let _: [u8; 1] = std::mem::transmute(0i8);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i8.to_ne_bytes()`
+
+error: transmute from a `i32` to a `[u8; 4]`
+ --> $DIR/transmute.rs:134:30
+ |
+LL | let _: [u8; 4] = std::mem::transmute(0i32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i32.to_ne_bytes()`
+
+error: transmute from a `i128` to a `[u8; 16]`
+ --> $DIR/transmute.rs:135:31
+ |
+LL | let _: [u8; 16] = std::mem::transmute(0i128);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i128.to_ne_bytes()`
+
+error: transmute from a `f32` to a `[u8; 4]`
+ --> $DIR/transmute.rs:136:30
+ |
+LL | let _: [u8; 4] = std::mem::transmute(0.0f32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f32.to_ne_bytes()`
+
+error: transmute from a `f64` to a `[u8; 8]`
+ --> $DIR/transmute.rs:137:30
+ |
+LL | let _: [u8; 8] = std::mem::transmute(0.0f64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f64.to_ne_bytes()`
+
+error: transmute from a `u8` to a `[u8; 1]`
+ --> $DIR/transmute.rs:142:30
+ |
+LL | let _: [u8; 1] = std::mem::transmute(0u8);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u8.to_ne_bytes()`
+
+error: transmute from a `u32` to a `[u8; 4]`
+ --> $DIR/transmute.rs:143:30
+ |
+LL | let _: [u8; 4] = std::mem::transmute(0u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u32.to_ne_bytes()`
+
+error: transmute from a `u128` to a `[u8; 16]`
+ --> $DIR/transmute.rs:144:31
+ |
+LL | let _: [u8; 16] = std::mem::transmute(0u128);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u128.to_ne_bytes()`
+
+error: transmute from a `i8` to a `[u8; 1]`
+ --> $DIR/transmute.rs:145:30
+ |
+LL | let _: [u8; 1] = std::mem::transmute(0i8);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i8.to_ne_bytes()`
+
+error: transmute from a `i32` to a `[u8; 4]`
+ --> $DIR/transmute.rs:146:30
+ |
+LL | let _: [u8; 4] = std::mem::transmute(0i32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i32.to_ne_bytes()`
+
+error: transmute from a `i128` to a `[u8; 16]`
+ --> $DIR/transmute.rs:147:31
+ |
+LL | let _: [u8; 16] = std::mem::transmute(0i128);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i128.to_ne_bytes()`
+
+error: transmute from a `&[u8]` to a `&str`
+ --> $DIR/transmute.rs:157:28
+ |
+LL | let _: &str = unsafe { std::mem::transmute(B) };
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8(B).unwrap()`
+ |
+ = note: `-D clippy::transmute-bytes-to-str` implied by `-D warnings`
+
+error: transmute from a `&mut [u8]` to a `&mut str`
+ --> $DIR/transmute.rs:158:32
+ |
+LL | let _: &mut str = unsafe { std::mem::transmute(mb) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8_mut(mb).unwrap()`
+
+error: transmute from a `&[u8]` to a `&str`
+ --> $DIR/transmute.rs:159:30
+ |
+LL | const _: &str = unsafe { std::mem::transmute(B) };
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8_unchecked(B)`
+
+error: aborting due to 38 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_32bit.rs b/src/tools/clippy/tests/ui/transmute_32bit.rs
new file mode 100644
index 000000000..ffe22b12f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_32bit.rs
@@ -0,0 +1,14 @@
+// ignore-64bit
+
+#[warn(clippy::wrong_transmute)]
+fn main() {
+ unsafe {
+ let _: *const usize = std::mem::transmute(6.0f32);
+
+ let _: *mut usize = std::mem::transmute(6.0f32);
+
+ let _: *const usize = std::mem::transmute('x');
+
+ let _: *mut usize = std::mem::transmute('x');
+ }
+}
diff --git a/src/tools/clippy/tests/ui/transmute_32bit.stderr b/src/tools/clippy/tests/ui/transmute_32bit.stderr
new file mode 100644
index 000000000..040519564
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_32bit.stderr
@@ -0,0 +1,28 @@
+error: transmute from a `f32` to a pointer
+ --> $DIR/transmute_32bit.rs:6:31
+ |
+LL | let _: *const usize = std::mem::transmute(6.0f32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::wrong-transmute` implied by `-D warnings`
+
+error: transmute from a `f32` to a pointer
+ --> $DIR/transmute_32bit.rs:8:29
+ |
+LL | let _: *mut usize = std::mem::transmute(6.0f32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a `char` to a pointer
+ --> $DIR/transmute_32bit.rs:10:31
+ |
+LL | let _: *const usize = std::mem::transmute('x');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from a `char` to a pointer
+ --> $DIR/transmute_32bit.rs:12:29
+ |
+LL | let _: *mut usize = std::mem::transmute('x');
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_64bit.rs b/src/tools/clippy/tests/ui/transmute_64bit.rs
new file mode 100644
index 000000000..00dc0b2c3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_64bit.rs
@@ -0,0 +1,10 @@
+// ignore-32bit
+
+#[warn(clippy::wrong_transmute)]
+fn main() {
+ unsafe {
+ let _: *const usize = std::mem::transmute(6.0f64);
+
+ let _: *mut usize = std::mem::transmute(6.0f64);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/transmute_64bit.stderr b/src/tools/clippy/tests/ui/transmute_64bit.stderr
new file mode 100644
index 000000000..d1854c009
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_64bit.stderr
@@ -0,0 +1,16 @@
+error: transmute from a `f64` to a pointer
+ --> $DIR/transmute_64bit.rs:6:31
+ |
+LL | let _: *const usize = std::mem::transmute(6.0f64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::wrong-transmute` implied by `-D warnings`
+
+error: transmute from a `f64` to a pointer
+ --> $DIR/transmute_64bit.rs:8:29
+ |
+LL | let _: *mut usize = std::mem::transmute(6.0f64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_collection.rs b/src/tools/clippy/tests/ui/transmute_collection.rs
new file mode 100644
index 000000000..5a431bee0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_collection.rs
@@ -0,0 +1,50 @@
+#![warn(clippy::unsound_collection_transmute)]
+
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
+use std::mem::{transmute, MaybeUninit};
+
+fn main() {
+ unsafe {
+ // wrong size
+ let _ = transmute::<_, Vec<u32>>(vec![0u8]);
+ // wrong layout
+ let _ = transmute::<_, Vec<[u8; 4]>>(vec![1234u32]);
+
+ // wrong size
+ let _ = transmute::<_, VecDeque<u32>>(VecDeque::<u8>::new());
+ // wrong layout
+ let _ = transmute::<_, VecDeque<u32>>(VecDeque::<[u8; 4]>::new());
+
+ // wrong size
+ let _ = transmute::<_, BinaryHeap<u32>>(BinaryHeap::<u8>::new());
+ // wrong layout
+ let _ = transmute::<_, BinaryHeap<u32>>(BinaryHeap::<[u8; 4]>::new());
+
+ // wrong size
+ let _ = transmute::<_, BTreeSet<u32>>(BTreeSet::<u8>::new());
+ // wrong layout
+ let _ = transmute::<_, BTreeSet<u32>>(BTreeSet::<[u8; 4]>::new());
+
+ // wrong size
+ let _ = transmute::<_, HashSet<u32>>(HashSet::<u8>::new());
+ // wrong layout
+ let _ = transmute::<_, HashSet<u32>>(HashSet::<[u8; 4]>::new());
+
+ // wrong size
+ let _ = transmute::<_, BTreeMap<u8, u32>>(BTreeMap::<u8, u8>::new());
+ let _ = transmute::<_, BTreeMap<u8, u32>>(BTreeMap::<u32, u32>::new());
+ // wrong layout
+ let _ = transmute::<_, BTreeMap<u8, u32>>(BTreeMap::<u8, [u8; 4]>::new());
+ let _ = transmute::<_, BTreeMap<u32, u32>>(BTreeMap::<[u8; 4], u32>::new());
+
+ // wrong size
+ let _ = transmute::<_, HashMap<u8, u32>>(HashMap::<u8, u8>::new());
+ let _ = transmute::<_, HashMap<u8, u32>>(HashMap::<u32, u32>::new());
+ // wrong layout
+ let _ = transmute::<_, HashMap<u8, u32>>(HashMap::<u8, [u8; 4]>::new());
+ let _ = transmute::<_, HashMap<u32, u32>>(HashMap::<[u8; 4], u32>::new());
+
+ let _ = transmute::<_, Vec<u8>>(Vec::<MaybeUninit<u8>>::new());
+ let _ = transmute::<_, Vec<*mut u32>>(Vec::<Box<u32>>::new());
+ }
+}
diff --git a/src/tools/clippy/tests/ui/transmute_collection.stderr b/src/tools/clippy/tests/ui/transmute_collection.stderr
new file mode 100644
index 000000000..ebc05c402
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_collection.stderr
@@ -0,0 +1,112 @@
+error: transmute from `std::vec::Vec<u8>` to `std::vec::Vec<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:9:17
+ |
+LL | let _ = transmute::<_, Vec<u32>>(vec![0u8]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unsound-collection-transmute` implied by `-D warnings`
+
+error: transmute from `std::vec::Vec<u32>` to `std::vec::Vec<[u8; 4]>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:11:17
+ |
+LL | let _ = transmute::<_, Vec<[u8; 4]>>(vec![1234u32]);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::VecDeque<u8>` to `std::collections::VecDeque<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:14:17
+ |
+LL | let _ = transmute::<_, VecDeque<u32>>(VecDeque::<u8>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::VecDeque<[u8; 4]>` to `std::collections::VecDeque<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:16:17
+ |
+LL | let _ = transmute::<_, VecDeque<u32>>(VecDeque::<[u8; 4]>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BinaryHeap<u8>` to `std::collections::BinaryHeap<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:19:17
+ |
+LL | let _ = transmute::<_, BinaryHeap<u32>>(BinaryHeap::<u8>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BinaryHeap<[u8; 4]>` to `std::collections::BinaryHeap<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:21:17
+ |
+LL | let _ = transmute::<_, BinaryHeap<u32>>(BinaryHeap::<[u8; 4]>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BTreeSet<u8>` to `std::collections::BTreeSet<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:24:17
+ |
+LL | let _ = transmute::<_, BTreeSet<u32>>(BTreeSet::<u8>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BTreeSet<[u8; 4]>` to `std::collections::BTreeSet<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:26:17
+ |
+LL | let _ = transmute::<_, BTreeSet<u32>>(BTreeSet::<[u8; 4]>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::HashSet<u8>` to `std::collections::HashSet<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:29:17
+ |
+LL | let _ = transmute::<_, HashSet<u32>>(HashSet::<u8>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::HashSet<[u8; 4]>` to `std::collections::HashSet<u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:31:17
+ |
+LL | let _ = transmute::<_, HashSet<u32>>(HashSet::<[u8; 4]>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BTreeMap<u8, u8>` to `std::collections::BTreeMap<u8, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:34:17
+ |
+LL | let _ = transmute::<_, BTreeMap<u8, u32>>(BTreeMap::<u8, u8>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BTreeMap<u32, u32>` to `std::collections::BTreeMap<u8, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:35:17
+ |
+LL | let _ = transmute::<_, BTreeMap<u8, u32>>(BTreeMap::<u32, u32>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BTreeMap<u8, [u8; 4]>` to `std::collections::BTreeMap<u8, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:37:17
+ |
+LL | let _ = transmute::<_, BTreeMap<u8, u32>>(BTreeMap::<u8, [u8; 4]>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::BTreeMap<[u8; 4], u32>` to `std::collections::BTreeMap<u32, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:38:17
+ |
+LL | let _ = transmute::<_, BTreeMap<u32, u32>>(BTreeMap::<[u8; 4], u32>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::HashMap<u8, u8>` to `std::collections::HashMap<u8, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:41:17
+ |
+LL | let _ = transmute::<_, HashMap<u8, u32>>(HashMap::<u8, u8>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::HashMap<u32, u32>` to `std::collections::HashMap<u8, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:42:17
+ |
+LL | let _ = transmute::<_, HashMap<u8, u32>>(HashMap::<u32, u32>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::HashMap<u8, [u8; 4]>` to `std::collections::HashMap<u8, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:44:17
+ |
+LL | let _ = transmute::<_, HashMap<u8, u32>>(HashMap::<u8, [u8; 4]>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `std::collections::HashMap<[u8; 4], u32>` to `std::collections::HashMap<u32, u32>` with mismatched layout is unsound
+ --> $DIR/transmute_collection.rs:45:17
+ |
+LL | let _ = transmute::<_, HashMap<u32, u32>>(HashMap::<[u8; 4], u32>::new());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_float_to_int.rs b/src/tools/clippy/tests/ui/transmute_float_to_int.rs
new file mode 100644
index 000000000..806b2d77d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_float_to_int.rs
@@ -0,0 +1,25 @@
+#![warn(clippy::transmute_float_to_int)]
+
+fn float_to_int() {
+ let _: u32 = unsafe { std::mem::transmute(1f32) };
+ let _: i32 = unsafe { std::mem::transmute(1f32) };
+ let _: u64 = unsafe { std::mem::transmute(1f64) };
+ let _: i64 = unsafe { std::mem::transmute(1f64) };
+ let _: u64 = unsafe { std::mem::transmute(1.0) };
+ let _: u64 = unsafe { std::mem::transmute(-1.0) };
+}
+
+mod issue_5747 {
+ const VALUE32: i32 = unsafe { std::mem::transmute(1f32) };
+ const VALUE64: u64 = unsafe { std::mem::transmute(1f64) };
+
+ const fn to_bits_32(v: f32) -> u32 {
+ unsafe { std::mem::transmute(v) }
+ }
+
+ const fn to_bits_64(v: f64) -> i64 {
+ unsafe { std::mem::transmute(v) }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/transmute_float_to_int.stderr b/src/tools/clippy/tests/ui/transmute_float_to_int.stderr
new file mode 100644
index 000000000..eb786bb39
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_float_to_int.stderr
@@ -0,0 +1,40 @@
+error: transmute from a `f32` to a `u32`
+ --> $DIR/transmute_float_to_int.rs:4:27
+ |
+LL | let _: u32 = unsafe { std::mem::transmute(1f32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `1f32.to_bits()`
+ |
+ = note: `-D clippy::transmute-float-to-int` implied by `-D warnings`
+
+error: transmute from a `f32` to a `i32`
+ --> $DIR/transmute_float_to_int.rs:5:27
+ |
+LL | let _: i32 = unsafe { std::mem::transmute(1f32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `1f32.to_bits() as i32`
+
+error: transmute from a `f64` to a `u64`
+ --> $DIR/transmute_float_to_int.rs:6:27
+ |
+LL | let _: u64 = unsafe { std::mem::transmute(1f64) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `1f64.to_bits()`
+
+error: transmute from a `f64` to a `i64`
+ --> $DIR/transmute_float_to_int.rs:7:27
+ |
+LL | let _: i64 = unsafe { std::mem::transmute(1f64) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `1f64.to_bits() as i64`
+
+error: transmute from a `f64` to a `u64`
+ --> $DIR/transmute_float_to_int.rs:8:27
+ |
+LL | let _: u64 = unsafe { std::mem::transmute(1.0) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `1.0f64.to_bits()`
+
+error: transmute from a `f64` to a `u64`
+ --> $DIR/transmute_float_to_int.rs:9:27
+ |
+LL | let _: u64 = unsafe { std::mem::transmute(-1.0) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(-1.0f64).to_bits()`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs
new file mode 100644
index 000000000..f06ffab5d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs
@@ -0,0 +1,63 @@
+#![warn(clippy::transmute_ptr_to_ptr)]
+#![allow(clippy::borrow_as_ptr)]
+
+// Make sure we can modify lifetimes, which is one of the recommended uses
+// of transmute
+
+// Make sure we can do static lifetime transmutes
+unsafe fn transmute_lifetime_to_static<'a, T>(t: &'a T) -> &'static T {
+ std::mem::transmute::<&'a T, &'static T>(t)
+}
+
+// Make sure we can do non-static lifetime transmutes
+unsafe fn transmute_lifetime<'a, 'b, T>(t: &'a T, u: &'b T) -> &'b T {
+ std::mem::transmute::<&'a T, &'b T>(t)
+}
+
+struct LifetimeParam<'a> {
+ s: &'a str,
+}
+
+struct GenericParam<T> {
+ t: T,
+}
+
+fn transmute_ptr_to_ptr() {
+ let ptr = &1u32 as *const u32;
+ let mut_ptr = &mut 1u32 as *mut u32;
+ unsafe {
+ // pointer-to-pointer transmutes; bad
+ let _: *const f32 = std::mem::transmute(ptr);
+ let _: *mut f32 = std::mem::transmute(mut_ptr);
+ // ref-ref transmutes; bad
+ let _: &f32 = std::mem::transmute(&1u32);
+ let _: &f64 = std::mem::transmute(&1f32);
+ // ^ this test is here because both f32 and f64 are the same TypeVariant, but they are not
+ // the same type
+ let _: &mut f32 = std::mem::transmute(&mut 1u32);
+ let _: &GenericParam<f32> = std::mem::transmute(&GenericParam { t: 1u32 });
+ }
+
+    // these are recommendations for solving the above; if these lint, we need to update
+ // those suggestions
+ let _ = ptr as *const f32;
+ let _ = mut_ptr as *mut f32;
+ let _ = unsafe { &*(&1u32 as *const u32 as *const f32) };
+ let _ = unsafe { &mut *(&mut 1u32 as *mut u32 as *mut f32) };
+
+ // transmute internal lifetimes, should not lint
+ let s = "hello world".to_owned();
+ let lp = LifetimeParam { s: &s };
+ let _: &LifetimeParam<'static> = unsafe { std::mem::transmute(&lp) };
+ let _: &GenericParam<&LifetimeParam<'static>> = unsafe { std::mem::transmute(&GenericParam { t: &lp }) };
+}
+
+// dereferencing raw pointers in const contexts, should not lint as it's unstable (issue 5959)
+const _: &() = {
+ struct Zst;
+ let zst = &Zst;
+
+ unsafe { std::mem::transmute::<&'static Zst, &'static ()>(zst) }
+};
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr
new file mode 100644
index 000000000..49a8a3347
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.stderr
@@ -0,0 +1,40 @@
+error: transmute from a pointer to a pointer
+ --> $DIR/transmute_ptr_to_ptr.rs:30:29
+ |
+LL | let _: *const f32 = std::mem::transmute(ptr);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr as *const f32`
+ |
+ = note: `-D clippy::transmute-ptr-to-ptr` implied by `-D warnings`
+
+error: transmute from a pointer to a pointer
+ --> $DIR/transmute_ptr_to_ptr.rs:31:27
+ |
+LL | let _: *mut f32 = std::mem::transmute(mut_ptr);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `mut_ptr as *mut f32`
+
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ptr_to_ptr.rs:33:23
+ |
+LL | let _: &f32 = std::mem::transmute(&1u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(&1u32 as *const u32 as *const f32)`
+
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ptr_to_ptr.rs:34:23
+ |
+LL | let _: &f64 = std::mem::transmute(&1f32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(&1f32 as *const f32 as *const f64)`
+
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ptr_to_ptr.rs:37:27
+ |
+LL | let _: &mut f32 = std::mem::transmute(&mut 1u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(&mut 1u32 as *mut u32 as *mut f32)`
+
+error: transmute from a reference to a reference
+ --> $DIR/transmute_ptr_to_ptr.rs:38:37
+ |
+LL | let _: &GenericParam<f32> = std::mem::transmute(&GenericParam { t: 1u32 });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(&GenericParam { t: 1u32 } as *const GenericParam<u32> as *const GenericParam<f32>)`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed
new file mode 100644
index 000000000..e5fe9133f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed
@@ -0,0 +1,78 @@
+// run-rustfix
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::transmute_ptr_to_ref)]
+#![allow(clippy::match_single_binding)]
+
+unsafe fn _ptr_to_ref<T, U>(p: *const T, m: *mut T, o: *const U, om: *mut U) {
+ let _: &T = &*p;
+ let _: &T = &*p;
+
+ let _: &mut T = &mut *m;
+ let _: &mut T = &mut *m;
+
+ let _: &T = &*m;
+ let _: &T = &*m;
+
+ let _: &mut T = &mut *(p as *mut T);
+ let _ = &mut *(p as *mut T);
+
+ let _: &T = &*(o as *const T);
+ let _: &T = &*(o as *const T);
+
+ let _: &mut T = &mut *(om as *mut T);
+ let _: &mut T = &mut *(om as *mut T);
+
+ let _: &T = &*(om as *const T);
+ let _: &T = &*(om as *const T);
+}
+
+fn _issue1231() {
+ struct Foo<'a, T> {
+ bar: &'a T,
+ }
+
+ let raw = 42 as *const i32;
+ let _: &Foo<u8> = unsafe { &*raw.cast::<Foo<_>>() };
+
+ let _: &Foo<&u8> = unsafe { &*raw.cast::<Foo<&_>>() };
+
+ type Bar<'a> = &'a u8;
+ let raw = 42 as *const i32;
+ unsafe { &*(raw as *const u8) };
+}
+
+unsafe fn _issue8924<'a, 'b, 'c>(x: *const &'a u32, y: *const &'b u32) -> &'c &'b u32 {
+ match 0 {
+ 0 => &*x.cast::<&u32>(),
+ 1 => &*y.cast::<&u32>(),
+ 2 => &*x.cast::<&'b u32>(),
+ _ => &*y.cast::<&'b u32>(),
+ }
+}
+
+unsafe fn _meets_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 {
+ #![clippy::msrv = "1.38"]
+ let a = 0u32;
+ let a = &a as *const u32;
+ let _: &u32 = &*a;
+ let _: &u32 = &*a.cast::<u32>();
+ match 0 {
+ 0 => &*x.cast::<&u32>(),
+ _ => &*x.cast::<&'b u32>(),
+ }
+}
+
+unsafe fn _under_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 {
+ #![clippy::msrv = "1.37"]
+ let a = 0u32;
+ let a = &a as *const u32;
+ let _: &u32 = &*a;
+ let _: &u32 = &*(a as *const u32);
+ match 0 {
+ 0 => &*(x as *const () as *const &u32),
+ _ => &*(x as *const () as *const &'b u32),
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs
new file mode 100644
index 000000000..fe49cdc32
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs
@@ -0,0 +1,78 @@
+// run-rustfix
+
+#![feature(custom_inner_attributes)]
+#![warn(clippy::transmute_ptr_to_ref)]
+#![allow(clippy::match_single_binding)]
+
+unsafe fn _ptr_to_ref<T, U>(p: *const T, m: *mut T, o: *const U, om: *mut U) {
+ let _: &T = std::mem::transmute(p);
+ let _: &T = &*p;
+
+ let _: &mut T = std::mem::transmute(m);
+ let _: &mut T = &mut *m;
+
+ let _: &T = std::mem::transmute(m);
+ let _: &T = &*m;
+
+ let _: &mut T = std::mem::transmute(p as *mut T);
+ let _ = &mut *(p as *mut T);
+
+ let _: &T = std::mem::transmute(o);
+ let _: &T = &*(o as *const T);
+
+ let _: &mut T = std::mem::transmute(om);
+ let _: &mut T = &mut *(om as *mut T);
+
+ let _: &T = std::mem::transmute(om);
+ let _: &T = &*(om as *const T);
+}
+
+fn _issue1231() {
+ struct Foo<'a, T> {
+ bar: &'a T,
+ }
+
+ let raw = 42 as *const i32;
+ let _: &Foo<u8> = unsafe { std::mem::transmute::<_, &Foo<_>>(raw) };
+
+ let _: &Foo<&u8> = unsafe { std::mem::transmute::<_, &Foo<&_>>(raw) };
+
+ type Bar<'a> = &'a u8;
+ let raw = 42 as *const i32;
+ unsafe { std::mem::transmute::<_, Bar>(raw) };
+}
+
+unsafe fn _issue8924<'a, 'b, 'c>(x: *const &'a u32, y: *const &'b u32) -> &'c &'b u32 {
+ match 0 {
+ 0 => std::mem::transmute(x),
+ 1 => std::mem::transmute(y),
+ 2 => std::mem::transmute::<_, &&'b u32>(x),
+ _ => std::mem::transmute::<_, &&'b u32>(y),
+ }
+}
+
+unsafe fn _meets_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 {
+ #![clippy::msrv = "1.38"]
+ let a = 0u32;
+ let a = &a as *const u32;
+ let _: &u32 = std::mem::transmute(a);
+ let _: &u32 = std::mem::transmute::<_, &u32>(a);
+ match 0 {
+ 0 => std::mem::transmute(x),
+ _ => std::mem::transmute::<_, &&'b u32>(x),
+ }
+}
+
+unsafe fn _under_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 {
+ #![clippy::msrv = "1.37"]
+ let a = 0u32;
+ let a = &a as *const u32;
+ let _: &u32 = std::mem::transmute(a);
+ let _: &u32 = std::mem::transmute::<_, &u32>(a);
+ match 0 {
+ 0 => std::mem::transmute(x),
+ _ => std::mem::transmute::<_, &&'b u32>(x),
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr
new file mode 100644
index 000000000..2993e5e7b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr
@@ -0,0 +1,136 @@
+error: transmute from a pointer type (`*const T`) to a reference type (`&T`)
+ --> $DIR/transmute_ptr_to_ref.rs:8:17
+ |
+LL | let _: &T = std::mem::transmute(p);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*p`
+ |
+ = note: `-D clippy::transmute-ptr-to-ref` implied by `-D warnings`
+
+error: transmute from a pointer type (`*mut T`) to a reference type (`&mut T`)
+ --> $DIR/transmute_ptr_to_ref.rs:11:21
+ |
+LL | let _: &mut T = std::mem::transmute(m);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *m`
+
+error: transmute from a pointer type (`*mut T`) to a reference type (`&T`)
+ --> $DIR/transmute_ptr_to_ref.rs:14:17
+ |
+LL | let _: &T = std::mem::transmute(m);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*m`
+
+error: transmute from a pointer type (`*mut T`) to a reference type (`&mut T`)
+ --> $DIR/transmute_ptr_to_ref.rs:17:21
+ |
+LL | let _: &mut T = std::mem::transmute(p as *mut T);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(p as *mut T)`
+
+error: transmute from a pointer type (`*const U`) to a reference type (`&T`)
+ --> $DIR/transmute_ptr_to_ref.rs:20:17
+ |
+LL | let _: &T = std::mem::transmute(o);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(o as *const T)`
+
+error: transmute from a pointer type (`*mut U`) to a reference type (`&mut T`)
+ --> $DIR/transmute_ptr_to_ref.rs:23:21
+ |
+LL | let _: &mut T = std::mem::transmute(om);
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(om as *mut T)`
+
+error: transmute from a pointer type (`*mut U`) to a reference type (`&T`)
+ --> $DIR/transmute_ptr_to_ref.rs:26:17
+ |
+LL | let _: &T = std::mem::transmute(om);
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(om as *const T)`
+
+error: transmute from a pointer type (`*const i32`) to a reference type (`&_issue1231::Foo<u8>`)
+ --> $DIR/transmute_ptr_to_ref.rs:36:32
+ |
+LL | let _: &Foo<u8> = unsafe { std::mem::transmute::<_, &Foo<_>>(raw) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*raw.cast::<Foo<_>>()`
+
+error: transmute from a pointer type (`*const i32`) to a reference type (`&_issue1231::Foo<&u8>`)
+ --> $DIR/transmute_ptr_to_ref.rs:38:33
+ |
+LL | let _: &Foo<&u8> = unsafe { std::mem::transmute::<_, &Foo<&_>>(raw) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*raw.cast::<Foo<&_>>()`
+
+error: transmute from a pointer type (`*const i32`) to a reference type (`&u8`)
+ --> $DIR/transmute_ptr_to_ref.rs:42:14
+ |
+LL | unsafe { std::mem::transmute::<_, Bar>(raw) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(raw as *const u8)`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:47:14
+ |
+LL | 0 => std::mem::transmute(x),
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&u32>()`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:48:14
+ |
+LL | 1 => std::mem::transmute(y),
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*y.cast::<&u32>()`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:49:14
+ |
+LL | 2 => std::mem::transmute::<_, &&'b u32>(x),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&'b u32>()`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:50:14
+ |
+LL | _ => std::mem::transmute::<_, &&'b u32>(y),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*y.cast::<&'b u32>()`
+
+error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:58:19
+ |
+LL | let _: &u32 = std::mem::transmute(a);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a`
+
+error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:59:19
+ |
+LL | let _: &u32 = std::mem::transmute::<_, &u32>(a);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a.cast::<u32>()`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:61:14
+ |
+LL | 0 => std::mem::transmute(x),
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&u32>()`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:62:14
+ |
+LL | _ => std::mem::transmute::<_, &&'b u32>(x),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&'b u32>()`
+
+error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:70:19
+ |
+LL | let _: &u32 = std::mem::transmute(a);
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a`
+
+error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:71:19
+ |
+LL | let _: &u32 = std::mem::transmute::<_, &u32>(a);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(a as *const u32)`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:73:14
+ |
+LL | 0 => std::mem::transmute(x),
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(x as *const () as *const &u32)`
+
+error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`)
+ --> $DIR/transmute_ptr_to_ref.rs:74:14
+ |
+LL | _ => std::mem::transmute::<_, &&'b u32>(x),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(x as *const () as *const &'b u32)`
+
+error: aborting due to 22 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmute_undefined_repr.rs b/src/tools/clippy/tests/ui/transmute_undefined_repr.rs
new file mode 100644
index 000000000..ebcaa7a84
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_undefined_repr.rs
@@ -0,0 +1,144 @@
+#![warn(clippy::transmute_undefined_repr)]
+#![allow(clippy::unit_arg, clippy::transmute_ptr_to_ref, clippy::useless_transmute)]
+
+use core::any::TypeId;
+use core::ffi::c_void;
+use core::mem::{size_of, transmute, MaybeUninit};
+
+fn value<T>() -> T {
+ unimplemented!()
+}
+
+struct Empty;
+struct Ty<T>(T);
+struct Ty2<T, U>(T, U);
+
+#[repr(C)]
+struct Ty2C<T, U>(T, U);
+
+fn main() {
+ unsafe {
+ let _: () = transmute(value::<Empty>());
+ let _: Empty = transmute(value::<()>());
+
+ let _: Ty<u32> = transmute(value::<u32>());
+ let _: Ty<u32> = transmute(value::<u32>());
+
+ let _: Ty2C<u32, i32> = transmute(value::<Ty2<u32, i32>>()); // Lint, Ty2 is unordered
+ let _: Ty2<u32, i32> = transmute(value::<Ty2C<u32, i32>>()); // Lint, Ty2 is unordered
+
+ let _: Ty2<u32, i32> = transmute(value::<Ty<Ty2<u32, i32>>>()); // Ok, Ty2 types are the same
+ let _: Ty<Ty2<u32, i32>> = transmute(value::<Ty2<u32, i32>>()); // Ok, Ty2 types are the same
+
+ let _: Ty2<u32, f32> = transmute(value::<Ty<Ty2<u32, i32>>>()); // Lint, different Ty2 instances
+ let _: Ty<Ty2<u32, i32>> = transmute(value::<Ty2<u32, f32>>()); // Lint, different Ty2 instances
+
+ let _: Ty<&()> = transmute(value::<&()>());
+ let _: &() = transmute(value::<Ty<&()>>());
+
+ let _: &Ty2<u32, f32> = transmute(value::<Ty<&Ty2<u32, i32>>>()); // Lint, different Ty2 instances
+ let _: Ty<&Ty2<u32, i32>> = transmute(value::<&Ty2<u32, f32>>()); // Lint, different Ty2 instances
+
+ let _: Ty<usize> = transmute(value::<&Ty2<u32, i32>>()); // Ok, pointer to usize conversion
+ let _: &Ty2<u32, i32> = transmute(value::<Ty<usize>>()); // Ok, pointer to usize conversion
+
+ let _: Ty<[u8; 8]> = transmute(value::<Ty2<u32, i32>>()); // Ok, transmute to byte array
+ let _: Ty2<u32, i32> = transmute(value::<Ty<[u8; 8]>>()); // Ok, transmute from byte array
+
+ // issue #8417
+ let _: Ty2C<Ty2<u32, i32>, ()> = transmute(value::<Ty2<u32, i32>>()); // Ok, Ty2 types are the same
+ let _: Ty2<u32, i32> = transmute(value::<Ty2C<Ty2<u32, i32>, ()>>()); // Ok, Ty2 types are the same
+
+ let _: &'static mut Ty2<u32, u32> = transmute(value::<Box<Ty2<u32, u32>>>()); // Ok, Ty2 types are the same
+ let _: Box<Ty2<u32, u32>> = transmute(value::<&'static mut Ty2<u32, u32>>()); // Ok, Ty2 types are the same
+ let _: *mut Ty2<u32, u32> = transmute(value::<Box<Ty2<u32, u32>>>()); // Ok, Ty2 types are the same
+ let _: Box<Ty2<u32, u32>> = transmute(value::<*mut Ty2<u32, u32>>()); // Ok, Ty2 types are the same
+
+ let _: &'static mut Ty2<u32, f32> = transmute(value::<Box<Ty2<u32, u32>>>()); // Lint, different Ty2 instances
+ let _: Box<Ty2<u32, u32>> = transmute(value::<&'static mut Ty2<u32, f32>>()); // Lint, different Ty2 instances
+
+ let _: *const () = transmute(value::<Ty<&Ty2<u32, f32>>>()); // Ok, type erasure
+ let _: Ty<&Ty2<u32, f32>> = transmute(value::<*const ()>()); // Ok, reverse type erasure
+
+ let _: *const c_void = transmute(value::<Ty<&Ty2<u32, f32>>>()); // Ok, type erasure
+ let _: Ty<&Ty2<u32, f32>> = transmute(value::<*const c_void>()); // Ok, reverse type erasure
+
+ enum Erase {}
+ let _: *const Erase = transmute(value::<Ty<&Ty2<u32, f32>>>()); // Ok, type erasure
+ let _: Ty<&Ty2<u32, f32>> = transmute(value::<*const Erase>()); // Ok, reverse type erasure
+
+ struct Erase2(
+ [u8; 0],
+ core::marker::PhantomData<(*mut u8, core::marker::PhantomPinned)>,
+ );
+ let _: *const Erase2 = transmute(value::<Ty<&Ty2<u32, f32>>>()); // Ok, type erasure
+ let _: Ty<&Ty2<u32, f32>> = transmute(value::<*const Erase2>()); // Ok, reverse type erasure
+
+ let _: *const () = transmute(value::<&&[u8]>()); // Ok, type erasure
+ let _: &&[u8] = transmute(value::<*const ()>()); // Ok, reverse type erasure
+
+ let _: *mut c_void = transmute(value::<&mut &[u8]>()); // Ok, type erasure
+ let _: &mut &[u8] = transmute(value::<*mut c_void>()); // Ok, reverse type erasure
+
+ let _: [u8; size_of::<&[u8]>()] = transmute(value::<&[u8]>()); // Ok, transmute to byte array
+ let _: &[u8] = transmute(value::<[u8; size_of::<&[u8]>()]>()); // Ok, transmute from byte array
+
+ let _: [usize; 2] = transmute(value::<&[u8]>()); // Ok, transmute to int array
+ let _: &[u8] = transmute(value::<[usize; 2]>()); // Ok, transmute from int array
+
+ let _: *const [u8] = transmute(value::<Box<[u8]>>()); // Ok
+ let _: Box<[u8]> = transmute(value::<*mut [u8]>()); // Ok
+
+ let _: Ty2<u32, u32> = transmute(value::<(Ty2<u32, u32>,)>()); // Ok
+ let _: (Ty2<u32, u32>,) = transmute(value::<Ty2<u32, u32>>()); // Ok
+
+ let _: Ty2<u32, u32> = transmute(value::<(Ty2<u32, u32>, ())>()); // Ok
+ let _: (Ty2<u32, u32>, ()) = transmute(value::<Ty2<u32, u32>>()); // Ok
+
+ let _: Ty2<u32, u32> = transmute(value::<((), Ty2<u32, u32>)>()); // Ok
+ let _: ((), Ty2<u32, u32>) = transmute(value::<Ty2<u32, u32>>()); // Ok
+
+ let _: (usize, usize) = transmute(value::<&[u8]>()); // Ok
+ let _: &[u8] = transmute(value::<(usize, usize)>()); // Ok
+
+ trait Trait {}
+ let _: (isize, isize) = transmute(value::<&dyn Trait>()); // Ok
+ let _: &dyn Trait = transmute(value::<(isize, isize)>()); // Ok
+
+ let _: MaybeUninit<Ty2<u32, u32>> = transmute(value::<Ty2<u32, u32>>()); // Ok
+ let _: Ty2<u32, u32> = transmute(value::<MaybeUninit<Ty2<u32, u32>>>()); // Ok
+
+ let _: Ty<&[u32]> = transmute::<&[u32], _>(value::<&Vec<u32>>()); // Ok
+ }
+}
+
+fn _with_generics<T: 'static, U: 'static>() {
+ if TypeId::of::<T>() != TypeId::of::<u32>() || TypeId::of::<T>() != TypeId::of::<U>() {
+ return;
+ }
+ unsafe {
+ let _: &u32 = transmute(value::<&T>()); // Ok
+ let _: &T = transmute(value::<&u32>()); // Ok
+
+ let _: Vec<U> = transmute(value::<Vec<T>>()); // Ok
+ let _: Vec<T> = transmute(value::<Vec<U>>()); // Ok
+
+ let _: Ty<&u32> = transmute(value::<&T>()); // Ok
+ let _: Ty<&T> = transmute(value::<&u32>()); // Ok
+
+ let _: Vec<u32> = transmute(value::<Vec<T>>()); // Ok
+ let _: Vec<T> = transmute(value::<Vec<u32>>()); // Ok
+
+ let _: &Ty2<u32, u32> = transmute(value::<&Ty2<T, U>>()); // Ok
+ let _: &Ty2<T, U> = transmute(value::<&Ty2<u32, u32>>()); // Ok
+
+ let _: Vec<Vec<u32>> = transmute(value::<Vec<Vec<T>>>()); // Ok
+ let _: Vec<Vec<T>> = transmute(value::<Vec<Vec<u32>>>()); // Ok
+
+ let _: Vec<Ty2<T, u32>> = transmute(value::<Vec<Ty2<U, i32>>>()); // Err
+ let _: Vec<Ty2<U, i32>> = transmute(value::<Vec<Ty2<T, u32>>>()); // Err
+
+ let _: *const u32 = transmute(value::<Box<T>>()); // Ok
+ let _: Box<T> = transmute(value::<*const u32>()); // Ok
+ }
+}
diff --git a/src/tools/clippy/tests/ui/transmute_undefined_repr.stderr b/src/tools/clippy/tests/ui/transmute_undefined_repr.stderr
new file mode 100644
index 000000000..28bfba6c7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmute_undefined_repr.stderr
@@ -0,0 +1,80 @@
+error: transmute from `Ty2<u32, i32>` which has an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:27:33
+ |
+LL | let _: Ty2C<u32, i32> = transmute(value::<Ty2<u32, i32>>()); // Lint, Ty2 is unordered
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::transmute-undefined-repr` implied by `-D warnings`
+
+error: transmute into `Ty2<u32, i32>` which has an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:28:32
+ |
+LL | let _: Ty2<u32, i32> = transmute(value::<Ty2C<u32, i32>>()); // Lint, Ty2 is unordered
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmute from `Ty<Ty2<u32, i32>>` to `Ty2<u32, f32>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:33:32
+ |
+LL | let _: Ty2<u32, f32> = transmute(value::<Ty<Ty2<u32, i32>>>()); // Lint, different Ty2 instances
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Ty2`) may have different layouts
+
+error: transmute from `Ty2<u32, f32>` to `Ty<Ty2<u32, i32>>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:34:36
+ |
+LL | let _: Ty<Ty2<u32, i32>> = transmute(value::<Ty2<u32, f32>>()); // Lint, different Ty2 instances
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Ty2`) may have different layouts
+
+error: transmute from `Ty<&Ty2<u32, i32>>` to `&Ty2<u32, f32>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:39:33
+ |
+LL | let _: &Ty2<u32, f32> = transmute(value::<Ty<&Ty2<u32, i32>>>()); // Lint, different Ty2 instances
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Ty2`) may have different layouts
+
+error: transmute from `&Ty2<u32, f32>` to `Ty<&Ty2<u32, i32>>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:40:37
+ |
+LL | let _: Ty<&Ty2<u32, i32>> = transmute(value::<&Ty2<u32, f32>>()); // Lint, different Ty2 instances
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Ty2`) may have different layouts
+
+error: transmute from `std::boxed::Box<Ty2<u32, u32>>` to `&mut Ty2<u32, f32>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:57:45
+ |
+LL | let _: &'static mut Ty2<u32, f32> = transmute(value::<Box<Ty2<u32, u32>>>()); // Lint, different Ty2 instances
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Ty2`) may have different layouts
+
+error: transmute from `&mut Ty2<u32, f32>` to `std::boxed::Box<Ty2<u32, u32>>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:58:37
+ |
+LL | let _: Box<Ty2<u32, u32>> = transmute(value::<&'static mut Ty2<u32, f32>>()); // Lint, different Ty2 instances
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Ty2`) may have different layouts
+
+error: transmute from `std::vec::Vec<Ty2<U, i32>>` to `std::vec::Vec<Ty2<T, u32>>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:138:35
+ |
+LL | let _: Vec<Ty2<T, u32>> = transmute(value::<Vec<Ty2<U, i32>>>()); // Err
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Vec`) may have different layouts
+
+error: transmute from `std::vec::Vec<Ty2<T, u32>>` to `std::vec::Vec<Ty2<U, i32>>`, both of which have an undefined layout
+ --> $DIR/transmute_undefined_repr.rs:139:35
+ |
+LL | let _: Vec<Ty2<U, i32>> = transmute(value::<Vec<Ty2<T, u32>>>()); // Err
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: two instances of the same generic type (`Vec`) may have different layouts
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
new file mode 100644
index 000000000..539239fc1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
@@ -0,0 +1,77 @@
+// run-rustfix
+#![warn(clippy::transmutes_expressible_as_ptr_casts)]
+// These two warnings currently cover the cases transmutes_expressible_as_ptr_casts
+// would otherwise be responsible for
+#![warn(clippy::useless_transmute)]
+#![warn(clippy::transmute_ptr_to_ptr)]
+#![allow(dead_code, unused_unsafe, clippy::borrow_as_ptr)]
+
+use std::mem::{size_of, transmute};
+
+// rustc_typeck::check::cast contains documentation about when a cast `e as U` is
+// valid, which we quote from below.
+fn main() {
+ // We should see an error message for each transmute, and no error messages for
+ // the casts, since the casts are the recommended fixes.
+
+ // e is an integer and U is *U_0, while U_0: Sized; addr-ptr-cast
+ let _ptr_i32_transmute = unsafe { usize::MAX as *const i32 };
+ let ptr_i32 = usize::MAX as *const i32;
+
+ // e has type *T, U is *U_0, and either U_0: Sized ...
+ let _ptr_i8_transmute = unsafe { ptr_i32 as *const i8 };
+ let _ptr_i8 = ptr_i32 as *const i8;
+
+ let slice_ptr = &[0, 1, 2, 3] as *const [i32];
+
+ // ... or pointer_kind(T) = pointer_kind(U_0); ptr-ptr-cast
+ let _ptr_to_unsized_transmute = unsafe { slice_ptr as *const [u32] };
+ let _ptr_to_unsized = slice_ptr as *const [u32];
+ // TODO: We could try testing vtable casts here too, but maybe
+ // we should wait until std::raw::TraitObject is stabilized?
+
+ // e has type *T and U is a numeric type, while T: Sized; ptr-addr-cast
+ let _usize_from_int_ptr_transmute = unsafe { ptr_i32 as usize };
+ let _usize_from_int_ptr = ptr_i32 as usize;
+
+ let array_ref: &[i32; 4] = &[1, 2, 3, 4];
+
+ // e has type &[T; n] and U is *const T; array-ptr-cast
+ let _array_ptr_transmute = unsafe { array_ref as *const [i32; 4] };
+ let _array_ptr = array_ref as *const [i32; 4];
+
+ fn foo(_: usize) -> u8 {
+ 42
+ }
+
+ // e is a function pointer type and U has type *T, while T: Sized; fptr-ptr-cast
+ let _usize_ptr_transmute = unsafe { foo as *const usize };
+ let _usize_ptr_transmute = foo as *const usize;
+
+ // e is a function pointer type and U is an integer; fptr-addr-cast
+ let _usize_from_fn_ptr_transmute = unsafe { foo as usize };
+ let _usize_from_fn_ptr = foo as *const usize;
+}
+
+// For a ref-to-ptr cast of this form, where the pointer type points to a type other
+// than the referenced type, calling `CastCheck::do_check` has been observed to
+// cause an ICE error message. `do_check` is currently called inside the
+// `transmutes_expressible_as_ptr_casts` check, but other, more specific lints
+// currently prevent it from being called in these cases. This test is meant to
+// fail if the ordering of the checks ever changes enough to cause these cases to
+// fall through into `do_check`.
+fn trigger_do_check_to_emit_error(in_param: &[i32; 1]) -> *const u8 {
+ unsafe { in_param as *const [i32; 1] as *const u8 }
+}
+
+#[repr(C)]
+struct Single(u64);
+
+#[repr(C)]
+struct Pair(u32, u32);
+
+fn cannot_be_expressed_as_pointer_cast(in_param: Single) -> Pair {
+ assert_eq!(size_of::<Single>(), size_of::<Pair>());
+
+ unsafe { transmute::<Single, Pair>(in_param) }
+}
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
new file mode 100644
index 000000000..b9e446dc8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
@@ -0,0 +1,77 @@
+// run-rustfix
+#![warn(clippy::transmutes_expressible_as_ptr_casts)]
+// These two warnings currently cover the cases transmutes_expressible_as_ptr_casts
+// would otherwise be responsible for
+#![warn(clippy::useless_transmute)]
+#![warn(clippy::transmute_ptr_to_ptr)]
+#![allow(dead_code, unused_unsafe, clippy::borrow_as_ptr)]
+
+use std::mem::{size_of, transmute};
+
+// rustc_typeck::check::cast contains documentation about when a cast `e as U` is
+// valid, which we quote from below.
+fn main() {
+ // We should see an error message for each transmute, and no error messages for
+ // the casts, since the casts are the recommended fixes.
+
+ // e is an integer and U is *U_0, while U_0: Sized; addr-ptr-cast
+ let _ptr_i32_transmute = unsafe { transmute::<usize, *const i32>(usize::MAX) };
+ let ptr_i32 = usize::MAX as *const i32;
+
+ // e has type *T, U is *U_0, and either U_0: Sized ...
+ let _ptr_i8_transmute = unsafe { transmute::<*const i32, *const i8>(ptr_i32) };
+ let _ptr_i8 = ptr_i32 as *const i8;
+
+ let slice_ptr = &[0, 1, 2, 3] as *const [i32];
+
+ // ... or pointer_kind(T) = pointer_kind(U_0); ptr-ptr-cast
+ let _ptr_to_unsized_transmute = unsafe { transmute::<*const [i32], *const [u32]>(slice_ptr) };
+ let _ptr_to_unsized = slice_ptr as *const [u32];
+ // TODO: We could try testing vtable casts here too, but maybe
+ // we should wait until std::raw::TraitObject is stabilized?
+
+ // e has type *T and U is a numeric type, while T: Sized; ptr-addr-cast
+ let _usize_from_int_ptr_transmute = unsafe { transmute::<*const i32, usize>(ptr_i32) };
+ let _usize_from_int_ptr = ptr_i32 as usize;
+
+ let array_ref: &[i32; 4] = &[1, 2, 3, 4];
+
+ // e has type &[T; n] and U is *const T; array-ptr-cast
+ let _array_ptr_transmute = unsafe { transmute::<&[i32; 4], *const [i32; 4]>(array_ref) };
+ let _array_ptr = array_ref as *const [i32; 4];
+
+ fn foo(_: usize) -> u8 {
+ 42
+ }
+
+ // e is a function pointer type and U has type *T, while T: Sized; fptr-ptr-cast
+ let _usize_ptr_transmute = unsafe { transmute::<fn(usize) -> u8, *const usize>(foo) };
+ let _usize_ptr_transmute = foo as *const usize;
+
+ // e is a function pointer type and U is an integer; fptr-addr-cast
+ let _usize_from_fn_ptr_transmute = unsafe { transmute::<fn(usize) -> u8, usize>(foo) };
+ let _usize_from_fn_ptr = foo as *const usize;
+}
+
+// For a ref-to-ptr cast of this form, where the pointer type points to a type other
+// than the referenced type, calling `CastCheck::do_check` has been observed to
+// cause an ICE error message. `do_check` is currently called inside the
+// `transmutes_expressible_as_ptr_casts` check, but other, more specific lints
+// currently prevent it from being called in these cases. This test is meant to
+// fail if the ordering of the checks ever changes enough to cause these cases to
+// fall through into `do_check`.
+fn trigger_do_check_to_emit_error(in_param: &[i32; 1]) -> *const u8 {
+ unsafe { transmute::<&[i32; 1], *const u8>(in_param) }
+}
+
+#[repr(C)]
+struct Single(u64);
+
+#[repr(C)]
+struct Pair(u32, u32);
+
+fn cannot_be_expressed_as_pointer_cast(in_param: Single) -> Pair {
+ assert_eq!(size_of::<Single>(), size_of::<Pair>());
+
+ unsafe { transmute::<Single, Pair>(in_param) }
+}
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
new file mode 100644
index 000000000..de9418c8d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
@@ -0,0 +1,56 @@
+error: transmute from an integer to a pointer
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:18:39
+ |
+LL | let _ptr_i32_transmute = unsafe { transmute::<usize, *const i32>(usize::MAX) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `usize::MAX as *const i32`
+ |
+ = note: `-D clippy::useless-transmute` implied by `-D warnings`
+
+error: transmute from a pointer to a pointer
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:22:38
+ |
+LL | let _ptr_i8_transmute = unsafe { transmute::<*const i32, *const i8>(ptr_i32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr_i32 as *const i8`
+ |
+ = note: `-D clippy::transmute-ptr-to-ptr` implied by `-D warnings`
+
+error: transmute from a pointer to a pointer
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:28:46
+ |
+LL | let _ptr_to_unsized_transmute = unsafe { transmute::<*const [i32], *const [u32]>(slice_ptr) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `slice_ptr as *const [u32]`
+
+error: transmute from `*const i32` to `usize` which could be expressed as a pointer cast instead
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:34:50
+ |
+LL | let _usize_from_int_ptr_transmute = unsafe { transmute::<*const i32, usize>(ptr_i32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr_i32 as usize`
+ |
+ = note: `-D clippy::transmutes-expressible-as-ptr-casts` implied by `-D warnings`
+
+error: transmute from a reference to a pointer
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:40:41
+ |
+LL | let _array_ptr_transmute = unsafe { transmute::<&[i32; 4], *const [i32; 4]>(array_ref) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `array_ref as *const [i32; 4]`
+
+error: transmute from `fn(usize) -> u8` to `*const usize` which could be expressed as a pointer cast instead
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:48:41
+ |
+LL | let _usize_ptr_transmute = unsafe { transmute::<fn(usize) -> u8, *const usize>(foo) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `foo as *const usize`
+
+error: transmute from `fn(usize) -> u8` to `usize` which could be expressed as a pointer cast instead
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:52:49
+ |
+LL | let _usize_from_fn_ptr_transmute = unsafe { transmute::<fn(usize) -> u8, usize>(foo) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `foo as usize`
+
+error: transmute from a reference to a pointer
+ --> $DIR/transmutes_expressible_as_ptr_casts.rs:64:14
+ |
+LL | unsafe { transmute::<&[i32; 1], *const u8>(in_param) }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `in_param as *const [i32; 1] as *const u8`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/transmuting_null.rs b/src/tools/clippy/tests/ui/transmuting_null.rs
new file mode 100644
index 000000000..ea3ee8edc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmuting_null.rs
@@ -0,0 +1,30 @@
+#![allow(dead_code)]
+#![warn(clippy::transmuting_null)]
+#![allow(clippy::zero_ptr)]
+#![allow(clippy::transmute_ptr_to_ref)]
+#![allow(clippy::eq_op)]
+
+// Easy to lint because these only span one line.
+fn one_liners() {
+ unsafe {
+ let _: &u64 = std::mem::transmute(0 as *const u64);
+ let _: &u64 = std::mem::transmute(std::ptr::null::<u64>());
+ }
+}
+
+pub const ZPTR: *const usize = 0 as *const _;
+pub const NOT_ZPTR: *const usize = 1 as *const _;
+
+fn transmute_const() {
+ unsafe {
+ // Should raise a lint.
+ let _: &u64 = std::mem::transmute(ZPTR);
+ // Should NOT raise a lint.
+ let _: &u64 = std::mem::transmute(NOT_ZPTR);
+ }
+}
+
+fn main() {
+ one_liners();
+ transmute_const();
+}
diff --git a/src/tools/clippy/tests/ui/transmuting_null.stderr b/src/tools/clippy/tests/ui/transmuting_null.stderr
new file mode 100644
index 000000000..1848fc249
--- /dev/null
+++ b/src/tools/clippy/tests/ui/transmuting_null.stderr
@@ -0,0 +1,22 @@
+error: transmuting a known null pointer into a reference
+ --> $DIR/transmuting_null.rs:10:23
+ |
+LL | let _: &u64 = std::mem::transmute(0 as *const u64);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::transmuting-null` implied by `-D warnings`
+
+error: transmuting a known null pointer into a reference
+ --> $DIR/transmuting_null.rs:11:23
+ |
+LL | let _: &u64 = std::mem::transmute(std::ptr::null::<u64>());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: transmuting a known null pointer into a reference
+ --> $DIR/transmuting_null.rs:21:23
+ |
+LL | let _: &u64 = std::mem::transmute(ZPTR);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/trim_split_whitespace.fixed b/src/tools/clippy/tests/ui/trim_split_whitespace.fixed
new file mode 100644
index 000000000..e4d352f73
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trim_split_whitespace.fixed
@@ -0,0 +1,91 @@
+// run-rustfix
+#![warn(clippy::trim_split_whitespace)]
+#![allow(clippy::let_unit_value)]
+
+struct Custom;
+impl Custom {
+ fn trim(self) -> Self {
+ self
+ }
+ fn split_whitespace(self) {}
+}
+
+struct DerefStr(&'static str);
+impl std::ops::Deref for DerefStr {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+struct DerefStrAndCustom(&'static str);
+impl std::ops::Deref for DerefStrAndCustom {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+impl DerefStrAndCustom {
+ fn trim(self) -> Self {
+ self
+ }
+ fn split_whitespace(self) {}
+}
+
+struct DerefStrAndCustomSplit(&'static str);
+impl std::ops::Deref for DerefStrAndCustomSplit {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+impl DerefStrAndCustomSplit {
+ #[allow(dead_code)]
+ fn split_whitespace(self) {}
+}
+
+struct DerefStrAndCustomTrim(&'static str);
+impl std::ops::Deref for DerefStrAndCustomTrim {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+impl DerefStrAndCustomTrim {
+ fn trim(self) -> Self {
+ self
+ }
+}
+
+fn main() {
+ // &str
+ let _ = " A B C ".split_whitespace(); // should trigger lint
+ let _ = " A B C ".split_whitespace(); // should trigger lint
+ let _ = " A B C ".split_whitespace(); // should trigger lint
+
+ // String
+ let _ = (" A B C ").to_string().split_whitespace(); // should trigger lint
+ let _ = (" A B C ").to_string().split_whitespace(); // should trigger lint
+ let _ = (" A B C ").to_string().split_whitespace(); // should trigger lint
+
+ // Custom
+ let _ = Custom.trim().split_whitespace(); // should not trigger lint
+
+ // Deref<Target=str>
+ let s = DerefStr(" A B C ");
+ let _ = s.split_whitespace(); // should trigger lint
+
+ // Deref<Target=str> + custom impl
+ let s = DerefStrAndCustom(" A B C ");
+ let _ = s.trim().split_whitespace(); // should not trigger lint
+
+ // Deref<Target=str> + only custom split_ws() impl
+ let s = DerefStrAndCustomSplit(" A B C ");
+ let _ = s.split_whitespace(); // should trigger lint
+ // Expl: trim() is called on str (deref) and returns &str.
+ // Thus split_ws() is called on str as well and the custom impl on the struct is unused
+
+ // Deref<Target=str> + only custom trim() impl
+ let s = DerefStrAndCustomTrim(" A B C ");
+ let _ = s.trim().split_whitespace(); // should not trigger lint
+}
diff --git a/src/tools/clippy/tests/ui/trim_split_whitespace.rs b/src/tools/clippy/tests/ui/trim_split_whitespace.rs
new file mode 100644
index 000000000..f98451a98
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trim_split_whitespace.rs
@@ -0,0 +1,91 @@
+// run-rustfix
+#![warn(clippy::trim_split_whitespace)]
+#![allow(clippy::let_unit_value)]
+
+struct Custom;
+impl Custom {
+ fn trim(self) -> Self {
+ self
+ }
+ fn split_whitespace(self) {}
+}
+
+struct DerefStr(&'static str);
+impl std::ops::Deref for DerefStr {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+struct DerefStrAndCustom(&'static str);
+impl std::ops::Deref for DerefStrAndCustom {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+impl DerefStrAndCustom {
+ fn trim(self) -> Self {
+ self
+ }
+ fn split_whitespace(self) {}
+}
+
+struct DerefStrAndCustomSplit(&'static str);
+impl std::ops::Deref for DerefStrAndCustomSplit {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+impl DerefStrAndCustomSplit {
+ #[allow(dead_code)]
+ fn split_whitespace(self) {}
+}
+
+struct DerefStrAndCustomTrim(&'static str);
+impl std::ops::Deref for DerefStrAndCustomTrim {
+ type Target = str;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+impl DerefStrAndCustomTrim {
+ fn trim(self) -> Self {
+ self
+ }
+}
+
+fn main() {
+ // &str
+ let _ = " A B C ".trim().split_whitespace(); // should trigger lint
+ let _ = " A B C ".trim_start().split_whitespace(); // should trigger lint
+ let _ = " A B C ".trim_end().split_whitespace(); // should trigger lint
+
+ // String
+ let _ = (" A B C ").to_string().trim().split_whitespace(); // should trigger lint
+ let _ = (" A B C ").to_string().trim_start().split_whitespace(); // should trigger lint
+ let _ = (" A B C ").to_string().trim_end().split_whitespace(); // should trigger lint
+
+ // Custom
+ let _ = Custom.trim().split_whitespace(); // should not trigger lint
+
+ // Deref<Target=str>
+ let s = DerefStr(" A B C ");
+ let _ = s.trim().split_whitespace(); // should trigger lint
+
+ // Deref<Target=str> + custom impl
+ let s = DerefStrAndCustom(" A B C ");
+ let _ = s.trim().split_whitespace(); // should not trigger lint
+
+ // Deref<Target=str> + only custom split_ws() impl
+ let s = DerefStrAndCustomSplit(" A B C ");
+ let _ = s.trim().split_whitespace(); // should trigger lint
+ // Expl: trim() is called on str (deref) and returns &str.
+ // Thus split_ws() is called on str as well and the custom impl on the struct is unused
+
+ // Deref<Target=str> + only custom trim() impl
+ let s = DerefStrAndCustomTrim(" A B C ");
+ let _ = s.trim().split_whitespace(); // should not trigger lint
+}
diff --git a/src/tools/clippy/tests/ui/trim_split_whitespace.stderr b/src/tools/clippy/tests/ui/trim_split_whitespace.stderr
new file mode 100644
index 000000000..5ae7849e2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trim_split_whitespace.stderr
@@ -0,0 +1,52 @@
+error: found call to `str::trim` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:62:23
+ |
+LL | let _ = " A B C ".trim().split_whitespace(); // should trigger lint
+ | ^^^^^^^ help: remove `trim()`
+ |
+ = note: `-D clippy::trim-split-whitespace` implied by `-D warnings`
+
+error: found call to `str::trim_start` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:63:23
+ |
+LL | let _ = " A B C ".trim_start().split_whitespace(); // should trigger lint
+ | ^^^^^^^^^^^^^ help: remove `trim_start()`
+
+error: found call to `str::trim_end` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:64:23
+ |
+LL | let _ = " A B C ".trim_end().split_whitespace(); // should trigger lint
+ | ^^^^^^^^^^^ help: remove `trim_end()`
+
+error: found call to `str::trim` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:67:37
+ |
+LL | let _ = (" A B C ").to_string().trim().split_whitespace(); // should trigger lint
+ | ^^^^^^^ help: remove `trim()`
+
+error: found call to `str::trim_start` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:68:37
+ |
+LL | let _ = (" A B C ").to_string().trim_start().split_whitespace(); // should trigger lint
+ | ^^^^^^^^^^^^^ help: remove `trim_start()`
+
+error: found call to `str::trim_end` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:69:37
+ |
+LL | let _ = (" A B C ").to_string().trim_end().split_whitespace(); // should trigger lint
+ | ^^^^^^^^^^^ help: remove `trim_end()`
+
+error: found call to `str::trim` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:76:15
+ |
+LL | let _ = s.trim().split_whitespace(); // should trigger lint
+ | ^^^^^^^ help: remove `trim()`
+
+error: found call to `str::trim` before `str::split_whitespace`
+ --> $DIR/trim_split_whitespace.rs:84:15
+ |
+LL | let _ = s.trim().split_whitespace(); // should trigger lint
+ | ^^^^^^^ help: remove `trim()`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs
new file mode 100644
index 000000000..8f78f16a0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs
@@ -0,0 +1,168 @@
+// normalize-stderr-test "\(\d+ byte\)" -> "(N byte)"
+// normalize-stderr-test "\(limit: \d+ byte\)" -> "(limit: N byte)"
+
+#![deny(clippy::trivially_copy_pass_by_ref)]
+#![allow(clippy::blacklisted_name, clippy::redundant_field_names)]
+
+#[derive(Copy, Clone)]
+struct Foo(u32);
+
+#[derive(Copy, Clone)]
+struct Bar([u8; 24]);
+
+#[derive(Copy, Clone)]
+pub struct Color {
+ pub r: u8,
+ pub g: u8,
+ pub b: u8,
+ pub a: u8,
+}
+
+struct FooRef<'a> {
+ foo: &'a Foo,
+}
+
+type Baz = u32;
+
+fn good(a: &mut u32, b: u32, c: &Bar) {}
+
+fn good_return_implicit_lt_ref(foo: &Foo) -> &u32 {
+ &foo.0
+}
+
+#[allow(clippy::needless_lifetimes)]
+fn good_return_explicit_lt_ref<'a>(foo: &'a Foo) -> &'a u32 {
+ &foo.0
+}
+
+fn good_return_implicit_lt_struct(foo: &Foo) -> FooRef {
+ FooRef { foo }
+}
+
+#[allow(clippy::needless_lifetimes)]
+fn good_return_explicit_lt_struct<'a>(foo: &'a Foo) -> FooRef<'a> {
+ FooRef { foo }
+}
+
+fn bad(x: &u32, y: &Foo, z: &Baz) {}
+
+impl Foo {
+ fn good(self, a: &mut u32, b: u32, c: &Bar) {}
+
+ fn good2(&mut self) {}
+
+ fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
+
+ fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+
+ fn bad_issue7518(self, other: &Self) {}
+}
+
+impl AsRef<u32> for Foo {
+ fn as_ref(&self) -> &u32 {
+ &self.0
+ }
+}
+
+impl Bar {
+ fn good(&self, a: &mut u32, b: u32, c: &Bar) {}
+
+ fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+}
+
+trait MyTrait {
+ fn trait_method(&self, _foo: &Foo);
+}
+
+pub trait MyTrait2 {
+ fn trait_method2(&self, _color: &Color);
+}
+
+impl MyTrait for Foo {
+ fn trait_method(&self, _foo: &Foo) {
+ unimplemented!()
+ }
+}
+
+#[allow(unused_variables)]
+mod issue3992 {
+ pub trait A {
+ #[allow(clippy::trivially_copy_pass_by_ref)]
+ fn a(b: &u16) {}
+ }
+
+ #[allow(clippy::trivially_copy_pass_by_ref)]
+ pub fn c(d: &u16) {}
+}
+
+mod issue5876 {
+ // Don't lint here as it is always inlined
+ #[inline(always)]
+ fn foo_always(x: &i32) {
+ println!("{}", x);
+ }
+
+ #[inline(never)]
+ fn foo_never(x: &i32) {
+ println!("{}", x);
+ }
+
+ #[inline]
+ fn foo(x: &i32) {
+ println!("{}", x);
+ }
+}
+
+fn _ref_to_opt_ref_implicit(x: &u32) -> Option<&u32> {
+ Some(x)
+}
+
+#[allow(clippy::needless_lifetimes)]
+fn _ref_to_opt_ref_explicit<'a>(x: &'a u32) -> Option<&'a u32> {
+ Some(x)
+}
+
+fn _with_constraint<'a, 'b: 'a>(x: &'b u32, y: &'a u32) -> &'a u32 {
+ if true { x } else { y }
+}
+
+async fn _async_implicit(x: &u32) -> &u32 {
+ x
+}
+
+#[allow(clippy::needless_lifetimes)]
+async fn _async_explicit<'a>(x: &'a u32) -> &'a u32 {
+ x
+}
+
+fn _unrelated_lifetimes<'a, 'b>(_x: &'a u32, y: &'b u32) -> &'b u32 {
+ y
+}
+
+fn _return_ptr(x: &u32) -> *const u32 {
+ x
+}
+
+fn _return_field_ptr(x: &(u32, u32)) -> *const u32 {
+ &x.0
+}
+
+fn _return_field_ptr_addr_of(x: &(u32, u32)) -> *const u32 {
+ core::ptr::addr_of!(x.0)
+}
+
+fn main() {
+ let (mut foo, bar) = (Foo(0), Bar([0; 24]));
+ let (mut a, b, c, x, y, z) = (0, 0, Bar([0; 24]), 0, Foo(0), 0);
+ good(&mut a, b, &c);
+ good_return_implicit_lt_ref(&y);
+ good_return_explicit_lt_ref(&y);
+ bad(&x, &y, &z);
+ foo.good(&mut a, b, &c);
+ foo.good2();
+ foo.bad(&x, &y, &z);
+ Foo::bad2(&x, &y, &z);
+ bar.good(&mut a, b, &c);
+ Bar::bad2(&x, &y, &z);
+ foo.as_ref();
+}
diff --git a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr
new file mode 100644
index 000000000..66ecb3d8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr
@@ -0,0 +1,116 @@
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:47:11
+ |
+LL | fn bad(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `u32`
+ |
+note: the lint level is defined here
+ --> $DIR/trivially_copy_pass_by_ref.rs:4:9
+ |
+LL | #![deny(clippy::trivially_copy_pass_by_ref)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:47:20
+ |
+LL | fn bad(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Foo`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:47:29
+ |
+LL | fn bad(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Baz`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:54:12
+ |
+LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^^ help: consider passing by value instead: `self`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:54:22
+ |
+LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `u32`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:54:31
+ |
+LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Foo`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:54:40
+ |
+LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Baz`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:56:16
+ |
+LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `u32`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:56:25
+ |
+LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Foo`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:56:34
+ |
+LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Baz`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:58:35
+ |
+LL | fn bad_issue7518(self, other: &Self) {}
+ | ^^^^^ help: consider passing by value instead: `Self`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:70:16
+ |
+LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `u32`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:70:25
+ |
+LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Foo`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:70:34
+ |
+LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
+ | ^^^^ help: consider passing by value instead: `Baz`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:74:34
+ |
+LL | fn trait_method(&self, _foo: &Foo);
+ | ^^^^ help: consider passing by value instead: `Foo`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:106:21
+ |
+LL | fn foo_never(x: &i32) {
+ | ^^^^ help: consider passing by value instead: `i32`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:111:15
+ |
+LL | fn foo(x: &i32) {
+ | ^^^^ help: consider passing by value instead: `i32`
+
+error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
+ --> $DIR/trivially_copy_pass_by_ref.rs:138:37
+ |
+LL | fn _unrelated_lifetimes<'a, 'b>(_x: &'a u32, y: &'b u32) -> &'b u32 {
+ | ^^^^^^^ help: consider passing by value instead: `u32`
+
+error: aborting due to 18 previous errors
+
diff --git a/src/tools/clippy/tests/ui/try_err.fixed b/src/tools/clippy/tests/ui/try_err.fixed
new file mode 100644
index 000000000..264194419
--- /dev/null
+++ b/src/tools/clippy/tests/ui/try_err.fixed
@@ -0,0 +1,170 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![deny(clippy::try_err)]
+#![allow(clippy::unnecessary_wraps, clippy::needless_question_mark)]
+
+#[macro_use]
+extern crate macro_rules;
+
+use std::io;
+use std::task::Poll;
+
+// Tests that a simple case works
+// Should flag `Err(err)?`
+pub fn basic_test() -> Result<i32, i32> {
+ let err: i32 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ return Err(err);
+ }
+ Ok(0)
+}
+
+// Tests that `.into()` is added when appropriate
+pub fn into_test() -> Result<i32, i32> {
+ let err: u8 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ return Err(err.into());
+ }
+ Ok(0)
+}
+
+// Tests that tries in general don't trigger the error
+pub fn negative_test() -> Result<i32, i32> {
+ Ok(nested_error()? + 1)
+}
+
+// Tests that `.into()` isn't added when the error type
+// matches the surrounding closure's return type, even
+// when it doesn't match the surrounding function's.
+pub fn closure_matches_test() -> Result<i32, i32> {
+ let res: Result<i32, i8> = Some(1)
+ .into_iter()
+ .map(|i| {
+ let err: i8 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ return Err(err);
+ }
+ Ok(i)
+ })
+ .next()
+ .unwrap();
+
+ Ok(res?)
+}
+
+// Tests that `.into()` isn't added when the error type
+// doesn't match the surrounding closure's return type.
+pub fn closure_into_test() -> Result<i32, i32> {
+ let res: Result<i32, i16> = Some(1)
+ .into_iter()
+ .map(|i| {
+ let err: i8 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ return Err(err.into());
+ }
+ Ok(i)
+ })
+ .next()
+ .unwrap();
+
+ Ok(res?)
+}
+
+fn nested_error() -> Result<i32, i32> {
+ Ok(1)
+}
+
+// Bad suggestion when in macro (see #6242)
+macro_rules! try_validation {
+ ($e: expr) => {{
+ match $e {
+ Ok(_) => 0,
+ Err(_) => return Err(1),
+ }
+ }};
+}
+
+macro_rules! ret_one {
+ () => {
+ 1
+ };
+}
+
+macro_rules! try_validation_in_macro {
+ ($e: expr) => {{
+ match $e {
+ Ok(_) => 0,
+ Err(_) => return Err(ret_one!()),
+ }
+ }};
+}
+
+fn calling_macro() -> Result<i32, i32> {
+ // macro
+ try_validation!(Ok::<_, i32>(5));
+ // `Err` arg is another macro
+ try_validation_in_macro!(Ok::<_, i32>(5));
+ Ok(5)
+}
+
+fn main() {
+ basic_test().unwrap();
+ into_test().unwrap();
+ negative_test().unwrap();
+ closure_matches_test().unwrap();
+ closure_into_test().unwrap();
+ calling_macro().unwrap();
+
+ // We don't want to lint in external macros
+ try_err!();
+}
+
+macro_rules! bar {
+ () => {
+ String::from("aasdfasdfasdfa")
+ };
+}
+
+macro_rules! foo {
+ () => {
+ bar!()
+ };
+}
+
+pub fn macro_inside(fail: bool) -> Result<i32, String> {
+ if fail {
+ return Err(foo!());
+ }
+ Ok(0)
+}
+
+pub fn poll_write(n: usize) -> Poll<io::Result<usize>> {
+ if n == 0 {
+ return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))
+ } else if n == 1 {
+ return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))
+ };
+
+ Poll::Ready(Ok(n))
+}
+
+pub fn poll_next(ready: bool) -> Poll<Option<io::Result<()>>> {
+ if !ready {
+ return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))
+ }
+
+ Poll::Ready(None)
+}
+
+// Tests that `return` is not duplicated
+pub fn try_return(x: bool) -> Result<i32, i32> {
+ if x {
+ return Err(42);
+ }
+ Ok(0)
+}
diff --git a/src/tools/clippy/tests/ui/try_err.rs b/src/tools/clippy/tests/ui/try_err.rs
new file mode 100644
index 000000000..bc6979bf4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/try_err.rs
@@ -0,0 +1,170 @@
+// run-rustfix
+// aux-build:macro_rules.rs
+
+#![deny(clippy::try_err)]
+#![allow(clippy::unnecessary_wraps, clippy::needless_question_mark)]
+
+#[macro_use]
+extern crate macro_rules;
+
+use std::io;
+use std::task::Poll;
+
+// Tests that a simple case works
+// Should flag `Err(err)?`
+pub fn basic_test() -> Result<i32, i32> {
+ let err: i32 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ Err(err)?;
+ }
+ Ok(0)
+}
+
+// Tests that `.into()` is added when appropriate
+pub fn into_test() -> Result<i32, i32> {
+ let err: u8 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ Err(err)?;
+ }
+ Ok(0)
+}
+
+// Tests that tries in general don't trigger the error
+pub fn negative_test() -> Result<i32, i32> {
+ Ok(nested_error()? + 1)
+}
+
+// Tests that `.into()` isn't added when the error type
+// matches the surrounding closure's return type, even
+// when it doesn't match the surrounding function's.
+pub fn closure_matches_test() -> Result<i32, i32> {
+ let res: Result<i32, i8> = Some(1)
+ .into_iter()
+ .map(|i| {
+ let err: i8 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ Err(err)?;
+ }
+ Ok(i)
+ })
+ .next()
+ .unwrap();
+
+ Ok(res?)
+}
+
+// Tests that `.into()` isn't added when the error type
+// doesn't match the surrounding closure's return type.
+pub fn closure_into_test() -> Result<i32, i32> {
+ let res: Result<i32, i16> = Some(1)
+ .into_iter()
+ .map(|i| {
+ let err: i8 = 1;
+ // To avoid warnings during rustfix
+ if true {
+ Err(err)?;
+ }
+ Ok(i)
+ })
+ .next()
+ .unwrap();
+
+ Ok(res?)
+}
+
+fn nested_error() -> Result<i32, i32> {
+ Ok(1)
+}
+
+// Bad suggestion when in macro (see #6242)
+macro_rules! try_validation {
+ ($e: expr) => {{
+ match $e {
+ Ok(_) => 0,
+ Err(_) => Err(1)?,
+ }
+ }};
+}
+
+macro_rules! ret_one {
+ () => {
+ 1
+ };
+}
+
+macro_rules! try_validation_in_macro {
+ ($e: expr) => {{
+ match $e {
+ Ok(_) => 0,
+ Err(_) => Err(ret_one!())?,
+ }
+ }};
+}
+
+fn calling_macro() -> Result<i32, i32> {
+ // macro
+ try_validation!(Ok::<_, i32>(5));
+ // `Err` arg is another macro
+ try_validation_in_macro!(Ok::<_, i32>(5));
+ Ok(5)
+}
+
+fn main() {
+ basic_test().unwrap();
+ into_test().unwrap();
+ negative_test().unwrap();
+ closure_matches_test().unwrap();
+ closure_into_test().unwrap();
+ calling_macro().unwrap();
+
+ // We don't want to lint in external macros
+ try_err!();
+}
+
+macro_rules! bar {
+ () => {
+ String::from("aasdfasdfasdfa")
+ };
+}
+
+macro_rules! foo {
+ () => {
+ bar!()
+ };
+}
+
+pub fn macro_inside(fail: bool) -> Result<i32, String> {
+ if fail {
+ Err(foo!())?;
+ }
+ Ok(0)
+}
+
+pub fn poll_write(n: usize) -> Poll<io::Result<usize>> {
+ if n == 0 {
+ Err(io::ErrorKind::WriteZero)?
+ } else if n == 1 {
+ Err(io::Error::new(io::ErrorKind::InvalidInput, "error"))?
+ };
+
+ Poll::Ready(Ok(n))
+}
+
+pub fn poll_next(ready: bool) -> Poll<Option<io::Result<()>>> {
+ if !ready {
+ Err(io::ErrorKind::NotFound)?
+ }
+
+ Poll::Ready(None)
+}
+
+// Tests that `return` is not duplicated
+pub fn try_return(x: bool) -> Result<i32, i32> {
+ if x {
+ return Err(42)?;
+ }
+ Ok(0)
+}
diff --git a/src/tools/clippy/tests/ui/try_err.stderr b/src/tools/clippy/tests/ui/try_err.stderr
new file mode 100644
index 000000000..0cb1328fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/try_err.stderr
@@ -0,0 +1,84 @@
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:19:9
+ |
+LL | Err(err)?;
+ | ^^^^^^^^^ help: try this: `return Err(err)`
+ |
+note: the lint level is defined here
+ --> $DIR/try_err.rs:4:9
+ |
+LL | #![deny(clippy::try_err)]
+ | ^^^^^^^^^^^^^^^
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:29:9
+ |
+LL | Err(err)?;
+ | ^^^^^^^^^ help: try this: `return Err(err.into())`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:49:17
+ |
+LL | Err(err)?;
+ | ^^^^^^^^^ help: try this: `return Err(err)`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:68:17
+ |
+LL | Err(err)?;
+ | ^^^^^^^^^ help: try this: `return Err(err.into())`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:87:23
+ |
+LL | Err(_) => Err(1)?,
+ | ^^^^^^^ help: try this: `return Err(1)`
+...
+LL | try_validation!(Ok::<_, i32>(5));
+ | -------------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `try_validation` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:102:23
+ |
+LL | Err(_) => Err(ret_one!())?,
+ | ^^^^^^^^^^^^^^^^ help: try this: `return Err(ret_one!())`
+...
+LL | try_validation_in_macro!(Ok::<_, i32>(5));
+ | ----------------------------------------- in this macro invocation
+ |
+ = note: this error originates in the macro `try_validation_in_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:141:9
+ |
+LL | Err(foo!())?;
+ | ^^^^^^^^^^^^ help: try this: `return Err(foo!())`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:148:9
+ |
+LL | Err(io::ErrorKind::WriteZero)?
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:150:9
+ |
+LL | Err(io::Error::new(io::ErrorKind::InvalidInput, "error"))?
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:158:9
+ |
+LL | Err(io::ErrorKind::NotFound)?
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))`
+
+error: returning an `Err(_)` with the `?` operator
+ --> $DIR/try_err.rs:167:16
+ |
+LL | return Err(42)?;
+ | ^^^^^^^^ help: try this: `Err(42)`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/ty_fn_sig.rs b/src/tools/clippy/tests/ui/ty_fn_sig.rs
new file mode 100644
index 000000000..9e2753dcb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ty_fn_sig.rs
@@ -0,0 +1,14 @@
+// Regression test
+
+pub fn retry<F: Fn()>(f: F) {
+ for _i in 0.. {
+ f();
+ }
+}
+
+fn main() {
+ for y in 0..4 {
+ let func = || ();
+ func();
+ }
+}
diff --git a/src/tools/clippy/tests/ui/type_complexity.rs b/src/tools/clippy/tests/ui/type_complexity.rs
new file mode 100644
index 000000000..86a7bd7b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_complexity.rs
@@ -0,0 +1,69 @@
+#![warn(clippy::all)]
+#![allow(unused, clippy::needless_pass_by_value, clippy::vec_box)]
+#![feature(associated_type_defaults)]
+
+type Alias = Vec<Vec<Box<(u32, u32, u32, u32)>>>; // no warning here
+
+const CST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0))));
+static ST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0))));
+
+struct S {
+ f: Vec<Vec<Box<(u32, u32, u32, u32)>>>,
+}
+
+struct Ts(Vec<Vec<Box<(u32, u32, u32, u32)>>>);
+
+enum E {
+ Tuple(Vec<Vec<Box<(u32, u32, u32, u32)>>>),
+ Struct { f: Vec<Vec<Box<(u32, u32, u32, u32)>>> },
+}
+
+impl S {
+ const A: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0))));
+ fn impl_method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+}
+
+trait T {
+ const A: Vec<Vec<Box<(u32, u32, u32, u32)>>>;
+ type B = Vec<Vec<Box<(u32, u32, u32, u32)>>>;
+ fn method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>);
+ fn def_method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+}
+
+// Should not warn since there is likely no way to simplify this (#1013)
+impl T for () {
+ const A: Vec<Vec<Box<(u32, u32, u32, u32)>>> = vec![];
+
+ type B = Vec<Vec<Box<(u32, u32, u32, u32)>>>;
+
+ fn method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+}
+
+fn test1() -> Vec<Vec<Box<(u32, u32, u32, u32)>>> {
+ vec![]
+}
+
+fn test2(_x: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+
+fn test3() {
+ let _y: Vec<Vec<Box<(u32, u32, u32, u32)>>> = vec![];
+}
+
+#[repr(C)]
+struct D {
+ // should not warn, since we don't have control over the signature (#3222)
+ test4: extern "C" fn(
+ itself: &D,
+ a: usize,
+ b: usize,
+ c: usize,
+ d: usize,
+ e: usize,
+ f: usize,
+ g: usize,
+ h: usize,
+ i: usize,
+ ),
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/type_complexity.stderr b/src/tools/clippy/tests/ui/type_complexity.stderr
new file mode 100644
index 000000000..9da7edb1c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_complexity.stderr
@@ -0,0 +1,94 @@
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:7:12
+ |
+LL | const CST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0))));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::type-complexity` implied by `-D warnings`
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:8:12
+ |
+LL | static ST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0))));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:11:8
+ |
+LL | f: Vec<Vec<Box<(u32, u32, u32, u32)>>>,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:14:11
+ |
+LL | struct Ts(Vec<Vec<Box<(u32, u32, u32, u32)>>>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:17:11
+ |
+LL | Tuple(Vec<Vec<Box<(u32, u32, u32, u32)>>>),
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:18:17
+ |
+LL | Struct { f: Vec<Vec<Box<(u32, u32, u32, u32)>>> },
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:22:14
+ |
+LL | const A: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0))));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:23:30
+ |
+LL | fn impl_method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:27:14
+ |
+LL | const A: Vec<Vec<Box<(u32, u32, u32, u32)>>>;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:28:14
+ |
+LL | type B = Vec<Vec<Box<(u32, u32, u32, u32)>>>;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:29:25
+ |
+LL | fn method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:30:29
+ |
+LL | fn def_method(&self, p: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:42:15
+ |
+LL | fn test1() -> Vec<Vec<Box<(u32, u32, u32, u32)>>> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:46:14
+ |
+LL | fn test2(_x: Vec<Vec<Box<(u32, u32, u32, u32)>>>) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: very complex type used. Consider factoring parts into `type` definitions
+ --> $DIR/type_complexity.rs:49:13
+ |
+LL | let _y: Vec<Vec<Box<(u32, u32, u32, u32)>>> = vec![];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 15 previous errors
+
diff --git a/src/tools/clippy/tests/ui/type_repetition_in_bounds.rs b/src/tools/clippy/tests/ui/type_repetition_in_bounds.rs
new file mode 100644
index 000000000..2eca1f470
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_repetition_in_bounds.rs
@@ -0,0 +1,97 @@
+#![deny(clippy::type_repetition_in_bounds)]
+
+use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign};
+
+pub fn foo<T>(_t: T)
+where
+ T: Copy,
+ T: Clone,
+{
+ unimplemented!();
+}
+
+pub fn bar<T, U>(_t: T, _u: U)
+where
+ T: Copy,
+ U: Clone,
+{
+ unimplemented!();
+}
+
+// Threshold test (see #4380)
+trait LintBounds
+where
+ Self: Clone,
+ Self: Copy + Default + Ord,
+ Self: Add<Output = Self> + AddAssign + Sub<Output = Self> + SubAssign,
+ Self: Mul<Output = Self> + MulAssign + Div<Output = Self> + DivAssign,
+{
+}
+
+trait LotsOfBounds
+where
+ Self: Clone + Copy + Default + Ord,
+ Self: Add<Output = Self> + AddAssign + Sub<Output = Self> + SubAssign,
+ Self: Mul<Output = Self> + MulAssign + Div<Output = Self> + DivAssign,
+{
+}
+
+// Generic distinction (see #4323)
+mod issue4323 {
+ pub struct Foo<A>(A);
+ pub struct Bar<A, B> {
+ a: Foo<A>,
+ b: Foo<B>,
+ }
+
+ impl<A, B> Unpin for Bar<A, B>
+ where
+ Foo<A>: Unpin,
+ Foo<B>: Unpin,
+ {
+ }
+}
+
+// Extern macros shouldn't lint (see #4326)
+extern crate serde;
+mod issue4326 {
+ use serde::{Deserialize, Serialize};
+
+ trait Foo {}
+ impl Foo for String {}
+
+ #[derive(Debug, Serialize, Deserialize)]
+ struct Bar<S>
+ where
+ S: Foo,
+ {
+ foo: S,
+ }
+}
+
+// Issue #7360
+struct Foo<T, U>
+where
+ T: Clone,
+ U: Clone,
+{
+ t: T,
+ u: U,
+}
+
+// Check for the `?` in `?Sized`
+pub fn f<T: ?Sized>()
+where
+ T: Clone,
+{
+}
+pub fn g<T: Clone>()
+where
+ T: ?Sized,
+{
+}
+
+// This should not lint
+fn impl_trait(_: impl AsRef<str>, _: impl AsRef<str>) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/type_repetition_in_bounds.stderr b/src/tools/clippy/tests/ui/type_repetition_in_bounds.stderr
new file mode 100644
index 000000000..1d8871481
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_repetition_in_bounds.stderr
@@ -0,0 +1,39 @@
+error: this type has already been used as a bound predicate
+ --> $DIR/type_repetition_in_bounds.rs:8:5
+ |
+LL | T: Clone,
+ | ^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/type_repetition_in_bounds.rs:1:9
+ |
+LL | #![deny(clippy::type_repetition_in_bounds)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: consider combining the bounds: `T: Copy + Clone`
+
+error: this type has already been used as a bound predicate
+ --> $DIR/type_repetition_in_bounds.rs:25:5
+ |
+LL | Self: Copy + Default + Ord,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider combining the bounds: `Self: Clone + Copy + Default + Ord`
+
+error: this type has already been used as a bound predicate
+ --> $DIR/type_repetition_in_bounds.rs:85:5
+ |
+LL | T: Clone,
+ | ^^^^^^^^
+ |
+ = help: consider combining the bounds: `T: ?Sized + Clone`
+
+error: this type has already been used as a bound predicate
+ --> $DIR/type_repetition_in_bounds.rs:90:5
+ |
+LL | T: ?Sized,
+ | ^^^^^^^^^
+ |
+ = help: consider combining the bounds: `T: Clone + ?Sized`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/types.fixed b/src/tools/clippy/tests/ui/types.fixed
new file mode 100644
index 000000000..417da42ed
--- /dev/null
+++ b/src/tools/clippy/tests/ui/types.fixed
@@ -0,0 +1,15 @@
+// run-rustfix
+
+#![allow(dead_code, unused_variables)]
+#![warn(clippy::cast_lossless)]
+
+// should not warn on lossy casting in constant types
+// because not supported yet
+const C: i32 = 42;
+const C_I64: i64 = C as i64;
+
+fn main() {
+ // should suggest i64::from(c)
+ let c: i32 = 42;
+ let c_i64: i64 = i64::from(c);
+}
diff --git a/src/tools/clippy/tests/ui/types.rs b/src/tools/clippy/tests/ui/types.rs
new file mode 100644
index 000000000..b16e9e538
--- /dev/null
+++ b/src/tools/clippy/tests/ui/types.rs
@@ -0,0 +1,15 @@
+// run-rustfix
+
+#![allow(dead_code, unused_variables)]
+#![warn(clippy::cast_lossless)]
+
+// should not warn on lossy casting in constant types
+// because not supported yet
+const C: i32 = 42;
+const C_I64: i64 = C as i64;
+
+fn main() {
+ // should suggest i64::from(c)
+ let c: i32 = 42;
+ let c_i64: i64 = c as i64;
+}
diff --git a/src/tools/clippy/tests/ui/types.stderr b/src/tools/clippy/tests/ui/types.stderr
new file mode 100644
index 000000000..59c3e05a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/types.stderr
@@ -0,0 +1,10 @@
+error: casting `i32` to `i64` may become silently lossy if you later change the type
+ --> $DIR/types.rs:14:22
+ |
+LL | let c_i64: i64 = c as i64;
+ | ^^^^^^^^ help: try: `i64::from(c)`
+ |
+ = note: `-D clippy::cast-lossless` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/undocumented_unsafe_blocks.rs b/src/tools/clippy/tests/ui/undocumented_unsafe_blocks.rs
new file mode 100644
index 000000000..08aee4332
--- /dev/null
+++ b/src/tools/clippy/tests/ui/undocumented_unsafe_blocks.rs
@@ -0,0 +1,493 @@
+// aux-build:proc_macro_unsafe.rs
+
+#![warn(clippy::undocumented_unsafe_blocks)]
+#![allow(clippy::let_unit_value, clippy::missing_safety_doc)]
+
+extern crate proc_macro_unsafe;
+
+// Valid comments
+
+fn nested_local() {
+ let _ = {
+ let _ = {
+ // SAFETY:
+ let _ = unsafe {};
+ };
+ };
+}
+
+fn deep_nest() {
+ let _ = {
+ let _ = {
+ // SAFETY:
+ let _ = unsafe {};
+
+ // Safety:
+ unsafe {};
+
+ let _ = {
+ let _ = {
+ let _ = {
+ let _ = {
+ let _ = {
+ // Safety:
+ let _ = unsafe {};
+
+ // SAFETY:
+ unsafe {};
+ };
+ };
+ };
+
+ // Safety:
+ unsafe {};
+ };
+ };
+ };
+
+ // Safety:
+ unsafe {};
+ };
+
+ // SAFETY:
+ unsafe {};
+}
+
+fn local_tuple_expression() {
+ // Safety:
+ let _ = (42, unsafe {});
+}
+
+fn line_comment() {
+ // Safety:
+ unsafe {}
+}
+
+fn line_comment_newlines() {
+ // SAFETY:
+
+ unsafe {}
+}
+
+fn line_comment_empty() {
+ // Safety:
+ //
+ //
+ //
+ unsafe {}
+}
+
+fn line_comment_with_extras() {
+ // This is a description
+ // Safety:
+ unsafe {}
+}
+
+fn block_comment() {
+ /* Safety: */
+ unsafe {}
+}
+
+fn block_comment_newlines() {
+ /* SAFETY: */
+
+ unsafe {}
+}
+
+fn block_comment_with_extras() {
+ /* This is a description
+ * SAFETY:
+ */
+ unsafe {}
+}
+
+fn block_comment_terminator_same_line() {
+ /* This is a description
+ * Safety: */
+ unsafe {}
+}
+
+fn buried_safety() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor
+ // incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ // ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
+ // reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
+ // occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est
+ // laborum. Safety:
+ // Tellus elementum sagittis vitae et leo duis ut diam quam. Sit amet nulla facilisi
+ // morbi tempus iaculis urna. Amet luctus venenatis lectus magna. At quis risus sed vulputate odio
+ // ut. Luctus venenatis lectus magna fringilla urna. Tortor id aliquet lectus proin nibh nisl
+ // condimentum id venenatis. Vulputate dignissim suspendisse in est ante in nibh mauris cursus.
+ unsafe {}
+}
+
+fn safety_with_prepended_text() {
+ // This is a test. safety:
+ unsafe {}
+}
+
+fn local_line_comment() {
+ // Safety:
+ let _ = unsafe {};
+}
+
+fn local_block_comment() {
+ /* SAFETY: */
+ let _ = unsafe {};
+}
+
+fn comment_array() {
+ // Safety:
+ let _ = [unsafe { 14 }, unsafe { 15 }, 42, unsafe { 16 }];
+}
+
+fn comment_tuple() {
+ // sAFETY:
+ let _ = (42, unsafe {}, "test", unsafe {});
+}
+
+fn comment_unary() {
+ // SAFETY:
+ let _ = *unsafe { &42 };
+}
+
+#[allow(clippy::match_single_binding)]
+fn comment_match() {
+ // SAFETY:
+ let _ = match unsafe {} {
+ _ => {},
+ };
+}
+
+fn comment_addr_of() {
+ // Safety:
+ let _ = &unsafe {};
+}
+
+fn comment_repeat() {
+ // Safety:
+ let _ = [unsafe {}; 5];
+}
+
+fn comment_macro_call() {
+ macro_rules! t {
+ ($b:expr) => {
+ $b
+ };
+ }
+
+ t!(
+ // SAFETY:
+ unsafe {}
+ );
+}
+
+fn comment_macro_def() {
+ macro_rules! t {
+ () => {
+ // Safety:
+ unsafe {}
+ };
+ }
+
+ t!();
+}
+
+fn non_ascii_comment() {
+ // ॐ᧻໒ SaFeTy: ௵∰
+ unsafe {};
+}
+
+fn local_commented_block() {
+ let _ =
+ // safety:
+ unsafe {};
+}
+
+fn local_nest() {
+ // safety:
+ let _ = [(42, unsafe {}, unsafe {}), (52, unsafe {}, unsafe {})];
+}
+
+fn in_fn_call(x: *const u32) {
+ fn f(x: u32) {}
+
+ // Safety: reason
+ f(unsafe { *x });
+}
+
+fn multi_in_fn_call(x: *const u32) {
+ fn f(x: u32, y: u32) {}
+
+ // Safety: reason
+ f(unsafe { *x }, unsafe { *x });
+}
+
+fn in_multiline_fn_call(x: *const u32) {
+ fn f(x: u32, y: u32) {}
+
+ f(
+ // Safety: reason
+ unsafe { *x },
+ 0,
+ );
+}
+
+fn in_macro_call(x: *const u32) {
+ // Safety: reason
+ println!("{}", unsafe { *x });
+}
+
+fn in_multiline_macro_call(x: *const u32) {
+ println!(
+ "{}",
+ // Safety: reason
+ unsafe { *x },
+ );
+}
+
+fn from_proc_macro() {
+ proc_macro_unsafe::unsafe_block!(token);
+}
+
+fn in_closure(x: *const u32) {
+ // Safety: reason
+ let _ = || unsafe { *x };
+}
+
+// Invalid comments
+
+#[rustfmt::skip]
+fn inline_block_comment() {
+ /* Safety: */ unsafe {}
+}
+
+fn no_comment() {
+ unsafe {}
+}
+
+fn no_comment_array() {
+ let _ = [unsafe { 14 }, unsafe { 15 }, 42, unsafe { 16 }];
+}
+
+fn no_comment_tuple() {
+ let _ = (42, unsafe {}, "test", unsafe {});
+}
+
+fn no_comment_unary() {
+ let _ = *unsafe { &42 };
+}
+
+#[allow(clippy::match_single_binding)]
+fn no_comment_match() {
+ let _ = match unsafe {} {
+ _ => {},
+ };
+}
+
+fn no_comment_addr_of() {
+ let _ = &unsafe {};
+}
+
+fn no_comment_repeat() {
+ let _ = [unsafe {}; 5];
+}
+
+fn local_no_comment() {
+ let _ = unsafe {};
+}
+
+fn no_comment_macro_call() {
+ macro_rules! t {
+ ($b:expr) => {
+ $b
+ };
+ }
+
+ t!(unsafe {});
+}
+
+fn no_comment_macro_def() {
+ macro_rules! t {
+ () => {
+ unsafe {}
+ };
+ }
+
+ t!();
+}
+
+fn trailing_comment() {
+ unsafe {} // SAFETY:
+}
+
+fn internal_comment() {
+ unsafe {
+ // SAFETY:
+ }
+}
+
+fn interference() {
+ // SAFETY
+
+ let _ = 42;
+
+ unsafe {};
+}
+
+pub fn print_binary_tree() {
+ println!("{}", unsafe { String::from_utf8_unchecked(vec![]) });
+}
+
+mod unsafe_impl_smoke_test {
+ unsafe trait A {}
+
+ // error: no safety comment
+ unsafe impl A for () {}
+
+ // Safety: ok
+ unsafe impl A for (i32) {}
+
+ mod sub_mod {
+ // error:
+ unsafe impl B for (u32) {}
+ unsafe trait B {}
+ }
+
+ #[rustfmt::skip]
+ mod sub_mod2 {
+ //
+ // SAFETY: ok
+ //
+
+ unsafe impl B for (u32) {}
+ unsafe trait B {}
+ }
+}
+
+mod unsafe_impl_from_macro {
+ unsafe trait T {}
+
+ // error
+ macro_rules! no_safety_comment {
+ ($t:ty) => {
+ unsafe impl T for $t {}
+ };
+ }
+
+ // ok
+ no_safety_comment!(());
+
+ // ok
+ macro_rules! with_safety_comment {
+ ($t:ty) => {
+ // SAFETY:
+ unsafe impl T for $t {}
+ };
+ }
+
+ // ok
+ with_safety_comment!((i32));
+}
+
+mod unsafe_impl_macro_and_not_macro {
+ unsafe trait T {}
+
+ // error
+ macro_rules! no_safety_comment {
+ ($t:ty) => {
+ unsafe impl T for $t {}
+ };
+ }
+
+ // ok
+ no_safety_comment!(());
+
+ // error
+ unsafe impl T for (i32) {}
+
+ // ok
+ no_safety_comment!(u32);
+
+ // error
+ unsafe impl T for (bool) {}
+}
+
+#[rustfmt::skip]
+mod unsafe_impl_valid_comment {
+ unsafe trait SaFety {}
+ // SaFety:
+ unsafe impl SaFety for () {}
+
+ unsafe trait MultiLineComment {}
+ // The following impl is safe
+ // ...
+ // Safety: reason
+ unsafe impl MultiLineComment for () {}
+
+ unsafe trait NoAscii {}
+ // 安全 SAFETY: 以下のコードは安全です
+ unsafe impl NoAscii for () {}
+
+ unsafe trait InlineAndPrecedingComment {}
+ // SAFETY:
+ /* comment */ unsafe impl InlineAndPrecedingComment for () {}
+
+ unsafe trait BuriedSafety {}
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor
+ // incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ // ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
+ // reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
+ // occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est
+ // laborum. Safety:
+ // Tellus elementum sagittis vitae et leo duis ut diam quam. Sit amet nulla facilisi
+ // morbi tempus iaculis urna. Amet luctus venenatis lectus magna. At quis risus sed vulputate odio
+ // ut. Luctus venenatis lectus magna fringilla urna. Tortor id aliquet lectus proin nibh nisl
+ // condimentum id venenatis. Vulputate dignissim suspendisse in est ante in nibh mauris cursus.
+ unsafe impl BuriedSafety for () {}
+
+ unsafe trait MultiLineBlockComment {}
+ /* This is a description
+ * Safety: */
+ unsafe impl MultiLineBlockComment for () {}
+}
+
+#[rustfmt::skip]
+mod unsafe_impl_invalid_comment {
+ unsafe trait NoComment {}
+
+ unsafe impl NoComment for () {}
+
+ unsafe trait InlineComment {}
+
+ /* SAFETY: */ unsafe impl InlineComment for () {}
+
+ unsafe trait TrailingComment {}
+
+ unsafe impl TrailingComment for () {} // SAFETY:
+
+ unsafe trait Interference {}
+ // SAFETY:
+ const BIG_NUMBER: i32 = 1000000;
+ unsafe impl Interference for () {}
+}
+
+unsafe trait ImplInFn {}
+
+fn impl_in_fn() {
+ // error
+ unsafe impl ImplInFn for () {}
+
+ // SAFETY: ok
+ unsafe impl ImplInFn for (i32) {}
+}
+
+unsafe trait CrateRoot {}
+
+// error
+unsafe impl CrateRoot for () {}
+
+// SAFETY: ok
+unsafe impl CrateRoot for (i32) {}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/undocumented_unsafe_blocks.stderr b/src/tools/clippy/tests/ui/undocumented_unsafe_blocks.stderr
new file mode 100644
index 000000000..c6a212744
--- /dev/null
+++ b/src/tools/clippy/tests/ui/undocumented_unsafe_blocks.stderr
@@ -0,0 +1,267 @@
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:262:19
+ |
+LL | /* Safety: */ unsafe {}
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::undocumented-unsafe-blocks` implied by `-D warnings`
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:266:5
+ |
+LL | unsafe {}
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:270:14
+ |
+LL | let _ = [unsafe { 14 }, unsafe { 15 }, 42, unsafe { 16 }];
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:270:29
+ |
+LL | let _ = [unsafe { 14 }, unsafe { 15 }, 42, unsafe { 16 }];
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:270:48
+ |
+LL | let _ = [unsafe { 14 }, unsafe { 15 }, 42, unsafe { 16 }];
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:274:18
+ |
+LL | let _ = (42, unsafe {}, "test", unsafe {});
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:274:37
+ |
+LL | let _ = (42, unsafe {}, "test", unsafe {});
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:278:14
+ |
+LL | let _ = *unsafe { &42 };
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:283:19
+ |
+LL | let _ = match unsafe {} {
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:289:14
+ |
+LL | let _ = &unsafe {};
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:293:14
+ |
+LL | let _ = [unsafe {}; 5];
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:297:13
+ |
+LL | let _ = unsafe {};
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:307:8
+ |
+LL | t!(unsafe {});
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:313:13
+ |
+LL | unsafe {}
+ | ^^^^^^^^^
+...
+LL | t!();
+ | ---- in this macro invocation
+ |
+ = help: consider adding a safety comment on the preceding line
+ = note: this error originates in the macro `t` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:321:5
+ |
+LL | unsafe {} // SAFETY:
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:325:5
+ |
+LL | unsafe {
+ | ^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:335:5
+ |
+LL | unsafe {};
+ | ^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe block missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:339:20
+ |
+LL | println!("{}", unsafe { String::from_utf8_unchecked(vec![]) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:346:5
+ |
+LL | unsafe impl A for () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:353:9
+ |
+LL | unsafe impl B for (u32) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:374:13
+ |
+LL | unsafe impl T for $t {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | no_safety_comment!(());
+ | ---------------------- in this macro invocation
+ |
+ = help: consider adding a safety comment on the preceding line
+ = note: this error originates in the macro `no_safety_comment` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:399:13
+ |
+LL | unsafe impl T for $t {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | no_safety_comment!(());
+ | ---------------------- in this macro invocation
+ |
+ = help: consider adding a safety comment on the preceding line
+ = note: this error originates in the macro `no_safety_comment` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:407:5
+ |
+LL | unsafe impl T for (i32) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:399:13
+ |
+LL | unsafe impl T for $t {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | no_safety_comment!(u32);
+ | ----------------------- in this macro invocation
+ |
+ = help: consider adding a safety comment on the preceding line
+ = note: this error originates in the macro `no_safety_comment` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:413:5
+ |
+LL | unsafe impl T for (bool) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:459:5
+ |
+LL | unsafe impl NoComment for () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:463:19
+ |
+LL | /* SAFETY: */ unsafe impl InlineComment for () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:467:5
+ |
+LL | unsafe impl TrailingComment for () {} // SAFETY:
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:472:5
+ |
+LL | unsafe impl Interference for () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:479:5
+ |
+LL | unsafe impl ImplInFn for () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: unsafe impl missing a safety comment
+ --> $DIR/undocumented_unsafe_blocks.rs:488:1
+ |
+LL | unsafe impl CrateRoot for () {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a safety comment on the preceding line
+
+error: aborting due to 31 previous errors
+
diff --git a/src/tools/clippy/tests/ui/undropped_manually_drops.rs b/src/tools/clippy/tests/ui/undropped_manually_drops.rs
new file mode 100644
index 000000000..f4cfc92e1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/undropped_manually_drops.rs
@@ -0,0 +1,26 @@
+#![warn(clippy::undropped_manually_drops)]
+
+struct S;
+
+fn main() {
+ let f = std::mem::drop;
+ let g = std::mem::ManuallyDrop::drop;
+ let mut manual1 = std::mem::ManuallyDrop::new(S);
+ let mut manual2 = std::mem::ManuallyDrop::new(S);
+ let mut manual3 = std::mem::ManuallyDrop::new(S);
+ let mut manual4 = std::mem::ManuallyDrop::new(S);
+
+ // These lines will not drop `S` and should be linted
+ drop(std::mem::ManuallyDrop::new(S));
+ drop(manual1);
+
+ // FIXME: this line is not linted, though it should be
+ f(manual2);
+
+ // These lines will drop `S` and should be okay.
+ unsafe {
+ std::mem::ManuallyDrop::drop(&mut std::mem::ManuallyDrop::new(S));
+ std::mem::ManuallyDrop::drop(&mut manual3);
+ g(&mut manual4);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/undropped_manually_drops.stderr b/src/tools/clippy/tests/ui/undropped_manually_drops.stderr
new file mode 100644
index 000000000..2ac0fe986
--- /dev/null
+++ b/src/tools/clippy/tests/ui/undropped_manually_drops.stderr
@@ -0,0 +1,19 @@
+error: the inner value of this ManuallyDrop will not be dropped
+ --> $DIR/undropped_manually_drops.rs:14:5
+ |
+LL | drop(std::mem::ManuallyDrop::new(S));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::undropped-manually-drops` implied by `-D warnings`
+ = help: to drop a `ManuallyDrop<T>`, use std::mem::ManuallyDrop::drop
+
+error: the inner value of this ManuallyDrop will not be dropped
+ --> $DIR/undropped_manually_drops.rs:15:5
+ |
+LL | drop(manual1);
+ | ^^^^^^^^^^^^^
+ |
+ = help: to drop a `ManuallyDrop<T>`, use std::mem::ManuallyDrop::drop
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unicode.fixed b/src/tools/clippy/tests/ui/unicode.fixed
new file mode 100644
index 000000000..328cda369
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unicode.fixed
@@ -0,0 +1,36 @@
+// run-rustfix
+#[warn(clippy::invisible_characters)]
+fn zero() {
+ print!("Here >\u{200B}< is a ZWS, and \u{200B}another");
+ print!("This\u{200B}is\u{200B}fine");
+ print!("Here >\u{AD}< is a SHY, and \u{AD}another");
+ print!("This\u{ad}is\u{ad}fine");
+ print!("Here >\u{2060}< is a WJ, and \u{2060}another");
+ print!("This\u{2060}is\u{2060}fine");
+}
+
+#[warn(clippy::unicode_not_nfc)]
+fn canon() {
+ print!("̀àh?");
+ print!("a\u{0300}h?"); // also ok
+}
+
+#[warn(clippy::non_ascii_literal)]
+fn uni() {
+ print!("\u{dc}ben!");
+ print!("\u{DC}ben!"); // this is ok
+}
+
+// issue 8013
+#[warn(clippy::non_ascii_literal)]
+fn single_quote() {
+ const _EMPTY_BLOCK: char = '\u{25b1}';
+ const _FULL_BLOCK: char = '\u{25b0}';
+}
+
+fn main() {
+ zero();
+ uni();
+ canon();
+ single_quote();
+}
diff --git a/src/tools/clippy/tests/ui/unicode.rs b/src/tools/clippy/tests/ui/unicode.rs
new file mode 100644
index 000000000..7828d6bcb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unicode.rs
@@ -0,0 +1,36 @@
+// run-rustfix
+#[warn(clippy::invisible_characters)]
+fn zero() {
+ print!("Here >​< is a ZWS, and ​another");
+ print!("This\u{200B}is\u{200B}fine");
+ print!("Here >­< is a SHY, and ­another");
+ print!("This\u{ad}is\u{ad}fine");
+ print!("Here >⁠< is a WJ, and ⁠another");
+ print!("This\u{2060}is\u{2060}fine");
+}
+
+#[warn(clippy::unicode_not_nfc)]
+fn canon() {
+ print!("̀àh?");
+ print!("a\u{0300}h?"); // also ok
+}
+
+#[warn(clippy::non_ascii_literal)]
+fn uni() {
+ print!("Üben!");
+ print!("\u{DC}ben!"); // this is ok
+}
+
+// issue 8013
+#[warn(clippy::non_ascii_literal)]
+fn single_quote() {
+ const _EMPTY_BLOCK: char = '▱';
+ const _FULL_BLOCK: char = '▰';
+}
+
+fn main() {
+ zero();
+ uni();
+ canon();
+ single_quote();
+}
diff --git a/src/tools/clippy/tests/ui/unicode.stderr b/src/tools/clippy/tests/ui/unicode.stderr
new file mode 100644
index 000000000..01d3f3c02
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unicode.stderr
@@ -0,0 +1,50 @@
+error: invisible character detected
+ --> $DIR/unicode.rs:4:12
+ |
+LL | print!("Here >​< is a ZWS, and ​another");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider replacing the string with: `"Here >/u{200B}< is a ZWS, and /u{200B}another"`
+ |
+ = note: `-D clippy::invisible-characters` implied by `-D warnings`
+
+error: invisible character detected
+ --> $DIR/unicode.rs:6:12
+ |
+LL | print!("Here >­< is a SHY, and ­another");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider replacing the string with: `"Here >/u{AD}< is a SHY, and /u{AD}another"`
+
+error: invisible character detected
+ --> $DIR/unicode.rs:8:12
+ |
+LL | print!("Here >⁠< is a WJ, and ⁠another");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider replacing the string with: `"Here >/u{2060}< is a WJ, and /u{2060}another"`
+
+error: non-NFC Unicode sequence detected
+ --> $DIR/unicode.rs:14:12
+ |
+LL | print!("̀àh?");
+ | ^^^^^ help: consider replacing the string with: `"̀àh?"`
+ |
+ = note: `-D clippy::unicode-not-nfc` implied by `-D warnings`
+
+error: literal non-ASCII character detected
+ --> $DIR/unicode.rs:20:12
+ |
+LL | print!("Üben!");
+ | ^^^^^^^ help: consider replacing the string with: `"/u{dc}ben!"`
+ |
+ = note: `-D clippy::non-ascii-literal` implied by `-D warnings`
+
+error: literal non-ASCII character detected
+ --> $DIR/unicode.rs:27:32
+ |
+LL | const _EMPTY_BLOCK: char = '▱';
+ | ^^^ help: consider replacing the string with: `'/u{25b1}'`
+
+error: literal non-ASCII character detected
+ --> $DIR/unicode.rs:28:31
+ |
+LL | const _FULL_BLOCK: char = '▰';
+ | ^^^ help: consider replacing the string with: `'/u{25b0}'`
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/uninit.rs b/src/tools/clippy/tests/ui/uninit.rs
new file mode 100644
index 000000000..dac5ce272
--- /dev/null
+++ b/src/tools/clippy/tests/ui/uninit.rs
@@ -0,0 +1,26 @@
+#![feature(stmt_expr_attributes)]
+#![allow(clippy::let_unit_value)]
+
+use std::mem::{self, MaybeUninit};
+
+fn main() {
+ let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // edge case: For now we lint on empty arrays
+ let _: [u8; 0] = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // edge case: For now we accept unit tuples
+ let _: () = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // This is OK, because `MaybeUninit` allows uninitialized data.
+ let _: MaybeUninit<usize> = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // This is OK, because all constituent types are uninit-compatible.
+ let _: (MaybeUninit<usize>, MaybeUninit<bool>) = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // This is OK, because all constituent types are uninit-compatible.
+ let _: (MaybeUninit<usize>, [MaybeUninit<bool>; 2]) = unsafe { MaybeUninit::uninit().assume_init() };
+
+ // Was a false negative.
+ let _: usize = unsafe { mem::MaybeUninit::uninit().assume_init() };
+}
diff --git a/src/tools/clippy/tests/ui/uninit.stderr b/src/tools/clippy/tests/ui/uninit.stderr
new file mode 100644
index 000000000..15ef23494
--- /dev/null
+++ b/src/tools/clippy/tests/ui/uninit.stderr
@@ -0,0 +1,22 @@
+error: this call for this type may be undefined behavior
+ --> $DIR/uninit.rs:7:29
+ |
+LL | let _: usize = unsafe { MaybeUninit::uninit().assume_init() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `#[deny(clippy::uninit_assumed_init)]` on by default
+
+error: this call for this type may be undefined behavior
+ --> $DIR/uninit.rs:10:31
+ |
+LL | let _: [u8; 0] = unsafe { MaybeUninit::uninit().assume_init() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this call for this type may be undefined behavior
+ --> $DIR/uninit.rs:25:29
+ |
+LL | let _: usize = unsafe { mem::MaybeUninit::uninit().assume_init() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/uninit_vec.rs b/src/tools/clippy/tests/ui/uninit_vec.rs
new file mode 100644
index 000000000..dc150cf28
--- /dev/null
+++ b/src/tools/clippy/tests/ui/uninit_vec.rs
@@ -0,0 +1,94 @@
+#![warn(clippy::uninit_vec)]
+
+use std::mem::MaybeUninit;
+
+#[derive(Default)]
+struct MyVec {
+ vec: Vec<u8>,
+}
+
+fn main() {
+ // with_capacity() -> set_len() should be detected
+ let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ unsafe {
+ vec.set_len(200);
+ }
+
+ // reserve() -> set_len() should be detected
+ vec.reserve(1000);
+ unsafe {
+ vec.set_len(200);
+ }
+
+ // new() -> set_len() should be detected
+ let mut vec: Vec<u8> = Vec::new();
+ unsafe {
+ vec.set_len(200);
+ }
+
+ // default() -> set_len() should be detected
+ let mut vec: Vec<u8> = Default::default();
+ unsafe {
+ vec.set_len(200);
+ }
+
+ let mut vec: Vec<u8> = Vec::default();
+ unsafe {
+ vec.set_len(200);
+ }
+
+ // test when both calls are enclosed in the same unsafe block
+ unsafe {
+ let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ vec.set_len(200);
+
+ vec.reserve(1000);
+ vec.set_len(200);
+ }
+
+ let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ unsafe {
+ // test the case where there are other statements in the following unsafe block
+ vec.set_len(200);
+ assert!(vec.len() == 200);
+ }
+
+ // handle vec stored in the field of a struct
+ let mut my_vec = MyVec::default();
+ my_vec.vec.reserve(1000);
+ unsafe {
+ my_vec.vec.set_len(200);
+ }
+
+ my_vec.vec = Vec::with_capacity(1000);
+ unsafe {
+ my_vec.vec.set_len(200);
+ }
+
+ // Test `#[allow(...)]` attributes on inner unsafe block (shouldn't trigger)
+ let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ #[allow(clippy::uninit_vec)]
+ unsafe {
+ vec.set_len(200);
+ }
+
+ // MaybeUninit-wrapped types should not be detected
+ unsafe {
+ let mut vec: Vec<MaybeUninit<u8>> = Vec::with_capacity(1000);
+ vec.set_len(200);
+
+ let mut vec: Vec<(MaybeUninit<u8>, MaybeUninit<bool>)> = Vec::with_capacity(1000);
+ vec.set_len(200);
+
+ let mut vec: Vec<(MaybeUninit<u8>, [MaybeUninit<bool>; 2])> = Vec::with_capacity(1000);
+ vec.set_len(200);
+ }
+
+ // known false negative
+ let mut vec1: Vec<u8> = Vec::with_capacity(1000);
+ let mut vec2: Vec<u8> = Vec::with_capacity(1000);
+ unsafe {
+ vec1.set_len(200);
+ vec2.set_len(200);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/uninit_vec.stderr b/src/tools/clippy/tests/ui/uninit_vec.stderr
new file mode 100644
index 000000000..520bfb26b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/uninit_vec.stderr
@@ -0,0 +1,105 @@
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:12:5
+ |
+LL | let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::uninit-vec` implied by `-D warnings`
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:18:5
+ |
+LL | vec.reserve(1000);
+ | ^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: calling `set_len()` on empty `Vec` creates out-of-bound values
+ --> $DIR/uninit_vec.rs:24:5
+ |
+LL | let mut vec: Vec<u8> = Vec::new();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+
+error: calling `set_len()` on empty `Vec` creates out-of-bound values
+ --> $DIR/uninit_vec.rs:30:5
+ |
+LL | let mut vec: Vec<u8> = Default::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+
+error: calling `set_len()` on empty `Vec` creates out-of-bound values
+ --> $DIR/uninit_vec.rs:35:5
+ |
+LL | let mut vec: Vec<u8> = Vec::default();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:49:5
+ |
+LL | let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+...
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:58:5
+ |
+LL | my_vec.vec.reserve(1000);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | my_vec.vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:63:5
+ |
+LL | my_vec.vec = Vec::with_capacity(1000);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | unsafe {
+LL | my_vec.vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:42:9
+ |
+LL | let mut vec: Vec<u8> = Vec::with_capacity(1000);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: calling `set_len()` immediately after reserving a buffer creates uninitialized values
+ --> $DIR/uninit_vec.rs:45:9
+ |
+LL | vec.reserve(1000);
+ | ^^^^^^^^^^^^^^^^^^
+LL | vec.set_len(200);
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: initialize the buffer or wrap the content in `MaybeUninit`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unit_arg.rs b/src/tools/clippy/tests/ui/unit_arg.rs
new file mode 100644
index 000000000..38be87bdd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_arg.rs
@@ -0,0 +1,133 @@
+#![warn(clippy::unit_arg)]
+#![allow(
+ clippy::no_effect,
+ unused_must_use,
+ unused_variables,
+ clippy::unused_unit,
+ clippy::unnecessary_wraps,
+ clippy::or_fun_call,
+ clippy::needless_question_mark,
+ clippy::self_named_constructors,
+ clippy::let_unit_value
+)]
+
+use std::fmt::Debug;
+
+fn foo<T: Debug>(t: T) {
+ println!("{:?}", t);
+}
+
+fn foo3<T1: Debug, T2: Debug, T3: Debug>(t1: T1, t2: T2, t3: T3) {
+ println!("{:?}, {:?}, {:?}", t1, t2, t3);
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar<T: Debug>(&self, t: T) {
+ println!("{:?}", t);
+ }
+}
+
+fn baz<T: Debug>(t: T) {
+ foo(t);
+}
+
+trait Tr {
+ type Args;
+ fn do_it(args: Self::Args);
+}
+
+struct A;
+impl Tr for A {
+ type Args = ();
+ fn do_it(_: Self::Args) {}
+}
+
+struct B;
+impl Tr for B {
+ type Args = <A as Tr>::Args;
+
+ fn do_it(args: Self::Args) {
+ A::do_it(args)
+ }
+}
+
+fn bad() {
+ foo({
+ 1;
+ });
+ foo(foo(1));
+ foo({
+ foo(1);
+ foo(2);
+ });
+ let b = Bar;
+ b.bar({
+ 1;
+ });
+ taking_multiple_units(foo(0), foo(1));
+ taking_multiple_units(foo(0), {
+ foo(1);
+ foo(2);
+ });
+ taking_multiple_units(
+ {
+ foo(0);
+ foo(1);
+ },
+ {
+ foo(2);
+ foo(3);
+ },
+ );
+ // here Some(foo(2)) isn't the top-level statement expression, so wrap the suggestion in a block
+ None.or(Some(foo(2)));
+ // in this case, the suggestion can be inlined, no need for a surrounding block
+ // foo(()); foo(()) instead of { foo(()); foo(()) }
+ foo(foo(()));
+}
+
+fn ok() {
+ foo(());
+ foo(1);
+ foo({ 1 });
+ foo3("a", 3, vec![3]);
+ let b = Bar;
+ b.bar({ 1 });
+ b.bar(());
+ question_mark();
+ let named_unit_arg = ();
+ foo(named_unit_arg);
+ baz(());
+ B::do_it(());
+}
+
+fn question_mark() -> Result<(), ()> {
+ Ok(Ok(())?)?;
+ Ok(Ok(()))??;
+ Ok(())
+}
+
+#[allow(dead_code)]
+mod issue_2945 {
+ fn unit_fn() -> Result<(), i32> {
+ Ok(())
+ }
+
+ fn fallible() -> Result<(), i32> {
+ Ok(unit_fn()?)
+ }
+}
+
+#[allow(dead_code)]
+fn returning_expr() -> Option<()> {
+ Some(foo(1))
+}
+
+fn taking_multiple_units(a: (), b: ()) {}
+
+fn main() {
+ bad();
+ ok();
+}
diff --git a/src/tools/clippy/tests/ui/unit_arg.stderr b/src/tools/clippy/tests/ui/unit_arg.stderr
new file mode 100644
index 000000000..11cfe66a3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_arg.stderr
@@ -0,0 +1,187 @@
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:57:5
+ |
+LL | / foo({
+LL | | 1;
+LL | | });
+ | |______^
+ |
+ = note: `-D clippy::unit-arg` implied by `-D warnings`
+help: remove the semicolon from the last statement in the block
+ |
+LL | 1
+ |
+help: or move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ {
+LL + 1;
+LL + };
+LL ~ foo(());
+ |
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:60:5
+ |
+LL | foo(foo(1));
+ | ^^^^^^^^^^^
+ |
+help: move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ foo(1);
+LL ~ foo(());
+ |
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:61:5
+ |
+LL | / foo({
+LL | | foo(1);
+LL | | foo(2);
+LL | | });
+ | |______^
+ |
+help: remove the semicolon from the last statement in the block
+ |
+LL | foo(2)
+ |
+help: or move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ {
+LL + foo(1);
+LL + foo(2);
+LL + };
+LL ~ foo(());
+ |
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:66:5
+ |
+LL | / b.bar({
+LL | | 1;
+LL | | });
+ | |______^
+ |
+help: remove the semicolon from the last statement in the block
+ |
+LL | 1
+ |
+help: or move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ {
+LL + 1;
+LL + };
+LL ~ b.bar(());
+ |
+
+error: passing unit values to a function
+ --> $DIR/unit_arg.rs:69:5
+ |
+LL | taking_multiple_units(foo(0), foo(1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: move the expressions in front of the call and replace them with the unit literal `()`
+ |
+LL ~ foo(0);
+LL + foo(1);
+LL ~ taking_multiple_units((), ());
+ |
+
+error: passing unit values to a function
+ --> $DIR/unit_arg.rs:70:5
+ |
+LL | / taking_multiple_units(foo(0), {
+LL | | foo(1);
+LL | | foo(2);
+LL | | });
+ | |______^
+ |
+help: remove the semicolon from the last statement in the block
+ |
+LL | foo(2)
+ |
+help: or move the expressions in front of the call and replace them with the unit literal `()`
+ |
+LL ~ foo(0);
+LL + {
+LL + foo(1);
+LL + foo(2);
+LL + };
+LL ~ taking_multiple_units((), ());
+ |
+
+error: passing unit values to a function
+ --> $DIR/unit_arg.rs:74:5
+ |
+LL | / taking_multiple_units(
+LL | | {
+LL | | foo(0);
+LL | | foo(1);
+... |
+LL | | },
+LL | | );
+ | |_____^
+ |
+help: remove the semicolon from the last statement in the block
+ |
+LL | foo(1)
+ |
+help: remove the semicolon from the last statement in the block
+ |
+LL | foo(3)
+ |
+help: or move the expressions in front of the call and replace them with the unit literal `()`
+ |
+LL ~ {
+LL + foo(0);
+LL + foo(1);
+LL + };
+LL + {
+LL + foo(2);
+LL + foo(3);
+LL + };
+LL + taking_multiple_units(
+LL + (),
+LL + (),
+LL ~ );
+ |
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:85:13
+ |
+LL | None.or(Some(foo(2)));
+ | ^^^^^^^^^^^^
+ |
+help: move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ None.or({
+LL + foo(2);
+LL + Some(())
+LL ~ });
+ |
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:88:5
+ |
+LL | foo(foo(()));
+ | ^^^^^^^^^^^^
+ |
+help: move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ foo(());
+LL ~ foo(());
+ |
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg.rs:125:5
+ |
+LL | Some(foo(1))
+ | ^^^^^^^^^^^^
+ |
+help: move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ foo(1);
+LL + Some(())
+ |
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unit_arg_empty_blocks.fixed b/src/tools/clippy/tests/ui/unit_arg_empty_blocks.fixed
new file mode 100644
index 000000000..9400e93ca
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_arg_empty_blocks.fixed
@@ -0,0 +1,30 @@
+// run-rustfix
+#![warn(clippy::unit_arg)]
+#![allow(clippy::no_effect, unused_must_use, unused_variables)]
+
+use std::fmt::Debug;
+
+fn foo<T: Debug>(t: T) {
+ println!("{:?}", t);
+}
+
+fn foo3<T1: Debug, T2: Debug, T3: Debug>(t1: T1, t2: T2, t3: T3) {
+ println!("{:?}, {:?}, {:?}", t1, t2, t3);
+}
+
+fn bad() {
+ foo(());
+ foo3((), 2, 2);
+ foo(0);
+ taking_two_units((), ());
+ foo(0);
+ foo(1);
+ taking_three_units((), (), ());
+}
+
+fn taking_two_units(a: (), b: ()) {}
+fn taking_three_units(a: (), b: (), c: ()) {}
+
+fn main() {
+ bad();
+}
diff --git a/src/tools/clippy/tests/ui/unit_arg_empty_blocks.rs b/src/tools/clippy/tests/ui/unit_arg_empty_blocks.rs
new file mode 100644
index 000000000..5f52b6c53
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_arg_empty_blocks.rs
@@ -0,0 +1,27 @@
+// run-rustfix
+#![warn(clippy::unit_arg)]
+#![allow(clippy::no_effect, unused_must_use, unused_variables)]
+
+use std::fmt::Debug;
+
+fn foo<T: Debug>(t: T) {
+ println!("{:?}", t);
+}
+
+fn foo3<T1: Debug, T2: Debug, T3: Debug>(t1: T1, t2: T2, t3: T3) {
+ println!("{:?}, {:?}, {:?}", t1, t2, t3);
+}
+
+fn bad() {
+ foo({});
+ foo3({}, 2, 2);
+ taking_two_units({}, foo(0));
+ taking_three_units({}, foo(0), foo(1));
+}
+
+fn taking_two_units(a: (), b: ()) {}
+fn taking_three_units(a: (), b: (), c: ()) {}
+
+fn main() {
+ bad();
+}
diff --git a/src/tools/clippy/tests/ui/unit_arg_empty_blocks.stderr b/src/tools/clippy/tests/ui/unit_arg_empty_blocks.stderr
new file mode 100644
index 000000000..d35e93169
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_arg_empty_blocks.stderr
@@ -0,0 +1,45 @@
+error: passing a unit value to a function
+ --> $DIR/unit_arg_empty_blocks.rs:16:5
+ |
+LL | foo({});
+ | ^^^^--^
+ | |
+ | help: use a unit literal instead: `()`
+ |
+ = note: `-D clippy::unit-arg` implied by `-D warnings`
+
+error: passing a unit value to a function
+ --> $DIR/unit_arg_empty_blocks.rs:17:5
+ |
+LL | foo3({}, 2, 2);
+ | ^^^^^--^^^^^^^
+ | |
+ | help: use a unit literal instead: `()`
+
+error: passing unit values to a function
+ --> $DIR/unit_arg_empty_blocks.rs:18:5
+ |
+LL | taking_two_units({}, foo(0));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: move the expression in front of the call and replace it with the unit literal `()`
+ |
+LL ~ foo(0);
+LL ~ taking_two_units((), ());
+ |
+
+error: passing unit values to a function
+ --> $DIR/unit_arg_empty_blocks.rs:19:5
+ |
+LL | taking_three_units({}, foo(0), foo(1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: move the expressions in front of the call and replace them with the unit literal `()`
+ |
+LL ~ foo(0);
+LL + foo(1);
+LL ~ taking_three_units((), (), ());
+ |
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unit_cmp.rs b/src/tools/clippy/tests/ui/unit_cmp.rs
new file mode 100644
index 000000000..3d2711043
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_cmp.rs
@@ -0,0 +1,61 @@
+#![warn(clippy::unit_cmp)]
+#![allow(
+ clippy::no_effect,
+ clippy::unnecessary_operation,
+ clippy::derive_partial_eq_without_eq
+)]
+
+#[derive(PartialEq)]
+pub struct ContainsUnit(()); // should be fine
+
+fn main() {
+ // this is fine
+ if true == false {}
+
+ // this warns
+ if {
+ true;
+ } == {
+ false;
+ } {}
+
+ if {
+ true;
+ } > {
+ false;
+ } {}
+
+ assert_eq!(
+ {
+ true;
+ },
+ {
+ false;
+ }
+ );
+ debug_assert_eq!(
+ {
+ true;
+ },
+ {
+ false;
+ }
+ );
+
+ assert_ne!(
+ {
+ true;
+ },
+ {
+ false;
+ }
+ );
+ debug_assert_ne!(
+ {
+ true;
+ },
+ {
+ false;
+ }
+ );
+}
diff --git a/src/tools/clippy/tests/ui/unit_cmp.stderr b/src/tools/clippy/tests/ui/unit_cmp.stderr
new file mode 100644
index 000000000..41cf19ae6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_cmp.stderr
@@ -0,0 +1,74 @@
+error: ==-comparison of unit values detected. This will always be true
+ --> $DIR/unit_cmp.rs:16:8
+ |
+LL | if {
+ | ________^
+LL | | true;
+LL | | } == {
+LL | | false;
+LL | | } {}
+ | |_____^
+ |
+ = note: `-D clippy::unit-cmp` implied by `-D warnings`
+
+error: >-comparison of unit values detected. This will always be false
+ --> $DIR/unit_cmp.rs:22:8
+ |
+LL | if {
+ | ________^
+LL | | true;
+LL | | } > {
+LL | | false;
+LL | | } {}
+ | |_____^
+
+error: `assert_eq` of unit values detected. This will always succeed
+ --> $DIR/unit_cmp.rs:28:5
+ |
+LL | / assert_eq!(
+LL | | {
+LL | | true;
+LL | | },
+... |
+LL | | }
+LL | | );
+ | |_____^
+
+error: `debug_assert_eq` of unit values detected. This will always succeed
+ --> $DIR/unit_cmp.rs:36:5
+ |
+LL | / debug_assert_eq!(
+LL | | {
+LL | | true;
+LL | | },
+... |
+LL | | }
+LL | | );
+ | |_____^
+
+error: `assert_ne` of unit values detected. This will always fail
+ --> $DIR/unit_cmp.rs:45:5
+ |
+LL | / assert_ne!(
+LL | | {
+LL | | true;
+LL | | },
+... |
+LL | | }
+LL | | );
+ | |_____^
+
+error: `debug_assert_ne` of unit values detected. This will always fail
+ --> $DIR/unit_cmp.rs:53:5
+ |
+LL | / debug_assert_ne!(
+LL | | {
+LL | | true;
+LL | | },
+... |
+LL | | }
+LL | | );
+ | |_____^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unit_hash.rs b/src/tools/clippy/tests/ui/unit_hash.rs
new file mode 100644
index 000000000..43eb54eff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_hash.rs
@@ -0,0 +1,28 @@
+#![warn(clippy::unit_hash)]
+#![allow(clippy::let_unit_value)]
+
+use std::collections::hash_map::DefaultHasher;
+use std::hash::Hash;
+
+enum Foo {
+ Empty,
+ WithValue(u8),
+}
+
+fn do_nothing() {}
+
+fn main() {
+ let mut state = DefaultHasher::new();
+ let my_enum = Foo::Empty;
+
+ match my_enum {
+ Foo::Empty => ().hash(&mut state),
+ Foo::WithValue(x) => x.hash(&mut state),
+ }
+
+ let res = ();
+ res.hash(&mut state);
+
+ #[allow(clippy::unit_arg)]
+ do_nothing().hash(&mut state);
+}
diff --git a/src/tools/clippy/tests/ui/unit_hash.stderr b/src/tools/clippy/tests/ui/unit_hash.stderr
new file mode 100644
index 000000000..050fa55a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_hash.stderr
@@ -0,0 +1,27 @@
+error: this call to `hash` on the unit type will do nothing
+ --> $DIR/unit_hash.rs:19:23
+ |
+LL | Foo::Empty => ().hash(&mut state),
+ | ^^^^^^^^^^^^^^^^^^^ help: remove the call to `hash` or consider using: `0_u8.hash(&mut state)`
+ |
+ = note: `-D clippy::unit-hash` implied by `-D warnings`
+ = note: the implementation of `Hash` for `()` is a no-op
+
+error: this call to `hash` on the unit type will do nothing
+ --> $DIR/unit_hash.rs:24:5
+ |
+LL | res.hash(&mut state);
+ | ^^^^^^^^^^^^^^^^^^^^ help: remove the call to `hash` or consider using: `0_u8.hash(&mut state)`
+ |
+ = note: the implementation of `Hash` for `()` is a no-op
+
+error: this call to `hash` on the unit type will do nothing
+ --> $DIR/unit_hash.rs:27:5
+ |
+LL | do_nothing().hash(&mut state);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `hash` or consider using: `0_u8.hash(&mut state)`
+ |
+ = note: the implementation of `Hash` for `()` is a no-op
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unit_return_expecting_ord.rs b/src/tools/clippy/tests/ui/unit_return_expecting_ord.rs
new file mode 100644
index 000000000..bdb4710cc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_return_expecting_ord.rs
@@ -0,0 +1,36 @@
+#![warn(clippy::unit_return_expecting_ord)]
+#![allow(clippy::needless_return)]
+#![allow(clippy::unused_unit)]
+#![feature(is_sorted)]
+
+struct Struct {
+ field: isize,
+}
+
+fn double(i: isize) -> isize {
+ i * 2
+}
+
+fn unit(_i: isize) {}
+
+fn main() {
+ let mut structs = vec![Struct { field: 2 }, Struct { field: 1 }];
+ structs.sort_by_key(|s| {
+ double(s.field);
+ });
+ structs.sort_by_key(|s| double(s.field));
+ structs.is_sorted_by_key(|s| {
+ double(s.field);
+ });
+ structs.is_sorted_by_key(|s| {
+ if s.field > 0 {
+ ()
+ } else {
+ return ();
+ }
+ });
+ structs.sort_by_key(|s| {
+ return double(s.field);
+ });
+ structs.sort_by_key(|s| unit(s.field));
+}
diff --git a/src/tools/clippy/tests/ui/unit_return_expecting_ord.stderr b/src/tools/clippy/tests/ui/unit_return_expecting_ord.stderr
new file mode 100644
index 000000000..e63d58746
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unit_return_expecting_ord.stderr
@@ -0,0 +1,39 @@
+error: this closure returns the unit type which also implements Ord
+ --> $DIR/unit_return_expecting_ord.rs:18:25
+ |
+LL | structs.sort_by_key(|s| {
+ | ^^^
+ |
+ = note: `-D clippy::unit-return-expecting-ord` implied by `-D warnings`
+help: probably caused by this trailing semicolon
+ --> $DIR/unit_return_expecting_ord.rs:19:24
+ |
+LL | double(s.field);
+ | ^
+
+error: this closure returns the unit type which also implements PartialOrd
+ --> $DIR/unit_return_expecting_ord.rs:22:30
+ |
+LL | structs.is_sorted_by_key(|s| {
+ | ^^^
+ |
+help: probably caused by this trailing semicolon
+ --> $DIR/unit_return_expecting_ord.rs:23:24
+ |
+LL | double(s.field);
+ | ^
+
+error: this closure returns the unit type which also implements PartialOrd
+ --> $DIR/unit_return_expecting_ord.rs:25:30
+ |
+LL | structs.is_sorted_by_key(|s| {
+ | ^^^
+
+error: this closure returns the unit type which also implements Ord
+ --> $DIR/unit_return_expecting_ord.rs:35:25
+ |
+LL | structs.sort_by_key(|s| unit(s.field));
+ | ^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unknown_attribute.rs b/src/tools/clippy/tests/ui/unknown_attribute.rs
new file mode 100644
index 000000000..e993e63f8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unknown_attribute.rs
@@ -0,0 +1,3 @@
+#[clippy::unknown]
+#[clippy::cognitive_complexity = "1"]
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unknown_attribute.stderr b/src/tools/clippy/tests/ui/unknown_attribute.stderr
new file mode 100644
index 000000000..618c5980d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unknown_attribute.stderr
@@ -0,0 +1,8 @@
+error: usage of unknown attribute
+ --> $DIR/unknown_attribute.rs:1:11
+ |
+LL | #[clippy::unknown]
+ | ^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/unknown_clippy_lints.fixed b/src/tools/clippy/tests/ui/unknown_clippy_lints.fixed
new file mode 100644
index 000000000..4249ff8a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unknown_clippy_lints.fixed
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::pedantic)]
+// Should suggest lowercase
+#![allow(clippy::all)]
+#![warn(clippy::cmp_nan)]
+
+// Should suggest similar clippy lint name
+#[warn(clippy::if_not_else)]
+#[warn(clippy::unnecessary_cast)]
+#[warn(clippy::useless_transmute)]
+// Shouldn't suggest rustc lint name(`dead_code`)
+#[warn(clippy::drop_copy)]
+// Shouldn't suggest removed/deprecated clippy lint name(`unused_collect`)
+#[warn(clippy::unused_self)]
+// Shouldn't suggest renamed clippy lint name(`const_static_lifetime`)
+#[warn(clippy::redundant_static_lifetimes)]
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unknown_clippy_lints.rs b/src/tools/clippy/tests/ui/unknown_clippy_lints.rs
new file mode 100644
index 000000000..5db345f54
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unknown_clippy_lints.rs
@@ -0,0 +1,18 @@
+// run-rustfix
+
+#![warn(clippy::pedantic)]
+// Should suggest lowercase
+#![allow(clippy::All)]
+#![warn(clippy::CMP_NAN)]
+
+// Should suggest similar clippy lint name
+#[warn(clippy::if_not_els)]
+#[warn(clippy::UNNecsaRy_cAst)]
+#[warn(clippy::useles_transute)]
+// Shouldn't suggest rustc lint name(`dead_code`)
+#[warn(clippy::dead_cod)]
+// Shouldn't suggest removed/deprecated clippy lint name(`unused_collect`)
+#[warn(clippy::unused_colle)]
+// Shouldn't suggest renamed clippy lint name(`const_static_lifetime`)
+#[warn(clippy::const_static_lifetim)]
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unknown_clippy_lints.stderr b/src/tools/clippy/tests/ui/unknown_clippy_lints.stderr
new file mode 100644
index 000000000..421bf5ffa
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unknown_clippy_lints.stderr
@@ -0,0 +1,52 @@
+error: unknown lint: `clippy::All`
+ --> $DIR/unknown_clippy_lints.rs:5:10
+ |
+LL | #![allow(clippy::All)]
+ | ^^^^^^^^^^^ help: did you mean: `clippy::all`
+ |
+ = note: `-D unknown-lints` implied by `-D warnings`
+
+error: unknown lint: `clippy::CMP_NAN`
+ --> $DIR/unknown_clippy_lints.rs:6:9
+ |
+LL | #![warn(clippy::CMP_NAN)]
+ | ^^^^^^^^^^^^^^^ help: did you mean: `clippy::cmp_nan`
+
+error: unknown lint: `clippy::if_not_els`
+ --> $DIR/unknown_clippy_lints.rs:9:8
+ |
+LL | #[warn(clippy::if_not_els)]
+ | ^^^^^^^^^^^^^^^^^^ help: did you mean: `clippy::if_not_else`
+
+error: unknown lint: `clippy::UNNecsaRy_cAst`
+ --> $DIR/unknown_clippy_lints.rs:10:8
+ |
+LL | #[warn(clippy::UNNecsaRy_cAst)]
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: did you mean: `clippy::unnecessary_cast`
+
+error: unknown lint: `clippy::useles_transute`
+ --> $DIR/unknown_clippy_lints.rs:11:8
+ |
+LL | #[warn(clippy::useles_transute)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean: `clippy::useless_transmute`
+
+error: unknown lint: `clippy::dead_cod`
+ --> $DIR/unknown_clippy_lints.rs:13:8
+ |
+LL | #[warn(clippy::dead_cod)]
+ | ^^^^^^^^^^^^^^^^ help: did you mean: `clippy::drop_copy`
+
+error: unknown lint: `clippy::unused_colle`
+ --> $DIR/unknown_clippy_lints.rs:15:8
+ |
+LL | #[warn(clippy::unused_colle)]
+ | ^^^^^^^^^^^^^^^^^^^^ help: did you mean: `clippy::unused_self`
+
+error: unknown lint: `clippy::const_static_lifetim`
+ --> $DIR/unknown_clippy_lints.rs:17:8
+ |
+LL | #[warn(clippy::const_static_lifetim)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: did you mean: `clippy::redundant_static_lifetimes`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.fixed b/src/tools/clippy/tests/ui/unnecessary_cast.fixed
new file mode 100644
index 000000000..b352b285c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_cast.fixed
@@ -0,0 +1,91 @@
+// run-rustfix
+#![warn(clippy::unnecessary_cast)]
+#![allow(
+ unused_must_use,
+ clippy::borrow_as_ptr,
+ clippy::no_effect,
+ clippy::nonstandard_macro_braces,
+ clippy::unnecessary_operation
+)]
+
+#[rustfmt::skip]
+fn main() {
+ // Test cast_unnecessary
+ 1_i32;
+ 1_f32;
+ false;
+ &1i32 as &i32;
+
+ -1_i32;
+ - 1_i32;
+ -1_f32;
+ 1_i32;
+ 1_f32;
+
+ // macro version
+ macro_rules! foo {
+ ($a:ident, $b:ident) => {
+ #[allow(unused)]
+ pub fn $a() -> $b {
+ 1 as $b
+ }
+ };
+ }
+ foo!(a, i32);
+ foo!(b, f32);
+ foo!(c, f64);
+
+ // do not lint cast to cfg-dependent type
+ 1 as std::os::raw::c_char;
+
+ // do not lint cast to alias type
+ 1 as I32Alias;
+ &1 as &I32Alias;
+}
+
+type I32Alias = i32;
+
+mod fixable {
+ #![allow(dead_code)]
+
+ fn main() {
+ // casting integer literal to float is unnecessary
+ 100_f32;
+ 100_f64;
+ 100_f64;
+ let _ = -100_f32;
+ let _ = -100_f64;
+ let _ = -100_f64;
+ 100_f32;
+ 100_f64;
+ // Should not trigger
+ #[rustfmt::skip]
+ let v = vec!(1);
+ &v as &[i32];
+ 0x10 as f32;
+ 0o10 as f32;
+ 0b10 as f32;
+ 0x11 as f64;
+ 0o11 as f64;
+ 0b11 as f64;
+
+ 1_u32;
+ 0x10_i32;
+ 0b10_usize;
+ 0o73_u16;
+ 1_000_000_000_u32;
+
+ 1.0_f64;
+ 0.5_f32;
+
+ 1.0 as u16;
+
+ let _ = -1_i32;
+ let _ = -1.0_f32;
+
+ let _ = 1 as I32Alias;
+ let _ = &1 as &I32Alias;
+ }
+
+ type I32Alias = i32;
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.rs b/src/tools/clippy/tests/ui/unnecessary_cast.rs
new file mode 100644
index 000000000..6c8cc3eff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_cast.rs
@@ -0,0 +1,91 @@
+// run-rustfix
+#![warn(clippy::unnecessary_cast)]
+#![allow(
+ unused_must_use,
+ clippy::borrow_as_ptr,
+ clippy::no_effect,
+ clippy::nonstandard_macro_braces,
+ clippy::unnecessary_operation
+)]
+
+#[rustfmt::skip]
+fn main() {
+ // Test cast_unnecessary
+ 1i32 as i32;
+ 1f32 as f32;
+ false as bool;
+ &1i32 as &i32;
+
+ -1_i32 as i32;
+ - 1_i32 as i32;
+ -1f32 as f32;
+ 1_i32 as i32;
+ 1_f32 as f32;
+
+ // macro version
+ macro_rules! foo {
+ ($a:ident, $b:ident) => {
+ #[allow(unused)]
+ pub fn $a() -> $b {
+ 1 as $b
+ }
+ };
+ }
+ foo!(a, i32);
+ foo!(b, f32);
+ foo!(c, f64);
+
+ // do not lint cast to cfg-dependent type
+ 1 as std::os::raw::c_char;
+
+ // do not lint cast to alias type
+ 1 as I32Alias;
+ &1 as &I32Alias;
+}
+
+type I32Alias = i32;
+
+mod fixable {
+ #![allow(dead_code)]
+
+ fn main() {
+ // casting integer literal to float is unnecessary
+ 100 as f32;
+ 100 as f64;
+ 100_i32 as f64;
+ let _ = -100 as f32;
+ let _ = -100 as f64;
+ let _ = -100_i32 as f64;
+ 100. as f32;
+ 100. as f64;
+ // Should not trigger
+ #[rustfmt::skip]
+ let v = vec!(1);
+ &v as &[i32];
+ 0x10 as f32;
+ 0o10 as f32;
+ 0b10 as f32;
+ 0x11 as f64;
+ 0o11 as f64;
+ 0b11 as f64;
+
+ 1 as u32;
+ 0x10 as i32;
+ 0b10 as usize;
+ 0o73 as u16;
+ 1_000_000_000 as u32;
+
+ 1.0 as f64;
+ 0.5 as f32;
+
+ 1.0 as u16;
+
+ let _ = -1 as i32;
+ let _ = -1.0 as f32;
+
+ let _ = 1 as I32Alias;
+ let _ = &1 as &I32Alias;
+ }
+
+ type I32Alias = i32;
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.stderr b/src/tools/clippy/tests/ui/unnecessary_cast.stderr
new file mode 100644
index 000000000..bad45f002
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_cast.stderr
@@ -0,0 +1,154 @@
+error: casting integer literal to `i32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:14:5
+ |
+LL | 1i32 as i32;
+ | ^^^^^^^^^^^ help: try: `1_i32`
+ |
+ = note: `-D clippy::unnecessary-cast` implied by `-D warnings`
+
+error: casting float literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:15:5
+ |
+LL | 1f32 as f32;
+ | ^^^^^^^^^^^ help: try: `1_f32`
+
+error: casting to the same type is unnecessary (`bool` -> `bool`)
+ --> $DIR/unnecessary_cast.rs:16:5
+ |
+LL | false as bool;
+ | ^^^^^^^^^^^^^ help: try: `false`
+
+error: casting integer literal to `i32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:19:5
+ |
+LL | -1_i32 as i32;
+ | ^^^^^^^^^^^^^ help: try: `-1_i32`
+
+error: casting integer literal to `i32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:20:5
+ |
+LL | - 1_i32 as i32;
+ | ^^^^^^^^^^^^^^ help: try: `- 1_i32`
+
+error: casting float literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:21:5
+ |
+LL | -1f32 as f32;
+ | ^^^^^^^^^^^^ help: try: `-1_f32`
+
+error: casting integer literal to `i32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:22:5
+ |
+LL | 1_i32 as i32;
+ | ^^^^^^^^^^^^ help: try: `1_i32`
+
+error: casting float literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:23:5
+ |
+LL | 1_f32 as f32;
+ | ^^^^^^^^^^^^ help: try: `1_f32`
+
+error: casting integer literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:53:9
+ |
+LL | 100 as f32;
+ | ^^^^^^^^^^ help: try: `100_f32`
+
+error: casting integer literal to `f64` is unnecessary
+ --> $DIR/unnecessary_cast.rs:54:9
+ |
+LL | 100 as f64;
+ | ^^^^^^^^^^ help: try: `100_f64`
+
+error: casting integer literal to `f64` is unnecessary
+ --> $DIR/unnecessary_cast.rs:55:9
+ |
+LL | 100_i32 as f64;
+ | ^^^^^^^^^^^^^^ help: try: `100_f64`
+
+error: casting integer literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:56:17
+ |
+LL | let _ = -100 as f32;
+ | ^^^^^^^^^^^ help: try: `-100_f32`
+
+error: casting integer literal to `f64` is unnecessary
+ --> $DIR/unnecessary_cast.rs:57:17
+ |
+LL | let _ = -100 as f64;
+ | ^^^^^^^^^^^ help: try: `-100_f64`
+
+error: casting integer literal to `f64` is unnecessary
+ --> $DIR/unnecessary_cast.rs:58:17
+ |
+LL | let _ = -100_i32 as f64;
+ | ^^^^^^^^^^^^^^^ help: try: `-100_f64`
+
+error: casting float literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:59:9
+ |
+LL | 100. as f32;
+ | ^^^^^^^^^^^ help: try: `100_f32`
+
+error: casting float literal to `f64` is unnecessary
+ --> $DIR/unnecessary_cast.rs:60:9
+ |
+LL | 100. as f64;
+ | ^^^^^^^^^^^ help: try: `100_f64`
+
+error: casting integer literal to `u32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:72:9
+ |
+LL | 1 as u32;
+ | ^^^^^^^^ help: try: `1_u32`
+
+error: casting integer literal to `i32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:73:9
+ |
+LL | 0x10 as i32;
+ | ^^^^^^^^^^^ help: try: `0x10_i32`
+
+error: casting integer literal to `usize` is unnecessary
+ --> $DIR/unnecessary_cast.rs:74:9
+ |
+LL | 0b10 as usize;
+ | ^^^^^^^^^^^^^ help: try: `0b10_usize`
+
+error: casting integer literal to `u16` is unnecessary
+ --> $DIR/unnecessary_cast.rs:75:9
+ |
+LL | 0o73 as u16;
+ | ^^^^^^^^^^^ help: try: `0o73_u16`
+
+error: casting integer literal to `u32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:76:9
+ |
+LL | 1_000_000_000 as u32;
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `1_000_000_000_u32`
+
+error: casting float literal to `f64` is unnecessary
+ --> $DIR/unnecessary_cast.rs:78:9
+ |
+LL | 1.0 as f64;
+ | ^^^^^^^^^^ help: try: `1.0_f64`
+
+error: casting float literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:79:9
+ |
+LL | 0.5 as f32;
+ | ^^^^^^^^^^ help: try: `0.5_f32`
+
+error: casting integer literal to `i32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:83:17
+ |
+LL | let _ = -1 as i32;
+ | ^^^^^^^^^ help: try: `-1_i32`
+
+error: casting float literal to `f32` is unnecessary
+ --> $DIR/unnecessary_cast.rs:84:17
+ |
+LL | let _ = -1.0 as f32;
+ | ^^^^^^^^^^^ help: try: `-1.0_f32`
+
+error: aborting due to 25 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_clone.rs b/src/tools/clippy/tests/ui/unnecessary_clone.rs
new file mode 100644
index 000000000..6770a7fac
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_clone.rs
@@ -0,0 +1,110 @@
+// does not test any rustfixable lints
+
+#![warn(clippy::clone_on_ref_ptr)]
+#![allow(unused, clippy::redundant_clone, clippy::unnecessary_wraps)]
+
+use std::cell::RefCell;
+use std::rc::{self, Rc};
+use std::sync::{self, Arc};
+
+trait SomeTrait {}
+struct SomeImpl;
+impl SomeTrait for SomeImpl {}
+
+fn main() {}
+
+fn clone_on_ref_ptr() {
+ let rc = Rc::new(true);
+ let arc = Arc::new(true);
+
+ let rcweak = Rc::downgrade(&rc);
+ let arc_weak = Arc::downgrade(&arc);
+
+ rc.clone();
+ Rc::clone(&rc);
+
+ arc.clone();
+ Arc::clone(&arc);
+
+ rcweak.clone();
+ rc::Weak::clone(&rcweak);
+
+ arc_weak.clone();
+ sync::Weak::clone(&arc_weak);
+
+ let x = Arc::new(SomeImpl);
+ let _: Arc<dyn SomeTrait> = x.clone();
+}
+
+fn clone_on_copy_generic<T: Copy>(t: T) {
+ t.clone();
+
+ Some(t).clone();
+}
+
+fn clone_on_double_ref() {
+ let x = vec![1];
+ let y = &&x;
+ let z: &Vec<_> = y.clone();
+
+ println!("{:p} {:p}", *y, z);
+}
+
+mod many_derefs {
+ struct A;
+ struct B;
+ struct C;
+ struct D;
+ #[derive(Copy, Clone)]
+ struct E;
+
+ macro_rules! impl_deref {
+ ($src:ident, $dst:ident) => {
+ impl std::ops::Deref for $src {
+ type Target = $dst;
+ fn deref(&self) -> &Self::Target {
+ &$dst
+ }
+ }
+ };
+ }
+
+ impl_deref!(A, B);
+ impl_deref!(B, C);
+ impl_deref!(C, D);
+ impl std::ops::Deref for D {
+ type Target = &'static E;
+ fn deref(&self) -> &Self::Target {
+ &&E
+ }
+ }
+
+ fn go1() {
+ let a = A;
+ let _: E = a.clone();
+ let _: E = *****a;
+ }
+
+ fn check(mut encoded: &[u8]) {
+ let _ = &mut encoded.clone();
+ let _ = &encoded.clone();
+ }
+}
+
+mod issue2076 {
+ use std::rc::Rc;
+
+ macro_rules! try_opt {
+ ($expr: expr) => {
+ match $expr {
+ Some(value) => value,
+ None => return None,
+ }
+ };
+ }
+
+ fn func() -> Option<Rc<u8>> {
+ let rc = Rc::new(42);
+ Some(try_opt!(Some(rc)).clone())
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_clone.stderr b/src/tools/clippy/tests/ui/unnecessary_clone.stderr
new file mode 100644
index 000000000..94cc7777a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_clone.stderr
@@ -0,0 +1,106 @@
+error: using `.clone()` on a ref-counted pointer
+ --> $DIR/unnecessary_clone.rs:23:5
+ |
+LL | rc.clone();
+ | ^^^^^^^^^^ help: try this: `Rc::<bool>::clone(&rc)`
+ |
+ = note: `-D clippy::clone-on-ref-ptr` implied by `-D warnings`
+
+error: using `.clone()` on a ref-counted pointer
+ --> $DIR/unnecessary_clone.rs:26:5
+ |
+LL | arc.clone();
+ | ^^^^^^^^^^^ help: try this: `Arc::<bool>::clone(&arc)`
+
+error: using `.clone()` on a ref-counted pointer
+ --> $DIR/unnecessary_clone.rs:29:5
+ |
+LL | rcweak.clone();
+ | ^^^^^^^^^^^^^^ help: try this: `Weak::<bool>::clone(&rcweak)`
+
+error: using `.clone()` on a ref-counted pointer
+ --> $DIR/unnecessary_clone.rs:32:5
+ |
+LL | arc_weak.clone();
+ | ^^^^^^^^^^^^^^^^ help: try this: `Weak::<bool>::clone(&arc_weak)`
+
+error: using `.clone()` on a ref-counted pointer
+ --> $DIR/unnecessary_clone.rs:36:33
+ |
+LL | let _: Arc<dyn SomeTrait> = x.clone();
+ | ^^^^^^^^^ help: try this: `Arc::<SomeImpl>::clone(&x)`
+
+error: using `clone` on type `T` which implements the `Copy` trait
+ --> $DIR/unnecessary_clone.rs:40:5
+ |
+LL | t.clone();
+ | ^^^^^^^^^ help: try removing the `clone` call: `t`
+ |
+ = note: `-D clippy::clone-on-copy` implied by `-D warnings`
+
+error: using `clone` on type `std::option::Option<T>` which implements the `Copy` trait
+ --> $DIR/unnecessary_clone.rs:42:5
+ |
+LL | Some(t).clone();
+ | ^^^^^^^^^^^^^^^ help: try removing the `clone` call: `Some(t)`
+
+error: using `clone` on a double-reference; this will copy the reference of type `&std::vec::Vec<i32>` instead of cloning the inner type
+ --> $DIR/unnecessary_clone.rs:48:22
+ |
+LL | let z: &Vec<_> = y.clone();
+ | ^^^^^^^^^
+ |
+ = note: `#[deny(clippy::clone_double_ref)]` on by default
+help: try dereferencing it
+ |
+LL | let z: &Vec<_> = &(*y).clone();
+ | ~~~~~~~~~~~~~
+help: or try being explicit if you are sure, that you want to clone a reference
+ |
+LL | let z: &Vec<_> = <&std::vec::Vec<i32>>::clone(y);
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: using `clone` on type `many_derefs::E` which implements the `Copy` trait
+ --> $DIR/unnecessary_clone.rs:84:20
+ |
+LL | let _: E = a.clone();
+ | ^^^^^^^^^ help: try dereferencing it: `*****a`
+
+error: using `clone` on a double-reference; this will copy the reference of type `&[u8]` instead of cloning the inner type
+ --> $DIR/unnecessary_clone.rs:89:22
+ |
+LL | let _ = &mut encoded.clone();
+ | ^^^^^^^^^^^^^^^
+ |
+help: try dereferencing it
+ |
+LL | let _ = &mut &(*encoded).clone();
+ | ~~~~~~~~~~~~~~~~~~~
+help: or try being explicit if you are sure, that you want to clone a reference
+ |
+LL | let _ = &mut <&[u8]>::clone(encoded);
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: using `clone` on a double-reference; this will copy the reference of type `&[u8]` instead of cloning the inner type
+ --> $DIR/unnecessary_clone.rs:90:18
+ |
+LL | let _ = &encoded.clone();
+ | ^^^^^^^^^^^^^^^
+ |
+help: try dereferencing it
+ |
+LL | let _ = &&(*encoded).clone();
+ | ~~~~~~~~~~~~~~~~~~~
+help: or try being explicit if you are sure, that you want to clone a reference
+ |
+LL | let _ = &<&[u8]>::clone(encoded);
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: using `.clone()` on a ref-counted pointer
+ --> $DIR/unnecessary_clone.rs:108:14
+ |
+LL | Some(try_opt!(Some(rc)).clone())
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Rc::<u8>::clone(&try_opt!(Some(rc)))`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_filter_map.rs b/src/tools/clippy/tests/ui/unnecessary_filter_map.rs
new file mode 100644
index 000000000..8e01c2674
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_filter_map.rs
@@ -0,0 +1,150 @@
+#![allow(dead_code)]
+
+fn main() {
+ let _ = (0..4).filter_map(|x| if x > 1 { Some(x) } else { None });
+ let _ = (0..4).filter_map(|x| {
+ if x > 1 {
+ return Some(x);
+ };
+ None
+ });
+ let _ = (0..4).filter_map(|x| match x {
+ 0 | 1 => None,
+ _ => Some(x),
+ });
+
+ let _ = (0..4).filter_map(|x| Some(x + 1));
+
+ let _ = (0..4).filter_map(i32::checked_abs);
+}
+
+fn filter_map_none_changes_item_type() -> impl Iterator<Item = bool> {
+ "".chars().filter_map(|_| None)
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/4433#issue-483920107
+mod comment_483920107 {
+ enum Severity {
+ Warning,
+ Other,
+ }
+
+ struct ServerError;
+
+ impl ServerError {
+ fn severity(&self) -> Severity {
+ Severity::Warning
+ }
+ }
+
+ struct S {
+ warnings: Vec<ServerError>,
+ }
+
+ impl S {
+ fn foo(&mut self, server_errors: Vec<ServerError>) {
+ #[allow(unused_variables)]
+ let errors: Vec<ServerError> = server_errors
+ .into_iter()
+ .filter_map(|se| match se.severity() {
+ Severity::Warning => {
+ self.warnings.push(se);
+ None
+ },
+ _ => Some(se),
+ })
+ .collect();
+ }
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/4433#issuecomment-611006622
+mod comment_611006622 {
+ struct PendingRequest {
+ reply_to: u8,
+ token: u8,
+ expires: u8,
+ group_id: u8,
+ }
+
+ enum Value {
+ Null,
+ }
+
+ struct Node;
+
+ impl Node {
+ fn send_response(&self, _reply_to: u8, _token: u8, _value: Value) -> &Self {
+ self
+ }
+ fn on_error_warn(&self) -> &Self {
+ self
+ }
+ }
+
+ struct S {
+ pending_requests: Vec<PendingRequest>,
+ }
+
+ impl S {
+ fn foo(&mut self, node: Node, now: u8, group_id: u8) {
+ // "drain_filter"
+ self.pending_requests = self
+ .pending_requests
+ .drain(..)
+ .filter_map(|pending| {
+ if pending.expires <= now {
+ return None; // Expired, remove
+ }
+
+ if pending.group_id == group_id {
+ // Matched - reuse strings and remove
+ node.send_response(pending.reply_to, pending.token, Value::Null)
+ .on_error_warn();
+ None
+ } else {
+ // Keep waiting
+ Some(pending)
+ }
+ })
+ .collect();
+ }
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/4433#issuecomment-621925270
+// This extrapolation doesn't reproduce the false positive. Additional context seems necessary.
+mod comment_621925270 {
+ struct Signature(u8);
+
+ fn foo(sig_packets: impl Iterator<Item = Result<Signature, ()>>) -> impl Iterator<Item = u8> {
+ sig_packets.filter_map(|res| match res {
+ Ok(Signature(sig_packet)) => Some(sig_packet),
+ _ => None,
+ })
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/4433#issuecomment-1052978898
+mod comment_1052978898 {
+ #![allow(clippy::redundant_closure)]
+
+ pub struct S(u8);
+
+ impl S {
+ pub fn consume(self) {
+ println!("yum");
+ }
+ }
+
+ pub fn filter_owned() -> impl Iterator<Item = S> {
+ (0..10).map(|i| S(i)).filter_map(|s| {
+ if s.0 & 1 == 0 {
+ s.consume();
+ None
+ } else {
+ Some(s)
+ }
+ })
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr b/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr
new file mode 100644
index 000000000..5585b10ab
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr
@@ -0,0 +1,38 @@
+error: this `.filter_map` can be written more simply using `.filter`
+ --> $DIR/unnecessary_filter_map.rs:4:13
+ |
+LL | let _ = (0..4).filter_map(|x| if x > 1 { Some(x) } else { None });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unnecessary-filter-map` implied by `-D warnings`
+
+error: this `.filter_map` can be written more simply using `.filter`
+ --> $DIR/unnecessary_filter_map.rs:5:13
+ |
+LL | let _ = (0..4).filter_map(|x| {
+ | _____________^
+LL | | if x > 1 {
+LL | | return Some(x);
+LL | | };
+LL | | None
+LL | | });
+ | |______^
+
+error: this `.filter_map` can be written more simply using `.filter`
+ --> $DIR/unnecessary_filter_map.rs:11:13
+ |
+LL | let _ = (0..4).filter_map(|x| match x {
+ | _____________^
+LL | | 0 | 1 => None,
+LL | | _ => Some(x),
+LL | | });
+ | |______^
+
+error: this `.filter_map` can be written more simply using `.map`
+ --> $DIR/unnecessary_filter_map.rs:16:13
+ |
+LL | let _ = (0..4).filter_map(|x| Some(x + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_find_map.rs b/src/tools/clippy/tests/ui/unnecessary_find_map.rs
new file mode 100644
index 000000000..a52390861
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_find_map.rs
@@ -0,0 +1,23 @@
+#![allow(dead_code)]
+
+fn main() {
+ let _ = (0..4).find_map(|x| if x > 1 { Some(x) } else { None });
+ let _ = (0..4).find_map(|x| {
+ if x > 1 {
+ return Some(x);
+ };
+ None
+ });
+ let _ = (0..4).find_map(|x| match x {
+ 0 | 1 => None,
+ _ => Some(x),
+ });
+
+ let _ = (0..4).find_map(|x| Some(x + 1));
+
+ let _ = (0..4).find_map(i32::checked_abs);
+}
+
+fn find_map_none_changes_item_type() -> Option<bool> {
+ "".chars().find_map(|_| None)
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_find_map.stderr b/src/tools/clippy/tests/ui/unnecessary_find_map.stderr
new file mode 100644
index 000000000..fb33c122f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_find_map.stderr
@@ -0,0 +1,38 @@
+error: this `.find_map` can be written more simply using `.find`
+ --> $DIR/unnecessary_find_map.rs:4:13
+ |
+LL | let _ = (0..4).find_map(|x| if x > 1 { Some(x) } else { None });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unnecessary-find-map` implied by `-D warnings`
+
+error: this `.find_map` can be written more simply using `.find`
+ --> $DIR/unnecessary_find_map.rs:5:13
+ |
+LL | let _ = (0..4).find_map(|x| {
+ | _____________^
+LL | | if x > 1 {
+LL | | return Some(x);
+LL | | };
+LL | | None
+LL | | });
+ | |______^
+
+error: this `.find_map` can be written more simply using `.find`
+ --> $DIR/unnecessary_find_map.rs:11:13
+ |
+LL | let _ = (0..4).find_map(|x| match x {
+ | _____________^
+LL | | 0 | 1 => None,
+LL | | _ => Some(x),
+LL | | });
+ | |______^
+
+error: this `.find_map` can be written more simply using `.map(..).next()`
+ --> $DIR/unnecessary_find_map.rs:16:13
+ |
+LL | let _ = (0..4).find_map(|x| Some(x + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_fold.fixed b/src/tools/clippy/tests/ui/unnecessary_fold.fixed
new file mode 100644
index 000000000..52300a3b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_fold.fixed
@@ -0,0 +1,52 @@
+// run-rustfix
+
+#![allow(dead_code)]
+
+/// Calls which should trigger the `UNNECESSARY_FOLD` lint
+fn unnecessary_fold() {
+ // Can be replaced by .any
+ let _ = (0..3).any(|x| x > 2);
+ // Can be replaced by .all
+ let _ = (0..3).all(|x| x > 2);
+ // Can be replaced by .sum
+ let _: i32 = (0..3).sum();
+ // Can be replaced by .product
+ let _: i32 = (0..3).product();
+}
+
+/// Should trigger the `UNNECESSARY_FOLD` lint, with an error span including exactly `.fold(...)`
+fn unnecessary_fold_span_for_multi_element_chain() {
+ let _: bool = (0..3).map(|x| 2 * x).any(|x| x > 2);
+}
+
+/// Calls which should not trigger the `UNNECESSARY_FOLD` lint
+fn unnecessary_fold_should_ignore() {
+ let _ = (0..3).fold(true, |acc, x| acc || x > 2);
+ let _ = (0..3).fold(false, |acc, x| acc && x > 2);
+ let _ = (0..3).fold(1, |acc, x| acc + x);
+ let _ = (0..3).fold(0, |acc, x| acc * x);
+ let _ = (0..3).fold(0, |acc, x| 1 + acc + x);
+
+ // We only match against an accumulator on the left
+ // hand side. We could lint for .sum and .product when
+ // it's on the right, but don't for now (and this wouldn't
+ // be valid if we extended the lint to cover arbitrary numeric
+ // types).
+ let _ = (0..3).fold(false, |acc, x| x > 2 || acc);
+ let _ = (0..3).fold(true, |acc, x| x > 2 && acc);
+ let _ = (0..3).fold(0, |acc, x| x + acc);
+ let _ = (0..3).fold(1, |acc, x| x * acc);
+
+ let _ = [(0..2), (0..3)].iter().fold(0, |a, b| a + b.len());
+ let _ = [(0..2), (0..3)].iter().fold(1, |a, b| a * b.len());
+}
+
+/// Should lint only the line containing the fold
+fn unnecessary_fold_over_multiple_lines() {
+ let _ = (0..3)
+ .map(|x| x + 1)
+ .filter(|x| x % 2 == 0)
+ .any(|x| x > 2);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unnecessary_fold.rs b/src/tools/clippy/tests/ui/unnecessary_fold.rs
new file mode 100644
index 000000000..4028d80c0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_fold.rs
@@ -0,0 +1,52 @@
+// run-rustfix
+
+#![allow(dead_code)]
+
+/// Calls which should trigger the `UNNECESSARY_FOLD` lint
+fn unnecessary_fold() {
+ // Can be replaced by .any
+ let _ = (0..3).fold(false, |acc, x| acc || x > 2);
+ // Can be replaced by .all
+ let _ = (0..3).fold(true, |acc, x| acc && x > 2);
+ // Can be replaced by .sum
+ let _: i32 = (0..3).fold(0, |acc, x| acc + x);
+ // Can be replaced by .product
+ let _: i32 = (0..3).fold(1, |acc, x| acc * x);
+}
+
+/// Should trigger the `UNNECESSARY_FOLD` lint, with an error span including exactly `.fold(...)`
+fn unnecessary_fold_span_for_multi_element_chain() {
+ let _: bool = (0..3).map(|x| 2 * x).fold(false, |acc, x| acc || x > 2);
+}
+
+/// Calls which should not trigger the `UNNECESSARY_FOLD` lint
+fn unnecessary_fold_should_ignore() {
+ let _ = (0..3).fold(true, |acc, x| acc || x > 2);
+ let _ = (0..3).fold(false, |acc, x| acc && x > 2);
+ let _ = (0..3).fold(1, |acc, x| acc + x);
+ let _ = (0..3).fold(0, |acc, x| acc * x);
+ let _ = (0..3).fold(0, |acc, x| 1 + acc + x);
+
+ // We only match against an accumulator on the left
+ // hand side. We could lint for .sum and .product when
+ // it's on the right, but don't for now (and this wouldn't
+ // be valid if we extended the lint to cover arbitrary numeric
+ // types).
+ let _ = (0..3).fold(false, |acc, x| x > 2 || acc);
+ let _ = (0..3).fold(true, |acc, x| x > 2 && acc);
+ let _ = (0..3).fold(0, |acc, x| x + acc);
+ let _ = (0..3).fold(1, |acc, x| x * acc);
+
+ let _ = [(0..2), (0..3)].iter().fold(0, |a, b| a + b.len());
+ let _ = [(0..2), (0..3)].iter().fold(1, |a, b| a * b.len());
+}
+
+/// Should lint only the line containing the fold
+fn unnecessary_fold_over_multiple_lines() {
+ let _ = (0..3)
+ .map(|x| x + 1)
+ .filter(|x| x % 2 == 0)
+ .fold(false, |acc, x| acc || x > 2);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unnecessary_fold.stderr b/src/tools/clippy/tests/ui/unnecessary_fold.stderr
new file mode 100644
index 000000000..22c44588a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_fold.stderr
@@ -0,0 +1,40 @@
+error: this `.fold` can be written more succinctly using another method
+ --> $DIR/unnecessary_fold.rs:8:20
+ |
+LL | let _ = (0..3).fold(false, |acc, x| acc || x > 2);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `any(|x| x > 2)`
+ |
+ = note: `-D clippy::unnecessary-fold` implied by `-D warnings`
+
+error: this `.fold` can be written more succinctly using another method
+ --> $DIR/unnecessary_fold.rs:10:20
+ |
+LL | let _ = (0..3).fold(true, |acc, x| acc && x > 2);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `all(|x| x > 2)`
+
+error: this `.fold` can be written more succinctly using another method
+ --> $DIR/unnecessary_fold.rs:12:25
+ |
+LL | let _: i32 = (0..3).fold(0, |acc, x| acc + x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `sum()`
+
+error: this `.fold` can be written more succinctly using another method
+ --> $DIR/unnecessary_fold.rs:14:25
+ |
+LL | let _: i32 = (0..3).fold(1, |acc, x| acc * x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `product()`
+
+error: this `.fold` can be written more succinctly using another method
+ --> $DIR/unnecessary_fold.rs:19:41
+ |
+LL | let _: bool = (0..3).map(|x| 2 * x).fold(false, |acc, x| acc || x > 2);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `any(|x| x > 2)`
+
+error: this `.fold` can be written more succinctly using another method
+ --> $DIR/unnecessary_fold.rs:49:10
+ |
+LL | .fold(false, |acc, x| acc || x > 2);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `any(|x| x > 2)`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed
new file mode 100644
index 000000000..e01e9f07b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed
@@ -0,0 +1,142 @@
+// run-rustfix
+
+#![allow(unused_assignments)]
+#![warn(clippy::unnecessary_to_owned)]
+
+#[allow(dead_code)]
+#[derive(Clone, Copy)]
+enum FileType {
+ Account,
+ PrivateKey,
+ Certificate,
+}
+
+fn main() {
+ let path = std::path::Path::new("x");
+
+ let _ = check_files(&[(FileType::Account, path)]);
+ let _ = check_files_vec(vec![(FileType::Account, path)]);
+
+ // negative tests
+ let _ = check_files_ref(&[(FileType::Account, path)]);
+ let _ = check_files_mut(&[(FileType::Account, path)]);
+ let _ = check_files_ref_mut(&[(FileType::Account, path)]);
+ let _ = check_files_self_and_arg(&[(FileType::Account, path)]);
+ let _ = check_files_mut_path_buf(&[(FileType::Account, std::path::PathBuf::new())]);
+}
+
+// `check_files` and its variants are based on:
+// https://github.com/breard-r/acmed/blob/1f0dcc32aadbc5e52de6d23b9703554c0f925113/acmed/src/storage.rs#L262
+fn check_files(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (t, path) in files {
+ let other = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_vec(files: Vec<(FileType, &std::path::Path)>) -> bool {
+ for (t, path) in files.iter() {
+ let other = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_ref(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (ref t, path) in files.iter().copied() {
+ let other = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+#[allow(unused_assignments)]
+fn check_files_mut(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (mut t, path) in files.iter().copied() {
+ t = FileType::PrivateKey;
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_ref_mut(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (ref mut t, path) in files.iter().copied() {
+ *t = FileType::PrivateKey;
+ let other = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_self_and_arg(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (t, path) in files.iter().copied() {
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.join(path).is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+#[allow(unused_assignments)]
+fn check_files_mut_path_buf(files: &[(FileType, std::path::PathBuf)]) -> bool {
+ for (mut t, path) in files.iter().cloned() {
+ t = FileType::PrivateKey;
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn get_file_path(_file_type: &FileType) -> Result<std::path::PathBuf, std::io::Error> {
+ Ok(std::path::PathBuf::new())
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs
new file mode 100644
index 000000000..6ef2966c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs
@@ -0,0 +1,142 @@
+// run-rustfix
+
+#![allow(unused_assignments)]
+#![warn(clippy::unnecessary_to_owned)]
+
+#[allow(dead_code)]
+#[derive(Clone, Copy)]
+enum FileType {
+ Account,
+ PrivateKey,
+ Certificate,
+}
+
+fn main() {
+ let path = std::path::Path::new("x");
+
+ let _ = check_files(&[(FileType::Account, path)]);
+ let _ = check_files_vec(vec![(FileType::Account, path)]);
+
+ // negative tests
+ let _ = check_files_ref(&[(FileType::Account, path)]);
+ let _ = check_files_mut(&[(FileType::Account, path)]);
+ let _ = check_files_ref_mut(&[(FileType::Account, path)]);
+ let _ = check_files_self_and_arg(&[(FileType::Account, path)]);
+ let _ = check_files_mut_path_buf(&[(FileType::Account, std::path::PathBuf::new())]);
+}
+
+// `check_files` and its variants are based on:
+// https://github.com/breard-r/acmed/blob/1f0dcc32aadbc5e52de6d23b9703554c0f925113/acmed/src/storage.rs#L262
+fn check_files(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (t, path) in files.iter().copied() {
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_vec(files: Vec<(FileType, &std::path::Path)>) -> bool {
+ for (t, path) in files.iter().copied() {
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_ref(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (ref t, path) in files.iter().copied() {
+ let other = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+#[allow(unused_assignments)]
+fn check_files_mut(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (mut t, path) in files.iter().copied() {
+ t = FileType::PrivateKey;
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_ref_mut(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (ref mut t, path) in files.iter().copied() {
+ *t = FileType::PrivateKey;
+ let other = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn check_files_self_and_arg(files: &[(FileType, &std::path::Path)]) -> bool {
+ for (t, path) in files.iter().copied() {
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.join(path).is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+#[allow(unused_assignments)]
+fn check_files_mut_path_buf(files: &[(FileType, std::path::PathBuf)]) -> bool {
+ for (mut t, path) in files.iter().cloned() {
+ t = FileType::PrivateKey;
+ let other = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() || !other.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn get_file_path(_file_type: &FileType) -> Result<std::path::PathBuf, std::io::Error> {
+ Ok(std::path::PathBuf::new())
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr
new file mode 100644
index 000000000..8f151e620
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.stderr
@@ -0,0 +1,35 @@
+error: unnecessary use of `copied`
+ --> $DIR/unnecessary_iter_cloned.rs:31:22
+ |
+LL | for (t, path) in files.iter().copied() {
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unnecessary-to-owned` implied by `-D warnings`
+help: use
+ |
+LL | for (t, path) in files {
+ | ~~~~~
+help: remove this `&`
+ |
+LL - let other = match get_file_path(&t) {
+LL + let other = match get_file_path(t) {
+ |
+
+error: unnecessary use of `copied`
+ --> $DIR/unnecessary_iter_cloned.rs:46:22
+ |
+LL | for (t, path) in files.iter().copied() {
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use
+ |
+LL | for (t, path) in files.iter() {
+ | ~~~~~~~~~~~~
+help: remove this `&`
+ |
+LL - let other = match get_file_path(&t) {
+LL + let other = match get_file_path(t) {
+ |
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_join.fixed b/src/tools/clippy/tests/ui/unnecessary_join.fixed
new file mode 100644
index 000000000..7e12c6ae4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_join.fixed
@@ -0,0 +1,35 @@
+// run-rustfix
+
+#![warn(clippy::unnecessary_join)]
+
+fn main() {
+ // should be linted
+ let vector = vec!["hello", "world"];
+ let output = vector
+ .iter()
+ .map(|item| item.to_uppercase())
+ .collect::<String>();
+ println!("{}", output);
+
+ // should be linted
+ let vector = vec!["hello", "world"];
+ let output = vector
+ .iter()
+ .map(|item| item.to_uppercase())
+ .collect::<String>();
+ println!("{}", output);
+
+ // should not be linted
+ let vector = vec!["hello", "world"];
+ let output = vector
+ .iter()
+ .map(|item| item.to_uppercase())
+ .collect::<Vec<String>>()
+ .join("\n");
+ println!("{}", output);
+
+ // should not be linted
+ let vector = vec!["hello", "world"];
+ let output = vector.iter().map(|item| item.to_uppercase()).collect::<String>();
+ println!("{}", output);
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_join.rs b/src/tools/clippy/tests/ui/unnecessary_join.rs
new file mode 100644
index 000000000..0a21656a7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_join.rs
@@ -0,0 +1,37 @@
+// run-rustfix
+
+#![warn(clippy::unnecessary_join)]
+
+fn main() {
+ // should be linted
+ let vector = vec!["hello", "world"];
+ let output = vector
+ .iter()
+ .map(|item| item.to_uppercase())
+ .collect::<Vec<String>>()
+ .join("");
+ println!("{}", output);
+
+ // should be linted
+ let vector = vec!["hello", "world"];
+ let output = vector
+ .iter()
+ .map(|item| item.to_uppercase())
+ .collect::<Vec<_>>()
+ .join("");
+ println!("{}", output);
+
+ // should not be linted
+ let vector = vec!["hello", "world"];
+ let output = vector
+ .iter()
+ .map(|item| item.to_uppercase())
+ .collect::<Vec<String>>()
+ .join("\n");
+ println!("{}", output);
+
+ // should not be linted
+ let vector = vec!["hello", "world"];
+ let output = vector.iter().map(|item| item.to_uppercase()).collect::<String>();
+ println!("{}", output);
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_join.stderr b/src/tools/clippy/tests/ui/unnecessary_join.stderr
new file mode 100644
index 000000000..0b14b143a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_join.stderr
@@ -0,0 +1,20 @@
+error: called `.collect<Vec<String>>().join("")` on an iterator
+ --> $DIR/unnecessary_join.rs:11:10
+ |
+LL | .collect::<Vec<String>>()
+ | __________^
+LL | | .join("");
+ | |_________________^ help: try using: `collect::<String>()`
+ |
+ = note: `-D clippy::unnecessary-join` implied by `-D warnings`
+
+error: called `.collect<Vec<String>>().join("")` on an iterator
+ --> $DIR/unnecessary_join.rs:20:10
+ |
+LL | .collect::<Vec<_>>()
+ | __________^
+LL | | .join("");
+ | |_________________^ help: try using: `collect::<String>()`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
new file mode 100644
index 000000000..eed817968
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed
@@ -0,0 +1,132 @@
+// run-rustfix
+#![warn(clippy::unnecessary_lazy_evaluations)]
+#![allow(clippy::redundant_closure)]
+#![allow(clippy::bind_instead_of_map)]
+#![allow(clippy::map_identity)]
+
+struct Deep(Option<usize>);
+
+#[derive(Copy, Clone)]
+struct SomeStruct {
+ some_field: usize,
+}
+
+impl SomeStruct {
+ fn return_some_field(&self) -> usize {
+ self.some_field
+ }
+}
+
+fn some_call<T: Default>() -> T {
+ T::default()
+}
+
+fn main() {
+ let astronomers_pi = 10;
+ let ext_arr: [usize; 1] = [2];
+ let ext_str = SomeStruct { some_field: 10 };
+
+ let mut opt = Some(42);
+ let ext_opt = Some(42);
+ let nested_opt = Some(Some(42));
+ let nested_tuple_opt = Some(Some((42, 43)));
+ let cond = true;
+
+ // Should lint - Option
+ let _ = opt.unwrap_or(2);
+ let _ = opt.unwrap_or(astronomers_pi);
+ let _ = opt.unwrap_or(ext_str.some_field);
+ let _ = opt.unwrap_or_else(|| ext_arr[0]);
+ let _ = opt.and(ext_opt);
+ let _ = opt.or(ext_opt);
+ let _ = opt.or(None);
+ let _ = opt.get_or_insert(2);
+ let _ = opt.ok_or(2);
+ let _ = nested_tuple_opt.unwrap_or(Some((1, 2)));
+ let _ = cond.then_some(astronomers_pi);
+
+ // Cases when unwrap is not called on a simple variable
+ let _ = Some(10).unwrap_or(2);
+ let _ = Some(10).and(ext_opt);
+ let _: Option<usize> = None.or(ext_opt);
+ let _ = None.get_or_insert(2);
+ let _: Result<usize, usize> = None.ok_or(2);
+ let _: Option<usize> = None.or(None);
+
+ let mut deep = Deep(Some(42));
+ let _ = deep.0.unwrap_or(2);
+ let _ = deep.0.and(ext_opt);
+ let _ = deep.0.or(None);
+ let _ = deep.0.get_or_insert(2);
+ let _ = deep.0.ok_or(2);
+
+ // Should not lint - Option
+ let _ = opt.unwrap_or_else(|| ext_str.return_some_field());
+ let _ = nested_opt.unwrap_or_else(|| Some(some_call()));
+ let _ = nested_tuple_opt.unwrap_or_else(|| Some((some_call(), some_call())));
+ let _ = opt.or_else(some_call);
+ let _ = opt.or_else(|| some_call());
+ let _: Result<usize, usize> = opt.ok_or_else(|| some_call());
+ let _: Result<usize, usize> = opt.ok_or_else(some_call);
+ let _ = deep.0.get_or_insert_with(|| some_call());
+ let _ = deep.0.or_else(some_call);
+ let _ = deep.0.or_else(|| some_call());
+ let _ = opt.ok_or_else(|| ext_arr[0]);
+
+ // should not lint, bind_instead_of_map takes priority
+ let _ = Some(10).and_then(|idx| Some(ext_arr[idx]));
+ let _ = Some(10).and_then(|idx| Some(idx));
+
+ // should lint, bind_instead_of_map doesn't apply
+ let _: Option<usize> = None.or(Some(3));
+ let _ = deep.0.or(Some(3));
+ let _ = opt.or(Some(3));
+
+ // Should lint - Result
+ let res: Result<usize, usize> = Err(5);
+ let res2: Result<usize, SomeStruct> = Err(SomeStruct { some_field: 5 });
+
+ let _ = res2.unwrap_or(2);
+ let _ = res2.unwrap_or(astronomers_pi);
+ let _ = res2.unwrap_or(ext_str.some_field);
+
+ // Should not lint - Result
+ let _ = res.unwrap_or_else(|err| err);
+ let _ = res.unwrap_or_else(|err| ext_arr[err]);
+ let _ = res2.unwrap_or_else(|err| err.some_field);
+ let _ = res2.unwrap_or_else(|err| err.return_some_field());
+ let _ = res2.unwrap_or_else(|_| ext_str.return_some_field());
+
+ // should not lint, bind_instead_of_map takes priority
+ let _: Result<usize, usize> = res.and_then(|x| Ok(x));
+ let _: Result<usize, usize> = res.or_else(|err| Err(err));
+
+ let _: Result<usize, usize> = res.and_then(|_| Ok(2));
+ let _: Result<usize, usize> = res.and_then(|_| Ok(astronomers_pi));
+ let _: Result<usize, usize> = res.and_then(|_| Ok(ext_str.some_field));
+
+ let _: Result<usize, usize> = res.or_else(|_| Err(2));
+ let _: Result<usize, usize> = res.or_else(|_| Err(astronomers_pi));
+ let _: Result<usize, usize> = res.or_else(|_| Err(ext_str.some_field));
+
+ // should lint, bind_instead_of_map doesn't apply
+ let _: Result<usize, usize> = res.and(Err(2));
+ let _: Result<usize, usize> = res.and(Err(astronomers_pi));
+ let _: Result<usize, usize> = res.and(Err(ext_str.some_field));
+
+ let _: Result<usize, usize> = res.or(Ok(2));
+ let _: Result<usize, usize> = res.or(Ok(astronomers_pi));
+ let _: Result<usize, usize> = res.or(Ok(ext_str.some_field));
+ let _: Result<usize, usize> = res.
+ // some lines
+ // some lines
+ // some lines
+ // some lines
+ // some lines
+ // some lines
+ or(Ok(ext_str.some_field));
+
+ // neither bind_instead_of_map nor unnecessary_lazy_eval applies here
+ let _: Result<usize, usize> = res.and_then(|x| Err(x));
+ let _: Result<usize, usize> = res.or_else(|err| Ok(err));
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
new file mode 100644
index 000000000..1588db79b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs
@@ -0,0 +1,132 @@
+// run-rustfix
+#![warn(clippy::unnecessary_lazy_evaluations)]
+#![allow(clippy::redundant_closure)]
+#![allow(clippy::bind_instead_of_map)]
+#![allow(clippy::map_identity)]
+
+struct Deep(Option<usize>);
+
+#[derive(Copy, Clone)]
+struct SomeStruct {
+ some_field: usize,
+}
+
+impl SomeStruct {
+ fn return_some_field(&self) -> usize {
+ self.some_field
+ }
+}
+
+fn some_call<T: Default>() -> T {
+ T::default()
+}
+
+fn main() {
+ let astronomers_pi = 10;
+ let ext_arr: [usize; 1] = [2];
+ let ext_str = SomeStruct { some_field: 10 };
+
+ let mut opt = Some(42);
+ let ext_opt = Some(42);
+ let nested_opt = Some(Some(42));
+ let nested_tuple_opt = Some(Some((42, 43)));
+ let cond = true;
+
+ // Should lint - Option
+ let _ = opt.unwrap_or_else(|| 2);
+ let _ = opt.unwrap_or_else(|| astronomers_pi);
+ let _ = opt.unwrap_or_else(|| ext_str.some_field);
+ let _ = opt.unwrap_or_else(|| ext_arr[0]);
+ let _ = opt.and_then(|_| ext_opt);
+ let _ = opt.or_else(|| ext_opt);
+ let _ = opt.or_else(|| None);
+ let _ = opt.get_or_insert_with(|| 2);
+ let _ = opt.ok_or_else(|| 2);
+ let _ = nested_tuple_opt.unwrap_or_else(|| Some((1, 2)));
+ let _ = cond.then(|| astronomers_pi);
+
+ // Cases when unwrap is not called on a simple variable
+ let _ = Some(10).unwrap_or_else(|| 2);
+ let _ = Some(10).and_then(|_| ext_opt);
+ let _: Option<usize> = None.or_else(|| ext_opt);
+ let _ = None.get_or_insert_with(|| 2);
+ let _: Result<usize, usize> = None.ok_or_else(|| 2);
+ let _: Option<usize> = None.or_else(|| None);
+
+ let mut deep = Deep(Some(42));
+ let _ = deep.0.unwrap_or_else(|| 2);
+ let _ = deep.0.and_then(|_| ext_opt);
+ let _ = deep.0.or_else(|| None);
+ let _ = deep.0.get_or_insert_with(|| 2);
+ let _ = deep.0.ok_or_else(|| 2);
+
+ // Should not lint - Option
+ let _ = opt.unwrap_or_else(|| ext_str.return_some_field());
+ let _ = nested_opt.unwrap_or_else(|| Some(some_call()));
+ let _ = nested_tuple_opt.unwrap_or_else(|| Some((some_call(), some_call())));
+ let _ = opt.or_else(some_call);
+ let _ = opt.or_else(|| some_call());
+ let _: Result<usize, usize> = opt.ok_or_else(|| some_call());
+ let _: Result<usize, usize> = opt.ok_or_else(some_call);
+ let _ = deep.0.get_or_insert_with(|| some_call());
+ let _ = deep.0.or_else(some_call);
+ let _ = deep.0.or_else(|| some_call());
+ let _ = opt.ok_or_else(|| ext_arr[0]);
+
+ // should not lint, bind_instead_of_map takes priority
+ let _ = Some(10).and_then(|idx| Some(ext_arr[idx]));
+ let _ = Some(10).and_then(|idx| Some(idx));
+
+ // should lint, bind_instead_of_map doesn't apply
+ let _: Option<usize> = None.or_else(|| Some(3));
+ let _ = deep.0.or_else(|| Some(3));
+ let _ = opt.or_else(|| Some(3));
+
+ // Should lint - Result
+ let res: Result<usize, usize> = Err(5);
+ let res2: Result<usize, SomeStruct> = Err(SomeStruct { some_field: 5 });
+
+ let _ = res2.unwrap_or_else(|_| 2);
+ let _ = res2.unwrap_or_else(|_| astronomers_pi);
+ let _ = res2.unwrap_or_else(|_| ext_str.some_field);
+
+ // Should not lint - Result
+ let _ = res.unwrap_or_else(|err| err);
+ let _ = res.unwrap_or_else(|err| ext_arr[err]);
+ let _ = res2.unwrap_or_else(|err| err.some_field);
+ let _ = res2.unwrap_or_else(|err| err.return_some_field());
+ let _ = res2.unwrap_or_else(|_| ext_str.return_some_field());
+
+ // should not lint, bind_instead_of_map takes priority
+ let _: Result<usize, usize> = res.and_then(|x| Ok(x));
+ let _: Result<usize, usize> = res.or_else(|err| Err(err));
+
+ let _: Result<usize, usize> = res.and_then(|_| Ok(2));
+ let _: Result<usize, usize> = res.and_then(|_| Ok(astronomers_pi));
+ let _: Result<usize, usize> = res.and_then(|_| Ok(ext_str.some_field));
+
+ let _: Result<usize, usize> = res.or_else(|_| Err(2));
+ let _: Result<usize, usize> = res.or_else(|_| Err(astronomers_pi));
+ let _: Result<usize, usize> = res.or_else(|_| Err(ext_str.some_field));
+
+ // should lint, bind_instead_of_map doesn't apply
+ let _: Result<usize, usize> = res.and_then(|_| Err(2));
+ let _: Result<usize, usize> = res.and_then(|_| Err(astronomers_pi));
+ let _: Result<usize, usize> = res.and_then(|_| Err(ext_str.some_field));
+
+ let _: Result<usize, usize> = res.or_else(|_| Ok(2));
+ let _: Result<usize, usize> = res.or_else(|_| Ok(astronomers_pi));
+ let _: Result<usize, usize> = res.or_else(|_| Ok(ext_str.some_field));
+ let _: Result<usize, usize> = res.
+ // some lines
+ // some lines
+ // some lines
+ // some lines
+ // some lines
+ // some lines
+ or_else(|_| Ok(ext_str.some_field));
+
+ // neither bind_instead_of_map nor unnecessary_lazy_eval applies here
+ let _: Result<usize, usize> = res.and_then(|x| Err(x));
+ let _: Result<usize, usize> = res.or_else(|err| Ok(err));
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr
new file mode 100644
index 000000000..83dc7fd83
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.stderr
@@ -0,0 +1,283 @@
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:36:13
+ |
+LL | let _ = opt.unwrap_or_else(|| 2);
+ | ^^^^--------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+ |
+ = note: `-D clippy::unnecessary-lazy-evaluations` implied by `-D warnings`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:37:13
+ |
+LL | let _ = opt.unwrap_or_else(|| astronomers_pi);
+ | ^^^^---------------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(astronomers_pi)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:38:13
+ |
+LL | let _ = opt.unwrap_or_else(|| ext_str.some_field);
+ | ^^^^-------------------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(ext_str.some_field)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:40:13
+ |
+LL | let _ = opt.and_then(|_| ext_opt);
+ | ^^^^---------------------
+ | |
+ | help: use `and(..)` instead: `and(ext_opt)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:41:13
+ |
+LL | let _ = opt.or_else(|| ext_opt);
+ | ^^^^-------------------
+ | |
+ | help: use `or(..)` instead: `or(ext_opt)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:42:13
+ |
+LL | let _ = opt.or_else(|| None);
+ | ^^^^----------------
+ | |
+ | help: use `or(..)` instead: `or(None)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:43:13
+ |
+LL | let _ = opt.get_or_insert_with(|| 2);
+ | ^^^^------------------------
+ | |
+ | help: use `get_or_insert(..)` instead: `get_or_insert(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:44:13
+ |
+LL | let _ = opt.ok_or_else(|| 2);
+ | ^^^^----------------
+ | |
+ | help: use `ok_or(..)` instead: `ok_or(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:45:13
+ |
+LL | let _ = nested_tuple_opt.unwrap_or_else(|| Some((1, 2)));
+ | ^^^^^^^^^^^^^^^^^-------------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(Some((1, 2)))`
+
+error: unnecessary closure used with `bool::then`
+ --> $DIR/unnecessary_lazy_eval.rs:46:13
+ |
+LL | let _ = cond.then(|| astronomers_pi);
+ | ^^^^^-----------------------
+ | |
+ | help: use `then_some(..)` instead: `then_some(astronomers_pi)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:49:13
+ |
+LL | let _ = Some(10).unwrap_or_else(|| 2);
+ | ^^^^^^^^^--------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:50:13
+ |
+LL | let _ = Some(10).and_then(|_| ext_opt);
+ | ^^^^^^^^^---------------------
+ | |
+ | help: use `and(..)` instead: `and(ext_opt)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:51:28
+ |
+LL | let _: Option<usize> = None.or_else(|| ext_opt);
+ | ^^^^^-------------------
+ | |
+ | help: use `or(..)` instead: `or(ext_opt)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:52:13
+ |
+LL | let _ = None.get_or_insert_with(|| 2);
+ | ^^^^^------------------------
+ | |
+ | help: use `get_or_insert(..)` instead: `get_or_insert(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:53:35
+ |
+LL | let _: Result<usize, usize> = None.ok_or_else(|| 2);
+ | ^^^^^----------------
+ | |
+ | help: use `ok_or(..)` instead: `ok_or(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:54:28
+ |
+LL | let _: Option<usize> = None.or_else(|| None);
+ | ^^^^^----------------
+ | |
+ | help: use `or(..)` instead: `or(None)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:57:13
+ |
+LL | let _ = deep.0.unwrap_or_else(|| 2);
+ | ^^^^^^^--------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:58:13
+ |
+LL | let _ = deep.0.and_then(|_| ext_opt);
+ | ^^^^^^^---------------------
+ | |
+ | help: use `and(..)` instead: `and(ext_opt)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:59:13
+ |
+LL | let _ = deep.0.or_else(|| None);
+ | ^^^^^^^----------------
+ | |
+ | help: use `or(..)` instead: `or(None)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:60:13
+ |
+LL | let _ = deep.0.get_or_insert_with(|| 2);
+ | ^^^^^^^------------------------
+ | |
+ | help: use `get_or_insert(..)` instead: `get_or_insert(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:61:13
+ |
+LL | let _ = deep.0.ok_or_else(|| 2);
+ | ^^^^^^^----------------
+ | |
+ | help: use `ok_or(..)` instead: `ok_or(2)`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:81:28
+ |
+LL | let _: Option<usize> = None.or_else(|| Some(3));
+ | ^^^^^-------------------
+ | |
+ | help: use `or(..)` instead: `or(Some(3))`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:82:13
+ |
+LL | let _ = deep.0.or_else(|| Some(3));
+ | ^^^^^^^-------------------
+ | |
+ | help: use `or(..)` instead: `or(Some(3))`
+
+error: unnecessary closure used to substitute value for `Option::None`
+ --> $DIR/unnecessary_lazy_eval.rs:83:13
+ |
+LL | let _ = opt.or_else(|| Some(3));
+ | ^^^^-------------------
+ | |
+ | help: use `or(..)` instead: `or(Some(3))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:89:13
+ |
+LL | let _ = res2.unwrap_or_else(|_| 2);
+ | ^^^^^---------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:90:13
+ |
+LL | let _ = res2.unwrap_or_else(|_| astronomers_pi);
+ | ^^^^^----------------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(astronomers_pi)`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:91:13
+ |
+LL | let _ = res2.unwrap_or_else(|_| ext_str.some_field);
+ | ^^^^^--------------------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(ext_str.some_field)`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:113:35
+ |
+LL | let _: Result<usize, usize> = res.and_then(|_| Err(2));
+ | ^^^^--------------------
+ | |
+ | help: use `and(..)` instead: `and(Err(2))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:114:35
+ |
+LL | let _: Result<usize, usize> = res.and_then(|_| Err(astronomers_pi));
+ | ^^^^---------------------------------
+ | |
+ | help: use `and(..)` instead: `and(Err(astronomers_pi))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:115:35
+ |
+LL | let _: Result<usize, usize> = res.and_then(|_| Err(ext_str.some_field));
+ | ^^^^-------------------------------------
+ | |
+ | help: use `and(..)` instead: `and(Err(ext_str.some_field))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:117:35
+ |
+LL | let _: Result<usize, usize> = res.or_else(|_| Ok(2));
+ | ^^^^------------------
+ | |
+ | help: use `or(..)` instead: `or(Ok(2))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:118:35
+ |
+LL | let _: Result<usize, usize> = res.or_else(|_| Ok(astronomers_pi));
+ | ^^^^-------------------------------
+ | |
+ | help: use `or(..)` instead: `or(Ok(astronomers_pi))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:119:35
+ |
+LL | let _: Result<usize, usize> = res.or_else(|_| Ok(ext_str.some_field));
+ | ^^^^-----------------------------------
+ | |
+ | help: use `or(..)` instead: `or(Ok(ext_str.some_field))`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval.rs:120:35
+ |
+LL | let _: Result<usize, usize> = res.
+ | ___________________________________^
+LL | | // some lines
+LL | | // some lines
+LL | | // some lines
+... |
+LL | | // some lines
+LL | | or_else(|_| Ok(ext_str.some_field));
+ | |_________----------------------------------^
+ | |
+ | help: use `or(..)` instead: `or(Ok(ext_str.some_field))`
+
+error: aborting due to 34 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.rs b/src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.rs
new file mode 100644
index 000000000..b05dd143b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.rs
@@ -0,0 +1,22 @@
+#![warn(clippy::unnecessary_lazy_evaluations)]
+
+struct Deep(Option<usize>);
+
+#[derive(Copy, Clone)]
+struct SomeStruct {
+ some_field: usize,
+}
+
+fn main() {
+ // fix will break type inference
+ let _ = Ok(1).unwrap_or_else(|()| 2);
+ mod e {
+ pub struct E;
+ }
+ let _ = Ok(1).unwrap_or_else(|e::E| 2);
+ let _ = Ok(1).unwrap_or_else(|SomeStruct { .. }| 2);
+
+ // Fix #6343
+ let arr = [(Some(1),)];
+ Some(&0).and_then(|&i| arr[i].0);
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.stderr b/src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.stderr
new file mode 100644
index 000000000..20acab6e8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval_unfixable.stderr
@@ -0,0 +1,28 @@
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval_unfixable.rs:12:13
+ |
+LL | let _ = Ok(1).unwrap_or_else(|()| 2);
+ | ^^^^^^----------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+ |
+ = note: `-D clippy::unnecessary-lazy-evaluations` implied by `-D warnings`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval_unfixable.rs:16:13
+ |
+LL | let _ = Ok(1).unwrap_or_else(|e::E| 2);
+ | ^^^^^^------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+
+error: unnecessary closure used to substitute value for `Result::Err`
+ --> $DIR/unnecessary_lazy_eval_unfixable.rs:17:13
+ |
+LL | let _ = Ok(1).unwrap_or_else(|SomeStruct { .. }| 2);
+ | ^^^^^^-------------------------------------
+ | |
+ | help: use `unwrap_or(..)` instead: `unwrap_or(2)`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.fixed b/src/tools/clippy/tests/ui/unnecessary_operation.fixed
new file mode 100644
index 000000000..bf0ec8deb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.fixed
@@ -0,0 +1,79 @@
+// run-rustfix
+
+#![feature(box_syntax)]
+#![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)]
+#![warn(clippy::unnecessary_operation)]
+
+struct Tuple(i32);
+struct Struct {
+ field: i32,
+}
+enum Enum {
+ Tuple(i32),
+ Struct { field: i32 },
+}
+struct DropStruct {
+ field: i32,
+}
+impl Drop for DropStruct {
+ fn drop(&mut self) {}
+}
+struct DropTuple(i32);
+impl Drop for DropTuple {
+ fn drop(&mut self) {}
+}
+enum DropEnum {
+ Tuple(i32),
+ Struct { field: i32 },
+}
+impl Drop for DropEnum {
+ fn drop(&mut self) {}
+}
+struct FooString {
+ s: String,
+}
+
+fn get_number() -> i32 {
+ 0
+}
+
+fn get_usize() -> usize {
+ 0
+}
+fn get_struct() -> Struct {
+ Struct { field: 0 }
+}
+fn get_drop_struct() -> DropStruct {
+ DropStruct { field: 0 }
+}
+
+fn main() {
+ get_number();
+ get_number();
+ get_struct();
+ get_number();
+ get_number();
+ 5;get_number();
+ get_number();
+ get_number();
+ 5;6;get_number();
+ get_number();
+ get_number();
+ get_number();
+ 5;get_number();
+ 42;get_number();
+ assert!([42, 55].len() > get_usize());
+ 42;get_number();
+ get_number();
+ assert!([42; 55].len() > get_usize());
+ get_number();
+ String::from("blah");
+
+ // Do not warn
+ DropTuple(get_number());
+ DropStruct { field: get_number() };
+ DropStruct { field: get_number() };
+ DropStruct { ..get_drop_struct() };
+ DropEnum::Tuple(get_number());
+ DropEnum::Struct { field: get_number() };
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.rs b/src/tools/clippy/tests/ui/unnecessary_operation.rs
new file mode 100644
index 000000000..08cb9ab52
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.rs
@@ -0,0 +1,83 @@
+// run-rustfix
+
+#![feature(box_syntax)]
+#![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)]
+#![warn(clippy::unnecessary_operation)]
+
+struct Tuple(i32);
+struct Struct {
+ field: i32,
+}
+enum Enum {
+ Tuple(i32),
+ Struct { field: i32 },
+}
+struct DropStruct {
+ field: i32,
+}
+impl Drop for DropStruct {
+ fn drop(&mut self) {}
+}
+struct DropTuple(i32);
+impl Drop for DropTuple {
+ fn drop(&mut self) {}
+}
+enum DropEnum {
+ Tuple(i32),
+ Struct { field: i32 },
+}
+impl Drop for DropEnum {
+ fn drop(&mut self) {}
+}
+struct FooString {
+ s: String,
+}
+
+fn get_number() -> i32 {
+ 0
+}
+
+fn get_usize() -> usize {
+ 0
+}
+fn get_struct() -> Struct {
+ Struct { field: 0 }
+}
+fn get_drop_struct() -> DropStruct {
+ DropStruct { field: 0 }
+}
+
+fn main() {
+ Tuple(get_number());
+ Struct { field: get_number() };
+ Struct { ..get_struct() };
+ Enum::Tuple(get_number());
+ Enum::Struct { field: get_number() };
+ 5 + get_number();
+ *&get_number();
+ &get_number();
+ (5, 6, get_number());
+ box get_number();
+ get_number()..;
+ ..get_number();
+ 5..get_number();
+ [42, get_number()];
+ [42, 55][get_usize()];
+ (42, get_number()).1;
+ [get_number(); 55];
+ [42; 55][get_usize()];
+ {
+ get_number()
+ };
+ FooString {
+ s: String::from("blah"),
+ };
+
+ // Do not warn
+ DropTuple(get_number());
+ DropStruct { field: get_number() };
+ DropStruct { field: get_number() };
+ DropStruct { ..get_drop_struct() };
+ DropEnum::Tuple(get_number());
+ DropEnum::Struct { field: get_number() };
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.stderr b/src/tools/clippy/tests/ui/unnecessary_operation.stderr
new file mode 100644
index 000000000..f66d08ecb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_operation.stderr
@@ -0,0 +1,128 @@
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:51:5
+ |
+LL | Tuple(get_number());
+ | ^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+ |
+ = note: `-D clippy::unnecessary-operation` implied by `-D warnings`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:52:5
+ |
+LL | Struct { field: get_number() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:53:5
+ |
+LL | Struct { ..get_struct() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_struct();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:54:5
+ |
+LL | Enum::Tuple(get_number());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:55:5
+ |
+LL | Enum::Struct { field: get_number() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:56:5
+ |
+LL | 5 + get_number();
+ | ^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:57:5
+ |
+LL | *&get_number();
+ | ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:58:5
+ |
+LL | &get_number();
+ | ^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:59:5
+ |
+LL | (5, 6, get_number());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;6;get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:60:5
+ |
+LL | box get_number();
+ | ^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:61:5
+ |
+LL | get_number()..;
+ | ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:62:5
+ |
+LL | ..get_number();
+ | ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:63:5
+ |
+LL | 5..get_number();
+ | ^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:64:5
+ |
+LL | [42, get_number()];
+ | ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:65:5
+ |
+LL | [42, 55][get_usize()];
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42, 55].len() > get_usize());`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:66:5
+ |
+LL | (42, get_number()).1;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:67:5
+ |
+LL | [get_number(); 55];
+ | ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:68:5
+ |
+LL | [42; 55][get_usize()];
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42; 55].len() > get_usize());`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:69:5
+ |
+LL | / {
+LL | | get_number()
+LL | | };
+ | |______^ help: statement can be reduced to: `get_number();`
+
+error: unnecessary operation
+ --> $DIR/unnecessary_operation.rs:72:5
+ |
+LL | / FooString {
+LL | | s: String::from("blah"),
+LL | | };
+ | |______^ help: statement can be reduced to: `String::from("blah");`
+
+error: aborting due to 20 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.fixed b/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.fixed
new file mode 100644
index 000000000..f95f91329
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.fixed
@@ -0,0 +1,22 @@
+// run-rustfix
+
+#![warn(clippy::unnecessary_owned_empty_strings)]
+
+fn ref_str_argument(_value: &str) {}
+
+#[allow(clippy::ptr_arg)]
+fn ref_string_argument(_value: &String) {}
+
+fn main() {
+ // should be linted
+ ref_str_argument("");
+
+ // should be linted
+ ref_str_argument("");
+
+ // should not be linted
+ ref_str_argument("");
+
+ // should not be linted
+ ref_string_argument(&String::new());
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.rs b/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.rs
new file mode 100644
index 000000000..0cbdc151e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.rs
@@ -0,0 +1,22 @@
+// run-rustfix
+
+#![warn(clippy::unnecessary_owned_empty_strings)]
+
+fn ref_str_argument(_value: &str) {}
+
+#[allow(clippy::ptr_arg)]
+fn ref_string_argument(_value: &String) {}
+
+fn main() {
+ // should be linted
+ ref_str_argument(&String::new());
+
+ // should be linted
+ ref_str_argument(&String::from(""));
+
+ // should not be linted
+ ref_str_argument("");
+
+ // should not be linted
+ ref_string_argument(&String::new());
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.stderr b/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.stderr
new file mode 100644
index 000000000..46bc4597b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_owned_empty_strings.stderr
@@ -0,0 +1,16 @@
+error: usage of `&String::new()` for a function expecting a `&str` argument
+ --> $DIR/unnecessary_owned_empty_strings.rs:12:22
+ |
+LL | ref_str_argument(&String::new());
+ | ^^^^^^^^^^^^^^ help: try: `""`
+ |
+ = note: `-D clippy::unnecessary-owned-empty-strings` implied by `-D warnings`
+
+error: usage of `&String::from("")` for a function expecting a `&str` argument
+ --> $DIR/unnecessary_owned_empty_strings.rs:15:22
+ |
+LL | ref_str_argument(&String::from(""));
+ | ^^^^^^^^^^^^^^^^^ help: try: `""`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_self_imports.fixed b/src/tools/clippy/tests/ui/unnecessary_self_imports.fixed
new file mode 100644
index 000000000..1185eaa1d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_self_imports.fixed
@@ -0,0 +1,10 @@
+// run-rustfix
+#![warn(clippy::unnecessary_self_imports)]
+#![allow(unused_imports, dead_code)]
+
+use std::collections::hash_map::{self, *};
+use std::fs as alias;
+use std::io::{self, Read};
+use std::rc;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unnecessary_self_imports.rs b/src/tools/clippy/tests/ui/unnecessary_self_imports.rs
new file mode 100644
index 000000000..56bfbc094
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_self_imports.rs
@@ -0,0 +1,10 @@
+// run-rustfix
+#![warn(clippy::unnecessary_self_imports)]
+#![allow(unused_imports, dead_code)]
+
+use std::collections::hash_map::{self, *};
+use std::fs::{self as alias};
+use std::io::{self, Read};
+use std::rc::{self};
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unnecessary_self_imports.stderr b/src/tools/clippy/tests/ui/unnecessary_self_imports.stderr
new file mode 100644
index 000000000..83a5618c9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_self_imports.stderr
@@ -0,0 +1,23 @@
+error: import ending with `::{self}`
+ --> $DIR/unnecessary_self_imports.rs:6:1
+ |
+LL | use std::fs::{self as alias};
+ | ^^^^^^^^^--------------------
+ | |
+ | help: consider omitting `::{self}`: `fs as alias;`
+ |
+ = note: `-D clippy::unnecessary-self-imports` implied by `-D warnings`
+ = note: this will slightly change semantics; any non-module items at the same path will also be imported
+
+error: import ending with `::{self}`
+ --> $DIR/unnecessary_self_imports.rs:8:1
+ |
+LL | use std::rc::{self};
+ | ^^^^^^^^^-----------
+ | |
+ | help: consider omitting `::{self}`: `rc;`
+ |
+ = note: this will slightly change semantics; any non-module items at the same path will also be imported
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_sort_by.fixed b/src/tools/clippy/tests/ui/unnecessary_sort_by.fixed
new file mode 100644
index 000000000..21e2da474
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_sort_by.fixed
@@ -0,0 +1,103 @@
+// run-rustfix
+
+#![allow(clippy::stable_sort_primitive)]
+
+use std::cell::Ref;
+
+fn unnecessary_sort_by() {
+ fn id(x: isize) -> isize {
+ x
+ }
+
+ let mut vec: Vec<isize> = vec![3, 6, 1, 2, 5];
+ // Forward examples
+ vec.sort();
+ vec.sort_unstable();
+ vec.sort_by_key(|a| (a + 5).abs());
+ vec.sort_unstable_by_key(|a| id(-a));
+ // Reverse examples
+ vec.sort_by(|a, b| b.cmp(a)); // not linted to avoid suggesting `Reverse(b)` which would borrow
+ vec.sort_by_key(|b| std::cmp::Reverse((b + 5).abs()));
+ vec.sort_unstable_by_key(|b| std::cmp::Reverse(id(-b)));
+ // Negative examples (shouldn't be changed)
+ let c = &7;
+ vec.sort_by(|a, b| (b - a).cmp(&(a - b)));
+ vec.sort_by(|_, b| b.cmp(&5));
+ vec.sort_by(|_, b| b.cmp(c));
+ vec.sort_unstable_by(|a, _| a.cmp(c));
+
+ // Vectors of references are fine as long as the resulting key does not borrow
+ let mut vec: Vec<&&&isize> = vec![&&&3, &&&6, &&&1, &&&2, &&&5];
+ vec.sort_by_key(|a| (***a).abs());
+ vec.sort_unstable_by_key(|a| (***a).abs());
+ // `Reverse(b)` would borrow in the following cases, don't lint
+ vec.sort_by(|a, b| b.cmp(a));
+ vec.sort_unstable_by(|a, b| b.cmp(a));
+
+ // No warning if element does not implement `Ord`
+ let mut vec: Vec<Ref<usize>> = Vec::new();
+ vec.sort_unstable_by(|a, b| a.cmp(b));
+}
+
+// Do not suggest returning a reference to the closure parameter of `Vec::sort_by_key`
+mod issue_5754 {
+ #[derive(Clone, Copy)]
+ struct Test(usize);
+
+ #[derive(PartialOrd, Ord, PartialEq, Eq)]
+ struct Wrapper<'a>(&'a usize);
+
+ impl Test {
+ fn name(&self) -> &usize {
+ &self.0
+ }
+
+ fn wrapped(&self) -> Wrapper<'_> {
+ Wrapper(&self.0)
+ }
+ }
+
+ pub fn test() {
+ let mut args: Vec<Test> = vec![];
+
+ // Forward
+ args.sort_by(|a, b| a.name().cmp(b.name()));
+ args.sort_by(|a, b| a.wrapped().cmp(&b.wrapped()));
+ args.sort_unstable_by(|a, b| a.name().cmp(b.name()));
+ args.sort_unstable_by(|a, b| a.wrapped().cmp(&b.wrapped()));
+ // Reverse
+ args.sort_by(|a, b| b.name().cmp(a.name()));
+ args.sort_by(|a, b| b.wrapped().cmp(&a.wrapped()));
+ args.sort_unstable_by(|a, b| b.name().cmp(a.name()));
+ args.sort_unstable_by(|a, b| b.wrapped().cmp(&a.wrapped()));
+ }
+}
+
+// The closure parameter is not dereferenced anymore, so non-Copy types can be linted
+mod issue_6001 {
+ struct Test(String);
+
+ impl Test {
+ // Return an owned type so that we don't hit the fix for 5754
+ fn name(&self) -> String {
+ self.0.clone()
+ }
+ }
+
+ pub fn test() {
+ let mut args: Vec<Test> = vec![];
+
+ // Forward
+ args.sort_by_key(|a| a.name());
+ args.sort_unstable_by_key(|a| a.name());
+ // Reverse
+ args.sort_by_key(|b| std::cmp::Reverse(b.name()));
+ args.sort_unstable_by_key(|b| std::cmp::Reverse(b.name()));
+ }
+}
+
+fn main() {
+ unnecessary_sort_by();
+ issue_5754::test();
+ issue_6001::test();
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_sort_by.rs b/src/tools/clippy/tests/ui/unnecessary_sort_by.rs
new file mode 100644
index 000000000..3365bf6e1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_sort_by.rs
@@ -0,0 +1,103 @@
+// run-rustfix
+
+#![allow(clippy::stable_sort_primitive)]
+
+use std::cell::Ref;
+
+fn unnecessary_sort_by() {
+ fn id(x: isize) -> isize {
+ x
+ }
+
+ let mut vec: Vec<isize> = vec![3, 6, 1, 2, 5];
+ // Forward examples
+ vec.sort_by(|a, b| a.cmp(b));
+ vec.sort_unstable_by(|a, b| a.cmp(b));
+ vec.sort_by(|a, b| (a + 5).abs().cmp(&(b + 5).abs()));
+ vec.sort_unstable_by(|a, b| id(-a).cmp(&id(-b)));
+ // Reverse examples
+ vec.sort_by(|a, b| b.cmp(a)); // not linted to avoid suggesting `Reverse(b)` which would borrow
+ vec.sort_by(|a, b| (b + 5).abs().cmp(&(a + 5).abs()));
+ vec.sort_unstable_by(|a, b| id(-b).cmp(&id(-a)));
+ // Negative examples (shouldn't be changed)
+ let c = &7;
+ vec.sort_by(|a, b| (b - a).cmp(&(a - b)));
+ vec.sort_by(|_, b| b.cmp(&5));
+ vec.sort_by(|_, b| b.cmp(c));
+ vec.sort_unstable_by(|a, _| a.cmp(c));
+
+ // Vectors of references are fine as long as the resulting key does not borrow
+ let mut vec: Vec<&&&isize> = vec![&&&3, &&&6, &&&1, &&&2, &&&5];
+ vec.sort_by(|a, b| (***a).abs().cmp(&(***b).abs()));
+ vec.sort_unstable_by(|a, b| (***a).abs().cmp(&(***b).abs()));
+ // `Reverse(b)` would borrow in the following cases, don't lint
+ vec.sort_by(|a, b| b.cmp(a));
+ vec.sort_unstable_by(|a, b| b.cmp(a));
+
+ // No warning if element does not implement `Ord`
+ let mut vec: Vec<Ref<usize>> = Vec::new();
+ vec.sort_unstable_by(|a, b| a.cmp(b));
+}
+
+// Do not suggest returning a reference to the closure parameter of `Vec::sort_by_key`
+mod issue_5754 {
+ #[derive(Clone, Copy)]
+ struct Test(usize);
+
+ #[derive(PartialOrd, Ord, PartialEq, Eq)]
+ struct Wrapper<'a>(&'a usize);
+
+ impl Test {
+ fn name(&self) -> &usize {
+ &self.0
+ }
+
+ fn wrapped(&self) -> Wrapper<'_> {
+ Wrapper(&self.0)
+ }
+ }
+
+ pub fn test() {
+ let mut args: Vec<Test> = vec![];
+
+ // Forward
+ args.sort_by(|a, b| a.name().cmp(b.name()));
+ args.sort_by(|a, b| a.wrapped().cmp(&b.wrapped()));
+ args.sort_unstable_by(|a, b| a.name().cmp(b.name()));
+ args.sort_unstable_by(|a, b| a.wrapped().cmp(&b.wrapped()));
+ // Reverse
+ args.sort_by(|a, b| b.name().cmp(a.name()));
+ args.sort_by(|a, b| b.wrapped().cmp(&a.wrapped()));
+ args.sort_unstable_by(|a, b| b.name().cmp(a.name()));
+ args.sort_unstable_by(|a, b| b.wrapped().cmp(&a.wrapped()));
+ }
+}
+
+// The closure parameter is not dereferenced anymore, so non-Copy types can be linted
+mod issue_6001 {
+ struct Test(String);
+
+ impl Test {
+ // Return an owned type so that we don't hit the fix for 5754
+ fn name(&self) -> String {
+ self.0.clone()
+ }
+ }
+
+ pub fn test() {
+ let mut args: Vec<Test> = vec![];
+
+ // Forward
+ args.sort_by(|a, b| a.name().cmp(&b.name()));
+ args.sort_unstable_by(|a, b| a.name().cmp(&b.name()));
+ // Reverse
+ args.sort_by(|a, b| b.name().cmp(&a.name()));
+ args.sort_unstable_by(|a, b| b.name().cmp(&a.name()));
+ }
+}
+
+fn main() {
+ unnecessary_sort_by();
+ issue_5754::test();
+ issue_6001::test();
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_sort_by.stderr b/src/tools/clippy/tests/ui/unnecessary_sort_by.stderr
new file mode 100644
index 000000000..89da5e7ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_sort_by.stderr
@@ -0,0 +1,76 @@
+error: use Vec::sort here instead
+ --> $DIR/unnecessary_sort_by.rs:14:5
+ |
+LL | vec.sort_by(|a, b| a.cmp(b));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort()`
+ |
+ = note: `-D clippy::unnecessary-sort-by` implied by `-D warnings`
+
+error: use Vec::sort here instead
+ --> $DIR/unnecessary_sort_by.rs:15:5
+ |
+LL | vec.sort_unstable_by(|a, b| a.cmp(b));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_unstable()`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:16:5
+ |
+LL | vec.sort_by(|a, b| (a + 5).abs().cmp(&(b + 5).abs()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_by_key(|a| (a + 5).abs())`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:17:5
+ |
+LL | vec.sort_unstable_by(|a, b| id(-a).cmp(&id(-b)));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_unstable_by_key(|a| id(-a))`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:20:5
+ |
+LL | vec.sort_by(|a, b| (b + 5).abs().cmp(&(a + 5).abs()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_by_key(|b| std::cmp::Reverse((b + 5).abs()))`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:21:5
+ |
+LL | vec.sort_unstable_by(|a, b| id(-b).cmp(&id(-a)));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_unstable_by_key(|b| std::cmp::Reverse(id(-b)))`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:31:5
+ |
+LL | vec.sort_by(|a, b| (***a).abs().cmp(&(***b).abs()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_by_key(|a| (***a).abs())`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:32:5
+ |
+LL | vec.sort_unstable_by(|a, b| (***a).abs().cmp(&(***b).abs()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec.sort_unstable_by_key(|a| (***a).abs())`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:91:9
+ |
+LL | args.sort_by(|a, b| a.name().cmp(&b.name()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `args.sort_by_key(|a| a.name())`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:92:9
+ |
+LL | args.sort_unstable_by(|a, b| a.name().cmp(&b.name()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `args.sort_unstable_by_key(|a| a.name())`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:94:9
+ |
+LL | args.sort_by(|a, b| b.name().cmp(&a.name()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `args.sort_by_key(|b| std::cmp::Reverse(b.name()))`
+
+error: use Vec::sort_by_key here instead
+ --> $DIR/unnecessary_sort_by.rs:95:9
+ |
+LL | args.sort_unstable_by(|a, b| b.name().cmp(&a.name()));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `args.sort_unstable_by_key(|b| std::cmp::Reverse(b.name()))`
+
+error: aborting due to 12 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed
new file mode 100644
index 000000000..f4f76cd3d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed
@@ -0,0 +1,331 @@
+// run-rustfix
+
+#![allow(clippy::ptr_arg)]
+#![warn(clippy::unnecessary_to_owned)]
+#![feature(custom_inner_attributes)]
+
+use std::borrow::Cow;
+use std::ffi::{CStr, CString, OsStr, OsString};
+use std::ops::Deref;
+
+#[derive(Clone)]
+struct X(String);
+
+impl Deref for X {
+ type Target = [u8];
+ fn deref(&self) -> &[u8] {
+ self.0.as_bytes()
+ }
+}
+
+impl AsRef<str> for X {
+ fn as_ref(&self) -> &str {
+ self.0.as_str()
+ }
+}
+
+impl ToString for X {
+ fn to_string(&self) -> String {
+ self.0.to_string()
+ }
+}
+
+impl X {
+ fn join(&self, other: impl AsRef<str>) -> Self {
+ let mut s = self.0.clone();
+ s.push_str(other.as_ref());
+ Self(s)
+ }
+}
+
+#[allow(dead_code)]
+#[derive(Clone)]
+enum FileType {
+ Account,
+ PrivateKey,
+ Certificate,
+}
+
+fn main() {
+ let c_str = CStr::from_bytes_with_nul(&[0]).unwrap();
+ let os_str = OsStr::new("x");
+ let path = std::path::Path::new("x");
+ let s = "x";
+ let array = ["x"];
+ let array_ref = &["x"];
+ let slice = &["x"][..];
+ let x = X(String::from("x"));
+ let x_ref = &x;
+
+ require_c_str(&Cow::from(c_str));
+ require_c_str(c_str);
+
+ require_os_str(os_str);
+ require_os_str(&Cow::from(os_str));
+ require_os_str(os_str);
+
+ require_path(path);
+ require_path(&Cow::from(path));
+ require_path(path);
+
+ require_str(s);
+ require_str(&Cow::from(s));
+ require_str(s);
+ require_str(x_ref.as_ref());
+
+ require_slice(slice);
+ require_slice(&Cow::from(slice));
+ require_slice(array.as_ref());
+ require_slice(array_ref.as_ref());
+ require_slice(slice);
+ require_slice(&x_ref.to_owned()); // No longer flagged because of #8759.
+
+ require_x(&Cow::<X>::Owned(x.clone()));
+ require_x(&x_ref.to_owned()); // No longer flagged because of #8759.
+
+ require_deref_c_str(c_str);
+ require_deref_os_str(os_str);
+ require_deref_path(path);
+ require_deref_str(s);
+ require_deref_slice(slice);
+
+ require_impl_deref_c_str(c_str);
+ require_impl_deref_os_str(os_str);
+ require_impl_deref_path(path);
+ require_impl_deref_str(s);
+ require_impl_deref_slice(slice);
+
+ require_deref_str_slice(s, slice);
+ require_deref_slice_str(slice, s);
+
+ require_as_ref_c_str(c_str);
+ require_as_ref_os_str(os_str);
+ require_as_ref_path(path);
+ require_as_ref_str(s);
+ require_as_ref_str(&x);
+ require_as_ref_slice(array);
+ require_as_ref_slice(array_ref);
+ require_as_ref_slice(slice);
+
+ require_impl_as_ref_c_str(c_str);
+ require_impl_as_ref_os_str(os_str);
+ require_impl_as_ref_path(path);
+ require_impl_as_ref_str(s);
+ require_impl_as_ref_str(&x);
+ require_impl_as_ref_slice(array);
+ require_impl_as_ref_slice(array_ref);
+ require_impl_as_ref_slice(slice);
+
+ require_as_ref_str_slice(s, array);
+ require_as_ref_str_slice(s, array_ref);
+ require_as_ref_str_slice(s, slice);
+ require_as_ref_slice_str(array, s);
+ require_as_ref_slice_str(array_ref, s);
+ require_as_ref_slice_str(slice, s);
+
+ let _ = x.join(x_ref);
+
+ let _ = slice.iter().copied();
+ let _ = slice.iter().copied();
+ let _ = [std::path::PathBuf::new()][..].iter().cloned();
+ let _ = [std::path::PathBuf::new()][..].iter().cloned();
+
+ let _ = slice.iter().copied();
+ let _ = slice.iter().copied();
+ let _ = [std::path::PathBuf::new()][..].iter().cloned();
+ let _ = [std::path::PathBuf::new()][..].iter().cloned();
+
+ let _ = check_files(&[FileType::Account]);
+
+ // negative tests
+ require_string(&s.to_string());
+ require_string(&Cow::from(s).into_owned());
+ require_string(&s.to_owned());
+ require_string(&x_ref.to_string());
+
+ // `X` isn't copy.
+ require_slice(&x.to_owned());
+ require_deref_slice(x.to_owned());
+
+ // The following should be flagged by `redundant_clone`, but not by this lint.
+ require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap());
+ require_os_str(&OsString::from("x"));
+ require_path(&std::path::PathBuf::from("x"));
+ require_str(&String::from("x"));
+ require_slice(&[String::from("x")]);
+}
+
+fn require_c_str(_: &CStr) {}
+fn require_os_str(_: &OsStr) {}
+fn require_path(_: &std::path::Path) {}
+fn require_str(_: &str) {}
+fn require_slice<T>(_: &[T]) {}
+fn require_x(_: &X) {}
+
+fn require_deref_c_str<T: Deref<Target = CStr>>(_: T) {}
+fn require_deref_os_str<T: Deref<Target = OsStr>>(_: T) {}
+fn require_deref_path<T: Deref<Target = std::path::Path>>(_: T) {}
+fn require_deref_str<T: Deref<Target = str>>(_: T) {}
+fn require_deref_slice<T, U: Deref<Target = [T]>>(_: U) {}
+
+fn require_impl_deref_c_str(_: impl Deref<Target = CStr>) {}
+fn require_impl_deref_os_str(_: impl Deref<Target = OsStr>) {}
+fn require_impl_deref_path(_: impl Deref<Target = std::path::Path>) {}
+fn require_impl_deref_str(_: impl Deref<Target = str>) {}
+fn require_impl_deref_slice<T>(_: impl Deref<Target = [T]>) {}
+
+fn require_deref_str_slice<T: Deref<Target = str>, U, V: Deref<Target = [U]>>(_: T, _: V) {}
+fn require_deref_slice_str<T, U: Deref<Target = [T]>, V: Deref<Target = str>>(_: U, _: V) {}
+
+fn require_as_ref_c_str<T: AsRef<CStr>>(_: T) {}
+fn require_as_ref_os_str<T: AsRef<OsStr>>(_: T) {}
+fn require_as_ref_path<T: AsRef<std::path::Path>>(_: T) {}
+fn require_as_ref_str<T: AsRef<str>>(_: T) {}
+fn require_as_ref_slice<T, U: AsRef<[T]>>(_: U) {}
+
+fn require_impl_as_ref_c_str(_: impl AsRef<CStr>) {}
+fn require_impl_as_ref_os_str(_: impl AsRef<OsStr>) {}
+fn require_impl_as_ref_path(_: impl AsRef<std::path::Path>) {}
+fn require_impl_as_ref_str(_: impl AsRef<str>) {}
+fn require_impl_as_ref_slice<T>(_: impl AsRef<[T]>) {}
+
+fn require_as_ref_str_slice<T: AsRef<str>, U, V: AsRef<[U]>>(_: T, _: V) {}
+fn require_as_ref_slice_str<T, U: AsRef<[T]>, V: AsRef<str>>(_: U, _: V) {}
+
+// `check_files` is based on:
+// https://github.com/breard-r/acmed/blob/1f0dcc32aadbc5e52de6d23b9703554c0f925113/acmed/src/storage.rs#L262
+fn check_files(file_types: &[FileType]) -> bool {
+ for t in file_types {
+ let path = match get_file_path(t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn get_file_path(_file_type: &FileType) -> Result<std::path::PathBuf, std::io::Error> {
+ Ok(std::path::PathBuf::new())
+}
+
+fn require_string(_: &String) {}
+
+fn _msrv_1_35() {
+ #![clippy::msrv = "1.35"]
+ // `copied` was stabilized in 1.36, so clippy should use `cloned`.
+ let _ = &["x"][..].iter().cloned();
+}
+
+fn _msrv_1_36() {
+ #![clippy::msrv = "1.36"]
+ let _ = &["x"][..].iter().copied();
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/8507
+mod issue_8507 {
+ #![allow(dead_code)]
+
+ struct Opaque<P>(P);
+
+ pub trait Abstracted {}
+
+ impl<P> Abstracted for Opaque<P> {}
+
+ fn build<P>(p: P) -> Opaque<P>
+ where
+ P: AsRef<str>,
+ {
+ Opaque(p)
+ }
+
+ // Should not lint.
+ fn test_str(s: &str) -> Box<dyn Abstracted> {
+ Box::new(build(s.to_string()))
+ }
+
+ // Should not lint.
+ fn test_x(x: super::X) -> Box<dyn Abstracted> {
+ Box::new(build(x))
+ }
+
+ #[derive(Clone, Copy)]
+ struct Y(&'static str);
+
+ impl AsRef<str> for Y {
+ fn as_ref(&self) -> &str {
+ self.0
+ }
+ }
+
+ impl ToString for Y {
+ fn to_string(&self) -> String {
+ self.0.to_string()
+ }
+ }
+
+ // Should lint because Y is copy.
+ fn test_y(y: Y) -> Box<dyn Abstracted> {
+ Box::new(build(y))
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/8759
+mod issue_8759 {
+ #![allow(dead_code)]
+
+ #[derive(Default)]
+ struct View {}
+
+ impl std::borrow::ToOwned for View {
+ type Owned = View;
+ fn to_owned(&self) -> Self::Owned {
+ View {}
+ }
+ }
+
+ #[derive(Default)]
+ struct RenderWindow {
+ default_view: View,
+ }
+
+ impl RenderWindow {
+ fn default_view(&self) -> &View {
+ &self.default_view
+ }
+ fn set_view(&mut self, _view: &View) {}
+ }
+
+ fn main() {
+ let mut rw = RenderWindow::default();
+ rw.set_view(&rw.default_view().to_owned());
+ }
+}
+
+mod issue_8759_variant {
+ #![allow(dead_code)]
+
+ #[derive(Clone, Default)]
+ struct View {}
+
+ #[derive(Default)]
+ struct RenderWindow {
+ default_view: View,
+ }
+
+ impl RenderWindow {
+ fn default_view(&self) -> &View {
+ &self.default_view
+ }
+ fn set_view(&mut self, _view: &View) {}
+ }
+
+ fn main() {
+ let mut rw = RenderWindow::default();
+ rw.set_view(&rw.default_view().to_owned());
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs
new file mode 100644
index 000000000..fe09a489a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs
@@ -0,0 +1,331 @@
+// run-rustfix
+
+#![allow(clippy::ptr_arg)]
+#![warn(clippy::unnecessary_to_owned)]
+#![feature(custom_inner_attributes)]
+
+use std::borrow::Cow;
+use std::ffi::{CStr, CString, OsStr, OsString};
+use std::ops::Deref;
+
+#[derive(Clone)]
+struct X(String);
+
+impl Deref for X {
+ type Target = [u8];
+ fn deref(&self) -> &[u8] {
+ self.0.as_bytes()
+ }
+}
+
+impl AsRef<str> for X {
+ fn as_ref(&self) -> &str {
+ self.0.as_str()
+ }
+}
+
+impl ToString for X {
+ fn to_string(&self) -> String {
+ self.0.to_string()
+ }
+}
+
+impl X {
+ fn join(&self, other: impl AsRef<str>) -> Self {
+ let mut s = self.0.clone();
+ s.push_str(other.as_ref());
+ Self(s)
+ }
+}
+
+#[allow(dead_code)]
+#[derive(Clone)]
+enum FileType {
+ Account,
+ PrivateKey,
+ Certificate,
+}
+
+fn main() {
+ let c_str = CStr::from_bytes_with_nul(&[0]).unwrap();
+ let os_str = OsStr::new("x");
+ let path = std::path::Path::new("x");
+ let s = "x";
+ let array = ["x"];
+ let array_ref = &["x"];
+ let slice = &["x"][..];
+ let x = X(String::from("x"));
+ let x_ref = &x;
+
+ require_c_str(&Cow::from(c_str).into_owned());
+ require_c_str(&c_str.to_owned());
+
+ require_os_str(&os_str.to_os_string());
+ require_os_str(&Cow::from(os_str).into_owned());
+ require_os_str(&os_str.to_owned());
+
+ require_path(&path.to_path_buf());
+ require_path(&Cow::from(path).into_owned());
+ require_path(&path.to_owned());
+
+ require_str(&s.to_string());
+ require_str(&Cow::from(s).into_owned());
+ require_str(&s.to_owned());
+ require_str(&x_ref.to_string());
+
+ require_slice(&slice.to_vec());
+ require_slice(&Cow::from(slice).into_owned());
+ require_slice(&array.to_owned());
+ require_slice(&array_ref.to_owned());
+ require_slice(&slice.to_owned());
+ require_slice(&x_ref.to_owned()); // No longer flagged because of #8759.
+
+ require_x(&Cow::<X>::Owned(x.clone()).into_owned());
+ require_x(&x_ref.to_owned()); // No longer flagged because of #8759.
+
+ require_deref_c_str(c_str.to_owned());
+ require_deref_os_str(os_str.to_owned());
+ require_deref_path(path.to_owned());
+ require_deref_str(s.to_owned());
+ require_deref_slice(slice.to_owned());
+
+ require_impl_deref_c_str(c_str.to_owned());
+ require_impl_deref_os_str(os_str.to_owned());
+ require_impl_deref_path(path.to_owned());
+ require_impl_deref_str(s.to_owned());
+ require_impl_deref_slice(slice.to_owned());
+
+ require_deref_str_slice(s.to_owned(), slice.to_owned());
+ require_deref_slice_str(slice.to_owned(), s.to_owned());
+
+ require_as_ref_c_str(c_str.to_owned());
+ require_as_ref_os_str(os_str.to_owned());
+ require_as_ref_path(path.to_owned());
+ require_as_ref_str(s.to_owned());
+ require_as_ref_str(x.to_owned());
+ require_as_ref_slice(array.to_owned());
+ require_as_ref_slice(array_ref.to_owned());
+ require_as_ref_slice(slice.to_owned());
+
+ require_impl_as_ref_c_str(c_str.to_owned());
+ require_impl_as_ref_os_str(os_str.to_owned());
+ require_impl_as_ref_path(path.to_owned());
+ require_impl_as_ref_str(s.to_owned());
+ require_impl_as_ref_str(x.to_owned());
+ require_impl_as_ref_slice(array.to_owned());
+ require_impl_as_ref_slice(array_ref.to_owned());
+ require_impl_as_ref_slice(slice.to_owned());
+
+ require_as_ref_str_slice(s.to_owned(), array.to_owned());
+ require_as_ref_str_slice(s.to_owned(), array_ref.to_owned());
+ require_as_ref_str_slice(s.to_owned(), slice.to_owned());
+ require_as_ref_slice_str(array.to_owned(), s.to_owned());
+ require_as_ref_slice_str(array_ref.to_owned(), s.to_owned());
+ require_as_ref_slice_str(slice.to_owned(), s.to_owned());
+
+ let _ = x.join(&x_ref.to_string());
+
+ let _ = slice.to_vec().into_iter();
+ let _ = slice.to_owned().into_iter();
+ let _ = [std::path::PathBuf::new()][..].to_vec().into_iter();
+ let _ = [std::path::PathBuf::new()][..].to_owned().into_iter();
+
+ let _ = IntoIterator::into_iter(slice.to_vec());
+ let _ = IntoIterator::into_iter(slice.to_owned());
+ let _ = IntoIterator::into_iter([std::path::PathBuf::new()][..].to_vec());
+ let _ = IntoIterator::into_iter([std::path::PathBuf::new()][..].to_owned());
+
+ let _ = check_files(&[FileType::Account]);
+
+ // negative tests
+ require_string(&s.to_string());
+ require_string(&Cow::from(s).into_owned());
+ require_string(&s.to_owned());
+ require_string(&x_ref.to_string());
+
+ // `X` isn't copy.
+ require_slice(&x.to_owned());
+ require_deref_slice(x.to_owned());
+
+ // The following should be flagged by `redundant_clone`, but not by this lint.
+ require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned());
+ require_os_str(&OsString::from("x").to_os_string());
+ require_path(&std::path::PathBuf::from("x").to_path_buf());
+ require_str(&String::from("x").to_string());
+ require_slice(&[String::from("x")].to_owned());
+}
+
+fn require_c_str(_: &CStr) {}
+fn require_os_str(_: &OsStr) {}
+fn require_path(_: &std::path::Path) {}
+fn require_str(_: &str) {}
+fn require_slice<T>(_: &[T]) {}
+fn require_x(_: &X) {}
+
+fn require_deref_c_str<T: Deref<Target = CStr>>(_: T) {}
+fn require_deref_os_str<T: Deref<Target = OsStr>>(_: T) {}
+fn require_deref_path<T: Deref<Target = std::path::Path>>(_: T) {}
+fn require_deref_str<T: Deref<Target = str>>(_: T) {}
+fn require_deref_slice<T, U: Deref<Target = [T]>>(_: U) {}
+
+fn require_impl_deref_c_str(_: impl Deref<Target = CStr>) {}
+fn require_impl_deref_os_str(_: impl Deref<Target = OsStr>) {}
+fn require_impl_deref_path(_: impl Deref<Target = std::path::Path>) {}
+fn require_impl_deref_str(_: impl Deref<Target = str>) {}
+fn require_impl_deref_slice<T>(_: impl Deref<Target = [T]>) {}
+
+fn require_deref_str_slice<T: Deref<Target = str>, U, V: Deref<Target = [U]>>(_: T, _: V) {}
+fn require_deref_slice_str<T, U: Deref<Target = [T]>, V: Deref<Target = str>>(_: U, _: V) {}
+
+fn require_as_ref_c_str<T: AsRef<CStr>>(_: T) {}
+fn require_as_ref_os_str<T: AsRef<OsStr>>(_: T) {}
+fn require_as_ref_path<T: AsRef<std::path::Path>>(_: T) {}
+fn require_as_ref_str<T: AsRef<str>>(_: T) {}
+fn require_as_ref_slice<T, U: AsRef<[T]>>(_: U) {}
+
+fn require_impl_as_ref_c_str(_: impl AsRef<CStr>) {}
+fn require_impl_as_ref_os_str(_: impl AsRef<OsStr>) {}
+fn require_impl_as_ref_path(_: impl AsRef<std::path::Path>) {}
+fn require_impl_as_ref_str(_: impl AsRef<str>) {}
+fn require_impl_as_ref_slice<T>(_: impl AsRef<[T]>) {}
+
+fn require_as_ref_str_slice<T: AsRef<str>, U, V: AsRef<[U]>>(_: T, _: V) {}
+fn require_as_ref_slice_str<T, U: AsRef<[T]>, V: AsRef<str>>(_: U, _: V) {}
+
+// `check_files` is based on:
+// https://github.com/breard-r/acmed/blob/1f0dcc32aadbc5e52de6d23b9703554c0f925113/acmed/src/storage.rs#L262
+fn check_files(file_types: &[FileType]) -> bool {
+ for t in file_types.to_vec() {
+ let path = match get_file_path(&t) {
+ Ok(p) => p,
+ Err(_) => {
+ return false;
+ },
+ };
+ if !path.is_file() {
+ return false;
+ }
+ }
+ true
+}
+
+fn get_file_path(_file_type: &FileType) -> Result<std::path::PathBuf, std::io::Error> {
+ Ok(std::path::PathBuf::new())
+}
+
+fn require_string(_: &String) {}
+
+fn _msrv_1_35() {
+ #![clippy::msrv = "1.35"]
+ // `copied` was stabilized in 1.36, so clippy should use `cloned`.
+ let _ = &["x"][..].to_vec().into_iter();
+}
+
+fn _msrv_1_36() {
+ #![clippy::msrv = "1.36"]
+ let _ = &["x"][..].to_vec().into_iter();
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/8507
+mod issue_8507 {
+ #![allow(dead_code)]
+
+ struct Opaque<P>(P);
+
+ pub trait Abstracted {}
+
+ impl<P> Abstracted for Opaque<P> {}
+
+ fn build<P>(p: P) -> Opaque<P>
+ where
+ P: AsRef<str>,
+ {
+ Opaque(p)
+ }
+
+ // Should not lint.
+ fn test_str(s: &str) -> Box<dyn Abstracted> {
+ Box::new(build(s.to_string()))
+ }
+
+ // Should not lint.
+ fn test_x(x: super::X) -> Box<dyn Abstracted> {
+ Box::new(build(x))
+ }
+
+ #[derive(Clone, Copy)]
+ struct Y(&'static str);
+
+ impl AsRef<str> for Y {
+ fn as_ref(&self) -> &str {
+ self.0
+ }
+ }
+
+ impl ToString for Y {
+ fn to_string(&self) -> String {
+ self.0.to_string()
+ }
+ }
+
+ // Should lint because Y is copy.
+ fn test_y(y: Y) -> Box<dyn Abstracted> {
+ Box::new(build(y.to_string()))
+ }
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/8759
+mod issue_8759 {
+ #![allow(dead_code)]
+
+ #[derive(Default)]
+ struct View {}
+
+ impl std::borrow::ToOwned for View {
+ type Owned = View;
+ fn to_owned(&self) -> Self::Owned {
+ View {}
+ }
+ }
+
+ #[derive(Default)]
+ struct RenderWindow {
+ default_view: View,
+ }
+
+ impl RenderWindow {
+ fn default_view(&self) -> &View {
+ &self.default_view
+ }
+ fn set_view(&mut self, _view: &View) {}
+ }
+
+ fn main() {
+ let mut rw = RenderWindow::default();
+ rw.set_view(&rw.default_view().to_owned());
+ }
+}
+
+mod issue_8759_variant {
+ #![allow(dead_code)]
+
+ #[derive(Clone, Default)]
+ struct View {}
+
+ #[derive(Default)]
+ struct RenderWindow {
+ default_view: View,
+ }
+
+ impl RenderWindow {
+ fn default_view(&self) -> &View {
+ &self.default_view
+ }
+ fn set_view(&mut self, _view: &View) {}
+ }
+
+ fn main() {
+ let mut rw = RenderWindow::default();
+ rw.set_view(&rw.default_view().to_owned());
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr b/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr
new file mode 100644
index 000000000..243b4599d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr
@@ -0,0 +1,513 @@
+error: redundant clone
+ --> $DIR/unnecessary_to_owned.rs:151:64
+ |
+LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned());
+ | ^^^^^^^^^^^ help: remove this
+ |
+ = note: `-D clippy::redundant-clone` implied by `-D warnings`
+note: this value is dropped without further use
+ --> $DIR/unnecessary_to_owned.rs:151:20
+ |
+LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/unnecessary_to_owned.rs:152:40
+ |
+LL | require_os_str(&OsString::from("x").to_os_string());
+ | ^^^^^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/unnecessary_to_owned.rs:152:21
+ |
+LL | require_os_str(&OsString::from("x").to_os_string());
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/unnecessary_to_owned.rs:153:48
+ |
+LL | require_path(&std::path::PathBuf::from("x").to_path_buf());
+ | ^^^^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/unnecessary_to_owned.rs:153:19
+ |
+LL | require_path(&std::path::PathBuf::from("x").to_path_buf());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/unnecessary_to_owned.rs:154:35
+ |
+LL | require_str(&String::from("x").to_string());
+ | ^^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/unnecessary_to_owned.rs:154:18
+ |
+LL | require_str(&String::from("x").to_string());
+ | ^^^^^^^^^^^^^^^^^
+
+error: redundant clone
+ --> $DIR/unnecessary_to_owned.rs:155:39
+ |
+LL | require_slice(&[String::from("x")].to_owned());
+ | ^^^^^^^^^^^ help: remove this
+ |
+note: this value is dropped without further use
+ --> $DIR/unnecessary_to_owned.rs:155:20
+ |
+LL | require_slice(&[String::from("x")].to_owned());
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: unnecessary use of `into_owned`
+ --> $DIR/unnecessary_to_owned.rs:60:36
+ |
+LL | require_c_str(&Cow::from(c_str).into_owned());
+ | ^^^^^^^^^^^^^ help: remove this
+ |
+ = note: `-D clippy::unnecessary-to-owned` implied by `-D warnings`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:61:19
+ |
+LL | require_c_str(&c_str.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `c_str`
+
+error: unnecessary use of `to_os_string`
+ --> $DIR/unnecessary_to_owned.rs:63:20
+ |
+LL | require_os_str(&os_str.to_os_string());
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: use: `os_str`
+
+error: unnecessary use of `into_owned`
+ --> $DIR/unnecessary_to_owned.rs:64:38
+ |
+LL | require_os_str(&Cow::from(os_str).into_owned());
+ | ^^^^^^^^^^^^^ help: remove this
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:65:20
+ |
+LL | require_os_str(&os_str.to_owned());
+ | ^^^^^^^^^^^^^^^^^^ help: use: `os_str`
+
+error: unnecessary use of `to_path_buf`
+ --> $DIR/unnecessary_to_owned.rs:67:18
+ |
+LL | require_path(&path.to_path_buf());
+ | ^^^^^^^^^^^^^^^^^^^ help: use: `path`
+
+error: unnecessary use of `into_owned`
+ --> $DIR/unnecessary_to_owned.rs:68:34
+ |
+LL | require_path(&Cow::from(path).into_owned());
+ | ^^^^^^^^^^^^^ help: remove this
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:69:18
+ |
+LL | require_path(&path.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `path`
+
+error: unnecessary use of `to_string`
+ --> $DIR/unnecessary_to_owned.rs:71:17
+ |
+LL | require_str(&s.to_string());
+ | ^^^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `into_owned`
+ --> $DIR/unnecessary_to_owned.rs:72:30
+ |
+LL | require_str(&Cow::from(s).into_owned());
+ | ^^^^^^^^^^^^^ help: remove this
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:73:17
+ |
+LL | require_str(&s.to_owned());
+ | ^^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_string`
+ --> $DIR/unnecessary_to_owned.rs:74:17
+ |
+LL | require_str(&x_ref.to_string());
+ | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref.as_ref()`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:76:19
+ |
+LL | require_slice(&slice.to_vec());
+ | ^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `into_owned`
+ --> $DIR/unnecessary_to_owned.rs:77:36
+ |
+LL | require_slice(&Cow::from(slice).into_owned());
+ | ^^^^^^^^^^^^^ help: remove this
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:78:19
+ |
+LL | require_slice(&array.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `array.as_ref()`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:79:19
+ |
+LL | require_slice(&array_ref.to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref.as_ref()`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:80:19
+ |
+LL | require_slice(&slice.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `into_owned`
+ --> $DIR/unnecessary_to_owned.rs:83:42
+ |
+LL | require_x(&Cow::<X>::Owned(x.clone()).into_owned());
+ | ^^^^^^^^^^^^^ help: remove this
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:86:25
+ |
+LL | require_deref_c_str(c_str.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `c_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:87:26
+ |
+LL | require_deref_os_str(os_str.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `os_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:88:24
+ |
+LL | require_deref_path(path.to_owned());
+ | ^^^^^^^^^^^^^^^ help: use: `path`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:89:23
+ |
+LL | require_deref_str(s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:90:25
+ |
+LL | require_deref_slice(slice.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:92:30
+ |
+LL | require_impl_deref_c_str(c_str.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `c_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:93:31
+ |
+LL | require_impl_deref_os_str(os_str.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `os_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:94:29
+ |
+LL | require_impl_deref_path(path.to_owned());
+ | ^^^^^^^^^^^^^^^ help: use: `path`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:95:28
+ |
+LL | require_impl_deref_str(s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:96:30
+ |
+LL | require_impl_deref_slice(slice.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:98:29
+ |
+LL | require_deref_str_slice(s.to_owned(), slice.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:98:43
+ |
+LL | require_deref_str_slice(s.to_owned(), slice.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:99:29
+ |
+LL | require_deref_slice_str(slice.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:99:47
+ |
+LL | require_deref_slice_str(slice.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:101:26
+ |
+LL | require_as_ref_c_str(c_str.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `c_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:102:27
+ |
+LL | require_as_ref_os_str(os_str.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `os_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:103:25
+ |
+LL | require_as_ref_path(path.to_owned());
+ | ^^^^^^^^^^^^^^^ help: use: `path`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:104:24
+ |
+LL | require_as_ref_str(s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:105:24
+ |
+LL | require_as_ref_str(x.to_owned());
+ | ^^^^^^^^^^^^ help: use: `&x`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:106:26
+ |
+LL | require_as_ref_slice(array.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `array`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:107:26
+ |
+LL | require_as_ref_slice(array_ref.to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:108:26
+ |
+LL | require_as_ref_slice(slice.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:110:31
+ |
+LL | require_impl_as_ref_c_str(c_str.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `c_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:111:32
+ |
+LL | require_impl_as_ref_os_str(os_str.to_owned());
+ | ^^^^^^^^^^^^^^^^^ help: use: `os_str`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:112:30
+ |
+LL | require_impl_as_ref_path(path.to_owned());
+ | ^^^^^^^^^^^^^^^ help: use: `path`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:113:29
+ |
+LL | require_impl_as_ref_str(s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:114:29
+ |
+LL | require_impl_as_ref_str(x.to_owned());
+ | ^^^^^^^^^^^^ help: use: `&x`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:115:31
+ |
+LL | require_impl_as_ref_slice(array.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `array`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:116:31
+ |
+LL | require_impl_as_ref_slice(array_ref.to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:117:31
+ |
+LL | require_impl_as_ref_slice(slice.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:119:30
+ |
+LL | require_as_ref_str_slice(s.to_owned(), array.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:119:44
+ |
+LL | require_as_ref_str_slice(s.to_owned(), array.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `array`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:120:30
+ |
+LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:120:44
+ |
+LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:121:30
+ |
+LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:121:44
+ |
+LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:122:30
+ |
+LL | require_as_ref_slice_str(array.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `array`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:122:48
+ |
+LL | require_as_ref_slice_str(array.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:123:30
+ |
+LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:123:52
+ |
+LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:124:30
+ |
+LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^^^^^ help: use: `slice`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:124:48
+ |
+LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned());
+ | ^^^^^^^^^^^^ help: use: `s`
+
+error: unnecessary use of `to_string`
+ --> $DIR/unnecessary_to_owned.rs:126:20
+ |
+LL | let _ = x.join(&x_ref.to_string());
+ | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:128:13
+ |
+LL | let _ = slice.to_vec().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:129:13
+ |
+LL | let _ = slice.to_owned().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:130:13
+ |
+LL | let _ = [std::path::PathBuf::new()][..].to_vec().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:131:13
+ |
+LL | let _ = [std::path::PathBuf::new()][..].to_owned().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:133:13
+ |
+LL | let _ = IntoIterator::into_iter(slice.to_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:134:13
+ |
+LL | let _ = IntoIterator::into_iter(slice.to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:135:13
+ |
+LL | let _ = IntoIterator::into_iter([std::path::PathBuf::new()][..].to_vec());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()`
+
+error: unnecessary use of `to_owned`
+ --> $DIR/unnecessary_to_owned.rs:136:13
+ |
+LL | let _ = IntoIterator::into_iter([std::path::PathBuf::new()][..].to_owned());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `[std::path::PathBuf::new()][..].iter().cloned()`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:198:14
+ |
+LL | for t in file_types.to_vec() {
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+help: use
+ |
+LL | for t in file_types {
+ | ~~~~~~~~~~
+help: remove this `&`
+ |
+LL - let path = match get_file_path(&t) {
+LL + let path = match get_file_path(t) {
+ |
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:221:14
+ |
+LL | let _ = &["x"][..].to_vec().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `["x"][..].iter().cloned()`
+
+error: unnecessary use of `to_vec`
+ --> $DIR/unnecessary_to_owned.rs:226:14
+ |
+LL | let _ = &["x"][..].to_vec().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `["x"][..].iter().copied()`
+
+error: unnecessary use of `to_string`
+ --> $DIR/unnecessary_to_owned.rs:273:24
+ |
+LL | Box::new(build(y.to_string()))
+ | ^^^^^^^^^^^^^ help: use: `y`
+
+error: aborting due to 78 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_wraps.rs b/src/tools/clippy/tests/ui/unnecessary_wraps.rs
new file mode 100644
index 000000000..63648ef58
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_wraps.rs
@@ -0,0 +1,144 @@
+#![warn(clippy::unnecessary_wraps)]
+#![allow(clippy::no_effect)]
+#![allow(clippy::needless_return)]
+#![allow(clippy::if_same_then_else)]
+#![allow(dead_code)]
+
+// should be linted
+fn func1(a: bool, b: bool) -> Option<i32> {
+ if a && b {
+ return Some(42);
+ }
+ if a {
+ Some(-1);
+ Some(2)
+ } else {
+ return Some(1337);
+ }
+}
+
+// should be linted
+fn func2(a: bool, b: bool) -> Option<i32> {
+ if a && b {
+ return Some(10);
+ }
+ if a { Some(20) } else { Some(30) }
+}
+
+// public fns should not be linted
+pub fn func3(a: bool) -> Option<i32> {
+ if a { Some(1) } else { Some(1) }
+}
+
+// should not be linted
+fn func4(a: bool) -> Option<i32> {
+ if a { Some(1) } else { None }
+}
+
+// should be linted
+fn func5() -> Option<i32> {
+ Some(1)
+}
+
+// should not be linted
+fn func6() -> Option<i32> {
+ None
+}
+
+// should be linted
+fn func7() -> Result<i32, ()> {
+ Ok(1)
+}
+
+// should not be linted
+fn func8(a: bool) -> Result<i32, ()> {
+ if a { Ok(1) } else { Err(()) }
+}
+
+// should not be linted
+fn func9(a: bool) -> Result<i32, ()> {
+ Err(())
+}
+
+// should not be linted
+fn func10() -> Option<()> {
+ unimplemented!()
+}
+
+pub struct A;
+
+impl A {
+ // should not be linted
+ pub fn func11() -> Option<i32> {
+ Some(1)
+ }
+
+ // should be linted
+ fn func12() -> Option<i32> {
+ Some(1)
+ }
+}
+
+trait B {
+ // trait impls are not linted
+ fn func13() -> Option<i32> {
+ Some(1)
+ }
+}
+
+impl B for A {
+ // trait impls are not linted
+ fn func13() -> Option<i32> {
+ Some(0)
+ }
+}
+
+fn issue_6384(s: &str) -> Option<&str> {
+ Some(match s {
+ "a" => "A",
+ _ => return None,
+ })
+}
+
+// should be linted
+fn issue_6640_1(a: bool, b: bool) -> Option<()> {
+ if a && b {
+ return Some(());
+ }
+ if a {
+ Some(());
+ Some(())
+ } else {
+ return Some(());
+ }
+}
+
+// should be linted
+fn issue_6640_2(a: bool, b: bool) -> Result<(), i32> {
+ if a && b {
+ return Ok(());
+ }
+ if a {
+ Ok(())
+ } else {
+ return Ok(());
+ }
+}
+
+// should not be linted
+fn issue_6640_3() -> Option<()> {
+ if true { Some(()) } else { None }
+}
+
+// should not be linted
+fn issue_6640_4() -> Result<(), ()> {
+ if true { Ok(()) } else { Err(()) }
+}
+
+fn main() {
+ // method calls are not linted
+ func1(true, true);
+ func2(true, true);
+ issue_6640_1(true, true);
+ issue_6640_2(true, true);
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_wraps.stderr b/src/tools/clippy/tests/ui/unnecessary_wraps.stderr
new file mode 100644
index 000000000..a6a0b22cf
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_wraps.stderr
@@ -0,0 +1,156 @@
+error: this function's return value is unnecessarily wrapped by `Option`
+ --> $DIR/unnecessary_wraps.rs:8:1
+ |
+LL | / fn func1(a: bool, b: bool) -> Option<i32> {
+LL | | if a && b {
+LL | | return Some(42);
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::unnecessary-wraps` implied by `-D warnings`
+help: remove `Option` from the return type...
+ |
+LL | fn func1(a: bool, b: bool) -> i32 {
+ | ~~~
+help: ...and then change returning expressions
+ |
+LL ~ return 42;
+LL | }
+LL | if a {
+LL | Some(-1);
+LL ~ 2
+LL | } else {
+LL ~ return 1337;
+ |
+
+error: this function's return value is unnecessarily wrapped by `Option`
+ --> $DIR/unnecessary_wraps.rs:21:1
+ |
+LL | / fn func2(a: bool, b: bool) -> Option<i32> {
+LL | | if a && b {
+LL | | return Some(10);
+LL | | }
+LL | | if a { Some(20) } else { Some(30) }
+LL | | }
+ | |_^
+ |
+help: remove `Option` from the return type...
+ |
+LL | fn func2(a: bool, b: bool) -> i32 {
+ | ~~~
+help: ...and then change returning expressions
+ |
+LL ~ return 10;
+LL | }
+LL ~ if a { 20 } else { 30 }
+ |
+
+error: this function's return value is unnecessarily wrapped by `Option`
+ --> $DIR/unnecessary_wraps.rs:39:1
+ |
+LL | / fn func5() -> Option<i32> {
+LL | | Some(1)
+LL | | }
+ | |_^
+ |
+help: remove `Option` from the return type...
+ |
+LL | fn func5() -> i32 {
+ | ~~~
+help: ...and then change returning expressions
+ |
+LL | 1
+ |
+
+error: this function's return value is unnecessarily wrapped by `Result`
+ --> $DIR/unnecessary_wraps.rs:49:1
+ |
+LL | / fn func7() -> Result<i32, ()> {
+LL | | Ok(1)
+LL | | }
+ | |_^
+ |
+help: remove `Result` from the return type...
+ |
+LL | fn func7() -> i32 {
+ | ~~~
+help: ...and then change returning expressions
+ |
+LL | 1
+ |
+
+error: this function's return value is unnecessarily wrapped by `Option`
+ --> $DIR/unnecessary_wraps.rs:77:5
+ |
+LL | / fn func12() -> Option<i32> {
+LL | | Some(1)
+LL | | }
+ | |_____^
+ |
+help: remove `Option` from the return type...
+ |
+LL | fn func12() -> i32 {
+ | ~~~
+help: ...and then change returning expressions
+ |
+LL | 1
+ |
+
+error: this function's return value is unnecessary
+ --> $DIR/unnecessary_wraps.rs:104:1
+ |
+LL | / fn issue_6640_1(a: bool, b: bool) -> Option<()> {
+LL | | if a && b {
+LL | | return Some(());
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+help: remove the return type...
+ |
+LL | fn issue_6640_1(a: bool, b: bool) -> Option<()> {
+ | ~~~~~~~~~~
+help: ...and then remove returned values
+ |
+LL ~ return ;
+LL | }
+LL | if a {
+LL | Some(());
+LL ~
+LL | } else {
+LL ~ return ;
+ |
+
+error: this function's return value is unnecessary
+ --> $DIR/unnecessary_wraps.rs:117:1
+ |
+LL | / fn issue_6640_2(a: bool, b: bool) -> Result<(), i32> {
+LL | | if a && b {
+LL | | return Ok(());
+LL | | }
+... |
+LL | | }
+LL | | }
+ | |_^
+ |
+help: remove the return type...
+ |
+LL | fn issue_6640_2(a: bool, b: bool) -> Result<(), i32> {
+ | ~~~~~~~~~~~~~~~
+help: ...and then remove returned values
+ |
+LL ~ return ;
+LL | }
+LL | if a {
+LL ~
+LL | } else {
+LL ~ return ;
+ |
+
+error: aborting due to 7 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unneeded_field_pattern.rs b/src/tools/clippy/tests/ui/unneeded_field_pattern.rs
new file mode 100644
index 000000000..fa639aa70
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unneeded_field_pattern.rs
@@ -0,0 +1,22 @@
+#![warn(clippy::unneeded_field_pattern)]
+#[allow(dead_code, unused)]
+
+struct Foo {
+ a: i32,
+ b: i32,
+ c: i32,
+}
+
+fn main() {
+ let f = Foo { a: 0, b: 0, c: 0 };
+
+ match f {
+ Foo { a: _, b: 0, .. } => {},
+
+ Foo { a: _, b: _, c: _ } => {},
+ }
+ match f {
+ Foo { b: 0, .. } => {}, // should be OK
+ Foo { .. } => {}, // and the Force might be with this one
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unneeded_field_pattern.stderr b/src/tools/clippy/tests/ui/unneeded_field_pattern.stderr
new file mode 100644
index 000000000..b8d3c2945
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unneeded_field_pattern.stderr
@@ -0,0 +1,19 @@
+error: you matched a field with a wildcard pattern, consider using `..` instead
+ --> $DIR/unneeded_field_pattern.rs:14:15
+ |
+LL | Foo { a: _, b: 0, .. } => {},
+ | ^^^^
+ |
+ = note: `-D clippy::unneeded-field-pattern` implied by `-D warnings`
+ = help: try with `Foo { b: 0, .. }`
+
+error: all the struct fields are matched to a wildcard pattern, consider using `..`
+ --> $DIR/unneeded_field_pattern.rs:16:9
+ |
+LL | Foo { a: _, b: _, c: _ } => {},
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: try with `Foo { .. }` instead
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.fixed b/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.fixed
new file mode 100644
index 000000000..12c3461c9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.fixed
@@ -0,0 +1,45 @@
+// run-rustfix
+#![feature(stmt_expr_attributes)]
+#![deny(clippy::unneeded_wildcard_pattern)]
+
+fn main() {
+ let t = (0, 1, 2, 3);
+
+ if let (0, ..) = t {};
+ if let (0, ..) = t {};
+ if let (.., 0) = t {};
+ if let (.., 0) = t {};
+ if let (0, ..) = t {};
+ if let (0, ..) = t {};
+ if let (_, 0, ..) = t {};
+ if let (.., 0, _) = t {};
+ if let (0, _, _, _) = t {};
+ if let (0, ..) = t {};
+ if let (.., 0) = t {};
+
+ #[rustfmt::skip]
+ {
+ if let (0, ..,) = t {};
+ }
+
+ struct S(usize, usize, usize, usize);
+
+ let s = S(0, 1, 2, 3);
+
+ if let S(0, ..) = s {};
+ if let S(0, ..) = s {};
+ if let S(.., 0) = s {};
+ if let S(.., 0) = s {};
+ if let S(0, ..) = s {};
+ if let S(0, ..) = s {};
+ if let S(_, 0, ..) = s {};
+ if let S(.., 0, _) = s {};
+ if let S(0, _, _, _) = s {};
+ if let S(0, ..) = s {};
+ if let S(.., 0) = s {};
+
+ #[rustfmt::skip]
+ {
+ if let S(0, ..,) = s {};
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.rs b/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.rs
new file mode 100644
index 000000000..4ac01d5d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.rs
@@ -0,0 +1,45 @@
+// run-rustfix
+#![feature(stmt_expr_attributes)]
+#![deny(clippy::unneeded_wildcard_pattern)]
+
+fn main() {
+ let t = (0, 1, 2, 3);
+
+ if let (0, .., _) = t {};
+ if let (0, _, ..) = t {};
+ if let (_, .., 0) = t {};
+ if let (.., _, 0) = t {};
+ if let (0, _, _, ..) = t {};
+ if let (0, .., _, _) = t {};
+ if let (_, 0, ..) = t {};
+ if let (.., 0, _) = t {};
+ if let (0, _, _, _) = t {};
+ if let (0, ..) = t {};
+ if let (.., 0) = t {};
+
+ #[rustfmt::skip]
+ {
+ if let (0, .., _, _,) = t {};
+ }
+
+ struct S(usize, usize, usize, usize);
+
+ let s = S(0, 1, 2, 3);
+
+ if let S(0, .., _) = s {};
+ if let S(0, _, ..) = s {};
+ if let S(_, .., 0) = s {};
+ if let S(.., _, 0) = s {};
+ if let S(0, _, _, ..) = s {};
+ if let S(0, .., _, _) = s {};
+ if let S(_, 0, ..) = s {};
+ if let S(.., 0, _) = s {};
+ if let S(0, _, _, _) = s {};
+ if let S(0, ..) = s {};
+ if let S(.., 0) = s {};
+
+ #[rustfmt::skip]
+ {
+ if let S(0, .., _, _,) = s {};
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.stderr b/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.stderr
new file mode 100644
index 000000000..716d9ecff
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unneeded_wildcard_pattern.stderr
@@ -0,0 +1,92 @@
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:8:18
+ |
+LL | if let (0, .., _) = t {};
+ | ^^^ help: remove it
+ |
+note: the lint level is defined here
+ --> $DIR/unneeded_wildcard_pattern.rs:3:9
+ |
+LL | #![deny(clippy::unneeded_wildcard_pattern)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:9:16
+ |
+LL | if let (0, _, ..) = t {};
+ | ^^^ help: remove it
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:10:13
+ |
+LL | if let (_, .., 0) = t {};
+ | ^^^ help: remove it
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:11:15
+ |
+LL | if let (.., _, 0) = t {};
+ | ^^^ help: remove it
+
+error: these patterns are unneeded as the `..` pattern can match those elements
+ --> $DIR/unneeded_wildcard_pattern.rs:12:16
+ |
+LL | if let (0, _, _, ..) = t {};
+ | ^^^^^^ help: remove them
+
+error: these patterns are unneeded as the `..` pattern can match those elements
+ --> $DIR/unneeded_wildcard_pattern.rs:13:18
+ |
+LL | if let (0, .., _, _) = t {};
+ | ^^^^^^ help: remove them
+
+error: these patterns are unneeded as the `..` pattern can match those elements
+ --> $DIR/unneeded_wildcard_pattern.rs:22:22
+ |
+LL | if let (0, .., _, _,) = t {};
+ | ^^^^^^ help: remove them
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:29:19
+ |
+LL | if let S(0, .., _) = s {};
+ | ^^^ help: remove it
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:30:17
+ |
+LL | if let S(0, _, ..) = s {};
+ | ^^^ help: remove it
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:31:14
+ |
+LL | if let S(_, .., 0) = s {};
+ | ^^^ help: remove it
+
+error: this pattern is unneeded as the `..` pattern can match that element
+ --> $DIR/unneeded_wildcard_pattern.rs:32:16
+ |
+LL | if let S(.., _, 0) = s {};
+ | ^^^ help: remove it
+
+error: these patterns are unneeded as the `..` pattern can match those elements
+ --> $DIR/unneeded_wildcard_pattern.rs:33:17
+ |
+LL | if let S(0, _, _, ..) = s {};
+ | ^^^^^^ help: remove them
+
+error: these patterns are unneeded as the `..` pattern can match those elements
+ --> $DIR/unneeded_wildcard_pattern.rs:34:19
+ |
+LL | if let S(0, .., _, _) = s {};
+ | ^^^^^^ help: remove them
+
+error: these patterns are unneeded as the `..` pattern can match those elements
+ --> $DIR/unneeded_wildcard_pattern.rs:43:23
+ |
+LL | if let S(0, .., _, _,) = s {};
+ | ^^^^^^ help: remove them
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns.fixed b/src/tools/clippy/tests/ui/unnested_or_patterns.fixed
new file mode 100644
index 000000000..c223b5bc7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnested_or_patterns.fixed
@@ -0,0 +1,35 @@
+// run-rustfix
+
+#![feature(box_patterns)]
+#![warn(clippy::unnested_or_patterns)]
+#![allow(clippy::cognitive_complexity, clippy::match_ref_pats, clippy::upper_case_acronyms)]
+#![allow(unreachable_patterns, irrefutable_let_patterns, unused_variables)]
+
+fn main() {
+ // Should be ignored by this lint, as nesting requires more characters.
+ if let &0 | &2 = &0 {}
+
+ if let box (0 | 2) = Box::new(0) {}
+ if let box (0 | 1 | 2 | 3 | 4) = Box::new(0) {}
+ const C0: Option<u8> = Some(1);
+ if let Some(1 | 2) | C0 = None {}
+ if let &mut (0 | 2) = &mut 0 {}
+ if let x @ (0 | 2) = 0 {}
+ if let (0, 1 | 2 | 3) = (0, 0) {}
+ if let (1 | 2 | 3, 0) = (0, 0) {}
+ if let (x, ..) | (x, 1 | 2) = (0, 1) {}
+ if let [0 | 1] = [0] {}
+ if let [x, 0 | 1] = [0, 1] {}
+ if let [x, 0 | 1 | 2] = [0, 1] {}
+ if let [x, ..] | [x, 1 | 2] = [0, 1] {}
+ struct TS(u8, u8);
+ if let TS(0 | 1, x) = TS(0, 0) {}
+ if let TS(1 | 2 | 3, 0) = TS(0, 0) {}
+ if let TS(x, ..) | TS(x, 1 | 2) = TS(0, 0) {}
+ struct S {
+ x: u8,
+ y: u8,
+ }
+ if let S { x: 0 | 1, y } = (S { x: 0, y: 1 }) {}
+ if let S { x: 0, y, .. } | S { y, x: 1 } = (S { x: 0, y: 1 }) {}
+}
diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns.rs b/src/tools/clippy/tests/ui/unnested_or_patterns.rs
new file mode 100644
index 000000000..04cd11036
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnested_or_patterns.rs
@@ -0,0 +1,35 @@
+// run-rustfix
+
+#![feature(box_patterns)]
+#![warn(clippy::unnested_or_patterns)]
+#![allow(clippy::cognitive_complexity, clippy::match_ref_pats, clippy::upper_case_acronyms)]
+#![allow(unreachable_patterns, irrefutable_let_patterns, unused_variables)]
+
+fn main() {
+ // Should be ignored by this lint, as nesting requires more characters.
+ if let &0 | &2 = &0 {}
+
+ if let box 0 | box 2 = Box::new(0) {}
+ if let box ((0 | 1)) | box (2 | 3) | box 4 = Box::new(0) {}
+ const C0: Option<u8> = Some(1);
+ if let Some(1) | C0 | Some(2) = None {}
+ if let &mut 0 | &mut 2 = &mut 0 {}
+ if let x @ 0 | x @ 2 = 0 {}
+ if let (0, 1) | (0, 2) | (0, 3) = (0, 0) {}
+ if let (1, 0) | (2, 0) | (3, 0) = (0, 0) {}
+ if let (x, ..) | (x, 1) | (x, 2) = (0, 1) {}
+ if let [0] | [1] = [0] {}
+ if let [x, 0] | [x, 1] = [0, 1] {}
+ if let [x, 0] | [x, 1] | [x, 2] = [0, 1] {}
+ if let [x, ..] | [x, 1] | [x, 2] = [0, 1] {}
+ struct TS(u8, u8);
+ if let TS(0, x) | TS(1, x) = TS(0, 0) {}
+ if let TS(1, 0) | TS(2, 0) | TS(3, 0) = TS(0, 0) {}
+ if let TS(x, ..) | TS(x, 1) | TS(x, 2) = TS(0, 0) {}
+ struct S {
+ x: u8,
+ y: u8,
+ }
+ if let S { x: 0, y } | S { y, x: 1 } = (S { x: 0, y: 1 }) {}
+ if let S { x: 0, y, .. } | S { y, x: 1 } = (S { x: 0, y: 1 }) {}
+}
diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns.stderr b/src/tools/clippy/tests/ui/unnested_or_patterns.stderr
new file mode 100644
index 000000000..453c66cbb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnested_or_patterns.stderr
@@ -0,0 +1,179 @@
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:12:12
+ |
+LL | if let box 0 | box 2 = Box::new(0) {}
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unnested-or-patterns` implied by `-D warnings`
+help: nest the patterns
+ |
+LL | if let box (0 | 2) = Box::new(0) {}
+ | ~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:13:12
+ |
+LL | if let box ((0 | 1)) | box (2 | 3) | box 4 = Box::new(0) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let box (0 | 1 | 2 | 3 | 4) = Box::new(0) {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:15:12
+ |
+LL | if let Some(1) | C0 | Some(2) = None {}
+ | ^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let Some(1 | 2) | C0 = None {}
+ | ~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:16:12
+ |
+LL | if let &mut 0 | &mut 2 = &mut 0 {}
+ | ^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let &mut (0 | 2) = &mut 0 {}
+ | ~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:17:12
+ |
+LL | if let x @ 0 | x @ 2 = 0 {}
+ | ^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let x @ (0 | 2) = 0 {}
+ | ~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:18:12
+ |
+LL | if let (0, 1) | (0, 2) | (0, 3) = (0, 0) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let (0, 1 | 2 | 3) = (0, 0) {}
+ | ~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:19:12
+ |
+LL | if let (1, 0) | (2, 0) | (3, 0) = (0, 0) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let (1 | 2 | 3, 0) = (0, 0) {}
+ | ~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:20:12
+ |
+LL | if let (x, ..) | (x, 1) | (x, 2) = (0, 1) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let (x, ..) | (x, 1 | 2) = (0, 1) {}
+ | ~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:21:12
+ |
+LL | if let [0] | [1] = [0] {}
+ | ^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let [0 | 1] = [0] {}
+ | ~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:22:12
+ |
+LL | if let [x, 0] | [x, 1] = [0, 1] {}
+ | ^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let [x, 0 | 1] = [0, 1] {}
+ | ~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:23:12
+ |
+LL | if let [x, 0] | [x, 1] | [x, 2] = [0, 1] {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let [x, 0 | 1 | 2] = [0, 1] {}
+ | ~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:24:12
+ |
+LL | if let [x, ..] | [x, 1] | [x, 2] = [0, 1] {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let [x, ..] | [x, 1 | 2] = [0, 1] {}
+ | ~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:26:12
+ |
+LL | if let TS(0, x) | TS(1, x) = TS(0, 0) {}
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let TS(0 | 1, x) = TS(0, 0) {}
+ | ~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:27:12
+ |
+LL | if let TS(1, 0) | TS(2, 0) | TS(3, 0) = TS(0, 0) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let TS(1 | 2 | 3, 0) = TS(0, 0) {}
+ | ~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:28:12
+ |
+LL | if let TS(x, ..) | TS(x, 1) | TS(x, 2) = TS(0, 0) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let TS(x, ..) | TS(x, 1 | 2) = TS(0, 0) {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns.rs:33:12
+ |
+LL | if let S { x: 0, y } | S { y, x: 1 } = (S { x: 0, y: 1 }) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let S { x: 0 | 1, y } = (S { x: 0, y: 1 }) {}
+ | ~~~~~~~~~~~~~~~~~
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns2.fixed b/src/tools/clippy/tests/ui/unnested_or_patterns2.fixed
new file mode 100644
index 000000000..d3539d798
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnested_or_patterns2.fixed
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![feature(box_patterns)]
+#![warn(clippy::unnested_or_patterns)]
+#![allow(clippy::cognitive_complexity, clippy::match_ref_pats)]
+#![allow(unreachable_patterns, irrefutable_let_patterns, unused_variables)]
+
+fn main() {
+ if let Some(Some(0 | 1)) = None {}
+ if let Some(Some(0 | 1 | 2)) = None {}
+ if let Some(Some(0 | 1 | 2 | 3 | 4)) = None {}
+ if let Some(Some(0 | 1 | 2)) = None {}
+ if let ((0 | 1 | 2,),) = ((0,),) {}
+ if let 0 | 1 | 2 = 0 {}
+ if let box (0 | 1 | 2 | 3 | 4) = Box::new(0) {}
+ if let box box (0 | 2 | 4) = Box::new(Box::new(0)) {}
+}
diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns2.rs b/src/tools/clippy/tests/ui/unnested_or_patterns2.rs
new file mode 100644
index 000000000..9cea5cdea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnested_or_patterns2.rs
@@ -0,0 +1,17 @@
+// run-rustfix
+
+#![feature(box_patterns)]
+#![warn(clippy::unnested_or_patterns)]
+#![allow(clippy::cognitive_complexity, clippy::match_ref_pats)]
+#![allow(unreachable_patterns, irrefutable_let_patterns, unused_variables)]
+
+fn main() {
+ if let Some(Some(0)) | Some(Some(1)) = None {}
+ if let Some(Some(0)) | Some(Some(1) | Some(2)) = None {}
+ if let Some(Some(0 | 1) | Some(2)) | Some(Some(3) | Some(4)) = None {}
+ if let Some(Some(0) | Some(1 | 2)) = None {}
+ if let ((0,),) | ((1,) | (2,),) = ((0,),) {}
+ if let 0 | (1 | 2) = 0 {}
+ if let box (0 | 1) | (box 2 | box (3 | 4)) = Box::new(0) {}
+ if let box box 0 | box (box 2 | box 4) = Box::new(Box::new(0)) {}
+}
diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns2.stderr b/src/tools/clippy/tests/ui/unnested_or_patterns2.stderr
new file mode 100644
index 000000000..41e8d3fc7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnested_or_patterns2.stderr
@@ -0,0 +1,91 @@
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:9:12
+ |
+LL | if let Some(Some(0)) | Some(Some(1)) = None {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unnested-or-patterns` implied by `-D warnings`
+help: nest the patterns
+ |
+LL | if let Some(Some(0 | 1)) = None {}
+ | ~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:10:12
+ |
+LL | if let Some(Some(0)) | Some(Some(1) | Some(2)) = None {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let Some(Some(0 | 1 | 2)) = None {}
+ | ~~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:11:12
+ |
+LL | if let Some(Some(0 | 1) | Some(2)) | Some(Some(3) | Some(4)) = None {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let Some(Some(0 | 1 | 2 | 3 | 4)) = None {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:12:12
+ |
+LL | if let Some(Some(0) | Some(1 | 2)) = None {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let Some(Some(0 | 1 | 2)) = None {}
+ | ~~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:13:12
+ |
+LL | if let ((0,),) | ((1,) | (2,),) = ((0,),) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let ((0 | 1 | 2,),) = ((0,),) {}
+ | ~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:14:12
+ |
+LL | if let 0 | (1 | 2) = 0 {}
+ | ^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let 0 | 1 | 2 = 0 {}
+ | ~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:15:12
+ |
+LL | if let box (0 | 1) | (box 2 | box (3 | 4)) = Box::new(0) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let box (0 | 1 | 2 | 3 | 4) = Box::new(0) {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~
+
+error: unnested or-patterns
+ --> $DIR/unnested_or_patterns2.rs:16:12
+ |
+LL | if let box box 0 | box (box 2 | box 4) = Box::new(Box::new(0)) {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: nest the patterns
+ |
+LL | if let box box (0 | 2 | 4) = Box::new(Box::new(0)) {}
+ | ~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unreadable_literal.fixed b/src/tools/clippy/tests/ui/unreadable_literal.fixed
new file mode 100644
index 000000000..a67363b09
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unreadable_literal.fixed
@@ -0,0 +1,46 @@
+// run-rustfix
+
+#![warn(clippy::unreadable_literal)]
+#![allow(unused_tuple_struct_fields)]
+
+struct Foo(u64);
+
+macro_rules! foo {
+ () => {
+ Foo(123123123123)
+ };
+}
+
+struct Bar(f32);
+
+macro_rules! bar {
+ () => {
+ Bar(100200300400.100200300400500)
+ };
+}
+
+fn main() {
+ let _good = (
+ 0b1011_i64,
+ 0o1_234_u32,
+ 0x0123_4567,
+ 65536,
+ 1_2345_6789,
+ 1234_f32,
+ 1_234.12_f32,
+ 1_234.123_f32,
+ 1.123_4_f32,
+ );
+ let _bad = (0b11_0110_i64, 0x1234_5678_usize, 123_456_f32, 1.234_567_f32);
+ let _good_sci = 1.1234e1;
+ let _bad_sci = 1.123_456e1;
+
+ let _fail1 = 0x00ab_cdef;
+ let _fail2: u32 = 0xBAFE_BAFE;
+ let _fail3 = 0x0abc_deff;
+ let _fail4: i128 = 0x00ab_cabc_abca_bcab_cabc;
+ let _fail5 = 1.100_300_400;
+
+ let _ = foo!();
+ let _ = bar!();
+}
diff --git a/src/tools/clippy/tests/ui/unreadable_literal.rs b/src/tools/clippy/tests/ui/unreadable_literal.rs
new file mode 100644
index 000000000..82f04e7ce
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unreadable_literal.rs
@@ -0,0 +1,46 @@
+// run-rustfix
+
+#![warn(clippy::unreadable_literal)]
+#![allow(unused_tuple_struct_fields)]
+
+struct Foo(u64);
+
+macro_rules! foo {
+ () => {
+ Foo(123123123123)
+ };
+}
+
+struct Bar(f32);
+
+macro_rules! bar {
+ () => {
+ Bar(100200300400.100200300400500)
+ };
+}
+
+fn main() {
+ let _good = (
+ 0b1011_i64,
+ 0o1_234_u32,
+ 0x1_234_567,
+ 65536,
+ 1_2345_6789,
+ 1234_f32,
+ 1_234.12_f32,
+ 1_234.123_f32,
+ 1.123_4_f32,
+ );
+ let _bad = (0b110110_i64, 0x12345678_usize, 123456_f32, 1.234567_f32);
+ let _good_sci = 1.1234e1;
+ let _bad_sci = 1.123456e1;
+
+ let _fail1 = 0xabcdef;
+ let _fail2: u32 = 0xBAFEBAFE;
+ let _fail3 = 0xabcdeff;
+ let _fail4: i128 = 0xabcabcabcabcabcabc;
+ let _fail5 = 1.100300400;
+
+ let _ = foo!();
+ let _ = bar!();
+}
diff --git a/src/tools/clippy/tests/ui/unreadable_literal.stderr b/src/tools/clippy/tests/ui/unreadable_literal.stderr
new file mode 100644
index 000000000..b51130c6a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unreadable_literal.stderr
@@ -0,0 +1,72 @@
+error: digits of hex or binary literal not grouped by four
+ --> $DIR/unreadable_literal.rs:26:9
+ |
+LL | 0x1_234_567,
+ | ^^^^^^^^^^^ help: consider: `0x0123_4567`
+ |
+ = note: `-D clippy::unusual-byte-groupings` implied by `-D warnings`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:34:17
+ |
+LL | let _bad = (0b110110_i64, 0x12345678_usize, 123456_f32, 1.234567_f32);
+ | ^^^^^^^^^^^^ help: consider: `0b11_0110_i64`
+ |
+ = note: `-D clippy::unreadable-literal` implied by `-D warnings`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:34:31
+ |
+LL | let _bad = (0b110110_i64, 0x12345678_usize, 123456_f32, 1.234567_f32);
+ | ^^^^^^^^^^^^^^^^ help: consider: `0x1234_5678_usize`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:34:49
+ |
+LL | let _bad = (0b110110_i64, 0x12345678_usize, 123456_f32, 1.234567_f32);
+ | ^^^^^^^^^^ help: consider: `123_456_f32`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:34:61
+ |
+LL | let _bad = (0b110110_i64, 0x12345678_usize, 123456_f32, 1.234567_f32);
+ | ^^^^^^^^^^^^ help: consider: `1.234_567_f32`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:36:20
+ |
+LL | let _bad_sci = 1.123456e1;
+ | ^^^^^^^^^^ help: consider: `1.123_456e1`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:38:18
+ |
+LL | let _fail1 = 0xabcdef;
+ | ^^^^^^^^ help: consider: `0x00ab_cdef`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:39:23
+ |
+LL | let _fail2: u32 = 0xBAFEBAFE;
+ | ^^^^^^^^^^ help: consider: `0xBAFE_BAFE`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:40:18
+ |
+LL | let _fail3 = 0xabcdeff;
+ | ^^^^^^^^^ help: consider: `0x0abc_deff`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:41:24
+ |
+LL | let _fail4: i128 = 0xabcabcabcabcabcabc;
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider: `0x00ab_cabc_abca_bcab_cabc`
+
+error: long literal lacking separators
+ --> $DIR/unreadable_literal.rs:42:18
+ |
+LL | let _fail5 = 1.100300400;
+ | ^^^^^^^^^^^ help: consider: `1.100_300_400`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs
new file mode 100644
index 000000000..bafca9191
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs
@@ -0,0 +1,70 @@
+#![warn(clippy::unsafe_derive_deserialize)]
+#![allow(unused, clippy::missing_safety_doc)]
+
+extern crate serde;
+
+use serde::Deserialize;
+
+#[derive(Deserialize)]
+pub struct A;
+impl A {
+ pub unsafe fn new(_a: i32, _b: i32) -> Self {
+ Self {}
+ }
+}
+
+#[derive(Deserialize)]
+pub struct B;
+impl B {
+ pub unsafe fn unsafe_method(&self) {}
+}
+
+#[derive(Deserialize)]
+pub struct C;
+impl C {
+ pub fn unsafe_block(&self) {
+ unsafe {}
+ }
+}
+
+#[derive(Deserialize)]
+pub struct D;
+impl D {
+ pub fn inner_unsafe_fn(&self) {
+ unsafe fn inner() {}
+ }
+}
+
+// Does not derive `Deserialize`, should be ignored
+pub struct E;
+impl E {
+ pub unsafe fn new(_a: i32, _b: i32) -> Self {
+ Self {}
+ }
+
+ pub unsafe fn unsafe_method(&self) {}
+
+ pub fn unsafe_block(&self) {
+ unsafe {}
+ }
+
+ pub fn inner_unsafe_fn(&self) {
+ unsafe fn inner() {}
+ }
+}
+
+// Does not have methods using `unsafe`, should be ignored
+#[derive(Deserialize)]
+pub struct F;
+
+// Check that we honor the `allow` attribute on the ADT
+#[allow(clippy::unsafe_derive_deserialize)]
+#[derive(Deserialize)]
+pub struct G;
+impl G {
+ pub fn unsafe_block(&self) {
+ unsafe {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr
new file mode 100644
index 000000000..18c4276c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr
@@ -0,0 +1,39 @@
+error: you are deriving `serde::Deserialize` on a type that has methods using `unsafe`
+ --> $DIR/unsafe_derive_deserialize.rs:8:10
+ |
+LL | #[derive(Deserialize)]
+ | ^^^^^^^^^^^
+ |
+ = note: `-D clippy::unsafe-derive-deserialize` implied by `-D warnings`
+ = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html
+ = note: this error originates in the derive macro `Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are deriving `serde::Deserialize` on a type that has methods using `unsafe`
+ --> $DIR/unsafe_derive_deserialize.rs:16:10
+ |
+LL | #[derive(Deserialize)]
+ | ^^^^^^^^^^^
+ |
+ = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html
+ = note: this error originates in the derive macro `Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are deriving `serde::Deserialize` on a type that has methods using `unsafe`
+ --> $DIR/unsafe_derive_deserialize.rs:22:10
+ |
+LL | #[derive(Deserialize)]
+ | ^^^^^^^^^^^
+ |
+ = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html
+ = note: this error originates in the derive macro `Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: you are deriving `serde::Deserialize` on a type that has methods using `unsafe`
+ --> $DIR/unsafe_derive_deserialize.rs:30:10
+ |
+LL | #[derive(Deserialize)]
+ | ^^^^^^^^^^^
+ |
+ = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html
+ = note: this error originates in the derive macro `Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs b/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs
new file mode 100644
index 000000000..cde4e96d6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs
@@ -0,0 +1,27 @@
+#![allow(unused_imports)]
+#![allow(dead_code)]
+#![warn(clippy::unsafe_removed_from_name)]
+
+use std::cell::UnsafeCell as TotallySafeCell;
+
+use std::cell::UnsafeCell as TotallySafeCellAgain;
+
+// Shouldn't error
+use std::cell::RefCell as ProbablyNotUnsafe;
+use std::cell::RefCell as RefCellThatCantBeUnsafe;
+use std::cell::UnsafeCell as SuperDangerousUnsafeCell;
+use std::cell::UnsafeCell as Dangerunsafe;
+use std::cell::UnsafeCell as Bombsawayunsafe;
+
+mod mod_with_some_unsafe_things {
+ pub struct Safe;
+ pub struct Unsafe;
+}
+
+use mod_with_some_unsafe_things::Unsafe as LieAboutModSafety;
+
+// Shouldn't error
+use mod_with_some_unsafe_things::Safe as IPromiseItsSafeThisTime;
+use mod_with_some_unsafe_things::Unsafe as SuperUnsafeModThing;
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr b/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr
new file mode 100644
index 000000000..4f871cbe4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr
@@ -0,0 +1,22 @@
+error: removed `unsafe` from the name of `UnsafeCell` in use as `TotallySafeCell`
+ --> $DIR/unsafe_removed_from_name.rs:5:1
+ |
+LL | use std::cell::UnsafeCell as TotallySafeCell;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unsafe-removed-from-name` implied by `-D warnings`
+
+error: removed `unsafe` from the name of `UnsafeCell` in use as `TotallySafeCellAgain`
+ --> $DIR/unsafe_removed_from_name.rs:7:1
+ |
+LL | use std::cell::UnsafeCell as TotallySafeCellAgain;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: removed `unsafe` from the name of `Unsafe` in use as `LieAboutModSafety`
+ --> $DIR/unsafe_removed_from_name.rs:21:1
+ |
+LL | use mod_with_some_unsafe_things::Unsafe as LieAboutModSafety;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unseparated_prefix_literals.fixed b/src/tools/clippy/tests/ui/unseparated_prefix_literals.fixed
new file mode 100644
index 000000000..f0c2ba7cc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unseparated_prefix_literals.fixed
@@ -0,0 +1,42 @@
+// run-rustfix
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::unseparated_literal_suffix)]
+#![allow(dead_code)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+// Test for proc-macro attribute
+#[derive(ClippyMiniMacroTest)]
+struct Foo;
+
+macro_rules! lit_from_macro {
+ () => {
+ 42_usize
+ };
+}
+
+fn main() {
+ let _ok1 = 1234_i32;
+ let _ok2 = 1234_isize;
+ let _ok3 = 0x123_isize;
+ let _fail1 = 1234_i32;
+ let _fail2 = 1234_u32;
+ let _fail3 = 1234_isize;
+ let _fail4 = 1234_usize;
+ let _fail5 = 0x123_isize;
+
+ let _okf1 = 1.5_f32;
+ let _okf2 = 1_f32;
+ let _failf1 = 1.5_f32;
+ let _failf2 = 1_f32;
+
+ // Test for macro
+ let _ = lit_from_macro!();
+
+ // Counter example
+ let _ = line!();
+ // Because `assert!` contains `line!()` macro.
+ assert_eq!(4897_u32, 32223);
+}
diff --git a/src/tools/clippy/tests/ui/unseparated_prefix_literals.rs b/src/tools/clippy/tests/ui/unseparated_prefix_literals.rs
new file mode 100644
index 000000000..f44880b41
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unseparated_prefix_literals.rs
@@ -0,0 +1,42 @@
+// run-rustfix
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::unseparated_literal_suffix)]
+#![allow(dead_code)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+// Test for proc-macro attribute
+#[derive(ClippyMiniMacroTest)]
+struct Foo;
+
+macro_rules! lit_from_macro {
+ () => {
+ 42usize
+ };
+}
+
+fn main() {
+ let _ok1 = 1234_i32;
+ let _ok2 = 1234_isize;
+ let _ok3 = 0x123_isize;
+ let _fail1 = 1234i32;
+ let _fail2 = 1234u32;
+ let _fail3 = 1234isize;
+ let _fail4 = 1234usize;
+ let _fail5 = 0x123isize;
+
+ let _okf1 = 1.5_f32;
+ let _okf2 = 1_f32;
+ let _failf1 = 1.5f32;
+ let _failf2 = 1f32;
+
+ // Test for macro
+ let _ = lit_from_macro!();
+
+ // Counter example
+ let _ = line!();
+ // Because `assert!` contains `line!()` macro.
+ assert_eq!(4897u32, 32223);
+}
diff --git a/src/tools/clippy/tests/ui/unseparated_prefix_literals.stderr b/src/tools/clippy/tests/ui/unseparated_prefix_literals.stderr
new file mode 100644
index 000000000..ab2f75e0c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unseparated_prefix_literals.stderr
@@ -0,0 +1,63 @@
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:24:18
+ |
+LL | let _fail1 = 1234i32;
+ | ^^^^^^^ help: add an underscore: `1234_i32`
+ |
+ = note: `-D clippy::unseparated-literal-suffix` implied by `-D warnings`
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:25:18
+ |
+LL | let _fail2 = 1234u32;
+ | ^^^^^^^ help: add an underscore: `1234_u32`
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:26:18
+ |
+LL | let _fail3 = 1234isize;
+ | ^^^^^^^^^ help: add an underscore: `1234_isize`
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:27:18
+ |
+LL | let _fail4 = 1234usize;
+ | ^^^^^^^^^ help: add an underscore: `1234_usize`
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:28:18
+ |
+LL | let _fail5 = 0x123isize;
+ | ^^^^^^^^^^ help: add an underscore: `0x123_isize`
+
+error: float type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:32:19
+ |
+LL | let _failf1 = 1.5f32;
+ | ^^^^^^ help: add an underscore: `1.5_f32`
+
+error: float type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:33:19
+ |
+LL | let _failf2 = 1f32;
+ | ^^^^ help: add an underscore: `1_f32`
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:16:9
+ |
+LL | 42usize
+ | ^^^^^^^ help: add an underscore: `42_usize`
+...
+LL | let _ = lit_from_macro!();
+ | ----------------- in this macro invocation
+ |
+ = note: this error originates in the macro `lit_from_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: integer type suffix should be separated by an underscore
+ --> $DIR/unseparated_prefix_literals.rs:41:16
+ |
+LL | assert_eq!(4897u32, 32223);
+ | ^^^^^^^ help: add an underscore: `4897_u32`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unused_async.rs b/src/tools/clippy/tests/ui/unused_async.rs
new file mode 100644
index 000000000..4ca7f29b3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_async.rs
@@ -0,0 +1,48 @@
+#![warn(clippy::unused_async)]
+
+use std::future::Future;
+use std::pin::Pin;
+
+async fn foo() -> i32 {
+ 4
+}
+
+async fn bar() -> i32 {
+ foo().await
+}
+
+struct S;
+
+impl S {
+ async fn unused(&self) -> i32 {
+ 1
+ }
+
+ async fn used(&self) -> i32 {
+ self.unused().await
+ }
+}
+
+trait AsyncTrait {
+ fn trait_method() -> Pin<Box<dyn Future<Output = i32>>>;
+}
+
+macro_rules! async_trait_impl {
+ () => {
+ impl AsyncTrait for S {
+ fn trait_method() -> Pin<Box<dyn Future<Output = i32>>> {
+ async fn unused() -> i32 {
+ 5
+ }
+
+ Box::pin(unused())
+ }
+ }
+ };
+}
+async_trait_impl!();
+
+fn main() {
+ foo();
+ bar();
+}
diff --git a/src/tools/clippy/tests/ui/unused_async.stderr b/src/tools/clippy/tests/ui/unused_async.stderr
new file mode 100644
index 000000000..8b8ad065a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_async.stderr
@@ -0,0 +1,23 @@
+error: unused `async` for function with no await statements
+ --> $DIR/unused_async.rs:6:1
+ |
+LL | / async fn foo() -> i32 {
+LL | | 4
+LL | | }
+ | |_^
+ |
+ = note: `-D clippy::unused-async` implied by `-D warnings`
+ = help: consider removing the `async` from this function
+
+error: unused `async` for function with no await statements
+ --> $DIR/unused_async.rs:17:5
+ |
+LL | / async fn unused(&self) -> i32 {
+LL | | 1
+LL | | }
+ | |_____^
+ |
+ = help: consider removing the `async` from this function
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unused_io_amount.rs b/src/tools/clippy/tests/ui/unused_io_amount.rs
new file mode 100644
index 000000000..4b0595581
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_io_amount.rs
@@ -0,0 +1,117 @@
+#![allow(dead_code)]
+#![warn(clippy::unused_io_amount)]
+
+extern crate futures;
+use futures::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
+use std::io::{self, Read};
+
+fn question_mark<T: io::Read + io::Write>(s: &mut T) -> io::Result<()> {
+ s.write(b"test")?;
+ let mut buf = [0u8; 4];
+ s.read(&mut buf)?;
+ Ok(())
+}
+
+fn unwrap<T: io::Read + io::Write>(s: &mut T) {
+ s.write(b"test").unwrap();
+ let mut buf = [0u8; 4];
+ s.read(&mut buf).unwrap();
+}
+
+fn vectored<T: io::Read + io::Write>(s: &mut T) -> io::Result<()> {
+ s.read_vectored(&mut [io::IoSliceMut::new(&mut [])])?;
+ s.write_vectored(&[io::IoSlice::new(&[])])?;
+ Ok(())
+}
+
+fn ok(file: &str) -> Option<()> {
+ let mut reader = std::fs::File::open(file).ok()?;
+ let mut result = [0u8; 0];
+ reader.read(&mut result).ok()?;
+ Some(())
+}
+
+#[allow(clippy::redundant_closure)]
+#[allow(clippy::bind_instead_of_map)]
+fn or_else(file: &str) -> io::Result<()> {
+ let mut reader = std::fs::File::open(file)?;
+ let mut result = [0u8; 0];
+ reader.read(&mut result).or_else(|err| Err(err))?;
+ Ok(())
+}
+
+#[derive(Debug)]
+enum Error {
+ Kind,
+}
+
+fn or(file: &str) -> Result<(), Error> {
+ let mut reader = std::fs::File::open(file).unwrap();
+ let mut result = [0u8; 0];
+ reader.read(&mut result).or(Err(Error::Kind))?;
+ Ok(())
+}
+
+fn combine_or(file: &str) -> Result<(), Error> {
+ let mut reader = std::fs::File::open(file).unwrap();
+ let mut result = [0u8; 0];
+ reader
+ .read(&mut result)
+ .or(Err(Error::Kind))
+ .or(Err(Error::Kind))
+ .expect("error");
+ Ok(())
+}
+
+async fn bad_async_write<W: AsyncWrite + Unpin>(w: &mut W) {
+ w.write(b"hello world").await.unwrap();
+}
+
+async fn bad_async_read<R: AsyncRead + Unpin>(r: &mut R) {
+ let mut buf = [0u8; 0];
+ r.read(&mut buf[..]).await.unwrap();
+}
+
+async fn io_not_ignored_async_write<W: AsyncWrite + Unpin>(mut w: W) {
+ // Here we're forgetting to await the future, so we should get a
+ // warning about _that_ (or we would, if it were enabled), but we
+ // won't get one about ignoring the return value.
+ w.write(b"hello world");
+}
+
+fn bad_async_write_closure<W: AsyncWrite + Unpin + 'static>(w: W) -> impl futures::Future<Output = io::Result<()>> {
+ let mut w = w;
+ async move {
+ w.write(b"hello world").await?;
+ Ok(())
+ }
+}
+
+async fn async_read_nested_or<R: AsyncRead + Unpin>(r: &mut R, do_it: bool) -> Result<[u8; 1], Error> {
+ let mut buf = [0u8; 1];
+ if do_it {
+ r.read(&mut buf[..]).await.or(Err(Error::Kind))?;
+ }
+ Ok(buf)
+}
+
+use tokio::io::{AsyncRead as TokioAsyncRead, AsyncReadExt as _, AsyncWrite as TokioAsyncWrite, AsyncWriteExt as _};
+
+async fn bad_async_write_tokio<W: TokioAsyncWrite + Unpin>(w: &mut W) {
+ w.write(b"hello world").await.unwrap();
+}
+
+async fn bad_async_read_tokio<R: TokioAsyncRead + Unpin>(r: &mut R) {
+ let mut buf = [0u8; 0];
+ r.read(&mut buf[..]).await.unwrap();
+}
+
+async fn undetected_bad_async_write<W: AsyncWrite + Unpin>(w: &mut W) {
+ // It would be good to detect this case some day, but the current lint
+ // doesn't handle it. (The documentation says that this lint "detects
+ // only common patterns".)
+ let future = w.write(b"Hello world");
+ future.await.unwrap();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unused_io_amount.stderr b/src/tools/clippy/tests/ui/unused_io_amount.stderr
new file mode 100644
index 000000000..e5bdd993a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_io_amount.stderr
@@ -0,0 +1,131 @@
+error: written amount is not handled
+ --> $DIR/unused_io_amount.rs:9:5
+ |
+LL | s.write(b"test")?;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unused-io-amount` implied by `-D warnings`
+ = help: use `Write::write_all` instead, or handle partial writes
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:11:5
+ |
+LL | s.read(&mut buf)?;
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: use `Read::read_exact` instead, or handle partial reads
+
+error: written amount is not handled
+ --> $DIR/unused_io_amount.rs:16:5
+ |
+LL | s.write(b"test").unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `Write::write_all` instead, or handle partial writes
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:18:5
+ |
+LL | s.read(&mut buf).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `Read::read_exact` instead, or handle partial reads
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:22:5
+ |
+LL | s.read_vectored(&mut [io::IoSliceMut::new(&mut [])])?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: written amount is not handled
+ --> $DIR/unused_io_amount.rs:23:5
+ |
+LL | s.write_vectored(&[io::IoSlice::new(&[])])?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:30:5
+ |
+LL | reader.read(&mut result).ok()?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `Read::read_exact` instead, or handle partial reads
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:39:5
+ |
+LL | reader.read(&mut result).or_else(|err| Err(err))?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `Read::read_exact` instead, or handle partial reads
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:51:5
+ |
+LL | reader.read(&mut result).or(Err(Error::Kind))?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `Read::read_exact` instead, or handle partial reads
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:58:5
+ |
+LL | / reader
+LL | | .read(&mut result)
+LL | | .or(Err(Error::Kind))
+LL | | .or(Err(Error::Kind))
+LL | | .expect("error");
+ | |________________________^
+ |
+ = help: use `Read::read_exact` instead, or handle partial reads
+
+error: written amount is not handled
+ --> $DIR/unused_io_amount.rs:67:5
+ |
+LL | w.write(b"hello world").await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `AsyncWriteExt::write_all` instead, or handle partial writes
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:72:5
+ |
+LL | r.read(&mut buf[..]).await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `AsyncReadExt::read_exact` instead, or handle partial reads
+
+error: written amount is not handled
+ --> $DIR/unused_io_amount.rs:85:9
+ |
+LL | w.write(b"hello world").await?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `AsyncWriteExt::write_all` instead, or handle partial writes
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:93:9
+ |
+LL | r.read(&mut buf[..]).await.or(Err(Error::Kind))?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `AsyncReadExt::read_exact` instead, or handle partial reads
+
+error: written amount is not handled
+ --> $DIR/unused_io_amount.rs:101:5
+ |
+LL | w.write(b"hello world").await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `AsyncWriteExt::write_all` instead, or handle partial writes
+
+error: read amount is not handled
+ --> $DIR/unused_io_amount.rs:106:5
+ |
+LL | r.read(&mut buf[..]).await.unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: use `AsyncReadExt::read_exact` instead, or handle partial reads
+
+error: aborting due to 16 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unused_rounding.fixed b/src/tools/clippy/tests/ui/unused_rounding.fixed
new file mode 100644
index 000000000..54f85806a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_rounding.fixed
@@ -0,0 +1,9 @@
+// run-rustfix
+#![warn(clippy::unused_rounding)]
+
+fn main() {
+ let _ = 1f32;
+ let _ = 1.0f64;
+ let _ = 1.00f32;
+ let _ = 2e-54f64.floor();
+}
diff --git a/src/tools/clippy/tests/ui/unused_rounding.rs b/src/tools/clippy/tests/ui/unused_rounding.rs
new file mode 100644
index 000000000..8d007bc4a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_rounding.rs
@@ -0,0 +1,9 @@
+// run-rustfix
+#![warn(clippy::unused_rounding)]
+
+fn main() {
+ let _ = 1f32.ceil();
+ let _ = 1.0f64.floor();
+ let _ = 1.00f32.round();
+ let _ = 2e-54f64.floor();
+}
diff --git a/src/tools/clippy/tests/ui/unused_rounding.stderr b/src/tools/clippy/tests/ui/unused_rounding.stderr
new file mode 100644
index 000000000..6cfb02e04
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_rounding.stderr
@@ -0,0 +1,22 @@
+error: used the `ceil` method with a whole number float
+ --> $DIR/unused_rounding.rs:5:13
+ |
+LL | let _ = 1f32.ceil();
+ | ^^^^^^^^^^^ help: remove the `ceil` method call: `1f32`
+ |
+ = note: `-D clippy::unused-rounding` implied by `-D warnings`
+
+error: used the `floor` method with a whole number float
+ --> $DIR/unused_rounding.rs:6:13
+ |
+LL | let _ = 1.0f64.floor();
+ | ^^^^^^^^^^^^^^ help: remove the `floor` method call: `1.0f64`
+
+error: used the `round` method with a whole number float
+ --> $DIR/unused_rounding.rs:7:13
+ |
+LL | let _ = 1.00f32.round();
+ | ^^^^^^^^^^^^^^^ help: remove the `round` method call: `1.00f32`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unused_self.rs b/src/tools/clippy/tests/ui/unused_self.rs
new file mode 100644
index 000000000..92e8e1dba
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_self.rs
@@ -0,0 +1,149 @@
+#![warn(clippy::unused_self)]
+#![allow(clippy::boxed_local, clippy::fn_params_excessive_bools)]
+
+mod unused_self {
+ use std::pin::Pin;
+ use std::sync::{Arc, Mutex};
+
+ struct A;
+
+ impl A {
+ fn unused_self_move(self) {}
+ fn unused_self_ref(&self) {}
+ fn unused_self_mut_ref(&mut self) {}
+ fn unused_self_pin_ref(self: Pin<&Self>) {}
+ fn unused_self_pin_mut_ref(self: Pin<&mut Self>) {}
+ fn unused_self_pin_nested(self: Pin<Arc<Self>>) {}
+ fn unused_self_box(self: Box<Self>) {}
+ fn unused_with_other_used_args(&self, x: u8, y: u8) -> u8 {
+ x + y
+ }
+ fn unused_self_class_method(&self) {
+ Self::static_method();
+ }
+
+ fn static_method() {}
+ }
+}
+
+mod unused_self_allow {
+ struct A;
+
+ impl A {
+ // shouldn't trigger
+ #[allow(clippy::unused_self)]
+ fn unused_self_move(self) {}
+ }
+
+ struct B;
+
+ // shouldn't trigger
+ #[allow(clippy::unused_self)]
+ impl B {
+ fn unused_self_move(self) {}
+ }
+
+ struct C;
+
+ #[allow(clippy::unused_self)]
+ impl C {
+ #[warn(clippy::unused_self)]
+ fn some_fn((): ()) {}
+
+ // shouldn't trigger
+ fn unused_self_move(self) {}
+ }
+
+ pub struct D;
+
+ impl D {
+ // shouldn't trigger for public methods
+ pub fn unused_self_move(self) {}
+ }
+}
+
+pub use unused_self_allow::D;
+
+mod used_self {
+ use std::pin::Pin;
+
+ struct A {
+ x: u8,
+ }
+
+ impl A {
+ fn used_self_move(self) -> u8 {
+ self.x
+ }
+ fn used_self_ref(&self) -> u8 {
+ self.x
+ }
+ fn used_self_mut_ref(&mut self) {
+ self.x += 1
+ }
+ fn used_self_pin_ref(self: Pin<&Self>) -> u8 {
+ self.x
+ }
+ fn used_self_box(self: Box<Self>) -> u8 {
+ self.x
+ }
+ fn used_self_with_other_unused_args(&self, x: u8, y: u8) -> u8 {
+ self.x
+ }
+ fn used_in_nested_closure(&self) -> u8 {
+ let mut a = || -> u8 { self.x };
+ a()
+ }
+
+ #[allow(clippy::collapsible_if)]
+ fn used_self_method_nested_conditions(&self, a: bool, b: bool, c: bool, d: bool) {
+ if a {
+ if b {
+ if c {
+ if d {
+ self.used_self_ref();
+ }
+ }
+ }
+ }
+ }
+
+ fn foo(&self) -> u32 {
+ let mut sum = 0u32;
+ for i in 0..self.x {
+ sum += i as u32;
+ }
+ sum
+ }
+
+ fn bar(&mut self, x: u8) -> u32 {
+ let mut y = 0u32;
+ for i in 0..x {
+ y += self.foo()
+ }
+ y
+ }
+ }
+}
+
+mod not_applicable {
+ use std::fmt;
+
+ struct A;
+
+ impl fmt::Debug for A {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "A")
+ }
+ }
+
+ impl A {
+ fn method(x: u8, y: u8) {}
+ }
+
+ trait B {
+ fn method(&self) {}
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unused_self.stderr b/src/tools/clippy/tests/ui/unused_self.stderr
new file mode 100644
index 000000000..0534b40ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_self.stderr
@@ -0,0 +1,75 @@
+error: unused `self` argument
+ --> $DIR/unused_self.rs:11:29
+ |
+LL | fn unused_self_move(self) {}
+ | ^^^^
+ |
+ = note: `-D clippy::unused-self` implied by `-D warnings`
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:12:28
+ |
+LL | fn unused_self_ref(&self) {}
+ | ^^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:13:32
+ |
+LL | fn unused_self_mut_ref(&mut self) {}
+ | ^^^^^^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:14:32
+ |
+LL | fn unused_self_pin_ref(self: Pin<&Self>) {}
+ | ^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:15:36
+ |
+LL | fn unused_self_pin_mut_ref(self: Pin<&mut Self>) {}
+ | ^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:16:35
+ |
+LL | fn unused_self_pin_nested(self: Pin<Arc<Self>>) {}
+ | ^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:17:28
+ |
+LL | fn unused_self_box(self: Box<Self>) {}
+ | ^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:18:40
+ |
+LL | fn unused_with_other_used_args(&self, x: u8, y: u8) -> u8 {
+ | ^^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: unused `self` argument
+ --> $DIR/unused_self.rs:21:37
+ |
+LL | fn unused_self_class_method(&self) {
+ | ^^^^^
+ |
+ = help: consider refactoring to a associated function
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unused_unit.fixed b/src/tools/clippy/tests/ui/unused_unit.fixed
new file mode 100644
index 000000000..7bb43cf7a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_unit.fixed
@@ -0,0 +1,89 @@
+// run-rustfix
+
+// The output for humans should just highlight the whole span without showing
+// the suggested replacement, but we also want to test that suggested
+// replacement only removes one set of parentheses, rather than naïvely
+// stripping away any starting or ending parenthesis characters—hence this
+// test of the JSON error format.
+
+#![feature(custom_inner_attributes)]
+#![rustfmt::skip]
+
+#![deny(clippy::unused_unit)]
+#![allow(dead_code)]
+#![allow(clippy::from_over_into)]
+
+struct Unitter;
+impl Unitter {
+ #[allow(clippy::no_effect)]
+ pub fn get_unit<F: Fn(), G>(&self, f: F, _g: G)
+ where G: Fn() {
+ let _y: &dyn Fn() = &f;
+ (); // this should not lint, as it's not in return type position
+ }
+}
+
+impl Into<()> for Unitter {
+ #[rustfmt::skip]
+ fn into(self) {
+
+ }
+}
+
+trait Trait {
+ fn redundant<F: FnOnce(), G, H>(&self, _f: F, _g: G, _h: H)
+ where
+ G: FnMut(),
+ H: Fn();
+}
+
+impl Trait for Unitter {
+ fn redundant<F: FnOnce(), G, H>(&self, _f: F, _g: G, _h: H)
+ where
+ G: FnMut(),
+ H: Fn() {}
+}
+
+fn return_unit() { }
+
+#[allow(clippy::needless_return)]
+#[allow(clippy::never_loop)]
+#[allow(clippy::unit_cmp)]
+fn main() {
+ let u = Unitter;
+ assert_eq!(u.get_unit(|| {}, return_unit), u.into());
+ return_unit();
+ loop {
+ break;
+ }
+ return;
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/4076
+fn foo() {
+ macro_rules! foo {
+ (recv($r:expr) -> $res:pat => $body:expr) => {
+ $body
+ }
+ }
+
+ foo! {
+ recv(rx) -> _x => ()
+ }
+}
+
+#[rustfmt::skip]
+fn test(){}
+
+#[rustfmt::skip]
+fn test2(){}
+
+#[rustfmt::skip]
+fn test3(){}
+
+fn macro_expr() {
+ macro_rules! e {
+ () => (());
+ }
+ e!()
+}
diff --git a/src/tools/clippy/tests/ui/unused_unit.rs b/src/tools/clippy/tests/ui/unused_unit.rs
new file mode 100644
index 000000000..21073fb80
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_unit.rs
@@ -0,0 +1,89 @@
+// run-rustfix
+
+// The output for humans should just highlight the whole span without showing
+// the suggested replacement, but we also want to test that suggested
+// replacement only removes one set of parentheses, rather than naïvely
+// stripping away any starting or ending parenthesis characters—hence this
+// test of the JSON error format.
+
+#![feature(custom_inner_attributes)]
+#![rustfmt::skip]
+
+#![deny(clippy::unused_unit)]
+#![allow(dead_code)]
+#![allow(clippy::from_over_into)]
+
+struct Unitter;
+impl Unitter {
+ #[allow(clippy::no_effect)]
+ pub fn get_unit<F: Fn() -> (), G>(&self, f: F, _g: G) -> ()
+ where G: Fn() -> () {
+ let _y: &dyn Fn() -> () = &f;
+ (); // this should not lint, as it's not in return type position
+ }
+}
+
+impl Into<()> for Unitter {
+ #[rustfmt::skip]
+ fn into(self) -> () {
+ ()
+ }
+}
+
+trait Trait {
+ fn redundant<F: FnOnce() -> (), G, H>(&self, _f: F, _g: G, _h: H)
+ where
+ G: FnMut() -> (),
+ H: Fn() -> ();
+}
+
+impl Trait for Unitter {
+ fn redundant<F: FnOnce() -> (), G, H>(&self, _f: F, _g: G, _h: H)
+ where
+ G: FnMut() -> (),
+ H: Fn() -> () {}
+}
+
+fn return_unit() -> () { () }
+
+#[allow(clippy::needless_return)]
+#[allow(clippy::never_loop)]
+#[allow(clippy::unit_cmp)]
+fn main() {
+ let u = Unitter;
+ assert_eq!(u.get_unit(|| {}, return_unit), u.into());
+ return_unit();
+ loop {
+ break();
+ }
+ return();
+}
+
+// https://github.com/rust-lang/rust-clippy/issues/4076
+fn foo() {
+ macro_rules! foo {
+ (recv($r:expr) -> $res:pat => $body:expr) => {
+ $body
+ }
+ }
+
+ foo! {
+ recv(rx) -> _x => ()
+ }
+}
+
+#[rustfmt::skip]
+fn test()->(){}
+
+#[rustfmt::skip]
+fn test2() ->(){}
+
+#[rustfmt::skip]
+fn test3()-> (){}
+
+fn macro_expr() {
+ macro_rules! e {
+ () => (());
+ }
+ e!()
+}
diff --git a/src/tools/clippy/tests/ui/unused_unit.stderr b/src/tools/clippy/tests/ui/unused_unit.stderr
new file mode 100644
index 000000000..0d2cb7785
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unused_unit.stderr
@@ -0,0 +1,122 @@
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:19:58
+ |
+LL | pub fn get_unit<F: Fn() -> (), G>(&self, f: F, _g: G) -> ()
+ | ^^^^^^ help: remove the `-> ()`
+ |
+note: the lint level is defined here
+ --> $DIR/unused_unit.rs:12:9
+ |
+LL | #![deny(clippy::unused_unit)]
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:19:28
+ |
+LL | pub fn get_unit<F: Fn() -> (), G>(&self, f: F, _g: G) -> ()
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:20:18
+ |
+LL | where G: Fn() -> () {
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:21:26
+ |
+LL | let _y: &dyn Fn() -> () = &f;
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:28:18
+ |
+LL | fn into(self) -> () {
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit expression
+ --> $DIR/unused_unit.rs:29:9
+ |
+LL | ()
+ | ^^ help: remove the final `()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:34:29
+ |
+LL | fn redundant<F: FnOnce() -> (), G, H>(&self, _f: F, _g: G, _h: H)
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:36:19
+ |
+LL | G: FnMut() -> (),
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:37:16
+ |
+LL | H: Fn() -> ();
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:41:29
+ |
+LL | fn redundant<F: FnOnce() -> (), G, H>(&self, _f: F, _g: G, _h: H)
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:43:19
+ |
+LL | G: FnMut() -> (),
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:44:16
+ |
+LL | H: Fn() -> () {}
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:47:17
+ |
+LL | fn return_unit() -> () { () }
+ | ^^^^^^ help: remove the `-> ()`
+
+error: unneeded unit expression
+ --> $DIR/unused_unit.rs:47:26
+ |
+LL | fn return_unit() -> () { () }
+ | ^^ help: remove the final `()`
+
+error: unneeded `()`
+ --> $DIR/unused_unit.rs:57:14
+ |
+LL | break();
+ | ^^ help: remove the `()`
+
+error: unneeded `()`
+ --> $DIR/unused_unit.rs:59:11
+ |
+LL | return();
+ | ^^ help: remove the `()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:76:10
+ |
+LL | fn test()->(){}
+ | ^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:79:11
+ |
+LL | fn test2() ->(){}
+ | ^^^^^ help: remove the `-> ()`
+
+error: unneeded unit return type
+ --> $DIR/unused_unit.rs:82:11
+ |
+LL | fn test3()-> (){}
+ | ^^^^^ help: remove the `-> ()`
+
+error: aborting due to 19 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unwrap.rs b/src/tools/clippy/tests/ui/unwrap.rs
new file mode 100644
index 000000000..a4a3cd1d3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap.rs
@@ -0,0 +1,16 @@
+#![warn(clippy::unwrap_used)]
+
+fn unwrap_option() {
+ let opt = Some(0);
+ let _ = opt.unwrap();
+}
+
+fn unwrap_result() {
+ let res: Result<u8, ()> = Ok(0);
+ let _ = res.unwrap();
+}
+
+fn main() {
+ unwrap_option();
+ unwrap_result();
+}
diff --git a/src/tools/clippy/tests/ui/unwrap.stderr b/src/tools/clippy/tests/ui/unwrap.stderr
new file mode 100644
index 000000000..4f0858005
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap.stderr
@@ -0,0 +1,19 @@
+error: used `unwrap()` on `an Option` value
+ --> $DIR/unwrap.rs:5:13
+ |
+LL | let _ = opt.unwrap();
+ | ^^^^^^^^^^^^
+ |
+ = note: `-D clippy::unwrap-used` implied by `-D warnings`
+ = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+
+error: used `unwrap()` on `a Result` value
+ --> $DIR/unwrap.rs:10:13
+ |
+LL | let _ = res.unwrap();
+ | ^^^^^^^^^^^^
+ |
+ = help: if you don't want to handle the `Err` case gracefully, consider using `expect()` to provide a better panic message
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unwrap_in_result.rs b/src/tools/clippy/tests/ui/unwrap_in_result.rs
new file mode 100644
index 000000000..2aa842adc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_in_result.rs
@@ -0,0 +1,44 @@
+#![warn(clippy::unwrap_in_result)]
+
+struct A;
+
+impl A {
+ // should not be detected
+ fn good_divisible_by_3(i_str: String) -> Result<bool, String> {
+ // checks whether a string represents a number divisible by 3
+ let i_result = i_str.parse::<i32>();
+ match i_result {
+ Err(_e) => Err("Not a number".to_string()),
+ Ok(i) => {
+ if i % 3 == 0 {
+ return Ok(true);
+ }
+ Err("Number is not divisible by 3".to_string())
+ },
+ }
+ }
+
+ // should be detected
+ fn bad_divisible_by_3(i_str: String) -> Result<bool, String> {
+ // checks whether a string represents a number divisible by 3
+ let i = i_str.parse::<i32>().unwrap();
+ if i % 3 == 0 {
+ Ok(true)
+ } else {
+ Err("Number is not divisible by 3".to_string())
+ }
+ }
+
+ fn example_option_expect(i_str: String) -> Option<bool> {
+ let i = i_str.parse::<i32>().expect("not a number");
+ if i % 3 == 0 {
+ return Some(true);
+ }
+ None
+ }
+}
+
+fn main() {
+ A::bad_divisible_by_3("3".to_string());
+ A::good_divisible_by_3("3".to_string());
+}
diff --git a/src/tools/clippy/tests/ui/unwrap_in_result.stderr b/src/tools/clippy/tests/ui/unwrap_in_result.stderr
new file mode 100644
index 000000000..56bc2f2d1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_in_result.stderr
@@ -0,0 +1,41 @@
+error: used unwrap or expect in a function that returns result or option
+ --> $DIR/unwrap_in_result.rs:22:5
+ |
+LL | / fn bad_divisible_by_3(i_str: String) -> Result<bool, String> {
+LL | | // checks whether a string represents a number divisible by 3
+LL | | let i = i_str.parse::<i32>().unwrap();
+LL | | if i % 3 == 0 {
+... |
+LL | | }
+LL | | }
+ | |_____^
+ |
+ = note: `-D clippy::unwrap-in-result` implied by `-D warnings`
+ = help: unwrap and expect should not be used in a function that returns result or option
+note: potential non-recoverable error(s)
+ --> $DIR/unwrap_in_result.rs:24:17
+ |
+LL | let i = i_str.parse::<i32>().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: used unwrap or expect in a function that returns result or option
+ --> $DIR/unwrap_in_result.rs:32:5
+ |
+LL | / fn example_option_expect(i_str: String) -> Option<bool> {
+LL | | let i = i_str.parse::<i32>().expect("not a number");
+LL | | if i % 3 == 0 {
+LL | | return Some(true);
+LL | | }
+LL | | None
+LL | | }
+ | |_____^
+ |
+ = help: unwrap and expect should not be used in a function that returns result or option
+note: potential non-recoverable error(s)
+ --> $DIR/unwrap_in_result.rs:33:17
+ |
+LL | let i = i_str.parse::<i32>().expect("not a number");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unwrap_or.rs b/src/tools/clippy/tests/ui/unwrap_or.rs
new file mode 100644
index 000000000..bfb41e439
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_or.rs
@@ -0,0 +1,9 @@
+#![warn(clippy::all)]
+
+fn main() {
+ let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len();
+}
+
+fn new_lines() {
+ let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len();
+}
diff --git a/src/tools/clippy/tests/ui/unwrap_or.stderr b/src/tools/clippy/tests/ui/unwrap_or.stderr
new file mode 100644
index 000000000..c3a7464fd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_or.stderr
@@ -0,0 +1,16 @@
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/unwrap_or.rs:4:47
+ |
+LL | let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "Fail".to_string())`
+ |
+ = note: `-D clippy::or-fun-call` implied by `-D warnings`
+
+error: use of `unwrap_or` followed by a function call
+ --> $DIR/unwrap_or.rs:8:47
+ |
+LL | let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "Fail".to_string())`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed b/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed
new file mode 100644
index 000000000..c2b9bd2c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed
@@ -0,0 +1,74 @@
+// run-rustfix
+
+#![warn(clippy::unwrap_or_else_default)]
+#![allow(dead_code)]
+#![allow(clippy::unnecessary_wraps)]
+
+/// Checks implementation of the `UNWRAP_OR_ELSE_DEFAULT` lint.
+fn unwrap_or_else_default() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo
+ }
+
+ // fake default, we should not trigger on this
+ fn default() -> Foo {
+ Foo
+ }
+ }
+
+ struct HasDefaultAndDuplicate;
+
+ impl HasDefaultAndDuplicate {
+ fn default() -> Self {
+ HasDefaultAndDuplicate
+ }
+ }
+
+ impl Default for HasDefaultAndDuplicate {
+ fn default() -> Self {
+ HasDefaultAndDuplicate
+ }
+ }
+
+ enum Enum {
+ A(),
+ }
+
+ fn make<T, V>(_: V) -> T {
+ unimplemented!();
+ }
+
+ let with_enum = Some(Enum::A());
+ with_enum.unwrap_or_else(Enum::A);
+
+ let with_new = Some(vec![1]);
+ with_new.unwrap_or_default();
+
+ let with_err: Result<_, ()> = Ok(vec![1]);
+ with_err.unwrap_or_else(make);
+
+ // should not be changed
+ let with_fake_default = None::<Foo>;
+ with_fake_default.unwrap_or_else(Foo::default);
+
+ // should not be changed
+ let with_fake_default2 = None::<HasDefaultAndDuplicate>;
+ with_fake_default2.unwrap_or_else(<HasDefaultAndDuplicate>::default);
+
+ let with_real_default = None::<HasDefaultAndDuplicate>;
+ with_real_default.unwrap_or_default();
+
+ let with_default_trait = Some(1);
+ with_default_trait.unwrap_or_default();
+
+ let with_default_type = Some(1);
+ with_default_type.unwrap_or_default();
+
+ let with_default_type: Option<Vec<u64>> = None;
+ with_default_type.unwrap_or_default();
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unwrap_or_else_default.rs b/src/tools/clippy/tests/ui/unwrap_or_else_default.rs
new file mode 100644
index 000000000..d55664990
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_or_else_default.rs
@@ -0,0 +1,74 @@
+// run-rustfix
+
+#![warn(clippy::unwrap_or_else_default)]
+#![allow(dead_code)]
+#![allow(clippy::unnecessary_wraps)]
+
+/// Checks implementation of the `UNWRAP_OR_ELSE_DEFAULT` lint.
+fn unwrap_or_else_default() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo
+ }
+
+ // fake default, we should not trigger on this
+ fn default() -> Foo {
+ Foo
+ }
+ }
+
+ struct HasDefaultAndDuplicate;
+
+ impl HasDefaultAndDuplicate {
+ fn default() -> Self {
+ HasDefaultAndDuplicate
+ }
+ }
+
+ impl Default for HasDefaultAndDuplicate {
+ fn default() -> Self {
+ HasDefaultAndDuplicate
+ }
+ }
+
+ enum Enum {
+ A(),
+ }
+
+ fn make<T, V>(_: V) -> T {
+ unimplemented!();
+ }
+
+ let with_enum = Some(Enum::A());
+ with_enum.unwrap_or_else(Enum::A);
+
+ let with_new = Some(vec![1]);
+ with_new.unwrap_or_else(Vec::new);
+
+ let with_err: Result<_, ()> = Ok(vec![1]);
+ with_err.unwrap_or_else(make);
+
+ // should not be changed
+ let with_fake_default = None::<Foo>;
+ with_fake_default.unwrap_or_else(Foo::default);
+
+ // should not be changed
+ let with_fake_default2 = None::<HasDefaultAndDuplicate>;
+ with_fake_default2.unwrap_or_else(<HasDefaultAndDuplicate>::default);
+
+ let with_real_default = None::<HasDefaultAndDuplicate>;
+ with_real_default.unwrap_or_else(<HasDefaultAndDuplicate as Default>::default);
+
+ let with_default_trait = Some(1);
+ with_default_trait.unwrap_or_else(Default::default);
+
+ let with_default_type = Some(1);
+ with_default_type.unwrap_or_else(u64::default);
+
+ let with_default_type: Option<Vec<u64>> = None;
+ with_default_type.unwrap_or_else(Vec::new);
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr b/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr
new file mode 100644
index 000000000..53e31d85e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr
@@ -0,0 +1,34 @@
+error: use of `.unwrap_or_else(..)` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:48:5
+ |
+LL | with_new.unwrap_or_else(Vec::new);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_new.unwrap_or_default()`
+ |
+ = note: `-D clippy::unwrap-or-else-default` implied by `-D warnings`
+
+error: use of `.unwrap_or_else(..)` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:62:5
+ |
+LL | with_real_default.unwrap_or_else(<HasDefaultAndDuplicate as Default>::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_real_default.unwrap_or_default()`
+
+error: use of `.unwrap_or_else(..)` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:65:5
+ |
+LL | with_default_trait.unwrap_or_else(Default::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_default_trait.unwrap_or_default()`
+
+error: use of `.unwrap_or_else(..)` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:68:5
+ |
+LL | with_default_type.unwrap_or_else(u64::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_default_type.unwrap_or_default()`
+
+error: use of `.unwrap_or_else(..)` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:71:5
+ |
+LL | with_default_type.unwrap_or_else(Vec::new);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_default_type.unwrap_or_default()`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/update-all-references.sh b/src/tools/clippy/tests/ui/update-all-references.sh
new file mode 100755
index 000000000..4391499a1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/update-all-references.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo "Please use 'cargo dev bless' instead."
diff --git a/src/tools/clippy/tests/ui/upper_case_acronyms.rs b/src/tools/clippy/tests/ui/upper_case_acronyms.rs
new file mode 100644
index 000000000..48bb9e54b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/upper_case_acronyms.rs
@@ -0,0 +1,41 @@
+#![warn(clippy::upper_case_acronyms)]
+
+struct HTTPResponse; // not linted by default, but with cfg option
+
+struct CString; // not linted
+
+enum Flags {
+ NS, // not linted
+ CWR,
+ ECE,
+ URG,
+ ACK,
+ PSH,
+ RST,
+ SYN,
+ FIN,
+}
+
+// linted with cfg option, beware that lint suggests `GccllvmSomething` instead of
+// `GccLlvmSomething`
+struct GCCLLVMSomething;
+
+// public items must not be linted
+pub struct NOWARNINGHERE;
+pub struct ALSONoWarningHERE;
+
+// enum variants should not be linted if the enum is pub
+pub enum ParseError<T> {
+ YDB(u8),
+ Utf8(std::string::FromUtf8Error),
+ Parse(T, String),
+}
+
+// private, do lint here
+enum ParseErrorPrivate<T> {
+ WASD(u8),
+ Utf8(std::string::FromUtf8Error),
+ Parse(T, String),
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/upper_case_acronyms.stderr b/src/tools/clippy/tests/ui/upper_case_acronyms.stderr
new file mode 100644
index 000000000..250b196a9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/upper_case_acronyms.stderr
@@ -0,0 +1,58 @@
+error: name `CWR` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:9:5
+ |
+LL | CWR,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Cwr`
+ |
+ = note: `-D clippy::upper-case-acronyms` implied by `-D warnings`
+
+error: name `ECE` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:10:5
+ |
+LL | ECE,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Ece`
+
+error: name `URG` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:11:5
+ |
+LL | URG,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Urg`
+
+error: name `ACK` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:12:5
+ |
+LL | ACK,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter (notice the capitalization): `Ack`
+
+error: name `PSH` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:13:5
+ |
+LL | PSH,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Psh`
+
+error: name `RST` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:14:5
+ |
+LL | RST,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Rst`
+
+error: name `SYN` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:15:5
+ |
+LL | SYN,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Syn`
+
+error: name `FIN` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:16:5
+ |
+LL | FIN,
+ | ^^^ help: consider making the acronym lowercase, except the initial letter: `Fin`
+
+error: name `WASD` contains a capitalized acronym
+ --> $DIR/upper_case_acronyms.rs:36:5
+ |
+LL | WASD(u8),
+ | ^^^^ help: consider making the acronym lowercase, except the initial letter: `Wasd`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/use_self.fixed b/src/tools/clippy/tests/ui/use_self.fixed
new file mode 100644
index 000000000..4f80aaecc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/use_self.fixed
@@ -0,0 +1,610 @@
+// run-rustfix
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::use_self)]
+#![allow(dead_code, unreachable_code)]
+#![allow(
+ clippy::should_implement_trait,
+ clippy::upper_case_acronyms,
+ clippy::from_over_into,
+ clippy::self_named_constructors
+)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+fn main() {}
+
+mod use_self {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Self {
+ Self {}
+ }
+ fn test() -> Self {
+ Self::new()
+ }
+ }
+
+ impl Default for Foo {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+}
+
+mod better {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Self {
+ Self {}
+ }
+ fn test() -> Self {
+ Self::new()
+ }
+ }
+
+ impl Default for Foo {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+}
+
+mod lifetimes {
+ struct Foo<'a> {
+ foo_str: &'a str,
+ }
+
+ impl<'a> Foo<'a> {
+ // Cannot use `Self` as return type, because the function is actually `fn foo<'b>(s: &'b str) ->
+ // Foo<'b>`
+ fn foo(s: &str) -> Foo {
+ Foo { foo_str: s }
+ }
+ // cannot replace with `Self`, because that's `Foo<'a>`
+ fn bar() -> Foo<'static> {
+ Foo { foo_str: "foo" }
+ }
+
+ // FIXME: the lint does not handle lifetimed struct
+ // `Self` should be applicable here
+ fn clone(&self) -> Foo<'a> {
+ Foo { foo_str: self.foo_str }
+ }
+ }
+}
+
+mod issue2894 {
+ trait IntoBytes {
+ fn to_bytes(self) -> Vec<u8>;
+ }
+
+ // This should not be linted
+ impl IntoBytes for u8 {
+ fn to_bytes(self) -> Vec<u8> {
+ vec![self]
+ }
+ }
+}
+
+mod existential {
+ struct Foo;
+
+ impl Foo {
+ fn bad(foos: &[Self]) -> impl Iterator<Item = &Self> {
+ foos.iter()
+ }
+
+ fn good(foos: &[Self]) -> impl Iterator<Item = &Self> {
+ foos.iter()
+ }
+ }
+}
+
+mod tuple_structs {
+ pub struct TS(i32);
+
+ impl TS {
+ pub fn ts() -> Self {
+ Self(0)
+ }
+ }
+}
+
+mod macros {
+ macro_rules! use_self_expand {
+ () => {
+ fn new() -> Foo {
+ Foo {}
+ }
+ };
+ }
+
+ struct Foo;
+
+ impl Foo {
+ use_self_expand!(); // Should not lint in local macros
+ }
+
+ #[derive(StructAUseSelf)] // Should not lint in derives
+ struct A;
+}
+
+mod nesting {
+ struct Foo;
+ impl Foo {
+ fn foo() {
+ #[allow(unused_imports)]
+ use self::Foo; // Can't use Self here
+ struct Bar {
+ foo: Foo, // Foo != Self
+ }
+
+ impl Bar {
+ fn bar() -> Self {
+ Self { foo: Foo {} }
+ }
+ }
+
+ // Can't use Self here
+ fn baz() -> Foo {
+ Foo {}
+ }
+ }
+
+ // Should lint here
+ fn baz() -> Self {
+ Self {}
+ }
+ }
+
+ enum Enum {
+ A,
+ B(u64),
+ C { field: bool },
+ }
+ impl Enum {
+ fn method() {
+ #[allow(unused_imports)]
+ use self::Enum::*; // Issue 3425
+ static STATIC: Enum = Enum::A; // Can't use Self as type
+ }
+
+ fn method2() {
+ let _ = Self::B(42);
+ let _ = Self::C { field: true };
+ let _ = Self::A;
+ }
+ }
+}
+
+mod issue3410 {
+
+ struct A;
+ struct B;
+
+ trait Trait<T> {
+ fn a(v: T) -> Self;
+ }
+
+ impl Trait<Vec<A>> for Vec<B> {
+ fn a(_: Vec<A>) -> Self {
+ unimplemented!()
+ }
+ }
+
+ impl<T> Trait<Vec<A>> for Vec<T>
+ where
+ T: Trait<B>,
+ {
+ fn a(v: Vec<A>) -> Self {
+ <Vec<B>>::a(v).into_iter().map(Trait::a).collect()
+ }
+ }
+}
+
+#[allow(clippy::no_effect, path_statements)]
+mod rustfix {
+ mod nested {
+ pub struct A;
+ }
+
+ impl nested::A {
+ const A: bool = true;
+
+ fn fun_1() {}
+
+ fn fun_2() {
+ Self::fun_1();
+ Self::A;
+
+ Self {};
+ }
+ }
+}
+
+mod issue3567 {
+ struct TestStruct;
+ impl TestStruct {
+ fn from_something() -> Self {
+ Self {}
+ }
+ }
+
+ trait Test {
+ fn test() -> TestStruct;
+ }
+
+ impl Test for TestStruct {
+ fn test() -> TestStruct {
+ Self::from_something()
+ }
+ }
+}
+
+mod paths_created_by_lowering {
+ use std::ops::Range;
+
+ struct S;
+
+ impl S {
+ const A: usize = 0;
+ const B: usize = 1;
+
+ async fn g() -> Self {
+ Self {}
+ }
+
+ fn f<'a>(&self, p: &'a [u8]) -> &'a [u8] {
+ &p[Self::A..Self::B]
+ }
+ }
+
+ trait T {
+ fn f<'a>(&self, p: &'a [u8]) -> &'a [u8];
+ }
+
+ impl T for Range<u8> {
+ fn f<'a>(&self, p: &'a [u8]) -> &'a [u8] {
+ &p[0..1]
+ }
+ }
+}
+
+// reused from #1997
+mod generics {
+ struct Foo<T> {
+ value: T,
+ }
+
+ impl<T> Foo<T> {
+ // `Self` is applicable here
+ fn foo(value: T) -> Self {
+ Self { value }
+ }
+
+ // `Cannot` use `Self` as a return type as the generic types are different
+ fn bar(value: i32) -> Foo<i32> {
+ Foo { value }
+ }
+ }
+}
+
+mod issue4140 {
+ pub struct Error<From, To> {
+ _from: From,
+ _too: To,
+ }
+
+ pub trait From<T> {
+ type From;
+ type To;
+
+ fn from(value: T) -> Self;
+ }
+
+ pub trait TryFrom<T>
+ where
+ Self: Sized,
+ {
+ type From;
+ type To;
+
+ fn try_from(value: T) -> Result<Self, Error<Self::From, Self::To>>;
+ }
+
+ // FIXME: Suggested fix results in infinite recursion.
+ // impl<F, T> TryFrom<F> for T
+ // where
+ // T: From<F>,
+ // {
+ // type From = Self::From;
+ // type To = Self::To;
+
+ // fn try_from(value: F) -> Result<Self, Error<Self::From, Self::To>> {
+ // Ok(From::from(value))
+ // }
+ // }
+
+ impl From<bool> for i64 {
+ type From = bool;
+ type To = Self;
+
+ fn from(value: bool) -> Self {
+ if value { 100 } else { 0 }
+ }
+ }
+}
+
+mod issue2843 {
+ trait Foo {
+ type Bar;
+ }
+
+ impl Foo for usize {
+ type Bar = u8;
+ }
+
+ impl<T: Foo> Foo for Option<T> {
+ type Bar = Option<T::Bar>;
+ }
+}
+
+mod issue3859 {
+ pub struct Foo;
+ pub struct Bar([usize; 3]);
+
+ impl Foo {
+ pub const BAR: usize = 3;
+
+ pub fn foo() {
+ const _X: usize = Foo::BAR;
+ // const _Y: usize = Self::BAR;
+ }
+ }
+}
+
+mod issue4305 {
+ trait Foo: 'static {}
+
+ struct Bar;
+
+ impl Foo for Bar {}
+
+ impl<T: Foo> From<T> for Box<dyn Foo> {
+ fn from(t: T) -> Self {
+ Box::new(t)
+ }
+ }
+}
+
+mod lint_at_item_level {
+ struct Foo;
+
+ #[allow(clippy::use_self)]
+ impl Foo {
+ fn new() -> Foo {
+ Foo {}
+ }
+ }
+
+ #[allow(clippy::use_self)]
+ impl Default for Foo {
+ fn default() -> Foo {
+ Foo::new()
+ }
+ }
+}
+
+mod lint_at_impl_item_level {
+ struct Foo;
+
+ impl Foo {
+ #[allow(clippy::use_self)]
+ fn new() -> Foo {
+ Foo {}
+ }
+ }
+
+ impl Default for Foo {
+ #[allow(clippy::use_self)]
+ fn default() -> Foo {
+ Foo::new()
+ }
+ }
+}
+
+mod issue4734 {
+ #[repr(C, packed)]
+ pub struct X {
+ pub x: u32,
+ }
+
+ impl From<X> for u32 {
+ fn from(c: X) -> Self {
+ unsafe { core::mem::transmute(c) }
+ }
+ }
+}
+
+mod nested_paths {
+ use std::convert::Into;
+ mod submod {
+ pub struct B;
+ pub struct C;
+
+ impl Into<C> for B {
+ fn into(self) -> C {
+ C {}
+ }
+ }
+ }
+
+ struct A<T> {
+ t: T,
+ }
+
+ impl<T> A<T> {
+ fn new<V: Into<T>>(v: V) -> Self {
+ Self { t: Into::into(v) }
+ }
+ }
+
+ impl A<submod::C> {
+ fn test() -> Self {
+ Self::new::<submod::B>(submod::B {})
+ }
+ }
+}
+
+mod issue6818 {
+ #[derive(serde::Deserialize)]
+ struct A {
+ a: i32,
+ }
+}
+
+mod issue7206 {
+ struct MyStruct<const C: char>;
+ impl From<MyStruct<'a'>> for MyStruct<'b'> {
+ fn from(_s: MyStruct<'a'>) -> Self {
+ Self
+ }
+ }
+
+ // keep linting non-`Const` generic args
+ struct S<'a> {
+ inner: &'a str,
+ }
+
+ struct S2<T> {
+ inner: T,
+ }
+
+ impl<T> S2<T> {
+ fn new() -> Self {
+ unimplemented!();
+ }
+ }
+
+ impl<'a> S2<S<'a>> {
+ fn new_again() -> Self {
+ Self::new()
+ }
+ }
+}
+
+mod self_is_ty_param {
+ trait Trait {
+ type Type;
+ type Hi;
+
+ fn test();
+ }
+
+ impl<I> Trait for I
+ where
+ I: Iterator,
+ I::Item: Trait, // changing this to Self would require <Self as Iterator>
+ {
+ type Type = I;
+ type Hi = I::Item;
+
+ fn test() {
+ let _: I::Item;
+ let _: I; // this could lint, but is questionable
+ }
+ }
+}
+
+mod use_self_in_pat {
+ enum Foo {
+ Bar,
+ Baz,
+ }
+
+ impl Foo {
+ fn do_stuff(self) {
+ match self {
+ Self::Bar => unimplemented!(),
+ Self::Baz => unimplemented!(),
+ }
+ match Some(1) {
+ Some(_) => unimplemented!(),
+ None => unimplemented!(),
+ }
+ if let Self::Bar = self {
+ unimplemented!()
+ }
+ }
+ }
+}
+
+mod issue8845 {
+ pub enum Something {
+ Num(u8),
+ TupleNums(u8, u8),
+ StructNums { one: u8, two: u8 },
+ }
+
+ struct Foo(u8);
+
+ struct Bar {
+ x: u8,
+ y: usize,
+ }
+
+ impl Something {
+ fn get_value(&self) -> u8 {
+ match self {
+ Self::Num(n) => *n,
+ Self::TupleNums(n, _m) => *n,
+ Self::StructNums { one, two: _ } => *one,
+ }
+ }
+
+ fn use_crate(&self) -> u8 {
+ match self {
+ Self::Num(n) => *n,
+ Self::TupleNums(n, _m) => *n,
+ Self::StructNums { one, two: _ } => *one,
+ }
+ }
+
+ fn imported_values(&self) -> u8 {
+ use Something::*;
+ match self {
+ Num(n) => *n,
+ TupleNums(n, _m) => *n,
+ StructNums { one, two: _ } => *one,
+ }
+ }
+ }
+
+ impl Foo {
+ fn get_value(&self) -> u8 {
+ let Self(x) = self;
+ *x
+ }
+
+ fn use_crate(&self) -> u8 {
+ let Self(x) = self;
+ *x
+ }
+ }
+
+ impl Bar {
+ fn get_value(&self) -> u8 {
+ let Self { x, .. } = self;
+ *x
+ }
+
+ fn use_crate(&self) -> u8 {
+ let Self { x, .. } = self;
+ *x
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/use_self.rs b/src/tools/clippy/tests/ui/use_self.rs
new file mode 100644
index 000000000..52da72db5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/use_self.rs
@@ -0,0 +1,610 @@
+// run-rustfix
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::use_self)]
+#![allow(dead_code, unreachable_code)]
+#![allow(
+ clippy::should_implement_trait,
+ clippy::upper_case_acronyms,
+ clippy::from_over_into,
+ clippy::self_named_constructors
+)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+fn main() {}
+
+mod use_self {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo {}
+ }
+ fn test() -> Foo {
+ Foo::new()
+ }
+ }
+
+ impl Default for Foo {
+ fn default() -> Foo {
+ Foo::new()
+ }
+ }
+}
+
+mod better {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Self {
+ Self {}
+ }
+ fn test() -> Self {
+ Self::new()
+ }
+ }
+
+ impl Default for Foo {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+}
+
+mod lifetimes {
+ struct Foo<'a> {
+ foo_str: &'a str,
+ }
+
+ impl<'a> Foo<'a> {
+ // Cannot use `Self` as return type, because the function is actually `fn foo<'b>(s: &'b str) ->
+ // Foo<'b>`
+ fn foo(s: &str) -> Foo {
+ Foo { foo_str: s }
+ }
+ // cannot replace with `Self`, because that's `Foo<'a>`
+ fn bar() -> Foo<'static> {
+ Foo { foo_str: "foo" }
+ }
+
+ // FIXME: the lint does not handle lifetimed struct
+ // `Self` should be applicable here
+ fn clone(&self) -> Foo<'a> {
+ Foo { foo_str: self.foo_str }
+ }
+ }
+}
+
+mod issue2894 {
+ trait IntoBytes {
+ fn to_bytes(self) -> Vec<u8>;
+ }
+
+ // This should not be linted
+ impl IntoBytes for u8 {
+ fn to_bytes(self) -> Vec<u8> {
+ vec![self]
+ }
+ }
+}
+
+mod existential {
+ struct Foo;
+
+ impl Foo {
+ fn bad(foos: &[Foo]) -> impl Iterator<Item = &Foo> {
+ foos.iter()
+ }
+
+ fn good(foos: &[Self]) -> impl Iterator<Item = &Self> {
+ foos.iter()
+ }
+ }
+}
+
+mod tuple_structs {
+ pub struct TS(i32);
+
+ impl TS {
+ pub fn ts() -> Self {
+ TS(0)
+ }
+ }
+}
+
+mod macros {
+ macro_rules! use_self_expand {
+ () => {
+ fn new() -> Foo {
+ Foo {}
+ }
+ };
+ }
+
+ struct Foo;
+
+ impl Foo {
+ use_self_expand!(); // Should not lint in local macros
+ }
+
+ #[derive(StructAUseSelf)] // Should not lint in derives
+ struct A;
+}
+
+mod nesting {
+ struct Foo;
+ impl Foo {
+ fn foo() {
+ #[allow(unused_imports)]
+ use self::Foo; // Can't use Self here
+ struct Bar {
+ foo: Foo, // Foo != Self
+ }
+
+ impl Bar {
+ fn bar() -> Bar {
+ Bar { foo: Foo {} }
+ }
+ }
+
+ // Can't use Self here
+ fn baz() -> Foo {
+ Foo {}
+ }
+ }
+
+ // Should lint here
+ fn baz() -> Foo {
+ Foo {}
+ }
+ }
+
+ enum Enum {
+ A,
+ B(u64),
+ C { field: bool },
+ }
+ impl Enum {
+ fn method() {
+ #[allow(unused_imports)]
+ use self::Enum::*; // Issue 3425
+ static STATIC: Enum = Enum::A; // Can't use Self as type
+ }
+
+ fn method2() {
+ let _ = Enum::B(42);
+ let _ = Enum::C { field: true };
+ let _ = Enum::A;
+ }
+ }
+}
+
+mod issue3410 {
+
+ struct A;
+ struct B;
+
+ trait Trait<T> {
+ fn a(v: T) -> Self;
+ }
+
+ impl Trait<Vec<A>> for Vec<B> {
+ fn a(_: Vec<A>) -> Self {
+ unimplemented!()
+ }
+ }
+
+ impl<T> Trait<Vec<A>> for Vec<T>
+ where
+ T: Trait<B>,
+ {
+ fn a(v: Vec<A>) -> Self {
+ <Vec<B>>::a(v).into_iter().map(Trait::a).collect()
+ }
+ }
+}
+
+#[allow(clippy::no_effect, path_statements)]
+mod rustfix {
+ mod nested {
+ pub struct A;
+ }
+
+ impl nested::A {
+ const A: bool = true;
+
+ fn fun_1() {}
+
+ fn fun_2() {
+ nested::A::fun_1();
+ nested::A::A;
+
+ nested::A {};
+ }
+ }
+}
+
+mod issue3567 {
+ struct TestStruct;
+ impl TestStruct {
+ fn from_something() -> Self {
+ Self {}
+ }
+ }
+
+ trait Test {
+ fn test() -> TestStruct;
+ }
+
+ impl Test for TestStruct {
+ fn test() -> TestStruct {
+ TestStruct::from_something()
+ }
+ }
+}
+
+mod paths_created_by_lowering {
+ use std::ops::Range;
+
+ struct S;
+
+ impl S {
+ const A: usize = 0;
+ const B: usize = 1;
+
+ async fn g() -> S {
+ S {}
+ }
+
+ fn f<'a>(&self, p: &'a [u8]) -> &'a [u8] {
+ &p[S::A..S::B]
+ }
+ }
+
+ trait T {
+ fn f<'a>(&self, p: &'a [u8]) -> &'a [u8];
+ }
+
+ impl T for Range<u8> {
+ fn f<'a>(&self, p: &'a [u8]) -> &'a [u8] {
+ &p[0..1]
+ }
+ }
+}
+
+// reused from #1997
+mod generics {
+ struct Foo<T> {
+ value: T,
+ }
+
+ impl<T> Foo<T> {
+ // `Self` is applicable here
+ fn foo(value: T) -> Foo<T> {
+ Foo::<T> { value }
+ }
+
+ // `Cannot` use `Self` as a return type as the generic types are different
+ fn bar(value: i32) -> Foo<i32> {
+ Foo { value }
+ }
+ }
+}
+
+mod issue4140 {
+ pub struct Error<From, To> {
+ _from: From,
+ _too: To,
+ }
+
+ pub trait From<T> {
+ type From;
+ type To;
+
+ fn from(value: T) -> Self;
+ }
+
+ pub trait TryFrom<T>
+ where
+ Self: Sized,
+ {
+ type From;
+ type To;
+
+ fn try_from(value: T) -> Result<Self, Error<Self::From, Self::To>>;
+ }
+
+ // FIXME: Suggested fix results in infinite recursion.
+ // impl<F, T> TryFrom<F> for T
+ // where
+ // T: From<F>,
+ // {
+ // type From = Self::From;
+ // type To = Self::To;
+
+ // fn try_from(value: F) -> Result<Self, Error<Self::From, Self::To>> {
+ // Ok(From::from(value))
+ // }
+ // }
+
+ impl From<bool> for i64 {
+ type From = bool;
+ type To = Self;
+
+ fn from(value: bool) -> Self {
+ if value { 100 } else { 0 }
+ }
+ }
+}
+
+mod issue2843 {
+ trait Foo {
+ type Bar;
+ }
+
+ impl Foo for usize {
+ type Bar = u8;
+ }
+
+ impl<T: Foo> Foo for Option<T> {
+ type Bar = Option<T::Bar>;
+ }
+}
+
+mod issue3859 {
+ pub struct Foo;
+ pub struct Bar([usize; 3]);
+
+ impl Foo {
+ pub const BAR: usize = 3;
+
+ pub fn foo() {
+ const _X: usize = Foo::BAR;
+ // const _Y: usize = Self::BAR;
+ }
+ }
+}
+
+mod issue4305 {
+ trait Foo: 'static {}
+
+ struct Bar;
+
+ impl Foo for Bar {}
+
+ impl<T: Foo> From<T> for Box<dyn Foo> {
+ fn from(t: T) -> Self {
+ Box::new(t)
+ }
+ }
+}
+
+mod lint_at_item_level {
+ struct Foo;
+
+ #[allow(clippy::use_self)]
+ impl Foo {
+ fn new() -> Foo {
+ Foo {}
+ }
+ }
+
+ #[allow(clippy::use_self)]
+ impl Default for Foo {
+ fn default() -> Foo {
+ Foo::new()
+ }
+ }
+}
+
+mod lint_at_impl_item_level {
+ struct Foo;
+
+ impl Foo {
+ #[allow(clippy::use_self)]
+ fn new() -> Foo {
+ Foo {}
+ }
+ }
+
+ impl Default for Foo {
+ #[allow(clippy::use_self)]
+ fn default() -> Foo {
+ Foo::new()
+ }
+ }
+}
+
+mod issue4734 {
+ #[repr(C, packed)]
+ pub struct X {
+ pub x: u32,
+ }
+
+ impl From<X> for u32 {
+ fn from(c: X) -> Self {
+ unsafe { core::mem::transmute(c) }
+ }
+ }
+}
+
+mod nested_paths {
+ use std::convert::Into;
+ mod submod {
+ pub struct B;
+ pub struct C;
+
+ impl Into<C> for B {
+ fn into(self) -> C {
+ C {}
+ }
+ }
+ }
+
+ struct A<T> {
+ t: T,
+ }
+
+ impl<T> A<T> {
+ fn new<V: Into<T>>(v: V) -> Self {
+ Self { t: Into::into(v) }
+ }
+ }
+
+ impl A<submod::C> {
+ fn test() -> Self {
+ A::new::<submod::B>(submod::B {})
+ }
+ }
+}
+
+mod issue6818 {
+ #[derive(serde::Deserialize)]
+ struct A {
+ a: i32,
+ }
+}
+
+mod issue7206 {
+ struct MyStruct<const C: char>;
+ impl From<MyStruct<'a'>> for MyStruct<'b'> {
+ fn from(_s: MyStruct<'a'>) -> Self {
+ Self
+ }
+ }
+
+ // keep linting non-`Const` generic args
+ struct S<'a> {
+ inner: &'a str,
+ }
+
+ struct S2<T> {
+ inner: T,
+ }
+
+ impl<T> S2<T> {
+ fn new() -> Self {
+ unimplemented!();
+ }
+ }
+
+ impl<'a> S2<S<'a>> {
+ fn new_again() -> Self {
+ S2::new()
+ }
+ }
+}
+
+mod self_is_ty_param {
+ trait Trait {
+ type Type;
+ type Hi;
+
+ fn test();
+ }
+
+ impl<I> Trait for I
+ where
+ I: Iterator,
+ I::Item: Trait, // changing this to Self would require <Self as Iterator>
+ {
+ type Type = I;
+ type Hi = I::Item;
+
+ fn test() {
+ let _: I::Item;
+ let _: I; // this could lint, but is questionable
+ }
+ }
+}
+
+mod use_self_in_pat {
+ enum Foo {
+ Bar,
+ Baz,
+ }
+
+ impl Foo {
+ fn do_stuff(self) {
+ match self {
+ Foo::Bar => unimplemented!(),
+ Foo::Baz => unimplemented!(),
+ }
+ match Some(1) {
+ Some(_) => unimplemented!(),
+ None => unimplemented!(),
+ }
+ if let Foo::Bar = self {
+ unimplemented!()
+ }
+ }
+ }
+}
+
+mod issue8845 {
+ pub enum Something {
+ Num(u8),
+ TupleNums(u8, u8),
+ StructNums { one: u8, two: u8 },
+ }
+
+ struct Foo(u8);
+
+ struct Bar {
+ x: u8,
+ y: usize,
+ }
+
+ impl Something {
+ fn get_value(&self) -> u8 {
+ match self {
+ Something::Num(n) => *n,
+ Something::TupleNums(n, _m) => *n,
+ Something::StructNums { one, two: _ } => *one,
+ }
+ }
+
+ fn use_crate(&self) -> u8 {
+ match self {
+ crate::issue8845::Something::Num(n) => *n,
+ crate::issue8845::Something::TupleNums(n, _m) => *n,
+ crate::issue8845::Something::StructNums { one, two: _ } => *one,
+ }
+ }
+
+ fn imported_values(&self) -> u8 {
+ use Something::*;
+ match self {
+ Num(n) => *n,
+ TupleNums(n, _m) => *n,
+ StructNums { one, two: _ } => *one,
+ }
+ }
+ }
+
+ impl Foo {
+ fn get_value(&self) -> u8 {
+ let Foo(x) = self;
+ *x
+ }
+
+ fn use_crate(&self) -> u8 {
+ let crate::issue8845::Foo(x) = self;
+ *x
+ }
+ }
+
+ impl Bar {
+ fn get_value(&self) -> u8 {
+ let Bar { x, .. } = self;
+ *x
+ }
+
+ fn use_crate(&self) -> u8 {
+ let crate::issue8845::Bar { x, .. } = self;
+ *x
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/use_self.stderr b/src/tools/clippy/tests/ui/use_self.stderr
new file mode 100644
index 000000000..f06bb959b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/use_self.stderr
@@ -0,0 +1,250 @@
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:22:21
+ |
+LL | fn new() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+ = note: `-D clippy::use-self` implied by `-D warnings`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:23:13
+ |
+LL | Foo {}
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:25:22
+ |
+LL | fn test() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:26:13
+ |
+LL | Foo::new()
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:31:25
+ |
+LL | fn default() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:32:13
+ |
+LL | Foo::new()
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:97:24
+ |
+LL | fn bad(foos: &[Foo]) -> impl Iterator<Item = &Foo> {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:97:55
+ |
+LL | fn bad(foos: &[Foo]) -> impl Iterator<Item = &Foo> {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:112:13
+ |
+LL | TS(0)
+ | ^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:147:29
+ |
+LL | fn bar() -> Bar {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:148:21
+ |
+LL | Bar { foo: Foo {} }
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:159:21
+ |
+LL | fn baz() -> Foo {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:160:13
+ |
+LL | Foo {}
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:177:21
+ |
+LL | let _ = Enum::B(42);
+ | ^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:178:21
+ |
+LL | let _ = Enum::C { field: true };
+ | ^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:179:21
+ |
+LL | let _ = Enum::A;
+ | ^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:221:13
+ |
+LL | nested::A::fun_1();
+ | ^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:222:13
+ |
+LL | nested::A::A;
+ | ^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:224:13
+ |
+LL | nested::A {};
+ | ^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:243:13
+ |
+LL | TestStruct::from_something()
+ | ^^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:257:25
+ |
+LL | async fn g() -> S {
+ | ^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:258:13
+ |
+LL | S {}
+ | ^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:262:16
+ |
+LL | &p[S::A..S::B]
+ | ^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:262:22
+ |
+LL | &p[S::A..S::B]
+ | ^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:285:29
+ |
+LL | fn foo(value: T) -> Foo<T> {
+ | ^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:286:13
+ |
+LL | Foo::<T> { value }
+ | ^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:458:13
+ |
+LL | A::new::<submod::B>(submod::B {})
+ | ^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:495:13
+ |
+LL | S2::new()
+ | ^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:532:17
+ |
+LL | Foo::Bar => unimplemented!(),
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:533:17
+ |
+LL | Foo::Baz => unimplemented!(),
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:539:20
+ |
+LL | if let Foo::Bar = self {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:563:17
+ |
+LL | Something::Num(n) => *n,
+ | ^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:564:17
+ |
+LL | Something::TupleNums(n, _m) => *n,
+ | ^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:565:17
+ |
+LL | Something::StructNums { one, two: _ } => *one,
+ | ^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:571:17
+ |
+LL | crate::issue8845::Something::Num(n) => *n,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:572:17
+ |
+LL | crate::issue8845::Something::TupleNums(n, _m) => *n,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:573:17
+ |
+LL | crate::issue8845::Something::StructNums { one, two: _ } => *one,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:589:17
+ |
+LL | let Foo(x) = self;
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:594:17
+ |
+LL | let crate::issue8845::Foo(x) = self;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:601:17
+ |
+LL | let Bar { x, .. } = self;
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self.rs:606:17
+ |
+LL | let crate::issue8845::Bar { x, .. } = self;
+ | ^^^^^^^^^^^^^^^^^^^^^ help: use the applicable keyword: `Self`
+
+error: aborting due to 41 previous errors
+
diff --git a/src/tools/clippy/tests/ui/use_self_trait.fixed b/src/tools/clippy/tests/ui/use_self_trait.fixed
new file mode 100644
index 000000000..9bcd692fb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/use_self_trait.fixed
@@ -0,0 +1,115 @@
+// run-rustfix
+
+#![warn(clippy::use_self)]
+#![allow(dead_code)]
+#![allow(clippy::should_implement_trait, clippy::boxed_local)]
+
+use std::ops::Mul;
+
+trait SelfTrait {
+ fn refs(p1: &Self) -> &Self;
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self;
+ fn mut_refs(p1: &mut Self) -> &mut Self;
+ fn nested(p1: Box<Self>, p2: (&u8, &Self));
+ fn vals(r: Self) -> Self;
+}
+
+#[derive(Default)]
+struct Bad;
+
+impl SelfTrait for Bad {
+ fn refs(p1: &Self) -> &Self {
+ p1
+ }
+
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self {
+ p1
+ }
+
+ fn mut_refs(p1: &mut Self) -> &mut Self {
+ p1
+ }
+
+ fn nested(_p1: Box<Self>, _p2: (&u8, &Self)) {}
+
+ fn vals(_: Self) -> Self {
+ Self::default()
+ }
+}
+
+impl Mul for Bad {
+ type Output = Self;
+
+ fn mul(self, rhs: Self) -> Self {
+ rhs
+ }
+}
+
+impl Clone for Bad {
+ fn clone(&self) -> Self {
+ // FIXME: applicable here
+ Bad
+ }
+}
+
+#[derive(Default)]
+struct Good;
+
+impl SelfTrait for Good {
+ fn refs(p1: &Self) -> &Self {
+ p1
+ }
+
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self {
+ p1
+ }
+
+ fn mut_refs(p1: &mut Self) -> &mut Self {
+ p1
+ }
+
+ fn nested(_p1: Box<Self>, _p2: (&u8, &Self)) {}
+
+ fn vals(_: Self) -> Self {
+ Self::default()
+ }
+}
+
+impl Mul for Good {
+ type Output = Self;
+
+ fn mul(self, rhs: Self) -> Self {
+ rhs
+ }
+}
+
+trait NameTrait {
+ fn refs(p1: &u8) -> &u8;
+ fn ref_refs<'a>(p1: &'a &'a u8) -> &'a &'a u8;
+ fn mut_refs(p1: &mut u8) -> &mut u8;
+ fn nested(p1: Box<u8>, p2: (&u8, &u8));
+ fn vals(p1: u8) -> u8;
+}
+
+// Using `Self` instead of the type name is OK
+impl NameTrait for u8 {
+ fn refs(p1: &Self) -> &Self {
+ p1
+ }
+
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self {
+ p1
+ }
+
+ fn mut_refs(p1: &mut Self) -> &mut Self {
+ p1
+ }
+
+ fn nested(_p1: Box<Self>, _p2: (&Self, &Self)) {}
+
+ fn vals(_: Self) -> Self {
+ Self::default()
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/use_self_trait.rs b/src/tools/clippy/tests/ui/use_self_trait.rs
new file mode 100644
index 000000000..de305d40f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/use_self_trait.rs
@@ -0,0 +1,115 @@
+// run-rustfix
+
+#![warn(clippy::use_self)]
+#![allow(dead_code)]
+#![allow(clippy::should_implement_trait, clippy::boxed_local)]
+
+use std::ops::Mul;
+
+trait SelfTrait {
+ fn refs(p1: &Self) -> &Self;
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self;
+ fn mut_refs(p1: &mut Self) -> &mut Self;
+ fn nested(p1: Box<Self>, p2: (&u8, &Self));
+ fn vals(r: Self) -> Self;
+}
+
+#[derive(Default)]
+struct Bad;
+
+impl SelfTrait for Bad {
+ fn refs(p1: &Bad) -> &Bad {
+ p1
+ }
+
+ fn ref_refs<'a>(p1: &'a &'a Bad) -> &'a &'a Bad {
+ p1
+ }
+
+ fn mut_refs(p1: &mut Bad) -> &mut Bad {
+ p1
+ }
+
+ fn nested(_p1: Box<Bad>, _p2: (&u8, &Bad)) {}
+
+ fn vals(_: Bad) -> Bad {
+ Bad::default()
+ }
+}
+
+impl Mul for Bad {
+ type Output = Bad;
+
+ fn mul(self, rhs: Bad) -> Bad {
+ rhs
+ }
+}
+
+impl Clone for Bad {
+ fn clone(&self) -> Self {
+ // FIXME: applicable here
+ Bad
+ }
+}
+
+#[derive(Default)]
+struct Good;
+
+impl SelfTrait for Good {
+ fn refs(p1: &Self) -> &Self {
+ p1
+ }
+
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self {
+ p1
+ }
+
+ fn mut_refs(p1: &mut Self) -> &mut Self {
+ p1
+ }
+
+ fn nested(_p1: Box<Self>, _p2: (&u8, &Self)) {}
+
+ fn vals(_: Self) -> Self {
+ Self::default()
+ }
+}
+
+impl Mul for Good {
+ type Output = Self;
+
+ fn mul(self, rhs: Self) -> Self {
+ rhs
+ }
+}
+
+trait NameTrait {
+ fn refs(p1: &u8) -> &u8;
+ fn ref_refs<'a>(p1: &'a &'a u8) -> &'a &'a u8;
+ fn mut_refs(p1: &mut u8) -> &mut u8;
+ fn nested(p1: Box<u8>, p2: (&u8, &u8));
+ fn vals(p1: u8) -> u8;
+}
+
+// Using `Self` instead of the type name is OK
+impl NameTrait for u8 {
+ fn refs(p1: &Self) -> &Self {
+ p1
+ }
+
+ fn ref_refs<'a>(p1: &'a &'a Self) -> &'a &'a Self {
+ p1
+ }
+
+ fn mut_refs(p1: &mut Self) -> &mut Self {
+ p1
+ }
+
+ fn nested(_p1: Box<Self>, _p2: (&Self, &Self)) {}
+
+ fn vals(_: Self) -> Self {
+ Self::default()
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/use_self_trait.stderr b/src/tools/clippy/tests/ui/use_self_trait.stderr
new file mode 100644
index 000000000..55af3ff2a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/use_self_trait.stderr
@@ -0,0 +1,88 @@
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:21:18
+ |
+LL | fn refs(p1: &Bad) -> &Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+ |
+ = note: `-D clippy::use-self` implied by `-D warnings`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:21:27
+ |
+LL | fn refs(p1: &Bad) -> &Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:25:33
+ |
+LL | fn ref_refs<'a>(p1: &'a &'a Bad) -> &'a &'a Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:25:49
+ |
+LL | fn ref_refs<'a>(p1: &'a &'a Bad) -> &'a &'a Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:29:26
+ |
+LL | fn mut_refs(p1: &mut Bad) -> &mut Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:29:39
+ |
+LL | fn mut_refs(p1: &mut Bad) -> &mut Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:33:24
+ |
+LL | fn nested(_p1: Box<Bad>, _p2: (&u8, &Bad)) {}
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:33:42
+ |
+LL | fn nested(_p1: Box<Bad>, _p2: (&u8, &Bad)) {}
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:35:16
+ |
+LL | fn vals(_: Bad) -> Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:35:24
+ |
+LL | fn vals(_: Bad) -> Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:36:9
+ |
+LL | Bad::default()
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:41:19
+ |
+LL | type Output = Bad;
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:43:23
+ |
+LL | fn mul(self, rhs: Bad) -> Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: unnecessary structure name repetition
+ --> $DIR/use_self_trait.rs:43:31
+ |
+LL | fn mul(self, rhs: Bad) -> Bad {
+ | ^^^ help: use the applicable keyword: `Self`
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/used_underscore_binding.rs b/src/tools/clippy/tests/ui/used_underscore_binding.rs
new file mode 100644
index 000000000..d20977d55
--- /dev/null
+++ b/src/tools/clippy/tests/ui/used_underscore_binding.rs
@@ -0,0 +1,124 @@
+// aux-build:proc_macro_derive.rs
+
+#![feature(rustc_private)]
+#![warn(clippy::all)]
+#![allow(clippy::blacklisted_name, clippy::eq_op)]
+#![warn(clippy::used_underscore_binding)]
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+// This should not trigger the lint. There's an underscore binding inside the external derive that
+// would trigger the `used_underscore_binding` lint.
+#[derive(DeriveSomething)]
+struct Baz;
+
+macro_rules! test_macro {
+ () => {{
+ let _foo = 42;
+ _foo + 1
+ }};
+}
+
+/// Tests that we lint if we use a binding with a single leading underscore
+fn prefix_underscore(_foo: u32) -> u32 {
+ _foo + 1
+}
+
+/// Tests that we lint if we use a `_`-variable defined outside within a macro expansion
+fn in_macro_or_desugar(_foo: u32) {
+ println!("{}", _foo);
+ assert_eq!(_foo, _foo);
+
+ test_macro!() + 1;
+}
+
+// Struct for testing use of fields prefixed with an underscore
+struct StructFieldTest {
+ _underscore_field: u32,
+}
+
+/// Tests that we lint the use of a struct field which is prefixed with an underscore
+fn in_struct_field() {
+ let mut s = StructFieldTest { _underscore_field: 0 };
+ s._underscore_field += 1;
+}
+
+/// Tests that we do not lint if the struct field is used in code created with derive.
+#[derive(Clone, Debug)]
+pub struct UnderscoreInStruct {
+ _foo: u32,
+}
+
+/// Tests that we do not lint if the underscore is not a prefix
+fn non_prefix_underscore(some_foo: u32) -> u32 {
+ some_foo + 1
+}
+
+/// Tests that we do not lint if we do not use the binding (simple case)
+fn unused_underscore_simple(_foo: u32) -> u32 {
+ 1
+}
+
+/// Tests that we do not lint if we do not use the binding (complex case). This checks for
+/// compatibility with the built-in `unused_variables` lint.
+fn unused_underscore_complex(mut _foo: u32) -> u32 {
+ _foo += 1;
+ _foo = 2;
+ 1
+}
+
+/// Test that we do not lint for multiple underscores
+fn multiple_underscores(__foo: u32) -> u32 {
+ __foo + 1
+}
+
+// Non-variable bindings with preceding underscore
+fn _fn_test() {}
+struct _StructTest;
+enum _EnumTest {
+ _Empty,
+ _Value(_StructTest),
+}
+
+/// Tests that we do not lint for non-variable bindings
+fn non_variables() {
+ _fn_test();
+ let _s = _StructTest;
+ let _e = match _EnumTest::_Value(_StructTest) {
+ _EnumTest::_Empty => 0,
+ _EnumTest::_Value(_st) => 1,
+ };
+ let f = _fn_test;
+ f();
+}
+
+// Tests that we do not lint if the binding comes from await desugaring,
+// but we do lint the awaited expression. See issue 5360.
+async fn await_desugaring() {
+ async fn foo() {}
+ fn uses_i(_i: i32) {}
+
+ foo().await;
+ ({
+ let _i = 5;
+ uses_i(_i);
+ foo()
+ })
+ .await
+}
+
+fn main() {
+ let foo = 0u32;
+ // tests of unused_underscore lint
+ let _ = prefix_underscore(foo);
+ in_macro_or_desugar(foo);
+ in_struct_field();
+ // possible false positives
+ let _ = non_prefix_underscore(foo);
+ let _ = unused_underscore_simple(foo);
+ let _ = unused_underscore_complex(foo);
+ let _ = multiple_underscores(foo);
+ non_variables();
+ await_desugaring();
+}
diff --git a/src/tools/clippy/tests/ui/used_underscore_binding.stderr b/src/tools/clippy/tests/ui/used_underscore_binding.stderr
new file mode 100644
index 000000000..61a9161d2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/used_underscore_binding.stderr
@@ -0,0 +1,40 @@
+error: used binding `_foo` which is prefixed with an underscore. A leading underscore signals that a binding will not be used
+ --> $DIR/used_underscore_binding.rs:25:5
+ |
+LL | _foo + 1
+ | ^^^^
+ |
+ = note: `-D clippy::used-underscore-binding` implied by `-D warnings`
+
+error: used binding `_foo` which is prefixed with an underscore. A leading underscore signals that a binding will not be used
+ --> $DIR/used_underscore_binding.rs:30:20
+ |
+LL | println!("{}", _foo);
+ | ^^^^
+
+error: used binding `_foo` which is prefixed with an underscore. A leading underscore signals that a binding will not be used
+ --> $DIR/used_underscore_binding.rs:31:16
+ |
+LL | assert_eq!(_foo, _foo);
+ | ^^^^
+
+error: used binding `_foo` which is prefixed with an underscore. A leading underscore signals that a binding will not be used
+ --> $DIR/used_underscore_binding.rs:31:22
+ |
+LL | assert_eq!(_foo, _foo);
+ | ^^^^
+
+error: used binding `_underscore_field` which is prefixed with an underscore. A leading underscore signals that a binding will not be used
+ --> $DIR/used_underscore_binding.rs:44:5
+ |
+LL | s._underscore_field += 1;
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: used binding `_i` which is prefixed with an underscore. A leading underscore signals that a binding will not be used
+ --> $DIR/used_underscore_binding.rs:105:16
+ |
+LL | uses_i(_i);
+ | ^^
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/useful_asref.rs b/src/tools/clippy/tests/ui/useful_asref.rs
new file mode 100644
index 000000000..a9f0170a7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useful_asref.rs
@@ -0,0 +1,13 @@
+#![deny(clippy::useless_asref)]
+
+trait Trait {
+ fn as_ptr(&self);
+}
+
+impl<'a> Trait for &'a [u8] {
+ fn as_ptr(&self) {
+ self.as_ref().as_ptr();
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/useless_asref.fixed b/src/tools/clippy/tests/ui/useless_asref.fixed
new file mode 100644
index 000000000..90cb8945e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_asref.fixed
@@ -0,0 +1,136 @@
+// run-rustfix
+
+#![deny(clippy::useless_asref)]
+#![allow(clippy::explicit_auto_deref)]
+
+use std::fmt::Debug;
+
+struct FakeAsRef;
+
+#[allow(clippy::should_implement_trait)]
+impl FakeAsRef {
+ fn as_ref(&self) -> &Self {
+ self
+ }
+}
+
+struct MoreRef;
+
+impl<'a, 'b, 'c> AsRef<&'a &'b &'c MoreRef> for MoreRef {
+ fn as_ref(&self) -> &&'a &'b &'c MoreRef {
+ &&&&MoreRef
+ }
+}
+
+fn foo_rstr(x: &str) {
+ println!("{:?}", x);
+}
+fn foo_rslice(x: &[i32]) {
+ println!("{:?}", x);
+}
+fn foo_mrslice(x: &mut [i32]) {
+ println!("{:?}", x);
+}
+fn foo_rrrrmr(_: &&&&MoreRef) {
+ println!("so many refs");
+}
+
+fn not_ok() {
+ let rstr: &str = "hello";
+ let mut mrslice: &mut [i32] = &mut [1, 2, 3];
+
+ {
+ let rslice: &[i32] = &*mrslice;
+ foo_rstr(rstr);
+ foo_rstr(rstr);
+ foo_rslice(rslice);
+ foo_rslice(rslice);
+ }
+ {
+ foo_mrslice(mrslice);
+ foo_mrslice(mrslice);
+ foo_rslice(mrslice);
+ foo_rslice(mrslice);
+ }
+
+ {
+ let rrrrrstr = &&&&rstr;
+ let rrrrrslice = &&&&&*mrslice;
+ foo_rslice(rrrrrslice);
+ foo_rslice(rrrrrslice);
+ foo_rstr(rrrrrstr);
+ foo_rstr(rrrrrstr);
+ }
+ {
+ let mrrrrrslice = &mut &mut &mut &mut mrslice;
+ foo_mrslice(mrrrrrslice);
+ foo_mrslice(mrrrrrslice);
+ foo_rslice(mrrrrrslice);
+ foo_rslice(mrrrrrslice);
+ }
+ #[allow(unused_parens, clippy::double_parens, clippy::needless_borrow)]
+ foo_rrrrmr((&&&&MoreRef));
+
+ generic_not_ok(mrslice);
+ generic_ok(mrslice);
+}
+
+fn ok() {
+ let string = "hello".to_owned();
+ let mut arr = [1, 2, 3];
+ let mut vec = vec![1, 2, 3];
+
+ {
+ foo_rstr(string.as_ref());
+ foo_rslice(arr.as_ref());
+ foo_rslice(vec.as_ref());
+ }
+ {
+ foo_mrslice(arr.as_mut());
+ foo_mrslice(vec.as_mut());
+ }
+
+ {
+ let rrrrstring = &&&&string;
+ let rrrrarr = &&&&arr;
+ let rrrrvec = &&&&vec;
+ foo_rstr(rrrrstring.as_ref());
+ foo_rslice(rrrrarr.as_ref());
+ foo_rslice(rrrrvec.as_ref());
+ }
+ {
+ let mrrrrarr = &mut &mut &mut &mut arr;
+ let mrrrrvec = &mut &mut &mut &mut vec;
+ foo_mrslice(mrrrrarr.as_mut());
+ foo_mrslice(mrrrrvec.as_mut());
+ }
+ FakeAsRef.as_ref();
+ foo_rrrrmr(MoreRef.as_ref());
+
+ generic_not_ok(arr.as_mut());
+ generic_ok(&mut arr);
+}
+
+fn foo_mrt<T: Debug + ?Sized>(t: &mut T) {
+ println!("{:?}", t);
+}
+fn foo_rt<T: Debug + ?Sized>(t: &T) {
+ println!("{:?}", t);
+}
+
+fn generic_not_ok<T: AsMut<T> + AsRef<T> + Debug + ?Sized>(mrt: &mut T) {
+ foo_mrt(mrt);
+ foo_mrt(mrt);
+ foo_rt(mrt);
+ foo_rt(mrt);
+}
+
+fn generic_ok<U: AsMut<T> + AsRef<T> + ?Sized, T: Debug + ?Sized>(mru: &mut U) {
+ foo_mrt(mru.as_mut());
+ foo_rt(mru.as_ref());
+}
+
+fn main() {
+ not_ok();
+ ok();
+}
diff --git a/src/tools/clippy/tests/ui/useless_asref.rs b/src/tools/clippy/tests/ui/useless_asref.rs
new file mode 100644
index 000000000..cb9f8ae59
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_asref.rs
@@ -0,0 +1,136 @@
+// run-rustfix
+
+#![deny(clippy::useless_asref)]
+#![allow(clippy::explicit_auto_deref)]
+
+use std::fmt::Debug;
+
+struct FakeAsRef;
+
+#[allow(clippy::should_implement_trait)]
+impl FakeAsRef {
+ fn as_ref(&self) -> &Self {
+ self
+ }
+}
+
+struct MoreRef;
+
+impl<'a, 'b, 'c> AsRef<&'a &'b &'c MoreRef> for MoreRef {
+ fn as_ref(&self) -> &&'a &'b &'c MoreRef {
+ &&&&MoreRef
+ }
+}
+
+fn foo_rstr(x: &str) {
+ println!("{:?}", x);
+}
+fn foo_rslice(x: &[i32]) {
+ println!("{:?}", x);
+}
+fn foo_mrslice(x: &mut [i32]) {
+ println!("{:?}", x);
+}
+fn foo_rrrrmr(_: &&&&MoreRef) {
+ println!("so many refs");
+}
+
+fn not_ok() {
+ let rstr: &str = "hello";
+ let mut mrslice: &mut [i32] = &mut [1, 2, 3];
+
+ {
+ let rslice: &[i32] = &*mrslice;
+ foo_rstr(rstr.as_ref());
+ foo_rstr(rstr);
+ foo_rslice(rslice.as_ref());
+ foo_rslice(rslice);
+ }
+ {
+ foo_mrslice(mrslice.as_mut());
+ foo_mrslice(mrslice);
+ foo_rslice(mrslice.as_ref());
+ foo_rslice(mrslice);
+ }
+
+ {
+ let rrrrrstr = &&&&rstr;
+ let rrrrrslice = &&&&&*mrslice;
+ foo_rslice(rrrrrslice.as_ref());
+ foo_rslice(rrrrrslice);
+ foo_rstr(rrrrrstr.as_ref());
+ foo_rstr(rrrrrstr);
+ }
+ {
+ let mrrrrrslice = &mut &mut &mut &mut mrslice;
+ foo_mrslice(mrrrrrslice.as_mut());
+ foo_mrslice(mrrrrrslice);
+ foo_rslice(mrrrrrslice.as_ref());
+ foo_rslice(mrrrrrslice);
+ }
+ #[allow(unused_parens, clippy::double_parens, clippy::needless_borrow)]
+ foo_rrrrmr((&&&&MoreRef).as_ref());
+
+ generic_not_ok(mrslice);
+ generic_ok(mrslice);
+}
+
+fn ok() {
+ let string = "hello".to_owned();
+ let mut arr = [1, 2, 3];
+ let mut vec = vec![1, 2, 3];
+
+ {
+ foo_rstr(string.as_ref());
+ foo_rslice(arr.as_ref());
+ foo_rslice(vec.as_ref());
+ }
+ {
+ foo_mrslice(arr.as_mut());
+ foo_mrslice(vec.as_mut());
+ }
+
+ {
+ let rrrrstring = &&&&string;
+ let rrrrarr = &&&&arr;
+ let rrrrvec = &&&&vec;
+ foo_rstr(rrrrstring.as_ref());
+ foo_rslice(rrrrarr.as_ref());
+ foo_rslice(rrrrvec.as_ref());
+ }
+ {
+ let mrrrrarr = &mut &mut &mut &mut arr;
+ let mrrrrvec = &mut &mut &mut &mut vec;
+ foo_mrslice(mrrrrarr.as_mut());
+ foo_mrslice(mrrrrvec.as_mut());
+ }
+ FakeAsRef.as_ref();
+ foo_rrrrmr(MoreRef.as_ref());
+
+ generic_not_ok(arr.as_mut());
+ generic_ok(&mut arr);
+}
+
+fn foo_mrt<T: Debug + ?Sized>(t: &mut T) {
+ println!("{:?}", t);
+}
+fn foo_rt<T: Debug + ?Sized>(t: &T) {
+ println!("{:?}", t);
+}
+
+fn generic_not_ok<T: AsMut<T> + AsRef<T> + Debug + ?Sized>(mrt: &mut T) {
+ foo_mrt(mrt.as_mut());
+ foo_mrt(mrt);
+ foo_rt(mrt.as_ref());
+ foo_rt(mrt);
+}
+
+fn generic_ok<U: AsMut<T> + AsRef<T> + ?Sized, T: Debug + ?Sized>(mru: &mut U) {
+ foo_mrt(mru.as_mut());
+ foo_rt(mru.as_ref());
+}
+
+fn main() {
+ not_ok();
+ ok();
+}
diff --git a/src/tools/clippy/tests/ui/useless_asref.stderr b/src/tools/clippy/tests/ui/useless_asref.stderr
new file mode 100644
index 000000000..b21c67bb3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_asref.stderr
@@ -0,0 +1,74 @@
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:44:18
+ |
+LL | foo_rstr(rstr.as_ref());
+ | ^^^^^^^^^^^^^ help: try this: `rstr`
+ |
+note: the lint level is defined here
+ --> $DIR/useless_asref.rs:3:9
+ |
+LL | #![deny(clippy::useless_asref)]
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:46:20
+ |
+LL | foo_rslice(rslice.as_ref());
+ | ^^^^^^^^^^^^^^^ help: try this: `rslice`
+
+error: this call to `as_mut` does nothing
+ --> $DIR/useless_asref.rs:50:21
+ |
+LL | foo_mrslice(mrslice.as_mut());
+ | ^^^^^^^^^^^^^^^^ help: try this: `mrslice`
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:52:20
+ |
+LL | foo_rslice(mrslice.as_ref());
+ | ^^^^^^^^^^^^^^^^ help: try this: `mrslice`
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:59:20
+ |
+LL | foo_rslice(rrrrrslice.as_ref());
+ | ^^^^^^^^^^^^^^^^^^^ help: try this: `rrrrrslice`
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:61:18
+ |
+LL | foo_rstr(rrrrrstr.as_ref());
+ | ^^^^^^^^^^^^^^^^^ help: try this: `rrrrrstr`
+
+error: this call to `as_mut` does nothing
+ --> $DIR/useless_asref.rs:66:21
+ |
+LL | foo_mrslice(mrrrrrslice.as_mut());
+ | ^^^^^^^^^^^^^^^^^^^^ help: try this: `mrrrrrslice`
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:68:20
+ |
+LL | foo_rslice(mrrrrrslice.as_ref());
+ | ^^^^^^^^^^^^^^^^^^^^ help: try this: `mrrrrrslice`
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:72:16
+ |
+LL | foo_rrrrmr((&&&&MoreRef).as_ref());
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(&&&&MoreRef)`
+
+error: this call to `as_mut` does nothing
+ --> $DIR/useless_asref.rs:122:13
+ |
+LL | foo_mrt(mrt.as_mut());
+ | ^^^^^^^^^^^^ help: try this: `mrt`
+
+error: this call to `as_ref` does nothing
+ --> $DIR/useless_asref.rs:124:12
+ |
+LL | foo_rt(mrt.as_ref());
+ | ^^^^^^^^^^^^ help: try this: `mrt`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/useless_attribute.fixed b/src/tools/clippy/tests/ui/useless_attribute.fixed
new file mode 100644
index 000000000..c23231a99
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_attribute.fixed
@@ -0,0 +1,75 @@
+// run-rustfix
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::useless_attribute)]
+#![warn(unreachable_pub)]
+#![feature(rustc_private)]
+
+#![allow(dead_code)]
+#![cfg_attr(feature = "cargo-clippy", allow(dead_code))]
+#[rustfmt::skip]
+#[allow(unused_imports)]
+#[allow(unused_extern_crates)]
+#[macro_use]
+extern crate rustc_middle;
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+// don't lint on unused_import for `use` items
+#[allow(unused_imports)]
+use std::collections;
+
+// don't lint on unused for `use` items
+#[allow(unused)]
+use std::option;
+
+// don't lint on deprecated for `use` items
+mod foo {
+ #[deprecated]
+ pub struct Bar;
+}
+#[allow(deprecated)]
+pub use foo::Bar;
+
+// This should not trigger the lint. There are lint level definitions inside the external derive
+// that would trigger the useless_attribute lint.
+#[derive(DeriveSomething)]
+struct Baz;
+
+// don't lint on unreachable_pub for `use` items
+mod a {
+ mod b {
+ #[allow(dead_code)]
+ #[allow(unreachable_pub)]
+ pub struct C;
+ }
+
+ #[allow(unreachable_pub)]
+ pub use self::b::C;
+}
+
+// don't lint on clippy::wildcard_imports for `use` items
+#[allow(clippy::wildcard_imports)]
+pub use std::io::prelude::*;
+
+// don't lint on clippy::enum_glob_use for `use` items
+#[allow(clippy::enum_glob_use)]
+pub use std::cmp::Ordering::*;
+
+// don't lint on clippy::redundant_pub_crate
+mod c {
+ #[allow(clippy::redundant_pub_crate)]
+ pub(crate) struct S;
+}
+
+fn test_indented_attr() {
+ #![allow(clippy::almost_swapped)]
+ use std::collections::HashSet;
+
+ let _ = HashSet::<u32>::default();
+}
+
+fn main() {
+ test_indented_attr();
+}
diff --git a/src/tools/clippy/tests/ui/useless_attribute.rs b/src/tools/clippy/tests/ui/useless_attribute.rs
new file mode 100644
index 000000000..7a7b198ea
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_attribute.rs
@@ -0,0 +1,75 @@
+// run-rustfix
+// aux-build:proc_macro_derive.rs
+
+#![warn(clippy::useless_attribute)]
+#![warn(unreachable_pub)]
+#![feature(rustc_private)]
+
+#[allow(dead_code)]
+#[cfg_attr(feature = "cargo-clippy", allow(dead_code))]
+#[rustfmt::skip]
+#[allow(unused_imports)]
+#[allow(unused_extern_crates)]
+#[macro_use]
+extern crate rustc_middle;
+
+#[macro_use]
+extern crate proc_macro_derive;
+
+// don't lint on unused_import for `use` items
+#[allow(unused_imports)]
+use std::collections;
+
+// don't lint on unused for `use` items
+#[allow(unused)]
+use std::option;
+
+// don't lint on deprecated for `use` items
+mod foo {
+ #[deprecated]
+ pub struct Bar;
+}
+#[allow(deprecated)]
+pub use foo::Bar;
+
+// This should not trigger the lint. There are lint level definitions inside the external derive
+// that would trigger the useless_attribute lint.
+#[derive(DeriveSomething)]
+struct Baz;
+
+// don't lint on unreachable_pub for `use` items
+mod a {
+ mod b {
+ #[allow(dead_code)]
+ #[allow(unreachable_pub)]
+ pub struct C;
+ }
+
+ #[allow(unreachable_pub)]
+ pub use self::b::C;
+}
+
+// don't lint on clippy::wildcard_imports for `use` items
+#[allow(clippy::wildcard_imports)]
+pub use std::io::prelude::*;
+
+// don't lint on clippy::enum_glob_use for `use` items
+#[allow(clippy::enum_glob_use)]
+pub use std::cmp::Ordering::*;
+
+// don't lint on clippy::redundant_pub_crate
+mod c {
+ #[allow(clippy::redundant_pub_crate)]
+ pub(crate) struct S;
+}
+
+fn test_indented_attr() {
+ #[allow(clippy::almost_swapped)]
+ use std::collections::HashSet;
+
+ let _ = HashSet::<u32>::default();
+}
+
+fn main() {
+ test_indented_attr();
+}
diff --git a/src/tools/clippy/tests/ui/useless_attribute.stderr b/src/tools/clippy/tests/ui/useless_attribute.stderr
new file mode 100644
index 000000000..255d28763
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_attribute.stderr
@@ -0,0 +1,22 @@
+error: useless lint attribute
+ --> $DIR/useless_attribute.rs:8:1
+ |
+LL | #[allow(dead_code)]
+ | ^^^^^^^^^^^^^^^^^^^ help: if you just forgot a `!`, use: `#![allow(dead_code)]`
+ |
+ = note: `-D clippy::useless-attribute` implied by `-D warnings`
+
+error: useless lint attribute
+ --> $DIR/useless_attribute.rs:9:1
+ |
+LL | #[cfg_attr(feature = "cargo-clippy", allow(dead_code))]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: if you just forgot a `!`, use: `#![cfg_attr(feature = "cargo-clippy", allow(dead_code)`
+
+error: useless lint attribute
+ --> $DIR/useless_attribute.rs:67:5
+ |
+LL | #[allow(clippy::almost_swapped)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: if you just forgot a `!`, use: `#![allow(clippy::almost_swapped)]`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/useless_conversion.fixed b/src/tools/clippy/tests/ui/useless_conversion.fixed
new file mode 100644
index 000000000..70ff08f36
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_conversion.fixed
@@ -0,0 +1,92 @@
+// run-rustfix
+
+#![deny(clippy::useless_conversion)]
+#![allow(clippy::unnecessary_wraps)]
+
+fn test_generic<T: Copy>(val: T) -> T {
+ let _ = val;
+ val
+}
+
+fn test_generic2<T: Copy + Into<i32> + Into<U>, U: From<T>>(val: T) {
+ // ok
+ let _: i32 = val.into();
+ let _: U = val.into();
+ let _ = U::from(val);
+}
+
+fn test_questionmark() -> Result<(), ()> {
+ {
+ let _: i32 = 0i32;
+ Ok(Ok(()))
+ }??;
+ Ok(())
+}
+
+fn test_issue_3913() -> Result<(), std::io::Error> {
+ use std::fs;
+ use std::path::Path;
+
+ let path = Path::new(".");
+ for _ in fs::read_dir(path)? {}
+
+ Ok(())
+}
+
+fn test_issue_5833() -> Result<(), ()> {
+ let text = "foo\r\nbar\n\nbaz\n";
+ let lines = text.lines();
+ if Some("ok") == lines.into_iter().next() {}
+
+ Ok(())
+}
+
+fn main() {
+ test_generic(10i32);
+ test_generic2::<i32, i32>(10i32);
+ test_questionmark().unwrap();
+ test_issue_3913().unwrap();
+ test_issue_5833().unwrap();
+
+ let _: String = "foo".into();
+ let _: String = From::from("foo");
+ let _ = String::from("foo");
+ #[allow(clippy::useless_conversion)]
+ {
+ let _: String = "foo".into();
+ let _ = String::from("foo");
+ let _ = "".lines().into_iter();
+ }
+
+ let _: String = "foo".to_string();
+ let _: String = "foo".to_string();
+ let _ = "foo".to_string();
+ let _ = format!("A: {:04}", 123);
+ let _ = "".lines();
+ let _ = vec![1, 2, 3].into_iter();
+ let _: String = format!("Hello {}", "world");
+
+ // keep parentheses around `a + b` for suggestion (see #4750)
+ let a: i32 = 1;
+ let b: i32 = 1;
+ let _ = (a + b) * 3;
+
+ // see #7205
+ let s: Foo<'a'> = Foo;
+ let _: Foo<'b'> = s.into();
+ let s2: Foo<'a'> = Foo;
+ let _: Foo<'a'> = s2;
+ let s3: Foo<'a'> = Foo;
+ let _ = s3;
+ let s4: Foo<'a'> = Foo;
+ let _ = vec![s4, s4, s4].into_iter();
+}
+
+#[derive(Copy, Clone)]
+struct Foo<const C: char>;
+
+impl From<Foo<'a'>> for Foo<'b'> {
+ fn from(_s: Foo<'a'>) -> Self {
+ Foo
+ }
+}
diff --git a/src/tools/clippy/tests/ui/useless_conversion.rs b/src/tools/clippy/tests/ui/useless_conversion.rs
new file mode 100644
index 000000000..f2444a8f4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_conversion.rs
@@ -0,0 +1,92 @@
+// run-rustfix
+
+#![deny(clippy::useless_conversion)]
+#![allow(clippy::unnecessary_wraps)]
+
+fn test_generic<T: Copy>(val: T) -> T {
+ let _ = T::from(val);
+ val.into()
+}
+
+fn test_generic2<T: Copy + Into<i32> + Into<U>, U: From<T>>(val: T) {
+ // ok
+ let _: i32 = val.into();
+ let _: U = val.into();
+ let _ = U::from(val);
+}
+
+fn test_questionmark() -> Result<(), ()> {
+ {
+ let _: i32 = 0i32.into();
+ Ok(Ok(()))
+ }??;
+ Ok(())
+}
+
+fn test_issue_3913() -> Result<(), std::io::Error> {
+ use std::fs;
+ use std::path::Path;
+
+ let path = Path::new(".");
+ for _ in fs::read_dir(path)? {}
+
+ Ok(())
+}
+
+fn test_issue_5833() -> Result<(), ()> {
+ let text = "foo\r\nbar\n\nbaz\n";
+ let lines = text.lines();
+ if Some("ok") == lines.into_iter().next() {}
+
+ Ok(())
+}
+
+fn main() {
+ test_generic(10i32);
+ test_generic2::<i32, i32>(10i32);
+ test_questionmark().unwrap();
+ test_issue_3913().unwrap();
+ test_issue_5833().unwrap();
+
+ let _: String = "foo".into();
+ let _: String = From::from("foo");
+ let _ = String::from("foo");
+ #[allow(clippy::useless_conversion)]
+ {
+ let _: String = "foo".into();
+ let _ = String::from("foo");
+ let _ = "".lines().into_iter();
+ }
+
+ let _: String = "foo".to_string().into();
+ let _: String = From::from("foo".to_string());
+ let _ = String::from("foo".to_string());
+ let _ = String::from(format!("A: {:04}", 123));
+ let _ = "".lines().into_iter();
+ let _ = vec![1, 2, 3].into_iter().into_iter();
+ let _: String = format!("Hello {}", "world").into();
+
+ // keep parentheses around `a + b` for suggestion (see #4750)
+ let a: i32 = 1;
+ let b: i32 = 1;
+ let _ = i32::from(a + b) * 3;
+
+ // see #7205
+ let s: Foo<'a'> = Foo;
+ let _: Foo<'b'> = s.into();
+ let s2: Foo<'a'> = Foo;
+ let _: Foo<'a'> = s2.into();
+ let s3: Foo<'a'> = Foo;
+ let _ = Foo::<'a'>::from(s3);
+ let s4: Foo<'a'> = Foo;
+ let _ = vec![s4, s4, s4].into_iter().into_iter();
+}
+
+#[derive(Copy, Clone)]
+struct Foo<const C: char>;
+
+impl From<Foo<'a'>> for Foo<'b'> {
+ fn from(_s: Foo<'a'>) -> Self {
+ Foo
+ }
+}
diff --git a/src/tools/clippy/tests/ui/useless_conversion.stderr b/src/tools/clippy/tests/ui/useless_conversion.stderr
new file mode 100644
index 000000000..e6760f700
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_conversion.stderr
@@ -0,0 +1,92 @@
+error: useless conversion to the same type: `T`
+ --> $DIR/useless_conversion.rs:7:13
+ |
+LL | let _ = T::from(val);
+ | ^^^^^^^^^^^^ help: consider removing `T::from()`: `val`
+ |
+note: the lint level is defined here
+ --> $DIR/useless_conversion.rs:3:9
+ |
+LL | #![deny(clippy::useless_conversion)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: useless conversion to the same type: `T`
+ --> $DIR/useless_conversion.rs:8:5
+ |
+LL | val.into()
+ | ^^^^^^^^^^ help: consider removing `.into()`: `val`
+
+error: useless conversion to the same type: `i32`
+ --> $DIR/useless_conversion.rs:20:22
+ |
+LL | let _: i32 = 0i32.into();
+ | ^^^^^^^^^^^ help: consider removing `.into()`: `0i32`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion.rs:61:21
+ |
+LL | let _: String = "foo".to_string().into();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into()`: `"foo".to_string()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion.rs:62:21
+ |
+LL | let _: String = From::from("foo".to_string());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `From::from()`: `"foo".to_string()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion.rs:63:13
+ |
+LL | let _ = String::from("foo".to_string());
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `String::from()`: `"foo".to_string()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion.rs:64:13
+ |
+LL | let _ = String::from(format!("A: {:04}", 123));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `String::from()`: `format!("A: {:04}", 123)`
+
+error: useless conversion to the same type: `std::str::Lines`
+ --> $DIR/useless_conversion.rs:65:13
+ |
+LL | let _ = "".lines().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `"".lines()`
+
+error: useless conversion to the same type: `std::vec::IntoIter<i32>`
+ --> $DIR/useless_conversion.rs:66:13
+ |
+LL | let _ = vec![1, 2, 3].into_iter().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `vec![1, 2, 3].into_iter()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion.rs:67:21
+ |
+LL | let _: String = format!("Hello {}", "world").into();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into()`: `format!("Hello {}", "world")`
+
+error: useless conversion to the same type: `i32`
+ --> $DIR/useless_conversion.rs:72:13
+ |
+LL | let _ = i32::from(a + b) * 3;
+ | ^^^^^^^^^^^^^^^^ help: consider removing `i32::from()`: `(a + b)`
+
+error: useless conversion to the same type: `Foo<'a'>`
+ --> $DIR/useless_conversion.rs:78:23
+ |
+LL | let _: Foo<'a'> = s2.into();
+ | ^^^^^^^^^ help: consider removing `.into()`: `s2`
+
+error: useless conversion to the same type: `Foo<'a'>`
+ --> $DIR/useless_conversion.rs:80:13
+ |
+LL | let _ = Foo::<'a'>::from(s3);
+ | ^^^^^^^^^^^^^^^^^^^^ help: consider removing `Foo::<'a'>::from()`: `s3`
+
+error: useless conversion to the same type: `std::vec::IntoIter<Foo<'a'>>`
+ --> $DIR/useless_conversion.rs:82:13
+ |
+LL | let _ = vec![s4, s4, s4].into_iter().into_iter();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `vec![s4, s4, s4].into_iter()`
+
+error: aborting due to 14 previous errors
+
diff --git a/src/tools/clippy/tests/ui/useless_conversion_try.rs b/src/tools/clippy/tests/ui/useless_conversion_try.rs
new file mode 100644
index 000000000..39f54c27b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_conversion_try.rs
@@ -0,0 +1,40 @@
+#![deny(clippy::useless_conversion)]
+
+fn test_generic<T: Copy>(val: T) -> T {
+ let _ = T::try_from(val).unwrap();
+ val.try_into().unwrap()
+}
+
+fn test_generic2<T: Copy + Into<i32> + Into<U>, U: From<T>>(val: T) {
+ // ok
+ let _: i32 = val.try_into().unwrap();
+ let _: U = val.try_into().unwrap();
+ let _ = U::try_from(val).unwrap();
+}
+
+fn main() {
+ test_generic(10i32);
+ test_generic2::<i32, i32>(10i32);
+
+ let _: String = "foo".try_into().unwrap();
+ let _: String = TryFrom::try_from("foo").unwrap();
+ let _ = String::try_from("foo").unwrap();
+ #[allow(clippy::useless_conversion)]
+ {
+ let _ = String::try_from("foo").unwrap();
+ let _: String = "foo".try_into().unwrap();
+ }
+ let _: String = "foo".to_string().try_into().unwrap();
+ let _: String = TryFrom::try_from("foo".to_string()).unwrap();
+ let _ = String::try_from("foo".to_string()).unwrap();
+ let _ = String::try_from(format!("A: {:04}", 123)).unwrap();
+ let _: String = format!("Hello {}", "world").try_into().unwrap();
+ let _: String = "".to_owned().try_into().unwrap();
+ let _: String = match String::from("_").try_into() {
+ Ok(a) => a,
+ Err(_) => "".into(),
+ };
+ // FIXME this is a false negative
+ #[allow(clippy::cmp_owned)]
+ if String::from("a") == TryInto::<String>::try_into(String::from("a")).unwrap() {}
+}
diff --git a/src/tools/clippy/tests/ui/useless_conversion_try.stderr b/src/tools/clippy/tests/ui/useless_conversion_try.stderr
new file mode 100644
index 000000000..b691c13f7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/useless_conversion_try.stderr
@@ -0,0 +1,79 @@
+error: useless conversion to the same type: `T`
+ --> $DIR/useless_conversion_try.rs:4:13
+ |
+LL | let _ = T::try_from(val).unwrap();
+ | ^^^^^^^^^^^^^^^^
+ |
+note: the lint level is defined here
+ --> $DIR/useless_conversion_try.rs:1:9
+ |
+LL | #![deny(clippy::useless_conversion)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = help: consider removing `T::try_from()`
+
+error: useless conversion to the same type: `T`
+ --> $DIR/useless_conversion_try.rs:5:5
+ |
+LL | val.try_into().unwrap()
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider removing `.try_into()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:27:21
+ |
+LL | let _: String = "foo".to_string().try_into().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `.try_into()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:28:21
+ |
+LL | let _: String = TryFrom::try_from("foo".to_string()).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `TryFrom::try_from()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:29:13
+ |
+LL | let _ = String::try_from("foo".to_string()).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `String::try_from()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:30:13
+ |
+LL | let _ = String::try_from(format!("A: {:04}", 123)).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `String::try_from()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:31:21
+ |
+LL | let _: String = format!("Hello {}", "world").try_into().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `.try_into()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:32:21
+ |
+LL | let _: String = "".to_owned().try_into().unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `.try_into()`
+
+error: useless conversion to the same type: `std::string::String`
+ --> $DIR/useless_conversion_try.rs:33:27
+ |
+LL | let _: String = match String::from("_").try_into() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing `.try_into()`
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/vec.fixed b/src/tools/clippy/tests/ui/vec.fixed
new file mode 100644
index 000000000..318f9c2dc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec.fixed
@@ -0,0 +1,78 @@
+// run-rustfix
+#![allow(clippy::nonstandard_macro_braces)]
+#![warn(clippy::useless_vec)]
+
+#[derive(Debug)]
+struct NonCopy;
+
+fn on_slice(_: &[u8]) {}
+
+fn on_mut_slice(_: &mut [u8]) {}
+
+#[allow(clippy::ptr_arg)]
+fn on_vec(_: &Vec<u8>) {}
+
+fn on_mut_vec(_: &mut Vec<u8>) {}
+
+struct Line {
+ length: usize,
+}
+
+impl Line {
+ fn length(&self) -> usize {
+ self.length
+ }
+}
+
+fn main() {
+ on_slice(&[]);
+ on_slice(&[]);
+ on_mut_slice(&mut []);
+
+ on_slice(&[1, 2]);
+ on_slice(&[1, 2]);
+ on_mut_slice(&mut [1, 2]);
+
+ on_slice(&[1, 2]);
+ on_slice(&[1, 2]);
+ on_mut_slice(&mut [1, 2]);
+ #[rustfmt::skip]
+ on_slice(&[1, 2]);
+ on_slice(&[1, 2]);
+ on_mut_slice(&mut [1, 2]);
+
+ on_slice(&[1; 2]);
+ on_slice(&[1; 2]);
+ on_mut_slice(&mut [1; 2]);
+
+ on_vec(&vec![]);
+ on_vec(&vec![1, 2]);
+ on_vec(&vec![1; 2]);
+ on_mut_vec(&mut vec![]);
+ on_mut_vec(&mut vec![1, 2]);
+ on_mut_vec(&mut vec![1; 2]);
+
+ // Now with non-constant expressions
+ let line = Line { length: 2 };
+
+ on_slice(&vec![2; line.length]);
+ on_slice(&vec![2; line.length()]);
+ on_mut_slice(&mut vec![2; line.length]);
+ on_mut_slice(&mut vec![2; line.length()]);
+
+ for a in &[1, 2, 3] {
+ println!("{:?}", a);
+ }
+
+ for a in vec![NonCopy, NonCopy] {
+ println!("{:?}", a);
+ }
+
+ on_vec(&vec![1; 201]); // Ok, size of `vec` higher than `too_large_for_stack`
+ on_mut_vec(&mut vec![1; 201]); // Ok, size of `vec` higher than `too_large_for_stack`
+
+ // Ok
+ for a in vec![1; 201] {
+ println!("{:?}", a);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/vec.rs b/src/tools/clippy/tests/ui/vec.rs
new file mode 100644
index 000000000..d7673ce3e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec.rs
@@ -0,0 +1,78 @@
+// run-rustfix
+#![allow(clippy::nonstandard_macro_braces)]
+#![warn(clippy::useless_vec)]
+
+#[derive(Debug)]
+struct NonCopy;
+
+fn on_slice(_: &[u8]) {}
+
+fn on_mut_slice(_: &mut [u8]) {}
+
+#[allow(clippy::ptr_arg)]
+fn on_vec(_: &Vec<u8>) {}
+
+fn on_mut_vec(_: &mut Vec<u8>) {}
+
+struct Line {
+ length: usize,
+}
+
+impl Line {
+ fn length(&self) -> usize {
+ self.length
+ }
+}
+
+fn main() {
+ on_slice(&vec![]);
+ on_slice(&[]);
+ on_mut_slice(&mut vec![]);
+
+ on_slice(&vec![1, 2]);
+ on_slice(&[1, 2]);
+ on_mut_slice(&mut vec![1, 2]);
+
+ on_slice(&vec![1, 2]);
+ on_slice(&[1, 2]);
+ on_mut_slice(&mut vec![1, 2]);
+ #[rustfmt::skip]
+ on_slice(&vec!(1, 2));
+ on_slice(&[1, 2]);
+ on_mut_slice(&mut vec![1, 2]);
+
+ on_slice(&vec![1; 2]);
+ on_slice(&[1; 2]);
+ on_mut_slice(&mut vec![1; 2]);
+
+ on_vec(&vec![]);
+ on_vec(&vec![1, 2]);
+ on_vec(&vec![1; 2]);
+ on_mut_vec(&mut vec![]);
+ on_mut_vec(&mut vec![1, 2]);
+ on_mut_vec(&mut vec![1; 2]);
+
+ // Now with non-constant expressions
+ let line = Line { length: 2 };
+
+ on_slice(&vec![2; line.length]);
+ on_slice(&vec![2; line.length()]);
+ on_mut_slice(&mut vec![2; line.length]);
+ on_mut_slice(&mut vec![2; line.length()]);
+
+ for a in vec![1, 2, 3] {
+ println!("{:?}", a);
+ }
+
+ for a in vec![NonCopy, NonCopy] {
+ println!("{:?}", a);
+ }
+
+ on_vec(&vec![1; 201]); // Ok, size of `vec` higher than `too_large_for_stack`
+ on_mut_vec(&mut vec![1; 201]); // Ok, size of `vec` higher than `too_large_for_stack`
+
+ // Ok
+ for a in vec![1; 201] {
+ println!("{:?}", a);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/vec.stderr b/src/tools/clippy/tests/ui/vec.stderr
new file mode 100644
index 000000000..7d1de05a5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec.stderr
@@ -0,0 +1,70 @@
+error: useless use of `vec!`
+ --> $DIR/vec.rs:28:14
+ |
+LL | on_slice(&vec![]);
+ | ^^^^^^^ help: you can use a slice directly: `&[]`
+ |
+ = note: `-D clippy::useless-vec` implied by `-D warnings`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:30:18
+ |
+LL | on_mut_slice(&mut vec![]);
+ | ^^^^^^^^^^^ help: you can use a slice directly: `&mut []`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:32:14
+ |
+LL | on_slice(&vec![1, 2]);
+ | ^^^^^^^^^^^ help: you can use a slice directly: `&[1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:34:18
+ |
+LL | on_mut_slice(&mut vec![1, 2]);
+ | ^^^^^^^^^^^^^^^ help: you can use a slice directly: `&mut [1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:36:14
+ |
+LL | on_slice(&vec![1, 2]);
+ | ^^^^^^^^^^^ help: you can use a slice directly: `&[1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:38:18
+ |
+LL | on_mut_slice(&mut vec![1, 2]);
+ | ^^^^^^^^^^^^^^^ help: you can use a slice directly: `&mut [1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:40:14
+ |
+LL | on_slice(&vec!(1, 2));
+ | ^^^^^^^^^^^ help: you can use a slice directly: `&[1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:42:18
+ |
+LL | on_mut_slice(&mut vec![1, 2]);
+ | ^^^^^^^^^^^^^^^ help: you can use a slice directly: `&mut [1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:44:14
+ |
+LL | on_slice(&vec![1; 2]);
+ | ^^^^^^^^^^^ help: you can use a slice directly: `&[1; 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:46:18
+ |
+LL | on_mut_slice(&mut vec![1; 2]);
+ | ^^^^^^^^^^^^^^^ help: you can use a slice directly: `&mut [1; 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:63:14
+ |
+LL | for a in vec![1, 2, 3] {
+ | ^^^^^^^^^^^^^ help: you can use a slice directly: `&[1, 2, 3]`
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/vec_box_sized.fixed b/src/tools/clippy/tests/ui/vec_box_sized.fixed
new file mode 100644
index 000000000..a40d91fdb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_box_sized.fixed
@@ -0,0 +1,54 @@
+// run-rustfix
+
+#![allow(dead_code)]
+
+struct SizedStruct(i32);
+struct UnsizedStruct([i32]);
+struct BigStruct([i32; 10000]);
+
+/// The following should trigger the lint
+mod should_trigger {
+ use super::SizedStruct;
+ const C: Vec<i32> = Vec::new();
+ static S: Vec<i32> = Vec::new();
+
+ struct StructWithVecBox {
+ sized_type: Vec<SizedStruct>,
+ }
+
+ struct A(Vec<SizedStruct>);
+ struct B(Vec<Vec<u32>>);
+}
+
+/// The following should not trigger the lint
+mod should_not_trigger {
+ use super::{BigStruct, UnsizedStruct};
+
+ struct C(Vec<Box<UnsizedStruct>>);
+ struct D(Vec<Box<BigStruct>>);
+
+ struct StructWithVecBoxButItsUnsized {
+ unsized_type: Vec<Box<UnsizedStruct>>,
+ }
+
+ struct TraitVec<T: ?Sized> {
+ // Regression test for #3720. This was causing an ICE.
+ inner: Vec<Box<T>>,
+ }
+}
+
+mod inner_mod {
+ mod inner {
+ pub struct S;
+ }
+
+ mod inner2 {
+ use super::inner::S;
+
+ pub fn f() -> Vec<S> {
+ vec![]
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/vec_box_sized.rs b/src/tools/clippy/tests/ui/vec_box_sized.rs
new file mode 100644
index 000000000..843bbb64e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_box_sized.rs
@@ -0,0 +1,54 @@
+// run-rustfix
+
+#![allow(dead_code)]
+
+struct SizedStruct(i32);
+struct UnsizedStruct([i32]);
+struct BigStruct([i32; 10000]);
+
+/// The following should trigger the lint
+mod should_trigger {
+ use super::SizedStruct;
+ const C: Vec<Box<i32>> = Vec::new();
+ static S: Vec<Box<i32>> = Vec::new();
+
+ struct StructWithVecBox {
+ sized_type: Vec<Box<SizedStruct>>,
+ }
+
+ struct A(Vec<Box<SizedStruct>>);
+ struct B(Vec<Vec<Box<(u32)>>>);
+}
+
+/// The following should not trigger the lint
+mod should_not_trigger {
+ use super::{BigStruct, UnsizedStruct};
+
+ struct C(Vec<Box<UnsizedStruct>>);
+ struct D(Vec<Box<BigStruct>>);
+
+ struct StructWithVecBoxButItsUnsized {
+ unsized_type: Vec<Box<UnsizedStruct>>,
+ }
+
+ struct TraitVec<T: ?Sized> {
+ // Regression test for #3720. This was causing an ICE.
+ inner: Vec<Box<T>>,
+ }
+}
+
+mod inner_mod {
+ mod inner {
+ pub struct S;
+ }
+
+ mod inner2 {
+ use super::inner::S;
+
+ pub fn f() -> Vec<Box<S>> {
+ vec![]
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/vec_box_sized.stderr b/src/tools/clippy/tests/ui/vec_box_sized.stderr
new file mode 100644
index 000000000..c518267f0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_box_sized.stderr
@@ -0,0 +1,40 @@
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:12:14
+ |
+LL | const C: Vec<Box<i32>> = Vec::new();
+ | ^^^^^^^^^^^^^ help: try: `Vec<i32>`
+ |
+ = note: `-D clippy::vec-box` implied by `-D warnings`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:13:15
+ |
+LL | static S: Vec<Box<i32>> = Vec::new();
+ | ^^^^^^^^^^^^^ help: try: `Vec<i32>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:16:21
+ |
+LL | sized_type: Vec<Box<SizedStruct>>,
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<SizedStruct>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:19:14
+ |
+LL | struct A(Vec<Box<SizedStruct>>);
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `Vec<SizedStruct>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:20:18
+ |
+LL | struct B(Vec<Vec<Box<(u32)>>>);
+ | ^^^^^^^^^^^^^^^ help: try: `Vec<u32>`
+
+error: `Vec<T>` is already on the heap, the boxing is unnecessary
+ --> $DIR/vec_box_sized.rs:48:23
+ |
+LL | pub fn f() -> Vec<Box<S>> {
+ | ^^^^^^^^^^^ help: try: `Vec<S>`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/vec_init_then_push.rs b/src/tools/clippy/tests/ui/vec_init_then_push.rs
new file mode 100644
index 000000000..8dd098a5b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_init_then_push.rs
@@ -0,0 +1,112 @@
+#![allow(unused_variables)]
+#![warn(clippy::vec_init_then_push)]
+
+fn main() {
+ let mut def_err: Vec<u32> = Default::default();
+ def_err.push(0);
+
+ let mut new_err = Vec::<u32>::new();
+ new_err.push(1);
+
+ let mut cap_err = Vec::with_capacity(2);
+ cap_err.push(0);
+ cap_err.push(1);
+ cap_err.push(2);
+ if true {
+ // don't include this one
+ cap_err.push(3);
+ }
+
+ let mut cap_ok = Vec::with_capacity(10);
+ cap_ok.push(0);
+
+ new_err = Vec::new();
+ new_err.push(0);
+
+ let mut vec = Vec::new();
+ // control flow at block final expression
+ if true {
+ // no lint
+ vec.push(1);
+ }
+
+ let mut vec = Vec::with_capacity(5);
+ vec.push(1);
+ vec.push(2);
+ vec.push(3);
+ vec.push(4);
+}
+
+pub fn no_lint() -> Vec<i32> {
+ let mut p = Some(1);
+ let mut vec = Vec::new();
+ loop {
+ match p {
+ None => return vec,
+ Some(i) => {
+ vec.push(i);
+ p = None;
+ },
+ }
+ }
+}
+
+fn _from_iter(items: impl Iterator<Item = u32>) -> Vec<u32> {
+ let mut v = Vec::new();
+ v.push(0);
+ v.push(1);
+ v.extend(items);
+ v
+}
+
+fn _cond_push(x: bool) -> Vec<u32> {
+ let mut v = Vec::new();
+ v.push(0);
+ if x {
+ v.push(1);
+ }
+ v.push(2);
+ v
+}
+
+fn _push_then_edit(x: u32) -> Vec<u32> {
+ let mut v = Vec::new();
+ v.push(x);
+ v.push(1);
+ v[0] = v[1] + 5;
+ v
+}
+
+fn _cond_push_with_large_start(x: bool) -> Vec<u32> {
+ let mut v = Vec::new();
+ v.push(0);
+ v.push(1);
+ v.push(0);
+ v.push(1);
+ v.push(0);
+ v.push(0);
+ v.push(1);
+ v.push(0);
+ if x {
+ v.push(1);
+ }
+
+ let mut v2 = Vec::new();
+ v2.push(0);
+ v2.push(1);
+ v2.push(0);
+ v2.push(1);
+ v2.push(0);
+ v2.push(0);
+ v2.push(1);
+ v2.push(0);
+ v2.extend(&v);
+
+ v2
+}
+
+fn f() {
+ let mut v = Vec::new();
+ v.push((0i32, 0i32));
+ let y = v[0].0.abs();
+}
diff --git a/src/tools/clippy/tests/ui/vec_init_then_push.stderr b/src/tools/clippy/tests/ui/vec_init_then_push.stderr
new file mode 100644
index 000000000..a9da1c520
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_init_then_push.stderr
@@ -0,0 +1,73 @@
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:5:5
+ |
+LL | / let mut def_err: Vec<u32> = Default::default();
+LL | | def_err.push(0);
+ | |____________________^ help: consider using the `vec![]` macro: `let def_err: Vec<u32> = vec![..];`
+ |
+ = note: `-D clippy::vec-init-then-push` implied by `-D warnings`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:8:5
+ |
+LL | / let mut new_err = Vec::<u32>::new();
+LL | | new_err.push(1);
+ | |____________________^ help: consider using the `vec![]` macro: `let mut new_err = vec![..];`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:11:5
+ |
+LL | / let mut cap_err = Vec::with_capacity(2);
+LL | | cap_err.push(0);
+LL | | cap_err.push(1);
+LL | | cap_err.push(2);
+ | |____________________^ help: consider using the `vec![]` macro: `let mut cap_err = vec![..];`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:23:5
+ |
+LL | / new_err = Vec::new();
+LL | | new_err.push(0);
+ | |____________________^ help: consider using the `vec![]` macro: `new_err = vec![..];`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:73:5
+ |
+LL | / let mut v = Vec::new();
+LL | | v.push(x);
+LL | | v.push(1);
+ | |______________^ help: consider using the `vec![]` macro: `let mut v = vec![..];`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:81:5
+ |
+LL | / let mut v = Vec::new();
+LL | | v.push(0);
+LL | | v.push(1);
+LL | | v.push(0);
+... |
+LL | | v.push(1);
+LL | | v.push(0);
+ | |______________^ help: consider using the `vec![]` macro: `let mut v = vec![..];`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:94:5
+ |
+LL | / let mut v2 = Vec::new();
+LL | | v2.push(0);
+LL | | v2.push(1);
+LL | | v2.push(0);
+... |
+LL | | v2.push(1);
+LL | | v2.push(0);
+ | |_______________^ help: consider using the `vec![]` macro: `let mut v2 = vec![..];`
+
+error: calls to `push` immediately after creation
+ --> $DIR/vec_init_then_push.rs:109:5
+ |
+LL | / let mut v = Vec::new();
+LL | | v.push((0i32, 0i32));
+ | |_________________________^ help: consider using the `vec![]` macro: `let v = vec![..];`
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/vec_resize_to_zero.rs b/src/tools/clippy/tests/ui/vec_resize_to_zero.rs
new file mode 100644
index 000000000..7ed27439e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_resize_to_zero.rs
@@ -0,0 +1,15 @@
+#![warn(clippy::vec_resize_to_zero)]
+
+fn main() {
+ // applicable here
+ vec![1, 2, 3, 4, 5].resize(0, 5);
+
+ // not applicable
+ vec![1, 2, 3, 4, 5].resize(2, 5);
+
+ // applicable here, but only implemented for integer literals for now
+ vec!["foo", "bar", "baz"].resize(0, "bar");
+
+ // not applicable
+ vec!["foo", "bar", "baz"].resize(2, "bar")
+}
diff --git a/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr b/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr
new file mode 100644
index 000000000..feb846298
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vec_resize_to_zero.stderr
@@ -0,0 +1,13 @@
+error: emptying a vector with `resize`
+ --> $DIR/vec_resize_to_zero.rs:5:5
+ |
+LL | vec![1, 2, 3, 4, 5].resize(0, 5);
+ | ^^^^^^^^^^^^^^^^^^^^------------
+ | |
+ | help: ...or you can empty the vector with: `clear()`
+ |
+ = note: `-D clippy::vec-resize-to-zero` implied by `-D warnings`
+ = help: the arguments may be inverted...
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/verbose_file_reads.rs b/src/tools/clippy/tests/ui/verbose_file_reads.rs
new file mode 100644
index 000000000..e0065e05a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/verbose_file_reads.rs
@@ -0,0 +1,28 @@
+#![warn(clippy::verbose_file_reads)]
+use std::env::temp_dir;
+use std::fs::File;
+use std::io::Read;
+
+struct Struct;
+// To make sure we only warn on File::{read_to_end, read_to_string} calls
+impl Struct {
+ pub fn read_to_end(&self) {}
+
+ pub fn read_to_string(&self) {}
+}
+
+fn main() -> std::io::Result<()> {
+ let path = "foo.txt";
+ // Lint shouldn't catch this
+ let s = Struct;
+ s.read_to_end();
+ s.read_to_string();
+ // Should catch this
+ let mut f = File::open(&path)?;
+ let mut buffer = Vec::new();
+ f.read_to_end(&mut buffer)?;
+ // ...and this
+ let mut string_buffer = String::new();
+ f.read_to_string(&mut string_buffer)?;
+ Ok(())
+}
diff --git a/src/tools/clippy/tests/ui/verbose_file_reads.stderr b/src/tools/clippy/tests/ui/verbose_file_reads.stderr
new file mode 100644
index 000000000..550b6ab67
--- /dev/null
+++ b/src/tools/clippy/tests/ui/verbose_file_reads.stderr
@@ -0,0 +1,19 @@
+error: use of `File::read_to_end`
+ --> $DIR/verbose_file_reads.rs:23:5
+ |
+LL | f.read_to_end(&mut buffer)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::verbose-file-reads` implied by `-D warnings`
+ = help: consider using `fs::read` instead
+
+error: use of `File::read_to_string`
+ --> $DIR/verbose_file_reads.rs:26:5
+ |
+LL | f.read_to_string(&mut string_buffer)?;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using `fs::read_to_string` instead
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/vtable_address_comparisons.rs b/src/tools/clippy/tests/ui/vtable_address_comparisons.rs
new file mode 100644
index 000000000..a9a4a0f5a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vtable_address_comparisons.rs
@@ -0,0 +1,44 @@
+use std::fmt::Debug;
+use std::ptr;
+use std::rc::Rc;
+use std::sync::Arc;
+
+#[warn(clippy::vtable_address_comparisons)]
+#[allow(clippy::borrow_as_ptr)]
+
+fn main() {
+ let a: *const dyn Debug = &1 as &dyn Debug;
+ let b: *const dyn Debug = &1 as &dyn Debug;
+
+ // These should fail:
+ let _ = a == b;
+ let _ = a != b;
+ let _ = a < b;
+ let _ = a <= b;
+ let _ = a > b;
+ let _ = a >= b;
+ ptr::eq(a, b);
+
+ let a = &1 as &dyn Debug;
+ let b = &1 as &dyn Debug;
+ ptr::eq(a, b);
+
+ let a: Rc<dyn Debug> = Rc::new(1);
+ Rc::ptr_eq(&a, &a);
+
+ let a: Arc<dyn Debug> = Arc::new(1);
+ Arc::ptr_eq(&a, &a);
+
+ // These should be fine:
+ let a = &1;
+ ptr::eq(a, a);
+
+ let a = Rc::new(1);
+ Rc::ptr_eq(&a, &a);
+
+ let a = Arc::new(1);
+ Arc::ptr_eq(&a, &a);
+
+ let a: &[u8] = b"";
+ ptr::eq(a, a);
+}
diff --git a/src/tools/clippy/tests/ui/vtable_address_comparisons.stderr b/src/tools/clippy/tests/ui/vtable_address_comparisons.stderr
new file mode 100644
index 000000000..2f1be61e5
--- /dev/null
+++ b/src/tools/clippy/tests/ui/vtable_address_comparisons.stderr
@@ -0,0 +1,83 @@
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:14:13
+ |
+LL | let _ = a == b;
+ | ^^^^^^
+ |
+ = note: `-D clippy::vtable-address-comparisons` implied by `-D warnings`
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:15:13
+ |
+LL | let _ = a != b;
+ | ^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:16:13
+ |
+LL | let _ = a < b;
+ | ^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:17:13
+ |
+LL | let _ = a <= b;
+ | ^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:18:13
+ |
+LL | let _ = a > b;
+ | ^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:19:13
+ |
+LL | let _ = a >= b;
+ | ^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:20:5
+ |
+LL | ptr::eq(a, b);
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:24:5
+ |
+LL | ptr::eq(a, b);
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:27:5
+ |
+LL | Rc::ptr_eq(&a, &a);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: comparing trait object pointers compares a non-unique vtable address
+ --> $DIR/vtable_address_comparisons.rs:30:5
+ |
+LL | Arc::ptr_eq(&a, &a);
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider extracting and comparing data pointers only
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/while_let_loop.rs b/src/tools/clippy/tests/ui/while_let_loop.rs
new file mode 100644
index 000000000..c42e2a79a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/while_let_loop.rs
@@ -0,0 +1,145 @@
+#![warn(clippy::while_let_loop)]
+
+fn main() {
+ let y = Some(true);
+ loop {
+ if let Some(_x) = y {
+ let _v = 1;
+ } else {
+ break;
+ }
+ }
+
+ #[allow(clippy::never_loop)]
+ loop {
+ // no error, break is not in else clause
+ if let Some(_x) = y {
+ let _v = 1;
+ }
+ break;
+ }
+
+ loop {
+ match y {
+ Some(_x) => true,
+ None => break,
+ };
+ }
+
+ loop {
+ let x = match y {
+ Some(x) => x,
+ None => break,
+ };
+ let _x = x;
+ let _str = "foo";
+ }
+
+ loop {
+ let x = match y {
+ Some(x) => x,
+ None => break,
+ };
+ {
+ let _a = "bar";
+ };
+ {
+ let _b = "foobar";
+ }
+ }
+
+ loop {
+ // no error, else branch does something other than break
+ match y {
+ Some(_x) => true,
+ _ => {
+ let _z = 1;
+ break;
+ },
+ };
+ }
+
+ while let Some(x) = y {
+ // no error, obviously
+ println!("{}", x);
+ }
+
+ // #675, this used to have a wrong suggestion
+ loop {
+ let (e, l) = match "".split_whitespace().next() {
+ Some(word) => (word.is_empty(), word.len()),
+ None => break,
+ };
+
+ let _ = (e, l);
+ }
+}
+
+fn issue771() {
+ let mut a = 100;
+ let b = Some(true);
+ loop {
+ if a > 10 {
+ break;
+ }
+
+ match b {
+ Some(_) => a = 0,
+ None => break,
+ }
+ }
+}
+
+fn issue1017() {
+ let r: Result<u32, u32> = Ok(42);
+ let mut len = 1337;
+
+ loop {
+ match r {
+ Err(_) => len = 0,
+ Ok(length) => {
+ len = length;
+ break;
+ },
+ }
+ }
+}
+
+#[allow(clippy::never_loop)]
+fn issue1948() {
+ // should not trigger clippy::while_let_loop lint because break passes an expression
+ let a = Some(10);
+ let b = loop {
+ if let Some(c) = a {
+ break Some(c);
+ } else {
+ break None;
+ }
+ };
+}
+
+fn issue_7913(m: &std::sync::Mutex<Vec<u32>>) {
+ // Don't lint. The lock shouldn't be held while printing.
+ loop {
+ let x = if let Some(x) = m.lock().unwrap().pop() {
+ x
+ } else {
+ break;
+ };
+
+ println!("{}", x);
+ }
+}
+
+fn issue_5715(mut m: core::cell::RefCell<Option<u32>>) {
+ // Don't lint. The temporary from `borrow_mut` must be dropped before overwriting the `RefCell`.
+ loop {
+ let x = if let &mut Some(x) = &mut *m.borrow_mut() {
+ x
+ } else {
+ break;
+ };
+
+ m = core::cell::RefCell::new(Some(x + 1));
+ }
+}
diff --git a/src/tools/clippy/tests/ui/while_let_loop.stderr b/src/tools/clippy/tests/ui/while_let_loop.stderr
new file mode 100644
index 000000000..13dd0ee22
--- /dev/null
+++ b/src/tools/clippy/tests/ui/while_let_loop.stderr
@@ -0,0 +1,63 @@
+error: this loop could be written as a `while let` loop
+ --> $DIR/while_let_loop.rs:5:5
+ |
+LL | / loop {
+LL | | if let Some(_x) = y {
+LL | | let _v = 1;
+LL | | } else {
+LL | | break;
+LL | | }
+LL | | }
+ | |_____^ help: try: `while let Some(_x) = y { .. }`
+ |
+ = note: `-D clippy::while-let-loop` implied by `-D warnings`
+
+error: this loop could be written as a `while let` loop
+ --> $DIR/while_let_loop.rs:22:5
+ |
+LL | / loop {
+LL | | match y {
+LL | | Some(_x) => true,
+LL | | None => break,
+LL | | };
+LL | | }
+ | |_____^ help: try: `while let Some(_x) = y { .. }`
+
+error: this loop could be written as a `while let` loop
+ --> $DIR/while_let_loop.rs:29:5
+ |
+LL | / loop {
+LL | | let x = match y {
+LL | | Some(x) => x,
+LL | | None => break,
+... |
+LL | | let _str = "foo";
+LL | | }
+ | |_____^ help: try: `while let Some(x) = y { .. }`
+
+error: this loop could be written as a `while let` loop
+ --> $DIR/while_let_loop.rs:38:5
+ |
+LL | / loop {
+LL | | let x = match y {
+LL | | Some(x) => x,
+LL | | None => break,
+... |
+LL | | }
+LL | | }
+ | |_____^ help: try: `while let Some(x) = y { .. }`
+
+error: this loop could be written as a `while let` loop
+ --> $DIR/while_let_loop.rs:68:5
+ |
+LL | / loop {
+LL | | let (e, l) = match "".split_whitespace().next() {
+LL | | Some(word) => (word.is_empty(), word.len()),
+LL | | None => break,
+... |
+LL | | let _ = (e, l);
+LL | | }
+ | |_____^ help: try: `while let Some(word) = "".split_whitespace().next() { .. }`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/while_let_on_iterator.fixed b/src/tools/clippy/tests/ui/while_let_on_iterator.fixed
new file mode 100644
index 000000000..c57c46736
--- /dev/null
+++ b/src/tools/clippy/tests/ui/while_let_on_iterator.fixed
@@ -0,0 +1,453 @@
+// run-rustfix
+
+#![warn(clippy::while_let_on_iterator)]
+#![allow(
+ clippy::never_loop,
+ unreachable_code,
+ unused_mut,
+ dead_code,
+ clippy::equatable_if_let,
+ clippy::manual_find,
+ clippy::redundant_closure_call
+)]
+
+fn base() {
+ let mut iter = 1..20;
+ for x in iter {
+ println!("{}", x);
+ }
+
+ let mut iter = 1..20;
+ for x in iter {
+ println!("{}", x);
+ }
+
+ let mut iter = 1..20;
+ for _ in iter {}
+
+ let mut iter = 1..20;
+ while let None = iter.next() {} // this is fine (if nonsensical)
+
+ let mut iter = 1..20;
+ if let Some(x) = iter.next() {
+ // also fine
+ println!("{}", x)
+ }
+
+ // the following shouldn't warn because it can't be written with a for loop
+ let mut iter = 1u32..20;
+ while let Some(_) = iter.next() {
+ println!("next: {:?}", iter.next())
+ }
+
+ // neither can this
+ let mut iter = 1u32..20;
+ while let Some(_) = iter.next() {
+ println!("next: {:?}", iter.next());
+ }
+
+ // or this
+ let mut iter = 1u32..20;
+ while let Some(_) = iter.next() {
+ iter = 1..20;
+ }
+}
+
+// Issue #1188
+fn refutable() {
+ let a = [42, 1337];
+ let mut b = a.iter();
+
+ // consume all the 42s
+ while let Some(&42) = b.next() {}
+
+ let a = [(1, 2, 3)];
+ let mut b = a.iter();
+
+ while let Some(&(1, 2, 3)) = b.next() {}
+
+ let a = [Some(42)];
+ let mut b = a.iter();
+
+ while let Some(&None) = b.next() {}
+
+ /* This gives “refutable pattern in `for` loop binding: `&_` not covered”
+ for &42 in b {}
+ for &(1, 2, 3) in b {}
+ for &Option::None in b.next() {}
+ // */
+}
+
+fn refutable2() {
+ // Issue 3780
+ {
+ let v = vec![1, 2, 3];
+ let mut it = v.windows(2);
+ while let Some([x, y]) = it.next() {
+ println!("x: {}", x);
+ println!("y: {}", y);
+ }
+
+ let mut it = v.windows(2);
+ while let Some([x, ..]) = it.next() {
+ println!("x: {}", x);
+ }
+
+ let mut it = v.windows(2);
+ while let Some([.., y]) = it.next() {
+ println!("y: {}", y);
+ }
+
+ let mut it = v.windows(2);
+ for [..] in it {}
+
+ let v = vec![[1], [2], [3]];
+ let mut it = v.iter();
+ while let Some([1]) = it.next() {}
+
+ let mut it = v.iter();
+ for [_x] in it {}
+ }
+
+ // binding
+ {
+ let v = vec![1, 2, 3];
+ let mut it = v.iter();
+ while let Some(x @ 1) = it.next() {
+ println!("{}", x);
+ }
+
+ let v = vec![[1], [2], [3]];
+ let mut it = v.iter();
+ for x @ [_] in it {
+ println!("{:?}", x);
+ }
+ }
+
+ // false negative
+ {
+ let v = vec![1, 2, 3];
+ let mut it = v.iter().map(Some);
+ while let Some(Some(_) | None) = it.next() {
+ println!("1");
+ }
+ }
+}
+
+fn nested_loops() {
+ let a = [42, 1337];
+
+ loop {
+ let mut y = a.iter();
+ for _ in y {
+ // use a for loop here
+ }
+ }
+}
+
+fn issue1121() {
+ use std::collections::HashSet;
+ let mut values = HashSet::new();
+ values.insert(1);
+
+ while let Some(&value) = values.iter().next() {
+ values.remove(&value);
+ }
+}
+
+fn issue2965() {
+ // This should not cause an ICE
+
+ use std::collections::HashSet;
+ let mut values = HashSet::new();
+ values.insert(1);
+
+ while let Some(..) = values.iter().next() {}
+}
+
+fn issue3670() {
+ let array = [Some(0), None, Some(1)];
+ let mut iter = array.iter();
+
+ while let Some(elem) = iter.next() {
+ let _ = elem.or_else(|| *iter.next()?);
+ }
+}
+
+fn issue1654() {
+ // should not lint if the iterator is generated on every iteration
+ use std::collections::HashSet;
+ let mut values = HashSet::new();
+ values.insert(1);
+
+ while let Some(..) = values.iter().next() {
+ values.remove(&1);
+ }
+
+ while let Some(..) = values.iter().map(|x| x + 1).next() {}
+
+ let chars = "Hello, World!".char_indices();
+ while let Some((i, ch)) = chars.clone().next() {
+ println!("{}: {}", i, ch);
+ }
+}
+
+fn issue6491() {
+ // Used in outer loop, needs &mut
+ let mut it = 1..40;
+ while let Some(n) = it.next() {
+ for m in it.by_ref() {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+ println!("n still is {}", n);
+ }
+
+ // This is fine, inner loop uses a new iterator.
+ let mut it = 1..40;
+ for n in it {
+ let mut it = 1..40;
+ for m in it {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+
+ // Weird binding shouldn't change anything.
+ let (mut it, _) = (1..40, 0);
+ for m in it {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+
+ // Used after the loop, needs &mut.
+ let mut it = 1..40;
+ for m in it.by_ref() {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+ println!("next item {}", it.next().unwrap());
+
+ println!("n still is {}", n);
+ }
+}
+
+fn issue6231() {
+ // Closure in the outer loop, needs &mut
+ let mut it = 1..40;
+ let mut opt = Some(0);
+ while let Some(n) = opt.take().or_else(|| it.next()) {
+ for m in it.by_ref() {
+ if n % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+ println!("n still is {}", n);
+ }
+}
+
+fn issue1924() {
+ struct S<T>(T);
+ impl<T: Iterator<Item = u32>> S<T> {
+ fn f(&mut self) -> Option<u32> {
+ // Used as a field.
+ for i in self.0.by_ref() {
+ if !(3..8).contains(&i) {
+ return Some(i);
+ }
+ }
+ None
+ }
+
+ fn f2(&mut self) -> Option<u32> {
+ // Don't lint, self borrowed inside the loop
+ while let Some(i) = self.0.next() {
+ if i == 1 {
+ return self.f();
+ }
+ }
+ None
+ }
+ }
+ impl<T: Iterator<Item = u32>> S<(S<T>, Option<u32>)> {
+ fn f3(&mut self) -> Option<u32> {
+ // Don't lint, self borrowed inside the loop
+ while let Some(i) = self.0.0.0.next() {
+ if i == 1 {
+ return self.0.0.f();
+ }
+ }
+ while let Some(i) = self.0.0.0.next() {
+ if i == 1 {
+ return self.f3();
+ }
+ }
+ // This one is fine, a different field is borrowed
+ for i in self.0.0.0.by_ref() {
+ if i == 1 {
+ return self.0.1.take();
+ } else {
+ self.0.1 = Some(i);
+ }
+ }
+ None
+ }
+ }
+
+ struct S2<T>(T, u32);
+ impl<T: Iterator<Item = u32>> Iterator for S2<T> {
+ type Item = u32;
+ fn next(&mut self) -> Option<u32> {
+ self.0.next()
+ }
+ }
+
+ // Don't lint, field of the iterator is accessed in the loop
+ let mut it = S2(1..40, 0);
+ while let Some(n) = it.next() {
+ if n == it.1 {
+ break;
+ }
+ }
+
+ // Needs &mut, field of the iterator is accessed after the loop
+ let mut it = S2(1..40, 0);
+ for n in it.by_ref() {
+ if n == 0 {
+ break;
+ }
+ }
+ println!("iterator field {}", it.1);
+}
+
+fn issue7249() {
+ let mut it = 0..10;
+ let mut x = || {
+ // Needs &mut, the closure can be called multiple times
+ for x in it.by_ref() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ };
+ x();
+ x();
+}
+
+fn issue7510() {
+ let mut it = 0..10;
+ let it = &mut it;
+ // Needs to reborrow `it` as the binding isn't mutable
+ for x in it.by_ref() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ println!("{}", it.next().unwrap());
+
+ struct S<T>(T);
+ let mut it = 0..10;
+ let it = S(&mut it);
+ // Needs to reborrow `it.0` as the binding isn't mutable
+ for x in it.0.by_ref() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ println!("{}", it.0.next().unwrap());
+}
+
+fn exact_match_with_single_field() {
+ struct S<T>(T);
+ let mut s = S(0..10);
+ // Don't lint. `s.0` is used inside the loop.
+ while let Some(_) = s.0.next() {
+ let _ = &mut s.0;
+ }
+}
+
+fn custom_deref() {
+ struct S1<T> {
+ x: T,
+ }
+ struct S2<T>(S1<T>);
+ impl<T> core::ops::Deref for S2<T> {
+ type Target = S1<T>;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ impl<T> core::ops::DerefMut for S2<T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+ }
+
+ let mut s = S2(S1 { x: 0..10 });
+ for x in s.x.by_ref() {
+ println!("{}", x);
+ }
+}
+
+fn issue_8113() {
+ let mut x = [0..10];
+ for x in x[0].by_ref() {
+ println!("{}", x);
+ }
+}
+
+fn fn_once_closure() {
+ let mut it = 0..10;
+ (|| {
+ for x in it {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ })();
+
+ fn f(_: impl FnOnce()) {}
+ let mut it = 0..10;
+ f(|| {
+ for x in it {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ });
+
+ fn f2(_: impl FnMut()) {}
+ let mut it = 0..10;
+ f2(|| {
+ for x in it.by_ref() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ });
+
+ fn f3(_: fn()) {}
+ f3(|| {
+ let mut it = 0..10;
+ for x in it {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ })
+}
+
+fn main() {
+ let mut it = 0..20;
+ for _ in it {
+ println!("test");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/while_let_on_iterator.rs b/src/tools/clippy/tests/ui/while_let_on_iterator.rs
new file mode 100644
index 000000000..8b9a2dbcc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/while_let_on_iterator.rs
@@ -0,0 +1,453 @@
+// run-rustfix
+
+#![warn(clippy::while_let_on_iterator)]
+#![allow(
+ clippy::never_loop,
+ unreachable_code,
+ unused_mut,
+ dead_code,
+ clippy::equatable_if_let,
+ clippy::manual_find,
+ clippy::redundant_closure_call
+)]
+
+fn base() {
+ let mut iter = 1..20;
+ while let Option::Some(x) = iter.next() {
+ println!("{}", x);
+ }
+
+ let mut iter = 1..20;
+ while let Some(x) = iter.next() {
+ println!("{}", x);
+ }
+
+ let mut iter = 1..20;
+ while let Some(_) = iter.next() {}
+
+ let mut iter = 1..20;
+ while let None = iter.next() {} // this is fine (if nonsensical)
+
+ let mut iter = 1..20;
+ if let Some(x) = iter.next() {
+ // also fine
+ println!("{}", x)
+ }
+
+ // the following shouldn't warn because it can't be written with a for loop
+ let mut iter = 1u32..20;
+ while let Some(_) = iter.next() {
+ println!("next: {:?}", iter.next())
+ }
+
+ // neither can this
+ let mut iter = 1u32..20;
+ while let Some(_) = iter.next() {
+ println!("next: {:?}", iter.next());
+ }
+
+ // or this
+ let mut iter = 1u32..20;
+ while let Some(_) = iter.next() {
+ iter = 1..20;
+ }
+}
+
+// Issue #1188
+fn refutable() {
+ let a = [42, 1337];
+ let mut b = a.iter();
+
+ // consume all the 42s
+ while let Some(&42) = b.next() {}
+
+ let a = [(1, 2, 3)];
+ let mut b = a.iter();
+
+ while let Some(&(1, 2, 3)) = b.next() {}
+
+ let a = [Some(42)];
+ let mut b = a.iter();
+
+ while let Some(&None) = b.next() {}
+
+ /* This gives “refutable pattern in `for` loop binding: `&_` not covered”
+ for &42 in b {}
+ for &(1, 2, 3) in b {}
+ for &Option::None in b.next() {}
+ // */
+}
+
+fn refutable2() {
+ // Issue 3780
+ {
+ let v = vec![1, 2, 3];
+ let mut it = v.windows(2);
+ while let Some([x, y]) = it.next() {
+ println!("x: {}", x);
+ println!("y: {}", y);
+ }
+
+ let mut it = v.windows(2);
+ while let Some([x, ..]) = it.next() {
+ println!("x: {}", x);
+ }
+
+ let mut it = v.windows(2);
+ while let Some([.., y]) = it.next() {
+ println!("y: {}", y);
+ }
+
+ let mut it = v.windows(2);
+ while let Some([..]) = it.next() {}
+
+ let v = vec![[1], [2], [3]];
+ let mut it = v.iter();
+ while let Some([1]) = it.next() {}
+
+ let mut it = v.iter();
+ while let Some([_x]) = it.next() {}
+ }
+
+ // binding
+ {
+ let v = vec![1, 2, 3];
+ let mut it = v.iter();
+ while let Some(x @ 1) = it.next() {
+ println!("{}", x);
+ }
+
+ let v = vec![[1], [2], [3]];
+ let mut it = v.iter();
+ while let Some(x @ [_]) = it.next() {
+ println!("{:?}", x);
+ }
+ }
+
+ // false negative
+ {
+ let v = vec![1, 2, 3];
+ let mut it = v.iter().map(Some);
+ while let Some(Some(_) | None) = it.next() {
+ println!("1");
+ }
+ }
+}
+
+fn nested_loops() {
+ let a = [42, 1337];
+
+ loop {
+ let mut y = a.iter();
+ while let Some(_) = y.next() {
+ // use a for loop here
+ }
+ }
+}
+
+fn issue1121() {
+ use std::collections::HashSet;
+ let mut values = HashSet::new();
+ values.insert(1);
+
+ while let Some(&value) = values.iter().next() {
+ values.remove(&value);
+ }
+}
+
+fn issue2965() {
+ // This should not cause an ICE
+
+ use std::collections::HashSet;
+ let mut values = HashSet::new();
+ values.insert(1);
+
+ while let Some(..) = values.iter().next() {}
+}
+
+fn issue3670() {
+ let array = [Some(0), None, Some(1)];
+ let mut iter = array.iter();
+
+ while let Some(elem) = iter.next() {
+ let _ = elem.or_else(|| *iter.next()?);
+ }
+}
+
+fn issue1654() {
+ // should not lint if the iterator is generated on every iteration
+ use std::collections::HashSet;
+ let mut values = HashSet::new();
+ values.insert(1);
+
+ while let Some(..) = values.iter().next() {
+ values.remove(&1);
+ }
+
+ while let Some(..) = values.iter().map(|x| x + 1).next() {}
+
+ let chars = "Hello, World!".char_indices();
+ while let Some((i, ch)) = chars.clone().next() {
+ println!("{}: {}", i, ch);
+ }
+}
+
+fn issue6491() {
+ // Used in outer loop, needs &mut
+ let mut it = 1..40;
+ while let Some(n) = it.next() {
+ while let Some(m) = it.next() {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+ println!("n still is {}", n);
+ }
+
+ // This is fine, inner loop uses a new iterator.
+ let mut it = 1..40;
+ while let Some(n) = it.next() {
+ let mut it = 1..40;
+ while let Some(m) = it.next() {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+
+ // Weird binding shouldn't change anything.
+ let (mut it, _) = (1..40, 0);
+ while let Some(m) = it.next() {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+
+ // Used after the loop, needs &mut.
+ let mut it = 1..40;
+ while let Some(m) = it.next() {
+ if m % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+ println!("next item {}", it.next().unwrap());
+
+ println!("n still is {}", n);
+ }
+}
+
+fn issue6231() {
+ // Closure in the outer loop, needs &mut
+ let mut it = 1..40;
+ let mut opt = Some(0);
+ while let Some(n) = opt.take().or_else(|| it.next()) {
+ while let Some(m) = it.next() {
+ if n % 10 == 0 {
+ break;
+ }
+ println!("doing something with m: {}", m);
+ }
+ println!("n still is {}", n);
+ }
+}
+
+fn issue1924() {
+ struct S<T>(T);
+ impl<T: Iterator<Item = u32>> S<T> {
+ fn f(&mut self) -> Option<u32> {
+ // Used as a field.
+ while let Some(i) = self.0.next() {
+ if !(3..8).contains(&i) {
+ return Some(i);
+ }
+ }
+ None
+ }
+
+ fn f2(&mut self) -> Option<u32> {
+ // Don't lint, self borrowed inside the loop
+ while let Some(i) = self.0.next() {
+ if i == 1 {
+ return self.f();
+ }
+ }
+ None
+ }
+ }
+ impl<T: Iterator<Item = u32>> S<(S<T>, Option<u32>)> {
+ fn f3(&mut self) -> Option<u32> {
+ // Don't lint, self borrowed inside the loop
+ while let Some(i) = self.0.0.0.next() {
+ if i == 1 {
+ return self.0.0.f();
+ }
+ }
+ while let Some(i) = self.0.0.0.next() {
+ if i == 1 {
+ return self.f3();
+ }
+ }
+ // This one is fine, a different field is borrowed
+ while let Some(i) = self.0.0.0.next() {
+ if i == 1 {
+ return self.0.1.take();
+ } else {
+ self.0.1 = Some(i);
+ }
+ }
+ None
+ }
+ }
+
+ struct S2<T>(T, u32);
+ impl<T: Iterator<Item = u32>> Iterator for S2<T> {
+ type Item = u32;
+ fn next(&mut self) -> Option<u32> {
+ self.0.next()
+ }
+ }
+
+ // Don't lint, field of the iterator is accessed in the loop
+ let mut it = S2(1..40, 0);
+ while let Some(n) = it.next() {
+ if n == it.1 {
+ break;
+ }
+ }
+
+ // Needs &mut, field of the iterator is accessed after the loop
+ let mut it = S2(1..40, 0);
+ while let Some(n) = it.next() {
+ if n == 0 {
+ break;
+ }
+ }
+ println!("iterator field {}", it.1);
+}
+
+fn issue7249() {
+ let mut it = 0..10;
+ let mut x = || {
+ // Needs &mut, the closure can be called multiple times
+ while let Some(x) = it.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ };
+ x();
+ x();
+}
+
+fn issue7510() {
+ let mut it = 0..10;
+ let it = &mut it;
+ // Needs to reborrow `it` as the binding isn't mutable
+ while let Some(x) = it.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ println!("{}", it.next().unwrap());
+
+ struct S<T>(T);
+ let mut it = 0..10;
+ let it = S(&mut it);
+ // Needs to reborrow `it.0` as the binding isn't mutable
+ while let Some(x) = it.0.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ println!("{}", it.0.next().unwrap());
+}
+
+fn exact_match_with_single_field() {
+ struct S<T>(T);
+ let mut s = S(0..10);
+ // Don't lint. `s.0` is used inside the loop.
+ while let Some(_) = s.0.next() {
+ let _ = &mut s.0;
+ }
+}
+
+fn custom_deref() {
+ struct S1<T> {
+ x: T,
+ }
+ struct S2<T>(S1<T>);
+ impl<T> core::ops::Deref for S2<T> {
+ type Target = S1<T>;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ impl<T> core::ops::DerefMut for S2<T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+ }
+
+ let mut s = S2(S1 { x: 0..10 });
+ while let Some(x) = s.x.next() {
+ println!("{}", x);
+ }
+}
+
+fn issue_8113() {
+ let mut x = [0..10];
+ while let Some(x) = x[0].next() {
+ println!("{}", x);
+ }
+}
+
+fn fn_once_closure() {
+ let mut it = 0..10;
+ (|| {
+ while let Some(x) = it.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ })();
+
+ fn f(_: impl FnOnce()) {}
+ let mut it = 0..10;
+ f(|| {
+ while let Some(x) = it.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ });
+
+ fn f2(_: impl FnMut()) {}
+ let mut it = 0..10;
+ f2(|| {
+ while let Some(x) = it.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ });
+
+ fn f3(_: fn()) {}
+ f3(|| {
+ let mut it = 0..10;
+ while let Some(x) = it.next() {
+ if x % 2 == 0 {
+ break;
+ }
+ }
+ })
+}
+
+fn main() {
+ let mut it = 0..20;
+ while let Some(..) = it.next() {
+ println!("test");
+ }
+}
diff --git a/src/tools/clippy/tests/ui/while_let_on_iterator.stderr b/src/tools/clippy/tests/ui/while_let_on_iterator.stderr
new file mode 100644
index 000000000..3236765e1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/while_let_on_iterator.stderr
@@ -0,0 +1,160 @@
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:16:5
+ |
+LL | while let Option::Some(x) = iter.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in iter`
+ |
+ = note: `-D clippy::while-let-on-iterator` implied by `-D warnings`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:21:5
+ |
+LL | while let Some(x) = iter.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in iter`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:26:5
+ |
+LL | while let Some(_) = iter.next() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for _ in iter`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:102:9
+ |
+LL | while let Some([..]) = it.next() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for [..] in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:109:9
+ |
+LL | while let Some([_x]) = it.next() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for [_x] in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:122:9
+ |
+LL | while let Some(x @ [_]) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x @ [_] in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:142:9
+ |
+LL | while let Some(_) = y.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for _ in y`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:199:9
+ |
+LL | while let Some(m) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for m in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:210:5
+ |
+LL | while let Some(n) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for n in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:212:9
+ |
+LL | while let Some(m) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for m in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:221:9
+ |
+LL | while let Some(m) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for m in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:230:9
+ |
+LL | while let Some(m) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for m in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:247:9
+ |
+LL | while let Some(m) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for m in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:262:13
+ |
+LL | while let Some(i) = self.0.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for i in self.0.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:294:13
+ |
+LL | while let Some(i) = self.0.0.0.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for i in self.0.0.0.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:323:5
+ |
+LL | while let Some(n) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for n in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:335:9
+ |
+LL | while let Some(x) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:349:5
+ |
+LL | while let Some(x) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:360:5
+ |
+LL | while let Some(x) = it.0.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it.0.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:395:5
+ |
+LL | while let Some(x) = s.x.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in s.x.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:402:5
+ |
+LL | while let Some(x) = x[0].next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in x[0].by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:410:9
+ |
+LL | while let Some(x) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:420:9
+ |
+LL | while let Some(x) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:430:9
+ |
+LL | while let Some(x) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it.by_ref()`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:440:9
+ |
+LL | while let Some(x) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in it`
+
+error: this loop could be written as a `for` loop
+ --> $DIR/while_let_on_iterator.rs:450:5
+ |
+LL | while let Some(..) = it.next() {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for _ in it`
+
+error: aborting due to 26 previous errors
+
diff --git a/src/tools/clippy/tests/ui/wild_in_or_pats.rs b/src/tools/clippy/tests/ui/wild_in_or_pats.rs
new file mode 100644
index 000000000..ad600f125
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wild_in_or_pats.rs
@@ -0,0 +1,36 @@
+#![warn(clippy::wildcard_in_or_patterns)]
+
+fn main() {
+ match "foo" {
+ "a" => {
+ dbg!("matched a");
+ },
+ "bar" | _ => {
+ dbg!("matched (bar or) wild");
+ },
+ };
+ match "foo" {
+ "a" => {
+ dbg!("matched a");
+ },
+ "bar" | "bar2" | _ => {
+ dbg!("matched (bar or bar2 or) wild");
+ },
+ };
+ match "foo" {
+ "a" => {
+ dbg!("matched a");
+ },
+ _ | "bar" | _ => {
+ dbg!("matched (bar or) wild");
+ },
+ };
+ match "foo" {
+ "a" => {
+ dbg!("matched a");
+ },
+ _ | "bar" => {
+ dbg!("matched (bar or) wild");
+ },
+ };
+}
diff --git a/src/tools/clippy/tests/ui/wild_in_or_pats.stderr b/src/tools/clippy/tests/ui/wild_in_or_pats.stderr
new file mode 100644
index 000000000..45b87aa0f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wild_in_or_pats.stderr
@@ -0,0 +1,35 @@
+error: wildcard pattern covers any other pattern as it will match anyway
+ --> $DIR/wild_in_or_pats.rs:8:9
+ |
+LL | "bar" | _ => {
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::wildcard-in-or-patterns` implied by `-D warnings`
+ = help: consider handling `_` separately
+
+error: wildcard pattern covers any other pattern as it will match anyway
+ --> $DIR/wild_in_or_pats.rs:16:9
+ |
+LL | "bar" | "bar2" | _ => {
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider handling `_` separately
+
+error: wildcard pattern covers any other pattern as it will match anyway
+ --> $DIR/wild_in_or_pats.rs:24:9
+ |
+LL | _ | "bar" | _ => {
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider handling `_` separately
+
+error: wildcard pattern covers any other pattern as it will match anyway
+ --> $DIR/wild_in_or_pats.rs:32:9
+ |
+LL | _ | "bar" => {
+ | ^^^^^^^^^
+ |
+ = help: consider handling `_` separately
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.fixed b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.fixed
new file mode 100644
index 000000000..3ee4ab48a
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.fixed
@@ -0,0 +1,104 @@
+// run-rustfix
+// aux-build:non-exhaustive-enum.rs
+
+#![deny(clippy::wildcard_enum_match_arm)]
+#![allow(
+ unreachable_code,
+ unused_variables,
+ dead_code,
+ clippy::single_match,
+ clippy::wildcard_in_or_patterns,
+ clippy::unnested_or_patterns,
+ clippy::diverging_sub_expression
+)]
+
+extern crate non_exhaustive_enum;
+
+use non_exhaustive_enum::ErrorKind;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum Color {
+ Red,
+ Green,
+ Blue,
+ Rgb(u8, u8, u8),
+ Cyan,
+}
+
+impl Color {
+ fn is_monochrome(self) -> bool {
+ match self {
+ Color::Red | Color::Green | Color::Blue => true,
+ Color::Rgb(r, g, b) => r | g == 0 || r | b == 0 || g | b == 0,
+ Color::Cyan => false,
+ }
+ }
+}
+
+fn main() {
+ let color = Color::Rgb(0, 0, 127);
+ match color {
+ Color::Red => println!("Red"),
+ Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan => eprintln!("Not red"),
+ };
+ match color {
+ Color::Red => println!("Red"),
+ _not_red @ Color::Green | _not_red @ Color::Blue | _not_red @ Color::Rgb(..) | _not_red @ Color::Cyan => eprintln!("Not red"),
+ };
+ let _str = match color {
+ Color::Red => "Red".to_owned(),
+ not_red @ Color::Green | not_red @ Color::Blue | not_red @ Color::Rgb(..) | not_red @ Color::Cyan => format!("{:?}", not_red),
+ };
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Blue => {},
+ Color::Cyan => {},
+ c if c.is_monochrome() => {},
+ Color::Rgb(_, _, _) => {},
+ };
+ let _str = match color {
+ Color::Red => "Red",
+ c @ Color::Green | c @ Color::Blue | c @ Color::Rgb(_, _, _) | c @ Color::Cyan => "Not red",
+ };
+ match color {
+ Color::Rgb(r, _, _) if r > 0 => "Some red",
+ Color::Red | Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan => "No red",
+ };
+ match color {
+ Color::Red | Color::Green | Color::Blue | Color::Cyan => {},
+ Color::Rgb(..) => {},
+ };
+ let x: u8 = unimplemented!();
+ match x {
+ 0 => {},
+ 140 => {},
+ _ => {},
+ };
+ // We need to use an enum not defined in this test because non_exhaustive is ignored for the
+ // purposes of dead code analysis within a crate.
+ let error_kind = ErrorKind::NotFound;
+ match error_kind {
+ ErrorKind::NotFound => {},
+ ErrorKind::PermissionDenied | _ => {},
+ }
+ match error_kind {
+ ErrorKind::NotFound => {},
+ ErrorKind::PermissionDenied => {},
+ _ => {},
+ }
+
+ {
+ #![allow(clippy::manual_non_exhaustive)]
+ pub enum Enum {
+ A,
+ B,
+ #[doc(hidden)]
+ __Private,
+ }
+ match Enum::A {
+ Enum::A => (),
+ Enum::B | _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.rs b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.rs
new file mode 100644
index 000000000..468865504
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.rs
@@ -0,0 +1,104 @@
+// run-rustfix
+// aux-build:non-exhaustive-enum.rs
+
+#![deny(clippy::wildcard_enum_match_arm)]
+#![allow(
+ unreachable_code,
+ unused_variables,
+ dead_code,
+ clippy::single_match,
+ clippy::wildcard_in_or_patterns,
+ clippy::unnested_or_patterns,
+ clippy::diverging_sub_expression
+)]
+
+extern crate non_exhaustive_enum;
+
+use non_exhaustive_enum::ErrorKind;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum Color {
+ Red,
+ Green,
+ Blue,
+ Rgb(u8, u8, u8),
+ Cyan,
+}
+
+impl Color {
+ fn is_monochrome(self) -> bool {
+ match self {
+ Color::Red | Color::Green | Color::Blue => true,
+ Color::Rgb(r, g, b) => r | g == 0 || r | b == 0 || g | b == 0,
+ Color::Cyan => false,
+ }
+ }
+}
+
+fn main() {
+ let color = Color::Rgb(0, 0, 127);
+ match color {
+ Color::Red => println!("Red"),
+ _ => eprintln!("Not red"),
+ };
+ match color {
+ Color::Red => println!("Red"),
+ _not_red => eprintln!("Not red"),
+ };
+ let _str = match color {
+ Color::Red => "Red".to_owned(),
+ not_red => format!("{:?}", not_red),
+ };
+ match color {
+ Color::Red => {},
+ Color::Green => {},
+ Color::Blue => {},
+ Color::Cyan => {},
+ c if c.is_monochrome() => {},
+ Color::Rgb(_, _, _) => {},
+ };
+ let _str = match color {
+ Color::Red => "Red",
+ c @ Color::Green | c @ Color::Blue | c @ Color::Rgb(_, _, _) | c @ Color::Cyan => "Not red",
+ };
+ match color {
+ Color::Rgb(r, _, _) if r > 0 => "Some red",
+ _ => "No red",
+ };
+ match color {
+ Color::Red | Color::Green | Color::Blue | Color::Cyan => {},
+ Color::Rgb(..) => {},
+ };
+ let x: u8 = unimplemented!();
+ match x {
+ 0 => {},
+ 140 => {},
+ _ => {},
+ };
+ // We need to use an enum not defined in this test because non_exhaustive is ignored for the
+ // purposes of dead code analysis within a crate.
+ let error_kind = ErrorKind::NotFound;
+ match error_kind {
+ ErrorKind::NotFound => {},
+ _ => {},
+ }
+ match error_kind {
+ ErrorKind::NotFound => {},
+ ErrorKind::PermissionDenied => {},
+ _ => {},
+ }
+
+ {
+ #![allow(clippy::manual_non_exhaustive)]
+ pub enum Enum {
+ A,
+ B,
+ #[doc(hidden)]
+ __Private,
+ }
+ match Enum::A {
+ Enum::A => (),
+ _ => (),
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr
new file mode 100644
index 000000000..d63f20903
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr
@@ -0,0 +1,44 @@
+error: wildcard match will also match any future added variants
+ --> $DIR/wildcard_enum_match_arm.rs:42:9
+ |
+LL | _ => eprintln!("Not red"),
+ | ^ help: try this: `Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan`
+ |
+note: the lint level is defined here
+ --> $DIR/wildcard_enum_match_arm.rs:4:9
+ |
+LL | #![deny(clippy::wildcard_enum_match_arm)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: wildcard match will also match any future added variants
+ --> $DIR/wildcard_enum_match_arm.rs:46:9
+ |
+LL | _not_red => eprintln!("Not red"),
+ | ^^^^^^^^ help: try this: `_not_red @ Color::Green | _not_red @ Color::Blue | _not_red @ Color::Rgb(..) | _not_red @ Color::Cyan`
+
+error: wildcard match will also match any future added variants
+ --> $DIR/wildcard_enum_match_arm.rs:50:9
+ |
+LL | not_red => format!("{:?}", not_red),
+ | ^^^^^^^ help: try this: `not_red @ Color::Green | not_red @ Color::Blue | not_red @ Color::Rgb(..) | not_red @ Color::Cyan`
+
+error: wildcard match will also match any future added variants
+ --> $DIR/wildcard_enum_match_arm.rs:66:9
+ |
+LL | _ => "No red",
+ | ^ help: try this: `Color::Red | Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan`
+
+error: wildcard matches known variants and will also match future added variants
+ --> $DIR/wildcard_enum_match_arm.rs:83:9
+ |
+LL | _ => {},
+ | ^ help: try this: `ErrorKind::PermissionDenied | _`
+
+error: wildcard matches known variants and will also match future added variants
+ --> $DIR/wildcard_enum_match_arm.rs:101:13
+ |
+LL | _ => (),
+ | ^ help: try this: `Enum::B | _`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/wildcard_imports.fixed b/src/tools/clippy/tests/ui/wildcard_imports.fixed
new file mode 100644
index 000000000..ef55f1c31
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wildcard_imports.fixed
@@ -0,0 +1,245 @@
+// edition:2015
+// run-rustfix
+// aux-build:wildcard_imports_helper.rs
+
+// the 2015 edition here is needed because edition 2018 changed the module system
+// (see https://doc.rust-lang.org/edition-guide/rust-2018/path-changes.html) which means the lint
+// no longer detects some of the cases starting with Rust 2018.
+// FIXME: We should likely add another edition 2021 test case for this lint
+
+#![warn(clippy::wildcard_imports)]
+#![allow(unused, clippy::unnecessary_wraps, clippy::let_unit_value)]
+#![warn(unused_imports)]
+
+extern crate wildcard_imports_helper;
+
+use crate::fn_mod::foo;
+use crate::mod_mod::inner_mod;
+use crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod};
+#[macro_use]
+use crate::struct_mod::{A, inner_struct_mod};
+
+#[allow(unused_imports)]
+use wildcard_imports_helper::inner::inner_for_self_import;
+use wildcard_imports_helper::inner::inner_for_self_import::inner_extern_bar;
+use wildcard_imports_helper::{ExternA, extern_foo};
+
+use std::io::prelude::*;
+use wildcard_imports_helper::prelude::v1::*;
+
+struct ReadFoo;
+
+impl Read for ReadFoo {
+ fn read(&mut self, _buf: &mut [u8]) -> std::io::Result<usize> {
+ Ok(0)
+ }
+}
+
+mod fn_mod {
+ pub fn foo() {}
+}
+
+mod mod_mod {
+ pub mod inner_mod {
+ pub fn foo() {}
+ }
+}
+
+mod multi_fn_mod {
+ pub fn multi_foo() {}
+ pub fn multi_bar() {}
+ pub fn multi_baz() {}
+ pub mod multi_inner_mod {
+ pub fn foo() {}
+ }
+}
+
+mod struct_mod {
+ pub struct A;
+ pub struct B;
+ pub mod inner_struct_mod {
+ pub struct C;
+ }
+
+ #[macro_export]
+ macro_rules! double_struct_import_test {
+ () => {
+ let _ = A;
+ };
+ }
+}
+
+fn main() {
+ foo();
+ multi_foo();
+ multi_bar();
+ multi_inner_mod::foo();
+ inner_mod::foo();
+ extern_foo();
+ inner_extern_bar();
+
+ let _ = A;
+ let _ = inner_struct_mod::C;
+ let _ = ExternA;
+ let _ = PreludeModAnywhere;
+
+ double_struct_import_test!();
+ double_struct_import_test!();
+}
+
+mod in_fn_test {
+ pub use self::inner_exported::*;
+ #[allow(unused_imports)]
+ pub(crate) use self::inner_exported2::*;
+
+ fn test_intern() {
+ use crate::fn_mod::foo;
+
+ foo();
+ }
+
+ fn test_extern() {
+ use wildcard_imports_helper::inner::inner_for_self_import::{self, inner_extern_foo};
+ use wildcard_imports_helper::{ExternA, extern_foo};
+
+ inner_for_self_import::inner_extern_foo();
+ inner_extern_foo();
+
+ extern_foo();
+
+ let _ = ExternA;
+ }
+
+ fn test_inner_nested() {
+ use self::{inner::inner_foo, inner2::inner_bar};
+
+ inner_foo();
+ inner_bar();
+ }
+
+ fn test_extern_reexported() {
+ use wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported};
+
+ extern_exported();
+ let _ = ExternExportedStruct;
+ let _ = ExternExportedEnum::A;
+ }
+
+ mod inner_exported {
+ pub fn exported() {}
+ pub struct ExportedStruct;
+ pub enum ExportedEnum {
+ A,
+ }
+ }
+
+ mod inner_exported2 {
+ pub(crate) fn exported2() {}
+ }
+
+ mod inner {
+ pub fn inner_foo() {}
+ }
+
+ mod inner2 {
+ pub fn inner_bar() {}
+ }
+}
+
+fn test_reexported() {
+ use crate::in_fn_test::{ExportedEnum, ExportedStruct, exported};
+
+ exported();
+ let _ = ExportedStruct;
+ let _ = ExportedEnum::A;
+}
+
+#[rustfmt::skip]
+fn test_weird_formatting() {
+ use crate:: in_fn_test::exported;
+ use crate:: fn_mod::foo;
+
+ exported();
+ foo();
+}
+
+mod super_imports {
+ fn foofoo() {}
+
+ mod should_be_replaced {
+ use super::foofoo;
+
+ fn with_super() {
+ let _ = foofoo();
+ }
+ }
+
+ mod test_should_pass {
+ use super::*;
+
+ fn with_super() {
+ let _ = foofoo();
+ }
+ }
+
+ mod test_should_pass_inside_function {
+ fn with_super_inside_function() {
+ use super::*;
+ let _ = foofoo();
+ }
+ }
+
+ mod test_should_pass_further_inside {
+ fn insidefoo() {}
+ mod inner {
+ use super::*;
+ fn with_super() {
+ let _ = insidefoo();
+ }
+ }
+ }
+
+ mod should_be_replaced_further_inside {
+ fn insidefoo() {}
+ mod inner {
+ use super::insidefoo;
+ fn with_super() {
+ let _ = insidefoo();
+ }
+ }
+ }
+
+ mod use_explicit_should_be_replaced {
+ use super_imports::foofoo;
+
+ fn with_explicit() {
+ let _ = foofoo();
+ }
+ }
+
+ mod use_double_super_should_be_replaced {
+ mod inner {
+ use super::super::foofoo;
+
+ fn with_double_super() {
+ let _ = foofoo();
+ }
+ }
+ }
+
+ mod use_super_explicit_should_be_replaced {
+ use super::super::super_imports::foofoo;
+
+ fn with_super_explicit() {
+ let _ = foofoo();
+ }
+ }
+
+ mod attestation_should_be_replaced {
+ use super::foofoo;
+
+ fn with_explicit() {
+ let _ = foofoo();
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wildcard_imports.rs b/src/tools/clippy/tests/ui/wildcard_imports.rs
new file mode 100644
index 000000000..b81285142
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wildcard_imports.rs
@@ -0,0 +1,246 @@
+// edition:2015
+// run-rustfix
+// aux-build:wildcard_imports_helper.rs
+
+// the 2015 edition here is needed because edition 2018 changed the module system
+// (see https://doc.rust-lang.org/edition-guide/rust-2018/path-changes.html) which means the lint
+// no longer detects some of the cases starting with Rust 2018.
+// FIXME: We should likely add another edition 2021 test case for this lint
+
+#![warn(clippy::wildcard_imports)]
+#![allow(unused, clippy::unnecessary_wraps, clippy::let_unit_value)]
+#![warn(unused_imports)]
+
+extern crate wildcard_imports_helper;
+
+use crate::fn_mod::*;
+use crate::mod_mod::*;
+use crate::multi_fn_mod::*;
+#[macro_use]
+use crate::struct_mod::*;
+
+#[allow(unused_imports)]
+use wildcard_imports_helper::inner::inner_for_self_import;
+use wildcard_imports_helper::inner::inner_for_self_import::*;
+use wildcard_imports_helper::*;
+
+use std::io::prelude::*;
+use wildcard_imports_helper::prelude::v1::*;
+
+struct ReadFoo;
+
+impl Read for ReadFoo {
+ fn read(&mut self, _buf: &mut [u8]) -> std::io::Result<usize> {
+ Ok(0)
+ }
+}
+
+mod fn_mod {
+ pub fn foo() {}
+}
+
+mod mod_mod {
+ pub mod inner_mod {
+ pub fn foo() {}
+ }
+}
+
+mod multi_fn_mod {
+ pub fn multi_foo() {}
+ pub fn multi_bar() {}
+ pub fn multi_baz() {}
+ pub mod multi_inner_mod {
+ pub fn foo() {}
+ }
+}
+
+mod struct_mod {
+ pub struct A;
+ pub struct B;
+ pub mod inner_struct_mod {
+ pub struct C;
+ }
+
+ #[macro_export]
+ macro_rules! double_struct_import_test {
+ () => {
+ let _ = A;
+ };
+ }
+}
+
+fn main() {
+ foo();
+ multi_foo();
+ multi_bar();
+ multi_inner_mod::foo();
+ inner_mod::foo();
+ extern_foo();
+ inner_extern_bar();
+
+ let _ = A;
+ let _ = inner_struct_mod::C;
+ let _ = ExternA;
+ let _ = PreludeModAnywhere;
+
+ double_struct_import_test!();
+ double_struct_import_test!();
+}
+
+mod in_fn_test {
+ pub use self::inner_exported::*;
+ #[allow(unused_imports)]
+ pub(crate) use self::inner_exported2::*;
+
+ fn test_intern() {
+ use crate::fn_mod::*;
+
+ foo();
+ }
+
+ fn test_extern() {
+ use wildcard_imports_helper::inner::inner_for_self_import::{self, *};
+ use wildcard_imports_helper::*;
+
+ inner_for_self_import::inner_extern_foo();
+ inner_extern_foo();
+
+ extern_foo();
+
+ let _ = ExternA;
+ }
+
+ fn test_inner_nested() {
+ use self::{inner::*, inner2::*};
+
+ inner_foo();
+ inner_bar();
+ }
+
+ fn test_extern_reexported() {
+ use wildcard_imports_helper::*;
+
+ extern_exported();
+ let _ = ExternExportedStruct;
+ let _ = ExternExportedEnum::A;
+ }
+
+ mod inner_exported {
+ pub fn exported() {}
+ pub struct ExportedStruct;
+ pub enum ExportedEnum {
+ A,
+ }
+ }
+
+ mod inner_exported2 {
+ pub(crate) fn exported2() {}
+ }
+
+ mod inner {
+ pub fn inner_foo() {}
+ }
+
+ mod inner2 {
+ pub fn inner_bar() {}
+ }
+}
+
+fn test_reexported() {
+ use crate::in_fn_test::*;
+
+ exported();
+ let _ = ExportedStruct;
+ let _ = ExportedEnum::A;
+}
+
+#[rustfmt::skip]
+fn test_weird_formatting() {
+ use crate:: in_fn_test:: * ;
+ use crate:: fn_mod::
+ *;
+
+ exported();
+ foo();
+}
+
+mod super_imports {
+ fn foofoo() {}
+
+ mod should_be_replaced {
+ use super::*;
+
+ fn with_super() {
+ let _ = foofoo();
+ }
+ }
+
+ mod test_should_pass {
+ use super::*;
+
+ fn with_super() {
+ let _ = foofoo();
+ }
+ }
+
+ mod test_should_pass_inside_function {
+ fn with_super_inside_function() {
+ use super::*;
+ let _ = foofoo();
+ }
+ }
+
+ mod test_should_pass_further_inside {
+ fn insidefoo() {}
+ mod inner {
+ use super::*;
+ fn with_super() {
+ let _ = insidefoo();
+ }
+ }
+ }
+
+ mod should_be_replaced_further_inside {
+ fn insidefoo() {}
+ mod inner {
+ use super::*;
+ fn with_super() {
+ let _ = insidefoo();
+ }
+ }
+ }
+
+ mod use_explicit_should_be_replaced {
+ use super_imports::*;
+
+ fn with_explicit() {
+ let _ = foofoo();
+ }
+ }
+
+ mod use_double_super_should_be_replaced {
+ mod inner {
+ use super::super::*;
+
+ fn with_double_super() {
+ let _ = foofoo();
+ }
+ }
+ }
+
+ mod use_super_explicit_should_be_replaced {
+ use super::super::super_imports::*;
+
+ fn with_super_explicit() {
+ let _ = foofoo();
+ }
+ }
+
+ mod attestation_should_be_replaced {
+ use super::*;
+
+ fn with_explicit() {
+ let _ = foofoo();
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wildcard_imports.stderr b/src/tools/clippy/tests/ui/wildcard_imports.stderr
new file mode 100644
index 000000000..626c1754f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wildcard_imports.stderr
@@ -0,0 +1,132 @@
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:16:5
+ |
+LL | use crate::fn_mod::*;
+ | ^^^^^^^^^^^^^^^^ help: try: `crate::fn_mod::foo`
+ |
+ = note: `-D clippy::wildcard-imports` implied by `-D warnings`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:17:5
+ |
+LL | use crate::mod_mod::*;
+ | ^^^^^^^^^^^^^^^^^ help: try: `crate::mod_mod::inner_mod`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:18:5
+ |
+LL | use crate::multi_fn_mod::*;
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:20:5
+ |
+LL | use crate::struct_mod::*;
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::struct_mod::{A, inner_struct_mod}`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:24:5
+ |
+LL | use wildcard_imports_helper::inner::inner_for_self_import::*;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::inner::inner_for_self_import::inner_extern_bar`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:25:5
+ |
+LL | use wildcard_imports_helper::*;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:96:13
+ |
+LL | use crate::fn_mod::*;
+ | ^^^^^^^^^^^^^^^^ help: try: `crate::fn_mod::foo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:102:75
+ |
+LL | use wildcard_imports_helper::inner::inner_for_self_import::{self, *};
+ | ^ help: try: `inner_extern_foo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:103:13
+ |
+LL | use wildcard_imports_helper::*;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:114:20
+ |
+LL | use self::{inner::*, inner2::*};
+ | ^^^^^^^^ help: try: `inner::inner_foo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:114:30
+ |
+LL | use self::{inner::*, inner2::*};
+ | ^^^^^^^^^ help: try: `inner2::inner_bar`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:121:13
+ |
+LL | use wildcard_imports_helper::*;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:150:9
+ |
+LL | use crate::in_fn_test::*;
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:159:9
+ |
+LL | use crate:: in_fn_test:: * ;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:160:9
+ |
+LL | use crate:: fn_mod::
+ | _________^
+LL | | *;
+ | |_________^ help: try: `crate:: fn_mod::foo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:171:13
+ |
+LL | use super::*;
+ | ^^^^^^^^ help: try: `super::foofoo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:206:17
+ |
+LL | use super::*;
+ | ^^^^^^^^ help: try: `super::insidefoo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:214:13
+ |
+LL | use super_imports::*;
+ | ^^^^^^^^^^^^^^^^ help: try: `super_imports::foofoo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:223:17
+ |
+LL | use super::super::*;
+ | ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:232:13
+ |
+LL | use super::super::super_imports::*;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo`
+
+error: usage of wildcard import
+ --> $DIR/wildcard_imports.rs:240:13
+ |
+LL | use super::*;
+ | ^^^^^^^^ help: try: `super::foofoo`
+
+error: aborting due to 21 previous errors
+
diff --git a/src/tools/clippy/tests/ui/write_literal.rs b/src/tools/clippy/tests/ui/write_literal.rs
new file mode 100644
index 000000000..446691744
--- /dev/null
+++ b/src/tools/clippy/tests/ui/write_literal.rs
@@ -0,0 +1,43 @@
+#![allow(unused_must_use)]
+#![warn(clippy::write_literal)]
+
+use std::io::Write;
+
+fn main() {
+ let mut v = Vec::new();
+
+ // these should be fine
+ write!(v, "Hello");
+ writeln!(v, "Hello");
+ let world = "world";
+ writeln!(v, "Hello {}", world);
+ writeln!(v, "Hello {world}", world = world);
+ writeln!(v, "3 in hex is {:X}", 3);
+ writeln!(v, "2 + 1 = {:.4}", 3);
+ writeln!(v, "2 + 1 = {:5.4}", 3);
+ writeln!(v, "Debug test {:?}", "hello, world");
+ writeln!(v, "{0:8} {1:>8}", "hello", "world");
+ writeln!(v, "{1:8} {0:>8}", "hello", "world");
+ writeln!(v, "{foo:8} {bar:>8}", foo = "hello", bar = "world");
+ writeln!(v, "{bar:8} {foo:>8}", foo = "hello", bar = "world");
+ writeln!(v, "{number:>width$}", number = 1, width = 6);
+ writeln!(v, "{number:>0width$}", number = 1, width = 6);
+ writeln!(v, "{} of {:b} people know binary, the other half doesn't", 1, 2);
+ writeln!(v, "10 / 4 is {}", 2.5);
+ writeln!(v, "2 + 1 = {}", 3);
+
+ // these should throw warnings
+ write!(v, "Hello {}", "world");
+ writeln!(v, "Hello {} {}", world, "world");
+ writeln!(v, "Hello {}", "world");
+
+ // positional args don't change the fact
+ // that we're using a literal -- this should
+ // throw a warning
+ writeln!(v, "{0} {1}", "hello", "world");
+ writeln!(v, "{1} {0}", "hello", "world");
+
+ // named args shouldn't change anything either
+ writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
+ writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
+}
diff --git a/src/tools/clippy/tests/ui/write_literal.stderr b/src/tools/clippy/tests/ui/write_literal.stderr
new file mode 100644
index 000000000..3c5ec91d3
--- /dev/null
+++ b/src/tools/clippy/tests/ui/write_literal.stderr
@@ -0,0 +1,135 @@
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:30:27
+ |
+LL | write!(v, "Hello {}", "world");
+ | ^^^^^^^
+ |
+ = note: `-D clippy::write-literal` implied by `-D warnings`
+help: try this
+ |
+LL - write!(v, "Hello {}", "world");
+LL + write!(v, "Hello world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:31:39
+ |
+LL | writeln!(v, "Hello {} {}", world, "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "Hello {} {}", world, "world");
+LL + writeln!(v, "Hello {} world", world);
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:32:29
+ |
+LL | writeln!(v, "Hello {}", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "Hello {}", "world");
+LL + writeln!(v, "Hello world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:37:28
+ |
+LL | writeln!(v, "{0} {1}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{0} {1}", "hello", "world");
+LL + writeln!(v, "hello {1}", "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:37:37
+ |
+LL | writeln!(v, "{0} {1}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{0} {1}", "hello", "world");
+LL + writeln!(v, "{0} world", "hello");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:38:28
+ |
+LL | writeln!(v, "{1} {0}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{1} {0}", "hello", "world");
+LL + writeln!(v, "{1} hello", "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:38:37
+ |
+LL | writeln!(v, "{1} {0}", "hello", "world");
+ | ^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{1} {0}", "hello", "world");
+LL + writeln!(v, "world {0}", "hello");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:41:32
+ |
+LL | writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
+LL + writeln!(v, "hello {bar}", bar = "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:41:47
+ |
+LL | writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
+LL + writeln!(v, "{foo} world", foo = "hello");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:42:32
+ |
+LL | writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
+LL + writeln!(v, "{bar} hello", bar = "world");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal.rs:42:47
+ |
+LL | writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
+ | ^^^^^^^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
+LL + writeln!(v, "world {foo}", foo = "hello");
+ |
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui/write_literal_2.rs b/src/tools/clippy/tests/ui/write_literal_2.rs
new file mode 100644
index 000000000..ba0d7be5e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/write_literal_2.rs
@@ -0,0 +1,27 @@
+#![allow(unused_must_use)]
+#![warn(clippy::write_literal)]
+
+use std::io::Write;
+
+fn main() {
+ let mut v = Vec::new();
+
+ writeln!(v, "{}", "{hello}");
+ writeln!(v, r"{}", r"{hello}");
+ writeln!(v, "{}", '\'');
+ writeln!(v, "{}", '"');
+ writeln!(v, r"{}", '"'); // don't lint
+ writeln!(v, r"{}", '\'');
+ writeln!(
+ v,
+ "some {}",
+ "hello \
+ world!"
+ );
+ writeln!(
+ v,
+ "some {}\
+ {} \\ {}",
+ "1", "2", "3",
+ );
+}
diff --git a/src/tools/clippy/tests/ui/write_literal_2.stderr b/src/tools/clippy/tests/ui/write_literal_2.stderr
new file mode 100644
index 000000000..9ff297069
--- /dev/null
+++ b/src/tools/clippy/tests/ui/write_literal_2.stderr
@@ -0,0 +1,112 @@
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:9:23
+ |
+LL | writeln!(v, "{}", "{hello}");
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::write-literal` implied by `-D warnings`
+help: try this
+ |
+LL - writeln!(v, "{}", "{hello}");
+LL + writeln!(v, "{{hello}}");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:10:24
+ |
+LL | writeln!(v, r"{}", r"{hello}");
+ | ^^^^^^^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, r"{}", r"{hello}");
+LL + writeln!(v, r"{{hello}}");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:11:23
+ |
+LL | writeln!(v, "{}", '/'');
+ | ^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{}", '/'');
+LL + writeln!(v, "'");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:12:23
+ |
+LL | writeln!(v, "{}", '"');
+ | ^^^
+ |
+help: try this
+ |
+LL - writeln!(v, "{}", '"');
+LL + writeln!(v, "/"");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:14:24
+ |
+LL | writeln!(v, r"{}", '/'');
+ | ^^^^
+ |
+help: try this
+ |
+LL - writeln!(v, r"{}", '/'');
+LL + writeln!(v, r"'");
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:18:9
+ |
+LL | / "hello /
+LL | | world!"
+ | |_______________^
+ |
+help: try this
+ |
+LL ~ "some hello /
+LL ~ world!"
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:25:9
+ |
+LL | "1", "2", "3",
+ | ^^^
+ |
+help: try this
+ |
+LL ~ "some 1/
+LL ~ {} / {}", "2", "3",
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:25:14
+ |
+LL | "1", "2", "3",
+ | ^^^
+ |
+help: try this
+ |
+LL ~ 2 / {}",
+LL ~ "1", "3",
+ |
+
+error: literal with an empty format string
+ --> $DIR/write_literal_2.rs:25:19
+ |
+LL | "1", "2", "3",
+ | ^^^
+ |
+help: try this
+ |
+LL ~ {} / 3",
+LL ~ "1", "2",
+ |
+
+error: aborting due to 9 previous errors
+
diff --git a/src/tools/clippy/tests/ui/write_with_newline.rs b/src/tools/clippy/tests/ui/write_with_newline.rs
new file mode 100644
index 000000000..446d6914d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/write_with_newline.rs
@@ -0,0 +1,59 @@
+// FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
+// // run-rustfix
+
+#![allow(clippy::write_literal)]
+#![warn(clippy::write_with_newline)]
+
+use std::io::Write;
+
+fn main() {
+ let mut v = Vec::new();
+
+ // These should fail
+ write!(v, "Hello\n");
+ write!(v, "Hello {}\n", "world");
+ write!(v, "Hello {} {}\n", "world", "#2");
+ write!(v, "{}\n", 1265);
+ write!(v, "\n");
+
+ // These should be fine
+ write!(v, "");
+ write!(v, "Hello");
+ writeln!(v, "Hello");
+ writeln!(v, "Hello\n");
+ writeln!(v, "Hello {}\n", "world");
+ write!(v, "Issue\n{}", 1265);
+ write!(v, "{}", 1265);
+ write!(v, "\n{}", 1275);
+ write!(v, "\n\n");
+ write!(v, "like eof\n\n");
+ write!(v, "Hello {} {}\n\n", "world", "#2");
+ writeln!(v, "\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
+ writeln!(v, "\nbla\n\n"); // #3126
+
+ // Escaping
+ write!(v, "\\n"); // #3514
+ write!(v, "\\\n"); // should fail
+ write!(v, "\\\\n");
+
+ // Raw strings
+ write!(v, r"\n"); // #3778
+
+ // Literal newlines should also fail
+ write!(
+ v,
+ "
+"
+ );
+ write!(
+ v,
+ r"
+"
+ );
+
+ // Don't warn on CRLF (#4208)
+ write!(v, "\r\n");
+ write!(v, "foo\r\n");
+ write!(v, "\\r\n"); //~ ERROR
+ write!(v, "foo\rbar\n");
+}
diff --git a/src/tools/clippy/tests/ui/write_with_newline.stderr b/src/tools/clippy/tests/ui/write_with_newline.stderr
new file mode 100644
index 000000000..5f55431be
--- /dev/null
+++ b/src/tools/clippy/tests/ui/write_with_newline.stderr
@@ -0,0 +1,133 @@
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:13:5
+ |
+LL | write!(v, "Hello/n");
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::write-with-newline` implied by `-D warnings`
+help: use `writeln!()` instead
+ |
+LL - write!(v, "Hello/n");
+LL + writeln!(v, "Hello");
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:14:5
+ |
+LL | write!(v, "Hello {}/n", "world");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "Hello {}/n", "world");
+LL + writeln!(v, "Hello {}", "world");
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:15:5
+ |
+LL | write!(v, "Hello {} {}/n", "world", "#2");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "Hello {} {}/n", "world", "#2");
+LL + writeln!(v, "Hello {} {}", "world", "#2");
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:16:5
+ |
+LL | write!(v, "{}/n", 1265);
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "{}/n", 1265);
+LL + writeln!(v, "{}", 1265);
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:17:5
+ |
+LL | write!(v, "/n");
+ | ^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "/n");
+LL + writeln!(v);
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:36:5
+ |
+LL | write!(v, "//n"); // should fail
+ | ^^^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "//n"); // should fail
+LL + writeln!(v, "/"); // should fail
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:43:5
+ |
+LL | / write!(
+LL | | v,
+LL | | "
+LL | | "
+LL | | );
+ | |_____^
+ |
+help: use `writeln!()` instead
+ |
+LL ~ writeln!(
+LL | v,
+LL ~ ""
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:48:5
+ |
+LL | / write!(
+LL | | v,
+LL | | r"
+LL | | "
+LL | | );
+ | |_____^
+ |
+help: use `writeln!()` instead
+ |
+LL ~ writeln!(
+LL | v,
+LL ~ r""
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:57:5
+ |
+LL | write!(v, "/r/n"); //~ ERROR
+ | ^^^^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "/r/n"); //~ ERROR
+LL + writeln!(v, "/r"); //~ ERROR
+ |
+
+error: using `write!()` with a format string that ends in a single newline
+ --> $DIR/write_with_newline.rs:58:5
+ |
+LL | write!(v, "foo/rbar/n");
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `writeln!()` instead
+ |
+LL - write!(v, "foo/rbar/n");
+LL + writeln!(v, "foo/rbar");
+ |
+
+error: aborting due to 10 previous errors
+
diff --git a/src/tools/clippy/tests/ui/writeln_empty_string.fixed b/src/tools/clippy/tests/ui/writeln_empty_string.fixed
new file mode 100644
index 000000000..e7d94acd1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/writeln_empty_string.fixed
@@ -0,0 +1,20 @@
+// run-rustfix
+
+#![allow(unused_must_use)]
+#![warn(clippy::writeln_empty_string)]
+use std::io::Write;
+
+fn main() {
+ let mut v = Vec::new();
+
+ // These should fail
+ writeln!(v);
+
+ let mut suggestion = Vec::new();
+ writeln!(suggestion);
+
+ // These should be fine
+ writeln!(v);
+ writeln!(v, " ");
+ write!(v, "");
+}
diff --git a/src/tools/clippy/tests/ui/writeln_empty_string.rs b/src/tools/clippy/tests/ui/writeln_empty_string.rs
new file mode 100644
index 000000000..662c62f02
--- /dev/null
+++ b/src/tools/clippy/tests/ui/writeln_empty_string.rs
@@ -0,0 +1,20 @@
+// run-rustfix
+
+#![allow(unused_must_use)]
+#![warn(clippy::writeln_empty_string)]
+use std::io::Write;
+
+fn main() {
+ let mut v = Vec::new();
+
+ // These should fail
+ writeln!(v, "");
+
+ let mut suggestion = Vec::new();
+ writeln!(suggestion, "");
+
+ // These should be fine
+ writeln!(v);
+ writeln!(v, " ");
+ write!(v, "");
+}
diff --git a/src/tools/clippy/tests/ui/writeln_empty_string.stderr b/src/tools/clippy/tests/ui/writeln_empty_string.stderr
new file mode 100644
index 000000000..ac65aadfc
--- /dev/null
+++ b/src/tools/clippy/tests/ui/writeln_empty_string.stderr
@@ -0,0 +1,16 @@
+error: using `writeln!(v, "")`
+ --> $DIR/writeln_empty_string.rs:11:5
+ |
+LL | writeln!(v, "");
+ | ^^^^^^^^^^^^^^^ help: replace it with: `writeln!(v)`
+ |
+ = note: `-D clippy::writeln-empty-string` implied by `-D warnings`
+
+error: using `writeln!(suggestion, "")`
+ --> $DIR/writeln_empty_string.rs:14:5
+ |
+LL | writeln!(suggestion, "");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `writeln!(suggestion)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/wrong_self_convention.rs b/src/tools/clippy/tests/ui/wrong_self_convention.rs
new file mode 100644
index 000000000..e3cc90ee2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wrong_self_convention.rs
@@ -0,0 +1,206 @@
+#![warn(clippy::wrong_self_convention)]
+#![allow(dead_code)]
+
+fn main() {}
+
+#[derive(Clone, Copy)]
+struct Foo;
+
+impl Foo {
+ fn as_i32(self) {}
+ fn as_u32(&self) {}
+ fn into_i32(self) {}
+ fn is_i32(self) {}
+ fn is_u32(&self) {}
+ fn to_i32(self) {}
+ fn from_i32(self) {}
+
+ pub fn as_i64(self) {}
+ pub fn into_i64(self) {}
+ pub fn is_i64(self) {}
+ pub fn to_i64(self) {}
+ pub fn from_i64(self) {}
+ // check whether the lint can be allowed at the function level
+ #[allow(clippy::wrong_self_convention)]
+ pub fn from_cake(self) {}
+
+ fn as_x<F: AsRef<Self>>(_: F) {}
+ fn as_y<F: AsRef<Foo>>(_: F) {}
+}
+
+struct Bar;
+
+impl Bar {
+ fn as_i32(self) {}
+ fn as_u32(&self) {}
+ fn into_i32(&self) {}
+ fn into_u32(self) {}
+ fn is_i32(self) {}
+ fn is_u32(&self) {}
+ fn to_i32(self) {}
+ fn to_u32(&self) {}
+ fn from_i32(self) {}
+
+ pub fn as_i64(self) {}
+ pub fn into_i64(&self) {}
+ pub fn is_i64(self) {}
+ pub fn to_i64(self) {}
+ pub fn from_i64(self) {}
+
+ // test for false positives
+ fn as_(self) {}
+ fn into_(&self) {}
+ fn is_(self) {}
+ fn to_(self) {}
+ fn from_(self) {}
+ fn to_mut(&mut self) {}
+}
+
+// Allow Box<Self>, Rc<Self>, Arc<Self> for methods that conventionally take Self by value
+#[allow(clippy::boxed_local)]
+mod issue4293 {
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ struct T;
+
+ impl T {
+ fn into_s1(self: Box<Self>) {}
+ fn into_s2(self: Rc<Self>) {}
+ fn into_s3(self: Arc<Self>) {}
+
+ fn into_t1(self: Box<T>) {}
+ fn into_t2(self: Rc<T>) {}
+ fn into_t3(self: Arc<T>) {}
+ }
+}
+
+// False positive for async (see #4037)
+mod issue4037 {
+ pub struct Foo;
+ pub struct Bar;
+
+ impl Foo {
+ pub async fn into_bar(self) -> Bar {
+ Bar
+ }
+ }
+}
+
+// Lint also in trait definition (see #6307)
+mod issue6307 {
+ trait T: Sized {
+ fn as_i32(self) {}
+ fn as_u32(&self) {}
+ fn into_i32(self) {}
+ fn into_i32_ref(&self) {}
+ fn into_u32(self) {}
+ fn is_i32(self) {}
+ fn is_u32(&self) {}
+ fn to_i32(self) {}
+ fn to_u32(&self) {}
+ fn from_i32(self) {}
+ // check whether the lint can be allowed at the function level
+ #[allow(clippy::wrong_self_convention)]
+ fn from_cake(self) {}
+
+ // test for false positives
+ fn as_(self) {}
+ fn into_(&self) {}
+ fn is_(self) {}
+ fn to_(self) {}
+ fn from_(self) {}
+ fn to_mut(&mut self) {}
+ }
+
+ trait U {
+ fn as_i32(self);
+ fn as_u32(&self);
+ fn into_i32(self);
+ fn into_i32_ref(&self);
+ fn into_u32(self);
+ fn is_i32(self);
+ fn is_u32(&self);
+ fn to_i32(self);
+ fn to_u32(&self);
+ fn from_i32(self);
+ // check whether the lint can be allowed at the function level
+ #[allow(clippy::wrong_self_convention)]
+ fn from_cake(self);
+
+ // test for false positives
+ fn as_(self);
+ fn into_(&self);
+ fn is_(self);
+ fn to_(self);
+ fn from_(self);
+ fn to_mut(&mut self);
+ }
+
+ trait C: Copy {
+ fn as_i32(self);
+ fn as_u32(&self);
+ fn into_i32(self);
+ fn into_i32_ref(&self);
+ fn into_u32(self);
+ fn is_i32(self);
+ fn is_u32(&self);
+ fn to_i32(self);
+ fn to_u32(&self);
+ fn from_i32(self);
+ // check whether the lint can be allowed at the function level
+ #[allow(clippy::wrong_self_convention)]
+ fn from_cake(self);
+
+ // test for false positives
+ fn as_(self);
+ fn into_(&self);
+ fn is_(self);
+ fn to_(self);
+ fn from_(self);
+ fn to_mut(&mut self);
+ }
+}
+
+mod issue6727 {
+ #[derive(Clone, Copy)]
+ struct FooCopy;
+
+ impl FooCopy {
+ fn to_u64(self) -> u64 {
+ 1
+ }
+ // trigger lint
+ fn to_u64_v2(&self) -> u64 {
+ 1
+ }
+ }
+
+ struct FooNoCopy;
+
+ impl FooNoCopy {
+ // trigger lint
+ fn to_u64(self) -> u64 {
+ 2
+ }
+ fn to_u64_v2(&self) -> u64 {
+ 2
+ }
+ }
+}
+
+pub mod issue8142 {
+ struct S;
+
+ impl S {
+ // Should not lint: "no self at all" is allowed.
+ fn is_forty_two(x: u32) -> bool {
+ x == 42
+ }
+
+ // Should not lint: &self is allowed.
+ fn is_test_code(&self) -> bool {
+ true
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wrong_self_convention.stderr b/src/tools/clippy/tests/ui/wrong_self_convention.stderr
new file mode 100644
index 000000000..2e7ee51d7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wrong_self_convention.stderr
@@ -0,0 +1,195 @@
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:16:17
+ |
+LL | fn from_i32(self) {}
+ | ^^^^
+ |
+ = note: `-D clippy::wrong-self-convention` implied by `-D warnings`
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:22:21
+ |
+LL | pub fn from_i64(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `as_*` usually take `self` by reference or `self` by mutable reference
+ --> $DIR/wrong_self_convention.rs:34:15
+ |
+LL | fn as_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `into_*` usually take `self` by value
+ --> $DIR/wrong_self_convention.rs:36:17
+ |
+LL | fn into_i32(&self) {}
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `is_*` usually take `self` by mutable reference or `self` by reference or no `self`
+ --> $DIR/wrong_self_convention.rs:38:15
+ |
+LL | fn is_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods with the following characteristics: (`to_*` and `self` type is not `Copy`) usually take `self` by reference
+ --> $DIR/wrong_self_convention.rs:40:15
+ |
+LL | fn to_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:42:17
+ |
+LL | fn from_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `as_*` usually take `self` by reference or `self` by mutable reference
+ --> $DIR/wrong_self_convention.rs:44:19
+ |
+LL | pub fn as_i64(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `into_*` usually take `self` by value
+ --> $DIR/wrong_self_convention.rs:45:21
+ |
+LL | pub fn into_i64(&self) {}
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `is_*` usually take `self` by mutable reference or `self` by reference or no `self`
+ --> $DIR/wrong_self_convention.rs:46:19
+ |
+LL | pub fn is_i64(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods with the following characteristics: (`to_*` and `self` type is not `Copy`) usually take `self` by reference
+ --> $DIR/wrong_self_convention.rs:47:19
+ |
+LL | pub fn to_i64(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:48:21
+ |
+LL | pub fn from_i64(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `as_*` usually take `self` by reference or `self` by mutable reference
+ --> $DIR/wrong_self_convention.rs:93:19
+ |
+LL | fn as_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `into_*` usually take `self` by value
+ --> $DIR/wrong_self_convention.rs:96:25
+ |
+LL | fn into_i32_ref(&self) {}
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `is_*` usually take `self` by mutable reference or `self` by reference or no `self`
+ --> $DIR/wrong_self_convention.rs:98:19
+ |
+LL | fn is_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:102:21
+ |
+LL | fn from_i32(self) {}
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `as_*` usually take `self` by reference or `self` by mutable reference
+ --> $DIR/wrong_self_convention.rs:117:19
+ |
+LL | fn as_i32(self);
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `into_*` usually take `self` by value
+ --> $DIR/wrong_self_convention.rs:120:25
+ |
+LL | fn into_i32_ref(&self);
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `is_*` usually take `self` by mutable reference or `self` by reference or no `self`
+ --> $DIR/wrong_self_convention.rs:122:19
+ |
+LL | fn is_i32(self);
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:126:21
+ |
+LL | fn from_i32(self);
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `into_*` usually take `self` by value
+ --> $DIR/wrong_self_convention.rs:144:25
+ |
+LL | fn into_i32_ref(&self);
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention.rs:150:21
+ |
+LL | fn from_i32(self);
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods with the following characteristics: (`to_*` and `self` type is `Copy`) usually take `self` by value
+ --> $DIR/wrong_self_convention.rs:174:22
+ |
+LL | fn to_u64_v2(&self) -> u64 {
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: methods with the following characteristics: (`to_*` and `self` type is not `Copy`) usually take `self` by reference
+ --> $DIR/wrong_self_convention.rs:183:19
+ |
+LL | fn to_u64(self) -> u64 {
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: aborting due to 24 previous errors
+
diff --git a/src/tools/clippy/tests/ui/wrong_self_convention2.rs b/src/tools/clippy/tests/ui/wrong_self_convention2.rs
new file mode 100644
index 000000000..0dcf4743e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wrong_self_convention2.rs
@@ -0,0 +1,116 @@
+#![warn(clippy::wrong_self_convention)]
+#![allow(dead_code)]
+
+fn main() {}
+
+mod issue6983 {
+ pub struct Thing;
+ pub trait Trait {
+ fn to_thing(&self) -> Thing;
+ }
+
+ impl Trait for u8 {
+ // don't trigger, e.g. `ToString` from `std` requires `&self`
+ fn to_thing(&self) -> Thing {
+ Thing
+ }
+ }
+
+ trait ToU64 {
+ fn to_u64(self) -> u64;
+ }
+
+ struct FooNoCopy;
+ // don't trigger
+ impl ToU64 for FooNoCopy {
+ fn to_u64(self) -> u64 {
+ 2
+ }
+ }
+}
+
+mod issue7032 {
+ trait Foo {
+ fn from_usize(x: usize) -> Self;
+ }
+ // don't trigger
+ impl Foo for usize {
+ fn from_usize(x: usize) -> Self {
+ x
+ }
+ }
+}
+
+mod issue7179 {
+ pub struct S(i32);
+
+ impl S {
+ // don't trigger (`s` is not `self`)
+ pub fn from_be(s: Self) -> Self {
+ S(i32::from_be(s.0))
+ }
+
+ // lint
+ pub fn from_be_self(self) -> Self {
+ S(i32::from_be(self.0))
+ }
+ }
+
+ trait T {
+ // don't trigger (`s` is not `self`)
+ fn from_be(s: Self) -> Self;
+ // lint
+ fn from_be_self(self) -> Self;
+ }
+
+ trait Foo: Sized {
+ fn as_byte_slice(slice: &[Self]) -> &[u8];
+ }
+}
+
+mod issue3414 {
+ struct CellLikeThing<T>(T);
+
+ impl<T> CellLikeThing<T> {
+ // don't trigger
+ fn into_inner(this: Self) -> T {
+ this.0
+ }
+ }
+
+ impl<T> std::ops::Deref for CellLikeThing<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.0
+ }
+ }
+}
+
+// don't trigger
+mod issue4546 {
+ use std::pin::Pin;
+
+ struct S;
+ impl S {
+ pub fn as_mut(self: Pin<&mut Self>) {}
+
+ pub fn as_other_thingy(self: Pin<&Self>) {}
+
+ pub fn is_other_thingy(self: Pin<&Self>) {}
+
+ pub fn to_mut(self: Pin<&mut Self>) {}
+
+ pub fn to_other_thingy(self: Pin<&Self>) {}
+ }
+}
+
+mod issue_8480_8513 {
+ struct Cat(String);
+
+ impl Cat {
+ fn is_animal(&mut self) -> bool {
+ todo!();
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wrong_self_convention2.stderr b/src/tools/clippy/tests/ui/wrong_self_convention2.stderr
new file mode 100644
index 000000000..5bdc47f91
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wrong_self_convention2.stderr
@@ -0,0 +1,19 @@
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention2.rs:54:29
+ |
+LL | pub fn from_be_self(self) -> Self {
+ | ^^^^
+ |
+ = note: `-D clippy::wrong-self-convention` implied by `-D warnings`
+ = help: consider choosing a less ambiguous name
+
+error: methods called `from_*` usually take no `self`
+ --> $DIR/wrong_self_convention2.rs:63:25
+ |
+LL | fn from_be_self(self) -> Self;
+ | ^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/wrong_self_conventions_mut.rs b/src/tools/clippy/tests/ui/wrong_self_conventions_mut.rs
new file mode 100644
index 000000000..5bb2116bd
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wrong_self_conventions_mut.rs
@@ -0,0 +1,29 @@
+#![warn(clippy::wrong_self_convention)]
+#![allow(dead_code)]
+
+fn main() {}
+
+mod issue6758 {
+ pub enum Test<T> {
+ One(T),
+ Many(Vec<T>),
+ }
+
+ impl<T> Test<T> {
+ // If a method starts with `to_` and does not end with `_mut`, it should expect `&self`
+ pub fn to_many(&mut self) -> Option<&mut [T]> {
+ match self {
+ Self::Many(data) => Some(data),
+ _ => None,
+ }
+ }
+
+ // If a method starts with `to_` and ends with `_mut`, it should expect `&mut self`
+ pub fn to_many_mut(&self) -> Option<&[T]> {
+ match self {
+ Self::Many(data) => Some(data),
+ _ => None,
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/wrong_self_conventions_mut.stderr b/src/tools/clippy/tests/ui/wrong_self_conventions_mut.stderr
new file mode 100644
index 000000000..8665d8dc9
--- /dev/null
+++ b/src/tools/clippy/tests/ui/wrong_self_conventions_mut.stderr
@@ -0,0 +1,19 @@
+error: methods with the following characteristics: (`to_*` and `self` type is not `Copy`) usually take `self` by reference
+ --> $DIR/wrong_self_conventions_mut.rs:14:24
+ |
+LL | pub fn to_many(&mut self) -> Option<&mut [T]> {
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::wrong-self-convention` implied by `-D warnings`
+ = help: consider choosing a less ambiguous name
+
+error: methods with the following characteristics: (`to_*` and `*_mut`) usually take `self` by mutable reference
+ --> $DIR/wrong_self_conventions_mut.rs:22:28
+ |
+LL | pub fn to_many_mut(&self) -> Option<&[T]> {
+ | ^^^^^
+ |
+ = help: consider choosing a less ambiguous name
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/zero_div_zero.rs b/src/tools/clippy/tests/ui/zero_div_zero.rs
new file mode 100644
index 000000000..968c58f40
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_div_zero.rs
@@ -0,0 +1,13 @@
+#[allow(unused_variables, clippy::eq_op)]
+#[warn(clippy::zero_divided_by_zero)]
+fn main() {
+ let nan = 0.0 / 0.0;
+ let f64_nan = 0.0 / 0.0f64;
+ let other_f64_nan = 0.0f64 / 0.0;
+ let one_more_f64_nan = 0.0f64 / 0.0f64;
+ let zero = 0.0;
+ let other_zero = 0.0;
+ let other_nan = zero / other_zero; // fine - this lint doesn't propagate constants.
+ let not_nan = 2.0 / 0.0; // not an error: 2/0 = inf
+ let also_not_nan = 0.0 / 2.0; // not an error: 0/2 = 0
+}
diff --git a/src/tools/clippy/tests/ui/zero_div_zero.stderr b/src/tools/clippy/tests/ui/zero_div_zero.stderr
new file mode 100644
index 000000000..86563542e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_div_zero.stderr
@@ -0,0 +1,35 @@
+error: constant division of `0.0` with `0.0` will always result in NaN
+ --> $DIR/zero_div_zero.rs:4:15
+ |
+LL | let nan = 0.0 / 0.0;
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::zero-divided-by-zero` implied by `-D warnings`
+ = help: consider using `f64::NAN` if you would like a constant representing NaN
+
+error: constant division of `0.0` with `0.0` will always result in NaN
+ --> $DIR/zero_div_zero.rs:5:19
+ |
+LL | let f64_nan = 0.0 / 0.0f64;
+ | ^^^^^^^^^^^^
+ |
+ = help: consider using `f64::NAN` if you would like a constant representing NaN
+
+error: constant division of `0.0` with `0.0` will always result in NaN
+ --> $DIR/zero_div_zero.rs:6:25
+ |
+LL | let other_f64_nan = 0.0f64 / 0.0;
+ | ^^^^^^^^^^^^
+ |
+ = help: consider using `f64::NAN` if you would like a constant representing NaN
+
+error: constant division of `0.0` with `0.0` will always result in NaN
+ --> $DIR/zero_div_zero.rs:7:28
+ |
+LL | let one_more_f64_nan = 0.0f64 / 0.0f64;
+ | ^^^^^^^^^^^^^^^
+ |
+ = help: consider using `f64::NAN` if you would like a constant representing NaN
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/zero_offset.rs b/src/tools/clippy/tests/ui/zero_offset.rs
new file mode 100644
index 000000000..fd9ac1fa7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_offset.rs
@@ -0,0 +1,19 @@
+#[allow(clippy::borrow_as_ptr)]
+fn main() {
+ unsafe {
+ let m = &mut () as *mut ();
+ m.offset(0);
+ m.wrapping_add(0);
+ m.sub(0);
+ m.wrapping_sub(0);
+
+ let c = &() as *const ();
+ c.offset(0);
+ c.wrapping_add(0);
+ c.sub(0);
+ c.wrapping_sub(0);
+
+ let sized = &1 as *const i32;
+ sized.offset(0);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/zero_offset.stderr b/src/tools/clippy/tests/ui/zero_offset.stderr
new file mode 100644
index 000000000..481a44657
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_offset.stderr
@@ -0,0 +1,52 @@
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:5:9
+ |
+LL | m.offset(0);
+ | ^^^^^^^^^^^
+ |
+ = note: `#[deny(clippy::zst_offset)]` on by default
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:6:9
+ |
+LL | m.wrapping_add(0);
+ | ^^^^^^^^^^^^^^^^^
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:7:9
+ |
+LL | m.sub(0);
+ | ^^^^^^^^
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:8:9
+ |
+LL | m.wrapping_sub(0);
+ | ^^^^^^^^^^^^^^^^^
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:11:9
+ |
+LL | c.offset(0);
+ | ^^^^^^^^^^^
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:12:9
+ |
+LL | c.wrapping_add(0);
+ | ^^^^^^^^^^^^^^^^^
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:13:9
+ |
+LL | c.sub(0);
+ | ^^^^^^^^
+
+error: offset calculation on zero-sized value
+ --> $DIR/zero_offset.rs:14:9
+ |
+LL | c.wrapping_sub(0);
+ | ^^^^^^^^^^^^^^^^^
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/zero_ptr.fixed b/src/tools/clippy/tests/ui/zero_ptr.fixed
new file mode 100644
index 000000000..489aa4121
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_ptr.fixed
@@ -0,0 +1,14 @@
+// run-rustfix
+pub fn foo(_const: *const f32, _mut: *mut i64) {}
+
+fn main() {
+ let _ = std::ptr::null::<usize>();
+ let _ = std::ptr::null_mut::<f64>();
+ let _: *const u8 = std::ptr::null();
+
+ foo(0 as _, 0 as _);
+ foo(std::ptr::null(), std::ptr::null_mut());
+
+ let z = 0;
+ let _ = z as *const usize; // this is currently not caught
+}
diff --git a/src/tools/clippy/tests/ui/zero_ptr.rs b/src/tools/clippy/tests/ui/zero_ptr.rs
new file mode 100644
index 000000000..c3b55ef9e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_ptr.rs
@@ -0,0 +1,14 @@
+// run-rustfix
+pub fn foo(_const: *const f32, _mut: *mut i64) {}
+
+fn main() {
+ let _ = 0 as *const usize;
+ let _ = 0 as *mut f64;
+ let _: *const u8 = 0 as *const _;
+
+ foo(0 as _, 0 as _);
+ foo(0 as *const _, 0 as *mut _);
+
+ let z = 0;
+ let _ = z as *const usize; // this is currently not caught
+}
diff --git a/src/tools/clippy/tests/ui/zero_ptr.stderr b/src/tools/clippy/tests/ui/zero_ptr.stderr
new file mode 100644
index 000000000..4ee5e9a26
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_ptr.stderr
@@ -0,0 +1,34 @@
+error: `0 as *const _` detected
+ --> $DIR/zero_ptr.rs:5:13
+ |
+LL | let _ = 0 as *const usize;
+ | ^^^^^^^^^^^^^^^^^ help: try: `std::ptr::null::<usize>()`
+ |
+ = note: `-D clippy::zero-ptr` implied by `-D warnings`
+
+error: `0 as *mut _` detected
+ --> $DIR/zero_ptr.rs:6:13
+ |
+LL | let _ = 0 as *mut f64;
+ | ^^^^^^^^^^^^^ help: try: `std::ptr::null_mut::<f64>()`
+
+error: `0 as *const _` detected
+ --> $DIR/zero_ptr.rs:7:24
+ |
+LL | let _: *const u8 = 0 as *const _;
+ | ^^^^^^^^^^^^^ help: try: `std::ptr::null()`
+
+error: `0 as *const _` detected
+ --> $DIR/zero_ptr.rs:10:9
+ |
+LL | foo(0 as *const _, 0 as *mut _);
+ | ^^^^^^^^^^^^^ help: try: `std::ptr::null()`
+
+error: `0 as *mut _` detected
+ --> $DIR/zero_ptr.rs:10:24
+ |
+LL | foo(0 as *const _, 0 as *mut _);
+ | ^^^^^^^^^^^ help: try: `std::ptr::null_mut()`
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/zero_sized_btreemap_values.rs b/src/tools/clippy/tests/ui/zero_sized_btreemap_values.rs
new file mode 100644
index 000000000..5cd254787
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_sized_btreemap_values.rs
@@ -0,0 +1,68 @@
+#![warn(clippy::zero_sized_map_values)]
+use std::collections::BTreeMap;
+
+const CONST_OK: Option<BTreeMap<String, usize>> = None;
+const CONST_NOT_OK: Option<BTreeMap<String, ()>> = None;
+
+static STATIC_OK: Option<BTreeMap<String, usize>> = None;
+static STATIC_NOT_OK: Option<BTreeMap<String, ()>> = None;
+
+type OkMap = BTreeMap<String, usize>;
+type NotOkMap = BTreeMap<String, ()>;
+
+enum TestEnum {
+ Ok(BTreeMap<String, usize>),
+ NotOk(BTreeMap<String, ()>),
+}
+
+struct Test {
+ ok: BTreeMap<String, usize>,
+ not_ok: BTreeMap<String, ()>,
+
+ also_not_ok: Vec<BTreeMap<usize, ()>>,
+}
+
+trait TestTrait {
+ type Output;
+
+ fn produce_output() -> Self::Output;
+
+ fn weird_map(&self, map: BTreeMap<usize, ()>);
+}
+
+impl Test {
+ fn ok(&self) -> BTreeMap<String, usize> {
+ todo!()
+ }
+
+ fn not_ok(&self) -> BTreeMap<String, ()> {
+ todo!()
+ }
+}
+
+impl TestTrait for Test {
+ type Output = BTreeMap<String, ()>;
+
+ fn produce_output() -> Self::Output {
+ todo!();
+ }
+
+ fn weird_map(&self, map: BTreeMap<usize, ()>) {
+ todo!();
+ }
+}
+
+fn test(map: BTreeMap<String, ()>, key: &str) -> BTreeMap<String, ()> {
+ todo!();
+}
+
+fn test2(map: BTreeMap<String, usize>, key: &str) -> BTreeMap<String, usize> {
+ todo!();
+}
+
+fn main() {
+ let _: BTreeMap<String, ()> = BTreeMap::new();
+ let _: BTreeMap<String, usize> = BTreeMap::new();
+
+ let _: BTreeMap<_, _> = std::iter::empty::<(String, ())>().collect();
+}
diff --git a/src/tools/clippy/tests/ui/zero_sized_btreemap_values.stderr b/src/tools/clippy/tests/ui/zero_sized_btreemap_values.stderr
new file mode 100644
index 000000000..d924f3379
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_sized_btreemap_values.stderr
@@ -0,0 +1,107 @@
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:5:28
+ |
+LL | const CONST_NOT_OK: Option<BTreeMap<String, ()>> = None;
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::zero-sized-map-values` implied by `-D warnings`
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:8:30
+ |
+LL | static STATIC_NOT_OK: Option<BTreeMap<String, ()>> = None;
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:11:17
+ |
+LL | type NotOkMap = BTreeMap<String, ()>;
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:15:11
+ |
+LL | NotOk(BTreeMap<String, ()>),
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:20:13
+ |
+LL | not_ok: BTreeMap<String, ()>,
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:22:22
+ |
+LL | also_not_ok: Vec<BTreeMap<usize, ()>>,
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:30:30
+ |
+LL | fn weird_map(&self, map: BTreeMap<usize, ()>);
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:38:25
+ |
+LL | fn not_ok(&self) -> BTreeMap<String, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:55:14
+ |
+LL | fn test(map: BTreeMap<String, ()>, key: &str) -> BTreeMap<String, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:55:50
+ |
+LL | fn test(map: BTreeMap<String, ()>, key: &str) -> BTreeMap<String, ()> {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:64:35
+ |
+LL | let _: BTreeMap<String, ()> = BTreeMap::new();
+ | ^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:64:12
+ |
+LL | let _: BTreeMap<String, ()> = BTreeMap::new();
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_btreemap_values.rs:67:12
+ |
+LL | let _: BTreeMap<_, _> = std::iter::empty::<(String, ())>().collect();
+ | ^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/zero_sized_hashmap_values.rs b/src/tools/clippy/tests/ui/zero_sized_hashmap_values.rs
new file mode 100644
index 000000000..a1608d863
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_sized_hashmap_values.rs
@@ -0,0 +1,68 @@
+#![warn(clippy::zero_sized_map_values)]
+use std::collections::HashMap;
+
+const CONST_OK: Option<HashMap<String, usize>> = None;
+const CONST_NOT_OK: Option<HashMap<String, ()>> = None;
+
+static STATIC_OK: Option<HashMap<String, usize>> = None;
+static STATIC_NOT_OK: Option<HashMap<String, ()>> = None;
+
+type OkMap = HashMap<String, usize>;
+type NotOkMap = HashMap<String, ()>;
+
+enum TestEnum {
+ Ok(HashMap<String, usize>),
+ NotOk(HashMap<String, ()>),
+}
+
+struct Test {
+ ok: HashMap<String, usize>,
+ not_ok: HashMap<String, ()>,
+
+ also_not_ok: Vec<HashMap<usize, ()>>,
+}
+
+trait TestTrait {
+ type Output;
+
+ fn produce_output() -> Self::Output;
+
+ fn weird_map(&self, map: HashMap<usize, ()>);
+}
+
+impl Test {
+ fn ok(&self) -> HashMap<String, usize> {
+ todo!()
+ }
+
+ fn not_ok(&self) -> HashMap<String, ()> {
+ todo!()
+ }
+}
+
+impl TestTrait for Test {
+ type Output = HashMap<String, ()>;
+
+ fn produce_output() -> Self::Output {
+ todo!();
+ }
+
+ fn weird_map(&self, map: HashMap<usize, ()>) {
+ todo!();
+ }
+}
+
+fn test(map: HashMap<String, ()>, key: &str) -> HashMap<String, ()> {
+ todo!();
+}
+
+fn test2(map: HashMap<String, usize>, key: &str) -> HashMap<String, usize> {
+ todo!();
+}
+
+fn main() {
+ let _: HashMap<String, ()> = HashMap::new();
+ let _: HashMap<String, usize> = HashMap::new();
+
+ let _: HashMap<_, _> = std::iter::empty::<(String, ())>().collect();
+}
diff --git a/src/tools/clippy/tests/ui/zero_sized_hashmap_values.stderr b/src/tools/clippy/tests/ui/zero_sized_hashmap_values.stderr
new file mode 100644
index 000000000..79770bf90
--- /dev/null
+++ b/src/tools/clippy/tests/ui/zero_sized_hashmap_values.stderr
@@ -0,0 +1,107 @@
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:5:28
+ |
+LL | const CONST_NOT_OK: Option<HashMap<String, ()>> = None;
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::zero-sized-map-values` implied by `-D warnings`
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:8:30
+ |
+LL | static STATIC_NOT_OK: Option<HashMap<String, ()>> = None;
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:11:17
+ |
+LL | type NotOkMap = HashMap<String, ()>;
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:15:11
+ |
+LL | NotOk(HashMap<String, ()>),
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:20:13
+ |
+LL | not_ok: HashMap<String, ()>,
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:22:22
+ |
+LL | also_not_ok: Vec<HashMap<usize, ()>>,
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:30:30
+ |
+LL | fn weird_map(&self, map: HashMap<usize, ()>);
+ | ^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:38:25
+ |
+LL | fn not_ok(&self) -> HashMap<String, ()> {
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:55:14
+ |
+LL | fn test(map: HashMap<String, ()>, key: &str) -> HashMap<String, ()> {
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:55:49
+ |
+LL | fn test(map: HashMap<String, ()>, key: &str) -> HashMap<String, ()> {
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:64:34
+ |
+LL | let _: HashMap<String, ()> = HashMap::new();
+ | ^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:64:12
+ |
+LL | let _: HashMap<String, ()> = HashMap::new();
+ | ^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: map with zero-sized value type
+ --> $DIR/zero_sized_hashmap_values.rs:67:12
+ |
+LL | let _: HashMap<_, _> = std::iter::empty::<(String, ())>().collect();
+ | ^^^^^^^^^^^^^
+ |
+ = help: consider using a set instead
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/versioncheck.rs b/src/tools/clippy/tests/versioncheck.rs
new file mode 100644
index 000000000..38498ebdc
--- /dev/null
+++ b/src/tools/clippy/tests/versioncheck.rs
@@ -0,0 +1,89 @@
+#![cfg_attr(feature = "deny-warnings", deny(warnings))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::single_match_else)]
+
+use rustc_tools_util::VersionInfo;
+use std::fs;
+
+#[test]
+fn check_that_clippy_lints_and_clippy_utils_have_the_same_version_as_clippy() {
+ fn read_version(path: &str) -> String {
+ let contents = fs::read_to_string(path).unwrap_or_else(|e| panic!("error reading `{}`: {:?}", path, e));
+ contents
+ .lines()
+ .filter_map(|l| l.split_once('='))
+ .find_map(|(k, v)| (k.trim() == "version").then(|| v.trim()))
+ .unwrap_or_else(|| panic!("error finding version in `{}`", path))
+ .to_string()
+ }
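+ // For example (informal sketch): a manifest line `version = "0.1.64"` yields
+ // the string `"0.1.64"` with the surrounding quotes kept; that is fine here,
+ // since every manifest is read the same way before the values are compared.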
+
+ // do not run this test inside the upstream rustc repo:
+ // https://github.com/rust-lang/rust-clippy/issues/6683
+ if option_env!("RUSTC_TEST_SUITE").is_some() {
+ return;
+ }
+
+ let clippy_version = read_version("Cargo.toml");
+ let clippy_lints_version = read_version("clippy_lints/Cargo.toml");
+ let clippy_utils_version = read_version("clippy_utils/Cargo.toml");
+
+ assert_eq!(clippy_version, clippy_lints_version);
+ assert_eq!(clippy_version, clippy_utils_version);
+}
+
+#[test]
+fn check_that_clippy_has_the_same_major_version_as_rustc() {
+ // do not run this test inside the upstream rustc repo:
+ // https://github.com/rust-lang/rust-clippy/issues/6683
+ if option_env!("RUSTC_TEST_SUITE").is_some() {
+ return;
+ }
+
+ let clippy_version = rustc_tools_util::get_version_info!();
+ let clippy_major = clippy_version.major;
+ let clippy_minor = clippy_version.minor;
+ let clippy_patch = clippy_version.patch;
+
+ // get the rustc version either from the rustc installed with the toolchain file or from
+ // `RUSTC_REAL` if Clippy is built in the Rust repo with `./x.py`.
+ let rustc = std::env::var("RUSTC_REAL").unwrap_or_else(|_| "rustc".to_string());
+ let rustc_version = String::from_utf8(
+ std::process::Command::new(&rustc)
+ .arg("--version")
+ .output()
+ .expect("failed to run `rustc --version`")
+ .stdout,
+ )
+ .unwrap();
+ // extract "1 XX 0" from "rustc 1.XX.0-nightly (<commit> <date>)"
+ let vsplit: Vec<&str> = rustc_version
+ .split(' ')
+ .nth(1)
+ .unwrap()
+ .split('-')
+ .next()
+ .unwrap()
+ .split('.')
+ .collect();
+ match vsplit.as_slice() {
+ [rustc_major, rustc_minor, _rustc_patch] => {
+ // clippy 0.1.XX should correspond to rustc 1.XX.0
+ assert_eq!(clippy_major, 0); // this will probably stay the same for a long time
+ assert_eq!(
+ clippy_minor.to_string(),
+ *rustc_major,
+ "clippy minor version does not equal rustc major version"
+ );
+ assert_eq!(
+ clippy_patch.to_string(),
+ *rustc_minor,
+ "clippy patch version does not equal rustc minor version"
+ );
+ // do not check rustc_patch because when a stable-patch-release is made (like 1.50.2),
+ // we don't want our tests failing suddenly
+ },
+ _ => {
+ panic!("Failed to parse rustc version: {:?}", vsplit);
+ },
+ };
+}
diff --git a/src/tools/clippy/tests/workspace.rs b/src/tools/clippy/tests/workspace.rs
new file mode 100644
index 000000000..e13efb3e0
--- /dev/null
+++ b/src/tools/clippy/tests/workspace.rs
@@ -0,0 +1,107 @@
+#![feature(once_cell)]
+
+use std::path::PathBuf;
+use std::process::Command;
+use test_utils::{CARGO_CLIPPY_PATH, IS_RUSTC_TEST_SUITE};
+
+mod test_utils;
+
+#[test]
+fn test_no_deps_ignores_path_deps_in_workspaces() {
+ if IS_RUSTC_TEST_SUITE {
+ return;
+ }
+ let root = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ let target_dir = root.join("target").join("workspace_test");
+ let cwd = root.join("tests/workspace_test");
+
+ // Make sure we start with a clean state
+ Command::new("cargo")
+ .current_dir(&cwd)
+ .env("CARGO_TARGET_DIR", &target_dir)
+ .arg("clean")
+ .args(&["-p", "subcrate"])
+ .args(&["-p", "path_dep"])
+ .output()
+ .unwrap();
+
+ // `path_dep` is a path dependency of `subcrate` that would trigger a denied lint.
+ // Make sure that with the `--no-deps` argument Clippy does not run on `path_dep`.
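+ // The `primary_package_test` cfg passed below is what compiles the denied
+ // `empty_loop` code inside `path_dep` (see `path_dep/src/lib.rs`), so the
+ // lint can only fire if Clippy actually analyses that dependency.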
+ let output = Command::new(&*CARGO_CLIPPY_PATH)
+ .current_dir(&cwd)
+ .env("CARGO_INCREMENTAL", "0")
+ .env("CARGO_TARGET_DIR", &target_dir)
+ .arg("clippy")
+ .args(&["-p", "subcrate"])
+ .arg("--no-deps")
+ .arg("--")
+ .arg("-Cdebuginfo=0") // disable debuginfo to generate less data in the target dir
+ .args(&["--cfg", r#"feature="primary_package_test""#])
+ .output()
+ .unwrap();
+ println!("status: {}", output.status);
+ println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
+
+ assert!(output.status.success());
+
+ let lint_path_dep = || {
+ // Test that without the `--no-deps` argument, `path_dep` is linted.
+ let output = Command::new(&*CARGO_CLIPPY_PATH)
+ .current_dir(&cwd)
+ .env("CARGO_INCREMENTAL", "0")
+ .env("CARGO_TARGET_DIR", &target_dir)
+ .arg("clippy")
+ .args(&["-p", "subcrate"])
+ .arg("--")
+ .arg("-Cdebuginfo=0") // disable debuginfo to generate less data in the target dir
+ .args(&["--cfg", r#"feature="primary_package_test""#])
+ .output()
+ .unwrap();
+ println!("status: {}", output.status);
+ println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
+
+ assert!(!output.status.success());
+ assert!(
+ String::from_utf8(output.stderr)
+ .unwrap()
+ .contains("error: empty `loop {}` wastes CPU cycles")
+ );
+ };
+
+ // Make sure Cargo is aware of the removal of `--no-deps`.
+ lint_path_dep();
+
+ let successful_build = || {
+ let output = Command::new(&*CARGO_CLIPPY_PATH)
+ .current_dir(&cwd)
+ .env("CARGO_INCREMENTAL", "0")
+ .env("CARGO_TARGET_DIR", &target_dir)
+ .arg("clippy")
+ .args(&["-p", "subcrate"])
+ .arg("--")
+ .arg("-Cdebuginfo=0") // disable debuginfo to generate less data in the target dir
+ .output()
+ .unwrap();
+ println!("status: {}", output.status);
+ println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
+ println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
+
+ assert!(output.status.success());
+
+ output
+ };
+
+ // Trigger a successful build, so Cargo would like to cache the build result.
+ successful_build();
+
+ // Make sure there's no spurious rebuild when nothing changes.
+ let stderr = String::from_utf8(successful_build().stderr).unwrap();
+ assert!(!stderr.contains("Compiling"));
+ assert!(!stderr.contains("Checking"));
+ assert!(stderr.contains("Finished"));
+
+ // Make sure Cargo is aware of the new `--cfg` flag.
+ lint_path_dep();
+}
diff --git a/src/tools/clippy/tests/workspace_test/Cargo.toml b/src/tools/clippy/tests/workspace_test/Cargo.toml
new file mode 100644
index 000000000..bf5b4ca52
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "workspace_test"
+version = "0.1.0"
+edition = "2018"
+
+[workspace]
+members = ["subcrate"]
diff --git a/src/tools/clippy/tests/workspace_test/build.rs b/src/tools/clippy/tests/workspace_test/build.rs
new file mode 100644
index 000000000..3507168a3
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/build.rs
@@ -0,0 +1,7 @@
+#![deny(clippy::print_stdout)]
+
+fn main() {
+ // Test for #6041
+ println!("Hello");
+ print!("Hello");
+}
diff --git a/src/tools/clippy/tests/workspace_test/path_dep/Cargo.toml b/src/tools/clippy/tests/workspace_test/path_dep/Cargo.toml
new file mode 100644
index 000000000..85a91cd2d
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/path_dep/Cargo.toml
@@ -0,0 +1,3 @@
+[package]
+name = "path_dep"
+version = "0.1.0"
diff --git a/src/tools/clippy/tests/workspace_test/path_dep/src/lib.rs b/src/tools/clippy/tests/workspace_test/path_dep/src/lib.rs
new file mode 100644
index 000000000..35ce524f2
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/path_dep/src/lib.rs
@@ -0,0 +1,6 @@
+#![deny(clippy::empty_loop)]
+
+#[cfg(feature = "primary_package_test")]
+pub fn lint_me() {
+ loop {}
+}
diff --git a/src/tools/clippy/tests/workspace_test/src/main.rs b/src/tools/clippy/tests/workspace_test/src/main.rs
new file mode 100644
index 000000000..b322eca1d
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/src/main.rs
@@ -0,0 +1,3 @@
+#![deny(rust_2018_idioms)]
+
+fn main() {}
diff --git a/src/tools/clippy/tests/workspace_test/subcrate/Cargo.toml b/src/tools/clippy/tests/workspace_test/subcrate/Cargo.toml
new file mode 100644
index 000000000..45362c11b
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/subcrate/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "subcrate"
+version = "0.1.0"
+
+[dependencies]
+path_dep = { path = "../path_dep" }
diff --git a/src/tools/clippy/tests/workspace_test/subcrate/src/lib.rs b/src/tools/clippy/tests/workspace_test/subcrate/src/lib.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/clippy/tests/workspace_test/subcrate/src/lib.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml
new file mode 100644
index 000000000..80c303938
--- /dev/null
+++ b/src/tools/clippy/triagebot.toml
@@ -0,0 +1,12 @@
+[relabel]
+allow-unauthenticated = [
+ "A-*", "C-*", "E-*", "I-*", "L-*", "P-*", "S-*", "T-*",
+ "good-first-issue"
+]
+
+[assign]
+
+# Allows shortcuts like `@rustbot ready`
+#
+# See https://github.com/rust-lang/triagebot/wiki/Shortcuts
+[shortcut]
diff --git a/src/tools/clippy/util/etc/pre-commit.sh b/src/tools/clippy/util/etc/pre-commit.sh
new file mode 100755
index 000000000..5dd2ba3d5
--- /dev/null
+++ b/src/tools/clippy/util/etc/pre-commit.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+# exit immediately if a command fails
+set -e
+
+# Update lints
+cargo dev update_lints
+git add clippy_lints/src/lib.rs
+git add clippy_lints/src/lib.*.rs
+
+# Formatting:
+# Git will not automatically add the formatted code to the staged changes once
+# fmt has been executed. This collects all .rs files that are currently staged.
+# They will later be added back.
+#
+# This was proudly stolen and adjusted from here:
+# https://medium.com/@harshitbangar/automatic-code-formatting-with-git-66c3c5c26798
+files=$( (git diff --cached --name-only --diff-filter=ACMR | grep -Ei "\.rs$") || true)
+if [ ! -z "${files}" ]; then
+ cargo dev fmt
+ git add $(echo "$files" | paste -s -d " " -)
+fi
diff --git a/src/tools/clippy/util/etc/vscode-tasks.json b/src/tools/clippy/util/etc/vscode-tasks.json
new file mode 100644
index 000000000..ab98f9b41
--- /dev/null
+++ b/src/tools/clippy/util/etc/vscode-tasks.json
@@ -0,0 +1,57 @@
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "cargo check",
+ "type": "shell",
+ "command": "cargo check",
+ "problemMatcher": [],
+ "group": {
+ "kind": "build",
+ "isDefault": true
+ }
+ },
+ {
+ "label": "cargo dev fmt",
+ "type": "shell",
+ "command": "cargo dev fmt",
+ "problemMatcher": [],
+ "group": "none"
+ },
+ {
+ "label": "cargo uitest",
+ "type": "shell",
+ "command": "cargo uitest",
+ "options": {
+ "env": {
+ // This task will usually execute all UI tests inside `tests/ui`. You can
+ // optionally uncomment the line below and only run a specific test.
+ //
+ // See: https://github.com/rust-lang/rust-clippy/blob/master/book/src/development/adding_lints.md#testing
+ //
+ // "TESTNAME": "<TODO>",
+ "RUST_BACKTRACE": "1"
+ }
+ },
+ "problemMatcher": [],
+ "group": {
+ "kind": "test",
+ "isDefault": true
+ }
+ },
+ {
+ "label": "cargo test",
+ "type": "shell",
+ "command": "cargo test",
+ "problemMatcher": [],
+ "group": "test"
+ },
+ {
+ "label": "cargo dev bless",
+ "type": "shell",
+ "command": "cargo dev bless",
+ "problemMatcher": [],
+ "group": "none"
+ }
+ ]
+}
diff --git a/src/tools/clippy/util/fetch_prs_between.sh b/src/tools/clippy/util/fetch_prs_between.sh
new file mode 100755
index 000000000..6865abf97
--- /dev/null
+++ b/src/tools/clippy/util/fetch_prs_between.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Fetches the merge commits between two git commits and prints the PR URL
+# together with the full commit message
+#
+# If you want to use this to update the Clippy changelog, be sure to manually
+# exclude the non-user facing changes like 'rustup' PRs, typo fixes, etc.
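+#
+# Usage (informal sketch): ./util/fetch_prs_between.sh <first-commit> <last-commit>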
+
+first=$1
+last=$2
+
+IFS='
+'
+for pr in $(git log --oneline --grep "Merge #" --grep "Merge pull request" --grep "Auto merge of" --grep "Rollup merge of" "$first...$last" | sort -rn | uniq); do
+ id=$(echo "$pr" | rg -o '#[0-9]{3,5}' | cut -c 2-)
+ commit=$(echo "$pr" | cut -d' ' -f 1)
+ message=$(git --no-pager show --pretty=medium "$commit")
+ if [[ -n $(echo "$message" | rg "^[\s]{4}changelog: [nN]one\.*$") ]]; then
+ continue
+ fi
+
+ echo "URL: https://github.com/rust-lang/rust-clippy/pull/$id"
+ echo "Markdown URL: [#$id](https://github.com/rust-lang/rust-clippy/pull/$id)"
+ echo "$message"
+ echo "---------------------------------------------------------"
+ echo
+done
diff --git a/src/tools/clippy/util/versions.py b/src/tools/clippy/util/versions.py
new file mode 100755
index 000000000..0cfa007d1
--- /dev/null
+++ b/src/tools/clippy/util/versions.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+import json
+import os
+import sys
+import logging as log
+log.basicConfig(level=log.INFO, format='%(levelname)s: %(message)s')
+
+
+def key(v):
+ if v == 'master':
+ return float('inf')
+ if v == 'stable':
+ return sys.maxsize
+ if v == 'beta':
+ return sys.maxsize - 1
+
+ v = v.replace('v', '').replace('rust-', '')
+
+ s = 0
+ for i, val in enumerate(v.split('.')[::-1]):
+ s += int(val) * 100**i
+
+ return s
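+
+# A rough worked example of the ordering this produces: key("rust-1.59.0") is
+# 15900 and key("v1.60.0") is 16000, so numbered releases sort ascending, while
+# "beta", "stable" and "master" map to sentinels that sort after all of them.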
+
+
+def main():
+ if len(sys.argv) < 2:
+ log.error("specify output directory")
+ return
+
+ outdir = sys.argv[1]
+ versions = [
+ dir for dir in os.listdir(outdir) if not dir.startswith(".") and os.path.isdir(os.path.join(outdir, dir))
+ ]
+ versions.sort(key=key)
+
+ with open(os.path.join(outdir, "versions.json"), "w") as fp:
+ json.dump(versions, fp, indent=2)
+ log.info("wrote JSON for great justice")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml
new file mode 100644
index 000000000..23e495399
--- /dev/null
+++ b/src/tools/compiletest/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "compiletest"
+version = "0.0.0"
+edition = "2021"
+
+[dependencies]
+colored = "2"
+diff = "0.1.10"
+unified-diff = "0.2.1"
+getopts = "0.2"
+tracing = "0.1"
+tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
+regex = "1.0"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+rustfix = "0.6.0"
+lazy_static = "1.0"
+walkdir = "2"
+glob = "0.3.0"
+
+[target.'cfg(unix)'.dependencies]
+libc = "0.2"
+
+[target.'cfg(windows)'.dependencies]
+miow = "0.3"
+winapi = { version = "0.3", features = ["winerror"] }
diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs
new file mode 100644
index 000000000..be81ff881
--- /dev/null
+++ b/src/tools/compiletest/src/common.rs
@@ -0,0 +1,473 @@
+pub use self::Mode::*;
+
+use std::ffi::OsString;
+use std::fmt;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use crate::util::PathBufExt;
+use test::ColorConfig;
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Mode {
+ RunPassValgrind,
+ Pretty,
+ DebugInfo,
+ Codegen,
+ Rustdoc,
+ RustdocJson,
+ CodegenUnits,
+ Incremental,
+ RunMake,
+ Ui,
+ JsDocTest,
+ MirOpt,
+ Assembly,
+}
+
+impl Mode {
+ pub fn disambiguator(self) -> &'static str {
+ // Pretty-printing tests could run concurrently, and if they do,
+ // they need to keep their output segregated.
+ match self {
+ Pretty => ".pretty",
+ _ => "",
+ }
+ }
+}
+
+impl FromStr for Mode {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Mode, ()> {
+ match s {
+ "run-pass-valgrind" => Ok(RunPassValgrind),
+ "pretty" => Ok(Pretty),
+ "debuginfo" => Ok(DebugInfo),
+ "codegen" => Ok(Codegen),
+ "rustdoc" => Ok(Rustdoc),
+ "rustdoc-json" => Ok(RustdocJson),
+ "codegen-units" => Ok(CodegenUnits),
+ "incremental" => Ok(Incremental),
+ "run-make" => Ok(RunMake),
+ "ui" => Ok(Ui),
+ "js-doc-test" => Ok(JsDocTest),
+ "mir-opt" => Ok(MirOpt),
+ "assembly" => Ok(Assembly),
+ _ => Err(()),
+ }
+ }
+}
+
+impl fmt::Display for Mode {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let s = match *self {
+ RunPassValgrind => "run-pass-valgrind",
+ Pretty => "pretty",
+ DebugInfo => "debuginfo",
+ Codegen => "codegen",
+ Rustdoc => "rustdoc",
+ RustdocJson => "rustdoc-json",
+ CodegenUnits => "codegen-units",
+ Incremental => "incremental",
+ RunMake => "run-make",
+ Ui => "ui",
+ JsDocTest => "js-doc-test",
+ MirOpt => "mir-opt",
+ Assembly => "assembly",
+ };
+ fmt::Display::fmt(s, f)
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Debug, Hash)]
+pub enum PassMode {
+ Check,
+ Build,
+ Run,
+}
+
+impl FromStr for PassMode {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Self, ()> {
+ match s {
+ "check" => Ok(PassMode::Check),
+ "build" => Ok(PassMode::Build),
+ "run" => Ok(PassMode::Run),
+ _ => Err(()),
+ }
+ }
+}
+
+impl fmt::Display for PassMode {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let s = match *self {
+ PassMode::Check => "check",
+ PassMode::Build => "build",
+ PassMode::Run => "run",
+ };
+ fmt::Display::fmt(s, f)
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
+pub enum FailMode {
+ Check,
+ Build,
+ Run,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum CompareMode {
+ Polonius,
+ Chalk,
+ SplitDwarf,
+ SplitDwarfSingle,
+}
+
+impl CompareMode {
+ pub(crate) fn to_str(&self) -> &'static str {
+ match *self {
+ CompareMode::Polonius => "polonius",
+ CompareMode::Chalk => "chalk",
+ CompareMode::SplitDwarf => "split-dwarf",
+ CompareMode::SplitDwarfSingle => "split-dwarf-single",
+ }
+ }
+
+ pub fn parse(s: String) -> CompareMode {
+ match s.as_str() {
+ "polonius" => CompareMode::Polonius,
+ "chalk" => CompareMode::Chalk,
+ "split-dwarf" => CompareMode::SplitDwarf,
+ "split-dwarf-single" => CompareMode::SplitDwarfSingle,
+ x => panic!("unknown --compare-mode option: {}", x),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum Debugger {
+ Cdb,
+ Gdb,
+ Lldb,
+}
+
+impl Debugger {
+ fn to_str(&self) -> &'static str {
+ match self {
+ Debugger::Cdb => "cdb",
+ Debugger::Gdb => "gdb",
+ Debugger::Lldb => "lldb",
+ }
+ }
+}
+
+impl fmt::Display for Debugger {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.to_str(), f)
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum PanicStrategy {
+ Unwind,
+ Abort,
+}
+
+/// Configuration for compiletest
+#[derive(Debug, Clone)]
+pub struct Config {
+ /// `true` to overwrite stderr/stdout files instead of complaining about changes in output.
+ pub bless: bool,
+
+ /// The library paths required for running the compiler.
+ pub compile_lib_path: PathBuf,
+
+ /// The library paths required for running compiled programs.
+ pub run_lib_path: PathBuf,
+
+ /// The rustc executable.
+ pub rustc_path: PathBuf,
+
+ /// The rustdoc executable.
+ pub rustdoc_path: Option<PathBuf>,
+
+ /// The rust-demangler executable.
+ pub rust_demangler_path: Option<PathBuf>,
+
+ /// The Python executable to use for LLDB and htmldocck.
+ pub python: String,
+
+ /// The jsondocck executable.
+ pub jsondocck_path: Option<String>,
+
+ /// The LLVM `FileCheck` binary path.
+ pub llvm_filecheck: Option<PathBuf>,
+
+ /// Path to LLVM's bin directory.
+ pub llvm_bin_dir: Option<PathBuf>,
+
+ /// The valgrind path.
+ pub valgrind_path: Option<String>,
+
+ /// Whether to fail if we can't run run-pass-valgrind tests under valgrind
+ /// (or, alternatively, to silently run them like regular run-pass tests).
+ pub force_valgrind: bool,
+
+ /// The path to the Clang executable to run Clang-based tests with. If
+ /// `None` then these tests will be ignored.
+ pub run_clang_based_tests_with: Option<String>,
+
+ /// The directory containing the tests to run
+ pub src_base: PathBuf,
+
+ /// The directory where programs should be built
+ pub build_base: PathBuf,
+
+ /// The name of the stage being built (stage1, etc)
+ pub stage_id: String,
+
+ /// The test mode, e.g. ui or debuginfo.
+ pub mode: Mode,
+
+ /// The test suite (essentially which directory is running, but without the
+ /// directory prefix such as src/test)
+ pub suite: String,
+
+ /// The debugger to use in debuginfo mode. Unset otherwise.
+ pub debugger: Option<Debugger>,
+
+ /// Run ignored tests
+ pub run_ignored: bool,
+
+ /// Only run tests that match these filters
+ pub filters: Vec<String>,
+
+ /// Skip tests matching these substrings. Corresponds to
+ /// `test::TestOpts::skip`. `filter_exact` does not apply to these flags.
+ pub skip: Vec<String>,
+
+ /// Exactly match the filter, rather than a substring
+ pub filter_exact: bool,
+
+ /// Force the pass mode of a check/build/run-pass test to this mode.
+ pub force_pass_mode: Option<PassMode>,
+
+ /// Explicitly enable or disable running.
+ pub run: Option<bool>,
+
+ /// Write out a parseable log of tests that were run
+ pub logfile: Option<PathBuf>,
+
+ /// A command line to prefix program execution with,
+ /// for running under valgrind
+ pub runtool: Option<String>,
+
+ /// Flags to pass to the compiler when building for the host
+ pub host_rustcflags: Option<String>,
+
+ /// Flags to pass to the compiler when building for the target
+ pub target_rustcflags: Option<String>,
+
+ /// Whether tests should be optimized by default. Individual test-suites and test files may
+ /// override this setting.
+ pub optimize_tests: bool,
+
+ /// What panic strategy the target is built with. Unwind supports Abort, but
+ /// not vice versa.
+ pub target_panic: PanicStrategy,
+
+ /// Target system to be tested
+ pub target: String,
+
+ /// Host triple for the compiler being invoked
+ pub host: String,
+
+ /// Path to / name of the Microsoft Console Debugger (CDB) executable
+ pub cdb: Option<OsString>,
+
+ /// Version of CDB
+ pub cdb_version: Option<[u16; 4]>,
+
+ /// Path to / name of the GDB executable
+ pub gdb: Option<String>,
+
+ /// Version of GDB, encoded as ((major * 1000) + minor) * 1000 + patch
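+ /// (so, as an informal example, GDB 7.11.90 would be encoded as 7011090)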
+ pub gdb_version: Option<u32>,
+
+ /// Whether GDB has native rust support
+ pub gdb_native_rust: bool,
+
+ /// Version of LLDB
+ pub lldb_version: Option<u32>,
+
+ /// Whether LLDB has native rust support
+ pub lldb_native_rust: bool,
+
+ /// Version of LLVM
+ pub llvm_version: Option<u32>,
+
+ /// Is LLVM a system LLVM
+ pub system_llvm: bool,
+
+ /// Path to the android tools
+ pub android_cross_path: PathBuf,
+
+ /// Extra parameter to run adb on arm-linux-androideabi
+ pub adb_path: String,
+
+ /// Extra parameter to run test suite on arm-linux-androideabi
+ pub adb_test_dir: String,
+
+ /// Whether an Android device is available or not
+ pub adb_device_status: bool,
+
+ /// the path containing LLDB's Python module
+ pub lldb_python_dir: Option<String>,
+
+ /// Explain what's going on
+ pub verbose: bool,
+
+ /// Print one character per test instead of one line
+ pub quiet: bool,
+
+ /// Whether to use colors in test.
+ pub color: ColorConfig,
+
+ /// where to find the remote test client process, if we're using it
+ pub remote_test_client: Option<PathBuf>,
+
+ /// mode describing what file the actual ui output will be compared to
+ pub compare_mode: Option<CompareMode>,
+
+ /// If true, this will generate a coverage file with UI test files that run `MachineApplicable`
+ /// diagnostics but are missing `run-rustfix` annotations. The generated coverage file is
+ /// created in `/<build_base>/rustfix_missing_coverage.txt`
+ pub rustfix_coverage: bool,
+
+ /// whether to run `tidy` when a rustdoc test fails
+ pub has_tidy: bool,
+
+ /// The current Rust channel
+ pub channel: String,
+
+ /// The default Rust edition
+ pub edition: Option<String>,
+
+ // Configuration for various run-make tests frobbing things like C compilers
+ // or querying about various LLVM component information.
+ pub cc: String,
+ pub cxx: String,
+ pub cflags: String,
+ pub cxxflags: String,
+ pub ar: String,
+ pub linker: Option<String>,
+ pub llvm_components: String,
+
+ /// Path to a NodeJS executable. Used for JS doctests, emscripten and WASM tests
+ pub nodejs: Option<String>,
+ /// Path to a npm executable. Used for rustdoc GUI tests
+ pub npm: Option<String>,
+
+ /// Whether to rerun tests even if the inputs are unchanged.
+ pub force_rerun: bool,
+}
+
+impl Config {
+ pub fn run_enabled(&self) -> bool {
+ self.run.unwrap_or_else(|| {
+ // Auto-detect whether to run based on the platform.
+ !self.target.ends_with("-fuchsia")
+ })
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct TestPaths {
+ pub file: PathBuf, // e.g., compile-test/foo/bar/baz.rs
+ pub relative_dir: PathBuf, // e.g., foo/bar
+}
+
+/// Used by `ui` tests to generate things like `foo.stderr` from `foo.rs`.
+pub fn expected_output_path(
+ testpaths: &TestPaths,
+ revision: Option<&str>,
+ compare_mode: &Option<CompareMode>,
+ kind: &str,
+) -> PathBuf {
+ assert!(UI_EXTENSIONS.contains(&kind));
+ let mut parts = Vec::new();
+
+ if let Some(x) = revision {
+ parts.push(x);
+ }
+ if let Some(ref x) = *compare_mode {
+ parts.push(x.to_str());
+ }
+ parts.push(kind);
+
+ let extension = parts.join(".");
+ testpaths.file.with_extension(extension)
+}
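+
+// As an informal example of the naming above: a ui test `foo.rs` run under
+// revision `rev1` with `--compare-mode=polonius` expects its stderr in
+// `foo.rev1.polonius.stderr`, while a plain run expects just `foo.stderr`.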
+
+pub const UI_EXTENSIONS: &[&str] = &[
+ UI_STDERR,
+ UI_STDOUT,
+ UI_FIXED,
+ UI_RUN_STDERR,
+ UI_RUN_STDOUT,
+ UI_STDERR_64,
+ UI_STDERR_32,
+ UI_STDERR_16,
+];
+pub const UI_STDERR: &str = "stderr";
+pub const UI_STDOUT: &str = "stdout";
+pub const UI_FIXED: &str = "fixed";
+pub const UI_RUN_STDERR: &str = "run.stderr";
+pub const UI_RUN_STDOUT: &str = "run.stdout";
+pub const UI_STDERR_64: &str = "64bit.stderr";
+pub const UI_STDERR_32: &str = "32bit.stderr";
+pub const UI_STDERR_16: &str = "16bit.stderr";
+
+/// Absolute path to the directory where all output for all tests in the given
+/// `relative_dir` group should reside. Example:
+/// /path/to/build/host-triple/test/ui/relative/
+/// This is created early when tests are collected to avoid race conditions.
+pub fn output_relative_path(config: &Config, relative_dir: &Path) -> PathBuf {
+ config.build_base.join(relative_dir)
+}
+
+/// Generates a unique name for the test, such as `testname.revision.mode`.
+pub fn output_testname_unique(
+ config: &Config,
+ testpaths: &TestPaths,
+ revision: Option<&str>,
+) -> PathBuf {
+ let mode = config.compare_mode.as_ref().map_or("", |m| m.to_str());
+ let debugger = config.debugger.as_ref().map_or("", |m| m.to_str());
+ PathBuf::from(&testpaths.file.file_stem().unwrap())
+ .with_extra_extension(revision.unwrap_or(""))
+ .with_extra_extension(mode)
+ .with_extra_extension(debugger)
+}
+
+/// Absolute path to the directory where all output for the given
+/// test/revision should reside. Example:
+/// /path/to/build/host-triple/test/ui/relative/testname.revision.mode/
+pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
+ output_relative_path(config, &testpaths.relative_dir)
+ .join(output_testname_unique(config, testpaths, revision))
+}
+
+/// Absolute path to the base filename used as output for the given
+/// test/revision. Example:
+/// /path/to/build/host-triple/test/ui/relative/testname.revision.mode/testname
+pub fn output_base_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
+ output_base_dir(config, testpaths, revision).join(testpaths.file.file_stem().unwrap())
+}
+
+/// Absolute path to the directory to use for incremental compilation. Example:
+/// /path/to/build/host-triple/test/ui/relative/testname.mode/testname.inc
+pub fn incremental_dir(config: &Config, testpaths: &TestPaths) -> PathBuf {
+ output_base_name(config, testpaths, None).with_extension("inc")
+}
diff --git a/src/tools/compiletest/src/compute_diff.rs b/src/tools/compiletest/src/compute_diff.rs
new file mode 100644
index 000000000..92c80c27d
--- /dev/null
+++ b/src/tools/compiletest/src/compute_diff.rs
@@ -0,0 +1,157 @@
+use std::collections::VecDeque;
+use std::fs::{File, FileType};
+use std::path::Path;
+
+#[derive(Debug, PartialEq)]
+pub enum DiffLine {
+ Context(String),
+ Expected(String),
+ Resulting(String),
+}
+
+#[derive(Debug, PartialEq)]
+pub struct Mismatch {
+ pub line_number: u32,
+ pub lines: Vec<DiffLine>,
+}
+
+impl Mismatch {
+ fn new(line_number: u32) -> Mismatch {
+ Mismatch { line_number, lines: Vec::new() }
+ }
+}
+
+// Produces a diff between the expected output and actual output.
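+//
+// As a small informal example: with expected = "a\nb\nc", actual = "a\nx\nc"
+// and context_size = 1, the result is a single `Mismatch` whose lines are
+// Context("a"), Expected("b"), Resulting("x") and Context("c").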
+pub fn make_diff(expected: &str, actual: &str, context_size: usize) -> Vec<Mismatch> {
+ let mut line_number = 1;
+ let mut context_queue: VecDeque<&str> = VecDeque::with_capacity(context_size);
+ let mut lines_since_mismatch = context_size + 1;
+ let mut results = Vec::new();
+ let mut mismatch = Mismatch::new(0);
+
+ for result in diff::lines(expected, actual) {
+ match result {
+ diff::Result::Left(str) => {
+ if lines_since_mismatch >= context_size && lines_since_mismatch > 0 {
+ results.push(mismatch);
+ mismatch = Mismatch::new(line_number - context_queue.len() as u32);
+ }
+
+ while let Some(line) = context_queue.pop_front() {
+ mismatch.lines.push(DiffLine::Context(line.to_owned()));
+ }
+
+ mismatch.lines.push(DiffLine::Expected(str.to_owned()));
+ line_number += 1;
+ lines_since_mismatch = 0;
+ }
+ diff::Result::Right(str) => {
+ if lines_since_mismatch >= context_size && lines_since_mismatch > 0 {
+ results.push(mismatch);
+ mismatch = Mismatch::new(line_number - context_queue.len() as u32);
+ }
+
+ while let Some(line) = context_queue.pop_front() {
+ mismatch.lines.push(DiffLine::Context(line.to_owned()));
+ }
+
+ mismatch.lines.push(DiffLine::Resulting(str.to_owned()));
+ lines_since_mismatch = 0;
+ }
+ diff::Result::Both(str, _) => {
+ if context_queue.len() >= context_size {
+ let _ = context_queue.pop_front();
+ }
+
+ if lines_since_mismatch < context_size {
+ mismatch.lines.push(DiffLine::Context(str.to_owned()));
+ } else if context_size > 0 {
+ context_queue.push_back(str);
+ }
+
+ line_number += 1;
+ lines_since_mismatch += 1;
+ }
+ }
+ }
+
+ results.push(mismatch);
+ results.remove(0);
+
+ results
+}
+
+pub(crate) fn write_diff(expected: &str, actual: &str, context_size: usize) -> String {
+ use std::fmt::Write;
+ let mut output = String::new();
+ let diff_results = make_diff(expected, actual, context_size);
+ for result in diff_results {
+ let mut line_number = result.line_number;
+ for line in result.lines {
+ match line {
+ DiffLine::Expected(e) => {
+ writeln!(output, "-\t{}", e).unwrap();
+ line_number += 1;
+ }
+ DiffLine::Context(c) => {
+ writeln!(output, "{}\t{}", line_number, c).unwrap();
+ line_number += 1;
+ }
+ DiffLine::Resulting(r) => {
+ writeln!(output, "+\t{}", r).unwrap();
+ }
+ }
+ }
+ writeln!(output).unwrap();
+ }
+ output
+}
+
+/// Filters based on filetype and extension whether to diff a file.
+///
+/// Returns whether any data was actually written.
+pub(crate) fn write_filtered_diff<Filter>(
+ diff_filename: &str,
+ out_dir: &Path,
+ compare_dir: &Path,
+ verbose: bool,
+ filter: Filter,
+) -> bool
+where
+ Filter: Fn(FileType, Option<&str>) -> bool,
+{
+ use std::io::{Read, Write};
+ let mut diff_output = File::create(diff_filename).unwrap();
+ let mut wrote_data = false;
+ for entry in walkdir::WalkDir::new(out_dir) {
+ let entry = entry.expect("failed to read file");
+ let extension = entry.path().extension().and_then(|p| p.to_str());
+ if filter(entry.file_type(), extension) {
+ let expected_path = compare_dir.join(entry.path().strip_prefix(&out_dir).unwrap());
+ let expected = if let Ok(s) = std::fs::read(&expected_path) { s } else { continue };
+ let actual_path = entry.path();
+ let actual = std::fs::read(&actual_path).unwrap();
+ let diff = unified_diff::diff(
+ &expected,
+ &expected_path.to_string_lossy(),
+ &actual,
+ &actual_path.to_string_lossy(),
+ 3,
+ );
+ wrote_data |= !diff.is_empty();
+ diff_output.write_all(&diff).unwrap();
+ }
+ }
+
+ if !wrote_data {
+ println!("note: diff is identical to nightly rustdoc");
+ assert!(diff_output.metadata().unwrap().len() == 0);
+ return false;
+ } else if verbose {
+ eprintln!("printing diff:");
+ let mut buf = Vec::new();
+ diff_output.read_to_end(&mut buf).unwrap();
+ std::io::stderr().lock().write_all(&mut buf).unwrap();
+ }
+ true
+}
diff --git a/src/tools/compiletest/src/errors.rs b/src/tools/compiletest/src/errors.rs
new file mode 100644
index 000000000..054235ec1
--- /dev/null
+++ b/src/tools/compiletest/src/errors.rs
@@ -0,0 +1,179 @@
+use self::WhichLine::*;
+
+use std::fmt;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::BufReader;
+use std::path::Path;
+use std::str::FromStr;
+
+use lazy_static::lazy_static;
+use regex::Regex;
+use tracing::*;
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum ErrorKind {
+ Help,
+ Error,
+ Note,
+ Suggestion,
+ Warning,
+}
+
+impl FromStr for ErrorKind {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let s = s.to_uppercase();
+ let part0: &str = s.split(':').next().unwrap();
+ match part0 {
+ "HELP" => Ok(ErrorKind::Help),
+ "ERROR" => Ok(ErrorKind::Error),
+ "NOTE" => Ok(ErrorKind::Note),
+ "SUGGESTION" => Ok(ErrorKind::Suggestion),
+ "WARN" | "WARNING" => Ok(ErrorKind::Warning),
+ _ => Err(()),
+ }
+ }
+}
+
+impl fmt::Display for ErrorKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ ErrorKind::Help => write!(f, "help message"),
+ ErrorKind::Error => write!(f, "error"),
+ ErrorKind::Note => write!(f, "note"),
+ ErrorKind::Suggestion => write!(f, "suggestion"),
+ ErrorKind::Warning => write!(f, "warning"),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct Error {
+ pub line_num: usize,
+ /// What kind of message we expect (e.g., warning, error, suggestion).
+ /// `None` if not specified or unknown message kind.
+ pub kind: Option<ErrorKind>,
+ pub msg: String,
+}
+
+#[derive(PartialEq, Debug)]
+enum WhichLine {
+ ThisLine,
+ FollowPrevious(usize),
+ AdjustBackward(usize),
+}
+
+/// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"
+/// The former is a "follow" that inherits its target from the preceding line;
+/// the latter is an "adjusts" that goes that many lines up.
+///
+/// Goal is to enable tests both like: //~^^^ ERROR go up three
+/// and also //~^ ERROR message one for the preceding line, and
+/// //~| ERROR message two for that same line.
+///
+/// If cfg is not None (i.e., in an incremental test), then we look
+/// for `//[X]~` instead, where `X` is the current `cfg`.
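+///
+/// As an informal example: in a test declaring `// revisions: foo bar`, an
+/// annotation written as `//[foo]~^ ERROR some message` is only expected when
+/// the `foo` revision is compiled, while `//~^ ERROR ...` applies to both.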
+pub fn load_errors(testfile: &Path, cfg: Option<&str>) -> Vec<Error> {
+ let rdr = BufReader::new(File::open(testfile).unwrap());
+
+ // `last_nonfollow_error` tracks the most recently seen
+ // line with an error template that did not use the
+ // follow-syntax, "//~| ...".
+ //
+ // (pnkfelix could not find an easy way to compose Iterator::scan
+ // and Iterator::filter_map to pass along this information into
+ // `parse_expected`. So instead I am storing that state here and
+ // updating it in the map callback below.)
+ let mut last_nonfollow_error = None;
+
+ rdr.lines()
+ .enumerate()
+ .filter_map(|(line_num, line)| {
+ parse_expected(last_nonfollow_error, line_num + 1, &line.unwrap(), cfg).map(
+ |(which, error)| {
+ match which {
+ FollowPrevious(_) => {}
+ _ => last_nonfollow_error = Some(error.line_num),
+ }
+
+ error
+ },
+ )
+ })
+ .collect()
+}
+
+fn parse_expected(
+ last_nonfollow_error: Option<usize>,
+ line_num: usize,
+ line: &str,
+ cfg: Option<&str>,
+) -> Option<(WhichLine, Error)> {
+ // Matches comments like:
+ // //~
+ // //~|
+ // //~^
+ // //~^^^^^
+ // //[cfg1]~
+ // //[cfg1,cfg2]~^^
+ lazy_static! {
+ static ref RE: Regex =
+ Regex::new(r"//(?:\[(?P<cfgs>[\w,]+)])?~(?P<adjust>\||\^*)").unwrap();
+ }
+
+ let captures = RE.captures(line)?;
+
+ match (cfg, captures.name("cfgs")) {
+ // Only error messages that contain our `cfg` between the square brackets apply to us.
+ (Some(cfg), Some(filter)) if !filter.as_str().split(',').any(|s| s == cfg) => return None,
+ (Some(_), Some(_)) => {}
+
+ (None, Some(_)) => panic!("Only tests with revisions should use `//[X]~`"),
+
+ // If an error has no list of revisions, it applies to all revisions.
+ (Some(_), None) | (None, None) => {}
+ }
+
+ let (follow, adjusts) = match &captures["adjust"] {
+ "|" => (true, 0),
+ circumflexes => (false, circumflexes.len()),
+ };
+
+ // Get the part of the comment after the sigil (e.g. `~^^` or `~|`).
+ let whole_match = captures.get(0).unwrap();
+ let (_, mut msg) = line.split_at(whole_match.end());
+
+ let first_word = msg.split_whitespace().next().expect("Encountered unexpected empty comment");
+
+ // If we find `//~ ERROR foo` or something like that, skip the first word.
+ let kind = first_word.parse::<ErrorKind>().ok();
+ if kind.is_some() {
+ msg = &msg.trim_start().split_at(first_word.len()).1;
+ }
+
+ let msg = msg.trim().to_owned();
+
+ let (which, line_num) = if follow {
+ assert_eq!(adjusts, 0, "use either //~| or //~^, not both.");
+ let line_num = last_nonfollow_error.expect(
+ "encountered //~| without \
+ preceding //~^ line.",
+ );
+ (FollowPrevious(line_num), line_num)
+ } else {
+ let which = if adjusts > 0 { AdjustBackward(adjusts) } else { ThisLine };
+ let line_num = line_num - adjusts;
+ (which, line_num)
+ };
+
+ debug!(
+ "line={} tag={:?} which={:?} kind={:?} msg={:?}",
+ line_num,
+ whole_match.as_str(),
+ which,
+ kind,
+ msg
+ );
+ Some((which, Error { line_num, kind, msg }))
+}
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
new file mode 100644
index 000000000..f8f193ddf
--- /dev/null
+++ b/src/tools/compiletest/src/header.rs
@@ -0,0 +1,1060 @@
+use std::collections::HashSet;
+use std::env;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::BufReader;
+use std::path::{Path, PathBuf};
+
+use tracing::*;
+
+use crate::common::{CompareMode, Config, Debugger, FailMode, Mode, PanicStrategy, PassMode};
+use crate::util;
+use crate::{extract_cdb_version, extract_gdb_version};
+
+#[cfg(test)]
+mod tests;
+
+/// The result of parse_cfg_name_directive.
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum ParsedNameDirective {
+ /// No match.
+ NoMatch,
+ /// Match.
+ Match,
+}
+
+/// Properties which must be known very early, before actually running
+/// the test.
+#[derive(Default)]
+pub struct EarlyProps {
+ pub aux: Vec<String>,
+ pub aux_crate: Vec<(String, String)>,
+ pub revisions: Vec<String>,
+}
+
+impl EarlyProps {
+ pub fn from_file(config: &Config, testfile: &Path) -> Self {
+ let file = File::open(testfile).expect("open test file to parse earlyprops");
+ Self::from_reader(config, testfile, file)
+ }
+
+ pub fn from_reader<R: Read>(config: &Config, testfile: &Path, rdr: R) -> Self {
+ let mut props = EarlyProps::default();
+ iter_header(testfile, rdr, &mut |_, ln| {
+ config.push_name_value_directive(ln, directives::AUX_BUILD, &mut props.aux, |r| {
+ r.trim().to_string()
+ });
+ config.push_name_value_directive(
+ ln,
+ directives::AUX_CRATE,
+ &mut props.aux_crate,
+ Config::parse_aux_crate,
+ );
+ config.parse_and_update_revisions(ln, &mut props.revisions);
+ });
+        props
+ }
+}
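+
+// For illustration, a test file beginning with headers such as
+//
+//   // aux-build: helper.rs
+//   // aux-crate: log=log.rs
+//   // revisions: a b
+//
+// would, under this parsing, produce `aux == ["helper.rs"]`,
+// `aux_crate == [("log", "log.rs")]` and `revisions == ["a", "b"]`.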
+
+#[derive(Clone, Debug)]
+pub struct TestProps {
+ // Lines that should be expected, in order, on standard out
+ pub error_patterns: Vec<String>,
+ // Regexes that should be expected, in order, on standard out
+ pub regex_error_patterns: Vec<String>,
+ // Extra flags to pass to the compiler
+ pub compile_flags: Vec<String>,
+ // Extra flags to pass when the compiled code is run (such as --bench)
+ pub run_flags: Option<String>,
+ // If present, the name of a file that this test should match when
+ // pretty-printed
+ pub pp_exact: Option<PathBuf>,
+ // Other crates that should be compiled (typically from the same
+ // directory as the test, but for backwards compatibility reasons
+ // we also check the auxiliary directory)
+ pub aux_builds: Vec<String>,
+ // Similar to `aux_builds`, but a list of NAME=somelib.rs of dependencies
+ // to build and pass with the `--extern` flag.
+ pub aux_crates: Vec<(String, String)>,
+ // Environment settings to use for compiling
+ pub rustc_env: Vec<(String, String)>,
+ // Environment variables to unset prior to compiling.
+ // Variables are unset before applying 'rustc_env'.
+ pub unset_rustc_env: Vec<String>,
+ // Environment settings to use during execution
+ pub exec_env: Vec<(String, String)>,
+ // Build documentation for all specified aux-builds as well
+ pub build_aux_docs: bool,
+ // Flag to force a crate to be built with the host architecture
+ pub force_host: bool,
+ // Check stdout for error-pattern output as well as stderr
+ pub check_stdout: bool,
+ // Check stdout & stderr for output of run-pass test
+ pub check_run_results: bool,
+ // For UI tests, allows compiler to generate arbitrary output to stdout
+ pub dont_check_compiler_stdout: bool,
+ // For UI tests, allows compiler to generate arbitrary output to stderr
+ pub dont_check_compiler_stderr: bool,
+ // Don't force a --crate-type=dylib flag on the command line
+ //
+ // Set this for example if you have an auxiliary test file that contains
+ // a proc-macro and needs `#![crate_type = "proc-macro"]`. This ensures
+ // that the aux file is compiled as a `proc-macro` and not as a `dylib`.
+ pub no_prefer_dynamic: bool,
+ // Run -Zunpretty expanded when running pretty printing tests
+ pub pretty_expanded: bool,
+ // Which pretty mode are we testing with, default to 'normal'
+ pub pretty_mode: String,
+ // Only compare pretty output and don't try compiling
+ pub pretty_compare_only: bool,
+ // Patterns which must not appear in the output of a cfail test.
+ pub forbid_output: Vec<String>,
+ // Revisions to test for incremental compilation.
+ pub revisions: Vec<String>,
+ // Directory (if any) to use for incremental compilation. This is
+ // not set by end-users; rather it is set by the incremental
+ // testing harness and used when generating compilation
+ // arguments. (In particular, it propagates to the aux-builds.)
+ pub incremental_dir: Option<PathBuf>,
+ // If `true`, this test will use incremental compilation.
+ //
+ // This can be set manually with the `incremental` header, or implicitly
+ // by being a part of an incremental mode test. Using the `incremental`
+ // header should be avoided if possible; using an incremental mode test is
+ // preferred. Incremental mode tests support multiple passes, which can
+ // verify that the incremental cache can be loaded properly after being
+    // created. Just setting the header only verifies the behavior of creating
+    // an incremental cache, but doesn't check that the cache is created
+    // correctly.
+ //
+ // Compiletest will create the incremental directory, and ensure it is
+ // empty before the test starts. Incremental mode tests will reuse the
+ // incremental directory between passes in the same test.
+ pub incremental: bool,
+ // If `true`, this test is a known bug.
+ //
+ // When set, some requirements are relaxed. Currently, this only means no
+ // error annotations are needed, but this may be updated in the future to
+ // include other relaxations.
+ pub known_bug: bool,
+ // How far should the test proceed while still passing.
+ pass_mode: Option<PassMode>,
+ // Ignore `--pass` overrides from the command line for this test.
+ ignore_pass: bool,
+ // How far this test should proceed to start failing.
+ pub fail_mode: Option<FailMode>,
+ // rustdoc will test the output of the `--test` option
+ pub check_test_line_numbers_match: bool,
+ // customized normalization rules
+ pub normalize_stdout: Vec<(String, String)>,
+ pub normalize_stderr: Vec<(String, String)>,
+ pub failure_status: i32,
+ // Whether or not `rustfix` should apply the `CodeSuggestion`s of this test and compile the
+ // resulting Rust code.
+ pub run_rustfix: bool,
+ // If true, `rustfix` will only apply `MachineApplicable` suggestions.
+ pub rustfix_only_machine_applicable: bool,
+ pub assembly_output: Option<String>,
+ // If true, the test is expected to ICE
+ pub should_ice: bool,
+ // If true, the stderr is expected to be different across bit-widths.
+ pub stderr_per_bitwidth: bool,
+ // The MIR opt to unit test, if any
+ pub mir_unit_test: Option<String>,
+}
+
+mod directives {
+ pub const ERROR_PATTERN: &'static str = "error-pattern";
+ pub const REGEX_ERROR_PATTERN: &'static str = "regex-error-pattern";
+ pub const COMPILE_FLAGS: &'static str = "compile-flags";
+ pub const RUN_FLAGS: &'static str = "run-flags";
+ pub const SHOULD_ICE: &'static str = "should-ice";
+ pub const BUILD_AUX_DOCS: &'static str = "build-aux-docs";
+ pub const FORCE_HOST: &'static str = "force-host";
+ pub const CHECK_STDOUT: &'static str = "check-stdout";
+ pub const CHECK_RUN_RESULTS: &'static str = "check-run-results";
+ pub const DONT_CHECK_COMPILER_STDOUT: &'static str = "dont-check-compiler-stdout";
+ pub const DONT_CHECK_COMPILER_STDERR: &'static str = "dont-check-compiler-stderr";
+ pub const NO_PREFER_DYNAMIC: &'static str = "no-prefer-dynamic";
+ pub const PRETTY_EXPANDED: &'static str = "pretty-expanded";
+ pub const PRETTY_MODE: &'static str = "pretty-mode";
+ pub const PRETTY_COMPARE_ONLY: &'static str = "pretty-compare-only";
+ pub const AUX_BUILD: &'static str = "aux-build";
+ pub const AUX_CRATE: &'static str = "aux-crate";
+ pub const EXEC_ENV: &'static str = "exec-env";
+ pub const RUSTC_ENV: &'static str = "rustc-env";
+ pub const UNSET_RUSTC_ENV: &'static str = "unset-rustc-env";
+ pub const FORBID_OUTPUT: &'static str = "forbid-output";
+ pub const CHECK_TEST_LINE_NUMBERS_MATCH: &'static str = "check-test-line-numbers-match";
+ pub const IGNORE_PASS: &'static str = "ignore-pass";
+ pub const FAILURE_STATUS: &'static str = "failure-status";
+ pub const RUN_RUSTFIX: &'static str = "run-rustfix";
+ pub const RUSTFIX_ONLY_MACHINE_APPLICABLE: &'static str = "rustfix-only-machine-applicable";
+ pub const ASSEMBLY_OUTPUT: &'static str = "assembly-output";
+ pub const STDERR_PER_BITWIDTH: &'static str = "stderr-per-bitwidth";
+ pub const INCREMENTAL: &'static str = "incremental";
+ pub const KNOWN_BUG: &'static str = "known-bug";
+ pub const MIR_UNIT_TEST: &'static str = "unit-test";
+ // This isn't a real directive, just one that is probably mistyped often
+ pub const INCORRECT_COMPILER_FLAGS: &'static str = "compiler-flags";
+}
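+
+// For illustration, these directive names appear as comment headers at the top of a
+// test file, e.g. (hypothetical test shown):
+//
+//   // compile-flags: -Copt-level=3
+//   // run-rustfix
+//   // normalize-stderr-test: "\d+ bytes" -> "N bytes"
+//
+// Name-only directives (like `run-rustfix`) toggle boolean flags, while
+// `name: value` directives carry a value handed to the matching parser below.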
+
+impl TestProps {
+ pub fn new() -> Self {
+ TestProps {
+ error_patterns: vec![],
+ regex_error_patterns: vec![],
+ compile_flags: vec![],
+ run_flags: None,
+ pp_exact: None,
+ aux_builds: vec![],
+ aux_crates: vec![],
+ revisions: vec![],
+ rustc_env: vec![],
+ unset_rustc_env: vec![],
+ exec_env: vec![],
+ build_aux_docs: false,
+ force_host: false,
+ check_stdout: false,
+ check_run_results: false,
+ dont_check_compiler_stdout: false,
+ dont_check_compiler_stderr: false,
+ no_prefer_dynamic: false,
+ pretty_expanded: false,
+ pretty_mode: "normal".to_string(),
+ pretty_compare_only: false,
+ forbid_output: vec![],
+ incremental_dir: None,
+ incremental: false,
+ known_bug: false,
+ pass_mode: None,
+ fail_mode: None,
+ ignore_pass: false,
+ check_test_line_numbers_match: false,
+ normalize_stdout: vec![],
+ normalize_stderr: vec![],
+ failure_status: -1,
+ run_rustfix: false,
+ rustfix_only_machine_applicable: false,
+ assembly_output: None,
+ should_ice: false,
+ stderr_per_bitwidth: false,
+ mir_unit_test: None,
+ }
+ }
+
+ pub fn from_aux_file(&self, testfile: &Path, cfg: Option<&str>, config: &Config) -> Self {
+ let mut props = TestProps::new();
+
+ // copy over select properties to the aux build:
+ props.incremental_dir = self.incremental_dir.clone();
+ props.ignore_pass = true;
+ props.load_from(testfile, cfg, config);
+
+ props
+ }
+
+ pub fn from_file(testfile: &Path, cfg: Option<&str>, config: &Config) -> Self {
+ let mut props = TestProps::new();
+ props.load_from(testfile, cfg, config);
+
+ match (props.pass_mode, props.fail_mode) {
+ (None, None) => props.fail_mode = Some(FailMode::Check),
+ (Some(_), None) | (None, Some(_)) => {}
+ (Some(_), Some(_)) => panic!("cannot use a *-fail and *-pass mode together"),
+ }
+
+ props
+ }
+
+    /// Loads properties from `testfile` into `self`. If a property is
+ /// tied to a particular revision `foo` (indicated by writing
+ /// `//[foo]`), then the property is ignored unless `cfg` is
+ /// `Some("foo")`.
+ fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
+ let mut has_edition = false;
+ if !testfile.is_dir() {
+ let file = File::open(testfile).unwrap();
+
+ iter_header(testfile, file, &mut |revision, ln| {
+ if revision.is_some() && revision != cfg {
+ return;
+ }
+
+ use directives::*;
+
+ config.push_name_value_directive(
+ ln,
+ ERROR_PATTERN,
+ &mut self.error_patterns,
+ |r| r,
+ );
+ config.push_name_value_directive(
+ ln,
+ REGEX_ERROR_PATTERN,
+ &mut self.regex_error_patterns,
+ |r| r,
+ );
+
+ if let Some(flags) = config.parse_name_value_directive(ln, COMPILE_FLAGS) {
+ self.compile_flags.extend(flags.split_whitespace().map(|s| s.to_owned()));
+ }
+ if config.parse_name_value_directive(ln, INCORRECT_COMPILER_FLAGS).is_some() {
+ panic!("`compiler-flags` directive should be spelled `compile-flags`");
+ }
+
+ if let Some(edition) = config.parse_edition(ln) {
+ self.compile_flags.push(format!("--edition={}", edition.trim()));
+ has_edition = true;
+ }
+
+ config.parse_and_update_revisions(ln, &mut self.revisions);
+
+ config.set_name_value_directive(ln, RUN_FLAGS, &mut self.run_flags, |r| r);
+
+ if self.pp_exact.is_none() {
+ self.pp_exact = config.parse_pp_exact(ln, testfile);
+ }
+
+ config.set_name_directive(ln, SHOULD_ICE, &mut self.should_ice);
+ config.set_name_directive(ln, BUILD_AUX_DOCS, &mut self.build_aux_docs);
+ config.set_name_directive(ln, FORCE_HOST, &mut self.force_host);
+ config.set_name_directive(ln, CHECK_STDOUT, &mut self.check_stdout);
+ config.set_name_directive(ln, CHECK_RUN_RESULTS, &mut self.check_run_results);
+ config.set_name_directive(
+ ln,
+ DONT_CHECK_COMPILER_STDOUT,
+ &mut self.dont_check_compiler_stdout,
+ );
+ config.set_name_directive(
+ ln,
+ DONT_CHECK_COMPILER_STDERR,
+ &mut self.dont_check_compiler_stderr,
+ );
+ config.set_name_directive(ln, NO_PREFER_DYNAMIC, &mut self.no_prefer_dynamic);
+ config.set_name_directive(ln, PRETTY_EXPANDED, &mut self.pretty_expanded);
+
+ if let Some(m) = config.parse_name_value_directive(ln, PRETTY_MODE) {
+ self.pretty_mode = m;
+ }
+
+ config.set_name_directive(ln, PRETTY_COMPARE_ONLY, &mut self.pretty_compare_only);
+ config.push_name_value_directive(ln, AUX_BUILD, &mut self.aux_builds, |r| {
+ r.trim().to_string()
+ });
+ config.push_name_value_directive(
+ ln,
+ AUX_CRATE,
+ &mut self.aux_crates,
+ Config::parse_aux_crate,
+ );
+ config.push_name_value_directive(
+ ln,
+ EXEC_ENV,
+ &mut self.exec_env,
+ Config::parse_env,
+ );
+ config.push_name_value_directive(
+ ln,
+ RUSTC_ENV,
+ &mut self.rustc_env,
+ Config::parse_env,
+ );
+ config.push_name_value_directive(
+ ln,
+ UNSET_RUSTC_ENV,
+ &mut self.unset_rustc_env,
+ |r| r,
+ );
+ config.push_name_value_directive(ln, FORBID_OUTPUT, &mut self.forbid_output, |r| r);
+ config.set_name_directive(
+ ln,
+ CHECK_TEST_LINE_NUMBERS_MATCH,
+ &mut self.check_test_line_numbers_match,
+ );
+
+ self.update_pass_mode(ln, cfg, config);
+ self.update_fail_mode(ln, config);
+
+ config.set_name_directive(ln, IGNORE_PASS, &mut self.ignore_pass);
+
+ if let Some(rule) = config.parse_custom_normalization(ln, "normalize-stdout") {
+ self.normalize_stdout.push(rule);
+ }
+ if let Some(rule) = config.parse_custom_normalization(ln, "normalize-stderr") {
+ self.normalize_stderr.push(rule);
+ }
+
+ if let Some(code) = config
+ .parse_name_value_directive(ln, FAILURE_STATUS)
+ .and_then(|code| code.trim().parse::<i32>().ok())
+ {
+ self.failure_status = code;
+ }
+
+ config.set_name_directive(ln, RUN_RUSTFIX, &mut self.run_rustfix);
+ config.set_name_directive(
+ ln,
+ RUSTFIX_ONLY_MACHINE_APPLICABLE,
+ &mut self.rustfix_only_machine_applicable,
+ );
+ config.set_name_value_directive(
+ ln,
+ ASSEMBLY_OUTPUT,
+ &mut self.assembly_output,
+ |r| r.trim().to_string(),
+ );
+ config.set_name_directive(ln, STDERR_PER_BITWIDTH, &mut self.stderr_per_bitwidth);
+ config.set_name_directive(ln, INCREMENTAL, &mut self.incremental);
+
+                // Unlike the other `name_value_directive`s, this needs to be handled
+                // manually because it sets a `bool` flag.
+ if let Some(known_bug) = config.parse_name_value_directive(ln, KNOWN_BUG) {
+ let known_bug = known_bug.trim();
+ if known_bug == "unknown"
+ || known_bug.split(',').all(|issue_ref| {
+ issue_ref
+ .trim()
+ .split_once('#')
+ .filter(|(_, number)| {
+ number.chars().all(|digit| digit.is_numeric())
+ })
+ .is_some()
+ })
+ {
+ self.known_bug = true;
+ } else {
+ panic!(
+ "Invalid known-bug value: {known_bug}\nIt requires comma-separated issue references (`#000` or `chalk#000`) or `unknown`."
+ );
+ }
+ }
+ config.set_name_value_directive(ln, MIR_UNIT_TEST, &mut self.mir_unit_test, |s| {
+ s.trim().to_string()
+ });
+ });
+ }
+
+ if self.failure_status == -1 {
+ self.failure_status = 1;
+ }
+ if self.should_ice {
+ self.failure_status = 101;
+ }
+
+ if config.mode == Mode::Incremental {
+ self.incremental = true;
+ }
+
+ for key in &["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
+ if let Ok(val) = env::var(key) {
+ if self.exec_env.iter().find(|&&(ref x, _)| x == key).is_none() {
+ self.exec_env.push(((*key).to_owned(), val))
+ }
+ }
+ }
+
+ if let (Some(edition), false) = (&config.edition, has_edition) {
+ self.compile_flags.push(format!("--edition={}", edition));
+ }
+ }
+
+ fn update_fail_mode(&mut self, ln: &str, config: &Config) {
+ let check_ui = |mode: &str| {
+ if config.mode != Mode::Ui {
+ panic!("`{}-fail` header is only supported in UI tests", mode);
+ }
+ };
+ if config.mode == Mode::Ui && config.parse_name_directive(ln, "compile-fail") {
+ panic!("`compile-fail` header is useless in UI tests");
+ }
+ let fail_mode = if config.parse_name_directive(ln, "check-fail") {
+ check_ui("check");
+ Some(FailMode::Check)
+ } else if config.parse_name_directive(ln, "build-fail") {
+ check_ui("build");
+ Some(FailMode::Build)
+ } else if config.parse_name_directive(ln, "run-fail") {
+ check_ui("run");
+ Some(FailMode::Run)
+ } else {
+ None
+ };
+ match (self.fail_mode, fail_mode) {
+ (None, Some(_)) => self.fail_mode = fail_mode,
+ (Some(_), Some(_)) => panic!("multiple `*-fail` headers in a single test"),
+ (_, None) => {}
+ }
+ }
+
+ fn update_pass_mode(&mut self, ln: &str, revision: Option<&str>, config: &Config) {
+ let check_no_run = |s| {
+ if config.mode != Mode::Ui && config.mode != Mode::Incremental {
+ panic!("`{}` header is only supported in UI and incremental tests", s);
+ }
+ if config.mode == Mode::Incremental
+ && !revision.map_or(false, |r| r.starts_with("cfail"))
+ && !self.revisions.iter().all(|r| r.starts_with("cfail"))
+ {
+ panic!("`{}` header is only supported in `cfail` incremental tests", s);
+ }
+ };
+ let pass_mode = if config.parse_name_directive(ln, "check-pass") {
+ check_no_run("check-pass");
+ Some(PassMode::Check)
+ } else if config.parse_name_directive(ln, "build-pass") {
+ check_no_run("build-pass");
+ Some(PassMode::Build)
+ } else if config.parse_name_directive(ln, "run-pass") {
+ if config.mode != Mode::Ui {
+ panic!("`run-pass` header is only supported in UI tests")
+ }
+ Some(PassMode::Run)
+ } else {
+ None
+ };
+ match (self.pass_mode, pass_mode) {
+ (None, Some(_)) => self.pass_mode = pass_mode,
+ (Some(_), Some(_)) => panic!("multiple `*-pass` headers in a single test"),
+ (_, None) => {}
+ }
+ }
+
+ pub fn pass_mode(&self, config: &Config) -> Option<PassMode> {
+ if !self.ignore_pass && self.fail_mode.is_none() && config.mode == Mode::Ui {
+ if let (mode @ Some(_), Some(_)) = (config.force_pass_mode, self.pass_mode) {
+ return mode;
+ }
+ }
+ self.pass_mode
+ }
+
+ // does not consider CLI override for pass mode
+ pub fn local_pass_mode(&self) -> Option<PassMode> {
+ self.pass_mode
+ }
+}
+
+pub fn line_directive<'line>(
+ comment: &str,
+ ln: &'line str,
+) -> Option<(Option<&'line str>, &'line str)> {
+ if ln.starts_with(comment) {
+ let ln = ln[comment.len()..].trim_start();
+ if ln.starts_with('[') {
+ // A comment like `//[foo]` is specific to revision `foo`
+ if let Some(close_brace) = ln.find(']') {
+ let lncfg = &ln[1..close_brace];
+
+ Some((Some(lncfg), ln[(close_brace + 1)..].trim_start()))
+ } else {
+ panic!("malformed condition directive: expected `{}[foo]`, found `{}`", comment, ln)
+ }
+ } else {
+ Some((None, ln))
+ }
+ } else {
+ None
+ }
+}
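+
+// For illustration, with `comment == "//"`:
+//   line_directive("//", "// ignore-test")            -> Some((None, "ignore-test"))
+//   line_directive("//", "//[foo] compile-flags: -O") -> Some((Some("foo"), "compile-flags: -O"))
+//   line_directive("//", "fn main() {}")              -> None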
+
+fn iter_header<R: Read>(testfile: &Path, rdr: R, it: &mut dyn FnMut(Option<&str>, &str)) {
+ if testfile.is_dir() {
+ return;
+ }
+
+ let comment = if testfile.extension().map(|e| e == "rs") == Some(true) { "//" } else { "#" };
+
+ let mut rdr = BufReader::new(rdr);
+ let mut ln = String::new();
+
+ loop {
+ ln.clear();
+ if rdr.read_line(&mut ln).unwrap() == 0 {
+ break;
+ }
+
+        // Assume that any directives will be found before the first
+        // module or function. Stopping the scan there doesn't seem to be
+        // an optimization with a warm page cache, though it may be with a cold one.
+ let ln = ln.trim();
+ if ln.starts_with("fn") || ln.starts_with("mod") {
+ return;
+ } else if let Some((lncfg, ln)) = line_directive(comment, ln) {
+ it(lncfg, ln);
+ }
+ }
+}
+
+impl Config {
+ fn parse_aux_crate(r: String) -> (String, String) {
+ let mut parts = r.trim().splitn(2, '=');
+ (
+ parts.next().expect("missing aux-crate name (e.g. log=log.rs)").to_string(),
+ parts.next().expect("missing aux-crate value (e.g. log=log.rs)").to_string(),
+ )
+ }
+
+ fn parse_and_update_revisions(&self, line: &str, existing: &mut Vec<String>) {
+ if let Some(raw) = self.parse_name_value_directive(line, "revisions") {
+ let mut duplicates: HashSet<_> = existing.iter().cloned().collect();
+ for revision in raw.split_whitespace().map(|r| r.to_string()) {
+ if !duplicates.insert(revision.clone()) {
+ panic!("Duplicate revision: `{}` in line `{}`", revision, raw);
+ }
+ existing.push(revision);
+ }
+ }
+ }
+
+ fn parse_env(nv: String) -> (String, String) {
+ // nv is either FOO or FOO=BAR
+ let mut strs: Vec<String> = nv.splitn(2, '=').map(str::to_owned).collect();
+
+ match strs.len() {
+ 1 => (strs.pop().unwrap(), String::new()),
+ 2 => {
+ let end = strs.pop().unwrap();
+ (strs.pop().unwrap(), end)
+ }
+ n => panic!("Expected 1 or 2 strings, not {}", n),
+ }
+ }
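+
+    // For illustration: parse_env("RUST_BACKTRACE=1".to_string()) yields
+    // ("RUST_BACKTRACE", "1"), while parse_env("RUST_BACKTRACE".to_string())
+    // yields ("RUST_BACKTRACE", "").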
+
+ fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option<PathBuf> {
+ if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
+ Some(PathBuf::from(&s))
+ } else if self.parse_name_directive(line, "pp-exact") {
+ testfile.file_name().map(PathBuf::from)
+ } else {
+ None
+ }
+ }
+
+ fn parse_custom_normalization(&self, mut line: &str, prefix: &str) -> Option<(String, String)> {
+ if self.parse_cfg_name_directive(line, prefix) == ParsedNameDirective::Match {
+ let from = parse_normalization_string(&mut line)?;
+ let to = parse_normalization_string(&mut line)?;
+ Some((from, to))
+ } else {
+ None
+ }
+ }
+
+ fn parse_needs_matching_clang(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "needs-matching-clang")
+ }
+
+ fn parse_needs_profiler_support(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "needs-profiler-support")
+ }
+
+    /// Parses a directive whose name carries config-specific information, e.g.,
+    /// `ignore-x86` or `normalize-stderr-32bit`.
+ fn parse_cfg_name_directive(&self, line: &str, prefix: &str) -> ParsedNameDirective {
+ if !line.as_bytes().starts_with(prefix.as_bytes()) {
+ return ParsedNameDirective::NoMatch;
+ }
+ if line.as_bytes().get(prefix.len()) != Some(&b'-') {
+ return ParsedNameDirective::NoMatch;
+ }
+
+ let name = line[prefix.len() + 1..].split(&[':', ' '][..]).next().unwrap();
+
+ let is_match = name == "test" ||
+ self.target == name || // triple
+ util::matches_os(&self.target, name) || // target
+ util::matches_env(&self.target, name) || // env
+ self.target.ends_with(name) || // target and env
+ name == util::get_arch(&self.target) || // architecture
+ name == util::get_pointer_width(&self.target) || // pointer width
+ name == self.stage_id.split('-').next().unwrap() || // stage
+ name == self.channel || // channel
+ (self.target != self.host && name == "cross-compile") ||
+ (name == "endian-big" && util::is_big_endian(&self.target)) ||
+ (self.remote_test_client.is_some() && name == "remote") ||
+ match self.compare_mode {
+ Some(CompareMode::Polonius) => name == "compare-mode-polonius",
+ Some(CompareMode::Chalk) => name == "compare-mode-chalk",
+ Some(CompareMode::SplitDwarf) => name == "compare-mode-split-dwarf",
+ Some(CompareMode::SplitDwarfSingle) => name == "compare-mode-split-dwarf-single",
+ None => false,
+ } ||
+ (cfg!(debug_assertions) && name == "debug") ||
+ match self.debugger {
+ Some(Debugger::Cdb) => name == "cdb",
+ Some(Debugger::Gdb) => name == "gdb",
+ Some(Debugger::Lldb) => name == "lldb",
+ None => false,
+ };
+
+ if is_match { ParsedNameDirective::Match } else { ParsedNameDirective::NoMatch }
+ }
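+
+    // For illustration, with the prefix "ignore" and target
+    // `x86_64-unknown-linux-gnu`, the names in "ignore-x86_64", "ignore-linux",
+    // "ignore-gnu" and "ignore-64bit" all yield `Match`, while "ignore-windows"
+    // yields `NoMatch` (see the `ignore_target` test in `header/tests.rs`).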
+
+ fn has_cfg_prefix(&self, line: &str, prefix: &str) -> bool {
+ // returns whether this line contains this prefix or not. For prefix
+ // "ignore", returns true if line says "ignore-x86_64", "ignore-arch",
+ // "ignore-android" etc.
+ line.starts_with(prefix) && line.as_bytes().get(prefix.len()) == Some(&b'-')
+ }
+
+ fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
+ // Ensure the directive is a whole word. Do not match "ignore-x86" when
+ // the line says "ignore-x86_64".
+ line.starts_with(directive)
+ && matches!(line.as_bytes().get(directive.len()), None | Some(&b' ') | Some(&b':'))
+ }
+
+ pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
+ let colon = directive.len();
+ if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') {
+ let value = line[(colon + 1)..].to_owned();
+ debug!("{}: {}", directive, value);
+ Some(expand_variables(value, self))
+ } else {
+ None
+ }
+ }
+
+ pub fn find_rust_src_root(&self) -> Option<PathBuf> {
+ let mut path = self.src_base.clone();
+ let path_postfix = Path::new("src/etc/lldb_batchmode.py");
+
+ while path.pop() {
+ if path.join(&path_postfix).is_file() {
+ return Some(path);
+ }
+ }
+
+ None
+ }
+
+ fn parse_edition(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "edition")
+ }
+
+ fn set_name_directive(&self, line: &str, directive: &str, value: &mut bool) {
+ if !*value {
+ *value = self.parse_name_directive(line, directive)
+ }
+ }
+
+ fn set_name_value_directive<T>(
+ &self,
+ line: &str,
+ directive: &str,
+ value: &mut Option<T>,
+ parse: impl FnOnce(String) -> T,
+ ) {
+ if value.is_none() {
+ *value = self.parse_name_value_directive(line, directive).map(parse);
+ }
+ }
+
+ fn push_name_value_directive<T>(
+ &self,
+ line: &str,
+ directive: &str,
+ values: &mut Vec<T>,
+ parse: impl FnOnce(String) -> T,
+ ) {
+ if let Some(value) = self.parse_name_value_directive(line, directive).map(parse) {
+ values.push(value);
+ }
+ }
+}
+
+fn expand_variables(mut value: String, config: &Config) -> String {
+ const CWD: &str = "{{cwd}}";
+ const SRC_BASE: &str = "{{src-base}}";
+ const BUILD_BASE: &str = "{{build-base}}";
+
+ if value.contains(CWD) {
+ let cwd = env::current_dir().unwrap();
+ value = value.replace(CWD, &cwd.to_string_lossy());
+ }
+
+ if value.contains(SRC_BASE) {
+ value = value.replace(SRC_BASE, &config.src_base.to_string_lossy());
+ }
+
+ if value.contains(BUILD_BASE) {
+ value = value.replace(BUILD_BASE, &config.build_base.to_string_lossy());
+ }
+
+ value
+}
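+
+// For illustration: if `config.build_base` were `/tmp/build` (hypothetical), the
+// directive value "--out-dir {{build-base}}/aux" would expand to
+// "--out-dir /tmp/build/aux"; values containing none of the three placeholders
+// are returned unchanged.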
+
+/// Finds the next quoted string `"..."` in `line` and extracts its content, advancing
+/// `line` past the closing quote of that string.
+///
+/// # Examples
+///
+/// ```
+/// let mut s = "normalize-stderr-32bit: \"something (32 bits)\" -> \"something ($WORD bits)\".";
+/// let first = parse_normalization_string(&mut s);
+/// assert_eq!(first, Some("something (32 bits)".to_owned()));
+/// assert_eq!(s, " -> \"something ($WORD bits)\".");
+/// ```
+fn parse_normalization_string(line: &mut &str) -> Option<String> {
+ // FIXME support escapes in strings.
+ let begin = line.find('"')? + 1;
+ let end = line[begin..].find('"')? + begin;
+ let result = line[begin..end].to_owned();
+ *line = &line[end + 1..];
+ Some(result)
+}
+
+pub fn extract_llvm_version(version: &str) -> Option<u32> {
+ let pat = |c: char| !c.is_ascii_digit() && c != '.';
+ let version_without_suffix = match version.find(pat) {
+ Some(pos) => &version[..pos],
+ None => version,
+ };
+ let components: Vec<u32> = version_without_suffix
+ .split('.')
+ .map(|s| s.parse().expect("Malformed version component"))
+ .collect();
+ let version = match *components {
+ [a] => a * 10_000,
+ [a, b] => a * 10_000 + b * 100,
+ [a, b, c] => a * 10_000 + b * 100 + c,
+ _ => panic!("Malformed version"),
+ };
+ Some(version)
+}
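+
+// For illustration: extract_llvm_version("9.0.1") == Some(90001) and
+// extract_llvm_version("14.0") == Some(140000); a trailing suffix such as in
+// "12.0.0-rust" is cut off at the first character that is neither a digit nor '.'.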
+
+/// Takes a directive of the form "<version1> [- <version2>]" and
+/// returns the numeric representations of <version1> and <version2> as
+/// a tuple: (<version1> as u32, <version2> as u32).
+///
+/// If the <version2> part is omitted, the second component of the tuple
+/// is the same as <version1>.
+fn extract_version_range<F>(line: &str, parse: F) -> Option<(u32, u32)>
+where
+ F: Fn(&str) -> Option<u32>,
+{
+ let mut splits = line.splitn(2, "- ").map(str::trim);
+ let min = splits.next().unwrap();
+ if min.ends_with('-') {
+ return None;
+ }
+
+ let max = splits.next();
+
+ if min.is_empty() {
+ return None;
+ }
+
+ let min = parse(min)?;
+ let max = match max {
+ Some(max) if max.is_empty() => return None,
+ Some(max) => parse(max)?,
+ _ => min,
+ };
+
+ Some((min, max))
+}
+
+pub fn make_test_description<R: Read>(
+ config: &Config,
+ name: test::TestName,
+ path: &Path,
+ src: R,
+ cfg: Option<&str>,
+) -> test::TestDesc {
+ let mut ignore = false;
+ let ignore_message = None;
+ let mut should_fail = false;
+
+ let rustc_has_profiler_support = env::var_os("RUSTC_PROFILER_SUPPORT").is_some();
+ let rustc_has_sanitizer_support = env::var_os("RUSTC_SANITIZER_SUPPORT").is_some();
+ let has_asm_support = util::has_asm_support(&config.target);
+ let has_asan = util::ASAN_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_cfi = util::CFI_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_lsan = util::LSAN_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_msan = util::MSAN_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_tsan = util::TSAN_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_hwasan = util::HWASAN_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_memtag = util::MEMTAG_SUPPORTED_TARGETS.contains(&&*config.target);
+ let has_shadow_call_stack = util::SHADOWCALLSTACK_SUPPORTED_TARGETS.contains(&&*config.target);
+ // for `-Z gcc-ld=lld`
+ let has_rust_lld = config
+ .compile_lib_path
+ .join("rustlib")
+ .join(&config.target)
+ .join("bin")
+ .join("gcc-ld")
+ .join(if config.host.contains("windows") { "ld.exe" } else { "ld" })
+ .exists();
+ iter_header(path, src, &mut |revision, ln| {
+ if revision.is_some() && revision != cfg {
+ return;
+ }
+ ignore = match config.parse_cfg_name_directive(ln, "ignore") {
+ ParsedNameDirective::Match => true,
+ ParsedNameDirective::NoMatch => ignore,
+ };
+ if config.has_cfg_prefix(ln, "only") {
+ ignore = match config.parse_cfg_name_directive(ln, "only") {
+ ParsedNameDirective::Match => ignore,
+ ParsedNameDirective::NoMatch => true,
+ };
+ }
+ ignore |= ignore_llvm(config, ln);
+ ignore |=
+ config.run_clang_based_tests_with.is_none() && config.parse_needs_matching_clang(ln);
+ ignore |= !has_asm_support && config.parse_name_directive(ln, "needs-asm-support");
+ ignore |= !rustc_has_profiler_support && config.parse_needs_profiler_support(ln);
+ ignore |= !config.run_enabled() && config.parse_name_directive(ln, "needs-run-enabled");
+ ignore |= !rustc_has_sanitizer_support
+ && config.parse_name_directive(ln, "needs-sanitizer-support");
+ ignore |= !has_asan && config.parse_name_directive(ln, "needs-sanitizer-address");
+ ignore |= !has_cfi && config.parse_name_directive(ln, "needs-sanitizer-cfi");
+ ignore |= !has_lsan && config.parse_name_directive(ln, "needs-sanitizer-leak");
+ ignore |= !has_msan && config.parse_name_directive(ln, "needs-sanitizer-memory");
+ ignore |= !has_tsan && config.parse_name_directive(ln, "needs-sanitizer-thread");
+ ignore |= !has_hwasan && config.parse_name_directive(ln, "needs-sanitizer-hwaddress");
+ ignore |= !has_memtag && config.parse_name_directive(ln, "needs-sanitizer-memtag");
+ ignore |= !has_shadow_call_stack
+ && config.parse_name_directive(ln, "needs-sanitizer-shadow-call-stack");
+ ignore |= config.target_panic == PanicStrategy::Abort
+ && config.parse_name_directive(ln, "needs-unwind");
+ ignore |= config.target == "wasm32-unknown-unknown"
+ && config.parse_name_directive(ln, directives::CHECK_RUN_RESULTS);
+ ignore |= config.debugger == Some(Debugger::Cdb) && ignore_cdb(config, ln);
+ ignore |= config.debugger == Some(Debugger::Gdb) && ignore_gdb(config, ln);
+ ignore |= config.debugger == Some(Debugger::Lldb) && ignore_lldb(config, ln);
+ ignore |= !has_rust_lld && config.parse_name_directive(ln, "needs-rust-lld");
+ should_fail |= config.parse_name_directive(ln, "should-fail");
+ });
+
+ // The `should-fail` annotation doesn't apply to pretty tests,
+ // since we run the pretty printer across all tests by default.
+ // If desired, we could add a `should-fail-pretty` annotation.
+ let should_panic = match config.mode {
+ crate::common::Pretty => test::ShouldPanic::No,
+ _ if should_fail => test::ShouldPanic::Yes,
+ _ => test::ShouldPanic::No,
+ };
+
+ test::TestDesc {
+ name,
+ ignore,
+ ignore_message,
+ should_panic,
+ compile_fail: false,
+ no_run: false,
+ test_type: test::TestType::Unknown,
+ }
+}
+
+fn ignore_cdb(config: &Config, line: &str) -> bool {
+ if let Some(actual_version) = config.cdb_version {
+ if let Some(min_version) = line.strip_prefix("min-cdb-version:").map(str::trim) {
+ let min_version = extract_cdb_version(min_version).unwrap_or_else(|| {
+ panic!("couldn't parse version range: {:?}", min_version);
+ });
+
+ // Ignore if actual version is smaller than the minimum
+ // required version
+ return actual_version < min_version;
+ }
+ }
+ false
+}
+
+fn ignore_gdb(config: &Config, line: &str) -> bool {
+ if let Some(actual_version) = config.gdb_version {
+ if let Some(rest) = line.strip_prefix("min-gdb-version:").map(str::trim) {
+ let (start_ver, end_ver) = extract_version_range(rest, extract_gdb_version)
+ .unwrap_or_else(|| {
+ panic!("couldn't parse version range: {:?}", rest);
+ });
+
+ if start_ver != end_ver {
+ panic!("Expected single GDB version")
+ }
+ // Ignore if actual version is smaller than the minimum
+ // required version
+ return actual_version < start_ver;
+ } else if let Some(rest) = line.strip_prefix("ignore-gdb-version:").map(str::trim) {
+ let (min_version, max_version) = extract_version_range(rest, extract_gdb_version)
+ .unwrap_or_else(|| {
+ panic!("couldn't parse version range: {:?}", rest);
+ });
+
+ if max_version < min_version {
+ panic!("Malformed GDB version range: max < min")
+ }
+
+ return actual_version >= min_version && actual_version <= max_version;
+ }
+ }
+ false
+}
+
+fn ignore_lldb(config: &Config, line: &str) -> bool {
+ if let Some(actual_version) = config.lldb_version {
+ if let Some(min_version) = line.strip_prefix("min-lldb-version:").map(str::trim) {
+ let min_version = min_version.parse().unwrap_or_else(|e| {
+ panic!("Unexpected format of LLDB version string: {}\n{:?}", min_version, e);
+ });
+            // Ignore if the actual version is smaller than the minimum
+            // required version
+ actual_version < min_version
+ } else {
+ line.starts_with("rust-lldb") && !config.lldb_native_rust
+ }
+ } else {
+ false
+ }
+}
+
+fn ignore_llvm(config: &Config, line: &str) -> bool {
+ if config.system_llvm && line.starts_with("no-system-llvm") {
+ return true;
+ }
+ if let Some(needed_components) =
+ config.parse_name_value_directive(line, "needs-llvm-components")
+ {
+ let components: HashSet<_> = config.llvm_components.split_whitespace().collect();
+ if let Some(missing_component) = needed_components
+ .split_whitespace()
+ .find(|needed_component| !components.contains(needed_component))
+ {
+ if env::var_os("COMPILETEST_NEEDS_ALL_LLVM_COMPONENTS").is_some() {
+ panic!("missing LLVM component: {}", missing_component);
+ }
+ return true;
+ }
+ }
+ if let Some(actual_version) = config.llvm_version {
+ if let Some(rest) = line.strip_prefix("min-llvm-version:").map(str::trim) {
+ let min_version = extract_llvm_version(rest).unwrap();
+            // Ignore if the actual version is smaller than the minimum
+            // required version
+ actual_version < min_version
+ } else if let Some(rest) = line.strip_prefix("min-system-llvm-version:").map(str::trim) {
+ let min_version = extract_llvm_version(rest).unwrap();
+            // Ignore if using system LLVM and the actual version
+            // is smaller than the minimum required version
+ config.system_llvm && actual_version < min_version
+ } else if let Some(rest) = line.strip_prefix("ignore-llvm-version:").map(str::trim) {
+ // Syntax is: "ignore-llvm-version: <version1> [- <version2>]"
+ let (v_min, v_max) =
+ extract_version_range(rest, extract_llvm_version).unwrap_or_else(|| {
+ panic!("couldn't parse version range: {:?}", rest);
+ });
+ if v_max < v_min {
+ panic!("Malformed LLVM version range: max < min")
+ }
+ // Ignore if version lies inside of range.
+ actual_version >= v_min && actual_version <= v_max
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+}
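+
+// For illustration: with `config.llvm_version == Some(100000)` (i.e. LLVM 10.0),
+// the header "min-llvm-version: 11.0" causes the test to be ignored,
+// "min-llvm-version: 9.0" does not, and "ignore-llvm-version: 9.0 - 10.0"
+// ignores it because 10.0 lies inside the inclusive range.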
diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs
new file mode 100644
index 000000000..a8fd4880f
--- /dev/null
+++ b/src/tools/compiletest/src/header/tests.rs
@@ -0,0 +1,283 @@
+use std::path::Path;
+
+use crate::common::{Config, Debugger};
+use crate::header::{make_test_description, parse_normalization_string, EarlyProps};
+
+#[test]
+fn test_parse_normalization_string() {
+ let mut s = "normalize-stderr-32bit: \"something (32 bits)\" -> \"something ($WORD bits)\".";
+ let first = parse_normalization_string(&mut s);
+ assert_eq!(first, Some("something (32 bits)".to_owned()));
+ assert_eq!(s, " -> \"something ($WORD bits)\".");
+
+ // Nothing to normalize (No quotes)
+ let mut s = "normalize-stderr-32bit: something (32 bits) -> something ($WORD bits).";
+ let first = parse_normalization_string(&mut s);
+ assert_eq!(first, None);
+ assert_eq!(s, r#"normalize-stderr-32bit: something (32 bits) -> something ($WORD bits)."#);
+
+ // Nothing to normalize (Only a single quote)
+ let mut s = "normalize-stderr-32bit: \"something (32 bits) -> something ($WORD bits).";
+ let first = parse_normalization_string(&mut s);
+ assert_eq!(first, None);
+ assert_eq!(s, "normalize-stderr-32bit: \"something (32 bits) -> something ($WORD bits).");
+
+    // Only the first string can be extracted (three quotes)
+ let mut s = "normalize-stderr-32bit: \"something (32 bits)\" -> \"something ($WORD bits).";
+ let first = parse_normalization_string(&mut s);
+ assert_eq!(first, Some("something (32 bits)".to_owned()));
+ assert_eq!(s, " -> \"something ($WORD bits).");
+
+ // Nothing to normalize (No quotes, 16-bit)
+ let mut s = "normalize-stderr-16bit: something (16 bits) -> something ($WORD bits).";
+ let first = parse_normalization_string(&mut s);
+ assert_eq!(first, None);
+ assert_eq!(s, r#"normalize-stderr-16bit: something (16 bits) -> something ($WORD bits)."#);
+}
+
+fn config() -> Config {
+ let args = &[
+ "compiletest",
+ "--mode=ui",
+ "--suite=ui",
+ "--compile-lib-path=",
+ "--run-lib-path=",
+ "--rustc-path=",
+ "--python=",
+ "--jsondocck-path=",
+ "--src-base=",
+ "--build-base=",
+ "--stage-id=stage2",
+ "--cc=c",
+ "--cxx=c++",
+ "--cflags=",
+ "--cxxflags=",
+ "--llvm-components=",
+ "--android-cross-path=",
+ "--target=x86_64-unknown-linux-gnu",
+ "--channel=nightly",
+ ];
+ let args = args.iter().map(ToString::to_string).collect();
+ crate::parse_config(args)
+}
+
+fn parse_rs(config: &Config, contents: &str) -> EarlyProps {
+ let bytes = contents.as_bytes();
+ EarlyProps::from_reader(config, Path::new("a.rs"), bytes)
+}
+
+fn check_ignore(config: &Config, contents: &str) -> bool {
+ let tn = test::DynTestName(String::new());
+ let p = Path::new("a.rs");
+ let d = make_test_description(&config, tn, p, std::io::Cursor::new(contents), None);
+ d.ignore
+}
+
+fn parse_makefile(config: &Config, contents: &str) -> EarlyProps {
+ let bytes = contents.as_bytes();
+ EarlyProps::from_reader(config, Path::new("Makefile"), bytes)
+}
+
+#[test]
+fn should_fail() {
+ let config = config();
+ let tn = test::DynTestName(String::new());
+ let p = Path::new("a.rs");
+
+ let d = make_test_description(&config, tn.clone(), p, std::io::Cursor::new(""), None);
+ assert_eq!(d.should_panic, test::ShouldPanic::No);
+ let d = make_test_description(&config, tn, p, std::io::Cursor::new("// should-fail"), None);
+ assert_eq!(d.should_panic, test::ShouldPanic::Yes);
+}
+
+#[test]
+fn revisions() {
+ let config = config();
+
+ assert_eq!(parse_rs(&config, "// revisions: a b c").revisions, vec!["a", "b", "c"],);
+ assert_eq!(
+ parse_makefile(&config, "# revisions: hello there").revisions,
+ vec!["hello", "there"],
+ );
+}
+
+#[test]
+fn aux_build() {
+ let config = config();
+
+ assert_eq!(
+ parse_rs(
+ &config,
+ r"
+ // aux-build: a.rs
+ // aux-build: b.rs
+ "
+ )
+ .aux,
+ vec!["a.rs", "b.rs"],
+ );
+}
+
+#[test]
+fn no_system_llvm() {
+ let mut config = config();
+
+ config.system_llvm = false;
+ assert!(!check_ignore(&config, "// no-system-llvm"));
+
+ config.system_llvm = true;
+ assert!(check_ignore(&config, "// no-system-llvm"));
+}
+
+#[test]
+fn llvm_version() {
+ let mut config = config();
+
+ config.llvm_version = Some(80102);
+ assert!(check_ignore(&config, "// min-llvm-version: 9.0"));
+
+ config.llvm_version = Some(90001);
+ assert!(check_ignore(&config, "// min-llvm-version: 9.2"));
+
+ config.llvm_version = Some(90301);
+ assert!(!check_ignore(&config, "// min-llvm-version: 9.2"));
+
+ config.llvm_version = Some(100000);
+ assert!(!check_ignore(&config, "// min-llvm-version: 9.0"));
+}
+
+#[test]
+fn ignore_target() {
+ let mut config = config();
+ config.target = "x86_64-unknown-linux-gnu".to_owned();
+
+ assert!(check_ignore(&config, "// ignore-x86_64-unknown-linux-gnu"));
+ assert!(check_ignore(&config, "// ignore-x86_64"));
+ assert!(check_ignore(&config, "// ignore-linux"));
+ assert!(check_ignore(&config, "// ignore-gnu"));
+ assert!(check_ignore(&config, "// ignore-64bit"));
+
+ assert!(!check_ignore(&config, "// ignore-i686"));
+ assert!(!check_ignore(&config, "// ignore-windows"));
+ assert!(!check_ignore(&config, "// ignore-msvc"));
+ assert!(!check_ignore(&config, "// ignore-32bit"));
+}
+
+#[test]
+fn only_target() {
+ let mut config = config();
+ config.target = "x86_64-pc-windows-gnu".to_owned();
+
+ assert!(check_ignore(&config, "// only-x86"));
+ assert!(check_ignore(&config, "// only-linux"));
+ assert!(check_ignore(&config, "// only-msvc"));
+ assert!(check_ignore(&config, "// only-32bit"));
+
+ assert!(!check_ignore(&config, "// only-x86_64-pc-windows-gnu"));
+ assert!(!check_ignore(&config, "// only-x86_64"));
+ assert!(!check_ignore(&config, "// only-windows"));
+ assert!(!check_ignore(&config, "// only-gnu"));
+ assert!(!check_ignore(&config, "// only-64bit"));
+}
+
+#[test]
+fn stage() {
+ let mut config = config();
+ config.stage_id = "stage1".to_owned();
+
+ assert!(check_ignore(&config, "// ignore-stage1"));
+ assert!(!check_ignore(&config, "// ignore-stage2"));
+}
+
+#[test]
+fn cross_compile() {
+ let mut config = config();
+ config.host = "x86_64-apple-darwin".to_owned();
+ config.target = "wasm32-unknown-unknown".to_owned();
+ assert!(check_ignore(&config, "// ignore-cross-compile"));
+
+ config.target = config.host.clone();
+ assert!(!check_ignore(&config, "// ignore-cross-compile"));
+}
+
+#[test]
+fn debugger() {
+ let mut config = config();
+ config.debugger = None;
+ assert!(!check_ignore(&config, "// ignore-cdb"));
+
+ config.debugger = Some(Debugger::Cdb);
+ assert!(check_ignore(&config, "// ignore-cdb"));
+
+ config.debugger = Some(Debugger::Gdb);
+ assert!(check_ignore(&config, "// ignore-gdb"));
+
+ config.debugger = Some(Debugger::Lldb);
+ assert!(check_ignore(&config, "// ignore-lldb"));
+}
+
+#[test]
+fn sanitizers() {
+ let mut config = config();
+
+ // Target that supports all sanitizers:
+ config.target = "x86_64-unknown-linux-gnu".to_owned();
+ assert!(!check_ignore(&config, "// needs-sanitizer-address"));
+ assert!(!check_ignore(&config, "// needs-sanitizer-leak"));
+ assert!(!check_ignore(&config, "// needs-sanitizer-memory"));
+ assert!(!check_ignore(&config, "// needs-sanitizer-thread"));
+
+ // Target that doesn't support sanitizers:
+ config.target = "wasm32-unknown-emscripten".to_owned();
+ assert!(check_ignore(&config, "// needs-sanitizer-address"));
+ assert!(check_ignore(&config, "// needs-sanitizer-leak"));
+ assert!(check_ignore(&config, "// needs-sanitizer-memory"));
+ assert!(check_ignore(&config, "// needs-sanitizer-thread"));
+}
+
+#[test]
+fn asm_support() {
+ let mut config = config();
+
+ config.target = "avr-unknown-gnu-atmega328".to_owned();
+ assert!(check_ignore(&config, "// needs-asm-support"));
+
+ config.target = "i686-unknown-netbsd".to_owned();
+ assert!(!check_ignore(&config, "// needs-asm-support"));
+}
+
+#[test]
+fn channel() {
+ let mut config = config();
+ config.channel = "beta".into();
+
+ assert!(check_ignore(&config, "// ignore-beta"));
+ assert!(check_ignore(&config, "// only-nightly"));
+ assert!(check_ignore(&config, "// only-stable"));
+
+ assert!(!check_ignore(&config, "// only-beta"));
+ assert!(!check_ignore(&config, "// ignore-nightly"));
+ assert!(!check_ignore(&config, "// ignore-stable"));
+}
+
+#[test]
+fn test_extract_version_range() {
+ use super::{extract_llvm_version, extract_version_range};
+
+ assert_eq!(extract_version_range("1.2.3 - 4.5.6", extract_llvm_version), Some((10203, 40506)));
+ assert_eq!(extract_version_range("0 - 4.5.6", extract_llvm_version), Some((0, 40506)));
+ assert_eq!(extract_version_range("1.2.3 -", extract_llvm_version), None);
+ assert_eq!(extract_version_range("1.2.3 - ", extract_llvm_version), None);
+ assert_eq!(extract_version_range("- 4.5.6", extract_llvm_version), None);
+ assert_eq!(extract_version_range("-", extract_llvm_version), None);
+ assert_eq!(extract_version_range(" - 4.5.6", extract_llvm_version), None);
+ assert_eq!(extract_version_range(" - 4.5.6", extract_llvm_version), None);
+ assert_eq!(extract_version_range("0 -", extract_llvm_version), None);
+}
+
+#[test]
+#[should_panic(expected = "Duplicate revision: `rpass1` in line ` rpass1 rpass1`")]
+fn test_duplicate_revisions() {
+ let config = config();
+ parse_rs(&config, "// revisions: rpass1 rpass1");
+}
diff --git a/src/tools/compiletest/src/json.rs b/src/tools/compiletest/src/json.rs
new file mode 100644
index 000000000..10726b984
--- /dev/null
+++ b/src/tools/compiletest/src/json.rs
@@ -0,0 +1,321 @@
+//! These structs are a subset of the ones found in `rustc_errors::json`.
+//! They are only used for deserializing the JSON output emitted by the compiler.
+
+use crate::errors::{Error, ErrorKind};
+use crate::runtest::ProcRes;
+use serde::Deserialize;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+#[derive(Deserialize)]
+struct Diagnostic {
+ message: String,
+ code: Option<DiagnosticCode>,
+ level: String,
+ spans: Vec<DiagnosticSpan>,
+ children: Vec<Diagnostic>,
+ rendered: Option<String>,
+}
+
+#[derive(Deserialize)]
+struct ArtifactNotification {
+ #[allow(dead_code)]
+ artifact: PathBuf,
+}
+
+#[derive(Deserialize)]
+struct UnusedExternNotification {
+ #[allow(dead_code)]
+ lint_level: String,
+ #[allow(dead_code)]
+ unused_extern_names: Vec<String>,
+}
+
+#[derive(Deserialize, Clone)]
+struct DiagnosticSpan {
+ file_name: String,
+ line_start: usize,
+ line_end: usize,
+ column_start: usize,
+ column_end: usize,
+ is_primary: bool,
+ label: Option<String>,
+ suggested_replacement: Option<String>,
+ expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
+}
+
+#[derive(Deserialize)]
+struct FutureIncompatReport {
+ future_incompat_report: Vec<FutureBreakageItem>,
+}
+
+#[derive(Deserialize)]
+struct FutureBreakageItem {
+ diagnostic: Diagnostic,
+}
+
+impl DiagnosticSpan {
+ /// Returns the deepest source span in the macro call stack with a given file name.
+ /// This is either the supplied span, or the span for some macro callsite that expanded to it.
+ fn first_callsite_in_file(&self, file_name: &str) -> &DiagnosticSpan {
+ if self.file_name == file_name {
+ self
+ } else {
+ self.expansion
+ .as_ref()
+ .map(|origin| origin.span.first_callsite_in_file(file_name))
+ .unwrap_or(self)
+ }
+ }
+}
+
+#[derive(Deserialize, Clone)]
+struct DiagnosticSpanMacroExpansion {
+ /// span where macro was applied to generate this code
+ span: DiagnosticSpan,
+
+ /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
+ macro_decl_name: String,
+}
+
+#[derive(Deserialize, Clone)]
+struct DiagnosticCode {
+ /// The code itself.
+ code: String,
+}
+
+pub fn rustfix_diagnostics_only(output: &str) -> String {
+ output
+ .lines()
+ .filter(|line| line.starts_with('{') && serde_json::from_str::<Diagnostic>(line).is_ok())
+ .collect()
+}
+
+pub fn extract_rendered(output: &str) -> String {
+ output
+ .lines()
+ .filter_map(|line| {
+ if line.starts_with('{') {
+ if let Ok(diagnostic) = serde_json::from_str::<Diagnostic>(line) {
+ diagnostic.rendered
+ } else if let Ok(report) = serde_json::from_str::<FutureIncompatReport>(line) {
+ if report.future_incompat_report.is_empty() {
+ None
+ } else {
+ Some(format!(
+ "Future incompatibility report: {}",
+ report
+ .future_incompat_report
+ .into_iter()
+ .map(|item| {
+ format!(
+ "Future breakage diagnostic:\n{}",
+ item.diagnostic
+ .rendered
+ .unwrap_or_else(|| "Not rendered".to_string())
+ )
+ })
+ .collect::<String>()
+ ))
+ }
+ } else if serde_json::from_str::<ArtifactNotification>(line).is_ok() {
+ // Ignore the notification.
+ None
+ } else if serde_json::from_str::<UnusedExternNotification>(line).is_ok() {
+ // Ignore the notification.
+ None
+ } else {
+ print!(
+ "failed to decode compiler output as json: line: {}\noutput: {}",
+ line, output
+ );
+ panic!()
+ }
+ } else {
+ // preserve non-JSON lines, such as ICEs
+ Some(format!("{}\n", line))
+ }
+ })
+ .collect()
+}
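+
+// For illustration: a JSON diagnostic line whose `rendered` field is present
+// contributes that rendered text to the result, future-incompat reports are
+// summarized, artifact and unused-extern notifications are dropped, and any
+// non-JSON line (for example an ICE backtrace) is passed through with a
+// trailing newline.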
+
+pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
+ output.lines().flat_map(|line| parse_line(file_name, line, output, proc_res)).collect()
+}
+
+fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
+ // The compiler sometimes intermingles non-JSON stuff into the
+ // output. This hack just skips over such lines. Yuck.
+ if line.starts_with('{') {
+ match serde_json::from_str::<Diagnostic>(line) {
+ Ok(diagnostic) => {
+ let mut expected_errors = vec![];
+ push_expected_errors(&mut expected_errors, &diagnostic, &[], file_name);
+ expected_errors
+ }
+ Err(error) => {
+ // Ignore the future compat report message - this is handled
+ // by `extract_rendered`
+ if serde_json::from_str::<FutureIncompatReport>(line).is_ok() {
+ vec![]
+ } else {
+ proc_res.fatal(
+ Some(&format!(
+ "failed to decode compiler output as json: \
+ `{}`\nline: {}\noutput: {}",
+ error, line, output
+ )),
+ || (),
+ );
+ }
+ }
+ }
+ } else {
+ vec![]
+ }
+}
+
+fn push_expected_errors(
+ expected_errors: &mut Vec<Error>,
+ diagnostic: &Diagnostic,
+ default_spans: &[&DiagnosticSpan],
+ file_name: &str,
+) {
+ // In case of macro expansions, we need to get the span of the callsite
+ let spans_info_in_this_file: Vec<_> = diagnostic
+ .spans
+ .iter()
+ .map(|span| (span.is_primary, span.first_callsite_in_file(file_name)))
+ .filter(|(_, span)| Path::new(&span.file_name) == Path::new(&file_name))
+ .collect();
+
+ let spans_in_this_file: Vec<_> = spans_info_in_this_file.iter().map(|(_, span)| span).collect();
+
+ let primary_spans: Vec<_> = spans_info_in_this_file
+ .iter()
+ .filter(|(is_primary, _)| *is_primary)
+ .map(|(_, span)| span)
+ .take(1) // sometimes we have more than one showing up in the json; pick first
+ .cloned()
+ .collect();
+ let primary_spans = if primary_spans.is_empty() {
+ // subdiagnostics often don't have a span of their own;
+ // inherit the span from the parent in that case
+ default_spans
+ } else {
+ &primary_spans
+ };
+
+ // We break the output into multiple lines, and then append the
+ // [E123] to every line in the output. This may be overkill. The
+ // intention was to match existing tests that do things like "//|
+ // found `i32` [E123]" and expect to match that somewhere, and yet
+ // also ensure that `//~ ERROR E123` *always* works. The
+ // assumption is that these multi-line error messages are on their
+ // way out anyhow.
+ let with_code = |span: &DiagnosticSpan, text: &str| {
+ match diagnostic.code {
+ Some(ref code) =>
+ // FIXME(#33000) -- it'd be better to use a dedicated
+ // UI harness than to include the line/col number like
+ // this, but some current tests rely on it.
+ //
+ // Note: Do NOT include the filename. These can easily
+ // cause false matches where the expected message
+ // appears in the filename, and hence the message
+ // changes but the test still passes.
+ {
+ format!(
+ "{}:{}: {}:{}: {} [{}]",
+ span.line_start,
+ span.column_start,
+ span.line_end,
+ span.column_end,
+ text,
+ code.code.clone()
+ )
+ }
+ None =>
+ // FIXME(#33000) -- it'd be better to use a dedicated UI harness
+ {
+ format!(
+ "{}:{}: {}:{}: {}",
+ span.line_start, span.column_start, span.line_end, span.column_end, text
+ )
+ }
+ }
+ };
+
+ // Convert multi-line messages into multiple expected
+ // errors. We expect to replace these with something
+ // more structured shortly anyhow.
+ let mut message_lines = diagnostic.message.lines();
+ if let Some(first_line) = message_lines.next() {
+ for span in primary_spans {
+ let msg = with_code(span, first_line);
+ let kind = ErrorKind::from_str(&diagnostic.level).ok();
+ expected_errors.push(Error { line_num: span.line_start, kind, msg });
+ }
+ }
+ for next_line in message_lines {
+ for span in primary_spans {
+ expected_errors.push(Error {
+ line_num: span.line_start,
+ kind: None,
+ msg: with_code(span, next_line),
+ });
+ }
+ }
+
+ // If the message has a suggestion, register that.
+ for span in primary_spans {
+ if let Some(ref suggested_replacement) = span.suggested_replacement {
+ for (index, line) in suggested_replacement.lines().enumerate() {
+ expected_errors.push(Error {
+ line_num: span.line_start + index,
+ kind: Some(ErrorKind::Suggestion),
+ msg: line.to_string(),
+ });
+ }
+ }
+ }
+
+ // Add notes for the backtrace
+ for span in primary_spans {
+ for frame in &span.expansion {
+ push_backtrace(expected_errors, frame, file_name);
+ }
+ }
+
+ // Add notes for any labels that appear in the message.
+ for span in spans_in_this_file.iter().filter(|span| span.label.is_some()) {
+ expected_errors.push(Error {
+ line_num: span.line_start,
+ kind: Some(ErrorKind::Note),
+ msg: span.label.clone().unwrap(),
+ });
+ }
+
+ // Flatten out the children.
+ for child in &diagnostic.children {
+ push_expected_errors(expected_errors, child, primary_spans, file_name);
+ }
+}
+
+fn push_backtrace(
+ expected_errors: &mut Vec<Error>,
+ expansion: &DiagnosticSpanMacroExpansion,
+ file_name: &str,
+) {
+ if Path::new(&expansion.span.file_name) == Path::new(&file_name) {
+ expected_errors.push(Error {
+ line_num: expansion.span.line_start,
+ kind: Some(ErrorKind::Note),
+ msg: format!("in this expansion of {}", expansion.macro_decl_name),
+ });
+ }
+
+ for previous_expansion in &expansion.span.expansion {
+ push_backtrace(expected_errors, previous_expansion, file_name);
+ }
+}
diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs
new file mode 100644
index 000000000..a8a151ca1
--- /dev/null
+++ b/src/tools/compiletest/src/main.rs
@@ -0,0 +1,1014 @@
+#![crate_name = "compiletest"]
+// The `test` crate is the only unstable feature
+// allowed here, just to share similar code.
+#![feature(test)]
+
+extern crate test;
+
+use crate::common::{
+ expected_output_path, output_base_dir, output_relative_path, PanicStrategy, UI_EXTENSIONS,
+};
+use crate::common::{CompareMode, Config, Debugger, Mode, PassMode, TestPaths};
+use crate::util::logv;
+use getopts::Options;
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::time::SystemTime;
+use test::ColorConfig;
+use tracing::*;
+use walkdir::WalkDir;
+
+use self::header::{make_test_description, EarlyProps};
+
+#[cfg(test)]
+mod tests;
+
+pub mod common;
+pub mod compute_diff;
+pub mod errors;
+pub mod header;
+mod json;
+mod raise_fd_limit;
+mod read2;
+pub mod runtest;
+pub mod util;
+
+fn main() {
+ tracing_subscriber::fmt::init();
+
+ let config = parse_config(env::args().collect());
+
+ if config.valgrind_path.is_none() && config.force_valgrind {
+ panic!("Can't find Valgrind to run Valgrind tests");
+ }
+
+ if !config.has_tidy && config.mode == Mode::Rustdoc {
+ eprintln!("warning: `tidy` is not installed; diffs will not be generated");
+ }
+
+ log_config(&config);
+ run_tests(config);
+}
+
+pub fn parse_config(args: Vec<String>) -> Config {
+ let mut opts = Options::new();
+ opts.reqopt("", "compile-lib-path", "path to host shared libraries", "PATH")
+ .reqopt("", "run-lib-path", "path to target shared libraries", "PATH")
+ .reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH")
+ .optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH")
+ .optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
+ .reqopt("", "python", "path to python to use for doc tests", "PATH")
+ .optopt("", "jsondocck-path", "path to jsondocck to use for doc tests", "PATH")
+ .optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM")
+ .optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind")
+ .optopt("", "run-clang-based-tests-with", "path to Clang executable", "PATH")
+ .optopt("", "llvm-filecheck", "path to LLVM's FileCheck binary", "DIR")
+ .reqopt("", "src-base", "directory to scan for test files", "PATH")
+ .reqopt("", "build-base", "directory to deposit test outputs", "PATH")
+ .reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET")
+ .reqopt(
+ "",
+ "mode",
+ "which sort of compile tests to run",
+ "run-pass-valgrind | pretty | debug-info | codegen | rustdoc \
+ | rustdoc-json | codegen-units | incremental | run-make | ui | js-doc-test | mir-opt | assembly",
+ )
+ .reqopt(
+ "",
+ "suite",
+ "which suite of compile tests to run. used for nicer error reporting.",
+ "SUITE",
+ )
+ .optopt(
+ "",
+ "pass",
+ "force {check,build,run}-pass tests to this mode.",
+ "check | build | run",
+ )
+ .optopt("", "run", "whether to execute run-* tests", "auto | always | never")
+ .optflag("", "ignored", "run tests marked as ignored")
+ .optmulti("", "skip", "skip tests matching SUBSTRING. Can be passed multiple times", "SUBSTRING")
+ .optflag("", "exact", "filters match exactly")
+ .optopt(
+ "",
+ "runtool",
+ "supervisor program to run tests under \
+ (eg. emulator, valgrind)",
+ "PROGRAM",
+ )
+ .optmulti("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS")
+ .optmulti("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS")
+ .optflag("", "optimize-tests", "run tests with optimizations enabled")
+ .optopt("", "target-panic", "what panic strategy the target supports", "unwind | abort")
+ .optflag("", "verbose", "run tests verbosely, showing all output")
+ .optflag(
+ "",
+ "bless",
+ "overwrite stderr/stdout files instead of complaining about a mismatch",
+ )
+ .optflag("", "quiet", "print one character per test instead of one line")
+ .optopt("", "color", "coloring: auto, always, never", "WHEN")
+ .optopt("", "logfile", "file to log test execution to", "FILE")
+ .optopt("", "target", "the target to build for", "TARGET")
+ .optopt("", "host", "the host to build for", "HOST")
+ .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH")
+ .optopt("", "gdb", "path to GDB to use for GDB debuginfo tests", "PATH")
+ .optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING")
+ .optopt("", "llvm-version", "the version of LLVM used", "VERSION STRING")
+ .optflag("", "system-llvm", "is LLVM the system LLVM")
+ .optopt("", "android-cross-path", "Android NDK standalone path", "PATH")
+ .optopt("", "adb-path", "path to the android debugger", "PATH")
+ .optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH")
+ .optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH")
+ .reqopt("", "cc", "path to a C compiler", "PATH")
+ .reqopt("", "cxx", "path to a C++ compiler", "PATH")
+ .reqopt("", "cflags", "flags for the C compiler", "FLAGS")
+ .reqopt("", "cxxflags", "flags for the CXX compiler", "FLAGS")
+ .optopt("", "ar", "path to an archiver", "PATH")
+ .optopt("", "linker", "path to a linker", "PATH")
+ .reqopt("", "llvm-components", "list of LLVM components built in", "LIST")
+ .optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH")
+ .optopt("", "nodejs", "the name of nodejs", "PATH")
+ .optopt("", "npm", "the name of npm", "PATH")
+ .optopt("", "remote-test-client", "path to the remote test client", "PATH")
+ .optopt(
+ "",
+ "compare-mode",
+ "mode describing what file the actual ui output will be compared to",
+ "COMPARE MODE",
+ )
+ .optflag(
+ "",
+ "rustfix-coverage",
+ "enable this to generate a Rustfix coverage file, which is saved in \
+ `./<build_base>/rustfix_missing_coverage.txt`",
+ )
+ .optflag("", "force-rerun", "rerun tests even if the inputs are unchanged")
+ .optflag("h", "help", "show this message")
+ .reqopt("", "channel", "current Rust channel", "CHANNEL")
+ .optopt("", "edition", "default Rust edition", "EDITION");
+
+ let (argv0, args_) = args.split_first().unwrap();
+ if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", opts.usage(&message));
+ println!();
+ panic!()
+ }
+
+ let matches = &match opts.parse(args_) {
+ Ok(m) => m,
+ Err(f) => panic!("{:?}", f),
+ };
+
+ if matches.opt_present("h") || matches.opt_present("help") {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", opts.usage(&message));
+ println!();
+ panic!()
+ }
+
+ fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
+ match m.opt_str(nm) {
+ Some(s) => PathBuf::from(&s),
+ None => panic!("no option (=path) found for {}", nm),
+ }
+ }
+
+ fn make_absolute(path: PathBuf) -> PathBuf {
+ if path.is_relative() { env::current_dir().unwrap().join(path) } else { path }
+ }
+
+ let target = opt_str2(matches.opt_str("target"));
+ let android_cross_path = opt_path(matches, "android-cross-path");
+ let (cdb, cdb_version) = analyze_cdb(matches.opt_str("cdb"), &target);
+ let (gdb, gdb_version, gdb_native_rust) =
+ analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path);
+ let (lldb_version, lldb_native_rust) = matches
+ .opt_str("lldb-version")
+ .as_deref()
+ .and_then(extract_lldb_version)
+ .map(|(v, b)| (Some(v), b))
+ .unwrap_or((None, false));
+ let color = match matches.opt_str("color").as_deref() {
+ Some("auto") | None => ColorConfig::AutoColor,
+ Some("always") => ColorConfig::AlwaysColor,
+ Some("never") => ColorConfig::NeverColor,
+ Some(x) => panic!("argument for --color must be auto, always, or never, but found `{}`", x),
+ };
+ let llvm_version =
+ matches.opt_str("llvm-version").as_deref().and_then(header::extract_llvm_version);
+
+ let src_base = opt_path(matches, "src-base");
+ let run_ignored = matches.opt_present("ignored");
+ let mode = matches.opt_str("mode").unwrap().parse().expect("invalid mode");
+ let has_tidy = if mode == Mode::Rustdoc {
+ Command::new("tidy")
+ .arg("--version")
+ .stdout(Stdio::null())
+ .status()
+ .map_or(false, |status| status.success())
+ } else {
+ // Avoid spawning an external command when we know tidy won't be used.
+ false
+ };
+ Config {
+ bless: matches.opt_present("bless"),
+ compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
+ run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
+ rustc_path: opt_path(matches, "rustc-path"),
+ rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
+ rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
+ python: matches.opt_str("python").unwrap(),
+ jsondocck_path: matches.opt_str("jsondocck-path"),
+ valgrind_path: matches.opt_str("valgrind-path"),
+ force_valgrind: matches.opt_present("force-valgrind"),
+ run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"),
+ llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from),
+ llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from),
+ src_base,
+ build_base: opt_path(matches, "build-base"),
+ stage_id: matches.opt_str("stage-id").unwrap(),
+ mode,
+ suite: matches.opt_str("suite").unwrap(),
+ debugger: None,
+ run_ignored,
+ filters: matches.free.clone(),
+ skip: matches.opt_strs("skip"),
+ filter_exact: matches.opt_present("exact"),
+ force_pass_mode: matches.opt_str("pass").map(|mode| {
+ mode.parse::<PassMode>()
+ .unwrap_or_else(|_| panic!("unknown `--pass` option `{}` given", mode))
+ }),
+ run: matches.opt_str("run").and_then(|mode| match mode.as_str() {
+ "auto" => None,
+ "always" => Some(true),
+ "never" => Some(false),
+ _ => panic!("unknown `--run` option `{}` given", mode),
+ }),
+ logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
+ runtool: matches.opt_str("runtool"),
+ host_rustcflags: Some(matches.opt_strs("host-rustcflags").join(" ")),
+ target_rustcflags: Some(matches.opt_strs("target-rustcflags").join(" ")),
+ optimize_tests: matches.opt_present("optimize-tests"),
+ target_panic: match matches.opt_str("target-panic").as_deref() {
+ Some("unwind") | None => PanicStrategy::Unwind,
+ Some("abort") => PanicStrategy::Abort,
+ _ => panic!("unknown `--target-panic` option `{}` given", mode),
+ },
+ target,
+ host: opt_str2(matches.opt_str("host")),
+ cdb,
+ cdb_version,
+ gdb,
+ gdb_version,
+ gdb_native_rust,
+ lldb_version,
+ lldb_native_rust,
+ llvm_version,
+ system_llvm: matches.opt_present("system-llvm"),
+ android_cross_path,
+ adb_path: opt_str2(matches.opt_str("adb-path")),
+ adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")),
+ adb_device_status: opt_str2(matches.opt_str("target")).contains("android")
+ && "(none)" != opt_str2(matches.opt_str("adb-test-dir"))
+ && !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
+ lldb_python_dir: matches.opt_str("lldb-python-dir"),
+ verbose: matches.opt_present("verbose"),
+ quiet: matches.opt_present("quiet"),
+ color,
+ remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
+ compare_mode: matches.opt_str("compare-mode").map(CompareMode::parse),
+ rustfix_coverage: matches.opt_present("rustfix-coverage"),
+ has_tidy,
+ channel: matches.opt_str("channel").unwrap(),
+ edition: matches.opt_str("edition"),
+
+ cc: matches.opt_str("cc").unwrap(),
+ cxx: matches.opt_str("cxx").unwrap(),
+ cflags: matches.opt_str("cflags").unwrap(),
+ cxxflags: matches.opt_str("cxxflags").unwrap(),
+ ar: matches.opt_str("ar").unwrap_or_else(|| String::from("ar")),
+ linker: matches.opt_str("linker"),
+ llvm_components: matches.opt_str("llvm-components").unwrap(),
+ nodejs: matches.opt_str("nodejs"),
+ npm: matches.opt_str("npm"),
+
+ force_rerun: matches.opt_present("force-rerun"),
+ }
+}
+
+pub fn log_config(config: &Config) {
+ let c = config;
+ logv(c, "configuration:".to_string());
+ logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path));
+ logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
+ logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
+ logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
+ logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path));
+ logv(c, format!("src_base: {:?}", config.src_base.display()));
+ logv(c, format!("build_base: {:?}", config.build_base.display()));
+ logv(c, format!("stage_id: {}", config.stage_id));
+ logv(c, format!("mode: {}", config.mode));
+ logv(c, format!("run_ignored: {}", config.run_ignored));
+ logv(c, format!("filters: {:?}", config.filters));
+ logv(c, format!("skip: {:?}", config.skip));
+ logv(c, format!("filter_exact: {}", config.filter_exact));
+ logv(
+ c,
+ format!("force_pass_mode: {}", opt_str(&config.force_pass_mode.map(|m| format!("{}", m))),),
+ );
+ logv(c, format!("runtool: {}", opt_str(&config.runtool)));
+ logv(c, format!("host-rustcflags: {}", opt_str(&config.host_rustcflags)));
+ logv(c, format!("target-rustcflags: {}", opt_str(&config.target_rustcflags)));
+ logv(c, format!("target: {}", config.target));
+ logv(c, format!("host: {}", config.host));
+ logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display()));
+ logv(c, format!("adb_path: {:?}", config.adb_path));
+ logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
+ logv(c, format!("adb_device_status: {}", config.adb_device_status));
+ logv(c, format!("ar: {}", config.ar));
+ logv(c, format!("linker: {:?}", config.linker));
+ logv(c, format!("verbose: {}", config.verbose));
+ logv(c, format!("quiet: {}", config.quiet));
+ logv(c, "\n".to_string());
+}
+
+pub fn opt_str(maybestr: &Option<String>) -> &str {
+ match *maybestr {
+ None => "(none)",
+ Some(ref s) => s,
+ }
+}
+
+pub fn opt_str2(maybestr: Option<String>) -> String {
+ match maybestr {
+ None => "(none)".to_owned(),
+ Some(s) => s,
+ }
+}
+
+pub fn run_tests(config: Config) {
+ // If we want to collect rustfix coverage information,
+ // we first make sure that the coverage file does not exist.
+ // It will be created later on.
+ if config.rustfix_coverage {
+ let mut coverage_file_path = config.build_base.clone();
+ coverage_file_path.push("rustfix_missing_coverage.txt");
+ if coverage_file_path.exists() {
+ if let Err(e) = fs::remove_file(&coverage_file_path) {
+ panic!("Could not delete {} due to {}", coverage_file_path.display(), e)
+ }
+ }
+ }
+
+ // Sadly, macOS needs its file descriptor limits raised for running tests in
+ // parallel (especially when we have lots and lots of child processes).
+ // For context, see #8904.
+ unsafe {
+ raise_fd_limit::raise_fd_limit();
+ }
+ // Prevent issue #21352: on Windows, UAC blocks .exe files whose names contain 'patch' etc.
+ // If #11207 is resolved (adding a manifest to the .exe), this becomes unnecessary.
+ env::set_var("__COMPAT_LAYER", "RunAsInvoker");
+
+ // Let tests know which target they're running as
+ env::set_var("TARGET", &config.target);
+
+ let opts = test_opts(&config);
+
+ let mut configs = Vec::new();
+ if let Mode::DebugInfo = config.mode {
+ // Debugging emscripten code doesn't make sense today
+ if !config.target.contains("emscripten") {
+ configs.extend(configure_cdb(&config));
+ configs.extend(configure_gdb(&config));
+ configs.extend(configure_lldb(&config));
+ }
+ } else {
+ configs.push(config.clone());
+ };
+
+ let mut tests = Vec::new();
+ for c in &configs {
+ make_tests(c, &mut tests);
+ }
+
+ let res = test::run_tests_console(&opts, tests);
+ match res {
+ Ok(true) => {}
+ Ok(false) => {
+ // We want to report that the tests failed, but we also want to give
+ // some indication of just what tests we were running. Especially on
+ // CI, where there can be cross-compiled tests for a lot of
+ // architectures, without this critical information it can be quite
+ // easy to miss which tests failed, and as such fail to reproduce
+ // the failure locally.
+
+ eprintln!(
+ "Some tests failed in compiletest suite={}{} mode={} host={} target={}",
+ config.suite,
+ config.compare_mode.map(|c| format!(" compare_mode={:?}", c)).unwrap_or_default(),
+ config.mode,
+ config.host,
+ config.target
+ );
+
+ std::process::exit(1);
+ }
+ Err(e) => {
+ // We don't know if tests passed or not, but if there was an error
+ // during testing we don't want to just succeed (we may not have
+ // tested something), so fail.
+ //
+ // This should realistically "never" happen, so don't try to make
+ // this a pretty error message.
+ panic!("I/O failure during tests: {:?}", e);
+ }
+ }
+}
+
+fn configure_cdb(config: &Config) -> Option<Config> {
+ config.cdb.as_ref()?;
+
+ Some(Config { debugger: Some(Debugger::Cdb), ..config.clone() })
+}
+
+fn configure_gdb(config: &Config) -> Option<Config> {
+ config.gdb_version?;
+
+ if util::matches_env(&config.target, "msvc") {
+ return None;
+ }
+
+ if config.remote_test_client.is_some() && !config.target.contains("android") {
+ println!(
+ "WARNING: debuginfo tests are not available when \
+ testing with remote"
+ );
+ return None;
+ }
+
+ if config.target.contains("android") {
+ println!(
+ "{} debug-info test uses tcp 5039 port.\
+ please reserve it",
+ config.target
+ );
+
+ // Android debug-info tests use a remote debugger, so we run only one test
+ // thread at a time, since the tests all share the same TCP port to
+ // communicate over.
+ //
+ // We should figure out how to lift this restriction (e.g. by running the
+ // tests on different, dynamically allocated ports).
+ env::set_var("RUST_TEST_THREADS", "1");
+ }
+
+ Some(Config { debugger: Some(Debugger::Gdb), ..config.clone() })
+}
+
+fn configure_lldb(config: &Config) -> Option<Config> {
+ config.lldb_python_dir.as_ref()?;
+
+ if let Some(350) = config.lldb_version {
+ println!(
+ "WARNING: The used version of LLDB (350) has a \
+ known issue that breaks debuginfo tests. See \
+ issue #32520 for more information. Skipping all \
+ LLDB-based tests!",
+ );
+ return None;
+ }
+
+ Some(Config { debugger: Some(Debugger::Lldb), ..config.clone() })
+}
+
+pub fn test_opts(config: &Config) -> test::TestOpts {
+ test::TestOpts {
+ exclude_should_panic: false,
+ filters: config.filters.clone(),
+ filter_exact: config.filter_exact,
+ run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No },
+ format: if config.quiet { test::OutputFormat::Terse } else { test::OutputFormat::Pretty },
+ logfile: config.logfile.clone(),
+ run_tests: true,
+ bench_benchmarks: true,
+ nocapture: match env::var("RUST_TEST_NOCAPTURE") {
+ Ok(val) => &val != "0",
+ Err(_) => false,
+ },
+ color: config.color,
+ shuffle: false,
+ shuffle_seed: None,
+ test_threads: None,
+ skip: config.skip.clone(),
+ list: false,
+ options: test::Options::new(),
+ time_options: None,
+ force_run_in_process: false,
+ }
+}
+
+pub fn make_tests(config: &Config, tests: &mut Vec<test::TestDescAndFn>) {
+ debug!("making tests from {:?}", config.src_base.display());
+ let inputs = common_inputs_stamp(config);
+ collect_tests_from_dir(config, &config.src_base, &PathBuf::new(), &inputs, tests)
+ .unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display()));
+}
+
+/// Returns a stamp constructed from input files common to all test cases.
+fn common_inputs_stamp(config: &Config) -> Stamp {
+ let rust_src_dir = config.find_rust_src_root().expect("Could not find Rust source root");
+
+ let mut stamp = Stamp::from_path(&config.rustc_path);
+
+ // Relevant pretty printer files
+ let pretty_printer_files = [
+ "src/etc/rust_types.py",
+ "src/etc/gdb_load_rust_pretty_printers.py",
+ "src/etc/gdb_lookup.py",
+ "src/etc/gdb_providers.py",
+ "src/etc/lldb_batchmode.py",
+ "src/etc/lldb_lookup.py",
+ "src/etc/lldb_providers.py",
+ ];
+ for file in &pretty_printer_files {
+ let path = rust_src_dir.join(file);
+ stamp.add_path(&path);
+ }
+
+ stamp.add_dir(&config.run_lib_path);
+
+ if let Some(ref rustdoc_path) = config.rustdoc_path {
+ stamp.add_path(&rustdoc_path);
+ stamp.add_path(&rust_src_dir.join("src/etc/htmldocck.py"));
+ }
+
+ // Compiletest itself.
+ stamp.add_dir(&rust_src_dir.join("src/tools/compiletest/"));
+
+ stamp
+}
+
+fn collect_tests_from_dir(
+ config: &Config,
+ dir: &Path,
+ relative_dir_path: &Path,
+ inputs: &Stamp,
+ tests: &mut Vec<test::TestDescAndFn>,
+) -> io::Result<()> {
+ // Ignore directories that contain a file named `compiletest-ignore-dir`.
+ if dir.join("compiletest-ignore-dir").exists() {
+ return Ok(());
+ }
+
+ if config.mode == Mode::RunMake && dir.join("Makefile").exists() {
+ let paths = TestPaths {
+ file: dir.to_path_buf(),
+ relative_dir: relative_dir_path.parent().unwrap().to_path_buf(),
+ };
+ tests.extend(make_test(config, &paths, inputs));
+ return Ok(());
+ }
+
+ // If we find a test foo/bar.rs, we have to build the
+ // output directory `$build/foo` so we can write
+ // `$build/foo/bar` into it. We do this *now* in this
+ // sequential loop because otherwise, if we do it in the
+ // tests themselves, they race for the privilege of
+ // creating the directories and sometimes fail randomly.
+ let build_dir = output_relative_path(config, relative_dir_path);
+ fs::create_dir_all(&build_dir).unwrap();
+
+ // Add each `.rs` file as a test, and recurse further on any
+ // subdirectories we find, except for `aux` directories.
+ for file in fs::read_dir(dir)? {
+ let file = file?;
+ let file_path = file.path();
+ let file_name = file.file_name();
+ if is_test(&file_name) {
+ debug!("found test file: {:?}", file_path.display());
+ let paths =
+ TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() };
+
+ tests.extend(make_test(config, &paths, inputs))
+ } else if file_path.is_dir() {
+ let relative_file_path = relative_dir_path.join(file.file_name());
+ if &file_name != "auxiliary" {
+ debug!("found directory: {:?}", file_path.display());
+ collect_tests_from_dir(config, &file_path, &relative_file_path, inputs, tests)?;
+ }
+ } else {
+ debug!("found other file/directory: {:?}", file_path.display());
+ }
+ }
+ Ok(())
+}
+
+/// Returns true if `file_name` looks like a proper test file name.
+pub fn is_test(file_name: &OsString) -> bool {
+ let file_name = file_name.to_str().unwrap();
+
+ if !file_name.ends_with(".rs") {
+ return false;
+ }
+
+ // `.`, `#`, and `~` are common temp-file prefixes.
+ let invalid_prefixes = &[".", "#", "~"];
+ !invalid_prefixes.iter().any(|p| file_name.starts_with(p))
+}
+
+fn make_test(config: &Config, testpaths: &TestPaths, inputs: &Stamp) -> Vec<test::TestDescAndFn> {
+ let test_path = if config.mode == Mode::RunMake {
+ // Parse directives in the Makefile
+ testpaths.file.join("Makefile")
+ } else {
+ PathBuf::from(&testpaths.file)
+ };
+ let early_props = EarlyProps::from_file(config, &test_path);
+
+ // Incremental tests are special: they inherently cannot be run in parallel.
+ // `runtest::run` will be responsible for iterating over revisions.
+ let revisions = if early_props.revisions.is_empty() || config.mode == Mode::Incremental {
+ vec![None]
+ } else {
+ early_props.revisions.iter().map(Some).collect()
+ };
+ revisions
+ .into_iter()
+ .map(|revision| {
+ let src_file =
+ std::fs::File::open(&test_path).expect("open test file to parse ignores");
+ let cfg = revision.map(|v| &**v);
+ let test_name = crate::make_test_name(config, testpaths, revision);
+ let mut desc = make_test_description(config, test_name, &test_path, src_file, cfg);
+ // Ignore tests that have already run and are up to date with respect to their inputs.
+ if !config.force_rerun {
+ desc.ignore |= is_up_to_date(
+ config,
+ testpaths,
+ &early_props,
+ revision.map(|s| s.as_str()),
+ inputs,
+ );
+ }
+ test::TestDescAndFn { desc, testfn: make_test_closure(config, testpaths, revision) }
+ })
+ .collect()
+}
+
+fn stamp(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
+ output_base_dir(config, testpaths, revision).join("stamp")
+}
+
+fn files_related_to_test(
+ config: &Config,
+ testpaths: &TestPaths,
+ props: &EarlyProps,
+ revision: Option<&str>,
+) -> Vec<PathBuf> {
+ let mut related = vec![];
+
+ if testpaths.file.is_dir() {
+ // run-make tests use their individual directory
+ for entry in WalkDir::new(&testpaths.file) {
+ let path = entry.unwrap().into_path();
+ if path.is_file() {
+ related.push(path);
+ }
+ }
+ } else {
+ related.push(testpaths.file.clone());
+ }
+
+ for aux in &props.aux {
+ let path = testpaths.file.parent().unwrap().join("auxiliary").join(aux);
+ related.push(path);
+ }
+
+ // UI test files.
+ for extension in UI_EXTENSIONS {
+ let path = expected_output_path(testpaths, revision, &config.compare_mode, extension);
+ related.push(path);
+ }
+
+ related
+}
+
+fn is_up_to_date(
+ config: &Config,
+ testpaths: &TestPaths,
+ props: &EarlyProps,
+ revision: Option<&str>,
+ inputs: &Stamp,
+) -> bool {
+ let stamp_name = stamp(config, testpaths, revision);
+ // Check hash.
+ let contents = match fs::read_to_string(&stamp_name) {
+ Ok(f) => f,
+ Err(ref e) if e.kind() == ErrorKind::InvalidData => panic!("Can't read stamp contents"),
+ Err(_) => return false,
+ };
+ let expected_hash = runtest::compute_stamp_hash(config);
+ if contents != expected_hash {
+ return false;
+ }
+
+ // Check timestamps.
+ let mut inputs = inputs.clone();
+ for path in files_related_to_test(config, testpaths, props, revision) {
+ inputs.add_path(&path);
+ }
+
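+ // The test is up to date only if its stamp file is newer than every related input.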
+ inputs < Stamp::from_path(&stamp_name)
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+struct Stamp {
+ time: SystemTime,
+}
+
+impl Stamp {
+ fn from_path(path: &Path) -> Self {
+ let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH };
+ stamp.add_path(path);
+ stamp
+ }
+
+ fn add_path(&mut self, path: &Path) {
+ let modified = fs::metadata(path)
+ .and_then(|metadata| metadata.modified())
+ .unwrap_or(SystemTime::UNIX_EPOCH);
+ self.time = self.time.max(modified);
+ }
+
+ fn add_dir(&mut self, path: &Path) {
+ for entry in WalkDir::new(path) {
+ let entry = entry.unwrap();
+ if entry.file_type().is_file() {
+ let modified = entry
+ .metadata()
+ .ok()
+ .and_then(|metadata| metadata.modified().ok())
+ .unwrap_or(SystemTime::UNIX_EPOCH);
+ self.time = self.time.max(modified);
+ }
+ }
+ }
+}
+
+fn make_test_name(
+ config: &Config,
+ testpaths: &TestPaths,
+ revision: Option<&String>,
+) -> test::TestName {
+ // Print the name of the file, relative to the repository root.
+ // `src_base` looks like `/path/to/rust/src/test/ui`
+ let root_directory = config.src_base.parent().unwrap().parent().unwrap().parent().unwrap();
+ let path = testpaths.file.strip_prefix(root_directory).unwrap();
+ let debugger = match config.debugger {
+ Some(d) => format!("-{}", d),
+ None => String::new(),
+ };
+ let mode_suffix = match config.compare_mode {
+ Some(ref mode) => format!(" ({})", mode.to_str()),
+ None => String::new(),
+ };
+
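+ // e.g. "[ui] src/test/ui/lint/foo.rs", or "[ui] src/test/ui/lint/foo.rs#rev1" for a revision.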
+ test::DynTestName(format!(
+ "[{}{}{}] {}{}",
+ config.mode,
+ debugger,
+ mode_suffix,
+ path.display(),
+ revision.map_or("".to_string(), |rev| format!("#{}", rev))
+ ))
+}
+
+fn make_test_closure(
+ config: &Config,
+ testpaths: &TestPaths,
+ revision: Option<&String>,
+) -> test::TestFn {
+ let config = config.clone();
+ let testpaths = testpaths.clone();
+ let revision = revision.cloned();
+ test::DynTestFn(Box::new(move || runtest::run(config, &testpaths, revision.as_deref())))
+}
+
+/// Returns `true` if the given target is an Android target for the
+/// purposes of GDB testing.
+fn is_android_gdb_target(target: &str) -> bool {
+ matches!(
+ &target[..],
+ "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android"
+ )
+}
+
+/// Returns `true` if the given target is an MSVC target for the purposes of CDB testing.
+fn is_pc_windows_msvc_target(target: &str) -> bool {
+ target.ends_with("-pc-windows-msvc")
+}
+
+fn find_cdb(target: &str) -> Option<OsString> {
+ if !(cfg!(windows) && is_pc_windows_msvc_target(target)) {
+ return None;
+ }
+
+ let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?;
+ let cdb_arch = if cfg!(target_arch = "x86") {
+ "x86"
+ } else if cfg!(target_arch = "x86_64") {
+ "x64"
+ } else if cfg!(target_arch = "aarch64") {
+ "arm64"
+ } else if cfg!(target_arch = "arm") {
+ "arm"
+ } else {
+ return None; // No compatible CDB.exe in the Windows 10 SDK
+ };
+
+ let mut path = PathBuf::new();
+ path.push(pf86);
+ path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too?
+ path.push(cdb_arch);
+ path.push(r"cdb.exe");
+
+ if !path.exists() {
+ return None;
+ }
+
+ Some(path.into_os_string())
+}
+
+/// Returns the path to CDB (if found) along with its version.
+fn analyze_cdb(cdb: Option<String>, target: &str) -> (Option<OsString>, Option<[u16; 4]>) {
+ let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target));
+
+ let mut version = None;
+ if let Some(cdb) = cdb.as_ref() {
+ if let Ok(output) = Command::new(cdb).arg("/version").output() {
+ if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
+ version = extract_cdb_version(&first_line);
+ }
+ }
+ }
+
+ (cdb, version)
+}
+
+fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
+ // Example full_version_line: "cdb version 10.0.18362.1"
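+ // which the parsing below turns into [10, 0, 18362, 1].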
+ let version = full_version_line.rsplit(' ').next()?;
+ let mut components = version.split('.');
+ let major: u16 = components.next().unwrap().parse().unwrap();
+ let minor: u16 = components.next().unwrap().parse().unwrap();
+ let patch: u16 = components.next().unwrap_or("0").parse().unwrap();
+ let build: u16 = components.next().unwrap_or("0").parse().unwrap();
+ Some([major, minor, patch, build])
+}
+
+/// Returns (Path to GDB, GDB Version, GDB has Rust Support)
+fn analyze_gdb(
+ gdb: Option<String>,
+ target: &str,
+ android_cross_path: &PathBuf,
+) -> (Option<String>, Option<u32>, bool) {
+ #[cfg(not(windows))]
+ const GDB_FALLBACK: &str = "gdb";
+ #[cfg(windows)]
+ const GDB_FALLBACK: &str = "gdb.exe";
+
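+ // i.e. GDB 7.11.10 in the ((major * 1000) + minor) * 1000 + patch encoding
+ // produced by `extract_gdb_version` below.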
+ const MIN_GDB_WITH_RUST: u32 = 7011010;
+
+ let fallback_gdb = || {
+ if is_android_gdb_target(target) {
+ let mut gdb_path = match android_cross_path.to_str() {
+ Some(x) => x.to_owned(),
+ None => panic!("cannot find android cross path"),
+ };
+ gdb_path.push_str("/bin/gdb");
+ gdb_path
+ } else {
+ GDB_FALLBACK.to_owned()
+ }
+ };
+
+ let gdb = match gdb {
+ None => fallback_gdb(),
+ Some(ref s) if s.is_empty() => fallback_gdb(), // may be empty if configure found no gdb
+ Some(ref s) => s.to_owned(),
+ };
+
+ let mut version_line = None;
+ if let Ok(output) = Command::new(&gdb).arg("--version").output() {
+ if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
+ version_line = Some(first_line.to_string());
+ }
+ }
+
+ let version = match version_line {
+ Some(line) => extract_gdb_version(&line),
+ None => return (None, None, false),
+ };
+
+ let gdb_native_rust = version.map_or(false, |v| v >= MIN_GDB_WITH_RUST);
+
+ (Some(gdb), version, gdb_native_rust)
+}
+
+fn extract_gdb_version(full_version_line: &str) -> Option<u32> {
+ let full_version_line = full_version_line.trim();
+
+ // GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both
+ // of the ? sections being optional
+
+ // We will parse up to 3 digits for each component, ignoring the date
+
+ // We skip text in parentheses. This avoids accidentally parsing
+ // the openSUSE version, which looks like:
+ // GNU gdb (GDB; openSUSE Leap 15.0) 8.1
+ // This particular form is documented in the GNU coding standards:
+ // https://www.gnu.org/prep/standards/html_node/_002d_002dversion.html#g_t_002d_002dversion
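+ //
+ // For illustration, the parsing below maps e.g.:
+ //   "GNU gdb (GDB) 8.2.1"                   -> Some(8_002_001)
+ //   "GNU gdb (GDB; openSUSE Leap 15.0) 8.1" -> Some(8_001_000)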
+
+ let unbracketed_part = full_version_line.split('[').next().unwrap();
+ let mut splits = unbracketed_part.trim_end().rsplit(' ');
+ let version_string = splits.next().unwrap();
+
+ let mut splits = version_string.split('.');
+ let major = splits.next().unwrap();
+ let minor = splits.next().unwrap();
+ let patch = splits.next();
+
+ let major: u32 = major.parse().unwrap();
+ let (minor, patch): (u32, u32) = match minor.find(not_a_digit) {
+ None => {
+ let minor = minor.parse().unwrap();
+ let patch: u32 = match patch {
+ Some(patch) => match patch.find(not_a_digit) {
+ None => patch.parse().unwrap(),
+ Some(idx) if idx > 3 => 0,
+ Some(idx) => patch[..idx].parse().unwrap(),
+ },
+ None => 0,
+ };
+ (minor, patch)
+ }
+ // There is no patch version after minor-date (e.g. "4-2012").
+ Some(idx) => {
+ let minor = minor[..idx].parse().unwrap();
+ (minor, 0)
+ }
+ };
+
+ Some(((major * 1000) + minor) * 1000 + patch)
+}
+
+/// Returns (LLDB version, LLDB is rust-enabled)
+fn extract_lldb_version(full_version_line: &str) -> Option<(u32, bool)> {
+ // Extract the major LLDB version from the given version string.
+ // LLDB version strings are different for Apple and non-Apple platforms.
+ // The Apple variant looks like this:
+ //
+ // LLDB-179.5 (older versions)
+ // lldb-300.2.51 (new versions)
+ //
+ // We are only interested in the major version number, so this function
+ // will return `Some(179)` and `Some(300)` respectively.
+ //
+ // Upstream versions look like:
+ // lldb version 6.0.1
+ //
+ // There doesn't seem to be a way to correlate the Apple version
+ // with the upstream version, and since the tests were originally
+ // written against Apple versions, we make a fake Apple version by
+ // multiplying the first number by 100. This is a hack, but
+ // normally fine because the only non-Apple version we test is
+ // rust-enabled.
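+ //
+ // For illustration, the parsing below maps "lldb version 6.0.1" to Some((600, false)).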
+
+ let full_version_line = full_version_line.trim();
+
+ if let Some(apple_ver) =
+ full_version_line.strip_prefix("LLDB-").or_else(|| full_version_line.strip_prefix("lldb-"))
+ {
+ if let Some(idx) = apple_ver.find(not_a_digit) {
+ let version: u32 = apple_ver[..idx].parse().unwrap();
+ return Some((version, full_version_line.contains("rust-enabled")));
+ }
+ } else if let Some(lldb_ver) = full_version_line.strip_prefix("lldb version ") {
+ if let Some(idx) = lldb_ver.find(not_a_digit) {
+ let version: u32 = lldb_ver[..idx].parse().ok()?;
+ return Some((version * 100, full_version_line.contains("rust-enabled")));
+ }
+ }
+ None
+}
+
+fn not_a_digit(c: char) -> bool {
+ !c.is_digit(10)
+}
diff --git a/src/tools/compiletest/src/raise_fd_limit.rs b/src/tools/compiletest/src/raise_fd_limit.rs
new file mode 100644
index 000000000..bc2946e2c
--- /dev/null
+++ b/src/tools/compiletest/src/raise_fd_limit.rs
@@ -0,0 +1,54 @@
+/// `raise_fd_limit` exists to work around an issue where launchctl on macOS
+/// defaults the rlimit maxfiles to 256/unlimited. The default soft limit of 256
+/// ends up being far too low for our multithreaded scheduler testing, depending
+/// on the number of cores available.
+///
+/// This fixes issue #7772.
+#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos"))]
+#[allow(non_camel_case_types)]
+pub unsafe fn raise_fd_limit() {
+ use std::cmp;
+ use std::io;
+ use std::mem::size_of_val;
+ use std::ptr::null_mut;
+
+ static CTL_KERN: libc::c_int = 1;
+ static KERN_MAXFILESPERPROC: libc::c_int = 29;
+
+ // The strategy here is to fetch the current resource limits, read the
+ // kern.maxfilesperproc sysctl value, and bump the soft resource limit for
+ // maxfiles up to the sysctl value.
+
+ // Fetch the kern.maxfilesperproc value
+ let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
+ let mut maxfiles: libc::c_int = 0;
+ let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
+ if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size, null_mut(), 0)
+ != 0
+ {
+ let err = io::Error::last_os_error();
+ panic!("raise_fd_limit: error calling sysctl: {}", err);
+ }
+
+ // Fetch the current resource limits
+ let mut rlim = libc::rlimit { rlim_cur: 0, rlim_max: 0 };
+ if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
+ let err = io::Error::last_os_error();
+ panic!("raise_fd_limit: error calling getrlimit: {}", err);
+ }
+
+ // Make sure we're only ever going to increase the rlimit.
+ if rlim.rlim_cur < maxfiles as libc::rlim_t {
+ // Bump the soft limit to the smaller of kern.maxfilesperproc and the hard limit.
+ rlim.rlim_cur = cmp::min(maxfiles as libc::rlim_t, rlim.rlim_max);
+
+ // Set our newly-increased resource limit.
+ if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 {
+ let err = io::Error::last_os_error();
+ panic!("raise_fd_limit: error calling setrlimit: {}", err);
+ }
+ }
+}
+
+#[cfg(not(any(target_os = "macos", target_os = "ios", target_os = "watchos")))]
+pub unsafe fn raise_fd_limit() {}
diff --git a/src/tools/compiletest/src/read2.rs b/src/tools/compiletest/src/read2.rs
new file mode 100644
index 000000000..7640e6517
--- /dev/null
+++ b/src/tools/compiletest/src/read2.rs
@@ -0,0 +1,317 @@
+// FIXME: This is a complete copy of `cargo/src/cargo/util/read2.rs`
+// Consider unifying the read2() implementations in libstd, cargo and here to prevent further code duplication.
+
+#[cfg(test)]
+mod tests;
+
+pub use self::imp::read2;
+use std::io::{self, Write};
+use std::mem::replace;
+use std::process::{Child, Output};
+
+pub fn read2_abbreviated(mut child: Child, filter_paths_from_len: &[String]) -> io::Result<Output> {
+ let mut stdout = ProcOutput::new();
+ let mut stderr = ProcOutput::new();
+
+ drop(child.stdin.take());
+ read2(
+ child.stdout.take().unwrap(),
+ child.stderr.take().unwrap(),
+ &mut |is_stdout, data, _| {
+ if is_stdout { &mut stdout } else { &mut stderr }.extend(data, filter_paths_from_len);
+ data.clear();
+ },
+ )?;
+ let status = child.wait()?;
+
+ Ok(Output { status, stdout: stdout.into_bytes(), stderr: stderr.into_bytes() })
+}
+
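+// Output whose filtered length exceeds HEAD_LEN + TAIL_LEN is abbreviated to the first
+// HEAD_LEN bytes, a "<<<<<< SKIPPED n BYTES >>>>>>" marker, and the last TAIL_LEN bytes.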
+const HEAD_LEN: usize = 160 * 1024;
+const TAIL_LEN: usize = 256 * 1024;
+
+// Whenever a path is filtered out when counting the length of the output, we need to add a
+// placeholder length to ensure that a compiler emitting only filtered paths doesn't cause an OOM.
+//
+// 32 was chosen semi-arbitrarily: it was the highest power of two that still allowed the test
+// suite to pass at the moment of implementing path filtering.
+const FILTERED_PATHS_PLACEHOLDER_LEN: usize = 32;
+
+enum ProcOutput {
+ Full { bytes: Vec<u8>, filtered_len: usize },
+ Abbreviated { head: Vec<u8>, skipped: usize, tail: Box<[u8]> },
+}
+
+impl ProcOutput {
+ fn new() -> Self {
+ ProcOutput::Full { bytes: Vec::new(), filtered_len: 0 }
+ }
+
+ fn extend(&mut self, data: &[u8], filter_paths_from_len: &[String]) {
+ let new_self = match *self {
+ ProcOutput::Full { ref mut bytes, ref mut filtered_len } => {
+ let old_len = bytes.len();
+ bytes.extend_from_slice(data);
+ *filtered_len += data.len();
+
+ // We had problems in the past with tests failing only in some environments,
+ // due to the length of the base path pushing the output size over the limit.
+ //
+ // To make those failures deterministic across all environments we ignore known
+ // paths when calculating the string length, while still including the full
+ // path in the output. This could result in some output being larger than the
+ // threshold, but it's better than having nondeterministic failures.
+ //
+ // The compiler emitting only excluded strings is addressed by adding a
+ // placeholder size for each excluded segment, which will eventually reach
+ // the configured threshold.
+ for path in filter_paths_from_len {
+ let path_bytes = path.as_bytes();
+ // We start matching `path_bytes.len() - 1` bytes before the end of the
+ // previously loaded data, to account for the fact that a path might be
+ // split across multiple `extend` calls. Starting one byte short avoids
+ // double-counting paths already fully contained in the old data.
+ let matches = (&bytes[(old_len.saturating_sub(path_bytes.len() - 1))..])
+ .windows(path_bytes.len())
+ .filter(|window| window == &path_bytes)
+ .count();
+ *filtered_len -= matches * path_bytes.len();
+
+ // We can't just remove the length of the filtered path from the output length,
+ // otherwise a compiler emitting only filtered paths would OOM compiletest. Add
+ // a fixed placeholder length for each path to prevent that.
+ *filtered_len += matches * FILTERED_PATHS_PLACEHOLDER_LEN;
+ }
+
+ let new_len = bytes.len();
+ if *filtered_len <= HEAD_LEN + TAIL_LEN {
+ return;
+ }
+
+ let mut head = replace(bytes, Vec::new());
+ let mut middle = head.split_off(HEAD_LEN);
+ let tail = middle.split_off(middle.len() - TAIL_LEN).into_boxed_slice();
+ let skipped = new_len - HEAD_LEN - TAIL_LEN;
+ ProcOutput::Abbreviated { head, skipped, tail }
+ }
+ ProcOutput::Abbreviated { ref mut skipped, ref mut tail, .. } => {
+ *skipped += data.len();
+ if data.len() <= TAIL_LEN {
+ tail[..data.len()].copy_from_slice(data);
+ tail.rotate_left(data.len());
+ } else {
+ tail.copy_from_slice(&data[(data.len() - TAIL_LEN)..]);
+ }
+ return;
+ }
+ };
+ *self = new_self;
+ }
+
+ fn into_bytes(self) -> Vec<u8> {
+ match self {
+ ProcOutput::Full { bytes, .. } => bytes,
+ ProcOutput::Abbreviated { mut head, skipped, tail } => {
+ write!(&mut head, "\n\n<<<<<< SKIPPED {} BYTES >>>>>>\n\n", skipped).unwrap();
+ head.extend_from_slice(&tail);
+ head
+ }
+ }
+ }
+}
+
+#[cfg(not(any(unix, windows)))]
+mod imp {
+ use std::io::{self, Read};
+ use std::process::{ChildStderr, ChildStdout};
+
+ pub fn read2(
+ out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ let mut buffer = Vec::new();
+ out_pipe.read_to_end(&mut buffer)?;
+ data(true, &mut buffer, true);
+ buffer.clear();
+ err_pipe.read_to_end(&mut buffer)?;
+ data(false, &mut buffer, true);
+ Ok(())
+ }
+}
+
+#[cfg(unix)]
+mod imp {
+ use std::io;
+ use std::io::prelude::*;
+ use std::mem;
+ use std::os::unix::prelude::*;
+ use std::process::{ChildStderr, ChildStdout};
+
+ pub fn read2(
+ mut out_pipe: ChildStdout,
+ mut err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ unsafe {
+ libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ }
+
+ let mut out_done = false;
+ let mut err_done = false;
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
+ fds[0].fd = out_pipe.as_raw_fd();
+ fds[0].events = libc::POLLIN;
+ fds[1].fd = err_pipe.as_raw_fd();
+ fds[1].events = libc::POLLIN;
+ let mut nfds = 2;
+ let mut errfd = 1;
+
+ while nfds > 0 {
+ // wait for either pipe to become readable using `poll`
+ let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) };
+ if r == -1 {
+ let err = io::Error::last_os_error();
+ if err.kind() == io::ErrorKind::Interrupted {
+ continue;
+ }
+ return Err(err);
+ }
+
+ // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
+ // EAGAIN. If we hit EOF, then this will happen because the underlying
+ // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
+ // this case we flip the other fd back into blocking mode and read
+ // whatever's leftover on that file descriptor.
+ let handle = |res: io::Result<_>| match res {
+ Ok(_) => Ok(true),
+ Err(e) => {
+ if e.kind() == io::ErrorKind::WouldBlock {
+ Ok(false)
+ } else {
+ Err(e)
+ }
+ }
+ };
+ if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
+ err_done = true;
+ nfds -= 1;
+ }
+ data(false, &mut err, err_done);
+ if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
+ out_done = true;
+ fds[0].fd = err_pipe.as_raw_fd();
+ errfd = 0;
+ nfds -= 1;
+ }
+ data(true, &mut out, out_done);
+ }
+ Ok(())
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::io;
+ use std::os::windows::prelude::*;
+ use std::process::{ChildStderr, ChildStdout};
+ use std::slice;
+
+ use miow::iocp::{CompletionPort, CompletionStatus};
+ use miow::pipe::NamedPipe;
+ use miow::Overlapped;
+ use winapi::shared::winerror::ERROR_BROKEN_PIPE;
+
+ struct Pipe<'a> {
+ dst: &'a mut Vec<u8>,
+ overlapped: Overlapped,
+ pipe: NamedPipe,
+ done: bool,
+ }
+
+ pub fn read2(
+ out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let port = CompletionPort::new(1)?;
+ port.add_handle(0, &out_pipe)?;
+ port.add_handle(1, &err_pipe)?;
+
+ unsafe {
+ let mut out_pipe = Pipe::new(out_pipe, &mut out);
+ let mut err_pipe = Pipe::new(err_pipe, &mut err);
+
+ out_pipe.read()?;
+ err_pipe.read()?;
+
+ let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
+
+ while !out_pipe.done || !err_pipe.done {
+ for status in port.get_many(&mut status, None)? {
+ if status.token() == 0 {
+ out_pipe.complete(status);
+ data(true, out_pipe.dst, out_pipe.done);
+ out_pipe.read()?;
+ } else {
+ err_pipe.complete(status);
+ data(false, err_pipe.dst, err_pipe.done);
+ err_pipe.read()?;
+ }
+ }
+ }
+
+ Ok(())
+ }
+ }
+
+ impl<'a> Pipe<'a> {
+ unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
+ Pipe {
+ dst: dst,
+ pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
+ overlapped: Overlapped::zero(),
+ done: false,
+ }
+ }
+
+ unsafe fn read(&mut self) -> io::Result<()> {
+ let dst = slice_to_end(self.dst);
+ match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+ self.done = true;
+ Ok(())
+ } else {
+ Err(e)
+ }
+ }
+ }
+ }
+
+ unsafe fn complete(&mut self, status: &CompletionStatus) {
+ let prev = self.dst.len();
+ self.dst.set_len(prev + status.bytes_transferred() as usize);
+ if status.bytes_transferred() == 0 {
+ self.done = true;
+ }
+ }
+ }
+
+ unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
+ if v.capacity() == 0 {
+ v.reserve(16);
+ }
+ if v.capacity() == v.len() {
+ v.reserve(1);
+ }
+ slice::from_raw_parts_mut(v.as_mut_ptr().offset(v.len() as isize), v.capacity() - v.len())
+ }
+}
diff --git a/src/tools/compiletest/src/read2/tests.rs b/src/tools/compiletest/src/read2/tests.rs
new file mode 100644
index 000000000..1ca682a46
--- /dev/null
+++ b/src/tools/compiletest/src/read2/tests.rs
@@ -0,0 +1,123 @@
+use crate::read2::{ProcOutput, FILTERED_PATHS_PLACEHOLDER_LEN, HEAD_LEN, TAIL_LEN};
+
+#[test]
+fn test_abbreviate_short_string() {
+ let mut out = ProcOutput::new();
+ out.extend(b"Hello world!", &[]);
+ assert_eq!(b"Hello world!", &*out.into_bytes());
+}
+
+#[test]
+fn test_abbreviate_short_string_multiple_steps() {
+ let mut out = ProcOutput::new();
+
+ out.extend(b"Hello ", &[]);
+ out.extend(b"world!", &[]);
+
+ assert_eq!(b"Hello world!", &*out.into_bytes());
+}
+
+#[test]
+fn test_abbreviate_long_string() {
+ let mut out = ProcOutput::new();
+
+ let data = vec![b'.'; HEAD_LEN + TAIL_LEN + 16];
+ out.extend(&data, &[]);
+
+ let mut expected = vec![b'.'; HEAD_LEN];
+ expected.extend_from_slice(b"\n\n<<<<<< SKIPPED 16 BYTES >>>>>>\n\n");
+ expected.extend_from_slice(&vec![b'.'; TAIL_LEN]);
+
+ // We first check the length to avoid endless terminal output if the length differs, since
+ // `out` is hundreds of KBs in size.
+ let out = out.into_bytes();
+ assert_eq!(expected.len(), out.len());
+ assert_eq!(expected, out);
+}
+
+#[test]
+fn test_abbreviate_long_string_multiple_steps() {
+ let mut out = ProcOutput::new();
+
+ out.extend(&vec![b'.'; HEAD_LEN], &[]);
+ out.extend(&vec![b'.'; TAIL_LEN], &[]);
+ // Also test whether the rotation works
+ out.extend(&vec![b'!'; 16], &[]);
+ out.extend(&vec![b'?'; 16], &[]);
+
+ let mut expected = vec![b'.'; HEAD_LEN];
+ expected.extend_from_slice(b"\n\n<<<<<< SKIPPED 32 BYTES >>>>>>\n\n");
+ expected.extend_from_slice(&vec![b'.'; TAIL_LEN - 32]);
+ expected.extend_from_slice(&vec![b'!'; 16]);
+ expected.extend_from_slice(&vec![b'?'; 16]);
+
+ // We first check the length to avoid endless terminal output if the length differs, since
+ // `out` is hundreds of KBs in size.
+ let out = out.into_bytes();
+ assert_eq!(expected.len(), out.len());
+ assert_eq!(expected, out);
+}
+
+#[test]
+fn test_abbreviate_filters_are_detected() {
+ let mut out = ProcOutput::new();
+ let filters = &["foo".to_string(), "quux".to_string()];
+
+ out.extend(b"Hello foo", filters);
+ // Check items from a previous extension are not double-counted.
+ out.extend(b"! This is a qu", filters);
+ // Check items are detected across extensions.
+ out.extend(b"ux.", filters);
+
+ match &out {
+ ProcOutput::Full { bytes, filtered_len } => assert_eq!(
+ *filtered_len,
+ bytes.len() + FILTERED_PATHS_PLACEHOLDER_LEN * filters.len()
+ - filters.iter().map(|i| i.len()).sum::<usize>()
+ ),
+ ProcOutput::Abbreviated { .. } => panic!("out should not be abbreviated"),
+ }
+
+ assert_eq!(b"Hello foo! This is a quux.", &*out.into_bytes());
+}
+
+#[test]
+fn test_abbreviate_filters_avoid_abbreviations() {
+ let mut out = ProcOutput::new();
+ let filters = &[std::iter::repeat('a').take(64).collect::<String>()];
+
+ let mut expected = vec![b'.'; HEAD_LEN - FILTERED_PATHS_PLACEHOLDER_LEN as usize];
+ expected.extend_from_slice(filters[0].as_bytes());
+ expected.extend_from_slice(&vec![b'.'; TAIL_LEN]);
+
+ out.extend(&expected, filters);
+
+ // We first check the length to avoid endless terminal output if the length differs, since
+ // `out` is hundreds of KBs in size.
+ let out = out.into_bytes();
+ assert_eq!(expected.len(), out.len());
+ assert_eq!(expected, out);
+}
+
+#[test]
+fn test_abbreviate_filters_can_still_cause_abbreviations() {
+ let mut out = ProcOutput::new();
+ let filters = &[std::iter::repeat('a').take(64).collect::<String>()];
+
+ let mut input = vec![b'.'; HEAD_LEN];
+ input.extend_from_slice(&vec![b'.'; TAIL_LEN]);
+ input.extend_from_slice(filters[0].as_bytes());
+
+ let mut expected = vec![b'.'; HEAD_LEN];
+ expected.extend_from_slice(b"\n\n<<<<<< SKIPPED 64 BYTES >>>>>>\n\n");
+ expected.extend_from_slice(&vec![b'.'; TAIL_LEN - 64]);
+ expected.extend_from_slice(&vec![b'a'; 64]);
+
+ out.extend(&input, filters);
+
+ // We first check the length to avoid endless terminal output if the length differs, since
+ // `out` is hundreds of KBs in size.
+ let out = out.into_bytes();
+ assert_eq!(expected.len(), out.len());
+ assert_eq!(expected, out);
+}
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
new file mode 100644
index 000000000..d3e5a2dd6
--- /dev/null
+++ b/src/tools/compiletest/src/runtest.rs
@@ -0,0 +1,3948 @@
+// ignore-tidy-filelength
+
+use crate::common::{expected_output_path, UI_EXTENSIONS, UI_FIXED, UI_STDERR, UI_STDOUT};
+use crate::common::{incremental_dir, output_base_dir, output_base_name, output_testname_unique};
+use crate::common::{Assembly, Incremental, JsDocTest, MirOpt, RunMake, RustdocJson, Ui};
+use crate::common::{Codegen, CodegenUnits, DebugInfo, Debugger, Rustdoc};
+use crate::common::{CompareMode, FailMode, PassMode};
+use crate::common::{Config, TestPaths};
+use crate::common::{Pretty, RunPassValgrind};
+use crate::common::{UI_RUN_STDERR, UI_RUN_STDOUT};
+use crate::compute_diff::{write_diff, write_filtered_diff};
+use crate::errors::{self, Error, ErrorKind};
+use crate::header::TestProps;
+use crate::json;
+use crate::read2::read2_abbreviated;
+use crate::util::get_pointer_width;
+use crate::util::{logv, PathBufExt};
+use crate::ColorConfig;
+use regex::{Captures, Regex};
+use rustfix::{apply_suggestions, get_suggestions_from_json, Filter};
+
+use std::collections::hash_map::DefaultHasher;
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, create_dir_all, File, OpenOptions};
+use std::hash::{Hash, Hasher};
+use std::io::prelude::*;
+use std::io::{self, BufReader};
+use std::path::{Path, PathBuf};
+use std::process::{Child, Command, ExitStatus, Output, Stdio};
+use std::str;
+
+use glob::glob;
+use lazy_static::lazy_static;
+use tracing::*;
+
+use crate::extract_gdb_version;
+use crate::is_android_gdb_target;
+
+mod debugger;
+use debugger::{check_debugger_output, DebuggerCommands};
+
+#[cfg(test)]
+mod tests;
+
+#[cfg(windows)]
+fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
+ use std::sync::Mutex;
+ use winapi::um::errhandlingapi::SetErrorMode;
+ use winapi::um::winbase::SEM_NOGPFAULTERRORBOX;
+
+ lazy_static! {
+ static ref LOCK: Mutex<()> = Mutex::new(());
+ }
+ // Error mode is a global variable, so lock it so only one thread will change it
+ let _lock = LOCK.lock().unwrap();
+
+ // Tell Windows to not show any UI on errors (such as terminating abnormally).
+ // This is important for running tests, since some of them use abnormal
+ // termination by design. This mode is inherited by all child processes.
+ unsafe {
+ let old_mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
+ SetErrorMode(old_mode | SEM_NOGPFAULTERRORBOX);
+ let r = f();
+ SetErrorMode(old_mode);
+ r
+ }
+}
+
+#[cfg(not(windows))]
+fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
+ f()
+}
+
+/// The name of the environment variable that holds dynamic library locations.
+pub fn dylib_env_var() -> &'static str {
+ if cfg!(windows) {
+ "PATH"
+ } else if cfg!(target_os = "macos") {
+ "DYLD_LIBRARY_PATH"
+ } else if cfg!(target_os = "haiku") {
+ "LIBRARY_PATH"
+ } else {
+ "LD_LIBRARY_PATH"
+ }
+}
+
+/// The platform-specific library name
+pub fn get_lib_name(lib: &str, dylib: bool) -> String {
+ // In some cases (e.g. MUSL), we build a static
+ // library rather than a dynamic library.
+ // In this case, the only path we can pass
+ // with '--extern-meta' is the '.lib' file
+ if !dylib {
+ return format!("lib{}.rlib", lib);
+ }
+
+ if cfg!(windows) {
+ format!("{}.dll", lib)
+ } else if cfg!(target_os = "macos") {
+ format!("lib{}.dylib", lib)
+ } else {
+ format!("lib{}.so", lib)
+ }
+}
+
+pub fn run(config: Config, testpaths: &TestPaths, revision: Option<&str>) {
+ match &*config.target {
+ "arm-linux-androideabi"
+ | "armv7-linux-androideabi"
+ | "thumbv7neon-linux-androideabi"
+ | "aarch64-linux-android" => {
+ if !config.adb_device_status {
+ panic!("android device not available");
+ }
+ }
+
+ _ => {
+ // android has its own gdb handling
+ if config.debugger == Some(Debugger::Gdb) && config.gdb.is_none() {
+ panic!("gdb not available but debuginfo gdb debuginfo test requested");
+ }
+ }
+ }
+
+ if config.verbose {
+ // We're going to be dumping a lot of info. Start on a new line.
+ print!("\n\n");
+ }
+ debug!("running {:?}", testpaths.file.display());
+ let mut props = TestProps::from_file(&testpaths.file, revision, &config);
+ if props.incremental {
+ props.incremental_dir = Some(incremental_dir(&config, testpaths));
+ }
+
+ let cx = TestCx { config: &config, props: &props, testpaths, revision };
+ create_dir_all(&cx.output_base_dir()).unwrap();
+ if props.incremental {
+ cx.init_incremental_test();
+ }
+
+ if config.mode == Incremental {
+ // Incremental tests are special because they cannot be run in
+ // parallel.
+ assert!(!props.revisions.is_empty(), "Incremental tests require revisions.");
+ for revision in &props.revisions {
+ let mut revision_props = TestProps::from_file(&testpaths.file, Some(revision), &config);
+ revision_props.incremental_dir = props.incremental_dir.clone();
+ let rev_cx = TestCx {
+ config: &config,
+ props: &revision_props,
+ testpaths,
+ revision: Some(revision),
+ };
+ rev_cx.run_revision();
+ }
+ } else {
+ cx.run_revision();
+ }
+
+ cx.create_stamp();
+}
+
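+// Hash of the configuration inputs recorded in each test's stamp file; if it changes
+// (e.g. a different stage id or debugger setup), previously passed tests are rerun.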
+pub fn compute_stamp_hash(config: &Config) -> String {
+ let mut hash = DefaultHasher::new();
+ config.stage_id.hash(&mut hash);
+ config.run.hash(&mut hash);
+
+ match config.debugger {
+ Some(Debugger::Cdb) => {
+ config.cdb.hash(&mut hash);
+ }
+
+ Some(Debugger::Gdb) => {
+ config.gdb.hash(&mut hash);
+ env::var_os("PATH").hash(&mut hash);
+ env::var_os("PYTHONPATH").hash(&mut hash);
+ }
+
+ Some(Debugger::Lldb) => {
+ config.python.hash(&mut hash);
+ config.lldb_python_dir.hash(&mut hash);
+ env::var_os("PATH").hash(&mut hash);
+ env::var_os("PYTHONPATH").hash(&mut hash);
+ }
+
+ None => {}
+ }
+
+ if let Ui = config.mode {
+ config.force_pass_mode.hash(&mut hash);
+ }
+
+ format!("{:x}", hash.finish())
+}
+
+#[derive(Copy, Clone)]
+struct TestCx<'test> {
+ config: &'test Config,
+ props: &'test TestProps,
+ testpaths: &'test TestPaths,
+ revision: Option<&'test str>,
+}
+
+enum ReadFrom {
+ Path,
+ Stdin(String),
+}
+
+enum TestOutput {
+ Compile,
+ Run,
+}
+
+/// Will this test be executed? Should we use `make_exe_name`?
+#[derive(Copy, Clone, PartialEq)]
+enum WillExecute {
+ Yes,
+ No,
+ Disabled,
+}
+
+/// Should `--emit metadata` be used?
+#[derive(Copy, Clone)]
+enum EmitMetadata {
+ Yes,
+ No,
+}
+
+impl<'test> TestCx<'test> {
+ /// Code executed for each revision in turn (or, if there are no
+ /// revisions, exactly once, with revision == None).
+ fn run_revision(&self) {
+ if self.props.should_ice && self.config.mode != Incremental {
+ self.fatal("cannot use should-ice in a test that is not cfail");
+ }
+ match self.config.mode {
+ RunPassValgrind => self.run_valgrind_test(),
+ Pretty => self.run_pretty_test(),
+ DebugInfo => self.run_debuginfo_test(),
+ Codegen => self.run_codegen_test(),
+ Rustdoc => self.run_rustdoc_test(),
+ RustdocJson => self.run_rustdoc_json_test(),
+ CodegenUnits => self.run_codegen_units_test(),
+ Incremental => self.run_incremental_test(),
+ RunMake => self.run_rmake_test(),
+ Ui => self.run_ui_test(),
+ MirOpt => self.run_mir_opt_test(),
+ Assembly => self.run_assembly_test(),
+ JsDocTest => self.run_js_doc_test(),
+ }
+ }
+
+ fn pass_mode(&self) -> Option<PassMode> {
+ self.props.pass_mode(self.config)
+ }
+
+ fn should_run(&self, pm: Option<PassMode>) -> WillExecute {
+ let test_should_run = match self.config.mode {
+ Ui if pm == Some(PassMode::Run) || self.props.fail_mode == Some(FailMode::Run) => true,
+ MirOpt if pm == Some(PassMode::Run) => true,
+ Ui | MirOpt => false,
+ mode => panic!("unimplemented for mode {:?}", mode),
+ };
+ if test_should_run { self.run_if_enabled() } else { WillExecute::No }
+ }
+
+ fn run_if_enabled(&self) -> WillExecute {
+ if self.config.run_enabled() { WillExecute::Yes } else { WillExecute::Disabled }
+ }
+
+ fn should_run_successfully(&self, pm: Option<PassMode>) -> bool {
+ match self.config.mode {
+ Ui | MirOpt => pm == Some(PassMode::Run),
+ mode => panic!("unimplemented for mode {:?}", mode),
+ }
+ }
+
+ fn should_compile_successfully(&self, pm: Option<PassMode>) -> bool {
+ match self.config.mode {
+ JsDocTest => true,
+ Ui => pm.is_some() || self.props.fail_mode > Some(FailMode::Build),
+ Incremental => {
+ let revision =
+ self.revision.expect("incremental tests require a list of revisions");
+ if revision.starts_with("rpass") || revision.starts_with("rfail") {
+ true
+ } else if revision.starts_with("cfail") {
+ // FIXME: would be nice if incremental revs could start with "cpass"
+ pm.is_some()
+ } else {
+ panic!("revision name must begin with rpass, rfail, or cfail");
+ }
+ }
+ mode => panic!("unimplemented for mode {:?}", mode),
+ }
+ }
+
+ fn check_if_test_should_compile(&self, proc_res: &ProcRes, pm: Option<PassMode>) {
+ if self.should_compile_successfully(pm) {
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test compilation failed although it shouldn't!", proc_res);
+ }
+ } else {
+ if proc_res.status.success() {
+ self.fatal_proc_rec(
+ &format!("{} test compiled successfully!", self.config.mode)[..],
+ proc_res,
+ );
+ }
+
+ self.check_correct_failure_status(proc_res);
+ }
+ }
+
+ fn run_cfail_test(&self) {
+ let pm = self.pass_mode();
+ let proc_res = self.compile_test(WillExecute::No, self.should_emit_metadata(pm));
+ self.check_if_test_should_compile(&proc_res, pm);
+ self.check_no_compiler_crash(&proc_res, self.props.should_ice);
+
+ let output_to_check = self.get_output(&proc_res);
+ let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
+ if !expected_errors.is_empty() {
+ if !self.props.error_patterns.is_empty() || !self.props.regex_error_patterns.is_empty()
+ {
+ self.fatal("both error pattern and expected errors specified");
+ }
+ self.check_expected_errors(expected_errors, &proc_res);
+ } else {
+ self.check_all_error_patterns(&output_to_check, &proc_res, pm);
+ }
+ if self.props.should_ice {
+ match proc_res.status.code() {
+ Some(101) => (),
+ _ => self.fatal("expected ICE"),
+ }
+ }
+
+ self.check_forbid_output(&output_to_check, &proc_res);
+ }
+
+ fn run_rfail_test(&self) {
+ let pm = self.pass_mode();
+ let should_run = self.run_if_enabled();
+ let proc_res = self.compile_test(should_run, self.should_emit_metadata(pm));
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ if let WillExecute::Disabled = should_run {
+ return;
+ }
+
+ let proc_res = self.exec_compiled_test();
+
+ // The value our Makefile configures valgrind to return on failure
+ const VALGRIND_ERR: i32 = 100;
+ if proc_res.status.code() == Some(VALGRIND_ERR) {
+ self.fatal_proc_rec("run-fail test isn't valgrind-clean!", &proc_res);
+ }
+
+ let output_to_check = self.get_output(&proc_res);
+ self.check_correct_failure_status(&proc_res);
+ self.check_all_error_patterns(&output_to_check, &proc_res, pm);
+ }
+
+ fn get_output(&self, proc_res: &ProcRes) -> String {
+ if self.props.check_stdout {
+ format!("{}{}", proc_res.stdout, proc_res.stderr)
+ } else {
+ proc_res.stderr.clone()
+ }
+ }
+
+ fn check_correct_failure_status(&self, proc_res: &ProcRes) {
+ let expected_status = Some(self.props.failure_status);
+ let received_status = proc_res.status.code();
+
+ if expected_status != received_status {
+ self.fatal_proc_rec(
+ &format!(
+ "Error: expected failure status ({:?}) but received status {:?}.",
+ expected_status, received_status
+ ),
+ proc_res,
+ );
+ }
+ }
+
+ fn run_rpass_test(&self) {
+ let emit_metadata = self.should_emit_metadata(self.pass_mode());
+ let should_run = self.run_if_enabled();
+ let proc_res = self.compile_test(should_run, emit_metadata);
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ if let WillExecute::Disabled = should_run {
+ return;
+ }
+
+ // FIXME(#41968): Move this check to tidy?
+ let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
+ assert!(
+ expected_errors.is_empty(),
+ "run-pass tests with expected warnings should be moved to ui/"
+ );
+
+ let proc_res = self.exec_compiled_test();
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ }
+
+ fn run_valgrind_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ if self.config.valgrind_path.is_none() {
+ assert!(!self.config.force_valgrind);
+ return self.run_rpass_test();
+ }
+
+ let should_run = self.run_if_enabled();
+ let mut proc_res = self.compile_test(should_run, EmitMetadata::No);
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ if let WillExecute::Disabled = should_run {
+ return;
+ }
+
+ let mut new_config = self.config.clone();
+ new_config.runtool = new_config.valgrind_path.clone();
+ let new_cx = TestCx { config: &new_config, ..*self };
+ proc_res = new_cx.exec_compiled_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ }
+
+ fn run_pretty_test(&self) {
+ if self.props.pp_exact.is_some() {
+ logv(self.config, "testing for exact pretty-printing".to_owned());
+ } else {
+ logv(self.config, "testing for converging pretty-printing".to_owned());
+ }
+
+ let rounds = match self.props.pp_exact {
+ Some(_) => 1,
+ None => 2,
+ };
+
+ let src = fs::read_to_string(&self.testpaths.file).unwrap();
+ let mut srcs = vec![src];
+
+ let mut round = 0;
+ while round < rounds {
+ logv(
+ self.config,
+ format!("pretty-printing round {} revision {:?}", round, self.revision),
+ );
+ let read_from =
+ if round == 0 { ReadFrom::Path } else { ReadFrom::Stdin(srcs[round].to_owned()) };
+
+ let proc_res = self.print_source(read_from, &self.props.pretty_mode);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec(
+ &format!(
+ "pretty-printing failed in round {} revision {:?}",
+ round, self.revision
+ ),
+ &proc_res,
+ );
+ }
+
+ let ProcRes { stdout, .. } = proc_res;
+ srcs.push(stdout);
+ round += 1;
+ }
+
+ let mut expected = match self.props.pp_exact {
+ Some(ref file) => {
+ let filepath = self.testpaths.file.parent().unwrap().join(file);
+ fs::read_to_string(&filepath).unwrap()
+ }
+ None => srcs[srcs.len() - 2].clone(),
+ };
+ let mut actual = srcs[srcs.len() - 1].clone();
+
+ if self.props.pp_exact.is_some() {
+ // Now we have to care about line endings
+ let cr = "\r".to_owned();
+ actual = actual.replace(&cr, "");
+ expected = expected.replace(&cr, "");
+ }
+
+ if !self.config.bless {
+ self.compare_source(&expected, &actual);
+ } else if expected != actual {
+ let filepath_buf;
+ let filepath = match &self.props.pp_exact {
+ Some(file) => {
+ filepath_buf = self.testpaths.file.parent().unwrap().join(file);
+ &filepath_buf
+ }
+ None => &self.testpaths.file,
+ };
+ fs::write(filepath, &actual).unwrap();
+ }
+
+ // If we're only making sure that the output matches then just stop here
+ if self.props.pretty_compare_only {
+ return;
+ }
+
+ // Finally, let's make sure it actually appears to remain valid code
+ let proc_res = self.typecheck_source(actual);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("pretty-printed source does not typecheck", &proc_res);
+ }
+
+ if !self.props.pretty_expanded {
+ return;
+ }
+
+ // additionally, run `-Zunpretty=expanded` and try to build it.
+ let proc_res = self.print_source(ReadFrom::Path, "expanded");
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("pretty-printing (expanded) failed", &proc_res);
+ }
+
+ let ProcRes { stdout: expanded_src, .. } = proc_res;
+ let proc_res = self.typecheck_source(expanded_src);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("pretty-printed source (expanded) does not typecheck", &proc_res);
+ }
+ }
+
+ fn print_source(&self, read_from: ReadFrom, pretty_type: &str) -> ProcRes {
+ let aux_dir = self.aux_output_dir_name();
+ let input: &str = match read_from {
+ ReadFrom::Stdin(_) => "-",
+ ReadFrom::Path => self.testpaths.file.to_str().unwrap(),
+ };
+
+ let mut rustc = Command::new(&self.config.rustc_path);
+ rustc
+ .arg(input)
+ .args(&["-Z", &format!("unpretty={}", pretty_type)])
+ .args(&["--target", &self.config.target])
+ .arg("-L")
+ .arg(&aux_dir)
+ .args(&self.props.compile_flags)
+ .envs(self.props.rustc_env.clone());
+ self.maybe_add_external_args(
+ &mut rustc,
+ self.split_maybe_args(&self.config.target_rustcflags),
+ );
+
+ let src = match read_from {
+ ReadFrom::Stdin(src) => Some(src),
+ ReadFrom::Path => None,
+ };
+
+ self.compose_and_run(
+ rustc,
+ self.config.compile_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ src,
+ )
+ }
+
+ fn compare_source(&self, expected: &str, actual: &str) {
+ if expected != actual {
+ self.fatal(&format!(
+ "pretty-printed source does not match expected source\n\
+ expected:\n\
+ ------------------------------------------\n\
+ {}\n\
+ ------------------------------------------\n\
+ actual:\n\
+ ------------------------------------------\n\
+ {}\n\
+ ------------------------------------------\n\
+ diff:\n\
+ ------------------------------------------\n\
+ {}\n",
+ expected,
+ actual,
+ write_diff(expected, actual, 3),
+ ));
+ }
+ }
+
+ fn set_revision_flags(&self, cmd: &mut Command) {
+ if let Some(revision) = self.revision {
+ // Normalize revisions to be lowercase and replace `-`s with `_`s.
+ // Otherwise the `--cfg` flag is not valid.
+ let normalized_revision = revision.to_lowercase().replace("-", "_");
+ cmd.args(&["--cfg", &normalized_revision]);
+ }
+ }
+
+ fn typecheck_source(&self, src: String) -> ProcRes {
+ let mut rustc = Command::new(&self.config.rustc_path);
+
+ let out_dir = self.output_base_name().with_extension("pretty-out");
+ let _ = fs::remove_dir_all(&out_dir);
+ create_dir_all(&out_dir).unwrap();
+
+ let target = if self.props.force_host { &*self.config.host } else { &*self.config.target };
+
+ let aux_dir = self.aux_output_dir_name();
+
+ rustc
+ .arg("-")
+ .arg("-Zno-codegen")
+ .arg("--out-dir")
+ .arg(&out_dir)
+ .arg(&format!("--target={}", target))
+ .arg("-L")
+ .arg(&self.config.build_base)
+ .arg("-L")
+ .arg(aux_dir);
+ self.set_revision_flags(&mut rustc);
+ self.maybe_add_external_args(
+ &mut rustc,
+ self.split_maybe_args(&self.config.target_rustcflags),
+ );
+ rustc.args(&self.props.compile_flags);
+
+ self.compose_and_run_compiler(rustc, Some(src))
+ }
+
+ fn run_debuginfo_test(&self) {
+ match self.config.debugger.unwrap() {
+ Debugger::Cdb => self.run_debuginfo_cdb_test(),
+ Debugger::Gdb => self.run_debuginfo_gdb_test(),
+ Debugger::Lldb => self.run_debuginfo_lldb_test(),
+ }
+ }
+
+ fn run_debuginfo_cdb_test(&self) {
+ let config = Config {
+ target_rustcflags: self.cleanup_debug_info_options(&self.config.target_rustcflags),
+ host_rustcflags: self.cleanup_debug_info_options(&self.config.host_rustcflags),
+ ..self.config.clone()
+ };
+
+ let test_cx = TestCx { config: &config, ..*self };
+
+ test_cx.run_debuginfo_cdb_test_no_opt();
+ }
+
+ fn run_debuginfo_cdb_test_no_opt(&self) {
+ let exe_file = self.make_exe_name();
+
+ // Existing PDB files are updated in-place. When changing the debuginfo
+ // the compiler generates for something, this can lead to the situation
+ // where both the old and the new version of the debuginfo for the same
+ // type are present in the PDB, which is very confusing.
+ // Therefore we delete any existing PDB file before compiling the test
+ // case.
+ // FIXME: If we can reliably detect that MSVC's link.exe is used, then
+ // passing `/INCREMENTAL:NO` might be a cleaner way to do this.
+ let pdb_file = exe_file.with_extension(".pdb");
+ if pdb_file.exists() {
+ std::fs::remove_file(pdb_file).unwrap();
+ }
+
+ // compile test file (it should have 'compile-flags:-g' in the header)
+ let should_run = self.run_if_enabled();
+ let compile_result = self.compile_test(should_run, EmitMetadata::No);
+ if !compile_result.status.success() {
+ self.fatal_proc_rec("compilation failed!", &compile_result);
+ }
+ if let WillExecute::Disabled = should_run {
+ return;
+ }
+
+ let prefixes = {
+ static PREFIXES: &[&str] = &["cdb", "cdbg"];
+ // No "native rust support" variation for CDB yet.
+ PREFIXES
+ };
+
+ // Parse debugger commands etc from test files
+ let DebuggerCommands { commands, check_lines, breakpoint_lines, .. } =
+ match DebuggerCommands::parse_from(
+ &self.testpaths.file,
+ self.config,
+ prefixes,
+ self.revision,
+ ) {
+ Ok(cmds) => cmds,
+ Err(e) => self.fatal(&e),
+ };
+
+ // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/debugger-commands
+ let mut script_str = String::with_capacity(2048);
+ script_str.push_str("version\n"); // List CDB (and more) version info in test output
+ script_str.push_str(".nvlist\n"); // List loaded `*.natvis` files, bulk of custom MSVC debug
+
+ // If a .js file exists next to the source file being tested, then this is a JavaScript
+ // debugging extension that needs to be loaded.
+ let mut js_extension = self.testpaths.file.clone();
+ js_extension.set_extension("cdb.js");
+ if js_extension.exists() {
+ script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension.to_string_lossy()));
+ }
+
+ // Set breakpoints on every line that contains the string "#break"
+ let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
+ for line in &breakpoint_lines {
+ script_str.push_str(&format!("bp `{}:{}`\n", source_file_name, line));
+ }
+
+ // Append the other `cdb-command:`s
+ for line in &commands {
+ script_str.push_str(line);
+ script_str.push_str("\n");
+ }
+
+ script_str.push_str("\nqq\n"); // Quit the debugger (including remote debugger, if any)
+
+ // Write the script into a file
+ debug!("script_str = {}", script_str);
+ self.dump_output_file(&script_str, "debugger.script");
+ let debugger_script = self.make_out_name("debugger.script");
+
+ let cdb_path = &self.config.cdb.as_ref().unwrap();
+ let mut cdb = Command::new(cdb_path);
+ cdb.arg("-lines") // Enable source line debugging.
+ .arg("-cf")
+ .arg(&debugger_script)
+ .arg(&exe_file);
+
+ let debugger_run_result = self.compose_and_run(
+ cdb,
+ self.config.run_lib_path.to_str().unwrap(),
+ None, // aux_path
+ None, // input
+ );
+
+ if !debugger_run_result.status.success() {
+ self.fatal_proc_rec("Error while running CDB", &debugger_run_result);
+ }
+
+ if let Err(e) = check_debugger_output(&debugger_run_result, &check_lines) {
+ self.fatal_proc_rec(&e, &debugger_run_result);
+ }
+ }
+
+ fn run_debuginfo_gdb_test(&self) {
+ let config = Config {
+ target_rustcflags: self.cleanup_debug_info_options(&self.config.target_rustcflags),
+ host_rustcflags: self.cleanup_debug_info_options(&self.config.host_rustcflags),
+ ..self.config.clone()
+ };
+
+ let test_cx = TestCx { config: &config, ..*self };
+
+ test_cx.run_debuginfo_gdb_test_no_opt();
+ }
+
+ fn run_debuginfo_gdb_test_no_opt(&self) {
+ let prefixes = if self.config.gdb_native_rust {
+ // GDB with Rust
+ static PREFIXES: &[&str] = &["gdb", "gdbr"];
+ println!("NOTE: compiletest thinks it is using GDB with native rust support");
+ PREFIXES
+ } else {
+ // Generic GDB
+ static PREFIXES: &[&str] = &["gdb", "gdbg"];
+ println!("NOTE: compiletest thinks it is using GDB without native rust support");
+ PREFIXES
+ };
+
+ let DebuggerCommands { commands, check_lines, breakpoint_lines } =
+ match DebuggerCommands::parse_from(
+ &self.testpaths.file,
+ self.config,
+ prefixes,
+ self.revision,
+ ) {
+ Ok(cmds) => cmds,
+ Err(e) => self.fatal(&e),
+ };
+ let mut cmds = commands.join("\n");
+
+ // compile test file (it should have 'compile-flags:-g' in the header)
+ let should_run = self.run_if_enabled();
+ let compiler_run_result = self.compile_test(should_run, EmitMetadata::No);
+ if !compiler_run_result.status.success() {
+ self.fatal_proc_rec("compilation failed!", &compiler_run_result);
+ }
+ if let WillExecute::Disabled = should_run {
+ return;
+ }
+
+ let exe_file = self.make_exe_name();
+
+ let debugger_run_result;
+ if is_android_gdb_target(&self.config.target) {
+ cmds = cmds.replace("run", "continue");
+
+ let tool_path = match self.config.android_cross_path.to_str() {
+ Some(x) => x.to_owned(),
+ None => self.fatal("cannot find android cross path"),
+ };
+
+ // write debugger script
+ let mut script_str = String::with_capacity(2048);
+ script_str.push_str(&format!("set charset {}\n", Self::charset()));
+ script_str.push_str(&format!("set sysroot {}\n", tool_path));
+ script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
+ script_str.push_str("target remote :5039\n");
+ script_str.push_str(&format!(
+ "set solib-search-path \
+ ./{}/stage2/lib/rustlib/{}/lib/\n",
+ self.config.host, self.config.target
+ ));
+ for line in &breakpoint_lines {
+ script_str.push_str(
+ &format!(
+ "break {:?}:{}\n",
+ self.testpaths.file.file_name().unwrap().to_string_lossy(),
+ *line
+ )[..],
+ );
+ }
+ script_str.push_str(&cmds);
+ script_str.push_str("\nquit\n");
+
+ debug!("script_str = {}", script_str);
+ self.dump_output_file(&script_str, "debugger.script");
+
+ let adb_path = &self.config.adb_path;
+
+ Command::new(adb_path)
+ .arg("push")
+ .arg(&exe_file)
+ .arg(&self.config.adb_test_dir)
+ .status()
+ .unwrap_or_else(|_| panic!("failed to exec `{:?}`", adb_path));
+
+ Command::new(adb_path)
+ .args(&["forward", "tcp:5039", "tcp:5039"])
+ .status()
+ .unwrap_or_else(|_| panic!("failed to exec `{:?}`", adb_path));
+
+ let adb_arg = format!(
+ "export LD_LIBRARY_PATH={}; \
+ gdbserver{} :5039 {}/{}",
+ self.config.adb_test_dir.clone(),
+ if self.config.target.contains("aarch64") { "64" } else { "" },
+ self.config.adb_test_dir.clone(),
+ exe_file.file_name().unwrap().to_str().unwrap()
+ );
+
+ debug!("adb arg: {}", adb_arg);
+ let mut adb = Command::new(adb_path)
+ .args(&["shell", &adb_arg])
+ .stdout(Stdio::piped())
+ .stderr(Stdio::inherit())
+ .spawn()
+ .unwrap_or_else(|_| panic!("failed to exec `{:?}`", adb_path));
+
+ // Wait for the gdbserver to print out "Listening on port ..."
+ // at which point we know that it's started and then we can
+ // execute the debugger below.
+ let mut stdout = BufReader::new(adb.stdout.take().unwrap());
+ let mut line = String::new();
+ loop {
+ line.truncate(0);
+ stdout.read_line(&mut line).unwrap();
+ if line.starts_with("Listening on port 5039") {
+ break;
+ }
+ }
+ drop(stdout);
+
+ let mut debugger_script = OsString::from("-command=");
+ debugger_script.push(self.make_out_name("debugger.script"));
+ let debugger_opts: &[&OsStr] =
+ &["-quiet".as_ref(), "-batch".as_ref(), "-nx".as_ref(), &debugger_script];
+
+ let gdb_path = self.config.gdb.as_ref().unwrap();
+ let Output { status, stdout, stderr } = Command::new(&gdb_path)
+ .args(debugger_opts)
+ .output()
+ .unwrap_or_else(|_| panic!("failed to exec `{:?}`", gdb_path));
+ let cmdline = {
+ let mut gdb = Command::new(&format!("{}-gdb", self.config.target));
+ gdb.args(debugger_opts);
+ let cmdline = self.make_cmdline(&gdb, "");
+ logv(self.config, format!("executing {}", cmdline));
+ cmdline
+ };
+
+ debugger_run_result = ProcRes {
+ status,
+ stdout: String::from_utf8(stdout).unwrap(),
+ stderr: String::from_utf8(stderr).unwrap(),
+ cmdline,
+ };
+ if adb.kill().is_err() {
+ println!("Adb process is already finished.");
+ }
+ } else {
+ let rust_src_root =
+ self.config.find_rust_src_root().expect("Could not find Rust source root");
+ let rust_pp_module_rel_path = Path::new("./src/etc");
+ let rust_pp_module_abs_path =
+ rust_src_root.join(rust_pp_module_rel_path).to_str().unwrap().to_owned();
+ // write debugger script
+ let mut script_str = String::with_capacity(2048);
+ script_str.push_str(&format!("set charset {}\n", Self::charset()));
+ script_str.push_str("show version\n");
+
+ match self.config.gdb_version {
+ Some(version) => {
+ println!("NOTE: compiletest thinks it is using GDB version {}", version);
+
+ if version > extract_gdb_version("7.4").unwrap() {
+ // Add the directory containing the pretty printers to
+ // GDB's script auto loading safe path
+ script_str.push_str(&format!(
+ "add-auto-load-safe-path {}\n",
+ rust_pp_module_abs_path.replace(r"\", r"\\")
+ ));
+
+ let output_base_dir = self.output_base_dir().to_str().unwrap().to_owned();
+
+ // Add the directory containing the output binary to
+ // include embedded pretty printers to GDB's script
+ // auto loading safe path
+ script_str.push_str(&format!(
+ "add-auto-load-safe-path {}\n",
+ output_base_dir.replace(r"\", r"\\")
+ ));
+ }
+ }
+ _ => {
+ println!(
+ "NOTE: compiletest does not know which version of \
+ GDB it is using"
+ );
+ }
+ }
+
+ // The following line actually doesn't have anything to do with
+ // pretty printing; it just tells GDB to print values on one line:
+ script_str.push_str("set print pretty off\n");
+
+ // Add the pretty printer directory to GDB's source-file search path
+ script_str
+ .push_str(&format!("directory {}\n", rust_pp_module_abs_path.replace(r"\", r"\\")));
+
+ // Load the target executable
+ script_str
+ .push_str(&format!("file {}\n", exe_file.to_str().unwrap().replace(r"\", r"\\")));
+
+ // Force GDB to print values in the Rust format.
+ if self.config.gdb_native_rust {
+ script_str.push_str("set language rust\n");
+ }
+
+ // Add line breakpoints
+ for line in &breakpoint_lines {
+ script_str.push_str(&format!(
+ "break '{}':{}\n",
+ self.testpaths.file.file_name().unwrap().to_string_lossy(),
+ *line
+ ));
+ }
+
+ script_str.push_str(&cmds);
+ script_str.push_str("\nquit\n");
+
+ debug!("script_str = {}", script_str);
+ self.dump_output_file(&script_str, "debugger.script");
+
+ let mut debugger_script = OsString::from("-command=");
+ debugger_script.push(self.make_out_name("debugger.script"));
+
+ let debugger_opts: &[&OsStr] =
+ &["-quiet".as_ref(), "-batch".as_ref(), "-nx".as_ref(), &debugger_script];
+
+ let mut gdb = Command::new(self.config.gdb.as_ref().unwrap());
+ gdb.args(debugger_opts).env("PYTHONPATH", rust_pp_module_abs_path);
+
+ debugger_run_result =
+ self.compose_and_run(gdb, self.config.run_lib_path.to_str().unwrap(), None, None);
+ }
+
+ if !debugger_run_result.status.success() {
+ self.fatal_proc_rec("gdb failed to execute", &debugger_run_result);
+ }
+
+ if let Err(e) = check_debugger_output(&debugger_run_result, &check_lines) {
+ self.fatal_proc_rec(&e, &debugger_run_result);
+ }
+ }
+
+ fn run_debuginfo_lldb_test(&self) {
+ if self.config.lldb_python_dir.is_none() {
+ self.fatal("Can't run LLDB test because LLDB's python path is not set.");
+ }
+
+ let config = Config {
+ target_rustcflags: self.cleanup_debug_info_options(&self.config.target_rustcflags),
+ host_rustcflags: self.cleanup_debug_info_options(&self.config.host_rustcflags),
+ ..self.config.clone()
+ };
+
+ let test_cx = TestCx { config: &config, ..*self };
+
+ test_cx.run_debuginfo_lldb_test_no_opt();
+ }
+
+ fn run_debuginfo_lldb_test_no_opt(&self) {
+ // compile test file (it should have 'compile-flags:-g' in the header)
+ let should_run = self.run_if_enabled();
+ let compile_result = self.compile_test(should_run, EmitMetadata::No);
+ if !compile_result.status.success() {
+ self.fatal_proc_rec("compilation failed!", &compile_result);
+ }
+ if let WillExecute::Disabled = should_run {
+ return;
+ }
+
+ let exe_file = self.make_exe_name();
+
+ match self.config.lldb_version {
+ Some(ref version) => {
+ println!("NOTE: compiletest thinks it is using LLDB version {}", version);
+ }
+ _ => {
+ println!(
+ "NOTE: compiletest does not know which version of \
+ LLDB it is using"
+ );
+ }
+ }
+
+ let prefixes = if self.config.lldb_native_rust {
+ static PREFIXES: &[&str] = &["lldb", "lldbr"];
+ println!("NOTE: compiletest thinks it is using LLDB with native rust support");
+ PREFIXES
+ } else {
+ static PREFIXES: &[&str] = &["lldb", "lldbg"];
+ println!("NOTE: compiletest thinks it is using LLDB without native rust support");
+ PREFIXES
+ };
+
+ // Parse debugger commands etc from test files
+ let DebuggerCommands { commands, check_lines, breakpoint_lines, .. } =
+ match DebuggerCommands::parse_from(
+ &self.testpaths.file,
+ self.config,
+ prefixes,
+ self.revision,
+ ) {
+ Ok(cmds) => cmds,
+ Err(e) => self.fatal(&e),
+ };
+
+ // Write debugger script:
+ // We don't want to hang when calling `quit` while the process is still running
+ let mut script_str = String::from("settings set auto-confirm true\n");
+
+ // Make LLDB emit its version, so we have it documented in the test output
+ script_str.push_str("version\n");
+
+ // Switch LLDB into "Rust mode"
+ let rust_src_root =
+ self.config.find_rust_src_root().expect("Could not find Rust source root");
+ let rust_pp_module_rel_path = Path::new("./src/etc/lldb_lookup.py");
+ let rust_pp_module_abs_path =
+ rust_src_root.join(rust_pp_module_rel_path).to_str().unwrap().to_owned();
+
+ let rust_type_regexes = vec![
+ "^(alloc::([a-z_]+::)+)String$",
+ "^&(mut )?str$",
+ "^&(mut )?\\[.+\\]$",
+ "^(std::ffi::([a-z_]+::)+)OsString$",
+ "^(alloc::([a-z_]+::)+)Vec<.+>$",
+ "^(alloc::([a-z_]+::)+)VecDeque<.+>$",
+ "^(alloc::([a-z_]+::)+)BTreeSet<.+>$",
+ "^(alloc::([a-z_]+::)+)BTreeMap<.+>$",
+ "^(std::collections::([a-z_]+::)+)HashMap<.+>$",
+ "^(std::collections::([a-z_]+::)+)HashSet<.+>$",
+ "^(alloc::([a-z_]+::)+)Rc<.+>$",
+ "^(alloc::([a-z_]+::)+)Arc<.+>$",
+ "^(core::([a-z_]+::)+)Cell<.+>$",
+ "^(core::([a-z_]+::)+)Ref<.+>$",
+ "^(core::([a-z_]+::)+)RefMut<.+>$",
+ "^(core::([a-z_]+::)+)RefCell<.+>$",
+ ];
+
+ script_str
+ .push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[..])[..]);
+ script_str.push_str("type synthetic add -l lldb_lookup.synthetic_lookup -x '.*' ");
+ script_str.push_str("--category Rust\n");
+ for type_regex in rust_type_regexes {
+ script_str.push_str("type summary add -F lldb_lookup.summary_lookup -e -x -h ");
+ script_str.push_str(&format!("'{}' ", type_regex));
+ script_str.push_str("--category Rust\n");
+ }
+ script_str.push_str("type category enable Rust\n");
+
+ // Set breakpoints on every line that contains the string "#break"
+ let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
+ for line in &breakpoint_lines {
+ script_str.push_str(&format!(
+ "breakpoint set --file '{}' --line {}\n",
+ source_file_name, line
+ ));
+ }
+
+ // Append the other commands
+ for line in &commands {
+ script_str.push_str(line);
+ script_str.push_str("\n");
+ }
+
+ // Finally, quit the debugger
+ script_str.push_str("\nquit\n");
+
+ // Write the script into a file
+ debug!("script_str = {}", script_str);
+ self.dump_output_file(&script_str, "debugger.script");
+ let debugger_script = self.make_out_name("debugger.script");
+
+ // Let LLDB execute the script via lldb_batchmode.py
+ let debugger_run_result = self.run_lldb(&exe_file, &debugger_script, &rust_src_root);
+
+ if !debugger_run_result.status.success() {
+ self.fatal_proc_rec("Error while running LLDB", &debugger_run_result);
+ }
+
+ if let Err(e) = check_debugger_output(&debugger_run_result, &check_lines) {
+ self.fatal_proc_rec(&e, &debugger_run_result);
+ }
+ }
+
+ fn run_lldb(
+ &self,
+ test_executable: &Path,
+ debugger_script: &Path,
+ rust_src_root: &Path,
+ ) -> ProcRes {
+ // Prepare the lldb_batchmode which executes the debugger script
+ let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py");
+ self.cmd2procres(
+ Command::new(&self.config.python)
+ .arg(&lldb_script_path)
+ .arg(test_executable)
+ .arg(debugger_script)
+ .env("PYTHONUNBUFFERED", "1") // Help debugging #78665
+ .env("PYTHONPATH", self.config.lldb_python_dir.as_ref().unwrap()),
+ )
+ }
+
+ fn cmd2procres(&self, cmd: &mut Command) -> ProcRes {
+ let (status, out, err) = match cmd.output() {
+ Ok(Output { status, stdout, stderr }) => {
+ (status, String::from_utf8(stdout).unwrap(), String::from_utf8(stderr).unwrap())
+ }
+ Err(e) => self.fatal(&format!(
+ "Failed to setup Python process for \
+ LLDB script: {}",
+ e
+ )),
+ };
+
+ self.dump_output(&out, &err);
+ ProcRes { status, stdout: out, stderr: err, cmdline: format!("{:?}", cmd) }
+ }
+
+ fn cleanup_debug_info_options(&self, options: &Option<String>) -> Option<String> {
+ if options.is_none() {
+ return None;
+ }
+
+ // Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS.
+ let options_to_remove = ["-O".to_owned(), "-g".to_owned(), "--debuginfo".to_owned()];
+ let new_options = self
+ .split_maybe_args(options)
+ .into_iter()
+ .filter(|x| !options_to_remove.contains(x))
+ .collect::<Vec<String>>();
+
+ Some(new_options.join(" "))
+ }
+
+ fn maybe_add_external_args(&self, cmd: &mut Command, args: Vec<String>) {
+ // Filter out the arguments that should not be added by runtest here.
+ //
+ // Notable use-cases: do not add our optimisation flag if the test specifies
+ // `compile-flags: -Copt-level=x`, and similarly for the debug-info level.
+ const OPT_FLAGS: &[&str] = &["-O", "-Copt-level=", /*-C<space>*/ "opt-level="];
+ const DEBUG_FLAGS: &[&str] = &["-g", "-Cdebuginfo=", /*-C<space>*/ "debuginfo="];
+
+ // FIXME: ideally we would "just" check the `cmd` itself, but it does not allow inspecting
+ // its arguments. They need to be collected separately. For now I cannot be bothered to
+ // implement this the "right" way.
+ let have_opt_flag =
+ self.props.compile_flags.iter().any(|arg| OPT_FLAGS.iter().any(|f| arg.starts_with(f)));
+ let have_debug_flag = self
+ .props
+ .compile_flags
+ .iter()
+ .any(|arg| DEBUG_FLAGS.iter().any(|f| arg.starts_with(f)));
+
+ for arg in args {
+ if OPT_FLAGS.iter().any(|f| arg.starts_with(f)) && have_opt_flag {
+ continue;
+ }
+ if DEBUG_FLAGS.iter().any(|f| arg.starts_with(f)) && have_debug_flag {
+ continue;
+ }
+ cmd.arg(arg);
+ }
+ }
+
+ fn check_all_error_patterns(
+ &self,
+ output_to_check: &str,
+ proc_res: &ProcRes,
+ pm: Option<PassMode>,
+ ) {
+ if self.props.error_patterns.is_empty() && self.props.regex_error_patterns.is_empty() {
+ if pm.is_some() {
+ // FIXME(#65865)
+ return;
+ } else {
+ self.fatal(&format!(
+ "no error pattern specified in {:?}",
+ self.testpaths.file.display()
+ ));
+ }
+ }
+
+ let mut missing_patterns: Vec<String> = Vec::new();
+
+ self.check_error_patterns(output_to_check, &mut missing_patterns);
+ self.check_regex_error_patterns(output_to_check, proc_res, &mut missing_patterns);
+
+ if missing_patterns.is_empty() {
+ return;
+ }
+
+ if missing_patterns.len() == 1 {
+ self.fatal_proc_rec(
+ &format!("error pattern '{}' not found!", missing_patterns[0]),
+ proc_res,
+ );
+ } else {
+ for pattern in missing_patterns {
+ self.error(&format!("error pattern '{}' not found!", pattern));
+ }
+ self.fatal_proc_rec("multiple error patterns not found", proc_res);
+ }
+ }
+
+ fn check_error_patterns(&self, output_to_check: &str, missing_patterns: &mut Vec<String>) {
+ debug!("check_error_patterns");
+ for pattern in &self.props.error_patterns {
+ if output_to_check.contains(pattern.trim()) {
+ debug!("found error pattern {}", pattern);
+ } else {
+ missing_patterns.push(pattern.to_string());
+ }
+ }
+ }
+
+ fn check_regex_error_patterns(
+ &self,
+ output_to_check: &str,
+ proc_res: &ProcRes,
+ missing_patterns: &mut Vec<String>,
+ ) {
+ debug!("check_regex_error_patterns");
+
+ for pattern in &self.props.regex_error_patterns {
+ let pattern = pattern.trim();
+ let re = match Regex::new(pattern) {
+ Ok(re) => re,
+ Err(err) => {
+ self.fatal_proc_rec(
+ &format!("invalid regex error pattern '{}': {:?}", pattern, err),
+ proc_res,
+ );
+ }
+ };
+ if re.is_match(output_to_check) {
+ debug!("found regex error pattern {}", pattern);
+ } else {
+ missing_patterns.push(pattern.to_string());
+ }
+ }
+ }
+
+ fn check_no_compiler_crash(&self, proc_res: &ProcRes, should_ice: bool) {
+ match proc_res.status.code() {
+ Some(101) if !should_ice => {
+ self.fatal_proc_rec("compiler encountered internal error", proc_res)
+ }
+ None => self.fatal_proc_rec("compiler terminated by signal", proc_res),
+ _ => (),
+ }
+ }
+
+ fn check_forbid_output(&self, output_to_check: &str, proc_res: &ProcRes) {
+ for pat in &self.props.forbid_output {
+ if output_to_check.contains(pat) {
+ self.fatal_proc_rec("forbidden pattern found in compiler output", proc_res);
+ }
+ }
+ }
+
+ fn check_expected_errors(&self, expected_errors: Vec<errors::Error>, proc_res: &ProcRes) {
+ debug!(
+ "check_expected_errors: expected_errors={:?} proc_res.status={:?}",
+ expected_errors, proc_res.status
+ );
+ if proc_res.status.success()
+ && expected_errors.iter().any(|x| x.kind == Some(ErrorKind::Error))
+ {
+ self.fatal_proc_rec("process did not return an error status", proc_res);
+ }
+
+ if self.props.known_bug {
+ if !expected_errors.is_empty() {
+ self.fatal_proc_rec(
+ "`known_bug` tests should not have an expected errors",
+ proc_res,
+ );
+ }
+ return;
+ }
+
+ // On Windows, keep all '\' path separators to match the paths reported in the JSON output
+ // from the compiler
+ let os_file_name = self.testpaths.file.display().to_string();
+
+ // on windows, translate all '\' path separators to '/'
+ let file_name = format!("{}", self.testpaths.file.display()).replace(r"\", "/");
+
+ // If the testcase being checked contains at least one expected "help"
+ // message, then we'll ensure that all "help" messages are expected.
+ // Otherwise, all "help" messages reported by the compiler will be ignored.
+ // This logic also applies to "note" messages.
+ let expect_help = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Help));
+ let expect_note = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Note));
+
+ // Parse the JSON output from the compiler and extract out the messages.
+ let actual_errors = json::parse_output(&os_file_name, &proc_res.stderr, proc_res);
+ let mut unexpected = Vec::new();
+ let mut found = vec![false; expected_errors.len()];
+ for actual_error in &actual_errors {
+ let opt_index =
+ expected_errors.iter().enumerate().position(|(index, expected_error)| {
+ !found[index]
+ && actual_error.line_num == expected_error.line_num
+ && (expected_error.kind.is_none()
+ || actual_error.kind == expected_error.kind)
+ && actual_error.msg.contains(&expected_error.msg)
+ });
+
+ match opt_index {
+ Some(index) => {
+ // found a match, everybody is happy
+ assert!(!found[index]);
+ found[index] = true;
+ }
+
+ None => {
+ // If the test is a known bug, don't require that the error is annotated
+ if self.is_unexpected_compiler_message(actual_error, expect_help, expect_note) {
+ self.error(&format!(
+ "{}:{}: unexpected {}: '{}'",
+ file_name,
+ actual_error.line_num,
+ actual_error
+ .kind
+ .as_ref()
+ .map_or(String::from("message"), |k| k.to_string()),
+ actual_error.msg
+ ));
+ unexpected.push(actual_error);
+ }
+ }
+ }
+ }
+
+ let mut not_found = Vec::new();
+ // anything not yet found is a problem
+ for (index, expected_error) in expected_errors.iter().enumerate() {
+ if !found[index] {
+ self.error(&format!(
+ "{}:{}: expected {} not found: {}",
+ file_name,
+ expected_error.line_num,
+ expected_error.kind.as_ref().map_or("message".into(), |k| k.to_string()),
+ expected_error.msg
+ ));
+ not_found.push(expected_error);
+ }
+ }
+
+ if !unexpected.is_empty() || !not_found.is_empty() {
+ self.error(&format!(
+ "{} unexpected errors found, {} expected errors not found",
+ unexpected.len(),
+ not_found.len()
+ ));
+ println!("status: {}\ncommand: {}", proc_res.status, proc_res.cmdline);
+ if !unexpected.is_empty() {
+ println!("unexpected errors (from JSON output): {:#?}\n", unexpected);
+ }
+ if !not_found.is_empty() {
+ println!("not found errors (from test file): {:#?}\n", not_found);
+ }
+ panic!();
+ }
+ }
+
+ /// Returns `true` if we should report an error about `actual_error`,
+ /// which did not match any of the expected errors. We always require
+ /// errors/warnings to be explicitly listed, but only require
+ /// helps/notes if there are explicit helps/notes given.
+ fn is_unexpected_compiler_message(
+ &self,
+ actual_error: &Error,
+ expect_help: bool,
+ expect_note: bool,
+ ) -> bool {
+ match actual_error.kind {
+ Some(ErrorKind::Help) => expect_help,
+ Some(ErrorKind::Note) => expect_note,
+ Some(ErrorKind::Error) | Some(ErrorKind::Warning) => true,
+ Some(ErrorKind::Suggestion) | None => false,
+ }
+ }
+
+ fn should_emit_metadata(&self, pm: Option<PassMode>) -> EmitMetadata {
+ match (pm, self.props.fail_mode, self.config.mode) {
+ (Some(PassMode::Check), ..) | (_, Some(FailMode::Check), Ui) => EmitMetadata::Yes,
+ _ => EmitMetadata::No,
+ }
+ }
+
+ fn compile_test(&self, will_execute: WillExecute, emit_metadata: EmitMetadata) -> ProcRes {
+ self.compile_test_general(will_execute, emit_metadata, self.props.local_pass_mode())
+ }
+
+ fn compile_test_general(
+ &self,
+ will_execute: WillExecute,
+ emit_metadata: EmitMetadata,
+ local_pm: Option<PassMode>,
+ ) -> ProcRes {
+ // Only use `make_exe_name` when the test ends up being executed.
+ let output_file = match will_execute {
+ WillExecute::Yes => TargetLocation::ThisFile(self.make_exe_name()),
+ WillExecute::No | WillExecute::Disabled => {
+ TargetLocation::ThisDirectory(self.output_base_dir())
+ }
+ };
+
+ let allow_unused = match self.config.mode {
+ Ui => {
+ // UI tests tend to have tons of unused code as
+ // they're just testing various pieces of the compiler, but we don't
+ // want to actually assert warnings about all this code. Instead
+ // let's just ignore unused code warnings by default and tests
+ // can turn them back on if needed.
+ if !self.is_rustdoc()
+ // Note that we use the local pass mode here as we don't want
+ // to set unused to allow if we've overridden the pass mode
+ // via command line flags.
+ && local_pm != Some(PassMode::Run)
+ {
+ AllowUnused::Yes
+ } else {
+ AllowUnused::No
+ }
+ }
+ _ => AllowUnused::No,
+ };
+
+ let mut rustc =
+ self.make_compile_args(&self.testpaths.file, output_file, emit_metadata, allow_unused);
+
+ rustc.arg("-L").arg(&self.aux_output_dir_name());
+
+ self.compose_and_run_compiler(rustc, None)
+ }
+
+ fn document(&self, out_dir: &Path) -> ProcRes {
+ if self.props.build_aux_docs {
+ for rel_ab in &self.props.aux_builds {
+ let aux_testpaths = self.compute_aux_test_paths(rel_ab);
+ let aux_props =
+ self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config);
+ let aux_cx = TestCx {
+ config: self.config,
+ props: &aux_props,
+ testpaths: &aux_testpaths,
+ revision: self.revision,
+ };
+ // Create the directory for the stdout/stderr files.
+ create_dir_all(aux_cx.output_base_dir()).unwrap();
+ let auxres = aux_cx.document(out_dir);
+ if !auxres.status.success() {
+ return auxres;
+ }
+ }
+ }
+
+ let aux_dir = self.aux_output_dir_name();
+
+ let rustdoc_path = self.config.rustdoc_path.as_ref().expect("--rustdoc-path not passed");
+ let mut rustdoc = Command::new(rustdoc_path);
+
+ rustdoc
+ .arg("-L")
+ .arg(self.config.run_lib_path.to_str().unwrap())
+ .arg("-L")
+ .arg(aux_dir)
+ .arg("-o")
+ .arg(out_dir)
+ .arg("--deny")
+ .arg("warnings")
+ .arg(&self.testpaths.file)
+ .args(&self.props.compile_flags);
+
+ if self.config.mode == RustdocJson {
+ rustdoc.arg("--output-format").arg("json").arg("-Zunstable-options");
+ }
+
+ if let Some(ref linker) = self.config.linker {
+ rustdoc.arg(format!("-Clinker={}", linker));
+ }
+
+ self.compose_and_run_compiler(rustdoc, None)
+ }
+
+ fn exec_compiled_test(&self) -> ProcRes {
+ let env = &self.props.exec_env;
+
+ let proc_res = match &*self.config.target {
+ // This is pretty similar to below, we're transforming:
+ //
+ // program arg1 arg2
+ //
+ // into
+ //
+ // remote-test-client run program 2 support-lib.so support-lib2.so arg1 arg2
+ //
+ // The test-client program will upload `program` to the emulator
+ // along with all other support libraries listed (in this case
+ // `support-lib.so` and `support-lib2.so`). It will then execute
+ // the program on the emulator with the arguments specified
+ // (in the environment we give the process) and then report back
+ // the same result.
+ _ if self.config.remote_test_client.is_some() => {
+ let aux_dir = self.aux_output_dir_name();
+ let ProcArgs { prog, args } = self.make_run_args();
+ let mut support_libs = Vec::new();
+ if let Ok(entries) = aux_dir.read_dir() {
+ for entry in entries {
+ let entry = entry.unwrap();
+ if !entry.path().is_file() {
+ continue;
+ }
+ support_libs.push(entry.path());
+ }
+ }
+ let mut test_client =
+ Command::new(self.config.remote_test_client.as_ref().unwrap());
+ test_client
+ .args(&["run", &support_libs.len().to_string(), &prog])
+ .args(support_libs)
+ .args(args)
+ .envs(env.clone());
+ self.compose_and_run(
+ test_client,
+ self.config.run_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ None,
+ )
+ }
+ _ if self.config.target.contains("vxworks") => {
+ let aux_dir = self.aux_output_dir_name();
+ let ProcArgs { prog, args } = self.make_run_args();
+ let mut wr_run = Command::new("wr-run");
+ wr_run.args(&[&prog]).args(args).envs(env.clone());
+ self.compose_and_run(
+ wr_run,
+ self.config.run_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ None,
+ )
+ }
+ _ => {
+ let aux_dir = self.aux_output_dir_name();
+ let ProcArgs { prog, args } = self.make_run_args();
+ let mut program = Command::new(&prog);
+ program.args(args).current_dir(&self.output_base_dir()).envs(env.clone());
+ self.compose_and_run(
+ program,
+ self.config.run_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ None,
+ )
+ }
+ };
+
+ if proc_res.status.success() {
+ // delete the executable after running it to save space.
+ // it is ok if the deletion failed.
+ let _ = fs::remove_file(self.make_exe_name());
+ }
+
+ proc_res
+ }
+
+ /// For each `aux-build: foo/bar` annotation, we look for the
+ /// file in an `auxiliary` directory relative to the test itself.
+ fn compute_aux_test_paths(&self, rel_ab: &str) -> TestPaths {
+ let test_ab = self
+ .testpaths
+ .file
+ .parent()
+ .expect("test file path has no parent")
+ .join("auxiliary")
+ .join(rel_ab);
+ if !test_ab.exists() {
+ self.fatal(&format!("aux-build `{}` source not found", test_ab.display()))
+ }
+
+ TestPaths {
+ file: test_ab,
+ relative_dir: self
+ .testpaths
+ .relative_dir
+ .join(self.output_testname_unique())
+ .join("auxiliary")
+ .join(rel_ab)
+ .parent()
+ .expect("aux-build path has no parent")
+ .to_path_buf(),
+ }
+ }
+
+ fn is_vxworks_pure_static(&self) -> bool {
+ if self.config.target.contains("vxworks") {
+ match env::var("RUST_VXWORKS_TEST_DYLINK") {
+ Ok(s) => s != "1",
+ _ => true,
+ }
+ } else {
+ false
+ }
+ }
+
+ fn is_vxworks_pure_dynamic(&self) -> bool {
+ self.config.target.contains("vxworks") && !self.is_vxworks_pure_static()
+ }
+
+ fn build_all_auxiliary(&self, rustc: &mut Command) -> PathBuf {
+ let aux_dir = self.aux_output_dir_name();
+
+ if !self.props.aux_builds.is_empty() {
+ let _ = fs::remove_dir_all(&aux_dir);
+ create_dir_all(&aux_dir).unwrap();
+ }
+
+ for rel_ab in &self.props.aux_builds {
+ self.build_auxiliary(rel_ab, &aux_dir);
+ }
+
+ for (aux_name, aux_path) in &self.props.aux_crates {
+ let is_dylib = self.build_auxiliary(&aux_path, &aux_dir);
+ let lib_name =
+ get_lib_name(&aux_path.trim_end_matches(".rs").replace('-', "_"), is_dylib);
+ rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir.display(), lib_name));
+ }
+
+ aux_dir
+ }
+
+ fn compose_and_run_compiler(&self, mut rustc: Command, input: Option<String>) -> ProcRes {
+ let aux_dir = self.build_all_auxiliary(&mut rustc);
+ self.props.unset_rustc_env.clone().iter().fold(&mut rustc, |rustc, v| rustc.env_remove(v));
+ rustc.envs(self.props.rustc_env.clone());
+ self.compose_and_run(
+ rustc,
+ self.config.compile_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ input,
+ )
+ }
+
+ /// Builds an aux dependency.
+ ///
+ /// Returns whether or not it is a dylib.
+ fn build_auxiliary(&self, source_path: &str, aux_dir: &Path) -> bool {
+ let aux_testpaths = self.compute_aux_test_paths(source_path);
+ let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config);
+ let aux_output = TargetLocation::ThisDirectory(self.aux_output_dir_name());
+ let aux_cx = TestCx {
+ config: self.config,
+ props: &aux_props,
+ testpaths: &aux_testpaths,
+ revision: self.revision,
+ };
+ // Create the directory for the stdout/stderr files.
+ create_dir_all(aux_cx.output_base_dir()).unwrap();
+ let input_file = &aux_testpaths.file;
+ let mut aux_rustc =
+ aux_cx.make_compile_args(input_file, aux_output, EmitMetadata::No, AllowUnused::No);
+
+ for key in &aux_props.unset_rustc_env {
+ aux_rustc.env_remove(key);
+ }
+ aux_rustc.envs(aux_props.rustc_env.clone());
+
+ let (dylib, crate_type) = if aux_props.no_prefer_dynamic {
+ (true, None)
+ } else if self.config.target.contains("emscripten")
+ || (self.config.target.contains("musl")
+ && !aux_props.force_host
+ && !self.config.host.contains("musl"))
+ || self.config.target.contains("wasm32")
+ || self.config.target.contains("nvptx")
+ || self.is_vxworks_pure_static()
+ || self.config.target.contains("sgx")
+ || self.config.target.contains("bpf")
+ {
+ // We primarily compile all auxiliary libraries as dynamic libraries
+ // to avoid code size bloat and large binaries as much as possible
+ // for the test suite (otherwise including libstd statically in all
+ // executables takes up quite a bit of space).
+ //
+ // For targets like MUSL or Emscripten, however, there is no support for
+ // dynamic libraries, so we just go back to building a normal library. Note
+ // that for MUSL, if the library is built with `force_host`, then it's ok
+ // for it to be a dylib, as the host should always support dylibs.
+ (false, Some("lib"))
+ } else {
+ (true, Some("dylib"))
+ };
+
+ if let Some(crate_type) = crate_type {
+ aux_rustc.args(&["--crate-type", crate_type]);
+ }
+
+ aux_rustc.arg("-L").arg(&aux_dir);
+
+ let auxres = aux_cx.compose_and_run(
+ aux_rustc,
+ aux_cx.config.compile_lib_path.to_str().unwrap(),
+ Some(aux_dir.to_str().unwrap()),
+ None,
+ );
+ if !auxres.status.success() {
+ self.fatal_proc_rec(
+ &format!(
+ "auxiliary build of {:?} failed to compile: ",
+ aux_testpaths.file.display()
+ ),
+ &auxres,
+ );
+ }
+ dylib
+ }
+
+ fn read2_abbreviated(&self, child: Child) -> Output {
+ let mut filter_paths_from_len = Vec::new();
+ let mut add_path = |path: &Path| {
+ let path = path.display().to_string();
+ let windows = path.replace("\\", "\\\\");
+ if windows != path {
+ filter_paths_from_len.push(windows);
+ }
+ filter_paths_from_len.push(path);
+ };
+
+ // List of paths that will not be measured when determining whether the output is larger
+ // than the output truncation threshold.
+ //
+ // Note: avoid adding a subdirectory of an already filtered directory here, otherwise the
+ // same slice of text will be double counted and the truncation might not happen.
+ add_path(&self.config.src_base);
+ add_path(&self.config.build_base);
+
+ read2_abbreviated(child, &filter_paths_from_len).expect("failed to read output")
+ }
+
+ fn compose_and_run(
+ &self,
+ mut command: Command,
+ lib_path: &str,
+ aux_path: Option<&str>,
+ input: Option<String>,
+ ) -> ProcRes {
+ let cmdline = {
+ let cmdline = self.make_cmdline(&command, lib_path);
+ logv(self.config, format!("executing {}", cmdline));
+ cmdline
+ };
+
+ command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::piped());
+
+ // Need to be sure to put both the lib_path and the aux path in the dylib
+ // search path for the child.
+ let mut path =
+ env::split_paths(&env::var_os(dylib_env_var()).unwrap_or_default()).collect::<Vec<_>>();
+ if let Some(p) = aux_path {
+ path.insert(0, PathBuf::from(p))
+ }
+ path.insert(0, PathBuf::from(lib_path));
+
+ // Add the new dylib search path var
+ let newpath = env::join_paths(&path).unwrap();
+ command.env(dylib_env_var(), newpath);
+
+ let mut child = disable_error_reporting(|| command.spawn())
+ .unwrap_or_else(|_| panic!("failed to exec `{:?}`", &command));
+ if let Some(input) = input {
+ child.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
+ }
+
+ let Output { status, stdout, stderr } = self.read2_abbreviated(child);
+
+ let result = ProcRes {
+ status,
+ stdout: String::from_utf8_lossy(&stdout).into_owned(),
+ stderr: String::from_utf8_lossy(&stderr).into_owned(),
+ cmdline,
+ };
+
+ self.dump_output(&result.stdout, &result.stderr);
+
+ result
+ }
+
+ fn is_rustdoc(&self) -> bool {
+ self.config.src_base.ends_with("rustdoc-ui")
+ || self.config.src_base.ends_with("rustdoc-js")
+ || self.config.src_base.ends_with("rustdoc-json")
+ }
+
+ fn make_compile_args(
+ &self,
+ input_file: &Path,
+ output_file: TargetLocation,
+ emit_metadata: EmitMetadata,
+ allow_unused: AllowUnused,
+ ) -> Command {
+ let is_aux = input_file.components().map(|c| c.as_os_str()).any(|c| c == "auxiliary");
+ let is_rustdoc = self.is_rustdoc() && !is_aux;
+ let mut rustc = if !is_rustdoc {
+ Command::new(&self.config.rustc_path)
+ } else {
+ Command::new(&self.config.rustdoc_path.clone().expect("no rustdoc built yet"))
+ };
+ rustc.arg(input_file);
+
+ // Use a single thread for efficiency and a deterministic error message order
+ rustc.arg("-Zthreads=1");
+
+ // Optionally prevent default --target if specified in test compile-flags.
+ let custom_target = self.props.compile_flags.iter().any(|x| x.starts_with("--target"));
+
+ if !custom_target {
+ let target =
+ if self.props.force_host { &*self.config.host } else { &*self.config.target };
+
+ rustc.arg(&format!("--target={}", target));
+ }
+ self.set_revision_flags(&mut rustc);
+
+ if !is_rustdoc {
+ if let Some(ref incremental_dir) = self.props.incremental_dir {
+ rustc.args(&["-C", &format!("incremental={}", incremental_dir.display())]);
+ rustc.args(&["-Z", "incremental-verify-ich"]);
+ }
+
+ if self.config.mode == CodegenUnits {
+ rustc.args(&["-Z", "human_readable_cgu_names"]);
+ }
+ }
+
+ if self.config.optimize_tests && !is_rustdoc {
+ match self.config.mode {
+ Ui => {
+ // If optimize-tests is true we still only want to optimize tests that actually get
+ // executed and that don't specify their own optimization levels.
+ // Note: aux libs don't have a pass-mode, so they won't get optimized
+ // unless compile-flags are set in the aux file.
+ if self.config.optimize_tests
+ && self.props.pass_mode(&self.config) == Some(PassMode::Run)
+ && !self
+ .props
+ .compile_flags
+ .iter()
+ .any(|arg| arg == "-O" || arg.contains("opt-level"))
+ {
+ rustc.arg("-O");
+ }
+ }
+ DebugInfo => { /* debuginfo tests must be unoptimized */ }
+ _ => {
+ rustc.arg("-O");
+ }
+ }
+ }
+
+ match self.config.mode {
+ Incremental => {
+ // If we are extracting and matching errors in the new
+ // fashion, then you want JSON mode. Old-skool error
+ // patterns still match the raw compiler output.
+ if self.props.error_patterns.is_empty()
+ && self.props.regex_error_patterns.is_empty()
+ {
+ rustc.args(&["--error-format", "json"]);
+ rustc.args(&["--json", "future-incompat"]);
+ }
+ rustc.arg("-Zui-testing");
+ rustc.arg("-Zdeduplicate-diagnostics=no");
+ }
+ Ui => {
+ if !self.props.compile_flags.iter().any(|s| s.starts_with("--error-format")) {
+ rustc.args(&["--error-format", "json"]);
+ rustc.args(&["--json", "future-incompat"]);
+ }
+ rustc.arg("-Ccodegen-units=1");
+ rustc.arg("-Zui-testing");
+ rustc.arg("-Zdeduplicate-diagnostics=no");
+ // FIXME: use this for other modes too, for perf?
+ rustc.arg("-Cstrip=debuginfo");
+ }
+ MirOpt => {
+ rustc.args(&[
+ "-Copt-level=1",
+ "-Zdump-mir=all",
+ "-Zvalidate-mir",
+ "-Zdump-mir-exclude-pass-number",
+ "-Zmir-pretty-relative-line-numbers=yes",
+ ]);
+ if let Some(pass) = &self.props.mir_unit_test {
+ rustc.args(&["-Zmir-opt-level=0", &format!("-Zmir-enable-passes=+{}", pass)]);
+ } else {
+ rustc.arg("-Zmir-opt-level=4");
+ }
+
+ let mir_dump_dir = self.get_mir_dump_dir();
+ let _ = fs::remove_dir_all(&mir_dump_dir);
+ create_dir_all(mir_dump_dir.as_path()).unwrap();
+ let mut dir_opt = "-Zdump-mir-dir=".to_string();
+ dir_opt.push_str(mir_dump_dir.to_str().unwrap());
+ debug!("dir_opt: {:?}", dir_opt);
+
+ rustc.arg(dir_opt);
+ }
+ RunPassValgrind | Pretty | DebugInfo | Codegen | Rustdoc | RustdocJson | RunMake
+ | CodegenUnits | JsDocTest | Assembly => {
+ // do not use JSON output
+ }
+ }
+
+ if let (false, EmitMetadata::Yes) = (is_rustdoc, emit_metadata) {
+ rustc.args(&["--emit", "metadata"]);
+ }
+
+ if !is_rustdoc {
+ if self.config.target == "wasm32-unknown-unknown" || self.is_vxworks_pure_static() {
+ // rustc.arg("-g"); // get any backtrace at all on errors
+ } else if !self.props.no_prefer_dynamic {
+ rustc.args(&["-C", "prefer-dynamic"]);
+ }
+ }
+
+ match output_file {
+ TargetLocation::ThisFile(path) => {
+ rustc.arg("-o").arg(path);
+ }
+ TargetLocation::ThisDirectory(path) => {
+ if is_rustdoc {
+ // `rustdoc` uses `-o` for the output directory.
+ rustc.arg("-o").arg(path);
+ } else {
+ rustc.arg("--out-dir").arg(path);
+ }
+ }
+ }
+
+ match self.config.compare_mode {
+ Some(CompareMode::Polonius) => {
+ rustc.args(&["-Zpolonius"]);
+ }
+ Some(CompareMode::Chalk) => {
+ rustc.args(&["-Zchalk"]);
+ }
+ Some(CompareMode::SplitDwarf) => {
+ rustc.args(&["-Csplit-debuginfo=unpacked", "-Zunstable-options"]);
+ }
+ Some(CompareMode::SplitDwarfSingle) => {
+ rustc.args(&["-Csplit-debuginfo=packed", "-Zunstable-options"]);
+ }
+ None => {}
+ }
+
+ // Add `-A unused` before `config` flags and in-test (`props`) flags, so that they can
+ // override this.
+ if let AllowUnused::Yes = allow_unused {
+ rustc.args(&["-A", "unused"]);
+ }
+
+ if self.props.force_host {
+ self.maybe_add_external_args(
+ &mut rustc,
+ self.split_maybe_args(&self.config.host_rustcflags),
+ );
+ } else {
+ self.maybe_add_external_args(
+ &mut rustc,
+ self.split_maybe_args(&self.config.target_rustcflags),
+ );
+ if !is_rustdoc {
+ if let Some(ref linker) = self.config.linker {
+ rustc.arg(format!("-Clinker={}", linker));
+ }
+ }
+ }
+
+ // Use dynamic musl for tests because static doesn't allow creating dylibs
+ if self.config.host.contains("musl") || self.is_vxworks_pure_dynamic() {
+ rustc.arg("-Ctarget-feature=-crt-static");
+ }
+
+ rustc.args(&self.props.compile_flags);
+
+ rustc
+ }
+
+ fn make_exe_name(&self) -> PathBuf {
+ // Using a single letter here to keep the path length down for
+ // Windows. Some test names get very long. rustc creates `rcgu`
+ // files with the module name appended, which can more than
+ // double the length.
+ let mut f = self.output_base_dir().join("a");
+ // FIXME: This is using the host architecture exe suffix, not target!
+ if self.config.target.contains("emscripten") {
+ f = f.with_extra_extension("js");
+ } else if self.config.target.contains("wasm32") {
+ f = f.with_extra_extension("wasm");
+ } else if self.config.target.contains("spirv") {
+ f = f.with_extra_extension("spv");
+ } else if !env::consts::EXE_SUFFIX.is_empty() {
+ f = f.with_extra_extension(env::consts::EXE_SUFFIX);
+ }
+ f
+ }
+
+ fn make_run_args(&self) -> ProcArgs {
+ // If we've got another tool to run under (valgrind),
+ // then split apart its command
+ let mut args = self.split_maybe_args(&self.config.runtool);
+
+ // If this is emscripten, then run tests under nodejs
+ if self.config.target.contains("emscripten") {
+ if let Some(ref p) = self.config.nodejs {
+ args.push(p.clone());
+ } else {
+ self.fatal("no NodeJS binary found (--nodejs)");
+ }
+ // If this is otherwise wasm, then run tests under nodejs with our
+ // shim
+ } else if self.config.target.contains("wasm32") {
+ if let Some(ref p) = self.config.nodejs {
+ args.push(p.clone());
+ } else {
+ self.fatal("no NodeJS binary found (--nodejs)");
+ }
+
+ let src = self
+ .config
+ .src_base
+ .parent()
+ .unwrap() // chop off `ui`
+ .parent()
+ .unwrap() // chop off `test`
+ .parent()
+ .unwrap(); // chop off `src`
+ args.push(src.join("src/etc/wasm32-shim.js").display().to_string());
+ }
+
+ let exe_file = self.make_exe_name();
+
+ // FIXME (#9639): This needs to handle non-utf8 paths
+ args.push(exe_file.to_str().unwrap().to_owned());
+
+ // Add the arguments in the run_flags directive
+ args.extend(self.split_maybe_args(&self.props.run_flags));
+
+ let prog = args.remove(0);
+ ProcArgs { prog, args }
+ }
+
+ fn split_maybe_args(&self, argstr: &Option<String>) -> Vec<String> {
+ match *argstr {
+ Some(ref s) => s
+ .split(' ')
+ .filter_map(|s| {
+ if s.chars().all(|c| c.is_whitespace()) { None } else { Some(s.to_owned()) }
+ })
+ .collect(),
+ None => Vec::new(),
+ }
+ }
+
+ fn make_cmdline(&self, command: &Command, libpath: &str) -> String {
+ use crate::util;
+
+ // Linux and mac don't require adjusting the library search path
+ if cfg!(unix) {
+ format!("{:?}", command)
+ } else {
+ // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
+ // for diagnostic purposes
+ fn lib_path_cmd_prefix(path: &str) -> String {
+ format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
+ }
+
+ format!("{} {:?}", lib_path_cmd_prefix(libpath), command)
+ }
+ }
+
+ fn dump_output(&self, out: &str, err: &str) {
+ let revision = if let Some(r) = self.revision { format!("{}.", r) } else { String::new() };
+
+ self.dump_output_file(out, &format!("{}out", revision));
+ self.dump_output_file(err, &format!("{}err", revision));
+ self.maybe_dump_to_stdout(out, err);
+ }
+
+ fn dump_output_file(&self, out: &str, extension: &str) {
+ let outfile = self.make_out_name(extension);
+ fs::write(&outfile, out).unwrap();
+ }
+
+ /// Creates a filename for output with the given extension.
+ /// E.g., `/.../testname.revision.mode/testname.extension`.
+ fn make_out_name(&self, extension: &str) -> PathBuf {
+ self.output_base_name().with_extension(extension)
+ }
+
+ /// Gets the directory where auxiliary files are written.
+ /// E.g., `/.../testname.revision.mode/auxiliary/`.
+ fn aux_output_dir_name(&self) -> PathBuf {
+ self.output_base_dir()
+ .join("auxiliary")
+ .with_extra_extension(self.config.mode.disambiguator())
+ }
+
+ /// Generates a unique name for the test, such as `testname.revision.mode`.
+ fn output_testname_unique(&self) -> PathBuf {
+ output_testname_unique(self.config, self.testpaths, self.safe_revision())
+ }
+
+ /// The revision, ignored for incremental compilation since it wants all revisions in
+ /// the same directory.
+ fn safe_revision(&self) -> Option<&str> {
+ if self.config.mode == Incremental { None } else { self.revision }
+ }
+
+ /// Gets the absolute path to the directory where all output for the given
+ /// test/revision should reside.
+ /// E.g., `/path/to/build/host-triple/test/ui/relative/testname.revision.mode/`.
+ fn output_base_dir(&self) -> PathBuf {
+ output_base_dir(self.config, self.testpaths, self.safe_revision())
+ }
+
+ /// Gets the absolute path to the base filename used as output for the given
+ /// test/revision.
+ /// E.g., `/.../relative/testname.revision.mode/testname`.
+ fn output_base_name(&self) -> PathBuf {
+ output_base_name(self.config, self.testpaths, self.safe_revision())
+ }
+
+ fn maybe_dump_to_stdout(&self, out: &str, err: &str) {
+ if self.config.verbose {
+ println!("------stdout------------------------------");
+ println!("{}", out);
+ println!("------stderr------------------------------");
+ println!("{}", err);
+ println!("------------------------------------------");
+ }
+ }
+
+ fn error(&self, err: &str) {
+ match self.revision {
+ Some(rev) => println!("\nerror in revision `{}`: {}", rev, err),
+ None => println!("\nerror: {}", err),
+ }
+ }
+
+ fn fatal(&self, err: &str) -> ! {
+ self.error(err);
+ error!("fatal error, panic: {:?}", err);
+ panic!("fatal error");
+ }
+
+ fn fatal_proc_rec(&self, err: &str, proc_res: &ProcRes) -> ! {
+ self.error(err);
+ proc_res.fatal(None, || ());
+ }
+
+ fn fatal_proc_rec_with_ctx(
+ &self,
+ err: &str,
+ proc_res: &ProcRes,
+ on_failure: impl FnOnce(Self),
+ ) -> ! {
+ self.error(err);
+ proc_res.fatal(None, || on_failure(*self));
+ }
+
+ // codegen tests (using FileCheck)
+
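+    /// Compiles the test with `--emit=llvm-ir` so that the resulting IR can be
+    /// verified with `FileCheck`.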
+ fn compile_test_and_save_ir(&self) -> ProcRes {
+ let aux_dir = self.aux_output_dir_name();
+
+ let output_file = TargetLocation::ThisDirectory(self.output_base_dir());
+ let input_file = &self.testpaths.file;
+ let mut rustc =
+ self.make_compile_args(input_file, output_file, EmitMetadata::No, AllowUnused::No);
+ rustc.arg("-L").arg(aux_dir).arg("--emit=llvm-ir");
+
+ self.compose_and_run_compiler(rustc, None)
+ }
+
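+    /// Compiles the test to assembly (per its `assembly-output` header) and returns
+    /// the compiler result together with the path of the emitted `.s` file.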
+ fn compile_test_and_save_assembly(&self) -> (ProcRes, PathBuf) {
+        // This works with both `--emit asm` (as the default output name for the assembly)
+        // and `ptx-linker`, because the latter can write its output at the requested location.
+ let output_path = self.output_base_name().with_extension("s");
+
+ let output_file = TargetLocation::ThisFile(output_path.clone());
+ let input_file = &self.testpaths.file;
+ let mut rustc =
+ self.make_compile_args(input_file, output_file, EmitMetadata::No, AllowUnused::No);
+
+ rustc.arg("-L").arg(self.aux_output_dir_name());
+
+ match self.props.assembly_output.as_ref().map(AsRef::as_ref) {
+ Some("emit-asm") => {
+ rustc.arg("--emit=asm");
+ }
+
+ Some("ptx-linker") => {
+ // No extra flags needed.
+ }
+
+ Some(_) => self.fatal("unknown 'assembly-output' header"),
+ None => self.fatal("missing 'assembly-output' header"),
+ }
+
+ (self.compose_and_run_compiler(rustc, None), output_path)
+ }
+
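+    /// Runs LLVM `FileCheck` over `output` using the `CHECK` prefix, a target-specific
+    /// `MSVC`/`NONMSVC` prefix, and the current revision's prefix if there is one.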
+ fn verify_with_filecheck(&self, output: &Path) -> ProcRes {
+ let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap());
+ filecheck.arg("--input-file").arg(output).arg(&self.testpaths.file);
+ // It would be more appropriate to make most of the arguments configurable through
+ // a comment-attribute similar to `compile-flags`. For example, --check-prefixes is a very
+ // useful flag.
+ //
+ // For now, though…
+ let prefix_for_target =
+ if self.config.target.contains("msvc") { "MSVC" } else { "NONMSVC" };
+ let prefixes = if let Some(rev) = self.revision {
+ format!("CHECK,{},{}", prefix_for_target, rev)
+ } else {
+ format!("CHECK,{}", prefix_for_target)
+ };
+ if self.config.llvm_version.unwrap_or(0) >= 130000 {
+ filecheck.args(&["--allow-unused-prefixes", "--check-prefixes", &prefixes]);
+ } else {
+ filecheck.args(&["--check-prefixes", &prefixes]);
+ }
+ self.compose_and_run(filecheck, "", None, None)
+ }
+
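+    /// Runs a codegen test: compiles to LLVM IR and verifies the dump with `FileCheck`.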
+ fn run_codegen_test(&self) {
+ if self.config.llvm_filecheck.is_none() {
+ self.fatal("missing --llvm-filecheck");
+ }
+
+ let proc_res = self.compile_test_and_save_ir();
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let output_path = self.output_base_name().with_extension("ll");
+ let proc_res = self.verify_with_filecheck(&output_path);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("verification with 'FileCheck' failed", &proc_res);
+ }
+ }
+
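+    /// Runs an assembly test: compiles to assembly and verifies the output with
+    /// `FileCheck`.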
+ fn run_assembly_test(&self) {
+ if self.config.llvm_filecheck.is_none() {
+ self.fatal("missing --llvm-filecheck");
+ }
+
+ let (proc_res, output_path) = self.compile_test_and_save_assembly();
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ let proc_res = self.verify_with_filecheck(&output_path);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("verification with 'FileCheck' failed", &proc_res);
+ }
+ }
+
+ fn charset() -> &'static str {
+ // FreeBSD 10.1 defaults to GDB 6.1.1 which doesn't support "auto" charset
+ if cfg!(target_os = "freebsd") { "ISO-8859-1" } else { "UTF-8" }
+ }
+
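+    /// Runs a rustdoc test: documents the crate, then either checks doc-test line
+    /// numbers or validates the generated HTML with `src/etc/htmldocck.py`.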
+ fn run_rustdoc_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ let out_dir = self.output_base_dir();
+ let _ = fs::remove_dir_all(&out_dir);
+ create_dir_all(&out_dir).unwrap();
+
+ let proc_res = self.document(&out_dir);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("rustdoc failed!", &proc_res);
+ }
+
+ if self.props.check_test_line_numbers_match {
+ self.check_rustdoc_test_option(proc_res);
+ } else {
+ let root = self.config.find_rust_src_root().unwrap();
+ let mut cmd = Command::new(&self.config.python);
+ cmd.arg(root.join("src/etc/htmldocck.py")).arg(&out_dir).arg(&self.testpaths.file);
+ if self.config.bless {
+ cmd.arg("--bless");
+ }
+ let res = self.cmd2procres(&mut cmd);
+ if !res.status.success() {
+ self.fatal_proc_rec_with_ctx("htmldocck failed!", &res, |mut this| {
+ this.compare_to_default_rustdoc(&out_dir)
+ });
+ }
+ }
+ }
+
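+    /// On an htmldocck failure, regenerates the docs with a nightly `rustdoc` from PATH,
+    /// runs `tidy` over both trees, and displays a filtered diff to help debugging.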
+ fn compare_to_default_rustdoc(&mut self, out_dir: &Path) {
+ if !self.config.has_tidy {
+ return;
+ }
+ println!("info: generating a diff against nightly rustdoc");
+
+ let suffix =
+ self.safe_revision().map_or("nightly".into(), |path| path.to_owned() + "-nightly");
+ let compare_dir = output_base_dir(self.config, self.testpaths, Some(&suffix));
+ // Don't give an error if the directory didn't already exist
+ let _ = fs::remove_dir_all(&compare_dir);
+ create_dir_all(&compare_dir).unwrap();
+
+ // We need to create a new struct for the lifetimes on `config` to work.
+ let new_rustdoc = TestCx {
+ config: &Config {
+ // FIXME: use beta or a user-specified rustdoc instead of
+ // hardcoding the default toolchain
+ rustdoc_path: Some("rustdoc".into()),
+ // Needed for building auxiliary docs below
+ rustc_path: "rustc".into(),
+ ..self.config.clone()
+ },
+ ..*self
+ };
+
+ let output_file = TargetLocation::ThisDirectory(new_rustdoc.aux_output_dir_name());
+ let mut rustc = new_rustdoc.make_compile_args(
+ &new_rustdoc.testpaths.file,
+ output_file,
+ EmitMetadata::No,
+ AllowUnused::Yes,
+ );
+ rustc.arg("-L").arg(&new_rustdoc.aux_output_dir_name());
+ new_rustdoc.build_all_auxiliary(&mut rustc);
+
+ let proc_res = new_rustdoc.document(&compare_dir);
+ if !proc_res.status.success() {
+ eprintln!("failed to run nightly rustdoc");
+ return;
+ }
+
+ #[rustfmt::skip]
+ let tidy_args = [
+ "--indent", "yes",
+ "--indent-spaces", "2",
+ "--wrap", "0",
+ "--show-warnings", "no",
+ "--markup", "yes",
+ "--quiet", "yes",
+ "-modify",
+ ];
+ let tidy_dir = |dir| {
+ for entry in walkdir::WalkDir::new(dir) {
+ let entry = entry.expect("failed to read file");
+ if entry.file_type().is_file()
+ && entry.path().extension().and_then(|p| p.to_str()) == Some("html".into())
+ {
+ let status =
+ Command::new("tidy").args(&tidy_args).arg(entry.path()).status().unwrap();
+ // `tidy` returns 1 if it modified the file.
+ assert!(status.success() || status.code() == Some(1));
+ }
+ }
+ };
+ tidy_dir(out_dir);
+ tidy_dir(&compare_dir);
+
+ let pager = {
+ let output = Command::new("git").args(&["config", "--get", "core.pager"]).output().ok();
+ output.and_then(|out| {
+ if out.status.success() {
+ Some(String::from_utf8(out.stdout).expect("invalid UTF8 in git pager"))
+ } else {
+ None
+ }
+ })
+ };
+
+ let diff_filename = format!("build/tmp/rustdoc-compare-{}.diff", std::process::id());
+
+ if !write_filtered_diff(
+ &diff_filename,
+ out_dir,
+ &compare_dir,
+ self.config.verbose,
+ |file_type, extension| {
+ file_type.is_file()
+ && (extension == Some("html".into()) || extension == Some("js".into()))
+ },
+ ) {
+ return;
+ }
+
+ match self.config.color {
+ ColorConfig::AlwaysColor => colored::control::set_override(true),
+ ColorConfig::NeverColor => colored::control::set_override(false),
+ _ => {}
+ }
+
+ if let Some(pager) = pager {
+ let pager = pager.trim();
+ if self.config.verbose {
+ eprintln!("using pager {}", pager);
+ }
+ let output = Command::new(pager)
+ // disable paging; we want this to be non-interactive
+ .env("PAGER", "")
+ .stdin(File::open(&diff_filename).unwrap())
+ // Capture output and print it explicitly so it will in turn be
+ // captured by libtest.
+ .output()
+ .unwrap();
+ assert!(output.status.success());
+ println!("{}", String::from_utf8_lossy(&output.stdout));
+ eprintln!("{}", String::from_utf8_lossy(&output.stderr));
+ } else {
+ use colored::Colorize;
+ eprintln!("warning: no pager configured, falling back to unified diff");
+ eprintln!(
+ "help: try configuring a git pager (e.g. `delta`) with `git config --global core.pager delta`"
+ );
+ let mut out = io::stdout();
+ let mut diff = BufReader::new(File::open(&diff_filename).unwrap());
+ let mut line = Vec::new();
+ loop {
+ line.truncate(0);
+ match diff.read_until(b'\n', &mut line) {
+ Ok(0) => break,
+ Ok(_) => {}
+ Err(e) => eprintln!("ERROR: {:?}", e),
+ }
+ match String::from_utf8(line.clone()) {
+ Ok(line) => {
+ if line.starts_with("+") {
+ write!(&mut out, "{}", line.green()).unwrap();
+ } else if line.starts_with("-") {
+ write!(&mut out, "{}", line.red()).unwrap();
+ } else if line.starts_with("@") {
+ write!(&mut out, "{}", line.blue()).unwrap();
+ } else {
+ out.write_all(line.as_bytes()).unwrap();
+ }
+ }
+ Err(_) => {
+ write!(&mut out, "{}", String::from_utf8_lossy(&line).reversed()).unwrap();
+ }
+ }
+ }
+ };
+ }
+
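+    /// Runs a rustdoc-json test: documents the crate as JSON, then checks it with
+    /// `jsondocck` and `src/etc/check_missing_items.py`.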
+ fn run_rustdoc_json_test(&self) {
+ //FIXME: Add bless option.
+
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ let out_dir = self.output_base_dir();
+ let _ = fs::remove_dir_all(&out_dir);
+ create_dir_all(&out_dir).unwrap();
+
+ let proc_res = self.document(&out_dir);
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("rustdoc failed!", &proc_res);
+ }
+
+ let root = self.config.find_rust_src_root().unwrap();
+ let mut json_out = out_dir.join(self.testpaths.file.file_stem().unwrap());
+ json_out.set_extension("json");
+ let res = self.cmd2procres(
+ Command::new(self.config.jsondocck_path.as_ref().unwrap())
+ .arg("--doc-dir")
+ .arg(root.join(&out_dir))
+ .arg("--template")
+ .arg(&self.testpaths.file),
+ );
+
+ if !res.status.success() {
+ self.fatal_proc_rec_with_ctx("jsondocck failed!", &res, |_| {
+ println!("Rustdoc Output:");
+ proc_res.print_info();
+ })
+ }
+
+ let mut json_out = out_dir.join(self.testpaths.file.file_stem().unwrap());
+ json_out.set_extension("json");
+ let res = self.cmd2procres(
+ Command::new(&self.config.python)
+ .arg(root.join("src/etc/check_missing_items.py"))
+ .arg(&json_out),
+ );
+
+ if !res.status.success() {
+ self.fatal_proc_rec("check_missing_items failed!", &res);
+ }
+ }
+
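+    /// Returns the 1-based line numbers at which doc-test code fences open in `path`,
+    /// and, if requested, records the names of `mod` declarations in `other_files`.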
+ fn get_lines<P: AsRef<Path>>(
+ &self,
+ path: &P,
+ mut other_files: Option<&mut Vec<String>>,
+ ) -> Vec<usize> {
+ let content = fs::read_to_string(&path).unwrap();
+ let mut ignore = false;
+ content
+ .lines()
+ .enumerate()
+ .filter_map(|(line_nb, line)| {
+ if (line.trim_start().starts_with("pub mod ")
+ || line.trim_start().starts_with("mod "))
+ && line.ends_with(';')
+ {
+ if let Some(ref mut other_files) = other_files {
+ other_files.push(line.rsplit("mod ").next().unwrap().replace(";", ""));
+ }
+ None
+ } else {
+ let sline = line.split("///").last().unwrap_or("");
+ let line = sline.trim_start();
+ if line.starts_with("```") {
+ if ignore {
+ ignore = false;
+ None
+ } else {
+ ignore = true;
+ Some(line_nb + 1)
+ }
+ } else {
+ None
+ }
+ }
+ })
+ .collect()
+ }
+
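+    /// Cross-checks rustdoc's `test <file> - ... (line N)` output against the doc-test
+    /// fences found in the source files, failing on any mismatch.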
+ fn check_rustdoc_test_option(&self, res: ProcRes) {
+ let mut other_files = Vec::new();
+ let mut files: HashMap<String, Vec<usize>> = HashMap::new();
+ let cwd = env::current_dir().unwrap();
+ files.insert(
+ self.testpaths
+ .file
+ .strip_prefix(&cwd)
+ .unwrap_or(&self.testpaths.file)
+ .to_str()
+ .unwrap()
+ .replace('\\', "/"),
+ self.get_lines(&self.testpaths.file, Some(&mut other_files)),
+ );
+ for other_file in other_files {
+ let mut path = self.testpaths.file.clone();
+ path.set_file_name(&format!("{}.rs", other_file));
+ files.insert(
+ path.strip_prefix(&cwd).unwrap_or(&path).to_str().unwrap().replace('\\', "/"),
+ self.get_lines(&path, None),
+ );
+ }
+
+ let mut tested = 0;
+ for _ in res.stdout.split('\n').filter(|s| s.starts_with("test ")).inspect(|s| {
+ if let Some((left, right)) = s.split_once(" - ") {
+ let path = left.rsplit("test ").next().unwrap();
+ if let Some(ref mut v) = files.get_mut(&path.replace('\\', "/")) {
+ tested += 1;
+ let mut iter = right.split("(line ");
+ iter.next();
+ let line = iter
+ .next()
+ .unwrap_or(")")
+ .split(')')
+ .next()
+ .unwrap_or("0")
+ .parse()
+ .unwrap_or(0);
+ if let Ok(pos) = v.binary_search(&line) {
+ v.remove(pos);
+ } else {
+ self.fatal_proc_rec(
+                            &format!("Doc test not found: \"{}\" in \"{}\":{:?}", s, path, v),
+ &res,
+ );
+ }
+ }
+ }
+ }) {}
+ if tested == 0 {
+            self.fatal_proc_rec(&format!("No tests were found... {:?}", files), &res);
+ } else {
+ for (entry, v) in &files {
+ if !v.is_empty() {
+ self.fatal_proc_rec(
+ &format!(
+                            "Test not found at line{} \"{}\":{:?}",
+ if v.len() > 1 { "s" } else { "" },
+ entry,
+ v
+ ),
+ &res,
+ );
+ }
+ }
+ }
+ }
+
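+    /// Compares the `MONO_ITEM` lines printed by the compiler with the items (and
+    /// codegen-unit assignments) expected by the test's error annotations.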
+ fn run_codegen_units_test(&self) {
+ assert!(self.revision.is_none(), "revisions not relevant here");
+
+ let proc_res = self.compile_test(WillExecute::No, EmitMetadata::No);
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ self.check_no_compiler_crash(&proc_res, self.props.should_ice);
+
+ const PREFIX: &str = "MONO_ITEM ";
+ const CGU_MARKER: &str = "@@";
+
+ let actual: Vec<MonoItem> = proc_res
+ .stdout
+ .lines()
+ .filter(|line| line.starts_with(PREFIX))
+ .map(|line| str_to_mono_item(line, true))
+ .collect();
+
+ let expected: Vec<MonoItem> = errors::load_errors(&self.testpaths.file, None)
+ .iter()
+ .map(|e| str_to_mono_item(&e.msg[..], false))
+ .collect();
+
+ let mut missing = Vec::new();
+ let mut wrong_cgus = Vec::new();
+
+ for expected_item in &expected {
+ let actual_item_with_same_name = actual.iter().find(|ti| ti.name == expected_item.name);
+
+ if let Some(actual_item) = actual_item_with_same_name {
+ if !expected_item.codegen_units.is_empty() &&
+ // Also check for codegen units
+ expected_item.codegen_units != actual_item.codegen_units
+ {
+ wrong_cgus.push((expected_item.clone(), actual_item.clone()));
+ }
+ } else {
+ missing.push(expected_item.string.clone());
+ }
+ }
+
+ let unexpected: Vec<_> = actual
+ .iter()
+ .filter(|acgu| !expected.iter().any(|ecgu| acgu.name == ecgu.name))
+ .map(|acgu| acgu.string.clone())
+ .collect();
+
+ if !missing.is_empty() {
+ missing.sort();
+
+ println!("\nThese items should have been contained but were not:\n");
+
+ for item in &missing {
+ println!("{}", item);
+ }
+
+ println!("\n");
+ }
+
+ if !unexpected.is_empty() {
+ let sorted = {
+ let mut sorted = unexpected.clone();
+ sorted.sort();
+ sorted
+ };
+
+ println!("\nThese items were contained but should not have been:\n");
+
+ for item in sorted {
+ println!("{}", item);
+ }
+
+ println!("\n");
+ }
+
+ if !wrong_cgus.is_empty() {
+ wrong_cgus.sort_by_key(|pair| pair.0.name.clone());
+ println!("\nThe following items were assigned to wrong codegen units:\n");
+
+ for &(ref expected_item, ref actual_item) in &wrong_cgus {
+ println!("{}", expected_item.name);
+ println!(" expected: {}", codegen_units_to_str(&expected_item.codegen_units));
+ println!(" actual: {}", codegen_units_to_str(&actual_item.codegen_units));
+ println!();
+ }
+ }
+
+ if !(missing.is_empty() && unexpected.is_empty() && wrong_cgus.is_empty()) {
+ panic!();
+ }
+
+ #[derive(Clone, Eq, PartialEq)]
+ struct MonoItem {
+ name: String,
+ codegen_units: HashSet<String>,
+ string: String,
+ }
+
+ // [MONO_ITEM] name [@@ (cgu)+]
+ fn str_to_mono_item(s: &str, cgu_has_crate_disambiguator: bool) -> MonoItem {
+ let s = if s.starts_with(PREFIX) { (&s[PREFIX.len()..]).trim() } else { s.trim() };
+
+ let full_string = format!("{}{}", PREFIX, s);
+
+ let parts: Vec<&str> =
+ s.split(CGU_MARKER).map(str::trim).filter(|s| !s.is_empty()).collect();
+
+ let name = parts[0].trim();
+
+ let cgus = if parts.len() > 1 {
+ let cgus_str = parts[1];
+
+ cgus_str
+ .split(' ')
+ .map(str::trim)
+ .filter(|s| !s.is_empty())
+ .map(|s| {
+ if cgu_has_crate_disambiguator {
+ remove_crate_disambiguators_from_set_of_cgu_names(s)
+ } else {
+ s.to_string()
+ }
+ })
+ .collect()
+ } else {
+ HashSet::new()
+ };
+
+ MonoItem { name: name.to_owned(), codegen_units: cgus, string: full_string }
+ }
+
+ fn codegen_units_to_str(cgus: &HashSet<String>) -> String {
+ let mut cgus: Vec<_> = cgus.iter().collect();
+ cgus.sort();
+
+ let mut string = String::new();
+ for cgu in cgus {
+ string.push_str(&cgu[..]);
+ string.push_str(" ");
+ }
+
+ string
+ }
+
+ // Given a cgu-name-prefix of the form <crate-name>.<crate-disambiguator> or
+ // the form <crate-name1>.<crate-disambiguator1>-in-<crate-name2>.<crate-disambiguator2>,
+ // remove all crate-disambiguators.
+ fn remove_crate_disambiguator_from_cgu(cgu: &str) -> String {
+ lazy_static! {
+ static ref RE: Regex =
+ Regex::new(r"^[^\.]+(?P<d1>\.[[:alnum:]]+)(-in-[^\.]+(?P<d2>\.[[:alnum:]]+))?")
+ .unwrap();
+ }
+
+ let captures =
+ RE.captures(cgu).unwrap_or_else(|| panic!("invalid cgu name encountered: {}", cgu));
+
+ let mut new_name = cgu.to_owned();
+
+ if let Some(d2) = captures.name("d2") {
+ new_name.replace_range(d2.start()..d2.end(), "");
+ }
+
+ let d1 = captures.name("d1").unwrap();
+ new_name.replace_range(d1.start()..d1.end(), "");
+
+ new_name
+ }
+
+ // The name of merged CGUs is constructed as the names of the original
+ // CGUs joined with "--". This function splits such composite CGU names
+ // and handles each component individually.
+ fn remove_crate_disambiguators_from_set_of_cgu_names(cgus: &str) -> String {
+ cgus.split("--")
+ .map(|cgu| remove_crate_disambiguator_from_cgu(cgu))
+ .collect::<Vec<_>>()
+ .join("--")
+ }
+ }
+
+ fn init_incremental_test(&self) {
+ // (See `run_incremental_test` for an overview of how incremental tests work.)
+
+ // Before any of the revisions have executed, create the
+ // incremental workproduct directory. Delete any old
+ // incremental work products that may be there from prior
+ // runs.
+ let incremental_dir = self.props.incremental_dir.as_ref().unwrap();
+ if incremental_dir.exists() {
+ // Canonicalizing the path will convert it to the //?/ format
+ // on Windows, which enables paths longer than 260 character
+            // on Windows, which enables paths longer than 260 characters.
+ fs::remove_dir_all(canonicalized).unwrap();
+ }
+ fs::create_dir_all(&incremental_dir).unwrap();
+
+ if self.config.verbose {
+ println!("init_incremental_test: incremental_dir={}", incremental_dir.display());
+ }
+ }
+
+ fn run_incremental_test(&self) {
+ // Basic plan for a test incremental/foo/bar.rs:
+ // - load list of revisions rpass1, cfail2, rpass3
+ // - each should begin with `rpass`, `cfail`, or `rfail`
+ // - if `rpass`, expect compile and execution to succeed
+ // - if `cfail`, expect compilation to fail
+ // - if `rfail`, expect execution to fail
+ // - create a directory build/foo/bar.incremental
+ // - compile foo/bar.rs with -C incremental=.../foo/bar.incremental and -C rpass1
+ // - because name of revision starts with "rpass", expect success
+ // - compile foo/bar.rs with -C incremental=.../foo/bar.incremental and -C cfail2
+ // - because name of revision starts with "cfail", expect an error
+ // - load expected errors as usual, but filter for those that end in `[rfail2]`
+ // - compile foo/bar.rs with -C incremental=.../foo/bar.incremental and -C rpass3
+ // - because name of revision starts with "rpass", expect success
+ // - execute build/foo/bar.exe and save output
+ //
+ // FIXME -- use non-incremental mode as an oracle? That doesn't apply
+ // to #[rustc_dirty] and clean tests I guess
+
+ let revision = self.revision.expect("incremental tests require a list of revisions");
+
+ // Incremental workproduct directory should have already been created.
+ let incremental_dir = self.props.incremental_dir.as_ref().unwrap();
+ assert!(incremental_dir.exists(), "init_incremental_test failed to create incremental dir");
+
+ if self.config.verbose {
+ print!("revision={:?} props={:#?}", revision, self.props);
+ }
+
+ if revision.starts_with("rpass") {
+ if self.props.should_ice {
+ self.fatal("can only use should-ice in cfail tests");
+ }
+ self.run_rpass_test();
+ } else if revision.starts_with("rfail") {
+ if self.props.should_ice {
+ self.fatal("can only use should-ice in cfail tests");
+ }
+ self.run_rfail_test();
+ } else if revision.starts_with("cfail") {
+ self.run_cfail_test();
+ } else {
+ self.fatal("revision name must begin with rpass, rfail, or cfail");
+ }
+ }
+
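+    /// Runs a `run-make` test by invoking `make` (or `gmake` on the BSDs) in the test's
+    /// directory with the environment variables the Makefiles expect.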
+ fn run_rmake_test(&self) {
+ let cwd = env::current_dir().unwrap();
+ let src_root = self.config.src_base.parent().unwrap().parent().unwrap().parent().unwrap();
+ let src_root = cwd.join(&src_root);
+
+ let tmpdir = cwd.join(self.output_base_name());
+ if tmpdir.exists() {
+ self.aggressive_rm_rf(&tmpdir).unwrap();
+ }
+ create_dir_all(&tmpdir).unwrap();
+
+ let host = &self.config.host;
+ let make = if host.contains("dragonfly")
+ || host.contains("freebsd")
+ || host.contains("netbsd")
+ || host.contains("openbsd")
+ {
+ "gmake"
+ } else {
+ "make"
+ };
+
+ let mut cmd = Command::new(make);
+ cmd.current_dir(&self.testpaths.file)
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .env("TARGET", &self.config.target)
+ .env("PYTHON", &self.config.python)
+ .env("S", src_root)
+ .env("RUST_BUILD_STAGE", &self.config.stage_id)
+ .env("RUSTC", cwd.join(&self.config.rustc_path))
+ .env("TMPDIR", &tmpdir)
+ .env("LD_LIB_PATH_ENVVAR", dylib_env_var())
+ .env("HOST_RPATH_DIR", cwd.join(&self.config.compile_lib_path))
+ .env("TARGET_RPATH_DIR", cwd.join(&self.config.run_lib_path))
+ .env("LLVM_COMPONENTS", &self.config.llvm_components)
+            // We definitely don't want these tests to run in parallel, so make
+            // sure they don't have access to these vars if we run via `make`
+            // at the top level.
+ .env_remove("MAKEFLAGS")
+ .env_remove("MFLAGS")
+ .env_remove("CARGO_MAKEFLAGS");
+
+ if let Some(ref rustdoc) = self.config.rustdoc_path {
+ cmd.env("RUSTDOC", cwd.join(rustdoc));
+ }
+
+ if let Some(ref rust_demangler) = self.config.rust_demangler_path {
+ cmd.env("RUST_DEMANGLER", cwd.join(rust_demangler));
+ }
+
+ if let Some(ref node) = self.config.nodejs {
+ cmd.env("NODE", node);
+ }
+
+ if let Some(ref linker) = self.config.linker {
+ cmd.env("RUSTC_LINKER", linker);
+ }
+
+ if let Some(ref clang) = self.config.run_clang_based_tests_with {
+ cmd.env("CLANG", clang);
+ }
+
+ if let Some(ref filecheck) = self.config.llvm_filecheck {
+ cmd.env("LLVM_FILECHECK", filecheck);
+ }
+
+ if let Some(ref llvm_bin_dir) = self.config.llvm_bin_dir {
+ cmd.env("LLVM_BIN_DIR", llvm_bin_dir);
+ }
+
+ // We don't want RUSTFLAGS set from the outside to interfere with
+ // compiler flags set in the test cases:
+ cmd.env_remove("RUSTFLAGS");
+
+ // Use dynamic musl for tests because static doesn't allow creating dylibs
+ if self.config.host.contains("musl") {
+ cmd.env("RUSTFLAGS", "-Ctarget-feature=-crt-static").env("IS_MUSL_HOST", "1");
+ }
+
+ if self.config.bless {
+ cmd.env("RUSTC_BLESS_TEST", "--bless");
+ // Assume this option is active if the environment variable is "defined", with _any_ value.
+ // As an example, a `Makefile` can use this option by:
+ //
+ // ifdef RUSTC_BLESS_TEST
+ // cp "$(TMPDIR)"/actual_something.ext expected_something.ext
+ // else
+ // $(DIFF) expected_something.ext "$(TMPDIR)"/actual_something.ext
+ // endif
+ }
+
+ if self.config.target.contains("msvc") && self.config.cc != "" {
+ // We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe`
+ // and that `lib.exe` lives next to it.
+ let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe");
+
+            // MSYS doesn't like passing flags of the form `/foo` as it thinks it's
+            // a path and instead passes `C:\msys64\foo`, so convert all `/`-style
+            // arguments intended for MSVC into `-`-style arguments here.
+ let cflags = self
+ .config
+ .cflags
+ .split(' ')
+ .map(|s| s.replace("/", "-"))
+ .collect::<Vec<_>>()
+ .join(" ");
+ let cxxflags = self
+ .config
+ .cxxflags
+ .split(' ')
+ .map(|s| s.replace("/", "-"))
+ .collect::<Vec<_>>()
+ .join(" ");
+
+ cmd.env("IS_MSVC", "1")
+ .env("IS_WINDOWS", "1")
+ .env("MSVC_LIB", format!("'{}' -nologo", lib.display()))
+ .env("CC", format!("'{}' {}", self.config.cc, cflags))
+ .env("CXX", format!("'{}' {}", &self.config.cxx, cxxflags));
+ } else {
+ cmd.env("CC", format!("{} {}", self.config.cc, self.config.cflags))
+ .env("CXX", format!("{} {}", self.config.cxx, self.config.cxxflags))
+ .env("AR", &self.config.ar);
+
+ if self.config.target.contains("windows") {
+ cmd.env("IS_WINDOWS", "1");
+ }
+ }
+
+ let output = self.read2_abbreviated(cmd.spawn().expect("failed to spawn `make`"));
+ if !output.status.success() {
+ let res = ProcRes {
+ status: output.status,
+ stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
+ stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
+ cmdline: format!("{:?}", cmd),
+ };
+ self.fatal_proc_rec("make failed", &res);
+ }
+ }
+
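+    /// Recursively removes `path`, clearing the read-only flag on Windows where a plain
+    /// `remove_file` would fail with `PermissionDenied`.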
+ fn aggressive_rm_rf(&self, path: &Path) -> io::Result<()> {
+ for e in path.read_dir()? {
+ let entry = e?;
+ let path = entry.path();
+ if entry.file_type()?.is_dir() {
+ self.aggressive_rm_rf(&path)?;
+ } else {
+                // On Windows, also remove read-only files (by default `remove_file` can't delete them)
+ fs::remove_file(&path).or_else(|e| {
+ if cfg!(windows) && e.kind() == io::ErrorKind::PermissionDenied {
+ let mut meta = entry.metadata()?.permissions();
+ meta.set_readonly(false);
+ fs::set_permissions(&path, meta)?;
+ fs::remove_file(&path)
+ } else {
+ Err(e)
+ }
+ })?;
+ }
+ }
+ fs::remove_dir(path)
+ }
+
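+    /// Runs a rustdoc-js test: documents the crate and drives
+    /// `src/tools/rustdoc-js/tester.js` under Node.js against the generated docs.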
+ fn run_js_doc_test(&self) {
+ if let Some(nodejs) = &self.config.nodejs {
+ let out_dir = self.output_base_dir();
+
+ self.document(&out_dir);
+
+ let root = self.config.find_rust_src_root().unwrap();
+ let file_stem =
+ self.testpaths.file.file_stem().and_then(|f| f.to_str()).expect("no file stem");
+ let res = self.cmd2procres(
+ Command::new(&nodejs)
+ .arg(root.join("src/tools/rustdoc-js/tester.js"))
+ .arg("--doc-folder")
+ .arg(out_dir)
+ .arg("--crate-name")
+ .arg(file_stem.replace("-", "_"))
+ .arg("--test-file")
+ .arg(self.testpaths.file.with_extension("js")),
+ );
+ if !res.status.success() {
+ self.fatal_proc_rec("rustdoc-js test failed!", &res);
+ }
+ } else {
+            self.fatal("no Node.js");
+ }
+ }
+
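+    /// Compares normalized stdout/stderr against the expected snapshot files for the
+    /// given output kind and returns the number of mismatches found.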
+ fn load_compare_outputs(
+ &self,
+ proc_res: &ProcRes,
+ output_kind: TestOutput,
+ explicit_format: bool,
+ ) -> usize {
+ let stderr_bits = format!("{}.stderr", get_pointer_width(&self.config.target));
+ let (stderr_kind, stdout_kind) = match output_kind {
+ TestOutput::Compile => (
+ {
+ if self.props.stderr_per_bitwidth { &stderr_bits } else { UI_STDERR }
+ },
+ UI_STDOUT,
+ ),
+ TestOutput::Run => (UI_RUN_STDERR, UI_RUN_STDOUT),
+ };
+
+ let expected_stderr = self.load_expected_output(stderr_kind);
+ let expected_stdout = self.load_expected_output(stdout_kind);
+
+ let normalized_stdout = match output_kind {
+ TestOutput::Run if self.config.remote_test_client.is_some() => {
+ // When tests are run using the remote-test-client, the string
+                // 'uploaded "$TEST_BUILD_DIR/<test_executable>", waiting for result'
+ // is printed to stdout by the client and then captured in the ProcRes,
+ // so it needs to be removed when comparing the run-pass test execution output
+ lazy_static! {
+ static ref REMOTE_TEST_RE: Regex = Regex::new(
+ "^uploaded \"\\$TEST_BUILD_DIR(/[[:alnum:]_\\-.]+)+\", waiting for result\n"
+ )
+ .unwrap();
+ }
+ REMOTE_TEST_RE
+ .replace(
+ &self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout),
+ "",
+ )
+ .to_string()
+ }
+ _ => self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout),
+ };
+
+ let stderr = if explicit_format {
+ proc_res.stderr.clone()
+ } else {
+ json::extract_rendered(&proc_res.stderr)
+ };
+
+ let normalized_stderr = self.normalize_output(&stderr, &self.props.normalize_stderr);
+ let mut errors = 0;
+ match output_kind {
+ TestOutput::Compile => {
+ if !self.props.dont_check_compiler_stdout {
+ errors +=
+ self.compare_output(stdout_kind, &normalized_stdout, &expected_stdout);
+ }
+ if !self.props.dont_check_compiler_stderr {
+ errors +=
+ self.compare_output(stderr_kind, &normalized_stderr, &expected_stderr);
+ }
+ }
+ TestOutput::Run => {
+ errors += self.compare_output(stdout_kind, &normalized_stdout, &expected_stdout);
+ errors += self.compare_output(stderr_kind, &normalized_stderr, &expected_stderr);
+ }
+ }
+ errors
+ }
+
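+    /// Runs a UI test: compiles (and, if required, runs) the test, compares the output
+    /// snapshots, checks error annotations and `error-pattern`s, and handles
+    /// `run-rustfix` by applying and re-compiling the suggested fixes.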
+ fn run_ui_test(&self) {
+ if let Some(FailMode::Build) = self.props.fail_mode {
+ // Make sure a build-fail test cannot fail due to failing analysis (e.g. typeck).
+ let pm = Some(PassMode::Check);
+ let proc_res = self.compile_test_general(WillExecute::No, EmitMetadata::Yes, pm);
+ self.check_if_test_should_compile(&proc_res, pm);
+ }
+
+ let pm = self.pass_mode();
+ let should_run = self.should_run(pm);
+ let emit_metadata = self.should_emit_metadata(pm);
+ let proc_res = self.compile_test(should_run, emit_metadata);
+ self.check_if_test_should_compile(&proc_res, pm);
+
+        // If the user specified an error format in the ui test, print the output to the
+        // stderr file as-is; otherwise extract the rendered error messages from the JSON
+        // output and print those.
+ let explicit = self.props.compile_flags.iter().any(|s| s.contains("--error-format"));
+
+ let expected_fixed = self.load_expected_output(UI_FIXED);
+
+ self.check_and_prune_duplicate_outputs(&proc_res, &[], &[]);
+
+ let mut errors = self.load_compare_outputs(&proc_res, TestOutput::Compile, explicit);
+ let rustfix_input = json::rustfix_diagnostics_only(&proc_res.stderr);
+
+ if self.config.compare_mode.is_some() {
+ // don't test rustfix with nll right now
+ } else if self.config.rustfix_coverage {
+ // Find out which tests have `MachineApplicable` suggestions but are missing
+ // `run-rustfix` or `run-rustfix-only-machine-applicable` headers.
+ //
+ // This will return an empty `Vec` in case the executed test file has a
+ // `compile-flags: --error-format=xxxx` header with a value other than `json`.
+ let suggestions = get_suggestions_from_json(
+ &rustfix_input,
+ &HashSet::new(),
+ Filter::MachineApplicableOnly,
+ )
+ .unwrap_or_default();
+ if !suggestions.is_empty()
+ && !self.props.run_rustfix
+ && !self.props.rustfix_only_machine_applicable
+ {
+ let mut coverage_file_path = self.config.build_base.clone();
+ coverage_file_path.push("rustfix_missing_coverage.txt");
+ debug!("coverage_file_path: {}", coverage_file_path.display());
+
+ let mut file = OpenOptions::new()
+ .create(true)
+ .append(true)
+ .open(coverage_file_path.as_path())
+ .expect("could not create or open file");
+
+ if writeln!(file, "{}", self.testpaths.file.display()).is_err() {
+ panic!("couldn't write to {}", coverage_file_path.display());
+ }
+ }
+ } else if self.props.run_rustfix {
+ // Apply suggestions from rustc to the code itself
+ let unfixed_code = self.load_expected_output_from_path(&self.testpaths.file).unwrap();
+ let suggestions = get_suggestions_from_json(
+ &rustfix_input,
+ &HashSet::new(),
+ if self.props.rustfix_only_machine_applicable {
+ Filter::MachineApplicableOnly
+ } else {
+ Filter::Everything
+ },
+ )
+ .unwrap();
+ let fixed_code = apply_suggestions(&unfixed_code, &suggestions).unwrap_or_else(|e| {
+ panic!(
+ "failed to apply suggestions for {:?} with rustfix: {}",
+ self.testpaths.file, e
+ )
+ });
+
+ errors += self.compare_output("fixed", &fixed_code, &expected_fixed);
+ } else if !expected_fixed.is_empty() {
+ panic!(
+ "the `// run-rustfix` directive wasn't found but a `*.fixed` \
+ file was found"
+ );
+ }
+
+ if errors > 0 {
+ println!("To update references, rerun the tests and pass the `--bless` flag");
+ let relative_path_to_file =
+ self.testpaths.relative_dir.join(self.testpaths.file.file_name().unwrap());
+ println!(
+ "To only update this specific test, also pass `--test-args {}`",
+ relative_path_to_file.display(),
+ );
+ self.fatal_proc_rec(
+ &format!("{} errors occurred comparing output.", errors),
+ &proc_res,
+ );
+ }
+
+ let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
+
+ if let WillExecute::Yes = should_run {
+ let proc_res = self.exec_compiled_test();
+ let run_output_errors = if self.props.check_run_results {
+ self.load_compare_outputs(&proc_res, TestOutput::Run, explicit)
+ } else {
+ 0
+ };
+ if run_output_errors > 0 {
+ self.fatal_proc_rec(
+ &format!("{} errors occurred comparing run output.", run_output_errors),
+ &proc_res,
+ );
+ }
+ if self.should_run_successfully(pm) {
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ } else if proc_res.status.success() {
+ self.fatal_proc_rec("test run succeeded!", &proc_res);
+ }
+
+ if !self.props.error_patterns.is_empty() || !self.props.regex_error_patterns.is_empty()
+ {
+ // "// error-pattern" comments
+ let output_to_check = self.get_output(&proc_res);
+ self.check_all_error_patterns(&output_to_check, &proc_res, pm);
+ }
+ }
+
+ debug!(
+ "run_ui_test: explicit={:?} config.compare_mode={:?} expected_errors={:?} \
+ proc_res.status={:?} props.error_patterns={:?}",
+ explicit,
+ self.config.compare_mode,
+ expected_errors,
+ proc_res.status,
+ self.props.error_patterns
+ );
+ if !explicit && self.config.compare_mode.is_none() {
+ let check_patterns = should_run == WillExecute::No
+ && (!self.props.error_patterns.is_empty()
+ || !self.props.regex_error_patterns.is_empty());
+
+ let check_annotations = !check_patterns || !expected_errors.is_empty();
+
+ if check_patterns {
+ // "// error-pattern" comments
+ let output_to_check = self.get_output(&proc_res);
+ self.check_all_error_patterns(&output_to_check, &proc_res, pm);
+ }
+
+ if check_annotations {
+ // "//~ERROR comments"
+ self.check_expected_errors(expected_errors, &proc_res);
+ }
+ }
+
+ if self.props.run_rustfix && self.config.compare_mode.is_none() {
+ // And finally, compile the fixed code and make sure it both
+ // succeeds and has no diagnostics.
+ let mut rustc = self.make_compile_args(
+ &self.testpaths.file.with_extension(UI_FIXED),
+ TargetLocation::ThisFile(self.make_exe_name()),
+ emit_metadata,
+ AllowUnused::No,
+ );
+ rustc.arg("-L").arg(&self.aux_output_dir_name());
+ let res = self.compose_and_run_compiler(rustc, None);
+ if !res.status.success() {
+ self.fatal_proc_rec("failed to compile fixed code", &res);
+ }
+ if !res.stderr.is_empty()
+ && !self.props.rustfix_only_machine_applicable
+ && !json::rustfix_diagnostics_only(&res.stderr).is_empty()
+ {
+ self.fatal_proc_rec("fixed code is still producing diagnostics", &res);
+ }
+ }
+ }
+
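+    /// Runs a mir-opt test: compiles the test, checks the emitted MIR dumps, and
+    /// executes the binary when the test is expected to run.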
+ fn run_mir_opt_test(&self) {
+ let pm = self.pass_mode();
+ let should_run = self.should_run(pm);
+ let emit_metadata = self.should_emit_metadata(pm);
+ let proc_res = self.compile_test(should_run, emit_metadata);
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("compilation failed!", &proc_res);
+ }
+
+ self.check_mir_dump();
+
+ if let WillExecute::Yes = should_run {
+ let proc_res = self.exec_compiled_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ }
+ }
+
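+    /// Processes the `// EMIT_MIR` directives in the test source: each names either a
+    /// single dump file or a pair of passes to diff, and the result is compared against
+    /// (or, under `--bless`, written to) the expected file next to the test.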
+ fn check_mir_dump(&self) {
+ let test_file_contents = fs::read_to_string(&self.testpaths.file).unwrap();
+
+ let test_dir = self.testpaths.file.parent().unwrap();
+ let test_crate =
+ self.testpaths.file.file_stem().unwrap().to_str().unwrap().replace("-", "_");
+
+ let mut bit_width = String::new();
+ if test_file_contents.lines().any(|l| l == "// EMIT_MIR_FOR_EACH_BIT_WIDTH") {
+ bit_width = format!(".{}", get_pointer_width(&self.config.target));
+ }
+
+ if self.config.bless {
+ for e in
+ glob(&format!("{}/{}.*{}.mir", test_dir.display(), test_crate, bit_width)).unwrap()
+ {
+ std::fs::remove_file(e.unwrap()).unwrap();
+ }
+ for e in
+ glob(&format!("{}/{}.*{}.diff", test_dir.display(), test_crate, bit_width)).unwrap()
+ {
+ std::fs::remove_file(e.unwrap()).unwrap();
+ }
+ }
+
+ for l in test_file_contents.lines() {
+ if l.starts_with("// EMIT_MIR ") {
+ let test_name = l.trim_start_matches("// EMIT_MIR ").trim();
+ let mut test_names = test_name.split(' ');
+ // sometimes we specify two files so that we get a diff between the two files
+ let test_name = test_names.next().unwrap();
+ let mut expected_file;
+ let from_file;
+ let to_file;
+
+ if test_name.ends_with(".diff") {
+ let trimmed = test_name.trim_end_matches(".diff");
+ let test_against = format!("{}.after.mir", trimmed);
+ from_file = format!("{}.before.mir", trimmed);
+ expected_file = format!("{}{}.diff", trimmed, bit_width);
+ assert!(
+ test_names.next().is_none(),
+ "two mir pass names specified for MIR diff"
+ );
+ to_file = Some(test_against);
+ } else if let Some(first_pass) = test_names.next() {
+ let second_pass = test_names.next().unwrap();
+ assert!(
+ test_names.next().is_none(),
+ "three mir pass names specified for MIR diff"
+ );
+ expected_file =
+ format!("{}{}.{}-{}.diff", test_name, bit_width, first_pass, second_pass);
+ let second_file = format!("{}.{}.mir", test_name, second_pass);
+ from_file = format!("{}.{}.mir", test_name, first_pass);
+ to_file = Some(second_file);
+ } else {
+ let ext_re = Regex::new(r#"(\.(mir|dot|html))$"#).unwrap();
+ let cap = ext_re
+ .captures_iter(test_name)
+ .next()
+ .expect("test_name has an invalid extension");
+ let extension = cap.get(1).unwrap().as_str();
+ expected_file = format!(
+ "{}{}{}",
+ test_name.trim_end_matches(extension),
+ bit_width,
+ extension,
+ );
+ from_file = test_name.to_string();
+ assert!(
+ test_names.next().is_none(),
+ "two mir pass names specified for MIR dump"
+ );
+ to_file = None;
+ };
+ if !expected_file.starts_with(&test_crate) {
+ expected_file = format!("{}.{}", test_crate, expected_file);
+ }
+ let expected_file = test_dir.join(expected_file);
+
+ let dumped_string = if let Some(after) = to_file {
+ self.diff_mir_files(from_file.into(), after.into())
+ } else {
+ let mut output_file = PathBuf::new();
+ output_file.push(self.get_mir_dump_dir());
+ output_file.push(&from_file);
+ debug!(
+ "comparing the contents of: {} with {}",
+ output_file.display(),
+ expected_file.display()
+ );
+ if !output_file.exists() {
+ panic!(
+ "Output file `{}` from test does not exist, available files are in `{}`",
+ output_file.display(),
+ output_file.parent().unwrap().display()
+ );
+ }
+ self.check_mir_test_timestamp(&from_file, &output_file);
+ let dumped_string = fs::read_to_string(&output_file).unwrap();
+ self.normalize_output(&dumped_string, &[])
+ };
+
+ if self.config.bless {
+ let _ = std::fs::remove_file(&expected_file);
+ std::fs::write(expected_file, dumped_string.as_bytes()).unwrap();
+ } else {
+ if !expected_file.exists() {
+ panic!(
+ "Output file `{}` from test does not exist",
+ expected_file.display()
+ );
+ }
+ let expected_string = fs::read_to_string(&expected_file).unwrap();
+ if dumped_string != expected_string {
+ print!("{}", write_diff(&expected_string, &dumped_string, 3));
+ panic!(
+ "Actual MIR output differs from expected MIR output {}",
+ expected_file.display()
+ );
+ }
+ }
+ }
+ }
+ }
+
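+    /// Produces a line-by-line diff (`-`/`+`/` ` prefixes) of two normalized MIR dumps.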
+ fn diff_mir_files(&self, before: PathBuf, after: PathBuf) -> String {
+ let to_full_path = |path: PathBuf| {
+ let full = self.get_mir_dump_dir().join(&path);
+ if !full.exists() {
+ panic!(
+ "the mir dump file for {} does not exist (requested in {})",
+ path.display(),
+ self.testpaths.file.display(),
+ );
+ }
+ full
+ };
+ let before = to_full_path(before);
+ let after = to_full_path(after);
+ debug!("comparing the contents of: {} with {}", before.display(), after.display());
+ let before = fs::read_to_string(before).unwrap();
+ let after = fs::read_to_string(after).unwrap();
+ let before = self.normalize_output(&before, &[]);
+ let after = self.normalize_output(&after, &[]);
+ let mut dumped_string = String::new();
+ for result in diff::lines(&before, &after) {
+ use std::fmt::Write;
+ match result {
+ diff::Result::Left(s) => writeln!(dumped_string, "- {}", s).unwrap(),
+ diff::Result::Right(s) => writeln!(dumped_string, "+ {}", s).unwrap(),
+ diff::Result::Both(s, _) => writeln!(dumped_string, " {}", s).unwrap(),
+ }
+ }
+ dumped_string
+ }
+
+ fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Path) {
+ let t = |file| fs::metadata(file).unwrap().modified().unwrap();
+ let source_file = &self.testpaths.file;
+ let output_time = t(output_file);
+ let source_time = t(source_file);
+ if source_time > output_time {
+ debug!("source file time: {:?} output file time: {:?}", source_time, output_time);
+ panic!(
+ "test source file `{}` is newer than potentially stale output file `{}`.",
+ source_file.display(),
+ test_name
+ );
+ }
+ }
+
+ fn get_mir_dump_dir(&self) -> PathBuf {
+ let mut mir_dump_dir = PathBuf::from(self.config.build_base.as_path());
+ debug!("input_file: {:?}", self.testpaths.file);
+ mir_dump_dir.push(&self.testpaths.relative_dir);
+ mir_dump_dir.push(self.testpaths.file.file_stem().unwrap());
+ mir_dump_dir
+ }
+
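+    /// Normalizes compiler output before comparison: rewrites source and build paths to
+    /// placeholders such as `$DIR` and `$SRC_DIR`, strips `//~` annotations, masks v0
+    /// mangling hashes, and applies any test-specific normalization rules.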
+ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> String {
+ let cflags = self.props.compile_flags.join(" ");
+ let json = cflags.contains("--error-format json")
+ || cflags.contains("--error-format pretty-json")
+ || cflags.contains("--error-format=json")
+ || cflags.contains("--error-format=pretty-json")
+ || cflags.contains("--output-format json")
+ || cflags.contains("--output-format=json");
+
+ let mut normalized = output.to_string();
+
+ let mut normalize_path = |from: &Path, to: &str| {
+ let mut from = from.display().to_string();
+ if json {
+ from = from.replace("\\", "\\\\");
+ }
+ normalized = normalized.replace(&from, to);
+ };
+
+ let parent_dir = self.testpaths.file.parent().unwrap();
+ normalize_path(parent_dir, "$DIR");
+
+ // Paths into the libstd/libcore
+ let base_dir = self.config.src_base.parent().unwrap().parent().unwrap().parent().unwrap();
+ let src_dir = base_dir.join("library");
+ normalize_path(&src_dir, "$SRC_DIR");
+
+ // `ui-fulldeps` tests can show paths to the compiler source when testing macros from
+ // `rustc_macros`
+ // eg. /home/user/rust/compiler
+ let compiler_src_dir = base_dir.join("compiler");
+ normalize_path(&compiler_src_dir, "$COMPILER_DIR");
+
+ if let Some(virtual_rust_source_base_dir) =
+ option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR").map(PathBuf::from)
+ {
+ normalize_path(&virtual_rust_source_base_dir.join("library"), "$SRC_DIR");
+ normalize_path(&virtual_rust_source_base_dir.join("compiler"), "$COMPILER_DIR");
+ }
+
+ // Paths into the build directory
+ let test_build_dir = &self.config.build_base;
+ let parent_build_dir = test_build_dir.parent().unwrap().parent().unwrap().parent().unwrap();
+
+ // eg. /home/user/rust/build/x86_64-unknown-linux-gnu/test/ui
+ normalize_path(test_build_dir, "$TEST_BUILD_DIR");
+ // eg. /home/user/rust/build
+ normalize_path(parent_build_dir, "$BUILD_DIR");
+
+ // Paths into lib directory.
+ normalize_path(&parent_build_dir.parent().unwrap().join("lib"), "$LIB_DIR");
+
+ if json {
+ // escaped newlines in json strings should be readable
+            // in the stderr files. There's no point in being correct,
+ // since only humans process the stderr files.
+ // Thus we just turn escaped newlines back into newlines.
+ normalized = normalized.replace("\\n", "\n");
+ }
+
+ // If there are `$SRC_DIR` normalizations with line and column numbers, then replace them
+        // with placeholders, as we do not want tests to need updating whenever compiler source
+        // code changes.
+ // eg. $SRC_DIR/libcore/mem.rs:323:14 becomes $SRC_DIR/libcore/mem.rs:LL:COL
+ lazy_static! {
+ static ref SRC_DIR_RE: Regex =
+ Regex::new("SRC_DIR(.+):\\d+:\\d+(: \\d+:\\d+)?").unwrap();
+ }
+
+ normalized = SRC_DIR_RE.replace_all(&normalized, "SRC_DIR$1:LL:COL").into_owned();
+
+ normalized = Self::normalize_platform_differences(&normalized);
+ normalized = normalized.replace("\t", "\\t"); // makes tabs visible
+
+ // Remove test annotations like `//~ ERROR text` from the output,
+ // since they duplicate actual errors and make the output hard to read.
+ // This mirrors the regex in src/tools/tidy/src/style.rs, please update
+        // both if either is changed.
+ lazy_static! {
+ static ref ANNOTATION_RE: Regex = Regex::new("\\s*//(\\[.*\\])?~.*").unwrap();
+ }
+
+ normalized = ANNOTATION_RE.replace_all(&normalized, "").into_owned();
+
+ // This code normalizes various hashes in v0 symbol mangling that is
+ // emitted in the ui and mir-opt tests.
+ lazy_static! {
+ static ref V0_CRATE_HASH_PREFIX_RE: Regex =
+ Regex::new(r"_R.*?Cs[0-9a-zA-Z]+_").unwrap();
+ static ref V0_CRATE_HASH_RE: Regex = Regex::new(r"Cs[0-9a-zA-Z]+_").unwrap();
+ }
+
+ const V0_CRATE_HASH_PLACEHOLDER: &str = r"CsCRATE_HASH_";
+ if V0_CRATE_HASH_PREFIX_RE.is_match(&normalized) {
+ // Normalize crate hash
+ normalized =
+ V0_CRATE_HASH_RE.replace_all(&normalized, V0_CRATE_HASH_PLACEHOLDER).into_owned();
+ }
+
+ lazy_static! {
+ static ref V0_BACK_REF_PREFIX_RE: Regex = Regex::new(r"\(_R.*?B[0-9a-zA-Z]_").unwrap();
+ static ref V0_BACK_REF_RE: Regex = Regex::new(r"B[0-9a-zA-Z]_").unwrap();
+ }
+
+ const V0_BACK_REF_PLACEHOLDER: &str = r"B<REF>_";
+ if V0_BACK_REF_PREFIX_RE.is_match(&normalized) {
+ // Normalize back references (see RFC 2603)
+ normalized =
+ V0_BACK_REF_RE.replace_all(&normalized, V0_BACK_REF_PLACEHOLDER).into_owned();
+ }
+
+ // Custom normalization rules
+ for rule in custom_rules {
+ let re = Regex::new(&rule.0).expect("bad regex in custom normalization rule");
+ normalized = re.replace_all(&normalized, &rule.1[..]).into_owned();
+ }
+ normalized
+ }
+
+ /// Normalize output differences across platforms. Generally changes Windows output to be more
+ /// Unix-like.
+ ///
+ /// Replaces backslashes in paths with forward slashes, and replaces CRLF line endings
+ /// with LF.
+ fn normalize_platform_differences(output: &str) -> String {
+ lazy_static! {
+ /// Used to find Windows paths.
+ ///
+ /// It's not possible to detect paths in the error messages generally, but this is a
+ /// decent enough heuristic.
+ static ref PATH_BACKSLASH_RE: Regex = Regex::new(r#"(?x)
+ (?:
+ # Match paths that don't include spaces.
+ (?:\\[\pL\pN\.\-_']+)+\.\pL+
+ |
+ # If the path starts with a well-known root, then allow spaces.
+ \$(?:DIR|SRC_DIR|TEST_BUILD_DIR|BUILD_DIR|LIB_DIR)(?:\\[\pL\pN\.\-_' ]+)+
+ )"#
+ ).unwrap();
+ }
+
+ let output = output.replace(r"\\", r"\");
+
+ PATH_BACKSLASH_RE
+ .replace_all(&output, |caps: &Captures<'_>| {
+ println!("{}", &caps[0]);
+ caps[0].replace(r"\", "/")
+ })
+ .replace("\r\n", "\n")
+ }
+
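+    /// Resolves the expected-output snapshot path for `kind`, falling back to the base
+    /// (non-compare-mode) file when no compare-mode specific one exists.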
+ fn expected_output_path(&self, kind: &str) -> PathBuf {
+ let mut path =
+ expected_output_path(&self.testpaths, self.revision, &self.config.compare_mode, kind);
+
+ if !path.exists() {
+ if let Some(CompareMode::Polonius) = self.config.compare_mode {
+ path = expected_output_path(&self.testpaths, self.revision, &None, kind);
+ }
+ }
+
+ if !path.exists() {
+ path = expected_output_path(&self.testpaths, self.revision, &None, kind);
+ }
+
+ path
+ }
+
+ fn load_expected_output(&self, kind: &str) -> String {
+ let path = self.expected_output_path(kind);
+ if path.exists() {
+ match self.load_expected_output_from_path(&path) {
+ Ok(x) => x,
+ Err(x) => self.fatal(&x),
+ }
+ } else {
+ String::new()
+ }
+ }
+
+ fn load_expected_output_from_path(&self, path: &Path) -> Result<String, String> {
+ fs::read_to_string(path).map_err(|err| {
+ format!("failed to load expected output from `{}`: {}", path.display(), err)
+ })
+ }
+
+ fn delete_file(&self, file: &PathBuf) {
+ if !file.exists() {
+            // Deleting a nonexistent file would error.
+ return;
+ }
+ if let Err(e) = fs::remove_file(file) {
+ self.fatal(&format!("failed to delete `{}`: {}", file.display(), e,));
+ }
+ }
+
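+    /// Compares `actual` against `expected` for the given output kind, printing a diff
+    /// on mismatch and rewriting the snapshot files under `--bless`. Returns the number
+    /// of errors to record (zero when blessing).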
+ fn compare_output(&self, kind: &str, actual: &str, expected: &str) -> usize {
+ if actual == expected {
+ return 0;
+ }
+
+ if !self.config.bless {
+ if expected.is_empty() {
+ println!("normalized {}:\n{}\n", kind, actual);
+ } else {
+ println!("diff of {}:\n", kind);
+ print!("{}", write_diff(expected, actual, 3));
+ }
+ }
+
+ let mode = self.config.compare_mode.as_ref().map_or("", |m| m.to_str());
+ let output_file = self
+ .output_base_name()
+ .with_extra_extension(self.revision.unwrap_or(""))
+ .with_extra_extension(mode)
+ .with_extra_extension(kind);
+
+ let mut files = vec![output_file];
+ if self.config.bless {
+ // Delete non-revision .stderr/.stdout file if revisions are used.
+ // Without this, we'd just generate the new files and leave the old files around.
+ if self.revision.is_some() {
+ let old =
+ expected_output_path(self.testpaths, None, &self.config.compare_mode, kind);
+ self.delete_file(&old);
+ }
+ files.push(expected_output_path(
+ self.testpaths,
+ self.revision,
+ &self.config.compare_mode,
+ kind,
+ ));
+ }
+
+ for output_file in &files {
+ if actual.is_empty() {
+ self.delete_file(output_file);
+ } else if let Err(err) = fs::write(&output_file, &actual) {
+ self.fatal(&format!(
+ "failed to write {} to `{}`: {}",
+ kind,
+ output_file.display(),
+ err,
+ ));
+ }
+ }
+
+ println!("\nThe actual {0} differed from the expected {0}.", kind);
+ for output_file in files {
+ println!("Actual {} saved to {}", kind, output_file.display());
+ }
+ if self.config.bless { 0 } else { 1 }
+ }
+
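+    /// For each UI output kind, compares the compare-mode snapshots against the base
+    /// snapshot: duplicates are deleted under `--bless`, and any mode listed in
+    /// `require_same_modes` must not diverge from the base output.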
+ fn check_and_prune_duplicate_outputs(
+ &self,
+ proc_res: &ProcRes,
+ modes: &[CompareMode],
+ require_same_modes: &[CompareMode],
+ ) {
+ for kind in UI_EXTENSIONS {
+ let canon_comparison_path =
+ expected_output_path(&self.testpaths, self.revision, &None, kind);
+
+ let canon = match self.load_expected_output_from_path(&canon_comparison_path) {
+ Ok(canon) => canon,
+ _ => continue,
+ };
+ let bless = self.config.bless;
+ let check_and_prune_duplicate_outputs = |mode: &CompareMode, require_same: bool| {
+ let examined_path =
+ expected_output_path(&self.testpaths, self.revision, &Some(mode.clone()), kind);
+
+ // If there is no output, there is nothing to do
+ let examined_content = match self.load_expected_output_from_path(&examined_path) {
+ Ok(content) => content,
+ _ => return,
+ };
+
+ let is_duplicate = canon == examined_content;
+
+ match (bless, require_same, is_duplicate) {
+ // If we're blessing and the output is the same, then delete the file.
+ (true, _, true) => {
+ self.delete_file(&examined_path);
+ }
+ // If we want them to be the same, but they are different, then error.
+                    // We do this whether we bless or not.
+ (_, true, false) => {
+ self.fatal_proc_rec(
+ &format!("`{}` should not have different output from base test!", kind),
+ proc_res,
+ );
+ }
+ _ => {}
+ }
+ };
+ for mode in modes {
+ check_and_prune_duplicate_outputs(mode, false);
+ }
+ for mode in require_same_modes {
+ check_and_prune_duplicate_outputs(mode, true);
+ }
+ }
+ }
+
+ fn create_stamp(&self) {
+ let stamp = crate::stamp(&self.config, self.testpaths, self.revision);
+ fs::write(&stamp, compute_stamp_hash(&self.config)).unwrap();
+ }
+}
+
+struct ProcArgs {
+ prog: String,
+ args: Vec<String>,
+}
+
+pub struct ProcRes {
+ status: ExitStatus,
+ stdout: String,
+ stderr: String,
+ cmdline: String,
+}
+
+impl ProcRes {
+ pub fn print_info(&self) {
+ fn render(name: &str, contents: &str) -> String {
+ let contents = json::extract_rendered(contents);
+ let contents = contents.trim();
+ if contents.is_empty() {
+ format!("{name}: none")
+ } else {
+ format!(
+ "\
+ --- {name} -------------------------------\n\
+ {contents}\n\
+ ------------------------------------------",
+ )
+ }
+ }
+
+ println!(
+ "status: {}\ncommand: {}\n{}\n{}\n",
+ self.status,
+ self.cmdline,
+ render("stdout", &self.stdout),
+ render("stderr", &self.stderr),
+ );
+ }
+
+ pub fn fatal(&self, err: Option<&str>, on_failure: impl FnOnce()) -> ! {
+ if let Some(e) = err {
+ println!("\nerror: {}", e);
+ }
+ self.print_info();
+ on_failure();
+ // Use resume_unwind instead of panic!() to prevent a panic message + backtrace from
+ // compiletest, which is unnecessary noise.
+ std::panic::resume_unwind(Box::new(()));
+ }
+}
+
+#[derive(Debug)]
+enum TargetLocation {
+ ThisFile(PathBuf),
+ ThisDirectory(PathBuf),
+}
+
+enum AllowUnused {
+ Yes,
+ No,
+}
diff --git a/src/tools/compiletest/src/runtest/debugger.rs b/src/tools/compiletest/src/runtest/debugger.rs
new file mode 100644
index 000000000..379ff0bab
--- /dev/null
+++ b/src/tools/compiletest/src/runtest/debugger.rs
@@ -0,0 +1,122 @@
+use crate::common::Config;
+use crate::header::line_directive;
+use crate::runtest::ProcRes;
+
+use std::fs::File;
+use std::io::{BufRead, BufReader};
+use std::path::Path;
+
+pub(super) struct DebuggerCommands {
+ pub commands: Vec<String>,
+ pub check_lines: Vec<String>,
+ pub breakpoint_lines: Vec<usize>,
+}
+
+impl DebuggerCommands {
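+    /// Parses `<prefix>-command`, `<prefix>-check`, and `#break` directives from the
+    /// test file, skipping directives tied to a revision other than `rev`.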
+ pub(super) fn parse_from(
+ file: &Path,
+ config: &Config,
+ debugger_prefixes: &[&str],
+ rev: Option<&str>,
+ ) -> Result<Self, String> {
+ let directives = debugger_prefixes
+ .iter()
+ .map(|prefix| (format!("{}-command", prefix), format!("{}-check", prefix)))
+ .collect::<Vec<_>>();
+
+ let mut breakpoint_lines = vec![];
+ let mut commands = vec![];
+ let mut check_lines = vec![];
+ let mut counter = 0;
+ let reader = BufReader::new(File::open(file).unwrap());
+ for line in reader.lines() {
+ counter += 1;
+ match line {
+ Ok(line) => {
+ let (lnrev, line) = line_directive("//", &line).unwrap_or((None, &line));
+
+ // Skip any revision specific directive that doesn't match the current
+ // revision being tested
+ if lnrev.is_some() && lnrev != rev {
+ continue;
+ }
+
+ if line.contains("#break") {
+ breakpoint_lines.push(counter);
+ }
+
+ for &(ref command_directive, ref check_directive) in &directives {
+ config
+ .parse_name_value_directive(&line, command_directive)
+ .map(|cmd| commands.push(cmd));
+
+ config
+ .parse_name_value_directive(&line, check_directive)
+ .map(|cmd| check_lines.push(cmd));
+ }
+ }
+ Err(e) => return Err(format!("Error while parsing debugger commands: {}", e)),
+ }
+ }
+
+ Ok(Self { commands, check_lines, breakpoint_lines })
+ }
+}
+
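+/// Verifies that every check line appears, in order, in the debugger's stdout.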
+pub(super) fn check_debugger_output(
+ debugger_run_result: &ProcRes,
+ check_lines: &[String],
+) -> Result<(), String> {
+ let num_check_lines = check_lines.len();
+
+ let mut check_line_index = 0;
+ for line in debugger_run_result.stdout.lines() {
+ if check_line_index >= num_check_lines {
+ break;
+ }
+
+ if check_single_line(line, &(check_lines[check_line_index])[..]) {
+ check_line_index += 1;
+ }
+ }
+ if check_line_index != num_check_lines && num_check_lines > 0 {
+ Err(format!("line not found in debugger output: {}", check_lines[check_line_index]))
+ } else {
+ Ok(())
+ }
+}
+
+fn check_single_line(line: &str, check_line: &str) -> bool {
+ // Allow check lines to leave parts unspecified (e.g., uninitialized
+ // bits in the wrong case of an enum) with the notation "[...]".
+ let line = line.trim();
+ let check_line = check_line.trim();
+ let can_start_anywhere = check_line.starts_with("[...]");
+ let can_end_anywhere = check_line.ends_with("[...]");
+
+ let check_fragments: Vec<&str> =
+ check_line.split("[...]").filter(|frag| !frag.is_empty()).collect();
+ if check_fragments.is_empty() {
+ return true;
+ }
+
+ let (mut rest, first_fragment) = if can_start_anywhere {
+ match line.find(check_fragments[0]) {
+ Some(pos) => (&line[pos + check_fragments[0].len()..], 1),
+ None => return false,
+ }
+ } else {
+ (line, 0)
+ };
+
+ for current_fragment in &check_fragments[first_fragment..] {
+ match rest.find(current_fragment) {
+ Some(pos) => {
+ rest = &rest[pos + current_fragment.len()..];
+ }
+ None => return false,
+ }
+ }
+
+    can_end_anywhere || rest.is_empty()
+}
diff --git a/src/tools/compiletest/src/runtest/tests.rs b/src/tools/compiletest/src/runtest/tests.rs
new file mode 100644
index 000000000..511051111
--- /dev/null
+++ b/src/tools/compiletest/src/runtest/tests.rs
@@ -0,0 +1,50 @@
+use super::*;
+
+#[test]
+fn normalize_platform_differences() {
+ assert_eq!(TestCx::normalize_platform_differences(r"$DIR\foo.rs"), "$DIR/foo.rs");
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"$BUILD_DIR\..\parser.rs"),
+ "$BUILD_DIR/../parser.rs"
+ );
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"$DIR\bar.rs hello\nworld"),
+ r"$DIR/bar.rs hello\nworld"
+ );
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"either bar\baz.rs or bar\baz\mod.rs"),
+ r"either bar/baz.rs or bar/baz/mod.rs",
+ );
+ assert_eq!(TestCx::normalize_platform_differences(r"`.\some\path.rs`"), r"`./some/path.rs`",);
+ assert_eq!(TestCx::normalize_platform_differences(r"`some\path.rs`"), r"`some/path.rs`",);
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"$DIR\path-with-dashes.rs"),
+ r"$DIR/path-with-dashes.rs"
+ );
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"$DIR\path_with_underscores.rs"),
+ r"$DIR/path_with_underscores.rs",
+ );
+ assert_eq!(TestCx::normalize_platform_differences(r"$DIR\foo.rs:12:11"), "$DIR/foo.rs:12:11",);
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"$DIR\path with spaces 'n' quotes"),
+ "$DIR/path with spaces 'n' quotes",
+ );
+ assert_eq!(
+ TestCx::normalize_platform_differences(r"$DIR\file_with\no_extension"),
+ "$DIR/file_with/no_extension",
+ );
+
+ assert_eq!(TestCx::normalize_platform_differences(r"\n"), r"\n");
+ assert_eq!(TestCx::normalize_platform_differences(r"{ \n"), r"{ \n");
+ assert_eq!(TestCx::normalize_platform_differences(r"`\]`"), r"`\]`");
+ assert_eq!(TestCx::normalize_platform_differences(r#""\{""#), r#""\{""#);
+ assert_eq!(
+ TestCx::normalize_platform_differences(r#"write!(&mut v, "Hello\n")"#),
+ r#"write!(&mut v, "Hello\n")"#
+ );
+ assert_eq!(
+ TestCx::normalize_platform_differences(r#"println!("test\ntest")"#),
+ r#"println!("test\ntest")"#,
+ );
+}
diff --git a/src/tools/compiletest/src/tests.rs b/src/tools/compiletest/src/tests.rs
new file mode 100644
index 000000000..e6725dba2
--- /dev/null
+++ b/src/tools/compiletest/src/tests.rs
@@ -0,0 +1,78 @@
+use super::header::extract_llvm_version;
+use super::*;
+
+#[test]
+fn test_extract_gdb_version() {
+ macro_rules! test { ($($expectation:literal: $input:literal,)*) => {{$(
+ assert_eq!(extract_gdb_version($input), Some($expectation));
+ )*}}}
+
+ test! {
+ 7000001: "GNU gdb (GDB) CentOS 7.0.1-45.el5.centos",
+
+ 7002000: "GNU gdb (GDB) Red Hat Enterprise Linux 7.2-90.el6",
+
+ 7004000: "GNU gdb (Ubuntu/Linaro 7.4-2012.04-0ubuntu2.1) 7.4-2012.04",
+ 7004001: "GNU gdb (GDB) 7.4.1-debian",
+
+ 7006001: "GNU gdb (GDB) Red Hat Enterprise Linux 7.6.1-80.el7",
+
+ 7007001: "GNU gdb (Ubuntu 7.7.1-0ubuntu5~14.04.2) 7.7.1",
+ 7007001: "GNU gdb (Debian 7.7.1+dfsg-5) 7.7.1",
+ 7007001: "GNU gdb (GDB) Fedora 7.7.1-21.fc20",
+
+ 7008000: "GNU gdb (GDB; openSUSE 13.2) 7.8",
+ 7009001: "GNU gdb (GDB) Fedora 7.9.1-20.fc22",
+ 7010001: "GNU gdb (GDB) Fedora 7.10.1-31.fc23",
+
+ 7011000: "GNU gdb (Ubuntu 7.11-0ubuntu1) 7.11",
+ 7011001: "GNU gdb (Ubuntu 7.11.1-0ubuntu1~16.04) 7.11.1",
+ 7011001: "GNU gdb (Debian 7.11.1-2) 7.11.1",
+ 7011001: "GNU gdb (GDB) Fedora 7.11.1-86.fc24",
+ 7011001: "GNU gdb (GDB; openSUSE Leap 42.1) 7.11.1",
+ 7011001: "GNU gdb (GDB; openSUSE Tumbleweed) 7.11.1",
+
+ 7011090: "7.11.90",
+ 7011090: "GNU gdb (Ubuntu 7.11.90.20161005-0ubuntu1) 7.11.90.20161005-git",
+
+ 7012000: "7.12",
+ 7012000: "GNU gdb (GDB) 7.12",
+ 7012000: "GNU gdb (GDB) 7.12.20161027-git",
+ 7012050: "GNU gdb (GDB) 7.12.50.20161027-git",
+
+ 9002000: "GNU gdb (Ubuntu 9.2-0ubuntu1~20.04) 9.2",
+ 10001000: "GNU gdb (GDB) 10.1 [GDB v10.1 for FreeBSD]",
+ }
+}
+
+#[test]
+fn test_extract_lldb_version() {
+ // Apple variants
+ assert_eq!(extract_lldb_version("LLDB-179.5"), Some((179, false)));
+ assert_eq!(extract_lldb_version("lldb-300.2.51"), Some((300, false)));
+
+ // Upstream versions
+ assert_eq!(extract_lldb_version("lldb version 6.0.1"), Some((600, false)));
+ assert_eq!(extract_lldb_version("lldb version 9.0.0"), Some((900, false)));
+}
+
+#[test]
+fn is_test_test() {
+ assert_eq!(true, is_test(&OsString::from("a_test.rs")));
+ assert_eq!(false, is_test(&OsString::from(".a_test.rs")));
+ assert_eq!(false, is_test(&OsString::from("a_cat.gif")));
+ assert_eq!(false, is_test(&OsString::from("#a_dog_gif")));
+ assert_eq!(false, is_test(&OsString::from("~a_temp_file")));
+}
+
+#[test]
+fn test_extract_llvm_version() {
+ assert_eq!(extract_llvm_version("8.1.2-rust"), Some(80102));
+ assert_eq!(extract_llvm_version("9.0.1-rust-1.43.0-dev"), Some(90001));
+ assert_eq!(extract_llvm_version("9.3.1-rust-1.43.0-dev"), Some(90301));
+ assert_eq!(extract_llvm_version("10.0.0-rust"), Some(100000));
+ assert_eq!(extract_llvm_version("11.1.0"), Some(110100));
+ assert_eq!(extract_llvm_version("12.0.0libcxx"), Some(120000));
+ assert_eq!(extract_llvm_version("12.0.0-rc3"), Some(120000));
+ assert_eq!(extract_llvm_version("13.0.0git"), Some(130000));
+}
diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs
new file mode 100644
index 000000000..22df18ee9
--- /dev/null
+++ b/src/tools/compiletest/src/util.rs
@@ -0,0 +1,259 @@
+use crate::common::Config;
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+use tracing::*;
+
+#[cfg(test)]
+mod tests;
+
+/// Conversion table from triple OS name to Rust SYSNAME
+const OS_TABLE: &[(&str, &str)] = &[
+ ("android", "android"),
+ ("androideabi", "android"),
+ ("cuda", "cuda"),
+ ("darwin", "macos"),
+ ("dragonfly", "dragonfly"),
+ ("emscripten", "emscripten"),
+ ("freebsd", "freebsd"),
+ ("fuchsia", "fuchsia"),
+ ("haiku", "haiku"),
+ ("hermit", "hermit"),
+ ("illumos", "illumos"),
+ ("ios", "ios"),
+ ("l4re", "l4re"),
+ ("linux", "linux"),
+ ("mingw32", "windows"),
+ ("none", "none"),
+ ("netbsd", "netbsd"),
+ ("openbsd", "openbsd"),
+ ("redox", "redox"),
+ ("sgx", "sgx"),
+ ("solaris", "solaris"),
+ ("watchos", "watchos"),
+ ("win32", "windows"),
+ ("windows", "windows"),
+ ("vxworks", "vxworks"),
+];
+
+const ARCH_TABLE: &[(&str, &str)] = &[
+ ("aarch64", "aarch64"),
+ ("aarch64_be", "aarch64"),
+ ("amd64", "x86_64"),
+ ("arm", "arm"),
+ ("arm64", "aarch64"),
+ ("armv4t", "arm"),
+ ("armv5te", "arm"),
+ ("armv7", "arm"),
+ ("armv7s", "arm"),
+ ("asmjs", "asmjs"),
+ ("avr", "avr"),
+ ("bpfeb", "bpf"),
+ ("bpfel", "bpf"),
+ ("hexagon", "hexagon"),
+ ("i386", "x86"),
+ ("i586", "x86"),
+ ("i686", "x86"),
+ ("m68k", "m68k"),
+ ("mips", "mips"),
+ ("mips64", "mips64"),
+ ("mips64el", "mips64"),
+ ("mipsisa32r6", "mips"),
+ ("mipsisa32r6el", "mips"),
+ ("mipsisa64r6", "mips64"),
+ ("mipsisa64r6el", "mips64"),
+ ("mipsel", "mips"),
+ ("mipsisa32r6", "mips"),
+ ("mipsisa32r6el", "mips"),
+ ("mipsisa64r6", "mips64"),
+ ("mipsisa64r6el", "mips64"),
+ ("msp430", "msp430"),
+ ("nvptx64", "nvptx64"),
+ ("powerpc", "powerpc"),
+ ("powerpc64", "powerpc64"),
+ ("powerpc64le", "powerpc64"),
+ ("riscv64gc", "riscv64"),
+ ("s390x", "s390x"),
+ ("sparc", "sparc"),
+ ("sparc64", "sparc64"),
+ ("sparcv9", "sparc64"),
+ ("thumbv6m", "thumb"),
+ ("thumbv7em", "thumb"),
+ ("thumbv7m", "thumb"),
+ ("wasm32", "wasm32"),
+ ("x86_64", "x86_64"),
+ ("xcore", "xcore"),
+];
+
+pub const ASAN_SUPPORTED_TARGETS: &[&str] = &[
+ "aarch64-apple-darwin",
+ "aarch64-fuchsia",
+ "aarch64-unknown-linux-gnu",
+ "x86_64-apple-darwin",
+ "x86_64-fuchsia",
+ "x86_64-unknown-freebsd",
+ "x86_64-unknown-linux-gnu",
+];
+
+// FIXME(rcvalle): More targets are likely supported.
+pub const CFI_SUPPORTED_TARGETS: &[&str] = &[
+ "aarch64-apple-darwin",
+ "aarch64-fuchsia",
+ "aarch64-linux-android",
+ "aarch64-unknown-freebsd",
+ "aarch64-unknown-linux-gnu",
+ "x86_64-apple-darwin",
+ "x86_64-fuchsia",
+ "x86_64-pc-solaris",
+ "x86_64-unknown-freebsd",
+ "x86_64-unknown-illumos",
+ "x86_64-unknown-linux-gnu",
+ "x86_64-unknown-linux-musl",
+ "x86_64-unknown-netbsd",
+];
+
+pub const LSAN_SUPPORTED_TARGETS: &[&str] = &[
+ // FIXME: currently broken, see #88132
+ // "aarch64-apple-darwin",
+ "aarch64-unknown-linux-gnu",
+ "x86_64-apple-darwin",
+ "x86_64-unknown-linux-gnu",
+];
+
+pub const MSAN_SUPPORTED_TARGETS: &[&str] =
+ &["aarch64-unknown-linux-gnu", "x86_64-unknown-freebsd", "x86_64-unknown-linux-gnu"];
+
+pub const TSAN_SUPPORTED_TARGETS: &[&str] = &[
+ "aarch64-apple-darwin",
+ "aarch64-unknown-linux-gnu",
+ "x86_64-apple-darwin",
+ "x86_64-unknown-freebsd",
+ "x86_64-unknown-linux-gnu",
+];
+
+pub const HWASAN_SUPPORTED_TARGETS: &[&str] =
+ &["aarch64-linux-android", "aarch64-unknown-linux-gnu"];
+
+pub const MEMTAG_SUPPORTED_TARGETS: &[&str] =
+ &["aarch64-linux-android", "aarch64-unknown-linux-gnu"];
+
+pub const SHADOWCALLSTACK_SUPPORTED_TARGETS: &[&str] = &["aarch64-linux-android"];
+
+const BIG_ENDIAN: &[&str] = &[
+ "aarch64_be",
+ "armebv7r",
+ "mips",
+ "mips64",
+ "mipsisa32r6",
+ "mipsisa64r6",
+ "powerpc",
+ "powerpc64",
+ "s390x",
+ "sparc",
+ "sparc64",
+ "sparcv9",
+];
+
+static ASM_SUPPORTED_ARCHS: &[&str] = &[
+ "x86", "x86_64", "arm", "aarch64", "riscv32",
+ "riscv64",
+ // These targets require an additional asm_experimental_arch feature.
+ // "nvptx64", "hexagon", "mips", "mips64", "spirv", "wasm32",
+];
+
+pub fn has_asm_support(triple: &str) -> bool {
+ ASM_SUPPORTED_ARCHS.contains(&get_arch(triple))
+}
+
+pub fn matches_os(triple: &str, name: &str) -> bool {
+    // For the bare wasm32 target we ignore anything that is also ignored on emscripten,
+    // and we additionally recognize `wasm32-bare` as the OS for the target.
+ if triple == "wasm32-unknown-unknown" {
+ return name == "emscripten" || name == "wasm32-bare";
+ }
+ let triple: Vec<_> = triple.split('-').collect();
+ for &(triple_os, os) in OS_TABLE {
+ if triple.contains(&triple_os) {
+ return os == name;
+ }
+ }
+ panic!("Cannot determine OS from triple");
+}
+
+/// Determine the architecture from `triple`
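+/// (Illustrative example: `get_arch("x86_64-unknown-linux-gnu")` returns `"x86_64"`;
+/// see `util/tests.rs` for the actual unit tests.)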
+pub fn get_arch(triple: &str) -> &'static str {
+ let triple: Vec<_> = triple.split('-').collect();
+ for &(triple_arch, arch) in ARCH_TABLE {
+ if triple.contains(&triple_arch) {
+ return arch;
+ }
+ }
+ panic!("Cannot determine Architecture from triple");
+}
+
+/// Determine the endianness from `triple`
+pub fn is_big_endian(triple: &str) -> bool {
+ let triple_arch = triple.split('-').next().unwrap();
+ BIG_ENDIAN.contains(&triple_arch)
+}
+
+pub fn matches_env(triple: &str, name: &str) -> bool {
+ if let Some(env) = triple.split('-').nth(3) { env.starts_with(name) } else { false }
+}
+
+pub fn get_pointer_width(triple: &str) -> &'static str {
+ if (triple.contains("64") && !triple.ends_with("gnux32") && !triple.ends_with("gnu_ilp32"))
+ || triple.starts_with("s390x")
+ {
+ "64bit"
+ } else if triple.starts_with("avr") {
+ "16bit"
+ } else {
+ "32bit"
+ }
+}
+
+pub fn make_new_path(path: &str) -> String {
+ assert!(cfg!(windows));
+ // Windows just uses PATH as the library search path, so we have to
+ // maintain the current value while adding our own
+ match env::var(lib_path_env_var()) {
+ Ok(curr) => format!("{}{}{}", path, path_div(), curr),
+ Err(..) => path.to_owned(),
+ }
+}
+
+pub fn lib_path_env_var() -> &'static str {
+ "PATH"
+}
+fn path_div() -> &'static str {
+ ";"
+}
+
+pub fn logv(config: &Config, s: String) {
+ debug!("{}", s);
+ if config.verbose {
+ println!("{}", s);
+ }
+}
+
+pub trait PathBufExt {
+ /// Append an extension to the path, even if it already has one.
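+    ///
+    /// (Illustrative example: `PathBuf::from("foo.rs").with_extra_extension("stderr")`
+    /// yields `foo.rs.stderr`; see the unit test in `util/tests.rs`.)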
+ fn with_extra_extension<S: AsRef<OsStr>>(&self, extension: S) -> PathBuf;
+}
+
+impl PathBufExt for PathBuf {
+ fn with_extra_extension<S: AsRef<OsStr>>(&self, extension: S) -> PathBuf {
+ if extension.as_ref().is_empty() {
+ self.clone()
+ } else {
+ let mut fname = self.file_name().unwrap().to_os_string();
+ if !extension.as_ref().to_str().unwrap().starts_with('.') {
+ fname.push(".");
+ }
+ fname.push(extension);
+ self.with_file_name(fname)
+ }
+ }
+}
diff --git a/src/tools/compiletest/src/util/tests.rs b/src/tools/compiletest/src/util/tests.rs
new file mode 100644
index 000000000..663027173
--- /dev/null
+++ b/src/tools/compiletest/src/util/tests.rs
@@ -0,0 +1,51 @@
+use super::*;
+
+#[test]
+#[should_panic(expected = "Cannot determine Architecture from triple")]
+fn test_get_arch_failure() {
+ get_arch("abc");
+}
+
+#[test]
+fn test_get_arch() {
+ assert_eq!("x86_64", get_arch("x86_64-unknown-linux-gnu"));
+ assert_eq!("x86_64", get_arch("amd64"));
+ assert_eq!("nvptx64", get_arch("nvptx64-nvidia-cuda"));
+}
+
+#[test]
+#[should_panic(expected = "Cannot determine OS from triple")]
+fn test_matches_os_failure() {
+ matches_os("abc", "abc");
+}
+
+#[test]
+fn test_matches_os() {
+ assert!(matches_os("x86_64-unknown-linux-gnu", "linux"));
+ assert!(matches_os("wasm32-unknown-unknown", "emscripten"));
+ assert!(matches_os("wasm32-unknown-unknown", "wasm32-bare"));
+ assert!(!matches_os("wasm32-unknown-unknown", "windows"));
+ assert!(matches_os("thumbv6m0-none-eabi", "none"));
+ assert!(matches_os("riscv32imc-unknown-none-elf", "none"));
+ assert!(matches_os("nvptx64-nvidia-cuda", "cuda"));
+ assert!(matches_os("x86_64-fortanix-unknown-sgx", "sgx"));
+}
+
+#[test]
+fn is_big_endian_test() {
+ assert!(!is_big_endian("no"));
+ assert!(is_big_endian("sparc-unknown-unknown"));
+}
+
+#[test]
+fn path_buf_with_extra_extension_test() {
+ assert_eq!(
+ PathBuf::from("foo.rs.stderr"),
+ PathBuf::from("foo.rs").with_extra_extension("stderr")
+ );
+ assert_eq!(
+ PathBuf::from("foo.rs.stderr"),
+ PathBuf::from("foo.rs").with_extra_extension(".stderr")
+ );
+ assert_eq!(PathBuf::from("foo.rs"), PathBuf::from("foo.rs").with_extra_extension(""));
+}
diff --git a/src/tools/error_index_generator/Cargo.toml b/src/tools/error_index_generator/Cargo.toml
new file mode 100644
index 000000000..c84b79e11
--- /dev/null
+++ b/src/tools/error_index_generator/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "error_index_generator"
+version = "0.0.0"
+edition = "2021"
+
+[dependencies]
+rustdoc = { path = "../../librustdoc" }
+
+[build-dependencies]
+walkdir = "2"
+
+[[bin]]
+name = "error_index_generator"
+path = "main.rs"
diff --git a/src/tools/error_index_generator/build.rs b/src/tools/error_index_generator/build.rs
new file mode 100644
index 000000000..70b00b36c
--- /dev/null
+++ b/src/tools/error_index_generator/build.rs
@@ -0,0 +1,31 @@
+use std::path::PathBuf;
+use std::{env, fs};
+use walkdir::WalkDir;
+
+fn main() {
+    // The error codes live in the compiler source tree (we are in src/tools/error_index_generator).
+    // Note that we could skip one of the `..` in the relative path below, but this ensures we at
+    // least loosely find the right directory.
+ let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
+
+ let error_codes_path = "../../../compiler/rustc_error_codes/src/error_codes.rs";
+
+ println!("cargo:rerun-if-changed={}", error_codes_path);
+ let file = fs::read_to_string(error_codes_path)
+ .unwrap()
+ .replace(": include_str!(\"./error_codes/", ": include_str!(\"./");
+ let contents = format!("(|| {{\n{}\n}})()", file);
+ fs::write(&out_dir.join("all_error_codes.rs"), &contents).unwrap();
+
+ // We copy the md files as well to the target directory.
+ for entry in WalkDir::new("../../../compiler/rustc_error_codes/src/error_codes") {
+ let entry = entry.unwrap();
+ match entry.path().extension() {
+ Some(s) if s == "md" => {}
+ _ => continue,
+ }
+ println!("cargo:rerun-if-changed={}", entry.path().to_str().unwrap());
+ let md_content = fs::read_to_string(entry.path()).unwrap();
+ fs::write(&out_dir.join(entry.file_name()), &md_content).unwrap();
+ }
+}
diff --git a/src/tools/error_index_generator/main.rs b/src/tools/error_index_generator/main.rs
new file mode 100644
index 000000000..1ce02e48c
--- /dev/null
+++ b/src/tools/error_index_generator/main.rs
@@ -0,0 +1,298 @@
+#![feature(rustc_private)]
+
+extern crate rustc_driver;
+extern crate rustc_span;
+
+use std::cell::RefCell;
+use std::collections::BTreeMap;
+use std::env;
+use std::error::Error;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+use std::path::PathBuf;
+
+use rustc_span::edition::DEFAULT_EDITION;
+
+use rustdoc::html::markdown::{ErrorCodes, HeadingOffset, IdMap, Markdown, Playground};
+
+pub struct ErrorMetadata {
+ pub description: Option<String>,
+}
+
+/// Mapping from error codes to metadata that can be (de)serialized.
+pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>;
+
+enum OutputFormat {
+ HTML(HTMLFormatter),
+ Markdown(MarkdownFormatter),
+ Unknown(String),
+}
+
+impl OutputFormat {
+ fn from(format: &str, resource_suffix: &str) -> OutputFormat {
+ match &*format.to_lowercase() {
+ "html" => OutputFormat::HTML(HTMLFormatter(
+ RefCell::new(IdMap::new()),
+ resource_suffix.to_owned(),
+ )),
+ "markdown" => OutputFormat::Markdown(MarkdownFormatter),
+ s => OutputFormat::Unknown(s.to_owned()),
+ }
+ }
+}
+
+trait Formatter {
+ fn header(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>>;
+ fn title(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>>;
+ fn error_code_block(
+ &self,
+ output: &mut dyn Write,
+ info: &ErrorMetadata,
+ err_code: &str,
+ ) -> Result<(), Box<dyn Error>>;
+ fn footer(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>>;
+}
+
+struct HTMLFormatter(RefCell<IdMap>, String);
+struct MarkdownFormatter;
+
+impl Formatter for HTMLFormatter {
+ fn header(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>> {
+ write!(
+ output,
+ r##"<!DOCTYPE html>
+<html>
+<head>
+<title>Rust Compiler Error Index</title>
+<meta charset="utf-8">
+<!-- Include rust.css after light.css so its rules take priority. -->
+<link rel="stylesheet" type="text/css" href="rustdoc{suffix}.css"/>
+<link rel="stylesheet" type="text/css" href="light{suffix}.css"/>
+<link rel="stylesheet" type="text/css" href="rust.css"/>
+<style>
+.error-undescribed {{
+ display: none;
+}}
+</style>
+</head>
+<body>
+"##,
+ suffix = self.1
+ )?;
+ Ok(())
+ }
+
+ fn title(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>> {
+ write!(output, "<h1>Rust Compiler Error Index</h1>\n")?;
+ Ok(())
+ }
+
+ fn error_code_block(
+ &self,
+ output: &mut dyn Write,
+ info: &ErrorMetadata,
+ err_code: &str,
+ ) -> Result<(), Box<dyn Error>> {
+ // Enclose each error in a div so they can be shown/hidden en masse.
+ let desc_desc = match info.description {
+ Some(_) => "error-described",
+ None => "error-undescribed",
+ };
+ write!(output, "<div class=\"{}\">", desc_desc)?;
+
+ // Error title (with self-link).
+ write!(
+ output,
+ "<h2 id=\"{0}\" class=\"section-header\"><a href=\"#{0}\">{0}</a></h2>\n",
+ err_code
+ )?;
+
+ // Description rendered as markdown.
+ match info.description {
+ Some(ref desc) => {
+ let mut id_map = self.0.borrow_mut();
+ let playground = Playground {
+ crate_name: None,
+ url: String::from("https://play.rust-lang.org/"),
+ };
+ write!(
+ output,
+ "{}",
+ Markdown {
+ content: desc,
+ links: &[],
+ ids: &mut id_map,
+ error_codes: ErrorCodes::Yes,
+ edition: DEFAULT_EDITION,
+ playground: &Some(playground),
+ heading_offset: HeadingOffset::H1,
+ }
+ .into_string()
+ )?
+ }
+ None => write!(output, "<p>No description.</p>\n")?,
+ }
+
+ write!(output, "</div>\n")?;
+ Ok(())
+ }
+
+ fn footer(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>> {
+ write!(
+ output,
+ r##"<script>
+function onEach(arr, func) {{
+ if (arr && arr.length > 0 && func) {{
+ var length = arr.length;
+ var i;
+ for (i = 0; i < length; ++i) {{
+ if (func(arr[i])) {{
+ return true;
+ }}
+ }}
+ }}
+ return false;
+}}
+
+function onEachLazy(lazyArray, func) {{
+ return onEach(
+ Array.prototype.slice.call(lazyArray),
+ func);
+}}
+
+function hasClass(elem, className) {{
+ return elem && elem.classList && elem.classList.contains(className);
+}}
+
+onEachLazy(document.getElementsByClassName('rust-example-rendered'), function(e) {{
+ if (hasClass(e, 'compile_fail')) {{
+ e.addEventListener("mouseover", function(event) {{
+ e.parentElement.previousElementSibling.childNodes[0].style.color = '#f00';
+ }});
+ e.addEventListener("mouseout", function(event) {{
+ e.parentElement.previousElementSibling.childNodes[0].style.color = '';
+ }});
+ }} else if (hasClass(e, 'ignore')) {{
+ e.addEventListener("mouseover", function(event) {{
+ e.parentElement.previousElementSibling.childNodes[0].style.color = '#ff9200';
+ }});
+ e.addEventListener("mouseout", function(event) {{
+ e.parentElement.previousElementSibling.childNodes[0].style.color = '';
+ }});
+ }}
+}});
+</script>
+</body>
+</html>"##
+ )?;
+ Ok(())
+ }
+}
+
+impl Formatter for MarkdownFormatter {
+ #[allow(unused_variables)]
+ fn header(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>> {
+ Ok(())
+ }
+
+ fn title(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>> {
+ write!(output, "# Rust Compiler Error Index\n")?;
+ Ok(())
+ }
+
+ fn error_code_block(
+ &self,
+ output: &mut dyn Write,
+ info: &ErrorMetadata,
+ err_code: &str,
+ ) -> Result<(), Box<dyn Error>> {
+ Ok(match info.description {
+ Some(ref desc) => write!(output, "## {}\n{}\n", err_code, desc)?,
+ None => (),
+ })
+ }
+
+ #[allow(unused_variables)]
+ fn footer(&self, output: &mut dyn Write) -> Result<(), Box<dyn Error>> {
+ Ok(())
+ }
+}
+
+/// Output an HTML page for the errors in `err_map` to `output_path`.
+fn render_error_page<T: Formatter>(
+ err_map: &ErrorMetadataMap,
+ output_path: &Path,
+ formatter: T,
+) -> Result<(), Box<dyn Error>> {
+ let mut output_file = File::create(output_path)?;
+
+ formatter.header(&mut output_file)?;
+ formatter.title(&mut output_file)?;
+
+ for (err_code, info) in err_map {
+ formatter.error_code_block(&mut output_file, info, err_code)?;
+ }
+
+ formatter.footer(&mut output_file)
+}
+
+fn main_with_result(format: OutputFormat, dst: &Path) -> Result<(), Box<dyn Error>> {
+ let long_codes = register_all();
+ let mut err_map = BTreeMap::new();
+ for (code, desc) in long_codes {
+ err_map.insert(code.to_string(), ErrorMetadata { description: desc.map(String::from) });
+ }
+ match format {
+ OutputFormat::Unknown(s) => panic!("Unknown output format: {}", s),
+ OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?,
+ OutputFormat::Markdown(m) => render_error_page(&err_map, dst, m)?,
+ }
+ Ok(())
+}
+
+fn parse_args() -> (OutputFormat, PathBuf) {
+ let mut args = env::args().skip(1);
+ let format = args.next();
+ let dst = args.next();
+ let resource_suffix = args.next().unwrap_or_else(String::new);
+ let format = format
+ .map(|a| OutputFormat::from(&a, &resource_suffix))
+ .unwrap_or(OutputFormat::from("html", &resource_suffix));
+ let dst = dst.map(PathBuf::from).unwrap_or_else(|| match format {
+ OutputFormat::HTML(..) => PathBuf::from("doc/error-index.html"),
+ OutputFormat::Markdown(..) => PathBuf::from("doc/error-index.md"),
+ OutputFormat::Unknown(..) => PathBuf::from("<nul>"),
+ });
+ (format, dst)
+}
+
+fn main() {
+ rustc_driver::init_env_logger("RUST_LOG");
+ let (format, dst) = parse_args();
+ let result =
+ rustc_span::create_default_session_globals_then(move || main_with_result(format, &dst));
+ if let Err(e) = result {
+ panic!("{}", e.to_string());
+ }
+}
+
+fn register_all() -> Vec<(&'static str, Option<&'static str>)> {
+ let mut long_codes: Vec<(&'static str, Option<&'static str>)> = Vec::new();
+ macro_rules! register_diagnostics {
+ ($($ecode:ident: $message:expr,)* ; $($code:ident,)*) => (
+ $(
+ {long_codes.extend([
+ (stringify!($ecode), Some($message)),
+ ].iter());}
+ )*
+ $(
+ {long_codes.extend([
+ stringify!($code),
+ ].iter().cloned().map(|s| (s, None)).collect::<Vec<_>>());}
+ )*
+ )
+ }
+ include!(concat!(env!("OUT_DIR"), "/all_error_codes.rs"));
+ long_codes
+}
diff --git a/src/tools/expand-yaml-anchors/Cargo.toml b/src/tools/expand-yaml-anchors/Cargo.toml
new file mode 100644
index 000000000..9a25b6c1f
--- /dev/null
+++ b/src/tools/expand-yaml-anchors/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "expand-yaml-anchors"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+yaml-rust = "0.4.3"
+yaml-merge-keys = "0.4.0"
diff --git a/src/tools/expand-yaml-anchors/src/main.rs b/src/tools/expand-yaml-anchors/src/main.rs
new file mode 100644
index 000000000..8992d165d
--- /dev/null
+++ b/src/tools/expand-yaml-anchors/src/main.rs
@@ -0,0 +1,205 @@
+use std::error::Error;
+use std::path::{Path, PathBuf};
+use yaml_rust::{Yaml, YamlEmitter, YamlLoader};
+
+/// List of directories containing files to expand. The first tuple element is the source
+/// directory, while the second tuple element is the destination directory.
+#[rustfmt::skip]
+static TO_EXPAND: &[(&str, &str)] = &[
+ ("src/ci/github-actions", ".github/workflows"),
+];
+
+/// Name of a special key that will be removed from all the maps in expanded configuration files.
+/// This key can then be used to contain shared anchors.
+static REMOVE_MAP_KEY: &str = "x--expand-yaml-anchors--remove";
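+// For example (illustrative, assuming typical usage of this tool), a source file can
+// stash shared anchors under this key:
+//
+//     x--expand-yaml-anchors--remove:
+//       - &shared-env { CI: true }
+//
+// and reference them elsewhere via `<<: *shared-env`; the key itself (and with it the
+// anchor definitions) is stripped from the generated output.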
+
+/// Message that will be included at the top of all the expanded files. {source} will be replaced
+/// with the source filename relative to the base path.
+static HEADER_MESSAGE: &str = "\
+#############################################################
+# WARNING: automatically generated file, DO NOT CHANGE! #
+#############################################################
+
+# This file was automatically generated by the expand-yaml-anchors tool. The
+# source file that generated this one is:
+#
+# {source}
+#
+# Once you make changes to that file you need to run:
+#
+# ./x.py run src/tools/expand-yaml-anchors/
+#
+# The CI build will fail if the tool is not run after changes to this file.
+
+";
+
+enum Mode {
+ Check,
+ Generate,
+}
+
+struct App {
+ mode: Mode,
+ base: PathBuf,
+}
+
+impl App {
+ fn from_args() -> Result<Self, Box<dyn Error>> {
+ // Parse CLI arguments
+ let args = std::env::args().skip(1).collect::<Vec<_>>();
+ let (mode, base) = match args.iter().map(|s| s.as_str()).collect::<Vec<_>>().as_slice() {
+ ["generate", ref base] => (Mode::Generate, PathBuf::from(base)),
+ ["check", ref base] => (Mode::Check, PathBuf::from(base)),
+ _ => {
+ eprintln!("usage: expand-yaml-anchors <source-dir> <dest-dir>");
+ std::process::exit(1);
+ }
+ };
+
+ Ok(App { mode, base })
+ }
+
+ fn run(&self) -> Result<(), Box<dyn Error>> {
+ for (source, dest) in TO_EXPAND {
+ let source = self.base.join(source);
+ let dest = self.base.join(dest);
+ for entry in std::fs::read_dir(&source)? {
+ let path = entry?.path();
+ if !path.is_file() || path.extension().and_then(|e| e.to_str()) != Some("yml") {
+ continue;
+ }
+
+ let dest_path = dest.join(path.file_name().unwrap());
+ self.expand(&path, &dest_path).with_context(|| match self.mode {
+ Mode::Generate => format!(
+ "failed to expand {} into {}",
+ self.path(&path),
+ self.path(&dest_path)
+ ),
+ Mode::Check => format!(
+ "{} is not up to date; please run \
+ `x.py run src/tools/expand-yaml-anchors`.",
+ self.path(&dest_path)
+ ),
+ })?;
+ }
+ }
+ Ok(())
+ }
+
+ fn expand(&self, source: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
+ let content = std::fs::read_to_string(source)
+ .with_context(|| format!("failed to read {}", self.path(source)))?;
+
+ let mut buf =
+ HEADER_MESSAGE.replace("{source}", &self.path(source).to_string().replace("\\", "/"));
+
+ let documents = YamlLoader::load_from_str(&content)
+ .with_context(|| format!("failed to parse {}", self.path(source)))?;
+ for mut document in documents.into_iter() {
+ document = yaml_merge_keys::merge_keys(document)
+ .with_context(|| format!("failed to expand {}", self.path(source)))?;
+ document = filter_document(document);
+
+ YamlEmitter::new(&mut buf).dump(&document).map_err(|err| WithContext {
+ context: "failed to serialize the expanded yaml".into(),
+ source: Box::new(err),
+ })?;
+ buf.push('\n');
+ }
+
+ match self.mode {
+ Mode::Check => {
+ let old = std::fs::read_to_string(dest)
+ .with_context(|| format!("failed to read {}", self.path(dest)))?;
+ if old != buf {
+ return Err(Box::new(StrError(format!(
+ "{} and {} are different",
+ self.path(source),
+ self.path(dest),
+ ))));
+ }
+ }
+ Mode::Generate => {
+ std::fs::write(dest, buf.as_bytes())
+ .with_context(|| format!("failed to write to {}", self.path(dest)))?;
+ }
+ }
+ Ok(())
+ }
+
+ fn path<'a>(&self, path: &'a Path) -> impl std::fmt::Display + 'a {
+ path.strip_prefix(&self.base).unwrap_or(path).display()
+ }
+}
+
+fn filter_document(document: Yaml) -> Yaml {
+ match document {
+ Yaml::Hash(map) => Yaml::Hash(
+ map.into_iter()
+ .filter(|(key, _)| {
+ if let Yaml::String(string) = &key { string != REMOVE_MAP_KEY } else { true }
+ })
+ .map(|(key, value)| (filter_document(key), filter_document(value)))
+ .collect(),
+ ),
+ Yaml::Array(vec) => Yaml::Array(vec.into_iter().map(filter_document).collect()),
+ other => other,
+ }
+}
+
+fn main() {
+ if let Err(err) = App::from_args().and_then(|app| app.run()) {
+ eprintln!("error: {}", err);
+
+ let mut source = err.as_ref() as &dyn Error;
+ while let Some(err) = source.source() {
+ eprintln!("caused by: {}", err);
+ source = err;
+ }
+
+ std::process::exit(1);
+ }
+}
+
+#[derive(Debug)]
+struct StrError(String);
+
+impl Error for StrError {}
+
+impl std::fmt::Display for StrError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.0, f)
+ }
+}
+
+#[derive(Debug)]
+struct WithContext {
+ context: String,
+ source: Box<dyn Error>,
+}
+
+impl std::fmt::Display for WithContext {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.context)
+ }
+}
+
+impl Error for WithContext {
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
+ Some(self.source.as_ref())
+ }
+}
+
+pub(crate) trait ResultExt<T> {
+ fn with_context<F: FnOnce() -> String>(self, f: F) -> Result<T, Box<dyn Error>>;
+}
+
+impl<T, E: Into<Box<dyn Error>>> ResultExt<T> for Result<T, E> {
+ fn with_context<F: FnOnce() -> String>(self, f: F) -> Result<T, Box<dyn Error>> {
+ match self {
+ Ok(ok) => Ok(ok),
+ Err(err) => Err(WithContext { source: err.into(), context: f() }.into()),
+ }
+ }
+}
diff --git a/src/tools/html-checker/Cargo.toml b/src/tools/html-checker/Cargo.toml
new file mode 100644
index 000000000..72d61d9bd
--- /dev/null
+++ b/src/tools/html-checker/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "html-checker"
+version = "0.1.0"
+edition = "2021"
+
+[[bin]]
+name = "html-checker"
+path = "main.rs"
+
+[dependencies]
+walkdir = "2"
+rayon = "1.5"
diff --git a/src/tools/html-checker/main.rs b/src/tools/html-checker/main.rs
new file mode 100644
index 000000000..9b4d2c525
--- /dev/null
+++ b/src/tools/html-checker/main.rs
@@ -0,0 +1,125 @@
+use rayon::iter::{ParallelBridge, ParallelIterator};
+use std::env;
+use std::path::Path;
+use std::process::{Command, Output};
+
+fn check_html_file(file: &Path) -> usize {
+ let to_mute = &[
+ // "disabled" on <link> or "autocomplete" on <select> emit this warning
+ "PROPRIETARY_ATTRIBUTE",
+        // It complains when multiple links on the same page point to the same anchor, for some reason...
+ "ANCHOR_NOT_UNIQUE",
+ // If a <span> contains only HTML elements and no text, it complains about it.
+ "TRIM_EMPTY_ELEMENT",
+ // FIXME: the three next warnings are about <pre> elements which are not supposed to
+ // contain HTML. The solution here would be to replace them with a <div>
+ "MISSING_ENDTAG_BEFORE",
+ "INSERTING_TAG",
+ "DISCARDING_UNEXPECTED",
+ // This error is caused by nesting the Notable Traits tooltip within an <h4> tag.
+ // The solution is to avoid doing that, but we need to have the <h4> tags for accessibility
+ // reasons, and we need the Notable Traits tooltip to help everyone understand the Iterator
+        // combinators.
+ "TAG_NOT_ALLOWED_IN",
+ ];
+ let to_mute_s = to_mute.join(",");
+ let mut command = Command::new("tidy");
+ command
+ .arg("-errors")
+ .arg("-quiet")
+ .arg("--mute-id") // this option is useful in case we want to mute more warnings
+ .arg("yes")
+ .arg("--mute")
+ .arg(&to_mute_s)
+ .arg(file);
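+    // The resulting invocation looks roughly like this (illustrative):
+    //     tidy -errors -quiet --mute-id yes --mute PROPRIETARY_ATTRIBUTE,ANCHOR_NOT_UNIQUE,... <file>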
+
+ let Output { status, stderr, .. } = command.output().expect("failed to run tidy command");
+ if status.success() {
+ 0
+ } else {
+ let stderr = String::from_utf8(stderr).expect("String::from_utf8 failed...");
+ if stderr.is_empty() && status.code() != Some(2) {
+ 0
+ } else {
+ eprintln!(
+ "=> Errors for `{}` (error code: {}) <=",
+ file.display(),
+ status.code().unwrap_or(-1)
+ );
+ eprintln!("{}", stderr);
+ stderr.lines().count()
+ }
+ }
+}
+
+const DOCS_TO_CHECK: &[&str] =
+ &["alloc", "core", "proc_macro", "implementors", "src", "std", "test"];
+
+// Returns the number of files read and the number of errors.
+fn find_all_html_files(dir: &Path) -> (usize, usize) {
+ walkdir::WalkDir::new(dir)
+ .into_iter()
+ .filter_entry(|e| {
+ e.depth() != 1
+ || e.file_name()
+ .to_str()
+ .map(|s| DOCS_TO_CHECK.into_iter().any(|d| *d == s))
+ .unwrap_or(false)
+ })
+ .par_bridge()
+ .map(|entry| {
+ let entry = entry.expect("failed to read file");
+ if !entry.file_type().is_file() {
+ return (0, 0);
+ }
+ let entry = entry.path();
+ // (Number of files processed, number of errors)
+ if entry.extension().and_then(|s| s.to_str()) == Some("html") {
+ (1, check_html_file(&entry))
+ } else {
+ (0, 0)
+ }
+ })
+ .reduce(|| (0, 0), |a, b| (a.0 + b.0, a.1 + b.1))
+}
+
+/// The default `tidy` command on macOS is too old to support the `mute-id` and `mute` options.
+/// The `tidy` pre-installed on macOS Monterey was released on 31 October 2006; the same date is
+/// reported seven years earlier at <https://stackoverflow.com/questions/22283382/overwrite-osx-tidy>.
+/// Accordingly, a macOS environment using the pre-installed `tidy` should immediately abort the
+/// HTML checker and show a hint to install a newer one.
+#[cfg(target_os = "macos")]
+fn check_tidy_version() -> Result<(), String> {
+ let output = Command::new("tidy").arg("-v").output().expect("failed to run tidy command");
+ let version = String::from_utf8(output.stdout).expect("failed to read version of tidy command");
+ if version.contains("HTML Tidy for Mac OS X released on 31 October 2006") {
+ eprintln!("The pre-installed HTML Tidy for macOS is not supported.");
+ eprintln!("Consider installing a newer one and re-running.");
+ eprintln!("If you're using Homebrew, you can install it by the following command:");
+ eprintln!(" brew install tidy-html5");
+ eprintln!();
+ Err("HTML check failed: 1 error".to_string())
+ } else {
+ Ok(())
+ }
+}
+
+fn main() -> Result<(), String> {
+ let args = env::args().collect::<Vec<_>>();
+ if args.len() != 2 {
+ return Err(format!("Usage: {} <doc folder>", args[0]));
+ }
+ #[cfg(target_os = "macos")]
+ check_tidy_version()?;
+
+ println!("Running HTML checker...");
+
+ let (files_read, errors) = find_all_html_files(&Path::new(&args[1]));
+ println!("Done! Read {} files...", files_read);
+ if errors > 0 {
+ Err(format!("HTML check failed: {} errors", errors))
+ } else {
+ println!("No error found!");
+ Ok(())
+ }
+}
diff --git a/src/tools/jsondocck/Cargo.toml b/src/tools/jsondocck/Cargo.toml
new file mode 100644
index 000000000..ccabe6483
--- /dev/null
+++ b/src/tools/jsondocck/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "jsondocck"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+jsonpath_lib = "0.2"
+getopts = "0.2"
+regex = "1.4"
+shlex = "1.0"
+serde_json = "1.0"
+fs-err = "2.5.0"
+once_cell = "1.0"
diff --git a/src/tools/jsondocck/src/cache.rs b/src/tools/jsondocck/src/cache.rs
new file mode 100644
index 000000000..a188750c5
--- /dev/null
+++ b/src/tools/jsondocck/src/cache.rs
@@ -0,0 +1,77 @@
+use crate::error::CkError;
+use serde_json::Value;
+use std::collections::HashMap;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use fs_err as fs;
+
+#[derive(Debug)]
+pub struct Cache {
+ root: PathBuf,
+ files: HashMap<PathBuf, String>,
+ values: HashMap<PathBuf, Value>,
+ pub variables: HashMap<String, Value>,
+ last_path: Option<PathBuf>,
+}
+
+impl Cache {
+ /// Create a new cache, used to read files only once and otherwise store their contents.
+ pub fn new(doc_dir: &str) -> Cache {
+ Cache {
+ root: Path::new(doc_dir).to_owned(),
+ files: HashMap::new(),
+ values: HashMap::new(),
+ variables: HashMap::new(),
+ last_path: None,
+ }
+ }
+
+ fn resolve_path(&mut self, path: &String) -> PathBuf {
+ if path != "-" {
+ let resolve = self.root.join(path);
+ self.last_path = Some(resolve.clone());
+ resolve
+ } else {
+ self.last_path
+ .as_ref()
+ // FIXME: Point to a line number
+ .expect("No last path set. Make sure to specify a full path before using `-`")
+ .clone()
+ }
+ }
+
+ fn read_file(&mut self, path: PathBuf) -> Result<String, io::Error> {
+ if let Some(f) = self.files.get(&path) {
+ return Ok(f.clone());
+ }
+
+ let file = fs::read_to_string(&path)?;
+
+ self.files.insert(path, file.clone());
+
+ Ok(file)
+ }
+
+ /// Get the text from a file. If called multiple times, the file will only be read once
+ pub fn get_file(&mut self, path: &String) -> Result<String, io::Error> {
+ let path = self.resolve_path(path);
+ self.read_file(path)
+ }
+
+ /// Parse the JSON from a file. If called multiple times, the file will only be read once.
+ pub fn get_value(&mut self, path: &String) -> Result<Value, CkError> {
+ let path = self.resolve_path(path);
+
+ if let Some(v) = self.values.get(&path) {
+ return Ok(v.clone());
+ }
+
+ let content = self.read_file(path.clone())?;
+ let val = serde_json::from_str::<Value>(&content)?;
+
+ self.values.insert(path, val.clone());
+
+ Ok(val)
+ }
+}
diff --git a/src/tools/jsondocck/src/config.rs b/src/tools/jsondocck/src/config.rs
new file mode 100644
index 000000000..9b3ba3f3f
--- /dev/null
+++ b/src/tools/jsondocck/src/config.rs
@@ -0,0 +1,37 @@
+use getopts::Options;
+
+#[derive(Debug)]
+pub struct Config {
+ /// The directory documentation output was generated in
+ pub doc_dir: String,
+ /// The file documentation was generated for, with docck commands to check
+ pub template: String,
+}
+
+/// Create a Config from a vector of command-line arguments
+pub fn parse_config(args: Vec<String>) -> Config {
+ let mut opts = Options::new();
+ opts.reqopt("", "doc-dir", "Path to the documentation directory", "PATH")
+ .reqopt("", "template", "Path to the template file", "PATH")
+ .optflag("h", "help", "show this message");
+
+ let (argv0, args_) = args.split_first().unwrap();
+ if args.len() == 1 {
+ let message = format!("Usage: {} <doc-dir> <template>", argv0);
+ println!("{}", opts.usage(&message));
+ std::process::exit(1);
+ }
+
+ let matches = opts.parse(args_).unwrap();
+
+ if matches.opt_present("h") || matches.opt_present("help") {
+ let message = format!("Usage: {} <doc-dir> <template>", argv0);
+ println!("{}", opts.usage(&message));
+ std::process::exit(1);
+ }
+
+ Config {
+ doc_dir: matches.opt_str("doc-dir").unwrap(),
+ template: matches.opt_str("template").unwrap(),
+ }
+}
diff --git a/src/tools/jsondocck/src/error.rs b/src/tools/jsondocck/src/error.rs
new file mode 100644
index 000000000..53b9af287
--- /dev/null
+++ b/src/tools/jsondocck/src/error.rs
@@ -0,0 +1,28 @@
+use crate::Command;
+use std::error::Error;
+use std::fmt;
+
+#[derive(Debug)]
+pub enum CkError {
+ /// A check failed. File didn't exist or failed to match the command
+ FailedCheck(String, Command),
+ /// An error triggered by some other error
+ Induced(Box<dyn Error>),
+}
+
+impl fmt::Display for CkError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CkError::FailedCheck(msg, cmd) => {
+ write!(f, "Failed check: {} on line {}", msg, cmd.lineno)
+ }
+ CkError::Induced(err) => write!(f, "Check failed: {}", err),
+ }
+ }
+}
+
+impl<T: Error + 'static> From<T> for CkError {
+ fn from(err: T) -> CkError {
+ CkError::Induced(Box::new(err))
+ }
+}
diff --git a/src/tools/jsondocck/src/main.rs b/src/tools/jsondocck/src/main.rs
new file mode 100644
index 000000000..c44624666
--- /dev/null
+++ b/src/tools/jsondocck/src/main.rs
@@ -0,0 +1,339 @@
+use jsonpath_lib::select;
+use once_cell::sync::Lazy;
+use regex::{Regex, RegexBuilder};
+use serde_json::Value;
+use std::borrow::Cow;
+use std::{env, fmt, fs};
+
+mod cache;
+mod config;
+mod error;
+
+use cache::Cache;
+use config::parse_config;
+use error::CkError;
+
+fn main() -> Result<(), String> {
+ let config = parse_config(env::args().collect());
+
+ let mut failed = Vec::new();
+ let mut cache = Cache::new(&config.doc_dir);
+ let commands = get_commands(&config.template)
+ .map_err(|_| format!("Jsondocck failed for {}", &config.template))?;
+
+ for command in commands {
+ if let Err(e) = check_command(command, &mut cache) {
+ failed.push(e);
+ }
+ }
+
+ if failed.is_empty() {
+ Ok(())
+ } else {
+ for i in failed {
+ eprintln!("{}", i);
+ }
+ Err(format!("Jsondocck failed for {}", &config.template))
+ }
+}
+
+#[derive(Debug)]
+pub struct Command {
+ negated: bool,
+ kind: CommandKind,
+ args: Vec<String>,
+ lineno: usize,
+}
+
+#[derive(Debug)]
+pub enum CommandKind {
+ Has,
+ Count,
+ Is,
+ Set,
+}
+
+impl CommandKind {
+ fn validate(&self, args: &[String], command_num: usize, lineno: usize) -> bool {
+ let count = match self {
+ CommandKind::Has => (1..=3).contains(&args.len()),
+ CommandKind::Count | CommandKind::Is => 3 == args.len(),
+ CommandKind::Set => 4 == args.len(),
+ };
+
+ if !count {
+ print_err(&format!("Incorrect number of arguments to `@{}`", self), lineno);
+ return false;
+ }
+
+ if args[0] == "-" && command_num == 0 {
+ print_err(&format!("Tried to use the previous path in the first command"), lineno);
+ return false;
+ }
+
+ if let CommandKind::Count = self {
+ if args[2].parse::<usize>().is_err() {
+ print_err(
+ &format!("Third argument to @count must be a valid usize (got `{}`)", args[2]),
+ lineno,
+ );
+ return false;
+ }
+ }
+
+ true
+ }
+}
+
+impl fmt::Display for CommandKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let text = match self {
+ CommandKind::Has => "has",
+ CommandKind::Count => "count",
+ CommandKind::Is => "is",
+ CommandKind::Set => "set",
+ };
+ write!(f, "{}", text)
+ }
+}
+
+static LINE_PATTERN: Lazy<Regex> = Lazy::new(|| {
+ RegexBuilder::new(
+ r#"
+ \s(?P<invalid>!?)@(?P<negated>!?)
+ (?P<cmd>[A-Za-z]+(?:-[A-Za-z]+)*)
+ (?P<args>.*)$
+ "#,
+ )
+ .ignore_whitespace(true)
+ .unicode(true)
+ .build()
+ .unwrap()
+});
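+// For example (illustrative), a template line such as `// @has foo.json "$.index"` is
+// matched with `cmd` capturing `has` and `args` capturing ` foo.json "$.index"`, while
+// `// @!has foo.json "$.index"` additionally sets the `negated` capture to `!`.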
+
+fn print_err(msg: &str, lineno: usize) {
+ eprintln!("Invalid command: {} on line {}", msg, lineno)
+}
+
+/// Get a list of commands from a file. Does the work of ensuring the commands
+/// are syntactically valid.
+fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
+ let mut commands = Vec::new();
+ let mut errors = false;
+ let file = fs::read_to_string(template).unwrap();
+
+ for (lineno, line) in file.split('\n').enumerate() {
+ let lineno = lineno + 1;
+
+ let cap = match LINE_PATTERN.captures(line) {
+ Some(c) => c,
+ None => continue,
+ };
+
+ let negated = cap.name("negated").unwrap().as_str() == "!";
+ let cmd = cap.name("cmd").unwrap().as_str();
+
+ let cmd = match cmd {
+ "has" => CommandKind::Has,
+ "count" => CommandKind::Count,
+ "is" => CommandKind::Is,
+ "set" => CommandKind::Set,
+ _ => {
+ print_err(&format!("Unrecognized command name `@{}`", cmd), lineno);
+ errors = true;
+ continue;
+ }
+ };
+
+ if let Some(m) = cap.name("invalid") {
+ if m.as_str() == "!" {
+ print_err(
+ &format!(
+ "`!@{0}{1}`, (help: try with `@!{1}`)",
+ if negated { "!" } else { "" },
+ cmd,
+ ),
+ lineno,
+ );
+ errors = true;
+ continue;
+ }
+ }
+
+ let args = cap.name("args").map_or(Some(vec![]), |m| shlex::split(m.as_str()));
+
+ let args = match args {
+ Some(args) => args,
+ None => {
+ print_err(
+ &format!(
+ "Invalid arguments to shlex::split: `{}`",
+ cap.name("args").unwrap().as_str()
+ ),
+ lineno,
+ );
+ errors = true;
+ continue;
+ }
+ };
+
+ if !cmd.validate(&args, commands.len(), lineno) {
+ errors = true;
+ continue;
+ }
+
+ commands.push(Command { negated, kind: cmd, args, lineno })
+ }
+
+ if !errors { Ok(commands) } else { Err(()) }
+}
+
+/// Performs the actual work of ensuring a command passes. Generally assumes the command
+/// is syntactically valid.
+fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
+ // FIXME: Be more granular about why, (e.g. syntax error, count not equal)
+ let result = match command.kind {
+ CommandKind::Has => {
+ match command.args.len() {
+ // @has <path> = file existence
+ 1 => cache.get_file(&command.args[0]).is_ok(),
+ // @has <path> <jsonpath> = check path exists
+ 2 => {
+ let val = cache.get_value(&command.args[0])?;
+ let results = select(&val, &command.args[1]).unwrap();
+ !results.is_empty()
+ }
+ // @has <path> <jsonpath> <value> = check *any* item matched by path equals value
+ 3 => {
+ let val = cache.get_value(&command.args[0])?;
+ let results = select(&val, &command.args[1]).unwrap();
+ let pat = string_to_value(&command.args[2], cache);
+ let has = results.contains(&pat.as_ref());
+ // Give better error for when @has check fails
+ if !command.negated && !has {
+ return Err(CkError::FailedCheck(
+ format!(
+ "{} matched to {:?} but didn't have {:?}",
+ &command.args[1],
+ results,
+ pat.as_ref()
+ ),
+ command,
+ ));
+ } else {
+ has
+ }
+ }
+ _ => unreachable!(),
+ }
+ }
+ CommandKind::Count => {
+ // @count <path> <jsonpath> <count> = Check that the jsonpath matches exactly [count] times
+ assert_eq!(command.args.len(), 3);
+ let expected: usize = command.args[2].parse().unwrap();
+
+ let val = cache.get_value(&command.args[0])?;
+ let results = select(&val, &command.args[1]).unwrap();
+ let eq = results.len() == expected;
+ if !command.negated && !eq {
+ return Err(CkError::FailedCheck(
+ format!(
+ "`{}` matched to `{:?}` with length {}, but expected length {}",
+ &command.args[1],
+ results,
+ results.len(),
+ expected
+ ),
+ command,
+ ));
+ } else {
+ eq
+ }
+ }
+ CommandKind::Is => {
+ // @has <path> <jsonpath> <value> = check *exactly one* item matched by path, and it equals value
+ assert_eq!(command.args.len(), 3);
+ let val = cache.get_value(&command.args[0])?;
+ let results = select(&val, &command.args[1]).unwrap();
+ let pat = string_to_value(&command.args[2], cache);
+ let is = results.len() == 1 && results[0] == pat.as_ref();
+ if !command.negated && !is {
+ return Err(CkError::FailedCheck(
+ format!(
+ "{} matched to {:?}, but expected {:?}",
+ &command.args[1],
+ results,
+ pat.as_ref()
+ ),
+ command,
+ ));
+ } else {
+ is
+ }
+ }
+ CommandKind::Set => {
+ // @set <name> = <path> <jsonpath>
+ assert_eq!(command.args.len(), 4);
+ assert_eq!(command.args[1], "=", "Expected an `=`");
+ let val = cache.get_value(&command.args[2])?;
+ let results = select(&val, &command.args[3]).unwrap();
+ assert_eq!(
+ results.len(),
+ 1,
+ "Expected 1 match for `{}` (because of @set): matched to {:?}",
+ command.args[3],
+ results
+ );
+ match results.len() {
+ 0 => false,
+ 1 => {
+ let r = cache.variables.insert(command.args[0].clone(), results[0].clone());
+ assert!(r.is_none(), "Name collision: {} is duplicated", command.args[0]);
+ true
+ }
+ _ => {
+ panic!(
+ "Got multiple results in `@set` for `{}`: {:?}",
+ &command.args[3], results
+ );
+ }
+ }
+ }
+ };
+
+ if result == command.negated {
+ if command.negated {
+ Err(CkError::FailedCheck(
+ format!(
+ "`@!{} {}` matched when it shouldn't",
+ command.kind,
+ command.args.join(" ")
+ ),
+ command,
+ ))
+ } else {
+ // FIXME: In the future, try 'peeling back' each step, and see at what level the match failed
+ Err(CkError::FailedCheck(
+ format!(
+ "`@{} {}` didn't match when it should",
+ command.kind,
+ command.args.join(" ")
+ ),
+ command,
+ ))
+ }
+ } else {
+ Ok(())
+ }
+}
+
+fn string_to_value<'a>(s: &str, cache: &'a Cache) -> Cow<'a, Value> {
+ if s.starts_with("$") {
+ Cow::Borrowed(&cache.variables.get(&s[1..]).unwrap_or_else(|| {
+ // FIXME(adotinthevoid): Show line number
+ panic!("No variable: `{}`. Current state: `{:?}`", &s[1..], cache.variables)
+ }))
+ } else {
+ Cow::Owned(serde_json::from_str(s).expect(&format!("Cannot convert `{}` to json", s)))
+ }
+}
diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml
new file mode 100644
index 000000000..1d8f2f918
--- /dev/null
+++ b/src/tools/linkchecker/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "linkchecker"
+version = "0.1.0"
+edition = "2021"
+
+[[bin]]
+name = "linkchecker"
+path = "main.rs"
+
+[dependencies]
+regex = "1"
+once_cell = "1"
diff --git a/src/tools/linkchecker/linkcheck.sh b/src/tools/linkchecker/linkcheck.sh
new file mode 100755
index 000000000..9eeebf444
--- /dev/null
+++ b/src/tools/linkchecker/linkcheck.sh
@@ -0,0 +1,116 @@
+#!/bin/sh
+#
+# This is a script that can be used in each book's CI to validate links using
+# the same tool as rust-lang/rust.
+#
+# This requires the rust-docs rustup component to be installed in the nightly
+# toolchain.
+#
+# Usage:
+# ./linkcheck.sh <name-of-book>
+#
+# Options:
+#
+# -i "Iterative" mode. The script will not clean up after it is done so
+# you can inspect the result, and re-run more quickly.
+#
+# --all Check all books. This can help make sure you don't break links
+# from other books into your book.
+
+set -e
+
+if [ ! -f book.toml ] && [ ! -f src/SUMMARY.md ]
+then
+ echo "Run command in root directory of the book."
+ exit 1
+fi
+
+html_dir="$(rustc +nightly --print sysroot)/share/doc/rust/html"
+
+if [ ! -d "$html_dir" ]
+then
+ echo "HTML docs are missing from sysroot: $html_dir"
+ echo "Make sure the nightly rust-docs rustup component is installed."
+ exit 1
+fi
+
+# Avoid failure caused by newer mdbook.
+export MDBOOK_OUTPUT__HTML__INPUT_404=""
+
+book_name=""
+# Iterative will avoid cleaning up, so you can quickly run it repeatedly.
+iterative=0
+# If "1", test all books, else only this book.
+all_books=0
+
+while [ "$1" != "" ]
+do
+ case "$1" in
+ -i)
+ iterative=1
+ ;;
+ --all)
+ all_books=1
+ ;;
+ *)
+ if [ -n "$book_name" ]
+ then
+ echo "only one argument allowed"
+ exit 1
+ fi
+ book_name="$1"
+ ;;
+ esac
+ shift
+done
+
+if [ -z "$book_name" ]
+then
+ echo "usage: $0 <name-of-book>"
+ exit 1
+fi
+
+if [ ! -d "$html_dir/$book_name" ]
+then
+ echo "book name \"$book_name\" not found in sysroot \"$html_dir\""
+ exit 1
+fi
+
+if [ "$iterative" = "0" ]
+then
+ echo "Cleaning old directories..."
+ rm -rf linkcheck linkchecker
+fi
+
+if [ ! -e "linkchecker/main.rs" ] || [ "$iterative" = "0" ]
+then
+ echo "Downloading linkchecker source..."
+ nightly_hash=$(rustc +nightly -Vv | grep commit-hash | cut -f2 -d" ")
+ url="https://raw.githubusercontent.com/rust-lang/rust"
+ mkdir linkchecker
+ curl -o linkchecker/Cargo.toml ${url}/${nightly_hash}/src/tools/linkchecker/Cargo.toml
+ curl -o linkchecker/main.rs ${url}/${nightly_hash}/src/tools/linkchecker/main.rs
+fi
+
+echo "Building book \"$book_name\"..."
+mdbook build
+
+cp -R "$html_dir" linkcheck
+rm -rf "linkcheck/$book_name"
+cp -R book "linkcheck/$book_name"
+
+if [ "$all_books" = "1" ]
+then
+ check_path="linkcheck"
+else
+ check_path="linkcheck/$book_name"
+fi
+echo "Running linkchecker on \"$check_path\"..."
+cargo run --release --manifest-path=linkchecker/Cargo.toml -- "$check_path"
+
+if [ "$iterative" = "0" ]
+then
+ rm -rf linkcheck linkchecker
+fi
+
+echo "Link check completed successfully!"
diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs
new file mode 100644
index 000000000..a7c78d80c
--- /dev/null
+++ b/src/tools/linkchecker/main.rs
@@ -0,0 +1,560 @@
+//! Script to check the validity of `href` links in our HTML documentation.
+//!
+//! In the past we've been quite prone to introducing broken links, because most
+//! of them are added manually rather than automatically. As files move over
+//! time or APIs change, old links become stale or broken. The purpose of this
+//! script is to check all relative links in our documentation and make sure they
+//! actually point to a valid place.
+//!
+//! Currently this doesn't do any actual HTML parsing or anything fancy like
+//! that; it just uses a simple "regex" to search for `href` and `id` attributes.
+//! These values are then translated to file URLs if possible, and the
+//! destination is asserted to exist.
+//!
+//! A few exceptions are allowed, as there are known bugs in rustdoc, but this
+//! should catch the majority of "broken link" cases.
+
+use std::cell::RefCell;
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::fs;
+use std::io::ErrorKind;
+use std::path::{Component, Path, PathBuf};
+use std::rc::Rc;
+use std::time::Instant;
+
+use once_cell::sync::Lazy;
+use regex::Regex;
+
+// Add linkcheck exceptions here
+// If at all possible you should use intra-doc links to avoid linkcheck issues; these
+// are cases where that does not work.
+// [(generated_documentation_page, &[broken_links])]
+#[rustfmt::skip]
+const LINKCHECK_EXCEPTIONS: &[(&str, &[&str])] = &[
+ // These try to link to std::collections, but are defined in alloc
+ // https://github.com/rust-lang/rust/issues/74481
+ ("std/collections/btree_map/struct.BTreeMap.html", &["#insert-and-complex-keys"]),
+ ("std/collections/btree_set/struct.BTreeSet.html", &["#insert-and-complex-keys"]),
+ ("alloc/collections/btree_map/struct.BTreeMap.html", &["#insert-and-complex-keys"]),
+ ("alloc/collections/btree_set/struct.BTreeSet.html", &["#insert-and-complex-keys"]),
+
+ // These try to link to various things in std, but are defined in core.
+ // The docs in std::primitive use proper intra-doc links, so these seem fine to special-case.
+ // Most these are broken because liballoc uses `#[lang_item]` magic to define things on
+ // primitives that aren't available in core.
+ ("alloc/slice/trait.Join.html", &["#method.join"]),
+ ("alloc/slice/trait.Concat.html", &["#method.concat"]),
+ ("alloc/slice/index.html", &["#method.concat", "#method.join"]),
+ ("alloc/vec/struct.Vec.html", &["#method.sort_by_key", "#method.sort_by_cached_key"]),
+ ("core/primitive.str.html", &["#method.to_ascii_uppercase", "#method.to_ascii_lowercase"]),
+ ("core/primitive.slice.html", &["#method.to_ascii_uppercase", "#method.to_ascii_lowercase",
+ "core/slice::sort_by_key", "core\\slice::sort_by_key",
+ "#method.sort_by_cached_key"]),
+];
+
+#[rustfmt::skip]
+const INTRA_DOC_LINK_EXCEPTIONS: &[(&str, &[&str])] = &[
+ // This will never have links that are not in other pages.
+ // To avoid repeating the exceptions twice, an empty list means all broken links are allowed.
+ ("reference/print.html", &[]),
+    // All the reference 'links' are actually EBNF highlighted as code
+ ("reference/comments.html", &[
+ "/</code> <code>!",
+ "*</code> <code>!",
+ ]),
+ ("reference/identifiers.html", &[
+ "a</code>-<code>z</code> <code>A</code>-<code>Z",
+ "a</code>-<code>z</code> <code>A</code>-<code>Z</code> <code>0</code>-<code>9</code> <code>_",
+ "a</code>-<code>z</code> <code>A</code>-<code>Z</code>] [<code>a</code>-<code>z</code> <code>A</code>-<code>Z</code> <code>0</code>-<code>9</code> <code>_",
+ ]),
+ ("reference/tokens.html", &[
+ "0</code>-<code>1",
+ "0</code>-<code>7",
+ "0</code>-<code>9",
+ "0</code>-<code>9",
+ "0</code>-<code>9</code> <code>a</code>-<code>f</code> <code>A</code>-<code>F",
+ ]),
+ ("reference/notation.html", &[
+ "b</code> <code>B",
+ "a</code>-<code>z",
+ ]),
+ // This is being used in the sense of 'inclusive range', not a markdown link
+ ("core/ops/struct.RangeInclusive.html", &["begin</code>, <code>end"]),
+ ("std/ops/struct.RangeInclusive.html", &["begin</code>, <code>end"]),
+ ("core/slice/trait.SliceIndex.html", &["begin</code>, <code>end"]),
+ ("alloc/slice/trait.SliceIndex.html", &["begin</code>, <code>end"]),
+ ("std/slice/trait.SliceIndex.html", &["begin</code>, <code>end"]),
+ ("core/primitive.str.html", &["begin</code>, <code>end"]),
+ ("std/primitive.str.html", &["begin</code>, <code>end"]),
+
+];
+
+static BROKEN_INTRA_DOC_LINK: Lazy<Regex> =
+ Lazy::new(|| Regex::new(r#"\[<code>(.*)</code>\]"#).unwrap());
+
+macro_rules! t {
+ ($e:expr) => {
+ match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
+ }
+ };
+}
+
+fn main() {
+ let docs = env::args_os().nth(1).expect("doc path should be first argument");
+ let docs = env::current_dir().unwrap().join(docs);
+ let mut checker = Checker { root: docs.clone(), cache: HashMap::new() };
+ let mut report = Report {
+ errors: 0,
+ start: Instant::now(),
+ html_files: 0,
+ html_redirects: 0,
+ links_checked: 0,
+ links_ignored_external: 0,
+ links_ignored_exception: 0,
+ intra_doc_exceptions: 0,
+ };
+ checker.walk(&docs, &mut report);
+ report.report();
+ if report.errors != 0 {
+ println!("found some broken links");
+ std::process::exit(1);
+ }
+}
+
+struct Checker {
+ root: PathBuf,
+ cache: Cache,
+}
+
+struct Report {
+ errors: u32,
+ start: Instant,
+ html_files: u32,
+ html_redirects: u32,
+ links_checked: u32,
+ links_ignored_external: u32,
+ links_ignored_exception: u32,
+ intra_doc_exceptions: u32,
+}
+
+/// A cache entry.
+enum FileEntry {
+ /// An HTML file.
+ ///
+ /// This includes the contents of the HTML file, and an optional set of
+ /// HTML IDs. The IDs are used for checking fragments. They are computed
+ /// as-needed. The source is discarded (replaced with an empty string)
+    /// after the file has been checked, to conserve memory.
+ HtmlFile { source: Rc<String>, ids: RefCell<HashSet<String>> },
+ /// This file is an HTML redirect to the given local path.
+ Redirect { target: PathBuf },
+ /// This is not an HTML file.
+ OtherFile,
+ /// This is a directory.
+ Dir,
+ /// The file doesn't exist.
+ Missing,
+}
+
+/// A cache to speed up file access.
+type Cache = HashMap<String, FileEntry>;
+
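+// Percent-encodes the handful of characters rustdoc may emit in fragments and queries.
+// (Illustrative example: `small_url_encode("<T as Trait>::foo")` returns `%3CT%20as%20Trait%3E::foo`.)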
+fn small_url_encode(s: &str) -> String {
+ s.replace("<", "%3C")
+ .replace(">", "%3E")
+ .replace(" ", "%20")
+ .replace("?", "%3F")
+ .replace("'", "%27")
+ .replace("&", "%26")
+ .replace(",", "%2C")
+ .replace(":", "%3A")
+ .replace(";", "%3B")
+ .replace("[", "%5B")
+ .replace("]", "%5D")
+ .replace("\"", "%22")
+}
+
+impl Checker {
+ /// Primary entry point for walking the filesystem to find HTML files to check.
+ fn walk(&mut self, dir: &Path, report: &mut Report) {
+ for entry in t!(dir.read_dir()).map(|e| t!(e)) {
+ let path = entry.path();
+ // Goes through symlinks
+ let metadata = t!(fs::metadata(&path));
+ if metadata.is_dir() {
+ self.walk(&path, report);
+ } else {
+ self.check(&path, report);
+ }
+ }
+ }
+
+ /// Checks a single file.
+ fn check(&mut self, file: &Path, report: &mut Report) {
+ let (pretty_path, entry) = self.load_file(file, report);
+ let source = match entry {
+ FileEntry::Missing => panic!("missing file {:?} while walking", file),
+ FileEntry::Dir => unreachable!("never with `check` path"),
+ FileEntry::OtherFile => return,
+ FileEntry::Redirect { .. } => return,
+ FileEntry::HtmlFile { source, ids } => {
+ parse_ids(&mut ids.borrow_mut(), &pretty_path, source, report);
+ source.clone()
+ }
+ };
+
+ // Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
+ with_attrs_in_source(&source, " href", |url, i, base| {
+ // Ignore external URLs
+ if url.starts_with("http:")
+ || url.starts_with("https:")
+ || url.starts_with("javascript:")
+ || url.starts_with("ftp:")
+ || url.starts_with("irc:")
+ || url.starts_with("data:")
+ {
+ report.links_ignored_external += 1;
+ return;
+ }
+ report.links_checked += 1;
+ let (url, fragment) = match url.split_once('#') {
+ None => (url, None),
+ Some((url, fragment)) => (url, Some(fragment)),
+ };
+ // NB: the `splitn` always succeeds, even if the delimiter is not present.
+ let url = url.splitn(2, '?').next().unwrap();
+
+ // Once we've plucked out the URL, parse it using our base url and
+ // then try to extract a file path.
+ let mut path = file.to_path_buf();
+ if !base.is_empty() || !url.is_empty() {
+ path.pop();
+ for part in Path::new(base).join(url).components() {
+ match part {
+ Component::Prefix(_) | Component::RootDir => {
+ // Avoid absolute paths as they make the docs not
+ // relocatable by making assumptions on where the docs
+ // are hosted relative to the site root.
+ report.errors += 1;
+ println!(
+ "{}:{}: absolute path - {}",
+ pretty_path,
+ i + 1,
+ Path::new(base).join(url).display()
+ );
+ return;
+ }
+ Component::CurDir => {}
+ Component::ParentDir => {
+ path.pop();
+ }
+ Component::Normal(s) => {
+ path.push(s);
+ }
+ }
+ }
+ }
+
+ let (target_pretty_path, target_entry) = self.load_file(&path, report);
+ let (target_source, target_ids) = match target_entry {
+ FileEntry::Missing => {
+ if is_exception(file, &target_pretty_path) {
+ report.links_ignored_exception += 1;
+ } else {
+ report.errors += 1;
+ println!(
+ "{}:{}: broken link - `{}`",
+ pretty_path,
+ i + 1,
+ target_pretty_path
+ );
+ }
+ return;
+ }
+ FileEntry::Dir => {
+ // Links to directories show as directory listings when viewing
+ // the docs offline so it's best to avoid them.
+ report.errors += 1;
+ println!(
+ "{}:{}: directory link to `{}` \
+ (directory links should use index.html instead)",
+ pretty_path,
+ i + 1,
+ target_pretty_path
+ );
+ return;
+ }
+ FileEntry::OtherFile => return,
+ FileEntry::Redirect { target } => {
+ let t = target.clone();
+ drop(target);
+ let (target, redir_entry) = self.load_file(&t, report);
+ match redir_entry {
+ FileEntry::Missing => {
+ report.errors += 1;
+ println!(
+ "{}:{}: broken redirect from `{}` to `{}`",
+ pretty_path,
+ i + 1,
+ target_pretty_path,
+ target
+ );
+ return;
+ }
+ FileEntry::Redirect { target } => {
+                            // This is a redirect to another redirect; the link
+                            // checker doesn't currently support that, since it
+                            // would require cycle checking, etc.
+ report.errors += 1;
+ println!(
+ "{}:{}: redirect from `{}` to `{}` \
+ which is also a redirect (not supported)",
+ pretty_path,
+ i + 1,
+ target_pretty_path,
+ target.display()
+ );
+ return;
+ }
+ FileEntry::Dir => {
+ report.errors += 1;
+ println!(
+ "{}:{}: redirect from `{}` to `{}` \
+ which is a directory \
+ (directory links should use index.html instead)",
+ pretty_path,
+ i + 1,
+ target_pretty_path,
+ target
+ );
+ return;
+ }
+ FileEntry::OtherFile => return,
+ FileEntry::HtmlFile { source, ids } => (source, ids),
+ }
+ }
+ FileEntry::HtmlFile { source, ids } => (source, ids),
+ };
+
+            // Alright, we've found an HTML file for the target link. If this
+            // is a fragment link, also check that the `id` exists.
+ if let Some(ref fragment) = fragment {
+ // Fragments like `#1-6` are most likely line numbers to be
+ // interpreted by javascript, so we're ignoring these
+ if fragment.splitn(2, '-').all(|f| f.chars().all(|c| c.is_numeric())) {
+ return;
+ }
+
+ parse_ids(&mut target_ids.borrow_mut(), &pretty_path, target_source, report);
+
+ if target_ids.borrow().contains(*fragment) {
+ return;
+ }
+
+ if is_exception(file, &format!("#{}", fragment)) {
+ report.links_ignored_exception += 1;
+ } else {
+ report.errors += 1;
+ print!("{}:{}: broken link fragment ", pretty_path, i + 1);
+ println!("`#{}` pointing to `{}`", fragment, target_pretty_path);
+ };
+ }
+ });
+
+ // Search for intra-doc links that rustdoc didn't warn about
+ // FIXME(#77199, 77200) Rustdoc should just warn about these directly.
+ // NOTE: only looks at one line at a time; in practice this should find most links
+ for (i, line) in source.lines().enumerate() {
+ for broken_link in BROKEN_INTRA_DOC_LINK.captures_iter(line) {
+ if is_intra_doc_exception(file, &broken_link[1]) {
+ report.intra_doc_exceptions += 1;
+ } else {
+ report.errors += 1;
+ print!("{}:{}: broken intra-doc link - ", pretty_path, i + 1);
+ println!("{}", &broken_link[0]);
+ }
+ }
+ }
+        // We don't need the source anymore,
+        // so drop it to reduce memory usage.
+ match self.cache.get_mut(&pretty_path).unwrap() {
+ FileEntry::HtmlFile { source, .. } => *source = Rc::new(String::new()),
+ _ => unreachable!("must be html file"),
+ }
+ }
+
+ /// Load a file from disk, or from the cache if available.
+ fn load_file(&mut self, file: &Path, report: &mut Report) -> (String, &FileEntry) {
+ // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
+ #[cfg(windows)]
+ const ERROR_INVALID_NAME: i32 = 123;
+
+ let pretty_path =
+ file.strip_prefix(&self.root).unwrap_or(&file).to_str().unwrap().to_string();
+
+ let entry =
+ self.cache.entry(pretty_path.clone()).or_insert_with(|| match fs::metadata(file) {
+ Ok(metadata) if metadata.is_dir() => FileEntry::Dir,
+ Ok(_) => {
+ if file.extension().and_then(|s| s.to_str()) != Some("html") {
+ FileEntry::OtherFile
+ } else {
+ report.html_files += 1;
+ load_html_file(file, report)
+ }
+ }
+ Err(e) if e.kind() == ErrorKind::NotFound => FileEntry::Missing,
+ Err(e) => {
+ // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` rather than `NotFound`.
+ // Explicitly check for that so that the broken link can be allowed in `LINKCHECK_EXCEPTIONS`.
+ #[cfg(windows)]
+ if e.raw_os_error() == Some(ERROR_INVALID_NAME)
+ && file.as_os_str().to_str().map_or(false, |s| s.contains("::"))
+ {
+ return FileEntry::Missing;
+ }
+ panic!("unexpected read error for {}: {}", file.display(), e);
+ }
+ });
+ (pretty_path, entry)
+ }
+}
+
+impl Report {
+ fn report(&self) {
+ println!("checked links in: {:.1}s", self.start.elapsed().as_secs_f64());
+ println!("number of HTML files scanned: {}", self.html_files);
+ println!("number of HTML redirects found: {}", self.html_redirects);
+ println!("number of links checked: {}", self.links_checked);
+ println!("number of links ignored due to external: {}", self.links_ignored_external);
+ println!("number of links ignored due to exceptions: {}", self.links_ignored_exception);
+ println!("number of intra doc links ignored: {}", self.intra_doc_exceptions);
+ println!("errors found: {}", self.errors);
+ }
+}
+
+fn load_html_file(file: &Path, report: &mut Report) -> FileEntry {
+ let source = match fs::read_to_string(file) {
+ Ok(s) => Rc::new(s),
+ Err(err) => {
+ // This usually should not fail since `metadata` was already
+ // called successfully on this file.
+ panic!("unexpected read error for {}: {}", file.display(), err);
+ }
+ };
+ match maybe_redirect(&source) {
+ Some(target) => {
+ report.html_redirects += 1;
+ let target = file.parent().unwrap().join(target);
+ FileEntry::Redirect { target }
+ }
+ None => FileEntry::HtmlFile { source: source.clone(), ids: RefCell::new(HashSet::new()) },
+ }
+}
+
+fn is_intra_doc_exception(file: &Path, link: &str) -> bool {
+ if let Some(entry) = INTRA_DOC_LINK_EXCEPTIONS.iter().find(|&(f, _)| file.ends_with(f)) {
+ entry.1.is_empty() || entry.1.contains(&link)
+ } else {
+ false
+ }
+}
+
+fn is_exception(file: &Path, link: &str) -> bool {
+ if let Some(entry) = LINKCHECK_EXCEPTIONS.iter().find(|&(f, _)| file.ends_with(f)) {
+ entry.1.contains(&link)
+ } else {
+ // FIXME(#63351): Concat trait in alloc/slice reexported in primitive page
+ //
+ // NOTE: This cannot be added to `LINKCHECK_EXCEPTIONS` because the resolved path
+ // calculated in `check` function is outside `build/<triple>/doc` dir.
+ // So the `strip_prefix` method just returns the old absolute broken path.
+ if file.ends_with("std/primitive.slice.html") {
+ if link.ends_with("primitive.slice.html") {
+ return true;
+ }
+ }
+ false
+ }
+}
+
+/// If the given HTML file contents is an HTML redirect, this returns the
+/// destination path given in the redirect.
+fn maybe_redirect(source: &str) -> Option<String> {
+ const REDIRECT_RUSTDOC: (usize, &str) = (7, "<p>Redirecting to <a href=");
+ const REDIRECT_MDBOOK: (usize, &str) = (8 - 7, "<p>Redirecting to... <a href=");
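+    // Each constant is a `(line offset, pattern)` pair. Both lookups share the
+    // `lines` iterator below, so the second (mdbook) offset is relative to the
+    // lines the rustdoc check has already consumed.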
+
+ let mut lines = source.lines();
+
+ let mut find_redirect = |(line_rel, redirect_pattern): (usize, &str)| {
+ let redirect_line = lines.nth(line_rel)?;
+
+ redirect_line.find(redirect_pattern).map(|i| {
+ let rest = &redirect_line[(i + redirect_pattern.len() + 1)..];
+ let pos_quote = rest.find('"').unwrap();
+ rest[..pos_quote].to_owned()
+ })
+ };
+
+ find_redirect(REDIRECT_RUSTDOC).or_else(|| find_redirect(REDIRECT_MDBOOK))
+}
+
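+/// Scans `source` line by line for occurrences of `attr` followed by `=` and a
+/// quoted value, calling `f(value, line_index, base)` for each one. A value
+/// taken from a `<base>` tag updates `base` instead of being reported.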
+fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(source: &str, attr: &str, mut f: F) {
+ let mut base = "";
+ for (i, mut line) in source.lines().enumerate() {
+ while let Some(j) = line.find(attr) {
+ let rest = &line[j + attr.len()..];
+ // The base tag should always be the first link in the document so
+ // we can get away with using one pass.
+ let is_base = line[..j].ends_with("<base");
+ line = rest;
+ let pos_equals = match rest.find('=') {
+ Some(i) => i,
+ None => continue,
+ };
+ if rest[..pos_equals].trim_start_matches(' ') != "" {
+ continue;
+ }
+
+ let rest = &rest[pos_equals + 1..];
+
+ let pos_quote = match rest.find(&['"', '\''][..]) {
+ Some(i) => i,
+ None => continue,
+ };
+ let quote_delim = rest.as_bytes()[pos_quote] as char;
+
+ if rest[..pos_quote].trim_start_matches(' ') != "" {
+ continue;
+ }
+ let rest = &rest[pos_quote + 1..];
+ let url = match rest.find(quote_delim) {
+ Some(i) => &rest[..i],
+ None => continue,
+ };
+ if is_base {
+ base = url;
+ continue;
+ }
+ f(url, i, base)
+ }
+ }
+}
+
+fn parse_ids(ids: &mut HashSet<String>, file: &str, source: &str, report: &mut Report) {
+ if ids.is_empty() {
+ with_attrs_in_source(source, " id", |fragment, i, _| {
+ let frag = fragment.trim_start_matches("#").to_owned();
+ let encoded = small_url_encode(&frag);
+ if !ids.insert(frag) {
+ report.errors += 1;
+                println!("{}:{}: id is not unique: `{}`", file, i + 1, fragment);
+ }
+ // Just in case, we also add the encoded id.
+ ids.insert(encoded);
+ });
+ }
+}
diff --git a/src/tools/linkchecker/tests/basic_broken/foo.html b/src/tools/linkchecker/tests/basic_broken/foo.html
new file mode 100644
index 000000000..cb27c55c9
--- /dev/null
+++ b/src/tools/linkchecker/tests/basic_broken/foo.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+<a href="bar.html">test</a>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/broken_fragment_local/foo.html b/src/tools/linkchecker/tests/broken_fragment_local/foo.html
new file mode 100644
index 000000000..66c457ad0
--- /dev/null
+++ b/src/tools/linkchecker/tests/broken_fragment_local/foo.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+<a href="#somefrag">test</a>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/broken_fragment_remote/bar.html b/src/tools/linkchecker/tests/broken_fragment_remote/bar.html
new file mode 100644
index 000000000..7879e1ce9
--- /dev/null
+++ b/src/tools/linkchecker/tests/broken_fragment_remote/bar.html
@@ -0,0 +1,4 @@
+<html>
+<body>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/broken_fragment_remote/inner/foo.html b/src/tools/linkchecker/tests/broken_fragment_remote/inner/foo.html
new file mode 100644
index 000000000..7683060b3
--- /dev/null
+++ b/src/tools/linkchecker/tests/broken_fragment_remote/inner/foo.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+<a href="../bar.html#somefrag">test</a>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/broken_redir/foo.html b/src/tools/linkchecker/tests/broken_redir/foo.html
new file mode 100644
index 000000000..bd3e3ad33
--- /dev/null
+++ b/src/tools/linkchecker/tests/broken_redir/foo.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+ <a href="redir-bad.html">bad redir</a>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/broken_redir/redir-bad.html b/src/tools/linkchecker/tests/broken_redir/redir-bad.html
new file mode 100644
index 000000000..9c580d8e0
--- /dev/null
+++ b/src/tools/linkchecker/tests/broken_redir/redir-bad.html
@@ -0,0 +1,11 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta http-equiv="refresh" content="0;URL=sometarget">
+ <title>Redirection</title>
+</head>
+<body>
+ <p>Redirecting to <a href="sometarget">sometarget</a>...</p>
+ <script>location.replace("sometarget" + location.search + location.hash);</script>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/checks.rs b/src/tools/linkchecker/tests/checks.rs
new file mode 100644
index 000000000..1a0b1b00e
--- /dev/null
+++ b/src/tools/linkchecker/tests/checks.rs
@@ -0,0 +1,113 @@
+use std::path::Path;
+use std::process::{Command, ExitStatus};
+
+fn run(dirname: &str) -> (ExitStatus, String, String) {
+ let output = Command::new(env!("CARGO_BIN_EXE_linkchecker"))
+ .current_dir(Path::new(env!("CARGO_MANIFEST_DIR")).join("tests"))
+ .arg(dirname)
+ .output()
+ .unwrap();
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let stderr = String::from_utf8(output.stderr).unwrap();
+ (output.status, stdout, stderr)
+}
+
+fn broken_test(dirname: &str, expected: &str) {
+ let (status, stdout, stderr) = run(dirname);
+ assert!(!status.success());
+ if !contains(expected, &stdout) {
+ panic!(
+ "stdout did not contain expected text: {}\n\
+ --- stdout:\n\
+ {}\n\
+ --- stderr:\n\
+ {}\n",
+ expected, stdout, stderr
+ );
+ }
+}
+
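+// Line-oriented wildcard match: `expected` may contain `[..]` markers, each of
+// which matches an arbitrary run of characters, and the whole pattern must
+// match within a single line of `actual` (backslashes are normalized to `/`).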
+fn contains(expected: &str, actual: &str) -> bool {
+ // Normalize for Windows paths.
+ let actual = actual.replace('\\', "/");
+ actual.lines().any(|mut line| {
+ for (i, part) in expected.split("[..]").enumerate() {
+ match line.find(part) {
+ Some(j) => {
+ if i == 0 && j != 0 {
+ return false;
+ }
+ line = &line[j + part.len()..];
+ }
+ None => return false,
+ }
+ }
+ line.is_empty() || expected.ends_with("[..]")
+ })
+}
+
+fn valid_test(dirname: &str) {
+ let (status, stdout, stderr) = run(dirname);
+ if !status.success() {
+ panic!(
+ "test did not succeed as expected\n\
+ --- stdout:\n\
+ {}\n\
+ --- stderr:\n\
+ {}\n",
+ stdout, stderr
+ );
+ }
+}
+
+#[test]
+fn valid() {
+ valid_test("valid/inner");
+}
+
+#[test]
+fn basic_broken() {
+ broken_test("basic_broken", "foo.html:3: broken link - `bar.html`");
+}
+
+#[test]
+fn broken_fragment_local() {
+ broken_test(
+ "broken_fragment_local",
+ "foo.html:3: broken link fragment `#somefrag` pointing to `foo.html`",
+ );
+}
+
+#[test]
+fn broken_fragment_remote() {
+ broken_test(
+ "broken_fragment_remote/inner",
+ "foo.html:3: broken link fragment `#somefrag` pointing to \
+ `[..]/broken_fragment_remote/bar.html`",
+ );
+}
+
+#[test]
+fn broken_redir() {
+ broken_test(
+ "broken_redir",
+ "foo.html:3: broken redirect from `redir-bad.html` to `sometarget`",
+ );
+}
+
+#[test]
+fn directory_link() {
+ broken_test(
+ "directory_link",
+ "foo.html:3: directory link to `somedir` (directory links should use index.html instead)",
+ );
+}
+
+#[test]
+fn redirect_loop() {
+ broken_test(
+ "redirect_loop",
+ "foo.html:3: redirect from `redir-bad.html` to `[..]redirect_loop/redir-bad.html` \
+ which is also a redirect (not supported)",
+ );
+}
diff --git a/src/tools/linkchecker/tests/directory_link/foo.html b/src/tools/linkchecker/tests/directory_link/foo.html
new file mode 100644
index 000000000..40a8461b8
--- /dev/null
+++ b/src/tools/linkchecker/tests/directory_link/foo.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+ <a href="somedir">dir link</a>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/directory_link/somedir/index.html b/src/tools/linkchecker/tests/directory_link/somedir/index.html
new file mode 100644
index 000000000..7879e1ce9
--- /dev/null
+++ b/src/tools/linkchecker/tests/directory_link/somedir/index.html
@@ -0,0 +1,4 @@
+<html>
+<body>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/redirect_loop/foo.html b/src/tools/linkchecker/tests/redirect_loop/foo.html
new file mode 100644
index 000000000..bee58b212
--- /dev/null
+++ b/src/tools/linkchecker/tests/redirect_loop/foo.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+ <a href="redir-bad.html">loop link</a>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/redirect_loop/redir-bad.html b/src/tools/linkchecker/tests/redirect_loop/redir-bad.html
new file mode 100644
index 000000000..bc567caa7
--- /dev/null
+++ b/src/tools/linkchecker/tests/redirect_loop/redir-bad.html
@@ -0,0 +1,11 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta http-equiv="refresh" content="0;URL=redir-bad.html">
+ <title>Redirection</title>
+</head>
+<body>
+ <p>Redirecting to <a href="redir-bad.html">redir-bad.html</a>...</p>
+ <script>location.replace("redir-bad.html" + location.search + location.hash);</script>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/valid/inner/bar.html b/src/tools/linkchecker/tests/valid/inner/bar.html
new file mode 100644
index 000000000..4b500d78b
--- /dev/null
+++ b/src/tools/linkchecker/tests/valid/inner/bar.html
@@ -0,0 +1,7 @@
+<html>
+<body>
+
+ <h2 id="barfrag">Bar</h2>
+
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/valid/inner/foo.html b/src/tools/linkchecker/tests/valid/inner/foo.html
new file mode 100644
index 000000000..3c6a7483b
--- /dev/null
+++ b/src/tools/linkchecker/tests/valid/inner/foo.html
@@ -0,0 +1,14 @@
+<html>
+<body>
+ <a href="#localfrag">test local frag</a>
+ <a href="../outer.html">remote link</a>
+ <a href="../outer.html#somefrag">remote link with fragment</a>
+ <a href="bar.html">this book</a>
+ <a href="bar.html#barfrag">this book with fragment</a>
+ <a href="https://example.com/doesnotexist">external links not validated</a>
+ <a href="redir.html#redirfrag">Redirect</a>
+
+ <h2 id="localfrag">Local</h2>
+
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/valid/inner/redir-bad.html b/src/tools/linkchecker/tests/valid/inner/redir-bad.html
new file mode 100644
index 000000000..f32683efe
--- /dev/null
+++ b/src/tools/linkchecker/tests/valid/inner/redir-bad.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta http-equiv="refresh" content="0;URL=xxx">
+ <title>Redirection</title>
+</head>
+<body>
+ <p>Redirecting to <a href="xxx">xxx</a>...</p>
+ <script>location.replace("xxx" + location.search + location.hash);</script>
+ These files are skipped, but probably shouldn't be.
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/valid/inner/redir-target.html b/src/tools/linkchecker/tests/valid/inner/redir-target.html
new file mode 100644
index 000000000..bd59884a0
--- /dev/null
+++ b/src/tools/linkchecker/tests/valid/inner/redir-target.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+ <h2 id="redirfrag">Redir</h2>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/valid/inner/redir.html b/src/tools/linkchecker/tests/valid/inner/redir.html
new file mode 100644
index 000000000..3a52a8973
--- /dev/null
+++ b/src/tools/linkchecker/tests/valid/inner/redir.html
@@ -0,0 +1,11 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <meta http-equiv="refresh" content="0;URL=redir-target.html">
+ <title>Redirection</title>
+</head>
+<body>
+ <p>Redirecting to <a href="redir-target.html">redir-target.html</a>...</p>
+ <script>location.replace("redir-target.html" + location.search + location.hash);</script>
+</body>
+</html>
diff --git a/src/tools/linkchecker/tests/valid/outer.html b/src/tools/linkchecker/tests/valid/outer.html
new file mode 100644
index 000000000..35f799f20
--- /dev/null
+++ b/src/tools/linkchecker/tests/valid/outer.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+<a id="somefrag"></a>
+</body>
+</html>
diff --git a/src/tools/lint-docs/Cargo.toml b/src/tools/lint-docs/Cargo.toml
new file mode 100644
index 000000000..3578bda82
--- /dev/null
+++ b/src/tools/lint-docs/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "lint-docs"
+version = "0.1.0"
+edition = "2021"
+description = "A script to extract the lint documentation for the rustc book."
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+serde_json = "1.0.57"
+tempfile = "3.1.0"
+walkdir = "2.3.1"
diff --git a/src/tools/lint-docs/src/groups.rs b/src/tools/lint-docs/src/groups.rs
new file mode 100644
index 000000000..9696e35b7
--- /dev/null
+++ b/src/tools/lint-docs/src/groups.rs
@@ -0,0 +1,148 @@
+use crate::{Lint, LintExtractor};
+use std::collections::{BTreeMap, BTreeSet};
+use std::error::Error;
+use std::fmt::Write;
+use std::fs;
+use std::process::Command;
+
+/// Descriptions of rustc lint groups.
+static GROUP_DESCRIPTIONS: &[(&str, &str)] = &[
+ ("unused", "Lints that detect things being declared but not used, or excess syntax"),
+ ("rustdoc", "Rustdoc-specific lints"),
+ ("rust-2018-idioms", "Lints to nudge you toward idiomatic features of Rust 2018"),
+ ("nonstandard-style", "Violation of standard naming conventions"),
+ ("future-incompatible", "Lints that detect code that has future-compatibility problems"),
+ ("rust-2018-compatibility", "Lints used to transition code from the 2015 edition to 2018"),
+ ("rust-2021-compatibility", "Lints used to transition code from the 2018 edition to 2021"),
+];
+
+type LintGroups = BTreeMap<String, BTreeSet<String>>;
+
+impl<'a> LintExtractor<'a> {
+ /// Updates the documentation of lint groups.
+ pub(crate) fn generate_group_docs(&self, lints: &[Lint]) -> Result<(), Box<dyn Error>> {
+ let groups = self.collect_groups()?;
+ let groups_path = self.out_path.join("groups.md");
+ let contents = fs::read_to_string(&groups_path)
+ .map_err(|e| format!("could not read {}: {}", groups_path.display(), e))?;
+ let new_contents =
+ contents.replace("{{groups-table}}", &self.make_groups_table(lints, &groups)?);
+ // Delete the output because rustbuild uses hard links in its copies.
+ let _ = fs::remove_file(&groups_path);
+ fs::write(&groups_path, new_contents)
+ .map_err(|e| format!("could not write to {}: {}", groups_path.display(), e))?;
+ Ok(())
+ }
+
+ /// Collects the group names from rustc.
+ fn collect_groups(&self) -> Result<LintGroups, Box<dyn Error>> {
+ let mut result = BTreeMap::new();
+ let mut cmd = Command::new(self.rustc_path);
+ cmd.arg("-Whelp");
+ let output = cmd.output().map_err(|e| format!("failed to run command {:?}\n{}", cmd, e))?;
+ if !output.status.success() {
+ return Err(format!(
+ "failed to collect lint info: {:?}\n--- stderr\n{}--- stdout\n{}\n",
+ output.status,
+ std::str::from_utf8(&output.stderr).unwrap(),
+ std::str::from_utf8(&output.stdout).unwrap(),
+ )
+ .into());
+ }
+ let stdout = std::str::from_utf8(&output.stdout).unwrap();
+ let lines = stdout.lines();
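+        // `-Whelp` prints its lint-group table after a line containing
+        // "groups provided" and a `----` separator row; each row is parsed as
+        // `<group-name>  <lint>, <lint>, ...` until the first blank line.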
+ let group_start = lines.skip_while(|line| !line.contains("groups provided")).skip(1);
+ let table_start = group_start.skip_while(|line| !line.contains("----")).skip(1);
+ for line in table_start {
+ if line.is_empty() {
+ break;
+ }
+ let mut parts = line.trim().splitn(2, ' ');
+ let name = parts.next().expect("name in group");
+ if name == "warnings" {
+ // This is special.
+ continue;
+ }
+ let lints = parts
+ .next()
+ .ok_or_else(|| format!("expected lints following name, got `{}`", line))?;
+ let lints = lints.split(',').map(|l| l.trim().to_string()).collect();
+ assert!(result.insert(name.to_string(), lints).is_none());
+ }
+ if result.is_empty() {
+ return Err(
+ format!("expected at least one group in -Whelp output, got:\n{}", stdout).into()
+ );
+ }
+ Ok(result)
+ }
+
+ fn make_groups_table(
+ &self,
+ lints: &[Lint],
+ groups: &LintGroups,
+ ) -> Result<String, Box<dyn Error>> {
+ let mut result = String::new();
+ let mut to_link = Vec::new();
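+        // Build the table row by row, remembering every lint that is mentioned
+        // so that reference-style link definitions can be appended below it.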
+ result.push_str("| Group | Description | Lints |\n");
+ result.push_str("|-------|-------------|-------|\n");
+ result.push_str("| warnings | All lints that are set to issue warnings | See [warn-by-default] for the default set of warnings |\n");
+ for (group_name, group_lints) in groups {
+ let description = match GROUP_DESCRIPTIONS.iter().find(|(n, _)| n == group_name) {
+ Some((_, desc)) => desc,
+ None if self.validate => {
+ return Err(format!(
+ "lint group `{}` does not have a description, \
+ please update the GROUP_DESCRIPTIONS list in \
+ src/tools/lint-docs/src/groups.rs",
+ group_name
+ )
+ .into());
+ }
+ None => {
+ eprintln!(
+ "warning: lint group `{}` is missing from the GROUP_DESCRIPTIONS list\n\
+ If this is a new lint group, please update the GROUP_DESCRIPTIONS in \
+ src/tools/lint-docs/src/groups.rs",
+ group_name
+ );
+ continue;
+ }
+ };
+ to_link.extend(group_lints);
+ let brackets: Vec<_> = group_lints.iter().map(|l| format!("[{}]", l)).collect();
+ write!(result, "| {} | {} | {} |\n", group_name, description, brackets.join(", "))
+ .unwrap();
+ }
+ result.push('\n');
+ result.push_str("[warn-by-default]: listing/warn-by-default.md\n");
+ for lint_name in to_link {
+ let lint_def = match lints.iter().find(|l| l.name == lint_name.replace("-", "_")) {
+ Some(def) => def,
+ None => {
+ let msg = format!(
+ "`rustc -W help` defined lint `{}` but that lint does not \
+ appear to exist\n\
+ Check that the lint definition includes the appropriate doc comments.",
+ lint_name
+ );
+ if self.validate {
+ return Err(msg.into());
+ } else {
+ eprintln!("warning: {}", msg);
+ continue;
+ }
+ }
+ };
+ write!(
+ result,
+ "[{}]: listing/{}#{}\n",
+ lint_name,
+ lint_def.level.doc_filename(),
+ lint_name
+ )
+ .unwrap();
+ }
+ Ok(result)
+ }
+}
diff --git a/src/tools/lint-docs/src/lib.rs b/src/tools/lint-docs/src/lib.rs
new file mode 100644
index 000000000..857feb773
--- /dev/null
+++ b/src/tools/lint-docs/src/lib.rs
@@ -0,0 +1,502 @@
+use std::error::Error;
+use std::fmt::Write;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use walkdir::WalkDir;
+
+mod groups;
+
+pub struct LintExtractor<'a> {
+ /// Path to the `src` directory, where it will scan for `.rs` files to
+ /// find lint declarations.
+ pub src_path: &'a Path,
+ /// Path where to save the output.
+ pub out_path: &'a Path,
+ /// Path to the `rustc` executable.
+ pub rustc_path: &'a Path,
+ /// The target arch to build the docs for.
+ pub rustc_target: &'a str,
+ /// Verbose output.
+ pub verbose: bool,
+ /// Validate the style and the code example.
+ pub validate: bool,
+}
+
+struct Lint {
+ name: String,
+ doc: Vec<String>,
+ level: Level,
+ path: PathBuf,
+ lineno: usize,
+}
+
+impl Lint {
+ fn doc_contains(&self, text: &str) -> bool {
+ self.doc.iter().any(|line| line.contains(text))
+ }
+
+ fn is_ignored(&self) -> bool {
+ self.doc
+ .iter()
+ .filter(|line| line.starts_with("```rust"))
+ .all(|line| line.contains(",ignore"))
+ }
+
+ /// Checks the doc style of the lint.
+ fn check_style(&self) -> Result<(), Box<dyn Error>> {
+ for &expected in &["### Example", "### Explanation", "{{produces}}"] {
+ if expected == "{{produces}}" && self.is_ignored() {
+ continue;
+ }
+ if !self.doc_contains(expected) {
+ return Err(format!("lint docs should contain the line `{}`", expected).into());
+ }
+ }
+ if let Some(first) = self.doc.first() {
+ if !first.starts_with(&format!("The `{}` lint", self.name)) {
+ return Err(format!(
+ "lint docs should start with the text \"The `{}` lint\" to introduce the lint",
+ self.name
+ )
+ .into());
+ }
+ }
+ Ok(())
+ }
+}
+
+#[derive(Clone, Copy, PartialEq)]
+enum Level {
+ Allow,
+ Warn,
+ Deny,
+}
+
+impl Level {
+ fn doc_filename(&self) -> &str {
+ match self {
+ Level::Allow => "allowed-by-default.md",
+ Level::Warn => "warn-by-default.md",
+ Level::Deny => "deny-by-default.md",
+ }
+ }
+}
+
+impl<'a> LintExtractor<'a> {
+ /// Collects all lints, and writes the markdown documentation at the given directory.
+ pub fn extract_lint_docs(&self) -> Result<(), Box<dyn Error>> {
+ let mut lints = self.gather_lints()?;
+ for lint in &mut lints {
+ self.generate_output_example(lint).map_err(|e| {
+ format!(
+ "failed to test example in lint docs for `{}` in {}:{}: {}",
+ lint.name,
+ lint.path.display(),
+ lint.lineno,
+ e
+ )
+ })?;
+ }
+ self.save_lints_markdown(&lints)?;
+ self.generate_group_docs(&lints)?;
+ Ok(())
+ }
+
+ /// Collects all lints from all files in the given directory.
+ fn gather_lints(&self) -> Result<Vec<Lint>, Box<dyn Error>> {
+ let mut lints = Vec::new();
+ for entry in WalkDir::new(self.src_path).into_iter().filter_map(|e| e.ok()) {
+ if !entry.path().extension().map_or(false, |ext| ext == "rs") {
+ continue;
+ }
+ lints.extend(self.lints_from_file(entry.path())?);
+ }
+ if lints.is_empty() {
+ return Err("no lints were found!".into());
+ }
+ Ok(lints)
+ }
+
+ /// Collects all lints from the given file.
+ fn lints_from_file(&self, path: &Path) -> Result<Vec<Lint>, Box<dyn Error>> {
+ let mut lints = Vec::new();
+ let contents = fs::read_to_string(path)
+ .map_err(|e| format!("could not read {}: {}", path.display(), e))?;
+ let mut lines = contents.lines().enumerate();
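+        // Scan for `declare_lint!` invocations; for each one, collect the
+        // `///` doc lines, then the lint name, then the default level, and
+        // ignore the rest of the macro body.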
+ 'outer: loop {
+ // Find a lint declaration.
+ let lint_start = loop {
+ match lines.next() {
+ Some((lineno, line)) => {
+ if line.trim().starts_with("declare_lint!") {
+ break lineno + 1;
+ }
+ }
+ None => return Ok(lints),
+ }
+ };
+ // Read the lint.
+ let mut doc_lines = Vec::new();
+ let (doc, name) = loop {
+ match lines.next() {
+ Some((lineno, line)) => {
+ let line = line.trim();
+ if let Some(text) = line.strip_prefix("/// ") {
+ doc_lines.push(text.to_string());
+ } else if line == "///" {
+ doc_lines.push("".to_string());
+ } else if line.starts_with("// ") {
+ // Ignore comments.
+ continue;
+ } else if line.starts_with("#[allow") {
+ // Ignore allow of lints (useful for
+ // invalid_rust_codeblocks).
+ continue;
+ } else {
+ let name = lint_name(line).map_err(|e| {
+ format!(
+ "could not determine lint name in {}:{}: {}, line was `{}`",
+ path.display(),
+ lineno,
+ e,
+ line
+ )
+ })?;
+ if doc_lines.is_empty() {
+ if self.validate {
+ return Err(format!(
+ "did not find doc lines for lint `{}` in {}",
+ name,
+ path.display()
+ )
+ .into());
+ } else {
+ eprintln!(
+ "warning: lint `{}` in {} does not define any doc lines, \
+ these are required for the lint documentation",
+ name,
+ path.display()
+ );
+ continue 'outer;
+ }
+ }
+ break (doc_lines, name);
+ }
+ }
+ None => {
+ return Err(format!(
+ "unexpected EOF for lint definition at {}:{}",
+ path.display(),
+ lint_start
+ )
+ .into());
+ }
+ }
+ };
+ // These lints are specifically undocumented. This should be reserved
+ // for internal rustc-lints only.
+ if name == "deprecated_in_future" {
+ continue;
+ }
+ // Read the level.
+ let level = loop {
+ match lines.next() {
+ // Ignore comments.
+ Some((_, line)) if line.trim().starts_with("// ") => {}
+ Some((lineno, line)) => match line.trim() {
+ "Allow," => break Level::Allow,
+ "Warn," => break Level::Warn,
+ "Deny," => break Level::Deny,
+ _ => {
+ return Err(format!(
+ "unexpected lint level `{}` in {}:{}",
+ line,
+ path.display(),
+ lineno
+ )
+ .into());
+ }
+ },
+ None => {
+ return Err(format!(
+ "expected lint level in {}:{}, got EOF",
+ path.display(),
+ lint_start
+ )
+ .into());
+ }
+ }
+ };
+ // The rest of the lint definition is ignored.
+ assert!(!doc.is_empty());
+ lints.push(Lint { name, doc, level, path: PathBuf::from(path), lineno: lint_start });
+ }
+ }
+
+ /// Mutates the lint definition to replace the `{{produces}}` marker with the
+ /// actual output from the compiler.
+ fn generate_output_example(&self, lint: &mut Lint) -> Result<(), Box<dyn Error>> {
+ // Explicit list of lints that are allowed to not have an example. Please
+ // try to avoid adding to this list.
+ if matches!(
+ lint.name.as_str(),
+ "unused_features" // broken lint
+ | "unstable_features" // deprecated
+ ) {
+ return Ok(());
+ }
+ if lint.doc_contains("[rustdoc book]") && !lint.doc_contains("{{produces}}") {
+ // Rustdoc lints are documented in the rustdoc book, don't check these.
+ return Ok(());
+ }
+ if self.validate {
+ lint.check_style()?;
+ }
+ // Unfortunately some lints have extra requirements that this simple test
+ // setup can't handle (like extern crates). An alternative is to use a
+ // separate test suite, and use an include mechanism such as mdbook's
+ // `{{#rustdoc_include}}`.
+ if !lint.is_ignored() {
+ if let Err(e) = self.replace_produces(lint) {
+ if self.validate {
+ return Err(e);
+ }
+ eprintln!(
+ "warning: the code example in lint `{}` in {} failed to \
+ generate the expected output: {}",
+ lint.name,
+ lint.path.display(),
+ e
+ );
+ }
+ }
+ Ok(())
+ }
+
+ /// Mutates the lint docs to replace the `{{produces}}` marker with the actual
+ /// output from the compiler.
+ fn replace_produces(&self, lint: &mut Lint) -> Result<(), Box<dyn Error>> {
+ let mut lines = lint.doc.iter_mut();
+ loop {
+ // Find start of example.
+ let options = loop {
+ match lines.next() {
+ Some(line) if line.starts_with("```rust") => {
+ break line[7..].split(',').collect::<Vec<_>>();
+ }
+ Some(line) if line.contains("{{produces}}") => {
+                        return Err("lint marker {{produces}} found, \
+ but expected to immediately follow a rust code block"
+ .into());
+ }
+ Some(_) => {}
+ None => return Ok(()),
+ }
+ };
+ // Find the end of example.
+ let mut example = Vec::new();
+ loop {
+ match lines.next() {
+ Some(line) if line == "```" => break,
+ Some(line) => example.push(line),
+ None => {
+ return Err(format!(
+ "did not find end of example triple ticks ```, docs were:\n{:?}",
+ lint.doc
+ )
+ .into());
+ }
+ }
+ }
+ // Find the {{produces}} line.
+ loop {
+ match lines.next() {
+ Some(line) if line.is_empty() => {}
+ Some(line) if line == "{{produces}}" => {
+ let output = self.generate_lint_output(&lint.name, &example, &options)?;
+ line.replace_range(
+ ..,
+ &format!(
+ "This will produce:\n\
+ \n\
+ ```text\n\
+ {}\
+ ```",
+ output
+ ),
+ );
+ break;
+ }
+ // No {{produces}} after example, find next example.
+ Some(_line) => break,
+ None => return Ok(()),
+ }
+ }
+ }
+ }
+
+ /// Runs the compiler against the example, and extracts the output.
+ fn generate_lint_output(
+ &self,
+ name: &str,
+ example: &[&mut String],
+ options: &[&str],
+ ) -> Result<String, Box<dyn Error>> {
+ if self.verbose {
+ eprintln!("compiling lint {}", name);
+ }
+ let tempdir = tempfile::TempDir::new()?;
+ let tempfile = tempdir.path().join("lint_example.rs");
+ let mut source = String::new();
+ let needs_main = !example.iter().any(|line| line.contains("fn main"));
+ // Remove `# ` prefix for hidden lines.
+ let unhidden = example.iter().map(|line| line.strip_prefix("# ").unwrap_or(line));
+ let mut lines = unhidden.peekable();
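+        // Keep inner attributes (`#![...]`) at the top of the file; the
+        // remaining lines (with any `# ` hidden-line markers already stripped)
+        // are wrapped in `fn main() { ... }` unless the example defines one.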
+ while let Some(line) = lines.peek() {
+ if line.starts_with("#!") {
+ source.push_str(line);
+ source.push('\n');
+ lines.next();
+ } else {
+ break;
+ }
+ }
+ if needs_main {
+ source.push_str("fn main() {\n");
+ }
+ for line in lines {
+ source.push_str(line);
+ source.push('\n')
+ }
+ if needs_main {
+ source.push_str("}\n");
+ }
+ fs::write(&tempfile, source)
+ .map_err(|e| format!("failed to write {}: {}", tempfile.display(), e))?;
+ let mut cmd = Command::new(self.rustc_path);
+ if options.contains(&"edition2015") {
+ cmd.arg("--edition=2015");
+ } else {
+ cmd.arg("--edition=2018");
+ }
+ cmd.arg("--error-format=json");
+ cmd.arg("--target").arg(self.rustc_target);
+ if options.contains(&"test") {
+ cmd.arg("--test");
+ }
+ cmd.arg("lint_example.rs");
+ cmd.current_dir(tempdir.path());
+ let output = cmd.output().map_err(|e| format!("failed to run command {:?}\n{}", cmd, e))?;
+ let stderr = std::str::from_utf8(&output.stderr).unwrap();
+ let msgs = stderr
+ .lines()
+ .filter(|line| line.starts_with('{'))
+ .map(serde_json::from_str)
+ .collect::<Result<Vec<serde_json::Value>, _>>()?;
+ match msgs
+ .iter()
+ .find(|msg| matches!(&msg["code"]["code"], serde_json::Value::String(s) if s==name))
+ {
+ Some(msg) => {
+ let rendered = msg["rendered"].as_str().expect("rendered field should exist");
+ Ok(rendered.to_string())
+ }
+ None => {
+ match msgs.iter().find(
+ |msg| matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name)),
+ ) {
+ Some(msg) => {
+ let rendered = msg["rendered"].as_str().expect("rendered field should exist");
+ Ok(rendered.to_string())
+ }
+ None => {
+ let rendered: Vec<&str> =
+ msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect();
+ let non_json: Vec<&str> =
+ stderr.lines().filter(|line| !line.starts_with('{')).collect();
+ Err(format!(
+ "did not find lint `{}` in output of example, got:\n{}\n{}",
+ name,
+ non_json.join("\n"),
+ rendered.join("\n")
+ )
+ .into())
+ }
+ }
+ }
+ }
+ }
+
+ /// Saves the mdbook lint chapters at the given path.
+ fn save_lints_markdown(&self, lints: &[Lint]) -> Result<(), Box<dyn Error>> {
+ self.save_level(lints, Level::Allow, ALLOWED_MD)?;
+ self.save_level(lints, Level::Warn, WARN_MD)?;
+ self.save_level(lints, Level::Deny, DENY_MD)?;
+ Ok(())
+ }
+
+ fn save_level(&self, lints: &[Lint], level: Level, header: &str) -> Result<(), Box<dyn Error>> {
+ let mut result = String::new();
+ result.push_str(header);
+ let mut these_lints: Vec<_> = lints.iter().filter(|lint| lint.level == level).collect();
+ these_lints.sort_unstable_by_key(|lint| &lint.name);
+ for lint in &these_lints {
+ write!(result, "* [`{}`](#{})\n", lint.name, lint.name.replace("_", "-")).unwrap();
+ }
+ result.push('\n');
+ for lint in &these_lints {
+ write!(result, "## {}\n\n", lint.name.replace("_", "-")).unwrap();
+ for line in &lint.doc {
+ result.push_str(line);
+ result.push('\n');
+ }
+ result.push('\n');
+ }
+ let out_path = self.out_path.join("listing").join(level.doc_filename());
+ // Delete the output because rustbuild uses hard links in its copies.
+ let _ = fs::remove_file(&out_path);
+ fs::write(&out_path, result)
+ .map_err(|e| format!("could not write to {}: {}", out_path.display(), e))?;
+ Ok(())
+ }
+}
+
+/// Extracts the lint name (removing the visibility modifier, and checking validity).
+fn lint_name(line: &str) -> Result<String, &'static str> {
+ // Skip over any potential `pub` visibility.
+ match line.trim().split(' ').next_back() {
+ Some(name) => {
+ if !name.ends_with(',') {
+ return Err("lint name should end with comma");
+ }
+ let name = &name[..name.len() - 1];
+ if !name.chars().all(|ch| ch.is_uppercase() || ch.is_ascii_digit() || ch == '_')
+ || name.is_empty()
+ {
+ return Err("lint name did not have expected format");
+ }
+ Ok(name.to_lowercase().to_string())
+ }
+ None => Err("could not find lint name"),
+ }
+}
+
+static ALLOWED_MD: &str = r#"# Allowed-by-default Lints
+
+These lints are all set to the 'allow' level by default. As such, they won't show up
+unless you set them to a higher lint level with a flag or attribute.
+
+"#;
+
+static WARN_MD: &str = r#"# Warn-by-default Lints
+
+These lints are all set to the 'warn' level by default.
+
+"#;
+
+static DENY_MD: &str = r#"# Deny-by-default Lints
+
+These lints are all set to the 'deny' level by default.
+
+"#;
diff --git a/src/tools/lint-docs/src/main.rs b/src/tools/lint-docs/src/main.rs
new file mode 100644
index 000000000..2055fed2b
--- /dev/null
+++ b/src/tools/lint-docs/src/main.rs
@@ -0,0 +1,84 @@
+use std::error::Error;
+use std::path::PathBuf;
+
+fn main() {
+ if let Err(e) = doit() {
+ eprintln!("error: {}", e);
+ eprintln!(
+ "
+This error was generated by the lint-docs tool.
+This tool extracts documentation for lints from the source code and places
+them in the rustc book. See the declare_lint! documentation
+https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint_defs/macro.declare_lint.html
+for an example of the format of documentation this tool expects.
+
+To re-run these tests, run: ./x.py test --keep-stage=0 src/tools/lint-docs
+The --keep-stage flag should be used if you have already built the compiler
+and are only modifying the doc comments to avoid rebuilding the compiler.
+"
+ );
+ std::process::exit(1);
+ }
+}
+
+fn doit() -> Result<(), Box<dyn Error>> {
+ let mut args = std::env::args().skip(1);
+ let mut src_path = None;
+ let mut out_path = None;
+ let mut rustc_path = None;
+ let mut rustc_target = None;
+ let mut verbose = false;
+ let mut validate = false;
+ while let Some(arg) = args.next() {
+ match arg.as_str() {
+ "--src" => {
+ src_path = match args.next() {
+ Some(s) => Some(PathBuf::from(s)),
+ None => return Err("--src requires a value".into()),
+ };
+ }
+ "--out" => {
+ out_path = match args.next() {
+ Some(s) => Some(PathBuf::from(s)),
+ None => return Err("--out requires a value".into()),
+ };
+ }
+ "--rustc" => {
+ rustc_path = match args.next() {
+ Some(s) => Some(PathBuf::from(s)),
+ None => return Err("--rustc requires a value".into()),
+ };
+ }
+ "--rustc-target" => {
+ rustc_target = match args.next() {
+ Some(s) => Some(s),
+ None => return Err("--rustc-target requires a value".into()),
+ };
+ }
+ "-v" | "--verbose" => verbose = true,
+ "--validate" => validate = true,
+ s => return Err(format!("unexpected argument `{}`", s).into()),
+ }
+ }
+ if src_path.is_none() {
+ return Err("--src must be specified to the directory with the compiler source".into());
+ }
+ if out_path.is_none() {
+ return Err("--out must be specified to the directory with the lint listing docs".into());
+ }
+ if rustc_path.is_none() {
+ return Err("--rustc must be specified to the path of rustc".into());
+ }
+ if rustc_target.is_none() {
+ return Err("--rustc-target must be specified to the rustc target".into());
+ }
+ let le = lint_docs::LintExtractor {
+ src_path: &src_path.unwrap(),
+ out_path: &out_path.unwrap(),
+ rustc_path: &rustc_path.unwrap(),
+ rustc_target: &rustc_target.unwrap(),
+ verbose,
+ validate,
+ };
+ le.extract_lint_docs()
+}
diff --git a/src/tools/lld-wrapper/Cargo.toml b/src/tools/lld-wrapper/Cargo.toml
new file mode 100644
index 000000000..bf5138b16
--- /dev/null
+++ b/src/tools/lld-wrapper/Cargo.toml
@@ -0,0 +1,5 @@
+[package]
+name = "lld-wrapper"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
diff --git a/src/tools/lld-wrapper/src/main.rs b/src/tools/lld-wrapper/src/main.rs
new file mode 100644
index 000000000..90bd24a75
--- /dev/null
+++ b/src/tools/lld-wrapper/src/main.rs
@@ -0,0 +1,106 @@
+//! Script to invoke the bundled rust-lld with the correct flavor.
+//!
+//! lld supports multiple command line interfaces. If `-flavor <flavor>` are passed as the first
+//! two arguments the `<flavor>` command line interface is used to process the remaining arguments.
+//! If no `-flavor` argument is present the flavor is determined by the executable name.
+//!
+//! In Rust with `-Z gcc-ld=lld` we have gcc or clang invoke rust-lld. Since there is no way to
+//! make gcc/clang pass `-flavor <flavor>` as the first two arguments in the linker invocation
+//! make gcc/clang pass `-flavor <flavor>` as the first two arguments in the linker invocation,
+//! and since Windows does not support symbolic links for files, this wrapper is used in place of a
+//! passed to the wrapper as the first two arguments. On Windows it spawns a `..\rust-lld.exe`
+//! child process.
+
+use std::fmt::Display;
+use std::path::{Path, PathBuf};
+use std::{env, process};
+
+trait UnwrapOrExitWith<T> {
+ fn unwrap_or_exit_with(self, context: &str) -> T;
+}
+
+impl<T> UnwrapOrExitWith<T> for Option<T> {
+ fn unwrap_or_exit_with(self, context: &str) -> T {
+ self.unwrap_or_else(|| {
+ eprintln!("lld-wrapper: {}", context);
+ process::exit(1);
+ })
+ }
+}
+
+impl<T, E: Display> UnwrapOrExitWith<T> for Result<T, E> {
+ fn unwrap_or_exit_with(self, context: &str) -> T {
+ self.unwrap_or_else(|err| {
+ eprintln!("lld-wrapper: {}: {}", context, err);
+ process::exit(1);
+ })
+ }
+}
+
+/// Returns the path to rust-lld in the parent directory.
+///
+/// Exits if the parent directory cannot be determined.
+fn get_rust_lld_path(current_exe_path: &Path) -> PathBuf {
+ let mut rust_lld_exe_name = "rust-lld".to_owned();
+ rust_lld_exe_name.push_str(env::consts::EXE_SUFFIX);
+ let mut rust_lld_path = current_exe_path
+ .parent()
+ .unwrap_or_exit_with("directory containing current executable could not be determined")
+ .parent()
+ .unwrap_or_exit_with("parent directory could not be determined")
+ .to_owned();
+ rust_lld_path.push(rust_lld_exe_name);
+ rust_lld_path
+}
+
+/// Returns the command for invoking rust-lld with the correct flavor.
+/// LLD only accepts the flavor as the first two arguments (`-flavor <flavor>`), so move it there.
+///
+/// Exits on error.
+fn get_rust_lld_command(current_exe_path: &Path) -> process::Command {
+ let rust_lld_path = get_rust_lld_path(current_exe_path);
+ let mut command = process::Command::new(rust_lld_path);
+
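+    // Pull the `-rustc-lld-flavor=<flavor>` option out of the argument list and
+    // forward everything else untouched; the flavor is re-emitted below as the
+    // leading `-flavor <flavor>` pair that lld expects.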
+ let mut flavor = None;
+ let args = env::args_os()
+ .skip(1)
+ .filter(|arg| match arg.to_str().and_then(|s| s.strip_prefix("-rustc-lld-flavor=")) {
+ Some(suffix) => {
+ flavor = Some(suffix.to_string());
+ false
+ }
+ None => true,
+ })
+ .collect::<Vec<_>>();
+
+ command.arg("-flavor");
+ command.arg(flavor.unwrap_or_exit_with("-rustc-lld-flavor=<flavor> is not passed"));
+ command.args(args);
+ command
+}
+
+#[cfg(unix)]
+fn exec_lld(mut command: process::Command) {
+ use std::os::unix::prelude::CommandExt;
+ Result::<(), _>::Err(command.exec()).unwrap_or_exit_with("could not exec rust-lld");
+ unreachable!("lld-wrapper: after exec without error");
+}
+
+#[cfg(not(unix))]
+fn exec_lld(mut command: process::Command) {
+ // Windows has no exec(), spawn a child process and wait for it.
+ let exit_status = command.status().unwrap_or_exit_with("error running rust-lld child process");
+ let code = exit_status
+ .code()
+ .ok_or(exit_status)
+ .unwrap_or_exit_with("rust-lld child process exited with error");
+ // Return the original lld exit code.
+ process::exit(code);
+}
+
+fn main() {
+ let current_exe_path =
+ env::current_exe().unwrap_or_exit_with("could not get the path of the current executable");
+
+ exec_lld(get_rust_lld_command(current_exe_path.as_ref()));
+}
diff --git a/src/tools/publish_toolstate.py b/src/tools/publish_toolstate.py
new file mode 100755
index 000000000..fe5195738
--- /dev/null
+++ b/src/tools/publish_toolstate.py
@@ -0,0 +1,373 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# This script computes the new "current" toolstate for the toolstate repo (not to be
+# confused with publishing the test results, which happens in `src/bootstrap/toolstate.rs`).
+# It gets called from `src/ci/publish_toolstate.sh` when a new commit lands on `master`
+# (i.e., after it passed all checks on `auto`).
+
+from __future__ import print_function
+
+import sys
+import re
+import os
+import json
+import datetime
+import collections
+import textwrap
+try:
+ import urllib2
+ from urllib2 import HTTPError
+except ImportError:
+ import urllib.request as urllib2
+ from urllib.error import HTTPError
+try:
+ import typing
+except ImportError:
+ pass
+
+# List of people to ping when the status of a tool or a book changed.
+# These should be collaborators of the rust-lang/rust repository (with at least
+# read privileges on it). CI will fail otherwise.
+MAINTAINERS = {
+ 'miri': {'oli-obk', 'RalfJung'},
+ 'rls': {'Xanewok'},
+ 'rustfmt': {'topecongiro', 'calebcartwright'},
+ 'book': {'carols10cents', 'steveklabnik'},
+ 'nomicon': {'frewsxcv', 'Gankra', 'JohnTitor'},
+ 'reference': {'steveklabnik', 'Havvy', 'matthewjasper', 'ehuss'},
+ 'rust-by-example': {'steveklabnik', 'marioidival'},
+ 'embedded-book': {'adamgreig', 'andre-richter', 'jamesmunns', 'therealprof'},
+ 'edition-guide': {'ehuss', 'steveklabnik'},
+ 'rustc-dev-guide': {'spastorino', 'amanjeev', 'JohnTitor'},
+}
+
+LABELS = {
+ 'miri': ['A-miri', 'C-bug'],
+ 'rls': ['A-rls', 'C-bug'],
+ 'rustfmt': ['A-rustfmt', 'C-bug'],
+ 'book': ['C-bug'],
+ 'nomicon': ['C-bug'],
+ 'reference': ['C-bug'],
+ 'rust-by-example': ['C-bug'],
+ 'embedded-book': ['C-bug'],
+ 'edition-guide': ['C-bug'],
+ 'rustc-dev-guide': ['C-bug'],
+}
+
+REPOS = {
+ 'miri': 'https://github.com/rust-lang/miri',
+ 'rls': 'https://github.com/rust-lang/rls',
+ 'rustfmt': 'https://github.com/rust-lang/rustfmt',
+ 'book': 'https://github.com/rust-lang/book',
+ 'nomicon': 'https://github.com/rust-lang/nomicon',
+ 'reference': 'https://github.com/rust-lang/reference',
+ 'rust-by-example': 'https://github.com/rust-lang/rust-by-example',
+ 'embedded-book': 'https://github.com/rust-embedded/book',
+ 'edition-guide': 'https://github.com/rust-lang/edition-guide',
+ 'rustc-dev-guide': 'https://github.com/rust-lang/rustc-dev-guide',
+}
+
+def load_json_from_response(resp):
+ # type: (typing.Any) -> typing.Any
+ content = resp.read()
+ if isinstance(content, bytes):
+ content_str = content.decode('utf-8')
+    else:
+        # Fail loudly here; otherwise `content_str` would be unbound below.
+        raise TypeError("Refusing to decode " + str(type(content)) + " to str")
+ return json.loads(content_str)
+
+def validate_maintainers(repo, github_token):
+ # type: (str, str) -> None
+ '''Ensure all maintainers are assignable on a GitHub repo'''
+ next_link_re = re.compile(r'<([^>]+)>; rel="next"')
+
+ # Load the list of assignable people in the GitHub repo
+ assignable = [] # type: typing.List[str]
+ url = 'https://api.github.com/repos/' \
+ + '%s/collaborators?per_page=100' % repo # type: typing.Optional[str]
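+    # The collaborators endpoint is paginated; keep following the `rel="next"`
+    # URL from the `Link` response header until there are no more pages.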
+ while url is not None:
+ response = urllib2.urlopen(urllib2.Request(url, headers={
+ 'Authorization': 'token ' + github_token,
+ # Properly load nested teams.
+ 'Accept': 'application/vnd.github.hellcat-preview+json',
+ }))
+ assignable.extend(user['login'] for user in load_json_from_response(response))
+ # Load the next page if available
+ url = None
+ link_header = response.headers.get('Link')
+ if link_header:
+ matches = next_link_re.match(link_header)
+ if matches is not None:
+ url = matches.group(1)
+
+ errors = False
+ for tool, maintainers in MAINTAINERS.items():
+ for maintainer in maintainers:
+ if maintainer not in assignable:
+ errors = True
+ print(
+ "error: %s maintainer @%s is not assignable in the %s repo"
+ % (tool, maintainer, repo),
+ )
+
+ if errors:
+ print()
+ print(" To be assignable, a person needs to be explicitly listed as a")
+ print(" collaborator in the repository settings. The simple way to")
+ print(" fix this is to ask someone with 'admin' privileges on the repo")
+ print(" to add the person or whole team as a collaborator with 'read'")
+ print(" privileges. Those privileges don't grant any extra permissions")
+ print(" so it's safe to apply them.")
+ print()
+ print("The build will fail due to this.")
+ exit(1)
+
+
+def read_current_status(current_commit, path):
+ # type: (str, str) -> typing.Mapping[str, typing.Any]
+ '''Reads build status of `current_commit` from content of `history/*.tsv`
+ '''
+ with open(path, 'r') as f:
+ for line in f:
+ (commit, status) = line.split('\t', 1)
+ if commit == current_commit:
+ return json.loads(status)
+ return {}
+
+
+def gh_url():
+ # type: () -> str
+ return os.environ['TOOLSTATE_ISSUES_API_URL']
+
+
+def maybe_delink(message):
+ # type: (str) -> str
+ if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
+ return message.replace("@", "")
+ return message
+
+
+def issue(
+ tool,
+ status,
+ assignees,
+ relevant_pr_number,
+ relevant_pr_user,
+ labels,
+ github_token,
+):
+ # type: (str, str, typing.Iterable[str], str, str, typing.List[str], str) -> None
+ '''Open an issue about the toolstate failure.'''
+ if status == 'test-fail':
+ status_description = 'has failing tests'
+ else:
+ status_description = 'no longer builds'
+ request = json.dumps({
+ 'body': maybe_delink(textwrap.dedent('''\
+ Hello, this is your friendly neighborhood mergebot.
+ After merging PR {}, I observed that the tool {} {}.
+ A follow-up PR to the repository {} is needed to fix the fallout.
+
+ cc @{}, do you think you would have time to do the follow-up work?
+ If so, that would be great!
+ ''').format(
+ relevant_pr_number, tool, status_description,
+ REPOS.get(tool), relevant_pr_user
+ )),
+ 'title': '`{}` no longer builds after {}'.format(tool, relevant_pr_number),
+ 'assignees': list(assignees),
+ 'labels': labels,
+ })
+ print("Creating issue:\n{}".format(request))
+ response = urllib2.urlopen(urllib2.Request(
+ gh_url(),
+ request.encode(),
+ {
+ 'Authorization': 'token ' + github_token,
+ 'Content-Type': 'application/json',
+ }
+ ))
+ response.read()
+
+
+def update_latest(
+ current_commit,
+ relevant_pr_number,
+ relevant_pr_url,
+ relevant_pr_user,
+ pr_reviewer,
+ current_datetime,
+ github_token,
+):
+ # type: (str, str, str, str, str, str, str) -> str
+ '''Updates `_data/latest.json` to match build result of the given commit.
+ '''
+ with open('_data/latest.json', 'r+') as f:
+ latest = json.load(f, object_pairs_hook=collections.OrderedDict)
+
+ current_status = {
+ os: read_current_status(current_commit, 'history/' + os + '.tsv')
+ for os in ['windows', 'linux']
+ }
+
+ slug = 'rust-lang/rust'
+ message = textwrap.dedent('''\
+ 📣 Toolstate changed by {}!
+
+ Tested on commit {}@{}.
+ Direct link to PR: <{}>
+
+ ''').format(relevant_pr_number, slug, current_commit, relevant_pr_url)
+ anything_changed = False
+ for status in latest:
+ tool = status['tool']
+ changed = False
+ create_issue_for_status = None # set to the status that caused the issue
+
+ for os, s in current_status.items():
+ old = status[os]
+ new = s.get(tool, old)
+ status[os] = new
+ maintainers = ' '.join('@'+name for name in MAINTAINERS.get(tool, ()))
+ # comparing the strings, but they are ordered appropriately:
+ # "test-pass" > "test-fail" > "build-fail"
+ if new > old:
+ # things got fixed or at least the status quo improved
+ changed = True
+ message += '🎉 {} on {}: {} → {} (cc {}).\n' \
+ .format(tool, os, old, new, maintainers)
+ elif new < old:
+ # tests or builds are failing and were not failing before
+ changed = True
+ title = '💔 {} on {}: {} → {}' \
+ .format(tool, os, old, new)
+ message += '{} (cc {}).\n' \
+ .format(title, maintainers)
+ # See if we need to create an issue.
+ if tool == 'miri':
+ # Create issue if tests used to pass before. Don't open a *second*
+ # issue when we regress from "test-fail" to "build-fail".
+ if old == 'test-pass':
+ create_issue_for_status = new
+ else:
+ # Create issue if things no longer build.
+ # (No issue for mere test failures to avoid spurious issues.)
+ if new == 'build-fail':
+ create_issue_for_status = new
+
+ if create_issue_for_status is not None:
+ try:
+ issue(
+ tool, create_issue_for_status, MAINTAINERS.get(tool, ()),
+ relevant_pr_number, relevant_pr_user, LABELS.get(tool, []),
+ github_token,
+ )
+ except HTTPError as e:
+ # network errors will simply end up not creating an issue, but that's better
+ # than failing the entire build job
+ print("HTTPError when creating issue for status regression: {0}\n{1!r}"
+ .format(e, e.read()))
+ except IOError as e:
+ print("I/O error when creating issue for status regression: {0}".format(e))
+ except:
+ print("Unexpected error when creating issue for status regression: {0}"
+ .format(sys.exc_info()[0]))
+ raise
+
+ if changed:
+ status['commit'] = current_commit
+ status['datetime'] = current_datetime
+ anything_changed = True
+
+ if not anything_changed:
+ return ''
+
+ f.seek(0)
+ f.truncate(0)
+ json.dump(latest, f, indent=4, separators=(',', ': '))
+ return message
+
+
+# Warning: Do not try to add a function containing the body of this try block.
+# There are variables declared within that are implicitly global; it is not
+# known precisely which ones, but `github_token` is at least one of them.
+try:
+ if __name__ != '__main__':
+ exit(0)
+ repo = os.environ.get('TOOLSTATE_VALIDATE_MAINTAINERS_REPO')
+ if repo:
+ github_token = os.environ.get('TOOLSTATE_REPO_ACCESS_TOKEN')
+ if github_token:
+ # FIXME: This is currently broken. Starting on 2021-09-15, GitHub
+ # seems to have changed it so that to list the collaborators
+ # requires admin permissions. I think this will probably just need
+ # to be removed since we are probably not going to use an admin
+ # token, and I don't see another way to do this.
+            print('maintainer validation disabled')
+            # validate_maintainers(repo, github_token)
+        else:
+            print('skipping toolstate maintainers validation since no GitHub token is present')
+        # When validating maintainers don't run the full script.
+        exit(0)
+
+    cur_commit = sys.argv[1]
+    cur_datetime = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
+    cur_commit_msg = sys.argv[2]
+    save_message_to_path = sys.argv[3]
+    github_token = sys.argv[4]
+
+    # assume that PR authors are also owners of the repo where the branch lives
+    relevant_pr_match = re.search(
+        r'Auto merge of #([0-9]+) - ([^:]+):[^,]+, r=(\S+)',
+        cur_commit_msg,
+    )
+    if relevant_pr_match:
+        number = relevant_pr_match.group(1)
+        relevant_pr_user = relevant_pr_match.group(2)
+        relevant_pr_number = 'rust-lang/rust#' + number
+        relevant_pr_url = 'https://github.com/rust-lang/rust/pull/' + number
+        pr_reviewer = relevant_pr_match.group(3)
+    else:
+        number = '-1'
+        relevant_pr_user = 'ghost'
+        relevant_pr_number = '<unknown PR>'
+        relevant_pr_url = '<unknown>'
+        pr_reviewer = 'ghost'
+
+    message = update_latest(
+        cur_commit,
+        relevant_pr_number,
+        relevant_pr_url,
+        relevant_pr_user,
+        pr_reviewer,
+        cur_datetime,
+        github_token,
+    )
+    if not message:
+        print('<Nothing changed>')
+        sys.exit(0)
+
+    print(message)
+
+    if not github_token:
+        print('Dry run only, not committing anything')
+        sys.exit(0)
+
+    with open(save_message_to_path, 'w') as f:
+        f.write(message)
+
+    # Write the toolstate comment on the PR as well.
+    issue_url = gh_url() + '/{}/comments'.format(number)
+    response = urllib2.urlopen(urllib2.Request(
+        issue_url,
+        json.dumps({'body': maybe_delink(message)}).encode(),
+        {
+            'Authorization': 'token ' + github_token,
+            'Content-Type': 'application/json',
+        }
+    ))
+    response.read()
+except HTTPError as e:
+    print("HTTPError: %s\n%r" % (e, e.read()))
+    raise
diff --git a/src/tools/rust-analyzer/.cargo/config.toml b/src/tools/rust-analyzer/.cargo/config.toml
new file mode 100644
index 000000000..24745d1c8
--- /dev/null
+++ b/src/tools/rust-analyzer/.cargo/config.toml
@@ -0,0 +1,11 @@
+[alias]
+xtask = "run --package xtask --bin xtask --"
+tq = "test -- -q"
+qt = "tq"
+lint = "clippy --all-targets -- -Aclippy::collapsible_if -Aclippy::needless_pass_by_value -Aclippy::nonminimal_bool -Aclippy::redundant_pattern_matching --cap-lints warn"
+
+[target.x86_64-pc-windows-msvc]
+linker = "rust-lld"
+
+[env]
+CARGO_WORKSPACE_DIR = { value = "", relative = true }
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/.editorconfig b/src/tools/rust-analyzer/.editorconfig
new file mode 100644
index 000000000..314f79d3f
--- /dev/null
+++ b/src/tools/rust-analyzer/.editorconfig
@@ -0,0 +1,19 @@
+# https://EditorConfig.org
+root = true
+
+[*]
+charset = utf-8
+trim_trailing_whitespace = true
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+
+[*.{rs,toml}]
+indent_size = 4
+
+[*.ts]
+indent_size = 4
+[*.js]
+indent_size = 4
+[*.json]
+indent_size = 4
diff --git a/src/tools/rust-analyzer/.git-blame-ignore-revs b/src/tools/rust-analyzer/.git-blame-ignore-revs
new file mode 100644
index 000000000..a302e2378
--- /dev/null
+++ b/src/tools/rust-analyzer/.git-blame-ignore-revs
@@ -0,0 +1,8 @@
+# for this file to take effect, make sure you use git ^2.23 and
+# add ignoreFile to your git configuration:
+# ```
+# git config --global blame.ignoreRevsFile .git-blame-ignore-revs
+# ```
+
+# prettier format
+f247090558c9ba3c551566eae5882b7ca865225f
diff --git a/src/tools/rust-analyzer/.vscode/extensions.json b/src/tools/rust-analyzer/.vscode/extensions.json
new file mode 100644
index 000000000..027eeabc4
--- /dev/null
+++ b/src/tools/rust-analyzer/.vscode/extensions.json
@@ -0,0 +1,9 @@
+{
+ // See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
+ // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
+
+ // List of extensions which should be recommended for users of this workspace.
+ "recommendations": ["vadimcn.vscode-lldb"],
+ // List of extensions recommended by VS Code that should not be recommended for users of this workspace.
+ "unwantedRecommendations": []
+}
diff --git a/src/tools/rust-analyzer/.vscode/launch.json b/src/tools/rust-analyzer/.vscode/launch.json
new file mode 100644
index 000000000..021b8f048
--- /dev/null
+++ b/src/tools/rust-analyzer/.vscode/launch.json
@@ -0,0 +1,131 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+
+ // NOTE: --disable-extensions
+ // Disable all installed extensions to increase performance of the debug instance
+ // and prevent potential conflicts with other installed extensions.
+
+ "version": "0.2.0",
+ "configurations": [
+ {
+ // Used for testing the extension with the installed LSP server.
+ "name": "Run Installed Extension",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ // "--user-data-dir=${workspaceFolder}/target/code",
+ "--disable-extensions",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ]
+ },
+ {
+ // Used for testing the extension with a local build of the LSP server (in `target/debug`).
+ "name": "Run Extension (Debug Build)",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--disable-extensions",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Server and Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ],
+ "env": {
+ "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/rust-analyzer"
+ }
+ },
+ {
+ // Used for testing the extension with a local build of the LSP server (in `target/release`).
+ "name": "Run Extension (Release Build)",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--disable-extensions",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Server (Release) and Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ],
+ "env": {
+ "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/release/rust-analyzer"
+ }
+ },
+ {
+ // Used for testing the extension with a local build of the LSP server (in `target/release`)
+ // with all other extensions loaded.
+ "name": "Run With Extensions",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--disable-extension", "matklad.rust-analyzer",
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
+ ],
+ "outFiles": [
+ "${workspaceFolder}/editors/code/out/**/*.js"
+ ],
+ "preLaunchTask": "Build Server (Release) and Extension",
+ "skipFiles": [
+ "<node_internals>/**/*.js"
+ ],
+ "env": {
+ "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/release/rust-analyzer"
+ }
+ },
+ {
+ // Used to attach LLDB to a running LSP server.
+ // NOTE: Might require root permissions. For this, run:
+ //
+ // `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`
+ //
+ // Don't forget to set `debug = 2` in `Cargo.toml` before building the server
+
+ "name": "Attach To Server",
+ "type": "lldb",
+ "request": "attach",
+ "program": "${workspaceFolder}/target/debug/rust-analyzer",
+ "pid": "${command:pickMyProcess}",
+ "sourceLanguages": [
+ "rust"
+ ]
+ },
+ {
+ "name": "Run Unit Tests",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--extensionDevelopmentPath=${workspaceFolder}/editors/code",
+ "--extensionTestsPath=${workspaceFolder}/editors/code/out/tests/unit" ],
+ "sourceMaps": true,
+ "outFiles": [ "${workspaceFolder}/editors/code/out/tests/unit/**/*.js" ],
+ "preLaunchTask": "Pretest"
+ },
+ {
+ "name": "Win Attach to Server",
+ "type": "cppvsdbg",
+ "processId":"${command:pickProcess}",
+ "request": "attach"
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/.vscode/tasks.json b/src/tools/rust-analyzer/.vscode/tasks.json
new file mode 100644
index 000000000..a25dff19e
--- /dev/null
+++ b/src/tools/rust-analyzer/.vscode/tasks.json
@@ -0,0 +1,67 @@
+// See https://go.microsoft.com/fwlink/?LinkId=733558
+// for the documentation about the tasks.json format
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "Build Extension in Background",
+ "group": "build",
+ "type": "npm",
+ "script": "watch",
+ "path": "editors/code/",
+ "problemMatcher": {
+ "base": "$tsc-watch",
+ "fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
+ },
+ "isBackground": true,
+ },
+ {
+ "label": "Build Extension",
+ "group": "build",
+ "type": "npm",
+ "script": "build",
+ "path": "editors/code/",
+ "problemMatcher": {
+ "base": "$tsc",
+ "fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
+ },
+ },
+ {
+ "label": "Build Server",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo build --package rust-analyzer",
+ "problemMatcher": "$rustc"
+ },
+ {
+ "label": "Build Server (Release)",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo build --release --package rust-analyzer",
+ "problemMatcher": "$rustc"
+ },
+ {
+ "label": "Pretest",
+ "group": "build",
+ "isBackground": false,
+ "type": "npm",
+ "script": "pretest",
+ "path": "editors/code/",
+ "problemMatcher": {
+ "base": "$tsc",
+ "fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
+ }
+ },
+
+ {
+ "label": "Build Server and Extension",
+ "dependsOn": ["Build Server", "Build Extension"],
+ "problemMatcher": "$rustc"
+ },
+ {
+ "label": "Build Server (Release) and Extension",
+ "dependsOn": ["Build Server (Release)", "Build Extension"],
+ "problemMatcher": "$rustc"
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
new file mode 100644
index 000000000..703f0e5b8
--- /dev/null
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -0,0 +1,2101 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "always-assert"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbf688625d06217d5b1bb0ea9d9c44a1635fd0ee3534466388d18203174f4d11"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.58"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704"
+
+[[package]]
+name = "anymap"
+version = "1.0.0-beta.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
+
+[[package]]
+name = "arbitrary"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a7924531f38b1970ff630f03eb20a2fde69db5c590c93b0f3482e95dcc5fd60"
+
+[[package]]
+name = "arrayvec"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "backtrace"
+version = "0.3.65"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object 0.28.4",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "base-db"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "profile",
+ "rustc-hash",
+ "salsa",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tt",
+ "vfs",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "camino"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "869119e97797867fd90f5e22af7d0bd274bd4635ebb9eb68c04f3f513ae6c412"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo-platform"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+
+[[package]]
+name = "cfg"
+version = "0.0.0"
+dependencies = [
+ "arbitrary",
+ "derive_arbitrary",
+ "expect-test",
+ "mbe",
+ "oorandom",
+ "rustc-hash",
+ "syntax",
+ "tt",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chalk-derive"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83553c2ef7717e58aecdf42dd9e3c876229f5a1f35a16435b5ddc4addef81827"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "chalk-ir"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dd42107d579d8ec2a5af20a8de62a37524a67bf6a4c0ff08a950068f0bfea91"
+dependencies = [
+ "bitflags",
+ "chalk-derive",
+ "lazy_static",
+]
+
+[[package]]
+name = "chalk-recursive"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c444031541a76c13c145e76d91f1548e9feb2240e7f0c3e77879ceb694994f2d"
+dependencies = [
+ "chalk-derive",
+ "chalk-ir",
+ "chalk-solve",
+ "rustc-hash",
+ "tracing",
+]
+
+[[package]]
+name = "chalk-solve"
+version = "0.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c76f2db19c5e8a3d42340cf5b4d90b8c218750536fca35e2bb285ab6653c0bc8"
+dependencies = [
+ "chalk-derive",
+ "chalk-ir",
+ "ena",
+ "indexmap",
+ "itertools",
+ "petgraph",
+ "rustc-hash",
+ "tracing",
+]
+
+[[package]]
+name = "countme"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
+dependencies = [
+ "dashmap",
+ "once_cell",
+ "rustc-hash",
+]
+
+[[package]]
+name = "cov-mark"
+version = "2.0.0-pre.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a"
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
+dependencies = [
+ "cfg-if",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-queue",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "once_cell",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
+[[package]]
+name = "dashmap"
+version = "5.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3495912c9c1ccf2e18976439f4443f3fee0fd61f424ff99fde6a66b15ecb448f"
+dependencies = [
+ "cfg-if",
+ "hashbrown",
+ "lock_api",
+ "parking_lot_core 0.9.3",
+]
+
+[[package]]
+name = "derive_arbitrary"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9a577516173adb681466d517d39bd468293bc2c2a16439375ef0f35bba45f3d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "dissimilar"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"
+
+[[package]]
+name = "dot"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a74b6c4d4a1cff5f454164363c16b72fa12463ca6b31f4b5f2035a65fa3d5906"
+
+[[package]]
+name = "drop_bomb"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
+
+[[package]]
+name = "either"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
+
+[[package]]
+name = "ena"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "expect-test"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d4661aca38d826eb7c72fe128e4238220616de4c0cc00db7bfc38e2e1364dd3"
+dependencies = [
+ "dissimilar",
+ "once_cell",
+]
+
+[[package]]
+name = "filetime"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "fixedbitset"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
+
+[[package]]
+name = "flate2"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "flycheck"
+version = "0.0.0"
+dependencies = [
+ "cargo_metadata",
+ "crossbeam-channel",
+ "jod-thread",
+ "paths",
+ "serde",
+ "serde_json",
+ "stdx",
+ "toolchain",
+ "tracing",
+]
+
+[[package]]
+name = "form_urlencoded"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
+dependencies = [
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "fs_extra"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
+
+[[package]]
+name = "fsevent-sys"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "fst"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
+
+[[package]]
+name = "gimli"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
+
+[[package]]
+name = "hashbrown"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"
+
+[[package]]
+name = "heck"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hir"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "cfg",
+ "either",
+ "hir-def",
+ "hir-expand",
+ "hir-ty",
+ "itertools",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "tt",
+]
+
+[[package]]
+name = "hir-def"
+version = "0.0.0"
+dependencies = [
+ "anymap",
+ "arrayvec",
+ "base-db",
+ "bitflags",
+ "cfg",
+ "cov-mark",
+ "dashmap",
+ "drop_bomb",
+ "either",
+ "expect-test",
+ "fst",
+ "hashbrown",
+ "hir-expand",
+ "indexmap",
+ "itertools",
+ "la-arena",
+ "limit",
+ "mbe",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "hir-expand"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "cfg",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hashbrown",
+ "itertools",
+ "la-arena",
+ "limit",
+ "mbe",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "hir-ty"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "chalk-ir",
+ "chalk-recursive",
+ "chalk-solve",
+ "cov-mark",
+ "ena",
+ "expect-test",
+ "hir-def",
+ "hir-expand",
+ "itertools",
+ "la-arena",
+ "limit",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "scoped-tls",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
+ "typed-arena",
+]
+
+[[package]]
+name = "home"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "ide"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "cov-mark",
+ "crossbeam-channel",
+ "dot",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-assists",
+ "ide-completion",
+ "ide-db",
+ "ide-diagnostics",
+ "ide-ssr",
+ "itertools",
+ "oorandom",
+ "profile",
+ "pulldown-cmark",
+ "pulldown-cmark-to-cmark",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+ "toolchain",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "ide-assists"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "profile",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-completion"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "cov-mark",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "once_cell",
+ "profile",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-db"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "fst",
+ "hir",
+ "indexmap",
+ "itertools",
+ "limit",
+ "once_cell",
+ "parser",
+ "profile",
+ "rayon",
+ "rustc-hash",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+ "tracing",
+ "xshell",
+]
+
+[[package]]
+name = "ide-diagnostics"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "profile",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-ssr"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "parser",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "inotify"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
+dependencies = [
+ "bitflags",
+ "inotify-sys",
+ "libc",
+]
+
+[[package]]
+name = "inotify-sys"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
+
+[[package]]
+name = "jod-thread"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
+
+[[package]]
+name = "kqueue"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d6112e8f37b59803ac47a42d14f1f3a59bbf72fc6857ffc5be455e28a691f8e"
+dependencies = [
+ "kqueue-sys",
+ "libc",
+]
+
+[[package]]
+name = "kqueue-sys"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587"
+dependencies = [
+ "bitflags",
+ "libc",
+]
+
+[[package]]
+name = "la-arena"
+version = "0.3.0"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.126"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+
+[[package]]
+name = "libloading"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
+dependencies = [
+ "cfg-if",
+ "winapi",
+]
+
+[[package]]
+name = "libmimalloc-sys"
+version = "0.1.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "limit"
+version = "0.0.0"
+
+[[package]]
+name = "lock_api"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "lsp-server"
+version = "0.6.0"
+dependencies = [
+ "crossbeam-channel",
+ "log",
+ "lsp-types",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "lsp-types"
+version = "0.93.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70c74e2173b2b31f8655d33724b4b45ac13f439386f66290f539c22b144c2212"
+dependencies = [
+ "bitflags",
+ "serde",
+ "serde_json",
+ "serde_repr",
+ "url",
+]
+
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
+
+[[package]]
+name = "mbe"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "parser",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "memmap2"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5172b50c23043ff43dd53e51392f36519d9b35a8f3a410d30ece5d1aedd58ae"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "mimalloc"
+version = "0.1.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
+dependencies = [
+ "libc",
+ "log",
+ "wasi",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "miow"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7377f7792b3afb6a3cba68daa54ca23c032137010460d667fda53a8d66be00e"
+dependencies = [
+ "windows-sys 0.28.0",
+]
+
+[[package]]
+name = "notify"
+version = "5.0.0-pre.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "553f9844ad0b0824605c20fb55a661679782680410abfb1a8144c2e7e437e7a7"
+dependencies = [
+ "bitflags",
+ "crossbeam-channel",
+ "filetime",
+ "fsevent-sys",
+ "inotify",
+ "kqueue",
+ "libc",
+ "mio",
+ "walkdir",
+ "winapi",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "object"
+version = "0.28.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
+
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
+[[package]]
+name = "parking_lot"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+dependencies = [
+ "instant",
+ "lock_api",
+ "parking_lot_core 0.8.5",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+dependencies = [
+ "lock_api",
+ "parking_lot_core 0.9.3",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
+dependencies = [
+ "cfg-if",
+ "instant",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "winapi",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "parser"
+version = "0.0.0"
+dependencies = [
+ "drop_bomb",
+ "expect-test",
+ "limit",
+ "rustc-ap-rustc_lexer",
+ "sourcegen",
+]
+
+[[package]]
+name = "paste"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc"
+
+[[package]]
+name = "paths"
+version = "0.0.0"
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+
+[[package]]
+name = "perf-event"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5396562cd2eaa828445d6d34258ae21ee1eb9d40fe626ca7f51c8dccb4af9d66"
+dependencies = [
+ "libc",
+ "perf-event-open-sys",
+]
+
+[[package]]
+name = "perf-event-open-sys"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce9bedf5da2c234fdf2391ede2b90fabf585355f33100689bc364a3ea558561a"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "petgraph"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
+dependencies = [
+ "fixedbitset",
+ "indexmap",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
+
+[[package]]
+name = "proc-macro-api"
+version = "0.0.0"
+dependencies = [
+ "memmap2",
+ "object 0.29.0",
+ "paths",
+ "profile",
+ "serde",
+ "serde_json",
+ "snap",
+ "stdx",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "proc-macro-srv"
+version = "0.0.0"
+dependencies = [
+ "crossbeam",
+ "expect-test",
+ "libloading",
+ "mbe",
+ "memmap2",
+ "object 0.29.0",
+ "paths",
+ "proc-macro-api",
+ "proc-macro-test",
+ "tt",
+]
+
+[[package]]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+dependencies = [
+ "proc-macro-srv",
+]
+
+[[package]]
+name = "proc-macro-test"
+version = "0.0.0"
+dependencies = [
+ "cargo_metadata",
+ "proc-macro-test-impl",
+ "toolchain",
+]
+
+[[package]]
+name = "proc-macro-test-impl"
+version = "0.0.0"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "profile"
+version = "0.0.0"
+dependencies = [
+ "cfg-if",
+ "countme",
+ "la-arena",
+ "libc",
+ "once_cell",
+ "perf-event",
+ "tikv-jemalloc-ctl",
+ "winapi",
+]
+
+[[package]]
+name = "project-model"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "base-db",
+ "cargo_metadata",
+ "cfg",
+ "expect-test",
+ "la-arena",
+ "paths",
+ "profile",
+ "rustc-hash",
+ "semver",
+ "serde",
+ "serde_json",
+ "stdx",
+ "toolchain",
+ "tracing",
+]
+
+[[package]]
+name = "pulldown-cmark"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6"
+dependencies = [
+ "bitflags",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
+name = "pulldown-cmark-to-cmark"
+version = "10.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1353ac408192fa925228d3e60ff746167d03f4f7e54835d78ef79e08225d913"
+dependencies = [
+ "pulldown-cmark",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rayon"
+version = "1.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "num_cpus",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
+
+[[package]]
+name = "rowan"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e88acf7b001007e9e8c989fe7449f6601d909e5dd2c56399fc158977ad6c56e8"
+dependencies = [
+ "countme",
+ "hashbrown",
+ "memoffset",
+ "rustc-hash",
+ "text-size",
+]
+
+[[package]]
+name = "rust-analyzer"
+version = "0.0.0"
+dependencies = [
+ "always-assert",
+ "anyhow",
+ "cfg",
+ "crossbeam-channel",
+ "dissimilar",
+ "expect-test",
+ "flycheck",
+ "hir",
+ "hir-def",
+ "hir-ty",
+ "ide",
+ "ide-db",
+ "ide-ssr",
+ "itertools",
+ "jod-thread",
+ "lsp-server",
+ "lsp-types",
+ "mbe",
+ "mimalloc",
+ "num_cpus",
+ "oorandom",
+ "parking_lot 0.12.1",
+ "proc-macro-api",
+ "proc-macro-srv",
+ "profile",
+ "project-model",
+ "rayon",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "threadpool",
+ "tikv-jemallocator",
+ "toolchain",
+ "tracing",
+ "tracing-log",
+ "tracing-subscriber",
+ "tracing-tree",
+ "tt",
+ "vfs",
+ "vfs-notify",
+ "winapi",
+ "xflags",
+ "xshell",
+]
+
+[[package]]
+name = "rustc-ap-rustc_lexer"
+version = "725.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "ryu"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
+
+[[package]]
+name = "salsa"
+version = "0.17.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
+dependencies = [
+ "crossbeam-utils",
+ "indexmap",
+ "lock_api",
+ "log",
+ "oorandom",
+ "parking_lot 0.11.2",
+ "rustc-hash",
+ "salsa-macros",
+ "smallvec",
+]
+
+[[package]]
+name = "salsa-macros"
+version = "0.17.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac6c2e352df550bf019da7b16164ed2f7fa107c39653d1311d1bba42d1582ff7"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scoped-tls"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "semver"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.138"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1578c6245786b9d168c5447eeacfb96856573ca56c9d68fdcf394be134882a47"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.138"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "023e9b1467aef8a10fb88f25611870ada9800ef7e22afce356bb0d2387b6f27c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.82"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_repr"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2ad84e47328a31223de7fed7a4f5087f2d6ddfe586cf3ca25b7a165bc0a5aed"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
+
+[[package]]
+name = "smol_str"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "snap"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
+
+[[package]]
+name = "sourcegen"
+version = "0.0.0"
+dependencies = [
+ "xshell",
+]
+
+[[package]]
+name = "stdx"
+version = "0.0.0"
+dependencies = [
+ "always-assert",
+ "backtrace",
+ "libc",
+ "miow",
+ "winapi",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.98"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
+[[package]]
+name = "syntax"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "expect-test",
+ "indexmap",
+ "itertools",
+ "once_cell",
+ "parser",
+ "proc-macro2",
+ "profile",
+ "quote",
+ "rayon",
+ "rowan",
+ "rustc-ap-rustc_lexer",
+ "rustc-hash",
+ "smol_str",
+ "sourcegen",
+ "stdx",
+ "test-utils",
+ "text-edit",
+ "ungrammar",
+]
+
+[[package]]
+name = "test-utils"
+version = "0.0.0"
+dependencies = [
+ "dissimilar",
+ "profile",
+ "rustc-hash",
+ "stdx",
+ "text-size",
+]
+
+[[package]]
+name = "text-edit"
+version = "0.0.0"
+dependencies = [
+ "itertools",
+ "text-size",
+]
+
+[[package]]
+name = "text-size"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "tikv-jemalloc-ctl"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e37706572f4b151dff7a0146e040804e9c26fe3a3118591112f05cf12a4216c1"
+dependencies = [
+ "libc",
+ "paste",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "tikv-jemalloc-sys"
+version = "0.5.1+5.3.0-patched"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "931e876f91fed0827f863a2d153897790da0b24d882c721a79cb3beb0b903261"
+dependencies = [
+ "cc",
+ "fs_extra",
+ "libc",
+]
+
+[[package]]
+name = "tikv-jemallocator"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20612db8a13a6c06d57ec83953694185a367e16945f66565e8028d2c0bd76979"
+dependencies = [
+ "libc",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "toolchain"
+version = "0.0.0"
+dependencies = [
+ "home",
+]
+
+[[package]]
+name = "tracing"
+version = "0.1.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160"
+dependencies = [
+ "cfg-if",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+dependencies = [
+ "lazy_static",
+ "log",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a713421342a5a666b7577783721d3117f1b69a393df803ee17bb73b1e122a59"
+dependencies = [
+ "matchers",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "tracing-tree"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "tt"
+version = "0.0.0"
+dependencies = [
+ "smol_str",
+ "stdx",
+]
+
+[[package]]
+name = "typed-arena"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
+
+[[package]]
+name = "ungrammar"
+version = "1.16.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
+
+[[package]]
+name = "url"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "matches",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "vfs"
+version = "0.0.0"
+dependencies = [
+ "fst",
+ "indexmap",
+ "paths",
+ "rustc-hash",
+]
+
+[[package]]
+name = "vfs-notify"
+version = "0.0.0"
+dependencies = [
+ "crossbeam-channel",
+ "jod-thread",
+ "notify",
+ "paths",
+ "tracing",
+ "vfs",
+ "walkdir",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-sys"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6"
+dependencies = [
+ "windows_aarch64_msvc 0.28.0",
+ "windows_i686_gnu 0.28.0",
+ "windows_i686_msvc 0.28.0",
+ "windows_x86_64_gnu 0.28.0",
+ "windows_x86_64_msvc 0.28.0",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+dependencies = [
+ "windows_aarch64_msvc 0.36.1",
+ "windows_i686_gnu 0.36.1",
+ "windows_i686_msvc 0.36.1",
+ "windows_x86_64_gnu 0.36.1",
+ "windows_x86_64_msvc 0.36.1",
+]
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+
+[[package]]
+name = "write-json"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3"
+
+[[package]]
+name = "xflags"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f14fe1ed41a5a2b5ef3f565586c4a8a559ee55d3953faab360a771135bdee00"
+dependencies = [
+ "xflags-macros",
+]
+
+[[package]]
+name = "xflags-macros"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45d11d5fc2a97287eded8b170ca80533b3c42646dd7fa386a5eb045817921022"
+
+[[package]]
+name = "xshell"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981"
+dependencies = [
+ "xshell-macros",
+]
+
+[[package]]
+name = "xshell-macros"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"
+
+[[package]]
+name = "xtask"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "flate2",
+ "write-json",
+ "xflags",
+ "xshell",
+]
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
new file mode 100644
index 000000000..6b68ca823
--- /dev/null
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -0,0 +1,33 @@
+[workspace]
+members = ["xtask/", "lib/*", "crates/*"]
+exclude = ["crates/proc-macro-test/imp"]
+
+[profile.dev]
+# Disabling debug info speeds up builds a bunch,
+# and we don't rely on it for debugging that much.
+debug = 0
+
+[profile.dev.package]
+# These speed up local tests.
+rowan.opt-level = 3
+rustc-hash.opt-level = 3
+smol_str.opt-level = 3
+text-size.opt-level = 3
+# This speeds up `cargo xtask dist`.
+miniz_oxide.opt-level = 3
+
+[profile.release]
+incremental = true
+# Set this to 1 or 2 to get more useful backtraces in the debugger.
+debug = 0
+
+[patch.'crates-io']
+# rowan = { path = "../rowan" }
+
+# chalk-solve = { path = "../chalk/chalk-solve" }
+# chalk-ir = { path = "../chalk/chalk-ir" }
+# chalk-recursive = { path = "../chalk/chalk-recursive" }
+
+# ungrammar = { path = "../ungrammar" }
+
+# salsa = { path = "../salsa" }
diff --git a/src/tools/rust-analyzer/LICENSE-APACHE b/src/tools/rust-analyzer/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/src/tools/rust-analyzer/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/tools/rust-analyzer/LICENSE-MIT b/src/tools/rust-analyzer/LICENSE-MIT
new file mode 100644
index 000000000..31aa79387
--- /dev/null
+++ b/src/tools/rust-analyzer/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/rust-analyzer/PRIVACY.md b/src/tools/rust-analyzer/PRIVACY.md
new file mode 100644
index 000000000..89e252be7
--- /dev/null
+++ b/src/tools/rust-analyzer/PRIVACY.md
@@ -0,0 +1 @@
+See the [Privacy](https://rust-analyzer.github.io/manual.html#privacy) section of the user manual.
diff --git a/src/tools/rust-analyzer/README.md b/src/tools/rust-analyzer/README.md
new file mode 100644
index 000000000..8bb0517ed
--- /dev/null
+++ b/src/tools/rust-analyzer/README.md
@@ -0,0 +1,49 @@
+<p align="center">
+ <img
+ src="https://raw.githubusercontent.com/rust-analyzer/rust-analyzer/master/assets/logo-wide.svg"
+ alt="rust-analyzer logo">
+</p>
+
+rust-analyzer is a modular compiler frontend for the Rust language.
+It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust.
+
+## Quick Start
+
+https://rust-analyzer.github.io/manual.html#installation
+
+## Documentation
+
+If you want to **contribute** to rust-analyzer or are just curious about how
+things work under the hood, check the [./docs/dev](./docs/dev) folder.
+
+If you want to **use** rust-analyzer's language server with your editor of
+choice, check [the manual](https://rust-analyzer.github.io/manual.html).
+It also contains some tips & tricks to help you be more productive when using rust-analyzer.
+
+## Security and Privacy
+
+See the corresponding sections of [the manual](https://rust-analyzer.github.io/manual.html#security).
+
+## Communication
+
+For usage and troubleshooting requests, please use the "IDEs and Editors" category of the Rust forum:
+
+https://users.rust-lang.org/c/ide/14
+
+For questions about development and implementation, join the rust-analyzer working group on Zulip:
+
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+
+## Quick Links
+
+* Website: https://rust-analyzer.github.io/
+* Metrics: https://rust-analyzer.github.io/metrics/
+* API docs: https://rust-lang.github.io/rust-analyzer/ide/
+* Changelog: https://rust-analyzer.github.io/thisweek
+
+## License
+
+rust-analyzer is primarily distributed under the terms of both the MIT
+license and the Apache License (Version 2.0).
+
+See LICENSE-APACHE and LICENSE-MIT for details.
diff --git a/src/tools/rust-analyzer/assets/logo-square.svg b/src/tools/rust-analyzer/assets/logo-square.svg
new file mode 100644
index 000000000..fe1c1fa02
--- /dev/null
+++ b/src/tools/rust-analyzer/assets/logo-square.svg
@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="24mm"
+ height="24mm"
+ viewBox="0 0 23.999999 24.000001"
+ version="1.1"
+ id="svg8"
+ inkscape:version="0.92.4 5da689c313, 2019-01-14"
+ sodipodi:docname="ra.svg">
+ <defs
+ id="defs2" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="3.959798"
+ inkscape:cx="-31.307418"
+ inkscape:cy="43.570897"
+ inkscape:document-units="mm"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1920"
+ inkscape:window-height="1006"
+ inkscape:window-x="0"
+ inkscape:window-y="0"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata5">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Ebene 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-48.088531,-60.285631)">
+ <g
+ aria-label="r."
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:#ffffff;stroke-width:0.5px;paint-order:stroke"
+ id="text3715">
+ <path
+ d="m 55.00077,63.442544 c -0.588718,-0.01704 -1.180779,0.251078 -1.524352,0.735633 -0.163942,0.198364 -0.296316,0.49938 -0.394953,0.683311 -0.101099,-0.416482 -0.202199,-0.832964 -0.303298,-1.249445 -0.671966,0 -1.343932,0 -2.015897,0 0,0.370348 0,0.740695 0,1.111043 0.246841,0 0.493682,0 0.740523,0 0,1.128958 0,2.257916 0,3.386874 -0.246841,0 -0.493682,0 -0.740523,0 0,0.373792 0,0.747585 0,1.121378 1.174777,0 2.349555,0 3.524332,0 0,-0.373793 0,-0.747586 0,-1.121378 -0.37052,0 -0.74104,0 -1.11156,0 0,-0.53623 0,-1.072458 0,-1.608688 0.190282,-0.586609 0.512347,-1.195617 1.085749,-1.482555 0.177384,-0.100666 0.369603,-0.139942 0.305897,0.108125 0,0.278138 0,0.556274 0,0.834412 0.349333,0 0.698666,0 1.047998,0 0.104042,-0.783071 0.208084,-1.566141 0.312126,-2.349211 -0.293304,-0.117433 -0.610556,-0.17161 -0.926042,-0.169499 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold'"
+ id="path817" />
+ <circle
+ cx="59.49345"
+ cy="68.231422"
+ r="1.1800417"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold'"
+ id="path819" />
+ </g>
+ <rect
+ style="fill:#30363b;fill-opacity:1;stroke:#20262b;stroke-width:0.39559129;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="rect3721"
+ width="10.604409"
+ height="12.604408"
+ x="61.286327"
+ y="71.483421" />
+ <g
+ aria-label="a"
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.26458332"
+ id="text3719">
+ <path
+ d="m 69.065615,79.143583 q 0,0.3175 0.08467,0.460375 0.08996,0.142875 0.28575,0.216958 l -0.343958,1.100667 q -0.497417,-0.04762 -0.841375,-0.216959 -0.338667,-0.174625 -0.534459,-0.523875 -0.322791,0.386292 -0.825499,0.576792 -0.502709,0.185208 -1.026584,0.185208 -0.867833,0 -1.386416,-0.492125 -0.513292,-0.497416 -0.513292,-1.275291 0,-0.915459 0.714375,-1.412875 0.719667,-0.497417 2.021417,-0.497417 h 0.756708 v -0.211667 q 0,-0.439208 -0.28575,-0.650875 -0.280458,-0.211666 -0.8255,-0.211666 -0.269875,0 -0.693208,0.07937 -0.423334,0.07408 -0.846667,0.216958 l -0.386292,-1.11125 q 0.545042,-0.206375 1.132417,-0.312208 0.592667,-0.105834 1.058333,-0.105834 1.254125,0 1.852083,0.513292 0.60325,0.508 0.60325,1.471083 z m -2.624666,0.60325 q 0.269875,0 0.566208,-0.15875 0.296334,-0.164042 0.449792,-0.460375 v -0.910167 h -0.41275 q -0.6985,0 -1.026583,0.216958 -0.328084,0.211667 -0.328084,0.624417 0,0.322792 0.195792,0.508 0.201083,0.179917 0.555625,0.179917 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path822" />
+ </g>
+ </g>
+</svg>
diff --git a/src/tools/rust-analyzer/assets/logo-wide.svg b/src/tools/rust-analyzer/assets/logo-wide.svg
new file mode 100644
index 000000000..c5fb55b36
--- /dev/null
+++ b/src/tools/rust-analyzer/assets/logo-wide.svg
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="85.797134mm"
+ height="24.747536mm"
+ viewBox="0 0 85.797134 24.747536"
+ version="1.1"
+ id="svg8"
+ inkscape:version="0.92.4 5da689c313, 2019-01-14"
+ sodipodi:docname="rust analyzer.svg">
+ <defs
+ id="defs2" />
+ <style>
+ #text3715 {
+ fill: #000000;
+ stroke: #ffffff;
+ stroke-width: 0.5;
+ paint-order: stroke;
+ }
+ @media (prefers-color-scheme: dark) {
+ #text3715 {
+ fill: #ffffff;
+ stroke: none;
+ }
+ }
+ </style>
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="1.979899"
+ inkscape:cx="64.938033"
+ inkscape:cy="-10.231391"
+ inkscape:document-units="mm"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1920"
+ inkscape:window-height="1006"
+ inkscape:window-x="0"
+ inkscape:window-y="0"
+ inkscape:window-maximized="1" />
+ <metadata
+ id="metadata5">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Ebene 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-29.534624,-59.398722)">
+ <g
+ aria-label="rust."
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px"
+ id="text3715">
+ <path
+ d="m 35.95077,62.913236 c -0.588719,-0.01704 -1.180779,0.251078 -1.524352,0.735632 -0.163943,0.198364 -0.296317,0.499384 -0.394954,0.683311 -0.101099,-0.416482 -0.202198,-0.832963 -0.303298,-1.249444 -0.671965,0 -1.343931,0 -2.015897,0 0,0.370348 0,0.740695 0,1.111043 0.246841,0 0.493682,0 0.740523,0 0,1.128958 0,2.257916 0,3.386873 -0.246841,0 -0.493682,0 -0.740523,0 0,0.373965 0,0.747931 0,1.121896 1.174777,0 2.349555,0 3.524332,0 0,-0.373965 0,-0.747931 0,-1.121896 -0.37052,0 -0.74104,0 -1.11156,0 0,-0.536229 0,-1.072458 0,-1.608687 0.190283,-0.586609 0.512347,-1.195617 1.085749,-1.482555 0.177393,-0.100673 0.369604,-0.139934 0.305898,0.108135 0,0.278134 0,0.556268 0,0.834401 0.349332,0 0.698665,0 1.047998,0 0.104041,-0.78307 0.208084,-1.56614 0.312125,-2.34921 -0.293304,-0.117432 -0.610556,-0.17161 -0.926041,-0.169499 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path817" />
+ <path
+ d="m 39.681433,63.082627 v 3.847042 q 0,0.407458 0.148167,0.560917 0.153458,0.153458 0.423333,0.153458 0.259292,0 0.518583,-0.164042 0.259292,-0.164041 0.433917,-0.4445 V 63.082627 H 42.8776 v 5.61975 h -1.4605 l -0.07408,-0.656166 q -0.28575,0.41275 -0.751416,0.624416 -0.465667,0.211667 -0.973667,0.211667 -0.814917,0 -1.211792,-0.470958 -0.396875,-0.47625 -0.396875,-1.275292 v -4.053417 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path819" />
+ <path
+ d="m 46.6135,67.686377 q 0.418042,0 0.672042,-0.132291 0.259291,-0.132292 0.259291,-0.396875 0,-0.179917 -0.100541,-0.301625 -0.100542,-0.121709 -0.386292,-0.232834 -0.28575,-0.111125 -0.846667,-0.264583 -0.513291,-0.137583 -0.910166,-0.34925 -0.391584,-0.211667 -0.613834,-0.545042 -0.216958,-0.333375 -0.216958,-0.830791 0,-0.502709 0.280458,-0.894292 0.280459,-0.391583 0.8255,-0.613833 0.545042,-0.227542 1.3335,-0.227542 0.751417,0 1.307042,0.195792 0.560917,0.1905 0.968375,0.486833 l -0.66675,0.98425 q -0.34925,-0.216958 -0.751417,-0.343958 -0.402166,-0.132292 -0.809625,-0.132292 -0.407458,0 -0.608541,0.111125 -0.195792,0.105833 -0.195792,0.322792 0,0.142875 0.100542,0.248708 0.105833,0.100542 0.391583,0.211667 0.28575,0.105833 0.836083,0.264583 0.545042,0.153458 0.947209,0.354542 0.407458,0.201083 0.629708,0.53975 0.22225,0.333375 0.22225,0.883708 0,0.613833 -0.365125,1.031875 -0.365125,0.41275 -0.968375,0.619125 -0.60325,0.206375 -1.322917,0.206375 -0.814916,0 -1.439333,-0.232833 -0.624417,-0.232834 -1.063625,-0.613834 L 44.9625,67.093711 q 0.328083,0.254 0.740833,0.423333 0.418042,0.169333 0.910167,0.169333 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path821" />
+ <path
+ d="m 55.768067,68.374294 q -0.328083,0.211667 -0.79375,0.359833 -0.465666,0.148167 -1.047749,0.148167 -1.100667,0 -1.635125,-0.560917 -0.534459,-0.566208 -0.534459,-1.534583 v -2.550583 h -1.180041 v -1.153584 h 1.180041 v -1.217083 l 1.672167,-0.201083 v 1.418166 h 1.80975 l -0.164042,1.153584 h -1.645708 v 2.550583 q 0,0.418042 0.1905,0.597958 0.1905,0.179917 0.608541,0.179917 0.296334,0 0.53975,-0.06879 0.248709,-0.07408 0.4445,-0.185208 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path823" />
+ <circle
+ cx="59.493385"
+ cy="67.702255"
+ r="1.1799999"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';"
+ id="path825" />
+ </g>
+ <rect
+ style="fill:#30363b;fill-opacity:1;stroke:#20262b;stroke-width:0.39205828;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ id="rect3721"
+ width="54.407944"
+ height="12.607942"
+ x="60.730652"
+ y="71.342285" />
+ <g
+ aria-label="analyzer"
+ style="font-style:normal;font-weight:normal;font-size:10.58333302px;line-height:1.25;font-family:sans-serif;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.26458332"
+ id="text3719">
+ <path
+ d="m 68.007281,79.143583 q 0,0.3175 0.08467,0.460375 0.08996,0.142875 0.28575,0.216958 l -0.343959,1.100667 q -0.497416,-0.04762 -0.841375,-0.216959 -0.338666,-0.174625 -0.534458,-0.523875 -0.322792,0.386292 -0.8255,0.576792 -0.502708,0.185208 -1.026583,0.185208 -0.867833,0 -1.386417,-0.492125 -0.513291,-0.497416 -0.513291,-1.275291 0,-0.915459 0.714375,-1.412875 0.719666,-0.497417 2.021416,-0.497417 h 0.756708 v -0.211667 q 0,-0.439208 -0.28575,-0.650875 -0.280458,-0.211666 -0.825499,-0.211666 -0.269875,0 -0.693209,0.07937 -0.423333,0.07408 -0.846666,0.216958 l -0.386292,-1.11125 q 0.545042,-0.206375 1.132417,-0.312208 0.592666,-0.105834 1.058333,-0.105834 1.254125,0 1.852083,0.513292 0.60325,0.508 0.60325,1.471083 z m -2.624666,0.60325 q 0.269875,0 0.566208,-0.15875 0.296333,-0.164042 0.449791,-0.460375 v -0.910167 h -0.412749 q -0.6985,0 -1.026584,0.216958 -0.328083,0.211667 -0.328083,0.624417 0,0.322792 0.195792,0.508 0.201083,0.179917 0.555625,0.179917 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path828" />
+ <path
+ d="m 69.626515,80.762833 v -5.61975 h 1.4605 l 0.116417,0.650875 q 0.375708,-0.423334 0.79375,-0.629709 0.418041,-0.206375 0.9525,-0.206375 0.719666,0 1.132416,0.439209 0.41275,0.439208 0.41275,1.23825 v 4.1275 h -1.672166 v -3.645959 q 0,-0.343958 -0.04762,-0.545041 -0.04763,-0.206375 -0.169334,-0.291042 -0.116416,-0.08996 -0.322791,-0.08996 -0.174625,0 -0.343959,0.07937 -0.164041,0.07408 -0.322791,0.216958 -0.15875,0.142875 -0.3175,0.343958 v 3.931709 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path830" />
+ <path
+ d="m 80.707248,79.143583 q 0,0.3175 0.08467,0.460375 0.08996,0.142875 0.28575,0.216958 l -0.343958,1.100667 q -0.497417,-0.04762 -0.841375,-0.216959 -0.338667,-0.174625 -0.534458,-0.523875 -0.322792,0.386292 -0.8255,0.576792 -0.502709,0.185208 -1.026584,0.185208 -0.867833,0 -1.386416,-0.492125 -0.513292,-0.497416 -0.513292,-1.275291 0,-0.915459 0.714375,-1.412875 0.719667,-0.497417 2.021417,-0.497417 h 0.756708 v -0.211667 q 0,-0.439208 -0.28575,-0.650875 -0.280458,-0.211666 -0.8255,-0.211666 -0.269875,0 -0.693208,0.07937 -0.423334,0.07408 -0.846667,0.216958 l -0.386292,-1.11125 q 0.545042,-0.206375 1.132417,-0.312208 0.592667,-0.105834 1.058333,-0.105834 1.254125,0 1.852084,0.513292 0.603249,0.508 0.603249,1.471083 z m -2.624666,0.60325 q 0.269875,0 0.566208,-0.15875 0.296334,-0.164042 0.449792,-0.460375 v -0.910167 h -0.41275 q -0.6985,0 -1.026583,0.216958 -0.328084,0.211667 -0.328084,0.624417 0,0.322792 0.195792,0.508 0.201083,0.179917 0.555625,0.179917 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path832" />
+ <path
+ d="m 85.258066,72.91 v 6.085416 q 0,0.338667 0.1905,0.486833 0.195792,0.142875 0.534458,0.142875 0.216959,0 0.418042,-0.04762 0.201083,-0.05292 0.375708,-0.121708 l 0.402167,1.116542 q -0.28575,0.148166 -0.687917,0.259291 -0.402166,0.111125 -0.936625,0.111125 -1.016,0 -1.49225,-0.582083 -0.47625,-0.587375 -0.47625,-1.571625 V 74.053 H 81.929608 V 72.91 Z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path834" />
+ <path
+ d="m 93.989299,75.143083 -1.87325,5.61975 q -0.248708,0.746125 -0.661458,1.248833 -0.407458,0.508 -1.021292,0.783167 -0.608541,0.275166 -1.465791,0.322791 l -0.1905,-1.180041 q 0.555625,-0.06879 0.894291,-0.206375 0.343959,-0.137584 0.550334,-0.375709 0.211666,-0.232833 0.365125,-0.592666 h -0.5715 l -1.783292,-5.61975 h 1.767417 l 1.090083,4.550833 1.185333,-4.550833 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path836" />
+ <path
+ d="m 97.021408,79.498124 h 2.815166 l -0.15875,1.264709 h -4.630208 v -1.180042 l 2.788708,-3.180292 h -2.555875 v -1.259416 h 4.503208 v 1.17475 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path838" />
+ <path
+ d="m 102.82635,78.439791 q 0.0582,0.460375 0.23813,0.746125 0.1852,0.280458 0.47095,0.41275 0.28575,0.127 0.6403,0.127 0.38629,0 0.74612,-0.127 0.35983,-0.127 0.69321,-0.338667 l 0.67204,0.910167 q -0.39687,0.338667 -0.96308,0.555625 -0.56092,0.216958 -1.28059,0.216958 -0.96308,0 -1.61395,-0.381 -0.65088,-0.386291 -0.97896,-1.058333 -0.32809,-0.672042 -0.32809,-1.545167 0,-0.830791 0.3175,-1.508125 0.3175,-0.682625 0.92605,-1.084791 0.61383,-0.407459 1.49754,-0.407459 0.80433,0 1.39171,0.343959 0.59266,0.343958 0.91545,0.989541 0.32809,0.645584 0.32809,1.550459 0,0.142875 -0.0106,0.306916 -0.005,0.164042 -0.0212,0.291042 z m 1.03717,-2.360083 q -0.44979,0 -0.72496,0.322791 -0.27517,0.322792 -0.33338,1.031875 h 2.06375 q -0.005,-0.613833 -0.23812,-0.98425 -0.23283,-0.370416 -0.76729,-0.370416 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path840" />
+ <path
+ d="m 107.77933,80.762833 v -1.121834 h 0.74084 v -3.386666 h -0.74084 v -1.11125 h 2.01613 l 0.30692,1.264708 q 0.30162,-0.724958 0.76729,-1.0795 0.47096,-0.354542 1.14829,-0.354542 0.28575,0 0.508,0.04762 0.22225,0.04233 0.41804,0.121709 l -0.508,1.381125 q -0.15346,-0.04233 -0.30692,-0.0635 -0.15345,-0.02117 -0.33866,-0.02117 -0.55034,0 -0.96838,0.449792 -0.41275,0.449791 -0.62971,1.143 v 1.608666 h 1.11125 v 1.121834 z m 3.80471,-3.27025 v -1.640417 l 0.24871,-0.709083 h 1.11125 l -0.31221,2.3495 z"
+ style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-family:'Fira Code';-inkscape-font-specification:'Fira Code Bold';fill:#ffffff;stroke-width:0.26458332"
+ id="path842" />
+ </g>
+ </g>
+</svg>
diff --git a/src/tools/rust-analyzer/bench_data/glorious_old_parser b/src/tools/rust-analyzer/bench_data/glorious_old_parser
new file mode 100644
index 000000000..7e900dfeb
--- /dev/null
+++ b/src/tools/rust-analyzer/bench_data/glorious_old_parser
@@ -0,0 +1,8562 @@
+use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
+use crate::ast::{GenericBound, TraitBoundModifier};
+use crate::ast::Unsafety;
+use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
+use crate::ast::Block;
+use crate::ast::{BlockCheckMode, CaptureBy, Movability};
+use crate::ast::{Constness, Crate};
+use crate::ast::Defaultness;
+use crate::ast::EnumDef;
+use crate::ast::{Expr, ExprKind, RangeLimits};
+use crate::ast::{Field, FnDecl, FnHeader};
+use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
+use crate::ast::{GenericParam, GenericParamKind};
+use crate::ast::GenericArg;
+use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
+use crate::ast::{Label, Lifetime, Lit, LitKind};
+use crate::ast::Local;
+use crate::ast::MacStmtStyle;
+use crate::ast::{Mac, Mac_, MacDelimiter};
+use crate::ast::{MutTy, Mutability};
+use crate::ast::{Pat, PatKind, PathSegment};
+use crate::ast::{PolyTraitRef, QSelf};
+use crate::ast::{Stmt, StmtKind};
+use crate::ast::{VariantData, StructField};
+use crate::ast::StrStyle;
+use crate::ast::SelfKind;
+use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
+use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
+use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
+use crate::ast::{UseTree, UseTreeKind};
+use crate::ast::{BinOpKind, UnOp};
+use crate::ast::{RangeEnd, RangeSyntax};
+use crate::{ast, attr};
+use crate::ext::base::DummyResult;
+use crate::source_map::{self, SourceMap, Spanned, respan};
+use crate::parse::{self, SeqSep, classify, token};
+use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
+use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use crate::parse::token::DelimToken;
+use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
+use crate::util::parser::{AssocOp, Fixity};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::parse::PResult;
+use crate::ThinVec;
+use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
+use crate::symbol::{Symbol, keywords};
+
+use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
+use rustc_target::spec::abi::{self, Abi};
+use syntax_pos::{Span, MultiSpan, BytePos, FileName};
+use log::{debug, trace};
+
+use std::borrow::Cow;
+use std::cmp;
+use std::mem;
+use std::path::{self, Path, PathBuf};
+use std::slice;
+
+#[derive(Debug)]
+/// Whether the type alias or associated type is a concrete type or an existential type
+pub enum AliasKind {
+ /// Just a new name for the same type
+ Weak(P<Ty>),
+ /// Only trait impls of the type will be usable, not the actual type itself
+ Existential(GenericBounds),
+}
+
+bitflags::bitflags! {
+ struct Restrictions: u8 {
+ const STMT_EXPR = 1 << 0;
+ const NO_STRUCT_LITERAL = 1 << 1;
+ }
+}
+
+type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
+
+/// Specifies how to parse a path.
+#[derive(Copy, Clone, PartialEq)]
+pub enum PathStyle {
+ /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
+ /// with something else. For example, in expressions `segment < ....` can be interpreted
+ /// as a comparison and `segment ( ....` can be interpreted as a function call.
+ /// In all such contexts the non-path interpretation is preferred by default for practical
+ /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
+ /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
+ Expr,
+ /// In other contexts, notably in types, no ambiguity exists and paths can be written
+ /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
+ /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
+ Type,
+ /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
+ /// visibilities or attributes.
+ /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
+ /// (paths in "mod" contexts have to be checked later for absence of generic arguments
+ /// anyway, due to macros), but it is used to avoid weird suggestions about expected
+ /// tokens when something goes wrong.
+ Mod,
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum SemiColonMode {
+ Break,
+ Ignore,
+ Comma,
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+enum BlockMode {
+ Break,
+ Ignore,
+}
+
+/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
+/// dropped into the token stream, which happens while parsing the result of
+/// macro expansion). Placement of these is not as complex as I feared it would
+/// be. The important thing is to make sure that lookahead doesn't balk at
+/// `token::Interpolated` tokens.
+macro_rules! maybe_whole_expr {
+ ($p:expr) => {
+ if let token::Interpolated(nt) = $p.token.clone() {
+ match *nt {
+ token::NtExpr(ref e) | token::NtLiteral(ref e) => {
+ $p.bump();
+ return Ok((*e).clone());
+ }
+ token::NtPath(ref path) => {
+ $p.bump();
+ let span = $p.span;
+ let kind = ExprKind::Path(None, (*path).clone());
+ return Ok($p.mk_expr(span, kind, ThinVec::new()));
+ }
+ token::NtBlock(ref block) => {
+ $p.bump();
+ let span = $p.span;
+ let kind = ExprKind::Block((*block).clone(), None);
+ return Ok($p.mk_expr(span, kind, ThinVec::new()));
+ }
+ _ => {},
+ };
+ }
+ }
+}
+
+/// As maybe_whole_expr, but for things other than expressions
+macro_rules! maybe_whole {
+ ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
+ if let token::Interpolated(nt) = $p.token.clone() {
+ if let token::$constructor($x) = (*nt).clone() {
+ $p.bump();
+ return Ok($e);
+ }
+ }
+ };
+}
+
+fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
+ if let Some(ref mut rhs) = rhs {
+ lhs.append(rhs);
+ }
+ lhs
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+enum PrevTokenKind {
+ DocComment,
+ Comma,
+ Plus,
+ Interpolated,
+ Eof,
+ Ident,
+ Other,
+}
+
+trait RecoverQPath: Sized {
+ const PATH_STYLE: PathStyle = PathStyle::Expr;
+ fn to_ty(&self) -> Option<P<Ty>>;
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self;
+ fn to_string(&self) -> String;
+}
+
+impl RecoverQPath for Ty {
+ const PATH_STYLE: PathStyle = PathStyle::Type;
+ fn to_ty(&self) -> Option<P<Ty>> {
+ Some(P(self.clone()))
+ }
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
+ Self { span: path.span, node: TyKind::Path(qself, path), id: self.id }
+ }
+ fn to_string(&self) -> String {
+ pprust::ty_to_string(self)
+ }
+}
+
+impl RecoverQPath for Pat {
+ fn to_ty(&self) -> Option<P<Ty>> {
+ self.to_ty()
+ }
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
+ Self { span: path.span, node: PatKind::Path(qself, path), id: self.id }
+ }
+ fn to_string(&self) -> String {
+ pprust::pat_to_string(self)
+ }
+}
+
+impl RecoverQPath for Expr {
+ fn to_ty(&self) -> Option<P<Ty>> {
+ self.to_ty()
+ }
+ fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
+ Self { span: path.span, node: ExprKind::Path(qself, path),
+ id: self.id, attrs: self.attrs.clone() }
+ }
+ fn to_string(&self) -> String {
+ pprust::expr_to_string(self)
+ }
+}
+
+/* ident is handled by common.rs */
+
+#[derive(Clone)]
+pub struct Parser<'a> {
+ pub sess: &'a ParseSess,
+ /// the current token:
+ pub token: token::Token,
+ /// the span of the current token:
+ pub span: Span,
+ /// the span of the previous token:
+ meta_var_span: Option<Span>,
+ pub prev_span: Span,
+ /// the previous token kind
+ prev_token_kind: PrevTokenKind,
+ restrictions: Restrictions,
+ /// Used to determine the path to externally loaded source files
+ crate directory: Directory<'a>,
+ /// Whether to parse sub-modules in other files.
+ pub recurse_into_file_modules: bool,
+ /// Name of the root module this parser originated from. If `None`, then the
+ /// name is not known. This does not change while the parser is descending
+ /// into modules, and sub-parsers have new values for this name.
+ pub root_module_name: Option<String>,
+ crate expected_tokens: Vec<TokenType>,
+ token_cursor: TokenCursor,
+ desugar_doc_comments: bool,
+ /// Whether we should configure out of line modules as we parse.
+ pub cfg_mods: bool,
+ /// This field is used to keep track of how many left angle brackets we have seen. This is
+ /// required in order to detect extra leading left angle brackets (`<` characters) and error
+ /// appropriately.
+ ///
+ /// See the comments in the `parse_path_segment` function for more details.
+ crate unmatched_angle_bracket_count: u32,
+ crate max_angle_bracket_count: u32,
+ /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
+ /// it gets removed from here. Every entry left at the end gets emitted as an independent
+ /// error.
+ crate unclosed_delims: Vec<UnmatchedBrace>,
+}
+
+
+#[derive(Clone)]
+struct TokenCursor {
+ frame: TokenCursorFrame,
+ stack: Vec<TokenCursorFrame>,
+}
+
+#[derive(Clone)]
+struct TokenCursorFrame {
+ delim: token::DelimToken,
+ span: DelimSpan,
+ open_delim: bool,
+ tree_cursor: tokenstream::Cursor,
+ close_delim: bool,
+ last_token: LastToken,
+}
+
+/// This is used in `TokenCursorFrame` above to track tokens that are consumed
+/// by the parser, and then that's transitively used to record the tokens that
+/// each parsed AST item is created with.
+///
+/// Right now this has two states, either collecting tokens or not collecting
+/// tokens. If we're collecting tokens we just save everything off into a local
+/// `Vec`. Eventually, though, this should likely save tokens from the original
+/// token stream and just use slicing of token streams to avoid creating a
+/// whole new vector.
+///
+/// The second state is where we're passively not recording tokens, but the last
+/// token is still tracked for when we want to start recording tokens. This
+/// "last token" means that when we start recording tokens we'll want to ensure
+/// that this, the first token, is included in the output.
+///
+/// You can find some more example usage of this in the `collect_tokens` method
+/// on the parser.
+#[derive(Clone)]
+enum LastToken {
+ Collecting(Vec<TreeAndJoint>),
+ Was(Option<TreeAndJoint>),
+}
+
+impl TokenCursorFrame {
+ fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
+ TokenCursorFrame {
+ delim: delim,
+ span: sp,
+ open_delim: delim == token::NoDelim,
+ tree_cursor: tts.clone().into_trees(),
+ close_delim: delim == token::NoDelim,
+ last_token: LastToken::Was(None),
+ }
+ }
+}
+
+impl TokenCursor {
+ fn next(&mut self) -> TokenAndSpan {
+ loop {
+ let tree = if !self.frame.open_delim {
+ self.frame.open_delim = true;
+ TokenTree::open_tt(self.frame.span.open, self.frame.delim)
+ } else if let Some(tree) = self.frame.tree_cursor.next() {
+ tree
+ } else if !self.frame.close_delim {
+ self.frame.close_delim = true;
+ TokenTree::close_tt(self.frame.span.close, self.frame.delim)
+ } else if let Some(frame) = self.stack.pop() {
+ self.frame = frame;
+ continue
+ } else {
+ return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
+ };
+
+ match self.frame.last_token {
+ LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
+ LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
+ }
+
+ match tree {
+ TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+ TokenTree::Delimited(sp, delim, tts) => {
+ let frame = TokenCursorFrame::new(sp, delim, &tts);
+ self.stack.push(mem::replace(&mut self.frame, frame));
+ }
+ }
+ }
+ }
+
+ fn next_desugared(&mut self) -> TokenAndSpan {
+ let (sp, name) = match self.next() {
+ TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+ tok => return tok,
+ };
+
+ let stripped = strip_doc_comment_decoration(&name.as_str());
+
+ // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+ // required to wrap the text.
+ let mut num_of_hashes = 0;
+ let mut count = 0;
+ for ch in stripped.chars() {
+ count = match ch {
+ '"' => 1,
+ '#' if count > 0 => count + 1,
+ _ => 0,
+ };
+ num_of_hashes = cmp::max(num_of_hashes, count);
+ }
+
+ let delim_span = DelimSpan::from_single(sp);
+ let body = TokenTree::Delimited(
+ delim_span,
+ token::Bracket,
+ [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Literal(
+ token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+ ]
+ .iter().cloned().collect::<TokenStream>().into(),
+ );
+
+ self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
+ delim_span,
+ token::NoDelim,
+ &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+ [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+ .iter().cloned().collect::<TokenStream>().into()
+ } else {
+ [TokenTree::Token(sp, token::Pound), body]
+ .iter().cloned().collect::<TokenStream>().into()
+ },
+ )));
+
+ self.next()
+ }
+}
+
+#[derive(Clone, PartialEq)]
+crate enum TokenType {
+ Token(token::Token),
+ Keyword(keywords::Keyword),
+ Operator,
+ Lifetime,
+ Ident,
+ Path,
+ Type,
+ Const,
+}
+
+impl TokenType {
+ fn to_string(&self) -> String {
+ match *self {
+ TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+ TokenType::Keyword(kw) => format!("`{}`", kw.name()),
+ TokenType::Operator => "an operator".to_string(),
+ TokenType::Lifetime => "lifetime".to_string(),
+ TokenType::Ident => "identifier".to_string(),
+ TokenType::Path => "path".to_string(),
+ TokenType::Type => "type".to_string(),
+ TokenType::Const => "const".to_string(),
+ }
+ }
+}
+
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
+/// `IDENT<<u8 as Trait>::AssocTy>`.
+///
+/// Types can also be of the form `IDENT(u8, u8) -> u8`; however, this assumes
+/// that `IDENT` is not the ident of a fn trait.
+fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
+ t == &token::ModSep || t == &token::Lt ||
+ t == &token::BinOp(token::Shl)
+}
+
+/// Information about the path to a module.
+pub struct ModulePath {
+ name: String,
+ path_exists: bool,
+ pub result: Result<ModulePathSuccess, Error>,
+}
+
+pub struct ModulePathSuccess {
+ pub path: PathBuf,
+ pub directory_ownership: DirectoryOwnership,
+ warn: bool,
+}
+
+pub enum Error {
+ FileNotFoundForModule {
+ mod_name: String,
+ default_path: String,
+ secondary_path: String,
+ dir_path: String,
+ },
+ DuplicatePaths {
+ mod_name: String,
+ default_path: String,
+ secondary_path: String,
+ },
+ UselessDocComment,
+ InclusiveRangeWithNoEnd,
+}
+
+impl Error {
+ fn span_err<S: Into<MultiSpan>>(self,
+ sp: S,
+ handler: &errors::Handler) -> DiagnosticBuilder<'_> {
+ match self {
+ Error::FileNotFoundForModule { ref mod_name,
+ ref default_path,
+ ref secondary_path,
+ ref dir_path } => {
+ let mut err = struct_span_err!(handler, sp, E0583,
+ "file not found for module `{}`", mod_name);
+ err.help(&format!("name the file either {} or {} inside the directory \"{}\"",
+ default_path,
+ secondary_path,
+ dir_path));
+ err
+ }
+ Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
+ let mut err = struct_span_err!(handler, sp, E0584,
+ "file for module `{}` found at both {} and {}",
+ mod_name,
+ default_path,
+ secondary_path);
+ err.help("delete or rename one of them to remove the ambiguity");
+ err
+ }
+ Error::UselessDocComment => {
+ let mut err = struct_span_err!(handler, sp, E0585,
+ "found a documentation comment that doesn't document anything");
+ err.help("doc comments must come before what they document, maybe a comment was \
+ intended with `//`?");
+ err
+ }
+ Error::InclusiveRangeWithNoEnd => {
+ let mut err = struct_span_err!(handler, sp, E0586,
+ "inclusive range with no end");
+ err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
+ err
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+enum LhsExpr {
+ NotYetParsed,
+ AttributesParsed(ThinVec<Attribute>),
+ AlreadyParsed(P<Expr>),
+}
+
+impl From<Option<ThinVec<Attribute>>> for LhsExpr {
+ fn from(o: Option<ThinVec<Attribute>>) -> Self {
+ if let Some(attrs) = o {
+ LhsExpr::AttributesParsed(attrs)
+ } else {
+ LhsExpr::NotYetParsed
+ }
+ }
+}
+
+impl From<P<Expr>> for LhsExpr {
+ fn from(expr: P<Expr>) -> Self {
+ LhsExpr::AlreadyParsed(expr)
+ }
+}
+
+/// Creates a placeholder argument.
+fn dummy_arg(span: Span) -> Arg {
+ let ident = Ident::new(keywords::Invalid.name(), span);
+ let pat = P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
+ span,
+ });
+ let ty = Ty {
+ node: TyKind::Err,
+ span,
+ id: ast::DUMMY_NODE_ID
+ };
+ Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID }
+}
+
+#[derive(Copy, Clone, Debug)]
+enum TokenExpectType {
+ Expect,
+ NoExpect,
+}
+
+impl<'a> Parser<'a> {
+ pub fn new(sess: &'a ParseSess,
+ tokens: TokenStream,
+ directory: Option<Directory<'a>>,
+ recurse_into_file_modules: bool,
+ desugar_doc_comments: bool)
+ -> Self {
+ let mut parser = Parser {
+ sess,
+ token: token::Whitespace,
+ span: syntax_pos::DUMMY_SP,
+ prev_span: syntax_pos::DUMMY_SP,
+ meta_var_span: None,
+ prev_token_kind: PrevTokenKind::Other,
+ restrictions: Restrictions::empty(),
+ recurse_into_file_modules,
+ directory: Directory {
+ path: Cow::from(PathBuf::new()),
+ ownership: DirectoryOwnership::Owned { relative: None }
+ },
+ root_module_name: None,
+ expected_tokens: Vec::new(),
+ token_cursor: TokenCursor {
+ frame: TokenCursorFrame::new(
+ DelimSpan::dummy(),
+ token::NoDelim,
+ &tokens.into(),
+ ),
+ stack: Vec::new(),
+ },
+ desugar_doc_comments,
+ cfg_mods: true,
+ unmatched_angle_bracket_count: 0,
+ max_angle_bracket_count: 0,
+ unclosed_delims: Vec::new(),
+ };
+
+ let tok = parser.next_tok();
+ parser.token = tok.tok;
+ parser.span = tok.sp;
+
+ if let Some(directory) = directory {
+ parser.directory = directory;
+ } else if !parser.span.is_dummy() {
+ if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) {
+ path.pop();
+ parser.directory.path = Cow::from(path);
+ }
+ }
+
+ parser.process_potential_macro_variable();
+ parser
+ }
+
+ fn next_tok(&mut self) -> TokenAndSpan {
+ let mut next = if self.desugar_doc_comments {
+ self.token_cursor.next_desugared()
+ } else {
+ self.token_cursor.next()
+ };
+ if next.sp.is_dummy() {
+ // Tweak the location for better diagnostics, but keep syntactic context intact.
+ next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
+ }
+ next
+ }
+
+ /// Converts the current token to a string using `self`'s reader.
+ pub fn this_token_to_string(&self) -> String {
+ pprust::token_to_string(&self.token)
+ }
+
+ fn token_descr(&self) -> Option<&'static str> {
+ Some(match &self.token {
+ t if t.is_special_ident() => "reserved identifier",
+ t if t.is_used_keyword() => "keyword",
+ t if t.is_unused_keyword() => "reserved keyword",
+ token::DocComment(..) => "doc comment",
+ _ => return None,
+ })
+ }
+
+ fn this_token_descr(&self) -> String {
+ if let Some(prefix) = self.token_descr() {
+ format!("{} `{}`", prefix, self.this_token_to_string())
+ } else {
+ format!("`{}`", self.this_token_to_string())
+ }
+ }
+
+ fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
+ let token_str = pprust::token_to_string(t);
+ Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
+ }
+
+ crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
+ match self.expect_one_of(&[], &[]) {
+ Err(e) => Err(e),
+ Ok(_) => unreachable!(),
+ }
+ }
+
+ /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
+ pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
+ if self.expected_tokens.is_empty() {
+ if self.token == *t {
+ self.bump();
+ Ok(false)
+ } else {
+ let token_str = pprust::token_to_string(t);
+ let this_token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected `{}`, found {}",
+ token_str,
+ this_token_str));
+
+ let sp = if self.token == token::Token::Eof {
+ // EOF, don't want to point at the following char, but rather the last token
+ self.prev_span
+ } else {
+ self.sess.source_map().next_point(self.prev_span)
+ };
+ let label_exp = format!("expected `{}`", token_str);
+ match self.recover_closing_delimiter(&[t.clone()], err) {
+ Err(e) => err = e,
+ Ok(recovered) => {
+ return Ok(recovered);
+ }
+ }
+ let cm = self.sess.source_map();
+ match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
+ (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+ // When the spans are on the same line, the only content
+ // between them is whitespace, so point only at the found token.
+ err.span_label(self.span, label_exp);
+ }
+ _ => {
+ err.span_label(sp, label_exp);
+ err.span_label(self.span, "unexpected token");
+ }
+ }
+ Err(err)
+ }
+ } else {
+ self.expect_one_of(slice::from_ref(t), &[])
+ }
+ }
+
+ fn recover_closing_delimiter(
+ &mut self,
+ tokens: &[token::Token],
+ mut err: DiagnosticBuilder<'a>,
+ ) -> PResult<'a, bool> {
+ let mut pos = None;
+ // we want to use the last closing delim that would apply
+ for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+ if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+ && Some(self.span) > unmatched.unclosed_span
+ {
+ pos = Some(i);
+ }
+ }
+ match pos {
+ Some(pos) => {
+ // Recover and assume that the detected unclosed delimiter was meant for
+ // this location. Emit the diagnostic and act as if the delimiter was
+ // present for the parser's sake.
+
+ // Don't attempt to recover from this unclosed delimiter more than once.
+ let unmatched = self.unclosed_delims.remove(pos);
+ let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+ // We want to suggest the inclusion of the closing delimiter where it makes
+ // the most sense, which is immediately after the last token:
+ //
+ // {foo(bar {}}
+ // - ^
+ // | |
+ // | help: `)` may belong here (FIXME: #58270)
+ // |
+ // unclosed delimiter
+ if let Some(sp) = unmatched.unclosed_span {
+ err.span_label(sp, "unclosed delimiter");
+ }
+ err.span_suggestion_short(
+ self.sess.source_map().next_point(self.prev_span),
+ &format!("{} may belong here", delim.to_string()),
+ delim.to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ err.emit();
+ self.expected_tokens.clear(); // reduce errors
+ Ok(true)
+ }
+ _ => Err(err),
+ }
+ }
+
+    /// Expects the next token to be an edible or inedible token. If edible,
+    /// consumes it; if inedible, returns without consuming anything.
+    /// Signals a fatal error if the next token is unexpected.
+ pub fn expect_one_of(
+ &mut self,
+ edible: &[token::Token],
+ inedible: &[token::Token],
+ ) -> PResult<'a, bool /* recovered */> {
+ fn tokens_to_string(tokens: &[TokenType]) -> String {
+ let mut i = tokens.iter();
+ // This might be a sign we need a connect method on Iterator.
+ let b = i.next()
+ .map_or(String::new(), |t| t.to_string());
+ i.enumerate().fold(b, |mut b, (i, a)| {
+ if tokens.len() > 2 && i == tokens.len() - 2 {
+ b.push_str(", or ");
+ } else if tokens.len() == 2 && i == tokens.len() - 2 {
+ b.push_str(" or ");
+ } else {
+ b.push_str(", ");
+ }
+ b.push_str(&a.to_string());
+ b
+ })
+ }
+ if edible.contains(&self.token) {
+ self.bump();
+ Ok(false)
+ } else if inedible.contains(&self.token) {
+ // leave it in the input
+ Ok(false)
+ } else {
+ let mut expected = edible.iter()
+ .map(|x| TokenType::Token(x.clone()))
+ .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
+ .chain(self.expected_tokens.iter().cloned())
+ .collect::<Vec<_>>();
+ expected.sort_by_cached_key(|x| x.to_string());
+ expected.dedup();
+ let expect = tokens_to_string(&expected[..]);
+ let actual = self.this_token_to_string();
+ let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
+ let short_expect = if expected.len() > 6 {
+ format!("{} possible tokens", expected.len())
+ } else {
+ expect.clone()
+ };
+ (format!("expected one of {}, found `{}`", expect, actual),
+ (self.sess.source_map().next_point(self.prev_span),
+ format!("expected one of {} here", short_expect)))
+ } else if expected.is_empty() {
+ (format!("unexpected token: `{}`", actual),
+ (self.prev_span, "unexpected token after this".to_string()))
+ } else {
+ (format!("expected {}, found `{}`", expect, actual),
+ (self.sess.source_map().next_point(self.prev_span),
+ format!("expected {} here", expect)))
+ };
+ let mut err = self.fatal(&msg_exp);
+ if self.token.is_ident_named("and") {
+ err.span_suggestion_short(
+ self.span,
+ "use `&&` instead of `and` for the boolean operator",
+ "&&".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ if self.token.is_ident_named("or") {
+ err.span_suggestion_short(
+ self.span,
+ "use `||` instead of `or` for the boolean operator",
+ "||".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ let sp = if self.token == token::Token::Eof {
+ // This is EOF, don't want to point at the following char, but rather the last token
+ self.prev_span
+ } else {
+ label_sp
+ };
+ match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
+ TokenType::Token(t) => Some(t.clone()),
+ _ => None,
+ }).collect::<Vec<_>>(), err) {
+ Err(e) => err = e,
+ Ok(recovered) => {
+ return Ok(recovered);
+ }
+ }
+
+ let cm = self.sess.source_map();
+ match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
+ (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+ // When the spans are in the same line, it means that the only content between
+ // them is whitespace, point at the found token in that case:
+ //
+ // X | () => { syntax error };
+ // | ^^^^^ expected one of 8 possible tokens here
+ //
+ // instead of having:
+ //
+ // X | () => { syntax error };
+ // | -^^^^^ unexpected token
+ // | |
+ // | expected one of 8 possible tokens here
+ err.span_label(self.span, label_exp);
+ }
+ _ if self.prev_span == syntax_pos::DUMMY_SP => {
+ // Account for macro context where the previous span might not be
+ // available to avoid incorrect output (#54841).
+ err.span_label(self.span, "unexpected token");
+ }
+ _ => {
+ err.span_label(sp, label_exp);
+ err.span_label(self.span, "unexpected token");
+ }
+ }
+ Err(err)
+ }
+ }
+
+ /// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
+ fn interpolated_or_expr_span(&self,
+ expr: PResult<'a, P<Expr>>)
+ -> PResult<'a, (Span, P<Expr>)> {
+ expr.map(|e| {
+ if self.prev_token_kind == PrevTokenKind::Interpolated {
+ (self.prev_span, e)
+ } else {
+ (e.span, e)
+ }
+ })
+ }
+
+ fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
+ let mut err = self.struct_span_err(self.span,
+ &format!("expected identifier, found {}",
+ self.this_token_descr()));
+ if let token::Ident(ident, false) = &self.token {
+ if ident.is_reserved() && !ident.is_path_segment_keyword() &&
+ ident.name != keywords::Underscore.name()
+ {
+ err.span_suggestion(
+ self.span,
+ "you can escape reserved keywords to use them as identifiers",
+ format!("r#{}", ident),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ if let Some(token_descr) = self.token_descr() {
+ err.span_label(self.span, format!("expected identifier, found {}", token_descr));
+ } else {
+ err.span_label(self.span, "expected identifier");
+ if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
+ err.span_suggestion(
+ self.span,
+ "remove this comma",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ err
+ }
+
+ pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
+ self.parse_ident_common(true)
+ }
+
+ fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
+ match self.token {
+ token::Ident(ident, _) => {
+ if self.token.is_reserved_ident() {
+ let mut err = self.expected_ident_found();
+ if recover {
+ err.emit();
+ } else {
+ return Err(err);
+ }
+ }
+ let span = self.span;
+ self.bump();
+ Ok(Ident::new(ident.name, span))
+ }
+ _ => {
+ Err(if self.prev_token_kind == PrevTokenKind::DocComment {
+ self.span_fatal_err(self.prev_span, Error::UselessDocComment)
+ } else {
+ self.expected_ident_found()
+ })
+ }
+ }
+ }
+
+ /// Checks if the next token is `tok`, and returns `true` if so.
+ ///
+ /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
+ /// encountered.
+ crate fn check(&mut self, tok: &token::Token) -> bool {
+ let is_present = self.token == *tok;
+ if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
+ is_present
+ }
+
+    /// Consumes a token `tok` if it exists. Returns whether the given token was present.
+ pub fn eat(&mut self, tok: &token::Token) -> bool {
+ let is_present = self.check(tok);
+ if is_present { self.bump() }
+ is_present
+ }
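+
+    // For illustration: `check` only records the expectation without consuming, while `eat`
+    // also consumes the token on a match, so callers can branch with e.g.
+    // `if self.eat(&token::Comma) { /* the comma was present and has been consumed */ }`.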
+
+ fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ self.expected_tokens.push(TokenType::Keyword(kw));
+ self.token.is_keyword(kw)
+ }
+
+ /// If the next token is the given keyword, eats it and returns
+ /// `true`. Otherwise, returns `false`.
+ pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ if self.check_keyword(kw) {
+ self.bump();
+ true
+ } else {
+ false
+ }
+ }
+
+ fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
+ if self.token.is_keyword(kw) {
+ self.bump();
+ true
+ } else {
+ false
+ }
+ }
+
+ /// If the given word is not a keyword, signals an error.
+ /// If the next token is not the given word, signals an error.
+ /// Otherwise, eats it.
+ fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
+ if !self.eat_keyword(kw) {
+ self.unexpected()
+ } else {
+ Ok(())
+ }
+ }
+
+ fn check_ident(&mut self) -> bool {
+ if self.token.is_ident() {
+ true
+ } else {
+ self.expected_tokens.push(TokenType::Ident);
+ false
+ }
+ }
+
+ fn check_path(&mut self) -> bool {
+ if self.token.is_path_start() {
+ true
+ } else {
+ self.expected_tokens.push(TokenType::Path);
+ false
+ }
+ }
+
+ fn check_type(&mut self) -> bool {
+ if self.token.can_begin_type() {
+ true
+ } else {
+ self.expected_tokens.push(TokenType::Type);
+ false
+ }
+ }
+
+ fn check_const_arg(&mut self) -> bool {
+ if self.token.can_begin_const_arg() {
+ true
+ } else {
+ self.expected_tokens.push(TokenType::Const);
+ false
+ }
+ }
+
+    /// Expects and consumes a `+`. If `+=` is seen, replaces it with `=`
+    /// and continues. If a `+` is not seen, returns `false`.
+ ///
+ /// This is used when token-splitting `+=` into `+`.
+ /// See issue #47856 for an example of when this may occur.
+ fn eat_plus(&mut self) -> bool {
+ self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
+ match self.token {
+ token::BinOp(token::Plus) => {
+ self.bump();
+ true
+ }
+ token::BinOpEq(token::Plus) => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ self.bump_with(token::Eq, span);
+ true
+ }
+ _ => false,
+ }
+ }
+
+    /// Checks to see if the next token is either `+` or `+=` and returns `true` if so.
+    /// Otherwise, returns `false`.
+ fn check_plus(&mut self) -> bool {
+ if self.token.is_like_plus() {
+ true
+        } else {
+ self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
+ false
+ }
+ }
+
+ /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
+ /// `&` and continues. If an `&` is not seen, signals an error.
+ fn expect_and(&mut self) -> PResult<'a, ()> {
+ self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
+ match self.token {
+ token::BinOp(token::And) => {
+ self.bump();
+ Ok(())
+ }
+ token::AndAnd => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ Ok(self.bump_with(token::BinOp(token::And), span))
+ }
+ _ => self.unexpected()
+ }
+ }
+
+    /// Expects and consumes a `|`. If `||` is seen, replaces it with a single
+    /// `|` and continues. If a `|` is not seen, signals an error.
+ fn expect_or(&mut self) -> PResult<'a, ()> {
+ self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
+ match self.token {
+ token::BinOp(token::Or) => {
+ self.bump();
+ Ok(())
+ }
+ token::OrOr => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ Ok(self.bump_with(token::BinOp(token::Or), span))
+ }
+ _ => self.unexpected()
+ }
+ }
+
+ fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
+ match suffix {
+ None => {/* everything ok */}
+ Some(suf) => {
+ let text = suf.as_str();
+ if text.is_empty() {
+ self.span_bug(sp, "found empty literal suffix in Some")
+ }
+ let msg = format!("{} with a suffix is invalid", kind);
+ self.struct_span_err(sp, &msg)
+ .span_label(sp, msg)
+ .emit();
+ }
+ }
+ }
+
+    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
+    /// `<` and continues. If `<-` is seen, the `<` is consumed and a `-` is
+    /// left in its place. If a `<` is not seen, returns `false`.
+ ///
+ /// This is meant to be used when parsing generics on a path to get the
+ /// starting token.
+ fn eat_lt(&mut self) -> bool {
+ self.expected_tokens.push(TokenType::Token(token::Lt));
+ let ate = match self.token {
+ token::Lt => {
+ self.bump();
+ true
+ }
+ token::BinOp(token::Shl) => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ self.bump_with(token::Lt, span);
+ true
+ }
+ token::LArrow => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ self.bump_with(token::BinOp(token::Minus), span);
+ true
+ }
+ _ => false,
+ };
+
+ if ate {
+ // See doc comment for `unmatched_angle_bracket_count`.
+ self.unmatched_angle_bracket_count += 1;
+ self.max_angle_bracket_count += 1;
+ debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
+ }
+
+ ate
+ }
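+
+    // For example, in a turbofish such as `f::<<u8 as Trait>::Assoc>()` (names illustrative)
+    // the lexer emits `<<` as a single `Shl` token; `eat_lt` consumes the first `<` of it and
+    // leaves a plain `<` as the current token so generic-argument parsing can proceed.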
+
+ fn expect_lt(&mut self) -> PResult<'a, ()> {
+ if !self.eat_lt() {
+ self.unexpected()
+ } else {
+ Ok(())
+ }
+ }
+
+    /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
+    /// with a single `>` and continues. If a `>` is not seen, signals an error.
+ fn expect_gt(&mut self) -> PResult<'a, ()> {
+ self.expected_tokens.push(TokenType::Token(token::Gt));
+ let ate = match self.token {
+ token::Gt => {
+ self.bump();
+ Some(())
+ }
+ token::BinOp(token::Shr) => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ Some(self.bump_with(token::Gt, span))
+ }
+ token::BinOpEq(token::Shr) => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ Some(self.bump_with(token::Ge, span))
+ }
+ token::Ge => {
+ let span = self.span.with_lo(self.span.lo() + BytePos(1));
+ Some(self.bump_with(token::Eq, span))
+ }
+ _ => None,
+ };
+
+ match ate {
+ Some(_) => {
+ // See doc comment for `unmatched_angle_bracket_count`.
+ if self.unmatched_angle_bracket_count > 0 {
+ self.unmatched_angle_bracket_count -= 1;
+ debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
+ }
+
+ Ok(())
+ },
+ None => self.unexpected(),
+ }
+ }
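+
+    // A sketch of why the splitting matters: in `Vec<Vec<u8>>` the trailing `>>` is lexed as a
+    // single shift token, and `expect_gt` consumes one `>` from it while leaving the other for
+    // the outer generic-argument list.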
+
+ /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
+ /// passes through any errors encountered. Used for error recovery.
+ fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
+ let handler = self.diagnostic();
+
+ if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets,
+ SeqSep::none(),
+ TokenExpectType::Expect,
+ |p| Ok(p.parse_token_tree())) {
+ handler.cancel(err);
+ }
+ }
+
+ /// Parses a sequence, including the closing delimiter. The function
+ /// `f` must consume tokens until reaching the next separator or
+ /// closing bracket.
+ pub fn parse_seq_to_end<T, F>(&mut self,
+ ket: &token::Token,
+ sep: SeqSep,
+ f: F)
+ -> PResult<'a, Vec<T>> where
+ F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+ {
+ let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+ if !recovered {
+ self.bump();
+ }
+ Ok(val)
+ }
+
+ /// Parses a sequence, not including the closing delimiter. The function
+ /// `f` must consume tokens until reaching the next separator or
+ /// closing bracket.
+ pub fn parse_seq_to_before_end<T, F>(
+ &mut self,
+ ket: &token::Token,
+ sep: SeqSep,
+ f: F,
+ ) -> PResult<'a, (Vec<T>, bool)>
+ where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
+ {
+ self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
+ }
+
+ fn parse_seq_to_before_tokens<T, F>(
+ &mut self,
+ kets: &[&token::Token],
+ sep: SeqSep,
+ expect: TokenExpectType,
+ mut f: F,
+ ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
+ where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
+ {
+ let mut first = true;
+ let mut recovered = false;
+ let mut v = vec![];
+ while !kets.iter().any(|k| {
+ match expect {
+ TokenExpectType::Expect => self.check(k),
+ TokenExpectType::NoExpect => self.token == **k,
+ }
+ }) {
+ match self.token {
+ token::CloseDelim(..) | token::Eof => break,
+ _ => {}
+ };
+ if let Some(ref t) = sep.sep {
+ if first {
+ first = false;
+ } else {
+ match self.expect(t) {
+ Ok(false) => {}
+ Ok(true) => {
+ recovered = true;
+ break;
+ }
+ Err(mut e) => {
+ // Attempt to keep parsing if it was a similar separator
+ if let Some(ref tokens) = t.similar_tokens() {
+ if tokens.contains(&self.token) {
+ self.bump();
+ }
+ }
+ e.emit();
+ // Attempt to keep parsing if it was an omitted separator
+ match f(self) {
+ Ok(t) => {
+ v.push(t);
+ continue;
+ },
+ Err(mut e) => {
+ e.cancel();
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+ if sep.trailing_sep_allowed && kets.iter().any(|k| {
+ match expect {
+ TokenExpectType::Expect => self.check(k),
+ TokenExpectType::NoExpect => self.token == **k,
+ }
+ }) {
+ break;
+ }
+
+ let t = f(self)?;
+ v.push(t);
+ }
+
+ Ok((v, recovered))
+ }
+
+    /// Parses a sequence delimited by `bra` and `ket`, consuming both delimiters.
+    /// The function `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
+ fn parse_unspanned_seq<T, F>(
+ &mut self,
+ bra: &token::Token,
+ ket: &token::Token,
+ sep: SeqSep,
+ f: F,
+ ) -> PResult<'a, Vec<T>> where
+ F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+ {
+ self.expect(bra)?;
+ let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+ if !recovered {
+ self.eat(ket);
+ }
+ Ok(result)
+ }
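+
+    // Hypothetical usage sketch (not a verbatim call from this file): parsing a parenthesized,
+    // comma-separated list of expressions could look like
+    //
+    //     let exprs = self.parse_unspanned_seq(
+    //         &token::OpenDelim(token::Paren),
+    //         &token::CloseDelim(token::Paren),
+    //         SeqSep::trailing_allowed(token::Comma),
+    //         |p| p.parse_expr(),
+    //     )?;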
+
+    /// Advances the parser by one token.
+ pub fn bump(&mut self) {
+ if self.prev_token_kind == PrevTokenKind::Eof {
+ // Bumping after EOF is a bad sign, usually an infinite loop.
+ self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
+ }
+
+ self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
+
+ // Record last token kind for possible error recovery.
+ self.prev_token_kind = match self.token {
+ token::DocComment(..) => PrevTokenKind::DocComment,
+ token::Comma => PrevTokenKind::Comma,
+ token::BinOp(token::Plus) => PrevTokenKind::Plus,
+ token::Interpolated(..) => PrevTokenKind::Interpolated,
+ token::Eof => PrevTokenKind::Eof,
+ token::Ident(..) => PrevTokenKind::Ident,
+ _ => PrevTokenKind::Other,
+ };
+
+ let next = self.next_tok();
+ self.span = next.sp;
+ self.token = next.tok;
+ self.expected_tokens.clear();
+ // check after each token
+ self.process_potential_macro_variable();
+ }
+
+    /// Advances the parser using the provided token as the next one. Use this when
+    /// consuming part of a token, for example a single `<` from `<<`.
+ fn bump_with(&mut self, next: token::Token, span: Span) {
+ self.prev_span = self.span.with_hi(span.lo());
+ // It would be incorrect to record the kind of the current token, but
+ // fortunately for tokens currently using `bump_with`, the
+ // prev_token_kind will be of no use anyway.
+ self.prev_token_kind = PrevTokenKind::Other;
+ self.span = span;
+ self.token = next;
+ self.expected_tokens.clear();
+ }
+
+ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
+ F: FnOnce(&token::Token) -> R,
+ {
+ if dist == 0 {
+ return f(&self.token)
+ }
+
+ f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+ Some(tree) => match tree {
+ TokenTree::Token(_, tok) => tok,
+ TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
+ },
+ None => token::CloseDelim(self.token_cursor.frame.delim),
+ })
+ }
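+
+    // Illustrative use: peeking one token ahead without consuming anything, as in
+    // `self.look_ahead(1, |t| t == &token::Colon)` to decide whether an identifier is followed
+    // by a `:` (this exact pattern appears in the argument-parsing code below).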
+
+ fn look_ahead_span(&self, dist: usize) -> Span {
+ if dist == 0 {
+ return self.span
+ }
+
+ match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+ Some(TokenTree::Token(span, _)) => span,
+ Some(TokenTree::Delimited(span, ..)) => span.entire(),
+ None => self.look_ahead_span(dist - 1),
+ }
+ }
+ pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
+ self.sess.span_diagnostic.struct_span_fatal(self.span, m)
+ }
+ pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+ self.sess.span_diagnostic.struct_span_fatal(sp, m)
+ }
+ fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
+ err.span_err(sp, self.diagnostic())
+ }
+ fn bug(&self, m: &str) -> ! {
+ self.sess.span_diagnostic.span_bug(self.span, m)
+ }
+ fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
+ self.sess.span_diagnostic.span_err(sp, m)
+ }
+ fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+ self.sess.span_diagnostic.struct_span_err(sp, m)
+ }
+ crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
+ self.sess.span_diagnostic.span_bug(sp, m)
+ }
+
+ fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
+ self.sess.span_diagnostic.cancel(err)
+ }
+
+ crate fn diagnostic(&self) -> &'a errors::Handler {
+ &self.sess.span_diagnostic
+ }
+
+ /// Is the current token one of the keywords that signals a bare function type?
+ fn token_is_bare_fn_keyword(&mut self) -> bool {
+ self.check_keyword(keywords::Fn) ||
+ self.check_keyword(keywords::Unsafe) ||
+ self.check_keyword(keywords::Extern)
+ }
+
+ /// Parses a `TyKind::BareFn` type.
+ fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
+ /*
+
+ [unsafe] [extern "ABI"] fn (S) -> T
+ ^~~~^ ^~~~^ ^~^ ^
+ | | | |
+ | | | Return type
+ | | Argument types
+ | |
+ | ABI
+ Function Style
+ */
+
+ let unsafety = self.parse_unsafety();
+ let abi = if self.eat_keyword(keywords::Extern) {
+ self.parse_opt_abi()?.unwrap_or(Abi::C)
+ } else {
+ Abi::Rust
+ };
+
+ self.expect_keyword(keywords::Fn)?;
+ let (inputs, variadic) = self.parse_fn_args(false, true)?;
+ let ret_ty = self.parse_ret_ty(false)?;
+ let decl = P(FnDecl {
+ inputs,
+ output: ret_ty,
+ variadic,
+ });
+ Ok(TyKind::BareFn(P(BareFnTy {
+ abi,
+ unsafety,
+ generic_params,
+ decl,
+ })))
+ }
+
+ /// Parses asyncness: `async` or nothing.
+ fn parse_asyncness(&mut self) -> IsAsync {
+ if self.eat_keyword(keywords::Async) {
+ IsAsync::Async {
+ closure_id: ast::DUMMY_NODE_ID,
+ return_impl_trait_id: ast::DUMMY_NODE_ID,
+ }
+ } else {
+ IsAsync::NotAsync
+ }
+ }
+
+ /// Parses unsafety: `unsafe` or nothing.
+ fn parse_unsafety(&mut self) -> Unsafety {
+ if self.eat_keyword(keywords::Unsafe) {
+ Unsafety::Unsafe
+ } else {
+ Unsafety::Normal
+ }
+ }
+
+ /// Parses the items in a trait declaration.
+ pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
+ maybe_whole!(self, NtTraitItem, |x| x);
+ let attrs = self.parse_outer_attributes()?;
+ let (mut item, tokens) = self.collect_tokens(|this| {
+ this.parse_trait_item_(at_end, attrs)
+ })?;
+ // See `parse_item` for why this clause is here.
+ if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+ item.tokens = Some(tokens);
+ }
+ Ok(item)
+ }
+
+ fn parse_trait_item_(&mut self,
+ at_end: &mut bool,
+ mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
+ let lo = self.span;
+
+ let (name, node, generics) = if self.eat_keyword(keywords::Type) {
+ self.parse_trait_item_assoc_ty()?
+ } else if self.is_const_item() {
+ self.expect_keyword(keywords::Const)?;
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ let default = if self.eat(&token::Eq) {
+ let expr = self.parse_expr()?;
+ self.expect(&token::Semi)?;
+ Some(expr)
+ } else {
+ self.expect(&token::Semi)?;
+ None
+ };
+ (ident, TraitItemKind::Const(ty, default), ast::Generics::default())
+ } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
+ // trait item macro.
+ (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
+ } else {
+ let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
+
+ let ident = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
+
+ let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| {
+                // This is somewhat dubious; we don't want to allow
+                // argument names to be left off if there is a
+                // definition...
+
+ // We don't allow argument names to be left off in edition 2018.
+ p.parse_arg_general(p.span.rust_2018(), true)
+ })?;
+ generics.where_clause = self.parse_where_clause()?;
+
+ let sig = ast::MethodSig {
+ header: FnHeader {
+ unsafety,
+ constness,
+ abi,
+ asyncness,
+ },
+ decl: d,
+ };
+
+ let body = match self.token {
+ token::Semi => {
+ self.bump();
+ *at_end = true;
+ debug!("parse_trait_methods(): parsing required method");
+ None
+ }
+ token::OpenDelim(token::Brace) => {
+ debug!("parse_trait_methods(): parsing provided method");
+ *at_end = true;
+ let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(inner_attrs.iter().cloned());
+ Some(body)
+ }
+ token::Interpolated(ref nt) => {
+ match **nt {
+ token::NtBlock(..) => {
+ *at_end = true;
+ let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(inner_attrs.iter().cloned());
+ Some(body)
+ }
+ _ => {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
+ token_str));
+ err.span_label(self.span, "expected `;` or `{`");
+ return Err(err);
+ }
+ }
+ }
+ _ => {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
+ token_str));
+ err.span_label(self.span, "expected `;` or `{`");
+ return Err(err);
+ }
+ };
+ (ident, ast::TraitItemKind::Method(sig, body), generics)
+ };
+
+ Ok(TraitItem {
+ id: ast::DUMMY_NODE_ID,
+ ident: name,
+ attrs,
+ generics,
+ node,
+ span: lo.to(self.prev_span),
+ tokens: None,
+ })
+ }
+
+ /// Parses an optional return type `[ -> TY ]` in a function declaration.
+ fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
+ if self.eat(&token::RArrow) {
+ Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
+ } else {
+ Ok(FunctionRetTy::Default(self.span.shrink_to_lo()))
+ }
+ }
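+
+    // For instance, when parsing `fn f() -> u32 { .. }` this returns `FunctionRetTy::Ty(..)`
+    // for the `u32`, while `fn f() { .. }` yields `FunctionRetTy::Default(..)` carrying an
+    // empty span pointing just before the current token.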
+
+ /// Parses a type.
+ pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
+ self.parse_ty_common(true, true)
+ }
+
+ /// Parses a type in restricted contexts where `+` is not permitted.
+ ///
+ /// Example 1: `&'a TYPE`
+ /// `+` is prohibited to maintain operator priority (P(+) < P(&)).
+ /// Example 2: `value1 as TYPE + value2`
+ /// `+` is prohibited to avoid interactions with expression grammar.
+ fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
+ self.parse_ty_common(false, true)
+ }
+
+ fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool)
+ -> PResult<'a, P<Ty>> {
+ maybe_whole!(self, NtTy, |x| x);
+
+ let lo = self.span;
+ let mut impl_dyn_multi = false;
+ let node = if self.eat(&token::OpenDelim(token::Paren)) {
+ // `(TYPE)` is a parenthesized type.
+ // `(TYPE,)` is a tuple with a single field of type TYPE.
+ let mut ts = vec![];
+ let mut last_comma = false;
+ while self.token != token::CloseDelim(token::Paren) {
+ ts.push(self.parse_ty()?);
+ if self.eat(&token::Comma) {
+ last_comma = true;
+ } else {
+ last_comma = false;
+ break;
+ }
+ }
+ let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
+ self.expect(&token::CloseDelim(token::Paren))?;
+
+ if ts.len() == 1 && !last_comma {
+ let ty = ts.into_iter().nth(0).unwrap().into_inner();
+ let maybe_bounds = allow_plus && self.token.is_like_plus();
+ match ty.node {
+ // `(TY_BOUND_NOPAREN) + BOUND + ...`.
+ TyKind::Path(None, ref path) if maybe_bounds => {
+ self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
+ }
+ TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
+ if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
+ let path = match bounds[0] {
+ GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
+ GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
+ };
+ self.parse_remaining_bounds(Vec::new(), path, lo, true)?
+ }
+ // `(TYPE)`
+ _ => TyKind::Paren(P(ty))
+ }
+ } else {
+ TyKind::Tup(ts)
+ }
+ } else if self.eat(&token::Not) {
+ // Never type `!`
+ TyKind::Never
+ } else if self.eat(&token::BinOp(token::Star)) {
+ // Raw pointer
+ TyKind::Ptr(self.parse_ptr()?)
+ } else if self.eat(&token::OpenDelim(token::Bracket)) {
+ // Array or slice
+ let t = self.parse_ty()?;
+ // Parse optional `; EXPR` in `[TYPE; EXPR]`
+ let t = match self.maybe_parse_fixed_length_of_vec()? {
+ None => TyKind::Slice(t),
+ Some(length) => TyKind::Array(t, AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: length,
+ }),
+ };
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ t
+ } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
+ // Reference
+ self.expect_and()?;
+ self.parse_borrowed_pointee()?
+ } else if self.eat_keyword_noexpect(keywords::Typeof) {
+ // `typeof(EXPR)`
+ // In order to not be ambiguous, the type must be surrounded by parens.
+ self.expect(&token::OpenDelim(token::Paren))?;
+ let e = AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: self.parse_expr()?,
+ };
+ self.expect(&token::CloseDelim(token::Paren))?;
+ TyKind::Typeof(e)
+ } else if self.eat_keyword(keywords::Underscore) {
+ // A type to be inferred `_`
+ TyKind::Infer
+ } else if self.token_is_bare_fn_keyword() {
+ // Function pointer type
+ self.parse_ty_bare_fn(Vec::new())?
+ } else if self.check_keyword(keywords::For) {
+ // Function pointer type or bound list (trait object type) starting with a poly-trait.
+ // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
+ // `for<'lt> Trait1<'lt> + Trait2 + 'a`
+ let lo = self.span;
+ let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+ if self.token_is_bare_fn_keyword() {
+ self.parse_ty_bare_fn(lifetime_defs)?
+ } else {
+ let path = self.parse_path(PathStyle::Type)?;
+ let parse_plus = allow_plus && self.check_plus();
+ self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
+ }
+ } else if self.eat_keyword(keywords::Impl) {
+ // Always parse bounds greedily for better error recovery.
+ let bounds = self.parse_generic_bounds(None)?;
+ impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
+ TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
+ } else if self.check_keyword(keywords::Dyn) &&
+ (self.span.rust_2018() ||
+ self.look_ahead(1, |t| t.can_begin_bound() &&
+ !can_continue_type_after_non_fn_ident(t))) {
+ self.bump(); // `dyn`
+ // Always parse bounds greedily for better error recovery.
+ let bounds = self.parse_generic_bounds(None)?;
+ impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
+ TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
+ } else if self.check(&token::Question) ||
+ self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
+ // Bound list (trait object type)
+ TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?,
+ TraitObjectSyntax::None)
+ } else if self.eat_lt() {
+ // Qualified path
+ let (qself, path) = self.parse_qpath(PathStyle::Type)?;
+ TyKind::Path(Some(qself), path)
+ } else if self.token.is_path_start() {
+ // Simple path
+ let path = self.parse_path(PathStyle::Type)?;
+ if self.eat(&token::Not) {
+ // Macro invocation in type position
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let node = Mac_ { path, tts, delim };
+ TyKind::Mac(respan(lo.to(self.prev_span), node))
+ } else {
+ // Just a type path or bound list (trait object type) starting with a trait.
+ // `Type`
+ // `Trait1 + Trait2 + 'a`
+ if allow_plus && self.check_plus() {
+ self.parse_remaining_bounds(Vec::new(), path, lo, true)?
+ } else {
+ TyKind::Path(None, path)
+ }
+ }
+ } else {
+ let msg = format!("expected type, found {}", self.this_token_descr());
+ return Err(self.fatal(&msg));
+ };
+
+ let span = lo.to(self.prev_span);
+ let ty = Ty { node, span, id: ast::DUMMY_NODE_ID };
+
+ // Try to recover from use of `+` with incorrect priority.
+ self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
+ self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
+ let ty = self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)?;
+
+ Ok(P(ty))
+ }
+
+ fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
+ lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
+ let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
+ let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
+ if parse_plus {
+ self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
+ bounds.append(&mut self.parse_generic_bounds(None)?);
+ }
+ Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
+ }
+
+ fn maybe_report_ambiguous_plus(&mut self, allow_plus: bool, impl_dyn_multi: bool, ty: &Ty) {
+ if !allow_plus && impl_dyn_multi {
+ let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
+ self.struct_span_err(ty.span, "ambiguous `+` in a type")
+ .span_suggestion(
+ ty.span,
+ "use parentheses to disambiguate",
+ sum_with_parens,
+ Applicability::MachineApplicable
+ ).emit();
+ }
+ }
+
+ fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PResult<'a, ()> {
+ // Do not add `+` to expected tokens.
+ if !allow_plus || !self.token.is_like_plus() {
+ return Ok(())
+ }
+
+ self.bump(); // `+`
+ let bounds = self.parse_generic_bounds(None)?;
+ let sum_span = ty.span.to(self.prev_span);
+
+ let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
+ "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
+
+ match ty.node {
+ TyKind::Rptr(ref lifetime, ref mut_ty) => {
+ let sum_with_parens = pprust::to_string(|s| {
+ use crate::print::pprust::PrintState;
+
+ s.s.word("&")?;
+ s.print_opt_lifetime(lifetime)?;
+ s.print_mutability(mut_ty.mutbl)?;
+ s.popen()?;
+ s.print_type(&mut_ty.ty)?;
+ s.print_type_bounds(" +", &bounds)?;
+ s.pclose()
+ });
+ err.span_suggestion(
+ sum_span,
+ "try adding parentheses",
+ sum_with_parens,
+ Applicability::MachineApplicable
+ );
+ }
+ TyKind::Ptr(..) | TyKind::BareFn(..) => {
+ err.span_label(sum_span, "perhaps you forgot parentheses?");
+ }
+ _ => {
+ err.span_label(sum_span, "expected a path");
+ },
+ }
+ err.emit();
+ Ok(())
+ }
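+
+    // Illustrative recovery (hypothetical input): for `&Foo + Send` the E0178 error above
+    // suggests the parenthesized form `&(Foo + Send)`, since only a path may appear on the
+    // left-hand side of `+` in a type.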
+
+ // Try to recover from associated item paths like `[T]::AssocItem`/`(T, U)::AssocItem`.
+ fn maybe_recover_from_bad_qpath<T: RecoverQPath>(&mut self, base: T, allow_recovery: bool)
+ -> PResult<'a, T> {
+ // Do not add `::` to expected tokens.
+ if !allow_recovery || self.token != token::ModSep {
+ return Ok(base);
+ }
+ let ty = match base.to_ty() {
+ Some(ty) => ty,
+ None => return Ok(base),
+    /// Returns the span of the expression if it was not interpolated,
+    /// or the span of the interpolated token.
+
+ self.bump(); // `::`
+ let mut segments = Vec::new();
+ self.parse_path_segments(&mut segments, T::PATH_STYLE, true)?;
+
+ let span = ty.span.to(self.prev_span);
+ let path_span = span.to(span); // use an empty path since `position` == 0
+ let recovered = base.to_recovered(
+ Some(QSelf { ty, path_span, position: 0 }),
+ ast::Path { segments, span },
+ );
+
+ self.diagnostic()
+ .struct_span_err(span, "missing angle brackets in associated item path")
+ .span_suggestion( // this is a best-effort recovery
+ span, "try", recovered.to_string(), Applicability::MaybeIncorrect
+ ).emit();
+
+ Ok(recovered)
+ }
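+
+    // Sketch of the recovery: for an input like `[T]::AssocItem` (names illustrative) the
+    // parser suggests the fully qualified form `<[T]>::AssocItem`, treating the whole base
+    // type as the `QSelf` of the path.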
+
+ fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
+ let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
+ let mutbl = self.parse_mutability();
+ let ty = self.parse_ty_no_plus()?;
+        Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }))
+ }
+
+ fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
+ let mutbl = if self.eat_keyword(keywords::Mut) {
+ Mutability::Mutable
+ } else if self.eat_keyword(keywords::Const) {
+ Mutability::Immutable
+ } else {
+ let span = self.prev_span;
+ let msg = "expected mut or const in raw pointer type";
+ self.struct_span_err(span, msg)
+ .span_label(span, msg)
+ .help("use `*mut T` or `*const T` as appropriate")
+ .emit();
+ Mutability::Immutable
+ };
+ let t = self.parse_ty_no_plus()?;
+ Ok(MutTy { ty: t, mutbl: mutbl })
+ }
+
+ fn is_named_argument(&mut self) -> bool {
+ let offset = match self.token {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
+ _ => 0,
+ }
+ token::BinOp(token::And) | token::AndAnd => 1,
+ _ if self.token.is_keyword(keywords::Mut) => 1,
+ _ => 0,
+ };
+
+ self.look_ahead(offset, |t| t.is_ident()) &&
+ self.look_ahead(offset + 1, |t| t == &token::Colon)
+ }
+
+ /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+ /// error.
+ fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
+ if let token::DocComment(_) = self.token {
+ let mut err = self.diagnostic().struct_span_err(
+ self.span,
+ &format!("documentation comments cannot be applied to {}", applied_to),
+ );
+ err.span_label(self.span, "doc comments are not allowed here");
+ err.emit();
+ self.bump();
+ } else if self.token == token::Pound && self.look_ahead(1, |t| {
+ *t == token::OpenDelim(token::Bracket)
+ }) {
+ let lo = self.span;
+ // Skip every token until next possible arg.
+ while self.token != token::CloseDelim(token::Bracket) {
+ self.bump();
+ }
+ let sp = lo.to(self.span);
+ self.bump();
+ let mut err = self.diagnostic().struct_span_err(
+ sp,
+ &format!("attributes cannot be applied to {}", applied_to),
+ );
+ err.span_label(sp, "attributes are not allowed here");
+ err.emit();
+ }
+ }
+
+    /// This version of `parse_arg` doesn't necessarily require identifier names.
+ fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
+ maybe_whole!(self, NtArg, |x| x);
+
+ if let Ok(Some(_)) = self.parse_self_arg() {
+ let mut err = self.struct_span_err(self.prev_span,
+ "unexpected `self` argument in function");
+ err.span_label(self.prev_span,
+ "`self` is only valid as the first argument of an associated function");
+ return Err(err);
+ }
+
+ let (pat, ty) = if require_name || self.is_named_argument() {
+ debug!("parse_arg_general parse_pat (require_name:{})",
+ require_name);
+ self.eat_incorrect_doc_comment("method arguments");
+ let pat = self.parse_pat(Some("argument name"))?;
+
+ if let Err(mut err) = self.expect(&token::Colon) {
+ // If we find a pattern followed by an identifier, it could be an (incorrect)
+ // C-style parameter declaration.
+ if self.check_ident() && self.look_ahead(1, |t| {
+ *t == token::Comma || *t == token::CloseDelim(token::Paren)
+ }) {
+ let ident = self.parse_ident().unwrap();
+ let span = pat.span.with_hi(ident.span.hi());
+
+ err.span_suggestion(
+ span,
+ "declare the type after the parameter binding",
+ String::from("<identifier>: <type>"),
+ Applicability::HasPlaceholders,
+ );
+ } else if require_name && is_trait_item {
+ if let PatKind::Ident(_, ident, _) = pat.node {
+ err.span_suggestion(
+ pat.span,
+ "explicitly ignore parameter",
+ format!("_: {}", ident),
+ Applicability::MachineApplicable,
+ );
+ }
+
+ err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
+ }
+
+ return Err(err);
+ }
+
+ self.eat_incorrect_doc_comment("a method argument's type");
+ (pat, self.parse_ty()?)
+ } else {
+ debug!("parse_arg_general ident_to_pat");
+ let parser_snapshot_before_ty = self.clone();
+ self.eat_incorrect_doc_comment("a method argument's type");
+ let mut ty = self.parse_ty();
+ if ty.is_ok() && self.token != token::Comma &&
+ self.token != token::CloseDelim(token::Paren) {
+ // This wasn't actually a type, but a pattern looking like a type,
+ // so we are going to rollback and re-parse for recovery.
+ ty = self.unexpected();
+ }
+ match ty {
+ Ok(ty) => {
+ let ident = Ident::new(keywords::Invalid.name(), self.prev_span);
+ let pat = P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Ident(
+ BindingMode::ByValue(Mutability::Immutable), ident, None),
+ span: ty.span,
+ });
+ (pat, ty)
+ }
+ Err(mut err) => {
+ // Recover from attempting to parse the argument as a type without pattern.
+ err.cancel();
+ mem::replace(self, parser_snapshot_before_ty);
+ let pat = self.parse_pat(Some("argument name"))?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+
+ let mut err = self.diagnostic().struct_span_err_with_code(
+ pat.span,
+ "patterns aren't allowed in methods without bodies",
+ DiagnosticId::Error("E0642".into()),
+ );
+ err.span_suggestion_short(
+ pat.span,
+ "give this argument a name or use an underscore to ignore it",
+ "_".to_owned(),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+
+ // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
+ let pat = P(Pat {
+ node: PatKind::Wild,
+ span: pat.span,
+ id: ast::DUMMY_NODE_ID
+ });
+ (pat, ty)
+ }
+ }
+ };
+
+ Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
+ }
+
+ /// Parses a single function argument.
+ crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
+ self.parse_arg_general(true, false)
+ }
+
+ /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
+ fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
+ let pat = self.parse_pat(Some("argument name"))?;
+ let t = if self.eat(&token::Colon) {
+ self.parse_ty()?
+ } else {
+ P(Ty {
+ id: ast::DUMMY_NODE_ID,
+ node: TyKind::Infer,
+ span: self.prev_span,
+ })
+ };
+ Ok(Arg {
+ ty: t,
+ pat,
+ id: ast::DUMMY_NODE_ID
+ })
+ }
+
+ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
+ if self.eat(&token::Semi) {
+ Ok(Some(self.parse_expr()?))
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Matches `token_lit = LIT_INTEGER | ...`.
+ fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
+ let out = match self.token {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
+ ExprKind::Lit(ref lit) => { lit.node.clone() }
+ _ => { return self.unexpected_last(&self.token); }
+ },
+ _ => { return self.unexpected_last(&self.token); }
+ },
+ token::Literal(lit, suf) => {
+ let diag = Some((self.span, &self.sess.span_diagnostic));
+ let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);
+
+ if suffix_illegal {
+ let sp = self.span;
+ self.expect_no_suffix(sp, lit.literal_name(), suf)
+ }
+
+ result.unwrap()
+ }
+ token::Dot if self.look_ahead(1, |t| match t {
+ token::Literal(parse::token::Lit::Integer(_) , _) => true,
+ _ => false,
+ }) => { // recover from `let x = .4;`
+ let lo = self.span;
+ self.bump();
+ if let token::Literal(
+ parse::token::Lit::Integer(val),
+ suffix,
+ ) = self.token {
+ let suffix = suffix.and_then(|s| {
+ let s = s.as_str().get();
+ if ["f32", "f64"].contains(&s) {
+ Some(s)
+ } else {
+ None
+ }
+ }).unwrap_or("");
+ self.bump();
+ let sp = lo.to(self.prev_span);
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "float literals must have an integer part");
+ err.span_suggestion(
+ sp,
+ "must have an integer part",
+ format!("0.{}{}", val, suffix),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+ return Ok(match suffix {
+ "f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
+ "f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
+ _ => ast::LitKind::FloatUnsuffixed(val),
+ });
+ } else {
+ unreachable!();
+ };
+ }
+ _ => { return self.unexpected_last(&self.token); }
+ };
+
+ self.bump();
+ Ok(out)
+ }
+
+ /// Matches `lit = true | false | token_lit`.
+ crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
+ let lo = self.span;
+ let lit = if self.eat_keyword(keywords::True) {
+ LitKind::Bool(true)
+ } else if self.eat_keyword(keywords::False) {
+ LitKind::Bool(false)
+ } else {
+ let lit = self.parse_lit_token()?;
+ lit
+ };
+ Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
+ }
+
+ /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
+ crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
+ maybe_whole_expr!(self);
+
+ let minus_lo = self.span;
+ let minus_present = self.eat(&token::BinOp(token::Minus));
+ let lo = self.span;
+ let literal = self.parse_lit()?;
+ let hi = self.prev_span;
+ let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
+
+ if minus_present {
+ let minus_hi = self.prev_span;
+ let unary = self.mk_unary(UnOp::Neg, expr);
+ Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new()))
+ } else {
+ Ok(expr)
+ }
+ }
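+
+    // For example, `-1` is parsed here as `ExprKind::Unary(UnOp::Neg, ..)` wrapping the
+    // literal `1`, rather than as a single negative literal token.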
+
+ fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
+ match self.token {
+ token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+ let span = self.span;
+ self.bump();
+ Ok(Ident::new(ident.name, span))
+ }
+ _ => self.parse_ident(),
+ }
+ }
+
+ fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
+ match self.token {
+ token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+ let span = self.span;
+ self.bump();
+ Ok(Ident::new(ident.name, span))
+ }
+ _ => self.parse_ident(),
+ }
+ }
+
+ /// Parses a qualified path.
+ /// Assumes that the leading `<` has been parsed already.
+ ///
+ /// `qualified_path = <type [as trait_ref]>::path`
+ ///
+ /// # Examples
+ /// `<T>::default`
+ /// `<T as U>::a`
+ /// `<T as U>::F::a<S>` (without disambiguator)
+ /// `<T as U>::F::a::<S>` (with disambiguator)
+ fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
+ let lo = self.prev_span;
+ let ty = self.parse_ty()?;
+
+ // `path` will contain the prefix of the path up to the `>`,
+ // if any (e.g., `U` in the `<T as U>::*` examples
+ // above). `path_span` has the span of that path, or an empty
+ // span in the case of something like `<T>::Bar`.
+ let (mut path, path_span);
+ if self.eat_keyword(keywords::As) {
+ let path_lo = self.span;
+ path = self.parse_path(PathStyle::Type)?;
+ path_span = path_lo.to(self.prev_span);
+ } else {
+ path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
+ path_span = self.span.to(self.span);
+ }
+
+ // See doc comment for `unmatched_angle_bracket_count`.
+ self.expect(&token::Gt)?;
+ if self.unmatched_angle_bracket_count > 0 {
+ self.unmatched_angle_bracket_count -= 1;
+ debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
+ }
+
+ self.expect(&token::ModSep)?;
+
+ let qself = QSelf { ty, path_span, position: path.segments.len() };
+ self.parse_path_segments(&mut path.segments, style, true)?;
+
+ Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
+ }
+
+ /// Parses simple paths.
+ ///
+ /// `path = [::] segment+`
+ /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
+ ///
+ /// # Examples
+ /// `a::b::C<D>` (without disambiguator)
+ /// `a::b::C::<D>` (with disambiguator)
+ /// `Fn(Args)` (without disambiguator)
+ /// `Fn::(Args)` (with disambiguator)
+ pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
+ self.parse_path_common(style, true)
+ }
+
+ crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool)
+ -> PResult<'a, ast::Path> {
+ maybe_whole!(self, NtPath, |path| {
+ if style == PathStyle::Mod &&
+ path.segments.iter().any(|segment| segment.args.is_some()) {
+ self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
+ }
+ path
+ });
+
+ let lo = self.meta_var_span.unwrap_or(self.span);
+ let mut segments = Vec::new();
+ let mod_sep_ctxt = self.span.ctxt();
+ if self.eat(&token::ModSep) {
+ segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
+ }
+ self.parse_path_segments(&mut segments, style, enable_warning)?;
+
+ Ok(ast::Path { segments, span: lo.to(self.prev_span) })
+ }
+
+ /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
+ /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
+ /// attributes.
+ pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
+ let meta_ident = match self.token {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtMeta(ref meta) => match meta.node {
+ ast::MetaItemKind::Word => Some(meta.ident.clone()),
+ _ => None,
+ },
+ _ => None,
+ },
+ _ => None,
+ };
+ if let Some(path) = meta_ident {
+ self.bump();
+ return Ok(path);
+ }
+ self.parse_path(style)
+ }
+
+ fn parse_path_segments(&mut self,
+ segments: &mut Vec<PathSegment>,
+ style: PathStyle,
+ enable_warning: bool)
+ -> PResult<'a, ()> {
+ loop {
+ let segment = self.parse_path_segment(style, enable_warning)?;
+ if style == PathStyle::Expr {
+ // In order to check for trailing angle brackets, we must have finished
+ // recursing (`parse_path_segment` can indirectly call this function),
+ // that is, the next token must be the highlighted part of the below example:
+ //
+ // `Foo::<Bar as Baz<T>>::Qux`
+ // ^ here
+ //
+ // As opposed to the below highlight (if we had only finished the first
+ // recursion):
+ //
+ // `Foo::<Bar as Baz<T>>::Qux`
+ // ^ here
+ //
+ // `PathStyle::Expr` is only provided at the root invocation and never in
+ // `parse_path_segment` to recurse and therefore can be checked to maintain
+ // this invariant.
+ self.check_trailing_angle_brackets(&segment, token::ModSep);
+ }
+ segments.push(segment);
+
+ if self.is_import_coupler() || !self.eat(&token::ModSep) {
+ return Ok(());
+ }
+ }
+ }
+
+ fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool)
+ -> PResult<'a, PathSegment> {
+ let ident = self.parse_path_segment_ident()?;
+
+ let is_args_start = |token: &token::Token| match *token {
+ token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) => true,
+ _ => false,
+ };
+ let check_args_start = |this: &mut Self| {
+ this.expected_tokens.extend_from_slice(
+ &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
+ );
+ is_args_start(&this.token)
+ };
+
+ Ok(if style == PathStyle::Type && check_args_start(self) ||
+ style != PathStyle::Mod && self.check(&token::ModSep)
+ && self.look_ahead(1, |t| is_args_start(t)) {
+ // Generic arguments are found - `<`, `(`, `::<` or `::(`.
+ if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning {
+ self.diagnostic().struct_span_warn(self.prev_span, "unnecessary path disambiguator")
+ .span_label(self.prev_span, "try removing `::`").emit();
+ }
+ let lo = self.span;
+
+ // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
+ // it isn't, then we reset the unmatched angle bracket count as we're about to start
+ // parsing a new path.
+ if style == PathStyle::Expr {
+ self.unmatched_angle_bracket_count = 0;
+ self.max_angle_bracket_count = 0;
+ }
+
+ let args = if self.eat_lt() {
+ // `<'a, T, A = U>`
+ let (args, bindings) =
+ self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?;
+ self.expect_gt()?;
+ let span = lo.to(self.prev_span);
+ AngleBracketedArgs { args, bindings, span }.into()
+ } else {
+ // `(T, U) -> R`
+ self.bump(); // `(`
+ let (inputs, recovered) = self.parse_seq_to_before_tokens(
+ &[&token::CloseDelim(token::Paren)],
+ SeqSep::trailing_allowed(token::Comma),
+ TokenExpectType::Expect,
+ |p| p.parse_ty())?;
+ if !recovered {
+ self.bump(); // `)`
+ }
+ let span = lo.to(self.prev_span);
+ let output = if self.eat(&token::RArrow) {
+ Some(self.parse_ty_common(false, false)?)
+ } else {
+ None
+ };
+ ParenthesizedArgs { inputs, output, span }.into()
+ };
+
+ PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
+ } else {
+ // Generic arguments are not found.
+ PathSegment::from_ident(ident)
+ })
+ }
+
+ crate fn check_lifetime(&mut self) -> bool {
+ self.expected_tokens.push(TokenType::Lifetime);
+ self.token.is_lifetime()
+ }
+
+ /// Parses a single lifetime `'a` or panics.
+ crate fn expect_lifetime(&mut self) -> Lifetime {
+ if let Some(ident) = self.token.lifetime() {
+ let span = self.span;
+ self.bump();
+ Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
+ } else {
+ self.span_bug(self.span, "not a lifetime")
+ }
+ }
+
+ fn eat_label(&mut self) -> Option<Label> {
+ if let Some(ident) = self.token.lifetime() {
+ let span = self.span;
+ self.bump();
+ Some(Label { ident: Ident::new(ident.name, span) })
+ } else {
+ None
+ }
+ }
+
+ /// Parses mutability (`mut` or nothing).
+ fn parse_mutability(&mut self) -> Mutability {
+ if self.eat_keyword(keywords::Mut) {
+ Mutability::Mutable
+ } else {
+ Mutability::Immutable
+ }
+ }
+
+ fn parse_field_name(&mut self) -> PResult<'a, Ident> {
+ if let token::Literal(token::Integer(name), None) = self.token {
+ self.bump();
+ Ok(Ident::new(name, self.prev_span))
+ } else {
+ self.parse_ident_common(false)
+ }
+ }
+
+    /// Parses `ident (COLON expr)?`.
+ fn parse_field(&mut self) -> PResult<'a, Field> {
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+
+ // Check if a colon exists one ahead. This means we're parsing a fieldname.
+ let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| {
+ t == &token::Colon || t == &token::Eq
+ }) {
+ let fieldname = self.parse_field_name()?;
+
+ // Check for an equals token. This means the source incorrectly attempts to
+ // initialize a field with an eq rather than a colon.
+ if self.token == token::Eq {
+ self.diagnostic()
+ .struct_span_err(self.span, "expected `:`, found `=`")
+ .span_suggestion(
+ fieldname.span.shrink_to_hi().to(self.span),
+ "replace equals symbol with a colon",
+ ":".to_string(),
+ Applicability::MachineApplicable,
+ )
+ .emit();
+ }
+ self.bump(); // `:`
+ (fieldname, self.parse_expr()?, false)
+ } else {
+ let fieldname = self.parse_ident_common(false)?;
+
+ // Mimic `x: x` for the `x` field shorthand.
+ let path = ast::Path::from_ident(fieldname);
+ let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
+ (fieldname, expr, true)
+ };
+ Ok(ast::Field {
+ ident: fieldname,
+ span: lo.to(expr.span),
+ expr,
+ is_shorthand,
+ attrs: attrs.into(),
+ })
+ }
+
+ fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
+ P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
+ }
+
+ fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
+ ExprKind::Unary(unop, expr)
+ }
+
+ fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+ ExprKind::Binary(binop, lhs, rhs)
+ }
+
+ fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
+ ExprKind::Call(f, args)
+ }
+
+ fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
+ ExprKind::Index(expr, idx)
+ }
+
+ fn mk_range(&mut self,
+ start: Option<P<Expr>>,
+ end: Option<P<Expr>>,
+ limits: RangeLimits)
+ -> PResult<'a, ast::ExprKind> {
+ if end.is_none() && limits == RangeLimits::Closed {
+ Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd))
+ } else {
+ Ok(ExprKind::Range(start, end, limits))
+ }
+ }
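+
+    // For instance, `1..` and `1..=2` are accepted, but an inclusive range with no end such
+    // as `1..=` is rejected with `Error::InclusiveRangeWithNoEnd`.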
+
+ fn mk_assign_op(&mut self, binop: ast::BinOp,
+ lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
+ ExprKind::AssignOp(binop, lhs, rhs)
+ }
+
+ pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> {
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+ node: ExprKind::Mac(source_map::Spanned {node: m, span: span}),
+ span,
+ attrs,
+ })
+ }
+
+ fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
+ let delim = match self.token {
+ token::OpenDelim(delim) => delim,
+ _ => {
+ let msg = "expected open delimiter";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err)
+ }
+ };
+ let tts = match self.parse_token_tree() {
+ TokenTree::Delimited(_, _, tts) => tts,
+ _ => unreachable!(),
+ };
+ let delim = match delim {
+ token::Paren => MacDelimiter::Parenthesis,
+ token::Bracket => MacDelimiter::Bracket,
+ token::Brace => MacDelimiter::Brace,
+ token::NoDelim => self.bug("unexpected no delimiter"),
+ };
+ Ok((delim, tts.into()))
+ }
+
+    /// At the bottom (top?) of the precedence hierarchy, parses things like
+    /// parenthesized exprs, macros, `return`, etc.
+ ///
+ /// N.B., this does not parse outer attributes, and is private because it only works
+ /// correctly if called from `parse_dot_or_call_expr()`.
+ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
+ maybe_whole_expr!(self);
+
+ // Outer attributes are already parsed and will be
+ // added to the return value after the fact.
+ //
+        // Therefore, prevent the sub-parser from parsing
+        // attributes by giving them an empty "already parsed" list.
+ let mut attrs = ThinVec::new();
+
+ let lo = self.span;
+ let mut hi = self.span;
+
+ let ex: ExprKind;
+
+ // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
+ match self.token {
+ token::OpenDelim(token::Paren) => {
+ self.bump();
+
+ attrs.extend(self.parse_inner_attributes()?);
+
+ // (e) is parenthesized e
+ // (e,) is a tuple with only one field, e
+ let mut es = vec![];
+ let mut trailing_comma = false;
+ let mut recovered = false;
+ while self.token != token::CloseDelim(token::Paren) {
+ es.push(self.parse_expr()?);
+ recovered = self.expect_one_of(
+ &[],
+ &[token::Comma, token::CloseDelim(token::Paren)],
+ )?;
+ if self.eat(&token::Comma) {
+ trailing_comma = true;
+ } else {
+ trailing_comma = false;
+ break;
+ }
+ }
+ if !recovered {
+ self.bump();
+ }
+
+ hi = self.prev_span;
+ ex = if es.len() == 1 && !trailing_comma {
+ ExprKind::Paren(es.into_iter().nth(0).unwrap())
+ } else {
+ ExprKind::Tup(es)
+ };
+ }
+ token::OpenDelim(token::Brace) => {
+ return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
+ }
+ token::BinOp(token::Or) | token::OrOr => {
+ return self.parse_lambda_expr(attrs);
+ }
+ token::OpenDelim(token::Bracket) => {
+ self.bump();
+
+ attrs.extend(self.parse_inner_attributes()?);
+
+ if self.eat(&token::CloseDelim(token::Bracket)) {
+ // Empty vector.
+ ex = ExprKind::Array(Vec::new());
+ } else {
+ // Nonempty vector.
+ let first_expr = self.parse_expr()?;
+ if self.eat(&token::Semi) {
+ // Repeating array syntax: [ 0; 512 ]
+ let count = AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: self.parse_expr()?,
+ };
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ ex = ExprKind::Repeat(first_expr, count);
+ } else if self.eat(&token::Comma) {
+ // Vector with two or more elements.
+ let remaining_exprs = self.parse_seq_to_end(
+ &token::CloseDelim(token::Bracket),
+ SeqSep::trailing_allowed(token::Comma),
+ |p| Ok(p.parse_expr()?)
+ )?;
+ let mut exprs = vec![first_expr];
+ exprs.extend(remaining_exprs);
+ ex = ExprKind::Array(exprs);
+ } else {
+ // Vector with one element.
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ ex = ExprKind::Array(vec![first_expr]);
+ }
+ }
+ hi = self.prev_span;
+ }
+ _ => {
+ if self.eat_lt() {
+ let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+ hi = path.span;
+ return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
+ }
+                if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+ if self.is_async_block() { // check for `async {` and `async move {`
+ return self.parse_async_block(attrs);
+ } else {
+ return self.parse_lambda_expr(attrs);
+ }
+ }
+ if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
+ return self.parse_lambda_expr(attrs);
+ }
+ if self.eat_keyword(keywords::If) {
+ return self.parse_if_expr(attrs);
+ }
+ if self.eat_keyword(keywords::For) {
+ let lo = self.prev_span;
+ return self.parse_for_expr(None, lo, attrs);
+ }
+ if self.eat_keyword(keywords::While) {
+ let lo = self.prev_span;
+ return self.parse_while_expr(None, lo, attrs);
+ }
+ if let Some(label) = self.eat_label() {
+ let lo = label.ident.span;
+ self.expect(&token::Colon)?;
+ if self.eat_keyword(keywords::While) {
+ return self.parse_while_expr(Some(label), lo, attrs)
+ }
+ if self.eat_keyword(keywords::For) {
+ return self.parse_for_expr(Some(label), lo, attrs)
+ }
+ if self.eat_keyword(keywords::Loop) {
+ return self.parse_loop_expr(Some(label), lo, attrs)
+ }
+ if self.token == token::OpenDelim(token::Brace) {
+ return self.parse_block_expr(Some(label),
+ lo,
+ BlockCheckMode::Default,
+ attrs);
+ }
+ let msg = "expected `while`, `for`, `loop` or `{` after a label";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err);
+ }
+ if self.eat_keyword(keywords::Loop) {
+ let lo = self.prev_span;
+ return self.parse_loop_expr(None, lo, attrs);
+ }
+ if self.eat_keyword(keywords::Continue) {
+ let label = self.eat_label();
+ let ex = ExprKind::Continue(label);
+ let hi = self.prev_span;
+ return Ok(self.mk_expr(lo.to(hi), ex, attrs));
+ }
+ if self.eat_keyword(keywords::Match) {
+ let match_sp = self.prev_span;
+ return self.parse_match_expr(attrs).map_err(|mut err| {
+ err.span_label(match_sp, "while parsing this match expression");
+ err
+ });
+ }
+ if self.eat_keyword(keywords::Unsafe) {
+ return self.parse_block_expr(
+ None,
+ lo,
+ BlockCheckMode::Unsafe(ast::UserProvided),
+ attrs);
+ }
+ if self.is_do_catch_block() {
+ let mut db = self.fatal("found removed `do catch` syntax");
+ db.help("Following RFC #2388, the new non-placeholder syntax is `try`");
+ return Err(db);
+ }
+ if self.is_try_block() {
+ let lo = self.span;
+ assert!(self.eat_keyword(keywords::Try));
+ return self.parse_try_block(lo, attrs);
+ }
+ if self.eat_keyword(keywords::Return) {
+ if self.token.can_begin_expr() {
+ let e = self.parse_expr()?;
+ hi = e.span;
+ ex = ExprKind::Ret(Some(e));
+ } else {
+ ex = ExprKind::Ret(None);
+ }
+ } else if self.eat_keyword(keywords::Break) {
+ let label = self.eat_label();
+ let e = if self.token.can_begin_expr()
+ && !(self.token == token::OpenDelim(token::Brace)
+ && self.restrictions.contains(
+ Restrictions::NO_STRUCT_LITERAL)) {
+ Some(self.parse_expr()?)
+ } else {
+ None
+ };
+ ex = ExprKind::Break(label, e);
+ hi = self.prev_span;
+ } else if self.eat_keyword(keywords::Yield) {
+ if self.token.can_begin_expr() {
+ let e = self.parse_expr()?;
+ hi = e.span;
+ ex = ExprKind::Yield(Some(e));
+ } else {
+ ex = ExprKind::Yield(None);
+ }
+ } else if self.token.is_keyword(keywords::Let) {
+ // Catch this syntax error here, instead of in `parse_ident`, so
+ // that we can explicitly mention that let is not to be used as an expression
+ let mut db = self.fatal("expected expression, found statement (`let`)");
+ db.span_label(self.span, "expected expression");
+ db.note("variable declaration using `let` is a statement");
+ return Err(db);
+ } else if self.token.is_path_start() {
+ let pth = self.parse_path(PathStyle::Expr)?;
+
+ // `!`, as an operator, is prefix, so we know this isn't that
+ if self.eat(&token::Not) {
+ // MACRO INVOCATION expression
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let hi = self.prev_span;
+ let node = Mac_ { path: pth, tts, delim };
+ return Ok(self.mk_mac_expr(lo.to(hi), node, attrs))
+ }
+ if self.check(&token::OpenDelim(token::Brace)) {
+ // This is a struct literal, unless we're prohibited
+ // from parsing struct literals here.
+ let prohibited = self.restrictions.contains(
+ Restrictions::NO_STRUCT_LITERAL
+ );
+ if !prohibited {
+ return self.parse_struct_expr(lo, pth, attrs);
+ }
+ }
+
+ hi = pth.span;
+ ex = ExprKind::Path(None, pth);
+ } else {
+ if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
+ // Don't complain about bare semicolons after unclosed braces
+ // recovery in order to keep the error count down. Fixing the
+ // delimiters will possibly also fix the bare semicolon found in
+ // expression context. For example, silence the following error:
+ // ```
+ // error: expected expression, found `;`
+ // --> file.rs:2:13
+ // |
+ // 2 | foo(bar(;
+ // | ^ expected expression
+ // ```
+ self.bump();
+ return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
+ }
+ match self.parse_literal_maybe_minus() {
+ Ok(expr) => {
+ hi = expr.span;
+ ex = expr.node.clone();
+ }
+ Err(mut err) => {
+ self.cancel(&mut err);
+ let msg = format!("expected expression, found {}",
+ self.this_token_descr());
+ let mut err = self.fatal(&msg);
+ err.span_label(self.span, "expected expression");
+ return Err(err);
+ }
+ }
+ }
+ }
+ }
+
+ let expr = Expr { node: ex, span: lo.to(hi), id: ast::DUMMY_NODE_ID, attrs };
+ let expr = self.maybe_recover_from_bad_qpath(expr, true)?;
+
+ return Ok(P(expr));
+ }
+
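+    /// Parses the fields and optional `..base` of a struct literal; the path has
+    /// already been parsed and the opening `{` is the current token.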
+ fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>> {
+ let struct_sp = lo.to(self.prev_span);
+ self.bump();
+ let mut fields = Vec::new();
+ let mut base = None;
+
+ attrs.extend(self.parse_inner_attributes()?);
+
+ while self.token != token::CloseDelim(token::Brace) {
+ if self.eat(&token::DotDot) {
+ let exp_span = self.prev_span;
+ match self.parse_expr() {
+ Ok(e) => {
+ base = Some(e);
+ }
+ Err(mut e) => {
+ e.emit();
+ self.recover_stmt();
+ }
+ }
+ if self.token == token::Comma {
+ let mut err = self.sess.span_diagnostic.mut_span_err(
+ exp_span.to(self.prev_span),
+ "cannot use a comma after the base struct",
+ );
+ err.span_suggestion_short(
+ self.span,
+ "remove this comma",
+ String::new(),
+ Applicability::MachineApplicable
+ );
+ err.note("the base struct must always be the last field");
+ err.emit();
+ self.recover_stmt();
+ }
+ break;
+ }
+
+ let mut recovery_field = None;
+ if let token::Ident(ident, _) = self.token {
+ if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
+ // Use in case of error after field-looking code: `S { foo: () with a }`
+ let mut ident = ident.clone();
+ ident.span = self.span;
+ recovery_field = Some(ast::Field {
+ ident,
+ span: self.span,
+ expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
+ is_shorthand: false,
+ attrs: ThinVec::new(),
+ });
+ }
+ }
+ let mut parsed_field = None;
+ match self.parse_field() {
+ Ok(f) => parsed_field = Some(f),
+ Err(mut e) => {
+ e.span_label(struct_sp, "while parsing this struct");
+ e.emit();
+
+ // If the next token is a comma, then try to parse
+ // what comes next as additional fields, rather than
+ // bailing out until next `}`.
+ if self.token != token::Comma {
+ self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
+ if self.token != token::Comma {
+ break;
+ }
+ }
+ }
+ }
+
+ match self.expect_one_of(&[token::Comma],
+ &[token::CloseDelim(token::Brace)]) {
+ Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
+ // only include the field if there's no parse error for the field name
+ fields.push(f);
+ }
+ Err(mut e) => {
+ if let Some(f) = recovery_field {
+ fields.push(f);
+ }
+ e.span_label(struct_sp, "while parsing this struct");
+ e.emit();
+ self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
+ self.eat(&token::Comma);
+ }
+ }
+ }
+
+ let span = lo.to(self.span);
+ self.expect(&token::CloseDelim(token::Brace))?;
+ return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
+ }
+
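+    /// Returns the already-parsed outer attributes if some were supplied,
+    /// otherwise parses outer attributes from the current position.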
+ fn parse_or_use_outer_attributes(&mut self,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, ThinVec<Attribute>> {
+ if let Some(attrs) = already_parsed_attrs {
+ Ok(attrs)
+ } else {
+ self.parse_outer_attributes().map(|a| a.into())
+ }
+ }
+
+ /// Parses a block or unsafe block.
+ fn parse_block_expr(&mut self, opt_label: Option<Label>,
+ lo: Span, blk_mode: BlockCheckMode,
+ outer_attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>> {
+ self.expect(&token::OpenDelim(token::Brace))?;
+
+ let mut attrs = outer_attrs;
+ attrs.extend(self.parse_inner_attributes()?);
+
+ let blk = self.parse_block_tail(lo, blk_mode)?;
+ return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
+ }
+
+ /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
+ fn parse_dot_or_call_expr(&mut self,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
+
+ let b = self.parse_bottom_expr();
+ let (span, b) = self.interpolated_or_expr_span(b)?;
+ self.parse_dot_or_call_expr_with(b, span, attrs)
+ }
+
+ fn parse_dot_or_call_expr_with(&mut self,
+ e0: P<Expr>,
+ lo: Span,
+ mut attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>> {
+ // Stitch the list of outer attributes onto the return value.
+ // A little bit ugly, but the best way given the current code
+ // structure
+ self.parse_dot_or_call_expr_with_(e0, lo)
+ .map(|expr|
+ expr.map(|mut expr| {
+ attrs.extend::<Vec<_>>(expr.attrs.into());
+ expr.attrs = attrs;
+ match expr.node {
+ ExprKind::If(..) | ExprKind::IfLet(..) => {
+ if !expr.attrs.is_empty() {
+ // Just point to the first attribute in there...
+ let span = expr.attrs[0].span;
+
+ self.span_err(span,
+ "attributes are not yet allowed on `if` \
+ expressions");
+ }
+ }
+ _ => {}
+ }
+ expr
+ })
+ )
+ }
+
+ // Assuming we have just parsed `.`, continue parsing into an expression.
+ fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
+ let segment = self.parse_path_segment(PathStyle::Expr, true)?;
+ self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
+
+ Ok(match self.token {
+ token::OpenDelim(token::Paren) => {
+ // Method call `expr.f()`
+ let mut args = self.parse_unspanned_seq(
+ &token::OpenDelim(token::Paren),
+ &token::CloseDelim(token::Paren),
+ SeqSep::trailing_allowed(token::Comma),
+ |p| Ok(p.parse_expr()?)
+ )?;
+ args.insert(0, self_arg);
+
+ let span = lo.to(self.prev_span);
+ self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new())
+ }
+ _ => {
+ // Field access `expr.f`
+ if let Some(args) = segment.args {
+ self.span_err(args.span(),
+ "field expressions may not have generic arguments");
+ }
+
+ let span = lo.to(self.prev_span);
+ self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
+ }
+ })
+ }
+
+ /// This function checks if there are trailing angle brackets and produces
+ /// a diagnostic to suggest removing them.
+ ///
+ /// ```ignore (diagnostic)
+ /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
+ /// ^^ help: remove extra angle brackets
+ /// ```
+ fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) {
+ // This function is intended to be invoked after parsing a path segment where there are two
+ // cases:
+ //
+ // 1. A specific token is expected after the path segment.
+ // eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
+ // `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
+ // 2. No specific token is expected after the path segment.
+ // eg. `x.foo` (field access)
+ //
+ // This function is called after parsing `.foo` and before parsing the token `end` (if
+ // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
+ // `Foo::<Bar>`.
+
+ // We only care about trailing angle brackets if we previously parsed angle bracket
+ // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
+ // removed in this case:
+ //
+ // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
+ //
+ // This case is particularly tricky as we won't notice it just looking at the tokens -
+ // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
+ // have already been parsed):
+ //
+ // `x.foo::<u32>>>(3)`
+ let parsed_angle_bracket_args = segment.args
+ .as_ref()
+ .map(|args| args.is_angle_bracketed())
+ .unwrap_or(false);
+
+ debug!(
+ "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
+ parsed_angle_bracket_args,
+ );
+ if !parsed_angle_bracket_args {
+ return;
+ }
+
+ // Keep the span at the start so we can highlight the sequence of `>` characters to be
+ // removed.
+ let lo = self.span;
+
+ // We need to look-ahead to see if we have `>` characters without moving the cursor forward
+ // (since we might have the field access case and the characters we're eating are
+ // actual operators and not trailing characters - ie `x.foo >> 3`).
+ let mut position = 0;
+
+ // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
+ // many of each (so we can correctly pluralize our error messages) and continue to
+ // advance.
+ let mut number_of_shr = 0;
+ let mut number_of_gt = 0;
+ while self.look_ahead(position, |t| {
+ trace!("check_trailing_angle_brackets: t={:?}", t);
+ if *t == token::BinOp(token::BinOpToken::Shr) {
+ number_of_shr += 1;
+ true
+ } else if *t == token::Gt {
+ number_of_gt += 1;
+ true
+ } else {
+ false
+ }
+ }) {
+ position += 1;
+ }
+
+ // If we didn't find any trailing `>` characters, then we have nothing to error about.
+ debug!(
+ "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
+ number_of_gt, number_of_shr,
+ );
+ if number_of_gt < 1 && number_of_shr < 1 {
+ return;
+ }
+
+ // Finally, double check that we have our end token as otherwise this is the
+ // second case.
+ if self.look_ahead(position, |t| {
+ trace!("check_trailing_angle_brackets: t={:?}", t);
+ *t == end
+ }) {
+ // Eat from where we started until the end token so that parsing can continue
+ // as if we didn't have those extra angle brackets.
+ self.eat_to_tokens(&[&end]);
+ let span = lo.until(self.span);
+
+ let plural = number_of_gt > 1 || number_of_shr >= 1;
+ self.diagnostic()
+ .struct_span_err(
+ span,
+ &format!("unmatched angle bracket{}", if plural { "s" } else { "" }),
+ )
+ .span_suggestion(
+ span,
+ &format!("remove extra angle bracket{}", if plural { "s" } else { "" }),
+ String::new(),
+ Applicability::MachineApplicable,
+ )
+ .emit();
+ }
+ }
+
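+    /// Repeatedly parses postfix forms following `e0`: `?`, field accesses and
+    /// method calls after `.`, call syntax `(...)`, and indexing `[...]`.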
+ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
+ let mut e = e0;
+ let mut hi;
+ loop {
+ // expr?
+ while self.eat(&token::Question) {
+ let hi = self.prev_span;
+ e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new());
+ }
+
+ // expr.f
+ if self.eat(&token::Dot) {
+ match self.token {
+ token::Ident(..) => {
+ e = self.parse_dot_suffix(e, lo)?;
+ }
+ token::Literal(token::Integer(name), _) => {
+ let span = self.span;
+ self.bump();
+ let field = ExprKind::Field(e, Ident::new(name, span));
+ e = self.mk_expr(lo.to(span), field, ThinVec::new());
+ }
+ token::Literal(token::Float(n), _suf) => {
+ self.bump();
+ let fstr = n.as_str();
+ let mut err = self.diagnostic()
+ .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
+ err.span_label(self.prev_span, "unexpected token");
+ if fstr.chars().all(|x| "0123456789.".contains(x)) {
+ let float = match fstr.parse::<f64>().ok() {
+ Some(f) => f,
+ None => continue,
+ };
+ let sugg = pprust::to_string(|s| {
+ use crate::print::pprust::PrintState;
+ s.popen()?;
+ s.print_expr(&e)?;
+ s.s.word( ".")?;
+ s.print_usize(float.trunc() as usize)?;
+ s.pclose()?;
+ s.s.word(".")?;
+ s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
+ });
+ err.span_suggestion(
+ lo.to(self.prev_span),
+ "try parenthesizing the first index",
+ sugg,
+ Applicability::MachineApplicable
+ );
+ }
+ return Err(err);
+
+ }
+ _ => {
+ // FIXME Could factor this out into non_fatal_unexpected or something.
+ let actual = self.this_token_to_string();
+ self.span_err(self.span, &format!("unexpected token: `{}`", actual));
+ }
+ }
+ continue;
+ }
+ if self.expr_is_complete(&e) { break; }
+ match self.token {
+ // expr(...)
+ token::OpenDelim(token::Paren) => {
+ let es = self.parse_unspanned_seq(
+ &token::OpenDelim(token::Paren),
+ &token::CloseDelim(token::Paren),
+ SeqSep::trailing_allowed(token::Comma),
+ |p| Ok(p.parse_expr()?)
+ )?;
+ hi = self.prev_span;
+
+ let nd = self.mk_call(e, es);
+ e = self.mk_expr(lo.to(hi), nd, ThinVec::new());
+ }
+
+ // expr[...]
+ // Could be either an index expression or a slicing expression.
+ token::OpenDelim(token::Bracket) => {
+ self.bump();
+ let ix = self.parse_expr()?;
+ hi = self.span;
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ let index = self.mk_index(e, ix);
+ e = self.mk_expr(lo.to(hi), index, ThinVec::new())
+ }
+ _ => return Ok(e)
+ }
+ }
+ return Ok(e);
+ }
+
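+    /// Replaces interpolated identifier and lifetime tokens with plain ones,
+    /// and reports a `$ident` that is not a known macro variable.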
+ crate fn process_potential_macro_variable(&mut self) {
+ let (token, span) = match self.token {
+ token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
+ self.look_ahead(1, |t| t.is_ident()) => {
+ self.bump();
+ let name = match self.token {
+ token::Ident(ident, _) => ident,
+ _ => unreachable!()
+ };
+ let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
+ err.span_label(self.span, "unknown macro variable");
+ err.emit();
+ self.bump();
+ return
+ }
+ token::Interpolated(ref nt) => {
+ self.meta_var_span = Some(self.span);
+ // Interpolated identifier and lifetime tokens are replaced with usual identifier
+ // and lifetime tokens, so the former are never encountered during normal parsing.
+ match **nt {
+ token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
+ token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
+ _ => return,
+ }
+ }
+ _ => return,
+ };
+ self.token = token;
+ self.span = span;
+ }
+
+ /// Parses a single token tree from the input.
+ crate fn parse_token_tree(&mut self) -> TokenTree {
+ match self.token {
+ token::OpenDelim(..) => {
+ let frame = mem::replace(&mut self.token_cursor.frame,
+ self.token_cursor.stack.pop().unwrap());
+ self.span = frame.span.entire();
+ self.bump();
+ TokenTree::Delimited(
+ frame.span,
+ frame.delim,
+ frame.tree_cursor.stream.into(),
+ )
+ },
+ token::CloseDelim(_) | token::Eof => unreachable!(),
+ _ => {
+ let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
+ self.bump();
+ TokenTree::Token(span, token)
+ }
+ }
+ }
+
+    /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
+ pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
+ let mut tts = Vec::new();
+ while self.token != token::Eof {
+ tts.push(self.parse_token_tree());
+ }
+ Ok(tts)
+ }
+
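+    /// Collects token trees into a `TokenStream` until EOF or a closing
+    /// delimiter is reached.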
+ pub fn parse_tokens(&mut self) -> TokenStream {
+ let mut result = Vec::new();
+ loop {
+ match self.token {
+ token::Eof | token::CloseDelim(..) => break,
+ _ => result.push(self.parse_token_tree().into()),
+ }
+ }
+ TokenStream::new(result)
+ }
+
+    /// Parses a prefix-unary-operator expression.
+ fn parse_prefix_expr(&mut self,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
+ let lo = self.span;
+ // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
+ let (hi, ex) = match self.token {
+ token::Not => {
+ self.bump();
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), self.mk_unary(UnOp::Not, e))
+ }
+ // Suggest `!` for bitwise negation when encountering a `~`
+ token::Tilde => {
+ self.bump();
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ let span_of_tilde = lo;
+ let mut err = self.diagnostic()
+ .struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator");
+ err.span_suggestion_short(
+ span_of_tilde,
+ "use `!` to perform bitwise negation",
+ "!".to_owned(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ (lo.to(span), self.mk_unary(UnOp::Not, e))
+ }
+ token::BinOp(token::Minus) => {
+ self.bump();
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), self.mk_unary(UnOp::Neg, e))
+ }
+ token::BinOp(token::Star) => {
+ self.bump();
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), self.mk_unary(UnOp::Deref, e))
+ }
+ token::BinOp(token::And) | token::AndAnd => {
+ self.expect_and()?;
+ let m = self.parse_mutability();
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), ExprKind::AddrOf(m, e))
+ }
+ token::Ident(..) if self.token.is_keyword(keywords::In) => {
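+                // Obsolete `in PLACE { EXPR }` emplacement syntax, parsed into
+                // `ExprKind::ObsoleteInPlace`.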
+ self.bump();
+ let place = self.parse_expr_res(
+ Restrictions::NO_STRUCT_LITERAL,
+ None,
+ )?;
+ let blk = self.parse_block()?;
+ let span = blk.span;
+ let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new());
+ (lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr))
+ }
+ token::Ident(..) if self.token.is_keyword(keywords::Box) => {
+ self.bump();
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), ExprKind::Box(e))
+ }
+ token::Ident(..) if self.token.is_ident_named("not") => {
+ // `not` is just an ordinary identifier in Rust-the-language,
+ // but as `rustc`-the-compiler, we can issue clever diagnostics
+ // for confused users who really want to say `!`
+ let token_cannot_continue_expr = |t: &token::Token| match *t {
+ // These tokens can start an expression after `!`, but
+ // can't continue an expression after an ident
+ token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+ token::Literal(..) | token::Pound => true,
+ token::Interpolated(ref nt) => match **nt {
+ token::NtIdent(..) | token::NtExpr(..) |
+ token::NtBlock(..) | token::NtPath(..) => true,
+ _ => false,
+ },
+ _ => false
+ };
+ let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
+ if cannot_continue_expr {
+ self.bump();
+ // Emit the error ...
+ let mut err = self.diagnostic()
+ .struct_span_err(self.span,
+ &format!("unexpected {} after identifier",
+ self.this_token_descr()));
+ // span the `not` plus trailing whitespace to avoid
+ // trailing whitespace after the `!` in our suggestion
+ let to_replace = self.sess.source_map()
+ .span_until_non_whitespace(lo.to(self.span));
+ err.span_suggestion_short(
+ to_replace,
+ "use `!` to perform logical negation",
+ "!".to_owned(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ // —and recover! (just as if we were in the block
+ // for the `token::Not` arm)
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), self.mk_unary(UnOp::Not, e))
+ } else {
+ return self.parse_dot_or_call_expr(Some(attrs));
+ }
+ }
+ _ => { return self.parse_dot_or_call_expr(Some(attrs)); }
+ };
+ return Ok(self.mk_expr(lo.to(hi), ex, attrs));
+ }
+
+ /// Parses an associative expression.
+ ///
+ /// This parses an expression accounting for associativity and precedence of the operators in
+ /// the expression.
+ #[inline]
+ fn parse_assoc_expr(&mut self,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ self.parse_assoc_expr_with(0, already_parsed_attrs.into())
+ }
+
+ /// Parses an associative expression with operators of at least `min_prec` precedence.
+ fn parse_assoc_expr_with(&mut self,
+ min_prec: usize,
+ lhs: LhsExpr)
+ -> PResult<'a, P<Expr>> {
+ let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
+ expr
+ } else {
+ let attrs = match lhs {
+ LhsExpr::AttributesParsed(attrs) => Some(attrs),
+ _ => None,
+ };
+ if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
+ return self.parse_prefix_range_expr(attrs);
+ } else {
+ self.parse_prefix_expr(attrs)?
+ }
+ };
+
+ if self.expr_is_complete(&lhs) {
+ // Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
+ return Ok(lhs);
+ }
+ self.expected_tokens.push(TokenType::Operator);
+ while let Some(op) = AssocOp::from_token(&self.token) {
+
+ // Adjust the span for interpolated LHS to point to the `$lhs` token and not to what
+ // it refers to. Interpolated identifiers are unwrapped early and never show up here
+ // as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process
+ // it as "interpolated", it doesn't change the answer for non-interpolated idents.
+ let lhs_span = match (self.prev_token_kind, &lhs.node) {
+ (PrevTokenKind::Interpolated, _) => self.prev_span,
+ (PrevTokenKind::Ident, &ExprKind::Path(None, ref path))
+ if path.segments.len() == 1 => self.prev_span,
+ _ => lhs.span,
+ };
+
+ let cur_op_span = self.span;
+ let restrictions = if op.is_assign_like() {
+ self.restrictions & Restrictions::NO_STRUCT_LITERAL
+ } else {
+ self.restrictions
+ };
+ if op.precedence() < min_prec {
+ break;
+ }
+ // Check for deprecated `...` syntax
+ if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
+ self.err_dotdotdot_syntax(self.span);
+ }
+
+ self.bump();
+ if op.is_comparison() {
+ self.check_no_chained_comparison(&lhs, &op);
+ }
+ // Special cases:
+ if op == AssocOp::As {
+ lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
+ continue
+ } else if op == AssocOp::Colon {
+ lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) {
+ Ok(lhs) => lhs,
+ Err(mut err) => {
+ err.span_label(self.span,
+ "expecting a type here because of type ascription");
+ let cm = self.sess.source_map();
+ let cur_pos = cm.lookup_char_pos(self.span.lo());
+ let op_pos = cm.lookup_char_pos(cur_op_span.hi());
+ if cur_pos.line != op_pos.line {
+ err.span_suggestion(
+ cur_op_span,
+ "try using a semicolon",
+ ";".to_string(),
+ Applicability::MaybeIncorrect // speculative
+ );
+ }
+ return Err(err);
+ }
+ };
+ continue
+ } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
+ // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to
+ // generalise it to the Fixity::None code.
+ //
+ // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other
+ // two variants are handled with `parse_prefix_range_expr` call above.
+ let rhs = if self.is_at_start_of_range_notation_rhs() {
+ Some(self.parse_assoc_expr_with(op.precedence() + 1,
+ LhsExpr::NotYetParsed)?)
+ } else {
+ None
+ };
+ let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs {
+ x.span
+ } else {
+ cur_op_span
+ });
+ let limits = if op == AssocOp::DotDot {
+ RangeLimits::HalfOpen
+ } else {
+ RangeLimits::Closed
+ };
+
+ let r = self.mk_range(Some(lhs), rhs, limits)?;
+ lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new());
+ break
+ }
+
+ let rhs = match op.fixity() {
+ Fixity::Right => self.with_res(
+ restrictions - Restrictions::STMT_EXPR,
+ |this| {
+ this.parse_assoc_expr_with(op.precedence(),
+ LhsExpr::NotYetParsed)
+ }),
+ Fixity::Left => self.with_res(
+ restrictions - Restrictions::STMT_EXPR,
+ |this| {
+ this.parse_assoc_expr_with(op.precedence() + 1,
+ LhsExpr::NotYetParsed)
+ }),
+ // We currently have no non-associative operators that are not handled above by
+ // the special cases. The code is here only for future convenience.
+ Fixity::None => self.with_res(
+ restrictions - Restrictions::STMT_EXPR,
+ |this| {
+ this.parse_assoc_expr_with(op.precedence() + 1,
+ LhsExpr::NotYetParsed)
+ }),
+ }?;
+
+ // Make sure that the span of the parent node is larger than the span of lhs and rhs,
+ // including the attributes.
+ let lhs_span = lhs
+ .attrs
+ .iter()
+ .filter(|a| a.style == AttrStyle::Outer)
+ .next()
+ .map_or(lhs_span, |a| a.span);
+ let span = lhs_span.to(rhs.span);
+ lhs = match op {
+ AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
+ AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor |
+ AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight |
+ AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual |
+ AssocOp::Greater | AssocOp::GreaterEqual => {
+ let ast_op = op.to_ast_binop().unwrap();
+ let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
+ self.mk_expr(span, binary, ThinVec::new())
+ }
+ AssocOp::Assign =>
+ self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
+ AssocOp::ObsoleteInPlace =>
+ self.mk_expr(span, ExprKind::ObsoleteInPlace(lhs, rhs), ThinVec::new()),
+ AssocOp::AssignOp(k) => {
+ let aop = match k {
+ token::Plus => BinOpKind::Add,
+ token::Minus => BinOpKind::Sub,
+ token::Star => BinOpKind::Mul,
+ token::Slash => BinOpKind::Div,
+ token::Percent => BinOpKind::Rem,
+ token::Caret => BinOpKind::BitXor,
+ token::And => BinOpKind::BitAnd,
+ token::Or => BinOpKind::BitOr,
+ token::Shl => BinOpKind::Shl,
+ token::Shr => BinOpKind::Shr,
+ };
+ let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
+ self.mk_expr(span, aopexpr, ThinVec::new())
+ }
+ AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
+ self.bug("AssocOp should have been handled by special case")
+ }
+ };
+
+ if op.fixity() == Fixity::None { break }
+ }
+ Ok(lhs)
+ }
+
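+    /// Parses the type after an `as` cast or `:` type ascription. If the type
+    /// fails to parse, retries it as a path to recover from cases like
+    /// `x as usize < y`, where `<` was taken as the start of generic arguments.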
+ fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
+ expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
+ -> PResult<'a, P<Expr>> {
+ let mk_expr = |this: &mut Self, rhs: P<Ty>| {
+ this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new())
+ };
+
+ // Save the state of the parser before parsing type normally, in case there is a
+ // LessThan comparison after this cast.
+ let parser_snapshot_before_type = self.clone();
+ match self.parse_ty_no_plus() {
+ Ok(rhs) => {
+ Ok(mk_expr(self, rhs))
+ }
+ Err(mut type_err) => {
+ // Rewind to before attempting to parse the type with generics, to recover
+ // from situations like `x as usize < y` in which we first tried to parse
+ // `usize < y` as a type with generic arguments.
+ let parser_snapshot_after_type = self.clone();
+ mem::replace(self, parser_snapshot_before_type);
+
+ match self.parse_path(PathStyle::Expr) {
+ Ok(path) => {
+ let (op_noun, op_verb) = match self.token {
+ token::Lt => ("comparison", "comparing"),
+ token::BinOp(token::Shl) => ("shift", "shifting"),
+ _ => {
+ // We can end up here even without `<` being the next token, for
+ // example because `parse_ty_no_plus` returns `Err` on keywords,
+ // but `parse_path` returns `Ok` on them due to error recovery.
+ // Return original error and parser state.
+ mem::replace(self, parser_snapshot_after_type);
+ return Err(type_err);
+ }
+ };
+
+ // Successfully parsed the type path leaving a `<` yet to parse.
+ type_err.cancel();
+
+ // Report non-fatal diagnostics, keep `x as usize` as an expression
+ // in AST and continue parsing.
+ let msg = format!("`<` is interpreted as a start of generic \
+ arguments for `{}`, not a {}", path, op_noun);
+ let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
+ err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span),
+ "interpreted as generic arguments");
+ err.span_label(self.span, format!("not interpreted as {}", op_noun));
+
+ let expr = mk_expr(self, P(Ty {
+ span: path.span,
+ node: TyKind::Path(None, path),
+ id: ast::DUMMY_NODE_ID
+ }));
+
+ let expr_str = self.sess.source_map().span_to_snippet(expr.span)
+ .unwrap_or_else(|_| pprust::expr_to_string(&expr));
+ err.span_suggestion(
+ expr.span,
+ &format!("try {} the cast value", op_verb),
+ format!("({})", expr_str),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+
+ Ok(expr)
+ }
+ Err(mut path_err) => {
+ // Couldn't parse as a path, return original error and parser state.
+ path_err.cancel();
+ mem::replace(self, parser_snapshot_after_type);
+ Err(type_err)
+ }
+ }
+ }
+ }
+ }
+
+ /// Produce an error if comparison operators are chained (RFC #558).
+ /// We only need to check lhs, not rhs, because all comparison ops
+    /// have the same precedence and are left-associative.
+ fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) {
+ debug_assert!(outer_op.is_comparison(),
+ "check_no_chained_comparison: {:?} is not comparison",
+ outer_op);
+ match lhs.node {
+ ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
+ // respan to include both operators
+ let op_span = op.span.to(self.span);
+ let mut err = self.diagnostic().struct_span_err(op_span,
+ "chained comparison operators require parentheses");
+ if op.node == BinOpKind::Lt &&
+ *outer_op == AssocOp::Less || // Include `<` to provide this recommendation
+ *outer_op == AssocOp::Greater // even in a case like the following:
+ { // Foo<Bar<Baz<Qux, ()>>>
+ err.help(
+ "use `::<...>` instead of `<...>` if you meant to specify type arguments");
+ err.help("or use `(...)` if you meant to specify fn arguments");
+ }
+ err.emit();
+ }
+ _ => {}
+ }
+ }
+
+    /// Parses prefix forms of range notation: `..expr`, `..`, `..=expr`.
+ fn parse_prefix_range_expr(&mut self,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ // Check for deprecated `...` syntax
+ if self.token == token::DotDotDot {
+ self.err_dotdotdot_syntax(self.span);
+ }
+
+ debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+ "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
+ self.token);
+ let tok = self.token.clone();
+ let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
+ let lo = self.span;
+ let mut hi = self.span;
+ self.bump();
+ let opt_end = if self.is_at_start_of_range_notation_rhs() {
+ // RHS must be parsed with more associativity than the dots.
+ let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1;
+ Some(self.parse_assoc_expr_with(next_prec,
+ LhsExpr::NotYetParsed)
+ .map(|x|{
+ hi = x.span;
+ x
+ })?)
+ } else {
+ None
+ };
+ let limits = if tok == token::DotDot {
+ RangeLimits::HalfOpen
+ } else {
+ RangeLimits::Closed
+ };
+
+ let r = self.mk_range(None, opt_end, limits)?;
+ Ok(self.mk_expr(lo.to(hi), r, attrs))
+ }
+
+ fn is_at_start_of_range_notation_rhs(&self) -> bool {
+ if self.token.can_begin_expr() {
+ // parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
+ if self.token == token::OpenDelim(token::Brace) {
+ return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
+ }
+ true
+ } else {
+ false
+ }
+ }
+
+ /// Parses an `if` or `if let` expression (`if` token already eaten).
+ fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ if self.check_keyword(keywords::Let) {
+ return self.parse_if_let_expr(attrs);
+ }
+ let lo = self.prev_span;
+ let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+
+ // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
+ // verify that the last statement is either an implicit return (no `;`) or an explicit
+ // return. This won't catch blocks with an explicit `return`, but that would be caught by
+ // the dead code lint.
+ if self.eat_keyword(keywords::Else) || !cond.returns() {
+ let sp = self.sess.source_map().next_point(lo);
+ let mut err = self.diagnostic()
+                .struct_span_err(sp, "missing condition for `if` statement");
+ err.span_label(sp, "expected if condition here");
+ return Err(err)
+ }
+ let not_block = self.token != token::OpenDelim(token::Brace);
+ let thn = self.parse_block().map_err(|mut err| {
+ if not_block {
+ err.span_label(lo, "this `if` statement has a condition, but no block");
+ }
+ err
+ })?;
+ let mut els: Option<P<Expr>> = None;
+ let mut hi = thn.span;
+ if self.eat_keyword(keywords::Else) {
+ let elexpr = self.parse_else_expr()?;
+ hi = elexpr.span;
+ els = Some(elexpr);
+ }
+ Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
+ }
+
+ /// Parses an `if let` expression (`if` token already eaten).
+ fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>> {
+ let lo = self.prev_span;
+ self.expect_keyword(keywords::Let)?;
+ let pats = self.parse_pats()?;
+ self.expect(&token::Eq)?;
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+ let thn = self.parse_block()?;
+ let (hi, els) = if self.eat_keyword(keywords::Else) {
+ let expr = self.parse_else_expr()?;
+ (expr.span, Some(expr))
+ } else {
+ (thn.span, None)
+ };
+ Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
+ }
+
+    /// Parses a closure expression such as `move |args| expr`, including any
+    /// leading `static`, `async`, and `move` qualifiers.
+ fn parse_lambda_expr(&mut self,
+ attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>>
+ {
+ let lo = self.span;
+ let movability = if self.eat_keyword(keywords::Static) {
+ Movability::Static
+ } else {
+ Movability::Movable
+ };
+ let asyncness = if self.span.rust_2018() {
+ self.parse_asyncness()
+ } else {
+ IsAsync::NotAsync
+ };
+ let capture_clause = if self.eat_keyword(keywords::Move) {
+ CaptureBy::Value
+ } else {
+ CaptureBy::Ref
+ };
+ let decl = self.parse_fn_block_decl()?;
+ let decl_hi = self.prev_span;
+ let body = match decl.output {
+ FunctionRetTy::Default(_) => {
+ let restrictions = self.restrictions - Restrictions::STMT_EXPR;
+ self.parse_expr_res(restrictions, None)?
+ },
+ _ => {
+ // If an explicit return type is given, require a
+ // block to appear (RFC 968).
+ let body_lo = self.span;
+ self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
+ }
+ };
+
+ Ok(self.mk_expr(
+ lo.to(body.span),
+ ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
+ attrs))
+ }
+
+ // `else` token already eaten
+ fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
+ if self.eat_keyword(keywords::If) {
+ return self.parse_if_expr(ThinVec::new());
+ } else {
+ let blk = self.parse_block()?;
+ return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new()));
+ }
+ }
+
+    /// Parses a `for ... in` expression (`for` token already eaten).
+ fn parse_for_expr(&mut self, opt_label: Option<Label>,
+ span_lo: Span,
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ // Parse: `for <src_pat> in <src_expr> <src_loop_block>`
+
+ let pat = self.parse_top_level_pat()?;
+ if !self.eat_keyword(keywords::In) {
+ let in_span = self.prev_span.between(self.span);
+ let mut err = self.sess.span_diagnostic
+ .struct_span_err(in_span, "missing `in` in `for` loop");
+ err.span_suggestion_short(
+ in_span, "try adding `in` here", " in ".into(),
+ // has been misleading, at least in the past (closed Issue #48492)
+ Applicability::MaybeIncorrect
+ );
+ err.emit();
+ }
+ let in_span = self.prev_span;
+ if self.eat_keyword(keywords::In) {
+ // a common typo: `for _ in in bar {}`
+ let mut err = self.sess.span_diagnostic.struct_span_err(
+ self.prev_span,
+ "expected iterable, found keyword `in`",
+ );
+ err.span_suggestion_short(
+ in_span.until(self.prev_span),
+ "remove the duplicated `in`",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ err.note("if you meant to use emplacement syntax, it is obsolete (for now, anyway)");
+ err.note("for more information on the status of emplacement syntax, see <\
+ https://github.com/rust-lang/rust/issues/27779#issuecomment-378416911>");
+ err.emit();
+ }
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+ let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+
+ let hi = self.prev_span;
+ Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
+ }
+
+ /// Parses a `while` or `while let` expression (`while` token already eaten).
+ fn parse_while_expr(&mut self, opt_label: Option<Label>,
+ span_lo: Span,
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ if self.token.is_keyword(keywords::Let) {
+ return self.parse_while_let_expr(opt_label, span_lo, attrs);
+ }
+ let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+ let (iattrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+ let span = span_lo.to(body.span);
+ return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
+ }
+
+ /// Parses a `while let` expression (`while` token already eaten).
+ fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
+ span_lo: Span,
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ self.expect_keyword(keywords::Let)?;
+ let pats = self.parse_pats()?;
+ self.expect(&token::Eq)?;
+ let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+ let (iattrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+ let span = span_lo.to(body.span);
+ return Ok(self.mk_expr(span, ExprKind::WhileLet(pats, expr, body, opt_label), attrs));
+ }
+
+    /// Parses a `loop { ... }` expression (`loop` token already eaten).
+ fn parse_loop_expr(&mut self, opt_label: Option<Label>,
+ span_lo: Span,
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ let (iattrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+ let span = span_lo.to(body.span);
+ Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
+ }
+
+    /// Parses an `async {...}` or `async move {...}` block expression.
+ pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>>
+ {
+ let span_lo = self.span;
+ self.expect_keyword(keywords::Async)?;
+ let capture_clause = if self.eat_keyword(keywords::Move) {
+ CaptureBy::Value
+ } else {
+ CaptureBy::Ref
+ };
+ let (iattrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+ Ok(self.mk_expr(
+ span_lo.to(body.span),
+ ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
+ }
+
+ /// Parses a `try {...}` expression (`try` token already eaten).
+ fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>>
+ {
+ let (iattrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+ Ok(self.mk_expr(span_lo.to(body.span), ExprKind::TryBlock(body), attrs))
+ }
+
+ // `match` token already eaten
+ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+ let match_span = self.prev_span;
+ let lo = self.prev_span;
+ let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
+ None)?;
+ if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
+ if self.token == token::Token::Semi {
+ e.span_suggestion_short(
+ match_span,
+ "try removing this `match`",
+ String::new(),
+ Applicability::MaybeIncorrect // speculative
+ );
+ }
+ return Err(e)
+ }
+ attrs.extend(self.parse_inner_attributes()?);
+
+ let mut arms: Vec<Arm> = Vec::new();
+ while self.token != token::CloseDelim(token::Brace) {
+ match self.parse_arm() {
+ Ok(arm) => arms.push(arm),
+ Err(mut e) => {
+ // Recover by skipping to the end of the block.
+ e.emit();
+ self.recover_stmt();
+ let span = lo.to(self.span);
+ if self.token == token::CloseDelim(token::Brace) {
+ self.bump();
+ }
+ return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs));
+ }
+ }
+ }
+ let hi = self.span;
+ self.bump();
+ return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
+ }
+
+ crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
+ maybe_whole!(self, NtArm, |x| x);
+
+ let attrs = self.parse_outer_attributes()?;
+ let pats = self.parse_pats()?;
+ let guard = if self.eat_keyword(keywords::If) {
+ Some(Guard::If(self.parse_expr()?))
+ } else {
+ None
+ };
+ let arrow_span = self.span;
+ self.expect(&token::FatArrow)?;
+ let arm_start_span = self.span;
+
+ let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
+ .map_err(|mut err| {
+ err.span_label(arrow_span, "while parsing the `match` arm starting here");
+ err
+ })?;
+
+ let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
+ && self.token != token::CloseDelim(token::Brace);
+
+ if require_comma {
+ let cm = self.sess.source_map();
+ self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
+ .map_err(|mut err| {
+ match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
+ (Ok(ref expr_lines), Ok(ref arm_start_lines))
+ if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
+ && expr_lines.lines.len() == 2
+ && self.token == token::FatArrow => {
+ // We check whether there's any trailing code in the parse span,
+ // if there isn't, we very likely have the following:
+ //
+ // X | &Y => "y"
+ // | -- - missing comma
+ // | |
+ // | arrow_span
+ // X | &X => "x"
+ // | - ^^ self.span
+ // | |
+ // | parsed until here as `"y" & X`
+ err.span_suggestion_short(
+ cm.next_point(arm_start_span),
+ "missing a comma here to end this `match` arm",
+ ",".to_owned(),
+ Applicability::MachineApplicable
+ );
+ }
+ _ => {
+ err.span_label(arrow_span,
+ "while parsing the `match` arm starting here");
+ }
+ }
+ err
+ })?;
+ } else {
+ self.eat(&token::Comma);
+ }
+
+ Ok(ast::Arm {
+ attrs,
+ pats,
+ guard,
+ body: expr,
+ })
+ }
+
+ /// Parses an expression.
+ #[inline]
+ pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
+ self.parse_expr_res(Restrictions::empty(), None)
+ }
+
+ /// Evaluates the closure with restrictions in place.
+ ///
+    /// After the closure is evaluated, the restrictions are reset.
+ fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
+ where F: FnOnce(&mut Self) -> T
+ {
+ let old = self.restrictions;
+ self.restrictions = r;
+ let r = f(self);
+ self.restrictions = old;
+        return r;
+    }
+
+ /// Parses an expression, subject to the given restrictions.
+ #[inline]
+ fn parse_expr_res(&mut self, r: Restrictions,
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, P<Expr>> {
+ self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
+ }
+
+    /// Parses the RHS of a local variable declaration (e.g., `= 14;`).
+ fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
+ if self.eat(&token::Eq) {
+ Ok(Some(self.parse_expr()?))
+ } else if skip_eq {
+ Ok(Some(self.parse_expr()?))
+ } else {
+ Ok(None)
+ }
+ }
+
+    /// Parses patterns, separated by `|`s.
+ fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
+ // Allow a '|' before the pats (RFC 1925 + RFC 2530)
+ self.eat(&token::BinOp(token::Or));
+
+ let mut pats = Vec::new();
+ loop {
+ pats.push(self.parse_top_level_pat()?);
+
+ if self.token == token::OrOr {
+ let mut err = self.struct_span_err(self.span,
+ "unexpected token `||` after pattern");
+ err.span_suggestion(
+ self.span,
+ "use a single `|` to specify multiple patterns",
+ "|".to_owned(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ self.bump();
+ } else if self.eat(&token::BinOp(token::Or)) {
+ // This is a No-op. Continue the loop to parse the next
+ // pattern.
+ } else {
+ return Ok(pats);
+ }
+ };
+ }
+
+ // Parses a parenthesized list of patterns like
+ // `()`, `(p)`, `(p,)`, `(p, q)`, or `(p, .., q)`. Returns:
+ // - a vector of the patterns that were parsed
+ // - an option indicating the index of the `..` element
+ // - a boolean indicating whether a trailing comma was present.
+ // Trailing commas are significant because (p) and (p,) are different patterns.
+ fn parse_parenthesized_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
+ self.expect(&token::OpenDelim(token::Paren))?;
+ let result = self.parse_pat_list()?;
+ self.expect(&token::CloseDelim(token::Paren))?;
+ Ok(result)
+ }
+
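+    /// Parses a comma-separated list of patterns, recording the position of a
+    /// `..` element (if any) and whether a trailing comma was present.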
+ fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
+ let mut fields = Vec::new();
+ let mut ddpos = None;
+ let mut trailing_comma = false;
+ loop {
+ if self.eat(&token::DotDot) {
+ if ddpos.is_none() {
+ ddpos = Some(fields.len());
+ } else {
+ // Emit a friendly error, ignore `..` and continue parsing
+ self.struct_span_err(
+ self.prev_span,
+ "`..` can only be used once per tuple or tuple struct pattern",
+ )
+ .span_label(self.prev_span, "can only be used once per pattern")
+ .emit();
+ }
+ } else if !self.check(&token::CloseDelim(token::Paren)) {
+ fields.push(self.parse_pat(None)?);
+ } else {
+ break
+ }
+
+ trailing_comma = self.eat(&token::Comma);
+ if !trailing_comma {
+ break
+ }
+ }
+
+ if ddpos == Some(fields.len()) && trailing_comma {
+ // `..` needs to be followed by `)` or `, pat`, `..,)` is disallowed.
+ let msg = "trailing comma is not permitted after `..`";
+ self.struct_span_err(self.prev_span, msg)
+ .span_label(self.prev_span, msg)
+ .emit();
+ }
+
+ Ok((fields, ddpos, trailing_comma))
+ }
+
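+    /// Parses the elements of a slice pattern, returning the patterns before
+    /// the `..` subslice, the optional subslice pattern itself, and the
+    /// patterns after it.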
+ fn parse_pat_vec_elements(
+ &mut self,
+ ) -> PResult<'a, (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
+ let mut before = Vec::new();
+ let mut slice = None;
+ let mut after = Vec::new();
+ let mut first = true;
+ let mut before_slice = true;
+
+ while self.token != token::CloseDelim(token::Bracket) {
+ if first {
+ first = false;
+ } else {
+ self.expect(&token::Comma)?;
+
+ if self.token == token::CloseDelim(token::Bracket)
+ && (before_slice || !after.is_empty()) {
+ break
+ }
+ }
+
+ if before_slice {
+ if self.eat(&token::DotDot) {
+
+ if self.check(&token::Comma) ||
+ self.check(&token::CloseDelim(token::Bracket)) {
+ slice = Some(P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Wild,
+ span: self.prev_span,
+ }));
+ before_slice = false;
+ }
+ continue
+ }
+ }
+
+ let subpat = self.parse_pat(None)?;
+ if before_slice && self.eat(&token::DotDot) {
+ slice = Some(subpat);
+ before_slice = false;
+ } else if before_slice {
+ before.push(subpat);
+ } else {
+ after.push(subpat);
+ }
+ }
+
+ Ok((before, slice, after))
+ }
+
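+    /// Parses one field of a struct pattern: either `fieldname: pat` or the
+    /// shorthand `(box) (ref) (mut) fieldname` binding form.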
+ fn parse_pat_field(
+ &mut self,
+ lo: Span,
+ attrs: Vec<Attribute>
+ ) -> PResult<'a, source_map::Spanned<ast::FieldPat>> {
+ // Check if a colon exists one ahead. This means we're parsing a fieldname.
+ let hi;
+ let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
+ // Parsing a pattern of the form "fieldname: pat"
+ let fieldname = self.parse_field_name()?;
+ self.bump();
+ let pat = self.parse_pat(None)?;
+ hi = pat.span;
+ (pat, fieldname, false)
+ } else {
+ // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
+ let is_box = self.eat_keyword(keywords::Box);
+ let boxed_span = self.span;
+ let is_ref = self.eat_keyword(keywords::Ref);
+ let is_mut = self.eat_keyword(keywords::Mut);
+ let fieldname = self.parse_ident()?;
+ hi = self.prev_span;
+
+ let bind_type = match (is_ref, is_mut) {
+ (true, true) => BindingMode::ByRef(Mutability::Mutable),
+ (true, false) => BindingMode::ByRef(Mutability::Immutable),
+ (false, true) => BindingMode::ByValue(Mutability::Mutable),
+ (false, false) => BindingMode::ByValue(Mutability::Immutable),
+ };
+ let fieldpat = P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Ident(bind_type, fieldname, None),
+ span: boxed_span.to(hi),
+ });
+
+ let subpat = if is_box {
+ P(Pat {
+ id: ast::DUMMY_NODE_ID,
+ node: PatKind::Box(fieldpat),
+ span: lo.to(hi),
+ })
+ } else {
+ fieldpat
+ };
+ (subpat, fieldname, true)
+ };
+
+ Ok(source_map::Spanned {
+ span: lo.to(hi),
+ node: ast::FieldPat {
+ ident: fieldname,
+ pat: subpat,
+ is_shorthand,
+ attrs: attrs.into(),
+ }
+ })
+ }
+
+ /// Parses the fields of a struct-like pattern.
+ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
+ let mut fields = Vec::new();
+ let mut etc = false;
+ let mut ate_comma = true;
+ let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
+ let mut etc_span = None;
+
+ while self.token != token::CloseDelim(token::Brace) {
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+
+ // check that a comma comes after every field
+ if !ate_comma {
+ let err = self.struct_span_err(self.prev_span, "expected `,`");
+ if let Some(mut delayed) = delayed_err {
+ delayed.emit();
+ }
+ return Err(err);
+ }
+ ate_comma = false;
+
+ if self.check(&token::DotDot) || self.token == token::DotDotDot {
+ etc = true;
+ let mut etc_sp = self.span;
+
+ if self.token == token::DotDotDot { // Issue #46718
+ // Accept `...` as if it were `..` to avoid further errors
+ let mut err = self.struct_span_err(self.span,
+ "expected field pattern, found `...`");
+ err.span_suggestion(
+ self.span,
+ "to omit remaining fields, use one fewer `.`",
+ "..".to_owned(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ }
+ self.bump(); // `..` || `...`
+
+ if self.token == token::CloseDelim(token::Brace) {
+ etc_span = Some(etc_sp);
+ break;
+ }
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
+
+ err.span_label(self.span, "expected `}`");
+ let mut comma_sp = None;
+ if self.token == token::Comma { // Issue #49257
+ etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
+ err.span_label(etc_sp,
+ "`..` must be at the end and cannot have a trailing comma");
+ comma_sp = Some(self.span);
+ self.bump();
+ ate_comma = true;
+ }
+
+ etc_span = Some(etc_sp.until(self.span));
+ if self.token == token::CloseDelim(token::Brace) {
+ // If the struct looks otherwise well formed, recover and continue.
+ if let Some(sp) = comma_sp {
+ err.span_suggestion_short(
+ sp,
+ "remove this comma",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
+ break;
+ } else if self.token.is_ident() && ate_comma {
+ // Accept fields coming after `..,`.
+ // This way we avoid "pattern missing fields" errors afterwards.
+ // We delay this error until the end in order to have a span for a
+ // suggested fix.
+ if let Some(mut delayed_err) = delayed_err {
+ delayed_err.emit();
+ return Err(err);
+ } else {
+ delayed_err = Some(err);
+ }
+ } else {
+ if let Some(mut err) = delayed_err {
+ err.emit();
+ }
+ return Err(err);
+ }
+ }
+
+ fields.push(match self.parse_pat_field(lo, attrs) {
+ Ok(field) => field,
+ Err(err) => {
+ if let Some(mut delayed_err) = delayed_err {
+ delayed_err.emit();
+ }
+ return Err(err);
+ }
+ });
+ ate_comma = self.eat(&token::Comma);
+ }
+
+ if let Some(mut err) = delayed_err {
+ if let Some(etc_span) = etc_span {
+ err.multipart_suggestion(
+ "move the `..` to the end of the field list",
+ vec![
+ (etc_span, String::new()),
+ (self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
+ ],
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
+ }
+ return Ok((fields, etc));
+ }
+
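+    /// Parses one endpoint of a range pattern: either a (possibly qualified)
+    /// path or a literal with an optional leading minus.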
+ fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
+ if self.token.is_path_start() {
+ let lo = self.span;
+ let (qself, path) = if self.eat_lt() {
+ // Parse a qualified path
+ let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+ (Some(qself), path)
+ } else {
+ // Parse an unqualified path
+ (None, self.parse_path(PathStyle::Expr)?)
+ };
+ let hi = self.prev_span;
+ Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
+ } else {
+ self.parse_literal_maybe_minus()
+ }
+ }
+
+    /// Helper to decide whether to parse as an ident binding or to try
+    /// something more complex like range patterns.
+ fn parse_as_ident(&mut self) -> bool {
+ self.look_ahead(1, |t| match *t {
+ token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
+ token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
+ // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
+ // range pattern branch
+ token::DotDot => None,
+ _ => Some(true),
+ }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
+ token::Comma | token::CloseDelim(token::Bracket) => true,
+ _ => false,
+ }))
+ }
+
+ /// A wrapper around `parse_pat` with some special error handling for the
+ /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
+ /// to subpatterns within such).
+ fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
+ let pat = self.parse_pat(None)?;
+ if self.token == token::Comma {
+ // An unexpected comma after a top-level pattern is a clue that the
+ // user (perhaps more accustomed to some other language) forgot the
+ // parentheses in what should have been a tuple pattern; return a
+ // suggestion-enhanced error here rather than choking on the comma
+ // later.
+ let comma_span = self.span;
+ self.bump();
+ if let Err(mut err) = self.parse_pat_list() {
+ // We didn't expect this to work anyway; we just wanted
+ // to advance to the end of the comma-sequence so we know
+ // the span to suggest parenthesizing
+ err.cancel();
+ }
+ let seq_span = pat.span.to(self.prev_span);
+ let mut err = self.struct_span_err(comma_span,
+ "unexpected `,` in pattern");
+ if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
+ err.span_suggestion(
+ seq_span,
+ "try adding parentheses to match on a tuple..",
+ format!("({})", seq_snippet),
+ Applicability::MachineApplicable
+ ).span_suggestion(
+ seq_span,
+ "..or a vertical bar to match on multiple alternatives",
+ format!("{}", seq_snippet.replace(",", " |")),
+ Applicability::MachineApplicable
+ );
+ }
+ return Err(err);
+ }
+ Ok(pat)
+ }
+
+ /// Parses a pattern.
+ pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
+ self.parse_pat_with_range_pat(true, expected)
+ }
+
+ /// Parses a pattern, with a setting for whether modern range patterns (e.g., `a..=b`,
+ /// `a..b`) are allowed.
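+ ///
+ /// When range patterns are not allowed, e.g. inside a `&` pattern, something like `&0..=5`
+ /// is rejected below with a suggestion to parenthesize it as `&(0..=5)`.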
+ fn parse_pat_with_range_pat(
+ &mut self,
+ allow_range_pat: bool,
+ expected: Option<&'static str>,
+ ) -> PResult<'a, P<Pat>> {
+ maybe_whole!(self, NtPat, |x| x);
+
+ let lo = self.span;
+ let pat;
+ match self.token {
+ token::BinOp(token::And) | token::AndAnd => {
+ // Parse &pat / &mut pat
+ self.expect_and()?;
+ let mutbl = self.parse_mutability();
+ if let token::Lifetime(ident) = self.token {
+ let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
+ ident));
+ err.span_label(self.span, "unexpected lifetime");
+ return Err(err);
+ }
+ let subpat = self.parse_pat_with_range_pat(false, expected)?;
+ pat = PatKind::Ref(subpat, mutbl);
+ }
+ token::OpenDelim(token::Paren) => {
+ // Parse (pat,pat,pat,...) as tuple pattern
+ let (fields, ddpos, trailing_comma) = self.parse_parenthesized_pat_list()?;
+ pat = if fields.len() == 1 && ddpos.is_none() && !trailing_comma {
+ PatKind::Paren(fields.into_iter().nth(0).unwrap())
+ } else {
+ PatKind::Tuple(fields, ddpos)
+ };
+ }
+ token::OpenDelim(token::Bracket) => {
+ // Parse [pat,pat,...] as slice pattern
+ self.bump();
+ let (before, slice, after) = self.parse_pat_vec_elements()?;
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ pat = PatKind::Slice(before, slice, after);
+ }
+ // At this point, token != &, &&, (, [
+ _ => if self.eat_keyword(keywords::Underscore) {
+ // Parse _
+ pat = PatKind::Wild;
+ } else if self.eat_keyword(keywords::Mut) {
+ // Parse mut ident @ pat / mut ref ident @ pat
+ let mutref_span = self.prev_span.to(self.span);
+ let binding_mode = if self.eat_keyword(keywords::Ref) {
+ self.diagnostic()
+ .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
+ .span_suggestion(
+ mutref_span,
+ "try switching the order",
+ "ref mut".into(),
+ Applicability::MachineApplicable
+ ).emit();
+ BindingMode::ByRef(Mutability::Mutable)
+ } else {
+ BindingMode::ByValue(Mutability::Mutable)
+ };
+ pat = self.parse_pat_ident(binding_mode)?;
+ } else if self.eat_keyword(keywords::Ref) {
+ // Parse ref ident @ pat / ref mut ident @ pat
+ let mutbl = self.parse_mutability();
+ pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
+ } else if self.eat_keyword(keywords::Box) {
+ // Parse box pat
+ let subpat = self.parse_pat_with_range_pat(false, None)?;
+ pat = PatKind::Box(subpat);
+ } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
+ self.parse_as_ident() {
+ // Parse ident @ pat
+ // This can give false positives and parse nullary enums;
+ // they are dealt with later in resolve.
+ let binding_mode = BindingMode::ByValue(Mutability::Immutable);
+ pat = self.parse_pat_ident(binding_mode)?;
+ } else if self.token.is_path_start() {
+ // Parse pattern starting with a path
+ let (qself, path) = if self.eat_lt() {
+ // Parse a qualified path
+ let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+ (Some(qself), path)
+ } else {
+ // Parse an unqualified path
+ (None, self.parse_path(PathStyle::Expr)?)
+ };
+ match self.token {
+ token::Not if qself.is_none() => {
+ // Parse macro invocation
+ self.bump();
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let mac = respan(lo.to(self.prev_span), Mac_ { path, tts, delim });
+ pat = PatKind::Mac(mac);
+ }
+ token::DotDotDot | token::DotDotEq | token::DotDot => {
+ let end_kind = match self.token {
+ token::DotDot => RangeEnd::Excluded,
+ token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
+ token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
+ _ => panic!("can only parse `..`/`...`/`..=` for ranges \
+ (checked above)"),
+ };
+ let op_span = self.span;
+ // Parse range
+ let span = lo.to(self.prev_span);
+ let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
+ self.bump();
+ let end = self.parse_pat_range_end()?;
+ let op = Spanned { span: op_span, node: end_kind };
+ pat = PatKind::Range(begin, end, op);
+ }
+ token::OpenDelim(token::Brace) => {
+ if qself.is_some() {
+ let msg = "unexpected `{` after qualified path";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err);
+ }
+ // Parse struct pattern
+ self.bump();
+ let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
+ e.emit();
+ self.recover_stmt();
+ (vec![], false)
+ });
+ self.bump();
+ pat = PatKind::Struct(path, fields, etc);
+ }
+ token::OpenDelim(token::Paren) => {
+ if qself.is_some() {
+ let msg = "unexpected `(` after qualified path";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ return Err(err);
+ }
+ // Parse tuple struct or enum pattern
+ let (fields, ddpos, _) = self.parse_parenthesized_pat_list()?;
+ pat = PatKind::TupleStruct(path, fields, ddpos)
+ }
+ _ => pat = PatKind::Path(qself, path),
+ }
+ } else {
+ // Try to parse everything else as literal with optional minus
+ match self.parse_literal_maybe_minus() {
+ Ok(begin) => {
+ let op_span = self.span;
+ if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
+ self.check(&token::DotDotDot) {
+ let end_kind = if self.eat(&token::DotDotDot) {
+ RangeEnd::Included(RangeSyntax::DotDotDot)
+ } else if self.eat(&token::DotDotEq) {
+ RangeEnd::Included(RangeSyntax::DotDotEq)
+ } else if self.eat(&token::DotDot) {
+ RangeEnd::Excluded
+ } else {
+ panic!("impossible case: we already matched \
+ on a range-operator token")
+ };
+ let end = self.parse_pat_range_end()?;
+ let op = Spanned { span: op_span, node: end_kind };
+ pat = PatKind::Range(begin, end, op);
+ } else {
+ pat = PatKind::Lit(begin);
+ }
+ }
+ Err(mut err) => {
+ self.cancel(&mut err);
+ let expected = expected.unwrap_or("pattern");
+ let msg = format!(
+ "expected {}, found {}",
+ expected,
+ self.this_token_descr(),
+ );
+ let mut err = self.fatal(&msg);
+ err.span_label(self.span, format!("expected {}", expected));
+ return Err(err);
+ }
+ }
+ }
+ }
+
+ let pat = Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID };
+ let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
+
+ if !allow_range_pat {
+ match pat.node {
+ PatKind::Range(
+ _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
+ ) => {},
+ PatKind::Range(..) => {
+ let mut err = self.struct_span_err(
+ pat.span,
+ "the range pattern here has ambiguous interpretation",
+ );
+ err.span_suggestion(
+ pat.span,
+ "add parentheses to clarify the precedence",
+ format!("({})", pprust::pat_to_string(&pat)),
+ // "ambiguous interpretation" implies that we have to be guessing
+ Applicability::MaybeIncorrect
+ );
+ return Err(err);
+ }
+ _ => {}
+ }
+ }
+
+ Ok(P(pat))
+ }
+
+ /// Parses `ident` or `ident @ pat`.
+ /// Used by the `copy foo` and `ref foo` patterns to give a good
+ /// error message when parsing mistakes like `ref foo(a, b)`.
+ fn parse_pat_ident(&mut self,
+ binding_mode: ast::BindingMode)
+ -> PResult<'a, PatKind> {
+ let ident = self.parse_ident()?;
+ let sub = if self.eat(&token::At) {
+ Some(self.parse_pat(Some("binding pattern"))?)
+ } else {
+ None
+ };
+
+ // just to be friendly, if they write something like
+ // ref Some(i)
+ // we end up here with ( as the current token. This shortly
+ // leads to a parse error. Note that if there is no explicit
+ // binding mode then we do not end up here, because the lookahead
+ // will direct us over to parse_enum_variant()
+ if self.token == token::OpenDelim(token::Paren) {
+ return Err(self.span_fatal(
+ self.prev_span,
+ "expected identifier, found enum pattern"))
+ }
+
+ Ok(PatKind::Ident(binding_mode, ident, sub))
+ }
+
+ /// Parses a local variable declaration.
+ fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
+ let lo = self.prev_span;
+ let pat = self.parse_top_level_pat()?;
+
+ let (err, ty) = if self.eat(&token::Colon) {
+ // Save the state of the parser before parsing type normally, in case there is a `:`
+ // instead of an `=` typo.
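+ // For example, `let x: 5;` where the user meant `let x = 5;`.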
+ let parser_snapshot_before_type = self.clone();
+ let colon_sp = self.prev_span;
+ match self.parse_ty() {
+ Ok(ty) => (None, Some(ty)),
+ Err(mut err) => {
+ // Rewind to before attempting to parse the type and continue parsing
+ let parser_snapshot_after_type = self.clone();
+ mem::replace(self, parser_snapshot_before_type);
+
+ let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
+ err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
+ (Some((parser_snapshot_after_type, colon_sp, err)), None)
+ }
+ }
+ } else {
+ (None, None)
+ };
+ let init = match (self.parse_initializer(err.is_some()), err) {
+ (Ok(init), None) => { // init parsed, ty parsed
+ init
+ }
+ (Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error
+ // Could parse the type as if it were the initializer; it is likely there was a
+ // typo in the code: `:` instead of `=`. Add a suggestion and emit the error.
+ err.span_suggestion_short(
+ colon_sp,
+ "use `=` if you meant to assign",
+ "=".to_string(),
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ // As this was parsed successfully, continue as if the code has been fixed for the
+ // rest of the file. It will still fail due to the emitted error, but we avoid
+ // extra noise.
+ init
+ }
+ (Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error
+ init_err.cancel();
+ // Couldn't parse the type nor the initializer, only raise the type error and
+ // return to the parser state before parsing the type as the initializer.
+ // let x: <parse_error>;
+ mem::replace(self, snapshot);
+ return Err(ty_err);
+ }
+ (Err(err), None) => { // init error, ty parsed
+ // Couldn't parse the initializer and we're not attempting to recover a failed
+ // parse of the type, return the error.
+ return Err(err);
+ }
+ };
+ let hi = if self.token == token::Semi {
+ self.span
+ } else {
+ self.prev_span
+ };
+ Ok(P(ast::Local {
+ ty,
+ pat,
+ init,
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ attrs,
+ }))
+ }
+
+ /// Parses a structure field.
+ fn parse_name_and_ty(&mut self,
+ lo: Span,
+ vis: Visibility,
+ attrs: Vec<Attribute>)
+ -> PResult<'a, StructField> {
+ let name = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ Ok(StructField {
+ span: lo.to(self.prev_span),
+ ident: Some(name),
+ vis,
+ id: ast::DUMMY_NODE_ID,
+ ty,
+ attrs,
+ })
+ }
+
+ /// Emits an expected-item-after-attributes error.
+ fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
+ let message = match attrs.last() {
+ Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
+ _ => "expected item after attributes",
+ };
+
+ let mut err = self.diagnostic().struct_span_err(self.prev_span, message);
+ if attrs.last().unwrap().is_sugared_doc {
+ err.span_label(self.prev_span, "this doc comment doesn't document anything");
+ }
+ Err(err)
+ }
+
+ /// Parse a statement. This stops just before trailing semicolons on everything but items.
+ /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
+ pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
+ Ok(self.parse_stmt_(true))
+ }
+
+ // Eat tokens until we can be relatively sure we reached the end of the
+ // statement. This is something of a best-effort heuristic.
+ //
+ // We terminate when we find an unmatched `}` (without consuming it).
+ fn recover_stmt(&mut self) {
+ self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
+ }
+
+ // If `break_on_semi` is `Break`, then we will stop consuming tokens after
+ // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
+ // approximate - it can mean we break too early due to macros, but that
+ // should only lead to sub-optimal recovery, not inaccurate parsing).
+ //
+ // If `break_on_block` is `Break`, then we will stop consuming tokens
+ // after finding (and consuming) a brace-delimited block.
+ fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
+ let mut brace_depth = 0;
+ let mut bracket_depth = 0;
+ let mut in_block = false;
+ debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
+ break_on_semi, break_on_block);
+ loop {
+ debug!("recover_stmt_ loop {:?}", self.token);
+ match self.token {
+ token::OpenDelim(token::DelimToken::Brace) => {
+ brace_depth += 1;
+ self.bump();
+ if break_on_block == BlockMode::Break &&
+ brace_depth == 1 &&
+ bracket_depth == 0 {
+ in_block = true;
+ }
+ }
+ token::OpenDelim(token::DelimToken::Bracket) => {
+ bracket_depth += 1;
+ self.bump();
+ }
+ token::CloseDelim(token::DelimToken::Brace) => {
+ if brace_depth == 0 {
+ debug!("recover_stmt_ return - close delim {:?}", self.token);
+ break;
+ }
+ brace_depth -= 1;
+ self.bump();
+ if in_block && bracket_depth == 0 && brace_depth == 0 {
+ debug!("recover_stmt_ return - block end {:?}", self.token);
+ break;
+ }
+ }
+ token::CloseDelim(token::DelimToken::Bracket) => {
+ bracket_depth -= 1;
+ if bracket_depth < 0 {
+ bracket_depth = 0;
+ }
+ self.bump();
+ }
+ token::Eof => {
+ debug!("recover_stmt_ return - Eof");
+ break;
+ }
+ token::Semi => {
+ self.bump();
+ if break_on_semi == SemiColonMode::Break &&
+ brace_depth == 0 &&
+ bracket_depth == 0 {
+ debug!("recover_stmt_ return - Semi");
+ break;
+ }
+ }
+ token::Comma => {
+ if break_on_semi == SemiColonMode::Comma &&
+ brace_depth == 0 &&
+ bracket_depth == 0 {
+ debug!("recover_stmt_ return - Semi");
+ break;
+ } else {
+ self.bump();
+ }
+ }
+ _ => {
+ self.bump()
+ }
+ }
+ }
+ }
+
+ fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
+ self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
+ e.emit();
+ self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
+ None
+ })
+ }
+
+ fn is_async_block(&mut self) -> bool {
+ self.token.is_keyword(keywords::Async) &&
+ (
+ ( // `async move {`
+ self.look_ahead(1, |t| t.is_keyword(keywords::Move)) &&
+ self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
+ ) || ( // `async {`
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
+ )
+ )
+ }
+
+ fn is_do_catch_block(&mut self) -> bool {
+ self.token.is_keyword(keywords::Do) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
+ self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
+ !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
+ }
+
+ fn is_try_block(&mut self) -> bool {
+ self.token.is_keyword(keywords::Try) &&
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
+ self.span.rust_2018() &&
+ // prevent `while try {} {}`, `if try {} {} else {}`, etc.
+ !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
+ }
+
+ fn is_union_item(&self) -> bool {
+ self.token.is_keyword(keywords::Union) &&
+ self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
+ }
+
+ fn is_crate_vis(&self) -> bool {
+ self.token.is_keyword(keywords::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
+ }
+
+ fn is_existential_type_decl(&self) -> bool {
+ self.token.is_keyword(keywords::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Type))
+ }
+
+ fn is_auto_trait_item(&mut self) -> bool {
+ // auto trait
+ (self.token.is_keyword(keywords::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ || // unsafe auto trait
+ (self.token.is_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Auto)) &&
+ self.look_ahead(2, |t| t.is_keyword(keywords::Trait)))
+ }
+
+ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
+ -> PResult<'a, Option<P<Item>>> {
+ let token_lo = self.span;
+ let (ident, def) = match self.token {
+ token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
+ self.bump();
+ let ident = self.parse_ident()?;
+ let tokens = if self.check(&token::OpenDelim(token::Brace)) {
+ match self.parse_token_tree() {
+ TokenTree::Delimited(_, _, tts) => tts,
+ _ => unreachable!(),
+ }
+ } else if self.check(&token::OpenDelim(token::Paren)) {
+ let args = self.parse_token_tree();
+ let body = if self.check(&token::OpenDelim(token::Brace)) {
+ self.parse_token_tree()
+ } else {
+ self.unexpected()?;
+ unreachable!()
+ };
+ TokenStream::new(vec![
+ args.into(),
+ TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+ body.into(),
+ ])
+ } else {
+ self.unexpected()?;
+ unreachable!()
+ };
+
+ (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
+ }
+ token::Ident(ident, _) if ident.name == "macro_rules" &&
+ self.look_ahead(1, |t| *t == token::Not) => {
+ let prev_span = self.prev_span;
+ self.complain_if_pub_macro(&vis.node, prev_span);
+ self.bump();
+ self.bump();
+
+ let ident = self.parse_ident()?;
+ let (delim, tokens) = self.expect_delimited_token_tree()?;
+ if delim != MacDelimiter::Brace {
+ if !self.eat(&token::Semi) {
+ let msg = "macros that expand to items must either \
+ be surrounded with braces or followed by a semicolon";
+ self.span_err(self.prev_span, msg);
+ }
+ }
+
+ (ident, ast::MacroDef { tokens, legacy: true })
+ }
+ _ => return Ok(None),
+ };
+
+ let span = lo.to(self.prev_span);
+ Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec())))
+ }
+
+ fn parse_stmt_without_recovery(&mut self,
+ macro_legacy_warnings: bool)
+ -> PResult<'a, Option<Stmt>> {
+ maybe_whole!(self, NtStmt, |x| Some(x));
+
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+
+ Ok(Some(if self.eat_keyword(keywords::Let) {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Local(self.parse_local(attrs.into())?),
+ span: lo.to(self.prev_span),
+ }
+ } else if let Some(macro_def) = self.eat_macro_def(
+ &attrs,
+ &source_map::respan(lo, VisibilityKind::Inherited),
+ lo,
+ )? {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Item(macro_def),
+ span: lo.to(self.prev_span),
+ }
+ // Starts like a simple path, being careful to avoid contextual keywords
+ // such as union items, items with `crate` visibility, or auto trait items.
+ // Our goal here is to parse an arbitrary path `a::b::c`, but not something that starts
+ // like a path (one token) but is in fact not a path.
+ // `union::b::c` - path, `union U { ... }` - not a path.
+ // `crate::b::c` - path, `crate struct S;` - not a path.
+ } else if self.token.is_path_start() &&
+ !self.token.is_qpath_start() &&
+ !self.is_union_item() &&
+ !self.is_crate_vis() &&
+ !self.is_existential_type_decl() &&
+ !self.is_auto_trait_item() {
+ let pth = self.parse_path(PathStyle::Expr)?;
+
+ if !self.eat(&token::Not) {
+ let expr = if self.check(&token::OpenDelim(token::Brace)) {
+ self.parse_struct_expr(lo, pth, ThinVec::new())?
+ } else {
+ let hi = self.prev_span;
+ self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new())
+ };
+
+ let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
+ let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
+ this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
+ })?;
+
+ return Ok(Some(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Expr(expr),
+ span: lo.to(self.prev_span),
+ }));
+ }
+
+ // it's a macro invocation
+ let id = match self.token {
+ token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
+ _ => self.parse_ident()?,
+ };
+
+ // check that we're pointing at delimiters (need to check
+ // again after the `if`, because of `parse_ident`
+ // consuming more tokens).
+ match self.token {
+ token::OpenDelim(_) => {}
+ _ => {
+ // we only expect an ident if we didn't parse one
+ // above.
+ let ident_str = if id.name == keywords::Invalid.name() {
+ "identifier, "
+ } else {
+ ""
+ };
+ let tok_str = self.this_token_descr();
+ let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
+ ident_str,
+ tok_str));
+ err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
+ return Err(err)
+ },
+ }
+
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ let hi = self.prev_span;
+
+ let style = if delim == MacDelimiter::Brace {
+ MacStmtStyle::Braces
+ } else {
+ MacStmtStyle::NoBraces
+ };
+
+ if id.name == keywords::Invalid.name() {
+ let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
+ let node = if delim == MacDelimiter::Brace ||
+ self.token == token::Semi || self.token == token::Eof {
+ StmtKind::Mac(P((mac, style, attrs.into())))
+ }
+ // We used to incorrectly stop parsing macro-expanded statements here.
+ // If the next token will be an error anyway but could have parsed with the
+ // earlier behavior, stop parsing here and emit a warning to avoid breakage.
+ else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
+ // These can continue an expression, so we can't stop parsing and warn.
+ token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
+ token::BinOp(token::Minus) | token::BinOp(token::Star) |
+ token::BinOp(token::And) | token::BinOp(token::Or) |
+ token::AndAnd | token::OrOr |
+ token::DotDot | token::DotDotDot | token::DotDotEq => false,
+ _ => true,
+ } {
+ self.warn_missing_semicolon();
+ StmtKind::Mac(P((mac, style, attrs.into())))
+ } else {
+ let e = self.mk_mac_expr(lo.to(hi), mac.node, ThinVec::new());
+ let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
+ let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
+ StmtKind::Expr(e)
+ };
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ node,
+ }
+ } else {
+ // if it has a special ident, it's definitely an item
+ //
+ // Require a semicolon or braces.
+ if style != MacStmtStyle::Braces {
+ if !self.eat(&token::Semi) {
+ self.span_err(self.prev_span,
+ "macros that expand to items must \
+ either be surrounded with braces or \
+ followed by a semicolon");
+ }
+ }
+ let span = lo.to(hi);
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span,
+ node: StmtKind::Item({
+ self.mk_item(
+ span, id /*id is good here*/,
+ ItemKind::Mac(respan(span, Mac_ { path: pth, tts, delim })),
+ respan(lo, VisibilityKind::Inherited),
+ attrs)
+ }),
+ }
+ }
+ } else {
+ // FIXME: Bad copy of attrs
+ let old_directory_ownership =
+ mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
+ let item = self.parse_item_(attrs.clone(), false, true)?;
+ self.directory.ownership = old_directory_ownership;
+
+ match item {
+ Some(i) => Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(i.span),
+ node: StmtKind::Item(i),
+ },
+ None => {
+ let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
+ if !attrs.is_empty() {
+ if s.prev_token_kind == PrevTokenKind::DocComment {
+ s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
+ } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
+ s.span_err(s.span, "expected statement after outer attribute");
+ }
+ }
+ };
+
+ // Do not attempt to parse an expression if we're done here.
+ if self.token == token::Semi {
+ unused_attrs(&attrs, self);
+ self.bump();
+ return Ok(None);
+ }
+
+ if self.token == token::CloseDelim(token::Brace) {
+ unused_attrs(&attrs, self);
+ return Ok(None);
+ }
+
+ // Remainder are line-expr stmts.
+ let e = self.parse_expr_res(
+ Restrictions::STMT_EXPR, Some(attrs.into()))?;
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(e.span),
+ node: StmtKind::Expr(e),
+ }
+ }
+ }
+ }))
+ }
+
+ /// Checks if this expression is a successfully parsed statement.
+ fn expr_is_complete(&mut self, e: &Expr) -> bool {
+ self.restrictions.contains(Restrictions::STMT_EXPR) &&
+ !classify::expr_requires_semi_to_be_stmt(e)
+ }
+
+ /// Parses a block. No inner attributes are allowed.
+ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
+ maybe_whole!(self, NtBlock, |x| x);
+
+ let lo = self.span;
+
+ if !self.eat(&token::OpenDelim(token::Brace)) {
+ let sp = self.span;
+ let tok = self.this_token_descr();
+ let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
+ let do_not_suggest_help =
+ self.token.is_keyword(keywords::In) || self.token == token::Colon;
+
+ if self.token.is_ident_named("and") {
+ e.span_suggestion_short(
+ self.span,
+ "use `&&` instead of `and` for the boolean operator",
+ "&&".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ if self.token.is_ident_named("or") {
+ e.span_suggestion_short(
+ self.span,
+ "use `||` instead of `or` for the boolean operator",
+ "||".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+
+ // Check to see if the user has written something like
+ //
+ // if (cond)
+ // bar;
+ //
+ // Which is valid in other languages, but not Rust.
+ match self.parse_stmt_without_recovery(false) {
+ Ok(Some(stmt)) => {
+ if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
+ || do_not_suggest_help {
+ // if the next token is an open brace (e.g., `if a b {`), the place-
+ // inside-a-block suggestion would be more likely wrong than right
+ e.span_label(sp, "expected `{`");
+ return Err(e);
+ }
+ let mut stmt_span = stmt.span;
+ // expand the span to include the semicolon, if it exists
+ if self.eat(&token::Semi) {
+ stmt_span = stmt_span.with_hi(self.prev_span.hi());
+ }
+ let sugg = pprust::to_string(|s| {
+ use crate::print::pprust::{PrintState, INDENT_UNIT};
+ s.ibox(INDENT_UNIT)?;
+ s.bopen()?;
+ s.print_stmt(&stmt)?;
+ s.bclose_maybe_open(stmt.span, INDENT_UNIT, false)
+ });
+ e.span_suggestion(
+ stmt_span,
+ "try placing this code inside a block",
+ sugg,
+ // speculative, has been misleading in the past (closed Issue #46836)
+ Applicability::MaybeIncorrect
+ );
+ }
+ Err(mut e) => {
+ self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
+ self.cancel(&mut e);
+ }
+ _ => ()
+ }
+ e.span_label(sp, "expected `{`");
+ return Err(e);
+ }
+
+ self.parse_block_tail(lo, BlockCheckMode::Default)
+ }
+
+ /// Parses a block. Inner attributes are allowed.
+ fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
+ maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
+
+ let lo = self.span;
+ self.expect(&token::OpenDelim(token::Brace))?;
+ Ok((self.parse_inner_attributes()?,
+ self.parse_block_tail(lo, BlockCheckMode::Default)?))
+ }
+
+ /// Parses the rest of a block expression or function body.
+ /// Precondition: already parsed the '{'.
+ fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
+ let mut stmts = vec![];
+ while !self.eat(&token::CloseDelim(token::Brace)) {
+ let stmt = match self.parse_full_stmt(false) {
+ Err(mut err) => {
+ err.emit();
+ self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
+ Some(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Expr(DummyResult::raw_expr(self.span, true)),
+ span: self.span,
+ })
+ }
+ Ok(stmt) => stmt,
+ };
+ if let Some(stmt) = stmt {
+ stmts.push(stmt);
+ } else if self.token == token::Eof {
+ break;
+ } else {
+ // Found only `;` or `}`.
+ continue;
+ };
+ }
+ Ok(P(ast::Block {
+ stmts,
+ id: ast::DUMMY_NODE_ID,
+ rules: s,
+ span: lo.to(self.prev_span),
+ }))
+ }
+
+ /// Parses a statement, including the trailing semicolon.
+ crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+ // skip looking for a trailing semicolon when we have an interpolated statement
+ maybe_whole!(self, NtStmt, |x| Some(x));
+
+ let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
+ Some(stmt) => stmt,
+ None => return Ok(None),
+ };
+
+ match stmt.node {
+ StmtKind::Expr(ref expr) if self.token != token::Eof => {
+ // expression without semicolon
+ if classify::expr_requires_semi_to_be_stmt(expr) {
+ // Just check for errors and recover; do not eat semicolon yet.
+ if let Err(mut e) =
+ self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
+ {
+ e.emit();
+ self.recover_stmt();
+ }
+ }
+ }
+ StmtKind::Local(..) => {
+ // We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
+ if macro_legacy_warnings && self.token != token::Semi {
+ self.warn_missing_semicolon();
+ } else {
+ self.expect_one_of(&[], &[token::Semi])?;
+ }
+ }
+ _ => {}
+ }
+
+ if self.eat(&token::Semi) {
+ stmt = stmt.add_trailing_semicolon();
+ }
+
+ stmt.span = stmt.span.with_hi(self.prev_span.hi());
+ Ok(Some(stmt))
+ }
+
+ fn warn_missing_semicolon(&self) {
+ self.diagnostic().struct_span_warn(self.span, {
+ &format!("expected `;`, found {}", self.this_token_descr())
+ }).note({
+ "This was erroneously allowed and will become a hard error in a future release"
+ }).emit();
+ }
+
+ fn err_dotdotdot_syntax(&self, span: Span) {
+ self.diagnostic().struct_span_err(span, {
+ "unexpected token: `...`"
+ }).span_suggestion(
+ span, "use `..` for an exclusive range", "..".to_owned(),
+ Applicability::MaybeIncorrect
+ ).span_suggestion(
+ span, "or `..=` for an inclusive range", "..=".to_owned(),
+ Applicability::MaybeIncorrect
+ ).emit();
+ }
+
+ /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
+ ///
+ /// ```
+ /// BOUND = TY_BOUND | LT_BOUND
+ /// LT_BOUND = LIFETIME (e.g., `'a`)
+ /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
+ /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+ /// ```
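+ /// For example: `'a`, `?Sized + 'static`, or `for<'b> Trait<'b> + Send`.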
+ fn parse_generic_bounds_common(&mut self,
+ allow_plus: bool,
+ colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
+ let mut bounds = Vec::new();
+ let mut negative_bounds = Vec::new();
+ let mut last_plus_span = None;
+ loop {
+ // This needs to be synchronized with `Token::can_begin_bound`.
+ let is_bound_start = self.check_path() || self.check_lifetime() ||
+ self.check(&token::Not) || // used for error reporting only
+ self.check(&token::Question) ||
+ self.check_keyword(keywords::For) ||
+ self.check(&token::OpenDelim(token::Paren));
+ if is_bound_start {
+ let lo = self.span;
+ let has_parens = self.eat(&token::OpenDelim(token::Paren));
+ let inner_lo = self.span;
+ let is_negative = self.eat(&token::Not);
+ let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
+ if self.token.is_lifetime() {
+ if let Some(question_span) = question {
+ self.span_err(question_span,
+ "`?` may only modify trait bounds, not lifetime bounds");
+ }
+ bounds.push(GenericBound::Outlives(self.expect_lifetime()));
+ if has_parens {
+ let inner_span = inner_lo.to(self.prev_span);
+ self.expect(&token::CloseDelim(token::Paren))?;
+ let mut err = self.struct_span_err(
+ lo.to(self.prev_span),
+ "parenthesized lifetime bounds are not supported"
+ );
+ if let Ok(snippet) = self.sess.source_map().span_to_snippet(inner_span) {
+ err.span_suggestion_short(
+ lo.to(self.prev_span),
+ "remove the parentheses",
+ snippet.to_owned(),
+ Applicability::MachineApplicable
+ );
+ }
+ err.emit();
+ }
+ } else {
+ let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+ let path = self.parse_path(PathStyle::Type)?;
+ if has_parens {
+ self.expect(&token::CloseDelim(token::Paren))?;
+ }
+ let poly_span = lo.to(self.prev_span);
+ if is_negative {
+ negative_bounds.push(
+ last_plus_span.or(colon_span).unwrap()
+ .to(poly_span));
+ } else {
+ let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span);
+ let modifier = if question.is_some() {
+ TraitBoundModifier::Maybe
+ } else {
+ TraitBoundModifier::None
+ };
+ bounds.push(GenericBound::Trait(poly_trait, modifier));
+ }
+ }
+ } else {
+ break
+ }
+
+ if !allow_plus || !self.eat_plus() {
+ break
+ } else {
+ last_plus_span = Some(self.prev_span);
+ }
+ }
+
+ if !negative_bounds.is_empty() {
+ let plural = negative_bounds.len() > 1;
+ let mut err = self.struct_span_err(negative_bounds,
+ "negative trait bounds are not supported");
+ let bound_list = colon_span.unwrap().to(self.prev_span);
+ let mut new_bound_list = String::new();
+ if !bounds.is_empty() {
+ let mut snippets = bounds.iter().map(|bound| bound.span())
+ .map(|span| self.sess.source_map().span_to_snippet(span));
+ while let Some(Ok(snippet)) = snippets.next() {
+ new_bound_list.push_str(" + ");
+ new_bound_list.push_str(&snippet);
+ }
+ new_bound_list = new_bound_list.replacen(" +", ":", 1);
+ }
+ err.span_suggestion_short(bound_list,
+ &format!("remove the trait bound{}",
+ if plural { "s" } else { "" }),
+ new_bound_list,
+ Applicability::MachineApplicable);
+ err.emit();
+ }
+
+ return Ok(bounds);
+ }
+
+ fn parse_generic_bounds(&mut self, colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
+ self.parse_generic_bounds_common(true, colon_span)
+ }
+
+ /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
+ ///
+ /// ```
+ /// BOUND = LT_BOUND (e.g., `'a`)
+ /// ```
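+ /// For example: `'a + 'b + 'static`.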
+ fn parse_lt_param_bounds(&mut self) -> GenericBounds {
+ let mut lifetimes = Vec::new();
+ while self.check_lifetime() {
+ lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));
+
+ if !self.eat_plus() {
+ break
+ }
+ }
+ lifetimes
+ }
+
+ /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
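+ /// For example, `T: Clone + Default = String` in a generic parameter list.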
+ fn parse_ty_param(&mut self,
+ preceding_attrs: Vec<Attribute>)
+ -> PResult<'a, GenericParam> {
+ let ident = self.parse_ident()?;
+
+ // Parse optional colon and param bounds.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_generic_bounds(None)?
+ } else {
+ Vec::new()
+ };
+
+ let default = if self.eat(&token::Eq) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
+
+ Ok(GenericParam {
+ ident,
+ id: ast::DUMMY_NODE_ID,
+ attrs: preceding_attrs.into(),
+ bounds,
+ kind: GenericParamKind::Type {
+ default,
+ }
+ })
+ }
+
+ /// Parses the following grammar:
+ ///
+ /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
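+ ///
+ /// For example, `type Item: Clone = String;`.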
+ fn parse_trait_item_assoc_ty(&mut self)
+ -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
+ let ident = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
+
+ // Parse optional colon and param bounds.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_generic_bounds(None)?
+ } else {
+ Vec::new()
+ };
+ generics.where_clause = self.parse_where_clause()?;
+
+ let default = if self.eat(&token::Eq) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
+ self.expect(&token::Semi)?;
+
+ Ok((ident, TraitItemKind::Type(bounds, default), generics))
+ }
+
+ fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
+ self.expect_keyword(keywords::Const)?;
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+
+ Ok(GenericParam {
+ ident,
+ id: ast::DUMMY_NODE_ID,
+ attrs: preceding_attrs.into(),
+ bounds: Vec::new(),
+ kind: GenericParamKind::Const {
+ ty,
+ }
+ })
+ }
+
+ /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
+ /// a trailing comma and erroneous trailing attributes.
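+ ///
+ /// For example: `'a, 'b: 'a, const N: usize, T: Default`, as found between `<` and `>`.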
+ crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
+ let mut params = Vec::new();
+ loop {
+ let attrs = self.parse_outer_attributes()?;
+ if self.check_lifetime() {
+ let lifetime = self.expect_lifetime();
+ // Parse lifetime parameter.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_lt_param_bounds()
+ } else {
+ Vec::new()
+ };
+ params.push(ast::GenericParam {
+ ident: lifetime.ident,
+ id: lifetime.id,
+ attrs: attrs.into(),
+ bounds,
+ kind: ast::GenericParamKind::Lifetime,
+ });
+ } else if self.check_keyword(keywords::Const) {
+ // Parse const parameter.
+ params.push(self.parse_const_param(attrs)?);
+ } else if self.check_ident() {
+ // Parse type parameter.
+ params.push(self.parse_ty_param(attrs)?);
+ } else {
+ // Check for trailing attributes and stop parsing.
+ if !attrs.is_empty() {
+ if !params.is_empty() {
+ self.struct_span_err(
+ attrs[0].span,
+ &format!("trailing attribute after generic parameter"),
+ )
+ .span_label(attrs[0].span, "attributes must go before parameters")
+ .emit();
+ } else {
+ self.struct_span_err(
+ attrs[0].span,
+ &format!("attribute without generic parameters"),
+ )
+ .span_label(
+ attrs[0].span,
+ "attributes are only permitted when preceding parameters",
+ )
+ .emit();
+ }
+ }
+ break
+ }
+
+ if !self.eat(&token::Comma) {
+ break
+ }
+ }
+ Ok(params)
+ }
+
+ /// Parses a set of optional generic type parameter declarations. Where
+ /// clauses are not parsed here, and must be added later via
+ /// `parse_where_clause()`.
+ ///
+ /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
+ /// | ( < lifetimes , typaramseq ( , )? > )
+ /// where typaramseq = ( typaram ) | ( typaram , typaramseq )
+ fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
+ maybe_whole!(self, NtGenerics, |x| x);
+
+ let span_lo = self.span;
+ if self.eat_lt() {
+ let params = self.parse_generic_params()?;
+ self.expect_gt()?;
+ Ok(ast::Generics {
+ params,
+ where_clause: WhereClause {
+ id: ast::DUMMY_NODE_ID,
+ predicates: Vec::new(),
+ span: syntax_pos::DUMMY_SP,
+ },
+ span: span_lo.to(self.prev_span),
+ })
+ } else {
+ Ok(ast::Generics::default())
+ }
+ }
+
+ /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
+ /// For the purposes of understanding the parsing logic of generic arguments, this function
+ /// can be thought of being the same as just calling `self.parse_generic_args()` if the source
+ /// had the correct amount of leading angle brackets.
+ ///
+ /// ```ignore (diagnostics)
+ /// bar::<<<<T as Foo>::Output>();
+ /// ^^ help: remove extra angle brackets
+ /// ```
+ fn parse_generic_args_with_leaning_angle_bracket_recovery(
+ &mut self,
+ style: PathStyle,
+ lo: Span,
+ ) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
+ // We need to detect whether there are extra leading left angle brackets and produce an
+ // appropriate error and suggestion. This cannot be implemented by looking ahead at
+ // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
+ // then there won't be matching `>` tokens to find.
+ //
+ // To explain how this detection works, consider the following example:
+ //
+ // ```ignore (diagnostics)
+ // bar::<<<<T as Foo>::Output>();
+ // ^^ help: remove extra angle brackets
+ // ```
+ //
+ // Parsing of the left angle brackets starts in this function. We start by parsing the
+ // `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
+ // `eat_lt`):
+ //
+ // *Upcoming tokens:* `<<<<T as Foo>::Output>;`
+ // *Unmatched count:* 1
+ // *`parse_path_segment` calls deep:* 0
+ //
+ // This has the effect of recursing as this function is called if a `<` character
+ // is found within the expected generic arguments:
+ //
+ // *Upcoming tokens:* `<<<T as Foo>::Output>;`
+ // *Unmatched count:* 2
+ // *`parse_path_segment` calls deep:* 1
+ //
+ // Eventually we will have recursed until having consumed all of the `<` tokens and
+ // this will be reflected in the count:
+ //
+ // *Upcoming tokens:* `T as Foo>::Output>;`
+ // *Unmatched count:* 4
+ // *`parse_path_segment` calls deep:* 3
+ //
+ // The parser will continue until reaching the first `>` - this will decrement the
+ // unmatched angle bracket count and return to the parent invocation of this function
+ // having succeeded in parsing:
+ //
+ // *Upcoming tokens:* `::Output>;`
+ // *Unmatched count:* 3
+ // *`parse_path_segment` calls deep:* 2
+ //
+ // This will continue until the next `>` character which will also return successfully
+ // to the parent invocation of this function and decrement the count:
+ //
+ // *Upcoming tokens:* `;`
+ // *Unmatched count:* 2
+ // *`parse_path_segment` calls deep:* 1
+ //
+ // At this point, this function will expect to find another matching `>` character but
+ // won't be able to and will return an error. This will continue all the way up the
+ // call stack until the first invocation:
+ //
+ // *Upcoming tokens:* `;`
+ // *Unmatched count:* 2
+ // *`parse_path_segment` calls deep:* 0
+ //
+ // In doing this, we have managed to work out how many unmatched leading left angle
+ // brackets there are, but we cannot recover as the unmatched angle brackets have
+ // already been consumed. To remedy this, we keep a snapshot of the parser state
+ // before we do the above. We can then inspect whether we ended up with a parsing error
+ // and unmatched left angle brackets and if so, restore the parser state before we
+ // consumed any `<` characters to emit an error and consume the erroneous tokens to
+ // recover by attempting to parse again.
+ //
+ // In practice, the recursion of this function is indirect and there will be other
+ // locations that consume some `<` characters - as long as we update the count when
+ // this happens, it isn't an issue.
+
+ let is_first_invocation = style == PathStyle::Expr;
+ // Take a snapshot before attempting to parse - we can restore this later.
+ let snapshot = if is_first_invocation {
+ Some(self.clone())
+ } else {
+ None
+ };
+
+ debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
+ match self.parse_generic_args() {
+ Ok(value) => Ok(value),
+ Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
+ // Cancel error from being unable to find `>`. We know the error
+ // must have been this due to a non-zero unmatched angle bracket
+ // count.
+ e.cancel();
+
+ // Swap `self` with our backup of the parser state before attempting to parse
+ // generic arguments.
+ let snapshot = mem::replace(self, snapshot.unwrap());
+
+ debug!(
+ "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
+ snapshot.count={:?}",
+ snapshot.unmatched_angle_bracket_count,
+ );
+
+ // Eat the unmatched angle brackets.
+ for _ in 0..snapshot.unmatched_angle_bracket_count {
+ self.eat_lt();
+ }
+
+ // Make a span over ${unmatched angle bracket count} characters.
+ let span = lo.with_hi(
+ lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
+ );
+ let plural = snapshot.unmatched_angle_bracket_count > 1;
+ self.diagnostic()
+ .struct_span_err(
+ span,
+ &format!(
+ "unmatched angle bracket{}",
+ if plural { "s" } else { "" }
+ ),
+ )
+ .span_suggestion(
+ span,
+ &format!(
+ "remove extra angle bracket{}",
+ if plural { "s" } else { "" }
+ ),
+ String::new(),
+ Applicability::MachineApplicable,
+ )
+ .emit();
+
+ // Try again without unmatched angle bracket characters.
+ self.parse_generic_args()
+ },
+ Err(e) => Err(e),
+ }
+ }
+
+ /// Parses (possibly empty) list of lifetime and type arguments and associated type bindings,
+ /// possibly including trailing comma.
+ fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
+ let mut args = Vec::new();
+ let mut bindings = Vec::new();
+ let mut misplaced_assoc_ty_bindings: Vec<Span> = Vec::new();
+ let mut assoc_ty_bindings: Vec<Span> = Vec::new();
+
+ let args_lo = self.span;
+
+ loop {
+ if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
+ // Parse lifetime argument.
+ args.push(GenericArg::Lifetime(self.expect_lifetime()));
+ misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
+ } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) {
+ // Parse associated type binding.
+ let lo = self.span;
+ let ident = self.parse_ident()?;
+ self.bump();
+ let ty = self.parse_ty()?;
+ let span = lo.to(self.prev_span);
+ bindings.push(TypeBinding {
+ id: ast::DUMMY_NODE_ID,
+ ident,
+ ty,
+ span,
+ });
+ assoc_ty_bindings.push(span);
+ } else if self.check_const_arg() {
+ // FIXME(const_generics): to distinguish between idents for types and consts,
+ // we should introduce a GenericArg::Ident in the AST and distinguish when
+ // lowering to the HIR. For now, idents for const args are not permitted.
+
+ // Parse const argument.
+ let expr = if let token::OpenDelim(token::Brace) = self.token {
+ self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
+ } else if self.token.is_ident() {
+ // FIXME(const_generics): to distinguish between idents for types and consts,
+ // we should introduce a GenericArg::Ident in the AST and distinguish when
+ // lowering to the HIR. For now, idents for const args are not permitted.
+ return Err(
+ self.fatal("identifiers may currently not be used for const generics")
+ );
+ } else {
+ // FIXME(const_generics): this currently conflicts with emplacement syntax
+ // with negative integer literals.
+ self.parse_literal_maybe_minus()?
+ };
+ let value = AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: expr,
+ };
+ args.push(GenericArg::Const(value));
+ misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
+ } else if self.check_type() {
+ // Parse type argument.
+ args.push(GenericArg::Type(self.parse_ty()?));
+ misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
+ } else {
+ break
+ }
+
+ if !self.eat(&token::Comma) {
+ break
+ }
+ }
+
+ // FIXME: we would like to report this in ast_validation instead, but we currently do not
+ // preserve ordering of generic parameters with respect to associated type binding, so we
+ // lose that information after parsing.
+ if !misplaced_assoc_ty_bindings.is_empty() {
+ let mut err = self.struct_span_err(
+ args_lo.to(self.prev_span),
+ "associated type bindings must be declared after generic parameters",
+ );
+ for span in misplaced_assoc_ty_bindings {
+ err.span_label(
+ span,
+ "this associated type binding should be moved after the generic parameters",
+ );
+ }
+ err.emit();
+ }
+
+ Ok((args, bindings))
+ }
+
+ /// Parses an optional where-clause and places it in `generics`.
+ ///
+ /// ```ignore (only-for-syntax-highlight)
+ /// where T : Trait<U, V> + 'b, 'a : 'b
+ /// ```
+ fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
+ maybe_whole!(self, NtWhereClause, |x| x);
+
+ let mut where_clause = WhereClause {
+ id: ast::DUMMY_NODE_ID,
+ predicates: Vec::new(),
+ span: syntax_pos::DUMMY_SP,
+ };
+
+ if !self.eat_keyword(keywords::Where) {
+ return Ok(where_clause);
+ }
+ let lo = self.prev_span;
+
+ // We are considering adding generics to the `where` keyword as an alternative higher-rank
+ // parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
+ // change, we parse those generics now, but report an error.
+ if self.choose_generics_over_qpath() {
+ let generics = self.parse_generics()?;
+ self.struct_span_err(
+ generics.span,
+ "generic parameters on `where` clauses are reserved for future use",
+ )
+ .span_label(generics.span, "currently unsupported")
+ .emit();
+ }
+
+ loop {
+ let lo = self.span;
+ if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
+ let lifetime = self.expect_lifetime();
+ // Bounds starting with a colon are mandatory, but possibly empty.
+ self.expect(&token::Colon)?;
+ let bounds = self.parse_lt_param_bounds();
+ where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
+ ast::WhereRegionPredicate {
+ span: lo.to(self.prev_span),
+ lifetime,
+ bounds,
+ }
+ ));
+ } else if self.check_type() {
+ // Parse optional `for<'a, 'b>`.
+ // This `for` is parsed greedily and applies to the whole predicate,
+ // the bounded type can have its own `for` applying only to it.
+ // Example 1: for<'a> Trait1<'a>: Trait2<'a /*ok*/>
+ // Example 2: (for<'a> Trait1<'a>): Trait2<'a /*not ok*/>
+ // Example 3: for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /*ok*/, 'b /*not ok*/>
+ let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+
+ // Parse type with mandatory colon and (possibly empty) bounds,
+ // or with mandatory equality sign and the second type.
+ let ty = self.parse_ty()?;
+ if self.eat(&token::Colon) {
+ let bounds = self.parse_generic_bounds(None)?;
+ where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
+ ast::WhereBoundPredicate {
+ span: lo.to(self.prev_span),
+ bound_generic_params: lifetime_defs,
+ bounded_ty: ty,
+ bounds,
+ }
+ ));
+ // FIXME: Decide what should be used here, `=` or `==`.
+ // FIXME: We are just dropping the binders in lifetime_defs on the floor here.
+ } else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
+ let rhs_ty = self.parse_ty()?;
+ where_clause.predicates.push(ast::WherePredicate::EqPredicate(
+ ast::WhereEqPredicate {
+ span: lo.to(self.prev_span),
+ lhs_ty: ty,
+ rhs_ty,
+ id: ast::DUMMY_NODE_ID,
+ }
+ ));
+ } else {
+ return self.unexpected();
+ }
+ } else {
+ break
+ }
+
+ if !self.eat(&token::Comma) {
+ break
+ }
+ }
+
+ where_clause.span = lo.to(self.prev_span);
+ Ok(where_clause)
+ }
+
+ fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool)
+ -> PResult<'a, (Vec<Arg> , bool)> {
+ self.expect(&token::OpenDelim(token::Paren))?;
+
+ let sp = self.span;
+ let mut variadic = false;
+ let (args, recovered): (Vec<Option<Arg>>, bool) =
+ self.parse_seq_to_before_end(
+ &token::CloseDelim(token::Paren),
+ SeqSep::trailing_allowed(token::Comma),
+ |p| {
+ if p.token == token::DotDotDot {
+ p.bump();
+ variadic = true;
+ if allow_variadic {
+ if p.token != token::CloseDelim(token::Paren) {
+ let span = p.span;
+ p.span_err(span,
+ "`...` must be last in argument list for variadic function");
+ }
+ Ok(None)
+ } else {
+ let span = p.prev_span;
+ if p.token == token::CloseDelim(token::Paren) {
+ // continue parsing to present any further errors
+ p.struct_span_err(
+ span,
+ "only foreign functions are allowed to be variadic"
+ ).emit();
+ Ok(Some(dummy_arg(span)))
+ } else {
+ // this function definition looks beyond recovery, stop parsing
+ p.span_err(span,
+ "only foreign functions are allowed to be variadic");
+ Ok(None)
+ }
+ }
+ } else {
+ match p.parse_arg_general(named_args, false) {
+ Ok(arg) => Ok(Some(arg)),
+ Err(mut e) => {
+ e.emit();
+ let lo = p.prev_span;
+ // Skip every token until next possible arg or end.
+ p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
+ // Create a placeholder argument for proper arg count (#34264).
+ let span = lo.to(p.prev_span);
+ Ok(Some(dummy_arg(span)))
+ }
+ }
+ }
+ }
+ )?;
+
+ if !recovered {
+ self.eat(&token::CloseDelim(token::Paren));
+ }
+
+ let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
+
+ if variadic && args.is_empty() {
+ self.span_err(sp,
+ "variadic function must be declared with at least one named argument");
+ }
+
+ Ok((args, variadic))
+ }
+
+ /// Parses the argument list and result type of a function declaration.
+ fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> {
+
+ let (args, variadic) = self.parse_fn_args(true, allow_variadic)?;
+ let ret_ty = self.parse_ret_ty(true)?;
+
+ Ok(P(FnDecl {
+ inputs: args,
+ output: ret_ty,
+ variadic,
+ }))
+ }
+
+ /// Returns the parsed optional `self` argument, if one is present.
+ fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
+ let expect_ident = |this: &mut Self| match this.token {
+ // Preserve hygienic context.
+ token::Ident(ident, _) =>
+ { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+ _ => unreachable!()
+ };
+ let isolated_self = |this: &mut Self, n| {
+ this.look_ahead(n, |t| t.is_keyword(keywords::SelfLower)) &&
+ this.look_ahead(n + 1, |t| t != &token::ModSep)
+ };
+
+ // Parse optional self parameter of a method.
+ // Only a limited set of initial token sequences is considered self parameters, anything
+ // else is parsed as a normal function parameter list, so some lookahead is required.
+ let eself_lo = self.span;
+ let (eself, eself_ident, eself_hi) = match self.token {
+ token::BinOp(token::And) => {
+ // &self
+ // &mut self
+ // &'lt self
+ // &'lt mut self
+ // &not_self
+ (if isolated_self(self, 1) {
+ self.bump();
+ SelfKind::Region(None, Mutability::Immutable)
+ } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
+ isolated_self(self, 2) {
+ self.bump();
+ self.bump();
+ SelfKind::Region(None, Mutability::Mutable)
+ } else if self.look_ahead(1, |t| t.is_lifetime()) &&
+ isolated_self(self, 2) {
+ self.bump();
+ let lt = self.expect_lifetime();
+ SelfKind::Region(Some(lt), Mutability::Immutable)
+ } else if self.look_ahead(1, |t| t.is_lifetime()) &&
+ self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) &&
+ isolated_self(self, 3) {
+ self.bump();
+ let lt = self.expect_lifetime();
+ self.bump();
+ SelfKind::Region(Some(lt), Mutability::Mutable)
+ } else {
+ return Ok(None);
+ }, expect_ident(self), self.prev_span)
+ }
+ token::BinOp(token::Star) => {
+ // *self
+ // *const self
+ // *mut self
+ // *not_self
+ // Emit special error for `self` cases.
+ let msg = "cannot pass `self` by raw pointer";
+ (if isolated_self(self, 1) {
+ self.bump();
+ self.struct_span_err(self.span, msg)
+ .span_label(self.span, msg)
+ .emit();
+ SelfKind::Value(Mutability::Immutable)
+ } else if self.look_ahead(1, |t| t.is_mutability()) &&
+ isolated_self(self, 2) {
+ self.bump();
+ self.bump();
+ self.struct_span_err(self.span, msg)
+ .span_label(self.span, msg)
+ .emit();
+ SelfKind::Value(Mutability::Immutable)
+ } else {
+ return Ok(None);
+ }, expect_ident(self), self.prev_span)
+ }
+ token::Ident(..) => {
+ if isolated_self(self, 0) {
+ // self
+ // self: TYPE
+ let eself_ident = expect_ident(self);
+ let eself_hi = self.prev_span;
+ (if self.eat(&token::Colon) {
+ let ty = self.parse_ty()?;
+ SelfKind::Explicit(ty, Mutability::Immutable)
+ } else {
+ SelfKind::Value(Mutability::Immutable)
+ }, eself_ident, eself_hi)
+ } else if self.token.is_keyword(keywords::Mut) &&
+ isolated_self(self, 1) {
+ // mut self
+ // mut self: TYPE
+ self.bump();
+ let eself_ident = expect_ident(self);
+ let eself_hi = self.prev_span;
+ (if self.eat(&token::Colon) {
+ let ty = self.parse_ty()?;
+ SelfKind::Explicit(ty, Mutability::Mutable)
+ } else {
+ SelfKind::Value(Mutability::Mutable)
+ }, eself_ident, eself_hi)
+ } else {
+ return Ok(None);
+ }
+ }
+ _ => return Ok(None),
+ };
+
+ let eself = source_map::respan(eself_lo.to(eself_hi), eself);
+ Ok(Some(Arg::from_self(eself, eself_ident)))
+ }
+
+ /// Parses the parameter list and result type of a function that may have a `self` parameter.
+ fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
+ where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>,
+ {
+ self.expect(&token::OpenDelim(token::Paren))?;
+
+ // Parse optional self argument
+ let self_arg = self.parse_self_arg()?;
+
+ // Parse the rest of the function parameter list.
+ let sep = SeqSep::trailing_allowed(token::Comma);
+ let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
+ if self.check(&token::CloseDelim(token::Paren)) {
+ (vec![self_arg], false)
+ } else if self.eat(&token::Comma) {
+ let mut fn_inputs = vec![self_arg];
+ let (mut input, recovered) = self.parse_seq_to_before_end(
+ &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
+ fn_inputs.append(&mut input);
+ (fn_inputs, recovered)
+ } else {
+ return self.unexpected();
+ }
+ } else {
+ self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
+ };
+
+ if !recovered {
+ // Parse closing paren and return type.
+ self.expect(&token::CloseDelim(token::Paren))?;
+ }
+ Ok(P(FnDecl {
+ inputs: fn_inputs,
+ output: self.parse_ret_ty(true)?,
+ variadic: false
+ }))
+ }
+
+ /// Parses the `|arg, arg|` header of a closure.
+ fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
+ let inputs_captures = {
+ if self.eat(&token::OrOr) {
+ Vec::new()
+ } else {
+ self.expect(&token::BinOp(token::Or))?;
+ let args = self.parse_seq_to_before_tokens(
+ &[&token::BinOp(token::Or), &token::OrOr],
+ SeqSep::trailing_allowed(token::Comma),
+ TokenExpectType::NoExpect,
+ |p| p.parse_fn_block_arg()
+ )?.0;
+ self.expect_or()?;
+ args
+ }
+ };
+ let output = self.parse_ret_ty(true)?;
+
+ Ok(P(FnDecl {
+ inputs: inputs_captures,
+ output,
+ variadic: false
+ }))
+ }
+
+ /// Parses the name and optional generic types of a function header.
+ fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
+ let id = self.parse_ident()?;
+ let generics = self.parse_generics()?;
+ Ok((id, generics))
+ }
+
+ fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
+ attrs: Vec<Attribute>) -> P<Item> {
+ P(Item {
+ ident,
+ attrs,
+ id: ast::DUMMY_NODE_ID,
+ node,
+ vis,
+ span,
+ tokens: None,
+ })
+ }
+
+ /// Parses an item-position function declaration.
+ fn parse_item_fn(&mut self,
+ unsafety: Unsafety,
+ asyncness: IsAsync,
+ constness: Spanned<Constness>,
+ abi: Abi)
+ -> PResult<'a, ItemInfo> {
+ let (ident, mut generics) = self.parse_fn_header()?;
+ let decl = self.parse_fn_decl(false)?;
+ generics.where_clause = self.parse_where_clause()?;
+ let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+ let header = FnHeader { unsafety, asyncness, constness, abi };
+ Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
+ }
+
+ /// Returns `true` if we are looking at `const ID`
+ /// (returns `false` for things like `const fn`, etc.).
+ fn is_const_item(&mut self) -> bool {
+ self.token.is_keyword(keywords::Const) &&
+ !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
+ !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
+ }
+
+ /// Parses all the "front matter" for a `fn` declaration, up to
+ /// and including the `fn` keyword:
+ ///
+ /// - `const fn`
+ /// - `unsafe fn`
+ /// - `const unsafe fn`
+ /// - `extern fn`
+ /// - etc.
+ fn parse_fn_front_matter(&mut self)
+ -> PResult<'a, (
+ Spanned<Constness>,
+ Unsafety,
+ IsAsync,
+ Abi
+ )>
+ {
+ let is_const_fn = self.eat_keyword(keywords::Const);
+ let const_span = self.prev_span;
+ let unsafety = self.parse_unsafety();
+ let asyncness = self.parse_asyncness();
+ let (constness, unsafety, abi) = if is_const_fn {
+ (respan(const_span, Constness::Const), unsafety, Abi::Rust)
+ } else {
+ let abi = if self.eat_keyword(keywords::Extern) {
+ self.parse_opt_abi()?.unwrap_or(Abi::C)
+ } else {
+ Abi::Rust
+ };
+ (respan(self.prev_span, Constness::NotConst), unsafety, abi)
+ };
+ self.expect_keyword(keywords::Fn)?;
+ Ok((constness, unsafety, asyncness, abi))
+ }
+
+ /// Parses an impl item.
+ pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
+ maybe_whole!(self, NtImplItem, |x| x);
+ let attrs = self.parse_outer_attributes()?;
+ let (mut item, tokens) = self.collect_tokens(|this| {
+ this.parse_impl_item_(at_end, attrs)
+ })?;
+
+ // See `parse_item` for why this clause is here.
+ if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+ item.tokens = Some(tokens);
+ }
+ Ok(item)
+ }
+
+ fn parse_impl_item_(&mut self,
+ at_end: &mut bool,
+ mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
+ let lo = self.span;
+ let vis = self.parse_visibility(false)?;
+ let defaultness = self.parse_defaultness();
+ let (name, node, generics) = if let Some(type_) = self.eat_type() {
+ let (name, alias, generics) = type_?;
+ let kind = match alias {
+ AliasKind::Weak(typ) => ast::ImplItemKind::Type(typ),
+ AliasKind::Existential(bounds) => ast::ImplItemKind::Existential(bounds),
+ };
+ (name, kind, generics)
+ } else if self.is_const_item() {
+ // This parses the grammar:
+ // ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
+ self.expect_keyword(keywords::Const)?;
+ let name = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let typ = self.parse_ty()?;
+ self.expect(&token::Eq)?;
+ let expr = self.parse_expr()?;
+ self.expect(&token::Semi)?;
+ (name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default())
+ } else {
+ let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?;
+ attrs.extend(inner_attrs);
+ (name, node, generics)
+ };
+
+ Ok(ImplItem {
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(self.prev_span),
+ ident: name,
+ vis,
+ defaultness,
+ attrs,
+ generics,
+ node,
+ tokens: None,
+ })
+ }
+
+ fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) {
+ match *vis {
+ VisibilityKind::Inherited => {}
+ _ => {
+ let is_macro_rules: bool = match self.token {
+ token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
+ _ => false,
+ };
+ let mut err = if is_macro_rules {
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
+ err.span_suggestion(
+ sp,
+ "try exporting the macro",
+ "#[macro_export]".to_owned(),
+ Applicability::MaybeIncorrect // speculative
+ );
+ err
+ } else {
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "can't qualify macro invocation with `pub`");
+ err.help("try adjusting the macro to put `pub` inside the invocation");
+ err
+ };
+ err.emit();
+ }
+ }
+ }
+
+ fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span)
+ -> DiagnosticBuilder<'a>
+ {
+ let expected_kinds = if item_type == "extern" {
+ "missing `fn`, `type`, or `static`"
+ } else {
+ "missing `fn`, `type`, or `const`"
+ };
+
+ // Given this code `path(`, it seems like this is not
+ // setting the visibility of a macro invocation, but rather
+ // a mistyped method declaration.
+ // Create a diagnostic pointing out that `fn` is missing.
+ //
+ // x | pub path(&self) {
+ // | ^ missing `fn`, `type`, or `const`
+ // pub path(
+ // ^^ `sp` below will point to this
+ let sp = prev_span.between(self.prev_span);
+ let mut err = self.diagnostic().struct_span_err(
+ sp,
+ &format!("{} for {}-item declaration",
+ expected_kinds, item_type));
+ err.span_label(sp, expected_kinds);
+ err
+ }
+
+ /// Parses a method or a macro invocation in a trait impl.
+ fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
+ -> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
+ ast::ImplItemKind)> {
+ // code copied from parse_macro_use_or_failure... abstraction!
+ if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
+ // method macro
+ Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
+ ast::ImplItemKind::Macro(mac)))
+ } else {
+ let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
+ let ident = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
+ let decl = self.parse_fn_decl_with_self(|p| p.parse_arg())?;
+ generics.where_clause = self.parse_where_clause()?;
+ *at_end = true;
+ let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+ let header = ast::FnHeader { abi, unsafety, constness, asyncness };
+ Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(
+ ast::MethodSig { header, decl },
+ body
+ )))
+ }
+ }
+
+ /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
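+ ///
+ /// Illustrative inputs for both forms (names are arbitrary; trait aliases are
+ /// feature-gated but parsed here):
+ ///
+ /// ```ignore (illustrative example)
+ /// trait Foo: Clone {
+ ///     fn foo(&self);
+ /// }
+ /// trait Quux = Foo + Send;
+ /// ```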
+ fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
+ let ident = self.parse_ident()?;
+ let mut tps = self.parse_generics()?;
+
+ // Parse optional colon and supertrait bounds.
+ let bounds = if self.eat(&token::Colon) {
+ self.parse_generic_bounds(Some(self.prev_span))?
+ } else {
+ Vec::new()
+ };
+
+ if self.eat(&token::Eq) {
+ // it's a trait alias
+ let bounds = self.parse_generic_bounds(None)?;
+ tps.where_clause = self.parse_where_clause()?;
+ self.expect(&token::Semi)?;
+ if is_auto == IsAuto::Yes {
+ let msg = "trait aliases cannot be `auto`";
+ self.struct_span_err(self.prev_span, msg)
+ .span_label(self.prev_span, msg)
+ .emit();
+ }
+ if unsafety != Unsafety::Normal {
+ let msg = "trait aliases cannot be `unsafe`";
+ self.struct_span_err(self.prev_span, msg)
+ .span_label(self.prev_span, msg)
+ .emit();
+ }
+ Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
+ } else {
+ // it's a normal trait
+ tps.where_clause = self.parse_where_clause()?;
+ self.expect(&token::OpenDelim(token::Brace))?;
+ let mut trait_items = vec![];
+ while !self.eat(&token::CloseDelim(token::Brace)) {
+ let mut at_end = false;
+ match self.parse_trait_item(&mut at_end) {
+ Ok(item) => trait_items.push(item),
+ Err(mut e) => {
+ e.emit();
+ if !at_end {
+ self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
+ }
+ }
+ }
+ }
+ Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None))
+ }
+ }
+
+ fn choose_generics_over_qpath(&self) -> bool {
+ // There's an ambiguity between generic parameters and qualified paths in impls.
+ // If we see `<` it may start both, so we have to inspect some following tokens.
+ // The following combinations can only start generics,
+ // but not qualified paths (with one exception):
+ // `<` `>` - empty generic parameters
+ // `<` `#` - generic parameters with attributes
+ // `<` (LIFETIME|IDENT) `>` - single generic parameter
+ // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
+ // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
+ // `<` (LIFETIME|IDENT) `=` - generic parameter with a default
+ // `<` const - generic const parameter
+ // The only truly ambiguous case is
+ // `<` IDENT `>` `::` IDENT ...
+ // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
+ // because this is what is almost always expected in practice; qualified paths in impls
+ // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
+ self.token == token::Lt &&
+ (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
+ self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
+ self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
+ t == &token::Colon || t == &token::Eq) ||
+ self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
+ }
+
+ fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
+ self.expect(&token::OpenDelim(token::Brace))?;
+ let attrs = self.parse_inner_attributes()?;
+
+ let mut impl_items = Vec::new();
+ while !self.eat(&token::CloseDelim(token::Brace)) {
+ let mut at_end = false;
+ match self.parse_impl_item(&mut at_end) {
+ Ok(impl_item) => impl_items.push(impl_item),
+ Err(mut err) => {
+ err.emit();
+ if !at_end {
+ self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
+ }
+ }
+ }
+ }
+ Ok((impl_items, attrs))
+ }
+
+ /// Parses an implementation item; the `impl` keyword is already parsed.
+ ///
+ /// impl<'a, T> TYPE { /* impl items */ }
+ /// impl<'a, T> TRAIT for TYPE { /* impl items */ }
+ /// impl<'a, T> !TRAIT for TYPE { /* impl items */ }
+ ///
+ /// We actually parse slightly more relaxed grammar for better error reporting and recovery.
+ /// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
+ /// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
+ fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
+ -> PResult<'a, ItemInfo> {
+ // First, parse generic parameters if necessary.
+ let mut generics = if self.choose_generics_over_qpath() {
+ self.parse_generics()?
+ } else {
+ ast::Generics::default()
+ };
+
+ // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
+ let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
+ self.bump(); // `!`
+ ast::ImplPolarity::Negative
+ } else {
+ ast::ImplPolarity::Positive
+ };
+
+ // Parse both types and traits as a type, then reinterpret if necessary.
+ let ty_first = self.parse_ty()?;
+
+ // If `for` is missing we try to recover.
+ let has_for = self.eat_keyword(keywords::For);
+ let missing_for_span = self.prev_span.between(self.span);
+
+ let ty_second = if self.token == token::DotDot {
+ // We need to report this error after `cfg` expansion for compatibility reasons
+ self.bump(); // `..`, do not add it to expected tokens
+ Some(P(Ty { node: TyKind::Err, span: self.prev_span, id: ast::DUMMY_NODE_ID }))
+ } else if has_for || self.token.can_begin_type() {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
+
+ generics.where_clause = self.parse_where_clause()?;
+
+ let (impl_items, attrs) = self.parse_impl_body()?;
+
+ let item_kind = match ty_second {
+ Some(ty_second) => {
+ // impl Trait for Type
+ if !has_for {
+ self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
+ .span_suggestion_short(
+ missing_for_span,
+ "add `for` here",
+ " for ".to_string(),
+ Applicability::MachineApplicable,
+ ).emit();
+ }
+
+ let ty_first = ty_first.into_inner();
+ let path = match ty_first.node {
+ // This notably includes paths passed through `ty` macro fragments (#46438).
+ TyKind::Path(None, path) => path,
+ _ => {
+ self.span_err(ty_first.span, "expected a trait, found type");
+ ast::Path::from_ident(Ident::new(keywords::Invalid.name(), ty_first.span))
+ }
+ };
+ let trait_ref = TraitRef { path, ref_id: ty_first.id };
+
+ ItemKind::Impl(unsafety, polarity, defaultness,
+ generics, Some(trait_ref), ty_second, impl_items)
+ }
+ None => {
+ // impl Type
+ ItemKind::Impl(unsafety, polarity, defaultness,
+ generics, None, ty_first, impl_items)
+ }
+ };
+
+ Ok((keywords::Invalid.ident(), item_kind, Some(attrs)))
+ }
+
+ fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
+ if self.eat_keyword(keywords::For) {
+ self.expect_lt()?;
+ let params = self.parse_generic_params()?;
+ self.expect_gt()?;
+ // We rely on AST validation to rule out invalid cases: There must not be type
+ // parameters, and the lifetime parameters must not have bounds.
+ Ok(params)
+ } else {
+ Ok(Vec::new())
+ }
+ }
+
+ /// Parses `struct Foo { ... }`.
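+ ///
+ /// The accepted forms, shown here for illustration (names are arbitrary):
+ ///
+ /// ```ignore (illustrative example)
+ /// struct Unit;
+ /// struct Tuple(u32, u32);
+ /// struct Record { x: u32, y: u32 }
+ /// struct Wrapper<T>(T) where T: Copy;
+ /// ```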
+ fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
+ let class_name = self.parse_ident()?;
+
+ let mut generics = self.parse_generics()?;
+
+ // There is a special case worth noting here, as reported in issue #17904.
+ // If we are parsing a tuple struct it is the case that the where clause
+ // should follow the field list. Like so:
+ //
+ // struct Foo<T>(T) where T: Copy;
+ //
+ // If we are parsing a normal record-style struct it is the case
+ // that the where clause comes before the body, and after the generics.
+ // So if we look ahead and see a brace or a where-clause we begin
+ // parsing a record style struct.
+ //
+ // Otherwise if we look ahead and see a paren we parse a tuple-style
+ // struct.
+
+ let vdata = if self.token.is_keyword(keywords::Where) {
+ generics.where_clause = self.parse_where_clause()?;
+ if self.eat(&token::Semi) {
+ // If we see a: `struct Foo<T> where T: Copy;` style decl.
+ VariantData::Unit(ast::DUMMY_NODE_ID)
+ } else {
+ // If we see: `struct Foo<T> where T: Copy { ... }`
+ VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+ }
+ // No `where` so: `struct Foo<T>;`
+ } else if self.eat(&token::Semi) {
+ VariantData::Unit(ast::DUMMY_NODE_ID)
+ // Record-style struct definition
+ } else if self.token == token::OpenDelim(token::Brace) {
+ VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+ // Tuple-style struct definition with optional where-clause.
+ } else if self.token == token::OpenDelim(token::Paren) {
+ let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID);
+ generics.where_clause = self.parse_where_clause()?;
+ self.expect(&token::Semi)?;
+ body
+ } else {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!(
+ "expected `where`, `{{`, `(`, or `;` after struct name, found {}",
+ token_str
+ ));
+ err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
+ return Err(err);
+ };
+
+ Ok((class_name, ItemKind::Struct(vdata, generics), None))
+ }
+
+ /// Parses `union Foo { ... }`.
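+ ///
+ /// For illustration (names are arbitrary):
+ ///
+ /// ```ignore (illustrative example)
+ /// union Reinterpret { int: u32, float: f32 }
+ /// union Slot<T> where T: Copy { value: T, empty: () }
+ /// ```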
+ fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
+ let class_name = self.parse_ident()?;
+
+ let mut generics = self.parse_generics()?;
+
+ let vdata = if self.token.is_keyword(keywords::Where) {
+ generics.where_clause = self.parse_where_clause()?;
+ VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+ } else if self.token == token::OpenDelim(token::Brace) {
+ VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
+ } else {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!(
+ "expected `where` or `{{` after union name, found {}", token_str));
+ err.span_label(self.span, "expected `where` or `{` after union name");
+ return Err(err);
+ };
+
+ Ok((class_name, ItemKind::Union(vdata, generics), None))
+ }
+
+ fn consume_block(&mut self, delim: token::DelimToken) {
+ let mut brace_depth = 0;
+ loop {
+ if self.eat(&token::OpenDelim(delim)) {
+ brace_depth += 1;
+ } else if self.eat(&token::CloseDelim(delim)) {
+ if brace_depth == 0 {
+ return;
+ } else {
+ brace_depth -= 1;
+ continue;
+ }
+ } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
+ return;
+ } else {
+ self.bump();
+ }
+ }
+ }
+
+ fn parse_record_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
+ let mut fields = Vec::new();
+ if self.eat(&token::OpenDelim(token::Brace)) {
+ while self.token != token::CloseDelim(token::Brace) {
+ let field = self.parse_struct_decl_field().map_err(|e| {
+ self.recover_stmt();
+ e
+ });
+ match field {
+ Ok(field) => fields.push(field),
+ Err(mut err) => {
+ err.emit();
+ }
+ }
+ }
+ self.eat(&token::CloseDelim(token::Brace));
+ } else {
+ let token_str = self.this_token_descr();
+ let mut err = self.fatal(&format!(
+ "expected `where`, or `{{` after struct name, found {}", token_str));
+ err.span_label(self.span, "expected `where`, or `{` after struct name");
+ return Err(err);
+ }
+
+ Ok(fields)
+ }
+
+ fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
+ // This is the case where we find `struct Foo<T>(T) where T: Copy;`
+ // Unit-like structs are handled in the `parse_item_struct` function.
+ let fields = self.parse_unspanned_seq(
+ &token::OpenDelim(token::Paren),
+ &token::CloseDelim(token::Paren),
+ SeqSep::trailing_allowed(token::Comma),
+ |p| {
+ let attrs = p.parse_outer_attributes()?;
+ let lo = p.span;
+ let vis = p.parse_visibility(true)?;
+ let ty = p.parse_ty()?;
+ Ok(StructField {
+ span: lo.to(ty.span),
+ vis,
+ ident: None,
+ id: ast::DUMMY_NODE_ID,
+ ty,
+ attrs,
+ })
+ })?;
+
+ Ok(fields)
+ }
+
+ /// Parses a structure field declaration.
+ fn parse_single_struct_field(&mut self,
+ lo: Span,
+ vis: Visibility,
+ attrs: Vec<Attribute> )
+ -> PResult<'a, StructField> {
+ let mut seen_comma: bool = false;
+ let a_var = self.parse_name_and_ty(lo, vis, attrs)?;
+ if self.token == token::Comma {
+ seen_comma = true;
+ }
+ match self.token {
+ token::Comma => {
+ self.bump();
+ }
+ token::CloseDelim(token::Brace) => {}
+ token::DocComment(_) => {
+ let previous_span = self.prev_span;
+ let mut err = self.span_fatal_err(self.span, Error::UselessDocComment);
+ self.bump(); // consume the doc comment
+ let comma_after_doc_seen = self.eat(&token::Comma);
+ // `seen_comma` is always false here, because we are inside a doc block;
+ // the condition is kept to make the code more readable.
+ if !seen_comma && comma_after_doc_seen {
+ seen_comma = true;
+ }
+ if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
+ err.emit();
+ } else {
+ if !seen_comma {
+ let sp = self.sess.source_map().next_point(previous_span);
+ err.span_suggestion(
+ sp,
+ "missing comma here",
+ ",".into(),
+ Applicability::MachineApplicable
+ );
+ }
+ return Err(err);
+ }
+ }
+ _ => {
+ let sp = self.sess.source_map().next_point(self.prev_span);
+ let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
+ self.this_token_descr()));
+ if self.token.is_ident() {
+ // This is likely another field; emit the diagnostic and keep going
+ err.span_suggestion(
+ sp,
+ "try adding a comma",
+ ",".into(),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+ } else {
+ return Err(err)
+ }
+ }
+ }
+ Ok(a_var)
+ }
+
+ /// Parses an element of a struct declaration.
+ fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+ let vis = self.parse_visibility(false)?;
+ self.parse_single_struct_field(lo, vis, attrs)
+ }
+
+ /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
+ /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
+ /// If the following element can't be a tuple (i.e., it's a function definition), then
+ /// it's not a tuple struct field and the contents within the parentheses aren't valid,
+ /// so emit a proper diagnostic.
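+ ///
+ /// The forms accepted here, for illustration (paths and names are arbitrary; `crate`
+ /// as a visibility shortcut is feature-gated but parsed here):
+ ///
+ /// ```ignore (illustrative example)
+ /// pub fn a() {}
+ /// crate fn b() {}
+ /// pub(crate) fn c() {}
+ /// pub(super) fn d() {}
+ /// pub(self) fn e() {}
+ /// pub(in some::module) fn f() {}
+ /// ```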
+ pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
+ maybe_whole!(self, NtVis, |x| x);
+
+ self.expected_tokens.push(TokenType::Keyword(keywords::Crate));
+ if self.is_crate_vis() {
+ self.bump(); // `crate`
+ return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
+ }
+
+ if !self.eat_keyword(keywords::Pub) {
+ // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
+ // keyword to grab a span from for inherited visibility; an empty span at the
+ // beginning of the current token would seem to be the "Schelling span".
+ return Ok(respan(self.span.shrink_to_lo(), VisibilityKind::Inherited))
+ }
+ let lo = self.prev_span;
+
+ if self.check(&token::OpenDelim(token::Paren)) {
+ // We don't `self.bump()` the `(` yet because this might be a struct definition where
+ // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
+ // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
+ // by the following tokens.
+ if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
+ // `pub(crate)`
+ self.bump(); // `(`
+ self.bump(); // `crate`
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let vis = respan(
+ lo.to(self.prev_span),
+ VisibilityKind::Crate(CrateSugar::PubCrate),
+ );
+ return Ok(vis)
+ } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ // `pub(in path)`
+ self.bump(); // `(`
+ self.bump(); // `in`
+ let path = self.parse_path(PathStyle::Mod)?; // `path`
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
+ path: P(path),
+ id: ast::DUMMY_NODE_ID,
+ });
+ return Ok(vis)
+ } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
+ t.is_keyword(keywords::SelfLower))
+ {
+ // `pub(self)` or `pub(super)`
+ self.bump(); // `(`
+ let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
+ path: P(path),
+ id: ast::DUMMY_NODE_ID,
+ });
+ return Ok(vis)
+ } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct
+ // `pub(something) fn ...` or `struct X { pub(something) y: Z }`
+ self.bump(); // `(`
+ let msg = "incorrect visibility restriction";
+ let suggestion = r##"some possible visibility restrictions are:
+`pub(crate)`: visible only on the current crate
+`pub(super)`: visible only in the current module's parent
+`pub(in path::to::module)`: visible only on the specified path"##;
+ let path = self.parse_path(PathStyle::Mod)?;
+ let sp = self.prev_span;
+ let help_msg = format!("make this visible only to module `{}` with `in`", path);
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ let mut err = struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg);
+ err.help(suggestion);
+ err.span_suggestion(
+ sp, &help_msg, format!("in {}", path), Applicability::MachineApplicable
+ );
+ err.emit(); // emit diagnostic, but continue with public visibility
+ }
+ }
+
+ Ok(respan(lo, VisibilityKind::Public))
+ }
+
+ /// Parses defaultness (i.e., `default` or nothing).
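+ ///
+ /// For illustration, a `default` item inside a specialization impl (the trait and
+ /// types are arbitrary; the feature is gated):
+ ///
+ /// ```ignore (illustrative example)
+ /// impl<T> Len for Vec<T> {
+ ///     default fn len(&self) -> usize { self.iter().count() }
+ /// }
+ /// ```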
+ fn parse_defaultness(&mut self) -> Defaultness {
+ // `pub` is included for better error messages
+ if self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl) ||
+ t.is_keyword(keywords::Const) ||
+ t.is_keyword(keywords::Fn) ||
+ t.is_keyword(keywords::Unsafe) ||
+ t.is_keyword(keywords::Extern) ||
+ t.is_keyword(keywords::Type) ||
+ t.is_keyword(keywords::Pub)) {
+ self.bump(); // `default`
+ Defaultness::Default
+ } else {
+ Defaultness::Final
+ }
+ }
+
+ fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
+ if self.eat(&token::Semi) {
+ let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
+ err.span_suggestion_short(
+ self.prev_span,
+ "remove this semicolon",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ if !items.is_empty() {
+ let previous_item = &items[items.len()-1];
+ let previous_item_kind_name = match previous_item.node {
+ // say "braced struct" because tuple-structs and
+ // braceless-empty-struct declarations do take a semicolon
+ ItemKind::Struct(..) => Some("braced struct"),
+ ItemKind::Enum(..) => Some("enum"),
+ ItemKind::Trait(..) => Some("trait"),
+ ItemKind::Union(..) => Some("union"),
+ _ => None,
+ };
+ if let Some(name) = previous_item_kind_name {
+ err.help(&format!("{} declarations are not followed by a semicolon", name));
+ }
+ }
+ err.emit();
+ true
+ } else {
+ false
+ }
+ }
+
+ /// Given a termination token, parses all of the items in a module.
+ fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
+ let mut items = vec![];
+ while let Some(item) = self.parse_item()? {
+ items.push(item);
+ self.maybe_consume_incorrect_semicolon(&items);
+ }
+
+ if !self.eat(term) {
+ let token_str = self.this_token_descr();
+ if !self.maybe_consume_incorrect_semicolon(&items) {
+ let mut err = self.fatal(&format!("expected item, found {}", token_str));
+ err.span_label(self.span, "expected item");
+ return Err(err);
+ }
+ }
+
+ let hi = if self.span.is_dummy() {
+ inner_lo
+ } else {
+ self.prev_span
+ };
+
+ Ok(ast::Mod {
+ inner: inner_lo.to(hi),
+ items,
+ inline: true
+ })
+ }
+
+ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
+ let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ self.expect(&token::Eq)?;
+ let e = self.parse_expr()?;
+ self.expect(&token::Semi)?;
+ let item = match m {
+ Some(m) => ItemKind::Static(ty, m, e),
+ None => ItemKind::Const(ty, e),
+ };
+ Ok((id, item, None))
+ }
+
+ /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
+ fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
+ let (in_cfg, outer_attrs) = {
+ let mut strip_unconfigured = crate::config::StripUnconfigured {
+ sess: self.sess,
+ features: None, // don't perform gated feature checking
+ };
+ let mut outer_attrs = outer_attrs.to_owned();
+ strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
+ (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
+ };
+
+ let id_span = self.span;
+ let id = self.parse_ident()?;
+ if self.eat(&token::Semi) {
+ if in_cfg && self.recurse_into_file_modules {
+ // This mod is in an external file. Let's go get it!
+ let ModulePathSuccess { path, directory_ownership, warn } =
+ self.submod_path(id, &outer_attrs, id_span)?;
+ let (module, mut attrs) =
+ self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
+ // Record that we fetched the mod from an external file
+ if warn {
+ let attr = Attribute {
+ id: attr::mk_attr_id(),
+ style: ast::AttrStyle::Outer,
+ path: ast::Path::from_ident(Ident::from_str("warn_directory_ownership")),
+ tokens: TokenStream::empty(),
+ is_sugared_doc: false,
+ span: syntax_pos::DUMMY_SP,
+ };
+ attr::mark_known(&attr);
+ attrs.push(attr);
+ }
+ Ok((id, ItemKind::Mod(module), Some(attrs)))
+ } else {
+ let placeholder = ast::Mod {
+ inner: syntax_pos::DUMMY_SP,
+ items: Vec::new(),
+ inline: false
+ };
+ Ok((id, ItemKind::Mod(placeholder), None))
+ }
+ } else {
+ let old_directory = self.directory.clone();
+ self.push_directory(id, &outer_attrs);
+
+ self.expect(&token::OpenDelim(token::Brace))?;
+ let mod_inner_lo = self.span;
+ let attrs = self.parse_inner_attributes()?;
+ let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
+
+ self.directory = old_directory;
+ Ok((id, ItemKind::Mod(module), Some(attrs)))
+ }
+ }
+
+ fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
+ if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
+ self.directory.path.to_mut().push(&path.as_str());
+ self.directory.ownership = DirectoryOwnership::Owned { relative: None };
+ } else {
+ // We have to push on the current module name in the case of relative
+ // paths in order to ensure that any additional module paths from inline
+ // `mod x { ... }` come after the relative extension.
+ //
+ // For example, a `mod z { ... }` inside `x/y.rs` should set the current
+ // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
+ if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
+ if let Some(ident) = relative.take() { // remove the relative offset
+ self.directory.path.to_mut().push(ident.as_str());
+ }
+ }
+ self.directory.path.to_mut().push(&id.as_str());
+ }
+ }
+
+ pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
+ if let Some(s) = attr::first_attr_value_str_by_name(attrs, "path") {
+ let s = s.as_str();
+
+ // On Windows, the base path might have the form
+ // `\\?\foo\bar` in which case it does not tolerate
+ // mixed `/` and `\` separators, so canonicalize
+ // `/` to `\`.
+ #[cfg(windows)]
+ let s = s.replace("/", "\\");
+ Some(dir_path.join(s))
+ } else {
+ None
+ }
+ }
+
+ /// Returns a path to a module.
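+ ///
+ /// For illustration (names are arbitrary):
+ ///
+ /// ```ignore (illustrative example)
+ /// // While parsing `./foo/bar.rs`:
+ /// mod baz; // checks `./foo/bar/baz.rs`, then `./foo/bar/baz/mod.rs`
+ /// ```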
+ pub fn default_submod_path(
+ id: ast::Ident,
+ relative: Option<ast::Ident>,
+ dir_path: &Path,
+ source_map: &SourceMap) -> ModulePath
+ {
+ // If we're in a foo.rs file instead of a mod.rs file,
+ // we need to look for submodules in
+ // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
+ // `./<id>.rs` and `./<id>/mod.rs`.
+ let relative_prefix_string;
+ let relative_prefix = if let Some(ident) = relative {
+ relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
+ &relative_prefix_string
+ } else {
+ ""
+ };
+
+ let mod_name = id.to_string();
+ let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
+ let secondary_path_str = format!("{}{}{}mod.rs",
+ relative_prefix, mod_name, path::MAIN_SEPARATOR);
+ let default_path = dir_path.join(&default_path_str);
+ let secondary_path = dir_path.join(&secondary_path_str);
+ let default_exists = source_map.file_exists(&default_path);
+ let secondary_exists = source_map.file_exists(&secondary_path);
+
+ let result = match (default_exists, secondary_exists) {
+ (true, false) => Ok(ModulePathSuccess {
+ path: default_path,
+ directory_ownership: DirectoryOwnership::Owned {
+ relative: Some(id),
+ },
+ warn: false,
+ }),
+ (false, true) => Ok(ModulePathSuccess {
+ path: secondary_path,
+ directory_ownership: DirectoryOwnership::Owned {
+ relative: None,
+ },
+ warn: false,
+ }),
+ (false, false) => Err(Error::FileNotFoundForModule {
+ mod_name: mod_name.clone(),
+ default_path: default_path_str,
+ secondary_path: secondary_path_str,
+ dir_path: dir_path.display().to_string(),
+ }),
+ (true, true) => Err(Error::DuplicatePaths {
+ mod_name: mod_name.clone(),
+ default_path: default_path_str,
+ secondary_path: secondary_path_str,
+ }),
+ };
+
+ ModulePath {
+ name: mod_name,
+ path_exists: default_exists || secondary_exists,
+ result,
+ }
+ }
+
+ fn submod_path(&mut self,
+ id: ast::Ident,
+ outer_attrs: &[Attribute],
+ id_sp: Span)
+ -> PResult<'a, ModulePathSuccess> {
+ if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
+ return Ok(ModulePathSuccess {
+ directory_ownership: match path.file_name().and_then(|s| s.to_str()) {
+ // All `#[path]` files are treated as though they are a `mod.rs` file.
+ // This means that `mod foo;` declarations inside `#[path]`-included
+ // files are siblings.
+ //
+ // Note that this will produce weirdness when a file named `foo.rs` is
+ // `#[path]` included and contains a `mod foo;` declaration.
+ // If you encounter this, it's your own darn fault :P
+ Some(_) => DirectoryOwnership::Owned { relative: None },
+ _ => DirectoryOwnership::UnownedViaMod(true),
+ },
+ path,
+ warn: false,
+ });
+ }
+
+ let relative = match self.directory.ownership {
+ DirectoryOwnership::Owned { relative } => relative,
+ DirectoryOwnership::UnownedViaBlock |
+ DirectoryOwnership::UnownedViaMod(_) => None,
+ };
+ let paths = Parser::default_submod_path(
+ id, relative, &self.directory.path, self.sess.source_map());
+
+ match self.directory.ownership {
+ DirectoryOwnership::Owned { .. } => {
+ paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
+ },
+ DirectoryOwnership::UnownedViaBlock => {
+ let msg =
+ "cannot declare a non-inline module inside a block \
+ unless it has a path attribute";
+ let mut err = self.diagnostic().struct_span_err(id_sp, msg);
+ if paths.path_exists {
+ let msg = format!("Maybe `use` the module `{}` instead of redeclaring it",
+ paths.name);
+ err.span_note(id_sp, &msg);
+ }
+ Err(err)
+ }
+ DirectoryOwnership::UnownedViaMod(warn) => {
+ if warn {
+ if let Ok(result) = paths.result {
+ return Ok(ModulePathSuccess { warn: true, ..result });
+ }
+ }
+ let mut err = self.diagnostic().struct_span_err(id_sp,
+ "cannot declare a new module at this location");
+ if !id_sp.is_dummy() {
+ let src_path = self.sess.source_map().span_to_filename(id_sp);
+ if let FileName::Real(src_path) = src_path {
+ if let Some(stem) = src_path.file_stem() {
+ let mut dest_path = src_path.clone();
+ dest_path.set_file_name(stem);
+ dest_path.push("mod.rs");
+ err.span_note(id_sp,
+ &format!("maybe move this module `{}` to its own \
+ directory via `{}`", src_path.display(),
+ dest_path.display()));
+ }
+ }
+ }
+ if paths.path_exists {
+ err.span_note(id_sp,
+ &format!("... or maybe `use` the module `{}` instead \
+ of possibly redeclaring it",
+ paths.name));
+ }
+ Err(err)
+ }
+ }
+ }
+
+ /// Reads a module from a source file.
+ fn eval_src_mod(&mut self,
+ path: PathBuf,
+ directory_ownership: DirectoryOwnership,
+ name: String,
+ id_sp: Span)
+ -> PResult<'a, (ast::Mod, Vec<Attribute> )> {
+ let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
+ if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
+ let mut err = String::from("circular modules: ");
+ let len = included_mod_stack.len();
+ for p in &included_mod_stack[i.. len] {
+ err.push_str(&p.to_string_lossy());
+ err.push_str(" -> ");
+ }
+ err.push_str(&path.to_string_lossy());
+ return Err(self.span_fatal(id_sp, &err[..]));
+ }
+ included_mod_stack.push(path.clone());
+ drop(included_mod_stack);
+
+ let mut p0 =
+ new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
+ p0.cfg_mods = self.cfg_mods;
+ let mod_inner_lo = p0.span;
+ let mod_attrs = p0.parse_inner_attributes()?;
+ let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
+ m0.inline = false;
+ self.sess.included_mod_stack.borrow_mut().pop();
+ Ok((m0, mod_attrs))
+ }
+
+ /// Parses a function declaration from a foreign module.
+ fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
+ -> PResult<'a, ForeignItem> {
+ self.expect_keyword(keywords::Fn)?;
+
+ let (ident, mut generics) = self.parse_fn_header()?;
+ let decl = self.parse_fn_decl(true)?;
+ generics.where_clause = self.parse_where_clause()?;
+ let hi = self.span;
+ self.expect(&token::Semi)?;
+ Ok(ast::ForeignItem {
+ ident,
+ attrs,
+ node: ForeignItemKind::Fn(decl, generics),
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ vis,
+ })
+ }
+
+ /// Parses a static item from a foreign module.
+ /// Assumes that the `static` keyword is already parsed.
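+ ///
+ /// For illustration, the declarations handled here (names and types are arbitrary):
+ ///
+ /// ```ignore (illustrative example)
+ /// extern "C" {
+ ///     static ERRNO: i32;
+ ///     static mut COUNTER: u64;
+ /// }
+ /// ```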
+ fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
+ -> PResult<'a, ForeignItem> {
+ let mutbl = self.eat_keyword(keywords::Mut);
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+ let hi = self.span;
+ self.expect(&token::Semi)?;
+ Ok(ForeignItem {
+ ident,
+ attrs,
+ node: ForeignItemKind::Static(ty, mutbl),
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ vis,
+ })
+ }
+
+ /// Parses a type from a foreign module.
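+ ///
+ /// For illustration (`Opaque` is an arbitrary name; extern types are feature-gated):
+ ///
+ /// ```ignore (illustrative example)
+ /// extern "C" {
+ ///     type Opaque;
+ /// }
+ /// ```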
+ fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
+ -> PResult<'a, ForeignItem> {
+ self.expect_keyword(keywords::Type)?;
+
+ let ident = self.parse_ident()?;
+ let hi = self.span;
+ self.expect(&token::Semi)?;
+ Ok(ast::ForeignItem {
+ ident: ident,
+ attrs: attrs,
+ node: ForeignItemKind::Ty,
+ id: ast::DUMMY_NODE_ID,
+ span: lo.to(hi),
+ vis: vis
+ })
+ }
+
+ fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
+ let error_msg = "crate name using dashes are not valid in `extern crate` statements";
+ let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
+ in the code";
+ let mut ident = if self.token.is_keyword(keywords::SelfLower) {
+ self.parse_path_segment_ident()
+ } else {
+ self.parse_ident()
+ }?;
+ let mut idents = vec![];
+ let mut replacement = vec![];
+ let mut fixed_crate_name = false;
+ // Accept `extern crate name-like-this` for better diagnostics
+ let dash = token::Token::BinOp(token::BinOpToken::Minus);
+ if self.token == dash { // Do not include `-` as part of the expected tokens list
+ while self.eat(&dash) {
+ fixed_crate_name = true;
+ replacement.push((self.prev_span, "_".to_string()));
+ idents.push(self.parse_ident()?);
+ }
+ }
+ if fixed_crate_name {
+ let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
+ let mut fixed_name = format!("{}", ident.name);
+ for part in idents {
+ fixed_name.push_str(&format!("_{}", part.name));
+ }
+ ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp);
+
+ let mut err = self.struct_span_err(fixed_name_sp, error_msg);
+ err.span_label(fixed_name_sp, "dash-separated idents are not valid");
+ err.multipart_suggestion(
+ suggestion_msg,
+ replacement,
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+ }
+ Ok(ident)
+ }
+
+ /// Parses `extern crate` links.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// extern crate foo;
+ /// extern crate bar as foo;
+ /// ```
+ fn parse_item_extern_crate(&mut self,
+ lo: Span,
+ visibility: Visibility,
+ attrs: Vec<Attribute>)
+ -> PResult<'a, P<Item>> {
+ // Accept `extern crate name-like-this` for better diagnostics
+ let orig_name = self.parse_crate_name_with_dashes()?;
+ let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
+ (rename, Some(orig_name.name))
+ } else {
+ (orig_name, None)
+ };
+ self.expect(&token::Semi)?;
+
+ let span = lo.to(self.prev_span);
+ Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
+ }
+
+ /// Parses `extern` for foreign ABI modules.
+ ///
+ /// `extern` is expected to have been
+ /// consumed before calling this method.
+ ///
+ /// # Examples
+ ///
+ /// ```ignore (only-for-syntax-highlight)
+ /// extern "C" {}
+ /// extern {}
+ /// ```
+ fn parse_item_foreign_mod(&mut self,
+ lo: Span,
+ opt_abi: Option<Abi>,
+ visibility: Visibility,
+ mut attrs: Vec<Attribute>)
+ -> PResult<'a, P<Item>> {
+ self.expect(&token::OpenDelim(token::Brace))?;
+
+ let abi = opt_abi.unwrap_or(Abi::C);
+
+ attrs.extend(self.parse_inner_attributes()?);
+
+ let mut foreign_items = vec![];
+ while !self.eat(&token::CloseDelim(token::Brace)) {
+ foreign_items.push(self.parse_foreign_item()?);
+ }
+
+ let prev_span = self.prev_span;
+ let m = ast::ForeignMod {
+ abi,
+ items: foreign_items
+ };
+ let invalid = keywords::Invalid.ident();
+ Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
+ }
+
+ /// Parses `type Foo = Bar;`
+ /// or
+ /// `existential type Foo: Bar;`
+ /// or
+ /// returns `None`
+ /// without modifying the parser state.
+ fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
+ // This parses the grammar:
+ // Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
+ if self.check_keyword(keywords::Type) ||
+ self.check_keyword(keywords::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Type)) {
+ let existential = self.eat_keyword(keywords::Existential);
+ assert!(self.eat_keyword(keywords::Type));
+ Some(self.parse_existential_or_alias(existential))
+ } else {
+ None
+ }
+ }
+
+ /// Parses a type alias or existential type.
+ fn parse_existential_or_alias(
+ &mut self,
+ existential: bool,
+ ) -> PResult<'a, (Ident, AliasKind, ast::Generics)> {
+ let ident = self.parse_ident()?;
+ let mut tps = self.parse_generics()?;
+ tps.where_clause = self.parse_where_clause()?;
+ let alias = if existential {
+ self.expect(&token::Colon)?;
+ let bounds = self.parse_generic_bounds(None)?;
+ AliasKind::Existential(bounds)
+ } else {
+ self.expect(&token::Eq)?;
+ let ty = self.parse_ty()?;
+ AliasKind::Weak(ty)
+ };
+ self.expect(&token::Semi)?;
+ Ok((ident, alias, tps))
+ }
+
+ /// Parses the part of an enum declaration following the `{`.
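+ ///
+ /// The variant forms handled here, shown for illustration (names are arbitrary):
+ ///
+ /// ```ignore (illustrative example)
+ /// enum Shape {
+ ///     Unit,
+ ///     Tuple(u32, u32),
+ ///     Record { width: u32, height: u32 },
+ /// }
+ /// // Discriminant values are only accepted when every variant is field-less:
+ /// enum Direction { North = 0, East = 1, South = 2, West = 3 }
+ /// ```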
+ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
+ let mut variants = Vec::new();
+ let mut all_nullary = true;
+ let mut any_disr = vec![];
+ while self.token != token::CloseDelim(token::Brace) {
+ let variant_attrs = self.parse_outer_attributes()?;
+ let vlo = self.span;
+
+ let struct_def;
+ let mut disr_expr = None;
+ let ident = self.parse_ident()?;
+ if self.check(&token::OpenDelim(token::Brace)) {
+ // Parse a struct variant.
+ all_nullary = false;
+ struct_def = VariantData::Struct(self.parse_record_struct_body()?,
+ ast::DUMMY_NODE_ID);
+ } else if self.check(&token::OpenDelim(token::Paren)) {
+ all_nullary = false;
+ struct_def = VariantData::Tuple(self.parse_tuple_struct_body()?,
+ ast::DUMMY_NODE_ID);
+ } else if self.eat(&token::Eq) {
+ disr_expr = Some(AnonConst {
+ id: ast::DUMMY_NODE_ID,
+ value: self.parse_expr()?,
+ });
+ if let Some(sp) = disr_expr.as_ref().map(|c| c.value.span) {
+ any_disr.push(sp);
+ }
+ struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
+ } else {
+ struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
+ }
+
+ let vr = ast::Variant_ {
+ ident,
+ attrs: variant_attrs,
+ data: struct_def,
+ disr_expr,
+ };
+ variants.push(respan(vlo.to(self.prev_span), vr));
+
+ if !self.eat(&token::Comma) { break; }
+ }
+ self.expect(&token::CloseDelim(token::Brace))?;
+ if !any_disr.is_empty() && !all_nullary {
+ let mut err = self.struct_span_err(
+ any_disr.clone(),
+ "discriminator values can only be used with a field-less enum",
+ );
+ for sp in any_disr {
+ err.span_label(sp, "only valid in field-less enums");
+ }
+ err.emit();
+ }
+
+ Ok(ast::EnumDef { variants })
+ }
+
+ /// Parses an enum declaration.
+ fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
+ let id = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
+ generics.where_clause = self.parse_where_clause()?;
+ self.expect(&token::OpenDelim(token::Brace))?;
+
+ let enum_definition = self.parse_enum_def(&generics).map_err(|e| {
+ self.recover_stmt();
+ self.eat(&token::CloseDelim(token::Brace));
+ e
+ })?;
+ Ok((id, ItemKind::Enum(enum_definition, generics), None))
+ }
+
+ /// Parses a string as an ABI spec on an extern type or module. Consumes
+ /// the `extern` keyword, if one is found.
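+ ///
+ /// For illustration, the string literal parsed here is the quoted ABI name
+ /// (the item names are arbitrary):
+ ///
+ /// ```ignore (illustrative example)
+ /// extern "C" { fn strlen(s: *const u8) -> usize; }
+ /// extern "system" fn callback() {}
+ /// ```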
+ fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
+ match self.token {
+ token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
+ let sp = self.span;
+ self.expect_no_suffix(sp, "ABI spec", suf);
+ self.bump();
+ match abi::lookup(&s.as_str()) {
+ Some(abi) => Ok(Some(abi)),
+ None => {
+ let prev_span = self.prev_span;
+ let mut err = struct_span_err!(
+ self.sess.span_diagnostic,
+ prev_span,
+ E0703,
+ "invalid ABI: found `{}`",
+ s);
+ err.span_label(prev_span, "invalid ABI");
+ err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
+ err.emit();
+ Ok(None)
+ }
+ }
+ }
+
+ _ => Ok(None),
+ }
+ }
+
+ fn is_static_global(&mut self) -> bool {
+ if self.check_keyword(keywords::Static) {
+ // Check if this could be a closure
+ !self.look_ahead(1, |token| {
+ if token.is_keyword(keywords::Move) {
+ return true;
+ }
+ match *token {
+ token::BinOp(token::Or) | token::OrOr => true,
+ _ => false,
+ }
+ })
+ } else {
+ false
+ }
+ }
+
+ fn parse_item_(
+ &mut self,
+ attrs: Vec<Attribute>,
+ macros_allowed: bool,
+ attributes_allowed: bool,
+ ) -> PResult<'a, Option<P<Item>>> {
+ let (ret, tokens) = self.collect_tokens(|this| {
+ this.parse_item_implementation(attrs, macros_allowed, attributes_allowed)
+ })?;
+
+ // Once we've parsed an item and recorded the tokens we got while
+ // parsing we may want to store `tokens` into the item we're about to
+ // return. Note, though, that we specifically didn't capture tokens
+ // related to outer attributes. The `tokens` field here may later be
+ // used with procedural macros to convert this item back into a token
+ // stream, but during expansion we may be removing attributes as we go
+ // along.
+ //
+ // If we've got inner attributes then the `tokens` we've got above holds
+ // these inner attributes. If an inner attribute is expanded we won't
+ // actually remove it from the token stream, so we'll just keep yielding
+ // it (bad!). To work around this case for now we just avoid recording
+ // `tokens` if we detect any inner attributes. This should help keep
+ // expansion correct, but we should fix this bug one day!
+ Ok(ret.map(|item| {
+ item.map(|mut i| {
+ if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+ i.tokens = Some(tokens);
+ }
+ i
+ })
+ }))
+ }
+
+ /// Parses one of the items allowed by the flags.
+ fn parse_item_implementation(
+ &mut self,
+ attrs: Vec<Attribute>,
+ macros_allowed: bool,
+ attributes_allowed: bool,
+ ) -> PResult<'a, Option<P<Item>>> {
+ maybe_whole!(self, NtItem, |item| {
+ let mut item = item.into_inner();
+ let mut attrs = attrs;
+ mem::swap(&mut item.attrs, &mut attrs);
+ item.attrs.extend(attrs);
+ Some(P(item))
+ });
+
+ let lo = self.span;
+
+ let visibility = self.parse_visibility(false)?;
+
+ if self.eat_keyword(keywords::Use) {
+ // USE ITEM
+ let item_ = ItemKind::Use(P(self.parse_use_tree()?));
+ self.expect(&token::Semi)?;
+
+ let span = lo.to(self.prev_span);
+ let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs);
+ return Ok(Some(item));
+ }
+
+ if self.eat_keyword(keywords::Extern) {
+ if self.eat_keyword(keywords::Crate) {
+ return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
+ }
+
+ let opt_abi = self.parse_opt_abi()?;
+
+ if self.eat_keyword(keywords::Fn) {
+ // EXTERN FUNCTION ITEM
+ let fn_span = self.prev_span;
+ let abi = opt_abi.unwrap_or(Abi::C);
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(Unsafety::Normal,
+ IsAsync::NotAsync,
+ respan(fn_span, Constness::NotConst),
+ abi)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ } else if self.check(&token::OpenDelim(token::Brace)) {
+ return Ok(Some(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs)?));
+ }
+
+ self.unexpected()?;
+ }
+
+ if self.is_static_global() {
+ self.bump();
+ // STATIC ITEM
+ let m = if self.eat_keyword(keywords::Mut) {
+ Mutability::Mutable
+ } else {
+ Mutability::Immutable
+ };
+ let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Const) {
+ let const_span = self.prev_span;
+ if self.check_keyword(keywords::Fn)
+ || (self.check_keyword(keywords::Unsafe)
+ && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) {
+ // CONST FUNCTION ITEM
+ let unsafety = self.parse_unsafety();
+ self.bump();
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(unsafety,
+ IsAsync::NotAsync,
+ respan(const_span, Constness::Const),
+ Abi::Rust)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+
+ // CONST ITEM
+ if self.eat_keyword(keywords::Mut) {
+ let prev_span = self.prev_span;
+ let mut err = self.diagnostic()
+ .struct_span_err(prev_span, "const globals cannot be mutable");
+ err.span_label(prev_span, "cannot be mutable");
+ err.span_suggestion(
+ const_span,
+ "you might want to declare a static instead",
+ "static".to_owned(),
+ Applicability::MaybeIncorrect,
+ );
+ err.emit();
+ }
+ let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+
+ // `unsafe async fn` or `async fn`
+ if (
+ self.check_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Async))
+ ) || (
+ self.check_keyword(keywords::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ )
+ {
+ // ASYNC FUNCTION ITEM
+ let unsafety = self.parse_unsafety();
+ self.expect_keyword(keywords::Async)?;
+ self.expect_keyword(keywords::Fn)?;
+ let fn_span = self.prev_span;
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(unsafety,
+ IsAsync::Async {
+ closure_id: ast::DUMMY_NODE_ID,
+ return_impl_trait_id: ast::DUMMY_NODE_ID,
+ },
+ respan(fn_span, Constness::NotConst),
+ Abi::Rust)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Unsafe) &&
+ (self.look_ahead(1, |t| t.is_keyword(keywords::Trait)) ||
+ self.look_ahead(1, |t| t.is_keyword(keywords::Auto)))
+ {
+ // UNSAFE TRAIT ITEM
+ self.bump(); // `unsafe`
+ let is_auto = if self.eat_keyword(keywords::Trait) {
+ IsAuto::No
+ } else {
+ self.expect_keyword(keywords::Auto)?;
+ self.expect_keyword(keywords::Trait)?;
+ IsAuto::Yes
+ };
+ let (ident, item_, extra_attrs) =
+ self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Impl) ||
+ self.check_keyword(keywords::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
+ self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
+ self.check_keyword(keywords::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) {
+ // IMPL ITEM
+ let defaultness = self.parse_defaultness();
+ let unsafety = self.parse_unsafety();
+ self.expect_keyword(keywords::Impl)?;
+ let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
+ let span = lo.to(self.prev_span);
+ return Ok(Some(self.mk_item(span, ident, item, visibility,
+ maybe_append(attrs, extra_attrs))));
+ }
+ if self.check_keyword(keywords::Fn) {
+ // FUNCTION ITEM
+ self.bump();
+ let fn_span = self.prev_span;
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(Unsafety::Normal,
+ IsAsync::NotAsync,
+ respan(fn_span, Constness::NotConst),
+ Abi::Rust)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Unsafe)
+ && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
+ // UNSAFE FUNCTION ITEM
+ self.bump(); // `unsafe`
+ // `{` is also expected after `unsafe`; in case of error, include it in the diagnostic.
+ self.check(&token::OpenDelim(token::Brace));
+ let abi = if self.eat_keyword(keywords::Extern) {
+ self.parse_opt_abi()?.unwrap_or(Abi::C)
+ } else {
+ Abi::Rust
+ };
+ self.expect_keyword(keywords::Fn)?;
+ let fn_span = self.prev_span;
+ let (ident, item_, extra_attrs) =
+ self.parse_item_fn(Unsafety::Unsafe,
+ IsAsync::NotAsync,
+ respan(fn_span, Constness::NotConst),
+ abi)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Mod) {
+ // MODULE ITEM
+ let (ident, item_, extra_attrs) =
+ self.parse_item_mod(&attrs[..])?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if let Some(type_) = self.eat_type() {
+ let (ident, alias, generics) = type_?;
+ // TYPE ITEM
+ let item_ = match alias {
+ AliasKind::Weak(ty) => ItemKind::Ty(ty, generics),
+ AliasKind::Existential(bounds) => ItemKind::Existential(bounds, generics),
+ };
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ attrs);
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Enum) {
+ // ENUM ITEM
+ let (ident, item_, extra_attrs) = self.parse_item_enum()?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.check_keyword(keywords::Trait)
+ || (self.check_keyword(keywords::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ {
+ let is_auto = if self.eat_keyword(keywords::Trait) {
+ IsAuto::No
+ } else {
+ self.expect_keyword(keywords::Auto)?;
+ self.expect_keyword(keywords::Trait)?;
+ IsAuto::Yes
+ };
+ // TRAIT ITEM
+ let (ident, item_, extra_attrs) =
+ self.parse_item_trait(is_auto, Unsafety::Normal)?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.eat_keyword(keywords::Struct) {
+ // STRUCT ITEM
+ let (ident, item_, extra_attrs) = self.parse_item_struct()?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if self.is_union_item() {
+ // UNION ITEM
+ self.bump();
+ let (ident, item_, extra_attrs) = self.parse_item_union()?;
+ let prev_span = self.prev_span;
+ let item = self.mk_item(lo.to(prev_span),
+ ident,
+ item_,
+ visibility,
+ maybe_append(attrs, extra_attrs));
+ return Ok(Some(item));
+ }
+ if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
+ return Ok(Some(macro_def));
+ }
+
+ // Verify whether we have encountered a struct or method definition where the user forgot to
+ // add the `struct` or `fn` keyword after writing `pub`: `pub S {}`
+ if visibility.node.is_pub() &&
+ self.check_ident() &&
+ self.look_ahead(1, |t| *t != token::Not)
+ {
+ // Space between `pub` keyword and the identifier
+ //
+ // pub S {}
+ // ^^^ `sp` points here
+ let sp = self.prev_span.between(self.span);
+ let full_sp = self.prev_span.to(self.span);
+ let ident_sp = self.span;
+ if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
+ // possible public struct definition where `struct` was forgotten
+ let ident = self.parse_ident().unwrap();
+ let msg = format!("add `struct` here to parse `{}` as a public struct",
+ ident);
+ let mut err = self.diagnostic()
+ .struct_span_err(sp, "missing `struct` for struct definition");
+ err.span_suggestion_short(
+ sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative
+ );
+ return Err(err);
+ } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
+ let ident = self.parse_ident().unwrap();
+ self.bump(); // `(`
+ let kw_name = if let Ok(Some(_)) = self.parse_self_arg() {
+ "method"
+ } else {
+ "function"
+ };
+ self.consume_block(token::Paren);
+ let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
+ self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
+ self.bump(); // `{`
+ ("fn", kw_name, false)
+ } else if self.check(&token::OpenDelim(token::Brace)) {
+ self.bump(); // `{`
+ ("fn", kw_name, false)
+ } else if self.check(&token::Colon) {
+ let kw = "struct";
+ (kw, kw, false)
+ } else {
+ ("fn` or `struct", "function or struct", true)
+ };
+ self.consume_block(token::Brace);
+
+ let msg = format!("missing `{}` for {} definition", kw, kw_name);
+ let mut err = self.diagnostic().struct_span_err(sp, &msg);
+ if !ambiguous {
+ let suggestion = format!("add `{}` here to parse `{}` as a public {}",
+ kw,
+ ident,
+ kw_name);
+ err.span_suggestion_short(
+ sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
+ );
+ } else {
+ if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) {
+ err.span_suggestion(
+ full_sp,
+ "if you meant to call a macro, try",
+ format!("{}!", snippet),
+ // this is the `ambiguous` conditional branch
+ Applicability::MaybeIncorrect
+ );
+ } else {
+ err.help("if you meant to call a macro, remove the `pub` \
+ and add a trailing `!` after the identifier");
+ }
+ }
+ return Err(err);
+ } else if self.look_ahead(1, |t| *t == token::Lt) {
+ let ident = self.parse_ident().unwrap();
+ self.eat_to_tokens(&[&token::Gt]);
+ self.bump(); // `>`
+ let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
+ if let Ok(Some(_)) = self.parse_self_arg() {
+ ("fn", "method", false)
+ } else {
+ ("fn", "function", false)
+ }
+ } else if self.check(&token::OpenDelim(token::Brace)) {
+ ("struct", "struct", false)
+ } else {
+ ("fn` or `struct", "function or struct", true)
+ };
+ let msg = format!("missing `{}` for {} definition", kw, kw_name);
+ let mut err = self.diagnostic().struct_span_err(sp, &msg);
+ if !ambiguous {
+ err.span_suggestion_short(
+ sp,
+ &format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name),
+ format!(" {} ", kw),
+ Applicability::MachineApplicable,
+ );
+ }
+ return Err(err);
+ }
+ }
+ self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
+ }
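+
+ // For illustration, a few hypothetical inputs the `pub` recovery above is
+ // meant to catch, together with the diagnostic each one produces:
+ //
+ //     pub S { x: u32 }         // "missing `struct` for struct definition"
+ //     pub foo(&self) { ... }   // "missing `fn` for method definition"
+ //     pub bar() -> u8 { ... }  // "missing `fn` for function definition"
+ //     pub baz(x, y);           // ambiguous; suggests the macro call `baz!(x, y)`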
+
+ /// Parses a foreign item.
+ crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
+ maybe_whole!(self, NtForeignItem, |ni| ni);
+
+ let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
+ let visibility = self.parse_visibility(false)?;
+
+ // FOREIGN STATIC ITEM
+ // Treat `const` as `static` for error recovery, but don't add it to expected tokens.
+ if self.check_keyword(keywords::Static) || self.token.is_keyword(keywords::Const) {
+ if self.token.is_keyword(keywords::Const) {
+ self.diagnostic()
+ .struct_span_err(self.span, "extern items cannot be `const`")
+ .span_suggestion(
+ self.span,
+ "try using a static value",
+ "static".to_owned(),
+ Applicability::MachineApplicable
+ ).emit();
+ }
+ self.bump(); // `static` or `const`
+ return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
+ }
+ // FOREIGN FUNCTION ITEM
+ if self.check_keyword(keywords::Fn) {
+ return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
+ }
+ // FOREIGN TYPE ITEM
+ if self.check_keyword(keywords::Type) {
+ return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
+ }
+
+ match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? {
+ Some(mac) => {
+ Ok(
+ ForeignItem {
+ ident: keywords::Invalid.ident(),
+ span: lo.to(self.prev_span),
+ id: ast::DUMMY_NODE_ID,
+ attrs,
+ vis: visibility,
+ node: ForeignItemKind::Macro(mac),
+ }
+ )
+ }
+ None => {
+ if !attrs.is_empty() {
+ self.expected_item_err(&attrs)?;
+ }
+
+ self.unexpected()
+ }
+ }
+ }
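+
+ // Sketch of the foreign-item shapes handled above, shown inside a
+ // hypothetical `extern` block (all item names are illustrative):
+ //
+ //     extern {
+ //         static ERRNO: i32;   // FOREIGN STATIC ITEM
+ //         const BAD: i32;      // recovered: error plus a `static` suggestion
+ //         fn write(fd: i32);   // FOREIGN FUNCTION ITEM
+ //         type Opaque;         // FOREIGN TYPE ITEM
+ //         my_macro!();         // associated macro invocation
+ //     }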
+
+ /// This is the fall-through for parsing items.
+ fn parse_macro_use_or_failure(
+ &mut self,
+ attrs: Vec<Attribute>,
+ macros_allowed: bool,
+ attributes_allowed: bool,
+ lo: Span,
+ visibility: Visibility
+ ) -> PResult<'a, Option<P<Item>>> {
+ if macros_allowed && self.token.is_path_start() {
+ // MACRO INVOCATION ITEM
+
+ let prev_span = self.prev_span;
+ self.complain_if_pub_macro(&visibility.node, prev_span);
+
+ let mac_lo = self.span;
+
+ // item macro.
+ let pth = self.parse_path(PathStyle::Mod)?;
+ self.expect(&token::Not)?;
+
+ // a 'special' identifier (like what `macro_rules!` uses)
+ // is optional. We should eventually unify invoc syntax
+ // and remove this.
+ let id = if self.token.is_ident() {
+ self.parse_ident()?
+ } else {
+ keywords::Invalid.ident() // no special identifier
+ };
+ // eat a matched-delimiter token tree:
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != MacDelimiter::Brace {
+ if !self.eat(&token::Semi) {
+ self.span_err(self.prev_span,
+ "macros that expand to items must either \
+ be surrounded with braces or followed by \
+ a semicolon");
+ }
+ }
+
+ let hi = self.prev_span;
+ let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts, delim });
+ let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs);
+ return Ok(Some(item));
+ }
+
+ // FAILURE TO PARSE ITEM
+ match visibility.node {
+ VisibilityKind::Inherited => {}
+ _ => {
+ return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`"));
+ }
+ }
+
+ if !attributes_allowed && !attrs.is_empty() {
+ self.expected_item_err(&attrs)?;
+ }
+ Ok(None)
+ }
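+
+ // For illustration, item-position macro invocations accepted by the branch
+ // above (`my_macro` and `helper` are hypothetical names):
+ //
+ //     my_macro! { /* items */ }       // brace-delimited: no trailing `;` required
+ //     my_macro!(/* items */);         // other delimiters must be followed by `;`
+ //     my_macro! helper { /* ... */ }  // optional "special" identifier, as with `macro_rules!`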
+
+ /// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
+ fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
+ at_end: &mut bool) -> PResult<'a, Option<Mac>>
+ {
+ if self.token.is_path_start() {
+ let prev_span = self.prev_span;
+ let lo = self.span;
+ let pth = self.parse_path(PathStyle::Mod)?;
+
+ if pth.segments.len() == 1 {
+ if !self.eat(&token::Not) {
+ return Err(self.missing_assoc_item_kind_err(item_kind, prev_span));
+ }
+ } else {
+ self.expect(&token::Not)?;
+ }
+
+ if let Some(vis) = vis {
+ self.complain_if_pub_macro(&vis.node, prev_span);
+ }
+
+ *at_end = true;
+
+ // eat a matched-delimiter token tree:
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != MacDelimiter::Brace {
+ self.expect(&token::Semi)?;
+ }
+
+ Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
+ } else {
+ Ok(None)
+ }
+ }
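+
+ // Sketch of what the associated-macro parsing above accepts inside a
+ // `trait`, `impl`, or `extern` block (`m` is a hypothetical macro):
+ //
+ //     m! { body }        // brace delimiter: no trailing `;`
+ //     m!(body);          // non-brace delimiter: `;` is required
+ //     path::to::m!();    // multi-segment paths must be followed by `!`
+ //
+ // A single-segment path that is not followed by `!` is reported via
+ // `missing_assoc_item_kind_err` instead.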
+
+ fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
+ where F: FnOnce(&mut Self) -> PResult<'a, R>
+ {
+ // Record all tokens we parse when parsing this item.
+ let mut tokens = Vec::new();
+ let prev_collecting = match self.token_cursor.frame.last_token {
+ LastToken::Collecting(ref mut list) => {
+ Some(mem::replace(list, Vec::new()))
+ }
+ LastToken::Was(ref mut last) => {
+ tokens.extend(last.take());
+ None
+ }
+ };
+ self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
+ let prev = self.token_cursor.stack.len();
+ let ret = f(self);
+ let last_token = if self.token_cursor.stack.len() == prev {
+ &mut self.token_cursor.frame.last_token
+ } else {
+ &mut self.token_cursor.stack[prev].last_token
+ };
+
+ // Pull out the tokens that we've collected from the call to `f` above.
+ let mut collected_tokens = match *last_token {
+ LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
+ LastToken::Was(_) => panic!("our vector went away?"),
+ };
+
+ // If we're not at EOF our current token wasn't actually consumed by
+ // `f`, but it'll still be in our list that we pulled out. In that case
+ // put it back.
+ let extra_token = if self.token != token::Eof {
+ collected_tokens.pop()
+ } else {
+ None
+ };
+
+ // If we were previously collecting tokens, then this was a recursive
+ // call. In that case we need to record all the tokens we collected in
+ // our parent list as well. To do that we push a clone of our stream
+ // onto the previous list.
+ match prev_collecting {
+ Some(mut list) => {
+ list.extend(collected_tokens.iter().cloned());
+ list.extend(extra_token);
+ *last_token = LastToken::Collecting(list);
+ }
+ None => {
+ *last_token = LastToken::Was(extra_token);
+ }
+ }
+
+ Ok((ret?, TokenStream::new(collected_tokens)))
+ }
+
+ pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
+ let attrs = self.parse_outer_attributes()?;
+ self.parse_item_(attrs, true, false)
+ }
+
+ /// Checks whether the next tokens form an import coupler: `::{` or `::*`.
+ fn is_import_coupler(&mut self) -> bool {
+ self.check(&token::ModSep) &&
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
+ *t == token::BinOp(token::Star))
+ }
+
+ /// Parses a `UseTree`.
+ ///
+ /// ```
+ /// USE_TREE = [`::`] `*` |
+ /// [`::`] `{` USE_TREE_LIST `}` |
+ /// PATH `::` `*` |
+ /// PATH `::` `{` USE_TREE_LIST `}` |
+ /// PATH [`as` IDENT]
+ /// ```
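+ ///
+ /// For example, the productions above cover `use` items such as the
+ /// following (paths are illustrative):
+ ///
+ /// ```
+ /// use ::*;                                  // [`::`] `*`
+ /// use ::{foo, bar};                         // [`::`] `{` USE_TREE_LIST `}`
+ /// use std::collections::*;                  // PATH `::` `*`
+ /// use std::collections::{HashMap, HashSet}; // PATH `::` `{` USE_TREE_LIST `}`
+ /// use std::io::Result as IoResult;          // PATH [`as` IDENT]
+ /// ```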
+ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
+ let lo = self.span;
+
+ let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
+ let kind = if self.check(&token::OpenDelim(token::Brace)) ||
+ self.check(&token::BinOp(token::Star)) ||
+ self.is_import_coupler() {
+ // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
+ let mod_sep_ctxt = self.span.ctxt();
+ if self.eat(&token::ModSep) {
+ prefix.segments.push(
+ PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
+ );
+ }
+
+ if self.eat(&token::BinOp(token::Star)) {
+ UseTreeKind::Glob
+ } else {
+ UseTreeKind::Nested(self.parse_use_tree_list()?)
+ }
+ } else {
+ // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
+ prefix = self.parse_path(PathStyle::Mod)?;
+
+ if self.eat(&token::ModSep) {
+ if self.eat(&token::BinOp(token::Star)) {
+ UseTreeKind::Glob
+ } else {
+ UseTreeKind::Nested(self.parse_use_tree_list()?)
+ }
+ } else {
+ UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID)
+ }
+ };
+
+ Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
+ }
+
+ /// Parses a `UseTreeKind::Nested(list)`.
+ ///
+ /// ```
+ /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
+ /// ```
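+ ///
+ /// For example, the list may be empty and a trailing comma is allowed
+ /// (paths are illustrative):
+ ///
+ /// ```
+ /// use foo::{};
+ /// use foo::{bar, baz::*, qux as q,};
+ /// ```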
+ fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
+ self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
+ &token::CloseDelim(token::Brace),
+ SeqSep::trailing_allowed(token::Comma), |this| {
+ Ok((this.parse_use_tree()?, ast::DUMMY_NODE_ID))
+ })
+ }
+
+ fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
+ if self.eat_keyword(keywords::As) {
+ self.parse_ident_or_underscore().map(Some)
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Parses a source module as a crate. This is the main entry point for the parser.
+ pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
+ let lo = self.span;
+ let krate = Ok(ast::Crate {
+ attrs: self.parse_inner_attributes()?,
+ module: self.parse_mod_items(&token::Eof, lo)?,
+ span: lo.to(self.span),
+ });
+ emit_unclosed_delims(&self.unclosed_delims, self.diagnostic());
+ self.unclosed_delims.clear();
+ krate
+ }
+
+ pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
+ let ret = match self.token {
+ token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
+ token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
+ _ => return None
+ };
+ self.bump();
+ Some(ret)
+ }
+
+ pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
+ match self.parse_optional_str() {
+ Some((s, style, suf)) => {
+ let sp = self.prev_span;
+ self.expect_no_suffix(sp, "string literal", suf);
+ Ok((s, style))
+ }
+ _ => {
+ let msg = "expected string literal";
+ let mut err = self.fatal(msg);
+ err.span_label(self.span, msg);
+ Err(err)
+ }
+ }
+ }
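+
+ // For illustration, the string-literal forms recognized above (the resulting
+ // values are sketched, not exact):
+ //
+ //     "hello"          -> (Symbol("hello"), StrStyle::Cooked, no suffix)
+ //     r##"raw "str""## -> (Symbol(...),     StrStyle::Raw(2), no suffix)
+ //     "hello"suffix    -> the suffix is rejected via `expect_no_suffix`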
+}
+
+pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) {
+ for unmatched in unclosed_delims {
+ let mut err = handler.struct_span_err(unmatched.found_span, &format!(
+ "incorrect close delimiter: `{}`",
+ pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+ ));
+ err.span_label(unmatched.found_span, "incorrect close delimiter");
+ if let Some(sp) = unmatched.candidate_span {
+ err.span_label(sp, "close delimiter possibly meant for this");
+ }
+ if let Some(sp) = unmatched.unclosed_span {
+ err.span_label(sp, "un-closed delimiter");
+ }
+ err.emit();
+ }
+}
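+
+// For illustration, given a hypothetical input like `fn main() { (] }`, the
+// function above reports "incorrect close delimiter: `]`" at the `]`, labels
+// the `(` as the un-closed delimiter, and points at a candidate span when one
+// was recorded.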
diff --git a/src/tools/rust-analyzer/bench_data/numerous_macro_rules b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
new file mode 100644
index 000000000..bf89ed594
--- /dev/null
+++ b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
@@ -0,0 +1,560 @@
+macro_rules! __ra_macro_fixture0 {($T : ident )=>( int_module ! ($T , # [ stable ( feature = "rust1" , since = "1.0.0" )]);); ($T : ident , # [$attr : meta ])=>( doc_comment ! { concat ! ( "The smallest value that can be represented by this integer type.\nUse [`" , stringify ! ($T ), "::MIN" , "`](../../std/primitive." , stringify ! ($T ), ".html#associatedconstant.MIN) instead.\n\n# Examples\n\n```rust\n// deprecated way\nlet min = std::" , stringify ! ($T ), "::MIN;\n\n// intended way\nlet min = " , stringify ! ($T ), "::MIN;\n```\n" ), # [$attr ] pub const MIN : $T = $T :: MIN ; } doc_comment ! { concat ! ( "The largest value that can be represented by this integer type.\nUse [`" , stringify ! ($T ), "::MAX" , "`](../../std/primitive." , stringify ! ($T ), ".html#associatedconstant.MAX) instead.\n\n# Examples\n\n```rust\n// deprecated way\nlet max = std::" , stringify ! ($T ), "::MAX;\n\n// intended way\nlet max = " , stringify ! ($T ), "::MAX;\n```\n" ), # [$attr ] pub const MAX : $T = $T :: MAX ; })}
+macro_rules! __ra_macro_fixture1 {($($ty : ty : add ($addfn : path ), mul / div ($bigty : ident );)*)=>($(impl FullOps for $ty { fn full_add ( self , other : $ty , carry : bool )-> ( bool , $ty ){ let ( v , carry1 )= intrinsics :: add_with_overflow ( self , other ); let ( v , carry2 )= intrinsics :: add_with_overflow ( v , if carry { 1 } else { 0 }); ( carry1 || carry2 , v )} fn full_mul ( self , other : $ty , carry : $ty )-> ($ty , $ty ){ let v = ( self as $bigty )* ( other as $bigty )+ ( carry as $bigty ); (( v >> <$ty >:: BITS ) as $ty , v as $ty )} fn full_mul_add ( self , other : $ty , other2 : $ty , carry : $ty )-> ($ty , $ty ){ let v = ( self as $bigty )* ( other as $bigty )+ ( other2 as $bigty )+ ( carry as $bigty ); (( v >> <$ty >:: BITS ) as $ty , v as $ty )} fn full_div_rem ( self , other : $ty , borrow : $ty )-> ($ty , $ty ){ debug_assert ! ( borrow < other ); let lhs = (( borrow as $bigty )<< <$ty >:: BITS )| ( self as $bigty ); let rhs = other as $bigty ; (( lhs / rhs ) as $ty , ( lhs % rhs ) as $ty )}})* )}
+macro_rules! __ra_macro_fixture2 {($name : ident : type =$ty : ty , n =$n : expr )=>{# [ doc = " Stack-allocated arbitrary-precision (up to certain limit) integer." ]# [ doc = "" ]# [ doc = " This is backed by a fixed-size array of given type (\\\"digit\\\")." ]# [ doc = " While the array is not very large (normally some hundred bytes)," ]# [ doc = " copying it recklessly may result in the performance hit." ]# [ doc = " Thus this is intentionally not `Copy`." ]# [ doc = "" ]# [ doc = " All operations available to bignums panic in the case of overflows." ]# [ doc = " The caller is responsible to use large enough bignum types." ] pub struct $name {# [ doc = " One plus the offset to the maximum \\\"digit\\\" in use." ]# [ doc = " This does not decrease, so be aware of the computation order." ]# [ doc = " `base[size..]` should be zero." ] size : usize , # [ doc = " Digits. `[a, b, c, ...]` represents `a + b*2^W + c*2^(2W) + ...`" ]# [ doc = " where `W` is the number of bits in the digit type." ] base : [$ty ; $n ], } impl $name {# [ doc = " Makes a bignum from one digit." ] pub fn from_small ( v : $ty )-> $name { let mut base = [ 0 ; $n ]; base [ 0 ]= v ; $name { size : 1 , base : base }}# [ doc = " Makes a bignum from `u64` value." ] pub fn from_u64 ( mut v : u64 )-> $name { let mut base = [ 0 ; $n ]; let mut sz = 0 ; while v > 0 { base [ sz ]= v as $ty ; v >>= <$ty >:: BITS ; sz += 1 ; }$name { size : sz , base : base }}# [ doc = " Returns the internal digits as a slice `[a, b, c, ...]` such that the numeric" ]# [ doc = " value is `a + b * 2^W + c * 2^(2W) + ...` where `W` is the number of bits in" ]# [ doc = " the digit type." ] pub fn digits (& self )-> & [$ty ]{& self . base [.. self . size ]}# [ doc = " Returns the `i`-th bit where bit 0 is the least significant one." ]# [ doc = " In other words, the bit with weight `2^i`." ] pub fn get_bit (& self , i : usize )-> u8 { let digitbits = <$ty >:: BITS as usize ; let d = i / digitbits ; let b = i % digitbits ; (( self . base [ d ]>> b )& 1 ) as u8 }# [ doc = " Returns `true` if the bignum is zero." ] pub fn is_zero (& self )-> bool { self . digits (). iter (). all (|& v | v == 0 )}# [ doc = " Returns the number of bits necessary to represent this value. Note that zero" ]# [ doc = " is considered to need 0 bits." ] pub fn bit_length (& self )-> usize { let digits = self . digits (); let zeros = digits . iter (). rev (). take_while (|&& x | x == 0 ). count (); let end = digits . len ()- zeros ; let nonzero = & digits [.. end ]; if nonzero . is_empty (){ return 0 ; } let digitbits = <$ty >:: BITS as usize ; let mut i = nonzero . len ()* digitbits - 1 ; while self . get_bit ( i )== 0 { i -= 1 ; } i + 1 }# [ doc = " Adds `other` to itself and returns its own mutable reference." ] pub fn add < 'a > (& 'a mut self , other : &$name )-> & 'a mut $name { use crate :: cmp ; use crate :: num :: bignum :: FullOps ; let mut sz = cmp :: max ( self . size , other . size ); let mut carry = false ; for ( a , b ) in self . base [.. sz ]. iter_mut (). zip (& other . base [.. sz ]){ let ( c , v )= (* a ). full_add (* b , carry ); * a = v ; carry = c ; } if carry { self . base [ sz ]= 1 ; sz += 1 ; } self . size = sz ; self } pub fn add_small (& mut self , other : $ty )-> & mut $name { use crate :: num :: bignum :: FullOps ; let ( mut carry , v )= self . base [ 0 ]. full_add ( other , false ); self . base [ 0 ]= v ; let mut i = 1 ; while carry { let ( c , v )= self . base [ i ]. full_add ( 0 , carry ); self . base [ i ]= v ; carry = c ; i += 1 ; } if i > self . 
size { self . size = i ; } self }# [ doc = " Subtracts `other` from itself and returns its own mutable reference." ] pub fn sub < 'a > (& 'a mut self , other : &$name )-> & 'a mut $name { use crate :: cmp ; use crate :: num :: bignum :: FullOps ; let sz = cmp :: max ( self . size , other . size ); let mut noborrow = true ; for ( a , b ) in self . base [.. sz ]. iter_mut (). zip (& other . base [.. sz ]){ let ( c , v )= (* a ). full_add (!* b , noborrow ); * a = v ; noborrow = c ; } assert ! ( noborrow ); self . size = sz ; self }# [ doc = " Multiplies itself by a digit-sized `other` and returns its own" ]# [ doc = " mutable reference." ] pub fn mul_small (& mut self , other : $ty )-> & mut $name { use crate :: num :: bignum :: FullOps ; let mut sz = self . size ; let mut carry = 0 ; for a in & mut self . base [.. sz ]{ let ( c , v )= (* a ). full_mul ( other , carry ); * a = v ; carry = c ; } if carry > 0 { self . base [ sz ]= carry ; sz += 1 ; } self . size = sz ; self }# [ doc = " Multiplies itself by `2^bits` and returns its own mutable reference." ] pub fn mul_pow2 (& mut self , bits : usize )-> & mut $name { let digitbits = <$ty >:: BITS as usize ; let digits = bits / digitbits ; let bits = bits % digitbits ; assert ! ( digits < $n ); debug_assert ! ( self . base [$n - digits ..]. iter (). all (|& v | v == 0 )); debug_assert ! ( bits == 0 || ( self . base [$n - digits - 1 ]>> ( digitbits - bits ))== 0 ); for i in ( 0 .. self . size ). rev (){ self . base [ i + digits ]= self . base [ i ]; } for i in 0 .. digits { self . base [ i ]= 0 ; } let mut sz = self . size + digits ; if bits > 0 { let last = sz ; let overflow = self . base [ last - 1 ]>> ( digitbits - bits ); if overflow > 0 { self . base [ last ]= overflow ; sz += 1 ; } for i in ( digits + 1 .. last ). rev (){ self . base [ i ]= ( self . base [ i ]<< bits )| ( self . base [ i - 1 ]>> ( digitbits - bits )); } self . base [ digits ]<<= bits ; } self . size = sz ; self }# [ doc = " Multiplies itself by `5^e` and returns its own mutable reference." ] pub fn mul_pow5 (& mut self , mut e : usize )-> & mut $name { use crate :: mem ; use crate :: num :: bignum :: SMALL_POW5 ; let table_index = mem :: size_of ::<$ty > (). trailing_zeros () as usize ; let ( small_power , small_e )= SMALL_POW5 [ table_index ]; let small_power = small_power as $ty ; while e >= small_e { self . mul_small ( small_power ); e -= small_e ; } let mut rest_power = 1 ; for _ in 0 .. e { rest_power *= 5 ; } self . mul_small ( rest_power ); self }# [ doc = " Multiplies itself by a number described by `other[0] + other[1] * 2^W +" ]# [ doc = " other[2] * 2^(2W) + ...` (where `W` is the number of bits in the digit type)" ]# [ doc = " and returns its own mutable reference." ] pub fn mul_digits < 'a > (& 'a mut self , other : & [$ty ])-> & 'a mut $name { fn mul_inner ( ret : & mut [$ty ; $n ], aa : & [$ty ], bb : & [$ty ])-> usize { use crate :: num :: bignum :: FullOps ; let mut retsz = 0 ; for ( i , & a ) in aa . iter (). enumerate (){ if a == 0 { continue ; } let mut sz = bb . len (); let mut carry = 0 ; for ( j , & b ) in bb . iter (). enumerate (){ let ( c , v )= a . full_mul_add ( b , ret [ i + j ], carry ); ret [ i + j ]= v ; carry = c ; } if carry > 0 { ret [ i + sz ]= carry ; sz += 1 ; } if retsz < i + sz { retsz = i + sz ; }} retsz } let mut ret = [ 0 ; $n ]; let retsz = if self . size < other . len (){ mul_inner (& mut ret , & self . digits (), other )} else { mul_inner (& mut ret , other , & self . digits ())}; self . base = ret ; self . 
size = retsz ; self }# [ doc = " Divides itself by a digit-sized `other` and returns its own" ]# [ doc = " mutable reference *and* the remainder." ] pub fn div_rem_small (& mut self , other : $ty )-> (& mut $name , $ty ){ use crate :: num :: bignum :: FullOps ; assert ! ( other > 0 ); let sz = self . size ; let mut borrow = 0 ; for a in self . base [.. sz ]. iter_mut (). rev (){ let ( q , r )= (* a ). full_div_rem ( other , borrow ); * a = q ; borrow = r ; }( self , borrow )}# [ doc = " Divide self by another bignum, overwriting `q` with the quotient and `r` with the" ]# [ doc = " remainder." ] pub fn div_rem (& self , d : &$name , q : & mut $name , r : & mut $name ){ assert ! (! d . is_zero ()); let digitbits = <$ty >:: BITS as usize ; for digit in & mut q . base [..]{* digit = 0 ; } for digit in & mut r . base [..]{* digit = 0 ; } r . size = d . size ; q . size = 1 ; let mut q_is_zero = true ; let end = self . bit_length (); for i in ( 0 .. end ). rev (){ r . mul_pow2 ( 1 ); r . base [ 0 ]|= self . get_bit ( i ) as $ty ; if &* r >= d { r . sub ( d ); let digit_idx = i / digitbits ; let bit_idx = i % digitbits ; if q_is_zero { q . size = digit_idx + 1 ; q_is_zero = false ; } q . base [ digit_idx ]|= 1 << bit_idx ; }} debug_assert ! ( q . base [ q . size ..]. iter (). all (|& d | d == 0 )); debug_assert ! ( r . base [ r . size ..]. iter (). all (|& d | d == 0 )); }} impl crate :: cmp :: PartialEq for $name { fn eq (& self , other : &$name )-> bool { self . base [..]== other . base [..]}} impl crate :: cmp :: Eq for $name {} impl crate :: cmp :: PartialOrd for $name { fn partial_cmp (& self , other : &$name )-> crate :: option :: Option < crate :: cmp :: Ordering > { crate :: option :: Option :: Some ( self . cmp ( other ))}} impl crate :: cmp :: Ord for $name { fn cmp (& self , other : &$name )-> crate :: cmp :: Ordering { use crate :: cmp :: max ; let sz = max ( self . size , other . size ); let lhs = self . base [.. sz ]. iter (). cloned (). rev (); let rhs = other . base [.. sz ]. iter (). cloned (). rev (); lhs . cmp ( rhs )}} impl crate :: clone :: Clone for $name { fn clone (& self )-> Self { Self { size : self . size , base : self . base }}} impl crate :: fmt :: Debug for $name { fn fmt (& self , f : & mut crate :: fmt :: Formatter < '_ >)-> crate :: fmt :: Result { let sz = if self . size < 1 { 1 } else { self . size }; let digitlen = <$ty >:: BITS as usize / 4 ; write ! ( f , "{:#x}" , self . base [ sz - 1 ])?; for & v in self . base [.. sz - 1 ]. iter (). rev (){ write ! ( f , "_{:01$x}" , v , digitlen )?; } crate :: result :: Result :: Ok (())}}}; }
+macro_rules! __ra_macro_fixture3 {($t : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl FromStr for $t { type Err = ParseFloatError ; # [ doc = " Converts a string in base 10 to a float." ]# [ doc = " Accepts an optional decimal exponent." ]# [ doc = "" ]# [ doc = " This function accepts strings such as" ]# [ doc = "" ]# [ doc = " * \\\'3.14\\\'" ]# [ doc = " * \\\'-3.14\\\'" ]# [ doc = " * \\\'2.5E10\\\', or equivalently, \\\'2.5e10\\\'" ]# [ doc = " * \\\'2.5E-10\\\'" ]# [ doc = " * \\\'5.\\\'" ]# [ doc = " * \\\'.5\\\', or, equivalently, \\\'0.5\\\'" ]# [ doc = " * \\\'inf\\\', \\\'-inf\\\', \\\'NaN\\\'" ]# [ doc = "" ]# [ doc = " Leading and trailing whitespace represent an error." ]# [ doc = "" ]# [ doc = " # Grammar" ]# [ doc = "" ]# [ doc = " All strings that adhere to the following [EBNF] grammar" ]# [ doc = " will result in an [`Ok`] being returned:" ]# [ doc = "" ]# [ doc = " ```txt" ]# [ doc = " Float ::= Sign? ( \\\'inf\\\' | \\\'NaN\\\' | Number )" ]# [ doc = " Number ::= ( Digit+ |" ]# [ doc = " Digit+ \\\'.\\\' Digit* |" ]# [ doc = " Digit* \\\'.\\\' Digit+ ) Exp?" ]# [ doc = " Exp ::= [eE] Sign? Digit+" ]# [ doc = " Sign ::= [+-]" ]# [ doc = " Digit ::= [0-9]" ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " [EBNF]: https://www.w3.org/TR/REC-xml/#sec-notation" ]# [ doc = "" ]# [ doc = " # Known bugs" ]# [ doc = "" ]# [ doc = " In some situations, some strings that should create a valid float" ]# [ doc = " instead return an error. See [issue #31407] for details." ]# [ doc = "" ]# [ doc = " [issue #31407]: https://github.com/rust-lang/rust/issues/31407" ]# [ doc = "" ]# [ doc = " # Arguments" ]# [ doc = "" ]# [ doc = " * src - A string" ]# [ doc = "" ]# [ doc = " # Return value" ]# [ doc = "" ]# [ doc = " `Err(ParseFloatError)` if the string did not represent a valid" ]# [ doc = " number. Otherwise, `Ok(n)` where `n` is the floating-point" ]# [ doc = " number represented by `src`." ]# [ inline ] fn from_str ( src : & str )-> Result < Self , ParseFloatError > { dec2flt ( src )}}}; }
+macro_rules! __ra_macro_fixture4 {($(# [$stability : meta ]$Ty : ident ($Int : ty ); )+ )=>{$(doc_comment ! { concat ! ( "An integer that is known not to equal zero.\n\nThis enables some memory layout optimization.\nFor example, `Option<" , stringify ! ($Ty ), ">` is the same size as `" , stringify ! ($Int ), "`:\n\n```rust\nuse std::mem::size_of;\nassert_eq!(size_of::<Option<core::num::" , stringify ! ($Ty ), ">>(), size_of::<" , stringify ! ($Int ), ">());\n```" ), # [$stability ]# [ derive ( Copy , Clone , Eq , PartialEq , Ord , PartialOrd , Hash )]# [ repr ( transparent )]# [ rustc_layout_scalar_valid_range_start ( 1 )]# [ rustc_nonnull_optimization_guaranteed ] pub struct $Ty ($Int ); } impl $Ty {# [ doc = " Creates a non-zero without checking the value." ]# [ doc = "" ]# [ doc = " # Safety" ]# [ doc = "" ]# [ doc = " The value must not be zero." ]# [$stability ]# [ rustc_const_stable ( feature = "nonzero" , since = "1.34.0" )]# [ inline ] pub const unsafe fn new_unchecked ( n : $Int )-> Self { unsafe { Self ( n )}}# [ doc = " Creates a non-zero if the given value is not zero." ]# [$stability ]# [ rustc_const_stable ( feature = "const_nonzero_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn new ( n : $Int )-> Option < Self > { if n != 0 { Some ( unsafe { Self ( n )})} else { None }}# [ doc = " Returns the value as a primitive type." ]# [$stability ]# [ inline ]# [ rustc_const_stable ( feature = "nonzero" , since = "1.34.0" )] pub const fn get ( self )-> $Int { self . 0 }}# [ stable ( feature = "from_nonzero" , since = "1.31.0" )] impl From <$Ty > for $Int { doc_comment ! { concat ! ( "Converts a `" , stringify ! ($Ty ), "` into an `" , stringify ! ($Int ), "`" ), # [ inline ] fn from ( nonzero : $Ty )-> Self { nonzero . 0 }}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOr for $Ty { type Output = Self ; # [ inline ] fn bitor ( self , rhs : Self )-> Self :: Output { unsafe {$Ty :: new_unchecked ( self . get ()| rhs . get ())}}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOr <$Int > for $Ty { type Output = Self ; # [ inline ] fn bitor ( self , rhs : $Int )-> Self :: Output { unsafe {$Ty :: new_unchecked ( self . get ()| rhs )}}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOr <$Ty > for $Int { type Output = $Ty ; # [ inline ] fn bitor ( self , rhs : $Ty )-> Self :: Output { unsafe {$Ty :: new_unchecked ( self | rhs . get ())}}}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOrAssign for $Ty {# [ inline ] fn bitor_assign (& mut self , rhs : Self ){* self = * self | rhs ; }}# [ stable ( feature = "nonzero_bitor" , since = "1.45.0" )] impl BitOrAssign <$Int > for $Ty {# [ inline ] fn bitor_assign (& mut self , rhs : $Int ){* self = * self | rhs ; }} impl_nonzero_fmt ! {# [$stability ]( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for $Ty })+ }}
+macro_rules! __ra_macro_fixture5 {($($t : ty )*)=>{$(# [ stable ( feature = "nonzero_parse" , since = "1.35.0" )] impl FromStr for $t { type Err = ParseIntError ; fn from_str ( src : & str )-> Result < Self , Self :: Err > { Self :: new ( from_str_radix ( src , 10 )?). ok_or ( ParseIntError { kind : IntErrorKind :: Zero })}})*}}
+macro_rules! __ra_macro_fixture6 {($($t : ident )*)=>($(sh_impl_unsigned ! {$t , usize })*)}
+macro_rules! __ra_macro_fixture7 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Add for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn add ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_add ( other . 0 ))}} forward_ref_binop ! { impl Add , add for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl AddAssign for Wrapping <$t > {# [ inline ] fn add_assign (& mut self , other : Wrapping <$t >){* self = * self + other ; }} forward_ref_op_assign ! { impl AddAssign , add_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Sub for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn sub ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_sub ( other . 0 ))}} forward_ref_binop ! { impl Sub , sub for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl SubAssign for Wrapping <$t > {# [ inline ] fn sub_assign (& mut self , other : Wrapping <$t >){* self = * self - other ; }} forward_ref_op_assign ! { impl SubAssign , sub_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Mul for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn mul ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_mul ( other . 0 ))}} forward_ref_binop ! { impl Mul , mul for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl MulAssign for Wrapping <$t > {# [ inline ] fn mul_assign (& mut self , other : Wrapping <$t >){* self = * self * other ; }} forward_ref_op_assign ! { impl MulAssign , mul_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "wrapping_div" , since = "1.3.0" )] impl Div for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn div ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_div ( other . 0 ))}} forward_ref_binop ! { impl Div , div for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl DivAssign for Wrapping <$t > {# [ inline ] fn div_assign (& mut self , other : Wrapping <$t >){* self = * self / other ; }} forward_ref_op_assign ! { impl DivAssign , div_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "wrapping_impls" , since = "1.7.0" )] impl Rem for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn rem ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_rem ( other . 0 ))}} forward_ref_binop ! { impl Rem , rem for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl RemAssign for Wrapping <$t > {# [ inline ] fn rem_assign (& mut self , other : Wrapping <$t >){* self = * self % other ; }} forward_ref_op_assign ! { impl RemAssign , rem_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Not for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn not ( self )-> Wrapping <$t > { Wrapping (! self . 0 )}} forward_ref_unop ! 
{ impl Not , not for Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitXor for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn bitxor ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 ^ other . 0 )}} forward_ref_binop ! { impl BitXor , bitxor for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitXorAssign for Wrapping <$t > {# [ inline ] fn bitxor_assign (& mut self , other : Wrapping <$t >){* self = * self ^ other ; }} forward_ref_op_assign ! { impl BitXorAssign , bitxor_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitOr for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn bitor ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 | other . 0 )}} forward_ref_binop ! { impl BitOr , bitor for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitOrAssign for Wrapping <$t > {# [ inline ] fn bitor_assign (& mut self , other : Wrapping <$t >){* self = * self | other ; }} forward_ref_op_assign ! { impl BitOrAssign , bitor_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitAnd for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn bitand ( self , other : Wrapping <$t >)-> Wrapping <$t > { Wrapping ( self . 0 & other . 0 )}} forward_ref_binop ! { impl BitAnd , bitand for Wrapping <$t >, Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitAndAssign for Wrapping <$t > {# [ inline ] fn bitand_assign (& mut self , other : Wrapping <$t >){* self = * self & other ; }} forward_ref_op_assign ! { impl BitAndAssign , bitand_assign for Wrapping <$t >, Wrapping <$t > }# [ stable ( feature = "wrapping_neg" , since = "1.10.0" )] impl Neg for Wrapping <$t > { type Output = Self ; # [ inline ] fn neg ( self )-> Self { Wrapping ( 0 )- self }} forward_ref_unop ! { impl Neg , neg for Wrapping <$t >, # [ stable ( feature = "wrapping_ref" , since = "1.14.0" )]})*)}
+macro_rules! __ra_macro_fixture8 {($($t : ty )*)=>($(impl Wrapping <$t > { doc_comment ! { concat ! ( "Returns the smallest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(<Wrapping<" , stringify ! ($t ), ">>::MIN, Wrapping(" , stringify ! ($t ), "::MIN));\n```" ), # [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const MIN : Self = Self (<$t >:: MIN ); } doc_comment ! { concat ! ( "Returns the largest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(<Wrapping<" , stringify ! ($t ), ">>::MAX, Wrapping(" , stringify ! ($t ), "::MAX));\n```" ), # [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const MAX : Self = Self (<$t >:: MAX ); } doc_comment ! { concat ! ( "Returns the number of ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0b01001100" , stringify ! ($t ), ");\n\nassert_eq!(n.count_ones(), 3);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn count_ones ( self )-> u32 { self . 0 . count_ones ()}} doc_comment ! { concat ! ( "Returns the number of zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(!0" , stringify ! ($t ), ").count_zeros(), 0);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn count_zeros ( self )-> u32 { self . 0 . count_zeros ()}} doc_comment ! { concat ! ( "Returns the number of trailing zeros in the binary representation\nof `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0b0101000" , stringify ! ($t ), ");\n\nassert_eq!(n.trailing_zeros(), 3);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn trailing_zeros ( self )-> u32 { self . 0 . trailing_zeros ()}}# [ doc = " Shifts the bits to the left by a specified amount, `n`," ]# [ doc = " wrapping the truncated bits to the end of the resulting" ]# [ doc = " integer." ]# [ doc = "" ]# [ doc = " Please note this isn\\\'t the same operation as the `<<` shifting" ]# [ doc = " operator!" ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![feature(wrapping_int_impl)]" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n: Wrapping<i64> = Wrapping(0x0123456789ABCDEF);" ]# [ doc = " let m: Wrapping<i64> = Wrapping(-0x76543210FEDCBA99);" ]# [ doc = "" ]# [ doc = " assert_eq!(n.rotate_left(32), m);" ]# [ doc = " ```" ]# [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn rotate_left ( self , n : u32 )-> Self { Wrapping ( self . 0 . rotate_left ( n ))}# [ doc = " Shifts the bits to the right by a specified amount, `n`," ]# [ doc = " wrapping the truncated bits to the beginning of the resulting" ]# [ doc = " integer." ]# [ doc = "" ]# [ doc = " Please note this isn\\\'t the same operation as the `>>` shifting" ]# [ doc = " operator!" 
]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![feature(wrapping_int_impl)]" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n: Wrapping<i64> = Wrapping(0x0123456789ABCDEF);" ]# [ doc = " let m: Wrapping<i64> = Wrapping(-0xFEDCBA987654322);" ]# [ doc = "" ]# [ doc = " assert_eq!(n.rotate_right(4), m);" ]# [ doc = " ```" ]# [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn rotate_right ( self , n : u32 )-> Self { Wrapping ( self . 0 . rotate_right ( n ))}# [ doc = " Reverses the byte order of the integer." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![feature(wrapping_int_impl)]" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n: Wrapping<i16> = Wrapping(0b0000000_01010101);" ]# [ doc = " assert_eq!(n, Wrapping(85));" ]# [ doc = "" ]# [ doc = " let m = n.swap_bytes();" ]# [ doc = "" ]# [ doc = " assert_eq!(m, Wrapping(0b01010101_00000000));" ]# [ doc = " assert_eq!(m, Wrapping(21760));" ]# [ doc = " ```" ]# [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn swap_bytes ( self )-> Self { Wrapping ( self . 0 . swap_bytes ())}# [ doc = " Reverses the bit pattern of the integer." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `i16` is used here." ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use std::num::Wrapping;" ]# [ doc = "" ]# [ doc = " let n = Wrapping(0b0000000_01010101i16);" ]# [ doc = " assert_eq!(n, Wrapping(85));" ]# [ doc = "" ]# [ doc = " let m = n.reverse_bits();" ]# [ doc = "" ]# [ doc = " assert_eq!(m.0 as u16, 0b10101010_00000000);" ]# [ doc = " assert_eq!(m, Wrapping(-22016));" ]# [ doc = " ```" ]# [ stable ( feature = "reverse_bits" , since = "1.37.0" )]# [ rustc_const_stable ( feature = "const_reverse_bits" , since = "1.37.0" )]# [ inline ]# [ must_use ] pub const fn reverse_bits ( self )-> Self { Wrapping ( self . 0 . reverse_bits ())} doc_comment ! { concat ! ( "Converts an integer from big endian to the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(<Wrapping<" , stringify ! ($t ), ">>::from_be(n), n)\n} else {\n assert_eq!(<Wrapping<" , stringify ! ($t ), ">>::from_be(n), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn from_be ( x : Self )-> Self { Wrapping (<$t >:: from_be ( x . 0 ))}} doc_comment ! { concat ! ( "Converts an integer from little endian to the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(<Wrapping<" , stringify ! ($t ), ">>::from_le(n), n)\n} else {\n assert_eq!(<Wrapping<" , stringify ! 
($t ), ">>::from_le(n), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn from_le ( x : Self )-> Self { Wrapping (<$t >:: from_le ( x . 0 ))}} doc_comment ! { concat ! ( "Converts `self` to big endian from the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(n.to_be(), n)\n} else {\n assert_eq!(n.to_be(), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn to_be ( self )-> Self { Wrapping ( self . 0 . to_be ())}} doc_comment ! { concat ! ( "Converts `self` to little endian from the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(0x1A" , stringify ! ($t ), ");\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(n.to_le(), n)\n} else {\n assert_eq!(n.to_le(), n.swap_bytes())\n}\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn to_le ( self )-> Self { Wrapping ( self . 0 . to_le ())}} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(3" , stringify ! ($t ), ").pow(4), Wrapping(81));\n```\n\nResults that are too large are wrapped:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(3i8).pow(5), Wrapping(-13));\nassert_eq!(Wrapping(3i8).pow(6), Wrapping(-39));\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn pow ( self , exp : u32 )-> Self { Wrapping ( self . 0 . wrapping_pow ( exp ))}}})*)}
+macro_rules! __ra_macro_fixture9 {($($t : ty )*)=>($(impl Wrapping <$t > { doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(" , stringify ! ($t ), "::MAX) >> 2;\n\nassert_eq!(n.leading_zeros(), 3);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn leading_zeros ( self )-> u32 { self . 0 . leading_zeros ()}} doc_comment ! { concat ! ( "Computes the absolute value of `self`, wrapping around at\nthe boundary of the type.\n\nThe only case where such wrapping can occur is when one takes the absolute value of the negative\nminimal value for the type this is a positive value that is too large to represent in the type. In\nsuch a case, this function returns `MIN` itself.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(100" , stringify ! ($t ), ").abs(), Wrapping(100));\nassert_eq!(Wrapping(-100" , stringify ! ($t ), ").abs(), Wrapping(100));\nassert_eq!(Wrapping(" , stringify ! ($t ), "::MIN).abs(), Wrapping(" , stringify ! ($t ), "::MIN));\nassert_eq!(Wrapping(-128i8).abs().0 as u8, 128u8);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn abs ( self )-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_abs ())}} doc_comment ! { concat ! ( "Returns a number representing sign of `self`.\n\n - `0` if the number is zero\n - `1` if the number is positive\n - `-1` if the number is negative\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(10" , stringify ! ($t ), ").signum(), Wrapping(1));\nassert_eq!(Wrapping(0" , stringify ! ($t ), ").signum(), Wrapping(0));\nassert_eq!(Wrapping(-10" , stringify ! ($t ), ").signum(), Wrapping(-1));\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn signum ( self )-> Wrapping <$t > { Wrapping ( self . 0 . signum ())}} doc_comment ! { concat ! ( "Returns `true` if `self` is positive and `false` if the number is zero or\nnegative.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert!(Wrapping(10" , stringify ! ($t ), ").is_positive());\nassert!(!Wrapping(-10" , stringify ! ($t ), ").is_positive());\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn is_positive ( self )-> bool { self . 0 . is_positive ()}} doc_comment ! { concat ! ( "Returns `true` if `self` is negative and `false` if the number is zero or\npositive.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert!(Wrapping(-10" , stringify ! ($t ), ").is_negative());\nassert!(!Wrapping(10" , stringify ! ($t ), ").is_negative());\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn is_negative ( self )-> bool { self . 0 . is_negative ()}}})*)}
+macro_rules! __ra_macro_fixture10 {($($t : ty )*)=>($(impl Wrapping <$t > { doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nlet n = Wrapping(" , stringify ! ($t ), "::MAX) >> 2;\n\nassert_eq!(n.leading_zeros(), 2);\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub const fn leading_zeros ( self )-> u32 { self . 0 . leading_zeros ()}} doc_comment ! { concat ! ( "Returns `true` if and only if `self == 2^k` for some `k`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_int_impl)]\nuse std::num::Wrapping;\n\nassert!(Wrapping(16" , stringify ! ($t ), ").is_power_of_two());\nassert!(!Wrapping(10" , stringify ! ($t ), ").is_power_of_two());\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_int_impl" , issue = "32463" )] pub fn is_power_of_two ( self )-> bool { self . 0 . is_power_of_two ()}} doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `self`.\n\nWhen return value overflows (i.e., `self > (1 << (N-1))` for type\n`uN`), overflows to `2^N = 0`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_next_power_of_two)]\nuse std::num::Wrapping;\n\nassert_eq!(Wrapping(2" , stringify ! ($t ), ").next_power_of_two(), Wrapping(2));\nassert_eq!(Wrapping(3" , stringify ! ($t ), ").next_power_of_two(), Wrapping(4));\nassert_eq!(Wrapping(200_u8).next_power_of_two(), Wrapping(0));\n```" ), # [ inline ]# [ unstable ( feature = "wrapping_next_power_of_two" , issue = "32463" , reason = "needs decision on wrapping behaviour" )] pub fn next_power_of_two ( self )-> Self { Wrapping ( self . 0 . wrapping_next_power_of_two ())}}})*)}
+macro_rules! __ra_macro_fixture11 {($($t : ty )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl FromStr for $t { type Err = ParseIntError ; fn from_str ( src : & str )-> Result < Self , ParseIntError > { from_str_radix ( src , 10 )}})*}}
+macro_rules! __ra_macro_fixture12 {($($t : ty )*)=>($(impl FromStrRadixHelper for $t {# [ inline ] fn min_value ()-> Self { Self :: MIN }# [ inline ] fn max_value ()-> Self { Self :: MAX }# [ inline ] fn from_u32 ( u : u32 )-> Self { u as Self }# [ inline ] fn checked_mul (& self , other : u32 )-> Option < Self > { Self :: checked_mul (* self , other as Self )}# [ inline ] fn checked_sub (& self , other : u32 )-> Option < Self > { Self :: checked_sub (* self , other as Self )}# [ inline ] fn checked_add (& self , other : u32 )-> Option < Self > { Self :: checked_add (* self , other as Self )}})*)}
+macro_rules! __ra_macro_fixture13 {($($Arg : ident ),+)=>{ fnptr_impls_safety_abi ! { extern "Rust" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { extern "C" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { extern "C" fn ($($Arg ),+ , ...)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { unsafe extern "Rust" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { unsafe extern "C" fn ($($Arg ),+)-> Ret , $($Arg ),+ } fnptr_impls_safety_abi ! { unsafe extern "C" fn ($($Arg ),+ , ...)-> Ret , $($Arg ),+ }}; ()=>{ fnptr_impls_safety_abi ! { extern "Rust" fn ()-> Ret , } fnptr_impls_safety_abi ! { extern "C" fn ()-> Ret , } fnptr_impls_safety_abi ! { unsafe extern "Rust" fn ()-> Ret , } fnptr_impls_safety_abi ! { unsafe extern "C" fn ()-> Ret , }}; }
+macro_rules! __ra_macro_fixture14 {($($t : ty )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Clone for $t {# [ inline ] fn clone (& self )-> Self {* self }})* }}
+macro_rules! __ra_macro_fixture15 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl PartialEq for $t {# [ inline ] fn eq (& self , other : &$t )-> bool {(* self )== (* other )}# [ inline ] fn ne (& self , other : &$t )-> bool {(* self )!= (* other )}})*)}
+macro_rules! __ra_macro_fixture16 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Eq for $t {})*)}
+macro_rules! __ra_macro_fixture17 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl PartialOrd for $t {# [ inline ] fn partial_cmp (& self , other : &$t )-> Option < Ordering > { match ( self <= other , self >= other ){( false , false )=> None , ( false , true )=> Some ( Greater ), ( true , false )=> Some ( Less ), ( true , true )=> Some ( Equal ), }}# [ inline ] fn lt (& self , other : &$t )-> bool {(* self )< (* other )}# [ inline ] fn le (& self , other : &$t )-> bool {(* self )<= (* other )}# [ inline ] fn ge (& self , other : &$t )-> bool {(* self )>= (* other )}# [ inline ] fn gt (& self , other : &$t )-> bool {(* self )> (* other )}})*)}
+macro_rules! __ra_macro_fixture18 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl PartialOrd for $t {# [ inline ] fn partial_cmp (& self , other : &$t )-> Option < Ordering > { Some ( self . cmp ( other ))}# [ inline ] fn lt (& self , other : &$t )-> bool {(* self )< (* other )}# [ inline ] fn le (& self , other : &$t )-> bool {(* self )<= (* other )}# [ inline ] fn ge (& self , other : &$t )-> bool {(* self )>= (* other )}# [ inline ] fn gt (& self , other : &$t )-> bool {(* self )> (* other )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Ord for $t {# [ inline ] fn cmp (& self , other : &$t )-> Ordering { if * self < * other { Less } else if * self == * other { Equal } else { Greater }}})*)}
+macro_rules! __ra_macro_fixture19 {($Float : ident =>$($Int : ident )+ )=>{# [ unstable ( feature = "convert_float_to_int" , issue = "67057" )] impl private :: Sealed for $Float {}$(# [ unstable ( feature = "convert_float_to_int" , issue = "67057" )] impl FloatToInt <$Int > for $Float {# [ doc ( hidden )]# [ inline ] unsafe fn to_int_unchecked ( self )-> $Int { unsafe { crate :: intrinsics :: float_to_int_unchecked ( self )}}})+ }}
+macro_rules! __ra_macro_fixture20 {($target : ty , # [$attr : meta ])=>{ impl_from ! ( bool , $target , # [$attr ], concat ! ( "Converts a `bool` to a `" , stringify ! ($target ), "`. The resulting value is `0` for `false` and `1` for `true`\nvalues.\n\n# Examples\n\n```\nassert_eq!(" , stringify ! ($target ), "::from(true), 1);\nassert_eq!(" , stringify ! ($target ), "::from(false), 0);\n```" )); }; }
+macro_rules! __ra_macro_fixture21 {($Small : ty , $Large : ty , # [$attr : meta ], $doc : expr )=>{# [$attr ]# [ doc = $doc ] impl From <$Small > for $Large {# [ inline ] fn from ( small : $Small )-> Self { small as Self }}}; ($Small : ty , $Large : ty , # [$attr : meta ])=>{ impl_from ! ($Small , $Large , # [$attr ], concat ! ( "Converts `" , stringify ! ($Small ), "` to `" , stringify ! ($Large ), "` losslessly." )); }}
+macro_rules! __ra_macro_fixture22 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( u : $source )-> Result < Self , Self :: Error > { if u > ( Self :: MAX as $source ){ Err ( TryFromIntError (()))} else { Ok ( u as Self )}}})*}}
+macro_rules! __ra_macro_fixture23 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( u : $source )-> Result < Self , Self :: Error > { let min = Self :: MIN as $source ; let max = Self :: MAX as $source ; if u < min || u > max { Err ( TryFromIntError (()))} else { Ok ( u as Self )}}})*}}
+macro_rules! __ra_macro_fixture24 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( u : $source )-> Result < Self , Self :: Error > { if u >= 0 { Ok ( u as Self )} else { Err ( TryFromIntError (()))}}})*}}
+macro_rules! __ra_macro_fixture25 {($source : ty , $($target : ty ),*)=>{$(# [ stable ( feature = "try_from" , since = "1.34.0" )] impl TryFrom <$source > for $target { type Error = TryFromIntError ; # [ doc = " Try to create the target number type from a source" ]# [ doc = " number type. This returns an error if the source value" ]# [ doc = " is outside of the range of the target type." ]# [ inline ] fn try_from ( value : $source )-> Result < Self , Self :: Error > { Ok ( value as Self )}})*}}
+macro_rules! __ra_macro_fixture26 {($mac : ident , $source : ty , $($target : ty ),*)=>{$($mac ! ($target , $source ); )*}}
+macro_rules! __ra_macro_fixture27 {($Small : ty , $Large : ty , # [$attr : meta ], $doc : expr )=>{# [$attr ]# [ doc = $doc ] impl From <$Small > for $Large {# [ inline ] fn from ( small : $Small )-> Self { unsafe { Self :: new_unchecked ( small . get (). into ())}}}}; ($Small : ty , $Large : ty , # [$attr : meta ])=>{ nzint_impl_from ! ($Small , $Large , # [$attr ], concat ! ( "Converts `" , stringify ! ($Small ), "` to `" , stringify ! ($Large ), "` losslessly." )); }}
+macro_rules! __ra_macro_fixture28 {($Int : ty , $NonZeroInt : ty , # [$attr : meta ], $doc : expr )=>{# [$attr ]# [ doc = $doc ] impl TryFrom <$Int > for $NonZeroInt { type Error = TryFromIntError ; # [ inline ] fn try_from ( value : $Int )-> Result < Self , Self :: Error > { Self :: new ( value ). ok_or ( TryFromIntError (()))}}}; ($Int : ty , $NonZeroInt : ty , # [$attr : meta ])=>{ nzint_impl_try_from_int ! ($Int , $NonZeroInt , # [$attr ], concat ! ( "Attempts to convert `" , stringify ! ($Int ), "` to `" , stringify ! ($NonZeroInt ), "`." )); }}
+macro_rules! __ra_macro_fixture29 {($From : ty =>$To : ty , $doc : expr )=>{# [ stable ( feature = "nzint_try_from_nzint_conv" , since = "1.49.0" )]# [ doc = $doc ] impl TryFrom <$From > for $To { type Error = TryFromIntError ; # [ inline ] fn try_from ( value : $From )-> Result < Self , Self :: Error > { TryFrom :: try_from ( value . get ()). map (| v | { unsafe { Self :: new_unchecked ( v )}})}}}; ($To : ty : $($From : ty ),*)=>{$(nzint_impl_try_from_nzint ! ($From =>$To , concat ! ( "Attempts to convert `" , stringify ! ($From ), "` to `" , stringify ! ($To ), "`." , )); )*}; }
+macro_rules! __ra_macro_fixture30 {($t : ty , $v : expr , $doc : tt )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Default for $t {# [ inline ]# [ doc = $doc ] fn default ()-> $t {$v }}}}
+macro_rules! __ra_macro_fixture31 {($t : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Hash for $t < T > {# [ inline ] fn hash < H : Hasher > (& self , _: & mut H ){}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: PartialEq for $t < T > { fn eq (& self , _other : &$t < T >)-> bool { true }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: Eq for $t < T > {}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: PartialOrd for $t < T > { fn partial_cmp (& self , _other : &$t < T >)-> Option < cmp :: Ordering > { Option :: Some ( cmp :: Ordering :: Equal )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > cmp :: Ord for $t < T > { fn cmp (& self , _other : &$t < T >)-> cmp :: Ordering { cmp :: Ordering :: Equal }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Copy for $t < T > {}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Clone for $t < T > { fn clone (& self )-> Self { Self }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized > Default for $t < T > { fn default ()-> Self { Self }}# [ unstable ( feature = "structural_match" , issue = "31434" )] impl < T : ? Sized > StructuralPartialEq for $t < T > {}# [ unstable ( feature = "structural_match" , issue = "31434" )] impl < T : ? Sized > StructuralEq for $t < T > {}}; }
+macro_rules! __ra_macro_fixture32 {($($t : ty )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Copy for $t {})* }}
+macro_rules! __ra_macro_fixture33 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Add for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn add ( self , other : $t )-> $t { self + other }} forward_ref_binop ! { impl Add , add for $t , $t })*)}
+macro_rules! __ra_macro_fixture34 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Sub for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn sub ( self , other : $t )-> $t { self - other }} forward_ref_binop ! { impl Sub , sub for $t , $t })*)}
+macro_rules! __ra_macro_fixture35 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Mul for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn mul ( self , other : $t )-> $t { self * other }} forward_ref_binop ! { impl Mul , mul for $t , $t })*)}
+macro_rules! __ra_macro_fixture36 {($($t : ty )*)=>($(# [ doc = " This operation rounds towards zero, truncating any" ]# [ doc = " fractional part of the exact result." ]# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Div for $t { type Output = $t ; # [ inline ] fn div ( self , other : $t )-> $t { self / other }} forward_ref_binop ! { impl Div , div for $t , $t })*)}
+macro_rules! __ra_macro_fixture37 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Div for $t { type Output = $t ; # [ inline ] fn div ( self , other : $t )-> $t { self / other }} forward_ref_binop ! { impl Div , div for $t , $t })*)}
+macro_rules! __ra_macro_fixture38 {($($t : ty )*)=>($(# [ doc = " This operation satisfies `n % d == n - (n / d) * d`. The" ]# [ doc = " result has the same sign as the left operand." ]# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Rem for $t { type Output = $t ; # [ inline ] fn rem ( self , other : $t )-> $t { self % other }} forward_ref_binop ! { impl Rem , rem for $t , $t })*)}
+macro_rules! __ra_macro_fixture39 {($($t : ty )*)=>($(# [ doc = " The remainder from the division of two floats." ]# [ doc = "" ]# [ doc = " The remainder has the same sign as the dividend and is computed as:" ]# [ doc = " `x - (x / y).trunc() * y`." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = " ```" ]# [ doc = " let x: f32 = 50.50;" ]# [ doc = " let y: f32 = 8.125;" ]# [ doc = " let remainder = x - (x / y).trunc() * y;" ]# [ doc = "" ]# [ doc = " // The answer to both operations is 1.75" ]# [ doc = " assert_eq!(x % y, remainder);" ]# [ doc = " ```" ]# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Rem for $t { type Output = $t ; # [ inline ] fn rem ( self , other : $t )-> $t { self % other }} forward_ref_binop ! { impl Rem , rem for $t , $t })*)}
+macro_rules! __ra_macro_fixture40 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Neg for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn neg ( self )-> $t {- self }} forward_ref_unop ! { impl Neg , neg for $t })*)}
+macro_rules! __ra_macro_fixture41 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl AddAssign for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn add_assign (& mut self , other : $t ){* self += other }} forward_ref_op_assign ! { impl AddAssign , add_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture42 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl SubAssign for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn sub_assign (& mut self , other : $t ){* self -= other }} forward_ref_op_assign ! { impl SubAssign , sub_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture43 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl MulAssign for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn mul_assign (& mut self , other : $t ){* self *= other }} forward_ref_op_assign ! { impl MulAssign , mul_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture44 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl DivAssign for $t {# [ inline ] fn div_assign (& mut self , other : $t ){* self /= other }} forward_ref_op_assign ! { impl DivAssign , div_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture45 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl RemAssign for $t {# [ inline ] fn rem_assign (& mut self , other : $t ){* self %= other }} forward_ref_op_assign ! { impl RemAssign , rem_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture46 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Not for $t { type Output = $t ; # [ inline ] fn not ( self )-> $t {! self }} forward_ref_unop ! { impl Not , not for $t })*)}
+macro_rules! __ra_macro_fixture47 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitAnd for $t { type Output = $t ; # [ inline ] fn bitand ( self , rhs : $t )-> $t { self & rhs }} forward_ref_binop ! { impl BitAnd , bitand for $t , $t })*)}
+macro_rules! __ra_macro_fixture48 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitOr for $t { type Output = $t ; # [ inline ] fn bitor ( self , rhs : $t )-> $t { self | rhs }} forward_ref_binop ! { impl BitOr , bitor for $t , $t })*)}
+macro_rules! __ra_macro_fixture49 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl BitXor for $t { type Output = $t ; # [ inline ] fn bitxor ( self , other : $t )-> $t { self ^ other }} forward_ref_binop ! { impl BitXor , bitxor for $t , $t })*)}
+macro_rules! __ra_macro_fixture50 {($($t : ty )*)=>($(shl_impl ! {$t , u8 } shl_impl ! {$t , u16 } shl_impl ! {$t , u32 } shl_impl ! {$t , u64 } shl_impl ! {$t , u128 } shl_impl ! {$t , usize } shl_impl ! {$t , i8 } shl_impl ! {$t , i16 } shl_impl ! {$t , i32 } shl_impl ! {$t , i64 } shl_impl ! {$t , i128 } shl_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture51 {($($t : ty )*)=>($(shr_impl ! {$t , u8 } shr_impl ! {$t , u16 } shr_impl ! {$t , u32 } shr_impl ! {$t , u64 } shr_impl ! {$t , u128 } shr_impl ! {$t , usize } shr_impl ! {$t , i8 } shr_impl ! {$t , i16 } shr_impl ! {$t , i32 } shr_impl ! {$t , i64 } shr_impl ! {$t , i128 } shr_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture52 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitAndAssign for $t {# [ inline ] fn bitand_assign (& mut self , other : $t ){* self &= other }} forward_ref_op_assign ! { impl BitAndAssign , bitand_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture53 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitOrAssign for $t {# [ inline ] fn bitor_assign (& mut self , other : $t ){* self |= other }} forward_ref_op_assign ! { impl BitOrAssign , bitor_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture54 {($($t : ty )+)=>($(# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl BitXorAssign for $t {# [ inline ] fn bitxor_assign (& mut self , other : $t ){* self ^= other }} forward_ref_op_assign ! { impl BitXorAssign , bitxor_assign for $t , $t })+)}
+macro_rules! __ra_macro_fixture55 {($($t : ty )*)=>($(shl_assign_impl ! {$t , u8 } shl_assign_impl ! {$t , u16 } shl_assign_impl ! {$t , u32 } shl_assign_impl ! {$t , u64 } shl_assign_impl ! {$t , u128 } shl_assign_impl ! {$t , usize } shl_assign_impl ! {$t , i8 } shl_assign_impl ! {$t , i16 } shl_assign_impl ! {$t , i32 } shl_assign_impl ! {$t , i64 } shl_assign_impl ! {$t , i128 } shl_assign_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture56 {($($t : ty )*)=>($(shr_assign_impl ! {$t , u8 } shr_assign_impl ! {$t , u16 } shr_assign_impl ! {$t , u32 } shr_assign_impl ! {$t , u64 } shr_assign_impl ! {$t , u128 } shr_assign_impl ! {$t , usize } shr_assign_impl ! {$t , i8 } shr_assign_impl ! {$t , i16 } shr_assign_impl ! {$t , i32 } shr_assign_impl ! {$t , i64 } shr_assign_impl ! {$t , i128 } shr_assign_impl ! {$t , isize })*)}
+macro_rules! __ra_macro_fixture57 {{$n : expr , $t : ident $($ts : ident )*}=>{# [ stable ( since = "1.4.0" , feature = "array_default" )] impl < T > Default for [ T ; $n ] where T : Default { fn default ()-> [ T ; $n ]{[$t :: default (), $($ts :: default ()),*]}} array_impl_default ! {($n - 1 ), $($ts )*}}; {$n : expr ,}=>{# [ stable ( since = "1.4.0" , feature = "array_default" )] impl < T > Default for [ T ; $n ]{ fn default ()-> [ T ; $n ]{[]}}}; }
+macro_rules! __ra_macro_fixture58 {($($t : ty ),+)=>{$(# [ unstable ( feature = "c_variadic" , reason = "the `c_variadic` feature has not been properly tested on \\n all supported platforms" , issue = "44930" )] impl sealed_trait :: VaArgSafe for $t {})+ }}
+macro_rules! __ra_macro_fixture59 {{ narrower than or same width as usize : $([$u_narrower : ident $i_narrower : ident ]),+; wider than usize : $([$u_wider : ident $i_wider : ident ]),+; }=>{$(# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $u_narrower { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { Some ((* end - * start ) as usize )} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { match Self :: try_from ( n ){ Ok ( n )=> start . checked_add ( n ), Err (_)=> None , }}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { match Self :: try_from ( n ){ Ok ( n )=> start . checked_sub ( n ), Err (_)=> None , }}}# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $i_narrower { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { Some ((* end as isize ). wrapping_sub (* start as isize ) as usize )} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { match $u_narrower :: try_from ( n ){ Ok ( n )=>{ let wrapped = start . wrapping_add ( n as Self ); if wrapped >= start { Some ( wrapped )} else { None }} Err (_)=> None , }}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { match $u_narrower :: try_from ( n ){ Ok ( n )=>{ let wrapped = start . wrapping_sub ( n as Self ); if wrapped <= start { Some ( wrapped )} else { None }} Err (_)=> None , }}})+ $(# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $u_wider { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { usize :: try_from (* end - * start ). ok ()} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_add ( n as Self )}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_sub ( n as Self )}}# [ allow ( unreachable_patterns )]# [ unstable ( feature = "step_trait" , reason = "recently redesigned" , issue = "42168" )] unsafe impl Step for $i_wider { step_identical_methods ! (); # [ inline ] fn steps_between ( start : & Self , end : & Self )-> Option < usize > { if * start <= * end { match end . checked_sub (* start ){ Some ( result )=> usize :: try_from ( result ). ok (), None => None , }} else { None }}# [ inline ] fn forward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_add ( n as Self )}# [ inline ] fn backward_checked ( start : Self , n : usize )-> Option < Self > { start . checked_sub ( n as Self )}})+ }; }
+macro_rules! __ra_macro_fixture60 {($($t : ty )*)=>($(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl ExactSizeIterator for ops :: Range <$t > {})*)}
+macro_rules! __ra_macro_fixture61 {($($t : ty )*)=>($(# [ stable ( feature = "inclusive_range" , since = "1.26.0" )] impl ExactSizeIterator for ops :: RangeInclusive <$t > {})*)}
+macro_rules! __ra_macro_fixture62 {(@ impls $zero : expr , $one : expr , # [$attr : meta ], $($a : ty )*)=>($(# [$attr ] impl Sum for $a { fn sum < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ($zero , Add :: add )}}# [$attr ] impl Product for $a { fn product < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ($one , Mul :: mul )}}# [$attr ] impl < 'a > Sum <& 'a $a > for $a { fn sum < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ($zero , Add :: add )}}# [$attr ] impl < 'a > Product <& 'a $a > for $a { fn product < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ($one , Mul :: mul )}})*); ($($a : ty )*)=>( integer_sum_product ! (@ impls 0 , 1 , # [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )], $($a )*); integer_sum_product ! (@ impls Wrapping ( 0 ), Wrapping ( 1 ), # [ stable ( feature = "wrapping_iter_arith" , since = "1.14.0" )], $(Wrapping <$a >)*); ); }
+macro_rules! __ra_macro_fixture63 {($($a : ident )*)=>($(# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl Sum for $a { fn sum < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ( 0.0 , Add :: add )}}# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl Product for $a { fn product < I : Iterator < Item = Self >> ( iter : I )-> Self { iter . fold ( 1.0 , Mul :: mul )}}# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl < 'a > Sum <& 'a $a > for $a { fn sum < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ( 0.0 , Add :: add )}}# [ stable ( feature = "iter_arith_traits" , since = "1.12.0" )] impl < 'a > Product <& 'a $a > for $a { fn product < I : Iterator < Item =& 'a Self >> ( iter : I )-> Self { iter . fold ( 1.0 , Mul :: mul )}})*)}
+macro_rules! __ra_macro_fixture64 {($cfg_cas : meta , $cfg_align : meta , $stable : meta , $stable_cxchg : meta , $stable_debug : meta , $stable_access : meta , $stable_from : meta , $stable_nand : meta , $const_stable : meta , $stable_init_const : meta , $s_int_type : literal , $int_ref : expr , $extra_feature : expr , $min_fn : ident , $max_fn : ident , $align : expr , $atomic_new : expr , $int_type : ident $atomic_type : ident $atomic_init : ident )=>{# [ doc = " An integer type which can be safely shared between threads." ]# [ doc = "" ]# [ doc = " This type has the same in-memory representation as the underlying" ]# [ doc = " integer type, [`" ]# [ doc = $s_int_type ]# [ doc = " `](" ]# [ doc = $int_ref ]# [ doc = " ). For more about the differences between atomic types and" ]# [ doc = " non-atomic types as well as information about the portability of" ]# [ doc = " this type, please see the [module-level documentation]." ]# [ doc = "" ]# [ doc = " **Note:** This type is only available on platforms that support" ]# [ doc = " atomic loads and stores of [`" ]# [ doc = $s_int_type ]# [ doc = " `](" ]# [ doc = $int_ref ]# [ doc = " )." ]# [ doc = "" ]# [ doc = " [module-level documentation]: crate::sync::atomic" ]# [$stable ]# [ repr ( C , align ($align ))] pub struct $atomic_type { v : UnsafeCell <$int_type >, }# [ doc = " An atomic integer initialized to `0`." ]# [$stable_init_const ]# [ rustc_deprecated ( since = "1.34.0" , reason = "the `new` function is now preferred" , suggestion = $atomic_new , )] pub const $atomic_init : $atomic_type = $atomic_type :: new ( 0 ); # [$stable ] impl Default for $atomic_type {# [ inline ] fn default ()-> Self { Self :: new ( Default :: default ())}}# [$stable_from ] impl From <$int_type > for $atomic_type { doc_comment ! { concat ! ( "Converts an `" , stringify ! ($int_type ), "` into an `" , stringify ! ($atomic_type ), "`." ), # [ inline ] fn from ( v : $int_type )-> Self { Self :: new ( v )}}}# [$stable_debug ] impl fmt :: Debug for $atomic_type { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { fmt :: Debug :: fmt (& self . load ( Ordering :: SeqCst ), f )}}# [$stable ] unsafe impl Sync for $atomic_type {} impl $atomic_type { doc_comment ! { concat ! ( "Creates a new atomic integer.\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::" , stringify ! ($atomic_type ), ";\n\nlet atomic_forty_two = " , stringify ! ($atomic_type ), "::new(42);\n```" ), # [ inline ]# [$stable ]# [$const_stable ] pub const fn new ( v : $int_type )-> Self { Self { v : UnsafeCell :: new ( v )}}} doc_comment ! { concat ! ( "Returns a mutable reference to the underlying integer.\n\nThis is safe because the mutable reference guarantees that no other threads are\nconcurrently accessing the atomic data.\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet mut some_var = " , stringify ! ($atomic_type ), "::new(10);\nassert_eq!(*some_var.get_mut(), 10);\n*some_var.get_mut() = 5;\nassert_eq!(some_var.load(Ordering::SeqCst), 5);\n```" ), # [ inline ]# [$stable_access ] pub fn get_mut (& mut self )-> & mut $int_type { self . v . get_mut ()}} doc_comment ! { concat ! ( "Get atomic access to a `&mut " , stringify ! ($int_type ), "`.\n\n" , if_not_8_bit ! {$int_type , concat ! ( "**Note:** This function is only available on targets where `" , stringify ! ($int_type ), "` has an alignment of " , $align , " bytes." 
)}, "\n\n# Examples\n\n```\n#![feature(atomic_from_mut)]\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet mut some_int = 123;\nlet a = " , stringify ! ($atomic_type ), "::from_mut(&mut some_int);\na.store(100, Ordering::Relaxed);\nassert_eq!(some_int, 100);\n```\n " ), # [ inline ]# [$cfg_align ]# [ unstable ( feature = "atomic_from_mut" , issue = "76314" )] pub fn from_mut ( v : & mut $int_type )-> & Self { use crate :: mem :: align_of ; let []= [(); align_of ::< Self > ()- align_of ::<$int_type > ()]; unsafe {&* ( v as * mut $int_type as * mut Self )}}} doc_comment ! { concat ! ( "Consumes the atomic and returns the contained value.\n\nThis is safe because passing `self` by value guarantees that no other threads are\nconcurrently accessing the atomic data.\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::" , stringify ! ($atomic_type ), ";\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\nassert_eq!(some_var.into_inner(), 5);\n```" ), # [ inline ]# [$stable_access ]# [ rustc_const_unstable ( feature = "const_cell_into_inner" , issue = "78729" )] pub const fn into_inner ( self )-> $int_type { self . v . into_inner ()}} doc_comment ! { concat ! ( "Loads a value from the atomic integer.\n\n`load` takes an [`Ordering`] argument which describes the memory ordering of this operation.\nPossible values are [`SeqCst`], [`Acquire`] and [`Relaxed`].\n\n# Panics\n\nPanics if `order` is [`Release`] or [`AcqRel`].\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.load(Ordering::Relaxed), 5);\n```" ), # [ inline ]# [$stable ] pub fn load (& self , order : Ordering )-> $int_type { unsafe { atomic_load ( self . v . get (), order )}}} doc_comment ! { concat ! ( "Stores a value into the atomic integer.\n\n`store` takes an [`Ordering`] argument which describes the memory ordering of this operation.\n Possible values are [`SeqCst`], [`Release`] and [`Relaxed`].\n\n# Panics\n\nPanics if `order` is [`Acquire`] or [`AcqRel`].\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nsome_var.store(10, Ordering::Relaxed);\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n```" ), # [ inline ]# [$stable ] pub fn store (& self , val : $int_type , order : Ordering ){ unsafe { atomic_store ( self . v . get (), val , order ); }}} doc_comment ! { concat ! ( "Stores a value into the atomic integer, returning the previous value.\n\n`swap` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.swap(10, Ordering::Relaxed), 5);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn swap (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_swap ( self . v . get (), val , order )}}} doc_comment ! { concat ! 
( "Stores a value into the atomic integer if the current value is the same as\nthe `current` value.\n\nThe return value is always the previous value. If it is equal to `current`, then the\nvalue was updated.\n\n`compare_and_swap` also takes an [`Ordering`] argument which describes the memory\nordering of this operation. Notice that even when using [`AcqRel`], the operation\nmight fail and hence just perform an `Acquire` load, but not have `Release` semantics.\nUsing [`Acquire`] makes the store part of this operation [`Relaxed`] if it\nhappens, and using [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.compare_and_swap(5, 10, Ordering::Relaxed), 5);\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n\nassert_eq!(some_var.compare_and_swap(6, 12, Ordering::Relaxed), 10);\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn compare_and_swap (& self , current : $int_type , new : $int_type , order : Ordering )-> $int_type { match self . compare_exchange ( current , new , order , strongest_failure_ordering ( order )){ Ok ( x )=> x , Err ( x )=> x , }}} doc_comment ! { concat ! ( "Stores a value into the atomic integer if the current value is the same as\nthe `current` value.\n\nThe return value is a result indicating whether the new value was written and\ncontaining the previous value. On success this value is guaranteed to be equal to\n`current`.\n\n`compare_exchange` takes two [`Ordering`] arguments to describe the memory\nordering of this operation. The first describes the required ordering if the\noperation succeeds while the second describes the required ordering when the\noperation fails. Using [`Acquire`] as success ordering makes the store part\nof this operation [`Relaxed`], and using [`Release`] makes the successful load\n[`Relaxed`]. The failure ordering can only be [`SeqCst`], [`Acquire`] or [`Relaxed`]\nand must be equivalent to or weaker than the success ordering.\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet some_var = " , stringify ! ($atomic_type ), "::new(5);\n\nassert_eq!(some_var.compare_exchange(5, 10,\n Ordering::Acquire,\n Ordering::Relaxed),\n Ok(5));\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n\nassert_eq!(some_var.compare_exchange(6, 12,\n Ordering::SeqCst,\n Ordering::Acquire),\n Err(10));\nassert_eq!(some_var.load(Ordering::Relaxed), 10);\n```" ), # [ inline ]# [$stable_cxchg ]# [$cfg_cas ] pub fn compare_exchange (& self , current : $int_type , new : $int_type , success : Ordering , failure : Ordering )-> Result <$int_type , $int_type > { unsafe { atomic_compare_exchange ( self . v . get (), current , new , success , failure )}}} doc_comment ! { concat ! ( "Stores a value into the atomic integer if the current value is the same as\nthe `current` value.\n\nUnlike [`" , stringify ! ($atomic_type ), "::compare_exchange`], this function is allowed to spuriously fail even\nwhen the comparison succeeds, which can result in more efficient code on some\nplatforms. 
The return value is a result indicating whether the new value was\nwritten and containing the previous value.\n\n`compare_exchange_weak` takes two [`Ordering`] arguments to describe the memory\nordering of this operation. The first describes the required ordering if the\noperation succeeds while the second describes the required ordering when the\noperation fails. Using [`Acquire`] as success ordering makes the store part\nof this operation [`Relaxed`], and using [`Release`] makes the successful load\n[`Relaxed`]. The failure ordering can only be [`SeqCst`], [`Acquire`] or [`Relaxed`]\nand must be equivalent to or weaker than the success ordering.\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet val = " , stringify ! ($atomic_type ), "::new(4);\n\nlet mut old = val.load(Ordering::Relaxed);\nloop {\n let new = old * 2;\n match val.compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed) {\n Ok(_) => break,\n Err(x) => old = x,\n }\n}\n```" ), # [ inline ]# [$stable_cxchg ]# [$cfg_cas ] pub fn compare_exchange_weak (& self , current : $int_type , new : $int_type , success : Ordering , failure : Ordering )-> Result <$int_type , $int_type > { unsafe { atomic_compare_exchange_weak ( self . v . get (), current , new , success , failure )}}} doc_comment ! { concat ! ( "Adds to the current value, returning the previous value.\n\nThis operation wraps around on overflow.\n\n`fetch_add` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0);\nassert_eq!(foo.fetch_add(10, Ordering::SeqCst), 0);\nassert_eq!(foo.load(Ordering::SeqCst), 10);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_add (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_add ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Subtracts from the current value, returning the previous value.\n\nThis operation wraps around on overflow.\n\n`fetch_sub` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(20);\nassert_eq!(foo.fetch_sub(10, Ordering::SeqCst), 20);\nassert_eq!(foo.load(Ordering::SeqCst), 10);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_sub (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_sub ( self . v . get (), val , order )}}} doc_comment ! { concat ! 
( "Bitwise \"and\" with the current value.\n\nPerforms a bitwise \"and\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_and` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0b101101);\nassert_eq!(foo.fetch_and(0b110011, Ordering::SeqCst), 0b101101);\nassert_eq!(foo.load(Ordering::SeqCst), 0b100001);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_and (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_and ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Bitwise \"nand\" with the current value.\n\nPerforms a bitwise \"nand\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_nand` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "\nuse std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0x13);\nassert_eq!(foo.fetch_nand(0x31, Ordering::SeqCst), 0x13);\nassert_eq!(foo.load(Ordering::SeqCst), !(0x13 & 0x31));\n```" ), # [ inline ]# [$stable_nand ]# [$cfg_cas ] pub fn fetch_nand (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_nand ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Bitwise \"or\" with the current value.\n\nPerforms a bitwise \"or\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_or` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0b101101);\nassert_eq!(foo.fetch_or(0b110011, Ordering::SeqCst), 0b101101);\nassert_eq!(foo.load(Ordering::SeqCst), 0b111111);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_or (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_or ( self . v . get (), val , order )}}} doc_comment ! { concat ! 
( "Bitwise \"xor\" with the current value.\n\nPerforms a bitwise \"xor\" operation on the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_xor` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(0b101101);\nassert_eq!(foo.fetch_xor(0b110011, Ordering::SeqCst), 0b101101);\nassert_eq!(foo.load(Ordering::SeqCst), 0b011110);\n```" ), # [ inline ]# [$stable ]# [$cfg_cas ] pub fn fetch_xor (& self , val : $int_type , order : Ordering )-> $int_type { unsafe { atomic_xor ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Fetches the value, and applies a function to it that returns an optional\nnew value. Returns a `Result` of `Ok(previous_value)` if the function returned `Some(_)`, else\n`Err(previous_value)`.\n\nNote: This may call the function multiple times if the value has been changed from other threads in\nthe meantime, as long as the function returns `Some(_)`, but the function will have been applied\nonly once to the stored value.\n\n`fetch_update` takes two [`Ordering`] arguments to describe the memory ordering of this operation.\nThe first describes the required ordering for when the operation finally succeeds while the second\ndescribes the required ordering for loads. These correspond to the success and failure orderings of\n[`" , stringify ! ($atomic_type ), "::compare_exchange`] respectively.\n\nUsing [`Acquire`] as success ordering makes the store part\nof this operation [`Relaxed`], and using [`Release`] makes the final successful load\n[`Relaxed`]. The (failed) load ordering can only be [`SeqCst`], [`Acquire`] or [`Relaxed`]\nand must be equivalent to or weaker than the success ordering.\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```rust\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet x = " , stringify ! ($atomic_type ), "::new(7);\nassert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| None), Err(7));\nassert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| Some(x + 1)), Ok(7));\nassert_eq!(x.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |x| Some(x + 1)), Ok(8));\nassert_eq!(x.load(Ordering::SeqCst), 9);\n```" ), # [ inline ]# [ stable ( feature = "no_more_cas" , since = "1.45.0" )]# [$cfg_cas ] pub fn fetch_update < F > (& self , set_order : Ordering , fetch_order : Ordering , mut f : F )-> Result <$int_type , $int_type > where F : FnMut ($int_type )-> Option <$int_type > { let mut prev = self . load ( fetch_order ); while let Some ( next )= f ( prev ){ match self . compare_exchange_weak ( prev , next , set_order , fetch_order ){ x @ Ok (_)=> return x , Err ( next_prev )=> prev = next_prev }} Err ( prev )}} doc_comment ! { concat ! 
( "Maximum with the current value.\n\nFinds the maximum of the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_max` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nassert_eq!(foo.fetch_max(42, Ordering::SeqCst), 23);\nassert_eq!(foo.load(Ordering::SeqCst), 42);\n```\n\nIf you want to obtain the maximum value in one step, you can use the following:\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nlet bar = 42;\nlet max_foo = foo.fetch_max(bar, Ordering::SeqCst).max(bar);\nassert!(max_foo == 42);\n```" ), # [ inline ]# [ stable ( feature = "atomic_min_max" , since = "1.45.0" )]# [$cfg_cas ] pub fn fetch_max (& self , val : $int_type , order : Ordering )-> $int_type { unsafe {$max_fn ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Minimum with the current value.\n\nFinds the minimum of the current value and the argument `val`, and\nsets the new value to the result.\n\nReturns the previous value.\n\n`fetch_min` takes an [`Ordering`] argument which describes the memory ordering\nof this operation. All ordering modes are possible. Note that using\n[`Acquire`] makes the store part of this operation [`Relaxed`], and\nusing [`Release`] makes the load part [`Relaxed`].\n\n**Note**: This method is only available on platforms that support atomic\noperations on [`" , $s_int_type , "`](" , $int_ref , ").\n\n# Examples\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nassert_eq!(foo.fetch_min(42, Ordering::Relaxed), 23);\nassert_eq!(foo.load(Ordering::Relaxed), 23);\nassert_eq!(foo.fetch_min(22, Ordering::Relaxed), 23);\nassert_eq!(foo.load(Ordering::Relaxed), 22);\n```\n\nIf you want to obtain the minimum value in one step, you can use the following:\n\n```\n" , $extra_feature , "use std::sync::atomic::{" , stringify ! ($atomic_type ), ", Ordering};\n\nlet foo = " , stringify ! ($atomic_type ), "::new(23);\nlet bar = 12;\nlet min_foo = foo.fetch_min(bar, Ordering::SeqCst).min(bar);\nassert_eq!(min_foo, 12);\n```" ), # [ inline ]# [ stable ( feature = "atomic_min_max" , since = "1.45.0" )]# [$cfg_cas ] pub fn fetch_min (& self , val : $int_type , order : Ordering )-> $int_type { unsafe {$min_fn ( self . v . get (), val , order )}}} doc_comment ! { concat ! ( "Returns a mutable pointer to the underlying integer.\n\nDoing non-atomic reads and writes on the resulting integer can be a data race.\nThis method is mostly useful for FFI, where the function signature may use\n`*mut " , stringify ! ($int_type ), "` instead of `&" , stringify ! ($atomic_type ), "`.\n\nReturning an `*mut` pointer from a shared reference to this atomic is safe because the\natomic types work with interior mutability. 
All modifications of an atomic change the value\nthrough a shared reference, and can do so safely as long as they use atomic operations. Any\nuse of the returned raw pointer requires an `unsafe` block and still has to uphold the same\nrestriction: operations on it must be atomic.\n\n# Examples\n\n```ignore (extern-declaration)\n# fn main() {\n" , $extra_feature , "use std::sync::atomic::" , stringify ! ($atomic_type ), ";\n\nextern {\n fn my_atomic_op(arg: *mut " , stringify ! ($int_type ), ");\n}\n\nlet mut atomic = " , stringify ! ($atomic_type ), "::new(1);\n" , "unsafe {\n my_atomic_op(atomic.as_mut_ptr());\n}\n# }\n```" ), # [ inline ]# [ unstable ( feature = "atomic_mut_ptr" , reason = "recently added" , issue = "66893" )] pub fn as_mut_ptr (& self )-> * mut $int_type { self . v . get ()}}}}}
+macro_rules! __ra_macro_fixture65 {($($target_pointer_width : literal $align : literal )* )=>{$(# [ cfg ( target_has_atomic_load_store = "ptr" )]# [ cfg ( target_pointer_width = $target_pointer_width )] atomic_int ! { cfg ( target_has_atomic = "ptr" ), cfg ( target_has_atomic_equal_alignment = "ptr" ), stable ( feature = "rust1" , since = "1.0.0" ), stable ( feature = "extended_compare_and_swap" , since = "1.10.0" ), stable ( feature = "atomic_debug" , since = "1.3.0" ), stable ( feature = "atomic_access" , since = "1.15.0" ), stable ( feature = "atomic_from" , since = "1.23.0" ), stable ( feature = "atomic_nand" , since = "1.27.0" ), rustc_const_stable ( feature = "const_integer_atomics" , since = "1.34.0" ), stable ( feature = "rust1" , since = "1.0.0" ), "isize" , "../../../std/primitive.isize.html" , "" , atomic_min , atomic_max , $align , "AtomicIsize::new(0)" , isize AtomicIsize ATOMIC_ISIZE_INIT }# [ cfg ( target_has_atomic_load_store = "ptr" )]# [ cfg ( target_pointer_width = $target_pointer_width )] atomic_int ! { cfg ( target_has_atomic = "ptr" ), cfg ( target_has_atomic_equal_alignment = "ptr" ), stable ( feature = "rust1" , since = "1.0.0" ), stable ( feature = "extended_compare_and_swap" , since = "1.10.0" ), stable ( feature = "atomic_debug" , since = "1.3.0" ), stable ( feature = "atomic_access" , since = "1.15.0" ), stable ( feature = "atomic_from" , since = "1.23.0" ), stable ( feature = "atomic_nand" , since = "1.27.0" ), rustc_const_stable ( feature = "const_integer_atomics" , since = "1.34.0" ), stable ( feature = "rust1" , since = "1.0.0" ), "usize" , "../../../std/primitive.usize.html" , "" , atomic_umin , atomic_umax , $align , "AtomicUsize::new(0)" , usize AtomicUsize ATOMIC_USIZE_INIT })* }; }
+macro_rules! __ra_macro_fixture66 {($ty : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Debug for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_decimal_common ( fmt , self , true , 1 )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Display for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_decimal_common ( fmt , self , false , 0 )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl LowerExp for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_exponential_common ( fmt , self , false )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl UpperExp for $ty { fn fmt (& self , fmt : & mut Formatter < '_ >)-> Result { float_to_exponential_common ( fmt , self , true )}}}; }
+macro_rules! __ra_macro_fixture67 {($($t : ident )*)=>($(impl DisplayInt for $t { fn zero ()-> Self { 0 } fn from_u8 ( u : u8 )-> Self { u as Self } fn to_u8 (& self )-> u8 {* self as u8 } fn to_u16 (& self )-> u16 {* self as u16 } fn to_u32 (& self )-> u32 {* self as u32 } fn to_u64 (& self )-> u64 {* self as u64 } fn to_u128 (& self )-> u128 {* self as u128 }})* )}
+macro_rules! __ra_macro_fixture68 {($($t : ident )*)=>($(impl DisplayInt for $t { fn zero ()-> Self { 0 } fn from_u8 ( u : u8 )-> Self { u as Self } fn to_u8 (& self )-> u8 {* self as u8 } fn to_u16 (& self )-> u16 {* self as u16 } fn to_u32 (& self )-> u32 {* self as u32 } fn to_u64 (& self )-> u64 {* self as u64 } fn to_u128 (& self )-> u128 {* self as u128 }})* )}
+macro_rules! __ra_macro_fixture69 {($T : ident , $base : expr , $prefix : expr , $($x : pat =>$conv : expr ),+)=>{ impl GenericRadix for $T { const BASE : u8 = $base ; const PREFIX : & 'static str = $prefix ; fn digit ( x : u8 )-> u8 { match x {$($x =>$conv ,)+ x => panic ! ( "number not in the range 0..={}: {}" , Self :: BASE - 1 , x ), }}}}}
+macro_rules! __ra_macro_fixture70 {($Int : ident , $Uint : ident )=>{ int_base ! { fmt :: Binary for $Int as $Uint -> Binary } int_base ! { fmt :: Octal for $Int as $Uint -> Octal } int_base ! { fmt :: LowerHex for $Int as $Uint -> LowerHex } int_base ! { fmt :: UpperHex for $Int as $Uint -> UpperHex } int_base ! { fmt :: Binary for $Uint as $Uint -> Binary } int_base ! { fmt :: Octal for $Uint as $Uint -> Octal } int_base ! { fmt :: LowerHex for $Uint as $Uint -> LowerHex } int_base ! { fmt :: UpperHex for $Uint as $Uint -> UpperHex }}; }
+macro_rules! __ra_macro_fixture71 {($($T : ident )*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl fmt :: Debug for $T {# [ inline ] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { if f . debug_lower_hex (){ fmt :: LowerHex :: fmt ( self , f )} else if f . debug_upper_hex (){ fmt :: UpperHex :: fmt ( self , f )} else { fmt :: Display :: fmt ( self , f )}}})*}; }
+macro_rules! __ra_macro_fixture72 {($($t : ident ),* as $u : ident via $conv_fn : ident named $name : ident )=>{ fn $name ( mut n : $u , is_nonnegative : bool , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let mut buf = [ MaybeUninit ::< u8 >:: uninit (); 39 ]; let mut curr = buf . len () as isize ; let buf_ptr = MaybeUninit :: slice_as_mut_ptr (& mut buf ); let lut_ptr = DEC_DIGITS_LUT . as_ptr (); unsafe { assert ! ( crate :: mem :: size_of ::<$u > ()>= 2 ); while n >= 10000 { let rem = ( n % 10000 ) as isize ; n /= 10000 ; let d1 = ( rem / 100 )<< 1 ; let d2 = ( rem % 100 )<< 1 ; curr -= 4 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); ptr :: copy_nonoverlapping ( lut_ptr . offset ( d2 ), buf_ptr . offset ( curr + 2 ), 2 ); } let mut n = n as isize ; if n >= 100 { let d1 = ( n % 100 )<< 1 ; n /= 100 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } if n < 10 { curr -= 1 ; * buf_ptr . offset ( curr )= ( n as u8 )+ b'0' ; } else { let d1 = n << 1 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); }} let buf_slice = unsafe { str :: from_utf8_unchecked ( slice :: from_raw_parts ( buf_ptr . offset ( curr ), buf . len ()- curr as usize ))}; f . pad_integral ( is_nonnegative , "" , buf_slice )}$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl fmt :: Display for $t {# [ allow ( unused_comparisons )] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let is_nonnegative = * self >= 0 ; let n = if is_nonnegative { self .$conv_fn ()} else {(! self .$conv_fn ()). wrapping_add ( 1 )}; $name ( n , is_nonnegative , f )}})* }; }
+macro_rules! __ra_macro_fixture73 {($($t : ident ),* as $u : ident via $conv_fn : ident named $name : ident )=>{ fn $name ( mut n : $u , is_nonnegative : bool , upper : bool , f : & mut fmt :: Formatter < '_ > )-> fmt :: Result { let ( mut n , mut exponent , trailing_zeros , added_precision )= { let mut exponent = 0 ; while n % 10 == 0 && n >= 10 { n /= 10 ; exponent += 1 ; } let trailing_zeros = exponent ; let ( added_precision , subtracted_precision )= match f . precision (){ Some ( fmt_prec )=>{ let mut tmp = n ; let mut prec = 0 ; while tmp >= 10 { tmp /= 10 ; prec += 1 ; }( fmt_prec . saturating_sub ( prec ), prec . saturating_sub ( fmt_prec ))} None =>( 0 , 0 )}; for _ in 1 .. subtracted_precision { n /= 10 ; exponent += 1 ; } if subtracted_precision != 0 { let rem = n % 10 ; n /= 10 ; exponent += 1 ; if rem >= 5 { n += 1 ; }}( n , exponent , trailing_zeros , added_precision )}; let mut buf = [ MaybeUninit ::< u8 >:: uninit (); 40 ]; let mut curr = buf . len () as isize ; let buf_ptr = MaybeUninit :: slice_as_mut_ptr (& mut buf ); let lut_ptr = DEC_DIGITS_LUT . as_ptr (); while n >= 100 { let d1 = (( n % 100 ) as isize )<< 1 ; curr -= 2 ; unsafe { ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } n /= 100 ; exponent += 2 ; } let mut n = n as isize ; if n >= 10 { curr -= 1 ; unsafe {* buf_ptr . offset ( curr )= ( n as u8 % 10_u8 )+ b'0' ; } n /= 10 ; exponent += 1 ; } if exponent != trailing_zeros || added_precision != 0 { curr -= 1 ; unsafe {* buf_ptr . offset ( curr )= b'.' ; }} let buf_slice = unsafe { curr -= 1 ; * buf_ptr . offset ( curr )= ( n as u8 )+ b'0' ; let len = buf . len ()- curr as usize ; slice :: from_raw_parts ( buf_ptr . offset ( curr ), len )}; let mut exp_buf = [ MaybeUninit ::< u8 >:: uninit (); 3 ]; let exp_ptr = MaybeUninit :: slice_as_mut_ptr (& mut exp_buf ); let exp_slice = unsafe {* exp_ptr . offset ( 0 )= if upper { b'E' } else { b'e' }; let len = if exponent < 10 {* exp_ptr . offset ( 1 )= ( exponent as u8 )+ b'0' ; 2 } else { let off = exponent << 1 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( off ), exp_ptr . offset ( 1 ), 2 ); 3 }; slice :: from_raw_parts ( exp_ptr , len )}; let parts = & [ flt2dec :: Part :: Copy ( buf_slice ), flt2dec :: Part :: Zero ( added_precision ), flt2dec :: Part :: Copy ( exp_slice )]; let sign = if ! is_nonnegative { "-" } else if f . sign_plus (){ "+" } else { "" }; let formatted = flt2dec :: Formatted { sign , parts }; f . pad_formatted_parts (& formatted )}$(# [ stable ( feature = "integer_exp_format" , since = "1.42.0" )] impl fmt :: LowerExp for $t {# [ allow ( unused_comparisons )] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let is_nonnegative = * self >= 0 ; let n = if is_nonnegative { self .$conv_fn ()} else {(! self .$conv_fn ()). wrapping_add ( 1 )}; $name ( n , is_nonnegative , false , f )}})* $(# [ stable ( feature = "integer_exp_format" , since = "1.42.0" )] impl fmt :: UpperExp for $t {# [ allow ( unused_comparisons )] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { let is_nonnegative = * self >= 0 ; let n = if is_nonnegative { self .$conv_fn ()} else {(! self .$conv_fn ()). wrapping_add ( 1 )}; $name ( n , is_nonnegative , true , f )}})* }; }
+macro_rules! __ra_macro_fixture74 {($($tr : ident ),*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized + $tr > $tr for & T { fn fmt (& self , f : & mut Formatter < '_ >)-> Result {$tr :: fmt (&** self , f )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T : ? Sized + $tr > $tr for & mut T { fn fmt (& self , f : & mut Formatter < '_ >)-> Result {$tr :: fmt (&** self , f )}})* }}
+macro_rules! __ra_macro_fixture75 {()=>(); ($($name : ident ,)+ )=>(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($name : Debug ),+> Debug for ($($name ,)+) where last_type ! ($($name ,)+): ? Sized {# [ allow ( non_snake_case , unused_assignments )] fn fmt (& self , f : & mut Formatter < '_ >)-> Result { let mut builder = f . debug_tuple ( "" ); let ($(ref $name ,)+)= * self ; $(builder . field (&$name ); )+ builder . finish ()}} peel ! {$($name ,)+ })}
+macro_rules! __ra_macro_fixture76 {($(($ty : ident , $meth : ident ),)*)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Hash for $ty { fn hash < H : Hasher > (& self , state : & mut H ){ state .$meth (* self )} fn hash_slice < H : Hasher > ( data : & [$ty ], state : & mut H ){ let newlen = data . len ()* mem :: size_of ::<$ty > (); let ptr = data . as_ptr () as * const u8 ; state . write ( unsafe { slice :: from_raw_parts ( ptr , newlen )})}})*}}
+macro_rules! __ra_macro_fixture77 {()=>(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Hash for (){ fn hash < H : Hasher > (& self , _state : & mut H ){}}); ($($name : ident )+)=>(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($name : Hash ),+> Hash for ($($name ,)+) where last_type ! ($($name ,)+): ? Sized {# [ allow ( non_snake_case )] fn hash < S : Hasher > (& self , state : & mut S ){ let ($(ref $name ,)+)= * self ; $($name . hash ( state );)+ }}); }
+macro_rules! __ra_macro_fixture78 {($([$($p : tt )*]$t : ty ,)*)=>{$(impl <$($p )*> AlwaysApplicableOrd for $t {})* }}
+macro_rules! __ra_macro_fixture79 {($traitname : ident , $($ty : ty )*)=>{$(impl $traitname <$ty > for $ty {})* }}
+macro_rules! __ra_macro_fixture80 {( struct $name : ident -> $ptr : ty , $elem : ty , $raw_mut : tt , {$($mut_ : tt )?}, {$($extra : tt )*})=>{ macro_rules ! next_unchecked {($self : ident )=>{& $($mut_ )? *$self . post_inc_start ( 1 )}} macro_rules ! next_back_unchecked {($self : ident )=>{& $($mut_ )? *$self . pre_dec_end ( 1 )}} macro_rules ! zst_shrink {($self : ident , $n : ident )=>{$self . end = ($self . end as * $raw_mut u8 ). wrapping_offset (-$n ) as * $raw_mut T ; }} impl < 'a , T > $name < 'a , T > {# [ inline ( always )] fn make_slice (& self )-> & 'a [ T ]{ unsafe { from_raw_parts ( self . ptr . as_ptr (), len ! ( self ))}}# [ inline ( always )] unsafe fn post_inc_start (& mut self , offset : isize )-> * $raw_mut T { if mem :: size_of ::< T > ()== 0 { zst_shrink ! ( self , offset ); self . ptr . as_ptr ()} else { let old = self . ptr . as_ptr (); self . ptr = unsafe { NonNull :: new_unchecked ( self . ptr . as_ptr (). offset ( offset ))}; old }}# [ inline ( always )] unsafe fn pre_dec_end (& mut self , offset : isize )-> * $raw_mut T { if mem :: size_of ::< T > ()== 0 { zst_shrink ! ( self , offset ); self . ptr . as_ptr ()} else { self . end = unsafe { self . end . offset (- offset )}; self . end }}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < T > ExactSizeIterator for $name < '_ , T > {# [ inline ( always )] fn len (& self )-> usize { len ! ( self )}# [ inline ( always )] fn is_empty (& self )-> bool { is_empty ! ( self )}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < 'a , T > Iterator for $name < 'a , T > { type Item = $elem ; # [ inline ] fn next (& mut self )-> Option <$elem > { unsafe { assume (! self . ptr . as_ptr (). is_null ()); if mem :: size_of ::< T > ()!= 0 { assume (! self . end . is_null ()); } if is_empty ! ( self ){ None } else { Some ( next_unchecked ! ( self ))}}}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ let exact = len ! ( self ); ( exact , Some ( exact ))}# [ inline ] fn count ( self )-> usize { len ! ( self )}# [ inline ] fn nth (& mut self , n : usize )-> Option <$elem > { if n >= len ! ( self ){ if mem :: size_of ::< T > ()== 0 { self . end = self . ptr . as_ptr (); } else { unsafe { self . ptr = NonNull :: new_unchecked ( self . end as * mut T ); }} return None ; } unsafe { self . post_inc_start ( n as isize ); Some ( next_unchecked ! ( self ))}}# [ inline ] fn last ( mut self )-> Option <$elem > { self . next_back ()}# [ inline ] fn for_each < F > ( mut self , mut f : F ) where Self : Sized , F : FnMut ( Self :: Item ), { while let Some ( x )= self . next (){ f ( x ); }}# [ inline ] fn all < F > (& mut self , mut f : F )-> bool where Self : Sized , F : FnMut ( Self :: Item )-> bool , { while let Some ( x )= self . next (){ if ! f ( x ){ return false ; }} true }# [ inline ] fn any < F > (& mut self , mut f : F )-> bool where Self : Sized , F : FnMut ( Self :: Item )-> bool , { while let Some ( x )= self . next (){ if f ( x ){ return true ; }} false }# [ inline ] fn find < P > (& mut self , mut predicate : P )-> Option < Self :: Item > where Self : Sized , P : FnMut (& Self :: Item )-> bool , { while let Some ( x )= self . next (){ if predicate (& x ){ return Some ( x ); }} None }# [ inline ] fn find_map < B , F > (& mut self , mut f : F )-> Option < B > where Self : Sized , F : FnMut ( Self :: Item )-> Option < B >, { while let Some ( x )= self . 
next (){ if let Some ( y )= f ( x ){ return Some ( y ); }} None }# [ inline ]# [ rustc_inherit_overflow_checks ] fn position < P > (& mut self , mut predicate : P )-> Option < usize > where Self : Sized , P : FnMut ( Self :: Item )-> bool , { let n = len ! ( self ); let mut i = 0 ; while let Some ( x )= self . next (){ if predicate ( x ){ unsafe { assume ( i < n )}; return Some ( i ); } i += 1 ; } None }# [ inline ] fn rposition < P > (& mut self , mut predicate : P )-> Option < usize > where P : FnMut ( Self :: Item )-> bool , Self : Sized + ExactSizeIterator + DoubleEndedIterator { let n = len ! ( self ); let mut i = n ; while let Some ( x )= self . next_back (){ i -= 1 ; if predicate ( x ){ unsafe { assume ( i < n )}; return Some ( i ); }} None }# [ doc ( hidden )] unsafe fn __iterator_get_unchecked (& mut self , idx : usize )-> Self :: Item { unsafe {& $($mut_ )? * self . ptr . as_ptr (). add ( idx )}}$($extra )* }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < 'a , T > DoubleEndedIterator for $name < 'a , T > {# [ inline ] fn next_back (& mut self )-> Option <$elem > { unsafe { assume (! self . ptr . as_ptr (). is_null ()); if mem :: size_of ::< T > ()!= 0 { assume (! self . end . is_null ()); } if is_empty ! ( self ){ None } else { Some ( next_back_unchecked ! ( self ))}}}# [ inline ] fn nth_back (& mut self , n : usize )-> Option <$elem > { if n >= len ! ( self ){ self . end = self . ptr . as_ptr (); return None ; } unsafe { self . pre_dec_end ( n as isize ); Some ( next_back_unchecked ! ( self ))}}}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < T > FusedIterator for $name < '_ , T > {}# [ unstable ( feature = "trusted_len" , issue = "37572" )] unsafe impl < T > TrustedLen for $name < '_ , T > {}}}
+macro_rules! __ra_macro_fixture81 {($name : ident : $elem : ident , $iter_of : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl < 'a , $elem , P > Iterator for $name < 'a , $elem , P > where P : FnMut (& T )-> bool , { type Item = $iter_of ; # [ inline ] fn next (& mut self )-> Option <$iter_of > { self . inner . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . inner . size_hint ()}}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < 'a , $elem , P > FusedIterator for $name < 'a , $elem , P > where P : FnMut (& T )-> bool {}}; }
+macro_rules! __ra_macro_fixture82 {( clone $t : ident with |$s : ident | $e : expr )=>{ impl < 'a , P > Clone for $t < 'a , P > where P : Pattern < 'a , Searcher : Clone >, { fn clone (& self )-> Self { let $s = self ; $e }}}; }
+macro_rules! __ra_macro_fixture83 {{ forward : $(# [$forward_iterator_attribute : meta ])* struct $forward_iterator : ident ; reverse : $(# [$reverse_iterator_attribute : meta ])* struct $reverse_iterator : ident ; stability : $(# [$common_stability_attribute : meta ])* internal : $internal_iterator : ident yielding ($iterty : ty ); delegate $($t : tt )* }=>{$(# [$forward_iterator_attribute ])* $(# [$common_stability_attribute ])* pub struct $forward_iterator < 'a , P : Pattern < 'a >> ( pub ( super )$internal_iterator < 'a , P >); $(# [$common_stability_attribute ])* impl < 'a , P > fmt :: Debug for $forward_iterator < 'a , P > where P : Pattern < 'a , Searcher : fmt :: Debug >, { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { f . debug_tuple ( stringify ! ($forward_iterator )). field (& self . 0 ). finish ()}}$(# [$common_stability_attribute ])* impl < 'a , P : Pattern < 'a >> Iterator for $forward_iterator < 'a , P > { type Item = $iterty ; # [ inline ] fn next (& mut self )-> Option <$iterty > { self . 0 . next ()}}$(# [$common_stability_attribute ])* impl < 'a , P > Clone for $forward_iterator < 'a , P > where P : Pattern < 'a , Searcher : Clone >, { fn clone (& self )-> Self {$forward_iterator ( self . 0 . clone ())}}$(# [$reverse_iterator_attribute ])* $(# [$common_stability_attribute ])* pub struct $reverse_iterator < 'a , P : Pattern < 'a >> ( pub ( super )$internal_iterator < 'a , P >); $(# [$common_stability_attribute ])* impl < 'a , P > fmt :: Debug for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : fmt :: Debug >, { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { f . debug_tuple ( stringify ! ($reverse_iterator )). field (& self . 0 ). finish ()}}$(# [$common_stability_attribute ])* impl < 'a , P > Iterator for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : ReverseSearcher < 'a >>, { type Item = $iterty ; # [ inline ] fn next (& mut self )-> Option <$iterty > { self . 0 . next_back ()}}$(# [$common_stability_attribute ])* impl < 'a , P > Clone for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : Clone >, { fn clone (& self )-> Self {$reverse_iterator ( self . 0 . clone ())}}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < 'a , P : Pattern < 'a >> FusedIterator for $forward_iterator < 'a , P > {}# [ stable ( feature = "fused" , since = "1.26.0" )] impl < 'a , P > FusedIterator for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : ReverseSearcher < 'a >>, {} generate_pattern_iterators ! ($($t )* with $(# [$common_stability_attribute ])*, $forward_iterator , $reverse_iterator , $iterty ); }; { double ended ; with $(# [$common_stability_attribute : meta ])*, $forward_iterator : ident , $reverse_iterator : ident , $iterty : ty }=>{$(# [$common_stability_attribute ])* impl < 'a , P > DoubleEndedIterator for $forward_iterator < 'a , P > where P : Pattern < 'a , Searcher : DoubleEndedSearcher < 'a >>, {# [ inline ] fn next_back (& mut self )-> Option <$iterty > { self . 0 . next_back ()}}$(# [$common_stability_attribute ])* impl < 'a , P > DoubleEndedIterator for $reverse_iterator < 'a , P > where P : Pattern < 'a , Searcher : DoubleEndedSearcher < 'a >>, {# [ inline ] fn next_back (& mut self )-> Option <$iterty > { self . 0 . next ()}}}; { single ended ; with $(# [$common_stability_attribute : meta ])*, $forward_iterator : ident , $reverse_iterator : ident , $iterty : ty }=>{}}
+macro_rules! __ra_macro_fixture84 {($($Name : ident ),+)=>{$(# [ stable ( feature = "str_escape" , since = "1.34.0" )] impl < 'a > fmt :: Display for $Name < 'a > { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { self . clone (). try_for_each (| c | f . write_char ( c ))}}# [ stable ( feature = "str_escape" , since = "1.34.0" )] impl < 'a > Iterator for $Name < 'a > { type Item = char ; # [ inline ] fn next (& mut self )-> Option < char > { self . inner . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . inner . size_hint ()}# [ inline ] fn try_fold < Acc , Fold , R > (& mut self , init : Acc , fold : Fold )-> R where Self : Sized , Fold : FnMut ( Acc , Self :: Item )-> R , R : Try < Ok = Acc >{ self . inner . try_fold ( init , fold )}# [ inline ] fn fold < Acc , Fold > ( self , init : Acc , fold : Fold )-> Acc where Fold : FnMut ( Acc , Self :: Item )-> Acc , { self . inner . fold ( init , fold )}}# [ stable ( feature = "str_escape" , since = "1.34.0" )] impl < 'a > FusedIterator for $Name < 'a > {})+}}
+macro_rules! __ra_macro_fixture85 {($($(# [$attr : meta ])* struct $Name : ident impl $(<$($lifetime : lifetime ),+> )? Fn = |$($arg : ident : $ArgTy : ty ),*| -> $ReturnTy : ty $body : block ; )+)=>{$($(# [$attr ])* struct $Name ; impl $(<$($lifetime ),+> )? Fn < ($($ArgTy , )*)> for $Name {# [ inline ] extern "rust-call" fn call (& self , ($($arg , )*): ($($ArgTy , )*))-> $ReturnTy {$body }} impl $(<$($lifetime ),+> )? FnMut < ($($ArgTy , )*)> for $Name {# [ inline ] extern "rust-call" fn call_mut (& mut self , ($($arg , )*): ($($ArgTy , )*))-> $ReturnTy { Fn :: call (&* self , ($($arg , )*))}} impl $(<$($lifetime ),+> )? FnOnce < ($($ArgTy , )*)> for $Name { type Output = $ReturnTy ; # [ inline ] extern "rust-call" fn call_once ( self , ($($arg , )*): ($($ArgTy , )*))-> $ReturnTy { Fn :: call (& self , ($($arg , )*))}})+ }}
+macro_rules! __ra_macro_fixture86 {($($Tuple : ident {$(($idx : tt )-> $T : ident )+ })+)=>{$(# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : PartialEq ),+> PartialEq for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {# [ inline ] fn eq (& self , other : & ($($T ,)+))-> bool {$(self .$idx == other .$idx )&&+ }# [ inline ] fn ne (& self , other : & ($($T ,)+))-> bool {$(self .$idx != other .$idx )||+ }}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : Eq ),+> Eq for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : PartialOrd + PartialEq ),+> PartialOrd for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {# [ inline ] fn partial_cmp (& self , other : & ($($T ,)+))-> Option < Ordering > { lexical_partial_cmp ! ($(self .$idx , other .$idx ),+)}# [ inline ] fn lt (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( lt , $(self .$idx , other .$idx ),+)}# [ inline ] fn le (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( le , $(self .$idx , other .$idx ),+)}# [ inline ] fn ge (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( ge , $(self .$idx , other .$idx ),+)}# [ inline ] fn gt (& self , other : & ($($T ,)+))-> bool { lexical_ord ! ( gt , $(self .$idx , other .$idx ),+)}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : Ord ),+> Ord for ($($T ,)+) where last_type ! ($($T ,)+): ? Sized {# [ inline ] fn cmp (& self , other : & ($($T ,)+))-> Ordering { lexical_cmp ! ($(self .$idx , other .$idx ),+)}}# [ stable ( feature = "rust1" , since = "1.0.0" )] impl <$($T : Default ),+> Default for ($($T ,)+){# [ inline ] fn default ()-> ($($T ,)+){($({let x : $T = Default :: default (); x },)+)}})+ }}
+macro_rules! __ra_macro_fixture87 {($x : expr , $($tt : tt )*)=>{# [ doc = $x ]$($tt )* }; }
+macro_rules! __ra_macro_fixture88 {($x : expr , $($tt : tt )*)=>{# [ doc = $x ]$($tt )* }; }
+macro_rules! __ra_macro_fixture89 {(# [$stability : meta ]($($Trait : ident ),+ ) for $Ty : ident )=>{$(# [$stability ] impl fmt ::$Trait for $Ty {# [ inline ] fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { self . get (). fmt ( f )}})+ }}
+macro_rules! __ra_macro_fixture90 {($t : ident , $f : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shl <$f > for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn shl ( self , other : $f )-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_shl (( other & self :: shift_max ::$t as $f ) as u32 ))}} forward_ref_binop ! { impl Shl , shl for Wrapping <$t >, $f , # [ stable ( feature = "wrapping_ref_ops" , since = "1.39.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShlAssign <$f > for Wrapping <$t > {# [ inline ] fn shl_assign (& mut self , other : $f ){* self = * self << other ; }} forward_ref_op_assign ! { impl ShlAssign , shl_assign for Wrapping <$t >, $f }# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shr <$f > for Wrapping <$t > { type Output = Wrapping <$t >; # [ inline ] fn shr ( self , other : $f )-> Wrapping <$t > { Wrapping ( self . 0 . wrapping_shr (( other & self :: shift_max ::$t as $f ) as u32 ))}} forward_ref_binop ! { impl Shr , shr for Wrapping <$t >, $f , # [ stable ( feature = "wrapping_ref_ops" , since = "1.39.0" )]}# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShrAssign <$f > for Wrapping <$t > {# [ inline ] fn shr_assign (& mut self , other : $f ){* self = * self >> other ; }} forward_ref_op_assign ! { impl ShrAssign , shr_assign for Wrapping <$t >, $f }}; }
+macro_rules! __ra_macro_fixture91 {( impl $imp : ident , $method : ident for $t : ty , $u : ty )=>{ forward_ref_binop ! ( impl $imp , $method for $t , $u , # [ stable ( feature = "rust1" , since = "1.0.0" )]); }; ( impl $imp : ident , $method : ident for $t : ty , $u : ty , # [$attr : meta ])=>{# [$attr ] impl < 'a > $imp <$u > for & 'a $t { type Output = <$t as $imp <$u >>:: Output ; # [ inline ] fn $method ( self , other : $u )-> <$t as $imp <$u >>:: Output {$imp ::$method (* self , other )}}# [$attr ] impl $imp <&$u > for $t { type Output = <$t as $imp <$u >>:: Output ; # [ inline ] fn $method ( self , other : &$u )-> <$t as $imp <$u >>:: Output {$imp ::$method ( self , * other )}}# [$attr ] impl $imp <&$u > for &$t { type Output = <$t as $imp <$u >>:: Output ; # [ inline ] fn $method ( self , other : &$u )-> <$t as $imp <$u >>:: Output {$imp ::$method (* self , * other )}}}}
+macro_rules! __ra_macro_fixture92 {( impl $imp : ident , $method : ident for $t : ty , $u : ty )=>{ forward_ref_op_assign ! ( impl $imp , $method for $t , $u , # [ stable ( feature = "op_assign_builtins_by_ref" , since = "1.22.0" )]); }; ( impl $imp : ident , $method : ident for $t : ty , $u : ty , # [$attr : meta ])=>{# [$attr ] impl $imp <&$u > for $t {# [ inline ] fn $method (& mut self , other : &$u ){$imp ::$method ( self , * other ); }}}}
+macro_rules! __ra_macro_fixture93 {( impl $imp : ident , $method : ident for $t : ty )=>{ forward_ref_unop ! ( impl $imp , $method for $t , # [ stable ( feature = "rust1" , since = "1.0.0" )]); }; ( impl $imp : ident , $method : ident for $t : ty , # [$attr : meta ])=>{# [$attr ] impl $imp for &$t { type Output = <$t as $imp >:: Output ; # [ inline ] fn $method ( self )-> <$t as $imp >:: Output {$imp ::$method (* self )}}}}
+macro_rules! __ra_macro_fixture94 {($FnTy : ty , $($Arg : ident ),*)=>{# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> PartialEq for $FnTy {# [ inline ] fn eq (& self , other : & Self )-> bool {* self as usize == * other as usize }}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> Eq for $FnTy {}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> PartialOrd for $FnTy {# [ inline ] fn partial_cmp (& self , other : & Self )-> Option < Ordering > {(* self as usize ). partial_cmp (& (* other as usize ))}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> Ord for $FnTy {# [ inline ] fn cmp (& self , other : & Self )-> Ordering {(* self as usize ). cmp (& (* other as usize ))}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> hash :: Hash for $FnTy { fn hash < HH : hash :: Hasher > (& self , state : & mut HH ){ state . write_usize (* self as usize )}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> fmt :: Pointer for $FnTy { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { fmt :: Pointer :: fmt (& (* self as usize as * const ()), f )}}# [ stable ( feature = "fnptr_impls" , since = "1.4.0" )] impl < Ret , $($Arg ),*> fmt :: Debug for $FnTy { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result { fmt :: Pointer :: fmt (& (* self as usize as * const ()), f )}}}}
+macro_rules! __ra_macro_fixture95 {($t : ty , $f : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shl <$f > for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn shl ( self , other : $f )-> $t { self << other }} forward_ref_binop ! { impl Shl , shl for $t , $f }}; }
+macro_rules! __ra_macro_fixture96 {($t : ty , $f : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl Shr <$f > for $t { type Output = $t ; # [ inline ]# [ rustc_inherit_overflow_checks ] fn shr ( self , other : $f )-> $t { self >> other }} forward_ref_binop ! { impl Shr , shr for $t , $f }}; }
+macro_rules! __ra_macro_fixture97 {($t : ty , $f : ty )=>{# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShlAssign <$f > for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn shl_assign (& mut self , other : $f ){* self <<= other }} forward_ref_op_assign ! { impl ShlAssign , shl_assign for $t , $f }}; }
+macro_rules! __ra_macro_fixture98 {($t : ty , $f : ty )=>{# [ stable ( feature = "op_assign_traits" , since = "1.8.0" )] impl ShrAssign <$f > for $t {# [ inline ]# [ rustc_inherit_overflow_checks ] fn shr_assign (& mut self , other : $f ){* self >>= other }} forward_ref_op_assign ! { impl ShrAssign , shr_assign for $t , $f }}; }
+macro_rules! __ra_macro_fixture99 {( fmt ::$Trait : ident for $T : ident as $U : ident -> $Radix : ident )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )] impl fmt ::$Trait for $T { fn fmt (& self , f : & mut fmt :: Formatter < '_ >)-> fmt :: Result {$Radix . fmt_int (* self as $U , f )}}}; }
+macro_rules! __ra_macro_fixture100 {($name : ident , $($other : ident ,)*)=>( tuple ! {$($other ,)* })}
+macro_rules! __ra_macro_fixture101 {{ unsafe fn $name : ident : $adjacent_kv : ident }=>{# [ doc = " Given a leaf edge handle into an owned tree, returns a handle to the next KV," ]# [ doc = " while deallocating any node left behind yet leaving the corresponding edge" ]# [ doc = " in its parent node dangling." ]# [ doc = "" ]# [ doc = " # Safety" ]# [ doc = " - The leaf edge must not be the last one in the direction travelled." ]# [ doc = " - The node carrying the next KV returned must not have been deallocated by a" ]# [ doc = " previous call on any handle obtained for this tree." ] unsafe fn $name < K , V > ( leaf_edge : Handle < NodeRef < marker :: Owned , K , V , marker :: Leaf >, marker :: Edge >, )-> Handle < NodeRef < marker :: Owned , K , V , marker :: LeafOrInternal >, marker :: KV > { let mut edge = leaf_edge . forget_node_type (); loop { edge = match edge .$adjacent_kv (){ Ok ( internal_kv )=> return internal_kv , Err ( last_edge )=>{ unsafe { let parent_edge = last_edge . into_node (). deallocate_and_ascend (); unwrap_unchecked ( parent_edge ). forget_node_type ()}}}}}}; }
+macro_rules! __ra_macro_fixture102 {([$($vars : tt )*]$lhs : ty , $rhs : ty , $($constraints : tt )*)=>{# [ stable ( feature = "vec_deque_partial_eq_slice" , since = "1.17.0" )] impl < A , B , $($vars )*> PartialEq <$rhs > for $lhs where A : PartialEq < B >, $($constraints )* { fn eq (& self , other : &$rhs )-> bool { if self . len ()!= other . len (){ return false ; } let ( sa , sb )= self . as_slices (); let ( oa , ob )= other [..]. split_at ( sa . len ()); sa == oa && sb == ob }}}}
+macro_rules! __ra_macro_fixture103 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ allow ( unused_lifetimes )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool { PartialEq :: eq (& self [..], & other [..])}# [ inline ] fn ne (& self , other : &$rhs )-> bool { PartialEq :: ne (& self [..], & other [..])}}# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ allow ( unused_lifetimes )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool { PartialEq :: eq (& self [..], & other [..])}# [ inline ] fn ne (& self , other : &$lhs )-> bool { PartialEq :: ne (& self [..], & other [..])}}}; }
+macro_rules! __ra_macro_fixture104 {($t : ty , $is_zero : expr )=>{ unsafe impl IsZero for $t {# [ inline ] fn is_zero (& self )-> bool {$is_zero (* self )}}}; }
+macro_rules! __ra_macro_fixture105 {([$($vars : tt )*]$lhs : ty , $rhs : ty $(where $ty : ty : $bound : ident )?, # [$stability : meta ])=>{# [$stability ] impl < A , B , $($vars )*> PartialEq <$rhs > for $lhs where A : PartialEq < B >, $($ty : $bound )? {# [ inline ] fn eq (& self , other : &$rhs )-> bool { self [..]== other [..]}# [ inline ] fn ne (& self , other : &$rhs )-> bool { self [..]!= other [..]}}}}
+macro_rules! __ra_macro_fixture106 {('owned : $($oty : ident ,)* 'interned : $($ity : ident ,)* )=>{# [ repr ( C )]# [ allow ( non_snake_case )] pub struct HandleCounters {$($oty : AtomicUsize ,)* $($ity : AtomicUsize ,)* } impl HandleCounters { extern "C" fn get ()-> & 'static Self { static COUNTERS : HandleCounters = HandleCounters {$($oty : AtomicUsize :: new ( 1 ),)* $($ity : AtomicUsize :: new ( 1 ),)* }; & COUNTERS }}# [ repr ( C )]# [ allow ( non_snake_case )] pub ( super ) struct HandleStore < S : server :: Types > {$($oty : handle :: OwnedStore < S ::$oty >,)* $($ity : handle :: InternedStore < S ::$ity >,)* } impl < S : server :: Types > HandleStore < S > { pub ( super ) fn new ( handle_counters : & 'static HandleCounters )-> Self { HandleStore {$($oty : handle :: OwnedStore :: new (& handle_counters .$oty ),)* $($ity : handle :: InternedStore :: new (& handle_counters .$ity ),)* }}}$(# [ repr ( C )] pub ( crate ) struct $oty ( handle :: Handle ); impl ! Send for $oty {} impl ! Sync for $oty {} impl Drop for $oty { fn drop (& mut self ){$oty ( self . 0 ). drop (); }} impl < S > Encode < S > for $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ let handle = self . 0 ; mem :: forget ( self ); handle . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$oty . take ( handle :: Handle :: decode ( r , & mut ()))}} impl < S > Encode < S > for &$oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > Decode < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's HandleStore < server :: MarkedTypes < S >>)-> Self {& s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S > Encode < S > for & mut $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's mut Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's mut HandleStore < server :: MarkedTypes < S >> )-> Self {& mut s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$oty . alloc ( self ). encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $oty { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$oty ( handle :: Handle :: decode ( r , s ))}})* $(# [ repr ( C )]# [ derive ( Copy , Clone , PartialEq , Eq , Hash )] pub ( crate ) struct $ity ( handle :: Handle ); impl ! Send for $ity {} impl ! Sync for $ity {} impl < S > Encode < S > for $ity { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$ity . copy ( handle :: Handle :: decode ( r , & mut ()))}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$ity . alloc ( self ). encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $ity { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$ity ( handle :: Handle :: decode ( r , s ))}})* }}
+macro_rules! __ra_macro_fixture107 {($S : ident , $self : ident , $m : ident )=>{$m ! { FreeFunctions { fn drop ($self : $S :: FreeFunctions ); fn track_env_var ( var : & str , value : Option <& str >); }, TokenStream { fn drop ($self : $S :: TokenStream ); fn clone ($self : &$S :: TokenStream )-> $S :: TokenStream ; fn new ()-> $S :: TokenStream ; fn is_empty ($self : &$S :: TokenStream )-> bool ; fn from_str ( src : & str )-> $S :: TokenStream ; fn to_string ($self : &$S :: TokenStream )-> String ; fn from_token_tree ( tree : TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >, )-> $S :: TokenStream ; fn into_iter ($self : $S :: TokenStream )-> $S :: TokenStreamIter ; }, TokenStreamBuilder { fn drop ($self : $S :: TokenStreamBuilder ); fn new ()-> $S :: TokenStreamBuilder ; fn push ($self : & mut $S :: TokenStreamBuilder , stream : $S :: TokenStream ); fn build ($self : $S :: TokenStreamBuilder )-> $S :: TokenStream ; }, TokenStreamIter { fn drop ($self : $S :: TokenStreamIter ); fn clone ($self : &$S :: TokenStreamIter )-> $S :: TokenStreamIter ; fn next ($self : & mut $S :: TokenStreamIter , )-> Option < TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >>; }, Group { fn drop ($self : $S :: Group ); fn clone ($self : &$S :: Group )-> $S :: Group ; fn new ( delimiter : Delimiter , stream : $S :: TokenStream )-> $S :: Group ; fn delimiter ($self : &$S :: Group )-> Delimiter ; fn stream ($self : &$S :: Group )-> $S :: TokenStream ; fn span ($self : &$S :: Group )-> $S :: Span ; fn span_open ($self : &$S :: Group )-> $S :: Span ; fn span_close ($self : &$S :: Group )-> $S :: Span ; fn set_span ($self : & mut $S :: Group , span : $S :: Span ); }, Punct { fn new ( ch : char , spacing : Spacing )-> $S :: Punct ; fn as_char ($self : $S :: Punct )-> char ; fn spacing ($self : $S :: Punct )-> Spacing ; fn span ($self : $S :: Punct )-> $S :: Span ; fn with_span ($self : $S :: Punct , span : $S :: Span )-> $S :: Punct ; }, Ident { fn new ( string : & str , span : $S :: Span , is_raw : bool )-> $S :: Ident ; fn span ($self : $S :: Ident )-> $S :: Span ; fn with_span ($self : $S :: Ident , span : $S :: Span )-> $S :: Ident ; }, Literal { fn drop ($self : $S :: Literal ); fn clone ($self : &$S :: Literal )-> $S :: Literal ; fn debug_kind ($self : &$S :: Literal )-> String ; fn symbol ($self : &$S :: Literal )-> String ; fn suffix ($self : &$S :: Literal )-> Option < String >; fn integer ( n : & str )-> $S :: Literal ; fn typed_integer ( n : & str , kind : & str )-> $S :: Literal ; fn float ( n : & str )-> $S :: Literal ; fn f32 ( n : & str )-> $S :: Literal ; fn f64 ( n : & str )-> $S :: Literal ; fn string ( string : & str )-> $S :: Literal ; fn character ( ch : char )-> $S :: Literal ; fn byte_string ( bytes : & [ u8 ])-> $S :: Literal ; fn span ($self : &$S :: Literal )-> $S :: Span ; fn set_span ($self : & mut $S :: Literal , span : $S :: Span ); fn subspan ($self : &$S :: Literal , start : Bound < usize >, end : Bound < usize >, )-> Option <$S :: Span >; }, SourceFile { fn drop ($self : $S :: SourceFile ); fn clone ($self : &$S :: SourceFile )-> $S :: SourceFile ; fn eq ($self : &$S :: SourceFile , other : &$S :: SourceFile )-> bool ; fn path ($self : &$S :: SourceFile )-> String ; fn is_real ($self : &$S :: SourceFile )-> bool ; }, MultiSpan { fn drop ($self : $S :: MultiSpan ); fn new ()-> $S :: MultiSpan ; fn push ($self : & mut $S :: MultiSpan , span : $S :: Span ); }, Diagnostic { fn drop ($self : $S :: Diagnostic ); fn new ( level : Level , msg : & str , span : $S :: MultiSpan )-> $S :: Diagnostic ; fn sub ($self : & mut $S :: Diagnostic , level : Level , msg : & str , span : $S :: MultiSpan , ); fn emit ($self : $S :: Diagnostic ); }, Span { fn debug ($self : $S :: Span )-> String ; fn def_site ()-> $S :: Span ; fn call_site ()-> $S :: Span ; fn mixed_site ()-> $S :: Span ; fn source_file ($self : $S :: Span )-> $S :: SourceFile ; fn parent ($self : $S :: Span )-> Option <$S :: Span >; fn source ($self : $S :: Span )-> $S :: Span ; fn start ($self : $S :: Span )-> LineColumn ; fn end ($self : $S :: Span )-> LineColumn ; fn join ($self : $S :: Span , other : $S :: Span )-> Option <$S :: Span >; fn resolved_at ($self : $S :: Span , at : $S :: Span )-> $S :: Span ; fn source_text ($self : $S :: Span )-> Option < String >; }, }}; }
+macro_rules! __ra_macro_fixture108 {( le $ty : ty )=>{ impl < S > Encode < S > for $ty { fn encode ( self , w : & mut Writer , _: & mut S ){ w . write_all (& self . to_le_bytes ()). unwrap (); }} impl < S > DecodeMut < '_ , '_ , S > for $ty { fn decode ( r : & mut Reader < '_ >, _: & mut S )-> Self { const N : usize = :: std :: mem :: size_of ::<$ty > (); let mut bytes = [ 0 ; N ]; bytes . copy_from_slice (& r [.. N ]); * r = & r [ N ..]; Self :: from_le_bytes ( bytes )}}}; ( struct $name : ident {$($field : ident ),* $(,)? })=>{ impl < S > Encode < S > for $name { fn encode ( self , w : & mut Writer , s : & mut S ){$(self .$field . encode ( w , s );)* }} impl < S > DecodeMut < '_ , '_ , S > for $name { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$name {$($field : DecodeMut :: decode ( r , s )),* }}}}; ( enum $name : ident $(<$($T : ident ),+>)? {$($variant : ident $(($field : ident ))*),* $(,)? })=>{ impl < S , $($($T : Encode < S >),+)?> Encode < S > for $name $(<$($T ),+>)? { fn encode ( self , w : & mut Writer , s : & mut S ){# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match self {$($name ::$variant $(($field ))* =>{ tag ::$variant . encode ( w , s ); $($field . encode ( w , s );)* })* }}} impl < S , $($($T : for < 's > DecodeMut < 'a , 's , S >),+)?> DecodeMut < 'a , '_ , S > for $name $(<$($T ),+>)? { fn decode ( r : & mut Reader < 'a >, s : & mut S )-> Self {# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match u8 :: decode ( r , s ){$(tag ::$variant =>{$(let $field = DecodeMut :: decode ( r , s );)* $name ::$variant $(($field ))* })* _ => unreachable ! (), }}}}}
+macro_rules! __ra_macro_fixture109 {($($ty : ty ),* $(,)?)=>{$(impl Mark for $ty { type Unmarked = Self ; fn mark ( unmarked : Self :: Unmarked )-> Self { unmarked }} impl Unmark for $ty { type Unmarked = Self ; fn unmark ( self )-> Self :: Unmarked { self }})* }}
+macro_rules! __ra_macro_fixture110 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(impl $name {$(pub ( crate ) fn $method ($($arg : $arg_ty ),*)$(-> $ret_ty )* { Bridge :: with (| bridge | { let mut b = bridge . cached_buffer . take (); b . clear (); api_tags :: Method ::$name ( api_tags ::$name ::$method ). encode (& mut b , & mut ()); reverse_encode ! ( b ; $($arg ),*); b = bridge . dispatch . call ( b ); let r = Result ::<_, PanicMessage >:: decode (& mut & b [..], & mut ()); bridge . cached_buffer = b ; r . unwrap_or_else (| e | panic :: resume_unwind ( e . into ()))})})* })* }}
+macro_rules! __ra_macro_fixture111 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait Types {$(associated_item ! ( type $name );)* }$(pub trait $name : Types {$(associated_item ! ( fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )?);)* })* pub trait Server : Types $(+ $name )* {} impl < S : Types $(+ $name )*> Server for S {}}}
+macro_rules! __ra_macro_fixture112 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ impl < S : Types > Types for MarkedTypes < S > {$(type $name = Marked < S ::$name , client ::$name >;)* }$(impl < S : $name > $name for MarkedTypes < S > {$(fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )? {<_>:: mark ($name ::$method (& mut self . 0 , $($arg . unmark ()),*))})* })* }}
+macro_rules! __ra_macro_fixture113 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait DispatcherTrait {$(type $name ;)* fn dispatch (& mut self , b : Buffer < u8 >)-> Buffer < u8 >; } impl < S : Server > DispatcherTrait for Dispatcher < MarkedTypes < S >> {$(type $name = < MarkedTypes < S > as Types >::$name ;)* fn dispatch (& mut self , mut b : Buffer < u8 >)-> Buffer < u8 > { let Dispatcher { handle_store , server }= self ; let mut reader = & b [..]; match api_tags :: Method :: decode (& mut reader , & mut ()){$(api_tags :: Method ::$name ( m )=> match m {$(api_tags ::$name ::$method =>{ let mut call_method = || { reverse_decode ! ( reader , handle_store ; $($arg : $arg_ty ),*); $name ::$method ( server , $($arg ),*)}; let r = if thread :: panicking (){ Ok ( call_method ())} else { panic :: catch_unwind ( panic :: AssertUnwindSafe ( call_method )). map_err ( PanicMessage :: from )}; b . clear (); r . encode (& mut b , handle_store ); })* }),* } b }}}}
+macro_rules! __ra_macro_fixture114 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(pub ( super ) enum $name {$($method ),* } rpc_encode_decode ! ( enum $name {$($method ),* }); )* pub ( super ) enum Method {$($name ($name )),* } rpc_encode_decode ! ( enum Method {$($name ( m )),* }); }}
+macro_rules! __ra_macro_fixture115 {($(if # [ cfg ($meta : meta )]{$($tokens : tt )* }) else * else {$($tokens2 : tt )* })=>{$crate :: cfg_if ! {@ __items (); $((($meta )($($tokens )*)), )* (()($($tokens2 )*)), }}; ( if # [ cfg ($i_met : meta )]{$($i_tokens : tt )* }$(else if # [ cfg ($e_met : meta )]{$($e_tokens : tt )* })* )=>{$crate :: cfg_if ! {@ __items (); (($i_met )($($i_tokens )*)), $((($e_met )($($e_tokens )*)), )* (()()), }}; (@ __items ($($not : meta ,)*); )=>{}; (@ __items ($($not : meta ,)*); (($($m : meta ),*)($($tokens : tt )*)), $($rest : tt )*)=>{# [ cfg ( all ($($m ,)* not ( any ($($not ),*))))]$crate :: cfg_if ! {@ __identity $($tokens )* }$crate :: cfg_if ! {@ __items ($($not ,)* $($m ,)*); $($rest )* }}; (@ __identity $($tokens : tt )*)=>{$($tokens )* }; }
+macro_rules! __ra_macro_fixture116 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool {< OsStr as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool {< OsStr as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$rhs > for $lhs {# [ inline ] fn partial_cmp (& self , other : &$rhs )-> Option < cmp :: Ordering > {< OsStr as PartialOrd >:: partial_cmp ( self , other )}}# [ stable ( feature = "cmp_os_str" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$lhs > for $rhs {# [ inline ] fn partial_cmp (& self , other : &$lhs )-> Option < cmp :: Ordering > {< OsStr as PartialOrd >:: partial_cmp ( self , other )}}}; }
+macro_rules! __ra_macro_fixture117 {()=>{}; ($(# [$attr : meta ])* $vis : vis static $name : ident : $t : ty = $init : expr ; $($rest : tt )*)=>($crate :: __thread_local_inner ! ($(# [$attr ])* $vis $name , $t , $init ); $crate :: thread_local ! ($($rest )*); ); ($(# [$attr : meta ])* $vis : vis static $name : ident : $t : ty = $init : expr )=>($crate :: __thread_local_inner ! ($(# [$attr ])* $vis $name , $t , $init ); ); }
+macro_rules! __ra_macro_fixture118 {($($t : ty )*)=>($(impl ReadNumberHelper for $t { const ZERO : Self = 0 ; # [ inline ] fn checked_mul (& self , other : u32 )-> Option < Self > { Self :: checked_mul (* self , other . try_into (). ok ()?)}# [ inline ] fn checked_add (& self , other : u32 )-> Option < Self > { Self :: checked_add (* self , other . try_into (). ok ()?)}})*)}
+macro_rules! __ra_macro_fixture119 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "partialeq_path" , since = "1.6.0" )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool {< Path as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "partialeq_path" , since = "1.6.0" )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool {< Path as PartialEq >:: eq ( self , other )}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$rhs > for $lhs {# [ inline ] fn partial_cmp (& self , other : &$rhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self , other )}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$lhs > for $rhs {# [ inline ] fn partial_cmp (& self , other : &$lhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self , other )}}}; }
+macro_rules! __ra_macro_fixture120 {($lhs : ty , $rhs : ty )=>{# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$rhs > for $lhs {# [ inline ] fn eq (& self , other : &$rhs )-> bool {< Path as PartialEq >:: eq ( self , other . as_ref ())}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialEq <$lhs > for $rhs {# [ inline ] fn eq (& self , other : &$lhs )-> bool {< Path as PartialEq >:: eq ( self . as_ref (), other )}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$rhs > for $lhs {# [ inline ] fn partial_cmp (& self , other : &$rhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self , other . as_ref ())}}# [ stable ( feature = "cmp_path" , since = "1.8.0" )] impl < 'a , 'b > PartialOrd <$lhs > for $rhs {# [ inline ] fn partial_cmp (& self , other : &$lhs )-> Option < cmp :: Ordering > {< Path as PartialOrd >:: partial_cmp ( self . as_ref (), other )}}}; }
+macro_rules! __ra_macro_fixture121 {(@ key $t : ty , $init : expr )=>{{# [ inline ] fn __init ()-> $t {$init } unsafe fn __getit ()-> $crate :: option :: Option <& 'static $t > {# [ cfg ( all ( target_arch = "wasm32" , not ( target_feature = "atomics" )))] static __KEY : $crate :: thread :: __StaticLocalKeyInner <$t > = $crate :: thread :: __StaticLocalKeyInner :: new (); # [ thread_local ]# [ cfg ( all ( target_thread_local , not ( all ( target_arch = "wasm32" , not ( target_feature = "atomics" ))), ))] static __KEY : $crate :: thread :: __FastLocalKeyInner <$t > = $crate :: thread :: __FastLocalKeyInner :: new (); # [ cfg ( all ( not ( target_thread_local ), not ( all ( target_arch = "wasm32" , not ( target_feature = "atomics" ))), ))] static __KEY : $crate :: thread :: __OsLocalKeyInner <$t > = $crate :: thread :: __OsLocalKeyInner :: new (); # [ allow ( unused_unsafe )] unsafe { __KEY . get ( __init )}} unsafe {$crate :: thread :: LocalKey :: new ( __getit )}}}; ($(# [$attr : meta ])* $vis : vis $name : ident , $t : ty , $init : expr )=>{$(# [$attr ])* $vis const $name : $crate :: thread :: LocalKey <$t > = $crate :: __thread_local_inner ! (@ key $t , $init ); }}
+macro_rules! __ra_macro_fixture122 {({$($then_tt : tt )* } else {$($else_tt : tt )* })=>{ cfg_if :: cfg_if ! { if # [ cfg ( all ( target_os = "linux" , target_env = "gnu" ))]{$($then_tt )* } else {$($else_tt )* }}}; ($($block_inner : tt )*)=>{# [ cfg ( all ( target_os = "linux" , target_env = "gnu" ))]{$($block_inner )* }}; }
+macro_rules! __ra_macro_fixture123 {($($t : ident )*)=>($(impl IsMinusOne for $t { fn is_minus_one (& self )-> bool {* self == - 1 }})*)}
+macro_rules! __ra_macro_fixture124 {($(if # [ cfg ($($meta : meta ),*)]{$($it : item )* }) else * else {$($it2 : item )* })=>{ cfg_if ! {@ __items (); $((($($meta ),*)($($it )*)), )* (()($($it2 )*)), }}; ( if # [ cfg ($($i_met : meta ),*)]{$($i_it : item )* }$(else if # [ cfg ($($e_met : meta ),*)]{$($e_it : item )* })* )=>{ cfg_if ! {@ __items (); (($($i_met ),*)($($i_it )*)), $((($($e_met ),*)($($e_it )*)), )* (()()), }}; (@ __items ($($not : meta ,)*); )=>{}; (@ __items ($($not : meta ,)*); (($($m : meta ),*)($($it : item )*)), $($rest : tt )*)=>{ cfg_if ! {@ __apply cfg ( all ($($m ,)* not ( any ($($not ),*)))), $($it )* } cfg_if ! {@ __items ($($not ,)* $($m ,)*); $($rest )* }}; (@ __apply $m : meta , $($it : item )*)=>{$(# [$m ]$it )* }; }
+macro_rules! __ra_macro_fixture125 {($bench_macro : ident , $bench_ahash_serial : ident , $bench_std_serial : ident , $bench_ahash_highbits : ident , $bench_std_highbits : ident , $bench_ahash_random : ident , $bench_std_random : ident )=>{$bench_macro ! ($bench_ahash_serial , AHashMap , 0 ..); $bench_macro ! ($bench_std_serial , StdHashMap , 0 ..); $bench_macro ! ($bench_ahash_highbits , AHashMap , ( 0 ..). map ( usize :: swap_bytes )); $bench_macro ! ($bench_std_highbits , StdHashMap , ( 0 ..). map ( usize :: swap_bytes )); $bench_macro ! ($bench_ahash_random , AHashMap , RandomKeys :: new ()); $bench_macro ! ($bench_std_random , StdHashMap , RandomKeys :: new ()); }; }
+macro_rules! __ra_macro_fixture126 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: with_capacity_and_hasher ( SIZE , Default :: default ()); b . iter (|| { m . clear (); for i in ($keydist ). take ( SIZE ){ m . insert ( i , i ); } black_box (& mut m ); })}}; }
+macro_rules! __ra_macro_fixture127 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut base = $maptype :: default (); for i in ($keydist ). take ( SIZE ){ base . insert ( i , i ); } let skip = $keydist . skip ( SIZE ); b . iter (|| { let mut m = base . clone (); let mut add_iter = skip . clone (); let mut remove_iter = $keydist ; for ( add , remove ) in (& mut add_iter ). zip (& mut remove_iter ). take ( SIZE ){ m . insert ( add , add ); black_box ( m . remove (& remove )); } black_box ( m ); })}}; }
+macro_rules! __ra_macro_fixture128 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: default (); for i in $keydist . take ( SIZE ){ m . insert ( i , i ); } b . iter (|| { for i in $keydist . take ( SIZE ){ black_box ( m . get (& i )); }})}}; }
+macro_rules! __ra_macro_fixture129 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: default (); let mut iter = $keydist ; for i in (& mut iter ). take ( SIZE ){ m . insert ( i , i ); } b . iter (|| { for i in (& mut iter ). take ( SIZE ){ black_box ( m . get (& i )); }})}}; }
+macro_rules! __ra_macro_fixture130 {($name : ident , $maptype : ident , $keydist : expr )=>{# [ bench ] fn $name ( b : & mut Bencher ){ let mut m = $maptype :: default (); for i in ($keydist ). take ( SIZE ){ m . insert ( i , i ); } b . iter (|| { for i in & m { black_box ( i ); }})}}; }
+macro_rules! __ra_macro_fixture131 {($(if # [ cfg ($($meta : meta ),*)]{$($it : item )* }) else * else {$($it2 : item )* })=>{ cfg_if ! {@ __items (); $((($($meta ),*)($($it )*)), )* (()($($it2 )*)), }}; ( if # [ cfg ($($i_met : meta ),*)]{$($i_it : item )* }$(else if # [ cfg ($($e_met : meta ),*)]{$($e_it : item )* })* )=>{ cfg_if ! {@ __items (); (($($i_met ),*)($($i_it )*)), $((($($e_met ),*)($($e_it )*)), )* (()()), }}; (@ __items ($($not : meta ,)*); )=>{}; (@ __items ($($not : meta ,)*); (($($m : meta ),*)($($it : item )*)), $($rest : tt )*)=>{ cfg_if ! {@ __apply cfg ( all ($($m ,)* not ( any ($($not ),*)))), $($it )* } cfg_if ! {@ __items ($($not ,)* $($m ,)*); $($rest )* }}; (@ __apply $m : meta , $($it : item )*)=>{$(# [$m ]$it )* }; }
+macro_rules! __ra_macro_fixture132 {($($(# [$attr : meta ])* pub $t : ident $i : ident {$($field : tt )* })*)=>($(s ! ( it : $(# [$attr ])* pub $t $i {$($field )* }); )*); ( it : $(# [$attr : meta ])* pub union $i : ident {$($field : tt )* })=>( compile_error ! ( "unions cannot derive extra traits, use s_no_extra_traits instead" ); ); ( it : $(# [$attr : meta ])* pub struct $i : ident {$($field : tt )* })=>( __item ! {# [ repr ( C )]# [ cfg_attr ( feature = "extra_traits" , derive ( Debug , Eq , Hash , PartialEq ))]# [ allow ( deprecated )]$(# [$attr ])* pub struct $i {$($field )* }}# [ allow ( deprecated )] impl :: Copy for $i {}# [ allow ( deprecated )] impl :: Clone for $i { fn clone (& self )-> $i {* self }}); }
+macro_rules! __ra_macro_fixture133 {($i : item )=>{$i }; }
+macro_rules! __ra_macro_fixture134 {($($(# [$attr : meta ])* pub $t : ident $i : ident {$($field : tt )* })*)=>($(s_no_extra_traits ! ( it : $(# [$attr ])* pub $t $i {$($field )* }); )*); ( it : $(# [$attr : meta ])* pub union $i : ident {$($field : tt )* })=>( cfg_if ! { if # [ cfg ( libc_union )]{ __item ! {# [ repr ( C )]$(# [$attr ])* pub union $i {$($field )* }} impl :: Copy for $i {} impl :: Clone for $i { fn clone (& self )-> $i {* self }}}}); ( it : $(# [$attr : meta ])* pub struct $i : ident {$($field : tt )* })=>( __item ! {# [ repr ( C )]$(# [$attr ])* pub struct $i {$($field )* }}# [ allow ( deprecated )] impl :: Copy for $i {}# [ allow ( deprecated )] impl :: Clone for $i { fn clone (& self )-> $i {* self }}); }
+macro_rules! __ra_macro_fixture135 {($($(# [$attr : meta ])* pub const $name : ident : $t1 : ty = $t2 : ident {$($field : tt )* };)*)=>($(# [ cfg ( libc_align )]$(# [$attr ])* pub const $name : $t1 = $t2 {$($field )* }; # [ cfg ( not ( libc_align ))]$(# [$attr ])* pub const $name : $t1 = $t2 {$($field )* __align : [], }; )*)}
+macro_rules! __ra_macro_fixture136 {($($args : tt )* )=>{$(define_ioctl ! ($args ); )* }}
+macro_rules! __ra_macro_fixture137 {({$name : ident , $ioctl : ident , $arg_type : ty })=>{ pub unsafe fn $name ( fd : c_int , arg : $arg_type )-> c_int { untyped_ioctl ( fd , bindings ::$ioctl , arg )}}; }
+macro_rules! __ra_macro_fixture138 {($($T : ty ),*)=>{$(impl IdentFragment for $T { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { fmt :: Display :: fmt ( self , f )}})* }}
+macro_rules! __ra_macro_fixture139 {($($t : ident =>$name : ident )*)=>($(impl ToTokens for $t { fn to_tokens (& self , tokens : & mut TokenStream ){ tokens . append ( Literal ::$name (* self )); }})*)}
+macro_rules! __ra_macro_fixture140 {($($l : tt )*)=>{$(impl < 'q , T : 'q > RepAsIteratorExt < 'q > for [ T ; $l ]{ type Iter = slice :: Iter < 'q , T >; fn quote_into_iter (& 'q self )-> ( Self :: Iter , HasIter ){( self . iter (), HasIter )}})* }}
+macro_rules! __ra_macro_fixture141 {($name : ident $spanned : ident $char1 : tt )=>{ pub fn $name ( tokens : & mut TokenStream ){ tokens . append ( Punct :: new ($char1 , Spacing :: Alone )); } pub fn $spanned ( tokens : & mut TokenStream , span : Span ){ let mut punct = Punct :: new ($char1 , Spacing :: Alone ); punct . set_span ( span ); tokens . append ( punct ); }}; ($name : ident $spanned : ident $char1 : tt $char2 : tt )=>{ pub fn $name ( tokens : & mut TokenStream ){ tokens . append ( Punct :: new ($char1 , Spacing :: Joint )); tokens . append ( Punct :: new ($char2 , Spacing :: Alone )); } pub fn $spanned ( tokens : & mut TokenStream , span : Span ){ let mut punct = Punct :: new ($char1 , Spacing :: Joint ); punct . set_span ( span ); tokens . append ( punct ); let mut punct = Punct :: new ($char2 , Spacing :: Alone ); punct . set_span ( span ); tokens . append ( punct ); }}; ($name : ident $spanned : ident $char1 : tt $char2 : tt $char3 : tt )=>{ pub fn $name ( tokens : & mut TokenStream ){ tokens . append ( Punct :: new ($char1 , Spacing :: Joint )); tokens . append ( Punct :: new ($char2 , Spacing :: Joint )); tokens . append ( Punct :: new ($char3 , Spacing :: Alone )); } pub fn $spanned ( tokens : & mut TokenStream , span : Span ){ let mut punct = Punct :: new ($char1 , Spacing :: Joint ); punct . set_span ( span ); tokens . append ( punct ); let mut punct = Punct :: new ($char2 , Spacing :: Joint ); punct . set_span ( span ); tokens . append ( punct ); let mut punct = Punct :: new ($char3 , Spacing :: Alone ); punct . set_span ( span ); tokens . append ( punct ); }}; }
+macro_rules! __ra_macro_fixture142 {($display : tt $name : ty )=>{# [ cfg ( feature = "parsing" )] impl Token for $name { fn peek ( cursor : Cursor )-> bool { fn peek ( input : ParseStream )-> bool {<$name as Parse >:: parse ( input ). is_ok ()} peek_impl ( cursor , peek )} fn display ()-> & 'static str {$display }}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {}}; }
+macro_rules! __ra_macro_fixture143 {($display : tt $ty : ident $get : ident )=>{# [ cfg ( feature = "parsing" )] impl Token for $ty { fn peek ( cursor : Cursor )-> bool { cursor .$get (). is_some ()} fn display ()-> & 'static str {$display }}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $ty {}}; }
+macro_rules! __ra_macro_fixture144 {($($token : tt pub struct $name : ident /$len : tt # [$doc : meta ])*)=>{$(# [ repr ( C )]# [$doc ]# [ doc = "" ]# [ doc = " Don\\\'t try to remember the name of this type &mdash; use the" ]# [ doc = " [`Token!`] macro instead." ]# [ doc = "" ]# [ doc = " [`Token!`]: crate::token" ] pub struct $name { pub spans : [ Span ; $len ], }# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $name < S : IntoSpans < [ Span ; $len ]>> ( spans : S )-> $name {$name { spans : spans . into_spans (), }} impl std :: default :: Default for $name { fn default ()-> Self {$name { spans : [ Span :: call_site (); $len ], }}}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Copy for $name {}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $name { fn clone (& self )-> Self {* self }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Debug for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( stringify ! ($name ))}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl cmp :: Eq for $name {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $name { fn eq (& self , _other : &$name )-> bool { true }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $name { fn hash < H : Hasher > (& self , _state : & mut H ){}} impl_deref_if_len_is_1 ! ($name /$len ); )* }; }
+macro_rules! __ra_macro_fixture145 {($($token : tt pub struct $name : ident # [$doc : meta ])*)=>{$(# [$doc ]# [ doc = "" ]# [ doc = " Don\\\'t try to remember the name of this type &mdash; use the" ]# [ doc = " [`Token!`] macro instead." ]# [ doc = "" ]# [ doc = " [`Token!`]: crate::token" ] pub struct $name { pub span : Span , }# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $name < S : IntoSpans < [ Span ; 1 ]>> ( span : S )-> $name {$name { span : span . into_spans ()[ 0 ], }} impl std :: default :: Default for $name { fn default ()-> Self {$name { span : Span :: call_site (), }}}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Copy for $name {}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $name { fn clone (& self )-> Self {* self }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Debug for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( stringify ! ($name ))}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl cmp :: Eq for $name {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $name { fn eq (& self , _other : &$name )-> bool { true }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $name { fn hash < H : Hasher > (& self , _state : & mut H ){}}# [ cfg ( feature = "printing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "printing" )))] impl ToTokens for $name { fn to_tokens (& self , tokens : & mut TokenStream ){ printing :: keyword ($token , self . span , tokens ); }}# [ cfg ( feature = "parsing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "parsing" )))] impl Parse for $name { fn parse ( input : ParseStream )-> Result < Self > { Ok ($name { span : parsing :: keyword ( input , $token )?, })}}# [ cfg ( feature = "parsing" )] impl Token for $name { fn peek ( cursor : Cursor )-> bool { parsing :: peek_keyword ( cursor , $token )} fn display ()-> & 'static str { concat ! ( "`" , $token , "`" )}}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {})* }; }
+macro_rules! __ra_macro_fixture146 {($($token : tt pub struct $name : ident /$len : tt # [$doc : meta ])*)=>{$(define_punctuation_structs ! {$token pub struct $name /$len # [$doc ]}# [ cfg ( feature = "printing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "printing" )))] impl ToTokens for $name { fn to_tokens (& self , tokens : & mut TokenStream ){ printing :: punct ($token , & self . spans , tokens ); }}# [ cfg ( feature = "parsing" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "parsing" )))] impl Parse for $name { fn parse ( input : ParseStream )-> Result < Self > { Ok ($name { spans : parsing :: punct ( input , $token )?, })}}# [ cfg ( feature = "parsing" )] impl Token for $name { fn peek ( cursor : Cursor )-> bool { parsing :: peek_punct ( cursor , $token )} fn display ()-> & 'static str { concat ! ( "`" , $token , "`" )}}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {})* }; }
+macro_rules! __ra_macro_fixture147 {($($token : tt pub struct $name : ident # [$doc : meta ])*)=>{$(# [$doc ] pub struct $name { pub span : Span , }# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $name < S : IntoSpans < [ Span ; 1 ]>> ( span : S )-> $name {$name { span : span . into_spans ()[ 0 ], }} impl std :: default :: Default for $name { fn default ()-> Self {$name { span : Span :: call_site (), }}}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Copy for $name {}# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $name { fn clone (& self )-> Self {* self }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Debug for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( stringify ! ($name ))}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl cmp :: Eq for $name {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $name { fn eq (& self , _other : &$name )-> bool { true }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $name { fn hash < H : Hasher > (& self , _state : & mut H ){}} impl $name {# [ cfg ( feature = "printing" )] pub fn surround < F > (& self , tokens : & mut TokenStream , f : F ) where F : FnOnce (& mut TokenStream ), { printing :: delim ($token , self . span , tokens , f ); }}# [ cfg ( feature = "parsing" )] impl private :: Sealed for $name {})* }; }
+macro_rules! __ra_macro_fixture148 {($token : ident )=>{ impl From < Token ! [$token ]> for Ident { fn from ( token : Token ! [$token ])-> Ident { Ident :: new ( stringify ! ($token ), token . span )}}}; }
+macro_rules! __ra_macro_fixture149 {([$($attrs_pub : tt )*] struct $name : ident # full $($rest : tt )* )=>{# [ cfg ( feature = "full" )]$($attrs_pub )* struct $name $($rest )* # [ cfg ( not ( feature = "full" ))]$($attrs_pub )* struct $name { _noconstruct : :: std :: marker :: PhantomData <:: proc_macro2 :: Span >, }# [ cfg ( all ( not ( feature = "full" ), feature = "printing" ))] impl :: quote :: ToTokens for $name { fn to_tokens (& self , _: & mut :: proc_macro2 :: TokenStream ){ unreachable ! ()}}}; ([$($attrs_pub : tt )*] struct $name : ident $($rest : tt )* )=>{$($attrs_pub )* struct $name $($rest )* }; ($($t : tt )*)=>{ strip_attrs_pub ! ( ast_struct ! ($($t )*)); }; }
+macro_rules! __ra_macro_fixture150 {([$($attrs_pub : tt )*] enum $name : ident # no_visit $($rest : tt )* )=>( ast_enum ! ([$($attrs_pub )*] enum $name $($rest )*); ); ([$($attrs_pub : tt )*] enum $name : ident $($rest : tt )* )=>($($attrs_pub )* enum $name $($rest )* ); ($($t : tt )*)=>{ strip_attrs_pub ! ( ast_enum ! ($($t )*)); }; }
+macro_rules! __ra_macro_fixture151 {($(# [$enum_attr : meta ])* $pub : ident $enum : ident $name : ident #$tag : ident $body : tt $($remaining : tt )* )=>{ ast_enum ! ($(# [$enum_attr ])* $pub $enum $name #$tag $body ); ast_enum_of_structs_impl ! ($pub $enum $name $body $($remaining )*); }; ($(# [$enum_attr : meta ])* $pub : ident $enum : ident $name : ident $body : tt $($remaining : tt )* )=>{ ast_enum ! ($(# [$enum_attr ])* $pub $enum $name $body ); ast_enum_of_structs_impl ! ($pub $enum $name $body $($remaining )*); }; }
+macro_rules! __ra_macro_fixture152 {($ident : ident )=>{# [ allow ( non_camel_case_types )] pub struct $ident { pub span : $crate :: __private :: Span , }# [ doc ( hidden )]# [ allow ( dead_code , non_snake_case )] pub fn $ident < __S : $crate :: __private :: IntoSpans < [$crate :: __private :: Span ; 1 ]>> ( span : __S , )-> $ident {$ident { span : $crate :: __private :: IntoSpans :: into_spans ( span )[ 0 ], }} impl $crate :: __private :: Default for $ident { fn default ()-> Self {$ident { span : $crate :: __private :: Span :: call_site (), }}}$crate :: impl_parse_for_custom_keyword ! ($ident ); $crate :: impl_to_tokens_for_custom_keyword ! ($ident ); $crate :: impl_clone_for_custom_keyword ! ($ident ); $crate :: impl_extra_traits_for_custom_keyword ! ($ident ); }; }
+macro_rules! __ra_macro_fixture153 {($($expr_type : ty , $variant : ident , $msg : expr , )* )=>{$(# [ cfg ( all ( feature = "full" , feature = "printing" ))]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "parsing" )))] impl Parse for $expr_type { fn parse ( input : ParseStream )-> Result < Self > { let mut expr : Expr = input . parse ()?; loop { match expr { Expr ::$variant ( inner )=> return Ok ( inner ), Expr :: Group ( next )=> expr = * next . expr , _ => return Err ( Error :: new_spanned ( expr , $msg )), }}}})* }; }
+macro_rules! __ra_macro_fixture154 {($ty : ident )=>{# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl < 'a > Clone for $ty < 'a > { fn clone (& self )-> Self {$ty ( self . 0 )}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > Debug for $ty < 'a > { fn fmt (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . debug_tuple ( stringify ! ($ty )). field ( self . 0 ). finish ()}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > Eq for $ty < 'a > {}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > PartialEq for $ty < 'a > { fn eq (& self , other : & Self )-> bool { self . 0 == other . 0 }}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl < 'a > Hash for $ty < 'a > { fn hash < H : Hasher > (& self , state : & mut H ){ self . 0 . hash ( state ); }}}; }
+macro_rules! __ra_macro_fixture155 {($ty : ident )=>{# [ cfg ( feature = "clone-impls" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "clone-impls" )))] impl Clone for $ty { fn clone (& self )-> Self {$ty { repr : self . repr . clone (), }}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl PartialEq for $ty { fn eq (& self , other : & Self )-> bool { self . repr . token . to_string ()== other . repr . token . to_string ()}}# [ cfg ( feature = "extra-traits" )]# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "extra-traits" )))] impl Hash for $ty { fn hash < H > (& self , state : & mut H ) where H : Hasher , { self . repr . token . to_string (). hash ( state ); }}# [ cfg ( feature = "parsing" )]# [ doc ( hidden )]# [ allow ( non_snake_case )] pub fn $ty ( marker : lookahead :: TokenMarker )-> $ty { match marker {}}}; }
+macro_rules! __ra_macro_fixture156 {($name : ident / 1 )=>{ impl Deref for $name { type Target = WithSpan ; fn deref (& self )-> & Self :: Target { unsafe {&* ( self as * const Self as * const WithSpan )}}} impl DerefMut for $name { fn deref_mut (& mut self )-> & mut Self :: Target { unsafe {& mut * ( self as * mut Self as * mut WithSpan )}}}}; ($name : ident /$len : tt )=>{}; }
+macro_rules! __ra_macro_fixture157 {($($await_rule : tt )*)=>{# [ doc = " A type-macro that expands to the name of the Rust type representation of a" ]# [ doc = " given token." ]# [ doc = "" ]# [ doc = " See the [token module] documentation for details and examples." ]# [ doc = "" ]# [ doc = " [token module]: crate::token" ]# [ macro_export ] macro_rules ! Token {[ abstract ]=>{$crate :: token :: Abstract }; [ as ]=>{$crate :: token :: As }; [ async ]=>{$crate :: token :: Async }; [ auto ]=>{$crate :: token :: Auto }; $($await_rule =>{$crate :: token :: Await };)* [ become ]=>{$crate :: token :: Become }; [ box ]=>{$crate :: token :: Box }; [ break ]=>{$crate :: token :: Break }; [ const ]=>{$crate :: token :: Const }; [ continue ]=>{$crate :: token :: Continue }; [ crate ]=>{$crate :: token :: Crate }; [ default ]=>{$crate :: token :: Default }; [ do ]=>{$crate :: token :: Do }; [ dyn ]=>{$crate :: token :: Dyn }; [ else ]=>{$crate :: token :: Else }; [ enum ]=>{$crate :: token :: Enum }; [ extern ]=>{$crate :: token :: Extern }; [ final ]=>{$crate :: token :: Final }; [ fn ]=>{$crate :: token :: Fn }; [ for ]=>{$crate :: token :: For }; [ if ]=>{$crate :: token :: If }; [ impl ]=>{$crate :: token :: Impl }; [ in ]=>{$crate :: token :: In }; [ let ]=>{$crate :: token :: Let }; [ loop ]=>{$crate :: token :: Loop }; [ macro ]=>{$crate :: token :: Macro }; [ match ]=>{$crate :: token :: Match }; [ mod ]=>{$crate :: token :: Mod }; [ move ]=>{$crate :: token :: Move }; [ mut ]=>{$crate :: token :: Mut }; [ override ]=>{$crate :: token :: Override }; [ priv ]=>{$crate :: token :: Priv }; [ pub ]=>{$crate :: token :: Pub }; [ ref ]=>{$crate :: token :: Ref }; [ return ]=>{$crate :: token :: Return }; [ Self ]=>{$crate :: token :: SelfType }; [ self ]=>{$crate :: token :: SelfValue }; [ static ]=>{$crate :: token :: Static }; [ struct ]=>{$crate :: token :: Struct }; [ super ]=>{$crate :: token :: Super }; [ trait ]=>{$crate :: token :: Trait }; [ try ]=>{$crate :: token :: Try }; [ type ]=>{$crate :: token :: Type }; [ typeof ]=>{$crate :: token :: Typeof }; [ union ]=>{$crate :: token :: Union }; [ unsafe ]=>{$crate :: token :: Unsafe }; [ unsized ]=>{$crate :: token :: Unsized }; [ use ]=>{$crate :: token :: Use }; [ virtual ]=>{$crate :: token :: Virtual }; [ where ]=>{$crate :: token :: Where }; [ while ]=>{$crate :: token :: While }; [ yield ]=>{$crate :: token :: Yield }; [+]=>{$crate :: token :: Add }; [+=]=>{$crate :: token :: AddEq }; [&]=>{$crate :: token :: And }; [&&]=>{$crate :: token :: AndAnd }; [&=]=>{$crate :: token :: AndEq }; [@]=>{$crate :: token :: At }; [!]=>{$crate :: token :: Bang }; [^]=>{$crate :: token :: Caret }; [^=]=>{$crate :: token :: CaretEq }; [:]=>{$crate :: token :: Colon }; [::]=>{$crate :: token :: Colon2 }; [,]=>{$crate :: token :: Comma }; [/]=>{$crate :: token :: Div }; [/=]=>{$crate :: token :: DivEq }; [$]=>{$crate :: token :: Dollar }; [.]=>{$crate :: token :: Dot }; [..]=>{$crate :: token :: Dot2 }; [...]=>{$crate :: token :: Dot3 }; [..=]=>{$crate :: token :: DotDotEq }; [=]=>{$crate :: token :: Eq }; [==]=>{$crate :: token :: EqEq }; [>=]=>{$crate :: token :: Ge }; [>]=>{$crate :: token :: Gt }; [<=]=>{$crate :: token :: Le }; [<]=>{$crate :: token :: Lt }; [*=]=>{$crate :: token :: MulEq }; [!=]=>{$crate :: token :: Ne }; [|]=>{$crate :: token :: Or }; [|=]=>{$crate :: token :: OrEq }; [||]=>{$crate :: token :: OrOr }; [#]=>{$crate :: token :: Pound }; [?]=>{$crate :: token :: Question }; [->]=>{$crate :: token :: RArrow }; [<-]=>{$crate :: token :: LArrow }; [%]=>{$crate :: token :: Rem }; [%=]=>{$crate :: token :: RemEq }; [=>]=>{$crate :: token :: FatArrow }; [;]=>{$crate :: token :: Semi }; [<<]=>{$crate :: token :: Shl }; [<<=]=>{$crate :: token :: ShlEq }; [>>]=>{$crate :: token :: Shr }; [>>=]=>{$crate :: token :: ShrEq }; [*]=>{$crate :: token :: Star }; [-]=>{$crate :: token :: Sub }; [-=]=>{$crate :: token :: SubEq }; [~]=>{$crate :: token :: Tilde }; [_]=>{$crate :: token :: Underscore }; }}; }
+macro_rules! __ra_macro_fixture158 {($mac : ident ! ($(# [$m : meta ])* $pub : ident $($t : tt )*))=>{ check_keyword_matches ! ( pub $pub ); $mac ! ([$(# [$m ])* $pub ]$($t )*); }; }
+macro_rules! __ra_macro_fixture159 {($pub : ident $enum : ident $name : ident {$($(# [$variant_attr : meta ])* $variant : ident $(($($member : ident )::+))*, )* }$($remaining : tt )* )=>{ check_keyword_matches ! ( pub $pub ); check_keyword_matches ! ( enum $enum ); $($(ast_enum_from_struct ! ($name ::$variant , $($member )::+); )*)* # [ cfg ( feature = "printing" )] generate_to_tokens ! {$($remaining )* () tokens $name {$($variant $($($member )::+)*,)* }}}; }
+macro_rules! __ra_macro_fixture160 {($ident : ident )=>{ impl $crate :: token :: CustomToken for $ident { fn peek ( cursor : $crate :: buffer :: Cursor )-> $crate :: __private :: bool { if let Some (( ident , _rest ))= cursor . ident (){ ident == stringify ! ($ident )} else { false }} fn display ()-> & 'static $crate :: __private :: str { concat ! ( "`" , stringify ! ($ident ), "`" )}} impl $crate :: parse :: Parse for $ident { fn parse ( input : $crate :: parse :: ParseStream )-> $crate :: parse :: Result <$ident > { input . step (| cursor | { if let $crate :: __private :: Some (( ident , rest ))= cursor . ident (){ if ident == stringify ! ($ident ){ return $crate :: __private :: Ok (($ident { span : ident . span ()}, rest )); }}$crate :: __private :: Err ( cursor . error ( concat ! ( "expected `" , stringify ! ($ident ), "`" )))})}}}; }
+macro_rules! __ra_macro_fixture161 {($ident : ident )=>{ impl $crate :: __private :: ToTokens for $ident { fn to_tokens (& self , tokens : & mut $crate :: __private :: TokenStream2 ){ let ident = $crate :: Ident :: new ( stringify ! ($ident ), self . span ); $crate :: __private :: TokenStreamExt :: append ( tokens , ident ); }}}; }
+macro_rules! __ra_macro_fixture162 {($ident : ident )=>{ impl $crate :: __private :: Copy for $ident {} impl $crate :: __private :: Clone for $ident { fn clone (& self )-> Self {* self }}}; }
+macro_rules! __ra_macro_fixture163 {($ident : ident )=>{ impl $crate :: __private :: Debug for $ident { fn fmt (& self , f : & mut $crate :: __private :: Formatter )-> $crate :: __private :: fmt :: Result {$crate :: __private :: Formatter :: write_str ( f , concat ! ( "Keyword [" , stringify ! ($ident ), "]" ), )}} impl $crate :: __private :: Eq for $ident {} impl $crate :: __private :: PartialEq for $ident { fn eq (& self , _other : & Self )-> $crate :: __private :: bool { true }} impl $crate :: __private :: Hash for $ident { fn hash < __H : $crate :: __private :: Hasher > (& self , _state : & mut __H ){}}}; }
+macro_rules! __ra_macro_fixture164 {( struct struct )=>{}; ( enum enum )=>{}; ( pub pub )=>{}; }
+macro_rules! __ra_macro_fixture165 {($name : ident :: Verbatim , $member : ident )=>{}; ($name : ident ::$variant : ident , crate :: private )=>{}; ($name : ident ::$variant : ident , $member : ident )=>{ impl From <$member > for $name { fn from ( e : $member )-> $name {$name ::$variant ( e )}}}; }
+macro_rules! __ra_macro_fixture166 {( do_not_generate_to_tokens $($foo : tt )*)=>(); (($($arms : tt )*)$tokens : ident $name : ident {$variant : ident , $($next : tt )*})=>{ generate_to_tokens ! (($($arms )* $name ::$variant =>{})$tokens $name {$($next )* }); }; (($($arms : tt )*)$tokens : ident $name : ident {$variant : ident $member : ident , $($next : tt )*})=>{ generate_to_tokens ! (($($arms )* $name ::$variant ( _e )=> _e . to_tokens ($tokens ),)$tokens $name {$($next )* }); }; (($($arms : tt )*)$tokens : ident $name : ident {$variant : ident crate :: private , $($next : tt )*})=>{ generate_to_tokens ! (($($arms )* $name ::$variant (_)=> unreachable ! (),)$tokens $name {$($next )* }); }; (($($arms : tt )*)$tokens : ident $name : ident {})=>{# [ cfg_attr ( doc_cfg , doc ( cfg ( feature = "printing" )))] impl :: quote :: ToTokens for $name { fn to_tokens (& self , $tokens : & mut :: proc_macro2 :: TokenStream ){ match self {$($arms )* }}}}; }
+macro_rules! __ra_macro_fixture167 {($(# [$attr : meta ])* static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! ($(# [$attr ])* () static ref $N : $T = $e ; $($t )*); }; ($(# [$attr : meta ])* pub static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! ($(# [$attr ])* ( pub ) static ref $N : $T = $e ; $($t )*); }; ($(# [$attr : meta ])* pub ($($vis : tt )+) static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! ($(# [$attr ])* ( pub ($($vis )+)) static ref $N : $T = $e ; $($t )*); }; ()=>()}
+macro_rules! __ra_macro_fixture168 {($($record : ident ($($whatever : tt )+ )),+ )=>{$(impl_value ! {$record ($($whatever )+ )})+ }}
+macro_rules! __ra_macro_fixture169 {($($len : tt ),+ )=>{$(impl < 'a > private :: ValidLen < 'a > for [(& 'a Field , Option <& 'a ( dyn Value + 'a )>); $len ]{})+ }}
+macro_rules! __ra_macro_fixture170 {($(# [$attr : meta ])* ($($vis : tt )*) static ref $N : ident : $T : ty = $e : expr ; $($t : tt )*)=>{ __lazy_static_internal ! (@ MAKE TY , $(# [$attr ])*, ($($vis )*), $N ); __lazy_static_internal ! (@ TAIL , $N : $T = $e ); lazy_static ! ($($t )*); }; (@ TAIL , $N : ident : $T : ty = $e : expr )=>{ impl $crate :: __Deref for $N { type Target = $T ; fn deref (& self )-> &$T {# [ inline ( always )] fn __static_ref_initialize ()-> $T {$e }# [ inline ( always )] fn __stability ()-> & 'static $T { __lazy_static_create ! ( LAZY , $T ); LAZY . get ( __static_ref_initialize )} __stability ()}} impl $crate :: LazyStatic for $N { fn initialize ( lazy : & Self ){ let _ = &** lazy ; }}}; (@ MAKE TY , $(# [$attr : meta ])*, ($($vis : tt )*), $N : ident )=>{# [ allow ( missing_copy_implementations )]# [ allow ( non_camel_case_types )]# [ allow ( dead_code )]$(# [$attr ])* $($vis )* struct $N { __private_field : ()}# [ doc ( hidden )]$($vis )* static $N : $N = $N { __private_field : ()}; }; ()=>()}
+macro_rules! __ra_macro_fixture171 {($record : ident ($($value_ty : tt ),+ ))=>{$(impl_one_value ! ($value_ty , | this : $value_ty | this , $record ); )+ }; ($record : ident ($($value_ty : tt ),+ as $as_ty : ty ))=>{$(impl_one_value ! ($value_ty , | this : $value_ty | this as $as_ty , $record ); )+ }; }
+macro_rules! __ra_macro_fixture172 {( bool , $op : expr , $record : ident )=>{ impl_one_value ! ( normal , bool , $op , $record ); }; ($value_ty : tt , $op : expr , $record : ident )=>{ impl_one_value ! ( normal , $value_ty , $op , $record ); impl_one_value ! ( nonzero , $value_ty , $op , $record ); }; ( normal , $value_ty : tt , $op : expr , $record : ident )=>{ impl $crate :: sealed :: Sealed for $value_ty {} impl $crate :: field :: Value for $value_ty { fn record (& self , key : &$crate :: field :: Field , visitor : & mut dyn $crate :: field :: Visit ){ visitor .$record ( key , $op (* self ))}}}; ( nonzero , $value_ty : tt , $op : expr , $record : ident )=>{# [ allow ( clippy :: useless_attribute , unused )] use num ::*; impl $crate :: sealed :: Sealed for ty_to_nonzero ! ($value_ty ){} impl $crate :: field :: Value for ty_to_nonzero ! ($value_ty ){ fn record (& self , key : &$crate :: field :: Field , visitor : & mut dyn $crate :: field :: Visit ){ visitor .$record ( key , $op ( self . get ()))}}}; }
+macro_rules! __ra_macro_fixture173 {($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]# [ project_ref = $proj_ref_ident : ident ]# [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][$proj_ref_ident ][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]# [ project_ref = $proj_ref_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][$proj_ref_ident ][]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]# [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project_ref = $proj_ref_ident : ident ]# [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][$proj_ref_ident ][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project = $proj_mut_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[$proj_mut_ident ][][]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project_ref = $proj_ref_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][$proj_ref_ident ][]$(# [ doc $($doc )*])* $($tt )* }}; ($(# [ doc $($doc : tt )*])* # [ project_replace = $proj_replace_ident : ident ]$($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][][$proj_replace_ident ]$(# [ doc $($doc )*])* $($tt )* }}; ($($tt : tt )* )=>{$crate :: __pin_project_internal ! {[][][]$($tt )* }}; }
+macro_rules! __ra_macro_fixture174 {(@ struct => internal ; [$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?][$proj_vis : vis ][$(# [$attrs : meta ])* $vis : vis struct $ident : ident ][$($def_generics : tt )*][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )*)?]{$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ })=>{$(# [$attrs ])* $vis struct $ident $($def_generics )* $(where $($where_clause )*)? {$($field_vis $field : $field_ty ),+ }$crate :: __pin_project_internal ! {@ struct => make_proj_ty => named ; [$proj_vis ][$($proj_mut_ident )?][ make_proj_field_mut ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_ty => named ; [$proj_vis ][$($proj_ref_ident )?][ make_proj_field_ref ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_replace_ty => named ; [$proj_vis ][$($proj_replace_ident )?][ make_proj_field_replace ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}# [ allow ( explicit_outlives_requirements )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: used_underscore_binding )] const _: ()= {$crate :: __pin_project_internal ! {@ struct => make_proj_ty => unnamed ; [$proj_vis ][$($proj_mut_ident )?][ Projection ][ make_proj_field_mut ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_ty => unnamed ; [$proj_vis ][$($proj_ref_ident )?][ ProjectionRef ][ make_proj_field_ref ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_replace_ty => unnamed ; [$proj_vis ][$($proj_replace_ident )?][ ProjectionReplace ][ make_proj_field_replace ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }} impl <$($impl_generics )*> $ident <$($ty_generics )*> $(where $($where_clause )*)? {$crate :: __pin_project_internal ! {@ struct => make_proj_method ; [$proj_vis ][$($proj_mut_ident )?][ Projection ][ project get_unchecked_mut mut ][$($ty_generics )*]{$($(# [$pin ])? $field_vis $field ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_method ; [$proj_vis ][$($proj_ref_ident )?][ ProjectionRef ][ project_ref get_ref ][$($ty_generics )*]{$($(# [$pin ])? $field_vis $field ),+ }}$crate :: __pin_project_internal ! {@ struct => make_proj_replace_method ; [$proj_vis ][$($proj_replace_ident )?][ ProjectionReplace ][$($ty_generics )*]{$($(# [$pin ])? $field_vis $field ),+ }}}$crate :: __pin_project_internal ! {@ make_unpin_impl ; [$vis $ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]$($field : $crate :: __pin_project_internal ! (@ make_unpin_bound ; $(# [$pin ])? $field_ty )),+ }$crate :: __pin_project_internal ! 
{@ make_drop_impl ; [$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]}# [ forbid ( safe_packed_borrows )] fn __assert_not_repr_packed <$($impl_generics )*> ( this : &$ident <$($ty_generics )*>)$(where $($where_clause )*)? {$(let _ = & this .$field ; )+ }}; }; (@ enum => internal ; [$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?][$proj_vis : vis ][$(# [$attrs : meta ])* $vis : vis enum $ident : ident ][$($def_generics : tt )*][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )*)?]{$($(# [$variant_attrs : meta ])* $variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ })? ),+ })=>{$(# [$attrs ])* $vis enum $ident $($def_generics )* $(where $($where_clause )*)? {$($(# [$variant_attrs ])* $variant $({$($field : $field_ty ),+ })? ),+ }$crate :: __pin_project_internal ! {@ enum => make_proj_ty ; [$proj_vis ][$($proj_mut_ident )?][ make_proj_field_mut ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_ty ; [$proj_vis ][$($proj_ref_ident )?][ make_proj_field_ref ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_replace_ty ; [$proj_vis ][$($proj_replace_ident )?][ make_proj_field_replace ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]{$($variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: used_underscore_binding )] const _: ()= { impl <$($impl_generics )*> $ident <$($ty_generics )*> $(where $($where_clause )*)? {$crate :: __pin_project_internal ! {@ enum => make_proj_method ; [$proj_vis ][$($proj_mut_ident )?][ project get_unchecked_mut mut ][$($ty_generics )*]{$($variant $({$($(# [$pin ])? $field ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_method ; [$proj_vis ][$($proj_ref_ident )?][ project_ref get_ref ][$($ty_generics )*]{$($variant $({$($(# [$pin ])? $field ),+ })? ),+ }}$crate :: __pin_project_internal ! {@ enum => make_proj_replace_method ; [$proj_vis ][$($proj_replace_ident )?][$($ty_generics )*]{$($variant $({$($(# [$pin ])? $field ),+ })? ),+ }}}$crate :: __pin_project_internal ! {@ make_unpin_impl ; [$vis $ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]$($variant : ($($($crate :: __pin_project_internal ! (@ make_unpin_bound ; $(# [$pin ])? $field_ty )),+ )?)),+ }$crate :: __pin_project_internal ! {@ make_drop_impl ; [$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]}}; }; (@ struct => make_proj_ty => unnamed ; [$proj_vis : vis ][$_proj_ty_ident : ident ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_ty => unnamed ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{$crate :: __pin_project_internal ! 
{@ struct => make_proj_ty => named ; [$proj_vis ][$proj_ty_ident ][$make_proj_field ][$ident ][$($impl_generics )*][$($ty_generics )*][$(where $($where_clause )*)?]$($field )* }}; (@ struct => make_proj_ty => named ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: ref_option_ref )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis struct $proj_ty_ident < '__pin , $($impl_generics )*> where $ident <$($ty_generics )*>: '__pin $(, $($where_clause )*)? {$($field_vis $field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ }}; (@ struct => make_proj_ty => named ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_replace_ty => unnamed ; [$proj_vis : vis ][$_proj_ty_ident : ident ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_replace_ty => unnamed ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ struct => make_proj_replace_ty => named ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis struct $proj_ty_ident <$($impl_generics )*> where $($($where_clause )*)? {$($field_vis $field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ }}; (@ struct => make_proj_replace_ty => named ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{}; (@ enum => make_proj_ty ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ })? ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: unknown_clippy_lints )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: ref_option_ref )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis enum $proj_ty_ident < '__pin , $($impl_generics )*> where $ident <$($ty_generics )*>: '__pin $(, $($where_clause )*)? {$($variant $({$($field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ })? 
),+ }}; (@ enum => make_proj_ty ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($variant : tt )* )=>{}; (@ enum => make_proj_replace_ty ; [$proj_vis : vis ][$proj_ty_ident : ident ][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ })? ),+ })=>{# [ allow ( dead_code )]# [ allow ( single_use_lifetimes )]# [ allow ( clippy :: mut_mut )]# [ allow ( clippy :: redundant_pub_crate )]# [ allow ( clippy :: type_repetition_in_bounds )]$proj_vis enum $proj_ty_ident <$($impl_generics )*> where $($($where_clause )*)? {$($variant $({$($field : $crate :: __pin_project_internal ! (@$make_proj_field ; $(# [$pin ])? $field_ty )),+ })? ),+ }}; (@ enum => make_proj_replace_ty ; [$proj_vis : vis ][][$make_proj_field : ident ][$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($variant : tt )* )=>{}; (@ make_proj_replace_block ; [$($proj_path : tt )+]{$($(# [$pin : ident ])? $field_vis : vis $field : ident ),+ })=>{ let result = $($proj_path )* {$($field : $crate :: __pin_project_internal ! (@ make_replace_field_proj ; $(# [$pin ])? $field )),+ }; {($($crate :: __pin_project_internal ! (@ make_unsafe_drop_in_place_guard ; $(# [$pin ])? $field ), )* ); } result }; (@ make_proj_replace_block ; [$($proj_path : tt )+])=>{$($proj_path )* }; (@ struct => make_proj_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$_proj_ty_ident : ident ][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]{$($(# [$pin : ident ])? $field_vis : vis $field : ident ),+ })=>{$proj_vis fn $method_ident < '__pin > ( self : $crate :: __private :: Pin <& '__pin $($mut )? Self >, )-> $proj_ty_ident < '__pin , $($ty_generics )*> { unsafe { let Self {$($field ),* }= self .$get_method (); $proj_ty_ident {$($field : $crate :: __pin_project_internal ! (@ make_unsafe_field_proj ; $(# [$pin ])? $field )),+ }}}}; (@ struct => make_proj_method ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]$($variant : tt )* )=>{$crate :: __pin_project_internal ! {@ struct => make_proj_method ; [$proj_vis ][$proj_ty_ident ][$proj_ty_ident ][$method_ident $get_method $($mut )?][$($ty_generics )*]$($variant )* }}; (@ struct => make_proj_replace_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$_proj_ty_ident : ident ][$($ty_generics : tt )*]{$($(# [$pin : ident ])? $field_vis : vis $field : ident ),+ })=>{$proj_vis fn project_replace ( self : $crate :: __private :: Pin <& mut Self >, replacement : Self , )-> $proj_ty_ident <$($ty_generics )*> { unsafe { let __self_ptr : * mut Self = self . get_unchecked_mut (); let __guard = $crate :: __private :: UnsafeOverwriteGuard { target : __self_ptr , value : $crate :: __private :: ManuallyDrop :: new ( replacement ), }; let Self {$($field ),* }= & mut * __self_ptr ; $crate :: __pin_project_internal ! {@ make_proj_replace_block ; [$proj_ty_ident ]{$($(# [$pin ])? 
$field ),+ }}}}}; (@ struct => make_proj_replace_method ; [$proj_vis : vis ][][$proj_ty_ident : ident ][$($ty_generics : tt )*]$($variant : tt )* )=>{}; (@ enum => make_proj_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident ),+ })? ),+ })=>{$proj_vis fn $method_ident < '__pin > ( self : $crate :: __private :: Pin <& '__pin $($mut )? Self >, )-> $proj_ty_ident < '__pin , $($ty_generics )*> { unsafe { match self .$get_method (){$(Self ::$variant $({$($field ),+ })? =>{$proj_ty_ident ::$variant $({$($field : $crate :: __pin_project_internal ! (@ make_unsafe_field_proj ; $(# [$pin ])? $field )),+ })? }),+ }}}}; (@ enum => make_proj_method ; [$proj_vis : vis ][][$method_ident : ident $get_method : ident $($mut : ident )?][$($ty_generics : tt )*]$($variant : tt )* )=>{}; (@ enum => make_proj_replace_method ; [$proj_vis : vis ][$proj_ty_ident : ident ][$($ty_generics : tt )*]{$($variant : ident $({$($(# [$pin : ident ])? $field : ident ),+ })? ),+ })=>{$proj_vis fn project_replace ( self : $crate :: __private :: Pin <& mut Self >, replacement : Self , )-> $proj_ty_ident <$($ty_generics )*> { unsafe { let __self_ptr : * mut Self = self . get_unchecked_mut (); let __guard = $crate :: __private :: UnsafeOverwriteGuard { target : __self_ptr , value : $crate :: __private :: ManuallyDrop :: new ( replacement ), }; match & mut * __self_ptr {$(Self ::$variant $({$($field ),+ })? =>{$crate :: __pin_project_internal ! {@ make_proj_replace_block ; [$proj_ty_ident :: $variant ]$({$($(# [$pin ])? $field ),+ })? }}),+ }}}}; (@ enum => make_proj_replace_method ; [$proj_vis : vis ][][$($ty_generics : tt )*]$($variant : tt )* )=>{}; (@ make_unpin_impl ; [$vis : vis $ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?]$($field : tt )* )=>{# [ allow ( non_snake_case )]$vis struct __Origin < '__pin , $($impl_generics )*> $(where $($where_clause )*)? { __dummy_lifetime : $crate :: __private :: PhantomData <& '__pin ()>, $($field )* } impl < '__pin , $($impl_generics )*> $crate :: __private :: Unpin for $ident <$($ty_generics )*> where __Origin < '__pin , $($ty_generics )*>: $crate :: __private :: Unpin $(, $($where_clause )*)? {}}; (@ make_drop_impl ; [$ident : ident ][$($impl_generics : tt )*][$($ty_generics : tt )*][$(where $($where_clause : tt )* )?])=>{ trait MustNotImplDrop {}# [ allow ( clippy :: drop_bounds , drop_bounds )] impl < T : $crate :: __private :: Drop > MustNotImplDrop for T {} impl <$($impl_generics )*> MustNotImplDrop for $ident <$($ty_generics )*> $(where $($where_clause )*)? 
{}}; (@ make_unpin_bound ; # [ pin ]$field_ty : ty )=>{$field_ty }; (@ make_unpin_bound ; $field_ty : ty )=>{$crate :: __private :: AlwaysUnpin <$field_ty > }; (@ make_unsafe_field_proj ; # [ pin ]$field : ident )=>{$crate :: __private :: Pin :: new_unchecked ($field )}; (@ make_unsafe_field_proj ; $field : ident )=>{$field }; (@ make_replace_field_proj ; # [ pin ]$field : ident )=>{$crate :: __private :: PhantomData }; (@ make_replace_field_proj ; $field : ident )=>{$crate :: __private :: ptr :: read ($field )}; (@ make_unsafe_drop_in_place_guard ; # [ pin ]$field : ident )=>{$crate :: __private :: UnsafeDropInPlaceGuard ($field )}; (@ make_unsafe_drop_in_place_guard ; $field : ident )=>{()}; (@ make_proj_field_mut ; # [ pin ]$field_ty : ty )=>{$crate :: __private :: Pin <& '__pin mut ($field_ty )> }; (@ make_proj_field_mut ; $field_ty : ty )=>{& '__pin mut ($field_ty )}; (@ make_proj_field_ref ; # [ pin ]$field_ty : ty )=>{$crate :: __private :: Pin <& '__pin ($field_ty )> }; (@ make_proj_field_ref ; $field_ty : ty )=>{& '__pin ($field_ty )}; (@ make_proj_field_replace ; # [ pin ]$field_ty : ty )=>{$crate :: __private :: PhantomData <$field_ty > }; (@ make_proj_field_replace ; $field_ty : ty )=>{$field_ty }; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* pub struct $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ struct => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][ pub ( crate )][$(# [$attrs ])* pub struct $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}}; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* $vis : vis struct $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$pin : ident ])? $field_vis : vis $field : ident : $field_ty : ty ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ struct => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][$vis ][$(# [$attrs ])* $vis struct $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? 
$(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$pin ])? $field_vis $field : $field_ty ),+ }}}; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* pub enum $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$variant_attrs : meta ])* $variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ $(,)? })? ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ enum => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][ pub ( crate )][$(# [$attrs ])* pub enum $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$variant_attrs ])* $variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}}; ([$($proj_mut_ident : ident )?][$($proj_ref_ident : ident )?][$($proj_replace_ident : ident )?]$(# [$attrs : meta ])* $vis : vis enum $ident : ident $(< $($lifetime : lifetime $(: $lifetime_bound : lifetime )? ),* $(,)? $($generics : ident $(: $generics_bound : path )? $(: ?$generics_unsized_bound : path )? $(: $generics_lifetime_bound : lifetime )? $(= $generics_default : ty )? ),* $(,)? >)? $(where $($where_clause_ty : ty $(: $where_clause_bound : path )? $(: ?$where_clause_unsized_bound : path )? $(: $where_clause_lifetime_bound : lifetime )? ),* $(,)? )? {$($(# [$variant_attrs : meta ])* $variant : ident $({$($(# [$pin : ident ])? $field : ident : $field_ty : ty ),+ $(,)? })? ),+ $(,)? })=>{$crate :: __pin_project_internal ! {@ enum => internal ; [$($proj_mut_ident )?][$($proj_ref_ident )?][$($proj_replace_ident )?][$vis ][$(# [$attrs ])* $vis enum $ident ][$(< $($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? $(= $generics_default )? ),* >)?][$($($lifetime $(: $lifetime_bound )? ,)* $($generics $(: $generics_bound )? $(: ?$generics_unsized_bound )? $(: $generics_lifetime_bound )? ),* )?][$($($lifetime ,)* $($generics ),* )?][$(where $($where_clause_ty $(: $where_clause_bound )? $(: ?$where_clause_unsized_bound )? $(: $where_clause_lifetime_bound )? ),* )?]{$($(# [$variant_attrs ])* $variant $({$($(# [$pin ])? $field : $field_ty ),+ })? ),+ }}}; }
+macro_rules! __ra_macro_fixture175 {($t : ty , $example : tt )=>{ impl AtomicCell <$t > {# [ doc = " Increments the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The addition wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_add(3), 7);" ]# [ doc = " assert_eq!(a.load(), 10);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_add (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_add ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value = value . wrapping_add ( val ); old }}# [ doc = " Decrements the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The subtraction wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_sub(3), 7);" ]# [ doc = " assert_eq!(a.load(), 4);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_sub (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_sub ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value = value . wrapping_sub ( val ); old }}# [ doc = " Applies bitwise \\\"and\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_and(3), 7);" ]# [ doc = " assert_eq!(a.load(), 3);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_and (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_and ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value &= val ; old }}# [ doc = " Applies bitwise \\\"or\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_or(16), 7);" ]# [ doc = " assert_eq!(a.load(), 23);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_or (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_or ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value |= val ; old }}# [ doc = " Applies bitwise \\\"xor\\\" to the current value and returns the previous value." 
]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_xor(2), 7);" ]# [ doc = " assert_eq!(a.load(), 5);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_xor (& self , val : $t )-> $t { if can_transmute ::<$t , atomic :: AtomicUsize > (){ let a = unsafe {&* ( self . value . get () as * const atomic :: AtomicUsize )}; a . fetch_xor ( val as usize , Ordering :: AcqRel ) as $t } else { let _guard = lock ( self . value . get () as usize ). write (); let value = unsafe {& mut * ( self . value . get ())}; let old = * value ; * value ^= val ; old }}}}; ($t : ty , $atomic : ty , $example : tt )=>{ impl AtomicCell <$t > {# [ doc = " Increments the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The addition wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_add(3), 7);" ]# [ doc = " assert_eq!(a.load(), 10);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_add (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_add ( val , Ordering :: AcqRel )}# [ doc = " Decrements the current value by `val` and returns the previous value." ]# [ doc = "" ]# [ doc = " The subtraction wraps on overflow." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_sub(3), 7);" ]# [ doc = " assert_eq!(a.load(), 4);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_sub (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_sub ( val , Ordering :: AcqRel )}# [ doc = " Applies bitwise \\\"and\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_and(3), 7);" ]# [ doc = " assert_eq!(a.load(), 3);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_and (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_and ( val , Ordering :: AcqRel )}# [ doc = " Applies bitwise \\\"or\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_or(16), 7);" ]# [ doc = " assert_eq!(a.load(), 23);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_or (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . fetch_or ( val , Ordering :: AcqRel )}# [ doc = " Applies bitwise \\\"xor\\\" to the current value and returns the previous value." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " use crossbeam_utils::atomic::AtomicCell;" ]# [ doc = "" ]# [ doc = $example ]# [ doc = "" ]# [ doc = " assert_eq!(a.fetch_xor(2), 7);" ]# [ doc = " assert_eq!(a.load(), 5);" ]# [ doc = " ```" ]# [ inline ] pub fn fetch_xor (& self , val : $t )-> $t { let a = unsafe {&* ( self . value . get () as * const $atomic )}; a . 
fetch_xor ( val , Ordering :: AcqRel )}}}; }
+macro_rules! __ra_macro_fixture176 {($atomic : ident , $val : ty )=>{ impl AtomicConsume for :: core :: sync :: atomic ::$atomic { type Val = $val ; impl_consume ! (); }}; }
+macro_rules! __ra_macro_fixture177 {($t : ty , $min : expr , $max : expr )=>{ impl Bounded for $t {# [ inline ] fn min_value ()-> $t {$min }# [ inline ] fn max_value ()-> $t {$max }}}; }
+macro_rules! __ra_macro_fixture178 {($m : ident )=>{ for_each_tuple_ ! {$m !! A , B , C , D , E , F , G , H , I , J , K , L , M , N , O , P , Q , R , S , T , }}; }
+macro_rules! __ra_macro_fixture179 {($T : ident )=>{ impl ToPrimitive for $T { impl_to_primitive_int_to_int ! {$T : fn to_isize -> isize ; fn to_i8 -> i8 ; fn to_i16 -> i16 ; fn to_i32 -> i32 ; fn to_i64 -> i64 ; # [ cfg ( has_i128 )] fn to_i128 -> i128 ; } impl_to_primitive_int_to_uint ! {$T : fn to_usize -> usize ; fn to_u8 -> u8 ; fn to_u16 -> u16 ; fn to_u32 -> u32 ; fn to_u64 -> u64 ; # [ cfg ( has_i128 )] fn to_u128 -> u128 ; }# [ inline ] fn to_f32 (& self )-> Option < f32 > { Some (* self as f32 )}# [ inline ] fn to_f64 (& self )-> Option < f64 > { Some (* self as f64 )}}}; }
+macro_rules! __ra_macro_fixture180 {($T : ident )=>{ impl ToPrimitive for $T { impl_to_primitive_uint_to_int ! {$T : fn to_isize -> isize ; fn to_i8 -> i8 ; fn to_i16 -> i16 ; fn to_i32 -> i32 ; fn to_i64 -> i64 ; # [ cfg ( has_i128 )] fn to_i128 -> i128 ; } impl_to_primitive_uint_to_uint ! {$T : fn to_usize -> usize ; fn to_u8 -> u8 ; fn to_u16 -> u16 ; fn to_u32 -> u32 ; fn to_u64 -> u64 ; # [ cfg ( has_i128 )] fn to_u128 -> u128 ; }# [ inline ] fn to_f32 (& self )-> Option < f32 > { Some (* self as f32 )}# [ inline ] fn to_f64 (& self )-> Option < f64 > { Some (* self as f64 )}}}; }
+macro_rules! __ra_macro_fixture181 {($T : ident )=>{ impl ToPrimitive for $T { impl_to_primitive_float_to_signed_int ! {$T : fn to_isize -> isize ; fn to_i8 -> i8 ; fn to_i16 -> i16 ; fn to_i32 -> i32 ; fn to_i64 -> i64 ; # [ cfg ( has_i128 )] fn to_i128 -> i128 ; } impl_to_primitive_float_to_unsigned_int ! {$T : fn to_usize -> usize ; fn to_u8 -> u8 ; fn to_u16 -> u16 ; fn to_u32 -> u32 ; fn to_u64 -> u64 ; # [ cfg ( has_i128 )] fn to_u128 -> u128 ; } impl_to_primitive_float_to_float ! {$T : fn to_f32 -> f32 ; fn to_f64 -> f64 ; }}}; }
+macro_rules! __ra_macro_fixture182 {($T : ty , $to_ty : ident )=>{# [ allow ( deprecated )] impl FromPrimitive for $T {# [ inline ] fn from_isize ( n : isize )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i8 ( n : i8 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i16 ( n : i16 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i32 ( n : i32 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_i64 ( n : i64 )-> Option <$T > { n .$to_ty ()}# [ cfg ( has_i128 )]# [ inline ] fn from_i128 ( n : i128 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_usize ( n : usize )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u8 ( n : u8 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u16 ( n : u16 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u32 ( n : u32 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_u64 ( n : u64 )-> Option <$T > { n .$to_ty ()}# [ cfg ( has_i128 )]# [ inline ] fn from_u128 ( n : u128 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_f32 ( n : f32 )-> Option <$T > { n .$to_ty ()}# [ inline ] fn from_f64 ( n : f64 )-> Option <$T > { n .$to_ty ()}}}; }
+macro_rules! __ra_macro_fixture183 {($T : ty , $conv : ident )=>{ impl NumCast for $T {# [ inline ]# [ allow ( deprecated )] fn from < N : ToPrimitive > ( n : N )-> Option <$T > { n .$conv ()}}}; }
+macro_rules! __ra_macro_fixture184 {(@ $T : ty =>$(# [$cfg : meta ])* impl $U : ty )=>{$(# [$cfg ])* impl AsPrimitive <$U > for $T {# [ inline ] fn as_ ( self )-> $U { self as $U }}}; (@ $T : ty =>{$($U : ty ),* })=>{$(impl_as_primitive ! (@ $T => impl $U ); )*}; ($T : ty =>{$($U : ty ),* })=>{ impl_as_primitive ! (@ $T =>{$($U ),* }); impl_as_primitive ! (@ $T =>{ u8 , u16 , u32 , u64 , usize }); impl_as_primitive ! (@ $T =># [ cfg ( has_i128 )] impl u128 ); impl_as_primitive ! (@ $T =>{ i8 , i16 , i32 , i64 , isize }); impl_as_primitive ! (@ $T =># [ cfg ( has_i128 )] impl i128 ); }; }
+macro_rules! __ra_macro_fixture185 {($(# [$doc : meta ]$constant : ident ,)+)=>(# [ allow ( non_snake_case )] pub trait FloatConst {$(# [$doc ] fn $constant ()-> Self ;)+ # [ doc = "Return the full circle constant `τ`." ]# [ inline ] fn TAU ()-> Self where Self : Sized + Add < Self , Output = Self >{ Self :: PI ()+ Self :: PI ()}# [ doc = "Return `log10(2.0)`." ]# [ inline ] fn LOG10_2 ()-> Self where Self : Sized + Div < Self , Output = Self >{ Self :: LN_2 ()/ Self :: LN_10 ()}# [ doc = "Return `log2(10.0)`." ]# [ inline ] fn LOG2_10 ()-> Self where Self : Sized + Div < Self , Output = Self >{ Self :: LN_10 ()/ Self :: LN_2 ()}} float_const_impl ! {@ float f32 , $($constant ,)+ } float_const_impl ! {@ float f64 , $($constant ,)+ }); (@ float $T : ident , $($constant : ident ,)+)=>( impl FloatConst for $T { constant ! {$($constant ()-> $T :: consts ::$constant ; )+ TAU ()-> 6.28318530717958647692528676655900577 ; LOG10_2 ()-> 0.301029995663981195213738894724493027 ; LOG2_10 ()-> 3.32192809488736234787031942948939018 ; }}); }
+macro_rules! __ra_macro_fixture186 {($t : ty , $v : expr )=>{ impl Zero for $t {# [ inline ] fn zero ()-> $t {$v }# [ inline ] fn is_zero (& self )-> bool {* self == $v }}}; }
+macro_rules! __ra_macro_fixture187 {($t : ty , $v : expr )=>{ impl One for $t {# [ inline ] fn one ()-> $t {$v }# [ inline ] fn is_one (& self )-> bool {* self == $v }}}; }
+macro_rules! __ra_macro_fixture188 {($T : ty , $S : ty , $U : ty )=>{ impl PrimInt for $T {# [ inline ] fn count_ones ( self )-> u32 {<$T >:: count_ones ( self )}# [ inline ] fn count_zeros ( self )-> u32 {<$T >:: count_zeros ( self )}# [ inline ] fn leading_zeros ( self )-> u32 {<$T >:: leading_zeros ( self )}# [ inline ] fn trailing_zeros ( self )-> u32 {<$T >:: trailing_zeros ( self )}# [ inline ] fn rotate_left ( self , n : u32 )-> Self {<$T >:: rotate_left ( self , n )}# [ inline ] fn rotate_right ( self , n : u32 )-> Self {<$T >:: rotate_right ( self , n )}# [ inline ] fn signed_shl ( self , n : u32 )-> Self {(( self as $S )<< n ) as $T }# [ inline ] fn signed_shr ( self , n : u32 )-> Self {(( self as $S )>> n ) as $T }# [ inline ] fn unsigned_shl ( self , n : u32 )-> Self {(( self as $U )<< n ) as $T }# [ inline ] fn unsigned_shr ( self , n : u32 )-> Self {(( self as $U )>> n ) as $T }# [ inline ] fn swap_bytes ( self )-> Self {<$T >:: swap_bytes ( self )}# [ inline ] fn from_be ( x : Self )-> Self {<$T >:: from_be ( x )}# [ inline ] fn from_le ( x : Self )-> Self {<$T >:: from_le ( x )}# [ inline ] fn to_be ( self )-> Self {<$T >:: to_be ( self )}# [ inline ] fn to_le ( self )-> Self {<$T >:: to_le ( self )}# [ inline ] fn pow ( self , exp : u32 )-> Self {<$T >:: pow ( self , exp )}}}; }
+macro_rules! __ra_macro_fixture189 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : &$t )-> Option <$t > {<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture190 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self )-> Option <$t > {<$t >::$method (* self )}}}; }
+macro_rules! __ra_macro_fixture191 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , rhs : u32 )-> Option <$t > {<$t >::$method (* self , rhs )}}}; }
+macro_rules! __ra_macro_fixture192 {($trait_name : ident for $($t : ty )*)=>{$(impl $trait_name for $t { type Output = Self ; # [ inline ] fn mul_add ( self , a : Self , b : Self )-> Self :: Output {( self * a )+ b }})*}}
+macro_rules! __ra_macro_fixture193 {($trait_name : ident for $($t : ty )*)=>{$(impl $trait_name for $t {# [ inline ] fn mul_add_assign (& mut self , a : Self , b : Self ){* self = (* self * a )+ b }})*}}
+macro_rules! __ra_macro_fixture194 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : & Self )-> ( Self , bool ){<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture195 {($trait_name : ident for $($t : ty )*)=>{$(impl $trait_name for $t {# [ inline ] fn saturating_add ( self , v : Self )-> Self { Self :: saturating_add ( self , v )}# [ inline ] fn saturating_sub ( self , v : Self )-> Self { Self :: saturating_sub ( self , v )}})*}}
+macro_rules! __ra_macro_fixture196 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : & Self )-> Self {<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture197 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , v : & Self )-> Self {<$t >::$method (* self , * v )}}}; ($trait_name : ident , $method : ident , $t : ty , $rhs : ty )=>{ impl $trait_name <$rhs > for $t {# [ inline ] fn $method (& self , v : &$rhs )-> Self {<$t >::$method (* self , * v )}}}; }
+macro_rules! __ra_macro_fixture198 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self )-> $t {<$t >::$method (* self )}}}; }
+macro_rules! __ra_macro_fixture199 {($trait_name : ident , $method : ident , $t : ty )=>{ impl $trait_name for $t {# [ inline ] fn $method (& self , rhs : u32 )-> $t {<$t >::$method (* self , rhs )}}}; }
+macro_rules! __ra_macro_fixture200 {($t : ty )=>{ pow_impl ! ($t , u8 ); pow_impl ! ($t , usize ); }; ($t : ty , $rhs : ty )=>{ pow_impl ! ($t , $rhs , usize , pow ); }; ($t : ty , $rhs : ty , $desired_rhs : ty , $method : expr )=>{ impl Pow <$rhs > for $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : $rhs )-> $t {($method )( self , <$desired_rhs >:: from ( rhs ))}} impl < 'a > Pow <& 'a $rhs > for $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : & 'a $rhs )-> $t {($method )( self , <$desired_rhs >:: from (* rhs ))}} impl < 'a > Pow <$rhs > for & 'a $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : $rhs )-> $t {($method )(* self , <$desired_rhs >:: from ( rhs ))}} impl < 'a , 'b > Pow <& 'a $rhs > for & 'b $t { type Output = $t ; # [ inline ] fn pow ( self , rhs : & 'a $rhs )-> $t {($method )(* self , <$desired_rhs >:: from (* rhs ))}}}; }
+macro_rules! __ra_macro_fixture201 {($($t : ty )*)=>($(impl Signed for $t {# [ inline ] fn abs (& self )-> $t { if self . is_negative (){-* self } else {* self }}# [ inline ] fn abs_sub (& self , other : &$t )-> $t { if * self <= * other { 0 } else {* self - * other }}# [ inline ] fn signum (& self )-> $t { match * self { n if n > 0 => 1 , 0 => 0 , _ =>- 1 , }}# [ inline ] fn is_positive (& self )-> bool {* self > 0 }# [ inline ] fn is_negative (& self )-> bool {* self < 0 }})*)}
+macro_rules! __ra_macro_fixture202 {($t : ty )=>{ impl Signed for $t {# [ doc = " Computes the absolute value. Returns `NAN` if the number is `NAN`." ]# [ inline ] fn abs (& self )-> $t { FloatCore :: abs (* self )}# [ doc = " The positive difference of two numbers. Returns `0.0` if the number is" ]# [ doc = " less than or equal to `other`, otherwise the difference between`self`" ]# [ doc = " and `other` is returned." ]# [ inline ] fn abs_sub (& self , other : &$t )-> $t { if * self <= * other { 0. } else {* self - * other }}# [ doc = " # Returns" ]# [ doc = "" ]# [ doc = " - `1.0` if the number is positive, `+0.0` or `INFINITY`" ]# [ doc = " - `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`" ]# [ doc = " - `NAN` if the number is NaN" ]# [ inline ] fn signum (& self )-> $t { FloatCore :: signum (* self )}# [ doc = " Returns `true` if the number is positive, including `+0.0` and `INFINITY`" ]# [ inline ] fn is_positive (& self )-> bool { FloatCore :: is_sign_positive (* self )}# [ doc = " Returns `true` if the number is negative, including `-0.0` and `NEG_INFINITY`" ]# [ inline ] fn is_negative (& self )-> bool { FloatCore :: is_sign_negative (* self )}}}; }
+macro_rules! __ra_macro_fixture203 {($name : ident for $($t : ty )*)=>($(impl $name for $t {})*)}
+macro_rules! __ra_macro_fixture204 {($name : ident for $($t : ty )*)=>($(impl $name for $t { type FromStrRadixErr = :: core :: num :: ParseIntError ; # [ inline ] fn from_str_radix ( s : & str , radix : u32 )-> Result < Self , :: core :: num :: ParseIntError > {<$t >:: from_str_radix ( s , radix )}})*)}
+macro_rules! __ra_macro_fixture205 {($name : ident for $($t : ident )*)=>($(impl $name for $t { type FromStrRadixErr = ParseFloatError ; fn from_str_radix ( src : & str , radix : u32 )-> Result < Self , Self :: FromStrRadixErr > { use self :: FloatErrorKind ::*; use self :: ParseFloatError as PFE ; match src { "inf" => return Ok ( core ::$t :: INFINITY ), "-inf" => return Ok ( core ::$t :: NEG_INFINITY ), "NaN" => return Ok ( core ::$t :: NAN ), _ =>{}, } fn slice_shift_char ( src : & str )-> Option < ( char , & str )> { let mut chars = src . chars (); if let Some ( ch )= chars . next (){ Some (( ch , chars . as_str ()))} else { None }} let ( is_positive , src )= match slice_shift_char ( src ){ None => return Err ( PFE { kind : Empty }), Some (( '-' , "" ))=> return Err ( PFE { kind : Empty }), Some (( '-' , src ))=>( false , src ), Some ((_, _))=>( true , src ), }; let mut sig = if is_positive { 0.0 } else {- 0.0 }; let mut prev_sig = sig ; let mut cs = src . chars (). enumerate (); let mut exp_info = None ::< ( char , usize )>; for ( i , c ) in cs . by_ref (){ match c . to_digit ( radix ){ Some ( digit )=>{ sig = sig * ( radix as $t ); if is_positive { sig = sig + (( digit as isize ) as $t ); } else { sig = sig - (( digit as isize ) as $t ); } if prev_sig != 0.0 { if is_positive && sig <= prev_sig { return Ok ( core ::$t :: INFINITY ); } if ! is_positive && sig >= prev_sig { return Ok ( core ::$t :: NEG_INFINITY ); } if is_positive && ( prev_sig != ( sig - digit as $t )/ radix as $t ){ return Ok ( core ::$t :: INFINITY ); } if ! is_positive && ( prev_sig != ( sig + digit as $t )/ radix as $t ){ return Ok ( core ::$t :: NEG_INFINITY ); }} prev_sig = sig ; }, None => match c { 'e' | 'E' | 'p' | 'P' =>{ exp_info = Some (( c , i + 1 )); break ; }, '.' =>{ break ; }, _ =>{ return Err ( PFE { kind : Invalid }); }, }, }} if exp_info . is_none (){ let mut power = 1.0 ; for ( i , c ) in cs . by_ref (){ match c . to_digit ( radix ){ Some ( digit )=>{ power = power / ( radix as $t ); sig = if is_positive { sig + ( digit as $t )* power } else { sig - ( digit as $t )* power }; if is_positive && sig < prev_sig { return Ok ( core ::$t :: INFINITY ); } if ! is_positive && sig > prev_sig { return Ok ( core ::$t :: NEG_INFINITY ); } prev_sig = sig ; }, None => match c { 'e' | 'E' | 'p' | 'P' =>{ exp_info = Some (( c , i + 1 )); break ; }, _ =>{ return Err ( PFE { kind : Invalid }); }, }, }}} let exp = match exp_info { Some (( c , offset ))=>{ let base = match c { 'E' | 'e' if radix == 10 => 10.0 , 'P' | 'p' if radix == 16 => 2.0 , _ => return Err ( PFE { kind : Invalid }), }; let src = & src [ offset ..]; let ( is_positive , exp )= match slice_shift_char ( src ){ Some (( '-' , src ))=>( false , src . parse ::< usize > ()), Some (( '+' , src ))=>( true , src . parse ::< usize > ()), Some ((_, _))=>( true , src . parse ::< usize > ()), None => return Err ( PFE { kind : Invalid }), }; # [ cfg ( feature = "std" )] fn pow ( base : $t , exp : usize )-> $t { Float :: powi ( base , exp as i32 )} match ( is_positive , exp ){( true , Ok ( exp ))=> pow ( base , exp ), ( false , Ok ( exp ))=> 1.0 / pow ( base , exp ), (_, Err (_))=> return Err ( PFE { kind : Invalid }), }}, None => 1.0 , }; Ok ( sig * exp )}})*)}
+macro_rules! __ra_macro_fixture206 {($m : ident !! )=>($m ! {}); ($m : ident !! $h : ident , $($t : ident ,)* )=>($m ! {$h $($t )* } for_each_tuple_ ! {$m !! $($t ,)* }); }
+macro_rules! __ra_macro_fixture207 {($($name : ident )* )=>( impl <$($name : Bounded ,)*> Bounded for ($($name ,)*){# [ inline ] fn min_value ()-> Self {($($name :: min_value (),)*)}# [ inline ] fn max_value ()-> Self {($($name :: max_value (),)*)}}); }
+macro_rules! __ra_macro_fixture208 {($T : ty , $U : ty )=>{ impl Roots for $T {# [ inline ] fn nth_root (& self , n : u32 )-> Self { if * self >= 0 {(* self as $U ). nth_root ( n ) as Self } else { assert ! ( n . is_odd (), "even roots of a negative are imaginary" ); - (( self . wrapping_neg () as $U ). nth_root ( n ) as Self )}}# [ inline ] fn sqrt (& self )-> Self { assert ! (* self >= 0 , "the square root of a negative is imaginary" ); (* self as $U ). sqrt () as Self }# [ inline ] fn cbrt (& self )-> Self { if * self >= 0 {(* self as $U ). cbrt () as Self } else {- (( self . wrapping_neg () as $U ). cbrt () as Self )}}}}; }
+macro_rules! __ra_macro_fixture209 {($T : ident )=>{ impl Roots for $T {# [ inline ] fn nth_root (& self , n : u32 )-> Self { fn go ( a : $T , n : u32 )-> $T { match n { 0 => panic ! ( "can't find a root of degree 0!" ), 1 => return a , 2 => return a . sqrt (), 3 => return a . cbrt (), _ =>(), } if bits ::<$T > ()<= n || a < ( 1 << n ){ return ( a > 0 ) as $T ; } if bits ::<$T > ()> 64 { return if a <= core :: u64 :: MAX as $T {( a as u64 ). nth_root ( n ) as $T } else { let lo = ( a >> n ). nth_root ( n )<< 1 ; let hi = lo + 1 ; if hi . next_power_of_two (). trailing_zeros ()* n >= bits ::<$T > (){ match checked_pow ( hi , n as usize ){ Some ( x ) if x <= a => hi , _ => lo , }} else { if hi . pow ( n )<= a { hi } else { lo }}}; }# [ cfg ( feature = "std" )]# [ inline ] fn guess ( x : $T , n : u32 )-> $T { if bits ::<$T > ()<= 32 || x <= core :: u32 :: MAX as $T { 1 << (( log2 ( x )+ n - 1 )/ n )} else {(( x as f64 ). ln ()/ f64 :: from ( n )). exp () as $T }}# [ cfg ( not ( feature = "std" ))]# [ inline ] fn guess ( x : $T , n : u32 )-> $T { 1 << (( log2 ( x )+ n - 1 )/ n )} let n1 = n - 1 ; let next = | x : $T | { let y = match checked_pow ( x , n1 as usize ){ Some ( ax )=> a / ax , None => 0 , }; ( y + x * n1 as $T )/ n as $T }; fixpoint ( guess ( a , n ), next )} go (* self , n )}# [ inline ] fn sqrt (& self )-> Self { fn go ( a : $T )-> $T { if bits ::<$T > ()> 64 { return if a <= core :: u64 :: MAX as $T {( a as u64 ). sqrt () as $T } else { let lo = ( a >> 2u32 ). sqrt ()<< 1 ; let hi = lo + 1 ; if hi * hi <= a { hi } else { lo }}; } if a < 4 { return ( a > 0 ) as $T ; }# [ cfg ( feature = "std" )]# [ inline ] fn guess ( x : $T )-> $T {( x as f64 ). sqrt () as $T }# [ cfg ( not ( feature = "std" ))]# [ inline ] fn guess ( x : $T )-> $T { 1 << (( log2 ( x )+ 1 )/ 2 )} let next = | x : $T | ( a / x + x )>> 1 ; fixpoint ( guess ( a ), next )} go (* self )}# [ inline ] fn cbrt (& self )-> Self { fn go ( a : $T )-> $T { if bits ::<$T > ()> 64 { return if a <= core :: u64 :: MAX as $T {( a as u64 ). cbrt () as $T } else { let lo = ( a >> 3u32 ). cbrt ()<< 1 ; let hi = lo + 1 ; if hi * hi * hi <= a { hi } else { lo }}; } if bits ::<$T > ()<= 32 { let mut x = a ; let mut y2 = 0 ; let mut y = 0 ; let smax = bits ::<$T > ()/ 3 ; for s in ( 0 .. smax + 1 ). rev (){ let s = s * 3 ; y2 *= 4 ; y *= 2 ; let b = 3 * ( y2 + y )+ 1 ; if x >> s >= b { x -= b << s ; y2 += 2 * y + 1 ; y += 1 ; }} return y ; } if a < 8 { return ( a > 0 ) as $T ; } if a <= core :: u32 :: MAX as $T { return ( a as u32 ). cbrt () as $T ; }# [ cfg ( feature = "std" )]# [ inline ] fn guess ( x : $T )-> $T {( x as f64 ). cbrt () as $T }# [ cfg ( not ( feature = "std" ))]# [ inline ] fn guess ( x : $T )-> $T { 1 << (( log2 ( x )+ 2 )/ 3 )} let next = | x : $T | ( a / ( x * x )+ x * 2 )/ 3 ; fixpoint ( guess ( a ), next )} go (* self )}}}; }
+macro_rules! __ra_macro_fixture210 {($T : ty , $test_mod : ident )=>{ impl Integer for $T {# [ doc = " Floored integer division" ]# [ inline ] fn div_floor (& self , other : & Self )-> Self { let ( d , r )= self . div_rem ( other ); if ( r > 0 && * other < 0 )|| ( r < 0 && * other > 0 ){ d - 1 } else { d }}# [ doc = " Floored integer modulo" ]# [ inline ] fn mod_floor (& self , other : & Self )-> Self { let r = * self % * other ; if ( r > 0 && * other < 0 )|| ( r < 0 && * other > 0 ){ r + * other } else { r }}# [ doc = " Calculates `div_floor` and `mod_floor` simultaneously" ]# [ inline ] fn div_mod_floor (& self , other : & Self )-> ( Self , Self ){ let ( d , r )= self . div_rem ( other ); if ( r > 0 && * other < 0 )|| ( r < 0 && * other > 0 ){( d - 1 , r + * other )} else {( d , r )}}# [ inline ] fn div_ceil (& self , other : & Self )-> Self { let ( d , r )= self . div_rem ( other ); if ( r > 0 && * other > 0 )|| ( r < 0 && * other < 0 ){ d + 1 } else { d }}# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and" ]# [ doc = " `other`. The result is always positive." ]# [ inline ] fn gcd (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return ( m | n ). abs (); } let shift = ( m | n ). trailing_zeros (); if m == Self :: min_value ()|| n == Self :: min_value (){ return ( 1 << shift ). abs (); } m = m . abs (); n = n . abs (); m >>= m . trailing_zeros (); n >>= n . trailing_zeros (); while m != n { if m > n { m -= n ; m >>= m . trailing_zeros (); } else { n -= m ; n >>= n . trailing_zeros (); }} m << shift }# [ inline ] fn extended_gcd_lcm (& self , other : & Self )-> ( ExtendedGcd < Self >, Self ){ let egcd = self . extended_gcd ( other ); let lcm = if egcd . gcd . is_zero (){ Self :: zero ()} else {(* self * (* other / egcd . gcd )). abs ()}; ( egcd , lcm )}# [ doc = " Calculates the Lowest Common Multiple (LCM) of the number and" ]# [ doc = " `other`." ]# [ inline ] fn lcm (& self , other : & Self )-> Self { self . gcd_lcm ( other ). 1 }# [ doc = " Calculates the Greatest Common Divisor (GCD) and" ]# [ doc = " Lowest Common Multiple (LCM) of the number and `other`." ]# [ inline ] fn gcd_lcm (& self , other : & Self )-> ( Self , Self ){ if self . is_zero ()&& other . is_zero (){ return ( Self :: zero (), Self :: zero ()); } let gcd = self . gcd ( other ); let lcm = (* self * (* other / gcd )). abs (); ( gcd , lcm )}# [ doc = " Deprecated, use `is_multiple_of` instead." ]# [ inline ] fn divides (& self , other : & Self )-> bool { self . is_multiple_of ( other )}# [ doc = " Returns `true` if the number is a multiple of `other`." ]# [ inline ] fn is_multiple_of (& self , other : & Self )-> bool {* self % * other == 0 }# [ doc = " Returns `true` if the number is divisible by `2`" ]# [ inline ] fn is_even (& self )-> bool {(* self )& 1 == 0 }# [ doc = " Returns `true` if the number is not divisible by `2`" ]# [ inline ] fn is_odd (& self )-> bool {! self . is_even ()}# [ doc = " Simultaneous truncated integer division and modulus." ]# [ inline ] fn div_rem (& self , other : & Self )-> ( Self , Self ){(* self / * other , * self % * other )}}# [ cfg ( test )] mod $test_mod { use core :: mem ; use Integer ; # [ doc = " Checks that the division rule holds for:" ]# [ doc = "" ]# [ doc = " - `n`: numerator (dividend)" ]# [ doc = " - `d`: denominator (divisor)" ]# [ doc = " - `qr`: quotient and remainder" ]# [ cfg ( test )] fn test_division_rule (( n , d ): ($T , $T ), ( q , r ): ($T , $T )){ assert_eq ! ( d * q + r , n ); }# [ test ] fn test_div_rem (){ fn test_nd_dr ( nd : ($T , $T ), qr : ($T , $T )){ let ( n , d )= nd ; let separate_div_rem = ( n / d , n % d ); let combined_div_rem = n . div_rem (& d ); assert_eq ! ( separate_div_rem , qr ); assert_eq ! ( combined_div_rem , qr ); test_division_rule ( nd , separate_div_rem ); test_division_rule ( nd , combined_div_rem ); } test_nd_dr (( 8 , 3 ), ( 2 , 2 )); test_nd_dr (( 8 , - 3 ), (- 2 , 2 )); test_nd_dr ((- 8 , 3 ), (- 2 , - 2 )); test_nd_dr ((- 8 , - 3 ), ( 2 , - 2 )); test_nd_dr (( 1 , 2 ), ( 0 , 1 )); test_nd_dr (( 1 , - 2 ), ( 0 , 1 )); test_nd_dr ((- 1 , 2 ), ( 0 , - 1 )); test_nd_dr ((- 1 , - 2 ), ( 0 , - 1 )); }# [ test ] fn test_div_mod_floor (){ fn test_nd_dm ( nd : ($T , $T ), dm : ($T , $T )){ let ( n , d )= nd ; let separate_div_mod_floor = ( n . div_floor (& d ), n . mod_floor (& d )); let combined_div_mod_floor = n . div_mod_floor (& d ); assert_eq ! ( separate_div_mod_floor , dm ); assert_eq ! ( combined_div_mod_floor , dm ); test_division_rule ( nd , separate_div_mod_floor ); test_division_rule ( nd , combined_div_mod_floor ); } test_nd_dm (( 8 , 3 ), ( 2 , 2 )); test_nd_dm (( 8 , - 3 ), (- 3 , - 1 )); test_nd_dm ((- 8 , 3 ), (- 3 , 1 )); test_nd_dm ((- 8 , - 3 ), ( 2 , - 2 )); test_nd_dm (( 1 , 2 ), ( 0 , 1 )); test_nd_dm (( 1 , - 2 ), (- 1 , - 1 )); test_nd_dm ((- 1 , 2 ), (- 1 , 1 )); test_nd_dm ((- 1 , - 2 ), ( 0 , - 1 )); }# [ test ] fn test_gcd (){ assert_eq ! (( 10 as $T ). gcd (& 2 ), 2 as $T ); assert_eq ! (( 10 as $T ). gcd (& 3 ), 1 as $T ); assert_eq ! (( 0 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 3 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 56 as $T ). gcd (& 42 ), 14 as $T ); assert_eq ! (( 3 as $T ). gcd (&- 3 ), 3 as $T ); assert_eq ! ((- 6 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! ((- 4 as $T ). gcd (&- 2 ), 2 as $T ); }# [ test ] fn test_gcd_cmp_with_euclidean (){ fn euclidean_gcd ( mut m : $T , mut n : $T )-> $T { while m != 0 { mem :: swap (& mut m , & mut n ); m %= n ; } n . abs ()} for i in - 127 .. 127 { for j in - 127 .. 127 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); }} let i = 127 ; for j in - 127 .. 127 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); } assert_eq ! ( 127 . gcd (& 127 ), 127 ); }# [ test ] fn test_gcd_min_val (){ let min = <$T >:: min_value (); let max = <$T >:: max_value (); let max_pow2 = max / 2 + 1 ; assert_eq ! ( min . gcd (& max ), 1 as $T ); assert_eq ! ( max . gcd (& min ), 1 as $T ); assert_eq ! ( min . gcd (& max_pow2 ), max_pow2 ); assert_eq ! ( max_pow2 . gcd (& min ), max_pow2 ); assert_eq ! ( min . gcd (& 42 ), 2 as $T ); assert_eq ! (( 42 as $T ). gcd (& min ), 2 as $T ); }# [ test ]# [ should_panic ] fn test_gcd_min_val_min_val (){ let min = <$T >:: min_value (); assert ! ( min . gcd (& min )>= 0 ); }# [ test ]# [ should_panic ] fn test_gcd_min_val_0 (){ let min = <$T >:: min_value (); assert ! ( min . gcd (& 0 )>= 0 ); }# [ test ]# [ should_panic ] fn test_gcd_0_min_val (){ let min = <$T >:: min_value (); assert ! (( 0 as $T ). gcd (& min )>= 0 ); }# [ test ] fn test_lcm (){ assert_eq ! (( 1 as $T ). lcm (& 0 ), 0 as $T ); assert_eq ! (( 0 as $T ). lcm (& 1 ), 0 as $T ); assert_eq ! (( 1 as $T ). lcm (& 1 ), 1 as $T ); assert_eq ! ((- 1 as $T ). lcm (& 1 ), 1 as $T ); assert_eq ! (( 1 as $T ). lcm (&- 1 ), 1 as $T ); assert_eq ! ((- 1 as $T ). lcm (&- 1 ), 1 as $T ); assert_eq ! (( 8 as $T ). lcm (& 9 ), 72 as $T ); assert_eq ! (( 11 as $T ). lcm (& 5 ), 55 as $T ); }# [ test ] fn test_gcd_lcm (){ use core :: iter :: once ; for i in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ for j in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ assert_eq ! ( i . gcd_lcm (& j ), ( i . gcd (& j ), i . lcm (& j ))); }}}# [ test ] fn test_extended_gcd_lcm (){ use core :: fmt :: Debug ; use traits :: NumAssign ; use ExtendedGcd ; fn check < A : Copy + Debug + Integer + NumAssign > ( a : A , b : A ){ let ExtendedGcd { gcd , x , y , .. }= a . extended_gcd (& b ); assert_eq ! ( gcd , x * a + y * b ); } use core :: iter :: once ; for i in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ for j in once ( 0 ). chain (( 1 ..). take ( 127 ). flat_map (| a | once ( a ). chain ( once (- a )))). chain ( once (- 128 )){ check ( i , j ); let ( ExtendedGcd { gcd , .. }, lcm )= i . extended_gcd_lcm (& j ); assert_eq ! (( gcd , lcm ), ( i . gcd (& j ), i . lcm (& j ))); }}}# [ test ] fn test_even (){ assert_eq ! ((- 4 as $T ). is_even (), true ); assert_eq ! ((- 3 as $T ). is_even (), false ); assert_eq ! ((- 2 as $T ). is_even (), true ); assert_eq ! ((- 1 as $T ). is_even (), false ); assert_eq ! (( 0 as $T ). is_even (), true ); assert_eq ! (( 1 as $T ). is_even (), false ); assert_eq ! (( 2 as $T ). is_even (), true ); assert_eq ! (( 3 as $T ). is_even (), false ); assert_eq ! (( 4 as $T ). is_even (), true ); }# [ test ] fn test_odd (){ assert_eq ! ((- 4 as $T ). is_odd (), false ); assert_eq ! ((- 3 as $T ). is_odd (), true ); assert_eq ! ((- 2 as $T ). is_odd (), false ); assert_eq ! ((- 1 as $T ). is_odd (), true ); assert_eq ! (( 0 as $T ). is_odd (), false ); assert_eq ! (( 1 as $T ). is_odd (), true ); assert_eq ! (( 2 as $T ). is_odd (), false ); assert_eq ! (( 3 as $T ). is_odd (), true ); assert_eq ! (( 4 as $T ). is_odd (), false ); }}}; }
+macro_rules! __ra_macro_fixture211 {($T : ty , $test_mod : ident )=>{ impl Integer for $T {# [ doc = " Unsigned integer division. Returns the same result as `div` (`/`)." ]# [ inline ] fn div_floor (& self , other : & Self )-> Self {* self / * other }# [ doc = " Unsigned integer modulo operation. Returns the same result as `rem` (`%`)." ]# [ inline ] fn mod_floor (& self , other : & Self )-> Self {* self % * other }# [ inline ] fn div_ceil (& self , other : & Self )-> Self {* self / * other + ( 0 != * self % * other ) as Self }# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and `other`" ]# [ inline ] fn gcd (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return m | n ; } let shift = ( m | n ). trailing_zeros (); m >>= m . trailing_zeros (); n >>= n . trailing_zeros (); while m != n { if m > n { m -= n ; m >>= m . trailing_zeros (); } else { n -= m ; n >>= n . trailing_zeros (); }} m << shift }# [ inline ] fn extended_gcd_lcm (& self , other : & Self )-> ( ExtendedGcd < Self >, Self ){ let egcd = self . extended_gcd ( other ); let lcm = if egcd . gcd . is_zero (){ Self :: zero ()} else {* self * (* other / egcd . gcd )}; ( egcd , lcm )}# [ doc = " Calculates the Lowest Common Multiple (LCM) of the number and `other`." ]# [ inline ] fn lcm (& self , other : & Self )-> Self { self . gcd_lcm ( other ). 1 }# [ doc = " Calculates the Greatest Common Divisor (GCD) and" ]# [ doc = " Lowest Common Multiple (LCM) of the number and `other`." ]# [ inline ] fn gcd_lcm (& self , other : & Self )-> ( Self , Self ){ if self . is_zero ()&& other . is_zero (){ return ( Self :: zero (), Self :: zero ()); } let gcd = self . gcd ( other ); let lcm = * self * (* other / gcd ); ( gcd , lcm )}# [ doc = " Deprecated, use `is_multiple_of` instead." ]# [ inline ] fn divides (& self , other : & Self )-> bool { self . is_multiple_of ( other )}# [ doc = " Returns `true` if the number is a multiple of `other`." ]# [ inline ] fn is_multiple_of (& self , other : & Self )-> bool {* self % * other == 0 }# [ doc = " Returns `true` if the number is divisible by `2`." ]# [ inline ] fn is_even (& self )-> bool {* self % 2 == 0 }# [ doc = " Returns `true` if the number is not divisible by `2`." ]# [ inline ] fn is_odd (& self )-> bool {! self . is_even ()}# [ doc = " Simultaneous truncated integer division and modulus." ]# [ inline ] fn div_rem (& self , other : & Self )-> ( Self , Self ){(* self / * other , * self % * other )}}# [ cfg ( test )] mod $test_mod { use core :: mem ; use Integer ; # [ test ] fn test_div_mod_floor (){ assert_eq ! (( 10 as $T ). div_floor (& ( 3 as $T )), 3 as $T ); assert_eq ! (( 10 as $T ). mod_floor (& ( 3 as $T )), 1 as $T ); assert_eq ! (( 10 as $T ). div_mod_floor (& ( 3 as $T )), ( 3 as $T , 1 as $T )); assert_eq ! (( 5 as $T ). div_floor (& ( 5 as $T )), 1 as $T ); assert_eq ! (( 5 as $T ). mod_floor (& ( 5 as $T )), 0 as $T ); assert_eq ! (( 5 as $T ). div_mod_floor (& ( 5 as $T )), ( 1 as $T , 0 as $T )); assert_eq ! (( 3 as $T ). div_floor (& ( 7 as $T )), 0 as $T ); assert_eq ! (( 3 as $T ). mod_floor (& ( 7 as $T )), 3 as $T ); assert_eq ! (( 3 as $T ). div_mod_floor (& ( 7 as $T )), ( 0 as $T , 3 as $T )); }# [ test ] fn test_gcd (){ assert_eq ! (( 10 as $T ). gcd (& 2 ), 2 as $T ); assert_eq ! (( 10 as $T ). gcd (& 3 ), 1 as $T ); assert_eq ! (( 0 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 3 as $T ). gcd (& 3 ), 3 as $T ); assert_eq ! (( 56 as $T ). gcd (& 42 ), 14 as $T ); }# [ test ] fn test_gcd_cmp_with_euclidean (){ fn euclidean_gcd ( mut m : $T , mut n : $T )-> $T { while m != 0 { mem :: swap (& mut m , & mut n ); m %= n ; } n } for i in 0 .. 255 { for j in 0 .. 255 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); }} let i = 255 ; for j in 0 .. 255 { assert_eq ! ( euclidean_gcd ( i , j ), i . gcd (& j )); } assert_eq ! ( 255 . gcd (& 255 ), 255 ); }# [ test ] fn test_lcm (){ assert_eq ! (( 1 as $T ). lcm (& 0 ), 0 as $T ); assert_eq ! (( 0 as $T ). lcm (& 1 ), 0 as $T ); assert_eq ! (( 1 as $T ). lcm (& 1 ), 1 as $T ); assert_eq ! (( 8 as $T ). lcm (& 9 ), 72 as $T ); assert_eq ! (( 11 as $T ). lcm (& 5 ), 55 as $T ); assert_eq ! (( 15 as $T ). lcm (& 17 ), 255 as $T ); }# [ test ] fn test_gcd_lcm (){ for i in ( 0 ..). take ( 256 ){ for j in ( 0 ..). take ( 256 ){ assert_eq ! ( i . gcd_lcm (& j ), ( i . gcd (& j ), i . lcm (& j ))); }}}# [ test ] fn test_is_multiple_of (){ assert ! (( 6 as $T ). is_multiple_of (& ( 6 as $T ))); assert ! (( 6 as $T ). is_multiple_of (& ( 3 as $T ))); assert ! (( 6 as $T ). is_multiple_of (& ( 1 as $T ))); }# [ test ] fn test_even (){ assert_eq ! (( 0 as $T ). is_even (), true ); assert_eq ! (( 1 as $T ). is_even (), false ); assert_eq ! (( 2 as $T ). is_even (), true ); assert_eq ! (( 3 as $T ). is_even (), false ); assert_eq ! (( 4 as $T ). is_even (), true ); }# [ test ] fn test_odd (){ assert_eq ! (( 0 as $T ). is_odd (), false ); assert_eq ! (( 1 as $T ). is_odd (), true ); assert_eq ! (( 2 as $T ). is_odd (), false ); assert_eq ! (( 3 as $T ). is_odd (), true ); assert_eq ! (( 4 as $T ). is_odd (), false ); }}}; }
+macro_rules! __ra_macro_fixture212 {($I : ident , $U : ident )=>{ mod $I { use check ; use neg ; use num_integer :: Roots ; use pos ; use std :: mem ; # [ test ]# [ should_panic ] fn zeroth_root (){( 123 as $I ). nth_root ( 0 ); }# [ test ] fn sqrt (){ check (& pos ::<$I > (), 2 ); }# [ test ]# [ should_panic ] fn sqrt_neg (){(- 123 as $I ). sqrt (); }# [ test ] fn cbrt (){ check (& pos ::<$I > (), 3 ); }# [ test ] fn cbrt_neg (){ check (& neg ::<$I > (), 3 ); }# [ test ] fn nth_root (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; let pos = pos ::<$I > (); for n in 4 .. bits { check (& pos , n ); }}# [ test ] fn nth_root_neg (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; let neg = neg ::<$I > (); for n in 2 .. bits / 2 { check (& neg , 2 * n + 1 ); }}# [ test ] fn bit_size (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; assert_eq ! ($I :: max_value (). nth_root ( bits - 1 ), 2 ); assert_eq ! ($I :: max_value (). nth_root ( bits ), 1 ); assert_eq ! ($I :: min_value (). nth_root ( bits ), - 2 ); assert_eq ! (($I :: min_value ()+ 1 ). nth_root ( bits ), - 1 ); }} mod $U { use check ; use num_integer :: Roots ; use pos ; use std :: mem ; # [ test ]# [ should_panic ] fn zeroth_root (){( 123 as $U ). nth_root ( 0 ); }# [ test ] fn sqrt (){ check (& pos ::<$U > (), 2 ); }# [ test ] fn cbrt (){ check (& pos ::<$U > (), 3 ); }# [ test ] fn nth_root (){ let bits = 8 * mem :: size_of ::<$I > () as u32 - 1 ; let pos = pos ::<$I > (); for n in 4 .. bits { check (& pos , n ); }}# [ test ] fn bit_size (){ let bits = 8 * mem :: size_of ::<$U > () as u32 ; assert_eq ! ($U :: max_value (). nth_root ( bits - 1 ), 2 ); assert_eq ! ($U :: max_value (). nth_root ( bits ), 1 ); }}}; }
+macro_rules! __ra_macro_fixture213 {($name : ident , $ranges : ident )=>{# [ test ] fn $name (){ let set = ranges_to_set ( general_category ::$ranges ); let hashset : HashSet < u32 > = set . iter (). cloned (). collect (); let trie = TrieSetOwned :: from_codepoints (& set ). unwrap (); for cp in 0 .. 0x110000 { assert ! ( trie . contains_u32 ( cp )== hashset . contains (& cp )); } assert ! (! trie . contains_u32 ( 0x110000 )); assert ! (! hashset . contains (& 0x110000 )); }}; }
+macro_rules! __ra_macro_fixture214 {{$(mod $module : ident ; [$($prop : ident , )*]; )*}=>{$(# [ allow ( unused )] mod $module ; $(pub fn $prop ( c : char )-> bool { self ::$module ::$prop . contains_char ( c )})* )*}; }
+macro_rules! __ra_macro_fixture215 {($name : ident : $input : expr , $($x : tt )* )=>{# [ test ] fn $name (){ let expected_sets = vec ! [$($x )*]; let range_set : RangeSet = $input . parse (). expect ( "parse failed" ); assert_eq ! ( range_set . ranges . len (), expected_sets . len ()); for it in range_set . ranges . iter (). zip ( expected_sets . iter ()){ let ( ai , bi )= it ; assert_eq ! ( ai . comparator_set . len (), * bi ); }}}; }
+macro_rules! __ra_macro_fixture216 {($name : ident : $input : expr , $($x : tt )* )=>{# [ test ] fn $name (){ let expected_sets = vec ! [$($x )*]; let range_set = RangeSet :: parse ($input , Compat :: Npm ). expect ( "parse failed" ); assert_eq ! ( range_set . ranges . len (), expected_sets . len ()); for it in range_set . ranges . iter (). zip ( expected_sets . iter ()){ let ( ai , bi )= it ; assert_eq ! ( ai . comparator_set . len (), * bi ); }}}; }
+macro_rules! __ra_macro_fixture217 {($($name : ident : $value : expr , )* )=>{$(# [ test ] fn $name (){ assert ! ($value . parse ::< RangeSet > (). is_err ()); })* }; }
+macro_rules! __ra_macro_fixture218 {($($name : ident : $value : expr , )* )=>{$(# [ test ] fn $name (){ let ( input , expected_range )= $value ; let parsed_range = parse_range ( input ); let range = from_pair_iterator ( parsed_range , range_set :: Compat :: Cargo ). expect ( "parsing failed" ); let num_comparators = range . comparator_set . len (); let expected_comparators = expected_range . comparator_set . len (); assert_eq ! ( expected_comparators , num_comparators , "expected number of comparators: {}, got: {}" , expected_comparators , num_comparators ); assert_eq ! ( range , expected_range ); })* }; }
+macro_rules! __ra_macro_fixture219 {($($name : ident : $value : expr , )* )=>{$(# [ test ] fn $name (){ let ( input , expected_range )= $value ; let parsed_range = parse_range ( input ); let range = from_pair_iterator ( parsed_range , range_set :: Compat :: Npm ). expect ( "parsing failed" ); let num_comparators = range . comparator_set . len (); let expected_comparators = expected_range . comparator_set . len (); assert_eq ! ( expected_comparators , num_comparators , "expected number of comparators: {}, got: {}" , expected_comparators , num_comparators ); assert_eq ! ( range , expected_range ); })* }; }
+macro_rules! __ra_macro_fixture220 {($ty : ident $(<$lifetime : tt >)*)=>{ impl <$($lifetime ,)* E > Copy for $ty <$($lifetime ,)* E > {} impl <$($lifetime ,)* E > Clone for $ty <$($lifetime ,)* E > { fn clone (& self )-> Self {* self }}}; }
+macro_rules! __ra_macro_fixture221 {($ty : ty , $doc : tt , $name : ident , $method : ident $($cast : tt )*)=>{# [ doc = "A deserializer holding" ]# [ doc = $doc ] pub struct $name < E > { value : $ty , marker : PhantomData < E > } impl_copy_clone ! ($name ); impl < 'de , E > IntoDeserializer < 'de , E > for $ty where E : de :: Error , { type Deserializer = $name < E >; fn into_deserializer ( self )-> $name < E > {$name { value : self , marker : PhantomData , }}} impl < 'de , E > de :: Deserializer < 'de > for $name < E > where E : de :: Error , { type Error = E ; forward_to_deserialize_any ! { bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string bytes byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct map struct enum identifier ignored_any } fn deserialize_any < V > ( self , visitor : V )-> Result < V :: Value , Self :: Error > where V : de :: Visitor < 'de >, { visitor .$method ( self . value $($cast )*)}} impl < E > Debug for $name < E > { fn fmt (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . debug_struct ( stringify ! ($name )). field ( "value" , & self . value ). finish ()}}}}
+macro_rules! __ra_macro_fixture222 {($($tt : tt )*)=>{}; }
+macro_rules! __ra_macro_fixture223 {($ty : ident , $deserialize : ident $($methods : tt )*)=>{ impl < 'de > Deserialize < 'de > for $ty {# [ inline ] fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct PrimitiveVisitor ; impl < 'de > Visitor < 'de > for PrimitiveVisitor { type Value = $ty ; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( stringify ! ($ty ))}$($methods )* } deserializer .$deserialize ( PrimitiveVisitor )}}}; }
+macro_rules! __ra_macro_fixture224 {($ty : ident < T $(: $tbound1 : ident $(+ $tbound2 : ident )*)* $(, $typaram : ident : $bound1 : ident $(+ $bound2 : ident )*)* >, $access : ident , $clear : expr , $with_capacity : expr , $reserve : expr , $insert : expr )=>{ impl < 'de , T $(, $typaram )*> Deserialize < 'de > for $ty < T $(, $typaram )*> where T : Deserialize < 'de > $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound1 $(+ $bound2 )*,)* { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct SeqVisitor < T $(, $typaram )*> { marker : PhantomData <$ty < T $(, $typaram )*>>, } impl < 'de , T $(, $typaram )*> Visitor < 'de > for SeqVisitor < T $(, $typaram )*> where T : Deserialize < 'de > $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound1 $(+ $bound2 )*,)* { type Value = $ty < T $(, $typaram )*>; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( "a sequence" )}# [ inline ] fn visit_seq < A > ( self , mut $access : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, { let mut values = $with_capacity ; while let Some ( value )= try ! ($access . next_element ()){$insert (& mut values , value ); } Ok ( values )}} let visitor = SeqVisitor { marker : PhantomData }; deserializer . deserialize_seq ( visitor )} fn deserialize_in_place < D > ( deserializer : D , place : & mut Self )-> Result < (), D :: Error > where D : Deserializer < 'de >, { struct SeqInPlaceVisitor < 'a , T : 'a $(, $typaram : 'a )*> (& 'a mut $ty < T $(, $typaram )*>); impl < 'a , 'de , T $(, $typaram )*> Visitor < 'de > for SeqInPlaceVisitor < 'a , T $(, $typaram )*> where T : Deserialize < 'de > $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound1 $(+ $bound2 )*,)* { type Value = (); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( "a sequence" )}# [ inline ] fn visit_seq < A > ( mut self , mut $access : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, {$clear (& mut self . 0 ); $reserve (& mut self . 0 , size_hint :: cautious ($access . size_hint ())); while let Some ( value )= try ! ($access . next_element ()){$insert (& mut self . 0 , value ); } Ok (())}} deserializer . deserialize_seq ( SeqInPlaceVisitor ( place ))}}}}
+macro_rules! __ra_macro_fixture225 {($($len : expr =>($($n : tt )+))+)=>{$(impl < 'de , T > Visitor < 'de > for ArrayVisitor < [ T ; $len ]> where T : Deserialize < 'de >, { type Value = [ T ; $len ]; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "an array of length " , $len ))}# [ inline ] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, { Ok ([$(match try ! ( seq . next_element ()){ Some ( val )=> val , None => return Err ( Error :: invalid_length ($n , & self )), }),+])}} impl < 'a , 'de , T > Visitor < 'de > for ArrayInPlaceVisitor < 'a , [ T ; $len ]> where T : Deserialize < 'de >, { type Value = (); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "an array of length " , $len ))}# [ inline ] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, { let mut fail_idx = None ; for ( idx , dest ) in self . 0 [..]. iter_mut (). enumerate (){ if try ! ( seq . next_element_seed ( InPlaceSeed ( dest ))). is_none (){ fail_idx = Some ( idx ); break ; }} if let Some ( idx )= fail_idx { return Err ( Error :: invalid_length ( idx , & self )); } Ok (())}} impl < 'de , T > Deserialize < 'de > for [ T ; $len ] where T : Deserialize < 'de >, { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { deserializer . deserialize_tuple ($len , ArrayVisitor ::< [ T ; $len ]>:: new ())} fn deserialize_in_place < D > ( deserializer : D , place : & mut Self )-> Result < (), D :: Error > where D : Deserializer < 'de >, { deserializer . deserialize_tuple ($len , ArrayInPlaceVisitor ( place ))}})+ }}
+macro_rules! __ra_macro_fixture226 {($($len : tt =>($($n : tt $name : ident )+))+)=>{$(impl < 'de , $($name : Deserialize < 'de >),+> Deserialize < 'de > for ($($name ,)+){# [ inline ] fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct TupleVisitor <$($name ,)+> { marker : PhantomData < ($($name ,)+)>, } impl < 'de , $($name : Deserialize < 'de >),+> Visitor < 'de > for TupleVisitor <$($name ,)+> { type Value = ($($name ,)+); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "a tuple of size " , $len ))}# [ inline ]# [ allow ( non_snake_case )] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, {$(let $name = match try ! ( seq . next_element ()){ Some ( value )=> value , None => return Err ( Error :: invalid_length ($n , & self )), }; )+ Ok (($($name ,)+))}} deserializer . deserialize_tuple ($len , TupleVisitor { marker : PhantomData })}# [ inline ] fn deserialize_in_place < D > ( deserializer : D , place : & mut Self )-> Result < (), D :: Error > where D : Deserializer < 'de >, { struct TupleInPlaceVisitor < 'a , $($name : 'a ,)+> (& 'a mut ($($name ,)+)); impl < 'a , 'de , $($name : Deserialize < 'de >),+> Visitor < 'de > for TupleInPlaceVisitor < 'a , $($name ,)+> { type Value = (); fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( concat ! ( "a tuple of size " , $len ))}# [ inline ]# [ allow ( non_snake_case )] fn visit_seq < A > ( self , mut seq : A )-> Result < Self :: Value , A :: Error > where A : SeqAccess < 'de >, {$(if try ! ( seq . next_element_seed ( InPlaceSeed (& mut ( self . 0 ).$n ))). is_none (){ return Err ( Error :: invalid_length ($n , & self )); })+ Ok (())}} deserializer . deserialize_tuple ($len , TupleInPlaceVisitor ( place ))}})+ }}
+macro_rules! __ra_macro_fixture227 {($ty : ident < K $(: $kbound1 : ident $(+ $kbound2 : ident )*)*, V $(, $typaram : ident : $bound1 : ident $(+ $bound2 : ident )*)* >, $access : ident , $with_capacity : expr )=>{ impl < 'de , K , V $(, $typaram )*> Deserialize < 'de > for $ty < K , V $(, $typaram )*> where K : Deserialize < 'de > $(+ $kbound1 $(+ $kbound2 )*)*, V : Deserialize < 'de >, $($typaram : $bound1 $(+ $bound2 )*),* { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct MapVisitor < K , V $(, $typaram )*> { marker : PhantomData <$ty < K , V $(, $typaram )*>>, } impl < 'de , K , V $(, $typaram )*> Visitor < 'de > for MapVisitor < K , V $(, $typaram )*> where K : Deserialize < 'de > $(+ $kbound1 $(+ $kbound2 )*)*, V : Deserialize < 'de >, $($typaram : $bound1 $(+ $bound2 )*),* { type Value = $ty < K , V $(, $typaram )*>; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ( "a map" )}# [ inline ] fn visit_map < A > ( self , mut $access : A )-> Result < Self :: Value , A :: Error > where A : MapAccess < 'de >, { let mut values = $with_capacity ; while let Some (( key , value ))= try ! ($access . next_entry ()){ values . insert ( key , value ); } Ok ( values )}} let visitor = MapVisitor { marker : PhantomData }; deserializer . deserialize_map ( visitor )}}}}
+macro_rules! __ra_macro_fixture228 {($expecting : tt $ty : ty ; $size : tt )=>{ impl < 'de > Deserialize < 'de > for $ty { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { if deserializer . is_human_readable (){ deserializer . deserialize_str ( FromStrVisitor :: new ($expecting ))} else {< [ u8 ; $size ]>:: deserialize ( deserializer ). map (<$ty >:: from )}}}}; }
+macro_rules! __ra_macro_fixture229 {($expecting : tt $ty : ty , $new : expr )=>{ impl < 'de > Deserialize < 'de > for $ty { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { if deserializer . is_human_readable (){ deserializer . deserialize_str ( FromStrVisitor :: new ($expecting ))} else {< (_, u16 )>:: deserialize ( deserializer ). map (| ( ip , port )| $new ( ip , port ))}}}}; }
+macro_rules! __ra_macro_fixture230 {($name_kind : ident ($($variant : ident ; $bytes : expr ; $index : expr ),* )$expecting_message : expr , $variants_name : ident )=>{ enum $name_kind {$($variant ),* } static $variants_name : & 'static [& 'static str ]= & [$(stringify ! ($variant )),*]; impl < 'de > Deserialize < 'de > for $name_kind { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { struct KindVisitor ; impl < 'de > Visitor < 'de > for KindVisitor { type Value = $name_kind ; fn expecting (& self , formatter : & mut fmt :: Formatter )-> fmt :: Result { formatter . write_str ($expecting_message )} fn visit_u64 < E > ( self , value : u64 )-> Result < Self :: Value , E > where E : Error , { match value {$($index => Ok ($name_kind :: $variant ), )* _ => Err ( Error :: invalid_value ( Unexpected :: Unsigned ( value ), & self ),), }} fn visit_str < E > ( self , value : & str )-> Result < Self :: Value , E > where E : Error , { match value {$(stringify ! ($variant )=> Ok ($name_kind :: $variant ), )* _ => Err ( Error :: unknown_variant ( value , $variants_name )), }} fn visit_bytes < E > ( self , value : & [ u8 ])-> Result < Self :: Value , E > where E : Error , { match value {$($bytes => Ok ($name_kind :: $variant ), )* _ =>{ match str :: from_utf8 ( value ){ Ok ( value )=> Err ( Error :: unknown_variant ( value , $variants_name )), Err (_)=> Err ( Error :: invalid_value ( Unexpected :: Bytes ( value ), & self )), }}}}} deserializer . deserialize_identifier ( KindVisitor )}}}}
+macro_rules! __ra_macro_fixture231 {($(# [ doc = $doc : tt ])* ($($id : ident ),* ), $ty : ty , $func : expr )=>{$(# [ doc = $doc ])* impl < 'de $(, $id : Deserialize < 'de >,)*> Deserialize < 'de > for $ty { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { Deserialize :: deserialize ( deserializer ). map ($func )}}}}
+macro_rules! __ra_macro_fixture232 {($($T : ident , )+ )=>{$(# [ cfg ( num_nonzero )] impl < 'de > Deserialize < 'de > for num ::$T { fn deserialize < D > ( deserializer : D )-> Result < Self , D :: Error > where D : Deserializer < 'de >, { let value = try ! ( Deserialize :: deserialize ( deserializer )); match < num ::$T >:: new ( value ){ Some ( nonzero )=> Ok ( nonzero ), None => Err ( Error :: custom ( "expected a non-zero value" )), }}})+ }; }
+macro_rules! __ra_macro_fixture233 {( Error : Sized $(+ $($supertrait : ident )::+)*)=>{# [ doc = " The `Error` trait allows `Deserialize` implementations to create descriptive" ]# [ doc = " error messages belonging to the `Deserializer` against which they are" ]# [ doc = " currently running." ]# [ doc = "" ]# [ doc = " Every `Deserializer` declares an `Error` type that encompasses both" ]# [ doc = " general-purpose deserialization errors as well as errors specific to the" ]# [ doc = " particular deserialization format. For example the `Error` type of" ]# [ doc = " `serde_json` can represent errors like an invalid JSON escape sequence or an" ]# [ doc = " unterminated string literal, in addition to the error cases that are part of" ]# [ doc = " this trait." ]# [ doc = "" ]# [ doc = " Most deserializers should only need to provide the `Error::custom` method" ]# [ doc = " and inherit the default behavior for the other methods." ]# [ doc = "" ]# [ doc = " # Example implementation" ]# [ doc = "" ]# [ doc = " The [example data format] presented on the website shows an error" ]# [ doc = " type appropriate for a basic JSON data format." ]# [ doc = "" ]# [ doc = " [example data format]: https://serde.rs/data-format.html" ] pub trait Error : Sized $(+ $($supertrait )::+)* {# [ doc = " Raised when there is general error when deserializing a type." ]# [ doc = "" ]# [ doc = " The message should not be capitalized and should not end with a period." ]# [ doc = "" ]# [ doc = " ```edition2018" ]# [ doc = " # use std::str::FromStr;" ]# [ doc = " #" ]# [ doc = " # struct IpAddr;" ]# [ doc = " #" ]# [ doc = " # impl FromStr for IpAddr {" ]# [ doc = " # type Err = String;" ]# [ doc = " #" ]# [ doc = " # fn from_str(_: &str) -> Result<Self, String> {" ]# [ doc = " # unimplemented!()" ]# [ doc = " # }" ]# [ doc = " # }" ]# [ doc = " #" ]# [ doc = " use serde::de::{self, Deserialize, Deserializer};" ]# [ doc = "" ]# [ doc = " impl<\\\'de> Deserialize<\\\'de> for IpAddr {" ]# [ doc = " fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>" ]# [ doc = " where" ]# [ doc = " D: Deserializer<\\\'de>," ]# [ doc = " {" ]# [ doc = " let s = String::deserialize(deserializer)?;" ]# [ doc = " s.parse().map_err(de::Error::custom)" ]# [ doc = " }" ]# [ doc = " }" ]# [ doc = " ```" ] fn custom < T > ( msg : T )-> Self where T : Display ; # [ doc = " Raised when a `Deserialize` receives a type different from what it was" ]# [ doc = " expecting." ]# [ doc = "" ]# [ doc = " The `unexp` argument provides information about what type was received." ]# [ doc = " This is the type that was present in the input file or other source data" ]# [ doc = " of the Deserializer." ]# [ doc = "" ]# [ doc = " The `exp` argument provides information about what type was being" ]# [ doc = " expected. This is the type that is written in the program." ]# [ doc = "" ]# [ doc = " For example if we try to deserialize a String out of a JSON file" ]# [ doc = " containing an integer, the unexpected type is the integer and the" ]# [ doc = " expected type is the string." ]# [ cold ] fn invalid_type ( unexp : Unexpected , exp : & Expected )-> Self { Error :: custom ( format_args ! ( "invalid type: {}, expected {}" , unexp , exp ))}# [ doc = " Raised when a `Deserialize` receives a value of the right type but that" ]# [ doc = " is wrong for some other reason." ]# [ doc = "" ]# [ doc = " The `unexp` argument provides information about what value was received." ]# [ doc = " This is the value that was present in the input file or other source" ]# [ doc = " data of the Deserializer." ]# [ doc = "" ]# [ doc = " The `exp` argument provides information about what value was being" ]# [ doc = " expected. This is the type that is written in the program." ]# [ doc = "" ]# [ doc = " For example if we try to deserialize a String out of some binary data" ]# [ doc = " that is not valid UTF-8, the unexpected value is the bytes and the" ]# [ doc = " expected value is a string." ]# [ cold ] fn invalid_value ( unexp : Unexpected , exp : & Expected )-> Self { Error :: custom ( format_args ! ( "invalid value: {}, expected {}" , unexp , exp ))}# [ doc = " Raised when deserializing a sequence or map and the input data contains" ]# [ doc = " too many or too few elements." ]# [ doc = "" ]# [ doc = " The `len` argument is the number of elements encountered. The sequence" ]# [ doc = " or map may have expected more arguments or fewer arguments." ]# [ doc = "" ]# [ doc = " The `exp` argument provides information about what data was being" ]# [ doc = " expected. For example `exp` might say that a tuple of size 6 was" ]# [ doc = " expected." ]# [ cold ] fn invalid_length ( len : usize , exp : & Expected )-> Self { Error :: custom ( format_args ! ( "invalid length {}, expected {}" , len , exp ))}# [ doc = " Raised when a `Deserialize` enum type received a variant with an" ]# [ doc = " unrecognized name." ]# [ cold ] fn unknown_variant ( variant : & str , expected : & 'static [& 'static str ])-> Self { if expected . is_empty (){ Error :: custom ( format_args ! ( "unknown variant `{}`, there are no variants" , variant ))} else { Error :: custom ( format_args ! ( "unknown variant `{}`, expected {}" , variant , OneOf { names : expected }))}}# [ doc = " Raised when a `Deserialize` struct type received a field with an" ]# [ doc = " unrecognized name." ]# [ cold ] fn unknown_field ( field : & str , expected : & 'static [& 'static str ])-> Self { if expected . is_empty (){ Error :: custom ( format_args ! ( "unknown field `{}`, there are no fields" , field ))} else { Error :: custom ( format_args ! ( "unknown field `{}`, expected {}" , field , OneOf { names : expected }))}}# [ doc = " Raised when a `Deserialize` struct type expected to receive a required" ]# [ doc = " field with a particular name but that field was not present in the" ]# [ doc = " input." ]# [ cold ] fn missing_field ( field : & 'static str )-> Self { Error :: custom ( format_args ! ( "missing field `{}`" , field ))}# [ doc = " Raised when a `Deserialize` struct type received more than one of the" ]# [ doc = " same field." ]# [ cold ] fn duplicate_field ( field : & 'static str )-> Self { Error :: custom ( format_args ! ( "duplicate field `{}`" , field ))}}}}
+macro_rules! __ra_macro_fixture234 {($ty : ident , $method : ident $($cast : tt )*)=>{ impl Serialize for $ty {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { serializer .$method (* self $($cast )*)}}}}
+macro_rules! __ra_macro_fixture235 {($($len : tt )+)=>{$(impl < T > Serialize for [ T ; $len ] where T : Serialize , {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { let mut seq = try ! ( serializer . serialize_tuple ($len )); for e in self { try ! ( seq . serialize_element ( e )); } seq . end ()}})+ }}
+macro_rules! __ra_macro_fixture236 {($ty : ident < T $(: $tbound1 : ident $(+ $tbound2 : ident )*)* $(, $typaram : ident : $bound : ident )* >)=>{ impl < T $(, $typaram )*> Serialize for $ty < T $(, $typaram )*> where T : Serialize $(+ $tbound1 $(+ $tbound2 )*)*, $($typaram : $bound ,)* {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { serializer . collect_seq ( self )}}}}
+macro_rules! __ra_macro_fixture237 {($($len : expr =>($($n : tt $name : ident )+))+)=>{$(impl <$($name ),+> Serialize for ($($name ,)+) where $($name : Serialize ,)+ {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { let mut tuple = try ! ( serializer . serialize_tuple ($len )); $(try ! ( tuple . serialize_element (& self .$n )); )+ tuple . end ()}})+ }}
+macro_rules! __ra_macro_fixture238 {($ty : ident < K $(: $kbound1 : ident $(+ $kbound2 : ident )*)*, V $(, $typaram : ident : $bound : ident )* >)=>{ impl < K , V $(, $typaram )*> Serialize for $ty < K , V $(, $typaram )*> where K : Serialize $(+ $kbound1 $(+ $kbound2 )*)*, V : Serialize , $($typaram : $bound ,)* {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { serializer . collect_map ( self )}}}}
+macro_rules! __ra_macro_fixture239 {($(# [ doc = $doc : tt ])* <$($desc : tt )+ )=>{$(# [ doc = $doc ])* impl <$($desc )+ {# [ inline ] fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , {(** self ). serialize ( serializer )}}}; }
+macro_rules! __ra_macro_fixture240 {($($T : ident , )+ )=>{$(# [ cfg ( num_nonzero )] impl Serialize for num ::$T { fn serialize < S > (& self , serializer : S )-> Result < S :: Ok , S :: Error > where S : Serializer , { self . get (). serialize ( serializer )}})+ }}
+macro_rules! __ra_macro_fixture241 {( Error : Sized $(+ $($supertrait : ident )::+)*)=>{# [ doc = " Trait used by `Serialize` implementations to generically construct" ]# [ doc = " errors belonging to the `Serializer` against which they are" ]# [ doc = " currently running." ]# [ doc = "" ]# [ doc = " # Example implementation" ]# [ doc = "" ]# [ doc = " The [example data format] presented on the website shows an error" ]# [ doc = " type appropriate for a basic JSON data format." ]# [ doc = "" ]# [ doc = " [example data format]: https://serde.rs/data-format.html" ] pub trait Error : Sized $(+ $($supertrait )::+)* {# [ doc = " Used when a [`Serialize`] implementation encounters any error" ]# [ doc = " while serializing a type." ]# [ doc = "" ]# [ doc = " The message should not be capitalized and should not end with a" ]# [ doc = " period." ]# [ doc = "" ]# [ doc = " For example, a filesystem [`Path`] may refuse to serialize" ]# [ doc = " itself if it contains invalid UTF-8 data." ]# [ doc = "" ]# [ doc = " ```edition2018" ]# [ doc = " # struct Path;" ]# [ doc = " #" ]# [ doc = " # impl Path {" ]# [ doc = " # fn to_str(&self) -> Option<&str> {" ]# [ doc = " # unimplemented!()" ]# [ doc = " # }" ]# [ doc = " # }" ]# [ doc = " #" ]# [ doc = " use serde::ser::{self, Serialize, Serializer};" ]# [ doc = "" ]# [ doc = " impl Serialize for Path {" ]# [ doc = " fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>" ]# [ doc = " where" ]# [ doc = " S: Serializer," ]# [ doc = " {" ]# [ doc = " match self.to_str() {" ]# [ doc = " Some(s) => serializer.serialize_str(s)," ]# [ doc = " None => Err(ser::Error::custom(\\\"path contains invalid UTF-8 characters\\\"))," ]# [ doc = " }" ]# [ doc = " }" ]# [ doc = " }" ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " [`Path`]: https://doc.rust-lang.org/std/path/struct.Path.html" ]# [ doc = " [`Serialize`]: ../trait.Serialize.html" ] fn custom < T > ( msg : T )-> Self where T : Display ; }}}
+macro_rules! __ra_macro_fixture242 {($t : ty , $($attr : meta ),* )=>{$(# [$attr ])* impl < L , R > AsRef <$t > for Either < L , R > where L : AsRef <$t >, R : AsRef <$t > { fn as_ref (& self )-> &$t { either ! (* self , ref inner => inner . as_ref ())}}$(# [$attr ])* impl < L , R > AsMut <$t > for Either < L , R > where L : AsMut <$t >, R : AsMut <$t > { fn as_mut (& mut self )-> & mut $t { either ! (* self , ref mut inner => inner . as_mut ())}}}; }
+macro_rules! __ra_macro_fixture243 {($C : ident $P : ident ; $A : ident , $($I : ident ),* ; $($X : ident )*)=>(# [ derive ( Clone , Debug )] pub struct $C < I : Iterator > { item : Option < I :: Item >, iter : I , c : $P < I >, } impl < I : Iterator + Clone > From < I > for $C < I > { fn from ( mut iter : I )-> Self {$C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I : Iterator + Clone > From < I > for $C < Fuse < I >> { fn from ( iter : I )-> Self { let mut iter = iter . fuse (); $C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I , $A > Iterator for $C < I > where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Item = ($($I ),*); fn next (& mut self )-> Option < Self :: Item > { if let Some (($($X ),*,))= self . c . next (){ let z = self . item . clone (). unwrap (); Some (( z , $($X ),*))} else { self . item = self . iter . next (); self . item . clone (). and_then (| z | { self . c = $P :: from ( self . iter . clone ()); self . c . next (). map (| ($($X ),*,)| ( z , $($X ),*))})}}} impl < I , $A > HasCombination < I > for ($($I ),*) where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Combination = $C < Fuse < I >>; })}
+macro_rules! __ra_macro_fixture244 (($_A : ident , $_B : ident , )=>(); ($A : ident , $($B : ident ,)*)=>( impl_cons_iter ! ($($B ,)*); # [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> Iterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : Iterator < Item = (($($B ,)*), X )>, { type Item = ($($B ,)* X , ); fn next (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))} fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()} fn fold < Acc , Fold > ( self , accum : Acc , mut f : Fold )-> Acc where Fold : FnMut ( Acc , Self :: Item )-> Acc , { self . iter . fold ( accum , move | acc , (($($B ,)*), x )| f ( acc , ($($B ,)* x , )))}}# [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> DoubleEndedIterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : DoubleEndedIterator < Item = (($($B ,)*), X )>, { fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))}}); );
+macro_rules! __ra_macro_fixture245 {($($fmt_trait : ident )*)=>{$(impl < 'a , I > fmt ::$fmt_trait for Format < 'a , I > where I : Iterator , I :: Item : fmt ::$fmt_trait , { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { self . format ( f , fmt ::$fmt_trait :: fmt )}})* }}
+macro_rules! __ra_macro_fixture246 {([$($typarm : tt )*]$type_ : ty )=>{ impl <$($typarm )*> PeekingNext for $type_ { fn peeking_next < F > (& mut self , accept : F )-> Option < Self :: Item > where F : FnOnce (& Self :: Item )-> bool { let saved_state = self . clone (); if let Some ( r )= self . next (){ if ! accept (& r ){* self = saved_state ; } else { return Some ( r )}} None }}}}
+macro_rules! __ra_macro_fixture247 {($dummy : ident ,)=>{}; ($dummy : ident , $($Y : ident ,)*)=>( impl_tuple_collect ! ($($Y ,)*); impl < A > TupleCollect for ($(ignore_ident ! ($Y , A ),)*){ type Item = A ; type Buffer = [ Option < A >; count_ident ! ($($Y ,)*)- 1 ]; # [ allow ( unused_assignments , unused_mut )] fn collect_from_iter < I > ( iter : I , buf : & mut Self :: Buffer )-> Option < Self > where I : IntoIterator < Item = A >{ let mut iter = iter . into_iter (); $(let mut $Y = None ; )* loop {$($Y = iter . next (); if $Y . is_none (){ break })* return Some (($($Y . unwrap ()),*,))} let mut i = 0 ; let mut s = buf . as_mut (); $(if i < s . len (){ s [ i ]= $Y ; i += 1 ; })* return None ; } fn collect_from_iter_no_buf < I > ( iter : I )-> Option < Self > where I : IntoIterator < Item = A >{ let mut iter = iter . into_iter (); Some (($({let $Y = iter . next ()?; $Y }, )*))} fn num_items ()-> usize { count_ident ! ($($Y ,)*)} fn left_shift_push (& mut self , mut item : A ){ use std :: mem :: replace ; let & mut ($(ref mut $Y ),*,)= self ; macro_rules ! replace_item {($i : ident )=>{ item = replace ($i , item ); }}; rev_for_each_ident ! ( replace_item , $($Y ,)*); drop ( item ); }})}
+macro_rules! __ra_macro_fixture248 {($($B : ident ),*)=>(# [ allow ( non_snake_case )] impl <$($B : IntoIterator ),*> From < ($($B ,)*)> for Zip < ($($B :: IntoIter ,)*)> { fn from ( t : ($($B ,)*))-> Self { let ($($B ,)*)= t ; Zip { t : ($($B . into_iter (),)*)}}}# [ allow ( non_snake_case )]# [ allow ( unused_assignments )] impl <$($B ),*> Iterator for Zip < ($($B ,)*)> where $($B : Iterator , )* { type Item = ($($B :: Item ,)*); fn next (& mut self )-> Option < Self :: Item > { let ($(ref mut $B ,)*)= self . t ; $(let $B = match $B . next (){ None => return None , Some ( elt )=> elt }; )* Some (($($B ,)*))} fn size_hint (& self )-> ( usize , Option < usize >){ let sh = (:: std :: usize :: MAX , None ); let ($(ref $B ,)*)= self . t ; $(let sh = size_hint :: min ($B . size_hint (), sh ); )* sh }}# [ allow ( non_snake_case )] impl <$($B ),*> ExactSizeIterator for Zip < ($($B ,)*)> where $($B : ExactSizeIterator , )* {}# [ allow ( non_snake_case )] impl <$($B ),*> DoubleEndedIterator for Zip < ($($B ,)*)> where $($B : DoubleEndedIterator + ExactSizeIterator , )* {# [ inline ] fn next_back (& mut self )-> Option < Self :: Item > { let ($(ref mut $B ,)*)= self . t ; let size = * [$($B . len (), )*]. iter (). min (). unwrap (); $(if $B . len ()!= size { for _ in 0 ..$B . len ()- size {$B . next_back (); }})* match ($($B . next_back (),)*){($(Some ($B ),)*)=> Some (($($B ,)*)), _ => None , }}}); }
+macro_rules! __ra_macro_fixture249 {($iter : ty =>$item : ty , impl $($args : tt )* )=>{ delegate_iterator ! {$iter =>$item , impl $($args )* } impl $($args )* IndexedParallelIterator for $iter { fn drive < C > ( self , consumer : C )-> C :: Result where C : Consumer < Self :: Item > { self . inner . drive ( consumer )} fn len (& self )-> usize { self . inner . len ()} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item > { self . inner . with_producer ( callback )}}}}
+macro_rules! __ra_macro_fixture250 {($t : ty =>$iter : ident <$($i : tt ),*>, impl $($args : tt )*)=>{ impl $($args )* IntoParallelIterator for $t { type Item = <$t as IntoIterator >:: Item ; type Iter = $iter <$($i ),*>; fn into_par_iter ( self )-> Self :: Iter { use std :: iter :: FromIterator ; $iter { inner : Vec :: from_iter ( self ). into_par_iter ()}}}}; }
+macro_rules! __ra_macro_fixture251 {($iter : ty =>$item : ty , impl $($args : tt )* )=>{ impl $($args )* ParallelIterator for $iter { type Item = $item ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item > { self . inner . drive_unindexed ( consumer )} fn opt_len (& self )-> Option < usize > { self . inner . opt_len ()}}}}
+macro_rules! __ra_macro_fixture252 {($($Tuple : ident {$(($idx : tt )-> $T : ident )+ })+)=>{$(impl <$($T , )+> IntoParallelIterator for ($($T , )+) where $($T : IntoParallelIterator , $T :: Iter : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); type Iter = MultiZip < ($($T :: Iter , )+)>; fn into_par_iter ( self )-> Self :: Iter { MultiZip { tuple : ($(self .$idx . into_par_iter (), )+ ), }}} impl < 'a , $($T , )+> IntoParallelIterator for & 'a ($($T , )+) where $($T : IntoParallelRefIterator < 'a >, $T :: Iter : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); type Iter = MultiZip < ($($T :: Iter , )+)>; fn into_par_iter ( self )-> Self :: Iter { MultiZip { tuple : ($(self .$idx . par_iter (), )+ ), }}} impl < 'a , $($T , )+> IntoParallelIterator for & 'a mut ($($T , )+) where $($T : IntoParallelRefMutIterator < 'a >, $T :: Iter : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); type Iter = MultiZip < ($($T :: Iter , )+)>; fn into_par_iter ( self )-> Self :: Iter { MultiZip { tuple : ($(self .$idx . par_iter_mut (), )+ ), }}} impl <$($T , )+> ParallelIterator for MultiZip < ($($T , )+)> where $($T : IndexedParallelIterator , )+ { type Item = ($($T :: Item , )+); fn drive_unindexed < CONSUMER > ( self , consumer : CONSUMER )-> CONSUMER :: Result where CONSUMER : UnindexedConsumer < Self :: Item >, { self . drive ( consumer )} fn opt_len (& self )-> Option < usize > { Some ( self . len ())}} impl <$($T , )+> IndexedParallelIterator for MultiZip < ($($T , )+)> where $($T : IndexedParallelIterator , )+ { fn drive < CONSUMER > ( self , consumer : CONSUMER )-> CONSUMER :: Result where CONSUMER : Consumer < Self :: Item >, { reduce ! ($(self . tuple .$idx ),+ => IndexedParallelIterator :: zip ). map ( flatten ! ($($T ),+)). drive ( consumer )} fn len (& self )-> usize { reduce ! ($(self . tuple .$idx . len ()),+ => Ord :: min )} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item >, { reduce ! ($(self . tuple .$idx ),+ => IndexedParallelIterator :: zip ). map ( flatten ! ($($T ),+)). with_producer ( callback )}})+ }}
+macro_rules! __ra_macro_fixture253 {($t : ty )=>{ impl ParallelIterator for Iter <$t > { type Item = $t ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item >, { bridge ( self , consumer )} fn opt_len (& self )-> Option < usize > { Some ( self . len ())}} impl IndexedParallelIterator for Iter <$t > { fn drive < C > ( self , consumer : C )-> C :: Result where C : Consumer < Self :: Item >, { bridge ( self , consumer )} fn len (& self )-> usize { self . range . len ()} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item >, { callback . callback ( IterProducer { range : self . range })}} impl Producer for IterProducer <$t > { type Item = < Range <$t > as Iterator >:: Item ; type IntoIter = Range <$t >; fn into_iter ( self )-> Self :: IntoIter { self . range } fn split_at ( self , index : usize )-> ( Self , Self ){ assert ! ( index <= self . range . len ()); let mid = self . range . start . wrapping_add ( index as $t ); let left = self . range . start .. mid ; let right = mid .. self . range . end ; ( IterProducer { range : left }, IterProducer { range : right })}}}; }
+macro_rules! __ra_macro_fixture254 {($t : ty , $len_t : ty )=>{ impl UnindexedRangeLen <$len_t > for Range <$t > { fn len (& self )-> $len_t { let & Range { start , end }= self ; if end > start { end . wrapping_sub ( start ) as $len_t } else { 0 }}} impl ParallelIterator for Iter <$t > { type Item = $t ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item >, {# [ inline ] fn offset ( start : $t )-> impl Fn ( usize )-> $t { move | i | start . wrapping_add ( i as $t )} if let Some ( len )= self . opt_len (){( 0 .. len ). into_par_iter (). map ( offset ( self . range . start )). drive ( consumer )} else { bridge_unindexed ( IterProducer { range : self . range }, consumer )}} fn opt_len (& self )-> Option < usize > { let len = self . range . len (); if len <= usize :: MAX as $len_t { Some ( len as usize )} else { None }}} impl UnindexedProducer for IterProducer <$t > { type Item = $t ; fn split ( mut self )-> ( Self , Option < Self >){ let index = self . range . len ()/ 2 ; if index > 0 { let mid = self . range . start . wrapping_add ( index as $t ); let right = mid .. self . range . end ; self . range . end = mid ; ( self , Some ( IterProducer { range : right }))} else {( self , None )}} fn fold_with < F > ( self , folder : F )-> F where F : Folder < Self :: Item >, { folder . consume_iter ( self )}}}; }
+macro_rules! __ra_macro_fixture255 {($t : ty )=>{ parallel_range_impl ! {$t } impl IndexedParallelIterator for Iter <$t > { fn drive < C > ( self , consumer : C )-> C :: Result where C : Consumer < Self :: Item >, { convert ! ( self . drive ( consumer ))} fn len (& self )-> usize { self . range . len ()} fn with_producer < CB > ( self , callback : CB )-> CB :: Output where CB : ProducerCallback < Self :: Item >, { convert ! ( self . with_producer ( callback ))}}}; }
+macro_rules! __ra_macro_fixture256 {($t : ty )=>{ impl ParallelIterator for Iter <$t > { type Item = $t ; fn drive_unindexed < C > ( self , consumer : C )-> C :: Result where C : UnindexedConsumer < Self :: Item >, { convert ! ( self . drive_unindexed ( consumer ))} fn opt_len (& self )-> Option < usize > { convert ! ( self . opt_len ())}}}; }
+macro_rules! __ra_macro_fixture257 {($f : ident , $name : ident )=>{# [ test ] fn $name (){ let mut rng = thread_rng (); for len in ( 0 .. 25 ). chain ( 500 .. 501 ){ for & modulus in & [ 5 , 10 , 100 ]{ let dist = Uniform :: new ( 0 , modulus ); for _ in 0 .. 100 { let v : Vec < i32 > = rng . sample_iter (& dist ). take ( len ). collect (); let mut tmp = v . clone (); tmp .$f (| a , b | a . cmp ( b )); assert ! ( tmp . windows ( 2 ). all (| w | w [ 0 ]<= w [ 1 ])); let mut tmp = v . clone (); tmp .$f (| a , b | b . cmp ( a )); assert ! ( tmp . windows ( 2 ). all (| w | w [ 0 ]>= w [ 1 ])); }}} for & len in & [ 1_000 , 10_000 , 100_000 ]{ for & modulus in & [ 5 , 10 , 100 , 10_000 ]{ let dist = Uniform :: new ( 0 , modulus ); let mut v : Vec < i32 > = rng . sample_iter (& dist ). take ( len ). collect (); v .$f (| a , b | a . cmp ( b )); assert ! ( v . windows ( 2 ). all (| w | w [ 0 ]<= w [ 1 ])); }} for & len in & [ 1_000 , 10_000 , 100_000 ]{ let len_dist = Uniform :: new ( 0 , len ); for & modulus in & [ 5 , 10 , 1000 , 50_000 ]{ let dist = Uniform :: new ( 0 , modulus ); let mut v : Vec < i32 > = rng . sample_iter (& dist ). take ( len ). collect (); v . sort (); v . reverse (); for _ in 0 .. 5 { let a = rng . sample (& len_dist ); let b = rng . sample (& len_dist ); if a < b { v [ a .. b ]. reverse (); } else { v . swap ( a , b ); }} v .$f (| a , b | a . cmp ( b )); assert ! ( v . windows ( 2 ). all (| w | w [ 0 ]<= w [ 1 ])); }} let mut v : Vec <_> = ( 0 .. 100 ). collect (); v .$f (|_, _| * [ Less , Equal , Greater ]. choose (& mut thread_rng ()). unwrap ()); v .$f (| a , b | a . cmp ( b )); for i in 0 .. v . len (){ assert_eq ! ( v [ i ], i ); }[ 0i32 ; 0 ].$f (| a , b | a . cmp ( b )); [(); 10 ].$f (| a , b | a . cmp ( b )); [(); 100 ].$f (| a , b | a . cmp ( b )); let mut v = [ 0xDEAD_BEEFu64 ]; v .$f (| a , b | a . cmp ( b )); assert ! ( v == [ 0xDEAD_BEEF ]); }}; }
+macro_rules! __ra_macro_fixture258 {($($name : ident # [$expr : meta ])*)=>{$(# [ doc = " First sanity check that the expression is OK." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " #![deny(unused_must_use)]" ]# [ doc = "" ]# [ doc = " use rayon::prelude::*;" ]# [ doc = "" ]# [ doc = " let v: Vec<_> = (0..100).map(Some).collect();" ]# [ doc = " let _ =" ]# [$expr ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " Now trigger the `must_use`." ]# [ doc = "" ]# [ doc = " ```compile_fail" ]# [ doc = " #![deny(unused_must_use)]" ]# [ doc = "" ]# [ doc = " use rayon::prelude::*;" ]# [ doc = "" ]# [ doc = " let v: Vec<_> = (0..100).map(Some).collect();" ]# [$expr ]# [ doc = " ```" ] mod $name {})*}}
+macro_rules! __ra_macro_fixture259 {($name : ident : $style : expr ; $input : expr =>$result : expr )=>{# [ test ] fn $name (){ assert_eq ! ($style . paint ($input ). to_string (), $result . to_string ()); let mut v = Vec :: new (); $style . paint ($input . as_bytes ()). write_to (& mut v ). unwrap (); assert_eq ! ( v . as_slice (), $result . as_bytes ()); }}; }
+macro_rules! __ra_macro_fixture260 {($name : ident : $first : expr ; $next : expr =>$result : expr )=>{# [ test ] fn $name (){ assert_eq ! ($result , Difference :: between (&$first , &$next )); }}; }
+macro_rules! __ra_macro_fixture261 {($name : ident : $obj : expr =>$result : expr )=>{# [ test ] fn $name (){ assert_eq ! ($result , format ! ( "{:?}" , $obj )); }}; }
+macro_rules! __ra_macro_fixture262 {($name : ident , $ty_int : ty , $max : expr , $bytes : expr , $read : ident , $write : ident )=>{ mod $name {# [ allow ( unused_imports )] use super :: { qc_sized , Wi128 }; use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ] fn big_endian (){ fn prop ( n : $ty_int )-> bool { let mut buf = [ 0 ; 16 ]; BigEndian ::$write (& mut buf , n . clone (), $bytes ); n == BigEndian ::$read (& buf [..$bytes ], $bytes )} qc_sized ( prop as fn ($ty_int )-> bool , $max ); }# [ test ] fn little_endian (){ fn prop ( n : $ty_int )-> bool { let mut buf = [ 0 ; 16 ]; LittleEndian ::$write (& mut buf , n . clone (), $bytes ); n == LittleEndian ::$read (& buf [..$bytes ], $bytes )} qc_sized ( prop as fn ($ty_int )-> bool , $max ); }# [ test ] fn native_endian (){ fn prop ( n : $ty_int )-> bool { let mut buf = [ 0 ; 16 ]; NativeEndian ::$write (& mut buf , n . clone (), $bytes ); n == NativeEndian ::$read (& buf [..$bytes ], $bytes )} qc_sized ( prop as fn ($ty_int )-> bool , $max ); }}}; ($name : ident , $ty_int : ty , $max : expr , $read : ident , $write : ident )=>{ mod $name {# [ allow ( unused_imports )] use super :: { qc_sized , Wi128 }; use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; use core :: mem :: size_of ; # [ test ] fn big_endian (){ fn prop ( n : $ty_int )-> bool { let bytes = size_of ::<$ty_int > (); let mut buf = [ 0 ; 16 ]; BigEndian ::$write (& mut buf [ 16 - bytes ..], n . clone ()); n == BigEndian ::$read (& buf [ 16 - bytes ..])} qc_sized ( prop as fn ($ty_int )-> bool , $max - 1 ); }# [ test ] fn little_endian (){ fn prop ( n : $ty_int )-> bool { let bytes = size_of ::<$ty_int > (); let mut buf = [ 0 ; 16 ]; LittleEndian ::$write (& mut buf [.. bytes ], n . clone ()); n == LittleEndian ::$read (& buf [.. bytes ])} qc_sized ( prop as fn ($ty_int )-> bool , $max - 1 ); }# [ test ] fn native_endian (){ fn prop ( n : $ty_int )-> bool { let bytes = size_of ::<$ty_int > (); let mut buf = [ 0 ; 16 ]; NativeEndian ::$write (& mut buf [.. bytes ], n . clone ()); n == NativeEndian ::$read (& buf [.. bytes ])} qc_sized ( prop as fn ($ty_int )-> bool , $max - 1 ); }}}; }
+macro_rules! __ra_macro_fixture263 {($name : ident , $maximally_small : expr , $zero : expr , $read : ident , $write : ident )=>{ mod $name { use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ]# [ should_panic ] fn read_big_endian (){ let buf = [ 0 ; $maximally_small ]; BigEndian ::$read (& buf ); }# [ test ]# [ should_panic ] fn read_little_endian (){ let buf = [ 0 ; $maximally_small ]; LittleEndian ::$read (& buf ); }# [ test ]# [ should_panic ] fn read_native_endian (){ let buf = [ 0 ; $maximally_small ]; NativeEndian ::$read (& buf ); }# [ test ]# [ should_panic ] fn write_big_endian (){ let mut buf = [ 0 ; $maximally_small ]; BigEndian ::$write (& mut buf , $zero ); }# [ test ]# [ should_panic ] fn write_little_endian (){ let mut buf = [ 0 ; $maximally_small ]; LittleEndian ::$write (& mut buf , $zero ); }# [ test ]# [ should_panic ] fn write_native_endian (){ let mut buf = [ 0 ; $maximally_small ]; NativeEndian ::$write (& mut buf , $zero ); }}}; ($name : ident , $maximally_small : expr , $read : ident )=>{ mod $name { use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ]# [ should_panic ] fn read_big_endian (){ let buf = [ 0 ; $maximally_small ]; BigEndian ::$read (& buf , $maximally_small + 1 ); }# [ test ]# [ should_panic ] fn read_little_endian (){ let buf = [ 0 ; $maximally_small ]; LittleEndian ::$read (& buf , $maximally_small + 1 ); }# [ test ]# [ should_panic ] fn read_native_endian (){ let buf = [ 0 ; $maximally_small ]; NativeEndian ::$read (& buf , $maximally_small + 1 ); }}}; }
+macro_rules! __ra_macro_fixture264 {($name : ident , $read : ident , $write : ident , $num_bytes : expr , $numbers : expr )=>{ mod $name { use crate :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; # [ test ]# [ should_panic ] fn read_big_endian (){ let bytes = [ 0 ; $num_bytes ]; let mut numbers = $numbers ; BigEndian ::$read (& bytes , & mut numbers ); }# [ test ]# [ should_panic ] fn read_little_endian (){ let bytes = [ 0 ; $num_bytes ]; let mut numbers = $numbers ; LittleEndian ::$read (& bytes , & mut numbers ); }# [ test ]# [ should_panic ] fn read_native_endian (){ let bytes = [ 0 ; $num_bytes ]; let mut numbers = $numbers ; NativeEndian ::$read (& bytes , & mut numbers ); }# [ test ]# [ should_panic ] fn write_big_endian (){ let mut bytes = [ 0 ; $num_bytes ]; let numbers = $numbers ; BigEndian ::$write (& numbers , & mut bytes ); }# [ test ]# [ should_panic ] fn write_little_endian (){ let mut bytes = [ 0 ; $num_bytes ]; let numbers = $numbers ; LittleEndian ::$write (& numbers , & mut bytes ); }# [ test ]# [ should_panic ] fn write_native_endian (){ let mut bytes = [ 0 ; $num_bytes ]; let numbers = $numbers ; NativeEndian ::$write (& numbers , & mut bytes ); }}}; }
+macro_rules! __ra_macro_fixture265 {($name : ident , $which : ident , $re : expr )=>{ test_lit ! ($name , $which , $re ,); }; ($name : ident , $which : ident , $re : expr , $($lit : expr ),*)=>{# [ test ] fn $name (){ let expr = ParserBuilder :: new (). build (). parse ($re ). unwrap (); let lits = Literals ::$which (& expr ); assert_lit_eq ! ( Unicode , lits , $($lit ),*); let expr = ParserBuilder :: new (). allow_invalid_utf8 ( true ). unicode ( false ). build (). parse ($re ). unwrap (); let lits = Literals ::$which (& expr ); assert_lit_eq ! ( Bytes , lits , $($lit ),*); }}; }
+macro_rules! __ra_macro_fixture266 {($name : ident , $which : ident , $re : expr )=>{ test_exhausted ! ($name , $which , $re ,); }; ($name : ident , $which : ident , $re : expr , $($lit : expr ),*)=>{# [ test ] fn $name (){ let expr = ParserBuilder :: new (). build (). parse ($re ). unwrap (); let mut lits = Literals :: empty (); lits . set_limit_size ( 20 ). set_limit_class ( 10 ); $which (& mut lits , & expr ); assert_lit_eq ! ( Unicode , lits , $($lit ),*); let expr = ParserBuilder :: new (). allow_invalid_utf8 ( true ). unicode ( false ). build (). parse ($re ). unwrap (); let mut lits = Literals :: empty (); lits . set_limit_size ( 20 ). set_limit_class ( 10 ); $which (& mut lits , & expr ); assert_lit_eq ! ( Bytes , lits , $($lit ),*); }}; }
+macro_rules! __ra_macro_fixture267 {($name : ident , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| ul | { let cut = ul . is_cut (); Literal { v : ul . v . into_bytes (), cut : cut }}). collect (); let lits = create_lits ( given ); let got = lits . unambiguous_prefixes (); assert_eq ! ($expected , escape_lits ( got . literals ())); }}; }
+macro_rules! __ra_macro_fixture268 {($name : ident , $trim : expr , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| ul | { let cut = ul . is_cut (); Literal { v : ul . v . into_bytes (), cut : cut }}). collect (); let lits = create_lits ( given ); let got = lits . trim_suffix ($trim ). unwrap (); assert_eq ! ($expected , escape_lits ( got . literals ())); }}; }
+macro_rules! __ra_macro_fixture269 {($name : ident , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| s : & str | Literal { v : s . to_owned (). into_bytes (), cut : false , }). collect (); let lits = create_lits ( given ); let got = lits . longest_common_prefix (); assert_eq ! ($expected , escape_bytes ( got )); }}; }
+macro_rules! __ra_macro_fixture270 {($name : ident , $given : expr , $expected : expr )=>{# [ test ] fn $name (){ let given : Vec < Literal > = $given . into_iter (). map (| s : & str | Literal { v : s . to_owned (). into_bytes (), cut : false , }). collect (); let lits = create_lits ( given ); let got = lits . longest_common_suffix (); assert_eq ! ($expected , escape_bytes ( got )); }}; }
+macro_rules! __ra_macro_fixture271 {($name : ident , $text : expr )=>{# [ test ] fn $name (){ assert_eq ! ( None , find_cap_ref ($text . as_bytes ())); }}; ($name : ident , $text : expr , $capref : expr )=>{# [ test ] fn $name (){ assert_eq ! ( Some ($capref ), find_cap_ref ($text . as_bytes ())); }}; }
+macro_rules! __ra_macro_fixture272 {($name : ident , $regex_mod : ident , $only_utf8 : expr )=>{ pub mod $name { use super :: RegexOptions ; use error :: Error ; use exec :: ExecBuilder ; use $regex_mod :: Regex ; # [ doc = " A configurable builder for a regular expression." ]# [ doc = "" ]# [ doc = " A builder can be used to configure how the regex is built, for example, by" ]# [ doc = " setting the default flags (which can be overridden in the expression" ]# [ doc = " itself) or setting various limits." ]# [ derive ( Debug )] pub struct RegexBuilder ( RegexOptions ); impl RegexBuilder {# [ doc = " Create a new regular expression builder with the given pattern." ]# [ doc = "" ]# [ doc = " If the pattern is invalid, then an error will be returned when" ]# [ doc = " `build` is called." ] pub fn new ( pattern : & str )-> RegexBuilder { let mut builder = RegexBuilder ( RegexOptions :: default ()); builder . 0 . pats . push ( pattern . to_owned ()); builder }# [ doc = " Consume the builder and compile the regular expression." ]# [ doc = "" ]# [ doc = " Note that calling `as_str` on the resulting `Regex` will produce the" ]# [ doc = " pattern given to `new` verbatim. Notably, it will not incorporate any" ]# [ doc = " of the flags set on this builder." ] pub fn build (& self )-> Result < Regex , Error > { ExecBuilder :: new_options ( self . 0 . clone ()). only_utf8 ($only_utf8 ). build (). map ( Regex :: from )}# [ doc = " Set the value for the case insensitive (`i`) flag." ]# [ doc = "" ]# [ doc = " When enabled, letters in the pattern will match both upper case and" ]# [ doc = " lower case variants." ] pub fn case_insensitive (& mut self , yes : bool , )-> & mut RegexBuilder { self . 0 . case_insensitive = yes ; self }# [ doc = " Set the value for the multi-line matching (`m`) flag." ]# [ doc = "" ]# [ doc = " When enabled, `^` matches the beginning of lines and `$` matches the" ]# [ doc = " end of lines." ]# [ doc = "" ]# [ doc = " By default, they match beginning/end of the input." ] pub fn multi_line (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . multi_line = yes ; self }# [ doc = " Set the value for the any character (`s`) flag, where in `.` matches" ]# [ doc = " anything when `s` is set and matches anything except for new line when" ]# [ doc = " it is not set (the default)." ]# [ doc = "" ]# [ doc = " N.B. \\\"matches anything\\\" means \\\"any byte\\\" when Unicode is disabled and" ]# [ doc = " means \\\"any valid UTF-8 encoding of any Unicode scalar value\\\" when" ]# [ doc = " Unicode is enabled." ] pub fn dot_matches_new_line (& mut self , yes : bool , )-> & mut RegexBuilder { self . 0 . dot_matches_new_line = yes ; self }# [ doc = " Set the value for the greedy swap (`U`) flag." ]# [ doc = "" ]# [ doc = " When enabled, a pattern like `a*` is lazy (tries to find shortest" ]# [ doc = " match) and `a*?` is greedy (tries to find longest match)." ]# [ doc = "" ]# [ doc = " By default, `a*` is greedy and `a*?` is lazy." ] pub fn swap_greed (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . swap_greed = yes ; self }# [ doc = " Set the value for the ignore whitespace (`x`) flag." ]# [ doc = "" ]# [ doc = " When enabled, whitespace such as new lines and spaces will be ignored" ]# [ doc = " between expressions of the pattern, and `#` can be used to start a" ]# [ doc = " comment until the next new line." ] pub fn ignore_whitespace (& mut self , yes : bool , )-> & mut RegexBuilder { self . 0 . 
ignore_whitespace = yes ; self }# [ doc = " Set the value for the Unicode (`u`) flag." ]# [ doc = "" ]# [ doc = " Enabled by default. When disabled, character classes such as `\\\\w` only" ]# [ doc = " match ASCII word characters instead of all Unicode word characters." ] pub fn unicode (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . unicode = yes ; self }# [ doc = " Whether to support octal syntax or not." ]# [ doc = "" ]# [ doc = " Octal syntax is a little-known way of uttering Unicode codepoints in" ]# [ doc = " a regular expression. For example, `a`, `\\\\x61`, `\\\\u0061` and" ]# [ doc = " `\\\\141` are all equivalent regular expressions, where the last example" ]# [ doc = " shows octal syntax." ]# [ doc = "" ]# [ doc = " While supporting octal syntax isn\\\'t in and of itself a problem, it does" ]# [ doc = " make good error messages harder. That is, in PCRE based regex engines," ]# [ doc = " syntax like `\\\\0` invokes a backreference, which is explicitly" ]# [ doc = " unsupported in Rust\\\'s regex engine. However, many users expect it to" ]# [ doc = " be supported. Therefore, when octal support is disabled, the error" ]# [ doc = " message will explicitly mention that backreferences aren\\\'t supported." ]# [ doc = "" ]# [ doc = " Octal syntax is disabled by default." ] pub fn octal (& mut self , yes : bool )-> & mut RegexBuilder { self . 0 . octal = yes ; self }# [ doc = " Set the approximate size limit of the compiled regular expression." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes occupied by a single" ]# [ doc = " compiled program. If the program exceeds this number, then a" ]# [ doc = " compilation error is returned." ] pub fn size_limit (& mut self , limit : usize , )-> & mut RegexBuilder { self . 0 . size_limit = limit ; self }# [ doc = " Set the approximate size of the cache used by the DFA." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes that the DFA will" ]# [ doc = " use while searching." ]# [ doc = "" ]# [ doc = " Note that this is a *per thread* limit. There is no way to set a global" ]# [ doc = " limit. In particular, if a regex is used from multiple threads" ]# [ doc = " simultaneously, then each thread may use up to the number of bytes" ]# [ doc = " specified here." ] pub fn dfa_size_limit (& mut self , limit : usize , )-> & mut RegexBuilder { self . 0 . dfa_size_limit = limit ; self }# [ doc = " Set the nesting limit for this parser." ]# [ doc = "" ]# [ doc = " The nesting limit controls how deep the abstract syntax tree is allowed" ]# [ doc = " to be. If the AST exceeds the given limit (e.g., with too many nested" ]# [ doc = " groups), then an error is returned by the parser." ]# [ doc = "" ]# [ doc = " The purpose of this limit is to act as a heuristic to prevent stack" ]# [ doc = " overflow for consumers that do structural induction on an `Ast` using" ]# [ doc = " explicit recursion. While this crate never does this (instead using" ]# [ doc = " constant stack space and moving the call stack to the heap), other" ]# [ doc = " crates may." ]# [ doc = "" ]# [ doc = " This limit is not checked until the entire Ast is parsed. Therefore," ]# [ doc = " if callers want to put a limit on the amount of heap space used, then" ]# [ doc = " they should impose a limit on the length, in bytes, of the concrete" ]# [ doc = " pattern string. 
In particular, this is viable since this parser" ]# [ doc = " implementation will limit itself to heap space proportional to the" ]# [ doc = " length of the pattern string." ]# [ doc = "" ]# [ doc = " Note that a nest limit of `0` will return a nest limit error for most" ]# [ doc = " patterns but not all. For example, a nest limit of `0` permits `a` but" ]# [ doc = " not `ab`, since `ab` requires a concatenation, which results in a nest" ]# [ doc = " depth of `1`. In general, a nest limit is not something that manifests" ]# [ doc = " in an obvious way in the concrete syntax, therefore, it should not be" ]# [ doc = " used in a granular way." ] pub fn nest_limit (& mut self , limit : u32 )-> & mut RegexBuilder { self . 0 . nest_limit = limit ; self }}}}; }
+macro_rules! __ra_macro_fixture273 {($name : ident , $regex_mod : ident , $only_utf8 : expr )=>{ pub mod $name { use super :: RegexOptions ; use error :: Error ; use exec :: ExecBuilder ; use re_set ::$regex_mod :: RegexSet ; # [ doc = " A configurable builder for a set of regular expressions." ]# [ doc = "" ]# [ doc = " A builder can be used to configure how the regexes are built, for example," ]# [ doc = " by setting the default flags (which can be overridden in the expression" ]# [ doc = " itself) or setting various limits." ]# [ derive ( Debug )] pub struct RegexSetBuilder ( RegexOptions ); impl RegexSetBuilder {# [ doc = " Create a new regular expression builder with the given pattern." ]# [ doc = "" ]# [ doc = " If the pattern is invalid, then an error will be returned when" ]# [ doc = " `build` is called." ] pub fn new < I , S > ( patterns : I )-> RegexSetBuilder where S : AsRef < str >, I : IntoIterator < Item = S >, { let mut builder = RegexSetBuilder ( RegexOptions :: default ()); for pat in patterns { builder . 0 . pats . push ( pat . as_ref (). to_owned ()); } builder }# [ doc = " Consume the builder and compile the regular expressions into a set." ] pub fn build (& self )-> Result < RegexSet , Error > { ExecBuilder :: new_options ( self . 0 . clone ()). only_utf8 ($only_utf8 ). build (). map ( RegexSet :: from )}# [ doc = " Set the value for the case insensitive (`i`) flag." ] pub fn case_insensitive (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . case_insensitive = yes ; self }# [ doc = " Set the value for the multi-line matching (`m`) flag." ] pub fn multi_line (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . multi_line = yes ; self }# [ doc = " Set the value for the any character (`s`) flag, where in `.` matches" ]# [ doc = " anything when `s` is set and matches anything except for new line when" ]# [ doc = " it is not set (the default)." ]# [ doc = "" ]# [ doc = " N.B. \\\"matches anything\\\" means \\\"any byte\\\" for `regex::bytes::RegexSet`" ]# [ doc = " expressions and means \\\"any Unicode scalar value\\\" for `regex::RegexSet`" ]# [ doc = " expressions." ] pub fn dot_matches_new_line (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . dot_matches_new_line = yes ; self }# [ doc = " Set the value for the greedy swap (`U`) flag." ] pub fn swap_greed (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . swap_greed = yes ; self }# [ doc = " Set the value for the ignore whitespace (`x`) flag." ] pub fn ignore_whitespace (& mut self , yes : bool , )-> & mut RegexSetBuilder { self . 0 . ignore_whitespace = yes ; self }# [ doc = " Set the value for the Unicode (`u`) flag." ] pub fn unicode (& mut self , yes : bool )-> & mut RegexSetBuilder { self . 0 . unicode = yes ; self }# [ doc = " Whether to support octal syntax or not." ]# [ doc = "" ]# [ doc = " Octal syntax is a little-known way of uttering Unicode codepoints in" ]# [ doc = " a regular expression. For example, `a`, `\\\\x61`, `\\\\u0061` and" ]# [ doc = " `\\\\141` are all equivalent regular expressions, where the last example" ]# [ doc = " shows octal syntax." ]# [ doc = "" ]# [ doc = " While supporting octal syntax isn\\\'t in and of itself a problem, it does" ]# [ doc = " make good error messages harder. That is, in PCRE based regex engines," ]# [ doc = " syntax like `\\\\0` invokes a backreference, which is explicitly" ]# [ doc = " unsupported in Rust\\\'s regex engine. However, many users expect it to" ]# [ doc = " be supported. 
Therefore, when octal support is disabled, the error" ]# [ doc = " message will explicitly mention that backreferences aren\\\'t supported." ]# [ doc = "" ]# [ doc = " Octal syntax is disabled by default." ] pub fn octal (& mut self , yes : bool )-> & mut RegexSetBuilder { self . 0 . octal = yes ; self }# [ doc = " Set the approximate size limit of the compiled regular expression." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes occupied by a single" ]# [ doc = " compiled program. If the program exceeds this number, then a" ]# [ doc = " compilation error is returned." ] pub fn size_limit (& mut self , limit : usize , )-> & mut RegexSetBuilder { self . 0 . size_limit = limit ; self }# [ doc = " Set the approximate size of the cache used by the DFA." ]# [ doc = "" ]# [ doc = " This roughly corresponds to the number of bytes that the DFA will" ]# [ doc = " use while searching." ]# [ doc = "" ]# [ doc = " Note that this is a *per thread* limit. There is no way to set a global" ]# [ doc = " limit. In particular, if a regex is used from multiple threads" ]# [ doc = " simultaneously, then each thread may use up to the number of bytes" ]# [ doc = " specified here." ] pub fn dfa_size_limit (& mut self , limit : usize , )-> & mut RegexSetBuilder { self . 0 . dfa_size_limit = limit ; self }# [ doc = " Set the nesting limit for this parser." ]# [ doc = "" ]# [ doc = " The nesting limit controls how deep the abstract syntax tree is allowed" ]# [ doc = " to be. If the AST exceeds the given limit (e.g., with too many nested" ]# [ doc = " groups), then an error is returned by the parser." ]# [ doc = "" ]# [ doc = " The purpose of this limit is to act as a heuristic to prevent stack" ]# [ doc = " overflow for consumers that do structural induction on an `Ast` using" ]# [ doc = " explicit recursion. While this crate never does this (instead using" ]# [ doc = " constant stack space and moving the call stack to the heap), other" ]# [ doc = " crates may." ]# [ doc = "" ]# [ doc = " This limit is not checked until the entire Ast is parsed. Therefore," ]# [ doc = " if callers want to put a limit on the amount of heap space used, then" ]# [ doc = " they should impose a limit on the length, in bytes, of the concrete" ]# [ doc = " pattern string. In particular, this is viable since this parser" ]# [ doc = " implementation will limit itself to heap space proportional to the" ]# [ doc = " length of the pattern string." ]# [ doc = "" ]# [ doc = " Note that a nest limit of `0` will return a nest limit error for most" ]# [ doc = " patterns but not all. For example, a nest limit of `0` permits `a` but" ]# [ doc = " not `ab`, since `ab` requires a concatenation, which results in a nest" ]# [ doc = " depth of `1`. In general, a nest limit is not something that manifests" ]# [ doc = " in an obvious way in the concrete syntax, therefore, it should not be" ]# [ doc = " used in a granular way." ] pub fn nest_limit (& mut self , limit : u32 , )-> & mut RegexSetBuilder { self . 0 . nest_limit = limit ; self }}}}; }
+macro_rules! __ra_macro_fixture274 {($name : ident , $builder_mod : ident , $text_ty : ty , $as_bytes : expr , $(# [$doc_regexset_example : meta ])* )=>{ pub mod $name { use std :: fmt ; use std :: iter ; use std :: slice ; use std :: vec ; use error :: Error ; use exec :: Exec ; use re_builder ::$builder_mod :: RegexSetBuilder ; use re_trait :: RegularExpression ; # [ doc = " Match multiple (possibly overlapping) regular expressions in a single scan." ]# [ doc = "" ]# [ doc = " A regex set corresponds to the union of two or more regular expressions." ]# [ doc = " That is, a regex set will match text where at least one of its" ]# [ doc = " constituent regular expressions matches. A regex set as its formulated here" ]# [ doc = " provides a touch more power: it will also report *which* regular" ]# [ doc = " expressions in the set match. Indeed, this is the key difference between" ]# [ doc = " regex sets and a single `Regex` with many alternates, since only one" ]# [ doc = " alternate can match at a time." ]# [ doc = "" ]# [ doc = " For example, consider regular expressions to match email addresses and" ]# [ doc = " domains: `[a-z]+@[a-z]+\\\\.(com|org|net)` and `[a-z]+\\\\.(com|org|net)`. If a" ]# [ doc = " regex set is constructed from those regexes, then searching the text" ]# [ doc = " `foo@example.com` will report both regexes as matching. Of course, one" ]# [ doc = " could accomplish this by compiling each regex on its own and doing two" ]# [ doc = " searches over the text. The key advantage of using a regex set is that it" ]# [ doc = " will report the matching regexes using a *single pass through the text*." ]# [ doc = " If one has hundreds or thousands of regexes to match repeatedly (like a URL" ]# [ doc = " router for a complex web application or a user agent matcher), then a regex" ]# [ doc = " set can realize huge performance gains." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " This shows how the above two regexes (for matching email addresses and" ]# [ doc = " domains) might work:" ]# [ doc = "" ]$(# [$doc_regexset_example ])* # [ doc = "" ]# [ doc = " Note that it would be possible to adapt the above example to using `Regex`" ]# [ doc = " with an expression like:" ]# [ doc = "" ]# [ doc = " ```ignore" ]# [ doc = " (?P<email>[a-z]+@(?P<email_domain>[a-z]+[.](com|org|net)))|(?P<domain>[a-z]+[.](com|org|net))" ]# [ doc = " ```" ]# [ doc = "" ]# [ doc = " After a match, one could then inspect the capture groups to figure out" ]# [ doc = " which alternates matched. The problem is that it is hard to make this" ]# [ doc = " approach scale when there are many regexes since the overlap between each" ]# [ doc = " alternate isn\\\'t always obvious to reason about." ]# [ doc = "" ]# [ doc = " # Limitations" ]# [ doc = "" ]# [ doc = " Regex sets are limited to answering the following two questions:" ]# [ doc = "" ]# [ doc = " 1. Does any regex in the set match?" ]# [ doc = " 2. If so, which regexes in the set match?" ]# [ doc = "" ]# [ doc = " As with the main `Regex` type, it is cheaper to ask (1) instead of (2)" ]# [ doc = " since the matching engines can stop after the first match is found." ]# [ doc = "" ]# [ doc = " Other features like finding the location of successive matches or their" ]# [ doc = " sub-captures aren\\\'t supported. If you need this functionality, the" ]# [ doc = " recommended approach is to compile each regex in the set independently and" ]# [ doc = " selectively match them based on which regexes in the set matched." 
]# [ doc = "" ]# [ doc = " # Performance" ]# [ doc = "" ]# [ doc = " A `RegexSet` has the same performance characteristics as `Regex`. Namely," ]# [ doc = " search takes `O(mn)` time, where `m` is proportional to the size of the" ]# [ doc = " regex set and `n` is proportional to the length of the search text." ]# [ derive ( Clone )] pub struct RegexSet ( Exec ); impl RegexSet {# [ doc = " Create a new regex set with the given regular expressions." ]# [ doc = "" ]# [ doc = " This takes an iterator of `S`, where `S` is something that can produce" ]# [ doc = " a `&str`. If any of the strings in the iterator are not valid regular" ]# [ doc = " expressions, then an error is returned." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " Create a new regex set from an iterator of strings:" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[r\\\"\\\\w+\\\", r\\\"\\\\d+\\\"]).unwrap();" ]# [ doc = " assert!(set.is_match(\\\"foo\\\"));" ]# [ doc = " ```" ] pub fn new < I , S > ( exprs : I )-> Result < RegexSet , Error > where S : AsRef < str >, I : IntoIterator < Item = S >{ RegexSetBuilder :: new ( exprs ). build ()}# [ doc = " Create a new empty regex set." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::empty();" ]# [ doc = " assert!(set.is_empty());" ]# [ doc = " ```" ] pub fn empty ()-> RegexSet { RegexSetBuilder :: new (& [ "" ; 0 ]). build (). unwrap ()}# [ doc = " Returns true if and only if one of the regexes in this set matches" ]# [ doc = " the text given." ]# [ doc = "" ]# [ doc = " This method should be preferred if you only need to test whether any" ]# [ doc = " of the regexes in the set should match, but don\\\'t care about *which*" ]# [ doc = " regexes matched. This is because the underlying matching engine will" ]# [ doc = " quit immediately after seeing the first match instead of continuing to" ]# [ doc = " find all matches." ]# [ doc = "" ]# [ doc = " Note that as with searches using `Regex`, the expression is unanchored" ]# [ doc = " by default. That is, if the regex does not start with `^` or `\\\\A`, or" ]# [ doc = " end with `$` or `\\\\z`, then it is permitted to match anywhere in the" ]# [ doc = " text." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " Tests whether a set matches some text:" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[r\\\"\\\\w+\\\", r\\\"\\\\d+\\\"]).unwrap();" ]# [ doc = " assert!(set.is_match(\\\"foo\\\"));" ]# [ doc = " assert!(!set.is_match(\\\"\\u{2603}\\\"));" ]# [ doc = " ```" ] pub fn is_match (& self , text : $text_ty )-> bool { self . is_match_at ( text , 0 )}# [ doc = " Returns the same as is_match, but starts the search at the given" ]# [ doc = " offset." ]# [ doc = "" ]# [ doc = " The significance of the starting point is that it takes the surrounding" ]# [ doc = " context into consideration. For example, the `\\\\A` anchor can only" ]# [ doc = " match when `start == 0`." ]# [ doc ( hidden )] pub fn is_match_at (& self , text : $text_ty , start : usize )-> bool { self . 0 . searcher (). is_match_at ($as_bytes ( text ), start )}# [ doc = " Returns the set of regular expressions that match in the given text." ]# [ doc = "" ]# [ doc = " The set returned contains the index of each regular expression that" ]# [ doc = " matches in the given text. 
The index is in correspondence with the" ]# [ doc = " order of regular expressions given to `RegexSet`\\\'s constructor." ]# [ doc = "" ]# [ doc = " The set can also be used to iterate over the matched indices." ]# [ doc = "" ]# [ doc = " Note that as with searches using `Regex`, the expression is unanchored" ]# [ doc = " by default. That is, if the regex does not start with `^` or `\\\\A`, or" ]# [ doc = " end with `$` or `\\\\z`, then it is permitted to match anywhere in the" ]# [ doc = " text." ]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " Tests which regular expressions match the given text:" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[" ]# [ doc = " r\\\"\\\\w+\\\"," ]# [ doc = " r\\\"\\\\d+\\\"," ]# [ doc = " r\\\"\\\\pL+\\\"," ]# [ doc = " r\\\"foo\\\"," ]# [ doc = " r\\\"bar\\\"," ]# [ doc = " r\\\"barfoo\\\"," ]# [ doc = " r\\\"foobar\\\"," ]# [ doc = " ]).unwrap();" ]# [ doc = " let matches: Vec<_> = set.matches(\\\"foobar\\\").into_iter().collect();" ]# [ doc = " assert_eq!(matches, vec![0, 2, 3, 4, 6]);" ]# [ doc = "" ]# [ doc = " // You can also test whether a particular regex matched:" ]# [ doc = " let matches = set.matches(\\\"foobar\\\");" ]# [ doc = " assert!(!matches.matched(5));" ]# [ doc = " assert!(matches.matched(6));" ]# [ doc = " ```" ] pub fn matches (& self , text : $text_ty )-> SetMatches { let mut matches = vec ! [ false ; self . 0 . regex_strings (). len ()]; let any = self . read_matches_at (& mut matches , text , 0 ); SetMatches { matched_any : any , matches : matches , }}# [ doc = " Returns the same as matches, but starts the search at the given" ]# [ doc = " offset and stores the matches into the slice given." ]# [ doc = "" ]# [ doc = " The significance of the starting point is that it takes the surrounding" ]# [ doc = " context into consideration. For example, the `\\\\A` anchor can only" ]# [ doc = " match when `start == 0`." ]# [ doc = "" ]# [ doc = " `matches` must have a length that is at least the number of regexes" ]# [ doc = " in this set." ]# [ doc = "" ]# [ doc = " This method returns true if and only if at least one member of" ]# [ doc = " `matches` is true after executing the set against `text`." ]# [ doc ( hidden )] pub fn read_matches_at (& self , matches : & mut [ bool ], text : $text_ty , start : usize , )-> bool { self . 0 . searcher (). many_matches_at ( matches , $as_bytes ( text ), start )}# [ doc = " Returns the total number of regular expressions in this set." ] pub fn len (& self )-> usize { self . 0 . regex_strings (). len ()}# [ doc = " Returns `true` if this set contains no regular expressions." ] pub fn is_empty (& self )-> bool { self . 0 . regex_strings (). is_empty ()}# [ doc = " Returns the patterns that this set will match on." ]# [ doc = "" ]# [ doc = " This function can be used to determine the pattern for a match. The" ]# [ doc = " slice returned has exactly as many patterns givens to this regex set," ]# [ doc = " and the order of the slice is the same as the order of the patterns" ]# [ doc = " provided to the set." 
]# [ doc = "" ]# [ doc = " # Example" ]# [ doc = "" ]# [ doc = " ```rust" ]# [ doc = " # use regex::RegexSet;" ]# [ doc = " let set = RegexSet::new(&[" ]# [ doc = " r\\\"\\\\w+\\\"," ]# [ doc = " r\\\"\\\\d+\\\"," ]# [ doc = " r\\\"\\\\pL+\\\"," ]# [ doc = " r\\\"foo\\\"," ]# [ doc = " r\\\"bar\\\"," ]# [ doc = " r\\\"barfoo\\\"," ]# [ doc = " r\\\"foobar\\\"," ]# [ doc = " ]).unwrap();" ]# [ doc = " let matches: Vec<_> = set" ]# [ doc = " .matches(\\\"foobar\\\")" ]# [ doc = " .into_iter()" ]# [ doc = " .map(|match_idx| &set.patterns()[match_idx])" ]# [ doc = " .collect();" ]# [ doc = " assert_eq!(matches, vec![r\\\"\\\\w+\\\", r\\\"\\\\pL+\\\", r\\\"foo\\\", r\\\"bar\\\", r\\\"foobar\\\"]);" ]# [ doc = " ```" ] pub fn patterns (& self )-> & [ String ]{ self . 0 . regex_strings ()}}# [ doc = " A set of matches returned by a regex set." ]# [ derive ( Clone , Debug )] pub struct SetMatches { matched_any : bool , matches : Vec < bool >, } impl SetMatches {# [ doc = " Whether this set contains any matches." ] pub fn matched_any (& self )-> bool { self . matched_any }# [ doc = " Whether the regex at the given index matched." ]# [ doc = "" ]# [ doc = " The index for a regex is determined by its insertion order upon the" ]# [ doc = " initial construction of a `RegexSet`, starting at `0`." ]# [ doc = "" ]# [ doc = " # Panics" ]# [ doc = "" ]# [ doc = " If `regex_index` is greater than or equal to `self.len()`." ] pub fn matched (& self , regex_index : usize )-> bool { self . matches [ regex_index ]}# [ doc = " The total number of regexes in the set that created these matches." ] pub fn len (& self )-> usize { self . matches . len ()}# [ doc = " Returns an iterator over indexes in the regex that matched." ]# [ doc = "" ]# [ doc = " This will always produces matches in ascending order of index, where" ]# [ doc = " the index corresponds to the index of the regex that matched with" ]# [ doc = " respect to its position when initially building the set." ] pub fn iter (& self )-> SetMatchesIter { SetMatchesIter ((&* self . matches ). into_iter (). enumerate ())}} impl IntoIterator for SetMatches { type IntoIter = SetMatchesIntoIter ; type Item = usize ; fn into_iter ( self )-> Self :: IntoIter { SetMatchesIntoIter ( self . matches . into_iter (). enumerate ())}} impl < 'a > IntoIterator for & 'a SetMatches { type IntoIter = SetMatchesIter < 'a >; type Item = usize ; fn into_iter ( self )-> Self :: IntoIter { self . iter ()}}# [ doc = " An owned iterator over the set of matches from a regex set." ]# [ doc = "" ]# [ doc = " This will always produces matches in ascending order of index, where the" ]# [ doc = " index corresponds to the index of the regex that matched with respect to" ]# [ doc = " its position when initially building the set." ]# [ derive ( Debug )] pub struct SetMatchesIntoIter ( iter :: Enumerate < vec :: IntoIter < bool >>); impl Iterator for SetMatchesIntoIter { type Item = usize ; fn next (& mut self )-> Option < usize > { loop { match self . 0 . next (){ None => return None , Some ((_, false ))=>{} Some (( i , true ))=> return Some ( i ), }}} fn size_hint (& self )-> ( usize , Option < usize >){ self . 0 . size_hint ()}} impl DoubleEndedIterator for SetMatchesIntoIter { fn next_back (& mut self )-> Option < usize > { loop { match self . 0 . next_back (){ None => return None , Some ((_, false ))=>{} Some (( i , true ))=> return Some ( i ), }}}} impl iter :: FusedIterator for SetMatchesIntoIter {}# [ doc = " A borrowed iterator over the set of matches from a regex set." 
]# [ doc = "" ]# [ doc = " The lifetime `\\\'a` refers to the lifetime of a `SetMatches` value." ]# [ doc = "" ]# [ doc = " This will always produces matches in ascending order of index, where the" ]# [ doc = " index corresponds to the index of the regex that matched with respect to" ]# [ doc = " its position when initially building the set." ]# [ derive ( Clone , Debug )] pub struct SetMatchesIter < 'a > ( iter :: Enumerate < slice :: Iter < 'a , bool >>); impl < 'a > Iterator for SetMatchesIter < 'a > { type Item = usize ; fn next (& mut self )-> Option < usize > { loop { match self . 0 . next (){ None => return None , Some ((_, & false ))=>{} Some (( i , & true ))=> return Some ( i ), }}} fn size_hint (& self )-> ( usize , Option < usize >){ self . 0 . size_hint ()}} impl < 'a > DoubleEndedIterator for SetMatchesIter < 'a > { fn next_back (& mut self )-> Option < usize > { loop { match self . 0 . next_back (){ None => return None , Some ((_, & false ))=>{} Some (( i , & true ))=> return Some ( i ), }}}} impl < 'a > iter :: FusedIterator for SetMatchesIter < 'a > {}# [ doc ( hidden )] impl From < Exec > for RegexSet { fn from ( exec : Exec )-> Self { RegexSet ( exec )}} impl fmt :: Debug for RegexSet { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { write ! ( f , "RegexSet({:?})" , self . 0 . regex_strings ())}}# [ allow ( dead_code )] fn as_bytes_str ( text : & str )-> & [ u8 ]{ text . as_bytes ()}# [ allow ( dead_code )] fn as_bytes_bytes ( text : & [ u8 ])-> & [ u8 ]{ text }}}}
+macro_rules! __ra_macro_fixture275 {($($max_len : expr =>$t : ident ),* as $conv_fn : ident )=>{$(impl_IntegerCommon ! ($max_len , $t ); impl IntegerPrivate < [ u8 ; $max_len ]> for $t {# [ allow ( unused_comparisons )]# [ inline ] fn write_to ( self , buf : & mut [ u8 ; $max_len ])-> & [ u8 ]{ let is_nonnegative = self >= 0 ; let mut n = if is_nonnegative { self as $conv_fn } else {(! ( self as $conv_fn )). wrapping_add ( 1 )}; let mut curr = buf . len () as isize ; let buf_ptr = buf . as_mut_ptr (); let lut_ptr = DEC_DIGITS_LUT . as_ptr (); unsafe { if mem :: size_of ::<$t > ()>= 2 { while n >= 10000 { let rem = ( n % 10000 ) as isize ; n /= 10000 ; let d1 = ( rem / 100 )<< 1 ; let d2 = ( rem % 100 )<< 1 ; curr -= 4 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); ptr :: copy_nonoverlapping ( lut_ptr . offset ( d2 ), buf_ptr . offset ( curr + 2 ), 2 ); }} let mut n = n as isize ; if n >= 100 { let d1 = ( n % 100 )<< 1 ; n /= 100 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } if n < 10 { curr -= 1 ; * buf_ptr . offset ( curr )= ( n as u8 )+ b'0' ; } else { let d1 = n << 1 ; curr -= 2 ; ptr :: copy_nonoverlapping ( lut_ptr . offset ( d1 ), buf_ptr . offset ( curr ), 2 ); } if ! is_nonnegative { curr -= 1 ; * buf_ptr . offset ( curr )= b'-' ; }} let len = buf . len ()- curr as usize ; unsafe { slice :: from_raw_parts ( buf_ptr . offset ( curr ), len )}}})*}; }
+macro_rules! __ra_macro_fixture276 {($max_len : expr , $t : ident )=>{ impl Integer for $t {# [ inline ] fn write ( self , buf : & mut Buffer )-> & str { unsafe { debug_assert ! ($max_len <= I128_MAX_LEN ); let buf = mem :: transmute ::<& mut [ u8 ; I128_MAX_LEN ], & mut [ u8 ; $max_len ]> (& mut buf . bytes , ); let bytes = self . write_to ( buf ); str :: from_utf8_unchecked ( bytes )}}} impl private :: Sealed for $t {}}; }
+macro_rules! __ra_macro_fixture277 {(($name : ident $($generics : tt )*)=>$item : ty )=>{ impl $($generics )* Iterator for $name $($generics )* { type Item = $item ; # [ inline ] fn next (& mut self )-> Option < Self :: Item > { self . iter . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()}} impl $($generics )* DoubleEndedIterator for $name $($generics )* {# [ inline ] fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next_back ()}} impl $($generics )* ExactSizeIterator for $name $($generics )* {# [ inline ] fn len (& self )-> usize { self . iter . len ()}} impl $($generics )* FusedIterator for $name $($generics )* {}}}
+macro_rules! __ra_macro_fixture278 {($($ty : ident )*)=>{$(impl From <$ty > for Value { fn from ( n : $ty )-> Self { Value :: Number ( n . into ())}})* }; }
+macro_rules! __ra_macro_fixture279 {($($eq : ident [$($ty : ty )*])*)=>{$($(impl PartialEq <$ty > for Value { fn eq (& self , other : &$ty )-> bool {$eq ( self , * other as _)}} impl PartialEq < Value > for $ty { fn eq (& self , other : & Value )-> bool {$eq ( other , * self as _)}} impl < 'a > PartialEq <$ty > for & 'a Value { fn eq (& self , other : &$ty )-> bool {$eq (* self , * other as _)}} impl < 'a > PartialEq <$ty > for & 'a mut Value { fn eq (& self , other : &$ty )-> bool {$eq (* self , * other as _)}})*)* }}
+macro_rules! __ra_macro_fixture280 {($($ty : ty ),* )=>{$(impl From <$ty > for Number {# [ inline ] fn from ( u : $ty )-> Self { let n = {# [ cfg ( not ( feature = "arbitrary_precision" ))]{ N :: PosInt ( u as u64 )}# [ cfg ( feature = "arbitrary_precision" )]{ itoa :: Buffer :: new (). format ( u ). to_owned ()}}; Number { n }}})* }; }
+macro_rules! __ra_macro_fixture281 {($($ty : ty ),* )=>{$(impl From <$ty > for Number {# [ inline ] fn from ( i : $ty )-> Self { let n = {# [ cfg ( not ( feature = "arbitrary_precision" ))]{ if i < 0 { N :: NegInt ( i as i64 )} else { N :: PosInt ( i as u64 )}}# [ cfg ( feature = "arbitrary_precision" )]{ itoa :: Buffer :: new (). format ( i ). to_owned ()}}; Number { n }}})* }; }
+macro_rules! __ra_macro_fixture282 (($($size : expr ),+)=>{$(unsafe impl < T > Array for [ T ; $size ]{ type Item = T ; fn size ()-> usize {$size }})+ });
+macro_rules! __ra_macro_fixture283 {($($name : ident ( repeats : $repeats : expr , latches : $latches : expr , delay : $delay : expr , threads : $threads : expr , single_unparks : $single_unparks : expr ); )* )=>{$(# [ test ] fn $name (){ let delay = Duration :: from_micros ($delay ); for _ in 0 ..$repeats { run_parking_test ($latches , delay , $threads , $single_unparks ); }})* }; }
+macro_rules! __ra_macro_fixture284 {($C : ident $P : ident ; $A : ident , $($I : ident ),* ; $($X : ident )*)=>(# [ derive ( Clone , Debug )] pub struct $C < I : Iterator > { item : Option < I :: Item >, iter : I , c : $P < I >, } impl < I : Iterator + Clone > From < I > for $C < I > { fn from ( mut iter : I )-> Self {$C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I : Iterator + Clone > From < I > for $C < Fuse < I >> { fn from ( iter : I )-> Self { let mut iter = iter . fuse (); $C { item : iter . next (), iter : iter . clone (), c : $P :: from ( iter ), }}} impl < I , $A > Iterator for $C < I > where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Item = ($($I ),*); fn next (& mut self )-> Option < Self :: Item > { if let Some (($($X ),*,))= self . c . next (){ let z = self . item . clone (). unwrap (); Some (( z , $($X ),*))} else { self . item = self . iter . next (); self . item . clone (). and_then (| z | { self . c = $P :: from ( self . iter . clone ()); self . c . next (). map (| ($($X ),*,)| ( z , $($X ),*))})}}} impl < I , $A > HasCombination < I > for ($($I ),*) where I : Iterator < Item = $A > + Clone , I :: Item : Clone { type Combination = $C < Fuse < I >>; })}
+macro_rules! __ra_macro_fixture285 (($_A : ident , $_B : ident , )=>(); ($A : ident , $($B : ident ,)*)=>( impl_cons_iter ! ($($B ,)*); # [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> Iterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : Iterator < Item = (($($B ,)*), X )>, { type Item = ($($B ,)* X , ); fn next (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))} fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()} fn fold < Acc , Fold > ( self , accum : Acc , mut f : Fold )-> Acc where Fold : FnMut ( Acc , Self :: Item )-> Acc , { self . iter . fold ( accum , move | acc , (($($B ,)*), x )| f ( acc , ($($B ,)* x , )))}}# [ allow ( non_snake_case )] impl < X , Iter , $($B ),*> DoubleEndedIterator for ConsTuples < Iter , (($($B ,)*), X )> where Iter : DoubleEndedIterator < Item = (($($B ,)*), X )>, { fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next (). map (| (($($B ,)*), x )| ($($B ,)* x , ))}}); );
+macro_rules! __ra_macro_fixture286 {($($fmt_trait : ident )*)=>{$(impl < 'a , I > fmt ::$fmt_trait for Format < 'a , I > where I : Iterator , I :: Item : fmt ::$fmt_trait , { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { self . format ( f , fmt ::$fmt_trait :: fmt )}})* }}
+macro_rules! __ra_macro_fixture287 {([$($typarm : tt )*]$type_ : ty )=>{ impl <$($typarm )*> PeekingNext for $type_ { fn peeking_next < F > (& mut self , accept : F )-> Option < Self :: Item > where F : FnOnce (& Self :: Item )-> bool { let saved_state = self . clone (); if let Some ( r )= self . next (){ if ! accept (& r ){* self = saved_state ; } else { return Some ( r )}} None }}}}
+macro_rules! __ra_macro_fixture288 {()=>(); ($N : expr ; $A : ident ; $($X : ident ),* ; $($Y : ident ),* ; $($Y_rev : ident ),*)=>( impl <$A > TupleCollect for ($($X ),*,){ type Item = $A ; type Buffer = [ Option <$A >; $N - 1 ]; # [ allow ( unused_assignments , unused_mut )] fn collect_from_iter < I > ( iter : I , buf : & mut Self :: Buffer )-> Option < Self > where I : IntoIterator < Item = $A > { let mut iter = iter . into_iter (); $(let mut $Y = None ; )* loop {$($Y = iter . next (); if $Y . is_none (){ break })* return Some (($($Y . unwrap ()),*,))} let mut i = 0 ; let mut s = buf . as_mut (); $(if i < s . len (){ s [ i ]= $Y ; i += 1 ; })* return None ; }# [ allow ( unused_assignments )] fn collect_from_iter_no_buf < I > ( iter : I )-> Option < Self > where I : IntoIterator < Item = $A > { let mut iter = iter . into_iter (); loop {$(let $Y = if let Some ($Y )= iter . next (){$Y } else { break ; }; )* return Some (($($Y ),*,))} return None ; } fn num_items ()-> usize {$N } fn left_shift_push (& mut self , item : $A ){ use std :: mem :: replace ; let & mut ($(ref mut $Y ),*,)= self ; let tmp = item ; $(let tmp = replace ($Y_rev , tmp ); )* drop ( tmp ); }})}
+macro_rules! __ra_macro_fixture289 {($($B : ident ),*)=>(# [ allow ( non_snake_case )] impl <$($B : IntoIterator ),*> From < ($($B ,)*)> for Zip < ($($B :: IntoIter ,)*)> { fn from ( t : ($($B ,)*))-> Self { let ($($B ,)*)= t ; Zip { t : ($($B . into_iter (),)*)}}}# [ allow ( non_snake_case )]# [ allow ( unused_assignments )] impl <$($B ),*> Iterator for Zip < ($($B ,)*)> where $($B : Iterator , )* { type Item = ($($B :: Item ,)*); fn next (& mut self )-> Option < Self :: Item > { let ($(ref mut $B ,)*)= self . t ; $(let $B = match $B . next (){ None => return None , Some ( elt )=> elt }; )* Some (($($B ,)*))} fn size_hint (& self )-> ( usize , Option < usize >){ let sh = (:: std :: usize :: MAX , None ); let ($(ref $B ,)*)= self . t ; $(let sh = size_hint :: min ($B . size_hint (), sh ); )* sh }}# [ allow ( non_snake_case )] impl <$($B ),*> ExactSizeIterator for Zip < ($($B ,)*)> where $($B : ExactSizeIterator , )* {}); }
+macro_rules! __ra_macro_fixture290 {( impl $Op : ident for TextRange by fn $f : ident = $op : tt )=>{ impl $Op <& TextSize > for TextRange { type Output = TextRange ; # [ inline ] fn $f ( self , other : & TextSize )-> TextRange { self $op * other }} impl < T > $Op < T > for & TextRange where TextRange : $Op < T , Output = TextRange >, { type Output = TextRange ; # [ inline ] fn $f ( self , other : T )-> TextRange {* self $op other }}}; }
+macro_rules! __ra_macro_fixture291 {( impl $Op : ident for TextSize by fn $f : ident = $op : tt )=>{ impl $Op < TextSize > for TextSize { type Output = TextSize ; # [ inline ] fn $f ( self , other : TextSize )-> TextSize { TextSize { raw : self . raw $op other . raw }}} impl $Op <& TextSize > for TextSize { type Output = TextSize ; # [ inline ] fn $f ( self , other : & TextSize )-> TextSize { self $op * other }} impl < T > $Op < T > for & TextSize where TextSize : $Op < T , Output = TextSize >, { type Output = TextSize ; # [ inline ] fn $f ( self , other : T )-> TextSize {* self $op other }}}; }
+macro_rules! __ra_macro_fixture292 {($expr : expr )=>{ const _: i32 = 0 / $expr as i32 ; }; }
+macro_rules! __ra_macro_fixture293 {($index_type : ty , )=>(); ($index_type : ty , $($len : expr ,)*)=>($(fix_array_impl ! ($index_type , $len );)* ); }
+macro_rules! __ra_macro_fixture294 {($index_type : ty , $len : expr )=>( unsafe impl < T > Array for [ T ; $len ]{ type Item = T ; type Index = $index_type ; const CAPACITY : usize = $len ; # [ doc ( hidden )] fn as_slice (& self )-> & [ Self :: Item ]{ self }# [ doc ( hidden )] fn as_mut_slice (& mut self )-> & mut [ Self :: Item ]{ self }})}
+macro_rules! __ra_macro_fixture295 {($($variant : ident $(($($sub_variant : ident ),*))?),* for $enum : ident )=>{$(impl From <$variant > for $enum { fn from ( it : $variant )-> $enum {$enum ::$variant ( it )}}$($(impl From <$sub_variant > for $enum { fn from ( it : $sub_variant )-> $enum {$enum ::$variant ($variant ::$sub_variant ( it ))}})*)? )* }}
+macro_rules! __ra_macro_fixture296 {($name : ident )=>{ impl $name { pub ( crate ) fn expand_tt (& self , invocation : & str )-> tt :: Subtree { self . try_expand_tt ( invocation ). unwrap ()} fn try_expand_tt (& self , invocation : & str )-> Result < tt :: Subtree , ExpandError > { let source_file = ast :: SourceFile :: parse ( invocation ). tree (); let macro_invocation = source_file . syntax (). descendants (). find_map ( ast :: MacroCall :: cast ). unwrap (); let ( invocation_tt , _)= ast_to_token_tree (& macro_invocation . token_tree (). unwrap ()). ok_or_else (|| ExpandError :: ConversionError )?; self . rules . expand (& invocation_tt ). result ()}# [ allow ( unused )] fn assert_expand_err (& self , invocation : & str , err : & ExpandError ){ assert_eq ! ( self . try_expand_tt ( invocation ). as_ref (), Err ( err )); }# [ allow ( unused )] fn expand_items (& self , invocation : & str )-> SyntaxNode { let expanded = self . expand_tt ( invocation ); token_tree_to_syntax_node (& expanded , FragmentKind :: Items ). unwrap (). 0 . syntax_node ()}# [ allow ( unused )] fn expand_statements (& self , invocation : & str )-> SyntaxNode { let expanded = self . expand_tt ( invocation ); token_tree_to_syntax_node (& expanded , FragmentKind :: Statements ). unwrap (). 0 . syntax_node ()}# [ allow ( unused )] fn expand_expr (& self , invocation : & str )-> SyntaxNode { let expanded = self . expand_tt ( invocation ); token_tree_to_syntax_node (& expanded , FragmentKind :: Expr ). unwrap (). 0 . syntax_node ()}# [ allow ( unused )] fn assert_expand_tt (& self , invocation : & str , expected : & str ){ let expansion = self . expand_tt ( invocation ); assert_eq ! ( expansion . to_string (), expected ); }# [ allow ( unused )] fn assert_expand (& self , invocation : & str , expected : & str ){ let expansion = self . expand_tt ( invocation ); let actual = format ! ( "{:?}" , expansion ); test_utils :: assert_eq_text ! (& expected . trim (), & actual . trim ()); } fn assert_expand_items (& self , invocation : & str , expected : & str )-> &$name { self . assert_expansion ( FragmentKind :: Items , invocation , expected ); self }# [ allow ( unused )] fn assert_expand_statements (& self , invocation : & str , expected : & str )-> &$name { self . assert_expansion ( FragmentKind :: Statements , invocation , expected ); self } fn assert_expansion (& self , kind : FragmentKind , invocation : & str , expected : & str ){ let expanded = self . expand_tt ( invocation ); assert_eq ! ( expanded . to_string (), expected ); let expected = expected . replace ( "$crate" , "C_C__C" ); let expected = { let wrapped = format ! ( "wrap_macro!( {} )" , expected ); let wrapped = ast :: SourceFile :: parse (& wrapped ); let wrapped = wrapped . tree (). syntax (). descendants (). find_map ( ast :: TokenTree :: cast ). unwrap (); let mut wrapped = ast_to_token_tree (& wrapped ). unwrap (). 0 ; wrapped . delimiter = None ; wrapped }; let expanded_tree = token_tree_to_syntax_node (& expanded , kind ). unwrap (). 0 . syntax_node (); let expanded_tree = debug_dump_ignore_spaces (& expanded_tree ). trim (). to_string (); let expected_tree = token_tree_to_syntax_node (& expected , kind ). unwrap (). 0 . syntax_node (); let expected_tree = debug_dump_ignore_spaces (& expected_tree ). trim (). to_string (); let expected_tree = expected_tree . replace ( "C_C__C" , "$crate" ); assert_eq ! ( expanded_tree , expected_tree , "\nleft:\n{}\nright:\n{}" , expanded_tree , expected_tree , ); }}}; }
+macro_rules! __ra_macro_fixture297 {($($name : ident ( num_producers : $num_producers : expr , num_consumers : $num_consumers : expr , max_queue_size : $max_queue_size : expr , messages_per_producer : $messages_per_producer : expr , notification_style : $notification_style : expr , timeout : $timeout : expr , delay_seconds : $delay_seconds : expr ); )* )=>{$(# [ test ] fn $name (){ let delay = Duration :: from_secs ($delay_seconds ); run_queue_test ($num_producers , $num_consumers , $max_queue_size , $messages_per_producer , $notification_style , $timeout , delay , ); })* }; }
+macro_rules! __ra_macro_fixture298 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_camel_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture299 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_kebab_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture300 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_mixed_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture301 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_shouty_kebab_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture302 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_shouty_snake_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture303 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_snake_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture304 {($t : ident : $s1 : expr =>$s2 : expr )=>{# [ test ] fn $t (){ assert_eq ! ($s1 . to_title_case (), $s2 )}}}
+macro_rules! __ra_macro_fixture305 {($($struct_name : ident ),+ $(,)?)=>{$(unsafe impl < E : Endian > Pod for $struct_name < E > {})+ }}
+macro_rules! __ra_macro_fixture306 {($($struct_name : ident ),+ $(,)?)=>{$(unsafe impl Pod for $struct_name {})+ }}
+macro_rules! __ra_macro_fixture307 {($name : ident , {$($in : tt )* })=>{# [ test ] fn $name (){ syn :: parse_file ( stringify ! ($($in )*)). unwrap (); }}}
+macro_rules! __ra_macro_fixture308 {($name : ident , $op : ident )=>{ fn $name ( sets : Vec < Vec <& str >>)-> Vec < String > { let fsts : Vec < Fst <_>> = sets . into_iter (). map ( fst_set ). collect (); let op : OpBuilder = fsts . iter (). collect (); let mut stream = op .$op (). into_stream (); let mut keys = vec ! []; while let Some (( key , _))= stream . next (){ keys . push ( String :: from_utf8 ( key . to_vec ()). unwrap ()); } keys }}; }
+macro_rules! __ra_macro_fixture309 {($name : ident , $op : ident )=>{ fn $name ( sets : Vec < Vec < (& str , u64 )>>)-> Vec < ( String , u64 )> { let fsts : Vec < Fst <_>> = sets . into_iter (). map ( fst_map ). collect (); let op : OpBuilder = fsts . iter (). collect (); let mut stream = op .$op (). into_stream (); let mut keys = vec ! []; while let Some (( key , outs ))= stream . next (){ let merged = outs . iter (). fold ( 0 , | a , b | a + b . value ); let s = String :: from_utf8 ( key . to_vec ()). unwrap (); keys . push (( s , merged )); } keys }}; }
+macro_rules! __ra_macro_fixture310 {($name : ident , $($s : expr ),+)=>{# [ test ] fn $name (){ let mut items = vec ! [$($s ),*]; let fst = fst_set (& items ); let mut rdr = fst . stream (); items . sort (); items . dedup (); for item in & items { assert_eq ! ( rdr . next (). unwrap (). 0 , item . as_bytes ()); } assert_eq ! ( rdr . next (), None ); for item in & items { assert ! ( fst . get ( item ). is_some ()); }}}}
+macro_rules! __ra_macro_fixture311 {($name : ident , $($s : expr ),+)=>{# [ test ]# [ should_panic ] fn $name (){ let mut bfst = Builder :: memory (); $(bfst . add ($s ). unwrap ();)* }}}
+macro_rules! __ra_macro_fixture312 {($name : ident , $($s : expr , $o : expr ),+)=>{# [ test ] fn $name (){ let fst = fst_map ( vec ! [$(($s , $o )),*]); let mut rdr = fst . stream (); $({let ( s , o )= rdr . next (). unwrap (); assert_eq ! (( s , o . value ()), ($s . as_bytes (), $o )); })* assert_eq ! ( rdr . next (), None ); $({assert_eq ! ( fst . get ($s . as_bytes ()), Some ( Output :: new ($o ))); })* }}}
+macro_rules! __ra_macro_fixture313 {($name : ident , $($s : expr , $o : expr ),+)=>{# [ test ]# [ should_panic ] fn $name (){ let mut bfst = Builder :: memory (); $(bfst . insert ($s , $o ). unwrap ();)* }}}
+macro_rules! __ra_macro_fixture314 {($name : ident , min : $min : expr , max : $max : expr , imin : $imin : expr , imax : $imax : expr , $($s : expr ),* )=>{# [ test ] fn $name (){ let items : Vec <& 'static str > = vec ! [$($s ),*]; let items : Vec <_> = items . into_iter (). enumerate (). map (| ( i , k )| ( k , i as u64 )). collect (); let fst = fst_map ( items . clone ()); let mut rdr = Stream :: new ( fst . as_ref (), AlwaysMatch , $min , $max ); for i in $imin ..$imax { assert_eq ! ( rdr . next (). unwrap (), ( items [ i ]. 0 . as_bytes (), Output :: new ( items [ i ]. 1 )), ); } assert_eq ! ( rdr . next (), None ); }}}
+macro_rules! __ra_macro_fixture315 {($ty : ty , $tag : ident )=>{ impl TryFrom < Response > for $ty { type Error = & 'static str ; fn try_from ( value : Response )-> Result < Self , Self :: Error > { match value { Response ::$tag ( res )=> Ok ( res ), _ => Err ( concat ! ( "Failed to convert response to " , stringify ! ($tag ))), }}}}; }
+macro_rules! __ra_macro_fixture316 {( CloneAny )=>{# [ doc = " A type to emulate dynamic typing." ]# [ doc = "" ]# [ doc = " Every type with no non-`\\\'static` references implements `Any`." ] define ! ( CloneAny remainder ); }; ( Any )=>{# [ doc = " A type to emulate dynamic typing with cloning." ]# [ doc = "" ]# [ doc = " Every type with no non-`\\\'static` references that implements `Clone` implements `Any`." ] define ! ( Any remainder ); }; ($t : ident remainder )=>{# [ doc = " See the [`std::any` documentation](https://doc.rust-lang.org/std/any/index.html) for" ]# [ doc = " more details on `Any` in general." ]# [ doc = "" ]# [ doc = " This trait is not `std::any::Any` but rather a type extending that for this library\\u{2019}s" ]# [ doc = " purposes so that it can be combined with marker traits like " ]# [ doc = " <code><a class=trait title=core::marker::Send" ]# [ doc = " href=http://doc.rust-lang.org/std/marker/trait.Send.html>Send</a></code> and" ]# [ doc = " <code><a class=trait title=core::marker::Sync" ]# [ doc = " href=http://doc.rust-lang.org/std/marker/trait.Sync.html>Sync</a></code>." ]# [ doc = "" ] define ! ($t trait ); }; ( CloneAny trait )=>{# [ doc = " See also [`Any`](trait.Any.html) for a version without the `Clone` requirement." ] pub trait CloneAny : Any + CloneToAny {} impl < T : StdAny + Clone > CloneAny for T {}}; ( Any trait )=>{# [ doc = " See also [`CloneAny`](trait.CloneAny.html) for a cloneable version of this trait." ] pub trait Any : StdAny {} impl < T : StdAny > Any for T {}}; }
+macro_rules! __ra_macro_fixture317 {($base : ident , $(+ $bounds : ident )*)=>{ impl fmt :: Debug for $base $(+ $bounds )* {# [ inline ] fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . pad ( stringify ! ($base $(+ $bounds )*))}} impl UncheckedAnyExt for $base $(+ $bounds )* {# [ inline ] unsafe fn downcast_ref_unchecked < T : 'static > (& self )-> & T {&* ( self as * const Self as * const T )}# [ inline ] unsafe fn downcast_mut_unchecked < T : 'static > (& mut self )-> & mut T {& mut * ( self as * mut Self as * mut T )}# [ inline ] unsafe fn downcast_unchecked < T : 'static > ( self : Box < Self >)-> Box < T > { Box :: from_raw ( Box :: into_raw ( self ) as * mut T )}} impl < T : $base $(+ $bounds )*> IntoBox <$base $(+ $bounds )*> for T {# [ inline ] fn into_box ( self )-> Box <$base $(+ $bounds )*> { Box :: new ( self )}}}}
+macro_rules! __ra_macro_fixture318 {($t : ty , $method : ident )=>{ impl Clone for Box <$t > {# [ inline ] fn clone (& self )-> Box <$t > {(** self ).$method ()}}}}
+macro_rules! __ra_macro_fixture319 {( field : $t : ident .$field : ident ; new ()=>$new : expr ; with_capacity ($with_capacity_arg : ident )=>$with_capacity : expr ; )=>{ impl < A : ? Sized + UncheckedAnyExt > $t < A > {# [ doc = " Create an empty collection." ]# [ inline ] pub fn new ()-> $t < A > {$t {$field : $new , }}# [ doc = " Creates an empty collection with the given initial capacity." ]# [ inline ] pub fn with_capacity ($with_capacity_arg : usize )-> $t < A > {$t {$field : $with_capacity , }}# [ doc = " Returns the number of elements the collection can hold without reallocating." ]# [ inline ] pub fn capacity (& self )-> usize { self .$field . capacity ()}# [ doc = " Reserves capacity for at least `additional` more elements to be inserted" ]# [ doc = " in the collection. The collection may reserve more space to avoid" ]# [ doc = " frequent reallocations." ]# [ doc = "" ]# [ doc = " # Panics" ]# [ doc = "" ]# [ doc = " Panics if the new allocation size overflows `usize`." ]# [ inline ] pub fn reserve (& mut self , additional : usize ){ self .$field . reserve ( additional )}# [ doc = " Shrinks the capacity of the collection as much as possible. It will drop" ]# [ doc = " down as much as possible while maintaining the internal rules" ]# [ doc = " and possibly leaving some space in accordance with the resize policy." ]# [ inline ] pub fn shrink_to_fit (& mut self ){ self .$field . shrink_to_fit ()}# [ doc = " Returns the number of items in the collection." ]# [ inline ] pub fn len (& self )-> usize { self .$field . len ()}# [ doc = " Returns true if there are no items in the collection." ]# [ inline ] pub fn is_empty (& self )-> bool { self .$field . is_empty ()}# [ doc = " Removes all items from the collection. Keeps the allocated memory for reuse." ]# [ inline ] pub fn clear (& mut self ){ self .$field . clear ()}}}}
+macro_rules! __ra_macro_fixture320 {($name : ident , $init : ty )=>{# [ test ] fn $name (){ let mut map = <$init >:: new (); assert_eq ! ( map . insert ( A ( 10 )), None ); assert_eq ! ( map . insert ( B ( 20 )), None ); assert_eq ! ( map . insert ( C ( 30 )), None ); assert_eq ! ( map . insert ( D ( 40 )), None ); assert_eq ! ( map . insert ( E ( 50 )), None ); assert_eq ! ( map . insert ( F ( 60 )), None ); match map . entry ::< A > (){ Entry :: Vacant (_)=> unreachable ! (), Entry :: Occupied ( mut view )=>{ assert_eq ! ( view . get (), & A ( 10 )); assert_eq ! ( view . insert ( A ( 100 )), A ( 10 )); }} assert_eq ! ( map . get ::< A > (). unwrap (), & A ( 100 )); assert_eq ! ( map . len (), 6 ); match map . entry ::< B > (){ Entry :: Vacant (_)=> unreachable ! (), Entry :: Occupied ( mut view )=>{ let v = view . get_mut (); let new_v = B ( v . 0 * 10 ); * v = new_v ; }} assert_eq ! ( map . get ::< B > (). unwrap (), & B ( 200 )); assert_eq ! ( map . len (), 6 ); match map . entry ::< C > (){ Entry :: Vacant (_)=> unreachable ! (), Entry :: Occupied ( view )=>{ assert_eq ! ( view . remove (), C ( 30 )); }} assert_eq ! ( map . get ::< C > (), None ); assert_eq ! ( map . len (), 5 ); match map . entry ::< J > (){ Entry :: Occupied (_)=> unreachable ! (), Entry :: Vacant ( view )=>{ assert_eq ! (* view . insert ( J ( 1000 )), J ( 1000 )); }} assert_eq ! ( map . get ::< J > (). unwrap (), & J ( 1000 )); assert_eq ! ( map . len (), 6 ); map . entry ::< B > (). or_insert ( B ( 71 )). 0 += 1 ; assert_eq ! ( map . get ::< B > (). unwrap (), & B ( 201 )); assert_eq ! ( map . len (), 6 ); map . entry ::< C > (). or_insert ( C ( 300 )). 0 += 1 ; assert_eq ! ( map . get ::< C > (). unwrap (), & C ( 301 )); assert_eq ! ( map . len (), 7 ); }}}
+macro_rules! __ra_macro_fixture321 {($(# [$outer : meta ])* pub struct $BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* const $Flag : ident = $value : expr ; )+ })=>{ __bitflags ! {$(# [$outer ])* ( pub )$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; ($(# [$outer : meta ])* struct $BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* const $Flag : ident = $value : expr ; )+ })=>{ __bitflags ! {$(# [$outer ])* ()$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; ($(# [$outer : meta ])* pub ($($vis : tt )+) struct $BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* const $Flag : ident = $value : expr ; )+ })=>{ __bitflags ! {$(# [$outer ])* ( pub ($($vis )+))$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; }
+macro_rules! __ra_macro_fixture322 {($(# [$outer : meta ])* ($($vis : tt )*)$BitFlags : ident : $T : ty {$($(# [$inner : ident $($args : tt )*])* $Flag : ident = $value : expr ; )+ })=>{$(# [$outer ])* # [ derive ( Copy , PartialEq , Eq , Clone , PartialOrd , Ord , Hash )]$($vis )* struct $BitFlags { bits : $T , } __impl_bitflags ! {$BitFlags : $T {$($(# [$inner $($args )*])* $Flag = $value ; )+ }}}; }
+macro_rules! __ra_macro_fixture323 {($BitFlags : ident : $T : ty {$($(# [$attr : ident $($args : tt )*])* $Flag : ident = $value : expr ; )+ })=>{ impl $crate :: _core :: fmt :: Debug for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {# [ allow ( non_snake_case )] trait __BitFlags {$(# [ inline ] fn $Flag (& self )-> bool { false })+ } impl __BitFlags for $BitFlags {$(__impl_bitflags ! {# [ allow ( deprecated )]# [ inline ]$(? # [$attr $($args )*])* fn $Flag (& self )-> bool { if Self ::$Flag . bits == 0 && self . bits != 0 { false } else { self . bits & Self ::$Flag . bits == Self ::$Flag . bits }}})+ } let mut first = true ; $(if <$BitFlags as __BitFlags >::$Flag ( self ){ if ! first { f . write_str ( " | " )?; } first = false ; f . write_str ( __bitflags_stringify ! ($Flag ))?; })+ let extra_bits = self . bits & !$BitFlags :: all (). bits (); if extra_bits != 0 { if ! first { f . write_str ( " | " )?; } first = false ; f . write_str ( "0x" )?; $crate :: _core :: fmt :: LowerHex :: fmt (& extra_bits , f )?; } if first { f . write_str ( "(empty)" )?; } Ok (())}} impl $crate :: _core :: fmt :: Binary for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: Binary :: fmt (& self . bits , f )}} impl $crate :: _core :: fmt :: Octal for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: Octal :: fmt (& self . bits , f )}} impl $crate :: _core :: fmt :: LowerHex for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: LowerHex :: fmt (& self . bits , f )}} impl $crate :: _core :: fmt :: UpperHex for $BitFlags { fn fmt (& self , f : & mut $crate :: _core :: fmt :: Formatter )-> $crate :: _core :: fmt :: Result {$crate :: _core :: fmt :: UpperHex :: fmt (& self . bits , f )}}# [ allow ( dead_code )] impl $BitFlags {$($(# [$attr $($args )*])* pub const $Flag : $BitFlags = $BitFlags { bits : $value }; )+ __fn_bitflags ! {# [ doc = " Returns an empty set of flags" ]# [ inline ] pub const fn empty ()-> $BitFlags {$BitFlags { bits : 0 }}} __fn_bitflags ! {# [ doc = " Returns the set containing all flags." ]# [ inline ] pub const fn all ()-> $BitFlags {# [ allow ( non_snake_case )] trait __BitFlags {$(const $Flag : $T = 0 ; )+ } impl __BitFlags for $BitFlags {$(__impl_bitflags ! {# [ allow ( deprecated )]$(? # [$attr $($args )*])* const $Flag : $T = Self ::$Flag . bits ; })+ }$BitFlags { bits : $(<$BitFlags as __BitFlags >::$Flag )|+ }}} __fn_bitflags ! {# [ doc = " Returns the raw value of the flags currently stored." ]# [ inline ] pub const fn bits (& self )-> $T { self . bits }}# [ doc = " Convert from underlying bit representation, unless that" ]# [ doc = " representation contains bits that do not correspond to a flag." ]# [ inline ] pub fn from_bits ( bits : $T )-> $crate :: _core :: option :: Option <$BitFlags > { if ( bits & !$BitFlags :: all (). bits ())== 0 {$crate :: _core :: option :: Option :: Some ($BitFlags { bits })} else {$crate :: _core :: option :: Option :: None }} __fn_bitflags ! {# [ doc = " Convert from underlying bit representation, dropping any bits" ]# [ doc = " that do not correspond to flags." ]# [ inline ] pub const fn from_bits_truncate ( bits : $T )-> $BitFlags {$BitFlags { bits : bits & $BitFlags :: all (). bits }}} __fn_bitflags ! {# [ doc = " Convert from underlying bit representation, preserving all" ]# [ doc = " bits (even those not corresponding to a defined flag)." ]# [ inline ] pub const unsafe fn from_bits_unchecked ( bits : $T )-> $BitFlags {$BitFlags { bits }}} __fn_bitflags ! {# [ doc = " Returns `true` if no flags are currently stored." ]# [ inline ] pub const fn is_empty (& self )-> bool { self . bits ()== $BitFlags :: empty (). bits ()}} __fn_bitflags ! {# [ doc = " Returns `true` if all flags are currently set." ]# [ inline ] pub const fn is_all (& self )-> bool { self . bits == $BitFlags :: all (). bits }} __fn_bitflags ! {# [ doc = " Returns `true` if there are flags common to both `self` and `other`." ]# [ inline ] pub const fn intersects (& self , other : $BitFlags )-> bool {!$BitFlags { bits : self . bits & other . bits }. is_empty ()}} __fn_bitflags ! {# [ doc = " Returns `true` all of the flags in `other` are contained within `self`." ]# [ inline ] pub const fn contains (& self , other : $BitFlags )-> bool {( self . bits & other . bits )== other . bits }}# [ doc = " Inserts the specified flags in-place." ]# [ inline ] pub fn insert (& mut self , other : $BitFlags ){ self . bits |= other . bits ; }# [ doc = " Removes the specified flags in-place." ]# [ inline ] pub fn remove (& mut self , other : $BitFlags ){ self . bits &= ! other . bits ; }# [ doc = " Toggles the specified flags in-place." ]# [ inline ] pub fn toggle (& mut self , other : $BitFlags ){ self . bits ^= other . bits ; }# [ doc = " Inserts or removes the specified flags depending on the passed value." ]# [ inline ] pub fn set (& mut self , other : $BitFlags , value : bool ){ if value { self . insert ( other ); } else { self . remove ( other ); }}} impl $crate :: _core :: ops :: BitOr for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the union of the two sets of flags." ]# [ inline ] fn bitor ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits | other . bits }}} impl $crate :: _core :: ops :: BitOrAssign for $BitFlags {# [ doc = " Adds the set of flags." ]# [ inline ] fn bitor_assign (& mut self , other : $BitFlags ){ self . bits |= other . bits ; }} impl $crate :: _core :: ops :: BitXor for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the left flags, but with all the right flags toggled." ]# [ inline ] fn bitxor ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits ^ other . bits }}} impl $crate :: _core :: ops :: BitXorAssign for $BitFlags {# [ doc = " Toggles the set of flags." ]# [ inline ] fn bitxor_assign (& mut self , other : $BitFlags ){ self . bits ^= other . bits ; }} impl $crate :: _core :: ops :: BitAnd for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the intersection between the two sets of flags." ]# [ inline ] fn bitand ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits & other . bits }}} impl $crate :: _core :: ops :: BitAndAssign for $BitFlags {# [ doc = " Disables all flags disabled in the set." ]# [ inline ] fn bitand_assign (& mut self , other : $BitFlags ){ self . bits &= other . bits ; }} impl $crate :: _core :: ops :: Sub for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the set difference of the two sets of flags." ]# [ inline ] fn sub ( self , other : $BitFlags )-> $BitFlags {$BitFlags { bits : self . bits & ! other . bits }}} impl $crate :: _core :: ops :: SubAssign for $BitFlags {# [ doc = " Disables all flags enabled in the set." ]# [ inline ] fn sub_assign (& mut self , other : $BitFlags ){ self . bits &= ! other . bits ; }} impl $crate :: _core :: ops :: Not for $BitFlags { type Output = $BitFlags ; # [ doc = " Returns the complement of this set of flags." ]# [ inline ] fn not ( self )-> $BitFlags {$BitFlags { bits : ! self . bits }& $BitFlags :: all ()}} impl $crate :: _core :: iter :: Extend <$BitFlags > for $BitFlags { fn extend < T : $crate :: _core :: iter :: IntoIterator < Item =$BitFlags >> (& mut self , iterator : T ){ for item in iterator { self . insert ( item )}}} impl $crate :: _core :: iter :: FromIterator <$BitFlags > for $BitFlags { fn from_iter < T : $crate :: _core :: iter :: IntoIterator < Item =$BitFlags >> ( iterator : T )-> $BitFlags { let mut result = Self :: empty (); result . extend ( iterator ); result }}}; ($(# [$filtered : meta ])* ? # [ cfg $($cfgargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* fn $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* # [ cfg $($cfgargs )*]$(? # [$rest $($restargs )*])* fn $($item )* }}; ($(# [$filtered : meta ])* ? # [$next : ident $($nextargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* fn $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* $(? # [$rest $($restargs )*])* fn $($item )* }}; ($(# [$filtered : meta ])* fn $($item : tt )* )=>{$(# [$filtered ])* fn $($item )* }; ($(# [$filtered : meta ])* ? # [ cfg $($cfgargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* const $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* # [ cfg $($cfgargs )*]$(? # [$rest $($restargs )*])* const $($item )* }}; ($(# [$filtered : meta ])* ? # [$next : ident $($nextargs : tt )*]$(? # [$rest : ident $($restargs : tt )*])* const $($item : tt )* )=>{ __impl_bitflags ! {$(# [$filtered ])* $(? # [$rest $($restargs )*])* const $($item )* }}; ($(# [$filtered : meta ])* const $($item : tt )* )=>{$(# [$filtered ])* const $($item )* }; }
+macro_rules! __ra_macro_fixture324 {($($item : item )*)=>{$(# [ cfg ( feature = "os-poll" )]# [ cfg_attr ( docsrs , doc ( cfg ( feature = "os-poll" )))]$item )* }}
+macro_rules! __ra_macro_fixture325 {($($item : item )*)=>{$(# [ cfg ( not ( feature = "os-poll" ))]$item )* }}
+macro_rules! __ra_macro_fixture326 {($($item : item )*)=>{$(# [ cfg ( any ( feature = "net" , all ( unix , feature = "os-ext" )))]# [ cfg_attr ( docsrs , doc ( any ( feature = "net" , all ( unix , feature = "os-ext" ))))]$item )* }}
+macro_rules! __ra_macro_fixture327 {($($item : item )*)=>{$(# [ cfg ( feature = "net" )]# [ cfg_attr ( docsrs , doc ( cfg ( feature = "net" )))]$item )* }}
+macro_rules! __ra_macro_fixture328 {($($item : item )*)=>{$(# [ cfg ( feature = "os-ext" )]# [ cfg_attr ( docsrs , doc ( cfg ( feature = "os-ext" )))]$item )* }}
+macro_rules! __ra_macro_fixture329 {($name : ident , $read : ident , $bytes : expr , $data : expr )=>{ mod $name { use byteorder :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; use test :: black_box as bb ; use test :: Bencher ; const NITER : usize = 100_000 ; # [ bench ] fn read_big_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( BigEndian ::$read (& buf , $bytes )); }}); }# [ bench ] fn read_little_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( LittleEndian ::$read (& buf , $bytes )); }}); }# [ bench ] fn read_native_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( NativeEndian ::$read (& buf , $bytes )); }}); }}}; ($ty : ident , $max : ident , $read : ident , $write : ident , $size : expr , $data : expr )=>{ mod $ty { use byteorder :: { BigEndian , ByteOrder , LittleEndian , NativeEndian , }; use std ::$ty ; use test :: black_box as bb ; use test :: Bencher ; const NITER : usize = 100_000 ; # [ bench ] fn read_big_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( BigEndian ::$read (& buf )); }}); }# [ bench ] fn read_little_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( LittleEndian ::$read (& buf )); }}); }# [ bench ] fn read_native_endian ( b : & mut Bencher ){ let buf = $data ; b . iter (|| { for _ in 0 .. NITER { bb ( NativeEndian ::$read (& buf )); }}); }# [ bench ] fn write_big_endian ( b : & mut Bencher ){ let mut buf = $data ; let n = $ty ::$max ; b . iter (|| { for _ in 0 .. NITER { bb ( BigEndian ::$write (& mut buf , n )); }}); }# [ bench ] fn write_little_endian ( b : & mut Bencher ){ let mut buf = $data ; let n = $ty ::$max ; b . iter (|| { for _ in 0 .. NITER { bb ( LittleEndian ::$write (& mut buf , n )); }}); }# [ bench ] fn write_native_endian ( b : & mut Bencher ){ let mut buf = $data ; let n = $ty ::$max ; b . iter (|| { for _ in 0 .. NITER { bb ( NativeEndian ::$write (& mut buf , n )); }}); }}}; }
+macro_rules! __ra_macro_fixture330 {($name : ident , $numty : ty , $read : ident , $write : ident )=>{ mod $name { use std :: mem :: size_of ; use byteorder :: { BigEndian , ByteOrder , LittleEndian }; use rand :: distributions ; use rand :: { self , Rng }; use test :: Bencher ; # [ bench ] fn read_big_endian ( b : & mut Bencher ){ let mut numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; BigEndian ::$write (& numbers , & mut bytes ); b . bytes = bytes . len () as u64 ; b . iter (|| { BigEndian ::$read (& bytes , & mut numbers ); }); }# [ bench ] fn read_little_endian ( b : & mut Bencher ){ let mut numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; LittleEndian ::$write (& numbers , & mut bytes ); b . bytes = bytes . len () as u64 ; b . iter (|| { LittleEndian ::$read (& bytes , & mut numbers ); }); }# [ bench ] fn write_big_endian ( b : & mut Bencher ){ let numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; b . bytes = bytes . len () as u64 ; b . iter (|| { BigEndian ::$write (& numbers , & mut bytes ); }); }# [ bench ] fn write_little_endian ( b : & mut Bencher ){ let numbers : Vec <$numty > = rand :: thread_rng (). sample_iter (& distributions :: Standard ). take ( 100000 ). collect (); let mut bytes = vec ! [ 0 ; numbers . len ()* size_of ::<$numty > ()]; b . bytes = bytes . len () as u64 ; b . iter (|| { LittleEndian ::$write (& numbers , & mut bytes ); }); }}}; }
+macro_rules! __ra_macro_fixture331 {{$($(#$attr : tt )* fn $fn_name : ident ($($arg : tt )*)-> $ret : ty {$($code : tt )* })*}=>($(# [ test ]$(#$attr )* fn $fn_name (){ fn prop ($($arg )*)-> $ret {$($code )* }:: quickcheck :: quickcheck ( quickcheck ! (@ fn prop []$($arg )*)); })* ); (@ fn $f : ident [$($t : tt )*])=>{$f as fn ($($t ),*)-> _ }; (@ fn $f : ident [$($p : tt )*]: $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )* _]$($tail )*)}; (@ fn $f : ident [$($p : tt )*]$t : tt $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )*]$($tail )*)}; }
+macro_rules! __ra_macro_fixture332 {($from : ty =>$to : ty ; $by : ident )=>( impl < 'a > From <$from > for UniCase <$to > { fn from ( s : $from )-> Self { UniCase :: unicode ( s .$by ())}}); ($from : ty =>$to : ty )=>( from_impl ! ($from =>$to ; into ); )}
+macro_rules! __ra_macro_fixture333 {($to : ty )=>( impl < 'a > Into <$to > for UniCase <$to > { fn into ( self )-> $to { self . into_inner ()}}); }
+macro_rules! __ra_macro_fixture334 {($name : ident , $ty : ident )=>{ fn $name ()-> usize { let mut rng = rand_xorshift :: XorShiftRng :: from_seed ([ 123u8 ; 16 ]); let mut mv = MeanAndVariance :: new (); let mut throwaway = 0 ; for _ in 0 .. SAMPLES { let f = loop { let f = $ty :: from_bits ( rng . gen ()); if f . is_finite (){ break f ; }}; let t1 = std :: time :: SystemTime :: now (); for _ in 0 .. ITERATIONS { throwaway += ryu :: Buffer :: new (). format_finite ( f ). len (); } let duration = t1 . elapsed (). unwrap (); let nanos = duration . as_secs ()* 1_000_000_000 + duration . subsec_nanos () as u64 ; mv . update ( nanos as f64 / ITERATIONS as f64 ); } println ! ( "{:12} {:8.3} {:8.3}" , concat ! ( stringify ! ($name ), ":" ), mv . mean , mv . stddev (), ); throwaway }}; }
+macro_rules! __ra_macro_fixture335 {($(# [$doc : meta ])* pub trait $name : ident $($methods : tt )*)=>{ macro_rules ! $name {($m : ident $extra : tt )=>{$m ! {$extra pub trait $name $($methods )* }}} remove_sections ! {[]$(# [$doc ])* pub trait $name $($methods )* }}}
+macro_rules! __ra_macro_fixture336 {($name : ident <$($typarm : tt ),*> where {$($bounds : tt )* } item : $item : ty , iter : $iter : ty , )=>( pub struct $name <$($typarm ),*> where $($bounds )* { iter : $iter , } impl <$($typarm ),*> Iterator for $name <$($typarm ),*> where $($bounds )* { type Item = $item ; # [ inline ] fn next (& mut self )-> Option < Self :: Item > { self . iter . next ()}# [ inline ] fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()}}); }
+macro_rules! __ra_macro_fixture337 {($($fmt_trait : ident )*)=>{$(impl < 'a , I > fmt ::$fmt_trait for Format < 'a , I > where I : Iterator , I :: Item : fmt ::$fmt_trait , { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { self . format ( f , fmt ::$fmt_trait :: fmt )}})* }}
+macro_rules! __ra_macro_fixture338 {($($t : ty ),*)=>{$(not_zero_impl ! ($t , 0 ); )* }}
+macro_rules! __ra_macro_fixture339 {($name : ident )=>{ impl Clone for $name {# [ inline ] fn clone (& self )-> Self {* self }}}; }
+macro_rules! __ra_macro_fixture340 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]{$($tail : tt )* })=>{$($stack )* { remove_sections_inner ! ([]$($tail )*); }}; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections ! ([$($stack )* $t ]$($tail )*); }; }
+macro_rules! __ra_macro_fixture341 {($t : ty ,$z : expr )=>{ impl Zero for $t { fn zero ()-> Self {$z as $t } fn is_zero (& self )-> bool { self == & Self :: zero ()}}}; }
+macro_rules! __ra_macro_fixture342 {($($ident : ident ),* $(,)?)=>{$(# [ allow ( bad_style )] pub const $ident : super :: Name = super :: Name :: new_inline ( stringify ! ($ident )); )* }; }
+macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; }
+macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn AstDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; }
+macro_rules! __ra_macro_fixture345 {($($ty : ty =>$this : ident $im : block );*)=>{$(impl ToTokenTree for $ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . into (); leaf . into ()}} impl ToTokenTree for &$ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . clone (). into (); leaf . into ()}})* }}
+macro_rules! __ra_macro_fixture346 {($name : ident )=>{ impl $crate :: salsa :: InternKey for $name { fn from_intern_id ( v : $crate :: salsa :: InternId )-> Self {$name ( v )} fn as_intern_id (& self )-> $crate :: salsa :: InternId { self . 0 }}}; }
+macro_rules! __ra_macro_fixture347 {($($var : ident ($t : ty )),+ )=>{$(impl From <$t > for AttrOwner { fn from ( t : $t )-> AttrOwner { AttrOwner ::$var ( t )}})+ }; }
+macro_rules! __ra_macro_fixture348 {($($typ : ident in $fld : ident -> $ast : ty ),+ $(,)? )=>{# [ derive ( Debug , Copy , Clone , Eq , PartialEq , Hash )] pub enum ModItem {$($typ ( FileItemTreeId <$typ >), )+ }$(impl From < FileItemTreeId <$typ >> for ModItem { fn from ( id : FileItemTreeId <$typ >)-> ModItem { ModItem ::$typ ( id )}})+ $(impl ItemTreeNode for $typ { type Source = $ast ; fn ast_id (& self )-> FileAstId < Self :: Source > { self . ast_id } fn lookup ( tree : & ItemTree , index : Idx < Self >)-> & Self {& tree . data ().$fld [ index ]} fn id_from_mod_item ( mod_item : ModItem )-> Option < FileItemTreeId < Self >> { if let ModItem ::$typ ( id )= mod_item { Some ( id )} else { None }} fn id_to_mod_item ( id : FileItemTreeId < Self >)-> ModItem { ModItem ::$typ ( id )}} impl Index < Idx <$typ >> for ItemTree { type Output = $typ ; fn index (& self , index : Idx <$typ >)-> & Self :: Output {& self . data ().$fld [ index ]}})+ }; }
+macro_rules! __ra_macro_fixture349 {($($fld : ident : $t : ty ),+ $(,)? )=>{$(impl Index < Idx <$t >> for ItemTree { type Output = $t ; fn index (& self , index : Idx <$t >)-> & Self :: Output {& self . data ().$fld [ index ]}})+ }; }
+macro_rules! __ra_macro_fixture350 {($e : ident {$($v : ident ($t : ty )),* $(,)? })=>{$(impl From <$t > for $e { fn from ( it : $t )-> $e {$e ::$v ( it )}})* }}
+macro_rules! __ra_macro_fixture351 {($id : ident , $loc : ident , $intern : ident , $lookup : ident )=>{ impl_intern_key ! ($id ); impl Intern for $loc { type ID = $id ; fn intern ( self , db : & dyn db :: DefDatabase )-> $id { db .$intern ( self )}} impl Lookup for $id { type Data = $loc ; fn lookup (& self , db : & dyn db :: DefDatabase )-> $loc { db .$lookup (* self )}}}; }
+macro_rules! __ra_macro_fixture352 {([$derives : ident $($derive_t : tt )*]=>$(# [$($attrs : tt )*])* $inner : path )=>{# [ proc_macro_derive ($derives $($derive_t )*)]# [ allow ( non_snake_case )]$(# [$($attrs )*])* pub fn $derives ( i : $crate :: macros :: TokenStream )-> $crate :: macros :: TokenStream { match $crate :: macros :: parse ::<$crate :: macros :: DeriveInput > ( i ){ Ok ( p )=>{ match $crate :: Structure :: try_new (& p ){ Ok ( s )=>$crate :: MacroResult :: into_stream ($inner ( s )), Err ( e )=> e . to_compile_error (). into (), }} Err ( e )=> e . to_compile_error (). into (), }}}; }
+macro_rules! __ra_macro_fixture353 {($I : ident =>$t : ty )=>{ impl <$I : Interner > Zip <$I > for $t { fn zip_with < 'i , Z : Zipper < 'i , $I >> ( _zipper : & mut Z , _variance : Variance , a : & Self , b : & Self , )-> Fallible < ()> where I : 'i , { if a != b { return Err ( NoSolution ); } Ok (())}}}; }
+macro_rules! __ra_macro_fixture354 {($($n : ident ),*)=>{ impl <$($n : Fold < I >,)* I : Interner > Fold < I > for ($($n ,)*){ type Result = ($($n :: Result ,)*); fn fold_with < 'i > ( self , folder : & mut dyn Folder < 'i , I >, outer_binder : DebruijnIndex )-> Fallible < Self :: Result > where I : 'i , {# [ allow ( non_snake_case )] let ($($n ),*)= self ; Ok (($($n . fold_with ( folder , outer_binder )?,)*))}}}}
+macro_rules! __ra_macro_fixture355 {($t : ty )=>{ impl < I : Interner > $crate :: fold :: Fold < I > for $t { type Result = Self ; fn fold_with < 'i > ( self , _folder : & mut dyn ($crate :: fold :: Folder < 'i , I >), _outer_binder : DebruijnIndex , )-> :: chalk_ir :: Fallible < Self :: Result > where I : 'i , { Ok ( self )}}}; }
+macro_rules! __ra_macro_fixture356 {($t : ident )=>{ impl < I : Interner > $crate :: fold :: Fold < I > for $t < I > { type Result = $t < I >; fn fold_with < 'i > ( self , _folder : & mut dyn ($crate :: fold :: Folder < 'i , I >), _outer_binder : DebruijnIndex , )-> :: chalk_ir :: Fallible < Self :: Result > where I : 'i , { Ok ( self )}}}; }
+macro_rules! __ra_macro_fixture357 {($($n : ident ),*)=>{ impl <$($n : Visit < I >,)* I : Interner > Visit < I > for ($($n ,)*){ fn visit_with < 'i , BT > (& self , visitor : & mut dyn Visitor < 'i , I , BreakTy = BT >, outer_binder : DebruijnIndex )-> ControlFlow < BT > where I : 'i {# [ allow ( non_snake_case )] let & ($(ref $n ),*)= self ; $(try_break ! ($n . visit_with ( visitor , outer_binder )); )* ControlFlow :: CONTINUE }}}}
+macro_rules! __ra_macro_fixture358 {($t : ty )=>{ impl < I : Interner > $crate :: visit :: Visit < I > for $t { fn visit_with < 'i , B > (& self , _visitor : & mut dyn ($crate :: visit :: Visitor < 'i , I , BreakTy = B >), _outer_binder : DebruijnIndex , )-> ControlFlow < B > where I : 'i , { ControlFlow :: CONTINUE }}}; }
+macro_rules! __ra_macro_fixture359 {($t : ident )=>{ impl < I : Interner > $crate :: visit :: Visit < I > for $t < I > { fn visit_with < 'i , B > (& self , _visitor : & mut dyn ($crate :: visit :: Visitor < 'i , I , BreakTy = B >), _outer_binder : DebruijnIndex , )-> ControlFlow < B > where I : 'i , { ControlFlow :: CONTINUE }}}; }
+macro_rules! __ra_macro_fixture360 {( for ($($t : tt )*)$u : ty )=>{ impl <$($t )*> CastTo <$u > for $u { fn cast_to ( self , _interner : &<$u as HasInterner >:: Interner )-> $u { self }}}; ($u : ty )=>{ impl CastTo <$u > for $u { fn cast_to ( self , interner : &<$u as HasInterner >:: Interner )-> $u { self }}}; }
+macro_rules! __ra_macro_fixture361 {($($id : ident ), *)=>{$(impl < I : Interner > std :: fmt :: Debug for $id < I > { fn fmt (& self , fmt : & mut std :: fmt :: Formatter < '_ >)-> Result < (), std :: fmt :: Error > { write ! ( fmt , "{}({:?})" , stringify ! ($id ), self . 0 )}})* }; }
+macro_rules! __ra_macro_fixture362 {($seq : ident , $data : ident =>$elem : ty , $intern : ident =>$interned : ident )=>{ interned_slice_common ! ($seq , $data =>$elem , $intern =>$interned ); impl < I : Interner > $seq < I > {# [ doc = " Tries to create a sequence using an iterator of element-like things." ] pub fn from_fallible < E > ( interner : & I , elements : impl IntoIterator < Item = Result < impl CastTo <$elem >, E >>, )-> Result < Self , E > { Ok ( Self { interned : I ::$intern ( interner , elements . into_iter (). casted ( interner ))?, })}# [ doc = " Create a sequence from elements" ] pub fn from_iter ( interner : & I , elements : impl IntoIterator < Item = impl CastTo <$elem >>, )-> Self { Self :: from_fallible ( interner , elements . into_iter (). map (| el | -> Result <$elem , ()> { Ok ( el . cast ( interner ))}), ). unwrap ()}# [ doc = " Create a sequence from a single element." ] pub fn from1 ( interner : & I , element : impl CastTo <$elem >)-> Self { Self :: from_iter ( interner , Some ( element ))}}}; }
+macro_rules! __ra_macro_fixture363 {($seq : ident , $data : ident =>$elem : ty , $intern : ident =>$interned : ident )=>{# [ doc = " List of interned elements." ]# [ derive ( Copy , Clone , PartialEq , Eq , Hash , PartialOrd , Ord , HasInterner )] pub struct $seq < I : Interner > { interned : I ::$interned , } impl < I : Interner > $seq < I > {# [ doc = " Get the interned elements." ] pub fn interned (& self )-> & I ::$interned {& self . interned }# [ doc = " Returns a slice containing the elements." ] pub fn as_slice (& self , interner : & I )-> & [$elem ]{ Interner ::$data ( interner , & self . interned )}# [ doc = " Index into the sequence." ] pub fn at (& self , interner : & I , index : usize )-> &$elem {& self . as_slice ( interner )[ index ]}# [ doc = " Create an empty sequence." ] pub fn empty ( interner : & I )-> Self { Self :: from_iter ( interner , None ::<$elem >)}# [ doc = " Check whether this is an empty sequence." ] pub fn is_empty (& self , interner : & I )-> bool { self . as_slice ( interner ). is_empty ()}# [ doc = " Get an iterator over the elements of the sequence." ] pub fn iter (& self , interner : & I )-> std :: slice :: Iter < '_ , $elem > { self . as_slice ( interner ). iter ()}# [ doc = " Get the length of the sequence." ] pub fn len (& self , interner : & I )-> usize { self . as_slice ( interner ). len ()}}}; }
+macro_rules! __ra_macro_fixture364 {($(# [$attrs : meta ])* $vis : vis static $name : ident : $ty : ty )=>($(# [$attrs ])* $vis static $name : $crate :: ScopedKey <$ty > = $crate :: ScopedKey { inner : { thread_local ! ( static FOO : :: std :: cell :: Cell < usize > = {:: std :: cell :: Cell :: new ( 0 )}); & FOO }, _marker : :: std :: marker :: PhantomData , }; )}
+macro_rules! __ra_macro_fixture365 {($(($def : path , $ast : path , $meth : ident )),* ,)=>{$(impl ToDef for $ast { type Def = $def ; fn to_def ( sema : & SemanticsImpl , src : InFile < Self >)-> Option < Self :: Def > { sema . with_ctx (| ctx | ctx .$meth ( src )). map (<$def >:: from )}})*}}
+macro_rules! __ra_macro_fixture366 {($(($id : path , $ty : path )),*)=>{$(impl From <$id > for $ty { fn from ( id : $id )-> $ty {$ty { id }}} impl From <$ty > for $id { fn from ( ty : $ty )-> $id { ty . id }})*}}
+macro_rules! __ra_macro_fixture367 {($(($def : ident , $def_id : ident ),)*)=>{$(impl HasAttrs for $def { fn attrs ( self , db : & dyn HirDatabase )-> Attrs { let def = AttrDefId ::$def_id ( self . into ()); db . attrs ( def )} fn docs ( self , db : & dyn HirDatabase )-> Option < Documentation > { let def = AttrDefId ::$def_id ( self . into ()); db . attrs ( def ). docs ()} fn resolve_doc_path ( self , db : & dyn HirDatabase , link : & str , ns : Option < Namespace >)-> Option < ModuleDef > { let def = AttrDefId ::$def_id ( self . into ()); resolve_doc_path ( db , def , link , ns ). map ( ModuleDef :: from )}})*}; }
+macro_rules! __ra_macro_fixture368 {($($variant : ident ),* for $enum : ident )=>{$(impl HasAttrs for $variant { fn attrs ( self , db : & dyn HirDatabase )-> Attrs {$enum ::$variant ( self ). attrs ( db )} fn docs ( self , db : & dyn HirDatabase )-> Option < Documentation > {$enum ::$variant ( self ). docs ( db )} fn resolve_doc_path ( self , db : & dyn HirDatabase , link : & str , ns : Option < Namespace >)-> Option < ModuleDef > {$enum ::$variant ( self ). resolve_doc_path ( db , link , ns )}})*}; }
+macro_rules! __ra_macro_fixture369 {{$($(#$attr : tt )* fn $fn_name : ident ($($arg : tt )*)-> $ret : ty {$($code : tt )* })*}=>($(# [ test ]$(#$attr )* fn $fn_name (){ fn prop ($($arg )*)-> $ret {$($code )* }:: quickcheck :: quickcheck ( quickcheck ! (@ fn prop []$($arg )*)); })* ); (@ fn $f : ident [$($t : tt )*])=>{$f as fn ($($t ),*)-> _ }; (@ fn $f : ident [$($p : tt )*]: $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )* _]$($tail )*)}; (@ fn $f : ident [$($p : tt )*]$t : tt $($tail : tt )*)=>{ quickcheck ! (@ fn $f [$($p )*]$($tail )*)}; }
+macro_rules! __ra_macro_fixture370 {($($bool : expr , )+)=>{ fn _static_assert (){$(let _ = std :: mem :: transmute ::< [ u8 ; $bool as usize ], u8 >; )+ }}}
+macro_rules! __ra_macro_fixture371 {($ty : ident is $($marker : ident ) and +)=>{# [ test ]# [ allow ( non_snake_case )] fn $ty (){ fn assert_implemented < T : $($marker +)+> (){} assert_implemented ::<$ty > (); }}; ($ty : ident is not $($marker : ident ) or +)=>{# [ test ]# [ allow ( non_snake_case )] fn $ty (){$({trait IsNotImplemented { fn assert_not_implemented (){}} impl < T : $marker > IsNotImplemented for T {} trait IsImplemented { fn assert_not_implemented (){}} impl IsImplemented for $ty {}<$ty >:: assert_not_implemented (); })+ }}; }
+macro_rules! __ra_macro_fixture372 {($($types : ident )*)=>{$(assert_impl ! ($types is UnwindSafe and RefUnwindSafe ); )* }; }
+macro_rules! __ra_macro_fixture373 {($($(# [$attr : meta ])* $name : ident ($value : expr , $expected : expr )),* )=>{$($(# [$attr ])* # [ test ] fn $name (){# [ cfg ( feature = "std" )]{ let mut buf = [ b'\0' ; 40 ]; let len = itoa :: write (& mut buf [..], $value ). unwrap (); assert_eq ! (& buf [ 0 .. len ], $expected . as_bytes ()); } let mut s = String :: new (); itoa :: fmt (& mut s , $value ). unwrap (); assert_eq ! ( s , $expected ); })* }}
+macro_rules! __ra_macro_fixture374 {($($name : ident =>$description : expr ,)+)=>{# [ doc = " Errors that can occur during parsing." ]# [ doc = "" ]# [ doc = " This may be extended in the future so exhaustive matching is" ]# [ doc = " discouraged with an unused variant." ]# [ allow ( clippy :: manual_non_exhaustive )]# [ derive ( PartialEq , Eq , Clone , Copy , Debug )] pub enum ParseError {$($name , )+ # [ doc = " Unused variant enable non-exhaustive matching" ]# [ doc ( hidden )] __FutureProof , } impl fmt :: Display for ParseError { fn fmt (& self , fmt : & mut Formatter < '_ >)-> fmt :: Result { match * self {$(ParseError ::$name => fmt . write_str ($description ), )+ ParseError :: __FutureProof =>{ unreachable ! ( "Don't abuse the FutureProof!" ); }}}}}}
+macro_rules! __ra_macro_fixture375 {($($name : ident =>$description : expr ,)+)=>{# [ doc = " Non-fatal syntax violations that can occur during parsing." ]# [ doc = "" ]# [ doc = " This may be extended in the future so exhaustive matching is" ]# [ doc = " discouraged with an unused variant." ]# [ allow ( clippy :: manual_non_exhaustive )]# [ derive ( PartialEq , Eq , Clone , Copy , Debug )] pub enum SyntaxViolation {$($name , )+ # [ doc = " Unused variant enable non-exhaustive matching" ]# [ doc ( hidden )] __FutureProof , } impl SyntaxViolation { pub fn description (& self )-> & 'static str { match * self {$(SyntaxViolation ::$name =>$description , )+ SyntaxViolation :: __FutureProof =>{ unreachable ! ( "Don't abuse the FutureProof!" ); }}}}}}
+macro_rules! __ra_macro_fixture376 {('owned : $($oty : ident ,)* 'interned : $($ity : ident ,)* )=>{# [ repr ( C )]# [ allow ( non_snake_case )] pub struct HandleCounters {$($oty : AtomicUsize ,)* $($ity : AtomicUsize ,)* } impl HandleCounters { extern "C" fn get ()-> & 'static Self { static COUNTERS : HandleCounters = HandleCounters {$($oty : AtomicUsize :: new ( 1 ),)* $($ity : AtomicUsize :: new ( 1 ),)* }; & COUNTERS }}# [ repr ( C )]# [ allow ( non_snake_case )] pub ( super ) struct HandleStore < S : server :: Types > {$($oty : handle :: OwnedStore < S ::$oty >,)* $($ity : handle :: InternedStore < S ::$ity >,)* } impl < S : server :: Types > HandleStore < S > { pub ( super ) fn new ( handle_counters : & 'static HandleCounters )-> Self { HandleStore {$($oty : handle :: OwnedStore :: new (& handle_counters .$oty ),)* $($ity : handle :: InternedStore :: new (& handle_counters .$ity ),)* }}}$(# [ repr ( C )] pub struct $oty ( pub ( crate ) handle :: Handle ); impl Drop for $oty { fn drop (& mut self ){$oty ( self . 0 ). drop (); }} impl < S > Encode < S > for $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ let handle = self . 0 ; mem :: forget ( self ); handle . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$oty . take ( handle :: Handle :: decode ( r , & mut ()))}} impl < S > Encode < S > for &$oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < 's , S : server :: Types ,> Decode < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's HandleStore < server :: MarkedTypes < S >>)-> Self {& s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S > Encode < S > for & mut $oty { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < 's , S : server :: Types > DecodeMut < '_ , 's , HandleStore < server :: MarkedTypes < S >>> for & 's mut Marked < S ::$oty , $oty > { fn decode ( r : & mut Reader < '_ >, s : & 's mut HandleStore < server :: MarkedTypes < S >> )-> Self {& mut s .$oty [ handle :: Handle :: decode ( r , & mut ())]}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$oty , $oty > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$oty . alloc ( self ). encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $oty { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$oty ( handle :: Handle :: decode ( r , s ))}})* $(# [ repr ( C )]# [ derive ( Copy , Clone , PartialEq , Eq , Hash )] pub ( crate ) struct $ity ( handle :: Handle ); impl < S > Encode < S > for $ity { fn encode ( self , w : & mut Writer , s : & mut S ){ self . 0 . encode ( w , s ); }} impl < S : server :: Types > DecodeMut < '_ , '_ , HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn decode ( r : & mut Reader < '_ >, s : & mut HandleStore < server :: MarkedTypes < S >>)-> Self { s .$ity . copy ( handle :: Handle :: decode ( r , & mut ()))}} impl < S : server :: Types > Encode < HandleStore < server :: MarkedTypes < S >>> for Marked < S ::$ity , $ity > { fn encode ( self , w : & mut Writer , s : & mut HandleStore < server :: MarkedTypes < S >>){ s .$ity . alloc ( self ). encode ( w , s ); }} impl < S > DecodeMut < '_ , '_ , S > for $ity { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$ity ( handle :: Handle :: decode ( r , s ))}})* }}
+macro_rules! __ra_macro_fixture377 {($S : ident , $self : ident , $m : ident )=>{$m ! { FreeFunctions { fn drop ($self : $S :: FreeFunctions ); fn track_env_var ( var : & str , value : Option <& str >); }, TokenStream { fn drop ($self : $S :: TokenStream ); fn clone ($self : &$S :: TokenStream )-> $S :: TokenStream ; fn new ()-> $S :: TokenStream ; fn is_empty ($self : &$S :: TokenStream )-> bool ; fn from_str ( src : & str )-> $S :: TokenStream ; fn to_string ($self : &$S :: TokenStream )-> String ; fn from_token_tree ( tree : TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >, )-> $S :: TokenStream ; fn into_iter ($self : $S :: TokenStream )-> $S :: TokenStreamIter ; }, TokenStreamBuilder { fn drop ($self : $S :: TokenStreamBuilder ); fn new ()-> $S :: TokenStreamBuilder ; fn push ($self : & mut $S :: TokenStreamBuilder , stream : $S :: TokenStream ); fn build ($self : $S :: TokenStreamBuilder )-> $S :: TokenStream ; }, TokenStreamIter { fn drop ($self : $S :: TokenStreamIter ); fn clone ($self : &$S :: TokenStreamIter )-> $S :: TokenStreamIter ; fn next ($self : & mut $S :: TokenStreamIter , )-> Option < TokenTree <$S :: Group , $S :: Punct , $S :: Ident , $S :: Literal >>; }, Group { fn drop ($self : $S :: Group ); fn clone ($self : &$S :: Group )-> $S :: Group ; fn new ( delimiter : Delimiter , stream : $S :: TokenStream )-> $S :: Group ; fn delimiter ($self : &$S :: Group )-> Delimiter ; fn stream ($self : &$S :: Group )-> $S :: TokenStream ; fn span ($self : &$S :: Group )-> $S :: Span ; fn span_open ($self : &$S :: Group )-> $S :: Span ; fn span_close ($self : &$S :: Group )-> $S :: Span ; fn set_span ($self : & mut $S :: Group , span : $S :: Span ); }, Punct { fn new ( ch : char , spacing : Spacing )-> $S :: Punct ; fn as_char ($self : $S :: Punct )-> char ; fn spacing ($self : $S :: Punct )-> Spacing ; fn span ($self : $S :: Punct )-> $S :: Span ; fn with_span ($self : $S :: Punct , span : $S :: Span )-> $S :: Punct ; }, Ident { fn new ( string : & str , span : $S :: Span , is_raw : bool )-> $S :: Ident ; fn span ($self : $S :: Ident )-> $S :: Span ; fn with_span ($self : $S :: Ident , span : $S :: Span )-> $S :: Ident ; }, Literal { fn drop ($self : $S :: Literal ); fn clone ($self : &$S :: Literal )-> $S :: Literal ; fn debug_kind ($self : &$S :: Literal )-> String ; fn symbol ($self : &$S :: Literal )-> String ; fn suffix ($self : &$S :: Literal )-> Option < String >; fn integer ( n : & str )-> $S :: Literal ; fn typed_integer ( n : & str , kind : & str )-> $S :: Literal ; fn float ( n : & str )-> $S :: Literal ; fn f32 ( n : & str )-> $S :: Literal ; fn f64 ( n : & str )-> $S :: Literal ; fn string ( string : & str )-> $S :: Literal ; fn character ( ch : char )-> $S :: Literal ; fn byte_string ( bytes : & [ u8 ])-> $S :: Literal ; fn span ($self : &$S :: Literal )-> $S :: Span ; fn set_span ($self : & mut $S :: Literal , span : $S :: Span ); fn subspan ($self : &$S :: Literal , start : Bound < usize >, end : Bound < usize >, )-> Option <$S :: Span >; }, SourceFile { fn drop ($self : $S :: SourceFile ); fn clone ($self : &$S :: SourceFile )-> $S :: SourceFile ; fn eq ($self : &$S :: SourceFile , other : &$S :: SourceFile )-> bool ; fn path ($self : &$S :: SourceFile )-> String ; fn is_real ($self : &$S :: SourceFile )-> bool ; }, MultiSpan { fn drop ($self : $S :: MultiSpan ); fn new ()-> $S :: MultiSpan ; fn push ($self : & mut $S :: MultiSpan , span : $S :: Span ); }, Diagnostic { fn drop ($self : $S :: Diagnostic ); fn new ( level : Level , msg : & str , span : $S :: MultiSpan )-> $S :: Diagnostic ; fn sub ($self : & mut $S :: Diagnostic , level : Level , msg : & str , span : $S :: MultiSpan , ); fn emit ($self : $S :: Diagnostic ); }, Span { fn debug ($self : $S :: Span )-> String ; fn def_site ()-> $S :: Span ; fn call_site ()-> $S :: Span ; fn mixed_site ()-> $S :: Span ; fn source_file ($self : $S :: Span )-> $S :: SourceFile ; fn parent ($self : $S :: Span )-> Option <$S :: Span >; fn source ($self : $S :: Span )-> $S :: Span ; fn start ($self : $S :: Span )-> LineColumn ; fn end ($self : $S :: Span )-> LineColumn ; fn join ($self : $S :: Span , other : $S :: Span )-> Option <$S :: Span >; fn resolved_at ($self : $S :: Span , at : $S :: Span )-> $S :: Span ; fn source_text ($self : $S :: Span )-> Option < String >; }, }}; }
+macro_rules! __ra_macro_fixture378 {( le $ty : ty )=>{ impl < S > Encode < S > for $ty { fn encode ( self , w : & mut Writer , _: & mut S ){ w . write_all (& self . to_le_bytes ()). unwrap (); }} impl < S > DecodeMut < '_ , '_ , S > for $ty { fn decode ( r : & mut Reader < '_ >, _: & mut S )-> Self { const N : usize = :: std :: mem :: size_of ::<$ty > (); let mut bytes = [ 0 ; N ]; bytes . copy_from_slice (& r [.. N ]); * r = & r [ N ..]; Self :: from_le_bytes ( bytes )}}}; ( struct $name : ident {$($field : ident ),* $(,)? })=>{ impl < S > Encode < S > for $name { fn encode ( self , w : & mut Writer , s : & mut S ){$(self .$field . encode ( w , s );)* }} impl < S > DecodeMut < '_ , '_ , S > for $name { fn decode ( r : & mut Reader < '_ >, s : & mut S )-> Self {$name {$($field : DecodeMut :: decode ( r , s )),* }}}}; ( enum $name : ident $(<$($T : ident ),+>)? {$($variant : ident $(($field : ident ))*),* $(,)? })=>{ impl < S , $($($T : Encode < S >),+)?> Encode < S > for $name $(<$($T ),+>)? { fn encode ( self , w : & mut Writer , s : & mut S ){# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match self {$($name ::$variant $(($field ))* =>{ tag ::$variant . encode ( w , s ); $($field . encode ( w , s );)* })* }}} impl < 'a , S , $($($T : for < 's > DecodeMut < 'a , 's , S >),+)?> DecodeMut < 'a , '_ , S > for $name $(<$($T ),+>)? { fn decode ( r : & mut Reader < 'a >, s : & mut S )-> Self {# [ allow ( non_upper_case_globals )] mod tag {# [ repr ( u8 )] enum Tag {$($variant ),* }$(pub const $variant : u8 = Tag ::$variant as u8 ;)* } match u8 :: decode ( r , s ){$(tag ::$variant =>{$(let $field = DecodeMut :: decode ( r , s );)* $name ::$variant $(($field ))* })* _ => unreachable ! (), }}}}}
+macro_rules! __ra_macro_fixture379 {($($ty : ty ),* $(,)?)=>{$(impl Mark for $ty { type Unmarked = Self ; fn mark ( unmarked : Self :: Unmarked )-> Self { unmarked }} impl Unmark for $ty { type Unmarked = Self ; fn unmark ( self )-> Self :: Unmarked { self }})* }}
+macro_rules! __ra_macro_fixture380 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(impl $name {# [ allow ( unused )]$(pub ( crate ) fn $method ($($arg : $arg_ty ),*)$(-> $ret_ty )* { panic ! ( "hello" ); })* })* }}
+macro_rules! __ra_macro_fixture381 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait Types {$(associated_item ! ( type $name );)* }$(pub trait $name : Types {$(associated_item ! ( fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )?);)* })* pub trait Server : Types $(+ $name )* {} impl < S : Types $(+ $name )*> Server for S {}}}
+macro_rules! __ra_macro_fixture382 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ impl < S : Types > Types for MarkedTypes < S > {$(type $name = Marked < S ::$name , client ::$name >;)* }$(impl < S : $name > $name for MarkedTypes < S > {$(fn $method (& mut self , $($arg : $arg_ty ),*)$(-> $ret_ty )? {<_>:: mark ($name ::$method (& mut self . 0 , $($arg . unmark ()),*))})* })* }}
+macro_rules! __ra_macro_fixture383 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )?;)* }),* $(,)?)=>{ pub trait DispatcherTrait {$(type $name ;)* fn dispatch (& mut self , b : Buffer < u8 >)-> Buffer < u8 >; } impl < S : Server > DispatcherTrait for Dispatcher < MarkedTypes < S >> {$(type $name = < MarkedTypes < S > as Types >::$name ;)* fn dispatch (& mut self , mut b : Buffer < u8 >)-> Buffer < u8 > { let Dispatcher { handle_store , server }= self ; let mut reader = & b [..]; match api_tags :: Method :: decode (& mut reader , & mut ()){$(api_tags :: Method ::$name ( m )=> match m {$(api_tags ::$name ::$method =>{ let mut call_method = || { reverse_decode ! ( reader , handle_store ; $($arg : $arg_ty ),*); $name ::$method ( server , $($arg ),*)}; let r = if thread :: panicking (){ Ok ( call_method ())} else { panic :: catch_unwind ( panic :: AssertUnwindSafe ( call_method )). map_err ( PanicMessage :: from )}; b . clear (); r . encode (& mut b , handle_store ); })* }),* } b }}}}
+macro_rules! __ra_macro_fixture384 {($($name : ident {$(fn $method : ident ($($arg : ident : $arg_ty : ty ),* $(,)?)$(-> $ret_ty : ty )*;)* }),* $(,)?)=>{$(pub ( super ) enum $name {$($method ),* } rpc_encode_decode ! ( enum $name {$($method ),* }); )* pub ( super ) enum Method {$($name ($name )),* } rpc_encode_decode ! ( enum Method {$($name ( m )),* }); }}
+macro_rules! __ra_macro_fixture385 {($(($ident : ident , $string : literal )),*$(,)?)=>{$(pub ( crate ) const $ident : SemanticTokenType = SemanticTokenType :: new ($string );)* pub ( crate ) const SUPPORTED_TYPES : & [ SemanticTokenType ]= & [ SemanticTokenType :: COMMENT , SemanticTokenType :: KEYWORD , SemanticTokenType :: STRING , SemanticTokenType :: NUMBER , SemanticTokenType :: REGEXP , SemanticTokenType :: OPERATOR , SemanticTokenType :: NAMESPACE , SemanticTokenType :: TYPE , SemanticTokenType :: STRUCT , SemanticTokenType :: CLASS , SemanticTokenType :: INTERFACE , SemanticTokenType :: ENUM , SemanticTokenType :: ENUM_MEMBER , SemanticTokenType :: TYPE_PARAMETER , SemanticTokenType :: FUNCTION , SemanticTokenType :: METHOD , SemanticTokenType :: PROPERTY , SemanticTokenType :: MACRO , SemanticTokenType :: VARIABLE , SemanticTokenType :: PARAMETER , $($ident ),* ]; }; }
+macro_rules! __ra_macro_fixture386 {($(($ident : ident , $string : literal )),*$(,)?)=>{$(pub ( crate ) const $ident : SemanticTokenModifier = SemanticTokenModifier :: new ($string );)* pub ( crate ) const SUPPORTED_MODIFIERS : & [ SemanticTokenModifier ]= & [ SemanticTokenModifier :: DOCUMENTATION , SemanticTokenModifier :: DECLARATION , SemanticTokenModifier :: DEFINITION , SemanticTokenModifier :: STATIC , SemanticTokenModifier :: ABSTRACT , SemanticTokenModifier :: DEPRECATED , SemanticTokenModifier :: READONLY , $($ident ),* ]; }; }
+macro_rules! __ra_macro_fixture387 {( struct $name : ident {$($(# [ doc =$doc : literal ])* $field : ident $(| $alias : ident )?: $ty : ty = $default : expr , )* })=>{# [ allow ( non_snake_case )]# [ derive ( Debug , Clone )] struct $name {$($field : $ty ,)* } impl $name { fn from_json ( mut json : serde_json :: Value )-> $name {$name {$($field : get_field (& mut json , stringify ! ($field ), None $(. or ( Some ( stringify ! ($alias ))))?, $default , ), )*}} fn json_schema ()-> serde_json :: Value { schema (& [$({let field = stringify ! ($field ); let ty = stringify ! ($ty ); ( field , ty , & [$($doc ),*], $default )},)* ])}# [ cfg ( test )] fn manual ()-> String { manual (& [$({let field = stringify ! ($field ); let ty = stringify ! ($ty ); ( field , ty , & [$($doc ),*], $default )},)* ])}}}; }
+macro_rules! __ra_macro_fixture388 {($($name : ident ($value : expr ),)*)=>{ mod bench_ryu { use super ::*; $(# [ bench ] fn $name ( b : & mut Bencher ){ let mut buf = ryu :: Buffer :: new (); b . iter ( move || { let value = black_box ($value ); let formatted = buf . format_finite ( value ); black_box ( formatted ); }); })* } mod bench_std_fmt { use super ::*; $(# [ bench ] fn $name ( b : & mut Bencher ){ let mut buf = Vec :: with_capacity ( 20 ); b . iter (|| { buf . clear (); let value = black_box ($value ); write ! (& mut buf , "{}" , value ). unwrap (); black_box ( buf . as_slice ()); }); })* }}; }
+macro_rules! __ra_macro_fixture389 {($($T : ident ),*)=>{$(mod $T { use test :: Bencher ; use num_integer :: { Average , Integer }; use super :: { UncheckedAverage , NaiveAverage , ModuloAverage }; use super :: { bench_ceil , bench_floor , bench_unchecked }; naive_average ! ($T ); unchecked_average ! ($T ); modulo_average ! ($T ); const SIZE : $T = 30 ; fn overflowing ()-> Vec < ($T , $T )> {(($T :: max_value ()- SIZE )..$T :: max_value ()). flat_map (| x | -> Vec <_> {(($T :: max_value ()- 100 ).. ($T :: max_value ()- 100 + SIZE )). map (| y | ( x , y )). collect ()}). collect ()} fn small ()-> Vec < ($T , $T )> {( 0 .. SIZE ). flat_map (| x | -> Vec <_> {( 0 .. SIZE ). map (| y | ( x , y )). collect ()}). collect ()} fn rand ()-> Vec < ($T , $T )> { small (). into_iter (). map (| ( x , y )| ( super :: lcg ( x ), super :: lcg ( y ))). collect ()} mod ceil { use super ::*; mod small { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = small (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . average_ceil ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = small (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . naive_average_ceil ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = small (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_ceil ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = small (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . modulo_average_ceil ( y )); }} mod overflowing { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = overflowing (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . average_ceil ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = overflowing (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . naive_average_ceil ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = overflowing (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_ceil ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = overflowing (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . modulo_average_ceil ( y )); }} mod rand { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = rand (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . average_ceil ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = rand (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . naive_average_ceil ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = rand (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_ceil ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = rand (); bench_ceil ( b , & v , | x : &$T , y : &$T | x . modulo_average_ceil ( y )); }}} mod floor { use super ::*; mod small { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = small (); bench_floor ( b , & v , | x : &$T , y : &$T | x . average_floor ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = small (); bench_floor ( b , & v , | x : &$T , y : &$T | x . naive_average_floor ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = small (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_floor ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = small (); bench_floor ( b , & v , | x : &$T , y : &$T | x . modulo_average_floor ( y )); }} mod overflowing { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = overflowing (); bench_floor ( b , & v , | x : &$T , y : &$T | x . average_floor ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = overflowing (); bench_floor ( b , & v , | x : &$T , y : &$T | x . naive_average_floor ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = overflowing (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_floor ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = overflowing (); bench_floor ( b , & v , | x : &$T , y : &$T | x . modulo_average_floor ( y )); }} mod rand { use super ::*; # [ bench ] fn optimized ( b : & mut Bencher ){ let v = rand (); bench_floor ( b , & v , | x : &$T , y : &$T | x . average_floor ( y )); }# [ bench ] fn naive ( b : & mut Bencher ){ let v = rand (); bench_floor ( b , & v , | x : &$T , y : &$T | x . naive_average_floor ( y )); }# [ bench ] fn unchecked ( b : & mut Bencher ){ let v = rand (); bench_unchecked ( b , & v , | x : &$T , y : &$T | x . unchecked_average_floor ( y )); }# [ bench ] fn modulo ( b : & mut Bencher ){ let v = rand (); bench_floor ( b , & v , | x : &$T , y : &$T | x . modulo_average_floor ( y )); }}}})*}}
+macro_rules! __ra_macro_fixture390 {($T : ident )=>{ impl super :: NaiveAverage for $T { fn naive_average_floor (& self , other : &$T )-> $T { match self . checked_add (* other ){ Some ( z )=> z . div_floor (& 2 ), None =>{ if self > other { let diff = self - other ; other + diff . div_floor (& 2 )} else { let diff = other - self ; self + diff . div_floor (& 2 )}}}} fn naive_average_ceil (& self , other : &$T )-> $T { match self . checked_add (* other ){ Some ( z )=> z . div_ceil (& 2 ), None =>{ if self > other { let diff = self - other ; self - diff . div_floor (& 2 )} else { let diff = other - self ; other - diff . div_floor (& 2 )}}}}}}; }
+macro_rules! __ra_macro_fixture391 {($T : ident )=>{ impl super :: UncheckedAverage for $T { fn unchecked_average_floor (& self , other : &$T )-> $T { self . wrapping_add (* other )/ 2 } fn unchecked_average_ceil (& self , other : &$T )-> $T {( self . wrapping_add (* other )/ 2 ). wrapping_add ( 1 )}}}; }
+macro_rules! __ra_macro_fixture392 {($T : ident )=>{ impl super :: ModuloAverage for $T { fn modulo_average_ceil (& self , other : &$T )-> $T { let ( q1 , r1 )= self . div_mod_floor (& 2 ); let ( q2 , r2 )= other . div_mod_floor (& 2 ); q1 + q2 + ( r1 | r2 )} fn modulo_average_floor (& self , other : &$T )-> $T { let ( q1 , r1 )= self . div_mod_floor (& 2 ); let ( q2 , r2 )= other . div_mod_floor (& 2 ); q1 + q2 + ( r1 * r2 )}}}; }
+macro_rules! __ra_macro_fixture393 {($N : expr , $FUN : ident , $BENCH_NAME : ident , )=>( mod $BENCH_NAME { use super ::*; pub fn sum ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N ). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " sum" ), move | b | { b . iter (|| { cloned (& v ).$FUN (| x , y | x + y )})}); } pub fn complex_iter ( c : & mut Criterion ){ let u = ( 3 ..). take ($N / 2 ); let v = ( 5 ..). take ($N / 2 ); let it = u . chain ( v ); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " complex iter" ), move | b | { b . iter (|| { it . clone (). map (| x | x as f32 ).$FUN ( f32 :: atan2 )})}); } pub fn string_format ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. ($N / 4 )). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " string format" ), move | b | { b . iter (|| { cloned (& v ). map (| x | x . to_string ()).$FUN (| x , y | format ! ( "{} + {}" , x , y ))})}); }} criterion_group ! ($BENCH_NAME , $BENCH_NAME :: sum , $BENCH_NAME :: complex_iter , $BENCH_NAME :: string_format , ); )}
+macro_rules! __ra_macro_fixture394 {($ast : ident , $kind : ident )=>{# [ derive ( PartialEq , Eq , Hash )]# [ repr ( transparent )] struct $ast ( SyntaxNode ); impl $ast {# [ allow ( unused )] fn cast ( node : SyntaxNode )-> Option < Self > { if node . kind ()== $kind { Some ( Self ( node ))} else { None }}}}; }
+macro_rules! __ra_macro_fixture395 {($I : ident , $U : ident )=>{ mod $I { mod ceil { use num_integer :: Average ; # [ test ] fn same_sign (){ assert_eq ! (( 14 as $I ). average_ceil (& 16 ), 15 as $I ); assert_eq ! (( 14 as $I ). average_ceil (& 17 ), 16 as $I ); let max = $crate :: std ::$I :: MAX ; assert_eq ! (( max - 3 ). average_ceil (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_ceil (& ( max - 2 )), max - 2 ); }# [ test ] fn different_sign (){ assert_eq ! (( 14 as $I ). average_ceil (&- 4 ), 5 as $I ); assert_eq ! (( 14 as $I ). average_ceil (&- 5 ), 5 as $I ); let min = $crate :: std ::$I :: MIN ; let max = $crate :: std ::$I :: MAX ; assert_eq ! ( min . average_ceil (& max ), 0 as $I ); }} mod floor { use num_integer :: Average ; # [ test ] fn same_sign (){ assert_eq ! (( 14 as $I ). average_floor (& 16 ), 15 as $I ); assert_eq ! (( 14 as $I ). average_floor (& 17 ), 15 as $I ); let max = $crate :: std ::$I :: MAX ; assert_eq ! (( max - 3 ). average_floor (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_floor (& ( max - 2 )), max - 3 ); }# [ test ] fn different_sign (){ assert_eq ! (( 14 as $I ). average_floor (&- 4 ), 5 as $I ); assert_eq ! (( 14 as $I ). average_floor (&- 5 ), 4 as $I ); let min = $crate :: std ::$I :: MIN ; let max = $crate :: std ::$I :: MAX ; assert_eq ! ( min . average_floor (& max ), - 1 as $I ); }}} mod $U { mod ceil { use num_integer :: Average ; # [ test ] fn bounded (){ assert_eq ! (( 14 as $U ). average_ceil (& 16 ), 15 as $U ); assert_eq ! (( 14 as $U ). average_ceil (& 17 ), 16 as $U ); }# [ test ] fn overflow (){ let max = $crate :: std ::$U :: MAX ; assert_eq ! (( max - 3 ). average_ceil (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_ceil (& ( max - 2 )), max - 2 ); }} mod floor { use num_integer :: Average ; # [ test ] fn bounded (){ assert_eq ! (( 14 as $U ). average_floor (& 16 ), 15 as $U ); assert_eq ! (( 14 as $U ). average_floor (& 17 ), 15 as $U ); }# [ test ] fn overflow (){ let max = $crate :: std ::$U :: MAX ; assert_eq ! (( max - 3 ). average_floor (& ( max - 1 )), max - 2 ); assert_eq ! (( max - 3 ). average_floor (& ( max - 2 )), max - 3 ); }}}}; }
+macro_rules! __ra_macro_fixture396 {($N : expr ; $BENCH_GROUP : ident , $TUPLE_FUN : ident , $TUPLES : ident , $TUPLE_WINDOWS : ident ; $SLICE_FUN : ident , $CHUNKS : ident , $WINDOWS : ident ; $FOR_CHUNKS : ident , $FOR_WINDOWS : ident )=>( fn $FOR_CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { let mut j = 0 ; for _ in 0 .. 1_000 { s += $SLICE_FUN (& v [ j .. ( j + $N )]); j += $N ; } s })}); } fn $FOR_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for i in 0 .. ( 1_000 - $N ){ s += $SLICE_FUN (& v [ i .. ( i + $N )]); } s })}); } fn $TUPLES ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLES ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuples (){ s += $TUPLE_FUN (& x ); } s })}); } fn $CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . chunks ($N ){ s += $SLICE_FUN ( x ); } s })}); } fn $TUPLE_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLE_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuple_windows (){ s += $TUPLE_FUN (& x ); } s })}); } fn $WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . windows ($N ){ s += $SLICE_FUN ( x ); } s })}); } criterion_group ! ($BENCH_GROUP , $FOR_CHUNKS , $FOR_WINDOWS , $TUPLES , $CHUNKS , $TUPLE_WINDOWS , $WINDOWS , ); )}
+macro_rules! __ra_macro_fixture397 {($N : expr , $FUN : ident , $BENCH_NAME : ident , )=>( mod $BENCH_NAME { use super ::*; pub fn sum ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N ). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " sum" ), move | b | { b . iter (|| { cloned (& v ).$FUN (| x , y | x + y )})}); } pub fn complex_iter ( c : & mut Criterion ){ let u = ( 3 ..). take ($N / 2 ); let v = ( 5 ..). take ($N / 2 ); let it = u . chain ( v ); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " complex iter" ), move | b | { b . iter (|| { it . clone (). map (| x | x as f32 ).$FUN ( f32 :: atan2 )})}); } pub fn string_format ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. ($N / 4 )). collect (); c . bench_function (& ( stringify ! ($BENCH_NAME ). replace ( '_' , " " )+ " string format" ), move | b | { b . iter (|| { cloned (& v ). map (| x | x . to_string ()).$FUN (| x , y | format ! ( "{} + {}" , x , y ))})}); }} criterion_group ! ($BENCH_NAME , $BENCH_NAME :: sum , $BENCH_NAME :: complex_iter , $BENCH_NAME :: string_format , ); )}
+macro_rules! __ra_macro_fixture398 {($N : expr ; $BENCH_GROUP : ident , $TUPLE_FUN : ident , $TUPLES : ident , $TUPLE_WINDOWS : ident ; $SLICE_FUN : ident , $CHUNKS : ident , $WINDOWS : ident ; $FOR_CHUNKS : ident , $FOR_WINDOWS : ident )=>( fn $FOR_CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { let mut j = 0 ; for _ in 0 .. 1_000 { s += $SLICE_FUN (& v [ j .. ( j + $N )]); j += $N ; } s })}); } fn $FOR_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($FOR_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for i in 0 .. ( 1_000 - $N ){ s += $SLICE_FUN (& v [ i .. ( i + $N )]); } s })}); } fn $TUPLES ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLES ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuples (){ s += $TUPLE_FUN (& x ); } s })}); } fn $CHUNKS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. $N * 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($CHUNKS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . chunks ($N ){ s += $SLICE_FUN ( x ); } s })}); } fn $TUPLE_WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($TUPLE_WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . iter (). tuple_windows (){ s += $TUPLE_FUN (& x ); } s })}); } fn $WINDOWS ( c : & mut Criterion ){ let v : Vec < u32 > = ( 0 .. 1_000 ). collect (); let mut s = 0 ; c . bench_function (& stringify ! ($WINDOWS ). replace ( '_' , " " ), move | b | { b . iter (|| { for x in v . windows ($N ){ s += $SLICE_FUN ( x ); } s })}); } criterion_group ! ($BENCH_GROUP , $FOR_CHUNKS , $FOR_WINDOWS , $TUPLES , $CHUNKS , $TUPLE_WINDOWS , $WINDOWS , ); )}
+macro_rules! __ra_macro_fixture399 {($name : ident : $e : expr )=>{# [ cfg_attr ( target_arch = "wasm32" , wasm_bindgen_test :: wasm_bindgen_test )]# [ test ] fn $name (){ let ( subscriber , handle )= subscriber :: mock (). event ( event :: mock (). with_fields ( field :: mock ( "answer" ). with_value (& 42 ). and ( field :: mock ( "to_question" ). with_value (& "life, the universe, and everything" ), ). only (), ), ). done (). run_with_handle (); with_default ( subscriber , || { info ! ( answer = $e , to_question = "life, the universe, and everything" ); }); handle . assert_finished (); }}; }
+macro_rules! __ra_macro_fixture400 {($T : ty )=>{ impl GcdOld for $T {# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and" ]# [ doc = " `other`. The result is always positive." ]# [ inline ] fn gcd_old (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return ( m | n ). abs (); } let shift = ( m | n ). trailing_zeros (); if m == Self :: min_value ()|| n == Self :: min_value (){ return ( 1 << shift ). abs (); } m = m . abs (); n = n . abs (); n >>= n . trailing_zeros (); while m != 0 { m >>= m . trailing_zeros (); if n > m { std :: mem :: swap (& mut n , & mut m )} m -= n ; } n << shift }}}; }
+macro_rules! __ra_macro_fixture401 {($T : ty )=>{ impl GcdOld for $T {# [ doc = " Calculates the Greatest Common Divisor (GCD) of the number and" ]# [ doc = " `other`. The result is always positive." ]# [ inline ] fn gcd_old (& self , other : & Self )-> Self { let mut m = * self ; let mut n = * other ; if m == 0 || n == 0 { return m | n ; } let shift = ( m | n ). trailing_zeros (); n >>= n . trailing_zeros (); while m != 0 { m >>= m . trailing_zeros (); if n > m { std :: mem :: swap (& mut n , & mut m )} m -= n ; } n << shift }}}; }
+macro_rules! __ra_macro_fixture402 {($T : ident )=>{ mod $T { use crate :: { run_bench , GcdOld }; use num_integer :: Integer ; use test :: Bencher ; # [ bench ] fn bench_gcd ( b : & mut Bencher ){ run_bench ( b , $T :: gcd ); }# [ bench ] fn bench_gcd_old ( b : & mut Bencher ){ run_bench ( b , $T :: gcd_old ); }}}; }
+macro_rules! __ra_macro_fixture403 {($f : ident , $($t : ty ),+)=>{$(paste :: item ! { qc :: quickcheck ! { fn [< $f _ $t >]( i : RandIter <$t >, k : u16 )-> (){$f ( i , k )}}})+ }; }
+macro_rules! __ra_macro_fixture404 {($name : ident )=>{# [ derive ( Debug )] struct $name { message : & 'static str , drop : DetectDrop , } impl Display for $name { fn fmt (& self , f : & mut fmt :: Formatter )-> fmt :: Result { f . write_str ( self . message )}}}; }
+macro_rules! __ra_macro_fixture405 {($($(# [$attr : meta ])* $name : ident ($value : expr )),* )=>{ mod bench_itoa_write { use test :: { Bencher , black_box }; $($(# [$attr ])* # [ bench ] fn $name ( b : & mut Bencher ){ use itoa ; let mut buf = Vec :: with_capacity ( 40 ); b . iter (|| { buf . clear (); itoa :: write (& mut buf , black_box ($value )). unwrap ()}); })* } mod bench_itoa_fmt { use test :: { Bencher , black_box }; $($(# [$attr ])* # [ bench ] fn $name ( b : & mut Bencher ){ use itoa ; let mut buf = String :: with_capacity ( 40 ); b . iter (|| { buf . clear (); itoa :: fmt (& mut buf , black_box ($value )). unwrap ()}); })* } mod bench_std_fmt { use test :: { Bencher , black_box }; $($(# [$attr ])* # [ bench ] fn $name ( b : & mut Bencher ){ use std :: io :: Write ; let mut buf = Vec :: with_capacity ( 40 ); b . iter (|| { buf . clear (); write ! (& mut buf , "{}" , black_box ($value )). unwrap ()}); })* }}}
+macro_rules! __ra_macro_fixture406 {($typ : ty {$($b_name : ident =>$g_name : ident ($($args : expr ),*),)* })=>{$(# [ bench ] fn $b_name ( b : & mut Bencher ){$g_name ::<$typ > ($($args ,)* b )})* }}
+macro_rules! __ra_macro_fixture407 {($($T : ident ),*)=>{$(mod $T { use test :: Bencher ; use num_integer :: Roots ; # [ bench ] fn sqrt_rand ( b : & mut Bencher ){:: bench_rand_pos ( b , $T :: sqrt , 2 ); }# [ bench ] fn sqrt_small ( b : & mut Bencher ){:: bench_small_pos ( b , $T :: sqrt , 2 ); }# [ bench ] fn cbrt_rand ( b : & mut Bencher ){:: bench_rand ( b , $T :: cbrt , 3 ); }# [ bench ] fn cbrt_small ( b : & mut Bencher ){:: bench_small ( b , $T :: cbrt , 3 ); }# [ bench ] fn fourth_root_rand ( b : & mut Bencher ){:: bench_rand_pos ( b , | x : &$T | x . nth_root ( 4 ), 4 ); }# [ bench ] fn fourth_root_small ( b : & mut Bencher ){:: bench_small_pos ( b , | x : &$T | x . nth_root ( 4 ), 4 ); }# [ bench ] fn fifth_root_rand ( b : & mut Bencher ){:: bench_rand ( b , | x : &$T | x . nth_root ( 5 ), 5 ); }# [ bench ] fn fifth_root_small ( b : & mut Bencher ){:: bench_small ( b , | x : &$T | x . nth_root ( 5 ), 5 ); }})*}}
+macro_rules! __ra_macro_fixture408 {($name : ident , $level : expr )=>{# [ doc = " Creates a new `Diagnostic` with the given `message` at the span" ]# [ doc = " `self`." ] pub fn $name < T : Into < String >> ( self , message : T )-> Diagnostic { Diagnostic :: spanned ( self , $level , message )}}; }
+macro_rules! __ra_macro_fixture409 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new suffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1u32` where the integer" ]# [ doc = " value specified is the first part of the token and the integral is" ]# [ doc = " also suffixed at the end." ]# [ doc = " Literals created from negative numbers may not survive round-trips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: typed_integer (& n . to_string (), stringify ! ($kind )))})*)}
+macro_rules! __ra_macro_fixture410 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new unsuffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1` where the integer" ]# [ doc = " value specified is the first part of the token. No suffix is" ]# [ doc = " specified on this token, meaning that invocations like" ]# [ doc = " `Literal::i8_unsuffixed(1)` are equivalent to" ]# [ doc = " `Literal::u32_unsuffixed(1)`." ]# [ doc = " Literals created from negative numbers may not survive rountrips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: integer (& n . to_string ()))})*)}
+macro_rules! __ra_macro_fixture411 {($spanned : ident , $regular : ident , $level : expr )=>{# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `spans` and" ]# [ doc = " `message`." ] pub fn $spanned < S , T > ( mut self , spans : S , message : T )-> Diagnostic where S : MultiSpan , T : Into < String >, { self . children . push ( Diagnostic :: spanned ( spans , $level , message )); self }# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `message`." ] pub fn $regular < T : Into < String >> ( mut self , message : T )-> Diagnostic { self . children . push ( Diagnostic :: new ($level , message )); self }}; }
+macro_rules! __ra_macro_fixture412 {($($arg : tt )*)=>{{ let res = $crate :: fmt :: format ($crate :: __export :: format_args ! ($($arg )*)); res }}}
+macro_rules! __ra_macro_fixture413 {($dst : expr , $($arg : tt )*)=>($dst . write_fmt ($crate :: format_args ! ($($arg )*)))}
+macro_rules! __ra_macro_fixture414 {($dst : expr $(,)?)=>($crate :: write ! ($dst , "\n" )); ($dst : expr , $($arg : tt )*)=>($dst . write_fmt ($crate :: format_args_nl ! ($($arg )*))); }
+macro_rules! __ra_macro_fixture415 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new suffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1u32` where the integer" ]# [ doc = " value specified is the first part of the token and the integral is" ]# [ doc = " also suffixed at the end. Literals created from negative numbers may" ]# [ doc = " not survive rountrips through `TokenStream` or strings and may be" ]# [ doc = " broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal :: _new ( imp :: Literal ::$name ( n ))})*)}
+macro_rules! __ra_macro_fixture416 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new unsuffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1` where the integer" ]# [ doc = " value specified is the first part of the token. No suffix is" ]# [ doc = " specified on this token, meaning that invocations like" ]# [ doc = " `Literal::i8_unsuffixed(1)` are equivalent to" ]# [ doc = " `Literal::u32_unsuffixed(1)`. Literals created from negative numbers" ]# [ doc = " may not survive rountrips through `TokenStream` or strings and may" ]# [ doc = " be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ] pub fn $name ( n : $kind )-> Literal { Literal :: _new ( imp :: Literal ::$name ( n ))})*)}
+macro_rules! __ra_macro_fixture417 {($($name : ident =>$kind : ident ,)*)=>($(pub fn $name ( n : $kind )-> Literal { Literal :: _new ( format ! ( concat ! ( "{}" , stringify ! ($kind )), n ))})*)}
+macro_rules! __ra_macro_fixture418 {($($name : ident =>$kind : ident ,)*)=>($(pub fn $name ( n : $kind )-> Literal { Literal :: _new ( n . to_string ())})*)}
+macro_rules! __ra_macro_fixture419 {(<$visitor : ident : Visitor <$lifetime : tt >> $($func : ident )*)=>{$(forward_to_deserialize_any_helper ! {$func <$lifetime , $visitor >})* }; ($($func : ident )*)=>{$(forward_to_deserialize_any_helper ! {$func < 'de , V >})* }; }
+macro_rules! __ra_macro_fixture420 {( bool <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_bool <$l , $v > ()}}; ( i8 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i8 <$l , $v > ()}}; ( i16 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i16 <$l , $v > ()}}; ( i32 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i32 <$l , $v > ()}}; ( i64 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_i64 <$l , $v > ()}}; ( i128 <$l : tt , $v : ident >)=>{ serde_if_integer128 ! { forward_to_deserialize_any_method ! { deserialize_i128 <$l , $v > ()}}}; ( u8 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u8 <$l , $v > ()}}; ( u16 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u16 <$l , $v > ()}}; ( u32 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u32 <$l , $v > ()}}; ( u64 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_u64 <$l , $v > ()}}; ( u128 <$l : tt , $v : ident >)=>{ serde_if_integer128 ! { forward_to_deserialize_any_method ! { deserialize_u128 <$l , $v > ()}}}; ( f32 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_f32 <$l , $v > ()}}; ( f64 <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_f64 <$l , $v > ()}}; ( char <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_char <$l , $v > ()}}; ( str <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_str <$l , $v > ()}}; ( string <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_string <$l , $v > ()}}; ( bytes <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_bytes <$l , $v > ()}}; ( byte_buf <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_byte_buf <$l , $v > ()}}; ( option <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_option <$l , $v > ()}}; ( unit <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_unit <$l , $v > ()}}; ( unit_struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_unit_struct <$l , $v > ( name : & 'static str )}}; ( newtype_struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_newtype_struct <$l , $v > ( name : & 'static str )}}; ( seq <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_seq <$l , $v > ()}}; ( tuple <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_tuple <$l , $v > ( len : usize )}}; ( tuple_struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_tuple_struct <$l , $v > ( name : & 'static str , len : usize )}}; ( map <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_map <$l , $v > ()}}; ( struct <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_struct <$l , $v > ( name : & 'static str , fields : & 'static [& 'static str ])}}; ( enum <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_enum <$l , $v > ( name : & 'static str , variants : & 'static [& 'static str ])}}; ( identifier <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_identifier <$l , $v > ()}}; ( ignored_any <$l : tt , $v : ident >)=>{ forward_to_deserialize_any_method ! { deserialize_ignored_any <$l , $v > ()}}; }
+macro_rules! __ra_macro_fixture421 {($func : ident <$l : tt , $v : ident > ($($arg : ident : $ty : ty ),*))=>{# [ inline ] fn $func <$v > ( self , $($arg : $ty ,)* visitor : $v )-> $crate :: __private :: Result <$v :: Value , Self :: Error > where $v : $crate :: de :: Visitor <$l >, {$(let _ = $arg ; )* self . deserialize_any ( visitor )}}; }
+macro_rules! __ra_macro_fixture422 {($($f : ident : $t : ty ,)*)=>{$(fn $f ( self , v : $t )-> fmt :: Result { Display :: fmt (& v , self )})* }; }
+macro_rules! __ra_macro_fixture423 {($name : ident , $level : expr )=>{# [ doc = " Creates a new `Diagnostic` with the given `message` at the span" ]# [ doc = " `self`." ]# [ unstable ( feature = "proc_macro_diagnostic" , issue = "54140" )] pub fn $name < T : Into < String >> ( self , message : T )-> Diagnostic { Diagnostic :: spanned ( self , $level , message )}}; }
+macro_rules! __ra_macro_fixture424 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new suffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1u32` where the integer" ]# [ doc = " value specified is the first part of the token and the integral is" ]# [ doc = " also suffixed at the end." ]# [ doc = " Literals created from negative numbers may not survive round-trips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ]# [ stable ( feature = "proc_macro_lib2" , since = "1.29.0" )] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: typed_integer (& n . to_string (), stringify ! ($kind )))})*)}
+macro_rules! __ra_macro_fixture425 {($($name : ident =>$kind : ident ,)*)=>($(# [ doc = " Creates a new unsuffixed integer literal with the specified value." ]# [ doc = "" ]# [ doc = " This function will create an integer like `1` where the integer" ]# [ doc = " value specified is the first part of the token. No suffix is" ]# [ doc = " specified on this token, meaning that invocations like" ]# [ doc = " `Literal::i8_unsuffixed(1)` are equivalent to" ]# [ doc = " `Literal::u32_unsuffixed(1)`." ]# [ doc = " Literals created from negative numbers may not survive rountrips through" ]# [ doc = " `TokenStream` or strings and may be broken into two tokens (`-` and positive literal)." ]# [ doc = "" ]# [ doc = " Literals created through this method have the `Span::call_site()`" ]# [ doc = " span by default, which can be configured with the `set_span` method" ]# [ doc = " below." ]# [ stable ( feature = "proc_macro_lib2" , since = "1.29.0" )] pub fn $name ( n : $kind )-> Literal { Literal ( bridge :: client :: Literal :: integer (& n . to_string ()))})*)}
+macro_rules! __ra_macro_fixture426 {( type FreeFunctions )=>( type FreeFunctions : 'static ;); ( type TokenStream )=>( type TokenStream : 'static + Clone ;); ( type TokenStreamBuilder )=>( type TokenStreamBuilder : 'static ;); ( type TokenStreamIter )=>( type TokenStreamIter : 'static + Clone ;); ( type Group )=>( type Group : 'static + Clone ;); ( type Punct )=>( type Punct : 'static + Copy + Eq + Hash ;); ( type Ident )=>( type Ident : 'static + Copy + Eq + Hash ;); ( type Literal )=>( type Literal : 'static + Clone ;); ( type SourceFile )=>( type SourceFile : 'static + Clone ;); ( type MultiSpan )=>( type MultiSpan : 'static ;); ( type Diagnostic )=>( type Diagnostic : 'static ;); ( type Span )=>( type Span : 'static + Copy + Eq + Hash ;); ( fn drop (& mut self , $arg : ident : $arg_ty : ty ))=>( fn drop (& mut self , $arg : $arg_ty ){ mem :: drop ($arg )}); ( fn clone (& mut self , $arg : ident : $arg_ty : ty )-> $ret_ty : ty )=>( fn clone (& mut self , $arg : $arg_ty )-> $ret_ty {$arg . clone ()}); ($($item : tt )*)=>($($item )*;)}
+macro_rules! __ra_macro_fixture427 {($spanned : ident , $regular : ident , $level : expr )=>{# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `spans` and" ]# [ doc = " `message`." ]# [ unstable ( feature = "proc_macro_diagnostic" , issue = "54140" )] pub fn $spanned < S , T > ( mut self , spans : S , message : T )-> Diagnostic where S : MultiSpan , T : Into < String >, { self . children . push ( Diagnostic :: spanned ( spans , $level , message )); self }# [ doc = " Adds a new child diagnostic message to `self` with the level" ]# [ doc = " identified by this method\\\'s name with the given `message`." ]# [ unstable ( feature = "proc_macro_diagnostic" , issue = "54140" )] pub fn $regular < T : Into < String >> ( mut self , message : T )-> Diagnostic { self . children . push ( Diagnostic :: new ($level , message )); self }}; }
+macro_rules! __ra_macro_fixture428 {($SelfT : ty , $ActualT : ident , $UnsignedT : ty , $BITS : expr , $Min : expr , $Max : expr , $Feature : expr , $EndFeature : expr , $rot : expr , $rot_op : expr , $rot_result : expr , $swap_op : expr , $swapped : expr , $reversed : expr , $le_bytes : expr , $be_bytes : expr , $to_xe_bytes_doc : expr , $from_xe_bytes_doc : expr )=>{ doc_comment ! { concat ! ( "The smallest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MIN, " , stringify ! ($Min ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MIN : Self = ! 0 ^ ((! 0 as $UnsignedT )>> 1 ) as Self ; } doc_comment ! { concat ! ( "The largest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX, " , stringify ! ($Max ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MAX : Self = ! Self :: MIN ; } doc_comment ! { concat ! ( "The size of this integer type in bits.\n\n# Examples\n\n```\n" , $Feature , "#![feature(int_bits_const)]\nassert_eq!(" , stringify ! ($SelfT ), "::BITS, " , stringify ! ($BITS ), ");" , $EndFeature , "\n```" ), # [ unstable ( feature = "int_bits_const" , issue = "76904" )] pub const BITS : u32 = $BITS ; } doc_comment ! { concat ! ( "Converts a string slice in a given base to an integer.\n\nThe string is expected to be an optional `+` or `-` sign followed by digits.\nLeading and trailing whitespace represent an error. Digits are a subset of these characters,\ndepending on `radix`:\n\n * `0-9`\n * `a-z`\n * `A-Z`\n\n# Panics\n\nThis function panics if `radix` is not in the range from 2 to 36.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::from_str_radix(\"A\", 16), Ok(10));" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )] pub fn from_str_radix ( src : & str , radix : u32 )-> Result < Self , ParseIntError > { from_str_radix ( src , radix )}} doc_comment ! { concat ! ( "Returns the number of ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b100_0000" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.count_ones(), 1);" , $EndFeature , "\n```\n" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn count_ones ( self )-> u32 {( self as $UnsignedT ). count_ones ()}} doc_comment ! { concat ! ( "Returns the number of zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX.count_zeros(), 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn count_zeros ( self )-> u32 {(! self ). count_ones ()}} doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = -1" , stringify ! 
($SelfT ), ";\n\nassert_eq!(n.leading_zeros(), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn leading_zeros ( self )-> u32 {( self as $UnsignedT ). leading_zeros ()}} doc_comment ! { concat ! ( "Returns the number of trailing zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = -4" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.trailing_zeros(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn trailing_zeros ( self )-> u32 {( self as $UnsignedT ). trailing_zeros ()}} doc_comment ! { concat ! ( "Returns the number of leading ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = -1" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.leading_ones(), " , stringify ! ($BITS ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn leading_ones ( self )-> u32 {( self as $UnsignedT ). leading_ones ()}} doc_comment ! { concat ! ( "Returns the number of trailing ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 3" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.trailing_ones(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn trailing_ones ( self )-> u32 {( self as $UnsignedT ). trailing_ones ()}} doc_comment ! { concat ! ( "Shifts the bits to the left by a specified amount, `n`,\nwrapping the truncated bits to the end of the resulting integer.\n\nPlease note this isn't the same operation as the `<<` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_op , stringify ! ($SelfT ), ";\nlet m = " , $rot_result , ";\n\nassert_eq!(n.rotate_left(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_left ( self , n : u32 )-> Self {( self as $UnsignedT ). rotate_left ( n ) as Self }} doc_comment ! { concat ! ( "Shifts the bits to the right by a specified amount, `n`,\nwrapping the truncated bits to the beginning of the resulting\ninteger.\n\nPlease note this isn't the same operation as the `>>` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_result , stringify ! ($SelfT ), ";\nlet m = " , $rot_op , ";\n\nassert_eq!(n.rotate_right(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_right ( self , n : u32 )-> Self {( self as $UnsignedT ). rotate_right ( n ) as Self }} doc_comment ! { concat ! ( "Reverses the byte order of the integer.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! 
($SelfT ), ";\n\nlet m = n.swap_bytes();\n\nassert_eq!(m, " , $swapped , ");\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn swap_bytes ( self )-> Self {( self as $UnsignedT ). swap_bytes () as Self }} doc_comment ! { concat ! ( "Reverses the order of bits in the integer. The least significant bit becomes the most significant bit,\n second least-significant bit becomes second most-significant bit, etc.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! ($SelfT ), ";\nlet m = n.reverse_bits();\n\nassert_eq!(m, " , $reversed , ");\nassert_eq!(0, 0" , stringify ! ($SelfT ), ".reverse_bits());\n```" ), # [ stable ( feature = "reverse_bits" , since = "1.37.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ]# [ must_use ] pub const fn reverse_bits ( self )-> Self {( self as $UnsignedT ). reverse_bits () as Self }} doc_comment ! { concat ! ( "Converts an integer from big endian to the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn from_be ( x : Self )-> Self {# [ cfg ( target_endian = "big" )]{ x }# [ cfg ( not ( target_endian = "big" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts an integer from little endian to the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn from_le ( x : Self )-> Self {# [ cfg ( target_endian = "little" )]{ x }# [ cfg ( not ( target_endian = "little" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts `self` to big endian from the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(n.to_be(), n)\n} else {\n assert_eq!(n.to_be(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn to_be ( self )-> Self {# [ cfg ( target_endian = "big" )]{ self }# [ cfg ( not ( target_endian = "big" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts `self` to little endian from the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are swapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! 
($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(n.to_le(), n)\n} else {\n assert_eq!(n.to_le(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_conversions" , since = "1.32.0" )]# [ inline ] pub const fn to_le ( self )-> Self {# [ cfg ( target_endian = "little" )]{ self }# [ cfg ( not ( target_endian = "little" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Checked integer addition. Computes `self + rhs`, returning `None`\nif overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MAX - 2).checked_add(1), Some(" , stringify ! ($SelfT ), "::MAX - 1));\nassert_eq!((" , stringify ! ($SelfT ), "::MAX - 2).checked_add(3), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_add ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_add ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer addition. Computes `self + rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self + rhs > " , stringify ! ($SelfT ), "::MAX` or `self + rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_add ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_add ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer subtraction. Computes `self - rhs`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MIN + 2).checked_sub(1), Some(" , stringify ! ($SelfT ), "::MIN + 1));\nassert_eq!((" , stringify ! ($SelfT ), "::MIN + 2).checked_sub(3), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_sub ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_sub ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer subtraction. Computes `self - rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self - rhs > " , stringify ! ($SelfT ), "::MAX` or `self - rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_sub ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_sub ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer multiplication. Computes `self * rhs`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_mul(1), Some(" , stringify ! ($SelfT ), "::MAX));\nassert_eq!(" , stringify ! 
($SelfT ), "::MAX.checked_mul(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_mul ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_mul ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer multiplication. Computes `self * rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self * rhs > " , stringify ! ($SelfT ), "::MAX` or `self * rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_mul ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_mul ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer division. Computes `self / rhs`, returning `None` if `rhs == 0`\nor the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MIN + 1).checked_div(-1), Some(" , stringify ! ($Max ), "));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_div(-1), None);\nassert_eq!((1" , stringify ! ($SelfT ), ").checked_div(0), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( unsafe { intrinsics :: unchecked_div ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean division. Computes `self.div_euclid(rhs)`,\nreturning `None` if `rhs == 0` or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!((" , stringify ! ($SelfT ), "::MIN + 1).checked_div_euclid(-1), Some(" , stringify ! ($Max ), "));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_div_euclid(-1), None);\nassert_eq!((1" , stringify ! ($SelfT ), ").checked_div_euclid(0), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( self . div_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked integer remainder. Computes `self % rhs`, returning `None` if\n`rhs == 0` or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(0), None);\nassert_eq!(" , stringify ! 
($SelfT ), "::MIN.checked_rem(-1), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( unsafe { intrinsics :: unchecked_rem ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean remainder. Computes `self.rem_euclid(rhs)`, returning `None`\nif `rhs == 0` or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(0), None);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_rem_euclid(-1), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 || ( self == Self :: MIN && rhs == - 1 )){ None } else { Some ( self . rem_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked negation. Computes `-self`, returning `None` if `self == MIN`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_neg(), Some(-5));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_neg(), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn checked_neg ( self )-> Option < Self > { let ( a , b )= self . overflowing_neg (); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift left. Computes `self << rhs`, returning `None` if `rhs` is larger\nthan or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! ($SelfT ), ".checked_shl(4), Some(0x10));\nassert_eq!(0x1" , stringify ! ($SelfT ), ".checked_shl(129), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shl ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shl ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift right. Computes `self >> rhs`, returning `None` if `rhs` is\nlarger than or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(4), Some(0x1));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(128), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shr ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . 
overflowing_shr ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked absolute value. Computes `self.abs()`, returning `None` if\n`self == MIN`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!((-5" , stringify ! ($SelfT ), ").checked_abs(), Some(5));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.checked_abs(), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_abs" , since = "1.13.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn checked_abs ( self )-> Option < Self > { if self . is_negative (){ self . checked_neg ()} else { Some ( self )}}} doc_comment ! { concat ! ( "Checked exponentiation. Computes `self.pow(exp)`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(8" , stringify ! ($SelfT ), ".checked_pow(2), Some(64));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_pow(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_pow ( self , mut exp : u32 )-> Option < Self > { if exp == 0 { return Some ( 1 ); } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = try_opt ! ( acc . checked_mul ( base )); } exp /= 2 ; base = try_opt ! ( base . checked_mul ( base )); } Some ( try_opt ! ( acc . checked_mul ( base )))}} doc_comment ! { concat ! ( "Saturating integer addition. Computes `self + rhs`, saturating at the numeric\nbounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_add(1), 101);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_add(100), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_add(-1), " , stringify ! ($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_add ( self , rhs : Self )-> Self { intrinsics :: saturating_add ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer subtraction. Computes `self - rhs`, saturating at the\nnumeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_sub(127), -27);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_sub(100), " , stringify ! ($SelfT ), "::MIN);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_sub(-1), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_sub ( self , rhs : Self )-> Self { intrinsics :: saturating_sub ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer negation. Computes `-self`, returning `MAX` if `self == MIN`\ninstead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! 
($SelfT ), ".saturating_neg(), -100);\nassert_eq!((-100" , stringify ! ($SelfT ), ").saturating_neg(), 100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_neg(), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_neg(), " , stringify ! ($SelfT ), "::MIN + 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "saturating_neg" , since = "1.45.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_neg ( self )-> Self { intrinsics :: saturating_sub ( 0 , self )}} doc_comment ! { concat ! ( "Saturating absolute value. Computes `self.abs()`, returning `MAX` if `self ==\nMIN` instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_abs(), 100);\nassert_eq!((-100" , stringify ! ($SelfT ), ").saturating_abs(), 100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_abs(), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!((" , stringify ! ($SelfT ), "::MIN + 1).saturating_abs(), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "saturating_neg" , since = "1.45.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_abs ( self )-> Self { if self . is_negative (){ self . saturating_neg ()} else { self }}} doc_comment ! { concat ! ( "Saturating integer multiplication. Computes `self * rhs`, saturating at the\nnumeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(10" , stringify ! ($SelfT ), ".saturating_mul(12), 120);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_mul(10), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_mul(10), " , stringify ! ($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_mul ( self , rhs : Self )-> Self { match self . checked_mul ( rhs ){ Some ( x )=> x , None => if ( self < 0 )== ( rhs < 0 ){ Self :: MAX } else { Self :: MIN }}}} doc_comment ! { concat ! ( "Saturating integer exponentiation. Computes `self.pow(exp)`,\nsaturating at the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!((-4" , stringify ! ($SelfT ), ").saturating_pow(3), -64);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_pow(2), " , stringify ! ($SelfT ), "::MAX);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.saturating_pow(3), " , stringify ! ($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_pow ( self , exp : u32 )-> Self { match self . checked_pow ( exp ){ Some ( x )=> x , None if self < 0 && exp % 2 == 1 => Self :: MIN , None => Self :: MAX , }}} doc_comment ! { concat ! ( "Wrapping (modular) addition. Computes `self + rhs`, wrapping around at the\nboundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_add(27), 127);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.wrapping_add(2), " , stringify ! ($SelfT ), "::MIN + 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_add ( self , rhs : Self )-> Self { intrinsics :: wrapping_add ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) subtraction. Computes `self - rhs`, wrapping around at the\nboundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0" , stringify ! ($SelfT ), ".wrapping_sub(127), -127);\nassert_eq!((-2" , stringify ! ($SelfT ), ").wrapping_sub(" , stringify ! ($SelfT ), "::MAX), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_sub ( self , rhs : Self )-> Self { intrinsics :: wrapping_sub ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) multiplication. Computes `self * rhs`, wrapping around at\nthe boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".wrapping_mul(12), 120);\nassert_eq!(11i8.wrapping_mul(12), -124);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_mul ( self , rhs : Self )-> Self { intrinsics :: wrapping_mul ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) division. Computes `self / rhs`, wrapping around at the\nboundary of the type.\n\nThe only case where such wrapping can occur is when one divides `MIN / -1` on a signed type (where\n`MIN` is the negative minimal value for the type); this is equivalent to `-MIN`, a positive value\nthat is too large to represent in the type. In such a case, this function returns `MIN` itself.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_div(10), 10);\nassert_eq!((-128i8).wrapping_div(-1), -128);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div ( self , rhs : Self )-> Self { self . overflowing_div ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping Euclidean division. Computes `self.div_euclid(rhs)`,\nwrapping around at the boundary of the type.\n\nWrapping will only occur in `MIN / -1` on a signed type (where `MIN` is the negative minimal value\nfor the type). This is equivalent to `-MIN`, a positive value that is too large to represent in the\ntype. In this case, this method returns `MIN` itself.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_div_euclid(10), 10);\nassert_eq!((-128i8).wrapping_div_euclid(-1), -128);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div_euclid ( self , rhs : Self )-> Self { self . overflowing_div_euclid ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping (modular) remainder. Computes `self % rhs`, wrapping around at the\nboundary of the type.\n\nSuch wrap-around never actually occurs mathematically; implementation artifacts make `x % y`\ninvalid for `MIN / -1` on a signed type (where `MIN` is the negative minimal value). In such a case,\nthis function returns `0`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_rem(10), 0);\nassert_eq!((-128i8).wrapping_rem(-1), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem ( self , rhs : Self )-> Self { self . overflowing_rem ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping Euclidean remainder. Computes `self.rem_euclid(rhs)`, wrapping around\nat the boundary of the type.\n\nWrapping will only occur in `MIN % -1` on a signed type (where `MIN` is the negative minimal value\nfor the type). In this case, this method returns 0.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_rem_euclid(10), 0);\nassert_eq!((-128i8).wrapping_rem_euclid(-1), 0);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem_euclid ( self , rhs : Self )-> Self { self . overflowing_rem_euclid ( rhs ). 0 }} doc_comment ! { concat ! ( "Wrapping (modular) negation. Computes `-self`, wrapping around at the boundary\nof the type.\n\nThe only case where such wrapping can occur is when one negates `MIN` on a signed type (where `MIN`\nis the negative minimal value for the type); this is a positive value that is too large to represent\nin the type. In such a case, this function returns `MIN` itself.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_neg(), -100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.wrapping_neg(), " , stringify ! ($SelfT ), "::MIN);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn wrapping_neg ( self )-> Self { self . overflowing_neg (). 0 }} doc_comment ! { concat ! 
( "Panic-free bitwise shift-left; yields `self << mask(rhs)`, where `mask` removes\nany high-order bits of `rhs` that would cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-left; the RHS of a wrapping shift-left is restricted to\nthe range of the type, rather than the bits shifted out of the LHS being returned to the other end.\nThe primitive integer types all implement a `[`rotate_left`](#method.rotate_left) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((-1" , stringify ! ($SelfT ), ").wrapping_shl(7), -128);\nassert_eq!((-1" , stringify ! ($SelfT ), ").wrapping_shl(128), -1);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shl ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shl ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Panic-free bitwise shift-right; yields `self >> mask(rhs)`, where `mask`\nremoves any high-order bits of `rhs` that would cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-right; the RHS of a wrapping shift-right is restricted\nto the range of the type, rather than the bits shifted out of the LHS being returned to the other\nend. The primitive integer types all implement a [`rotate_right`](#method.rotate_right) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((-128" , stringify ! ($SelfT ), ").wrapping_shr(7), -1);\nassert_eq!((-128i16).wrapping_shr(64), -128);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shr ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shr ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Wrapping (modular) absolute value. Computes `self.abs()`, wrapping around at\nthe boundary of the type.\n\nThe only case where such wrapping can occur is when one takes the absolute value of the negative\nminimal value for the type; this is a positive value that is too large to represent in the type. In\nsuch a case, this function returns `MIN` itself.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_abs(), 100);\nassert_eq!((-100" , stringify ! ($SelfT ), ").wrapping_abs(), 100);\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.wrapping_abs(), " , stringify ! ($SelfT ), "::MIN);\nassert_eq!((-128i8).wrapping_abs() as u8, 128);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_abs" , since = "1.13.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ allow ( unused_attributes )]# [ inline ] pub const fn wrapping_abs ( self )-> Self { if self . is_negative (){ self . wrapping_neg ()} else { self }}} doc_comment ! { concat ! ( "Computes the absolute value of `self` without any wrapping\nor panicking.\n\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "#![feature(unsigned_abs)]\nassert_eq!(100" , stringify ! ($SelfT ), ".unsigned_abs(), 100" , stringify ! 
($UnsignedT ), ");\nassert_eq!((-100" , stringify ! ($SelfT ), ").unsigned_abs(), 100" , stringify ! ($UnsignedT ), ");\nassert_eq!((-128i8).unsigned_abs(), 128u8);" , $EndFeature , "\n```" ), # [ unstable ( feature = "unsigned_abs" , issue = "74913" )]# [ inline ] pub const fn unsigned_abs ( self )-> $UnsignedT { self . wrapping_abs () as $UnsignedT }} doc_comment ! { concat ! ( "Wrapping (modular) exponentiation. Computes `self.pow(exp)`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".wrapping_pow(4), 81);\nassert_eq!(3i8.wrapping_pow(5), -13);\nassert_eq!(3i8.wrapping_pow(6), -39);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc . wrapping_mul ( base ); } exp /= 2 ; base = base . wrapping_mul ( base ); } acc . wrapping_mul ( base )}} doc_comment ! { concat ! ( "Calculates `self` + `rhs`\n\nReturns a tuple of the addition along with a boolean indicating whether an arithmetic overflow would\noccur. If an overflow would have occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_add(2), (7, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.overflowing_add(1), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_add ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: add_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates `self` - `rhs`\n\nReturns a tuple of the subtraction along with a boolean indicating whether an arithmetic overflow\nwould occur. If an overflow would have occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_sub(2), (3, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_sub(1), (" , stringify ! ($SelfT ), "::MAX, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_sub ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: sub_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates the multiplication of `self` and `rhs`.\n\nReturns a tuple of the multiplication along with a boolean indicating whether an arithmetic overflow\nwould occur. If an overflow would have occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! 
($SelfT ), ".overflowing_mul(2), (10, false));\nassert_eq!(1_000_000_000i32.overflowing_mul(10), (1410065408, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_mul ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: mul_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates the divisor when `self` is divided by `rhs`.\n\nReturns a tuple of the divisor along with a boolean indicating whether an arithmetic overflow would\noccur. If an overflow would occur then self is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_div(2), (2, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_div(-1), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( self , true )} else {( self / rhs , false )}}} doc_comment ! { concat ! ( "Calculates the quotient of Euclidean division `self.div_euclid(rhs)`.\n\nReturns a tuple of the divisor along with a boolean indicating whether an arithmetic overflow would\noccur. If an overflow would occur then `self` is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_div_euclid(2), (2, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_div_euclid(-1), (" , stringify ! ($SelfT ), "::MIN, true));\n```" ), # [ inline ]# [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div_euclid ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( self , true )} else {( self . div_euclid ( rhs ), false )}}} doc_comment ! { concat ! ( "Calculates the remainder when `self` is divided by `rhs`.\n\nReturns a tuple of the remainder after dividing along with a boolean indicating whether an\narithmetic overflow would occur. If an overflow would occur then 0 is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem(2), (1, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_rem(-1), (0, true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_rem ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( 0 , true )} else {( self % rhs , false )}}} doc_comment ! 
{ concat ! ( "Overflowing Euclidean remainder. Calculates `self.rem_euclid(rhs)`.\n\nReturns a tuple of the remainder after dividing along with a boolean indicating whether an\narithmetic overflow would occur. If an overflow would occur then 0 is returned.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem_euclid(2), (1, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_rem_euclid(-1), (0, true));\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_rem_euclid ( self , rhs : Self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN && rhs == - 1 ){( 0 , true )} else {( self . rem_euclid ( rhs ), false )}}} doc_comment ! { concat ! ( "Negates self, overflowing if this is equal to the minimum value.\n\nReturns a tuple of the negated version of self along with a boolean indicating whether an overflow\nhappened. If `self` is the minimum value (e.g., `i32::MIN` for values of type `i32`), then the\nminimum value will be returned again and `true` will be returned for an overflow happening.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(2" , stringify ! ($SelfT ), ".overflowing_neg(), (-2, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MIN.overflowing_neg(), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ allow ( unused_attributes )] pub const fn overflowing_neg ( self )-> ( Self , bool ){ if unlikely ! ( self == Self :: MIN ){( Self :: MIN , true )} else {(- self , false )}}} doc_comment ! { concat ! ( "Shifts self left by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean indicating whether the shift\nvalue was larger than or equal to the number of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then used to perform the shift.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! ($SelfT ), ".overflowing_shl(4), (0x10, false));\nassert_eq!(0x1i32.overflowing_shl(36), (0x10, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shl ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shl ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Shifts self right by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean indicating whether the shift\nvalue was larger than or equal to the number of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then used to perform the shift.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! 
($SelfT ), ".overflowing_shr(4), (0x1, false));\nassert_eq!(0x10i32.overflowing_shr(36), (0x1, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shr ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shr ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Computes the absolute value of `self`.\n\nReturns a tuple of the absolute version of self along with a boolean indicating whether an overflow\nhappened. If self is the minimum value (e.g., " , stringify ! ($SelfT ), "::MIN for values of type\n " , stringify ! ($SelfT ), "), then the minimum value will be returned again and true will be returned\nfor an overflow happening.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".overflowing_abs(), (10, false));\nassert_eq!((-10" , stringify ! ($SelfT ), ").overflowing_abs(), (10, false));\nassert_eq!((" , stringify ! ($SelfT ), "::MIN).overflowing_abs(), (" , stringify ! ($SelfT ), "::MIN, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_abs" , since = "1.13.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn overflowing_abs ( self )-> ( Self , bool ){( self . wrapping_abs (), self == Self :: MIN )}} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\nReturns a tuple of the exponentiation along with a bool indicating\nwhether an overflow happened.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".overflowing_pow(4), (81, false));\nassert_eq!(3i8.overflowing_pow(5), (-13, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_pow ( self , mut exp : u32 )-> ( Self , bool ){ if exp == 0 { return ( 1 , false ); } let mut base = self ; let mut acc : Self = 1 ; let mut overflown = false ; let mut r ; while exp > 1 { if ( exp & 1 )== 1 { r = acc . overflowing_mul ( base ); acc = r . 0 ; overflown |= r . 1 ; } exp /= 2 ; r = base . overflowing_mul ( base ); base = r . 0 ; overflown |= r . 1 ; } r = acc . overflowing_mul ( base ); r . 1 |= overflown ; r }} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let x: " , stringify ! ($SelfT ), " = 2; // or any other integer type\n\nassert_eq!(x.pow(5), 32);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc * base ; } exp /= 2 ; base = base * base ; } acc * base }} doc_comment ! { concat ! 
( "Calculates the quotient of Euclidean division of `self` by `rhs`.\n\nThis computes the integer `n` such that `self = n * rhs + self.rem_euclid(rhs)`,\nwith `0 <= self.rem_euclid(rhs) < rhs`.\n\nIn other words, the result is `self / rhs` rounded to the integer `n`\nsuch that `self >= n * rhs`.\nIf `self > 0`, this is equal to round towards zero (the default in Rust);\nif `self < 0`, this is equal to round towards +/- infinity.\n\n# Panics\n\nThis function will panic if `rhs` is 0 or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nlet a: " , stringify ! ($SelfT ), " = 7; // or any other integer type\nlet b = 4;\n\nassert_eq!(a.div_euclid(b), 1); // 7 >= 4 * 1\nassert_eq!(a.div_euclid(-b), -1); // 7 >= -4 * -1\nassert_eq!((-a).div_euclid(b), -2); // -7 >= 4 * -2\nassert_eq!((-a).div_euclid(-b), 2); // -7 >= -4 * 2\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn div_euclid ( self , rhs : Self )-> Self { let q = self / rhs ; if self % rhs < 0 { return if rhs > 0 { q - 1 } else { q + 1 }} q }} doc_comment ! { concat ! ( "Calculates the least nonnegative remainder of `self (mod rhs)`.\n\nThis is done as if by the Euclidean division algorithm -- given\n`r = self.rem_euclid(rhs)`, `self = rhs * self.div_euclid(rhs) + r`, and\n`0 <= r < abs(rhs)`.\n\n# Panics\n\nThis function will panic if `rhs` is 0 or the division results in overflow.\n\n# Examples\n\nBasic usage:\n\n```\nlet a: " , stringify ! ($SelfT ), " = 7; // or any other integer type\nlet b = 4;\n\nassert_eq!(a.rem_euclid(b), 3);\nassert_eq!((-a).rem_euclid(b), 1);\nassert_eq!(a.rem_euclid(-b), 3);\nassert_eq!((-a).rem_euclid(-b), 1);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn rem_euclid ( self , rhs : Self )-> Self { let r = self % rhs ; if r < 0 { if rhs < 0 { r - rhs } else { r + rhs }} else { r }}} doc_comment ! { concat ! ( "Computes the absolute value of `self`.\n\n# Overflow behavior\n\nThe absolute value of `" , stringify ! ($SelfT ), "::MIN` cannot be represented as an\n`" , stringify ! ($SelfT ), "`, and attempting to calculate it will cause an overflow. This means that\ncode in debug mode will trigger a panic on this case and optimized code will return `" , stringify ! ($SelfT ), "::MIN` without a panic.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! ($SelfT ), ".abs(), 10);\nassert_eq!((-10" , stringify ! ($SelfT ), ").abs(), 10);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ allow ( unused_attributes )]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn abs ( self )-> Self { if self . is_negative (){- self } else { self }}} doc_comment ! { concat ! ( "Returns a number representing sign of `self`.\n\n - `0` if the number is zero\n - `1` if the number is positive\n - `-1` if the number is negative\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(10" , stringify ! 
($SelfT ), ".signum(), 1);\nassert_eq!(0" , stringify ! ($SelfT ), ".signum(), 0);\nassert_eq!((-10" , stringify ! ($SelfT ), ").signum(), -1);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_sign" , since = "1.47.0" )]# [ inline ] pub const fn signum ( self )-> Self { match self { n if n > 0 => 1 , 0 => 0 , _ =>- 1 , }}} doc_comment ! { concat ! ( "Returns `true` if `self` is positive and `false` if the number is zero or\nnegative.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert!(10" , stringify ! ($SelfT ), ".is_positive());\nassert!(!(-10" , stringify ! ($SelfT ), ").is_positive());" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn is_positive ( self )-> bool { self > 0 }} doc_comment ! { concat ! ( "Returns `true` if `self` is negative and `false` if the number is zero or\npositive.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert!((-10" , stringify ! ($SelfT ), ").is_negative());\nassert!(!10" , stringify ! ($SelfT ), ".is_negative());" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_int_methods" , since = "1.32.0" )]# [ inline ] pub const fn is_negative ( self )-> bool { self < 0 }} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nbig-endian (network) byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_be_bytes();\nassert_eq!(bytes, " , $be_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_be_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_be (). to_ne_bytes ()}} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nlittle-endian byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_le_bytes();\nassert_eq!(bytes, " , $le_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_le_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_le (). to_ne_bytes ()}} doc_comment ! { concat ! ( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\nAs the target platform's native endianness is used, portable code\nshould use [`to_be_bytes`] or [`to_le_bytes`], as appropriate,\ninstead.\n" , $to_xe_bytes_doc , "\n[`to_be_bytes`]: #method.to_be_bytes\n[`to_le_bytes`]: #method.to_le_bytes\n\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n } else {\n " , $le_bytes , "\n }\n);\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn to_ne_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ unsafe { mem :: transmute ( self )}}} doc_comment ! { concat ! 
( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\n[`to_ne_bytes`] should be preferred over this whenever possible.\n\n[`to_ne_bytes`]: #method.to_ne_bytes\n" , "\n# Examples\n\n```\n#![feature(num_as_ne_bytes)]\nlet num = " , $swap_op , stringify ! ($SelfT ), ";\nlet bytes = num.as_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n &" , $be_bytes , "\n } else {\n &" , $le_bytes , "\n }\n);\n```" ), # [ unstable ( feature = "num_as_ne_bytes" , issue = "76976" )]# [ inline ] pub fn as_ne_bytes (& self )-> & [ u8 ; mem :: size_of ::< Self > ()]{ unsafe {&* ( self as * const Self as * const _)}}} doc_comment ! { concat ! ( "Create an integer value from its representation as a byte array in\nbig endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_be_bytes(" , $be_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_be_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_be_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_be_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_be ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "\nCreate an integer value from its representation as a byte array in\nlittle endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_le_bytes(" , $le_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_le_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_le_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_le_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_le ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "Create an integer value from its memory representation as a byte\narray in native endianness.\n\nAs the target platform's native endianness is used, portable code\nlikely wants to use [`from_be_bytes`] or [`from_le_bytes`], as\nappropriate instead.\n\n[`from_be_bytes`]: #method.from_be_bytes\n[`from_le_bytes`]: #method.from_le_bytes\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_ne_bytes(if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n} else {\n " , $le_bytes , "\n});\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_ne_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! 
($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_ne_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn from_ne_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { unsafe { mem :: transmute ( bytes )}}} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause a compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MIN" , "`](#associatedconstant.MIN) instead.\n\nReturns the smallest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ inline ( always )]# [ rustc_promotable ]# [ rustc_const_stable ( feature = "const_min_value" , since = "1.32.0" )] pub const fn min_value ()-> Self { Self :: MIN }} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause a compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MAX" , "`](#associatedconstant.MAX) instead.\n\nReturns the largest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ inline ( always )]# [ rustc_promotable ]# [ rustc_const_stable ( feature = "const_max_value" , since = "1.32.0" )] pub const fn max_value ()-> Self { Self :: MAX }}}}
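The doc strings spliced into the fixture above describe the four overflow-handling families on the signed integer types (checked_*, wrapping_*, saturating_*, overflowing_*). As a quick orientation aid only (not part of the fixture or the diff), here is a minimal standalone sketch of how those families differ, using stable `i8` methods:

```
fn main() {
    // checked_*: Option-returning, None on overflow (including MIN / -1).
    assert_eq!(i8::MAX.checked_add(1), None);
    assert_eq!(i8::MIN.checked_div(-1), None);
    // wrapping_*: wraps around at the boundary of the type.
    assert_eq!(i8::MAX.wrapping_add(1), i8::MIN);
    assert_eq!(i8::MIN.wrapping_div(-1), i8::MIN);
    // saturating_*: clamps at MIN/MAX instead of overflowing.
    assert_eq!(i8::MAX.saturating_add(1), i8::MAX);
    // overflowing_*: returns (wrapped value, overflow flag).
    assert_eq!(i8::MAX.overflowing_add(1), (i8::MIN, true));
}
```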
+macro_rules! __ra_macro_fixture429 {($x : expr , $($tt : tt )*)=>{# [ doc = $x ]$($tt )* }; }
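`__ra_macro_fixture429` captures the `doc_comment!`-style helper the expansions above rely on: it takes a computed string expression and re-emits it as a `#[doc = ...]` attribute on the following item. A minimal sketch of the same pattern, under the assumption that the helper is used roughly like this (the name `doc_comment` and the `add_one` item are illustrative, not taken from the fixture):

```
macro_rules! doc_comment {
    ($x:expr, $($tt:tt)*) => {
        // Attach the computed string as the item's doc comment.
        #[doc = $x]
        $($tt)*
    };
}

doc_comment! {
    concat!("Adds one to a `", stringify!(u32), "` value."),
    pub fn add_one(x: u32) -> u32 { x + 1 }
}

fn main() {
    assert_eq!(add_one(1), 2);
}
```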
+macro_rules! __ra_macro_fixture430 {()=>{ "\n\n**Note**: This function returns an array of length 2, 4 or 8 bytes\ndepending on the target pointer size.\n\n" }; }
+macro_rules! __ra_macro_fixture431 {()=>{ "\n\n**Note**: This function takes an array of length 2, 4 or 8 bytes\ndepending on the target pointer size.\n\n" }; }
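Fixtures 430 and 431 are the note strings spliced into the `to_*_bytes`/`from_*_bytes` docs for the pointer-sized types, pointing out that the byte array is 2, 4 or 8 bytes long depending on the target. A small illustrative round trip (not part of the fixture), assuming a build for any target where `usize::to_be_bytes` is available:

```
fn main() {
    let n: usize = 0x1234;
    let be = n.to_be_bytes();
    // Array length follows the target pointer size, as the notes above say.
    assert_eq!(be.len(), std::mem::size_of::<usize>());
    assert_eq!(usize::from_be_bytes(be), n);
}
```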
+macro_rules! __ra_macro_fixture432 {($SelfT : ty , $ActualT : ty , $BITS : expr , $MaxV : expr , $Feature : expr , $EndFeature : expr , $rot : expr , $rot_op : expr , $rot_result : expr , $swap_op : expr , $swapped : expr , $reversed : expr , $le_bytes : expr , $be_bytes : expr , $to_xe_bytes_doc : expr , $from_xe_bytes_doc : expr )=>{ doc_comment ! { concat ! ( "The smallest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MIN, 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MIN : Self = 0 ; } doc_comment ! { concat ! ( "The largest value that can be represented by this integer type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX, " , stringify ! ($MaxV ), ");" , $EndFeature , "\n```" ), # [ stable ( feature = "assoc_int_consts" , since = "1.43.0" )] pub const MAX : Self = ! 0 ; } doc_comment ! { concat ! ( "The size of this integer type in bits.\n\n# Examples\n\n```\n" , $Feature , "#![feature(int_bits_const)]\nassert_eq!(" , stringify ! ($SelfT ), "::BITS, " , stringify ! ($BITS ), ");" , $EndFeature , "\n```" ), # [ unstable ( feature = "int_bits_const" , issue = "76904" )] pub const BITS : u32 = $BITS ; } doc_comment ! { concat ! ( "Converts a string slice in a given base to an integer.\n\nThe string is expected to be an optional `+` sign\nfollowed by digits.\nLeading and trailing whitespace represent an error.\nDigits are a subset of these characters, depending on `radix`:\n\n* `0-9`\n* `a-z`\n* `A-Z`\n\n# Panics\n\nThis function panics if `radix` is not in the range from 2 to 36.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::from_str_radix(\"A\", 16), Ok(10));" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )] pub fn from_str_radix ( src : & str , radix : u32 )-> Result < Self , ParseIntError > { from_str_radix ( src , radix )}} doc_comment ! { concat ! ( "Returns the number of ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b01001100" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.count_ones(), 3);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn count_ones ( self )-> u32 { intrinsics :: ctpop ( self as $ActualT ) as u32 }} doc_comment ! { concat ! ( "Returns the number of zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(" , stringify ! ($SelfT ), "::MAX.count_zeros(), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn count_zeros ( self )-> u32 {(! self ). count_ones ()}} doc_comment ! { concat ! ( "Returns the number of leading zeros in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = " , stringify ! ($SelfT ), "::MAX >> 2;\n\nassert_eq!(n.leading_zeros(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn leading_zeros ( self )-> u32 { intrinsics :: ctlz ( self as $ActualT ) as u32 }} doc_comment ! { concat ! 
( "Returns the number of trailing zeros in the binary representation\nof `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b0101000" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.trailing_zeros(), 3);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn trailing_zeros ( self )-> u32 { intrinsics :: cttz ( self ) as u32 }} doc_comment ! { concat ! ( "Returns the number of leading ones in the binary representation of `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = !(" , stringify ! ($SelfT ), "::MAX >> 2);\n\nassert_eq!(n.leading_ones(), 2);" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn leading_ones ( self )-> u32 {(! self ). leading_zeros ()}} doc_comment ! { concat ! ( "Returns the number of trailing ones in the binary representation\nof `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0b1010111" , stringify ! ($SelfT ), ";\n\nassert_eq!(n.trailing_ones(), 3);" , $EndFeature , "\n```" ), # [ stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ rustc_const_stable ( feature = "leading_trailing_ones" , since = "1.46.0" )]# [ inline ] pub const fn trailing_ones ( self )-> u32 {(! self ). trailing_zeros ()}} doc_comment ! { concat ! ( "Shifts the bits to the left by a specified amount, `n`,\nwrapping the truncated bits to the end of the resulting integer.\n\nPlease note this isn't the same operation as the `<<` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_op , stringify ! ($SelfT ), ";\nlet m = " , $rot_result , ";\n\nassert_eq!(n.rotate_left(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_left ( self , n : u32 )-> Self { intrinsics :: rotate_left ( self , n as $SelfT )}} doc_comment ! { concat ! ( "Shifts the bits to the right by a specified amount, `n`,\nwrapping the truncated bits to the beginning of the resulting\ninteger.\n\nPlease note this isn't the same operation as the `>>` shifting operator!\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $rot_result , stringify ! ($SelfT ), ";\nlet m = " , $rot_op , ";\n\nassert_eq!(n.rotate_right(" , $rot , "), m);\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn rotate_right ( self , n : u32 )-> Self { intrinsics :: rotate_right ( self , n as $SelfT )}} doc_comment ! { concat ! ( "\nReverses the byte order of the integer.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! ($SelfT ), ";\nlet m = n.swap_bytes();\n\nassert_eq!(m, " , $swapped , ");\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn swap_bytes ( self )-> Self { intrinsics :: bswap ( self as $ActualT ) as Self }} doc_comment ! { concat ! ( "Reverses the order of bits in the integer. 
The least significant bit becomes the most significant bit,\n second least-significant bit becomes second most-significant bit, etc.\n\n# Examples\n\nBasic usage:\n\n```\nlet n = " , $swap_op , stringify ! ($SelfT ), ";\nlet m = n.reverse_bits();\n\nassert_eq!(m, " , $reversed , ");\nassert_eq!(0, 0" , stringify ! ($SelfT ), ".reverse_bits());\n```" ), # [ stable ( feature = "reverse_bits" , since = "1.37.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ]# [ must_use ] pub const fn reverse_bits ( self )-> Self { intrinsics :: bitreverse ( self as $ActualT ) as Self }} doc_comment ! { concat ! ( "Converts an integer from big endian to the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_be(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn from_be ( x : Self )-> Self {# [ cfg ( target_endian = "big" )]{ x }# [ cfg ( not ( target_endian = "big" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts an integer from little endian to the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n)\n} else {\n assert_eq!(" , stringify ! ($SelfT ), "::from_le(n), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn from_le ( x : Self )-> Self {# [ cfg ( target_endian = "little" )]{ x }# [ cfg ( not ( target_endian = "little" ))]{ x . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts `self` to big endian from the target's endianness.\n\nOn big endian this is a no-op. On little endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"big\") {\n assert_eq!(n.to_be(), n)\n} else {\n assert_eq!(n.to_be(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn to_be ( self )-> Self {# [ cfg ( target_endian = "big" )]{ self }# [ cfg ( not ( target_endian = "big" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Converts `self` to little endian from the target's endianness.\n\nOn little endian this is a no-op. On big endian the bytes are\nswapped.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "let n = 0x1A" , stringify ! ($SelfT ), ";\n\nif cfg!(target_endian = \"little\") {\n assert_eq!(n.to_le(), n)\n} else {\n assert_eq!(n.to_le(), n.swap_bytes())\n}" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_math" , since = "1.32.0" )]# [ inline ] pub const fn to_le ( self )-> Self {# [ cfg ( target_endian = "little" )]{ self }# [ cfg ( not ( target_endian = "little" ))]{ self . swap_bytes ()}}} doc_comment ! { concat ! ( "Checked integer addition. 
Computes `self + rhs`, returning `None`\nif overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!((" , stringify ! ($SelfT ), "::MAX - 2).checked_add(1), " , "Some(" , stringify ! ($SelfT ), "::MAX - 1));\nassert_eq!((" , stringify ! ($SelfT ), "::MAX - 2).checked_add(3), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_add ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_add ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer addition. Computes `self + rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self + rhs > " , stringify ! ($SelfT ), "::MAX` or `self + rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_add ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_add ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer subtraction. Computes `self - rhs`, returning\n`None` if overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(1" , stringify ! ($SelfT ), ".checked_sub(1), Some(0));\nassert_eq!(0" , stringify ! ($SelfT ), ".checked_sub(1), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_sub ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_sub ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer subtraction. Computes `self - rhs`, assuming overflow\ncannot occur. This results in undefined behavior when `self - rhs > " , stringify ! ($SelfT ), "::MAX` or `self - rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_sub ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_sub ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer multiplication. Computes `self * rhs`, returning\n`None` if overflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".checked_mul(1), Some(5));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_mul(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_mul ( self , rhs : Self )-> Option < Self > { let ( a , b )= self . overflowing_mul ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Unchecked integer multiplication. Computes `self * rhs`, assuming overflow\ncannot occur. 
This results in undefined behavior when `self * rhs > " , stringify ! ($SelfT ), "::MAX` or `self * rhs < " , stringify ! ($SelfT ), "::MIN`." ), # [ unstable ( feature = "unchecked_math" , reason = "niche optimization path" , issue = "none" , )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub unsafe fn unchecked_mul ( self , rhs : Self )-> Self { unsafe { intrinsics :: unchecked_mul ( self , rhs )}}} doc_comment ! { concat ! ( "Checked integer division. Computes `self / rhs`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(128" , stringify ! ($SelfT ), ".checked_div(2), Some(64));\nassert_eq!(1" , stringify ! ($SelfT ), ".checked_div(0), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( unsafe { intrinsics :: unchecked_div ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean division. Computes `self.div_euclid(rhs)`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(128" , stringify ! ($SelfT ), ".checked_div_euclid(2), Some(64));\nassert_eq!(1" , stringify ! ($SelfT ), ".checked_div_euclid(0), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_div_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( self . div_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked integer remainder. Computes `self % rhs`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem(0), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_checked_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( unsafe { intrinsics :: unchecked_rem ( self , rhs )})}}} doc_comment ! { concat ! ( "Checked Euclidean modulo. Computes `self.rem_euclid(rhs)`, returning `None`\nif `rhs == 0`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(2), Some(1));\nassert_eq!(5" , stringify ! ($SelfT ), ".checked_rem_euclid(0), None);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_rem_euclid ( self , rhs : Self )-> Option < Self > { if unlikely ! ( rhs == 0 ){ None } else { Some ( self . rem_euclid ( rhs ))}}} doc_comment ! { concat ! ( "Checked negation. 
Computes `-self`, returning `None` unless `self ==\n0`.\n\nNote that negating any positive integer will overflow.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0" , stringify ! ($SelfT ), ".checked_neg(), Some(0));\nassert_eq!(1" , stringify ! ($SelfT ), ".checked_neg(), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn checked_neg ( self )-> Option < Self > { let ( a , b )= self . overflowing_neg (); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift left. Computes `self << rhs`, returning `None`\nif `rhs` is larger than or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! ($SelfT ), ".checked_shl(4), Some(0x10));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shl(129), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shl ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shl ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked shift right. Computes `self >> rhs`, returning `None`\nif `rhs` is larger than or equal to the number of bits in `self`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(4), Some(0x1));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".checked_shr(129), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_checked_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_shr ( self , rhs : u32 )-> Option < Self > { let ( a , b )= self . overflowing_shr ( rhs ); if unlikely ! ( b ){ None } else { Some ( a )}}} doc_comment ! { concat ! ( "Checked exponentiation. Computes `self.pow(exp)`, returning `None` if\noverflow occurred.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".checked_pow(5), Some(32));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.checked_pow(2), None);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn checked_pow ( self , mut exp : u32 )-> Option < Self > { if exp == 0 { return Some ( 1 ); } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = try_opt ! ( acc . checked_mul ( base )); } exp /= 2 ; base = try_opt ! ( base . checked_mul ( base )); } Some ( try_opt ! ( acc . checked_mul ( base )))}} doc_comment ! { concat ! ( "Saturating integer addition. Computes `self + rhs`, saturating at\nthe numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_add(1), 101);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_add(127), " , stringify ! 
($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_add ( self , rhs : Self )-> Self { intrinsics :: saturating_add ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer subtraction. Computes `self - rhs`, saturating\nat the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".saturating_sub(27), 73);\nassert_eq!(13" , stringify ! ($SelfT ), ".saturating_sub(127), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ inline ] pub const fn saturating_sub ( self , rhs : Self )-> Self { intrinsics :: saturating_sub ( self , rhs )}} doc_comment ! { concat ! ( "Saturating integer multiplication. Computes `self * rhs`,\nsaturating at the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(2" , stringify ! ($SelfT ), ".saturating_mul(10), 20);\nassert_eq!((" , stringify ! ($SelfT ), "::MAX).saturating_mul(10), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_saturating_int_methods" , since = "1.47.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_mul ( self , rhs : Self )-> Self { match self . checked_mul ( rhs ){ Some ( x )=> x , None => Self :: MAX , }}} doc_comment ! { concat ! ( "Saturating integer exponentiation. Computes `self.pow(exp)`,\nsaturating at the numeric bounds instead of overflowing.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "\nassert_eq!(4" , stringify ! ($SelfT ), ".saturating_pow(3), 64);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.saturating_pow(2), " , stringify ! ($SelfT ), "::MAX);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn saturating_pow ( self , exp : u32 )-> Self { match self . checked_pow ( exp ){ Some ( x )=> x , None => Self :: MAX , }}} doc_comment ! { concat ! ( "Wrapping (modular) addition. Computes `self + rhs`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(200" , stringify ! ($SelfT ), ".wrapping_add(55), 255);\nassert_eq!(200" , stringify ! ($SelfT ), ".wrapping_add(" , stringify ! ($SelfT ), "::MAX), 199);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_add ( self , rhs : Self )-> Self { intrinsics :: wrapping_add ( self , rhs )}} doc_comment ! { concat ! ( "Wrapping (modular) subtraction. 
Computes `self - rhs`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_sub(100), 0);\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_sub(" , stringify ! ($SelfT ), "::MAX), 101);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_sub ( self , rhs : Self )-> Self { intrinsics :: wrapping_sub ( self , rhs )}}# [ doc = " Wrapping (modular) multiplication. Computes `self *" ]# [ doc = " rhs`, wrapping around at the boundary of the type." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `u8` is used here." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " assert_eq!(10u8.wrapping_mul(12), 120);" ]# [ doc = " assert_eq!(25u8.wrapping_mul(12), 44);" ]# [ doc = " ```" ]# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_mul ( self , rhs : Self )-> Self { intrinsics :: wrapping_mul ( self , rhs )} doc_comment ! { concat ! ( "Wrapping (modular) division. Computes `self / rhs`.\nWrapped division on unsigned types is just normal division.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! ($SelfT ), ".wrapping_div(10), 10);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div ( self , rhs : Self )-> Self { self / rhs }} doc_comment ! { concat ! ( "Wrapping Euclidean division. Computes `self.div_euclid(rhs)`.\nWrapped division on unsigned types is just normal division.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self.wrapping_div(rhs)`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_div_euclid(10), 10);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_div_euclid ( self , rhs : Self )-> Self { self / rhs }} doc_comment ! { concat ! ( "Wrapping (modular) remainder. Computes `self % rhs`.\nWrapped remainder calculation on unsigned types is\njust the regular remainder calculation.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(100" , stringify ! 
($SelfT ), ".wrapping_rem(10), 0);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_unstable ( feature = "const_wrapping_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem ( self , rhs : Self )-> Self { self % rhs }} doc_comment ! { concat ! ( "Wrapping Euclidean modulo. Computes `self.rem_euclid(rhs)`.\nWrapped modulo calculation on unsigned types is\njust the regular remainder calculation.\nThere's no way wrapping could ever happen.\nThis function exists, so that all operations\nare accounted for in the wrapping operations.\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self.wrapping_rem(rhs)`.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(100" , stringify ! ($SelfT ), ".wrapping_rem_euclid(10), 0);\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_rem_euclid ( self , rhs : Self )-> Self { self % rhs }}# [ doc = " Wrapping (modular) negation. Computes `-self`," ]# [ doc = " wrapping around at the boundary of the type." ]# [ doc = "" ]# [ doc = " Since unsigned types do not have negative equivalents" ]# [ doc = " all applications of this function will wrap (except for `-0`)." ]# [ doc = " For values smaller than the corresponding signed type\\\'s maximum" ]# [ doc = " the result is the same as casting the corresponding signed value." ]# [ doc = " Any larger values are equivalent to `MAX + 1 - (val - MAX - 1)` where" ]# [ doc = " `MAX` is the corresponding signed type\\\'s maximum." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `i8` is used here." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " assert_eq!(100i8.wrapping_neg(), -100);" ]# [ doc = " assert_eq!((-128i8).wrapping_neg(), -128);" ]# [ doc = " ```" ]# [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ inline ] pub const fn wrapping_neg ( self )-> Self { self . overflowing_neg (). 0 } doc_comment ! { concat ! ( "Panic-free bitwise shift-left; yields `self << mask(rhs)`,\nwhere `mask` removes any high-order bits of `rhs` that\nwould cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-left; the\nRHS of a wrapping shift-left is restricted to the range\nof the type, rather than the bits shifted out of the LHS\nbeing returned to the other end. The primitive integer\ntypes all implement a [`rotate_left`](#method.rotate_left) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(1" , stringify ! ($SelfT ), ".wrapping_shl(7), 128);\nassert_eq!(1" , stringify ! 
($SelfT ), ".wrapping_shl(128), 1);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shl ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shl ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Panic-free bitwise shift-right; yields `self >> mask(rhs)`,\nwhere `mask` removes any high-order bits of `rhs` that\nwould cause the shift to exceed the bitwidth of the type.\n\nNote that this is *not* the same as a rotate-right; the\nRHS of a wrapping shift-right is restricted to the range\nof the type, rather than the bits shifted out of the LHS\nbeing returned to the other end. The primitive integer\ntypes all implement a [`rotate_right`](#method.rotate_right) function,\nwhich may be what you want instead.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(128" , stringify ! ($SelfT ), ".wrapping_shr(7), 1);\nassert_eq!(128" , stringify ! ($SelfT ), ".wrapping_shr(128), 128);" , $EndFeature , "\n```" ), # [ stable ( feature = "num_wrapping" , since = "1.2.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_shr ( self , rhs : u32 )-> Self { unsafe { intrinsics :: unchecked_shr ( self , ( rhs & ($BITS - 1 )) as $SelfT )}}} doc_comment ! { concat ! ( "Wrapping (modular) exponentiation. Computes `self.pow(exp)`,\nwrapping around at the boundary of the type.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".wrapping_pow(5), 243);\nassert_eq!(3u8.wrapping_pow(6), 217);" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn wrapping_pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc : Self = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc . wrapping_mul ( base ); } exp /= 2 ; base = base . wrapping_mul ( base ); } acc . wrapping_mul ( base )}} doc_comment ! { concat ! ( "Calculates `self` + `rhs`\n\nReturns a tuple of the addition along with a boolean indicating\nwhether an arithmetic overflow would occur. If an overflow would\nhave occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_add(2), (7, false));\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.overflowing_add(1), (0, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_add ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: add_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}} doc_comment ! { concat ! ( "Calculates `self` - `rhs`\n\nReturns a tuple of the subtraction along with a boolean indicating\nwhether an arithmetic overflow would occur. 
If an overflow would\nhave occurred then the wrapped value is returned.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_sub(2), (3, false));\nassert_eq!(0" , stringify ! ($SelfT ), ".overflowing_sub(1), (" , stringify ! ($SelfT ), "::MAX, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_sub ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: sub_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )}}# [ doc = " Calculates the multiplication of `self` and `rhs`." ]# [ doc = "" ]# [ doc = " Returns a tuple of the multiplication along with a boolean" ]# [ doc = " indicating whether an arithmetic overflow would occur. If an" ]# [ doc = " overflow would have occurred then the wrapped value is returned." ]# [ doc = "" ]# [ doc = " # Examples" ]# [ doc = "" ]# [ doc = " Basic usage:" ]# [ doc = "" ]# [ doc = " Please note that this example is shared between integer types." ]# [ doc = " Which explains why `u32` is used here." ]# [ doc = "" ]# [ doc = " ```" ]# [ doc = " assert_eq!(5u32.overflowing_mul(2), (10, false));" ]# [ doc = " assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true));" ]# [ doc = " ```" ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_mul ( self , rhs : Self )-> ( Self , bool ){ let ( a , b )= intrinsics :: mul_with_overflow ( self as $ActualT , rhs as $ActualT ); ( a as Self , b )} doc_comment ! { concat ! ( "Calculates the divisor when `self` is divided by `rhs`.\n\nReturns a tuple of the divisor along with a boolean indicating\nwhether an arithmetic overflow would occur. Note that for unsigned\nintegers overflow never occurs, so the second value is always\n`false`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".overflowing_div(2), (2, false));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div ( self , rhs : Self )-> ( Self , bool ){( self / rhs , false )}} doc_comment ! { concat ! ( "Calculates the quotient of Euclidean division `self.div_euclid(rhs)`.\n\nReturns a tuple of the divisor along with a boolean indicating\nwhether an arithmetic overflow would occur. Note that for unsigned\nintegers overflow never occurs, so the second value is always\n`false`.\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self.overflowing_div(rhs)`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\nassert_eq!(5" , stringify ! 
($SelfT ), ".overflowing_div_euclid(2), (2, false));\n```" ), # [ inline ]# [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_div_euclid ( self , rhs : Self )-> ( Self , bool ){( self / rhs , false )}} doc_comment ! { concat ! ( "Calculates the remainder when `self` is divided by `rhs`.\n\nReturns a tuple of the remainder after dividing along with a boolean\nindicating whether an arithmetic overflow would occur. Note that for\nunsigned integers overflow never occurs, so the second value is\nalways `false`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem(2), (1, false));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_unstable ( feature = "const_overflowing_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_rem ( self , rhs : Self )-> ( Self , bool ){( self % rhs , false )}} doc_comment ! { concat ! ( "Calculates the remainder `self.rem_euclid(rhs)` as if by Euclidean division.\n\nReturns a tuple of the modulo after dividing along with a boolean\nindicating whether an arithmetic overflow would occur. Note that for\nunsigned integers overflow never occurs, so the second value is\nalways `false`.\nSince, for the positive integers, all common\ndefinitions of division are equal, this operation\nis exactly equal to `self.overflowing_rem(rhs)`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage\n\n```\nassert_eq!(5" , stringify ! ($SelfT ), ".overflowing_rem_euclid(2), (1, false));\n```" ), # [ inline ]# [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ] pub const fn overflowing_rem_euclid ( self , rhs : Self )-> ( Self , bool ){( self % rhs , false )}} doc_comment ! { concat ! ( "Negates self in an overflowing fashion.\n\nReturns `!self + 1` using wrapping operations to return the value\nthat represents the negation of this unsigned value. Note that for\npositive unsigned values overflow always occurs, but negating 0 does\nnot overflow.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(0" , stringify ! ($SelfT ), ".overflowing_neg(), (0, false));\nassert_eq!(2" , stringify ! ($SelfT ), ".overflowing_neg(), (-2i32 as " , stringify ! ($SelfT ), ", true));" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )] pub const fn overflowing_neg ( self )-> ( Self , bool ){((! self ). wrapping_add ( 1 ), self != 0 )}} doc_comment ! { concat ! ( "Shifts self left by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean\nindicating whether the shift value was larger than or equal to the\nnumber of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then\nused to perform the shift.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(0x1" , stringify ! 
($SelfT ), ".overflowing_shl(4), (0x10, false));\nassert_eq!(0x1" , stringify ! ($SelfT ), ".overflowing_shl(132), (0x10, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shl ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shl ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Shifts self right by `rhs` bits.\n\nReturns a tuple of the shifted version of self along with a boolean\nindicating whether the shift value was larger than or equal to the\nnumber of bits. If the shift value is too large, then value is\nmasked (N-1) where N is the number of bits, and this value is then\nused to perform the shift.\n\n# Examples\n\nBasic usage\n\n```\n" , $Feature , "assert_eq!(0x10" , stringify ! ($SelfT ), ".overflowing_shr(4), (0x1, false));\nassert_eq!(0x10" , stringify ! ($SelfT ), ".overflowing_shr(132), (0x1, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "wrapping" , since = "1.7.0" )]# [ rustc_const_stable ( feature = "const_wrapping_math" , since = "1.32.0" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_shr ( self , rhs : u32 )-> ( Self , bool ){( self . wrapping_shr ( rhs ), ( rhs > ($BITS - 1 )))}} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\nReturns a tuple of the exponentiation along with a bool indicating\nwhether an overflow happened.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(3" , stringify ! ($SelfT ), ".overflowing_pow(5), (243, false));\nassert_eq!(3u8.overflowing_pow(6), (217, true));" , $EndFeature , "\n```" ), # [ stable ( feature = "no_panic_pow" , since = "1.34.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ] pub const fn overflowing_pow ( self , mut exp : u32 )-> ( Self , bool ){ if exp == 0 { return ( 1 , false ); } let mut base = self ; let mut acc : Self = 1 ; let mut overflown = false ; let mut r ; while exp > 1 { if ( exp & 1 )== 1 { r = acc . overflowing_mul ( base ); acc = r . 0 ; overflown |= r . 1 ; } exp /= 2 ; r = base . overflowing_mul ( base ); base = r . 0 ; overflown |= r . 1 ; } r = acc . overflowing_mul ( base ); r . 1 |= overflown ; r }} doc_comment ! { concat ! ( "Raises self to the power of `exp`, using exponentiation by squaring.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".pow(5), 32);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn pow ( self , mut exp : u32 )-> Self { if exp == 0 { return 1 ; } let mut base = self ; let mut acc = 1 ; while exp > 1 { if ( exp & 1 )== 1 { acc = acc * base ; } exp /= 2 ; base = base * base ; } acc * base }} doc_comment ! { concat ! 
( "Performs Euclidean division.\n\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self / rhs`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(7" , stringify ! ($SelfT ), ".div_euclid(4), 1); // or any other integer type\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn div_euclid ( self , rhs : Self )-> Self { self / rhs }} doc_comment ! { concat ! ( "Calculates the least remainder of `self (mod rhs)`.\n\nSince, for the positive integers, all common\ndefinitions of division are equal, this\nis exactly equal to `self % rhs`.\n\n# Panics\n\nThis function will panic if `rhs` is 0.\n\n# Examples\n\nBasic usage:\n\n```\nassert_eq!(7" , stringify ! ($SelfT ), ".rem_euclid(4), 3); // or any other integer type\n```" ), # [ stable ( feature = "euclidean_division" , since = "1.38.0" )]# [ rustc_const_unstable ( feature = "const_euclidean_int_methods" , issue = "53718" )]# [ must_use = "this returns the result of the operation, \\n without modifying the original" ]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn rem_euclid ( self , rhs : Self )-> Self { self % rhs }} doc_comment ! { concat ! ( "Returns `true` if and only if `self == 2^k` for some `k`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert!(16" , stringify ! ($SelfT ), ".is_power_of_two());\nassert!(!10" , stringify ! ($SelfT ), ".is_power_of_two());" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_stable ( feature = "const_is_power_of_two" , since = "1.32.0" )]# [ inline ] pub const fn is_power_of_two ( self )-> bool { self . count_ones ()== 1 }}# [ inline ]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )] const fn one_less_than_next_power_of_two ( self )-> Self { if self <= 1 { return 0 ; } let p = self - 1 ; let z = unsafe { intrinsics :: ctlz_nonzero ( p )}; <$SelfT >:: MAX >> z } doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `self`.\n\nWhen return value overflows (i.e., `self > (1 << (N-1))` for type\n`uN`), it panics in debug mode and return value is wrapped to 0 in\nrelease mode (the only situation in which method can return 0).\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".next_power_of_two(), 2);\nassert_eq!(3" , stringify ! ($SelfT ), ".next_power_of_two(), 4);" , $EndFeature , "\n```" ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )]# [ inline ]# [ rustc_inherit_overflow_checks ] pub const fn next_power_of_two ( self )-> Self { self . one_less_than_next_power_of_two ()+ 1 }} doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `n`. If\nthe next power of two is greater than the type's maximum value,\n`None` is returned, otherwise the power of two is wrapped in `Some`.\n\n# Examples\n\nBasic usage:\n\n```\n" , $Feature , "assert_eq!(2" , stringify ! ($SelfT ), ".checked_next_power_of_two(), Some(2));\nassert_eq!(3" , stringify ! ($SelfT ), ".checked_next_power_of_two(), Some(4));\nassert_eq!(" , stringify ! 
($SelfT ), "::MAX.checked_next_power_of_two(), None);" , $EndFeature , "\n```" ), # [ inline ]# [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )] pub const fn checked_next_power_of_two ( self )-> Option < Self > { self . one_less_than_next_power_of_two (). checked_add ( 1 )}} doc_comment ! { concat ! ( "Returns the smallest power of two greater than or equal to `n`. If\nthe next power of two is greater than the type's maximum value,\nthe return value is wrapped to `0`.\n\n# Examples\n\nBasic usage:\n\n```\n#![feature(wrapping_next_power_of_two)]\n" , $Feature , "\nassert_eq!(2" , stringify ! ($SelfT ), ".wrapping_next_power_of_two(), 2);\nassert_eq!(3" , stringify ! ($SelfT ), ".wrapping_next_power_of_two(), 4);\nassert_eq!(" , stringify ! ($SelfT ), "::MAX.wrapping_next_power_of_two(), 0);" , $EndFeature , "\n```" ), # [ unstable ( feature = "wrapping_next_power_of_two" , issue = "32463" , reason = "needs decision on wrapping behaviour" )]# [ rustc_const_unstable ( feature = "const_int_pow" , issue = "53718" )] pub const fn wrapping_next_power_of_two ( self )-> Self { self . one_less_than_next_power_of_two (). wrapping_add ( 1 )}} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nbig-endian (network) byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_be_bytes();\nassert_eq!(bytes, " , $be_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_be_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_be (). to_ne_bytes ()}} doc_comment ! { concat ! ( "Return the memory representation of this integer as a byte array in\nlittle-endian byte order.\n" , $to_xe_bytes_doc , "\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_le_bytes();\nassert_eq!(bytes, " , $le_bytes , ");\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn to_le_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ self . to_le (). to_ne_bytes ()}} doc_comment ! { concat ! ( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\nAs the target platform's native endianness is used, portable code\nshould use [`to_be_bytes`] or [`to_le_bytes`], as appropriate,\ninstead.\n" , $to_xe_bytes_doc , "\n[`to_be_bytes`]: #method.to_be_bytes\n[`to_le_bytes`]: #method.to_le_bytes\n\n# Examples\n\n```\nlet bytes = " , $swap_op , stringify ! ($SelfT ), ".to_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n } else {\n " , $le_bytes , "\n }\n);\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn to_ne_bytes ( self )-> [ u8 ; mem :: size_of ::< Self > ()]{ unsafe { mem :: transmute ( self )}}} doc_comment ! { concat ! 
( "\nReturn the memory representation of this integer as a byte array in\nnative byte order.\n\n[`to_ne_bytes`] should be preferred over this whenever possible.\n\n[`to_ne_bytes`]: #method.to_ne_bytes\n" , "\n# Examples\n\n```\n#![feature(num_as_ne_bytes)]\nlet num = " , $swap_op , stringify ! ($SelfT ), ";\nlet bytes = num.as_ne_bytes();\nassert_eq!(\n bytes,\n if cfg!(target_endian = \"big\") {\n &" , $be_bytes , "\n } else {\n &" , $le_bytes , "\n }\n);\n```" ), # [ unstable ( feature = "num_as_ne_bytes" , issue = "76976" )]# [ inline ] pub fn as_ne_bytes (& self )-> & [ u8 ; mem :: size_of ::< Self > ()]{ unsafe {&* ( self as * const Self as * const _)}}} doc_comment ! { concat ! ( "Create a native endian integer value from its representation\nas a byte array in big endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_be_bytes(" , $be_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_be_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_be_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_be_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_be ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "\nCreate a native endian integer value from its representation\nas a byte array in little endian.\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_le_bytes(" , $le_bytes , ");\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_le_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! ($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_le_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ inline ] pub const fn from_le_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { Self :: from_le ( Self :: from_ne_bytes ( bytes ))}} doc_comment ! { concat ! ( "Create a native endian integer value from its memory representation\nas a byte array in native endianness.\n\nAs the target platform's native endianness is used, portable code\nlikely wants to use [`from_be_bytes`] or [`from_le_bytes`], as\nappropriate instead.\n\n[`from_be_bytes`]: #method.from_be_bytes\n[`from_le_bytes`]: #method.from_le_bytes\n" , $from_xe_bytes_doc , "\n# Examples\n\n```\nlet value = " , stringify ! ($SelfT ), "::from_ne_bytes(if cfg!(target_endian = \"big\") {\n " , $be_bytes , "\n} else {\n " , $le_bytes , "\n});\nassert_eq!(value, " , $swap_op , ");\n```\n\nWhen starting from a slice rather than an array, fallible conversion APIs can be used:\n\n```\nuse std::convert::TryInto;\n\nfn read_ne_" , stringify ! ($SelfT ), "(input: &mut &[u8]) -> " , stringify ! 
($SelfT ), " {\n let (int_bytes, rest) = input.split_at(std::mem::size_of::<" , stringify ! ($SelfT ), ">());\n *input = rest;\n " , stringify ! ($SelfT ), "::from_ne_bytes(int_bytes.try_into().unwrap())\n}\n```" ), # [ stable ( feature = "int_to_from_bytes" , since = "1.32.0" )]# [ rustc_const_stable ( feature = "const_int_conversion" , since = "1.44.0" )]# [ cfg_attr ( not ( bootstrap ), rustc_allow_const_fn_unstable ( const_fn_transmute ))]# [ cfg_attr ( bootstrap , allow_internal_unstable ( const_fn_transmute ))]# [ inline ] pub const fn from_ne_bytes ( bytes : [ u8 ; mem :: size_of ::< Self > ()])-> Self { unsafe { mem :: transmute ( bytes )}}} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MIN" , "`](#associatedconstant.MIN) instead.\n\nReturns the smallest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_promotable ]# [ inline ( always )]# [ rustc_const_stable ( feature = "const_max_value" , since = "1.32.0" )] pub const fn min_value ()-> Self { Self :: MIN }} doc_comment ! { concat ! ( "**This method is soft-deprecated.**\n\nAlthough using it won’t cause compilation warning,\nnew code should use [`" , stringify ! ($SelfT ), "::MAX" , "`](#associatedconstant.MAX) instead.\n\nReturns the largest value that can be represented by this integer type." ), # [ stable ( feature = "rust1" , since = "1.0.0" )]# [ rustc_promotable ]# [ inline ( always )]# [ rustc_const_stable ( feature = "const_max_value" , since = "1.32.0" )] pub const fn max_value ()-> Self { Self :: MAX }}}}
+macro_rules! __ra_macro_fixture433 {($type : ident )=>{ const EXPLICIT_SIG_BITS : u8 = Self :: SIG_BITS - 1 ; const MAX_EXP : i16 = ( 1 << ( Self :: EXP_BITS - 1 ))- 1 ; const MIN_EXP : i16 = -< Self as RawFloat >:: MAX_EXP + 1 ; const MAX_EXP_INT : i16 = < Self as RawFloat >:: MAX_EXP - ( Self :: SIG_BITS as i16 - 1 ); const MAX_ENCODED_EXP : i16 = ( 1 << Self :: EXP_BITS )- 1 ; const MIN_EXP_INT : i16 = < Self as RawFloat >:: MIN_EXP - ( Self :: SIG_BITS as i16 - 1 ); const MAX_SIG : u64 = ( 1 << Self :: SIG_BITS )- 1 ; const MIN_SIG : u64 = 1 << ( Self :: SIG_BITS - 1 ); const INFINITY : Self = $type :: INFINITY ; const NAN : Self = $type :: NAN ; const ZERO : Self = 0.0 ; }; }
+macro_rules! __ra_macro_fixture434 {()=>{# [ inline ] unsafe fn forward_unchecked ( start : Self , n : usize )-> Self { unsafe { start . unchecked_add ( n as Self )}}# [ inline ] unsafe fn backward_unchecked ( start : Self , n : usize )-> Self { unsafe { start . unchecked_sub ( n as Self )}}# [ inline ] fn forward ( start : Self , n : usize )-> Self { if Self :: forward_checked ( start , n ). is_none (){ let _ = Add :: add ( Self :: MAX , 1 ); } start . wrapping_add ( n as Self )}# [ inline ] fn backward ( start : Self , n : usize )-> Self { if Self :: backward_checked ( start , n ). is_none (){ let _ = Sub :: sub ( Self :: MIN , 1 ); } start . wrapping_sub ( n as Self )}}; }
+macro_rules! __ra_macro_fixture435 {( u8 , $($tt : tt )*)=>{ "" }; ( i8 , $($tt : tt )*)=>{ "" }; ($_: ident , $($tt : tt )*)=>{$($tt )* }; }
+macro_rules! __ra_macro_fixture436 {( forward )=>{# [ inline ] fn haystack (& self )-> & 'a str { self . 0 . haystack ()}# [ inline ] fn next (& mut self )-> SearchStep { self . 0 . next ()}# [ inline ] fn next_match (& mut self )-> Option < ( usize , usize )> { self . 0 . next_match ()}# [ inline ] fn next_reject (& mut self )-> Option < ( usize , usize )> { self . 0 . next_reject ()}}; ( reverse )=>{# [ inline ] fn next_back (& mut self )-> SearchStep { self . 0 . next_back ()}# [ inline ] fn next_match_back (& mut self )-> Option < ( usize , usize )> { self . 0 . next_match_back ()}# [ inline ] fn next_reject_back (& mut self )-> Option < ( usize , usize )> { self . 0 . next_reject_back ()}}; }
+macro_rules! __ra_macro_fixture437 {($t : ty , $pmap : expr , $smap : expr )=>{ type Searcher = $t ; # [ inline ] fn into_searcher ( self , haystack : & 'a str )-> $t {($smap )(($pmap )( self ). into_searcher ( haystack ))}# [ inline ] fn is_contained_in ( self , haystack : & 'a str )-> bool {($pmap )( self ). is_contained_in ( haystack )}# [ inline ] fn is_prefix_of ( self , haystack : & 'a str )-> bool {($pmap )( self ). is_prefix_of ( haystack )}# [ inline ] fn strip_prefix_of ( self , haystack : & 'a str )-> Option <& 'a str > {($pmap )( self ). strip_prefix_of ( haystack )}# [ inline ] fn is_suffix_of ( self , haystack : & 'a str )-> bool where $t : ReverseSearcher < 'a >, {($pmap )( self ). is_suffix_of ( haystack )}# [ inline ] fn strip_suffix_of ( self , haystack : & 'a str )-> Option <& 'a str > where $t : ReverseSearcher < 'a >, {($pmap )( self ). strip_suffix_of ( haystack )}}; }
+macro_rules! __ra_macro_fixture438 {()=>{# [ inline ] fn is_ascii (& self )-> bool { self . is_ascii ()}# [ inline ] fn to_ascii_uppercase (& self )-> Self :: Owned { self . to_ascii_uppercase ()}# [ inline ] fn to_ascii_lowercase (& self )-> Self :: Owned { self . to_ascii_lowercase ()}# [ inline ] fn eq_ignore_ascii_case (& self , o : & Self )-> bool { self . eq_ignore_ascii_case ( o )}# [ inline ] fn make_ascii_uppercase (& mut self ){ self . make_ascii_uppercase (); }# [ inline ] fn make_ascii_lowercase (& mut self ){ self . make_ascii_lowercase (); }}; }
+macro_rules! __ra_macro_fixture439 {()=>($crate :: vec :: Vec :: new ()); ($elem : expr ; $n : expr )=>($crate :: vec :: from_elem ($elem , $n )); ($($x : expr ),+ $(,)?)=>(< [_]>:: into_vec ( box [$($x ),+])); }
+macro_rules! __ra_macro_fixture440 {($left : expr , $right : expr $(,)?)=>({ match (&$left , &$right ){( left_val , right_val )=>{ if ! (* left_val == * right_val ){ panic ! ( r#"assertion failed: `(left == right)`\n left: `{:?}`,\n right: `{:?}`"# , &* left_val , &* right_val )}}}}); ($left : expr , $right : expr , $($arg : tt )+)=>({ match (& ($left ), & ($right )){( left_val , right_val )=>{ if ! (* left_val == * right_val ){ panic ! ( r#"assertion failed: `(left == right)`\n left: `{:?}`,\n right: `{:?}`: {}"# , &* left_val , &* right_val , $crate :: format_args ! ($($arg )+))}}}}); }
+macro_rules! __ra_macro_fixture441 {()=>({$crate :: panic ! ( "explicit panic" )}); ($msg : expr $(,)?)=>({$crate :: rt :: begin_panic ($msg )}); ($fmt : expr , $($arg : tt )+)=>({$crate :: rt :: begin_panic_fmt (&$crate :: format_args ! ($fmt , $($arg )+))}); }
+macro_rules! __ra_macro_fixture442 {($expression : expr , $($pattern : pat )|+ $(if $guard : expr )? $(,)?)=>{ match $expression {$($pattern )|+ $(if $guard )? => true , _ => false }}}
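// Sketch (hypothetical example, not in the diff): __ra_macro_fixture442 above is the body of a
// matches!-style macro; a call such as `matches!(c, Some(d) if d.is_ascii_digit())` expands to a
// boolean-returning match of roughly this shape.
fn is_some_digit(c: Option<char>) -> bool {
    match c {
        Some(d) if d.is_ascii_digit() => true,
        _ => false,
    }
}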
+macro_rules! __ra_macro_fixture443 {()=>{# [ inline ] fn load_consume (& self )-> Self :: Val { self . load ( Ordering :: Acquire )}}; }
+macro_rules! __ra_macro_fixture444 {($($tt : tt )*)=>{$($tt )* }}
+macro_rules! __ra_macro_fixture445 {($tyname : ident , $($($field : ident ).+),*)=>{ fn fmt (& self , f : & mut :: std :: fmt :: Formatter )-> :: std :: fmt :: Result { f . debug_struct ( stringify ! ($tyname ))$(. field ( stringify ! ($($field ).+), & self .$($field ).+))* . finish ()}}}
+macro_rules! __ra_macro_fixture446 {($($field : ident ),*)=>{ fn clone (& self )-> Self { Self {$($field : self .$field . clone (),)* }}}}
+macro_rules! __ra_macro_fixture447 {($method : ident )=>{ fn $method < V > ( self , visitor : V )-> Result < V :: Value > where V : de :: Visitor < 'de >, { self . deserialize_number ( visitor )}}; }
+macro_rules! __ra_macro_fixture448 {($method : ident =>$visit : ident )=>{ fn $method < V > ( self , visitor : V )-> Result < V :: Value > where V : de :: Visitor < 'de >, { self . de . eat_char (); self . de . scratch . clear (); let string = tri ! ( self . de . read . parse_str (& mut self . de . scratch )); match ( string . parse (), string ){( Ok ( integer ), _)=> visitor .$visit ( integer ), ( Err (_), Reference :: Borrowed ( s ))=> visitor . visit_borrowed_str ( s ), ( Err (_), Reference :: Copied ( s ))=> visitor . visit_str ( s ), }}}; }
+macro_rules! __ra_macro_fixture449 {($method : ident )=>{# [ cfg ( not ( feature = "arbitrary_precision" ))] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match self { Value :: Number ( n )=> n . deserialize_any ( visitor ), _ => Err ( self . invalid_type (& visitor )), }}# [ cfg ( feature = "arbitrary_precision" )] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match self { Value :: Number ( n )=> n .$method ( visitor ), _ => self . deserialize_any ( visitor ), }}}; }
+macro_rules! __ra_macro_fixture450 {($method : ident )=>{# [ cfg ( not ( feature = "arbitrary_precision" ))] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match * self { Value :: Number ( ref n )=> n . deserialize_any ( visitor ), _ => Err ( self . invalid_type (& visitor )), }}# [ cfg ( feature = "arbitrary_precision" )] fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match * self { Value :: Number ( ref n )=> n .$method ( visitor ), _ => self . deserialize_any ( visitor ), }}}; }
+macro_rules! __ra_macro_fixture451 {($method : ident =>$visit : ident )=>{ fn $method < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match ( self . key . parse (), self . key ){( Ok ( integer ), _)=> visitor .$visit ( integer ), ( Err (_), Cow :: Borrowed ( s ))=> visitor . visit_borrowed_str ( s ), # [ cfg ( any ( feature = "std" , feature = "alloc" ))]( Err (_), Cow :: Owned ( s ))=> visitor . visit_string ( s ), }}}; }
+macro_rules! __ra_macro_fixture452 {(@ expand [$($num_string : tt )*])=>{# [ cfg ( not ( feature = "arbitrary_precision" ))]# [ inline ] fn deserialize_any < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { match self . n { N :: PosInt ( u )=> visitor . visit_u64 ( u ), N :: NegInt ( i )=> visitor . visit_i64 ( i ), N :: Float ( f )=> visitor . visit_f64 ( f ), }}# [ cfg ( feature = "arbitrary_precision" )]# [ inline ] fn deserialize_any < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de > { if let Some ( u )= self . as_u64 (){ return visitor . visit_u64 ( u ); } else if let Some ( i )= self . as_i64 (){ return visitor . visit_i64 ( i ); } else if let Some ( f )= self . as_f64 (){ if ryu :: Buffer :: new (). format_finite ( f )== self . n || f . to_string ()== self . n { return visitor . visit_f64 ( f ); }} visitor . visit_map ( NumberDeserializer { number : Some ( self .$($num_string )*), })}}; ( owned )=>{ deserialize_any ! (@ expand [ n ]); }; ( ref )=>{ deserialize_any ! (@ expand [ n . clone ()]); }; }
+macro_rules! __ra_macro_fixture453 {($deserialize : ident =>$visit : ident )=>{# [ cfg ( not ( feature = "arbitrary_precision" ))] fn $deserialize < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : Visitor < 'de >, { self . deserialize_any ( visitor )}# [ cfg ( feature = "arbitrary_precision" )] fn $deserialize < V > ( self , visitor : V )-> Result < V :: Value , Error > where V : de :: Visitor < 'de >, { visitor .$visit ( self . n . parse (). map_err (|_| invalid_number ())?)}}; }
+macro_rules! __ra_macro_fixture454 {()=>{ fn __rayon_private__ (& self )-> crate :: private :: PrivateMarker { crate :: private :: PrivateMarker }}; }
+macro_rules! __ra_macro_fixture455 {()=>{ fn __rayon_private__ (& self )-> crate :: private :: PrivateMarker { crate :: private :: PrivateMarker }}; }
+macro_rules! __ra_macro_fixture456 {($map_elt : expr )=>{ fn next (& mut self )-> Option < Self :: Item > { self . iter . next (). map ($map_elt )} fn size_hint (& self )-> ( usize , Option < usize >){ self . iter . size_hint ()} fn count ( self )-> usize { self . iter . len ()} fn nth (& mut self , n : usize )-> Option < Self :: Item > { self . iter . nth ( n ). map ($map_elt )} fn last ( mut self )-> Option < Self :: Item > { self . next_back ()} fn collect < C > ( self )-> C where C : FromIterator < Self :: Item >, { self . iter . map ($map_elt ). collect ()}}; }
+macro_rules! __ra_macro_fixture457 {($map_elt : expr )=>{ fn next_back (& mut self )-> Option < Self :: Item > { self . iter . next_back (). map ($map_elt )}}; }
+macro_rules! __ra_macro_fixture458 {()=>{# [ doc = " This trait is private; this method exists to make it" ]# [ doc = " impossible to implement outside the crate." ]# [ doc ( hidden )] fn __rayon_private__ (& self )-> crate :: private :: PrivateMarker ; }; }
+macro_rules! __ra_macro_fixture459 {($ident : ident )=>{{# [ cfg ( test )]{ extern "C" {# [ no_mangle ] static $ident : std :: sync :: atomic :: AtomicUsize ; } unsafe {$ident . fetch_add ( 1 , std :: sync :: atomic :: Ordering :: SeqCst ); }}}}; }
+macro_rules! __ra_macro_fixture460 {($ident : ident )=>{# [ no_mangle ] static $ident : std :: sync :: atomic :: AtomicUsize = std :: sync :: atomic :: AtomicUsize :: new ( 0 ); let _checker = $crate :: mark :: MarkChecker :: new (&$ident ); }; }
+macro_rules! __ra_macro_fixture461 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Debug , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Debug , $($arg )+))}
+macro_rules! __ra_macro_fixture462 {()=>($crate :: eprint ! ( "\n" )); ($($arg : tt )*)=>({$crate :: io :: _eprint ($crate :: format_args_nl ! ($($arg )*)); })}
+macro_rules! __ra_macro_fixture463 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Warn , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Warn , $($arg )+))}
+macro_rules! __ra_macro_fixture464 {( target : $target : expr , $lvl : expr , $($arg : tt )+)=>({ let lvl = $lvl ; if lvl <= $crate :: STATIC_MAX_LEVEL && lvl <= $crate :: max_level (){$crate :: __private_api_log ( __log_format_args ! ($($arg )+), lvl , & ($target , __log_module_path ! (), __log_file ! (), __log_line ! ()), ); }}); ($lvl : expr , $($arg : tt )+)=>( log ! ( target : __log_module_path ! (), $lvl , $($arg )+))}
+macro_rules! __ra_macro_fixture465 {($($args : tt )*)=>{ format_args ! ($($args )*)}; }
+macro_rules! __ra_macro_fixture466 {()=>{ module_path ! ()}; }
+macro_rules! __ra_macro_fixture467 {()=>{ file ! ()}; }
+macro_rules! __ra_macro_fixture468 {()=>{ line ! ()}; }
+macro_rules! __ra_macro_fixture469 {($left : expr , $right : expr )=>{ assert_eq_text ! ($left , $right ,)}; ($left : expr , $right : expr , $($tt : tt )*)=>{{ let left = $left ; let right = $right ; if left != right { if left . trim ()== right . trim (){ std :: eprintln ! ( "Left:\n{:?}\n\nRight:\n{:?}\n\nWhitespace difference\n" , left , right ); } else { let diff = $crate :: __diff ( left , right ); std :: eprintln ! ( "Left:\n{}\n\nRight:\n{}\n\nDiff:\n{}\n" , left , right , $crate :: format_diff ( diff )); } std :: eprintln ! ($($tt )*); panic ! ( "text differs" ); }}}; }
+macro_rules! __ra_macro_fixture470 {($($arg : tt )*)=>($crate :: io :: _eprint ($crate :: format_args ! ($($arg )*))); }
+macro_rules! __ra_macro_fixture471 {($left : expr , $right : expr $(,)?)=>({ match (&$left , &$right ){( left_val , right_val )=>{ if * left_val == * right_val { panic ! ( r#"assertion failed: `(left != right)`\n left: `{:?}`,\n right: `{:?}`"# , &* left_val , &* right_val )}}}}); ($left : expr , $right : expr , $($arg : tt )+)=>({ match (& ($left ), & ($right )){( left_val , right_val )=>{ if * left_val == * right_val { panic ! ( r#"assertion failed: `(left != right)`\n left: `{:?}`,\n right: `{:?}`: {}"# , &* left_val , &* right_val , $crate :: format_args ! ($($arg )+))}}}}); }
+macro_rules! __ra_macro_fixture472 {[[$data : literal ]]=>{$crate :: Expect { position : $crate :: Position { file : file ! (), line : line ! (), column : column ! (), }, data : $data , }}; [[]]=>{$crate :: expect ! [[ "" ]]}; }
+macro_rules! __ra_macro_fixture473 {( self )=>{$crate :: name :: known :: SELF_PARAM }; ( Self )=>{$crate :: name :: known :: SELF_TYPE }; ('static )=>{$crate :: name :: known :: STATIC_LIFETIME }; ($ident : ident )=>{$crate :: name :: known ::$ident }; }
+macro_rules! __ra_macro_fixture474 {()=>({ panic ! ( "internal error: entered unreachable code" )}); ($msg : expr $(,)?)=>({$crate :: unreachable ! ( "{}" , $msg )}); ($fmt : expr , $($arg : tt )*)=>({ panic ! ($crate :: concat ! ( "internal error: entered unreachable code: " , $fmt ), $($arg )*)}); }
+macro_rules! __ra_macro_fixture475 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Error , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Error , $($arg )+))}
+macro_rules! __ra_macro_fixture476 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Trace , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Trace , $($arg )+))}
+macro_rules! __ra_macro_fixture477 {($buf : expr )=>(); ($buf : expr , $lit : literal $($arg : tt )*)=>{{ use :: std :: fmt :: Write as _; let _ = :: std :: write ! ($buf , $lit $($arg )*); }}; }
+macro_rules! __ra_macro_fixture478 {( match $node : ident {$($tt : tt )* })=>{ match_ast ! ( match ($node ){$($tt )* })}; ( match ($node : expr ){$(ast ::$ast : ident ($it : ident )=>$res : expr , )* _ =>$catch_all : expr $(,)? })=>{{$(if let Some ($it )= ast ::$ast :: cast ($node . clone ()){$res } else )* {$catch_all }}}; }
+macro_rules! __ra_macro_fixture479 {($start : ident $(:: $seg : ident )*)=>({$crate :: __known_path ! ($start $(:: $seg )*); $crate :: path :: ModPath :: from_segments ($crate :: path :: PathKind :: Abs , vec ! [$crate :: path :: __name ! [$start ], $($crate :: path :: __name ! [$seg ],)* ])}); }
+macro_rules! __ra_macro_fixture480 {( core :: iter :: IntoIterator )=>{}; ( core :: iter :: Iterator )=>{}; ( core :: result :: Result )=>{}; ( core :: option :: Option )=>{}; ( core :: ops :: Range )=>{}; ( core :: ops :: RangeFrom )=>{}; ( core :: ops :: RangeFull )=>{}; ( core :: ops :: RangeTo )=>{}; ( core :: ops :: RangeToInclusive )=>{}; ( core :: ops :: RangeInclusive )=>{}; ( core :: future :: Future )=>{}; ( core :: ops :: Try )=>{}; ($path : path )=>{ compile_error ! ( "Please register your known path in the path module" )}; }
+macro_rules! __ra_macro_fixture481 {($changed : ident , ($this : ident / $def : ident ). $field : ident , $glob_imports : ident [$lookup : ident ], $def_import_type : ident )=>{{ let existing = $this .$field . entry ($lookup . 1 . clone ()); match ( existing , $def .$field ){( Entry :: Vacant ( entry ), Some (_))=>{ match $def_import_type { ImportType :: Glob =>{$glob_imports .$field . insert ($lookup . clone ()); } ImportType :: Named =>{$glob_imports .$field . remove (&$lookup ); }} if let Some ( fld )= $def .$field { entry . insert ( fld ); }$changed = true ; }( Entry :: Occupied ( mut entry ), Some (_)) if $glob_imports .$field . contains (&$lookup )&& matches ! ($def_import_type , ImportType :: Named )=>{ mark :: hit ! ( import_shadowed ); $glob_imports .$field . remove (&$lookup ); if let Some ( fld )= $def .$field { entry . insert ( fld ); }$changed = true ; }_ =>{}}}}; }
+macro_rules! __ra_macro_fixture482 {($(# $attr_args : tt )* const fn $($item : tt )* )=>{$(# $attr_args )* fn $($item )* }; ($(# $attr_args : tt )* pub const fn $($item : tt )* )=>{$(# $attr_args )* pub fn $($item )* }; ($(# $attr_args : tt )* pub const unsafe fn $($item : tt )* )=>{$(# $attr_args )* pub unsafe fn $($item )* }; }
+macro_rules! __ra_macro_fixture483 {{ type Mirror = $tinyname : ident ; $($(# [$attr : meta ])* $v : vis fn $fname : ident ($seif : ident : $seifty : ty $(,$argname : ident : $argtype : ty )*)$(-> $ret : ty )? ; )* }=>{$($(# [$attr ])* # [ inline ( always )]$v fn $fname ($seif : $seifty , $($argname : $argtype ),*)$(-> $ret )? { match $seif {$tinyname :: Inline ( i )=> i .$fname ($($argname ),*), $tinyname :: Heap ( h )=> h .$fname ($($argname ),*), }})* }; }
+macro_rules! __ra_macro_fixture484 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]@ escape $_x : tt $($t : tt )*)=>{ remove_sections_inner ! ([$($stack )*]$($t )*); }; ([$($stack : tt )*]@ section $x : ident $($t : tt )*)=>{ remove_sections_inner ! ([$($stack )*]$($t )*); }; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections_inner ! ([$($stack )* $t ]$($tail )*); }; }
+macro_rules! __ra_macro_fixture485 {($name : ident , $($field : ident ),+ $(,)*)=>( fn clone (& self )-> Self {$name {$($field : self . $field . clone ()),* }}); }
+macro_rules! __ra_macro_fixture486 {( type FreeFunctions )=>( type FreeFunctions : 'static ;); ( type TokenStream )=>( type TokenStream : 'static + Clone ;); ( type TokenStreamBuilder )=>( type TokenStreamBuilder : 'static ;); ( type TokenStreamIter )=>( type TokenStreamIter : 'static + Clone ;); ( type Group )=>( type Group : 'static + Clone ;); ( type Punct )=>( type Punct : 'static + Copy + Eq + Hash ;); ( type Ident )=>( type Ident : 'static + Copy + Eq + Hash ;); ( type Literal )=>( type Literal : 'static + Clone ;); ( type SourceFile )=>( type SourceFile : 'static + Clone ;); ( type MultiSpan )=>( type MultiSpan : 'static ;); ( type Diagnostic )=>( type Diagnostic : 'static ;); ( type Span )=>( type Span : 'static + Copy + Eq + Hash ;); ( fn drop (& mut self , $arg : ident : $arg_ty : ty ))=>( fn drop (& mut self , $arg : $arg_ty ){ mem :: drop ($arg )}); ( fn clone (& mut self , $arg : ident : $arg_ty : ty )-> $ret_ty : ty )=>( fn clone (& mut self , $arg : $arg_ty )-> $ret_ty {$arg . clone ()}); ($($item : tt )*)=>($($item )*;)}
+macro_rules! __ra_macro_fixture487 {($bit : expr , $is_fn_name : ident , $set_fn_name : ident )=>{ fn $is_fn_name (& self )-> bool { self . bools & ( 0b1 << $bit )> 0 } fn $set_fn_name (& mut self , yes : bool ){ if yes { self . bools |= 1 << $bit ; } else { self . bools &= ! ( 1 << $bit ); }}}; }
+macro_rules! __ra_macro_fixture488 {($($(# [$cfg : meta ])* fn $method : ident -> $i : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$i > {( self . 0 ).$method ()})*}}
+macro_rules! __ra_macro_fixture489 {($($(# [$cfg : meta ])* fn $method : ident ($i : ident ); )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method ( n : $i )-> Option < Self > { T ::$method ( n ). map ( Wrapping )})*}}
+macro_rules! __ra_macro_fixture490 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let min = $DstT :: MIN as $SrcT ; let max = $DstT :: MAX as $SrcT ; if size_of ::<$SrcT > ()<= size_of ::<$DstT > ()|| ( min <= * self && * self <= max ){ Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture491 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let max = $DstT :: MAX as $SrcT ; if 0 <= * self && ( size_of ::<$SrcT > ()<= size_of ::<$DstT > ()|| * self <= max ){ Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture492 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let max = $DstT :: MAX as $SrcT ; if size_of ::<$SrcT > ()< size_of ::<$DstT > ()|| * self <= max { Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture493 {($SrcT : ident : $($(# [$cfg : meta ])* fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$DstT > { let max = $DstT :: MAX as $SrcT ; if size_of ::<$SrcT > ()<= size_of ::<$DstT > ()|| * self <= max { Some (* self as $DstT )} else { None }})*}}
+macro_rules! __ra_macro_fixture494 {($f : ident : $($(# [$cfg : meta ])* fn $method : ident -> $i : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$i > { if size_of ::<$f > ()> size_of ::<$i > (){ const MIN_M1 : $f = $i :: MIN as $f - 1.0 ; const MAX_P1 : $f = $i :: MAX as $f + 1.0 ; if * self > MIN_M1 && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$i )); }} else { const MIN : $f = $i :: MIN as $f ; const MAX_P1 : $f = $i :: MAX as $f ; if * self >= MIN && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$i )); }} None })*}}
+macro_rules! __ra_macro_fixture495 {($f : ident : $($(# [$cfg : meta ])* fn $method : ident -> $u : ident ; )*)=>{$(# [ inline ]$(# [$cfg ])* fn $method (& self )-> Option <$u > { if size_of ::<$f > ()> size_of ::<$u > (){ const MAX_P1 : $f = $u :: MAX as $f + 1.0 ; if * self > - 1.0 && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$u )); }} else { const MAX_P1 : $f = $u :: MAX as $f ; if * self > - 1.0 && * self < MAX_P1 { return Some ( float_to_int_unchecked ! (* self =>$u )); }} None })*}}
+macro_rules! __ra_macro_fixture496 {($SrcT : ident : $(fn $method : ident -> $DstT : ident ; )*)=>{$(# [ inline ] fn $method (& self )-> Option <$DstT > { Some (* self as $DstT )})*}}
+macro_rules! __ra_macro_fixture497 {($($method : ident ()-> $ret : expr ; )*)=>{$(# [ inline ] fn $method ()-> Self {$ret })*}; }
+macro_rules! __ra_macro_fixture498 {($(Self :: $method : ident ( self $(, $arg : ident : $ty : ty )* )-> $ret : ty ; )*)=>{$(# [ inline ] fn $method ( self $(, $arg : $ty )* )-> $ret { Self ::$method ( self $(, $arg )* )})*}; ($($base : ident :: $method : ident ( self $(, $arg : ident : $ty : ty )* )-> $ret : ty ; )*)=>{$(# [ inline ] fn $method ( self $(, $arg : $ty )* )-> $ret {< Self as $base >::$method ( self $(, $arg )* )})*}; ($($base : ident :: $method : ident ($($arg : ident : $ty : ty ),* )-> $ret : ty ; )*)=>{$(# [ inline ] fn $method ($($arg : $ty ),* )-> $ret {< Self as $base >::$method ($($arg ),* )})*}}
+macro_rules! __ra_macro_fixture499 {($tyname : ident , $($($field : ident ).+),*)=>{ fn fmt (& self , f : & mut :: std :: fmt :: Formatter )-> :: std :: fmt :: Result { f . debug_struct ( stringify ! ($tyname ))$(. field ( stringify ! ($($field ).+), & self .$($field ).+))* . finish ()}}}
+macro_rules! __ra_macro_fixture500 {($($field : ident ),*)=>{ fn clone (& self )-> Self { Self {$($field : self .$field . clone (),)* }}}}
+macro_rules! __ra_macro_fixture501 {($($json : tt )+)=>{ json_internal ! ($($json )+)}; }
+macro_rules! __ra_macro_fixture502 {(@ array [$($elems : expr ,)*])=>{ json_internal_vec ! [$($elems ,)*]}; (@ array [$($elems : expr ),*])=>{ json_internal_vec ! [$($elems ),*]}; (@ array [$($elems : expr ,)*] null $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ( null )]$($rest )*)}; (@ array [$($elems : expr ,)*] true $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ( true )]$($rest )*)}; (@ array [$($elems : expr ,)*] false $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ( false )]$($rest )*)}; (@ array [$($elems : expr ,)*][$($array : tt )*]$($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ([$($array )*])]$($rest )*)}; (@ array [$($elems : expr ,)*]{$($map : tt )*}$($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ({$($map )*})]$($rest )*)}; (@ array [$($elems : expr ,)*]$next : expr , $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ($next ),]$($rest )*)}; (@ array [$($elems : expr ,)*]$last : expr )=>{ json_internal ! (@ array [$($elems ,)* json_internal ! ($last )])}; (@ array [$($elems : expr ),*], $($rest : tt )*)=>{ json_internal ! (@ array [$($elems ,)*]$($rest )*)}; (@ array [$($elems : expr ),*]$unexpected : tt $($rest : tt )*)=>{ json_unexpected ! ($unexpected )}; (@ object $object : ident ()()())=>{}; (@ object $object : ident [$($key : tt )+]($value : expr ), $($rest : tt )*)=>{ let _ = $object . insert (($($key )+). into (), $value ); json_internal ! (@ object $object ()($($rest )*)($($rest )*)); }; (@ object $object : ident [$($key : tt )+]($value : expr )$unexpected : tt $($rest : tt )*)=>{ json_unexpected ! ($unexpected ); }; (@ object $object : ident [$($key : tt )+]($value : expr ))=>{ let _ = $object . insert (($($key )+). into (), $value ); }; (@ object $object : ident ($($key : tt )+)(: null $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ( null ))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: true $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ( true ))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: false $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ( false ))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: [$($array : tt )*]$($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ([$($array )*]))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: {$($map : tt )*}$($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ({$($map )*}))$($rest )*); }; (@ object $object : ident ($($key : tt )+)(: $value : expr , $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ($value )), $($rest )*); }; (@ object $object : ident ($($key : tt )+)(: $value : expr )$copy : tt )=>{ json_internal ! (@ object $object [$($key )+]( json_internal ! ($value ))); }; (@ object $object : ident ($($key : tt )+)(:)$copy : tt )=>{ json_internal ! (); }; (@ object $object : ident ($($key : tt )+)()$copy : tt )=>{ json_internal ! (); }; (@ object $object : ident ()(: $($rest : tt )*)($colon : tt $($copy : tt )*))=>{ json_unexpected ! ($colon ); }; (@ object $object : ident ($($key : tt )*)(, $($rest : tt )*)($comma : tt $($copy : tt )*))=>{ json_unexpected ! 
($comma ); }; (@ object $object : ident ()(($key : expr ): $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object ($key )(: $($rest )*)(: $($rest )*)); }; (@ object $object : ident ($($key : tt )*)(: $($unexpected : tt )+)$copy : tt )=>{ json_expect_expr_comma ! ($($unexpected )+); }; (@ object $object : ident ($($key : tt )*)($tt : tt $($rest : tt )*)$copy : tt )=>{ json_internal ! (@ object $object ($($key )* $tt )($($rest )*)($($rest )*)); }; ( null )=>{$crate :: Value :: Null }; ( true )=>{$crate :: Value :: Bool ( true )}; ( false )=>{$crate :: Value :: Bool ( false )}; ([])=>{$crate :: Value :: Array ( json_internal_vec ! [])}; ([$($tt : tt )+ ])=>{$crate :: Value :: Array ( json_internal ! (@ array []$($tt )+))}; ({})=>{$crate :: Value :: Object ($crate :: Map :: new ())}; ({$($tt : tt )+ })=>{$crate :: Value :: Object ({ let mut object = $crate :: Map :: new (); json_internal ! (@ object object ()($($tt )+)($($tt )+)); object })}; ($other : expr )=>{$crate :: to_value (&$other ). unwrap ()}; }
+macro_rules! __ra_macro_fixture503 {($($content : tt )*)=>{ vec ! [$($content )*]}; }
+macro_rules! __ra_macro_fixture504 {($($cfg : tt )*)=>{}; }
+macro_rules! __ra_macro_fixture505 {($($tokens : tt )*)=>{$crate :: crossbeam_channel_internal ! ($($tokens )* )}; }
+macro_rules! __ra_macro_fixture506 {(@ list ()($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ case ($($head )*)()())}; (@ list ( default =>$($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ( default ()=>$($tail )*)($($head )*))}; (@ list ( default -> $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `=>` after `default` case, found `->`" )}; (@ list ( default $args : tt -> $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `=>` after `default` case, found `->`" )}; (@ list ( recv ($($args : tt )*)=>$($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `->` after `recv` case, found `=>`" )}; (@ list ( send ($($args : tt )*)=>$($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `->` after `send` operation, found `=>`" )}; (@ list ($case : ident $args : tt -> $res : tt -> $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "expected `=>`, found `->`" )}; (@ list ($case : ident $args : tt $(-> $res : pat )* =>$body : block ; $($tail : tt )*)($($head : tt )*))=>{ compile_error ! ( "did you mean to put a comma instead of the semicolon after `}`?" )}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : expr , $($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ($($tail )*)($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : block $($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ($($tail )*)($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : expr )($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ()($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($case : ident ($($args : tt )*)$(-> $res : pat )* =>$body : expr ,)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list ()($($head )* $case ($($args )*)$(-> $res )* =>{$body },))}; (@ list ($($tail : tt )*)($($head : tt )*))=>{$crate :: crossbeam_channel_internal ! (@ list_error1 $($tail )*)}; (@ list_error1 recv $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 recv $($tail )*)}; (@ list_error1 send $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 send $($tail )*)}; (@ list_error1 default $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 default $($tail )*)}; (@ list_error1 $t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected one of `recv`, `send`, or `default`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error1 $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error2 $($tail )*); }; (@ list_error2 $case : ident )=>{ compile_error ! ( concat ! ( "missing argument list after `" , stringify ! ($case ), "`" , ))}; (@ list_error2 $case : ident =>$($tail : tt )*)=>{ compile_error ! ( concat ! ( "missing argument list after `" , stringify ! ($case ), "`" , ))}; (@ list_error2 $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error3 $($tail )*)}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )*)=>{ compile_error ! ( concat ! ( "missing `=>` after `" , stringify ! ($case ), "` case" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>)=>{ compile_error ! ( "expected expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$body : expr ; $($tail : tt )*)=>{ compile_error ! ( concat ! 
( "did you mean to put a comma instead of the semicolon after `" , stringify ! ($body ), "`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* => recv ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( "expected an expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* => send ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( "expected an expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* => default ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( "expected an expression after `=>`" )}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "(" , stringify ! ($($a )*), ")`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ! ($($a : tt )*)$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "!(" , stringify ! ($($a )*), ")`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ! [$($a : tt )*]$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "![" , stringify ! ($($a )*), "]`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$f : ident ! {$($a : tt )*}$($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($f ), "!{" , stringify ! ($($a )*), "}`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)$(-> $r : pat )* =>$body : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "did you mean to put a comma after `" , stringify ! ($body ), "`?" , ))}; (@ list_error3 $case : ident ($($args : tt )*)-> =>$($tail : tt )*)=>{ compile_error ! ( "missing pattern after `->`" )}; (@ list_error3 $case : ident ($($args : tt )*)$t : tt $(-> $r : pat )* =>$($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected `->`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 $case : ident ($($args : tt )*)-> $t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected a pattern, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 recv ($($args : tt )*)$t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected `->`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 send ($($args : tt )*)$t : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected `->`, found `" , stringify ! ($t ), "`" , ))}; (@ list_error3 recv $args : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected an argument list after `recv`, found `" , stringify ! ($args ), "`" , ))}; (@ list_error3 send $args : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected an argument list after `send`, found `" , stringify ! ($args ), "`" , ))}; (@ list_error3 default $args : tt $($tail : tt )*)=>{ compile_error ! ( concat ! ( "expected an argument list or `=>` after `default`, found `" , stringify ! ($args ), "`" , ))}; (@ list_error3 $($tail : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list_error4 $($tail )*)}; (@ list_error4 $($tail : tt )*)=>{ compile_error ! ( "invalid syntax" )}; (@ case ()$cases : tt $default : tt )=>{$crate :: crossbeam_channel_internal ! (@ init $cases $default )}; (@ case ( recv ($r : expr )-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! 
(@ case ($($tail )*)($($cases )* recv ($r )-> $res =>$body ,)$default )}; (@ case ( recv ($r : expr ,)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)($($cases )* recv ($r )-> $res =>$body ,)$default )}; (@ case ( recv ($($args : tt )*)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "invalid argument list in `recv(" , stringify ! ($($args )*), ")`" , ))}; (@ case ( recv $t : tt $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "expected an argument list after `recv`, found `" , stringify ! ($t ), "`" , ))}; (@ case ( send ($s : expr , $m : expr )-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)($($cases )* send ($s , $m )-> $res =>$body ,)$default )}; (@ case ( send ($s : expr , $m : expr ,)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)($($cases )* send ($s , $m )-> $res =>$body ,)$default )}; (@ case ( send ($($args : tt )*)-> $res : pat =>$body : tt , $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "invalid argument list in `send(" , stringify ! ($($args )*), ")`" , ))}; (@ case ( send $t : tt $($tail : tt )*)($($cases : tt )*)$default : tt )=>{ compile_error ! ( concat ! ( "expected an argument list after `send`, found `" , stringify ! ($t ), "`" , ))}; (@ case ( default ()=>$body : tt , $($tail : tt )*)$cases : tt ())=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)$cases ( default ()=>$body ,))}; (@ case ( default ($timeout : expr )=>$body : tt , $($tail : tt )*)$cases : tt ())=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)$cases ( default ($timeout )=>$body ,))}; (@ case ( default ($timeout : expr ,)=>$body : tt , $($tail : tt )*)$cases : tt ())=>{$crate :: crossbeam_channel_internal ! (@ case ($($tail )*)$cases ( default ($timeout )=>$body ,))}; (@ case ( default $($tail : tt )*)$cases : tt ($($def : tt )+))=>{ compile_error ! ( "there can be only one `default` case in a `select!` block" )}; (@ case ( default ($($args : tt )*)=>$body : tt , $($tail : tt )*)$cases : tt $default : tt )=>{ compile_error ! ( concat ! ( "invalid argument list in `default(" , stringify ! ($($args )*), ")`" , ))}; (@ case ( default $t : tt $($tail : tt )*)$cases : tt $default : tt )=>{ compile_error ! ( concat ! ( "expected an argument list or `=>` after `default`, found `" , stringify ! ($t ), "`" , ))}; (@ case ($case : ident $($tail : tt )*)$cases : tt $default : tt )=>{ compile_error ! ( concat ! ( "expected one of `recv`, `send`, or `default`, found `" , stringify ! ($case ), "`" , ))}; (@ init ( recv ($r : expr )-> $res : pat =>$recv_body : tt ,)( default ()=>$default_body : tt ,))=>{{ match $r { ref _r =>{ let _r : &$crate :: Receiver <_> = _r ; match _r . try_recv (){:: std :: result :: Result :: Err ($crate :: TryRecvError :: Empty )=>{$default_body } _res =>{ let _res = _res . map_err (|_| $crate :: RecvError ); let $res = _res ; $recv_body }}}}}}; (@ init ( recv ($r : expr )-> $res : pat =>$body : tt ,)())=>{{ match $r { ref _r =>{ let _r : &$crate :: Receiver <_> = _r ; let _res = _r . 
recv (); let $res = _res ; $body }}}}; (@ init ( recv ($r : expr )-> $res : pat =>$recv_body : tt ,)( default ($timeout : expr )=>$default_body : tt ,))=>{{ match $r { ref _r =>{ let _r : &$crate :: Receiver <_> = _r ; match _r . recv_timeout ($timeout ){:: std :: result :: Result :: Err ($crate :: RecvTimeoutError :: Timeout )=>{$default_body } _res =>{ let _res = _res . map_err (|_| $crate :: RecvError ); let $res = _res ; $recv_body }}}}}}; (@ init ($($cases : tt )*)$default : tt )=>{{ const _LEN : usize = $crate :: crossbeam_channel_internal ! (@ count ($($cases )*)); let _handle : &$crate :: internal :: SelectHandle = &$crate :: never ::< ()> (); # [ allow ( unused_mut )] let mut _sel = [( _handle , 0 , :: std :: ptr :: null ()); _LEN ]; $crate :: crossbeam_channel_internal ! (@ add _sel ($($cases )*)$default (( 0usize _oper0 )( 1usize _oper1 )( 2usize _oper2 )( 3usize _oper3 )( 4usize _oper4 )( 5usize _oper5 )( 6usize _oper6 )( 7usize _oper7 )( 8usize _oper8 )( 9usize _oper9 )( 10usize _oper10 )( 11usize _oper11 )( 12usize _oper12 )( 13usize _oper13 )( 14usize _oper14 )( 15usize _oper15 )( 16usize _oper16 )( 17usize _oper17 )( 18usize _oper18 )( 19usize _oper19 )( 20usize _oper20 )( 21usize _oper21 )( 22usize _oper22 )( 23usize _oper23 )( 24usize _oper24 )( 25usize _oper25 )( 26usize _oper26 )( 27usize _oper27 )( 28usize _oper28 )( 29usize _oper29 )( 30usize _oper30 )( 31usize _oper31 ))())}}; (@ count ())=>{ 0 }; (@ count ($oper : ident $args : tt -> $res : pat =>$body : tt , $($cases : tt )*))=>{ 1 + $crate :: crossbeam_channel_internal ! (@ count ($($cases )*))}; (@ add $sel : ident ()()$labels : tt $cases : tt )=>{{ let _oper : $crate :: SelectedOperation < '_ > = { let _oper = $crate :: internal :: select (& mut $sel ); unsafe {:: std :: mem :: transmute ( _oper )}}; $crate :: crossbeam_channel_internal ! {@ complete $sel _oper $cases }}}; (@ add $sel : ident ()( default ()=>$body : tt ,)$labels : tt $cases : tt )=>{{ let _oper : :: std :: option :: Option <$crate :: SelectedOperation < '_ >> = { let _oper = $crate :: internal :: try_select (& mut $sel ); unsafe {:: std :: mem :: transmute ( _oper )}}; match _oper { None =>{{$sel }; $body } Some ( _oper )=>{$crate :: crossbeam_channel_internal ! {@ complete $sel _oper $cases }}}}}; (@ add $sel : ident ()( default ($timeout : expr )=>$body : tt ,)$labels : tt $cases : tt )=>{{ let _oper : :: std :: option :: Option <$crate :: SelectedOperation < '_ >> = { let _oper = $crate :: internal :: select_timeout (& mut $sel , $timeout ); unsafe {:: std :: mem :: transmute ( _oper )}}; match _oper {:: std :: option :: Option :: None =>{{$sel }; $body }:: std :: option :: Option :: Some ( _oper )=>{$crate :: crossbeam_channel_internal ! {@ complete $sel _oper $cases }}}}}; (@ add $sel : ident $input : tt $default : tt ()$cases : tt )=>{ compile_error ! ( "too many operations in a `select!` block" )}; (@ add $sel : ident ( recv ($r : expr )-> $res : pat =>$body : tt , $($tail : tt )*)$default : tt (($i : tt $var : ident )$($labels : tt )*)($($cases : tt )*))=>{{ match $r { ref _r =>{ let $var : &$crate :: Receiver <_> = unsafe { let _r : &$crate :: Receiver <_> = _r ; unsafe fn unbind < 'a , T > ( x : & T )-> & 'a T {:: std :: mem :: transmute ( x )} unbind ( _r )}; $sel [$i ]= ($var , $i , $var as * const $crate :: Receiver <_> as * const u8 ); $crate :: crossbeam_channel_internal ! 
(@ add $sel ($($tail )*)$default ($($labels )*)($($cases )* [$i ] recv ($var )-> $res =>$body ,))}}}}; (@ add $sel : ident ( send ($s : expr , $m : expr )-> $res : pat =>$body : tt , $($tail : tt )*)$default : tt (($i : tt $var : ident )$($labels : tt )*)($($cases : tt )*))=>{{ match $s { ref _s =>{ let $var : &$crate :: Sender <_> = unsafe { let _s : &$crate :: Sender <_> = _s ; unsafe fn unbind < 'a , T > ( x : & T )-> & 'a T {:: std :: mem :: transmute ( x )} unbind ( _s )}; $sel [$i ]= ($var , $i , $var as * const $crate :: Sender <_> as * const u8 ); $crate :: crossbeam_channel_internal ! (@ add $sel ($($tail )*)$default ($($labels )*)($($cases )* [$i ] send ($var , $m )-> $res =>$body ,))}}}}; (@ complete $sel : ident $oper : ident ([$i : tt ] recv ($r : ident )-> $res : pat =>$body : tt , $($tail : tt )*))=>{{ if $oper . index ()== $i { let _res = $oper . recv ($r ); {$sel }; let $res = _res ; $body } else {$crate :: crossbeam_channel_internal ! {@ complete $sel $oper ($($tail )*)}}}}; (@ complete $sel : ident $oper : ident ([$i : tt ] send ($s : ident , $m : expr )-> $res : pat =>$body : tt , $($tail : tt )*))=>{{ if $oper . index ()== $i { let _res = $oper . send ($s , $m ); {$sel }; let $res = _res ; $body } else {$crate :: crossbeam_channel_internal ! {@ complete $sel $oper ($($tail )*)}}}}; (@ complete $sel : ident $oper : ident ())=>{{ unreachable ! ( "internal error in crossbeam-channel: invalid case" )}}; (@$($tokens : tt )*)=>{ compile_error ! ( concat ! ( "internal error in crossbeam-channel: " , stringify ! (@$($tokens )*), ))}; ()=>{ compile_error ! ( "empty `select!` block" )}; ($($case : ident $(($($args : tt )*))* =>$body : expr $(,)*)*)=>{$crate :: crossbeam_channel_internal ! (@ list ($($case $(($($args )*))* =>{$body },)*)())}; ($($tokens : tt )*)=>{$crate :: crossbeam_channel_internal ! (@ list ($($tokens )*)())}; }
+macro_rules! __ra_macro_fixture507 {($($tokens : tt )*)=>{ return Err ( crate :: errors :: error ! ($($tokens )*))}}
+macro_rules! __ra_macro_fixture508 {($fmt : expr )=>{$crate :: SsrError :: new ( format ! ($fmt ))}; ($fmt : expr , $($arg : tt )+)=>{$crate :: SsrError :: new ( format ! ($fmt , $($arg )+))}}
+macro_rules! __ra_macro_fixture509 {[;]=>{$crate :: SyntaxKind :: SEMICOLON }; [,]=>{$crate :: SyntaxKind :: COMMA }; [ '(' ]=>{$crate :: SyntaxKind :: L_PAREN }; [ ')' ]=>{$crate :: SyntaxKind :: R_PAREN }; [ '{' ]=>{$crate :: SyntaxKind :: L_CURLY }; [ '}' ]=>{$crate :: SyntaxKind :: R_CURLY }; [ '[' ]=>{$crate :: SyntaxKind :: L_BRACK }; [ ']' ]=>{$crate :: SyntaxKind :: R_BRACK }; [<]=>{$crate :: SyntaxKind :: L_ANGLE }; [>]=>{$crate :: SyntaxKind :: R_ANGLE }; [@]=>{$crate :: SyntaxKind :: AT }; [#]=>{$crate :: SyntaxKind :: POUND }; [~]=>{$crate :: SyntaxKind :: TILDE }; [?]=>{$crate :: SyntaxKind :: QUESTION }; [$]=>{$crate :: SyntaxKind :: DOLLAR }; [&]=>{$crate :: SyntaxKind :: AMP }; [|]=>{$crate :: SyntaxKind :: PIPE }; [+]=>{$crate :: SyntaxKind :: PLUS }; [*]=>{$crate :: SyntaxKind :: STAR }; [/]=>{$crate :: SyntaxKind :: SLASH }; [^]=>{$crate :: SyntaxKind :: CARET }; [%]=>{$crate :: SyntaxKind :: PERCENT }; [_]=>{$crate :: SyntaxKind :: UNDERSCORE }; [.]=>{$crate :: SyntaxKind :: DOT }; [..]=>{$crate :: SyntaxKind :: DOT2 }; [...]=>{$crate :: SyntaxKind :: DOT3 }; [..=]=>{$crate :: SyntaxKind :: DOT2EQ }; [:]=>{$crate :: SyntaxKind :: COLON }; [::]=>{$crate :: SyntaxKind :: COLON2 }; [=]=>{$crate :: SyntaxKind :: EQ }; [==]=>{$crate :: SyntaxKind :: EQ2 }; [=>]=>{$crate :: SyntaxKind :: FAT_ARROW }; [!]=>{$crate :: SyntaxKind :: BANG }; [!=]=>{$crate :: SyntaxKind :: NEQ }; [-]=>{$crate :: SyntaxKind :: MINUS }; [->]=>{$crate :: SyntaxKind :: THIN_ARROW }; [<=]=>{$crate :: SyntaxKind :: LTEQ }; [>=]=>{$crate :: SyntaxKind :: GTEQ }; [+=]=>{$crate :: SyntaxKind :: PLUSEQ }; [-=]=>{$crate :: SyntaxKind :: MINUSEQ }; [|=]=>{$crate :: SyntaxKind :: PIPEEQ }; [&=]=>{$crate :: SyntaxKind :: AMPEQ }; [^=]=>{$crate :: SyntaxKind :: CARETEQ }; [/=]=>{$crate :: SyntaxKind :: SLASHEQ }; [*=]=>{$crate :: SyntaxKind :: STAREQ }; [%=]=>{$crate :: SyntaxKind :: PERCENTEQ }; [&&]=>{$crate :: SyntaxKind :: AMP2 }; [||]=>{$crate :: SyntaxKind :: PIPE2 }; [<<]=>{$crate :: SyntaxKind :: SHL }; [>>]=>{$crate :: SyntaxKind :: SHR }; [<<=]=>{$crate :: SyntaxKind :: SHLEQ }; [>>=]=>{$crate :: SyntaxKind :: SHREQ }; [ as ]=>{$crate :: SyntaxKind :: AS_KW }; [ async ]=>{$crate :: SyntaxKind :: ASYNC_KW }; [ await ]=>{$crate :: SyntaxKind :: AWAIT_KW }; [ box ]=>{$crate :: SyntaxKind :: BOX_KW }; [ break ]=>{$crate :: SyntaxKind :: BREAK_KW }; [ const ]=>{$crate :: SyntaxKind :: CONST_KW }; [ continue ]=>{$crate :: SyntaxKind :: CONTINUE_KW }; [ crate ]=>{$crate :: SyntaxKind :: CRATE_KW }; [ dyn ]=>{$crate :: SyntaxKind :: DYN_KW }; [ else ]=>{$crate :: SyntaxKind :: ELSE_KW }; [ enum ]=>{$crate :: SyntaxKind :: ENUM_KW }; [ extern ]=>{$crate :: SyntaxKind :: EXTERN_KW }; [ false ]=>{$crate :: SyntaxKind :: FALSE_KW }; [ fn ]=>{$crate :: SyntaxKind :: FN_KW }; [ for ]=>{$crate :: SyntaxKind :: FOR_KW }; [ if ]=>{$crate :: SyntaxKind :: IF_KW }; [ impl ]=>{$crate :: SyntaxKind :: IMPL_KW }; [ in ]=>{$crate :: SyntaxKind :: IN_KW }; [ let ]=>{$crate :: SyntaxKind :: LET_KW }; [ loop ]=>{$crate :: SyntaxKind :: LOOP_KW }; [ macro ]=>{$crate :: SyntaxKind :: MACRO_KW }; [ match ]=>{$crate :: SyntaxKind :: MATCH_KW }; [ mod ]=>{$crate :: SyntaxKind :: MOD_KW }; [ move ]=>{$crate :: SyntaxKind :: MOVE_KW }; [ mut ]=>{$crate :: SyntaxKind :: MUT_KW }; [ pub ]=>{$crate :: SyntaxKind :: PUB_KW }; [ ref ]=>{$crate :: SyntaxKind :: REF_KW }; [ return ]=>{$crate :: SyntaxKind :: RETURN_KW }; [ self ]=>{$crate :: SyntaxKind :: SELF_KW }; [ static ]=>{$crate :: SyntaxKind :: STATIC_KW }; [ struct ]=>{$crate :: 
SyntaxKind :: STRUCT_KW }; [ super ]=>{$crate :: SyntaxKind :: SUPER_KW }; [ trait ]=>{$crate :: SyntaxKind :: TRAIT_KW }; [ true ]=>{$crate :: SyntaxKind :: TRUE_KW }; [ try ]=>{$crate :: SyntaxKind :: TRY_KW }; [ type ]=>{$crate :: SyntaxKind :: TYPE_KW }; [ unsafe ]=>{$crate :: SyntaxKind :: UNSAFE_KW }; [ use ]=>{$crate :: SyntaxKind :: USE_KW }; [ where ]=>{$crate :: SyntaxKind :: WHERE_KW }; [ while ]=>{$crate :: SyntaxKind :: WHILE_KW }; [ yield ]=>{$crate :: SyntaxKind :: YIELD_KW }; [ auto ]=>{$crate :: SyntaxKind :: AUTO_KW }; [ default ]=>{$crate :: SyntaxKind :: DEFAULT_KW }; [ existential ]=>{$crate :: SyntaxKind :: EXISTENTIAL_KW }; [ union ]=>{$crate :: SyntaxKind :: UNION_KW }; [ raw ]=>{$crate :: SyntaxKind :: RAW_KW }; [ macro_rules ]=>{$crate :: SyntaxKind :: MACRO_RULES_KW }; [ lifetime_ident ]=>{$crate :: SyntaxKind :: LIFETIME_IDENT }; [ ident ]=>{$crate :: SyntaxKind :: IDENT }; [ shebang ]=>{$crate :: SyntaxKind :: SHEBANG }; }
+macro_rules! __ra_macro_fixture510 {($($args : tt )*)=>{ return Err ( match_error ! ($($args )*))}; }
+macro_rules! __ra_macro_fixture511 {($e : expr )=>{{ MatchFailed { reason : if recording_match_fail_reasons (){ Some ( format ! ( "{}" , $e ))} else { None }}}}; ($fmt : expr , $($arg : tt )+)=>{{ MatchFailed { reason : if recording_match_fail_reasons (){ Some ( format ! ($fmt , $($arg )+))} else { None }}}}; }
+macro_rules! __ra_macro_fixture512 {()=>($crate :: print ! ( "\n" )); ($($arg : tt )*)=>({$crate :: io :: _print ($crate :: format_args_nl ! ($($arg )*)); })}
+macro_rules! __ra_macro_fixture513 {($cmd : tt )=>{{# [ cfg ( trick_rust_analyzer_into_highlighting_interpolated_bits )] format_args ! ($cmd ); use $crate :: Cmd as __CMD ; let cmd : $crate :: Cmd = $crate :: __cmd ! ( __CMD $cmd ); cmd }}; }
+macro_rules! __ra_macro_fixture514 {($reader : ident , $s : ident ;)=>{}; ($reader : ident , $s : ident ; $first : ident : $first_ty : ty $(, $rest : ident : $rest_ty : ty )*)=>{ reverse_decode ! ($reader , $s ; $($rest : $rest_ty ),*); let $first = <$first_ty >:: decode (& mut $reader , $s ); }}
+macro_rules! __ra_macro_fixture515 {($kind : ident , $($ty : ty ),*)=>{ match $kind {$(stringify ! ($ty )=>{ let n : $ty = n . parse (). unwrap (); format ! ( concat ! ( "{}" , stringify ! ($ty )), n )})* _ => unimplemented ! ( "unknown args for typed_integer: n {}, kind {}" , n , $kind ), }}}
+macro_rules! __ra_macro_fixture516 {()=>( panic ! ( "not implemented" )); ($($arg : tt )+)=>( panic ! ( "not implemented: {}" , $crate :: format_args ! ($($arg )+))); }
+macro_rules! __ra_macro_fixture517 {($cond : expr )=>{{ let cond = !$crate :: always ! (!$cond ); cond }}; ($cond : expr , $fmt : literal $($arg : tt )*)=>{{ let cond = !$crate :: always ! (!$cond , $fmt $($arg )*); cond }}; }
+macro_rules! __ra_macro_fixture518 {($cond : expr )=>{$crate :: always ! ($cond , "assertion failed: {}" , stringify ! ($cond ))}; ($cond : expr , $fmt : literal $($arg : tt )*)=>{{ let cond = $cond ; if cfg ! ( debug_assertions )|| $crate :: __FORCE { assert ! ( cond , $fmt $($arg )*); } if ! cond {$crate :: __log_error ! ($fmt $($arg )*); } cond }}; }
+macro_rules! __ra_macro_fixture519 {($msg : literal $(,)?)=>{ return $crate :: private :: Err ($crate :: anyhow ! ($msg ))}; ($err : expr $(,)?)=>{ return $crate :: private :: Err ($crate :: anyhow ! ($err ))}; ($fmt : expr , $($arg : tt )*)=>{ return $crate :: private :: Err ($crate :: anyhow ! ($fmt , $($arg )*))}; }
+macro_rules! __ra_macro_fixture520 {($msg : literal $(,)?)=>{$crate :: private :: new_adhoc ($msg )}; ($err : expr $(,)?)=>({ use $crate :: private :: kind ::*; match $err { error =>(& error ). anyhow_kind (). new ( error ), }}); ($fmt : expr , $($arg : tt )*)=>{$crate :: private :: new_adhoc ( format ! ($fmt , $($arg )*))}; }
+macro_rules! __ra_macro_fixture521 {( target : $target : expr , $($arg : tt )+)=>( log ! ( target : $target , $crate :: Level :: Info , $($arg )+)); ($($arg : tt )+)=>( log ! ($crate :: Level :: Info , $($arg )+))}
+macro_rules! __ra_macro_fixture522 {[$($sl : expr , $sc : expr ; $el : expr , $ec : expr =>$text : expr ),+]=>{ vec ! [$(TextDocumentContentChangeEvent { range : Some ( Range { start : Position { line : $sl , character : $sc }, end : Position { line : $el , character : $ec }, }), range_length : None , text : String :: from ($text ), }),+]}; }
+macro_rules! __ra_macro_fixture523 {[$path : expr ]=>{$crate :: ExpectFile { path : std :: path :: PathBuf :: from ($path ), position : file ! (), }}; }
+macro_rules! __ra_macro_fixture524 {($($key : literal : $value : tt ),*$(,)?)=>{{$(map . insert ($key . into (), serde_json :: json ! ($value )); )*}}; }
+macro_rules! __ra_macro_fixture525 {($expr : expr , $or : expr )=>{ try_ ! ($expr ). unwrap_or ($or )}; }
+macro_rules! __ra_macro_fixture526 {($expr : expr )=>{|| -> _ { Some ($expr )}()}; }
+macro_rules! __ra_macro_fixture527 {($($arg : tt )*)=>($crate :: io :: _print ($crate :: format_args ! ($($arg )*))); }
+macro_rules! __ra_macro_fixture528 {($fmt : literal , $($tt : tt ),*)=>{ mbe :: ExpandError :: ProcMacroError ( tt :: ExpansionError :: Unknown ( format ! ($fmt , $($tt ),*)))}; ($fmt : literal )=>{ mbe :: ExpandError :: ProcMacroError ( tt :: ExpansionError :: Unknown ($fmt . to_string ()))}}
+macro_rules! __ra_macro_fixture529 {($($tt : tt )* )=>{$crate :: quote :: IntoTt :: to_subtree ($crate :: __quote ! ($($tt )*))}}
+macro_rules! __ra_macro_fixture530 {()=>{ Vec ::< tt :: TokenTree >:: new ()}; (@ SUBTREE $delim : ident $($tt : tt )* )=>{{ let children = $crate :: __quote ! ($($tt )*); tt :: Subtree { delimiter : Some ( tt :: Delimiter { kind : tt :: DelimiterKind ::$delim , id : tt :: TokenId :: unspecified (), }), token_trees : $crate :: quote :: IntoTt :: to_tokens ( children ), }}}; (@ PUNCT $first : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (@ PUNCT $first : literal , $sec : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Joint , id : tt :: TokenId :: unspecified (), }). into (), tt :: Leaf :: Punct ( tt :: Punct { char : $sec , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (# $first : ident $($tail : tt )* )=>{{ let token = $crate :: quote :: ToTokenTree :: to_token ($first ); let mut tokens = vec ! [ token . into ()]; let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; (## $first : ident $($tail : tt )* )=>{{ let mut tokens = $first . into_iter (). map ($crate :: quote :: ToTokenTree :: to_token ). collect ::< Vec < tt :: TokenTree >> (); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; ({$($tt : tt )* })=>{$crate :: __quote ! (@ SUBTREE Brace $($tt )*)}; ([$($tt : tt )* ])=>{$crate :: __quote ! (@ SUBTREE Bracket $($tt )*)}; (($($tt : tt )* ))=>{$crate :: __quote ! (@ SUBTREE Parenthesis $($tt )*)}; ($tt : literal )=>{ vec ! [$crate :: quote :: ToTokenTree :: to_token ($tt ). into ()]}; ($tt : ident )=>{ vec ! [{ tt :: Leaf :: Ident ( tt :: Ident { text : stringify ! ($tt ). into (), id : tt :: TokenId :: unspecified (), }). into ()}]}; (-> )=>{$crate :: __quote ! (@ PUNCT '-' , '>' )}; (& )=>{$crate :: __quote ! (@ PUNCT '&' )}; (, )=>{$crate :: __quote ! (@ PUNCT ',' )}; (: )=>{$crate :: __quote ! (@ PUNCT ':' )}; (; )=>{$crate :: __quote ! (@ PUNCT ';' )}; (:: )=>{$crate :: __quote ! (@ PUNCT ':' , ':' )}; (. )=>{$crate :: __quote ! (@ PUNCT '.' )}; (< )=>{$crate :: __quote ! (@ PUNCT '<' )}; (> )=>{$crate :: __quote ! (@ PUNCT '>' )}; ($first : tt $($tail : tt )+ )=>{{ let mut tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($first )); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; }
+macro_rules! __ra_macro_fixture531 {($($name : ident )*)=>{$(if let Some ( it )= & self .$name { f . field ( stringify ! ($name ), it ); })*}}
+macro_rules! __ra_macro_fixture532 {($fmt : expr )=>{ RenameError ( format ! ($fmt ))}; ($fmt : expr , $($arg : tt )+)=>{ RenameError ( format ! ($fmt , $($arg )+))}}
+macro_rules! __ra_macro_fixture533 {($($tokens : tt )*)=>{ return Err ( format_err ! ($($tokens )*))}}
+macro_rules! __ra_macro_fixture534 {()=>{$crate :: __private :: TokenStream :: new ()}; ($($tt : tt )*)=>{{ let mut _s = $crate :: __private :: TokenStream :: new (); $crate :: quote_each_token ! ( _s $($tt )*); _s }}; }
+macro_rules! __ra_macro_fixture535 {($tokens : ident $($tts : tt )*)=>{$crate :: quote_tokens_with_context ! ($tokens (@ @ @ @ @ @ $($tts )*)(@ @ @ @ @ $($tts )* @)(@ @ @ @ $($tts )* @ @)(@ @ @ $(($tts ))* @ @ @)(@ @ $($tts )* @ @ @ @)(@ $($tts )* @ @ @ @ @)($($tts )* @ @ @ @ @ @)); }; }
+macro_rules! __ra_macro_fixture536 {($tokens : ident ($($b3 : tt )*)($($b2 : tt )*)($($b1 : tt )*)($($curr : tt )*)($($a1 : tt )*)($($a2 : tt )*)($($a3 : tt )*))=>{$($crate :: quote_token_with_context ! ($tokens $b3 $b2 $b1 $curr $a1 $a2 $a3 ); )* }; }
+macro_rules! __ra_macro_fixture537 {($tokens : ident $b3 : tt $b2 : tt $b1 : tt @ $a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt (#)($($inner : tt )* )* $a3 : tt )=>{{ use $crate :: __private :: ext ::*; let has_iter = $crate :: __private :: ThereIsNoIteratorInRepetition ; $crate :: pounded_var_names ! ( quote_bind_into_iter ! ( has_iter )()$($inner )*); let _: $crate :: __private :: HasIterator = has_iter ; while true {$crate :: pounded_var_names ! ( quote_bind_next_or_break ! ()()$($inner )*); $crate :: quote_each_token ! ($tokens $($inner )*); }}}; ($tokens : ident $b3 : tt $b2 : tt # (($($inner : tt )* ))* $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt # ($($inner : tt )* )(*)$a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt (#)($($inner : tt )* )$sep : tt *)=>{{ use $crate :: __private :: ext ::*; let mut _i = 0usize ; let has_iter = $crate :: __private :: ThereIsNoIteratorInRepetition ; $crate :: pounded_var_names ! ( quote_bind_into_iter ! ( has_iter )()$($inner )*); let _: $crate :: __private :: HasIterator = has_iter ; while true {$crate :: pounded_var_names ! ( quote_bind_next_or_break ! ()()$($inner )*); if _i > 0 {$crate :: quote_token ! ($tokens $sep ); } _i += 1 ; $crate :: quote_each_token ! ($tokens $($inner )*); }}}; ($tokens : ident $b3 : tt $b2 : tt # (($($inner : tt )* ))$sep : tt * $a3 : tt )=>{}; ($tokens : ident $b3 : tt # ($($inner : tt )* )($sep : tt )* $a2 : tt $a3 : tt )=>{}; ($tokens : ident # ($($inner : tt )* )* (*)$a1 : tt $a2 : tt $a3 : tt )=>{$crate :: quote_token ! ($tokens *); }; ($tokens : ident # ($($inner : tt )* )$sep : tt (*)$a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt (#)$var : ident $a2 : tt $a3 : tt )=>{$crate :: ToTokens :: to_tokens (&$var , & mut $tokens ); }; ($tokens : ident $b3 : tt $b2 : tt # ($var : ident )$a1 : tt $a2 : tt $a3 : tt )=>{}; ($tokens : ident $b3 : tt $b2 : tt $b1 : tt ($curr : tt )$a1 : tt $a2 : tt $a3 : tt )=>{$crate :: quote_token ! ($tokens $curr ); }; }
+macro_rules! __ra_macro_fixture538 {($tokens : ident ($($inner : tt )* ))=>{$crate :: __private :: push_group (& mut $tokens , $crate :: __private :: Delimiter :: Parenthesis , $crate :: quote ! ($($inner )*), ); }; ($tokens : ident [$($inner : tt )* ])=>{$crate :: __private :: push_group (& mut $tokens , $crate :: __private :: Delimiter :: Bracket , $crate :: quote ! ($($inner )*), ); }; ($tokens : ident {$($inner : tt )* })=>{$crate :: __private :: push_group (& mut $tokens , $crate :: __private :: Delimiter :: Brace , $crate :: quote ! ($($inner )*), ); }; ($tokens : ident +)=>{$crate :: __private :: push_add (& mut $tokens ); }; ($tokens : ident +=)=>{$crate :: __private :: push_add_eq (& mut $tokens ); }; ($tokens : ident &)=>{$crate :: __private :: push_and (& mut $tokens ); }; ($tokens : ident &&)=>{$crate :: __private :: push_and_and (& mut $tokens ); }; ($tokens : ident &=)=>{$crate :: __private :: push_and_eq (& mut $tokens ); }; ($tokens : ident @)=>{$crate :: __private :: push_at (& mut $tokens ); }; ($tokens : ident !)=>{$crate :: __private :: push_bang (& mut $tokens ); }; ($tokens : ident ^)=>{$crate :: __private :: push_caret (& mut $tokens ); }; ($tokens : ident ^=)=>{$crate :: __private :: push_caret_eq (& mut $tokens ); }; ($tokens : ident :)=>{$crate :: __private :: push_colon (& mut $tokens ); }; ($tokens : ident ::)=>{$crate :: __private :: push_colon2 (& mut $tokens ); }; ($tokens : ident ,)=>{$crate :: __private :: push_comma (& mut $tokens ); }; ($tokens : ident /)=>{$crate :: __private :: push_div (& mut $tokens ); }; ($tokens : ident /=)=>{$crate :: __private :: push_div_eq (& mut $tokens ); }; ($tokens : ident .)=>{$crate :: __private :: push_dot (& mut $tokens ); }; ($tokens : ident ..)=>{$crate :: __private :: push_dot2 (& mut $tokens ); }; ($tokens : ident ...)=>{$crate :: __private :: push_dot3 (& mut $tokens ); }; ($tokens : ident ..=)=>{$crate :: __private :: push_dot_dot_eq (& mut $tokens ); }; ($tokens : ident =)=>{$crate :: __private :: push_eq (& mut $tokens ); }; ($tokens : ident ==)=>{$crate :: __private :: push_eq_eq (& mut $tokens ); }; ($tokens : ident >=)=>{$crate :: __private :: push_ge (& mut $tokens ); }; ($tokens : ident >)=>{$crate :: __private :: push_gt (& mut $tokens ); }; ($tokens : ident <=)=>{$crate :: __private :: push_le (& mut $tokens ); }; ($tokens : ident <)=>{$crate :: __private :: push_lt (& mut $tokens ); }; ($tokens : ident *=)=>{$crate :: __private :: push_mul_eq (& mut $tokens ); }; ($tokens : ident !=)=>{$crate :: __private :: push_ne (& mut $tokens ); }; ($tokens : ident |)=>{$crate :: __private :: push_or (& mut $tokens ); }; ($tokens : ident |=)=>{$crate :: __private :: push_or_eq (& mut $tokens ); }; ($tokens : ident ||)=>{$crate :: __private :: push_or_or (& mut $tokens ); }; ($tokens : ident #)=>{$crate :: __private :: push_pound (& mut $tokens ); }; ($tokens : ident ?)=>{$crate :: __private :: push_question (& mut $tokens ); }; ($tokens : ident ->)=>{$crate :: __private :: push_rarrow (& mut $tokens ); }; ($tokens : ident <-)=>{$crate :: __private :: push_larrow (& mut $tokens ); }; ($tokens : ident %)=>{$crate :: __private :: push_rem (& mut $tokens ); }; ($tokens : ident %=)=>{$crate :: __private :: push_rem_eq (& mut $tokens ); }; ($tokens : ident =>)=>{$crate :: __private :: push_fat_arrow (& mut $tokens ); }; ($tokens : ident ;)=>{$crate :: __private :: push_semi (& mut $tokens ); }; ($tokens : ident <<)=>{$crate :: __private :: push_shl (& mut $tokens ); }; ($tokens : ident <<=)=>{$crate :: __private :: 
push_shl_eq (& mut $tokens ); }; ($tokens : ident >>)=>{$crate :: __private :: push_shr (& mut $tokens ); }; ($tokens : ident >>=)=>{$crate :: __private :: push_shr_eq (& mut $tokens ); }; ($tokens : ident *)=>{$crate :: __private :: push_star (& mut $tokens ); }; ($tokens : ident -)=>{$crate :: __private :: push_sub (& mut $tokens ); }; ($tokens : ident -=)=>{$crate :: __private :: push_sub_eq (& mut $tokens ); }; ($tokens : ident $ident : ident )=>{$crate :: __private :: push_ident (& mut $tokens , stringify ! ($ident )); }; ($tokens : ident $other : tt )=>{$crate :: __private :: parse (& mut $tokens , stringify ! ($other )); }; }
+macro_rules! __ra_macro_fixture539 {($call : ident ! $extra : tt $($tts : tt )*)=>{$crate :: pounded_var_names_with_context ! ($call ! $extra (@ $($tts )*)($($tts )* @))}; }
+macro_rules! __ra_macro_fixture540 {($call : ident ! $extra : tt ($($b1 : tt )*)($($curr : tt )*))=>{$($crate :: pounded_var_with_context ! ($call ! $extra $b1 $curr ); )* }; }
+macro_rules! __ra_macro_fixture541 {($call : ident ! $extra : tt $b1 : tt ($($inner : tt )* ))=>{$crate :: pounded_var_names ! ($call ! $extra $($inner )*); }; ($call : ident ! $extra : tt $b1 : tt [$($inner : tt )* ])=>{$crate :: pounded_var_names ! ($call ! $extra $($inner )*); }; ($call : ident ! $extra : tt $b1 : tt {$($inner : tt )* })=>{$crate :: pounded_var_names ! ($call ! $extra $($inner )*); }; ($call : ident ! ($($extra : tt )*)# $var : ident )=>{$crate ::$call ! ($($extra )* $var ); }; ($call : ident ! $extra : tt $b1 : tt $curr : tt )=>{}; }
+macro_rules! __ra_macro_fixture542 {($has_iter : ident $var : ident )=>{# [ allow ( unused_mut )] let ( mut $var , i )= $var . quote_into_iter (); let $has_iter = $has_iter | i ; }; }
+macro_rules! __ra_macro_fixture543 {($var : ident )=>{ let $var = match $var . next (){ Some ( _x )=>$crate :: __private :: RepInterp ( _x ), None => break , }; }; }
+macro_rules! __ra_macro_fixture544 {($fmt : expr )=>{$crate :: format_ident_impl ! ([:: std :: option :: Option :: None , $fmt ])}; ($fmt : expr , $($rest : tt )*)=>{$crate :: format_ident_impl ! ([:: std :: option :: Option :: None , $fmt ]$($rest )*)}; }
+macro_rules! __ra_macro_fixture545 {([$span : expr , $($fmt : tt )*])=>{$crate :: __private :: mk_ident (& format ! ($($fmt )*), $span )}; ([$old : expr , $($fmt : tt )*] span = $span : expr )=>{$crate :: format_ident_impl ! ([$old , $($fmt )*] span = $span ,)}; ([$old : expr , $($fmt : tt )*] span = $span : expr , $($rest : tt )*)=>{$crate :: format_ident_impl ! ([:: std :: option :: Option :: Some ::<$crate :: __private :: Span > ($span ), $($fmt )* ]$($rest )*)}; ([$span : expr , $($fmt : tt )*]$name : ident = $arg : expr )=>{$crate :: format_ident_impl ! ([$span , $($fmt )*]$name = $arg ,)}; ([$span : expr , $($fmt : tt )*]$name : ident = $arg : expr , $($rest : tt )*)=>{ match $crate :: __private :: IdentFragmentAdapter (&$arg ){ arg =>$crate :: format_ident_impl ! ([$span . or ( arg . span ()), $($fmt )*, $name = arg ]$($rest )*), }}; ([$span : expr , $($fmt : tt )*]$arg : expr )=>{$crate :: format_ident_impl ! ([$span , $($fmt )*]$arg ,)}; ([$span : expr , $($fmt : tt )*]$arg : expr , $($rest : tt )*)=>{ match $crate :: __private :: IdentFragmentAdapter (&$arg ){ arg =>$crate :: format_ident_impl ! ([$span . or ( arg . span ()), $($fmt )*, arg ]$($rest )*), }}; }
+macro_rules! __ra_macro_fixture546 {()=>( panic ! ( "not yet implemented" )); ($($arg : tt )+)=>( panic ! ( "not yet implemented: {}" , $crate :: format_args ! ($($arg )+))); }
+macro_rules! __ra_macro_fixture547 {($($name : expr ),+ $(,)?)=>{{ let mut v = ArrayVec ::< [ LangItemTarget ; 2 ]>:: new (); $(v . extend ( db . lang_item ( cur_crate , $name . into ())); )+ v }}; }
+macro_rules! __ra_macro_fixture548 {($ctor : pat , $param : pat )=>{ crate :: Ty :: Apply ( crate :: ApplicationTy { ctor : $ctor , parameters : $param })}; ($ctor : pat )=>{ ty_app ! ($ctor , _)}; }
+macro_rules! __ra_macro_fixture549 {(@ one $x : expr )=>( 1usize ); ($elem : expr ; $n : expr )=>({$crate :: SmallVec :: from_elem ($elem , $n )}); ($($x : expr ),*$(,)*)=>({ let count = 0usize $(+ $crate :: smallvec ! (@ one $x ))*; # [ allow ( unused_mut )] let mut vec = $crate :: SmallVec :: new (); if count <= vec . inline_size (){$(vec . push ($x );)* vec } else {$crate :: SmallVec :: from_vec ($crate :: alloc :: vec ! [$($x ,)*])}}); }
+macro_rules! __ra_macro_fixture550 {($($q : path )*)=>{$(let before = memory_usage (). allocated ; $q . in_db ( self ). sweep ( sweep ); let after = memory_usage (). allocated ; let q : $q = Default :: default (); let name = format ! ( "{:?}" , q ); acc . push (( name , before - after )); let before = memory_usage (). allocated ; $q . in_db ( self ). sweep ( sweep . discard_everything ()); let after = memory_usage (). allocated ; let q : $q = Default :: default (); let name = format ! ( "{:?} (deps)" , q ); acc . push (( name , before - after )); let before = memory_usage (). allocated ; $q . in_db ( self ). purge (); let after = memory_usage (). allocated ; let q : $q = Default :: default (); let name = format ! ( "{:?} (purge)" , q ); acc . push (( name , before - after )); )*}}
+macro_rules! __ra_macro_fixture551 {($($arg : tt )*)=>( if $crate :: cfg ! ( debug_assertions ){$crate :: assert ! ($($arg )*); })}
+macro_rules! __ra_macro_fixture552 {()=>{{ let anchor = match self . l_curly_token (){ Some ( it )=> it . into (), None => return self . clone (), }; InsertPosition :: After ( anchor )}}; }
+macro_rules! __ra_macro_fixture553 {($anchor : expr )=>{ if let Some ( comma )= $anchor . syntax (). siblings_with_tokens ( Direction :: Next ). find (| it | it . kind ()== T ! [,]){ InsertPosition :: After ( comma )} else { to_insert . insert ( 0 , make :: token ( T ! [,]). into ()); InsertPosition :: After ($anchor . syntax (). clone (). into ())}}; }
+macro_rules! __ra_macro_fixture554 {($anchor : expr )=>{ if let Some ( comma )= $anchor . syntax (). siblings_with_tokens ( Direction :: Next ). find (| it | it . kind ()== T ! [,]){ InsertPosition :: After ( comma )} else { to_insert . insert ( 0 , make :: token ( T ! [,]). into ()); InsertPosition :: After ($anchor . syntax (). clone (). into ())}}; }
+macro_rules! __ra_macro_fixture555 {()=>{{ let anchor = match self . l_angle_token (){ Some ( it )=> it . into (), None => return self . clone (), }; InsertPosition :: After ( anchor )}}; }
+macro_rules! __ra_macro_fixture556 {()=>{ for _ in 0 .. level { buf . push_str ( " " ); }}; }
+macro_rules! __ra_macro_fixture557 {()=>{ ExpandError :: BindingError ( format ! ( "" ))}; ($($tt : tt )*)=>{ ExpandError :: BindingError ( format ! ($($tt )*))}; }
+macro_rules! __ra_macro_fixture558 {($($tt : tt )*)=>{ return Err ( err ! ($($tt )*))}; }
+macro_rules! __ra_macro_fixture559 {($($tt : tt )*)=>{ ParseError :: UnexpectedToken (($($tt )*). to_string ())}; }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
new file mode 100644
index 000000000..f02a51ab6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "base-db"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+salsa = "0.17.0-pre.2"
+rustc-hash = "1.1.0"
+
+syntax = { path = "../syntax", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+test-utils = { path = "../test-utils", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs
new file mode 100644
index 000000000..b57f23457
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs
@@ -0,0 +1,85 @@
+//! Defines a unit of change that can be applied to the database to get the next
+//! state. Changes are transactional.
+
+use std::{fmt, sync::Arc};
+
+use salsa::Durability;
+use vfs::FileId;
+
+use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId};
+
+/// Encapsulates a bunch of raw `.set` calls on the database.
+#[derive(Default)]
+pub struct Change {
+ pub roots: Option<Vec<SourceRoot>>,
+ pub files_changed: Vec<(FileId, Option<Arc<String>>)>,
+ pub crate_graph: Option<CrateGraph>,
+}
+
+impl fmt::Debug for Change {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut d = fmt.debug_struct("Change");
+ if let Some(roots) = &self.roots {
+ d.field("roots", roots);
+ }
+ if !self.files_changed.is_empty() {
+ d.field("files_changed", &self.files_changed.len());
+ }
+ if self.crate_graph.is_some() {
+ d.field("crate_graph", &self.crate_graph);
+ }
+ d.finish()
+ }
+}
+
+impl Change {
+ pub fn new() -> Change {
+ Change::default()
+ }
+
+ pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
+ self.roots = Some(roots);
+ }
+
+ pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<String>>) {
+ self.files_changed.push((file_id, new_text))
+ }
+
+ pub fn set_crate_graph(&mut self, graph: CrateGraph) {
+ self.crate_graph = Some(graph);
+ }
+
+ pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
+ let _p = profile::span("RootDatabase::apply_change");
+ if let Some(roots) = self.roots {
+ for (idx, root) in roots.into_iter().enumerate() {
+ let root_id = SourceRootId(idx as u32);
+ let durability = durability(&root);
+ for file_id in root.iter() {
+ db.set_file_source_root_with_durability(file_id, root_id, durability);
+ }
+ db.set_source_root_with_durability(root_id, Arc::new(root), durability);
+ }
+ }
+
+ for (file_id, text) in self.files_changed {
+ let source_root_id = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root_id);
+ let durability = durability(&source_root);
+ // XXX: can't actually remove the file, just reset the text
+ let text = text.unwrap_or_default();
+ db.set_file_text_with_durability(file_id, text, durability)
+ }
+ if let Some(crate_graph) = self.crate_graph {
+ db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH)
+ }
+ }
+}
+
+fn durability(source_root: &SourceRoot) -> Durability {
+ if source_root.is_library {
+ Durability::HIGH
+ } else {
+ Durability::LOW
+ }
+}
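A minimal sketch of how a `Change` is built with the setters above and then applied, assuming a database type `MyDatabase` that implements `SourceDatabaseExt + Default` (a hypothetical name, not defined in this patch) and an already-allocated `vfs::FileId`:

    use std::sync::Arc;

    // Hypothetical helper; `MyDatabase` stands in for any `SourceDatabaseExt` impl.
    fn set_file_text(db: &mut MyDatabase, file_id: vfs::FileId, new_text: String) {
        // Bundle the raw `.set` calls into a single transactional unit.
        let mut change = Change::new();
        change.change_file(file_id, Some(Arc::new(new_text)));
        // `apply` picks a durability per source root and writes the new
        // text into the database in one go.
        change.apply(db);
    }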
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
new file mode 100644
index 000000000..8e6e6a11a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -0,0 +1,494 @@
+//! A set of high-level utility fixture methods to use in tests.
+use std::{mem, str::FromStr, sync::Arc};
+
+use cfg::CfgOptions;
+use rustc_hash::FxHashMap;
+use test_utils::{
+ extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER,
+};
+use tt::Subtree;
+use vfs::{file_set::FileSet, VfsPath};
+
+use crate::{
+ input::{CrateName, CrateOrigin, LangCrateOrigin},
+ Change, CrateDisplayName, CrateGraph, CrateId, Dependency, Edition, Env, FileId, FilePosition,
+ FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, SourceDatabaseExt,
+ SourceRoot, SourceRootId,
+};
+
+pub const WORKSPACE: SourceRootId = SourceRootId(0);
+
+pub trait WithFixture: Default + SourceDatabaseExt + 'static {
+ fn with_single_file(ra_fixture: &str) -> (Self, FileId) {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert_eq!(fixture.files.len(), 1);
+ (db, fixture.files[0])
+ }
+
+ fn with_many_files(ra_fixture: &str) -> (Self, Vec<FileId>) {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ (db, fixture.files)
+ }
+
+ fn with_files(ra_fixture: &str) -> Self {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ db
+ }
+
+ fn with_files_extra_proc_macros(
+ ra_fixture: &str,
+ proc_macros: Vec<(String, ProcMacro)>,
+ ) -> Self {
+ let fixture = ChangeFixture::parse_with_proc_macros(ra_fixture, proc_macros);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ db
+ }
+
+ fn with_position(ra_fixture: &str) -> (Self, FilePosition) {
+ let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
+ let offset = range_or_offset.expect_offset();
+ (db, FilePosition { file_id, offset })
+ }
+
+ fn with_range(ra_fixture: &str) -> (Self, FileRange) {
+ let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
+ let range = range_or_offset.expect_range();
+ (db, FileRange { file_id, range })
+ }
+
+ fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) {
+ let fixture = ChangeFixture::parse(ra_fixture);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ let (file_id, range_or_offset) = fixture
+ .file_position
+ .expect("Could not find file position in fixture. Did you forget to add an `$0`?");
+ (db, file_id, range_or_offset)
+ }
+
+ fn test_crate(&self) -> CrateId {
+ let crate_graph = self.crate_graph();
+ let mut it = crate_graph.iter();
+ let res = it.next().unwrap();
+ assert!(it.next().is_none());
+ res
+ }
+}
+
+impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {}
+
+pub struct ChangeFixture {
+ pub file_position: Option<(FileId, RangeOrOffset)>,
+ pub files: Vec<FileId>,
+ pub change: Change,
+}
+
+impl ChangeFixture {
+ pub fn parse(ra_fixture: &str) -> ChangeFixture {
+ Self::parse_with_proc_macros(ra_fixture, Vec::new())
+ }
+
+ pub fn parse_with_proc_macros(
+ ra_fixture: &str,
+ mut proc_macros: Vec<(String, ProcMacro)>,
+ ) -> ChangeFixture {
+ let (mini_core, proc_macro_names, fixture) = Fixture::parse(ra_fixture);
+ let mut change = Change::new();
+
+ let mut files = Vec::new();
+ let mut crate_graph = CrateGraph::default();
+ let mut crates = FxHashMap::default();
+ let mut crate_deps = Vec::new();
+ let mut default_crate_root: Option<FileId> = None;
+ let mut default_cfg = CfgOptions::default();
+
+ let mut file_set = FileSet::default();
+ let mut current_source_root_kind = SourceRootKind::Local;
+ let source_root_prefix = "/".to_string();
+ let mut file_id = FileId(0);
+ let mut roots = Vec::new();
+
+ let mut file_position = None;
+
+ for entry in fixture {
+ let text = if entry.text.contains(CURSOR_MARKER) {
+ if entry.text.contains(ESCAPED_CURSOR_MARKER) {
+ entry.text.replace(ESCAPED_CURSOR_MARKER, CURSOR_MARKER)
+ } else {
+ let (range_or_offset, text) = extract_range_or_offset(&entry.text);
+ assert!(file_position.is_none());
+ file_position = Some((file_id, range_or_offset));
+ text
+ }
+ } else {
+ entry.text.clone()
+ };
+
+ let meta = FileMeta::from(entry);
+ assert!(meta.path.starts_with(&source_root_prefix));
+ if !meta.deps.is_empty() {
+ assert!(meta.krate.is_some(), "can't specify deps without naming the crate")
+ }
+
+ if let Some(kind) = &meta.introduce_new_source_root {
+ let root = match current_source_root_kind {
+ SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
+ SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
+ };
+ roots.push(root);
+ current_source_root_kind = *kind;
+ }
+
+ if let Some((krate, origin, version)) = meta.krate {
+ let crate_name = CrateName::normalize_dashes(&krate);
+ let crate_id = crate_graph.add_crate_root(
+ file_id,
+ meta.edition,
+ Some(crate_name.clone().into()),
+ version,
+ meta.cfg.clone(),
+ meta.cfg,
+ meta.env,
+ Ok(Vec::new()),
+ false,
+ origin,
+ );
+ let prev = crates.insert(crate_name.clone(), crate_id);
+ assert!(prev.is_none());
+ for dep in meta.deps {
+ let prelude = meta.extern_prelude.contains(&dep);
+ let dep = CrateName::normalize_dashes(&dep);
+ crate_deps.push((crate_name.clone(), dep, prelude))
+ }
+ } else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
+ assert!(default_crate_root.is_none());
+ default_crate_root = Some(file_id);
+ default_cfg = meta.cfg;
+ }
+
+ change.change_file(file_id, Some(Arc::new(text)));
+ let path = VfsPath::new_virtual_path(meta.path);
+ file_set.insert(file_id, path);
+ files.push(file_id);
+ file_id.0 += 1;
+ }
+
+ if crates.is_empty() {
+ let crate_root = default_crate_root
+ .expect("missing default crate root, specify a main.rs or lib.rs");
+ crate_graph.add_crate_root(
+ crate_root,
+ Edition::CURRENT,
+ Some(CrateName::new("test").unwrap().into()),
+ None,
+ default_cfg.clone(),
+ default_cfg,
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ } else {
+ for (from, to, prelude) in crate_deps {
+ let from_id = crates[&from];
+ let to_id = crates[&to];
+ crate_graph
+ .add_dep(
+ from_id,
+ Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude),
+ )
+ .unwrap();
+ }
+ }
+
+ if let Some(mini_core) = mini_core {
+ let core_file = file_id;
+ file_id.0 += 1;
+
+ let mut fs = FileSet::default();
+ fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
+ roots.push(SourceRoot::new_library(fs));
+
+ change.change_file(core_file, Some(Arc::new(mini_core.source_code())));
+
+ let all_crates = crate_graph.crates_in_topological_order();
+
+ let core_crate = crate_graph.add_crate_root(
+ core_file,
+ Edition::Edition2021,
+ Some(CrateDisplayName::from_canonical_name("core".to_string())),
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::Lang(LangCrateOrigin::Core),
+ );
+
+ for krate in all_crates {
+ crate_graph
+ .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate))
+ .unwrap();
+ }
+ }
+
+ if !proc_macro_names.is_empty() {
+ let proc_lib_file = file_id;
+ file_id.0 += 1;
+
+ proc_macros.extend(default_test_proc_macros());
+ let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macros);
+ let mut fs = FileSet::default();
+ fs.insert(
+ proc_lib_file,
+ VfsPath::new_virtual_path("/sysroot/proc_macros/lib.rs".to_string()),
+ );
+ roots.push(SourceRoot::new_library(fs));
+
+ change.change_file(proc_lib_file, Some(Arc::new(source)));
+
+ let all_crates = crate_graph.crates_in_topological_order();
+
+ let proc_macros_crate = crate_graph.add_crate_root(
+ proc_lib_file,
+ Edition::Edition2021,
+ Some(CrateDisplayName::from_canonical_name("proc_macros".to_string())),
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(proc_macro),
+ true,
+ CrateOrigin::CratesIo { repo: None },
+ );
+
+ for krate in all_crates {
+ crate_graph
+ .add_dep(
+ krate,
+ Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate),
+ )
+ .unwrap();
+ }
+ }
+
+ let root = match current_source_root_kind {
+ SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
+ SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
+ };
+ roots.push(root);
+ change.set_roots(roots);
+ change.set_crate_graph(crate_graph);
+
+ ChangeFixture { file_position, files, change }
+ }
+}
+
+fn default_test_proc_macros() -> [(String, ProcMacro); 4] {
+ [
+ (
+ r#"
+#[proc_macro_attribute]
+pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "identity".into(),
+ kind: crate::ProcMacroKind::Attr,
+ expander: Arc::new(IdentityProcMacroExpander),
+ },
+ ),
+ (
+ r#"
+#[proc_macro_derive(DeriveIdentity)]
+pub fn derive_identity(item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "DeriveIdentity".into(),
+ kind: crate::ProcMacroKind::CustomDerive,
+ expander: Arc::new(IdentityProcMacroExpander),
+ },
+ ),
+ (
+ r#"
+#[proc_macro_attribute]
+pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
+ attr
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "input_replace".into(),
+ kind: crate::ProcMacroKind::Attr,
+ expander: Arc::new(AttributeInputReplaceProcMacroExpander),
+ },
+ ),
+ (
+ r#"
+#[proc_macro]
+pub fn mirror(input: TokenStream) -> TokenStream {
+ input
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "mirror".into(),
+ kind: crate::ProcMacroKind::FuncLike,
+ expander: Arc::new(MirrorProcMacroExpander),
+ },
+ ),
+ ]
+}
+
+fn filter_test_proc_macros(
+ proc_macro_names: &[String],
+ proc_macro_defs: Vec<(String, ProcMacro)>,
+) -> (Vec<ProcMacro>, String) {
+ // The source here is only required so that paths to the macros exist and are resolvable.
+ let mut source = String::new();
+ let mut proc_macros = Vec::new();
+
+ for (c, p) in proc_macro_defs {
+ if !proc_macro_names.iter().any(|name| name == &stdx::to_lower_snake_case(&p.name)) {
+ continue;
+ }
+ proc_macros.push(p);
+ source += &c;
+ }
+
+ (proc_macros, source)
+}
+
+#[derive(Debug, Clone, Copy)]
+enum SourceRootKind {
+ Local,
+ Library,
+}
+
+#[derive(Debug)]
+struct FileMeta {
+ path: String,
+ krate: Option<(String, CrateOrigin, Option<String>)>,
+ deps: Vec<String>,
+ extern_prelude: Vec<String>,
+ cfg: CfgOptions,
+ edition: Edition,
+ env: Env,
+ introduce_new_source_root: Option<SourceRootKind>,
+}
+
+fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
+ if let Some((a, b)) = crate_str.split_once('@') {
+ let (version, origin) = match b.split_once(':') {
+ Some(("CratesIo", data)) => match data.split_once(',') {
+ Some((version, url)) => {
+ (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()) })
+ }
+ _ => panic!("Bad crates.io parameter: {}", data),
+ },
+ _ => panic!("Bad string for crate origin: {}", b),
+ };
+ (a.to_owned(), origin, Some(version.to_string()))
+ } else {
+ let crate_origin = match &*crate_str {
+ "std" => CrateOrigin::Lang(LangCrateOrigin::Std),
+ "core" => CrateOrigin::Lang(LangCrateOrigin::Core),
+ _ => CrateOrigin::CratesIo { repo: None },
+ };
+ (crate_str, crate_origin, None)
+ }
+}
+
+impl From<Fixture> for FileMeta {
+ fn from(f: Fixture) -> FileMeta {
+ let mut cfg = CfgOptions::default();
+ f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into()));
+ f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into()));
+ let deps = f.deps;
+ FileMeta {
+ path: f.path,
+ krate: f.krate.map(parse_crate),
+ extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()),
+ deps,
+ cfg,
+ edition: f.edition.as_ref().map_or(Edition::CURRENT, |v| Edition::from_str(v).unwrap()),
+ env: f.env.into_iter().collect(),
+ introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind {
+ "local" => SourceRootKind::Local,
+ "library" => SourceRootKind::Library,
+ invalid => panic!("invalid source root kind '{}'", invalid),
+ }),
+ }
+ }
+}
+
+// Identity mapping
+#[derive(Debug)]
+struct IdentityProcMacroExpander;
+impl ProcMacroExpander for IdentityProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ _: Option<&Subtree>,
+ _: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+}
+
+// Pastes the attribute input as its output
+#[derive(Debug)]
+struct AttributeInputReplaceProcMacroExpander;
+impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
+ fn expand(
+ &self,
+ _: &Subtree,
+ attrs: Option<&Subtree>,
+ _: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError> {
+ attrs
+ .cloned()
+ .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
+ }
+}
+
+#[derive(Debug)]
+struct MirrorProcMacroExpander;
+impl ProcMacroExpander for MirrorProcMacroExpander {
+ fn expand(
+ &self,
+ input: &Subtree,
+ _: Option<&Subtree>,
+ _: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError> {
+ fn traverse(input: &Subtree) -> Subtree {
+ let mut res = Subtree::default();
+ res.delimiter = input.delimiter;
+ for tt in input.token_trees.iter().rev() {
+ let tt = match tt {
+ tt::TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+ tt::TokenTree::Subtree(sub) => tt::TokenTree::Subtree(traverse(sub)),
+ };
+ res.token_trees.push(tt);
+ }
+ res
+ }
+ Ok(traverse(input))
+ }
+}
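
For orientation (not applied by this patch), a test might use these helpers roughly as below. The `$0` cursor marker is confirmed by the code above; treating a bare snippet as a single `/main.rs` file is behaviour of `test_utils::Fixture`, which lives outside this file, so that part is an assumption. The database type is left generic.

    use base_db::{fixture::WithFixture, SourceDatabaseExt};

    // Hypothetical generic test body; `DB` would be a concrete salsa database in practice.
    fn demo<DB: Default + SourceDatabaseExt + 'static>() {
        // `$0` marks the cursor; `with_position` returns the file and offset it was at.
        // (Single-file default for a header-less fixture is assumed here.)
        let (db, pos) = DB::with_position("fn ma$0in() {}");
        let _ = (db, pos.file_id, pos.offset);
    }
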
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
new file mode 100644
index 000000000..9b5a10acf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -0,0 +1,792 @@
+//! This module specifies the input to rust-analyzer. In some sense, this is
+//! **the** most important module, because all other fancy stuff is strictly
+//! derived from this input.
+//!
+//! Note that neither this module nor any other part of the analyzer's core does
+//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
+//! actual IO is done and lowered to input.
+
+use std::{fmt, iter::FromIterator, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
+
+use cfg::CfgOptions;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::SmolStr;
+use tt::Subtree;
+use vfs::{file_set::FileSet, FileId, VfsPath};
+
+/// Files are grouped into source roots. A source root is a directory on the
+/// file systems which is watched for changes. Typically it corresponds to a
+/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
+/// the nearest enclosing source root. Paths to files are always relative to a
+/// source root, and the analyzer does not know the root path of the source root at
+/// all. So, a file from one source root can't refer to a file in another source
+/// root by path.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct SourceRootId(pub u32);
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct SourceRoot {
+ /// Sysroot or crates.io library.
+ ///
+    /// Libraries are considered mostly immutable; this assumption is used to
+    /// optimize salsa's query structure.
+ pub is_library: bool,
+ pub(crate) file_set: FileSet,
+}
+
+impl SourceRoot {
+ pub fn new_local(file_set: FileSet) -> SourceRoot {
+ SourceRoot { is_library: false, file_set }
+ }
+ pub fn new_library(file_set: FileSet) -> SourceRoot {
+ SourceRoot { is_library: true, file_set }
+ }
+ pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
+ self.file_set.path_for_file(file)
+ }
+ pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
+ self.file_set.file_for_path(path)
+ }
+ pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
+ self.file_set.iter()
+ }
+}
+
+/// `CrateGraph` is a bit of information which turns a set of text files into a
+/// number of Rust crates.
+///
+/// Each crate is defined by the `FileId` of its root module, the set of enabled
+/// `cfg` flags and the set of dependencies.
+///
+/// Note that, due to `cfg` flags, there might be several crates for a single `FileId`!
+///
+/// For the purposes of analysis, a crate does not have a name. Instead, names
+/// are specified on dependency edges. That is, a crate might be known under
+/// different names in different dependent crates.
+///
+/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust
+/// language proper, not a concept of the build system. In practice, we get
+/// `CrateGraph` by lowering `cargo metadata` output.
+///
+/// `CrateGraph` is `!Serialize` by design, see
+/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
+#[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
+pub struct CrateGraph {
+ arena: FxHashMap<CrateId, CrateData>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct CrateId(pub u32);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CrateName(SmolStr);
+
+impl CrateName {
+ /// Creates a crate name, checking for dashes in the string provided.
+    /// Dashes are not allowed in crate names,
+    /// so the input string is returned as `Err` in that case.
+ pub fn new(name: &str) -> Result<CrateName, &str> {
+ if name.contains('-') {
+ Err(name)
+ } else {
+ Ok(Self(SmolStr::new(name)))
+ }
+ }
+
+ /// Creates a crate name, unconditionally replacing the dashes with underscores.
+ pub fn normalize_dashes(name: &str) -> CrateName {
+ Self(SmolStr::new(name.replace('-', "_")))
+ }
+
+ pub fn as_smol_str(&self) -> &SmolStr {
+ &self.0
+ }
+}
+
+impl fmt::Display for CrateName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl ops::Deref for CrateName {
+ type Target = str;
+ fn deref(&self) -> &str {
+ &*self.0
+ }
+}
+
+/// Origin of the crates. It is used in emitting monikers.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum CrateOrigin {
+    /// Crates that are from the crates.io official registry.
+ CratesIo { repo: Option<String> },
+ /// Crates that are provided by the language, like std, core, proc-macro, ...
+ Lang(LangCrateOrigin),
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum LangCrateOrigin {
+ Alloc,
+ Core,
+ ProcMacro,
+ Std,
+ Test,
+ Other,
+}
+
+impl From<&str> for LangCrateOrigin {
+ fn from(s: &str) -> Self {
+ match s {
+ "alloc" => LangCrateOrigin::Alloc,
+ "core" => LangCrateOrigin::Core,
+ "proc-macro" => LangCrateOrigin::ProcMacro,
+ "std" => LangCrateOrigin::Std,
+ "test" => LangCrateOrigin::Test,
+ _ => LangCrateOrigin::Other,
+ }
+ }
+}
+
+impl fmt::Display for LangCrateOrigin {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let text = match self {
+ LangCrateOrigin::Alloc => "alloc",
+ LangCrateOrigin::Core => "core",
+ LangCrateOrigin::ProcMacro => "proc_macro",
+ LangCrateOrigin::Std => "std",
+ LangCrateOrigin::Test => "test",
+ LangCrateOrigin::Other => "other",
+ };
+ f.write_str(text)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CrateDisplayName {
+ // The name we use to display various paths (with `_`).
+ crate_name: CrateName,
+ // The name as specified in Cargo.toml (with `-`).
+ canonical_name: String,
+}
+
+impl CrateDisplayName {
+ pub fn canonical_name(&self) -> &str {
+ &self.canonical_name
+ }
+ pub fn crate_name(&self) -> &CrateName {
+ &self.crate_name
+ }
+}
+
+impl From<CrateName> for CrateDisplayName {
+ fn from(crate_name: CrateName) -> CrateDisplayName {
+ let canonical_name = crate_name.to_string();
+ CrateDisplayName { crate_name, canonical_name }
+ }
+}
+
+impl fmt::Display for CrateDisplayName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.crate_name.fmt(f)
+ }
+}
+
+impl ops::Deref for CrateDisplayName {
+ type Target = str;
+ fn deref(&self) -> &str {
+ &*self.crate_name
+ }
+}
+
+impl CrateDisplayName {
+ pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName {
+ let crate_name = CrateName::normalize_dashes(&canonical_name);
+ CrateDisplayName { crate_name, canonical_name }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ProcMacroId(pub u32);
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
+pub enum ProcMacroKind {
+ CustomDerive,
+ FuncLike,
+ Attr,
+}
+
+pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ attrs: Option<&Subtree>,
+ env: &Env,
+ ) -> Result<Subtree, ProcMacroExpansionError>;
+}
+
+pub enum ProcMacroExpansionError {
+ Panic(String),
+ /// Things like "proc macro server was killed by OOM".
+ System(String),
+}
+
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
+
+#[derive(Debug, Clone)]
+pub struct ProcMacro {
+ pub name: SmolStr,
+ pub kind: ProcMacroKind,
+ pub expander: Arc<dyn ProcMacroExpander>,
+}
+
+#[derive(Debug, Clone)]
+pub struct CrateData {
+ pub root_file_id: FileId,
+ pub edition: Edition,
+ pub version: Option<String>,
+    /// The name used in the package's project declaration: for Cargo projects
+    /// this is `[package].name`; it can be different for other project types or
+    /// even absent (for example, a dummy crate for a code snippet).
+    ///
+    /// For purposes of analysis, crates are anonymous (only the names in
+    /// `Dependency` matter); this name should only be used for UI.
+ pub display_name: Option<CrateDisplayName>,
+ pub cfg_options: CfgOptions,
+ pub potential_cfg_options: CfgOptions,
+ pub env: Env,
+ pub dependencies: Vec<Dependency>,
+ pub proc_macro: ProcMacroLoadResult,
+ pub origin: CrateOrigin,
+ pub is_proc_macro: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Edition {
+ Edition2015,
+ Edition2018,
+ Edition2021,
+}
+
+impl Edition {
+ pub const CURRENT: Edition = Edition::Edition2018;
+}
+
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Env {
+ entries: FxHashMap<String, String>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Dependency {
+ pub crate_id: CrateId,
+ pub name: CrateName,
+ prelude: bool,
+}
+
+impl Dependency {
+ pub fn new(name: CrateName, crate_id: CrateId) -> Self {
+ Self { name, crate_id, prelude: true }
+ }
+
+ pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
+ Self { name, crate_id, prelude }
+ }
+
+ /// Whether this dependency is to be added to the depending crate's extern prelude.
+ pub fn is_prelude(&self) -> bool {
+ self.prelude
+ }
+}
+
+impl CrateGraph {
+ pub fn add_crate_root(
+ &mut self,
+ root_file_id: FileId,
+ edition: Edition,
+ display_name: Option<CrateDisplayName>,
+ version: Option<String>,
+ cfg_options: CfgOptions,
+ potential_cfg_options: CfgOptions,
+ env: Env,
+ proc_macro: ProcMacroLoadResult,
+ is_proc_macro: bool,
+ origin: CrateOrigin,
+ ) -> CrateId {
+ let data = CrateData {
+ root_file_id,
+ edition,
+ version,
+ display_name,
+ cfg_options,
+ potential_cfg_options,
+ env,
+ proc_macro,
+ dependencies: Vec::new(),
+ origin,
+ is_proc_macro,
+ };
+ let crate_id = CrateId(self.arena.len() as u32);
+ let prev = self.arena.insert(crate_id, data);
+ assert!(prev.is_none());
+ crate_id
+ }
+
+ pub fn add_dep(
+ &mut self,
+ from: CrateId,
+ dep: Dependency,
+ ) -> Result<(), CyclicDependenciesError> {
+ let _p = profile::span("add_dep");
+
+ // Check if adding a dep from `from` to `to` creates a cycle. To figure
+ // that out, look for a path in the *opposite* direction, from `to` to
+ // `from`.
+ if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
+ let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
+ let err = CyclicDependenciesError { path };
+ assert!(err.from().0 == from && err.to().0 == dep.crate_id);
+ return Err(err);
+ }
+
+ self.arena.get_mut(&from).unwrap().add_dep(dep);
+ Ok(())
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.arena.is_empty()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
+ self.arena.keys().copied()
+ }
+
+ /// Returns an iterator over all transitive dependencies of the given crate,
+ /// including the crate itself.
+ pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
+ let mut worklist = vec![of];
+ let mut deps = FxHashSet::default();
+
+ while let Some(krate) = worklist.pop() {
+ if !deps.insert(krate) {
+ continue;
+ }
+
+ worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
+ }
+
+ deps.into_iter()
+ }
+
+ /// Returns all transitive reverse dependencies of the given crate,
+ /// including the crate itself.
+ pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
+ let mut worklist = vec![of];
+ let mut rev_deps = FxHashSet::default();
+ rev_deps.insert(of);
+
+ let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
+ self.arena.iter().for_each(|(&krate, data)| {
+ data.dependencies
+ .iter()
+ .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
+ });
+
+ while let Some(krate) = worklist.pop() {
+ if let Some(krate_rev_deps) = inverted_graph.get(&krate) {
+ krate_rev_deps
+ .iter()
+ .copied()
+ .filter(|&rev_dep| rev_deps.insert(rev_dep))
+ .for_each(|rev_dep| worklist.push(rev_dep));
+ }
+ }
+
+ rev_deps.into_iter()
+ }
+
+    /// Returns all crates in the graph, sorted in topological order (i.e. dependencies of a crate
+ /// come before the crate itself).
+ pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
+ let mut res = Vec::new();
+ let mut visited = FxHashSet::default();
+
+ for krate in self.arena.keys().copied() {
+ go(self, &mut visited, &mut res, krate);
+ }
+
+ return res;
+
+ fn go(
+ graph: &CrateGraph,
+ visited: &mut FxHashSet<CrateId>,
+ res: &mut Vec<CrateId>,
+ source: CrateId,
+ ) {
+ if !visited.insert(source) {
+ return;
+ }
+ for dep in graph[source].dependencies.iter() {
+ go(graph, visited, res, dep.crate_id)
+ }
+ res.push(source)
+ }
+ }
+
+ // FIXME: this only finds one crate with the given root; we could have multiple
+ pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
+ let (&crate_id, _) =
+ self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?;
+ Some(crate_id)
+ }
+
+ /// Extends this crate graph by adding a complete disjoint second crate
+ /// graph.
+ ///
+    /// The ids of the crates in the `other` graph are shifted by the returned
+    /// amount.
+ pub fn extend(&mut self, other: CrateGraph) -> u32 {
+ let start = self.arena.len() as u32;
+ self.arena.extend(other.arena.into_iter().map(|(id, mut data)| {
+ let new_id = id.shift(start);
+ for dep in &mut data.dependencies {
+ dep.crate_id = dep.crate_id.shift(start);
+ }
+ (new_id, data)
+ }));
+ start
+ }
+
+ fn find_path(
+ &self,
+ visited: &mut FxHashSet<CrateId>,
+ from: CrateId,
+ to: CrateId,
+ ) -> Option<Vec<CrateId>> {
+ if !visited.insert(from) {
+ return None;
+ }
+
+ if from == to {
+ return Some(vec![to]);
+ }
+
+ for dep in &self[from].dependencies {
+ let crate_id = dep.crate_id;
+ if let Some(mut path) = self.find_path(visited, crate_id, to) {
+ path.push(from);
+ return Some(path);
+ }
+ }
+
+ None
+ }
+
+    // Workaround for https://github.com/rust-lang/rust-analyzer/issues/6038.
+ // As hacky as it gets.
+ pub fn patch_cfg_if(&mut self) -> bool {
+ let cfg_if = self.hacky_find_crate("cfg_if");
+ let std = self.hacky_find_crate("std");
+ match (cfg_if, std) {
+ (Some(cfg_if), Some(std)) => {
+ self.arena.get_mut(&cfg_if).unwrap().dependencies.clear();
+ self.arena
+ .get_mut(&std)
+ .unwrap()
+ .dependencies
+ .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
+ true
+ }
+ _ => false,
+ }
+ }
+
+ fn hacky_find_crate(&self, display_name: &str) -> Option<CrateId> {
+ self.iter().find(|it| self[*it].display_name.as_deref() == Some(display_name))
+ }
+}
+
+impl ops::Index<CrateId> for CrateGraph {
+ type Output = CrateData;
+ fn index(&self, crate_id: CrateId) -> &CrateData {
+ &self.arena[&crate_id]
+ }
+}
+
+impl CrateId {
+ fn shift(self, amount: u32) -> CrateId {
+ CrateId(self.0 + amount)
+ }
+}
+
+impl CrateData {
+ fn add_dep(&mut self, dep: Dependency) {
+ self.dependencies.push(dep)
+ }
+}
+
+impl FromStr for Edition {
+ type Err = ParseEditionError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let res = match s {
+ "2015" => Edition::Edition2015,
+ "2018" => Edition::Edition2018,
+ "2021" => Edition::Edition2021,
+ _ => return Err(ParseEditionError { invalid_input: s.to_string() }),
+ };
+ Ok(res)
+ }
+}
+
+impl fmt::Display for Edition {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(match self {
+ Edition::Edition2015 => "2015",
+ Edition::Edition2018 => "2018",
+ Edition::Edition2021 => "2021",
+ })
+ }
+}
+
+impl FromIterator<(String, String)> for Env {
+ fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
+ Env { entries: FromIterator::from_iter(iter) }
+ }
+}
+
+impl Env {
+ pub fn set(&mut self, env: &str, value: String) {
+ self.entries.insert(env.to_owned(), value);
+ }
+
+ pub fn get(&self, env: &str) -> Option<String> {
+ self.entries.get(env).cloned()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> {
+ self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str()))
+ }
+}
+
+#[derive(Debug)]
+pub struct ParseEditionError {
+ invalid_input: String,
+}
+
+impl fmt::Display for ParseEditionError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "invalid edition: {:?}", self.invalid_input)
+ }
+}
+
+impl std::error::Error for ParseEditionError {}
+
+#[derive(Debug)]
+pub struct CyclicDependenciesError {
+ path: Vec<(CrateId, Option<CrateDisplayName>)>,
+}
+
+impl CyclicDependenciesError {
+ fn from(&self) -> &(CrateId, Option<CrateDisplayName>) {
+ self.path.first().unwrap()
+ }
+ fn to(&self) -> &(CrateId, Option<CrateDisplayName>) {
+ self.path.last().unwrap()
+ }
+}
+
+impl fmt::Display for CyclicDependenciesError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name {
+ Some(it) => format!("{}({:?})", it, id),
+ None => format!("{:?}", id),
+ };
+ let path = self.path.iter().rev().map(render).collect::<Vec<String>>().join(" -> ");
+ write!(
+ f,
+ "cyclic deps: {} -> {}, alternative path: {}",
+ render(self.from()),
+ render(self.to()),
+ path
+ )
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::CrateOrigin;
+
+ use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
+
+ #[test]
+ fn detect_cyclic_dependency_indirect() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate3 = graph.add_crate_root(
+ FileId(3u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1))
+ .is_err());
+ }
+
+ #[test]
+ fn detect_cyclic_dependency_direct() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_err());
+ }
+
+ #[test]
+ fn it_works() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate3 = graph.add_crate_root(
+ FileId(3u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .is_ok());
+ assert!(graph
+ .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .is_ok());
+ }
+
+ #[test]
+ fn dashes_are_normalized() {
+ let mut graph = CrateGraph::default();
+ let crate1 = graph.add_crate_root(
+ FileId(1u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ let crate2 = graph.add_crate_root(
+ FileId(2u32),
+ Edition2018,
+ None,
+ None,
+ CfgOptions::default(),
+ CfgOptions::default(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ assert!(graph
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
+ )
+ .is_ok());
+ assert_eq!(
+ graph[crate1].dependencies,
+ vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)]
+ );
+ }
+}
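
A small sketch (not applied by this patch) of how a caller wires crates together and then queries the graph; note that crate names live on the `Dependency` edges, not on the crates themselves. The crate and file names below are invented for the example.

    use base_db::{CrateGraph, CrateName, CrateOrigin, Dependency, Edition, Env, FileId};
    use cfg::CfgOptions;

    fn demo() {
        let mut graph = CrateGraph::default();
        let add_root = |g: &mut CrateGraph, file: u32| {
            g.add_crate_root(
                FileId(file),
                Edition::CURRENT,
                None,
                None,
                CfgOptions::default(),
                CfgOptions::default(),
                Env::default(),
                Ok(Vec::new()),
                false,
                CrateOrigin::CratesIo { repo: None },
            )
        };
        let util = add_root(&mut graph, 1);
        let app = add_root(&mut graph, 2);
        // `app` knows `util` under the name "util"; a cycle here would be an `Err`.
        graph.add_dep(app, Dependency::new(CrateName::new("util").unwrap(), util)).unwrap();

        // Dependencies come before their dependents in topological order.
        assert_eq!(graph.crates_in_topological_order(), vec![util, app]);
        assert!(graph.transitive_deps(app).any(|it| it == util));
    }
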
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
new file mode 100644
index 000000000..2d0a95b09
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -0,0 +1,131 @@
+//! base_db defines basic database traits. The concrete DB is defined by ide.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod input;
+mod change;
+pub mod fixture;
+
+use std::{panic, sync::Arc};
+
+use rustc_hash::FxHashSet;
+use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
+
+pub use crate::{
+ change::Change,
+ input::{
+ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
+ Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
+ ProcMacroId, ProcMacroKind, ProcMacroLoadResult, SourceRoot, SourceRootId,
+ },
+};
+pub use salsa::{self, Cancelled};
+pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
+
+#[macro_export]
+macro_rules! impl_intern_key {
+ ($name:ident) => {
+ impl $crate::salsa::InternKey for $name {
+ fn from_intern_id(v: $crate::salsa::InternId) -> Self {
+ $name(v)
+ }
+ fn as_intern_id(&self) -> $crate::salsa::InternId {
+ self.0
+ }
+ }
+ };
+}
+
+pub trait Upcast<T: ?Sized> {
+ fn upcast(&self) -> &T;
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct FilePosition {
+ pub file_id: FileId,
+ pub offset: TextSize,
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
+pub struct FileRange {
+ pub file_id: FileId,
+ pub range: TextRange,
+}
+
+pub const DEFAULT_LRU_CAP: usize = 128;
+
+pub trait FileLoader {
+ /// Text of the file.
+ fn file_text(&self, file_id: FileId) -> Arc<String>;
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
+}
+
+/// Database which stores all significant input facts: source code and project
+/// model. Everything else in rust-analyzer is derived from these queries.
+#[salsa::query_group(SourceDatabaseStorage)]
+pub trait SourceDatabase: FileLoader + std::fmt::Debug {
+ // Parses the file into the syntax tree.
+ #[salsa::invoke(parse_query)]
+ fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
+
+ /// The crate graph.
+ #[salsa::input]
+ fn crate_graph(&self) -> Arc<CrateGraph>;
+}
+
+fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+ let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
+ let text = db.file_text(file_id);
+ SourceFile::parse(&*text)
+}
+
+/// We don't want to give HIR knowledge of source roots, hence we extract these
+/// methods into a separate DB.
+#[salsa::query_group(SourceDatabaseExtStorage)]
+pub trait SourceDatabaseExt: SourceDatabase {
+ #[salsa::input]
+ fn file_text(&self, file_id: FileId) -> Arc<String>;
+    /// Source root of the file.
+ #[salsa::input]
+ fn file_source_root(&self, file_id: FileId) -> SourceRootId;
+ /// Contents of the source root.
+ #[salsa::input]
+ fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
+
+ fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
+}
+
+fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
+ let graph = db.crate_graph();
+ let res = graph
+ .iter()
+ .filter(|&krate| {
+ let root_file = graph[krate].root_file_id;
+ db.file_source_root(root_file) == id
+ })
+ .collect();
+ Arc::new(res)
+}
+
+/// Silly workaround for cyclic deps between the traits
+pub struct FileLoaderDelegate<T>(pub T);
+
+impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ SourceDatabaseExt::file_text(self.0, file_id)
+ }
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ // FIXME: this *somehow* should be platform agnostic...
+ let source_root = self.0.file_source_root(path.anchor);
+ let source_root = self.0.source_root(source_root);
+ source_root.file_set.resolve_path(path)
+ }
+
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ let _p = profile::span("relevant_crates");
+ let source_root = self.0.file_source_root(file_id);
+ self.0.source_root_crates(source_root)
+ }
+}
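
For example (not applied by this patch), downstream crates use `impl_intern_key!` on a newtype over `salsa::InternId` so that it can serve as a salsa intern key. `MyDefId` below is a made-up name.

    use base_db::impl_intern_key;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct MyDefId(base_db::salsa::InternId);
    impl_intern_key!(MyDefId);
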
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
new file mode 100644
index 000000000..c9664a83a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "cfg"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+
+tt = { path = "../tt", version = "0.0.0" }
+
+[dev-dependencies]
+mbe = { path = "../mbe" }
+syntax = { path = "../syntax" }
+expect-test = "1.4.0"
+oorandom = "11.1.3"
+# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
+# build graph: if the feature were enabled, syn would be built early in the graph whenever
+# `smolstr` enables `arbitrary` support. This way, we avoid feature unification.
+arbitrary = "1.1.0"
+derive_arbitrary = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
new file mode 100644
index 000000000..fd9e31ed3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
@@ -0,0 +1,145 @@
+//! The condition expression used in `#[cfg(..)]` attributes.
+//!
+//! See: <https://doc.rust-lang.org/reference/conditional-compilation.html#conditional-compilation>
+
+use std::{fmt, slice::Iter as SliceIter};
+
+use tt::SmolStr;
+
+/// A simple configuration value passed in from the outside.
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub enum CfgAtom {
+    /// e.g. `#[cfg(test)]`
+    Flag(SmolStr),
+    /// e.g. `#[cfg(target_os = "linux")]`
+ ///
+ /// Note that a key can have multiple values that are all considered "active" at the same time.
+ /// For example, `#[cfg(target_feature = "sse")]` and `#[cfg(target_feature = "sse2")]`.
+ KeyValue { key: SmolStr, value: SmolStr },
+}
+
+impl CfgAtom {
+ /// Returns `true` when the atom comes from the target specification.
+ ///
+ /// If this returns `true`, then changing this atom requires changing the compilation target. If
+ /// it returns `false`, the atom might come from a build script or the build system.
+ pub fn is_target_defined(&self) -> bool {
+ match self {
+ CfgAtom::Flag(flag) => matches!(&**flag, "unix" | "windows"),
+ CfgAtom::KeyValue { key, value: _ } => matches!(
+ &**key,
+ "target_arch"
+ | "target_os"
+ | "target_env"
+ | "target_family"
+ | "target_endian"
+ | "target_pointer_width"
+ | "target_vendor" // NOTE: `target_feature` is left out since it can be configured via `-Ctarget-feature`
+ ),
+ }
+ }
+}
+
+impl fmt::Display for CfgAtom {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CfgAtom::Flag(name) => name.fmt(f),
+ CfgAtom::KeyValue { key, value } => write!(f, "{} = {:?}", key, value),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[cfg_attr(test, derive(derive_arbitrary::Arbitrary))]
+pub enum CfgExpr {
+ Invalid,
+ Atom(CfgAtom),
+ All(Vec<CfgExpr>),
+ Any(Vec<CfgExpr>),
+ Not(Box<CfgExpr>),
+}
+
+impl From<CfgAtom> for CfgExpr {
+ fn from(atom: CfgAtom) -> Self {
+ CfgExpr::Atom(atom)
+ }
+}
+
+impl CfgExpr {
+ pub fn parse(tt: &tt::Subtree) -> CfgExpr {
+ next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
+ }
+ /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
+ pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
+ match self {
+ CfgExpr::Invalid => None,
+ CfgExpr::Atom(atom) => Some(query(atom)),
+ CfgExpr::All(preds) => {
+ preds.iter().try_fold(true, |s, pred| Some(s && pred.fold(query)?))
+ }
+ CfgExpr::Any(preds) => {
+ preds.iter().try_fold(false, |s, pred| Some(s || pred.fold(query)?))
+ }
+ CfgExpr::Not(pred) => pred.fold(query).map(|s| !s),
+ }
+ }
+}
+
+fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option<CfgExpr> {
+ let name = match it.next() {
+ None => return None,
+ Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),
+ Some(_) => return Some(CfgExpr::Invalid),
+ };
+
+ // Peek
+ let ret = match it.as_slice().first() {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
+ match it.as_slice().get(1) {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
+ it.next();
+ it.next();
+ // FIXME: escape? raw string?
+ let value =
+ SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"'));
+ CfgAtom::KeyValue { key: name, value }.into()
+ }
+ _ => return Some(CfgExpr::Invalid),
+ }
+ }
+ Some(tt::TokenTree::Subtree(subtree)) => {
+ it.next();
+ let mut sub_it = subtree.token_trees.iter();
+ let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect();
+ match name.as_str() {
+ "all" => CfgExpr::All(subs),
+ "any" => CfgExpr::Any(subs),
+ "not" => CfgExpr::Not(Box::new(subs.pop().unwrap_or(CfgExpr::Invalid))),
+ _ => CfgExpr::Invalid,
+ }
+ }
+ _ => CfgAtom::Flag(name).into(),
+ };
+
+ // Eat comma separator
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() {
+ if punct.char == ',' {
+ it.next();
+ }
+ }
+ Some(ret)
+}
+
+#[cfg(test)]
+impl arbitrary::Arbitrary<'_> for CfgAtom {
+ fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
+ if u.arbitrary()? {
+ Ok(CfgAtom::Flag(String::arbitrary(u)?.into()))
+ } else {
+ Ok(CfgAtom::KeyValue {
+ key: String::arbitrary(u)?.into(),
+ value: String::arbitrary(u)?.into(),
+ })
+ }
+ }
+}
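
A short sketch (not applied by this patch) of how these pieces fit together: an expression can be built directly from atoms and evaluated with `fold`, where the query closure decides which atoms count as active.

    use cfg::{CfgAtom, CfgExpr};

    fn demo() {
        // all(test, not(target_os = "windows"))
        let expr = CfgExpr::All(vec![
            CfgAtom::Flag("test".into()).into(),
            CfgExpr::Not(Box::new(
                CfgAtom::KeyValue { key: "target_os".into(), value: "windows".into() }.into(),
            )),
        ]);
        // Only the bare `test` flag is considered enabled here.
        let enabled = |atom: &CfgAtom| matches!(atom, CfgAtom::Flag(flag) if flag == "test");
        assert_eq!(expr.fold(&enabled), Some(true));
        assert_eq!(CfgExpr::Invalid.fold(&enabled), None); // invalid exprs evaluate to `None`
    }
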
diff --git a/src/tools/rust-analyzer/crates/cfg/src/dnf.rs b/src/tools/rust-analyzer/crates/cfg/src/dnf.rs
new file mode 100644
index 000000000..fd80e1ebe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/dnf.rs
@@ -0,0 +1,345 @@
+//! Disjunctive Normal Form construction.
+//!
+//! Algorithm from <https://www.cs.drexel.edu/~jjohnson/2015-16/fall/CS270/Lectures/3/dnf.pdf>,
+//! which would have been much easier to read if it used pattern matching. It's also missing the
+//! entire "distribute ANDs over ORs" part, which is not trivial. Oh well.
+//!
+//! This is currently both messy and inefficient. Feel free to improve it; there are unit tests.
+
+use std::fmt::{self, Write};
+
+use rustc_hash::FxHashSet;
+
+use crate::{CfgAtom, CfgDiff, CfgExpr, CfgOptions, InactiveReason};
+
+/// A `#[cfg]` directive in Disjunctive Normal Form (DNF).
+pub struct DnfExpr {
+ conjunctions: Vec<Conjunction>,
+}
+
+struct Conjunction {
+ literals: Vec<Literal>,
+}
+
+struct Literal {
+ negate: bool,
+ var: Option<CfgAtom>, // None = Invalid
+}
+
+impl DnfExpr {
+ pub fn new(expr: CfgExpr) -> Self {
+ let builder = Builder { expr: DnfExpr { conjunctions: Vec::new() } };
+
+ builder.lower(expr)
+ }
+
+ /// Computes a list of present or absent atoms in `opts` that cause this expression to evaluate
+ /// to `false`.
+ ///
+ /// Note that flipping a subset of these atoms might be sufficient to make the whole expression
+ /// evaluate to `true`. For that, see `compute_enable_hints`.
+ ///
+ /// Returns `None` when `self` is already true, or contains errors.
+ pub fn why_inactive(&self, opts: &CfgOptions) -> Option<InactiveReason> {
+ let mut res = InactiveReason { enabled: Vec::new(), disabled: Vec::new() };
+
+ for conj in &self.conjunctions {
+ let mut conj_is_true = true;
+ for lit in &conj.literals {
+ let atom = lit.var.as_ref()?;
+ let enabled = opts.enabled.contains(atom);
+ if lit.negate == enabled {
+ // Literal is false, but needs to be true for this conjunction.
+ conj_is_true = false;
+
+ if enabled {
+ res.enabled.push(atom.clone());
+ } else {
+ res.disabled.push(atom.clone());
+ }
+ }
+ }
+
+ if conj_is_true {
+ // This expression is not actually inactive.
+ return None;
+ }
+ }
+
+ res.enabled.sort_unstable();
+ res.enabled.dedup();
+ res.disabled.sort_unstable();
+ res.disabled.dedup();
+ Some(res)
+ }
+
+ /// Returns `CfgDiff` objects that would enable this directive if applied to `opts`.
+ pub fn compute_enable_hints<'a>(
+ &'a self,
+ opts: &'a CfgOptions,
+ ) -> impl Iterator<Item = CfgDiff> + 'a {
+ // A cfg is enabled if any of `self.conjunctions` evaluate to `true`.
+
+ self.conjunctions.iter().filter_map(move |conj| {
+ let mut enable = FxHashSet::default();
+ let mut disable = FxHashSet::default();
+ for lit in &conj.literals {
+ let atom = lit.var.as_ref()?;
+ let enabled = opts.enabled.contains(atom);
+ if lit.negate && enabled {
+ disable.insert(atom.clone());
+ }
+ if !lit.negate && !enabled {
+ enable.insert(atom.clone());
+ }
+ }
+
+ // Check that this actually makes `conj` true.
+ for lit in &conj.literals {
+ let atom = lit.var.as_ref()?;
+ let enabled = enable.contains(atom)
+ || (opts.enabled.contains(atom) && !disable.contains(atom));
+ if enabled == lit.negate {
+ return None;
+ }
+ }
+
+ if enable.is_empty() && disable.is_empty() {
+ return None;
+ }
+
+ let mut diff = CfgDiff {
+ enable: enable.into_iter().collect(),
+ disable: disable.into_iter().collect(),
+ };
+
+ // Undo the FxHashMap randomization for consistent output.
+ diff.enable.sort_unstable();
+ diff.disable.sort_unstable();
+
+ Some(diff)
+ })
+ }
+}
+
+impl fmt::Display for DnfExpr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.conjunctions.len() != 1 {
+ f.write_str("any(")?;
+ }
+ for (i, conj) in self.conjunctions.iter().enumerate() {
+ if i != 0 {
+ f.write_str(", ")?;
+ }
+
+ conj.fmt(f)?;
+ }
+ if self.conjunctions.len() != 1 {
+ f.write_char(')')?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Conjunction {
+ fn new(parts: Vec<CfgExpr>) -> Self {
+ let mut literals = Vec::new();
+ for part in parts {
+ match part {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => {
+ literals.push(Literal::new(part));
+ }
+ CfgExpr::All(conj) => {
+ // Flatten.
+ literals.extend(Conjunction::new(conj).literals);
+ }
+ CfgExpr::Any(_) => unreachable!("disjunction in conjunction"),
+ }
+ }
+
+ Self { literals }
+ }
+}
+
+impl fmt::Display for Conjunction {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.literals.len() != 1 {
+ f.write_str("all(")?;
+ }
+ for (i, lit) in self.literals.iter().enumerate() {
+ if i != 0 {
+ f.write_str(", ")?;
+ }
+
+ lit.fmt(f)?;
+ }
+ if self.literals.len() != 1 {
+ f.write_str(")")?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Literal {
+ fn new(expr: CfgExpr) -> Self {
+ match expr {
+ CfgExpr::Invalid => Self { negate: false, var: None },
+ CfgExpr::Atom(atom) => Self { negate: false, var: Some(atom) },
+ CfgExpr::Not(expr) => match *expr {
+ CfgExpr::Invalid => Self { negate: true, var: None },
+ CfgExpr::Atom(atom) => Self { negate: true, var: Some(atom) },
+ _ => unreachable!("non-atom {:?}", expr),
+ },
+ CfgExpr::Any(_) | CfgExpr::All(_) => unreachable!("non-literal {:?}", expr),
+ }
+ }
+}
+
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.negate {
+ write!(f, "not(")?;
+ }
+
+ match &self.var {
+ Some(var) => var.fmt(f)?,
+ None => f.write_str("<invalid>")?,
+ }
+
+ if self.negate {
+ f.write_char(')')?;
+ }
+
+ Ok(())
+ }
+}
+
+struct Builder {
+ expr: DnfExpr,
+}
+
+impl Builder {
+ fn lower(mut self, expr: CfgExpr) -> DnfExpr {
+ let expr = make_nnf(expr);
+ let expr = make_dnf(expr);
+
+ match expr {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => {
+ self.expr.conjunctions.push(Conjunction::new(vec![expr]));
+ }
+ CfgExpr::All(conj) => {
+ self.expr.conjunctions.push(Conjunction::new(conj));
+ }
+ CfgExpr::Any(mut disj) => {
+ disj.reverse();
+ while let Some(conj) = disj.pop() {
+ match conj {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::All(_) | CfgExpr::Not(_) => {
+ self.expr.conjunctions.push(Conjunction::new(vec![conj]));
+ }
+ CfgExpr::Any(inner_disj) => {
+ // Flatten.
+ disj.extend(inner_disj.into_iter().rev());
+ }
+ }
+ }
+ }
+ }
+
+ self.expr
+ }
+}
+
+fn make_dnf(expr: CfgExpr) -> CfgExpr {
+ match expr {
+ CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => expr,
+ CfgExpr::Any(e) => flatten(CfgExpr::Any(e.into_iter().map(make_dnf).collect())),
+ CfgExpr::All(e) => {
+ let e = e.into_iter().map(make_dnf).collect::<Vec<_>>();
+
+ flatten(CfgExpr::Any(distribute_conj(&e)))
+ }
+ }
+}
+
+/// Turns a conjunction of expressions into a disjunction of expressions.
+fn distribute_conj(conj: &[CfgExpr]) -> Vec<CfgExpr> {
+ fn go(out: &mut Vec<CfgExpr>, with: &mut Vec<CfgExpr>, rest: &[CfgExpr]) {
+ match rest {
+ [head, tail @ ..] => match head {
+ CfgExpr::Any(disj) => {
+ for part in disj {
+ with.push(part.clone());
+ go(out, with, tail);
+ with.pop();
+ }
+ }
+ _ => {
+ with.push(head.clone());
+ go(out, with, tail);
+ with.pop();
+ }
+ },
+ _ => {
+ // Turn accumulated parts into a new conjunction.
+ out.push(CfgExpr::All(with.clone()));
+ }
+ }
+ }
+
+ let mut out = Vec::new(); // contains only `all()`
+ let mut with = Vec::new();
+
+ go(&mut out, &mut with, conj);
+
+ out
+}
+
+fn make_nnf(expr: CfgExpr) -> CfgExpr {
+ match expr {
+ CfgExpr::Invalid | CfgExpr::Atom(_) => expr,
+ CfgExpr::Any(expr) => CfgExpr::Any(expr.into_iter().map(make_nnf).collect()),
+ CfgExpr::All(expr) => CfgExpr::All(expr.into_iter().map(make_nnf).collect()),
+ CfgExpr::Not(operand) => match *operand {
+ CfgExpr::Invalid | CfgExpr::Atom(_) => CfgExpr::Not(operand.clone()), // Original negated expr
+ CfgExpr::Not(expr) => {
+ // Remove double negation.
+ make_nnf(*expr)
+ }
+ // Convert negated conjunction/disjunction using DeMorgan's Law.
+ CfgExpr::Any(inner) => CfgExpr::All(
+ inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(),
+ ),
+ CfgExpr::All(inner) => CfgExpr::Any(
+ inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(),
+ ),
+ },
+ }
+}
+
+/// Collapses nested `any()` and `all()` predicates.
+fn flatten(expr: CfgExpr) -> CfgExpr {
+ match expr {
+ CfgExpr::All(inner) => CfgExpr::All(
+ inner
+ .into_iter()
+ .flat_map(|e| match e {
+ CfgExpr::All(inner) => inner,
+ _ => vec![e],
+ })
+ .collect(),
+ ),
+ CfgExpr::Any(inner) => CfgExpr::Any(
+ inner
+ .into_iter()
+ .flat_map(|e| match e {
+ CfgExpr::Any(inner) => inner,
+ _ => vec![e],
+ })
+ .collect(),
+ ),
+ _ => expr,
+ }
+}
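
A sketch (not applied by this patch) of the DNF machinery end to end: lower an expression, print it, and ask why it is inactive under an empty set of options and what would enable it. The expected strings follow directly from the `Display` impls above.

    use cfg::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};

    fn demo() {
        // all(test, feature = "std"), which lowers to a single conjunction.
        let expr = CfgExpr::All(vec![
            CfgAtom::Flag("test".into()).into(),
            CfgAtom::KeyValue { key: "feature".into(), value: "std".into() }.into(),
        ]);
        let dnf = DnfExpr::new(expr);
        assert_eq!(dnf.to_string(), r#"all(test, feature = "std")"#);

        let opts = CfgOptions::default();
        // Both atoms are missing from `opts`, so the cfg is inactive...
        let why = dnf.why_inactive(&opts).unwrap().to_string();
        assert_eq!(why, r#"test and feature = "std" are disabled"#);
        // ...and a single diff (enable both atoms) would activate it.
        let hints: Vec<String> =
            dnf.compute_enable_hints(&opts).map(|d| d.to_string()).collect();
        assert_eq!(hints, [r#"enable test and feature = "std""#]);
    }
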
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
new file mode 100644
index 000000000..d78ef4fb1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -0,0 +1,202 @@
+//! cfg defines conditional compilation options, a `cfg` attribute parser and an evaluator
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod cfg_expr;
+mod dnf;
+#[cfg(test)]
+mod tests;
+
+use std::fmt;
+
+use rustc_hash::FxHashSet;
+use tt::SmolStr;
+
+pub use cfg_expr::{CfgAtom, CfgExpr};
+pub use dnf::DnfExpr;
+
+/// Configuration options used for conditional compilation on items with `cfg` attributes.
+/// We have two kinds of options in different namespaces: atomic options like `unix`, and
+/// key-value options like `target_arch="x86"`.
+///
+/// Note that for key-value options, one key can have multiple values (but not none).
+/// `feature` is an example. We have both `feature="foo"` and `feature="bar"` if features
+/// `foo` and `bar` are both enabled. Key-value options are stored as key-value
+/// pairs in the `enabled` set of atoms.
+///
+/// See: <https://doc.rust-lang.org/reference/conditional-compilation.html#set-configuration-options>
+#[derive(Clone, PartialEq, Eq, Default)]
+pub struct CfgOptions {
+ enabled: FxHashSet<CfgAtom>,
+}
+
+impl fmt::Debug for CfgOptions {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut items = self
+ .enabled
+ .iter()
+ .map(|atom| match atom {
+ CfgAtom::Flag(it) => it.to_string(),
+ CfgAtom::KeyValue { key, value } => format!("{}={}", key, value),
+ })
+ .collect::<Vec<_>>();
+ items.sort();
+ f.debug_tuple("CfgOptions").field(&items).finish()
+ }
+}
+
+impl CfgOptions {
+ pub fn check(&self, cfg: &CfgExpr) -> Option<bool> {
+ cfg.fold(&|atom| self.enabled.contains(atom))
+ }
+
+ pub fn insert_atom(&mut self, key: SmolStr) {
+ self.enabled.insert(CfgAtom::Flag(key));
+ }
+
+ pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) {
+ self.enabled.insert(CfgAtom::KeyValue { key, value });
+ }
+
+ pub fn apply_diff(&mut self, diff: CfgDiff) {
+ for atom in diff.enable {
+ self.enabled.insert(atom);
+ }
+
+ for atom in diff.disable {
+ self.enabled.remove(&atom);
+ }
+ }
+
+ pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> {
+ self.enabled.iter().map(|x| match x {
+ CfgAtom::Flag(key) => key,
+ CfgAtom::KeyValue { key, .. } => key,
+ })
+ }
+
+ pub fn get_cfg_values<'a>(
+ &'a self,
+ cfg_key: &'a str,
+ ) -> impl Iterator<Item = &'a SmolStr> + 'a {
+ self.enabled.iter().filter_map(move |x| match x {
+ CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value),
+ _ => None,
+ })
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CfgDiff {
+ // Invariants: No duplicates, no atom that's both in `enable` and `disable`.
+ enable: Vec<CfgAtom>,
+ disable: Vec<CfgAtom>,
+}
+
+impl CfgDiff {
+    /// Creates a new `CfgDiff`. Returns `None` if the same item appears more than once
+    /// across `enable` and `disable` combined.
+ pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> {
+ let mut occupied = FxHashSet::default();
+ for item in enable.iter().chain(disable.iter()) {
+ if !occupied.insert(item) {
+ // was present
+ return None;
+ }
+ }
+
+ Some(CfgDiff { enable, disable })
+ }
+
+ /// Returns the total number of atoms changed by this diff.
+ pub fn len(&self) -> usize {
+ self.enable.len() + self.disable.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl fmt::Display for CfgDiff {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if !self.enable.is_empty() {
+ f.write_str("enable ")?;
+ for (i, atom) in self.enable.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.enable.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+
+ if !self.disable.is_empty() {
+ f.write_str("; ")?;
+ }
+ }
+
+ if !self.disable.is_empty() {
+ f.write_str("disable ")?;
+ for (i, atom) in self.disable.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+                _ if i == self.disable.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+ }
+
+ Ok(())
+ }
+}
+
+pub struct InactiveReason {
+ enabled: Vec<CfgAtom>,
+ disabled: Vec<CfgAtom>,
+}
+
+impl fmt::Display for InactiveReason {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if !self.enabled.is_empty() {
+ for (i, atom) in self.enabled.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.enabled.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+ let is_are = if self.enabled.len() == 1 { "is" } else { "are" };
+ write!(f, " {} enabled", is_are)?;
+
+ if !self.disabled.is_empty() {
+ f.write_str(" and ")?;
+ }
+ }
+
+ if !self.disabled.is_empty() {
+ for (i, atom) in self.disabled.iter().enumerate() {
+ let sep = match i {
+ 0 => "",
+ _ if i == self.disabled.len() - 1 => " and ",
+ _ => ", ",
+ };
+ f.write_str(sep)?;
+
+ atom.fmt(f)?;
+ }
+ let is_are = if self.disabled.len() == 1 { "is" } else { "are" };
+ write!(f, " {} disabled", is_are)?;
+ }
+
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
new file mode 100644
index 000000000..bdc3f854e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -0,0 +1,224 @@
+use arbitrary::{Arbitrary, Unstructured};
+use expect_test::{expect, Expect};
+use mbe::syntax_node_to_token_tree;
+use syntax::{ast, AstNode};
+
+use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
+
+fn assert_parse_result(input: &str, expected: CfgExpr) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ assert_eq!(cfg, expected);
+}
+
+fn check_dnf(input: &str, expect: Expect) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
+ expect.assert_eq(&actual);
+}
+
+fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ let dnf = DnfExpr::new(cfg);
+ let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
+ expect.assert_eq(&why_inactive);
+}
+
+#[track_caller]
+fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
+ let (tt, _) = {
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ syntax_node_to_token_tree(tt.syntax())
+ };
+ let cfg = CfgExpr::parse(&tt);
+ let dnf = DnfExpr::new(cfg);
+ let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
+ assert_eq!(hints, expected_hints);
+}
+
+#[test]
+fn test_cfg_expr_parser() {
+ assert_parse_result("#![cfg(foo)]", CfgAtom::Flag("foo".into()).into());
+ assert_parse_result("#![cfg(foo,)]", CfgAtom::Flag("foo".into()).into());
+ assert_parse_result(
+ "#![cfg(not(foo))]",
+ CfgExpr::Not(Box::new(CfgAtom::Flag("foo".into()).into())),
+ );
+ assert_parse_result("#![cfg(foo(bar))]", CfgExpr::Invalid);
+
+ // Only take the first
+ assert_parse_result(r#"#![cfg(foo, bar = "baz")]"#, CfgAtom::Flag("foo".into()).into());
+
+ assert_parse_result(
+ r#"#![cfg(all(foo, bar = "baz"))]"#,
+ CfgExpr::All(vec![
+ CfgAtom::Flag("foo".into()).into(),
+ CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(),
+ ]),
+ );
+
+ assert_parse_result(
+ r#"#![cfg(any(not(), all(), , bar = "baz",))]"#,
+ CfgExpr::Any(vec![
+ CfgExpr::Not(Box::new(CfgExpr::Invalid)),
+ CfgExpr::All(vec![]),
+ CfgExpr::Invalid,
+ CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(),
+ ]),
+ );
+}
+
+#[test]
+fn smoke() {
+ check_dnf("#![cfg(test)]", expect![[r#"#![cfg(test)]"#]]);
+ check_dnf("#![cfg(not(test))]", expect![[r#"#![cfg(not(test))]"#]]);
+ check_dnf("#![cfg(not(not(test)))]", expect![[r#"#![cfg(test)]"#]]);
+
+ check_dnf("#![cfg(all(a, b))]", expect![[r#"#![cfg(all(a, b))]"#]]);
+ check_dnf("#![cfg(any(a, b))]", expect![[r#"#![cfg(any(a, b))]"#]]);
+
+ check_dnf("#![cfg(not(a))]", expect![[r#"#![cfg(not(a))]"#]]);
+}
+
+#[test]
+fn distribute() {
+ check_dnf("#![cfg(all(any(a, b), c))]", expect![[r#"#![cfg(any(all(a, c), all(b, c)))]"#]]);
+ check_dnf("#![cfg(all(c, any(a, b)))]", expect![[r#"#![cfg(any(all(c, a), all(c, b)))]"#]]);
+ check_dnf(
+ "#![cfg(all(any(a, b), any(c, d)))]",
+ expect![[r#"#![cfg(any(all(a, c), all(a, d), all(b, c), all(b, d)))]"#]],
+ );
+
+ check_dnf(
+ "#![cfg(all(any(a, b, c), any(d, e, f), g))]",
+ expect![[
+ r#"#![cfg(any(all(a, d, g), all(a, e, g), all(a, f, g), all(b, d, g), all(b, e, g), all(b, f, g), all(c, d, g), all(c, e, g), all(c, f, g)))]"#
+ ]],
+ );
+}
+
+#[test]
+fn demorgan() {
+ check_dnf("#![cfg(not(all(a, b)))]", expect![[r#"#![cfg(any(not(a), not(b)))]"#]]);
+ check_dnf("#![cfg(not(any(a, b)))]", expect![[r#"#![cfg(all(not(a), not(b)))]"#]]);
+
+ check_dnf("#![cfg(not(all(not(a), b)))]", expect![[r#"#![cfg(any(a, not(b)))]"#]]);
+ check_dnf("#![cfg(not(any(a, not(b))))]", expect![[r#"#![cfg(all(not(a), b))]"#]]);
+}
+
+#[test]
+fn nested() {
+ check_dnf("#![cfg(all(any(a), not(all(any(b)))))]", expect![[r#"#![cfg(all(a, not(b)))]"#]]);
+
+ check_dnf("#![cfg(any(any(a, b)))]", expect![[r#"#![cfg(any(a, b))]"#]]);
+ check_dnf("#![cfg(not(any(any(a, b))))]", expect![[r#"#![cfg(all(not(a), not(b)))]"#]]);
+ check_dnf("#![cfg(all(all(a, b)))]", expect![[r#"#![cfg(all(a, b))]"#]]);
+ check_dnf("#![cfg(not(all(all(a, b))))]", expect![[r#"#![cfg(any(not(a), not(b)))]"#]]);
+}
+
+#[test]
+fn regression() {
+ check_dnf("#![cfg(all(not(not(any(any(any()))))))]", expect![[r##"#![cfg(any())]"##]]);
+ check_dnf("#![cfg(all(any(all(any()))))]", expect![[r##"#![cfg(any())]"##]]);
+ check_dnf("#![cfg(all(all(any())))]", expect![[r##"#![cfg(any())]"##]]);
+
+ check_dnf("#![cfg(all(all(any(), x)))]", expect![[r##"#![cfg(any())]"##]]);
+ check_dnf("#![cfg(all(all(any()), x))]", expect![[r##"#![cfg(any())]"##]]);
+ check_dnf("#![cfg(all(all(any(x))))]", expect![[r##"#![cfg(x)]"##]]);
+ check_dnf("#![cfg(all(all(any(x), x)))]", expect![[r##"#![cfg(all(x, x))]"##]]);
+}
+
+#[test]
+fn hints() {
+ let mut opts = CfgOptions::default();
+
+ check_enable_hints("#![cfg(test)]", &opts, &["enable test"]);
+ check_enable_hints("#![cfg(not(test))]", &opts, &[]);
+
+ check_enable_hints("#![cfg(any(a, b))]", &opts, &["enable a", "enable b"]);
+ check_enable_hints("#![cfg(any(b, a))]", &opts, &["enable b", "enable a"]);
+
+ check_enable_hints("#![cfg(all(a, b))]", &opts, &["enable a and b"]);
+
+ opts.insert_atom("test".into());
+
+ check_enable_hints("#![cfg(test)]", &opts, &[]);
+ check_enable_hints("#![cfg(not(test))]", &opts, &["disable test"]);
+}
+
+/// Tests that we don't suggest hints for cfgs that express an inconsistent formula.
+#[test]
+fn hints_impossible() {
+ let mut opts = CfgOptions::default();
+
+ check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]);
+
+ opts.insert_atom("test".into());
+
+ check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]);
+}
+
+#[test]
+fn why_inactive() {
+ let mut opts = CfgOptions::default();
+ opts.insert_atom("test".into());
+ opts.insert_atom("test2".into());
+
+ check_why_inactive("#![cfg(a)]", &opts, expect![["a is disabled"]]);
+ check_why_inactive("#![cfg(not(test))]", &opts, expect![["test is enabled"]]);
+
+ check_why_inactive(
+ "#![cfg(all(not(test), not(test2)))]",
+ &opts,
+ expect![["test and test2 are enabled"]],
+ );
+ check_why_inactive("#![cfg(all(a, b))]", &opts, expect![["a and b are disabled"]]);
+ check_why_inactive(
+ "#![cfg(all(not(test), a))]",
+ &opts,
+ expect![["test is enabled and a is disabled"]],
+ );
+ check_why_inactive(
+ "#![cfg(all(not(test), test2, a))]",
+ &opts,
+ expect![["test is enabled and a is disabled"]],
+ );
+ check_why_inactive(
+ "#![cfg(all(not(test), not(test2), a))]",
+ &opts,
+ expect![["test and test2 are enabled and a is disabled"]],
+ );
+}
+
+#[test]
+fn proptest() {
+ const REPEATS: usize = 512;
+
+ let mut rng = oorandom::Rand32::new(123456789);
+ let mut buf = Vec::new();
+ for _ in 0..REPEATS {
+ buf.clear();
+ while buf.len() < 512 {
+ buf.extend(rng.rand_u32().to_ne_bytes());
+ }
+
+ let mut u = Unstructured::new(&buf);
+ let cfg = CfgExpr::arbitrary(&mut u).unwrap();
+ DnfExpr::new(cfg);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
new file mode 100644
index 000000000..d3d180ece
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "flycheck"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+crossbeam-channel = "0.5.5"
+tracing = "0.1.35"
+cargo_metadata = "0.15.0"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = "1.0.81"
+jod-thread = "0.1.2"
+
+toolchain = { path = "../toolchain", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
new file mode 100644
index 000000000..4e8bc881a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
@@ -0,0 +1,396 @@
+//! Flycheck provides the functionality needed to run `cargo check` or
+//! another compatible command (e.g. clippy) in a background thread and provide
+//! LSP diagnostics based on the output of the command.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, io,
+ process::{ChildStderr, ChildStdout, Command, Stdio},
+ time::Duration,
+};
+
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use paths::AbsPathBuf;
+use serde::Deserialize;
+use stdx::{process::streaming_output, JodChild};
+
+pub use cargo_metadata::diagnostic::{
+ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
+ DiagnosticSpanMacroExpansion,
+};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum FlycheckConfig {
+ CargoCommand {
+ command: String,
+ target_triple: Option<String>,
+ all_targets: bool,
+ no_default_features: bool,
+ all_features: bool,
+ features: Vec<String>,
+ extra_args: Vec<String>,
+ },
+ CustomCommand {
+ command: String,
+ args: Vec<String>,
+ },
+}
+
+impl fmt::Display for FlycheckConfig {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command),
+ FlycheckConfig::CustomCommand { command, args } => {
+ write!(f, "{} {}", command, args.join(" "))
+ }
+ }
+ }
+}
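An illustrative sketch (not part of the diff) of a typical configuration; with these values the spawned process is roughly `cargo check --workspace --message-format=json --manifest-path <root>/Cargo.toml --all-targets` (see check_command below):

    let config = FlycheckConfig::CargoCommand {
        command: "check".to_string(),
        target_triple: None,
        all_targets: true,
        no_default_features: false,
        all_features: false,
        features: vec![],
        extra_args: vec![],
    };
    assert_eq!(config.to_string(), "cargo check");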
+
+/// Flycheck wraps the shared state and communication machinery used for
+/// running `cargo check` (or other compatible command) and providing
+/// diagnostics based on the output.
+/// The spawned thread is shut down when this struct is dropped.
+#[derive(Debug)]
+pub struct FlycheckHandle {
+ // XXX: drop order is significant
+ sender: Sender<Restart>,
+ _thread: jod_thread::JoinHandle,
+}
+
+impl FlycheckHandle {
+ pub fn spawn(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckHandle {
+ let actor = FlycheckActor::new(id, sender, config, workspace_root);
+ let (sender, receiver) = unbounded::<Restart>();
+ let thread = jod_thread::Builder::new()
+ .name("Flycheck".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
+ FlycheckHandle { sender, _thread: thread }
+ }
+
+ /// Schedule a re-start of the cargo check worker.
+ pub fn update(&self) {
+ self.sender.send(Restart).unwrap();
+ }
+}
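A hedged sketch of spawning a handle; the helper name, callback, and the way the workspace root is obtained are illustrative only:

    fn start_flycheck(workspace_root: AbsPathBuf, config: FlycheckConfig) -> FlycheckHandle {
        // The callback is invoked on the worker thread for every Message.
        let on_message = Box::new(|msg: Message| tracing::debug!(?msg, "flycheck message"));
        let handle = FlycheckHandle::spawn(0, on_message, config, workspace_root);
        handle.update(); // schedule the first check run
        handle
    }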
+
+pub enum Message {
+ /// Request adding a diagnostic with fixes included to a file
+ AddDiagnostic { workspace_root: AbsPathBuf, diagnostic: Diagnostic },
+
+ /// Request check progress notification to client
+ Progress {
+ /// Flycheck instance ID
+ id: usize,
+ progress: Progress,
+ },
+}
+
+impl fmt::Debug for Message {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Message::AddDiagnostic { workspace_root, diagnostic } => f
+ .debug_struct("AddDiagnostic")
+ .field("workspace_root", workspace_root)
+ .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
+ .finish(),
+ Message::Progress { id, progress } => {
+ f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub enum Progress {
+ DidStart,
+ DidCheckCrate(String),
+ DidFinish(io::Result<()>),
+ DidCancel,
+}
+
+struct Restart;
+
+struct FlycheckActor {
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ /// CargoHandle exists to wrap around the communication needed to be able to
+ /// run `cargo check` without blocking. Currently the Rust standard library
+ /// doesn't provide a way to read sub-process output without blocking, so we
+ /// have to wrap sub-processes output handling in a thread and pass messages
+ /// back over a channel.
+ cargo_handle: Option<CargoHandle>,
+}
+
+enum Event {
+ Restart(Restart),
+ CheckEvent(Option<CargoMessage>),
+}
+
+impl FlycheckActor {
+ fn new(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckActor {
+ FlycheckActor { id, sender, config, workspace_root, cargo_handle: None }
+ }
+ fn progress(&self, progress: Progress) {
+ self.send(Message::Progress { id: self.id, progress });
+ }
+ fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
+ let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
+ select! {
+ recv(inbox) -> msg => msg.ok().map(Event::Restart),
+ recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
+ }
+ }
+ fn run(mut self, inbox: Receiver<Restart>) {
+ while let Some(event) = self.next_event(&inbox) {
+ match event {
+ Event::Restart(Restart) => {
+ // Cancel the previously spawned process
+ self.cancel_check_process();
+ while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {}
+
+ let command = self.check_command();
+ tracing::debug!(?command, "will restart flycheck");
+ match CargoHandle::spawn(command) {
+ Ok(cargo_handle) => {
+ tracing::debug!(
+ command = ?self.check_command(),
+ "did restart flycheck"
+ );
+ self.cargo_handle = Some(cargo_handle);
+ self.progress(Progress::DidStart);
+ }
+ Err(error) => {
+ tracing::error!(
+ command = ?self.check_command(),
+ %error, "failed to restart flycheck"
+ );
+ }
+ }
+ }
+ Event::CheckEvent(None) => {
+ tracing::debug!("flycheck finished");
+
+ // Watcher finished
+ let cargo_handle = self.cargo_handle.take().unwrap();
+ let res = cargo_handle.join();
+ if res.is_err() {
+ tracing::error!(
+ "Flycheck failed to run the following command: {:?}",
+ self.check_command()
+ );
+ }
+ self.progress(Progress::DidFinish(res));
+ }
+ Event::CheckEvent(Some(message)) => match message {
+ CargoMessage::CompilerArtifact(msg) => {
+ self.progress(Progress::DidCheckCrate(msg.target.name));
+ }
+
+ CargoMessage::Diagnostic(msg) => {
+ self.send(Message::AddDiagnostic {
+ workspace_root: self.workspace_root.clone(),
+ diagnostic: msg,
+ });
+ }
+ },
+ }
+ }
+ // If we rerun the thread, we need to discard the previous check results first
+ self.cancel_check_process();
+ }
+
+ fn cancel_check_process(&mut self) {
+ if let Some(cargo_handle) = self.cargo_handle.take() {
+ cargo_handle.cancel();
+ self.progress(Progress::DidCancel);
+ }
+ }
+
+ fn check_command(&self) -> Command {
+ let mut cmd = match &self.config {
+ FlycheckConfig::CargoCommand {
+ command,
+ target_triple,
+ no_default_features,
+ all_targets,
+ all_features,
+ extra_args,
+ features,
+ } => {
+ let mut cmd = Command::new(toolchain::cargo());
+ cmd.arg(command);
+ cmd.current_dir(&self.workspace_root);
+ cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
+ .arg(self.workspace_root.join("Cargo.toml").as_os_str());
+
+ if let Some(target) = target_triple {
+ cmd.args(&["--target", target.as_str()]);
+ }
+ if *all_targets {
+ cmd.arg("--all-targets");
+ }
+ if *all_features {
+ cmd.arg("--all-features");
+ } else {
+ if *no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(features.join(" "));
+ }
+ }
+ cmd.args(extra_args);
+ cmd
+ }
+ FlycheckConfig::CustomCommand { command, args } => {
+ let mut cmd = Command::new(command);
+ cmd.args(args);
+ cmd
+ }
+ };
+ cmd.current_dir(&self.workspace_root);
+ cmd
+ }
+
+ fn send(&self, check_task: Message) {
+ (self.sender)(check_task);
+ }
+}
+
+/// A handle to a cargo process used for fly-checking.
+struct CargoHandle {
+ /// The handle to the actual cargo process. As we cannot cancel directly from
+ /// within a read syscall, dropping and therefore terminating the process is
+ /// our best option.
+ child: JodChild,
+ thread: jod_thread::JoinHandle<io::Result<(bool, String)>>,
+ receiver: Receiver<CargoMessage>,
+}
+
+impl CargoHandle {
+ fn spawn(mut command: Command) -> std::io::Result<CargoHandle> {
+ command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
+ let mut child = JodChild::spawn(command)?;
+
+ let stdout = child.stdout.take().unwrap();
+ let stderr = child.stderr.take().unwrap();
+
+ let (sender, receiver) = unbounded();
+ let actor = CargoActor::new(sender, stdout, stderr);
+ let thread = jod_thread::Builder::new()
+ .name("CargoHandle".to_owned())
+ .spawn(move || actor.run())
+ .expect("failed to spawn thread");
+ Ok(CargoHandle { child, thread, receiver })
+ }
+
+ fn cancel(mut self) {
+ let _ = self.child.kill();
+ let _ = self.child.wait();
+ }
+
+ fn join(mut self) -> io::Result<()> {
+ let _ = self.child.kill();
+ let exit_status = self.child.wait()?;
+ let (read_at_least_one_message, error) = self.thread.join()?;
+ if read_at_least_one_message || exit_status.success() {
+ Ok(())
+ } else {
+ Err(io::Error::new(io::ErrorKind::Other, format!(
+ "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
+ exit_status, error
+ )))
+ }
+ }
+}
+
+struct CargoActor {
+ sender: Sender<CargoMessage>,
+ stdout: ChildStdout,
+ stderr: ChildStderr,
+}
+
+impl CargoActor {
+ fn new(sender: Sender<CargoMessage>, stdout: ChildStdout, stderr: ChildStderr) -> CargoActor {
+ CargoActor { sender, stdout, stderr }
+ }
+
+ fn run(self) -> io::Result<(bool, String)> {
+ // We manually read a line at a time instead of using serde's stream
+ // deserializers, because a stream deserializer cannot recover from an
+ // error and would get stuck, and we want to be resilient against
+ // failures.
+ //
+ // Because cargo only outputs one JSON object per line, we can simply
+ // skip a line if it doesn't parse, which just ignores any erroneous
+ // output.
+
+ let mut error = String::new();
+ let mut read_at_least_one_message = false;
+ let output = streaming_output(
+ self.stdout,
+ self.stderr,
+ &mut |line| {
+ read_at_least_one_message = true;
+
+ // Try to deserialize a message from Cargo or Rustc.
+ let mut deserializer = serde_json::Deserializer::from_str(line);
+ deserializer.disable_recursion_limit();
+ if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
+ match message {
+ // Skip certain kinds of messages to only spend time on what's useful
+ JsonMessage::Cargo(message) => match message {
+ cargo_metadata::Message::CompilerArtifact(artifact)
+ if !artifact.fresh =>
+ {
+ self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+ }
+ cargo_metadata::Message::CompilerMessage(msg) => {
+ self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
+ }
+ _ => (),
+ },
+ JsonMessage::Rustc(message) => {
+ self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
+ }
+ }
+ }
+ },
+ &mut |line| {
+ error.push_str(line);
+ error.push('\n');
+ },
+ );
+ match output {
+ Ok(_) => Ok((read_at_least_one_message, error)),
+ Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))),
+ }
+ }
+}
+
+enum CargoMessage {
+ CompilerArtifact(cargo_metadata::Artifact),
+ Diagnostic(Diagnostic),
+}
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum JsonMessage {
+ Cargo(cargo_metadata::Message),
+ Rustc(Diagnostic),
+}
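A sketch (editorial, assuming it sits in this module so the private JsonMessage type is in scope) of the parsing step used in CargoActor::run in isolation: `#[serde(untagged)]` tries the cargo_metadata::Message variant first and falls back to a bare rustc Diagnostic, and lines that parse as neither are skipped.

    fn parse_line(line: &str) -> Option<JsonMessage> {
        // Mirrors the setup in CargoActor::run: bounded only by input size.
        let mut deserializer = serde_json::Deserializer::from_str(line);
        deserializer.disable_recursion_limit();
        JsonMessage::deserialize(&mut deserializer).ok()
    }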
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
new file mode 100644
index 000000000..e8cff2f3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -0,0 +1,43 @@
+[package]
+name = "hir-def"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+anymap = "1.0.0-beta.2"
+arrayvec = "0.7.2"
+bitflags = "1.3.2"
+cov-mark = "2.0.0-pre.1"
+# We need to freeze the version of the crate, as the raw-api feature is considered unstable
+dashmap = { version = "=5.3.4", features = ["raw-api"] }
+drop_bomb = "0.1.5"
+either = "1.7.0"
+fst = { version = "0.4.7", default-features = false }
+hashbrown = { version = "0.12.1", default-features = false }
+indexmap = "1.9.1"
+itertools = "0.10.3"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+once_cell = "1.12.0"
+rustc-hash = "1.1.0"
+smallvec = "1.9.0"
+tracing = "0.1.35"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
new file mode 100644
index 000000000..277135d6d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
@@ -0,0 +1,365 @@
+//! Defines hir-level representation of structs, enums and unions
+
+use std::sync::Arc;
+
+use base_db::CrateId;
+use either::Either;
+use hir_expand::{
+ name::{AsName, Name},
+ InFile,
+};
+use la_arena::{Arena, ArenaMap};
+use syntax::ast::{self, HasName, HasVisibility};
+use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
+
+use crate::{
+ body::{CfgExpander, LowerCtx},
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem, RawVisibilityId},
+ src::HasChildSource,
+ src::HasSource,
+ trace::Trace,
+ type_ref::TypeRef,
+ visibility::RawVisibility,
+ EnumId, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, VariantId,
+};
+use cfg::CfgOptions;
+
+/// Note that we use `StructData` for unions as well!
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct StructData {
+ pub name: Name,
+ pub variant_data: Arc<VariantData>,
+ pub repr: Option<ReprKind>,
+ pub visibility: RawVisibility,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct EnumData {
+ pub name: Name,
+ pub variants: Arena<EnumVariantData>,
+ pub visibility: RawVisibility,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct EnumVariantData {
+ pub name: Name,
+ pub variant_data: Arc<VariantData>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum VariantData {
+ Record(Arena<FieldData>),
+ Tuple(Arena<FieldData>),
+ Unit,
+}
+
+/// A single field of an enum variant or struct
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FieldData {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibility,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ReprKind {
+ Packed,
+ Other,
+}
+
+fn repr_from_value(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ item_tree: &ItemTree,
+ of: AttrOwner,
+) -> Option<ReprKind> {
+ item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt)
+}
+
+fn parse_repr_tt(tt: &Subtree) -> Option<ReprKind> {
+ match tt.delimiter {
+ Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {}
+ _ => return None,
+ }
+
+ let mut it = tt.token_trees.iter();
+ match it.next()? {
+ TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed),
+ _ => Some(ReprKind::Other),
+ }
+}
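For orientation, an assumed mapping of common `repr` attributes under parse_repr_tt above (illustrative, not exhaustive):

    // #[repr(packed)]       -> Some(ReprKind::Packed)
    // #[repr(C)]            -> Some(ReprKind::Other)
    // #[repr(transparent)]  -> Some(ReprKind::Other)
    // #[repr]  (no parens)  -> None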
+
+impl StructData {
+ pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
+ let loc = id.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
+ let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
+
+ let strukt = &item_tree[loc.id.value];
+ let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &strukt.fields, None);
+ Arc::new(StructData {
+ name: strukt.name.clone(),
+ variant_data: Arc::new(variant_data),
+ repr,
+ visibility: item_tree[strukt.visibility].clone(),
+ })
+ }
+ pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
+ let loc = id.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
+ let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
+
+ let union = &item_tree[loc.id.value];
+ let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &union.fields, None);
+
+ Arc::new(StructData {
+ name: union.name.clone(),
+ variant_data: Arc::new(variant_data),
+ repr,
+ visibility: item_tree[union.visibility].clone(),
+ })
+ }
+}
+
+impl EnumData {
+ pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> {
+ let loc = e.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let cfg_options = db.crate_graph()[krate].cfg_options.clone();
+
+ let enum_ = &item_tree[loc.id.value];
+ let mut variants = Arena::new();
+ for tree_id in enum_.variants.clone() {
+ if item_tree.attrs(db, krate, tree_id.into()).is_cfg_enabled(&cfg_options) {
+ let var = &item_tree[tree_id];
+ let var_data = lower_fields(
+ db,
+ krate,
+ &item_tree,
+ &cfg_options,
+ &var.fields,
+ Some(enum_.visibility),
+ );
+
+ variants.alloc(EnumVariantData {
+ name: var.name.clone(),
+ variant_data: Arc::new(var_data),
+ });
+ }
+ }
+
+ Arc::new(EnumData {
+ name: enum_.name.clone(),
+ variants,
+ visibility: item_tree[enum_.visibility].clone(),
+ })
+ }
+
+ pub fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> {
+ let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?;
+ Some(id)
+ }
+}
+
+impl HasChildSource<LocalEnumVariantId> for EnumId {
+ type Value = ast::Variant;
+ fn child_source(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> InFile<ArenaMap<LocalEnumVariantId, Self::Value>> {
+ let src = self.lookup(db).source(db);
+ let mut trace = Trace::new_for_map();
+ lower_enum(db, &mut trace, &src, self.lookup(db).container);
+ src.with_value(trace.into_map())
+ }
+}
+
+fn lower_enum(
+ db: &dyn DefDatabase,
+ trace: &mut Trace<EnumVariantData, ast::Variant>,
+ ast: &InFile<ast::Enum>,
+ module_id: ModuleId,
+) {
+ let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
+ let variants = ast
+ .value
+ .variant_list()
+ .into_iter()
+ .flat_map(|it| it.variants())
+ .filter(|var| expander.is_cfg_enabled(db, var));
+ for var in variants {
+ trace.alloc(
+ || var.clone(),
+ || EnumVariantData {
+ name: var.name().map_or_else(Name::missing, |it| it.as_name()),
+ variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)),
+ },
+ );
+ }
+}
+
+impl VariantData {
+ fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self {
+ let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate);
+ let mut trace = Trace::new_for_arena();
+ match lower_struct(db, &mut expander, &mut trace, &flavor) {
+ StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
+ StructKind::Record => VariantData::Record(trace.into_arena()),
+ StructKind::Unit => VariantData::Unit,
+ }
+ }
+
+ pub fn fields(&self) -> &Arena<FieldData> {
+ const EMPTY: &Arena<FieldData> = &Arena::new();
+ match &self {
+ VariantData::Record(fields) | VariantData::Tuple(fields) => fields,
+ _ => EMPTY,
+ }
+ }
+
+ pub fn field(&self, name: &Name) -> Option<LocalFieldId> {
+ self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
+ }
+
+ pub fn kind(&self) -> StructKind {
+ match self {
+ VariantData::Record(_) => StructKind::Record,
+ VariantData::Tuple(_) => StructKind::Tuple,
+ VariantData::Unit => StructKind::Unit,
+ }
+ }
+}
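A small illustrative sketch of the accessors that need no database:

    let unit = VariantData::Unit;
    assert_eq!(unit.kind(), StructKind::Unit);
    assert!(unit.fields().iter().next().is_none());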
+
+impl HasChildSource<LocalFieldId> for VariantId {
+ type Value = Either<ast::TupleField, ast::RecordField>;
+
+ fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> {
+ let (src, module_id) = match self {
+ VariantId::EnumVariantId(it) => {
+ // I don't really like the fact that we call into the parent source
+ // here; this might add more queries than necessary.
+ let src = it.parent.child_source(db);
+ (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container)
+ }
+ VariantId::StructId(it) => {
+ (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container)
+ }
+ VariantId::UnionId(it) => (
+ it.lookup(db).source(db).map(|it| {
+ it.record_field_list()
+ .map(ast::StructKind::Record)
+ .unwrap_or(ast::StructKind::Unit)
+ }),
+ it.lookup(db).container,
+ ),
+ };
+ let mut expander = CfgExpander::new(db, src.file_id, module_id.krate);
+ let mut trace = Trace::new_for_map();
+ lower_struct(db, &mut expander, &mut trace, &src);
+ src.with_value(trace.into_map())
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum StructKind {
+ Tuple,
+ Record,
+ Unit,
+}
+
+fn lower_struct(
+ db: &dyn DefDatabase,
+ expander: &mut CfgExpander,
+ trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>,
+ ast: &InFile<ast::StructKind>,
+) -> StructKind {
+ let ctx = LowerCtx::new(db, ast.file_id);
+
+ match &ast.value {
+ ast::StructKind::Tuple(fl) => {
+ for (i, fd) in fl.fields().enumerate() {
+ if !expander.is_cfg_enabled(db, &fd) {
+ continue;
+ }
+
+ trace.alloc(
+ || Either::Left(fd.clone()),
+ || FieldData {
+ name: Name::new_tuple_field(i),
+ type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())),
+ visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
+ },
+ );
+ }
+ StructKind::Tuple
+ }
+ ast::StructKind::Record(fl) => {
+ for fd in fl.fields() {
+ if !expander.is_cfg_enabled(db, &fd) {
+ continue;
+ }
+
+ trace.alloc(
+ || Either::Right(fd.clone()),
+ || FieldData {
+ name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
+ type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())),
+ visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
+ },
+ );
+ }
+ StructKind::Record
+ }
+ ast::StructKind::Unit => StructKind::Unit,
+ }
+}
+
+fn lower_fields(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ item_tree: &ItemTree,
+ cfg_options: &CfgOptions,
+ fields: &Fields,
+ override_visibility: Option<RawVisibilityId>,
+) -> VariantData {
+ match fields {
+ Fields::Record(flds) => {
+ let mut arena = Arena::new();
+ for field_id in flds.clone() {
+ if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) {
+ arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility));
+ }
+ }
+ VariantData::Record(arena)
+ }
+ Fields::Tuple(flds) => {
+ let mut arena = Arena::new();
+ for field_id in flds.clone() {
+ if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) {
+ arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility));
+ }
+ }
+ VariantData::Tuple(arena)
+ }
+ Fields::Unit => VariantData::Unit,
+ }
+}
+
+fn lower_field(
+ item_tree: &ItemTree,
+ field: &Field,
+ override_visibility: Option<RawVisibilityId>,
+) -> FieldData {
+ FieldData {
+ name: field.name.clone(),
+ type_ref: field.type_ref.clone(),
+ visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
new file mode 100644
index 000000000..2b39c6f8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -0,0 +1,1002 @@
+//! Higher-level attributes based on TokenTree, with some shortcuts.
+
+use std::{fmt, hash::Hash, ops, sync::Arc};
+
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
+use itertools::Itertools;
+use la_arena::{ArenaMap, Idx, RawIdx};
+use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ ast::{self, AstNode, HasAttrs, IsString},
+ match_ast, AstPtr, AstToken, SmolStr, SyntaxNode, TextRange, TextSize,
+};
+use tt::Subtree;
+
+use crate::{
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{AttrOwner, Fields, ItemTreeId, ItemTreeNode},
+ nameres::{ModuleOrigin, ModuleSource},
+ path::{ModPath, PathKind},
+ src::{HasChildSource, HasSource},
+ AdtId, AttrDefId, EnumId, GenericParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroId,
+ VariantId,
+};
+
+/// Holds documentation
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Documentation(String);
+
+impl Documentation {
+ pub fn new(s: String) -> Self {
+ Documentation(s)
+ }
+
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<Documentation> for String {
+ fn from(Documentation(string): Documentation) -> Self {
+ string
+ }
+}
+
+/// Syntactical attributes, without filtering of `cfg_attr`s.
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub(crate) struct RawAttrs {
+ entries: Option<Arc<[Attr]>>,
+}
+
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Attrs(RawAttrs);
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct AttrsWithOwner {
+ attrs: Attrs,
+ owner: AttrDefId,
+}
+
+impl ops::Deref for RawAttrs {
+ type Target = [Attr];
+
+ fn deref(&self) -> &[Attr] {
+ match &self.entries {
+ Some(it) => &*it,
+ None => &[],
+ }
+ }
+}
+impl Attrs {
+ pub fn get(&self, id: AttrId) -> Option<&Attr> {
+ (**self).iter().find(|attr| attr.id == id)
+ }
+}
+
+impl ops::Deref for Attrs {
+ type Target = [Attr];
+
+ fn deref(&self) -> &[Attr] {
+ match &self.0.entries {
+ Some(it) => &*it,
+ None => &[],
+ }
+ }
+}
+
+impl ops::Deref for AttrsWithOwner {
+ type Target = Attrs;
+
+ fn deref(&self) -> &Attrs {
+ &self.attrs
+ }
+}
+
+impl RawAttrs {
+ pub(crate) const EMPTY: Self = Self { entries: None };
+
+ pub(crate) fn new(db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
+ let entries = collect_attrs(owner)
+ .filter_map(|(id, attr)| match attr {
+ Either::Left(attr) => {
+ attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
+ }
+ Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+ id,
+ input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+ path: Interned::new(ModPath::from(hir_expand::name!(doc))),
+ }),
+ })
+ .collect::<Arc<_>>();
+
+ Self { entries: if entries.is_empty() { None } else { Some(entries) } }
+ }
+
+ fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
+ let hygiene = Hygiene::new(db.upcast(), owner.file_id);
+ Self::new(db, owner.value, &hygiene)
+ }
+
+ pub(crate) fn merge(&self, other: Self) -> Self {
+ // FIXME: This needs to fixup `AttrId`s
+ match (&self.entries, other.entries) {
+ (None, None) => Self::EMPTY,
+ (None, entries @ Some(_)) => Self { entries },
+ (Some(entries), None) => Self { entries: Some(entries.clone()) },
+ (Some(a), Some(b)) => {
+ let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1);
+ Self {
+ entries: Some(
+ a.iter()
+ .cloned()
+ .chain(b.iter().map(|it| {
+ let mut it = it.clone();
+ it.id.ast_index += last_ast_index;
+ it
+ }))
+ .collect(),
+ ),
+ }
+ }
+ }
+ }
+
+ /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
+ pub(crate) fn filter(self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
+ let has_cfg_attrs = self.iter().any(|attr| {
+ attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr])
+ });
+ if !has_cfg_attrs {
+ return Attrs(self);
+ }
+
+ let crate_graph = db.crate_graph();
+ let new_attrs = self
+ .iter()
+ .flat_map(|attr| -> SmallVec<[_; 1]> {
+ let is_cfg_attr =
+ attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]);
+ if !is_cfg_attr {
+ return smallvec![attr.clone()];
+ }
+
+ let subtree = match attr.token_tree_value() {
+ Some(it) => it,
+ _ => return smallvec![attr.clone()],
+ };
+
+ // Input subtree is: `(cfg, $(attr),+)`
+ // Split it up into a `cfg` subtree and the `attr` subtrees.
+ // FIXME: There should be a common API for this.
+ let mut parts = subtree.token_trees.split(|tt| {
+ matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))
+ });
+ let cfg = match parts.next() {
+ Some(it) => it,
+ None => return smallvec![],
+ };
+ let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+ let cfg = CfgExpr::parse(&cfg);
+ let index = attr.id;
+ let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| {
+ let tree = Subtree { delimiter: None, token_trees: attr.to_vec() };
+ // FIXME hygiene
+ let hygiene = Hygiene::new_unhygienic();
+ Attr::from_tt(db, &tree, &hygiene, index)
+ });
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+ if cfg_options.check(&cfg) == Some(false) {
+ smallvec![]
+ } else {
+ cov_mark::hit!(cfg_attr_active);
+
+ attrs.collect()
+ }
+ })
+ .collect();
+
+ Attrs(RawAttrs { entries: Some(new_attrs) })
+ }
+}
+
+impl Attrs {
+ pub const EMPTY: Self = Self(RawAttrs::EMPTY);
+
+ pub(crate) fn variants_attrs_query(
+ db: &dyn DefDatabase,
+ e: EnumId,
+ ) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>> {
+ // FIXME: There should be some proper form of mapping between item tree enum variant ids and hir enum variant ids
+ let mut res = ArenaMap::default();
+
+ let loc = e.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let enum_ = &item_tree[loc.id.value];
+ let crate_graph = db.crate_graph();
+ let cfg_options = &crate_graph[krate].cfg_options;
+
+ let mut idx = 0;
+ for variant in enum_.variants.clone() {
+ let attrs = item_tree.attrs(db, krate, variant.into());
+ if attrs.is_cfg_enabled(cfg_options) {
+ res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+ idx += 1;
+ }
+ }
+
+ Arc::new(res)
+ }
+
+ pub(crate) fn fields_attrs_query(
+ db: &dyn DefDatabase,
+ v: VariantId,
+ ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
+ // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
+ let mut res = ArenaMap::default();
+
+ let crate_graph = db.crate_graph();
+ let (fields, item_tree, krate) = match v {
+ VariantId::EnumVariantId(it) => {
+ let e = it.parent;
+ let loc = e.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let enum_ = &item_tree[loc.id.value];
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let variant = 'tri: loop {
+ let mut idx = 0;
+ for variant in enum_.variants.clone() {
+ let attrs = item_tree.attrs(db, krate, variant.into());
+ if attrs.is_cfg_enabled(cfg_options) {
+ if it.local_id == Idx::from_raw(RawIdx::from(idx)) {
+ break 'tri variant;
+ }
+ idx += 1;
+ }
+ }
+ return Arc::new(res);
+ };
+ (item_tree[variant].fields.clone(), item_tree, krate)
+ }
+ VariantId::StructId(it) => {
+ let loc = it.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let struct_ = &item_tree[loc.id.value];
+ (struct_.fields.clone(), item_tree, krate)
+ }
+ VariantId::UnionId(it) => {
+ let loc = it.lookup(db);
+ let krate = loc.container.krate;
+ let item_tree = loc.id.item_tree(db);
+ let union_ = &item_tree[loc.id.value];
+ (union_.fields.clone(), item_tree, krate)
+ }
+ };
+
+ let fields = match fields {
+ Fields::Record(fields) | Fields::Tuple(fields) => fields,
+ Fields::Unit => return Arc::new(res),
+ };
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+
+ let mut idx = 0;
+ for field in fields {
+ let attrs = item_tree.attrs(db, krate, field.into());
+ if attrs.is_cfg_enabled(cfg_options) {
+ res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+ idx += 1;
+ }
+ }
+
+ Arc::new(res)
+ }
+
+ pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> {
+ AttrQuery { attrs: self, key }
+ }
+}
+
+impl Attrs {
+ pub fn cfg(&self) -> Option<CfgExpr> {
+ let mut cfgs = self.by_key("cfg").tt_values().map(CfgExpr::parse);
+ let first = cfgs.next()?;
+ match cfgs.next() {
+ Some(second) => {
+ let cfgs = [first, second].into_iter().chain(cfgs);
+ Some(CfgExpr::All(cfgs.collect()))
+ }
+ None => Some(first),
+ }
+ }
+ pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
+ match self.cfg() {
+ None => true,
+ Some(cfg) => cfg_options.check(&cfg) != Some(false),
+ }
+ }
+
+ pub fn lang(&self) -> Option<&SmolStr> {
+ self.by_key("lang").string_value()
+ }
+
+ pub fn docs(&self) -> Option<Documentation> {
+ let docs = self.by_key("doc").attrs().filter_map(|attr| attr.string_value());
+ let indent = doc_indent(self);
+ let mut buf = String::new();
+ for doc in docs {
+ // str::lines doesn't yield anything for the empty string
+ if !doc.is_empty() {
+ buf.extend(Itertools::intersperse(
+ doc.lines().map(|line| {
+ line.char_indices()
+ .nth(indent)
+ .map_or(line, |(offset, _)| &line[offset..])
+ .trim_end()
+ }),
+ "\n",
+ ));
+ }
+ buf.push('\n');
+ }
+ buf.pop();
+ if buf.is_empty() {
+ None
+ } else {
+ Some(Documentation(buf))
+ }
+ }
+
+ pub fn has_doc_hidden(&self) -> bool {
+ self.by_key("doc").tt_values().any(|tt| {
+ tt.delimiter_kind() == Some(DelimiterKind::Parenthesis) &&
+ matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden")
+ })
+ }
+
+ pub fn is_proc_macro(&self) -> bool {
+ self.by_key("proc_macro").exists()
+ }
+
+ pub fn is_proc_macro_attribute(&self) -> bool {
+ self.by_key("proc_macro_attribute").exists()
+ }
+
+ pub fn is_proc_macro_derive(&self) -> bool {
+ self.by_key("proc_macro_derive").exists()
+ }
+}
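An illustrative sketch of the shape cfg() produces for an item carrying both `#[cfg(unix)]` and `#[cfg(feature = "foo")]`, and of how is_cfg_enabled interprets the result; it assumes `use cfg::CfgAtom;`, which this file does not currently import:

    // Assumes `use cfg::CfgAtom;` in scope (not imported above).
    let combined = CfgExpr::All(vec![
        CfgAtom::Flag("unix".into()).into(),
        CfgAtom::KeyValue { key: "feature".into(), value: "foo".into() }.into(),
    ]);
    let opts = CfgOptions::default();
    // Only an explicit `Some(false)` disables the item; `None` (unknown or
    // invalid cfg) keeps it enabled, mirroring is_cfg_enabled above.
    let enabled = opts.check(&combined) != Some(false);
    assert!(!enabled); // neither "unix" nor the feature is set in the empty options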
+
+impl AttrsWithOwner {
+ pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Self {
+ // FIXME: this should use `Trace` to avoid duplication in `source_map` below
+ let raw_attrs = match def {
+ AttrDefId::ModuleId(module) => {
+ let def_map = module.def_map(db);
+ let mod_data = &def_map[module.local_id];
+
+ match mod_data.origin {
+ ModuleOrigin::File { definition, declaration_tree_id, .. } => {
+ let decl_attrs = declaration_tree_id
+ .item_tree(db)
+ .raw_attrs(AttrOwner::ModItem(declaration_tree_id.value.into()))
+ .clone();
+ let tree = db.file_item_tree(definition.into());
+ let def_attrs = tree.raw_attrs(AttrOwner::TopLevel).clone();
+ decl_attrs.merge(def_attrs)
+ }
+ ModuleOrigin::CrateRoot { definition } => {
+ let tree = db.file_item_tree(definition.into());
+ tree.raw_attrs(AttrOwner::TopLevel).clone()
+ }
+ ModuleOrigin::Inline { definition_tree_id, .. } => definition_tree_id
+ .item_tree(db)
+ .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into()))
+ .clone(),
+ ModuleOrigin::BlockExpr { block } => RawAttrs::from_attrs_owner(
+ db,
+ InFile::new(block.file_id, block.to_node(db.upcast()))
+ .as_ref()
+ .map(|it| it as &dyn ast::HasAttrs),
+ ),
+ }
+ }
+ AttrDefId::FieldId(it) => {
+ return Self { attrs: db.fields_attrs(it.parent)[it.local_id].clone(), owner: def };
+ }
+ AttrDefId::EnumVariantId(it) => {
+ return Self {
+ attrs: db.variants_attrs(it.parent)[it.local_id].clone(),
+ owner: def,
+ };
+ }
+ AttrDefId::AdtId(it) => match it {
+ AdtId::StructId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AdtId::EnumId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AdtId::UnionId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ },
+ AttrDefId::TraitId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::MacroId(it) => match it {
+ MacroId::Macro2Id(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ MacroId::MacroRulesId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ MacroId::ProcMacroId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ },
+ AttrDefId::ImplId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::ConstId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::StaticId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::FunctionId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::TypeAliasId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ AttrDefId::GenericParamId(it) => match it {
+ GenericParamId::ConstParamId(it) => {
+ let src = it.parent().child_source(db);
+ RawAttrs::from_attrs_owner(
+ db,
+ src.with_value(src.value[it.local_id()].as_ref().either(
+ |it| match it {
+ ast::TypeOrConstParam::Type(it) => it as _,
+ ast::TypeOrConstParam::Const(it) => it as _,
+ },
+ |it| it as _,
+ )),
+ )
+ }
+ GenericParamId::TypeParamId(it) => {
+ let src = it.parent().child_source(db);
+ RawAttrs::from_attrs_owner(
+ db,
+ src.with_value(src.value[it.local_id()].as_ref().either(
+ |it| match it {
+ ast::TypeOrConstParam::Type(it) => it as _,
+ ast::TypeOrConstParam::Const(it) => it as _,
+ },
+ |it| it as _,
+ )),
+ )
+ }
+ GenericParamId::LifetimeParamId(it) => {
+ let src = it.parent.child_source(db);
+ RawAttrs::from_attrs_owner(db, src.with_value(&src.value[it.local_id]))
+ }
+ },
+ AttrDefId::ExternBlockId(it) => attrs_from_item_tree(it.lookup(db).id, db),
+ };
+
+ let attrs = raw_attrs.filter(db, def.krate(db));
+ Self { attrs, owner: def }
+ }
+
+ pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
+ let owner = match self.owner {
+ AttrDefId::ModuleId(module) => {
+ // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
+
+ let def_map = module.def_map(db);
+ let mod_data = &def_map[module.local_id];
+ match mod_data.declaration_source(db) {
+ Some(it) => {
+ let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
+ if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
+ mod_data.definition_source(db)
+ {
+ map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
+ file_id, &file,
+ )));
+ }
+ return map;
+ }
+ None => {
+ let InFile { file_id, value } = mod_data.definition_source(db);
+ let attrs_owner = match &value {
+ ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
+ ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
+ ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
+ };
+ return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
+ }
+ }
+ }
+ AttrDefId::FieldId(id) => {
+ let map = db.fields_attrs_source_map(id.parent);
+ let file_id = id.parent.file_id(db);
+ let root = db.parse_or_expand(file_id).unwrap();
+ let owner = match &map[id.local_id] {
+ Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
+ Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
+ };
+ InFile::new(file_id, owner)
+ }
+ AttrDefId::AdtId(adt) => match adt {
+ AdtId::StructId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AdtId::UnionId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AdtId::EnumId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ },
+ AttrDefId::FunctionId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::EnumVariantId(id) => {
+ let map = db.variants_attrs_source_map(id.parent);
+ let file_id = id.parent.lookup(db).id.file_id();
+ let root = db.parse_or_expand(file_id).unwrap();
+ InFile::new(file_id, ast::AnyHasAttrs::new(map[id.local_id].to_node(&root)))
+ }
+ AttrDefId::StaticId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::ConstId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::TraitId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::TypeAliasId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::MacroId(id) => match id {
+ MacroId::Macro2Id(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ MacroId::MacroRulesId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ MacroId::ProcMacroId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ },
+ AttrDefId::ImplId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ AttrDefId::GenericParamId(id) => match id {
+ GenericParamId::ConstParamId(id) => {
+ id.parent().child_source(db).map(|source| match &source[id.local_id()] {
+ Either::Left(ast::TypeOrConstParam::Type(id)) => {
+ ast::AnyHasAttrs::new(id.clone())
+ }
+ Either::Left(ast::TypeOrConstParam::Const(id)) => {
+ ast::AnyHasAttrs::new(id.clone())
+ }
+ Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
+ })
+ }
+ GenericParamId::TypeParamId(id) => {
+ id.parent().child_source(db).map(|source| match &source[id.local_id()] {
+ Either::Left(ast::TypeOrConstParam::Type(id)) => {
+ ast::AnyHasAttrs::new(id.clone())
+ }
+ Either::Left(ast::TypeOrConstParam::Const(id)) => {
+ ast::AnyHasAttrs::new(id.clone())
+ }
+ Either::Right(id) => ast::AnyHasAttrs::new(id.clone()),
+ })
+ }
+ GenericParamId::LifetimeParamId(id) => id
+ .parent
+ .child_source(db)
+ .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
+ },
+ AttrDefId::ExternBlockId(id) => id.lookup(db).source(db).map(ast::AnyHasAttrs::new),
+ };
+
+ AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
+ }
+
+ pub fn docs_with_rangemap(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> Option<(Documentation, DocsRangeMap)> {
+ let docs =
+ self.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
+ let indent = doc_indent(self);
+ let mut buf = String::new();
+ let mut mapping = Vec::new();
+ for (doc, idx) in docs {
+ if !doc.is_empty() {
+ let mut base_offset = 0;
+ for raw_line in doc.split('\n') {
+ let line = raw_line.trim_end();
+ let line_len = line.len();
+ let (offset, line) = match line.char_indices().nth(indent) {
+ Some((offset, _)) => (offset, &line[offset..]),
+ None => (0, line),
+ };
+ let buf_offset = buf.len();
+ buf.push_str(line);
+ mapping.push((
+ TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
+ idx,
+ TextRange::at(
+ (base_offset + offset).try_into().ok()?,
+ line_len.try_into().ok()?,
+ ),
+ ));
+ buf.push('\n');
+ base_offset += raw_line.len() + 1;
+ }
+ } else {
+ buf.push('\n');
+ }
+ }
+ buf.pop();
+ if buf.is_empty() {
+ None
+ } else {
+ Some((Documentation(buf), DocsRangeMap { mapping, source_map: self.source_map(db) }))
+ }
+ }
+}
+
+fn doc_indent(attrs: &Attrs) -> usize {
+ attrs
+ .by_key("doc")
+ .attrs()
+ .filter_map(|attr| attr.string_value())
+ .flat_map(|s| s.lines())
+ .filter(|line| !line.chars().all(|c| c.is_whitespace()))
+ .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
+ .min()
+ .unwrap_or(0)
+}
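An illustrative sketch of the same computation on plain strings, to make the behaviour concrete: the common indent is the minimum leading-whitespace count over non-blank doc lines, and blank lines are ignored.

    let doc_lines = [" line one", "", "   indented detail"];
    let indent = doc_lines
        .iter()
        .filter(|line| !line.chars().all(|c| c.is_whitespace()))
        .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
        .min()
        .unwrap_or(0);
    assert_eq!(indent, 1);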
+
+fn inner_attributes(
+ syntax: &SyntaxNode,
+) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
+ let node = match_ast! {
+ match syntax {
+ ast::SourceFile(_) => syntax.clone(),
+ ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+ ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+ ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+ ast::Module(it) => it.item_list()?.syntax().clone(),
+ ast::BlockExpr(it) => {
+ use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT};
+ // Block expressions accept outer and inner attributes, but only when they are the outer
+ // expression of an expression statement or the final expression of another block expression.
+ let may_carry_attributes = matches!(
+ it.syntax().parent().map(|it| it.kind()),
+ Some(BLOCK_EXPR | EXPR_STMT)
+ );
+ if !may_carry_attributes {
+ return None
+ }
+ syntax.clone()
+ },
+ _ => return None,
+ }
+ };
+
+ let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
+ Either::Left(attr) => attr.kind().is_inner(),
+ Either::Right(comment) => comment.is_inner(),
+ });
+ Some(attrs)
+}
+
+#[derive(Debug)]
+pub struct AttrSourceMap {
+ source: Vec<Either<ast::Attr, ast::Comment>>,
+ file_id: HirFileId,
+ /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
+ /// while `file_id` will be the one of the module declaration site.
+ /// The usize is the index into `source` from which point on the entries reside in the def site
+ /// file.
+ mod_def_site_file_id: Option<(HirFileId, usize)>,
+}
+
+impl AttrSourceMap {
+ fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
+ Self {
+ source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
+ file_id: owner.file_id,
+ mod_def_site_file_id: None,
+ }
+ }
+
+ /// Appends a second source map to this one; this is required for modules, whose
+ /// outline and inline attributes can reside in different files.
+ fn append_module_inline_attrs(&mut self, other: Self) {
+ assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
+ let len = self.source.len();
+ self.source.extend(other.source);
+ if other.file_id != self.file_id {
+ self.mod_def_site_file_id = Some((other.file_id, len));
+ }
+ }
+
+ /// Maps the lowered `Attr` back to its original syntax node.
+ ///
+ /// `attr` must come from the `owner` used for AttrSourceMap
+ ///
+ /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
+ /// the attribute represented by `Attr`.
+ pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
+ self.source_of_id(attr.id)
+ }
+
+ fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
+ let ast_idx = id.ast_index as usize;
+ let file_id = match self.mod_def_site_file_id {
+ Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
+ _ => self.file_id,
+ };
+
+ self.source
+ .get(ast_idx)
+ .map(|it| InFile::new(file_id, it))
+ .unwrap_or_else(|| panic!("cannot find attr at index {:?}", id))
+ }
+}
+
+/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
+#[derive(Debug)]
+pub struct DocsRangeMap {
+ source_map: AttrSourceMap,
+ // (docstring-line-range, attr_index, attr-string-range)
+ // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
+ // the original (untrimmed) syntax doc line
+ mapping: Vec<(TextRange, AttrId, TextRange)>,
+}
+
+impl DocsRangeMap {
+ /// Maps a [`TextRange`] relative to the documentation string back to its AST range
+ pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
+ let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
+ let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
+ if !line_docs_range.contains_range(range) {
+ return None;
+ }
+
+ let relative_range = range - line_docs_range.start();
+
+ let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
+ match source {
+ Either::Left(attr) => {
+ let string = get_doc_string_in_attr(attr)?;
+ let text_range = string.open_quote_text_range()?;
+ let range = TextRange::at(
+ text_range.end() + original_line_src_range.start() + relative_range.start(),
+ string.syntax().text_range().len().min(range.len()),
+ );
+ Some(InFile { file_id, value: range })
+ }
+ Either::Right(comment) => {
+ let text_range = comment.syntax().text_range();
+ let range = TextRange::at(
+ text_range.start()
+ + TextSize::try_from(comment.prefix().len()).ok()?
+ + original_line_src_range.start()
+ + relative_range.start(),
+ text_range.len().min(range.len()),
+ );
+ Some(InFile { file_id, value: range })
+ }
+ }
+ }
+}
+
+fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
+ match it.expr() {
+ // #[doc = lit]
+ Some(ast::Expr::Literal(lit)) => match lit.kind() {
+ ast::LiteralKind::String(it) => Some(it),
+ _ => None,
+ },
+ // #[cfg_attr(..., doc = "", ...)]
+ None => {
+ // FIXME: See highlight injection for what to do here
+ None
+ }
+ _ => None,
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct AttrId {
+ pub(crate) ast_index: u32,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Attr {
+ pub(crate) id: AttrId,
+ pub(crate) path: Interned<ModPath>,
+ pub(crate) input: Option<Interned<AttrInput>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AttrInput {
+ /// `#[attr = "string"]`
+ Literal(SmolStr),
+ /// `#[attr(subtree)]`
+ TokenTree(tt::Subtree, mbe::TokenMap),
+}
+
+impl fmt::Display for AttrInput {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
+ AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
+ }
+ }
+}
+
+impl Attr {
+ fn from_src(
+ db: &dyn DefDatabase,
+ ast: ast::Meta,
+ hygiene: &Hygiene,
+ id: AttrId,
+ ) -> Option<Attr> {
+ let path = Interned::new(ModPath::from_src(db.upcast(), ast.path()?, hygiene)?);
+ let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
+ let value = match lit.kind() {
+ ast::LiteralKind::String(string) => string.value()?.into(),
+ _ => lit.syntax().first_token()?.text().trim_matches('"').into(),
+ };
+ Some(Interned::new(AttrInput::Literal(value)))
+ } else if let Some(tt) = ast.token_tree() {
+ let (tree, map) = syntax_node_to_token_tree(tt.syntax());
+ Some(Interned::new(AttrInput::TokenTree(tree, map)))
+ } else {
+ None
+ };
+ Some(Attr { id, path, input })
+ }
+
+ fn from_tt(
+ db: &dyn DefDatabase,
+ tt: &tt::Subtree,
+ hygiene: &Hygiene,
+ id: AttrId,
+ ) -> Option<Attr> {
+ let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+ let ast = ast::Meta::cast(parse.syntax_node())?;
+
+ Self::from_src(db, ast, hygiene, id)
+ }
+
+ pub fn path(&self) -> &ModPath {
+ &self.path
+ }
+}
+
+impl Attr {
+ /// #[path = "string"]
+ pub fn string_value(&self) -> Option<&SmolStr> {
+ match self.input.as_deref()? {
+ AttrInput::Literal(it) => Some(it),
+ _ => None,
+ }
+ }
+
+ /// #[path(ident)]
+ pub fn single_ident_value(&self) -> Option<&tt::Ident> {
+ match self.input.as_deref()? {
+ AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
+ [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+
+ /// #[path TokenTree]
+ pub fn token_tree_value(&self) -> Option<&Subtree> {
+ match self.input.as_deref()? {
+ AttrInput::TokenTree(subtree, _) => Some(subtree),
+ _ => None,
+ }
+ }
+
+ /// Parses this attribute as a token tree consisting of comma-separated paths.
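+ ///
+ /// A hypothetical example (the attribute name is made up for illustration):
+ ///
+ /// ```ignore
+ /// #[some_attr(foo, bar::baz)]
+ /// //          ^^^  ^^^^^^^^ yielded as `ModPath`s, in order
+ /// ```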
+ pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
+ let args = self.token_tree_value()?;
+
+ if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) {
+ return None;
+ }
+ let paths = args
+ .token_trees
+ .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+ .filter_map(|tts| {
+ if tts.is_empty() {
+ return None;
+ }
+ let segments = tts.iter().filter_map(|tt| match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()),
+ _ => None,
+ });
+ Some(ModPath::from_segments(PathKind::Plain, segments))
+ });
+
+ Some(paths)
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct AttrQuery<'attr> {
+ attrs: &'attr Attrs,
+ key: &'static str,
+}
+
+impl<'attr> AttrQuery<'attr> {
+ pub fn tt_values(self) -> impl Iterator<Item = &'attr Subtree> {
+ self.attrs().filter_map(|attr| attr.token_tree_value())
+ }
+
+ pub fn string_value(self) -> Option<&'attr SmolStr> {
+ self.attrs().find_map(|attr| attr.string_value())
+ }
+
+ pub fn exists(self) -> bool {
+ self.attrs().next().is_some()
+ }
+
+ pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
+ let key = self.key;
+ self.attrs
+ .iter()
+ .filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_smol_str() == key))
+ }
+
+ /// Finds the string value for a specific key inside the token tree.
+ ///
+ /// ```ignore
+ /// #[doc(html_root_url = "url")]
+ /// ^^^^^^^^^^^^^ key
+ /// ```
+ pub fn find_string_value_in_tt(self, key: &'attr str) -> Option<&SmolStr> {
+ self.tt_values().find_map(|tt| {
+ let name = tt.token_trees.iter()
+ .skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, ..} )) if text == key))
+ .nth(2);
+
+ match name {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ref text, ..}))) => Some(text),
+ _ => None
+ }
+ })
+ }
+}
+
+fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase) -> RawAttrs {
+ let tree = id.item_tree(db);
+ let mod_item = N::id_to_mod_item(id.value);
+ tree.raw_attrs(mod_item.into()).clone()
+}
+
+fn collect_attrs(
+ owner: &dyn ast::HasAttrs,
+) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
+ let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
+ let outer_attrs =
+ ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
+ Either::Left(attr) => attr.kind().is_outer(),
+ Either::Right(comment) => comment.is_outer(),
+ });
+ outer_attrs
+ .chain(inner_attrs)
+ .enumerate()
+ .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr))
+}
+
+pub(crate) fn variants_attrs_source_map(
+ db: &dyn DefDatabase,
+ def: EnumId,
+) -> Arc<ArenaMap<LocalEnumVariantId, AstPtr<ast::Variant>>> {
+ let mut res = ArenaMap::default();
+ let child_source = def.child_source(db);
+
+ for (idx, variant) in child_source.value.iter() {
+ res.insert(idx, AstPtr::new(variant));
+ }
+
+ Arc::new(res)
+}
+
+pub(crate) fn fields_attrs_source_map(
+ db: &dyn DefDatabase,
+ def: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>> {
+ let mut res = ArenaMap::default();
+ let child_source = def.child_source(db);
+
+ for (idx, variant) in child_source.value.iter() {
+ res.insert(
+ idx,
+ variant
+ .as_ref()
+ .either(|l| Either::Left(AstPtr::new(l)), |r| Either::Right(AstPtr::new(r))),
+ );
+ }
+
+ Arc::new(res)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
new file mode 100644
index 000000000..080a307b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -0,0 +1,471 @@
+//! Defines `Body`: a lowered representation of bodies of functions, statics and
+//! consts.
+mod lower;
+#[cfg(test)]
+mod tests;
+pub mod scope;
+
+use std::{ops::Index, sync::Arc};
+
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use drop_bomb::DropBomb;
+use either::Either;
+use hir_expand::{hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId};
+use la_arena::{Arena, ArenaMap};
+use limit::Limit;
+use profile::Count;
+use rustc_hash::FxHashMap;
+use syntax::{ast, AstPtr, SyntaxNodePtr};
+
+use crate::{
+ attr::{Attrs, RawAttrs},
+ db::DefDatabase,
+ expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId},
+ item_scope::BuiltinShadowMode,
+ macro_id_to_def_id,
+ nameres::DefMap,
+ path::{ModPath, Path},
+ src::HasSource,
+ AsMacroCall, BlockId, DefWithBodyId, HasModule, LocalModuleId, Lookup, MacroId, ModuleId,
+ UnresolvedMacro,
+};
+
+pub use lower::LowerCtx;
+
+/// A subset of Expander that only deals with cfg attributes. We only need it to
+/// avoid cyclic queries in crate def map during enum processing.
+#[derive(Debug)]
+pub(crate) struct CfgExpander {
+ cfg_options: CfgOptions,
+ hygiene: Hygiene,
+ krate: CrateId,
+}
+
+#[derive(Debug)]
+pub struct Expander {
+ cfg_expander: CfgExpander,
+ def_map: Arc<DefMap>,
+ current_file_id: HirFileId,
+ module: LocalModuleId,
+ recursion_limit: usize,
+}
+
+impl CfgExpander {
+ pub(crate) fn new(
+ db: &dyn DefDatabase,
+ current_file_id: HirFileId,
+ krate: CrateId,
+ ) -> CfgExpander {
+ let hygiene = Hygiene::new(db.upcast(), current_file_id);
+ let cfg_options = db.crate_graph()[krate].cfg_options.clone();
+ CfgExpander { cfg_options, hygiene, krate }
+ }
+
+ pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
+ RawAttrs::new(db, owner, &self.hygiene).filter(db, self.krate)
+ }
+
+ pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool {
+ let attrs = self.parse_attrs(db, owner);
+ attrs.is_cfg_enabled(&self.cfg_options)
+ }
+}
+
+impl Expander {
+ pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
+ let cfg_expander = CfgExpander::new(db, current_file_id, module.krate);
+ let def_map = module.def_map(db);
+ Expander {
+ cfg_expander,
+ def_map,
+ current_file_id,
+ module: module.local_id,
+ recursion_limit: 0,
+ }
+ }
+
+ pub fn enter_expand<T: ast::AstNode>(
+ &mut self,
+ db: &dyn DefDatabase,
+ macro_call: ast::MacroCall,
+ ) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
+ if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() {
+ cov_mark::hit!(your_stack_belongs_to_me);
+ return Ok(ExpandResult::only_err(ExpandError::Other(
+ "reached recursion limit during macro expansion".into(),
+ )));
+ }
+
+ let macro_call = InFile::new(self.current_file_id, &macro_call);
+
+ let resolver =
+ |path| self.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it));
+
+ let mut err = None;
+ let call_id =
+ macro_call.as_call_id_with_errors(db, self.def_map.krate(), resolver, &mut |e| {
+ err.get_or_insert(e);
+ })?;
+ let call_id = match call_id {
+ Ok(it) => it,
+ Err(_) => {
+ return Ok(ExpandResult { value: None, err });
+ }
+ };
+
+ Ok(self.enter_expand_inner(db, call_id, err))
+ }
+
+ pub fn enter_expand_id<T: ast::AstNode>(
+ &mut self,
+ db: &dyn DefDatabase,
+ call_id: MacroCallId,
+ ) -> ExpandResult<Option<(Mark, T)>> {
+ self.enter_expand_inner(db, call_id, None)
+ }
+
+ fn enter_expand_inner<T: ast::AstNode>(
+ &mut self,
+ db: &dyn DefDatabase,
+ call_id: MacroCallId,
+ mut err: Option<ExpandError>,
+ ) -> ExpandResult<Option<(Mark, T)>> {
+ if err.is_none() {
+ err = db.macro_expand_error(call_id);
+ }
+
+ let file_id = call_id.as_file();
+
+ let raw_node = match db.parse_or_expand(file_id) {
+ Some(it) => it,
+ None => {
+ // Only `None` if the macro expansion produced no usable AST.
+ if err.is_none() {
+ tracing::warn!("no error despite `parse_or_expand` failing");
+ }
+
+ return ExpandResult::only_err(err.unwrap_or_else(|| {
+ ExpandError::Other("failed to parse macro invocation".into())
+ }));
+ }
+ };
+
+ let node = match T::cast(raw_node) {
+ Some(it) => it,
+ None => {
+ // This can happen without being an error, so only forward previous errors.
+ return ExpandResult { value: None, err };
+ }
+ };
+
+ tracing::debug!("macro expansion {:#?}", node.syntax());
+
+ self.recursion_limit += 1;
+ let mark =
+ Mark { file_id: self.current_file_id, bomb: DropBomb::new("expansion mark dropped") };
+ self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
+ self.current_file_id = file_id;
+
+ ExpandResult { value: Some((mark, node)), err }
+ }
+
+ pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
+ self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+ self.current_file_id = mark.file_id;
+ self.recursion_limit -= 1;
+ mark.bomb.defuse();
+ }
+
+ pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
+ InFile { file_id: self.current_file_id, value }
+ }
+
+ pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
+ self.cfg_expander.parse_attrs(db, owner)
+ }
+
+ pub(crate) fn cfg_options(&self) -> &CfgOptions {
+ &self.cfg_expander.cfg_options
+ }
+
+ pub fn current_file_id(&self) -> HirFileId {
+ self.current_file_id
+ }
+
+ fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
+ let ctx = LowerCtx::with_hygiene(db, &self.cfg_expander.hygiene);
+ Path::from_src(path, &ctx)
+ }
+
+ fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroId> {
+ self.def_map.resolve_path(db, self.module, path, BuiltinShadowMode::Other).0.take_macros()
+ }
+
+ fn recursion_limit(&self, db: &dyn DefDatabase) -> Limit {
+ let limit = db.crate_limits(self.cfg_expander.krate).recursion_limit as _;
+
+ #[cfg(not(test))]
+ return Limit::new(limit);
+
+ // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
+ #[cfg(test)]
+ return Limit::new(std::cmp::min(32, limit));
+ }
+}
+
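+/// Marker returned by [`Expander::enter_expand`]; it must be passed back to [`Expander::exit`]
+/// so that the previous expansion state is restored (enforced by the contained `DropBomb`).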
+#[derive(Debug)]
+pub struct Mark {
+ file_id: HirFileId,
+ bomb: DropBomb,
+}
+
+/// The body of an item (function, const etc.).
+#[derive(Debug, Eq, PartialEq)]
+pub struct Body {
+ pub exprs: Arena<Expr>,
+ pub pats: Arena<Pat>,
+ pub or_pats: FxHashMap<PatId, Arc<[PatId]>>,
+ pub labels: Arena<Label>,
+ /// The patterns for the function's parameters. While the parameter types are
+ /// part of the function signature, the patterns are not (they don't change
+ /// the external type of the function).
+ ///
+ /// If this `Body` is for the body of a constant, this will just be
+ /// empty.
+ pub params: Vec<PatId>,
+ /// The `ExprId` of the actual body expression.
+ pub body_expr: ExprId,
+ /// Block expressions in this body that may contain inner items.
+ block_scopes: Vec<BlockId>,
+ _c: Count<Self>,
+}
+
+pub type ExprPtr = AstPtr<ast::Expr>;
+pub type ExprSource = InFile<ExprPtr>;
+
+pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
+pub type PatSource = InFile<PatPtr>;
+
+pub type LabelPtr = AstPtr<ast::Label>;
+pub type LabelSource = InFile<LabelPtr>;
+/// An item body together with the mapping from syntax nodes to HIR expression
+/// IDs. This is needed to go from e.g. a position in a file to the HIR
+/// expression containing it; but for type inference etc., we want to operate on
+/// a structure that is agnostic to the actual positions of expressions in the
+/// file, so that we don't recompute types whenever some whitespace is typed.
+///
+/// One complication here is that, due to macro expansion, a single `Body` might
+/// be spread across several files. So, for each ExprId and PatId, we record
+/// both the HirFileId and the position inside the file. However, we only store
+/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
+/// this properly for macros.
+#[derive(Default, Debug, Eq, PartialEq)]
+pub struct BodySourceMap {
+ expr_map: FxHashMap<ExprSource, ExprId>,
+ expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>,
+
+ pat_map: FxHashMap<PatSource, PatId>,
+ pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>,
+
+ label_map: FxHashMap<LabelSource, LabelId>,
+ label_map_back: ArenaMap<LabelId, LabelSource>,
+
+ /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
+ /// Instead, we use the id of the expression (`92`) to identify the field.
+ field_map: FxHashMap<InFile<AstPtr<ast::RecordExprField>>, ExprId>,
+ field_map_back: FxHashMap<ExprId, InFile<AstPtr<ast::RecordExprField>>>,
+
+ expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
+
+ /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
+ /// the source map (since they're just as volatile).
+ diagnostics: Vec<BodyDiagnostic>,
+}
+
+#[derive(Default, Debug, Eq, PartialEq, Clone, Copy)]
+pub struct SyntheticSyntax;
+
+#[derive(Debug, Eq, PartialEq)]
+pub enum BodyDiagnostic {
+ InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
+ MacroError { node: InFile<AstPtr<ast::MacroCall>>, message: String },
+ UnresolvedProcMacro { node: InFile<AstPtr<ast::MacroCall>>, krate: CrateId },
+ UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath },
+}
+
+impl Body {
+ pub(crate) fn body_with_source_map_query(
+ db: &dyn DefDatabase,
+ def: DefWithBodyId,
+ ) -> (Arc<Body>, Arc<BodySourceMap>) {
+ let _p = profile::span("body_with_source_map_query");
+ let mut params = None;
+
+ let (file_id, module, body) = match def {
+ DefWithBodyId::FunctionId(f) => {
+ let f = f.lookup(db);
+ let src = f.source(db);
+ params = src.value.param_list();
+ (src.file_id, f.module(db), src.value.body().map(ast::Expr::from))
+ }
+ DefWithBodyId::ConstId(c) => {
+ let c = c.lookup(db);
+ let src = c.source(db);
+ (src.file_id, c.module(db), src.value.body())
+ }
+ DefWithBodyId::StaticId(s) => {
+ let s = s.lookup(db);
+ let src = s.source(db);
+ (src.file_id, s.module(db), src.value.body())
+ }
+ };
+ let expander = Expander::new(db, file_id, module);
+ let (mut body, source_map) = Body::new(db, expander, params, body);
+ body.shrink_to_fit();
+ (Arc::new(body), Arc::new(source_map))
+ }
+
+ pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> {
+ db.body_with_source_map(def).0
+ }
+
+ /// Returns an iterator over all block expressions in this body that define inner items.
+ pub fn blocks<'a>(
+ &'a self,
+ db: &'a dyn DefDatabase,
+ ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + '_ {
+ self.block_scopes
+ .iter()
+ .map(move |&block| (block, db.block_def_map(block).expect("block ID without DefMap")))
+ }
+
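+ /// Returns the first pattern of the or-pattern group that `pat` belongs to,
+ /// or `pat` itself if it is not part of one.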
+ pub fn pattern_representative(&self, pat: PatId) -> PatId {
+ self.or_pats.get(&pat).and_then(|pats| pats.first().copied()).unwrap_or(pat)
+ }
+
+ /// Retrieves all ident patterns this pattern shares the ident with.
+ pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] {
+ match self.or_pats.get(pat) {
+ Some(pats) => &**pats,
+ None => std::slice::from_ref(pat),
+ }
+ }
+
+ fn new(
+ db: &dyn DefDatabase,
+ expander: Expander,
+ params: Option<ast::ParamList>,
+ body: Option<ast::Expr>,
+ ) -> (Body, BodySourceMap) {
+ lower::lower(db, expander, params, body)
+ }
+
+ fn shrink_to_fit(&mut self) {
+ let Self { _c: _, body_expr: _, block_scopes, or_pats, exprs, labels, params, pats } = self;
+ block_scopes.shrink_to_fit();
+ or_pats.shrink_to_fit();
+ exprs.shrink_to_fit();
+ labels.shrink_to_fit();
+ params.shrink_to_fit();
+ pats.shrink_to_fit();
+ }
+}
+
+impl Default for Body {
+ fn default() -> Self {
+ Self {
+ body_expr: dummy_expr_id(),
+ exprs: Default::default(),
+ pats: Default::default(),
+ or_pats: Default::default(),
+ labels: Default::default(),
+ params: Default::default(),
+ block_scopes: Default::default(),
+ _c: Default::default(),
+ }
+ }
+}
+
+impl Index<ExprId> for Body {
+ type Output = Expr;
+
+ fn index(&self, expr: ExprId) -> &Expr {
+ &self.exprs[expr]
+ }
+}
+
+impl Index<PatId> for Body {
+ type Output = Pat;
+
+ fn index(&self, pat: PatId) -> &Pat {
+ &self.pats[pat]
+ }
+}
+
+impl Index<LabelId> for Body {
+ type Output = Label;
+
+ fn index(&self, label: LabelId) -> &Label {
+ &self.labels[label]
+ }
+}
+
+// FIXME: Change `node_` prefix to something more reasonable.
+// Perhaps `expr_syntax` and `expr_id`?
+impl BodySourceMap {
+ pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> {
+ self.expr_map_back[expr].clone()
+ }
+
+ pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprId> {
+ let src = node.map(AstPtr::new);
+ self.expr_map.get(&src).cloned()
+ }
+
+ pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<HirFileId> {
+ let src = node.map(AstPtr::new);
+ self.expansions.get(&src).cloned()
+ }
+
+ pub fn pat_syntax(&self, pat: PatId) -> Result<PatSource, SyntheticSyntax> {
+ self.pat_map_back[pat].clone()
+ }
+
+ pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
+ let src = node.map(|it| Either::Left(AstPtr::new(it)));
+ self.pat_map.get(&src).cloned()
+ }
+
+ pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
+ let src = node.map(|it| Either::Right(AstPtr::new(it)));
+ self.pat_map.get(&src).cloned()
+ }
+
+ pub fn label_syntax(&self, label: LabelId) -> LabelSource {
+ self.label_map_back[label].clone()
+ }
+
+ pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
+ let src = node.map(AstPtr::new);
+ self.label_map.get(&src).cloned()
+ }
+
+ pub fn field_syntax(&self, expr: ExprId) -> InFile<AstPtr<ast::RecordExprField>> {
+ self.field_map_back[&expr].clone()
+ }
+ pub fn node_field(&self, node: InFile<&ast::RecordExprField>) -> Option<ExprId> {
+ let src = node.map(AstPtr::new);
+ self.field_map.get(&src).cloned()
+ }
+
+ pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> {
+ let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast);
+ self.expr_map.get(&src).copied()
+ }
+
+ /// Get a reference to the body source map's diagnostics.
+ pub fn diagnostics(&self) -> &[BodyDiagnostic] {
+ &self.diagnostics
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
new file mode 100644
index 000000000..66f9c24e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -0,0 +1,1023 @@
+//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
+//! representation.
+
+use std::{mem, sync::Arc};
+
+use either::Either;
+use hir_expand::{
+ ast_id_map::AstIdMap,
+ hygiene::Hygiene,
+ name::{name, AsName, Name},
+ AstId, ExpandError, HirFileId, InFile,
+};
+use la_arena::Arena;
+use once_cell::unsync::OnceCell;
+use profile::Count;
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{
+ self, ArrayExprKind, AstChildren, HasArgList, HasLoopBody, HasName, LiteralKind,
+ SlicePatComponents,
+ },
+ AstNode, AstPtr, SyntaxNodePtr,
+};
+
+use crate::{
+ adt::StructKind,
+ body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax},
+ body::{BodyDiagnostic, ExprSource, PatSource},
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
+ db::DefDatabase,
+ expr::{
+ dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, FloatTypeWrapper, Label, LabelId,
+ Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
+ },
+ intern::Interned,
+ item_scope::BuiltinShadowMode,
+ path::{GenericArgs, Path},
+ type_ref::{Mutability, Rawness, TypeRef},
+ AdtId, BlockLoc, ModuleDefId, UnresolvedMacro,
+};
+
+pub struct LowerCtx<'a> {
+ pub db: &'a dyn DefDatabase,
+ hygiene: Hygiene,
+ ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
+}
+
+impl<'a> LowerCtx<'a> {
+ pub fn new(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
+ LowerCtx {
+ db,
+ hygiene: Hygiene::new(db.upcast(), file_id),
+ ast_id_map: Some((file_id, OnceCell::new())),
+ }
+ }
+
+ pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
+ LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
+ }
+
+ pub(crate) fn hygiene(&self) -> &Hygiene {
+ &self.hygiene
+ }
+
+ pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
+ Path::from_src(ast, self)
+ }
+
+ pub(crate) fn ast_id<N: AstNode>(&self, db: &dyn DefDatabase, item: &N) -> Option<AstId<N>> {
+ let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
+ let ast_id_map = ast_id_map.get_or_init(|| db.ast_id_map(file_id));
+ Some(InFile::new(file_id, ast_id_map.ast_id(item)))
+ }
+}
+
+pub(super) fn lower(
+ db: &dyn DefDatabase,
+ expander: Expander,
+ params: Option<ast::ParamList>,
+ body: Option<ast::Expr>,
+) -> (Body, BodySourceMap) {
+ ExprCollector {
+ db,
+ source_map: BodySourceMap::default(),
+ ast_id_map: db.ast_id_map(expander.current_file_id),
+ body: Body {
+ exprs: Arena::default(),
+ pats: Arena::default(),
+ labels: Arena::default(),
+ params: Vec::new(),
+ body_expr: dummy_expr_id(),
+ block_scopes: Vec::new(),
+ _c: Count::new(),
+ or_pats: Default::default(),
+ },
+ expander,
+ name_to_pat_grouping: Default::default(),
+ is_lowering_inside_or_pat: false,
+ is_lowering_assignee_expr: false,
+ }
+ .collect(params, body)
+}
+
+struct ExprCollector<'a> {
+ db: &'a dyn DefDatabase,
+ expander: Expander,
+ ast_id_map: Arc<AstIdMap>,
+ body: Body,
+ source_map: BodySourceMap,
+ // a poor man's union-find?
+ name_to_pat_grouping: FxHashMap<Name, Vec<PatId>>,
+ is_lowering_inside_or_pat: bool,
+ is_lowering_assignee_expr: bool,
+}
+
+impl ExprCollector<'_> {
+ fn collect(
+ mut self,
+ param_list: Option<ast::ParamList>,
+ body: Option<ast::Expr>,
+ ) -> (Body, BodySourceMap) {
+ if let Some(param_list) = param_list {
+ if let Some(self_param) = param_list.self_param() {
+ let ptr = AstPtr::new(&self_param);
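+ // `self` counts as a mutable binding only for `mut self` taken by value;
+ // `&mut self` is a mutable borrow behind an immutable binding.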
+ let param_pat = self.alloc_pat(
+ Pat::Bind {
+ name: name![self],
+ mode: BindingAnnotation::new(
+ self_param.mut_token().is_some() && self_param.amp_token().is_none(),
+ false,
+ ),
+ subpat: None,
+ },
+ Either::Right(ptr),
+ );
+ self.body.params.push(param_pat);
+ }
+
+ for pat in param_list.params().filter_map(|param| param.pat()) {
+ let param_pat = self.collect_pat(pat);
+ self.body.params.push(param_pat);
+ }
+ };
+
+ self.body.body_expr = self.collect_expr_opt(body);
+ (self.body, self.source_map)
+ }
+
+ fn ctx(&self) -> LowerCtx<'_> {
+ LowerCtx::new(self.db, self.expander.current_file_id)
+ }
+
+ fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
+ let src = self.expander.to_source(ptr);
+ let id = self.make_expr(expr, Ok(src.clone()));
+ self.source_map.expr_map.insert(src, id);
+ id
+ }
+ // Desugared exprs don't have a ptr; that's wrong and should be fixed
+ // somehow.
+ fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
+ self.make_expr(expr, Err(SyntheticSyntax))
+ }
+ fn missing_expr(&mut self) -> ExprId {
+ self.alloc_expr_desugared(Expr::Missing)
+ }
+ fn make_expr(&mut self, expr: Expr, src: Result<ExprSource, SyntheticSyntax>) -> ExprId {
+ let id = self.body.exprs.alloc(expr);
+ self.source_map.expr_map_back.insert(id, src);
+ id
+ }
+
+ fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
+ let src = self.expander.to_source(ptr);
+ let id = self.make_pat(pat, Ok(src.clone()));
+ self.source_map.pat_map.insert(src, id);
+ id
+ }
+ fn missing_pat(&mut self) -> PatId {
+ self.make_pat(Pat::Missing, Err(SyntheticSyntax))
+ }
+ fn make_pat(&mut self, pat: Pat, src: Result<PatSource, SyntheticSyntax>) -> PatId {
+ let id = self.body.pats.alloc(pat);
+ self.source_map.pat_map_back.insert(id, src);
+ id
+ }
+
+ fn alloc_label(&mut self, label: Label, ptr: AstPtr<ast::Label>) -> LabelId {
+ let src = self.expander.to_source(ptr);
+ let id = self.make_label(label, src.clone());
+ self.source_map.label_map.insert(src, id);
+ id
+ }
+ fn make_label(&mut self, label: Label, src: LabelSource) -> LabelId {
+ let id = self.body.labels.alloc(label);
+ self.source_map.label_map_back.insert(id, src);
+ id
+ }
+
+ fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
+ self.maybe_collect_expr(expr).unwrap_or_else(|| self.missing_expr())
+ }
+
+ /// Returns `None` if and only if the expression is `#[cfg]`d out.
+ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
+ let syntax_ptr = AstPtr::new(&expr);
+ self.check_cfg(&expr)?;
+
+ Some(match expr {
+ ast::Expr::IfExpr(e) => {
+ let then_branch = self.collect_block_opt(e.then_branch());
+
+ let else_branch = e.else_branch().map(|b| match b {
+ ast::ElseBranch::Block(it) => self.collect_block(it),
+ ast::ElseBranch::IfExpr(elif) => {
+ let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap();
+ self.collect_expr(expr)
+ }
+ });
+
+ let condition = self.collect_expr_opt(e.condition());
+
+ self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr)
+ }
+ ast::Expr::LetExpr(e) => {
+ let pat = self.collect_pat_opt(e.pat());
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Let { pat, expr }, syntax_ptr)
+ }
+ ast::Expr::BlockExpr(e) => match e.modifier() {
+ Some(ast::BlockModifier::Try(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::TryBlock { body }, syntax_ptr)
+ }
+ Some(ast::BlockModifier::Unsafe(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::Unsafe { body }, syntax_ptr)
+ }
+ // FIXME: we need to record these effects somewhere...
+ Some(ast::BlockModifier::Label(label)) => {
+ let label = self.collect_label(label);
+ let res = self.collect_block(e);
+ match &mut self.body.exprs[res] {
+ Expr::Block { label: block_label, .. } => {
+ *block_label = Some(label);
+ }
+ _ => unreachable!(),
+ }
+ res
+ }
+ Some(ast::BlockModifier::Async(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::Async { body }, syntax_ptr)
+ }
+ Some(ast::BlockModifier::Const(_)) => {
+ let body = self.collect_block(e);
+ self.alloc_expr(Expr::Const { body }, syntax_ptr)
+ }
+ None => self.collect_block(e),
+ },
+ ast::Expr::LoopExpr(e) => {
+ let label = e.label().map(|label| self.collect_label(label));
+ let body = self.collect_block_opt(e.loop_body());
+ self.alloc_expr(Expr::Loop { body, label }, syntax_ptr)
+ }
+ ast::Expr::WhileExpr(e) => {
+ let label = e.label().map(|label| self.collect_label(label));
+ let body = self.collect_block_opt(e.loop_body());
+
+ let condition = self.collect_expr_opt(e.condition());
+
+ self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
+ }
+ ast::Expr::ForExpr(e) => {
+ let label = e.label().map(|label| self.collect_label(label));
+ let iterable = self.collect_expr_opt(e.iterable());
+ let pat = self.collect_pat_opt(e.pat());
+ let body = self.collect_block_opt(e.loop_body());
+ self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr)
+ }
+ ast::Expr::CallExpr(e) => {
+ let callee = self.collect_expr_opt(e.expr());
+ let args = if let Some(arg_list) = e.arg_list() {
+ arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
+ } else {
+ Box::default()
+ };
+ self.alloc_expr(
+ Expr::Call { callee, args, is_assignee_expr: self.is_lowering_assignee_expr },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::MethodCallExpr(e) => {
+ let receiver = self.collect_expr_opt(e.receiver());
+ let args = if let Some(arg_list) = e.arg_list() {
+ arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
+ } else {
+ Box::default()
+ };
+ let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
+ let generic_args = e
+ .generic_arg_list()
+ .and_then(|it| GenericArgs::from_ast(&self.ctx(), it))
+ .map(Box::new);
+ self.alloc_expr(
+ Expr::MethodCall { receiver, method_name, args, generic_args },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::MatchExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let arms = if let Some(match_arm_list) = e.match_arm_list() {
+ match_arm_list
+ .arms()
+ .filter_map(|arm| {
+ self.check_cfg(&arm).map(|()| MatchArm {
+ pat: self.collect_pat_opt(arm.pat()),
+ expr: self.collect_expr_opt(arm.expr()),
+ guard: arm
+ .guard()
+ .map(|guard| self.collect_expr_opt(guard.condition())),
+ })
+ })
+ .collect()
+ } else {
+ Box::default()
+ };
+ self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
+ }
+ ast::Expr::PathExpr(e) => {
+ let path = e
+ .path()
+ .and_then(|path| self.expander.parse_path(self.db, path))
+ .map(Expr::Path)
+ .unwrap_or(Expr::Missing);
+ self.alloc_expr(path, syntax_ptr)
+ }
+ ast::Expr::ContinueExpr(e) => self.alloc_expr(
+ Expr::Continue { label: e.lifetime().map(|l| Name::new_lifetime(&l)) },
+ syntax_ptr,
+ ),
+ ast::Expr::BreakExpr(e) => {
+ let expr = e.expr().map(|e| self.collect_expr(e));
+ self.alloc_expr(
+ Expr::Break { expr, label: e.lifetime().map(|l| Name::new_lifetime(&l)) },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::ParenExpr(e) => {
+ let inner = self.collect_expr_opt(e.expr());
+ // make the paren expr point to the inner expression as well
+ let src = self.expander.to_source(syntax_ptr);
+ self.source_map.expr_map.insert(src, inner);
+ inner
+ }
+ ast::Expr::ReturnExpr(e) => {
+ let expr = e.expr().map(|e| self.collect_expr(e));
+ self.alloc_expr(Expr::Return { expr }, syntax_ptr)
+ }
+ ast::Expr::YieldExpr(e) => {
+ let expr = e.expr().map(|e| self.collect_expr(e));
+ self.alloc_expr(Expr::Yield { expr }, syntax_ptr)
+ }
+ ast::Expr::RecordExpr(e) => {
+ let path =
+ e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ let is_assignee_expr = self.is_lowering_assignee_expr;
+ let record_lit = if let Some(nfl) = e.record_expr_field_list() {
+ let fields = nfl
+ .fields()
+ .filter_map(|field| {
+ self.check_cfg(&field)?;
+
+ let name = field.field_name()?.as_name();
+
+ let expr = match field.expr() {
+ Some(e) => self.collect_expr(e),
+ None => self.missing_expr(),
+ };
+ let src = self.expander.to_source(AstPtr::new(&field));
+ self.source_map.field_map.insert(src.clone(), expr);
+ self.source_map.field_map_back.insert(expr, src);
+ Some(RecordLitField { name, expr })
+ })
+ .collect();
+ let spread = nfl.spread().map(|s| self.collect_expr(s));
+ let ellipsis = nfl.dotdot_token().is_some();
+ Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr }
+ } else {
+ Expr::RecordLit {
+ path,
+ fields: Box::default(),
+ spread: None,
+ ellipsis: false,
+ is_assignee_expr,
+ }
+ };
+
+ self.alloc_expr(record_lit, syntax_ptr)
+ }
+ ast::Expr::FieldExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let name = match e.field_access() {
+ Some(kind) => kind.as_name(),
+ _ => Name::missing(),
+ };
+ self.alloc_expr(Expr::Field { expr, name }, syntax_ptr)
+ }
+ ast::Expr::AwaitExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Await { expr }, syntax_ptr)
+ }
+ ast::Expr::TryExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Try { expr }, syntax_ptr)
+ }
+ ast::Expr::CastExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty()));
+ self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
+ }
+ ast::Expr::RefExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ let raw_tok = e.raw_token().is_some();
+ let mutability = if raw_tok {
+ if e.mut_token().is_some() {
+ Mutability::Mut
+ } else if e.const_token().is_some() {
+ Mutability::Shared
+ } else {
+ unreachable!("parser only remaps to raw_token() if matching mutability token follows")
+ }
+ } else {
+ Mutability::from_mutable(e.mut_token().is_some())
+ };
+ let rawness = Rawness::from_raw(raw_tok);
+ self.alloc_expr(Expr::Ref { expr, rawness, mutability }, syntax_ptr)
+ }
+ ast::Expr::PrefixExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ match e.op_kind() {
+ Some(op) => self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr),
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ }
+ }
+ ast::Expr::ClosureExpr(e) => {
+ let mut args = Vec::new();
+ let mut arg_types = Vec::new();
+ if let Some(pl) = e.param_list() {
+ for param in pl.params() {
+ let pat = self.collect_pat_opt(param.pat());
+ let type_ref =
+ param.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
+ args.push(pat);
+ arg_types.push(type_ref);
+ }
+ }
+ let ret_type = e
+ .ret_type()
+ .and_then(|r| r.ty())
+ .map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
+ let body = self.collect_expr_opt(e.body());
+ self.alloc_expr(
+ Expr::Closure {
+ args: args.into(),
+ arg_types: arg_types.into(),
+ ret_type,
+ body,
+ },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::BinExpr(e) => {
+ let op = e.op_kind();
+ if let Some(ast::BinaryOp::Assignment { op: None }) = op {
+ self.is_lowering_assignee_expr = true;
+ }
+ let lhs = self.collect_expr_opt(e.lhs());
+ self.is_lowering_assignee_expr = false;
+ let rhs = self.collect_expr_opt(e.rhs());
+ self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr)
+ }
+ ast::Expr::TupleExpr(e) => {
+ let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect();
+ self.alloc_expr(
+ Expr::Tuple { exprs, is_assignee_expr: self.is_lowering_assignee_expr },
+ syntax_ptr,
+ )
+ }
+ ast::Expr::BoxExpr(e) => {
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Box { expr }, syntax_ptr)
+ }
+
+ ast::Expr::ArrayExpr(e) => {
+ let kind = e.kind();
+
+ match kind {
+ ArrayExprKind::ElementList(e) => {
+ let elements = e.map(|expr| self.collect_expr(expr)).collect();
+ self.alloc_expr(
+ Expr::Array(Array::ElementList {
+ elements,
+ is_assignee_expr: self.is_lowering_assignee_expr,
+ }),
+ syntax_ptr,
+ )
+ }
+ ArrayExprKind::Repeat { initializer, repeat } => {
+ let initializer = self.collect_expr_opt(initializer);
+ let repeat = self.collect_expr_opt(repeat);
+ self.alloc_expr(
+ Expr::Array(Array::Repeat { initializer, repeat }),
+ syntax_ptr,
+ )
+ }
+ }
+ }
+
+ ast::Expr::Literal(e) => self.alloc_expr(Expr::Literal(e.kind().into()), syntax_ptr),
+ ast::Expr::IndexExpr(e) => {
+ let base = self.collect_expr_opt(e.base());
+ let index = self.collect_expr_opt(e.index());
+ self.alloc_expr(Expr::Index { base, index }, syntax_ptr)
+ }
+ ast::Expr::RangeExpr(e) => {
+ let lhs = e.start().map(|lhs| self.collect_expr(lhs));
+ let rhs = e.end().map(|rhs| self.collect_expr(rhs));
+ match e.op_kind() {
+ Some(range_type) => {
+ self.alloc_expr(Expr::Range { lhs, rhs, range_type }, syntax_ptr)
+ }
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ }
+ }
+ ast::Expr::MacroExpr(e) => {
+ let e = e.macro_call()?;
+ let macro_ptr = AstPtr::new(&e);
+ let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
+ expansion.map(|it| this.collect_expr(it))
+ });
+ match id {
+ Some(id) => {
+ // Make the macro-call point to its expanded expression so we can query
+ // semantics on syntax pointers to the macro
+ let src = self.expander.to_source(syntax_ptr);
+ self.source_map.expr_map.insert(src, id);
+ id
+ }
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ }
+ }
+ ast::Expr::MacroStmts(e) => {
+ let statements = e.statements().filter_map(|s| self.collect_stmt(s)).collect();
+ let tail = e.expr().map(|e| self.collect_expr(e));
+
+ self.alloc_expr(Expr::MacroStmts { tail, statements }, syntax_ptr)
+ }
+ ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
+ })
+ }
+
+ fn collect_macro_call<F, T, U>(
+ &mut self,
+ mcall: ast::MacroCall,
+ syntax_ptr: AstPtr<ast::MacroCall>,
+ record_diagnostics: bool,
+ collector: F,
+ ) -> U
+ where
+ F: FnOnce(&mut Self, Option<T>) -> U,
+ T: ast::AstNode,
+ {
+ // File containing the macro call. Expansion errors will be attached here.
+ let outer_file = self.expander.current_file_id;
+
+ let macro_call_ptr = self.expander.to_source(AstPtr::new(&mcall));
+ let res = self.expander.enter_expand(self.db, mcall);
+
+ let res = match res {
+ Ok(res) => res,
+ Err(UnresolvedMacro { path }) => {
+ if record_diagnostics {
+ self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall {
+ node: InFile::new(outer_file, syntax_ptr),
+ path,
+ });
+ }
+ return collector(self, None);
+ }
+ };
+
+ if record_diagnostics {
+ match &res.err {
+ Some(ExpandError::UnresolvedProcMacro(krate)) => {
+ self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro {
+ node: InFile::new(outer_file, syntax_ptr),
+ krate: *krate,
+ });
+ }
+ Some(err) => {
+ self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
+ node: InFile::new(outer_file, syntax_ptr),
+ message: err.to_string(),
+ });
+ }
+ None => {}
+ }
+ }
+
+ match res.value {
+ Some((mark, expansion)) => {
+ self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id);
+ let prev_ast_id_map = mem::replace(
+ &mut self.ast_id_map,
+ self.db.ast_id_map(self.expander.current_file_id),
+ );
+
+ let id = collector(self, Some(expansion));
+ self.ast_id_map = prev_ast_id_map;
+ self.expander.exit(self.db, mark);
+ id
+ }
+ None => collector(self, None),
+ }
+ }
+
+ fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
+ match expr {
+ Some(expr) => self.collect_expr(expr),
+ None => self.missing_expr(),
+ }
+ }
+
+ fn collect_stmt(&mut self, s: ast::Stmt) -> Option<Statement> {
+ match s {
+ ast::Stmt::LetStmt(stmt) => {
+ if self.check_cfg(&stmt).is_none() {
+ return None;
+ }
+ let pat = self.collect_pat_opt(stmt.pat());
+ let type_ref =
+ stmt.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
+ let initializer = stmt.initializer().map(|e| self.collect_expr(e));
+ let else_branch = stmt
+ .let_else()
+ .and_then(|let_else| let_else.block_expr())
+ .map(|block| self.collect_block(block));
+ Some(Statement::Let { pat, type_ref, initializer, else_branch })
+ }
+ ast::Stmt::ExprStmt(stmt) => {
+ let expr = stmt.expr();
+ if let Some(expr) = &expr {
+ if self.check_cfg(expr).is_none() {
+ return None;
+ }
+ }
+ let has_semi = stmt.semicolon_token().is_some();
+ // Note that macro could be expanded to multiple statements
+ if let Some(expr @ ast::Expr::MacroExpr(mac)) = &expr {
+ let mac_call = mac.macro_call()?;
+ let syntax_ptr = AstPtr::new(expr);
+ let macro_ptr = AstPtr::new(&mac_call);
+ let stmt = self.collect_macro_call(
+ mac_call,
+ macro_ptr,
+ false,
+ |this, expansion: Option<ast::MacroStmts>| match expansion {
+ Some(expansion) => {
+ let statements = expansion
+ .statements()
+ .filter_map(|stmt| this.collect_stmt(stmt))
+ .collect();
+ let tail = expansion.expr().map(|expr| this.collect_expr(expr));
+
+ let mac_stmts = this.alloc_expr(
+ Expr::MacroStmts { tail, statements },
+ AstPtr::new(&ast::Expr::MacroStmts(expansion)),
+ );
+
+ Some(mac_stmts)
+ }
+ None => None,
+ },
+ );
+
+ let expr = match stmt {
+ Some(expr) => {
+ // Make the macro-call point to its expanded expression so we can query
+ // semantics on syntax pointers to the macro
+ let src = self.expander.to_source(syntax_ptr);
+ self.source_map.expr_map.insert(src, expr);
+ expr
+ }
+ None => self.alloc_expr(Expr::Missing, syntax_ptr),
+ };
+ Some(Statement::Expr { expr, has_semi })
+ } else {
+ let expr = self.collect_expr_opt(expr);
+ Some(Statement::Expr { expr, has_semi })
+ }
+ }
+ ast::Stmt::Item(_item) => None,
+ }
+ }
+
+ fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
+ let file_local_id = self.ast_id_map.ast_id(&block);
+ let ast_id = AstId::new(self.expander.current_file_id, file_local_id);
+ let block_loc =
+ BlockLoc { ast_id, module: self.expander.def_map.module_id(self.expander.module) };
+ let block_id = self.db.intern_block(block_loc);
+
+ let (module, def_map) = match self.db.block_def_map(block_id) {
+ Some(def_map) => {
+ self.body.block_scopes.push(block_id);
+ (def_map.root(), def_map)
+ }
+ None => (self.expander.module, self.expander.def_map.clone()),
+ };
+ let prev_def_map = mem::replace(&mut self.expander.def_map, def_map);
+ let prev_local_module = mem::replace(&mut self.expander.module, module);
+
+ let mut statements: Vec<_> =
+ block.statements().filter_map(|s| self.collect_stmt(s)).collect();
+ let tail = block.tail_expr().and_then(|e| self.maybe_collect_expr(e));
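+ // If no tail expression was collected, fall back to a trailing expression statement
+ // without a semicolon, if there is one.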
+ let tail = tail.or_else(|| {
+ let stmt = statements.pop()?;
+ if let Statement::Expr { expr, has_semi: false } = stmt {
+ return Some(expr);
+ }
+ statements.push(stmt);
+ None
+ });
+
+ let syntax_node_ptr = AstPtr::new(&block.into());
+ let expr_id = self.alloc_expr(
+ Expr::Block {
+ id: block_id,
+ statements: statements.into_boxed_slice(),
+ tail,
+ label: None,
+ },
+ syntax_node_ptr,
+ );
+
+ self.expander.def_map = prev_def_map;
+ self.expander.module = prev_local_module;
+ expr_id
+ }
+
+ fn collect_block_opt(&mut self, expr: Option<ast::BlockExpr>) -> ExprId {
+ match expr {
+ Some(block) => self.collect_block(block),
+ None => self.missing_expr(),
+ }
+ }
+
+ fn collect_label(&mut self, ast_label: ast::Label) -> LabelId {
+ let label = Label {
+ name: ast_label.lifetime().as_ref().map_or_else(Name::missing, Name::new_lifetime),
+ };
+ self.alloc_label(label, AstPtr::new(&ast_label))
+ }
+
+ fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
+ let pat_id = self.collect_pat_(pat);
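+ // Group bindings that share a name across `|` alternatives, so each of them maps to
+ // the full set of sibling bindings (see `ident_patterns_for`).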
+ for (_, pats) in self.name_to_pat_grouping.drain() {
+ let pats = Arc::<[_]>::from(pats);
+ self.body.or_pats.extend(pats.iter().map(|&pat| (pat, pats.clone())));
+ }
+ self.is_lowering_inside_or_pat = false;
+ pat_id
+ }
+
+ fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
+ match pat {
+ Some(pat) => self.collect_pat(pat),
+ None => self.missing_pat(),
+ }
+ }
+
+ fn collect_pat_(&mut self, pat: ast::Pat) -> PatId {
+ let pattern = match &pat {
+ ast::Pat::IdentPat(bp) => {
+ let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
+
+ let key = self.is_lowering_inside_or_pat.then(|| name.clone());
+ let annotation =
+ BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some());
+ let subpat = bp.pat().map(|subpat| self.collect_pat_(subpat));
+ let pattern = if annotation == BindingAnnotation::Unannotated && subpat.is_none() {
+ // This could also be a single-segment path pattern. To
+ // decide that, we need to try resolving the name.
+ let (resolved, _) = self.expander.def_map.resolve_path(
+ self.db,
+ self.expander.module,
+ &name.clone().into(),
+ BuiltinShadowMode::Other,
+ );
+ match resolved.take_values() {
+ Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()),
+ Some(ModuleDefId::EnumVariantId(_)) => {
+ // this is only really valid for unit variants, but
+ // shadowing other enum variants with a pattern is
+ // an error anyway
+ Pat::Path(name.into())
+ }
+ Some(ModuleDefId::AdtId(AdtId::StructId(s)))
+ if self.db.struct_data(s).variant_data.kind() != StructKind::Record =>
+ {
+ // Funnily enough, record structs *can* be shadowed
+ // by pattern bindings (but unit or tuple structs
+ // can't).
+ Pat::Path(name.into())
+ }
+ // shadowing statics is an error as well, so we just ignore that case here
+ _ => Pat::Bind { name, mode: annotation, subpat },
+ }
+ } else {
+ Pat::Bind { name, mode: annotation, subpat }
+ };
+
+ let ptr = AstPtr::new(&pat);
+ let pat = self.alloc_pat(pattern, Either::Left(ptr));
+ if let Some(key) = key {
+ self.name_to_pat_grouping.entry(key).or_default().push(pat);
+ }
+ return pat;
+ }
+ ast::Pat::TupleStructPat(p) => {
+ let path =
+ p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+ Pat::TupleStruct { path, args, ellipsis }
+ }
+ ast::Pat::RefPat(p) => {
+ let pat = self.collect_pat_opt(p.pat());
+ let mutability = Mutability::from_mutable(p.mut_token().is_some());
+ Pat::Ref { pat, mutability }
+ }
+ ast::Pat::PathPat(p) => {
+ let path =
+ p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ path.map(Pat::Path).unwrap_or(Pat::Missing)
+ }
+ ast::Pat::OrPat(p) => {
+ self.is_lowering_inside_or_pat = true;
+ let pats = p.pats().map(|p| self.collect_pat_(p)).collect();
+ Pat::Or(pats)
+ }
+ ast::Pat::ParenPat(p) => return self.collect_pat_opt_(p.pat()),
+ ast::Pat::TuplePat(p) => {
+ let (args, ellipsis) = self.collect_tuple_pat(p.fields());
+ Pat::Tuple { args, ellipsis }
+ }
+ ast::Pat::WildcardPat(_) => Pat::Wild,
+ ast::Pat::RecordPat(p) => {
+ let path =
+ p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+ let args = p
+ .record_pat_field_list()
+ .expect("every struct should have a field list")
+ .fields()
+ .filter_map(|f| {
+ let ast_pat = f.pat()?;
+ let pat = self.collect_pat_(ast_pat);
+ let name = f.field_name()?.as_name();
+ Some(RecordFieldPat { name, pat })
+ })
+ .collect();
+
+ let ellipsis = p
+ .record_pat_field_list()
+ .expect("every struct should have a field list")
+ .rest_pat()
+ .is_some();
+
+ Pat::Record { path, args, ellipsis }
+ }
+ ast::Pat::SlicePat(p) => {
+ let SlicePatComponents { prefix, slice, suffix } = p.components();
+
+ // FIXME properly handle `RestPat`
+ Pat::Slice {
+ prefix: prefix.into_iter().map(|p| self.collect_pat_(p)).collect(),
+ slice: slice.map(|p| self.collect_pat_(p)),
+ suffix: suffix.into_iter().map(|p| self.collect_pat_(p)).collect(),
+ }
+ }
+ ast::Pat::LiteralPat(lit) => {
+ if let Some(ast_lit) = lit.literal() {
+ let expr = Expr::Literal(ast_lit.kind().into());
+ let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit));
+ let expr_id = self.alloc_expr(expr, expr_ptr);
+ Pat::Lit(expr_id)
+ } else {
+ Pat::Missing
+ }
+ }
+ ast::Pat::RestPat(_) => {
+ // `RestPat` requires special handling and should not be mapped
+ // to a Pat. Here we are using `Pat::Missing` as a fallback for
+ // when `RestPat` is mapped to `Pat`, which can easily happen
+ // when the source code being analyzed has a malformed pattern
+ // which includes `..` in a place where it isn't valid.
+
+ Pat::Missing
+ }
+ ast::Pat::BoxPat(boxpat) => {
+ let inner = self.collect_pat_opt_(boxpat.pat());
+ Pat::Box { inner }
+ }
+ ast::Pat::ConstBlockPat(const_block_pat) => {
+ if let Some(expr) = const_block_pat.block_expr() {
+ let expr_id = self.collect_block(expr);
+ Pat::ConstBlock(expr_id)
+ } else {
+ Pat::Missing
+ }
+ }
+ ast::Pat::MacroPat(mac) => match mac.macro_call() {
+ Some(call) => {
+ let macro_ptr = AstPtr::new(&call);
+ let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
+ let pat =
+ self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
+ this.collect_pat_opt_(expanded_pat)
+ });
+ self.source_map.pat_map.insert(src, pat);
+ return pat;
+ }
+ None => Pat::Missing,
+ },
+ // FIXME: implement
+ ast::Pat::RangePat(_) => Pat::Missing,
+ };
+ let ptr = AstPtr::new(&pat);
+ self.alloc_pat(pattern, Either::Left(ptr))
+ }
+
+ fn collect_pat_opt_(&mut self, pat: Option<ast::Pat>) -> PatId {
+ match pat {
+ Some(pat) => self.collect_pat_(pat),
+ None => self.missing_pat(),
+ }
+ }
+
+ fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Box<[PatId]>, Option<usize>) {
+ // Find the location of the `..`, if there is one. Note that we do not
+ // consider the possibility of there being multiple `..` here.
+ let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_)));
+ // We want to skip the `..` pattern here, since we account for it above.
+ let args = args
+ .filter(|p| !matches!(p, ast::Pat::RestPat(_)))
+ .map(|p| self.collect_pat_(p))
+ .collect();
+
+ (args, ellipsis)
+ }
+
+ /// Returns `None` (and emits diagnostics) when `owner` is `#[cfg]`d out, and `Some(())` when
+ /// not.
+ fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> Option<()> {
+ match self.expander.parse_attrs(self.db, owner).cfg() {
+ Some(cfg) => {
+ if self.expander.cfg_options().check(&cfg) != Some(false) {
+ return Some(());
+ }
+
+ self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode {
+ node: InFile::new(
+ self.expander.current_file_id,
+ SyntaxNodePtr::new(owner.syntax()),
+ ),
+ cfg,
+ opts: self.expander.cfg_options().clone(),
+ });
+
+ None
+ }
+ None => Some(()),
+ }
+ }
+}
+
+impl From<ast::LiteralKind> for Literal {
+ fn from(ast_lit_kind: ast::LiteralKind) -> Self {
+ match ast_lit_kind {
+ // FIXME: these should have actual values filled in, but unsure on perf impact
+ LiteralKind::IntNumber(lit) => {
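+ // A numeric literal with a float suffix (e.g. `1f32`) is still an `IntNumber`
+ // token, so float suffixes are checked before integer ones.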
+ if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
+ Literal::Float(
+ FloatTypeWrapper::new(lit.float_value().unwrap_or(Default::default())),
+ builtin,
+ )
+ } else if let builtin @ Some(_) = lit.suffix().and_then(BuiltinInt::from_suffix) {
+ Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
+ } else {
+ let builtin = lit.suffix().and_then(BuiltinUint::from_suffix);
+ Literal::Uint(lit.value().unwrap_or(0), builtin)
+ }
+ }
+ LiteralKind::FloatNumber(lit) => {
+ let ty = lit.suffix().and_then(BuiltinFloat::from_suffix);
+ Literal::Float(FloatTypeWrapper::new(lit.value().unwrap_or(Default::default())), ty)
+ }
+ LiteralKind::ByteString(bs) => {
+ let text = bs.value().map(Box::from).unwrap_or_else(Default::default);
+ Literal::ByteString(text)
+ }
+ LiteralKind::String(s) => {
+ let text = s.value().map(Box::from).unwrap_or_else(Default::default);
+ Literal::String(text)
+ }
+ LiteralKind::Byte(b) => {
+ Literal::Uint(b.value().unwrap_or_default() as u128, Some(BuiltinUint::U8))
+ }
+ LiteralKind::Char(c) => Literal::Char(c.value().unwrap_or_default()),
+ LiteralKind::Bool(val) => Literal::Bool(val),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
new file mode 100644
index 000000000..f4c390dce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -0,0 +1,571 @@
+//! Name resolution for expressions.
+use std::sync::Arc;
+
+use hir_expand::name::Name;
+use la_arena::{Arena, Idx};
+use rustc_hash::FxHashMap;
+
+use crate::{
+ body::Body,
+ db::DefDatabase,
+ expr::{Expr, ExprId, LabelId, Pat, PatId, Statement},
+ BlockId, DefWithBodyId,
+};
+
+pub type ScopeId = Idx<ScopeData>;
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ExprScopes {
+ scopes: Arena<ScopeData>,
+ scope_by_expr: FxHashMap<ExprId, ScopeId>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ScopeEntry {
+ name: Name,
+ pat: PatId,
+}
+
+impl ScopeEntry {
+ pub fn name(&self) -> &Name {
+ &self.name
+ }
+
+ pub fn pat(&self) -> PatId {
+ self.pat
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ScopeData {
+ parent: Option<ScopeId>,
+ block: Option<BlockId>,
+ label: Option<(LabelId, Name)>,
+ entries: Vec<ScopeEntry>,
+}
+
+impl ExprScopes {
+ pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
+ let body = db.body(def);
+ Arc::new(ExprScopes::new(&*body))
+ }
+
+ fn new(body: &Body) -> ExprScopes {
+ let mut scopes =
+ ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() };
+ let mut root = scopes.root_scope();
+ scopes.add_params_bindings(body, root, &body.params);
+ compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root);
+ scopes
+ }
+
+ pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
+ &self.scopes[scope].entries
+ }
+
+ /// If `scope` refers to a block expression scope, returns the corresponding `BlockId`.
+ pub fn block(&self, scope: ScopeId) -> Option<BlockId> {
+ self.scopes[scope].block
+ }
+
+ /// If `scope` refers to a labeled expression scope, returns the corresponding `Label`.
+ pub fn label(&self, scope: ScopeId) -> Option<(LabelId, Name)> {
+ self.scopes[scope].label.clone()
+ }
+
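+ /// Returns the chain of scopes from `scope` up to the root scope, innermost first.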
+ pub fn scope_chain(&self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + '_ {
+ std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
+ }
+
+ pub fn resolve_name_in_scope(&self, scope: ScopeId, name: &Name) -> Option<&ScopeEntry> {
+ self.scope_chain(Some(scope))
+ .find_map(|scope| self.entries(scope).iter().find(|it| it.name == *name))
+ }
+
+ pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
+ self.scope_by_expr.get(&expr).copied()
+ }
+
+ pub fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
+ &self.scope_by_expr
+ }
+
+ fn root_scope(&mut self) -> ScopeId {
+ self.scopes.alloc(ScopeData { parent: None, block: None, label: None, entries: vec![] })
+ }
+
+ fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
+ self.scopes.alloc(ScopeData {
+ parent: Some(parent),
+ block: None,
+ label: None,
+ entries: vec![],
+ })
+ }
+
+ fn new_labeled_scope(&mut self, parent: ScopeId, label: Option<(LabelId, Name)>) -> ScopeId {
+ self.scopes.alloc(ScopeData { parent: Some(parent), block: None, label, entries: vec![] })
+ }
+
+ fn new_block_scope(
+ &mut self,
+ parent: ScopeId,
+ block: BlockId,
+ label: Option<(LabelId, Name)>,
+ ) -> ScopeId {
+ self.scopes.alloc(ScopeData {
+ parent: Some(parent),
+ block: Some(block),
+ label,
+ entries: vec![],
+ })
+ }
+
+ fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
+ let pattern = &body[pat];
+ if let Pat::Bind { name, .. } = pattern {
+ let entry = ScopeEntry { name: name.clone(), pat };
+ self.scopes[scope].entries.push(entry);
+ }
+
+ pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat));
+ }
+
+ fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) {
+ params.iter().for_each(|pat| self.add_bindings(body, scope, *pat));
+ }
+
+ fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
+ self.scope_by_expr.insert(node, scope);
+ }
+}
+
+fn compute_block_scopes(
+ statements: &[Statement],
+ tail: Option<ExprId>,
+ body: &Body,
+ scopes: &mut ExprScopes,
+ scope: &mut ScopeId,
+) {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, initializer, else_branch, .. } => {
+ if let Some(expr) = initializer {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ }
+ if let Some(expr) = else_branch {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ }
+
+ *scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, *scope, *pat);
+ }
+ Statement::Expr { expr, .. } => {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ }
+ }
+ }
+ if let Some(expr) = tail {
+ compute_expr_scopes(expr, body, scopes, scope);
+ }
+}
+
+fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: &mut ScopeId) {
+ let make_label =
+ |label: &Option<LabelId>| label.map(|label| (label, body.labels[label].name.clone()));
+
+ scopes.set_scope(expr, *scope);
+ match &body[expr] {
+ Expr::MacroStmts { statements, tail } => {
+ compute_block_scopes(statements, *tail, body, scopes, scope);
+ }
+ Expr::Block { statements, tail, id, label } => {
+ let mut scope = scopes.new_block_scope(*scope, *id, make_label(label));
+ // Overwrite the old scope for the block expr, so that every block scope can be found
+ // via the block itself (important for blocks that only contain items, no expressions).
+ scopes.set_scope(expr, scope);
+ compute_block_scopes(statements, *tail, body, scopes, &mut scope);
+ }
+ Expr::For { iterable, pat, body: body_expr, label } => {
+ compute_expr_scopes(*iterable, body, scopes, scope);
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ scopes.add_bindings(body, scope, *pat);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::While { condition, body: body_expr, label } => {
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ compute_expr_scopes(*condition, body, scopes, &mut scope);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::Loop { body: body_expr, label } => {
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::Closure { args, body: body_expr, .. } => {
+ let mut scope = scopes.new_scope(*scope);
+ scopes.add_params_bindings(body, scope, args);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
+ }
+ Expr::Match { expr, arms } => {
+ compute_expr_scopes(*expr, body, scopes, scope);
+ for arm in arms.iter() {
+ let mut scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, scope, arm.pat);
+ if let Some(guard) = arm.guard {
+ scope = scopes.new_scope(scope);
+ compute_expr_scopes(guard, body, scopes, &mut scope);
+ }
+ compute_expr_scopes(arm.expr, body, scopes, &mut scope);
+ }
+ }
+ &Expr::If { condition, then_branch, else_branch } => {
+ let mut then_branch_scope = scopes.new_scope(*scope);
+ compute_expr_scopes(condition, body, scopes, &mut then_branch_scope);
+ compute_expr_scopes(then_branch, body, scopes, &mut then_branch_scope);
+ if let Some(else_branch) = else_branch {
+ compute_expr_scopes(else_branch, body, scopes, scope);
+ }
+ }
+ &Expr::Let { pat, expr } => {
+ compute_expr_scopes(expr, body, scopes, scope);
+ *scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, *scope, pat);
+ }
+ e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
+ };
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::WithFixture, FileId, SourceDatabase};
+ use hir_expand::{name::AsName, InFile};
+ use syntax::{algo::find_node_at_offset, ast, AstNode};
+ use test_utils::{assert_eq_text, extract_offset};
+
+ use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId};
+
+ fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
+ let krate = db.test_crate();
+ let crate_def_map = db.crate_def_map(krate);
+
+ let module = crate_def_map.modules_for_file(file_id).next().unwrap();
+ let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
+ match def.take_values().unwrap() {
+ ModuleDefId::FunctionId(it) => it,
+ _ => panic!(),
+ }
+ }
+
+ fn do_check(ra_fixture: &str, expected: &[&str]) {
+ let (offset, code) = extract_offset(ra_fixture);
+ let code = {
+ let mut buf = String::new();
+ let off: usize = offset.into();
+ buf.push_str(&code[..off]);
+ buf.push_str("$0marker");
+ buf.push_str(&code[off..]);
+ buf
+ };
+
+ let (db, position) = TestDB::with_position(&code);
+ let file_id = position.file_id;
+ let offset = position.offset;
+
+ let file_syntax = db.parse(file_id).syntax_node();
+ let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
+ let function = find_function(&db, file_id);
+
+ let scopes = db.expr_scopes(function.into());
+ let (_body, source_map) = db.body_with_source_map(function.into());
+
+ let expr_id = source_map
+ .node_expr(InFile { file_id: file_id.into(), value: &marker.into() })
+ .unwrap();
+ let scope = scopes.scope_for(expr_id);
+
+ let actual = scopes
+ .scope_chain(scope)
+ .flat_map(|scope| scopes.entries(scope))
+ .map(|it| it.name().to_smol_str())
+ .collect::<Vec<_>>()
+ .join("\n");
+ let expected = expected.join("\n");
+ assert_eq_text!(&expected, &actual);
+ }
+
+ #[test]
+ fn test_lambda_scope() {
+ do_check(
+ r"
+ fn quux(foo: i32) {
+ let f = |bar, baz: i32| {
+ $0
+ };
+ }",
+ &["bar", "baz", "foo"],
+ );
+ }
+
+ #[test]
+ fn test_call_scope() {
+ do_check(
+ r"
+ fn quux() {
+ f(|x| $0 );
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_method_call_scope() {
+ do_check(
+ r"
+ fn quux() {
+ z.f(|x| $0 );
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_loop_scope() {
+ do_check(
+ r"
+ fn quux() {
+ loop {
+ let x = ();
+ $0
+ };
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_match() {
+ do_check(
+ r"
+ fn quux() {
+ match () {
+ Some(x) => {
+ $0
+ }
+ };
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_shadow_variable() {
+ do_check(
+ r"
+ fn foo(x: String) {
+ let x : &str = &x$0;
+ }",
+ &["x"],
+ );
+ }
+
+ #[test]
+ fn test_bindings_after_at() {
+ do_check(
+ r"
+fn foo() {
+ match Some(()) {
+ opt @ Some(unit) => {
+ $0
+ }
+ _ => {}
+ }
+}
+",
+ &["opt", "unit"],
+ );
+ }
+
+ #[test]
+ fn macro_inner_item() {
+ do_check(
+ r"
+ macro_rules! mac {
+ () => {{
+ fn inner() {}
+ inner();
+ }};
+ }
+
+ fn foo() {
+ mac!();
+ $0
+ }
+ ",
+ &[],
+ );
+ }
+
+ #[test]
+ fn broken_inner_item() {
+ do_check(
+ r"
+ fn foo() {
+ trait {}
+ $0
+ }
+ ",
+ &[],
+ );
+ }
+
+ fn do_check_local_name(ra_fixture: &str, expected_offset: u32) {
+ let (db, position) = TestDB::with_position(ra_fixture);
+ let file_id = position.file_id;
+ let offset = position.offset;
+
+ let file = db.parse(file_id).ok().unwrap();
+ let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
+ .expect("failed to find a name at the target offset");
+ let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
+
+ let function = find_function(&db, file_id);
+
+ let scopes = db.expr_scopes(function.into());
+ let (_body, source_map) = db.body_with_source_map(function.into());
+
+ let expr_scope = {
+ let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
+ let expr_id =
+ source_map.node_expr(InFile { file_id: file_id.into(), value: &expr_ast }).unwrap();
+ scopes.scope_for(expr_id).unwrap()
+ };
+
+ let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
+ let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
+
+ let local_name = pat_src.value.either(
+ |it| it.syntax_node_ptr().to_node(file.syntax()),
+ |it| it.syntax_node_ptr().to_node(file.syntax()),
+ );
+ assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
+ }
+
+ #[test]
+ fn test_resolve_local_name() {
+ do_check_local_name(
+ r#"
+fn foo(x: i32, y: u32) {
+ {
+ let z = x * 2;
+ }
+ {
+ let t = x$0 * 3;
+ }
+}
+"#,
+ 7,
+ );
+ }
+
+ #[test]
+ fn test_resolve_local_name_declaration() {
+ do_check_local_name(
+ r#"
+fn foo(x: String) {
+ let x : &str = &x$0;
+}
+"#,
+ 7,
+ );
+ }
+
+ #[test]
+ fn test_resolve_local_name_shadow() {
+ do_check_local_name(
+ r"
+fn foo(x: String) {
+ let x : &str = &x;
+ x$0
+}
+",
+ 28,
+ );
+ }
+
+ #[test]
+ fn ref_patterns_contribute_bindings() {
+ do_check_local_name(
+ r"
+fn foo() {
+ if let Some(&from) = bar() {
+ from$0;
+ }
+}
+",
+ 28,
+ );
+ }
+
+ #[test]
+ fn while_let_adds_binding() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(spam) = foo {
+ spam$0
+ }
+}
+"#,
+ 75,
+ );
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ while (((let Option::Some(_) = foo))) && let Option::Some(spam) = foo {
+ spam$0
+ }
+}
+"#,
+ 107,
+ );
+ }
+
+ #[test]
+ fn match_guard_if_let() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ match foo {
+ _ if let Option::Some(spam) = foo => spam$0,
+ }
+}
+"#,
+ 93,
+ );
+ }
+
+ #[test]
+ fn let_chains_can_reference_previous_lets() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<i32> = None;
+ if let Some(spam) = foo && spa$0m > 1 && let Some(spam) = foo && spam > 1 {}
+}
+"#,
+ 61,
+ );
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<i32> = None;
+ if let Some(spam) = foo && spam > 1 && let Some(spam) = foo && sp$0am > 1 {}
+}
+"#,
+ 100,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
new file mode 100644
index 000000000..c9601f855
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
@@ -0,0 +1,127 @@
+mod block;
+
+use base_db::{fixture::WithFixture, SourceDatabase};
+use expect_test::Expect;
+
+use crate::ModuleDefId;
+
+use super::*;
+
+fn lower(ra_fixture: &str) -> Arc<Body> {
+ let db = crate::test_db::TestDB::with_files(ra_fixture);
+
+ let krate = db.crate_graph().iter().next().unwrap();
+ let def_map = db.crate_def_map(krate);
+ let mut fn_def = None;
+ 'outer: for (_, module) in def_map.modules() {
+ for decl in module.scope.declarations() {
+ if let ModuleDefId::FunctionId(it) = decl {
+ fn_def = Some(it);
+ break 'outer;
+ }
+ }
+ }
+
+ db.body(fn_def.unwrap().into())
+}
+
+fn block_def_map_at(ra_fixture: &str) -> String {
+ let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+
+ let module = db.module_at_position(position);
+ module.def_map(&db).dump(&db)
+}
+
+fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
+ let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+
+ let module = db.module_at_position(position);
+ let actual = module.def_map(&db).dump_block_scopes(&db);
+ expect.assert_eq(&actual);
+}
+
+fn check_at(ra_fixture: &str, expect: Expect) {
+ let actual = block_def_map_at(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn your_stack_belongs_to_me() {
+ cov_mark::check!(your_stack_belongs_to_me);
+ lower(
+ r#"
+macro_rules! n_nuple {
+ ($e:tt) => ();
+ ($($rest:tt)*) => {{
+ (n_nuple!($($rest)*)None,)
+ }};
+}
+fn main() { n_nuple!(1,2,3); }
+"#,
+ );
+}
+
+#[test]
+fn recursion_limit() {
+ cov_mark::check!(your_stack_belongs_to_me);
+
+ lower(
+ r#"
+#![recursion_limit = "2"]
+macro_rules! n_nuple {
+ ($e:tt) => ();
+ ($first:tt $($rest:tt)*) => {{
+ n_nuple!($($rest)*)
+ }};
+}
+fn main() { n_nuple!(1,2,3); }
+"#,
+ );
+}
+
+#[test]
+fn issue_3642_bad_macro_stackover() {
+ lower(
+ r#"
+#[macro_export]
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( ast::$ast:ident($it:ident) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ let anchor = match_ast! {
+ match parent {
+ as => {},
+ _ => return None
+ }
+ };
+}"#,
+ );
+}
+
+#[test]
+fn macro_resolve() {
+ // Regression test for a path resolution bug introduced with inner item handling.
+ lower(
+ r#"
+macro_rules! vec {
+ () => { () };
+ ($elem:expr; $n:expr) => { () };
+ ($($x:expr),+ $(,)?) => { () };
+}
+mod m {
+ fn outer() {
+ let _ = vec![FileSet::default(); self.len()];
+ }
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
new file mode 100644
index 000000000..3bba08cfc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
@@ -0,0 +1,397 @@
+use super::*;
+use expect_test::expect;
+
+#[test]
+fn inner_item_smoke() {
+ check_at(
+ r#"
+struct inner {}
+fn outer() {
+ $0
+ fn inner() {}
+}
+"#,
+ expect![[r#"
+ block scope
+ inner: v
+
+ crate
+ inner: t
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn use_from_crate() {
+ check_at(
+ r#"
+struct Struct {}
+fn outer() {
+ fn Struct() {}
+ use Struct as PlainStruct;
+ use crate::Struct as CrateStruct;
+ use self::Struct as SelfStruct;
+ use super::Struct as SuperStruct;
+ $0
+}
+"#,
+ expect![[r#"
+ block scope
+ CrateStruct: t
+ PlainStruct: t v
+ SelfStruct: t
+ Struct: v
+ SuperStruct: _
+
+ crate
+ Struct: t
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn merge_namespaces() {
+ check_at(
+ r#"
+struct name {}
+fn outer() {
+ fn name() {}
+
+ use name as imported; // should import both `name`s
+
+ $0
+}
+"#,
+ expect![[r#"
+ block scope
+ imported: t v
+ name: v
+
+ crate
+ name: t
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_blocks() {
+ check_at(
+ r#"
+fn outer() {
+ struct inner1 {}
+ fn inner() {
+ use inner1;
+ use outer;
+ fn inner2() {}
+ $0
+ }
+}
+"#,
+ expect![[r#"
+ block scope
+ inner1: t
+ inner2: v
+ outer: v
+
+ block scope
+ inner: v
+ inner1: t
+
+ crate
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn super_imports() {
+ check_at(
+ r#"
+mod module {
+ fn f() {
+ use super::Struct;
+ $0
+ }
+}
+
+struct Struct {}
+"#,
+ expect![[r#"
+ block scope
+ Struct: t
+
+ crate
+ Struct: t
+ module: t
+
+ crate::module
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_scoping() {
+ check_block_scopes_at(
+ r#"
+fn f() {
+ mod module {
+ struct Struct {}
+ fn f() {
+ use self::Struct;
+ $0
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ BlockId(1) in ModuleId { krate: CrateId(0), block: Some(BlockId(0)), local_id: Idx::<ModuleData>(1) }
+ BlockId(0) in ModuleId { krate: CrateId(0), block: None, local_id: Idx::<ModuleData>(0) }
+ crate scope
+ "#]],
+ );
+}
+
+#[test]
+fn legacy_macro_items() {
+ // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded
+ // correctly.
+ check_at(
+ r#"
+macro_rules! mark {
+ () => {
+ struct Hit {}
+ }
+}
+
+fn f() {
+ mark!();
+ $0
+}
+"#,
+ expect![[r#"
+ block scope
+ Hit: t
+
+ crate
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_resolve() {
+ check_at(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::cov_mark;
+
+fn f() {
+ fn nested() {
+ cov_mark::mark!(Hit);
+ $0
+ }
+}
+//- /core.rs crate:core
+pub mod cov_mark {
+ #[macro_export]
+ macro_rules! _mark {
+ ($name:ident) => {
+ struct $name {}
+ }
+ }
+
+ pub use crate::_mark as mark;
+}
+"#,
+ expect![[r#"
+ block scope
+ Hit: t
+
+ block scope
+ nested: v
+
+ crate
+ cov_mark: t
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_resolve_legacy() {
+ check_at(
+ r#"
+//- /lib.rs
+mod module;
+
+//- /module.rs
+macro_rules! m {
+ () => {
+ struct Def {}
+ };
+}
+
+fn f() {
+ {
+ m!();
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ block scope
+ Def: t
+
+ crate
+ module: t
+
+ crate::module
+ f: v
+ "#]],
+ )
+}
+
+#[test]
+fn super_does_not_resolve_to_block_module() {
+ check_at(
+ r#"
+fn main() {
+ struct Struct {}
+ mod module {
+ use super::Struct;
+
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ block scope
+ Struct: t
+ module: t
+
+ block scope::module
+ Struct: _
+
+ crate
+ main: v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_import() {
+ // This used to panic, because the default (private) visibility inside block expressions would
+ // point into the containing `DefMap`, which visibilities should never be able to do.
+ cov_mark::check!(adjust_vis_in_block_def_map);
+ check_at(
+ r#"
+mod m {
+ fn main() {
+ use Tr as _;
+ trait Tr {}
+ $0
+ }
+}
+ "#,
+ expect![[r#"
+ block scope
+ _: t
+ Tr: t
+
+ crate
+ m: t
+
+ crate::m
+ main: v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_macro_item_decl() {
+ cov_mark::check!(macro_call_in_macro_stmts_is_added_to_item_tree);
+ check_at(
+ r#"
+macro_rules! inner_declare {
+ ($ident:ident) => {
+ static $ident: u32 = 0;
+ };
+}
+macro_rules! declare {
+ ($ident:ident) => {
+ inner_declare!($ident);
+ };
+}
+
+fn foo() {
+ declare!(bar);
+ bar;
+ $0
+}
+ "#,
+ expect![[r#"
+ block scope
+ bar: v
+
+ crate
+ foo: v
+ "#]],
+ )
+}
+
+#[test]
+fn is_visible_from_same_def_map() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/9481
+ cov_mark::check!(is_visible_from_same_block_def_map);
+ check_at(
+ r#"
+fn outer() {
+ mod tests {
+ use super::*;
+ }
+ use crate::name;
+ $0
+}
+ "#,
+ expect![[r#"
+ block scope
+ name: _
+ tests: t
+
+ block scope::tests
+ name: _
+ outer: v
+
+ crate
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
+fn stmt_macro_expansion_with_trailing_expr() {
+ cov_mark::check!(macro_stmt_with_trailing_macro_expr);
+ check_at(
+ r#"
+macro_rules! mac {
+ () => { mac!($) };
+ ($x:tt) => { fn inner() {} };
+}
+fn foo() {
+ mac!();
+ $0
+}
+ "#,
+ expect![[r#"
+ block scope
+ inner: v
+
+ crate
+ foo: v
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs
new file mode 100644
index 000000000..0e7ce5f85
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_attr.rs
@@ -0,0 +1,654 @@
+//! Builtin attributes resolved by nameres.
+//!
+//! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`.
+//!
+//! It was last synchronized with upstream commit c1a2db3372a4d6896744919284f3287650a38ab7.
+//!
+//! The macros were adjusted to only expand to the attribute name, since that is all we need for
+//! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
+//! ease updating.
+
+use once_cell::sync::OnceCell;
+use rustc_hash::FxHashMap;
+
+/// Ignored attribute namespaces used by tools.
+pub const TOOL_MODULES: &[&str] = &["rustfmt", "clippy"];
+
+pub struct BuiltinAttribute {
+ pub name: &'static str,
+ pub template: AttributeTemplate,
+}
+
+/// A template that the attribute input must match.
+/// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now.
+#[derive(Clone, Copy)]
+pub struct AttributeTemplate {
+ pub word: bool,
+ pub list: Option<&'static str>,
+ pub name_value_str: Option<&'static str>,
+}
+
+pub fn find_builtin_attr_idx(name: &str) -> Option<usize> {
+ static BUILTIN_LOOKUP_TABLE: OnceCell<FxHashMap<&'static str, usize>> = OnceCell::new();
+ BUILTIN_LOOKUP_TABLE
+ .get_or_init(|| {
+ INERT_ATTRIBUTES.iter().map(|attr| attr.name).enumerate().map(|(a, b)| (b, a)).collect()
+ })
+ .get(name)
+ .copied()
+}
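+
+// Editorial usage sketch (not part of the upstream file): attribute name resolution can first
+// check the tool namespaces and then fall back to the builtin table defined below, e.g.
+//
+//     let is_tool = TOOL_MODULES.contains(&"clippy");
+//     let is_builtin = find_builtin_attr_idx("inline").is_some();   // `inline` is in INERT_ATTRIBUTES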
+
+// impl AttributeTemplate {
+// const DEFAULT: AttributeTemplate =
+// AttributeTemplate { word: false, list: None, name_value_str: None };
+// }
+
+/// A convenience macro for constructing attribute templates.
+/// E.g., `template!(Word, List: "description")` means that the attribute
+/// supports forms `#[attr]` and `#[attr(description)]`.
+macro_rules! template {
+ (Word) => { template!(@ true, None, None) };
+ (List: $descr: expr) => { template!(@ false, Some($descr), None) };
+ (NameValueStr: $descr: expr) => { template!(@ false, None, Some($descr)) };
+ (Word, List: $descr: expr) => { template!(@ true, Some($descr), None) };
+ (Word, NameValueStr: $descr: expr) => { template!(@ true, None, Some($descr)) };
+ (List: $descr1: expr, NameValueStr: $descr2: expr) => {
+ template!(@ false, Some($descr1), Some($descr2))
+ };
+ (Word, List: $descr1: expr, NameValueStr: $descr2: expr) => {
+ template!(@ true, Some($descr1), Some($descr2))
+ };
+ (@ $word: expr, $list: expr, $name_value_str: expr) => {
+ AttributeTemplate {
+ word: $word, list: $list, name_value_str: $name_value_str
+ }
+ };
+}
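+
+// For exposition (editorial, not part of the upstream file): each `template!` form expands to a
+// plain `AttributeTemplate` literal, e.g.
+//
+//     template!(Word, List: "description")
+//     // => AttributeTemplate { word: true, list: Some("description"), name_value_str: None }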
+
+macro_rules! ungated {
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)? $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+}
+
+macro_rules! gated {
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)?, $gate:ident, $msg:expr $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)?, $msg:expr $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+}
+
+macro_rules! rustc_attr {
+ (TEST, $attr:ident, $typ:expr, $tpl:expr, $duplicate:expr $(, @only_local: $only_local:expr)? $(,)?) => {
+ rustc_attr!(
+ $attr,
+ $typ,
+ $tpl,
+ $duplicate,
+ $(@only_local: $only_local,)?
+ concat!(
+ "the `#[",
+ stringify!($attr),
+ "]` attribute is just used for rustc unit tests \
+ and will never be stable",
+ ),
+ )
+ };
+ ($attr:ident, $typ:expr, $tpl:expr, $duplicates:expr $(, @only_local: $only_local:expr)?, $msg:expr $(,)?) => {
+ BuiltinAttribute { name: stringify!($attr), template: $tpl }
+ };
+}
+
+#[allow(unused_macros)]
+macro_rules! experimental {
+ ($attr:ident) => {
+ concat!("the `#[", stringify!($attr), "]` attribute is an experimental feature")
+ };
+}
+
+/// "Inert" built-in attributes that have a special meaning to rustc or rustdoc.
+#[rustfmt::skip]
+pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
+ // ==========================================================================
+ // Stable attributes:
+ // ==========================================================================
+
+ // Conditional compilation:
+ ungated!(cfg, Normal, template!(List: "predicate"), DuplicatesOk),
+ ungated!(cfg_attr, Normal, template!(List: "predicate, attr1, attr2, ..."), DuplicatesOk),
+
+ // Testing:
+ ungated!(ignore, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing),
+ ungated!(
+ should_panic, Normal,
+ template!(Word, List: r#"expected = "reason"#, NameValueStr: "reason"), FutureWarnFollowing,
+ ),
+ // FIXME(Centril): This can be used on stable but shouldn't.
+ ungated!(reexport_test_harness_main, CrateLevel, template!(NameValueStr: "name"), ErrorFollowing),
+
+ // Macros:
+ ungated!(automatically_derived, Normal, template!(Word), WarnFollowing),
+ ungated!(macro_use, Normal, template!(Word, List: "name1, name2, ..."), WarnFollowingWordOnly),
+ ungated!(macro_escape, Normal, template!(Word), WarnFollowing), // Deprecated synonym for `macro_use`.
+ ungated!(macro_export, Normal, template!(Word, List: "local_inner_macros"), WarnFollowing),
+ ungated!(proc_macro, Normal, template!(Word), ErrorFollowing),
+ ungated!(
+ proc_macro_derive, Normal,
+ template!(List: "TraitName, /*opt*/ attributes(name1, name2, ...)"), ErrorFollowing,
+ ),
+ ungated!(proc_macro_attribute, Normal, template!(Word), ErrorFollowing),
+
+ // Lints:
+ ungated!(
+ warn, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ ungated!(
+ allow, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ gated!(
+ expect, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk,
+ lint_reasons, experimental!(expect)
+ ),
+ ungated!(
+ forbid, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ ungated!(
+ deny, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#), DuplicatesOk
+ ),
+ ungated!(must_use, Normal, template!(Word, NameValueStr: "reason"), FutureWarnFollowing),
+ gated!(
+ must_not_suspend, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing,
+ experimental!(must_not_suspend)
+ ),
+ ungated!(
+ deprecated, Normal,
+ template!(
+ Word,
+ List: r#"/*opt*/ since = "version", /*opt*/ note = "reason""#,
+ NameValueStr: "reason"
+ ),
+ ErrorFollowing
+ ),
+
+ // Crate properties:
+ ungated!(crate_name, CrateLevel, template!(NameValueStr: "name"), FutureWarnFollowing),
+ ungated!(crate_type, CrateLevel, template!(NameValueStr: "bin|lib|..."), DuplicatesOk),
+ // crate_id is deprecated
+ ungated!(crate_id, CrateLevel, template!(NameValueStr: "ignored"), FutureWarnFollowing),
+
+ // ABI, linking, symbols, and FFI
+ ungated!(
+ link, Normal,
+ template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...", /*opt*/ wasm_import_module = "...""#),
+ DuplicatesOk,
+ ),
+ ungated!(link_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
+ ungated!(no_link, Normal, template!(Word), WarnFollowing),
+ ungated!(repr, Normal, template!(List: "C"), DuplicatesOk),
+ ungated!(export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
+ ungated!(link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding),
+ ungated!(no_mangle, Normal, template!(Word), WarnFollowing, @only_local: true),
+ ungated!(used, Normal, template!(Word, List: "compiler|linker"), WarnFollowing, @only_local: true),
+
+ // Limits:
+ ungated!(recursion_limit, CrateLevel, template!(NameValueStr: "N"), FutureWarnFollowing),
+ ungated!(type_length_limit, CrateLevel, template!(NameValueStr: "N"), FutureWarnFollowing),
+ gated!(
+ const_eval_limit, CrateLevel, template!(NameValueStr: "N"), ErrorFollowing,
+ const_eval_limit, experimental!(const_eval_limit)
+ ),
+ gated!(
+ move_size_limit, CrateLevel, template!(NameValueStr: "N"), ErrorFollowing,
+ large_assignments, experimental!(move_size_limit)
+ ),
+
+ // Entry point:
+ ungated!(start, Normal, template!(Word), WarnFollowing),
+ ungated!(no_start, CrateLevel, template!(Word), WarnFollowing),
+ ungated!(no_main, CrateLevel, template!(Word), WarnFollowing),
+
+ // Modules, prelude, and resolution:
+ ungated!(path, Normal, template!(NameValueStr: "file"), FutureWarnFollowing),
+ ungated!(no_std, CrateLevel, template!(Word), WarnFollowing),
+ ungated!(no_implicit_prelude, Normal, template!(Word), WarnFollowing),
+ ungated!(non_exhaustive, Normal, template!(Word), WarnFollowing),
+
+ // Runtime
+ ungated!(
+ windows_subsystem, CrateLevel,
+ template!(NameValueStr: "windows|console"), FutureWarnFollowing
+ ),
+ ungated!(panic_handler, Normal, template!(Word), WarnFollowing), // RFC 2070
+
+ // Code generation:
+ ungated!(inline, Normal, template!(Word, List: "always|never"), FutureWarnFollowing, @only_local: true),
+ ungated!(cold, Normal, template!(Word), WarnFollowing, @only_local: true),
+ ungated!(no_builtins, CrateLevel, template!(Word), WarnFollowing),
+ ungated!(target_feature, Normal, template!(List: r#"enable = "name""#), DuplicatesOk),
+ ungated!(track_caller, Normal, template!(Word), WarnFollowing),
+ gated!(
+ no_sanitize, Normal,
+ template!(List: "address, memory, thread"), DuplicatesOk,
+ experimental!(no_sanitize)
+ ),
+ gated!(no_coverage, Normal, template!(Word), WarnFollowing, experimental!(no_coverage)),
+
+ ungated!(
+ doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
+ ),
+
+ // ==========================================================================
+ // Unstable attributes:
+ // ==========================================================================
+
+ // RFC #3191: #[debugger_visualizer] support
+ gated!(
+ debugger_visualizer, Normal, template!(List: r#"natvis_file = "...", gdb_script_file = "...""#),
+ DuplicatesOk, experimental!(debugger_visualizer)
+ ),
+
+ // Linking:
+ gated!(naked, Normal, template!(Word), WarnFollowing, @only_local: true, naked_functions, experimental!(naked)),
+ gated!(
+ link_ordinal, Normal, template!(List: "ordinal"), ErrorPreceding, raw_dylib,
+ experimental!(link_ordinal)
+ ),
+
+ // Plugins:
+ // XXX Modified for use in rust-analyzer
+ // BuiltinAttribute {
+ // name: sym::plugin,
+ // only_local: false,
+ // type_: CrateLevel,
+ // template: template!(List: "name"),
+ // duplicates: DuplicatesOk,
+ // gate: Gated(
+ // Stability::Deprecated(
+ // "https://github.com/rust-lang/rust/pull/64675",
+ // Some("may be removed in a future compiler version"),
+ // ),
+ // sym::plugin,
+ // "compiler plugins are deprecated",
+ // cfg_fn!(plugin)
+ // ),
+ // },
+ BuiltinAttribute {
+ name: "plugin",
+ template: template!(List: "name"),
+ },
+
+ // Testing:
+ gated!(
+ test_runner, CrateLevel, template!(List: "path"), ErrorFollowing, custom_test_frameworks,
+ "custom test frameworks are an unstable feature",
+ ),
+ // RFC #1268
+ gated!(
+ marker, Normal, template!(Word), WarnFollowing, marker_trait_attr, experimental!(marker)
+ ),
+ gated!(
+ thread_local, Normal, template!(Word), WarnFollowing,
+ "`#[thread_local]` is an experimental feature, and does not currently handle destructors",
+ ),
+ gated!(no_core, CrateLevel, template!(Word), WarnFollowing, experimental!(no_core)),
+ // RFC 2412
+ gated!(
+ optimize, Normal, template!(List: "size|speed"), ErrorPreceding, optimize_attribute,
+ experimental!(optimize),
+ ),
+ // RFC 2867
+ gated!(
+ instruction_set, Normal, template!(List: "set"), ErrorPreceding,
+ isa_attribute, experimental!(instruction_set)
+ ),
+
+ gated!(
+ ffi_returns_twice, Normal, template!(Word), WarnFollowing, experimental!(ffi_returns_twice)
+ ),
+ gated!(ffi_pure, Normal, template!(Word), WarnFollowing, experimental!(ffi_pure)),
+ gated!(ffi_const, Normal, template!(Word), WarnFollowing, experimental!(ffi_const)),
+ gated!(
+ register_attr, CrateLevel, template!(List: "attr1, attr2, ..."), DuplicatesOk,
+ experimental!(register_attr),
+ ),
+ gated!(
+ register_tool, CrateLevel, template!(List: "tool1, tool2, ..."), DuplicatesOk,
+ experimental!(register_tool),
+ ),
+
+ gated!(
+ cmse_nonsecure_entry, Normal, template!(Word), WarnFollowing,
+ experimental!(cmse_nonsecure_entry)
+ ),
+ // RFC 2632
+ gated!(
+ const_trait, Normal, template!(Word), WarnFollowing, const_trait_impl,
+ "`const` is a temporary placeholder for marking a trait that is suitable for `const` \
+ `impls` and all default bodies as `const`, which may be removed or renamed in the \
+ future."
+ ),
+ // lang-team MCP 147
+ gated!(
+ deprecated_safe, Normal, template!(List: r#"since = "version", note = "...""#), ErrorFollowing,
+ experimental!(deprecated_safe),
+ ),
+
+ // ==========================================================================
+ // Internal attributes: Stability, deprecation, and unsafe:
+ // ==========================================================================
+
+ ungated!(feature, CrateLevel, template!(List: "name1, name2, ..."), DuplicatesOk),
+ // DuplicatesOk since it has its own validation
+ ungated!(
+ stable, Normal, template!(List: r#"feature = "name", since = "version""#), DuplicatesOk,
+ ),
+ ungated!(
+ unstable, Normal,
+ template!(List: r#"feature = "name", reason = "...", issue = "N""#), DuplicatesOk,
+ ),
+ ungated!(rustc_const_unstable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk),
+ ungated!(rustc_const_stable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk),
+ gated!(
+ allow_internal_unstable, Normal, template!(Word, List: "feat1, feat2, ..."), DuplicatesOk,
+ "allow_internal_unstable side-steps feature gating and stability checks",
+ ),
+ gated!(
+ rustc_allow_const_fn_unstable, Normal,
+ template!(Word, List: "feat1, feat2, ..."), DuplicatesOk,
+ "rustc_allow_const_fn_unstable side-steps feature gating and stability checks"
+ ),
+ gated!(
+ allow_internal_unsafe, Normal, template!(Word), WarnFollowing,
+ "allow_internal_unsafe side-steps the unsafe_code lint",
+ ),
+
+ // ==========================================================================
+ // Internal attributes: Type system related:
+ // ==========================================================================
+
+ gated!(fundamental, Normal, template!(Word), WarnFollowing, experimental!(fundamental)),
+ gated!(
+ may_dangle, Normal, template!(Word), WarnFollowing, dropck_eyepatch,
+ "`may_dangle` has unstable semantics and may be removed in the future",
+ ),
+
+ // ==========================================================================
+ // Internal attributes: Runtime related:
+ // ==========================================================================
+
+ rustc_attr!(rustc_allocator, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
+ rustc_attr!(rustc_allocator_nounwind, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
+ gated!(
+ alloc_error_handler, Normal, template!(Word), WarnFollowing,
+ experimental!(alloc_error_handler)
+ ),
+ gated!(
+ default_lib_allocator, Normal, template!(Word), WarnFollowing, allocator_internals,
+ experimental!(default_lib_allocator),
+ ),
+ gated!(
+ needs_allocator, Normal, template!(Word), WarnFollowing, allocator_internals,
+ experimental!(needs_allocator),
+ ),
+ gated!(panic_runtime, Normal, template!(Word), WarnFollowing, experimental!(panic_runtime)),
+ gated!(
+ needs_panic_runtime, Normal, template!(Word), WarnFollowing,
+ experimental!(needs_panic_runtime)
+ ),
+ gated!(
+ compiler_builtins, Normal, template!(Word), WarnFollowing,
+ "the `#[compiler_builtins]` attribute is used to identify the `compiler_builtins` crate \
+ which contains compiler-rt intrinsics and will never be stable",
+ ),
+ gated!(
+ profiler_runtime, Normal, template!(Word), WarnFollowing,
+ "the `#[profiler_runtime]` attribute is used to identify the `profiler_builtins` crate \
+ which contains the profiler runtime and will never be stable",
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Linkage:
+ // ==========================================================================
+
+ gated!(
+ linkage, Normal, template!(NameValueStr: "external|internal|..."), ErrorPreceding, @only_local: true,
+ "the `linkage` attribute is experimental and not portable across platforms",
+ ),
+ rustc_attr!(
+ rustc_std_internal_symbol, Normal, template!(Word), WarnFollowing, @only_local: true, INTERNAL_UNSTABLE
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Macro related:
+ // ==========================================================================
+
+ rustc_attr!(
+ rustc_builtin_macro, Normal,
+ template!(Word, List: "name, /*opt*/ attributes(name1, name2, ...)"), ErrorFollowing,
+ IMPL_DETAIL,
+ ),
+ rustc_attr!(rustc_proc_macro_decls, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
+ rustc_attr!(
+ rustc_macro_transparency, Normal,
+ template!(NameValueStr: "transparent|semitransparent|opaque"), ErrorFollowing,
+ "used internally for testing macro hygiene",
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Diagnostics related:
+ // ==========================================================================
+
+ rustc_attr!(
+ rustc_on_unimplemented, Normal,
+ template!(
+ List: r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#,
+ NameValueStr: "message"
+ ),
+ ErrorFollowing,
+ INTERNAL_UNSTABLE
+ ),
+ // Enumerates "identity-like" conversion methods to suggest on type mismatch.
+ rustc_attr!(
+ rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
+    // Prevents field reads in the marked trait or method from being considered
+    // during dead code analysis.
+ rustc_attr!(
+ rustc_trivial_field_reads, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
+    // Used by the `rustc::potential_query_instability` lint to warn about methods which
+ // might not be stable during incremental compilation.
+ rustc_attr!(rustc_lint_query_instability, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
+ // Used by the `rustc::untranslatable_diagnostic` and `rustc::diagnostic_outside_of_impl` lints
+ // to assist in changes to diagnostic APIs.
+ rustc_attr!(rustc_lint_diagnostics, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE),
+
+ // ==========================================================================
+ // Internal attributes, Const related:
+ // ==========================================================================
+
+ rustc_attr!(rustc_promotable, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
+ rustc_attr!(
+ rustc_legacy_const_generics, Normal, template!(List: "N"), ErrorFollowing,
+ INTERNAL_UNSTABLE
+ ),
+ // Do not const-check this function's body. It will always get replaced during CTFE.
+ rustc_attr!(
+ rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Layout related:
+ // ==========================================================================
+
+ rustc_attr!(
+ rustc_layout_scalar_valid_range_start, Normal, template!(List: "value"), ErrorFollowing,
+ "the `#[rustc_layout_scalar_valid_range_start]` attribute is just used to enable \
+ niche optimizations in libcore and libstd and will never be stable",
+ ),
+ rustc_attr!(
+ rustc_layout_scalar_valid_range_end, Normal, template!(List: "value"), ErrorFollowing,
+ "the `#[rustc_layout_scalar_valid_range_end]` attribute is just used to enable \
+ niche optimizations in libcore and libstd and will never be stable",
+ ),
+ rustc_attr!(
+ rustc_nonnull_optimization_guaranteed, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable \
+ niche optimizations in libcore and libstd and will never be stable",
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Misc:
+ // ==========================================================================
+ gated!(
+ lang, Normal, template!(NameValueStr: "name"), DuplicatesOk, @only_local: true, lang_items,
+ "language items are subject to change",
+ ),
+ rustc_attr!(
+ rustc_pass_by_value, Normal,
+ template!(Word), ErrorFollowing,
+ "#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference."
+ ),
+ rustc_attr!(
+ rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
+ "#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
+ ),
+ rustc_attr!(
+ rustc_allow_incoherent_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: true,
+ "#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
+ ),
+ rustc_attr!(
+ rustc_has_incoherent_inherent_impls, AttributeType::Normal, template!(Word), ErrorFollowing,
+ "#[rustc_has_incoherent_inherent_impls] allows the addition of incoherent inherent impls for \
+ the given type by annotating all impl items with #[rustc_allow_incoherent_impl]."
+ ),
+ rustc_attr!(
+ rustc_box, AttributeType::Normal, template!(Word), ErrorFollowing,
+ "#[rustc_box] allows creating boxes \
+ and it is only intended to be used in `alloc`."
+ ),
+
+ // modified for r-a
+ // BuiltinAttribute {
+ // name: sym::rustc_diagnostic_item,
+ // // FIXME: This can be `true` once we always use `tcx.is_diagnostic_item`.
+ // only_local: false,
+ // type_: Normal,
+ // template: template!(NameValueStr: "name"),
+ // duplicates: ErrorFollowing,
+ // gate: Gated(
+ // Stability::Unstable,
+ // sym::rustc_attrs,
+ // "diagnostic items compiler internal support for linting",
+ // cfg_fn!(rustc_attrs),
+ // ),
+ // },
+ BuiltinAttribute {
+ name: "rustc_diagnostic_item",
+ template: template!(NameValueStr: "name"),
+ },
+ gated!(
+ // Used in resolve:
+ prelude_import, Normal, template!(Word), WarnFollowing,
+ "`#[prelude_import]` is for use by rustc only",
+ ),
+ gated!(
+ rustc_paren_sugar, Normal, template!(Word), WarnFollowing, unboxed_closures,
+ "unboxed_closures are still evolving",
+ ),
+ rustc_attr!(
+ rustc_inherit_overflow_checks, Normal, template!(Word), WarnFollowing, @only_local: true,
+ "the `#[rustc_inherit_overflow_checks]` attribute is just used to control \
+ overflow checking behavior of several libcore functions that are inlined \
+ across crates and will never be stable",
+ ),
+ rustc_attr!(
+ rustc_reservation_impl, Normal,
+ template!(NameValueStr: "reservation message"), ErrorFollowing,
+ "the `#[rustc_reservation_impl]` attribute is internally used \
+ for reserving for `for<T> From<!> for T` impl"
+ ),
+ rustc_attr!(
+ rustc_test_marker, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_test_marker]` attribute is used internally to track tests",
+ ),
+ rustc_attr!(
+ rustc_unsafe_specialization_marker, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_unsafe_specialization_marker]` attribute is used to check specializations"
+ ),
+ rustc_attr!(
+ rustc_specialization_trait, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_specialization_trait]` attribute is used to check specializations"
+ ),
+ rustc_attr!(
+ rustc_main, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_main]` attribute is used internally to specify test entry point function",
+ ),
+ rustc_attr!(
+ rustc_skip_array_during_method_dispatch, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_skip_array_during_method_dispatch]` attribute is used to exclude a trait \
+ from method dispatch when the receiver is an array, for compatibility in editions < 2021."
+ ),
+ rustc_attr!(
+ rustc_must_implement_one_of, Normal, template!(List: "function1, function2, ..."), ErrorFollowing,
+ "the `#[rustc_must_implement_one_of]` attribute is used to change minimal complete \
+ definition of a trait, it's currently in experimental form and should be changed before \
+ being exposed outside of the std"
+ ),
+
+ // ==========================================================================
+ // Internal attributes, Testing:
+ // ==========================================================================
+
+ rustc_attr!(TEST, rustc_outlives, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_capture_analysis, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+ rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(
+ TEST, rustc_error, Normal,
+ template!(Word, List: "delay_span_bug_from_inside_query"), WarnFollowingWordOnly
+ ),
+ rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(
+ TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
+ ),
+ rustc_attr!(
+ TEST, rustc_then_this_would_need, Normal, template!(List: "DepNode"), DuplicatesOk
+ ),
+ rustc_attr!(
+ TEST, rustc_clean, Normal,
+ template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#),
+ DuplicatesOk,
+ ),
+ rustc_attr!(
+ TEST, rustc_partition_reused, Normal,
+ template!(List: r#"cfg = "...", module = "...""#), DuplicatesOk,
+ ),
+ rustc_attr!(
+ TEST, rustc_partition_codegened, Normal,
+ template!(List: r#"cfg = "...", module = "...""#), DuplicatesOk,
+ ),
+ rustc_attr!(
+ TEST, rustc_expected_cgu_reuse, Normal,
+ template!(List: r#"cfg = "...", module = "...", kind = "...""#), DuplicatesOk,
+ ),
+ rustc_attr!(TEST, rustc_symbol_name, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_polymorphize_error, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_def_path, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_mir, Normal, template!(List: "arg1, arg2, ..."), DuplicatesOk),
+ rustc_attr!(TEST, rustc_dump_program_clauses, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_env_program_clauses, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_object_lifetime_default, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_vtable, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dummy, Normal, template!(Word /* doesn't matter*/), DuplicatesOk),
+ gated!(
+ omit_gdb_pretty_printer_section, Normal, template!(Word), WarnFollowing,
+ "the `#[omit_gdb_pretty_printer_section]` attribute is just used for the Rust test suite",
+ ),
+];
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
new file mode 100644
index 000000000..25a408036
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
@@ -0,0 +1,158 @@
+//! This module defines built-in types.
+//!
+//! A peculiarity of built-in types is that they are always available and are
+//! not associated with any particular crate.
+
+use std::fmt;
+
+use hir_expand::name::{name, AsName, Name};
+/// Different signed int types.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum BuiltinInt {
+ Isize,
+ I8,
+ I16,
+ I32,
+ I64,
+ I128,
+}
+
+/// Different unsigned int types.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum BuiltinUint {
+ Usize,
+ U8,
+ U16,
+ U32,
+ U64,
+ U128,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum BuiltinFloat {
+ F32,
+ F64,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum BuiltinType {
+ Char,
+ Bool,
+ Str,
+ Int(BuiltinInt),
+ Uint(BuiltinUint),
+ Float(BuiltinFloat),
+}
+
+impl BuiltinType {
+ #[rustfmt::skip]
+ pub const ALL: &'static [(Name, BuiltinType)] = &[
+ (name![char], BuiltinType::Char),
+ (name![bool], BuiltinType::Bool),
+ (name![str], BuiltinType::Str),
+
+ (name![isize], BuiltinType::Int(BuiltinInt::Isize)),
+ (name![i8], BuiltinType::Int(BuiltinInt::I8)),
+ (name![i16], BuiltinType::Int(BuiltinInt::I16)),
+ (name![i32], BuiltinType::Int(BuiltinInt::I32)),
+ (name![i64], BuiltinType::Int(BuiltinInt::I64)),
+ (name![i128], BuiltinType::Int(BuiltinInt::I128)),
+
+ (name![usize], BuiltinType::Uint(BuiltinUint::Usize)),
+ (name![u8], BuiltinType::Uint(BuiltinUint::U8)),
+ (name![u16], BuiltinType::Uint(BuiltinUint::U16)),
+ (name![u32], BuiltinType::Uint(BuiltinUint::U32)),
+ (name![u64], BuiltinType::Uint(BuiltinUint::U64)),
+ (name![u128], BuiltinType::Uint(BuiltinUint::U128)),
+
+ (name![f32], BuiltinType::Float(BuiltinFloat::F32)),
+ (name![f64], BuiltinType::Float(BuiltinFloat::F64)),
+ ];
+
+ pub fn by_name(name: &Name) -> Option<Self> {
+ Self::ALL.iter().find_map(|(n, ty)| if n == name { Some(*ty) } else { None })
+ }
+}
+
+impl AsName for BuiltinType {
+ fn as_name(&self) -> Name {
+ match self {
+ BuiltinType::Char => name![char],
+ BuiltinType::Bool => name![bool],
+ BuiltinType::Str => name![str],
+ BuiltinType::Int(it) => match it {
+ BuiltinInt::Isize => name![isize],
+ BuiltinInt::I8 => name![i8],
+ BuiltinInt::I16 => name![i16],
+ BuiltinInt::I32 => name![i32],
+ BuiltinInt::I64 => name![i64],
+ BuiltinInt::I128 => name![i128],
+ },
+ BuiltinType::Uint(it) => match it {
+ BuiltinUint::Usize => name![usize],
+ BuiltinUint::U8 => name![u8],
+ BuiltinUint::U16 => name![u16],
+ BuiltinUint::U32 => name![u32],
+ BuiltinUint::U64 => name![u64],
+ BuiltinUint::U128 => name![u128],
+ },
+ BuiltinType::Float(it) => match it {
+ BuiltinFloat::F32 => name![f32],
+ BuiltinFloat::F64 => name![f64],
+ },
+ }
+ }
+}
+
+impl fmt::Display for BuiltinType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let type_name = self.as_name();
+ type_name.fmt(f)
+ }
+}
+
+#[rustfmt::skip]
+impl BuiltinInt {
+ pub fn from_suffix(suffix: &str) -> Option<BuiltinInt> {
+ let res = match suffix {
+ "isize" => Self::Isize,
+ "i8" => Self::I8,
+ "i16" => Self::I16,
+ "i32" => Self::I32,
+ "i64" => Self::I64,
+ "i128" => Self::I128,
+
+ _ => return None,
+ };
+ Some(res)
+ }
+}
+
+#[rustfmt::skip]
+impl BuiltinUint {
+ pub fn from_suffix(suffix: &str) -> Option<BuiltinUint> {
+ let res = match suffix {
+ "usize" => Self::Usize,
+ "u8" => Self::U8,
+ "u16" => Self::U16,
+ "u32" => Self::U32,
+ "u64" => Self::U64,
+ "u128" => Self::U128,
+
+ _ => return None,
+ };
+ Some(res)
+ }
+}
+
+#[rustfmt::skip]
+impl BuiltinFloat {
+ pub fn from_suffix(suffix: &str) -> Option<BuiltinFloat> {
+ let res = match suffix {
+ "f32" => BuiltinFloat::F32,
+ "f64" => BuiltinFloat::F64,
+ _ => return None,
+ };
+ Some(res)
+ }
+}
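+
+// Editorial sketch (not part of the upstream file): these helpers map literal suffixes and type
+// names onto the enums defined above, e.g.
+//
+//     assert_eq!(BuiltinInt::from_suffix("i64"), Some(BuiltinInt::I64));
+//     assert_eq!(BuiltinType::by_name(&name![u32]), Some(BuiltinType::Uint(BuiltinUint::U32)));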
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
new file mode 100644
index 000000000..5b1435e8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -0,0 +1,207 @@
+//! When *constructing* `hir`, we start at some parent syntax node and recursively
+//! lower the children.
+//!
+//! This module allows one to go in the opposite direction: start with a syntax
+//! node for a *child*, and get its hir.
+
+use either::Either;
+use hir_expand::HirFileId;
+use syntax::ast::HasDocComments;
+
+use crate::{
+ db::DefDatabase,
+ dyn_map::DynMap,
+ item_scope::ItemScope,
+ keys,
+ src::{HasChildSource, HasSource},
+ AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, FieldId, ImplId, Lookup, MacroId,
+ ModuleDefId, ModuleId, TraitId, VariantId,
+};
+
+pub trait ChildBySource {
+ fn child_by_source(&self, db: &dyn DefDatabase, file_id: HirFileId) -> DynMap {
+ let mut res = DynMap::default();
+ self.child_by_source_to(db, &mut res, file_id);
+ res
+ }
+ fn child_by_source_to(&self, db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId);
+}
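+
+// Rough usage sketch (editorial, not part of the upstream file): callers build the map for a
+// definition and a file, then look entries up under the typed keys that the impls below populate
+// via `insert`; the read-side accessor name (`get`) on `DynMap` is assumed here:
+//
+//     let map = module_id.child_by_source(db, file_id);
+//     let func = map[keys::FUNCTION].get(&fn_source);   // Option<&FunctionId>, assuming `get`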
+
+impl ChildBySource for TraitId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let data = db.trait_data(*self);
+
+ data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+ |(ast_id, call_id)| {
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ },
+ );
+ data.items.iter().for_each(|&(_, item)| {
+ add_assoc_item(db, res, file_id, item);
+ });
+ }
+}
+
+impl ChildBySource for ImplId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let data = db.impl_data(*self);
+ data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+ |(ast_id, call_id)| {
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ },
+ );
+ data.items.iter().for_each(|&item| {
+ add_assoc_item(db, res, file_id, item);
+ });
+ }
+}
+
+fn add_assoc_item(db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId, item: AssocItemId) {
+ match item {
+ AssocItemId::FunctionId(func) => {
+ let loc = func.lookup(db);
+ if loc.id.file_id() == file_id {
+ res[keys::FUNCTION].insert(loc.source(db).value, func)
+ }
+ }
+ AssocItemId::ConstId(konst) => {
+ let loc = konst.lookup(db);
+ if loc.id.file_id() == file_id {
+ res[keys::CONST].insert(loc.source(db).value, konst)
+ }
+ }
+ AssocItemId::TypeAliasId(ty) => {
+ let loc = ty.lookup(db);
+ if loc.id.file_id() == file_id {
+ res[keys::TYPE_ALIAS].insert(loc.source(db).value, ty)
+ }
+ }
+ }
+}
+
+impl ChildBySource for ModuleId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let def_map = self.def_map(db);
+ let module_data = &def_map[self.local_id];
+ module_data.scope.child_by_source_to(db, res, file_id);
+ }
+}
+
+impl ChildBySource for ItemScope {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
+ self.impls().for_each(|imp| add_impl(db, res, file_id, imp));
+ self.unnamed_consts().for_each(|konst| {
+ let loc = konst.lookup(db);
+ if loc.id.file_id() == file_id {
+ res[keys::CONST].insert(loc.source(db).value, konst);
+ }
+ });
+ self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
+ |(ast_id, call_id)| {
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ },
+ );
+ self.legacy_macros().for_each(|(_, ids)| {
+ ids.iter().for_each(|&id| {
+ if let MacroId::MacroRulesId(id) = id {
+ let loc = id.lookup(db);
+ if loc.id.file_id() == file_id {
+ res[keys::MACRO_RULES].insert(loc.source(db).value, id);
+ }
+ }
+ })
+ });
+ self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
+ |(ast_id, calls)| {
+ let adt = ast_id.to_node(db.upcast());
+ calls.for_each(|(attr_id, call_id, calls)| {
+ if let Some(Either::Left(attr)) =
+ adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
+ {
+ res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
+ }
+ });
+ },
+ );
+
+ fn add_module_def(
+ db: &dyn DefDatabase,
+ map: &mut DynMap,
+ file_id: HirFileId,
+ item: ModuleDefId,
+ ) {
+ macro_rules! insert {
+ ($map:ident[$key:path].$insert:ident($id:ident)) => {{
+ let loc = $id.lookup(db);
+ if loc.id.file_id() == file_id {
+ $map[$key].$insert(loc.source(db).value, $id)
+ }
+ }};
+ }
+ match item {
+ ModuleDefId::FunctionId(id) => insert!(map[keys::FUNCTION].insert(id)),
+ ModuleDefId::ConstId(id) => insert!(map[keys::CONST].insert(id)),
+ ModuleDefId::StaticId(id) => insert!(map[keys::STATIC].insert(id)),
+ ModuleDefId::TypeAliasId(id) => insert!(map[keys::TYPE_ALIAS].insert(id)),
+ ModuleDefId::TraitId(id) => insert!(map[keys::TRAIT].insert(id)),
+ ModuleDefId::AdtId(adt) => match adt {
+ AdtId::StructId(id) => insert!(map[keys::STRUCT].insert(id)),
+ AdtId::UnionId(id) => insert!(map[keys::UNION].insert(id)),
+ AdtId::EnumId(id) => insert!(map[keys::ENUM].insert(id)),
+ },
+ ModuleDefId::MacroId(id) => match id {
+ MacroId::Macro2Id(id) => insert!(map[keys::MACRO2].insert(id)),
+ MacroId::MacroRulesId(id) => insert!(map[keys::MACRO_RULES].insert(id)),
+ MacroId::ProcMacroId(id) => insert!(map[keys::PROC_MACRO].insert(id)),
+ },
+ ModuleDefId::ModuleId(_)
+ | ModuleDefId::EnumVariantId(_)
+ | ModuleDefId::BuiltinType(_) => (),
+ }
+ }
+ fn add_impl(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, imp: ImplId) {
+ let loc = imp.lookup(db);
+ if loc.id.file_id() == file_id {
+ map[keys::IMPL].insert(loc.source(db).value, imp)
+ }
+ }
+ }
+}
+
+impl ChildBySource for VariantId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, _: HirFileId) {
+ let arena_map = self.child_source(db);
+ let arena_map = arena_map.as_ref();
+ let parent = *self;
+ for (local_id, source) in arena_map.value.iter() {
+ let id = FieldId { parent, local_id };
+ match source.clone() {
+ Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
+ Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
+ }
+ }
+ }
+}
+
+impl ChildBySource for EnumId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, _: HirFileId) {
+ let arena_map = self.child_source(db);
+ let arena_map = arena_map.as_ref();
+ for (local_id, source) in arena_map.value.iter() {
+ let id = EnumVariantId { parent: *self, local_id };
+ res[keys::VARIANT].insert(source.clone(), id)
+ }
+ }
+}
+
+impl ChildBySource for DefWithBodyId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let body = db.body(*self);
+ for (_, def_map) in body.blocks(db) {
+ // All block expressions are merged into the same map, because they logically all add
+ // inner items to the containing `DefWithBodyId`.
+ def_map[def_map.root()].scope.child_by_source_to(db, res, file_id);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
new file mode 100644
index 000000000..35c870895
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -0,0 +1,579 @@
+//! Contains basic data about various HIR declarations.
+
+use std::sync::Arc;
+
+use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind};
+use smallvec::SmallVec;
+use syntax::ast;
+
+use crate::{
+ attr::Attrs,
+ body::{Expander, Mark},
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
+ nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap},
+ type_ref::{TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
+ Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
+ StaticId, TraitId, TypeAliasId, TypeAliasLoc,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FunctionData {
+ pub name: Name,
+ pub params: Vec<(Option<Name>, Interned<TypeRef>)>,
+ pub ret_type: Interned<TypeRef>,
+ pub async_ret_type: Option<Interned<TypeRef>>,
+ pub attrs: Attrs,
+ pub visibility: RawVisibility,
+ pub abi: Option<Interned<str>>,
+ pub legacy_const_generics_indices: Box<[u32]>,
+ flags: FnFlags,
+}
+
+impl FunctionData {
+ pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
+ let loc = func.lookup(db);
+ let krate = loc.container.module(db).krate;
+ let crate_graph = db.crate_graph();
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let item_tree = loc.id.item_tree(db);
+ let func = &item_tree[loc.id.value];
+ let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+ db.trait_data(trait_id).visibility.clone()
+ } else {
+ item_tree[func.visibility].clone()
+ };
+
+ let enabled_params = func
+ .params
+ .clone()
+ .filter(|&param| item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options));
+
+ // If last cfg-enabled param is a `...` param, it's a varargs function.
+ let is_varargs = enabled_params
+ .clone()
+ .next_back()
+ .map_or(false, |param| matches!(item_tree[param], Param::Varargs));
+
+ let mut flags = func.flags;
+ if is_varargs {
+ flags |= FnFlags::IS_VARARGS;
+ }
+ if flags.contains(FnFlags::HAS_SELF_PARAM) {
+ // If there's a self param in the syntax, but it is cfg'd out, remove the flag.
+ let is_cfgd_out = match func.params.clone().next() {
+ Some(param) => {
+ !item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options)
+ }
+ None => {
+ stdx::never!("fn HAS_SELF_PARAM but no parameters allocated");
+ true
+ }
+ };
+ if is_cfgd_out {
+ cov_mark::hit!(cfgd_out_self_param);
+ flags.remove(FnFlags::HAS_SELF_PARAM);
+ }
+ }
+
+ let legacy_const_generics_indices = item_tree
+ .attrs(db, krate, ModItem::from(loc.id.value).into())
+ .by_key("rustc_legacy_const_generics")
+ .tt_values()
+ .next()
+ .map(parse_rustc_legacy_const_generics)
+ .unwrap_or_default();
+
+ Arc::new(FunctionData {
+ name: func.name.clone(),
+ params: enabled_params
+ .clone()
+ .filter_map(|id| match &item_tree[id] {
+ Param::Normal(name, ty) => Some((name.clone(), ty.clone())),
+ Param::Varargs => None,
+ })
+ .collect(),
+ ret_type: func.ret_type.clone(),
+ async_ret_type: func.async_ret_type.clone(),
+ attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),
+ visibility,
+ abi: func.abi.clone(),
+ legacy_const_generics_indices,
+ flags,
+ })
+ }
+
+ pub fn has_body(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_BODY)
+ }
+
+ /// True if the first param is `self`. This is relevant to decide whether this
+ /// can be called as a method.
+ pub fn has_self_param(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_SELF_PARAM)
+ }
+
+ pub fn has_default_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_DEFAULT_KW)
+ }
+
+ pub fn has_const_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_CONST_KW)
+ }
+
+ pub fn has_async_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_ASYNC_KW)
+ }
+
+ pub fn has_unsafe_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_UNSAFE_KW)
+ }
+
+ pub fn is_varargs(&self) -> bool {
+ self.flags.contains(FnFlags::IS_VARARGS)
+ }
+}
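The varargs determination above boils down to "is the last cfg-enabled parameter the `...` marker?". Stripped of the item-tree and cfg plumbing, the check reduces to the following standalone sketch (the `Param` enum here is a simplified stand-in, not the real `item_tree::Param`):

    // Stand-in for `item_tree::Param`: either a normal parameter or the `...` marker.
    #[derive(Debug)]
    enum Param {
        Normal(&'static str),
        Varargs,
    }

    // A function is varargs iff its last cfg-enabled parameter is `...`.
    fn is_varargs(enabled_params: &[Param]) -> bool {
        matches!(enabled_params.last(), Some(Param::Varargs))
    }

    fn main() {
        // e.g. `unsafe extern "C" fn printf(fmt: *const c_char, ...) -> c_int`
        let params = [Param::Normal("fmt"), Param::Varargs];
        assert!(is_varargs(&params));
        assert!(!is_varargs(&[Param::Normal("x")]));
    }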
+
+fn parse_rustc_legacy_const_generics(tt: &tt::Subtree) -> Box<[u32]> {
+ let mut indices = Vec::new();
+ for args in tt.token_trees.chunks(2) {
+ match &args[0] {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() {
+ Ok(index) => indices.push(index),
+ Err(_) => break,
+ },
+ _ => break,
+ }
+
+ if let Some(comma) = args.get(1) {
+ match comma {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
+ _ => break,
+ }
+ }
+ }
+
+ indices.into_boxed_slice()
+}
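The token tree consumed above comes from an attribute of the form `#[rustc_legacy_const_generics(1, 3)]` (the indices shown are illustrative). A self-contained sketch of the same chunked parsing strategy, using a stand-in `Tok` enum instead of the real `tt::TokenTree`, looks like this:

    // Walk a flat token list in (literal, separator) pairs and stop at the
    // first unexpected token, mirroring the function above.
    #[derive(Debug)]
    enum Tok {
        Lit(String),
        Punct(char),
    }

    fn parse_indices(tokens: &[Tok]) -> Box<[u32]> {
        let mut indices = Vec::new();
        for pair in tokens.chunks(2) {
            match &pair[0] {
                Tok::Lit(text) => match text.parse() {
                    Ok(index) => indices.push(index),
                    Err(_) => break,
                },
                _ => break,
            }
            if let Some(sep) = pair.get(1) {
                if !matches!(sep, Tok::Punct(',')) {
                    break;
                }
            }
        }
        indices.into_boxed_slice()
    }

    fn main() {
        // Token-level shape of `#[rustc_legacy_const_generics(1, 3)]`.
        let toks = [Tok::Lit("1".into()), Tok::Punct(','), Tok::Lit("3".into())];
        assert_eq!(&*parse_indices(&toks), &[1u32, 3]);
    }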
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAliasData {
+ pub name: Name,
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub visibility: RawVisibility,
+ pub is_extern: bool,
+ /// Bounds restricting the type alias itself (e.g. `type Ty: Bound;` in a trait or impl).
+ pub bounds: Vec<Interned<TypeBound>>,
+}
+
+impl TypeAliasData {
+ pub(crate) fn type_alias_data_query(
+ db: &dyn DefDatabase,
+ typ: TypeAliasId,
+ ) -> Arc<TypeAliasData> {
+ let loc = typ.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let typ = &item_tree[loc.id.value];
+ let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+ db.trait_data(trait_id).visibility.clone()
+ } else {
+ item_tree[typ.visibility].clone()
+ };
+
+ Arc::new(TypeAliasData {
+ name: typ.name.clone(),
+ type_ref: typ.type_ref.clone(),
+ visibility,
+ is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
+ bounds: typ.bounds.to_vec(),
+ })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TraitData {
+ pub name: Name,
+ pub items: Vec<(Name, AssocItemId)>,
+ pub is_auto: bool,
+ pub is_unsafe: bool,
+ pub visibility: RawVisibility,
+ /// Whether the trait has `#[rustc_skip_array_during_method_dispatch]`. `hir_ty` will ignore
+ /// method calls to this trait's methods when the receiver is an array and the crate edition is
+ /// 2015 or 2018.
+ pub skip_array_during_method_dispatch: bool,
+ // Boxed, as the Vec is usually empty anyway.
+ pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl TraitData {
+ pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
+ let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
+ let item_tree = tree_id.item_tree(db);
+ let tr_def = &item_tree[tree_id.value];
+ let _cx = stdx::panic_context::enter(format!(
+ "trait_data_query({:?} -> {:?} -> {:?})",
+ tr, tr_loc, tr_def
+ ));
+ let name = tr_def.name.clone();
+ let is_auto = tr_def.is_auto;
+ let is_unsafe = tr_def.is_unsafe;
+ let visibility = item_tree[tr_def.visibility].clone();
+ let skip_array_during_method_dispatch = item_tree
+ .attrs(db, module_id.krate(), ModItem::from(tree_id.value).into())
+ .by_key("rustc_skip_array_during_method_dispatch")
+ .exists();
+
+ let mut collector =
+ AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
+ collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
+ let (items, attribute_calls) = collector.finish();
+
+ Arc::new(TraitData {
+ name,
+ attribute_calls,
+ items,
+ is_auto,
+ is_unsafe,
+ visibility,
+ skip_array_during_method_dispatch,
+ })
+ }
+
+ pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
+ self.items.iter().filter_map(|(_name, item)| match item {
+ AssocItemId::TypeAliasId(t) => Some(*t),
+ _ => None,
+ })
+ }
+
+ pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
+ self.items.iter().find_map(|(item_name, item)| match item {
+ AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
+ _ => None,
+ })
+ }
+
+ pub fn method_by_name(&self, name: &Name) -> Option<FunctionId> {
+ self.items.iter().find_map(|(item_name, item)| match item {
+ AssocItemId::FunctionId(t) if item_name == name => Some(*t),
+ _ => None,
+ })
+ }
+
+ pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ImplData {
+ pub target_trait: Option<Interned<TraitRef>>,
+ pub self_ty: Interned<TypeRef>,
+ pub items: Vec<AssocItemId>,
+ pub is_negative: bool,
+ // Boxed, as the Vec is usually empty anyway.
+ pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl ImplData {
+ pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
+ let _p = profile::span("impl_data_query");
+ let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
+
+ let item_tree = tree_id.item_tree(db);
+ let impl_def = &item_tree[tree_id.value];
+ let target_trait = impl_def.target_trait.clone();
+ let self_ty = impl_def.self_ty.clone();
+ let is_negative = impl_def.is_negative;
+
+ let mut collector =
+ AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
+ collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
+
+ let (items, attribute_calls) = collector.finish();
+ let items = items.into_iter().map(|(_, item)| item).collect();
+
+ Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls })
+ }
+
+ pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Macro2Data {
+ pub name: Name,
+ pub visibility: RawVisibility,
+}
+
+impl Macro2Data {
+ pub(crate) fn macro2_data_query(db: &dyn DefDatabase, makro: Macro2Id) -> Arc<Macro2Data> {
+ let loc = makro.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+
+ Arc::new(Macro2Data {
+ name: makro.name.clone(),
+ visibility: item_tree[makro.visibility].clone(),
+ })
+ }
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct MacroRulesData {
+ pub name: Name,
+ pub macro_export: bool,
+}
+
+impl MacroRulesData {
+ pub(crate) fn macro_rules_data_query(
+ db: &dyn DefDatabase,
+ makro: MacroRulesId,
+ ) -> Arc<MacroRulesData> {
+ let loc = makro.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+
+ let macro_export = item_tree
+ .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+ .by_key("macro_export")
+ .exists();
+
+ Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
+ }
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ProcMacroData {
+ pub name: Name,
+ /// Derive helpers, if this is a derive
+ pub helpers: Option<Box<[Name]>>,
+}
+
+impl ProcMacroData {
+ pub(crate) fn proc_macro_data_query(
+ db: &dyn DefDatabase,
+ makro: ProcMacroId,
+ ) -> Arc<ProcMacroData> {
+ let loc = makro.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+
+ let (name, helpers) = if let Some(def) = item_tree
+ .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+ .parse_proc_macro_decl(&makro.name)
+ {
+ (
+ def.name,
+ match def.kind {
+ ProcMacroKind::CustomDerive { helpers } => Some(helpers),
+ ProcMacroKind::FnLike | ProcMacroKind::Attr => None,
+ },
+ )
+ } else {
+ // eeeh...
+ stdx::never!("proc macro declaration is not a proc macro");
+ (makro.name.clone(), None)
+ };
+ Arc::new(ProcMacroData { name, helpers })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ConstData {
+ /// `None` for `const _: () = ();`
+ pub name: Option<Name>,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibility,
+}
+
+impl ConstData {
+ pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc<ConstData> {
+ let loc = konst.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let konst = &item_tree[loc.id.value];
+ let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+ db.trait_data(trait_id).visibility.clone()
+ } else {
+ item_tree[konst.visibility].clone()
+ };
+
+ Arc::new(ConstData {
+ name: konst.name.clone(),
+ type_ref: konst.type_ref.clone(),
+ visibility,
+ })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct StaticData {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibility,
+ pub mutable: bool,
+ pub is_extern: bool,
+}
+
+impl StaticData {
+ pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc<StaticData> {
+ let loc = konst.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let statik = &item_tree[loc.id.value];
+
+ Arc::new(StaticData {
+ name: statik.name.clone(),
+ type_ref: statik.type_ref.clone(),
+ visibility: item_tree[statik.visibility].clone(),
+ mutable: statik.mutable,
+ is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
+ })
+ }
+}
+
+struct AssocItemCollector<'a> {
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ def_map: Arc<DefMap>,
+ container: ItemContainerId,
+ expander: Expander,
+
+ items: Vec<(Name, AssocItemId)>,
+ attr_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
+}
+
+impl<'a> AssocItemCollector<'a> {
+ fn new(
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ file_id: HirFileId,
+ container: ItemContainerId,
+ ) -> Self {
+ Self {
+ db,
+ module_id,
+ def_map: module_id.def_map(db),
+ container,
+ expander: Expander::new(db, file_id, module_id),
+ items: Vec::new(),
+ attr_calls: Vec::new(),
+ }
+ }
+
+ fn finish(
+ self,
+ ) -> (Vec<(Name, AssocItemId)>, Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>) {
+ (
+ self.items,
+ if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) },
+ )
+ }
+
+ // FIXME: proc-macro diagnostics
+ fn collect(&mut self, item_tree: &ItemTree, tree_id: TreeId, assoc_items: &[AssocItem]) {
+ let container = self.container;
+ self.items.reserve(assoc_items.len());
+
+ 'items: for &item in assoc_items {
+ let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
+ if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
+ continue;
+ }
+
+ 'attrs: for attr in &*attrs {
+ let ast_id =
+ AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
+ let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
+
+ if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
+ self.db,
+ self.module_id.local_id,
+ ast_id_with_path,
+ attr,
+ ) {
+ self.attr_calls.push((ast_id, call_id));
+ // If proc attribute macro expansion is disabled, skip expanding it here
+ if !self.db.enable_proc_attr_macros() {
+ continue 'attrs;
+ }
+ let loc = self.db.lookup_intern_macro_call(call_id);
+ if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+ // If there's no expander for the proc macro (e.g. the
+ // proc macro is ignored, or building the proc macro
+ // crate failed), skip expansion like we would if it was
+ // disabled. This is analogous to the handling in
+ // `DefCollector::collect_macros`.
+ if exp.is_dummy() {
+ continue 'attrs;
+ }
+ }
+ match self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id) {
+ ExpandResult { value: Some((mark, _)), .. } => {
+ self.collect_macro_items(mark);
+ continue 'items;
+ }
+ ExpandResult { .. } => {}
+ }
+ }
+ }
+
+ match item {
+ AssocItem::Function(id) => {
+ let item = &item_tree[id];
+
+ let def =
+ FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
+ self.items.push((item.name.clone(), def.into()));
+ }
+ AssocItem::Const(id) => {
+ let item = &item_tree[id];
+
+ let name = match item.name.clone() {
+ Some(name) => name,
+ None => continue,
+ };
+ let def =
+ ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
+ self.items.push((name, def.into()));
+ }
+ AssocItem::TypeAlias(id) => {
+ let item = &item_tree[id];
+
+ let def = TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }
+ .intern(self.db);
+ self.items.push((item.name.clone(), def.into()));
+ }
+ AssocItem::MacroCall(call) => {
+ if let Some(root) = self.db.parse_or_expand(self.expander.current_file_id()) {
+ let call = &item_tree[call];
+
+ let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
+ let call = ast_id_map.get(call.ast_id).to_node(&root);
+ let _cx = stdx::panic_context::enter(format!(
+ "collect_items MacroCall: {}",
+ call
+ ));
+ let res = self.expander.enter_expand::<ast::MacroItems>(self.db, call);
+
+ if let Ok(ExpandResult { value: Some((mark, _)), .. }) = res {
+ self.collect_macro_items(mark);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_macro_items(&mut self, mark: Mark) {
+ let tree_id = item_tree::TreeId::new(self.expander.current_file_id(), None);
+ let item_tree = tree_id.item_tree(self.db);
+ let iter: SmallVec<[_; 2]> =
+ item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect();
+
+ self.collect(&item_tree, tree_id, &iter);
+
+ self.expander.exit(self.db, mark);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
new file mode 100644
index 000000000..df6dcb024
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -0,0 +1,243 @@
+//! Defines database & queries for name resolution.
+use std::sync::Arc;
+
+use base_db::{salsa, CrateId, SourceDatabase, Upcast};
+use either::Either;
+use hir_expand::{db::AstDatabase, HirFileId};
+use la_arena::ArenaMap;
+use syntax::{ast, AstPtr, SmolStr};
+
+use crate::{
+ adt::{EnumData, StructData},
+ attr::{Attrs, AttrsWithOwner},
+ body::{scope::ExprScopes, Body, BodySourceMap},
+ data::{
+ ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
+ TraitData, TypeAliasData,
+ },
+ generics::GenericParams,
+ import_map::ImportMap,
+ intern::Interned,
+ item_tree::{AttrOwner, ItemTree},
+ lang_item::{LangItemTarget, LangItems},
+ nameres::DefMap,
+ visibility::{self, Visibility},
+ AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
+ ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
+ LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
+ StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc,
+ UnionId, UnionLoc, VariantId,
+};
+
+#[salsa::query_group(InternDatabaseStorage)]
+pub trait InternDatabase: SourceDatabase {
+ #[salsa::interned]
+ fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
+ #[salsa::interned]
+ fn intern_struct(&self, loc: StructLoc) -> StructId;
+ #[salsa::interned]
+ fn intern_union(&self, loc: UnionLoc) -> UnionId;
+ #[salsa::interned]
+ fn intern_enum(&self, loc: EnumLoc) -> EnumId;
+ #[salsa::interned]
+ fn intern_const(&self, loc: ConstLoc) -> ConstId;
+ #[salsa::interned]
+ fn intern_static(&self, loc: StaticLoc) -> StaticId;
+ #[salsa::interned]
+ fn intern_trait(&self, loc: TraitLoc) -> TraitId;
+ #[salsa::interned]
+ fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
+ #[salsa::interned]
+ fn intern_impl(&self, loc: ImplLoc) -> ImplId;
+ #[salsa::interned]
+ fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
+ #[salsa::interned]
+ fn intern_block(&self, loc: BlockLoc) -> BlockId;
+ #[salsa::interned]
+ fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
+ #[salsa::interned]
+ fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
+ #[salsa::interned]
+ fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
+}
+
+#[salsa::query_group(DefDatabaseStorage)]
+pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
+ #[salsa::input]
+ fn enable_proc_attr_macros(&self) -> bool;
+
+ #[salsa::invoke(ItemTree::file_item_tree_query)]
+ fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
+
+ #[salsa::invoke(crate_def_map_wait)]
+ #[salsa::transparent]
+ fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
+
+ #[salsa::invoke(DefMap::crate_def_map_query)]
+ fn crate_def_map_query(&self, krate: CrateId) -> Arc<DefMap>;
+
+ /// Computes the block-level `DefMap`, returning `None` when `block` doesn't contain any inner
+ /// items directly.
+ ///
+ /// For example:
+ ///
+ /// ```
+ /// fn f() { // (0)
+ /// { // (1)
+ /// fn inner() {}
+ /// }
+ /// }
+ /// ```
+ ///
+ /// The `block_def_map` for block 0 would return `None`, while `block_def_map` of block 1 would
+ /// return a `DefMap` containing `inner`.
+ #[salsa::invoke(DefMap::block_def_map_query)]
+ fn block_def_map(&self, block: BlockId) -> Option<Arc<DefMap>>;
+
+ #[salsa::invoke(StructData::struct_data_query)]
+ fn struct_data(&self, id: StructId) -> Arc<StructData>;
+
+ #[salsa::invoke(StructData::union_data_query)]
+ fn union_data(&self, id: UnionId) -> Arc<StructData>;
+
+ #[salsa::invoke(EnumData::enum_data_query)]
+ fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
+
+ #[salsa::invoke(ImplData::impl_data_query)]
+ fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
+
+ #[salsa::invoke(TraitData::trait_data_query)]
+ fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
+
+ #[salsa::invoke(TypeAliasData::type_alias_data_query)]
+ fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
+
+ #[salsa::invoke(FunctionData::fn_data_query)]
+ fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
+
+ #[salsa::invoke(ConstData::const_data_query)]
+ fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
+
+ #[salsa::invoke(StaticData::static_data_query)]
+ fn static_data(&self, konst: StaticId) -> Arc<StaticData>;
+
+ #[salsa::invoke(Macro2Data::macro2_data_query)]
+ fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
+
+ #[salsa::invoke(MacroRulesData::macro_rules_data_query)]
+ fn macro_rules_data(&self, makro: MacroRulesId) -> Arc<MacroRulesData>;
+
+ #[salsa::invoke(ProcMacroData::proc_macro_data_query)]
+ fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
+
+ #[salsa::invoke(Body::body_with_source_map_query)]
+ fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
+
+ #[salsa::invoke(Body::body_query)]
+ fn body(&self, def: DefWithBodyId) -> Arc<Body>;
+
+ #[salsa::invoke(ExprScopes::expr_scopes_query)]
+ fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
+
+ #[salsa::invoke(GenericParams::generic_params_query)]
+ fn generic_params(&self, def: GenericDefId) -> Interned<GenericParams>;
+
+ #[salsa::invoke(Attrs::variants_attrs_query)]
+ fn variants_attrs(&self, def: EnumId) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>>;
+
+ #[salsa::invoke(Attrs::fields_attrs_query)]
+ fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
+
+ #[salsa::invoke(crate::attr::variants_attrs_source_map)]
+ fn variants_attrs_source_map(
+ &self,
+ def: EnumId,
+ ) -> Arc<ArenaMap<LocalEnumVariantId, AstPtr<ast::Variant>>>;
+
+ #[salsa::invoke(crate::attr::fields_attrs_source_map)]
+ fn fields_attrs_source_map(
+ &self,
+ def: VariantId,
+ ) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>;
+
+ #[salsa::invoke(AttrsWithOwner::attrs_query)]
+ fn attrs(&self, def: AttrDefId) -> AttrsWithOwner;
+
+ #[salsa::invoke(LangItems::crate_lang_items_query)]
+ fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
+
+ #[salsa::invoke(LangItems::lang_item_query)]
+ fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option<LangItemTarget>;
+
+ #[salsa::invoke(ImportMap::import_map_query)]
+ fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
+
+ #[salsa::invoke(visibility::field_visibilities_query)]
+ fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
+
+ // FIXME: unify function_visibility and const_visibility?
+ #[salsa::invoke(visibility::function_visibility_query)]
+ fn function_visibility(&self, def: FunctionId) -> Visibility;
+
+ #[salsa::invoke(visibility::const_visibility_query)]
+ fn const_visibility(&self, def: ConstId) -> Visibility;
+
+ #[salsa::transparent]
+ fn crate_limits(&self, crate_id: CrateId) -> CrateLimits;
+
+ fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
+}
+
+fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
+ let _p = profile::span("crate_def_map:wait");
+ db.crate_def_map_query(krate)
+}
+
+pub struct CrateLimits {
+ /// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference.
+ pub recursion_limit: u32,
+}
+
+fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits {
+ let def_map = db.crate_def_map(crate_id);
+
+ CrateLimits {
+ // 128 is the default in rustc.
+ recursion_limit: def_map.recursion_limit().unwrap_or(128),
+ }
+}
+
+fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
+ let file = db.crate_graph()[crate_id].root_file_id;
+ let item_tree = db.file_item_tree(file.into());
+ let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
+ for attr in &**attrs {
+ match attr.path().as_ident().and_then(|id| id.as_text()) {
+ Some(ident) if ident == "no_std" => return true,
+ Some(ident) if ident == "cfg_attr" => {}
+ _ => continue,
+ }
+
+ // This is a `cfg_attr`; check if it could possibly expand to `no_std`.
+ // Syntax is: `#[cfg_attr(condition, attr0, attr1, <...>)]`
+ let tt = match attr.token_tree_value() {
+ Some(tt) => &tt.token_trees,
+ None => continue,
+ };
+
+ let segments = tt.split(|tt| match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => true,
+ _ => false,
+ });
+ for output in segments.skip(1) {
+ match output {
+ [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => {
+ return true
+ }
+ _ => {}
+ }
+ }
+ }
+
+ false
+}
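The two attribute shapes the query above recognizes are a plain `#![no_std]` and a conditional `#![cfg_attr(..., no_std)]`. A self-contained sketch of the same comma-splitting check on a flattened token list (using a stand-in `Tok` type rather than the real `tt` crate, and collapsing nested groups for brevity) might look like this:

    // Stand-in for `tt::TokenTree`: either an identifier or a punctuation character.
    #[derive(Debug, PartialEq)]
    enum Tok {
        Ident(&'static str),
        Punct(char),
    }

    // Returns true if any comma-separated segment after the first (the cfg
    // condition itself) consists of exactly the identifier `no_std`.
    fn cfg_attr_mentions_no_std(tokens: &[Tok]) -> bool {
        tokens
            .split(|t| matches!(t, Tok::Punct(',')))
            .skip(1) // skip the condition
            .any(|segment| matches!(segment, [Tok::Ident("no_std")]))
    }

    fn main() {
        // Token-level shape of `cfg_attr(not(test), no_std)`, with the inner
        // `not(test)` group flattened in this sketch.
        let toks = [
            Tok::Ident("not"),
            Tok::Ident("test"),
            Tok::Punct(','),
            Tok::Ident("no_std"),
        ];
        assert!(cfg_attr_mentions_no_std(&toks));
    }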
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
new file mode 100644
index 000000000..166aa04da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
@@ -0,0 +1,116 @@
+//! This module defines a `DynMap` -- a container for heterogeneous maps.
+//!
+//! This means that `DynMap` stores a bunch of hash maps inside, and those maps
+//! can be of different types.
+//!
+//! It is used like this:
+//!
+//! ```
+//! // keys define submaps of a `DynMap`
+//! const STRING_TO_U32: Key<String, u32> = Key::new();
+//! const U32_TO_VEC: Key<u32, Vec<bool>> = Key::new();
+//!
+//! // Note: concrete type, no type params!
+//! let mut map = DynMap::new();
+//!
+//! // To access a specific map, index the `DynMap` by `Key`:
+//! map[STRING_TO_U32].insert("hello".to_string(), 92);
+//! let value = map[U32_TO_VEC].get(&92);
+//! assert!(value.is_none());
+//! ```
+//!
+//! This is a work of fiction. Any similarities to Kotlin's `BindingContext` are
+//! a coincidence.
+use std::{
+ hash::Hash,
+ marker::PhantomData,
+ ops::{Index, IndexMut},
+};
+
+use anymap::Map;
+use rustc_hash::FxHashMap;
+
+pub struct Key<K, V, P = (K, V)> {
+ _phantom: PhantomData<(K, V, P)>,
+}
+
+impl<K, V, P> Key<K, V, P> {
+ pub(crate) const fn new() -> Key<K, V, P> {
+ Key { _phantom: PhantomData }
+ }
+}
+
+impl<K, V, P> Copy for Key<K, V, P> {}
+
+impl<K, V, P> Clone for Key<K, V, P> {
+ fn clone(&self) -> Key<K, V, P> {
+ *self
+ }
+}
+
+pub trait Policy {
+ type K;
+ type V;
+
+ fn insert(map: &mut DynMap, key: Self::K, value: Self::V);
+ fn get<'a>(map: &'a DynMap, key: &Self::K) -> Option<&'a Self::V>;
+ fn is_empty(map: &DynMap) -> bool;
+}
+
+impl<K: Hash + Eq + 'static, V: 'static> Policy for (K, V) {
+ type K = K;
+ type V = V;
+ fn insert(map: &mut DynMap, key: K, value: V) {
+ map.map.entry::<FxHashMap<K, V>>().or_insert_with(Default::default).insert(key, value);
+ }
+ fn get<'a>(map: &'a DynMap, key: &K) -> Option<&'a V> {
+ map.map.get::<FxHashMap<K, V>>()?.get(key)
+ }
+ fn is_empty(map: &DynMap) -> bool {
+ map.map.get::<FxHashMap<K, V>>().map_or(true, |it| it.is_empty())
+ }
+}
+
+pub struct DynMap {
+ pub(crate) map: Map,
+}
+
+impl Default for DynMap {
+ fn default() -> Self {
+ DynMap { map: Map::new() }
+ }
+}
+
+#[repr(transparent)]
+pub struct KeyMap<KEY> {
+ map: DynMap,
+ _phantom: PhantomData<KEY>,
+}
+
+impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
+ pub fn insert(&mut self, key: P::K, value: P::V) {
+ P::insert(&mut self.map, key, value)
+ }
+ pub fn get(&self, key: &P::K) -> Option<&P::V> {
+ P::get(&self.map, key)
+ }
+
+ pub fn is_empty(&self) -> bool {
+ P::is_empty(&self.map)
+ }
+}
+
+impl<P: Policy> Index<Key<P::K, P::V, P>> for DynMap {
+ type Output = KeyMap<Key<P::K, P::V, P>>;
+ fn index(&self, _key: Key<P::K, P::V, P>) -> &Self::Output {
+ // Safe due to `#[repr(transparent)]`.
+ unsafe { std::mem::transmute::<&DynMap, &KeyMap<Key<P::K, P::V, P>>>(self) }
+ }
+}
+
+impl<P: Policy> IndexMut<Key<P::K, P::V, P>> for DynMap {
+ fn index_mut(&mut self, _key: Key<P::K, P::V, P>) -> &mut Self::Output {
+ // Safe due to `#[repr(transparent)]`.
+ unsafe { std::mem::transmute::<&mut DynMap, &mut KeyMap<Key<P::K, P::V, P>>>(self) }
+ }
+}
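The `Index`/`IndexMut` impls above rely on `KeyMap` being a `#[repr(transparent)]` wrapper around `DynMap`, so a reference to one can be reinterpreted as a reference to the other. A stripped-down, self-contained version of that wrapper trick (with a single fixed `HashMap` standing in for `anymap`, and hypothetical names throughout) is sketched below:

    use std::collections::HashMap;
    use std::ops::Index;

    struct Store {
        strings: HashMap<String, u32>,
    }

    // Zero-sized "key" selecting a typed view of the store.
    struct StringToU32;

    // Transparent wrapper: same layout as `Store`, so the pointer cast is sound.
    #[repr(transparent)]
    struct View(Store);

    impl View {
        fn get(&self, key: &str) -> Option<u32> {
            self.0.strings.get(key).copied()
        }
    }

    impl Index<StringToU32> for Store {
        type Output = View;
        fn index(&self, _key: StringToU32) -> &View {
            // Safe because `View` is `#[repr(transparent)]` over `Store`.
            unsafe { &*(self as *const Store as *const View) }
        }
    }

    fn main() {
        let mut store = Store { strings: HashMap::new() };
        store.strings.insert("hello".to_string(), 92);
        assert_eq!(store[StringToU32].get("hello"), Some(92));
    }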
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
new file mode 100644
index 000000000..c1b3788ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
@@ -0,0 +1,444 @@
+//! This module describes hir-level representation of expressions.
+//!
+//! This representation is:
+//!
+//! 1. Identity-based. Each expression has an `id`, so we can distinguish
+//! between different `1` in `1 + 1`.
+//! 2. Independent of syntax. Though syntactic provenance information can be
+//! attached separately via an id-based side map.
+//! 3. Unresolved. Paths are stored as sequences of names, and not as defs the
+//! names refer to.
+//! 4. Desugared. There's no `if let`.
+//!
+//! See also a neighboring `body` module.
+
+use hir_expand::name::Name;
+use la_arena::{Idx, RawIdx};
+
+use crate::{
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
+ intern::Interned,
+ path::{GenericArgs, Path},
+ type_ref::{Mutability, Rawness, TypeRef},
+ BlockId,
+};
+
+pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp};
+
+pub type ExprId = Idx<Expr>;
+
+/// FIXME: this is a hacky function which should be removed
+pub(crate) fn dummy_expr_id() -> ExprId {
+ ExprId::from_raw(RawIdx::from(u32::MAX))
+}
+
+pub type PatId = Idx<Pat>;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Label {
+ pub name: Name,
+}
+pub type LabelId = Idx<Label>;
+
+// We convert float values into bits so that we don't need to deal with f32 and f64 directly.
+// For PartialEq, comparing the bits works, as ordering is not important:
+// https://github.com/rust-lang/rust-analyzer/issues/12380#issuecomment-1137284360
+#[derive(Default, Debug, Clone, Eq, PartialEq)]
+pub struct FloatTypeWrapper(u64);
+
+impl FloatTypeWrapper {
+ pub fn new(value: f64) -> Self {
+ Self(value.to_bits())
+ }
+}
+
+impl std::fmt::Display for FloatTypeWrapper {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{:?}", f64::from_bits(self.0))
+ }
+}
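As a quick standalone illustration (not part of this file) of what bit-level equality buys, and where it differs from `f64`'s own `==`:

    // Minimal reimplementation of the bit-based wrapper to show its equality
    // semantics: identical bit patterns compare equal, and no Ord is required.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FloatBits(u64);

    impl FloatBits {
        fn new(value: f64) -> Self {
            Self(value.to_bits())
        }
    }

    fn main() {
        assert_eq!(FloatBits::new(1.5), FloatBits::new(1.5));
        // 0.0 and -0.0 are == as floats but have different bit patterns,
        // so the wrapper treats them as distinct literals.
        assert_ne!(FloatBits::new(0.0), FloatBits::new(-0.0));
        // NaN != NaN as floats, but a literal's bit pattern is stable,
        // so the same NaN constant compares equal to itself here.
        assert_eq!(FloatBits::new(f64::NAN), FloatBits::new(f64::NAN));
    }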
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Literal {
+ String(Box<str>),
+ ByteString(Box<[u8]>),
+ Char(char),
+ Bool(bool),
+ Int(i128, Option<BuiltinInt>),
+ Uint(u128, Option<BuiltinUint>),
+ // Here we use a wrapper around the float bits because f32 and f64 do not implement Eq,
+ // so they cannot be used directly; see the definition of FloatTypeWrapper above for how
+ // the wrapper works.
+ Float(FloatTypeWrapper, Option<BuiltinFloat>),
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Expr {
+ /// This is produced if the syntax tree does not have a required expression piece.
+ Missing,
+ Path(Path),
+ If {
+ condition: ExprId,
+ then_branch: ExprId,
+ else_branch: Option<ExprId>,
+ },
+ Let {
+ pat: PatId,
+ expr: ExprId,
+ },
+ Block {
+ id: BlockId,
+ statements: Box<[Statement]>,
+ tail: Option<ExprId>,
+ label: Option<LabelId>,
+ },
+ Loop {
+ body: ExprId,
+ label: Option<LabelId>,
+ },
+ While {
+ condition: ExprId,
+ body: ExprId,
+ label: Option<LabelId>,
+ },
+ For {
+ iterable: ExprId,
+ pat: PatId,
+ body: ExprId,
+ label: Option<LabelId>,
+ },
+ Call {
+ callee: ExprId,
+ args: Box<[ExprId]>,
+ is_assignee_expr: bool,
+ },
+ MethodCall {
+ receiver: ExprId,
+ method_name: Name,
+ args: Box<[ExprId]>,
+ generic_args: Option<Box<GenericArgs>>,
+ },
+ Match {
+ expr: ExprId,
+ arms: Box<[MatchArm]>,
+ },
+ Continue {
+ label: Option<Name>,
+ },
+ Break {
+ expr: Option<ExprId>,
+ label: Option<Name>,
+ },
+ Return {
+ expr: Option<ExprId>,
+ },
+ Yield {
+ expr: Option<ExprId>,
+ },
+ RecordLit {
+ path: Option<Box<Path>>,
+ fields: Box<[RecordLitField]>,
+ spread: Option<ExprId>,
+ ellipsis: bool,
+ is_assignee_expr: bool,
+ },
+ Field {
+ expr: ExprId,
+ name: Name,
+ },
+ Await {
+ expr: ExprId,
+ },
+ Try {
+ expr: ExprId,
+ },
+ TryBlock {
+ body: ExprId,
+ },
+ Async {
+ body: ExprId,
+ },
+ Const {
+ body: ExprId,
+ },
+ Cast {
+ expr: ExprId,
+ type_ref: Interned<TypeRef>,
+ },
+ Ref {
+ expr: ExprId,
+ rawness: Rawness,
+ mutability: Mutability,
+ },
+ Box {
+ expr: ExprId,
+ },
+ UnaryOp {
+ expr: ExprId,
+ op: UnaryOp,
+ },
+ BinaryOp {
+ lhs: ExprId,
+ rhs: ExprId,
+ op: Option<BinaryOp>,
+ },
+ Range {
+ lhs: Option<ExprId>,
+ rhs: Option<ExprId>,
+ range_type: RangeOp,
+ },
+ Index {
+ base: ExprId,
+ index: ExprId,
+ },
+ Closure {
+ args: Box<[PatId]>,
+ arg_types: Box<[Option<Interned<TypeRef>>]>,
+ ret_type: Option<Interned<TypeRef>>,
+ body: ExprId,
+ },
+ Tuple {
+ exprs: Box<[ExprId]>,
+ is_assignee_expr: bool,
+ },
+ Unsafe {
+ body: ExprId,
+ },
+ MacroStmts {
+ statements: Box<[Statement]>,
+ tail: Option<ExprId>,
+ },
+ Array(Array),
+ Literal(Literal),
+ Underscore,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Array {
+ ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool },
+ Repeat { initializer: ExprId, repeat: ExprId },
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MatchArm {
+ pub pat: PatId,
+ pub guard: Option<ExprId>,
+ pub expr: ExprId,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct RecordLitField {
+ pub name: Name,
+ pub expr: ExprId,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Statement {
+ Let {
+ pat: PatId,
+ type_ref: Option<Interned<TypeRef>>,
+ initializer: Option<ExprId>,
+ else_branch: Option<ExprId>,
+ },
+ Expr {
+ expr: ExprId,
+ has_semi: bool,
+ },
+}
+
+impl Expr {
+ pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) {
+ match self {
+ Expr::Missing => {}
+ Expr::Path(_) => {}
+ Expr::If { condition, then_branch, else_branch } => {
+ f(*condition);
+ f(*then_branch);
+ if let &Some(else_branch) = else_branch {
+ f(else_branch);
+ }
+ }
+ Expr::Let { expr, .. } => {
+ f(*expr);
+ }
+ Expr::MacroStmts { tail, statements } | Expr::Block { statements, tail, .. } => {
+ for stmt in statements.iter() {
+ match stmt {
+ Statement::Let { initializer, .. } => {
+ if let &Some(expr) = initializer {
+ f(expr);
+ }
+ }
+ Statement::Expr { expr: expression, .. } => f(*expression),
+ }
+ }
+ if let &Some(expr) = tail {
+ f(expr);
+ }
+ }
+ Expr::TryBlock { body }
+ | Expr::Unsafe { body }
+ | Expr::Async { body }
+ | Expr::Const { body } => f(*body),
+ Expr::Loop { body, .. } => f(*body),
+ Expr::While { condition, body, .. } => {
+ f(*condition);
+ f(*body);
+ }
+ Expr::For { iterable, body, .. } => {
+ f(*iterable);
+ f(*body);
+ }
+ Expr::Call { callee, args, .. } => {
+ f(*callee);
+ args.iter().copied().for_each(f);
+ }
+ Expr::MethodCall { receiver, args, .. } => {
+ f(*receiver);
+ args.iter().copied().for_each(f);
+ }
+ Expr::Match { expr, arms } => {
+ f(*expr);
+ arms.iter().map(|arm| arm.expr).for_each(f);
+ }
+ Expr::Continue { .. } => {}
+ Expr::Break { expr, .. } | Expr::Return { expr } | Expr::Yield { expr } => {
+ if let &Some(expr) = expr {
+ f(expr);
+ }
+ }
+ Expr::RecordLit { fields, spread, .. } => {
+ for field in fields.iter() {
+ f(field.expr);
+ }
+ if let &Some(expr) = spread {
+ f(expr);
+ }
+ }
+ Expr::Closure { body, .. } => {
+ f(*body);
+ }
+ Expr::BinaryOp { lhs, rhs, .. } => {
+ f(*lhs);
+ f(*rhs);
+ }
+ Expr::Range { lhs, rhs, .. } => {
+ if let &Some(lhs) = lhs {
+ f(lhs);
+ }
+ if let &Some(rhs) = rhs {
+ f(rhs);
+ }
+ }
+ Expr::Index { base, index } => {
+ f(*base);
+ f(*index);
+ }
+ Expr::Field { expr, .. }
+ | Expr::Await { expr }
+ | Expr::Try { expr }
+ | Expr::Cast { expr, .. }
+ | Expr::Ref { expr, .. }
+ | Expr::UnaryOp { expr, .. }
+ | Expr::Box { expr } => {
+ f(*expr);
+ }
+ Expr::Tuple { exprs, .. } => exprs.iter().copied().for_each(f),
+ Expr::Array(a) => match a {
+ Array::ElementList { elements, .. } => elements.iter().copied().for_each(f),
+ Array::Repeat { initializer, repeat } => {
+ f(*initializer);
+ f(*repeat)
+ }
+ },
+ Expr::Literal(_) => {}
+ Expr::Underscore => {}
+ }
+ }
+}
+
+/// Explicit binding annotations given in the HIR for a binding. Note
+/// that this is not the final binding *mode* that we infer after type
+/// inference.
+#[derive(Clone, PartialEq, Eq, Debug, Copy)]
+pub enum BindingAnnotation {
+ /// No binding annotation given: this means that the final binding mode
+ /// will depend on whether we have skipped through a `&` reference
+ /// when matching. For example, the `x` in `Some(x)` will have binding
+/// mode `Unannotated`; if you do `let Some(x) = &Some(22)`, it will
+ /// ultimately be inferred to be by-reference.
+ Unannotated,
+
+/// Annotated with `mut x` -- could be either ref or not, similar to `Unannotated`.
+ Mutable,
+
+ /// Annotated as `ref`, like `ref x`
+ Ref,
+
+ /// Annotated as `ref mut x`.
+ RefMut,
+}
+
+impl BindingAnnotation {
+ pub fn new(is_mutable: bool, is_ref: bool) -> Self {
+ match (is_mutable, is_ref) {
+ (true, true) => BindingAnnotation::RefMut,
+ (false, true) => BindingAnnotation::Ref,
+ (true, false) => BindingAnnotation::Mutable,
+ (false, false) => BindingAnnotation::Unannotated,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct RecordFieldPat {
+ pub name: Name,
+ pub pat: PatId,
+}
+
+/// A close relative of rustc's `hir::PatKind`.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Pat {
+ Missing,
+ Wild,
+ Tuple { args: Box<[PatId]>, ellipsis: Option<usize> },
+ Or(Box<[PatId]>),
+ Record { path: Option<Box<Path>>, args: Box<[RecordFieldPat]>, ellipsis: bool },
+ Range { start: ExprId, end: ExprId },
+ Slice { prefix: Box<[PatId]>, slice: Option<PatId>, suffix: Box<[PatId]> },
+ Path(Box<Path>),
+ Lit(ExprId),
+ Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> },
+ TupleStruct { path: Option<Box<Path>>, args: Box<[PatId]>, ellipsis: Option<usize> },
+ Ref { pat: PatId, mutability: Mutability },
+ Box { inner: PatId },
+ ConstBlock(ExprId),
+}
+
+impl Pat {
+ pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) {
+ match self {
+ Pat::Range { .. }
+ | Pat::Lit(..)
+ | Pat::Path(..)
+ | Pat::ConstBlock(..)
+ | Pat::Wild
+ | Pat::Missing => {}
+ Pat::Bind { subpat, .. } => {
+ subpat.iter().copied().for_each(f);
+ }
+ Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
+ args.iter().copied().for_each(f);
+ }
+ Pat::Ref { pat, .. } => f(*pat),
+ Pat::Slice { prefix, slice, suffix } => {
+ let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
+ total_iter.copied().for_each(f);
+ }
+ Pat::Record { args, .. } => {
+ args.iter().map(|f| f.pat).for_each(f);
+ }
+ Pat::Box { inner } => f(*inner),
+ }
+ }
+}
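The module documentation at the top of `expr.rs` notes that the representation is identity-based: expressions live in an arena and are referred to by `Idx`, so two syntactically identical subexpressions still have distinct ids. A minimal stand-in (a plain `Vec` with `usize` indices instead of `la_arena::Idx`, and a much smaller `Expr`) shows what that buys:

    // Stand-in arena: expressions live in a Vec and are referred to by index,
    // so the two `1` literals in `1 + 1` get distinct ids even though they
    // compare equal as values.
    #[derive(Debug, Clone, PartialEq)]
    enum Expr {
        Literal(i128),
        BinaryOp { lhs: usize, rhs: usize },
    }

    fn main() {
        let mut arena: Vec<Expr> = Vec::new();

        let lhs = arena.len();
        arena.push(Expr::Literal(1));
        let rhs = arena.len();
        arena.push(Expr::Literal(1));
        let op = arena.len();
        arena.push(Expr::BinaryOp { lhs, rhs });

        // Same value, different identity.
        assert_eq!(arena[lhs], arena[rhs]);
        assert_ne!(lhs, rhs);

        if let Expr::BinaryOp { lhs, rhs } = &arena[op] {
            assert_eq!((*lhs, *rhs), (0, 1));
        }
    }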
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
new file mode 100644
index 000000000..89e961f84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -0,0 +1,1134 @@
+//! An algorithm to find a path to refer to a certain item.
+
+use std::iter;
+
+use hir_expand::name::{known, AsName, Name};
+use rustc_hash::FxHashSet;
+
+use crate::{
+ db::DefDatabase,
+ item_scope::ItemInNs,
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ visibility::Visibility,
+ ModuleDefId, ModuleId,
+};
+
+/// Find a path that can be used to refer to a certain item. This can depend on
+/// *from where* you're referring to the item, hence the `from` parameter.
+pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
+ let _p = profile::span("find_path");
+ find_path_inner(db, item, from, None)
+}
+
+pub fn find_path_prefixed(
+ db: &dyn DefDatabase,
+ item: ItemInNs,
+ from: ModuleId,
+ prefix_kind: PrefixKind,
+) -> Option<ModPath> {
+ let _p = profile::span("find_path_prefixed");
+ find_path_inner(db, item, from, Some(prefix_kind))
+}
+
+const MAX_PATH_LEN: usize = 15;
+
+trait ModPathExt {
+ fn starts_with_std(&self) -> bool;
+ fn can_start_with_std(&self) -> bool;
+}
+
+impl ModPathExt for ModPath {
+ fn starts_with_std(&self) -> bool {
+ self.segments().first() == Some(&known::std)
+ }
+
+ // Can we replace the first segment with `std::` and still get a valid, identical path?
+ fn can_start_with_std(&self) -> bool {
+ let first_segment = self.segments().first();
+ first_segment == Some(&known::alloc) || first_segment == Some(&known::core)
+ }
+}
+
+fn check_self_super(def_map: &DefMap, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
+ if item == ItemInNs::Types(from.into()) {
+ // - if the item is the module we're in, use `self`
+ Some(ModPath::from_segments(PathKind::Super(0), None))
+ } else if let Some(parent_id) = def_map[from.local_id].parent {
+ // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly)
+ let parent_id = def_map.module_id(parent_id);
+ if item == ItemInNs::Types(ModuleDefId::ModuleId(parent_id)) {
+ Some(ModPath::from_segments(PathKind::Super(1), None))
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum PrefixKind {
+ /// Causes paths to always start with either `self`, `super`, `crate` or a crate-name.
+ /// This is the same as `Plain`, except that paths will have `self` prepended if the path
+ /// starts with an identifier that is not a crate.
+ BySelf,
+ /// Causes paths to ignore imports in the local module.
+ Plain,
+ /// Causes paths to start with `crate` where applicable, effectively forcing paths to be absolute.
+ ByCrate,
+}
+
+impl PrefixKind {
+ #[inline]
+ fn prefix(self) -> PathKind {
+ match self {
+ PrefixKind::BySelf => PathKind::Super(0),
+ PrefixKind::Plain => PathKind::Plain,
+ PrefixKind::ByCrate => PathKind::Crate,
+ }
+ }
+
+ #[inline]
+ fn is_absolute(&self) -> bool {
+ self == &PrefixKind::ByCrate
+ }
+}
+/// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId
+fn find_path_inner(
+ db: &dyn DefDatabase,
+ item: ItemInNs,
+ from: ModuleId,
+ prefixed: Option<PrefixKind>,
+) -> Option<ModPath> {
+ // FIXME: Do fast path for std/core libs?
+
+ let mut visited_modules = FxHashSet::default();
+ let def_map = from.def_map(db);
+ find_path_inner_(db, &def_map, from, item, MAX_PATH_LEN, prefixed, &mut visited_modules)
+}
+
+fn find_path_inner_(
+ db: &dyn DefDatabase,
+ def_map: &DefMap,
+ from: ModuleId,
+ item: ItemInNs,
+ max_len: usize,
+ mut prefixed: Option<PrefixKind>,
+ visited_modules: &mut FxHashSet<ModuleId>,
+) -> Option<ModPath> {
+ if max_len == 0 {
+ return None;
+ }
+
+ // Base cases:
+
+ // - if the item is already in scope, return the name under which it is
+ let scope_name = def_map.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
+ def_map[local_id].scope.name_of(item).map(|(name, _)| name.clone())
+ });
+ if prefixed.is_none() {
+ if let Some(scope_name) = scope_name {
+ return Some(ModPath::from_segments(PathKind::Plain, Some(scope_name)));
+ }
+ }
+
+ // - if the item is a builtin, it's in scope
+ if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
+ return Some(ModPath::from_segments(PathKind::Plain, Some(builtin.as_name())));
+ }
+
+ // - if the item is the crate root, return `crate`
+ let crate_root = def_map.crate_root(db);
+ if item == ItemInNs::Types(ModuleDefId::ModuleId(crate_root)) {
+ return Some(ModPath::from_segments(PathKind::Crate, None));
+ }
+
+ if prefixed.filter(PrefixKind::is_absolute).is_none() {
+ if let modpath @ Some(_) = check_self_super(&def_map, item, from) {
+ return modpath;
+ }
+ }
+
+ // - if the item is the crate root of a dependency crate, return the name from the extern prelude
+ let root_def_map = crate_root.def_map(db);
+ if let ItemInNs::Types(ModuleDefId::ModuleId(item)) = item {
+ for (name, &def_id) in root_def_map.extern_prelude() {
+ if item == def_id {
+ let name = scope_name.unwrap_or_else(|| name.clone());
+
+ let name_already_occupied_in_type_ns = def_map
+ .with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
+ def_map[local_id]
+ .scope
+ .type_(&name)
+ .filter(|&(id, _)| id != ModuleDefId::ModuleId(def_id))
+ })
+ .is_some();
+ let kind = if name_already_occupied_in_type_ns {
+ cov_mark::hit!(ambiguous_crate_start);
+ PathKind::Abs
+ } else {
+ PathKind::Plain
+ };
+ return Some(ModPath::from_segments(kind, Some(name)));
+ }
+ }
+ }
+
+ // - if the item is in the prelude, return the name from there
+ if let Some(prelude_module) = root_def_map.prelude() {
+ // Preludes in block DefMaps are ignored, only the crate DefMap is searched
+ let prelude_def_map = prelude_module.def_map(db);
+ let prelude_scope = &prelude_def_map[prelude_module.local_id].scope;
+ if let Some((name, vis)) = prelude_scope.name_of(item) {
+ if vis.is_visible_from(db, from) {
+ return Some(ModPath::from_segments(PathKind::Plain, Some(name.clone())));
+ }
+ }
+ }
+
+ // Recursive case:
+ // - if the item is an enum variant, refer to it via the enum
+ if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
+ if let Some(mut path) = find_path(db, ItemInNs::Types(variant.parent.into()), from) {
+ let data = db.enum_data(variant.parent);
+ path.push_segment(data.variants[variant.local_id].name.clone());
+ return Some(path);
+ }
+ // If this doesn't work, it seems we have no way of referring to the
+ // enum; that's very weird, but there might still be a reexport of the
+ // variant somewhere
+ }
+
+ // - otherwise, look for modules containing (reexporting) it and import it from one of those
+ let prefer_no_std = db.crate_supports_no_std(crate_root.krate);
+ let mut best_path = None;
+ let mut best_path_len = max_len;
+
+ if item.krate(db) == Some(from.krate) {
+ // Item was defined in the same crate that wants to import it. It cannot be found in any
+ // dependency in this case.
+ // FIXME: this should have a fast path that doesn't look through the prelude again?
+ for (module_id, name) in find_local_import_locations(db, item, from) {
+ if !visited_modules.insert(module_id) {
+ cov_mark::hit!(recursive_imports);
+ continue;
+ }
+ if let Some(mut path) = find_path_inner_(
+ db,
+ def_map,
+ from,
+ ItemInNs::Types(ModuleDefId::ModuleId(module_id)),
+ best_path_len - 1,
+ prefixed,
+ visited_modules,
+ ) {
+ path.push_segment(name);
+
+ let new_path = match best_path {
+ Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+ None => path,
+ };
+ best_path_len = new_path.len();
+ best_path = Some(new_path);
+ }
+ }
+ } else {
+ // Item was defined in some upstream crate. This means that it must be exported from one,
+ // too (unless we can't name it at all). It could *also* be (re)exported by the same crate
+ // that wants to import it here, but we always prefer to use the external path here.
+
+ let crate_graph = db.crate_graph();
+ let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| {
+ let import_map = db.import_map(dep.crate_id);
+ import_map.import_info_for(item).and_then(|info| {
+ // Determine best path for containing module and append last segment from `info`.
+ // FIXME: we should guide this to look up the path locally, or from the same crate again?
+ let mut path = find_path_inner_(
+ db,
+ def_map,
+ from,
+ ItemInNs::Types(ModuleDefId::ModuleId(info.container)),
+ best_path_len - 1,
+ prefixed,
+ visited_modules,
+ )?;
+ cov_mark::hit!(partially_imported);
+ path.push_segment(info.path.segments.last()?.clone());
+ Some(path)
+ })
+ });
+
+ for path in extern_paths {
+ let new_path = match best_path {
+ Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+ None => path,
+ };
+ best_path = Some(new_path);
+ }
+ }
+
+ // If the item is declared inside a block expression, don't use a prefix, as we don't handle
+ // that correctly (FIXME).
+ if let Some(item_module) = item.as_module_def_id().and_then(|did| did.module(db)) {
+ if item_module.def_map(db).block_id().is_some() && prefixed.is_some() {
+ cov_mark::hit!(prefixed_in_block_expression);
+ prefixed = Some(PrefixKind::Plain);
+ }
+ }
+
+ match prefixed.map(PrefixKind::prefix) {
+ Some(prefix) => best_path.or_else(|| {
+ scope_name.map(|scope_name| ModPath::from_segments(prefix, Some(scope_name)))
+ }),
+ None => best_path,
+ }
+}
+
+fn select_best_path(old_path: ModPath, new_path: ModPath, prefer_no_std: bool) -> ModPath {
+ if old_path.starts_with_std() && new_path.can_start_with_std() {
+ if prefer_no_std {
+ cov_mark::hit!(prefer_no_std_paths);
+ new_path
+ } else {
+ cov_mark::hit!(prefer_std_paths);
+ old_path
+ }
+ } else if new_path.starts_with_std() && old_path.can_start_with_std() {
+ if prefer_no_std {
+ cov_mark::hit!(prefer_no_std_paths);
+ old_path
+ } else {
+ cov_mark::hit!(prefer_std_paths);
+ new_path
+ }
+ } else if new_path.len() < old_path.len() {
+ new_path
+ } else {
+ old_path
+ }
+}
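A self-contained illustration of the preference order implemented above, with string segments standing in for `ModPath` (enough to show the three branches: prefer `core`/`alloc` over `std` in `no_std` crates, prefer `std` otherwise, and fall back to the shorter path):

    fn select_best<'a>(old: Vec<&'a str>, new: Vec<&'a str>, prefer_no_std: bool) -> Vec<&'a str> {
        fn starts_with_std(p: &[&str]) -> bool {
            p.first() == Some(&"std")
        }
        fn can_start_with_std(p: &[&str]) -> bool {
            matches!(p.first(), Some(&"alloc") | Some(&"core"))
        }

        if starts_with_std(&old) && can_start_with_std(&new) {
            if prefer_no_std { new } else { old }
        } else if starts_with_std(&new) && can_start_with_std(&old) {
            if prefer_no_std { old } else { new }
        } else if new.len() < old.len() {
            new
        } else {
            old
        }
    }

    fn main() {
        // A `no_std` crate prefers the `core` path even though `std` re-exports the item.
        assert_eq!(
            select_best(vec!["std", "fmt", "Error"], vec!["core", "fmt", "Error"], true),
            vec!["core", "fmt", "Error"]
        );
        // Otherwise the shorter path wins.
        assert_eq!(
            select_best(vec!["a", "b", "S"], vec!["c", "S"], false),
            vec!["c", "S"]
        );
    }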
+
+/// Finds locations in `from.krate` from which `item` can be imported by `from`.
+fn find_local_import_locations(
+ db: &dyn DefDatabase,
+ item: ItemInNs,
+ from: ModuleId,
+) -> Vec<(ModuleId, Name)> {
+ let _p = profile::span("find_local_import_locations");
+
+ // `from` can import anything below `from` with visibility of at least `from`, and anything
+ // above `from` with any visibility. That means we do not need to descend into private siblings
+ // of `from` (and similar).
+
+ let def_map = from.def_map(db);
+
+ // Compute the initial worklist. We start with all direct child modules of `from` as well as all
+ // of its (recursive) parent modules.
+ let data = &def_map[from.local_id];
+ let mut worklist =
+ data.children.values().map(|child| def_map.module_id(*child)).collect::<Vec<_>>();
+ // FIXME: do we need to traverse out of block expressions here?
+ for ancestor in iter::successors(from.containing_module(db), |m| m.containing_module(db)) {
+ worklist.push(ancestor);
+ }
+
+ let def_map = def_map.crate_root(db).def_map(db);
+
+ let mut seen: FxHashSet<_> = FxHashSet::default();
+
+ let mut locations = Vec::new();
+ while let Some(module) = worklist.pop() {
+ if !seen.insert(module) {
+ continue; // already processed this module
+ }
+
+ let ext_def_map;
+ let data = if module.krate == from.krate {
+ if module.block.is_some() {
+ // Re-query the block's DefMap
+ ext_def_map = module.def_map(db);
+ &ext_def_map[module.local_id]
+ } else {
+ // Reuse the root DefMap
+ &def_map[module.local_id]
+ }
+ } else {
+ // The crate might reexport a module defined in another crate.
+ ext_def_map = module.def_map(db);
+ &ext_def_map[module.local_id]
+ };
+
+ if let Some((name, vis)) = data.scope.name_of(item) {
+ if vis.is_visible_from(db, from) {
+ let is_private = match vis {
+ Visibility::Module(private_to) => private_to.local_id == module.local_id,
+ Visibility::Public => false,
+ };
+ let is_original_def = match item.as_module_def_id() {
+ Some(module_def_id) => data.scope.declarations().any(|it| it == module_def_id),
+ None => false,
+ };
+
+ // Ignore private imports. These could be used if we are
+ // in a submodule of this module, but that's usually not
+ // what the user wants; and if this module can import
+ // the item and we're a submodule of it, so can we.
+ // Also this keeps the cached data smaller.
+ if !is_private || is_original_def {
+ locations.push((module, name.clone()));
+ }
+ }
+ }
+
+ // Descend into all modules visible from `from`.
+ for (ty, vis) in data.scope.types() {
+ if let ModuleDefId::ModuleId(module) = ty {
+ if vis.is_visible_from(db, from) {
+ worklist.push(module);
+ }
+ }
+ }
+ }
+
+ locations
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::fixture::WithFixture;
+ use hir_expand::hygiene::Hygiene;
+ use syntax::ast::AstNode;
+
+ use crate::test_db::TestDB;
+
+ use super::*;
+
+ /// `code` needs to contain a cursor marker; checks that `find_path` for the
+ /// item the `path` refers to returns that same path when called from the
+ /// module the cursor is in.
+ fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option<PrefixKind>) {
+ let (db, pos) = TestDB::with_position(ra_fixture);
+ let module = db.module_at_position(pos);
+ let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path));
+ let ast_path =
+ parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
+ let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
+
+ let def_map = module.def_map(&db);
+ let resolved = def_map
+ .resolve_path(
+ &db,
+ module.local_id,
+ &mod_path,
+ crate::item_scope::BuiltinShadowMode::Module,
+ )
+ .0
+ .take_types()
+ .unwrap();
+
+ let found_path = find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind);
+ assert_eq!(found_path, Some(mod_path), "{:?}", prefix_kind);
+ }
+
+ fn check_found_path(
+ ra_fixture: &str,
+ unprefixed: &str,
+ prefixed: &str,
+ absolute: &str,
+ self_prefixed: &str,
+ ) {
+ check_found_path_(ra_fixture, unprefixed, None);
+ check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain));
+ check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate));
+ check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf));
+ }
+
+ #[test]
+ fn same_module() {
+ check_found_path(
+ r#"
+struct S;
+$0
+ "#,
+ "S",
+ "S",
+ "crate::S",
+ "self::S",
+ );
+ }
+
+ #[test]
+ fn enum_variant() {
+ check_found_path(
+ r#"
+enum E { A }
+$0
+ "#,
+ "E::A",
+ "E::A",
+ "E::A",
+ "E::A",
+ );
+ }
+
+ #[test]
+ fn sub_module() {
+ check_found_path(
+ r#"
+mod foo {
+ pub struct S;
+}
+$0
+ "#,
+ "foo::S",
+ "foo::S",
+ "crate::foo::S",
+ "self::foo::S",
+ );
+ }
+
+ #[test]
+ fn super_module() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+//- /foo.rs
+mod bar;
+struct S;
+//- /foo/bar.rs
+$0
+ "#,
+ "super::S",
+ "super::S",
+ "crate::foo::S",
+ "super::S",
+ );
+ }
+
+ #[test]
+ fn self_module() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+//- /foo.rs
+$0
+ "#,
+ "self",
+ "self",
+ "crate::foo",
+ "self",
+ );
+ }
+
+ #[test]
+ fn crate_root() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+//- /foo.rs
+$0
+ "#,
+ "crate",
+ "crate",
+ "crate",
+ "crate",
+ );
+ }
+
+ #[test]
+ fn same_crate() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+struct S;
+//- /foo.rs
+$0
+ "#,
+ "crate::S",
+ "crate::S",
+ "crate::S",
+ "crate::S",
+ );
+ }
+
+ #[test]
+ fn different_crate() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std
+pub struct S;
+ "#,
+ "std::S",
+ "std::S",
+ "std::S",
+ "std::S",
+ );
+ }
+
+ #[test]
+ fn different_crate_renamed() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as std_renamed;
+$0
+//- /std.rs crate:std
+pub struct S;
+ "#,
+ "std_renamed::S",
+ "std_renamed::S",
+ "std_renamed::S",
+ "std_renamed::S",
+ );
+ }
+
+ #[test]
+ fn partially_imported() {
+ cov_mark::check!(partially_imported);
+ // Tests that short paths are used even for external items, when parts of the path are
+ // already in scope.
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:syntax
+
+use syntax::ast;
+$0
+
+//- /lib.rs crate:syntax
+pub mod ast {
+ pub enum ModuleItem {
+ A, B, C,
+ }
+}
+ "#,
+ "ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ );
+
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:syntax
+$0
+
+//- /lib.rs crate:syntax
+pub mod ast {
+ pub enum ModuleItem {
+ A, B, C,
+ }
+}
+ "#,
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ "syntax::ast::ModuleItem",
+ );
+ }
+
+ #[test]
+ fn same_crate_reexport() {
+ check_found_path(
+ r#"
+mod bar {
+ mod foo { pub(super) struct S; }
+ pub(crate) use foo::*;
+}
+$0
+ "#,
+ "bar::S",
+ "bar::S",
+ "crate::bar::S",
+ "self::bar::S",
+ );
+ }
+
+ #[test]
+ fn same_crate_reexport_rename() {
+ check_found_path(
+ r#"
+mod bar {
+ mod foo { pub(super) struct S; }
+ pub(crate) use foo::S as U;
+}
+$0
+ "#,
+ "bar::U",
+ "bar::U",
+ "crate::bar::U",
+ "self::bar::U",
+ );
+ }
+
+ #[test]
+ fn different_crate_reexport() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std deps:core
+pub use core::S;
+//- /core.rs crate:core
+pub struct S;
+ "#,
+ "std::S",
+ "std::S",
+ "std::S",
+ "std::S",
+ );
+ }
+
+ #[test]
+ fn prelude() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct S;
+ }
+}
+ "#,
+ "S",
+ "S",
+ "S",
+ "S",
+ );
+ }
+
+ #[test]
+ fn enum_variant_from_prelude() {
+ let code = r#"
+//- /main.rs crate:main deps:std
+$0
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub enum Option<T> { Some(T), None }
+ pub use Option::*;
+ }
+}
+ "#;
+ check_found_path(code, "None", "None", "None", "None");
+ check_found_path(code, "Some", "Some", "Some", "Some");
+ }
+
+ #[test]
+ fn shortest_path() {
+ check_found_path(
+ r#"
+//- /main.rs
+pub mod foo;
+pub mod baz;
+struct S;
+$0
+//- /foo.rs
+pub mod bar { pub struct S; }
+//- /baz.rs
+pub use crate::foo::bar::S;
+ "#,
+ "baz::S",
+ "baz::S",
+ "crate::baz::S",
+ "self::baz::S",
+ );
+ }
+
+ #[test]
+ fn discount_private_imports() {
+ check_found_path(
+ r#"
+//- /main.rs
+mod foo;
+pub mod bar { pub struct S; }
+use bar::S;
+//- /foo.rs
+$0
+ "#,
+ // crate::S would be shorter, but using private imports seems wrong
+ "crate::bar::S",
+ "crate::bar::S",
+ "crate::bar::S",
+ "crate::bar::S",
+ );
+ }
+
+ #[test]
+ fn import_cycle() {
+ check_found_path(
+ r#"
+//- /main.rs
+pub mod foo;
+pub mod bar;
+pub mod baz;
+//- /bar.rs
+$0
+//- /foo.rs
+pub use super::baz;
+pub struct S;
+//- /baz.rs
+pub use super::foo;
+ "#,
+ "crate::foo::S",
+ "crate::foo::S",
+ "crate::foo::S",
+ "crate::foo::S",
+ );
+ }
+
+ #[test]
+ fn prefer_std_paths_over_alloc() {
+ cov_mark::check!(prefer_std_paths);
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:alloc,std
+$0
+
+//- /std.rs crate:std deps:alloc
+pub mod sync {
+ pub use alloc::sync::Arc;
+}
+
+//- /zzz.rs crate:alloc
+pub mod sync {
+ pub struct Arc;
+}
+ "#,
+ "std::sync::Arc",
+ "std::sync::Arc",
+ "std::sync::Arc",
+ "std::sync::Arc",
+ );
+ }
+
+ #[test]
+ fn prefer_core_paths_over_std() {
+ cov_mark::check!(prefer_no_std_paths);
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:core,std
+#![no_std]
+
+$0
+
+//- /std.rs crate:std deps:core
+
+pub mod fmt {
+ pub use core::fmt::Error;
+}
+
+//- /zzz.rs crate:core
+
+pub mod fmt {
+ pub struct Error;
+}
+ "#,
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ );
+
+ // Should also work (on a best-effort basis) if `no_std` is conditional.
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:core,std
+#![cfg_attr(not(test), no_std)]
+
+$0
+
+//- /std.rs crate:std deps:core
+
+pub mod fmt {
+ pub use core::fmt::Error;
+}
+
+//- /zzz.rs crate:core
+
+pub mod fmt {
+ pub struct Error;
+}
+ "#,
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ "core::fmt::Error",
+ );
+ }
+
+ #[test]
+ fn prefer_alloc_paths_over_std() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:alloc,std
+#![no_std]
+
+$0
+
+//- /std.rs crate:std deps:alloc
+
+pub mod sync {
+ pub use alloc::sync::Arc;
+}
+
+//- /zzz.rs crate:alloc
+
+pub mod sync {
+ pub struct Arc;
+}
+ "#,
+ "alloc::sync::Arc",
+ "alloc::sync::Arc",
+ "alloc::sync::Arc",
+ "alloc::sync::Arc",
+ );
+ }
+
+ #[test]
+ fn prefer_shorter_paths_if_not_alloc() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:megaalloc,std
+$0
+
+//- /std.rs crate:std deps:megaalloc
+pub mod sync {
+ pub use megaalloc::sync::Arc;
+}
+
+//- /zzz.rs crate:megaalloc
+pub struct Arc;
+ "#,
+ "megaalloc::Arc",
+ "megaalloc::Arc",
+ "megaalloc::Arc",
+ "megaalloc::Arc",
+ );
+ }
+
+ #[test]
+ fn builtins_are_in_scope() {
+ let code = r#"
+$0
+
+pub mod primitive {
+ pub use u8;
+}
+ "#;
+ check_found_path(code, "u8", "u8", "u8", "u8");
+ check_found_path(code, "u16", "u16", "u16", "u16");
+ }
+
+ #[test]
+ fn inner_items() {
+ check_found_path(
+ r#"
+fn main() {
+ struct Inner {}
+ $0
+}
+ "#,
+ "Inner",
+ "Inner",
+ "Inner",
+ "Inner",
+ );
+ }
+
+ #[test]
+ fn inner_items_from_outer_scope() {
+ check_found_path(
+ r#"
+fn main() {
+ struct Struct {}
+ {
+ $0
+ }
+}
+ "#,
+ "Struct",
+ "Struct",
+ "Struct",
+ "Struct",
+ );
+ }
+
+ #[test]
+ fn inner_items_from_inner_module() {
+ cov_mark::check!(prefixed_in_block_expression);
+ check_found_path(
+ r#"
+fn main() {
+ mod module {
+ struct Struct {}
+ }
+ {
+ $0
+ }
+}
+ "#,
+ "module::Struct",
+ "module::Struct",
+ "module::Struct",
+ "module::Struct",
+ );
+ }
+
+ #[test]
+ fn outer_items_with_inner_items_present() {
+ check_found_path(
+ r#"
+mod module {
+ pub struct CompleteMe;
+}
+
+fn main() {
+ fn inner() {}
+ $0
+}
+ "#,
+ // FIXME: these could use fewer/better prefixes
+ "module::CompleteMe",
+ "crate::module::CompleteMe",
+ "crate::module::CompleteMe",
+ "crate::module::CompleteMe",
+ )
+ }
+
+ #[test]
+ fn from_inside_module() {
+ // This worked correctly, but the test suite logic was broken.
+ cov_mark::check!(submodule_in_testdb);
+ check_found_path(
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ fn bar() {
+ $0
+ }
+}
+ "#,
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ )
+ }
+
+ #[test]
+ fn from_inside_module_with_inner_items() {
+ check_found_path(
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ fn bar() {
+ fn inner() {}
+ $0
+ }
+}
+ "#,
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ "crate::baz::Foo",
+ )
+ }
+
+ #[test]
+ fn recursive_pub_mod_reexport() {
+ cov_mark::check!(recursive_imports);
+ check_found_path(
+ r#"
+fn main() {
+ let _ = 22_i32.as_name$0();
+}
+
+pub mod name {
+ pub trait AsName {
+ fn as_name(&self) -> String;
+ }
+ impl AsName for i32 {
+ fn as_name(&self) -> String {
+ format!("Name: {}", self)
+ }
+ }
+ pub use crate::name;
+}
+"#,
+ "name::AsName",
+ "name::AsName",
+ "crate::name::AsName",
+ "self::name::AsName",
+ );
+ }
+
+ #[test]
+ fn extern_crate() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:dep
+$0
+//- /dep.rs crate:dep
+"#,
+ "dep",
+ "dep",
+ "dep",
+ "dep",
+ );
+
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:dep
+fn f() {
+ fn inner() {}
+ $0
+}
+//- /dep.rs crate:dep
+"#,
+ "dep",
+ "dep",
+ "dep",
+ "dep",
+ );
+ }
+
+ #[test]
+ fn prelude_with_inner_items() {
+ check_found_path(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() {
+ fn inner() {}
+ $0
+}
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub enum Option { None }
+ pub use Option::*;
+ }
+}
+ "#,
+ "None",
+ "None",
+ "None",
+ "None",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
new file mode 100644
index 000000000..2397cf501
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -0,0 +1,522 @@
+//! Many kinds of items or constructs can have generic parameters: functions,
+//! structs, impls, traits, etc. This module provides a common HIR for these
+//! generic parameters. See also the `Generics` type and the `generics_of` query
+//! in rustc.
+
+use base_db::FileId;
+use either::Either;
+use hir_expand::{
+ name::{AsName, Name},
+ ExpandResult, HirFileId, InFile,
+};
+use la_arena::{Arena, ArenaMap, Idx};
+use once_cell::unsync::Lazy;
+use std::ops::DerefMut;
+use stdx::impl_from;
+use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
+
+use crate::{
+ body::{Expander, LowerCtx},
+ child_by_source::ChildBySource,
+ db::DefDatabase,
+ dyn_map::DynMap,
+ intern::Interned,
+ keys,
+ src::{HasChildSource, HasSource},
+ type_ref::{LifetimeRef, TypeBound, TypeRef},
+ AdtId, ConstParamId, GenericDefId, HasModule, LifetimeParamId, LocalLifetimeParamId,
+ LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
+};
+
+/// Data about a generic type parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeParamData {
+ pub name: Option<Name>,
+ pub default: Option<Interned<TypeRef>>,
+ pub provenance: TypeParamProvenance,
+}
+
+/// Data about a generic lifetime parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct LifetimeParamData {
+ pub name: Name,
+}
+
+/// Data about a generic const parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ConstParamData {
+ pub name: Name,
+ pub ty: Interned<TypeRef>,
+ pub has_default: bool,
+}
+
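+/// Where a type parameter comes from: an explicit parameter list, the implicit `Self`
+/// parameter of a trait, or an `impl Trait` argument desugared into a parameter.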
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeParamProvenance {
+ TypeParamList,
+ TraitSelf,
+ ArgumentImplTrait,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeOrConstParamData {
+ TypeParamData(TypeParamData),
+ ConstParamData(ConstParamData),
+}
+
+impl TypeOrConstParamData {
+ pub fn name(&self) -> Option<&Name> {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(),
+ TypeOrConstParamData::ConstParamData(x) => Some(&x.name),
+ }
+ }
+
+ pub fn has_default(&self) -> bool {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => x.default.is_some(),
+ TypeOrConstParamData::ConstParamData(x) => x.has_default,
+ }
+ }
+
+ pub fn type_param(&self) -> Option<&TypeParamData> {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => Some(x),
+ TypeOrConstParamData::ConstParamData(_) => None,
+ }
+ }
+
+ pub fn const_param(&self) -> Option<&ConstParamData> {
+ match self {
+ TypeOrConstParamData::TypeParamData(_) => None,
+ TypeOrConstParamData::ConstParamData(x) => Some(x),
+ }
+ }
+
+ pub fn is_trait_self(&self) -> bool {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => {
+ x.provenance == TypeParamProvenance::TraitSelf
+ }
+ TypeOrConstParamData::ConstParamData(_) => false,
+ }
+ }
+}
+
+impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData);
+
+/// Data about the generic parameters of a function, struct, impl, etc.
+#[derive(Clone, PartialEq, Eq, Debug, Default, Hash)]
+pub struct GenericParams {
+ pub type_or_consts: Arena<TypeOrConstParamData>,
+ pub lifetimes: Arena<LifetimeParamData>,
+ pub where_predicates: Vec<WherePredicate>,
+}
+
+/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
+/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
+/// It might still result in multiple actual predicates though, because of
+/// associated type bindings like `Iterator<Item = u32>`.
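+///
+/// For example (illustration): `where T: Foo + Bar` lowers to two `TypeBound` predicates
+/// sharing the same target, while `where for<'a> T: Tr<'a>` lowers to a single
+/// `ForLifetime` predicate whose `lifetimes` list contains `'a`.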
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicate {
+ TypeBound {
+ target: WherePredicateTypeTarget,
+ bound: Interned<TypeBound>,
+ },
+ Lifetime {
+ target: LifetimeRef,
+ bound: LifetimeRef,
+ },
+ ForLifetime {
+ lifetimes: Box<[Name]>,
+ target: WherePredicateTypeTarget,
+ bound: Interned<TypeBound>,
+ },
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicateTypeTarget {
+ TypeRef(Interned<TypeRef>),
+ /// For desugared where predicates that can directly refer to a type param.
+ TypeOrConstParam(LocalTypeOrConstParamId),
+}
+
+impl GenericParams {
+ /// Returns an iterator over the `type_or_consts` field.
+ pub fn iter<'a>(
+ &'a self,
+ ) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
+ self.type_or_consts.iter()
+ }
+
+ pub(crate) fn generic_params_query(
+ db: &dyn DefDatabase,
+ def: GenericDefId,
+ ) -> Interned<GenericParams> {
+ let _p = profile::span("generic_params_query");
+
+ macro_rules! id_to_generics {
+ ($id:ident) => {{
+ let id = $id.lookup(db).id;
+ let tree = id.item_tree(db);
+ let item = &tree[id.value];
+ item.generic_params.clone()
+ }};
+ }
+
+ match def {
+ GenericDefId::FunctionId(id) => {
+ let loc = id.lookup(db);
+ let tree = loc.id.item_tree(db);
+ let item = &tree[loc.id.value];
+
+ let mut generic_params = GenericParams::clone(&item.explicit_generic_params);
+
+ let module = loc.container.module(db);
+ let func_data = db.function_data(id);
+
+ // Don't create an `Expander` or call `loc.source(db)` unless needed, since doing
+ // so causes a reparse after the `ItemTree` has been created.
+ let mut expander = Lazy::new(|| Expander::new(db, loc.source(db).file_id, module));
+ for (_, param) in &func_data.params {
+ generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
+ }
+
+ Interned::new(generic_params)
+ }
+ GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics!(id),
+ GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics!(id),
+ GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics!(id),
+ GenericDefId::TraitId(id) => id_to_generics!(id),
+ GenericDefId::TypeAliasId(id) => id_to_generics!(id),
+ GenericDefId::ImplId(id) => id_to_generics!(id),
+ GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {
+ Interned::new(GenericParams::default())
+ }
+ }
+ }
+
+ pub(crate) fn fill(&mut self, lower_ctx: &LowerCtx<'_>, node: &dyn HasGenericParams) {
+ if let Some(params) = node.generic_param_list() {
+ self.fill_params(lower_ctx, params)
+ }
+ if let Some(where_clause) = node.where_clause() {
+ self.fill_where_predicates(lower_ctx, where_clause);
+ }
+ }
+
+ pub(crate) fn fill_bounds(
+ &mut self,
+ lower_ctx: &LowerCtx<'_>,
+ node: &dyn ast::HasTypeBounds,
+ target: Either<TypeRef, LifetimeRef>,
+ ) {
+ for bound in
+ node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
+ {
+ self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone());
+ }
+ }
+
+ fn fill_params(&mut self, lower_ctx: &LowerCtx<'_>, params: ast::GenericParamList) {
+ for type_or_const_param in params.type_or_const_params() {
+ match type_or_const_param {
+ ast::TypeOrConstParam::Type(type_param) => {
+ let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
+ // FIXME: Use `Path::from_src`
+ let default = type_param
+ .default_type()
+ .map(|it| Interned::new(TypeRef::from_ast(lower_ctx, it)));
+ let param = TypeParamData {
+ name: Some(name.clone()),
+ default,
+ provenance: TypeParamProvenance::TypeParamList,
+ };
+ self.type_or_consts.alloc(param.into());
+ let type_ref = TypeRef::Path(name.into());
+ self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
+ }
+ ast::TypeOrConstParam::Const(const_param) => {
+ let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
+ let ty = const_param
+ .ty()
+ .map_or(TypeRef::Error, |it| TypeRef::from_ast(lower_ctx, it));
+ let param = ConstParamData {
+ name,
+ ty: Interned::new(ty),
+ has_default: const_param.default_val().is_some(),
+ };
+ self.type_or_consts.alloc(param.into());
+ }
+ }
+ }
+ for lifetime_param in params.lifetime_params() {
+ let name =
+ lifetime_param.lifetime().map_or_else(Name::missing, |lt| Name::new_lifetime(&lt));
+ let param = LifetimeParamData { name: name.clone() };
+ self.lifetimes.alloc(param);
+ let lifetime_ref = LifetimeRef::new_name(name);
+ self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+ }
+ }
+
+ fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx<'_>, where_clause: ast::WhereClause) {
+ for pred in where_clause.predicates() {
+ let target = if let Some(type_ref) = pred.ty() {
+ Either::Left(TypeRef::from_ast(lower_ctx, type_ref))
+ } else if let Some(lifetime) = pred.lifetime() {
+ Either::Right(LifetimeRef::new(&lifetime))
+ } else {
+ continue;
+ };
+
+ let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
+ // Higher-Ranked Trait Bounds
+ param_list
+ .lifetime_params()
+ .map(|lifetime_param| {
+ lifetime_param
+ .lifetime()
+ .map_or_else(Name::missing, |lt| Name::new_lifetime(&lt))
+ })
+ .collect()
+ });
+ for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
+ self.add_where_predicate_from_bound(
+ lower_ctx,
+ bound,
+ lifetimes.as_ref(),
+ target.clone(),
+ );
+ }
+ }
+ }
+
+ fn add_where_predicate_from_bound(
+ &mut self,
+ lower_ctx: &LowerCtx<'_>,
+ bound: ast::TypeBound,
+ hrtb_lifetimes: Option<&Box<[Name]>>,
+ target: Either<TypeRef, LifetimeRef>,
+ ) {
+ let bound = TypeBound::from_ast(lower_ctx, bound);
+ let predicate = match (target, bound) {
+ (Either::Left(type_ref), bound) => match hrtb_lifetimes {
+ Some(hrtb_lifetimes) => WherePredicate::ForLifetime {
+ lifetimes: hrtb_lifetimes.clone(),
+ target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)),
+ bound: Interned::new(bound),
+ },
+ None => WherePredicate::TypeBound {
+ target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)),
+ bound: Interned::new(bound),
+ },
+ },
+ (Either::Right(lifetime), TypeBound::Lifetime(bound)) => {
+ WherePredicate::Lifetime { target: lifetime, bound }
+ }
+ _ => return,
+ };
+ self.where_predicates.push(predicate);
+ }
+
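+ /// Collects implicit `impl Trait` arguments: for every `impl Trait` encountered while
+ /// walking `type_ref`, an anonymous type parameter (provenance `ArgumentImplTrait`) is
+ /// allocated and its bounds are recorded as where-predicates; `TypeRef::Macro` nodes are
+ /// expanded and walked recursively.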
+ pub(crate) fn fill_implicit_impl_trait_args(
+ &mut self,
+ db: &dyn DefDatabase,
+ expander: &mut impl DerefMut<Target = Expander>,
+ type_ref: &TypeRef,
+ ) {
+ type_ref.walk(&mut |type_ref| {
+ if let TypeRef::ImplTrait(bounds) = type_ref {
+ let param = TypeParamData {
+ name: None,
+ default: None,
+ provenance: TypeParamProvenance::ArgumentImplTrait,
+ };
+ let param_id = self.type_or_consts.alloc(param.into());
+ for bound in bounds {
+ self.where_predicates.push(WherePredicate::TypeBound {
+ target: WherePredicateTypeTarget::TypeOrConstParam(param_id),
+ bound: bound.clone(),
+ });
+ }
+ }
+ if let TypeRef::Macro(mc) = type_ref {
+ let macro_call = mc.to_node(db.upcast());
+ match expander.enter_expand::<ast::Type>(db, macro_call) {
+ Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
+ let ctx = LowerCtx::new(db, expander.current_file_id());
+ let type_ref = TypeRef::from_ast(&ctx, expanded);
+ self.fill_implicit_impl_trait_args(db, expander, &type_ref);
+ expander.exit(db, mark);
+ }
+ _ => {}
+ }
+ }
+ });
+ }
+
+ pub(crate) fn shrink_to_fit(&mut self) {
+ let Self { lifetimes, type_or_consts: types, where_predicates } = self;
+ lifetimes.shrink_to_fit();
+ types.shrink_to_fit();
+ where_predicates.shrink_to_fit();
+ }
+
+ pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
+ self.type_or_consts.iter().find_map(|(id, p)| {
+ if p.name().as_ref() == Some(&name) && p.type_param().is_some() {
+ Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+ } else {
+ None
+ }
+ })
+ }
+
+ pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
+ self.type_or_consts.iter().find_map(|(id, p)| {
+ if p.name().as_ref() == Some(&name) && p.const_param().is_some() {
+ Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+ } else {
+ None
+ }
+ })
+ }
+
+ pub fn find_trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
+ self.type_or_consts.iter().find_map(|(id, p)| {
+ matches!(
+ p,
+ TypeOrConstParamData::TypeParamData(TypeParamData {
+ provenance: TypeParamProvenance::TraitSelf,
+ ..
+ })
+ )
+ .then(|| id)
+ })
+ }
+}
+
+fn file_id_and_params_of(
+ def: GenericDefId,
+ db: &dyn DefDatabase,
+) -> (HirFileId, Option<ast::GenericParamList>) {
+ match def {
+ GenericDefId::FunctionId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::AdtId(AdtId::StructId(it)) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::AdtId(AdtId::UnionId(it)) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::AdtId(AdtId::EnumId(it)) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::TraitId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::TypeAliasId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::ImplId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ // We won't be using this ID anyway
+ GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
+ }
+}
+
+impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
+ type Value = Either<ast::TypeOrConstParam, ast::Trait>;
+ fn child_source(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> InFile<ArenaMap<LocalTypeOrConstParamId, Self::Value>> {
+ let generic_params = db.generic_params(*self);
+ let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
+
+ let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
+
+ let mut params = ArenaMap::default();
+
+ // For traits, the first type index is `Self`; we need to add it before the other params.
+ if let GenericDefId::TraitId(id) = *self {
+ let trait_ref = id.lookup(db).source(db).value;
+ let idx = idx_iter.next().unwrap();
+ params.insert(idx, Either::Right(trait_ref))
+ }
+
+ if let Some(generic_params_list) = generic_params_list {
+ for (idx, ast_param) in idx_iter.zip(generic_params_list.type_or_const_params()) {
+ params.insert(idx, Either::Left(ast_param));
+ }
+ }
+
+ InFile::new(file_id, params)
+ }
+}
+
+impl HasChildSource<LocalLifetimeParamId> for GenericDefId {
+ type Value = ast::LifetimeParam;
+ fn child_source(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> InFile<ArenaMap<LocalLifetimeParamId, Self::Value>> {
+ let generic_params = db.generic_params(*self);
+ let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+
+ let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
+
+ let mut params = ArenaMap::default();
+
+ if let Some(generic_params_list) = generic_params_list {
+ for (idx, ast_param) in idx_iter.zip(generic_params_list.lifetime_params()) {
+ params.insert(idx, ast_param);
+ }
+ }
+
+ InFile::new(file_id, params)
+ }
+}
+
+impl ChildBySource for GenericDefId {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let (gfile_id, generic_params_list) = file_id_and_params_of(*self, db);
+ if gfile_id != file_id {
+ return;
+ }
+
+ let generic_params = db.generic_params(*self);
+ let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
+ let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+
+ // For traits, the first type index is `Self`; skip it.
+ if let GenericDefId::TraitId(_) = *self {
+ toc_idx_iter.next().unwrap(); // advance_by(1);
+ }
+
+ if let Some(generic_params_list) = generic_params_list {
+ for (local_id, ast_param) in
+ toc_idx_iter.zip(generic_params_list.type_or_const_params())
+ {
+ let id = TypeOrConstParamId { parent: *self, local_id };
+ match ast_param {
+ ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
+ ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+ }
+ }
+ for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
+ let id = LifetimeParamId { parent: *self, local_id };
+ res[keys::LIFETIME_PARAM].insert(ast_param, id);
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
new file mode 100644
index 000000000..688055e43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -0,0 +1,1108 @@
+//! A map of all publicly exported items in a crate.
+
+use std::{fmt, hash::BuildHasherDefault, sync::Arc};
+
+use base_db::CrateId;
+use fst::{self, Streamer};
+use hir_expand::name::Name;
+use indexmap::{map::Entry, IndexMap};
+use itertools::Itertools;
+use rustc_hash::{FxHashSet, FxHasher};
+
+use crate::{
+ db::DefDatabase, item_scope::ItemInNs, visibility::Visibility, AssocItemId, ModuleDefId,
+ ModuleId, TraitId,
+};
+
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
+
+/// Item import details stored in the `ImportMap`.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ImportInfo {
+ /// A path that can be used to import the item, relative to the crate's root.
+ pub path: ImportPath,
+ /// The module containing this item.
+ pub container: ModuleId,
+ /// Whether the import is a trait associated item or not.
+ pub is_trait_assoc_item: bool,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ImportPath {
+ pub segments: Vec<Name>,
+}
+
+impl fmt::Display for ImportPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.segments.iter().format("::"), f)
+ }
+}
+
+impl ImportPath {
+ fn len(&self) -> usize {
+ self.segments.len()
+ }
+}
+
+/// A map from publicly exported items to the path needed to import/name them from a downstream
+/// crate.
+///
+/// Reexports of items are taken into account, i.e. if something is exported under multiple
+/// names, the one with the shortest import path will be used.
+///
+/// Note that all paths are relative to the containing crate's root, so the crate name still needs
+/// to be prepended to the `ModPath` before the path is valid.
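+///
+/// For example (sketch): if a crate defines `pub mod a { pub struct S; }` and also reexports
+/// the struct at the root via `pub use a::S;`, the map records the one-segment path `S`,
+/// and a downstream crate would name it as `<crate-name>::S`.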
+#[derive(Default)]
+pub struct ImportMap {
+ map: FxIndexMap<ItemInNs, ImportInfo>,
+
+ /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
+ /// values returned by running `fst`.
+ ///
+ /// Since a path can refer to multiple items due to namespacing, we store all items with the
+ /// same path right after each other. This allows us to find all items after the FST gives us
+ /// the index of the first one.
+ importables: Vec<ItemInNs>,
+ fst: fst::Map<Vec<u8>>,
+}
+
+impl ImportMap {
+ pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("import_map_query");
+
+ let mut import_map = collect_import_map(db, krate);
+
+ let mut importables = import_map
+ .map
+ .iter()
+ .map(|(item, info)| (item, fst_path(&info.path)))
+ .collect::<Vec<_>>();
+ importables.sort_by(|(_, fst_path), (_, fst_path2)| fst_path.cmp(fst_path2));
+
+ // Build the FST, taking care not to insert duplicate values.
+
+ let mut builder = fst::MapBuilder::memory();
+ let mut last_batch_start = 0;
+
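+ // `importables` is sorted by lowercased path, so equal paths form a contiguous batch;
+ // only the index of the first item of each batch is inserted into the FST, and the rest
+ // of the batch is found later by scanning forward from that index.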
+ for idx in 0..importables.len() {
+ let key = &importables[last_batch_start].1;
+ if let Some((_, fst_path)) = importables.get(idx + 1) {
+ if key == fst_path {
+ continue;
+ }
+ }
+
+ let _ = builder.insert(key, last_batch_start as u64);
+
+ last_batch_start = idx + 1;
+ }
+
+ import_map.fst = builder.into_map();
+ import_map.importables = importables.iter().map(|&(&item, _)| item).collect();
+
+ Arc::new(import_map)
+ }
+
+ /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root.
+ pub fn path_of(&self, item: ItemInNs) -> Option<&ImportPath> {
+ self.import_info_for(item).map(|it| &it.path)
+ }
+
+ pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
+ self.map.get(&item)
+ }
+
+ fn collect_trait_assoc_items(
+ &mut self,
+ db: &dyn DefDatabase,
+ tr: TraitId,
+ is_type_in_ns: bool,
+ original_import_info: &ImportInfo,
+ ) {
+ let _p = profile::span("collect_trait_assoc_items");
+ for (assoc_item_name, item) in &db.trait_data(tr).items {
+ let module_def_id = match item {
+ AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
+ AssocItemId::ConstId(c) => ModuleDefId::from(*c),
+ // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
+ // qualifier, ergo no need to store it for imports in import_map
+ AssocItemId::TypeAliasId(_) => {
+ cov_mark::hit!(type_aliases_ignored);
+ continue;
+ }
+ };
+ let assoc_item = if is_type_in_ns {
+ ItemInNs::Types(module_def_id)
+ } else {
+ ItemInNs::Values(module_def_id)
+ };
+
+ let mut assoc_item_info = original_import_info.clone();
+ assoc_item_info.path.segments.push(assoc_item_name.to_owned());
+ assoc_item_info.is_trait_assoc_item = true;
+ self.map.insert(assoc_item, assoc_item_info);
+ }
+ }
+}
+
+fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
+ let _p = profile::span("collect_import_map");
+
+ let def_map = db.crate_def_map(krate);
+ let mut import_map = ImportMap::default();
+
+ // We look only into modules that are public(ly reexported), starting with the crate root.
+ let empty = ImportPath { segments: vec![] };
+ let root = def_map.module_id(def_map.root());
+ let mut worklist = vec![(root, empty)];
+ while let Some((module, mod_path)) = worklist.pop() {
+ let ext_def_map;
+ let mod_data = if module.krate == krate {
+ &def_map[module.local_id]
+ } else {
+ // The crate might reexport a module defined in another crate.
+ ext_def_map = module.def_map(db);
+ &ext_def_map[module.local_id]
+ };
+
+ let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
+ let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
+ if per_ns.is_none() { None } else { Some((name, per_ns)) }
+ });
+
+ for (name, per_ns) in visible_items {
+ let mk_path = || {
+ let mut path = mod_path.clone();
+ path.segments.push(name.clone());
+ path
+ };
+
+ for item in per_ns.iter_items() {
+ let path = mk_path();
+ let path_len = path.len();
+ let import_info =
+ ImportInfo { path, container: module, is_trait_assoc_item: false };
+
+ if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
+ import_map.collect_trait_assoc_items(
+ db,
+ tr,
+ matches!(item, ItemInNs::Types(_)),
+ &import_info,
+ );
+ }
+
+ match import_map.map.entry(item) {
+ Entry::Vacant(entry) => {
+ entry.insert(import_info);
+ }
+ Entry::Occupied(mut entry) => {
+ // If the new path is shorter, prefer that one.
+ if path_len < entry.get().path.len() {
+ *entry.get_mut() = import_info;
+ } else {
+ continue;
+ }
+ }
+ }
+
+ // If we've just added a path to a module, descend into it. We might traverse
+ // modules multiple times, but only if the new path to it is shorter than the
+ // first (else we `continue` above).
+ if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
+ worklist.push((mod_id, mk_path()));
+ }
+ }
+ }
+ }
+
+ import_map
+}
+
+impl PartialEq for ImportMap {
+ fn eq(&self, other: &Self) -> bool {
+ // `fst` and `importables` are built from `map`, so we don't need to compare them.
+ self.map == other.map
+ }
+}
+
+impl Eq for ImportMap {}
+
+impl fmt::Debug for ImportMap {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut importable_paths: Vec<_> = self
+ .map
+ .iter()
+ .map(|(item, info)| {
+ let ns = match item {
+ ItemInNs::Types(_) => "t",
+ ItemInNs::Values(_) => "v",
+ ItemInNs::Macros(_) => "m",
+ };
+ format!("- {} ({})", info.path, ns)
+ })
+ .collect();
+
+ importable_paths.sort();
+ f.write_str(&importable_paths.join("\n"))
+ }
+}
+
+fn fst_path(path: &ImportPath) -> String {
+ let _p = profile::span("fst_path");
+ let mut s = path.to_string();
+ s.make_ascii_lowercase();
+ s
+}
+
+#[derive(Debug, Eq, PartialEq, Hash)]
+pub enum ImportKind {
+ Module,
+ Function,
+ Adt,
+ EnumVariant,
+ Const,
+ Static,
+ Trait,
+ TypeAlias,
+ BuiltinType,
+ AssociatedItem,
+ Macro,
+}
+
+/// A way to match import map contents against the search query.
+#[derive(Debug)]
+pub enum SearchMode {
+ /// Import map entry should strictly match the query string.
+ Equals,
+ /// Import map entry should contain the query string.
+ Contains,
+ /// Import map entry should contain all letters from the query string,
+ /// in the same order, but not necessarily adjacent.
+ Fuzzy,
+}
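+
+// Example (illustration): with `SearchMode::Fuzzy`, the query "fmt" matches an entry like
+// `dep::format::Formatter` because 'f', 'm' and 't' occur in that order, while
+// `SearchMode::Equals` only matches an entry that is exactly "fmt".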
+
+#[derive(Debug)]
+pub struct Query {
+ query: String,
+ lowercased: String,
+ name_only: bool,
+ assoc_items_only: bool,
+ search_mode: SearchMode,
+ case_sensitive: bool,
+ limit: usize,
+ exclude_import_kinds: FxHashSet<ImportKind>,
+}
+
+impl Query {
+ pub fn new(query: String) -> Self {
+ let lowercased = query.to_lowercase();
+ Self {
+ query,
+ lowercased,
+ name_only: false,
+ assoc_items_only: false,
+ search_mode: SearchMode::Contains,
+ case_sensitive: false,
+ limit: usize::max_value(),
+ exclude_import_kinds: FxHashSet::default(),
+ }
+ }
+
+ /// Matches entries' names only, ignoring the rest of
+ /// the qualifier.
+ /// Example: for `std::marker::PhantomData`, the name is `PhantomData`.
+ pub fn name_only(self) -> Self {
+ Self { name_only: true, ..self }
+ }
+
+ /// Matches only the entries that are associated items, ignoring the rest.
+ pub fn assoc_items_only(self) -> Self {
+ Self { assoc_items_only: true, ..self }
+ }
+
+ /// Specifies the way to search for the entries using the query.
+ pub fn search_mode(self, search_mode: SearchMode) -> Self {
+ Self { search_mode, ..self }
+ }
+
+ /// Limits the returned number of items to `limit`.
+ pub fn limit(self, limit: usize) -> Self {
+ Self { limit, ..self }
+ }
+
+ /// Respect casing of the query string when matching.
+ pub fn case_sensitive(self) -> Self {
+ Self { case_sensitive: true, ..self }
+ }
+
+ /// Do not include imports of the specified kind in the search results.
+ pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self {
+ self.exclude_import_kinds.insert(import_kind);
+ self
+ }
+
+ fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
+ let _p = profile::span("import_map::Query::import_matches");
+ if import.is_trait_assoc_item {
+ if self.exclude_import_kinds.contains(&ImportKind::AssociatedItem) {
+ return false;
+ }
+ } else if self.assoc_items_only {
+ return false;
+ }
+
+ let mut input = if import.is_trait_assoc_item || self.name_only {
+ import.path.segments.last().unwrap().to_string()
+ } else {
+ import.path.to_string()
+ };
+ if enforce_lowercase || !self.case_sensitive {
+ input.make_ascii_lowercase();
+ }
+
+ let query_string =
+ if !enforce_lowercase && self.case_sensitive { &self.query } else { &self.lowercased };
+
+ match self.search_mode {
+ SearchMode::Equals => &input == query_string,
+ SearchMode::Contains => input.contains(query_string),
+ SearchMode::Fuzzy => {
+ let mut unchecked_query_chars = query_string.chars();
+ let mut mismatching_query_char = unchecked_query_chars.next();
+
+ for input_char in input.chars() {
+ match mismatching_query_char {
+ None => return true,
+ Some(matching_query_char) if matching_query_char == input_char => {
+ mismatching_query_char = unchecked_query_chars.next();
+ }
+ _ => (),
+ }
+ }
+ mismatching_query_char.is_none()
+ }
+ }
+ }
+}
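+
+// Example (sketch): a typical query built with the methods above,
+// `Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).name_only()`,
+// fuzzily matches "fmt" against the last path segment of each entry only.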
+
+/// Searches dependencies of `krate` for an importable path matching `query`.
+///
+/// This returns a list of items that could be imported from dependencies of `krate`.
+pub fn search_dependencies<'a>(
+ db: &'a dyn DefDatabase,
+ krate: CrateId,
+ query: Query,
+) -> FxHashSet<ItemInNs> {
+ let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query));
+
+ let graph = db.crate_graph();
+ let import_maps: Vec<_> =
+ graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
+
+ let automaton = fst::automaton::Subsequence::new(&query.lowercased);
+
+ let mut op = fst::map::OpBuilder::new();
+ for map in &import_maps {
+ op = op.add(map.fst.search(&automaton));
+ }
+
+ let mut stream = op.union();
+
+ let mut all_indexed_values = FxHashSet::default();
+ while let Some((_, indexed_values)) = stream.next() {
+ all_indexed_values.extend(indexed_values.iter().copied());
+ }
+
+ let mut res = FxHashSet::default();
+ for indexed_value in all_indexed_values {
+ let import_map = &import_maps[indexed_value.index];
+ let importables = &import_map.importables[indexed_value.value as usize..];
+
+ let common_importable_data = &import_map.map[&importables[0]];
+ if !query.import_matches(common_importable_data, true) {
+ continue;
+ }
+
+ // Path shared by the importable items in this group.
+ let common_importables_path_fst = fst_path(&common_importable_data.path);
+ // Add the items from this `ModPath` group. Those are all subsequent items in
+ // `importables` whose paths match `path`.
+ let iter = importables
+ .iter()
+ .copied()
+ .take_while(|item| common_importables_path_fst == fst_path(&import_map.map[item].path))
+ .filter(|&item| match item_import_kind(item) {
+ Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind),
+ None => true,
+ })
+ .filter(|item| {
+ !query.case_sensitive // we've already checked the common importables path case-insensitively
+ || query.import_matches(&import_map.map[item], false)
+ });
+ res.extend(iter);
+
+ if res.len() >= query.limit {
+ return res;
+ }
+ }
+
+ res
+}
+
+fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
+ Some(match item.as_module_def_id()? {
+ ModuleDefId::ModuleId(_) => ImportKind::Module,
+ ModuleDefId::FunctionId(_) => ImportKind::Function,
+ ModuleDefId::AdtId(_) => ImportKind::Adt,
+ ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant,
+ ModuleDefId::ConstId(_) => ImportKind::Const,
+ ModuleDefId::StaticId(_) => ImportKind::Static,
+ ModuleDefId::TraitId(_) => ImportKind::Trait,
+ ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
+ ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
+ ModuleDefId::MacroId(_) => ImportKind::Macro,
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::WithFixture, SourceDatabase, Upcast};
+ use expect_test::{expect, Expect};
+
+ use crate::{test_db::TestDB, ItemContainerId, Lookup};
+
+ use super::*;
+
+ fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) {
+ let db = TestDB::with_files(ra_fixture);
+ let crate_graph = db.crate_graph();
+ let krate = crate_graph
+ .iter()
+ .find(|krate| {
+ crate_graph[*krate].display_name.as_ref().map(|n| n.to_string())
+ == Some(crate_name.to_string())
+ })
+ .unwrap();
+
+ let actual = search_dependencies(db.upcast(), krate, query)
+ .into_iter()
+ .filter_map(|dependency| {
+ let dependency_krate = dependency.krate(db.upcast())?;
+ let dependency_imports = db.import_map(dependency_krate);
+
+ let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
+ Some(assoc_item_path) => (assoc_item_path, "a"),
+ None => (
+ dependency_imports.path_of(dependency)?.to_string(),
+ match dependency {
+ ItemInNs::Types(ModuleDefId::FunctionId(_))
+ | ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
+ ItemInNs::Types(_) => "t",
+ ItemInNs::Values(_) => "v",
+ ItemInNs::Macros(_) => "m",
+ },
+ ),
+ };
+
+ Some(format!(
+ "{}::{} ({})\n",
+ crate_graph[dependency_krate].display_name.as_ref()?,
+ path,
+ mark
+ ))
+ })
+ // HashSet iteration order isn't defined - it's different on
+ // x86_64 and i686 at the very least
+ .sorted()
+ .collect::<String>();
+ expect.assert_eq(&actual)
+ }
+
+ fn assoc_item_path(
+ db: &dyn DefDatabase,
+ dependency_imports: &ImportMap,
+ dependency: ItemInNs,
+ ) -> Option<String> {
+ let dependency_assoc_item_id = match dependency {
+ ItemInNs::Types(ModuleDefId::FunctionId(id))
+ | ItemInNs::Values(ModuleDefId::FunctionId(id)) => AssocItemId::from(id),
+ ItemInNs::Types(ModuleDefId::ConstId(id))
+ | ItemInNs::Values(ModuleDefId::ConstId(id)) => AssocItemId::from(id),
+ ItemInNs::Types(ModuleDefId::TypeAliasId(id))
+ | ItemInNs::Values(ModuleDefId::TypeAliasId(id)) => AssocItemId::from(id),
+ _ => return None,
+ };
+
+ let trait_ = assoc_to_trait(db, dependency)?;
+ if let ModuleDefId::TraitId(tr) = trait_.as_module_def_id()? {
+ let trait_data = db.trait_data(tr);
+ let assoc_item_name =
+ trait_data.items.iter().find_map(|(assoc_item_name, assoc_item_id)| {
+ if &dependency_assoc_item_id == assoc_item_id {
+ Some(assoc_item_name)
+ } else {
+ None
+ }
+ })?;
+ return Some(format!("{}::{}", dependency_imports.path_of(trait_)?, assoc_item_name));
+ }
+ None
+ }
+
+ fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> {
+ let assoc: AssocItemId = match item {
+ ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
+ ModuleDefId::TypeAliasId(it) => it.into(),
+ ModuleDefId::FunctionId(it) => it.into(),
+ ModuleDefId::ConstId(it) => it.into(),
+ _ => return None,
+ },
+ _ => return None,
+ };
+
+ let container = match assoc {
+ AssocItemId::FunctionId(it) => it.lookup(db).container,
+ AssocItemId::ConstId(it) => it.lookup(db).container,
+ AssocItemId::TypeAliasId(it) => it.lookup(db).container,
+ };
+
+ match container {
+ ItemContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())),
+ _ => None,
+ }
+ }
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let db = TestDB::with_files(ra_fixture);
+ let crate_graph = db.crate_graph();
+
+ let actual = crate_graph
+ .iter()
+ .filter_map(|krate| {
+ let cdata = &crate_graph[krate];
+ let name = cdata.display_name.as_ref()?;
+
+ let map = db.import_map(krate);
+
+ Some(format!("{}:\n{:?}\n", name, map))
+ })
+ .sorted()
+ .collect::<String>();
+
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn smoke() {
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+
+ mod private {
+ pub use lib::Pub;
+ pub struct InPrivateModule;
+ }
+
+ pub mod publ1 {
+ use lib::Pub;
+ }
+
+ pub mod real_pub {
+ pub use lib::Pub;
+ }
+ pub mod real_pu2 { // same path length as above
+ pub use lib::Pub;
+ }
+
+ //- /lib.rs crate:lib
+ pub struct Pub {}
+ pub struct Pub2; // t + v
+ struct Priv;
+ ",
+ expect![[r#"
+ lib:
+ - Pub (t)
+ - Pub2 (t)
+ - Pub2 (v)
+ main:
+ - publ1 (t)
+ - real_pu2 (t)
+ - real_pub (t)
+ - real_pub::Pub (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn prefers_shortest_path() {
+ check(
+ r"
+ //- /main.rs crate:main
+
+ pub mod sub {
+ pub mod subsub {
+ pub struct Def {}
+ }
+
+ pub use super::sub::subsub::Def;
+ }
+ ",
+ expect![[r#"
+ main:
+ - sub (t)
+ - sub::Def (t)
+ - sub::subsub (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn type_reexport_cross_crate() {
+ // Reexports need to be visible from a crate, even if the original crate exports the item
+ // at a shorter path.
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+ pub mod m {
+ pub use lib::S;
+ }
+ //- /lib.rs crate:lib
+ pub struct S;
+ ",
+ expect![[r#"
+ lib:
+ - S (t)
+ - S (v)
+ main:
+ - m (t)
+ - m::S (t)
+ - m::S (v)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_reexport() {
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+ pub mod m {
+ pub use lib::pub_macro;
+ }
+ //- /lib.rs crate:lib
+ #[macro_export]
+ macro_rules! pub_macro {
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+ - pub_macro (m)
+ main:
+ - m (t)
+ - m::pub_macro (m)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn module_reexport() {
+ // Reexporting modules from a dependency adds all contents to the import map.
+ check(
+ r"
+ //- /main.rs crate:main deps:lib
+ pub use lib::module as reexported_module;
+ //- /lib.rs crate:lib
+ pub mod module {
+ pub struct S;
+ }
+ ",
+ expect![[r#"
+ lib:
+ - module (t)
+ - module::S (t)
+ - module::S (v)
+ main:
+ - reexported_module (t)
+ - reexported_module::S (t)
+ - reexported_module::S (v)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn cyclic_module_reexport() {
+ // A cyclic reexport does not hang.
+ check(
+ r"
+ //- /lib.rs crate:lib
+ pub mod module {
+ pub struct S;
+ pub use super::sub::*;
+ }
+
+ pub mod sub {
+ pub use super::module;
+ }
+ ",
+ expect![[r#"
+ lib:
+ - module (t)
+ - module::S (t)
+ - module::S (v)
+ - sub (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn private_macro() {
+ check(
+ r"
+ //- /lib.rs crate:lib
+ macro_rules! private_macro {
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn namespacing() {
+ check(
+ r"
+ //- /lib.rs crate:lib
+ pub struct Thing; // t + v
+ #[macro_export]
+ macro_rules! Thing { // m
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+ - Thing (m)
+ - Thing (t)
+ - Thing (v)
+ "#]],
+ );
+
+ check(
+ r"
+ //- /lib.rs crate:lib
+ pub mod Thing {} // t
+ #[macro_export]
+ macro_rules! Thing { // m
+ () => {};
+ }
+ ",
+ expect![[r#"
+ lib:
+ - Thing (m)
+ - Thing (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fuzzy_import_trait_and_assoc_items() {
+ cov_mark::check!(type_aliases_ignored);
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub mod fmt {
+ pub trait Display {
+ type FmtTypeAlias;
+ const FMT_CONST: bool;
+
+ fn format_function();
+ fn format_method(&self);
+ }
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ expect![[r#"
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::FMT_CONST (a)
+ dep::fmt::Display::format_function (a)
+ dep::fmt::Display::format_method (a)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn assoc_items_filtering() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub mod fmt {
+ pub trait Display {
+ type FmtTypeAlias;
+ const FMT_CONST: bool;
+
+ fn format_function();
+ fn format_method(&self);
+ }
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).assoc_items_only(),
+ expect![[r#"
+ dep::fmt::Display::FMT_CONST (a)
+ dep::fmt::Display::format_function (a)
+ dep::fmt::Display::format_method (a)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string())
+ .search_mode(SearchMode::Fuzzy)
+ .exclude_import_kind(ImportKind::AssociatedItem),
+ expect![[r#"
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string())
+ .search_mode(SearchMode::Fuzzy)
+ .assoc_items_only()
+ .exclude_import_kind(ImportKind::AssociatedItem),
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn search_mode() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep deps:tdep
+ use tdep::fmt as fmt_dep;
+ pub mod fmt {
+ pub trait Display {
+ fn fmt();
+ }
+ }
+ #[macro_export]
+ macro_rules! Fmt {
+ () => {};
+ }
+ pub struct Fmt;
+
+ pub fn format() {}
+ pub fn no() {}
+
+ //- /tdep.rs crate:tdep
+ pub mod fmt {
+ pub struct NotImportableFromMain;
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::fmt (a)
+ dep::format (f)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Equals),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).search_mode(SearchMode::Contains),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn name_only() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep deps:tdep
+ use tdep::fmt as fmt_dep;
+ pub mod fmt {
+ pub trait Display {
+ fn fmt();
+ }
+ }
+ #[macro_export]
+ macro_rules! Fmt {
+ () => {};
+ }
+ pub struct Fmt;
+
+ pub fn format() {}
+ pub fn no() {}
+
+ //- /tdep.rs crate:tdep
+ pub mod fmt {
+ pub struct NotImportableFromMain;
+ }
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("fmt".to_string()).name_only(),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ dep::fmt::Display::fmt (a)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_casing() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+
+ pub struct fmt;
+ pub struct FMT;
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()),
+ expect![[r#"
+ dep::FMT (t)
+ dep::FMT (v)
+ dep::fmt (t)
+ dep::fmt (v)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()).case_sensitive(),
+ expect![[r#"
+ dep::FMT (t)
+ dep::FMT (v)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_limit() {
+ check_search(
+ r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+ pub mod fmt {
+ pub trait Display {
+ fn fmt();
+ }
+ }
+ #[macro_export]
+ macro_rules! Fmt {
+ () => {};
+ }
+ pub struct Fmt;
+
+ pub fn format() {}
+ pub fn no() {}
+ "#,
+ "main",
+ Query::new("".to_string()).limit(2),
+ expect![[r#"
+ dep::Fmt (m)
+ dep::Fmt (t)
+ dep::Fmt (v)
+ dep::fmt (t)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_exclusions() {
+ let ra_fixture = r#"
+ //- /main.rs crate:main deps:dep
+ //- /dep.rs crate:dep
+
+ pub struct fmt;
+ pub struct FMT;
+ "#;
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()),
+ expect![[r#"
+ dep::FMT (t)
+ dep::FMT (v)
+ dep::fmt (t)
+ dep::fmt (v)
+ "#]],
+ );
+
+ check_search(
+ ra_fixture,
+ "main",
+ Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt),
+ expect![[r#""#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/intern.rs b/src/tools/rust-analyzer/crates/hir-def/src/intern.rs
new file mode 100644
index 000000000..f08521a34
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/intern.rs
@@ -0,0 +1,227 @@
+//! Global `Arc`-based object interning infrastructure.
+//!
+//! Eventually this should probably be replaced with salsa-based interning.
+
+use std::{
+ fmt::{self, Debug, Display},
+ hash::{BuildHasherDefault, Hash, Hasher},
+ ops::Deref,
+ sync::Arc,
+};
+
+use dashmap::{DashMap, SharedValue};
+use hashbrown::HashMap;
+use once_cell::sync::OnceCell;
+use rustc_hash::FxHasher;
+
+use crate::generics::GenericParams;
+
+type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
+type Guard<T> = dashmap::RwLockWriteGuard<
+ 'static,
+ HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>,
+>;
+
+pub struct Interned<T: Internable + ?Sized> {
+ arc: Arc<T>,
+}
+
+impl<T: Internable> Interned<T> {
+ pub fn new(obj: T) -> Self {
+ match Interned::lookup(&obj) {
+ Ok(this) => this,
+ Err(shard) => {
+ let arc = Arc::new(obj);
+ Self::alloc(arc, shard)
+ }
+ }
+ }
+}
+
+impl<T: Internable + ?Sized> Interned<T> {
+ fn lookup(obj: &T) -> Result<Self, Guard<T>> {
+ let storage = T::storage().get();
+ let shard_idx = storage.determine_map(obj);
+ let shard = &storage.shards()[shard_idx];
+ let shard = shard.write();
+
+ // Atomically,
+ // - check if `obj` is already in the map
+ // - if so, clone its `Arc` and return it
+ // - if not, box it up, insert it, and return a clone
+ // This needs to be atomic (locking the shard) to avoid races with other threads, which could
+ // insert the same object between us looking it up and inserting it.
+
+ // FIXME: avoid double lookup/hashing by using raw entry API (once stable, or when
+ // hashbrown can be plugged into dashmap)
+ match shard.get_key_value(obj) {
+ Some((arc, _)) => Ok(Self { arc: arc.clone() }),
+ None => Err(shard),
+ }
+ }
+
+ fn alloc(arc: Arc<T>, mut shard: Guard<T>) -> Self {
+ let arc2 = arc.clone();
+
+ shard.insert(arc2, SharedValue::new(()));
+
+ Self { arc }
+ }
+}
+
+impl Interned<str> {
+ pub fn new_str(s: &str) -> Self {
+ match Interned::lookup(s) {
+ Ok(this) => this,
+ Err(shard) => {
+ let arc = Arc::<str>::from(s);
+ Self::alloc(arc, shard)
+ }
+ }
+ }
+}
+
+impl<T: Internable + ?Sized> Drop for Interned<T> {
+ #[inline]
+ fn drop(&mut self) {
+ // When the last `Ref` is dropped, remove the object from the global map.
+ if Arc::strong_count(&self.arc) == 2 {
+ // Only `self` and the global map point to the object.
+
+ self.drop_slow();
+ }
+ }
+}
+
+impl<T: Internable + ?Sized> Interned<T> {
+ #[cold]
+ fn drop_slow(&mut self) {
+ let storage = T::storage().get();
+ let shard_idx = storage.determine_map(&self.arc);
+ let shard = &storage.shards()[shard_idx];
+ let mut shard = shard.write();
+
+ // FIXME: avoid double lookup
+ let (arc, _) = shard.get_key_value(&self.arc).expect("interned value removed prematurely");
+
+ if Arc::strong_count(arc) != 2 {
+ // Another thread has interned another copy
+ return;
+ }
+
+ shard.remove(&self.arc);
+
+ // Shrink the backing storage if the shard is less than 50% occupied.
+ if shard.len() * 2 < shard.capacity() {
+ shard.shrink_to_fit();
+ }
+ }
+}
+
+/// Compares interned `Ref`s using pointer equality.
+impl<T: Internable> PartialEq for Interned<T> {
+ // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects.
+
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ Arc::ptr_eq(&self.arc, &other.arc)
+ }
+}
+
+impl<T: Internable> Eq for Interned<T> {}
+
+impl PartialEq for Interned<str> {
+ fn eq(&self, other: &Self) -> bool {
+ Arc::ptr_eq(&self.arc, &other.arc)
+ }
+}
+
+impl Eq for Interned<str> {}
+
+impl<T: Internable + ?Sized> Hash for Interned<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ // NOTE: The cast discards the vtable pointer or slice/str length metadata.
+ state.write_usize(Arc::as_ptr(&self.arc) as *const () as usize)
+ }
+}
+
+impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
+ #[inline]
+ fn as_ref(&self) -> &T {
+ &self.arc
+ }
+}
+
+impl<T: Internable + ?Sized> Deref for Interned<T> {
+ type Target = T;
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ &self.arc
+ }
+}
+
+impl<T: Internable + ?Sized> Clone for Interned<T> {
+ fn clone(&self) -> Self {
+ Self { arc: self.arc.clone() }
+ }
+}
+
+impl<T: Debug + Internable + ?Sized> Debug for Interned<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (*self.arc).fmt(f)
+ }
+}
+
+impl<T: Display + Internable + ?Sized> Display for Interned<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (*self.arc).fmt(f)
+ }
+}
+
+pub struct InternStorage<T: ?Sized> {
+ map: OnceCell<InternMap<T>>,
+}
+
+impl<T: ?Sized> InternStorage<T> {
+ pub const fn new() -> Self {
+ Self { map: OnceCell::new() }
+ }
+}
+
+impl<T: Internable + ?Sized> InternStorage<T> {
+ fn get(&self) -> &InternMap<T> {
+ self.map.get_or_init(DashMap::default)
+ }
+}
+
+pub trait Internable: Hash + Eq + 'static {
+ fn storage() -> &'static InternStorage<Self>;
+}
+
+/// Implements `Internable` for a given list of types, making them usable with `Interned`.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! _impl_internable {
+ ( $($t:path),+ $(,)? ) => { $(
+ impl Internable for $t {
+ fn storage() -> &'static InternStorage<Self> {
+ static STORAGE: InternStorage<$t> = InternStorage::new();
+ &STORAGE
+ }
+ }
+ )+ };
+}
+
+pub use crate::_impl_internable as impl_internable;
+
+impl_internable!(
+ crate::type_ref::TypeRef,
+ crate::type_ref::TraitRef,
+ crate::type_ref::TypeBound,
+ crate::path::ModPath,
+ crate::path::GenericArgs,
+ crate::attr::AttrInput,
+ GenericParams,
+ str,
+);
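+
+// A minimal sketch of how the interning above behaves: interning an equal value twice
+// yields pointer-equal handles, which is what makes `Interned` comparison and hashing O(1).
+#[cfg(test)]
+mod tests {
+    use super::Interned;
+
+    #[test]
+    fn interning_deduplicates() {
+        let a = Interned::new_str("example");
+        let b = Interned::new_str("example");
+        // `PartialEq for Interned<str>` is `Arc::ptr_eq`, so equal strings intern to the
+        // same allocation.
+        assert!(a == b);
+    }
+}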
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
new file mode 100644
index 000000000..a11a92204
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -0,0 +1,464 @@
+//! Describes items defined or visible (i.e., imported) in a certain scope.
+//! This is shared between modules and blocks.
+
+use std::collections::hash_map::Entry;
+
+use base_db::CrateId;
+use hir_expand::{name::Name, AstId, MacroCallId};
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use profile::Count;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use stdx::format_to;
+use syntax::ast;
+
+use crate::{
+ attr::AttrId, db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType,
+ ConstId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+};
+
+#[derive(Copy, Clone)]
+pub(crate) enum ImportType {
+ Glob,
+ Named,
+}
+
+#[derive(Debug, Default)]
+pub struct PerNsGlobImports {
+ types: FxHashSet<(LocalModuleId, Name)>,
+ values: FxHashSet<(LocalModuleId, Name)>,
+ macros: FxHashSet<(LocalModuleId, Name)>,
+}
+
+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct ItemScope {
+ _c: Count<Self>,
+
+ /// Defs visible in this scope. This includes `declarations`, but also
+ /// imports.
+ types: FxHashMap<Name, (ModuleDefId, Visibility)>,
+ values: FxHashMap<Name, (ModuleDefId, Visibility)>,
+ macros: FxHashMap<Name, (MacroId, Visibility)>,
+ unresolved: FxHashSet<Name>,
+
+ /// The defs declared in this scope. Each def has a single scope where it is
+ /// declared.
+ declarations: Vec<ModuleDefId>,
+
+ impls: Vec<ImplId>,
+ unnamed_consts: Vec<ConstId>,
+ /// Traits imported via `use Trait as _;`.
+ unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
+ /// Macros visible in the current module in legacy textual scope
+ ///
+ /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` is searched first.
+ /// If it yields no result, the module scoped `macros` are consulted.
+ /// For macros invoked with a path-qualified name like `crate::foo::bar!()`, `legacy_macros` is skipped,
+ /// and only the normal scoped `macros` are searched.
+ ///
+ /// Note that this automatically inherits macros defined textually before the definition of the module itself.
+ ///
+ /// Module scoped macros will be inserted into `items` instead of here.
+ // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will
+ // be all resolved to the last one defined if shadowing happens.
+ legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 1]>>,
+ /// The attribute macro invocations in this scope.
+ attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>,
+ /// The derive macro invocations in this scope, keyed by the owner item; each entry pairs a
+ /// derive attribute with the macro invocations generated for that specific attribute.
+ derive_macros: FxHashMap<AstId<ast::Adt>, SmallVec<[DeriveMacroInvocation; 1]>>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+struct DeriveMacroInvocation {
+ attr_id: AttrId,
+ attr_call_id: MacroCallId,
+ derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
+}
+
+pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
+ BuiltinType::ALL
+ .iter()
+ .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public)))
+ .collect()
+});
+
+/// Shadow mode for builtin type which can be shadowed by module.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub(crate) enum BuiltinShadowMode {
+ /// Prefer user-defined modules (or other types) over builtins.
+ Module,
+ /// Prefer builtins over user-defined modules (but not other types).
+ Other,
+}
+
+/// Legacy macros can only be accessed through special methods like `get_legacy_macros`.
+ /// Other methods resolve only values, types and module scoped macros.
+impl ItemScope {
+ pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
+ // FIXME: shadowing
+ self.types
+ .keys()
+ .chain(self.values.keys())
+ .chain(self.macros.keys())
+ .chain(self.unresolved.iter())
+ .sorted()
+ .unique()
+ .map(move |name| (name, self.get(name)))
+ }
+
+ pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
+ self.declarations.iter().copied()
+ }
+
+ pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
+ self.impls.iter().copied()
+ }
+
+ pub fn values(
+ &self,
+ ) -> impl Iterator<Item = (ModuleDefId, Visibility)> + ExactSizeIterator + '_ {
+ self.values.values().copied()
+ }
+
+ pub fn types(
+ &self,
+ ) -> impl Iterator<Item = (ModuleDefId, Visibility)> + ExactSizeIterator + '_ {
+ self.types.values().copied()
+ }
+
+ pub fn unnamed_consts(&self) -> impl Iterator<Item = ConstId> + '_ {
+ self.unnamed_consts.iter().copied()
+ }
+
+ /// Iterate over all module scoped macros
+ pub(crate) fn macros(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ {
+ self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
+ }
+
+ /// Iterate over all legacy textual scoped macros visible at the end of the module
+ pub fn legacy_macros(&self) -> impl Iterator<Item = (&Name, &[MacroId])> + '_ {
+ self.legacy_macros.iter().map(|(name, def)| (name, &**def))
+ }
+
+ /// Get a name from the current module scope; legacy macros are not included
+ pub(crate) fn get(&self, name: &Name) -> PerNs {
+ PerNs {
+ types: self.types.get(name).copied(),
+ values: self.values.get(name).copied(),
+ macros: self.macros.get(name).copied(),
+ }
+ }
+
+ pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> {
+ self.types.get(name).copied()
+ }
+
+ /// XXX: this is O(N) rather than O(1); try not to introduce new usages.
+ pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
+ let (def, mut iter) = match item {
+ ItemInNs::Macros(def) => {
+ return self
+ .macros
+ .iter()
+ .find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)));
+ }
+ ItemInNs::Types(def) => (def, self.types.iter()),
+ ItemInNs::Values(def) => (def, self.values.iter()),
+ };
+ iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis)))
+ }
+
+ pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a {
+ self.types
+ .values()
+ .filter_map(|&(def, _)| match def {
+ ModuleDefId::TraitId(t) => Some(t),
+ _ => None,
+ })
+ .chain(self.unnamed_trait_imports.keys().copied())
+ }
+
+ pub(crate) fn declare(&mut self, def: ModuleDefId) {
+ self.declarations.push(def)
+ }
+
+ pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option<&[MacroId]> {
+ self.legacy_macros.get(name).map(|it| &**it)
+ }
+
+ pub(crate) fn define_impl(&mut self, imp: ImplId) {
+ self.impls.push(imp)
+ }
+
+ pub(crate) fn define_unnamed_const(&mut self, konst: ConstId) {
+ self.unnamed_consts.push(konst);
+ }
+
+ pub(crate) fn define_legacy_macro(&mut self, name: Name, mac: MacroId) {
+ self.legacy_macros.entry(name).or_default().push(mac);
+ }
+
+ pub(crate) fn add_attr_macro_invoc(&mut self, item: AstId<ast::Item>, call: MacroCallId) {
+ self.attr_macros.insert(item, call);
+ }
+
+ pub(crate) fn attr_macro_invocs(
+ &self,
+ ) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attr_macros.iter().map(|(k, v)| (*k, *v))
+ }
+
+ pub(crate) fn set_derive_macro_invoc(
+ &mut self,
+ adt: AstId<ast::Adt>,
+ call: MacroCallId,
+ id: AttrId,
+ idx: usize,
+ ) {
+ if let Some(derives) = self.derive_macros.get_mut(&adt) {
+ if let Some(DeriveMacroInvocation { derive_call_ids, .. }) =
+ derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id)
+ {
+ derive_call_ids[idx] = Some(call);
+ }
+ }
+ }
+
+ /// We are required to set this up front as derive invocation recording happens out of order
+ /// due to the fixed-point iteration loop being able to record some derives later than others,
+ /// independent of their indices.
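+ /// For example (illustrative), `#[derive(Clone, Debug)]` on an ADT reserves two slots here
+ /// (`len == 2`, both `None`); `set_derive_macro_invoc` later fills each slot by its index once
+ /// the corresponding derive expansion has been recorded.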
+ pub(crate) fn init_derive_attribute(
+ &mut self,
+ adt: AstId<ast::Adt>,
+ attr_id: AttrId,
+ attr_call_id: MacroCallId,
+ len: usize,
+ ) {
+ self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation {
+ attr_id,
+ attr_call_id,
+ derive_call_ids: smallvec![None; len],
+ });
+ }
+
+ pub(crate) fn derive_macro_invocs(
+ &self,
+ ) -> impl Iterator<
+ Item = (
+ AstId<ast::Adt>,
+ impl Iterator<Item = (AttrId, MacroCallId, &[Option<MacroCallId>])>,
+ ),
+ > + '_ {
+ self.derive_macros.iter().map(|(k, v)| {
+ (
+ *k,
+ v.iter().map(|DeriveMacroInvocation { attr_id, attr_call_id, derive_call_ids }| {
+ (*attr_id, *attr_call_id, &**derive_call_ids)
+ }),
+ )
+ })
+ }
+
+ pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
+ self.unnamed_trait_imports.get(&tr).copied()
+ }
+
+ pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) {
+ self.unnamed_trait_imports.insert(tr, vis);
+ }
+
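+ /// Inserts `def` into this scope under `lookup`, returning whether anything changed.
+ /// Roughly: a vacant slot is always filled; an occupied slot is only overwritten when it came
+ /// from a glob import and the new resolution is a named import (named imports shadow globs,
+ /// never the other way around).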
+ pub(crate) fn push_res_with_import(
+ &mut self,
+ glob_imports: &mut PerNsGlobImports,
+ lookup: (LocalModuleId, Name),
+ def: PerNs,
+ def_import_type: ImportType,
+ ) -> bool {
+ let mut changed = false;
+
+ macro_rules! check_changed {
+ (
+ $changed:ident,
+ ( $this:ident / $def:ident ) . $field:ident,
+ $glob_imports:ident [ $lookup:ident ],
+ $def_import_type:ident
+ ) => {{
+ if let Some(fld) = $def.$field {
+ let existing = $this.$field.entry($lookup.1.clone());
+ match existing {
+ Entry::Vacant(entry) => {
+ match $def_import_type {
+ ImportType::Glob => {
+ $glob_imports.$field.insert($lookup.clone());
+ }
+ ImportType::Named => {
+ $glob_imports.$field.remove(&$lookup);
+ }
+ }
+
+ entry.insert(fld);
+ $changed = true;
+ }
+ Entry::Occupied(mut entry)
+ if $glob_imports.$field.contains(&$lookup)
+ && matches!($def_import_type, ImportType::Named) =>
+ {
+ cov_mark::hit!(import_shadowed);
+ $glob_imports.$field.remove(&$lookup);
+ entry.insert(fld);
+ $changed = true;
+ }
+ _ => {}
+ }
+ }
+ }};
+ }
+
+ check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type);
+ check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type);
+ check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type);
+
+ if def.is_none() && self.unresolved.insert(lookup.1) {
+ changed = true;
+ }
+
+ changed
+ }
+
+ pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Option<Name>, PerNs)> + 'a {
+ self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
+ self.unnamed_trait_imports
+ .iter()
+ .map(|(tr, vis)| (None, PerNs::types(ModuleDefId::TraitId(*tr), *vis))),
+ )
+ }
+
+ pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, SmallVec<[MacroId; 1]>> {
+ self.legacy_macros.clone()
+ }
+
+ /// Marks everything that is not a procedural macro as private to `this_module`.
+ pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
+ self.types
+ .values_mut()
+ .chain(self.values.values_mut())
+ .map(|(_, v)| v)
+ .chain(self.unnamed_trait_imports.values_mut())
+ .for_each(|vis| *vis = Visibility::Module(this_module));
+
+ for (mac, vis) in self.macros.values_mut() {
+ if let MacroId::ProcMacroId(_) = mac {
+ // FIXME: Technically this is insufficient since reexports of proc macros are also
+ // forbidden. Practically nobody does that.
+ continue;
+ }
+
+ *vis = Visibility::Module(this_module);
+ }
+ }
+
+ pub(crate) fn dump(&self, buf: &mut String) {
+ let mut entries: Vec<_> = self.resolutions().collect();
+ entries.sort_by_key(|(name, _)| name.clone());
+
+ for (name, def) in entries {
+ format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string()));
+
+ if def.types.is_some() {
+ buf.push_str(" t");
+ }
+ if def.values.is_some() {
+ buf.push_str(" v");
+ }
+ if def.macros.is_some() {
+ buf.push_str(" m");
+ }
+ if def.is_none() {
+ buf.push_str(" _");
+ }
+
+ buf.push('\n');
+ }
+ }
+
+ pub(crate) fn shrink_to_fit(&mut self) {
+ // Exhaustive match to require handling new fields.
+ let Self {
+ _c: _,
+ types,
+ values,
+ macros,
+ unresolved,
+ declarations,
+ impls,
+ unnamed_consts,
+ unnamed_trait_imports,
+ legacy_macros,
+ attr_macros,
+ derive_macros,
+ } = self;
+ types.shrink_to_fit();
+ values.shrink_to_fit();
+ macros.shrink_to_fit();
+ unresolved.shrink_to_fit();
+ declarations.shrink_to_fit();
+ impls.shrink_to_fit();
+ unnamed_consts.shrink_to_fit();
+ unnamed_trait_imports.shrink_to_fit();
+ legacy_macros.shrink_to_fit();
+ attr_macros.shrink_to_fit();
+ derive_macros.shrink_to_fit();
+ }
+}
+
+impl PerNs {
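+ /// Maps a freshly declared item to the namespaces it occupies. For instance, unit and tuple
+ /// structs (`has_constructor == true`) land in both the type and the value namespace, while a
+ /// record struct only occupies the type namespace.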
+ pub(crate) fn from_def(def: ModuleDefId, v: Visibility, has_constructor: bool) -> PerNs {
+ match def {
+ ModuleDefId::ModuleId(_) => PerNs::types(def, v),
+ ModuleDefId::FunctionId(_) => PerNs::values(def, v),
+ ModuleDefId::AdtId(adt) => match adt {
+ AdtId::UnionId(_) => PerNs::types(def, v),
+ AdtId::EnumId(_) => PerNs::types(def, v),
+ AdtId::StructId(_) => {
+ if has_constructor {
+ PerNs::both(def, def, v)
+ } else {
+ PerNs::types(def, v)
+ }
+ }
+ },
+ ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v),
+ ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v),
+ ModuleDefId::TraitId(_) => PerNs::types(def, v),
+ ModuleDefId::TypeAliasId(_) => PerNs::types(def, v),
+ ModuleDefId::BuiltinType(_) => PerNs::types(def, v),
+ ModuleDefId::MacroId(mac) => PerNs::macros(mac, v),
+ }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum ItemInNs {
+ Types(ModuleDefId),
+ Values(ModuleDefId),
+ Macros(MacroId),
+}
+
+impl ItemInNs {
+ pub fn as_module_def_id(self) -> Option<ModuleDefId> {
+ match self {
+ ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
+ ItemInNs::Macros(_) => None,
+ }
+ }
+
+ /// Returns the crate defining this item (or `None` if `self` is built-in).
+ pub fn krate(&self, db: &dyn DefDatabase) -> Option<CrateId> {
+ match self {
+ ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate),
+ ItemInNs::Macros(id) => Some(id.module(db).krate),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
new file mode 100644
index 000000000..375587ee9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -0,0 +1,961 @@
+//! A simplified AST that only contains items.
+//!
+//! This is the primary IR used throughout `hir_def`. It is the input to the name resolution
+//! algorithm, as well as to the queries defined in `adt.rs`, `data.rs`, and most things in
+//! `attr.rs`.
+//!
+//! `ItemTree`s are built per `HirFileId`, from the syntax tree of the parsed file. This means that
+//! they are crate-independent: they don't know which `#[cfg]`s are active or which module they
+//! belong to, since those concepts don't exist at this level (a single `ItemTree` might be part of
+//! multiple crates, or might be included into the same crate twice via `#[path]`).
+//!
+//! One important purpose of this layer is to provide an "invalidation barrier" for incremental
+//! computations: when typing inside an item body, the `ItemTree` of the modified file is typically
+//! unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
+//!
+//! The `ItemTree` for the currently open file can be displayed by using the VS Code command
+//! "Rust Analyzer: Debug ItemTree".
+//!
+//! Compared to rustc's architecture, `ItemTree` has properties from both rustc's AST and HIR: many
+//! syntax-level Rust features are already desugared to simpler forms in the `ItemTree`, but name
+//! resolution has not yet been performed. `ItemTree`s are per-file, while rustc's AST and HIR are
+//! per-crate, because we are interested in incrementally computing it.
+//!
+//! The representation of items in the `ItemTree` should generally mirror the surface syntax: it is
+//! usually a bad idea to desugar a syntax-level construct to something that is structurally
+//! different here. Name resolution needs to be able to process attributes and expand macros
+//! (including attribute macros), and having a 1-to-1 mapping between syntax and the `ItemTree`
+//! avoids introducing subtle bugs.
+//!
+//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
+//! surface syntax.
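+//!
+//! As a rough, illustrative example: lowering a file containing `use std::fmt;`, `fn main() {}`
+//! and `struct S;` yields an `ItemTree` whose `top_level` holds an `Import`, a `Function` and a
+//! `Struct` entry, each storing the `FileAstId` of its originating syntax node.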
+
+mod lower;
+mod pretty;
+#[cfg(test)]
+mod tests;
+
+use std::{
+ fmt::{self, Debug},
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+ ops::Index,
+ sync::Arc,
+};
+
+use ast::{AstNode, HasName, StructKind};
+use base_db::CrateId;
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ hygiene::Hygiene,
+ name::{name, AsName, Name},
+ ExpandTo, HirFileId, InFile,
+};
+use la_arena::{Arena, Idx, IdxRange, RawIdx};
+use profile::Count;
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::never;
+use syntax::{ast, match_ast, SyntaxKind};
+
+use crate::{
+ attr::{Attrs, RawAttrs},
+ db::DefDatabase,
+ generics::GenericParams,
+ intern::Interned,
+ path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
+ type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ BlockId,
+};
+
+#[derive(Copy, Clone, Eq, PartialEq)]
+pub struct RawVisibilityId(u32);
+
+impl RawVisibilityId {
+ pub const PUB: Self = RawVisibilityId(u32::max_value());
+ pub const PRIV: Self = RawVisibilityId(u32::max_value() - 1);
+ pub const PUB_CRATE: Self = RawVisibilityId(u32::max_value() - 2);
+}
+
+impl fmt::Debug for RawVisibilityId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut f = f.debug_tuple("RawVisibilityId");
+ match *self {
+ Self::PUB => f.field(&"pub"),
+ Self::PRIV => f.field(&"pub(self)"),
+ Self::PUB_CRATE => f.field(&"pub(crate)"),
+ _ => f.field(&self.0),
+ };
+ f.finish()
+ }
+}
+
+/// The item tree of a source file.
+#[derive(Debug, Default, Eq, PartialEq)]
+pub struct ItemTree {
+ _c: Count<Self>,
+
+ top_level: SmallVec<[ModItem; 1]>,
+ attrs: FxHashMap<AttrOwner, RawAttrs>,
+
+ data: Option<Box<ItemTreeData>>,
+}
+
+impl ItemTree {
+ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
+ let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id));
+ let syntax = match db.parse_or_expand(file_id) {
+ Some(node) => node,
+ None => return Default::default(),
+ };
+ if never!(syntax.kind() == SyntaxKind::ERROR) {
+ // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
+ return Default::default();
+ }
+
+ let ctx = lower::Ctx::new(db, file_id);
+ let mut top_attrs = None;
+ let mut item_tree = match_ast! {
+ match syntax {
+ ast::SourceFile(file) => {
+ top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene()));
+ ctx.lower_module_items(&file)
+ },
+ ast::MacroItems(items) => {
+ ctx.lower_module_items(&items)
+ },
+ ast::MacroStmts(stmts) => {
+ // The produced statements can include items, which should be added as top-level
+ // items.
+ ctx.lower_macro_stmts(stmts)
+ },
+ _ => {
+ panic!("cannot create item tree from {:?} {}", syntax, syntax);
+ },
+ }
+ };
+
+ if let Some(attrs) = top_attrs {
+ item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
+ }
+ item_tree.shrink_to_fit();
+ Arc::new(item_tree)
+ }
+
+ /// Returns the items located at the top level of the `HirFileId` this `ItemTree` was
+ /// created from.
+ pub fn top_level_items(&self) -> &[ModItem] {
+ &self.top_level
+ }
+
+ /// Returns the inner attributes of the source file.
+ pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
+ self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate)
+ }
+
+ pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs {
+ self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
+ }
+
+ pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs {
+ self.raw_attrs(of).clone().filter(db, krate)
+ }
+
+ pub fn pretty_print(&self) -> String {
+ pretty::print_item_tree(self)
+ }
+
+ fn data(&self) -> &ItemTreeData {
+ self.data.as_ref().expect("attempted to access data of empty ItemTree")
+ }
+
+ fn data_mut(&mut self) -> &mut ItemTreeData {
+ self.data.get_or_insert_with(Box::default)
+ }
+
+ fn block_item_tree(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
+ let loc = db.lookup_intern_block(block);
+ let block = loc.ast_id.to_node(db.upcast());
+ let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
+ Arc::new(ctx.lower_block(&block))
+ }
+
+ fn shrink_to_fit(&mut self) {
+ if let Some(data) = &mut self.data {
+ let ItemTreeData {
+ imports,
+ extern_crates,
+ extern_blocks,
+ functions,
+ params,
+ structs,
+ fields,
+ unions,
+ enums,
+ variants,
+ consts,
+ statics,
+ traits,
+ impls,
+ type_aliases,
+ mods,
+ macro_calls,
+ macro_rules,
+ macro_defs,
+ vis,
+ } = &mut **data;
+
+ imports.shrink_to_fit();
+ extern_crates.shrink_to_fit();
+ extern_blocks.shrink_to_fit();
+ functions.shrink_to_fit();
+ params.shrink_to_fit();
+ structs.shrink_to_fit();
+ fields.shrink_to_fit();
+ unions.shrink_to_fit();
+ enums.shrink_to_fit();
+ variants.shrink_to_fit();
+ consts.shrink_to_fit();
+ statics.shrink_to_fit();
+ traits.shrink_to_fit();
+ impls.shrink_to_fit();
+ type_aliases.shrink_to_fit();
+ mods.shrink_to_fit();
+ macro_calls.shrink_to_fit();
+ macro_rules.shrink_to_fit();
+ macro_defs.shrink_to_fit();
+
+ vis.arena.shrink_to_fit();
+ }
+ }
+}
+
+#[derive(Default, Debug, Eq, PartialEq)]
+struct ItemVisibilities {
+ arena: Arena<RawVisibility>,
+}
+
+impl ItemVisibilities {
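+ /// Interns a raw visibility. The three common cases (`pub`, `pub(crate)` and private) are
+ /// mapped to the reserved `RawVisibilityId` constants rather than taking up arena slots; path
+ /// visibilities such as `pub(in some::module)` are allocated in the arena.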
+ fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId {
+ match &vis {
+ RawVisibility::Public => RawVisibilityId::PUB,
+ RawVisibility::Module(path) if path.segments().is_empty() => match &path.kind {
+ PathKind::Super(0) => RawVisibilityId::PRIV,
+ PathKind::Crate => RawVisibilityId::PUB_CRATE,
+ _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
+ },
+ _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
+ }
+ }
+}
+
+static VIS_PUB: RawVisibility = RawVisibility::Public;
+static VIS_PRIV: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)));
+static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Crate));
+
+#[derive(Default, Debug, Eq, PartialEq)]
+struct ItemTreeData {
+ imports: Arena<Import>,
+ extern_crates: Arena<ExternCrate>,
+ extern_blocks: Arena<ExternBlock>,
+ functions: Arena<Function>,
+ params: Arena<Param>,
+ structs: Arena<Struct>,
+ fields: Arena<Field>,
+ unions: Arena<Union>,
+ enums: Arena<Enum>,
+ variants: Arena<Variant>,
+ consts: Arena<Const>,
+ statics: Arena<Static>,
+ traits: Arena<Trait>,
+ impls: Arena<Impl>,
+ type_aliases: Arena<TypeAlias>,
+ mods: Arena<Mod>,
+ macro_calls: Arena<MacroCall>,
+ macro_rules: Arena<MacroRules>,
+ macro_defs: Arena<MacroDef>,
+
+ vis: ItemVisibilities,
+}
+
+#[derive(Debug, Eq, PartialEq, Hash)]
+pub enum AttrOwner {
+ /// Attributes on an item.
+ ModItem(ModItem),
+ /// Inner attributes of the source file.
+ TopLevel,
+
+ Variant(Idx<Variant>),
+ Field(Idx<Field>),
+ Param(Idx<Param>),
+}
+
+macro_rules! from_attrs {
+ ( $( $var:ident($t:ty) ),+ ) => {
+ $(
+ impl From<$t> for AttrOwner {
+ fn from(t: $t) -> AttrOwner {
+ AttrOwner::$var(t)
+ }
+ }
+ )+
+ };
+}
+
+from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Idx<Param>));
+
+/// Trait implemented by all item nodes in the item tree.
+pub trait ItemTreeNode: Clone {
+ type Source: AstNode + Into<ast::Item>;
+
+ fn ast_id(&self) -> FileAstId<Self::Source>;
+
+ /// Looks up an instance of `Self` in an item tree.
+ fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self;
+
+ /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type.
+ fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>>;
+
+ /// Upcasts a `FileItemTreeId` to a generic `ModItem`.
+ fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
+}
+
+pub struct FileItemTreeId<N: ItemTreeNode> {
+ index: Idx<N>,
+ _p: PhantomData<N>,
+}
+
+impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
+ fn clone(&self) -> Self {
+ Self { index: self.index, _p: PhantomData }
+ }
+}
+impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}
+
+impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
+ fn eq(&self, other: &FileItemTreeId<N>) -> bool {
+ self.index == other.index
+ }
+}
+impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}
+
+impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state)
+ }
+}
+
+impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.index.fmt(f)
+ }
+}
+
+/// Identifies a particular [`ItemTree`].
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct TreeId {
+ file: HirFileId,
+ block: Option<BlockId>,
+}
+
+impl TreeId {
+ pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
+ Self { file, block }
+ }
+
+ pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ match self.block {
+ Some(block) => ItemTree::block_item_tree(db, block),
+ None => db.file_item_tree(self.file),
+ }
+ }
+
+ pub(crate) fn file_id(self) -> HirFileId {
+ self.file
+ }
+
+ pub(crate) fn is_block(self) -> bool {
+ self.block.is_some()
+ }
+}
+
+#[derive(Debug)]
+pub struct ItemTreeId<N: ItemTreeNode> {
+ tree: TreeId,
+ pub value: FileItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> ItemTreeId<N> {
+ pub fn new(tree: TreeId, idx: FileItemTreeId<N>) -> Self {
+ Self { tree, value: idx }
+ }
+
+ pub fn file_id(self) -> HirFileId {
+ self.tree.file
+ }
+
+ pub fn tree_id(self) -> TreeId {
+ self.tree
+ }
+
+ pub fn item_tree(self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ self.tree.item_tree(db)
+ }
+}
+
+impl<N: ItemTreeNode> Copy for ItemTreeId<N> {}
+impl<N: ItemTreeNode> Clone for ItemTreeId<N> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<N: ItemTreeNode> PartialEq for ItemTreeId<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.tree == other.tree && self.value == other.value
+ }
+}
+
+impl<N: ItemTreeNode> Eq for ItemTreeId<N> {}
+
+impl<N: ItemTreeNode> Hash for ItemTreeId<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.tree.hash(state);
+ self.value.hash(state);
+ }
+}
+
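+// Generates the `ModItem` enum plus, for every item kind, the `From`, `ItemTreeNode` and
+// `Index<Idx<_>>` impls that wire it to its arena in `ItemTreeData` and to its AST node type.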
+macro_rules! mod_items {
+ ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
+ #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
+ pub enum ModItem {
+ $(
+ $typ(FileItemTreeId<$typ>),
+ )+
+ }
+
+ $(
+ impl From<FileItemTreeId<$typ>> for ModItem {
+ fn from(id: FileItemTreeId<$typ>) -> ModItem {
+ ModItem::$typ(id)
+ }
+ }
+ )+
+
+ $(
+ impl ItemTreeNode for $typ {
+ type Source = $ast;
+
+ fn ast_id(&self) -> FileAstId<Self::Source> {
+ self.ast_id
+ }
+
+ fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self {
+ &tree.data().$fld[index]
+ }
+
+ fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>> {
+ match mod_item {
+ ModItem::$typ(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem {
+ ModItem::$typ(id)
+ }
+ }
+
+ impl Index<Idx<$typ>> for ItemTree {
+ type Output = $typ;
+
+ fn index(&self, index: Idx<$typ>) -> &Self::Output {
+ &self.data().$fld[index]
+ }
+ }
+ )+
+ };
+}
+
+mod_items! {
+ Import in imports -> ast::Use,
+ ExternCrate in extern_crates -> ast::ExternCrate,
+ ExternBlock in extern_blocks -> ast::ExternBlock,
+ Function in functions -> ast::Fn,
+ Struct in structs -> ast::Struct,
+ Union in unions -> ast::Union,
+ Enum in enums -> ast::Enum,
+ Const in consts -> ast::Const,
+ Static in statics -> ast::Static,
+ Trait in traits -> ast::Trait,
+ Impl in impls -> ast::Impl,
+ TypeAlias in type_aliases -> ast::TypeAlias,
+ Mod in mods -> ast::Module,
+ MacroCall in macro_calls -> ast::MacroCall,
+ MacroRules in macro_rules -> ast::MacroRules,
+ MacroDef in macro_defs -> ast::MacroDef,
+}
+
+macro_rules! impl_index {
+ ( $($fld:ident: $t:ty),+ $(,)? ) => {
+ $(
+ impl Index<Idx<$t>> for ItemTree {
+ type Output = $t;
+
+ fn index(&self, index: Idx<$t>) -> &Self::Output {
+ &self.data().$fld[index]
+ }
+ }
+ )+
+ };
+}
+
+impl_index!(fields: Field, variants: Variant, params: Param);
+
+impl Index<RawVisibilityId> for ItemTree {
+ type Output = RawVisibility;
+ fn index(&self, index: RawVisibilityId) -> &Self::Output {
+ match index {
+ RawVisibilityId::PRIV => &VIS_PRIV,
+ RawVisibilityId::PUB => &VIS_PUB,
+ RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE,
+ _ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
+ }
+ }
+}
+
+impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
+ type Output = N;
+ fn index(&self, id: FileItemTreeId<N>) -> &N {
+ N::lookup(self, id.index)
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Import {
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::Use>,
+ pub use_tree: UseTree,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UseTree {
+ pub index: Idx<ast::UseTree>,
+ kind: UseTreeKind,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum UseTreeKind {
+ /// ```
+ /// use path::to::Item;
+ /// use path::to::Item as Renamed;
+ /// use path::to::Trait as _;
+ /// ```
+ Single { path: Interned<ModPath>, alias: Option<ImportAlias> },
+
+ /// ```
+ /// use *; // (invalid, but can occur in nested tree)
+ /// use path::*;
+ /// ```
+ Glob { path: Option<Interned<ModPath>> },
+
+ /// ```
+ /// use prefix::{self, Item, ...};
+ /// ```
+ Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> },
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ExternCrate {
+ pub name: Name,
+ pub alias: Option<ImportAlias>,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::ExternCrate>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ExternBlock {
+ pub abi: Option<Interned<str>>,
+ pub ast_id: FileAstId<ast::ExternBlock>,
+ pub children: Box<[ModItem]>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Function {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub explicit_generic_params: Interned<GenericParams>,
+ pub abi: Option<Interned<str>>,
+ pub params: IdxRange<Param>,
+ pub ret_type: Interned<TypeRef>,
+ pub async_ret_type: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::Fn>,
+ pub(crate) flags: FnFlags,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Param {
+ Normal(Option<Name>, Interned<TypeRef>),
+ Varargs,
+}
+
+bitflags::bitflags! {
+ #[derive(Default)]
+ pub(crate) struct FnFlags: u8 {
+ const HAS_SELF_PARAM = 1 << 0;
+ const HAS_BODY = 1 << 1;
+ const HAS_DEFAULT_KW = 1 << 2;
+ const HAS_CONST_KW = 1 << 3;
+ const HAS_ASYNC_KW = 1 << 4;
+ const HAS_UNSAFE_KW = 1 << 5;
+ const IS_VARARGS = 1 << 6;
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Struct {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Struct>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Union {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Union>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Enum {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub variants: IdxRange<Variant>,
+ pub ast_id: FileAstId<ast::Enum>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Const {
+ /// `None` for `const _: () = ();`
+ pub name: Option<Name>,
+ pub visibility: RawVisibilityId,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Const>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Static {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub mutable: bool,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Static>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Trait {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub is_auto: bool,
+ pub is_unsafe: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Trait>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Impl {
+ pub generic_params: Interned<GenericParams>,
+ pub target_trait: Option<Interned<TraitRef>>,
+ pub self_ty: Interned<TypeRef>,
+ pub is_negative: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Impl>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAlias {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ /// Bounds on the type alias itself. Only valid in trait declarations, e.g. `type Assoc: Copy;`.
+ pub bounds: Box<[Interned<TypeBound>]>,
+ pub generic_params: Interned<GenericParams>,
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::TypeAlias>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Mod {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub kind: ModKind,
+ pub ast_id: FileAstId<ast::Module>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum ModKind {
+ /// `mod m { ... }`
+ Inline { items: Box<[ModItem]> },
+
+ /// `mod m;`
+ Outline,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroCall {
+ /// Path to the called macro.
+ pub path: Interned<ModPath>,
+ pub ast_id: FileAstId<ast::MacroCall>,
+ pub expand_to: ExpandTo,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroRules {
+ /// The name of the declared macro.
+ pub name: Name,
+ pub ast_id: FileAstId<ast::MacroRules>,
+}
+
+/// "Macros 2.0" macro definition.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroDef {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::MacroDef>,
+}
+
+impl Import {
+ /// Maps a `UseTree` contained in this import back to its AST node.
+ pub fn use_tree_to_ast(
+ &self,
+ db: &dyn DefDatabase,
+ file_id: HirFileId,
+ index: Idx<ast::UseTree>,
+ ) -> ast::UseTree {
+ // Re-lower the AST item and get the source map.
+ // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
+ let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
+ let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
+ let hygiene = Hygiene::new(db.upcast(), file_id);
+ let (_, source_map) =
+ lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+ source_map[index].clone()
+ }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ImportKind {
+ /// The `ModPath` is imported normally.
+ Plain,
+ /// This is a glob-import of all names in the `ModPath`.
+ Glob,
+ /// This is a `some::path::self` import, which imports `some::path` only in the type namespace.
+ TypeOnly,
+}
+
+impl UseTree {
+ /// Expands the `UseTree` into individually imported `ModPath`s.
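+ /// For example (illustrative), `use a::{b, c::*}` is reported to `cb` as the plain import
+ /// `a::b` and the glob import `a::c`, each together with the `Idx<ast::UseTree>` of the
+ /// sub-tree it came from.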
+ pub fn expand(
+ &self,
+ mut cb: impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ self.expand_impl(None, &mut cb)
+ }
+
+ fn expand_impl(
+ &self,
+ prefix: Option<ModPath>,
+ cb: &mut dyn FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ fn concat_mod_paths(
+ prefix: Option<ModPath>,
+ path: &ModPath,
+ ) -> Option<(ModPath, ImportKind)> {
+ match (prefix, &path.kind) {
+ (None, _) => Some((path.clone(), ImportKind::Plain)),
+ (Some(mut prefix), PathKind::Plain) => {
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ (Some(mut prefix), PathKind::Super(n))
+ if *n > 0 && prefix.segments().is_empty() =>
+ {
+ // `super::super` + `super::rest`
+ match &mut prefix.kind {
+ PathKind::Super(m) => {
+ cov_mark::hit!(concat_super_mod_paths);
+ *m += *n;
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ _ => None,
+ }
+ }
+ (Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => {
+ // `some::path::self` == `some::path`
+ Some((prefix, ImportKind::TypeOnly))
+ }
+ (Some(_), _) => None,
+ }
+ }
+
+ match &self.kind {
+ UseTreeKind::Single { path, alias } => {
+ if let Some((path, kind)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, kind, alias.clone());
+ }
+ }
+ UseTreeKind::Glob { path: Some(path) } => {
+ if let Some((path, _)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Glob { path: None } => {
+ if let Some(prefix) = prefix {
+ cb(self.index, prefix, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Prefixed { prefix: additional_prefix, list } => {
+ let prefix = match additional_prefix {
+ Some(path) => match concat_mod_paths(prefix, path) {
+ Some((path, ImportKind::Plain)) => Some(path),
+ _ => return,
+ },
+ None => prefix,
+ };
+ for tree in &**list {
+ tree.expand_impl(prefix.clone(), cb);
+ }
+ }
+ }
+ }
+}
+
+macro_rules! impl_froms {
+ ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => {
+ $(
+ impl From<$t> for $e {
+ fn from(it: $t) -> $e {
+ $e::$v(it)
+ }
+ }
+ )*
+ }
+}
+
+impl ModItem {
+ pub fn as_assoc_item(&self) -> Option<AssocItem> {
+ match self {
+ ModItem::Import(_)
+ | ModItem::ExternCrate(_)
+ | ModItem::ExternBlock(_)
+ | ModItem::Struct(_)
+ | ModItem::Union(_)
+ | ModItem::Enum(_)
+ | ModItem::Static(_)
+ | ModItem::Trait(_)
+ | ModItem::Impl(_)
+ | ModItem::Mod(_)
+ | ModItem::MacroRules(_)
+ | ModItem::MacroDef(_) => None,
+ ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)),
+ ModItem::Const(konst) => Some(AssocItem::Const(*konst)),
+ ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)),
+ ModItem::Function(func) => Some(AssocItem::Function(*func)),
+ }
+ }
+
+ pub fn downcast<N: ItemTreeNode>(self) -> Option<FileItemTreeId<N>> {
+ N::id_from_mod_item(self)
+ }
+
+ pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
+ match self {
+ ModItem::Import(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Function(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Union(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Const(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Static(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
+ ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+}
+
+impl_froms!(AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+});
+
+impl From<AssocItem> for ModItem {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::TypeAlias(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::MacroCall(it) => it.into(),
+ }
+ }
+}
+
+impl AssocItem {
+ pub fn ast_id(self, tree: &ItemTree) -> FileAstId<ast::AssocItem> {
+ match self {
+ AssocItem::Function(id) => tree[id].ast_id.upcast(),
+ AssocItem::TypeAlias(id) => tree[id].ast_id.upcast(),
+ AssocItem::Const(id) => tree[id].ast_id.upcast(),
+ AssocItem::MacroCall(id) => tree[id].ast_id.upcast(),
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct Variant {
+ pub name: Name,
+ pub fields: Fields,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Fields {
+ Record(IdxRange<Field>),
+ Tuple(IdxRange<Field>),
+ Unit,
+}
+
+/// A single field of an enum variant or struct
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Field {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibilityId,
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
new file mode 100644
index 000000000..7f2551e94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -0,0 +1,773 @@
+//! AST -> `ItemTree` lowering code.
+
+use std::{collections::hash_map::Entry, sync::Arc};
+
+use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
+use syntax::ast::{self, HasModuleItem};
+
+use crate::{
+ generics::{GenericParams, TypeParamData, TypeParamProvenance},
+ type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
+};
+
+use super::*;
+
+fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
+ FileItemTreeId { index, _p: PhantomData }
+}
+
+pub(super) struct Ctx<'a> {
+ db: &'a dyn DefDatabase,
+ tree: ItemTree,
+ source_ast_id_map: Arc<AstIdMap>,
+ body_ctx: crate::body::LowerCtx<'a>,
+}
+
+impl<'a> Ctx<'a> {
+ pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId) -> Self {
+ Self {
+ db,
+ tree: ItemTree::default(),
+ source_ast_id_map: db.ast_id_map(file),
+ body_ctx: crate::body::LowerCtx::new(db, file),
+ }
+ }
+
+ pub(super) fn hygiene(&self) -> &Hygiene {
+ self.body_ctx.hygiene()
+ }
+
+ pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
+ self.tree.top_level =
+ item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect();
+ self.tree
+ }
+
+ pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
+ self.tree.top_level = stmts
+ .statements()
+ .filter_map(|stmt| {
+ match stmt {
+ ast::Stmt::Item(item) => Some(item),
+ // Macro calls can be both items and expressions. The syntax library always treats
+ // them as expressions here, so we undo that.
+ ast::Stmt::ExprStmt(es) => match es.expr()? {
+ ast::Expr::MacroExpr(expr) => {
+ cov_mark::hit!(macro_call_in_macro_stmts_is_added_to_item_tree);
+ Some(expr.macro_call()?.into())
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ })
+ .flat_map(|item| self.lower_mod_item(&item))
+ .collect();
+
+ if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() {
+ if let Some(call) = tail_macro.macro_call() {
+ cov_mark::hit!(macro_stmt_with_trailing_macro_expr);
+ if let Some(mod_item) = self.lower_mod_item(&call.into()) {
+ self.tree.top_level.push(mod_item);
+ }
+ }
+ }
+
+ self.tree
+ }
+
+ pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
+ self.tree.top_level = block
+ .statements()
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::Item(item) => self.lower_mod_item(&item),
+ // Macro calls can be both items and expressions. The syntax library always treats
+ // them as expressions here, so we undo that.
+ ast::Stmt::ExprStmt(es) => match es.expr()? {
+ ast::Expr::MacroExpr(expr) => self.lower_mod_item(&expr.macro_call()?.into()),
+ _ => None,
+ },
+ _ => None,
+ })
+ .collect();
+
+ self.tree
+ }
+
+ fn data(&mut self) -> &mut ItemTreeData {
+ self.tree.data_mut()
+ }
+
+ fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
+ let attrs = RawAttrs::new(self.db, item, self.hygiene());
+ let item: ModItem = match item {
+ ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
+ ast::Item::Union(ast) => self.lower_union(ast)?.into(),
+ ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
+ ast::Item::Fn(ast) => self.lower_function(ast)?.into(),
+ ast::Item::TypeAlias(ast) => self.lower_type_alias(ast)?.into(),
+ ast::Item::Static(ast) => self.lower_static(ast)?.into(),
+ ast::Item::Const(ast) => self.lower_const(ast).into(),
+ ast::Item::Module(ast) => self.lower_module(ast)?.into(),
+ ast::Item::Trait(ast) => self.lower_trait(ast)?.into(),
+ ast::Item::Impl(ast) => self.lower_impl(ast)?.into(),
+ ast::Item::Use(ast) => self.lower_use(ast)?.into(),
+ ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(),
+ ast::Item::MacroCall(ast) => self.lower_macro_call(ast)?.into(),
+ ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(),
+ ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
+ ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
+ };
+
+ self.add_attrs(item.into(), attrs);
+
+ Some(item)
+ }
+
+ fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
+ match self.tree.attrs.entry(item) {
+ Entry::Occupied(mut entry) => {
+ *entry.get_mut() = entry.get().merge(attrs);
+ }
+ Entry::Vacant(entry) => {
+ entry.insert(attrs);
+ }
+ }
+ }
+
+ fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
+ match item {
+ ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
+ ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
+ ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
+ ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
+ }
+ }
+
+ fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
+ let visibility = self.lower_visibility(strukt);
+ let name = strukt.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt);
+ let fields = self.lower_fields(&strukt.kind());
+ let ast_id = self.source_ast_id_map.ast_id(strukt);
+ let res = Struct { name, visibility, generic_params, fields, ast_id };
+ Some(id(self.data().structs.alloc(res)))
+ }
+
+ fn lower_fields(&mut self, strukt_kind: &ast::StructKind) -> Fields {
+ match strukt_kind {
+ ast::StructKind::Record(it) => {
+ let range = self.lower_record_fields(it);
+ Fields::Record(range)
+ }
+ ast::StructKind::Tuple(it) => {
+ let range = self.lower_tuple_fields(it);
+ Fields::Tuple(range)
+ }
+ ast::StructKind::Unit => Fields::Unit,
+ }
+ }
+
+ fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdxRange<Field> {
+ let start = self.next_field_idx();
+ for field in fields.fields() {
+ if let Some(data) = self.lower_record_field(&field) {
+ let idx = self.data().fields.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+ }
+ }
+ let end = self.next_field_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_record_field(&mut self, field: &ast::RecordField) -> Option<Field> {
+ let name = field.name()?.as_name();
+ let visibility = self.lower_visibility(field);
+ let type_ref = self.lower_type_ref_opt(field.ty());
+ let res = Field { name, type_ref, visibility };
+ Some(res)
+ }
+
+ fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdxRange<Field> {
+ let start = self.next_field_idx();
+ for (i, field) in fields.fields().enumerate() {
+ let data = self.lower_tuple_field(i, &field);
+ let idx = self.data().fields.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+ }
+ let end = self.next_field_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field {
+ let name = Name::new_tuple_field(idx);
+ let visibility = self.lower_visibility(field);
+ let type_ref = self.lower_type_ref_opt(field.ty());
+ Field { name, type_ref, visibility }
+ }
+
+ fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
+ let visibility = self.lower_visibility(union);
+ let name = union.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Union, union);
+ let fields = match union.record_field_list() {
+ Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
+ None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
+ };
+ let ast_id = self.source_ast_id_map.ast_id(union);
+ let res = Union { name, visibility, generic_params, fields, ast_id };
+ Some(id(self.data().unions.alloc(res)))
+ }
+
+ fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
+ let visibility = self.lower_visibility(enum_);
+ let name = enum_.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_);
+ let variants = match &enum_.variant_list() {
+ Some(variant_list) => self.lower_variants(variant_list),
+ None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
+ };
+ let ast_id = self.source_ast_id_map.ast_id(enum_);
+ let res = Enum { name, visibility, generic_params, variants, ast_id };
+ Some(id(self.data().enums.alloc(res)))
+ }
+
+ fn lower_variants(&mut self, variants: &ast::VariantList) -> IdxRange<Variant> {
+ let start = self.next_variant_idx();
+ for variant in variants.variants() {
+ if let Some(data) = self.lower_variant(&variant) {
+ let idx = self.data().variants.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &variant, self.hygiene()));
+ }
+ }
+ let end = self.next_variant_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_variant(&mut self, variant: &ast::Variant) -> Option<Variant> {
+ let name = variant.name()?.as_name();
+ let fields = self.lower_fields(&variant.kind());
+ let res = Variant { name, fields };
+ Some(res)
+ }
+
+ fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
+ let visibility = self.lower_visibility(func);
+ let name = func.name()?.as_name();
+
+ let mut has_self_param = false;
+ let start_param = self.next_param_idx();
+ if let Some(param_list) = func.param_list() {
+ if let Some(self_param) = param_list.self_param() {
+ let self_type = match self_param.ty() {
+ Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
+ None => {
+ let self_type = TypeRef::Path(name![Self].into());
+ match self_param.kind() {
+ ast::SelfParamKind::Owned => self_type,
+ ast::SelfParamKind::Ref => TypeRef::Reference(
+ Box::new(self_type),
+ self_param.lifetime().as_ref().map(LifetimeRef::new),
+ Mutability::Shared,
+ ),
+ ast::SelfParamKind::MutRef => TypeRef::Reference(
+ Box::new(self_type),
+ self_param.lifetime().as_ref().map(LifetimeRef::new),
+ Mutability::Mut,
+ ),
+ }
+ }
+ };
+ let ty = Interned::new(self_type);
+ let idx = self.data().params.alloc(Param::Normal(None, ty));
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &self_param, self.hygiene()));
+ has_self_param = true;
+ }
+ for param in param_list.params() {
+ let idx = match param.dotdotdot_token() {
+ Some(_) => self.data().params.alloc(Param::Varargs),
+ None => {
+ let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
+ let ty = Interned::new(type_ref);
+ let mut pat = param.pat();
+ // FIXME: This really shouldn't be here; in fact `FunctionData`/`ItemTree`'s `Function`
+ // shouldn't know about pattern names at all.
+ let name = 'name: loop {
+ match pat {
+ Some(ast::Pat::RefPat(ref_pat)) => pat = ref_pat.pat(),
+ Some(ast::Pat::IdentPat(ident)) => {
+ break 'name ident.name().map(|it| it.as_name())
+ }
+ _ => break 'name None,
+ }
+ };
+ self.data().params.alloc(Param::Normal(name, ty))
+ }
+ };
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &param, self.hygiene()));
+ }
+ }
+ let end_param = self.next_param_idx();
+ let params = IdxRange::new(start_param..end_param);
+
+ let ret_type = match func.ret_type() {
+ Some(rt) => match rt.ty() {
+ Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
+ None if rt.thin_arrow_token().is_some() => TypeRef::Error,
+ None => TypeRef::unit(),
+ },
+ None => TypeRef::unit(),
+ };
+
+ let (ret_type, async_ret_type) = if func.async_token().is_some() {
+ let async_ret_type = ret_type.clone();
+ let future_impl = desugar_future_path(ret_type);
+ let ty_bound = Interned::new(TypeBound::Path(future_impl, TraitBoundModifier::None));
+ (TypeRef::ImplTrait(vec![ty_bound]), Some(async_ret_type))
+ } else {
+ (ret_type, None)
+ };
+
+ let abi = func.abi().map(lower_abi);
+
+ let ast_id = self.source_ast_id_map.ast_id(func);
+
+ let mut flags = FnFlags::default();
+ if func.body().is_some() {
+ flags |= FnFlags::HAS_BODY;
+ }
+ if has_self_param {
+ flags |= FnFlags::HAS_SELF_PARAM;
+ }
+ if func.default_token().is_some() {
+ flags |= FnFlags::HAS_DEFAULT_KW;
+ }
+ if func.const_token().is_some() {
+ flags |= FnFlags::HAS_CONST_KW;
+ }
+ if func.async_token().is_some() {
+ flags |= FnFlags::HAS_ASYNC_KW;
+ }
+ if func.unsafe_token().is_some() {
+ flags |= FnFlags::HAS_UNSAFE_KW;
+ }
+
+ let mut res = Function {
+ name,
+ visibility,
+ explicit_generic_params: Interned::new(GenericParams::default()),
+ abi,
+ params,
+ ret_type: Interned::new(ret_type),
+ async_ret_type: async_ret_type.map(Interned::new),
+ ast_id,
+ flags,
+ };
+ res.explicit_generic_params =
+ self.lower_generic_params(GenericsOwner::Function(&res), func);
+
+ Some(id(self.data().functions.alloc(res)))
+ }
+
+ fn lower_type_alias(
+ &mut self,
+ type_alias: &ast::TypeAlias,
+ ) -> Option<FileItemTreeId<TypeAlias>> {
+ let name = type_alias.name()?.as_name();
+ let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
+ let visibility = self.lower_visibility(type_alias);
+ let bounds = self.lower_type_bounds(type_alias);
+ let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias);
+ let ast_id = self.source_ast_id_map.ast_id(type_alias);
+ let res = TypeAlias {
+ name,
+ visibility,
+ bounds: bounds.into_boxed_slice(),
+ generic_params,
+ type_ref,
+ ast_id,
+ };
+ Some(id(self.data().type_aliases.alloc(res)))
+ }
+
+ fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
+ let name = static_.name()?.as_name();
+ let type_ref = self.lower_type_ref_opt(static_.ty());
+ let visibility = self.lower_visibility(static_);
+ let mutable = static_.mut_token().is_some();
+ let ast_id = self.source_ast_id_map.ast_id(static_);
+ let res = Static { name, visibility, mutable, type_ref, ast_id };
+ Some(id(self.data().statics.alloc(res)))
+ }
+
+ fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
+ let name = konst.name().map(|it| it.as_name());
+ let type_ref = self.lower_type_ref_opt(konst.ty());
+ let visibility = self.lower_visibility(konst);
+ let ast_id = self.source_ast_id_map.ast_id(konst);
+ let res = Const { name, visibility, type_ref, ast_id };
+ id(self.data().consts.alloc(res))
+ }
+
+ fn lower_module(&mut self, module: &ast::Module) -> Option<FileItemTreeId<Mod>> {
+ let name = module.name()?.as_name();
+ let visibility = self.lower_visibility(module);
+ let kind = if module.semicolon_token().is_some() {
+ ModKind::Outline
+ } else {
+ ModKind::Inline {
+ items: module
+ .item_list()
+ .map(|list| list.items().flat_map(|item| self.lower_mod_item(&item)).collect())
+ .unwrap_or_else(|| {
+ cov_mark::hit!(name_res_works_for_broken_modules);
+ Box::new([]) as Box<[_]>
+ }),
+ }
+ };
+ let ast_id = self.source_ast_id_map.ast_id(module);
+ let res = Mod { name, visibility, kind, ast_id };
+ Some(id(self.data().mods.alloc(res)))
+ }
+
+ fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
+ let name = trait_def.name()?.as_name();
+ let visibility = self.lower_visibility(trait_def);
+ let generic_params = self.lower_generic_params(GenericsOwner::Trait(trait_def), trait_def);
+ let is_auto = trait_def.auto_token().is_some();
+ let is_unsafe = trait_def.unsafe_token().is_some();
+ let items = trait_def.assoc_item_list().map(|list| {
+ list.assoc_items()
+ .filter_map(|item| {
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ self.lower_assoc_item(&item).map(|item| {
+ self.add_attrs(ModItem::from(item).into(), attrs);
+ item
+ })
+ })
+ .collect()
+ });
+ let ast_id = self.source_ast_id_map.ast_id(trait_def);
+ let res = Trait {
+ name,
+ visibility,
+ generic_params,
+ is_auto,
+ is_unsafe,
+ items: items.unwrap_or_default(),
+ ast_id,
+ };
+ Some(id(self.data().traits.alloc(res)))
+ }
+
+ fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
+ let generic_params = self.lower_generic_params(GenericsOwner::Impl, impl_def);
+ // FIXME: If trait lowering fails, e.g. due to a non-`PathType`, we treat this impl as if it
+ // were a non-trait impl. Ideally we want to create a unique missing ref that only equals
+ // itself.
+ let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr));
+ let self_ty = self.lower_type_ref(&impl_def.self_ty()?);
+ let is_negative = impl_def.excl_token().is_some();
+
+ // We cannot use `assoc_items()` here as that does not include macro calls.
+ let items = impl_def
+ .assoc_item_list()
+ .into_iter()
+ .flat_map(|it| it.assoc_items())
+ .filter_map(|item| {
+ let assoc = self.lower_assoc_item(&item)?;
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ self.add_attrs(ModItem::from(assoc).into(), attrs);
+ Some(assoc)
+ })
+ .collect();
+ let ast_id = self.source_ast_id_map.ast_id(impl_def);
+ let res = Impl { generic_params, target_trait, self_ty, is_negative, items, ast_id };
+ Some(id(self.data().impls.alloc(res)))
+ }
+
+ fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> {
+ let visibility = self.lower_visibility(use_item);
+ let ast_id = self.source_ast_id_map.ast_id(use_item);
+ let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
+
+ let res = Import { visibility, ast_id, use_tree };
+ Some(id(self.data().imports.alloc(res)))
+ }
+
+ fn lower_extern_crate(
+ &mut self,
+ extern_crate: &ast::ExternCrate,
+ ) -> Option<FileItemTreeId<ExternCrate>> {
+ let name = extern_crate.name_ref()?.as_name();
+ let alias = extern_crate.rename().map(|a| {
+ a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
+ });
+ let visibility = self.lower_visibility(extern_crate);
+ let ast_id = self.source_ast_id_map.ast_id(extern_crate);
+
+ let res = ExternCrate { name, alias, visibility, ast_id };
+ Some(id(self.data().extern_crates.alloc(res)))
+ }
+
+ fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
+ let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
+ let ast_id = self.source_ast_id_map.ast_id(m);
+ let expand_to = hir_expand::ExpandTo::from_call_site(m);
+ let res = MacroCall { path, ast_id, expand_to };
+ Some(id(self.data().macro_calls.alloc(res)))
+ }
+
+ fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
+ let name = m.name().map(|it| it.as_name())?;
+ let ast_id = self.source_ast_id_map.ast_id(m);
+
+ let res = MacroRules { name, ast_id };
+ Some(id(self.data().macro_rules.alloc(res)))
+ }
+
+ fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<MacroDef>> {
+ let name = m.name().map(|it| it.as_name())?;
+
+ let ast_id = self.source_ast_id_map.ast_id(m);
+ let visibility = self.lower_visibility(m);
+
+ let res = MacroDef { name, ast_id, visibility };
+ Some(id(self.data().macro_defs.alloc(res)))
+ }
+
+ fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> FileItemTreeId<ExternBlock> {
+ let ast_id = self.source_ast_id_map.ast_id(block);
+ let abi = block.abi().map(lower_abi);
+ let children: Box<[_]> = block.extern_item_list().map_or(Box::new([]), |list| {
+ list.extern_items()
+ .filter_map(|item| {
+ // Note: All items in an `extern` block need to be lowered as if they're outside of one
+ // (in other words, the knowledge that they're in an extern block must not be used).
+ // This is because an extern block can contain macros whose ItemTree's top-level items
+ // should be considered to be in an extern block too.
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ let id: ModItem = match item {
+ ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
+ ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
+ ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
+ ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
+ };
+ self.add_attrs(id.into(), attrs);
+ Some(id)
+ })
+ .collect()
+ });
+
+ let res = ExternBlock { abi, ast_id, children };
+ id(self.data().extern_blocks.alloc(res))
+ }
+
+ fn lower_generic_params(
+ &mut self,
+ owner: GenericsOwner<'_>,
+ node: &dyn ast::HasGenericParams,
+ ) -> Interned<GenericParams> {
+ let mut generics = GenericParams::default();
+ match owner {
+ GenericsOwner::Function(_)
+ | GenericsOwner::Struct
+ | GenericsOwner::Enum
+ | GenericsOwner::Union
+ | GenericsOwner::TypeAlias => {
+ generics.fill(&self.body_ctx, node);
+ }
+ GenericsOwner::Trait(trait_def) => {
+ // traits get the Self type as an implicit first type parameter
+ generics.type_or_consts.alloc(
+ TypeParamData {
+ name: Some(name![Self]),
+ default: None,
+ provenance: TypeParamProvenance::TraitSelf,
+ }
+ .into(),
+ );
+ // add super traits as bounds on Self
+ // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
+ let self_param = TypeRef::Path(name![Self].into());
+ generics.fill_bounds(&self.body_ctx, trait_def, Either::Left(self_param));
+ generics.fill(&self.body_ctx, node);
+ }
+ GenericsOwner::Impl => {
+ // Note that we don't add `Self` here: in `impl`s, `Self` is not a
+ // type-parameter, but rather is a type-alias for impl's target
+ // type, so this is handled by the resolver.
+ generics.fill(&self.body_ctx, node);
+ }
+ }
+
+ generics.shrink_to_fit();
+ Interned::new(generics)
+ }
+
+ fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Vec<Interned<TypeBound>> {
+ match node.type_bound_list() {
+ Some(bound_list) => bound_list
+ .bounds()
+ .map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it)))
+ .collect(),
+ None => Vec::new(),
+ }
+ }
+
+ fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
+ let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
+ self.data().vis.alloc(vis)
+ }
+
+ fn lower_trait_ref(&mut self, trait_ref: &ast::Type) -> Option<Interned<TraitRef>> {
+ let trait_ref = TraitRef::from_ast(&self.body_ctx, trait_ref.clone())?;
+ Some(Interned::new(trait_ref))
+ }
+
+ fn lower_type_ref(&mut self, type_ref: &ast::Type) -> Interned<TypeRef> {
+ let tyref = TypeRef::from_ast(&self.body_ctx, type_ref.clone());
+ Interned::new(tyref)
+ }
+
+ fn lower_type_ref_opt(&mut self, type_ref: Option<ast::Type>) -> Interned<TypeRef> {
+ match type_ref.map(|ty| self.lower_type_ref(&ty)) {
+ Some(it) => it,
+ None => Interned::new(TypeRef::Error),
+ }
+ }
+
+ fn next_field_idx(&self) -> Idx<Field> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.fields.len() as u32),
+ ))
+ }
+ fn next_variant_idx(&self) -> Idx<Variant> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32),
+ ))
+ }
+ fn next_param_idx(&self) -> Idx<Param> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.params.len() as u32),
+ ))
+ }
+}
+
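+/// Builds the `core::future::Future<Output = orig>` path used when desugaring an `async fn`'s
+/// return type into `impl Future<Output = ...>` (see `lower_function`).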
+fn desugar_future_path(orig: TypeRef) -> Path {
+ let path = path![core::future::Future];
+ let mut generic_args: Vec<_> =
+ std::iter::repeat(None).take(path.segments().len() - 1).collect();
+ let mut last = GenericArgs::empty();
+ let binding =
+ AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() };
+ last.bindings.push(binding);
+ generic_args.push(Some(Interned::new(last)));
+
+ Path::from_known_path(path, generic_args)
+}
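+
+// E.g. (a sketch of the effect): given a lowered type `T`, this builds the path
+// `core::future::Future<Output = T>`, with the `Output = T` associated-type
+// binding attached to the last path segment and no arguments on the others.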
+
+enum GenericsOwner<'a> {
+ /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument
+ /// position.
+ Function(&'a Function),
+ Struct,
+ Enum,
+ Union,
+ /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter.
+ Trait(&'a ast::Trait),
+ TypeAlias,
+ Impl,
+}
+
+fn lower_abi(abi: ast::Abi) -> Interned<str> {
+ // FIXME: Abi::abi() -> Option<SyntaxToken>?
+ match abi.syntax().last_token() {
+ Some(tok) if tok.kind() == SyntaxKind::STRING => {
+ // FIXME: Better way to unescape?
+ Interned::new_str(tok.text().trim_matches('"'))
+ }
+ _ => {
+ // A bare `extern` block defaults to `extern "C"`.
+ Interned::new_str("C")
+ }
+ }
+}
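+
+// A minimal standalone sketch of the same fallback (the helper name is
+// hypothetical, not part of this crate): a string token such as `"system"` has
+// its quotes trimmed, while a bare `extern` block falls back to the "C" ABI.
+fn abi_from_string_token(token_text: Option<&str>) -> &str {
+    match token_text {
+        // e.g. `extern "system" { ... }` -> "system"
+        Some(text) => text.trim_matches('"'),
+        // e.g. `extern { ... }` -> "C"
+        None => "C",
+    }
+}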
+
+struct UseTreeLowering<'a> {
+ db: &'a dyn DefDatabase,
+ hygiene: &'a Hygiene,
+ mapping: Arena<ast::UseTree>,
+}
+
+impl UseTreeLowering<'_> {
+ fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> {
+ if let Some(use_tree_list) = tree.use_tree_list() {
+ let prefix = match tree.path() {
+ // E.g. use something::{{{inner}}};
+ None => None,
+ // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
+ // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
+ Some(path) => {
+ match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
+ Some(it) => Some(it),
+ None => return None, // FIXME: report errors somewhere
+ }
+ }
+ };
+
+ let list =
+ use_tree_list.use_trees().filter_map(|tree| self.lower_use_tree(tree)).collect();
+
+ Some(
+ self.use_tree(
+ UseTreeKind::Prefixed { prefix: prefix.map(Interned::new), list },
+ tree,
+ ),
+ )
+ } else {
+ let is_glob = tree.star_token().is_some();
+ let path = match tree.path() {
+ Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
+ None => None,
+ };
+ let alias = tree.rename().map(|a| {
+ a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
+ });
+ if alias.is_some() && is_glob {
+ return None;
+ }
+
+ match (path, alias, is_glob) {
+ (path, None, true) => {
+ if path.is_none() {
+ cov_mark::hit!(glob_enum_group);
+ }
+ Some(self.use_tree(UseTreeKind::Glob { path: path.map(Interned::new) }, tree))
+ }
+ // Globs can't be renamed
+ (_, Some(_), true) | (None, None, false) => None,
+ // `bla::{ as Name}` is invalid
+ (None, Some(_), false) => None,
+ (Some(path), alias, false) => Some(
+ self.use_tree(UseTreeKind::Single { path: Interned::new(path), alias }, tree),
+ ),
+ }
+ }
+ }
+
+ fn use_tree(&mut self, kind: UseTreeKind, ast: ast::UseTree) -> UseTree {
+ let index = self.mapping.alloc(ast);
+ UseTree { index, kind }
+ }
+}
+
+pub(super) fn lower_use_tree(
+ db: &dyn DefDatabase,
+ hygiene: &Hygiene,
+ tree: ast::UseTree,
+) -> Option<(UseTree, Arena<ast::UseTree>)> {
+ let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
+ let tree = lowering.lower_use_tree(tree)?;
+ Some((tree, lowering.mapping))
+}
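+
+// Illustrative mapping, based on the match arms above, of how common `use`
+// items are classified (paths shown informally; aliases are `ImportAlias`):
+//
+//   use foo::bar;            -> UseTreeKind::Single   { path: foo::bar, alias: None }
+//   use foo::bar as baz;     -> UseTreeKind::Single   { path: foo::bar, alias: Some(Alias(baz)) }
+//   use foo::bar as _;       -> UseTreeKind::Single   { path: foo::bar, alias: Some(Underscore) }
+//   use foo::*;              -> UseTreeKind::Glob     { path: Some(foo) }
+//   use foo::{bar, baz::*};  -> UseTreeKind::Prefixed { prefix: Some(foo), list: [bar, baz::*] }
+//   use {foo, bar};          -> UseTreeKind::Prefixed { prefix: None, list: [foo, bar] }
+//   use foo::* as bar;       -> lowered to `None` (globs cannot be renamed)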
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
new file mode 100644
index 000000000..f12d9a127
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -0,0 +1,754 @@
+//! `ItemTree` debug printer.
+
+use std::fmt::{self, Write};
+
+use itertools::Itertools;
+
+use crate::{
+ attr::RawAttrs,
+ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
+ path::GenericArg,
+ type_ref::TraitBoundModifier,
+ visibility::RawVisibility,
+};
+
+use super::*;
+
+pub(super) fn print_item_tree(tree: &ItemTree) -> String {
+ let mut p = Printer { tree, buf: String::new(), indent_level: 0, needs_indent: true };
+
+ if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
+ p.print_attrs(attrs, true);
+ }
+ p.blank();
+
+ for item in tree.top_level_items() {
+ p.print_mod_item(*item);
+ }
+
+ let mut s = p.buf.trim_end_matches('\n').to_string();
+ s.push('\n');
+ s
+}
+
+macro_rules! w {
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = write!($dst, $($arg)*); }
+ };
+}
+
+macro_rules! wln {
+ ($dst:expr) => {
+ { let _ = writeln!($dst); }
+ };
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = writeln!($dst, $($arg)*); }
+ };
+}
+
+struct Printer<'a> {
+ tree: &'a ItemTree,
+ buf: String,
+ indent_level: usize,
+ needs_indent: bool,
+}
+
+impl<'a> Printer<'a> {
+ fn indented(&mut self, f: impl FnOnce(&mut Self)) {
+ self.indent_level += 1;
+ wln!(self);
+ f(self);
+ self.indent_level -= 1;
+ self.buf = self.buf.trim_end_matches('\n').to_string();
+ }
+
+ /// Ensures that a blank line is output before the next text.
+ fn blank(&mut self) {
+ let mut iter = self.buf.chars().rev().fuse();
+ match (iter.next(), iter.next()) {
+ (Some('\n'), Some('\n') | None) | (None, None) => {}
+ (Some('\n'), Some(_)) => {
+ self.buf.push('\n');
+ }
+ (Some(_), _) => {
+ self.buf.push('\n');
+ self.buf.push('\n');
+ }
+ (None, Some(_)) => unreachable!(),
+ }
+ }
+
+ fn whitespace(&mut self) {
+ match self.buf.chars().next_back() {
+ None | Some('\n' | ' ') => {}
+ _ => self.buf.push(' '),
+ }
+ }
+
+ fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool) {
+ let inner = if inner { "!" } else { "" };
+ for attr in &**attrs {
+ wln!(
+ self,
+ "#{}[{}{}]",
+ inner,
+ attr.path,
+ attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
+ );
+ }
+ }
+
+ fn print_attrs_of(&mut self, of: impl Into<AttrOwner>) {
+ if let Some(attrs) = self.tree.attrs.get(&of.into()) {
+ self.print_attrs(attrs, false);
+ }
+ }
+
+ fn print_visibility(&mut self, vis: RawVisibilityId) {
+ match &self.tree[vis] {
+ RawVisibility::Module(path) => w!(self, "pub({}) ", path),
+ RawVisibility::Public => w!(self, "pub "),
+ };
+ }
+
+ fn print_fields(&mut self, fields: &Fields) {
+ match fields {
+ Fields::Record(fields) => {
+ self.whitespace();
+ w!(self, "{{");
+ self.indented(|this| {
+ for field in fields.clone() {
+ let Field { visibility, name, type_ref } = &this.tree[field];
+ this.print_attrs_of(field);
+ this.print_visibility(*visibility);
+ w!(this, "{}: ", name);
+ this.print_type_ref(type_ref);
+ wln!(this, ",");
+ }
+ });
+ w!(self, "}}");
+ }
+ Fields::Tuple(fields) => {
+ w!(self, "(");
+ self.indented(|this| {
+ for field in fields.clone() {
+ let Field { visibility, name, type_ref } = &this.tree[field];
+ this.print_attrs_of(field);
+ this.print_visibility(*visibility);
+ w!(this, "{}: ", name);
+ this.print_type_ref(type_ref);
+ wln!(this, ",");
+ }
+ });
+ w!(self, ")");
+ }
+ Fields::Unit => {}
+ }
+ }
+
+ fn print_fields_and_where_clause(&mut self, fields: &Fields, params: &GenericParams) {
+ match fields {
+ Fields::Record(_) => {
+ if self.print_where_clause(params) {
+ wln!(self);
+ }
+ self.print_fields(fields);
+ }
+ Fields::Unit => {
+ self.print_where_clause(params);
+ self.print_fields(fields);
+ }
+ Fields::Tuple(_) => {
+ self.print_fields(fields);
+ self.print_where_clause(params);
+ }
+ }
+ }
+
+ fn print_use_tree(&mut self, use_tree: &UseTree) {
+ match &use_tree.kind {
+ UseTreeKind::Single { path, alias } => {
+ w!(self, "{}", path);
+ if let Some(alias) = alias {
+ w!(self, " as {}", alias);
+ }
+ }
+ UseTreeKind::Glob { path } => {
+ if let Some(path) = path {
+ w!(self, "{}::", path);
+ }
+ w!(self, "*");
+ }
+ UseTreeKind::Prefixed { prefix, list } => {
+ if let Some(prefix) = prefix {
+ w!(self, "{}::", prefix);
+ }
+ w!(self, "{{");
+ for (i, tree) in list.iter().enumerate() {
+ if i != 0 {
+ w!(self, ", ");
+ }
+ self.print_use_tree(tree);
+ }
+ w!(self, "}}");
+ }
+ }
+ }
+
+ fn print_mod_item(&mut self, item: ModItem) {
+ self.print_attrs_of(item);
+
+ match item {
+ ModItem::Import(it) => {
+ let Import { visibility, use_tree, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "use ");
+ self.print_use_tree(use_tree);
+ wln!(self, ";");
+ }
+ ModItem::ExternCrate(it) => {
+ let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "extern crate {}", name);
+ if let Some(alias) = alias {
+ w!(self, " as {}", alias);
+ }
+ wln!(self, ";");
+ }
+ ModItem::ExternBlock(it) => {
+ let ExternBlock { abi, ast_id: _, children } = &self.tree[it];
+ w!(self, "extern ");
+ if let Some(abi) = abi {
+ w!(self, "\"{}\" ", abi);
+ }
+ w!(self, "{{");
+ self.indented(|this| {
+ for child in &**children {
+ this.print_mod_item(*child);
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::Function(it) => {
+ let Function {
+ name,
+ visibility,
+ explicit_generic_params,
+ abi,
+ params,
+ ret_type,
+ async_ret_type: _,
+ ast_id: _,
+ flags,
+ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ if flags.contains(FnFlags::HAS_DEFAULT_KW) {
+ w!(self, "default ");
+ }
+ if flags.contains(FnFlags::HAS_CONST_KW) {
+ w!(self, "const ");
+ }
+ if flags.contains(FnFlags::HAS_ASYNC_KW) {
+ w!(self, "async ");
+ }
+ if flags.contains(FnFlags::HAS_UNSAFE_KW) {
+ w!(self, "unsafe ");
+ }
+ if let Some(abi) = abi {
+ w!(self, "extern \"{}\" ", abi);
+ }
+ w!(self, "fn {}", name);
+ self.print_generic_params(explicit_generic_params);
+ w!(self, "(");
+ if !params.is_empty() {
+ self.indented(|this| {
+ for (i, param) in params.clone().enumerate() {
+ this.print_attrs_of(param);
+ match &this.tree[param] {
+ Param::Normal(name, ty) => {
+ match name {
+ Some(name) => w!(this, "{}: ", name),
+ None => w!(this, "_: "),
+ }
+ this.print_type_ref(ty);
+ w!(this, ",");
+ if flags.contains(FnFlags::HAS_SELF_PARAM) && i == 0 {
+ wln!(this, " // self");
+ } else {
+ wln!(this);
+ }
+ }
+ Param::Varargs => {
+ wln!(this, "...");
+ }
+ };
+ }
+ });
+ }
+ w!(self, ") -> ");
+ self.print_type_ref(ret_type);
+ self.print_where_clause(explicit_generic_params);
+ if flags.contains(FnFlags::HAS_BODY) {
+ wln!(self, " {{ ... }}");
+ } else {
+ wln!(self, ";");
+ }
+ }
+ ModItem::Struct(it) => {
+ let Struct { visibility, name, fields, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "struct {}", name);
+ self.print_generic_params(generic_params);
+ self.print_fields_and_where_clause(fields, generic_params);
+ if matches!(fields, Fields::Record(_)) {
+ wln!(self);
+ } else {
+ wln!(self, ";");
+ }
+ }
+ ModItem::Union(it) => {
+ let Union { name, visibility, fields, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "union {}", name);
+ self.print_generic_params(generic_params);
+ self.print_fields_and_where_clause(fields, generic_params);
+ if matches!(fields, Fields::Record(_)) {
+ wln!(self);
+ } else {
+ wln!(self, ";");
+ }
+ }
+ ModItem::Enum(it) => {
+ let Enum { name, visibility, variants, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "enum {}", name);
+ self.print_generic_params(generic_params);
+ self.print_where_clause_and_opening_brace(generic_params);
+ self.indented(|this| {
+ for variant in variants.clone() {
+ let Variant { name, fields } = &this.tree[variant];
+ this.print_attrs_of(variant);
+ w!(this, "{}", name);
+ this.print_fields(fields);
+ wln!(this, ",");
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::Const(it) => {
+ let Const { name, visibility, type_ref, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "const ");
+ match name {
+ Some(name) => w!(self, "{}", name),
+ None => w!(self, "_"),
+ }
+ w!(self, ": ");
+ self.print_type_ref(type_ref);
+ wln!(self, " = _;");
+ }
+ ModItem::Static(it) => {
+ let Static { name, visibility, mutable, type_ref, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "static ");
+ if *mutable {
+ w!(self, "mut ");
+ }
+ w!(self, "{}: ", name);
+ self.print_type_ref(type_ref);
+ w!(self, " = _;");
+ wln!(self);
+ }
+ ModItem::Trait(it) => {
+ let Trait {
+ name,
+ visibility,
+ is_auto,
+ is_unsafe,
+ items,
+ generic_params,
+ ast_id: _,
+ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ if *is_unsafe {
+ w!(self, "unsafe ");
+ }
+ if *is_auto {
+ w!(self, "auto ");
+ }
+ w!(self, "trait {}", name);
+ self.print_generic_params(generic_params);
+ self.print_where_clause_and_opening_brace(generic_params);
+ self.indented(|this| {
+ for item in &**items {
+ this.print_mod_item((*item).into());
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::Impl(it) => {
+ let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
+ &self.tree[it];
+ w!(self, "impl");
+ self.print_generic_params(generic_params);
+ w!(self, " ");
+ if *is_negative {
+ w!(self, "!");
+ }
+ if let Some(tr) = target_trait {
+ self.print_path(&tr.path);
+ w!(self, " for ");
+ }
+ self.print_type_ref(self_ty);
+ self.print_where_clause_and_opening_brace(generic_params);
+ self.indented(|this| {
+ for item in &**items {
+ this.print_mod_item((*item).into());
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::TypeAlias(it) => {
+ let TypeAlias { name, visibility, bounds, type_ref, generic_params, ast_id: _ } =
+ &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "type {}", name);
+ self.print_generic_params(generic_params);
+ if !bounds.is_empty() {
+ w!(self, ": ");
+ self.print_type_bounds(bounds);
+ }
+ if let Some(ty) = type_ref {
+ w!(self, " = ");
+ self.print_type_ref(ty);
+ }
+ self.print_where_clause(generic_params);
+ w!(self, ";");
+ wln!(self);
+ }
+ ModItem::Mod(it) => {
+ let Mod { name, visibility, kind, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "mod {}", name);
+ match kind {
+ ModKind::Inline { items } => {
+ w!(self, " {{");
+ self.indented(|this| {
+ for item in &**items {
+ this.print_mod_item(*item);
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModKind::Outline => {
+ wln!(self, ";");
+ }
+ }
+ }
+ ModItem::MacroCall(it) => {
+ let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
+ wln!(self, "{}!(...);", path);
+ }
+ ModItem::MacroRules(it) => {
+ let MacroRules { name, ast_id: _ } = &self.tree[it];
+ wln!(self, "macro_rules! {} {{ ... }}", name);
+ }
+ ModItem::MacroDef(it) => {
+ let MacroDef { name, visibility, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ wln!(self, "macro {} {{ ... }}", name);
+ }
+ }
+
+ self.blank();
+ }
+
+ fn print_type_ref(&mut self, type_ref: &TypeRef) {
+ // FIXME: deduplicate with `HirDisplay` impl
+ match type_ref {
+ TypeRef::Never => w!(self, "!"),
+ TypeRef::Placeholder => w!(self, "_"),
+ TypeRef::Tuple(fields) => {
+ w!(self, "(");
+ for (i, field) in fields.iter().enumerate() {
+ if i != 0 {
+ w!(self, ", ");
+ }
+ self.print_type_ref(field);
+ }
+ w!(self, ")");
+ }
+ TypeRef::Path(path) => self.print_path(path),
+ TypeRef::RawPtr(pointee, mtbl) => {
+ let mtbl = match mtbl {
+ Mutability::Shared => "*const",
+ Mutability::Mut => "*mut",
+ };
+ w!(self, "{} ", mtbl);
+ self.print_type_ref(pointee);
+ }
+ TypeRef::Reference(pointee, lt, mtbl) => {
+ let mtbl = match mtbl {
+ Mutability::Shared => "",
+ Mutability::Mut => "mut ",
+ };
+ w!(self, "&");
+ if let Some(lt) = lt {
+ w!(self, "{} ", lt.name);
+ }
+ w!(self, "{}", mtbl);
+ self.print_type_ref(pointee);
+ }
+ TypeRef::Array(elem, len) => {
+ w!(self, "[");
+ self.print_type_ref(elem);
+ w!(self, "; {}]", len);
+ }
+ TypeRef::Slice(elem) => {
+ w!(self, "[");
+ self.print_type_ref(elem);
+ w!(self, "]");
+ }
+ TypeRef::Fn(args_and_ret, varargs) => {
+ let ((_, return_type), args) =
+ args_and_ret.split_last().expect("TypeRef::Fn is missing return type");
+ w!(self, "fn(");
+ for (i, (_, typeref)) in args.iter().enumerate() {
+ if i != 0 {
+ w!(self, ", ");
+ }
+ self.print_type_ref(typeref);
+ }
+ if *varargs {
+ if !args.is_empty() {
+ w!(self, ", ");
+ }
+ w!(self, "...");
+ }
+ w!(self, ") -> ");
+ self.print_type_ref(return_type);
+ }
+ TypeRef::Macro(_ast_id) => {
+ w!(self, "<macro>");
+ }
+ TypeRef::Error => w!(self, "{{unknown}}"),
+ TypeRef::ImplTrait(bounds) => {
+ w!(self, "impl ");
+ self.print_type_bounds(bounds);
+ }
+ TypeRef::DynTrait(bounds) => {
+ w!(self, "dyn ");
+ self.print_type_bounds(bounds);
+ }
+ }
+ }
+
+ fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) {
+ for (i, bound) in bounds.iter().enumerate() {
+ if i != 0 {
+ w!(self, " + ");
+ }
+
+ match bound.as_ref() {
+ TypeBound::Path(path, modifier) => {
+ match modifier {
+ TraitBoundModifier::None => (),
+ TraitBoundModifier::Maybe => w!(self, "?"),
+ }
+ self.print_path(path)
+ }
+ TypeBound::ForLifetime(lifetimes, path) => {
+ w!(self, "for<{}> ", lifetimes.iter().format(", "));
+ self.print_path(path);
+ }
+ TypeBound::Lifetime(lt) => w!(self, "{}", lt.name),
+ TypeBound::Error => w!(self, "{{unknown}}"),
+ }
+ }
+ }
+
+ fn print_path(&mut self, path: &Path) {
+ match path.type_anchor() {
+ Some(anchor) => {
+ w!(self, "<");
+ self.print_type_ref(anchor);
+ w!(self, ">::");
+ }
+ None => match path.kind() {
+ PathKind::Plain => {}
+ PathKind::Super(0) => w!(self, "self::"),
+ PathKind::Super(n) => {
+ for _ in 0..*n {
+ w!(self, "super::");
+ }
+ }
+ PathKind::Crate => w!(self, "crate::"),
+ PathKind::Abs => w!(self, "::"),
+ PathKind::DollarCrate(_) => w!(self, "$crate::"),
+ },
+ }
+
+ for (i, segment) in path.segments().iter().enumerate() {
+ if i != 0 {
+ w!(self, "::");
+ }
+
+ w!(self, "{}", segment.name);
+ if let Some(generics) = segment.args_and_bindings {
+ // NB: these are all in type position, so `::<` turbofish syntax is not necessary
+ w!(self, "<");
+ let mut first = true;
+ let args = if generics.has_self_type {
+ let (self_ty, args) = generics.args.split_first().unwrap();
+ w!(self, "Self=");
+ self.print_generic_arg(self_ty);
+ first = false;
+ args
+ } else {
+ &generics.args
+ };
+ for arg in args {
+ if !first {
+ w!(self, ", ");
+ }
+ first = false;
+ self.print_generic_arg(arg);
+ }
+ for binding in &generics.bindings {
+ if !first {
+ w!(self, ", ");
+ }
+ first = false;
+ w!(self, "{}", binding.name);
+ if !binding.bounds.is_empty() {
+ w!(self, ": ");
+ self.print_type_bounds(&binding.bounds);
+ }
+ if let Some(ty) = &binding.type_ref {
+ w!(self, " = ");
+ self.print_type_ref(ty);
+ }
+ }
+
+ w!(self, ">");
+ }
+ }
+ }
+
+ fn print_generic_arg(&mut self, arg: &GenericArg) {
+ match arg {
+ GenericArg::Type(ty) => self.print_type_ref(ty),
+ GenericArg::Const(c) => w!(self, "{}", c),
+ GenericArg::Lifetime(lt) => w!(self, "{}", lt.name),
+ }
+ }
+
+ fn print_generic_params(&mut self, params: &GenericParams) {
+ if params.type_or_consts.is_empty() && params.lifetimes.is_empty() {
+ return;
+ }
+
+ w!(self, "<");
+ let mut first = true;
+ for (_, lt) in params.lifetimes.iter() {
+ if !first {
+ w!(self, ", ");
+ }
+ first = false;
+ w!(self, "{}", lt.name);
+ }
+ for (idx, x) in params.type_or_consts.iter() {
+ if !first {
+ w!(self, ", ");
+ }
+ first = false;
+ match x {
+ TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
+ Some(name) => w!(self, "{}", name),
+ None => w!(self, "_anon_{}", idx.into_raw()),
+ },
+ TypeOrConstParamData::ConstParamData(konst) => {
+ w!(self, "const {}: ", konst.name);
+ self.print_type_ref(&konst.ty);
+ }
+ }
+ }
+ w!(self, ">");
+ }
+
+ fn print_where_clause_and_opening_brace(&mut self, params: &GenericParams) {
+ if self.print_where_clause(params) {
+ w!(self, "\n{{");
+ } else {
+ self.whitespace();
+ w!(self, "{{");
+ }
+ }
+
+ fn print_where_clause(&mut self, params: &GenericParams) -> bool {
+ if params.where_predicates.is_empty() {
+ return false;
+ }
+
+ w!(self, "\nwhere");
+ self.indented(|this| {
+ for (i, pred) in params.where_predicates.iter().enumerate() {
+ if i != 0 {
+ wln!(this, ",");
+ }
+
+ let (target, bound) = match pred {
+ WherePredicate::TypeBound { target, bound } => (target, bound),
+ WherePredicate::Lifetime { target, bound } => {
+ wln!(this, "{}: {},", target.name, bound.name);
+ continue;
+ }
+ WherePredicate::ForLifetime { lifetimes, target, bound } => {
+ w!(this, "for<");
+ for (i, lt) in lifetimes.iter().enumerate() {
+ if i != 0 {
+ w!(this, ", ");
+ }
+ w!(this, "{}", lt);
+ }
+ w!(this, "> ");
+ (target, bound)
+ }
+ };
+
+ match target {
+ WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ match &params.type_or_consts[*id].name() {
+ Some(name) => w!(this, "{}", name),
+ None => w!(this, "_anon_{}", id.into_raw()),
+ }
+ }
+ }
+ w!(this, ": ");
+ this.print_type_bounds(std::slice::from_ref(bound));
+ }
+ });
+ true
+ }
+}
+
+impl<'a> Write for Printer<'a> {
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ for line in s.split_inclusive('\n') {
+ if self.needs_indent {
+ match self.buf.chars().last() {
+ Some('\n') | None => {}
+ _ => self.buf.push('\n'),
+ }
+ self.buf.push_str(&" ".repeat(self.indent_level));
+ self.needs_indent = false;
+ }
+
+ self.buf.push_str(line);
+ self.needs_indent = line.ends_with('\n');
+ }
+
+ Ok(())
+ }
+}
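+
+// Usage sketch (hypothetical snippet) showing how `w!`, `wln!` and `indented`
+// combine: the closure's body is printed one indent level deeper, and the
+// trailing newline is trimmed before the closing brace is written.
+//
+//     w!(p, "mod example {{");
+//     p.indented(|p| {
+//         wln!(p, "fn f() {{ ... }}");
+//     });
+//     wln!(p, "}}");
+//
+// produces:
+//
+//     mod example {
+//         fn f() { ... }
+//     }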
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
new file mode 100644
index 000000000..5cdf36cc6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -0,0 +1,360 @@
+use base_db::fixture::WithFixture;
+use expect_test::{expect, Expect};
+
+use crate::{db::DefDatabase, test_db::TestDB};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let (db, file_id) = TestDB::with_single_file(ra_fixture);
+ let item_tree = db.file_item_tree(file_id.into());
+ let pretty = item_tree.pretty_print();
+ expect.assert_eq(&pretty);
+}
+
+#[test]
+fn imports() {
+ check(
+ r#"
+//! file comment
+#![no_std]
+//! another file comment
+
+extern crate self as renamed;
+pub(super) extern crate bli;
+
+pub use crate::path::{nested, items as renamed, Trait as _};
+use globs::*;
+
+/// docs on import
+use crate::{A, B};
+
+use a::{c, d::{e}};
+ "#,
+ expect![[r##"
+ #![doc = " file comment"]
+ #![no_std]
+ #![doc = " another file comment"]
+
+ pub(self) extern crate self as renamed;
+
+ pub(super) extern crate bli;
+
+ pub use crate::path::{nested, items as renamed, Trait as _};
+
+ pub(self) use globs::*;
+
+ #[doc = " docs on import"]
+ pub(self) use crate::{A, B};
+
+ pub(self) use a::{c, d::{e}};
+ "##]],
+ );
+}
+
+#[test]
+fn extern_blocks() {
+ check(
+ r#"
+#[on_extern_block]
+extern "C" {
+ #[on_extern_type]
+ type ExType;
+
+ #[on_extern_static]
+ static EX_STATIC: u8;
+
+ #[on_extern_fn]
+ fn ex_fn();
+}
+ "#,
+ expect![[r##"
+ #[on_extern_block]
+ extern "C" {
+ #[on_extern_type]
+ pub(self) type ExType;
+
+ #[on_extern_static]
+ pub(self) static EX_STATIC: u8 = _;
+
+ #[on_extern_fn]
+ pub(self) fn ex_fn() -> ();
+ }
+ "##]],
+ );
+}
+
+#[test]
+fn adts() {
+ check(
+ r#"
+struct Unit;
+
+#[derive(Debug)]
+struct Struct {
+ /// fld docs
+ fld: (),
+}
+
+struct Tuple(#[attr] u8);
+
+union Ize {
+ a: (),
+ b: (),
+}
+
+enum E {
+ /// comment on Unit
+ Unit,
+ /// comment on Tuple
+ Tuple(u8),
+ Struct {
+ /// comment on a: u8
+ a: u8,
+ }
+}
+ "#,
+ expect![[r##"
+ pub(self) struct Unit;
+
+ #[derive(Debug)]
+ pub(self) struct Struct {
+ #[doc = " fld docs"]
+ pub(self) fld: (),
+ }
+
+ pub(self) struct Tuple(
+ #[attr]
+ pub(self) 0: u8,
+ );
+
+ pub(self) union Ize {
+ pub(self) a: (),
+ pub(self) b: (),
+ }
+
+ pub(self) enum E {
+ #[doc = " comment on Unit"]
+ Unit,
+ #[doc = " comment on Tuple"]
+ Tuple(
+ pub(self) 0: u8,
+ ),
+ Struct {
+ #[doc = " comment on a: u8"]
+ pub(self) a: u8,
+ },
+ }
+ "##]],
+ );
+}
+
+#[test]
+fn misc() {
+ check(
+ r#"
+pub static mut ST: () = ();
+
+const _: Anon = ();
+
+#[attr]
+fn f(#[attr] arg: u8, _: ()) {
+ #![inner_attr_in_fn]
+}
+
+trait Tr: SuperTrait + 'lifetime {
+ type Assoc: AssocBound = Default;
+ fn method(&self);
+}
+ "#,
+ expect![[r##"
+ pub static mut ST: () = _;
+
+ pub(self) const _: Anon = _;
+
+ #[attr]
+ #[inner_attr_in_fn]
+ pub(self) fn f(
+ #[attr]
+ arg: u8,
+ _: (),
+ ) -> () { ... }
+
+ pub(self) trait Tr<Self>
+ where
+ Self: SuperTrait,
+ Self: 'lifetime
+ {
+ pub(self) type Assoc: AssocBound = Default;
+
+ pub(self) fn method(
+ _: &Self, // self
+ ) -> ();
+ }
+ "##]],
+ );
+}
+
+#[test]
+fn modules() {
+ check(
+ r#"
+/// outer
+mod inline {
+ //! inner
+
+ use super::*;
+
+ fn fn_in_module() {}
+}
+
+mod outline;
+ "#,
+ expect![[r##"
+ #[doc = " outer"]
+ #[doc = " inner"]
+ pub(self) mod inline {
+ pub(self) use super::*;
+
+ pub(self) fn fn_in_module() -> () { ... }
+ }
+
+ pub(self) mod outline;
+ "##]],
+ );
+}
+
+#[test]
+fn macros() {
+ check(
+ r#"
+macro_rules! m {
+ () => {};
+}
+
+pub macro m2() {}
+
+m!();
+ "#,
+ expect![[r#"
+ macro_rules! m { ... }
+
+ pub macro m2 { ... }
+
+ m!(...);
+ "#]],
+ );
+}
+
+#[test]
+fn mod_paths() {
+ check(
+ r#"
+struct S {
+ a: self::Ty,
+ b: super::SuperTy,
+ c: super::super::SuperSuperTy,
+ d: ::abs::Path,
+ e: crate::Crate,
+ f: plain::path::Ty,
+}
+ "#,
+ expect![[r#"
+ pub(self) struct S {
+ pub(self) a: self::Ty,
+ pub(self) b: super::SuperTy,
+ pub(self) c: super::super::SuperSuperTy,
+ pub(self) d: ::abs::Path,
+ pub(self) e: crate::Crate,
+ pub(self) f: plain::path::Ty,
+ }
+ "#]],
+ )
+}
+
+#[test]
+fn types() {
+ check(
+ r#"
+struct S {
+ a: Mixed<'a, T, Item=(), OtherItem=u8>,
+ b: <Fully as Qualified>::Syntax,
+ c: <TypeAnchored>::Path::<'a>,
+ d: dyn for<'a> Trait<'a>,
+}
+ "#,
+ expect![[r#"
+ pub(self) struct S {
+ pub(self) a: Mixed<'a, T, Item = (), OtherItem = u8>,
+ pub(self) b: Qualified<Self=Fully>::Syntax,
+ pub(self) c: <TypeAnchored>::Path<'a>,
+ pub(self) d: dyn for<'a> Trait<'a>,
+ }
+ "#]],
+ )
+}
+
+#[test]
+fn generics() {
+ check(
+ r#"
+struct S<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> {
+ field: &'a &'b T,
+}
+
+struct Tuple<T: Copy, U: ?Sized>(T, U);
+
+impl<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> S<'a, 'b, T, K> {
+ fn f<G: 'a>(arg: impl Copy) -> impl Copy {}
+}
+
+enum Enum<'a, T, const U: u8> {}
+union Union<'a, T, const U: u8> {}
+
+trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {}
+ "#,
+ expect![[r#"
+ pub(self) struct S<'a, 'b, T, const K: u8>
+ where
+ T: Copy,
+ T: 'a,
+ T: 'b
+ {
+ pub(self) field: &'a &'b T,
+ }
+
+ pub(self) struct Tuple<T, U>(
+ pub(self) 0: T,
+ pub(self) 1: U,
+ )
+ where
+ T: Copy,
+ U: ?Sized;
+
+ impl<'a, 'b, T, const K: u8> S<'a, 'b, T, K>
+ where
+ T: Copy,
+ T: 'a,
+ T: 'b
+ {
+ pub(self) fn f<G>(
+ arg: impl Copy,
+ ) -> impl Copy
+ where
+ G: 'a { ... }
+ }
+
+ pub(self) enum Enum<'a, T, const U: u8> {
+ }
+
+ pub(self) union Union<'a, T, const U: u8> {
+ }
+
+ pub(self) trait Tr<'a, Self, T>
+ where
+ Self: Super,
+ T: 'a,
+ Self: for<'a> Tr<'a, T>
+ {
+ }
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
new file mode 100644
index 000000000..c5cb9a2af
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
@@ -0,0 +1,70 @@
+//! keys to be used with `DynMap`
+
+use std::marker::PhantomData;
+
+use hir_expand::MacroCallId;
+use rustc_hash::FxHashMap;
+use syntax::{ast, AstNode, AstPtr};
+
+use crate::{
+ attr::AttrId,
+ dyn_map::{DynMap, Policy},
+ ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
+ MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
+ UnionId,
+};
+
+pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
+
+pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
+pub const CONST: Key<ast::Const, ConstId> = Key::new();
+pub const STATIC: Key<ast::Static, StaticId> = Key::new();
+pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
+pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
+pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
+pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
+pub const UNION: Key<ast::Union, UnionId> = Key::new();
+pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
+
+pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
+pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
+pub const RECORD_FIELD: Key<ast::RecordField, FieldId> = Key::new();
+pub const TYPE_PARAM: Key<ast::TypeParam, TypeOrConstParamId> = Key::new();
+pub const CONST_PARAM: Key<ast::ConstParam, TypeOrConstParamId> = Key::new();
+pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
+
+pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
+pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
+pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
+pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
+pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
+ Key::new();
+
+/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
+/// equal if they point to exactly the same object.
+///
+/// In general, we do not guarantee that we have exactly one instance of a
+/// syntax tree for each file. We probably should add such a guarantee, but, for
+/// the time being, we will use identity-less AstPtr comparison.
+pub struct AstPtrPolicy<AST, ID> {
+ _phantom: PhantomData<(AST, ID)>,
+}
+
+impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
+ type K = AST;
+ type V = ID;
+ fn insert(map: &mut DynMap, key: AST, value: ID) {
+ let key = AstPtr::new(&key);
+ map.map
+ .entry::<FxHashMap<AstPtr<AST>, ID>>()
+ .or_insert_with(Default::default)
+ .insert(key, value);
+ }
+ fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
+ let key = AstPtr::new(key);
+ map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
+ }
+ fn is_empty(map: &DynMap) -> bool {
+ map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
+ }
+}
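+
+// A minimal standalone sketch (std only, hypothetical names) of the typed-key
+// pattern the constants above rely on: each key selects one strongly typed
+// `HashMap<K, V>` inside a single heterogeneous container, so callers get
+// type-safe lookups without one field per AST node kind.
+mod typed_key_sketch {
+    use std::any::{Any, TypeId};
+    use std::collections::HashMap;
+    use std::hash::Hash;
+    use std::marker::PhantomData;
+
+    #[derive(Default)]
+    pub struct DynMapSketch {
+        maps: HashMap<TypeId, Box<dyn Any>>,
+    }
+
+    pub struct KeySketch<K, V>(PhantomData<(K, V)>);
+
+    impl<K: Eq + Hash + 'static, V: 'static> KeySketch<K, V> {
+        pub const fn new() -> Self {
+            KeySketch(PhantomData)
+        }
+
+        pub fn insert(&self, map: &mut DynMapSketch, key: K, value: V) {
+            map.maps
+                .entry(TypeId::of::<HashMap<K, V>>())
+                .or_insert_with(|| Box::new(HashMap::<K, V>::new()))
+                .downcast_mut::<HashMap<K, V>>()
+                .expect("the TypeId key guarantees the stored map's type")
+                .insert(key, value);
+        }
+
+        pub fn get<'a>(&self, map: &'a DynMapSketch, key: &K) -> Option<&'a V> {
+            map.maps.get(&TypeId::of::<HashMap<K, V>>())?.downcast_ref::<HashMap<K, V>>()?.get(key)
+        }
+    }
+}
+
+// Usage: `const FN_NAMES: typed_key_sketch::KeySketch<u32, String> = KeySketch::new();`
+// then `FN_NAMES.insert(&mut map, 1, "main".into())` / `FN_NAMES.get(&map, &1)`.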
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
new file mode 100644
index 000000000..877850184
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -0,0 +1,174 @@
+//! Collects lang items: items marked with the `#[lang = "..."]` attribute.
+//!
+//! This attribute tells the compiler about semi-built-in std library
+//! features, such as the `Fn` family of traits.
+use std::sync::Arc;
+
+use rustc_hash::FxHashMap;
+use syntax::SmolStr;
+
+use crate::{
+ db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, ImplId,
+ ModuleDefId, StaticId, StructId, TraitId,
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum LangItemTarget {
+ EnumId(EnumId),
+ FunctionId(FunctionId),
+ ImplDefId(ImplId),
+ StaticId(StaticId),
+ StructId(StructId),
+ TraitId(TraitId),
+ EnumVariantId(EnumVariantId),
+}
+
+impl LangItemTarget {
+ pub fn as_enum(self) -> Option<EnumId> {
+ match self {
+ LangItemTarget::EnumId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ pub fn as_function(self) -> Option<FunctionId> {
+ match self {
+ LangItemTarget::FunctionId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ pub fn as_impl_def(self) -> Option<ImplId> {
+ match self {
+ LangItemTarget::ImplDefId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ pub fn as_static(self) -> Option<StaticId> {
+ match self {
+ LangItemTarget::StaticId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ pub fn as_struct(self) -> Option<StructId> {
+ match self {
+ LangItemTarget::StructId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ pub fn as_trait(self) -> Option<TraitId> {
+ match self {
+ LangItemTarget::TraitId(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ pub fn as_enum_variant(self) -> Option<EnumVariantId> {
+ match self {
+ LangItemTarget::EnumVariantId(id) => Some(id),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct LangItems {
+ items: FxHashMap<SmolStr, LangItemTarget>,
+}
+
+impl LangItems {
+ pub fn target(&self, item: &str) -> Option<LangItemTarget> {
+ self.items.get(item).copied()
+ }
+
+ /// Salsa query. This will look for lang items in a specific crate.
+ pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<LangItems> {
+ let _p = profile::span("crate_lang_items_query");
+
+ let mut lang_items = LangItems::default();
+
+ let crate_def_map = db.crate_def_map(krate);
+
+ for (_, module_data) in crate_def_map.modules() {
+ for impl_def in module_data.scope.impls() {
+ lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId)
+ }
+
+ for def in module_data.scope.declarations() {
+ match def {
+ ModuleDefId::TraitId(trait_) => {
+ lang_items.collect_lang_item(db, trait_, LangItemTarget::TraitId);
+ db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| {
+ if let crate::AssocItemId::FunctionId(f) = assoc_id {
+ lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId);
+ }
+ });
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(e)) => {
+ lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
+ db.enum_data(e).variants.iter().for_each(|(local_id, _)| {
+ lang_items.collect_lang_item(
+ db,
+ EnumVariantId { parent: e, local_id },
+ LangItemTarget::EnumVariantId,
+ );
+ });
+ }
+ ModuleDefId::AdtId(AdtId::StructId(s)) => {
+ lang_items.collect_lang_item(db, s, LangItemTarget::StructId);
+ }
+ ModuleDefId::FunctionId(f) => {
+ lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId);
+ }
+ ModuleDefId::StaticId(s) => {
+ lang_items.collect_lang_item(db, s, LangItemTarget::StaticId);
+ }
+ _ => {}
+ }
+ }
+ }
+
+ Arc::new(lang_items)
+ }
+
+ /// Salsa query. Look for a lang item, starting from the specified crate and recursively
+ /// traversing its dependencies.
+ pub(crate) fn lang_item_query(
+ db: &dyn DefDatabase,
+ start_crate: CrateId,
+ item: SmolStr,
+ ) -> Option<LangItemTarget> {
+ let _p = profile::span("lang_item_query");
+ let lang_items = db.crate_lang_items(start_crate);
+ let start_crate_target = lang_items.items.get(&item);
+ if let Some(&target) = start_crate_target {
+ return Some(target);
+ }
+ db.crate_graph()[start_crate]
+ .dependencies
+ .iter()
+ .find_map(|dep| db.lang_item(dep.crate_id, item.clone()))
+ }
+
+ fn collect_lang_item<T>(
+ &mut self,
+ db: &dyn DefDatabase,
+ item: T,
+ constructor: fn(T) -> LangItemTarget,
+ ) where
+ T: Into<AttrDefId> + Copy,
+ {
+ let _p = profile::span("collect_lang_item");
+ if let Some(lang_item_name) = lang_attr(db, item) {
+ self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
+ }
+ }
+}
+
+pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<SmolStr> {
+ let attrs = db.attrs(item.into());
+ attrs.by_key("lang").string_value().cloned()
+}
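+
+// Worked example (a sketch) of what these queries pick up: given a definition like
+//
+//     #[lang = "owned_box"]
+//     pub struct Box<T: ?Sized>(...);
+//
+// `lang_attr` returns `Some("owned_box")` for the struct's `AttrDefId`, and
+// `crate_lang_items_query` records it as `LangItemTarget::StructId(..)`, so a
+// later `db.lang_item(krate, "owned_box".into())` can resolve it, falling back
+// to the crate's dependencies when the item is defined elsewhere.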
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
new file mode 100644
index 000000000..56603f4b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -0,0 +1,980 @@
+//! The `hir_def` crate contains everything between macro expansion and type
+//! inference.
+//!
+//! It defines various items (structs, enums, traits) which comprise Rust code,
+//! as well as an algorithm for resolving paths to such entities.
+//!
+//! Note that `hir_def` is a work in progress, so not all of the above is
+//! actually true.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+pub mod db;
+
+pub mod attr;
+pub mod path;
+pub mod type_ref;
+pub mod builtin_type;
+pub mod builtin_attr;
+pub mod per_ns;
+pub mod item_scope;
+
+pub mod dyn_map;
+pub mod keys;
+
+pub mod item_tree;
+pub mod intern;
+
+pub mod adt;
+pub mod data;
+pub mod generics;
+pub mod lang_item;
+
+pub mod expr;
+pub mod body;
+pub mod resolver;
+
+mod trace;
+pub mod nameres;
+
+pub mod src;
+pub mod child_by_source;
+
+pub mod visibility;
+pub mod find_path;
+pub mod import_map;
+
+#[cfg(test)]
+mod test_db;
+#[cfg(test)]
+mod macro_expansion_tests;
+
+use std::{
+ hash::{Hash, Hasher},
+ sync::Arc,
+};
+
+use attr::Attr;
+use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
+use hir_expand::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::BuiltinAttrExpander,
+ builtin_derive_macro::BuiltinDeriveExpander,
+ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
+ eager::{expand_eager_macro, ErrorEmitted, ErrorSink},
+ hygiene::Hygiene,
+ proc_macro::ProcMacroExpander,
+ AstId, ExpandError, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
+ MacroDefKind, UnresolvedMacro,
+};
+use item_tree::ExternBlock;
+use la_arena::Idx;
+use nameres::DefMap;
+use stdx::impl_from;
+use syntax::ast;
+
+use crate::{
+ adt::VariantData,
+ attr::AttrId,
+ builtin_type::BuiltinType,
+ item_tree::{
+ Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem,
+ Static, Struct, Trait, TypeAlias, Union,
+ },
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ModuleId {
+ krate: CrateId,
+ /// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the
+ /// `BlockId` of that block expression. If `None`, this module is part of the crate-level
+ /// `DefMap` of `krate`.
+ block: Option<BlockId>,
+ /// The module's ID in its originating `DefMap`.
+ pub local_id: LocalModuleId,
+}
+
+impl ModuleId {
+ pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
+ match self.block {
+ Some(block) => {
+ db.block_def_map(block).unwrap_or_else(|| {
+ // NOTE: This should be unreachable - all `ModuleId`s come from their `DefMap`s,
+ // so the `DefMap` here must exist.
+ unreachable!("no `block_def_map` for `ModuleId` {:?}", self);
+ })
+ }
+ None => db.crate_def_map(self.krate),
+ }
+ }
+
+ pub fn krate(&self) -> CrateId {
+ self.krate
+ }
+
+ pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+ self.def_map(db).containing_module(self.local_id)
+ }
+
+ pub fn containing_block(&self) -> Option<BlockId> {
+ self.block
+ }
+}
+
+/// An ID of a module, **local** to a specific crate
+pub type LocalModuleId = Idx<nameres::ModuleData>;
+
+#[derive(Debug)]
+pub struct ItemLoc<N: ItemTreeNode> {
+ pub container: ModuleId,
+ pub id: ItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> Clone for ItemLoc<N> {
+ fn clone(&self) -> Self {
+ Self { container: self.container, id: self.id }
+ }
+}
+
+impl<N: ItemTreeNode> Copy for ItemLoc<N> {}
+
+impl<N: ItemTreeNode> PartialEq for ItemLoc<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.container == other.container && self.id == other.id
+ }
+}
+
+impl<N: ItemTreeNode> Eq for ItemLoc<N> {}
+
+impl<N: ItemTreeNode> Hash for ItemLoc<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.container.hash(state);
+ self.id.hash(state);
+ }
+}
+
+#[derive(Debug)]
+pub struct AssocItemLoc<N: ItemTreeNode> {
+ pub container: ItemContainerId,
+ pub id: ItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> Clone for AssocItemLoc<N> {
+ fn clone(&self) -> Self {
+ Self { container: self.container, id: self.id }
+ }
+}
+
+impl<N: ItemTreeNode> Copy for AssocItemLoc<N> {}
+
+impl<N: ItemTreeNode> PartialEq for AssocItemLoc<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.container == other.container && self.id == other.id
+ }
+}
+
+impl<N: ItemTreeNode> Eq for AssocItemLoc<N> {}
+
+impl<N: ItemTreeNode> Hash for AssocItemLoc<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.container.hash(state);
+ self.id.hash(state);
+ }
+}
+
+macro_rules! impl_intern {
+ ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
+ impl_intern_key!($id);
+
+ impl Intern for $loc {
+ type ID = $id;
+ fn intern(self, db: &dyn db::DefDatabase) -> $id {
+ db.$intern(self)
+ }
+ }
+
+ impl Lookup for $id {
+ type Data = $loc;
+ fn lookup(&self, db: &dyn db::DefDatabase) -> $loc {
+ db.$lookup(*self)
+ }
+ }
+ };
+}
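+
+// Sketch of the round trip each `impl_intern!` invocation wires up (hypothetical
+// `db` and `function_loc` values):
+//
+//     let id: FunctionId = function_loc.intern(db);  // calls db.intern_function(loc)
+//     let loc: FunctionLoc = id.lookup(db);          // calls db.lookup_intern_function(id)
+//
+// Interning the same location twice yields the same ID, so the `*Id` types stay
+// cheap to copy, hash, and compare while the full location lives in the database.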
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct FunctionId(salsa::InternId);
+type FunctionLoc = AssocItemLoc<Function>;
+impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct StructId(salsa::InternId);
+type StructLoc = ItemLoc<Struct>;
+impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct UnionId(salsa::InternId);
+pub type UnionLoc = ItemLoc<Union>;
+impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EnumId(salsa::InternId);
+pub type EnumLoc = ItemLoc<Enum>;
+impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
+
+// FIXME: rename to `VariantId`, only enums can have variants
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct EnumVariantId {
+ pub parent: EnumId,
+ pub local_id: LocalEnumVariantId,
+}
+
+pub type LocalEnumVariantId = Idx<adt::EnumVariantData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct FieldId {
+ pub parent: VariantId,
+ pub local_id: LocalFieldId,
+}
+
+pub type LocalFieldId = Idx<adt::FieldData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ConstId(salsa::InternId);
+type ConstLoc = AssocItemLoc<Const>;
+impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct StaticId(salsa::InternId);
+pub type StaticLoc = AssocItemLoc<Static>;
+impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TraitId(salsa::InternId);
+pub type TraitLoc = ItemLoc<Trait>;
+impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeAliasId(salsa::InternId);
+type TypeAliasLoc = AssocItemLoc<TypeAlias>;
+impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ImplId(salsa::InternId);
+type ImplLoc = ItemLoc<Impl>;
+impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ExternBlockId(salsa::InternId);
+type ExternBlockLoc = ItemLoc<ExternBlock>;
+impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroExpander {
+ Declarative,
+ BuiltIn(BuiltinFnLikeExpander),
+ BuiltInAttr(BuiltinAttrExpander),
+ BuiltInDerive(BuiltinDeriveExpander),
+ BuiltInEager(EagerExpander),
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct Macro2Id(salsa::InternId);
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Macro2Loc {
+ pub container: ModuleId,
+ pub id: ItemTreeId<MacroDef>,
+ pub expander: MacroExpander,
+}
+impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct MacroRulesId(salsa::InternId);
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroRulesLoc {
+ pub container: ModuleId,
+ pub id: ItemTreeId<MacroRules>,
+ pub local_inner: bool,
+ pub expander: MacroExpander,
+}
+impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ProcMacroId(salsa::InternId);
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ProcMacroLoc {
+ // FIXME: this should be a crate? or just a crate-root module
+ pub container: ModuleId,
+ pub id: ItemTreeId<Function>,
+ pub expander: ProcMacroExpander,
+ pub kind: ProcMacroKind,
+}
+impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct BlockId(salsa::InternId);
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+pub struct BlockLoc {
+ ast_id: AstId<ast::BlockExpr>,
+ /// The containing module.
+ module: ModuleId,
+}
+impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeOrConstParamId {
+ pub parent: GenericDefId,
+ pub local_id: LocalTypeOrConstParamId,
+}
+
+/// A TypeOrConstParamId with an invariant that it actually belongs to a type
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeParamId(TypeOrConstParamId);
+
+impl TypeParamId {
+ pub fn parent(&self) -> GenericDefId {
+ self.0.parent
+ }
+ pub fn local_id(&self) -> LocalTypeOrConstParamId {
+ self.0.local_id
+ }
+}
+
+impl TypeParamId {
+ /// The caller should check that this type-or-const id really belongs to a type
+ pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
+ Self(x)
+ }
+}
+
+impl From<TypeParamId> for TypeOrConstParamId {
+ fn from(x: TypeParamId) -> Self {
+ x.0
+ }
+}
+
+/// A TypeOrConstParamId with an invariant that it actually belongs to a const
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ConstParamId(TypeOrConstParamId);
+
+impl ConstParamId {
+ pub fn parent(&self) -> GenericDefId {
+ self.0.parent
+ }
+ pub fn local_id(&self) -> LocalTypeOrConstParamId {
+ self.0.local_id
+ }
+}
+
+impl ConstParamId {
+ /// The caller should check that this type-or-const id really belongs to a const
+ pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
+ Self(x)
+ }
+}
+
+impl From<ConstParamId> for TypeOrConstParamId {
+ fn from(x: ConstParamId) -> Self {
+ x.0
+ }
+}
+
+pub type LocalTypeOrConstParamId = Idx<generics::TypeOrConstParamData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LifetimeParamId {
+ pub parent: GenericDefId,
+ pub local_id: LocalLifetimeParamId,
+}
+pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ItemContainerId {
+ ExternBlockId(ExternBlockId),
+ ModuleId(ModuleId),
+ ImplId(ImplId),
+ TraitId(TraitId),
+}
+impl_from!(ModuleId for ItemContainerId);
+
+/// A Data Type
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum AdtId {
+ StructId(StructId),
+ UnionId(UnionId),
+ EnumId(EnumId),
+}
+impl_from!(StructId, UnionId, EnumId for AdtId);
+
+/// A macro
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum MacroId {
+ Macro2Id(Macro2Id),
+ MacroRulesId(MacroRulesId),
+ ProcMacroId(ProcMacroId),
+}
+impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId);
+
+impl MacroId {
+ pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool {
+ match self {
+ MacroId::ProcMacroId(it) => it.lookup(db).kind == ProcMacroKind::Attr,
+ _ => false,
+ }
+ }
+}
+
+/// A generic param
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum GenericParamId {
+ TypeParamId(TypeParamId),
+ ConstParamId(ConstParamId),
+ LifetimeParamId(LifetimeParamId),
+}
+impl_from!(TypeParamId, LifetimeParamId, ConstParamId for GenericParamId);
+
+/// The defs which can be visible in the module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ModuleDefId {
+ ModuleId(ModuleId),
+ FunctionId(FunctionId),
+ AdtId(AdtId),
+ // Can't be directly declared, but can be imported.
+ EnumVariantId(EnumVariantId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
+ BuiltinType(BuiltinType),
+ MacroId(MacroId),
+}
+impl_from!(
+ MacroId(Macro2Id, MacroRulesId, ProcMacroId),
+ ModuleId,
+ FunctionId,
+ AdtId(StructId, EnumId, UnionId),
+ EnumVariantId,
+ ConstId,
+ StaticId,
+ TraitId,
+ TypeAliasId,
+ BuiltinType
+ for ModuleDefId
+);
+
+/// The defs which have a body.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBodyId {
+ FunctionId(FunctionId),
+ StaticId(StaticId),
+ ConstId(ConstId),
+}
+
+impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId);
+
+impl DefWithBodyId {
+ pub fn as_generic_def_id(self) -> Option<GenericDefId> {
+ match self {
+ DefWithBodyId::FunctionId(f) => Some(f.into()),
+ DefWithBodyId::StaticId(_) => None,
+ DefWithBodyId::ConstId(c) => Some(c.into()),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItemId {
+ FunctionId(FunctionId),
+ ConstId(ConstId),
+ TypeAliasId(TypeAliasId),
+}
+// FIXME: not every function, ... is actually an assoc item. maybe we should make
+// sure that you can only turn actual assoc items into AssocItemIds. This would
+// require not implementing From, and instead having some checked way of
+// casting them, and somehow making the constructors private, which would be annoying.
+impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum GenericDefId {
+ FunctionId(FunctionId),
+ AdtId(AdtId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
+ ImplId(ImplId),
+ // enum variants cannot have generics themselves, but their parent enums
+ // can, and this makes some code easier to write
+ EnumVariantId(EnumVariantId),
+ // consts can have type parameters from their parents (i.e. associated consts of traits)
+ ConstId(ConstId),
+}
+impl_from!(
+ FunctionId,
+ AdtId(StructId, EnumId, UnionId),
+ TraitId,
+ TypeAliasId,
+ ImplId,
+ EnumVariantId,
+ ConstId
+ for GenericDefId
+);
+
+impl From<AssocItemId> for GenericDefId {
+ fn from(item: AssocItemId) -> Self {
+ match item {
+ AssocItemId::FunctionId(f) => f.into(),
+ AssocItemId::ConstId(c) => c.into(),
+ AssocItemId::TypeAliasId(t) => t.into(),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum AttrDefId {
+ ModuleId(ModuleId),
+ FieldId(FieldId),
+ AdtId(AdtId),
+ FunctionId(FunctionId),
+ EnumVariantId(EnumVariantId),
+ StaticId(StaticId),
+ ConstId(ConstId),
+ TraitId(TraitId),
+ TypeAliasId(TypeAliasId),
+ MacroId(MacroId),
+ ImplId(ImplId),
+ GenericParamId(GenericParamId),
+ ExternBlockId(ExternBlockId),
+}
+
+impl_from!(
+ ModuleId,
+ FieldId,
+ AdtId(StructId, EnumId, UnionId),
+ EnumVariantId,
+ StaticId,
+ ConstId,
+ FunctionId,
+ TraitId,
+ TypeAliasId,
+ MacroId(Macro2Id, MacroRulesId, ProcMacroId),
+ ImplId,
+ GenericParamId
+ for AttrDefId
+);
+
+impl From<ItemContainerId> for AttrDefId {
+ fn from(acid: ItemContainerId) -> Self {
+ match acid {
+ ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
+ ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
+ ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
+ ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum VariantId {
+ EnumVariantId(EnumVariantId),
+ StructId(StructId),
+ UnionId(UnionId),
+}
+impl_from!(EnumVariantId, StructId, UnionId for VariantId);
+
+impl VariantId {
+ pub fn variant_data(self, db: &dyn db::DefDatabase) -> Arc<VariantData> {
+ match self {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
+ }
+ }
+ }
+
+ pub fn file_id(self, db: &dyn db::DefDatabase) -> HirFileId {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.lookup(db).id.file_id(),
+ VariantId::StructId(it) => it.lookup(db).id.file_id(),
+ VariantId::UnionId(it) => it.lookup(db).id.file_id(),
+ }
+ }
+
+ pub fn adt_id(self) -> AdtId {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.into(),
+ VariantId::StructId(it) => it.into(),
+ VariantId::UnionId(it) => it.into(),
+ }
+ }
+}
+
+trait Intern {
+ type ID;
+ fn intern(self, db: &dyn db::DefDatabase) -> Self::ID;
+}
+
+pub trait Lookup {
+ type Data;
+ fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data;
+}
+
+pub trait HasModule {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId;
+}
+
+impl HasModule for ItemContainerId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match *self {
+ ItemContainerId::ModuleId(it) => it,
+ ItemContainerId::ImplId(it) => it.lookup(db).container,
+ ItemContainerId::TraitId(it) => it.lookup(db).container,
+ ItemContainerId::ExternBlockId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl<N: ItemTreeNode> HasModule for AssocItemLoc<N> {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.container.module(db)
+ }
+}
+
+impl HasModule for AdtId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ AdtId::StructId(it) => it.lookup(db).container,
+ AdtId::UnionId(it) => it.lookup(db).container,
+ AdtId::EnumId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl HasModule for VariantId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.lookup(db).container,
+ VariantId::StructId(it) => it.lookup(db).container,
+ VariantId::UnionId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl HasModule for MacroId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ MacroId::MacroRulesId(it) => it.lookup(db).container,
+ MacroId::Macro2Id(it) => it.lookup(db).container,
+ MacroId::ProcMacroId(it) => it.lookup(db).container,
+ }
+ }
+}
+
+impl HasModule for DefWithBodyId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
+ DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
+ DefWithBodyId::ConstId(it) => it.lookup(db).module(db),
+ }
+ }
+}
+
+impl DefWithBodyId {
+ pub fn as_mod_item(self, db: &dyn db::DefDatabase) -> ModItem {
+ match self {
+ DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(),
+ DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(),
+ DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(),
+ }
+ }
+}
+
+impl HasModule for GenericDefId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ match self {
+ GenericDefId::FunctionId(it) => it.lookup(db).module(db),
+ GenericDefId::AdtId(it) => it.module(db),
+ GenericDefId::TraitId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).module(db),
+ GenericDefId::ImplId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => it.parent.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).module(db),
+ }
+ }
+}
+
+impl HasModule for TypeAliasId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.lookup(db).module(db)
+ }
+}
+
+impl HasModule for TraitId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.lookup(db).container
+ }
+}
+
+impl ModuleDefId {
+ /// Returns the module containing `self` (or `self`, if `self` is itself a module).
+ ///
+ /// Returns `None` if `self` refers to a primitive type.
+ pub fn module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+ Some(match self {
+ ModuleDefId::ModuleId(id) => *id,
+ ModuleDefId::FunctionId(id) => id.lookup(db).module(db),
+ ModuleDefId::AdtId(id) => id.module(db),
+ ModuleDefId::EnumVariantId(id) => id.parent.lookup(db).container,
+ ModuleDefId::ConstId(id) => id.lookup(db).container.module(db),
+ ModuleDefId::StaticId(id) => id.lookup(db).module(db),
+ ModuleDefId::TraitId(id) => id.lookup(db).container,
+ ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db),
+ ModuleDefId::MacroId(id) => id.module(db),
+ ModuleDefId::BuiltinType(_) => return None,
+ })
+ }
+}
+
+impl AttrDefId {
+ pub fn krate(&self, db: &dyn db::DefDatabase) -> CrateId {
+ match self {
+ AttrDefId::ModuleId(it) => it.krate,
+ AttrDefId::FieldId(it) => it.parent.module(db).krate,
+ AttrDefId::AdtId(it) => it.module(db).krate,
+ AttrDefId::FunctionId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::EnumVariantId(it) => it.parent.lookup(db).container.krate,
+ AttrDefId::StaticId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::ConstId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::TraitId(it) => it.lookup(db).container.krate,
+ AttrDefId::TypeAliasId(it) => it.lookup(db).module(db).krate,
+ AttrDefId::ImplId(it) => it.lookup(db).container.krate,
+ AttrDefId::ExternBlockId(it) => it.lookup(db).container.krate,
+ AttrDefId::GenericParamId(it) => {
+ match it {
+ GenericParamId::TypeParamId(it) => it.parent(),
+ GenericParamId::ConstParamId(it) => it.parent(),
+ GenericParamId::LifetimeParamId(it) => it.parent,
+ }
+ .module(db)
+ .krate
+ }
+ AttrDefId::MacroId(it) => it.module(db).krate,
+ }
+ }
+}
+
+/// A helper trait for converting to MacroCallId
+pub trait AsMacroCall {
+ fn as_call_id(
+ &self,
+ db: &dyn db::DefDatabase,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ ) -> Option<MacroCallId> {
+ self.as_call_id_with_errors(db, krate, resolver, &mut |_| ()).ok()?.ok()
+ }
+
+ fn as_call_id_with_errors(
+ &self,
+ db: &dyn db::DefDatabase,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ error_sink: &mut dyn FnMut(ExpandError),
+ ) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro>;
+}
+
+impl AsMacroCall for InFile<&ast::MacroCall> {
+ fn as_call_id_with_errors(
+ &self,
+ db: &dyn db::DefDatabase,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ mut error_sink: &mut dyn FnMut(ExpandError),
+ ) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+ let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
+ let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
+ let h = Hygiene::new(db.upcast(), self.file_id);
+ let path =
+ self.value.path().and_then(|path| path::ModPath::from_src(db.upcast(), path, &h));
+
+ let path = match error_sink
+ .option(path, || ExpandError::Other("malformed macro invocation".into()))
+ {
+ Ok(path) => path,
+ Err(error) => {
+ return Ok(Err(error));
+ }
+ };
+
+ macro_call_as_call_id(
+ db,
+ &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+ expands_to,
+ krate,
+ resolver,
+ error_sink,
+ )
+ }
+}
+
+/// Helper wrapper for `AstId` with `ModPath`
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct AstIdWithPath<T: ast::AstNode> {
+ ast_id: AstId<T>,
+ path: path::ModPath,
+}
+
+impl<T: ast::AstNode> AstIdWithPath<T> {
+ fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
+ AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
+ }
+}
+
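+// A sketch of the control flow below (hedged, not authoritative): built-in eager
+// macros (e.g. `concat!`) must expand their arguments up front via
+// `expand_eager_macro`, while every other macro kind is only recorded here as a
+// lazy `MacroCallKind::FnLike` call and expanded later on demand.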
+fn macro_call_as_call_id(
+ db: &dyn db::DefDatabase,
+ call: &AstIdWithPath<ast::MacroCall>,
+ expand_to: ExpandTo,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ error_sink: &mut dyn FnMut(ExpandError),
+) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+ let def =
+ resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
+
+ let res = if let MacroDefKind::BuiltInEager(..) = def.kind {
+ let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db.upcast()));
+
+ expand_eager_macro(db.upcast(), krate, macro_call, def, &resolver, error_sink)?
+ } else {
+ Ok(def.as_lazy_macro(
+ db.upcast(),
+ krate,
+ MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+ ))
+ };
+ Ok(res)
+}
+
+pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
+ match id {
+ MacroId::Macro2Id(it) => {
+ let loc = it.lookup(db);
+
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+ let in_file = |m: FileAstId<ast::MacroDef>| InFile::new(loc.id.file_id(), m.upcast());
+ MacroDefId {
+ krate: loc.container.krate,
+ kind: match loc.expander {
+ MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)),
+ MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)),
+ MacroExpander::BuiltInAttr(it) => {
+ MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInDerive(it) => {
+ MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInEager(it) => {
+ MacroDefKind::BuiltInEager(it, in_file(makro.ast_id))
+ }
+ },
+ local_inner: false,
+ }
+ }
+ MacroId::MacroRulesId(it) => {
+ let loc = it.lookup(db);
+
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+ let in_file = |m: FileAstId<ast::MacroRules>| InFile::new(loc.id.file_id(), m.upcast());
+ MacroDefId {
+ krate: loc.container.krate,
+ kind: match loc.expander {
+ MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)),
+ MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)),
+ MacroExpander::BuiltInAttr(it) => {
+ MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInDerive(it) => {
+ MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id))
+ }
+ MacroExpander::BuiltInEager(it) => {
+ MacroDefKind::BuiltInEager(it, in_file(makro.ast_id))
+ }
+ },
+ local_inner: loc.local_inner,
+ }
+ }
+ MacroId::ProcMacroId(it) => {
+ let loc = it.lookup(db);
+
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+ MacroDefId {
+ krate: loc.container.krate,
+ kind: MacroDefKind::ProcMacro(
+ loc.expander,
+ loc.kind,
+ InFile::new(loc.id.file_id(), makro.ast_id),
+ ),
+ local_inner: false,
+ }
+ }
+ }
+}
+
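+// A best-effort reading of the parameters below (an assumption, not authoritative):
+// `derive_attr` identifies which `#[derive(...)]` attribute on the item triggered the
+// expansion, and `derive_pos` is the position of this particular derive within that
+// attribute's list, so `#[derive(Clone, Copy)]` should produce two separate calls.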
+fn derive_macro_as_call_id(
+ db: &dyn db::DefDatabase,
+ item_attr: &AstIdWithPath<ast::Adt>,
+ derive_attr: AttrId,
+ derive_pos: u32,
+ krate: CrateId,
+ resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
+ let (macro_id, def_id) = resolver(item_attr.path.clone())
+ .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
+ let call_id = def_id.as_lazy_macro(
+ db.upcast(),
+ krate,
+ MacroCallKind::Derive {
+ ast_id: item_attr.ast_id,
+ derive_index: derive_pos,
+ derive_attr_index: derive_attr.ast_index,
+ },
+ );
+ Ok((macro_id, def_id, call_id))
+}
+
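+// Roughly what happens below: the attribute's token-tree argument (if any) is cloned,
+// its outer delimiter is dropped, and the call is recorded as `MacroCallKind::Attr`,
+// remembering via `invoc_attr_index` which attribute on the item triggered it.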
+fn attr_macro_as_call_id(
+ db: &dyn db::DefDatabase,
+ item_attr: &AstIdWithPath<ast::Item>,
+ macro_attr: &Attr,
+ krate: CrateId,
+ def: MacroDefId,
+ is_derive: bool,
+) -> MacroCallId {
+ let mut arg = match macro_attr.input.as_deref() {
+ Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()),
+ _ => Default::default(),
+ };
+
+ // The attribute input's outer delimiter (its parentheses) is always stripped here.
+ arg.0.delimiter = None;
+
+ let res = def.as_lazy_macro(
+ db.upcast(),
+ krate,
+ MacroCallKind::Attr {
+ ast_id: item_attr.ast_id,
+ attr_args: Arc::new(arg),
+ invoc_attr_index: macro_attr.id.ast_index,
+ is_derive,
+ },
+ );
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
new file mode 100644
index 000000000..81b9c5c4b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
@@ -0,0 +1,354 @@
+//! This module contains tests for macro expansion. Effectively, it covers the
+//! `tt`, `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like
+//! the wrong architecture at first glance, but it is intentional.
+//!
+//! Physically, the macro expansion process is intertwined with name resolution.
+//! You cannot expand *just* the syntax. So, to be able to write integration
+//! tests of the "expand this code please" form, we have to do it after name
+//! resolution, that is, in this crate. We *could* fake some dependencies and
+//! write unit tests (in fact, we used to do that), but that makes tests brittle
+//! and harder to understand.
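+//!
+//! Note on fixtures (see `check` below): a test opts into extra output through
+//! "magic" comments attached to the macro or the call site: `// +tokenids`
+//! annotates tokens with their ids, `// +tree` dumps the expanded syntax tree,
+//! and `// +errors` expects and prints parse errors in the expansion.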
+
+mod mbe;
+mod builtin_fn_macro;
+mod builtin_derive_macro;
+mod proc_macros;
+
+use std::{iter, ops::Range, sync::Arc};
+
+use ::mbe::TokenMap;
+use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
+use expect_test::Expect;
+use hir_expand::{
+ db::{AstDatabase, TokenExpander},
+ AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
+};
+use stdx::format_to;
+use syntax::{
+ ast::{self, edit::IndentLevel},
+ AstNode, SyntaxElement,
+ SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
+ SyntaxNode, TextRange, T,
+};
+use tt::{Subtree, TokenId};
+
+use crate::{
+ db::DefDatabase, macro_id_to_def_id, nameres::ModuleSource, resolver::HasResolver,
+ src::HasSource, test_db::TestDB, AdtId, AsMacroCall, Lookup, ModuleDefId,
+};
+
+#[track_caller]
+fn check(ra_fixture: &str, mut expect: Expect) {
+ let extra_proc_macros = vec![(
+ r#"
+#[proc_macro_attribute]
+pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "identity_when_valid".into(),
+ kind: base_db::ProcMacroKind::Attr,
+ expander: Arc::new(IdentityWhenValidProcMacroExpander),
+ },
+ )];
+ let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
+ let krate = db.crate_graph().iter().next().unwrap();
+ let def_map = db.crate_def_map(krate);
+ let local_id = def_map.root();
+ let module = def_map.module_id(local_id);
+ let resolver = module.resolver(&db);
+ let source = def_map[local_id].definition_source(&db);
+ let source_file = match source.value {
+ ModuleSource::SourceFile(it) => it,
+ ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(),
+ };
+
+ // What we want to do is to replace all macros (fn-like, derive, attr) with
+ // their expansions. Turns out, we don't actually store enough information
+ // to do this precisely though! Specifically, if a macro expands to nothing,
+ // it leaves zero traces in the def-map, so we can't get its expansion after
+ // the fact.
+ //
+ // This is the usual
+ // <https://github.com/rust-lang/rust-analyzer/issues/3407>
+ // resolve/record tension!
+ //
+ // So here we try to do a resolve, which is necessarily a heuristic. For macro
+ // calls, we use `as_call_id_with_errors`. For derives, we look at the impls
+ // in the module and assume that, if an impl's source is in a different
+ // `HirFileId`, then it came from macro expansion.
+
+ let mut text_edits = Vec::new();
+ let mut expansions = Vec::new();
+
+ for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
+ let mut show_token_ids = false;
+ for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
+ show_token_ids |= comment.to_string().contains("+tokenids");
+ }
+ if !show_token_ids {
+ continue;
+ }
+
+ let call_offset = macro_.syntax().text_range().start().into();
+ let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
+ let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
+ let kind = MacroDefKind::Declarative(ast_id);
+
+ let macro_def = db.macro_def(MacroDefId { krate, kind, local_inner: false }).unwrap();
+ if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
+ let tt = match &macro_ {
+ ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
+ ast::Macro::MacroDef(_) => unimplemented!(""),
+ };
+
+ let tt_start = tt.syntax().text_range().start();
+ tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
+ |token| {
+ let range = token.text_range().checked_sub(tt_start).unwrap();
+ if let Some(id) = def_site_token_map.token_by_range(range) {
+ let offset = (range.end() + tt_start).into();
+ text_edits.push((offset..offset, format!("#{}", id.0)));
+ }
+ },
+ );
+ text_edits.push((
+ call_offset..call_offset,
+ format!("// call ids will be shifted by {:?}\n", mac.shift()),
+ ));
+ }
+ }
+
+ for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
+ let macro_call = InFile::new(source.file_id, &macro_call);
+ let mut error = None;
+ let macro_call_id = macro_call
+ .as_call_id_with_errors(
+ &db,
+ krate,
+ |path| {
+ resolver.resolve_path_as_macro(&db, &path).map(|it| macro_id_to_def_id(&db, it))
+ },
+ &mut |err| error = Some(err),
+ )
+ .unwrap()
+ .unwrap();
+ let macro_file = MacroFile { macro_call_id };
+ let mut expansion_result = db.parse_macro_expansion(macro_file);
+ expansion_result.err = expansion_result.err.or(error);
+ expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
+ }
+
+ for (call, exp, arg) in expansions.into_iter().rev() {
+ let mut tree = false;
+ let mut expect_errors = false;
+ let mut show_token_ids = false;
+ for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
+ tree |= comment.to_string().contains("+tree");
+ expect_errors |= comment.to_string().contains("+errors");
+ show_token_ids |= comment.to_string().contains("+tokenids");
+ }
+
+ let mut expn_text = String::new();
+ if let Some(err) = exp.err {
+ format_to!(expn_text, "/* error: {} */", err);
+ }
+ if let Some((parse, token_map)) = exp.value {
+ if expect_errors {
+ assert!(!parse.errors().is_empty(), "no parse errors in expansion");
+ for e in parse.errors() {
+ format_to!(expn_text, "/* parse error: {} */\n", e);
+ }
+ } else {
+ assert!(
+ parse.errors().is_empty(),
+ "parse errors in expansion: \n{:#?}",
+ parse.errors()
+ );
+ }
+ let pp = pretty_print_macro_expansion(
+ parse.syntax_node(),
+ show_token_ids.then(|| &*token_map),
+ );
+ let indent = IndentLevel::from_node(call.syntax());
+ let pp = reindent(indent, pp);
+ format_to!(expn_text, "{}", pp);
+
+ if tree {
+ let tree = format!("{:#?}", parse.syntax_node())
+ .split_inclusive('\n')
+ .map(|line| format!("// {}", line))
+ .collect::<String>();
+ format_to!(expn_text, "\n{}", tree)
+ }
+ }
+ let range = call.syntax().text_range();
+ let range: Range<usize> = range.into();
+
+ if show_token_ids {
+ if let Some((tree, map, _)) = arg.as_deref() {
+ let tt_range = call.token_tree().unwrap().syntax().text_range();
+ let mut ranges = Vec::new();
+ extract_id_ranges(&mut ranges, map, tree);
+ for (range, id) in ranges {
+ let idx = (tt_range.start() + range.end()).into();
+ text_edits.push((idx..idx, format!("#{}", id.0)));
+ }
+ }
+ text_edits.push((range.start..range.start, "// ".into()));
+ call.to_string().match_indices('\n').for_each(|(offset, _)| {
+ let offset = offset + 1 + range.start;
+ text_edits.push((offset..offset, "// ".into()));
+ });
+ text_edits.push((range.end..range.end, "\n".into()));
+ text_edits.push((range.end..range.end, expn_text));
+ } else {
+ text_edits.push((range, expn_text));
+ }
+ }
+
+ text_edits.sort_by_key(|(range, _)| range.start);
+ text_edits.reverse();
+ let mut expanded_text = source_file.to_string();
+ for (range, text) in text_edits {
+ expanded_text.replace_range(range, &text);
+ }
+
+ for decl_id in def_map[local_id].scope.declarations() {
+ // FIXME: I'm sure there's already a better way to do this
+ let src = match decl_id {
+ ModuleDefId::AdtId(AdtId::StructId(struct_id)) => {
+ Some(struct_id.lookup(&db).source(&db).syntax().cloned())
+ }
+ ModuleDefId::FunctionId(function_id) => {
+ Some(function_id.lookup(&db).source(&db).syntax().cloned())
+ }
+ _ => None,
+ };
+ if let Some(src) = src {
+ if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
+ let pp = pretty_print_macro_expansion(src.value, None);
+ format_to!(expanded_text, "\n{}", pp)
+ }
+ }
+ }
+
+ for impl_id in def_map[local_id].scope.impls() {
+ let src = impl_id.lookup(&db).source(&db);
+ if src.file_id.is_builtin_derive(&db).is_some() {
+ let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
+ format_to!(expanded_text, "\n{}", pp)
+ }
+ }
+
+ expect.indent(false);
+ expect.assert_eq(&expanded_text);
+}
+
+fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
+ tree.token_trees.iter().for_each(|tree| match tree {
+ tt::TokenTree::Leaf(leaf) => {
+ let id = match leaf {
+ tt::Leaf::Literal(it) => it.id,
+ tt::Leaf::Punct(it) => it.id,
+ tt::Leaf::Ident(it) => it.id,
+ };
+ ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
+ }
+ tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
+ });
+}
+
+fn reindent(indent: IndentLevel, pp: String) -> String {
+ if !pp.contains('\n') {
+ return pp;
+ }
+ let mut lines = pp.split_inclusive('\n');
+ let mut res = lines.next().unwrap().to_string();
+ for line in lines {
+ if line.trim().is_empty() {
+ res.push_str(line)
+ } else {
+ format_to!(res, "{}{}", indent, line)
+ }
+ }
+ res
+}
+
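+// Renders the expansion as text, inserting whitespace and indentation between tokens
+// based on a purely heuristic table of token-kind pairs; when a `TokenMap` is supplied,
+// each token is additionally annotated with `#<token id>`.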
+fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
+ let mut res = String::new();
+ let mut prev_kind = EOF;
+ let mut indent_level = 0;
+ for token in iter::successors(expn.first_token(), |t| t.next_token()) {
+ let curr_kind = token.kind();
+ let space = match (prev_kind, curr_kind) {
+ _ if prev_kind.is_trivia() || curr_kind.is_trivia() => "",
+ (T!['{'], T!['}']) => "",
+ (T![=], _) | (_, T![=]) => " ",
+ (_, T!['{']) => " ",
+ (T![;] | T!['{'] | T!['}'], _) => "\n",
+ (_, T!['}']) => "\n",
+ (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
+ _ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ",
+ (IDENT, _) if curr_kind.is_keyword() => " ",
+ (_, IDENT) if prev_kind.is_keyword() => " ",
+ (T![>], IDENT) => " ",
+ (T![>], _) if curr_kind.is_keyword() => " ",
+ (T![->], _) | (_, T![->]) => " ",
+ (T![&&], _) | (_, T![&&]) => " ",
+ (T![,], _) => " ",
+ (T![:], IDENT | T!['(']) => " ",
+ (T![:], _) if curr_kind.is_keyword() => " ",
+ (T![fn], T!['(']) => "",
+ (T![']'], _) if curr_kind.is_keyword() => " ",
+ (T![']'], T![#]) => "\n",
+ (T![Self], T![::]) => "",
+ _ if prev_kind.is_keyword() => " ",
+ _ => "",
+ };
+
+ match prev_kind {
+ T!['{'] => indent_level += 1,
+ T!['}'] => indent_level -= 1,
+ _ => (),
+ }
+
+ res.push_str(space);
+ if space == "\n" {
+ let level = if curr_kind == T!['}'] { indent_level - 1 } else { indent_level };
+ res.push_str(&" ".repeat(level));
+ }
+ prev_kind = curr_kind;
+ format_to!(res, "{}", token);
+ if let Some(map) = map {
+ if let Some(id) = map.token_by_range(token.text_range()) {
+ format_to!(res, "#{}", id.0);
+ }
+ }
+ }
+ res
+}
+
+// An identity expander, but one that only works when the input is syntactically
+// valid. This simulates common proc macros that needlessly parse their input and
+// return compile errors when it is invalid.
+#[derive(Debug)]
+struct IdentityWhenValidProcMacroExpander;
+impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ _: Option<&Subtree>,
+ _: &base_db::Env,
+ ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
+ let (parse, _) =
+ ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+ if parse.errors().is_empty() {
+ Ok(subtree.clone())
+ } else {
+ panic!("got invalid macro input: {:?}", parse.errors());
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
new file mode 100644
index 000000000..6819e9114
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -0,0 +1,95 @@
+//! Tests for `builtin_derive_macro.rs` from `hir_expand`.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn test_copy_expand_simple() {
+ check(
+ r#"
+//- minicore: derive, copy
+#[derive(Copy)]
+struct Foo;
+"#,
+ expect![[r##"
+#[derive(Copy)]
+struct Foo;
+
+impl < > core::marker::Copy for Foo< > {}"##]],
+ );
+}
+
+#[test]
+fn test_copy_expand_in_core() {
+ cov_mark::check!(test_copy_expand_in_core);
+ check(
+ r#"
+//- /lib.rs crate:core
+#[rustc_builtin_macro]
+macro derive {}
+#[rustc_builtin_macro]
+macro Copy {}
+#[derive(Copy)]
+struct Foo;
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro derive {}
+#[rustc_builtin_macro]
+macro Copy {}
+#[derive(Copy)]
+struct Foo;
+
+impl < > crate ::marker::Copy for Foo< > {}"##]],
+ );
+}
+
+#[test]
+fn test_copy_expand_with_type_params() {
+ check(
+ r#"
+//- minicore: derive, copy
+#[derive(Copy)]
+struct Foo<A, B>;
+"#,
+ expect![[r##"
+#[derive(Copy)]
+struct Foo<A, B>;
+
+impl <T0: core::marker::Copy, T1: core::marker::Copy> core::marker::Copy for Foo<T0, T1> {}"##]],
+ );
+}
+
+#[test]
+fn test_copy_expand_with_lifetimes() {
+ // We currently just ignore lifetimes
+ check(
+ r#"
+//- minicore: derive, copy
+#[derive(Copy)]
+struct Foo<A, B, 'a, 'b>;
+"#,
+ expect![[r##"
+#[derive(Copy)]
+struct Foo<A, B, 'a, 'b>;
+
+impl <T0: core::marker::Copy, T1: core::marker::Copy> core::marker::Copy for Foo<T0, T1> {}"##]],
+ );
+}
+
+#[test]
+fn test_clone_expand() {
+ check(
+ r#"
+//- minicore: derive, clone
+#[derive(Clone)]
+struct Foo<A, B>;
+"#,
+ expect![[r##"
+#[derive(Clone)]
+struct Foo<A, B>;
+
+impl <T0: core::clone::Clone, T1: core::clone::Clone> core::clone::Clone for Foo<T0, T1> {}"##]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
new file mode 100644
index 000000000..92dffa7f3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -0,0 +1,377 @@
+//! Tests for `builtin_fn_macro.rs` from `hir_expand`.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn test_column_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! column {() => {}}
+
+fn main() { column!(); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! column {() => {}}
+
+fn main() { 0; }
+"##]],
+ );
+}
+
+#[test]
+fn test_line_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! line {() => {}}
+
+fn main() { line!() }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! line {() => {}}
+
+fn main() { 0 }
+"##]],
+ );
+}
+
+#[test]
+fn test_stringify_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! stringify {() => {}}
+
+fn main() {
+ stringify!(
+ a
+ b
+ c
+ );
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! stringify {() => {}}
+
+fn main() {
+ "a b c";
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_env_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+fn main() { env!("TEST_ENV_VAR"); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+fn main() { "__RA_UNIMPLEMENTED__"; }
+"##]],
+ );
+}
+
+#[test]
+fn test_option_env_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! option_env {() => {}}
+
+fn main() { option_env!("TEST_ENV_VAR"); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! option_env {() => {}}
+
+fn main() { std::option::Option::None:: < &str>; }
+"##]],
+ );
+}
+
+#[test]
+fn test_file_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! file {() => {}}
+
+fn main() { file!(); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! file {() => {}}
+
+fn main() { ""; }
+"##]],
+ );
+}
+
+#[test]
+fn test_assert_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! assert {
+ ($cond:expr) => ({ /* compiler built-in */ });
+ ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ assert!(true, "{} {:?}", arg1(a, b, c), arg2);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! assert {
+ ($cond:expr) => ({ /* compiler built-in */ });
+ ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ {
+ if !true {
+ $crate::panic!("{} {:?}", arg1(a, b, c), arg2);
+ }
+ };
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_compile_error_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! compile_error {
+ ($msg:expr) => ({ /* compiler built-in */ });
+ ($msg:expr,) => ({ /* compiler built-in */ })
+}
+
+// This expands to nothing (since it's in item position), but emits an error.
+compile_error!("error!");
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! compile_error {
+ ($msg:expr) => ({ /* compiler built-in */ });
+ ($msg:expr,) => ({ /* compiler built-in */ })
+}
+
+/* error: error! */
+"##]],
+ );
+}
+
+#[test]
+fn test_format_args_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ format_args!("{} {:?}", arg1(a, b, c), arg2);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a, b, c)), std::fmt::Display::fmt), std::fmt::ArgumentV1::new(&(arg2), std::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_format_args_expand_with_comma_exprs() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ format_args!("{} {:?}", a::<A,B>(), b);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a::<A, B>()), std::fmt::Display::fmt), std::fmt::ArgumentV1::new(&(b), std::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_format_args_expand_with_broken_member_access() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ let _ =
+ format_args!/*+errors*/("{} {:?}", a.);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ let _ =
+ /* parse error: expected field name or number */
+std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a.), std::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_include_bytes_expand() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include_bytes {
+ ($file:expr) => {{ /* compiler built-in */ }};
+ ($file:expr,) => {{ /* compiler built-in */ }};
+}
+
+fn main() { include_bytes("foo"); }
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! include_bytes {
+ ($file:expr) => {{ /* compiler built-in */ }};
+ ($file:expr,) => {{ /* compiler built-in */ }};
+}
+
+fn main() { include_bytes("foo"); }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_expand() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+fn main() { "foor0bar\nfalse"; }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_bytes_expand() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat_bytes {}
+
+fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat_bytes {}
+
+fn main() { [b'A', 66, 67, 68, b'E', 70]; }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_with_captured_expr() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+macro_rules! surprise {
+ () => { "s" };
+}
+
+macro_rules! stuff {
+ ($string:expr) => { concat!($string) };
+}
+
+fn main() { concat!(surprise!()); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+macro_rules! surprise {
+ () => { "s" };
+}
+
+macro_rules! stuff {
+ ($string:expr) => { concat!($string) };
+}
+
+fn main() { "s"; }
+"##]],
+ );
+}
+
+#[test]
+fn test_concat_idents_expand() {
+ check(
+ r##"
+#[rustc_builtin_macro]
+macro_rules! concat_idents {}
+
+fn main() { concat_idents!(foo, bar); }
+"##,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! concat_idents {}
+
+fn main() { foobar; }
+"##]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
new file mode 100644
index 000000000..30d39d52f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -0,0 +1,1632 @@
+//! Tests specific to declarative macros, aka macros by example. This covers
+//! both stable `macro_rules!` macros and unstable `macro` macros.
+
+mod tt_conversion;
+mod matching;
+mod meta_syntax;
+mod regression;
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn token_mapping_smoke_test() {
+ check(
+ r#"
+// +tokenids
+macro_rules! f {
+ ( struct $ident:ident ) => {
+ struct $ident {
+ map: ::std::collections::HashSet<()>,
+ }
+ };
+}
+
+// +tokenids
+f!(struct MyTraitMap2);
+"#,
+ expect![[r##"
+// call ids will be shifted by Shift(30)
+// +tokenids
+macro_rules! f {#0
+ (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
+ struct#10 $#11ident#12 {#13
+ map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
+ }#13
+ }#9;#29
+}#0
+
+// // +tokenids
+// f!(struct#1 MyTraitMap2#2);
+struct#10 MyTraitMap2#32 {#13
+ map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
+}#13
+"##]],
+ );
+}
+
+#[test]
+fn token_mapping_floats() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12216
+ // (and related issues)
+ check(
+ r#"
+// +tokenids
+macro_rules! f {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+
+// +tokenids
+f! {
+ fn main() {
+ 1;
+ 1.0;
+ let x = 1;
+ }
+}
+
+
+"#,
+ expect![[r##"
+// call ids will be shifted by Shift(18)
+// +tokenids
+macro_rules! f {#0
+ (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
+ $#12(#13$#14tt#15)#13*#16
+ }#11;#17
+}#0
+
+// // +tokenids
+// f! {
+// fn#1 main#2() {
+// 1#5;#6
+// 1.0#7;#8
+// let#9 x#10 =#11 1#12;#13
+// }
+// }
+fn#19 main#20(#21)#21 {#22
+ 1#23;#24
+ 1.0#25;#26
+ let#27 x#28 =#29 1#30;#31
+}#22
+
+
+"##]],
+ );
+}
+
+#[test]
+fn mbe_smoke_test() {
+ check(
+ r#"
+macro_rules! impl_froms {
+ ($e:ident: $($v:ident),*) => {
+ $(
+ impl From<$v> for $e {
+ fn from(it: $v) -> $e { $e::$v(it) }
+ }
+ )*
+ }
+}
+impl_froms!(TokenTree: Leaf, Subtree);
+"#,
+ expect![[r#"
+macro_rules! impl_froms {
+ ($e:ident: $($v:ident),*) => {
+ $(
+ impl From<$v> for $e {
+ fn from(it: $v) -> $e { $e::$v(it) }
+ }
+ )*
+ }
+}
+impl From<Leaf> for TokenTree {
+ fn from(it: Leaf) -> TokenTree {
+ TokenTree::Leaf(it)
+ }
+}
+impl From<Subtree> for TokenTree {
+ fn from(it: Subtree) -> TokenTree {
+ TokenTree::Subtree(it)
+ }
+}
+"#]],
+ );
+}
+
+#[test]
+fn wrong_nesting_level() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident);*) => ($i)
+}
+m!{a}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident);*) => ($i)
+}
+/* error: expected simple binding, found nested binding `i` */
+"#]],
+ );
+}
+
+#[test]
+fn match_by_first_token_literally() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (= $i:ident) => ( fn $i() {} );
+ (+ $i:ident) => ( struct $i; )
+}
+m! { foo }
+m! { = bar }
+m! { + Baz }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (= $i:ident) => ( fn $i() {} );
+ (+ $i:ident) => ( struct $i; )
+}
+mod foo {}
+fn bar() {}
+struct Baz;
+"#]],
+ );
+}
+
+#[test]
+fn match_by_last_token_literally() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ ($i:ident =) => ( fn $i() {} );
+ ($i:ident +) => ( struct $i; )
+}
+m! { foo }
+m! { bar = }
+m! { Baz + }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ ($i:ident =) => ( fn $i() {} );
+ ($i:ident +) => ( struct $i; )
+}
+mod foo {}
+fn bar() {}
+struct Baz;
+"#]],
+ );
+}
+
+#[test]
+fn match_by_ident() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (spam $i:ident) => ( fn $i() {} );
+ (eggs $i:ident) => ( struct $i; )
+}
+m! { foo }
+m! { spam bar }
+m! { eggs Baz }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ( mod $i {} );
+ (spam $i:ident) => ( fn $i() {} );
+ (eggs $i:ident) => ( struct $i; )
+}
+mod foo {}
+fn bar() {}
+struct Baz;
+"#]],
+ );
+}
+
+#[test]
+fn match_by_separator_token() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ($(mod $i {} )*);
+ ($($i:ident)#*) => ($(fn $i() {} )*);
+ ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
+}
+
+m! { foo, bar }
+
+m! { foo# bar }
+
+m! { Foo,# Bar }
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($($i:ident),*) => ($(mod $i {} )*);
+ ($($i:ident)#*) => ($(fn $i() {} )*);
+ ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
+}
+
+mod foo {}
+mod bar {}
+
+fn foo() {}
+fn bar() {}
+
+struct Foo;
+struct Bar;
+"##]],
+ );
+}
+
+#[test]
+fn test_match_group_pattern_with_multiple_defs() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
+}
+impl Bar {
+ fn foo() {}
+ fn bar() {}
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_match_group_pattern_with_multiple_statement() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i ();)* } );
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i ();)* } );
+}
+fn baz() {
+ foo();
+ bar();
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_match_group_pattern_with_multiple_statement_without_semi() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i() );* } );
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($i:ident),*) => ( fn baz() { $($i() );* } );
+}
+fn baz() {
+ foo();
+ bar()
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_match_group_empty_fixed_token() {
+ check(
+ r#"
+macro_rules! m {
+ ($($i:ident)* #abc) => ( fn baz() { $($i ();)* } );
+}
+m!{#abc}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($($i:ident)* #abc) => ( fn baz() { $($i ();)* } );
+}
+fn baz() {}
+"##]],
+ )
+}
+
+#[test]
+fn test_match_group_in_subtree() {
+ check(
+ r#"
+macro_rules! m {
+ (fn $name:ident { $($i:ident)* } ) => ( fn $name() { $($i ();)* } );
+}
+m! { fn baz { a b } }
+"#,
+ expect![[r#"
+macro_rules! m {
+ (fn $name:ident { $($i:ident)* } ) => ( fn $name() { $($i ();)* } );
+}
+fn baz() {
+ a();
+ b();
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_expr_order() {
+ check(
+ r#"
+macro_rules! m {
+ ($ i:expr) => { fn bar() { $ i * 3; } }
+}
+// +tree
+m! { 1 + 2 }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($ i:expr) => { fn bar() { $ i * 3; } }
+}
+fn bar() {
+ (1+2)*3;
+}
+// MACRO_ITEMS@0..17
+// FN@0..17
+// FN_KW@0..2 "fn"
+// NAME@2..5
+// IDENT@2..5 "bar"
+// PARAM_LIST@5..7
+// L_PAREN@5..6 "("
+// R_PAREN@6..7 ")"
+// BLOCK_EXPR@7..17
+// STMT_LIST@7..17
+// L_CURLY@7..8 "{"
+// EXPR_STMT@8..16
+// BIN_EXPR@8..15
+// PAREN_EXPR@8..13
+// L_PAREN@8..9 "("
+// BIN_EXPR@9..12
+// LITERAL@9..10
+// INT_NUMBER@9..10 "1"
+// PLUS@10..11 "+"
+// LITERAL@11..12
+// INT_NUMBER@11..12 "2"
+// R_PAREN@12..13 ")"
+// STAR@13..14 "*"
+// LITERAL@14..15
+// INT_NUMBER@14..15 "3"
+// SEMICOLON@15..16 ";"
+// R_CURLY@16..17 "}"
+
+"#]],
+ )
+}
+
+#[test]
+fn test_match_group_with_multichar_sep() {
+ check(
+ r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+m! (fn baz { true false } );
+"#,
+ expect![[r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+fn baz() -> bool {
+ true && false
+}
+"#]],
+ );
+
+ check(
+ r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)&&* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+m! (fn baz { true && false } );
+"#,
+ expect![[r#"
+macro_rules! m {
+ (fn $name:ident { $($i:literal)&&* }) => ( fn $name() -> bool { $($i)&&* } );
+}
+fn baz() -> bool {
+ true && false
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_match_group_zero_match() {
+ check(
+ r#"
+macro_rules! m { ( $($i:ident)* ) => (); }
+m!();
+"#,
+ expect![[r#"
+macro_rules! m { ( $($i:ident)* ) => (); }
+
+"#]],
+ );
+}
+
+#[test]
+fn test_match_group_in_group() {
+ check(
+ r#"
+macro_rules! m {
+ [ $( ( $($i:ident)* ) )* ] => [ ok![$( ( $($i)* ) )*]; ]
+}
+m! ( (a b) );
+"#,
+ expect![[r#"
+macro_rules! m {
+ [ $( ( $($i:ident)* ) )* ] => [ ok![$( ( $($i)* ) )*]; ]
+}
+ok![(a b)];
+"#]],
+ )
+}
+
+#[test]
+fn test_expand_to_item_list() {
+ check(
+ r#"
+macro_rules! structs {
+ ($($i:ident),*) => { $(struct $i { field: u32 } )* }
+}
+
+// +tree
+structs!(Foo, Bar);
+ "#,
+ expect![[r#"
+macro_rules! structs {
+ ($($i:ident),*) => { $(struct $i { field: u32 } )* }
+}
+
+struct Foo {
+ field: u32
+}
+struct Bar {
+ field: u32
+}
+// MACRO_ITEMS@0..40
+// STRUCT@0..20
+// STRUCT_KW@0..6 "struct"
+// NAME@6..9
+// IDENT@6..9 "Foo"
+// RECORD_FIELD_LIST@9..20
+// L_CURLY@9..10 "{"
+// RECORD_FIELD@10..19
+// NAME@10..15
+// IDENT@10..15 "field"
+// COLON@15..16 ":"
+// PATH_TYPE@16..19
+// PATH@16..19
+// PATH_SEGMENT@16..19
+// NAME_REF@16..19
+// IDENT@16..19 "u32"
+// R_CURLY@19..20 "}"
+// STRUCT@20..40
+// STRUCT_KW@20..26 "struct"
+// NAME@26..29
+// IDENT@26..29 "Bar"
+// RECORD_FIELD_LIST@29..40
+// L_CURLY@29..30 "{"
+// RECORD_FIELD@30..39
+// NAME@30..35
+// IDENT@30..35 "field"
+// COLON@35..36 ":"
+// PATH_TYPE@36..39
+// PATH@36..39
+// PATH_SEGMENT@36..39
+// NAME_REF@36..39
+// IDENT@36..39 "u32"
+// R_CURLY@39..40 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_two_idents() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident, $j:ident) => { fn foo() { let a = $i; let b = $j; } }
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident, $j:ident) => { fn foo() { let a = $i; let b = $j; } }
+}
+fn foo() {
+ let a = foo;
+ let b = bar;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_to_stmts() {
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ let a = 0;
+ a = 10 + 1;
+ a
+ }
+}
+
+fn f() -> i32 {
+ m!/*+tree*/{}
+}
+"#,
+ expect![[r#"
+macro_rules! m {
+ () => {
+ let a = 0;
+ a = 10 + 1;
+ a
+ }
+}
+
+fn f() -> i32 {
+ let a = 0;
+ a = 10+1;
+ a
+// MACRO_STMTS@0..15
+// LET_STMT@0..7
+// LET_KW@0..3 "let"
+// IDENT_PAT@3..4
+// NAME@3..4
+// IDENT@3..4 "a"
+// EQ@4..5 "="
+// LITERAL@5..6
+// INT_NUMBER@5..6 "0"
+// SEMICOLON@6..7 ";"
+// EXPR_STMT@7..14
+// BIN_EXPR@7..13
+// PATH_EXPR@7..8
+// PATH@7..8
+// PATH_SEGMENT@7..8
+// NAME_REF@7..8
+// IDENT@7..8 "a"
+// EQ@8..9 "="
+// BIN_EXPR@9..13
+// LITERAL@9..11
+// INT_NUMBER@9..11 "10"
+// PLUS@11..12 "+"
+// LITERAL@12..13
+// INT_NUMBER@12..13 "1"
+// SEMICOLON@13..14 ";"
+// PATH_EXPR@14..15
+// PATH@14..15
+// PATH_SEGMENT@14..15
+// NAME_REF@14..15
+// IDENT@14..15 "a"
+
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_match_literal() {
+ check(
+ r#"
+macro_rules! m {
+ ('(') => { fn l_paren() {} }
+}
+m!['('];
+"#,
+ expect![[r#"
+macro_rules! m {
+ ('(') => { fn l_paren() {} }
+}
+fn l_paren() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_parse_macro_def_simple() {
+ cov_mark::check!(parse_macro_def_simple);
+ check(
+ r#"
+macro m($id:ident) { fn $id() {} }
+m!(bar);
+"#,
+ expect![[r#"
+macro m($id:ident) { fn $id() {} }
+fn bar() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_parse_macro_def_rules() {
+ cov_mark::check!(parse_macro_def_rules);
+
+ check(
+ r#"
+macro m {
+ ($id:ident) => { fn $id() {} }
+}
+m!(bar);
+"#,
+ expect![[r#"
+macro m {
+ ($id:ident) => { fn $id() {} }
+}
+fn bar() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_macro_2_0_panic_2015() {
+ check(
+ r#"
+macro panic_2015 {
+ () => (),
+ (bar) => (),
+}
+panic_2015!(bar);
+"#,
+ expect![[r#"
+macro panic_2015 {
+ () => (),
+ (bar) => (),
+}
+
+"#]],
+ );
+}
+
+#[test]
+fn test_path() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p; } }
+}
+
+m! { foo }
+
+m! { bar::<u8>::baz::<u8> }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p; } }
+}
+
+fn foo() {
+ let a = foo;
+}
+
+fn foo() {
+ let a = bar::<u8>::baz::<u8> ;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_two_paths() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:path, $j:path) => { fn foo() { let a = $ i; let b = $j; } }
+}
+m! { foo, bar }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:path, $j:path) => { fn foo() { let a = $ i; let b = $j; } }
+}
+fn foo() {
+ let a = foo;
+ let b = bar;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_path_with_path() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p::bar; } }
+}
+m! { foo }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:path) => { fn foo() { let a = $p::bar; } }
+}
+fn foo() {
+ let a = foo::bar;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_expr() {
+ check(
+ r#"
+macro_rules! m {
+ ($e:expr) => { fn bar() { $e; } }
+}
+
+m! { 2 + 2 * baz(3).quux() }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($e:expr) => { fn bar() { $e; } }
+}
+
+fn bar() {
+ (2+2*baz(3).quux());
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_last_expr() {
+ check(
+ r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+
+fn f() {
+ vec![1,2,3];
+}
+"#,
+ expect![[r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+
+fn f() {
+ {
+ let mut v = Vec::new();
+ v.push(1);
+ v.push(2);
+ v.push(3);
+ v
+ };
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_expr_with_attr() {
+ check(
+ r#"
+macro_rules! m { ($a:expr) => { ok!(); } }
+m!(#[allow(a)]());
+"#,
+ expect![[r#"
+macro_rules! m { ($a:expr) => { ok!(); } }
+ok!();
+"#]],
+ )
+}
+
+#[test]
+fn test_ty() {
+ check(
+ r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $t {} )
+}
+m! { Baz<u8> }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $t {} )
+}
+fn bar() -> Baz<u8> {}
+"#]],
+ )
+}
+
+#[test]
+fn test_ty_with_complex_type() {
+ check(
+ r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $ t {} )
+}
+
+m! { &'a Baz<u8> }
+
+m! { extern "Rust" fn() -> Ret }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($t:ty) => ( fn bar() -> $ t {} )
+}
+
+fn bar() -> & 'a Baz<u8> {}
+
+fn bar() -> extern "Rust"fn() -> Ret {}
+"#]],
+ );
+}
+
+#[test]
+fn test_pat_() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:pat) => { fn foo() { let $p; } }
+}
+m! { (a, b) }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:pat) => { fn foo() { let $p; } }
+}
+fn foo() {
+ let (a, b);
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_stmt() {
+ check(
+ r#"
+macro_rules! m {
+ ($s:stmt) => ( fn bar() { $s; } )
+}
+m! { 2 }
+m! { let a = 0 }
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($s:stmt) => ( fn bar() { $s; } )
+}
+fn bar() {
+ 2;
+}
+fn bar() {
+ let a = 0;
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_single_item() {
+ check(
+ r#"
+macro_rules! m { ($i:item) => ( $i ) }
+m! { mod c {} }
+"#,
+ expect![[r#"
+macro_rules! m { ($i:item) => ( $i ) }
+mod c {}
+"#]],
+ )
+}
+
+#[test]
+fn test_all_items() {
+ check(
+ r#"
+macro_rules! m { ($($i:item)*) => ($($i )*) }
+m! {
+ extern crate a;
+ mod b;
+ mod c {}
+ use d;
+ const E: i32 = 0;
+ static F: i32 = 0;
+ impl G {}
+ struct H;
+ enum I { Foo }
+ trait J {}
+ fn h() {}
+ extern {}
+ type T = u8;
+}
+"#,
+ expect![[r#"
+macro_rules! m { ($($i:item)*) => ($($i )*) }
+extern crate a;
+mod b;
+mod c {}
+use d;
+const E: i32 = 0;
+static F: i32 = 0;
+impl G {}
+struct H;
+enum I {
+ Foo
+}
+trait J {}
+fn h() {}
+extern {}
+type T = u8;
+"#]],
+ );
+}
+
+#[test]
+fn test_block() {
+ check(
+ r#"
+macro_rules! m { ($b:block) => { fn foo() $b } }
+m! { { 1; } }
+"#,
+ expect![[r#"
+macro_rules! m { ($b:block) => { fn foo() $b } }
+fn foo() {
+ 1;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_meta() {
+ check(
+ r#"
+macro_rules! m {
+ ($m:meta) => ( #[$m] fn bar() {} )
+}
+m! { cfg(target_os = "windows") }
+m! { hello::world }
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($m:meta) => ( #[$m] fn bar() {} )
+}
+#[cfg(target_os = "windows")] fn bar() {}
+#[hello::world] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_doc_comments() {
+ cov_mark::check!(test_meta_doc_comments);
+ check(
+ r#"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+m! {
+ /// Single Line Doc 1
+ /**
+ MultiLines Doc
+ */
+}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+#[doc = " Single Line Doc 1"]
+#[doc = "\n MultiLines Doc\n "] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_extended_key_value_attributes() {
+ check(
+ r#"
+macro_rules! m {
+ (#[$m:meta]) => ( #[$m] fn bar() {} )
+}
+m! { #[doc = concat!("The `", "bla", "` lang item.")] }
+"#,
+ expect![[r##"
+macro_rules! m {
+ (#[$m:meta]) => ( #[$m] fn bar() {} )
+}
+#[doc = concat!("The `", "bla", "` lang item.")] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_doc_comments_non_latin() {
+ check(
+ r#"
+macro_rules! m {
+ ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+m! {
+ /// 錦瑟無端五十弦,一弦一柱思華年。
+ /**
+ 莊生曉夢迷蝴蝶,望帝春心託杜鵑。
+ */
+}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+#[doc = " 錦瑟無端五十弦,一弦一柱思華年。"]
+#[doc = "\n 莊生曉夢迷蝴蝶,望帝春心託杜鵑。\n "] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_meta_doc_comments_escaped_characters() {
+ check(
+ r#"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+m! {
+ /// \ " '
+}
+"#,
+ expect![[r##"
+macro_rules! m {
+ ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
+}
+#[doc = " \\ \" \'"] fn bar() {}
+"##]],
+ );
+}
+
+#[test]
+fn test_tt_block() {
+ check(
+ r#"
+macro_rules! m { ($tt:tt) => { fn foo() $tt } }
+m! { { 1; } }
+"#,
+ expect![[r#"
+macro_rules! m { ($tt:tt) => { fn foo() $tt } }
+fn foo() {
+ 1;
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_group() {
+ check(
+ r#"
+macro_rules! m { ($($tt:tt)*) => { $($tt)* } }
+m! { fn foo() {} }"
+"#,
+ expect![[r#"
+macro_rules! m { ($($tt:tt)*) => { $($tt)* } }
+fn foo() {}"
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_composite() {
+ check(
+ r#"
+macro_rules! m { ($tt:tt) => { ok!(); } }
+m! { => }
+m! { = > }
+"#,
+ expect![[r#"
+macro_rules! m { ($tt:tt) => { ok!(); } }
+ok!();
+/* error: leftover tokens */ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_tt_composite2() {
+ check(
+ r#"
+macro_rules! m { ($($tt:tt)*) => { abs!(=> $($tt)*); } }
+m! {#}
+"#,
+ expect![[r##"
+macro_rules! m { ($($tt:tt)*) => { abs!(=> $($tt)*); } }
+abs!( = > #);
+"##]],
+ );
+}
+
+#[test]
+fn test_tt_with_composite_without_space() {
+ // Test macro input without any spaces
+ // See https://github.com/rust-lang/rust-analyzer/issues/6692
+ check(
+ r#"
+macro_rules! m { ($ op:tt, $j:path) => ( ok!(); ) }
+m!(==,Foo::Bool)
+"#,
+ expect![[r#"
+macro_rules! m { ($ op:tt, $j:path) => ( ok!(); ) }
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_underscore() {
+ check(
+ r#"
+macro_rules! m { ($_:tt) => { ok!(); } }
+m! { => }
+"#,
+ expect![[r#"
+macro_rules! m { ($_:tt) => { ok!(); } }
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_underscore_not_greedily() {
+ check(
+ r#"
+// `_` overlaps with `$a:ident` but rustc matches it under the `_` token.
+macro_rules! m1 {
+ ($($a:ident)* _) => { ok!(); }
+}
+m1![a b c d _];
+
+// `_ => ou` overlaps with `$a:expr => $b:ident` but rustc matches it under `_ => $c:expr`.
+macro_rules! m2 {
+ ($($a:expr => $b:ident)* _ => $c:expr) => { ok!(); }
+}
+m2![a => b c => d _ => ou]
+"#,
+ expect![[r#"
+// `_` overlaps with `$a:ident` but rustc matches it under the `_` token.
+macro_rules! m1 {
+ ($($a:ident)* _) => { ok!(); }
+}
+ok!();
+
+// `_ => ou` overlaps with `$a:expr => $b:ident` but rustc matches it under `_ => $c:expr`.
+macro_rules! m2 {
+ ($($a:expr => $b:ident)* _ => $c:expr) => { ok!(); }
+}
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_underscore_flavors() {
+ check(
+ r#"
+macro_rules! m1 { ($a:ty) => { ok!(); } }
+m1![_];
+
+macro_rules! m2 { ($a:lifetime) => { ok!(); } }
+m2!['_];
+"#,
+ expect![[r#"
+macro_rules! m1 { ($a:ty) => { ok!(); } }
+ok!();
+
+macro_rules! m2 { ($a:lifetime) => { ok!(); } }
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_vertical_bar_with_pat() {
+ check(
+ r#"
+macro_rules! m { (|$pat:pat| ) => { ok!(); } }
+m! { |x| }
+ "#,
+ expect![[r#"
+macro_rules! m { (|$pat:pat| ) => { ok!(); } }
+ok!();
+ "#]],
+ );
+}
+
+#[test]
+fn test_dollar_crate_lhs_is_not_meta() {
+ check(
+ r#"
+macro_rules! m {
+ ($crate) => { err!(); };
+ () => { ok!(); };
+}
+m!{}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($crate) => { err!(); };
+ () => { ok!(); };
+}
+ok!();
+"#]],
+ );
+}
+
+#[test]
+fn test_lifetime() {
+ check(
+ r#"
+macro_rules! m {
+ ($lt:lifetime) => { struct Ref<$lt>{ s: &$ lt str } }
+}
+m! {'a}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($lt:lifetime) => { struct Ref<$lt>{ s: &$ lt str } }
+}
+struct Ref<'a> {
+ s: &'a str
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_literal() {
+ check(
+ r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $type = $ lit; };
+}
+m!(u8, 0);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $type = $ lit; };
+}
+const VALUE: u8 = 0;
+"#]],
+ );
+
+ check(
+ r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $ type = $ lit; };
+}
+m!(i32, -1);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($type:ty, $lit:literal) => { const VALUE: $ type = $ lit; };
+}
+const VALUE: i32 = -1;
+"#]],
+ );
+}
+
+#[test]
+fn test_boolean_is_ident() {
+ check(
+ r#"
+macro_rules! m {
+ ($lit0:literal, $lit1:literal) => { const VALUE: (bool, bool) = ($lit0, $lit1); };
+}
+m!(true, false);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($lit0:literal, $lit1:literal) => { const VALUE: (bool, bool) = ($lit0, $lit1); };
+}
+const VALUE: (bool, bool) = (true , false );
+"#]],
+ );
+}
+
+#[test]
+fn test_vis() {
+ check(
+ r#"
+macro_rules! m {
+ ($vis:vis $name:ident) => { $vis fn $name() {} }
+}
+m!(pub foo);
+m!(foo);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($vis:vis $name:ident) => { $vis fn $name() {} }
+}
+pub fn foo() {}
+fn foo() {}
+"#]],
+ );
+}
+
+#[test]
+fn test_inner_macro_rules() {
+ check(
+ r#"
+macro_rules! m {
+ ($a:ident, $b:ident, $c:tt) => {
+ macro_rules! inner {
+ ($bi:ident) => { fn $bi() -> u8 { $c } }
+ }
+
+ inner!($a);
+ fn $b() -> u8 { $c }
+ }
+}
+m!(x, y, 1);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($a:ident, $b:ident, $c:tt) => {
+ macro_rules! inner {
+ ($bi:ident) => { fn $bi() -> u8 { $c } }
+ }
+
+ inner!($a);
+ fn $b() -> u8 { $c }
+ }
+}
+macro_rules !inner {
+ ($bi: ident) = > {
+ fn $bi()-> u8 {
+ 1
+ }
+ }
+}
+inner!(x);
+fn y() -> u8 {
+ 1
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_expr_after_path_colons() {
+ check(
+ r#"
+macro_rules! m {
+ ($k:expr) => { fn f() { K::$k; } }
+}
+// +tree +errors
+m!(C("0"));
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($k:expr) => { fn f() { K::$k; } }
+}
+/* parse error: expected identifier */
+/* parse error: expected SEMICOLON */
+/* parse error: expected SEMICOLON */
+/* parse error: expected expression */
+fn f() {
+ K::(C("0"));
+}
+// MACRO_ITEMS@0..19
+// FN@0..19
+// FN_KW@0..2 "fn"
+// NAME@2..3
+// IDENT@2..3 "f"
+// PARAM_LIST@3..5
+// L_PAREN@3..4 "("
+// R_PAREN@4..5 ")"
+// BLOCK_EXPR@5..19
+// STMT_LIST@5..19
+// L_CURLY@5..6 "{"
+// EXPR_STMT@6..10
+// PATH_EXPR@6..10
+// PATH@6..10
+// PATH@6..7
+// PATH_SEGMENT@6..7
+// NAME_REF@6..7
+// IDENT@6..7 "K"
+// COLON2@7..9 "::"
+// ERROR@9..10
+// L_PAREN@9..10 "("
+// EXPR_STMT@10..16
+// CALL_EXPR@10..16
+// PATH_EXPR@10..11
+// PATH@10..11
+// PATH_SEGMENT@10..11
+// NAME_REF@10..11
+// IDENT@10..11 "C"
+// ARG_LIST@11..16
+// L_PAREN@11..12 "("
+// LITERAL@12..15
+// STRING@12..15 "\"0\""
+// R_PAREN@15..16 ")"
+// ERROR@16..17
+// R_PAREN@16..17 ")"
+// SEMICOLON@17..18 ";"
+// R_CURLY@18..19 "}"
+
+"#]],
+ );
+}
+
+#[test]
+fn test_match_is_not_greedy() {
+ check(
+ r#"
+macro_rules! foo {
+ ($($i:ident $(,)*),*) => {};
+}
+foo!(a,b);
+"#,
+ expect![[r#"
+macro_rules! foo {
+ ($($i:ident $(,)*),*) => {};
+}
+
+"#]],
+ );
+}
+
+#[test]
+fn expr_interpolation() {
+ check(
+ r#"
+macro_rules! m { ($expr:expr) => { map($expr) } }
+fn f() {
+ let _ = m!(x + foo);
+}
+"#,
+ expect![[r#"
+macro_rules! m { ($expr:expr) => { map($expr) } }
+fn f() {
+ let _ = map((x+foo));
+}
+"#]],
+ )
+}
+
+#[test]
+fn mbe_are_not_attributes() {
+ check(
+ r#"
+macro_rules! error {
+ () => {struct Bar}
+}
+
+#[error]
+struct Foo;
+"#,
+ expect![[r##"
+macro_rules! error {
+ () => {struct Bar}
+}
+
+#[error]
+struct Foo;
+"##]],
+ )
+}
+
+#[test]
+fn test_dollar_dollar() {
+ check(
+ r#"
+macro_rules! register_struct { ($Struct:ident) => {
+ macro_rules! register_methods { ($$($method:ident),*) => {
+ macro_rules! implement_methods { ($$$$($$val:expr),*) => {
+ struct $Struct;
+ impl $Struct { $$(fn $method() -> &'static [u32] { &[$$$$($$$$val),*] })*}
+ }}
+ }}
+}}
+
+register_struct!(Foo);
+register_methods!(alpha, beta);
+implement_methods!(1, 2, 3);
+"#,
+ expect![[r#"
+macro_rules! register_struct { ($Struct:ident) => {
+ macro_rules! register_methods { ($$($method:ident),*) => {
+ macro_rules! implement_methods { ($$$$($$val:expr),*) => {
+ struct $Struct;
+ impl $Struct { $$(fn $method() -> &'static [u32] { &[$$$$($$$$val),*] })*}
+ }}
+ }}
+}}
+
+macro_rules !register_methods {
+ ($($method: ident), *) = > {
+ macro_rules!implement_methods {
+ ($$($val: expr), *) = > {
+ struct Foo;
+ impl Foo {
+ $(fn $method()-> & 'static[u32] {
+ &[$$($$val), *]
+ }
+ )*
+ }
+ }
+ }
+ }
+}
+macro_rules !implement_methods {
+ ($($val: expr), *) = > {
+ struct Foo;
+ impl Foo {
+ fn alpha()-> & 'static[u32] {
+ &[$($val), *]
+ }
+ fn beta()-> & 'static[u32] {
+ &[$($val), *]
+ }
+ }
+ }
+}
+struct Foo;
+impl Foo {
+ fn alpha() -> & 'static[u32] {
+ &[1, 2, 3]
+ }
+ fn beta() -> & 'static[u32] {
+ &[1, 2, 3]
+ }
+}
+"#]],
+ )
+}
+
+#[test]
+fn test_metavar_exprs() {
+ check(
+ r#"
+macro_rules! m {
+ ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* );
+}
+const _: i32 = m!(a b c);
+ "#,
+ expect![[r#"
+macro_rules! m {
+ ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* );
+}
+const _: i32 = -0--1--2;
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
new file mode 100644
index 000000000..bc162d0fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -0,0 +1,138 @@
+//! Tests that fragment captures such as `$var:expr` match and expand correctly.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn unary_minus_is_a_literal() {
+ check(
+ r#"
+macro_rules! m { ($x:literal) => (literal!();); ($x:tt) => (not_a_literal!();); }
+m!(92);
+m!(-92);
+m!(-9.2);
+m!(--92);
+"#,
+ expect![[r#"
+macro_rules! m { ($x:literal) => (literal!();); ($x:tt) => (not_a_literal!();); }
+literal!();
+literal!();
+literal!();
+/* error: leftover tokens */not_a_literal!();
+"#]],
+ )
+}
+
+#[test]
+fn test_expand_bad_literal() {
+ check(
+ r#"
+macro_rules! m { ($i:literal) => {}; }
+m!(&k");
+"#,
+ expect![[r#"
+macro_rules! m { ($i:literal) => {}; }
+/* error: Failed to lower macro args to token tree */"#]],
+ );
+}
+
+#[test]
+fn test_empty_comments() {
+ check(
+ r#"
+macro_rules! m{ ($fmt:expr) => (); }
+m!(/**/);
+"#,
+ expect![[r#"
+macro_rules! m{ ($fmt:expr) => (); }
+/* error: expected Expr */
+"#]],
+ );
+}
+
+#[test]
+fn asi() {
+ // Thanks, Christopher!
+ //
+ // https://internals.rust-lang.org/t/understanding-decisions-behind-semicolons/15181/29
+ check(
+ r#"
+macro_rules! asi { ($($stmt:stmt)*) => ($($stmt)*); }
+
+fn main() {
+ asi! {
+ let a = 2
+ let b = 5
+ drop(b-a)
+ println!("{}", a+b)
+ }
+}
+"#,
+ expect![[r#"
+macro_rules! asi { ($($stmt:stmt)*) => ($($stmt)*); }
+
+fn main() {
+ let a = 2let b = 5drop(b-a)println!("{}", a+b)
+}
+"#]],
+ )
+}
+
+#[test]
+fn stmt_boundaries() {
+ // FIXME: this actually works OK under rustc.
+ check(
+ r#"
+macro_rules! m {
+ ($($s:stmt)*) => (stringify!($($s |)*);)
+}
+m!(;;92;let x = 92; loop {};);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($s:stmt)*) => (stringify!($($s |)*);)
+}
+stringify!(;
+|;
+|92|;
+|let x = 92|;
+|loop {}
+|;
+|);
+"#]],
+ );
+}
+
+#[test]
+fn range_patterns() {
+ // FIXME: rustc thinks there are three patterns here, not one.
+ check(
+ r#"
+macro_rules! m {
+ ($($p:pat)*) => (stringify!($($p |)*);)
+}
+m!(.. .. ..);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($p:pat)*) => (stringify!($($p |)*);)
+}
+stringify!(.. .. ..|);
+"#]],
+ );
+}
+
+#[test]
+fn trailing_vis() {
+ check(
+ r#"
+macro_rules! m { ($($i:ident)? $vis:vis) => () }
+m!(x pub);
+"#,
+ expect![[r#"
+macro_rules! m { ($($i:ident)? $vis:vis) => () }
+
+"#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
new file mode 100644
index 000000000..8aff78408
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
@@ -0,0 +1,154 @@
+//! Tests for the syntax of macro definitions themselves.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn well_formed_macro_rules() {
+ check(
+ r#"
+macro_rules! m {
+ ($i:ident) => ();
+ ($(x),*) => ();
+ ($(x)_*) => ();
+ ($(x)i*) => ();
+ ($($i:ident)*) => ($_);
+ ($($true:ident)*) => ($true);
+ ($($false:ident)*) => ($false);
+ (double_dollar) => ($$);
+ ($) => (m!($););
+ ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*);
+}
+m!($);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($i:ident) => ();
+ ($(x),*) => ();
+ ($(x)_*) => ();
+ ($(x)i*) => ();
+ ($($i:ident)*) => ($_);
+ ($($true:ident)*) => ($true);
+ ($($false:ident)*) => ($false);
+ (double_dollar) => ($$);
+ ($) => (m!($););
+ ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*);
+}
+m!($);
+"#]],
+ )
+}
+
+#[test]
+fn malformed_macro_rules() {
+ check(
+ r#"
+macro_rules! i1 { invalid }
+i1!();
+
+macro_rules! e1 { $i:ident => () }
+e1!();
+macro_rules! e2 { ($i:ident) () }
+e2!();
+macro_rules! e3 { ($(i:ident)_) => () }
+e3!();
+
+macro_rules! f1 { ($i) => ($i) }
+f1!();
+macro_rules! f2 { ($i:) => ($i) }
+f2!();
+macro_rules! f3 { ($i:_) => () }
+f3!();
+
+macro_rules! m1 { ($$i) => () }
+m1!();
+macro_rules! m2 { () => ( ${invalid()} ) }
+m2!();
+"#,
+ expect![[r#"
+macro_rules! i1 { invalid }
+/* error: invalid macro definition: expected subtree */
+
+macro_rules! e1 { $i:ident => () }
+/* error: invalid macro definition: expected subtree */
+macro_rules! e2 { ($i:ident) () }
+/* error: invalid macro definition: expected `=` */
+macro_rules! e3 { ($(i:ident)_) => () }
+/* error: invalid macro definition: invalid repeat */
+
+macro_rules! f1 { ($i) => ($i) }
+/* error: invalid macro definition: missing fragment specifier */
+macro_rules! f2 { ($i:) => ($i) }
+/* error: invalid macro definition: missing fragment specifier */
+macro_rules! f3 { ($i:_) => () }
+/* error: invalid macro definition: missing fragment specifier */
+
+macro_rules! m1 { ($$i) => () }
+/* error: invalid macro definition: `$$` is not allowed on the pattern side */
+macro_rules! m2 { () => ( ${invalid()} ) }
+/* error: invalid macro definition: invalid metavariable expression */
+"#]],
+ )
+}
+
+#[test]
+fn test_rustc_issue_57597() {
+ // <https://github.com/rust-lang/rust/blob/master/src/test/ui/issues/issue-57597.rs>
+ check(
+ r#"
+macro_rules! m0 { ($($($i:ident)?)+) => {}; }
+macro_rules! m1 { ($($($i:ident)?)*) => {}; }
+macro_rules! m2 { ($($($i:ident)?)?) => {}; }
+macro_rules! m3 { ($($($($i:ident)?)?)?) => {}; }
+macro_rules! m4 { ($($($($i:ident)*)?)?) => {}; }
+macro_rules! m5 { ($($($($i:ident)?)*)?) => {}; }
+macro_rules! m6 { ($($($($i:ident)?)?)*) => {}; }
+macro_rules! m7 { ($($($($i:ident)*)*)?) => {}; }
+macro_rules! m8 { ($($($($i:ident)?)*)*) => {}; }
+macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; }
+macro_rules! mA { ($($($($i:ident)+)?)*) => {}; }
+macro_rules! mB { ($($($($i:ident)+)*)?) => {}; }
+
+m0!();
+m1!();
+m2!();
+m3!();
+m4!();
+m5!();
+m6!();
+m7!();
+m8!();
+m9!();
+mA!();
+mB!();
+ "#,
+ expect![[r#"
+macro_rules! m0 { ($($($i:ident)?)+) => {}; }
+macro_rules! m1 { ($($($i:ident)?)*) => {}; }
+macro_rules! m2 { ($($($i:ident)?)?) => {}; }
+macro_rules! m3 { ($($($($i:ident)?)?)?) => {}; }
+macro_rules! m4 { ($($($($i:ident)*)?)?) => {}; }
+macro_rules! m5 { ($($($($i:ident)?)*)?) => {}; }
+macro_rules! m6 { ($($($($i:ident)?)?)*) => {}; }
+macro_rules! m7 { ($($($($i:ident)*)*)?) => {}; }
+macro_rules! m8 { ($($($($i:ident)?)*)*) => {}; }
+macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; }
+macro_rules! mA { ($($($($i:ident)+)?)*) => {}; }
+macro_rules! mB { ($($($($i:ident)+)*)?) => {}; }
+
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+/* error: invalid macro definition: empty token tree in repetition */
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
new file mode 100644
index 000000000..2dff4adf2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -0,0 +1,911 @@
+//! Real world regressions and issues, not particularly minimized.
+//!
+//! While it's OK to just dump large macros here, it's preferable to come up
+//! with a minimal example for the program and put a specific test in the parent
+//! directory.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn test_vec() {
+ check(
+ r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+fn main() {
+ vec!();
+ vec![1u32,2];
+}
+"#,
+ expect![[r#"
+macro_rules! vec {
+ ($($item:expr),*) => {{
+ let mut v = Vec::new();
+ $( v.push($item); )*
+ v
+ }};
+}
+fn main() {
+ {
+ let mut v = Vec::new();
+ v
+ };
+ {
+ let mut v = Vec::new();
+ v.push(1u32);
+ v.push(2);
+ v
+ };
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_winapi_struct() {
+ // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366
+
+ check(
+ r#"
+macro_rules! STRUCT {
+ ($(#[$attrs:meta])* struct $name:ident {
+ $($field:ident: $ftype:ty,)+
+ }) => (
+ #[repr(C)] #[derive(Copy)] $(#[$attrs])*
+ pub struct $name {
+ $(pub $field: $ftype,)+
+ }
+ impl Clone for $name {
+ #[inline]
+ fn clone(&self) -> $name { *self }
+ }
+ #[cfg(feature = "impl-default")]
+ impl Default for $name {
+ #[inline]
+ fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
+ }
+ );
+}
+
+// from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs
+STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}
+
+STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}
+"#,
+ expect![[r##"
+macro_rules! STRUCT {
+ ($(#[$attrs:meta])* struct $name:ident {
+ $($field:ident: $ftype:ty,)+
+ }) => (
+ #[repr(C)] #[derive(Copy)] $(#[$attrs])*
+ pub struct $name {
+ $(pub $field: $ftype,)+
+ }
+ impl Clone for $name {
+ #[inline]
+ fn clone(&self) -> $name { *self }
+ }
+ #[cfg(feature = "impl-default")]
+ impl Default for $name {
+ #[inline]
+ fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
+ }
+ );
+}
+
+#[repr(C)]
+#[derive(Copy)] pub struct D3DVSHADERCAPS2_0 {
+ pub Caps: u8,
+}
+impl Clone for D3DVSHADERCAPS2_0 {
+ #[inline] fn clone(&self ) -> D3DVSHADERCAPS2_0 {
+ *self
+ }
+}
+#[cfg(feature = "impl-default")] impl Default for D3DVSHADERCAPS2_0 {
+ #[inline] fn default() -> D3DVSHADERCAPS2_0 {
+ unsafe {
+ $crate::_core::mem::zeroed()
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy)]
+#[cfg_attr(target_arch = "x86", repr(packed))] pub struct D3DCONTENTPROTECTIONCAPS {
+ pub Caps: u8,
+}
+impl Clone for D3DCONTENTPROTECTIONCAPS {
+ #[inline] fn clone(&self ) -> D3DCONTENTPROTECTIONCAPS {
+ *self
+ }
+}
+#[cfg(feature = "impl-default")] impl Default for D3DCONTENTPROTECTIONCAPS {
+ #[inline] fn default() -> D3DCONTENTPROTECTIONCAPS {
+ unsafe {
+ $crate::_core::mem::zeroed()
+ }
+ }
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_int_base() {
+ check(
+ r#"
+macro_rules! int_base {
+ ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl fmt::$Trait for $T {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ $Radix.fmt_int(*self as $U, f)
+ }
+ }
+ }
+}
+int_base!{Binary for isize as usize -> Binary}
+"#,
+ expect![[r##"
+macro_rules! int_base {
+ ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl fmt::$Trait for $T {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ $Radix.fmt_int(*self as $U, f)
+ }
+ }
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")] impl fmt::Binary for isize {
+ fn fmt(&self , f: &mut fmt::Formatter< '_>) -> fmt::Result {
+ Binary.fmt_int(*self as usize, f)
+ }
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_generate_pattern_iterators() {
+ // From <https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs>.
+ check(
+ r#"
+macro_rules! generate_pattern_iterators {
+ { double ended; with $(#[$common_stability_attribute:meta])*,
+ $forward_iterator:ident,
+ $reverse_iterator:ident, $iterty:ty
+ } => { ok!(); }
+}
+generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );
+"#,
+ expect![[r##"
+macro_rules! generate_pattern_iterators {
+ { double ended; with $(#[$common_stability_attribute:meta])*,
+ $forward_iterator:ident,
+ $reverse_iterator:ident, $iterty:ty
+ } => { ok!(); }
+}
+ok!();
+"##]],
+ );
+}
+
+#[test]
+fn test_impl_fn_for_zst() {
+ // From <https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs>.
+ check(
+ r#"
+macro_rules! impl_fn_for_zst {
+ {$( $( #[$attr: meta] )*
+ struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
+ |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty $body: block;
+ )+} => {$(
+ $( #[$attr] )*
+ struct $Name;
+
+ impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ $body
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call_mut(
+ &mut self,
+ ($( $arg, )*): ($( $ArgTy, )*)
+ ) -> $ReturnTy {
+ Fn::call(&*self, ($( $arg, )*))
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name {
+ type Output = $ReturnTy;
+
+ #[inline]
+ extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ Fn::call(&self, ($( $arg, )*))
+ }
+ }
+ )+}
+}
+
+impl_fn_for_zst ! {
+ #[derive(Clone)]
+ struct CharEscapeDebugContinue impl Fn = |c: char| -> char::EscapeDebug {
+ c.escape_debug_ext(false)
+ };
+
+ #[derive(Clone)]
+ struct CharEscapeUnicode impl Fn = |c: char| -> char::EscapeUnicode {
+ c.escape_unicode()
+ };
+
+ #[derive(Clone)]
+ struct CharEscapeDefault impl Fn = |c: char| -> char::EscapeDefault {
+ c.escape_default()
+ };
+}
+
+"#,
+ expect![[r##"
+macro_rules! impl_fn_for_zst {
+ {$( $( #[$attr: meta] )*
+ struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
+ |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty $body: block;
+ )+} => {$(
+ $( #[$attr] )*
+ struct $Name;
+
+ impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ $body
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call_mut(
+ &mut self,
+ ($( $arg, )*): ($( $ArgTy, )*)
+ ) -> $ReturnTy {
+ Fn::call(&*self, ($( $arg, )*))
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name {
+ type Output = $ReturnTy;
+
+ #[inline]
+ extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ Fn::call(&self, ($( $arg, )*))
+ }
+ }
+ )+}
+}
+
+#[derive(Clone)] struct CharEscapeDebugContinue;
+impl Fn<(char, )> for CharEscapeDebugContinue {
+ #[inline] extern "rust-call"fn call(&self , (c, ): (char, )) -> char::EscapeDebug { {
+ c.escape_debug_ext(false )
+ }
+ }
+}
+impl FnMut<(char, )> for CharEscapeDebugContinue {
+ #[inline] extern "rust-call"fn call_mut(&mut self , (c, ): (char, )) -> char::EscapeDebug {
+ Fn::call(&*self , (c, ))
+ }
+}
+impl FnOnce<(char, )> for CharEscapeDebugContinue {
+ type Output = char::EscapeDebug;
+ #[inline] extern "rust-call"fn call_once(self , (c, ): (char, )) -> char::EscapeDebug {
+ Fn::call(&self , (c, ))
+ }
+}
+#[derive(Clone)] struct CharEscapeUnicode;
+impl Fn<(char, )> for CharEscapeUnicode {
+ #[inline] extern "rust-call"fn call(&self , (c, ): (char, )) -> char::EscapeUnicode { {
+ c.escape_unicode()
+ }
+ }
+}
+impl FnMut<(char, )> for CharEscapeUnicode {
+ #[inline] extern "rust-call"fn call_mut(&mut self , (c, ): (char, )) -> char::EscapeUnicode {
+ Fn::call(&*self , (c, ))
+ }
+}
+impl FnOnce<(char, )> for CharEscapeUnicode {
+ type Output = char::EscapeUnicode;
+ #[inline] extern "rust-call"fn call_once(self , (c, ): (char, )) -> char::EscapeUnicode {
+ Fn::call(&self , (c, ))
+ }
+}
+#[derive(Clone)] struct CharEscapeDefault;
+impl Fn<(char, )> for CharEscapeDefault {
+ #[inline] extern "rust-call"fn call(&self , (c, ): (char, )) -> char::EscapeDefault { {
+ c.escape_default()
+ }
+ }
+}
+impl FnMut<(char, )> for CharEscapeDefault {
+ #[inline] extern "rust-call"fn call_mut(&mut self , (c, ): (char, )) -> char::EscapeDefault {
+ Fn::call(&*self , (c, ))
+ }
+}
+impl FnOnce<(char, )> for CharEscapeDefault {
+ type Output = char::EscapeDefault;
+ #[inline] extern "rust-call"fn call_once(self , (c, ): (char, )) -> char::EscapeDefault {
+ Fn::call(&self , (c, ))
+ }
+}
+
+"##]],
+ );
+}
+
+#[test]
+fn test_impl_nonzero_fmt() {
+ // From <https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12>.
+ check(
+ r#"
+macro_rules! impl_nonzero_fmt {
+ ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { ok!(); }
+}
+impl_nonzero_fmt! {
+ #[stable(feature= "nonzero",since="1.28.0")]
+ (Debug, Display, Binary, Octal, LowerHex, UpperHex) for NonZeroU8
+}
+"#,
+ expect![[r##"
+macro_rules! impl_nonzero_fmt {
+ ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { ok!(); }
+}
+ok!();
+"##]],
+ );
+}
+
+#[test]
+fn test_cfg_if_items() {
+ // From <https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986>.
+ check(
+ r#"
+macro_rules! __cfg_if_items {
+ (($($not:meta,)*) ; ) => {};
+ (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+ __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
+ }
+}
+__cfg_if_items! {
+ (rustdoc,);
+ ( () (
+ #[ cfg(any(target_os = "redox", unix))]
+ #[ stable(feature = "rust1", since = "1.0.0")]
+ pub use sys::ext as unix;
+
+ #[cfg(windows)]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub use sys::ext as windows;
+
+ #[cfg(any(target_os = "linux", target_os = "l4re"))]
+ pub mod linux;
+ )),
+}
+"#,
+ expect![[r#"
+macro_rules! __cfg_if_items {
+ (($($not:meta,)*) ; ) => {};
+ (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+ __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
+ }
+}
+__cfg_if_items! {
+ (rustdoc, );
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_cfg_if_main() {
+ // From <https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9>.
+ check(
+ r#"
+macro_rules! cfg_if {
+ ($(if #[cfg($($meta:meta),*)] { $($it:item)* } )else* else { $($it2:item)* })
+ => {
+ __cfg_if_items! {
+ () ;
+ $( ( ($($meta),*) ($($it)*) ), )*
+ ( () ($($it2)*) ),
+ }
+ };
+
+ // Internal macro to Apply a cfg attribute to a list of items
+ (@__apply $m:meta, $($it:item)*) => { $(#[$m] $it)* };
+}
+
+cfg_if! {
+ if #[cfg(target_env = "msvc")] {
+ // no extra unwinder support needed
+ } else if #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))] {
+ // no unwinder on the system!
+ } else {
+ mod libunwind;
+ pub use libunwind::*;
+ }
+}
+
+cfg_if! {
+ @__apply cfg(all(not(any(not(any(target_os = "solaris", target_os = "illumos")))))),
+}
+"#,
+ expect![[r##"
+macro_rules! cfg_if {
+ ($(if #[cfg($($meta:meta),*)] { $($it:item)* } )else* else { $($it2:item)* })
+ => {
+ __cfg_if_items! {
+ () ;
+ $( ( ($($meta),*) ($($it)*) ), )*
+ ( () ($($it2)*) ),
+ }
+ };
+
+ // Internal macro to Apply a cfg attribute to a list of items
+ (@__apply $m:meta, $($it:item)*) => { $(#[$m] $it)* };
+}
+
+__cfg_if_items! {
+ ();
+ ((target_env = "msvc")()), ((all(target_arch = "wasm32", not(target_os = "emscripten")))()), (()(mod libunwind;
+ pub use libunwind::*;
+ )),
+}
+
+
+"##]],
+ );
+}
+
+#[test]
+fn test_proptest_arbitrary() {
+ // From <https://github.com/AltSysrq/proptest/blob/d1c4b049337d2f75dd6f49a095115f7c532e5129/proptest/src/arbitrary/macros.rs#L16>.
+ check(
+ r#"
+macro_rules! arbitrary {
+ ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+ $args: ident => $logic: expr) => {
+ impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
+ type Parameters = $params;
+ type Strategy = $strat;
+ fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
+ $logic
+ }
+ }
+ };
+}
+
+arbitrary!(
+ [A:Arbitrary]
+ Vec<A> ,
+ VecStrategy<A::Strategy>,
+ RangedParams1<A::Parameters>;
+ args => {
+ let product_unpack![range, a] = args;
+ vec(any_with::<A>(a), range)
+ }
+);
+"#,
+ expect![[r#"
+macro_rules! arbitrary {
+ ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
+ $args: ident => $logic: expr) => {
+ impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
+ type Parameters = $params;
+ type Strategy = $strat;
+ fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
+ $logic
+ }
+ }
+ };
+}
+
+impl <A: Arbitrary> $crate::arbitrary::Arbitrary for Vec<A> {
+ type Parameters = RangedParams1<A::Parameters>;
+ type Strategy = VecStrategy<A::Strategy>;
+ fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { {
+ let product_unpack![range, a] = args;
+ vec(any_with::<A>(a), range)
+ }
+ }
+}
+"#]],
+ );
+}
+
+#[test]
+fn test_old_ridl() {
+ // This is from winapi 2.8, which does not have a link on GitHub.
+ check(
+ r#"
+#[macro_export]
+macro_rules! RIDL {
+ (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
+ {$(
+ fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
+ ),+}
+ ) => {
+ impl $interface {
+ $(pub unsafe fn $method(&mut self) -> $rtr {
+ ((*self.lpVtbl).$method)(self $(,$p)*)
+ })+
+ }
+ };
+}
+
+RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
+ fn GetDataSize(&mut self) -> UINT
+}}
+"#,
+ expect![[r##"
+#[macro_export]
+macro_rules! RIDL {
+ (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
+ {$(
+ fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
+ ),+}
+ ) => {
+ impl $interface {
+ $(pub unsafe fn $method(&mut self) -> $rtr {
+ ((*self.lpVtbl).$method)(self $(,$p)*)
+ })+
+ }
+ };
+}
+
+impl ID3D11Asynchronous {
+ pub unsafe fn GetDataSize(&mut self ) -> UINT {
+ ((*self .lpVtbl).GetDataSize)(self )
+ }
+}
+"##]],
+ );
+}
+
+#[test]
+fn test_quick_error() {
+ check(
+ r#"
+macro_rules! quick_error {
+ (SORT [enum $name:ident $( #[$meta:meta] )*]
+ items [$($( #[$imeta:meta] )*
+ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*]
+ {$( $ifuncs:tt )*} )* ]
+ buf [ ]
+ queue [ ]
+ ) => {
+ quick_error!(ENUMINITION [enum $name $( #[$meta] )*]
+ body []
+ queue [$(
+ $( #[$imeta] )*
+ =>
+ $iitem: $imode [$( $ivar: $ityp ),*]
+ )*]
+ );
+ };
+}
+quick_error ! (
+ SORT
+ [enum Wrapped #[derive(Debug)]]
+ items [
+ => One: UNIT [] {}
+ => Two: TUPLE [s :String] {display ("two: {}" , s) from ()} ]
+ buf [ ]
+ queue [ ]
+);
+
+"#,
+ expect![[r##"
+macro_rules! quick_error {
+ (SORT [enum $name:ident $( #[$meta:meta] )*]
+ items [$($( #[$imeta:meta] )*
+ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*]
+ {$( $ifuncs:tt )*} )* ]
+ buf [ ]
+ queue [ ]
+ ) => {
+ quick_error!(ENUMINITION [enum $name $( #[$meta] )*]
+ body []
+ queue [$(
+ $( #[$imeta] )*
+ =>
+ $iitem: $imode [$( $ivar: $ityp ),*]
+ )*]
+ );
+ };
+}
+quick_error!(ENUMINITION[enum Wrapped#[derive(Debug)]]body[]queue[ = > One: UNIT[] = > Two: TUPLE[s: String]]);
+
+"##]],
+ )
+}
+
+#[test]
+fn test_empty_repeat_vars_in_empty_repeat_vars() {
+ check(
+ r#"
+macro_rules! delegate_impl {
+ ([$self_type:ident, $self_wrap:ty, $self_map:ident]
+ pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
+
+ $(
+ @escape [type $assoc_name_ext:ident]
+ )*
+ $(
+ @section type
+ $(
+ $(#[$_assoc_attr:meta])*
+ type $assoc_name:ident $(: $assoc_bound:ty)*;
+ )+
+ )*
+ $(
+ @section self
+ $(
+ $(#[$_method_attr:meta])*
+ fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty;
+ )+
+ )*
+ $(
+ @section nodelegate
+ $($tail:tt)*
+ )*
+ }) => {
+ impl<> $name for $self_wrap where $self_type: $name {
+ $(
+ $(
+ fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
+ $self_map!(self).$method_name($($marg),*)
+ }
+ )*
+ )*
+ }
+ }
+}
+delegate_impl ! {
+ [G, &'a mut G, deref] pub trait Data: GraphBase {@section type type NodeWeight;}
+}
+"#,
+ expect![[r##"
+macro_rules! delegate_impl {
+ ([$self_type:ident, $self_wrap:ty, $self_map:ident]
+ pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
+
+ $(
+ @escape [type $assoc_name_ext:ident]
+ )*
+ $(
+ @section type
+ $(
+ $(#[$_assoc_attr:meta])*
+ type $assoc_name:ident $(: $assoc_bound:ty)*;
+ )+
+ )*
+ $(
+ @section self
+ $(
+ $(#[$_method_attr:meta])*
+ fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty;
+ )+
+ )*
+ $(
+ @section nodelegate
+ $($tail:tt)*
+ )*
+ }) => {
+ impl<> $name for $self_wrap where $self_type: $name {
+ $(
+ $(
+ fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
+ $self_map!(self).$method_name($($marg),*)
+ }
+ )*
+ )*
+ }
+ }
+}
+impl <> Data for & 'amut G where G: Data {}
+"##]],
+ );
+}
+
+#[test]
+fn test_issue_2520() {
+ check(
+ r#"
+macro_rules! my_macro {
+ {
+ ( $(
+ $( [] $sname:ident : $stype:ty )?
+ $( [$expr:expr] $nname:ident : $ntype:ty )?
+ ),* )
+ } => {ok!(
+ Test {
+ $(
+ $( $sname, )?
+ )*
+ }
+ );};
+}
+
+my_macro! {
+ ([] p1: u32, [|_| S0K0] s: S0K0, [] k0: i32)
+}
+ "#,
+ expect![[r#"
+macro_rules! my_macro {
+ {
+ ( $(
+ $( [] $sname:ident : $stype:ty )?
+ $( [$expr:expr] $nname:ident : $ntype:ty )?
+ ),* )
+ } => {ok!(
+ Test {
+ $(
+ $( $sname, )?
+ )*
+ }
+ );};
+}
+
+ok!(Test {
+ p1, k0,
+}
+);
+ "#]],
+ );
+}
+
+#[test]
+fn test_repeat_bad_var() {
+ // FIXME: the second rule of the macro should be removed and an error about
+ // `$( $c )+` raised
+ check(
+ r#"
+macro_rules! foo {
+ ($( $b:ident )+) => { ok!($( $c )+); };
+ ($( $b:ident )+) => { ok!($( $b )+); }
+}
+
+foo!(b0 b1);
+"#,
+ expect![[r#"
+macro_rules! foo {
+ ($( $b:ident )+) => { ok!($( $c )+); };
+ ($( $b:ident )+) => { ok!($( $b )+); }
+}
+
+ok!(b0 b1);
+"#]],
+ );
+}
+
+#[test]
+fn test_issue_3861() {
+ // This should (and does) produce a parse error. It used to loop infinitely
+ // instead.
+ check(
+ r#"
+macro_rules! rgb_color {
+ ($p:expr, $t:ty) => {
+ pub fn new() {
+ let _ = 0 as $t << $p;
+ }
+ };
+}
+// +tree +errors
+rgb_color!(8 + 8, u32);
+"#,
+ expect![[r#"
+macro_rules! rgb_color {
+ ($p:expr, $t:ty) => {
+ pub fn new() {
+ let _ = 0 as $t << $p;
+ }
+ };
+}
+/* parse error: expected type */
+/* parse error: expected R_PAREN */
+/* parse error: expected R_ANGLE */
+/* parse error: expected COMMA */
+/* parse error: expected R_ANGLE */
+/* parse error: expected SEMICOLON */
+/* parse error: expected SEMICOLON */
+/* parse error: expected expression */
+pub fn new() {
+ let _ = 0as u32<<(8+8);
+}
+// MACRO_ITEMS@0..31
+// FN@0..31
+// VISIBILITY@0..3
+// PUB_KW@0..3 "pub"
+// FN_KW@3..5 "fn"
+// NAME@5..8
+// IDENT@5..8 "new"
+// PARAM_LIST@8..10
+// L_PAREN@8..9 "("
+// R_PAREN@9..10 ")"
+// BLOCK_EXPR@10..31
+// STMT_LIST@10..31
+// L_CURLY@10..11 "{"
+// LET_STMT@11..27
+// LET_KW@11..14 "let"
+// WILDCARD_PAT@14..15
+// UNDERSCORE@14..15 "_"
+// EQ@15..16 "="
+// CAST_EXPR@16..27
+// LITERAL@16..17
+// INT_NUMBER@16..17 "0"
+// AS_KW@17..19 "as"
+// PATH_TYPE@19..27
+// PATH@19..27
+// PATH_SEGMENT@19..27
+// NAME_REF@19..22
+// IDENT@19..22 "u32"
+// GENERIC_ARG_LIST@22..27
+// L_ANGLE@22..23 "<"
+// TYPE_ARG@23..27
+// DYN_TRAIT_TYPE@23..27
+// TYPE_BOUND_LIST@23..27
+// TYPE_BOUND@23..26
+// PATH_TYPE@23..26
+// PATH@23..26
+// PATH_SEGMENT@23..26
+// L_ANGLE@23..24 "<"
+// PAREN_TYPE@24..26
+// L_PAREN@24..25 "("
+// ERROR@25..26
+// INT_NUMBER@25..26 "8"
+// PLUS@26..27 "+"
+// EXPR_STMT@27..28
+// LITERAL@27..28
+// INT_NUMBER@27..28 "8"
+// ERROR@28..29
+// R_PAREN@28..29 ")"
+// SEMICOLON@29..30 ";"
+// R_CURLY@30..31 "}"
+
+"#]],
+ );
+}
+
+#[test]
+fn test_no_space_after_semi_colon() {
+ check(
+ r#"
+macro_rules! with_std {
+ ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*)
+}
+
+with_std! {mod m;mod f;}
+"#,
+ expect![[r##"
+macro_rules! with_std {
+ ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*)
+}
+
+#[cfg(feature = "std")] mod m;
+#[cfg(feature = "std")] mod f;
+"##]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
new file mode 100644
index 000000000..0710b1ac3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
@@ -0,0 +1,200 @@
+//! Unlike rustc, rust-analyzer's syntax trees are not "made of" token trees.
+//! Rather, token trees are an explicit bridge between the parser and
+//! (procedural or declarative) macros.
+//!
+//! This module tests tt <-> syntax tree conversion specifically. In particular,
+//! it checks, among other things, that we convert `tt` to the right kind of
+//! syntax node depending on the macro call-site.
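Editorial aside (not part of the diff): a minimal, hypothetical sketch of the "token tree" shape the doc comment above refers to. The `TokenTree` enum below is invented purely for illustration and is not rust-analyzer's actual `tt` crate; it only shows the "flat tokens plus delimited subtrees" structure that the conversion tests in this module exercise.

// Illustration only: not rust-analyzer's real `tt` types.
#[derive(Debug)]
enum TokenTree {
    Token(String),           // an atomic token such as `a`, `+`, or `"str"`
    Subtree(Vec<TokenTree>),  // a delimited group such as `( ... )` or `{ ... }`
}

fn main() {
    // The macro input `a + (b * c)` is one flat sequence of token trees,
    // with a single nested subtree for the parenthesized group.
    let input = vec![
        TokenTree::Token("a".into()),
        TokenTree::Token("+".into()),
        TokenTree::Subtree(vec![
            TokenTree::Token("b".into()),
            TokenTree::Token("*".into()),
            TokenTree::Token("c".into()),
        ]),
    ];
    println!("{input:?}");
}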
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn round_trips_compound_tokens() {
+ check(
+ r#"
+macro_rules! m {
+ () => { type qual: ::T = qual::T; }
+}
+m!();
+"#,
+ expect![[r#"
+macro_rules! m {
+ () => { type qual: ::T = qual::T; }
+}
+type qual: ::T = qual::T;
+"#]],
+ )
+}
+
+#[test]
+fn round_trips_literals() {
+ check(
+ r#"
+macro_rules! m {
+ () => {
+ let _ = 'c';
+ let _ = 1000;
+ let _ = 12E+99_f64;
+ let _ = "rust1";
+ let _ = -92;
+ }
+}
+fn f() {
+ m!()
+}
+"#,
+ expect![[r#"
+macro_rules! m {
+ () => {
+ let _ = 'c';
+ let _ = 1000;
+ let _ = 12E+99_f64;
+ let _ = "rust1";
+ let _ = -92;
+ }
+}
+fn f() {
+ let _ = 'c';
+ let _ = 1000;
+ let _ = 12E+99_f64;
+ let _ = "rust1";
+ let _ = -92;
+}
+"#]],
+ );
+}
+
+#[test]
+fn roundtrip_lifetime() {
+ check(
+ r#"
+macro_rules! m {
+ ($($t:tt)*) => { $($t)*}
+}
+m!(static bar: &'static str = "hello";);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($t:tt)*) => { $($t)*}
+}
+static bar: & 'static str = "hello";
+"#]],
+ );
+}
+
+#[test]
+fn broken_parenthesis_sequence() {
+ check(
+ r#"
+macro_rules! m1 { ($x:ident) => { ($x } }
+macro_rules! m2 { ($x:ident) => {} }
+
+m1!();
+m2!(x
+"#,
+ expect![[r#"
+macro_rules! m1 { ($x:ident) => { ($x } }
+macro_rules! m2 { ($x:ident) => {} }
+
+/* error: invalid macro definition: expected subtree */
+/* error: Failed to lower macro args to token tree */
+"#]],
+ )
+}
+
+#[test]
+fn expansion_does_not_parse_as_expression() {
+ check(
+ r#"
+macro_rules! stmts {
+ () => { fn foo() {} }
+}
+
+fn f() { let _ = stmts!/*+errors*/(); }
+"#,
+ expect![[r#"
+macro_rules! stmts {
+ () => { fn foo() {} }
+}
+
+fn f() { let _ = /* parse error: expected expression */
+fn foo() {}; }
+"#]],
+ )
+}
+
+#[test]
+fn broken_pat() {
+ check(
+ r#"
+macro_rules! m1 { () => (Some(x) left overs) }
+macro_rules! m2 { () => ($) }
+
+fn main() {
+ let m1!() = ();
+ let m2!/*+errors*/() = ();
+}
+"#,
+ expect![[r#"
+macro_rules! m1 { () => (Some(x) left overs) }
+macro_rules! m2 { () => ($) }
+
+fn main() {
+ let Some(x)left overs = ();
+ let /* parse error: expected pattern */
+$ = ();
+}
+"#]],
+ )
+}
+
+#[test]
+fn float_literal_in_tt() {
+ check(
+ r#"
+macro_rules! constant {
+ ($( $ret:expr; )*) => {};
+}
+macro_rules! float_const_impl {
+ () => ( constant!(0.3; 3.3;); );
+}
+float_const_impl! {}
+"#,
+ expect![[r#"
+macro_rules! constant {
+ ($( $ret:expr; )*) => {};
+}
+macro_rules! float_const_impl {
+ () => ( constant!(0.3; 3.3;); );
+}
+constant!(0.3;
+3.3;
+);
+"#]],
+ );
+}
+
+#[test]
+fn float_literal_in_output() {
+ check(
+ r#"
+macro_rules! constant {
+ ($e:expr ;) => {$e};
+}
+
+const _: () = constant!(0.0;);
+const _: () = constant!(0.;);
+const _: () = constant!(0e0;);
+"#,
+ expect![[r#"
+macro_rules! constant {
+ ($e:expr ;) => {$e};
+}
+
+const _: () = 0.0;
+const _: () = 0.;
+const _: () = 0e0;
+"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
new file mode 100644
index 000000000..72c44a0fb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -0,0 +1,130 @@
+//! Tests for user-defined procedural macros.
+//!
+//! Note the `//- proc_macros: identity` fixture metas in these tests -- we don't use real
+//! proc-macros here, as that would be slow. Instead, we use several hard-coded
+//! in-memory macros.
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn attribute_macro_attr_censoring() {
+ cov_mark::check!(attribute_macro_attr_censoring);
+ check(
+ r#"
+//- proc_macros: identity
+#[attr1] #[proc_macros::identity] #[attr2]
+struct S;
+"#,
+ expect![[r##"
+#[attr1] #[proc_macros::identity] #[attr2]
+struct S;
+
+#[attr1]
+#[attr2] struct S;"##]],
+ );
+}
+
+#[test]
+fn derive_censoring() {
+ cov_mark::check!(derive_censoring);
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore:derive
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+"#,
+ expect![[r##"
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+
+#[attr1]
+#[derive(Bar)]
+#[attr2] struct S;"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_1() {
+ // this is just the case where the input is actually valid
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+
+fn foo() {
+ bar.baz();
+ blub
+}"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_2() {
+ // common case of dot completion while typing
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+
+fn foo() {
+ bar. ;
+ blub
+}"##]],
+ );
+}
+
+#[test]
+fn float_parsing_panic() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+ check(
+ r#"
+//- proc_macros: identity
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+id /*+errors*/! {
+ #[proc_macros::identity]
+ impl Foo for WrapBj {
+ async fn foo(&self) {
+ self.0. id().await;
+ }
+ }
+}
+"#,
+ expect![[r##"
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+/* parse error: expected SEMICOLON */
+#[proc_macros::identity] impl Foo for WrapBj {
+ async fn foo(&self ) {
+ self .0.id().await ;
+ }
+}
+"##]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
new file mode 100644
index 000000000..6eb530ecc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -0,0 +1,545 @@
+//! This module implements import-resolution/macro expansion algorithm.
+//!
+//! The result of this module is `DefMap`: a data structure which contains:
+//!
+//! * a tree of modules for the crate
+//! * for each module, a set of items visible in the module (directly declared
+//! or imported)
+//!
+//! Note that `DefMap` contains fully macro expanded code.
+//!
+//! Computing `DefMap` can be partitioned into several logically
+//! independent "phases". The phases are mutually recursive though, there's no
+//! strict ordering.
+//!
+//! ## Collecting RawItems
+//!
+//! This happens in the `raw` module, which parses a single source file into a
+//! set of top-level items. Nested imports are desugared to flat imports in this
+//! phase. Macro calls are represented as a triple of (Path, Option<Name>,
+//! TokenTree).
+//!
+//! ## Collecting Modules
+//!
+//! This happens in the `collector` module. In this phase, we recursively walk
+//! the tree of modules, collect raw items from submodules, populate module scopes
+//! with defined items (so, we assign item ids in this phase) and record the set
+//! of unresolved imports and macros.
+//!
+//! While we walk the tree of modules, we also record macro_rules definitions and
+//! expand calls to macro_rules-defined macros.
+//!
+//! ## Resolving Imports
+//!
+//! We maintain a list of currently unresolved imports. On every iteration, we
+//! try to resolve some imports from this list. If the import is resolved, we
+//! record it, by adding an item to current module scope and, if necessary, by
+//! recursively populating glob imports.
+//!
+//! ## Resolving Macros
+//!
+//! macro_rules from the same crate use a global mutable namespace. We expand
+//! them immediately, when we collect modules.
+//!
+//! Macros from other crates (including proc-macros) can be used with
+//! `foo::bar!` syntax. We handle them similarly to imports. There's a list of
+//! unexpanded macros. On every iteration, we try to resolve each macro call
+//! path and, upon success, we run macro expansion and the "collect module" phase on
+//! the result.
+
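Editorial aside (not part of the diff): a small, self-contained illustration of the "nested imports are desugared to flat imports" step described above. The snippet is ordinary Rust invented for this note, not code from this crate.

// A nested use tree in the source text...
use std::{fs, io::Read as R};

// ...is recorded during raw-item collection as if it had been written flat:
//     use std::fs;
//     use std::io::Read as R;

fn main() {
    // Either spelling brings the same names into scope.
    let meta = fs::metadata(".").expect("current directory should exist");
    println!("is_dir = {}, len = {}", meta.is_dir(), meta.len());

    // `R` is just `std::io::Read` under another name; importing the trait
    // (flat or nested, aliased or not) is what enables this method call.
    let mut bytes: &[u8] = b"hello";
    let mut buf = String::new();
    bytes.read_to_string(&mut buf).expect("valid UTF-8");
    println!("read back: {buf}");
}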
+pub mod attr_resolution;
+pub mod proc_macro;
+pub mod diagnostics;
+mod collector;
+mod mod_resolution;
+mod path_resolution;
+
+#[cfg(test)]
+mod tests;
+
+use std::{cmp::Ord, ops::Deref, sync::Arc};
+
+use base_db::{CrateId, Edition, FileId};
+use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId};
+use itertools::Itertools;
+use la_arena::Arena;
+use profile::Count;
+use rustc_hash::FxHashMap;
+use stdx::format_to;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ db::DefDatabase,
+ item_scope::{BuiltinShadowMode, ItemScope},
+ item_tree::{ItemTreeId, Mod, TreeId},
+ nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
+ path::ModPath,
+ per_ns::PerNs,
+ visibility::Visibility,
+ AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, MacroId, ModuleId, ProcMacroId,
+};
+
+/// Contains the results of (early) name resolution.
+///
+/// A `DefMap` stores the module tree and the definitions that are in scope in every module after
+/// item-level macros have been expanded.
+///
+/// Every crate has a primary `DefMap` whose root is the crate's main file (`main.rs`/`lib.rs`),
+/// computed by the `crate_def_map` query. Additionally, every block expression introduces the
+/// opportunity to write arbitrary item and module hierarchies, and thus gets its own `DefMap` that
+/// is computed by the `block_def_map` query.
+#[derive(Debug, PartialEq, Eq)]
+pub struct DefMap {
+ _c: Count<Self>,
+ block: Option<BlockInfo>,
+ root: LocalModuleId,
+ modules: Arena<ModuleData>,
+ krate: CrateId,
+ /// The prelude module for this crate. This either comes from an import
+ /// marked with the `prelude_import` attribute, or (in the normal case) from
+ /// a dependency (`std` or `core`).
+ prelude: Option<ModuleId>,
+ extern_prelude: FxHashMap<Name, ModuleId>,
+
+ /// Side table for resolving derive helpers.
+ exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
+ fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
+ /// The error that occurred when failing to load the proc-macro dll.
+ proc_macro_loading_error: Option<Box<str>>,
+ /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
+ /// attributes.
+ derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
+
+ /// Custom attributes registered with `#![register_attr]`.
+ registered_attrs: Vec<SmolStr>,
+ /// Custom tool modules registered with `#![register_tool]`.
+ registered_tools: Vec<SmolStr>,
+
+ edition: Edition,
+ recursion_limit: Option<u32>,
+ diagnostics: Vec<DefDiagnostic>,
+}
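Editorial aside (not part of the diff): an illustration of the block-level `DefMap` case mentioned in the doc comment above. This is plain example Rust, not code from this crate; the items declared inside the function body are exactly the kind of input that gets its own `DefMap` via the `block_def_map` query.

// The crate root is covered by the primary `DefMap` (`crate_def_map`).
struct Outer;

fn f() -> u32 {
    // This block declares its own items, so resolving names inside it needs
    // a nested, block-level `DefMap` (`block_def_map`).
    struct Inner;
    mod helpers {
        pub fn twice(x: u32) -> u32 { x * 2 }
    }
    let _ = Inner;   // resolved in the block's own map
    let _ = Outer;   // still visible via the parent (crate) map
    helpers::twice(21)
}

fn main() {
    println!("{}", f());
}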
+
+/// For `DefMap`s computed for a block expression, this stores its location in the parent map.
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+struct BlockInfo {
+ /// The `BlockId` this `DefMap` was created from.
+ block: BlockId,
+ /// The containing module.
+ parent: ModuleId,
+}
+
+impl std::ops::Index<LocalModuleId> for DefMap {
+ type Output = ModuleData;
+ fn index(&self, id: LocalModuleId) -> &ModuleData {
+ &self.modules[id]
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub enum ModuleOrigin {
+ CrateRoot {
+ definition: FileId,
+ },
+ /// Note that non-inline modules, by definition, live inside a non-macro file.
+ File {
+ is_mod_rs: bool,
+ declaration: AstId<ast::Module>,
+ declaration_tree_id: ItemTreeId<Mod>,
+ definition: FileId,
+ },
+ Inline {
+ definition_tree_id: ItemTreeId<Mod>,
+ definition: AstId<ast::Module>,
+ },
+ /// Pseudo-module introduced by a block scope (contains only inner items).
+ BlockExpr {
+ block: AstId<ast::BlockExpr>,
+ },
+}
+
+impl ModuleOrigin {
+ pub fn declaration(&self) -> Option<AstId<ast::Module>> {
+ match self {
+ ModuleOrigin::File { declaration: module, .. }
+ | ModuleOrigin::Inline { definition: module, .. } => Some(*module),
+ ModuleOrigin::CrateRoot { .. } | ModuleOrigin::BlockExpr { .. } => None,
+ }
+ }
+
+ pub fn file_id(&self) -> Option<FileId> {
+ match self {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+ Some(*definition)
+ }
+ _ => None,
+ }
+ }
+
+ pub fn is_inline(&self) -> bool {
+ match self {
+ ModuleOrigin::Inline { .. } | ModuleOrigin::BlockExpr { .. } => true,
+ ModuleOrigin::CrateRoot { .. } | ModuleOrigin::File { .. } => false,
+ }
+ }
+
+ /// Returns a node which defines this module.
+ /// That is, a file or a `mod foo {}` with items.
+ fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
+ match self {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+ let file_id = *definition;
+ let sf = db.parse(file_id).tree();
+ InFile::new(file_id.into(), ModuleSource::SourceFile(sf))
+ }
+ ModuleOrigin::Inline { definition, .. } => InFile::new(
+ definition.file_id,
+ ModuleSource::Module(definition.to_node(db.upcast())),
+ ),
+ ModuleOrigin::BlockExpr { block } => {
+ InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast())))
+ }
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ModuleData {
+ /// Where does this module come from?
+ pub origin: ModuleOrigin,
+ /// Declared visibility of this module.
+ pub visibility: Visibility,
+
+ pub parent: Option<LocalModuleId>,
+ pub children: FxHashMap<Name, LocalModuleId>,
+ pub scope: ItemScope,
+}
+
+impl DefMap {
+ pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
+ let _p = profile::span("crate_def_map_query").detail(|| {
+ db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string()
+ });
+
+ let crate_graph = db.crate_graph();
+
+ let edition = crate_graph[krate].edition;
+ let origin = ModuleOrigin::CrateRoot { definition: crate_graph[krate].root_file_id };
+ let def_map = DefMap::empty(krate, edition, ModuleData::new(origin, Visibility::Public));
+ let def_map = collector::collect_defs(
+ db,
+ def_map,
+ TreeId::new(crate_graph[krate].root_file_id.into(), None),
+ );
+
+ Arc::new(def_map)
+ }
+
+ pub(crate) fn block_def_map_query(
+ db: &dyn DefDatabase,
+ block_id: BlockId,
+ ) -> Option<Arc<DefMap>> {
+ let block: BlockLoc = db.lookup_intern_block(block_id);
+
+ let tree_id = TreeId::new(block.ast_id.file_id, Some(block_id));
+ let item_tree = tree_id.item_tree(db);
+ if item_tree.top_level_items().is_empty() {
+ return None;
+ }
+
+ let parent_map = block.module.def_map(db);
+ let krate = block.module.krate;
+ let local_id = LocalModuleId::from_raw(la_arena::RawIdx::from(0));
+ // NB: we use `None` as block here, which would be wrong for implicit
+ // modules declared by blocks with items. At the moment, we don't use
+ // this visibility for anything outside IDE, so that's probably OK.
+ let visibility = Visibility::Module(ModuleId { krate, local_id, block: None });
+ let module_data =
+ ModuleData::new(ModuleOrigin::BlockExpr { block: block.ast_id }, visibility);
+
+ let mut def_map = DefMap::empty(krate, parent_map.edition, module_data);
+ def_map.block = Some(BlockInfo { block: block_id, parent: block.module });
+
+ let def_map = collector::collect_defs(db, def_map, tree_id);
+ Some(Arc::new(def_map))
+ }
+
+ fn empty(krate: CrateId, edition: Edition, module_data: ModuleData) -> DefMap {
+ let mut modules: Arena<ModuleData> = Arena::default();
+ let root = modules.alloc(module_data);
+
+ DefMap {
+ _c: Count::new(),
+ block: None,
+ krate,
+ edition,
+ recursion_limit: None,
+ extern_prelude: FxHashMap::default(),
+ exported_derives: FxHashMap::default(),
+ fn_proc_macro_mapping: FxHashMap::default(),
+ proc_macro_loading_error: None,
+ derive_helpers_in_scope: FxHashMap::default(),
+ prelude: None,
+ root,
+ modules,
+ registered_attrs: Vec::new(),
+ registered_tools: Vec::new(),
+ diagnostics: Vec::new(),
+ }
+ }
+
+ pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
+ self.modules
+ .iter()
+ .filter(move |(_id, data)| data.origin.file_id() == Some(file_id))
+ .map(|(id, _data)| id)
+ }
+
+ pub fn modules(&self) -> impl Iterator<Item = (LocalModuleId, &ModuleData)> + '_ {
+ self.modules.iter()
+ }
+
+ pub fn derive_helpers_in_scope(
+ &self,
+ id: AstId<ast::Adt>,
+ ) -> Option<&[(Name, MacroId, MacroCallId)]> {
+ self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref)
+ }
+
+ pub fn registered_tools(&self) -> &[SmolStr] {
+ &self.registered_tools
+ }
+
+ pub fn registered_attrs(&self) -> &[SmolStr] {
+ &self.registered_attrs
+ }
+
+ pub fn root(&self) -> LocalModuleId {
+ self.root
+ }
+
+ pub fn fn_as_proc_macro(&self, id: FunctionId) -> Option<ProcMacroId> {
+ self.fn_proc_macro_mapping.get(&id).copied()
+ }
+
+ pub fn proc_macro_loading_error(&self) -> Option<&str> {
+ self.proc_macro_loading_error.as_deref()
+ }
+
+ pub(crate) fn krate(&self) -> CrateId {
+ self.krate
+ }
+
+ pub(crate) fn block_id(&self) -> Option<BlockId> {
+ self.block.as_ref().map(|block| block.block)
+ }
+
+ pub(crate) fn prelude(&self) -> Option<ModuleId> {
+ self.prelude
+ }
+
+ pub(crate) fn extern_prelude(&self) -> impl Iterator<Item = (&Name, &ModuleId)> + '_ {
+ self.extern_prelude.iter()
+ }
+
+ pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
+ let block = self.block.as_ref().map(|b| b.block);
+ ModuleId { krate: self.krate, local_id, block }
+ }
+
+ pub(crate) fn crate_root(&self, db: &dyn DefDatabase) -> ModuleId {
+ self.with_ancestor_maps(db, self.root, &mut |def_map, _module| {
+ if def_map.block.is_none() { Some(def_map.module_id(def_map.root)) } else { None }
+ })
+ .expect("DefMap chain without root")
+ }
+
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> (PerNs, Option<usize>) {
+ let res =
+ self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow);
+ (res.resolved_def, res.segment_index)
+ }
+
+ pub(crate) fn resolve_path_locally(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> (PerNs, Option<usize>) {
+ let res = self.resolve_path_fp_with_macro_single(
+ db,
+ ResolveMode::Other,
+ original_module,
+ path,
+ shadow,
+ );
+ (res.resolved_def, res.segment_index)
+ }
+
+ /// Ascends the `DefMap` hierarchy and calls `f` with every `DefMap` and containing module.
+ ///
+ /// If `f` returns `Some(val)`, iteration is stopped and `Some(val)` is returned. If `f` returns
+ /// `None`, iteration continues.
+ pub fn with_ancestor_maps<T>(
+ &self,
+ db: &dyn DefDatabase,
+ local_mod: LocalModuleId,
+ f: &mut dyn FnMut(&DefMap, LocalModuleId) -> Option<T>,
+ ) -> Option<T> {
+ if let Some(it) = f(self, local_mod) {
+ return Some(it);
+ }
+ let mut block = self.block;
+ while let Some(block_info) = block {
+ let parent = block_info.parent.def_map(db);
+ if let Some(it) = f(&parent, block_info.parent.local_id) {
+ return Some(it);
+ }
+ block = parent.block;
+ }
+
+ None
+ }
+
+ /// If this `DefMap` is for a block expression, returns the module containing the block (which
+ /// might again be a block, or a module inside a block).
+ pub fn parent(&self) -> Option<ModuleId> {
+ Some(self.block?.parent)
+ }
+
+ /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing
+ /// the block, if `self` corresponds to a block expression.
+ pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
+ match &self[local_mod].parent {
+ Some(parent) => Some(self.module_id(*parent)),
+ None => self.block.as_ref().map(|block| block.parent),
+ }
+ }
+
+ // FIXME: this could use a more human-readable format (ideally, an IR
+ // even), as this should be a great debugging aid.
+ pub fn dump(&self, db: &dyn DefDatabase) -> String {
+ let mut buf = String::new();
+ let mut arc;
+ let mut current_map = self;
+ while let Some(block) = &current_map.block {
+ go(&mut buf, current_map, "block scope", current_map.root);
+ buf.push('\n');
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+ go(&mut buf, current_map, "crate", current_map.root);
+ return buf;
+
+ fn go(buf: &mut String, map: &DefMap, path: &str, module: LocalModuleId) {
+ format_to!(buf, "{}\n", path);
+
+ map.modules[module].scope.dump(buf);
+
+ for (name, child) in
+ map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
+ {
+ let path = format!("{}::{}", path, name);
+ buf.push('\n');
+ go(buf, map, &path, *child);
+ }
+ }
+ }
+
+ pub fn dump_block_scopes(&self, db: &dyn DefDatabase) -> String {
+ let mut buf = String::new();
+ let mut arc;
+ let mut current_map = self;
+ while let Some(block) = &current_map.block {
+ format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+
+ format_to!(buf, "crate scope\n");
+ buf
+ }
+
+ fn shrink_to_fit(&mut self) {
+ // Exhaustive match to require handling new fields.
+ let Self {
+ _c: _,
+ exported_derives,
+ extern_prelude,
+ diagnostics,
+ modules,
+ registered_attrs,
+ registered_tools,
+ fn_proc_macro_mapping,
+ derive_helpers_in_scope,
+ proc_macro_loading_error: _,
+ block: _,
+ edition: _,
+ recursion_limit: _,
+ krate: _,
+ prelude: _,
+ root: _,
+ } = self;
+
+ extern_prelude.shrink_to_fit();
+ exported_derives.shrink_to_fit();
+ diagnostics.shrink_to_fit();
+ modules.shrink_to_fit();
+ registered_attrs.shrink_to_fit();
+ registered_tools.shrink_to_fit();
+ fn_proc_macro_mapping.shrink_to_fit();
+ derive_helpers_in_scope.shrink_to_fit();
+ for (_, module) in modules.iter_mut() {
+ module.children.shrink_to_fit();
+ module.scope.shrink_to_fit();
+ }
+ }
+
+ /// Get a reference to the def map's diagnostics.
+ pub fn diagnostics(&self) -> &[DefDiagnostic] {
+ self.diagnostics.as_slice()
+ }
+
+ pub fn recursion_limit(&self) -> Option<u32> {
+ self.recursion_limit
+ }
+}
+
+impl ModuleData {
+ pub(crate) fn new(origin: ModuleOrigin, visibility: Visibility) -> Self {
+ ModuleData {
+ origin,
+ visibility,
+ parent: None,
+ children: FxHashMap::default(),
+ scope: ItemScope::default(),
+ }
+ }
+
+ /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
+ pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
+ self.origin.definition_source(db)
+ }
+
+ /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
+ /// `None` for the crate root or block.
+ pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
+ let decl = self.origin.declaration()?;
+ let value = decl.to_node(db.upcast());
+ Some(InFile { file_id: decl.file_id, value })
+ }
+}
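Editorial aside (not part of the diff): the declaration/definition split that `declaration_source` and `definition_source` expose, shown with ordinary module syntax. The example code is invented for this note.

// For an inline module, a single `mod bar { ... }` node is both the
// declaration and the definition (`ModuleOrigin::Inline`).
mod bar {
    pub fn hello() -> &'static str { "hello from bar" }
}

// An out-of-line module would instead be split across two sources:
//     mod foo;       // declaration, in the parent file
//     // src/foo.rs  // definition, a file of its own
// which is why `ModuleOrigin::File` records both `declaration` and `definition`.

fn main() {
    println!("{}", bar::hello());
}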
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ModuleSource {
+ SourceFile(ast::SourceFile),
+ Module(ast::Module),
+ BlockExpr(ast::BlockExpr),
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
new file mode 100644
index 000000000..3650204ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
@@ -0,0 +1,98 @@
+//! Post-nameres attribute resolution.
+
+use hir_expand::MacroCallId;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ attr::Attr,
+ attr_macro_as_call_id, builtin_attr,
+ db::DefDatabase,
+ item_scope::BuiltinShadowMode,
+ macro_id_to_def_id,
+ nameres::path_resolution::ResolveMode,
+ path::{ModPath, PathKind},
+ AstIdWithPath, LocalModuleId, UnresolvedMacro,
+};
+
+use super::DefMap;
+
+pub enum ResolvedAttr {
+ /// Attribute resolved to an attribute macro.
+ Macro(MacroCallId),
+ /// Attribute resolved to something else that does not require expansion.
+ Other,
+}
+
+impl DefMap {
+ pub(crate) fn resolve_attr_macro(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ ast_id: AstIdWithPath<ast::Item>,
+ attr: &Attr,
+ ) -> Result<ResolvedAttr, UnresolvedMacro> {
+ // NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
+
+ if self.is_builtin_or_registered_attr(&ast_id.path) {
+ return Ok(ResolvedAttr::Other);
+ }
+
+ let resolved_res = self.resolve_path_fp_with_macro(
+ db,
+ ResolveMode::Other,
+ original_module,
+ &ast_id.path,
+ BuiltinShadowMode::Module,
+ );
+ let def = match resolved_res.resolved_def.take_macros() {
+ Some(def) => {
+ if def.is_attribute(db) {
+ def
+ } else {
+ return Ok(ResolvedAttr::Other);
+ }
+ }
+ None => return Err(UnresolvedMacro { path: ast_id.path }),
+ };
+
+ Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
+ db,
+ &ast_id,
+ attr,
+ self.krate,
+ macro_id_to_def_id(db, def),
+ false,
+ )))
+ }
+
+ pub(crate) fn is_builtin_or_registered_attr(&self, path: &ModPath) -> bool {
+ if path.kind != PathKind::Plain {
+ return false;
+ }
+
+ let segments = path.segments();
+
+ if let Some(name) = segments.first() {
+ let name = name.to_smol_str();
+ let pred = |n: &_| *n == name;
+
+ let registered = self.registered_tools.iter().map(SmolStr::as_str);
+ let is_tool = builtin_attr::TOOL_MODULES.iter().copied().chain(registered).any(pred);
+ // FIXME: tool modules can be shadowed by actual modules
+ if is_tool {
+ return true;
+ }
+
+ if segments.len() == 1 {
+ let registered = self.registered_attrs.iter().map(SmolStr::as_str);
+ let is_inert = builtin_attr::INERT_ATTRIBUTES
+ .iter()
+ .map(|it| it.name)
+ .chain(registered)
+ .any(pred);
+ return is_inert;
+ }
+ }
+ false
+ }
+}
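Editorial aside (not part of the diff): the kind of attribute paths that `is_builtin_or_registered_attr` lets through without treating them as unresolved attribute macros. The example sticks to the stable builtin tool modules (`rustfmt`, `clippy`); the `#![register_tool]`/`#![register_attr]` variants handled above are nightly-only features.

// `rustfmt` and `clippy` are builtin TOOL_MODULES, so these paths resolve as
// inert tool attributes rather than attribute macros that need expansion.
#[rustfmt::skip]
fn oddly_formatted(  x:   u32  ) -> u32 {     x + 1 }

#[allow(clippy::needless_return)]
fn answer() -> u32 {
    return 42;
}

fn main() {
    println!("{} {}", oddly_formatted(1), answer());
}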
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
new file mode 100644
index 000000000..8a6bb929c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -0,0 +1,2202 @@
+//! The core of the module-level name resolution algorithm.
+//!
+//! `DefCollector::collect` contains the fixed-point iteration loop which
+//! resolves imports and expands macros.
+
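Editorial aside (not part of the diff): a stand-alone, deliberately simplified sketch of what a fixed-point resolution loop looks like in the abstract, since that is the shape of the collector's main loop below. Every name and type here is invented for illustration; the real collector interleaves import resolution, macro expansion, and attribute resolution.

use std::collections::HashMap;

// Minimal fixed-point loop: every pass may unlock more work for the next
// pass (here, an alias can only be resolved once its target is resolved),
// so we iterate until a pass makes no progress.
fn resolve_aliases(aliases: &HashMap<&str, &str>, roots: &[&str]) -> HashMap<String, String> {
    let mut resolved: HashMap<String, String> =
        roots.iter().map(|r| (r.to_string(), r.to_string())).collect();
    loop {
        let before = resolved.len();
        for (&name, &target) in aliases {
            if let Some(root) = resolved.get(target).cloned() {
                resolved.entry(name.to_string()).or_insert(root);
            }
        }
        if resolved.len() == before {
            break; // fixed point: nothing new resolved in this pass
        }
    }
    resolved
}

fn main() {
    let mut aliases = HashMap::new();
    aliases.insert("b", "a"); // b -> a
    aliases.insert("c", "b"); // c -> b -> a, only resolvable after b is
    let resolved = resolve_aliases(&aliases, &["a"]);
    println!("{resolved:?}");
}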
+use std::{iter, mem};
+
+use base_db::{CrateId, Edition, FileId};
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::find_builtin_attr,
+ builtin_derive_macro::find_builtin_derive,
+ builtin_fn_macro::find_builtin_macro,
+ name::{name, AsName, Name},
+ proc_macro::ProcMacroExpander,
+ ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
+ MacroDefKind,
+};
+use itertools::{izip, Itertools};
+use la_arena::Idx;
+use limit::Limit;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::always;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ attr::{Attr, AttrId, Attrs},
+ attr_macro_as_call_id,
+ db::DefDatabase,
+ derive_macro_as_call_id,
+ item_scope::{ImportType, PerNsGlobImports},
+ item_tree::{
+ self, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, MacroCall,
+ MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
+ },
+ macro_call_as_call_id, macro_id_to_def_id,
+ nameres::{
+ diagnostics::DefDiagnostic,
+ mod_resolution::ModDir,
+ path_resolution::ReachedFixedPoint,
+ proc_macro::{ProcMacroDef, ProcMacroKind},
+ BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode,
+ },
+ path::{ImportAlias, ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
+ FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
+ MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId,
+ ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
+};
+
+static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
+static EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128);
+static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
+
+pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: TreeId) -> DefMap {
+ let crate_graph = db.crate_graph();
+
+ let mut deps = FxHashMap::default();
+ // populate external prelude and dependency list
+ let krate = &crate_graph[def_map.krate];
+ for dep in &krate.dependencies {
+ tracing::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);
+ let dep_def_map = db.crate_def_map(dep.crate_id);
+ let dep_root = dep_def_map.module_id(dep_def_map.root);
+
+ deps.insert(dep.as_name(), dep_root.into());
+
+ if dep.is_prelude() && !tree_id.is_block() {
+ def_map.extern_prelude.insert(dep.as_name(), dep_root);
+ }
+ }
+
+ let cfg_options = &krate.cfg_options;
+ let proc_macros = match &krate.proc_macro {
+ Ok(proc_macros) => {
+ proc_macros
+ .iter()
+ .enumerate()
+ .map(|(idx, it)| {
+ // FIXME: a hacky way to create a Name from a string.
+ let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() };
+ (
+ name.as_name(),
+ ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)),
+ )
+ })
+ .collect()
+ }
+ Err(e) => {
+ def_map.proc_macro_loading_error = Some(e.clone().into_boxed_str());
+ Vec::new()
+ }
+ };
+ let is_proc_macro = krate.is_proc_macro;
+
+ let mut collector = DefCollector {
+ db,
+ def_map,
+ deps,
+ glob_imports: FxHashMap::default(),
+ unresolved_imports: Vec::new(),
+ indeterminate_imports: Vec::new(),
+ unresolved_macros: Vec::new(),
+ mod_dirs: FxHashMap::default(),
+ cfg_options,
+ proc_macros,
+ from_glob_import: Default::default(),
+ skip_attrs: Default::default(),
+ is_proc_macro,
+ };
+ if tree_id.is_block() {
+ collector.seed_with_inner(tree_id);
+ } else {
+ collector.seed_with_top_level();
+ }
+ collector.collect();
+ let mut def_map = collector.finish();
+ def_map.shrink_to_fit();
+ def_map
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum PartialResolvedImport {
+ /// None of the namespaces is resolved
+ Unresolved,
+ /// One of the namespaces is resolved
+ Indeterminate(PerNs),
+ /// All namespaces are resolved, OR it comes from another crate
+ Resolved(PerNs),
+}
+
+impl PartialResolvedImport {
+ fn namespaces(self) -> PerNs {
+ match self {
+ PartialResolvedImport::Unresolved => PerNs::none(),
+ PartialResolvedImport::Indeterminate(ns) | PartialResolvedImport::Resolved(ns) => ns,
+ }
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum ImportSource {
+ Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> },
+ ExternCrate(ItemTreeId<item_tree::ExternCrate>),
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct Import {
+ path: ModPath,
+ alias: Option<ImportAlias>,
+ visibility: RawVisibility,
+ kind: ImportKind,
+ is_prelude: bool,
+ is_extern_crate: bool,
+ is_macro_use: bool,
+ source: ImportSource,
+}
+
+impl Import {
+ fn from_use(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ tree: &ItemTree,
+ id: ItemTreeId<item_tree::Import>,
+ ) -> Vec<Self> {
+ let it = &tree[id.value];
+ let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
+ let visibility = &tree[it.visibility];
+ let is_prelude = attrs.by_key("prelude_import").exists();
+
+ let mut res = Vec::new();
+ it.use_tree.expand(|idx, path, kind, alias| {
+ res.push(Self {
+ path,
+ alias,
+ visibility: visibility.clone(),
+ kind,
+ is_prelude,
+ is_extern_crate: false,
+ is_macro_use: false,
+ source: ImportSource::Import { id, use_tree: idx },
+ });
+ });
+ res
+ }
+
+ fn from_extern_crate(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ tree: &ItemTree,
+ id: ItemTreeId<item_tree::ExternCrate>,
+ ) -> Self {
+ let it = &tree[id.value];
+ let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
+ let visibility = &tree[it.visibility];
+ Self {
+ path: ModPath::from_segments(PathKind::Plain, iter::once(it.name.clone())),
+ alias: it.alias.clone(),
+ visibility: visibility.clone(),
+ kind: ImportKind::Plain,
+ is_prelude: false,
+ is_extern_crate: true,
+ is_macro_use: attrs.by_key("macro_use").exists(),
+ source: ImportSource::ExternCrate(id),
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct ImportDirective {
+ module_id: LocalModuleId,
+ import: Import,
+ status: PartialResolvedImport,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct MacroDirective {
+ module_id: LocalModuleId,
+ depth: usize,
+ kind: MacroDirectiveKind,
+ container: ItemContainerId,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum MacroDirectiveKind {
+ FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
+ Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
+ Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
+}
+
+/// Walks the tree of modules recursively.
+struct DefCollector<'a> {
+ db: &'a dyn DefDatabase,
+ def_map: DefMap,
+ deps: FxHashMap<Name, ModuleId>,
+ glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility)>>,
+ unresolved_imports: Vec<ImportDirective>,
+ indeterminate_imports: Vec<ImportDirective>,
+ unresolved_macros: Vec<MacroDirective>,
+ mod_dirs: FxHashMap<LocalModuleId, ModDir>,
+ cfg_options: &'a CfgOptions,
+ /// List of procedural macros defined by this crate. This is read from the dynamic library
+ /// built by the build system, and is the list of proc. macros we can actually expand. It is
+ /// empty when proc. macro support is disabled (in which case we still do name resolution for
+ /// them).
+ proc_macros: Vec<(Name, ProcMacroExpander)>,
+ is_proc_macro: bool,
+ from_glob_import: PerNsGlobImports,
+ /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute.
+ /// This map is used to skip all attributes up to and including the one that failed to resolve,
+ /// in order to not expand them twice.
+ ///
+ /// This also stores the attributes to skip when we resolve derive helpers and non-macro
+ /// non-builtin attributes in general.
+ skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
+}
+
+impl DefCollector<'_> {
+ fn seed_with_top_level(&mut self) {
+ let _p = profile::span("seed_with_top_level");
+
+ let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
+ let item_tree = self.db.file_item_tree(file_id.into());
+ let module_id = self.def_map.root;
+
+ let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
+ if attrs.cfg().map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false)) {
+ self.inject_prelude(&attrs);
+
+ // Process other crate-level attributes.
+ for attr in &*attrs {
+ let attr_name = match attr.path.as_ident() {
+ Some(name) => name,
+ None => continue,
+ };
+
+ if *attr_name == hir_expand::name![recursion_limit] {
+ if let Some(limit) = attr.string_value() {
+ if let Ok(limit) = limit.parse() {
+ self.def_map.recursion_limit = Some(limit);
+ }
+ }
+ continue;
+ }
+
+ if *attr_name == hir_expand::name![crate_type] {
+ if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
+ self.is_proc_macro = true;
+ }
+ continue;
+ }
+
+ let attr_is_register_like = *attr_name == hir_expand::name![register_attr]
+ || *attr_name == hir_expand::name![register_tool];
+ if !attr_is_register_like {
+ continue;
+ }
+
+ let registered_name = match attr.single_ident_value() {
+ Some(ident) => ident.as_name(),
+ _ => continue,
+ };
+
+ if *attr_name == hir_expand::name![register_attr] {
+ self.def_map.registered_attrs.push(registered_name.to_smol_str());
+ cov_mark::hit!(register_attr);
+ } else {
+ self.def_map.registered_tools.push(registered_name.to_smol_str());
+ cov_mark::hit!(register_tool);
+ }
+ }
+
+ ModCollector {
+ def_collector: self,
+ macro_depth: 0,
+ module_id,
+ tree_id: TreeId::new(file_id.into(), None),
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ }
+ }
+
+ fn seed_with_inner(&mut self, tree_id: TreeId) {
+ let item_tree = tree_id.item_tree(self.db);
+ let module_id = self.def_map.root;
+
+ let is_cfg_enabled = item_tree
+ .top_level_attrs(self.db, self.def_map.krate)
+ .cfg()
+ .map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false));
+ if is_cfg_enabled {
+ ModCollector {
+ def_collector: self,
+ macro_depth: 0,
+ module_id,
+ tree_id,
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ }
+ }
+
+ fn resolution_loop(&mut self) {
+ let _p = profile::span("DefCollector::resolution_loop");
+
+ // main name resolution fixed-point loop.
+ let mut i = 0;
+ 'resolve_attr: loop {
+ 'resolve_macros: loop {
+ self.db.unwind_if_cancelled();
+
+ {
+ let _p = profile::span("resolve_imports loop");
+
+ 'resolve_imports: loop {
+ if self.resolve_imports() == ReachedFixedPoint::Yes {
+ break 'resolve_imports;
+ }
+ }
+ }
+ if self.resolve_macros() == ReachedFixedPoint::Yes {
+ break 'resolve_macros;
+ }
+
+ i += 1;
+ if FIXED_POINT_LIMIT.check(i).is_err() {
+ tracing::error!("name resolution is stuck");
+ break 'resolve_attr;
+ }
+ }
+
+ if self.reseed_with_unresolved_attribute() == ReachedFixedPoint::Yes {
+ break 'resolve_attr;
+ }
+ }
+ }
+
+ fn collect(&mut self) {
+ let _p = profile::span("DefCollector::collect");
+
+ self.resolution_loop();
+
+ // Resolve all indeterminate imports again, since some macro expansions may have
+ // introduced new imports that shadow the partially resolved ones.
+ // FIXME: We could maybe skip this if we handled the indeterminate imports in `resolve_imports`
+ // correctly
+ let partial_resolved = self.indeterminate_imports.drain(..).map(|directive| {
+ ImportDirective { status: PartialResolvedImport::Unresolved, ..directive }
+ });
+ self.unresolved_imports.extend(partial_resolved);
+ self.resolve_imports();
+
+ let unresolved_imports = mem::take(&mut self.unresolved_imports);
+ // show unresolved imports in completion, etc
+ for directive in &unresolved_imports {
+ self.record_resolved_import(directive);
+ }
+ self.unresolved_imports = unresolved_imports;
+
+ if self.is_proc_macro {
+ // A crate exporting procedural macros is not allowed to export anything else.
+ //
+ // Additionally, while the proc macro entry points must be `pub`, they are not publicly
+ // exported in type/value namespace. This function reduces the visibility of all items
+ // in the crate root that aren't proc macros.
+ let root = self.def_map.root;
+ let module_id = self.def_map.module_id(root);
+ let root = &mut self.def_map.modules[root];
+ root.scope.censor_non_proc_macros(module_id);
+ }
+ }
+
+ /// When the fixed-point loop reaches a stable state, we might still have
+ /// some unresolved attributes left over. This takes one of them, and feeds
+ /// the item it's applied to back into name resolution.
+ ///
+ /// This effectively ignores the fact that the macro is there and just treats the item as
+ /// normal code.
+ ///
+ /// This improves UX for unresolved attributes, and replicates the
+ /// behavior before we supported proc. attribute macros.
+ fn reseed_with_unresolved_attribute(&mut self) -> ReachedFixedPoint {
+ cov_mark::hit!(unresolved_attribute_fallback);
+
+ let unresolved_attr =
+ self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
+ .kind
+ {
+ MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree } => {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::Attr {
+ ast_id: ast_id.ast_id,
+ attr_args: Default::default(),
+ invoc_attr_index: attr.id.ast_index,
+ is_derive: false,
+ },
+ attr.path().clone(),
+ ));
+
+ self.skip_attrs.insert(ast_id.ast_id.with_value(*mod_item), attr.id);
+
+ Some((idx, directive, *mod_item, *tree))
+ }
+ _ => None,
+ });
+
+ match unresolved_attr {
+ Some((pos, &MacroDirective { module_id, depth, container, .. }, mod_item, tree_id)) => {
+ let item_tree = &tree_id.item_tree(self.db);
+ let mod_dir = self.mod_dirs[&module_id].clone();
+ ModCollector {
+ def_collector: self,
+ macro_depth: depth,
+ module_id,
+ tree_id,
+ item_tree,
+ mod_dir,
+ }
+ .collect(&[mod_item], container);
+
+ self.unresolved_macros.swap_remove(pos);
+ // Continue name resolution with the new data.
+ ReachedFixedPoint::No
+ }
+ None => ReachedFixedPoint::Yes,
+ }
+ }
+
+ fn inject_prelude(&mut self, crate_attrs: &Attrs) {
+ // See compiler/rustc_builtin_macros/src/standard_library_imports.rs
+
+ if crate_attrs.by_key("no_core").exists() {
+ // libcore does not get a prelude.
+ return;
+ }
+
+ let krate = if crate_attrs.by_key("no_std").exists() {
+ name![core]
+ } else {
+ let std = name![std];
+ if self.def_map.extern_prelude().any(|(name, _)| *name == std) {
+ std
+ } else {
+ // If `std` does not exist for some reason, fall back to core. This mostly helps
+ // keep r-a's own tests minimal.
+ name![core]
+ }
+ };
+
+ let edition = match self.def_map.edition {
+ Edition::Edition2015 => name![rust_2015],
+ Edition::Edition2018 => name![rust_2018],
+ Edition::Edition2021 => name![rust_2021],
+ };
+
+ let path_kind = if self.def_map.edition == Edition::Edition2015 {
+ PathKind::Plain
+ } else {
+ PathKind::Abs
+ };
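+ // E.g. for a 2021-edition crate that depends on `std`, the path built below is
+ // `::std::prelude::rust_2021`, with `::std::prelude::v1` tried as a fallback.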
+ let path =
+ ModPath::from_segments(path_kind, [krate.clone(), name![prelude], edition].into_iter());
+ // Fall back to the older `std::prelude::v1` for compatibility with Rust <1.52.0
+ // FIXME remove this fallback
+ let fallback_path =
+ ModPath::from_segments(path_kind, [krate, name![prelude], name![v1]].into_iter());
+
+ for path in &[path, fallback_path] {
+ let (per_ns, _) = self.def_map.resolve_path(
+ self.db,
+ self.def_map.root,
+ path,
+ BuiltinShadowMode::Other,
+ );
+
+ match per_ns.types {
+ Some((ModuleDefId::ModuleId(m), _)) => {
+ self.def_map.prelude = Some(m);
+ return;
+ }
+ types => {
+ tracing::debug!(
+ "could not resolve prelude path `{}` to module (resolved to {:?})",
+ path,
+ types
+ );
+ }
+ }
+ }
+ }
+
+ /// Adds a definition of procedural macro `name` to the root module.
+ ///
+ /// # Notes on procedural macro resolution
+ ///
+ /// Procedural macro functionality is provided by the build system: It has to build the proc
+ /// macro and pass the resulting dynamic library to rust-analyzer.
+ ///
+ /// When procedural macro support is enabled, the list of proc macros exported by a crate is
+ /// known before we resolve names in the crate. This list is stored in `self.proc_macros` and is
+ /// derived from the dynamic library.
+ ///
+ /// However, we *also* would like to be able to at least *resolve* macros on our own, without
+ /// help from the build system. So, when the macro isn't found in `self.proc_macros`, we instead
+ /// use a dummy expander that always errors. This comes with the drawback of macros potentially
+ /// going out of sync with what the build system sees (since we resolve using VFS state, but
+ /// Cargo builds only on-disk files). We could and probably should add diagnostics for that.
+ fn export_proc_macro(
+ &mut self,
+ def: ProcMacroDef,
+ id: ItemTreeId<item_tree::Function>,
+ fn_id: FunctionId,
+ module_id: ModuleId,
+ ) {
+ let kind = def.kind.to_basedb_kind();
+ let (expander, kind) = match self.proc_macros.iter().find(|(n, _)| n == &def.name) {
+ Some(&(_, expander)) => (expander, kind),
+ None => (ProcMacroExpander::dummy(self.def_map.krate), kind),
+ };
+
+ let proc_macro_id =
+ ProcMacroLoc { container: module_id, id, expander, kind }.intern(self.db);
+ self.define_proc_macro(def.name.clone(), proc_macro_id);
+ if let ProcMacroKind::CustomDerive { helpers } = def.kind {
+ self.def_map
+ .exported_derives
+ .insert(macro_id_to_def_id(self.db, proc_macro_id.into()), helpers);
+ }
+ self.def_map.fn_proc_macro_mapping.insert(fn_id, proc_macro_id);
+ }
+
+ /// Define a macro with `macro_rules`.
+ ///
+ /// It will define the macro in legacy textual scope, and if it has `#[macro_export]`,
+ /// then it is also defined in the root module scope.
+ /// You can `use` it or invoke it via `crate::macro_name` anywhere, before or after the definition.
+ ///
+ /// Perhaps surprisingly, the macro is never placed in the current module's scope.
+ /// This code fails with "unresolved import/macro":
+ /// ```rust,compile_fail
+ /// mod m { macro_rules! foo { () => {} } }
+ /// use m::foo as bar;
+ /// ```
+ ///
+ /// ```rust,compile_fail
+ /// macro_rules! foo { () => {} }
+ /// self::foo!();
+ /// crate::foo!();
+ /// ```
+ ///
+ /// Well, this code compiles, because the plain path `foo` in `use` is searched
+ /// in the legacy textual scope only.
+ /// ```rust
+ /// macro_rules! foo { () => {} }
+ /// use foo as bar;
+ /// ```
+ fn define_macro_rules(
+ &mut self,
+ module_id: LocalModuleId,
+ name: Name,
+ macro_: MacroRulesId,
+ export: bool,
+ ) {
+ // Textual scoping
+ self.define_legacy_macro(module_id, name.clone(), macro_.into());
+
+ // Module scoping
+ // In Rust, `#[macro_export]` macros are unconditionally visible at the
+ // crate root, even if the parent module is **not** visible.
+ if export {
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ Visibility::Public,
+ ImportType::Named,
+ );
+ }
+ }
+
+ /// Define a legacy textually scoped macro in a module
+ ///
+ /// We use the `legacy_macros` map to store all legacy textually scoped macros visible per module.
+ /// Each module clones all macros from its parent's legacy scope whose definitions precede the
+ /// definition of the current module.
+ /// Additionally, `#[macro_use]` on a module imports all legacy macros visible inside it into the
+ /// current legacy scope, with possible shadowing.
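+ ///
+ /// An illustrative sketch of the textual scoping this models:
+ /// ```rust
+ /// macro_rules! foo { () => {} }
+ /// mod child {
+ ///     foo!(); // visible here via the parent's legacy textual scope
+ /// }
+ /// ```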
+ fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, mac: MacroId) {
+ // Always shadowing
+ self.def_map.modules[module_id].scope.define_legacy_macro(name, mac);
+ }
+
+ /// Define a macro 2.0 macro
+ ///
+ /// The scope of a macro 2.0 macro is the same as that of a normal function item.
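+ ///
+ /// An illustrative sketch (macros 2.0 are unstable and require `#![feature(decl_macro)]`):
+ /// ```rust,ignore
+ /// mod m {
+ ///     pub macro foo() {}
+ /// }
+ /// use m::foo; // resolved through the module path, unlike `macro_rules!`
+ /// ```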
+ fn define_macro_def(
+ &mut self,
+ module_id: LocalModuleId,
+ name: Name,
+ macro_: Macro2Id,
+ vis: &RawVisibility,
+ ) {
+ let vis =
+ self.def_map.resolve_visibility(self.db, module_id, vis).unwrap_or(Visibility::Public);
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ vis,
+ ImportType::Named,
+ );
+ }
+
+ /// Define a proc macro
+ ///
+ /// A proc macro is scoped like a normal macro, but it is not visible in the legacy textual scope
+ /// and is unconditionally exported.
+ fn define_proc_macro(&mut self, name: Name, macro_: ProcMacroId) {
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ Visibility::Public,
+ ImportType::Named,
+ );
+ }
+
+ /// Import macros from `#[macro_use] extern crate`.
+ fn import_macros_from_extern_crate(
+ &mut self,
+ current_module_id: LocalModuleId,
+ extern_crate: &item_tree::ExternCrate,
+ ) {
+ tracing::debug!(
+ "importing macros from extern crate: {:?} ({:?})",
+ extern_crate,
+ self.def_map.edition,
+ );
+
+ if let Some(m) = self.resolve_extern_crate(&extern_crate.name) {
+ if m == self.def_map.module_id(current_module_id) {
+ cov_mark::hit!(ignore_macro_use_extern_crate_self);
+ return;
+ }
+
+ cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
+ self.import_all_macros_exported(current_module_id, m.krate);
+ }
+ }
+
+ /// Import all exported macros from another crate
+ ///
+ /// Exported macros are just all macros in the root module scope.
+ /// Note that this includes not only all `#[macro_export]` macros, but also all aliases
+ /// created by `use` in the root module, regardless of the visibility of the `use`.
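+ ///
+ /// An illustrative sketch (crate names are hypothetical):
+ /// ```rust,ignore
+ /// // In crate `dep`:
+ /// #[macro_export]
+ /// macro_rules! exported { () => {} }
+ /// pub use some_dependency::helper_macro; // a `use` in the root module also counts
+ ///
+ /// // In the importing crate, both macros end up in the legacy scope:
+ /// #[macro_use]
+ /// extern crate dep;
+ /// ```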
+ fn import_all_macros_exported(&mut self, current_module_id: LocalModuleId, krate: CrateId) {
+ let def_map = self.db.crate_def_map(krate);
+ for (name, def) in def_map[def_map.root].scope.macros() {
+ // `#[macro_use]` brings macros into legacy scope. Yes, even non-`macro_rules!` macros.
+ self.define_legacy_macro(current_module_id, name.clone(), def);
+ }
+ }
+
+ /// Tries to resolve every currently unresolved import.
+ fn resolve_imports(&mut self) -> ReachedFixedPoint {
+ let mut res = ReachedFixedPoint::Yes;
+ let imports = mem::take(&mut self.unresolved_imports);
+
+ self.unresolved_imports = imports
+ .into_iter()
+ .filter_map(|mut directive| {
+ directive.status = self.resolve_import(directive.module_id, &directive.import);
+ match directive.status {
+ PartialResolvedImport::Indeterminate(_) => {
+ self.record_resolved_import(&directive);
+ self.indeterminate_imports.push(directive);
+ res = ReachedFixedPoint::No;
+ None
+ }
+ PartialResolvedImport::Resolved(_) => {
+ self.record_resolved_import(&directive);
+ res = ReachedFixedPoint::No;
+ None
+ }
+ PartialResolvedImport::Unresolved => Some(directive),
+ }
+ })
+ .collect();
+ res
+ }
+
+ fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
+ let _p = profile::span("resolve_import").detail(|| format!("{}", import.path));
+ tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
+ if import.is_extern_crate {
+ let name = import
+ .path
+ .as_ident()
+ .expect("extern crate should have been desugared to one-element path");
+
+ let res = self.resolve_extern_crate(name);
+
+ match res {
+ Some(res) => {
+ PartialResolvedImport::Resolved(PerNs::types(res.into(), Visibility::Public))
+ }
+ None => PartialResolvedImport::Unresolved,
+ }
+ } else {
+ let res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Import,
+ module_id,
+ &import.path,
+ BuiltinShadowMode::Module,
+ );
+
+ let def = res.resolved_def;
+ if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() {
+ return PartialResolvedImport::Unresolved;
+ }
+
+ if let Some(krate) = res.krate {
+ if krate != self.def_map.krate {
+ return PartialResolvedImport::Resolved(
+ def.filter_visibility(|v| matches!(v, Visibility::Public)),
+ );
+ }
+ }
+
+ // Check whether all namespaces are resolved
+ if def.take_types().is_some()
+ && def.take_values().is_some()
+ && def.take_macros().is_some()
+ {
+ PartialResolvedImport::Resolved(def)
+ } else {
+ PartialResolvedImport::Indeterminate(def)
+ }
+ }
+ }
+
+ fn resolve_extern_crate(&self, name: &Name) -> Option<ModuleId> {
+ if *name == name!(self) {
+ cov_mark::hit!(extern_crate_self_as);
+ let root = match self.def_map.block {
+ Some(_) => {
+ let def_map = self.def_map.crate_root(self.db).def_map(self.db);
+ def_map.module_id(def_map.root())
+ }
+ None => self.def_map.module_id(self.def_map.root()),
+ };
+ Some(root)
+ } else {
+ self.deps.get(name).copied()
+ }
+ }
+
+ fn record_resolved_import(&mut self, directive: &ImportDirective) {
+ let _p = profile::span("record_resolved_import");
+
+ let module_id = directive.module_id;
+ let import = &directive.import;
+ let mut def = directive.status.namespaces();
+ let vis = self
+ .def_map
+ .resolve_visibility(self.db, module_id, &directive.import.visibility)
+ .unwrap_or(Visibility::Public);
+
+ match import.kind {
+ ImportKind::Plain | ImportKind::TypeOnly => {
+ let name = match &import.alias {
+ Some(ImportAlias::Alias(name)) => Some(name),
+ Some(ImportAlias::Underscore) => None,
+ None => match import.path.segments().last() {
+ Some(last_segment) => Some(last_segment),
+ None => {
+ cov_mark::hit!(bogus_paths);
+ return;
+ }
+ },
+ };
+
+ if import.kind == ImportKind::TypeOnly {
+ def.values = None;
+ def.macros = None;
+ }
+
+ tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
+
+ // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
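+ // (e.g. `extern crate serde as s;` in the crate root lets any module write `use s::...;`)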
+ if import.is_extern_crate && module_id == self.def_map.root {
+ if let (Some(ModuleDefId::ModuleId(def)), Some(name)) = (def.take_types(), name)
+ {
+ self.def_map.extern_prelude.insert(name.clone(), def);
+ }
+ }
+
+ self.update(module_id, &[(name.cloned(), def)], vis, ImportType::Named);
+ }
+ ImportKind::Glob => {
+ tracing::debug!("glob import: {:?}", import);
+ match def.take_types() {
+ Some(ModuleDefId::ModuleId(m)) => {
+ if import.is_prelude {
+ // Note: This dodgily overrides the injected prelude. The rustc
+ // implementation seems to work the same though.
+ cov_mark::hit!(std_prelude);
+ self.def_map.prelude = Some(m);
+ } else if m.krate != self.def_map.krate {
+ cov_mark::hit!(glob_across_crates);
+ // glob import from other crate => we can just import everything once
+ let item_map = m.def_map(self.db);
+ let scope = &item_map[m.local_id].scope;
+
+ // Module-scoped macros are included
+ let items = scope
+ .resolutions()
+ // only keep visible names...
+ .map(|(n, res)| {
+ (n, res.filter_visibility(|v| v.is_visible_from_other_crate()))
+ })
+ .filter(|(_, res)| !res.is_none())
+ .collect::<Vec<_>>();
+
+ self.update(module_id, &items, vis, ImportType::Glob);
+ } else {
+ // glob import from same crate => we do an initial
+ // import, and then need to propagate any further
+ // additions
+ let def_map;
+ let scope = if m.block == self.def_map.block_id() {
+ &self.def_map[m.local_id].scope
+ } else {
+ def_map = m.def_map(self.db);
+ &def_map[m.local_id].scope
+ };
+
+ // Module-scoped macros are included
+ let items = scope
+ .resolutions()
+ // only keep visible names...
+ .map(|(n, res)| {
+ (
+ n,
+ res.filter_visibility(|v| {
+ v.is_visible_from_def_map(
+ self.db,
+ &self.def_map,
+ module_id,
+ )
+ }),
+ )
+ })
+ .filter(|(_, res)| !res.is_none())
+ .collect::<Vec<_>>();
+
+ self.update(module_id, &items, vis, ImportType::Glob);
+ // record the glob import in case we add further items
+ let glob = self.glob_imports.entry(m.local_id).or_default();
+ if !glob.iter().any(|(mid, _)| *mid == module_id) {
+ glob.push((module_id, vis));
+ }
+ }
+ }
+ Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
+ cov_mark::hit!(glob_enum);
+ // glob import from enum => just import all the variants
+
+ // XXX: urgh, so this works by accident! Here, we look at
+ // the enum data, and, in theory, this might require us to
+ // look back at the crate_def_map, creating a cycle. For
+ // example, `enum E { crate::some_macro!(); }`. Luckily, the
+ // only kind of macro that is allowed inside enum is a
+ // `cfg_macro`, and we don't need to run name resolution for
+ // it, but this is sheer luck!
+ let enum_data = self.db.enum_data(e);
+ let resolutions = enum_data
+ .variants
+ .iter()
+ .map(|(local_id, variant_data)| {
+ let name = variant_data.name.clone();
+ let variant = EnumVariantId { parent: e, local_id };
+ let res = PerNs::both(variant.into(), variant.into(), vis);
+ (Some(name), res)
+ })
+ .collect::<Vec<_>>();
+ self.update(module_id, &resolutions, vis, ImportType::Glob);
+ }
+ Some(d) => {
+ tracing::debug!("glob import {:?} from non-module/enum {:?}", import, d);
+ }
+ None => {
+ tracing::debug!("glob import {:?} didn't resolve as type", import);
+ }
+ }
+ }
+ }
+ }
+
+ fn update(
+ &mut self,
+ module_id: LocalModuleId,
+ resolutions: &[(Option<Name>, PerNs)],
+ vis: Visibility,
+ import_type: ImportType,
+ ) {
+ self.db.unwind_if_cancelled();
+ self.update_recursive(module_id, resolutions, vis, import_type, 0)
+ }
+
+ fn update_recursive(
+ &mut self,
+ module_id: LocalModuleId,
+ resolutions: &[(Option<Name>, PerNs)],
+ // All resolutions are imported with this visibility; the visibilities in
+ // the `PerNs` values are ignored and overwritten
+ vis: Visibility,
+ import_type: ImportType,
+ depth: usize,
+ ) {
+ if GLOB_RECURSION_LIMIT.check(depth).is_err() {
+ // prevent stack overflows (but this shouldn't be possible)
+ panic!("infinite recursion in glob imports!");
+ }
+ let mut changed = false;
+
+ for (name, res) in resolutions {
+ match name {
+ Some(name) => {
+ let scope = &mut self.def_map.modules[module_id].scope;
+ changed |= scope.push_res_with_import(
+ &mut self.from_glob_import,
+ (module_id, name.clone()),
+ res.with_visibility(vis),
+ import_type,
+ );
+ }
+ None => {
+ let tr = match res.take_types() {
+ Some(ModuleDefId::TraitId(tr)) => tr,
+ Some(other) => {
+ tracing::debug!("non-trait `_` import of {:?}", other);
+ continue;
+ }
+ None => continue,
+ };
+ let old_vis = self.def_map.modules[module_id].scope.unnamed_trait_vis(tr);
+ let should_update = match old_vis {
+ None => true,
+ Some(old_vis) => {
+ let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| {
+ panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr);
+ });
+
+ if max_vis == old_vis {
+ false
+ } else {
+ cov_mark::hit!(upgrade_underscore_visibility);
+ true
+ }
+ }
+ };
+
+ if should_update {
+ changed = true;
+ self.def_map.modules[module_id].scope.push_unnamed_trait(tr, vis);
+ }
+ }
+ }
+ }
+
+ if !changed {
+ return;
+ }
+ let glob_imports = self
+ .glob_imports
+ .get(&module_id)
+ .into_iter()
+ .flatten()
+ .filter(|(glob_importing_module, _)| {
+ // we know all resolutions have the same visibility (`vis`), so we
+ // just need to check that once
+ vis.is_visible_from_def_map(self.db, &self.def_map, *glob_importing_module)
+ })
+ .cloned()
+ .collect::<Vec<_>>();
+
+ for (glob_importing_module, glob_import_vis) in glob_imports {
+ self.update_recursive(
+ glob_importing_module,
+ resolutions,
+ glob_import_vis,
+ ImportType::Glob,
+ depth + 1,
+ );
+ }
+ }
+
+ fn resolve_macros(&mut self) -> ReachedFixedPoint {
+ let mut macros = mem::take(&mut self.unresolved_macros);
+ let mut resolved = Vec::new();
+ let mut push_resolved = |directive: &MacroDirective, call_id| {
+ resolved.push((directive.module_id, directive.depth, directive.container, call_id));
+ };
+ let mut res = ReachedFixedPoint::Yes;
+ macros.retain(|directive| {
+ let resolver = |path| {
+ let resolved_res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Other,
+ directive.module_id,
+ &path,
+ BuiltinShadowMode::Module,
+ );
+ resolved_res
+ .resolved_def
+ .take_macros()
+ .map(|it| (it, macro_id_to_def_id(self.db, it)))
+ };
+ let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
+
+ match &directive.kind {
+ MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ let call_id = macro_call_as_call_id(
+ self.db,
+ ast_id,
+ *expand_to,
+ self.def_map.krate,
+ &resolver_def_id,
+ &mut |_err| (),
+ );
+ if let Ok(Ok(call_id)) = call_id {
+ push_resolved(directive, call_id);
+ res = ReachedFixedPoint::No;
+ return false;
+ }
+ }
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ let id = derive_macro_as_call_id(
+ self.db,
+ ast_id,
+ *derive_attr,
+ *derive_pos as u32,
+ self.def_map.krate,
+ &resolver,
+ );
+
+ if let Ok((macro_id, def_id, call_id)) = id {
+ self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc(
+ ast_id.ast_id,
+ call_id,
+ *derive_attr,
+ *derive_pos,
+ );
+ // Record its helper attributes.
+ if def_id.krate != self.def_map.krate {
+ let def_map = self.db.crate_def_map(def_id.krate);
+ if let Some(helpers) = def_map.exported_derives.get(&def_id) {
+ self.def_map
+ .derive_helpers_in_scope
+ .entry(ast_id.ast_id.map(|it| it.upcast()))
+ .or_default()
+ .extend(izip!(
+ helpers.iter().cloned(),
+ iter::repeat(macro_id),
+ iter::repeat(call_id),
+ ));
+ }
+ }
+
+ push_resolved(directive, call_id);
+ res = ReachedFixedPoint::No;
+ return false;
+ }
+ }
+ MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => {
+ let &AstIdWithPath { ast_id, ref path } = file_ast_id;
+ let file_id = ast_id.file_id;
+
+ let mut recollect_without = |collector: &mut Self| {
+ // Remove the original directive since we resolved it.
+ let mod_dir = collector.mod_dirs[&directive.module_id].clone();
+ collector.skip_attrs.insert(InFile::new(file_id, *mod_item), attr.id);
+
+ let item_tree = tree.item_tree(self.db);
+ ModCollector {
+ def_collector: collector,
+ macro_depth: directive.depth,
+ module_id: directive.module_id,
+ tree_id: *tree,
+ item_tree: &item_tree,
+ mod_dir,
+ }
+ .collect(&[*mod_item], directive.container);
+ res = ReachedFixedPoint::No;
+ false
+ };
+
+ if let Some(ident) = path.as_ident() {
+ if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) {
+ if helpers.iter().any(|(it, ..)| it == ident) {
+ cov_mark::hit!(resolved_derive_helper);
+ // Resolved to derive helper. Collect the item's attributes again,
+ // starting after the derive helper.
+ return recollect_without(self);
+ }
+ }
+ }
+
+ let def = match resolver_def_id(path.clone()) {
+ Some(def) if def.is_attribute() => def,
+ _ => return true,
+ };
+ if matches!(
+ def,
+ MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
+ if expander.is_derive()
+ ) {
+ // Resolved to `#[derive]`
+
+ let item_tree = tree.item_tree(self.db);
+ let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {
+ ModItem::Struct(strukt) => item_tree[strukt].ast_id().upcast(),
+ ModItem::Union(union) => item_tree[union].ast_id().upcast(),
+ ModItem::Enum(enum_) => item_tree[enum_].ast_id().upcast(),
+ _ => {
+ let diag = DefDiagnostic::invalid_derive_target(
+ directive.module_id,
+ ast_id,
+ attr.id,
+ );
+ self.def_map.diagnostics.push(diag);
+ return recollect_without(self);
+ }
+ };
+ let ast_id = ast_id.with_value(ast_adt_id);
+
+ match attr.parse_path_comma_token_tree() {
+ Some(derive_macros) => {
+ let mut len = 0;
+ for (idx, path) in derive_macros.enumerate() {
+ let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
+ self.unresolved_macros.push(MacroDirective {
+ module_id: directive.module_id,
+ depth: directive.depth + 1,
+ kind: MacroDirectiveKind::Derive {
+ ast_id,
+ derive_attr: attr.id,
+ derive_pos: idx,
+ },
+ container: directive.container,
+ });
+ len = idx;
+ }
+
+ // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
+ // This is just a trick to be able to resolve the input to derives as proper paths.
+ // Check the comment in [`builtin_attr_macro`].
+ let call_id = attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ self.def_map.krate,
+ def,
+ true,
+ );
+ self.def_map.modules[directive.module_id]
+ .scope
+ .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
+ }
+ None => {
+ let diag = DefDiagnostic::malformed_derive(
+ directive.module_id,
+ ast_id,
+ attr.id,
+ );
+ self.def_map.diagnostics.push(diag);
+ }
+ }
+
+ return recollect_without(self);
+ }
+
+ // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
+ let call_id = attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ self.def_map.krate,
+ def,
+ false,
+ );
+ let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);
+
+ // If proc attribute macro expansion is disabled, skip expanding it here
+ if !self.db.enable_proc_attr_macros() {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
+ directive.module_id,
+ loc.kind,
+ loc.def.krate,
+ ));
+ return recollect_without(self);
+ }
+
+ // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
+ // due to duplicating functions into macro expansions
+ if matches!(
+ loc.def.kind,
+ MacroDefKind::BuiltInAttr(expander, _)
+ if expander.is_test() || expander.is_bench()
+ ) {
+ return recollect_without(self);
+ }
+
+ if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+ if exp.is_dummy() {
+ // If there's no expander for the proc macro (e.g.
+ // because proc macros are disabled, or building the
+ // proc macro crate failed), report this and skip
+ // expansion like we would if it was disabled
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
+ directive.module_id,
+ loc.kind,
+ loc.def.krate,
+ ));
+
+ return recollect_without(self);
+ }
+ }
+
+ self.def_map.modules[directive.module_id]
+ .scope
+ .add_attr_macro_invoc(ast_id, call_id);
+
+ push_resolved(directive, call_id);
+ res = ReachedFixedPoint::No;
+ return false;
+ }
+ }
+
+ true
+ });
+ // Attribute resolution can add unresolved macro invocations, so concatenate the lists.
+ macros.extend(mem::take(&mut self.unresolved_macros));
+ self.unresolved_macros = macros;
+
+ for (module_id, depth, container, macro_call_id) in resolved {
+ self.collect_macro_expansion(module_id, macro_call_id, depth, container);
+ }
+
+ res
+ }
+
+ fn collect_macro_expansion(
+ &mut self,
+ module_id: LocalModuleId,
+ macro_call_id: MacroCallId,
+ depth: usize,
+ container: ItemContainerId,
+ ) {
+ if EXPANSION_DEPTH_LIMIT.check(depth).is_err() {
+ cov_mark::hit!(macro_expansion_overflow);
+ tracing::warn!("macro expansion is too deep");
+ return;
+ }
+ let file_id = macro_call_id.as_file();
+
+ // First, fetch the raw expansion result for purposes of error reporting. This goes through
+ // `macro_expand_error` to avoid depending on the full expansion result (to improve
+ // incrementality).
+ let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
+ let err = self.db.macro_expand_error(macro_call_id);
+ if let Some(err) = err {
+ let diag = match err {
+ hir_expand::ExpandError::UnresolvedProcMacro(krate) => {
+ always!(krate == loc.def.krate);
+ // Missing proc macros are non-fatal, so they are handled specially.
+ DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
+ }
+ _ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()),
+ };
+
+ self.def_map.diagnostics.push(diag);
+ }
+
+ // Then, fetch and process the item tree. This will reuse the expansion result from above.
+ let item_tree = self.db.file_item_tree(file_id);
+ let mod_dir = self.mod_dirs[&module_id].clone();
+ ModCollector {
+ def_collector: &mut *self,
+ macro_depth: depth,
+ tree_id: TreeId::new(file_id, None),
+ module_id,
+ item_tree: &item_tree,
+ mod_dir,
+ }
+ .collect(item_tree.top_level_items(), container);
+ }
+
+ fn finish(mut self) -> DefMap {
+ // Emit diagnostics for all remaining unexpanded macros.
+
+ let _p = profile::span("DefCollector::finish");
+
+ for directive in &self.unresolved_macros {
+ match &directive.kind {
+ MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ let macro_call_as_call_id = macro_call_as_call_id(
+ self.db,
+ ast_id,
+ *expand_to,
+ self.def_map.krate,
+ |path| {
+ let resolved_res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Other,
+ directive.module_id,
+ &path,
+ BuiltinShadowMode::Module,
+ );
+ resolved_res
+ .resolved_def
+ .take_macros()
+ .map(|it| macro_id_to_def_id(self.db, it))
+ },
+ &mut |_| (),
+ );
+ if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: *expand_to },
+ path,
+ ));
+ }
+ }
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::Derive {
+ ast_id: ast_id.ast_id,
+ derive_attr_index: derive_attr.ast_index,
+ derive_index: *derive_pos as u32,
+ },
+ ast_id.path.clone(),
+ ));
+ }
+ // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
+ MacroDirectiveKind::Attr { .. } => {}
+ }
+ }
+
+ // Emit diagnostics for all remaining unresolved imports.
+
+ // We'd like to avoid emitting a diagnostics avalanche when some `extern crate` doesn't
+ // resolve. We first emit diagnostics for unresolved extern crates and collect the missing
+ // crate names. Then we emit diagnostics for unresolved imports, but only if the import
+ // doesn't start with an unresolved crate's name. Due to renaming and reexports, this is a
+ // heuristic, but it works in practice.
+ let mut diagnosed_extern_crates = FxHashSet::default();
+ for directive in &self.unresolved_imports {
+ if let ImportSource::ExternCrate(krate) = directive.import.source {
+ let item_tree = krate.item_tree(self.db);
+ let extern_crate = &item_tree[krate.value];
+
+ diagnosed_extern_crates.insert(extern_crate.name.clone());
+
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_extern_crate(
+ directive.module_id,
+ InFile::new(krate.file_id(), extern_crate.ast_id),
+ ));
+ }
+ }
+
+ for directive in &self.unresolved_imports {
+ if let ImportSource::Import { id: import, use_tree } = directive.import.source {
+ if matches!(
+ (directive.import.path.segments().first(), &directive.import.path.kind),
+ (Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)
+ ) {
+ continue;
+ }
+
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_import(
+ directive.module_id,
+ import,
+ use_tree,
+ ));
+ }
+ }
+
+ self.def_map
+ }
+}
+
+/// Walks a single module, populating defs, imports and macros
+struct ModCollector<'a, 'b> {
+ def_collector: &'a mut DefCollector<'b>,
+ macro_depth: usize,
+ module_id: LocalModuleId,
+ tree_id: TreeId,
+ item_tree: &'a ItemTree,
+ mod_dir: ModDir,
+}
+
+impl ModCollector<'_, '_> {
+ fn collect_in_top_module(&mut self, items: &[ModItem]) {
+ let module = self.def_collector.def_map.module_id(self.module_id);
+ self.collect(items, module.into())
+ }
+
+ fn collect(&mut self, items: &[ModItem], container: ItemContainerId) {
+ let krate = self.def_collector.def_map.krate;
+
+ // Note: don't assert that inserted value is fresh: it's simply not true
+ // for macros.
+ self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone());
+
+ // Prelude module is always considered to be `#[macro_use]`.
+ if let Some(prelude_module) = self.def_collector.def_map.prelude {
+ if prelude_module.krate != krate {
+ cov_mark::hit!(prelude_is_macro_use);
+ self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate);
+ }
+ }
+
+ // This must be processed eagerly rather than deferred to resolution:
+ // `#[macro_use] extern crate` is hoisted so that its macros are imported before
+ // any other items are collected.
+ for &item in items {
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
+ if attrs.cfg().map_or(true, |cfg| self.is_cfg_enabled(&cfg)) {
+ if let ModItem::ExternCrate(id) = item {
+ let import = &self.item_tree[id];
+ let attrs = self.item_tree.attrs(
+ self.def_collector.db,
+ krate,
+ ModItem::from(id).into(),
+ );
+ if attrs.by_key("macro_use").exists() {
+ self.def_collector.import_macros_from_extern_crate(self.module_id, import);
+ }
+ }
+ }
+ }
+
+ for &item in items {
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
+ if let Some(cfg) = attrs.cfg() {
+ if !self.is_cfg_enabled(&cfg) {
+ self.emit_unconfigured_diagnostic(item, &cfg);
+ continue;
+ }
+ }
+
+ if let Err(()) = self.resolve_attributes(&attrs, item, container) {
+ // Do not process the item. It has at least one non-builtin attribute, so the
+ // fixed-point algorithm is required to resolve the rest of them.
+ continue;
+ }
+
+ let db = self.def_collector.db;
+ let module = self.def_collector.def_map.module_id(self.module_id);
+ let def_map = &mut self.def_collector.def_map;
+ let update_def =
+ |def_collector: &mut DefCollector<'_>, id, name: &Name, vis, has_constructor| {
+ def_collector.def_map.modules[self.module_id].scope.declare(id);
+ def_collector.update(
+ self.module_id,
+ &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))],
+ vis,
+ ImportType::Named,
+ )
+ };
+ let resolve_vis = |def_map: &DefMap, visibility| {
+ def_map
+ .resolve_visibility(db, self.module_id, visibility)
+ .unwrap_or(Visibility::Public)
+ };
+
+ match item {
+ ModItem::Mod(m) => self.collect_module(m, &attrs),
+ ModItem::Import(import_id) => {
+ let imports = Import::from_use(
+ db,
+ krate,
+ self.item_tree,
+ ItemTreeId::new(self.tree_id, import_id),
+ );
+ self.def_collector.unresolved_imports.extend(imports.into_iter().map(
+ |import| ImportDirective {
+ module_id: self.module_id,
+ import,
+ status: PartialResolvedImport::Unresolved,
+ },
+ ));
+ }
+ ModItem::ExternCrate(import_id) => {
+ self.def_collector.unresolved_imports.push(ImportDirective {
+ module_id: self.module_id,
+ import: Import::from_extern_crate(
+ db,
+ krate,
+ self.item_tree,
+ ItemTreeId::new(self.tree_id, import_id),
+ ),
+ status: PartialResolvedImport::Unresolved,
+ })
+ }
+ ModItem::ExternBlock(block) => self.collect(
+ &self.item_tree[block].children,
+ ItemContainerId::ExternBlockId(
+ ExternBlockLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, block),
+ }
+ .intern(db),
+ ),
+ ),
+ ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
+ ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
+ ModItem::MacroDef(id) => self.collect_macro_def(id, module),
+ ModItem::Impl(imp) => {
+ let impl_id =
+ ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) }
+ .intern(db);
+ self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id)
+ }
+ ModItem::Function(id) => {
+ let it = &self.item_tree[id];
+ let fn_id =
+ FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ if self.def_collector.is_proc_macro {
+ if self.module_id == def_map.root {
+ if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
+ let crate_root = def_map.module_id(def_map.root);
+ self.def_collector.export_proc_macro(
+ proc_macro,
+ ItemTreeId::new(self.tree_id, id),
+ fn_id,
+ crate_root,
+ );
+ }
+ }
+ }
+
+ update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
+ }
+ ModItem::Struct(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ !matches!(it.fields, Fields::Record(_)),
+ );
+ }
+ ModItem::Union(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::Enum(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::Const(id) => {
+ let it = &self.item_tree[id];
+ let const_id =
+ ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
+
+ match &it.name {
+ Some(name) => {
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(self.def_collector, const_id.into(), name, vis, false);
+ }
+ None => {
+ // const _: T = ...;
+ self.def_collector.def_map.modules[self.module_id]
+ .scope
+ .define_unnamed_const(const_id);
+ }
+ }
+ }
+ ModItem::Static(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::Trait(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::TypeAlias(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ }
+ }
+ }
+
+ fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
+ let path_attr = attrs.by_key("path").string_value();
+ let is_macro_use = attrs.by_key("macro_use").exists();
+ let module = &self.item_tree[module_id];
+ match &module.kind {
+ // inline module, just recurse
+ ModKind::Inline { items } => {
+ let module_id = self.push_child_module(
+ module.name.clone(),
+ AstId::new(self.file_id(), module.ast_id),
+ None,
+ &self.item_tree[module.visibility],
+ module_id,
+ );
+
+ if let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
+ {
+ ModCollector {
+ def_collector: &mut *self.def_collector,
+ macro_depth: self.macro_depth,
+ module_id,
+ tree_id: self.tree_id,
+ item_tree: self.item_tree,
+ mod_dir,
+ }
+ .collect_in_top_module(&*items);
+ if is_macro_use {
+ self.import_all_legacy_macros(module_id);
+ }
+ }
+ }
+ // out of line module, resolve, parse and recurse
+ ModKind::Outline => {
+ let ast_id = AstId::new(self.tree_id.file_id(), module.ast_id);
+ let db = self.def_collector.db;
+ match self.mod_dir.resolve_declaration(db, self.file_id(), &module.name, path_attr)
+ {
+ Ok((file_id, is_mod_rs, mod_dir)) => {
+ let item_tree = db.file_item_tree(file_id.into());
+ let krate = self.def_collector.def_map.krate;
+ let is_enabled = item_tree
+ .top_level_attrs(db, krate)
+ .cfg()
+ .map_or(true, |cfg| self.is_cfg_enabled(&cfg));
+ if is_enabled {
+ let module_id = self.push_child_module(
+ module.name.clone(),
+ ast_id,
+ Some((file_id, is_mod_rs)),
+ &self.item_tree[module.visibility],
+ module_id,
+ );
+ ModCollector {
+ def_collector: self.def_collector,
+ macro_depth: self.macro_depth,
+ module_id,
+ tree_id: TreeId::new(file_id.into(), None),
+ item_tree: &item_tree,
+ mod_dir,
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ let is_macro_use = is_macro_use
+ || item_tree
+ .top_level_attrs(db, krate)
+ .by_key("macro_use")
+ .exists();
+ if is_macro_use {
+ self.import_all_legacy_macros(module_id);
+ }
+ }
+ }
+ Err(candidates) => {
+ self.push_child_module(
+ module.name.clone(),
+ ast_id,
+ None,
+ &self.item_tree[module.visibility],
+ module_id,
+ );
+ self.def_collector.def_map.diagnostics.push(
+ DefDiagnostic::unresolved_module(self.module_id, ast_id, candidates),
+ );
+ }
+ };
+ }
+ }
+ }
+
+ fn push_child_module(
+ &mut self,
+ name: Name,
+ declaration: AstId<ast::Module>,
+ definition: Option<(FileId, bool)>,
+ visibility: &crate::visibility::RawVisibility,
+ mod_tree_id: FileItemTreeId<Mod>,
+ ) -> LocalModuleId {
+ let def_map = &mut self.def_collector.def_map;
+ let vis = def_map
+ .resolve_visibility(self.def_collector.db, self.module_id, visibility)
+ .unwrap_or(Visibility::Public);
+ let modules = &mut def_map.modules;
+ let origin = match definition {
+ None => ModuleOrigin::Inline {
+ definition: declaration,
+ definition_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
+ },
+ Some((definition, is_mod_rs)) => ModuleOrigin::File {
+ declaration,
+ definition,
+ is_mod_rs,
+ declaration_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
+ },
+ };
+
+ let res = modules.alloc(ModuleData::new(origin, vis));
+ modules[res].parent = Some(self.module_id);
+ for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() {
+ for &mac in &mac {
+ modules[res].scope.define_legacy_macro(name.clone(), mac);
+ }
+ }
+ modules[self.module_id].children.insert(name.clone(), res);
+
+ let module = def_map.module_id(res);
+ let def = ModuleDefId::from(module);
+
+ def_map.modules[self.module_id].scope.declare(def);
+ self.def_collector.update(
+ self.module_id,
+ &[(Some(name), PerNs::from_def(def, vis, false))],
+ vis,
+ ImportType::Named,
+ );
+ res
+ }
+
+ /// Resolves attributes on an item.
+ ///
+ /// Returns `Err` when some attributes could not be resolved to builtins and have been
+ /// registered as unresolved.
+ ///
+ /// If `ignore_up_to` is `Some`, attributes preceding and including that attribute will be
+ /// assumed to be resolved already.
+ fn resolve_attributes(
+ &mut self,
+ attrs: &Attrs,
+ mod_item: ModItem,
+ container: ItemContainerId,
+ ) -> Result<(), ()> {
+ let mut ignore_up_to =
+ self.def_collector.skip_attrs.get(&InFile::new(self.file_id(), mod_item)).copied();
+ let iter = attrs
+ .iter()
+ .dedup_by(|a, b| {
+ // FIXME: this should not be required, all attributes on an item should have a
+ // unique ID!
+ // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
+ // #[cfg_attr(not(off), unresolved, unresolved)]
+ // struct S;
+ // We should come up with a different way to ID attributes.
+ a.id == b.id
+ })
+ .skip_while(|attr| match ignore_up_to {
+ Some(id) if attr.id == id => {
+ ignore_up_to = None;
+ true
+ }
+ Some(_) => true,
+ None => false,
+ });
+
+ for attr in iter {
+ if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
+ continue;
+ }
+ tracing::debug!("non-builtin attribute {}", attr.path);
+
+ let ast_id = AstIdWithPath::new(
+ self.file_id(),
+ mod_item.ast_id(self.item_tree),
+ attr.path.as_ref().clone(),
+ );
+ self.def_collector.unresolved_macros.push(MacroDirective {
+ module_id: self.module_id,
+ depth: self.macro_depth + 1,
+ kind: MacroDirectiveKind::Attr {
+ ast_id,
+ attr: attr.clone(),
+ mod_item,
+ tree: self.tree_id,
+ },
+ container,
+ });
+
+ return Err(());
+ }
+
+ Ok(())
+ }
+
+ fn collect_macro_rules(&mut self, id: FileItemTreeId<MacroRules>, module: ModuleId) {
+ let krate = self.def_collector.def_map.krate;
+ let mac = &self.item_tree[id];
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
+ let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
+
+ let export_attr = attrs.by_key("macro_export");
+
+ let is_export = export_attr.exists();
+ let local_inner = if is_export {
+ export_attr.tt_values().flat_map(|it| &it.token_trees).any(|it| match it {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ ident.text.contains("local_inner_macros")
+ }
+ _ => false,
+ })
+ } else {
+ false
+ };
+
+ // Case 1: builtin macros
+ let expander = if attrs.by_key("rustc_builtin_macro").exists() {
+ // `#[rustc_builtin_macro = "builtin_name"]` overrides the `macro_rules!` name.
+ let name;
+ let name = match attrs.by_key("rustc_builtin_macro").string_value() {
+ Some(it) => {
+ // FIXME: a hacky way to create a Name from string.
+ name = tt::Ident { text: it.clone(), id: tt::TokenId::unspecified() }.as_name();
+ &name
+ }
+ None => {
+ let explicit_name =
+ attrs.by_key("rustc_builtin_macro").tt_values().next().and_then(|tt| {
+ match tt.token_trees.first() {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name),
+ _ => None,
+ }
+ });
+ match explicit_name {
+ Some(ident) => {
+ name = ident.as_name();
+ &name
+ }
+ None => &mac.name,
+ }
+ }
+ };
+ match find_builtin_macro(name) {
+ Some(Either::Left(it)) => MacroExpander::BuiltIn(it),
+ Some(Either::Right(it)) => MacroExpander::BuiltInEager(it),
+ None => {
+ self.def_collector
+ .def_map
+ .diagnostics
+ .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
+ return;
+ }
+ }
+ } else {
+ // Case 2: normal `macro_rules!` macro
+ MacroExpander::Declarative
+ };
+
+ let macro_id = MacroRulesLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, id),
+ local_inner,
+ expander,
+ }
+ .intern(self.def_collector.db);
+ self.def_collector.define_macro_rules(
+ self.module_id,
+ mac.name.clone(),
+ macro_id,
+ is_export,
+ );
+ }
+
+ fn collect_macro_def(&mut self, id: FileItemTreeId<MacroDef>, module: ModuleId) {
+ let krate = self.def_collector.def_map.krate;
+ let mac = &self.item_tree[id];
+ let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
+
+ // Case 1: builtin macros
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
+ let expander = if attrs.by_key("rustc_builtin_macro").exists() {
+ if let Some(expander) = find_builtin_macro(&mac.name) {
+ match expander {
+ Either::Left(it) => MacroExpander::BuiltIn(it),
+ Either::Right(it) => MacroExpander::BuiltInEager(it),
+ }
+ } else if let Some(expander) = find_builtin_derive(&mac.name) {
+ MacroExpander::BuiltInDerive(expander)
+ } else if let Some(expander) = find_builtin_attr(&mac.name) {
+ MacroExpander::BuiltInAttr(expander)
+ } else {
+ self.def_collector
+ .def_map
+ .diagnostics
+ .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
+ return;
+ }
+ } else {
+ // Case 2: normal `macro`
+ MacroExpander::Declarative
+ };
+
+ let macro_id =
+ Macro2Loc { container: module, id: ItemTreeId::new(self.tree_id, id), expander }
+ .intern(self.def_collector.db);
+ self.def_collector.define_macro_def(
+ self.module_id,
+ mac.name.clone(),
+ macro_id,
+ &self.item_tree[mac.visibility],
+ );
+ }
+
+ fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
+ let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
+
+ // Case 1: try to resolve in legacy scope and expand macro_rules
+ let mut error = None;
+ match macro_call_as_call_id(
+ self.def_collector.db,
+ &ast_id,
+ mac.expand_to,
+ self.def_collector.def_map.krate,
+ |path| {
+ path.as_ident().and_then(|name| {
+ self.def_collector.def_map.with_ancestor_maps(
+ self.def_collector.db,
+ self.module_id,
+ &mut |map, module| {
+ map[module]
+ .scope
+ .get_legacy_macro(name)
+ .and_then(|it| it.last())
+ .map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
+ },
+ )
+ })
+ },
+ &mut |err| {
+ error.get_or_insert(err);
+ },
+ ) {
+ Ok(Ok(macro_call_id)) => {
+ // Legacy macros need to be expanded immediately, so that any macros they produce
+ // are in scope.
+ self.def_collector.collect_macro_expansion(
+ self.module_id,
+ macro_call_id,
+ self.macro_depth + 1,
+ container,
+ );
+
+ if let Some(err) = error {
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
+ self.module_id,
+ MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: mac.expand_to },
+ err.to_string(),
+ ));
+ }
+
+ return;
+ }
+ Ok(Err(_)) => {
+ // Built-in macro failed eager expansion.
+
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
+ self.module_id,
+ MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: mac.expand_to },
+ error.unwrap().to_string(),
+ ));
+ return;
+ }
+ Err(UnresolvedMacro { .. }) => (),
+ }
+
+ // Case 2: resolve in module scope, expand during name resolution.
+ self.def_collector.unresolved_macros.push(MacroDirective {
+ module_id: self.module_id,
+ depth: self.macro_depth + 1,
+ kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
+ container,
+ });
+ }
+
+ fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
+ let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros();
+ for (name, macs) in macros {
+ macs.last().map(|&mac| {
+ self.def_collector.define_legacy_macro(self.module_id, name.clone(), mac)
+ });
+ }
+ }
+
+ fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
+ self.def_collector.cfg_options.check(cfg) != Some(false)
+ }
+
+ fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
+ let ast_id = item.ast_id(self.item_tree);
+
+ let ast_id = InFile::new(self.file_id(), ast_id);
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
+ self.module_id,
+ ast_id,
+ cfg.clone(),
+ self.def_collector.cfg_options.clone(),
+ ));
+ }
+
+ fn file_id(&self) -> HirFileId {
+ self.tree_id.file_id()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{db::DefDatabase, test_db::TestDB};
+ use base_db::{fixture::WithFixture, SourceDatabase};
+
+ use super::*;
+
+ fn do_collect_defs(db: &dyn DefDatabase, def_map: DefMap) -> DefMap {
+ let mut collector = DefCollector {
+ db,
+ def_map,
+ deps: FxHashMap::default(),
+ glob_imports: FxHashMap::default(),
+ unresolved_imports: Vec::new(),
+ indeterminate_imports: Vec::new(),
+ unresolved_macros: Vec::new(),
+ mod_dirs: FxHashMap::default(),
+ cfg_options: &CfgOptions::default(),
+ proc_macros: Default::default(),
+ from_glob_import: Default::default(),
+ skip_attrs: Default::default(),
+ is_proc_macro: false,
+ };
+ collector.seed_with_top_level();
+ collector.collect();
+ collector.def_map
+ }
+
+ fn do_resolve(not_ra_fixture: &str) -> DefMap {
+ let (db, file_id) = TestDB::with_single_file(not_ra_fixture);
+ let krate = db.test_crate();
+
+ let edition = db.crate_graph()[krate].edition;
+ let module_origin = ModuleOrigin::CrateRoot { definition: file_id };
+ let def_map =
+ DefMap::empty(krate, edition, ModuleData::new(module_origin, Visibility::Public));
+ do_collect_defs(&db, def_map)
+ }
+
+ #[test]
+ fn test_macro_expand_will_stop_1() {
+ do_resolve(
+ r#"
+macro_rules! foo {
+ ($($ty:ty)*) => { foo!($($ty)*); }
+}
+foo!(KABOOM);
+"#,
+ );
+ do_resolve(
+ r#"
+macro_rules! foo {
+ ($($ty:ty)*) => { foo!(() $($ty)*); }
+}
+foo!(KABOOM);
+"#,
+ );
+ }
+
+ #[ignore]
+ #[test]
+ fn test_macro_expand_will_stop_2() {
+ // FIXME: this test does succeed, but takes quite a while: 90 seconds in
+ // the release mode. That's why the argument is not an ra_fixture --
+ // otherwise injection highlighting gets stuck.
+ //
+ // We need to find a way to fail this faster.
+ do_resolve(
+ r#"
+macro_rules! foo {
+ ($($ty:ty)*) => { foo!($($ty)* $($ty)*); }
+}
+foo!(KABOOM);
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
new file mode 100644
index 000000000..0d01f6d0a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -0,0 +1,137 @@
+//! Diagnostics emitted during DefMap construction.
+
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use hir_expand::MacroCallKind;
+use la_arena::Idx;
+use syntax::ast;
+
+use crate::{
+ attr::AttrId,
+ item_tree::{self, ItemTreeId},
+ nameres::LocalModuleId,
+ path::ModPath,
+ AstId,
+};
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum DefDiagnosticKind {
+ UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
+
+ UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
+
+ UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
+
+ UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions },
+
+ UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
+
+ UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
+
+ MacroError { ast: MacroCallKind, message: String },
+
+ UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
+
+ InvalidDeriveTarget { ast: AstId<ast::Item>, id: u32 },
+
+ MalformedDerive { ast: AstId<ast::Adt>, id: u32 },
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct DefDiagnostic {
+ pub in_module: LocalModuleId,
+ pub kind: DefDiagnosticKind,
+}
+
+impl DefDiagnostic {
+ pub(super) fn unresolved_module(
+ container: LocalModuleId,
+ declaration: AstId<ast::Module>,
+ candidates: Box<[String]>,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates },
+ }
+ }
+
+ pub(super) fn unresolved_extern_crate(
+ container: LocalModuleId,
+ declaration: AstId<ast::ExternCrate>,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::UnresolvedExternCrate { ast: declaration },
+ }
+ }
+
+ pub(super) fn unresolved_import(
+ container: LocalModuleId,
+ id: ItemTreeId<item_tree::Import>,
+ index: Idx<ast::UseTree>,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
+ }
+
+ pub(super) fn unconfigured_code(
+ container: LocalModuleId,
+ ast: AstId<ast::Item>,
+ cfg: CfgExpr,
+ opts: CfgOptions,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
+ }
+
+ pub(super) fn unresolved_proc_macro(
+ container: LocalModuleId,
+ ast: MacroCallKind,
+ krate: CrateId,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnresolvedProcMacro { ast, krate } }
+ }
+
+ pub(super) fn macro_error(
+ container: LocalModuleId,
+ ast: MacroCallKind,
+ message: String,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, message } }
+ }
+
+ pub(super) fn unresolved_macro_call(
+ container: LocalModuleId,
+ ast: MacroCallKind,
+ path: ModPath,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnresolvedMacroCall { ast, path } }
+ }
+
+ pub(super) fn unimplemented_builtin_macro(
+ container: LocalModuleId,
+ ast: AstId<ast::Macro>,
+ ) -> Self {
+ Self { in_module: container, kind: DefDiagnosticKind::UnimplementedBuiltinMacro { ast } }
+ }
+
+ pub(super) fn invalid_derive_target(
+ container: LocalModuleId,
+ ast: AstId<ast::Item>,
+ id: AttrId,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index },
+ }
+ }
+
+ pub(super) fn malformed_derive(
+ container: LocalModuleId,
+ ast: AstId<ast::Adt>,
+ id: AttrId,
+ ) -> Self {
+ Self {
+ in_module: container,
+ kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index },
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
new file mode 100644
index 000000000..52a620fe2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -0,0 +1,161 @@
+//! This module resolves a `mod foo;` declaration to a file.
+use arrayvec::ArrayVec;
+use base_db::{AnchoredPath, FileId};
+use hir_expand::name::Name;
+use limit::Limit;
+use syntax::SmolStr;
+
+use crate::{db::DefDatabase, HirFileId};
+
+const MOD_DEPTH_LIMIT: Limit = Limit::new(32);
+
+#[derive(Clone, Debug)]
+pub(super) struct ModDir {
+ /// `` for `mod.rs`, `lib.rs`
+ /// `foo/` for `foo.rs`
+ /// `foo/bar/` for `mod bar { mod x; }` nested in `foo.rs`
+ /// Invariant: path.is_empty() || path.ends_with('/')
+ dir_path: DirPath,
+ /// inside `./foo.rs`, mods with `#[path]` should *not* be relative to `./foo/`
+ root_non_dir_owner: bool,
+ depth: u32,
+}
+
+impl ModDir {
+ pub(super) fn root() -> ModDir {
+ ModDir { dir_path: DirPath::empty(), root_non_dir_owner: false, depth: 0 }
+ }
+
+ pub(super) fn descend_into_definition(
+ &self,
+ name: &Name,
+ attr_path: Option<&SmolStr>,
+ ) -> Option<ModDir> {
+ let path = match attr_path.map(SmolStr::as_str) {
+ None => {
+ let mut path = self.dir_path.clone();
+ path.push(&name.to_smol_str());
+ path
+ }
+ Some(attr_path) => {
+ let mut path = self.dir_path.join_attr(attr_path, self.root_non_dir_owner);
+ if !(path.is_empty() || path.ends_with('/')) {
+ path.push('/')
+ }
+ DirPath::new(path)
+ }
+ };
+ self.child(path, false)
+ }
+
+ fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
+ let depth = self.depth + 1;
+ if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
+ tracing::error!("MOD_DEPTH_LIMIT exceeded");
+ cov_mark::hit!(circular_mods);
+ return None;
+ }
+ Some(ModDir { dir_path, root_non_dir_owner, depth })
+ }
+
+ pub(super) fn resolve_declaration(
+ &self,
+ db: &dyn DefDatabase,
+ file_id: HirFileId,
+ name: &Name,
+ attr_path: Option<&SmolStr>,
+ ) -> Result<(FileId, bool, ModDir), Box<[String]>> {
+ let orig_file_id = file_id.original_file(db.upcast());
+
+ let mut candidate_files = ArrayVec::<_, 2>::new();
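+        // A `mod name;` item may live in `name.rs` or `name/mod.rs`; an explicit
+        // `#[path]` attribute overrides both candidates.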
+ match attr_path {
+ Some(attr_path) => {
+ candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
+ }
+ None if file_id.is_include_macro(db.upcast()) => {
+ candidate_files.push(format!("{}.rs", name));
+ candidate_files.push(format!("{}/mod.rs", name));
+ }
+ None => {
+ candidate_files.push(format!("{}{}.rs", self.dir_path.0, name));
+ candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name));
+ }
+ };
+
+ for candidate in candidate_files.iter() {
+ let path = AnchoredPath { anchor: orig_file_id, path: candidate.as_str() };
+ if let Some(file_id) = db.resolve_path(path) {
+ let is_mod_rs = candidate.ends_with("/mod.rs");
+
+ let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() {
+ (DirPath::empty(), false)
+ } else {
+ (DirPath::new(format!("{}/", name)), true)
+ };
+ if let Some(mod_dir) = self.child(dir_path, root_non_dir_owner) {
+ return Ok((file_id, is_mod_rs, mod_dir));
+ }
+ }
+ }
+ Err(candidate_files.into_iter().collect())
+ }
+}
+
+#[derive(Clone, Debug)]
+struct DirPath(String);
+
+impl DirPath {
+ fn assert_invariant(&self) {
+ assert!(self.0.is_empty() || self.0.ends_with('/'));
+ }
+ fn new(repr: String) -> DirPath {
+ let res = DirPath(repr);
+ res.assert_invariant();
+ res
+ }
+ fn empty() -> DirPath {
+ DirPath::new(String::new())
+ }
+ fn push(&mut self, name: &str) {
+ self.0.push_str(name);
+ self.0.push('/');
+ self.assert_invariant();
+ }
+ fn parent(&self) -> Option<&str> {
+ if self.0.is_empty() {
+ return None;
+ };
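+        // Strip the trailing '/' and cut back to just past the previous '/', if any.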
+ let idx =
+ self.0[..self.0.len() - '/'.len_utf8()].rfind('/').map_or(0, |it| it + '/'.len_utf8());
+ Some(&self.0[..idx])
+ }
+    /// So this is the case which, I think, doesn't really work if we try to be
+    /// 100% platform agnostic:
+ ///
+ /// ```
+ /// mod a {
+ /// #[path="C://sad/face"]
+ /// mod b { mod c; }
+ /// }
+ /// ```
+ ///
+ /// Here, we need to join logical dir path to a string path from an
+ /// attribute. Ideally, we should somehow losslessly communicate the whole
+ /// construction to `FileLoader`.
+ fn join_attr(&self, mut attr: &str, relative_to_parent: bool) -> String {
+ let base = if relative_to_parent { self.parent().unwrap() } else { &self.0 };
+
+ if attr.starts_with("./") {
+ attr = &attr["./".len()..];
+ }
+ let tmp;
+ let attr = if attr.contains('\\') {
+ tmp = attr.replace('\\', "/");
+ &tmp
+ } else {
+ attr
+ };
+ let res = format!("{}{}", base, attr);
+ res
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
new file mode 100644
index 000000000..c579bc919
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -0,0 +1,448 @@
+//! This module implements a function to resolve a path `foo::bar::baz` to a
+//! def, which is used within name resolution.
+//!
+//! When name resolution is finished, the result of resolving a path is either
+//! `Some(def)` or `None`. However, while we are in the process of resolving
+//! imports or macros, there is a third possibility:
+//!
+//!     I can't resolve this path right now, but I might be able to resolve it
+//!     later, when more macros are expanded.
+//!
+//! `ReachedFixedPoint` signals this.
+
+use base_db::Edition;
+use hir_expand::name::Name;
+
+use crate::{
+ db::DefDatabase,
+ item_scope::BUILTIN_SCOPE,
+ nameres::{BuiltinShadowMode, DefMap},
+ path::{ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, CrateId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId,
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(super) enum ResolveMode {
+ Import,
+ Other,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(super) enum ReachedFixedPoint {
+ Yes,
+ No,
+}
+
+#[derive(Debug, Clone)]
+pub(super) struct ResolvePathResult {
+ pub(super) resolved_def: PerNs,
+ pub(super) segment_index: Option<usize>,
+ pub(super) reached_fixedpoint: ReachedFixedPoint,
+ pub(super) krate: Option<CrateId>,
+}
+
+impl ResolvePathResult {
+ fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult {
+ ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None)
+ }
+
+ fn with(
+ resolved_def: PerNs,
+ reached_fixedpoint: ReachedFixedPoint,
+ segment_index: Option<usize>,
+ krate: Option<CrateId>,
+ ) -> ResolvePathResult {
+ ResolvePathResult { resolved_def, segment_index, reached_fixedpoint, krate }
+ }
+}
+
+impl DefMap {
+ pub(super) fn resolve_name_in_extern_prelude(
+ &self,
+ db: &dyn DefDatabase,
+ name: &Name,
+ ) -> Option<ModuleId> {
+ match self.block {
+ Some(_) => self.crate_root(db).def_map(db).extern_prelude.get(name).copied(),
+ None => self.extern_prelude.get(name).copied(),
+ }
+ }
+
+ pub(crate) fn resolve_visibility(
+ &self,
+ db: &dyn DefDatabase,
+ original_module: LocalModuleId,
+ visibility: &RawVisibility,
+ ) -> Option<Visibility> {
+ let mut vis = match visibility {
+ RawVisibility::Module(path) => {
+ let (result, remaining) =
+ self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
+ if remaining.is_some() {
+ return None;
+ }
+ let types = result.take_types()?;
+ match types {
+ ModuleDefId::ModuleId(m) => Visibility::Module(m),
+ _ => {
+ // error: visibility needs to refer to module
+ return None;
+ }
+ }
+ }
+ RawVisibility::Public => Visibility::Public,
+ };
+
+ // In block expressions, `self` normally refers to the containing non-block module, and
+ // `super` to its parent (etc.). However, visibilities must only refer to a module in the
+ // DefMap they're written in, so we restrict them when that happens.
+ if let Visibility::Module(m) = vis {
+ if self.block_id() != m.block {
+ cov_mark::hit!(adjust_vis_in_block_def_map);
+ vis = Visibility::Module(self.module_id(self.root()));
+ tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis);
+ }
+ }
+
+ Some(vis)
+ }
+
+ // Returns Yes if we are sure that additions to `ItemMap` wouldn't change
+ // the result.
+ pub(super) fn resolve_path_fp_with_macro(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
+ mut original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let mut result = ResolvePathResult::empty(ReachedFixedPoint::No);
+
+ let mut arc;
+ let mut current_map = self;
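+        // Resolve in this `DefMap` first; block-expression `DefMap`s then defer to
+        // their parent maps, merging the results, until the crate `DefMap` is reached.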
+ loop {
+ let new = current_map.resolve_path_fp_with_macro_single(
+ db,
+ mode,
+ original_module,
+ path,
+ shadow,
+ );
+
+ // Merge `new` into `result`.
+ result.resolved_def = result.resolved_def.or(new.resolved_def);
+ if result.reached_fixedpoint == ReachedFixedPoint::No {
+ result.reached_fixedpoint = new.reached_fixedpoint;
+ }
+ // FIXME: this doesn't seem right; what if the different namespace resolutions come from different crates?
+ result.krate = result.krate.or(new.krate);
+ result.segment_index = match (result.segment_index, new.segment_index) {
+ (Some(idx), None) => Some(idx),
+ (Some(old), Some(new)) => Some(old.max(new)),
+ (None, new) => new,
+ };
+
+ match &current_map.block {
+ Some(block) => {
+ original_module = block.parent.local_id;
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+ None => return result,
+ }
+ }
+ }
+
+ pub(super) fn resolve_path_fp_with_macro_single(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let graph = db.crate_graph();
+ let _cx = stdx::panic_context::enter(format!(
+ "DefMap {:?} crate_name={:?} block={:?} path={}",
+ self.krate, graph[self.krate].display_name, self.block, path
+ ));
+
+ let mut segments = path.segments().iter().enumerate();
+ let mut curr_per_ns: PerNs = match path.kind {
+ PathKind::DollarCrate(krate) => {
+ if krate == self.krate {
+ cov_mark::hit!(macro_dollar_crate_self);
+ PerNs::types(self.crate_root(db).into(), Visibility::Public)
+ } else {
+ let def_map = db.crate_def_map(krate);
+ let module = def_map.module_id(def_map.root);
+ cov_mark::hit!(macro_dollar_crate_other);
+ PerNs::types(module.into(), Visibility::Public)
+ }
+ }
+ PathKind::Crate => PerNs::types(self.crate_root(db).into(), Visibility::Public),
+ // plain import or absolute path in 2015: crate-relative with
+ // fallback to extern prelude (with the simplification in
+ // rust-lang/rust#57745)
+ // FIXME there must be a nicer way to write this condition
+ PathKind::Plain | PathKind::Abs
+ if self.edition == Edition::Edition2015
+ && (path.kind == PathKind::Abs || mode == ResolveMode::Import) =>
+ {
+ let (_, segment) = match segments.next() {
+ Some((idx, segment)) => (idx, segment),
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
+ self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
+ }
+ PathKind::Plain => {
+ let (_, segment) = match segments.next() {
+ Some((idx, segment)) => (idx, segment),
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ // The first segment may be a builtin type. If the path has more
+ // than one segment, we first try resolving it as a module
+ // anyway.
+ // FIXME: If the next segment doesn't resolve in the module and
+ // BuiltinShadowMode wasn't Module, then we need to try
+ // resolving it as a builtin.
+ let prefer_module =
+ if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };
+
+ tracing::debug!("resolving {:?} in module", segment);
+ self.resolve_name_in_module(db, original_module, segment, prefer_module)
+ }
+ PathKind::Super(lvl) => {
+ let mut module = original_module;
+ for i in 0..lvl {
+ match self.modules[module].parent {
+ Some(it) => module = it,
+ None => match &self.block {
+ Some(block) => {
+ // Look up remaining path in parent `DefMap`
+ let new_path = ModPath::from_segments(
+ PathKind::Super(lvl - i),
+ path.segments().to_vec(),
+ );
+ tracing::debug!(
+ "`super` path: {} -> {} in parent map",
+ path,
+ new_path
+ );
+ return block.parent.def_map(db).resolve_path_fp_with_macro(
+ db,
+ mode,
+ block.parent.local_id,
+ &new_path,
+ shadow,
+ );
+ }
+ None => {
+ tracing::debug!("super path in root module");
+ return ResolvePathResult::empty(ReachedFixedPoint::Yes);
+ }
+ },
+ }
+ }
+
+ // Resolve `self` to the containing crate-rooted module if we're a block
+ self.with_ancestor_maps(db, module, &mut |def_map, module| {
+ if def_map.block.is_some() {
+ None // keep ascending
+ } else {
+ Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public))
+ }
+ })
+ .expect("block DefMap not rooted in crate DefMap")
+ }
+ PathKind::Abs => {
+ // 2018-style absolute path -- only extern prelude
+ let segment = match segments.next() {
+ Some((_, segment)) => segment,
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ if let Some(&def) = self.extern_prelude.get(segment) {
+ tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def);
+ PerNs::types(def.into(), Visibility::Public)
+ } else {
+ return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude
+ }
+ }
+ };
+
+ for (i, segment) in segments {
+ let (curr, vis) = match curr_per_ns.take_types_vis() {
+ Some(r) => r,
+ None => {
+ // we still have path segments left, but the path so far
+ // didn't resolve in the types namespace => no resolution
+ // (don't break here because `curr_per_ns` might contain
+ // something in the value namespace, and it would be wrong
+ // to return that)
+ return ResolvePathResult::empty(ReachedFixedPoint::No);
+ }
+ };
+ // resolve segment in curr
+
+ curr_per_ns = match curr {
+ ModuleDefId::ModuleId(module) => {
+ if module.krate != self.krate {
+ let path = ModPath::from_segments(
+ PathKind::Super(0),
+ path.segments()[i..].iter().cloned(),
+ );
+ tracing::debug!("resolving {:?} in other crate", path);
+ let defp_map = module.def_map(db);
+ let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow);
+ return ResolvePathResult::with(
+ def,
+ ReachedFixedPoint::Yes,
+ s.map(|s| s + i),
+ Some(module.krate),
+ );
+ }
+
+ let def_map;
+ let module_data = if module.block == self.block_id() {
+ &self[module.local_id]
+ } else {
+ def_map = module.def_map(db);
+ &def_map[module.local_id]
+ };
+
+                    // Since it is a qualified path here, it should not contain legacy macros
+ module_data.scope.get(segment)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(e)) => {
+ // enum variant
+ cov_mark::hit!(can_import_enum_variant);
+ let enum_data = db.enum_data(e);
+ match enum_data.variant(segment) {
+ Some(local_id) => {
+ let variant = EnumVariantId { parent: e, local_id };
+ match &*enum_data.variants[local_id].variant_data {
+ crate::adt::VariantData::Record(_) => {
+ PerNs::types(variant.into(), Visibility::Public)
+ }
+ crate::adt::VariantData::Tuple(_)
+ | crate::adt::VariantData::Unit => {
+ PerNs::both(variant.into(), variant.into(), Visibility::Public)
+ }
+ }
+ }
+ None => {
+ return ResolvePathResult::with(
+ PerNs::types(e.into(), vis),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ Some(self.krate),
+ );
+ }
+ }
+ }
+ s => {
+ // could be an inherent method call in UFCS form
+ // (`Struct::method`), or some other kind of associated item
+ tracing::debug!(
+ "path segment {:?} resolved to non-module {:?}, but is not last",
+ segment,
+ curr,
+ );
+
+ return ResolvePathResult::with(
+ PerNs::types(s, vis),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ Some(self.krate),
+ );
+ }
+ };
+ }
+
+ ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate))
+ }
+
+ fn resolve_name_in_module(
+ &self,
+ db: &dyn DefDatabase,
+ module: LocalModuleId,
+ name: &Name,
+ shadow: BuiltinShadowMode,
+ ) -> PerNs {
+ // Resolve in:
+ // - legacy scope of macro
+ // - current module / scope
+ // - extern prelude
+ // - std prelude
+ let from_legacy_macro = self[module]
+ .scope
+ .get_legacy_macro(name)
+ // FIXME: shadowing
+ .and_then(|it| it.last())
+ .map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
+ let from_scope = self[module].scope.get(name);
+ let from_builtin = match self.block {
+ Some(_) => {
+ // Only resolve to builtins in the root `DefMap`.
+ PerNs::none()
+ }
+ None => BUILTIN_SCOPE.get(name).copied().unwrap_or_else(PerNs::none),
+ };
+ let from_scope_or_builtin = match shadow {
+ BuiltinShadowMode::Module => from_scope.or(from_builtin),
+ BuiltinShadowMode::Other => match from_scope.take_types() {
+ Some(ModuleDefId::ModuleId(_)) => from_builtin.or(from_scope),
+ Some(_) | None => from_scope.or(from_builtin),
+ },
+ };
+ let from_extern_prelude = self
+ .extern_prelude
+ .get(name)
+ .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public));
+
+ let from_prelude = self.resolve_in_prelude(db, name);
+
+ from_legacy_macro.or(from_scope_or_builtin).or(from_extern_prelude).or(from_prelude)
+ }
+
+ fn resolve_name_in_crate_root_or_extern_prelude(
+ &self,
+ db: &dyn DefDatabase,
+ name: &Name,
+ ) -> PerNs {
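+        // `arc` keeps the crate-root `DefMap` alive while we borrow from it below.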
+ let arc;
+ let crate_def_map = match self.block {
+ Some(_) => {
+ arc = self.crate_root(db).def_map(db);
+ &arc
+ }
+ None => self,
+ };
+ let from_crate_root = crate_def_map[crate_def_map.root].scope.get(name);
+ let from_extern_prelude = self
+ .resolve_name_in_extern_prelude(db, name)
+ .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public));
+
+ from_crate_root.or(from_extern_prelude)
+ }
+
+ fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
+ if let Some(prelude) = self.prelude {
+ let keep;
+ let def_map = if prelude.krate == self.krate {
+ self
+ } else {
+ // Extend lifetime
+ keep = prelude.def_map(db);
+ &keep
+ };
+ def_map[prelude.local_id].scope.get(name)
+ } else {
+ PerNs::none()
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
new file mode 100644
index 000000000..5089ef2d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
@@ -0,0 +1,81 @@
+//! Nameres-specific procedural macro data and helpers.
+
+use hir_expand::name::{AsName, Name};
+use tt::{Leaf, TokenTree};
+
+use crate::attr::Attrs;
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ProcMacroDef {
+ pub name: Name,
+ pub kind: ProcMacroKind,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum ProcMacroKind {
+ CustomDerive { helpers: Box<[Name]> },
+ FnLike,
+ Attr,
+}
+
+impl ProcMacroKind {
+ pub(super) fn to_basedb_kind(&self) -> base_db::ProcMacroKind {
+ match self {
+ ProcMacroKind::CustomDerive { .. } => base_db::ProcMacroKind::CustomDerive,
+ ProcMacroKind::FnLike => base_db::ProcMacroKind::FuncLike,
+ ProcMacroKind::Attr => base_db::ProcMacroKind::Attr,
+ }
+ }
+}
+
+impl Attrs {
+ #[rustfmt::skip]
+ pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
+ if self.is_proc_macro() {
+ Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::FnLike })
+ } else if self.is_proc_macro_attribute() {
+ Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr })
+ } else if self.by_key("proc_macro_derive").exists() {
+ let derive = self.by_key("proc_macro_derive").tt_values().next()?;
+
+ match &*derive.token_trees {
+ // `#[proc_macro_derive(Trait)]`
+ [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some(ProcMacroDef {
+ name: trait_name.as_name(),
+ kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) },
+ }),
+
+                // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]`
+ [
+ TokenTree::Leaf(Leaf::Ident(trait_name)),
+ TokenTree::Leaf(Leaf::Punct(comma)),
+ TokenTree::Leaf(Leaf::Ident(attributes)),
+ TokenTree::Subtree(helpers)
+ ] if comma.char == ',' && attributes.text == "attributes" =>
+ {
+ let helpers = helpers.token_trees.iter()
+ .filter(|tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ','))
+ .map(|tt| {
+ match tt {
+ TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
+ _ => None
+ }
+ })
+ .collect::<Option<Box<[_]>>>()?;
+
+ Some(ProcMacroDef {
+ name: trait_name.as_name(),
+ kind: ProcMacroKind::CustomDerive { helpers },
+ })
+ }
+
+ _ => {
+ tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive);
+ None
+ }
+ }
+ } else {
+ None
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
new file mode 100644
index 000000000..70dd2eb3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -0,0 +1,933 @@
+mod globs;
+mod incremental;
+mod macros;
+mod mod_resolution;
+mod primitives;
+
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, SourceDatabase};
+use expect_test::{expect, Expect};
+
+use crate::{db::DefDatabase, test_db::TestDB};
+
+use super::DefMap;
+
+fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
+ let db = TestDB::with_files(ra_fixture);
+ let krate = db.crate_graph().iter().next().unwrap();
+ db.crate_def_map(krate)
+}
+
+fn render_crate_def_map(ra_fixture: &str) -> String {
+ let db = TestDB::with_files(ra_fixture);
+ let krate = db.crate_graph().iter().next().unwrap();
+ db.crate_def_map(krate).dump(&db)
+}
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = render_crate_def_map(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn crate_def_map_smoke_test() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+struct S;
+use crate::foo::bar::E;
+use self::E::V;
+
+//- /foo/mod.rs
+pub mod bar;
+fn f() {}
+
+//- /foo/bar.rs
+pub struct Baz;
+
+union U { to_be: bool, not_to_be: u8 }
+enum E { V }
+
+extern {
+ type Ext;
+ static EXT: u8;
+ fn ext();
+}
+"#,
+ expect![[r#"
+ crate
+ E: t
+ S: t v
+ V: t v
+ foo: t
+
+ crate::foo
+ bar: t
+ f: v
+
+ crate::foo::bar
+ Baz: t v
+ E: t
+ EXT: v
+ Ext: t
+ U: t
+ ext: v
+ "#]],
+ );
+}
+
+#[test]
+fn crate_def_map_super_super() {
+ check(
+ r#"
+mod a {
+ const A: usize = 0;
+ mod b {
+ const B: usize = 0;
+ mod c {
+ use super::super::*;
+ }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ A: v
+ b: t
+
+ crate::a::b
+ B: v
+ c: t
+
+ crate::a::b::c
+ A: v
+ b: t
+ "#]],
+ );
+}
+
+#[test]
+fn crate_def_map_fn_mod_same_name() {
+ check(
+ r#"
+mod m {
+ pub mod z {}
+ pub fn z() {}
+}
+"#,
+ expect![[r#"
+ crate
+ m: t
+
+ crate::m
+ z: t v
+
+ crate::m::z
+ "#]],
+ );
+}
+
+#[test]
+fn bogus_paths() {
+ cov_mark::check!(bogus_paths);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+struct S;
+use self;
+
+//- /foo/mod.rs
+use super;
+use crate;
+"#,
+ expect![[r#"
+ crate
+ S: t v
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn use_as() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::Baz as Foo;
+
+//- /foo/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Foo: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn use_trees() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::bar::{Baz, Quux};
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub struct Baz;
+pub enum Quux {};
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Quux: t
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ Quux: t
+ "#]],
+ );
+}
+
+#[test]
+fn re_exports() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use self::foo::Baz;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn std_prelude() {
+ cov_mark::check!(std_prelude);
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+#[prelude_import]
+use ::test_crate::prelude::*;
+
+use Foo::*;
+
+//- /lib.rs crate:test_crate
+pub mod prelude;
+
+//- /prelude.rs
+pub enum Foo { Bar, Baz }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn can_import_enum_variant() {
+ cov_mark::check!(can_import_enum_variant);
+ check(
+ r#"
+enum E { V }
+use self::E::V;
+"#,
+ expect![[r#"
+ crate
+ E: t
+ V: t v
+ "#]],
+ );
+}
+
+#[test]
+fn edition_2015_imports() {
+ check(
+ r#"
+//- /main.rs crate:main deps:other_crate edition:2015
+mod foo;
+mod bar;
+
+//- /bar.rs
+struct Bar;
+
+//- /foo.rs
+use bar::Bar;
+use other_crate::FromLib;
+
+//- /lib.rs crate:other_crate edition:2018
+pub struct FromLib;
+"#,
+ expect![[r#"
+ crate
+ bar: t
+ foo: t
+
+ crate::bar
+ Bar: t v
+
+ crate::foo
+ Bar: t v
+ FromLib: t v
+ "#]],
+ );
+}
+
+#[test]
+fn item_map_using_self() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::bar::Baz::{self};
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn item_map_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::Baz;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_rename() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc
+extern crate alloc as alloc_crate;
+mod alloc;
+mod sync;
+
+//- /sync.rs
+use alloc_crate::Arc;
+
+//- /lib.rs crate:alloc
+pub struct Arc;
+"#,
+ expect![[r#"
+ crate
+ alloc: t
+ alloc_crate: t
+ sync: t
+
+ crate::alloc
+
+ crate::sync
+ Arc: t v
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_rename_2015_edition() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc edition:2015
+extern crate alloc as alloc_crate;
+mod alloc;
+mod sync;
+
+//- /sync.rs
+use alloc_crate::Arc;
+
+//- /lib.rs crate:alloc
+pub struct Arc;
+"#,
+ expect![[r#"
+ crate
+ alloc: t
+ alloc_crate: t
+ sync: t
+
+ crate::alloc
+
+ crate::sync
+ Arc: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_extern_crate_self() {
+ cov_mark::check!(ignore_macro_use_extern_crate_self);
+ check(
+ r#"
+//- /main.rs crate:main
+#[macro_use]
+extern crate self as bla;
+"#,
+ expect![[r#"
+ crate
+ bla: t
+ "#]],
+ );
+}
+
+#[test]
+fn reexport_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::Baz;
+
+//- /lib.rs crate:test_crate
+pub use foo::Baz;
+mod foo;
+
+//- /foo.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn values_dont_shadow_extern_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+fn foo() {}
+use foo::Bar;
+
+//- /foo/lib.rs crate:foo
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: v
+ "#]],
+ );
+}
+
+#[test]
+fn no_std_prelude() {
+ check(
+ r#"
+ //- /main.rs crate:main deps:core,std
+ #![cfg_attr(not(never), no_std)]
+ use Rust;
+
+ //- /core.rs crate:core
+ pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Rust;
+ }
+ }
+ //- /std.rs crate:std deps:core
+ pub mod prelude {
+ pub mod rust_2018 {
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust: t v
+ "#]],
+ );
+}
+
+#[test]
+fn edition_specific_preludes() {
+ // We can't test the 2015 prelude here since you can't reexport its contents with 2015's
+ // absolute paths.
+
+ check(
+ r#"
+ //- /main.rs edition:2018 crate:main deps:std
+ use Rust2018;
+
+ //- /std.rs crate:std
+ pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Rust2018;
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust2018: t v
+ "#]],
+ );
+ check(
+ r#"
+ //- /main.rs edition:2021 crate:main deps:std
+ use Rust2021;
+
+ //- /std.rs crate:std
+ pub mod prelude {
+ pub mod rust_2021 {
+ pub struct Rust2021;
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust2021: t v
+ "#]],
+ );
+}
+
+#[test]
+fn std_prelude_takes_precedence_above_core_prelude() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core,std
+use {Foo, Bar};
+
+//- /std.rs crate:std deps:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Foo;
+ pub use core::prelude::rust_2018::Bar;
+ }
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Bar;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_not_test() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+use {Foo, Bar, Baz};
+
+//- /lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ #[cfg(test)]
+ pub struct Foo;
+ #[cfg(not(test))]
+ pub struct Bar;
+ #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))]
+ pub struct Baz;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: _
+ Foo: _
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_test() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+use {Foo, Bar, Baz};
+
+//- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42
+pub mod prelude {
+ pub mod rust_2018 {
+ #[cfg(test)]
+ pub struct Foo;
+ #[cfg(not(test))]
+ pub struct Bar;
+ #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))]
+ pub struct Baz;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: _
+ Baz: t v
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn infer_multiple_namespace() {
+ check(
+ r#"
+//- /main.rs
+mod a {
+ pub type T = ();
+ pub use crate::b::*;
+}
+
+use crate::a::T;
+
+mod b {
+ pub const T: () = ();
+}
+"#,
+ expect![[r#"
+ crate
+ T: t v
+ a: t
+ b: t
+
+ crate::a
+ T: t v
+
+ crate::b
+ T: v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check(
+ r#"
+//- /main.rs
+use tr::Tr as _;
+use tr::Tr2 as _;
+
+mod tr {
+ pub trait Tr {}
+ pub trait Tr2 {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ _: t
+ tr: t
+
+ crate::tr
+ Tr: t
+ Tr2: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_reexport() {
+ check(
+ r#"
+//- /main.rs
+mod tr {
+ pub trait PubTr {}
+ pub trait PrivTr {}
+}
+mod reex {
+ use crate::tr::PrivTr as _;
+ pub use crate::tr::PubTr as _;
+}
+use crate::reex::*;
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ reex: t
+ tr: t
+
+ crate::reex
+ _: t
+ _: t
+
+ crate::tr
+ PrivTr: t
+ PubTr: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_pub_crate_reexport() {
+ cov_mark::check!(upgrade_underscore_visibility);
+ check(
+ r#"
+//- /main.rs crate:main deps:lib
+use lib::*;
+
+//- /lib.rs crate:lib
+use tr::Tr as _;
+pub use tr::Tr as _;
+
+mod tr {
+ pub trait Tr {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_nontrait() {
+ check(
+ r#"
+//- /main.rs
+mod m {
+ pub struct Struct;
+ pub enum Enum {}
+ pub const CONST: () = ();
+}
+use crate::m::{Struct as _, Enum as _, CONST as _};
+ "#,
+ expect![[r#"
+ crate
+ m: t
+
+ crate::m
+ CONST: v
+ Enum: t
+ Struct: t v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_name_conflict() {
+ check(
+ r#"
+//- /main.rs
+struct Tr;
+
+use tr::Tr as _;
+
+mod tr {
+ pub trait Tr {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ Tr: t v
+ tr: t
+
+ crate::tr
+ Tr: t
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_the_entire_crate() {
+ check(
+ r#"
+//- /main.rs
+#![cfg(never)]
+
+pub struct S;
+pub enum E {}
+pub fn f() {}
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ );
+}
+
+#[test]
+fn use_crate_as() {
+ check(
+ r#"
+use crate as foo;
+
+use foo::bar as baz;
+
+fn bar() {}
+ "#,
+ expect![[r#"
+ crate
+ bar: v
+ baz: v
+ foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn self_imports_only_types() {
+ check(
+ r#"
+//- /main.rs
+mod m {
+ pub macro S() {}
+ pub struct S;
+}
+
+use self::m::S::{self};
+ "#,
+ expect![[r#"
+ crate
+ S: t
+ m: t
+
+ crate::m
+ S: t v m
+ "#]],
+ );
+}
+
+#[test]
+fn import_from_extern_crate_only_imports_public_items() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:settings,macros
+use macros::settings;
+use settings::Settings;
+//- /settings.rs crate:settings
+pub struct Settings;
+//- /macros.rs crate:macros
+mod settings {}
+pub const settings: () = ();
+ "#,
+ expect![[r#"
+ crate
+ Settings: t v
+ settings: v
+ "#]],
+ )
+}
+
+#[test]
+fn non_prelude_deps() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep extern-prelude:
+use dep::Struct;
+//- /dep.rs crate:dep
+pub struct Struct;
+ "#,
+ expect![[r#"
+ crate
+ Struct: _
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep extern-prelude:
+extern crate dep;
+use dep::Struct;
+//- /dep.rs crate:dep
+pub struct Struct;
+ "#,
+ expect![[r#"
+ crate
+ Struct: t v
+ dep: t
+ "#]],
+ );
+}
+
+#[test]
+fn braced_supers_in_use_tree() {
+ cov_mark::check!(concat_super_mod_paths);
+ check(
+ r#"
+mod some_module {
+ pub fn unknown_func() {}
+}
+
+mod other_module {
+ mod some_submodule {
+ use { super::{ super::unknown_func, }, };
+ }
+}
+
+use some_module::unknown_func;
+ "#,
+ expect![[r#"
+ crate
+ other_module: t
+ some_module: t
+ unknown_func: v
+
+ crate::other_module
+ some_submodule: t
+
+ crate::other_module::some_submodule
+ unknown_func: v
+
+ crate::some_module
+ unknown_func: v
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs
new file mode 100644
index 000000000..b2a6a592c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs
@@ -0,0 +1,338 @@
+use super::*;
+
+#[test]
+fn glob_1() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+pub struct Foo;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_2() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::*;
+pub struct Foo;
+
+//- /foo/bar.rs
+pub struct Baz;
+pub use super::*;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ Foo: t v
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_1() {
+ check(
+ r"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::*;
+struct PrivateStructFoo;
+
+//- /foo/bar.rs
+pub struct Baz;
+struct PrivateStructBar;
+pub use super::*;
+",
+ expect![[r#"
+ crate
+ Baz: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ PrivateStructFoo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ PrivateStructBar: t v
+ PrivateStructFoo: t v
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_2() {
+ check(
+ r"
+//- /lib.rs
+mod foo;
+use foo::*;
+use foo::bar::*;
+
+//- /foo/mod.rs
+mod bar;
+fn Foo() {};
+pub struct Foo {};
+
+//- /foo/bar.rs
+pub(super) struct PrivateBaz;
+struct PrivateBar;
+pub(crate) struct PubCrateStruct;
+",
+ expect![[r#"
+ crate
+ Foo: t
+ PubCrateStruct: t v
+ foo: t
+
+ crate::foo
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ PrivateBar: t v
+ PrivateBaz: t v
+ PubCrateStruct: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_across_crates() {
+ cov_mark::check!(glob_across_crates);
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::*;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::*;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+struct Foo;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_enum() {
+ cov_mark::check!(glob_enum);
+ check(
+ r#"
+enum Foo { Bar, Baz }
+use self::Foo::*;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_enum_group() {
+ cov_mark::check!(glob_enum_group);
+ check(
+ r#"
+enum Foo { Bar, Baz }
+use self::Foo::{*};
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def() {
+ cov_mark::check!(import_shadowed);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+mod bar;
+use foo::*;
+use bar::baz;
+use baz::Bar;
+
+//- /foo.rs
+pub mod baz { pub struct Foo; }
+
+//- /bar.rs
+pub mod baz { pub struct Bar; }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ bar: t
+ baz: t
+ foo: t
+
+ crate::bar
+ baz: t
+
+ crate::bar::baz
+ Bar: t v
+
+ crate::foo
+ baz: t
+
+ crate::foo::baz
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def_reversed() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+mod bar;
+use bar::baz;
+use foo::*;
+use baz::Bar;
+
+//- /foo.rs
+pub mod baz { pub struct Foo; }
+
+//- /bar.rs
+pub mod baz { pub struct Bar; }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ bar: t
+ baz: t
+ foo: t
+
+ crate::bar
+ baz: t
+
+ crate::bar::baz
+ Bar: t v
+
+ crate::foo
+ baz: t
+
+ crate::foo::baz
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def_dependencies() {
+ check(
+ r#"
+mod a { pub mod foo { pub struct X; } }
+mod b { pub use super::a::foo; }
+mod c { pub mod foo { pub struct Y; } }
+mod d {
+ use super::c::foo;
+ use super::b::*;
+ use foo::Y;
+}
+"#,
+ expect![[r#"
+ crate
+ a: t
+ b: t
+ c: t
+ d: t
+
+ crate::a
+ foo: t
+
+ crate::a::foo
+ X: t v
+
+ crate::b
+ foo: t
+
+ crate::c
+ foo: t
+
+ crate::c::foo
+ Y: t v
+
+ crate::d
+ Y: t v
+ foo: t
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
new file mode 100644
index 000000000..2e8cb3621
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -0,0 +1,237 @@
+use std::sync::Arc;
+
+use base_db::SourceDatabaseExt;
+
+use crate::{AdtId, ModuleDefId};
+
+use super::*;
+
+fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
+ let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ db.crate_def_map(krate);
+ });
+ assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+ db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string()));
+
+ {
+ let events = db.log_executed(|| {
+ db.crate_def_map(krate);
+ });
+ assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+ //- /lib.rs
+ mod foo;$0
+
+ use crate::foo::bar::Baz;
+
+ enum E { A, B }
+ use E::*;
+
+ fn foo() -> i32 {
+ 1 + 1
+ }
+
+ #[cfg(never)]
+ fn no() {}
+ //- /foo/mod.rs
+ pub mod bar;
+
+ //- /foo/bar.rs
+ pub struct Baz;
+ ",
+ r"
+ mod foo;
+
+ use crate::foo::bar::Baz;
+
+ enum E { A, B }
+ use E::*;
+
+ fn foo() -> i32 { 92 }
+
+ #[cfg(never)]
+ fn no() {}
+ ",
+ );
+}
+
+#[test]
+fn typing_inside_a_macro_should_not_invalidate_def_map() {
+ let (mut db, pos) = TestDB::with_position(
+ r"
+ //- /lib.rs
+ macro_rules! m {
+ ($ident:ident) => {
+ fn f() {
+ $ident + $ident;
+ };
+ }
+ }
+ mod foo;
+
+ //- /foo/mod.rs
+ pub mod bar;
+
+ //- /foo/bar.rs
+ $0
+ m!(X);
+ ",
+ );
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 1);
+ });
+ assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+ db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));
+
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 1);
+ });
+ assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ }
+}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_expansions() {
+ let (mut db, pos) = TestDB::with_position(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ ($ident:ident) => {
+ fn $ident() { };
+ }
+}
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+m!(X);
+fn quux() { 1$0 }
+m!(Y);
+m!(Z);
+"#,
+ );
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 4);
+ });
+ let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
+ assert_eq!(n_recalculated_item_trees, 6);
+ let n_reparsed_macros =
+ events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+ assert_eq!(n_reparsed_macros, 3);
+ }
+
+ let new_text = r#"
+m!(X);
+fn quux() { 92 }
+m!(Y);
+m!(Z);
+"#;
+ db.set_file_text(pos.file_id, Arc::new(new_text.to_string()));
+
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 4);
+ });
+ let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
+ assert_eq!(n_recalculated_item_trees, 1);
+ let n_reparsed_macros =
+ events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+ assert_eq!(n_reparsed_macros, 0);
+ }
+}
+
+#[test]
+fn item_tree_prevents_reparsing() {
+ // The `ItemTree` is used by both name resolution and the various queries in `adt.rs` and
+ // `data.rs`. After computing the `ItemTree` and deleting the parse tree, we should be able to
+ // run those other queries without triggering a reparse.
+
+ let (db, pos) = TestDB::with_position(
+ r#"
+pub struct S;
+pub union U {}
+pub enum E {
+ Variant,
+}
+pub fn f(_: S) { $0 }
+pub trait Tr {}
+impl Tr for () {}
+pub const C: u8 = 0;
+pub static ST: u8 = 0;
+pub type Ty = ();
+"#,
+ );
+ let krate = db.test_crate();
+ {
+ let events = db.log_executed(|| {
+ db.file_item_tree(pos.file_id.into());
+ });
+ let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
+ assert_eq!(n_calculated_item_trees, 1);
+ let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count();
+ assert_eq!(n_parsed_files, 1);
+ }
+
+ // Delete the parse tree.
+ base_db::ParseQuery.in_db(&db).purge();
+
+ {
+ let events = db.log_executed(|| {
+ let crate_def_map = db.crate_def_map(krate);
+ let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
+ assert_eq!(module_data.scope.resolutions().count(), 8);
+ assert_eq!(module_data.scope.impls().count(), 1);
+
+ for imp in module_data.scope.impls() {
+ db.impl_data(imp);
+ }
+
+ for (_, res) in module_data.scope.resolutions() {
+ match res.values.or(res.types).unwrap().0 {
+ ModuleDefId::FunctionId(f) => drop(db.function_data(f)),
+ ModuleDefId::AdtId(adt) => match adt {
+ AdtId::StructId(it) => drop(db.struct_data(it)),
+ AdtId::UnionId(it) => drop(db.union_data(it)),
+ AdtId::EnumId(it) => drop(db.enum_data(it)),
+ },
+ ModuleDefId::ConstId(it) => drop(db.const_data(it)),
+ ModuleDefId::StaticId(it) => drop(db.static_data(it)),
+ ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
+ ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
+ ModuleDefId::EnumVariantId(_)
+ | ModuleDefId::ModuleId(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::BuiltinType(_) => unreachable!(),
+ }
+ }
+ });
+ let n_reparsed_files = events.iter().filter(|it| it.contains("parse(")).count();
+ assert_eq!(n_reparsed_files, 0);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
new file mode 100644
index 000000000..3ece1379a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
@@ -0,0 +1,1187 @@
+use super::*;
+use itertools::Itertools;
+
+#[test]
+fn macro_rules_are_globally_visible() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! structs {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+structs!(Foo);
+mod nested;
+
+//- /nested.rs
+structs!(Bar, Baz);
+"#,
+ expect![[r#"
+ crate
+ Foo: t
+ nested: t
+
+ crate::nested
+ Bar: t
+ Baz: t
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_can_define_modules() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ ($name:ident) => { mod $name; }
+}
+m!(n1);
+mod m { m!(n3) }
+
+//- /n1.rs
+m!(n2)
+//- /n1/n2.rs
+struct X;
+//- /m/n3.rs
+struct Y;
+"#,
+ expect![[r#"
+ crate
+ m: t
+ n1: t
+
+ crate::m
+ n3: t
+
+ crate::m::n3
+ Y: t v
+
+ crate::n1
+ n2: t
+
+ crate::n1::n2
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_from_other_crates_are_visible() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+foo::structs!(Foo, Bar)
+mod bar;
+
+//- /bar.rs
+use crate::*;
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! structs {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t
+ Foo: t
+ bar: t
+
+ crate::bar
+ Bar: t
+ Foo: t
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_export_with_local_inner_macros_are_visible() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+foo::structs!(Foo, Bar)
+mod bar;
+
+//- /bar.rs
+use crate::*;
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! structs {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t
+ Foo: t
+ bar: t
+
+ crate::bar
+ Bar: t
+ Foo: t
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn local_inner_macros_makes_local_macros_usable() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+foo::structs!(Foo, Bar);
+mod bar;
+
+//- /bar.rs
+use crate::*;
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! structs {
+ ($($i:ident),*) => {
+ inner!($($i),*);
+ }
+}
+#[macro_export]
+macro_rules! inner {
+ ($($i:ident),*) => {
+ $(struct $i { field: u32 } )*
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t
+ Foo: t
+ bar: t
+
+ crate::bar
+ Bar: t
+ Foo: t
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn unexpanded_macro_should_expand_by_fixedpoint_loop() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+macro_rules! baz {
+ () => {
+ use foo::bar;
+ }
+}
+foo!();
+bar!();
+baz!();
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! foo {
+ () => {
+ struct Foo { field: u32 }
+ }
+}
+#[macro_export]
+macro_rules! bar {
+ () => {
+ use foo::foo;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Foo: t
+ bar: m
+ foo: m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_rules_from_other_crates_are_visible_with_macro_use() {
+ cov_mark::check!(macro_rules_from_other_crates_are_visible_with_macro_use);
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+structs!(Foo);
+structs_priv!(Bar);
+structs_not_exported!(MacroNotResolved1);
+crate::structs!(MacroNotResolved2);
+
+mod bar;
+
+#[macro_use]
+extern crate foo;
+
+//- /bar.rs
+structs!(Baz);
+crate::structs!(MacroNotResolved3);
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! structs {
+ ($i:ident) => { struct $i; }
+}
+
+macro_rules! structs_not_exported {
+ ($i:ident) => { struct $i; }
+}
+
+mod priv_mod {
+ #[macro_export]
+ macro_rules! structs_priv {
+ ($i:ident) => { struct $i; }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_is_macro_use() {
+ cov_mark::check!(prelude_is_macro_use);
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+structs!(Foo);
+structs_priv!(Bar);
+structs_outside!(Out);
+crate::structs!(MacroNotResolved2);
+
+mod bar;
+
+//- /bar.rs
+structs!(Baz);
+crate::structs!(MacroNotResolved3);
+
+//- /lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ #[macro_export]
+ macro_rules! structs {
+ ($i:ident) => { struct $i; }
+ }
+
+ mod priv_mod {
+ #[macro_export]
+ macro_rules! structs_priv {
+ ($i:ident) => { struct $i; }
+ }
+ }
+ }
+}
+
+#[macro_export]
+macro_rules! structs_outside {
+ ($i:ident) => { struct $i; }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ Out: t v
+ bar: t
+
+ crate::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_cycle() {
+ check(
+ r#"
+#[prelude_import]
+use self::prelude::*;
+
+declare_mod!();
+
+mod prelude {
+ macro_rules! declare_mod {
+ () => (mod foo {})
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ prelude: t
+
+ crate::prelude
+ "#]],
+ );
+}
+
+#[test]
+fn legacy_macro_use_before_def() {
+ check(
+ r#"
+m!();
+
+macro_rules! m {
+ () => {
+ struct S;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ S: t v
+ "#]],
+ );
+ // FIXME: should not expand. legacy macro scoping is not implemented.
+}
+
+#[test]
+fn plain_macros_are_legacy_textual_scoped() {
+ check(
+ r#"
+//- /main.rs
+mod m1;
+bar!(NotFoundNotMacroUse);
+
+mod m2 { foo!(NotFoundBeforeInside2); }
+
+macro_rules! foo {
+ ($x:ident) => { struct $x; }
+}
+foo!(Ok);
+
+mod m3;
+foo!(OkShadowStop);
+bar!(NotFoundMacroUseStop);
+
+#[macro_use]
+mod m5 {
+ #[macro_use]
+ mod m6 {
+ macro_rules! foo {
+ ($x:ident) => { fn $x() {} }
+ }
+ }
+}
+foo!(ok_double_macro_use_shadow);
+
+baz!(NotFoundBefore);
+#[macro_use]
+mod m7 {
+ macro_rules! baz {
+ ($x:ident) => { struct $x; }
+ }
+}
+baz!(OkAfter);
+
+//- /m1.rs
+foo!(NotFoundBeforeInside1);
+macro_rules! bar {
+ ($x:ident) => { struct $x; }
+}
+
+//- /m3/mod.rs
+foo!(OkAfterInside);
+macro_rules! foo {
+ ($x:ident) => { fn $x() {} }
+}
+foo!(ok_shadow);
+
+#[macro_use]
+mod m4;
+bar!(OkMacroUse);
+
+mod m5;
+baz!(OkMacroUseInner);
+
+//- /m3/m4.rs
+foo!(ok_shadow_deep);
+macro_rules! bar {
+ ($x:ident) => { struct $x; }
+}
+//- /m3/m5.rs
+#![macro_use]
+macro_rules! baz {
+ ($x:ident) => { struct $x; }
+}
+
+
+"#,
+ expect![[r#"
+ crate
+ NotFoundBefore: t v
+ Ok: t v
+ OkAfter: t v
+ OkShadowStop: t v
+ m1: t
+ m2: t
+ m3: t
+ m5: t
+ m7: t
+ ok_double_macro_use_shadow: v
+
+ crate::m1
+
+ crate::m2
+
+ crate::m3
+ OkAfterInside: t v
+ OkMacroUse: t v
+ OkMacroUseInner: t v
+ m4: t
+ m5: t
+ ok_shadow: v
+
+ crate::m3::m4
+ ok_shadow_deep: v
+
+ crate::m3::m5
+
+ crate::m5
+ m6: t
+
+ crate::m5::m6
+
+ crate::m7
+ "#]],
+ );
+ // FIXME: should not see `NotFoundBefore`
+}
+
+#[test]
+fn type_value_macro_live_in_different_scopes() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! foo {
+ ($x:ident) => { type $x = (); }
+}
+
+foo!(foo);
+use foo as bar;
+
+use self::foo as baz;
+fn baz() {}
+"#,
+ expect![[r#"
+ crate
+ bar: t m
+ baz: t v m
+ foo: t m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_can_be_aliased() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+#[macro_use]
+extern crate foo;
+
+foo!(Direct);
+bar!(Alias);
+
+//- /lib.rs crate:foo
+use crate::foo as bar;
+
+mod m {
+ #[macro_export]
+ macro_rules! foo {
+ ($x:ident) => { struct $x; }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Alias: t v
+ Direct: t v
+ foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn path_qualified_macros() {
+ check(
+ r#"
+macro_rules! foo {
+ ($x:ident) => { struct $x; }
+}
+
+crate::foo!(NotResolved);
+
+crate::bar!(OkCrate);
+bar!(OkPlain);
+alias1!(NotHere);
+m::alias1!(OkAliasPlain);
+m::alias2!(OkAliasSuper);
+m::alias3!(OkAliasCrate);
+not_found!(NotFound);
+
+mod m {
+ #[macro_export]
+ macro_rules! bar {
+ ($x:ident) => { struct $x; }
+ }
+ pub use bar as alias1;
+ pub use super::bar as alias2;
+ pub use crate::bar as alias3;
+ pub use self::bar as not_found;
+}
+"#,
+ expect![[r#"
+ crate
+ OkAliasCrate: t v
+ OkAliasPlain: t v
+ OkAliasSuper: t v
+ OkCrate: t v
+ OkPlain: t v
+ bar: m
+ m: t
+
+ crate::m
+ alias1: m
+ alias2: m
+ alias3: m
+ not_found: _
+ "#]],
+ );
+}
+
+#[test]
+fn macro_dollar_crate_is_correct_in_item() {
+ cov_mark::check!(macro_dollar_crate_self);
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+#[macro_use]
+extern crate foo;
+
+#[macro_use]
+mod m {
+ macro_rules! current {
+ () => {
+ use $crate::Foo as FooSelf;
+ }
+ }
+}
+
+struct Foo;
+
+current!();
+not_current1!();
+foo::not_current2!();
+
+//- /lib.rs crate:foo
+mod m {
+ #[macro_export]
+ macro_rules! not_current1 {
+ () => {
+ use $crate::Bar;
+ }
+ }
+}
+
+#[macro_export]
+macro_rules! not_current2 {
+ () => {
+ use $crate::Baz;
+ }
+}
+
+pub struct Bar;
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t v
+ FooSelf: t v
+ foo: t
+ m: t
+
+ crate::m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_dollar_crate_is_correct_in_indirect_deps() {
+ cov_mark::check!(macro_dollar_crate_other);
+ // From std
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+foo!();
+
+//- /std.rs crate:std deps:core
+pub use core::foo;
+
+pub mod prelude {
+ pub mod rust_2018 {}
+}
+
+#[macro_use]
+mod std_macros;
+
+//- /core.rs crate:core
+#[macro_export]
+macro_rules! foo {
+ () => {
+ use $crate::bar;
+ }
+}
+
+pub struct bar;
+"#,
+ expect![[r#"
+ crate
+ bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn expand_derive() {
+ let map = compute_crate_def_map(
+ r#"
+//- /main.rs crate:main deps:core
+use core::Copy;
+
+#[core::derive(Copy, core::Clone)]
+struct Foo;
+
+//- /core.rs crate:core
+#[rustc_builtin_macro]
+pub macro derive($item:item) {}
+#[rustc_builtin_macro]
+pub macro Copy {}
+#[rustc_builtin_macro]
+pub macro Clone {}
+"#,
+ );
+ assert_eq!(map.modules[map.root].scope.impls().len(), 2);
+}
+
+#[test]
+fn resolve_builtin_derive() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core
+use core::*;
+
+//- /core.rs crate:core
+#[rustc_builtin_macro]
+pub macro Clone {}
+
+pub trait Clone {}
+"#,
+ expect![[r#"
+ crate
+ Clone: t m
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_derive_with_unresolved_attributes_fall_back() {
+ // Tests that we still resolve derives after ignoring an unresolved attribute.
+ cov_mark::check!(unresolved_attribute_fallback);
+ let map = compute_crate_def_map(
+ r#"
+//- /main.rs crate:main deps:core
+use core::{Clone, derive};
+
+#[derive(Clone)]
+#[unresolved]
+struct Foo;
+
+//- /core.rs crate:core
+#[rustc_builtin_macro]
+pub macro derive($item:item) {}
+#[rustc_builtin_macro]
+pub macro Clone {}
+"#,
+ );
+ assert_eq!(map.modules[map.root].scope.impls().len(), 1);
+}
+
+#[test]
+fn unresolved_attributes_fall_back_track_per_file_moditems() {
+ // Tests that we track per-file ModItems when ignoring an unresolved attribute.
+ // Just tracking the `ModItem` leads to `Foo` getting ignored.
+
+ check(
+ r#"
+ //- /main.rs crate:main
+
+ mod submod;
+
+ #[unresolved]
+ struct Foo;
+
+ //- /submod.rs
+ #[unresolved]
+ struct Bar;
+ "#,
+ expect![[r#"
+ crate
+ Foo: t v
+ submod: t
+
+ crate::submod
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn unresolved_attrs_extern_block_hang() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/8905
+ check(
+ r#"
+#[unresolved]
+extern "C" {
+ #[unresolved]
+ fn f();
+}
+ "#,
+ expect![[r#"
+ crate
+ f: v
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_extern_block() {
+ check(
+ r#"
+macro_rules! m {
+ () => { static S: u8; };
+}
+
+extern {
+ m!();
+}
+ "#,
+ expect![[r#"
+ crate
+ S: v
+ "#]],
+ );
+}
+
+#[test]
+fn resolves_derive_helper() {
+ cov_mark::check!(resolved_derive_helper);
+ check(
+ r#"
+//- /main.rs crate:main deps:proc
+#[rustc_builtin_macro]
+pub macro derive($item:item) {}
+
+#[derive(proc::Derive)]
+#[helper]
+#[unresolved]
+struct S;
+
+//- /proc.rs crate:proc
+#![crate_type="proc-macro"]
+#[proc_macro_derive(Derive, attributes(helper))]
+fn derive() {}
+ "#,
+ expect![[r#"
+ crate
+ S: t v
+ derive: m
+ "#]],
+ );
+}
+
+#[test]
+fn unresolved_attr_with_cfg_attr_hang() {
+ // Another regression test for https://github.com/rust-lang/rust-analyzer/issues/8905
+ check(
+ r#"
+#[cfg_attr(not(off), unresolved, unresolved)]
+struct S;
+ "#,
+ expect![[r#"
+ crate
+ S: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_expansion_overflow() {
+ cov_mark::check!(macro_expansion_overflow);
+ check(
+ r#"
+macro_rules! a {
+ ($e:expr; $($t:tt)*) => {
+ b!(static = (); $($t)*);
+ };
+ () => {};
+}
+
+macro_rules! b {
+ (static = $e:expr; $($t:tt)*) => {
+ a!($e; $($t)*);
+ };
+ () => {};
+}
+
+b! { static = #[] ();}
+"#,
+ expect![[r#"
+ crate
+ "#]],
+ );
+}
+
+#[test]
+fn macros_defining_macros() {
+ check(
+ r#"
+macro_rules! item {
+ ($item:item) => { $item }
+}
+
+item! {
+ macro_rules! indirect_macro { () => { struct S {} } }
+}
+
+indirect_macro!();
+ "#,
+ expect![[r#"
+ crate
+ S: t
+ "#]],
+ );
+}
+
+#[test]
+fn resolves_proc_macros() {
+ check(
+ r#"
+#![crate_type="proc-macro"]
+struct TokenStream;
+
+#[proc_macro]
+pub fn function_like_macro(args: TokenStream) -> TokenStream {
+ args
+}
+
+#[proc_macro_attribute]
+pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+
+#[proc_macro_derive(DummyTrait)]
+pub fn derive_macro(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+
+#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
+pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+"#,
+ expect![[r#"
+ crate
+ AnotherTrait: m
+ DummyTrait: m
+ TokenStream: t v
+ attribute_macro: v m
+ derive_macro: v
+ derive_macro_2: v
+ function_like_macro: v m
+ "#]],
+ );
+}
+
+#[test]
+fn proc_macro_censoring() {
+ // Make sure that only proc macros are publicly exported from proc-macro crates.
+
+ check(
+ r#"
+//- /main.rs crate:main deps:macros
+pub use macros::*;
+
+//- /macros.rs crate:macros
+#![crate_type="proc-macro"]
+pub struct TokenStream;
+
+#[proc_macro]
+pub fn function_like_macro(args: TokenStream) -> TokenStream {
+ args
+}
+
+#[proc_macro_attribute]
+pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+
+#[proc_macro_derive(DummyTrait)]
+pub fn derive_macro(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+
+#[macro_export]
+macro_rules! mbe {
+ () => {};
+}
+"#,
+ expect![[r#"
+ crate
+ DummyTrait: m
+ attribute_macro: m
+ function_like_macro: m
+ "#]],
+ );
+}
+
+#[test]
+fn collects_derive_helpers() {
+ let def_map = compute_crate_def_map(
+ r#"
+#![crate_type="proc-macro"]
+struct TokenStream;
+
+#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
+pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
+ TokenStream
+}
+"#,
+ );
+
+ assert_eq!(def_map.exported_derives.len(), 1);
+ match def_map.exported_derives.values().next() {
+ Some(helpers) => match &**helpers {
+ [attr] => assert_eq!(attr.to_string(), "helper_attr"),
+ _ => unreachable!(),
+ },
+ _ => unreachable!(),
+ }
+}
+
+#[test]
+fn resolve_macro_def() {
+ check(
+ r#"
+pub macro structs($($i:ident),*) {
+ $(struct $i { field: u32 } )*
+}
+structs!(Foo);
+"#,
+ expect![[r#"
+ crate
+ Foo: t
+ structs: m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_in_prelude() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:std
+global_asm!();
+
+//- /std.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub macro global_asm() {
+ pub struct S;
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ crate
+ S: t v
+ "#]],
+ )
+}
+
+#[test]
+fn issue9358_bad_macro_stack_overflow() {
+ cov_mark::check!(issue9358_bad_macro_stack_overflow);
+ check(
+ r#"
+macro_rules! m {
+ ($cond:expr) => { m!($cond, stringify!($cond)) };
+ ($cond:expr, $($arg:tt)*) => { $cond };
+}
+m!(
+"#,
+ expect![[r#"
+ crate
+ "#]],
+ )
+}
+
+#[test]
+fn eager_macro_correctly_resolves_contents() {
+ // Eager macros resolve any contained macros when expanded. This should work correctly with the
+ // usual name resolution rules, so both of these `include!`s should include the right file.
+
+ check(
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+include!(inner_a!());
+include!(crate::inner_b!());
+
+#[macro_export]
+macro_rules! inner_a {
+ () => { "inc_a.rs" };
+}
+#[macro_export]
+macro_rules! inner_b {
+ () => { "inc_b.rs" };
+}
+//- /inc_a.rs
+struct A;
+//- /inc_b.rs
+struct B;
+"#,
+ expect![[r#"
+ crate
+ A: t v
+ B: t v
+ inner_a: m
+ inner_b: m
+ "#]],
+ );
+}
+
+#[test]
+fn eager_macro_correctly_resolves_dollar_crate() {
+ // MBE -> eager -> $crate::mbe
+ check(
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[macro_export]
+macro_rules! inner {
+ () => { "inc.rs" };
+}
+
+macro_rules! m {
+ () => { include!($crate::inner!()); };
+}
+
+m!();
+
+//- /inc.rs
+struct A;
+"#,
+ expect![[r#"
+ crate
+ A: t v
+ inner: m
+ "#]],
+ );
+ // eager -> MBE -> $crate::mbe
+ check(
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[macro_export]
+macro_rules! inner {
+ () => { "inc.rs" };
+}
+
+macro_rules! n {
+ () => {
+ $crate::inner!()
+ };
+}
+
+include!(n!());
+
+//- /inc.rs
+struct A;
+"#,
+ expect![[r#"
+ crate
+ A: t v
+ inner: m
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_imports_all_macro_types() {
+ let def_map = compute_crate_def_map(
+ r#"
+//- /main.rs crate:main deps:lib
+#[macro_use]
+extern crate lib;
+
+//- /lib.rs crate:lib deps:proc
+pub use proc::*;
+
+#[macro_export]
+macro_rules! legacy { () => () }
+
+pub macro macro20 {}
+
+//- /proc.rs crate:proc
+#![crate_type="proc-macro"]
+
+struct TokenStream;
+
+#[proc_macro_attribute]
+fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
+ "#,
+ );
+
+ let root = &def_map[def_map.root()].scope;
+ let actual = root
+ .legacy_macros()
+ .sorted_by(|a, b| std::cmp::Ord::cmp(&a.0, &b.0))
+ .map(|(name, _)| format!("{name}\n"))
+ .collect::<String>();
+
+ expect![[r#"
+ legacy
+ macro20
+ proc_attr
+ "#]]
+ .assert_eq(&actual);
+}
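A note on reading the expected blocks in these tests: each name in the dump is tagged with the namespaces it occupies -- `t` for types, `v` for values, `m` for macros, and `_` for an import that failed to resolve. A minimal sketch of crate-root items and the tags they end up with (inferred from the fixtures above, not from separate documentation):

    // A unit struct defines both a type and a value constructor, so it is dumped
    // as `S: t v`; a free function only defines a value (`f: v`); an exported
    // macro_rules! macro only occupies the macro namespace (`exported: m`).
    struct S;

    fn f() {}

    #[macro_export]
    macro_rules! exported {
        () => {};
    }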
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs
new file mode 100644
index 000000000..79a74873b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs
@@ -0,0 +1,843 @@
+use super::*;
+
+#[test]
+fn name_res_works_for_broken_modules() {
+ cov_mark::check!(name_res_works_for_broken_modules);
+ check(
+ r"
+//- /lib.rs
+mod foo // no `;`, no body
+use self::foo::Baz;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+
+//- /foo/bar.rs
+pub struct Baz;
+",
+ expect![[r#"
+ crate
+ Baz: _
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_resolution() {
+ check(
+ r#"
+//- /lib.rs
+mod n1;
+
+//- /n1.rs
+mod n2;
+
+//- /n1/n2.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ n1: t
+
+ crate::n1
+ n2: t
+
+ crate::n1::n2
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_resolution_2() {
+ check(
+ r#"
+//- /lib.rs
+mod prelude;
+mod iter;
+
+//- /prelude.rs
+pub use crate::iter::Iterator;
+
+//- /iter.rs
+pub use self::traits::Iterator;
+mod traits;
+
+//- /iter/traits.rs
+pub use self::iterator::Iterator;
+mod iterator;
+
+//- /iter/traits/iterator.rs
+pub trait Iterator;
+"#,
+ expect![[r#"
+ crate
+ iter: t
+ prelude: t
+
+ crate::iter
+ Iterator: t
+ traits: t
+
+ crate::iter::traits
+ Iterator: t
+ iterator: t
+
+ crate::iter::traits::iterator
+ Iterator: t
+
+ crate::prelude
+ Iterator: t
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_works_for_non_standard_filenames() {
+ check(
+ r#"
+//- /my_library.rs crate:my_library
+mod foo;
+use self::foo::Bar;
+
+//- /foo/mod.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: t
+
+ crate::foo
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_works_for_raw_modules() {
+ check(
+ r#"
+//- /lib.rs
+mod r#async;
+use self::r#async::Bar;
+
+//- /async.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ async: t
+
+ crate::async
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_path() {
+ check(
+ r#"
+//- /lib.rs
+#[path = "bar/baz/foo.rs"]
+mod foo;
+use self::foo::Bar;
+
+//- /bar/baz/foo.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: t
+
+ crate::foo
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_with_path_in_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+#[path = "baz.rs"]
+pub mod bar;
+use self::bar::Baz;
+
+//- /foo/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_with_path_non_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "baz.rs"]
+pub mod bar;
+use self::bar::Baz;
+
+//- /baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_decl_path_super() {
+ check(
+ r#"
+//- /main.rs
+#[path = "bar/baz/module.rs"]
+mod foo;
+pub struct Baz;
+
+//- /bar/baz/module.rs
+use super::Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+#[path = "module/mod.rs"]
+mod foo;
+
+//- /module/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "./sub.rs"]
+pub mod foo_bar;
+
+//- /sub.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ foo_bar: t
+
+ crate::foo::foo_bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path_2() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+#[path="../sub.rs"]
+pub mod foo_bar;
+
+//- /sub.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ foo_bar: t
+
+ crate::foo::foo_bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path_outside_root() {
+ check(
+ r#"
+//- /a/b/c/d/e/main.rs crate:main
+#[path="../../../../../outside.rs"]
+mod foo;
+
+//- /outside.rs
+mod bar;
+
+//- /bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+"#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs_2() {
+ check(
+ r#"
+//- /main.rs
+#[path = "module/bar/mod.rs"]
+mod foo;
+
+//- /module/bar/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs_with_win_separator() {
+ check(
+ r#"
+//- /main.rs
+#[path = r"module\bar\mod.rs"]
+mod foo;
+
+//- /module/bar/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_with_path_attribute() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models"]
+mod foo { mod bar; }
+
+//- /models/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module() {
+ check(
+ r#"
+//- /main.rs
+mod foo { mod bar; }
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_2_with_path_attribute() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models/db"]
+mod foo { mod bar; }
+
+//- /models/db/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_3() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models/db"]
+mod foo {
+ #[path = "users.rs"]
+ mod bar;
+}
+
+//- /models/db/users.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_empty_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = ""]
+mod foo {
+ #[path = "users.rs"]
+ mod bar;
+}
+
+//- /users.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_empty_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = ""] // Should try to read `/` (a directory)
+mod foo;
+
+//- /foo.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_relative_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = "./models"]
+mod foo { mod bar; }
+
+//- /models/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo {
+ #[path = "baz.rs"]
+ mod bar;
+}
+use self::foo::bar::Baz;
+
+//- /foo/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+mod bar {
+ #[path = "qwe.rs"]
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /foo/bar/qwe.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_non_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+mod bar {
+ #[path = "qwe.rs"]
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /foo/bar/qwe.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_non_crate_root_2() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "bar"]
+mod bar {
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /bar/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_module_in_non_crate_root_2() {
+ check(
+ r#"
+//- /main.rs
+#[path="module/m2.rs"]
+mod module;
+
+//- /module/m2.rs
+pub mod submod;
+
+//- /module/submod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ module: t
+
+ crate::module
+ submod: t
+
+ crate::module::submod
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_out_of_line_module() {
+ check(
+ r#"
+//- /lib.rs
+mod a {
+ mod b {
+ mod c;
+ }
+}
+
+//- /a/b/c.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ b: t
+
+ crate::a::b
+ c: t
+
+ crate::a::b::c
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_out_of_line_module_with_path() {
+ check(
+ r#"
+//- /lib.rs
+mod a {
+ #[path = "d/e"]
+ mod b {
+ mod c;
+ }
+}
+
+//- /a/d/e/c.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ b: t
+
+ crate::a::b
+ c: t
+
+ crate::a::b::c
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn circular_mods() {
+ cov_mark::check!(circular_mods);
+ compute_crate_def_map(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+#[path = "./foo.rs"]
+mod foo;
+"#,
+ );
+
+ compute_crate_def_map(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+#[path = "./bar.rs"]
+mod bar;
+//- /bar.rs
+#[path = "./foo.rs"]
+mod foo;
+"#,
+ );
+}
+
+#[test]
+fn abs_path_ignores_local() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core
+pub use ::core::hash::Hash;
+pub mod core {}
+
+//- /lib.rs crate:core
+pub mod hash { pub trait Hash {} }
+"#,
+ expect![[r#"
+ crate
+ Hash: t
+ core: t
+
+ crate::core
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_in_module_file() {
+ // Inner `#![cfg]` in a module file makes the whole module disappear.
+ check(
+ r#"
+//- /main.rs
+mod module;
+
+//- /module.rs
+#![cfg(NEVER)]
+
+struct AlsoShoulntAppear;
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ )
+}
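Several of the inline-module tests above exercise the same rule: a `#[path]` attribute on an inline module (one with a braced body) changes the directory against which its out-of-line children are resolved. A runnable sketch of just the path arithmetic (the helper name is illustrative, not part of rust-analyzer):

    use std::path::{Path, PathBuf};

    /// Directory used to resolve out-of-line children of an inline module that
    /// carries `#[path = "<dir>"]`, relative to the declaring file's directory.
    fn inline_mod_child_dir(declaring_dir: &Path, path_attr: &str) -> PathBuf {
        declaring_dir.join(path_attr)
    }

    fn main() {
        // `#[path = "models/db"] mod foo { #[path = "users.rs"] mod bar; }` in
        // /main.rs loads /models/db/users.rs (see `..._inside_inline_module_3`).
        let dir = inline_mod_child_dir(Path::new("/"), "models/db");
        assert_eq!(dir.join("users.rs"), PathBuf::from("/models/db/users.rs"));
    }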
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs
new file mode 100644
index 000000000..215e8952d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/primitives.rs
@@ -0,0 +1,23 @@
+use super::*;
+
+#[test]
+fn primitive_reexport() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::int;
+
+//- /foo.rs
+pub use i32 as int;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+ int: t
+
+ crate::foo
+ int: t
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
new file mode 100644
index 000000000..2f13a9fbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -0,0 +1,222 @@
+//! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
+mod lower;
+
+use std::{
+ fmt::{self, Display},
+ iter,
+};
+
+use crate::{
+ body::LowerCtx,
+ intern::Interned,
+ type_ref::{ConstScalarOrPath, LifetimeRef},
+};
+use hir_expand::name::Name;
+use syntax::ast;
+
+use crate::type_ref::{TypeBound, TypeRef};
+
+pub use hir_expand::mod_path::{path, ModPath, PathKind};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ImportAlias {
+ /// Unnamed alias, as in `use Foo as _;`
+ Underscore,
+ /// Named alias
+ Alias(Name),
+}
+
+impl Display for ImportAlias {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ImportAlias::Underscore => f.write_str("_"),
+ ImportAlias::Alias(name) => f.write_str(&name.to_smol_str()),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+    /// Type-based path like `<T>::foo`.
+    /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
+ type_anchor: Option<Interned<TypeRef>>,
+ mod_path: Interned<ModPath>,
+ /// Invariant: the same len as `self.mod_path.segments`
+ generic_args: Box<[Option<Interned<GenericArgs>>]>,
+}
+
+/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
+/// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgs {
+ pub args: Vec<GenericArg>,
+ /// This specifies whether the args contain a Self type as the first
+ /// element. This is the case for path segments like `<T as Trait>`, where
+ /// `T` is actually a type parameter for the path `Trait` specifying the
+ /// Self type. Otherwise, when we have a path `Trait<X, Y>`, the Self type
+ /// is left out.
+ pub has_self_type: bool,
+ /// Associated type bindings like in `Iterator<Item = T>`.
+ pub bindings: Vec<AssociatedTypeBinding>,
+ /// Whether these generic args were desugared from `Trait(Arg) -> Output`
+ /// parenthesis notation typically used for the `Fn` traits.
+ pub desugared_from_fn: bool,
+}
+
+/// An associated type binding like in `Iterator<Item = T>`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssociatedTypeBinding {
+ /// The name of the associated type.
+ pub name: Name,
+ /// The type bound to this associated type (in `Item = T`, this would be the
+ /// `T`). This can be `None` if there are bounds instead.
+ pub type_ref: Option<TypeRef>,
+ /// Bounds for the associated type, like in `Iterator<Item:
+ /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds`
+ /// feature.)
+ pub bounds: Vec<Interned<TypeBound>>,
+}
+
+/// A single generic argument.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ Type(TypeRef),
+ Lifetime(LifetimeRef),
+ Const(ConstScalarOrPath),
+}
+
+impl Path {
+ /// Converts an `ast::Path` to `Path`. Works with use trees.
+    /// It correctly handles `$crate`-based paths from macro calls.
+ pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
+ lower::lower_path(path, ctx)
+ }
+
+ /// Converts a known mod path to `Path`.
+ pub fn from_known_path(
+ path: ModPath,
+ generic_args: impl Into<Box<[Option<Interned<GenericArgs>>]>>,
+ ) -> Path {
+ let generic_args = generic_args.into();
+ assert_eq!(path.len(), generic_args.len());
+ Path { type_anchor: None, mod_path: Interned::new(path), generic_args }
+ }
+
+ pub fn kind(&self) -> &PathKind {
+ &self.mod_path.kind
+ }
+
+ pub fn type_anchor(&self) -> Option<&TypeRef> {
+ self.type_anchor.as_deref()
+ }
+
+ pub fn segments(&self) -> PathSegments<'_> {
+ PathSegments { segments: self.mod_path.segments(), generic_args: &self.generic_args }
+ }
+
+ pub fn mod_path(&self) -> &ModPath {
+ &self.mod_path
+ }
+
+ pub fn qualifier(&self) -> Option<Path> {
+ if self.mod_path.is_ident() {
+ return None;
+ }
+ let res = Path {
+ type_anchor: self.type_anchor.clone(),
+ mod_path: Interned::new(ModPath::from_segments(
+ self.mod_path.kind,
+ self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(),
+ )),
+ generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec().into(),
+ };
+ Some(res)
+ }
+
+ pub fn is_self_type(&self) -> bool {
+ self.type_anchor.is_none() && *self.generic_args == [None] && self.mod_path.is_Self()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment<'a> {
+ pub name: &'a Name,
+ pub args_and_bindings: Option<&'a GenericArgs>,
+}
+
+pub struct PathSegments<'a> {
+ segments: &'a [Name],
+ generic_args: &'a [Option<Interned<GenericArgs>>],
+}
+
+impl<'a> PathSegments<'a> {
+ pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] };
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+ pub fn len(&self) -> usize {
+ self.segments.len()
+ }
+ pub fn first(&self) -> Option<PathSegment<'a>> {
+ self.get(0)
+ }
+ pub fn last(&self) -> Option<PathSegment<'a>> {
+ self.get(self.len().checked_sub(1)?)
+ }
+ pub fn get(&self, idx: usize) -> Option<PathSegment<'a>> {
+ assert_eq!(self.segments.len(), self.generic_args.len());
+ let res = PathSegment {
+ name: self.segments.get(idx)?,
+ args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it),
+ };
+ Some(res)
+ }
+ pub fn skip(&self, len: usize) -> PathSegments<'a> {
+ assert_eq!(self.segments.len(), self.generic_args.len());
+ PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] }
+ }
+ pub fn take(&self, len: usize) -> PathSegments<'a> {
+ assert_eq!(self.segments.len(), self.generic_args.len());
+ PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] }
+ }
+ pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
+ self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment {
+ name,
+ args_and_bindings: args.as_ref().map(|it| &**it),
+ })
+ }
+}
+
+impl GenericArgs {
+ pub(crate) fn from_ast(
+ lower_ctx: &LowerCtx<'_>,
+ node: ast::GenericArgList,
+ ) -> Option<GenericArgs> {
+ lower::lower_generic_args(lower_ctx, node)
+ }
+
+ pub(crate) fn empty() -> GenericArgs {
+ GenericArgs {
+ args: Vec::new(),
+ has_self_type: false,
+ bindings: Vec::new(),
+ desugared_from_fn: false,
+ }
+ }
+}
+
+impl From<Name> for Path {
+ fn from(name: Name) -> Path {
+ Path {
+ type_anchor: None,
+ mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
+ generic_args: Box::new([None]),
+ }
+ }
+}
+
+impl From<Name> for Box<Path> {
+ fn from(name: Name) -> Box<Path> {
+ Box::new(Path::from(name))
+ }
+}
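The `Path` struct above keeps `generic_args` as a slice parallel to the segments of `mod_path` (the invariant noted on the field), and `PathSegments::get`/`iter` zip the two back together. A minimal standalone sketch of that shape with stand-in types (not the real `hir-def` API):

    // Simplified model of the invariant: one optional generic-args slot per segment.
    struct MiniPath {
        segments: Vec<String>,             // e.g. ["Option"] for `Option<i32>`
        generic_args: Vec<Option<String>>, // same length, e.g. [Some("<i32>")]
    }

    impl MiniPath {
        fn get(&self, idx: usize) -> Option<(&str, Option<&str>)> {
            assert_eq!(self.segments.len(), self.generic_args.len());
            let name = self.segments.get(idx)?;
            // as in `PathSegments::get`, the args lookup cannot fail once the name exists
            let args = self.generic_args[idx].as_deref();
            Some((name.as_str(), args))
        }
    }

    fn main() {
        let p = MiniPath {
            segments: vec!["Option".into()],
            generic_args: vec![Some("<i32>".into())],
        };
        assert_eq!(p.get(0), Some(("Option", Some("<i32>"))));
    }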
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
new file mode 100644
index 000000000..0428f1a39
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -0,0 +1,230 @@
+//! Transforms syntax into `Path` objects, ideally accounting for hygiene
+
+use crate::{intern::Interned, type_ref::ConstScalarOrPath};
+
+use either::Either;
+use hir_expand::name::{name, AsName};
+use syntax::ast::{self, AstNode, HasTypeBounds};
+
+use super::AssociatedTypeBinding;
+use crate::{
+ body::LowerCtx,
+ path::{GenericArg, GenericArgs, ModPath, Path, PathKind},
+ type_ref::{LifetimeRef, TypeBound, TypeRef},
+};
+
+/// Converts an `ast::Path` to `Path`. Works with use trees.
+/// It correctly handles `$crate`-based paths from macro calls.
+pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
+ let mut kind = PathKind::Plain;
+ let mut type_anchor = None;
+ let mut segments = Vec::new();
+ let mut generic_args = Vec::new();
+ let hygiene = ctx.hygiene();
+ loop {
+ let segment = path.segment()?;
+
+ if segment.coloncolon_token().is_some() {
+ kind = PathKind::Abs;
+ }
+
+ match segment.kind()? {
+ ast::PathSegmentKind::Name(name_ref) => {
+ // FIXME: this should just return name
+ match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
+ Either::Left(name) => {
+ let args = segment
+ .generic_arg_list()
+ .and_then(|it| lower_generic_args(ctx, it))
+ .or_else(|| {
+ lower_generic_args_from_fn_path(
+ ctx,
+ segment.param_list(),
+ segment.ret_type(),
+ )
+ })
+ .map(Interned::new);
+ segments.push(name);
+ generic_args.push(args)
+ }
+ Either::Right(crate_id) => {
+ kind = PathKind::DollarCrate(crate_id);
+ break;
+ }
+ }
+ }
+ ast::PathSegmentKind::SelfTypeKw => {
+ segments.push(name![Self]);
+ generic_args.push(None)
+ }
+ ast::PathSegmentKind::Type { type_ref, trait_ref } => {
+ assert!(path.qualifier().is_none()); // this can only occur at the first segment
+
+ let self_type = TypeRef::from_ast(ctx, type_ref?);
+
+ match trait_ref {
+ // <T>::foo
+ None => {
+ type_anchor = Some(Interned::new(self_type));
+ kind = PathKind::Plain;
+ }
+ // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
+ Some(trait_ref) => {
+ let Path { mod_path, generic_args: path_generic_args, .. } =
+ Path::from_src(trait_ref.path()?, ctx)?;
+ let num_segments = mod_path.segments().len();
+ kind = mod_path.kind;
+
+ segments.extend(mod_path.segments().iter().cloned().rev());
+ generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
+
+ // Insert the type reference (T in the above example) as Self parameter for the trait
+ let last_segment =
+ generic_args.iter_mut().rev().nth(num_segments.saturating_sub(1))?;
+ let mut args_inner = match last_segment {
+ Some(it) => it.as_ref().clone(),
+ None => GenericArgs::empty(),
+ };
+ args_inner.has_self_type = true;
+ args_inner.args.insert(0, GenericArg::Type(self_type));
+ *last_segment = Some(Interned::new(args_inner));
+ }
+ }
+ }
+ ast::PathSegmentKind::CrateKw => {
+ kind = PathKind::Crate;
+ break;
+ }
+ ast::PathSegmentKind::SelfKw => {
+                // don't break out if `self` is the last segment of a path; this means we got a
+                // use tree like `foo::{self}` which we want to resolve as `foo`
+ if !segments.is_empty() {
+ kind = PathKind::Super(0);
+ break;
+ }
+ }
+ ast::PathSegmentKind::SuperKw => {
+ let nested_super_count = if let PathKind::Super(n) = kind { n } else { 0 };
+ kind = PathKind::Super(nested_super_count + 1);
+ }
+ }
+ path = match qualifier(&path) {
+ Some(it) => it,
+ None => break,
+ };
+ }
+ segments.reverse();
+ generic_args.reverse();
+
+ if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
+ // plain empty paths don't exist, this means we got a single `self` segment as our path
+ kind = PathKind::Super(0);
+ }
+
+    // handle local_inner_macros:
+ // Basically, even in rustc it is quite hacky:
+ // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
+ // We follow what it did anyway :)
+ if segments.len() == 1 && kind == PathKind::Plain {
+ if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
+ if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
+ kind = PathKind::DollarCrate(crate_id);
+ }
+ }
+ }
+
+ let mod_path = Interned::new(ModPath::from_segments(kind, segments));
+ return Some(Path { type_anchor, mod_path, generic_args: generic_args.into() });
+
+ fn qualifier(path: &ast::Path) -> Option<ast::Path> {
+ if let Some(q) = path.qualifier() {
+ return Some(q);
+ }
+        // FIXME: this bottom-up traversal is not too precise.
+        // Should we instead do a top-down analysis, recording results?
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
+ let use_tree = use_tree_list.parent_use_tree();
+ use_tree.path()
+ }
+}
+
+pub(super) fn lower_generic_args(
+ lower_ctx: &LowerCtx<'_>,
+ node: ast::GenericArgList,
+) -> Option<GenericArgs> {
+ let mut args = Vec::new();
+ let mut bindings = Vec::new();
+ for generic_arg in node.generic_args() {
+ match generic_arg {
+ ast::GenericArg::TypeArg(type_arg) => {
+ let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty());
+ args.push(GenericArg::Type(type_ref));
+ }
+ ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
+ if let Some(name_ref) = assoc_type_arg.name_ref() {
+ let name = name_ref.as_name();
+ let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
+ let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
+ l.bounds()
+ .map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it)))
+ .collect()
+ } else {
+ Vec::new()
+ };
+ bindings.push(AssociatedTypeBinding { name, type_ref, bounds });
+ }
+ }
+ ast::GenericArg::LifetimeArg(lifetime_arg) => {
+ if let Some(lifetime) = lifetime_arg.lifetime() {
+ let lifetime_ref = LifetimeRef::new(&lifetime);
+ args.push(GenericArg::Lifetime(lifetime_ref))
+ }
+ }
+ ast::GenericArg::ConstArg(arg) => {
+ let arg = ConstScalarOrPath::from_expr_opt(arg.expr());
+ args.push(GenericArg::Const(arg))
+ }
+ }
+ }
+
+ if args.is_empty() && bindings.is_empty() {
+ return None;
+ }
+ Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: false })
+}
+
+/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
+/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
+fn lower_generic_args_from_fn_path(
+ ctx: &LowerCtx<'_>,
+ params: Option<ast::ParamList>,
+ ret_type: Option<ast::RetType>,
+) -> Option<GenericArgs> {
+ let mut args = Vec::new();
+ let mut bindings = Vec::new();
+ let params = params?;
+ let mut param_types = Vec::new();
+ for param in params.params() {
+ let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
+ param_types.push(type_ref);
+ }
+ let arg = GenericArg::Type(TypeRef::Tuple(param_types));
+ args.push(arg);
+ if let Some(ret_type) = ret_type {
+ let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
+ bindings.push(AssociatedTypeBinding {
+ name: name![Output],
+ type_ref: Some(type_ref),
+ bounds: Vec::new(),
+ });
+ } else {
+ // -> ()
+ let type_ref = TypeRef::Tuple(Vec::new());
+ bindings.push(AssociatedTypeBinding {
+ name: name![Output],
+ type_ref: Some(type_ref),
+ bounds: Vec::new(),
+ });
+ }
+ Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true })
+}
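`lower_generic_args_from_fn_path` above is the piece that turns the parenthesized `Fn(X, Y) -> Z` notation into one tuple type argument plus an `Output = Z` associated-type binding, with a missing return type treated as `()`. A hedged, self-contained sketch of that shape using plain strings instead of `TypeRef`:

    #[derive(Debug, PartialEq)]
    struct MiniGenericArgs {
        args: Vec<String>,               // stands in for `GenericArg::Type(..)`
        bindings: Vec<(String, String)>, // stands in for `AssociatedTypeBinding`
        desugared_from_fn: bool,
    }

    fn desugar_fn_path(params: &[&str], ret: Option<&str>) -> MiniGenericArgs {
        // `Fn(X, Y)` contributes a single tuple argument `(X, Y)` ...
        let tuple = format!("({})", params.join(", "));
        // ... and the return type becomes an `Output` binding, defaulting to `()`.
        let output = ret.unwrap_or("()").to_string();
        MiniGenericArgs {
            args: vec![tuple],
            bindings: vec![("Output".to_string(), output)],
            desugared_from_fn: true,
        }
    }

    fn main() {
        let got = desugar_fn_path(&["X", "Y"], Some("Z"));
        assert_eq!(got.args, ["(X, Y)".to_string()]);
        assert_eq!(got.bindings, [("Output".to_string(), "Z".to_string())]);
        assert!(got.desugared_from_fn);
    }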
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
new file mode 100644
index 000000000..bf5bf10c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
@@ -0,0 +1,95 @@
+//! In Rust, it is possible to have a value, a type and a macro with the same
+//! name without conflicts.
+//!
+//! `PerNs` (per namespace) captures this.
+
+use crate::{item_scope::ItemInNs, visibility::Visibility, MacroId, ModuleDefId};
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct PerNs {
+ pub types: Option<(ModuleDefId, Visibility)>,
+ pub values: Option<(ModuleDefId, Visibility)>,
+ pub macros: Option<(MacroId, Visibility)>,
+}
+
+impl Default for PerNs {
+ fn default() -> Self {
+ PerNs { types: None, values: None, macros: None }
+ }
+}
+
+impl PerNs {
+ pub fn none() -> PerNs {
+ PerNs { types: None, values: None, macros: None }
+ }
+
+ pub fn values(t: ModuleDefId, v: Visibility) -> PerNs {
+ PerNs { types: None, values: Some((t, v)), macros: None }
+ }
+
+ pub fn types(t: ModuleDefId, v: Visibility) -> PerNs {
+ PerNs { types: Some((t, v)), values: None, macros: None }
+ }
+
+ pub fn both(types: ModuleDefId, values: ModuleDefId, v: Visibility) -> PerNs {
+ PerNs { types: Some((types, v)), values: Some((values, v)), macros: None }
+ }
+
+ pub fn macros(macro_: MacroId, v: Visibility) -> PerNs {
+ PerNs { types: None, values: None, macros: Some((macro_, v)) }
+ }
+
+ pub fn is_none(&self) -> bool {
+ self.types.is_none() && self.values.is_none() && self.macros.is_none()
+ }
+
+ pub fn take_types(self) -> Option<ModuleDefId> {
+ self.types.map(|it| it.0)
+ }
+
+ pub fn take_types_vis(self) -> Option<(ModuleDefId, Visibility)> {
+ self.types
+ }
+
+ pub fn take_values(self) -> Option<ModuleDefId> {
+ self.values.map(|it| it.0)
+ }
+
+ pub fn take_macros(self) -> Option<MacroId> {
+ self.macros.map(|it| it.0)
+ }
+
+ pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
+ let _p = profile::span("PerNs::filter_visibility");
+ PerNs {
+ types: self.types.filter(|(_, v)| f(*v)),
+ values: self.values.filter(|(_, v)| f(*v)),
+ macros: self.macros.filter(|(_, v)| f(*v)),
+ }
+ }
+
+ pub fn with_visibility(self, vis: Visibility) -> PerNs {
+ PerNs {
+ types: self.types.map(|(it, _)| (it, vis)),
+ values: self.values.map(|(it, _)| (it, vis)),
+ macros: self.macros.map(|(it, _)| (it, vis)),
+ }
+ }
+
+ pub fn or(self, other: PerNs) -> PerNs {
+ PerNs {
+ types: self.types.or(other.types),
+ values: self.values.or(other.values),
+ macros: self.macros.or(other.macros),
+ }
+ }
+
+ pub fn iter_items(self) -> impl Iterator<Item = ItemInNs> {
+ let _p = profile::span("PerNs::iter_items");
+ self.types
+ .map(|it| ItemInNs::Types(it.0))
+ .into_iter()
+ .chain(self.values.map(|it| ItemInNs::Values(it.0)).into_iter())
+ .chain(self.macros.map(|it| ItemInNs::Macros(it.0)).into_iter())
+ }
+}
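The module doc above says one name can simultaneously be a value, a type and a macro; the example in the `names_in_scope` doc comment further down (in `resolver.rs`) is a compiling instance of exactly that. Reproduced here as a standalone sketch (compiles, modulo non-camel-case/non-upper-case style warnings):

    macro_rules! t { () => { () } } // macro namespace
    type t = t!();                  // type namespace: `t` is an alias for `()`
    const t: t = t!();              // value namespace: a `()` constant named `t`

    fn main() {
        let _value: t = t; // uses the type alias and the constant
        t!();              // uses the macro
    }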
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
new file mode 100644
index 000000000..3163fa0f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -0,0 +1,912 @@
+//! Name resolution façade.
+use std::{hash::BuildHasherDefault, sync::Arc};
+
+use base_db::CrateId;
+use hir_expand::name::{name, Name};
+use indexmap::IndexMap;
+use rustc_hash::FxHashSet;
+use smallvec::{smallvec, SmallVec};
+
+use crate::{
+ body::scope::{ExprScopes, ScopeId},
+ builtin_type::BuiltinType,
+ db::DefDatabase,
+ expr::{ExprId, LabelId, PatId},
+ generics::{GenericParams, TypeOrConstParamData},
+ intern::Interned,
+ item_scope::{BuiltinShadowMode, BUILTIN_SCOPE},
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
+ FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId,
+ StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, VariantId,
+};
+
+#[derive(Debug, Clone)]
+pub struct Resolver {
+ /// The stack of scopes, where the inner-most scope is the last item.
+ ///
+    /// When using it, you generally want to process the scopes in reverse order;
+    /// there's a `scopes` *method* for that.
+ ///
+ /// Invariant: There exists at least one Scope::ModuleScope at the start of the vec.
+ scopes: Vec<Scope>,
+}
+
+// FIXME how to store these best
+#[derive(Debug, Clone)]
+struct ModuleItemMap {
+ def_map: Arc<DefMap>,
+ module_id: LocalModuleId,
+}
+
+#[derive(Debug, Clone)]
+struct ExprScope {
+ owner: DefWithBodyId,
+ expr_scopes: Arc<ExprScopes>,
+ scope_id: ScopeId,
+}
+
+#[derive(Debug, Clone)]
+enum Scope {
+ /// All the items and imported names of a module
+ ModuleScope(ModuleItemMap),
+ /// Brings the generic parameters of an item into scope
+ GenericParams { def: GenericDefId, params: Interned<GenericParams> },
+ /// Brings `Self` in `impl` block into scope
+ ImplDefScope(ImplId),
+ /// Brings `Self` in enum, struct and union definitions into scope
+ AdtScope(AdtId),
+ /// Local bindings
+ ExprScope(ExprScope),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TypeNs {
+ SelfType(ImplId),
+ GenericParam(TypeParamId),
+ AdtId(AdtId),
+ AdtSelfType(AdtId),
+    // Yup, enum variants are added to the types ns, but any usage of a variant
+    // as a type is an error.
+ EnumVariantId(EnumVariantId),
+ TypeAliasId(TypeAliasId),
+ BuiltinType(BuiltinType),
+ TraitId(TraitId),
+    // Modules belong to the type ns, but the resolver is used when all module
+    // paths are fully resolved.
+ // ModuleId(ModuleId)
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ResolveValueResult {
+ ValueNs(ValueNs),
+ Partial(TypeNs, usize),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ValueNs {
+ ImplSelf(ImplId),
+ LocalBinding(PatId),
+ FunctionId(FunctionId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+ StructId(StructId),
+ EnumVariantId(EnumVariantId),
+ GenericParam(ConstParamId),
+}
+
+impl Resolver {
+ /// Resolve known trait from std, like `std::futures::Future`
+ pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<TraitId> {
+ let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
+ match res {
+ ModuleDefId::TraitId(it) => Some(it),
+ _ => None,
+ }
+ }
+
+ /// Resolve known struct from std, like `std::boxed::Box`
+ pub fn resolve_known_struct(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<StructId> {
+ let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
+ match res {
+ ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ /// Resolve known enum from std, like `std::result::Result`
+ pub fn resolve_known_enum(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<EnumId> {
+ let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
+ match res {
+ ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ fn scopes(&self) -> impl Iterator<Item = &Scope> {
+ self.scopes.iter().rev()
+ }
+
+ fn resolve_module_path(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> PerNs {
+ let (item_map, module) = self.module_scope();
+ let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
+ if segment_index.is_some() {
+ return PerNs::none();
+ }
+ module_res
+ }
+
+ pub fn resolve_module_path_in_items(&self, db: &dyn DefDatabase, path: &ModPath) -> PerNs {
+ self.resolve_module_path(db, path, BuiltinShadowMode::Module)
+ }
+
+ // FIXME: This shouldn't exist
+ pub fn resolve_module_path_in_trait_assoc_items(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<PerNs> {
+ let (item_map, module) = self.module_scope();
+ let (module_res, idx) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
+ match module_res.take_types()? {
+ ModuleDefId::TraitId(it) => {
+ let idx = idx?;
+ let unresolved = &path.segments()[idx..];
+ let assoc = match unresolved {
+ [it] => it,
+ _ => return None,
+ };
+ let &(_, assoc) = db.trait_data(it).items.iter().find(|(n, _)| n == assoc)?;
+ Some(match assoc {
+ AssocItemId::FunctionId(it) => PerNs::values(it.into(), Visibility::Public),
+ AssocItemId::ConstId(it) => PerNs::values(it.into(), Visibility::Public),
+ AssocItemId::TypeAliasId(it) => PerNs::types(it.into(), Visibility::Public),
+ })
+ }
+ _ => None,
+ }
+ }
+
+ pub fn resolve_path_in_type_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<(TypeNs, Option<usize>)> {
+ let first_name = path.segments().first()?;
+ let skip_to_mod = path.kind != PathKind::Plain;
+ for scope in self.scopes() {
+ match scope {
+ Scope::ExprScope(_) => continue,
+ Scope::GenericParams { .. } | Scope::ImplDefScope(_) if skip_to_mod => continue,
+
+ Scope::GenericParams { params, def } => {
+ if let Some(id) = params.find_type_by_name(first_name, *def) {
+ let idx = if path.segments().len() == 1 { None } else { Some(1) };
+ return Some((TypeNs::GenericParam(id), idx));
+ }
+ }
+ Scope::ImplDefScope(impl_) => {
+ if first_name == &name![Self] {
+ let idx = if path.segments().len() == 1 { None } else { Some(1) };
+ return Some((TypeNs::SelfType(*impl_), idx));
+ }
+ }
+ Scope::AdtScope(adt) => {
+ if first_name == &name![Self] {
+ let idx = if path.segments().len() == 1 { None } else { Some(1) };
+ return Some((TypeNs::AdtSelfType(*adt), idx));
+ }
+ }
+ Scope::ModuleScope(m) => {
+ if let Some(res) = m.resolve_path_in_type_ns(db, path) {
+ return Some(res);
+ }
+ }
+ }
+ }
+ None
+ }
+
+ pub fn resolve_path_in_type_ns_fully(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<TypeNs> {
+ let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?;
+ if unresolved.is_some() {
+ return None;
+ }
+ Some(res)
+ }
+
+ pub fn resolve_visibility(
+ &self,
+ db: &dyn DefDatabase,
+ visibility: &RawVisibility,
+ ) -> Option<Visibility> {
+ match visibility {
+ RawVisibility::Module(_) => {
+ let (item_map, module) = self.module_scope();
+ item_map.resolve_visibility(db, module, visibility)
+ }
+ RawVisibility::Public => Some(Visibility::Public),
+ }
+ }
+
+ pub fn resolve_path_in_value_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<ResolveValueResult> {
+ let n_segments = path.segments().len();
+ let tmp = name![self];
+ let first_name = if path.is_self() { &tmp } else { path.segments().first()? };
+ let skip_to_mod = path.kind != PathKind::Plain && !path.is_self();
+ for scope in self.scopes() {
+ match scope {
+ Scope::AdtScope(_)
+ | Scope::ExprScope(_)
+ | Scope::GenericParams { .. }
+ | Scope::ImplDefScope(_)
+ if skip_to_mod =>
+ {
+ continue
+ }
+
+ Scope::ExprScope(scope) if n_segments <= 1 => {
+ let entry = scope
+ .expr_scopes
+ .entries(scope.scope_id)
+ .iter()
+ .find(|entry| entry.name() == first_name);
+
+ if let Some(e) = entry {
+ return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat())));
+ }
+ }
+ Scope::ExprScope(_) => continue,
+
+ Scope::GenericParams { params, def } if n_segments > 1 => {
+ if let Some(id) = params.find_type_by_name(first_name, *def) {
+ let ty = TypeNs::GenericParam(id);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ }
+ }
+ Scope::GenericParams { params, def } if n_segments == 1 => {
+ if let Some(id) = params.find_const_by_name(first_name, *def) {
+ let val = ValueNs::GenericParam(id);
+ return Some(ResolveValueResult::ValueNs(val));
+ }
+ }
+ Scope::GenericParams { .. } => continue,
+
+ Scope::ImplDefScope(impl_) => {
+ if first_name == &name![Self] {
+ if n_segments > 1 {
+ let ty = TypeNs::SelfType(*impl_);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ } else {
+ return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(*impl_)));
+ }
+ }
+ }
+ Scope::AdtScope(adt) => {
+ if n_segments == 1 {
+ // bare `Self` doesn't work in the value namespace in a struct/enum definition
+ continue;
+ }
+ if first_name == &name![Self] {
+ let ty = TypeNs::AdtSelfType(*adt);
+ return Some(ResolveValueResult::Partial(ty, 1));
+ }
+ }
+
+ Scope::ModuleScope(m) => {
+ if let Some(def) = m.resolve_path_in_value_ns(db, path) {
+ return Some(def);
+ }
+ }
+ }
+ }
+
+ // If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back
+ // to resolving to the primitive type, to allow this to still work in the presence of
+ // `use core::u16;`.
+ if path.kind == PathKind::Plain && path.segments().len() > 1 {
+ match BuiltinType::by_name(&path.segments()[0]) {
+ Some(builtin) => {
+ return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1));
+ }
+ None => {}
+ }
+ }
+
+ None
+ }
+
+ pub fn resolve_path_in_value_ns_fully(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<ValueNs> {
+ match self.resolve_path_in_value_ns(db, path)? {
+ ResolveValueResult::ValueNs(it) => Some(it),
+ ResolveValueResult::Partial(..) => None,
+ }
+ }
+
+ pub fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroId> {
+ let (item_map, module) = self.module_scope();
+ item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
+ }
+
+ /// Returns a set of names available in the current scope.
+ ///
+ /// Note that this is a somewhat fuzzy concept -- internally, the compiler
+    /// doesn't necessarily follow a strict scoping discipline. Rather, it just
+    /// tells, for each ident, what it resolves to.
+ ///
+    /// A good example is something like `str::from_utf8`. From the scopes' point
+    /// of view, this code is erroneous -- both the `str` module and the `str` type
+    /// occupy the same type namespace.
+ ///
+ /// We don't try to model that super-correctly -- this functionality is
+ /// primarily exposed for completions.
+ ///
+ /// Note that in Rust one name can be bound to several items:
+ ///
+ /// ```
+ /// macro_rules! t { () => (()) }
+ /// type t = t!();
+ /// const t: t = t!()
+ /// ```
+ ///
+ /// That's why we return a multimap.
+ ///
+ /// The shadowing is accounted for: in
+ ///
+ /// ```
+ /// let x = 92;
+ /// {
+ /// let x = 92;
+ /// $0
+ /// }
+ /// ```
+ ///
+ /// there will be only one entry for `x` in the result.
+ ///
+ /// The result is ordered *roughly* from the innermost scope to the
+ /// outermost: when the name is introduced in two namespaces in two scopes,
+ /// we use the position of the first scope.
+ pub fn names_in_scope(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> FxIndexMap<Name, SmallVec<[ScopeDef; 1]>> {
+ let mut res = ScopeNames::default();
+ for scope in self.scopes() {
+ scope.process_names(&mut res, db);
+ }
+ res.map
+ }
+
+ pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> {
+ let mut traits = FxHashSet::default();
+ for scope in self.scopes() {
+ match scope {
+ Scope::ModuleScope(m) => {
+ if let Some(prelude) = m.def_map.prelude() {
+ let prelude_def_map = prelude.def_map(db);
+ traits.extend(prelude_def_map[prelude.local_id].scope.traits());
+ }
+ traits.extend(m.def_map[m.module_id].scope.traits());
+
+ // Add all traits that are in scope because of the containing DefMaps
+ m.def_map.with_ancestor_maps(db, m.module_id, &mut |def_map, module| {
+ if let Some(prelude) = def_map.prelude() {
+ let prelude_def_map = prelude.def_map(db);
+ traits.extend(prelude_def_map[prelude.local_id].scope.traits());
+ }
+ traits.extend(def_map[module].scope.traits());
+ None::<()>
+ });
+ }
+ &Scope::ImplDefScope(impl_) => {
+ if let Some(target_trait) = &db.impl_data(impl_).target_trait {
+ if let Some(TypeNs::TraitId(trait_)) =
+ self.resolve_path_in_type_ns_fully(db, target_trait.path.mod_path())
+ {
+ traits.insert(trait_);
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+ traits
+ }
+
+ fn module_scope(&self) -> (&DefMap, LocalModuleId) {
+ self.scopes()
+ .find_map(|scope| match scope {
+ Scope::ModuleScope(m) => Some((&*m.def_map, m.module_id)),
+ _ => None,
+ })
+ .expect("module scope invariant violated")
+ }
+
+ pub fn module(&self) -> ModuleId {
+ let (def_map, local_id) = self.module_scope();
+ def_map.module_id(local_id)
+ }
+
+ pub fn krate(&self) -> CrateId {
+ self.def_map().krate()
+ }
+
+ pub fn def_map(&self) -> &DefMap {
+ self.scopes
+ .get(0)
+ .and_then(|scope| match scope {
+ Scope::ModuleScope(m) => Some(&m.def_map),
+ _ => None,
+ })
+ .expect("module scope invariant violated")
+ }
+
+ pub fn where_predicates_in_scope(
+ &self,
+ ) -> impl Iterator<Item = &crate::generics::WherePredicate> {
+ self.scopes()
+ .filter_map(|scope| match scope {
+ Scope::GenericParams { params, .. } => Some(params),
+ _ => None,
+ })
+ .flat_map(|params| params.where_predicates.iter())
+ }
+
+ pub fn generic_def(&self) -> Option<GenericDefId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::GenericParams { def, .. } => Some(*def),
+ _ => None,
+ })
+ }
+
+ pub fn body_owner(&self) -> Option<DefWithBodyId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::ExprScope(it) => Some(it.owner),
+ _ => None,
+ })
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ScopeDef {
+ ModuleDef(ModuleDefId),
+ Unknown,
+ ImplSelfType(ImplId),
+ AdtSelfType(AdtId),
+ GenericParam(GenericParamId),
+ Local(PatId),
+ Label(LabelId),
+}
+
+impl Scope {
+ fn process_names(&self, acc: &mut ScopeNames, db: &dyn DefDatabase) {
+ match self {
+ Scope::ModuleScope(m) => {
+ // FIXME: should we provide `self` here?
+ // f(
+ // Name::self_param(),
+ // PerNs::types(Resolution::Def {
+ // def: m.module.into(),
+ // }),
+ // );
+ m.def_map[m.module_id].scope.entries().for_each(|(name, def)| {
+ acc.add_per_ns(name, def);
+ });
+ m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| {
+ macs.iter().for_each(|&mac| {
+ acc.add(
+ name,
+ ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))),
+ );
+ })
+ });
+ m.def_map.extern_prelude().for_each(|(name, &def)| {
+ acc.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def)));
+ });
+ BUILTIN_SCOPE.iter().for_each(|(name, &def)| {
+ acc.add_per_ns(name, def);
+ });
+ if let Some(prelude) = m.def_map.prelude() {
+ let prelude_def_map = prelude.def_map(db);
+ for (name, def) in prelude_def_map[prelude.local_id].scope.entries() {
+ acc.add_per_ns(name, def)
+ }
+ }
+ }
+ Scope::GenericParams { params, def: parent } => {
+ let parent = *parent;
+ for (local_id, param) in params.type_or_consts.iter() {
+ if let Some(name) = &param.name() {
+ let id = TypeOrConstParamId { parent, local_id };
+ let data = &db.generic_params(parent).type_or_consts[local_id];
+ acc.add(
+ name,
+ ScopeDef::GenericParam(match data {
+ TypeOrConstParamData::TypeParamData(_) => {
+ GenericParamId::TypeParamId(TypeParamId::from_unchecked(id))
+ }
+ TypeOrConstParamData::ConstParamData(_) => {
+ GenericParamId::ConstParamId(ConstParamId::from_unchecked(id))
+ }
+ }),
+ );
+ }
+ }
+ for (local_id, param) in params.lifetimes.iter() {
+ let id = LifetimeParamId { parent, local_id };
+ acc.add(&param.name, ScopeDef::GenericParam(id.into()))
+ }
+ }
+ Scope::ImplDefScope(i) => {
+ acc.add(&name![Self], ScopeDef::ImplSelfType(*i));
+ }
+ Scope::AdtScope(i) => {
+ acc.add(&name![Self], ScopeDef::AdtSelfType(*i));
+ }
+ Scope::ExprScope(scope) => {
+ if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) {
+ acc.add(&name, ScopeDef::Label(label))
+ }
+ scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
+ acc.add_local(e.name(), e.pat());
+ });
+ }
+ }
+ }
+}
+
+// needs arbitrary_self_types to be a method... or maybe move to the def?
+pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
+ let scopes = db.expr_scopes(owner);
+ resolver_for_scope(db, owner, scopes.scope_for(expr_id))
+}
+
+pub fn resolver_for_scope(
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ scope_id: Option<ScopeId>,
+) -> Resolver {
+ let mut r = owner.resolver(db);
+ let scopes = db.expr_scopes(owner);
+ let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
+ r.scopes.reserve(scope_chain.len());
+
+ for scope in scope_chain.into_iter().rev() {
+ if let Some(block) = scopes.block(scope) {
+ if let Some(def_map) = db.block_def_map(block) {
+ let root = def_map.root();
+ r = r.push_module_scope(def_map, root);
+ // FIXME: This adds as many module scopes as there are blocks, but resolving in each
+ // already traverses all parents, so this is O(n²). I think we could only store the
+ // innermost module scope instead?
+ }
+ }
+
+ r = r.push_expr_scope(owner, Arc::clone(&scopes), scope);
+ }
+ r
+}
+
+impl Resolver {
+ fn push_scope(mut self, scope: Scope) -> Resolver {
+ self.scopes.push(scope);
+ self
+ }
+
+ fn push_generic_params_scope(self, db: &dyn DefDatabase, def: GenericDefId) -> Resolver {
+ let params = db.generic_params(def);
+ self.push_scope(Scope::GenericParams { def, params })
+ }
+
+ fn push_impl_def_scope(self, impl_def: ImplId) -> Resolver {
+ self.push_scope(Scope::ImplDefScope(impl_def))
+ }
+
+ fn push_module_scope(self, def_map: Arc<DefMap>, module_id: LocalModuleId) -> Resolver {
+ self.push_scope(Scope::ModuleScope(ModuleItemMap { def_map, module_id }))
+ }
+
+ fn push_expr_scope(
+ self,
+ owner: DefWithBodyId,
+ expr_scopes: Arc<ExprScopes>,
+ scope_id: ScopeId,
+ ) -> Resolver {
+ self.push_scope(Scope::ExprScope(ExprScope { owner, expr_scopes, scope_id }))
+ }
+}
+
+impl ModuleItemMap {
+ fn resolve_path_in_value_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<ResolveValueResult> {
+ let (module_def, idx) =
+ self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
+ match idx {
+ None => {
+ let value = to_value_ns(module_def)?;
+ Some(ResolveValueResult::ValueNs(value))
+ }
+ Some(idx) => {
+ let ty = match module_def.take_types()? {
+ ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
+ ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+ ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
+ ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
+
+ ModuleDefId::ModuleId(_)
+ | ModuleDefId::FunctionId(_)
+ | ModuleDefId::EnumVariantId(_)
+ | ModuleDefId::ConstId(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::StaticId(_) => return None,
+ };
+ Some(ResolveValueResult::Partial(ty, idx))
+ }
+ }
+ }
+
+ fn resolve_path_in_type_ns(
+ &self,
+ db: &dyn DefDatabase,
+ path: &ModPath,
+ ) -> Option<(TypeNs, Option<usize>)> {
+ let (module_def, idx) =
+ self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
+ let res = to_type_ns(module_def)?;
+ Some((res, idx))
+ }
+}
+
+fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> {
+ let res = match per_ns.take_values()? {
+ ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it),
+ ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it),
+ ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it),
+ ModuleDefId::ConstId(it) => ValueNs::ConstId(it),
+ ModuleDefId::StaticId(it) => ValueNs::StaticId(it),
+
+ ModuleDefId::AdtId(AdtId::EnumId(_) | AdtId::UnionId(_))
+ | ModuleDefId::TraitId(_)
+ | ModuleDefId::TypeAliasId(_)
+ | ModuleDefId::BuiltinType(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::ModuleId(_) => return None,
+ };
+ Some(res)
+}
+
+fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> {
+ let res = match per_ns.take_types()? {
+ ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
+ ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it),
+
+ ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it),
+ ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it),
+
+ ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
+
+ ModuleDefId::FunctionId(_)
+ | ModuleDefId::ConstId(_)
+ | ModuleDefId::MacroId(_)
+ | ModuleDefId::StaticId(_)
+ | ModuleDefId::ModuleId(_) => return None,
+ };
+ Some(res)
+}
+
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+#[derive(Default)]
+struct ScopeNames {
+ map: FxIndexMap<Name, SmallVec<[ScopeDef; 1]>>,
+}
+
+impl ScopeNames {
+ fn add(&mut self, name: &Name, def: ScopeDef) {
+ let set = self.map.entry(name.clone()).or_default();
+ if !set.contains(&def) {
+ set.push(def)
+ }
+ }
+ fn add_per_ns(&mut self, name: &Name, def: PerNs) {
+ if let &Some((ty, _)) = &def.types {
+ self.add(name, ScopeDef::ModuleDef(ty))
+ }
+ if let &Some((def, _)) = &def.values {
+ self.add(name, ScopeDef::ModuleDef(def))
+ }
+ if let &Some((mac, _)) = &def.macros {
+ self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)))
+ }
+ if def.is_none() {
+ self.add(name, ScopeDef::Unknown)
+ }
+ }
+ fn add_local(&mut self, name: &Name, pat: PatId) {
+ let set = self.map.entry(name.clone()).or_default();
+ // XXX: hack, account for local (and only local) shadowing.
+ //
+        // This should be somewhat more principled and take namespaces into
+        // account, but, alas, scoping rules are a hoax. The `str` type and the
+        // `str` module can both be available in the same scope.
+ if set.iter().any(|it| matches!(it, &ScopeDef::Local(_))) {
+ cov_mark::hit!(shadowing_shows_single_completion);
+ return;
+ }
+ set.push(ScopeDef::Local(pat))
+ }
+}
+
+pub trait HasResolver: Copy {
+ /// Builds a resolver for type references inside this def.
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver;
+}
+
+impl HasResolver for ModuleId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ let mut def_map = self.def_map(db);
+ let mut modules: SmallVec<[_; 2]> = smallvec![(def_map.clone(), self.local_id)];
+ while let Some(parent) = def_map.parent() {
+ def_map = parent.def_map(db);
+ modules.push((def_map.clone(), parent.local_id));
+ }
+ let mut resolver = Resolver { scopes: Vec::with_capacity(modules.len()) };
+ for (def_map, module) in modules.into_iter().rev() {
+ resolver = resolver.push_module_scope(def_map, module);
+ }
+ resolver
+ }
+}
+
+impl HasResolver for TraitId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
+impl<T: Into<AdtId> + Copy> HasResolver for T {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ let def = self.into();
+ def.module(db)
+ .resolver(db)
+ .push_generic_params_scope(db, def.into())
+ .push_scope(Scope::AdtScope(def))
+ }
+}
+
+impl HasResolver for FunctionId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
+impl HasResolver for ConstId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for StaticId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for TypeAliasId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
+ }
+}
+
+impl HasResolver for ImplId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db)
+ .container
+ .resolver(db)
+ .push_generic_params_scope(db, self.into())
+ .push_impl_def_scope(self)
+ }
+}
+
+impl HasResolver for ExternBlockId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ // Same as parent's
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for DefWithBodyId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ DefWithBodyId::ConstId(c) => c.resolver(db),
+ DefWithBodyId::FunctionId(f) => f.resolver(db),
+ DefWithBodyId::StaticId(s) => s.resolver(db),
+ }
+ }
+}
+
+impl HasResolver for ItemContainerId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ ItemContainerId::ModuleId(it) => it.resolver(db),
+ ItemContainerId::TraitId(it) => it.resolver(db),
+ ItemContainerId::ImplId(it) => it.resolver(db),
+ ItemContainerId::ExternBlockId(it) => it.resolver(db),
+ }
+ }
+}
+
+impl HasResolver for GenericDefId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ GenericDefId::FunctionId(inner) => inner.resolver(db),
+ GenericDefId::AdtId(adt) => adt.resolver(db),
+ GenericDefId::TraitId(inner) => inner.resolver(db),
+ GenericDefId::TypeAliasId(inner) => inner.resolver(db),
+ GenericDefId::ImplId(inner) => inner.resolver(db),
+ GenericDefId::EnumVariantId(inner) => inner.parent.resolver(db),
+ GenericDefId::ConstId(inner) => inner.resolver(db),
+ }
+ }
+}
+
+impl HasResolver for VariantId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ VariantId::EnumVariantId(it) => it.parent.resolver(db),
+ VariantId::StructId(it) => it.resolver(db),
+ VariantId::UnionId(it) => it.resolver(db),
+ }
+ }
+}
+
+impl HasResolver for MacroId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ MacroId::Macro2Id(it) => it.resolver(db),
+ MacroId::MacroRulesId(it) => it.resolver(db),
+ MacroId::ProcMacroId(it) => it.resolver(db),
+ }
+ }
+}
+
+impl HasResolver for Macro2Id {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for ProcMacroId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for MacroRulesId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
new file mode 100644
index 000000000..f69356cac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
@@ -0,0 +1,85 @@
+//! Utilities for mapping between hir IDs and the surface syntax.
+
+use hir_expand::InFile;
+use la_arena::ArenaMap;
+use syntax::ast;
+
+use crate::{
+ db::DefDatabase, item_tree::ItemTreeNode, AssocItemLoc, ItemLoc, Macro2Loc, MacroRulesLoc,
+ ProcMacroLoc,
+};
+
+pub trait HasSource {
+ type Value;
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value>;
+}
+
+impl<N: ItemTreeNode> HasSource for AssocItemLoc<N> {
+ type Value = N::Source;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+impl<N: ItemTreeNode> HasSource for ItemLoc<N> {
+ type Value = N::Source;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<N::Source> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+impl HasSource for Macro2Loc {
+ type Value = ast::MacroDef;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+impl HasSource for MacroRulesLoc {
+ type Value = ast::MacroRules;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+impl HasSource for ProcMacroLoc {
+ type Value = ast::Fn;
+
+ fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value> {
+ let tree = self.id.item_tree(db);
+ let ast_id_map = db.ast_id_map(self.id.file_id());
+ let root = db.parse_or_expand(self.id.file_id()).unwrap();
+ let node = &tree[self.id.value];
+
+ InFile::new(self.id.file_id(), ast_id_map.get(node.ast_id()).to_node(&root))
+ }
+}
+
+pub trait HasChildSource<ChildId> {
+ type Value;
+ fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<ChildId, Self::Value>>;
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
new file mode 100644
index 000000000..9cdc18d6b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -0,0 +1,245 @@
+//! Database used for testing `hir_def`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{
+ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition,
+ SourceDatabase, Upcast,
+};
+use hir_expand::{db::AstDatabase, InFile};
+use rustc_hash::FxHashSet;
+use syntax::{algo, ast, AstNode};
+
+use crate::{
+ db::DefDatabase,
+ nameres::{DefMap, ModuleSource},
+ src::HasSource,
+ LocalModuleId, Lookup, ModuleDefId, ModuleId,
+};
+
+#[salsa::database(
+ base_db::SourceDatabaseExtStorage,
+ base_db::SourceDatabaseStorage,
+ hir_expand::db::AstDatabaseStorage,
+ crate::db::InternDatabaseStorage,
+ crate::db::DefDatabaseStorage
+)]
+pub(crate) struct TestDB {
+ storage: salsa::Storage<TestDB>,
+ events: Mutex<Option<Vec<salsa::Event>>>,
+}
+
+impl Default for TestDB {
+ fn default() -> Self {
+ let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.set_enable_proc_attr_macros(true);
+ this
+ }
+}
+
+impl Upcast<dyn AstDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+ &*self
+ }
+}
+
+impl Upcast<dyn DefDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+ &*self
+ }
+}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
+impl fmt::Debug for TestDB {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TestDB").finish()
+ }
+}
+
+impl panic::RefUnwindSafe for TestDB {}
+
+impl FileLoader for TestDB {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ FileLoaderDelegate(self).file_text(file_id)
+ }
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ FileLoaderDelegate(self).resolve_path(path)
+ }
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ FileLoaderDelegate(self).relevant_crates(file_id)
+ }
+}
+
+impl TestDB {
+ pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
+ for &krate in self.relevant_crates(file_id).iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (local_id, data) in crate_def_map.modules() {
+ if data.origin.file_id() == Some(file_id) {
+ return crate_def_map.module_id(local_id);
+ }
+ }
+ }
+ panic!("Can't find module for file")
+ }
+
+ pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
+ let file_module = self.module_for_file(position.file_id);
+ let mut def_map = file_module.def_map(self);
+ let module = self.mod_at_position(&def_map, position);
+
+ def_map = match self.block_at_position(&def_map, position) {
+ Some(it) => it,
+ None => return def_map.module_id(module),
+ };
+ loop {
+ let new_map = self.block_at_position(&def_map, position);
+ match new_map {
+ Some(new_block) if !Arc::ptr_eq(&new_block, &def_map) => {
+ def_map = new_block;
+ }
+ _ => {
+ // FIXME: handle `mod` inside block expression
+ return def_map.module_id(def_map.root());
+ }
+ }
+ }
+ }
+
+ /// Finds the smallest/innermost module in `def_map` containing `position`.
+ fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
+ let mut size = None;
+ let mut res = def_map.root();
+ for (module, data) in def_map.modules() {
+ let src = data.definition_source(self);
+ if src.file_id != position.file_id.into() {
+ continue;
+ }
+
+ let range = match src.value {
+ ModuleSource::SourceFile(it) => it.syntax().text_range(),
+ ModuleSource::Module(it) => it.syntax().text_range(),
+ ModuleSource::BlockExpr(it) => it.syntax().text_range(),
+ };
+
+ if !range.contains(position.offset) {
+ continue;
+ }
+
+ let new_size = match size {
+ None => range.len(),
+ Some(size) => {
+ if range.len() < size {
+ range.len()
+ } else {
+ size
+ }
+ }
+ };
+
+ if size != Some(new_size) {
+ cov_mark::hit!(submodule_in_testdb);
+ size = Some(new_size);
+ res = module;
+ }
+ }
+
+ res
+ }
+
+ fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
+ // Find the smallest (innermost) function in `def_map` containing the cursor.
+ let mut size = None;
+ let mut fn_def = None;
+ for (_, module) in def_map.modules() {
+ let file_id = module.definition_source(self).file_id;
+ if file_id != position.file_id.into() {
+ continue;
+ }
+ for decl in module.scope.declarations() {
+ if let ModuleDefId::FunctionId(it) = decl {
+ let range = it.lookup(self).source(self).value.syntax().text_range();
+
+ if !range.contains(position.offset) {
+ continue;
+ }
+
+ let new_size = match size {
+ None => range.len(),
+ Some(size) => {
+ if range.len() < size {
+ range.len()
+ } else {
+ size
+ }
+ }
+ };
+ if size != Some(new_size) {
+ size = Some(new_size);
+ fn_def = Some(it);
+ }
+ }
+ }
+ }
+
+ // Find the innermost block expression that has a `DefMap`.
+ let def_with_body = fn_def?.into();
+ let (_, source_map) = self.body_with_source_map(def_with_body);
+ let scopes = self.expr_scopes(def_with_body);
+ let root = self.parse(position.file_id);
+
+ let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
+ .filter_map(|node| {
+ let block = ast::BlockExpr::cast(node)?;
+ let expr = ast::Expr::from(block);
+ let expr_id = source_map.node_expr(InFile::new(position.file_id.into(), &expr))?;
+ let scope = scopes.scope_for(expr_id).unwrap();
+ Some(scope)
+ });
+
+ for scope in scope_iter {
+ let containing_blocks =
+ scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));
+
+ for block in containing_blocks {
+ if let Some(def_map) = self.block_def_map(block) {
+ return Some(def_map);
+ }
+ }
+ }
+
+ None
+ }
+
+ pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
+ *self.events.lock().unwrap() = Some(Vec::new());
+ f();
+ self.events.lock().unwrap().take().unwrap()
+ }
+
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ let events = self.log(f);
+ events
+ .into_iter()
+ .filter_map(|e| match e.kind {
+ // This is pretty horrible, but `Debug` is the only way to inspect
+ // QueryDescriptor at the moment.
+ salsa::EventKind::WillExecute { database_key } => {
+ Some(format!("{:?}", database_key.debug(self)))
+ }
+ _ => None,
+ })
+ .collect()
+ }
+}
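+
+// Illustrative usage (sketch; `file_item_tree` is just one example of a
+// `DefDatabase` query): tests typically record which salsa queries re-run
+// after an edit, e.g.
+//
+//     let executed = db.log_executed(|| { db.file_item_tree(file_id.into()); });
+//     assert!(executed.iter().any(|it| it.contains("file_item_tree")));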
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/trace.rs b/src/tools/rust-analyzer/crates/hir-def/src/trace.rs
new file mode 100644
index 000000000..6e6ceb8e4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/trace.rs
@@ -0,0 +1,51 @@
+//! Trace is a pretty niche data structure which is used when lowering a CST
+//! into HIR.
+//!
+//! The lowering process calculates two bits of information:
+//! * the lowered syntax itself
+//! * a mapping between lowered syntax and original syntax
+//!
+//! Due to the way salsa works, the mapping is usually hot lava, as it contains
+//! absolute offsets. The `Trace` structure (inspired, at least in name, by
+//! Kotlin's `BindingTrace`) allows using the same code to compute both
+//! projections.
+use la_arena::{Arena, ArenaMap, Idx, RawIdx};
+
+pub(crate) struct Trace<T, V> {
+ arena: Option<Arena<T>>,
+ map: Option<ArenaMap<Idx<T>, V>>,
+ len: u32,
+}
+
+impl<T, V> Trace<T, V> {
+ pub(crate) fn new_for_arena() -> Trace<T, V> {
+ Trace { arena: Some(Arena::default()), map: None, len: 0 }
+ }
+
+ pub(crate) fn new_for_map() -> Trace<T, V> {
+ Trace { arena: None, map: Some(ArenaMap::default()), len: 0 }
+ }
+
+ pub(crate) fn alloc(&mut self, value: impl FnOnce() -> V, data: impl FnOnce() -> T) -> Idx<T> {
+ let id = if let Some(arena) = &mut self.arena {
+ arena.alloc(data())
+ } else {
+ let id = Idx::<T>::from_raw(RawIdx::from(self.len));
+ self.len += 1;
+ id
+ };
+
+ if let Some(map) = &mut self.map {
+ map.insert(id, value());
+ }
+ id
+ }
+
+ pub(crate) fn into_arena(mut self) -> Arena<T> {
+ self.arena.take().unwrap()
+ }
+
+ pub(crate) fn into_map(mut self) -> ArenaMap<Idx<T>, V> {
+ self.map.take().unwrap()
+ }
+}
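+
+// Illustrative usage (sketch; `source_ptr` and `lowered_item` are placeholder
+// names for the real lowering inputs): the same lowering routine can run in
+// "arena mode" to build the lowered items, or in "map mode" to build only the
+// syntax mapping:
+//
+//     let mut trace = Trace::new_for_arena();
+//     let idx = trace.alloc(|| source_ptr.clone(), || lowered_item);
+//     let arena = trace.into_arena();
+//
+// Running the identical code with `Trace::new_for_map()` and finishing with
+// `into_map()` yields the `ArenaMap` keyed by the same `Idx` values instead.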
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
new file mode 100644
index 000000000..924805962
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
@@ -0,0 +1,486 @@
+//! HIR for references to types. Paths in these are not yet resolved. They can
+//! be directly created from an ast::TypeRef, without further queries.
+
+use std::fmt::Write;
+
+use hir_expand::{
+ name::{AsName, Name},
+ AstId,
+};
+use syntax::ast::{self, HasName};
+
+use crate::{
+ body::LowerCtx,
+ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
+ expr::Literal,
+ intern::Interned,
+ path::Path,
+};
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum Mutability {
+ Shared,
+ Mut,
+}
+
+impl Mutability {
+ pub fn from_mutable(mutable: bool) -> Mutability {
+ if mutable {
+ Mutability::Mut
+ } else {
+ Mutability::Shared
+ }
+ }
+
+ pub fn as_keyword_for_ref(self) -> &'static str {
+ match self {
+ Mutability::Shared => "",
+ Mutability::Mut => "mut ",
+ }
+ }
+
+ pub fn as_keyword_for_ptr(self) -> &'static str {
+ match self {
+ Mutability::Shared => "const ",
+ Mutability::Mut => "mut ",
+ }
+ }
+
+ /// Returns `true` if the mutability is [`Mut`].
+ ///
+ /// [`Mut`]: Mutability::Mut
+ #[must_use]
+ pub fn is_mut(&self) -> bool {
+ matches!(self, Self::Mut)
+ }
+
+ /// Returns `true` if the mutability is [`Shared`].
+ ///
+ /// [`Shared`]: Mutability::Shared
+ #[must_use]
+ pub fn is_shared(&self) -> bool {
+ matches!(self, Self::Shared)
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum Rawness {
+ RawPtr,
+ Ref,
+}
+
+impl Rawness {
+ pub fn from_raw(is_raw: bool) -> Rawness {
+ if is_raw {
+ Rawness::RawPtr
+ } else {
+ Rawness::Ref
+ }
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub struct TraitRef {
+ pub path: Path,
+}
+
+impl TraitRef {
+ /// Converts an `ast::PathType` to a `hir::TraitRef`.
+ pub(crate) fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> Option<Self> {
+ // FIXME: Use `Path::from_src`
+ match node {
+ ast::Type::PathType(path) => {
+ path.path().and_then(|it| ctx.lower_path(it)).map(|path| TraitRef { path })
+ }
+ _ => None,
+ }
+ }
+}
+
+/// A reference to a type; compare rustc's `ty::Ty`.
+///
+/// Note: Most users of `TypeRef` that end up in the salsa database intern it using
+/// `Interned<TypeRef>` to save space. But notably, nested `TypeRef`s are not interned, since that
+/// does not seem to save any noticeable amount of memory.
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub enum TypeRef {
+ Never,
+ Placeholder,
+ Tuple(Vec<TypeRef>),
+ Path(Path),
+ RawPtr(Box<TypeRef>, Mutability),
+ Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
+ // FIXME: for full const generics, the latter element (length) here is going to have to be an
+ // expression that is further lowered later in hir_ty.
+ Array(Box<TypeRef>, ConstScalarOrPath),
+ Slice(Box<TypeRef>),
+ /// A fn pointer. Last element of the vector is the return type.
+ Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/),
+ ImplTrait(Vec<Interned<TypeBound>>),
+ DynTrait(Vec<Interned<TypeBound>>),
+ Macro(AstId<ast::MacroCall>),
+ Error,
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub struct LifetimeRef {
+ pub name: Name,
+}
+
+impl LifetimeRef {
+ pub(crate) fn new_name(name: Name) -> Self {
+ LifetimeRef { name }
+ }
+
+ pub(crate) fn new(lifetime: &ast::Lifetime) -> Self {
+ LifetimeRef { name: Name::new_lifetime(lifetime) }
+ }
+
+ pub fn missing() -> LifetimeRef {
+ LifetimeRef { name: Name::missing() }
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub enum TypeBound {
+ Path(Path, TraitBoundModifier),
+ ForLifetime(Box<[Name]>, Path),
+ Lifetime(LifetimeRef),
+ Error,
+}
+
+/// A modifier on a bound; currently this is only used for `?Sized`, where the
+/// modifier is `Maybe`.
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub enum TraitBoundModifier {
+ None,
+ Maybe,
+}
+
+impl TypeRef {
+ /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
+ pub fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> Self {
+ match node {
+ ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
+ ast::Type::TupleType(inner) => {
+ TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
+ }
+ ast::Type::NeverType(..) => TypeRef::Never,
+ ast::Type::PathType(inner) => {
+ // FIXME: Use `Path::from_src`
+ inner
+ .path()
+ .and_then(|it| ctx.lower_path(it))
+ .map(TypeRef::Path)
+ .unwrap_or(TypeRef::Error)
+ }
+ ast::Type::PtrType(inner) => {
+ let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
+ let mutability = Mutability::from_mutable(inner.mut_token().is_some());
+ TypeRef::RawPtr(Box::new(inner_ty), mutability)
+ }
+ ast::Type::ArrayType(inner) => {
+ // FIXME: This is a hack. We should probably reuse the machinery of
+ // `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
+ // `hir_ty` level, which would allow knowing the type of:
+ // let v: [u8; 2 + 2] = [0u8; 4];
+ let len = ConstScalarOrPath::from_expr_opt(inner.expr());
+ TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
+ }
+ ast::Type::SliceType(inner) => {
+ TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())))
+ }
+ ast::Type::RefType(inner) => {
+ let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
+ let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
+ let mutability = Mutability::from_mutable(inner.mut_token().is_some());
+ TypeRef::Reference(Box::new(inner_ty), lifetime, mutability)
+ }
+ ast::Type::InferType(_inner) => TypeRef::Placeholder,
+ ast::Type::FnPtrType(inner) => {
+ let ret_ty = inner
+ .ret_type()
+ .and_then(|rt| rt.ty())
+ .map(|it| TypeRef::from_ast(ctx, it))
+ .unwrap_or_else(|| TypeRef::Tuple(Vec::new()));
+ let mut is_varargs = false;
+ let mut params = if let Some(pl) = inner.param_list() {
+ if let Some(param) = pl.params().last() {
+ is_varargs = param.dotdotdot_token().is_some();
+ }
+
+ pl.params()
+ .map(|it| {
+ let type_ref = TypeRef::from_ast_opt(ctx, it.ty());
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(it)) => Some(
+ it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing),
+ ),
+ _ => None,
+ };
+ (name, type_ref)
+ })
+ .collect()
+ } else {
+ Vec::new()
+ };
+ params.push((None, ret_ty));
+ TypeRef::Fn(params, is_varargs)
+ }
+            // `for` types are close enough to the inner type for our purposes for now...
+ ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
+ ast::Type::ImplTraitType(inner) => {
+ TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
+ }
+ ast::Type::DynTraitType(inner) => {
+ TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
+ }
+ ast::Type::MacroType(mt) => match mt.macro_call() {
+ Some(mc) => ctx.ast_id(ctx.db, &mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error),
+ None => TypeRef::Error,
+ },
+ }
+ }
+
+ pub(crate) fn from_ast_opt(ctx: &LowerCtx<'_>, node: Option<ast::Type>) -> Self {
+ match node {
+ Some(node) => TypeRef::from_ast(ctx, node),
+ None => TypeRef::Error,
+ }
+ }
+
+ pub(crate) fn unit() -> TypeRef {
+ TypeRef::Tuple(Vec::new())
+ }
+
+ pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) {
+ go(self, f);
+
+ fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) {
+ f(type_ref);
+ match type_ref {
+ TypeRef::Fn(params, _) => {
+ params.iter().for_each(|(_, param_type)| go(param_type, f))
+ }
+ TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)),
+ TypeRef::RawPtr(type_ref, _)
+ | TypeRef::Reference(type_ref, ..)
+ | TypeRef::Array(type_ref, _)
+ | TypeRef::Slice(type_ref) => go(type_ref, f),
+ TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
+ for bound in bounds {
+ match bound.as_ref() {
+ TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
+ go_path(path, f)
+ }
+ TypeBound::Lifetime(_) | TypeBound::Error => (),
+ }
+ }
+ }
+ TypeRef::Path(path) => go_path(path, f),
+ TypeRef::Never | TypeRef::Placeholder | TypeRef::Macro(_) | TypeRef::Error => {}
+ };
+ }
+
+ fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) {
+ if let Some(type_ref) = path.type_anchor() {
+ go(type_ref, f);
+ }
+ for segment in path.segments().iter() {
+ if let Some(args_and_bindings) = segment.args_and_bindings {
+ for arg in &args_and_bindings.args {
+ match arg {
+ crate::path::GenericArg::Type(type_ref) => {
+ go(type_ref, f);
+ }
+ crate::path::GenericArg::Const(_)
+ | crate::path::GenericArg::Lifetime(_) => {}
+ }
+ }
+ for binding in &args_and_bindings.bindings {
+ if let Some(type_ref) = &binding.type_ref {
+ go(type_ref, f);
+ }
+ for bound in &binding.bounds {
+ match bound.as_ref() {
+ TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
+ go_path(path, f)
+ }
+ TypeBound::Lifetime(_) | TypeBound::Error => (),
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+pub(crate) fn type_bounds_from_ast(
+ lower_ctx: &LowerCtx<'_>,
+ type_bounds_opt: Option<ast::TypeBoundList>,
+) -> Vec<Interned<TypeBound>> {
+ if let Some(type_bounds) = type_bounds_opt {
+ type_bounds.bounds().map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))).collect()
+ } else {
+ vec![]
+ }
+}
+
+impl TypeBound {
+ pub(crate) fn from_ast(ctx: &LowerCtx<'_>, node: ast::TypeBound) -> Self {
+ let lower_path_type = |path_type: ast::PathType| ctx.lower_path(path_type.path()?);
+
+ match node.kind() {
+ ast::TypeBoundKind::PathType(path_type) => {
+ let m = match node.question_mark_token() {
+ Some(_) => TraitBoundModifier::Maybe,
+ None => TraitBoundModifier::None,
+ };
+ lower_path_type(path_type)
+ .map(|p| TypeBound::Path(p, m))
+ .unwrap_or(TypeBound::Error)
+ }
+ ast::TypeBoundKind::ForType(for_type) => {
+ let lt_refs = match for_type.generic_param_list() {
+ Some(gpl) => gpl
+ .lifetime_params()
+ .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(&lt)))
+ .collect(),
+ None => Box::default(),
+ };
+ let path = for_type.ty().and_then(|ty| match ty {
+ ast::Type::PathType(path_type) => lower_path_type(path_type),
+ _ => None,
+ });
+ match path {
+ Some(p) => TypeBound::ForLifetime(lt_refs, p),
+ None => TypeBound::Error,
+ }
+ }
+ ast::TypeBoundKind::Lifetime(lifetime) => {
+ TypeBound::Lifetime(LifetimeRef::new(&lifetime))
+ }
+ }
+ }
+
+ pub fn as_path(&self) -> Option<(&Path, &TraitBoundModifier)> {
+ match self {
+ TypeBound::Path(p, m) => Some((p, m)),
+ TypeBound::ForLifetime(_, p) => Some((p, &TraitBoundModifier::None)),
+ TypeBound::Lifetime(_) | TypeBound::Error => None,
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ConstScalarOrPath {
+ Scalar(ConstScalar),
+ Path(Name),
+}
+
+impl std::fmt::Display for ConstScalarOrPath {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ ConstScalarOrPath::Scalar(s) => s.fmt(f),
+ ConstScalarOrPath::Path(n) => n.fmt(f),
+ }
+ }
+}
+
+impl ConstScalarOrPath {
+ pub(crate) fn from_expr_opt(expr: Option<ast::Expr>) -> Self {
+ match expr {
+ Some(x) => Self::from_expr(x),
+ None => Self::Scalar(ConstScalar::Unknown),
+ }
+ }
+
+ // FIXME: as per the comments on `TypeRef::Array`, this evaluation should not happen at this
+ // parse stage.
+ fn from_expr(expr: ast::Expr) -> Self {
+ match expr {
+ ast::Expr::PathExpr(p) => {
+ match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
+ Some(x) => Self::Path(x.as_name()),
+ None => Self::Scalar(ConstScalar::Unknown),
+ }
+ }
+ ast::Expr::PrefixExpr(prefix_expr) => match prefix_expr.op_kind() {
+ Some(ast::UnaryOp::Neg) => {
+ let unsigned = Self::from_expr_opt(prefix_expr.expr());
+ // Add sign
+ match unsigned {
+ Self::Scalar(ConstScalar::UInt(num)) => {
+ Self::Scalar(ConstScalar::Int(-(num as i128)))
+ }
+ other => other,
+ }
+ }
+ _ => Self::from_expr_opt(prefix_expr.expr()),
+ },
+ ast::Expr::Literal(literal) => Self::Scalar(match literal.kind() {
+ ast::LiteralKind::IntNumber(num) => {
+ num.value().map(ConstScalar::UInt).unwrap_or(ConstScalar::Unknown)
+ }
+ ast::LiteralKind::Char(c) => {
+ c.value().map(ConstScalar::Char).unwrap_or(ConstScalar::Unknown)
+ }
+ ast::LiteralKind::Bool(f) => ConstScalar::Bool(f),
+ _ => ConstScalar::Unknown,
+ }),
+ _ => Self::Scalar(ConstScalar::Unknown),
+ }
+ }
+}
+
+/// A concrete constant value
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ConstScalar {
+ Int(i128),
+ UInt(u128),
+ Bool(bool),
+ Char(char),
+
+ /// Case of an unknown value that rustc might know but we don't
+ // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
+ // constants
+ // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
+ // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
+ Unknown,
+}
+
+impl ConstScalar {
+ pub fn builtin_type(&self) -> BuiltinType {
+ match self {
+ ConstScalar::UInt(_) | ConstScalar::Unknown => BuiltinType::Uint(BuiltinUint::U128),
+ ConstScalar::Int(_) => BuiltinType::Int(BuiltinInt::I128),
+ ConstScalar::Char(_) => BuiltinType::Char,
+ ConstScalar::Bool(_) => BuiltinType::Bool,
+ }
+ }
+}
+
+impl From<Literal> for ConstScalar {
+ fn from(literal: Literal) -> Self {
+ match literal {
+ Literal::Char(c) => Self::Char(c),
+ Literal::Bool(flag) => Self::Bool(flag),
+ Literal::Int(num, _) => Self::Int(num),
+ Literal::Uint(num, _) => Self::UInt(num),
+ _ => Self::Unknown,
+ }
+ }
+}
+
+impl std::fmt::Display for ConstScalar {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ match self {
+ ConstScalar::Int(num) => num.fmt(f),
+ ConstScalar::UInt(num) => num.fmt(f),
+ ConstScalar::Bool(flag) => flag.fmt(f),
+ ConstScalar::Char(c) => write!(f, "'{c}'"),
+ ConstScalar::Unknown => f.write_char('_'),
+ }
+ }
+}
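+
+// Illustrative lowering (sketch): an array length like `[u8; 4]` becomes
+// `ConstScalarOrPath::Scalar(ConstScalar::UInt(4))`, a named constant like
+// `[u8; LEN]` becomes `ConstScalarOrPath::Path(LEN)`, and anything more
+// complex falls back to `ConstScalar::Unknown` until proper const lowering
+// exists (see the FIXME on `TypeRef::Array`).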
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
new file mode 100644
index 000000000..6e22a877a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
@@ -0,0 +1,242 @@
+//! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`).
+
+use std::{iter, sync::Arc};
+
+use hir_expand::{hygiene::Hygiene, InFile};
+use la_arena::ArenaMap;
+use syntax::ast;
+
+use crate::{
+ db::DefDatabase,
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ resolver::HasResolver,
+ ConstId, FunctionId, HasModule, LocalFieldId, ModuleId, VariantId,
+};
+
+/// Visibility of an item, not yet resolved.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum RawVisibility {
+ /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
+ /// equivalent to `pub(self)`.
+ Module(ModPath),
+ /// `pub`.
+ Public,
+}
+
+impl RawVisibility {
+ pub(crate) const fn private() -> RawVisibility {
+ RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)))
+ }
+
+ pub(crate) fn from_ast(
+ db: &dyn DefDatabase,
+ node: InFile<Option<ast::Visibility>>,
+ ) -> RawVisibility {
+ Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
+ }
+
+ pub(crate) fn from_ast_with_hygiene(
+ db: &dyn DefDatabase,
+ node: Option<ast::Visibility>,
+ hygiene: &Hygiene,
+ ) -> RawVisibility {
+ Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
+ }
+
+ pub(crate) fn from_ast_with_hygiene_and_default(
+ db: &dyn DefDatabase,
+ node: Option<ast::Visibility>,
+ default: RawVisibility,
+ hygiene: &Hygiene,
+ ) -> RawVisibility {
+ let node = match node {
+ None => return default,
+ Some(node) => node,
+ };
+ match node.kind() {
+ ast::VisibilityKind::In(path) => {
+ let path = ModPath::from_src(db.upcast(), path, hygiene);
+ let path = match path {
+ None => return RawVisibility::private(),
+ Some(path) => path,
+ };
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::PubCrate => {
+ let path = ModPath::from_kind(PathKind::Crate);
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::PubSuper => {
+ let path = ModPath::from_kind(PathKind::Super(1));
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::PubSelf => {
+ let path = ModPath::from_kind(PathKind::Plain);
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::Pub => RawVisibility::Public,
+ }
+ }
+
+ pub fn resolve(
+ &self,
+ db: &dyn DefDatabase,
+ resolver: &crate::resolver::Resolver,
+ ) -> Visibility {
+ // we fall back to public visibility (i.e. fail open) if the path can't be resolved
+ resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public)
+ }
+}
+
+/// Visibility of an item, with the path resolved.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum Visibility {
+ /// Visibility is restricted to a certain module.
+ Module(ModuleId),
+ /// Visibility is unrestricted.
+ Public,
+}
+
+impl Visibility {
+ pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool {
+ let to_module = match self {
+ Visibility::Module(m) => m,
+ Visibility::Public => return true,
+ };
+ // if they're not in the same crate, it can't be visible
+ if from_module.krate != to_module.krate {
+ return false;
+ }
+ let def_map = from_module.def_map(db);
+ self.is_visible_from_def_map(db, &def_map, from_module.local_id)
+ }
+
+ pub(crate) fn is_visible_from_other_crate(self) -> bool {
+ matches!(self, Visibility::Public)
+ }
+
+ pub(crate) fn is_visible_from_def_map(
+ self,
+ db: &dyn DefDatabase,
+ def_map: &DefMap,
+ mut from_module: crate::LocalModuleId,
+ ) -> bool {
+ let mut to_module = match self {
+ Visibility::Module(m) => m,
+ Visibility::Public => return true,
+ };
+
+ // `to_module` might be the root module of a block expression. Those have the same
+ // visibility as the containing module (even though no items are directly nameable from
+ // there, getting this right is important for method resolution).
+ // In that case, we adjust the visibility of `to_module` to point to the containing module.
+ // Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're
+ // currently computing, so we must not call the `def_map` query for it.
+ let arc;
+ let to_module_def_map =
+ if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
+ cov_mark::hit!(is_visible_from_same_block_def_map);
+ def_map
+ } else {
+ arc = to_module.def_map(db);
+ &arc
+ };
+ let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none());
+ if is_block_root {
+ to_module = to_module_def_map.containing_module(to_module.local_id).unwrap();
+ }
+
+ // from_module needs to be a descendant of to_module
+ let mut def_map = def_map;
+ let mut parent_arc;
+ loop {
+ if def_map.module_id(from_module) == to_module {
+ return true;
+ }
+ match def_map[from_module].parent {
+ Some(parent) => from_module = parent,
+ None => {
+ match def_map.parent() {
+ Some(module) => {
+ parent_arc = module.def_map(db);
+ def_map = &*parent_arc;
+ from_module = module.local_id;
+ }
+ // Reached the root module, nothing left to check.
+ None => return false,
+ }
+ }
+ }
+ }
+ }
+
+ /// Returns the most permissive visibility of `self` and `other`.
+ ///
+    /// If there is no subset relation between `self` and `other`, returns `None` (i.e. they're only
+ /// visible in unrelated modules).
+ pub(crate) fn max(self, other: Visibility, def_map: &DefMap) -> Option<Visibility> {
+ match (self, other) {
+ (Visibility::Module(_) | Visibility::Public, Visibility::Public)
+ | (Visibility::Public, Visibility::Module(_)) => Some(Visibility::Public),
+ (Visibility::Module(mod_a), Visibility::Module(mod_b)) => {
+ if mod_a.krate != mod_b.krate {
+ return None;
+ }
+
+ let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| {
+ let parent_id = def_map[m].parent?;
+ Some(parent_id)
+ });
+ let mut b_ancestors = iter::successors(Some(mod_b.local_id), |&m| {
+ let parent_id = def_map[m].parent?;
+ Some(parent_id)
+ });
+
+ if a_ancestors.any(|m| m == mod_b.local_id) {
+ // B is above A
+ return Some(Visibility::Module(mod_b));
+ }
+
+ if b_ancestors.any(|m| m == mod_a.local_id) {
+ // A is above B
+ return Some(Visibility::Module(mod_a));
+ }
+
+ None
+ }
+ }
+ }
+}
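+
+// Illustrative behaviour of `Visibility::max` (sketch): for modules `crate::a`
+// and `crate::a::b` within one crate,
+//
+//     max(Module(crate::a), Module(crate::a::b)) == Some(Module(crate::a))
+//     max(Module(crate::a), Public)              == Some(Public)
+//     max(Module(crate::a), Module(crate::c))    == None  // unrelated modules
+//
+// i.e. the result is the visibility whose scope contains the other, if any.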
+
+/// Resolve visibility of all specific fields of a struct or union variant.
+pub(crate) fn field_visibilities_query(
+ db: &dyn DefDatabase,
+ variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Visibility>> {
+ let var_data = match variant_id {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
+ }
+ };
+ let resolver = variant_id.module(db).resolver(db);
+ let mut res = ArenaMap::default();
+ for (field_id, field_data) in var_data.fields().iter() {
+ res.insert(field_id, field_data.visibility.resolve(db, &resolver))
+ }
+ Arc::new(res)
+}
+
+/// Resolve visibility of a function.
+pub(crate) fn function_visibility_query(db: &dyn DefDatabase, def: FunctionId) -> Visibility {
+ let resolver = def.resolver(db);
+ db.function_data(def).visibility.resolve(db, &resolver)
+}
+
+/// Resolve visibility of a const.
+pub(crate) fn const_visibility_query(db: &dyn DefDatabase, def: ConstId) -> Visibility {
+ let resolver = def.resolver(db);
+ db.const_data(def).visibility.resolve(db, &resolver)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
new file mode 100644
index 000000000..dfd470ffc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "hir-expand"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+tracing = "0.1.35"
+either = "1.7.0"
+rustc-hash = "1.1.0"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+itertools = "0.10.3"
+hashbrown = { version = "0.12.1", features = [
+ "inline-more",
+], default-features = false }
+smallvec = { version = "1.9.0", features = ["const_new"] }
+
+stdx = { path = "../stdx", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
new file mode 100644
index 000000000..c1ddef03b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -0,0 +1,181 @@
+//! `AstIdMap` allows creating stable IDs for "large" syntax nodes like items
+//! and macro calls.
+//!
+//! Specifically, it enumerates all items in a file and uses the position of
+//! an item as an ID. That way, ids don't change unless the set of items
+//! itself changes.
+
+use std::{
+ any::type_name,
+ fmt,
+ hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
+ marker::PhantomData,
+};
+
+use la_arena::{Arena, Idx};
+use profile::Count;
+use rustc_hash::FxHasher;
+use syntax::{ast, match_ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+
+/// `AstId` points to an AST node in a specific file.
+pub struct FileAstId<N: AstNode> {
+ raw: ErasedFileAstId,
+ _ty: PhantomData<fn() -> N>,
+}
+
+impl<N: AstNode> Clone for FileAstId<N> {
+ fn clone(&self) -> FileAstId<N> {
+ *self
+ }
+}
+impl<N: AstNode> Copy for FileAstId<N> {}
+
+impl<N: AstNode> PartialEq for FileAstId<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.raw == other.raw
+ }
+}
+impl<N: AstNode> Eq for FileAstId<N> {}
+impl<N: AstNode> Hash for FileAstId<N> {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.raw.hash(hasher);
+ }
+}
+
+impl<N: AstNode> fmt::Debug for FileAstId<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
+ }
+}
+
+impl<N: AstNode> FileAstId<N> {
+ // Can't make this a From implementation because of coherence
+ pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+ where
+ N: Into<M>,
+ {
+ FileAstId { raw: self.raw, _ty: PhantomData }
+ }
+}
+
+type ErasedFileAstId = Idx<SyntaxNodePtr>;
+
+/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
+#[derive(Default)]
+pub struct AstIdMap {
+ /// Maps stable id to unstable ptr.
+ arena: Arena<SyntaxNodePtr>,
+ /// Reverse: map ptr to id.
+ map: hashbrown::HashMap<Idx<SyntaxNodePtr>, (), ()>,
+ _c: Count<Self>,
+}
+
+impl fmt::Debug for AstIdMap {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("AstIdMap").field("arena", &self.arena).finish()
+ }
+}
+
+impl PartialEq for AstIdMap {
+ fn eq(&self, other: &Self) -> bool {
+ self.arena == other.arena
+ }
+}
+impl Eq for AstIdMap {}
+
+impl AstIdMap {
+ pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
+ assert!(node.parent().is_none());
+ let mut res = AstIdMap::default();
+        // By walking the tree in breadth-first order we make sure that parents
+        // get lower ids than children. That is, adding a new child does not
+        // change the parent's id. This means that, say, adding a new function to
+        // a trait does not change the ids of top-level items, which helps caching.
+ bdfs(node, |it| {
+ match_ast! {
+ match it {
+ ast::Item(module_item) => {
+ res.alloc(module_item.syntax());
+ true
+ },
+ ast::BlockExpr(block) => {
+ res.alloc(block.syntax());
+ true
+ },
+ _ => false,
+ }
+ }
+ });
+ res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
+ for (idx, ptr) in res.arena.iter() {
+ let hash = hash_ptr(ptr);
+ match res.map.raw_entry_mut().from_hash(hash, |idx2| *idx2 == idx) {
+ hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
+ hashbrown::hash_map::RawEntryMut::Vacant(entry) => {
+ entry.insert_with_hasher(hash, idx, (), |&idx| hash_ptr(&res.arena[idx]));
+ }
+ }
+ }
+ res
+ }
+
+ pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+ let raw = self.erased_ast_id(item.syntax());
+ FileAstId { raw, _ty: PhantomData }
+ }
+ fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
+ let ptr = SyntaxNodePtr::new(item);
+ let hash = hash_ptr(&ptr);
+ match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
+ Some((&idx, &())) => idx,
+ None => panic!(
+ "Can't find {:?} in AstIdMap:\n{:?}",
+ item,
+ self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
+ ),
+ }
+ }
+
+ pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+ AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
+ }
+
+ fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
+ self.arena.alloc(SyntaxNodePtr::new(item))
+ }
+}
+
+fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
+ let mut hasher = BuildHasherDefault::<FxHasher>::default().build_hasher();
+ ptr.hash(&mut hasher);
+ hasher.finish()
+}
+
+/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
+/// order? It is a mix of breadth-first and depth-first orders. Nodes for which
+/// `f` returns true are visited breadth-first; all the other nodes are explored
+/// depth-first.
+///
+/// In other words, the size of the bfs queue is bounded by the number of "true"
+/// nodes.
+fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
+ let mut curr_layer = vec![node.clone()];
+ let mut next_layer = vec![];
+ while !curr_layer.is_empty() {
+ curr_layer.drain(..).for_each(|node| {
+ let mut preorder = node.preorder();
+ while let Some(event) = preorder.next() {
+ match event {
+ syntax::WalkEvent::Enter(node) => {
+ if f(node.clone()) {
+ next_layer.extend(node.children());
+ preorder.skip_subtree();
+ }
+ }
+ syntax::WalkEvent::Leave(_) => {}
+ }
+ }
+ });
+ std::mem::swap(&mut curr_layer, &mut next_layer);
+ }
+}
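+
+// Illustrative ordering (sketch): for a file containing
+//
+//     struct S;
+//     trait T { fn a(); fn b(); }
+//
+// ids are handed out layer by layer: first `S` and `T`, then `T::a` and
+// `T::b`. Adding `fn c()` to `T` only appends a new id and leaves the ids of
+// `S` and `T` untouched, which is what keeps the caching described above
+// effective.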
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
new file mode 100644
index 000000000..0c886ac4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -0,0 +1,130 @@
+//! Builtin attributes.
+
+use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind};
+
+macro_rules! register_builtin {
+ ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinAttrExpander {
+ $($variant),*
+ }
+
+ impl BuiltinAttrExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ let expander = match *self {
+ $( BuiltinAttrExpander::$variant => $expand, )*
+ };
+ expander(db, id, tt)
+ }
+
+ fn find_by_name(name: &name::Name) -> Option<Self> {
+ match name {
+ $( id if id == &name::name![$name] => Some(BuiltinAttrExpander::$variant), )*
+ _ => None,
+ }
+ }
+ }
+
+ };
+}
+
+impl BuiltinAttrExpander {
+ pub fn is_derive(self) -> bool {
+ matches!(self, BuiltinAttrExpander::Derive)
+ }
+ pub fn is_test(self) -> bool {
+ matches!(self, BuiltinAttrExpander::Test)
+ }
+ pub fn is_bench(self) -> bool {
+ matches!(self, BuiltinAttrExpander::Bench)
+ }
+}
+
+register_builtin! {
+ (bench, Bench) => dummy_attr_expand,
+ (cfg_accessible, CfgAccessible) => dummy_attr_expand,
+ (cfg_eval, CfgEval) => dummy_attr_expand,
+ (derive, Derive) => derive_attr_expand,
+ (global_allocator, GlobalAllocator) => dummy_attr_expand,
+ (test, Test) => dummy_attr_expand,
+ (test_case, TestCase) => dummy_attr_expand
+}
+
+pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
+ BuiltinAttrExpander::find_by_name(ident)
+}
+
+fn dummy_attr_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::ok(tt.clone())
+}
+
+/// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
+/// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
+/// derive attributes can be downmapped, and resolved as proper paths.
+/// This is basically a hack that simplifies the hacks we need in a lot of ide layer places to
+/// somewhat inconsistently resolve derive attributes.
+///
+/// As such, we expand `#[derive(Foo, bar::Bar)]` into
+/// ```
+/// #[Foo]
+/// #[bar::Bar]
+/// ();
+/// ```
+/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
+/// Since we do not expand the attribute in nameres though, we keep the original item.
+///
+/// The ideal expansion here would be for the `#[derive]` to re-emit the annotated item and somehow
+/// use the input paths in its output as well.
+/// But that would bring two problems with it, for one every derive would duplicate the item token tree
+/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
+/// always resolve as a derive without nameres recollecting them.
+/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
+/// [`hir::Semantics`] to make this work.
+fn derive_attr_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc = db.lookup_intern_macro_call(id);
+ let derives = match &loc.kind {
+ MacroCallKind::Attr { attr_args, is_derive: true, .. } => &attr_args.0,
+ _ => return ExpandResult::ok(Default::default()),
+ };
+ pseudo_derive_attr_expansion(tt, derives)
+}
+
+pub fn pseudo_derive_attr_expansion(
+ tt: &tt::Subtree,
+ args: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let mk_leaf = |char| {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
+ };
+
+ let mut token_trees = Vec::new();
+ for tt in (&args.token_trees)
+ .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
+ {
+ token_trees.push(mk_leaf('#'));
+ token_trees.push(mk_leaf('['));
+ token_trees.extend(tt.iter().cloned());
+ token_trees.push(mk_leaf(']'));
+ }
+ token_trees.push(mk_leaf('('));
+ token_trees.push(mk_leaf(')'));
+ token_trees.push(mk_leaf(';'));
+ ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
new file mode 100644
index 000000000..79989bc2e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -0,0 +1,249 @@
+//! Builtin derives.
+
+use base_db::{CrateOrigin, LangCrateOrigin};
+use tracing::debug;
+
+use syntax::{
+ ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName},
+ match_ast,
+};
+use tt::TokenId;
+
+use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
+
+macro_rules! register_builtin {
+ ( $($trait:ident => $expand:ident),* ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinDeriveExpander {
+ $($trait),*
+ }
+
+ impl BuiltinDeriveExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ let expander = match *self {
+ $( BuiltinDeriveExpander::$trait => $expand, )*
+ };
+ expander(db, id, tt)
+ }
+
+ fn find_by_name(name: &name::Name) -> Option<Self> {
+ match name {
+ $( id if id == &name::name![$trait] => Some(BuiltinDeriveExpander::$trait), )*
+ _ => None,
+ }
+ }
+ }
+
+ };
+}
+
+register_builtin! {
+ Copy => copy_expand,
+ Clone => clone_expand,
+ Default => default_expand,
+ Debug => debug_expand,
+ Hash => hash_expand,
+ Ord => ord_expand,
+ PartialOrd => partial_ord_expand,
+ Eq => eq_expand,
+ PartialEq => partial_eq_expand
+}
+
+pub fn find_builtin_derive(ident: &name::Name) -> Option<BuiltinDeriveExpander> {
+ BuiltinDeriveExpander::find_by_name(ident)
+}
+
+struct BasicAdtInfo {
+ name: tt::Ident,
+ type_or_const_params: usize,
+}
+
+fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
+ let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
+ let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
+ debug!("derive node didn't parse");
+ ExpandError::Other("invalid item definition".into())
+ })?;
+ let item = macro_items.items().next().ok_or_else(|| {
+ debug!("no module item parsed");
+ ExpandError::Other("no item found".into())
+ })?;
+ let node = item.syntax();
+ let (name, params) = match_ast! {
+ match node {
+ ast::Struct(it) => (it.name(), it.generic_param_list()),
+ ast::Enum(it) => (it.name(), it.generic_param_list()),
+ ast::Union(it) => (it.name(), it.generic_param_list()),
+ _ => {
+ debug!("unexpected node is {:?}", node);
+ return Err(ExpandError::Other("expected struct, enum or union".into()))
+ },
+ }
+ };
+ let name = name.ok_or_else(|| {
+ debug!("parsed item has no name");
+ ExpandError::Other("missing name".into())
+ })?;
+ let name_token_id =
+ token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
+ let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
+ let type_or_const_params =
+ params.map_or(0, |type_param_list| type_param_list.type_or_const_params().count());
+ Ok(BasicAdtInfo { name: name_token, type_or_const_params })
+}
+
+fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
+ let mut result = Vec::<tt::TokenTree>::with_capacity(n * 2);
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: '<',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ for i in 0..n {
+ if i > 0 {
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ }
+ result.push(
+ tt::Leaf::Ident(tt::Ident {
+ id: tt::TokenId::unspecified(),
+ text: format!("T{}", i).into(),
+ })
+ .into(),
+ );
+ result.extend(bound.iter().cloned());
+ }
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: '>',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ result
+}
+
+fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ let info = match parse_adt(tt) {
+ Ok(info) => info,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+ let name = info.name;
+ let trait_path_clone = trait_path.token_trees.clone();
+ let bound = (quote! { : ##trait_path_clone }).token_trees;
+ let type_params = make_type_args(info.type_or_const_params, bound);
+ let type_args = make_type_args(info.type_or_const_params, Vec::new());
+ let trait_path = trait_path.token_trees;
+ let expanded = quote! {
+ impl ##type_params ##trait_path for #name ##type_args {}
+ };
+ ExpandResult::ok(expanded)
+}
+
+fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree {
+    // FIXME: make hygiene work for builtin derive macros
+ // such that $crate can be used here.
+ let cg = db.crate_graph();
+ let krate = db.lookup_intern_macro_call(id).krate;
+
+ let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
+ cov_mark::hit!(test_copy_expand_in_core);
+ quote! { crate }
+ } else {
+ quote! { core }
+ };
+
+ tt.token_trees[0].clone()
+}
+
+fn copy_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::marker::Copy })
+}
+
+fn clone_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::clone::Clone })
+}
+
+fn default_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::default::Default })
+}
+
+fn debug_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::fmt::Debug })
+}
+
+fn hash_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::hash::Hash })
+}
+
+fn eq_expand(db: &dyn AstDatabase, id: MacroCallId, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::Eq })
+}
+
+fn partial_eq_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::PartialEq })
+}
+
+fn ord_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::Ord })
+}
+
+fn partial_ord_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let krate = find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd })
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
new file mode 100644
index 000000000..76da7c9f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -0,0 +1,669 @@
+//! Builtin macros.
+
+use base_db::{AnchoredPath, Edition, FileId};
+use cfg::CfgExpr;
+use either::Either;
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
+use syntax::{
+ ast::{self, AstToken},
+ SmolStr,
+};
+
+use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId, MacroCallLoc};
+
+macro_rules! register_builtin {
+ ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum BuiltinFnLikeExpander {
+ $($kind),*
+ }
+
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+ pub enum EagerExpander {
+ $($e_kind),*
+ }
+
+ impl BuiltinFnLikeExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ let expander = match *self {
+ $( BuiltinFnLikeExpander::$kind => $expand, )*
+ };
+ expander(db, id, tt)
+ }
+ }
+
+ impl EagerExpander {
+ pub fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<ExpandedEager> {
+ let expander = match *self {
+ $( EagerExpander::$e_kind => $e_expand, )*
+ };
+ expander(db, arg_id, tt)
+ }
+ }
+
+ fn find_by_name(ident: &name::Name) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
+ match ident {
+ $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )*
+ $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )*
+ _ => return None,
+ }
+ }
+ };
+}
+
+#[derive(Debug, Default)]
+pub struct ExpandedEager {
+ pub(crate) subtree: tt::Subtree,
+ /// The included file ID of the include macro.
+ pub(crate) included_file: Option<FileId>,
+}
+
+impl ExpandedEager {
+ fn new(subtree: tt::Subtree) -> Self {
+ ExpandedEager { subtree, included_file: None }
+ }
+}
+
+pub fn find_builtin_macro(
+ ident: &name::Name,
+) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
+ find_by_name(ident)
+}
+
+register_builtin! {
+ LAZY:
+ (column, Column) => column_expand,
+ (file, File) => file_expand,
+ (line, Line) => line_expand,
+ (module_path, ModulePath) => module_path_expand,
+ (assert, Assert) => assert_expand,
+ (stringify, Stringify) => stringify_expand,
+ (format_args, FormatArgs) => format_args_expand,
+ (const_format_args, ConstFormatArgs) => format_args_expand,
+ // format_args_nl only differs in that it adds a newline at the end,
+ // so we use the same stub expansion for now
+ (format_args_nl, FormatArgsNl) => format_args_expand,
+ (llvm_asm, LlvmAsm) => asm_expand,
+ (asm, Asm) => asm_expand,
+ (global_asm, GlobalAsm) => global_asm_expand,
+ (cfg, Cfg) => cfg_expand,
+ (core_panic, CorePanic) => panic_expand,
+ (std_panic, StdPanic) => panic_expand,
+ (unreachable, Unreachable) => unreachable_expand,
+ (log_syntax, LogSyntax) => log_syntax_expand,
+ (trace_macros, TraceMacros) => trace_macros_expand,
+
+ EAGER:
+ (compile_error, CompileError) => compile_error_expand,
+ (concat, Concat) => concat_expand,
+ (concat_idents, ConcatIdents) => concat_idents_expand,
+ (concat_bytes, ConcatBytes) => concat_bytes_expand,
+ (include, Include) => include_expand,
+ (include_bytes, IncludeBytes) => include_bytes_expand,
+ (include_str, IncludeStr) => include_str_expand,
+ (env, Env) => env_expand,
+ (option_env, OptionEnv) => option_env_expand
+}
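
A quick illustrative sketch (not part of the upstream file) of how this table is consumed through `find_builtin_macro`; the imports and the resolved `name::Name` value are assumptions taken from the surrounding code.

```rust
// Illustrative sketch only. Assumes the same imports as this file and a
// resolved `name::Name` for the identifier in question.
fn classify_builtin(ident: &name::Name) {
    match find_builtin_macro(ident) {
        // lazily expanded builtins, e.g. `line!`, `format_args!`, `cfg!`
        Some(Either::Left(_lazy)) => {}
        // eagerly expanded builtins, e.g. `concat!`, `env!`, `include!`
        Some(Either::Right(_eager)) => {}
        // not a builtin macro at all
        None => {}
    }
}
```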
+
+const DOLLAR_CRATE: tt::Ident =
+ tt::Ident { text: SmolStr::new_inline("$crate"), id: tt::TokenId::unspecified() };
+
+fn module_path_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // Just return a dummy result.
+ ExpandResult::ok(quote! { "module::path" })
+}
+
+fn line_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // dummy implementation for type-checking purposes
+ let line_num = 0;
+ let expanded = quote! {
+ #line_num
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn log_syntax_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::ok(quote! {})
+}
+
+fn trace_macros_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::ok(quote! {})
+}
+
+fn stringify_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let pretty = tt::pretty(&tt.token_trees);
+
+ let expanded = quote! {
+ #pretty
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn column_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // dummy implementation for type-checking purposes
+ let col_num = 0;
+ let expanded = quote! {
+ #col_num
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn assert_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let args = parse_exprs_with_sep(tt, ',');
+ let expanded = match &*args {
+ [cond, panic_args @ ..] => {
+ let comma = tt::Subtree {
+ delimiter: None,
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))],
+ };
+ let cond = cond.clone();
+ let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
+ quote! {{
+ if !#cond {
+ #DOLLAR_CRATE::panic!(##panic_args);
+ }
+ }}
+ }
+ [] => quote! {{}},
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn file_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // FIXME: RA purposefully lacks knowledge of absolute file names
+ // so just return "".
+ let file_name = "";
+
+ let expanded = quote! {
+ #file_name
+ };
+
+ ExpandResult::ok(expanded)
+}
+
+fn format_args_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // We expand `format_args!("", a1, a2)` to
+ // ```
+ // std::fmt::Arguments::new_v1(&[], &[
+ // std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt),
+ // std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt),
+ // ])
+ // ```,
+ // which is still not really correct, but close enough for now
+ let mut args = parse_exprs_with_sep(tt, ',');
+
+ if args.is_empty() {
+ return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule.into());
+ }
+ for arg in &mut args {
+ // Remove `key =`.
+ if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' && p.spacing != tt::Spacing::Joint)
+ {
+ arg.token_trees.drain(..2);
+ }
+ }
+ let _format_string = args.remove(0);
+ let arg_tts = args.into_iter().flat_map(|arg| {
+ quote! { std::fmt::ArgumentV1::new(&(#arg), std::fmt::Display::fmt), }
+ }.token_trees);
+ let expanded = quote! {
+ std::fmt::Arguments::new_v1(&[], &[##arg_tts])
+ };
+ ExpandResult::ok(expanded)
+}
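
For concreteness, a sketch of the token stream this stub generates for a two-argument call (approximate spacing; rustc's real `format_args!` lowering is richer).

```rust
// format_args!("{} {}", x, y) -- stub expansion produced above:
std::fmt::Arguments::new_v1(&[], &[
    std::fmt::ArgumentV1::new(&(x), std::fmt::Display::fmt),
    std::fmt::ArgumentV1::new(&(y), std::fmt::Display::fmt),
])
// The format string itself is dropped, so this is only good enough to drive
// type inference in the IDE, not to check the format syntax.
```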
+
+fn asm_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // We expand all assembly snippets to `format_args!` invocations to get format syntax
+ // highlighting for them.
+
+ let mut literals = Vec::new();
+ for tt in tt.token_trees.chunks(2) {
+ match tt {
+ [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
+ | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', id: _, spacing: _ }))] =>
+ {
+ let krate = DOLLAR_CRATE.clone();
+ literals.push(quote!(#krate::format_args!(#lit);));
+ }
+ _ => break,
+ }
+ }
+
+ let expanded = quote! {{
+ ##literals
+ loop {}
+ }};
+ ExpandResult::ok(expanded)
+}
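
A sketch of what the loop above produces for a simple `asm!` invocation: leading string-literal template lines become `format_args!` calls (so they get format-string highlighting), the first non-literal operand stops the loop, and the trailing `loop {}` lets the block unify with any expected type.

```rust
// asm!("mov {0}, {1}", out(reg) o, in(reg) i) -- approximate expansion:
{
    $crate::format_args!("mov {0}, {1}");
    loop {}
}
// `$crate` here is the literal `DOLLAR_CRATE` token, resolved later by hygiene.
```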
+
+fn global_asm_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ // Expand to nothing (at item-level)
+ ExpandResult::ok(quote! {})
+}
+
+fn cfg_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc = db.lookup_intern_macro_call(id);
+ let expr = CfgExpr::parse(tt);
+ let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
+ let expanded = if enabled { quote!(true) } else { quote!(false) };
+ ExpandResult::ok(expanded)
+}
+
+fn panic_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ // Expand to a macro call `$crate::panic::panic_{edition}`
+ let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
+ quote!(#DOLLAR_CRATE::panic::panic_2021!)
+ } else {
+ quote!(#DOLLAR_CRATE::panic::panic_2015!)
+ };
+
+ // Pass the original arguments
+ call.token_trees.push(tt::TokenTree::Subtree(tt.clone()));
+ ExpandResult::ok(call)
+}
+
+fn unreachable_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ // Expand to a macro call `$crate::panic::unreachable_{edition}`
+ let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
+ quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
+ } else {
+ quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
+ };
+
+ // Pass the original arguments
+ call.token_trees.push(tt::TokenTree::Subtree(tt.clone()));
+ ExpandResult::ok(call)
+}
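
A concrete illustration of the edition switch shared by `panic_expand` and `unreachable_expand`: the original argument subtree is re-attached to an edition-specific internal macro call.

```rust
// Crate edition >= 2021:
//   panic!("boom {}", x)   ==>  $crate::panic::panic_2021!("boom {}", x)
//   unreachable!("oops")   ==>  $crate::panic::unreachable_2021!("oops")
// Older editions get the `*_2015!` variants; `$crate` is the literal
// `DOLLAR_CRATE` identifier defined near the top of this file.
```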
+
+fn unquote_str(lit: &tt::Literal) -> Option<String> {
+ let lit = ast::make::tokens::literal(&lit.to_string());
+ let token = ast::String::cast(lit)?;
+ token.value().map(|it| it.into_owned())
+}
+
+fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> {
+ let lit = ast::make::tokens::literal(&lit.to_string());
+ let token = ast::ByteString::cast(lit)?;
+ token.value().map(|it| it.into_owned())
+}
+
+fn compile_error_expand(
+ _db: &dyn AstDatabase,
+ _id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let err = match &*tt.token_trees {
+ [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => {
+ let text = it.text.as_str();
+ if text.starts_with('"') && text.ends_with('"') {
+ // FIXME: does not handle raw strings
+ ExpandError::Other(text[1..text.len() - 1].into())
+ } else {
+ ExpandError::Other("`compile_error!` argument must be a string".into())
+ }
+ }
+ _ => ExpandError::Other("`compile_error!` argument must be a string".into()),
+ };
+
+ ExpandResult { value: ExpandedEager::new(quote! {}), err: Some(err) }
+}
+
+fn concat_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let mut err = None;
+ let mut text = String::new();
+ for (i, mut t) in tt.token_trees.iter().enumerate() {
+ // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
+ // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
+ // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
+ if let tt::TokenTree::Subtree(tt::Subtree { delimiter: Some(delim), token_trees }) = t {
+ if let [tt] = &**token_trees {
+ if delim.kind == tt::DelimiterKind::Parenthesis {
+ t = tt;
+ }
+ }
+ }
+
+ match t {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
+ // concat works with string and char literals, so remove any quotes.
+ // It also works with integer, float and boolean literals, so just use the rest
+ // as-is.
+ let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
+ text.push_str(&component);
+ }
+ // handle boolean literals
+ tt::TokenTree::Leaf(tt::Leaf::Ident(id))
+ if i % 2 == 0 && (id.text == "true" || id.text == "false") =>
+ {
+ text.push_str(id.text.as_str());
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ ExpandResult { value: ExpandedEager::new(quote!(#text)), err }
+}
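
A sketch of the evaluation performed above: literals at even positions are concatenated (string quotes removed), commas at odd positions are skipped, and a parenthesized `$e:expr` capture wrapping a single literal is unwrapped first.

```rust
// Illustrative inputs and the resulting string literal:
//   concat!("foo", 42, true)   ==>  "foo42true"
//   concat!(("foo"), "bar")    ==>  "foobar"   // parens from an `$e:expr` capture are stripped
// Any other token records `mbe::ExpandError::UnexpectedToken`, but the
// partially built string is still returned as the value.
```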
+
+fn concat_bytes_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let mut bytes = Vec::new();
+ let mut err = None;
+ for (i, t) in tt.token_trees.iter().enumerate() {
+ match t {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ let token = ast::make::tokens::literal(&lit.to_string());
+ match token.kind() {
+ syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
+ syntax::SyntaxKind::BYTE_STRING => {
+ let components = unquote_byte_string(lit).unwrap_or_else(Vec::new);
+ components.into_iter().for_each(|x| bytes.push(x.to_string()));
+ }
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ break;
+ }
+ }
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ tt::TokenTree::Subtree(tree)
+ if tree.delimiter_kind() == Some(tt::DelimiterKind::Bracket) =>
+ {
+ if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) {
+ err.get_or_insert(e);
+ break;
+ }
+ }
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ break;
+ }
+ }
+ }
+ let ident = tt::Ident { text: bytes.join(", ").into(), id: tt::TokenId::unspecified() };
+ ExpandResult { value: ExpandedEager::new(quote!([#ident])), err }
+}
+
+fn concat_bytes_expand_subtree(
+ tree: &tt::Subtree,
+ bytes: &mut Vec<String>,
+) -> Result<(), ExpandError> {
+ for (ti, tt) in tree.token_trees.iter().enumerate() {
+ match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ let lit = ast::make::tokens::literal(&lit.to_string());
+ match lit.kind() {
+ syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => {
+ bytes.push(lit.text().to_string())
+ }
+ _ => {
+ return Err(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
+ _ => {
+ return Err(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ Ok(())
+}
+
+fn concat_idents_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let mut err = None;
+ let mut ident = String::new();
+ for (i, t) in tt.token_trees.iter().enumerate() {
+ match t {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => {
+ ident.push_str(id.text.as_str());
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ _ => {
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
+ }
+ }
+ }
+ let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() };
+ ExpandResult { value: ExpandedEager::new(quote!(#ident)), err }
+}
+
+fn relative_file(
+ db: &dyn AstDatabase,
+ call_id: MacroCallId,
+ path_str: &str,
+ allow_recursion: bool,
+) -> Result<FileId, ExpandError> {
+ let call_site = call_id.as_file().original_file(db);
+ let path = AnchoredPath { anchor: call_site, path: path_str };
+ let res = db
+ .resolve_path(path)
+ .ok_or_else(|| ExpandError::Other(format!("failed to load file `{path_str}`").into()))?;
+ // Prevent a file from recursively including itself
+ if res == call_site && !allow_recursion {
+ Err(ExpandError::Other(format!("recursive inclusion of `{path_str}`").into()))
+ } else {
+ Ok(res)
+ }
+}
+
+fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
+ tt.token_trees
+ .get(0)
+ .and_then(|tt| match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
+ _ => None,
+ })
+ .ok_or(mbe::ExpandError::ConversionError.into())
+}
+
+fn include_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let res = (|| {
+ let path = parse_string(tt)?;
+ let file_id = relative_file(db, arg_id, &path, false)?;
+
+ let subtree =
+ parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?.0;
+ Ok((subtree, file_id))
+ })();
+
+ match res {
+ Ok((subtree, file_id)) => {
+ ExpandResult::ok(ExpandedEager { subtree, included_file: Some(file_id) })
+ }
+ Err(e) => ExpandResult::only_err(e),
+ }
+}
+
+fn include_bytes_expand(
+ _db: &dyn AstDatabase,
+ _arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ if let Err(e) = parse_string(tt) {
+ return ExpandResult::only_err(e);
+ }
+
+ // FIXME: actually read the file here if the user asked for macro expansion
+ let res = tt::Subtree {
+ delimiter: None,
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+ text: r#"b"""#.into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ };
+ ExpandResult::ok(ExpandedEager::new(res))
+}
+
+fn include_str_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let path = match parse_string(tt) {
+ Ok(it) => it,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+
+ // FIXME: we're not able to read excluded files (which is most of them because
+ // it's unusual to `include_str!` a Rust file), but we can return an empty string.
+ // Ideally, we'd be able to offer a precise expansion if the user asks for macro
+ // expansion.
+ let file_id = match relative_file(db, arg_id, &path, true) {
+ Ok(file_id) => file_id,
+ Err(_) => {
+ return ExpandResult::ok(ExpandedEager::new(quote!("")));
+ }
+ };
+
+ let text = db.file_text(file_id);
+ let text = &*text;
+
+ ExpandResult::ok(ExpandedEager::new(quote!(#text)))
+}
+
+fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
+ let krate = db.lookup_intern_macro_call(arg_id).krate;
+ db.crate_graph()[krate].env.get(key)
+}
+
+fn env_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let key = match parse_string(tt) {
+ Ok(it) => it,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+
+ let mut err = None;
+ let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| {
+ // The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
+ // unnecessary diagnostics for e.g. `CARGO_PKG_NAME`.
+ if key == "OUT_DIR" {
+ err = Some(ExpandError::Other(
+ r#"`OUT_DIR` not set, enable "build scripts" to fix"#.into(),
+ ));
+ }
+
+ // If the variable is unset, still return a dummy string to help type inference along.
+ // We cannot use an empty string here, because
+ // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` would then become
+ // `include!("foo.rs")`, which might cause an infinite inclusion loop
+ "__RA_UNIMPLEMENTED__".to_string()
+ });
+ let expanded = quote! { #s };
+
+ ExpandResult { value: ExpandedEager::new(expanded), err }
+}
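
The observable behaviour, assuming build scripts are disabled so `OUT_DIR` is absent from the crate's environment:

```rust
//   env!("OUT_DIR")         ==>  "__RA_UNIMPLEMENTED__"
//                                 plus a diagnostic suggesting to enable build scripts
//   env!("CARGO_PKG_NAME")  ==>  the value from the crate graph if known, otherwise
//                                 "__RA_UNIMPLEMENTED__" with no diagnostic attached
```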
+
+fn option_env_expand(
+ db: &dyn AstDatabase,
+ arg_id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<ExpandedEager> {
+ let key = match parse_string(tt) {
+ Ok(it) => it,
+ Err(e) => return ExpandResult::only_err(e),
+ };
+
+ let expanded = match get_env_inner(db, arg_id, &key) {
+ None => quote! { std::option::Option::None::<&str> },
+ Some(s) => quote! { std::option::Option::Some(#s) },
+ };
+
+ ExpandResult::ok(ExpandedEager::new(expanded))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
new file mode 100644
index 000000000..bd60c3d26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -0,0 +1,509 @@
+//! Defines database & queries for macro expansion.
+
+use std::sync::Arc;
+
+use base_db::{salsa, SourceDatabase};
+use either::Either;
+use limit::Limit;
+use mbe::syntax_node_to_token_tree;
+use rustc_hash::FxHashSet;
+use syntax::{
+ ast::{self, HasAttrs, HasDocComments},
+ AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
+};
+
+use crate::{
+ ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, fixup,
+ hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
+ ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
+ MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
+};
+
+/// Total limit on the number of tokens produced by any macro invocation.
+///
+/// If an invocation produces more tokens than this limit, it will not be stored in the database and
+/// an error will be emitted.
+///
+/// Actual max for `analysis-stats .` at some point: 30672.
+static TOKEN_LIMIT: Limit = Limit::new(524_288);
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum TokenExpander {
+ /// Old-style `macro_rules` or the new macros 2.0
+ DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+ /// Stuff like `line!` and `file!`.
+ Builtin(BuiltinFnLikeExpander),
+ /// `global_allocator` and such.
+ BuiltinAttr(BuiltinAttrExpander),
+ /// `derive(Copy)` and such.
+ BuiltinDerive(BuiltinDeriveExpander),
+ /// The thing we love the most here in rust-analyzer -- procedural macros.
+ ProcMacro(ProcMacroExpander),
+}
+
+impl TokenExpander {
+ fn expand(
+ &self,
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+ ) -> ExpandResult<tt::Subtree> {
+ match self {
+ TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
+ TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
+ TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
+ TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
+ TokenExpander::ProcMacro(_) => {
+ // We store the result in salsa db to prevent non-deterministic behavior in
+ // some proc-macro implementations
+ // See #4315 for details
+ db.expand_proc_macro(id)
+ }
+ }
+ }
+
+ pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+ match self {
+ TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
+ TokenExpander::Builtin(..)
+ | TokenExpander::BuiltinAttr(..)
+ | TokenExpander::BuiltinDerive(..)
+ | TokenExpander::ProcMacro(..) => id,
+ }
+ }
+
+ pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+ match self {
+ TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
+ TokenExpander::Builtin(..)
+ | TokenExpander::BuiltinAttr(..)
+ | TokenExpander::BuiltinDerive(..)
+ | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
+ }
+ }
+}
+
+// FIXME: rename to ExpandDatabase
+#[salsa::query_group(AstDatabaseStorage)]
+pub trait AstDatabase: SourceDatabase {
+ fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
+
+ /// Main public API -- parses a hir file, not caring whether it's a real
+ /// file or a macro expansion.
+ #[salsa::transparent]
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
+ /// Implementation for the macro case.
+ fn parse_macro_expansion(
+ &self,
+ macro_file: MacroFile,
+ ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
+
+ /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
+ /// reason why we use salsa at all.
+ ///
+ /// We encode macro definitions into ids of macro calls, which is what allows us
+ /// to be incremental.
+ #[salsa::interned]
+ fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+
+ /// Lowers syntactic macro call to a token tree representation.
+ #[salsa::transparent]
+ fn macro_arg(
+ &self,
+ id: MacroCallId,
+ ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+ /// Extracts the syntax node corresponding to a macro call. That's a firewall
+ /// query: only typing inside the macro call itself changes the returned
+ /// subtree.
+ fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
+ /// Gets the expander for this macro. This compiles declarative macros, and
+ /// just fetches procedural ones.
+ fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+
+ /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory)
+ fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
+ /// Special case of the previous query for procedural macros. We can't LRU
+ /// proc macros, since they are not deterministic in general, and
+ /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
+ /// heroically debugged this once!
+ fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
+ /// Firewall query that returns the error from the `macro_expand` query.
+ fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
+
+ fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
+}
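
To make the query layering concrete, a minimal hypothetical helper (not part of this file) that exercises the stack for one call; it only uses queries declared above plus `MacroCallId::as_file`.

```rust
// Hypothetical sketch: `macro_arg_text`/`macro_arg` lower the call site,
// `macro_def` builds the expander, `macro_expand` runs it, and
// `parse_or_expand` re-parses the produced token tree.
fn dump_expansion(db: &dyn AstDatabase, call: MacroCallId) -> Option<SyntaxNode> {
    if let Some(err) = db.macro_expand_error(call) {
        tracing::warn!("macro expansion failed: {}", err);
    }
    db.parse_or_expand(call.as_file())
}
```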
+
+/// This expands the given macro call, but with different arguments. This is
+/// used for completion, where we want to see what 'would happen' if we insert a
+/// token. The `token_to_map` is mapped down into the expansion, and the mapped
+/// token is returned.
+pub fn expand_speculative(
+ db: &dyn AstDatabase,
+ actual_macro_call: MacroCallId,
+ speculative_args: &SyntaxNode,
+ token_to_map: SyntaxToken,
+) -> Option<(SyntaxNode, SyntaxToken)> {
+ let loc = db.lookup_intern_macro_call(actual_macro_call);
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let token_range = token_to_map.text_range();
+
+ // Build the subtree and token mapping for the speculative args
+ let censor = censor_for_macro_input(&loc, speculative_args);
+ let mut fixups = fixup::fixup_syntax(speculative_args);
+ fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+ let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ speculative_args,
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
+ let (attr_arg, token_id) = match loc.kind {
+ MacroCallKind::Attr { invoc_attr_index, is_derive, .. } => {
+ let attr = if is_derive {
+ // for pseudo-derive expansion we actually pass the attribute itself only
+ ast::Attr::cast(speculative_args.clone())
+ } else {
+ // Attributes may have an input token tree, build the subtree and map for this as well
+ // then try finding a token id for our token if it is inside this input subtree.
+ let item = ast::Item::cast(speculative_args.clone())?;
+ item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left)
+ }?;
+ match attr.token_tree() {
+ Some(token_tree) => {
+ let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
+ tree.delimiter = None;
+
+ let shift = mbe::Shift::new(&tt);
+ shift.shift_all(&mut tree);
+
+ let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
+ let attr_input_start =
+ token_tree.left_delimiter_token()?.text_range().start();
+ let range = token_range.checked_sub(attr_input_start)?;
+ let token_id = shift.shift(map.token_by_range(range)?);
+ Some(token_id)
+ } else {
+ None
+ };
+ (Some(tree), token_id)
+ }
+ _ => (None, None),
+ }
+ }
+ _ => (None, None),
+ };
+ let token_id = match token_id {
+ Some(token_id) => token_id,
+ // token wasn't inside an attribute input so it has to be in the general macro input
+ None => {
+ let range = token_range.checked_sub(speculative_args.text_range().start())?;
+ let token_id = spec_args_tmap.token_by_range(range)?;
+ macro_def.map_id_down(token_id)
+ }
+ };
+
+ // Do the actual expansion. We need to directly expand the proc macro due to the attribute args;
+ // otherwise the expand query would fetch the non-speculative attribute args and pass those instead.
+ let mut speculative_expansion = match loc.def.kind {
+ MacroDefKind::ProcMacro(expander, ..) => {
+ tt.delimiter = None;
+ expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
+ }
+ MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
+ pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
+ }
+ _ => macro_def.expand(db, actual_macro_call, &tt),
+ };
+
+ let expand_to = macro_expand_to(db, actual_macro_call);
+ fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
+ let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
+
+ let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
+ let token = node.syntax_node().covering_element(range).into_token()?;
+ Some((node.syntax_node(), token))
+}
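
A hedged usage sketch of `expand_speculative` from a hypothetical completion caller; the helper names below are assumptions, only the call shape follows from the signature above.

```rust
// Hypothetical caller (IDE completion), illustrative only.
let speculative_args: SyntaxNode = edited_copy_of_macro_arguments(); // assumed helper
let typed_token: SyntaxToken = token_just_typed(&speculative_args);  // assumed helper
if let Some((expansion, token_in_expansion)) =
    expand_speculative(db, actual_macro_call, &speculative_args, typed_token)
{
    // `token_in_expansion` is the user's token relocated into `expansion`,
    // so completions can be computed against the expanded code.
}
```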
+
+fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
+ let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
+ Arc::new(map)
+}
+
+fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+ match file_id.0 {
+ HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
+ HirFileIdRepr::MacroFile(macro_file) => {
+ // FIXME: Note how we convert from `Parse` to `SyntaxNode` here,
+ // forgetting about parse errors.
+ db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
+ }
+ }
+}
+
+fn parse_macro_expansion(
+ db: &dyn AstDatabase,
+ macro_file: MacroFile,
+) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
+ let _p = profile::span("parse_macro_expansion");
+ let result = db.macro_expand(macro_file.macro_call_id);
+
+ if let Some(err) = &result.err {
+ // Note:
+ // Ultimately we would like all `parse_macro` calls to succeed,
+ // so that the following log is never reached.
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let node = loc.kind.to_node(db);
+
+ // collect parent information for warning log
+ let parents =
+ std::iter::successors(loc.kind.file_id().call_node(db), |it| it.file_id.call_node(db))
+ .map(|n| format!("{:#}", n.value))
+ .collect::<Vec<_>>()
+ .join("\n");
+
+ tracing::debug!(
+ "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
+ err,
+ node.value,
+ parents
+ );
+ }
+ let tt = match result.value {
+ Some(tt) => tt,
+ None => return ExpandResult { value: None, err: result.err },
+ };
+
+ let expand_to = macro_expand_to(db, macro_file.macro_call_id);
+
+ tracing::debug!("expanded = {}", tt.as_debug_string());
+ tracing::debug!("kind = {:?}", expand_to);
+
+ let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
+
+ ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: result.err }
+}
+
+fn macro_arg(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+ let arg = db.macro_arg_text(id)?;
+ let loc = db.lookup_intern_macro_call(id);
+
+ let node = SyntaxNode::new_root(arg);
+ let censor = censor_for_macro_input(&loc, &node);
+ let mut fixups = fixup::fixup_syntax(&node);
+ fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+ let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ &node,
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
+ if loc.def.is_proc_macro() {
+ // proc macros expect their inputs without parentheses, MBEs expect them with the parentheses included
+ tt.delimiter = None;
+ }
+
+ Some(Arc::new((tt, tmap, fixups.undo_info)))
+}
+
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+ (|| {
+ let censor = match loc.kind {
+ MacroCallKind::FnLike { .. } => return None,
+ MacroCallKind::Derive { derive_attr_index, .. } => {
+ cov_mark::hit!(derive_censoring);
+ ast::Item::cast(node.clone())?
+ .attrs()
+ .take(derive_attr_index as usize + 1)
+ // FIXME
+ .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
+ .map(|it| it.syntax().clone())
+ .collect()
+ }
+ MacroCallKind::Attr { is_derive: true, .. } => return None,
+ MacroCallKind::Attr { invoc_attr_index, .. } => {
+ cov_mark::hit!(attribute_macro_attr_censoring);
+ ast::Item::cast(node.clone())?
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .and_then(Either::left)
+ .map(|attr| attr.syntax().clone())
+ .into_iter()
+ .collect()
+ }
+ };
+ Some(censor)
+ })()
+ .unwrap_or_default()
+}
+
+fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
+ let loc = db.lookup_intern_macro_call(id);
+ let arg = loc.kind.arg(db)?;
+ if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
+ let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
+ let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
+ let well_formed_tt =
+ matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
+ if !well_formed_tt {
+ // Don't expand malformed (unbalanced) macro invocations. This is
+ // less than ideal, but trying to expand unbalanced macro calls
+ // sometimes produces pathological, deeply nested code which breaks
+ // all kinds of things.
+ //
+ // Some day, we'll have explicit recursion counters for all
+ // recursive things, at which point this code might be removed.
+ cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+ return None;
+ }
+ }
+ Some(arg.green().into())
+}
+
+fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+ match id.kind {
+ MacroDefKind::Declarative(ast_id) => {
+ let (mac, def_site_token_map) = match ast_id.to_node(db) {
+ ast::Macro::MacroRules(macro_rules) => {
+ let arg = macro_rules
+ .token_tree()
+ .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
+ let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt)?;
+ (mac, def_site_token_map)
+ }
+ ast::Macro::MacroDef(macro_def) => {
+ let arg = macro_def
+ .body()
+ .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
+ let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt)?;
+ (mac, def_site_token_map)
+ }
+ };
+ Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
+ }
+ MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
+ MacroDefKind::BuiltInAttr(expander, _) => {
+ Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
+ }
+ MacroDefKind::BuiltInDerive(expander, _) => {
+ Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
+ }
+ MacroDefKind::BuiltInEager(..) => {
+ // FIXME: Return a random error here just to make the types align.
+ // This obviously should do something real instead.
+ Err(mbe::ParseError::UnexpectedToken("unexpected eager macro".into()))
+ }
+ MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
+ }
+}
+
+fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
+ let _p = profile::span("macro_expand");
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ if let Some(eager) = &loc.eager {
+ return ExpandResult {
+ value: Some(eager.arg_or_expansion.clone()),
+ // FIXME: There could be errors here!
+ err: None,
+ };
+ }
+
+ let macro_arg = match db.macro_arg(id) {
+ Some(it) => it,
+ None => {
+ return ExpandResult::only_err(ExpandError::Other(
+ "Failed to lower macro args to token tree".into(),
+ ))
+ }
+ };
+
+ let expander = match db.macro_def(loc.def) {
+ Ok(it) => it,
+ // FIXME: This is weird -- we effectively report macro *definition*
+ // errors lazily, when we try to expand the macro. Instead, they should
+ // be reported at the definition site (when we construct a def map).
+ Err(err) => {
+ return ExpandResult::only_err(ExpandError::Other(
+ format!("invalid macro definition: {}", err).into(),
+ ))
+ }
+ };
+ let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
+ // Set a hard limit for the expanded tt
+ let count = tt.count();
+ if TOKEN_LIMIT.check(count).is_err() {
+ return ExpandResult::only_err(ExpandError::Other(
+ format!(
+ "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+ count,
+ TOKEN_LIMIT.inner(),
+ )
+ .into(),
+ ));
+ }
+
+ fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);
+
+ ExpandResult { value: Some(Arc::new(tt)), err }
+}
+
+fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
+ db.macro_expand(macro_call).err
+}
+
+fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ let macro_arg = match db.macro_arg(id) {
+ Some(it) => it,
+ None => {
+ return ExpandResult::only_err(ExpandError::Other("No arguments for proc-macro".into()))
+ }
+ };
+
+ let expander = match loc.def.kind {
+ MacroDefKind::ProcMacro(expander, ..) => expander,
+ _ => unreachable!(),
+ };
+
+ let attr_arg = match &loc.kind {
+ MacroCallKind::Attr { attr_args, .. } => {
+ let mut attr_args = attr_args.0.clone();
+ mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
+ Some(attr_args)
+ }
+ _ => None,
+ };
+
+ expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
+}
+
+fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
+ Arc::new(HygieneFrame::new(db, file_id))
+}
+
+fn macro_expand_to(db: &dyn AstDatabase, id: MacroCallId) -> ExpandTo {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ loc.kind.expand_to()
+}
+
+fn token_tree_to_syntax_node(
+ tt: &tt::Subtree,
+ expand_to: ExpandTo,
+) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+ let entry_point = match expand_to {
+ ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
+ ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
+ ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
+ ExpandTo::Type => mbe::TopEntryPoint::Type,
+ ExpandTo::Expr => mbe::TopEntryPoint::Expr,
+ };
+ mbe::token_tree_to_syntax_node(tt, entry_point)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
new file mode 100644
index 000000000..5fd099aea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -0,0 +1,266 @@
+//! Eager expansion related utils
+//!
+//! Here is a dump of a discussion from Vadim Petrochenkov about eager expansion and
+//! its name resolution:
+//!
+//! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros,
+//! > which actually happens in practice too!) are resolved at the location of the "root" macro
+//! > that performs the eager expansion on its arguments.
+//! > If some name cannot be resolved at the eager expansion time it's considered unresolved,
+//! > even if it becomes available later (e.g. from a glob import or other macro).
+//!
+//! > Eagerly expanded macros don't add anything to the module structure of the crate and
+//! > don't build any speculative module structures, i.e. they are expanded in a "flat"
+//! > way even if tokens in them look like modules.
+//!
+//! > In other words, it kinda works for simple cases for which it was originally intended,
+//! > and we need to live with it because it's available on stable and widely relied upon.
+//!
+//!
+//! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
+use std::sync::Arc;
+
+use base_db::CrateId;
+use syntax::{ted, SyntaxNode};
+
+use crate::{
+ ast::{self, AstNode},
+ db::AstDatabase,
+ hygiene::Hygiene,
+ mod_path::ModPath,
+ EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
+ MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
+};
+
+#[derive(Debug)]
+pub struct ErrorEmitted {
+ _private: (),
+}
+
+pub trait ErrorSink {
+ fn emit(&mut self, err: ExpandError);
+
+ fn option<T>(
+ &mut self,
+ opt: Option<T>,
+ error: impl FnOnce() -> ExpandError,
+ ) -> Result<T, ErrorEmitted> {
+ match opt {
+ Some(it) => Ok(it),
+ None => {
+ self.emit(error());
+ Err(ErrorEmitted { _private: () })
+ }
+ }
+ }
+
+ fn option_with<T>(
+ &mut self,
+ opt: impl FnOnce() -> Option<T>,
+ error: impl FnOnce() -> ExpandError,
+ ) -> Result<T, ErrorEmitted> {
+ self.option(opt(), error)
+ }
+
+ fn result<T>(&mut self, res: Result<T, ExpandError>) -> Result<T, ErrorEmitted> {
+ match res {
+ Ok(it) => Ok(it),
+ Err(e) => {
+ self.emit(e);
+ Err(ErrorEmitted { _private: () })
+ }
+ }
+ }
+
+ fn expand_result_option<T>(&mut self, res: ExpandResult<Option<T>>) -> Result<T, ErrorEmitted> {
+ match (res.value, res.err) {
+ (None, Some(err)) => {
+ self.emit(err);
+ Err(ErrorEmitted { _private: () })
+ }
+ (Some(value), opt_err) => {
+ if let Some(err) = opt_err {
+ self.emit(err);
+ }
+ Ok(value)
+ }
+ (None, None) => unreachable!("`ExpandResult` without value or error"),
+ }
+ }
+}
+
+impl ErrorSink for &'_ mut dyn FnMut(ExpandError) {
+ fn emit(&mut self, err: ExpandError) {
+ self(err);
+ }
+}
+
+pub fn expand_eager_macro(
+ db: &dyn AstDatabase,
+ krate: CrateId,
+ macro_call: InFile<ast::MacroCall>,
+ def: MacroDefId,
+ resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+ diagnostic_sink: &mut dyn FnMut(ExpandError),
+) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
+ let hygiene = Hygiene::new(db, macro_call.file_id);
+ let parsed_args = macro_call
+ .value
+ .token_tree()
+ .map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0)
+ .unwrap_or_default();
+
+ let ast_map = db.ast_id_map(macro_call.file_id);
+ let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
+ let expand_to = ExpandTo::from_call_site(&macro_call.value);
+
+ // Note:
+ // When `lazy_expand` is called, its *parent* file must already exist.
+ // Here we store an eager macro id for the argument's expanded subtree
+ // for that purpose.
+ let arg_id = db.intern_macro_call(MacroCallLoc {
+ def,
+ krate,
+ eager: Some(EagerCallInfo {
+ arg_or_expansion: Arc::new(parsed_args.clone()),
+ included_file: None,
+ }),
+ kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
+ });
+
+ let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr).0;
+ let result = match eager_macro_recur(
+ db,
+ &hygiene,
+ InFile::new(arg_id.as_file(), parsed_args.syntax_node()),
+ krate,
+ resolver,
+ diagnostic_sink,
+ ) {
+ Ok(Ok(it)) => it,
+ Ok(Err(err)) => return Ok(Err(err)),
+ Err(err) => return Err(err),
+ };
+ let subtree = to_subtree(&result);
+
+ if let MacroDefKind::BuiltInEager(eager, _) = def.kind {
+ let res = eager.expand(db, arg_id, &subtree);
+ if let Some(err) = res.err {
+ diagnostic_sink(err);
+ }
+
+ let loc = MacroCallLoc {
+ def,
+ krate,
+ eager: Some(EagerCallInfo {
+ arg_or_expansion: Arc::new(res.value.subtree),
+ included_file: res.value.included_file,
+ }),
+ kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+ };
+
+ Ok(Ok(db.intern_macro_call(loc)))
+ } else {
+ panic!("called `expand_eager_macro` on non-eager macro def {:?}", def);
+ }
+}
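
A sketch of driving this from a hypothetical caller for a call such as `concat!(env!("OUT_DIR"), "/gen.rs")`: the arguments are expanded first by `eager_macro_recur`, and only then does the outer eager expander run on the result.

```rust
// Hypothetical caller, illustrative only; `resolver` is an assumed
// `Fn(ModPath) -> Option<MacroDefId>` closure.
let mut sink = |err: ExpandError| eprintln!("eager expansion error: {}", err);
match expand_eager_macro(db, krate, macro_call, def, &resolver, &mut sink) {
    Ok(Ok(call_id)) => {
        // The interned call carries the fully expanded argument subtree;
        // its expansion can be parsed like any other macro file.
        let _expansion = db.parse_or_expand(call_id.as_file());
    }
    Ok(Err(_already_emitted)) => { /* diagnostics went to `sink` */ }
    Err(_unresolved) => { /* a macro named in the arguments could not be resolved */ }
}
```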
+
+fn to_subtree(node: &SyntaxNode) -> tt::Subtree {
+ let mut subtree = mbe::syntax_node_to_token_tree(node).0;
+ subtree.delimiter = None;
+ subtree
+}
+
+fn lazy_expand(
+ db: &dyn AstDatabase,
+ def: &MacroDefId,
+ macro_call: InFile<ast::MacroCall>,
+ krate: CrateId,
+) -> ExpandResult<Option<InFile<SyntaxNode>>> {
+ let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
+
+ let expand_to = ExpandTo::from_call_site(&macro_call.value);
+ let id = def.as_lazy_macro(
+ db,
+ krate,
+ MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
+ );
+
+ let err = db.macro_expand_error(id);
+ let value = db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node));
+
+ ExpandResult { value, err }
+}
+
+fn eager_macro_recur(
+ db: &dyn AstDatabase,
+ hygiene: &Hygiene,
+ curr: InFile<SyntaxNode>,
+ krate: CrateId,
+ macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+ mut diagnostic_sink: &mut dyn FnMut(ExpandError),
+) -> Result<Result<SyntaxNode, ErrorEmitted>, UnresolvedMacro> {
+ let original = curr.value.clone_for_update();
+
+ let children = original.descendants().filter_map(ast::MacroCall::cast);
+ let mut replacements = Vec::new();
+
+ // Collect replacement
+ for child in children {
+ let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+ Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
+ None => {
+ diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
+ continue;
+ }
+ };
+ let insert = match def.kind {
+ MacroDefKind::BuiltInEager(..) => {
+ let id = match expand_eager_macro(
+ db,
+ krate,
+ curr.with_value(child.clone()),
+ def,
+ macro_resolver,
+ diagnostic_sink,
+ ) {
+ Ok(Ok(it)) => it,
+ Ok(Err(err)) => return Ok(Err(err)),
+ Err(err) => return Err(err),
+ };
+ db.parse_or_expand(id.as_file())
+ .expect("successful macro expansion should be parseable")
+ .clone_for_update()
+ }
+ MacroDefKind::Declarative(_)
+ | MacroDefKind::BuiltIn(..)
+ | MacroDefKind::BuiltInAttr(..)
+ | MacroDefKind::BuiltInDerive(..)
+ | MacroDefKind::ProcMacro(..) => {
+ let res = lazy_expand(db, &def, curr.with_value(child.clone()), krate);
+ let val = match diagnostic_sink.expand_result_option(res) {
+ Ok(it) => it,
+ Err(err) => return Ok(Err(err)),
+ };
+
+ // replace macro inside
+ let hygiene = Hygiene::new(db, val.file_id);
+ match eager_macro_recur(db, &hygiene, val, krate, macro_resolver, diagnostic_sink) {
+ Ok(Ok(it)) => it,
+ Ok(Err(err)) => return Ok(Err(err)),
+ Err(err) => return Err(err),
+ }
+ }
+ };
+
+ // check if the whole original syntax is replaced
+ if child.syntax() == &original {
+ return Ok(Ok(insert));
+ }
+
+ replacements.push((child, insert));
+ }
+
+ replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
+ Ok(Ok(original))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
new file mode 100644
index 000000000..9999790fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -0,0 +1,382 @@
+//! To make attribute macros work reliably when typing, we need to take care to
+//! fix up syntax errors in the code we're passing to them.
+use std::mem;
+
+use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, AstNode},
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+};
+use tt::Subtree;
+
+/// The result of calculating fixes for a syntax node -- a bunch of changes
+/// (appending to and replacing nodes), the information that is needed to
+/// reverse those changes afterwards, and a token map.
+#[derive(Debug)]
+pub(crate) struct SyntaxFixups {
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) undo_info: SyntaxFixupUndoInfo,
+ pub(crate) token_map: TokenMap,
+ pub(crate) next_id: u32,
+}
+
+/// This is the information needed to reverse the fixups.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SyntaxFixupUndoInfo {
+ original: Vec<Subtree>,
+}
+
+const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+
+pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+ let mut append = FxHashMap::<SyntaxElement, _>::default();
+ let mut replace = FxHashMap::<SyntaxElement, _>::default();
+ let mut preorder = node.preorder();
+ let mut original = Vec::new();
+ let mut token_map = TokenMap::default();
+ let mut next_id = 0;
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ syntax::WalkEvent::Enter(node) => node,
+ syntax::WalkEvent::Leave(_) => continue,
+ };
+
+ if can_handle_error(&node) && has_error_to_handle(&node) {
+ // the node contains an error node; we have to completely replace it with something valid
+ let (original_tree, new_tmap, new_next_id) =
+ mbe::syntax_node_to_token_tree_with_modifications(
+ &node,
+ mem::take(&mut token_map),
+ next_id,
+ Default::default(),
+ Default::default(),
+ );
+ token_map = new_tmap;
+ next_id = new_next_id;
+ let idx = original.len() as u32;
+ original.push(original_tree);
+ let replacement = SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: node.text_range(),
+ id: SyntheticTokenId(idx),
+ };
+ replace.insert(node.clone().into(), vec![replacement]);
+ preorder.skip_subtree();
+ continue;
+ }
+
+ // In some other situations, we can fix things by just appending some tokens.
+ let end_range = TextRange::empty(node.text_range().end());
+ match_ast! {
+ match node {
+ ast::FieldExpr(it) => {
+ if it.name_ref().is_none() {
+ // incomplete field access: some_expr.|
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::ExprStmt(it) => {
+ if it.semicolon_token().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::SEMICOLON,
+ text: ";".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::LetStmt(it) => {
+ if it.semicolon_token().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::SEMICOLON,
+ text: ";".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::IfExpr(it) => {
+ if it.condition().is_none() {
+ // insert placeholder token after the if token
+ let if_token = match it.if_token() {
+ Some(t) => t,
+ None => continue,
+ };
+ append.insert(if_token.into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ if it.then_branch().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ // FIXME: foo::
+ // FIXME: for, loop, match etc.
+ _ => (),
+ }
+ }
+ }
+ SyntaxFixups {
+ append,
+ replace,
+ token_map,
+ next_id,
+ undo_info: SyntaxFixupUndoInfo { original },
+ }
+}
+
+fn has_error(node: &SyntaxNode) -> bool {
+ node.children().any(|c| c.kind() == SyntaxKind::ERROR)
+}
+
+fn can_handle_error(node: &SyntaxNode) -> bool {
+ ast::Expr::can_cast(node.kind())
+}
+
+fn has_error_to_handle(node: &SyntaxNode) -> bool {
+ has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
+}
+
+pub(crate) fn reverse_fixups(
+ tt: &mut Subtree,
+ token_map: &TokenMap,
+ undo_info: &SyntaxFixupUndoInfo,
+) {
+ tt.token_trees.retain(|tt| match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ token_map.synthetic_token_id(leaf.id()).is_none()
+ || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
+ }
+ tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
+ token_map.synthetic_token_id(d.id).is_none()
+ || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
+ }),
+ });
+ tt.token_trees.iter_mut().for_each(|tt| match tt {
+ tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
+ tt::TokenTree::Leaf(leaf) => {
+ if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
+ let original = &undo_info.original[id.0 as usize];
+ *tt = tt::TokenTree::Subtree(original.clone());
+ }
+ }
+ });
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use super::reverse_fixups;
+
+ #[track_caller]
+ fn check(ra_fixture: &str, mut expect: Expect) {
+ let parsed = syntax::SourceFile::parse(ra_fixture);
+ let fixups = super::fixup_syntax(&parsed.syntax_node());
+ let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ &parsed.syntax_node(),
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
+ let mut actual = tt.to_string();
+ actual.push('\n');
+
+ expect.indent(false);
+ expect.assert_eq(&actual);
+
+ // the fixed-up tree should be syntactically valid
+ let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+ assert_eq!(
+ parse.errors(),
+ &[],
+ "parse has syntax errors. parse tree:\n{:#?}",
+ parse.syntax_node()
+ );
+
+ reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+
+ // the fixed-up + reversed version should be equivalent to the original input
+ // (but token IDs don't matter)
+ let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+ assert_eq!(tt.to_string(), original_as_tt.to_string());
+ }
+
+ #[test]
+ fn incomplete_field_expr_1() {
+ check(
+ r#"
+fn foo() {
+ a.
+}
+"#,
+ expect![[r#"
+fn foo () {a . __ra_fixup}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_2() {
+ check(
+ r#"
+fn foo() {
+ a. ;
+}
+"#,
+ expect![[r#"
+fn foo () {a . __ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_3() {
+ check(
+ r#"
+fn foo() {
+ a. ;
+ bar();
+}
+"#,
+ expect![[r#"
+fn foo () {a . __ra_fixup ; bar () ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_let() {
+ check(
+ r#"
+fn foo() {
+ let x = a
+}
+"#,
+ expect![[r#"
+fn foo () {let x = a ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_in_let() {
+ check(
+ r#"
+fn foo() {
+ let x = a.
+}
+"#,
+ expect![[r#"
+fn foo () {let x = a . __ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn field_expr_before_call() {
+ // another case that easily happens while typing
+ check(
+ r#"
+fn foo() {
+ a.b
+ bar();
+}
+"#,
+ expect![[r#"
+fn foo () {a . b ; bar () ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn extraneous_comma() {
+ check(
+ r#"
+fn foo() {
+ bar(,);
+}
+"#,
+ expect![[r#"
+fn foo () {__ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_1() {
+ check(
+ r#"
+fn foo() {
+ if a
+}
+"#,
+ expect![[r#"
+fn foo () {if a {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_2() {
+ check(
+ r#"
+fn foo() {
+ if
+}
+"#,
+ expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_3() {
+ check(
+ r#"
+fn foo() {
+ if {}
+}
+"#,
+ // the {} gets parsed as the condition, I think?
+ expect![[r#"
+fn foo () {if {} {}}
+"#]],
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
new file mode 100644
index 000000000..d60734372
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -0,0 +1,256 @@
+//! This module handles hygiene information.
+//!
+//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
+//! this moment, this is horribly incomplete and handles only `$crate`.
+use std::sync::Arc;
+
+use base_db::CrateId;
+use db::TokenExpander;
+use either::Either;
+use mbe::Origin;
+use syntax::{
+ ast::{self, HasDocComments},
+ AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::{self, AstDatabase},
+ fixup,
+ name::{AsName, Name},
+ HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
+};
+
+#[derive(Clone, Debug)]
+pub struct Hygiene {
+ frames: Option<HygieneFrames>,
+}
+
+impl Hygiene {
+ pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
+ Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
+ }
+
+ pub fn new_unhygienic() -> Hygiene {
+ Hygiene { frames: None }
+ }
+
+ // FIXME: this should just return name
+ pub fn name_ref_to_name(
+ &self,
+ db: &dyn AstDatabase,
+ name_ref: ast::NameRef,
+ ) -> Either<Name, CrateId> {
+ if let Some(frames) = &self.frames {
+ if name_ref.text() == "$crate" {
+ if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
+ return Either::Right(krate);
+ }
+ }
+ }
+
+ Either::Left(name_ref.as_name())
+ }
+
+ pub fn local_inner_macros(&self, db: &dyn AstDatabase, path: ast::Path) -> Option<CrateId> {
+ let mut token = path.syntax().first_token()?.text_range();
+ let frames = self.frames.as_ref()?;
+ let mut current = &frames.0;
+
+ loop {
+ let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
+ if origin == Origin::Def {
+ return if current.local_inner {
+ frames.root_crate(db, path.syntax())
+ } else {
+ None
+ };
+ }
+ current = current.call_site.as_ref()?;
+ token = mapped.value;
+ }
+ }
+}
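
A small usage sketch (hypothetical caller): resolving the first segment of a path that came out of a macro expansion, where `$crate` must map back to the macro's defining crate.

```rust
// Hypothetical caller; `resolve_in_scope` / `resolve_in_crate_root` are assumed helpers.
let hygiene = Hygiene::new(db, file_id);
match hygiene.name_ref_to_name(db, name_ref) {
    // An ordinary identifier: resolve it in the current scope.
    Either::Left(name) => resolve_in_scope(name),
    // `$crate`: resolve against the root module of `krate`.
    Either::Right(krate) => resolve_in_crate_root(krate),
}
```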
+
+#[derive(Clone, Debug)]
+struct HygieneFrames(Arc<HygieneFrame>);
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct HygieneFrame {
+ expansion: Option<HygieneInfo>,
+
+ // Indicates whether this is a local inner macro
+ local_inner: bool,
+ krate: Option<CrateId>,
+
+ call_site: Option<Arc<HygieneFrame>>,
+ def_site: Option<Arc<HygieneFrame>>,
+}
+
+impl HygieneFrames {
+ fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self {
+ // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
+ // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
+ HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
+ }
+
+ fn root_crate(&self, db: &dyn AstDatabase, node: &SyntaxNode) -> Option<CrateId> {
+ let mut token = node.first_token()?.text_range();
+ let mut result = self.0.krate;
+ let mut current = self.0.clone();
+
+ while let Some((mapped, origin)) =
+ current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
+ {
+ result = current.krate;
+
+ let site = match origin {
+ Origin::Def => &current.def_site,
+ Origin::Call => &current.call_site,
+ };
+
+ let site = match site {
+ None => break,
+ Some(it) => it,
+ };
+
+ current = site.clone();
+ token = mapped.value;
+ }
+
+ result
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct HygieneInfo {
+ file: MacroFile,
+ /// The start offset of the `macro_rules!` arguments or attribute input.
+ attr_input_or_mac_def_start: Option<InFile<TextSize>>,
+
+ macro_def: Arc<TokenExpander>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
+ macro_arg_shift: mbe::Shift,
+ exp_map: Arc<mbe::TokenMap>,
+}
+
+impl HygieneInfo {
+ fn map_ident_up(
+ &self,
+ db: &dyn AstDatabase,
+ token: TextRange,
+ ) -> Option<(InFile<TextRange>, Origin)> {
+ let token_id = self.exp_map.token_by_range(token)?;
+ let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
+
+ let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
+
+ let (token_map, tt) = match &loc.kind {
+ MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
+ Some(unshifted) => {
+ token_id = unshifted;
+ (&attr_args.1, self.attr_input_or_mac_def_start?)
+ }
+ None => (
+ &self.macro_arg.1,
+ InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
+ ),
+ },
+ _ => match origin {
+ mbe::Origin::Call => (
+ &self.macro_arg.1,
+ InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
+ ),
+ mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
+ (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+ (def_site_token_map, *tt)
+ }
+ _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+ },
+ },
+ };
+
+ let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
+ Some((tt.with_value(range + tt.value), origin))
+ }
+}
+
+fn make_hygiene_info(
+ db: &dyn AstDatabase,
+ macro_file: MacroFile,
+ loc: &MacroCallLoc,
+) -> Option<HygieneInfo> {
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
+ Some(HygieneInfo {
+ file: macro_file,
+ attr_input_or_mac_def_start: attr_input_or_mac_def
+ .map(|it| it.map(|tt| tt.syntax().text_range().start())),
+ macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+}
+
+impl HygieneFrame {
+ pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame {
+ let (info, krate, local_inner) = match file_id.0 {
+ HirFileIdRepr::FileId(_) => (None, None, false),
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let info =
+ make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info));
+ match loc.def.kind {
+ MacroDefKind::Declarative(_) => {
+ (info, Some(loc.def.krate), loc.def.local_inner)
+ }
+ MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
+ MacroDefKind::BuiltInAttr(..) => (info, None, false),
+ MacroDefKind::BuiltInDerive(..) => (info, None, false),
+ MacroDefKind::BuiltInEager(..) => (info, None, false),
+ MacroDefKind::ProcMacro(..) => (info, None, false),
+ }
+ }
+ };
+
+ let (calling_file, info) = match info {
+ None => {
+ return HygieneFrame {
+ expansion: None,
+ local_inner,
+ krate,
+ call_site: None,
+ def_site: None,
+ };
+ }
+ Some(it) => it,
+ };
+
+ let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
+ let call_site = Some(db.hygiene_frame(calling_file));
+
+ HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
new file mode 100644
index 000000000..252293090
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -0,0 +1,1000 @@
+//! `hir_expand` deals with macro expansion.
+//!
+//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
+//! tree originates not from the text of some `FileId`, but from some macro
+//! expansion.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod db;
+pub mod ast_id_map;
+pub mod name;
+pub mod hygiene;
+pub mod builtin_attr_macro;
+pub mod builtin_derive_macro;
+pub mod builtin_fn_macro;
+pub mod proc_macro;
+pub mod quote;
+pub mod eager;
+pub mod mod_path;
+mod fixup;
+
+pub use mbe::{Origin, ValueResult};
+
+use std::{fmt, hash::Hash, iter, sync::Arc};
+
+use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind};
+use either::Either;
+use syntax::{
+ algo::{self, skip_trivia_token},
+ ast::{self, AstNode, HasDocComments},
+ Direction, SyntaxNode, SyntaxToken,
+};
+
+use crate::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::BuiltinAttrExpander,
+ builtin_derive_macro::BuiltinDeriveExpander,
+ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
+ db::TokenExpander,
+ mod_path::ModPath,
+ proc_macro::ProcMacroExpander,
+};
+
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+ UnresolvedProcMacro(CrateId),
+ Mbe(mbe::ExpandError),
+ Other(Box<str>),
+}
+
+impl From<mbe::ExpandError> for ExpandError {
+ fn from(mbe: mbe::ExpandError) -> Self {
+ Self::Mbe(mbe)
+ }
+}
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
+ ExpandError::Mbe(it) => it.fmt(f),
+ ExpandError::Other(it) => f.write_str(it),
+ }
+ }
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust is macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by the user), or a `MacroCallId` (source code produced by a macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses location interning; you can check the details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
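+///
+/// A minimal usage sketch (not part of the original source; assumes a plain
+/// `FileId` value named `file_id` is in scope):
+///
+/// ```ignore
+/// let hir_id: HirFileId = file_id.into(); // via the `From<FileId>` impl below
+/// assert!(!hir_id.is_macro());            // a plain file is not a macro expansion
+/// assert!(hir_id.macro_file().is_none());
+/// ```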
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct HirFileId(HirFileIdRepr);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+enum HirFileIdRepr {
+ FileId(FileId),
+ MacroFile(MacroFile),
+}
+
+impl From<FileId> for HirFileId {
+ fn from(id: FileId) -> Self {
+ HirFileId(HirFileIdRepr::FileId(id))
+ }
+}
+
+impl From<MacroFile> for HirFileId {
+ fn from(id: MacroFile) -> Self {
+ HirFileId(HirFileIdRepr::MacroFile(id))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFile {
+ pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroCallId(salsa::InternId);
+impl_intern_key!(MacroCallId);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCallLoc {
+ pub def: MacroDefId,
+ pub(crate) krate: CrateId,
+ eager: Option<EagerCallInfo>,
+ pub kind: MacroCallKind,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroDefId {
+ pub krate: CrateId,
+ pub kind: MacroDefKind,
+ pub local_inner: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroDefKind {
+ Declarative(AstId<ast::Macro>),
+ BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>),
+ // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
+ BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
+ BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
+ BuiltInEager(EagerExpander, AstId<ast::Macro>),
+ ProcMacro(ProcMacroExpander, ProcMacroKind, AstId<ast::Fn>),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+struct EagerCallInfo {
+ /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+ arg_or_expansion: Arc<tt::Subtree>,
+ included_file: Option<FileId>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum MacroCallKind {
+ FnLike {
+ ast_id: AstId<ast::MacroCall>,
+ expand_to: ExpandTo,
+ },
+ Derive {
+ ast_id: AstId<ast::Adt>,
+ /// Syntactical index of the invoking `#[derive]` attribute.
+ ///
+ /// Outer attributes are counted first, then inner attributes. This does not support
+ /// out-of-line modules, which may have attributes spread across 2 files!
+ derive_attr_index: u32,
+ /// Index of the derive macro in the derive attribute
+ derive_index: u32,
+ },
+ Attr {
+ ast_id: AstId<ast::Item>,
+ attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+ /// Syntactical index of the invoking `#[attribute]`.
+ ///
+ /// Outer attributes are counted first, then inner attributes. This does not support
+ /// out-of-line modules, which may have attributes spread across 2 files!
+ invoc_attr_index: u32,
+ /// Whether this attribute is the `#[derive]` attribute.
+ is_derive: bool,
+ },
+}
+
+impl HirFileId {
+    /// For macro-expansion files, returns the original source file the
+    /// expansion originated from.
+ pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
+ let mut file_id = self;
+ loop {
+ match file_id.0 {
+ HirFileIdRepr::FileId(id) => break id,
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
+ file_id = match loc.eager {
+ Some(EagerCallInfo { included_file: Some(file), .. }) => file.into(),
+ _ => loc.kind.file_id(),
+ };
+ }
+ }
+ }
+ }
+
+ pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
+ let mut level = 0;
+ let mut curr = self;
+ while let HirFileIdRepr::MacroFile(macro_file) = curr.0 {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ level += 1;
+ curr = loc.kind.file_id();
+ }
+ level
+ }
+
+ /// If this is a macro call, returns the syntax node of the call.
+ pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ Some(loc.kind.to_node(db))
+ }
+ }
+ }
+
+ /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+ pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> {
+ let mut call = match self.0 {
+ HirFileIdRepr::FileId(_) => return None,
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ db.lookup_intern_macro_call(macro_call_id).kind.to_node(db)
+ }
+ };
+ loop {
+ match call.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db);
+ }
+ }
+ }
+ }
+
+    /// Returns expansion information if this is a macro-expansion file.
+ pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ let arg_tt = loc.kind.arg(db)?;
+
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(_)
+ if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) =>
+ {
+ return None
+ }
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ Some(ExpansionInfo {
+ expanded: InFile::new(self, parse.syntax_node()),
+ arg: InFile::new(loc.kind.file_id(), arg_tt),
+ attr_input_or_mac_def,
+ macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+ }
+ }
+ }
+
+    /// If this is a macro file generated for a builtin derive, returns the derive attribute.
+ pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Attr>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let attr = match loc.def.kind {
+ MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db),
+ _ => return None,
+ };
+ Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
+ }
+ }
+ }
+
+ pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => false,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
+ }
+ }
+ }
+
+    /// Returns whether this file is the expansion of an include macro.
+ pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. }))
+ }
+ _ => false,
+ }
+ }
+
+    /// Returns whether this file is the expansion of an attribute macro.
+ pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { .. })
+ }
+ _ => false,
+ }
+ }
+
+    /// Returns whether this file is the pseudo-expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. })
+ }
+ _ => false,
+ }
+ }
+
+ pub fn is_macro(self) -> bool {
+ matches!(self.0, HirFileIdRepr::MacroFile(_))
+ }
+
+ pub fn macro_file(self) -> Option<MacroFile> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(m) => Some(m),
+ }
+ }
+}
+
+impl MacroDefId {
+ pub fn as_lazy_macro(
+ self,
+ db: &dyn db::AstDatabase,
+ krate: CrateId,
+ kind: MacroCallKind,
+ ) -> MacroCallId {
+ db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+ }
+
+ pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
+ let id = match self.kind {
+ MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
+ MacroDefKind::Declarative(id)
+ | MacroDefKind::BuiltIn(_, id)
+ | MacroDefKind::BuiltInAttr(_, id)
+ | MacroDefKind::BuiltInDerive(_, id)
+ | MacroDefKind::BuiltInEager(_, id) => id,
+ };
+ Either::Left(id)
+ }
+
+ pub fn is_proc_macro(&self) -> bool {
+ matches!(self.kind, MacroDefKind::ProcMacro(..))
+ }
+
+ pub fn is_attribute(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+ )
+ }
+}
+
+// FIXME: attribute indices do not account for `cfg_attr`, which means that we'll strip the whole
+// `cfg_attr` instead of just one of the attributes it expands to
+
+impl MacroCallKind {
+ /// Returns the file containing the macro invocation.
+ fn file_id(&self) -> HirFileId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Attr { ast_id: InFile { file_id, .. }, .. } => file_id,
+ }
+ }
+
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, is_derive: true, invoc_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, .. } => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ }
+ }
+
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+    /// Unlike `MacroCallKind::original_call_range`, this also spans the item that attributes and derives are attached to.
+ pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ let file_id = loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ HirFileIdRepr::FileId(file_id) => break file_id,
+ }
+ };
+
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+ /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
+ /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
+ /// get only the specific derive that is being referred to.
+ pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ let file_id = loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ HirFileIdRepr::FileId(file_id) => break file_id,
+ }
+ };
+
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: should be the range of the macro name, not the whole derive
+ ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(derive_attr_index as usize)
+ .expect("missing derive")
+ .expect_left("derive is a doc comment?")
+ .syntax()
+ .text_range()
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .expect("missing attribute")
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
+ fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+ }
+ MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
+ MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
+ }
+ }
+
+ fn expand_to(&self) -> ExpandTo {
+ match self {
+ MacroCallKind::FnLike { expand_to, .. } => *expand_to,
+ MacroCallKind::Derive { .. } => ExpandTo::Items,
+ MacroCallKind::Attr { is_derive: true, .. } => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } => ExpandTo::Items, // is this always correct?
+ }
+ }
+}
+
+impl MacroCallId {
+ pub fn as_file(self) -> HirFileId {
+ MacroFile { macro_call_id: self }.into()
+ }
+}
+
+/// `ExpansionInfo` mainly describes how to map text ranges between the source and the expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExpansionInfo {
+ expanded: InFile<SyntaxNode>,
+ /// The argument TokenTree or item for attributes
+ arg: InFile<SyntaxNode>,
+ /// The `macro_rules!` or attribute input.
+ attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
+
+ macro_def: Arc<TokenExpander>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
+    /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
+    /// and as such we need to shift tokens if they are part of an attribute's input instead of the item.
+ macro_arg_shift: mbe::Shift,
+ exp_map: Arc<mbe::TokenMap>,
+}
+
+impl ExpansionInfo {
+ pub fn expanded(&self) -> InFile<SyntaxNode> {
+ self.expanded.clone()
+ }
+
+ pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
+ Some(self.arg.with_value(self.arg.value.parent()?))
+ }
+
+ /// Map a token down from macro input into the macro expansion.
+ ///
+ /// The inner workings of this function differ slightly depending on the type of macro we are dealing with:
+ /// - declarative:
+    ///   For declarative macros, we need to account for the macro definition site (which acts as a second,
+    ///   unchanging input), as tokens can be mapped in and out of it.
+    ///   To do this we shift all ids in the expansion by the maximum id of the definition site, giving us an
+    ///   easy way to map all the tokens.
+    /// - attribute:
+    ///   Attributes have two different inputs: the input token tree in the attribute node and the item
+    ///   the attribute is annotating. As for declarative macros, we need to do a shift here
+    ///   as well. Currently this is done by shifting the attribute input by the maximum id of the item.
+ /// - function-like and derives:
+ /// Both of these only have one simple call site input so no special handling is required here.
+ pub fn map_token_down(
+ &self,
+ db: &dyn db::AstDatabase,
+ item: Option<ast::Item>,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
+ assert_eq!(token.file_id, self.arg.file_id);
+ let token_id_in_attr_input = if let Some(item) = item {
+ // check if we are mapping down in an attribute input
+ // this is a special case as attributes can have two inputs
+ let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let loc = db.lookup_intern_macro_call(call_id);
+
+ let token_range = token.value.text_range();
+ match &loc.kind {
+ MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => {
+ let attr = item
+ .doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(Either::left)?;
+ match attr.token_tree() {
+ Some(token_tree)
+ if token_tree.syntax().text_range().contains_range(token_range) =>
+ {
+ let attr_input_start =
+ token_tree.left_delimiter_token()?.text_range().start();
+ let relative_range =
+ token.value.text_range().checked_sub(attr_input_start)?;
+ // shift by the item's tree's max id
+ let token_id = attr_args.1.token_by_range(relative_range)?;
+ let token_id = if *is_derive {
+ // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
+ token_id
+ } else {
+ self.macro_arg_shift.shift(token_id)
+ };
+ Some(token_id)
+ }
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ } else {
+ None
+ };
+
+ let token_id = match token_id_in_attr_input {
+ Some(token_id) => token_id,
+            // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
+ None => {
+ let relative_range =
+ token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
+ let token_id = self.macro_arg.1.token_by_range(relative_range)?;
+                // conditionally shift the id by the declarative macro's definition
+ self.macro_def.map_id_down(token_id)
+ }
+ };
+
+ let tokens = self
+ .exp_map
+ .ranges_by_token(token_id, token.value.kind())
+ .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+
+ Some(tokens.map(move |token| self.expanded.with_value(token)))
+ }
+
+    /// Maps a token out of the expansion it resides in, back up into the arguments of the macro call of that expansion.
+ pub fn map_token_up(
+ &self,
+ db: &dyn db::AstDatabase,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<(InFile<SyntaxToken>, Origin)> {
+ // Fetch the id through its text range,
+ let token_id = self.exp_map.token_by_range(token.value.text_range())?;
+        // conditionally unshift the id to account for the macro-rules def site
+ let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
+
+ let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let loc = db.lookup_intern_macro_call(call_id);
+
+        // Attributes are a bit special for us: they have two inputs, the input token tree and the annotated item.
+ let (token_map, tt) = match &loc.kind {
+ MacroCallKind::Attr { attr_args, is_derive: true, .. } => {
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ MacroCallKind::Attr { attr_args, .. } => {
+                // try unshifting the token id; if unshifting fails, the token resides in the non-item attribute input
+ // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
+ match self.macro_arg_shift.unshift(token_id) {
+ Some(unshifted) => {
+ token_id = unshifted;
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ None => (&self.macro_arg.1, self.arg.clone()),
+ }
+ }
+ _ => match origin {
+ mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
+ mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
+ (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+ (def_site_token_map, tt.syntax().cloned())
+ }
+ _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+ },
+ },
+ };
+
+ let range = token_map.first_range_by_token(token_id, token.value.kind())?;
+ let token =
+ tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
+ Some((tt.with_value(token), origin))
+ }
+}
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = InFile<FileAstId<N>>;
+
+impl<N: AstNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
+ let root = db.parse_or_expand(self.file_id).unwrap();
+ db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+ }
+}
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
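+///
+/// A small illustrative sketch (not part of the original source; assumes a
+/// `HirFileId` named `file_id` and a `SyntaxNode` named `node` are in scope):
+///
+/// ```ignore
+/// let node_in_file = InFile::new(file_id, node);
+/// // `map` transforms the value while keeping the file id attached.
+/// let range_in_file = node_in_file.as_ref().map(|it| it.text_range());
+/// ```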
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFile<T> {
+ pub file_id: HirFileId,
+ pub value: T,
+}
+
+impl<T> InFile<T> {
+ pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
+ InFile { file_id, value }
+ }
+
+ pub fn with_value<U>(&self, value: U) -> InFile<U> {
+ InFile::new(self.file_id, value)
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
+ InFile::new(self.file_id, f(self.value))
+ }
+
+ pub fn as_ref(&self) -> InFile<&T> {
+ self.with_value(&self.value)
+ }
+
+ pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
+ db.parse_or_expand(self.file_id).expect("source created from invalid file")
+ }
+}
+
+impl<T: Clone> InFile<&T> {
+ pub fn cloned(&self) -> InFile<T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> InFile<Option<T>> {
+ pub fn transpose(self) -> Option<InFile<T>> {
+ let value = self.value?;
+ Some(InFile::new(self.file_id, value))
+ }
+}
+
+impl<'a> InFile<&'a SyntaxNode> {
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
+ iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => node.file_id.call_node(db),
+ })
+ }
+
+ /// Skips the attributed item that caused the macro invocation we are climbing up
+ pub fn ancestors_with_macros_skip_attr_item(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let parent_node = node.file_id.call_node(db)?;
+ if node.file_id.is_attr_macro(db) {
+ // macro call was an attributed item, skip it
+ // FIXME: does this fail if this is a direct expansion of another macro?
+ parent_node.map(|node| node.parent()).transpose()
+ } else {
+ Some(parent_node)
+ }
+ }
+ };
+ iter::successors(succ(&self.cloned()), succ)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ ///
+ /// For attributes and derives, this will point back to the attribute only.
+    /// For the entire item, use `InFile::original_file_range_full`.
+ pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+    /// Attempts to map the syntax node back up through its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ match ascend_node_border_tokens(db, self) {
+ Some(InFile { file_id, value: (first, last) }) => {
+ let original_file = file_id.original_file(db);
+ let range = first.text_range().cover(last.text_range());
+ if file_id != original_file.into() {
+ tracing::error!("Failed mapping up more for {:?}", range);
+ return None;
+ }
+ Some(FileRange { file_id: original_file, range })
+ }
+ _ if !self.file_id.is_macro() => Some(FileRange {
+ file_id: self.file_id.original_file(db),
+ range: self.value.text_range(),
+ }),
+ _ => None,
+ }
+ }
+}
+
+impl InFile<SyntaxToken> {
+ pub fn upmap(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxToken>> {
+ let expansion = self.file_id.expansion_info(db)?;
+ expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+    /// Attempts to map the syntax node back up through its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => {
+ Some(FileRange { file_id, range: self.value.text_range() })
+ }
+ HirFileIdRepr::MacroFile(_) => {
+ let expansion = self.file_id.expansion_info(db)?;
+ let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?;
+ let original_file = file_id.original_file(db);
+ if file_id != original_file.into() {
+ return None;
+ }
+ Some(FileRange { file_id: original_file, range: value.text_range() })
+ }
+ }
+ }
+
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ self.value.parent().into_iter().flat_map({
+ let file_id = self.file_id;
+ move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
+ })
+ }
+}
+
+fn ascend_node_border_tokens(
+ db: &dyn db::AstDatabase,
+ InFile { file_id, value: node }: InFile<&SyntaxNode>,
+) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
+ let expansion = file_id.expansion_info(db)?;
+
+ let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
+ let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
+
+ let first = first_token(node)?;
+ let last = last_token(node)?;
+ let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
+ let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
+ (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
+}
+
+fn ascend_call_token(
+ db: &dyn db::AstDatabase,
+ expansion: &ExpansionInfo,
+ token: InFile<SyntaxToken>,
+) -> Option<InFile<SyntaxToken>> {
+ let mut mapping = expansion.map_token_up(db, token.as_ref())?;
+ while let (mapped, Origin::Call) = mapping {
+ match mapped.file_id.expansion_info(db) {
+ Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
+ None => return Some(mapped),
+ }
+ }
+ None
+}
+
+impl<N: AstNode> InFile<N> {
+ pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
+ self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
+ }
+
+ pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
+        // This kind of upmapping can only be achieved in attribute-expanded files,
+        // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ if !self.file_id.is_macro() {
+ return Some(self);
+ } else if !self.file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ if let Some(InFile { file_id, value: (first, last) }) =
+ ascend_node_border_tokens(db, self.syntax())
+ {
+ if file_id.is_macro() {
+ let range = first.text_range().cover(last.text_range());
+ tracing::error!("Failed mapping out of macro file for {:?}", range);
+ return None;
+ }
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
+ let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
+ let value = anc.ancestors().find_map(N::cast)?;
+ return Some(InFile::new(file_id, value));
+ }
+ None
+ }
+
+ pub fn syntax(&self) -> InFile<&SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
+/// In Rust, macros expand token trees to token trees. When we want to turn a
+/// token tree into an AST node, we need to figure out what kind of AST node we
+/// want: something like `foo` can be a type, an expression, or a pattern.
+///
+/// Naively, one would think that "what this expands to" is a property of a
+/// particular macro: macro `m1` returns an item, while macro `m2` returns an
+/// expression, etc. That's not the case -- macros are polymorphic in the
+/// result, and can expand to any type of AST node.
+///
+/// What defines the actual AST node is the syntactic context of the macro
+/// invocation. As a contrived example, in `let T![*] = T![*];` the first `T`
+/// expands to a pattern, while the second one expands to an expression.
+///
+/// `ExpandTo` captures this bit of information about a particular macro call
+/// site.
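+///
+/// An illustrative sketch (not part of the original source): for a macro call
+/// parsed in expression position, such as the right-hand side of a `let`,
+/// `from_call_site` classifies the expansion as an expression:
+///
+/// ```ignore
+/// // `call` is assumed to be the `ast::MacroCall` from `let _ = m!();`
+/// assert_eq!(ExpandTo::from_call_site(&call), ExpandTo::Expr);
+/// ```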
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ExpandTo {
+ Statements,
+ Items,
+ Pattern,
+ Type,
+ Expr,
+}
+
+impl ExpandTo {
+ pub fn from_call_site(call: &ast::MacroCall) -> ExpandTo {
+ use syntax::SyntaxKind::*;
+
+ let syn = call.syntax();
+
+ let parent = match syn.parent() {
+ Some(it) => it,
+ None => return ExpandTo::Statements,
+ };
+
+ // FIXME: macros in statement position are treated as expression statements, they should
+ // probably be their own statement kind. The *grand*parent indicates what's valid.
+ if parent.kind() == MACRO_EXPR
+ && parent
+ .parent()
+ .map_or(true, |p| matches!(p.kind(), EXPR_STMT | STMT_LIST | MACRO_STMTS))
+ {
+ return ExpandTo::Statements;
+ }
+
+ match parent.kind() {
+ MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => ExpandTo::Items,
+ MACRO_STMTS | EXPR_STMT | STMT_LIST => ExpandTo::Statements,
+ MACRO_PAT => ExpandTo::Pattern,
+ MACRO_TYPE => ExpandTo::Type,
+
+ ARG_LIST | ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BREAK_EXPR | CALL_EXPR | CAST_EXPR
+ | CLOSURE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR | INDEX_EXPR | LET_EXPR
+ | MATCH_ARM | MATCH_EXPR | MATCH_GUARD | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR
+ | PREFIX_EXPR | RANGE_EXPR | RECORD_EXPR_FIELD | REF_EXPR | RETURN_EXPR | TRY_EXPR
+ | TUPLE_EXPR | WHILE_EXPR | MACRO_EXPR => ExpandTo::Expr,
+ _ => {
+                // Unknown; just guess it is `Items`
+ ExpandTo::Items
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct UnresolvedMacro {
+ pub path: ModPath,
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
new file mode 100644
index 000000000..fea09521e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -0,0 +1,276 @@
+//! A lowering for `use`-paths (more generally, paths without angle-bracketed segments).
+
+use std::{
+ fmt::{self, Display},
+ iter,
+};
+
+use crate::{
+ db::AstDatabase,
+ hygiene::Hygiene,
+ name::{known, Name},
+};
+use base_db::CrateId;
+use either::Either;
+use smallvec::SmallVec;
+use syntax::{ast, AstNode};
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ModPath {
+ pub kind: PathKind,
+ segments: SmallVec<[Name; 1]>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct EscapedModPath<'a>(&'a ModPath);
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum PathKind {
+ Plain,
+ /// `self::` is `Super(0)`
+ Super(u8),
+ Crate,
+ /// Absolute path (::foo)
+ Abs,
+ /// `$crate` from macro expansion
+ DollarCrate(CrateId),
+}
+
+impl ModPath {
+ pub fn from_src(db: &dyn AstDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
+ convert_path(db, None, path, hygiene)
+ }
+
+ pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
+ let segments = segments.into_iter().collect();
+ ModPath { kind, segments }
+ }
+
+ /// Creates a `ModPath` from a `PathKind`, with no extra path segments.
+ pub const fn from_kind(kind: PathKind) -> ModPath {
+ ModPath { kind, segments: SmallVec::new_const() }
+ }
+
+ pub fn segments(&self) -> &[Name] {
+ &self.segments
+ }
+
+ pub fn push_segment(&mut self, segment: Name) {
+ self.segments.push(segment);
+ }
+
+ pub fn pop_segment(&mut self) -> Option<Name> {
+ self.segments.pop()
+ }
+
+ /// Returns the number of segments in the path (counting special segments like `$crate` and
+ /// `super`).
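+    ///
+    /// An illustrative sketch (not part of the original source):
+    ///
+    /// ```ignore
+    /// let mut path = ModPath::from_kind(PathKind::Crate); // `crate`
+    /// assert_eq!(path.len(), 1);
+    /// path.push_segment(known::std);                      // `crate::std`
+    /// assert_eq!(path.len(), 2);
+    /// ```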
+ pub fn len(&self) -> usize {
+ self.segments.len()
+ + match self.kind {
+ PathKind::Plain => 0,
+ PathKind::Super(i) => i as usize,
+ PathKind::Crate => 1,
+ PathKind::Abs => 0,
+ PathKind::DollarCrate(_) => 1,
+ }
+ }
+
+ pub fn is_ident(&self) -> bool {
+ self.as_ident().is_some()
+ }
+
+ pub fn is_self(&self) -> bool {
+ self.kind == PathKind::Super(0) && self.segments.is_empty()
+ }
+
+ #[allow(non_snake_case)]
+ pub fn is_Self(&self) -> bool {
+ self.kind == PathKind::Plain
+ && matches!(&*self.segments, [name] if *name == known::SELF_TYPE)
+ }
+
+ /// If this path is a single identifier, like `foo`, return its name.
+ pub fn as_ident(&self) -> Option<&Name> {
+ if self.kind != PathKind::Plain {
+ return None;
+ }
+
+ match &*self.segments {
+ [name] => Some(name),
+ _ => None,
+ }
+ }
+
+ pub fn escaped(&self) -> EscapedModPath<'_> {
+ EscapedModPath(self)
+ }
+
+ fn _fmt(&self, f: &mut fmt::Formatter<'_>, escaped: bool) -> fmt::Result {
+ let mut first_segment = true;
+ let mut add_segment = |s| -> fmt::Result {
+ if !first_segment {
+ f.write_str("::")?;
+ }
+ first_segment = false;
+ f.write_str(s)?;
+ Ok(())
+ };
+ match self.kind {
+ PathKind::Plain => {}
+ PathKind::Super(0) => add_segment("self")?,
+ PathKind::Super(n) => {
+ for _ in 0..n {
+ add_segment("super")?;
+ }
+ }
+ PathKind::Crate => add_segment("crate")?,
+ PathKind::Abs => add_segment("")?,
+ PathKind::DollarCrate(_) => add_segment("$crate")?,
+ }
+ for segment in &self.segments {
+ if !first_segment {
+ f.write_str("::")?;
+ }
+ first_segment = false;
+ if escaped {
+ segment.escaped().fmt(f)?
+ } else {
+ segment.fmt(f)?
+ };
+ }
+ Ok(())
+ }
+}
+
+impl Display for ModPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self._fmt(f, false)
+ }
+}
+
+impl<'a> Display for EscapedModPath<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0._fmt(f, true)
+ }
+}
+
+impl From<Name> for ModPath {
+ fn from(name: Name) -> ModPath {
+ ModPath::from_segments(PathKind::Plain, iter::once(name))
+ }
+}
+
+fn convert_path(
+ db: &dyn AstDatabase,
+ prefix: Option<ModPath>,
+ path: ast::Path,
+ hygiene: &Hygiene,
+) -> Option<ModPath> {
+ let prefix = match path.qualifier() {
+ Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+ None => prefix,
+ };
+
+ let segment = path.segment()?;
+ let mut mod_path = match segment.kind()? {
+ ast::PathSegmentKind::Name(name_ref) => {
+ match hygiene.name_ref_to_name(db, name_ref) {
+ Either::Left(name) => {
+ // no type args in use
+ let mut res = prefix.unwrap_or_else(|| {
+ ModPath::from_kind(
+ segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+ )
+ });
+ res.segments.push(name);
+ res
+ }
+ Either::Right(crate_id) => {
+ return Some(ModPath::from_segments(
+ PathKind::DollarCrate(crate_id),
+ iter::empty(),
+ ))
+ }
+ }
+ }
+ ast::PathSegmentKind::SelfTypeKw => {
+ if prefix.is_some() {
+ return None;
+ }
+ ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE))
+ }
+ ast::PathSegmentKind::CrateKw => {
+ if prefix.is_some() {
+ return None;
+ }
+ ModPath::from_segments(PathKind::Crate, iter::empty())
+ }
+ ast::PathSegmentKind::SelfKw => {
+ if prefix.is_some() {
+ return None;
+ }
+ ModPath::from_segments(PathKind::Super(0), iter::empty())
+ }
+ ast::PathSegmentKind::SuperKw => {
+ let nested_super_count = match prefix.map(|p| p.kind) {
+ Some(PathKind::Super(n)) => n,
+ Some(_) => return None,
+ None => 0,
+ };
+
+ ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty())
+ }
+ ast::PathSegmentKind::Type { .. } => {
+ // not allowed in imports
+ return None;
+ }
+ };
+
+    // handle local_inner_macros:
+ // Basically, even in rustc it is quite hacky:
+ // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
+ // We follow what it did anyway :)
+ if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
+ if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
+ if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
+ mod_path.kind = PathKind::DollarCrate(crate_id);
+ }
+ }
+ }
+
+ Some(mod_path)
+}
+
+pub use crate::name as __name;
+
+#[macro_export]
+macro_rules! __known_path {
+ (core::iter::IntoIterator) => {};
+ (core::iter::Iterator) => {};
+ (core::result::Result) => {};
+ (core::option::Option) => {};
+ (core::ops::Range) => {};
+ (core::ops::RangeFrom) => {};
+ (core::ops::RangeFull) => {};
+ (core::ops::RangeTo) => {};
+ (core::ops::RangeToInclusive) => {};
+ (core::ops::RangeInclusive) => {};
+ (core::future::Future) => {};
+ (core::ops::Try) => {};
+ ($path:path) => {
+ compile_error!("Please register your known path in the path module")
+ };
+}
+
+#[macro_export]
+macro_rules! __path {
+ ($start:ident $(:: $seg:ident)*) => ({
+ $crate::__known_path!($start $(:: $seg)*);
+ $crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Abs, vec![
+ $crate::mod_path::__name![$start], $($crate::mod_path::__name![$seg],)*
+ ])
+ });
+}
+
+pub use crate::__path as path;
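+
+// Illustrative usage sketch (not part of the original source): `path!` builds an
+// absolute `ModPath` from a path registered in `__known_path!` above, e.g.
+//
+//     let option_path = path![core::option::Option];
+//
+// Unregistered paths hit the `compile_error!` arm instead.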
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
new file mode 100644
index 000000000..85b0a7735
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -0,0 +1,433 @@
+//! See [`Name`].
+
+use std::fmt;
+
+use syntax::{ast, SmolStr, SyntaxKind};
+
+/// `Name` is a wrapper around a string, which is used in hir for both references
+/// and declarations. In theory, names should also carry hygiene info, but we are
+/// not there yet!
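+///
+/// An illustrative sketch (not part of the original source):
+///
+/// ```ignore
+/// let field = Name::new_tuple_field(0);
+/// assert_eq!(field.as_tuple_index(), Some(0));
+/// assert_eq!(field.to_smol_str(), "0");
+/// ```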
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Name(Repr);
+
+/// `EscapedName` adds an "r#" prefix to the wrapped `Name` when it is a raw identifier.
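+///
+/// An illustrative sketch (not part of the original source): a name whose text
+/// is a keyword renders with the `r#` prefix when escaped.
+///
+/// ```ignore
+/// let name = tt::Ident { text: "fn".into(), id: tt::TokenId::unspecified() }.as_name();
+/// assert_eq!(name.escaped().to_string(), "r#fn");
+/// ```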
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EscapedName<'a>(&'a Name);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+enum Repr {
+ Text(SmolStr),
+ TupleField(usize),
+}
+
+impl fmt::Display for Name {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 {
+ Repr::Text(text) => fmt::Display::fmt(&text, f),
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+fn is_raw_identifier(name: &str) -> bool {
+ let is_keyword = SyntaxKind::from_keyword(name).is_some();
+ is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
+}
+
+impl<'a> fmt::Display for EscapedName<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 .0 {
+ Repr::Text(text) => {
+ if is_raw_identifier(text) {
+ write!(f, "r#{}", &text)
+ } else {
+ fmt::Display::fmt(&text, f)
+ }
+ }
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+impl<'a> EscapedName<'a> {
+ pub fn is_escaped(&self) -> bool {
+ match &self.0 .0 {
+ Repr::Text(it) => is_raw_identifier(&it),
+ Repr::TupleField(_) => false,
+ }
+ }
+
+ /// Returns the textual representation of this name as a [`SmolStr`].
+ /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
+ /// the general case.
+ pub fn to_smol_str(&self) -> SmolStr {
+ match &self.0 .0 {
+ Repr::Text(it) => {
+ if is_raw_identifier(&it) {
+ SmolStr::from_iter(["r#", &it])
+ } else {
+ it.clone()
+ }
+ }
+ Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+ }
+ }
+}
+
+impl Name {
+    /// Note: this is private to make it hard to create a name from an arbitrary string.
+    /// Hopefully, this will allow us to integrate hygiene more cleanly in the
+    /// future, and to switch to an interned representation of names.
+ const fn new_text(text: SmolStr) -> Name {
+ Name(Repr::Text(text))
+ }
+
+ pub fn new_tuple_field(idx: usize) -> Name {
+ Name(Repr::TupleField(idx))
+ }
+
+ pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
+ Self::new_text(lt.text().into())
+ }
+
+    /// Shortcut to create an inline plain-text name.
+ const fn new_inline(text: &str) -> Name {
+ Name::new_text(SmolStr::new_inline(text))
+ }
+
+    /// Resolves a name from the text of a token.
+ fn resolve(raw_text: &str) -> Name {
+ match raw_text.strip_prefix("r#") {
+ Some(text) => Name::new_text(SmolStr::new(text)),
+ None => Name::new_text(raw_text.into()),
+ }
+ }
+
+ /// A fake name for things missing in the source code.
+ ///
+ /// For example, `impl Foo for {}` should be treated as a trait impl for a
+ /// type with a missing name. Similarly, `struct S { : u32 }` should have a
+ /// single field with a missing name.
+ ///
+ /// Ideally, we want a `gensym` semantics for missing names -- each missing
+ /// name is equal only to itself. It's not clear how to implement this in
+ /// salsa though, so we punt on that bit for a moment.
+ pub const fn missing() -> Name {
+ Name::new_inline("[missing name]")
+ }
+
+ /// Returns the tuple index this name represents if it is a tuple field.
+ pub fn as_tuple_index(&self) -> Option<usize> {
+ match self.0 {
+ Repr::TupleField(idx) => Some(idx),
+ _ => None,
+ }
+ }
+
+ /// Returns the text this name represents if it isn't a tuple field.
+ pub fn as_text(&self) -> Option<SmolStr> {
+ match &self.0 {
+ Repr::Text(it) => Some(it.clone()),
+ _ => None,
+ }
+ }
+
+ /// Returns the textual representation of this name as a [`SmolStr`].
+ /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
+ /// the general case.
+ pub fn to_smol_str(&self) -> SmolStr {
+ match &self.0 {
+ Repr::Text(it) => it.clone(),
+ Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+ }
+ }
+
+ pub fn escaped(&self) -> EscapedName<'_> {
+ EscapedName(self)
+ }
+}
+
+pub trait AsName {
+ fn as_name(&self) -> Name;
+}
+
+impl AsName for ast::NameRef {
+ fn as_name(&self) -> Name {
+ match self.as_tuple_field() {
+ Some(idx) => Name::new_tuple_field(idx),
+ None => Name::resolve(&self.text()),
+ }
+ }
+}
+
+impl AsName for ast::Name {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text())
+ }
+}
+
+impl AsName for ast::NameOrNameRef {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::NameOrNameRef::Name(it) => it.as_name(),
+ ast::NameOrNameRef::NameRef(it) => it.as_name(),
+ }
+ }
+}
+
+impl AsName for tt::Ident {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text)
+ }
+}
+
+impl AsName for ast::FieldKind {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::FieldKind::Name(nr) => nr.as_name(),
+ ast::FieldKind::Index(idx) => {
+ let idx = idx.text().parse::<usize>().unwrap_or(0);
+ Name::new_tuple_field(idx)
+ }
+ }
+ }
+}
+
+impl AsName for base_db::Dependency {
+ fn as_name(&self) -> Name {
+ Name::new_text(SmolStr::new(&*self.name))
+ }
+}
+
+pub mod known {
+ macro_rules! known_names {
+ ($($ident:ident),* $(,)?) => {
+ $(
+ #[allow(bad_style)]
+ pub const $ident: super::Name =
+ super::Name::new_inline(stringify!($ident));
+ )*
+ };
+ }
+
+ known_names!(
+ // Primitives
+ isize,
+ i8,
+ i16,
+ i32,
+ i64,
+ i128,
+ usize,
+ u8,
+ u16,
+ u32,
+ u64,
+ u128,
+ f32,
+ f64,
+ bool,
+ char,
+ str,
+ // Special names
+ macro_rules,
+ doc,
+ cfg,
+ cfg_attr,
+ register_attr,
+ register_tool,
+ // Components of known path (value or mod name)
+ std,
+ core,
+ alloc,
+ iter,
+ ops,
+ future,
+ result,
+ boxed,
+ option,
+ prelude,
+ rust_2015,
+ rust_2018,
+ rust_2021,
+ v1,
+ // Components of known path (type name)
+ Iterator,
+ IntoIterator,
+ Item,
+ Try,
+ Ok,
+ Future,
+ Result,
+ Option,
+ Output,
+ Target,
+ Box,
+ RangeFrom,
+ RangeFull,
+ RangeInclusive,
+ RangeToInclusive,
+ RangeTo,
+ Range,
+ Neg,
+ Not,
+ None,
+ Index,
+ // Components of known path (function name)
+ filter_map,
+ next,
+ iter_mut,
+ len,
+ is_empty,
+ new,
+ // Builtin macros
+ asm,
+ assert,
+ column,
+ compile_error,
+ concat_idents,
+ concat_bytes,
+ concat,
+ const_format_args,
+ core_panic,
+ env,
+ file,
+ format_args_nl,
+ format_args,
+ global_asm,
+ include_bytes,
+ include_str,
+ include,
+ line,
+ llvm_asm,
+ log_syntax,
+ module_path,
+ option_env,
+ std_panic,
+ stringify,
+ trace_macros,
+ unreachable,
+ // Builtin derives
+ Copy,
+ Clone,
+ Default,
+ Debug,
+ Hash,
+ Ord,
+ PartialOrd,
+ Eq,
+ PartialEq,
+ // Builtin attributes
+ bench,
+ cfg_accessible,
+ cfg_eval,
+ crate_type,
+ derive,
+ global_allocator,
+ test,
+ test_case,
+ recursion_limit,
+ // Safe intrinsics
+ abort,
+ add_with_overflow,
+ black_box,
+ bitreverse,
+ bswap,
+ caller_location,
+ ctlz,
+ ctpop,
+ cttz,
+ discriminant_value,
+ forget,
+ likely,
+ maxnumf32,
+ maxnumf64,
+ min_align_of_val,
+ min_align_of,
+ minnumf32,
+ minnumf64,
+ mul_with_overflow,
+ needs_drop,
+ ptr_guaranteed_eq,
+ ptr_guaranteed_ne,
+ rotate_left,
+ rotate_right,
+ rustc_peek,
+ saturating_add,
+ saturating_sub,
+ size_of_val,
+ size_of,
+ sub_with_overflow,
+ type_id,
+ type_name,
+ unlikely,
+ variant_count,
+ wrapping_add,
+ wrapping_mul,
+ wrapping_sub,
+ // known methods of lang items
+ eq,
+ ne,
+ ge,
+ gt,
+ le,
+ lt,
+ // lang items
+ add_assign,
+ add,
+ bitand_assign,
+ bitand,
+ bitor_assign,
+ bitor,
+ bitxor_assign,
+ bitxor,
+ deref_mut,
+ deref,
+ div_assign,
+ div,
+ fn_mut,
+ fn_once,
+ future_trait,
+ index,
+ index_mut,
+ mul_assign,
+ mul,
+ neg,
+ not,
+ owned_box,
+ partial_ord,
+ r#fn,
+ rem_assign,
+ rem,
+ shl_assign,
+ shl,
+ shr_assign,
+ shr,
+ sub_assign,
+ sub,
+ );
+
+ // self/Self cannot be used as an identifier
+ pub const SELF_PARAM: super::Name = super::Name::new_inline("self");
+ pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
+
+ pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+
+ #[macro_export]
+ macro_rules! name {
+ (self) => {
+ $crate::name::known::SELF_PARAM
+ };
+ (Self) => {
+ $crate::name::known::SELF_TYPE
+ };
+ ('static) => {
+ $crate::name::known::STATIC_LIFETIME
+ };
+ ($ident:ident) => {
+ $crate::name::known::$ident
+ };
+ }
+}
+
+pub use crate::name;
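+
+// Illustrative usage sketch (not part of the original source): `name![usize]`
+// expands to `crate::name::known::usize`, and `name![self]` to `known::SELF_PARAM`,
+// as handled by the `name!` macro arms above.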
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
new file mode 100644
index 000000000..5afdcc0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -0,0 +1,81 @@
+//! Proc Macro Expander stub
+
+use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use stdx::never;
+
+use crate::{db::AstDatabase, ExpandError, ExpandResult};
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
+pub struct ProcMacroExpander {
+ krate: CrateId,
+ proc_macro_id: Option<ProcMacroId>,
+}
+
+impl ProcMacroExpander {
+ pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> Self {
+ Self { krate, proc_macro_id: Some(proc_macro_id) }
+ }
+
+ pub fn dummy(krate: CrateId) -> Self {
+ // FIXME: Should store the name for better errors
+ Self { krate, proc_macro_id: None }
+ }
+
+ pub fn is_dummy(&self) -> bool {
+ self.proc_macro_id.is_none()
+ }
+
+ pub fn expand(
+ self,
+ db: &dyn AstDatabase,
+ calling_crate: CrateId,
+ tt: &tt::Subtree,
+ attr_arg: Option<&tt::Subtree>,
+ ) -> ExpandResult<tt::Subtree> {
+ match self.proc_macro_id {
+ Some(id) => {
+ let krate_graph = db.crate_graph();
+ let proc_macros = match &krate_graph[self.krate].proc_macro {
+ Ok(proc_macros) => proc_macros,
+ Err(_) => {
+ never!("Non-dummy expander even though there are no proc macros");
+ return ExpandResult::only_err(ExpandError::Other("Internal error".into()));
+ }
+ };
+ let proc_macro = match proc_macros.get(id.0 as usize) {
+ Some(proc_macro) => proc_macro,
+ None => {
+ never!(
+ "Proc macro index out of bounds: the length is {} but the index is {}",
+ proc_macros.len(),
+ id.0
+ );
+ return ExpandResult::only_err(ExpandError::Other("Internal error".into()));
+ }
+ };
+
+ // Proc macros have access to the environment variables of the invoking crate.
+ let env = &krate_graph[calling_crate].env;
+ match proc_macro.expander.expand(tt, attr_arg, env) {
+ Ok(t) => ExpandResult::ok(t),
+ Err(err) => match err {
+ // Don't discard the item in case something unexpected happened while expanding attributes
+ ProcMacroExpansionError::System(text)
+ if proc_macro.kind == ProcMacroKind::Attr =>
+ {
+ ExpandResult {
+ value: tt.clone(),
+ err: Some(ExpandError::Other(text.into())),
+ }
+ }
+ ProcMacroExpansionError::System(text)
+ | ProcMacroExpansionError::Panic(text) => {
+ ExpandResult::only_err(ExpandError::Other(text.into()))
+ }
+ },
+ }
+ }
+ None => ExpandResult::only_err(ExpandError::UnresolvedProcMacro(self.krate)),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
new file mode 100644
index 000000000..82f410ecd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -0,0 +1,284 @@
+//! A simplified version of a `quote`-crate-like quasi-quote macro
+
+// A helper quote macro
+// FIXME:
+// 1. Not all puncts are handled
+// 2. #()* pattern repetition is not supported yet
+//    But it can be done manually, see `test_quote_derive_copy_hack`
+#[doc(hidden)]
+#[macro_export]
+macro_rules! __quote {
+ () => {
+ Vec::<tt::TokenTree>::new()
+ };
+
+ ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+ {
+ let children = $crate::__quote!($($tt)*);
+ tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::$delim,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: $crate::quote::IntoTt::to_tokens(children),
+ }
+ }
+ };
+
+ ( @PUNCT $first:literal ) => {
+ {
+ vec![
+ tt::Leaf::Punct(tt::Punct {
+ char: $first,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }).into()
+ ]
+ }
+ };
+
+ ( @PUNCT $first:literal, $sec:literal ) => {
+ {
+ vec![
+ tt::Leaf::Punct(tt::Punct {
+ char: $first,
+ spacing: tt::Spacing::Joint,
+ id: tt::TokenId::unspecified(),
+ }).into(),
+ tt::Leaf::Punct(tt::Punct {
+ char: $sec,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }).into()
+ ]
+ }
+ };
+
+ // hash variable
+ ( # $first:ident $($tail:tt)* ) => {
+ {
+ let token = $crate::quote::ToTokenTree::to_token($first);
+ let mut tokens = vec![token.into()];
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+
+ ( ## $first:ident $($tail:tt)* ) => {
+ {
+ let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+
+ // Brace
+ ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+ // Bracket
+ ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+ // Parenthesis
+ ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+
+ // Literal
+ ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+ // Ident
+ ( $tt:ident ) => {
+ vec![ {
+ tt::Leaf::Ident(tt::Ident {
+ text: stringify!($tt).into(),
+ id: tt::TokenId::unspecified(),
+ }).into()
+ }]
+ };
+
+ // Puncts
+ // FIXME: Not all puncts are handled
+ ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
+ ( & ) => {$crate::__quote!(@PUNCT '&')};
+ ( , ) => {$crate::__quote!(@PUNCT ',')};
+ ( : ) => {$crate::__quote!(@PUNCT ':')};
+ ( ; ) => {$crate::__quote!(@PUNCT ';')};
+ ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
+ ( . ) => {$crate::__quote!(@PUNCT '.')};
+ ( < ) => {$crate::__quote!(@PUNCT '<')};
+ ( > ) => {$crate::__quote!(@PUNCT '>')};
+ ( ! ) => {$crate::__quote!(@PUNCT '!')};
+
+ ( $first:tt $($tail:tt)+ ) => {
+ {
+ let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+
+ tokens.append(&mut tail_tokens);
+ tokens
+ }
+ };
+}
+
+/// FIXME:
+/// It should probably be implemented as a proc-macro
+#[macro_export]
+macro_rules! quote {
+ ( $($tt:tt)* ) => {
+ $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+ }
+}
+
+pub(crate) trait IntoTt {
+ fn to_subtree(self) -> tt::Subtree;
+ fn to_tokens(self) -> Vec<tt::TokenTree>;
+}
+
+impl IntoTt for Vec<tt::TokenTree> {
+ fn to_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self }
+ }
+
+ fn to_tokens(self) -> Vec<tt::TokenTree> {
+ self
+ }
+}
+
+impl IntoTt for tt::Subtree {
+ fn to_subtree(self) -> tt::Subtree {
+ self
+ }
+
+ fn to_tokens(self) -> Vec<tt::TokenTree> {
+ vec![tt::TokenTree::Subtree(self)]
+ }
+}
+
+pub(crate) trait ToTokenTree {
+ fn to_token(self) -> tt::TokenTree;
+}
+
+impl ToTokenTree for tt::TokenTree {
+ fn to_token(self) -> tt::TokenTree {
+ self
+ }
+}
+
+impl ToTokenTree for tt::Subtree {
+ fn to_token(self) -> tt::TokenTree {
+ self.into()
+ }
+}
+
+macro_rules! impl_to_to_tokentrees {
+ ($($ty:ty => $this:ident $im:block);*) => {
+ $(
+ impl ToTokenTree for $ty {
+ fn to_token($this) -> tt::TokenTree {
+ let leaf: tt::Leaf = $im.into();
+ leaf.into()
+ }
+ }
+
+ impl ToTokenTree for &$ty {
+ fn to_token($this) -> tt::TokenTree {
+ let leaf: tt::Leaf = $im.clone().into();
+ leaf.into()
+ }
+ }
+ )*
+ }
+}
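+
+// Note (a reading of the macro above, not original commentary): each
+// `$ty => $this $im` entry expands to `impl ToTokenTree for $ty` and
+// `impl ToTokenTree for &$ty`, converting the value into a `tt::Leaf`
+// and wrapping it as a `tt::TokenTree`.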
+
+impl_to_to_tokentrees! {
+ u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ bool => self { tt::Ident{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ tt::Leaf => self { self };
+ tt::Literal => self { self };
+ tt::Ident => self { self };
+ tt::Punct => self { self };
+ &str => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}};
+ String => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}}
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn test_quote_delimiters() {
+ assert_eq!(quote!({}).to_string(), "{}");
+ assert_eq!(quote!(()).to_string(), "()");
+ assert_eq!(quote!([]).to_string(), "[]");
+ }
+
+ #[test]
+ fn test_quote_idents() {
+ assert_eq!(quote!(32).to_string(), "32");
+ assert_eq!(quote!(struct).to_string(), "struct");
+ }
+
+ #[test]
+ fn test_quote_hash_simple_literal() {
+ let a = 20;
+ assert_eq!(quote!(#a).to_string(), "20");
+ let s: String = "hello".into();
+ assert_eq!(quote!(#s).to_string(), "\"hello\"");
+ }
+
+ fn mk_ident(name: &str) -> tt::Ident {
+ tt::Ident { text: name.into(), id: tt::TokenId::unspecified() }
+ }
+
+ #[test]
+ fn test_quote_hash_token_tree() {
+ let a = mk_ident("hello");
+
+ let quoted = quote!(#a);
+ assert_eq!(quoted.to_string(), "hello");
+ let t = format!("{:?}", quoted);
+ assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
+ }
+
+ #[test]
+ fn test_quote_simple_derive_copy() {
+ let name = mk_ident("Foo");
+
+ let quoted = quote! {
+ impl Clone for #name {
+ fn clone(&self) -> Self {
+ Self {}
+ }
+ }
+ };
+
+ assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {}}}");
+ }
+
+ #[test]
+ fn test_quote_derive_copy_hack() {
+ // Assume the given struct is:
+ // struct Foo {
+ // name: String,
+ // id: u32,
+ // }
+ let struct_name = mk_ident("Foo");
+ let fields = [mk_ident("name"), mk_ident("id")];
+ let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
+
+ let list = tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Brace,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: fields.collect(),
+ };
+
+ let quoted = quote! {
+ impl Clone for #struct_name {
+ fn clone(&self) -> Self {
+ Self #list
+ }
+ }
+ };
+
+ assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
new file mode 100644
index 000000000..5cd444c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -0,0 +1,44 @@
+[package]
+name = "hir-ty"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+arrayvec = "0.7.2"
+smallvec = "1.9.0"
+ena = "0.14.0"
+tracing = "0.1.35"
+rustc-hash = "1.1.0"
+scoped-tls = "1.0.0"
+chalk-solve = { version = "0.83.0", default-features = false }
+chalk-ir = "0.83.0"
+chalk-recursive = { version = "0.83.0", default-features = false }
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+once_cell = "1.12.0"
+typed-arena = "2.0.1"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
+tracing = "0.1.35"
+tracing-subscriber = { version = "0.3.14", default-features = false, features = [
+ "env-filter",
+ "registry",
+] }
+tracing-tree = "0.2.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
new file mode 100644
index 000000000..b6f226dbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -0,0 +1,145 @@
+//! In certain situations, Rust automatically inserts derefs as necessary: for
+//! example, field accesses `foo.bar` still work when `foo` is actually a
+//! reference to a type with the field `bar`. This is an approximation of the
+//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
+
+use std::sync::Arc;
+
+use chalk_ir::cast::Cast;
+use hir_expand::name::name;
+use limit::Limit;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt,
+ TraitEnvironment, Ty, TyBuilder, TyKind,
+};
+
+static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10);
+
+pub(crate) enum AutoderefKind {
+ Builtin,
+ Overloaded,
+}
+
+pub(crate) struct Autoderef<'a, 'db> {
+ pub(crate) table: &'a mut InferenceTable<'db>,
+ ty: Ty,
+ at_start: bool,
+ steps: Vec<(AutoderefKind, Ty)>,
+}
+
+impl<'a, 'db> Autoderef<'a, 'db> {
+ pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self {
+ let ty = table.resolve_ty_shallow(&ty);
+ Autoderef { table, ty, at_start: true, steps: Vec::new() }
+ }
+
+ pub(crate) fn step_count(&self) -> usize {
+ self.steps.len()
+ }
+
+ pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] {
+ &self.steps
+ }
+
+ pub(crate) fn final_ty(&self) -> Ty {
+ self.ty.clone()
+ }
+}
+
+impl Iterator for Autoderef<'_, '_> {
+ type Item = (Ty, usize);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.at_start {
+ self.at_start = false;
+ return Some((self.ty.clone(), 0));
+ }
+
+ if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
+ return None;
+ }
+
+ let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?;
+
+ self.steps.push((kind, self.ty.clone()));
+ self.ty = new_ty;
+
+ Some((self.ty.clone(), self.step_count()))
+ }
+}
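+
+// A rough usage sketch (assumed, mirroring the `autoderef` helper below): the
+// iterator first yields the original type, then one item per successful deref
+// step, bounded by `AUTODEREF_RECURSION_LIMIT`.
+//
+// let mut iter = Autoderef::new(&mut table, ty);
+// while let Some((derefed_ty, _steps)) = iter.next() {
+//     /* try field/method lookup on `derefed_ty` here */
+// }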
+
+pub(crate) fn autoderef_step(
+ table: &mut InferenceTable<'_>,
+ ty: Ty,
+) -> Option<(AutoderefKind, Ty)> {
+ if let Some(derefed) = builtin_deref(&ty) {
+ Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
+ } else {
+ Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
+ }
+}
+
+// FIXME: replace uses of this with Autoderef above
+pub fn autoderef<'a>(
+ db: &'a dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ty: Canonical<Ty>,
+) -> impl Iterator<Item = Canonical<Ty>> + 'a {
+ let mut table = InferenceTable::new(db, env);
+ let ty = table.instantiate_canonical(ty);
+ let mut autoderef = Autoderef::new(&mut table, ty);
+ let mut v = Vec::new();
+ while let Some((ty, _steps)) = autoderef.next() {
+ v.push(autoderef.table.canonicalize(ty).value);
+ }
+ v.into_iter()
+}
+
+pub(crate) fn deref(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
+ let _p = profile::span("deref");
+ autoderef_step(table, ty).map(|(_, ty)| ty)
+}
+
+fn builtin_deref(ty: &Ty) -> Option<&Ty> {
+ match ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(ty),
+ TyKind::Raw(.., ty) => Some(ty),
+ _ => None,
+ }
+}
+
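+// Best-effort summary of the function below: resolve one `Deref`-based step by
+// building the projection `<ty as Deref>::Target`, checking that `ty: Deref`
+// actually holds, and normalizing the projection to obtain the target type.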
+fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
+ let _p = profile::span("deref_by_trait");
+ if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
+ // don't try to deref unknown variables
+ return None;
+ }
+
+ let db = table.db;
+ let deref_trait = db
+ .lang_item(table.trait_env.krate, SmolStr::new_inline("deref"))
+ .and_then(|l| l.as_trait())?;
+ let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
+
+ let projection = {
+ let b = TyBuilder::assoc_type_projection(db, target);
+ if b.remaining() != 1 {
+ // the Target type + Deref trait should only have one generic parameter,
+ // namely Deref's Self type
+ return None;
+ }
+ b.push(ty).build()
+ };
+
+ // Check that the type implements Deref at all
+ let trait_ref = projection.trait_ref(db);
+ let implements_goal: Goal = trait_ref.cast(Interner);
+ table.try_obligation(implements_goal.clone())?;
+
+ table.register_obligation(implements_goal);
+
+ let result = table.normalize_projection_ty(projection);
+ Some(table.resolve_ty_shallow(&result))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
new file mode 100644
index 000000000..94d7806cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -0,0 +1,311 @@
+//! `TyBuilder`, a helper for building instances of `Ty` and related types.
+
+use std::iter;
+
+use chalk_ir::{
+ cast::{Cast, CastTo, Caster},
+ fold::TypeFoldable,
+ interner::HasInterner,
+ AdtId, BoundVar, DebruijnIndex, Scalar,
+};
+use hir_def::{
+ builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, GenericDefId, TraitId,
+ TypeAliasId,
+};
+use smallvec::SmallVec;
+
+use crate::{
+ consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive,
+ to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, CallableSig, ConstData,
+ ConstValue, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty,
+ TyDefId, TyExt, TyKind, ValueTyDefId,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ParamKind {
+ Type,
+ Const(Ty),
+}
+
+/// This is a builder for `Ty` or anything that needs a `Substitution`.
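+///
+/// Rough usage sketch (an assumption based on the constructors and `fill_*`
+/// methods below, not taken from the original docs):
+///
+/// ```ignore
+/// let ty = TyBuilder::adt(db, adt_id)   // pick what to build
+///     .fill_with_unknown()              // supply all generic arguments
+///     .build();                         // -> TyKind::Adt(adt_id, subst)
+/// ```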
+pub struct TyBuilder<D> {
+ /// The `data` field is used to keep track of what we're building (e.g. an
+ /// ADT, a `TraitRef`, ...).
+ data: D,
+ vec: SmallVec<[GenericArg; 2]>,
+ param_kinds: SmallVec<[ParamKind; 2]>,
+}
+
+impl<A> TyBuilder<A> {
+ fn with_data<B>(self, data: B) -> TyBuilder<B> {
+ TyBuilder { data, param_kinds: self.param_kinds, vec: self.vec }
+ }
+}
+
+impl<D> TyBuilder<D> {
+ fn new(data: D, param_kinds: SmallVec<[ParamKind; 2]>) -> TyBuilder<D> {
+ TyBuilder { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds }
+ }
+
+ fn build_internal(self) -> (D, Substitution) {
+ assert_eq!(self.vec.len(), self.param_kinds.len());
+ for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
+ self.assert_match_kind(a, e);
+ }
+ let subst = Substitution::from_iter(Interner, self.vec);
+ (self.data, subst)
+ }
+
+ pub fn push(mut self, arg: impl CastTo<GenericArg>) -> Self {
+ let arg = arg.cast(Interner);
+ let expected_kind = &self.param_kinds[self.vec.len()];
+ let arg_kind = match arg.data(Interner) {
+ chalk_ir::GenericArgData::Ty(_) => ParamKind::Type,
+ chalk_ir::GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"),
+ chalk_ir::GenericArgData::Const(c) => {
+ let c = c.data(Interner);
+ ParamKind::Const(c.ty.clone())
+ }
+ };
+ assert_eq!(*expected_kind, arg_kind);
+ self.vec.push(arg);
+ self
+ }
+
+ pub fn remaining(&self) -> usize {
+ self.param_kinds.len() - self.vec.len()
+ }
+
+ pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self {
+ // self.fill is inlined to make borrow checker happy
+ let mut this = self;
+ let other = this.param_kinds.iter().skip(this.vec.len());
+ let filler = (starting_from..).zip(other).map(|(idx, kind)| match kind {
+ ParamKind::Type => {
+ GenericArgData::Ty(TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner))
+ .intern(Interner)
+ }
+ ParamKind::Const(ty) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
+ ty: ty.clone(),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ });
+ this.vec.extend(filler.take(this.remaining()).casted(Interner));
+ assert_eq!(this.remaining(), 0);
+ this
+ }
+
+ pub fn fill_with_unknown(self) -> Self {
+ // self.fill is inlined to make borrow checker happy
+ let mut this = self;
+ let filler = this.param_kinds.iter().skip(this.vec.len()).map(|x| match x {
+ ParamKind::Type => GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ });
+ this.vec.extend(filler.casted(Interner));
+ assert_eq!(this.remaining(), 0);
+ this
+ }
+
+ pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self {
+ self.fill(|x| match x {
+ ParamKind::Type => GenericArgData::Ty(table.new_type_var()).intern(Interner),
+ ParamKind::Const(ty) => {
+ GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
+ }
+ })
+ }
+
+ pub fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self {
+ self.vec.extend(self.param_kinds.iter().skip(self.vec.len()).map(filler));
+ assert_eq!(self.remaining(), 0);
+ self
+ }
+
+ pub fn use_parent_substs(mut self, parent_substs: &Substitution) -> Self {
+ assert!(self.vec.is_empty());
+ assert!(parent_substs.len(Interner) <= self.param_kinds.len());
+ self.extend(parent_substs.iter(Interner).cloned());
+ self
+ }
+
+ fn extend(&mut self, it: impl Iterator<Item = GenericArg> + Clone) {
+ for x in it.clone().zip(self.param_kinds.iter().skip(self.vec.len())) {
+ self.assert_match_kind(&x.0, &x.1);
+ }
+ self.vec.extend(it);
+ }
+
+ fn assert_match_kind(&self, a: &chalk_ir::GenericArg<Interner>, e: &ParamKind) {
+ match (a.data(Interner), e) {
+ (chalk_ir::GenericArgData::Ty(_), ParamKind::Type)
+ | (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (),
+ _ => panic!("Mismatched kinds: {:?}, {:?}, {:?}", a, self.vec, self.param_kinds),
+ }
+ }
+}
+
+impl TyBuilder<()> {
+ pub fn unit() -> Ty {
+ TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner)
+ }
+
+ pub fn usize() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner)
+ }
+
+ pub fn fn_ptr(sig: CallableSig) -> Ty {
+ TyKind::Function(sig.to_fn_ptr()).intern(Interner)
+ }
+
+ pub fn builtin(builtin: BuiltinType) -> Ty {
+ match builtin {
+ BuiltinType::Char => TyKind::Scalar(Scalar::Char).intern(Interner),
+ BuiltinType::Bool => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ BuiltinType::Str => TyKind::Str.intern(Interner),
+ BuiltinType::Int(t) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(t))).intern(Interner)
+ }
+ BuiltinType::Uint(t) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(t))).intern(Interner)
+ }
+ BuiltinType::Float(t) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(t))).intern(Interner)
+ }
+ }
+ }
+
+ pub fn slice(argument: Ty) -> Ty {
+ TyKind::Slice(argument).intern(Interner)
+ }
+
+ pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
+ let params = generics(db.upcast(), def.into());
+ params.placeholder_subst(db)
+ }
+
+ pub fn subst_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> TyBuilder<()> {
+ let def = def.into();
+ let params = generics(db.upcast(), def);
+ TyBuilder::new(
+ (),
+ params
+ .iter()
+ .map(|(id, data)| match data {
+ TypeOrConstParamData::TypeParamData(_) => ParamKind::Type,
+ TypeOrConstParamData::ConstParamData(_) => {
+ ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id)))
+ }
+ })
+ .collect(),
+ )
+ }
+
+ pub fn build(self) -> Substitution {
+ let ((), subst) = self.build_internal();
+ subst
+ }
+}
+
+impl TyBuilder<hir_def::AdtId> {
+ pub fn adt(db: &dyn HirDatabase, def: hir_def::AdtId) -> TyBuilder<hir_def::AdtId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn fill_with_defaults(
+ mut self,
+ db: &dyn HirDatabase,
+ mut fallback: impl FnMut() -> Ty,
+ ) -> Self {
+ let defaults = db.generic_defaults(self.data.into());
+ for default_ty in defaults.iter().skip(self.vec.len()) {
+ if let GenericArgData::Ty(x) = default_ty.skip_binders().data(Interner) {
+ if x.is_unknown() {
+ self.vec.push(fallback().cast(Interner));
+ continue;
+ }
+ };
+ // each default can depend on the previous parameters
+ let subst_so_far = Substitution::from_iter(Interner, self.vec.clone());
+ self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));
+ }
+ self
+ }
+
+ pub fn build(self) -> Ty {
+ let (adt, subst) = self.build_internal();
+ TyKind::Adt(AdtId(adt), subst).intern(Interner)
+ }
+}
+
+pub struct Tuple(usize);
+impl TyBuilder<Tuple> {
+ pub fn tuple(size: usize) -> TyBuilder<Tuple> {
+ TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect())
+ }
+
+ pub fn build(self) -> Ty {
+ let (Tuple(size), subst) = self.build_internal();
+ TyKind::Tuple(size, subst).intern(Interner)
+ }
+}
+
+impl TyBuilder<TraitId> {
+ pub fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder<TraitId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn build(self) -> TraitRef {
+ let (trait_id, substitution) = self.build_internal();
+ TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution }
+ }
+}
+
+impl TyBuilder<TypeAliasId> {
+ pub fn assoc_type_projection(db: &dyn HirDatabase, def: TypeAliasId) -> TyBuilder<TypeAliasId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn build(self) -> ProjectionTy {
+ let (type_alias, substitution) = self.build_internal();
+ ProjectionTy { associated_ty_id: to_assoc_type_id(type_alias), substitution }
+ }
+}
+
+impl<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>> TyBuilder<Binders<T>> {
+ fn subst_binders(b: Binders<T>) -> Self {
+ let param_kinds = b
+ .binders
+ .iter(Interner)
+ .map(|x| match x {
+ chalk_ir::VariableKind::Ty(_) => ParamKind::Type,
+ chalk_ir::VariableKind::Lifetime => panic!("Got lifetime parameter"),
+ chalk_ir::VariableKind::Const(ty) => ParamKind::Const(ty.clone()),
+ })
+ .collect();
+ TyBuilder::new(b, param_kinds)
+ }
+
+ pub fn build(self) -> T {
+ let (b, subst) = self.build_internal();
+ b.substitute(Interner, &subst)
+ }
+}
+
+impl TyBuilder<Binders<Ty>> {
+ pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.ty(def))
+ }
+
+ pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.impl_self_ty(def))
+ }
+
+ pub fn value_ty(db: &dyn HirDatabase, def: ValueTyDefId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.value_ty(def))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
new file mode 100644
index 000000000..faec99c7d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -0,0 +1,799 @@
+//! The implementation of `RustIrDatabase` for Chalk, which provides information
+//! about the code that Chalk needs.
+use std::sync::Arc;
+
+use cov_mark::hit;
+use syntax::SmolStr;
+use tracing::debug;
+
+use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
+use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
+
+use base_db::CrateId;
+use hir_def::{
+ lang_item::{lang_attr, LangItemTarget},
+ AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId,
+};
+use hir_expand::name::name;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders,
+ make_single_type_binders,
+ mapping::{from_chalk, ToChalk, TypeAliasAsValue},
+ method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
+ to_assoc_type_id, to_chalk_trait_id,
+ traits::ChalkContext,
+ utils::generics,
+ AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, Interner, ProjectionTy,
+ ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder,
+ TyExt, TyKind, WhereClause,
+};
+
+pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
+pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
+pub(crate) type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
+pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
+pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
+
+pub(crate) type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub(crate) type TraitId = chalk_ir::TraitId<Interner>;
+pub(crate) type AdtId = chalk_ir::AdtId<Interner>;
+pub(crate) type ImplId = chalk_ir::ImplId<Interner>;
+pub(crate) type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
+pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
+pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
+pub(crate) type Variances = chalk_ir::Variances<Interner>;
+
+impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
+ fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
+ self.db.associated_ty_data(id)
+ }
+ fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
+ self.db.trait_datum(self.krate, trait_id)
+ }
+ fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
+ self.db.struct_datum(self.krate, struct_id)
+ }
+ fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> {
+ // FIXME: keep track of these
+ Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
+ }
+ fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: keep track of this
+ chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner)
+ }
+ fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
+ self.db.impl_datum(self.krate, impl_id)
+ }
+
+ fn fn_def_datum(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ ) -> Arc<rust_ir::FnDefDatum<Interner>> {
+ self.db.fn_def_datum(self.krate, fn_def_id)
+ }
+
+ fn impls_for_trait(
+ &self,
+ trait_id: TraitId,
+ parameters: &[chalk_ir::GenericArg<Interner>],
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Vec<ImplId> {
+ debug!("impls_for_trait {:?}", trait_id);
+ let trait_: hir_def::TraitId = from_chalk_trait_id(trait_id);
+
+ let ty: Ty = parameters[0].assert_ty_ref(Interner).clone();
+
+ fn binder_kind(
+ ty: &Ty,
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Option<chalk_ir::TyVariableKind> {
+ if let TyKind::BoundVar(bv) = ty.kind(Interner) {
+ let binders = binders.as_slice(Interner);
+ if bv.debruijn == DebruijnIndex::INNERMOST {
+ if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind {
+ return Some(tk);
+ }
+ }
+ }
+ None
+ }
+
+ let self_ty_fp = TyFingerprint::for_trait_impl(&ty);
+ let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
+ Some(chalk_ir::TyVariableKind::Integer) => &ALL_INT_FPS,
+ Some(chalk_ir::TyVariableKind::Float) => &ALL_FLOAT_FPS,
+ _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
+ };
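+ // `fps` narrows the impl search: integer/float inference variables match any
+ // numeric self type, otherwise we key on the concrete self-type fingerprint
+ // (an empty slice means we cannot narrow and must scan all impls).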
+
+ fn local_impls(db: &dyn HirDatabase, module: ModuleId) -> Option<Arc<TraitImpls>> {
+ let block = module.containing_block()?;
+ hit!(block_local_impls);
+ db.trait_impls_in_block(block)
+ }
+
+ // Note: Since we're using impls_for_trait, only impls where the trait
+ // can be resolved should ever reach Chalk. impl_datum relies on that
+ // and will panic if the trait can't be resolved.
+ let in_deps = self.db.trait_impls_in_deps(self.krate);
+ let in_self = self.db.trait_impls_in_crate(self.krate);
+ let trait_module = trait_.module(self.db.upcast());
+ let type_module = match self_ty_fp {
+ Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
+ Some(TyFingerprint::ForeignType(type_id)) => {
+ Some(from_foreign_def_id(type_id).module(self.db.upcast()))
+ }
+ Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
+ _ => None,
+ };
+ let impl_maps = [
+ Some(in_deps),
+ Some(in_self),
+ local_impls(self.db, trait_module),
+ type_module.and_then(|m| local_impls(self.db, m)),
+ ];
+
+ let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
+
+ let result: Vec<_> = if fps.is_empty() {
+ debug!("Unrestricted search for {:?} impls...", trait_);
+ impl_maps
+ .iter()
+ .filter_map(|o| o.as_ref())
+ .flat_map(|impls| impls.for_trait(trait_).map(id_to_chalk))
+ .collect()
+ } else {
+ impl_maps
+ .iter()
+ .filter_map(|o| o.as_ref())
+ .flat_map(|impls| {
+ fps.iter().flat_map(move |fp| {
+ impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
+ })
+ })
+ .collect()
+ };
+
+ debug!("impls_for_trait returned {} impls", result.len());
+ result
+ }
+ fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind<Interner>) -> bool {
+ debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind);
+ false // FIXME
+ }
+ fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
+ self.db.associated_ty_value(self.krate, id)
+ }
+
+ fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
+ vec![]
+ }
+ fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
+ // We don't do coherence checking (yet)
+ unimplemented!()
+ }
+ fn interner(&self) -> Interner {
+ Interner
+ }
+ fn well_known_trait_id(
+ &self,
+ well_known_trait: rust_ir::WellKnownTrait,
+ ) -> Option<chalk_ir::TraitId<Interner>> {
+ let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
+ let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return None,
+ };
+ Some(to_chalk_trait_id(trait_))
+ }
+
+ fn program_clauses_for_env(
+ &self,
+ environment: &chalk_ir::Environment<Interner>,
+ ) -> chalk_ir::ProgramClauses<Interner> {
+ self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
+ }
+
+ fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
+ let full_id = self.db.lookup_intern_impl_trait_id(id.into());
+ let bound = match full_id {
+ crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas = self
+ .db
+ .return_type_impl_traits(func)
+ .expect("impl trait id without impl traits");
+ let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders();
+ let data = &datas.impl_traits[idx as usize];
+ let bound = OpaqueTyDatumBound {
+ bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ chalk_ir::Binders::new(binders, bound)
+ }
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ if let Some((future_trait, future_output)) = self
+ .db
+ .lang_item(self.krate, SmolStr::new_inline("future_trait"))
+ .and_then(|item| item.as_trait())
+ .and_then(|trait_| {
+ let alias =
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])?;
+ Some((trait_, alias))
+ })
+ {
+ // Making up the bounds for AsyncBlock<T>:
+ //
+ // |--------------------OpaqueTyDatum-------------------|
+ // |-------------OpaqueTyDatumBound--------------|
+ // for<T> <Self> [Future<Self>, Future::Output<Self> = T]
+ // ^1 ^0 ^0 ^0 ^1
+ let impl_bound = WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(future_trait),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar {
+ debruijn: DebruijnIndex::INNERMOST,
+ index: 0,
+ })
+ .intern(Interner),
+ ),
+ });
+ let mut binder = vec![];
+ binder.push(crate::wrap_empty_binders(impl_bound));
+ let sized_trait = self
+ .db
+ .lang_item(self.krate, SmolStr::new_inline("sized"))
+ .and_then(|item| item.as_trait());
+ if let Some(sized_trait_) = sized_trait {
+ let sized_bound = WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(sized_trait_),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar {
+ debruijn: DebruijnIndex::INNERMOST,
+ index: 0,
+ })
+ .intern(Interner),
+ ),
+ });
+ binder.push(crate::wrap_empty_binders(sized_bound));
+ }
+ let proj_bound = WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(future_output),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ ),
+ }),
+ // The parameter of the opaque type.
+ ty: TyKind::BoundVar(BoundVar { debruijn: DebruijnIndex::ONE, index: 0 })
+ .intern(Interner),
+ });
+ binder.push(crate::wrap_empty_binders(proj_bound));
+ let bound = OpaqueTyDatumBound {
+ bounds: make_single_type_binders(binder),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ // The opaque type has 1 parameter.
+ make_single_type_binders(bound)
+ } else {
+ // If we failed to find Future::Output, return empty bounds as a fallback.
+ let bound = OpaqueTyDatumBound {
+ bounds: chalk_ir::Binders::empty(Interner, vec![]),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ // The opaque type has 1 parameter.
+ make_single_type_binders(bound)
+ }
+ }
+ };
+
+ Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound })
+ }
+
+ fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: actually provide the hidden type; it is relevant for auto traits
+ TyKind::Error.intern(Interner)
+ }
+
+ fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
+ // FIXME: implement actual object safety
+ true
+ }
+
+ fn closure_kind(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> rust_ir::ClosureKind {
+ // Fn is the closure kind that implements all three traits
+ rust_ir::ClosureKind::Fn
+ }
+ fn closure_inputs_and_output(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
+ let sig_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr");
+ let io = rust_ir::FnDefInputsAndOutputDatum {
+ argument_types: sig.params().to_vec(),
+ return_type: sig.ret().clone(),
+ };
+ chalk_ir::Binders::empty(Interner, io.shifted_in(Interner))
+ }
+ fn closure_upvars(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<chalk_ir::Ty<Interner>> {
+ let ty = TyBuilder::unit();
+ chalk_ir::Binders::empty(Interner, ty)
+ }
+ fn closure_fn_substitution(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Substitution<Interner> {
+ Substitution::empty(Interner)
+ }
+
+ fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
+ let id = from_chalk_trait_id(trait_id);
+ self.db.trait_data(id).name.to_string()
+ }
+ fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
+ match adt_id {
+ hir_def::AdtId::StructId(id) => self.db.struct_data(id).name.to_string(),
+ hir_def::AdtId::EnumId(id) => self.db.enum_data(id).name.to_string(),
+ hir_def::AdtId::UnionId(id) => self.db.union_data(id).name.to_string(),
+ }
+ }
+ fn adt_size_align(&self, _id: chalk_ir::AdtId<Interner>) -> Arc<rust_ir::AdtSizeAlign> {
+ // FIXME
+ Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false))
+ }
+ fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
+ let id = self.db.associated_ty_data(assoc_ty_id).name;
+ self.db.type_alias_data(id).name.to_string()
+ }
+ fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
+ format!("Opaque_{}", opaque_ty_id.0)
+ }
+ fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
+ format!("fn_{}", fn_def_id.0)
+ }
+ fn generator_datum(
+ &self,
+ _: chalk_ir::GeneratorId<Interner>,
+ ) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorDatum<Interner>> {
+ // FIXME
+ unimplemented!()
+ }
+ fn generator_witness_datum(
+ &self,
+ _: chalk_ir::GeneratorId<Interner>,
+ ) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorWitnessDatum<Interner>> {
+ // FIXME
+ unimplemented!()
+ }
+
+ fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase<Interner> {
+ &self.db
+ }
+}
+
+impl<'a> chalk_ir::UnificationDatabase<Interner> for &'a dyn HirDatabase {
+ fn fn_def_variance(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ ) -> chalk_ir::Variances<Interner> {
+ HirDatabase::fn_def_variance(*self, fn_def_id)
+ }
+
+ fn adt_variance(&self, adt_id: chalk_ir::AdtId<Interner>) -> chalk_ir::Variances<Interner> {
+ HirDatabase::adt_variance(*self, adt_id)
+ }
+}
+
+pub(crate) fn program_clauses_for_chalk_env_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ environment: chalk_ir::Environment<Interner>,
+) -> chalk_ir::ProgramClauses<Interner> {
+ chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
+}
+
+pub(crate) fn associated_ty_data_query(
+ db: &dyn HirDatabase,
+ id: AssocTypeId,
+) -> Arc<AssociatedTyDatum> {
+ debug!("associated_ty_data {:?}", id);
+ let type_alias: TypeAliasId = from_assoc_type_id(id);
+ let trait_ = match type_alias.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+
+ // Lower the bounds -- we could probably move this to a separate query in `lower`
+ let type_alias_data = db.type_alias_data(type_alias);
+ let generic_params = generics(db.upcast(), type_alias.into());
+ // let bound_vars = generic_params.bound_vars_subst(DebruijnIndex::INNERMOST);
+ let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
+ let ctx = crate::TyLoweringContext::new(db, &resolver)
+ .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
+ let pro_ty = TyBuilder::assoc_type_projection(db, type_alias)
+ .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
+ .build();
+ let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
+ let mut bounds: Vec<_> = type_alias_data
+ .bounds
+ .iter()
+ .flat_map(|bound| ctx.lower_type_bound(bound, self_ty.clone(), false))
+ .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
+ .collect();
+
+ if !ctx.unsized_types.borrow().contains(&self_ty) {
+ let sized_trait = db
+ .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+ let sized_bound = sized_trait.into_iter().map(|sized_trait| {
+ let trait_bound =
+ rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() };
+ let inline_bound = rust_ir::InlineBound::TraitBound(trait_bound);
+ chalk_ir::Binders::empty(Interner, inline_bound)
+ });
+ bounds.extend(sized_bound);
+ bounds.shrink_to_fit();
+ }
+
+ // FIXME: Re-enable where clauses on associated types when an upstream chalk bug is fixed.
+ // (rust-analyzer#9052)
+ // let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
+ let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] };
+ let datum = AssociatedTyDatum {
+ trait_id: to_chalk_trait_id(trait_),
+ id,
+ name: type_alias,
+ binders: make_binders(db, &generic_params, bound_data),
+ };
+ Arc::new(datum)
+}
+
+pub(crate) fn trait_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ trait_id: TraitId,
+) -> Arc<TraitDatum> {
+ debug!("trait_datum {:?}", trait_id);
+ let trait_ = from_chalk_trait_id(trait_id);
+ let trait_data = db.trait_data(trait_);
+ debug!("trait {:?} = {:?}", trait_id, trait_data.name);
+ let generic_params = generics(db.upcast(), trait_.into());
+ let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let flags = rust_ir::TraitFlags {
+ auto: trait_data.is_auto,
+ upstream: trait_.lookup(db.upcast()).container.krate() != krate,
+ non_enumerable: true,
+ coinductive: false, // only relevant for Chalk testing
+ // FIXME: set these flags correctly
+ marker: false,
+ fundamental: false,
+ };
+ let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
+ let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
+ let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
+ let well_known =
+ lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
+ let trait_datum = TraitDatum {
+ id: trait_id,
+ binders: make_binders(db, &generic_params, trait_datum_bound),
+ flags,
+ associated_ty_ids,
+ well_known,
+ };
+ Arc::new(trait_datum)
+}
+
+fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
+ Some(match name {
+ "clone" => WellKnownTrait::Clone,
+ "coerce_unsized" => WellKnownTrait::CoerceUnsized,
+ "copy" => WellKnownTrait::Copy,
+ "discriminant_kind" => WellKnownTrait::DiscriminantKind,
+ "dispatch_from_dyn" => WellKnownTrait::DispatchFromDyn,
+ "drop" => WellKnownTrait::Drop,
+ "fn" => WellKnownTrait::Fn,
+ "fn_mut" => WellKnownTrait::FnMut,
+ "fn_once" => WellKnownTrait::FnOnce,
+ "generator" => WellKnownTrait::Generator,
+ "sized" => WellKnownTrait::Sized,
+ "unpin" => WellKnownTrait::Unpin,
+ "unsize" => WellKnownTrait::Unsize,
+ _ => return None,
+ })
+}
+
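+// Note: this is the inverse of `well_known_trait_from_lang_attr` above; the two
+// tables are expected to stay in sync when a new well-known trait is added.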
+fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
+ match attr {
+ WellKnownTrait::Clone => "clone",
+ WellKnownTrait::CoerceUnsized => "coerce_unsized",
+ WellKnownTrait::Copy => "copy",
+ WellKnownTrait::DiscriminantKind => "discriminant_kind",
+ WellKnownTrait::DispatchFromDyn => "dispatch_from_dyn",
+ WellKnownTrait::Drop => "drop",
+ WellKnownTrait::Fn => "fn",
+ WellKnownTrait::FnMut => "fn_mut",
+ WellKnownTrait::FnOnce => "fn_once",
+ WellKnownTrait::Generator => "generator",
+ WellKnownTrait::Sized => "sized",
+ WellKnownTrait::Unpin => "unpin",
+ WellKnownTrait::Unsize => "unsize",
+ }
+}
+
+pub(crate) fn struct_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ struct_id: AdtId,
+) -> Arc<StructDatum> {
+ debug!("struct_datum {:?}", struct_id);
+ let chalk_ir::AdtId(adt_id) = struct_id;
+ let generic_params = generics(db.upcast(), adt_id.into());
+ let upstream = adt_id.module(db.upcast()).krate() != krate;
+ let where_clauses = {
+ let generic_params = generics(db.upcast(), adt_id.into());
+ let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ convert_where_clauses(db, adt_id.into(), &bound_vars)
+ };
+ let flags = rust_ir::AdtFlags {
+ upstream,
+ // FIXME set fundamental and phantom_data flags correctly
+ fundamental: false,
+ phantom_data: false,
+ };
+ // FIXME provide enum variants properly (for auto traits)
+ let variant = rust_ir::AdtVariantDatum {
+ fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
+ };
+ let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
+ let struct_datum = StructDatum {
+ // FIXME set ADT kind
+ kind: rust_ir::AdtKind::Struct,
+ id: struct_id,
+ binders: make_binders(db, &generic_params, struct_datum_bound),
+ flags,
+ };
+ Arc::new(struct_datum)
+}
+
+pub(crate) fn impl_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ impl_id: ImplId,
+) -> Arc<ImplDatum> {
+ let _p = profile::span("impl_datum");
+ debug!("impl_datum {:?}", impl_id);
+ let impl_: hir_def::ImplId = from_chalk(db, impl_id);
+ impl_def_datum(db, krate, impl_id, impl_)
+}
+
+fn impl_def_datum(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ chalk_id: ImplId,
+ impl_id: hir_def::ImplId,
+) -> Arc<ImplDatum> {
+ let trait_ref = db
+ .impl_trait(impl_id)
+ // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
+ .expect("invalid impl passed to Chalk")
+ .into_value_and_skipped_binders()
+ .0;
+ let impl_data = db.impl_data(impl_id);
+
+ let generic_params = generics(db.upcast(), impl_id.into());
+ let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let trait_ = trait_ref.hir_trait_id();
+ let impl_type = if impl_id.lookup(db.upcast()).container.krate() == krate {
+ rust_ir::ImplType::Local
+ } else {
+ rust_ir::ImplType::External
+ };
+ let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
+ let negative = impl_data.is_negative;
+ debug!(
+ "impl {:?}: {}{} where {:?}",
+ chalk_id,
+ if negative { "!" } else { "" },
+ trait_ref.display(db),
+ where_clauses
+ );
+
+ let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
+
+ let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
+ let trait_data = db.trait_data(trait_);
+ let associated_ty_value_ids = impl_data
+ .items
+ .iter()
+ .filter_map(|item| match item {
+ AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
+ _ => None,
+ })
+ .filter(|&type_alias| {
+ // don't include associated types that don't exist in the trait
+ let name = &db.type_alias_data(type_alias).name;
+ trait_data.associated_type_by_name(name).is_some()
+ })
+ .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
+ .collect();
+ debug!("impl_datum: {:?}", impl_datum_bound);
+ let impl_datum = ImplDatum {
+ binders: make_binders(db, &generic_params, impl_datum_bound),
+ impl_type,
+ polarity,
+ associated_ty_value_ids,
+ };
+ Arc::new(impl_datum)
+}
+
+pub(crate) fn associated_ty_value_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ id: AssociatedTyValueId,
+) -> Arc<AssociatedTyValue> {
+ let type_alias: TypeAliasAsValue = from_chalk(db, id);
+ type_alias_associated_ty_value(db, krate, type_alias.0)
+}
+
+fn type_alias_associated_ty_value(
+ db: &dyn HirDatabase,
+ _krate: CrateId,
+ type_alias: TypeAliasId,
+) -> Arc<AssociatedTyValue> {
+ let type_alias_data = db.type_alias_data(type_alias);
+ let impl_id = match type_alias.lookup(db.upcast()).container {
+ ItemContainerId::ImplId(it) => it,
+ _ => panic!("assoc ty value should be in impl"),
+ };
+
+ let trait_ref = db
+ .impl_trait(impl_id)
+ .expect("assoc ty value should not exist")
+ .into_value_and_skipped_binders()
+ .0; // we don't return any assoc ty values if the impl'd trait can't be resolved
+
+ let assoc_ty = db
+ .trait_data(trait_ref.hir_trait_id())
+ .associated_type_by_name(&type_alias_data.name)
+ .expect("assoc ty value should not exist"); // validated when building the impl data as well
+ let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
+ let value_bound = rust_ir::AssociatedTyValueBound { ty };
+ let value = rust_ir::AssociatedTyValue {
+ impl_id: impl_id.to_chalk(db),
+ associated_ty_id: to_assoc_type_id(assoc_ty),
+ value: chalk_ir::Binders::new(binders, value_bound),
+ };
+ Arc::new(value)
+}
+
+pub(crate) fn fn_def_datum_query(
+ db: &dyn HirDatabase,
+ _krate: CrateId,
+ fn_def_id: FnDefId,
+) -> Arc<FnDefDatum> {
+ let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+ let generic_params = generics(db.upcast(), callable_def.into());
+ let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
+ let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
+ let bound = rust_ir::FnDefDatumBound {
+ // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
+ inputs_and_output: chalk_ir::Binders::empty(
+ Interner,
+ rust_ir::FnDefInputsAndOutputDatum {
+ argument_types: sig.params().to_vec(),
+ return_type: sig.ret().clone(),
+ }
+ .shifted_in(Interner),
+ ),
+ where_clauses,
+ };
+ let datum = FnDefDatum {
+ id: fn_def_id,
+ sig: chalk_ir::FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: sig.is_varargs },
+ binders: chalk_ir::Binders::new(binders, bound),
+ };
+ Arc::new(datum)
+}
+
+pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
+ let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+ let generic_params = generics(db.upcast(), callable_def.into());
+ Variances::from_iter(
+ Interner,
+ std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+ )
+}
+
+pub(crate) fn adt_variance_query(
+ db: &dyn HirDatabase,
+ chalk_ir::AdtId(adt_id): AdtId,
+) -> Variances {
+ let generic_params = generics(db.upcast(), adt_id.into());
+ Variances::from_iter(
+ Interner,
+ std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+ )
+}
+
+pub(super) fn convert_where_clauses(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ substs: &Substitution,
+) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
+ let generic_predicates = db.generic_predicates(def);
+ let mut result = Vec::with_capacity(generic_predicates.len());
+ for pred in generic_predicates.iter() {
+ result.push(pred.clone().substitute(Interner, substs));
+ }
+ result
+}
+
+pub(super) fn generic_predicate_to_inline_bound(
+ db: &dyn HirDatabase,
+ pred: &QuantifiedWhereClause,
+ self_ty: &Ty,
+) -> Option<chalk_ir::Binders<rust_ir::InlineBound<Interner>>> {
+ // An InlineBound is like a GenericPredicate, except the self type is left out.
+ // We don't have a special type for this, but Chalk does.
+ let self_ty_shifted_in = self_ty.clone().shifted_in_from(Interner, DebruijnIndex::ONE);
+ let (pred, binders) = pred.as_ref().into_value_and_skipped_binders();
+ match pred {
+ WhereClause::Implemented(trait_ref) => {
+ if trait_ref.self_type_parameter(Interner) != self_ty_shifted_in {
+ // we can only convert predicates back to type bounds if they
+ // have the expected self type
+ return None;
+ }
+ let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
+ .iter()
+ .map(|ty| ty.clone().cast(Interner))
+ .collect();
+ let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
+ Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
+ }
+ WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
+ if projection_ty.self_type_parameter(Interner) != self_ty_shifted_in {
+ return None;
+ }
+ let trait_ = projection_ty.trait_(db);
+ let args_no_self = projection_ty.substitution.as_slice(Interner)[1..]
+ .iter()
+ .map(|ty| ty.clone().cast(Interner))
+ .collect();
+ let alias_eq_bound = rust_ir::AliasEqBound {
+ value: ty.clone(),
+ trait_bound: rust_ir::TraitBound {
+ trait_id: to_chalk_trait_id(trait_),
+ args_no_self,
+ },
+ associated_ty_id: projection_ty.associated_ty_id,
+ parameters: Vec::new(), // FIXME we don't support generic associated types yet
+ };
+ Some(chalk_ir::Binders::new(
+ binders,
+ rust_ir::InlineBound::AliasEqBound(alias_eq_bound),
+ ))
+ }
+ _ => None,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
new file mode 100644
index 000000000..a9c124b42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -0,0 +1,358 @@
+//! Various extension traits for Chalk types.
+
+use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, UintTy};
+use hir_def::{
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
+ generics::TypeOrConstParamData,
+ type_ref::Rawness,
+ FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
+};
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
+ from_placeholder_idx, to_chalk_trait_id, AdtId, AliasEq, AliasTy, Binders, CallableDefId,
+ CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause,
+ Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+};
+
+pub trait TyExt {
+ fn is_unit(&self) -> bool;
+ fn is_never(&self) -> bool;
+ fn is_unknown(&self) -> bool;
+ fn is_ty_var(&self) -> bool;
+
+ fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
+ fn as_builtin(&self) -> Option<BuiltinType>;
+ fn as_tuple(&self) -> Option<&Substitution>;
+ fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>;
+ fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>;
+ fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)>;
+ fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId>;
+
+ fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId>;
+ fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig>;
+
+ fn strip_references(&self) -> &Ty;
+ fn strip_reference(&self) -> &Ty;
+
+ /// If this is a `dyn Trait`, returns that trait.
+ fn dyn_trait(&self) -> Option<TraitId>;
+
+ fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
+ fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;
+
+ /// FIXME: Get rid of this, it's not a good abstraction
+ fn equals_ctor(&self, other: &Ty) -> bool;
+}
+
+impl TyExt for Ty {
+ fn is_unit(&self) -> bool {
+ matches!(self.kind(Interner), TyKind::Tuple(0, _))
+ }
+
+ fn is_never(&self) -> bool {
+ matches!(self.kind(Interner), TyKind::Never)
+ }
+
+ fn is_unknown(&self) -> bool {
+ matches!(self.kind(Interner), TyKind::Error)
+ }
+
+ fn is_ty_var(&self) -> bool {
+ matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
+ }
+
+ fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> {
+ match self.kind(Interner) {
+ TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
+ _ => None,
+ }
+ }
+
+ fn as_builtin(&self) -> Option<BuiltinType> {
+ match self.kind(Interner) {
+ TyKind::Str => Some(BuiltinType::Str),
+ TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool),
+ TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char),
+ TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty {
+ FloatTy::F64 => BuiltinFloat::F64,
+ FloatTy::F32 => BuiltinFloat::F32,
+ })),
+ TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity {
+ IntTy::Isize => BuiltinInt::Isize,
+ IntTy::I8 => BuiltinInt::I8,
+ IntTy::I16 => BuiltinInt::I16,
+ IntTy::I32 => BuiltinInt::I32,
+ IntTy::I64 => BuiltinInt::I64,
+ IntTy::I128 => BuiltinInt::I128,
+ })),
+ TyKind::Scalar(Scalar::Uint(ity)) => Some(BuiltinType::Uint(match ity {
+ UintTy::Usize => BuiltinUint::Usize,
+ UintTy::U8 => BuiltinUint::U8,
+ UintTy::U16 => BuiltinUint::U16,
+ UintTy::U32 => BuiltinUint::U32,
+ UintTy::U64 => BuiltinUint::U64,
+ UintTy::U128 => BuiltinUint::U128,
+ })),
+ _ => None,
+ }
+ }
+
+ fn as_tuple(&self) -> Option<&Substitution> {
+ match self.kind(Interner) {
+ TyKind::Tuple(_, substs) => Some(substs),
+ _ => None,
+ }
+ }
+
+ fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId> {
+ match self.callable_def(db) {
+ Some(CallableDefId::FunctionId(func)) => Some(func),
+ Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None,
+ }
+ }
+ fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> {
+ match self.kind(Interner) {
+ TyKind::Ref(mutability, lifetime, ty) => Some((ty, lifetime.clone(), *mutability)),
+ _ => None,
+ }
+ }
+
+ fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
+ match self.kind(Interner) {
+ TyKind::Ref(mutability, _, ty) => Some((ty, Rawness::Ref, *mutability)),
+ TyKind::Raw(mutability, ty) => Some((ty, Rawness::RawPtr, *mutability)),
+ _ => None,
+ }
+ }
+
+ fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> {
+ match *self.kind(Interner) {
+ TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
+ TyKind::FnDef(callable, ..) => {
+ Some(db.lookup_intern_callable_def(callable.into()).into())
+ }
+ TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
+ TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()),
+ _ => None,
+ }
+ }
+
+ fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId> {
+ match self.kind(Interner) {
+ &TyKind::FnDef(def, ..) => Some(db.lookup_intern_callable_def(def.into())),
+ _ => None,
+ }
+ }
+
+ fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig> {
+ match self.kind(Interner) {
+ TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)),
+ TyKind::FnDef(def, parameters) => {
+ let callable_def = db.lookup_intern_callable_def((*def).into());
+ let sig = db.callable_item_signature(callable_def);
+ Some(sig.substitute(Interner, &parameters))
+ }
+ TyKind::Closure(.., substs) => {
+ let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
+ sig_param.callable_sig(db)
+ }
+ _ => None,
+ }
+ }
+
+ fn dyn_trait(&self) -> Option<TraitId> {
+ let trait_ref = match self.kind(Interner) {
+ TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().get(0).and_then(|b| {
+ match b.skip_binders() {
+ WhereClause::Implemented(trait_ref) => Some(trait_ref),
+ _ => None,
+ }
+ }),
+ _ => None,
+ }?;
+ Some(from_chalk_trait_id(trait_ref.trait_id))
+ }
+
+ fn strip_references(&self) -> &Ty {
+ let mut t: &Ty = self;
+ while let TyKind::Ref(_mutability, _lifetime, ty) = t.kind(Interner) {
+ t = ty;
+ }
+ t
+ }
+
+ fn strip_reference(&self) -> &Ty {
+ self.as_reference().map_or(self, |(ty, _, _)| ty)
+ }
+
+ fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>> {
+ match self.kind(Interner) {
+ TyKind::OpaqueType(opaque_ty_id, subst) => {
+ match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
+ ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
+ let krate = def.module(db.upcast()).krate();
+ if let Some(future_trait) = db
+ .lang_item(krate, SmolStr::new_inline("future_trait"))
+ .and_then(|item| item.as_trait())
+ {
+ // This is only used by type walking.
+ // Parameters will be walked outside, and projection predicate is not used.
+ // So just provide the Future trait.
+ let impl_bound = Binders::empty(
+ Interner,
+ WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(future_trait),
+ substitution: Substitution::empty(Interner),
+ }),
+ );
+ Some(vec![impl_bound])
+ } else {
+ None
+ }
+ }
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ db.return_type_impl_traits(func).map(|it| {
+ let data = (*it)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ data.substitute(Interner, &subst).into_value_and_skipped_binders().0
+ })
+ }
+ }
+ }
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ let predicates = match db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into())
+ {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ db.return_type_impl_traits(func).map(|it| {
+ let data = (*it)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ data.substitute(Interner, &opaque_ty.substitution)
+ })
+ }
+ // It always has a parameter for the Future::Output type.
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => unreachable!(),
+ };
+
+ predicates.map(|it| it.into_value_and_skipped_binders().0)
+ }
+ TyKind::Placeholder(idx) => {
+ let id = from_placeholder_idx(db, *idx);
+ let generic_params = db.generic_params(id.parent);
+ let param_data = &generic_params.type_or_consts[id.local_id];
+ match param_data {
+ TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+ hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
+ let substs = TyBuilder::placeholder_subst(db, id.parent);
+ let predicates = db
+ .generic_predicates(id.parent)
+ .iter()
+ .map(|pred| pred.clone().substitute(Interner, &substs))
+ .filter(|wc| match &wc.skip_binders() {
+ WhereClause::Implemented(tr) => {
+ &tr.self_type_parameter(Interner) == self
+ }
+ WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(proj),
+ ty: _,
+ }) => &proj.self_type_parameter(Interner) == self,
+ _ => false,
+ })
+ .collect::<Vec<_>>();
+
+ Some(predicates)
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ }
+
+ fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
+ match self.kind(Interner) {
+ TyKind::AssociatedType(id, ..) => {
+ match from_assoc_type_id(*id).lookup(db.upcast()).container {
+ ItemContainerId::TraitId(trait_id) => Some(trait_id),
+ _ => None,
+ }
+ }
+ TyKind::Alias(AliasTy::Projection(projection_ty)) => {
+ match from_assoc_type_id(projection_ty.associated_ty_id)
+ .lookup(db.upcast())
+ .container
+ {
+ ItemContainerId::TraitId(trait_id) => Some(trait_id),
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ }
+
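+ /// Compares only the head type constructors, ignoring their arguments:
+ /// e.g. `Vec<u32>` and `Vec<String>` share the same `Adt` constructor and
+ /// therefore compare equal here.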
+ fn equals_ctor(&self, other: &Ty) -> bool {
+ match (self.kind(Interner), other.kind(Interner)) {
+ (TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,
+ (TyKind::Slice(_), TyKind::Slice(_)) | (TyKind::Array(_, _), TyKind::Array(_, _)) => {
+ true
+ }
+ (TyKind::FnDef(def_id, ..), TyKind::FnDef(def_id2, ..)) => def_id == def_id2,
+ (TyKind::OpaqueType(ty_id, ..), TyKind::OpaqueType(ty_id2, ..)) => ty_id == ty_id2,
+ (TyKind::AssociatedType(ty_id, ..), TyKind::AssociatedType(ty_id2, ..)) => {
+ ty_id == ty_id2
+ }
+ (TyKind::Foreign(ty_id, ..), TyKind::Foreign(ty_id2, ..)) => ty_id == ty_id2,
+ (TyKind::Closure(id1, _), TyKind::Closure(id2, _)) => id1 == id2,
+ (TyKind::Ref(mutability, ..), TyKind::Ref(mutability2, ..))
+ | (TyKind::Raw(mutability, ..), TyKind::Raw(mutability2, ..)) => {
+ mutability == mutability2
+ }
+ (
+ TyKind::Function(FnPointer { num_binders, sig, .. }),
+ TyKind::Function(FnPointer { num_binders: num_binders2, sig: sig2, .. }),
+ ) => num_binders == num_binders2 && sig == sig2,
+ (TyKind::Tuple(cardinality, _), TyKind::Tuple(cardinality2, _)) => {
+ cardinality == cardinality2
+ }
+ (TyKind::Str, TyKind::Str) | (TyKind::Never, TyKind::Never) => true,
+ (TyKind::Scalar(scalar), TyKind::Scalar(scalar2)) => scalar == scalar2,
+ _ => false,
+ }
+ }
+}
+
+pub trait ProjectionTyExt {
+ fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef;
+ fn trait_(&self, db: &dyn HirDatabase) -> TraitId;
+}
+
+impl ProjectionTyExt for ProjectionTy {
+ fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
+ TraitRef {
+ trait_id: to_chalk_trait_id(self.trait_(db)),
+ substitution: self.substitution.clone(),
+ }
+ }
+
+ fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
+ match from_assoc_type_id(self.associated_ty_id).lookup(db.upcast()).container {
+ ItemContainerId::TraitId(it) => it,
+ _ => panic!("projection ty without parent trait"),
+ }
+ }
+}
+
+pub trait TraitRefExt {
+ fn hir_trait_id(&self) -> TraitId;
+}
+
+impl TraitRefExt for TraitRef {
+ fn hir_trait_id(&self) -> TraitId {
+ from_chalk_trait_id(self.trait_id)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
new file mode 100644
index 000000000..0495a4e64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -0,0 +1,469 @@
+//! Constant evaluation details
+
+use std::{
+ collections::HashMap,
+ convert::TryInto,
+ fmt::{Display, Write},
+};
+
+use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
+use hir_def::{
+ expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
+ path::ModPath,
+ resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
+ type_ref::ConstScalar,
+ ConstId, DefWithBodyId,
+};
+use la_arena::{Arena, Idx};
+use stdx::never;
+
+use crate::{
+ db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
+ utils::Generics, Const, ConstData, ConstValue, GenericArg, InferenceResult, Interner, Ty,
+ TyBuilder, TyKind,
+};
+
+/// Extension trait for [`Const`]
+pub trait ConstExt {
+ /// Is a [`Const`] unknown?
+ fn is_unknown(&self) -> bool;
+}
+
+impl ConstExt for Const {
+ fn is_unknown(&self) -> bool {
+ match self.data(Interner).value {
+ // interned Unknown
+ chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
+ interned: ConstScalar::Unknown,
+ }) => true,
+
+ // any other interned concrete value
+ chalk_ir::ConstValue::Concrete(..) => false,
+
+ _ => {
+ tracing::error!(
+ "is_unknown was called on a non-concrete constant value! {:?}",
+ self
+ );
+ true
+ }
+ }
+ }
+}
+
+pub struct ConstEvalCtx<'a> {
+ pub db: &'a dyn HirDatabase,
+ pub owner: DefWithBodyId,
+ pub exprs: &'a Arena<Expr>,
+ pub pats: &'a Arena<Pat>,
+ pub local_data: HashMap<PatId, ComputedExpr>,
+ infer: &'a InferenceResult,
+}
+
+impl ConstEvalCtx<'_> {
+ fn expr_ty(&mut self, expr: ExprId) -> Ty {
+ self.infer[expr].clone()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ConstEvalError {
+ NotSupported(&'static str),
+ SemanticError(&'static str),
+ Loop,
+ IncompleteExpr,
+ Panic(String),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ComputedExpr {
+ Literal(Literal),
+ Tuple(Box<[ComputedExpr]>),
+}
+
+impl Display for ComputedExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ ComputedExpr::Literal(l) => match l {
+ Literal::Int(x, _) => {
+ if *x >= 10 {
+ write!(f, "{} ({:#X})", x, x)
+ } else {
+ x.fmt(f)
+ }
+ }
+ Literal::Uint(x, _) => {
+ if *x >= 10 {
+ write!(f, "{} ({:#X})", x, x)
+ } else {
+ x.fmt(f)
+ }
+ }
+ Literal::Float(x, _) => x.fmt(f),
+ Literal::Bool(x) => x.fmt(f),
+ Literal::Char(x) => std::fmt::Debug::fmt(x, f),
+ Literal::String(x) => std::fmt::Debug::fmt(x, f),
+ Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
+ },
+ ComputedExpr::Tuple(t) => {
+ f.write_char('(')?;
+ for x in &**t {
+ x.fmt(f)?;
+ f.write_str(", ")?;
+ }
+ f.write_char(')')
+ }
+ }
+ }
+}
+
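+/// Returns the maximum value representable by the given scalar type, as an
+/// `i128` (floats are reported as 0, and `u128` is capped at `i128::MAX`).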
+fn scalar_max(scalar: &Scalar) -> i128 {
+ match scalar {
+ Scalar::Bool => 1,
+ Scalar::Char => u32::MAX as i128,
+ Scalar::Int(x) => match x {
+ IntTy::Isize => isize::MAX as i128,
+ IntTy::I8 => i8::MAX as i128,
+ IntTy::I16 => i16::MAX as i128,
+ IntTy::I32 => i32::MAX as i128,
+ IntTy::I64 => i64::MAX as i128,
+ IntTy::I128 => i128::MAX as i128,
+ },
+ Scalar::Uint(x) => match x {
+ chalk_ir::UintTy::Usize => usize::MAX as i128,
+ chalk_ir::UintTy::U8 => u8::MAX as i128,
+ chalk_ir::UintTy::U16 => u16::MAX as i128,
+ chalk_ir::UintTy::U32 => u32::MAX as i128,
+ chalk_ir::UintTy::U64 => u64::MAX as i128,
+ chalk_ir::UintTy::U128 => i128::MAX as i128, // ignore too big u128 for now
+ },
+ Scalar::Float(_) => 0,
+ }
+}
+
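+/// Checks whether `value` fits into the value range of `scalar`, e.g.
+/// `is_valid(&Scalar::Int(IntTy::I8), 128)` is false (`i8::MAX` is 127),
+/// while `is_valid(&Scalar::Int(IntTy::I8), -128)` is true.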
+fn is_valid(scalar: &Scalar, value: i128) -> bool {
+ if value < 0 {
+ !matches!(scalar, Scalar::Uint(_)) && -scalar_max(scalar) - 1 <= value
+ } else {
+ value <= scalar_max(scalar)
+ }
+}
+
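+/// Evaluates a constant expression from the body of `ctx.owner`. Only a small
+/// subset of expressions is supported: literals, integer unary/binary
+/// arithmetic with overflow checks, blocks with simple `let` bindings, and
+/// paths resolving to other consts or local bindings; anything else yields a
+/// `ConstEvalError`.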
+pub fn eval_const(
+ expr_id: ExprId,
+ ctx: &mut ConstEvalCtx<'_>,
+) -> Result<ComputedExpr, ConstEvalError> {
+ let expr = &ctx.exprs[expr_id];
+ match expr {
+ Expr::Missing => Err(ConstEvalError::IncompleteExpr),
+ Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
+ &Expr::UnaryOp { expr, op } => {
+ let ty = &ctx.expr_ty(expr);
+ let ev = eval_const(expr, ctx)?;
+ match op {
+ hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
+ hir_def::expr::UnaryOp::Not => {
+ let v = match ev {
+ ComputedExpr::Literal(Literal::Bool(b)) => {
+ return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
+ }
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => v
+ .try_into()
+ .map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ let r = match ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Uint(x)) => match x {
+ chalk_ir::UintTy::U8 => !(v as u8) as i128,
+ chalk_ir::UintTy::U16 => !(v as u16) as i128,
+ chalk_ir::UintTy::U32 => !(v as u32) as i128,
+ chalk_ir::UintTy::U64 => !(v as u64) as i128,
+ chalk_ir::UintTy::U128 => {
+ return Err(ConstEvalError::NotSupported("negation of u128"))
+ }
+ chalk_ir::UintTy::Usize => !(v as usize) as i128,
+ },
+ TyKind::Scalar(Scalar::Int(x)) => match x {
+ chalk_ir::IntTy::I8 => !(v as i8) as i128,
+ chalk_ir::IntTy::I16 => !(v as i16) as i128,
+ chalk_ir::IntTy::I32 => !(v as i32) as i128,
+ chalk_ir::IntTy::I64 => !(v as i64) as i128,
+ chalk_ir::IntTy::I128 => !v,
+ chalk_ir::IntTy::Isize => !(v as isize) as i128,
+ },
+ _ => return Err(ConstEvalError::NotSupported("unreachable?")),
+ };
+ Ok(ComputedExpr::Literal(Literal::Int(r, None)))
+ }
+ hir_def::expr::UnaryOp::Neg => {
+ let v = match ev {
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => v
+ .try_into()
+ .map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ Ok(ComputedExpr::Literal(Literal::Int(
+ v.checked_neg().ok_or_else(|| {
+ ConstEvalError::Panic("overflow in negation".to_string())
+ })?,
+ None,
+ )))
+ }
+ }
+ }
+ &Expr::BinaryOp { lhs, rhs, op } => {
+ let ty = &ctx.expr_ty(lhs);
+ let lhs = eval_const(lhs, ctx)?;
+ let rhs = eval_const(rhs, ctx)?;
+ let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
+ let v1 = match lhs {
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => {
+ v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
+ }
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ let v2 = match rhs {
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => {
+ v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
+ }
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ match op {
+ BinaryOp::ArithOp(b) => {
+ let panic_arith = ConstEvalError::Panic(
+ "attempt to run invalid arithmetic operation".to_string(),
+ );
+ let r = match b {
+ ArithOp::Add => v1.checked_add(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Mul => v1.checked_mul(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Sub => v1.checked_sub(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Div => v1.checked_div(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Rem => v1.checked_rem(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Shl => v1
+ .checked_shl(v2.try_into().map_err(|_| panic_arith.clone())?)
+ .ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Shr => v1
+ .checked_shr(v2.try_into().map_err(|_| panic_arith.clone())?)
+ .ok_or_else(|| panic_arith.clone())?,
+ ArithOp::BitXor => v1 ^ v2,
+ ArithOp::BitOr => v1 | v2,
+ ArithOp::BitAnd => v1 & v2,
+ };
+ if let TyKind::Scalar(s) = ty.kind(Interner) {
+ if !is_valid(s, r) {
+ return Err(panic_arith);
+ }
+ }
+ Ok(ComputedExpr::Literal(Literal::Int(r, None)))
+ }
+ BinaryOp::LogicOp(_) => Err(ConstEvalError::SemanticError("logic op on numbers")),
+ _ => Err(ConstEvalError::NotSupported("bin op on this operators")),
+ }
+ }
+ Expr::Block { statements, tail, .. } => {
+ let mut prev_values = HashMap::<PatId, Option<ComputedExpr>>::default();
+ for statement in &**statements {
+ match *statement {
+ hir_def::expr::Statement::Let { pat: pat_id, initializer, .. } => {
+ let pat = &ctx.pats[pat_id];
+ match pat {
+ Pat::Bind { subpat, .. } if subpat.is_none() => (),
+ _ => {
+ return Err(ConstEvalError::NotSupported("complex patterns in let"))
+ }
+ };
+ let value = match initializer {
+ Some(x) => eval_const(x, ctx)?,
+ None => continue,
+ };
+ if !prev_values.contains_key(&pat_id) {
+ let prev = ctx.local_data.insert(pat_id, value);
+ prev_values.insert(pat_id, prev);
+ } else {
+ ctx.local_data.insert(pat_id, value);
+ }
+ }
+ hir_def::expr::Statement::Expr { .. } => {
+ return Err(ConstEvalError::NotSupported("this kind of statement"))
+ }
+ }
+ }
+ let r = match tail {
+ &Some(x) => eval_const(x, ctx),
+ None => Ok(ComputedExpr::Tuple(Box::new([]))),
+ };
+ // Clean up local data, so the caller receives exactly the map that was passed to us.
+ for (name, val) in prev_values {
+ match val {
+ Some(x) => ctx.local_data.insert(name, x),
+ None => ctx.local_data.remove(&name),
+ };
+ }
+ r
+ }
+ Expr::Path(p) => {
+ let resolver = resolver_for_expr(ctx.db.upcast(), ctx.owner, expr_id);
+ let pr = resolver
+ .resolve_path_in_value_ns(ctx.db.upcast(), p.mod_path())
+ .ok_or(ConstEvalError::SemanticError("unresolved path"))?;
+ let pr = match pr {
+ ResolveValueResult::ValueNs(v) => v,
+ ResolveValueResult::Partial(..) => {
+ return match ctx
+ .infer
+ .assoc_resolutions_for_expr(expr_id)
+ .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))?
+ {
+ hir_def::AssocItemId::FunctionId(_) => {
+ Err(ConstEvalError::NotSupported("assoc function"))
+ }
+ hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c),
+ hir_def::AssocItemId::TypeAliasId(_) => {
+ Err(ConstEvalError::NotSupported("assoc type alias"))
+ }
+ }
+ }
+ };
+ match pr {
+ ValueNs::LocalBinding(pat_id) => {
+ let r = ctx
+ .local_data
+ .get(&pat_id)
+ .ok_or(ConstEvalError::NotSupported("Unexpected missing local"))?;
+ Ok(r.clone())
+ }
+ ValueNs::ConstId(id) => ctx.db.const_eval(id),
+ ValueNs::GenericParam(_) => {
+ Err(ConstEvalError::NotSupported("const generic without substitution"))
+ }
+ _ => Err(ConstEvalError::NotSupported("path that are not const or local")),
+ }
+ }
+ _ => Err(ConstEvalError::NotSupported("This kind of expression")),
+ }
+}
+
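+/// Resolves a path to a const generic parameter, lowering it either to a
+/// placeholder or to a bound variable depending on `mode`. Returns `None` if
+/// the path does not resolve to a generic parameter.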
+pub(crate) fn path_to_const(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &ModPath,
+ mode: ParamLoweringMode,
+ args_lazy: impl FnOnce() -> Generics,
+ debruijn: DebruijnIndex,
+) -> Option<Const> {
+ match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::GenericParam(p)) => {
+ let ty = db.const_param_ty(p);
+ let args = args_lazy();
+ let value = match mode {
+ ParamLoweringMode::Placeholder => {
+ ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
+ }
+ ParamLoweringMode::Variable => match args.param_idx(p.into()) {
+ Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
+ None => {
+ never!(
+ "Generic list doesn't contain this param: {:?}, {}, {:?}",
+ args,
+ path,
+ p
+ );
+ return None;
+ }
+ },
+ };
+ Some(ConstData { ty, value }.intern(Interner))
+ }
+ _ => None,
+ }
+}
+
+pub fn unknown_const(ty: Ty) -> Const {
+ ConstData {
+ ty,
+ value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: ConstScalar::Unknown }),
+ }
+ .intern(Interner)
+}
+
+pub fn unknown_const_as_generic(ty: Ty) -> GenericArg {
+ GenericArgData::Const(unknown_const(ty)).intern(Interner)
+}
+
+/// Interns a constant scalar with the given type
+pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
+ ConstData { ty, value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: value }) }
+ .intern(Interner)
+}
+
+/// Interns a possibly-unknown target usize
+pub fn usize_const(value: Option<u128>) -> Const {
+ intern_const_scalar(value.map_or(ConstScalar::Unknown, ConstScalar::UInt), TyBuilder::usize())
+}
+
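+/// Cycle recovery for the `const_eval` query: cyclic constant definitions are
+/// reported as `ConstEvalError::Loop`.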
+pub(crate) fn const_eval_recover(
+ _: &dyn HirDatabase,
+ _: &[String],
+ _: &ConstId,
+) -> Result<ComputedExpr, ConstEvalError> {
+ Err(ConstEvalError::Loop)
+}
+
+pub(crate) fn const_eval_query(
+ db: &dyn HirDatabase,
+ const_id: ConstId,
+) -> Result<ComputedExpr, ConstEvalError> {
+ let def = const_id.into();
+ let body = db.body(def);
+ let infer = &db.infer(def);
+ let result = eval_const(
+ body.body_expr,
+ &mut ConstEvalCtx {
+ db,
+ owner: const_id.into(),
+ exprs: &body.exprs,
+ pats: &body.pats,
+ local_data: HashMap::default(),
+ infer,
+ },
+ );
+ result
+}
+
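+/// Lowers an expression to a `Const` during inference: paths to const generic
+/// parameters go through `path_to_const`, everything else through
+/// `eval_const`, falling back to `ConstScalar::Unknown` on failure. The result
+/// is interned with a `usize` type.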
+pub(crate) fn eval_to_const<'a>(
+ expr: Idx<Expr>,
+ mode: ParamLoweringMode,
+ ctx: &mut InferenceContext<'a>,
+ args: impl FnOnce() -> Generics,
+ debruijn: DebruijnIndex,
+) -> Const {
+ if let Expr::Path(p) = &ctx.body.exprs[expr] {
+ let db = ctx.db;
+ let resolver = &ctx.resolver;
+ if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
+ return c;
+ }
+ }
+ let body = ctx.body.clone();
+ let mut ctx = ConstEvalCtx {
+ db: ctx.db,
+ owner: ctx.owner,
+ exprs: &body.exprs,
+ pats: &body.pats,
+ local_data: HashMap::default(),
+ infer: &ctx.result,
+ };
+ let computed_expr = eval_const(expr, &mut ctx);
+ let const_scalar = match computed_expr {
+ Ok(ComputedExpr::Literal(literal)) => literal.into(),
+ _ => ConstScalar::Unknown,
+ };
+ intern_const_scalar(const_scalar, TyBuilder::usize())
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
new file mode 100644
index 000000000..4a052851a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -0,0 +1,148 @@
+use base_db::fixture::WithFixture;
+use hir_def::{db::DefDatabase, expr::Literal};
+
+use crate::{consteval::ComputedExpr, db::HirDatabase, test_db::TestDB};
+
+use super::ConstEvalError;
+
+fn check_fail(ra_fixture: &str, error: ConstEvalError) {
+ assert_eq!(eval_goal(ra_fixture), Err(error));
+}
+
+fn check_number(ra_fixture: &str, answer: i128) {
+ let r = eval_goal(ra_fixture).unwrap();
+ match r {
+ ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer),
+ ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128),
+ x => panic!("Expected number but found {:?}", x),
+ }
+}
+
+fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
+ let (db, file_id) = TestDB::with_single_file(ra_fixture);
+ let module_id = db.module_for_file(file_id);
+ let def_map = module_id.def_map(&db);
+ let scope = &def_map[module_id.local_id].scope;
+ let const_id = scope
+ .declarations()
+ .into_iter()
+ .find_map(|x| match x {
+ hir_def::ModuleDefId::ConstId(x) => {
+ if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {
+ Some(x)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ })
+ .unwrap();
+ db.const_eval(const_id)
+}
+
+#[test]
+fn add() {
+ check_number(r#"const GOAL: usize = 2 + 2;"#, 4);
+}
+
+#[test]
+fn bit_op() {
+ check_number(r#"const GOAL: u8 = !0 & !(!0 >> 1)"#, 128);
+ check_number(r#"const GOAL: i8 = !0 & !(!0 >> 1)"#, 0);
+ // FIXME: rustc evaluates this to -128
+ check_fail(
+ r#"const GOAL: i8 = 1 << 7"#,
+ ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
+ );
+ check_fail(
+ r#"const GOAL: i8 = 1 << 8"#,
+ ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
+ );
+}
+
+#[test]
+fn locals() {
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let a = 3 + 2;
+ let b = a * a;
+ b
+ };
+ "#,
+ 25,
+ );
+}
+
+#[test]
+fn consts() {
+ check_number(
+ r#"
+ const F1: i32 = 1;
+ const F3: i32 = 3 * F2;
+ const F2: i32 = 2 * F1;
+ const GOAL: i32 = F3;
+ "#,
+ 6,
+ );
+}
+
+#[test]
+fn const_loop() {
+ check_fail(
+ r#"
+ const F1: i32 = 1 * F3;
+ const F3: i32 = 3 * F2;
+ const F2: i32 = 2 * F1;
+ const GOAL: i32 = F3;
+ "#,
+ ConstEvalError::Loop,
+ );
+}
+
+#[test]
+fn const_impl_assoc() {
+ check_number(
+ r#"
+ struct U5;
+ impl U5 {
+ const VAL: usize = 5;
+ }
+ const GOAL: usize = U5::VAL;
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn const_generic_subst() {
+ // FIXME: this should evaluate to 5
+ check_fail(
+ r#"
+ struct Adder<const N: usize, const M: usize>;
+ impl<const N: usize, const M: usize> Adder<N, M> {
+ const VAL: usize = N + M;
+ }
+ const GOAL: usize = Adder::<2, 3>::VAL;
+ "#,
+ ConstEvalError::NotSupported("const generic without substitution"),
+ );
+}
+
+#[test]
+fn const_trait_assoc() {
+ // FIXME: this should evaluate to 0
+ check_fail(
+ r#"
+ struct U0;
+ trait ToConst {
+ const VAL: usize;
+ }
+ impl ToConst for U0 {
+ const VAL: usize = 0;
+ }
+ const GOAL: usize = U0::VAL;
+ "#,
+ ConstEvalError::IncompleteExpr,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
new file mode 100644
index 000000000..b385b1caf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -0,0 +1,225 @@
+//! The home of `HirDatabase`, which is the Salsa database containing all the
+//! type inference-related queries.
+
+use std::sync::Arc;
+
+use arrayvec::ArrayVec;
+use base_db::{impl_intern_key, salsa, CrateId, Upcast};
+use hir_def::{
+ db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, FunctionId,
+ GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId,
+};
+use la_arena::ArenaMap;
+
+use crate::{
+ chalk_db,
+ consteval::{ComputedExpr, ConstEvalError},
+ method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
+ Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig,
+ QuantifiedWhereClause, ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
+};
+use hir_expand::name::Name;
+
+#[salsa::query_group(HirDatabaseStorage)]
+pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
+ #[salsa::invoke(infer_wait)]
+ #[salsa::transparent]
+ fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+ #[salsa::invoke(crate::infer::infer_query)]
+ fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+ #[salsa::invoke(crate::lower::ty_query)]
+ #[salsa::cycle(crate::lower::ty_recover)]
+ fn ty(&self, def: TyDefId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::value_ty_query)]
+ fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::impl_self_ty_query)]
+ #[salsa::cycle(crate::lower::impl_self_ty_recover)]
+ fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::const_param_ty_query)]
+ fn const_param_ty(&self, def: ConstParamId) -> Ty;
+
+ #[salsa::invoke(crate::consteval::const_eval_query)]
+ #[salsa::cycle(crate::consteval::const_eval_recover)]
+ fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
+
+ #[salsa::invoke(crate::lower::impl_trait_query)]
+ fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
+
+ #[salsa::invoke(crate::lower::field_types_query)]
+ fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
+
+ #[salsa::invoke(crate::lower::callable_item_sig)]
+ fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
+
+ #[salsa::invoke(crate::lower::return_type_impl_traits)]
+ fn return_type_impl_traits(
+ &self,
+ def: FunctionId,
+ ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
+
+ #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
+ #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
+ fn generic_predicates_for_param(
+ &self,
+ def: GenericDefId,
+ param_id: TypeOrConstParamId,
+ assoc_name: Option<Name>,
+ ) -> Arc<[Binders<QuantifiedWhereClause>]>;
+
+ #[salsa::invoke(crate::lower::generic_predicates_query)]
+ fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>;
+
+ #[salsa::invoke(crate::lower::trait_environment_query)]
+ fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
+
+ #[salsa::invoke(crate::lower::generic_defaults_query)]
+ #[salsa::cycle(crate::lower::generic_defaults_recover)]
+ fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<GenericArg>]>;
+
+ #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
+ fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
+
+ #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
+ fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
+
+ /// Collects all crates in the dependency graph that have impls for the
+ /// given fingerprint. This is only used for primitive types; for
+ /// user-defined types we just look at the crate where the type is defined.
+ #[salsa::invoke(crate::method_resolution::inherent_impl_crates_query)]
+ fn inherent_impl_crates(&self, krate: CrateId, fp: TyFingerprint) -> ArrayVec<CrateId, 2>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
+ fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
+ fn trait_impls_in_block(&self, krate: BlockId) -> Option<Arc<TraitImpls>>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
+ fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+ // Interned IDs for Chalk integration
+ #[salsa::interned]
+ fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
+ #[salsa::interned]
+ fn intern_type_or_const_param_id(
+ &self,
+ param_id: TypeOrConstParamId,
+ ) -> InternedTypeOrConstParamId;
+ #[salsa::interned]
+ fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
+ #[salsa::interned]
+ fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
+ #[salsa::interned]
+ fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId;
+
+ #[salsa::invoke(chalk_db::associated_ty_data_query)]
+ fn associated_ty_data(&self, id: chalk_db::AssocTypeId) -> Arc<chalk_db::AssociatedTyDatum>;
+
+ #[salsa::invoke(chalk_db::trait_datum_query)]
+ fn trait_datum(&self, krate: CrateId, trait_id: chalk_db::TraitId)
+ -> Arc<chalk_db::TraitDatum>;
+
+ #[salsa::invoke(chalk_db::struct_datum_query)]
+ fn struct_datum(
+ &self,
+ krate: CrateId,
+ struct_id: chalk_db::AdtId,
+ ) -> Arc<chalk_db::StructDatum>;
+
+ #[salsa::invoke(chalk_db::impl_datum_query)]
+ fn impl_datum(&self, krate: CrateId, impl_id: chalk_db::ImplId) -> Arc<chalk_db::ImplDatum>;
+
+ #[salsa::invoke(chalk_db::fn_def_datum_query)]
+ fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> Arc<chalk_db::FnDefDatum>;
+
+ #[salsa::invoke(chalk_db::fn_def_variance_query)]
+ fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
+
+ #[salsa::invoke(chalk_db::adt_variance_query)]
+ fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
+
+ #[salsa::invoke(chalk_db::associated_ty_value_query)]
+ fn associated_ty_value(
+ &self,
+ krate: CrateId,
+ id: chalk_db::AssociatedTyValueId,
+ ) -> Arc<chalk_db::AssociatedTyValue>;
+
+ #[salsa::invoke(trait_solve_wait)]
+ #[salsa::transparent]
+ fn trait_solve(
+ &self,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+ ) -> Option<crate::Solution>;
+
+ #[salsa::invoke(crate::traits::trait_solve_query)]
+ fn trait_solve_query(
+ &self,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+ ) -> Option<crate::Solution>;
+
+ #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
+ fn program_clauses_for_chalk_env(
+ &self,
+ krate: CrateId,
+ env: chalk_ir::Environment<Interner>,
+ ) -> chalk_ir::ProgramClauses<Interner>;
+}
+
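+/// Transparent wrapper around the `infer_query` query that records a profiling
+/// span named after the item being inferred.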
+fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+ let _p = profile::span("infer:wait").detail(|| match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
+ DefWithBodyId::StaticId(it) => db.static_data(it).name.clone().to_string(),
+ DefWithBodyId::ConstId(it) => {
+ db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+ }
+ });
+ db.infer_query(def)
+}
+
+fn trait_solve_wait(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+) -> Option<crate::Solution> {
+ let _p = profile::span("trait_solve::wait");
+ db.trait_solve_query(krate, goal)
+}
+
+#[test]
+fn hir_database_is_object_safe() {
+ fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedTypeOrConstParamId(salsa::InternId);
+impl_intern_key!(InternedTypeOrConstParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedLifetimeParamId(salsa::InternId);
+impl_intern_key!(InternedLifetimeParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedConstParamId(salsa::InternId);
+impl_intern_key!(InternedConstParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedOpaqueTyId(salsa::InternId);
+impl_intern_key!(InternedOpaqueTyId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedClosureId(salsa::InternId);
+impl_intern_key!(InternedClosureId);
+
+/// This exists just for Chalk, because Chalk has only a single `FnDefId` where
+/// we have different IDs for struct and enum variant constructors.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct InternedCallableDefId(salsa::InternId);
+impl_intern_key!(InternedCallableDefId);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
new file mode 100644
index 000000000..37eb06be1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -0,0 +1,13 @@
+//! Type inference-based diagnostics.
+mod expr;
+mod match_check;
+mod unsafe_check;
+mod decl_check;
+
+pub use crate::diagnostics::{
+ decl_check::{incorrect_case, IncorrectCase},
+ expr::{
+ record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
+ },
+ unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
+};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
new file mode 100644
index 000000000..f7031a854
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -0,0 +1,701 @@
+//! Provides validators for names of declarations.
+//!
+//! This includes the following items:
+//!
+//! - variable bindings (e.g. `let x = foo();`)
+//! - struct fields (e.g. `struct Foo { field: u8 }`)
+//! - enum variants (e.g. `enum Foo { Variant { field: u8 } }`)
+//! - function/method arguments (e.g. `fn foo(arg: u8)`)
+//! - constants (e.g. `const FOO: u8 = 10;`)
+//! - static items (e.g. `static FOO: u8 = 10;`)
+//! - match arm bindings (e.g. `foo @ Some(_)`)
+
+mod case_conv;
+
+use std::fmt;
+
+use base_db::CrateId;
+use hir_def::{
+ adt::VariantData,
+ expr::{Pat, PatId},
+ src::HasSource,
+ AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, StaticId,
+ StructId,
+};
+use hir_expand::{
+ name::{AsName, Name},
+ HirFileId,
+};
+use stdx::{always, never};
+use syntax::{
+ ast::{self, HasName},
+ AstNode, AstPtr,
+};
+
+use crate::db::HirDatabase;
+
+use self::case_conv::{to_camel_case, to_lower_snake_case, to_upper_snake_case};
+
+mod allow {
+ pub(super) const BAD_STYLE: &str = "bad_style";
+ pub(super) const NONSTANDARD_STYLE: &str = "nonstandard_style";
+ pub(super) const NON_SNAKE_CASE: &str = "non_snake_case";
+ pub(super) const NON_UPPER_CASE_GLOBAL: &str = "non_upper_case_globals";
+ pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
+}
+
+pub fn incorrect_case(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ owner: ModuleDefId,
+) -> Vec<IncorrectCase> {
+ let _p = profile::span("validate_module_item");
+ let mut validator = DeclValidator::new(db, krate);
+ validator.validate_item(owner);
+ validator.sink
+}
+
+#[derive(Debug)]
+pub enum CaseType {
+ // `some_var`
+ LowerSnakeCase,
+ // `SOME_CONST`
+ UpperSnakeCase,
+ // `SomeStruct`
+ UpperCamelCase,
+}
+
+impl fmt::Display for CaseType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let repr = match self {
+ CaseType::LowerSnakeCase => "snake_case",
+ CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE",
+ CaseType::UpperCamelCase => "CamelCase",
+ };
+
+ repr.fmt(f)
+ }
+}
+
+#[derive(Debug)]
+pub enum IdentType {
+ Constant,
+ Enum,
+ Field,
+ Function,
+ Parameter,
+ StaticVariable,
+ Structure,
+ Variable,
+ Variant,
+}
+
+impl fmt::Display for IdentType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let repr = match self {
+ IdentType::Constant => "Constant",
+ IdentType::Enum => "Enum",
+ IdentType::Field => "Field",
+ IdentType::Function => "Function",
+ IdentType::Parameter => "Parameter",
+ IdentType::StaticVariable => "Static variable",
+ IdentType::Structure => "Structure",
+ IdentType::Variable => "Variable",
+ IdentType::Variant => "Variant",
+ };
+
+ repr.fmt(f)
+ }
+}
+
+#[derive(Debug)]
+pub struct IncorrectCase {
+ pub file: HirFileId,
+ pub ident: AstPtr<ast::Name>,
+ pub expected_case: CaseType,
+ pub ident_type: IdentType,
+ pub ident_text: String,
+ pub suggested_text: String,
+}
+
+pub(super) struct DeclValidator<'a> {
+ db: &'a dyn HirDatabase,
+ krate: CrateId,
+ pub(super) sink: Vec<IncorrectCase>,
+}
+
+#[derive(Debug)]
+struct Replacement {
+ current_name: Name,
+ suggested_text: String,
+ expected_case: CaseType,
+}
+
+impl<'a> DeclValidator<'a> {
+ pub(super) fn new(db: &'a dyn HirDatabase, krate: CrateId) -> DeclValidator<'a> {
+ DeclValidator { db, krate, sink: Vec::new() }
+ }
+
+ pub(super) fn validate_item(&mut self, item: ModuleDefId) {
+ match item {
+ ModuleDefId::FunctionId(func) => self.validate_func(func),
+ ModuleDefId::AdtId(adt) => self.validate_adt(adt),
+ ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
+ ModuleDefId::StaticId(static_id) => self.validate_static(static_id),
+ _ => (),
+ }
+ }
+
+ fn validate_adt(&mut self, adt: AdtId) {
+ match adt {
+ AdtId::StructId(struct_id) => self.validate_struct(struct_id),
+ AdtId::EnumId(enum_id) => self.validate_enum(enum_id),
+ AdtId::UnionId(_) => {
+ // FIXME: Unions aren't yet supported by this validator.
+ }
+ }
+ }
+
+ /// Checks whether not following the convention is allowed for this item.
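+ /// Returns `true` if the item itself is annotated with `no_mangle`, or if
+ /// the item or one of its ancestors carries an `#[allow(...)]` attribute
+ /// covering `allow_name`, `bad_style` or `nonstandard_style`.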
+ fn allowed(&self, id: AttrDefId, allow_name: &str, recursing: bool) -> bool {
+ let is_allowed = |def_id| {
+ let attrs = self.db.attrs(def_id);
+ // Don't bug the user about items directly annotated with no_mangle; they can't do anything about it.
+ (!recursing && attrs.by_key("no_mangle").exists())
+ || attrs.by_key("allow").tt_values().any(|tt| {
+ let allows = tt.to_string();
+ allows.contains(allow_name)
+ || allows.contains(allow::BAD_STYLE)
+ || allows.contains(allow::NONSTANDARD_STYLE)
+ })
+ };
+
+ is_allowed(id)
+ // go upwards one step or give up
+ || match id {
+ AttrDefId::ModuleId(m) => m.containing_module(self.db.upcast()).map(|v| v.into()),
+ AttrDefId::FunctionId(f) => Some(f.lookup(self.db.upcast()).container.into()),
+ AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
+ AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()),
+ AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()),
+ AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
+ AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
+ // These warnings should not explore macro definitions at all
+ AttrDefId::MacroId(_) => None,
+ AttrDefId::AdtId(aid) => match aid {
+ AdtId::StructId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
+ AdtId::EnumId(eid) => Some(eid.lookup(self.db.upcast()).container.into()),
+ // Unions aren't yet supported
+ AdtId::UnionId(_) => None,
+ },
+ AttrDefId::FieldId(_) => None,
+ AttrDefId::EnumVariantId(_) => None,
+ AttrDefId::TypeAliasId(_) => None,
+ AttrDefId::GenericParamId(_) => None,
+ }
+ .map(|mid| self.allowed(mid, allow_name, true))
+ .unwrap_or(false)
+ }
+
+ fn validate_func(&mut self, func: FunctionId) {
+ let data = self.db.function_data(func);
+ if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
+ cov_mark::hit!(extern_func_incorrect_case_ignored);
+ return;
+ }
+
+ let body = self.db.body(func.into());
+
+ // Recursively validate inner scope items, such as static variables and constants.
+ for (_, block_def_map) in body.blocks(self.db.upcast()) {
+ for (_, module) in block_def_map.modules() {
+ for def_id in module.scope.declarations() {
+ let mut validator = DeclValidator::new(self.db, self.krate);
+ validator.validate_item(def_id);
+ }
+ }
+ }
+
+ // Check whether non-snake case identifiers are allowed for this function.
+ if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
+ return;
+ }
+
+ // Check the function name.
+ let function_name = data.name.to_string();
+ let fn_name_replacement = to_lower_snake_case(&function_name).map(|new_name| Replacement {
+ current_name: data.name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::LowerSnakeCase,
+ });
+
+ // Check the patterns inside the function body.
+ // This includes function parameters.
+ let pats_replacements = body
+ .pats
+ .iter()
+ .filter_map(|(id, pat)| match pat {
+ Pat::Bind { name, .. } => Some((id, name)),
+ _ => None,
+ })
+ .filter_map(|(id, bind_name)| {
+ Some((
+ id,
+ Replacement {
+ current_name: bind_name.clone(),
+ suggested_text: to_lower_snake_case(&bind_name.to_string())?,
+ expected_case: CaseType::LowerSnakeCase,
+ },
+ ))
+ })
+ .collect();
+
+ // If there is at least one element to warn about, go to the source map and generate a warning.
+ if let Some(fn_name_replacement) = fn_name_replacement {
+ self.create_incorrect_case_diagnostic_for_func(func, fn_name_replacement);
+ }
+
+ self.create_incorrect_case_diagnostic_for_variables(func, pats_replacements);
+ }
+
+ /// Given the information about incorrect names in the function declaration, looks up the exact
+ /// locations in the source code and adds diagnostics to the sink.
+ fn create_incorrect_case_diagnostic_for_func(
+ &mut self,
+ func: FunctionId,
+ fn_name_replacement: Replacement,
+ ) {
+ let fn_loc = func.lookup(self.db.upcast());
+ let fn_src = fn_loc.source(self.db.upcast());
+
+ // Diagnostic for function name.
+ let ast_ptr = match fn_src.value.name() {
+ Some(name) => name,
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a function without a name: {:?}",
+ fn_name_replacement,
+ fn_src
+ );
+ return;
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: fn_src.file_id,
+ ident_type: IdentType::Function,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: fn_name_replacement.expected_case,
+ ident_text: fn_name_replacement.current_name.to_string(),
+ suggested_text: fn_name_replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+
+ /// Given the information about incorrect variable names, looks up the exact locations in the
+ /// source code and adds diagnostics to the sink.
+ fn create_incorrect_case_diagnostic_for_variables(
+ &mut self,
+ func: FunctionId,
+ pats_replacements: Vec<(PatId, Replacement)>,
+ ) {
+ // XXX: only look at the source map if we do have replacements to report
+ if pats_replacements.is_empty() {
+ return;
+ }
+
+ let (_, source_map) = self.db.body_with_source_map(func.into());
+
+ for (id, replacement) in pats_replacements {
+ if let Ok(source_ptr) = source_map.pat_syntax(id) {
+ if let Some(expr) = source_ptr.value.as_ref().left() {
+ let root = source_ptr.file_syntax(self.db.upcast());
+ if let ast::Pat::IdentPat(ident_pat) = expr.to_node(&root) {
+ let parent = match ident_pat.syntax().parent() {
+ Some(parent) => parent,
+ None => continue,
+ };
+ let name_ast = match ident_pat.name() {
+ Some(name_ast) => name_ast,
+ None => continue,
+ };
+
+ let is_param = ast::Param::can_cast(parent.kind());
+
+ // We have to check that it's either a `let var = ...` or a `var @ Variant(_)` statement,
+ // because e.g. match arms are patterns as well.
+ // In other words, we check that it's a named variable binding.
+ let is_binding = ast::LetStmt::can_cast(parent.kind())
+ || (ast::MatchArm::can_cast(parent.kind())
+ && ident_pat.at_token().is_some());
+ if !(is_param || is_binding) {
+ // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
+ continue;
+ }
+
+ let ident_type =
+ if is_param { IdentType::Parameter } else { IdentType::Variable };
+
+ let diagnostic = IncorrectCase {
+ file: source_ptr.file_id,
+ ident_type,
+ ident: AstPtr::new(&name_ast),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+ }
+ }
+ }
+ }
+
+ fn validate_struct(&mut self, struct_id: StructId) {
+ let data = self.db.struct_data(struct_id);
+
+ let non_camel_case_allowed =
+ self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false);
+ let non_snake_case_allowed = self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false);
+
+ // Check the structure name.
+ let struct_name = data.name.to_string();
+ let struct_name_replacement = if !non_camel_case_allowed {
+ to_camel_case(&struct_name).map(|new_name| Replacement {
+ current_name: data.name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperCamelCase,
+ })
+ } else {
+ None
+ };
+
+ // Check the field names.
+ let mut struct_fields_replacements = Vec::new();
+
+ if !non_snake_case_allowed {
+ if let VariantData::Record(fields) = data.variant_data.as_ref() {
+ for (_, field) in fields.iter() {
+ let field_name = field.name.to_string();
+ if let Some(new_name) = to_lower_snake_case(&field_name) {
+ let replacement = Replacement {
+ current_name: field.name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::LowerSnakeCase,
+ };
+ struct_fields_replacements.push(replacement);
+ }
+ }
+ }
+ }
+
+ // If there is at least one element to warn about, go to the source map and generate a warning.
+ self.create_incorrect_case_diagnostic_for_struct(
+ struct_id,
+ struct_name_replacement,
+ struct_fields_replacements,
+ );
+ }
+
+ /// Given the information about incorrect names in the struct declaration, looks up the exact
+ /// locations in the source code and adds diagnostics to the sink.
+ fn create_incorrect_case_diagnostic_for_struct(
+ &mut self,
+ struct_id: StructId,
+ struct_name_replacement: Option<Replacement>,
+ struct_fields_replacements: Vec<Replacement>,
+ ) {
+ // XXX: Only look at sources if we do have incorrect names.
+ if struct_name_replacement.is_none() && struct_fields_replacements.is_empty() {
+ return;
+ }
+
+ let struct_loc = struct_id.lookup(self.db.upcast());
+ let struct_src = struct_loc.source(self.db.upcast());
+
+ if let Some(replacement) = struct_name_replacement {
+ let ast_ptr = match struct_src.value.name() {
+ Some(name) => name,
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a structure without a name: {:?}",
+ replacement,
+ struct_src
+ );
+ return;
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: struct_src.file_id,
+ ident_type: IdentType::Structure,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+
+ let struct_fields_list = match struct_src.value.field_list() {
+ Some(ast::FieldList::RecordFieldList(fields)) => fields,
+ _ => {
+ always!(
+ struct_fields_replacements.is_empty(),
+ "Replacements ({:?}) were generated for a structure fields which had no fields list: {:?}",
+ struct_fields_replacements,
+ struct_src
+ );
+ return;
+ }
+ };
+ let mut struct_fields_iter = struct_fields_list.fields();
+ for field_to_rename in struct_fields_replacements {
+ // We assume that the fields in the replacement list are in the same order as in the
+ // actual field list, but that some of them (the ones that are already named correctly) are skipped.
+ let ast_ptr = loop {
+ match struct_fields_iter.next().and_then(|field| field.name()) {
+ Some(field_name) => {
+ if field_name.as_name() == field_to_rename.current_name {
+ break field_name;
+ }
+ }
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a structure field which was not found: {:?}",
+ field_to_rename, struct_src
+ );
+ return;
+ }
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: struct_src.file_id,
+ ident_type: IdentType::Field,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: field_to_rename.expected_case,
+ ident_text: field_to_rename.current_name.to_string(),
+ suggested_text: field_to_rename.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+ }
+
+ fn validate_enum(&mut self, enum_id: EnumId) {
+ let data = self.db.enum_data(enum_id);
+
+ // Check whether non-camel case names are allowed for this enum.
+ if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
+ return;
+ }
+
+ // Check the enum name.
+ let enum_name = data.name.to_string();
+ let enum_name_replacement = to_camel_case(&enum_name).map(|new_name| Replacement {
+ current_name: data.name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperCamelCase,
+ });
+
+ // Check the field names.
+ let enum_fields_replacements = data
+ .variants
+ .iter()
+ .filter_map(|(_, variant)| {
+ Some(Replacement {
+ current_name: variant.name.clone(),
+ suggested_text: to_camel_case(&variant.name.to_string())?,
+ expected_case: CaseType::UpperCamelCase,
+ })
+ })
+ .collect();
+
+ // If there is at least one element to warn about, go to the source map and generate a warning.
+ self.create_incorrect_case_diagnostic_for_enum(
+ enum_id,
+ enum_name_replacement,
+ enum_fields_replacements,
+ )
+ }
+
+ /// Given the information about incorrect names in the enum declaration, looks up the exact
+ /// locations in the source code and adds diagnostics to the sink.
+ fn create_incorrect_case_diagnostic_for_enum(
+ &mut self,
+ enum_id: EnumId,
+ enum_name_replacement: Option<Replacement>,
+ enum_variants_replacements: Vec<Replacement>,
+ ) {
+ // XXX: only look at sources if we do have incorrect names
+ if enum_name_replacement.is_none() && enum_variants_replacements.is_empty() {
+ return;
+ }
+
+ let enum_loc = enum_id.lookup(self.db.upcast());
+ let enum_src = enum_loc.source(self.db.upcast());
+
+ if let Some(replacement) = enum_name_replacement {
+ let ast_ptr = match enum_src.value.name() {
+ Some(name) => name,
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a enum without a name: {:?}",
+ replacement,
+ enum_src
+ );
+ return;
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: enum_src.file_id,
+ ident_type: IdentType::Enum,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+
+ let enum_variants_list = match enum_src.value.variant_list() {
+ Some(variants) => variants,
+ _ => {
+ always!(
+ enum_variants_replacements.is_empty(),
+ "Replacements ({:?}) were generated for a enum variants which had no fields list: {:?}",
+ enum_variants_replacements,
+ enum_src
+ );
+ return;
+ }
+ };
+ let mut enum_variants_iter = enum_variants_list.variants();
+ for variant_to_rename in enum_variants_replacements {
+ // We assume that the variants in the replacement list are in the same order as in the
+ // actual variant list, but that some of them (the ones that are already named correctly) are skipped.
+ let ast_ptr = loop {
+ match enum_variants_iter.next().and_then(|v| v.name()) {
+ Some(variant_name) => {
+ if variant_name.as_name() == variant_to_rename.current_name {
+ break variant_name;
+ }
+ }
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a enum variant which was not found: {:?}",
+ variant_to_rename, enum_src
+ );
+ return;
+ }
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: enum_src.file_id,
+ ident_type: IdentType::Variant,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: variant_to_rename.expected_case,
+ ident_text: variant_to_rename.current_name.to_string(),
+ suggested_text: variant_to_rename.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+ }
+
+ fn validate_const(&mut self, const_id: ConstId) {
+ let data = self.db.const_data(const_id);
+
+ if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
+ return;
+ }
+
+ let name = match &data.name {
+ Some(name) => name,
+ None => return,
+ };
+
+ let const_name = name.to_string();
+ let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
+ Replacement {
+ current_name: name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperSnakeCase,
+ }
+ } else {
+ // Nothing to do here.
+ return;
+ };
+
+ let const_loc = const_id.lookup(self.db.upcast());
+ let const_src = const_loc.source(self.db.upcast());
+
+ let ast_ptr = match const_src.value.name() {
+ Some(name) => name,
+ None => return,
+ };
+
+ let diagnostic = IncorrectCase {
+ file: const_src.file_id,
+ ident_type: IdentType::Constant,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+
+ fn validate_static(&mut self, static_id: StaticId) {
+ let data = self.db.static_data(static_id);
+ if data.is_extern {
+ cov_mark::hit!(extern_static_incorrect_case_ignored);
+ return;
+ }
+
+ if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
+ return;
+ }
+
+ let name = &data.name;
+
+ let static_name = name.to_string();
+ let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
+ Replacement {
+ current_name: name.clone(),
+ suggested_text: new_name,
+ expected_case: CaseType::UpperSnakeCase,
+ }
+ } else {
+ // Nothing to do here.
+ return;
+ };
+
+ let static_loc = static_id.lookup(self.db.upcast());
+ let static_src = static_loc.source(self.db.upcast());
+
+ let ast_ptr = match static_src.value.name() {
+ Some(name) => name,
+ None => return,
+ };
+
+ let diagnostic = IncorrectCase {
+ file: static_src.file_id,
+ ident_type: IdentType::StaticVariable,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.to_string(),
+ suggested_text: replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
new file mode 100644
index 000000000..88d607194
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -0,0 +1,199 @@
+//! Functions for string case manipulation, such as detecting the identifier case,
+//! and converting it into appropriate form.
+
+// Code was taken from rustc at commit 89fdb30,
+// from file /compiler/rustc_lint/src/nonstandard_style.rs
+
+/// Converts an identifier to an UpperCamelCase form.
+/// Returns `None` if the string is already in UpperCamelCase.
+pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
+ if is_camel_case(ident) {
+ return None;
+ }
+
+ // Taken from rustc.
+ let ret = ident
+ .trim_matches('_')
+ .split('_')
+ .filter(|component| !component.is_empty())
+ .map(|component| {
+ let mut camel_cased_component = String::with_capacity(component.len());
+
+ let mut new_word = true;
+ let mut prev_is_lower_case = true;
+
+ for c in component.chars() {
+ // Preserve the case if an uppercase letter follows a lowercase letter, so that
+ // `camelCase` is converted to `CamelCase`.
+ if prev_is_lower_case && c.is_uppercase() {
+ new_word = true;
+ }
+
+ if new_word {
+ camel_cased_component.extend(c.to_uppercase());
+ } else {
+ camel_cased_component.extend(c.to_lowercase());
+ }
+
+ prev_is_lower_case = c.is_lowercase();
+ new_word = false;
+ }
+
+ camel_cased_component
+ })
+ .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
+ // separate two components with an underscore if their boundary cannot
+ // be distinguished using an uppercase/lowercase case distinction
+ let join = prev
+ .and_then(|prev| {
+ let f = next.chars().next()?;
+ let l = prev.chars().last()?;
+ Some(!char_has_case(l) && !char_has_case(f))
+ })
+ .unwrap_or(false);
+ (acc + if join { "_" } else { "" } + &next, Some(next))
+ })
+ .0;
+ Some(ret)
+}
+
+/// Converts an identifier to a lower_snake_case form.
+/// Returns `None` if the string is already in lower_snake_case.
+pub(crate) fn to_lower_snake_case(ident: &str) -> Option<String> {
+ if is_lower_snake_case(ident) {
+ return None;
+ } else if is_upper_snake_case(ident) {
+ return Some(ident.to_lowercase());
+ }
+
+ Some(stdx::to_lower_snake_case(ident))
+}
+
+/// Converts an identifier to an UPPER_SNAKE_CASE form.
+/// Returns `None` if the string is already in UPPER_SNAKE_CASE.
+pub(crate) fn to_upper_snake_case(ident: &str) -> Option<String> {
+ if is_upper_snake_case(ident) {
+ return None;
+ } else if is_lower_snake_case(ident) {
+ return Some(ident.to_uppercase());
+ }
+
+ Some(stdx::to_upper_snake_case(ident))
+}
+
+// Taken from rustc.
+// Modified by replacing the use of unstable feature `array_windows`.
+fn is_camel_case(name: &str) -> bool {
+ let name = name.trim_matches('_');
+ if name.is_empty() {
+ return true;
+ }
+
+ let mut fst = None;
+ // start with a non-lowercase letter rather than non-uppercase
+ // ones (some scripts don't have a concept of upper/lowercase)
+ name.chars().next().map_or(true, |c| !c.is_lowercase())
+ && !name.contains("__")
+ && !name.chars().any(|snd| {
+ let ret = match fst {
+ None => false,
+ Some(fst) => char_has_case(fst) && snd == '_' || char_has_case(snd) && fst == '_',
+ };
+ fst = Some(snd);
+
+ ret
+ })
+}
+
+fn is_lower_snake_case(ident: &str) -> bool {
+ is_snake_case(ident, char::is_uppercase)
+}
+
+fn is_upper_snake_case(ident: &str) -> bool {
+ is_snake_case(ident, char::is_lowercase)
+}
+
+// Taken from rustc.
+// Modified to allow checking for both upper and lower snake case.
+fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
+ if ident.is_empty() {
+ return true;
+ }
+ let ident = ident.trim_matches('_');
+
+ let mut allow_underscore = true;
+ ident.chars().all(|c| {
+ allow_underscore = match c {
+ '_' if !allow_underscore => return false,
+ '_' => false,
+ // It would be more obvious to check for the correct case,
+ // but some characters do not have a case.
+ c if !wrong_case(c) => true,
+ _ => return false,
+ };
+ true
+ })
+}
+
+// Taken from rustc.
+fn char_has_case(c: char) -> bool {
+ c.is_lowercase() || c.is_uppercase()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use expect_test::{expect, Expect};
+
+ fn check<F: Fn(&str) -> Option<String>>(fun: F, input: &str, expect: Expect) {
+ // `None` is translated to an empty string, meaning that there is nothing to fix.
+ let output = fun(input).unwrap_or_default();
+
+ expect.assert_eq(&output);
+ }
+
+ #[test]
+ fn test_to_lower_snake_case() {
+ check(to_lower_snake_case, "lower_snake_case", expect![[""]]);
+ check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]);
+ check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]);
+ check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]);
+ check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]);
+ check(to_lower_snake_case, "a", expect![[""]]);
+ check(to_lower_snake_case, "abc", expect![[""]]);
+ check(to_lower_snake_case, "foo__bar", expect![["foo_bar"]]);
+ }
+
+ #[test]
+ fn test_to_camel_case() {
+ check(to_camel_case, "CamelCase", expect![[""]]);
+ check(to_camel_case, "CamelCase_", expect![[""]]);
+ check(to_camel_case, "_CamelCase", expect![[""]]);
+ check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]);
+ check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]);
+ check(to_camel_case, "UPPER_SNAKE_CASE", expect![["UpperSnakeCase"]]);
+ check(to_camel_case, "Weird_Case", expect![["WeirdCase"]]);
+ check(to_camel_case, "name", expect![["Name"]]);
+ check(to_camel_case, "A", expect![[""]]);
+ check(to_camel_case, "AABB", expect![[""]]);
+ // Taken from rustc: /compiler/rustc_lint/src/nonstandard_style/tests.rs
+ check(to_camel_case, "X86_64", expect![[""]]);
+ check(to_camel_case, "x86__64", expect![["X86_64"]]);
+ check(to_camel_case, "Abc_123", expect![["Abc123"]]);
+ check(to_camel_case, "A1_b2_c3", expect![["A1B2C3"]]);
+ }
+
+ #[test]
+ fn test_to_upper_snake_case() {
+ check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]);
+ check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]);
+ check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]);
+ check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]);
+ check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]);
+ check(to_upper_snake_case, "A", expect![[""]]);
+ check(to_upper_snake_case, "ABC", expect![[""]]);
+ check(to_upper_snake_case, "X86_64", expect![[""]]);
+ check(to_upper_snake_case, "FOO_BAr", expect![["FOO_BAR"]]);
+ check(to_upper_snake_case, "FOO__BAR", expect![["FOO_BAR"]]);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
new file mode 100644
index 000000000..642e03edd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -0,0 +1,416 @@
+//! Various diagnostics for expressions that are collected together in one pass
+//! through the body using inference results: mismatched arg counts, missing
+//! fields, etc.
+
+use std::fmt;
+use std::sync::Arc;
+
+use hir_def::{path::path, resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
+use hir_expand::name;
+use itertools::Either;
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+use typed_arena::Arena;
+
+use crate::{
+ db::HirDatabase,
+ diagnostics::match_check::{
+ self,
+ deconstruct_pat::DeconstructedPat,
+ usefulness::{compute_match_usefulness, MatchCheckCtx},
+ },
+ display::HirDisplay,
+ InferenceResult, Ty, TyExt,
+};
+
+pub(crate) use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, MatchArm, Pat, PatId},
+ LocalFieldId, VariantId,
+};
+
+pub enum BodyValidationDiagnostic {
+ RecordMissingFields {
+ record: Either<ExprId, PatId>,
+ variant: VariantId,
+ missed_fields: Vec<LocalFieldId>,
+ },
+ ReplaceFilterMapNextWithFindMap {
+ method_call_expr: ExprId,
+ },
+ MissingMatchArms {
+ match_expr: ExprId,
+ uncovered_patterns: String,
+ },
+}
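+
+// A rough sketch of source shapes that would produce each variant above (names such as
+// `Foo`, `a`, `b` and `f` are assumptions for illustration only):
+//   RecordMissingFields:             `Foo { a: 1 }` where `Foo` also declares a field `b`
+//   ReplaceFilterMapNextWithFindMap: `iter.filter_map(f).next()`
+//   MissingMatchArms:                `match Some(1) { None => {} }` (no arm covers `Some(_)`)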
+
+impl BodyValidationDiagnostic {
+ pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
+ let _p = profile::span("BodyValidationDiagnostic::collect");
+ let infer = db.infer(owner);
+ let mut validator = ExprValidator::new(owner, infer);
+ validator.validate_body(db);
+ validator.diagnostics
+ }
+}
+
+struct ExprValidator {
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ pub(super) diagnostics: Vec<BodyValidationDiagnostic>,
+}
+
+impl ExprValidator {
+ fn new(owner: DefWithBodyId, infer: Arc<InferenceResult>) -> ExprValidator {
+ ExprValidator { owner, infer, diagnostics: Vec::new() }
+ }
+
+ fn validate_body(&mut self, db: &dyn HirDatabase) {
+ let body = db.body(self.owner);
+ let mut filter_map_next_checker = None;
+
+ for (id, expr) in body.exprs.iter() {
+ if let Some((variant, missed_fields, true)) =
+ record_literal_missing_fields(db, &self.infer, id, expr)
+ {
+ self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
+ record: Either::Left(id),
+ variant,
+ missed_fields,
+ });
+ }
+
+ match expr {
+ Expr::Match { expr, arms } => {
+ self.validate_match(id, *expr, arms, db, self.infer.clone());
+ }
+ Expr::Call { .. } | Expr::MethodCall { .. } => {
+ self.validate_call(db, id, expr, &mut filter_map_next_checker);
+ }
+ _ => {}
+ }
+ }
+ for (id, pat) in body.pats.iter() {
+ if let Some((variant, missed_fields, true)) =
+ record_pattern_missing_fields(db, &self.infer, id, pat)
+ {
+ self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
+ record: Either::Right(id),
+ variant,
+ missed_fields,
+ });
+ }
+ }
+ }
+
+ fn validate_call(
+ &mut self,
+ db: &dyn HirDatabase,
+ call_id: ExprId,
+ expr: &Expr,
+ filter_map_next_checker: &mut Option<FilterMapNextChecker>,
+ ) {
+ // Check that the number of arguments matches the number of parameters.
+
+ // FIXME: Due to shortcomings in the current type system implementation, only emit this
+ // diagnostic if there are no type mismatches in the containing function.
+ if self.infer.expr_type_mismatches().next().is_some() {
+ return;
+ }
+
+ match expr {
+ Expr::MethodCall { receiver, .. } => {
+ let (callee, _) = match self.infer.method_resolution(call_id) {
+ Some(it) => it,
+ None => return,
+ };
+
+ if filter_map_next_checker
+ .get_or_insert_with(|| {
+ FilterMapNextChecker::new(&self.owner.resolver(db.upcast()), db)
+ })
+ .check(call_id, receiver, &callee)
+ .is_some()
+ {
+ self.diagnostics.push(
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
+ method_call_expr: call_id,
+ },
+ );
+ }
+ }
+ _ => return,
+ };
+ }
+
+ fn validate_match(
+ &mut self,
+ id: ExprId,
+ match_expr: ExprId,
+ arms: &[MatchArm],
+ db: &dyn HirDatabase,
+ infer: Arc<InferenceResult>,
+ ) {
+ let body = db.body(self.owner);
+
+ let match_expr_ty = &infer[match_expr];
+ if match_expr_ty.is_unknown() {
+ return;
+ }
+
+ let pattern_arena = Arena::new();
+ let cx = MatchCheckCtx {
+ module: self.owner.module(db.upcast()),
+ body: self.owner,
+ db,
+ pattern_arena: &pattern_arena,
+ };
+
+ let mut m_arms = Vec::with_capacity(arms.len());
+ let mut has_lowering_errors = false;
+ for arm in arms {
+ if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
+ // We only include patterns whose type matches the type
+ // of the match expression. If we had an InvalidMatchArmPattern
+ // diagnostic or similar we could raise that in an else
+ // block here.
+ //
+ // When comparing the types, we also have to consider that rustc
+ // will automatically de-reference the match expression type if
+ // necessary.
+ //
+ // FIXME we should use the type checker for this.
+ if (pat_ty == match_expr_ty
+ || match_expr_ty
+ .as_reference()
+ .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
+ .unwrap_or(false))
+ && types_of_subpatterns_do_match(arm.pat, &body, &infer)
+ {
+ // If we had a NotUsefulMatchArm diagnostic, we could
+ // check the usefulness of each pattern as we added it
+ // to the matrix here.
+ let m_arm = match_check::MatchArm {
+ pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors),
+ has_guard: arm.guard.is_some(),
+ };
+ m_arms.push(m_arm);
+ if !has_lowering_errors {
+ continue;
+ }
+ }
+ }
+
+ // If we can't resolve the type of a pattern, or the pattern type doesn't
+ // fit the match expression, we skip this diagnostic. Skipping the entire
+ // diagnostic rather than just not including this match arm is preferred
+ // to avoid the chance of false positives.
+ cov_mark::hit!(validate_match_bailed_out);
+ return;
+ }
+
+ let report = compute_match_usefulness(&cx, &m_arms, match_expr_ty);
+
+ // FIXME Report unreachable arms
+ // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
+
+ let witnesses = report.non_exhaustiveness_witnesses;
+ if !witnesses.is_empty() {
+ self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
+ match_expr: id,
+ uncovered_patterns: missing_match_arms(&cx, match_expr_ty, witnesses, arms),
+ });
+ }
+ }
+
+ fn lower_pattern<'p>(
+ &self,
+ cx: &MatchCheckCtx<'_, 'p>,
+ pat: PatId,
+ db: &dyn HirDatabase,
+ body: &Body,
+ have_errors: &mut bool,
+ ) -> &'p DeconstructedPat<'p> {
+ let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
+ let pattern = patcx.lower_pattern(pat);
+ let pattern = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
+ if !patcx.errors.is_empty() {
+ *have_errors = true;
+ }
+ pattern
+ }
+}
+
+struct FilterMapNextChecker {
+ filter_map_function_id: Option<hir_def::FunctionId>,
+ next_function_id: Option<hir_def::FunctionId>,
+ prev_filter_map_expr_id: Option<ExprId>,
+}
+
+impl FilterMapNextChecker {
+ fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self {
+ // Find and store the FunctionIds for Iterator::filter_map and Iterator::next
+ let iterator_path = path![core::iter::Iterator];
+ let mut filter_map_function_id = None;
+ let mut next_function_id = None;
+
+ if let Some(iterator_trait_id) = resolver.resolve_known_trait(db.upcast(), &iterator_path) {
+ let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
+ for item in iterator_trait_items.iter() {
+ if let (name, AssocItemId::FunctionId(id)) = item {
+ if *name == name![filter_map] {
+ filter_map_function_id = Some(*id);
+ }
+ if *name == name![next] {
+ next_function_id = Some(*id);
+ }
+ }
+ if filter_map_function_id.is_some() && next_function_id.is_some() {
+ break;
+ }
+ }
+ }
+ Self { filter_map_function_id, next_function_id, prev_filter_map_expr_id: None }
+ }
+
+ // check for instances of .filter_map(..).next()
+ fn check(
+ &mut self,
+ current_expr_id: ExprId,
+ receiver_expr_id: &ExprId,
+ function_id: &hir_def::FunctionId,
+ ) -> Option<()> {
+ if *function_id == self.filter_map_function_id? {
+ self.prev_filter_map_expr_id = Some(current_expr_id);
+ return None;
+ }
+
+ if *function_id == self.next_function_id? {
+ if let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id {
+ if *receiver_expr_id == prev_filter_map_expr_id {
+ return Some(());
+ }
+ }
+ }
+
+ self.prev_filter_map_expr_id = None;
+ None
+ }
+}
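+
+// A minimal sketch of the call shape this checker looks for, assuming only the standard
+// `Iterator` methods: `.filter_map(..).next()` on the same receiver chain can be replaced
+// by a single `.find_map(..)` call.
+//
+//     let first = (1..10).filter_map(|x| (x % 3 == 0).then(|| x * x)).next();
+//     // suggested replacement:
+//     let first = (1..10).find_map(|x| (x % 3 == 0).then(|| x * x));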
+
+pub fn record_literal_missing_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: ExprId,
+ expr: &Expr,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+ let (fields, exhaustive) = match expr {
+ Expr::RecordLit { fields, spread, ellipsis, is_assignee_expr, .. } => {
+ let exhaustive = if *is_assignee_expr { !*ellipsis } else { spread.is_none() };
+ (fields, exhaustive)
+ }
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_expr(id)?;
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.variant_data(db.upcast());
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ let missed_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .collect();
+ if missed_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, missed_fields, exhaustive))
+}
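+
+// A minimal sketch of the two outcomes, assuming a hypothetical `struct S { a: u32, b: u32 }`
+// and some `other: S` in scope:
+//
+//     S { a: 1 }            // exhaustive literal: `(variant, [b], true)` is returned
+//     S { a: 1, ..other }   // spread present: the `bool` is `false`, so callers that only
+//                           // report exhaustive literals will skip it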
+
+pub fn record_pattern_missing_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: PatId,
+ pat: &Pat,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+ let (fields, exhaustive) = match pat {
+ Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_pat(id)?;
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.variant_data(db.upcast());
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ let missed_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .collect();
+ if missed_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, missed_fields, exhaustive))
+}
+
+fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
+ fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
+ match infer.type_mismatch_for_pat(pat) {
+ Some(_) => *has_type_mismatches = true,
+ None => {
+ body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
+ }
+ }
+ }
+
+ let mut has_type_mismatches = false;
+ walk(pat, body, infer, &mut has_type_mismatches);
+ !has_type_mismatches
+}
+
+fn missing_match_arms<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ scrut_ty: &Ty,
+ witnesses: Vec<DeconstructedPat<'p>>,
+ arms: &[MatchArm],
+) -> String {
+ struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
+ impl<'a, 'p> fmt::Display for DisplayWitness<'a, 'p> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let DisplayWitness(witness, cx) = *self;
+ let pat = witness.to_pat(cx);
+ write!(f, "{}", pat.display(cx.db))
+ }
+ }
+
+ let non_empty_enum = match scrut_ty.as_adt() {
+ Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
+ _ => false,
+ };
+ if arms.is_empty() && !non_empty_enum {
+ format!("type `{}` is non-empty", scrut_ty.display(cx.db))
+ } else {
+ let pat_display = |witness| DisplayWitness(witness, cx);
+ const LIMIT: usize = 3;
+ match &*witnesses {
+ [witness] => format!("`{}` not covered", pat_display(witness)),
+ [head @ .., tail] if head.len() < LIMIT => {
+ let head = head.iter().map(pat_display);
+ format!("`{}` and `{}` not covered", head.format("`, `"), pat_display(tail))
+ }
+ _ => {
+ let (head, tail) = witnesses.split_at(LIMIT);
+ let head = head.iter().map(pat_display);
+ format!("`{}` and {} more not covered", head.format("`, `"), tail.len())
+ }
+ }
+ }
+}
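+
+// A sketch of the three message shapes, assuming witnesses displayed as `A`, `B`, `C`, `D`
+// and `LIMIT == 3`:
+//   one witness:        `A` not covered
+//   up to LIMIT:        `A`, `B` and `C` not covered
+//   more than LIMIT:    `A`, `B`, `C` and 1 more not covered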
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
new file mode 100644
index 000000000..d51ad72bd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -0,0 +1,508 @@
+//! Validation of matches.
+//!
+//! This module provides lowering from [hir_def::expr::Pat] to [self::Pat] and the
+//! match-checking algorithm.
+//!
+//! It is modeled on the rustc module `rustc_mir_build::thir::pattern`.
+
+mod pat_util;
+
+pub(crate) mod deconstruct_pat;
+pub(crate) mod usefulness;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, HasModule, LocalFieldId,
+ VariantId,
+};
+use hir_expand::name::{name, Name};
+use stdx::{always, never};
+
+use crate::{
+ db::HirDatabase,
+ display::{HirDisplay, HirDisplayError, HirFormatter},
+ infer::BindingMode,
+ InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
+};
+
+use self::pat_util::EnumerateAndAdjustIterator;
+
+pub(crate) use self::usefulness::MatchArm;
+
+#[derive(Clone, Debug)]
+pub(crate) enum PatternError {
+ Unimplemented,
+ UnexpectedType,
+ UnresolvedVariant,
+ MissingField,
+ ExtraFields,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) struct FieldPat {
+ pub(crate) field: LocalFieldId,
+ pub(crate) pattern: Pat,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) struct Pat {
+ pub(crate) ty: Ty,
+ pub(crate) kind: Box<PatKind>,
+}
+
+/// Close relative to `rustc_mir_build::thir::pattern::PatKind`
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) enum PatKind {
+ Wild,
+
+ /// `x`, `ref x`, `x @ P`, etc.
+ Binding {
+ name: Name,
+ subpattern: Option<Pat>,
+ },
+
+ /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with
+ /// multiple variants.
+ Variant {
+ substs: Substitution,
+ enum_variant: EnumVariantId,
+ subpatterns: Vec<FieldPat>,
+ },
+
+ /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with
+ /// a single variant.
+ Leaf {
+ subpatterns: Vec<FieldPat>,
+ },
+
+ /// `box P`, `&P`, `&mut P`, etc.
+ Deref {
+ subpattern: Pat,
+ },
+
+ // FIXME: for now, only bool literals are implemented
+ LiteralBool {
+ value: bool,
+ },
+
+ /// An or-pattern, e.g. `p | q`.
+ /// Invariant: `pats.len() >= 2`.
+ Or {
+ pats: Vec<Pat>,
+ },
+}
+
+pub(crate) struct PatCtxt<'a> {
+ db: &'a dyn HirDatabase,
+ infer: &'a InferenceResult,
+ body: &'a Body,
+ pub(crate) errors: Vec<PatternError>,
+}
+
+impl<'a> PatCtxt<'a> {
+ pub(crate) fn new(db: &'a dyn HirDatabase, infer: &'a InferenceResult, body: &'a Body) -> Self {
+ Self { db, infer, body, errors: Vec::new() }
+ }
+
+ pub(crate) fn lower_pattern(&mut self, pat: PatId) -> Pat {
+ // XXX(iDawer): Collecting pattern adjustments feels imprecise to me.
+ // When lowering of `&` and box patterns is implemented, this should be tested
+ // along the lines of the `match_ergonomics_issue_9095` test.
+ // Pattern adjustment is part of RFC 2005-match-ergonomics.
+ // More info https://github.com/rust-lang/rust/issues/42640#issuecomment-313535089
+ let unadjusted_pat = self.lower_pattern_unadjusted(pat);
+ self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold(
+ unadjusted_pat,
+ |subpattern, ref_ty| Pat {
+ ty: ref_ty.clone(),
+ kind: Box::new(PatKind::Deref { subpattern }),
+ },
+ )
+ }
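+
+ // A minimal sketch of the adjustment fold above (match ergonomics, RFC 2005): for
+ // `match &Some(0) { Some(_) => {}, None => {} }` the arm patterns are lowered against
+ // `Option<i32>` and then wrapped in one `Deref` layer per peeled reference, so the
+ // resulting `Pat` has the scrutinee type `&Option<i32>`.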
+
+ fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat {
+ let mut ty = &self.infer[pat];
+ let variant = self.infer.variant_resolution_for_pat(pat);
+
+ let kind = match self.body[pat] {
+ hir_def::expr::Pat::Wild => PatKind::Wild,
+
+ hir_def::expr::Pat::Lit(expr) => self.lower_lit(expr),
+
+ hir_def::expr::Pat::Path(ref path) => {
+ return self.lower_path(pat, path);
+ }
+
+ hir_def::expr::Pat::Tuple { ref args, ellipsis } => {
+ let arity = match *ty.kind(Interner) {
+ TyKind::Tuple(arity, _) => arity,
+ _ => {
+ never!("unexpected type for tuple pattern: {:?}", ty);
+ self.errors.push(PatternError::UnexpectedType);
+ return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
+ }
+ };
+ let subpatterns = self.lower_tuple_subpats(args, arity, ellipsis);
+ PatKind::Leaf { subpatterns }
+ }
+
+ hir_def::expr::Pat::Bind { ref name, subpat, .. } => {
+ let bm = self.infer.pat_binding_modes[&pat];
+ match (bm, ty.kind(Interner)) {
+ (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
+ (BindingMode::Ref(_), _) => {
+ never!("`ref {}` has wrong type {:?}", name, ty);
+ self.errors.push(PatternError::UnexpectedType);
+ return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
+ }
+ _ => (),
+ }
+ PatKind::Binding { name: name.clone(), subpattern: self.lower_opt_pattern(subpat) }
+ }
+
+ hir_def::expr::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
+ let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len();
+ let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
+ self.lower_variant_or_leaf(pat, ty, subpatterns)
+ }
+
+ hir_def::expr::Pat::Record { ref args, .. } if variant.is_some() => {
+ let variant_data = variant.unwrap().variant_data(self.db.upcast());
+ let subpatterns = args
+ .iter()
+ .map(|field| {
+ // XXX(iDawer): field lookup is inefficient
+ variant_data.field(&field.name).map(|lfield_id| FieldPat {
+ field: lfield_id,
+ pattern: self.lower_pattern(field.pat),
+ })
+ })
+ .collect();
+ match subpatterns {
+ Some(subpatterns) => self.lower_variant_or_leaf(pat, ty, subpatterns),
+ None => {
+ self.errors.push(PatternError::MissingField);
+ PatKind::Wild
+ }
+ }
+ }
+ hir_def::expr::Pat::TupleStruct { .. } | hir_def::expr::Pat::Record { .. } => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ PatKind::Wild
+ }
+
+ hir_def::expr::Pat::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
+
+ _ => {
+ self.errors.push(PatternError::Unimplemented);
+ PatKind::Wild
+ }
+ };
+
+ Pat { ty: ty.clone(), kind: Box::new(kind) }
+ }
+
+ fn lower_tuple_subpats(
+ &mut self,
+ pats: &[PatId],
+ expected_len: usize,
+ ellipsis: Option<usize>,
+ ) -> Vec<FieldPat> {
+ if pats.len() > expected_len {
+ self.errors.push(PatternError::ExtraFields);
+ return Vec::new();
+ }
+
+ pats.iter()
+ .enumerate_and_adjust(expected_len, ellipsis)
+ .map(|(i, &subpattern)| FieldPat {
+ field: LocalFieldId::from_raw((i as u32).into()),
+ pattern: self.lower_pattern(subpattern),
+ })
+ .collect()
+ }
+
+ fn lower_patterns(&mut self, pats: &[PatId]) -> Vec<Pat> {
+ pats.iter().map(|&p| self.lower_pattern(p)).collect()
+ }
+
+ fn lower_opt_pattern(&mut self, pat: Option<PatId>) -> Option<Pat> {
+ pat.map(|p| self.lower_pattern(p))
+ }
+
+ fn lower_variant_or_leaf(
+ &mut self,
+ pat: PatId,
+ ty: &Ty,
+ subpatterns: Vec<FieldPat>,
+ ) -> PatKind {
+ let kind = match self.infer.variant_resolution_for_pat(pat) {
+ Some(variant_id) => {
+ if let VariantId::EnumVariantId(enum_variant) = variant_id {
+ let substs = match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => substs.clone(),
+ kind => {
+ always!(
+ matches!(kind, TyKind::FnDef(..) | TyKind::Error),
+ "inappropriate type for def: {:?}",
+ ty
+ );
+ self.errors.push(PatternError::UnexpectedType);
+ return PatKind::Wild;
+ }
+ };
+ PatKind::Variant { substs, enum_variant, subpatterns }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+ None => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ PatKind::Wild
+ }
+ };
+ kind
+ }
+
+ fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat {
+ let ty = &self.infer[pat];
+
+ let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) };
+
+ match self.infer.variant_resolution_for_pat(pat) {
+ Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, ty, Vec::new())),
+ None => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ pat_from_kind(PatKind::Wild)
+ }
+ }
+ }
+
+ fn lower_lit(&mut self, expr: hir_def::expr::ExprId) -> PatKind {
+ use hir_def::expr::{Expr, Literal::Bool};
+
+ match self.body[expr] {
+ Expr::Literal(Bool(value)) => PatKind::LiteralBool { value },
+ _ => {
+ self.errors.push(PatternError::Unimplemented);
+ PatKind::Wild
+ }
+ }
+ }
+}
+
+impl HirDisplay for Pat {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match &*self.kind {
+ PatKind::Wild => write!(f, "_"),
+ PatKind::Binding { name, subpattern } => {
+ write!(f, "{name}")?;
+ if let Some(subpattern) = subpattern {
+ write!(f, " @ ")?;
+ subpattern.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+ PatKind::Variant { subpatterns, .. } | PatKind::Leaf { subpatterns } => {
+ let variant = match *self.kind {
+ PatKind::Variant { enum_variant, .. } => Some(VariantId::from(enum_variant)),
+ _ => self.ty.as_adt().and_then(|(adt, _)| match adt {
+ AdtId::StructId(s) => Some(s.into()),
+ AdtId::UnionId(u) => Some(u.into()),
+ AdtId::EnumId(_) => None,
+ }),
+ };
+
+ if let Some(variant) = variant {
+ match variant {
+ VariantId::EnumVariantId(v) => {
+ let data = f.db.enum_data(v.parent);
+ write!(f, "{}", data.variants[v.local_id].name)?;
+ }
+ VariantId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ VariantId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name)?,
+ };
+
+ let variant_data = variant.variant_data(f.db.upcast());
+ if let VariantData::Record(rec_fields) = &*variant_data {
+ write!(f, " {{ ")?;
+
+ let mut printed = 0;
+ let subpats = subpatterns
+ .iter()
+ .filter(|p| !matches!(*p.pattern.kind, PatKind::Wild))
+ .map(|p| {
+ printed += 1;
+ WriteWith(move |f| {
+ write!(f, "{}: ", rec_fields[p.field].name)?;
+ p.pattern.hir_fmt(f)
+ })
+ });
+ f.write_joined(subpats, ", ")?;
+
+ if printed < rec_fields.len() {
+ write!(f, "{}..", if printed > 0 { ", " } else { "" })?;
+ }
+
+ return write!(f, " }}");
+ }
+ }
+
+ let num_fields = variant
+ .map_or(subpatterns.len(), |v| v.variant_data(f.db.upcast()).fields().len());
+ if num_fields != 0 || variant.is_none() {
+ write!(f, "(")?;
+ let subpats = (0..num_fields).map(|i| {
+ WriteWith(move |f| {
+ let fid = LocalFieldId::from_raw((i as u32).into());
+ if let Some(p) = subpatterns.get(i) {
+ if p.field == fid {
+ return p.pattern.hir_fmt(f);
+ }
+ }
+ if let Some(p) = subpatterns.iter().find(|p| p.field == fid) {
+ p.pattern.hir_fmt(f)
+ } else {
+ write!(f, "_")
+ }
+ })
+ });
+ f.write_joined(subpats, ", ")?;
+ if let (TyKind::Tuple(..), 1) = (self.ty.kind(Interner), num_fields) {
+ write!(f, ",")?;
+ }
+ write!(f, ")")?;
+ }
+
+ Ok(())
+ }
+ PatKind::Deref { subpattern } => {
+ match self.ty.kind(Interner) {
+ TyKind::Adt(adt, _) if is_box(adt.0, f.db) => write!(f, "box ")?,
+ &TyKind::Ref(mutbl, ..) => {
+ write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
+ }
+ _ => never!("{:?} is a bad Deref pattern type", self.ty),
+ }
+ subpattern.hir_fmt(f)
+ }
+ PatKind::LiteralBool { value } => write!(f, "{}", value),
+ PatKind::Or { pats } => f.write_joined(pats.iter(), " | "),
+ }
+ }
+}
+
+struct WriteWith<F>(F)
+where
+ F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>;
+
+impl<F> HirDisplay for WriteWith<F>
+where
+ F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>,
+{
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ (self.0)(f)
+ }
+}
+
+fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool {
+ let owned_box = name![owned_box].to_smol_str();
+ let krate = adt.module(db.upcast()).krate();
+ let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from);
+ Some(adt) == box_adt
+}
+
+pub(crate) trait PatternFoldable: Sized {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.super_fold_with(folder)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self;
+}
+
+pub(crate) trait PatternFolder: Sized {
+ fn fold_pattern(&mut self, pattern: &Pat) -> Pat {
+ pattern.super_fold_with(self)
+ }
+
+ fn fold_pattern_kind(&mut self, kind: &PatKind) -> PatKind {
+ kind.super_fold_with(self)
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Box<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ let content: T = (**self).fold_with(folder);
+ Box::new(content)
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Vec<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.iter().map(|t| t.fold_with(folder)).collect()
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Option<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.as_ref().map(|t| t.fold_with(folder))
+ }
+}
+
+macro_rules! clone_impls {
+ ($($ty:ty),+) => {
+ $(
+ impl PatternFoldable for $ty {
+ fn super_fold_with<F: PatternFolder>(&self, _: &mut F) -> Self {
+ Clone::clone(self)
+ }
+ }
+ )+
+ }
+}
+
+clone_impls! { LocalFieldId, Ty, Substitution, EnumVariantId }
+
+impl PatternFoldable for FieldPat {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
+ }
+}
+
+impl PatternFoldable for Pat {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern(self)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ Pat { ty: self.ty.fold_with(folder), kind: self.kind.fold_with(folder) }
+ }
+}
+
+impl PatternFoldable for PatKind {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern_kind(self)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ match self {
+ PatKind::Wild => PatKind::Wild,
+ PatKind::Binding { name, subpattern } => {
+ PatKind::Binding { name: name.clone(), subpattern: subpattern.fold_with(folder) }
+ }
+ PatKind::Variant { substs, enum_variant, subpatterns } => PatKind::Variant {
+ substs: substs.fold_with(folder),
+ enum_variant: enum_variant.fold_with(folder),
+ subpatterns: subpatterns.fold_with(folder),
+ },
+ PatKind::Leaf { subpatterns } => {
+ PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
+ }
+ PatKind::Deref { subpattern } => {
+ PatKind::Deref { subpattern: subpattern.fold_with(folder) }
+ }
+ &PatKind::LiteralBool { value } => PatKind::LiteralBool { value },
+ PatKind::Or { pats } => PatKind::Or { pats: pats.fold_with(folder) },
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs
new file mode 100644
index 000000000..bbbe539c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs
@@ -0,0 +1,1094 @@
+//! [`super::usefulness`] explains most of what is happening in this file. As explained there,
+//! values and patterns are made from constructors applied to fields. This file defines a
+//! `Constructor` enum, a `Fields` struct, and various operations to manipulate them and convert
+//! them from/to patterns.
+//!
+//! There's one idea that is not detailed in [`super::usefulness`] because the details are not
+//! needed there: _constructor splitting_.
+//!
+//! # Constructor splitting
+//!
+//! The idea is as follows: given a constructor `c` and a matrix, we want to specialize in turn
+//! with all the value constructors that are covered by `c`, and compute usefulness for each.
+//! Instead of listing all those constructors (which is intractable), we group those value
+//! constructors together as much as possible. Example:
+//!
+//! ```
+//! match (0, false) {
+//! (0 ..=100, true) => {} // `p_1`
+//! (50..=150, false) => {} // `p_2`
+//! (0 ..=200, _) => {} // `q`
+//! }
+//! ```
+//!
+//! The naive approach would try all numbers in the range `0..=200`. But we can be a lot more
+//! clever: `0` and `1` for example will match the exact same rows, and return equivalent
+//! witnesses. In fact all of `0..50` would. We can thus restrict our exploration to 4
+//! constructors: `0..50`, `50..=100`, `101..=150` and `151..=200`. That is enough and infinitely
+//! more tractable.
+//!
+//! We capture this idea in a function `split(p_1 ... p_n, c)` which returns a list of constructors
+//! `c'` covered by `c`. Given such a `c'`, we require that all value ctors `c''` covered by `c'`
+//! return an equivalent set of witnesses after specializing and computing usefulness.
+//! In the example above, witnesses for specializing by `c''` covered by `0..50` will only differ
+//! in their first element.
+//!
+//! We usually also ask that the `c'` together cover all of the original `c`. However we allow
+//! skipping some constructors as long as it doesn't change whether the resulting list of witnesses
+//! is empty or not. We use this in the wildcard `_` case.
+//!
+//! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for
+//! or-patterns; instead we just try the alternatives one-by-one. For details on splitting
+//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`].
+
+use std::{
+ cell::Cell,
+ cmp::{max, min},
+ iter::once,
+ ops::RangeInclusive,
+};
+
+use hir_def::{EnumVariantId, HasModule, LocalFieldId, VariantId};
+use smallvec::{smallvec, SmallVec};
+use stdx::never;
+
+use crate::{infer::normalize, AdtId, Interner, Scalar, Ty, TyExt, TyKind};
+
+use super::{
+ is_box,
+ usefulness::{helper::Captures, MatchCheckCtx, PatCtxt},
+ FieldPat, Pat, PatKind,
+};
+
+use self::Constructor::*;
+
+/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
+fn expand_or_pat(pat: &Pat) -> Vec<&Pat> {
+ fn expand<'p>(pat: &'p Pat, vec: &mut Vec<&'p Pat>) {
+ if let PatKind::Or { pats } = pat.kind.as_ref() {
+ for pat in pats {
+ expand(pat, vec);
+ }
+ } else {
+ vec.push(pat)
+ }
+ }
+
+ let mut pats = Vec::new();
+ expand(pat, &mut pats);
+ pats
+}
+
+/// [Constructor] uses this in unimplemented variants.
+/// It allows porting match expressions from upstream algorithm without losing semantics.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) enum Void {}
+
+/// An inclusive interval, used for precise integer exhaustiveness checking.
+/// `IntRange`s always store a contiguous range. This means that values are
+/// encoded such that `0` encodes the minimum value for the integer,
+/// regardless of the signedness.
+/// For example, the pattern `-128..=127i8` is encoded as `0..=255`.
+/// This makes comparisons and arithmetic on interval endpoints much more
+/// straightforward. See `signed_bias` for details.
+///
+/// `IntRange` is never used to encode an empty range or a "range" that wraps
+/// around the (offset) space: i.e., `range.lo <= range.hi`.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct IntRange {
+ range: RangeInclusive<u128>,
+}
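+
+// A sketch of the offset encoding described above, which this file does not implement
+// (`from_range` below only handles `bool`): for an `N`-bit signed integer the bias is
+// `1 << (N - 1)`, so e.g. for `i8`:
+//
+//     let bias: u128 = 1 << 7;
+//     assert_eq!(((-128i8 as u8) as u128 + bias) & 0xff, 0);   // -128 encodes as 0
+//     assert_eq!(((127i8 as u8) as u128 + bias) & 0xff, 255);  // 127 encodes as 255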
+
+impl IntRange {
+ #[inline]
+ fn is_integral(ty: &Ty) -> bool {
+ matches!(
+ ty.kind(Interner),
+ TyKind::Scalar(Scalar::Char | Scalar::Int(_) | Scalar::Uint(_) | Scalar::Bool)
+ )
+ }
+
+ fn is_singleton(&self) -> bool {
+ self.range.start() == self.range.end()
+ }
+
+ fn boundaries(&self) -> (u128, u128) {
+ (*self.range.start(), *self.range.end())
+ }
+
+ #[inline]
+ fn from_bool(value: bool) -> IntRange {
+ let val = value as u128;
+ IntRange { range: val..=val }
+ }
+
+ #[inline]
+ fn from_range(lo: u128, hi: u128, scalar_ty: Scalar) -> IntRange {
+ match scalar_ty {
+ Scalar::Bool => IntRange { range: lo..=hi },
+ _ => unimplemented!(),
+ }
+ }
+
+ fn is_subrange(&self, other: &Self) -> bool {
+ other.range.start() <= self.range.start() && self.range.end() <= other.range.end()
+ }
+
+ fn intersection(&self, other: &Self) -> Option<Self> {
+ let (lo, hi) = self.boundaries();
+ let (other_lo, other_hi) = other.boundaries();
+ if lo <= other_hi && other_lo <= hi {
+ Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi) })
+ } else {
+ None
+ }
+ }
+
+ fn to_pat(&self, _cx: &MatchCheckCtx<'_, '_>, ty: Ty) -> Pat {
+ match ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Bool) => {
+ let kind = match self.boundaries() {
+ (0, 0) => PatKind::LiteralBool { value: false },
+ (1, 1) => PatKind::LiteralBool { value: true },
+ (0, 1) => PatKind::Wild,
+ (lo, hi) => {
+ never!("bad range for bool pattern: {}..={}", lo, hi);
+ PatKind::Wild
+ }
+ };
+ Pat { ty, kind: kind.into() }
+ }
+ _ => unimplemented!(),
+ }
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(&self, other: &Self) -> bool {
+ if self.intersection(other).is_some() {
+ // Constructor splitting should ensure that all intersections we encounter are actually
+ // inclusions.
+ assert!(self.is_subrange(other));
+ true
+ } else {
+ false
+ }
+ }
+}
+
+/// Represents a border between 2 integers. Because the intervals spanning borders must be able to
+/// cover every integer, we need to be able to represent 2^128 + 1 such borders.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum IntBorder {
+ JustBefore(u128),
+ AfterMax,
+}
+
+/// A range of integers that is partitioned into disjoint subranges. This does constructor
+/// splitting for integer ranges as explained at the top of the file.
+///
+/// This is fed multiple ranges, and returns an output that covers the input, but is split so that
+/// the only intersections between an output range and a seen range are inclusions. No output range
+/// straddles the boundary of one of the inputs.
+///
+/// The following input:
+/// ```
+/// |-------------------------| // `self`
+/// |------| |----------| |----|
+/// |-------| |-------|
+/// ```
+/// would be iterated over as follows:
+/// ```
+/// ||---|--||-|---|---|---|--|
+/// ```
+#[derive(Debug, Clone)]
+struct SplitIntRange {
+ /// The range we are splitting
+ range: IntRange,
+ /// The borders of ranges we have seen. They are all contained within `range`. This is kept
+ /// sorted.
+ borders: Vec<IntBorder>,
+}
+
+impl SplitIntRange {
+ fn new(range: IntRange) -> Self {
+ SplitIntRange { range, borders: Vec::new() }
+ }
+
+ /// Internal use
+ fn to_borders(r: IntRange) -> [IntBorder; 2] {
+ use IntBorder::*;
+ let (lo, hi) = r.boundaries();
+ let lo = JustBefore(lo);
+ let hi = match hi.checked_add(1) {
+ Some(m) => JustBefore(m),
+ None => AfterMax,
+ };
+ [lo, hi]
+ }
+
+ /// Add ranges relative to which we split.
+ fn split(&mut self, ranges: impl Iterator<Item = IntRange>) {
+ let this_range = &self.range;
+ let included_ranges = ranges.filter_map(|r| this_range.intersection(&r));
+ let included_borders = included_ranges.flat_map(|r| {
+ let borders = Self::to_borders(r);
+ once(borders[0]).chain(once(borders[1]))
+ });
+ self.borders.extend(included_borders);
+ self.borders.sort_unstable();
+ }
+
+ /// Iterate over the contained ranges.
+ fn iter(&self) -> impl Iterator<Item = IntRange> + '_ {
+ use IntBorder::*;
+
+ let self_range = Self::to_borders(self.range.clone());
+ // Start with the start of the range.
+ let mut prev_border = self_range[0];
+ self.borders
+ .iter()
+ .copied()
+ // End with the end of the range.
+ .chain(once(self_range[1]))
+ // List pairs of adjacent borders.
+ .map(move |border| {
+ let ret = (prev_border, border);
+ prev_border = border;
+ ret
+ })
+ // Skip duplicates.
+ .filter(|(prev_border, border)| prev_border != border)
+ // Finally, convert to ranges.
+ .map(|(prev_border, border)| {
+ let range = match (prev_border, border) {
+ (JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1),
+ (JustBefore(n), AfterMax) => n..=u128::MAX,
+ _ => unreachable!(), // Ruled out by the sorting and filtering we did
+ };
+ IntRange { range }
+ })
+ }
+}
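+
+// A worked sketch of the splitting above, reusing the module-level example: with
+// `self.range == 0..=200` and seen ranges `0..=100` and `50..=150`, `split` collects the
+// borders JustBefore(0), JustBefore(101), JustBefore(50), JustBefore(151) (then sorts them),
+// and `iter()` yields the disjoint pieces `0..=49`, `50..=100`, `101..=150`, `151..=200`.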
+
+/// A constructor for array and slice patterns.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) struct Slice {
+ _unimplemented: Void,
+}
+
+impl Slice {
+ fn arity(self) -> usize {
+ match self._unimplemented {}
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(self, _other: Self) -> bool {
+ match self._unimplemented {}
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// the constructor. See also `Fields`.
+///
+/// `pat_constructor` retrieves the constructor corresponding to a pattern.
+/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
+/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
+/// `Fields`.
+#[allow(dead_code)]
+#[derive(Clone, Debug, PartialEq)]
+pub(super) enum Constructor {
+ /// The constructor for patterns that have a single constructor, like tuples, struct patterns
+ /// and fixed-length arrays.
+ Single,
+ /// Enum variants.
+ Variant(EnumVariantId),
+ /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
+ IntRange(IntRange),
+ /// Ranges of floating-point literal values (`2.0..=5.2`).
+ FloatRange(Void),
+ /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
+ Str(Void),
+ /// Array and slice patterns.
+ Slice(Slice),
+ /// Constants that must not be matched structurally. They are treated as black
+ /// boxes for the purposes of exhaustiveness: we must not inspect them, and they
+ /// don't count towards making a match exhaustive.
+ Opaque,
+ /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
+ /// for those types for which we cannot list constructors explicitly, like `f64` and `str`.
+ NonExhaustive,
+ /// Stands for constructors that are not seen in the matrix, as explained in the documentation
+ /// for [`SplitWildcard`]. The carried `bool` is used for the `non_exhaustive_omitted_patterns`
+ /// lint.
+ Missing { nonexhaustive_enum_missing_real_variants: bool },
+ /// Wildcard pattern.
+ Wildcard,
+ /// Or-pattern.
+ Or,
+}
+
+impl Constructor {
+ pub(super) fn is_wildcard(&self) -> bool {
+ matches!(self, Wildcard)
+ }
+
+ pub(super) fn is_non_exhaustive(&self) -> bool {
+ matches!(self, NonExhaustive)
+ }
+
+ fn as_int_range(&self) -> Option<&IntRange> {
+ match self {
+ IntRange(range) => Some(range),
+ _ => None,
+ }
+ }
+
+ fn as_slice(&self) -> Option<Slice> {
+ match self {
+ Slice(slice) => Some(*slice),
+ _ => None,
+ }
+ }
+
+ pub(super) fn is_unstable_variant(&self, _pcx: PatCtxt<'_, '_>) -> bool {
+ false //FIXME: implement this
+ }
+
+ pub(super) fn is_doc_hidden_variant(&self, _pcx: PatCtxt<'_, '_>) -> bool {
+ false //FIXME: implement this
+ }
+
+ fn variant_id_for_adt(&self, adt: hir_def::AdtId) -> VariantId {
+ match *self {
+ Variant(id) => id.into(),
+ Single => {
+ assert!(!matches!(adt, hir_def::AdtId::EnumId(_)));
+ match adt {
+ hir_def::AdtId::EnumId(_) => unreachable!(),
+ hir_def::AdtId::StructId(id) => id.into(),
+ hir_def::AdtId::UnionId(id) => id.into(),
+ }
+ }
+ _ => panic!("bad constructor {:?} for adt {:?}", self, adt),
+ }
+ }
+
+ /// The number of fields for this constructor. This must be kept in sync with
+ /// `Fields::wildcards`.
+ pub(super) fn arity(&self, pcx: PatCtxt<'_, '_>) -> usize {
+ match self {
+ Single | Variant(_) => match *pcx.ty.kind(Interner) {
+ TyKind::Tuple(arity, ..) => arity,
+ TyKind::Ref(..) => 1,
+ TyKind::Adt(adt, ..) => {
+ if is_box(adt.0, pcx.cx.db) {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ 1
+ } else {
+ let variant = self.variant_id_for_adt(adt.0);
+ Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
+ }
+ }
+ _ => {
+ never!("Unexpected type for `Single` constructor: {:?}", pcx.ty);
+ 0
+ }
+ },
+ Slice(slice) => slice.arity(),
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => 0,
+ Or => {
+ never!("The `Or` constructor doesn't have a fixed arity");
+ 0
+ }
+ }
+ }
+
+ /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of actual
+ /// constructors (like variants, integers or fixed-sized slices). When specializing for these
+ /// constructors, we want to be specializing for the actual underlying constructors.
+ /// Naively, we would simply return the list of constructors they correspond to. We instead are
+ /// more clever: if there are constructors that we know will behave the same wrt the current
+ /// matrix, we keep them grouped. For example, all slices of a sufficiently large length
+ /// will either be all useful or all non-useful with a given matrix.
+ ///
+ /// See the branches for details on how the splitting is done.
+ ///
+ /// This function may discard some irrelevant constructors if this preserves behavior and
+ /// diagnostics. Eg. for the `_` case, we ignore the constructors already present in the
+ /// matrix, unless all of them are.
+ pub(super) fn split<'a>(
+ &self,
+ pcx: PatCtxt<'_, '_>,
+ ctors: impl Iterator<Item = &'a Constructor> + Clone,
+ ) -> SmallVec<[Self; 1]> {
+ match self {
+ Wildcard => {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, ctors);
+ split_wildcard.into_ctors(pcx)
+ }
+ // Fast-track if the range is trivial. In particular, we don't do the overlapping
+ // ranges check.
+ IntRange(ctor_range) if !ctor_range.is_singleton() => {
+ let mut split_range = SplitIntRange::new(ctor_range.clone());
+ let int_ranges = ctors.filter_map(|ctor| ctor.as_int_range());
+ split_range.split(int_ranges.cloned());
+ split_range.iter().map(IntRange).collect()
+ }
+ Slice(slice) => match slice._unimplemented {},
+ // Any other constructor can be used unchanged.
+ _ => smallvec![self.clone()],
+ }
+ }
+
+ /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
+ /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
+ /// this checks for inclusion.
+ // We inline because this has a single call site in `Matrix::specialize_constructor`.
+ #[inline]
+ pub(super) fn is_covered_by(&self, _pcx: PatCtxt<'_, '_>, other: &Self) -> bool {
+ // This must be kept in sync with `is_covered_by_any`.
+ match (self, other) {
+ // Wildcards cover anything
+ (_, Wildcard) => true,
+ // The missing ctors are not covered by anything in the matrix except wildcards.
+ (Missing { .. } | Wildcard, _) => false,
+
+ (Single, Single) => true,
+ (Variant(self_id), Variant(other_id)) => self_id == other_id,
+
+ (IntRange(self_range), IntRange(other_range)) => self_range.is_covered_by(other_range),
+ (FloatRange(void), FloatRange(..)) => match *void {},
+ (Str(void), Str(..)) => match *void {},
+ (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
+
+ // We are trying to inspect an opaque constant. Thus we skip the row.
+ (Opaque, _) | (_, Opaque) => false,
+ // Only a wildcard pattern can match the special extra constructor.
+ (NonExhaustive, _) => false,
+
+ _ => {
+ never!("trying to compare incompatible constructors {:?} and {:?}", self, other);
+ // Fall back to 'covered' so that comparing incompatible constructors yields no diagnostic rather than a false positive.
+ true
+ }
+ }
+ }
+
+ /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is
+ /// assumed to be built from `matrix.head_ctors()` with wildcards filtered out, and `self` is
+ /// assumed to have been split from a wildcard.
+ fn is_covered_by_any(&self, _pcx: PatCtxt<'_, '_>, used_ctors: &[Constructor]) -> bool {
+ if used_ctors.is_empty() {
+ return false;
+ }
+
+ // This must be kept in sync with `is_covered_by`.
+ match self {
+ // If `self` is `Single`, `used_ctors` cannot contain anything other than `Single`s.
+ Single => !used_ctors.is_empty(),
+ Variant(_) => used_ctors.iter().any(|c| c == self),
+ IntRange(range) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_int_range())
+ .any(|other| range.is_covered_by(other)),
+ Slice(slice) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_slice())
+ .any(|other| slice.is_covered_by(other)),
+ // This constructor is never covered by anything else
+ NonExhaustive => false,
+ Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => {
+ never!("found unexpected ctor in all_ctors: {:?}", self);
+ true
+ }
+ }
+ }
+}
+
+/// A wildcard constructor that we split relative to the constructors in the matrix, as explained
+/// at the top of the file.
+///
+/// A constructor that is not present in the matrix rows will only be covered by the rows that have
+/// wildcards. Thus we can group all of those constructors together; we call them "missing
+/// constructors". Splitting a wildcard would therefore list all present constructors individually
+/// (or grouped if they are integers or slices), and then all missing constructors together as a
+/// group.
+///
+/// However we can go further: since any constructor will match the wildcard rows, and having more
+/// rows can only reduce the number of usefulness witnesses, we can skip the present constructors
+/// and only try the missing ones.
+/// This will not preserve the whole list of witnesses, but will preserve whether the list is empty
+/// or not. In fact this is quite natural from the point of view of diagnostics too. This is done
+/// in `to_ctors`: in some cases we only return `Missing`.
+#[derive(Debug)]
+pub(super) struct SplitWildcard {
+ /// Constructors seen in the matrix.
+ matrix_ctors: Vec<Constructor>,
+ /// All the constructors for this type
+ all_ctors: SmallVec<[Constructor; 1]>,
+}
+
+impl SplitWildcard {
+ pub(super) fn new(pcx: PatCtxt<'_, '_>) -> Self {
+ let cx = pcx.cx;
+ let make_range = |start, end, scalar| IntRange(IntRange::from_range(start, end, scalar));
+
+ // Unhandled types are treated as non-exhaustive. Being explicit here, instead of falling
+ // through to a catch-all arm, eases further implementation.
+ let unhandled = || smallvec![NonExhaustive];
+
+ // This determines the set of all possible constructors for the type `pcx.ty`. For numbers,
+ // arrays and slices we use ranges and variable-length slices when appropriate.
+ //
+ // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
+ // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
+ // returned list of constructors.
+ // Invariant: this is empty if and only if the type is uninhabited (as determined by
+ // `cx.is_uninhabited()`).
+ let all_ctors = match pcx.ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Bool) => smallvec![make_range(0, 1, Scalar::Bool)],
+ // TyKind::Array(..) if ... => unhandled(),
+ TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
+ &TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ..) => {
+ let enum_data = cx.db.enum_data(enum_id);
+
+ // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an
+ // additional "unknown" constructor.
+ // There is no point in enumerating all possible variants, because the user can't
+ // actually match against them all themselves. So we always return only the fictitious
+ // constructor.
+ // E.g., in an example like:
+ //
+ // ```
+ // let err: io::ErrorKind = ...;
+ // match err {
+ // io::ErrorKind::NotFound => {},
+ // }
+ // ```
+ //
+ // we don't want to show every possible IO error, but instead have only `_` as the
+ // witness.
+ let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty);
+
+ let is_exhaustive_pat_feature = cx.feature_exhaustive_patterns();
+
+ // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it
+ // as though it had an "unknown" constructor to avoid exposing its emptiness. The
+ // exception is if the pattern is at the top level, because we want empty matches to be
+ // considered exhaustive.
+ let is_secretly_empty = enum_data.variants.is_empty()
+ && !is_exhaustive_pat_feature
+ && !pcx.is_top_level;
+
+ let mut ctors: SmallVec<[_; 1]> = enum_data
+ .variants
+ .iter()
+ .filter(|&(_, _v)| {
+ // If `exhaustive_patterns` is enabled, we exclude variants known to be
+ // uninhabited.
+ let is_uninhabited = is_exhaustive_pat_feature
+ && unimplemented!("after MatchCheckCtx.feature_exhaustive_patterns()");
+ !is_uninhabited
+ })
+ .map(|(local_id, _)| Variant(EnumVariantId { parent: enum_id, local_id }))
+ .collect();
+
+ if is_secretly_empty || is_declared_nonexhaustive {
+ ctors.push(NonExhaustive);
+ }
+ ctors
+ }
+ TyKind::Scalar(Scalar::Char) => unhandled(),
+ TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
+ TyKind::Never if !cx.feature_exhaustive_patterns() && !pcx.is_top_level => {
+ smallvec![NonExhaustive]
+ }
+ TyKind::Never => SmallVec::new(),
+ _ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
+ TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
+ // This type is one for which we cannot list constructors, like `str` or `f64`.
+ _ => smallvec![NonExhaustive],
+ };
+
+ SplitWildcard { matrix_ctors: Vec::new(), all_ctors }
+ }
+
+ /// Pass a set of constructors relative to which to split this one. Don't call twice, it won't
+ /// do what you want.
+ pub(super) fn split<'a>(
+ &mut self,
+ pcx: PatCtxt<'_, '_>,
+ ctors: impl Iterator<Item = &'a Constructor> + Clone,
+ ) {
+ // Since `all_ctors` never contains wildcards, this won't recurse further.
+ self.all_ctors =
+ self.all_ctors.iter().flat_map(|ctor| ctor.split(pcx, ctors.clone())).collect();
+ self.matrix_ctors = ctors.filter(|c| !c.is_wildcard()).cloned().collect();
+ }
+
+ /// Whether there are any value constructors for this type that are not present in the matrix.
+ fn any_missing(&self, pcx: PatCtxt<'_, '_>) -> bool {
+ self.iter_missing(pcx).next().is_some()
+ }
+
+ /// Iterate over the constructors for this type that are not present in the matrix.
+ pub(super) fn iter_missing<'a, 'p>(
+ &'a self,
+ pcx: PatCtxt<'a, 'p>,
+ ) -> impl Iterator<Item = &'a Constructor> + Captures<'p> {
+ self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.matrix_ctors))
+ }
+
+ /// Return the set of constructors resulting from splitting the wildcard. As explained at the
+ /// top of the file, if any constructors are missing we can ignore the present ones.
+ fn into_ctors(self, pcx: PatCtxt<'_, '_>) -> SmallVec<[Constructor; 1]> {
+ if self.any_missing(pcx) {
+ // Some constructors are missing, thus we can specialize with the special `Missing`
+ // constructor, which stands for those constructors that are not seen in the matrix,
+ // and matches the same rows as any of them (namely the wildcard rows). See the top of
+ // the file for details.
+ // However, when all constructors are missing we can also specialize with the full
+ // `Wildcard` constructor. The difference will depend on what we want in diagnostics.
+
+ // If some constructors are missing, we typically want to report those constructors,
+ // e.g.:
+ // ```
+ // enum Direction { N, S, E, W }
+ // let Direction::N = ...;
+ // ```
+ // we can report 3 witnesses: `S`, `E`, and `W`.
+ //
+ // However, if the user didn't actually specify a constructor
+ // in this arm, e.g., in
+ // ```
+ // let x: (Direction, Direction, bool) = ...;
+ // let (_, _, false) = x;
+ // ```
+ // we don't want to show all 16 possible witnesses `(<direction-1>, <direction-2>,
+ // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we
+ // prefer to report just a wildcard `_`.
+ //
+ // The exception is: if we are at the top-level, for example in an empty match, we
+ // sometimes prefer reporting the list of constructors instead of just `_`.
+ let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty);
+ let ctor = if !self.matrix_ctors.is_empty() || report_when_all_missing {
+ if pcx.is_non_exhaustive {
+ Missing {
+ nonexhaustive_enum_missing_real_variants: self
+ .iter_missing(pcx)
+ .any(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx))),
+ }
+ } else {
+ Missing { nonexhaustive_enum_missing_real_variants: false }
+ }
+ } else {
+ Wildcard
+ };
+ return smallvec![ctor];
+ }
+
+ // All the constructors are present in the matrix, so we just go through them all.
+ self.all_ctors
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// those fields, generalized to allow patterns in each field. See also `Constructor`.
+///
+/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that
+/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then
+/// given a pattern we fill some of the fields with its subpatterns.
+/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in
+/// `extract_pattern_arguments` we fill some of the entries, and the result is
+/// `[Some(0), _, _, _]`.
+/// ```rust
+/// let x: [Option<u8>; 4] = foo();
+/// match x {
+/// [Some(0), ..] => {}
+/// }
+/// ```
+///
+/// Note that the number of fields of a constructor may not match the fields declared in the
+/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited,
+/// because the code mustn't observe that it is uninhabited. In that case that field is not
+/// included in `fields`. For that reason, when you have a `mir::Field` you must use
+/// `index_with_declared_idx`.
+#[derive(Clone, Copy)]
+pub(super) struct Fields<'p> {
+ fields: &'p [DeconstructedPat<'p>],
+}
+
+impl<'p> Fields<'p> {
+ fn empty() -> Self {
+ Fields { fields: &[] }
+ }
+
+ fn singleton(cx: &MatchCheckCtx<'_, 'p>, field: DeconstructedPat<'p>) -> Self {
+ let field = cx.pattern_arena.alloc(field);
+ Fields { fields: std::slice::from_ref(field) }
+ }
+
+ pub(super) fn from_iter(
+ cx: &MatchCheckCtx<'_, 'p>,
+ fields: impl IntoIterator<Item = DeconstructedPat<'p>>,
+ ) -> Self {
+ let fields: &[_] = cx.pattern_arena.alloc_extend(fields);
+ Fields { fields }
+ }
+
+ fn wildcards_from_tys(cx: &MatchCheckCtx<'_, 'p>, tys: impl IntoIterator<Item = Ty>) -> Self {
+ Fields::from_iter(cx, tys.into_iter().map(DeconstructedPat::wildcard))
+ }
+
+ // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
+ // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
+ // This lists the fields we keep along with their types.
+ fn list_variant_nonhidden_fields<'a>(
+ cx: &'a MatchCheckCtx<'a, 'p>,
+ ty: &'a Ty,
+ variant: VariantId,
+ ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
+ let (adt, substs) = ty.as_adt().unwrap();
+
+ let adt_is_local = variant.module(cx.db.upcast()).krate() == cx.module.krate();
+ // Whether we must not match the fields of this variant exhaustively.
+ let is_non_exhaustive = is_field_list_non_exhaustive(variant, cx) && !adt_is_local;
+
+ let visibility = cx.db.field_visibilities(variant);
+ let field_ty = cx.db.field_types(variant);
+ let fields_len = variant.variant_data(cx.db.upcast()).fields().len() as u32;
+
+ (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
+ let ty = field_ty[fid].clone().substitute(Interner, substs);
+ let ty = normalize(cx.db, cx.body, ty);
+ let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
+ let is_uninhabited = cx.is_uninhabited(&ty);
+
+ if is_uninhabited && (!is_visible || is_non_exhaustive) {
+ None
+ } else {
+ Some((fid, ty))
+ }
+ })
+ }
+
+ /// Creates a new list of wildcard fields for a given constructor. The result must have a
+ /// length of `constructor.arity()`.
+ pub(crate) fn wildcards(
+ cx: &MatchCheckCtx<'_, 'p>,
+ ty: &Ty,
+ constructor: &Constructor,
+ ) -> Self {
+ let ret = match constructor {
+ Single | Variant(_) => match ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
+ Fields::wildcards_from_tys(cx, tys.cloned())
+ }
+ TyKind::Ref(.., rty) => Fields::wildcards_from_tys(cx, once(rty.clone())),
+ &TyKind::Adt(AdtId(adt), ref substs) => {
+ if is_box(adt, cx.db) {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ Fields::wildcards_from_tys(cx, once(subst_ty))
+ } else {
+ let variant = constructor.variant_id_for_adt(adt);
+ let tys = Fields::list_variant_nonhidden_fields(cx, ty, variant)
+ .map(|(_, ty)| ty);
+ Fields::wildcards_from_tys(cx, tys)
+ }
+ }
+ ty_kind => {
+ never!("Unexpected type for `Single` constructor: {:?}", ty_kind);
+ Fields::wildcards_from_tys(cx, once(ty.clone()))
+ }
+ },
+ Slice(slice) => match slice._unimplemented {},
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => Fields::empty(),
+ Or => {
+ never!("called `Fields::wildcards` on an `Or` ctor");
+ Fields::empty()
+ }
+ };
+ ret
+ }
+
+ /// Returns the list of patterns.
+ pub(super) fn iter_patterns<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Captures<'a> {
+ self.fields.iter()
+ }
+}
+
+/// Values and patterns can be represented as a constructor applied to some fields. This represents
+/// a pattern in this form.
+/// This also keeps track of whether the pattern has been found reachable during analysis. For this
+/// reason we should be careful not to clone patterns whose reachability we still care about. Use
+/// `clone_and_forget_reachability` if you're sure.
+pub(crate) struct DeconstructedPat<'p> {
+ ctor: Constructor,
+ fields: Fields<'p>,
+ ty: Ty,
+ reachable: Cell<bool>,
+}
+
+impl<'p> DeconstructedPat<'p> {
+ pub(super) fn wildcard(ty: Ty) -> Self {
+ Self::new(Wildcard, Fields::empty(), ty)
+ }
+
+ pub(super) fn new(ctor: Constructor, fields: Fields<'p>, ty: Ty) -> Self {
+ DeconstructedPat { ctor, fields, ty, reachable: Cell::new(false) }
+ }
+
+ /// Construct a pattern that matches everything that starts with this constructor.
+ /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
+ /// `Some(_)`.
+ pub(super) fn wild_from_ctor(pcx: PatCtxt<'_, 'p>, ctor: Constructor) -> Self {
+ let fields = Fields::wildcards(pcx.cx, pcx.ty, &ctor);
+ DeconstructedPat::new(ctor, fields, pcx.ty.clone())
+ }
+
+ /// Clone this value. This method emphasizes that cloning loses reachability information and
+ /// should be done carefully.
+ pub(super) fn clone_and_forget_reachability(&self) -> Self {
+ DeconstructedPat::new(self.ctor.clone(), self.fields, self.ty.clone())
+ }
+
+ pub(crate) fn from_pat(cx: &MatchCheckCtx<'_, 'p>, pat: &Pat) -> Self {
+ let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
+ let ctor;
+ let fields;
+ match pat.kind.as_ref() {
+ PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
+ PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
+ ctor = Wildcard;
+ fields = Fields::empty();
+ }
+ PatKind::Deref { subpattern } => {
+ ctor = Single;
+ fields = Fields::singleton(cx, mkpat(subpattern));
+ }
+ PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+ match pat.ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ ctor = Single;
+ let mut wilds: SmallVec<[_; 2]> = substs
+ .iter(Interner)
+ .map(|arg| arg.assert_ty_ref(Interner).clone())
+ .map(DeconstructedPat::wildcard)
+ .collect();
+ for pat in subpatterns {
+ let idx: u32 = pat.field.into_raw().into();
+ wilds[idx as usize] = mkpat(&pat.pattern);
+ }
+ fields = Fields::from_iter(cx, wilds)
+ }
+ TyKind::Adt(adt, substs) if is_box(adt.0, cx.db) => {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
+ // _)` or a box pattern. As a hack to avoid an ICE with the former, we
+ // ignore other fields than the first one. This will trigger an error later
+ // anyway.
+ // See https://github.com/rust-lang/rust/issues/82772 ,
+ // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
+ // The problem is that we can't know from the type whether we'll match
+ // normally or through box-patterns. We'll have to figure out a proper
+ // solution when we introduce generalized deref patterns. Also need to
+ // prevent mixing of those two options.
+ let pat =
+ subpatterns.iter().find(|pat| pat.field.into_raw() == 0u32.into());
+ let field = if let Some(pat) = pat {
+ mkpat(&pat.pattern)
+ } else {
+ let ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ DeconstructedPat::wildcard(ty)
+ };
+ ctor = Single;
+ fields = Fields::singleton(cx, field)
+ }
+ &TyKind::Adt(adt, _) => {
+ ctor = match pat.kind.as_ref() {
+ PatKind::Leaf { .. } => Single,
+ PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
+ _ => {
+ never!();
+ Wildcard
+ }
+ };
+ let variant = ctor.variant_id_for_adt(adt.0);
+ let fields_len = variant.variant_data(cx.db.upcast()).fields().len();
+ // For each field in the variant, we store the relevant index into `self.fields` if any.
+ let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
+ let tys = Fields::list_variant_nonhidden_fields(cx, &pat.ty, variant)
+ .enumerate()
+ .map(|(i, (fid, ty))| {
+ let field_idx: u32 = fid.into_raw().into();
+ field_id_to_id[field_idx as usize] = Some(i);
+ ty
+ });
+ let mut wilds: SmallVec<[_; 2]> =
+ tys.map(DeconstructedPat::wildcard).collect();
+ for pat in subpatterns {
+ let field_idx: u32 = pat.field.into_raw().into();
+ if let Some(i) = field_id_to_id[field_idx as usize] {
+ wilds[i] = mkpat(&pat.pattern);
+ }
+ }
+ fields = Fields::from_iter(cx, wilds);
+ }
+ _ => {
+ never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
+ ctor = Wildcard;
+ fields = Fields::empty();
+ }
+ }
+ }
+ &PatKind::LiteralBool { value } => {
+ ctor = IntRange(IntRange::from_bool(value));
+ fields = Fields::empty();
+ }
+ PatKind::Or { .. } => {
+ ctor = Or;
+ let pats: SmallVec<[_; 2]> = expand_or_pat(pat).into_iter().map(mkpat).collect();
+ fields = Fields::from_iter(cx, pats)
+ }
+ }
+ DeconstructedPat::new(ctor, fields, pat.ty.clone())
+ }
+
+ pub(crate) fn to_pat(&self, cx: &MatchCheckCtx<'_, 'p>) -> Pat {
+ let mut subpatterns = self.iter_fields().map(|p| p.to_pat(cx));
+ let pat = match &self.ctor {
+ Single | Variant(_) => match self.ty.kind(Interner) {
+ TyKind::Tuple(..) => PatKind::Leaf {
+ subpatterns: subpatterns
+ .zip(0u32..)
+ .map(|(p, i)| FieldPat {
+ field: LocalFieldId::from_raw(i.into()),
+ pattern: p,
+ })
+ .collect(),
+ },
+ TyKind::Adt(adt, _) if is_box(adt.0, cx.db) => {
+ // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+ // of `std`). So this branch is only reachable when the feature is enabled and
+ // the pattern is a box pattern.
+ PatKind::Deref { subpattern: subpatterns.next().unwrap() }
+ }
+ TyKind::Adt(adt, substs) => {
+ let variant = self.ctor.variant_id_for_adt(adt.0);
+ let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty(), variant)
+ .zip(subpatterns)
+ .map(|((field, _ty), pattern)| FieldPat { field, pattern })
+ .collect();
+
+ if let VariantId::EnumVariantId(enum_variant) = variant {
+ PatKind::Variant { substs: substs.clone(), enum_variant, subpatterns }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+ // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+ // be careful to reconstruct the correct constant pattern here. However a string
+ // literal pattern will never be reported as a non-exhaustiveness witness, so we
+ // ignore this issue.
+ TyKind::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
+ _ => {
+ never!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty);
+ PatKind::Wild
+ }
+ },
+ &Slice(slice) => match slice._unimplemented {},
+ &Str(void) => match void {},
+ &FloatRange(void) => match void {},
+ IntRange(range) => return range.to_pat(cx, self.ty.clone()),
+ Wildcard | NonExhaustive => PatKind::Wild,
+ Missing { .. } => {
+ never!(
+ "trying to convert a `Missing` constructor into a `Pat`; this is a bug, \
+ `Missing` should have been processed in `apply_constructors`"
+ );
+ PatKind::Wild
+ }
+ Opaque | Or => {
+ never!("can't convert to pattern: {:?}", self.ctor);
+ PatKind::Wild
+ }
+ };
+ Pat { ty: self.ty.clone(), kind: Box::new(pat) }
+ }
+
+ pub(super) fn is_or_pat(&self) -> bool {
+ matches!(self.ctor, Or)
+ }
+
+ pub(super) fn ctor(&self) -> &Constructor {
+ &self.ctor
+ }
+
+ pub(super) fn ty(&self) -> &Ty {
+ &self.ty
+ }
+
+ pub(super) fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + 'a {
+ self.fields.iter_patterns()
+ }
+
+ /// Specialize this pattern with a constructor.
+ /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
+ pub(super) fn specialize<'a>(
+ &'a self,
+ cx: &MatchCheckCtx<'_, 'p>,
+ other_ctor: &Constructor,
+ ) -> SmallVec<[&'p DeconstructedPat<'p>; 2]> {
+ match (&self.ctor, other_ctor) {
+ (Wildcard, _) => {
+ // We return a wildcard for each field of `other_ctor`.
+ Fields::wildcards(cx, &self.ty, other_ctor).iter_patterns().collect()
+ }
+ (Slice(self_slice), Slice(other_slice))
+ if self_slice.arity() != other_slice.arity() =>
+ {
+ match self_slice._unimplemented {}
+ }
+ _ => self.fields.iter_patterns().collect(),
+ }
+ }
+
+ /// We keep track for each pattern if it was ever reachable during the analysis. This is used
+ /// with `unreachable_spans` to report unreachable subpatterns arising from or patterns.
+ pub(super) fn set_reachable(&self) {
+ self.reachable.set(true)
+ }
+ pub(super) fn is_reachable(&self) -> bool {
+ self.reachable.get()
+ }
+}
+
+fn is_field_list_non_exhaustive(variant_id: VariantId, cx: &MatchCheckCtx<'_, '_>) -> bool {
+ let attr_def_id = match variant_id {
+ VariantId::EnumVariantId(id) => id.into(),
+ VariantId::StructId(id) => id.into(),
+ VariantId::UnionId(id) => id.into(),
+ };
+ cx.db.attrs(attr_def_id).by_key("non_exhaustive").exists()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs
new file mode 100644
index 000000000..b89b4f2bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs
@@ -0,0 +1,56 @@
+//! Pattern utilities.
+//!
+//! Originates from `rustc_hir::pat_util`
+
+use std::iter::{Enumerate, ExactSizeIterator};
+
+pub(crate) struct EnumerateAndAdjust<I> {
+ enumerate: Enumerate<I>,
+ gap_pos: usize,
+ gap_len: usize,
+}
+
+impl<I> Iterator for EnumerateAndAdjust<I>
+where
+ I: Iterator,
+{
+ type Item = (usize, <I as Iterator>::Item);
+
+ fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
+ self.enumerate
+ .next()
+ .map(|(i, elem)| (if i < self.gap_pos { i } else { i + self.gap_len }, elem))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.enumerate.size_hint()
+ }
+}
+
+pub(crate) trait EnumerateAndAdjustIterator {
+ fn enumerate_and_adjust(
+ self,
+ expected_len: usize,
+ gap_pos: Option<usize>,
+ ) -> EnumerateAndAdjust<Self>
+ where
+ Self: Sized;
+}
+
+impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
+ fn enumerate_and_adjust(
+ self,
+ expected_len: usize,
+ gap_pos: Option<usize>,
+ ) -> EnumerateAndAdjust<Self>
+ where
+ Self: Sized,
+ {
+ let actual_len = self.len();
+ EnumerateAndAdjust {
+ enumerate: self.enumerate(),
+ gap_pos: gap_pos.unwrap_or(expected_len),
+ gap_len: expected_len - actual_len,
+ }
+ }
+}
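+
+// A quick illustration (a sketch with made-up values): `enumerate_and_adjust` is meant for
+// patterns containing a `..` gap, e.g. a tuple-struct pattern `S(a, b, .., c)` matched against a
+// struct with 5 fields. With `expected_len = 5` and the gap at position 2, the three subpatterns
+// are numbered 0, 1 and 4; indices at or after the gap are shifted right by
+// `gap_len = expected_len - actual_len`.
+//
+//     let idxs: Vec<usize> = ["a", "b", "c"]
+//         .iter()
+//         .enumerate_and_adjust(5, Some(2))
+//         .map(|(i, _)| i)
+//         .collect();
+//     assert_eq!(idxs, vec![0, 1, 4]);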
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs
new file mode 100644
index 000000000..1221327b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs
@@ -0,0 +1,811 @@
+//! Based on rust-lang/rust (last sync f31622a50 2021-11-12)
+//! <https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs>
+//!
+//! -----
+//!
+//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
+//! Specifically, given a list of patterns for a type, we can tell whether:
+//! (a) each pattern is reachable (reachability)
+//! (b) the patterns cover every possible value for the type (exhaustiveness)
+//!
+//! The algorithm implemented here is a modified version of the one described in [this
+//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
+//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
+//! without being as rigorous.
+//!
+//!
+//! # Summary
+//!
+//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
+//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
+//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
+//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
+//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
+//! file is to compute it efficiently.
+//!
+//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
+//! is useful w.r.t. the patterns above it:
+//! ```rust
+//! match x {
+//! Some(_) => ...,
+//! None => ..., // reachable: `None` is matched by this but not the branch above
+//! Some(0) => ..., // unreachable: all the values this matches are already matched by
+//! // `Some(_)` above
+//! }
+//! ```
+//!
+//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
+//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
+//! are used to tell the user which values are missing.
+//! ```rust
+//! match x {
+//! Some(0) => ...,
+//! None => ...,
+//! // not exhaustive: `_` is useful because it matches `Some(1)`
+//! }
+//! ```
+//!
+//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
+//! reachability for each match branch and exhaustiveness for the whole match.
+//!
+//!
+//! # Constructors and fields
+//!
+//! Note: we will often abbreviate "constructor" as "ctor".
+//!
+//! The idea that powers everything that is done in this file is the following: a (matchable)
+//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
+//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
+//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
+//! pattern-matching, and this is the basis for what follows.
+//!
+//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
+//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
+//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
+//! `enum`, with one variant for each number. This allows us to see any matchable value as made up
+//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
+//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
+//!
+//! This idea can be extended to patterns: they are also made from constructors applied to fields.
+//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
+//! call "value constructors"), but there are also pattern-only ctors. The most important one is
+//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
+//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
+//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
+//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
+//!
+//! From this deconstruction we can compute whether a given value matches a given pattern; we
+//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
+//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
+//! we compare their fields recursively. A few representative examples:
+//!
+//! - `matches!(v, _) := true`
+//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
+//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
+//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
+//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
+//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
+//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
+//!
+//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
+//!
+//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
+//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
+//! infinitude of constructors. There are also subtleties with visibility of fields and
+//! uninhabitedness and various other things. The constructors idea can be extended to handle most
+//! of these subtleties though; caveats are documented where relevant throughout the code.
+//!
+//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
+//!
+//!
+//! # Specialization
+//!
+//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
+//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
+//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
+//! enumerate all possible values. From the discussion above we see that we can proceed
+//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
+//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
+//! say from knowing only the first constructor of our candidate value.
+//!
+//! Let's take the following example:
+//! ```
+//! match x {
+//! Enum::Variant1(_) => {} // `p1`
+//! Enum::Variant2(None, 0) => {} // `p2`
+//! Enum::Variant2(Some(_), 0) => {} // `q`
+//! }
+//! ```
+//!
+//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
+//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
+//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
+//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
+//!
+//! ```
+//! match x {
+//! (None, 0) => {} // `p2'`
+//! (Some(_), 0) => {} // `q'`
+//! }
+//! ```
+//!
+//! This motivates a new step in computing usefulness, that we call _specialization_.
+//! Specialization consists of filtering a list of patterns for those that match a constructor, and
+//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
+//!
+//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
+//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
+//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
+//! like a stack. We write a pattern-stack simply as `[p_1 ... p_n]`.
+//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
+//! happening:
+//! ```
+//! [Enum::Variant1(_)]
+//! [Enum::Variant2(None, 0)]
+//! [Enum::Variant2(Some(_), 0)]
+//! //==>> specialize with `Variant2`
+//! [None, 0]
+//! [Some(_), 0]
+//! //==>> specialize with `Some`
+//! [_, 0]
+//! //==>> specialize with `true` (say the type was `bool`)
+//! [0]
+//! //==>> specialize with `0`
+//! []
+//! ```
+//!
+//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
+//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
+//! otherwise if returns the fields of the constructor. This only returns more than one
+//! pattern-stack if `p` has a pattern-only constructor.
+//!
+//! - Specializing for the wrong constructor returns nothing
+//!
+//! `specialize(None, Some(p0)) := []`
+//!
+//! - Specializing for the correct constructor returns a single row with the fields
+//!
+//! `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
+//!
+//! `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
+//!
+//! - For or-patterns, we specialize each branch and concatenate the results
+//!
+//! `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
+//!
+//! - We treat the other pattern constructors as if they were a large or-pattern of all the
+//! possibilities:
+//!
+//! `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
+//!
+//! `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
+//!
+//! `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
+//!
+//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
+//! the discussion about constructor splitting in [`super::deconstruct_pat`].
+//!
+//!
+//! We then extend this function to work with pattern-stacks as input, by acting on the first
+//! column and keeping the other columns untouched.
+//!
+//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
+//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
+//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
+//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
+//! [`Fields`] struct.
+//!
+//!
+//! # Computing usefulness
+//!
+//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
+//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern-stack `q`. The paper and this
+//! file call the list of patstacks a _matrix_. They must all have the same number of columns and
+//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
+//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
+//!
+//! - base case: `n_columns == 0`.
+//! Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
+//! unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
+//!
+//! - inductive case: `n_columns > 0`.
+//! We need a way to list the constructors we want to try. We will be more clever in the next
+//! section but for now assume we list all value constructors for the type of the first column.
+//!
+//! - for each such ctor `c`:
+//!
+//! - for each `q'` returned by `specialize(c, q)`:
+//!
+//! - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
+//!
+//! - for each witness found, we revert specialization by pushing the constructor `c` on top.
+//!
+//! - We return the concatenation of all the witnesses found, if any.
+//!
+//! Example:
+//! ```
+//! [Some(true)] // p_1
+//! [None] // p_2
+//! [Some(_)] // q
+//! //==>> try `None`: `specialize(None, q)` returns nothing
+//! //==>> try `Some`: `specialize(Some, q)` returns a single row
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `true`: `specialize(true, q')` returns a single row
+//! [] // p_1''
+//! [] // q''
+//! //==>> base case; `n != 0` so `q''` is not useful.
+//! //==>> go back up a step
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `false`: `specialize(false, q')` returns a single row
+//! [] // q''
+//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
+//! witnesses:
+//! []
+//! //==>> undo the specialization with `false`
+//! witnesses:
+//! [false]
+//! //==>> undo the specialization with `Some`
+//! witnesses:
+//! [Some(false)]
+//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
+//! ```
+//!
+//! This computation is done in [`is_useful`]. In practice we don't care about the list of
+//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
+//! witnesses when computing exhaustiveness to report them to the user.
+//!
+//!
+//! # Making usefulness tractable: constructor splitting
+//!
+//! We're missing one last detail: which constructors do we list? Naively listing all value
+//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
+//! first obvious insight is that we only want to list constructors that are covered by the head
+//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
+//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
+//! group together constructors that behave the same.
+//!
+//! The details are not necessary to understand this file, so we explain them in
+//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
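+//!
+//! As a rough illustration (a sketch; the exact groups depend on the ranges present in the
+//! matrix): with a `u8` scrutinee, a wildcard head constructor checked against a row matching
+//! `0..=100` is split into the groups `0..=100` and `101..=255`, because all values within one
+//! group are matched by exactly the same rows.
+//! ```
+//! match x {
+//!     0..=100 => {} // head ctor: IntRange(0..=100)
+//!     _ => {}       // wildcard, split into IntRange(0..=100) and IntRange(101..=255)
+//! }
+//! ```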
+
+use std::iter::once;
+
+use hir_def::{AdtId, DefWithBodyId, HasModule, ModuleId};
+use smallvec::{smallvec, SmallVec};
+use typed_arena::Arena;
+
+use crate::{db::HirDatabase, Ty, TyExt};
+
+use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};
+
+use self::{helper::Captures, ArmType::*, Usefulness::*};
+
+pub(crate) struct MatchCheckCtx<'a, 'p> {
+ pub(crate) module: ModuleId,
+ pub(crate) body: DefWithBodyId,
+ pub(crate) db: &'a dyn HirDatabase,
+    /// Lowered patterns from match arms, plus patterns generated during the check.
+ pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
+}
+
+impl<'a, 'p> MatchCheckCtx<'a, 'p> {
+ pub(super) fn is_uninhabited(&self, _ty: &Ty) -> bool {
+ // FIXME(iDawer) implement exhaustive_patterns feature. More info in:
+ // Tracking issue for RFC 1872: exhaustive_patterns feature https://github.com/rust-lang/rust/issues/51085
+ false
+ }
+
+ /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
+ pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
+ match ty.as_adt() {
+ Some((adt @ AdtId::EnumId(_), _)) => {
+ let has_non_exhaustive_attr =
+ self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
+ let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
+ has_non_exhaustive_attr && !is_local
+ }
+ _ => false,
+ }
+ }
+
+ // Rust feature described as "Allows exhaustive pattern matching on types that contain uninhabited types."
+ pub(super) fn feature_exhaustive_patterns(&self) -> bool {
+ // FIXME see MatchCheckCtx::is_uninhabited
+ false
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(super) struct PatCtxt<'a, 'p> {
+ pub(super) cx: &'a MatchCheckCtx<'a, 'p>,
+ /// Type of the current column under investigation.
+ pub(super) ty: &'a Ty,
+ /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
+ /// subpattern.
+ pub(super) is_top_level: bool,
+ /// Whether the current pattern is from a `non_exhaustive` enum.
+ pub(super) is_non_exhaustive: bool,
+}
+
+/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec<[_; 2]>`
+/// works well.
+#[derive(Clone)]
+pub(super) struct PatStack<'p> {
+ pats: SmallVec<[&'p DeconstructedPat<'p>; 2]>,
+}
+
+impl<'p> PatStack<'p> {
+ fn from_pattern(pat: &'p DeconstructedPat<'p>) -> Self {
+ Self::from_vec(smallvec![pat])
+ }
+
+ fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p>; 2]>) -> Self {
+ PatStack { pats: vec }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.pats.is_empty()
+ }
+
+ fn len(&self) -> usize {
+ self.pats.len()
+ }
+
+ fn head(&self) -> &'p DeconstructedPat<'p> {
+ self.pats[0]
+ }
+
+ // Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
+ // or-pattern. Panics if `self` is empty.
+ fn expand_or_pat(&self) -> impl Iterator<Item = PatStack<'p>> + Captures<'_> {
+ self.head().iter_fields().map(move |pat| {
+ let mut new_patstack = PatStack::from_pattern(pat);
+ new_patstack.pats.extend_from_slice(&self.pats[1..]);
+ new_patstack
+ })
+ }
+
+ /// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
+ ///
+ /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
+ /// fields filled with wild patterns.
+ ///
+ /// This is roughly the inverse of `Constructor::apply`.
+ fn pop_head_constructor(&self, cx: &MatchCheckCtx<'_, 'p>, ctor: &Constructor) -> PatStack<'p> {
+ // We pop the head pattern and push the new fields extracted from the arguments of
+ // `self.head()`.
+ let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(cx, ctor);
+ new_fields.extend_from_slice(&self.pats[1..]);
+ PatStack::from_vec(new_fields)
+ }
+}
+
+/// A 2D matrix.
+#[derive(Clone)]
+pub(super) struct Matrix<'p> {
+ patterns: Vec<PatStack<'p>>,
+}
+
+impl<'p> Matrix<'p> {
+ fn empty() -> Self {
+ Matrix { patterns: vec![] }
+ }
+
+    /// Number of columns of this matrix. `None` if the matrix is empty.
+ pub(super) fn _column_count(&self) -> Option<usize> {
+ self.patterns.get(0).map(|r| r.len())
+ }
+
+ /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
+ /// expands it.
+ fn push(&mut self, row: PatStack<'p>) {
+ if !row.is_empty() && row.head().is_or_pat() {
+ self.patterns.extend(row.expand_or_pat());
+ } else {
+ self.patterns.push(row);
+ }
+ }
+
+ /// Iterate over the first component of each row
+ fn heads(&self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Clone + Captures<'_> {
+ self.patterns.iter().map(|r| r.head())
+ }
+
+ /// This computes `S(constructor, self)`. See top of the file for explanations.
+ fn specialize_constructor(&self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Matrix<'p> {
+ let mut matrix = Matrix::empty();
+ for row in &self.patterns {
+ if ctor.is_covered_by(pcx, row.head().ctor()) {
+ let new_row = row.pop_head_constructor(pcx.cx, ctor);
+ matrix.push(new_row);
+ }
+ }
+ matrix
+ }
+}
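+
+// Worked example (a sketch): specializing a matrix with rows `[Some(0), y]` and `[None, z]` by
+// the constructor `Some` keeps only the first row and pops its head constructor, yielding the
+// single row `[0, y]`; the `None` row is dropped because `Some` is not covered by its head.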
+
+/// This carries the results of computing usefulness, as described at the top of the file. When
+/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
+/// of potentially unreachable sub-patterns (in the presence of or-patterns). When checking
+/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
+/// witnesses of non-exhaustiveness when there are any.
+/// Which variant to use is dictated by `ArmType`.
+enum Usefulness<'p> {
+ /// If we don't care about witnesses, simply remember if the pattern was useful.
+ NoWitnesses { useful: bool },
+ /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
+ /// pattern is unreachable.
+ WithWitnesses(Vec<Witness<'p>>),
+}
+
+impl<'p> Usefulness<'p> {
+ fn new_useful(preference: ArmType) -> Self {
+ match preference {
+ // A single (empty) witness of reachability.
+ FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
+ RealArm => NoWitnesses { useful: true },
+ }
+ }
+ fn new_not_useful(preference: ArmType) -> Self {
+ match preference {
+ FakeExtraWildcard => WithWitnesses(vec![]),
+ RealArm => NoWitnesses { useful: false },
+ }
+ }
+
+ fn is_useful(&self) -> bool {
+ match self {
+ Usefulness::NoWitnesses { useful } => *useful,
+ Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
+ }
+ }
+
+ /// Combine usefulnesses from two branches. This is an associative operation.
+ fn extend(&mut self, other: Self) {
+ match (&mut *self, other) {
+ (WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
+ (WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
+ (WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
+ (NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
+ *s_useful = *s_useful || o_useful
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ /// After calculating usefulness after a specialization, call this to reconstruct a usefulness
+ /// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
+ /// with the results of specializing with the other constructors.
+ fn apply_constructor(
+ self,
+ pcx: PatCtxt<'_, 'p>,
+ matrix: &Matrix<'p>,
+ ctor: &Constructor,
+ ) -> Self {
+ match self {
+ NoWitnesses { .. } => self,
+ WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
+ WithWitnesses(witnesses) => {
+ let new_witnesses = if let Constructor::Missing { .. } = ctor {
+ // We got the special `Missing` constructor, so each of the missing constructors
+ // gives a new pattern that is not caught by the match. We list those patterns.
+ let new_patterns = if pcx.is_non_exhaustive {
+ // Here we don't want the user to try to list all variants, we want them to add
+ // a wildcard, so we only suggest that.
+ vec![DeconstructedPat::wildcard(pcx.ty.clone())]
+ } else {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+
+ // This lets us know if we skipped any variants because they are marked
+                        // `doc(hidden)` or they are behind an unstable feature gate (only stdlib types).
+ let mut hide_variant_show_wild = false;
+ // Construct for each missing constructor a "wild" version of this
+ // constructor, that matches everything that can be built with
+ // it. For example, if `ctor` is a `Constructor::Variant` for
+ // `Option::Some`, we get the pattern `Some(_)`.
+ let mut new: Vec<DeconstructedPat<'_>> = split_wildcard
+ .iter_missing(pcx)
+ .filter_map(|missing_ctor| {
+ // Check if this variant is marked `doc(hidden)`
+ if missing_ctor.is_doc_hidden_variant(pcx)
+ || missing_ctor.is_unstable_variant(pcx)
+ {
+ hide_variant_show_wild = true;
+ return None;
+ }
+ Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone()))
+ })
+ .collect();
+
+ if hide_variant_show_wild {
+ new.push(DeconstructedPat::wildcard(pcx.ty.clone()))
+ }
+
+ new
+ };
+
+ witnesses
+ .into_iter()
+ .flat_map(|witness| {
+ new_patterns.iter().map(move |pat| {
+ Witness(
+ witness
+ .0
+ .iter()
+ .chain(once(pat))
+ .map(DeconstructedPat::clone_and_forget_reachability)
+ .collect(),
+ )
+ })
+ })
+ .collect()
+ } else {
+ witnesses
+ .into_iter()
+ .map(|witness| witness.apply_constructor(pcx, ctor))
+ .collect()
+ };
+ WithWitnesses(new_witnesses)
+ }
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+enum ArmType {
+ FakeExtraWildcard,
+ RealArm,
+}
+
+/// A witness of non-exhaustiveness for error reporting, represented
+/// as a list of patterns (in reverse order of construction) with
+/// wildcards inside to represent elements that can take any inhabitant
+/// of the type as a value.
+///
+/// A witness against a list of patterns should have the same types
+/// and length as the pattern matched against. Because Rust `match`
+/// is always against a single pattern, at the end the witness will
+/// have length 1, but in the middle of the algorithm, it can contain
+/// multiple patterns.
+///
+/// For example, if we are constructing a witness for the match against
+///
+/// ```
+/// struct Pair(Option<(u32, u32)>, bool);
+///
+/// match (p: Pair) {
+/// Pair(None, _) => {}
+/// Pair(_, false) => {}
+/// }
+/// ```
+///
+/// We'll perform the following steps:
+/// 1. Start with an empty witness
+/// `Witness(vec![])`
+/// 2. Push a witness `true` against the `false`
+/// `Witness(vec![true])`
+/// 3. Push a witness `Some(_)` against the `None`
+/// `Witness(vec![true, Some(_)])`
+/// 4. Apply the `Pair` constructor to the witnesses
+/// `Witness(vec![Pair(Some(_), true)])`
+///
+/// The final `Pair(Some(_), true)` is then the resulting witness.
+pub(crate) struct Witness<'p>(Vec<DeconstructedPat<'p>>);
+
+impl<'p> Witness<'p> {
+ /// Asserts that the witness contains a single pattern, and returns it.
+ fn single_pattern(self) -> DeconstructedPat<'p> {
+ assert_eq!(self.0.len(), 1);
+ self.0.into_iter().next().unwrap()
+ }
+
+ /// Constructs a partial witness for a pattern given a list of
+ /// patterns expanded by the specialization step.
+ ///
+ /// When a pattern P is discovered to be useful, this function is used bottom-up
+ /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
+ /// of values, V, where each value in that set is not covered by any previously
+ /// used patterns and is covered by the pattern P'. Examples:
+ ///
+ /// left_ty: tuple of 3 elements
+ /// pats: [10, 20, _] => (10, 20, _)
+ ///
+ /// left_ty: struct X { a: (bool, &'static str), b: usize}
+ /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 }
+ fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Self {
+ let pat = {
+ let len = self.0.len();
+ let arity = ctor.arity(pcx);
+ let pats = self.0.drain((len - arity)..).rev();
+ let fields = Fields::from_iter(pcx.cx, pats);
+ DeconstructedPat::new(ctor.clone(), fields, pcx.ty.clone())
+ };
+
+ self.0.push(pat);
+
+ self
+ }
+}
+
+/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
+/// The algorithm from the paper has been modified to correctly handle empty
+/// types. The changes are:
+/// (0) We don't exit early if the pattern matrix has zero rows. We just
+/// continue to recurse over columns.
+/// (1) all_constructors will only return constructors that are statically
+/// possible. E.g., it will only return `Ok` for `Result<T, !>`.
+///
+/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
+/// to a set of such vectors `m` - this is defined as there being a set of
+/// inputs that will match `v` but not any of the sets in `m`.
+///
+/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
+///
+/// This is used both for reachability checking (if a pattern isn't useful in
+/// relation to preceding patterns, it is not reachable) and exhaustiveness
+/// checking (if a wildcard pattern is useful in relation to a matrix, the
+/// matrix isn't exhaustive).
+///
+/// `is_under_guard` is used to inform if the pattern has a guard. If it
+/// has one it must not be inserted into the matrix. This shouldn't be
+/// relied on for soundness.
+fn is_useful<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ matrix: &Matrix<'p>,
+ v: &PatStack<'p>,
+ witness_preference: ArmType,
+ is_under_guard: bool,
+ is_top_level: bool,
+) -> Usefulness<'p> {
+ let Matrix { patterns: rows, .. } = matrix;
+
+ // The base case. We are pattern-matching on () and the return value is
+ // based on whether our matrix has a row or not.
+ // NOTE: This could potentially be optimized by checking rows.is_empty()
+ // first and then, if v is non-empty, the return value is based on whether
+ // the type of the tuple we're checking is inhabited or not.
+ if v.is_empty() {
+ let ret = if rows.is_empty() {
+ Usefulness::new_useful(witness_preference)
+ } else {
+ Usefulness::new_not_useful(witness_preference)
+ };
+ return ret;
+ }
+
+ debug_assert!(rows.iter().all(|r| r.len() == v.len()));
+
+ let ty = v.head().ty();
+ let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+ let pcx = PatCtxt { cx, ty, is_top_level, is_non_exhaustive };
+
+ // If the first pattern is an or-pattern, expand it.
+ let mut ret = Usefulness::new_not_useful(witness_preference);
+ if v.head().is_or_pat() {
+ // We try each or-pattern branch in turn.
+ let mut matrix = matrix.clone();
+ for v in v.expand_or_pat() {
+ let usefulness = is_useful(cx, &matrix, &v, witness_preference, is_under_guard, false);
+ ret.extend(usefulness);
+ // If pattern has a guard don't add it to the matrix.
+ if !is_under_guard {
+ // We push the already-seen patterns into the matrix in order to detect redundant
+ // branches like `Some(_) | Some(0)`.
+ matrix.push(v);
+ }
+ }
+ } else {
+ let v_ctor = v.head().ctor();
+
+ // FIXME: implement `overlapping_range_endpoints` lint
+
+ // We split the head constructor of `v`.
+ let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+ // For each constructor, we compute whether there's a value that starts with it that would
+ // witness the usefulness of `v`.
+ let start_matrix = matrix;
+ for ctor in split_ctors {
+ // We cache the result of `Fields::wildcards` because it is used a lot.
+ let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
+ let v = v.pop_head_constructor(cx, &ctor);
+ let usefulness =
+ is_useful(cx, &spec_matrix, &v, witness_preference, is_under_guard, false);
+ let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);
+
+ // FIXME: implement `non_exhaustive_omitted_patterns` lint
+
+ ret.extend(usefulness);
+ }
+ };
+
+ if ret.is_useful() {
+ v.head().set_reachable();
+ }
+
+ ret
+}
+
+/// The arm of a match expression.
+#[derive(Clone, Copy)]
+pub(crate) struct MatchArm<'p> {
+ pub(crate) pat: &'p DeconstructedPat<'p>,
+ pub(crate) has_guard: bool,
+}
+
+/// Indicates whether or not a given arm is reachable.
+#[derive(Clone, Debug)]
+pub(crate) enum Reachability {
+ /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
+ /// found to be unreachable despite the overall arm being reachable. Used only in the presence
+ /// of or-patterns, otherwise it stays empty.
+    // FIXME: store unreachable subpattern IDs
+ Reachable,
+ /// The arm is unreachable.
+ Unreachable,
+}
+
+/// The output of checking a match for exhaustiveness and arm reachability.
+pub(crate) struct UsefulnessReport<'p> {
+ /// For each arm of the input, whether that arm is reachable after the arms above it.
+ pub(crate) _arm_usefulness: Vec<(MatchArm<'p>, Reachability)>,
+ /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
+ /// exhaustiveness.
+ pub(crate) non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p>>,
+}
+
+/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
+/// of its arms are reachable.
+///
+/// Note: the input patterns must have been lowered through
+/// `check_match::MatchVisitor::lower_pattern`.
+pub(crate) fn compute_match_usefulness<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ arms: &[MatchArm<'p>],
+ scrut_ty: &Ty,
+) -> UsefulnessReport<'p> {
+ let mut matrix = Matrix::empty();
+ let arm_usefulness = arms
+ .iter()
+ .copied()
+ .map(|arm| {
+ let v = PatStack::from_pattern(arm.pat);
+ is_useful(cx, &matrix, &v, RealArm, arm.has_guard, true);
+ if !arm.has_guard {
+ matrix.push(v);
+ }
+ let reachability = if arm.pat.is_reachable() {
+ Reachability::Reachable
+ } else {
+ Reachability::Unreachable
+ };
+ (arm, reachability)
+ })
+ .collect();
+
+ let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty.clone()));
+ let v = PatStack::from_pattern(wild_pattern);
+ let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, false, true);
+ let non_exhaustiveness_witnesses = match usefulness {
+ WithWitnesses(pats) => pats.into_iter().map(Witness::single_pattern).collect(),
+ NoWitnesses { .. } => panic!("bug"),
+ };
+ UsefulnessReport { _arm_usefulness: arm_usefulness, non_exhaustiveness_witnesses }
+}
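+
+// Rough call shape (a sketch; the context, pattern arena, lowered arms and scrutinee type are
+// all provided by the caller, see `check_match::MatchVisitor::lower_pattern` mentioned above):
+//
+//     let report = compute_match_usefulness(&cx, &arms, &scrut_ty);
+//     for (_arm, reachability) in &report._arm_usefulness {
+//         // `Reachability::Unreachable` means the arm can be reported as unreachable
+//     }
+//     if !report.non_exhaustiveness_witnesses.is_empty() {
+//         // the match is non-exhaustive; each witness is a pattern it fails to cover
+//     }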
+
+pub(crate) mod helper {
+ // Copy-pasted from rust/compiler/rustc_data_structures/src/captures.rs
+ /// "Signaling" trait used in impl trait to tag lifetimes that you may
+ /// need to capture but don't really need for other reasons.
+ /// Basically a workaround; see [this comment] for details.
+ ///
+ /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999
+ // FIXME(eddyb) false positive, the lifetime parameter is "phantom" but needed.
+ #[allow(unused_lifetimes)]
+ pub(crate) trait Captures<'a> {}
+
+ impl<'a, T: ?Sized> Captures<'a> for T {}
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
new file mode 100644
index 000000000..161b19a73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -0,0 +1,104 @@
+//! Provides validations for unsafe code. Currently checks whether unsafe operations are used
+//! without `unsafe` blocks.
+
+use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, UnaryOp},
+ resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ DefWithBodyId,
+};
+
+use crate::{
+ db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TyExt, TyKind,
+};
+
+pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
+ let infer = db.infer(def);
+ let mut res = Vec::new();
+
+ let is_unsafe = match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
+ DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
+ };
+ if is_unsafe {
+ return res;
+ }
+
+ let body = db.body(def);
+ unsafe_expressions(db, &infer, def, &body, body.body_expr, &mut |expr| {
+ if !expr.inside_unsafe_block {
+ res.push(expr.expr);
+ }
+ });
+
+ res
+}
+
+pub struct UnsafeExpr {
+ pub expr: ExprId,
+ pub inside_unsafe_block: bool,
+}
+
+// FIXME: Move this out, it's not a diagnostic-only thing anymore, and handle unsafe pattern accesses as well
+pub fn unsafe_expressions(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ def: DefWithBodyId,
+ body: &Body,
+ current: ExprId,
+ unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
+) {
+ walk_unsafe(db, infer, def, body, current, false, unsafe_expr_cb)
+}
+
+fn walk_unsafe(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ def: DefWithBodyId,
+ body: &Body,
+ current: ExprId,
+ inside_unsafe_block: bool,
+ unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
+) {
+ let expr = &body.exprs[current];
+ match expr {
+ &Expr::Call { callee, .. } => {
+ if let Some(func) = infer[callee].as_fn_def(db) {
+ if is_fn_unsafe_to_call(db, func) {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ }
+ Expr::Path(path) => {
+ let resolver = resolver_for_expr(db.upcast(), def, current);
+ let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
+ if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
+ if db.static_data(id).mutable {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ }
+ Expr::MethodCall { .. } => {
+ if infer
+ .method_resolution(current)
+ .map(|(func, _)| is_fn_unsafe_to_call(db, func))
+ .unwrap_or(false)
+ {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+ if let TyKind::Raw(..) = &infer[*expr].kind(Interner) {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ Expr::Unsafe { body: child } => {
+ return walk_unsafe(db, infer, def, body, *child, true, unsafe_expr_cb);
+ }
+ _ => {}
+ }
+
+ expr.walk_child_exprs(|child| {
+ walk_unsafe(db, infer, def, body, child, inside_unsafe_block, unsafe_expr_cb);
+ });
+}
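+
+// Sketch of what this walk flags (a hypothetical snippet; the names are made up):
+//
+//     static mut COUNTER: u32 = 0;
+//     unsafe fn incr() {}
+//
+//     fn caller(p: *const u32) {
+//         incr();                     // flagged: call to an unsafe fn outside an `unsafe` block
+//         let _ = unsafe { *p };      // not flagged: raw pointer deref inside `unsafe`
+//         let _ = unsafe { COUNTER }; // not flagged: mutable static access inside `unsafe`
+//     }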
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
new file mode 100644
index 000000000..d2f9c2b8b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -0,0 +1,1315 @@
+//! The `HirDisplay` trait, which serves two purposes: Turning various bits from
+//! HIR back into source code, and just displaying them for debugging/testing
+//! purposes.
+
+use std::fmt::{self, Debug};
+
+use base_db::CrateId;
+use chalk_ir::BoundVar;
+use hir_def::{
+ body,
+ db::DefDatabase,
+ find_path,
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ intern::{Internable, Interned},
+ item_scope::ItemInNs,
+ path::{Path, PathKind},
+ type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
+ visibility::Visibility,
+ HasModule, ItemContainerId, Lookup, ModuleId, TraitId,
+};
+use hir_expand::{hygiene::Hygiene, name::Name};
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase,
+ from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
+ mapping::from_chalk,
+ primitive, subst_prefix, to_assoc_type_id,
+ utils::{self, generics},
+ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
+ GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
+ OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, Substitution, TraitRef,
+ TraitRefExt, Ty, TyExt, TyKind, WhereClause,
+};
+
+pub struct HirFormatter<'a> {
+ pub db: &'a dyn HirDatabase,
+ fmt: &'a mut dyn fmt::Write,
+ buf: String,
+ curr_size: usize,
+ pub(crate) max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+}
+
+pub trait HirDisplay {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError>;
+
+ /// Returns a `Display`able type that is human-readable.
+ fn into_displayable<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+ ) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ assert!(
+ !matches!(display_target, DisplayTarget::SourceCode { .. }),
+ "HirDisplayWrapper cannot fail with DisplaySourceCodeError, use HirDisplay::hir_fmt directly instead"
+ );
+ HirDisplayWrapper { db, t: self, max_size, omit_verbose_types, display_target }
+ }
+
+ /// Returns a `Display`able type that is human-readable.
+ /// Use this for showing types to the user (e.g. diagnostics)
+ fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::Diagnostics,
+ }
+ }
+
+ /// Returns a `Display`able type that is human-readable and tries to be succinct.
+ /// Use this for showing types to the user where space is constrained (e.g. doc popups)
+ fn display_truncated<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ max_size: Option<usize>,
+ ) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size,
+ omit_verbose_types: true,
+ display_target: DisplayTarget::Diagnostics,
+ }
+ }
+
+ /// Returns a String representation of `self` that can be inserted into the given module.
+ /// Use this when generating code (e.g. assists)
+ fn display_source_code<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ module_id: ModuleId,
+ ) -> Result<String, DisplaySourceCodeError> {
+ let mut result = String::new();
+ match self.hir_fmt(&mut HirFormatter {
+ db,
+ fmt: &mut result,
+ buf: String::with_capacity(20),
+ curr_size: 0,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::SourceCode { module_id },
+ }) {
+ Ok(()) => {}
+ Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
+ Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
+ };
+ Ok(result)
+ }
+
+ /// Returns a String representation of `self` for test purposes
+ fn display_test<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::Test,
+ }
+ }
+}
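+
+// Minimal usage sketch (assumed call site; `db`, `ty` and `module_id` are stand-ins supplied by
+// the caller):
+//
+//     let for_user = format!("expected `{}`", ty.display(db));        // diagnostics
+//     let for_hover = ty.display_truncated(db, Some(20)).to_string(); // space-constrained UI
+//     let for_assist = ty.display_source_code(db, module_id)?;        // code to be inserted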
+
+impl<'a> HirFormatter<'a> {
+ pub fn write_joined<T: HirDisplay>(
+ &mut self,
+ iter: impl IntoIterator<Item = T>,
+ sep: &str,
+ ) -> Result<(), HirDisplayError> {
+ let mut first = true;
+ for e in iter {
+ if !first {
+ write!(self, "{}", sep)?;
+ }
+ first = false;
+
+ // Abbreviate multiple omitted types with a single ellipsis.
+ if self.should_truncate() {
+ return write!(self, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ e.hir_fmt(self)?;
+ }
+ Ok(())
+ }
+
+ /// This allows using the `write!` macro directly with a `HirFormatter`.
+ pub fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> Result<(), HirDisplayError> {
+ // We write to a buffer first to track output size
+ self.buf.clear();
+ fmt::write(&mut self.buf, args)?;
+ self.curr_size += self.buf.len();
+
+ // Then we write to the internal formatter from the buffer
+ self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
+ }
+
+ pub fn write_str(&mut self, s: &str) -> Result<(), HirDisplayError> {
+ self.fmt.write_str(s)?;
+ Ok(())
+ }
+
+ pub fn write_char(&mut self, c: char) -> Result<(), HirDisplayError> {
+ self.fmt.write_char(c)?;
+ Ok(())
+ }
+
+ pub fn should_truncate(&self) -> bool {
+ match self.max_size {
+ Some(max_size) => self.curr_size >= max_size,
+ None => false,
+ }
+ }
+
+ pub fn omit_verbose_types(&self) -> bool {
+ self.omit_verbose_types
+ }
+}
+
+#[derive(Clone, Copy)]
+pub enum DisplayTarget {
+ /// Display types for inlays, doc popups, autocompletion, etc...
+ /// Showing `{unknown}` or not qualifying paths is fine here.
+ /// There's no reason for this to fail.
+ Diagnostics,
+ /// Display types for inserting them in source files.
+ /// The generated code should compile, so paths need to be qualified.
+ SourceCode { module_id: ModuleId },
+ /// Only for test purpose to keep real types
+ Test,
+}
+
+impl DisplayTarget {
+ fn is_source_code(&self) -> bool {
+ matches!(self, Self::SourceCode { .. })
+ }
+ fn is_test(&self) -> bool {
+ matches!(self, Self::Test)
+ }
+}
+
+#[derive(Debug)]
+pub enum DisplaySourceCodeError {
+ PathNotFound,
+ UnknownType,
+ Closure,
+}
+
+pub enum HirDisplayError {
+ /// Errors that can occur when generating source code
+ DisplaySourceCodeError(DisplaySourceCodeError),
+ /// `FmtError` is required to be compatible with std::fmt::Display
+ FmtError,
+}
+impl From<fmt::Error> for HirDisplayError {
+ fn from(_: fmt::Error) -> Self {
+ Self::FmtError
+ }
+}
+
+pub struct HirDisplayWrapper<'a, T> {
+ db: &'a dyn HirDatabase,
+ t: &'a T,
+ max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+}
+
+impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
+where
+ T: HirDisplay,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.t.hir_fmt(&mut HirFormatter {
+ db: self.db,
+ fmt: f,
+ buf: String::with_capacity(20),
+ curr_size: 0,
+ max_size: self.max_size,
+ omit_verbose_types: self.omit_verbose_types,
+ display_target: self.display_target,
+ }) {
+ Ok(()) => Ok(()),
+ Err(HirDisplayError::FmtError) => Err(fmt::Error),
+ Err(HirDisplayError::DisplaySourceCodeError(_)) => {
+ // This should never happen
+ panic!("HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!")
+ }
+ }
+ }
+}
+
+const TYPE_HINT_TRUNCATION: &str = "…";
+
+impl<T: HirDisplay> HirDisplay for &'_ T {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ HirDisplay::hir_fmt(*self, f)
+ }
+}
+
+impl<T: HirDisplay + Internable> HirDisplay for Interned<T> {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ HirDisplay::hir_fmt(self.as_ref(), f)
+ }
+}
+
+impl HirDisplay for ProjectionTy {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ let trait_ = f.db.trait_data(self.trait_(f.db));
+ write!(f, "<")?;
+ self.self_type_parameter(Interner).hir_fmt(f)?;
+ write!(f, " as {}", trait_.name)?;
+ if self.substitution.len(Interner) > 1 {
+ write!(f, "<")?;
+ f.write_joined(&self.substitution.as_slice(Interner)[1..], ", ")?;
+ write!(f, ">")?;
+ }
+ write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for OpaqueTy {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ self.substitution.at(Interner, 0).hir_fmt(f)
+ }
+}
+
+impl HirDisplay for GenericArg {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.interned() {
+ crate::GenericArgData::Ty(ty) => ty.hir_fmt(f),
+ crate::GenericArgData::Lifetime(lt) => lt.hir_fmt(f),
+ crate::GenericArgData::Const(c) => c.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for Const {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = self.interned();
+ match data.value {
+ ConstValue::BoundVar(idx) => idx.hir_fmt(f),
+ ConstValue::InferenceVar(..) => write!(f, "#c#"),
+ ConstValue::Placeholder(idx) => {
+ let id = from_placeholder_idx(f.db, idx);
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.type_or_consts[id.local_id];
+ write!(f, "{}", param_data.name().unwrap())
+ }
+ ConstValue::Concrete(c) => write!(f, "{}", c.interned),
+ }
+ }
+}
+
+impl HirDisplay for BoundVar {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "?{}.{}", self.debruijn.depth(), self.index)
+ }
+}
+
+impl HirDisplay for Ty {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ match self.kind(Interner) {
+ TyKind::Never => write!(f, "!")?,
+ TyKind::Str => write!(f, "str")?,
+ TyKind::Scalar(Scalar::Bool) => write!(f, "bool")?,
+ TyKind::Scalar(Scalar::Char) => write!(f, "char")?,
+ &TyKind::Scalar(Scalar::Float(t)) => write!(f, "{}", primitive::float_ty_to_string(t))?,
+ &TyKind::Scalar(Scalar::Int(t)) => write!(f, "{}", primitive::int_ty_to_string(t))?,
+ &TyKind::Scalar(Scalar::Uint(t)) => write!(f, "{}", primitive::uint_ty_to_string(t))?,
+ TyKind::Slice(t) => {
+ write!(f, "[")?;
+ t.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TyKind::Array(t, c) => {
+ write!(f, "[")?;
+ t.hir_fmt(f)?;
+ write!(f, "; ")?;
+ c.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TyKind::Raw(m, t) | TyKind::Ref(m, _, t) => {
+ if matches!(self.kind(Interner), TyKind::Raw(..)) {
+ write!(
+ f,
+ "*{}",
+ match m {
+ Mutability::Not => "const ",
+ Mutability::Mut => "mut ",
+ }
+ )?;
+ } else {
+ write!(
+ f,
+ "&{}",
+ match m {
+ Mutability::Not => "",
+ Mutability::Mut => "mut ",
+ }
+ )?;
+ }
+
+ // FIXME: all this just to decide whether to use parentheses...
+ let contains_impl_fn = |bounds: &[QuantifiedWhereClause]| {
+ bounds.iter().any(|bound| {
+ if let WhereClause::Implemented(trait_ref) = bound.skip_binders() {
+ let trait_ = trait_ref.hir_trait_id();
+ fn_traits(f.db.upcast(), trait_).any(|it| it == trait_)
+ } else {
+ false
+ }
+ })
+ };
+ let (preds_to_print, has_impl_fn_pred) = match t.kind(Interner) {
+ TyKind::Dyn(dyn_ty) if dyn_ty.bounds.skip_binders().interned().len() > 1 => {
+ let bounds = dyn_ty.bounds.skip_binders().interned();
+ (bounds.len(), contains_impl_fn(bounds))
+ }
+ TyKind::Alias(AliasTy::Opaque(OpaqueTy {
+ opaque_ty_id,
+ substitution: parameters,
+ }))
+ | TyKind::OpaqueType(opaque_ty_id, parameters) => {
+ let impl_trait_id =
+ f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+ if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id {
+ let datas =
+ f.db.return_type_impl_traits(func)
+ .expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, parameters);
+ let mut len = bounds.skip_binders().len();
+
+ // Don't count Sized, but do count when it is absent
+ // (i.e. when an explicit ?Sized bound is set).
+ let default_sized = SizedByDefault::Sized {
+ anchor: func.lookup(f.db.upcast()).module(f.db.upcast()).krate(),
+ };
+ let sized_bounds = bounds
+ .skip_binders()
+ .iter()
+ .filter(|b| {
+ matches!(
+ b.skip_binders(),
+ WhereClause::Implemented(trait_ref)
+ if default_sized.is_sized_trait(
+ trait_ref.hir_trait_id(),
+ f.db.upcast(),
+ ),
+ )
+ })
+ .count();
+ match sized_bounds {
+ 0 => len += 1,
+ _ => {
+ len = len.saturating_sub(sized_bounds);
+ }
+ }
+
+ (len, contains_impl_fn(bounds.skip_binders()))
+ } else {
+ (0, false)
+ }
+ }
+ _ => (0, false),
+ };
+
+ if has_impl_fn_pred && preds_to_print <= 2 {
+ return t.hir_fmt(f);
+ }
+
+ if preds_to_print > 1 {
+ write!(f, "(")?;
+ t.hir_fmt(f)?;
+ write!(f, ")")?;
+ } else {
+ t.hir_fmt(f)?;
+ }
+ }
+ TyKind::Tuple(_, substs) => {
+ if substs.len(Interner) == 1 {
+ write!(f, "(")?;
+ substs.at(Interner, 0).hir_fmt(f)?;
+ write!(f, ",)")?;
+ } else {
+ write!(f, "(")?;
+ f.write_joined(&*substs.as_slice(Interner), ", ")?;
+ write!(f, ")")?;
+ }
+ }
+ TyKind::Function(fn_ptr) => {
+ let sig = CallableSig::from_fn_ptr(fn_ptr);
+ sig.hir_fmt(f)?;
+ }
+ TyKind::FnDef(def, parameters) => {
+ let def = from_chalk(f.db, *def);
+ let sig = f.db.callable_item_signature(def).substitute(Interner, parameters);
+ match def {
+ CallableDefId::FunctionId(ff) => {
+ write!(f, "fn {}", f.db.function_data(ff).name)?
+ }
+ CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ CallableDefId::EnumVariantId(e) => {
+ write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
+ }
+ };
+ if parameters.len(Interner) > 0 {
+ let generics = generics(f.db.upcast(), def.into());
+ let (parent_params, self_param, type_params, const_params, _impl_trait_params) =
+ generics.provenance_split();
+ let total_len = parent_params + self_param + type_params + const_params;
+ // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
+ if total_len > 0 {
+ write!(f, "<")?;
+ f.write_joined(&parameters.as_slice(Interner)[..total_len], ", ")?;
+ write!(f, ">")?;
+ }
+ }
+ write!(f, "(")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, ")")?;
+ let ret = sig.ret();
+ if !ret.is_unit() {
+ write!(f, " -> ")?;
+ ret.hir_fmt(f)?;
+ }
+ }
+ TyKind::Adt(AdtId(def_id), parameters) => {
+ match f.display_target {
+ DisplayTarget::Diagnostics | DisplayTarget::Test => {
+ let name = match *def_id {
+ hir_def::AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
+ hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
+ hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
+ };
+ write!(f, "{}", name)?;
+ }
+ DisplayTarget::SourceCode { module_id } => {
+ if let Some(path) = find_path::find_path(
+ f.db.upcast(),
+ ItemInNs::Types((*def_id).into()),
+ module_id,
+ ) {
+ write!(f, "{}", path)?;
+ } else {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::PathNotFound,
+ ));
+ }
+ }
+ }
+
+ if parameters.len(Interner) > 0 {
+ let parameters_to_write = if f.display_target.is_source_code()
+ || f.omit_verbose_types()
+ {
+ match self
+ .as_generic_def(f.db)
+ .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
+ .filter(|defaults| !defaults.is_empty())
+ {
+ None => parameters.as_slice(Interner),
+ Some(default_parameters) => {
+ fn should_show(
+ parameter: &GenericArg,
+ default_parameters: &[Binders<GenericArg>],
+ i: usize,
+ parameters: &Substitution,
+ ) -> bool {
+ if parameter.ty(Interner).map(|x| x.kind(Interner))
+ == Some(&TyKind::Error)
+ {
+ return true;
+ }
+ if let Some(ConstValue::Concrete(c)) =
+ parameter.constant(Interner).map(|x| x.data(Interner).value)
+ {
+ if c.interned == ConstScalar::Unknown {
+ return true;
+ }
+ }
+ let default_parameter = match default_parameters.get(i) {
+ Some(x) => x,
+ None => return true,
+ };
+ let actual_default = default_parameter
+ .clone()
+ .substitute(Interner, &subst_prefix(parameters, i));
+ parameter != &actual_default
+ }
+ let mut default_from = 0;
+ for (i, parameter) in parameters.iter(Interner).enumerate() {
+ if should_show(parameter, &default_parameters, i, parameters) {
+ default_from = i + 1;
+ }
+ }
+ &parameters.as_slice(Interner)[0..default_from]
+ }
+ }
+ } else {
+ parameters.as_slice(Interner)
+ };
+ if !parameters_to_write.is_empty() {
+ write!(f, "<")?;
+
+ if f.display_target.is_source_code() {
+ let mut first = true;
+ for generic_arg in parameters_to_write {
+ if !first {
+ write!(f, ", ")?;
+ }
+ first = false;
+
+ if generic_arg.ty(Interner).map(|ty| ty.kind(Interner))
+ == Some(&TyKind::Error)
+ {
+ write!(f, "_")?;
+ } else {
+ generic_arg.hir_fmt(f)?;
+ }
+ }
+ } else {
+ f.write_joined(parameters_to_write, ", ")?;
+ }
+
+ write!(f, ">")?;
+ }
+ }
+ }
+ TyKind::AssociatedType(assoc_type_id, parameters) => {
+ let type_alias = from_assoc_type_id(*assoc_type_id);
+ let trait_ = match type_alias.lookup(f.db.upcast()).container {
+ ItemContainerId::TraitId(it) => it,
+ _ => panic!("not an associated type"),
+ };
+ let trait_ = f.db.trait_data(trait_);
+ let type_alias_data = f.db.type_alias_data(type_alias);
+
+ // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
+ if f.display_target.is_test() {
+ write!(f, "{}::{}", trait_.name, type_alias_data.name)?;
+ if parameters.len(Interner) > 0 {
+ write!(f, "<")?;
+ f.write_joined(&*parameters.as_slice(Interner), ", ")?;
+ write!(f, ">")?;
+ }
+ } else {
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(type_alias),
+ substitution: parameters.clone(),
+ };
+
+ projection_ty.hir_fmt(f)?;
+ }
+ }
+ TyKind::Foreign(type_alias) => {
+ let type_alias = f.db.type_alias_data(from_foreign_def_id(*type_alias));
+ write!(f, "{}", type_alias.name)?;
+ }
+ TyKind::OpaqueType(opaque_ty_id, parameters) => {
+ let impl_trait_id = f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+ match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, &parameters);
+ let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ bounds.skip_binders(),
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
+ }
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ write!(f, "impl Future<Output = ")?;
+ parameters.at(Interner, 0).hir_fmt(f)?;
+ write!(f, ">")?;
+ }
+ }
+ }
+ TyKind::Closure(.., substs) => {
+ if f.display_target.is_source_code() {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::Closure,
+ ));
+ }
+ let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(f.db);
+ if let Some(sig) = sig {
+ if sig.params().is_empty() {
+ write!(f, "||")?;
+ } else if f.should_truncate() {
+ write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
+ } else {
+ write!(f, "|")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, "|")?;
+ };
+
+ write!(f, " -> ")?;
+ sig.ret().hir_fmt(f)?;
+ } else {
+ write!(f, "{{closure}}")?;
+ }
+ }
+ TyKind::Placeholder(idx) => {
+ let id = from_placeholder_idx(f.db, *idx);
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.type_or_consts[id.local_id];
+ match param_data {
+ TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+ TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
+ write!(f, "{}", p.name.clone().unwrap_or_else(Name::missing))?
+ }
+ TypeParamProvenance::ArgumentImplTrait => {
+ let substs = generics.placeholder_subst(f.db);
+ let bounds =
+ f.db.generic_predicates(id.parent)
+ .iter()
+ .map(|pred| pred.clone().substitute(Interner, &substs))
+ .filter(|wc| match &wc.skip_binders() {
+ WhereClause::Implemented(tr) => {
+ &tr.self_type_parameter(Interner) == self
+ }
+ WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(proj),
+ ty: _,
+ }) => &proj.self_type_parameter(Interner) == self,
+ _ => false,
+ })
+ .collect::<Vec<_>>();
+ let krate = id.parent.module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ &bounds,
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ }
+ },
+ TypeOrConstParamData::ConstParamData(p) => {
+ write!(f, "{}", p.name)?;
+ }
+ }
+ }
+ TyKind::BoundVar(idx) => idx.hir_fmt(f)?,
+ TyKind::Dyn(dyn_ty) => {
+ write_bounds_like_dyn_trait_with_prefix(
+ "dyn",
+ dyn_ty.bounds.skip_binders().interned(),
+ SizedByDefault::NotSized,
+ f,
+ )?;
+ }
+ TyKind::Alias(AliasTy::Projection(p_ty)) => p_ty.hir_fmt(f)?,
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ let impl_trait_id = f.db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into());
+ match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, &opaque_ty.substitution);
+ let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ bounds.skip_binders(),
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ }
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ write!(f, "{{async block}}")?;
+ }
+ };
+ }
+ TyKind::Error => {
+ if f.display_target.is_source_code() {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::UnknownType,
+ ));
+ }
+ write!(f, "{{unknown}}")?;
+ }
+ TyKind::InferenceVar(..) => write!(f, "_")?,
+ TyKind::Generator(..) => write!(f, "{{generator}}")?,
+ TyKind::GeneratorWitness(..) => write!(f, "{{generator witness}}")?,
+ }
+ Ok(())
+ }
+}
+
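+// Renders a function pointer signature such as `fn(u32, &str) -> bool`; for a
+// C-variadic signature a trailing `...` is added, e.g. `fn(*const u8, ...)`.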
+impl HirDisplay for CallableSig {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "fn(")?;
+ f.write_joined(self.params(), ", ")?;
+ if self.is_varargs {
+ if self.params().is_empty() {
+ write!(f, "...")?;
+ } else {
+ write!(f, ", ...")?;
+ }
+ }
+ write!(f, ")")?;
+ let ret = self.ret();
+ if !ret.is_unit() {
+ write!(f, " -> ")?;
+ ret.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> {
+ let krate = trait_.lookup(db).container.krate();
+ utils::fn_traits(db, krate)
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum SizedByDefault {
+ NotSized,
+ Sized { anchor: CrateId },
+}
+
+impl SizedByDefault {
+ fn is_sized_trait(self, trait_: TraitId, db: &dyn DefDatabase) -> bool {
+ match self {
+ Self::NotSized => false,
+ Self::Sized { anchor } => {
+ let sized_trait = db
+ .lang_item(anchor, SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ Some(trait_) == sized_trait
+ }
+ }
+ }
+}
+
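+// Writes a bound list after `prefix`, e.g. `dyn Iterator<Item = u32> + Send` or
+// `impl Fn(&str) -> bool`; with `SizedByDefault::Sized`, an absent `Sized` bound
+// is rendered as `?Sized`.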
+pub fn write_bounds_like_dyn_trait_with_prefix(
+ prefix: &str,
+ predicates: &[QuantifiedWhereClause],
+ default_sized: SizedByDefault,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ write!(f, "{}", prefix)?;
+ if !predicates.is_empty()
+ || predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. })
+ {
+ write!(f, " ")?;
+ write_bounds_like_dyn_trait(predicates, default_sized, f)
+ } else {
+ Ok(())
+ }
+}
+
+fn write_bounds_like_dyn_trait(
+ predicates: &[QuantifiedWhereClause],
+ default_sized: SizedByDefault,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ // Note: This code is written to produce nice results (i.e.
+ // corresponding to surface Rust) for types that can occur in
+ // actual Rust. It will have weird results if the predicates
+ // aren't as expected (i.e. self types = $0, projection
+ // predicates for a certain trait come after the Implemented
+ // predicate for that trait).
+ let mut first = true;
+ let mut angle_open = false;
+ let mut is_fn_trait = false;
+ let mut is_sized = false;
+ for p in predicates.iter() {
+ match p.skip_binders() {
+ WhereClause::Implemented(trait_ref) => {
+ let trait_ = trait_ref.hir_trait_id();
+ if default_sized.is_sized_trait(trait_, f.db.upcast()) {
+ is_sized = true;
+ if matches!(default_sized, SizedByDefault::Sized { .. }) {
+ // Don't print +Sized, but rather +?Sized if absent.
+ continue;
+ }
+ }
+ if !is_fn_trait {
+ is_fn_trait = fn_traits(f.db.upcast(), trait_).any(|it| it == trait_);
+ }
+ if !is_fn_trait && angle_open {
+ write!(f, ">")?;
+ angle_open = false;
+ }
+ if !first {
+ write!(f, " + ")?;
+ }
+ // We assume that the self type is ^0.0 (i.e. the
+ // existential) here, which is the only thing that's
+ // possible in actual Rust, and hence don't print it
+ write!(f, "{}", f.db.trait_data(trait_).name)?;
+ if let [_, params @ ..] = &*trait_ref.substitution.as_slice(Interner) {
+ if is_fn_trait {
+ if let Some(args) =
+ params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
+ {
+ write!(f, "(")?;
+ f.write_joined(args.as_slice(Interner), ", ")?;
+ write!(f, ")")?;
+ }
+ } else if !params.is_empty() {
+ write!(f, "<")?;
+ f.write_joined(params, ", ")?;
+ // there might be assoc type bindings, so we leave the angle brackets open
+ angle_open = true;
+ }
+ }
+ }
+ WhereClause::AliasEq(alias_eq) if is_fn_trait => {
+ is_fn_trait = false;
+ if !alias_eq.ty.is_unit() {
+ write!(f, " -> ")?;
+ alias_eq.ty.hir_fmt(f)?;
+ }
+ }
+ WhereClause::AliasEq(AliasEq { ty, alias }) => {
+ // in types in actual Rust, these will always come
+ // after the corresponding Implemented predicate
+ if angle_open {
+ write!(f, ", ")?;
+ } else {
+ write!(f, "<")?;
+ angle_open = true;
+ }
+ if let AliasTy::Projection(proj) = alias {
+ let type_alias =
+ f.db.type_alias_data(from_assoc_type_id(proj.associated_ty_id));
+ write!(f, "{} = ", type_alias.name)?;
+ }
+ ty.hir_fmt(f)?;
+ }
+
+ // FIXME implement these
+ WhereClause::LifetimeOutlives(_) => {}
+ WhereClause::TypeOutlives(_) => {}
+ }
+ first = false;
+ }
+ if angle_open {
+ write!(f, ">")?;
+ }
+ if matches!(default_sized, SizedByDefault::Sized { .. }) {
+ if !is_sized {
+ write!(f, "{}?Sized", if first { "" } else { " + " })?;
+ } else if first {
+ write!(f, "Sized")?;
+ }
+ }
+ Ok(())
+}
+
+fn fmt_trait_ref(
+ tr: &TraitRef,
+ f: &mut HirFormatter<'_>,
+ use_as: bool,
+) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ tr.self_type_parameter(Interner).hir_fmt(f)?;
+ if use_as {
+ write!(f, " as ")?;
+ } else {
+ write!(f, ": ")?;
+ }
+ write!(f, "{}", f.db.trait_data(tr.hir_trait_id()).name)?;
+ if tr.substitution.len(Interner) > 1 {
+ write!(f, "<")?;
+ f.write_joined(&tr.substitution.as_slice(Interner)[1..], ", ")?;
+ write!(f, ">")?;
+ }
+ Ok(())
+}
+
+impl HirDisplay for TraitRef {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ fmt_trait_ref(self, f, false)
+ }
+}
+
+impl HirDisplay for WhereClause {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ match self {
+ WhereClause::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
+ WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
+ write!(f, "<")?;
+ fmt_trait_ref(&projection_ty.trait_ref(f.db), f, true)?;
+ write!(
+ f,
+ ">::{} = ",
+ f.db.type_alias_data(from_assoc_type_id(projection_ty.associated_ty_id)).name,
+ )?;
+ ty.hir_fmt(f)?;
+ }
+ WhereClause::AliasEq(_) => write!(f, "{{error}}")?,
+
+ // FIXME implement these
+ WhereClause::TypeOutlives(..) => {}
+ WhereClause::LifetimeOutlives(..) => {}
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for LifetimeOutlives {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ self.a.hir_fmt(f)?;
+ write!(f, ": ")?;
+ self.b.hir_fmt(f)
+ }
+}
+
+impl HirDisplay for Lifetime {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ self.interned().hir_fmt(f)
+ }
+}
+
+impl HirDisplay for LifetimeData {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
+ LifetimeData::InferenceVar(_) => write!(f, "_"),
+ LifetimeData::Placeholder(idx) => {
+ let id = lt_from_placeholder_idx(f.db, *idx);
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.lifetimes[id.local_id];
+ write!(f, "{}", param_data.name)
+ }
+ LifetimeData::Static => write!(f, "'static"),
+ LifetimeData::Empty(_) => Ok(()),
+ LifetimeData::Erased => Ok(()),
+ LifetimeData::Phantom(_, _) => Ok(()),
+ }
+ }
+}
+
+impl HirDisplay for DomainGoal {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ DomainGoal::Holds(wc) => {
+ write!(f, "Holds(")?;
+ wc.hir_fmt(f)?;
+ write!(f, ")")?;
+ }
+ _ => write!(f, "?")?,
+ }
+ Ok(())
+ }
+}
+
+pub fn write_visibility(
+ module_id: ModuleId,
+ vis: Visibility,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ match vis {
+ Visibility::Public => write!(f, "pub "),
+ Visibility::Module(vis_id) => {
+ let def_map = module_id.def_map(f.db.upcast());
+ let root_module_id = def_map.module_id(def_map.root());
+ if vis_id == module_id {
+ // pub(self) or omitted
+ Ok(())
+ } else if root_module_id == vis_id {
+ write!(f, "pub(crate) ")
+ } else if module_id.containing_module(f.db.upcast()) == Some(vis_id) {
+ write!(f, "pub(super) ")
+ } else {
+ write!(f, "pub(in ...) ")
+ }
+ }
+ }
+}
+
+impl HirDisplay for TypeRef {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ TypeRef::Never => write!(f, "!")?,
+ TypeRef::Placeholder => write!(f, "_")?,
+ TypeRef::Tuple(elems) => {
+ write!(f, "(")?;
+ f.write_joined(elems, ", ")?;
+ if elems.len() == 1 {
+ write!(f, ",")?;
+ }
+ write!(f, ")")?;
+ }
+ TypeRef::Path(path) => path.hir_fmt(f)?,
+ TypeRef::RawPtr(inner, mutability) => {
+ let mutability = match mutability {
+ hir_def::type_ref::Mutability::Shared => "*const ",
+ hir_def::type_ref::Mutability::Mut => "*mut ",
+ };
+ write!(f, "{}", mutability)?;
+ inner.hir_fmt(f)?;
+ }
+ TypeRef::Reference(inner, lifetime, mutability) => {
+ let mutability = match mutability {
+ hir_def::type_ref::Mutability::Shared => "",
+ hir_def::type_ref::Mutability::Mut => "mut ",
+ };
+ write!(f, "&")?;
+ if let Some(lifetime) = lifetime {
+ write!(f, "{} ", lifetime.name)?;
+ }
+ write!(f, "{}", mutability)?;
+ inner.hir_fmt(f)?;
+ }
+ TypeRef::Array(inner, len) => {
+ write!(f, "[")?;
+ inner.hir_fmt(f)?;
+ write!(f, "; {}]", len)?;
+ }
+ TypeRef::Slice(inner) => {
+ write!(f, "[")?;
+ inner.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TypeRef::Fn(parameters, is_varargs) => {
+ // FIXME: Function pointer qualifiers.
+ write!(f, "fn(")?;
+ if let Some(((_, return_type), function_parameters)) = parameters.split_last() {
+ for index in 0..function_parameters.len() {
+ let (param_name, param_type) = &function_parameters[index];
+ if let Some(name) = param_name {
+ write!(f, "{}: ", name)?;
+ }
+
+ param_type.hir_fmt(f)?;
+
+ if index != function_parameters.len() - 1 {
+ write!(f, ", ")?;
+ }
+ }
+ if *is_varargs {
+ write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?;
+ }
+ write!(f, ")")?;
+ match &return_type {
+ TypeRef::Tuple(tup) if tup.is_empty() => {}
+ _ => {
+ write!(f, " -> ")?;
+ return_type.hir_fmt(f)?;
+ }
+ }
+ }
+ }
+ TypeRef::ImplTrait(bounds) => {
+ write!(f, "impl ")?;
+ f.write_joined(bounds, " + ")?;
+ }
+ TypeRef::DynTrait(bounds) => {
+ write!(f, "dyn ")?;
+ f.write_joined(bounds, " + ")?;
+ }
+ TypeRef::Macro(macro_call) => {
+ let macro_call = macro_call.to_node(f.db.upcast());
+ let ctx = body::LowerCtx::with_hygiene(f.db.upcast(), &Hygiene::new_unhygienic());
+ match macro_call.path() {
+ Some(path) => match Path::from_src(path, &ctx) {
+ Some(path) => path.hir_fmt(f)?,
+ None => write!(f, "{{macro}}")?,
+ },
+ None => write!(f, "{{macro}}")?,
+ }
+ write!(f, "!(..)")?;
+ }
+ TypeRef::Error => write!(f, "{{error}}")?,
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for TypeBound {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ TypeBound::Path(path, modifier) => {
+ match modifier {
+ TraitBoundModifier::None => (),
+ TraitBoundModifier::Maybe => write!(f, "?")?,
+ }
+ path.hir_fmt(f)
+ }
+ TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
+ TypeBound::ForLifetime(lifetimes, path) => {
+ write!(f, "for<{}> ", lifetimes.iter().format(", "))?;
+ path.hir_fmt(f)
+ }
+ TypeBound::Error => write!(f, "{{error}}"),
+ }
+ }
+}
+
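+// Note: paths desugared from parenthesized `Fn`-sugar are printed back in that
+// sugar, e.g. `Fn(u32) -> bool` rather than `Fn<(u32,), Output = bool>`.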
+impl HirDisplay for Path {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match (self.type_anchor(), self.kind()) {
+ (Some(anchor), _) => {
+ write!(f, "<")?;
+ anchor.hir_fmt(f)?;
+ write!(f, ">")?;
+ }
+ (_, PathKind::Plain) => {}
+ (_, PathKind::Abs) => {}
+ (_, PathKind::Crate) => write!(f, "crate")?,
+ (_, PathKind::Super(0)) => write!(f, "self")?,
+ (_, PathKind::Super(n)) => {
+ for i in 0..*n {
+ if i > 0 {
+ write!(f, "::")?;
+ }
+ write!(f, "super")?;
+ }
+ }
+ (_, PathKind::DollarCrate(id)) => {
+ // Resolve `$crate` to the crate's display name.
+ // FIXME: should use the dependency name instead if available, but that depends on
+ // the crate invoking `HirDisplay`
+ let crate_graph = f.db.crate_graph();
+ let name = crate_graph[*id]
+ .display_name
+ .as_ref()
+ .map(|name| name.canonical_name())
+ .unwrap_or("$crate");
+ write!(f, "{name}")?
+ }
+ }
+
+ for (seg_idx, segment) in self.segments().iter().enumerate() {
+ if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
+ write!(f, "::")?;
+ }
+ write!(f, "{}", segment.name)?;
+ if let Some(generic_args) = segment.args_and_bindings {
+ // We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
+ // Do we actually format expressions?
+ if generic_args.desugared_from_fn {
+ // First argument will be a tuple, which already includes the parentheses.
+ // If the tuple only contains 1 item, write it manually to avoid the trailing `,`.
+ if let hir_def::path::GenericArg::Type(TypeRef::Tuple(v)) =
+ &generic_args.args[0]
+ {
+ if v.len() == 1 {
+ write!(f, "(")?;
+ v[0].hir_fmt(f)?;
+ write!(f, ")")?;
+ } else {
+ generic_args.args[0].hir_fmt(f)?;
+ }
+ }
+ if let Some(ret) = &generic_args.bindings[0].type_ref {
+ if !matches!(ret, TypeRef::Tuple(v) if v.is_empty()) {
+ write!(f, " -> ")?;
+ ret.hir_fmt(f)?;
+ }
+ }
+ return Ok(());
+ }
+
+ write!(f, "<")?;
+ let mut first = true;
+ for arg in &generic_args.args {
+ if first {
+ first = false;
+ if generic_args.has_self_type {
+ // FIXME: Convert to `<Ty as Trait>` form.
+ write!(f, "Self = ")?;
+ }
+ } else {
+ write!(f, ", ")?;
+ }
+ arg.hir_fmt(f)?;
+ }
+ for binding in &generic_args.bindings {
+ if first {
+ first = false;
+ } else {
+ write!(f, ", ")?;
+ }
+ write!(f, "{}", binding.name)?;
+ match &binding.type_ref {
+ Some(ty) => {
+ write!(f, " = ")?;
+ ty.hir_fmt(f)?
+ }
+ None => {
+ write!(f, ": ")?;
+ f.write_joined(&binding.bounds, " + ")?;
+ }
+ }
+ }
+ write!(f, ">")?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for hir_def::path::GenericArg {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
+ hir_def::path::GenericArg::Const(c) => write!(f, "{}", c),
+ hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
new file mode 100644
index 000000000..46eeea0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -0,0 +1,1088 @@
+//! Type inference, i.e. the process of walking through the code and determining
+//! the type of each expression and pattern.
+//!
+//! For type inference, compare the implementations in rustc (the various
+//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
+//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
+//! inference here is the `infer` function, which infers the types of all
+//! expressions in a given function.
+//!
+//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
+//! which represent currently unknown types; as we walk through the expressions,
+//! we might determine that certain variables need to be equal to each other, or
+//! to certain types. To record this, we use the union-find implementation from
+//! the `ena` crate, which is extracted from rustc.
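+//!
+//! For example (roughly): in `let x = Default::default(); let y: u32 = x;`, the
+//! call to `Default::default` is first given a fresh type variable, which is then
+//! unified with `u32` while checking the second binding, so `x: u32`.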
+
+use std::ops::Index;
+use std::sync::Arc;
+
+use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
+use hir_def::{
+ body::Body,
+ data::{ConstData, StaticData},
+ expr::{BindingAnnotation, ExprId, PatId},
+ lang_item::LangItemTarget,
+ path::{path, Path},
+ resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
+ type_ref::TypeRef,
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup,
+ TraitId, TypeAliasId, VariantId,
+};
+use hir_expand::name::{name, Name};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashMap;
+use stdx::{always, impl_from};
+
+use crate::{
+ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany,
+ lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal,
+ GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+// This lint has a false positive here. See the link below for details.
+//
+// https://github.com/rust-lang/rust/issues/57411
+#[allow(unreachable_pub)]
+pub use coerce::could_coerce;
+#[allow(unreachable_pub)]
+pub use unify::could_unify;
+
+pub(crate) mod unify;
+mod path;
+mod expr;
+mod pat;
+mod coerce;
+mod closure;
+
+/// The entry point of type inference.
+pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+ let _p = profile::span("infer_query");
+ let resolver = def.resolver(db.upcast());
+ let body = db.body(def);
+ let mut ctx = InferenceContext::new(db, def, &body, resolver);
+
+ match def {
+ DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
+ DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
+ DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
+ }
+
+ ctx.infer_body();
+
+ Arc::new(ctx.resolve_all())
+}
+
+/// Fully normalize all the types found within `ty`, in the context of the `owner` body definition.
+///
+/// This is appropriate to use only after type-check: it assumes
+/// that normalization will succeed, for example.
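+///
+/// E.g. a leftover projection such as `<Vec<u32> as IntoIterator>::Item` within
+/// `ty` is replaced by `u32` here, assuming the trait solver can discharge the
+/// corresponding obligation.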
+pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> Ty {
+ if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) {
+ return ty;
+ }
+ let krate = owner.module(db.upcast()).krate();
+ let trait_env = owner
+ .as_generic_def_id()
+ .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
+ let mut table = unify::InferenceTable::new(db, trait_env);
+
+ let ty_with_vars = table.normalize_associated_types_in(ty);
+ table.resolve_obligations_as_possible();
+ table.propagate_diverging_flag();
+ table.resolve_completely(ty_with_vars)
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+enum ExprOrPatId {
+ ExprId(ExprId),
+ PatId(PatId),
+}
+impl_from!(ExprId, PatId for ExprOrPatId);
+
+/// Binding modes inferred for patterns.
+/// <https://doc.rust-lang.org/reference/patterns.html#binding-modes>
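+///
+/// E.g. (illustrative) matching `Some(x)` against an `&Option<i32>` scrutinee
+/// switches the default binding mode to `Ref(Mutability::Not)`, so `x` is
+/// inferred as `&i32`.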
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum BindingMode {
+ Move,
+ Ref(Mutability),
+}
+
+impl BindingMode {
+ fn convert(annotation: BindingAnnotation) -> BindingMode {
+ match annotation {
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
+ BindingAnnotation::Ref => BindingMode::Ref(Mutability::Not),
+ BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
+ }
+ }
+}
+
+impl Default for BindingMode {
+ fn default() -> Self {
+ BindingMode::Move
+ }
+}
+
+/// Used to generalize patterns and assignee expressions.
+trait PatLike: Into<ExprOrPatId> + Copy {
+ type BindingMode: Copy;
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ default_bm: Self::BindingMode,
+ ) -> Ty;
+}
+
+impl PatLike for ExprId {
+ type BindingMode = ();
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ _: Self::BindingMode,
+ ) -> Ty {
+ this.infer_assignee_expr(id, expected_ty)
+ }
+}
+
+impl PatLike for PatId {
+ type BindingMode = BindingMode;
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ default_bm: Self::BindingMode,
+ ) -> Ty {
+ this.infer_pat(id, expected_ty, default_bm)
+ }
+}
+
+#[derive(Debug)]
+pub(crate) struct InferOk<T> {
+ value: T,
+ goals: Vec<InEnvironment<Goal>>,
+}
+
+impl<T> InferOk<T> {
+ fn map<U>(self, f: impl FnOnce(T) -> U) -> InferOk<U> {
+ InferOk { value: f(self.value), goals: self.goals }
+ }
+}
+
+#[derive(Debug)]
+pub(crate) struct TypeError;
+pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum InferenceDiagnostic {
+ NoSuchField { expr: ExprId },
+ BreakOutsideOfLoop { expr: ExprId },
+ MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
+}
+
+/// A mismatch between an expected and an inferred type.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeMismatch {
+ pub expected: Ty,
+ pub actual: Ty,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct InternedStandardTypes {
+ unknown: Ty,
+ bool_: Ty,
+ unit: Ty,
+}
+
+impl Default for InternedStandardTypes {
+ fn default() -> Self {
+ InternedStandardTypes {
+ unknown: TyKind::Error.intern(Interner),
+ bool_: TyKind::Scalar(Scalar::Bool).intern(Interner),
+ unit: TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
+ }
+ }
+}
+/// Represents coercing a value to a different type of value.
+///
+/// We transform values by following a number of `Adjust` steps in order.
+/// See the documentation on variants of `Adjust` for more details.
+///
+/// Here are some common scenarios:
+///
+/// 1. The simplest cases are where a pointer is not adjusted between fat and thin.
+/// Here the pointer will be dereferenced N times (where a dereference can
+/// happen to raw or borrowed pointers or any smart pointer which implements
+/// Deref, including Box<_>). The types of dereferences are given by
+/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
+/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
+/// `false`.
+///
+/// 2. A thin-to-fat coercion involves unsizing the underlying data. We start
+/// with a thin pointer, deref a number of times, unsize the underlying data,
+/// then autoref. The 'unsize' phase may change a fixed length array to a
+/// dynamically sized one, a concrete object to a trait object, or statically
+/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
+/// represented by:
+///
+/// ```
+/// Deref(None) -> [i32; 4],
+/// Borrow(AutoBorrow::Ref) -> &[i32; 4],
+/// Unsize -> &[i32],
+/// ```
+///
+/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
+/// E.g., for `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>.
+/// The autoderef and -ref are the same as in the above example, but the type
+/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
+/// the underlying conversions from `[i32; 4]` to `[i32]`.
+///
+/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In
+/// that case, we have the pointer we need coming in, so there are no
+/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
+/// At some point, of course, `Box` should move out of the compiler, in which
+/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
+/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct Adjustment {
+ pub kind: Adjust,
+ pub target: Ty,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Adjust {
+ /// Go from ! to any type.
+ NeverToAny,
+ /// Dereference once, producing a place.
+ Deref(Option<OverloadedDeref>),
+ /// Take the address and produce either a `&` or `*` pointer.
+ Borrow(AutoBorrow),
+ Pointer(PointerCast),
+}
+
+/// An overloaded autoderef step, representing a `Deref(Mut)::deref(_mut)`
+/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
+/// The target type is `U` in both cases, with the region and mutability
+/// being those shared by both the receiver and the returned reference.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct OverloadedDeref(pub Mutability);
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum AutoBorrow {
+ /// Converts from T to &T.
+ Ref(Mutability),
+ /// Converts from T to *T.
+ RawPtr(Mutability),
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum PointerCast {
+ /// Go from a fn-item type to a fn-pointer type.
+ ReifyFnPointer,
+
+ /// Go from a safe fn pointer to an unsafe fn pointer.
+ UnsafeFnPointer,
+
+ /// Go from a non-capturing closure to an fn pointer or an unsafe fn pointer.
+ /// It cannot convert a closure that requires unsafe.
+ ClosureFnPointer(Safety),
+
+ /// Go from a mut raw pointer to a const raw pointer.
+ MutToConstPointer,
+
+ #[allow(dead_code)]
+ /// Go from `*const [T; N]` to `*const T`
+ ArrayToPointer,
+
+ /// Unsize a pointer/reference value, e.g., `&[T; n]` to
+ /// `&[T]`. Note that the source could be a thin or fat pointer.
+ /// This will do things like convert thin pointers to fat
+ /// pointers, or convert structs containing thin pointers to
+ /// structs containing fat pointers, or convert between fat
+ /// pointers. We don't store the details of how the transform is
+ /// done (in fact, we don't know that, because it might depend on
+ /// the precise type parameters). We just store the target
+ /// type. Codegen backends and miri figure out what has to be done
+ /// based on the precise source/target type at hand.
+ Unsize,
+}
+
+/// The result of type inference: A mapping from expressions and patterns to types.
+#[derive(Clone, PartialEq, Eq, Debug, Default)]
+pub struct InferenceResult {
+ /// For each method call expr, records the function it resolves to.
+ method_resolutions: FxHashMap<ExprId, (FunctionId, Substitution)>,
+ /// For each field access expr, records the field it resolves to.
+ field_resolutions: FxHashMap<ExprId, FieldId>,
+ /// For each struct literal or pattern, records the variant it resolves to.
+ variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
+ /// For each associated item, records what it resolves to.
+ assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
+ pub diagnostics: Vec<InferenceDiagnostic>,
+ pub type_of_expr: ArenaMap<ExprId, Ty>,
+ /// For each pattern record the type it resolves to.
+ ///
+ /// **Note**: When a pattern type is resolved it may still contain
+ /// unresolved or missing subpatterns or subpatterns of mismatched types.
+ pub type_of_pat: ArenaMap<PatId, Ty>,
+ type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
+ /// Interned Unknown to return references to.
+ standard_types: InternedStandardTypes,
+ /// Stores the types which were implicitly dereferenced in pattern binding modes.
+ pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+ pub pat_binding_modes: FxHashMap<PatId, BindingMode>,
+ pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
+}
+
+impl InferenceResult {
+ pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, Substitution)> {
+ self.method_resolutions.get(&expr).cloned()
+ }
+ pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
+ self.field_resolutions.get(&expr).copied()
+ }
+ pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
+ self.variant_resolutions.get(&id.into()).copied()
+ }
+ pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
+ self.variant_resolutions.get(&id.into()).copied()
+ }
+ pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
+ self.assoc_resolutions.get(&id.into()).copied()
+ }
+ pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
+ self.assoc_resolutions.get(&id.into()).copied()
+ }
+ pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
+ self.type_mismatches.get(&expr.into())
+ }
+ pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
+ self.type_mismatches.get(&pat.into())
+ }
+ pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
+ self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
+ ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
+ _ => None,
+ })
+ }
+ pub fn pat_type_mismatches(&self) -> impl Iterator<Item = (PatId, &TypeMismatch)> {
+ self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
+ ExprOrPatId::PatId(pat) => Some((pat, mismatch)),
+ _ => None,
+ })
+ }
+}
+
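+// The result can be indexed directly by expression or pattern id (e.g.
+// `&result[expr_id]`); missing entries fall back to the interned `{unknown}` type.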
+impl Index<ExprId> for InferenceResult {
+ type Output = Ty;
+
+ fn index(&self, expr: ExprId) -> &Ty {
+ self.type_of_expr.get(expr).unwrap_or(&self.standard_types.unknown)
+ }
+}
+
+impl Index<PatId> for InferenceResult {
+ type Output = Ty;
+
+ fn index(&self, pat: PatId) -> &Ty {
+ self.type_of_pat.get(pat).unwrap_or(&self.standard_types.unknown)
+ }
+}
+
+/// The inference context contains all information needed during type inference.
+#[derive(Clone, Debug)]
+pub(crate) struct InferenceContext<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) owner: DefWithBodyId,
+ pub(crate) body: &'a Body,
+ pub(crate) resolver: Resolver,
+ table: unify::InferenceTable<'a>,
+ trait_env: Arc<TraitEnvironment>,
+ pub(crate) result: InferenceResult,
+ /// The return type of the function being inferred, or of the closure or async block if
+ /// we're currently within one.
+ ///
+ /// We might consider using a nested inference context for checking
+ /// closures, but currently this is the only field that will change there,
+ /// so it doesn't make sense.
+ return_ty: Ty,
+ diverges: Diverges,
+ breakables: Vec<BreakableContext>,
+}
+
+#[derive(Clone, Debug)]
+struct BreakableContext {
+ may_break: bool,
+ coerce: CoerceMany,
+ label: Option<name::Name>,
+}
+
+fn find_breakable<'c>(
+ ctxs: &'c mut [BreakableContext],
+ label: Option<&name::Name>,
+) -> Option<&'c mut BreakableContext> {
+ match label {
+ Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label),
+ None => ctxs.last_mut(),
+ }
+}
+
+impl<'a> InferenceContext<'a> {
+ fn new(
+ db: &'a dyn HirDatabase,
+ owner: DefWithBodyId,
+ body: &'a Body,
+ resolver: Resolver,
+ ) -> Self {
+ let krate = owner.module(db.upcast()).krate();
+ let trait_env = owner
+ .as_generic_def_id()
+ .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
+ InferenceContext {
+ result: InferenceResult::default(),
+ table: unify::InferenceTable::new(db, trait_env.clone()),
+ trait_env,
+ return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature
+ db,
+ owner,
+ body,
+ resolver,
+ diverges: Diverges::Maybe,
+ breakables: Vec::new(),
+ }
+ }
+
+ fn resolve_all(self) -> InferenceResult {
+ let InferenceContext { mut table, mut result, .. } = self;
+
+ // FIXME resolve obligations as well (use Guidance if necessary)
+ table.resolve_obligations_as_possible();
+
+ // make sure diverging type variables are marked as such
+ table.propagate_diverging_flag();
+ for ty in result.type_of_expr.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for ty in result.type_of_pat.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for mismatch in result.type_mismatches.values_mut() {
+ mismatch.expected = table.resolve_completely(mismatch.expected.clone());
+ mismatch.actual = table.resolve_completely(mismatch.actual.clone());
+ }
+ for (_, subst) in result.method_resolutions.values_mut() {
+ *subst = table.resolve_completely(subst.clone());
+ }
+ for adjustment in result.expr_adjustments.values_mut().flatten() {
+ adjustment.target = table.resolve_completely(adjustment.target.clone());
+ }
+ for adjustment in result.pat_adjustments.values_mut().flatten() {
+ *adjustment = table.resolve_completely(adjustment.clone());
+ }
+ result
+ }
+
+ fn collect_const(&mut self, data: &ConstData) {
+ self.return_ty = self.make_ty(&data.type_ref);
+ }
+
+ fn collect_static(&mut self, data: &StaticData) {
+ self.return_ty = self.make_ty(&data.type_ref);
+ }
+
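+ // Illustrative: for `fn f() -> impl Iterator<Item = u32>`, the opaque return
+ // type is replaced below by a fresh inference variable that is obligated to
+ // satisfy the `Iterator<Item = u32>` bound; the body is then checked against
+ // that variable.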
+ fn collect_fn(&mut self, func: FunctionId) {
+ let data = self.db.function_data(func);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Param);
+ let param_tys =
+ data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
+ for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) {
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ self.infer_pat(*pat, &ty, BindingMode::default());
+ }
+ let error_ty = &TypeRef::Error;
+ let return_ty = if data.has_async_kw() {
+ data.async_ret_type.as_deref().unwrap_or(error_ty)
+ } else {
+ &*data.ret_type
+ };
+ let return_ty = self.make_ty_with_mode(return_ty, ImplTraitLoweringMode::Opaque);
+ self.return_ty = return_ty;
+
+ if let Some(rpits) = self.db.return_type_impl_traits(func) {
+ // RPIT opaque types use substitution of their parent function.
+ let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
+ self.return_ty = fold_tys(
+ self.return_ty.clone(),
+ |ty, _| {
+ let opaque_ty_id = match ty.kind(Interner) {
+ TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
+ _ => return ty,
+ };
+ let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
+ ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
+ _ => unreachable!(),
+ };
+ let bounds = (*rpits).map_ref(|rpits| {
+ rpits.impl_traits[idx as usize].bounds.map_ref(|it| it.into_iter())
+ });
+ let var = self.table.new_type_var();
+ let var_subst = Substitution::from1(Interner, var.clone());
+ for bound in bounds {
+ let predicate =
+ bound.map(|it| it.cloned()).substitute(Interner, &fn_placeholders);
+ let (var_predicate, binders) = predicate
+ .substitute(Interner, &var_subst)
+ .into_value_and_skipped_binders();
+ always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
+ self.push_obligation(var_predicate.cast(Interner));
+ }
+ var
+ },
+ DebruijnIndex::INNERMOST,
+ );
+ }
+ }
+
+ fn infer_body(&mut self) {
+ self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
+ }
+
+ fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
+ self.result.type_of_expr.insert(expr, ty);
+ }
+
+ fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
+ self.result.expr_adjustments.insert(expr, adjustments);
+ }
+
+ fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) {
+ self.result.method_resolutions.insert(expr, (func, subst));
+ }
+
+ fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
+ self.result.variant_resolutions.insert(id, variant);
+ }
+
+ fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
+ self.result.assoc_resolutions.insert(id, item);
+ }
+
+ fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
+ self.result.type_of_pat.insert(pat, ty);
+ }
+
+ fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
+ self.result.diagnostics.push(diagnostic);
+ }
+
+ fn make_ty_with_mode(
+ &mut self,
+ type_ref: &TypeRef,
+ impl_trait_mode: ImplTraitLoweringMode,
+ ) -> Ty {
+ // FIXME use right resolver for block
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+ .with_impl_trait_mode(impl_trait_mode);
+ let ty = ctx.lower_ty(type_ref);
+ let ty = self.insert_type_vars(ty);
+ self.normalize_associated_types_in(ty)
+ }
+
+ fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
+ self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
+ }
+
+ fn err_ty(&self) -> Ty {
+ self.result.standard_types.unknown.clone()
+ }
+
+ /// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
+ fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
+ let data = c.data(Interner);
+ match data.value {
+ ConstValue::Concrete(cc) => match cc.interned {
+ hir_def::type_ref::ConstScalar::Unknown => {
+ self.table.new_const_var(data.ty.clone())
+ }
+ _ => c,
+ },
+ _ => c,
+ }
+ }
+
+ /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
+ fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
+ match ty.kind(Interner) {
+ TyKind::Error => self.table.new_type_var(),
+ TyKind::InferenceVar(..) => {
+ let ty_resolved = self.resolve_ty_shallow(&ty);
+ if ty_resolved.is_unknown() {
+ self.table.new_type_var()
+ } else {
+ ty
+ }
+ }
+ _ => ty,
+ }
+ }
+
+ fn insert_type_vars(&mut self, ty: Ty) -> Ty {
+ fold_tys_and_consts(
+ ty,
+ |x, _| match x {
+ Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
+ Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+
+ fn resolve_obligations_as_possible(&mut self) {
+ self.table.resolve_obligations_as_possible();
+ }
+
+ fn push_obligation(&mut self, o: DomainGoal) {
+ self.table.register_obligation(o.cast(Interner));
+ }
+
+ fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+ self.table.unify(ty1, ty2)
+ }
+
+ /// Recurses through the given type, normalizing associated types mentioned
+ /// in it by replacing them by type variables and registering obligations to
+ /// resolve later. This should be done once for every type we get from some
+ /// type annotation (e.g. from a let type annotation, field type or function
+ /// call). `make_ty` handles this already, but e.g. for field types we need
+ /// to do it as well.
+ fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+ self.table.normalize_associated_types_in(ty)
+ }
+
+ fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
+ self.resolve_obligations_as_possible();
+ self.table.resolve_ty_shallow(ty)
+ }
+
+ fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
+ self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
+ }
+
+ fn resolve_associated_type_with_params(
+ &mut self,
+ inner_ty: Ty,
+ assoc_ty: Option<TypeAliasId>,
+ params: &[GenericArg],
+ ) -> Ty {
+ match assoc_ty {
+ Some(res_assoc_ty) => {
+ let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
+ hir_def::ItemContainerId::TraitId(trait_) => trait_,
+ _ => panic!("resolve_associated_type called with non-associated type"),
+ };
+ let ty = self.table.new_type_var();
+ let mut param_iter = params.iter().cloned();
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(inner_ty)
+ .fill(|_| param_iter.next().unwrap())
+ .build();
+ let alias_eq = AliasEq {
+ alias: AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(res_assoc_ty),
+ substitution: trait_ref.substitution.clone(),
+ }),
+ ty: ty.clone(),
+ };
+ self.push_obligation(trait_ref.cast(Interner));
+ self.push_obligation(alias_eq.cast(Interner));
+ ty
+ }
+ None => self.err_ty(),
+ }
+ }
+
+ fn resolve_variant(&mut self, path: Option<&Path>, value_ns: bool) -> (Ty, Option<VariantId>) {
+ let path = match path {
+ Some(path) => path,
+ None => return (self.err_ty(), None),
+ };
+ let resolver = &self.resolver;
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ // FIXME: this should resolve assoc items as well, see this example:
+ // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
+ let (resolution, unresolved) = if value_ns {
+ match resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
+ Some(ResolveValueResult::ValueNs(value)) => match value {
+ ValueNs::EnumVariantId(var) => {
+ let substs = ctx.substs_from_path(path, var.into(), true);
+ let ty = self.db.ty(var.parent.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ return (ty, Some(var.into()));
+ }
+ ValueNs::StructId(strukt) => {
+ let substs = ctx.substs_from_path(path, strukt.into(), true);
+ let ty = self.db.ty(strukt.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ return (ty, Some(strukt.into()));
+ }
+ _ => return (self.err_ty(), None),
+ },
+ Some(ResolveValueResult::Partial(typens, unresolved)) => (typens, Some(unresolved)),
+ None => return (self.err_ty(), None),
+ }
+ } else {
+ match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ Some(it) => it,
+ None => return (self.err_ty(), None),
+ }
+ };
+ return match resolution {
+ TypeNs::AdtId(AdtId::StructId(strukt)) => {
+ let substs = ctx.substs_from_path(path, strukt.into(), true);
+ let ty = self.db.ty(strukt.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
+ }
+ TypeNs::AdtId(AdtId::UnionId(u)) => {
+ let substs = ctx.substs_from_path(path, u.into(), true);
+ let ty = self.db.ty(u.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ forbid_unresolved_segments((ty, Some(u.into())), unresolved)
+ }
+ TypeNs::EnumVariantId(var) => {
+ let substs = ctx.substs_from_path(path, var.into(), true);
+ let ty = self.db.ty(var.parent.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ forbid_unresolved_segments((ty, Some(var.into())), unresolved)
+ }
+ TypeNs::SelfType(impl_id) => {
+ let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let substs = generics.placeholder_subst(self.db);
+ let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
+ self.resolve_variant_on_alias(ty, unresolved, path)
+ }
+ TypeNs::TypeAliasId(it) => {
+ let ty = TyBuilder::def_ty(self.db, it.into())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.resolve_variant_on_alias(ty, unresolved, path)
+ }
+ TypeNs::AdtSelfType(_) => {
+ // FIXME this could happen in array size expressions, once we're checking them
+ (self.err_ty(), None)
+ }
+ TypeNs::GenericParam(_) => {
+ // FIXME potentially resolve assoc type
+ (self.err_ty(), None)
+ }
+ TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
+ // FIXME diagnostic
+ (self.err_ty(), None)
+ }
+ };
+
+ fn forbid_unresolved_segments(
+ result: (Ty, Option<VariantId>),
+ unresolved: Option<usize>,
+ ) -> (Ty, Option<VariantId>) {
+ if unresolved.is_none() {
+ result
+ } else {
+ // FIXME diagnostic
+ (TyKind::Error.intern(Interner), None)
+ }
+ }
+ }
+
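+ // Illustrative: given `type A = Option<i32>;`, a path such as `A::Some` used in
+ // a struct/variant position leaves one unresolved segment, which is looked up
+ // below as a variant of the aliased enum.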
+ fn resolve_variant_on_alias(
+ &mut self,
+ ty: Ty,
+ unresolved: Option<usize>,
+ path: &Path,
+ ) -> (Ty, Option<VariantId>) {
+ let remaining = unresolved.map(|x| path.segments().skip(x).len()).filter(|x| x > &0);
+ match remaining {
+ None => {
+ let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
+ AdtId::StructId(s) => Some(VariantId::StructId(s)),
+ AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
+ AdtId::EnumId(_) => {
+ // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
+ None
+ }
+ });
+ (ty, variant)
+ }
+ Some(1) => {
+ let segment = path.mod_path().segments().last().unwrap();
+ // this could be an enum variant or associated type
+ if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
+ let enum_data = self.db.enum_data(enum_id);
+ if let Some(local_id) = enum_data.variant(segment) {
+ let variant = EnumVariantId { parent: enum_id, local_id };
+ return (ty, Some(variant.into()));
+ }
+ }
+ // FIXME potentially resolve assoc type
+ (self.err_ty(), None)
+ }
+ Some(_) => {
+ // FIXME diagnostic
+ (self.err_ty(), None)
+ }
+ }
+ }
+
+ fn resolve_lang_item(&self, name: Name) -> Option<LangItemTarget> {
+ let krate = self.resolver.krate();
+ self.db.lang_item(krate, name.to_smol_str())
+ }
+
+ fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
+ let path = path![core::iter::IntoIterator];
+ let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Item])
+ }
+
+ fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
+ // FIXME resolve via lang_item once try v2 is stable
+ let path = path![core::ops::Try];
+ let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+ let trait_data = self.db.trait_data(trait_);
+ trait_data
+ // FIXME remove once try v2 is stable
+ .associated_type_by_name(&name![Ok])
+ .or_else(|| trait_data.associated_type_by_name(&name![Output]))
+ }
+
+ fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+
+ fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_lang_item(name![not])?.as_trait()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+
+ fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_lang_item(name![future_trait])?.as_trait()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+
+ fn resolve_boxed_box(&self) -> Option<AdtId> {
+ let struct_ = self.resolve_lang_item(name![owned_box])?.as_struct()?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_full(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeFull];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range(&self) -> Option<AdtId> {
+ let path = path![core::ops::Range];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_inclusive(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeInclusive];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_from(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeFrom];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_to(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeTo];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeToInclusive];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_ops_index(&self) -> Option<TraitId> {
+ self.resolve_lang_item(name![index])?.as_trait()
+ }
+
+ fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_ops_index()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+}
+
+/// When inferring an expression, we propagate downward whatever type hint we
+/// can, in the form of an `Expectation`.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub(crate) enum Expectation {
+ None,
+ HasType(Ty),
+ // Castable(Ty), // rustc has this, we currently just don't propagate an expectation for casts
+ RValueLikeUnsized(Ty),
+}
+
+impl Expectation {
+ /// The expectation that the type of the expression needs to equal the given
+ /// type.
+ fn has_type(ty: Ty) -> Self {
+ if ty.is_unknown() {
+ // FIXME: get rid of this?
+ Expectation::None
+ } else {
+ Expectation::HasType(ty)
+ }
+ }
+
+ fn from_option(ty: Option<Ty>) -> Self {
+ ty.map_or(Expectation::None, Expectation::HasType)
+ }
+
+ /// The following explanation is copied straight from rustc:
+ /// Provides an expectation for an rvalue expression given an *optional*
+ /// hint, which is not required for type safety (the resulting type might
+ /// be checked higher up, as is the case with `&expr` and `box expr`), but
+ /// is useful in determining the concrete type.
+ ///
+ /// The primary use case is where the expected type is a fat pointer,
+ /// like `&[isize]`. For example, consider the following statement:
+ ///
+ /// let x: &[isize] = &[1, 2, 3];
+ ///
+ /// In this case, the expected type for the `&[1, 2, 3]` expression is
+ /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
+ /// expectation `ExpectHasType([isize])`, that would be too strong --
+ /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
+ /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
+ /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
+ /// which still is useful, because it informs integer literals and the like.
+ /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
+    /// for examples of where this comes up.
+ fn rvalue_hint(table: &mut unify::InferenceTable<'_>, ty: Ty) -> Self {
+ // FIXME: do struct_tail_without_normalization
+ match table.resolve_ty_shallow(&ty).kind(Interner) {
+ TyKind::Slice(_) | TyKind::Str | TyKind::Dyn(_) => Expectation::RValueLikeUnsized(ty),
+ _ => Expectation::has_type(ty),
+ }
+ }
+
+ /// This expresses no expectation on the type.
+ fn none() -> Self {
+ Expectation::None
+ }
+
+ fn resolve(&self, table: &mut unify::InferenceTable<'_>) -> Expectation {
+ match self {
+ Expectation::None => Expectation::None,
+ Expectation::HasType(t) => Expectation::HasType(table.resolve_ty_shallow(t)),
+ Expectation::RValueLikeUnsized(t) => {
+ Expectation::RValueLikeUnsized(table.resolve_ty_shallow(t))
+ }
+ }
+ }
+
+ fn to_option(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
+ match self.resolve(table) {
+ Expectation::None => None,
+ Expectation::HasType(t) |
+ // Expectation::Castable(t) |
+ Expectation::RValueLikeUnsized(t) => Some(t),
+ }
+ }
+
+ fn only_has_type(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
+ match self {
+ Expectation::HasType(t) => Some(table.resolve_ty_shallow(t)),
+ // Expectation::Castable(_) |
+ Expectation::RValueLikeUnsized(_) | Expectation::None => None,
+ }
+ }
+
+ /// Comment copied from rustc:
+ /// Disregard "castable to" expectations because they
+ /// can lead us astray. Consider for example `if cond
+ /// {22} else {c} as u8` -- if we propagate the
+ /// "castable to u8" constraint to 22, it will pick the
+ /// type 22u8, which is overly constrained (c might not
+ /// be a u8). In effect, the problem is that the
+ /// "castable to" expectation is not the tightest thing
+ /// we can say, so we want to drop it in this case.
+ /// The tightest thing we can say is "must unify with
+ /// else branch". Note that in the case of a "has type"
+ /// constraint, this limitation does not hold.
+ ///
+ /// If the expected type is just a type variable, then don't use
+ /// an expected type. Otherwise, we might write parts of the type
+ /// when checking the 'then' block which are incompatible with the
+ /// 'else' branch.
+ fn adjust_for_branches(&self, table: &mut unify::InferenceTable<'_>) -> Expectation {
+ match self {
+ Expectation::HasType(ety) => {
+ let ety = table.resolve_ty_shallow(ety);
+ if !ety.is_ty_var() {
+ Expectation::HasType(ety)
+ } else {
+ Expectation::None
+ }
+ }
+ Expectation::RValueLikeUnsized(ety) => Expectation::RValueLikeUnsized(ety.clone()),
+ _ => Expectation::None,
+ }
+ }
+}
+
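+/// Tracks whether the code currently being inferred diverges, e.g. because it
+/// ends in a `return` or in an expression of type `!`. `Maybe` orders before
+/// `Always`, so `&` (min) combines parallel branches: the result is `Always`
+/// only if every branch diverges; `|` (max) combines sequential code: the
+/// result is `Always` if any part diverges.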
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+enum Diverges {
+ Maybe,
+ Always,
+}
+
+impl Diverges {
+ fn is_always(self) -> bool {
+ self == Diverges::Always
+ }
+}
+
+impl std::ops::BitAnd for Diverges {
+ type Output = Self;
+ fn bitand(self, other: Self) -> Self {
+ std::cmp::min(self, other)
+ }
+}
+
+impl std::ops::BitOr for Diverges {
+ type Output = Self;
+ fn bitor(self, other: Self) -> Self {
+ std::cmp::max(self, other)
+ }
+}
+
+impl std::ops::BitAndAssign for Diverges {
+ fn bitand_assign(&mut self, other: Self) {
+ *self = *self & other;
+ }
+}
+
+impl std::ops::BitOrAssign for Diverges {
+ fn bitor_assign(&mut self, other: Self) {
+ *self = *self | other;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
new file mode 100644
index 000000000..3ead92909
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -0,0 +1,82 @@
+//! Inference of closure parameter types based on the closure's expected type.
+
+use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, WhereClause};
+use hir_def::{expr::ExprId, HasModule};
+use smallvec::SmallVec;
+
+use crate::{
+ to_chalk_trait_id, utils, ChalkTraitId, DynTy, FnPointer, FnSig, Interner, Substitution, Ty,
+ TyExt, TyKind,
+};
+
+use super::{Expectation, InferenceContext};
+
+impl InferenceContext<'_> {
+ pub(super) fn deduce_closure_type_from_expectations(
+ &mut self,
+ closure_expr: ExprId,
+ closure_ty: &Ty,
+ sig_ty: &Ty,
+ expectation: &Expectation,
+ ) {
+ let expected_ty = match expectation.to_option(&mut self.table) {
+ Some(ty) => ty,
+ None => return,
+ };
+
+ // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here.
+ let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty);
+
+ // Deduction based on the expected `dyn Fn` is done separately.
+ if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) {
+ if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) {
+ let expected_sig_ty = TyKind::Function(sig).intern(Interner);
+
+ self.unify(sig_ty, &expected_sig_ty);
+ }
+ }
+ }
+
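+    /// For example, with `let f: &dyn Fn(u32) -> bool = &|x| x > 0;` the
+    /// expected `dyn Fn(u32) -> bool` carries (roughly) a projection bound
+    /// `<_ as FnOnce<(u32,)>>::Output == bool`, from which we recover the
+    /// signature `fn(u32) -> bool`, so `x` can be inferred as `u32` without
+    /// an annotation.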
+ fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
+ // Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
+
+ let fn_traits: SmallVec<[ChalkTraitId; 3]> =
+ utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate())
+ .map(to_chalk_trait_id)
+ .collect();
+
+ let self_ty = TyKind::Error.intern(Interner);
+ let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
+ for bound in bounds.iter(Interner) {
+ // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer`
+ if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
+ bound.skip_binders()
+ {
+ let assoc_data = self.db.associated_ty_data(projection.associated_ty_id);
+ if !fn_traits.contains(&assoc_data.trait_id) {
+ return None;
+ }
+
+ // Skip `Self`, get the type argument.
+ let arg = projection.substitution.as_slice(Interner).get(1)?;
+ if let Some(subst) = arg.ty(Interner)?.as_tuple() {
+ let generic_args = subst.as_slice(Interner);
+ let mut sig_tys = Vec::new();
+ for arg in generic_args {
+ sig_tys.push(arg.ty(Interner)?.clone());
+ }
+ sig_tys.push(ty.clone());
+
+ cov_mark::hit!(dyn_fn_param_informs_call_site_closure_signature);
+ return Some(FnPointer {
+ num_binders: bound.len(Interner),
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(Substitution::from_iter(Interner, sig_tys)),
+ });
+ }
+ }
+ }
+
+ None
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
new file mode 100644
index 000000000..f54440bf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -0,0 +1,673 @@
+//! Coercion logic. Coercions are certain type conversions that can implicitly
+//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
+//! like going from `&Vec<T>` to `&[T]`.
+//!
+//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
+//! `librustc_typeck/check/coercion.rs`.
+
+use std::{iter, sync::Arc};
+
+use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind};
+use hir_def::{expr::ExprId, lang_item::LangItemTarget};
+use stdx::always;
+use syntax::SmolStr;
+
+use crate::{
+ autoderef::{Autoderef, AutoderefKind},
+ db::HirDatabase,
+ infer::{
+ Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
+ TypeError, TypeMismatch,
+ },
+ static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner,
+ Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::unify::InferenceTable;
+
+pub(crate) type CoerceResult = Result<InferOk<(Vec<Adjustment>, Ty)>, TypeError>;
+
+/// Do not require any adjustments, i.e. coerce `x -> x`.
+fn identity(_: Ty) -> Vec<Adjustment> {
+ vec![]
+}
+
+fn simple(kind: Adjust) -> impl FnOnce(Ty) -> Vec<Adjustment> {
+ move |target| vec![Adjustment { kind, target }]
+}
+
+/// This always returns `Ok(...)`.
+fn success(
+ adj: Vec<Adjustment>,
+ target: Ty,
+ goals: Vec<InEnvironment<Goal<Interner>>>,
+) -> CoerceResult {
+ Ok(InferOk { goals, value: (adj, target) })
+}
+
+#[derive(Clone, Debug)]
+pub(super) struct CoerceMany {
+ expected_ty: Ty,
+}
+
+impl CoerceMany {
+ pub(super) fn new(expected: Ty) -> Self {
+ CoerceMany { expected_ty: expected }
+ }
+
+ /// Merge two types from different branches, with possible coercion.
+ ///
+ /// Mostly this means trying to coerce one to the other, but
+ /// - if we have two function types for different functions or closures, we need to
+ /// coerce both to function pointers;
+ /// - if we were concerned with lifetime subtyping, we'd need to look for a
+ /// least upper bound.
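+    ///
+    /// For example, in `if flag { foo } else { bar }` with two distinct fn
+    /// items, neither FnDef type coerces to the other, so both branches are
+    /// reified to their common function-pointer type instead.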
+ pub(super) fn coerce(
+ &mut self,
+ ctx: &mut InferenceContext<'_>,
+ expr: Option<ExprId>,
+ expr_ty: &Ty,
+ ) {
+ let expr_ty = ctx.resolve_ty_shallow(expr_ty);
+ self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty);
+
+ // Special case: two function types. Try to coerce both to
+ // pointers to have a chance at getting a match. See
+ // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
+ let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
+ (TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
+ // FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
+ // we should be coercing the closure to a fn pointer of the safety of the FnDef
+ cov_mark::hit!(coerce_fn_reification);
+ let sig =
+ self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
+ Some(sig)
+ }
+ _ => None,
+ };
+ if let Some(sig) = sig {
+ let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
+ let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
+ let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
+ if let (Ok(result1), Ok(result2)) = (result1, result2) {
+ ctx.table.register_infer_ok(result1);
+ ctx.table.register_infer_ok(result2);
+ return self.expected_ty = target_ty;
+ }
+ }
+
+ // It might not seem like it, but order is important here: If the expected
+ // type is a type variable and the new one is `!`, trying it the other
+ // way around first would mean we make the type variable `!`, instead of
+ // just marking it as possibly diverging.
+ if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
+ /* self.expected_ty is already correct */
+ } else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
+ self.expected_ty = expr_ty;
+ } else {
+ if let Some(id) = expr {
+ ctx.result.type_mismatches.insert(
+ id.into(),
+ TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
+ );
+ }
+ cov_mark::hit!(coerce_merge_fail_fallback);
+ /* self.expected_ty is already correct */
+ }
+ }
+
+ pub(super) fn complete(self) -> Ty {
+ self.expected_ty
+ }
+}
+
+pub fn could_coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ coerce(db, env, tys).is_ok()
+}
+
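+/// Coerces `tys.value.0` to `tys.value.1` outside of an ongoing inference
+/// session, returning the adjustments and the coerced type: the canonical
+/// binders are instantiated with fresh inference variables in a temporary
+/// `InferenceTable`, and variables left unconstrained by the coercion are
+/// mapped back to their original bound variables in the result.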
+pub(crate) fn coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = table.fresh_subst(tys.binders.as_slice(Interner));
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ let (adjustments, ty) = table.coerce(&ty1_with_vars, &ty2_with_vars)?;
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+ };
+ // FIXME also map the types in the adjustments
+ Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
+}
+
+impl<'a> InferenceContext<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(super) fn coerce(
+ &mut self,
+ expr: Option<ExprId>,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<Ty, TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty)?;
+ if let Some(expr) = expr {
+ self.write_expr_adj(expr, adjustments);
+ }
+ Ok(ty)
+ }
+}
+
+impl<'a> InferenceTable<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(crate) fn coerce(
+ &mut self,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ match self.coerce_inner(from_ty, &to_ty) {
+ Ok(InferOk { value: (adjustments, ty), goals }) => {
+ self.register_infer_ok(InferOk { value: (), goals });
+ Ok((adjustments, ty))
+ }
+ Err(e) => {
+ // FIXME deal with error
+ Err(e)
+ }
+ }
+ }
+
+ fn coerce_inner(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
+ if from_ty.is_never() {
+ // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound
+ // type variable, we want `?T` to fallback to `!` if not
+ // otherwise constrained. An example where this arises:
+ //
+ // let _: Option<?T> = Some({ return; });
+ //
+ // here, we would coerce from `!` to `?T`.
+ if let TyKind::InferenceVar(tv, TyVariableKind::General) = to_ty.kind(Interner) {
+ self.set_diverging(*tv, true);
+ }
+ return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]);
+ }
+
+ // Consider coercing the subtype to a DST
+ if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
+ return Ok(ret);
+ }
+
+ // Examine the supertype and consider auto-borrowing.
+ match to_ty.kind(Interner) {
+ TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
+ TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt),
+ _ => {}
+ }
+
+ match from_ty.kind(Interner) {
+ TyKind::FnDef(..) => {
+ // Function items are coercible to any closure
+ // type; function pointers are not (that would
+ // require double indirection).
+ // Additionally, we permit coercion of function
+ // items to drop the unsafe qualifier.
+ self.coerce_from_fn_item(from_ty, to_ty)
+ }
+ TyKind::Function(from_fn_ptr) => {
+ // We permit coercion of fn pointers to drop the
+ // unsafe qualifier.
+ self.coerce_from_fn_pointer(from_ty.clone(), from_fn_ptr, to_ty)
+ }
+ TyKind::Closure(_, from_substs) => {
+ // Non-capturing closures are coercible to
+ // function pointers or unsafe function pointers.
+ // It cannot convert closures that require unsafe.
+ self.coerce_closure_to_fn(from_ty.clone(), from_substs, to_ty)
+ }
+ _ => {
+ // Otherwise, just use unification rules.
+ self.unify_and(&from_ty, to_ty, identity)
+ }
+ }
+ }
+
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and<F>(&mut self, t1: &Ty, t2: &Ty, f: F) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ self.try_unify(t1, t2)
+ .and_then(|InferOk { goals, .. }| success(f(t1.clone()), t1.clone(), goals))
+ }
+
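+    /// Coerces to a raw pointer, e.g. `&T -> *const T` (recorded as a deref
+    /// plus a raw-pointer borrow) or `*mut T -> *const T` (a
+    /// `MutToConstPointer` pointer cast).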
+ fn coerce_ptr(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
+ let (is_ref, from_mt, from_inner) = match from_ty.kind(Interner) {
+ TyKind::Ref(mt, _, ty) => (true, mt, ty),
+ TyKind::Raw(mt, ty) => (false, mt, ty),
+ _ => return self.unify_and(&from_ty, to_ty, identity),
+ };
+
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ // Check that the types which they point at are compatible.
+ let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner);
+
+ // Although references and unsafe ptrs have the same
+ // representation, we still register an Adjust::DerefRef so that
+ // regionck knows that the region for `a` must be valid here.
+ if is_ref {
+ self.unify_and(&from_raw, to_ty, |target| {
+ vec![
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), target },
+ ]
+ })
+ } else if *from_mt != to_mt {
+ self.unify_and(
+ &from_raw,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::MutToConstPointer)),
+ )
+ } else {
+ self.unify_and(&from_raw, to_ty, identity)
+ }
+ }
+
+ /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
+ /// To match `A` with `B`, autoderef will be performed,
+ /// calling `deref`/`deref_mut` where necessary.
+ fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
+ let from_mt = match from_ty.kind(Interner) {
+ &TyKind::Ref(mt, _, _) => {
+ coerce_mutabilities(mt, to_mt)?;
+ mt
+ }
+ _ => return self.unify_and(&from_ty, to_ty, identity),
+ };
+
+ // NOTE: this code is mostly copied and adapted from rustc, and
+ // currently more complicated than necessary, carrying errors around
+        // etc. This complication will become necessary when we actually track
+ // details of coercion errors though, so I think it's useful to leave
+ // the structure like it is.
+
+ let snapshot = self.snapshot();
+
+ let mut autoderef = Autoderef::new(self, from_ty.clone());
+ let mut first_error = None;
+ let mut found = None;
+
+ while let Some((referent_ty, autoderefs)) = autoderef.next() {
+ if autoderefs == 0 {
+ // Don't let this pass, otherwise it would cause
+ // &T to autoref to &&T.
+ continue;
+ }
+
+ // At this point, we have deref'd `a` to `referent_ty`. So
+ // imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
+ // In the autoderef loop for `&'a mut Vec<T>`, we would get
+ // three callbacks:
+ //
+ // - `&'a mut Vec<T>` -- 0 derefs, just ignore it
+ // - `Vec<T>` -- 1 deref
+ // - `[T]` -- 2 deref
+ //
+ // At each point after the first callback, we want to
+        // check to see whether this would match our target type
+ // (`&'b mut [T]`) if we autoref'd it. We can't just
+ // compare the referent types, though, because we still
+ // have to consider the mutability. E.g., in the case
+ // we've been considering, we have an `&mut` reference, so
+ // the `T` in `[T]` needs to be unified with equality.
+ //
+ // Therefore, we construct reference types reflecting what
+ // the types will be after we do the final auto-ref and
+ // compare those. Note that this means we use the target
+ // mutability [1], since it may be that we are coercing
+ // from `&mut T` to `&U`.
+ let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc
+ let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
+ match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
+ Ok(result) => {
+ found = Some(result.map(|()| derefd_from_ty));
+ break;
+ }
+ Err(err) => {
+ if first_error.is_none() {
+ first_error = Some(err);
+ }
+ }
+ }
+ }
+
+ // Extract type or return an error. We return the first error
+ // we got, which should be from relating the "base" type
+ // (e.g., in example above, the failure from relating `Vec<T>`
+ // to the target type), since that should be the least
+ // confusing.
+ let InferOk { value: ty, goals } = match found {
+ Some(d) => d,
+ None => {
+ self.rollback_to(snapshot);
+ let err = first_error.expect("coerce_borrowed_pointer had no error");
+ return Err(err);
+ }
+ };
+ if ty == from_ty && from_mt == Mutability::Not && autoderef.step_count() == 1 {
+ // As a special case, if we would produce `&'a *x`, that's
+ // a total no-op. We end up with the type `&'a T` just as
+ // we started with. In that case, just skip it
+ // altogether. This is just an optimization.
+ //
+ // Note that for `&mut`, we DO want to reborrow --
+ // otherwise, this would be a move, which might be an
+ // error. For example `foo(self.x)` where `self` and
+        // `self.x` both have `&mut` type would be a move of
+ // `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
+ // which is a borrow.
+ always!(to_mt == Mutability::Not); // can only coerce &T -> &U
+ return success(vec![], ty, goals);
+ }
+
+ let mut adjustments = auto_deref_adjust_steps(&autoderef);
+ adjustments
+ .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() });
+
+ success(adjustments, ty, goals)
+ }
+
+ /// Attempts to coerce from the type of a Rust function item into a function pointer.
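+    ///
+    /// For example, given `fn foo(x: i32) -> i32 { x }`, the binding
+    /// `let f: fn(i32) -> i32 = foo;` reifies the FnDef type of `foo` into a
+    /// function pointer (`ReifyFnPointer`); coercing to an `unsafe fn`
+    /// pointer additionally applies `UnsafeFnPointer`.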
+ fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
+ match to_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let from_sig = from_ty.callable_sig(self.db).expect("FnDef had no sig");
+
+ // FIXME check ABI: Intrinsics are not coercible to function pointers
+ // FIXME Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396)
+
+ // FIXME rustc normalizes assoc types in the sig here, not sure if necessary
+
+ let from_sig = from_sig.to_fn_ptr();
+ let from_fn_pointer = TyKind::Function(from_sig.clone()).intern(Interner);
+ let ok = self.coerce_from_safe_fn(
+ from_fn_pointer.clone(),
+ &from_sig,
+ to_ty,
+ |unsafe_ty| {
+ vec![
+ Adjustment {
+ kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
+ target: from_fn_pointer,
+ },
+ Adjustment {
+ kind: Adjust::Pointer(PointerCast::UnsafeFnPointer),
+ target: unsafe_ty,
+ },
+ ]
+ },
+ simple(Adjust::Pointer(PointerCast::ReifyFnPointer)),
+ )?;
+
+ Ok(ok)
+ }
+ _ => self.unify_and(&from_ty, to_ty, identity),
+ }
+ }
+
+ fn coerce_from_fn_pointer(
+ &mut self,
+ from_ty: Ty,
+ from_f: &FnPointer,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ self.coerce_from_safe_fn(
+ from_ty,
+ from_f,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)),
+ identity,
+ )
+ }
+
+ fn coerce_from_safe_fn<F, G>(
+ &mut self,
+ from_ty: Ty,
+ from_fn_ptr: &FnPointer,
+ to_ty: &Ty,
+ to_unsafe: F,
+ normal: G,
+ ) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ G: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) {
+ if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) =
+ (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety)
+ {
+ let from_unsafe =
+ TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner);
+ return self.unify_and(&from_unsafe, to_ty, to_unsafe);
+ }
+ }
+ self.unify_and(&from_ty, to_ty, normal)
+ }
+
+ /// Attempts to coerce from the type of a non-capturing closure into a
+ /// function pointer.
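+    ///
+    /// For example, a non-capturing `|x: i32| x + 1` can coerce to
+    /// `fn(i32) -> i32` (or an `unsafe fn` pointer) via a `ClosureFnPointer`
+    /// cast.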
+ fn coerce_closure_to_fn(
+ &mut self,
+ from_ty: Ty,
+ from_substs: &Substitution,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ match to_ty.kind(Interner) {
+ // if from_substs is non-capturing (FIXME)
+ TyKind::Function(fn_ty) => {
+ // We coerce the closure, which has fn type
+ // `extern "rust-call" fn((arg0,arg1,...)) -> _`
+ // to
+ // `fn(arg0,arg1,...) -> _`
+ // or
+ // `unsafe fn(arg0,arg1,...) -> _`
+ let safety = fn_ty.sig.safety;
+ let pointer_ty = coerce_closure_fn_ty(from_substs, safety);
+ self.unify_and(
+ &pointer_ty,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::ClosureFnPointer(safety))),
+ )
+ }
+ _ => self.unify_and(&from_ty, to_ty, identity),
+ }
+ }
+
+    /// Coerce a type using `from_ty: CoerceUnsized<to_ty>`
+ ///
+ /// See: <https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html>
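+    ///
+    /// For example, `let s: &[i32] = &[1, 2, 3];` first reborrows the
+    /// `&[i32; 3]` with the target mutability and then solves
+    /// `&[i32; 3]: CoerceUnsized<&[i32]>`, yielding a `Deref`, an
+    /// `AutoBorrow::Ref` and a final `Unsize` pointer cast; `&T` to
+    /// `&dyn Trait` follows the same path.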
+ fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> CoerceResult {
+ // These 'if' statements require some explanation.
+ // The `CoerceUnsized` trait is special - it is only
+ // possible to write `impl CoerceUnsized<B> for A` where
+ // A and B have 'matching' fields. This rules out the following
+ // two types of blanket impls:
+ //
+ // `impl<T> CoerceUnsized<T> for SomeType`
+ // `impl<T> CoerceUnsized<SomeType> for T`
+ //
+ // Both of these trigger a special `CoerceUnsized`-related error (E0376)
+ //
+ // We can take advantage of this fact to avoid performing unnecessary work.
+ // If either `source` or `target` is a type variable, then any applicable impl
+ // would need to be generic over the self-type (`impl<T> CoerceUnsized<SomeType> for T`)
+ // or generic over the `CoerceUnsized` type parameter (`impl<T> CoerceUnsized<T> for
+ // SomeType`).
+ //
+ // However, these are exactly the kinds of impls which are forbidden by
+ // the compiler! Therefore, we can be sure that coercion will always fail
+ // when either the source or target type is a type variable. This allows us
+ // to skip performing any trait selection, and immediately bail out.
+ if from_ty.is_ty_var() {
+ return Err(TypeError);
+ }
+ if to_ty.is_ty_var() {
+ return Err(TypeError);
+ }
+
+ // Handle reborrows before trying to solve `Source: CoerceUnsized<Target>`.
+ let reborrow = match (from_ty.kind(Interner), to_ty.kind(Interner)) {
+ (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ let lt = static_lifetime();
+ Some((
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)),
+ target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
+ },
+ ))
+ }
+ (TyKind::Ref(from_mt, _, from_inner), &TyKind::Raw(to_mt, _)) => {
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ Some((
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)),
+ target: TyKind::Raw(to_mt, from_inner.clone()).intern(Interner),
+ },
+ ))
+ }
+ _ => None,
+ };
+ let coerce_from =
+ reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());
+
+ let krate = self.trait_env.krate;
+ let coerce_unsized_trait =
+ match self.db.lang_item(krate, SmolStr::new_inline("coerce_unsized")) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return Err(TypeError),
+ };
+
+ let coerce_unsized_tref = {
+ let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait);
+ if b.remaining() != 2 {
+ // The CoerceUnsized trait should have two generic params: Self and T.
+ return Err(TypeError);
+ }
+ b.push(coerce_from).push(to_ty.clone()).build()
+ };
+
+ let goal: InEnvironment<DomainGoal> =
+ InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner));
+
+ let canonicalized = self.canonicalize(goal);
+
+ // FIXME: rustc's coerce_unsized is more specialized -- it only tries to
+ // solve `CoerceUnsized` and `Unsize` goals at this point and leaves the
+ // rest for later. Also, there's some logic about sized type variables.
+ // Need to find out in what cases this is necessary
+ let solution = self
+ .db
+ .trait_solve(krate, canonicalized.value.clone().cast(Interner))
+ .ok_or(TypeError)?;
+
+ match solution {
+ Solution::Unique(v) => {
+ canonicalized.apply_solution(
+ self,
+ Canonical {
+ binders: v.binders,
+ // FIXME handle constraints
+ value: v.value.subst,
+ },
+ );
+ }
+ Solution::Ambig(Guidance::Definite(subst)) => {
+ // FIXME need to record an obligation here
+ canonicalized.apply_solution(self, subst)
+ }
+ // FIXME actually we maybe should also accept unknown guidance here
+ _ => return Err(TypeError),
+ };
+ let unsize =
+ Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: to_ty.clone() };
+ let adjustments = match reborrow {
+ None => vec![unsize],
+ Some((deref, autoref)) => vec![deref, autoref, unsize],
+ };
+ success(adjustments, to_ty.clone(), vec![])
+ }
+}
+
+fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
+ let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ match closure_sig.kind(Interner) {
+ TyKind::Function(fn_ty) => TyKind::Function(FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety, ..fn_ty.sig },
+ substitution: fn_ty.substitution.clone(),
+ })
+ .intern(Interner),
+ _ => TyKind::Error.intern(Interner),
+ }
+}
+
+fn safe_to_unsafe_fn_ty(fn_ty: FnPointer) -> FnPointer {
+ FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety: chalk_ir::Safety::Unsafe, ..fn_ty.sig },
+ substitution: fn_ty.substitution,
+ }
+}
+
+fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError> {
+ match (from, to) {
+ (Mutability::Mut, Mutability::Mut | Mutability::Not)
+ | (Mutability::Not, Mutability::Not) => Ok(()),
+ (Mutability::Not, Mutability::Mut) => Err(TypeError),
+ }
+}
+
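+/// Builds the `Adjust::Deref` adjustments for the steps an `Autoderef` has
+/// performed so far. Each adjustment's `target` is the type *after* that
+/// deref, i.e. the source type of the next step (or `final_ty()` for the last
+/// one); overloaded derefs are recorded with `Mutability::Not` for now, since
+/// the required mutability is not known at this point.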
+pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adjustment> {
+ let steps = autoderef.steps();
+ let targets =
+ steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty()));
+ steps
+ .iter()
+ .map(|(kind, _source)| match kind {
+ // We do not know what kind of deref we require at this point yet
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Builtin => None,
+ })
+ .zip(targets)
+ .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
+ .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
new file mode 100644
index 000000000..d164e64a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -0,0 +1,1527 @@
+//! Type inference for expressions.
+
+use std::{
+ collections::hash_map::Entry,
+ iter::{repeat, repeat_with},
+ mem,
+};
+
+use chalk_ir::{
+ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
+};
+use hir_def::{
+ expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
+ generics::TypeOrConstParamData,
+ path::{GenericArg, GenericArgs},
+ resolver::resolver_for_expr,
+ ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup,
+};
+use hir_expand::name::{name, Name};
+use stdx::always;
+use syntax::ast::RangeOp;
+
+use crate::{
+ autoderef::{self, Autoderef},
+ consteval,
+ infer::coerce::CoerceMany,
+ lower::{
+ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
+ },
+ mapping::{from_chalk, ToChalk},
+ method_resolution::{self, VisibleFromModule},
+ primitive::{self, UintTy},
+ static_lifetime, to_chalk_trait_id,
+ utils::{generics, Generics},
+ AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
+ Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::{
+ coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(tgt_expr, expected);
+ if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
+ let could_unify = self.unify(&ty, &expected_ty);
+ if !could_unify {
+ self.result.type_mismatches.insert(
+ tgt_expr.into(),
+ TypeMismatch { expected: expected_ty, actual: ty.clone() },
+ );
+ }
+ }
+ ty
+ }
+
+ /// Infer type of expression with possibly implicit coerce to the expected type.
+ /// Return the type after possible coercion.
+ pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(expr, expected);
+ if let Some(target) = expected.only_has_type(&mut self.table) {
+ match self.coerce(Some(expr), &ty, &target) {
+ Ok(res) => res,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: target.clone(), actual: ty.clone() },
+ );
+ target
+ }
+ }
+ } else {
+ ty
+ }
+ }
+
+ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ self.db.unwind_if_cancelled();
+
+ let ty = match &self.body[tgt_expr] {
+ Expr::Missing => self.err_ty(),
+ &Expr::If { condition, then_branch, else_branch } => {
+ self.infer_expr(
+ condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+
+ let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut both_arms_diverge = Diverges::Always;
+
+ let result_ty = self.table.new_type_var();
+ let then_ty = self.infer_expr_inner(then_branch, expected);
+ both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut coerce = CoerceMany::new(result_ty);
+ coerce.coerce(self, Some(then_branch), &then_ty);
+ let else_ty = match else_branch {
+ Some(else_branch) => self.infer_expr_inner(else_branch, expected),
+ None => TyBuilder::unit(),
+ };
+ both_arms_diverge &= self.diverges;
+ // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
+ coerce.coerce(self, else_branch, &else_ty);
+
+ self.diverges = condition_diverges | both_arms_diverge;
+
+ coerce.complete()
+ }
+ &Expr::Let { pat, expr } => {
+ let input_ty = self.infer_expr(expr, &Expectation::none());
+ self.infer_pat(pat, &input_ty, BindingMode::default());
+ TyKind::Scalar(Scalar::Bool).intern(Interner)
+ }
+ Expr::Block { statements, tail, label, id: _ } => {
+ let old_resolver = mem::replace(
+ &mut self.resolver,
+ resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
+ );
+ let ty = match label {
+ Some(_) => {
+ let break_ty = self.table.new_type_var();
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(break_ty.clone()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let ty = self.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ &Expectation::has_type(break_ty),
+ );
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+ if ctxt.may_break {
+ ctxt.coerce.complete()
+ } else {
+ ty
+ }
+ }
+ None => self.infer_block(tgt_expr, statements, *tail, expected),
+ };
+ self.resolver = old_resolver;
+ ty
+ }
+ Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected),
+ Expr::TryBlock { body } => {
+ let _inner = self.infer_expr(*body, expected);
+ // FIXME should be std::result::Result<{inner}, _>
+ self.err_ty()
+ }
+ Expr::Async { body } => {
+ let ret_ty = self.table.new_type_var();
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ let inner_ty = self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ // Use the first type parameter as the output type of future.
+ // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
+ let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
+ .intern(Interner)
+ }
+ Expr::Loop { body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.table.new_type_var()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+
+ if ctxt.may_break {
+ self.diverges = Diverges::Maybe;
+ ctxt.coerce.complete()
+ } else {
+ TyKind::Never.intern(Interner)
+ }
+ }
+ Expr::While { condition, body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(
+ *condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::For { iterable, body, pat, label } => {
+ let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
+
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let pat_ty =
+ self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
+
+ self.infer_pat(*pat, &pat_ty, BindingMode::default());
+
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::Closure { body, args, ret_type, arg_types } => {
+ assert_eq!(args.len(), arg_types.len());
+
+ let mut sig_tys = Vec::new();
+
+ // collect explicitly written argument types
+ for arg_type in arg_types.iter() {
+ let arg_ty = match arg_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(arg_ty);
+ }
+
+ // add return type
+ let ret_ty = match ret_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(ret_ty.clone());
+ let sig_ty = TyKind::Function(FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(
+ Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
+ ),
+ })
+ .intern(Interner);
+ let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
+ let closure_ty =
+ TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
+ .intern(Interner);
+
+ // Eagerly try to relate the closure type with the expected
+ // type, otherwise we often won't have enough information to
+ // infer the body.
+ self.deduce_closure_type_from_expectations(
+ tgt_expr,
+ &closure_ty,
+ &sig_ty,
+ expected,
+ );
+
+ // Now go through the argument patterns
+ for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
+ self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
+ }
+
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ closure_ty
+ }
+ Expr::Call { callee, args, .. } => {
+ let callee_ty = self.infer_expr(*callee, &Expectation::none());
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+ let mut res = None;
+ let mut derefed_callee = callee_ty.clone();
+ // manual loop to be able to access `derefs.table`
+ while let Some((callee_deref_ty, _)) = derefs.next() {
+ res = derefs.table.callable_sig(&callee_deref_ty, args.len());
+ if res.is_some() {
+ derefed_callee = callee_deref_ty;
+ break;
+ }
+ }
+ // if the function is unresolved, we use is_varargs=true to
+ // suppress the arg count diagnostic here
+ let is_varargs =
+ derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
+ || res.is_none();
+ let (param_tys, ret_ty) = match res {
+ Some(res) => {
+ let adjustments = auto_deref_adjust_steps(&derefs);
+ self.write_expr_adj(*callee, adjustments);
+ res
+ }
+ None => (Vec::new(), self.err_ty()), // FIXME diagnostic
+ };
+ let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
+ self.register_obligations_for_call(&callee_ty);
+
+ let expected_inputs = self.expected_inputs_for_expected_output(
+ expected,
+ ret_ty.clone(),
+ param_tys.clone(),
+ );
+
+ self.check_call_arguments(
+ tgt_expr,
+ args,
+ &expected_inputs,
+ &param_tys,
+ &indices_to_skip,
+ is_varargs,
+ );
+ self.normalize_associated_types_in(ret_ty)
+ }
+ Expr::MethodCall { receiver, args, method_name, generic_args } => self
+ .infer_method_call(
+ tgt_expr,
+ *receiver,
+ args,
+ method_name,
+ generic_args.as_deref(),
+ expected,
+ ),
+ Expr::Match { expr, arms } => {
+ let input_ty = self.infer_expr(*expr, &Expectation::none());
+
+ let expected = expected.adjust_for_branches(&mut self.table);
+
+ let result_ty = if arms.is_empty() {
+ TyKind::Never.intern(Interner)
+ } else {
+ match &expected {
+ Expectation::HasType(ty) => ty.clone(),
+ _ => self.table.new_type_var(),
+ }
+ };
+ let mut coerce = CoerceMany::new(result_ty);
+
+ let matchee_diverges = self.diverges;
+ let mut all_arms_diverge = Diverges::Always;
+
+ for arm in arms.iter() {
+ self.diverges = Diverges::Maybe;
+ let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
+ if let Some(guard_expr) = arm.guard {
+ self.infer_expr(
+ guard_expr,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ }
+
+ let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+ all_arms_diverge &= self.diverges;
+ coerce.coerce(self, Some(arm.expr), &arm_ty);
+ }
+
+ self.diverges = matchee_diverges | all_arms_diverge;
+
+ coerce.complete()
+ }
+ Expr::Path(p) => {
+ // FIXME this could be more efficient...
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
+ self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Expr::Continue { .. } => TyKind::Never.intern(Interner),
+ Expr::Break { expr, label } => {
+ let mut coerce = match find_breakable(&mut self.breakables, label.as_ref()) {
+ Some(ctxt) => {
+ // avoiding the borrowck
+ mem::replace(
+ &mut ctxt.coerce,
+ CoerceMany::new(self.result.standard_types.unknown.clone()),
+ )
+ }
+ None => CoerceMany::new(self.result.standard_types.unknown.clone()),
+ };
+
+ let val_ty = if let Some(expr) = *expr {
+ self.infer_expr(expr, &Expectation::none())
+ } else {
+ TyBuilder::unit()
+ };
+
+ // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
+ coerce.coerce(self, *expr, &val_ty);
+
+ if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
+ ctxt.coerce = coerce;
+ ctxt.may_break = true;
+ } else {
+ self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+ expr: tgt_expr,
+ });
+ };
+
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Return { expr } => {
+ if let Some(expr) = expr {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
+ } else {
+ let unit = TyBuilder::unit();
+ let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Yield { expr } => {
+ // FIXME: track yield type for coercion
+ if let Some(expr) = expr {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::RecordLit { path, fields, spread, .. } => {
+ let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
+ if let Some(variant) = def_id {
+ self.write_variant_resolution(tgt_expr.into(), variant);
+ }
+
+ if let Some(t) = expected.only_has_type(&mut self.table) {
+ self.unify(&ty, &t);
+ }
+
+ let substs = ty
+ .as_adt()
+ .map(|(_, s)| s.clone())
+ .unwrap_or_else(|| Substitution::empty(Interner));
+ let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
+ for field in fields.iter() {
+ let field_def =
+ variant_data.as_ref().and_then(|it| match it.field(&field.name) {
+ Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+ expr: field.expr,
+ });
+ None
+ }
+ });
+ let field_ty = field_def.map_or(self.err_ty(), |it| {
+ field_types[it.local_id].clone().substitute(Interner, &substs)
+ });
+ self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+ }
+ if let Some(expr) = spread {
+ self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
+ }
+ ty
+ }
+ Expr::Field { expr, name } => {
+ let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
+
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
+ let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
+ let (field_id, parameters) = match derefed_ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ return name.as_tuple_index().and_then(|idx| {
+ substs
+ .as_slice(Interner)
+ .get(idx)
+ .map(|a| a.assert_ty_ref(Interner))
+ .cloned()
+ });
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
+ let local_id = self.db.struct_data(*s).variant_data.field(name)?;
+ let field = FieldId { parent: (*s).into(), local_id };
+ (field, parameters.clone())
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
+ let local_id = self.db.union_data(*u).variant_data.field(name)?;
+ let field = FieldId { parent: (*u).into(), local_id };
+ (field, parameters.clone())
+ }
+ _ => return None,
+ };
+ let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+ .is_visible_from(self.db.upcast(), self.resolver.module());
+ if !is_visible {
+                    // Write down the first field resolution even if it is not visible.
+                    // This aids IDE features for private fields, like goto-def; if
+                    // autoderef later finds an applicable visible field, this will be
+                    // overwritten in a following cycle.
+ if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr)
+ {
+ entry.insert(field_id);
+ }
+ return None;
+ }
+ // can't have `write_field_resolution` here because `self.table` is borrowed :(
+ self.result.field_resolutions.insert(tgt_expr, field_id);
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &parameters);
+ Some(ty)
+ });
+ let ty = match ty {
+ Some(ty) => {
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ self.write_expr_adj(*expr, adjustments);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ ty
+ }
+ _ => self.err_ty(),
+ };
+ ty
+ }
+ Expr::Await { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
+ }
+ Expr::Try { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
+ }
+ Expr::Cast { expr, type_ref } => {
+ // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
+ let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let cast_ty = self.make_ty(type_ref);
+ // FIXME check the cast...
+ cast_ty
+ }
+ Expr::Ref { expr, rawness, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .and_then(|t| t.as_reference_or_ptr())
+ {
+ if exp_mutability == Mutability::Mut && mutability == Mutability::Not {
+ // FIXME: record type error - expected mut reference but found shared ref,
+ // which cannot be coerced
+ }
+ if exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
+ // FIXME: record type error - expected reference but found ptr,
+ // which cannot be coerced
+ }
+ Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
+ } else {
+ Expectation::none()
+ };
+ let inner_ty = self.infer_expr_inner(*expr, &expectation);
+ match rawness {
+ Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
+ Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
+ }
+ .intern(Interner)
+ }
+ &Expr::Box { expr } => self.infer_expr_box(expr, expected),
+ Expr::UnaryOp { expr, op } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let inner_ty = self.resolve_ty_shallow(&inner_ty);
+ match op {
+ UnaryOp::Deref => {
+ autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
+ }
+ UnaryOp::Neg => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
+ | TyKind::InferenceVar(
+ _,
+ TyVariableKind::Integer | TyVariableKind::Float,
+ ) => inner_ty,
+ // Otherwise we resolve via the std::ops::Neg trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
+ }
+ }
+ UnaryOp::Not => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Bool | Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer) => inner_ty,
+ // Otherwise we resolve via the std::ops::Not trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
+ }
+ }
+ }
+ }
+ Expr::BinaryOp { lhs, rhs, op } => match op {
+ Some(BinaryOp::Assignment { op: None }) => {
+ let lhs = *lhs;
+ let is_ordinary = match &self.body[lhs] {
+ Expr::Array(_)
+ | Expr::RecordLit { .. }
+ | Expr::Tuple { .. }
+ | Expr::Underscore => false,
+ Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)),
+ _ => true,
+ };
+
+ // In ordinary (non-destructuring) assignments, the type of
+                    // `lhs` must be inferred first so that the ADT field
+                    // instantiations in the RHS can be coerced to it. Note that this
+ // cannot happen in destructuring assignments because of how
+ // they are desugared.
+ if is_ordinary {
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+ self.infer_expr_coerce(*rhs, &Expectation::has_type(lhs_ty));
+ } else {
+ let rhs_ty = self.infer_expr(*rhs, &Expectation::none());
+ self.infer_assignee_expr(lhs, &rhs_ty);
+ }
+ self.result.standard_types.unit.clone()
+ }
+ Some(BinaryOp::LogicOp(_)) => {
+ let bool_ty = self.result.standard_types.bool_.clone();
+ self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty.clone()));
+ let lhs_diverges = self.diverges;
+ self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty.clone()));
+                    // Depending on the LHS' value, the RHS might never execute.
+ self.diverges = lhs_diverges;
+ bool_ty
+ }
+ Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
+ _ => self.err_ty(),
+ },
+ Expr::Range { lhs, rhs, range_type } => {
+ let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
+ let rhs_expect = lhs_ty
+ .as_ref()
+ .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
+ let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
+ match (range_type, lhs_ty, rhs_ty) {
+ (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
+ Some(adt) => TyBuilder::adt(self.db, adt).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, None, Some(ty)) => {
+ match self.resolve_range_to_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, Some(_), Some(ty)) => {
+ match self.resolve_range_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, _, None) => self.err_ty(),
+ }
+ }
+ Expr::Index { base, index } => {
+ let base_ty = self.infer_expr_inner(*base, &Expectation::none());
+ let index_ty = self.infer_expr(*index, &Expectation::none());
+
+ if let Some(index_trait) = self.resolve_ops_index() {
+ let canonicalized = self.canonicalize(base_ty.clone());
+ let receiver_adjustments = method_resolution::resolve_indexing_op(
+ self.db,
+ self.trait_env.clone(),
+ canonicalized.value,
+ index_trait,
+ );
+ let (self_ty, adj) = receiver_adjustments
+ .map_or((self.err_ty(), Vec::new()), |adj| {
+ adj.apply(&mut self.table, base_ty)
+ });
+ self.write_expr_adj(*base, adj);
+ self.resolve_associated_type_with_params(
+ self_ty,
+ self.resolve_ops_index_output(),
+ &[GenericArgData::Ty(index_ty).intern(Interner)],
+ )
+ } else {
+ self.err_ty()
+ }
+ }
+ Expr::Tuple { exprs, .. } => {
+ let mut tys = match expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .map(|t| t.kind(Interner))
+ {
+ Some(TyKind::Tuple(_, substs)) => substs
+ .iter(Interner)
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()))
+ .take(exprs.len())
+ .collect::<Vec<_>>(),
+ _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
+ };
+
+ for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
+ }
+
+ TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
+ }
+ Expr::Array(array) => {
+ let elem_ty =
+ match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
+ Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
+ _ => self.table.new_type_var(),
+ };
+ let mut coerce = CoerceMany::new(elem_ty.clone());
+
+ let expected = Expectation::has_type(elem_ty.clone());
+ let len = match array {
+ Array::ElementList { elements, .. } => {
+ for &expr in elements.iter() {
+ let cur_elem_ty = self.infer_expr_inner(expr, &expected);
+ coerce.coerce(self, Some(expr), &cur_elem_ty);
+ }
+ consteval::usize_const(Some(elements.len() as u128))
+ }
+ &Array::Repeat { initializer, repeat } => {
+ self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
+ self.infer_expr(
+ repeat,
+ &Expectation::has_type(
+ TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
+ ),
+ );
+
+ if let Some(g_def) = self.owner.as_generic_def_id() {
+ let generics = generics(self.db.upcast(), g_def);
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ || generics,
+ DebruijnIndex::INNERMOST,
+ )
+ } else {
+ consteval::usize_const(None)
+ }
+ }
+ };
+
+ TyKind::Array(coerce.complete(), len).intern(Interner)
+ }
+ Expr::Literal(lit) => match lit {
+ Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ Literal::String(..) => {
+ TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
+ .intern(Interner)
+ }
+ Literal::ByteString(bs) => {
+ let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
+
+ let len = consteval::usize_const(Some(bs.len() as u128));
+
+ let array_type = TyKind::Array(byte_type, len).intern(Interner);
+ TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
+ }
+ Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
+ Literal::Int(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Uint(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Float(_v, ty) => match ty {
+ Some(float_ty) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_float_var(),
+ },
+ },
+ Expr::MacroStmts { tail, statements } => {
+ self.infer_block(tgt_expr, statements, *tail, expected)
+ }
+ Expr::Underscore => {
+ // Underscore expressions may only appear in assignee expressions,
+ // which are handled by `infer_assignee_expr()`, so any underscore
+ // expression reaching this branch is an error.
+ self.err_ty()
+ }
+ };
+ // use a new type variable if we got unknown here
+ let ty = self.insert_type_vars_shallow(ty);
+ self.write_expr_ty(tgt_expr, ty.clone());
+ if self.resolve_ty_shallow(&ty).is_never() {
+ // Any expression that produces a value of type `!` must have diverged
+ self.diverges = Diverges::Always;
+ }
+ ty
+ }
+
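+ /// Infers a `box expr` expression: if the `Box` lang item resolves, any
+ /// expected `Box<T, ...>` type is peeled to obtain the expectation for the
+ /// inner expression, and the result is built as `Box` of the inferred inner
+ /// type with the remaining parameters defaulted.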
+ fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
+ if let Some(box_id) = self.resolve_boxed_box() {
+ let table = &mut self.table;
+ let inner_exp = expected
+ .to_option(table)
+ .as_ref()
+ .map(|e| e.as_adt())
+ .flatten()
+ .filter(|(e_adt, _)| e_adt == &box_id)
+ .map(|(_, subts)| {
+ let g = subts.at(Interner, 0);
+ Expectation::rvalue_hint(table, Ty::clone(g.assert_ty_ref(Interner)))
+ })
+ .unwrap_or_else(Expectation::none);
+
+ let inner_ty = self.infer_expr_inner(inner_expr, &inner_exp);
+ TyBuilder::adt(self.db, box_id)
+ .push(inner_ty)
+ .fill_with_defaults(self.db, || self.table.new_type_var())
+ .build()
+ } else {
+ self.err_ty()
+ }
+ }
+
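+ /// Infers the left-hand side of a destructuring assignment, e.g. the
+ /// `(a, b)` in `(a, b) = rhs`, by treating it like the corresponding
+ /// pattern and checking it against `rhs_ty`.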
+ pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
+ let is_rest_expr = |expr| {
+ matches!(
+ &self.body[expr],
+ Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive },
+ )
+ };
+
+ let rhs_ty = self.resolve_ty_shallow(rhs_ty);
+
+ let ty = match &self.body[lhs] {
+ Expr::Tuple { exprs, .. } => {
+ // We don't consider multiple ellipses. This is analogous to
+ // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+ let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
+ let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+ self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
+ }
+ Expr::Call { callee, args, .. } => {
+ // Tuple structs
+ let path = match &self.body[*callee] {
+ Expr::Path(path) => Some(path),
+ _ => None,
+ };
+
+ // We don't consider multiple ellipses. This is analogous to
+ // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+ let ellipsis = args.iter().position(|e| is_rest_expr(*e));
+ let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+ self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
+ }
+ Expr::Array(Array::ElementList { elements, .. }) => {
+ let elem_ty = match rhs_ty.kind(Interner) {
+ TyKind::Array(st, _) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ // There's no need to handle `..` as it cannot be bound.
+ let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e));
+
+ for e in sub_exprs {
+ self.infer_assignee_expr(*e, &elem_ty);
+ }
+
+ match rhs_ty.kind(Interner) {
+ TyKind::Array(_, _) => rhs_ty.clone(),
+ // Even when `rhs_ty` is not an array type, this assignee
+ // expression is inferred to be an array (of unknown element
+ // type and length). It should not be just an error type,
+ // because we still check whether this type unifies with
+ // `rhs_ty` at the end of this function to issue type mismatches.
+ _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
+ .intern(Interner),
+ }
+ }
+ Expr::RecordLit { path, fields, .. } => {
+ let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
+
+ self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+ }
+ Expr::Underscore => rhs_ty.clone(),
+ _ => {
+ // `lhs` is a place expression, a unit struct, or an enum variant.
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+
+ // This is the only branch where this function may coerce any type.
+ // We are returning early to avoid the unifiability check below.
+ let lhs_ty = self.insert_type_vars_shallow(lhs_ty);
+ let ty = match self.coerce(None, &rhs_ty, &lhs_ty) {
+ Ok(ty) => ty,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ lhs.into(),
+ TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() },
+ );
+ // `rhs_ty` is returned so no further type mismatches are
+ // reported because of this mismatch.
+ rhs_ty
+ }
+ };
+ self.write_expr_ty(lhs, ty.clone());
+ return ty;
+ }
+ };
+
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, &rhs_ty) {
+ self.result
+ .type_mismatches
+ .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() });
+ }
+ self.write_expr_ty(lhs, ty.clone());
+ ty
+ }
+
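+ /// Infers a binary expression whose operator may be overloaded (e.g. `a + b`
+ /// via the `add` lang item's trait): the operator is resolved to its trait
+ /// method when possible, otherwise the built-in rules for primitive operand
+ /// types are used.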
+ fn infer_overloadable_binop(
+ &mut self,
+ lhs: ExprId,
+ op: BinaryOp,
+ rhs: ExprId,
+ tgt_expr: ExprId,
+ ) -> Ty {
+ let lhs_expectation = Expectation::none();
+ let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
+ let rhs_ty = self.table.new_type_var();
+
+ let func = self.resolve_binop_method(op);
+ let func = match func {
+ Some(func) => func,
+ None => {
+ let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
+ let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
+ return self
+ .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
+ .unwrap_or_else(|| self.err_ty());
+ }
+ };
+
+ let subst = TyBuilder::subst_for_def(self.db, func)
+ .push(lhs_ty.clone())
+ .push(rhs_ty.clone())
+ .build();
+ self.write_method_resolution(tgt_expr, func, subst.clone());
+
+ let method_ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
+ self.register_obligations_for_call(&method_ty);
+
+ self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()));
+
+ let ret_ty = match method_ty.callable_sig(self.db) {
+ Some(sig) => sig.ret().clone(),
+ None => self.err_ty(),
+ };
+
+ let ret_ty = self.normalize_associated_types_in(ret_ty);
+
+ // FIXME: record autoref adjustments
+
+ // use knowledge of built-in binary ops, which can sometimes help inference
+ if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
+ self.unify(&builtin_rhs, &rhs_ty);
+ }
+ if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
+ self.unify(&builtin_ret, &ret_ty);
+ }
+
+ ret_ty
+ }
+
+ fn infer_block(
+ &mut self,
+ expr: ExprId,
+ statements: &[Statement],
+ tail: Option<ExprId>,
+ expected: &Expectation,
+ ) -> Ty {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, type_ref, initializer, else_branch } => {
+ let decl_ty = type_ref
+ .as_ref()
+ .map(|tr| self.make_ty(tr))
+ .unwrap_or_else(|| self.err_ty());
+
+ // Always use the declared type when specified
+ let mut ty = decl_ty.clone();
+
+ if let Some(expr) = initializer {
+ let actual_ty =
+ self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
+ if decl_ty.is_unknown() {
+ ty = actual_ty;
+ }
+ }
+
+ if let Some(expr) = else_branch {
+ self.infer_expr_coerce(
+ *expr,
+ &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
+ );
+ }
+
+ self.infer_pat(*pat, &ty, BindingMode::default());
+ }
+ Statement::Expr { expr, .. } => {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ }
+ }
+
+ if let Some(expr) = tail {
+ self.infer_expr_coerce(expr, expected)
+ } else {
+ // Citing rustc: if there is no explicit tail expression,
+ // that is typically equivalent to a tail expression
+ // of `()` -- except if the block diverges. In that
+ // case, there is no value supplied from the tail
+ // expression (assuming there are no other breaks,
+ // this implies that the type of the block will be
+ // `!`).
+ if self.diverges.is_always() {
+ // we don't even make an attempt at coercion
+ self.table.new_maybe_never_var()
+ } else {
+ if let Some(t) = expected.only_has_type(&mut self.table) {
+ if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+ );
+ }
+ t
+ } else {
+ TyBuilder::unit()
+ }
+ }
+ }
+ }
+
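+ /// Infers a method call: the receiver is inferred first, the method is
+ /// resolved against the traits in scope, the receiver adjustments and
+ /// resolved method are recorded, and the arguments are then checked like
+ /// an ordinary call.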
+ fn infer_method_call(
+ &mut self,
+ tgt_expr: ExprId,
+ receiver: ExprId,
+ args: &[ExprId],
+ method_name: &Name,
+ generic_args: Option<&GenericArgs>,
+ expected: &Expectation,
+ ) -> Ty {
+ let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ let resolved = method_resolution::lookup_method(
+ &canonicalized_receiver.value,
+ self.db,
+ self.trait_env.clone(),
+ &traits_in_scope,
+ VisibleFromModule::Filter(self.resolver.module()),
+ method_name,
+ );
+ let (receiver_ty, method_ty, substs) = match resolved {
+ Some((adjust, func)) => {
+ let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+ let generics = generics(self.db.upcast(), func.into());
+ let substs = self.substs_for_method_call(generics, generic_args);
+ self.write_expr_adj(receiver, adjustments);
+ self.write_method_resolution(tgt_expr, func, substs.clone());
+ (ty, self.db.value_ty(func.into()), substs)
+ }
+ None => (
+ receiver_ty,
+ Binders::empty(Interner, self.err_ty()),
+ Substitution::empty(Interner),
+ ),
+ };
+ let method_ty = method_ty.substitute(Interner, &substs);
+ self.register_obligations_for_call(&method_ty);
+ let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+ match method_ty.callable_sig(self.db) {
+ Some(sig) => {
+ if !sig.params().is_empty() {
+ (
+ sig.params()[0].clone(),
+ sig.params()[1..].to_vec(),
+ sig.ret().clone(),
+ sig.is_varargs,
+ )
+ } else {
+ (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
+ }
+ }
+ None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+ };
+ self.unify(&formal_receiver_ty, &receiver_ty);
+
+ let expected_inputs =
+ self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());
+
+ self.check_call_arguments(tgt_expr, args, &expected_inputs, &param_tys, &[], is_varargs);
+ self.normalize_associated_types_in(ret_ty)
+ }
+
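+ /// Tentatively unifies the expected output type with the formal output
+ /// type and, if that succeeds, resolves the formal input types under that
+ /// fudged unification; e.g. in `let _: &[i32] = identity(&[1, 2])` this
+ /// propagates `&[i32]` down to the argument's expected type.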
+ fn expected_inputs_for_expected_output(
+ &mut self,
+ expected_output: &Expectation,
+ output: Ty,
+ inputs: Vec<Ty>,
+ ) -> Vec<Ty> {
+ if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
+ self.table.fudge_inference(|table| {
+ if table.try_unify(&expected_ty, &output).is_ok() {
+ table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
+ chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
+ chalk_ir::VariableKind::Lifetime => {
+ var.to_lifetime(Interner).cast(Interner)
+ }
+ chalk_ir::VariableKind::Const(ty) => {
+ var.to_const(Interner, ty).cast(Interner)
+ }
+ })
+ } else {
+ Vec::new()
+ }
+ })
+ } else {
+ Vec::new()
+ }
+ }
+
+ fn check_call_arguments(
+ &mut self,
+ expr: ExprId,
+ args: &[ExprId],
+ expected_inputs: &[Ty],
+ param_tys: &[Ty],
+ skip_indices: &[u32],
+ is_varargs: bool,
+ ) {
+ if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
+ self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
+ call_expr: expr,
+ expected: param_tys.len() + skip_indices.len(),
+ found: args.len(),
+ });
+ }
+
+ // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+ // We do this in a pretty awful way: first we type-check any arguments
+ // that are not closures, then we type-check the closures. This is so
+ // that we have more information about the types of arguments when we
+ // type-check the functions. This isn't really the right way to do this.
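+ // E.g. in a hypothetical `call(data, |x| x.len())`, checking `data`
+ // first can pin down generic parameters, which in turn gives the
+ // closure's `x` a concrete type before its body is type-checked.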
+ for &check_closures in &[false, true] {
+ let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
+ let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
+ let expected_iter = expected_inputs
+ .iter()
+ .cloned()
+ .chain(param_iter.clone().skip(expected_inputs.len()));
+ for (idx, ((&arg, param_ty), expected_ty)) in
+ args.iter().zip(param_iter).zip(expected_iter).enumerate()
+ {
+ let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
+ if is_closure != check_closures {
+ continue;
+ }
+
+ while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
+ skip_indices.next();
+ }
+ if skip_indices.peek().copied() == Some(idx as u32) {
+ continue;
+ }
+
+ // the difference between param_ty and expected here is that
+ // expected is the parameter when the expected *return* type is
+ // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
+ // the expected type is already `&[i32]`, whereas param_ty is
+ // still an unbound type variable. We don't always want to force
+ // the parameter to coerce to the expected type (for example in
+ // `coerce_unsize_expected_type_4`).
+ let param_ty = self.normalize_associated_types_in(param_ty);
+ let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
+ // infer with the expected type we have...
+ let ty = self.infer_expr_inner(arg, &expected);
+
+ // then coerce to either the expected type or just the formal parameter type
+ let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
+ // if we are coercing to the expectation, unify with the
+ // formal parameter type to connect everything
+ self.unify(&ty, &param_ty);
+ ty
+ } else {
+ param_ty
+ };
+ if !coercion_target.is_unknown() {
+ if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
+ self.result.type_mismatches.insert(
+ arg.into(),
+ TypeMismatch { expected: coercion_target, actual: ty.clone() },
+ );
+ }
+ }
+ }
+ }
+ }
+
+ fn substs_for_method_call(
+ &mut self,
+ def_generics: Generics,
+ generic_args: Option<&GenericArgs>,
+ ) -> Substitution {
+ let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+ def_generics.provenance_split();
+ assert_eq!(self_params, 0); // method shouldn't have another Self param
+ let total_len = parent_params + type_params + const_params + impl_trait_params;
+ let mut substs = Vec::with_capacity(total_len);
+ // Parent arguments are unknown
+ for (id, param) in def_generics.iter_parent() {
+ match param {
+ TypeOrConstParamData::TypeParamData(_) => {
+ substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner));
+ }
+ TypeOrConstParamData::ConstParamData(_) => {
+ let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id));
+ substs
+ .push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner));
+ }
+ }
+ }
+ // handle provided arguments
+ if let Some(generic_args) = generic_args {
+ // if generic arguments are provided, all of them should be, but we can't rely on that
+ for (arg, kind_id) in generic_args
+ .args
+ .iter()
+ .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+ .take(type_params + const_params)
+ .zip(def_generics.iter_id().skip(parent_params))
+ {
+ if let Some(g) = generic_arg_to_chalk(
+ self.db,
+ kind_id,
+ arg,
+ self,
+ |this, type_ref| this.make_ty(type_ref),
+ |this, c, ty| {
+ const_or_path_to_chalk(
+ this.db,
+ &this.resolver,
+ ty,
+ c,
+ ParamLoweringMode::Placeholder,
+ || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
+ DebruijnIndex::INNERMOST,
+ )
+ },
+ ) {
+ substs.push(g);
+ }
+ }
+ };
+ for (id, data) in def_generics.iter().skip(substs.len()) {
+ match data {
+ TypeOrConstParamData::TypeParamData(_) => {
+ substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner))
+ }
+ TypeOrConstParamData::ConstParamData(_) => {
+ substs.push(
+ GenericArgData::Const(self.table.new_const_var(
+ self.db.const_param_ty(ConstParamId::from_unchecked(id)),
+ ))
+ .intern(Interner),
+ )
+ }
+ }
+ }
+ assert_eq!(substs.len(), total_len);
+ Substitution::from_iter(Interner, substs)
+ }
+
+ fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
+ let callable_ty = self.resolve_ty_shallow(callable_ty);
+ if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
+ let def: CallableDefId = from_chalk(self.db, *fn_def);
+ let generic_predicates = self.db.generic_predicates(def.into());
+ for predicate in generic_predicates.iter() {
+ let (predicate, binders) = predicate
+ .clone()
+ .substitute(Interner, parameters)
+ .into_value_and_skipped_binders();
+ always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
+ self.push_obligation(predicate.cast(Interner));
+ }
+ // add obligation for trait implementation, if this is a trait method
+ match def {
+ CallableDefId::FunctionId(f) => {
+ if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
+ // construct a TraitRef
+ let substs = crate::subst_prefix(
+ &*parameters,
+ generics(self.db.upcast(), trait_.into()).len(),
+ );
+ self.push_obligation(
+ TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
+ .cast(Interner),
+ );
+ }
+ }
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
+ }
+ }
+ }
+
+ /// Returns the argument indices to skip.
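+ ///
+ /// "Legacy const generics" are const parameters that are written as value
+ /// arguments at the call site (as with some stdarch intrinsics); the
+ /// returned indices identify those arguments so they can be skipped when
+ /// checking the remaining value arguments.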
+ fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Box<[u32]> {
+ let (func, subst) = match callee.kind(Interner) {
+ TyKind::FnDef(fn_id, subst) => {
+ let callable = CallableDefId::from_chalk(self.db, *fn_id);
+ let func = match callable {
+ CallableDefId::FunctionId(f) => f,
+ _ => return Default::default(),
+ };
+ (func, subst)
+ }
+ _ => return Default::default(),
+ };
+
+ let data = self.db.function_data(func);
+ if data.legacy_const_generics_indices.is_empty() {
+ return Default::default();
+ }
+
+ // only use legacy const generics if the argument count matches once they are included
+ if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
+ if args.len() <= data.params.len() {
+ return Default::default();
+ } else {
+ // there are more arguments than there should be without legacy
+ // const params; use them
+ let mut indices = data.legacy_const_generics_indices.clone();
+ indices.sort();
+ return indices;
+ }
+ }
+
+ // check legacy const parameters
+ for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
+ let arg = match subst.at(Interner, subst_idx).constant(Interner) {
+ Some(c) => c,
+ None => continue, // not a const parameter?
+ };
+ if arg_idx >= args.len() as u32 {
+ continue;
+ }
+ let _ty = arg.data(Interner).ty.clone();
+ let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
+ self.infer_expr(args[arg_idx as usize], &expected);
+ // FIXME: evaluate and unify with the const
+ }
+ let mut indices = data.legacy_const_generics_indices.clone();
+ indices.sort();
+ indices
+ }
+
+ fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
+ let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
+ let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
+ match op {
+ BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
+ Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
+ }
+ BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
+ // all integer combinations are valid here
+ if matches!(
+ lhs_ty.kind(Interner),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer)
+ ) && matches!(
+ rhs_ty.kind(Interner),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer)
+ ) {
+ Some(lhs_ty)
+ } else {
+ None
+ }
+ }
+ BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
+ // (int, int) | (uint, uint) | (float, float)
+ (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
+ | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
+ | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_))) => {
+ Some(rhs_ty)
+ }
+ // ({int}, int) | ({int}, uint)
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+ ) => Some(rhs_ty),
+ // (int, {int}) | (uint, {int})
+ (
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ ) => Some(lhs_ty),
+ // ({float} | float)
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ TyKind::Scalar(Scalar::Float(_)),
+ ) => Some(rhs_ty),
+ // (float, {float})
+ (
+ TyKind::Scalar(Scalar::Float(_)),
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ ) => Some(lhs_ty),
+ // ({int}, {int}) | ({float}, {float})
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ )
+ | (
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ ) => Some(rhs_ty),
+ _ => None,
+ },
+ }
+ }
+
+ fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
+ Some(match op {
+ BinaryOp::LogicOp(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ BinaryOp::Assignment { op: None } => lhs_ty,
+ BinaryOp::CmpOp(CmpOp::Eq { .. }) => match self
+ .resolve_ty_shallow(&lhs_ty)
+ .kind(Interner)
+ {
+ TyKind::Scalar(_) | TyKind::Str => lhs_ty,
+ TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
+ _ => return None,
+ },
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => return None,
+ BinaryOp::CmpOp(CmpOp::Ord { .. })
+ | BinaryOp::Assignment { op: Some(_) }
+ | BinaryOp::ArithOp(_) => match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) => lhs_ty,
+ TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
+ _ => return None,
+ },
+ })
+ }
+
+ fn resolve_binop_method(&self, op: BinaryOp) -> Option<FunctionId> {
+ let (name, lang_item) = match op {
+ BinaryOp::LogicOp(_) => return None,
+ BinaryOp::ArithOp(aop) => match aop {
+ ArithOp::Add => (name!(add), name!(add)),
+ ArithOp::Mul => (name!(mul), name!(mul)),
+ ArithOp::Sub => (name!(sub), name!(sub)),
+ ArithOp::Div => (name!(div), name!(div)),
+ ArithOp::Rem => (name!(rem), name!(rem)),
+ ArithOp::Shl => (name!(shl), name!(shl)),
+ ArithOp::Shr => (name!(shr), name!(shr)),
+ ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
+ ArithOp::BitOr => (name!(bitor), name!(bitor)),
+ ArithOp::BitAnd => (name!(bitand), name!(bitand)),
+ },
+ BinaryOp::Assignment { op: Some(aop) } => match aop {
+ ArithOp::Add => (name!(add_assign), name!(add_assign)),
+ ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
+ ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
+ ArithOp::Div => (name!(div_assign), name!(div_assign)),
+ ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
+ ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
+ ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
+ ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
+ ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
+ ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
+ },
+ BinaryOp::CmpOp(cop) => match cop {
+ CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
+ CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
+ (name!(le), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
+ (name!(lt), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
+ (name!(ge), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
+ (name!(gt), name!(partial_ord))
+ }
+ },
+ BinaryOp::Assignment { op: None } => return None,
+ };
+
+ let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?;
+
+ self.db.trait_data(trait_).method_by_name(&name)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
new file mode 100644
index 000000000..5e7320a5d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -0,0 +1,354 @@
+//! Type inference for patterns.
+
+use std::iter::repeat_with;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
+ path::Path,
+ type_ref::ConstScalar,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ consteval::intern_const_scalar,
+ infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
+ lower::lower_to_chalk_mutability,
+ static_lifetime, ConcreteConst, ConstValue, Interner, Substitution, Ty, TyBuilder, TyExt,
+ TyKind,
+};
+
+use super::PatLike;
+
+impl<'a> InferenceContext<'a> {
+ /// Infers type for tuple struct pattern or its corresponding assignee expression.
+ ///
+ /// Ellipses found in the original pattern or expression must be filtered out.
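+ ///
+ /// For example, `Foo(a, .., b)` is handled here both as a pattern and as
+ /// an assignee expression; the position of the removed `..` is passed via
+ /// `ellipsis`.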
+ pub(super) fn infer_tuple_struct_pat_like<T: PatLike>(
+ &mut self,
+ path: Option<&Path>,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ id: T,
+ ellipsis: Option<usize>,
+ subs: &[T],
+ ) -> Ty {
+ let (ty, def) = self.resolve_variant(path, true);
+ let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+ if let Some(variant) = def {
+ self.write_variant_resolution(id.into(), variant);
+ }
+ self.unify(&ty, expected);
+
+ let substs =
+ ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+ let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let (pre, post) = match ellipsis {
+ Some(idx) => subs.split_at(idx),
+ None => (subs, &[][..]),
+ };
+ let post_idx_offset = field_tys.iter().count().saturating_sub(post.len());
+
+ let pre_iter = pre.iter().enumerate();
+ let post_iter = (post_idx_offset..).zip(post.iter());
+ for (i, &subpat) in pre_iter.chain(post_iter) {
+ let expected_ty = var_data
+ .as_ref()
+ .and_then(|d| d.field(&Name::new_tuple_field(i)))
+ .map_or(self.err_ty(), |field| {
+ field_tys[field].clone().substitute(Interner, &substs)
+ });
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
+ T::infer(self, subpat, &expected_ty, default_bm);
+ }
+
+ ty
+ }
+
+ /// Infers type for record pattern or its corresponding assignee expression.
+ pub(super) fn infer_record_pat_like<T: PatLike>(
+ &mut self,
+ path: Option<&Path>,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ id: T,
+ subs: impl Iterator<Item = (Name, T)>,
+ ) -> Ty {
+ let (ty, def) = self.resolve_variant(path, false);
+ if let Some(variant) = def {
+ self.write_variant_resolution(id.into(), variant);
+ }
+
+ self.unify(&ty, expected);
+
+ let substs =
+ ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+ let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+
+ for (name, inner) in subs {
+ let expected_ty = var_data
+ .as_ref()
+ .and_then(|it| it.field(&name))
+ .map_or(self.err_ty(), |f| field_tys[f].clone().substitute(Interner, &substs));
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
+
+ T::infer(self, inner, &expected_ty, default_bm);
+ }
+
+ ty
+ }
+
+ /// Infers type for tuple pattern or its corresponding assignee expression.
+ ///
+ /// Ellipses found in the original pattern or expression must be filtered out.
+ pub(super) fn infer_tuple_pat_like<T: PatLike>(
+ &mut self,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ ellipsis: Option<usize>,
+ subs: &[T],
+ ) -> Ty {
+ let expectations = match expected.as_tuple() {
+ Some(parameters) => &*parameters.as_slice(Interner),
+ _ => &[],
+ };
+
+ let ((pre, post), n_uncovered_patterns) = match ellipsis {
+ Some(idx) => (subs.split_at(idx), expectations.len().saturating_sub(subs.len())),
+ None => ((&subs[..], &[][..]), 0),
+ };
+ let mut expectations_iter = expectations
+ .iter()
+ .cloned()
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()));
+
+ let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len());
+
+ inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns + subs.len()));
+
+ // Process pre
+ for (ty, pat) in inner_tys.iter_mut().zip(pre) {
+ *ty = T::infer(self, *pat, ty, default_bm);
+ }
+
+ // Process post
+ for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
+ *ty = T::infer(self, *pat, ty, default_bm);
+ }
+
+ TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+ .intern(Interner)
+ }
+
+ pub(super) fn infer_pat(
+ &mut self,
+ pat: PatId,
+ expected: &Ty,
+ mut default_bm: BindingMode,
+ ) -> Ty {
+ let mut expected = self.resolve_ty_shallow(expected);
+
+ if is_non_ref_pat(&self.body, pat) {
+ let mut pat_adjustments = Vec::new();
+ while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
+ pat_adjustments.push(expected.clone());
+ expected = self.resolve_ty_shallow(inner);
+ default_bm = match default_bm {
+ BindingMode::Move => BindingMode::Ref(mutability),
+ BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not),
+ BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
+ }
+ }
+
+ if !pat_adjustments.is_empty() {
+ pat_adjustments.shrink_to_fit();
+ self.result.pat_adjustments.insert(pat, pat_adjustments);
+ }
+ } else if let Pat::Ref { .. } = &self.body[pat] {
+ cov_mark::hit!(match_ergonomics_ref);
+ // When you encounter a `&pat` pattern, reset to Move.
+ // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
+ default_bm = BindingMode::Move;
+ }
+
+ // Lose mutability.
+ let default_bm = default_bm;
+ let expected = expected;
+
+ let ty = match &self.body[pat] {
+ Pat::Tuple { args, ellipsis } => {
+ self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
+ }
+ Pat::Or(pats) => {
+ if let Some((first_pat, rest)) = pats.split_first() {
+ let ty = self.infer_pat(*first_pat, &expected, default_bm);
+ for pat in rest {
+ self.infer_pat(*pat, &expected, default_bm);
+ }
+ ty
+ } else {
+ self.err_ty()
+ }
+ }
+ Pat::Ref { pat, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = match expected.as_reference() {
+ Some((inner_ty, _lifetime, exp_mut)) => {
+ if mutability != exp_mut {
+ // FIXME: emit type error?
+ }
+ inner_ty.clone()
+ }
+ _ => self.result.standard_types.unknown.clone(),
+ };
+ let subty = self.infer_pat(*pat, &expectation, default_bm);
+ TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+ }
+ Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
+ .infer_tuple_struct_pat_like(
+ p.as_deref(),
+ &expected,
+ default_bm,
+ pat,
+ *ellipsis,
+ subpats,
+ ),
+ Pat::Record { path: p, args: fields, ellipsis: _ } => {
+ let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
+ self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+ }
+ Pat::Path(path) => {
+ // FIXME use correct resolver for the surrounding expression
+ let resolver = self.resolver.clone();
+ self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Pat::Bind { mode, name: _, subpat } => {
+ let mode = if mode == &BindingAnnotation::Unannotated {
+ default_bm
+ } else {
+ BindingMode::convert(*mode)
+ };
+ self.result.pat_binding_modes.insert(pat, mode);
+
+ let inner_ty = match subpat {
+ Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
+ None => expected,
+ };
+ let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+ let bound_ty = match mode {
+ BindingMode::Ref(mutability) => {
+ TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
+ .intern(Interner)
+ }
+ BindingMode::Move => inner_ty.clone(),
+ };
+ self.write_pat_ty(pat, bound_ty);
+ return inner_ty;
+ }
+ Pat::Slice { prefix, slice, suffix } => {
+ let elem_ty = match expected.kind(Interner) {
+ TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ for &pat_id in prefix.iter().chain(suffix.iter()) {
+ self.infer_pat(pat_id, &elem_ty, default_bm);
+ }
+
+ if let &Some(slice_pat_id) = slice {
+ let rest_pat_ty = match expected.kind(Interner) {
+ TyKind::Array(_, length) => {
+ let len = match length.data(Interner).value {
+ ConstValue::Concrete(ConcreteConst {
+ interned: ConstScalar::UInt(len),
+ }) => len.checked_sub((prefix.len() + suffix.len()) as u128),
+ _ => None,
+ };
+ TyKind::Array(
+ elem_ty.clone(),
+ intern_const_scalar(
+ len.map_or(ConstScalar::Unknown, |len| ConstScalar::UInt(len)),
+ TyBuilder::usize(),
+ ),
+ )
+ }
+ _ => TyKind::Slice(elem_ty.clone()),
+ }
+ .intern(Interner);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+ }
+
+ match expected.kind(Interner) {
+ TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+ _ => TyKind::Slice(elem_ty),
+ }
+ .intern(Interner)
+ }
+ Pat::Wild => expected.clone(),
+ Pat::Range { start, end } => {
+ let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
+ self.infer_expr(*end, &Expectation::has_type(start_ty))
+ }
+ Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
+ Pat::Box { inner } => match self.resolve_boxed_box() {
+ Some(box_adt) => {
+ let (inner_ty, alloc_ty) = match expected.as_adt() {
+ Some((adt, subst)) if adt == box_adt => (
+ subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
+ subst.as_slice(Interner).get(1).and_then(|a| a.ty(Interner).cloned()),
+ ),
+ _ => (self.result.standard_types.unknown.clone(), None),
+ };
+
+ let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm);
+ let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty);
+
+ if let Some(alloc_ty) = alloc_ty {
+ b = b.push(alloc_ty);
+ }
+ b.fill_with_defaults(self.db, || self.table.new_type_var()).build()
+ }
+ None => self.err_ty(),
+ },
+ Pat::ConstBlock(expr) => {
+ self.infer_expr(*expr, &Expectation::has_type(expected.clone()))
+ }
+ Pat::Missing => self.err_ty(),
+ };
+ // use a new type variable if we got the error type here
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, &expected) {
+ self.result
+ .type_mismatches
+ .insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
+ }
+ self.write_pat_ty(pat, ty.clone());
+ ty
+ }
+}
+
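+/// Returns whether `pat` is a "non-reference pattern" for the purposes of
+/// match ergonomics: matching such a pattern against a reference dereferences
+/// the scrutinee and adjusts the default binding mode (see `infer_pat`).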
+fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
+ // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
+ Pat::Path(..) => true,
+ Pat::ConstBlock(..) => true,
+ Pat::Lit(expr) => !matches!(body[*expr], Expr::Literal(Literal::String(..))),
+ Pat::Bind {
+ mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
+ subpat: Some(subpat),
+ ..
+ } => is_non_ref_pat(body, *subpat),
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
new file mode 100644
index 000000000..f580e09e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -0,0 +1,295 @@
+//! Path expression resolution.
+
+use chalk_ir::cast::Cast;
+use hir_def::{
+ path::{Path, PathSegment},
+ resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ builder::ParamKind,
+ consteval,
+ method_resolution::{self, VisibleFromModule},
+ GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+ ValueTyDefId,
+};
+
+use super::{ExprOrPatId, InferenceContext, TraitRef};
+
+impl<'a> InferenceContext<'a> {
+ pub(super) fn infer_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let ty = self.resolve_value_path(resolver, path, id)?;
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ Some(ty)
+ }
+
+ fn resolve_value_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ if path.segments().is_empty() {
+ // This can't actually happen syntax-wise
+ return None;
+ }
+ let ty = self.make_ty(type_ref);
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
+ let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
+ self.resolve_ty_assoc_item(
+ ty,
+ path.segments().last().expect("path had at least one segment").name,
+ id,
+ )?
+ } else {
+ let value_or_partial =
+ resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it) => (it, None),
+ ResolveValueResult::Partial(def, remaining_index) => {
+ self.resolve_assoc_item(def, path, remaining_index, id)?
+ }
+ }
+ };
+
+ let typable: ValueTyDefId = match value {
+ ValueNs::LocalBinding(pat) => {
+ let ty = self.result.type_of_pat.get(pat)?.clone();
+ return Some(ty);
+ }
+ ValueNs::FunctionId(it) => it.into(),
+ ValueNs::ConstId(it) => it.into(),
+ ValueNs::StaticId(it) => it.into(),
+ ValueNs::StructId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::EnumVariantId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::ImplSelf(impl_id) => {
+ let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let substs = generics.placeholder_subst(self.db);
+ let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
+ if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
+ let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
+ return Some(ty);
+ } else {
+ // FIXME: diagnostic, invalid Self reference
+ return None;
+ }
+ }
+ ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)),
+ };
+
+ let parent_substs = self_subst.unwrap_or_else(|| Substitution::empty(Interner));
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let substs = ctx.substs_from_path(path, typable, true);
+ let mut it = substs.as_slice(Interner)[parent_substs.len(Interner)..].iter().cloned();
+ let ty = TyBuilder::value_ty(self.db, typable)
+ .use_parent_substs(&parent_substs)
+ .fill(|x| {
+ it.next().unwrap_or_else(|| match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
+ })
+ })
+ .build();
+ Some(ty)
+ }
+
+ fn resolve_assoc_item(
+ &mut self,
+ def: TypeNs,
+ path: &Path,
+ remaining_index: usize,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ assert!(remaining_index < path.segments().len());
+ // there may be more intermediate segments between the resolved one and
+ // the end. Only the last segment needs to be resolved to a value; from
+ // the segments before that, we need to get either a type or a trait ref.
+
+ let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
+ let remaining_segments = path.segments().skip(remaining_index);
+ let is_before_last = remaining_segments.len() == 1;
+
+ match (def, is_before_last) {
+ (TypeNs::TraitId(trait_), true) => {
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let trait_ref =
+ ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+ self.resolve_trait_assoc_item(trait_ref, segment, id)
+ }
+ (def, _) => {
+ // Either we already have a type (e.g. `Vec::new`), or we have a
+ // trait but it's not the last segment, so the next segment
+ // should resolve to an associated type of that trait (e.g. `<T
+ // as Iterator>::Item::default`)
+ let remaining_segments_for_ty =
+ remaining_segments.take(remaining_segments.len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let (ty, _) = ctx.lower_partly_resolved_path(
+ def,
+ resolved_segment,
+ remaining_segments_for_ty,
+ true,
+ );
+ if let TyKind::Error = ty.kind(Interner) {
+ return None;
+ }
+
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+
+ self.resolve_ty_assoc_item(ty, segment.name, id)
+ }
+ }
+ }
+
+ fn resolve_trait_assoc_item(
+ &mut self,
+ trait_ref: TraitRef,
+ segment: PathSegment<'_>,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ let trait_ = trait_ref.hir_trait_id();
+ let item =
+ self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
+ match item {
+ AssocItemId::FunctionId(func) => {
+ if segment.name == &self.db.function_data(func).name {
+ Some(AssocItemId::FunctionId(func))
+ } else {
+ None
+ }
+ }
+
+ AssocItemId::ConstId(konst) => {
+ if self
+ .db
+ .const_data(konst)
+ .name
+ .as_ref()
+ .map_or(false, |n| n == segment.name)
+ {
+ Some(AssocItemId::ConstId(konst))
+ } else {
+ None
+ }
+ }
+ AssocItemId::TypeAliasId(_) => None,
+ }
+ })?;
+ let def = match item {
+ AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
+ AssocItemId::ConstId(c) => ValueNs::ConstId(c),
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, Some(trait_ref.substitution)))
+ }
+
+ fn resolve_ty_assoc_item(
+ &mut self,
+ ty: Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ if let TyKind::Error = ty.kind(Interner) {
+ return None;
+ }
+
+ if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
+ return Some(result);
+ }
+
+ let canonical_ty = self.canonicalize(ty.clone());
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ method_resolution::iterate_method_candidates(
+ &canonical_ty.value,
+ self.db,
+ self.table.trait_env.clone(),
+ &traits_in_scope,
+ VisibleFromModule::Filter(self.resolver.module()),
+ Some(name),
+ method_resolution::LookupMode::Path,
+ move |_ty, item| {
+ let (def, container) = match item {
+ AssocItemId::FunctionId(f) => {
+ (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::ConstId(c) => {
+ (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+ let substs = match container {
+ ItemContainerId::ImplId(impl_id) => {
+ let impl_substs = TyBuilder::subst_for_def(self.db, impl_id)
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ let impl_self_ty =
+ self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
+ self.unify(&impl_self_ty, &ty);
+ Some(impl_substs)
+ }
+ ItemContainerId::TraitId(trait_) => {
+ // we're picking this method
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(ty.clone())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.push_obligation(trait_ref.clone().cast(Interner));
+ Some(trait_ref.substitution)
+ }
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, substs))
+ },
+ )
+ }
+
+ fn resolve_enum_variant_on_ty(
+ &mut self,
+ ty: &Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ let ty = self.resolve_ty_shallow(ty);
+ let (enum_id, subst) = match ty.as_adt() {
+ Some((AdtId::EnumId(e), subst)) => (e, subst),
+ _ => return None,
+ };
+ let enum_data = self.db.enum_data(enum_id);
+ let local_id = enum_data.variant(name)?;
+ let variant = EnumVariantId { parent: enum_id, local_id };
+ self.write_variant_resolution(id, variant.into());
+ Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
new file mode 100644
index 000000000..e77b55670
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -0,0 +1,738 @@
+//! Unification and canonicalization logic.
+
+use std::{fmt, mem, sync::Arc};
+
+use chalk_ir::{
+ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy,
+ IntTy, NoSolution, TyVariableKind, UniverseIndex,
+};
+use chalk_solve::infer::ParameterEnaVariableExt;
+use ena::unify::UnifyKey;
+use hir_expand::name;
+use stdx::never;
+
+use super::{InferOk, InferResult, InferenceContext, TypeError};
+use crate::{
+ db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar,
+ Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment,
+ InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution,
+ Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ &mut self,
+ t: T,
+ ) -> Canonicalized<T>
+ where
+ T: HasInterner<Interner = Interner>,
+ {
+ self.table.canonicalize(t)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Canonicalized<T>
+where
+ T: HasInterner<Interner = Interner>,
+{
+ pub(crate) value: Canonical<T>,
+ free_vars: Vec<GenericArg>,
+}
+
+impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
+ pub(super) fn apply_solution(
+ &self,
+ ctx: &mut InferenceTable<'_>,
+ solution: Canonical<Substitution>,
+ ) {
+ // the solution may contain new variables, which we need to convert to new inference vars
+ let new_vars = Substitution::from_iter(
+ Interner,
+ solution.binders.iter(Interner).map(|k| match &k.kind {
+ VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner),
+ VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner),
+ VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
+ // Chalk can sometimes return new lifetime variables. We just use the static lifetime everywhere
+ VariableKind::Lifetime => static_lifetime().cast(Interner),
+ VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
+ }),
+ );
+ for (i, v) in solution.value.iter(Interner).enumerate() {
+ let var = self.free_vars[i].clone();
+ if let Some(ty) = v.ty(Interner) {
+ // eagerly replace projections in the type; we may be getting types
+ // e.g. from where clauses where this hasn't happened yet
+ let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner));
+ ctx.unify(var.assert_ty_ref(Interner), &ty);
+ } else {
+ let _ = ctx.try_unify(&var, &new_vars.apply(v.clone(), Interner));
+ }
+ }
+ }
+}
+
+pub fn could_unify(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ unify(db, env, tys).is_some()
+}
+
+pub(crate) fn unify(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> Option<Substitution> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = Substitution::from_iter(
+ Interner,
+ tys.binders.iter(Interner).map(|x| match &x.kind {
+ chalk_ir::VariableKind::Ty(_) => {
+ GenericArgData::Ty(table.new_type_var()).intern(Interner)
+ }
+ chalk_ir::VariableKind::Lifetime => {
+ GenericArgData::Ty(table.new_type_var()).intern(Interner)
+ } // FIXME: maybe wrong?
+ chalk_ir::VariableKind::Const(ty) => {
+ GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
+ }
+ }),
+ );
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ if !table.unify(&ty1_with_vars, &ty2_with_vars) {
+ return None;
+ }
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+ };
+ Some(Substitution::from_iter(
+ Interner,
+ vars.iter(Interner).map(|v| table.resolve_with_fallback(v.clone(), &fallback)),
+ ))
+}
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct TypeVariableData {
+ diverging: bool,
+}
+
+type ChalkInferenceTable = chalk_solve::infer::InferenceTable<Interner>;
+
+#[derive(Clone)]
+pub(crate) struct InferenceTable<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) trait_env: Arc<TraitEnvironment>,
+ var_unification_table: ChalkInferenceTable,
+ type_variable_table: Vec<TypeVariableData>,
+ pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+}
+
+pub(crate) struct InferenceTableSnapshot {
+ var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>,
+ pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+ type_variable_table_snapshot: Vec<TypeVariableData>,
+}
+
+impl<'a> InferenceTable<'a> {
+ pub(crate) fn new(db: &'a dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
+ InferenceTable {
+ db,
+ trait_env,
+ var_unification_table: ChalkInferenceTable::new(),
+ type_variable_table: Vec::new(),
+ pending_obligations: Vec::new(),
+ }
+ }
+
+ /// Chalk doesn't know about the `diverging` flag, so when it unifies two
+ /// type variables of which one is diverging, the chosen root might not be
+ /// diverging and we have no way of marking it as such at that time. This
+ /// function goes through all type variables and makes sure their root is
+ /// marked as diverging if necessary, so that resolving them gives the right
+ /// result.
+ pub(super) fn propagate_diverging_flag(&mut self) {
+ for i in 0..self.type_variable_table.len() {
+ if !self.type_variable_table[i].diverging {
+ continue;
+ }
+ let v = InferenceVar::from(i as u32);
+ let root = self.var_unification_table.inference_var_root(v);
+ if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) {
+ data.diverging = true;
+ }
+ }
+ }
+
+ pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) {
+ self.type_variable_table[iv.index() as usize].diverging = diverging;
+ }
+
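+ /// Fallback type used for an unresolved inference variable: diverging
+ /// variables fall back to `!`, unconstrained integer/float variables to
+ /// `i32`/`f64`, and general variables to the error type.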
+ fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
+ match kind {
+ _ if self
+ .type_variable_table
+ .get(iv.index() as usize)
+ .map_or(false, |data| data.diverging) =>
+ {
+ TyKind::Never
+ }
+ TyVariableKind::General => TyKind::Error,
+ TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)),
+ TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)),
+ }
+ .intern(Interner)
+ }
+
+ pub(crate) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ &mut self,
+ t: T,
+ ) -> Canonicalized<T>
+ where
+ T: HasInterner<Interner = Interner>,
+ {
+ // try to resolve obligations before canonicalizing, since this might
+ // result in new knowledge about variables
+ self.resolve_obligations_as_possible();
+ let result = self.var_unification_table.canonicalize(Interner, t);
+ let free_vars = result
+ .free_vars
+ .into_iter()
+ .map(|free_var| free_var.to_generic_arg(Interner))
+ .collect();
+ Canonicalized { value: result.quantified, free_vars }
+ }
+
+ /// Recurses through the given type, normalizing associated types mentioned
+ /// in it by replacing them by type variables and registering obligations to
+ /// resolve later. This should be done once for every type we get from some
+ /// type annotation (e.g. from a let type annotation, field type or function
+ /// call). `make_ty` handles this already, but e.g. for field types we need
+ /// to do it as well.
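+ /// For example, a projection such as `<T as Iterator>::Item` is replaced
+ /// with a fresh type variable, and an `AliasEq` obligation relating the
+ /// two is registered (see `normalize_projection_ty` below).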
+ pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+ fold_tys(
+ ty,
+ |ty, _| match ty.kind(Interner) {
+ TyKind::Alias(AliasTy::Projection(proj_ty)) => {
+ self.normalize_projection_ty(proj_ty.clone())
+ }
+ _ => ty,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+
+ pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
+ let var = self.new_type_var();
+ let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() };
+ let obligation = alias_eq.cast(Interner);
+ self.register_obligation(obligation);
+ var
+ }
+
+ fn extend_type_variable_table(&mut self, to_index: usize) {
+ self.type_variable_table.extend(
+ (0..1 + to_index - self.type_variable_table.len())
+ .map(|_| TypeVariableData { diverging: false }),
+ );
+ }
+
+ fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ // Chalk might have created some type variables for its own purposes that we don't know about...
+ self.extend_type_variable_table(var.index() as usize);
+ assert_eq!(var.index() as usize, self.type_variable_table.len() - 1);
+ self.type_variable_table[var.index() as usize].diverging = diverging;
+ var.to_ty_with_kind(Interner, kind)
+ }
+
+ pub(crate) fn new_type_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::General, false)
+ }
+
+ pub(crate) fn new_integer_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::Integer, false)
+ }
+
+ pub(crate) fn new_float_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::Float, false)
+ }
+
+ pub(crate) fn new_maybe_never_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::General, true)
+ }
+
+ pub(crate) fn new_const_var(&mut self, ty: Ty) -> Const {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ var.to_const(Interner, ty)
+ }
+
+ pub(crate) fn new_lifetime_var(&mut self) -> Lifetime {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ var.to_lifetime(Interner)
+ }
+
+ pub(crate) fn resolve_with_fallback<T>(
+ &mut self,
+ t: T,
+ fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+ ) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback)
+ }
+
+ pub(crate) fn fresh_subst(&mut self, binders: &[CanonicalVarKind<Interner>]) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ binders.iter().map(|kind| {
+ let param_infer_var =
+ kind.map_ref(|&ui| self.var_unification_table.new_variable(ui));
+ param_infer_var.to_generic_arg(Interner)
+ }),
+ )
+ }
+
+ pub(crate) fn instantiate_canonical<T>(&mut self, canonical: Canonical<T>) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + std::fmt::Debug,
+ {
+ let subst = self.fresh_subst(canonical.binders.as_slice(Interner));
+ subst.apply(canonical.value, Interner)
+ }
+
+ fn resolve_with_fallback_inner<T>(
+ &mut self,
+ var_stack: &mut Vec<InferenceVar>,
+ t: T,
+ fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+ ) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ t.fold_with(
+ &mut resolve::Resolver { table: self, var_stack, fallback },
+ DebruijnIndex::INNERMOST,
+ )
+ .expect("fold failed unexpectedly")
+ }
+
+ pub(crate) fn resolve_completely<T>(&mut self, t: T) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ self.resolve_with_fallback(t, &|_, _, d, _| d)
+ }
+
+ /// Unify two types and register new trait goals that arise from that.
+ pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+ let result = match self.try_unify(ty1, ty2) {
+ Ok(r) => r,
+ Err(_) => return false,
+ };
+ self.register_infer_ok(result);
+ true
+ }
+
+ /// Unify two types and return new trait goals arising from it, so the
+ /// caller needs to deal with them.
+ pub(crate) fn try_unify<T: Zip<Interner>>(&mut self, t1: &T, t2: &T) -> InferResult<()> {
+ match self.var_unification_table.relate(
+ Interner,
+ &self.db,
+ &self.trait_env.env,
+ chalk_ir::Variance::Invariant,
+ t1,
+ t2,
+ ) {
+ Ok(result) => Ok(InferOk { goals: result.goals, value: () }),
+ Err(chalk_ir::NoSolution) => Err(TypeError),
+ }
+ }
+
+ /// If `ty` is a type variable with known type, returns that type;
+ /// otherwise, returns `ty`.
+ pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
+ self.resolve_obligations_as_possible();
+ self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone())
+ }
+
+ pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot {
+ let var_table_snapshot = self.var_unification_table.snapshot();
+ let type_variable_table_snapshot = self.type_variable_table.clone();
+ let pending_obligations = self.pending_obligations.clone();
+ InferenceTableSnapshot {
+ var_table_snapshot,
+ pending_obligations,
+ type_variable_table_snapshot,
+ }
+ }
+
+ pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) {
+ self.var_unification_table.rollback_to(snapshot.var_table_snapshot);
+ self.type_variable_table = snapshot.type_variable_table_snapshot;
+ self.pending_obligations = snapshot.pending_obligations;
+ }
+
+ pub(crate) fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T {
+ let snapshot = self.snapshot();
+ let result = f(self);
+ self.rollback_to(snapshot);
+ result
+ }
+
+ /// Checks an obligation without registering it. Useful mostly to check
+ /// whether a trait *might* be implemented before deciding to 'lock in' the
+ /// choice (during e.g. method resolution or deref).
+ pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option<Solution> {
+ let in_env = InEnvironment::new(&self.trait_env.env, goal);
+ let canonicalized = self.canonicalize(in_env);
+ let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value);
+ solution
+ }
+
+ pub(crate) fn register_obligation(&mut self, goal: Goal) {
+ let in_env = InEnvironment::new(&self.trait_env.env, goal);
+ self.register_obligation_in_env(in_env)
+ }
+
+ fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
+ let canonicalized = self.canonicalize(goal);
+ if !self.try_resolve_obligation(&canonicalized) {
+ self.pending_obligations.push(canonicalized);
+ }
+ }
+
+ pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<T>) {
+ infer_ok.goals.into_iter().for_each(|goal| self.register_obligation_in_env(goal));
+ }
+
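+ /// Tries to solve all pending obligations, repeating until no further
+ /// progress is made.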
+ pub(crate) fn resolve_obligations_as_possible(&mut self) {
+ let _span = profile::span("resolve_obligations_as_possible");
+ let mut changed = true;
+ let mut obligations = Vec::new();
+ while changed {
+ changed = false;
+ mem::swap(&mut self.pending_obligations, &mut obligations);
+ for canonicalized in obligations.drain(..) {
+ if !self.check_changed(&canonicalized) {
+ self.pending_obligations.push(canonicalized);
+ continue;
+ }
+ changed = true;
+ let uncanonical = chalk_ir::Substitute::apply(
+ &canonicalized.free_vars,
+ canonicalized.value.value,
+ Interner,
+ );
+ self.register_obligation_in_env(uncanonical);
+ }
+ }
+ }
+
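+ /// Runs `f` inside a snapshot that is rolled back afterwards; any inference
+ /// variables created while running `f` are replaced by fresh variables in
+ /// the returned value, so that no state from the discarded snapshot leaks
+ /// out.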
+ pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>(
+ &mut self,
+ f: impl FnOnce(&mut Self) -> T,
+ ) -> T {
+ use chalk_ir::fold::TypeFolder;
+ struct VarFudger<'a, 'b> {
+ table: &'a mut InferenceTable<'b>,
+ highest_known_var: InferenceVar,
+ }
+ impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ var: chalk_ir::InferenceVar,
+ kind: TyVariableKind,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Ty<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_ty(Interner, kind)
+ } else {
+ self.table.new_type_var()
+ })
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ var: chalk_ir::InferenceVar,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Lifetime<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_lifetime(Interner)
+ } else {
+ self.table.new_lifetime_var()
+ })
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: chalk_ir::Ty<Interner>,
+ var: chalk_ir::InferenceVar,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Const<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_const(Interner, ty)
+ } else {
+ self.table.new_const_var(ty)
+ })
+ }
+ }
+
+ let snapshot = self.snapshot();
+ let highest_known_var = self.new_type_var().inference_var(Interner).expect("inference_var");
+ let result = f(self);
+ self.rollback_to(snapshot);
+ result
+ .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST)
+ .expect("fold_with with VarFudger")
+ }
+
+ /// This checks whether any of the free variables in the `canonicalized`
+ /// have changed (either been unified with another variable, or with a
+ /// value). If this is not the case, we don't need to try to solve the goal
+ /// again -- it'll give the same result as last time.
+ fn check_changed(&mut self, canonicalized: &Canonicalized<InEnvironment<Goal>>) -> bool {
+ canonicalized.free_vars.iter().any(|var| {
+ let iv = match var.data(Interner) {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ }
+ .expect("free var is not inference var");
+ if self.var_unification_table.probe_var(iv).is_some() {
+ return true;
+ }
+ let root = self.var_unification_table.inference_var_root(iv);
+ iv != root
+ })
+ }
+
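+ /// Tries to solve the canonicalized goal and applies a unique solution (or
+ /// definite guidance) to the inference state. Returns `true` if the
+ /// obligation no longer needs to be tracked, i.e. it was either solved or
+ /// cannot be fulfilled at all.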
+ fn try_resolve_obligation(
+ &mut self,
+ canonicalized: &Canonicalized<InEnvironment<Goal>>,
+ ) -> bool {
+ let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value.clone());
+
+ match solution {
+ Some(Solution::Unique(canonical_subst)) => {
+ canonicalized.apply_solution(
+ self,
+ Canonical {
+ binders: canonical_subst.binders,
+ // FIXME: handle constraints
+ value: canonical_subst.value.subst,
+ },
+ );
+ true
+ }
+ Some(Solution::Ambig(Guidance::Definite(substs))) => {
+ canonicalized.apply_solution(self, substs);
+ false
+ }
+ Some(_) => {
+ // FIXME use this when trying to resolve everything at the end
+ false
+ }
+ None => {
+ // FIXME obligation cannot be fulfilled => diagnostic
+ true
+ }
+ }
+ }
+
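+ /// Returns the parameter types and return type with which `ty` can be
+ /// called with `num_args` arguments, falling back to a call via the
+ /// `FnOnce` trait if `ty` has no built-in callable signature.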
+ pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+ match ty.callable_sig(self.db) {
+ Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
+ None => self.callable_sig_from_fn_trait(ty, num_args),
+ }
+ }
+
+ fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+ let krate = self.trait_env.krate;
+ let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
+ let output_assoc_type =
+ self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+
+ let mut arg_tys = vec![];
+ let arg_ty = TyBuilder::tuple(num_args)
+ .fill(|x| {
+ let arg = match x {
+ ParamKind::Type => self.new_type_var(),
+ ParamKind::Const(ty) => {
+ never!("Tuple with const parameter");
+ return GenericArgData::Const(self.new_const_var(ty.clone()))
+ .intern(Interner);
+ }
+ };
+ arg_tys.push(arg.clone());
+ GenericArgData::Ty(arg).intern(Interner)
+ })
+ .build();
+
+ let projection = {
+ let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type);
+ if b.remaining() != 2 {
+ return None;
+ }
+ b.push(ty.clone()).push(arg_ty).build()
+ };
+
+ let trait_env = self.trait_env.env.clone();
+ let obligation = InEnvironment {
+ goal: projection.trait_ref(self.db).cast(Interner),
+ environment: trait_env,
+ };
+ let canonical = self.canonicalize(obligation.clone());
+ if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() {
+ self.register_obligation(obligation.goal);
+ let return_ty = self.normalize_projection_ty(projection);
+ Some((arg_tys, return_ty))
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a> fmt::Debug for InferenceTable<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
+ }
+}
+
+mod resolve {
+ use super::InferenceTable;
+ use crate::{
+ ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
+ Interner, Lifetime, Ty, TyVariableKind, VariableKind,
+ };
+ use chalk_ir::{
+ cast::Cast,
+ fold::{TypeFoldable, TypeFolder},
+ Fallible, NoSolution,
+ };
+ use hir_def::type_ref::ConstScalar;
+
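+ /// Folder that substitutes the known value for each resolved inference
+ /// variable and calls `fallback` for unresolved or cyclic ones.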
+ pub(super) struct Resolver<'a, 'b, F> {
+ pub(super) table: &'a mut InferenceTable<'b>,
+ pub(super) var_stack: &'a mut Vec<InferenceVar>,
+ pub(super) fallback: F,
+ }
+ impl<'a, 'b, 'i, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+ where
+ F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg + 'i,
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ var: InferenceVar,
+ kind: TyVariableKind,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ let var = self.table.var_unification_table.inference_var_root(var);
+ if self.var_stack.contains(&var) {
+ // recursive type
+ let default = self.table.fallback_value(var, kind).cast(Interner);
+ return Ok((self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+ .assert_ty_ref(Interner)
+ .clone());
+ }
+ let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+ // known_ty may contain other variables that are known by now
+ self.var_stack.push(var);
+ let result =
+ known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+ self.var_stack.pop();
+ result.assert_ty_ref(Interner).clone()
+ } else {
+ let default = self.table.fallback_value(var, kind).cast(Interner);
+ (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+ .assert_ty_ref(Interner)
+ .clone()
+ };
+ Ok(result)
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: Ty,
+ var: InferenceVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ let var = self.table.var_unification_table.inference_var_root(var);
+ let default = ConstData {
+ ty: ty.clone(),
+ value: ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Unknown }),
+ }
+ .intern(Interner)
+ .cast(Interner);
+ if self.var_stack.contains(&var) {
+ // recursive
+ return Ok((self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+ .assert_const_ref(Interner)
+ .clone());
+ }
+ let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+ // known_ty may contain other variables that are known by now
+ self.var_stack.push(var);
+ let result =
+ known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+ self.var_stack.pop();
+ result.assert_const_ref(Interner).clone()
+ } else {
+ (self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+ .assert_const_ref(Interner)
+ .clone()
+ };
+ Ok(result)
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ // fall back all lifetimes to 'static -- currently we don't deal
+ // with any lifetimes, but we can sometimes get some lifetime
+ // variables through Chalk's unification, and this at least makes
+ // sure we don't leak them outside of inference
+ Ok(crate::static_lifetime())
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
new file mode 100644
index 000000000..ca76e08fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -0,0 +1,432 @@
+//! Implementation of the Chalk `Interner` trait, which allows customizing the
+//! representation of the various objects Chalk deals with (types, goals etc.).
+
+use crate::{chalk_db, tls, GenericArg};
+use base_db::salsa::InternId;
+use chalk_ir::{Goal, GoalData};
+use hir_def::{
+ intern::{impl_internable, InternStorage, Internable, Interned},
+ type_ref::ConstScalar,
+ TypeAliasId,
+};
+use smallvec::SmallVec;
+use std::{fmt, sync::Arc};
+
+#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
+pub struct Interner;
+
+#[derive(PartialEq, Eq, Hash)]
+pub struct InternedWrapper<T>(T);
+
+impl<T: fmt::Debug> fmt::Debug for InternedWrapper<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
+
+impl<T> std::ops::Deref for InternedWrapper<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl_internable!(
+ InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>,
+ InternedWrapper<SmallVec<[GenericArg; 2]>>,
+ InternedWrapper<chalk_ir::TyData<Interner>>,
+ InternedWrapper<chalk_ir::LifetimeData<Interner>>,
+ InternedWrapper<chalk_ir::ConstData<Interner>>,
+ InternedWrapper<ConstScalar>,
+ InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::ProgramClause<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::Variance>>,
+);
+
+impl chalk_ir::interner::Interner for Interner {
+ type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Interner>>>;
+ type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>;
+ type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>;
+ type InternedConcreteConst = ConstScalar;
+ type InternedGenericArg = chalk_ir::GenericArgData<Self>;
+ type InternedGoal = Arc<GoalData<Self>>;
+ type InternedGoals = Vec<Goal<Self>>;
+ type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
+ type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
+ type InternedProgramClauses = Interned<InternedWrapper<Vec<chalk_ir::ProgramClause<Self>>>>;
+ type InternedQuantifiedWhereClauses =
+ Interned<InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Self>>>>;
+ type InternedVariableKinds = Interned<InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>>;
+ type InternedCanonicalVarKinds =
+ Interned<InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Self>>>>;
+ type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
+ type InternedVariances = Interned<InternedWrapper<Vec<chalk_ir::Variance>>>;
+ type DefId = InternId;
+ type InternedAdtId = hir_def::AdtId;
+ type Identifier = TypeAliasId;
+ type FnAbi = ();
+
+ fn debug_adt_id(
+ type_kind_id: chalk_db::AdtId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
+ }
+
+ fn debug_trait_id(
+ type_kind_id: chalk_db::TraitId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
+ }
+
+ fn debug_assoc_type_id(
+ id: chalk_db::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
+ }
+
+ fn debug_alias(
+ alias: &chalk_ir::AliasTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ use std::fmt::Debug;
+ match alias {
+ chalk_ir::AliasTy::Projection(projection_ty) => {
+ Interner::debug_projection_ty(projection_ty, fmt)
+ }
+ chalk_ir::AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)),
+ }
+ }
+
+ fn debug_projection_ty(
+ proj: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
+ }
+
+ fn debug_opaque_ty(
+ opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id))
+ }
+
+ fn debug_opaque_ty_id(
+ opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
+ }
+
+ fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", ty.data(Interner)))
+ }
+
+ fn debug_lifetime(
+ lifetime: &chalk_ir::Lifetime<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", lifetime.data(Interner)))
+ }
+
+ fn debug_generic_arg(
+ parameter: &GenericArg,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", parameter.data(Interner).inner_debug()))
+ }
+
+ fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ let goal_data = goal.data(Interner);
+ Some(write!(fmt, "{:?}", goal_data))
+ }
+
+ fn debug_goals(
+ goals: &chalk_ir::Goals<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", goals.debug(Interner)))
+ }
+
+ fn debug_program_clause_implication(
+ pci: &chalk_ir::ProgramClauseImplication<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", pci.debug(Interner)))
+ }
+
+ fn debug_substitution(
+ substitution: &chalk_ir::Substitution<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", substitution.debug(Interner)))
+ }
+
+ fn debug_separator_trait_ref(
+ separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner)))
+ }
+
+ fn debug_fn_def_id(
+ fn_def_id: chalk_ir::FnDefId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
+ }
+ fn debug_const(
+ constant: &chalk_ir::Const<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", constant.data(Interner)))
+ }
+ fn debug_variable_kinds(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner)))
+ }
+ fn debug_variable_kinds_with_angles(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner)))
+ }
+ fn debug_canonical_var_kinds(
+ canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner)))
+ }
+ fn debug_program_clause(
+ clause: &chalk_ir::ProgramClause<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clause.data(Interner)))
+ }
+ fn debug_program_clauses(
+ clauses: &chalk_ir::ProgramClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
+ }
+ fn debug_quantified_where_clauses(
+ clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
+ }
+
+ fn intern_ty(self, kind: chalk_ir::TyKind<Self>) -> Self::InternedType {
+ let flags = kind.compute_flags(self);
+ Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags }))
+ }
+
+ fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData<Self> {
+ &ty.0
+ }
+
+ fn intern_lifetime(self, lifetime: chalk_ir::LifetimeData<Self>) -> Self::InternedLifetime {
+ Interned::new(InternedWrapper(lifetime))
+ }
+
+ fn lifetime_data<'a>(
+ self,
+ lifetime: &'a Self::InternedLifetime,
+ ) -> &'a chalk_ir::LifetimeData<Self> {
+ &lifetime.0
+ }
+
+ fn intern_const(self, constant: chalk_ir::ConstData<Self>) -> Self::InternedConst {
+ Interned::new(InternedWrapper(constant))
+ }
+
+ fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData<Self> {
+ &constant.0
+ }
+
+ fn const_eq(
+ self,
+ _ty: &Self::InternedType,
+ c1: &Self::InternedConcreteConst,
+ c2: &Self::InternedConcreteConst,
+ ) -> bool {
+ (c1 == &ConstScalar::Unknown) || (c2 == &ConstScalar::Unknown) || (c1 == c2)
+ }
+
+ fn intern_generic_arg(
+ self,
+ parameter: chalk_ir::GenericArgData<Self>,
+ ) -> Self::InternedGenericArg {
+ parameter
+ }
+
+ fn generic_arg_data<'a>(
+ self,
+ parameter: &'a Self::InternedGenericArg,
+ ) -> &'a chalk_ir::GenericArgData<Self> {
+ parameter
+ }
+
+ fn intern_goal(self, goal: GoalData<Self>) -> Self::InternedGoal {
+ Arc::new(goal)
+ }
+
+ fn intern_goals<E>(
+ self,
+ data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
+ ) -> Result<Self::InternedGoals, E> {
+ data.into_iter().collect()
+ }
+
+ fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData<Self> {
+ goal
+ }
+
+ fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal<Interner>] {
+ goals
+ }
+
+ fn intern_substitution<E>(
+ self,
+ data: impl IntoIterator<Item = Result<GenericArg, E>>,
+ ) -> Result<Self::InternedSubstitution, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn substitution_data<'a>(
+ self,
+ substitution: &'a Self::InternedSubstitution,
+ ) -> &'a [GenericArg] {
+ &substitution.as_ref().0
+ }
+
+ fn intern_program_clause(
+ self,
+ data: chalk_ir::ProgramClauseData<Self>,
+ ) -> Self::InternedProgramClause {
+ data
+ }
+
+ fn program_clause_data<'a>(
+ self,
+ clause: &'a Self::InternedProgramClause,
+ ) -> &'a chalk_ir::ProgramClauseData<Self> {
+ clause
+ }
+
+ fn intern_program_clauses<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>,
+ ) -> Result<Self::InternedProgramClauses, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn program_clauses_data<'a>(
+ self,
+ clauses: &'a Self::InternedProgramClauses,
+ ) -> &'a [chalk_ir::ProgramClause<Self>] {
+ clauses
+ }
+
+ fn intern_quantified_where_clauses<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>,
+ ) -> Result<Self::InternedQuantifiedWhereClauses, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn quantified_where_clauses_data<'a>(
+ self,
+ clauses: &'a Self::InternedQuantifiedWhereClauses,
+ ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
+ clauses
+ }
+
+ fn intern_generic_arg_kinds<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>,
+ ) -> Result<Self::InternedVariableKinds, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn variable_kinds_data<'a>(
+ self,
+ parameter_kinds: &'a Self::InternedVariableKinds,
+ ) -> &'a [chalk_ir::VariableKind<Self>] {
+ &parameter_kinds.as_ref().0
+ }
+
+ fn intern_canonical_var_kinds<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>,
+ ) -> Result<Self::InternedCanonicalVarKinds, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn canonical_var_kinds_data<'a>(
+ self,
+ canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
+ ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
+ canonical_var_kinds
+ }
+
+ fn intern_constraints<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
+ ) -> Result<Self::InternedConstraints, E> {
+ data.into_iter().collect()
+ }
+
+ fn constraints_data<'a>(
+ self,
+ constraints: &'a Self::InternedConstraints,
+ ) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
+ constraints
+ }
+ fn debug_closure_id(
+ _fn_def_id: chalk_ir::ClosureId<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+ fn debug_constraints(
+ _clauses: &chalk_ir::Constraints<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+
+ fn intern_variances<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::Variance, E>>,
+ ) -> Result<Self::InternedVariances, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn variances_data<'a>(
+ self,
+ variances: &'a Self::InternedVariances,
+ ) -> &'a [chalk_ir::Variance] {
+ variances
+ }
+}
+
+impl chalk_ir::interner::HasInterner for Interner {
+ type Interner = Self;
+}
+
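+/// Implements `HasInterner` for the given type, using `crate::Interner` as
+/// the interner.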
+#[macro_export]
+macro_rules! has_interner {
+ ($t:ty) => {
+ impl HasInterner for $t {
+ type Interner = crate::Interner;
+ }
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
new file mode 100644
index 000000000..5a5d610e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -0,0 +1,525 @@
+//! The type system. We currently use this to infer types for completion, hover
+//! information and various assists.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod autoderef;
+mod builder;
+mod chalk_db;
+mod chalk_ext;
+pub mod consteval;
+mod infer;
+mod interner;
+mod lower;
+mod mapping;
+mod tls;
+mod utils;
+mod walk;
+pub mod db;
+pub mod diagnostics;
+pub mod display;
+pub mod method_resolution;
+pub mod primitive;
+pub mod traits;
+
+#[cfg(test)]
+mod tests;
+#[cfg(test)]
+mod test_db;
+
+use std::sync::Arc;
+
+use chalk_ir::{
+ fold::{Shift, TypeFoldable},
+ interner::HasInterner,
+ NoSolution,
+};
+use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
+use itertools::Either;
+use utils::Generics;
+
+use crate::{consteval::unknown_const, db::HirDatabase, utils::generics};
+
+pub use autoderef::autoderef;
+pub use builder::{ParamKind, TyBuilder};
+pub use chalk_ext::*;
+pub use infer::{
+ could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
+ InferenceResult,
+};
+pub use interner::Interner;
+pub use lower::{
+ associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
+ TyLoweringContext, ValueTyDefId,
+};
+pub use mapping::{
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
+ lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
+ to_placeholder_idx,
+};
+pub use traits::TraitEnvironment;
+pub use utils::{all_super_traits, is_fn_unsafe_to_call};
+pub use walk::TypeWalk;
+
+pub use chalk_ir::{
+ cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
+};
+
+pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
+pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub type FnDefId = chalk_ir::FnDefId<Interner>;
+pub type ClosureId = chalk_ir::ClosureId<Interner>;
+pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
+pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
+
+pub type VariableKind = chalk_ir::VariableKind<Interner>;
+pub type VariableKinds = chalk_ir::VariableKinds<Interner>;
+pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
+pub type Binders<T> = chalk_ir::Binders<T>;
+pub type Substitution = chalk_ir::Substitution<Interner>;
+pub type GenericArg = chalk_ir::GenericArg<Interner>;
+pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
+
+pub type Ty = chalk_ir::Ty<Interner>;
+pub type TyKind = chalk_ir::TyKind<Interner>;
+pub type DynTy = chalk_ir::DynTy<Interner>;
+pub type FnPointer = chalk_ir::FnPointer<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+pub use chalk_ir::FnSubst;
+pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
+pub type AliasTy = chalk_ir::AliasTy<Interner>;
+pub type OpaqueTy = chalk_ir::OpaqueTy<Interner>;
+pub type InferenceVar = chalk_ir::InferenceVar;
+
+pub type Lifetime = chalk_ir::Lifetime<Interner>;
+pub type LifetimeData = chalk_ir::LifetimeData<Interner>;
+pub type LifetimeOutlives = chalk_ir::LifetimeOutlives<Interner>;
+
+pub type Const = chalk_ir::Const<Interner>;
+pub type ConstData = chalk_ir::ConstData<Interner>;
+pub type ConstValue = chalk_ir::ConstValue<Interner>;
+pub type ConcreteConst = chalk_ir::ConcreteConst<Interner>;
+
+pub type ChalkTraitId = chalk_ir::TraitId<Interner>;
+pub type TraitRef = chalk_ir::TraitRef<Interner>;
+pub type QuantifiedWhereClause = Binders<WhereClause>;
+pub type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses<Interner>;
+pub type Canonical<T> = chalk_ir::Canonical<T>;
+
+pub type FnSig = chalk_ir::FnSig<Interner>;
+
+pub type InEnvironment<T> = chalk_ir::InEnvironment<T>;
+pub type Environment = chalk_ir::Environment<Interner>;
+pub type DomainGoal = chalk_ir::DomainGoal<Interner>;
+pub type Goal = chalk_ir::Goal<Interner>;
+pub type AliasEq = chalk_ir::AliasEq<Interner>;
+pub type Solution = chalk_solve::Solution<Interner>;
+pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
+pub type Guidance = chalk_solve::Guidance<Interner>;
+pub type WhereClause = chalk_ir::WhereClause<Interner>;
+
+// FIXME: get rid of this
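+/// Returns a new substitution containing only the first `n` arguments of `s`
+/// (or all of them, if `s` has fewer than `n`).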
+pub fn subst_prefix(s: &Substitution, n: usize) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ s.as_slice(Interner)[..std::cmp::min(s.len(Interner), n)].iter().cloned(),
+ )
+}
+
+/// Returns the index of a parameter in the generic type parameter list, given its id.
+pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
+ generics(db.upcast(), id.parent).param_idx(id)
+}
+
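+/// Wraps `value` in a binder with no variables, shifting any bound variables
+/// in `value` so that they still refer to the binders outside the new one.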
+pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
+where
+ T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
+{
+ Binders::empty(Interner, value.shifted_in_from(Interner, DebruijnIndex::ONE))
+}
+
+pub(crate) fn make_type_and_const_binders<T: HasInterner<Interner = Interner>>(
+ which_is_const: impl Iterator<Item = Option<Ty>>,
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ which_is_const.map(|x| {
+ if let Some(ty) = x {
+ chalk_ir::VariableKind::Const(ty)
+ } else {
+ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+ }
+ }),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_single_type_binders<T: HasInterner<Interner = Interner>>(
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ std::iter::once(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ count: usize,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ let it = generics.iter_id().take(count).map(|id| match id {
+ Either::Left(_) => None,
+ Either::Right(id) => Some(db.const_param_ty(id)),
+ });
+ crate::make_type_and_const_binders(it, value)
+}
+
+pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ make_binders_with_count(db, usize::MAX, generics, value)
+}
+
+// FIXME: get rid of this
+pub fn make_canonical<T: HasInterner<Interner = Interner>>(
+ value: T,
+ kinds: impl IntoIterator<Item = TyVariableKind>,
+) -> Canonical<T> {
+ let kinds = kinds.into_iter().map(|tk| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(tk),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
+
+// FIXME: get rid of this, just replace it by FnPointer
+/// A function signature as seen by type inference: Several parameter types and
+/// one return type.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct CallableSig {
+ params_and_return: Arc<[Ty]>,
+ is_varargs: bool,
+}
+
+has_interner!(CallableSig);
+
+/// A polymorphic function signature.
+pub type PolyFnSig = Binders<CallableSig>;
+
+impl CallableSig {
+ pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty, is_varargs: bool) -> CallableSig {
+ params.push(ret);
+ CallableSig { params_and_return: params.into(), is_varargs }
+ }
+
+ pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
+ CallableSig {
+ // FIXME: what to do about lifetime params? -> return PolyFnSig
+ params_and_return: fn_ptr
+ .substitution
+ .clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("unexpected lifetime vars in fn ptr")
+ .0
+ .as_slice(Interner)
+ .iter()
+ .map(|arg| arg.assert_ty_ref(Interner).clone())
+ .collect(),
+ is_varargs: fn_ptr.sig.variadic,
+ }
+ }
+
+ pub fn to_fn_ptr(&self) -> FnPointer {
+ FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: Safety::Safe, variadic: self.is_varargs },
+ substitution: FnSubst(Substitution::from_iter(
+ Interner,
+ self.params_and_return.iter().cloned(),
+ )),
+ }
+ }
+
+ pub fn params(&self) -> &[Ty] {
+ &self.params_and_return[0..self.params_and_return.len() - 1]
+ }
+
+ pub fn ret(&self) -> &Ty {
+ &self.params_and_return[self.params_and_return.len() - 1]
+ }
+}
+
+impl TypeFoldable<Interner> for CallableSig {
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn chalk_ir::fold::TypeFolder<Interner, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self, E> {
+ let vec = self.params_and_return.to_vec();
+ let folded = vec.fold_with(folder, outer_binder)?;
+ Ok(CallableSig { params_and_return: folded.into(), is_varargs: self.is_varargs })
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum ImplTraitId {
+ ReturnTypeImplTrait(hir_def::FunctionId, u16),
+ AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ReturnTypeImplTraits {
+ pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
+}
+
+has_interner!(ReturnTypeImplTraits);
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct ReturnTypeImplTrait {
+ pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
+}
+
+pub fn static_lifetime() -> Lifetime {
+ LifetimeData::Static.intern(Interner)
+}
+
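+/// Folds the free `BoundVar`s in `t`, calling `for_ty` for each free type
+/// variable and `for_const` for each free const variable.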
+pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty,
+ for_const: impl FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
+) -> T {
+ use chalk_ir::{fold::TypeFolder, Fallible};
+ struct FreeVarFolder<F1, F2>(F1, F2);
+ impl<
+ 'i,
+ F1: FnMut(BoundVar, DebruijnIndex) -> Ty + 'i,
+ F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const + 'i,
+ > TypeFolder<Interner> for FreeVarFolder<F1, F2>
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ Ok(self.0(bound_var, outer_binder))
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ Ok(self.1(ty, bound_var, outer_binder))
+ }
+ }
+ t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST)
+ .expect("fold failed unexpectedly")
+}
+
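+/// Applies `for_ty` to each type in `t`, leaving consts unchanged.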
+pub(crate) fn fold_tys<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ mut for_ty: impl FnMut(Ty, DebruijnIndex) -> Ty,
+ binders: DebruijnIndex,
+) -> T {
+ fold_tys_and_consts(
+ t,
+ |x, d| match x {
+ Either::Left(x) => Either::Left(for_ty(x, d)),
+ Either::Right(x) => Either::Right(x),
+ },
+ binders,
+ )
+}
+
+pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ f: impl FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>,
+ binders: DebruijnIndex,
+) -> T {
+ use chalk_ir::{
+ fold::{TypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct TyFolder<F>(F);
+ impl<'i, F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const> + 'i>
+ TypeFolder<Interner> for TyFolder<F>
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?;
+ Ok(self.0(Either::Left(ty), outer_binder).left().unwrap())
+ }
+
+ fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Fallible<Const> {
+ Ok(self.0(Either::Right(c), outer_binder).right().unwrap())
+ }
+ }
+ t.fold_with(&mut TyFolder(f), binders).expect("fold failed unexpectedly")
+}
+
+/// 'Canonicalizes' `t` by replacing any error types with new variables. Also
+/// ensures there are no unbound variables or inference variables anywhere in
+/// `t`.
+pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
+where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
+{
+ use chalk_ir::{
+ fold::{TypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct ErrorReplacer {
+ vars: usize,
+ }
+ impl TypeFolder<Interner> for ErrorReplacer {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ if let TyKind::Error = ty.kind(Interner) {
+ let index = self.vars;
+ self.vars += 1;
+ Ok(TyKind::BoundVar(BoundVar::new(outer_binder, index)).intern(Interner))
+ } else {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?;
+ Ok(ty)
+ }
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ _var: InferenceVar,
+ _kind: TyVariableKind,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: Ty,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+
+ fn fold_free_var_lifetime(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+ }
+ let mut error_replacer = ErrorReplacer { vars: 0 };
+ let value = match t.clone().fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) {
+ Ok(t) => t,
+ Err(_) => panic!("Encountered unbound or inference vars in {:?}", t),
+ };
+ let kinds = (0..error_replacer.vars).map(|_| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(TyVariableKind::General),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
new file mode 100644
index 000000000..3ed9c941f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -0,0 +1,1778 @@
+//! Methods for lowering the HIR to types. There are two main cases here:
+//!
+//! - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
+//! type: The entry point for this is `TyLoweringContext::lower_ty`.
+//! - Building the type for an item: This happens through the `type_for_def` query.
+//!
+//! This usually involves resolving names, collecting generic arguments etc.
+use std::{
+ cell::{Cell, RefCell},
+ iter,
+ sync::Arc,
+};
+
+use base_db::CrateId;
+use chalk_ir::{
+ cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
+};
+
+use hir_def::{
+ adt::StructKind,
+ body::{Expander, LowerCtx},
+ builtin_type::BuiltinType,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ lang_item::lang_attr,
+ path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
+ resolver::{HasResolver, Resolver, TypeNs},
+ type_ref::{
+ ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
+ },
+ AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
+ HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::Name, ExpandResult};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashSet;
+use smallvec::SmallVec;
+use stdx::{impl_from, never};
+use syntax::{ast, SmolStr};
+
+use crate::{
+ all_super_traits,
+ consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
+ db::HirDatabase,
+ make_binders,
+ mapping::ToChalk,
+ static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
+ utils::Generics,
+ utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics},
+ AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnPointer,
+ FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
+ QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
+ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
+};
+
+#[derive(Debug)]
+pub struct TyLoweringContext<'a> {
+ pub db: &'a dyn HirDatabase,
+ pub resolver: &'a Resolver,
+ in_binders: DebruijnIndex,
+ /// Note: Conceptually, we could be in a location where some type params
+ /// should be represented as placeholders and others should be converted
+ /// to variables. In practice this doesn't seem to be possible currently,
+ /// so this should be fine for now.
+ pub type_param_mode: ParamLoweringMode,
+ pub impl_trait_mode: ImplTraitLoweringMode,
+ impl_trait_counter: Cell<u16>,
+ /// When turning `impl Trait` into opaque types, we have to collect the
+ /// bounds at the same time to get the IDs correct (without becoming too
+ /// complicated). I don't like using interior mutability (as for the
+ /// counter), but I've tried and failed to make the lifetimes work for
+ /// passing around a `&mut TyLoweringContext`. The core problem is that
+ /// we're grouping the mutable data (the counter and this field) together
+ /// with the immutable context (the references to the DB and resolver).
+ /// Splitting this up would be a possible fix.
+ opaque_type_data: RefCell<Vec<ReturnTypeImplTrait>>,
+ expander: RefCell<Option<Expander>>,
+ /// Tracks types with explicit `?Sized` bounds.
+ pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
+ let impl_trait_counter = Cell::new(0);
+ let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
+ let type_param_mode = ParamLoweringMode::Placeholder;
+ let in_binders = DebruijnIndex::INNERMOST;
+ let opaque_type_data = RefCell::new(Vec::new());
+ Self {
+ db,
+ resolver,
+ in_binders,
+ impl_trait_mode,
+ impl_trait_counter,
+ type_param_mode,
+ opaque_type_data,
+ expander: RefCell::new(None),
+ unsized_types: RefCell::default(),
+ }
+ }
+
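+ /// Runs `f` with a copy of this context whose binder depth is `debruijn`,
+ /// moving the interior-mutable state into the copy and back afterwards.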
+ pub fn with_debruijn<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ let opaque_ty_data_vec = self.opaque_type_data.take();
+ let expander = self.expander.take();
+ let unsized_types = self.unsized_types.take();
+ let new_ctx = Self {
+ in_binders: debruijn,
+ impl_trait_counter: Cell::new(self.impl_trait_counter.get()),
+ opaque_type_data: RefCell::new(opaque_ty_data_vec),
+ expander: RefCell::new(expander),
+ unsized_types: RefCell::new(unsized_types),
+ ..*self
+ };
+ let result = f(&new_ctx);
+ self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
+ self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
+ self.expander.replace(new_ctx.expander.into_inner());
+ self.unsized_types.replace(new_ctx.unsized_types.into_inner());
+ result
+ }
+
+ pub fn with_shifted_in<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
+ }
+
+ pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
+ Self { impl_trait_mode, ..self }
+ }
+
+ pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
+ Self { type_param_mode, ..self }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ImplTraitLoweringMode {
+ /// `impl Trait` gets lowered into an opaque type that doesn't unify with
+ /// anything except itself. This is used in places where values flow 'out',
+ /// i.e. for arguments of the function we're currently checking, and return
+ /// types of functions we're calling.
+ Opaque,
+ /// `impl Trait` gets lowered into a type variable. Used for argument
+ /// position impl Trait when inside the respective function, since it allows
+ /// us to support that without Chalk.
+ Param,
+ /// `impl Trait` gets lowered into a variable that can unify with some
+ /// type. This is used in places where values flow 'in', i.e. for arguments
+ /// of functions we're calling, and the return type of the function we're
+ /// currently checking.
+ Variable,
+ /// `impl Trait` is disallowed and will be an error.
+ Disallowed,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ParamLoweringMode {
+ Placeholder,
+ Variable,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn lower_ty(&self, type_ref: &TypeRef) -> Ty {
+ self.lower_ty_ext(type_ref).0
+ }
+
+ fn generics(&self) -> Generics {
+ generics(
+ self.db.upcast(),
+ self.resolver
+ .generic_def()
+ .expect("there should be generics if there's a generic param"),
+ )
+ }
+
+ pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
+ let mut res = None;
+ let ty = match type_ref {
+ TypeRef::Never => TyKind::Never.intern(Interner),
+ TypeRef::Tuple(inner) => {
+ let inner_tys = inner.iter().map(|tr| self.lower_ty(tr));
+ TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+ .intern(Interner)
+ }
+ TypeRef::Path(path) => {
+ let (ty, res_) = self.lower_path(path);
+ res = res_;
+ ty
+ }
+ TypeRef::RawPtr(inner, mutability) => {
+ let inner_ty = self.lower_ty(inner);
+ TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(Interner)
+ }
+ TypeRef::Array(inner, len) => {
+ let inner_ty = self.lower_ty(inner);
+ let const_len = const_or_path_to_chalk(
+ self.db,
+ self.resolver,
+ TyBuilder::usize(),
+ len,
+ self.type_param_mode,
+ || self.generics(),
+ self.in_binders,
+ );
+
+ TyKind::Array(inner_ty, const_len).intern(Interner)
+ }
+ TypeRef::Slice(inner) => {
+ let inner_ty = self.lower_ty(inner);
+ TyKind::Slice(inner_ty).intern(Interner)
+ }
+ TypeRef::Reference(inner, _, mutability) => {
+ let inner_ty = self.lower_ty(inner);
+ let lifetime = static_lifetime();
+ TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
+ .intern(Interner)
+ }
+ TypeRef::Placeholder => TyKind::Error.intern(Interner),
+ TypeRef::Fn(params, is_varargs) => {
+ let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
+ });
+ TyKind::Function(FnPointer {
+ num_binders: 0, // FIXME lower `for<'a> fn()` correctly
+ sig: FnSig { abi: (), safety: Safety::Safe, variadic: *is_varargs },
+ substitution: FnSubst(substs),
+ })
+ .intern(Interner)
+ }
+ TypeRef::DynTrait(bounds) => {
+ let self_ty =
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+ let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ QuantifiedWhereClauses::from_iter(
+ Interner,
+ bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
+ )
+ });
+ let bounds = crate::make_single_type_binders(bounds);
+ TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
+ }
+ TypeRef::ImplTrait(bounds) => {
+ match self.impl_trait_mode {
+ ImplTraitLoweringMode::Opaque => {
+ let idx = self.impl_trait_counter.get();
+ self.impl_trait_counter.set(idx + 1);
+ let func = match self.resolver.generic_def() {
+ Some(GenericDefId::FunctionId(f)) => f,
+ _ => panic!("opaque impl trait lowering in non-function"),
+ };
+
+ assert!(idx as usize == self.opaque_type_data.borrow().len());
+ // this dance is to make sure the data is in the right
+ // place even if we encounter more opaque types while
+ // lowering the bounds
+ self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait {
+ bounds: crate::make_single_type_binders(Vec::new()),
+ });
+ // We don't want to lower the bounds inside the binders
+ // we're currently in, because they don't end up inside
+ // those binders. E.g. when we have `impl Trait<impl
+ // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
+ // to the self parameter from `impl Trait`, and the
+ // bounds aren't actually stored nested within each
+ // other, but separately. So if the `T` refers to a type
+ // parameter of the outer function, it's just one binder
+ // away instead of two.
+ let actual_opaque_type_data = self
+ .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
+ ctx.lower_impl_trait(bounds, func)
+ });
+ self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
+
+ let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ let generics = generics(self.db.upcast(), func.into());
+ let parameters = generics.bound_vars_subst(self.db, self.in_binders);
+ TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
+ }
+ ImplTraitLoweringMode::Param => {
+ let idx = self.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ if let Some(def) = self.resolver.generic_def() {
+ let generics = generics(self.db.upcast(), def);
+ let param = generics
+ .iter()
+ .filter(|(_, data)| {
+ matches!(
+ data,
+ TypeOrConstParamData::TypeParamData(data)
+ if data.provenance == TypeParamProvenance::ArgumentImplTrait
+ )
+ })
+ .nth(idx as usize)
+ .map_or(TyKind::Error, |(id, _)| {
+ TyKind::Placeholder(to_placeholder_idx(self.db, id))
+ });
+ param.intern(Interner)
+ } else {
+ TyKind::Error.intern(Interner)
+ }
+ }
+ ImplTraitLoweringMode::Variable => {
+ let idx = self.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ let (
+ parent_params,
+ self_params,
+ list_params,
+ const_params,
+ _impl_trait_params,
+ ) = if let Some(def) = self.resolver.generic_def() {
+ let generics = generics(self.db.upcast(), def);
+ generics.provenance_split()
+ } else {
+ (0, 0, 0, 0, 0)
+ };
+ TyKind::BoundVar(BoundVar::new(
+ self.in_binders,
+ idx as usize + parent_params + self_params + list_params + const_params,
+ ))
+ .intern(Interner)
+ }
+ ImplTraitLoweringMode::Disallowed => {
+ // FIXME: report error
+ TyKind::Error.intern(Interner)
+ }
+ }
+ }
+ TypeRef::Macro(macro_call) => {
+ let (expander, recursion_start) = {
+ let mut expander = self.expander.borrow_mut();
+ if expander.is_some() {
+ (Some(expander), false)
+ } else {
+ *expander = Some(Expander::new(
+ self.db.upcast(),
+ macro_call.file_id,
+ self.resolver.module(),
+ ));
+ (Some(expander), true)
+ }
+ };
+ let ty = if let Some(mut expander) = expander {
+ let expander_mut = expander.as_mut().unwrap();
+ let macro_call = macro_call.to_node(self.db.upcast());
+ match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
+ Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
+ let ctx =
+ LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
+ let type_ref = TypeRef::from_ast(&ctx, expanded);
+
+ drop(expander);
+ let ty = self.lower_ty(&type_ref);
+
+ self.expander
+ .borrow_mut()
+ .as_mut()
+ .unwrap()
+ .exit(self.db.upcast(), mark);
+ Some(ty)
+ }
+ _ => None,
+ }
+ } else {
+ None
+ };
+ if recursion_start {
+ *self.expander.borrow_mut() = None;
+ }
+ ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+ }
+ TypeRef::Error => TyKind::Error.intern(Interner),
+ };
+ (ty, res)
+ }
+
+ /// This is only used by `generic_predicates_for_param`, where we can't just
+ /// lower the self types of the predicates since that could lead to cycles.
+ /// Instead, we only check whether the `type_ref` resolves to a generic
+ /// parameter, and if so, which one.
+ fn lower_ty_only_param(&self, type_ref: &TypeRef) -> Option<TypeOrConstParamId> {
+ let path = match type_ref {
+ TypeRef::Path(path) => path,
+ _ => return None,
+ };
+ if path.type_anchor().is_some() {
+ return None;
+ }
+ if path.segments().len() > 1 {
+ return None;
+ }
+ let resolution =
+ match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ Some((it, None)) => it,
+ _ => return None,
+ };
+ match resolution {
+ TypeNs::GenericParam(param_id) => Some(param_id.into()),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn lower_ty_relative_path(
+ &self,
+ ty: Ty,
+ // We need the original resolution to lower `Self::AssocTy` correctly
+ res: Option<TypeNs>,
+ remaining_segments: PathSegments<'_>,
+ ) -> (Ty, Option<TypeNs>) {
+ match remaining_segments.len() {
+ 0 => (ty, res),
+ 1 => {
+ // resolve unselected assoc types
+ let segment = remaining_segments.first().unwrap();
+ (self.select_associated_type(res, segment), None)
+ }
+ _ => {
+ // FIXME report error (ambiguous associated type)
+ (TyKind::Error.intern(Interner), None)
+ }
+ }
+ }
+
+ pub(crate) fn lower_partly_resolved_path(
+ &self,
+ resolution: TypeNs,
+ resolved_segment: PathSegment<'_>,
+ remaining_segments: PathSegments<'_>,
+ infer_args: bool,
+ ) -> (Ty, Option<TypeNs>) {
+ let ty = match resolution {
+ TypeNs::TraitId(trait_) => {
+ let ty = match remaining_segments.len() {
+ 1 => {
+ let trait_ref =
+ self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+ let segment = remaining_segments.first().unwrap();
+ let found = self
+ .db
+ .trait_data(trait_ref.hir_trait_id())
+ .associated_type_by_name(segment.name);
+ match found {
+ Some(associated_ty) => {
+ // FIXME handle type parameters on the segment
+ TyKind::Alias(AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: trait_ref.substitution,
+ }))
+ .intern(Interner)
+ }
+ None => {
+ // FIXME: report error (associated type not found)
+ TyKind::Error.intern(Interner)
+ }
+ }
+ }
+ 0 => {
+ let self_ty = Some(
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ );
+ let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ ctx.lower_trait_ref_from_resolved_path(
+ trait_,
+ resolved_segment,
+ self_ty,
+ )
+ });
+ let dyn_ty = DynTy {
+ bounds: crate::make_single_type_binders(
+ QuantifiedWhereClauses::from_iter(
+ Interner,
+ Some(crate::wrap_empty_binders(WhereClause::Implemented(
+ trait_ref,
+ ))),
+ ),
+ ),
+ lifetime: static_lifetime(),
+ };
+ TyKind::Dyn(dyn_ty).intern(Interner)
+ }
+ _ => {
+ // FIXME report error (ambiguous associated type)
+ TyKind::Error.intern(Interner)
+ }
+ };
+ return (ty, None);
+ }
+ TypeNs::GenericParam(param_id) => {
+ let generics = generics(
+ self.db.upcast(),
+ self.resolver.generic_def().expect("generics in scope"),
+ );
+ match self.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
+ }
+ ParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id.into()).expect("matching generics");
+ TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
+ }
+ }
+ .intern(Interner)
+ }
+ TypeNs::SelfType(impl_id) => {
+ let generics = generics(self.db.upcast(), impl_id.into());
+ let substs = match self.type_param_mode {
+ ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
+ ParamLoweringMode::Variable => {
+ generics.bound_vars_subst(self.db, self.in_binders)
+ }
+ };
+ self.db.impl_self_ty(impl_id).substitute(Interner, &substs)
+ }
+ TypeNs::AdtSelfType(adt) => {
+ let generics = generics(self.db.upcast(), adt.into());
+ let substs = match self.type_param_mode {
+ ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
+ ParamLoweringMode::Variable => {
+ generics.bound_vars_subst(self.db, self.in_binders)
+ }
+ };
+ self.db.ty(adt.into()).substitute(Interner, &substs)
+ }
+
+ TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
+ TypeNs::BuiltinType(it) => {
+ self.lower_path_inner(resolved_segment, it.into(), infer_args)
+ }
+ TypeNs::TypeAliasId(it) => {
+ self.lower_path_inner(resolved_segment, it.into(), infer_args)
+ }
+ // FIXME: report error
+ TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
+ };
+ self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
+ }
+
+ pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
+ // Resolve the path (in type namespace)
+ if let Some(type_ref) = path.type_anchor() {
+ let (ty, res) = self.lower_ty_ext(type_ref);
+ return self.lower_ty_relative_path(ty, res, path.segments());
+ }
+ let (resolution, remaining_index) =
+ match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ Some(it) => it,
+ None => return (TyKind::Error.intern(Interner), None),
+ };
+ let (resolved_segment, remaining_segments) = match remaining_index {
+ None => (
+ path.segments().last().expect("resolved path has at least one element"),
+ PathSegments::EMPTY,
+ ),
+ Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
+ };
+ self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)
+ }
+
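+ // Resolves a shorthand associated type path such as `T::Item`, where the trait is
+ // not spelled out: the trait bounds of `T` (and their supertraits) are searched for
+ // an associated type with the given name.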
+ fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
+ let (def, res) = match (self.resolver.generic_def(), res) {
+ (Some(def), Some(res)) => (def, res),
+ _ => return TyKind::Error.intern(Interner),
+ };
+ let ty = named_associated_type_shorthand_candidates(
+ self.db,
+ def,
+ res,
+ Some(segment.name.clone()),
+ move |name, t, associated_ty| {
+ if name == segment.name {
+ let substs = match self.type_param_mode {
+ ParamLoweringMode::Placeholder => {
+ // if we're lowering to placeholders, we have to put
+ // them in now
+ let generics = generics(
+ self.db.upcast(),
+ self.resolver
+ .generic_def()
+ .expect("there should be generics if there's a generic param"),
+ );
+ let s = generics.placeholder_subst(self.db);
+ s.apply(t.substitution.clone(), Interner)
+ }
+ ParamLoweringMode::Variable => t.substitution.clone(),
+ };
+ // We need to shift in the bound vars, since
+ // associated_type_shorthand_candidates does not do that
+ let substs = substs.shifted_in_from(Interner, self.in_binders);
+ // FIXME handle type parameters on the segment
+ Some(
+ TyKind::Alias(AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: substs,
+ }))
+ .intern(Interner),
+ )
+ } else {
+ None
+ }
+ },
+ );
+
+ ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+ }
+
+ fn lower_path_inner(
+ &self,
+ segment: PathSegment<'_>,
+ typeable: TyDefId,
+ infer_args: bool,
+ ) -> Ty {
+ let generic_def = match typeable {
+ TyDefId::BuiltinType(_) => None,
+ TyDefId::AdtId(it) => Some(it.into()),
+ TyDefId::TypeAliasId(it) => Some(it.into()),
+ };
+ let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None);
+ self.db.ty(typeable).substitute(Interner, &substs)
+ }
+
+ /// Collect generic arguments from a path into a `Substitution`. See also
+ /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+ pub(super) fn substs_from_path(
+ &self,
+ path: &Path,
+ // Note that we don't call `db.value_type(resolved)` here,
+ // `ValueTyDefId` is just a convenient way to pass generics and
+ // special-case enum variants
+ resolved: ValueTyDefId,
+ infer_args: bool,
+ ) -> Substitution {
+ let last = path.segments().last().expect("path should have at least one segment");
+ let (segment, generic_def) = match resolved {
+ ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
+ ValueTyDefId::StructId(it) => (last, Some(it.into())),
+ ValueTyDefId::UnionId(it) => (last, Some(it.into())),
+ ValueTyDefId::ConstId(it) => (last, Some(it.into())),
+ ValueTyDefId::StaticId(_) => (last, None),
+ ValueTyDefId::EnumVariantId(var) => {
+ // the generic args for an enum variant may be either specified
+ // on the segment referring to the enum, or on the segment
+ // referring to the variant. So `Option::<T>::None` and
+ // `Option::None::<T>` are both allowed (though the former is
+ // preferred). See also `def_ids_for_path_segments` in rustc.
+ let len = path.segments().len();
+ let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
+ let segment = match penultimate {
+ Some(segment) if segment.args_and_bindings.is_some() => segment,
+ _ => last,
+ };
+ (segment, Some(var.parent.into()))
+ }
+ };
+ self.substs_from_path_segment(segment, generic_def, infer_args, None)
+ }
+
+ fn substs_from_path_segment(
+ &self,
+ segment: PathSegment<'_>,
+ def_generic: Option<GenericDefId>,
+ infer_args: bool,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ let mut substs = Vec::new();
+ let def_generics = if let Some(def) = def_generic {
+ generics(self.db.upcast(), def)
+ } else {
+ return Substitution::empty(Interner);
+ };
+ let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+ def_generics.provenance_split();
+ let total_len =
+ parent_params + self_params + type_params + const_params + impl_trait_params;
+
+ let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner);
+
+ let mut def_generic_iter = def_generics.iter_id();
+
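+ // The enclosing item's (parent) parameters cannot be written on this segment, so
+ // fill them with error placeholders / unknown consts for now.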
+ for _ in 0..parent_params {
+ if let Some(eid) = def_generic_iter.next() {
+ match eid {
+ Either::Left(_) => substs.push(ty_error.clone()),
+ Either::Right(x) => {
+ substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+ }
+ }
+ }
+ }
+
+ let fill_self_params = || {
+ for x in explicit_self_ty
+ .into_iter()
+ .map(|x| GenericArgData::Ty(x).intern(Interner))
+ .chain(iter::repeat(ty_error.clone()))
+ .take(self_params)
+ {
+ if let Some(id) = def_generic_iter.next() {
+ assert!(id.is_left());
+ substs.push(x);
+ }
+ }
+ };
+ let mut had_explicit_args = false;
+
+ if let Some(generic_args) = &segment.args_and_bindings {
+ if !generic_args.has_self_type {
+ fill_self_params();
+ }
+ let expected_num = if generic_args.has_self_type {
+ self_params + type_params + const_params
+ } else {
+ type_params + const_params
+ };
+ let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
+ // if args are provided, they should cover all of the parameters, but we can't rely on that
+ for arg in generic_args
+ .args
+ .iter()
+ .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+ .skip(skip)
+ .take(expected_num)
+ {
+ if let Some(id) = def_generic_iter.next() {
+ if let Some(x) = generic_arg_to_chalk(
+ self.db,
+ id,
+ arg,
+ &mut (),
+ |_, type_ref| self.lower_ty(type_ref),
+ |_, c, ty| {
+ const_or_path_to_chalk(
+ self.db,
+ &self.resolver,
+ ty,
+ c,
+ self.type_param_mode,
+ || self.generics(),
+ self.in_binders,
+ )
+ },
+ ) {
+ had_explicit_args = true;
+ substs.push(x);
+ } else {
+ // we just filtered them out
+ never!("Unexpected lifetime argument");
+ }
+ }
+ }
+ } else {
+ fill_self_params();
+ }
+
+ // handle defaults. In expression or pattern path segments without
+ // explicitly specified type arguments, missing type arguments are inferred
+ // (i.e. defaults aren't used).
+ if !infer_args || had_explicit_args {
+ if let Some(def_generic) = def_generic {
+ let defaults = self.db.generic_defaults(def_generic);
+ assert_eq!(total_len, defaults.len());
+
+ for default_ty in defaults.iter().skip(substs.len()) {
+ // each default can depend on the previous parameters
+ let substs_so_far = Substitution::from_iter(Interner, substs.clone());
+ if let Some(_id) = def_generic_iter.next() {
+ substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
+ }
+ }
+ }
+ }
+
+ // add placeholders for args that were not provided
+ // FIXME: emit diagnostics in contexts where this is not allowed
+ for eid in def_generic_iter {
+ match eid {
+ Either::Left(_) => substs.push(ty_error.clone()),
+ Either::Right(x) => {
+ substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+ }
+ }
+ }
+ // If this assert fails, it means you pushed into `substs` but didn't call `.next()` on `def_generic_iter`
+ assert_eq!(substs.len(), total_len);
+
+ Substitution::from_iter(Interner, substs)
+ }
+
+ fn lower_trait_ref_from_path(
+ &self,
+ path: &Path,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<TraitRef> {
+ let resolved =
+ match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
+ TypeNs::TraitId(tr) => tr,
+ _ => return None,
+ };
+ let segment = path.segments().last().expect("path should have at least one segment");
+ Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
+ }
+
+ pub(crate) fn lower_trait_ref_from_resolved_path(
+ &self,
+ resolved: TraitId,
+ segment: PathSegment<'_>,
+ explicit_self_ty: Option<Ty>,
+ ) -> TraitRef {
+ let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
+ TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
+ }
+
+ fn lower_trait_ref(
+ &self,
+ trait_ref: &HirTraitRef,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<TraitRef> {
+ self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
+ }
+
+ fn trait_ref_substs_from_path(
+ &self,
+ segment: PathSegment<'_>,
+ resolved: TraitId,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
+ }
+
+ pub(crate) fn lower_where_predicate(
+ &'a self,
+ where_predicate: &'a WherePredicate,
+ ignore_bindings: bool,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ match where_predicate {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound } => {
+ let self_ty = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
+ WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
+ let generic_def = self.resolver.generic_def().expect("generics in scope");
+ let generics = generics(self.db.upcast(), generic_def);
+ let param_id = hir_def::TypeOrConstParamId {
+ parent: generic_def,
+ local_id: *param_id,
+ };
+ let placeholder = to_placeholder_idx(self.db, param_id);
+ match self.type_param_mode {
+ ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
+ ParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id).expect("matching generics");
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
+ }
+ }
+ .intern(Interner)
+ }
+ };
+ self.lower_type_bound(bound, self_ty, ignore_bindings)
+ .collect::<Vec<_>>()
+ .into_iter()
+ }
+ WherePredicate::Lifetime { .. } => vec![].into_iter(),
+ }
+ }
+
+ pub(crate) fn lower_type_bound(
+ &'a self,
+ bound: &'a TypeBound,
+ self_ty: Ty,
+ ignore_bindings: bool,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ let mut bindings = None;
+ let trait_ref = match bound {
+ TypeBound::Path(path, TraitBoundModifier::None) => {
+ bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+ bindings
+ .clone()
+ .filter(|tr| {
+ // ignore `T: Drop` or `T: Destruct` bounds.
+ // - `T: ~const Drop` has a special meaning in Rust 1.61 that we don't implement.
+ // (So ideally, we'd only ignore `~const Drop` here)
+ // - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
+ // the builtin impls are supported by Chalk, we ignore them here.
+ if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+ if lang == "drop" || lang == "destruct" {
+ return false;
+ }
+ }
+ true
+ })
+ .map(WhereClause::Implemented)
+ .map(crate::wrap_empty_binders)
+ }
+ TypeBound::Path(path, TraitBoundModifier::Maybe) => {
+ let sized_trait = self
+ .db
+ .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ // Don't lower associated type bindings, as the only possible relaxed trait bound,
+ // `?Sized`, has none of them.
+ // If we got another trait here, ignore the bound completely.
+ let trait_id = self
+ .lower_trait_ref_from_path(path, Some(self_ty.clone()))
+ .map(|trait_ref| trait_ref.hir_trait_id());
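+ // If the relaxed bound is `?Sized`, remember the self type so that
+ // `implicitly_sized_clauses` does not add an implicit `Sized` bound for it later.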
+ if trait_id == sized_trait {
+ self.unsized_types.borrow_mut().insert(self_ty);
+ }
+ None
+ }
+ TypeBound::ForLifetime(_, path) => {
+ // FIXME Don't silently drop the hrtb lifetimes here
+ bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+ bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+ }
+ TypeBound::Lifetime(_) => None,
+ TypeBound::Error => None,
+ };
+ trait_ref.into_iter().chain(
+ bindings
+ .into_iter()
+ .filter(move |_| !ignore_bindings)
+ .flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
+ )
+ }
+
+ fn assoc_type_bindings_from_type_bound(
+ &'a self,
+ bound: &'a TypeBound,
+ trait_ref: TraitRef,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ let last_segment = match bound {
+ TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
+ path.segments().last()
+ }
+ TypeBound::Path(_, TraitBoundModifier::Maybe)
+ | TypeBound::Error
+ | TypeBound::Lifetime(_) => None,
+ };
+ last_segment
+ .into_iter()
+ .filter_map(|segment| segment.args_and_bindings)
+ .flat_map(|args_and_bindings| &args_and_bindings.bindings)
+ .flat_map(move |binding| {
+ let found = associated_type_by_name_including_super_traits(
+ self.db,
+ trait_ref.clone(),
+ &binding.name,
+ );
+ let (super_trait_ref, associated_ty) = match found {
+ None => return SmallVec::new(),
+ Some(t) => t,
+ };
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: super_trait_ref.substitution,
+ };
+ let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
+ binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
+ );
+ if let Some(type_ref) = &binding.type_ref {
+ let ty = self.lower_ty(type_ref);
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
+ for bound in &binding.bounds {
+ preds.extend(self.lower_type_bound(
+ bound,
+ TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
+ false,
+ ));
+ }
+ preds
+ })
+ }
+
+ fn lower_impl_trait(
+ &self,
+ bounds: &[Interned<TypeBound>],
+ func: FunctionId,
+ ) -> ReturnTypeImplTrait {
+ cov_mark::hit!(lower_rpit);
+ let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+ let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ let mut predicates: Vec<_> = bounds
+ .iter()
+ .flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
+ .collect();
+
+ if !ctx.unsized_types.borrow().contains(&self_ty) {
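+ // Return-position `impl Trait` is implicitly `Sized` unless `?Sized` appears in
+ // its bounds, so add the `Sized` bound here.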
+ let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
+ let sized_trait = ctx
+ .db
+ .lang_item(krate, SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+ let sized_clause = sized_trait.map(|trait_id| {
+ let clause = WhereClause::Implemented(TraitRef {
+ trait_id,
+ substitution: Substitution::from1(Interner, self_ty.clone()),
+ });
+ crate::wrap_empty_binders(clause)
+ });
+ predicates.extend(sized_clause.into_iter());
+ predicates.shrink_to_fit();
+ }
+ predicates
+ });
+ ReturnTypeImplTrait { bounds: crate::make_single_type_binders(predicates) }
+ }
+}
+
+fn count_impl_traits(type_ref: &TypeRef) -> usize {
+ let mut count = 0;
+ type_ref.walk(&mut |type_ref| {
+ if matches!(type_ref, TypeRef::ImplTrait(_)) {
+ count += 1;
+ }
+ });
+ count
+}
+
+/// Build the signature of a callable item (function, struct or enum variant).
+pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
+ match def {
+ CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
+ CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
+ CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
+ }
+}
+
+pub fn associated_type_shorthand_candidates<R>(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ res: TypeNs,
+ cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+ named_associated_type_shorthand_candidates(db, def, res, None, cb)
+}
+
+fn named_associated_type_shorthand_candidates<R>(
+ db: &dyn HirDatabase,
+ // If the type parameter is defined in an impl and we're in a method, there
+ // might be additional where clauses to consider
+ def: GenericDefId,
+ res: TypeNs,
+ assoc_name: Option<Name>,
+ mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+ let mut search = |t| {
+ for t in all_super_trait_refs(db, t) {
+ let data = db.trait_data(t.hir_trait_id());
+
+ for (name, assoc_id) in &data.items {
+ if let AssocItemId::TypeAliasId(alias) = assoc_id {
+ if let Some(result) = cb(name, &t, *alias) {
+ return Some(result);
+ }
+ }
+ }
+ }
+ None
+ };
+
+ match res {
+ TypeNs::SelfType(impl_id) => search(
+ // we're _in_ the impl -- the binders get added back later. Correct,
+ // but it would be nice to make this more explicit
+ db.impl_trait(impl_id)?.into_value_and_skipped_binders().0,
+ ),
+ TypeNs::GenericParam(param_id) => {
+ let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name);
+ let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() {
+ // FIXME: how to correctly handle higher-ranked bounds here?
+ WhereClause::Implemented(tr) => search(
+ tr.clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("FIXME unexpected higher-ranked trait bound"),
+ ),
+ _ => None,
+ });
+ if let Some(_) = res {
+ return res;
+ }
+ // Handle `Self::Type` referring to own associated type in trait definitions
+ if let GenericDefId::TraitId(trait_id) = param_id.parent() {
+ let generics = generics(db.upcast(), trait_id.into());
+ if generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
+ let trait_ref = TyBuilder::trait_ref(db, trait_id)
+ .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
+ .build();
+ return search(trait_ref);
+ }
+ }
+ None
+ }
+ _ => None,
+ }
+}
+
+/// Build the type of all specific fields of a struct or enum variant.
+pub(crate) fn field_types_query(
+ db: &dyn HirDatabase,
+ variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
+ let var_data = variant_id.variant_data(db.upcast());
+ let (resolver, def): (_, GenericDefId) = match variant_id {
+ VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
+ VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
+ VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
+ };
+ let generics = generics(db.upcast(), def);
+ let mut res = ArenaMap::default();
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ for (field_id, field_data) in var_data.fields().iter() {
+ res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)))
+ }
+ Arc::new(res)
+}
+
+/// This query exists only to be used when resolving shorthand associated types
+/// like `T::Item`.
+///
+/// See the analogous query in rustc and its comment:
+/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
+/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
+/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
+/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
+pub(crate) fn generic_predicates_for_param_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ param_id: TypeOrConstParamId,
+ assoc_name: Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let generics = generics(db.upcast(), def);
+ let mut predicates: Vec<_> = resolver
+ .where_predicates_in_scope()
+ // we have to filter out all other predicates *first*, before attempting to lower them
+ .filter(|pred| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound, .. } => {
+ match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => {
+ if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
+ return false;
+ }
+ }
+ &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ let target_id = TypeOrConstParamId { parent: def, local_id };
+ if target_id != param_id {
+ return false;
+ }
+ }
+ };
+
+ match &**bound {
+ TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
+ // Only lower the bound if the trait could possibly define the associated
+ // type we're looking for.
+
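+ // For example, when resolving `T::Item` with `T: Iterator + Clone`, only the
+ // `Iterator` bound can define an `Item` associated type, so the `Clone` bound
+ // is filtered out before lowering.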
+ let assoc_name = match &assoc_name {
+ Some(it) => it,
+ None => return true,
+ };
+ let tr = match resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ {
+ Some(TypeNs::TraitId(tr)) => tr,
+ _ => return false,
+ };
+
+ all_super_traits(db.upcast(), tr).iter().any(|tr| {
+ db.trait_data(*tr).items.iter().any(|(name, item)| {
+ matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
+ })
+ })
+ }
+ TypeBound::Lifetime(_) | TypeBound::Error => false,
+ }
+ }
+ WherePredicate::Lifetime { .. } => false,
+ })
+ .flat_map(|pred| {
+ ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
+ })
+ .collect();
+
+ let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ let implicitly_sized_predicates =
+ implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+ predicates.extend(implicitly_sized_predicates);
+ predicates.into()
+}
+
+pub(crate) fn generic_predicates_for_param_recover(
+ _db: &dyn HirDatabase,
+ _cycle: &[String],
+ _def: &GenericDefId,
+ _param_id: &TypeOrConstParamId,
+ _assoc_name: &Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+ Arc::new([])
+}
+
+pub(crate) fn trait_environment_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<TraitEnvironment> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Placeholder);
+ let mut traits_in_scope = Vec::new();
+ let mut clauses = Vec::new();
+ for pred in resolver.where_predicates_in_scope() {
+ for pred in ctx.lower_where_predicate(pred, false) {
+ if let WhereClause::Implemented(tr) = &pred.skip_binders() {
+ traits_in_scope.push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
+ }
+ let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+ clauses.push(program_clause.into_from_env_clause(Interner));
+ }
+ }
+
+ let container: Option<ItemContainerId> = match def {
+ // FIXME: is there a function for this?
+ GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
+ GenericDefId::AdtId(_) => None,
+ GenericDefId::TraitId(_) => None,
+ GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
+ GenericDefId::ImplId(_) => None,
+ GenericDefId::EnumVariantId(_) => None,
+ GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
+ };
+ if let Some(ItemContainerId::TraitId(trait_id)) = container {
+ // add `Self: Trait<T1, T2, ...>` to the environment in trait
+ // function default implementations (and speculative code
+ // inside consts or type aliases)
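+ // For example, inside a provided method body of `trait Foo { fn f(&self) {} }`,
+ // the clause `Self: Foo` is assumed to hold.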
+ cov_mark::hit!(trait_self_implements_self);
+ let substs = TyBuilder::placeholder_subst(db, trait_id);
+ let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
+ let pred = WhereClause::Implemented(trait_ref);
+ let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+ clauses.push(program_clause.into_from_env_clause(Interner));
+ }
+
+ let subst = generics(db.upcast(), def).placeholder_subst(db);
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ let implicitly_sized_clauses =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
+ let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+ program_clause.into_from_env_clause(Interner)
+ });
+ clauses.extend(implicitly_sized_clauses);
+
+ let krate = def.module(db.upcast()).krate();
+
+ let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
+
+ Arc::new(TraitEnvironment { krate, traits_from_clauses: traits_in_scope, env })
+}
+
+/// Resolve the where clause(s) of an item with generics.
+pub(crate) fn generic_predicates_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let generics = generics(db.upcast(), def);
+
+ let mut predicates = resolver
+ .where_predicates_in_scope()
+ .flat_map(|pred| {
+ ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p))
+ })
+ .collect::<Vec<_>>();
+
+ let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ let implicitly_sized_predicates =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+ predicates.extend(implicitly_sized_predicates);
+ predicates.into()
+}
+
+/// Generate implicit `: Sized` predicates for all generic parameters that have no `?Sized` bound.
+/// The exception is the `Self` type of a trait definition.
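+/// For example, `fn f<T, U: ?Sized>()` gets an implicit `T: Sized` clause but no
+/// `U: Sized` clause, and a trait definition gets no implicit `Self: Sized` clause.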
+fn implicitly_sized_clauses<'a>(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ explicitly_unsized_tys: &'a FxHashSet<Ty>,
+ substitution: &'a Substitution,
+ resolver: &Resolver,
+) -> impl Iterator<Item = WhereClause> + 'a {
+ let is_trait_def = matches!(def, GenericDefId::TraitId(..));
+ let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
+ let sized_trait = db
+ .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+
+ sized_trait.into_iter().flat_map(move |sized_trait| {
+ let implicitly_sized_tys = generic_args
+ .iter()
+ .filter_map(|generic_arg| generic_arg.ty(Interner))
+ .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
+ implicitly_sized_tys.map(move |self_ty| {
+ WhereClause::Implemented(TraitRef {
+ trait_id: sized_trait,
+ substitution: Substitution::from1(Interner, self_ty.clone()),
+ })
+ })
+ })
+}
+
+/// Resolve the default type params from generics
+pub(crate) fn generic_defaults_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<[Binders<chalk_ir::GenericArg<Interner>>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let generic_params = generics(db.upcast(), def);
+
+ let defaults = generic_params
+ .iter()
+ .enumerate()
+ .map(|(idx, (id, p))| {
+ let p = match p {
+ TypeOrConstParamData::TypeParamData(p) => p,
+ TypeOrConstParamData::ConstParamData(_) => {
+ // FIXME: implement const generic defaults
+ let val = unknown_const_as_generic(
+ db.const_param_ty(ConstParamId::from_unchecked(id)),
+ );
+ return crate::make_binders_with_count(db, idx, &generic_params, val);
+ }
+ };
+ let mut ty =
+ p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+
+ // Each default can only refer to previous parameters; a type parameter
+ // default referring to a parameter that comes after it is forbidden
+ // (FIXME: report a diagnostic).
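+ // For illustration: in `struct Foo<T, U = T>` the default of `U` may refer to `T`,
+ // but a default referring to a later parameter is replaced with an error type by
+ // `fallback_bound_vars` below.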
+ ty = fallback_bound_vars(ty, idx);
+ let val = GenericArgData::Ty(ty).intern(Interner);
+ crate::make_binders_with_count(db, idx, &generic_params, val)
+ })
+ .collect();
+
+ defaults
+}
+
+pub(crate) fn generic_defaults_recover(
+ db: &dyn HirDatabase,
+ _cycle: &[String],
+ def: &GenericDefId,
+) -> Arc<[Binders<crate::GenericArg>]> {
+ let generic_params = generics(db.upcast(), *def);
+ // FIXME: this code is not covered in tests.
+ // we still need one default per parameter
+ let defaults = generic_params
+ .iter_id()
+ .enumerate()
+ .map(|(count, id)| {
+ let val = match id {
+ itertools::Either::Left(_) => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ };
+ crate::make_binders_with_count(db, count, &generic_params, val)
+ })
+ .collect();
+
+ defaults
+}
+
+fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_params = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let params = data.params.iter().map(|(_, tr)| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let ret = ctx_ret.lower_ty(&data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let sig = CallableSig::from_params_and_return(params, ret, data.is_varargs());
+ make_binders(db, &generics, sig)
+}
+
+/// Build the declared type of a function. This should not need to look at the
+/// function body.
+fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), def.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::FunctionId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+/// Build the declared type of a const.
+fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
+ let data = db.const_data(def);
+ let generics = generics(db.upcast(), def.into());
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+
+ make_binders(db, &generics, ctx.lower_ty(&data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
+ let data = db.static_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx = TyLoweringContext::new(db, &resolver);
+
+ Binders::empty(Interner, ctx.lower_ty(&data.type_ref))
+}
+
+fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
+ let struct_data = db.struct_data(def);
+ let fields = struct_data.variant_data.fields();
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
+ Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple struct constructor.
+fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
+ let struct_data = db.struct_data(def);
+ if let StructKind::Unit = struct_data.variant_data.kind() {
+ return type_for_adt(db, def.into());
+ }
+ let generics = generics(db.upcast(), def.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::StructId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id];
+ let fields = var_data.variant_data.fields();
+ let resolver = def.parent.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
+ Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple enum variant constructor.
+fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id].variant_data;
+ if let StructKind::Unit = var_data.kind() {
+ return type_for_adt(db, def.parent.into());
+ }
+ let generics = generics(db.upcast(), def.parent.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::EnumVariantId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), adt.into());
+ let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner);
+ make_binders(db, &generics, ty)
+}
+
+fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), t.into());
+ let resolver = t.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ if db.type_alias_data(t).is_extern {
+ Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
+ } else {
+ let type_ref = &db.type_alias_data(t).type_ref;
+ let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
+ make_binders(db, &generics, inner)
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum CallableDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ EnumVariantId(EnumVariantId),
+}
+impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
+
+impl CallableDefId {
+ pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
+ let db = db.upcast();
+ match self {
+ CallableDefId::FunctionId(f) => f.lookup(db).module(db),
+ CallableDefId::StructId(s) => s.lookup(db).container,
+ CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container,
+ }
+ .krate()
+ }
+}
+
+impl From<CallableDefId> for GenericDefId {
+ fn from(def: CallableDefId) -> GenericDefId {
+ match def {
+ CallableDefId::FunctionId(f) => f.into(),
+ CallableDefId::StructId(s) => s.into(),
+ CallableDefId::EnumVariantId(e) => e.into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum TyDefId {
+ BuiltinType(BuiltinType),
+ AdtId(AdtId),
+ TypeAliasId(TypeAliasId),
+}
+impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ValueTyDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ UnionId(UnionId),
+ EnumVariantId(EnumVariantId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+}
+impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
+
+/// Build the declared type of an item. This depends on the namespace; e.g. for
+/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
+/// the constructor function `(usize) -> Foo` which lives in the values
+/// namespace.
+pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
+ match def {
+ TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
+ TyDefId::AdtId(it) => type_for_adt(db, it),
+ TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
+ }
+}
+
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+ let generics = match *def {
+ TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
+ TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
+ TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
+ };
+ make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
+ match def {
+ ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
+ ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+ ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
+ ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
+ ValueTyDefId::ConstId(it) => type_for_const(db, it),
+ ValueTyDefId::StaticId(it) => type_for_static(db, it),
+ }
+}
+
+pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
+ let impl_loc = impl_id.lookup(db.upcast());
+ let impl_data = db.impl_data(impl_id);
+ let resolver = impl_id.resolver(db.upcast());
+ let _cx = stdx::panic_context::enter(format!(
+ "impl_self_ty_query({:?} -> {:?} -> {:?})",
+ impl_id, impl_loc, impl_data
+ ));
+ let generics = generics(db.upcast(), impl_id.into());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty))
+}
+
+// Returns an error type if `def` unexpectedly refers to a type parameter instead of a const parameter.
+pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
+ let parent_data = db.generic_params(def.parent());
+ let data = &parent_data.type_or_consts[def.local_id()];
+ let resolver = def.parent().resolver(db.upcast());
+ let ctx = TyLoweringContext::new(db, &resolver);
+ match data {
+ TypeOrConstParamData::TypeParamData(_) => {
+ never!();
+ Ty::new(Interner, TyKind::Error)
+ }
+ TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(&d.ty),
+ }
+}
+
+pub(crate) fn impl_self_ty_recover(
+ db: &dyn HirDatabase,
+ _cycle: &[String],
+ impl_id: &ImplId,
+) -> Binders<Ty> {
+ let generics = generics(db.upcast(), (*impl_id).into());
+ make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
+ let impl_loc = impl_id.lookup(db.upcast());
+ let impl_data = db.impl_data(impl_id);
+ let resolver = impl_id.resolver(db.upcast());
+ let _cx = stdx::panic_context::enter(format!(
+ "impl_trait_query({:?} -> {:?} -> {:?})",
+ impl_id, impl_loc, impl_data
+ ));
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
+ let target_trait = impl_data.target_trait.as_ref()?;
+ Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
+}
+
+pub(crate) fn return_type_impl_traits(
+ db: &dyn HirDatabase,
+ def: hir_def::FunctionId,
+) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
+ // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let _ret = (&ctx_ret).lower_ty(&data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let return_type_impl_traits =
+ ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
+ if return_type_impl_traits.impl_traits.is_empty() {
+ None
+ } else {
+ Some(Arc::new(make_binders(db, &generics, return_type_impl_traits)))
+ }
+}
+
+pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
+ match m {
+ hir_def::type_ref::Mutability::Shared => Mutability::Not,
+ hir_def::type_ref::Mutability::Mut => Mutability::Mut,
+ }
+}
+
+/// Checks if the provided generic arg matches its expected kind, then lowers it via the
+/// provided closures. Uses an unknown value if there is a kind mismatch.
+///
+/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
+pub(crate) fn generic_arg_to_chalk<'a, T>(
+ db: &dyn HirDatabase,
+ kind_id: Either<TypeParamId, ConstParamId>,
+ arg: &'a GenericArg,
+ this: &mut T,
+ for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
+ for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
+) -> Option<crate::GenericArg> {
+ let kind = match kind_id {
+ Either::Left(_) => ParamKind::Type,
+ Either::Right(id) => {
+ let ty = db.const_param_ty(id);
+ ParamKind::Const(ty)
+ }
+ };
+ Some(match (arg, kind) {
+ (GenericArg::Type(type_ref), ParamKind::Type) => {
+ let ty = for_type(this, type_ref);
+ GenericArgData::Ty(ty).intern(Interner)
+ }
+ (GenericArg::Const(c), ParamKind::Const(c_ty)) => {
+ GenericArgData::Const(for_const(this, c, c_ty)).intern(Interner)
+ }
+ (GenericArg::Const(_), ParamKind::Type) => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
+ // We want to recover simple idents, which the parser detects
+ // as types. Maybe this is not the best place to do it, but
+ // it works.
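+ // For example, in `Foo<N>` the argument `N` is parsed as a type path even when
+ // the corresponding parameter is a const; re-interpret it as a const path here.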
+ if let TypeRef::Path(p) = t {
+ let p = p.mod_path();
+ if p.kind == PathKind::Plain {
+ if let [n] = p.segments() {
+ let c = ConstScalarOrPath::Path(n.clone());
+ return Some(
+ GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
+ );
+ }
+ }
+ }
+ unknown_const_as_generic(c_ty)
+ }
+ (GenericArg::Lifetime(_), _) => return None,
+ })
+}
+
+pub(crate) fn const_or_path_to_chalk(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ expected_ty: Ty,
+ value: &ConstScalarOrPath,
+ mode: ParamLoweringMode,
+ args: impl FnOnce() -> Generics,
+ debruijn: DebruijnIndex,
+) -> Const {
+ match value {
+ ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty),
+ ConstScalarOrPath::Path(n) => {
+ let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
+ path_to_const(db, resolver, &path, mode, args, debruijn)
+ .unwrap_or_else(|| unknown_const(expected_ty))
+ }
+ }
+}
+
+/// Replaces any 'free' bound vars in `s` (i.e. those at the innermost binder with indices
+/// at or past `num_vars_to_keep`) with `TyKind::Error` (or an unknown const).
+fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ s: T,
+ num_vars_to_keep: usize,
+) -> T {
+ crate::fold_free_vars(
+ s,
+ |bound, binders| {
+ if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+ TyKind::Error.intern(Interner)
+ } else {
+ bound.shifted_in_from(binders).to_ty(Interner)
+ }
+ },
+ |ty, bound, binders| {
+ if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+ unknown_const(ty.clone())
+ } else {
+ bound.shifted_in_from(binders).to_const(Interner, ty)
+ }
+ },
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
new file mode 100644
index 000000000..d765fee0e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
@@ -0,0 +1,148 @@
+//! This module contains the implementations of the `ToChalk` trait, which
+//! handles conversion between our data types and their corresponding types in
+//! Chalk (in both directions); plus some helper functions for more specialized
+//! conversions.
+
+use chalk_solve::rust_ir;
+
+use base_db::salsa::{self, InternKey};
+use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
+
+use crate::{
+ chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId,
+ Interner, OpaqueTyId, PlaceholderIndex,
+};
+
+pub(crate) trait ToChalk {
+ type Chalk;
+ fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
+ fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
+}
+
+pub(crate) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
+where
+ T: ToChalk<Chalk = ChalkT>,
+{
+ T::from_chalk(db, chalk)
+}
+
+impl ToChalk for hir_def::ImplId {
+ type Chalk = chalk_db::ImplId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId {
+ chalk_ir::ImplId(self.as_intern_id())
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId {
+ InternKey::from_intern_id(impl_id.0)
+ }
+}
+
+impl ToChalk for CallableDefId {
+ type Chalk = FnDefId;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
+ db.intern_callable_def(self).into()
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
+ db.lookup_intern_callable_def(fn_def_id.into())
+ }
+}
+
+pub(crate) struct TypeAliasAsValue(pub(crate) TypeAliasId);
+
+impl ToChalk for TypeAliasAsValue {
+ type Chalk = chalk_db::AssociatedTyValueId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
+ rust_ir::AssociatedTyValueId(self.0.as_intern_id())
+ }
+
+ fn from_chalk(
+ _db: &dyn HirDatabase,
+ assoc_ty_value_id: chalk_db::AssociatedTyValueId,
+ ) -> TypeAliasAsValue {
+ TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
+ }
+}
+
+impl From<FnDefId> for crate::db::InternedCallableDefId {
+ fn from(fn_def_id: FnDefId) -> Self {
+ InternKey::from_intern_id(fn_def_id.0)
+ }
+}
+
+impl From<crate::db::InternedCallableDefId> for FnDefId {
+ fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
+ chalk_ir::FnDefId(callable_def_id.as_intern_id())
+ }
+}
+
+impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
+ fn from(id: OpaqueTyId) -> Self {
+ InternKey::from_intern_id(id.0)
+ }
+}
+
+impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
+ fn from(id: crate::db::InternedOpaqueTyId) -> Self {
+ chalk_ir::OpaqueTyId(id.as_intern_id())
+ }
+}
+
+impl From<chalk_ir::ClosureId<Interner>> for crate::db::InternedClosureId {
+ fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
+ Self::from_intern_id(id.0)
+ }
+}
+
+impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
+ fn from(id: crate::db::InternedClosureId) -> Self {
+ chalk_ir::ClosureId(id.as_intern_id())
+ }
+}
+
+pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
+ chalk_ir::ForeignDefId(salsa::InternKey::as_intern_id(&id))
+}
+
+pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId {
+ salsa::InternKey::from_intern_id(id.0)
+}
+
+pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId {
+ chalk_ir::AssocTypeId(salsa::InternKey::as_intern_id(&id))
+}
+
+pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
+ salsa::InternKey::from_intern_id(id.0)
+}
+
+pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
+ assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
+ let interned_id = salsa::InternKey::from_intern_id(salsa::InternId::from(idx.idx));
+ db.lookup_intern_type_or_const_param_id(interned_id)
+}
+
+pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
+ let interned_id = db.intern_type_or_const_param_id(id);
+ PlaceholderIndex {
+ ui: chalk_ir::UniverseIndex::ROOT,
+ idx: salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+ }
+}
+
+pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
+ assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
+ let interned_id = salsa::InternKey::from_intern_id(salsa::InternId::from(idx.idx));
+ db.lookup_intern_lifetime_param_id(interned_id)
+}
+
+pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId {
+ chalk_ir::TraitId(salsa::InternKey::as_intern_id(&id))
+}
+
+pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId {
+ salsa::InternKey::from_intern_id(id.0)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
new file mode 100644
index 000000000..15df7b3dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -0,0 +1,1186 @@
+//! This module is concerned with finding methods that a given type provides.
+//! For details about how this works in rustc, see the method lookup page in the
+//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
+//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateId, Edition};
+use chalk_ir::{cast::Cast, Mutability, UniverseIndex};
+use hir_def::{
+ data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ TraitId,
+};
+use hir_expand::name::Name;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
+
+use crate::{
+ autoderef::{self, AutoderefKind},
+ db::HirDatabase,
+ from_foreign_def_id,
+ infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
+ primitive::{FloatTy, IntTy, UintTy},
+ static_lifetime,
+ utils::all_super_traits,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
+ Scalar, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+};
+
+/// This is used as a key for indexing impls.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TyFingerprint {
+ // These are lang item impls:
+ Str,
+ Slice,
+ Array,
+ Never,
+ RawPtr(Mutability),
+ Scalar(Scalar),
+ // These can have user-defined impls:
+ Adt(hir_def::AdtId),
+ Dyn(TraitId),
+ ForeignType(ForeignDefId),
+ // These only exist for trait impls
+ Unit,
+ Unnameable,
+ Function(u32),
+}
+
+impl TyFingerprint {
+ /// Creates a TyFingerprint for looking up an inherent impl. Only certain
+ /// types can have inherent impls: if we have some `struct S`, we can have
+ /// an `impl S`, but not `impl &S`. Hence, this will return `None` for
+ /// reference types and such.
+ pub fn for_inherent_impl(ty: &Ty) -> Option<TyFingerprint> {
+ let fp = match ty.kind(Interner) {
+ TyKind::Str => TyFingerprint::Str,
+ TyKind::Never => TyFingerprint::Never,
+ TyKind::Slice(..) => TyFingerprint::Slice,
+ TyKind::Array(..) => TyFingerprint::Array,
+ TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+ TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+ TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+ TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+ TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+ _ => return None,
+ };
+ Some(fp)
+ }
+
+ /// Creates a TyFingerprint for looking up a trait impl.
+ pub fn for_trait_impl(ty: &Ty) -> Option<TyFingerprint> {
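+ // Note that `&T` and non-empty tuples are fingerprinted by their pointee / first
+ // element: the fingerprint is only a coarse key for narrowing down candidate impls.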
+ let fp = match ty.kind(Interner) {
+ TyKind::Str => TyFingerprint::Str,
+ TyKind::Never => TyFingerprint::Never,
+ TyKind::Slice(..) => TyFingerprint::Slice,
+ TyKind::Array(..) => TyFingerprint::Array,
+ TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+ TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+ TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+ TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+ TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+ TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
+ TyKind::Tuple(_, subst) => {
+ let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(Interner));
+ match first_ty {
+ Some(ty) => return TyFingerprint::for_trait_impl(ty),
+ None => TyFingerprint::Unit,
+ }
+ }
+ TyKind::AssociatedType(_, _)
+ | TyKind::OpaqueType(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Closure(_, _)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => TyFingerprint::Unnameable,
+ TyKind::Function(fn_ptr) => {
+ TyFingerprint::Function(fn_ptr.substitution.0.len(Interner) as u32)
+ }
+ TyKind::Alias(_)
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Error => return None,
+ };
+ Some(fp)
+ }
+}
+
+pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I8)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I16)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I32)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I64)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I128)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::Isize)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U8)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U16)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U32)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U64)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U128)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)),
+];
+
+pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
+ TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)),
+ TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)),
+];
+
+/// Trait impls defined or available in some crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct TraitImpls {
+ // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
+ map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
+}
+
+impl TraitImpls {
+ pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ pub(crate) fn trait_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ let _p = profile::span("trait_impls_in_block_query");
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let block_def_map = db.block_def_map(block)?;
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+
+ Some(Arc::new(impls))
+ }
+
+ pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
+ let crate_graph = db.crate_graph();
+ let mut res = Self { map: FxHashMap::default() };
+
+ for krate in crate_graph.transitive_deps(krate) {
+ res.merge(&db.trait_impls_in_crate(krate));
+ }
+ res.shrink_to_fit();
+
+ Arc::new(res)
+ }
+
+ fn shrink_to_fit(&mut self) {
+ self.map.shrink_to_fit();
+ self.map.values_mut().for_each(|map| {
+ map.shrink_to_fit();
+ map.values_mut().for_each(Vec::shrink_to_fit);
+ });
+ }
+
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let target_trait = match db.impl_trait(impl_id) {
+ Some(tr) => tr.skip_binders().hir_trait_id(),
+ None => continue,
+ };
+ let self_ty = db.impl_self_ty(impl_id);
+ let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.skip_binders());
+ self.map
+ .entry(target_trait)
+ .or_default()
+ .entry(self_ty_fp)
+ .or_default()
+ .push(impl_id);
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ fn merge(&mut self, other: &Self) {
+ for (trait_, other_map) in &other.map {
+ let map = self.map.entry(*trait_).or_default();
+ for (fp, impls) in other_map {
+ map.entry(*fp).or_default().extend(impls);
+ }
+ }
+ }
+
+ /// Queries all trait impls for the given type.
+ pub fn for_self_ty_without_blanket_impls(
+ &self,
+ fp: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .values()
+ .flat_map(move |impls| impls.get(&Some(fp)).into_iter())
+ .flat_map(|it| it.iter().copied())
+ }
+
+ /// Queries all impls of the given trait.
+ pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+
+ /// Queries all impls of `trait_` that may apply to `self_ty`.
+ pub fn for_trait_and_self_ty(
+ &self,
+ trait_: TraitId,
+ self_ty: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None)))
+ .flat_map(|v| v.iter().copied())
+ }
+
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+}
+
+/// Inherent impls defined in some crate.
+///
+/// Inherent impls can only be defined in the crate that also defines the self type of the impl
+/// (note that some primitives are considered to be defined by both libcore and liballoc).
+///
+/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
+/// single crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct InherentImpls {
+ map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+}
+
+impl InherentImpls {
+ pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ pub(crate) fn inherent_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ let mut impls = Self { map: FxHashMap::default() };
+ if let Some(block_def_map) = db.block_def_map(block) {
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+ return Some(Arc::new(impls));
+ }
+ None
+ }
+
+ fn shrink_to_fit(&mut self) {
+ self.map.values_mut().for_each(Vec::shrink_to_fit);
+ self.map.shrink_to_fit();
+ }
+
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let data = db.impl_data(impl_id);
+ if data.target_trait.is_some() {
+ continue;
+ }
+
+ let self_ty = db.impl_self_ty(impl_id);
+ let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
+ if let Some(fp) = fp {
+ self.map.entry(fp).or_default().push(impl_id);
+ }
+ // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
+ match TyFingerprint::for_inherent_impl(self_ty) {
+ Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
+ None => &[],
+ }
+ }
+
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|v| v.iter().copied())
+ }
+}
+
+pub fn inherent_impl_crates_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ fp: TyFingerprint,
+) -> ArrayVec<CrateId, 2> {
+ let _p = profile::span("inherent_impl_crates_query");
+ let mut res = ArrayVec::new();
+ let crate_graph = db.crate_graph();
+
+ for krate in crate_graph.transitive_deps(krate) {
+ if res.is_full() {
+ // We never look for or store more than two crates here (`res` has capacity 2),
+ // so stop scanning once it is full.
+ break;
+ }
+ let impls = db.inherent_impls_in_crate(krate);
+ if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) {
+ res.push(krate);
+ }
+ }
+
+ res
+}
+
+fn collect_unnamed_consts<'a>(
+ db: &'a dyn HirDatabase,
+ scope: &'a ItemScope,
+) -> impl Iterator<Item = ConstId> + 'a {
+ let unnamed_consts = scope.unnamed_consts();
+
+ // FIXME: Also treat consts named `_DERIVE_*` as unnamed, since synstructure generates those.
+ // Should be removed once synstructure stops doing that.
+ let synstructure_hack_consts = scope.values().filter_map(|(item, _)| match item {
+ ModuleDefId::ConstId(id) => {
+ let loc = id.lookup(db.upcast());
+ let item_tree = loc.id.item_tree(db.upcast());
+ if item_tree[loc.id.value]
+ .name
+ .as_ref()
+ .map_or(false, |n| n.to_smol_str().starts_with("_DERIVE_"))
+ {
+ Some(id)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ });
+
+ unnamed_consts.chain(synstructure_hack_consts)
+}
+
+pub fn def_crates(
+ db: &dyn HirDatabase,
+ ty: &Ty,
+ cur_crate: CrateId,
+) -> Option<ArrayVec<CrateId, 2>> {
+ let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
+
+ let fp = TyFingerprint::for_inherent_impl(ty);
+
+ match ty.kind(Interner) {
+ TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())),
+ TyKind::Foreign(id) => {
+ mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast()))
+ }
+ TyKind::Dyn(_) => ty
+ .dyn_trait()
+ .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))),
+ // for primitives, there may be impls in various places (core and alloc
+ // mostly). We just check the whole crate graph for crates with impls
+ // (cached behind a query).
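+ // For example, `str` has inherent impls in both core (e.g. `str::len`) and
+ // alloc (e.g. `str::to_uppercase`), so up to two crates can be returned.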
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Slice(_)
+ | TyKind::Array(..)
+ | TyKind::Raw(..) => {
+ Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive")))
+ }
+ _ => return None,
+ }
+}
+
+/// Look up the method with the given name.
+pub(crate) fn lookup_method(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: &Name,
+) -> Option<(ReceiverAdjustments, FunctionId)> {
+ iterate_method_candidates(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ Some(name),
+ LookupMode::MethodCall,
+ |adjustments, f| match f {
+ AssocItemId::FunctionId(f) => Some((adjustments, f)),
+ _ => None,
+ },
+ )
+}
+
+/// Whether we're looking up a dotted method call (like `v.len()`) or a path
+/// (like `Vec::new`).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum LookupMode {
+ /// Looking up a method call like `v.len()`: We only consider candidates
+ /// that have a `self` parameter, and do autoderef.
+ MethodCall,
+ /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
+ /// candidates including associated constants, but don't do autoderef.
+ Path,
+}
+
+#[derive(Clone, Copy)]
+pub enum VisibleFromModule {
+ /// Filter for results that are visible from the given module
+ Filter(ModuleId),
+ /// Include impls from the given block.
+ IncludeBlock(BlockId),
+ /// Do nothing special with regard to visibility
+ None,
+}
+
+impl From<Option<ModuleId>> for VisibleFromModule {
+ fn from(module: Option<ModuleId>) -> Self {
+ match module {
+ Some(module) => Self::Filter(module),
+ None => Self::None,
+ }
+ }
+}
+
+impl From<Option<BlockId>> for VisibleFromModule {
+ fn from(block: Option<BlockId>) -> Self {
+ match block {
+ Some(block) => Self::IncludeBlock(block),
+ None => Self::None,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct ReceiverAdjustments {
+ autoref: Option<Mutability>,
+ autoderefs: usize,
+ unsize_array: bool,
+}
+
+impl ReceiverAdjustments {
+ pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec<Adjustment>) {
+ let mut ty = ty;
+ let mut adjust = Vec::new();
+ for _ in 0..self.autoderefs {
+ match autoderef::autoderef_step(table, ty.clone()) {
+ None => {
+ never!("autoderef not possible for {:?}", ty);
+ ty = TyKind::Error.intern(Interner);
+ break;
+ }
+ Some((kind, new_ty)) => {
+ ty = new_ty.clone();
+ adjust.push(Adjustment {
+ kind: Adjust::Deref(match kind {
+ // FIXME should we know the mutability here?
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Builtin => None,
+ }),
+ target: new_ty,
+ });
+ }
+ }
+ }
+ if self.unsize_array {
+ ty = match ty.kind(Interner) {
+ TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner),
+ _ => {
+ never!("unsize_array with non-array {:?}", ty);
+ ty
+ }
+ };
+ // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference
+ adjust.push(Adjustment {
+ kind: Adjust::Pointer(PointerCast::Unsize),
+ target: ty.clone(),
+ });
+ }
+ if let Some(m) = self.autoref {
+ ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
+ adjust
+ .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
+ }
+ (ty, adjust)
+ }
+
+ fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments {
+ Self { autoref: Some(m), ..*self }
+ }
+}
+
+// This would be nicer if it just returned an iterator, but that runs into
+// lifetime problems, because we need to borrow temp `CrateImplDefs`.
+// FIXME add a context type here?
+pub(crate) fn iterate_method_candidates<T>(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mode: LookupMode,
+ mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option<T>,
+) -> Option<T> {
+ let mut slot = None;
+ iterate_method_candidates_dyn(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ mode,
+ &mut |adj, item| {
+ assert!(slot.is_none());
+ if let Some(it) = callback(adj, item) {
+ slot = Some(it);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+}
+
+pub fn lookup_impl_method(
+ self_ty: &Ty,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+ name: &Name,
+) -> Option<FunctionId> {
+ let self_ty_fp = TyFingerprint::for_trait_impl(self_ty)?;
+ let trait_impls = db.trait_impls_in_deps(env.krate);
+ let impls = trait_impls.for_trait_and_self_ty(trait_, self_ty_fp);
+ let mut table = InferenceTable::new(db, env.clone());
+ find_matching_impl(impls, &mut table, &self_ty).and_then(|data| {
+ data.items.iter().find_map(|it| match it {
+ AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
+ _ => None,
+ })
+ })
+}
+
+fn find_matching_impl(
+ mut impls: impl Iterator<Item = ImplId>,
+ table: &mut InferenceTable<'_>,
+ self_ty: &Ty,
+) -> Option<Arc<ImplData>> {
+ let db = table.db;
+ loop {
+ let impl_ = impls.next()?;
+ let r = table.run_in_snapshot(|table| {
+ let impl_data = db.impl_data(impl_);
+ let substs =
+ TyBuilder::subst_for_def(db, impl_).fill_with_inference_vars(table).build();
+ let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);
+
+ table
+ .unify(self_ty, &impl_ty)
+ .then(|| {
+ let wh_goals =
+ crate::chalk_db::convert_where_clauses(db, impl_.into(), &substs)
+ .into_iter()
+ .map(|b| b.cast(Interner));
+
+ let goal = crate::Goal::all(Interner, wh_goals);
+
+ table.try_obligation(goal).map(|_| impl_data)
+ })
+ .flatten()
+ });
+ if r.is_some() {
+ break r;
+ }
+ }
+}
+
+pub fn iterate_path_candidates(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ iterate_method_candidates_dyn(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ LookupMode::Path,
+ // the adjustments are not relevant for path lookup
+ &mut |_, id| callback(id),
+ )
+}
+
+pub fn iterate_method_candidates_dyn(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mode: LookupMode,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ match mode {
+ LookupMode::MethodCall => {
+ // For method calls, Rust first does any number of autoderef steps, and
+ // then at most one autoref (i.e. when the method takes &self or &mut self).
+ // Note that when we've got a receiver like &S, even if the method
+ // we find in the end takes &self, we still do the autoderef step
+ // (just as rustc does an autoderef and then autoref again).
+
+ // We have to be careful about the order we're looking at candidates
+ // in here. Consider the case where we're resolving `x.clone()`
+ // where `x: &Vec<_>`. This resolves to the clone method with self
+ // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
+ // the receiver type exactly matches before cases where we have to
+ // do autoref. But in the autoderef steps, the `&_` self type comes
+ // up *before* the `Vec<_>` self type.
+ //
+ // On the other hand, we don't want to just pick any by-value method
+ // before any by-autoref method; it's just that we need to consider
+ // the methods by autoderef order of *receiver types*, not *self
+ // types*.
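+ //
+ // Concretely, for `x.clone()` with `x: &Vec<_>`, the receiver types are tried
+ // in the order `&Vec<_>`, `Vec<_>`, `[_]` (each also with `&`/`&mut` autoref).
+ // `Vec::clone` already matches the plain `&Vec<_>` receiver, so it wins over
+ // the blanket `impl<T> Clone for &T`, which would only match after an extra
+ // autoref (`&&Vec<_>`).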
+
+ let mut table = InferenceTable::new(db, env.clone());
+ let ty = table.instantiate_canonical(ty.clone());
+ let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+
+ let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+ iterate_method_candidates_with_autoref(
+ &receiver_ty,
+ adj,
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ callback,
+ )
+ });
+ result
+ }
+ LookupMode::Path => {
+ // No autoderef for path lookups
+ iterate_method_candidates_for_self_ty(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ callback,
+ )
+ }
+ }
+}
+
+fn iterate_method_candidates_with_autoref(
+ receiver_ty: &Canonical<Ty>,
+ first_adjustment: ReceiverAdjustments,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) {
+ // don't try to resolve methods on unknown types
+ return ControlFlow::Continue(());
+ }
+
+ iterate_method_candidates_by_receiver(
+ receiver_ty,
+ first_adjustment.clone(),
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )?;
+
+ let refed = Canonical {
+ value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
+ .intern(Interner),
+ binders: receiver_ty.binders.clone(),
+ };
+
+ iterate_method_candidates_by_receiver(
+ &refed,
+ first_adjustment.with_autoref(Mutability::Not),
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )?;
+
+ let ref_muted = Canonical {
+ value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
+ .intern(Interner),
+ binders: receiver_ty.binders.clone(),
+ };
+
+ iterate_method_candidates_by_receiver(
+ &ref_muted,
+ first_adjustment.with_autoref(Mutability::Mut),
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )
+}
+
+fn iterate_method_candidates_by_receiver(
+ receiver_ty: &Canonical<Ty>,
+ receiver_adjustments: ReceiverAdjustments,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let mut table = InferenceTable::new(db, env);
+ let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
+ let snapshot = table.snapshot();
+ // We're looking for methods with *receiver* type receiver_ty. These could
+ // be found in any of the derefs of receiver_ty, so we have to go through
+ // that.
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ while let Some((self_ty, _)) = autoderef.next() {
+ iterate_inherent_methods(
+ &self_ty,
+ &mut autoderef.table,
+ name,
+ Some(&receiver_ty),
+ Some(receiver_adjustments.clone()),
+ visible_from_module,
+ &mut callback,
+ )?
+ }
+
+ table.rollback_to(snapshot);
+
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ while let Some((self_ty, _)) = autoderef.next() {
+ iterate_trait_method_candidates(
+ &self_ty,
+ &mut autoderef.table,
+ traits_in_scope,
+ name,
+ Some(&receiver_ty),
+ Some(receiver_adjustments.clone()),
+ &mut callback,
+ )?
+ }
+
+ ControlFlow::Continue(())
+}
+
+fn iterate_method_candidates_for_self_ty(
+ self_ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let mut table = InferenceTable::new(db, env);
+ let self_ty = table.instantiate_canonical(self_ty.clone());
+ iterate_inherent_methods(
+ &self_ty,
+ &mut table,
+ name,
+ None,
+ None,
+ visible_from_module,
+ &mut callback,
+ )?;
+ iterate_trait_method_candidates(
+ &self_ty,
+ &mut table,
+ traits_in_scope,
+ name,
+ None,
+ None,
+ callback,
+ )
+}
+
+fn iterate_trait_method_candidates(
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let db = table.db;
+ let env = table.trait_env.clone();
+ let self_is_array = matches!(self_ty.kind(Interner), chalk_ir::TyKind::Array(..));
+ // if ty is `dyn Trait`, the trait doesn't need to be in scope
+ let inherent_trait =
+ self_ty.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
+ let env_traits = matches!(self_ty.kind(Interner), TyKind::Placeholder(_))
+ // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
+ .then(|| {
+ env.traits_in_scope_from_clauses(self_ty.clone())
+ .flat_map(|t| all_super_traits(db.upcast(), t))
+ })
+ .into_iter()
+ .flatten();
+ let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());
+
+ let canonical_self_ty = table.canonicalize(self_ty.clone()).value;
+
+ 'traits: for t in traits {
+ let data = db.trait_data(t);
+
+ // Traits annotated with `#[rustc_skip_array_during_method_dispatch]` are skipped
+ // during method resolution if the receiver is an array and we're compiling for an
+ // edition before 2021.
+ // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for
+ // arrays.
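+ // For example, on edition 2018, `[1, 2, 3].into_iter()` should still resolve
+ // via the `&[T; N]` impl (yielding references) rather than the edition-2021
+ // by-value `IntoIterator for [T; N]` impl.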
+ if data.skip_array_during_method_dispatch && self_is_array {
+ // FIXME: this should really be using the edition of the method name's span, in case it
+ // comes from a macro
+ if db.crate_graph()[env.krate].edition < Edition::Edition2021 {
+ continue;
+ }
+ }
+
+ // we'll be lazy about checking whether the type implements the
+ // trait, but if we find out it doesn't, we'll skip the rest of the
+ // iteration
+ let mut known_implemented = false;
+ for &(_, item) in data.items.iter() {
+ // Don't pass a `visible_from_module` down to `is_valid_candidate`,
+ // since only inherent methods should be subject to visibility checking.
+ if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) {
+ continue;
+ }
+ if !known_implemented {
+ let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
+ if db.trait_solve(env.krate, goal.cast(Interner)).is_none() {
+ continue 'traits;
+ }
+ }
+ known_implemented = true;
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+ }
+ }
+ ControlFlow::Continue(())
+}
+
+fn iterate_inherent_methods(
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ visible_from_module: VisibleFromModule,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let db = table.db;
+ let env = table.trait_env.clone();
+ let def_crates = match def_crates(db, self_ty, env.krate) {
+ Some(k) => k,
+ None => return ControlFlow::Continue(()),
+ };
+
+ let (module, block) = match visible_from_module {
+ VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
+ VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
+ VisibleFromModule::None => (None, None),
+ };
+
+ if let Some(block_id) = block {
+ if let Some(impls) = db.inherent_impls_in_block(block_id) {
+ impls_for_self_ty(
+ &impls,
+ self_ty,
+ table,
+ name,
+ receiver_ty,
+ receiver_adjustments.clone(),
+ module,
+ callback,
+ )?;
+ }
+ }
+
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+ impls_for_self_ty(
+ &impls,
+ self_ty,
+ table,
+ name,
+ receiver_ty,
+ receiver_adjustments.clone(),
+ module,
+ callback,
+ )?;
+ }
+ return ControlFlow::Continue(());
+
+ fn impls_for_self_ty(
+ impls: &InherentImpls,
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ visible_from_module: Option<ModuleId>,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+ ) -> ControlFlow<()> {
+ let db = table.db;
+ let impls_for_self_ty = impls.for_self_ty(self_ty);
+ for &impl_def in impls_for_self_ty {
+ for &item in &db.impl_data(impl_def).items {
+ if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module)
+ {
+ continue;
+ }
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+ }
+ }
+ ControlFlow::Continue(())
+ }
+}
+
+/// Returns the receiver adjustments for an indexing operation, if some type in
+/// the autoderef chain of `ty` implements the given index trait.
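+///
+/// For example, for `v[0]` with `v: &&Vec<u8>`, the autoderef chain is
+/// `&&Vec<u8>`, `&Vec<u8>`, `Vec<u8>`, `[u8]`; `Vec<u8>` is the first type that
+/// implements `Index`, so two autoderefs are recorded in the returned adjustments.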
+pub fn resolve_indexing_op(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ty: Canonical<Ty>,
+ index_trait: TraitId,
+) -> Option<ReceiverAdjustments> {
+ let mut table = InferenceTable::new(db, env.clone());
+ let ty = table.instantiate_canonical(ty);
+ let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+ for (ty, adj) in deref_chain.into_iter().zip(adj) {
+ let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
+ if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
+ return Some(adj);
+ }
+ }
+ None
+}
+
+fn is_valid_candidate(
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ item: AssocItemId,
+ self_ty: &Ty,
+ visible_from_module: Option<ModuleId>,
+) -> bool {
+ macro_rules! check_that {
+ ($cond:expr) => {
+ if !$cond {
+ return false;
+ }
+ };
+ }
+
+ let db = table.db;
+ match item {
+ AssocItemId::FunctionId(m) => {
+ let data = db.function_data(m);
+
+ check_that!(name.map_or(true, |n| n == &data.name));
+ check_that!(visible_from_module.map_or(true, |from_module| {
+ let v = db.function_visibility(m).is_visible_from(db.upcast(), from_module);
+ if !v {
+ cov_mark::hit!(autoderef_candidate_not_visible);
+ }
+ v
+ }));
+
+ table.run_in_snapshot(|table| {
+ let subst = TyBuilder::subst_for_def(db, m).fill_with_inference_vars(table).build();
+ let expect_self_ty = match m.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(_) => {
+ subst.at(Interner, 0).assert_ty_ref(Interner).clone()
+ }
+ ItemContainerId::ImplId(impl_id) => {
+ subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
+ }
+ // We should only get called for associated items (impl/trait)
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+ unreachable!()
+ }
+ };
+ check_that!(table.unify(&expect_self_ty, self_ty));
+ if let Some(receiver_ty) = receiver_ty {
+ check_that!(data.has_self_param());
+
+ let sig = db.callable_item_signature(m.into());
+ let expected_receiver =
+ sig.map(|s| s.params()[0].clone()).substitute(Interner, &subst);
+
+ check_that!(table.unify(&receiver_ty, &expected_receiver));
+ }
+ true
+ })
+ }
+ AssocItemId::ConstId(c) => {
+ let data = db.const_data(c);
+ check_that!(receiver_ty.is_none());
+
+ check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n)));
+ check_that!(visible_from_module.map_or(true, |from_module| {
+ let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module);
+ if !v {
+ cov_mark::hit!(const_candidate_not_visible);
+ }
+ v
+ }));
+ if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container {
+ let self_ty_matches = table.run_in_snapshot(|table| {
+ let subst =
+ TyBuilder::subst_for_def(db, c).fill_with_inference_vars(table).build();
+ let expected_self_ty =
+ subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner);
+ table.unify(&expected_self_ty, &self_ty)
+ });
+ if !self_ty_matches {
+ cov_mark::hit!(const_candidate_self_type_mismatch);
+ return false;
+ }
+ }
+ true
+ }
+ _ => false,
+ }
+}
+
+pub fn implements_trait(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ solution.is_some()
+}
+
+pub fn implements_trait_unique(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ matches!(solution, Some(crate::Solution::Unique(_)))
+}
+
+/// This creates Substs for a trait with the given Self type and type variables
+/// for all other parameters, to query Chalk with it.
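+///
+/// For example, for `trait Into<T>` and a self type `Foo`, the goal is roughly
+/// `Foo: Into<?0>`, canonicalized over one fresh type variable.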
+fn generic_implements_goal(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+ self_ty: &Canonical<Ty>,
+) -> Canonical<InEnvironment<super::DomainGoal>> {
+ let mut kinds = self_ty.binders.interned().to_vec();
+ let trait_ref = TyBuilder::trait_ref(db, trait_)
+ .push(self_ty.value.clone())
+ .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
+ .build();
+ kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
+ let vk = match x.data(Interner) {
+ chalk_ir::GenericArgData::Ty(_) => {
+ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+ }
+ chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
+ chalk_ir::GenericArgData::Const(c) => {
+ chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
+ }
+ };
+ chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
+ }));
+ let obligation = trait_ref.cast(Interner);
+ Canonical {
+ binders: CanonicalVarKinds::from_iter(Interner, kinds),
+ value: InEnvironment::new(&env.env, obligation),
+ }
+}
+
+fn autoderef_method_receiver(
+ table: &mut InferenceTable<'_>,
+ ty: Ty,
+) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
+ let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
+ let mut autoderef = autoderef::Autoderef::new(table, ty);
+ while let Some((ty, derefs)) = autoderef.next() {
+ deref_chain.push(autoderef.table.canonicalize(ty).value);
+ adjustments.push(ReceiverAdjustments {
+ autoref: None,
+ autoderefs: derefs,
+ unsize_array: false,
+ });
+ }
+ // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
+ if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
+ deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
+ adjustments.last().cloned(),
+ ) {
+ let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
+ deref_chain.push(Canonical { value: unsized_ty, binders });
+ adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
+ }
+ (deref_chain, adjustments)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs
new file mode 100644
index 000000000..d7f48c69a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs
@@ -0,0 +1,62 @@
+//! A few helper functions for dealing with primitives.
+
+pub use chalk_ir::{FloatTy, IntTy, UintTy};
+pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint};
+
+pub fn int_ty_to_string(ty: IntTy) -> &'static str {
+ match ty {
+ IntTy::Isize => "isize",
+ IntTy::I8 => "i8",
+ IntTy::I16 => "i16",
+ IntTy::I32 => "i32",
+ IntTy::I64 => "i64",
+ IntTy::I128 => "i128",
+ }
+}
+
+pub fn uint_ty_to_string(ty: UintTy) -> &'static str {
+ match ty {
+ UintTy::Usize => "usize",
+ UintTy::U8 => "u8",
+ UintTy::U16 => "u16",
+ UintTy::U32 => "u32",
+ UintTy::U64 => "u64",
+ UintTy::U128 => "u128",
+ }
+}
+
+pub fn float_ty_to_string(ty: FloatTy) -> &'static str {
+ match ty {
+ FloatTy::F32 => "f32",
+ FloatTy::F64 => "f64",
+ }
+}
+
+pub(super) fn int_ty_from_builtin(t: BuiltinInt) -> IntTy {
+ match t {
+ BuiltinInt::Isize => IntTy::Isize,
+ BuiltinInt::I8 => IntTy::I8,
+ BuiltinInt::I16 => IntTy::I16,
+ BuiltinInt::I32 => IntTy::I32,
+ BuiltinInt::I64 => IntTy::I64,
+ BuiltinInt::I128 => IntTy::I128,
+ }
+}
+
+pub(super) fn uint_ty_from_builtin(t: BuiltinUint) -> UintTy {
+ match t {
+ BuiltinUint::Usize => UintTy::Usize,
+ BuiltinUint::U8 => UintTy::U8,
+ BuiltinUint::U16 => UintTy::U16,
+ BuiltinUint::U32 => UintTy::U32,
+ BuiltinUint::U64 => UintTy::U64,
+ BuiltinUint::U128 => UintTy::U128,
+ }
+}
+
+pub(super) fn float_ty_from_builtin(t: BuiltinFloat) -> FloatTy {
+ match t {
+ BuiltinFloat::F32 => FloatTy::F32,
+ BuiltinFloat::F64 => FloatTy::F64,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
new file mode 100644
index 000000000..dc7252f70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -0,0 +1,150 @@
+//! Database used for testing `hir`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{
+ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+};
+use hir_def::{db::DefDatabase, ModuleId};
+use hir_expand::db::AstDatabase;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::TextRange;
+use test_utils::extract_annotations;
+
+#[salsa::database(
+ base_db::SourceDatabaseExtStorage,
+ base_db::SourceDatabaseStorage,
+ hir_expand::db::AstDatabaseStorage,
+ hir_def::db::InternDatabaseStorage,
+ hir_def::db::DefDatabaseStorage,
+ crate::db::HirDatabaseStorage
+)]
+pub(crate) struct TestDB {
+ storage: salsa::Storage<TestDB>,
+ events: Mutex<Option<Vec<salsa::Event>>>,
+}
+
+impl Default for TestDB {
+ fn default() -> Self {
+ let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.set_enable_proc_attr_macros(true);
+ this
+ }
+}
+
+impl fmt::Debug for TestDB {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TestDB").finish()
+ }
+}
+
+impl Upcast<dyn AstDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+ &*self
+ }
+}
+
+impl Upcast<dyn DefDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+ &*self
+ }
+}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
+impl salsa::ParallelDatabase for TestDB {
+ fn snapshot(&self) -> salsa::Snapshot<TestDB> {
+ salsa::Snapshot::new(TestDB {
+ storage: self.storage.snapshot(),
+ events: Default::default(),
+ })
+ }
+}
+
+impl panic::RefUnwindSafe for TestDB {}
+
+impl FileLoader for TestDB {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ FileLoaderDelegate(self).file_text(file_id)
+ }
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ FileLoaderDelegate(self).resolve_path(path)
+ }
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ FileLoaderDelegate(self).relevant_crates(file_id)
+ }
+}
+
+impl TestDB {
+ pub(crate) fn module_for_file_opt(&self, file_id: FileId) -> Option<ModuleId> {
+ for &krate in self.relevant_crates(file_id).iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (local_id, data) in crate_def_map.modules() {
+ if data.origin.file_id() == Some(file_id) {
+ return Some(crate_def_map.module_id(local_id));
+ }
+ }
+ }
+ None
+ }
+
+ pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
+ self.module_for_file_opt(file_id).unwrap()
+ }
+
+ pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
+ let mut files = Vec::new();
+ let crate_graph = self.crate_graph();
+ for krate in crate_graph.iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (module_id, _) in crate_def_map.modules() {
+ let file_id = crate_def_map[module_id].origin.file_id();
+ files.extend(file_id)
+ }
+ }
+ files
+ .into_iter()
+ .filter_map(|file_id| {
+ let text = self.file_text(file_id);
+ let annotations = extract_annotations(&text);
+ if annotations.is_empty() {
+ return None;
+ }
+ Some((file_id, annotations))
+ })
+ .collect()
+ }
+}
+
+impl TestDB {
+ pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
+ *self.events.lock().unwrap() = Some(Vec::new());
+ f();
+ self.events.lock().unwrap().take().unwrap()
+ }
+
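+ // Returns the `Debug` names of the queries that actually executed while `f`
+ // ran, e.g. to check which queries are re-run after a change to a file.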
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ let events = self.log(f);
+ events
+ .into_iter()
+ .filter_map(|e| match e.kind {
+ // This is pretty horrible, but `Debug` is the only way to inspect
+ // QueryDescriptor at the moment.
+ salsa::EventKind::WillExecute { database_key } => {
+ Some(format!("{:?}", database_key.debug(self)))
+ }
+ _ => None,
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
new file mode 100644
index 000000000..d2f13e435
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -0,0 +1,578 @@
+mod never_type;
+mod coercion;
+mod regression;
+mod simple;
+mod patterns;
+mod traits;
+mod method_resolution;
+mod macros;
+mod display_source_code;
+mod incremental;
+mod diagnostics;
+
+use std::{collections::HashMap, env, sync::Arc};
+
+use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt};
+use expect_test::Expect;
+use hir_def::{
+ body::{Body, BodySourceMap, SyntheticSyntax},
+ db::DefDatabase,
+ expr::{ExprId, PatId},
+ item_scope::ItemScope,
+ nameres::DefMap,
+ src::HasSource,
+ AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
+};
+use hir_expand::{db::AstDatabase, InFile};
+use once_cell::race::OnceBool;
+use stdx::format_to;
+use syntax::{
+ ast::{self, AstNode, HasName},
+ SyntaxNode,
+};
+use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+use tracing_tree::HierarchicalLayer;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ infer::{Adjustment, TypeMismatch},
+ test_db::TestDB,
+ InferenceResult, Ty,
+};
+
+// These tests compare the inference results for all expressions in a file
+// against snapshots of the expected results, using the `expect_test` crate. Use
+// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
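+//
+// Annotations in the fixtures use caret markers pointing at the line above,
+// as in the `coercion` tests below:
+//
+//     let a: i32 = { 1i64 };
+//         // ^^^^ expected i32, got i64
+//
+// `check_types` treats every annotation as a type; `check` requires a
+// `type: `, `expected`, or `adjustments: ` prefix (see `check_impl`).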
+
+fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
+ static ENABLE: OnceBool = OnceBool::new();
+ if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) {
+ return None;
+ }
+
+ let filter = EnvFilter::from_env("CHALK_DEBUG");
+ let layer = HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(std::io::stderr);
+ let subscriber = Registry::default().with(filter).with(layer);
+ Some(tracing::subscriber::set_default(subscriber))
+}
+
+fn check_types(ra_fixture: &str) {
+ check_impl(ra_fixture, false, true, false)
+}
+
+fn check_types_source_code(ra_fixture: &str) {
+ check_impl(ra_fixture, false, true, true)
+}
+
+fn check_no_mismatches(ra_fixture: &str) {
+ check_impl(ra_fixture, true, false, false)
+}
+
+fn check(ra_fixture: &str) {
+ check_impl(ra_fixture, false, false, false)
+}
+
+fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
+ let _tracing = setup_tracing();
+ let (db, files) = TestDB::with_many_files(ra_fixture);
+
+ let mut had_annotations = false;
+ let mut mismatches = HashMap::new();
+ let mut types = HashMap::new();
+ let mut adjustments = HashMap::<_, Vec<_>>::new();
+ for (file_id, annotations) in db.extract_annotations() {
+ for (range, expected) in annotations {
+ let file_range = FileRange { file_id, range };
+ if only_types {
+ types.insert(file_range, expected);
+ } else if expected.starts_with("type: ") {
+ types.insert(file_range, expected.trim_start_matches("type: ").to_string());
+ } else if expected.starts_with("expected") {
+ mismatches.insert(file_range, expected);
+ } else if expected.starts_with("adjustments: ") {
+ adjustments.insert(
+ file_range,
+ expected
+ .trim_start_matches("adjustments: ")
+ .split(',')
+ .map(|it| it.trim().to_string())
+ .filter(|it| !it.is_empty())
+ .collect(),
+ );
+ } else {
+ panic!("unexpected annotation: {}", expected);
+ }
+ had_annotations = true;
+ }
+ }
+ assert!(had_annotations || allow_none, "no `//^` annotations found");
+
+ let mut defs: Vec<DefWithBodyId> = Vec::new();
+ for file_id in files {
+ let module = db.module_for_file_opt(file_id);
+ let module = match module {
+ Some(m) => m,
+ None => continue,
+ };
+ let def_map = module.def_map(&db);
+ visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+ }
+ defs.sort_by_key(|def| match def {
+ DefWithBodyId::FunctionId(it) => {
+ let loc = it.lookup(&db);
+ loc.source(&db).value.syntax().text_range().start()
+ }
+ DefWithBodyId::ConstId(it) => {
+ let loc = it.lookup(&db);
+ loc.source(&db).value.syntax().text_range().start()
+ }
+ DefWithBodyId::StaticId(it) => {
+ let loc = it.lookup(&db);
+ loc.source(&db).value.syntax().text_range().start()
+ }
+ });
+ let mut unexpected_type_mismatches = String::new();
+ for def in defs {
+ let (_body, body_source_map) = db.body_with_source_map(def);
+ let inference_result = db.infer(def);
+
+ for (pat, ty) in inference_result.type_of_pat.iter() {
+ let node = match pat_node(&body_source_map, pat, &db) {
+ Some(value) => value,
+ None => continue,
+ };
+ let range = node.as_ref().original_file_range(&db);
+ if let Some(expected) = types.remove(&range) {
+ let actual = if display_source {
+ ty.display_source_code(&db, def.module(&db)).unwrap()
+ } else {
+ ty.display_test(&db).to_string()
+ };
+ assert_eq!(actual, expected);
+ }
+ }
+
+ for (expr, ty) in inference_result.type_of_expr.iter() {
+ let node = match expr_node(&body_source_map, expr, &db) {
+ Some(value) => value,
+ None => continue,
+ };
+ let range = node.as_ref().original_file_range(&db);
+ if let Some(expected) = types.remove(&range) {
+ let actual = if display_source {
+ ty.display_source_code(&db, def.module(&db)).unwrap()
+ } else {
+ ty.display_test(&db).to_string()
+ };
+ assert_eq!(actual, expected);
+ }
+ if let Some(expected) = adjustments.remove(&range) {
+ if let Some(adjustments) = inference_result.expr_adjustments.get(&expr) {
+ assert_eq!(
+ expected,
+ adjustments
+ .iter()
+ .map(|Adjustment { kind, .. }| format!("{:?}", kind))
+ .collect::<Vec<_>>()
+ );
+ } else {
+ panic!("expected {:?} adjustments, found none", expected);
+ }
+ }
+ }
+
+ for (pat, mismatch) in inference_result.pat_type_mismatches() {
+ let node = match pat_node(&body_source_map, pat, &db) {
+ Some(value) => value,
+ None => continue,
+ };
+ let range = node.as_ref().original_file_range(&db);
+ let actual = format!(
+ "expected {}, got {}",
+ mismatch.expected.display_test(&db),
+ mismatch.actual.display_test(&db)
+ );
+ match mismatches.remove(&range) {
+ Some(annotation) => assert_eq!(actual, annotation),
+ None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
+ }
+ }
+ for (expr, mismatch) in inference_result.expr_type_mismatches() {
+ let node = match body_source_map.expr_syntax(expr) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+ }
+ Err(SyntheticSyntax) => continue,
+ };
+ let range = node.as_ref().original_file_range(&db);
+ let actual = format!(
+ "expected {}, got {}",
+ mismatch.expected.display_test(&db),
+ mismatch.actual.display_test(&db)
+ );
+ match mismatches.remove(&range) {
+ Some(annotation) => assert_eq!(actual, annotation),
+ None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
+ }
+ }
+ }
+
+ let mut buf = String::new();
+ if !unexpected_type_mismatches.is_empty() {
+ format_to!(buf, "Unexpected type mismatches:\n{}", unexpected_type_mismatches);
+ }
+ if !mismatches.is_empty() {
+ format_to!(buf, "Unchecked mismatch annotations:\n");
+ for m in mismatches {
+ format_to!(buf, "{:?}: {}\n", m.0.range, m.1);
+ }
+ }
+ if !types.is_empty() {
+ format_to!(buf, "Unchecked type annotations:\n");
+ for t in types {
+ format_to!(buf, "{:?}: type {}\n", t.0.range, t.1);
+ }
+ }
+ if !adjustments.is_empty() {
+ format_to!(buf, "Unchecked adjustments annotations:\n");
+ for t in adjustments {
+ format_to!(buf, "{:?}: type {:?}\n", t.0.range, t.1);
+ }
+ }
+ assert!(buf.is_empty(), "{}", buf);
+}
+
+fn expr_node(
+ body_source_map: &BodySourceMap,
+ expr: ExprId,
+ db: &TestDB,
+) -> Option<InFile<SyntaxNode>> {
+ Some(match body_source_map.expr_syntax(expr) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+ }
+ Err(SyntheticSyntax) => return None,
+ })
+}
+
+fn pat_node(
+ body_source_map: &BodySourceMap,
+ pat: PatId,
+ db: &TestDB,
+) -> Option<InFile<SyntaxNode>> {
+ Some(match body_source_map.pat_syntax(pat) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| {
+ ptr.either(
+ |it| it.to_node(&root).syntax().clone(),
+ |it| it.to_node(&root).syntax().clone(),
+ )
+ })
+ }
+ Err(SyntheticSyntax) => return None,
+ })
+}
+
+fn infer(ra_fixture: &str) -> String {
+ infer_with_mismatches(ra_fixture, false)
+}
+
+fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
+ let _tracing = setup_tracing();
+ let (db, file_id) = TestDB::with_single_file(content);
+
+ let mut buf = String::new();
+
+ let mut infer_def = |inference_result: Arc<InferenceResult>,
+ body_source_map: Arc<BodySourceMap>| {
+ let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
+ let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
+
+ for (pat, ty) in inference_result.type_of_pat.iter() {
+ let syntax_ptr = match body_source_map.pat_syntax(pat) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| {
+ ptr.either(
+ |it| it.to_node(&root).syntax().clone(),
+ |it| it.to_node(&root).syntax().clone(),
+ )
+ })
+ }
+ Err(SyntheticSyntax) => continue,
+ };
+ types.push((syntax_ptr.clone(), ty));
+ if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
+ mismatches.push((syntax_ptr, mismatch));
+ }
+ }
+
+ for (expr, ty) in inference_result.type_of_expr.iter() {
+ let node = match body_source_map.expr_syntax(expr) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+ }
+ Err(SyntheticSyntax) => continue,
+ };
+ types.push((node.clone(), ty));
+ if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
+ mismatches.push((node, mismatch));
+ }
+ }
+
+ // sort ranges for consistency
+ types.sort_by_key(|(node, _)| {
+ let range = node.value.text_range();
+ (range.start(), range.end())
+ });
+ for (node, ty) in &types {
+ let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
+ (self_param.name().unwrap().syntax().text_range(), "self".to_string())
+ } else {
+ (node.value.text_range(), node.value.text().to_string().replace('\n', " "))
+ };
+ let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
+ format_to!(
+ buf,
+ "{}{:?} '{}': {}\n",
+ macro_prefix,
+ range,
+ ellipsize(text, 15),
+ ty.display_test(&db)
+ );
+ }
+ if include_mismatches {
+ mismatches.sort_by_key(|(node, _)| {
+ let range = node.value.text_range();
+ (range.start(), range.end())
+ });
+ for (src_ptr, mismatch) in &mismatches {
+ let range = src_ptr.value.text_range();
+ let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
+ format_to!(
+ buf,
+ "{}{:?}: expected {}, got {}\n",
+ macro_prefix,
+ range,
+ mismatch.expected.display_test(&db),
+ mismatch.actual.display_test(&db),
+ );
+ }
+ }
+ };
+
+ let module = db.module_for_file(file_id);
+ let def_map = module.def_map(&db);
+
+ let mut defs: Vec<DefWithBodyId> = Vec::new();
+ visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+ defs.sort_by_key(|def| match def {
+ DefWithBodyId::FunctionId(it) => {
+ let loc = it.lookup(&db);
+ loc.source(&db).value.syntax().text_range().start()
+ }
+ DefWithBodyId::ConstId(it) => {
+ let loc = it.lookup(&db);
+ loc.source(&db).value.syntax().text_range().start()
+ }
+ DefWithBodyId::StaticId(it) => {
+ let loc = it.lookup(&db);
+ loc.source(&db).value.syntax().text_range().start()
+ }
+ });
+ for def in defs {
+ let (_body, source_map) = db.body_with_source_map(def);
+ let infer = db.infer(def);
+ infer_def(infer, source_map);
+ }
+
+ buf.truncate(buf.trim_end().len());
+ buf
+}
+
+fn visit_module(
+ db: &TestDB,
+ crate_def_map: &DefMap,
+ module_id: LocalModuleId,
+ cb: &mut dyn FnMut(DefWithBodyId),
+) {
+ visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
+ for impl_id in crate_def_map[module_id].scope.impls() {
+ let impl_data = db.impl_data(impl_id);
+ for &item in impl_data.items.iter() {
+ match item {
+ AssocItemId::FunctionId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_body(db, &body, cb);
+ }
+ AssocItemId::ConstId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_body(db, &body, cb);
+ }
+ AssocItemId::TypeAliasId(_) => (),
+ }
+ }
+ }
+
+ fn visit_scope(
+ db: &TestDB,
+ crate_def_map: &DefMap,
+ scope: &ItemScope,
+ cb: &mut dyn FnMut(DefWithBodyId),
+ ) {
+ for decl in scope.declarations() {
+ match decl {
+ ModuleDefId::FunctionId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_body(db, &body, cb);
+ }
+ ModuleDefId::ConstId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_body(db, &body, cb);
+ }
+ ModuleDefId::StaticId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_body(db, &body, cb);
+ }
+ ModuleDefId::TraitId(it) => {
+ let trait_data = db.trait_data(it);
+ for &(_, item) in trait_data.items.iter() {
+ match item {
+ AssocItemId::FunctionId(it) => cb(it.into()),
+ AssocItemId::ConstId(it) => cb(it.into()),
+ AssocItemId::TypeAliasId(_) => (),
+ }
+ }
+ }
+ ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
+ _ => (),
+ }
+ }
+ }
+
+ fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(DefWithBodyId)) {
+ for (_, def_map) in body.blocks(db) {
+ for (mod_id, _) in def_map.modules() {
+ visit_module(db, &def_map, mod_id, cb);
+ }
+ }
+ }
+}
+
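+// Truncates the middle of `text` so that at most `max_len` characters remain,
+// e.g. `ellipsize("some_long_identifier".to_string(), 15)` gives "some_l...tifier".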
+fn ellipsize(mut text: String, max_len: usize) -> String {
+ if text.len() <= max_len {
+ return text;
+ }
+ let ellipsis = "...";
+ let e_len = ellipsis.len();
+ let mut prefix_len = (max_len - e_len) / 2;
+ while !text.is_char_boundary(prefix_len) {
+ prefix_len += 1;
+ }
+ let mut suffix_len = max_len - e_len - prefix_len;
+ while !text.is_char_boundary(text.len() - suffix_len) {
+ suffix_len += 1;
+ }
+ text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
+ text
+}
+
+fn check_infer(ra_fixture: &str, expect: Expect) {
+ let mut actual = infer(ra_fixture);
+ actual.push('\n');
+ expect.assert_eq(&actual);
+}
+
+fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
+ let mut actual = infer_with_mismatches(ra_fixture, true);
+ actual.push('\n');
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn salsa_bug() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+ //- /lib.rs
+ trait Index {
+ type Output;
+ }
+
+ type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+ pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+ }
+
+ pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+ }
+
+ fn main() {
+ let x = 1;
+ x.push(1);$0
+ }
+ ",
+ );
+
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+
+ let new_text = "
+ //- /lib.rs
+ trait Index {
+ type Output;
+ }
+
+ type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+ pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+ }
+
+ pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+ }
+
+ fn main() {
+
+ let x = 1;
+ x.push(1);
+ }
+ "
+ .to_string();
+
+ db.set_file_text(pos.file_id, Arc::new(new_text));
+
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
new file mode 100644
index 000000000..bf59fadc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
@@ -0,0 +1,755 @@
+use super::{check, check_no_mismatches, check_types};
+
+#[test]
+fn block_expr_type_mismatch() {
+ check(
+ r"
+fn test() {
+ let a: i32 = { 1i64 };
+ // ^^^^ expected i32, got i64
+}
+ ",
+ );
+}
+
+#[test]
+fn coerce_places() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct S<T> { a: T }
+
+fn f<T>(_: &[T]) -> T { loop {} }
+fn g<T>(_: S<&[T]>) -> T { loop {} }
+
+fn gen<T>() -> *mut [T; 2] { loop {} }
+fn test1<U>() -> *mut [U] {
+ gen()
+}
+
+fn test2() {
+ let arr: &[u8; 1] = &[1];
+
+ let a: &[_] = arr;
+ let b = f(arr);
+ let c: &[_] = { arr };
+ let d = g(S { a: arr });
+ let e: [&[_]; 1] = [arr];
+ let f: [&[_]; 2] = [arr; 2];
+ let g: (&[_], &[_]) = (arr, arr);
+}
+"#,
+ );
+}
+
+#[test]
+fn let_stmt_coerce() {
+ check(
+ r"
+//- minicore: coerce_unsized
+fn test() {
+ let x: &[isize] = &[1];
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ let x: *const [isize] = &[1];
+ // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
+}
+",
+ );
+}
+
+#[test]
+fn custom_coerce_unsized() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct A<T: ?Sized>(*const T);
+struct B<T: ?Sized>(*const T);
+struct C<T: ?Sized> { inner: *const T }
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<C<U>> for C<T> {}
+
+fn foo1<T>(x: A<[T]>) -> A<[T]> { x }
+fn foo2<T>(x: B<[T]>) -> B<[T]> { x }
+fn foo3<T>(x: C<[T]>) -> C<[T]> { x }
+
+fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
+ let d = foo1(a);
+ // ^ expected A<[{unknown}]>, got A<[u8; 2]>
+ let e = foo2(b);
+ // ^ type: B<[u8]>
+ let f = foo3(c);
+ // ^ type: C<[u8]>
+}
+"#,
+ );
+}
+
+#[test]
+fn if_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test() {
+ let x = if true {
+ foo(&[1])
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ } else {
+ &[1]
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn if_else_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test() {
+ let x = if true {
+ &[1]
+ } else {
+ foo(&[1])
+ };
+}
+"#,
+ )
+}
+
+#[test]
+fn match_first_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test(i: i32) {
+ let x = match i {
+ 2 => foo(&[2]),
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ 1 => &[1],
+ _ => &[3],
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_second_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ // ^^^^^^^ adjustments: NeverToAny
+fn test(i: i32) {
+ let x = match i {
+ 1 => &[1],
+ 2 => foo(&[2]),
+ _ => &[3],
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_merge_one_by_one1() {
+ cov_mark::check!(coerce_merge_fail_fallback);
+
+ check(
+ r"
+fn test() {
+ let t = &mut 1;
+ let x = match 1 {
+ 1 => t as *mut i32,
+ 2 => t as &i32,
+ //^^^^^^^^^ expected *mut i32, got &i32
+ _ => t as *const i32,
+ // ^^^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
+
+ };
+ x;
+ //^ type: *const i32
+
+}
+ ",
+ );
+}
+
+#[test]
+fn return_coerce_unknown() {
+ check_types(
+ r"
+fn foo() -> u32 {
+ return unknown;
+ //^^^^^^^ u32
+}
+ ",
+ );
+}
+
+#[test]
+fn coerce_autoderef() {
+ check_no_mismatches(
+ r"
+struct Foo;
+fn takes_ref_foo(x: &Foo) {}
+fn test() {
+ takes_ref_foo(&Foo);
+ takes_ref_foo(&&Foo);
+ takes_ref_foo(&&&Foo);
+}",
+ );
+}
+
+#[test]
+fn coerce_autoderef_generic() {
+ check_no_mismatches(
+ r#"
+struct Foo;
+fn takes_ref<T>(x: &T) -> T { *x }
+fn test() {
+ takes_ref(&Foo);
+ takes_ref(&&Foo);
+ takes_ref(&&&Foo);
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_autoderef_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+struct String {}
+impl core::ops::Deref for String { type Target = str; }
+fn takes_ref_str(x: &str) {}
+fn returns_string() -> String { loop {} }
+fn test() {
+ takes_ref_str(&{ returns_string() });
+ // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_autoderef_implication_1() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct Foo<T>;
+impl core::ops::Deref for Foo<u32> { type Target = (); }
+
+fn takes_ref_foo<T>(x: &Foo<T>) {}
+fn test() {
+ let foo = Foo;
+ //^^^ type: Foo<{unknown}>
+ takes_ref_foo(&foo);
+
+ let foo = Foo;
+ //^^^ type: Foo<u32>
+ let _: &() = &foo;
+}",
+ );
+}
+
+#[test]
+fn coerce_autoderef_implication_2() {
+ check(
+ r"
+//- minicore: deref
+struct Foo<T>;
+impl core::ops::Deref for Foo<u32> { type Target = (); }
+
+fn takes_ref_foo<T>(x: &Foo<T>) {}
+fn test() {
+ let foo = Foo;
+ //^^^ type: Foo<{unknown}>
+ let _: &u32 = &Foo;
+ //^^^^ expected &u32, got &Foo<{unknown}>
+}",
+ );
+}
+
+#[test]
+fn closure_return_coerce() {
+ check_no_mismatches(
+ r"
+fn foo() {
+ let x = || {
+ if true {
+ return &1u32;
+ }
+ &&1u32
+ };
+}",
+ );
+}
+
+#[test]
+fn assign_coerce() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct String;
+impl core::ops::Deref for String { type Target = str; }
+fn g(_text: &str) {}
+fn f(text: &str) {
+ let mut text = text;
+ let tmp = String;
+ text = &tmp;
+ g(text);
+}
+",
+ );
+}
+
+#[test]
+fn destructuring_assign_coerce() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct String;
+impl core::ops::Deref for String { type Target = str; }
+fn g(_text: &str) {}
+fn f(text: &str) {
+ let mut text = text;
+ let tmp = String;
+ [text, _] = [&tmp, &tmp];
+ g(text);
+}
+",
+ );
+}
+
+#[test]
+fn coerce_fn_item_to_fn_ptr() {
+ check_no_mismatches(
+ r"
+fn foo(x: u32) -> isize { 1 }
+fn test() {
+ let f: fn(u32) -> isize = foo;
+ // ^^^ adjustments: Pointer(ReifyFnPointer)
+ let f: unsafe fn(u32) -> isize = foo;
+ // ^^^ adjustments: Pointer(ReifyFnPointer)
+}",
+ );
+}
+
+#[test]
+fn coerce_fn_items_in_match_arms() {
+ cov_mark::check!(coerce_fn_reification);
+
+ check_types(
+ r"
+fn foo1(x: u32) -> isize { 1 }
+fn foo2(x: u32) -> isize { 2 }
+fn foo3(x: u32) -> isize { 3 }
+fn test() {
+ let x = match 1 {
+ 1 => foo1,
+ 2 => foo2,
+ _ => foo3,
+ };
+ x;
+ //^ fn(u32) -> isize
+}",
+ );
+}
+
+#[test]
+fn coerce_closure_to_fn_ptr() {
+ check_no_mismatches(
+ r"
+fn test() {
+ let f: fn(u32) -> isize = |x| { 1 };
+}",
+ );
+}
+
+#[test]
+fn coerce_placeholder_ref() {
+ // placeholders should unify, even behind references
+ check_no_mismatches(
+ r"
+struct S<T> { t: T }
+impl<TT> S<TT> {
+ fn get(&self) -> &TT {
+ &self.t
+ }
+}",
+ );
+}
+
+#[test]
+fn coerce_unsize_array() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized
+fn test() {
+ let f: &[usize] = &[1, 2, 3];
+ //^ usize
+}"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_trait_object_simple() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized
+trait Foo<T, U> {}
+trait Bar<U, T, X>: Foo<T, U> {}
+trait Baz<T, X>: Bar<usize, T, X> {}
+
+struct S<T, X>;
+impl<T, X> Foo<T, usize> for S<T, X> {}
+impl<T, X> Bar<usize, T, X> for S<T, X> {}
+impl<T, X> Baz<T, X> for S<T, X> {}
+
+fn test() {
+ let obj: &dyn Baz<i8, i16> = &S;
+ //^ S<i8, i16>
+ let obj: &dyn Bar<_, i8, i16> = &S;
+ //^ S<i8, i16>
+ let obj: &dyn Foo<i8, _> = &S;
+ //^ S<i8, {unknown}>
+}"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_super_trait_cycle() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+trait A {}
+trait B: C + A {}
+trait C: B {}
+trait D: C
+
+struct S;
+impl A for S {}
+impl B for S {}
+impl C for S {}
+impl D for S {}
+
+fn test() {
+ let obj: &dyn D = &S;
+ let obj: &dyn A = &S;
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_generic() {
+ // FIXME: fix the type mismatches here
+ check(
+ r#"
+//- minicore: coerce_unsized
+struct Foo<T> { t: T };
+struct Bar<T>(Foo<T>);
+
+fn test() {
+ let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
+ //^^^^^^^^^ expected [usize], got [usize; 3]
+ let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
+ //^^^^^^^^^ expected [usize], got [usize; 3]
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_apit() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+trait Foo {}
+
+fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
+ let _: &dyn Foo = &f;
+ let _: &dyn Foo = g;
+ //^ expected &dyn Foo, got &impl Foo + ?Sized
+}
+ "#,
+ );
+}
+
+#[test]
+fn two_closures_lub() {
+ check_types(
+ r#"
+fn foo(c: i32) {
+ let add = |a: i32, b: i32| a + b;
+ let sub = |a, b| a - b;
+ //^^^^^^^^^^^^ |i32, i32| -> i32
+ if c > 42 { add } else { sub };
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ fn(i32, i32) -> i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn match_diverging_branch_1() {
+ check_types(
+ r#"
+enum Result<T> { Ok(T), Err }
+fn parse<T>() -> T { loop {} }
+
+fn test() -> i32 {
+ let a = match parse() {
+ Ok(val) => val,
+ Err => return 0,
+ };
+ a
+ //^ i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn match_diverging_branch_2() {
+ // same as 1 except for order of branches
+ check_types(
+ r#"
+enum Result<T> { Ok(T), Err }
+fn parse<T>() -> T { loop {} }
+
+fn test() -> i32 {
+ let a = match parse() {
+ Err => return 0,
+ Ok(val) => val,
+ };
+ a
+ //^ i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn panic_macro() {
+ check_no_mismatches(
+ r#"
+mod panic {
+ #[macro_export]
+ pub macro panic_2015 {
+ () => (
+ $crate::panicking::panic()
+ ),
+ }
+}
+
+mod panicking {
+ pub fn panic() -> ! { loop {} }
+}
+
+#[rustc_builtin_macro = "core_panic"]
+macro_rules! panic {
+ // Expands to either `$crate::panic::panic_2015` or `$crate::panic::panic_2021`
+ // depending on the edition of the caller.
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+}
+
+fn main() {
+ panic!()
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn main() {
+ let foo: &[u32] = &[1, 2];
+ let foo: &[u32] = match true {
+ true => &[1, 2],
+ false => &[1, 2, 3],
+ };
+ let foo: &[u32] = if true {
+ &[1, 2]
+ } else {
+ &[1, 2, 3]
+ };
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct InFile<T>;
+impl<T> InFile<T> {
+ fn with_value<U>(self, value: U) -> InFile<U> { InFile }
+}
+struct RecordField;
+trait AstNode {}
+impl AstNode for RecordField {}
+
+fn takes_dyn(it: InFile<&dyn AstNode>) {}
+
+fn test() {
+ let x: InFile<()> = InFile;
+ let n = &RecordField;
+ takes_dyn(x.with_value(n));
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_3() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+enum Option<T> { Some(T), None }
+struct RecordField;
+trait AstNode {}
+impl AstNode for RecordField {}
+
+fn takes_dyn(it: Option<&dyn AstNode>) {}
+
+fn test() {
+ let x: InFile<()> = InFile;
+ let n = &RecordField;
+ takes_dyn(Option::Some(n));
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_4() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct B<T: ?Sized>(*const T);
+impl<T: ?Sized> B<T> {
+ fn new(t: T) -> Self { B(&t) }
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+
+fn test() {
+ let _: B<[isize]> = B::new({ [1, 2, 3] });
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_array_elems_lub() {
+ check_no_mismatches(
+ r#"
+fn f() {}
+fn g() {}
+
+fn test() {
+ [f, g];
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_type_var() {
+ check_types(
+ r#"
+//- minicore: from, coerce_unsized
+fn test() {
+ let x = ();
+ let _: &() = &x.into();
+} //^^^^^^^^ ()
+"#,
+ )
+}
+
+#[test]
+fn coerce_overloaded_binary_op_rhs() {
+ check_types(
+ r#"
+//- minicore: deref, add
+
+struct String {}
+impl core::ops::Deref for String { type Target = str; }
+
+impl core::ops::Add<&str> for String {
+ type Output = String;
+}
+
+fn test() {
+ let s1 = String {};
+ let s2 = String {};
+ s1 + &s2;
+ //^^^^^^^^ String
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn assign_coerce_struct_fields() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct S;
+trait Tr {}
+impl Tr for S {}
+struct V<T> { t: T }
+
+fn main() {
+ let a: V<&dyn Tr>;
+ a = V { t: &S };
+
+ let mut a: V<&dyn Tr> = V { t: &S };
+ a = V { t: &S };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assign_coerce_struct_fields() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+struct S;
+trait Tr {}
+impl Tr for S {}
+struct V<T> { t: T }
+
+fn main() {
+ let a: V<&dyn Tr>;
+ (a,) = V { t: &S };
+ //^^^^expected V<&S>, got (V<&dyn Tr>,)
+
+ let mut a: V<&dyn Tr> = V { t: &S };
+ (a,) = V { t: &S };
+ //^^^^expected V<&S>, got (V<&dyn Tr>,)
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
new file mode 100644
index 000000000..f00fa9729
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
@@ -0,0 +1,75 @@
+use super::check;
+
+#[test]
+fn function_return_type_mismatch_1() {
+ check(
+ r#"
+fn test() -> &'static str {
+ 5
+ //^ expected &str, got i32
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_2() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ return 1;
+ //^ expected &str, got i32
+ }
+ "ok"
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_3() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ return "ok";
+ }
+ 1
+ //^ expected &str, got i32
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_4() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ "ok"
+ } else {
+ 1
+ //^ expected &str, got i32
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_5() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ 1
+ //^ expected &str, got i32
+ } else {
+ "ok"
+ }
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
new file mode 100644
index 000000000..240942e48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
@@ -0,0 +1,176 @@
+use super::check_types_source_code;
+
+#[test]
+fn qualify_path_to_submodule() {
+ check_types_source_code(
+ r#"
+mod foo {
+ pub struct Foo;
+}
+
+fn bar() {
+ let foo: foo::Foo = foo::Foo;
+ foo;
+} //^^^ foo::Foo
+
+"#,
+ );
+}
+
+#[test]
+fn omit_default_type_parameters() {
+ check_types_source_code(
+ r#"
+struct Foo<T = u8> { t: T }
+fn main() {
+ let foo = Foo { t: 5u8 };
+ foo;
+} //^^^ Foo
+"#,
+ );
+
+ check_types_source_code(
+ r#"
+struct Foo<K, T = u8> { k: K, t: T }
+fn main() {
+ let foo = Foo { k: 400, t: 5u8 };
+ foo;
+} //^^^ Foo<i32>
+"#,
+ );
+}
+
+#[test]
+fn render_raw_ptr_impl_ty() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Unpin {}
+fn foo() -> *const (impl Unpin + Sized) { loop {} }
+fn main() {
+ let foo = foo();
+ foo;
+} //^^^ *const impl Unpin
+"#,
+ );
+}
+
+#[test]
+fn render_dyn_for_ty() {
+ // FIXME
+ check_types_source_code(
+ r#"
+trait Foo<'a> {}
+
+fn foo(foo: &dyn for<'a> Foo<'a>) {}
+ // ^^^ &dyn Foo
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_apit() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+trait Bar<T> {}
+struct S<T>;
+fn test(
+ a: impl Foo,
+ b: impl Foo + Sized,
+ c: &(impl Foo + ?Sized),
+ d: S<impl Foo>,
+ ref_any: &impl ?Sized,
+ empty: impl,
+) {
+ a;
+ //^ impl Foo
+ b;
+ //^ impl Foo
+ c;
+ //^ &impl Foo + ?Sized
+ d;
+ //^ S<impl Foo>
+ ref_any;
+ //^^^^^^^ &impl ?Sized
+ empty;
+} //^^^^^ impl Sized
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_rpit() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo1() -> impl Foo { loop {} }
+fn foo2() -> impl Foo + Sized { loop {} }
+fn foo3() -> impl Foo + ?Sized { loop {} }
+fn test() {
+ let foo = foo1();
+ foo;
+ //^^^ impl Foo
+ let foo = foo2();
+ foo;
+ //^^^ impl Foo
+ let foo = foo3();
+ foo;
+} //^^^ impl Foo + ?Sized
+"#,
+ );
+}
+
+#[test]
+fn parenthesize_ptr_rpit_sized_bounds() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo1() -> *const impl Foo { loop {} }
+fn foo2() -> *const (impl Foo + Sized) { loop {} }
+fn foo3() -> *const (impl Sized + Foo) { loop {} }
+fn foo4() -> *const (impl Foo + ?Sized) { loop {} }
+fn foo5() -> *const (impl ?Sized + Foo) { loop {} }
+fn test() {
+ let foo = foo1();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo2();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo3();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo4();
+ foo;
+ //^^^ *const (impl Foo + ?Sized)
+ let foo = foo5();
+ foo;
+} //^^^ *const (impl Foo + ?Sized)
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_impl_traits_in_fn_signature() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn test(
+ a: fn(impl Foo) -> impl Foo,
+ b: fn(impl Foo + Sized) -> impl Foo + Sized,
+ c: fn(&(impl Foo + ?Sized)) -> &(impl Foo + ?Sized),
+) {
+ a;
+ //^ fn(impl Foo) -> impl Foo
+ b;
+ //^ fn(impl Foo) -> impl Foo
+ c;
+} //^ fn(&impl Foo + ?Sized) -> &impl Foo + ?Sized
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
new file mode 100644
index 000000000..3e08e83e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -0,0 +1,51 @@
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, SourceDatabaseExt};
+
+use crate::{db::HirDatabase, test_db::TestDB};
+
+use super::visit_module;
+
+#[test]
+fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+ //- /lib.rs
+ fn foo() -> i32 {
+ $01 + 1
+ }
+ ",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{:?}", events).contains("infer"))
+ }
+
+ let new_text = "
+ fn foo() -> i32 {
+ 1
+ +
+ 1
+ }
+ "
+ .to_string();
+
+ db.set_file_text(pos.file_id, Arc::new(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
new file mode 100644
index 000000000..a1ab6060e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -0,0 +1,1338 @@
+use expect_test::expect;
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::tests::check_infer_with_mismatches;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn cfg_impl_def() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo cfg:test
+use foo::S as T;
+struct S;
+
+#[cfg(test)]
+impl S {
+ fn foo1(&self) -> i32 { 0 }
+}
+
+#[cfg(not(test))]
+impl S {
+ fn foo2(&self) -> i32 { 0 }
+}
+
+fn test() {
+ let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
+ t;
+} //^ (i32, {unknown}, i32, {unknown})
+
+//- /foo.rs crate:foo
+pub struct S;
+
+#[cfg(not(test))]
+impl S {
+ pub fn foo3(&self) -> i32 { 0 }
+}
+
+#[cfg(test)]
+impl S {
+ pub fn foo4(&self) -> i32 { 0 }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 155..181 '{ ...,2); }': ()
+ 165..166 'x': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn infer_legacy_textual_scoped_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ #[macro_use]
+ mod m {
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ let y = crate::foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 194..250 '{ ...,2); }': ()
+ 204..205 'x': Foo
+ 227..228 'y': {unknown}
+ 231..247 'crate:...!(1,2)': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_qualified_macros_expanded() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => { 42i32 }
+ }
+
+ mod m {
+ pub use super::foo as bar;
+ }
+
+ fn main() {
+ let x = crate::foo!();
+ let y = m::bar!();
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ !0..5 '42i32': i32
+ 110..163 '{ ...!(); }': ()
+ 120..121 'x': i32
+ 147..148 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_def_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro spam() {
+ 1isize
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 39..442 '{ ...!(); }': ()
+ 73..94 'spam!(...am!())': {unknown}
+ 100..119 'for _ ...!() {}': ()
+ 104..105 '_': {unknown}
+ 117..119 '{}': ()
+ 124..134 '|| spam!()': || -> isize
+ 140..156 'while ...!() {}': ()
+ 154..156 '{}': ()
+ 161..174 'break spam!()': !
+ 180..194 'return spam!()': !
+ 200..254 'match ... }': isize
+ 224..225 '_': isize
+ 259..275 'spam!(...am!())': {unknown}
+ 281..303 'Spam {...m!() }': {unknown}
+ 309..325 'spam!(...am!()]': {unknown}
+ 350..366 'spam!(... usize': usize
+ 372..380 '&spam!()': &isize
+ 386..394 '-spam!()': isize
+ 400..416 'spam!(...pam!()': {unknown}
+ 422..439 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_rules_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro_rules! spam {
+ () => (1isize);
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 53..456 '{ ...!(); }': ()
+ 87..108 'spam!(...am!())': {unknown}
+ 114..133 'for _ ...!() {}': ()
+ 118..119 '_': {unknown}
+ 131..133 '{}': ()
+ 138..148 '|| spam!()': || -> isize
+ 154..170 'while ...!() {}': ()
+ 168..170 '{}': ()
+ 175..188 'break spam!()': !
+ 194..208 'return spam!()': !
+ 214..268 'match ... }': isize
+ 238..239 '_': isize
+ 273..289 'spam!(...am!())': {unknown}
+ 295..317 'Spam {...m!() }': {unknown}
+ 323..339 'spam!(...am!()]': {unknown}
+ 364..380 'spam!(... usize': usize
+ 386..394 '&spam!()': &isize
+ 400..408 '-spam!()': isize
+ 414..430 'spam!(...pam!()': {unknown}
+ 436..453 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! id { ($($es:tt)*) => { $($es)* } }
+ fn foo() {
+ id! { let a = (); }
+ }
+ "#,
+ expect![[r#"
+ !0..8 'leta=();': ()
+ !3..4 'a': ()
+ !5..7 '()': ()
+ 57..84 '{ ...); } }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn recurisve_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! ng {
+ ([$($tts:tt)*]) => {
+ $($tts)*;
+ };
+ ([$($tts:tt)*] $head:tt $($rest:tt)*) => {
+ ng! {
+ [$($tts)* $head] $($rest)*
+ }
+ };
+ }
+ fn foo() {
+ ng!([] let a = 3);
+ let b = a;
+ }
+ "#,
+ expect![[r#"
+ !0..7 'leta=3;': ()
+ !0..13 'ng!{[leta=3]}': ()
+ !0..13 'ng!{[leta=]3}': ()
+ !0..13 'ng!{[leta]=3}': ()
+ !0..13 'ng!{[let]a=3}': ()
+ !3..4 'a': i32
+ !5..6 '3': i32
+ 196..237 '{ ...= a; }': ()
+ 229..230 'b': i32
+ 233..234 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_inner_item_macro_rules() {
+ check_infer(
+ r#"
+ macro_rules! mac {
+ () => { mac!($)};
+ ($x:tt) => { macro_rules! blub { () => { 1 }; } };
+ }
+ fn foo() {
+ mac!();
+ let a = blub!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '1': i32
+ !0..7 'mac!($)': ()
+ !0..26 'macro_...>{1};}': ()
+ 107..143 '{ ...!(); }': ()
+ 129..130 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_macro_defining_block_with_items() {
+ check_infer(
+ r#"
+ macro_rules! foo {
+ () => {{
+ fn bar() -> usize { 0 }
+ bar()
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !15..18 '{0}': usize
+ !16..17 '0': usize
+ !0..24 '{fnbar...bar()}': usize
+ !18..21 'bar': fn bar() -> usize
+ !18..23 'bar()': usize
+ 98..122 '{ ...!(); }': ()
+ 108..110 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_value_macro_having_same_name() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => {
+ mod foo {
+ pub use super::foo;
+ }
+ };
+ ($x:tt) => {
+ $x
+ };
+ }
+
+ foo!();
+
+ fn foo() {
+ let foo = foo::foo!(42i32);
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ 170..205 '{ ...32); }': ()
+ 180..183 'foo': i32
+ "#]],
+ );
+}
+
+#[test]
+fn processes_impls_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ ($ident:ident) => (impl Trait for $ident {})
+}
+trait Trait { fn foo(self) -> u128 { 0 } }
+struct S;
+m!(S);
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ () => (fn foo(&self) -> u128 {0})
+}
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros_chain() {
+ check_types(
+ r#"
+macro_rules! m_inner {
+ () => {fn foo(&self) -> u128 {0}}
+}
+macro_rules! m {
+ () => {m_inner!();}
+}
+
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_expr() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = (foo::foo!(1), foo::foo!(2));
+ x;
+} //^ (i32, usize)
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! foo {
+ (1) => { $crate::bar!() };
+ (2) => { 1 + $crate::baz() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+pub fn baz() -> usize { 31usize }
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::Trait;
+
+fn test() {
+ let msg = foo::Message(foo::MessageRef);
+ let r = msg.deref();
+ r;
+ //^ &MessageRef
+}
+
+//- /lib.rs crate:foo
+pub struct MessageRef;
+pub struct Message(MessageRef);
+
+pub trait Trait {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+#[macro_export]
+macro_rules! expand {
+ () => {
+ impl Trait for Message {
+ type Target = $crate::MessageRef;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ }
+}
+
+expand!();
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_in_def_site() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::expand;
+
+macro_rules! list {
+ ($($tt:tt)*) => { $($tt)* }
+}
+
+fn test() {
+ let r = expand!();
+ r;
+ //^ u128
+}
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! expand {
+ () => { list!($crate::m!()) };
+}
+
+#[macro_export]
+macro_rules! m {
+ () => { 0u128 };
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_type_value_non_legacy_macro_use_as() {
+ check_infer(
+ r#"
+ mod m {
+ macro_rules! _foo {
+ ($x:ident) => { type $x = u64; }
+ }
+ pub(crate) use _foo as foo;
+ }
+
+ m::foo!(foo);
+ use foo as bar;
+ fn f() -> bar { 0 }
+ fn main() {
+ let _a = f();
+ }
+ "#,
+ expect![[r#"
+ 158..163 '{ 0 }': u64
+ 160..161 '0': u64
+ 174..196 '{ ...f(); }': ()
+ 184..186 '_a': u64
+ 190..191 'f': fn f() -> u64
+ 190..193 'f()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_macro() {
+ check_infer(
+ r#"
+ fn main() {
+ macro_rules! foo {
+ () => { 1usize }
+ }
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1usize': usize
+ 10..89 '{ ...!(); }': ()
+ 74..76 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_inner_macros() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = foo::foo!(1);
+ x;
+} //^ i32
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! foo {
+ (1) => { bar!() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_line() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! line {() => {}}
+
+ fn main() {
+ let x = line!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_file() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! file {() => {}}
+
+ fn main() {
+ let x = file!();
+ }
+ "#,
+ expect![[r#"
+ !0..2 '""': &str
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_column() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! column {() => {}}
+
+ fn main() {
+ let x = column!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 65..91 '{ ...!(); }': ()
+ 75..76 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!("hello", concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 65..121 '{ ...")); }': ()
+ 75..76 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ bar();
+} //^^^^^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_expression() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+fn main() {
+ let i = include!("bla.rs");
+ i;
+ //^ i32
+}
+//- /bla.rs
+0
+ "#,
+ )
+}
+
+#[test]
+fn infer_builtin_macros_include_child_mod() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("f/foo.rs");
+
+fn main() {
+ bar::bar();
+} //^^^^^^^^^^ u32
+
+//- /f/foo.rs
+pub mod bar;
+
+//- /f/bar.rs
+pub fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_str() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {() => {}}
+
+fn main() {
+ let a = include_str!("foo.rs");
+ a;
+} //^ &str
+
+//- /foo.rs
+hello
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_str_with_lazy_nested() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+#[rustc_builtin_macro]
+macro_rules! include_str {() => {}}
+
+macro_rules! m {
+ ($x:expr) => {
+ concat!("foo", $x)
+ };
+}
+
+fn main() {
+ let a = include_str!(m!(".rs"));
+ a;
+} //^ &str
+
+//- /foo.rs
+hello
+"#,
+ );
+}
+
+#[test]
+fn benchmark_include_macro() {
+ if skip_slow_tests() {
+ return;
+ }
+ let data = bench_fixture::big_struct();
+ let fixture = r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ RegisterBlock { };
+ //^^^^^^^^^^^^^^^^^ RegisterBlock
+}
+ "#;
+ let fixture = format!("{}\n//- /foo.rs\n{}", fixture, data);
+
+ {
+ let _b = bench("include macro");
+ check_types(&fixture);
+ }
+}
+
+#[test]
+fn infer_builtin_macros_include_concat() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+include!(concat!("f", "oo.rs"));
+
+fn main() {
+ bar();
+} //^^^^^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+
+fn main() {
+ bar();
+} //^^^^^ {unknown}
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_itself_should_failed() {
+ check_types(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("main.rs");
+
+fn main() {
+ 0;
+} //^ i32
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat_with_lazy() {
+ check_infer(
+ r#"
+ macro_rules! hello {() => {"hello"}}
+
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!(hello!(), concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 103..160 '{ ...")); }': ()
+ 113..114 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_env() {
+ check_infer(
+ r#"
+ //- /main.rs env:foo=bar
+ #[rustc_builtin_macro]
+ macro_rules! env {() => {}}
+
+ fn main() {
+ let x = env!("foo");
+ }
+ "#,
+ expect![[r#"
+ !0..22 '"__RA_...TED__"': &str
+ 62..90 '{ ...o"); }': ()
+ 72..73 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_derive_clone_simple() {
+ check_types(
+ r#"
+//- minicore: derive, clone
+#[derive(Clone)]
+struct S;
+fn test() {
+ S.clone();
+} //^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn infer_derive_clone_with_params() {
+ check_types(
+ r#"
+//- minicore: clone, derive
+#[derive(Clone)]
+struct S;
+#[derive(Clone)]
+struct Wrapper<T>(T);
+struct NonClone;
+fn test() {
+ let x = (Wrapper(S).clone(), Wrapper(NonClone).clone());
+ x;
+ //^ (Wrapper<S>, {unknown})
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_custom_derive_simple() {
+ // FIXME: this test currently does nothing
+ check_types(
+ r#"
+//- minicore: derive
+use foo::Foo;
+
+#[derive(Foo)]
+struct S{}
+
+fn test() {
+ S{};
+} //^^^ S
+"#,
+ );
+}
+
+#[test]
+fn macro_in_arm() {
+ check_infer(
+ r#"
+ macro_rules! unit {
+ () => { () };
+ }
+
+ fn main() {
+ let x = match () {
+ unit!() => 92u32,
+ };
+ }
+ "#,
+ expect![[r#"
+ !0..2 '()': ()
+ 51..110 '{ ... }; }': ()
+ 61..62 'x': u32
+ 65..107 'match ... }': u32
+ 71..73 '()': ()
+ 95..100 '92u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn macro_in_type_alias_position() {
+ check_infer(
+ r#"
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ trait Foo {
+ type Ty;
+ }
+
+ impl<T> Foo for T {
+ type Ty = U32!();
+ }
+
+ type TayTo = U32!();
+
+ fn testy() {
+ let a: <() as Foo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 147..196 '{ ...yTo; }': ()
+ 157..158 'a': u32
+ 185..186 'b': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_macro_in_type_alias_position() {
+ check_infer(
+ r#"
+ macro_rules! U32Inner2 {
+ () => { u32 };
+ }
+
+ macro_rules! U32Inner1 {
+ () => { U32Inner2!() };
+ }
+
+ macro_rules! U32 {
+ () => { U32Inner1!() };
+ }
+
+ trait Foo {
+ type Ty;
+ }
+
+ impl<T> Foo for T {
+ type Ty = U32!();
+ }
+
+ type TayTo = U32!();
+
+ fn testy() {
+ let a: <() as Foo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 259..308 '{ ...yTo; }': ()
+ 269..270 'a': u32
+ 297..298 'b': u32
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_alias_position_generics() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ trait Moo {
+ type Ty;
+ }
+
+ impl<T> Moo for T {
+ type Ty = Bar!();
+ }
+
+ type TayTo = Bar!();
+
+ fn main() {
+ let a: <() as Moo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 228..277 '{ ...yTo; }': ()
+ 238..239 'a': Foo<u32, u32>
+ 266..267 'b': Foo<u32, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_position() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ fn main() {
+ let a: Bar!();
+ }
+ "#,
+ expect![[r#"
+ 133..155 '{ ...!(); }': ()
+ 143..144 'a': Foo<u32, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_generics() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ trait Moo {
+ type Ty;
+ }
+
+ impl<T> Moo for T {
+ type Ty = Foo<Bar!(), Bar!()>;
+ }
+
+ type TayTo = Foo<Bar!(), U32!()>;
+
+ fn main() {
+ let a: <() as Moo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 254..303 '{ ...yTo; }': ()
+ 264..265 'a': Foo<Foo<u32, u32>, Foo<u32, u32>>
+ 292..293 'b': Foo<Foo<u32, u32>, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infinitely_recursive_macro_type() {
+ check_infer(
+ r#"
+ struct Bar<T, X>(T, X);
+
+ macro_rules! Foo {
+ () => { Foo!() }
+ }
+
+ macro_rules! U32 {
+ () => { u32 }
+ }
+
+ type A = Foo!();
+ type B = Bar<Foo!(), U32!()>;
+
+ fn main() {
+ let a: A;
+ let b: B;
+ }
+ "#,
+ expect![[r#"
+ 166..197 '{ ...: B; }': ()
+ 176..177 'a': {unknown}
+ 190..191 'b': Bar<{unknown}, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tails() {
+ check_infer_with_mismatches(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {}
+
+impl S {
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { } }
+ #[cfg(not(feature = "foo"))]
+ { Self { } }
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'bar': u32
+ 52..170 '{ ... }': S
+ 62..106 '#[cfg(... { } }': S
+ 96..104 'Self { }': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_1() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+#[mac::attr_macro]
+fn foo() {
+ let xxx = 1;
+ //^^^ i32
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_in_impl() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+struct Foo;
+impl Foo {
+ #[mac::attr_macro]
+ fn foo() {
+ let xxx = 1;
+ //^^^ i32
+ }
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_in_trait() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+trait Foo {
+ #[mac::attr_macro]
+ fn foo() {
+ let xxx = 1;
+ //^^^ i32
+ }
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
new file mode 100644
index 000000000..68463dc06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -0,0 +1,1792 @@
+use expect_test::expect;
+
+use crate::tests::check;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_slice_method() {
+ check_types(
+ r#"
+impl<T> [T] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+
+fn test(x: &[u8]) {
+ <[_]>::foo(x);
+ //^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn cross_crate_primitive_method() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = 1f32;
+ x.foo();
+} //^^^^^^^ f32
+
+//- /lib.rs crate:other_crate
+mod foo {
+ impl f32 {
+ pub fn foo(self) -> f32 { 0. }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_array_inherent_impl() {
+ check_types(
+ r#"
+impl<T, const N: usize> [T; N] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+fn test(x: &[u8; 0]) {
+ <[_; 0]>::foo(x);
+ //^^^^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_associated_method_struct() {
+ check_infer(
+ r#"
+ struct A { x: u32 }
+
+ impl A {
+ fn new() -> A {
+ A { x: 0 }
+ }
+ }
+ fn test() {
+ let a = A::new();
+ a.x;
+ }
+ "#,
+ expect![[r#"
+ 48..74 '{ ... }': A
+ 58..68 'A { x: 0 }': A
+ 65..66 '0': u32
+ 87..121 '{ ...a.x; }': ()
+ 97..98 'a': A
+ 101..107 'A::new': fn new() -> A
+ 101..109 'A::new()': A
+ 115..116 'a': A
+ 115..118 'a.x': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_struct_in_local_scope() {
+ check_infer(
+ r#"
+ fn mismatch() {
+ struct A;
+
+ impl A {
+ fn from(_: i32, _: i32) -> Self {
+ A
+ }
+ }
+
+ let _a = A::from(1, 2);
+ }
+ "#,
+ expect![[r#"
+ 14..146 '{ ... 2); }': ()
+ 125..127 '_a': A
+ 130..137 'A::from': fn from(i32, i32) -> A
+ 130..143 'A::from(1, 2)': A
+ 138..139 '1': i32
+ 141..142 '2': i32
+ 60..61 '_': i32
+ 68..69 '_': i32
+ 84..109 '{ ... }': A
+ 98..99 'A': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_enum() {
+ check_infer(
+ r#"
+ enum A { B, C }
+
+ impl A {
+ pub fn b() -> A {
+ A::B
+ }
+ pub fn c() -> A {
+ A::C
+ }
+ }
+ fn test() {
+ let a = A::b();
+ a;
+ let c = A::c();
+ c;
+ }
+ "#,
+ expect![[r#"
+ 46..66 '{ ... }': A
+ 56..60 'A::B': A
+ 87..107 '{ ... }': A
+ 97..101 'A::C': A
+ 120..177 '{ ... c; }': ()
+ 130..131 'a': A
+ 134..138 'A::b': fn b() -> A
+ 134..140 'A::b()': A
+ 146..147 'a': A
+ 157..158 'c': A
+ 161..165 'A::c': fn c() -> A
+ 161..167 'A::c()': A
+ 173..174 'c': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_with_modules() {
+ check_infer(
+ r#"
+ mod a {
+ struct A;
+ impl A { pub fn thing() -> A { A {} }}
+ }
+
+ mod b {
+ struct B;
+ impl B { pub fn thing() -> u32 { 99 }}
+
+ mod c {
+ struct C;
+ impl C { pub fn thing() -> C { C {} }}
+ }
+ }
+ use b::c;
+
+ fn test() {
+ let x = a::A::thing();
+ let y = b::B::thing();
+ let z = c::C::thing();
+ }
+ "#,
+ expect![[r#"
+ 55..63 '{ A {} }': A
+ 57..61 'A {}': A
+ 125..131 '{ 99 }': u32
+ 127..129 '99': u32
+ 201..209 '{ C {} }': C
+ 203..207 'C {}': C
+ 240..324 '{ ...g(); }': ()
+ 250..251 'x': A
+ 254..265 'a::A::thing': fn thing() -> A
+ 254..267 'a::A::thing()': A
+ 277..278 'y': u32
+ 281..292 'b::B::thing': fn thing() -> u32
+ 281..294 'b::B::thing()': u32
+ 304..305 'z': C
+ 308..319 'c::C::thing': fn thing() -> C
+ 308..321 'c::C::thing()': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make(val: T) -> Gen<T> {
+ Gen { val }
+ }
+ }
+
+ fn test() {
+ let a = Gen::make(0u32);
+ }
+ "#,
+ expect![[r#"
+ 63..66 'val': T
+ 81..108 '{ ... }': Gen<T>
+ 91..102 'Gen { val }': Gen<T>
+ 97..100 'val': T
+ 122..154 '{ ...32); }': ()
+ 132..133 'a': Gen<u32>
+ 136..145 'Gen::make': fn make<u32>(u32) -> Gen<u32>
+ 136..151 'Gen::make(0u32)': Gen<u32>
+ 146..150 '0u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make() -> Gen<T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32>::make();
+ }
+ "#,
+ expect![[r#"
+ 75..99 '{ ... }': Gen<T>
+ 85..93 'loop { }': !
+ 90..93 '{ }': ()
+ 113..148 '{ ...e(); }': ()
+ 123..124 'a': Gen<u32>
+ 127..143 'Gen::<...::make': fn make<u32>() -> Gen<u32>
+ 127..145 'Gen::<...make()': Gen<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_2_type_params_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T, U> {
+ val: T,
+ val2: U,
+ }
+
+ impl<T> Gen<u32, T> {
+ pub fn make() -> Gen<u32,T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32, u64>::make();
+ }
+ "#,
+ expect![[r#"
+ 101..125 '{ ... }': Gen<u32, T>
+ 111..119 'loop { }': !
+ 116..119 '{ }': ()
+ 139..179 '{ ...e(); }': ()
+ 149..150 'a': Gen<u32, u64>
+ 153..174 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
+ 153..176 'Gen::<...make()': Gen<u32, u64>
+ "#]],
+ );
+}
+
+#[test]
+fn cross_crate_associated_method_call() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = other_crate::foo::S::thing();
+ x;
+} //^ i128
+
+//- /lib.rs crate:other_crate
+pub mod foo {
+ pub struct S;
+ impl S {
+ pub fn thing() -> i128 { 0 }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_trait_method_simple() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait1 {
+ fn method(&self) -> u32;
+}
+struct S1;
+impl Trait1 for S1 {}
+trait Trait2 {
+ fn method(&self) -> i128;
+}
+struct S2;
+impl Trait2 for S2 {}
+fn test() {
+ S1.method();
+ //^^^^^^^^^^^ u32
+ S2.method(); // -> i128
+ //^^^^^^^^^^^ i128
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_scoped() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+struct S;
+mod foo {
+ pub trait Trait1 {
+ fn method(&self) -> u32;
+ }
+ impl Trait1 for super::S {}
+}
+mod bar {
+ pub trait Trait2 {
+ fn method(&self) -> i128;
+ }
+ impl Trait2 for super::S {}
+}
+
+mod foo_test {
+ use super::S;
+ use super::foo::Trait1;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+ }
+}
+
+mod bar_test {
+ use super::S;
+ use super::bar::Trait2;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ i128
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_1() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S;
+impl Trait<u32> for S {}
+fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_more_params() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T1, T2, T3> {
+ fn method1(&self) -> (T1, T2, T3);
+ fn method2(&self) -> (T3, T2, T1);
+}
+struct S1;
+impl Trait<u8, u16, u32> for S1 {}
+struct S2;
+impl<T> Trait<i8, i16, T> for S2 {}
+fn test() {
+ S1.method1();
+ //^^^^^^^^^^^^ (u8, u16, u32)
+ S1.method2();
+ //^^^^^^^^^^^^ (u32, u16, u8)
+ S2.method1();
+ //^^^^^^^^^^^^ (i8, i16, {unknown})
+ S2.method2();
+ //^^^^^^^^^^^^ ({unknown}, i16, i8)
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_2() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn test() {
+ S(1u32).method();
+ //^^^^^^^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method() {
+ check_infer(
+ r#"
+ trait Default {
+ fn default() -> Self;
+ }
+ struct S;
+ impl Default for S {}
+ fn test() {
+ let s1: S = Default::default();
+ let s2 = S::default();
+ let s3 = <S as Default>::default();
+ }
+ "#,
+ expect![[r#"
+ 86..192 '{ ...t(); }': ()
+ 96..98 's1': S
+ 104..120 'Defaul...efault': fn default<S>() -> S
+ 104..122 'Defaul...ault()': S
+ 132..134 's2': S
+ 137..147 'S::default': fn default<S>() -> S
+ 137..149 'S::default()': S
+ 159..161 's3': S
+ 164..187 '<S as ...efault': fn default<S>() -> S
+ 164..189 '<S as ...ault()': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_1() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> T;
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make();
+ let b = G::<u64>::make();
+ let c: f64 = G::make();
+ }
+ "#,
+ expect![[r#"
+ 126..210 '{ ...e(); }': ()
+ 136..137 'a': u32
+ 140..147 'S::make': fn make<S, u32>() -> u32
+ 140..149 'S::make()': u32
+ 159..160 'b': u64
+ 163..177 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
+ 163..179 'G::<u6...make()': u64
+ 189..190 'c': f64
+ 198..205 'G::make': fn make<G<f64>, f64>() -> f64
+ 198..207 'G::make()': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (T, U);
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make::<i64>();
+ let b: (_, i64) = S::make();
+ let c = G::<u32>::make::<i64>();
+ let d: (u32, _) = G::make::<i64>();
+ let e: (u32, i64) = G::make();
+ }
+ "#,
+ expect![[r#"
+ 134..312 '{ ...e(); }': ()
+ 144..145 'a': (u32, i64)
+ 148..162 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
+ 148..164 'S::mak...i64>()': (u32, i64)
+ 174..175 'b': (u32, i64)
+ 188..195 'S::make': fn make<S, u32, i64>() -> (u32, i64)
+ 188..197 'S::make()': (u32, i64)
+ 207..208 'c': (u32, i64)
+ 211..232 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 211..234 'G::<u3...i64>()': (u32, i64)
+ 244..245 'd': (u32, i64)
+ 258..272 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 258..274 'G::mak...i64>()': (u32, i64)
+ 284..285 'e': (u32, i64)
+ 300..307 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 300..309 'G::make()': (u32, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_3() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<i32> {}
+ fn test() {
+ let a = S::make();
+ }
+ "#,
+ expect![[r#"
+ 100..126 '{ ...e(); }': ()
+ 110..111 'a': (S<i32>, i64)
+ 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
+ 114..123 'S::make()': (S<i32>, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_4() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ impl Trait<i32> for S<u32> {}
+ fn test() {
+ let a: (S<u64>, _) = S::make();
+ let b: (_, i32) = S::make();
+ }
+ "#,
+ expect![[r#"
+ 130..202 '{ ...e(); }': ()
+ 140..141 'a': (S<u64>, i64)
+ 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
+ 157..166 'S::make()': (S<u64>, i64)
+ 176..177 'b': (S<u32>, i32)
+ 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
+ 190..199 'S::make()': (S<u32>, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_5() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (Self, T, U);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ fn test() {
+ let a = <S as Trait<i64>>::make::<u8>();
+ let b: (S<u64>, _, _) = Trait::<i64>::make::<u8>();
+ }
+ "#,
+ expect![[r#"
+ 106..210 '{ ...>(); }': ()
+ 116..117 'a': (S<u64>, i64, u8)
+ 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
+ 161..162 'b': (S<u64>, i64, u8)
+ 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_1() {
+ check_infer(
+ r#"
+ trait Trait {
+ fn method(&self) -> u32;
+ }
+ fn test<T: Trait>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 63..64 't': T
+ 69..88 '{ ...d(); }': ()
+ 75..76 't': T
+ 75..85 't.method()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ fn test<U, T: Trait<U>>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 70..71 't': T
+ 76..95 '{ ...d(); }': ()
+ 82..83 't': T
+ 82..92 't.method()': U
+ "#]],
+ );
+}
+
+#[test]
+fn infer_with_multiple_trait_impls() {
+ check_infer(
+ r#"
+ trait Into<T> {
+ fn into(self) -> T;
+ }
+ struct S;
+ impl Into<u32> for S {}
+ impl Into<u64> for S {}
+ fn test() {
+ let x: u32 = S.into();
+ let y: u64 = S.into();
+ let z = Into::<u64>::into(S);
+ }
+ "#,
+ expect![[r#"
+ 28..32 'self': Self
+ 110..201 '{ ...(S); }': ()
+ 120..121 'x': u32
+ 129..130 'S': S
+ 129..137 'S.into()': u32
+ 147..148 'y': u64
+ 156..157 'S': S
+ 156..164 'S.into()': u64
+ 174..175 'z': u64
+ 178..195 'Into::...::into': fn into<S, u64>(S) -> u64
+ 178..198 'Into::...nto(S)': u64
+ 196..197 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_unify_impl_self_type() {
+ check_types(
+ r#"
+struct S<T>;
+impl S<u32> { fn foo(&self) -> u8 { 0 } }
+impl S<i32> { fn foo(&self) -> i8 { 0 } }
+fn test() { (S::<u32>.foo(), S::<i32>.foo()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (u8, i8)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoref() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_by_value_before_autoref() {
+ check_types(
+ r#"
+trait Clone { fn clone(&self) -> Self; }
+struct S;
+impl Clone for S {}
+impl Clone for &S {}
+fn test() { (S.clone(), (&S).clone(), (&&S).clone()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (S, S, &S)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_ref_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_unsize_array() {
+ check_types(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^^^^^^^ usize
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_from_prelude() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+struct S;
+impl Clone for S {}
+
+fn test() {
+ S.clone();
+ //^^^^^^^^^ S
+}
+
+//- /lib.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub trait Clone {
+ fn clone(&self) -> Self;
+ }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_for_unknown_trait() {
+ // The blanket impl currently applies because we ignore the unresolved where clause
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: UnknownTrait {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ // This is also to make sure that we don't resolve to the foo method just
+ // because that's the only method named foo we can find, which would make
+ // the tests below not work
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: Clone {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T: Clone> Trait for T {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_2() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U> Into<U> for T where U: From<T> {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U: From<T>> Into<U> for T {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_method() {
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub fn new(foo_: T) -> Self {
+ Wrapper(Foo(foo_))
+ }
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub fn new(bar_: T) -> Self {
+ Wrapper(Bar(bar_))
+ }
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::new(1.0);
+ let b = Wrapper::<Bar<f32>>::new(1.0);
+ (a, b);
+ //^^^^^^ (Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_const() {
+ cov_mark::check!(const_candidate_self_type_mismatch);
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub const VALUE: Foo<T>;
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub const VALUE: Bar<T>;
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::VALUE;
+ let b = Wrapper::<Bar<f32>>::VALUE;
+ (a, b);
+ //^^^^^^ (Foo<f32>, Bar<f32>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_encountering_fn_type() {
+ check_types(
+ r#"
+//- /main.rs
+fn foo() {}
+trait FnOnce { fn call(self); }
+fn test() { foo.call(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn super_trait_impl_return_trait_method_resolution() {
+ check_infer(
+ r#"
+ //- minicore: sized
+ trait Base {
+ fn foo(self) -> usize;
+ }
+
+ trait Super : Base {}
+
+ fn base1() -> impl Base { loop {} }
+ fn super1() -> impl Super { loop {} }
+
+ fn test(base2: impl Base, super2: impl Super) {
+ base1().foo();
+ super1().foo();
+ base2.foo();
+ super2.foo();
+ }
+ "#,
+ expect![[r#"
+ 24..28 'self': Self
+ 90..101 '{ loop {} }': !
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 128..139 '{ loop {} }': !
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 149..154 'base2': impl Base
+ 167..173 'super2': impl Super
+ 187..264 '{ ...o(); }': ()
+ 193..198 'base1': fn base1() -> impl Base
+ 193..200 'base1()': impl Base
+ 193..206 'base1().foo()': usize
+ 212..218 'super1': fn super1() -> impl Super
+ 212..220 'super1()': impl Super
+ 212..226 'super1().foo()': usize
+ 232..237 'base2': impl Base
+ 232..243 'base2.foo()': usize
+ 249..255 'super2': impl Super
+ 249..261 'super2.foo()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_non_parameter_type() {
+ check_types(
+ r#"
+mod a {
+ pub trait Foo {
+ fn foo(&self);
+ }
+}
+
+struct Wrapper<T>(T);
+fn foo<T>(t: Wrapper<T>)
+where
+ Wrapper<T>: a::Foo,
+{
+ t.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_3373() {
+ check_types(
+ r#"
+struct A<T>(T);
+
+impl A<i32> {
+ fn from(v: i32) -> A<i32> { A(v) }
+}
+
+fn main() {
+ A::from(3);
+} //^^^^^^^^^^ A<i32>
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_slow() {
+ // this can get quite slow if we set the solver size limit too high
+ check_types(
+ r#"
+trait SendX {}
+
+struct S1; impl SendX for S1 {}
+struct S2; impl SendX for S2 {}
+struct U1;
+
+trait Trait { fn method(self); }
+
+struct X1<A, B> {}
+impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
+
+struct S<B, C> {}
+
+trait FnX {}
+
+impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
+
+fn test() { (S {}).method(); }
+ //^^^^^^^^^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_super_trait_not_in_scope() {
+ check_infer(
+ r#"
+ mod m {
+ pub trait SuperTrait {
+ fn foo(&self) -> u32 { 0 }
+ }
+ }
+ trait Trait: m::SuperTrait {}
+
+ struct S;
+ impl m::SuperTrait for S {}
+ impl Trait for S {}
+
+ fn test(d: &dyn Trait) {
+ d.foo();
+ }
+ "#,
+ expect![[r#"
+ 51..55 'self': &Self
+ 64..69 '{ 0 }': u32
+ 66..67 '0': u32
+ 176..177 'd': &dyn Trait
+ 191..207 '{ ...o(); }': ()
+ 197..198 'd': &dyn Trait
+ 197..204 'd.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_foreign_opaque_type() {
+ check_infer(
+ r#"
+extern "C" {
+ type S;
+ fn f() -> &'static S;
+}
+
+impl S {
+ fn foo(&self) -> bool {
+ true
+ }
+}
+
+fn test() {
+ let s = unsafe { f() };
+ s.foo();
+}
+"#,
+ expect![[r#"
+ 75..79 'self': &S
+ 89..109 '{ ... }': bool
+ 99..103 'true': bool
+ 123..167 '{ ...o(); }': ()
+ 133..134 's': &S
+ 137..151 'unsafe { f() }': &S
+ 137..151 'unsafe { f() }': &S
+ 146..147 'f': fn f() -> &S
+ 146..149 'f()': &S
+ 157..158 's': &S
+ 157..164 's.foo()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn method_with_allocator_box_self_type() {
+ check_types(
+ r#"
+struct Slice<T> {}
+struct Box<T, A> {}
+
+impl<T> Slice<T> {
+ pub fn into_vec<A>(self: Box<Self, A>) { }
+}
+
+fn main() {
+ let foo: Slice<u32>;
+ foo.into_vec(); // we shouldn't crash on this at least
+} //^^^^^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_on_dyn_impl() {
+ check_types(
+ r#"
+trait Foo {}
+
+impl Foo for u32 {}
+impl dyn Foo + '_ {
+ pub fn dyn_foo(&self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let f = &42u32 as &dyn Foo;
+ f.dyn_foo();
+ // ^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn autoderef_visibility_field() {
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().0;
+ // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
+ // ^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn autoderef_visibility_method() {
+ cov_mark::check!(autoderef_candidate_not_visible);
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ impl Foo {
+ pub fn mango(&self) -> char {
+ self.0
+ }
+ }
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ fn mango(&self) -> i32 {
+ self.0
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().mango();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_vs_private_inherent_const() {
+ cov_mark::check!(const_candidate_not_visible);
+ check(
+ r#"
+mod a {
+ pub struct Foo;
+ impl Foo {
+ const VALUE: u32 = 2;
+ }
+ pub trait Trait {
+ const VALUE: usize;
+ }
+ impl Trait for Foo {
+ const VALUE: usize = 3;
+ }
+
+ fn foo() {
+ let x = Foo::VALUE;
+ // ^^^^^^^^^^ type: u32
+ }
+}
+use a::Trait;
+fn foo() {
+ let x = a::Foo::VALUE;
+ // ^^^^^^^^^^^^^ type: usize
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_impl_in_synstructure_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _DERIVE_Tr_: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn inherent_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+const _: () = {
+ impl S {
+ fn method(&self) -> u16 { 0 }
+
+ pub(super) fn super_method(&self) -> u16 { 0 }
+
+ pub(crate) fn crate_method(&self) -> u16 { 0 }
+
+ pub fn pub_method(&self) -> u16 { 0 }
+ }
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+
+ S.super_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.crate_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.pub_method();
+ //^^^^^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_array_methods() {
+ check_types(
+ r#"
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn map<F, U>(self, f: F) -> [U; N]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn map<F, U>(self, f: F) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f() {
+ let v = [1, 2].map::<_, usize>(|x| -> x * 2);
+ v;
+ //^ [usize; 2]
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_method() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f<const C: usize, P>() {
+ let v = [1, 2].my_map::<_, (), 12>(|x| -> x * 2, Const::<12>);
+ v;
+ //^ [(); 12]
+ let v = [1, 2].my_map::<_, P, C>(|x| -> x * 2, Const::<C>);
+ v;
+ //^ [P; C]
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_type_alias() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+type U2 = Const<2>;
+type U5 = Const<5>;
+
+impl U2 {
+ fn f(self) -> Const<12> {
+ loop {}
+ }
+}
+
+impl U5 {
+ fn f(self) -> Const<15> {
+ loop {}
+ }
+}
+
+fn f(x: U2) {
+ let y = x.f();
+ //^ Const<12>
+}
+ "#,
+ );
+}
+
+#[test]
+fn skip_array_during_method_dispatch() {
+ check_types(
+ r#"
+//- /main2018.rs crate:main2018 deps:core
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ &i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /main2021.rs crate:main2021 deps:core edition:2021
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /core.rs crate:core
+#[rustc_skip_array_during_method_dispatch]
+pub trait IntoIterator {
+ type Out;
+ fn into_iter(self) -> Self::Out;
+}
+
+impl<T> IntoIterator for [T; 1] {
+ type Out = T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+impl<'a, T> IntoIterator for &'a [T] {
+ type Out = &'a T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+ "#,
+ );
+}
+
+#[test]
+fn sized_blanket_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Foo { fn foo() -> u8; }
+impl<T: Sized> Foo for T {}
+fn f<S: Sized, T, U: ?Sized>() {
+ u32::foo;
+ S::foo;
+ T::foo;
+ U::foo;
+ <[u32]>::foo;
+}
+"#,
+ expect![[r#"
+ 89..160 '{ ...foo; }': ()
+ 95..103 'u32::foo': fn foo<u32>() -> u8
+ 109..115 'S::foo': fn foo<S>() -> u8
+ 121..127 'T::foo': fn foo<T>() -> u8
+ 133..139 'U::foo': {unknown}
+ 145..157 '<[u32]>::foo': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn local_impl() {
+ check_types(
+ r#"
+fn main() {
+ struct SomeStruct(i32);
+
+ impl SomeStruct {
+ fn is_even(&self) -> bool {
+ self.0 % 2 == 0
+ }
+ }
+
+ let o = SomeStruct(3);
+ let is_even = o.is_even();
+ // ^^^^^^^ bool
+}
+ "#,
+ );
+}
+
+#[test]
+fn deref_fun_1() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), make());
+ let _: usize = (*a1).0;
+ a1;
+ //^^ A<B<usize>, u32>
+
+ let a2 = A(make(), make());
+ a2.thing();
+ //^^^^^^^^^^ isize
+ a2;
+ //^^ A<B<isize>, u32>
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_fun_2() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T> core::ops::Deref for A<C<T>, i32> {
+ type Target = C<T>;
+ fn deref(&self) -> &C<T> { &self.0 }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), 1u32);
+ a1.thing();
+ a1;
+ //^^ A<B<isize>, u32>
+
+ let a2 = A(make(), 1i32);
+ let _: &str = a2.thing();
+ a2;
+ //^^ A<C<&str>, i32>
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_autoref() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn test() {
+ Foo.foo();
+ //^^^ adjustments: Borrow(Ref(Not))
+ (&Foo).foo();
+ // ^^^^ adjustments: ,
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_unsize_array() {
+ // FIXME not quite correct
+ check(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not))
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: sized
+pub trait Into<T>: Sized {
+ fn into(self) -> T;
+}
+impl<T> Into<T> for T {
+ fn into(self) -> T { self }
+}
+
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test(generic_args: impl Into<Foo>) {
+ let generic_args = generic_args.into();
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test() {
+ let generic_args;
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn resolve_minicore_iterator() {
+ check_types(
+ r#"
+//- minicore: iterators, sized
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Option<i32>
+"#,
+ );
+}
+
+#[test]
+fn primitive_assoc_fn_shadowed_by_use() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::u16;
+
+fn f() -> u16 {
+ let x = u16::from_le_bytes();
+ x
+ //^ u16
+}
+
+//- /core.rs crate:core
+pub mod u16 {}
+
+impl u16 {
+ pub fn from_le_bytes() -> Self { 0 }
+}
+ "#,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
new file mode 100644
index 000000000..fbdc8209f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
@@ -0,0 +1,485 @@
+use expect_test::expect;
+
+use super::{check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn infer_never1() {
+ check_types(
+ r#"
+fn test() {
+ let t = return;
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never2() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { a } else { loop {} };
+ a;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never3() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ !
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_in_generic_args() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+} //^ Option<!>
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred1() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ ()
+ if false { a };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred2() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<i32>
+ match 42 {
+ 42 => a,
+ _ => Option::Some(42),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred3() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<&str>
+ match 42 {
+ 42 => a,
+ _ => Option::Some("str"),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_no_arm() {
+ check_types(
+ r#"
+enum Void {}
+
+fn test(a: Void) {
+ let t = match a {};
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_unknown_arm() {
+ check_types(
+ r#"
+fn test(a: Option) {
+ let t = match 0 {
+ _ => unknown,
+ };
+ t;
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn if_never() {
+ check_types(
+ r#"
+fn test() {
+ let i = if true {
+ loop {}
+ } else {
+ 3.0
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn if_else_never() {
+ check_types(
+ r#"
+fn test(input: bool) {
+ let i = if input {
+ 2.0
+ } else {
+ return
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_first_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => return,
+ 2 => 2.0,
+ 3 => loop {},
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_second_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => 3.0,
+ 2 => loop {},
+ 3 => 3.0,
+ _ => return,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_all_arms_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => return,
+ _ => loop {},
+ };
+ i;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_no_never_arms() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => 2.0,
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn diverging_expression_1() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ let x: u32 = return;
+ }
+ fn test2() {
+ let x: u32 = { return; };
+ }
+ fn test3() {
+ let x: u32 = loop {};
+ }
+ fn test4() {
+ let x: u32 = { loop {} };
+ }
+ fn test5() {
+ let x: u32 = { if true { loop {}; } else { loop {}; } };
+ }
+ fn test6() {
+ let x: u32 = { let y: u32 = { loop {}; }; };
+ }
+ ",
+ expect![[r"
+ 11..39 '{ ...urn; }': ()
+ 21..22 'x': u32
+ 30..36 'return': !
+ 51..84 '{ ...; }; }': ()
+ 61..62 'x': u32
+ 70..81 '{ return; }': u32
+ 72..78 'return': !
+ 96..125 '{ ... {}; }': ()
+ 106..107 'x': u32
+ 115..122 'loop {}': !
+ 120..122 '{}': ()
+ 137..170 '{ ...} }; }': ()
+ 147..148 'x': u32
+ 156..167 '{ loop {} }': u32
+ 158..165 'loop {}': !
+ 163..165 '{}': ()
+ 182..246 '{ ...} }; }': ()
+ 192..193 'x': u32
+ 201..243 '{ if t...}; } }': u32
+ 203..241 'if tru... {}; }': u32
+ 206..210 'true': bool
+ 211..223 '{ loop {}; }': u32
+ 213..220 'loop {}': !
+ 218..220 '{}': ()
+ 229..241 '{ loop {}; }': u32
+ 231..238 'loop {}': !
+ 236..238 '{}': ()
+ 258..310 '{ ...; }; }': ()
+ 268..269 'x': u32
+ 277..307 '{ let ...; }; }': u32
+ 283..284 'y': u32
+ 292..304 '{ loop {}; }': u32
+ 294..301 'loop {}': !
+ 299..301 '{}': ()
+ "]],
+ );
+}
+
+#[test]
+fn diverging_expression_2() {
+ check_infer_with_mismatches(
+ r#"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop {}; "foo" };
+ }
+ "#,
+ expect![[r#"
+ 11..84 '{ ..." }; }': ()
+ 54..55 'x': u32
+ 63..81 '{ loop...foo" }': u32
+ 65..72 'loop {}': !
+ 70..72 '{}': ()
+ 74..79 '"foo"': &str
+ 74..79: expected u32, got &str
+ "#]],
+ );
+}
+
+#[test]
+fn diverging_expression_3_break() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop { break; } };
+ }
+ fn test2() {
+ // should give type mismatch
+ let x: u32 = { for a in b { break; }; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b {}; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b { return; }; };
+ }
+ fn test3() {
+ // should give type mismatch
+ let x: u32 = { while true { break; }; };
+ // should give type mismatch as well -- there's an implicit break, even if it's never hit
+ let x: u32 = { while true {}; };
+ // should give type mismatch as well
+ let x: u32 = { while true { return; }; };
+ }
+ ",
+ expect![[r#"
+ 11..85 '{ ...} }; }': ()
+ 54..55 'x': u32
+ 63..82 '{ loop...k; } }': u32
+ 65..80 'loop { break; }': ()
+ 70..80 '{ break; }': ()
+ 72..77 'break': !
+ 65..80: expected u32, got ()
+ 97..343 '{ ...; }; }': ()
+ 140..141 'x': u32
+ 149..175 '{ for ...; }; }': u32
+ 151..172 'for a ...eak; }': ()
+ 155..156 'a': {unknown}
+ 160..161 'b': {unknown}
+ 162..172 '{ break; }': ()
+ 164..169 'break': !
+ 226..227 'x': u32
+ 235..253 '{ for ... {}; }': u32
+ 237..250 'for a in b {}': ()
+ 241..242 'a': {unknown}
+ 246..247 'b': {unknown}
+ 248..250 '{}': ()
+ 304..305 'x': u32
+ 313..340 '{ for ...; }; }': u32
+ 315..337 'for a ...urn; }': ()
+ 319..320 'a': {unknown}
+ 324..325 'b': {unknown}
+ 326..337 '{ return; }': ()
+ 328..334 'return': !
+ 149..175: expected u32, got ()
+ 235..253: expected u32, got ()
+ 313..340: expected u32, got ()
+ 355..654 '{ ...; }; }': ()
+ 398..399 'x': u32
+ 407..433 '{ whil...; }; }': u32
+ 409..430 'while ...eak; }': ()
+ 415..419 'true': bool
+ 420..430 '{ break; }': ()
+ 422..427 'break': !
+ 537..538 'x': u32
+ 546..564 '{ whil... {}; }': u32
+ 548..561 'while true {}': ()
+ 554..558 'true': bool
+ 559..561 '{}': ()
+ 615..616 'x': u32
+ 624..651 '{ whil...; }; }': u32
+ 626..648 'while ...urn; }': ()
+ 632..636 'true': bool
+ 637..648 '{ return; }': ()
+ 639..645 'return': !
+ 407..433: expected u32, got ()
+ 546..564: expected u32, got ()
+ 624..651: expected u32, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn let_else_must_diverge() {
+ check_infer_with_mismatches(
+ r#"
+ fn f() {
+ let 1 = 2 else {
+ return;
+ };
+ }
+ "#,
+ expect![[r#"
+ 7..54 '{ ... }; }': ()
+ 17..18 '1': i32
+ 17..18 '1': i32
+ 21..22 '2': i32
+ 28..51 '{ ... }': !
+ 38..44 'return': !
+ "#]],
+ );
+ check_infer_with_mismatches(
+ r#"
+ fn f() {
+ let 1 = 2 else {};
+ }
+ "#,
+ expect![[r#"
+ 7..33 '{ ... {}; }': ()
+ 17..18 '1': i32
+ 17..18 '1': i32
+ 21..22 '2': i32
+ 28..30 '{}': !
+ 28..30: expected !, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_11837() {
+ check_no_mismatches(
+ r#"
+//- minicore: result
+enum MyErr {
+ Err1,
+ Err2,
+}
+
+fn example_ng() {
+ let value: Result<i32, MyErr> = Ok(3);
+
+ loop {
+ let ret = match value {
+ Ok(value) => value,
+ Err(ref err) => {
+ match err {
+ MyErr::Err1 => break,
+ MyErr::Err2 => continue,
+ };
+ }
+ };
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn issue_11814() {
+ check_no_mismatches(
+ r#"
+fn example() -> bool {
+ match 1 {
+ _ => return true,
+ };
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
new file mode 100644
index 000000000..399553356
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -0,0 +1,991 @@
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_types};
+
+#[test]
+fn infer_pattern() {
+ check_infer(
+ r#"
+ fn test(x: &i32) {
+ let y = x;
+ let &z = x;
+ let a = z;
+ let (c, d) = (1, "hello");
+
+ for (e, f) in some_iter {
+ let g = e;
+ }
+
+ if let [val] = opt {
+ let h = val;
+ }
+
+ if let x @ true = &true {}
+
+ let lambda = |a: u64, b, c: i32| { a + b; c };
+
+ let ref ref_to_x = x;
+ let mut mut_x = x;
+ let ref mut mut_ref_to_x = x;
+ let k = mut_ref_to_x;
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..400 '{ ...o_x; }': ()
+ 27..28 'y': &i32
+ 31..32 'x': &i32
+ 42..44 '&z': &i32
+ 43..44 'z': i32
+ 47..48 'x': &i32
+ 58..59 'a': i32
+ 62..63 'z': i32
+ 73..79 '(c, d)': (i32, &str)
+ 74..75 'c': i32
+ 77..78 'd': &str
+ 82..94 '(1, "hello")': (i32, &str)
+ 83..84 '1': i32
+ 86..93 '"hello"': &str
+ 101..151 'for (e... }': ()
+ 105..111 '(e, f)': ({unknown}, {unknown})
+ 106..107 'e': {unknown}
+ 109..110 'f': {unknown}
+ 115..124 'some_iter': {unknown}
+ 125..151 '{ ... }': ()
+ 139..140 'g': {unknown}
+ 143..144 'e': {unknown}
+ 157..204 'if let... }': ()
+ 160..175 'let [val] = opt': bool
+ 164..169 '[val]': [{unknown}]
+ 165..168 'val': {unknown}
+ 172..175 'opt': [{unknown}]
+ 176..204 '{ ... }': ()
+ 190..191 'h': {unknown}
+ 194..197 'val': {unknown}
+ 210..236 'if let...rue {}': ()
+ 213..233 'let x ... &true': bool
+ 217..225 'x @ true': &bool
+ 221..225 'true': bool
+ 221..225 'true': bool
+ 228..233 '&true': &bool
+ 229..233 'true': bool
+ 234..236 '{}': ()
+ 246..252 'lambda': |u64, u64, i32| -> i32
+ 255..287 '|a: u6...b; c }': |u64, u64, i32| -> i32
+ 256..257 'a': u64
+ 264..265 'b': u64
+ 267..268 'c': i32
+ 275..287 '{ a + b; c }': i32
+ 277..278 'a': u64
+ 277..282 'a + b': u64
+ 281..282 'b': u64
+ 284..285 'c': i32
+ 298..310 'ref ref_to_x': &&i32
+ 313..314 'x': &i32
+ 324..333 'mut mut_x': &i32
+ 336..337 'x': &i32
+ 347..367 'ref mu...f_to_x': &mut &i32
+ 370..371 'x': &i32
+ 381..382 'k': &mut &i32
+ 385..397 'mut_ref_to_x': &mut &i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literal_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn any<T>() -> T { loop {} }
+ fn test(x: &i32) {
+ if let "foo" = any() {}
+ if let 1 = any() {}
+ if let 1u32 = any() {}
+ if let 1f32 = any() {}
+ if let 1.0 = any() {}
+ if let true = any() {}
+ }
+ "#,
+ expect![[r#"
+ 17..28 '{ loop {} }': T
+ 19..26 'loop {}': !
+ 24..26 '{}': ()
+ 37..38 'x': &i32
+ 46..208 '{ ...) {} }': ()
+ 52..75 'if let...y() {}': ()
+ 55..72 'let "f... any()': bool
+ 59..64 '"foo"': &str
+ 59..64 '"foo"': &str
+ 67..70 'any': fn any<&str>() -> &str
+ 67..72 'any()': &str
+ 73..75 '{}': ()
+ 80..99 'if let...y() {}': ()
+ 83..96 'let 1 = any()': bool
+ 87..88 '1': i32
+ 87..88 '1': i32
+ 91..94 'any': fn any<i32>() -> i32
+ 91..96 'any()': i32
+ 97..99 '{}': ()
+ 104..126 'if let...y() {}': ()
+ 107..123 'let 1u... any()': bool
+ 111..115 '1u32': u32
+ 111..115 '1u32': u32
+ 118..121 'any': fn any<u32>() -> u32
+ 118..123 'any()': u32
+ 124..126 '{}': ()
+ 131..153 'if let...y() {}': ()
+ 134..150 'let 1f... any()': bool
+ 138..142 '1f32': f32
+ 138..142 '1f32': f32
+ 145..148 'any': fn any<f32>() -> f32
+ 145..150 'any()': f32
+ 151..153 '{}': ()
+ 158..179 'if let...y() {}': ()
+ 161..176 'let 1.0 = any()': bool
+ 165..168 '1.0': f64
+ 165..168 '1.0': f64
+ 171..174 'any': fn any<f64>() -> f64
+ 171..176 'any()': f64
+ 177..179 '{}': ()
+ 184..206 'if let...y() {}': ()
+ 187..203 'let tr... any()': bool
+ 191..195 'true': bool
+ 191..195 'true': bool
+ 198..201 'any': fn any<bool>() -> bool
+ 198..203 'any()': bool
+ 204..206 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_range_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn test(x: &i32) {
+ if let 1..76 = 2u32 {}
+ if let 1..=76 = 2u32 {}
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..75 '{ ...2 {} }': ()
+ 23..45 'if let...u32 {}': ()
+ 26..42 'let 1....= 2u32': bool
+ 30..35 '1..76': u32
+ 38..42 '2u32': u32
+ 43..45 '{}': ()
+ 50..73 'if let...u32 {}': ()
+ 53..70 'let 1....= 2u32': bool
+ 57..63 '1..=76': u32
+ 66..70 '2u32': u32
+ 71..73 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+
+ fn test() {
+ let A(n) = &A(1);
+ let A(n) = &mut A(1);
+ }
+ "#,
+ expect![[r#"
+ 27..78 '{ ...(1); }': ()
+ 37..41 'A(n)': A<i32>
+ 39..40 'n': &i32
+ 44..49 '&A(1)': &A<i32>
+ 45..46 'A': A<i32>(i32) -> A<i32>
+ 45..49 'A(1)': A<i32>
+ 47..48 '1': i32
+ 59..63 'A(n)': A<i32>
+ 61..62 'n': &mut i32
+ 66..75 '&mut A(1)': &mut A<i32>
+ 71..72 'A': A<i32>(i32) -> A<i32>
+ 71..75 'A(1)': A<i32>
+ 73..74 '1': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics_ref() {
+ cov_mark::check!(match_ergonomics_ref);
+ check_infer(
+ r#"
+ fn test() {
+ let v = &(1, &2);
+ let (_, &w) = v;
+ }
+ "#,
+ expect![[r#"
+ 10..56 '{ ...= v; }': ()
+ 20..21 'v': &(i32, &i32)
+ 24..32 '&(1, &2)': &(i32, &i32)
+ 25..32 '(1, &2)': (i32, &i32)
+ 26..27 '1': i32
+ 29..31 '&2': &i32
+ 30..31 '2': i32
+ 42..49 '(_, &w)': (i32, &i32)
+ 43..44 '_': i32
+ 46..48 '&w': &i32
+ 47..48 'w': i32
+ 52..53 'v': &(i32, &i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_slice() {
+ check_infer(
+ r#"
+ fn test() {
+ let slice: &[f64] = &[0.0];
+ match slice {
+ &[] => {},
+ &[a] => {
+ a;
+ },
+ &[b, c] => {
+ b;
+ c;
+ }
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..209 '{ ... } }': ()
+ 20..25 'slice': &[f64]
+ 36..42 '&[0.0]': &[f64; 1]
+ 37..42 '[0.0]': [f64; 1]
+ 38..41 '0.0': f64
+ 48..207 'match ... }': ()
+ 54..59 'slice': &[f64]
+ 70..73 '&[]': &[f64]
+ 71..73 '[]': [f64]
+ 77..79 '{}': ()
+ 89..93 '&[a]': &[f64]
+ 90..93 '[a]': [f64]
+ 91..92 'a': f64
+ 97..123 '{ ... }': ()
+ 111..112 'a': f64
+ 133..140 '&[b, c]': &[f64]
+ 134..140 '[b, c]': [f64]
+ 135..136 'b': f64
+ 138..139 'c': f64
+ 144..185 '{ ... }': ()
+ 158..159 'b': f64
+ 173..174 'c': f64
+ 194..195 '_': &[f64]
+ 199..201 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_string_literal() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..98 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..96 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 72..74 '{}': ()
+ 83..84 '_': &str
+ 88..90 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_or() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" | "world" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..108 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..106 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 61..78 '"hello...world"': &str
+ 71..78 '"world"': &str
+ 71..78 '"world"': &str
+ 82..84 '{}': ()
+ 93..94 '_': &str
+ 98..100 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_arr() {
+ check_infer(
+ r#"
+ fn test() {
+ let arr: [f64; 2] = [0.0, 1.0];
+ match arr {
+ [1.0, a] => {
+ a;
+ },
+ [b, c] => {
+ b;
+ c;
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..179 '{ ... } }': ()
+ 20..23 'arr': [f64; 2]
+ 36..46 '[0.0, 1.0]': [f64; 2]
+ 37..40 '0.0': f64
+ 42..45 '1.0': f64
+ 52..177 'match ... }': ()
+ 58..61 'arr': [f64; 2]
+ 72..80 '[1.0, a]': [f64; 2]
+ 73..76 '1.0': f64
+ 73..76 '1.0': f64
+ 78..79 'a': f64
+ 84..110 '{ ... }': ()
+ 98..99 'a': f64
+ 120..126 '[b, c]': [f64; 2]
+ 121..122 'b': f64
+ 124..125 'c': f64
+ 130..171 '{ ... }': ()
+ 144..145 'b': f64
+ 159..160 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_adt_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B
+ }
+
+ struct S(u32, E);
+
+ fn test() {
+ let e = E::A { x: 3 };
+
+ let S(y, z) = foo;
+ let E::A { x: new_var } = e;
+
+ match e {
+ E::A { x } => x,
+ E::B if foo => 1,
+ E::B => 10,
+ };
+
+ let ref d @ E::A { .. } = e;
+ d;
+ }
+ "#,
+ expect![[r#"
+ 67..288 '{ ... d; }': ()
+ 77..78 'e': E
+ 81..94 'E::A { x: 3 }': E
+ 91..92 '3': usize
+ 105..112 'S(y, z)': S
+ 107..108 'y': u32
+ 110..111 'z': E
+ 115..118 'foo': S
+ 128..147 'E::A {..._var }': E
+ 138..145 'new_var': usize
+ 150..151 'e': E
+ 158..244 'match ... }': usize
+ 164..165 'e': E
+ 176..186 'E::A { x }': E
+ 183..184 'x': usize
+ 190..191 'x': usize
+ 201..205 'E::B': E
+ 209..212 'foo': bool
+ 216..217 '1': usize
+ 227..231 'E::B': E
+ 235..237 '10': usize
+ 255..274 'ref d ...{ .. }': &E
+ 263..274 'E::A { .. }': E
+ 277..278 'e': E
+ 284..285 'd': &E
+ "#]],
+ );
+}
+
+#[test]
+fn enum_variant_through_self_in_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B(usize),
+ C
+ }
+
+ impl E {
+ fn test() {
+ match (loop {}) {
+ Self::A { x } => { x; },
+ Self::B(x) => { x; },
+ Self::C => {},
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 75..217 '{ ... }': ()
+ 85..210 'match ... }': ()
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 115..128 'Self::A { x }': E
+ 125..126 'x': usize
+ 132..138 '{ x; }': ()
+ 134..135 'x': usize
+ 152..162 'Self::B(x)': E
+ 160..161 'x': usize
+ 166..172 '{ x; }': ()
+ 168..169 'x': usize
+ 186..193 'Self::C': E
+ 197..199 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generics_in_patterns() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+
+ fn test(a1: A<u32>, o: Option<u64>) {
+ let A { x: x2 } = a1;
+ let A::<i64> { x: x3 } = A { x: 1 };
+ match o {
+ Option::Some(t) => t,
+ _ => 1,
+ };
+ }
+ "#,
+ expect![[r#"
+ 78..80 'a1': A<u32>
+ 90..91 'o': Option<u64>
+ 106..243 '{ ... }; }': ()
+ 116..127 'A { x: x2 }': A<u32>
+ 123..125 'x2': u32
+ 130..132 'a1': A<u32>
+ 142..160 'A::<i6...: x3 }': A<i64>
+ 156..158 'x3': i64
+ 163..173 'A { x: 1 }': A<i64>
+ 170..171 '1': i64
+ 179..240 'match ... }': u64
+ 185..186 'o': Option<u64>
+ 197..212 'Option::Some(t)': Option<u64>
+ 210..211 't': u64
+ 216..217 't': u64
+ 227..228 '_': Option<u64>
+ 232..233 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const_pattern() {
+ check(
+ r#"
+enum Option<T> { None }
+use Option::None;
+struct Foo;
+const Bar: usize = 1;
+
+fn test() {
+ let a: Option<u32> = None;
+ let b: Option<i64> = match a {
+ None => None,
+ };
+ let _: () = match () { Foo => () };
+ // ^^^ expected (), got Foo
+ let _: () = match () { Bar => () };
+ // ^^^ expected (), got usize
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_guard() {
+ check_infer(
+ r#"
+struct S;
+impl S { fn foo(&self) -> bool { false } }
+
+fn main() {
+ match S {
+ s if s.foo() => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 27..31 'self': &S
+ 41..50 '{ false }': bool
+ 43..48 'false': bool
+ 64..115 '{ ... } }': ()
+ 70..113 'match ... }': ()
+ 76..77 'S': S
+ 88..89 's': S
+ 93..94 's': S
+ 93..100 's.foo()': bool
+ 104..106 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn match_ergonomics_in_closure_params() {
+ check_infer(
+ r#"
+//- minicore: fn
+fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
+
+fn test() {
+ foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
+ foo(&(1, "a"), |(x, y)| x);
+}
+"#,
+ expect![[r#"
+ 32..33 't': T
+ 38..39 'f': F
+ 49..60 '{ loop {} }': U
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 72..171 '{ ... x); }': ()
+ 78..81 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
+ 78..105 'foo(&(...y)| x)': i32
+ 82..91 '&(1, "a")': &(i32, &str)
+ 83..91 '(1, "a")': (i32, &str)
+ 84..85 '1': i32
+ 87..90 '"a"': &str
+ 93..104 '|&(x, y)| x': |&(i32, &str)| -> i32
+ 94..101 '&(x, y)': &(i32, &str)
+ 95..101 '(x, y)': (i32, &str)
+ 96..97 'x': i32
+ 99..100 'y': &str
+ 103..104 'x': i32
+ 142..145 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
+ 142..168 'foo(&(...y)| x)': &i32
+ 146..155 '&(1, "a")': &(i32, &str)
+ 147..155 '(1, "a")': (i32, &str)
+ 148..149 '1': i32
+ 151..154 '"a"': &str
+ 157..167 '|(x, y)| x': |&(i32, &str)| -> &i32
+ 158..164 '(x, y)': (i32, &str)
+ 159..160 'x': &i32
+ 162..163 'y': &&str
+ 166..167 'x': &i32
+ "#]],
+ );
+}
+
+#[test]
+fn slice_tail_pattern() {
+ check_infer(
+ r#"
+ fn foo(params: &[i32]) {
+ match params {
+ [head, tail @ ..] => {
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[i32]
+ 23..92 '{ ... } }': ()
+ 29..90 'match ... }': ()
+ 35..41 'params': &[i32]
+ 52..69 '[head,... @ ..]': [i32]
+ 53..57 'head': &i32
+ 59..68 'tail @ ..': &[i32]
+ 66..68 '..': [i32]
+ 73..84 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn box_pattern() {
+ check_infer(
+ r#"
+ pub struct Global;
+ #[lang = "owned_box"]
+ pub struct Box<T, A = Global>(T);
+
+ fn foo(params: Box<i32>) {
+ match params {
+ box integer => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 83..89 'params': Box<i32, Global>
+ 101..155 '{ ... } }': ()
+ 107..153 'match ... }': ()
+ 113..119 'params': Box<i32, Global>
+ 130..141 'box integer': Box<i32, Global>
+ 134..141 'integer': i32
+ 145..147 '{}': ()
+ "#]],
+ );
+ check_infer(
+ r#"
+ #[lang = "owned_box"]
+ pub struct Box<T>(T);
+
+ fn foo(params: Box<i32>) {
+ match params {
+ box integer => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 52..58 'params': Box<i32>
+ 70..124 '{ ... } }': ()
+ 76..122 'match ... }': ()
+ 82..88 'params': Box<i32>
+ 99..110 'box integer': Box<i32>
+ 103..110 'integer': i32
+ 114..116 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_ellipsis_pattern() {
+ check_infer_with_mismatches(
+ r#"
+fn foo(tuple: (u8, i16, f32)) {
+ match tuple {
+ (.., b, c) => {},
+ (a, .., c) => {},
+ (a, b, ..) => {},
+ (a, b) => {/*too short*/}
+ (a, b, c, d) => {/*too long*/}
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 7..12 'tuple': (u8, i16, f32)
+ 30..224 '{ ... } }': ()
+ 36..222 'match ... }': ()
+ 42..47 'tuple': (u8, i16, f32)
+ 58..68 '(.., b, c)': (u8, i16, f32)
+ 63..64 'b': i16
+ 66..67 'c': f32
+ 72..74 '{}': ()
+ 84..94 '(a, .., c)': (u8, i16, f32)
+ 85..86 'a': u8
+ 92..93 'c': f32
+ 98..100 '{}': ()
+ 110..120 '(a, b, ..)': (u8, i16, f32)
+ 111..112 'a': u8
+ 114..115 'b': i16
+ 124..126 '{}': ()
+ 136..142 '(a, b)': (u8, i16)
+ 137..138 'a': u8
+ 140..141 'b': i16
+ 146..161 '{/*too short*/}': ()
+ 170..182 '(a, b, c, d)': (u8, i16, f32, {unknown})
+ 171..172 'a': u8
+ 174..175 'b': i16
+ 177..178 'c': f32
+ 180..181 'd': {unknown}
+ 186..200 '{/*too long*/}': ()
+ 209..210 '_': (u8, i16, f32)
+ 214..216 '{}': ()
+ 136..142: expected (u8, i16, f32), got (u8, i16)
+ 170..182: expected (u8, i16, f32), got (u8, i16, f32, {unknown})
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_ellipsis_pattern() {
+ check_infer(
+ r#"
+struct Tuple(u8, i16, f32);
+fn foo(tuple: Tuple) {
+ match tuple {
+ Tuple(.., b, c) => {},
+ Tuple(a, .., c) => {},
+ Tuple(a, b, ..) => {},
+ Tuple(a, b) => {/*too short*/}
+ Tuple(a, b, c, d) => {/*too long*/}
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 35..40 'tuple': Tuple
+ 49..268 '{ ... } }': ()
+ 55..266 'match ... }': ()
+ 61..66 'tuple': Tuple
+ 77..92 'Tuple(.., b, c)': Tuple
+ 87..88 'b': i16
+ 90..91 'c': f32
+ 96..98 '{}': ()
+ 108..123 'Tuple(a, .., c)': Tuple
+ 114..115 'a': u8
+ 121..122 'c': f32
+ 127..129 '{}': ()
+ 139..154 'Tuple(a, b, ..)': Tuple
+ 145..146 'a': u8
+ 148..149 'b': i16
+ 158..160 '{}': ()
+ 170..181 'Tuple(a, b)': Tuple
+ 176..177 'a': u8
+ 179..180 'b': i16
+ 185..200 '{/*too short*/}': ()
+ 209..226 'Tuple(... c, d)': Tuple
+ 215..216 'a': u8
+ 218..219 'b': i16
+ 221..222 'c': f32
+ 224..225 'd': {unknown}
+ 230..244 '{/*too long*/}': ()
+ 253..254 '_': Tuple
+ 258..260 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn const_block_pattern() {
+ check_infer(
+ r#"
+struct Foo(usize);
+fn foo(foo: Foo) {
+ match foo {
+ const { Foo(15 + 32) } => {},
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 26..29 'foo': Foo
+ 36..115 '{ ... } }': ()
+ 42..113 'match ... }': ()
+ 48..51 'foo': Foo
+ 62..84 'const ... 32) }': Foo
+ 68..84 '{ Foo(... 32) }': Foo
+ 70..73 'Foo': Foo(usize) -> Foo
+ 70..82 'Foo(15 + 32)': Foo
+ 74..76 '15': usize
+ 74..81 '15 + 32': usize
+ 79..81 '32': usize
+ 88..90 '{}': ()
+ 100..101 '_': Foo
+ 105..107 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn macro_pat() {
+ check_types(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ bind;
+ //^^^^ u8
+ }
+ Enum::Variant2 => {}
+ }
+}
+ "#,
+ )
+}
+
+#[test]
+fn type_mismatch_in_or_pattern() {
+ check_infer_with_mismatches(
+ r#"
+fn main() {
+ match (false,) {
+ (true | (),) => {}
+ (() | true,) => {}
+ (_ | (),) => {}
+ (() | _,) => {}
+ }
+}
+"#,
+ expect![[r#"
+ 10..142 '{ ... } }': ()
+ 16..140 'match ... }': ()
+ 22..30 '(false,)': (bool,)
+ 23..28 'false': bool
+ 41..53 '(true | (),)': (bool,)
+ 42..46 'true': bool
+ 42..46 'true': bool
+ 42..51 'true | ()': bool
+ 49..51 '()': ()
+ 57..59 '{}': ()
+ 68..80 '(() | true,)': ((),)
+ 69..71 '()': ()
+ 69..78 '() | true': ()
+ 74..78 'true': bool
+ 74..78 'true': bool
+ 84..86 '{}': ()
+ 95..104 '(_ | (),)': (bool,)
+ 96..97 '_': bool
+ 96..102 '_ | ()': bool
+ 100..102 '()': ()
+ 108..110 '{}': ()
+ 119..128 '(() | _,)': ((),)
+ 120..122 '()': ()
+ 120..126 '() | _': ()
+ 125..126 '_': bool
+ 132..134 '{}': ()
+ 49..51: expected bool, got ()
+ 68..80: expected (bool,), got ((),)
+ 69..71: expected bool, got ()
+ 69..78: expected bool, got ()
+ 100..102: expected bool, got ()
+ 119..128: expected (bool,), got ((),)
+ 120..122: expected bool, got ()
+ 120..126: expected bool, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn slice_pattern_correctly_handles_array_length() {
+ check_infer(
+ r#"
+fn main() {
+ let [head, middle @ .., tail, tail2] = [1, 2, 3, 4, 5];
+}
+ "#,
+ expect![[r#"
+ 10..73 '{ ... 5]; }': ()
+ 20..52 '[head,...tail2]': [i32; 5]
+ 21..25 'head': i32
+ 27..38 'middle @ ..': [i32; 2]
+ 36..38 '..': [i32; 2]
+ 40..44 'tail': i32
+ 46..51 'tail2': i32
+ 55..70 '[1, 2, 3, 4, 5]': [i32; 5]
+ 56..57 '1': i32
+ 59..60 '2': i32
+ 62..63 '3': i32
+ 65..66 '4': i32
+ 68..69 '5': i32
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_lookup_in_value_ns() {
+ check_types(
+ r#"
+use self::Constructor::*;
+struct IntRange {
+ range: (),
+}
+enum Constructor {
+ IntRange(IntRange),
+}
+fn main() {
+ match Constructor::IntRange(IntRange { range: () }) {
+ IntRange(x) => {
+ x;
+ //^ IntRange
+ }
+ Constructor::IntRange(x) => {
+ x;
+ //^ IntRange
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn if_let_guards() {
+ check_types(
+ r#"
+fn main() {
+ match (0,) {
+ opt if let (x,) = opt => {
+ x;
+ //^ i32
+ }
+ _ => {}
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn tuple_wildcard() {
+ check_types(
+ r#"
+fn main() {
+ enum Option<T> {Some(T), None}
+ use Option::*;
+
+ let mut x = None;
+ x;
+ //^ Option<(i32, i32)>
+
+ if let Some((_, _a)) = x {}
+
+ x = Some((1, 2));
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
new file mode 100644
index 000000000..93a88ab58
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -0,0 +1,1650 @@
+use expect_test::expect;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn bug_484() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = if true {};
+ }
+ "#,
+ expect![[r#"
+ 10..37 '{ ... {}; }': ()
+ 20..21 'x': ()
+ 24..34 'if true {}': ()
+ 27..31 'true': bool
+ 32..34 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn no_panic_on_field_of_enum() {
+ check_infer(
+ r#"
+ enum X {}
+
+ fn test(x: X) {
+ x.some_field;
+ }
+ "#,
+ expect![[r#"
+ 19..20 'x': X
+ 25..46 '{ ...eld; }': ()
+ 31..32 'x': X
+ 31..43 'x.some_field': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn bug_585() {
+ check_infer(
+ r#"
+ fn test() {
+ X {};
+ match x {
+ A::B {} => (),
+ A::Y() => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..88 '{ ... } }': ()
+ 16..20 'X {}': {unknown}
+ 26..86 'match ... }': ()
+ 32..33 'x': {unknown}
+ 44..51 'A::B {}': {unknown}
+ 55..57 '()': ()
+ 67..73 'A::Y()': {unknown}
+ 77..79 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn bug_651() {
+ check_infer(
+ r#"
+ fn quux() {
+ let y = 92;
+ 1 + y;
+ }
+ "#,
+ expect![[r#"
+ 10..40 '{ ...+ y; }': ()
+ 20..21 'y': i32
+ 24..26 '92': i32
+ 32..33 '1': i32
+ 32..37 '1 + y': i32
+ 36..37 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars() {
+ check_infer(
+ r#"
+ fn test() {
+ let y = unknown;
+ [y, &y];
+ }
+ "#,
+ expect![[r#"
+ 10..47 '{ ...&y]; }': ()
+ 20..21 'y': {unknown}
+ 24..31 'unknown': {unknown}
+ 37..44 '[y, &y]': [{unknown}; 2]
+ 38..39 'y': {unknown}
+ 41..43 '&y': &{unknown}
+ 42..43 'y': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars_2() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = unknown;
+ let y = unknown;
+ [(x, y), (&y, &x)];
+ }
+ "#,
+ expect![[r#"
+ 10..79 '{ ...x)]; }': ()
+ 20..21 'x': &{unknown}
+ 24..31 'unknown': &{unknown}
+ 41..42 'y': {unknown}
+ 45..52 'unknown': {unknown}
+ 58..76 '[(x, y..., &x)]': [(&{unknown}, {unknown}); 2]
+ 59..65 '(x, y)': (&{unknown}, {unknown})
+ 60..61 'x': &{unknown}
+ 63..64 'y': {unknown}
+ 67..75 '(&y, &x)': (&{unknown}, {unknown})
+ 68..70 '&y': &{unknown}
+ 69..70 'y': {unknown}
+ 72..74 '&x': &&{unknown}
+ 73..74 'x': &{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn array_elements_expected_type() {
+ check_no_mismatches(
+ r#"
+ fn test() {
+ let x: [[u32; 2]; 2] = [[1, 2], [3, 4]];
+ }
+ "#,
+ );
+}
+
+#[test]
+fn infer_std_crash_1() {
+ // caused stack overflow, taken from std
+ check_infer(
+ r#"
+ enum Maybe<T> {
+ Real(T),
+ Fake,
+ }
+
+ fn write() {
+ match something_unknown {
+ Maybe::Real(ref mut something) => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 53..138 '{ ... } }': ()
+ 59..136 'match ... }': ()
+ 65..82 'someth...nknown': Maybe<{unknown}>
+ 93..123 'Maybe:...thing)': Maybe<{unknown}>
+ 105..122 'ref mu...ething': &mut {unknown}
+ 127..129 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_2() {
+ // caused "equating two type variables, ...", taken from std
+ check_infer(
+ r#"
+ fn test_line_buffer() {
+ &[0, b'\n', 1, b'\n'];
+ }
+ "#,
+ expect![[r#"
+ 22..52 '{ ...n']; }': ()
+ 28..49 '&[0, b...b'\n']': &[u8; 4]
+ 29..49 '[0, b'...b'\n']': [u8; 4]
+ 30..31 '0': u8
+ 33..38 'b'\n'': u8
+ 40..41 '1': u8
+ 43..48 'b'\n'': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_3() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn compute() {
+ match nope!() {
+ SizeSkeleton::Pointer { non_zero: true, tail } => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 17..107 '{ ... } }': ()
+ 23..105 'match ... }': ()
+ 29..36 'nope!()': {unknown}
+ 47..93 'SizeSk...tail }': {unknown}
+ 81..85 'true': bool
+ 81..85 'true': bool
+ 87..91 'tail': {unknown}
+ 97..99 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_4() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn primitive_type() {
+ match *self {
+ BorrowedRef { type_: Primitive(p), ..} => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 24..105 '{ ... } }': ()
+ 30..103 'match ... }': ()
+ 36..41 '*self': {unknown}
+ 37..41 'self': {unknown}
+ 52..90 'Borrow...), ..}': {unknown}
+ 73..85 'Primitive(p)': {unknown}
+ 83..84 'p': {unknown}
+ 94..96 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_5() {
+ // taken from rustc
+ check_infer(
+ r#"
+ fn extra_compiler_flags() {
+ for content in doesnt_matter {
+ let name = if doesnt_matter {
+ first
+ } else {
+ &content
+ };
+
+ let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
+ name
+ } else {
+ content
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 26..322 '{ ... } }': ()
+ 32..320 'for co... }': ()
+ 36..43 'content': {unknown}
+ 47..60 'doesnt_matter': {unknown}
+ 61..320 '{ ... }': ()
+ 75..79 'name': &{unknown}
+ 82..166 'if doe... }': &{unknown}
+ 85..98 'doesnt_matter': bool
+ 99..128 '{ ... }': &{unknown}
+ 113..118 'first': &{unknown}
+ 134..166 '{ ... }': &{unknown}
+ 148..156 '&content': &{unknown}
+ 149..156 'content': {unknown}
+ 181..188 'content': &{unknown}
+ 191..313 'if ICE... }': &{unknown}
+ 194..231 'ICE_RE..._VALUE': {unknown}
+ 194..247 'ICE_RE...&name)': bool
+ 241..246 '&name': &&{unknown}
+ 242..246 'name': &{unknown}
+ 248..276 '{ ... }': &{unknown}
+ 262..266 'name': &{unknown}
+ 282..313 '{ ... }': {unknown}
+ 296..303 'content': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_generics_crash() {
+ // another crash found typechecking rustc
+ check_infer(
+ r#"
+ struct Canonical<V> {
+ value: V,
+ }
+ struct QueryResponse<V> {
+ value: V,
+ }
+ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
+ &query_response.value;
+ }
+ "#,
+ expect![[r#"
+ 91..105 'query_response': Canonical<QueryResponse<R>>
+ 136..166 '{ ...lue; }': ()
+ 142..163 '&query....value': &QueryResponse<R>
+ 143..157 'query_response': Canonical<QueryResponse<R>>
+ 143..163 'query_....value': QueryResponse<R>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paren_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = (bar!());
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()); }': ()
+ 54..55 'a': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = [bar!()];
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()]; }': ()
+ 54..55 'a': [u32; 1]
+ 58..66 '[bar!()]': [u32; 1]
+ "#]],
+ );
+}
+
+#[test]
+fn bug_1030() {
+ check_infer(
+ r#"
+ struct HashSet<T, H>;
+ struct FxHasher;
+ type FxHashSet<T> = HashSet<T, FxHasher>;
+
+ impl<T, H> HashSet<T, H> {
+ fn default() -> HashSet<T, H> {}
+ }
+
+ pub fn main_loop() {
+ FxHashSet::default();
+ }
+ "#,
+ expect![[r#"
+ 143..145 '{}': HashSet<T, H>
+ 168..197 '{ ...t(); }': ()
+ 174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
+ 174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2669() {
+ check_infer(
+ r#"
+ trait A {}
+ trait Write {}
+ struct Response<T> {}
+
+ trait D {
+ fn foo();
+ }
+
+ impl<T:A> D for Response<T> {
+ fn foo() {
+ end();
+ fn end<W: Write>() {
+ let _x: T = loop {};
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 119..214 '{ ... }': ()
+ 129..132 'end': fn end<{unknown}>()
+ 129..134 'end()': ()
+ 163..208 '{ ... }': ()
+ 181..183 '_x': !
+ 190..197 'loop {}': !
+ 195..197 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn issue_2705() {
+ check_infer(
+ r#"
+ trait Trait {}
+ fn test() {
+ <Trait<u32>>::foo()
+ }
+ "#,
+ expect![[r#"
+ 25..52 '{ ...oo() }': ()
+ 31..48 '<Trait...>::foo': {unknown}
+ 31..50 '<Trait...:foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2683_chars_impl() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Chars<'a> {}
+impl<'a> Iterator for Chars<'a> {
+ type Item = char;
+ fn next(&mut self) -> Option<char> { loop {} }
+}
+
+fn test() {
+ let chars: Chars<'_>;
+ (chars.next(), chars.nth(1));
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (Option<char>, Option<char>)
+"#,
+ );
+}
+
+#[test]
+fn issue_3999_slice() {
+ check_infer(
+ r#"
+ fn foo(params: &[usize]) {
+ match params {
+ [ps @ .., _] => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[usize]
+ 25..80 '{ ... } }': ()
+ 31..78 'match ... }': ()
+ 37..43 'params': &[usize]
+ 54..66 '[ps @ .., _]': [usize]
+ 55..62 'ps @ ..': &[usize]
+ 60..62 '..': [usize]
+ 64..65 '_': usize
+ 70..72 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_3999_struct() {
+ // rust-analyzer should not panic on seeing this malformed
+ // record pattern.
+ check_infer(
+ r#"
+ struct Bar {
+ a: bool,
+ }
+ fn foo(b: Bar) {
+ match b {
+ Bar { a: .. } => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 35..36 'b': Bar
+ 43..95 '{ ... } }': ()
+ 49..93 'match ... }': ()
+ 55..56 'b': Bar
+ 67..80 'Bar { a: .. }': Bar
+ 76..78 '..': bool
+ 84..86 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4235_name_conflicts() {
+ check_infer(
+ r#"
+ struct FOO {}
+ static FOO:FOO = FOO {};
+
+ impl FOO {
+ fn foo(&self) {}
+ }
+
+ fn main() {
+ let a = &FOO;
+ a.foo();
+ }
+ "#,
+ expect![[r#"
+ 31..37 'FOO {}': FOO
+ 63..67 'self': &FOO
+ 69..71 '{}': ()
+ 85..119 '{ ...o(); }': ()
+ 95..96 'a': &FOO
+ 99..103 '&FOO': &FOO
+ 100..103 'FOO': FOO
+ 109..110 'a': &FOO
+ 109..116 'a.foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4465_dollar_crate_at_type() {
+ check_infer(
+ r#"
+ pub struct Foo {}
+ pub fn anything<T>() -> T {
+ loop {}
+ }
+ macro_rules! foo {
+ () => {{
+ let r: $crate::Foo = anything();
+ r
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ 44..59 '{ loop {} }': T
+ 50..57 'loop {}': !
+ 55..57 '{}': ()
+ !0..31 '{letr:...g();r}': Foo
+ !4..5 'r': Foo
+ !18..26 'anything': fn anything<Foo>() -> Foo
+ !18..28 'anything()': Foo
+ !29..30 'r': Foo
+ 163..187 '{ ...!(); }': ()
+ 173..175 '_a': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6811() {
+ check_infer(
+ r#"
+ macro_rules! profile_function {
+ () => {
+ let _a = 1;
+ let _b = 1;
+ };
+ }
+ fn main() {
+ profile_function!();
+ }
+ "#,
+ expect![[r#"
+ !0..16 'let_a=...t_b=1;': ()
+ !3..5 '_a': i32
+ !6..7 '1': i32
+ !11..13 '_b': i32
+ !14..15 '1': i32
+ 103..131 '{ ...!(); }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4053_diesel_where_clauses() {
+ check_infer(
+ r#"
+ trait BoxedDsl<DB> {
+ type Output;
+ fn internal_into_boxed(self) -> Self::Output;
+ }
+
+ struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
+ order: Order,
+ }
+
+ trait QueryFragment<DB: Backend> {}
+
+ trait Into<T> { fn into(self) -> T; }
+
+ impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
+ for SelectStatement<F, S, D, W, O, LOf, G>
+ where
+ O: Into<dyn QueryFragment<DB>>,
+ {
+ type Output = XXX;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ self.order.into();
+ }
+ }
+ "#,
+ expect![[r#"
+ 65..69 'self': Self
+ 267..271 'self': Self
+ 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 488..522 '{ ... }': ()
+ 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 498..508 'self.order': O
+ 498..515 'self.o...into()': dyn QueryFragment<DB>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4953() {
+ check_infer(
+ r#"
+ pub struct Foo(pub i64);
+ impl Foo {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 58..72 '{ Self(0i64) }': Foo
+ 60..64 'Self': Foo(i64) -> Foo
+ 60..70 'Self(0i64)': Foo
+ 65..69 '0i64': i64
+ "#]],
+ );
+ check_infer(
+ r#"
+ pub struct Foo<T>(pub T);
+ impl Foo<i64> {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 64..78 '{ Self(0i64) }': Foo<i64>
+ 66..70 'Self': Foo<i64>(i64) -> Foo<i64>
+ 66..76 'Self(0i64)': Foo<i64>
+ 71..75 '0i64': i64
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4931() {
+ check_infer(
+ r#"
+ trait Div<T> {
+ type Output;
+ }
+
+ trait CheckedDiv: Div<()> {}
+
+ trait PrimInt: CheckedDiv<Output = ()> {
+ fn pow(self);
+ }
+
+ fn check<T: PrimInt>(i: T) {
+ i.pow();
+ }
+ "#,
+ expect![[r#"
+ 117..121 'self': Self
+ 148..149 'i': T
+ 154..170 '{ ...w(); }': ()
+ 160..161 'i': T
+ 160..167 'i.pow()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4885() {
+ check_infer(
+ r#"
+ //- minicore: coerce_unsized, future
+ use core::future::Future;
+ trait Foo<R> {
+ type Bar;
+ }
+ fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ bar(key)
+ }
+ fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ }
+ "#,
+ expect![[r#"
+ 70..73 'key': &K
+ 132..148 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
+ 138..141 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
+ 138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
+ 142..145 'key': &K
+ 162..165 'key': &K
+ 224..227 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4800() {
+ check_infer(
+ r#"
+ trait Debug {}
+
+ struct Foo<T>;
+
+ type E1<T> = (T, T, T);
+ type E2<T> = E1<E1<E1<(T, T, T)>>>;
+
+ impl Debug for Foo<E2<()>> {}
+
+ struct Request;
+
+ pub trait Future {
+ type Output;
+ }
+
+ pub struct PeerSet<D>;
+
+ impl<D> Service<Request> for PeerSet<D>
+ where
+ D: Discover,
+ D::Key: Debug,
+ {
+ type Error = ();
+ type Future = dyn Future<Output = Self::Error>;
+
+ fn call(&mut self) -> Self::Future {
+ loop {}
+ }
+ }
+
+ pub trait Discover {
+ type Key;
+ }
+
+ pub trait Service<Request> {
+ type Error;
+ type Future: Future<Output = Self::Error>;
+ fn call(&mut self) -> Self::Future;
+ }
+ "#,
+ expect![[r#"
+ 379..383 'self': &mut PeerSet<D>
+ 401..424 '{ ... }': dyn Future<Output = ()>
+ 411..418 'loop {}': !
+ 416..418 '{}': ()
+ 575..579 'self': &mut Self
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4966() {
+ check_infer(
+ r#"
+ //- minicore: deref
+ pub trait IntoIterator {
+ type Item;
+ }
+
+ struct Repeat<A> { element: A }
+
+ struct Map<F> { f: F }
+
+ struct Vec<T> {}
+
+ impl<T> core::ops::Deref for Vec<T> {
+ type Target = [T];
+ }
+
+ fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
+
+ fn main() {
+ let inner = Map { f: |_: &f64| 0.0 };
+
+ let repeat = Repeat { element: inner };
+
+ let vec = from_iter(repeat);
+
+ vec.foo_bar();
+ }
+ "#,
+ expect![[r#"
+ 225..229 'iter': T
+ 244..246 '{}': Vec<A>
+ 258..402 '{ ...r(); }': ()
+ 268..273 'inner': Map<|&f64| -> f64>
+ 276..300 'Map { ... 0.0 }': Map<|&f64| -> f64>
+ 285..298 '|_: &f64| 0.0': |&f64| -> f64
+ 286..287 '_': &f64
+ 295..298 '0.0': f64
+ 311..317 'repeat': Repeat<Map<|&f64| -> f64>>
+ 320..345 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
+ 338..343 'inner': Map<|&f64| -> f64>
+ 356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 372..378 'repeat': Repeat<Map<|&f64| -> f64>>
+ 386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 386..399 'vec.foo_bar()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6628() {
+ check_infer(
+ r#"
+//- minicore: fn
+struct S<T>();
+impl<T> S<T> {
+ fn f(&self, _t: T) {}
+ fn g<F: FnOnce(&T)>(&self, _f: F) {}
+}
+fn main() {
+ let s = S();
+ s.g(|_x| {});
+ s.f(10);
+}
+"#,
+ expect![[r#"
+ 40..44 'self': &S<T>
+ 46..48 '_t': T
+ 53..55 '{}': ()
+ 81..85 'self': &S<T>
+ 87..89 '_f': F
+ 94..96 '{}': ()
+ 109..160 '{ ...10); }': ()
+ 119..120 's': S<i32>
+ 123..124 'S': S<i32>() -> S<i32>
+ 123..126 'S()': S<i32>
+ 132..133 's': S<i32>
+ 132..144 's.g(|_x| {})': ()
+ 136..143 '|_x| {}': |&i32| -> ()
+ 137..139 '_x': &i32
+ 141..143 '{}': ()
+ 150..151 's': S<i32>
+ 150..157 's.f(10)': ()
+ 154..156 '10': i32
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6852() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct BufWriter {}
+
+struct Mutex<T> {}
+struct MutexGuard<'a, T> {}
+impl<T> Mutex<T> {
+ fn lock(&self) -> MutexGuard<'_, T> {}
+}
+impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
+ type Target = T;
+}
+fn flush(&self) {
+ let w: &Mutex<BufWriter>;
+ *(w.lock());
+}
+"#,
+ expect![[r#"
+ 123..127 'self': &Mutex<T>
+ 150..152 '{}': MutexGuard<T>
+ 234..238 'self': &{unknown}
+ 240..290 '{ ...()); }': ()
+ 250..251 'w': &Mutex<BufWriter>
+ 276..287 '*(w.lock())': BufWriter
+ 278..279 'w': &Mutex<BufWriter>
+ 278..286 'w.lock()': MutexGuard<BufWriter>
+ "#]],
+ );
+}
+
+#[test]
+fn param_overrides_fn() {
+ check_types(
+ r#"
+ fn example(example: i32) {
+ fn f() {}
+ example;
+ //^^^^^^^ i32
+ }
+ "#,
+ )
+}
+
+#[test]
+fn lifetime_from_chalk_during_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ loop {}
+ }
+}
+
+trait Iterator {
+ type Item;
+}
+
+pub struct Iter<'a, T: 'a> {
+ inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
+}
+
+trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> {
+ fn clone_box(&self);
+}
+
+fn clone_iter<T>(s: Iter<T>) {
+ s.inner.clone_box();
+ //^^^^^^^^^^^^^^^^^^^ ()
+}
+"#,
+ )
+}
+
+#[test]
+fn issue_8686() {
+ check_infer(
+ r#"
+pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+}
+pub trait FromResidual<R = <Self as Try>::Residual> {
+ fn from_residual(residual: R) -> Self;
+}
+
+struct ControlFlow<B, C>;
+impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, !>;
+}
+impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
+}
+
+fn test() {
+ ControlFlow::from_residual(ControlFlow::<u32, !>);
+}
+ "#,
+ expect![[r#"
+ 144..152 'residual': R
+ 365..366 'r': ControlFlow<B, !>
+ 395..410 '{ ControlFlow }': ControlFlow<B, C>
+ 397..408 'ControlFlow': ControlFlow<B, C>
+ 424..482 '{ ...!>); }': ()
+ 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
+ 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
+ 457..478 'Contro...32, !>': ControlFlow<u32, !>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tail() {
+ // https://github.com/rust-lang/rust-analyzer/issues/8378
+ check_infer(
+ r#"
+ fn fake_tail(){
+ { "first" }
+ #[cfg(never)] 9
+ }
+ fn multiple_fake(){
+ { "fake" }
+ { "fake" }
+ { "second" }
+ #[cfg(never)] { 11 }
+ #[cfg(never)] 12;
+ #[cfg(never)] 13
+ }
+ fn no_normal_tail(){
+ { "third" }
+ #[cfg(never)] 14;
+ #[cfg(never)] 15;
+ }
+ fn no_actual_tail(){
+ { "fourth" };
+ #[cfg(never)] 14;
+ #[cfg(never)] 15
+ }
+ "#,
+ expect![[r#"
+ 14..53 '{ ...)] 9 }': ()
+ 20..31 '{ "first" }': ()
+ 22..29 '"first"': &str
+ 72..190 '{ ...] 13 }': ()
+ 78..88 '{ "fake" }': &str
+ 80..86 '"fake"': &str
+ 93..103 '{ "fake" }': &str
+ 95..101 '"fake"': &str
+ 108..120 '{ "second" }': ()
+ 110..118 '"second"': &str
+ 210..273 '{ ... 15; }': ()
+ 216..227 '{ "third" }': ()
+ 218..225 '"third"': &str
+ 293..357 '{ ...] 15 }': ()
+ 299..311 '{ "fourth" }': &str
+ 301..309 '"fourth"': &str
+ "#]],
+ )
+}
+
+#[test]
+fn impl_trait_in_option_9530() {
+ check_types(
+ r#"
+//- minicore: sized
+struct Option<T>;
+impl<T> Option<T> {
+ fn unwrap(self) -> T { loop {} }
+}
+fn make() -> Option<impl Copy> { Option }
+trait Copy {}
+fn test() {
+ let o = make();
+ o.unwrap();
+ //^^^^^^^^^^ impl Copy
+}
+ "#,
+ )
+}
+
+#[test]
+fn bare_dyn_trait_binders_9639() {
+ check_no_mismatches(
+ r#"
+//- minicore: fn, coerce_unsized
+fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
+ loop {}
+}
+
+fn parse_arule() {
+ infix_parse((), &(|_recurse| ()))
+}
+ "#,
+ )
+}
+
+#[test]
+fn call_expected_type_closure() {
+ check_types(
+ r#"
+//- minicore: fn, option
+
+fn map<T, U>(o: Option<T>, f: impl FnOnce(T) -> U) -> Option<U> { loop {} }
+struct S {
+ field: u32
+}
+
+fn test() {
+ let o = Some(S { field: 2 });
+ let _: Option<()> = map(o, |s| { s.field; });
+ // ^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_diesel_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option
+
+trait TypeMetadata {
+ type MetadataLookup;
+}
+
+pub struct Output<'a, T, DB>
+where
+ DB: TypeMetadata,
+ DB::MetadataLookup: 'a,
+{
+ out: T,
+ metadata_lookup: Option<&'a DB::MetadataLookup>,
+}
+
+impl<'a, T, DB: TypeMetadata> Output<'a, T, DB> {
+ pub fn new(out: T, metadata_lookup: &'a DB::MetadataLookup) -> Self {
+ Output {
+ out,
+ metadata_lookup: Some(metadata_lookup),
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn bitslice_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option, deref
+
+pub trait BitView {
+ type Store;
+}
+
+pub struct Lsb0;
+
+pub struct BitArray<V: BitView> { }
+
+pub struct BitSlice<T> { }
+
+impl<V: BitView> core::ops::Deref for BitArray<V> {
+ type Target = BitSlice<V::Store>;
+}
+
+impl<T> BitSlice<T> {
+ pub fn split_first(&self) -> Option<(T, &Self)> { loop {} }
+}
+
+fn multiexp_inner() {
+ let exp: &BitArray<Foo>;
+ exp.split_first();
+}
+ "#,
+ );
+}
+
+#[test]
+fn macro_expands_to_impl_trait() {
+ check_no_mismatches(
+ r#"
+trait Foo {}
+
+macro_rules! ty {
+ () => {
+ impl Foo
+ }
+}
+
+fn foo(_: ty!()) {}
+
+fn bar() {
+ foo(());
+}
+ "#,
+ )
+}
+
+#[test]
+fn nested_macro_in_fn_params() {
+ check_no_mismatches(
+ r#"
+macro_rules! U32Inner {
+ () => {
+ u32
+ };
+}
+
+macro_rules! U32 {
+ () => {
+ U32Inner!()
+ };
+}
+
+fn mamba(a: U32!(), p: u32) -> u32 {
+ a
+}
+ "#,
+ )
+}
+
+#[test]
+fn for_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ for _ in { let x = 0; } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..68 '{ ... } }': ()
+ 16..66 'for _ ... }': ()
+ 20..21 '_': {unknown}
+ 25..39 '{ let x = 0; }': ()
+ 31..32 'x': i32
+ 35..36 '0': i32
+ 40..66 '{ ... }': ()
+ 54..55 'y': i32
+ 58..59 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn while_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ while { true } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..59 '{ ... } }': ()
+ 16..57 'while ... }': ()
+ 22..30 '{ true }': bool
+ 24..28 'true': bool
+ 31..57 '{ ... }': ()
+ 45..46 'y': i32
+ 49..50 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn bug_11242() {
+ // FIXME: wrong, should be u32
+ check_types(
+ r#"
+fn foo<A, B>()
+where
+ A: IntoIterator<Item = u32>,
+ B: IntoIterator<Item = usize>,
+{
+ let _x: <A as IntoIterator>::Item;
+ // ^^ {unknown}
+}
+
+pub trait Iterator {
+ type Item;
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+}
+
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+}
+"#,
+ );
+}
+
+#[test]
+fn bug_11659() {
+ check_no_mismatches(
+ r#"
+struct LinkArray<const N: usize, LD>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<N, LD> {
+ let r = LinkArray::<N, LD>(x);
+ r
+}
+
+fn test() {
+ let x = f::<2, i32>(5);
+ let y = LinkArray::<52, LinkArray<2, i32>>(x);
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+struct LinkArray<LD, const N: usize>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<LD, N> {
+ let r = LinkArray::<LD, N>(x);
+ r
+}
+
+fn test() {
+ let x = f::<i32, 2>(5);
+ let y = LinkArray::<LinkArray<i32, 2>, 52>(x);
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_error_tolerance() {
+ check_no_mismatches(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+struct CT<const N: usize, T>(T);
+struct TC<T, const N: usize>(T);
+fn f<const N: usize, T>(x: T) -> (CT<N, T>, TC<T, N>) {
+ let l = CT::<N, T>(x);
+ let r = TC::<N, T>(x);
+ (l, r)
+}
+
+trait TR1<const N: usize>;
+trait TR2<const N: usize>;
+
+impl<const N: usize, T> TR1<N> for CT<N, T>;
+impl<const N: usize, T> TR1<5> for TC<T, N>;
+impl<const N: usize, T> TR2<N> for CT<T, N>;
+
+trait TR3<const N: usize> {
+ fn tr3(&self) -> &Self;
+}
+
+impl<const N: usize, T> TR3<5> for TC<T, N> {
+ fn tr3(&self) -> &Self {
+ self
+ }
+}
+
+impl<const N: usize, T> TR3<Item = 5> for TC<T, N> {}
+impl<const N: usize, T> TR3<T> for TC<T, N> {}
+
+fn impl_trait<const N: usize>(inp: impl TR1<N>) {}
+fn dyn_trait<const N: usize>(inp: &dyn TR2<N>) {}
+fn impl_trait_bad<'a, const N: usize>(inp: impl TR1<i32>) -> impl TR1<'a, i32> {}
+fn impl_trait_very_bad<const N: usize>(inp: impl TR1<Item = i32>) -> impl TR1<'a, Item = i32, 5, Foo = N> {}
+
+fn test() {
+ f::<2, i32>(5);
+ f::<2, 2>(5);
+ f(5);
+ f::<i32>(5);
+ CT::<52, CT<2, i32>>(x);
+ CT::<CT<2, i32>>(x);
+ impl_trait_bad(5);
+ impl_trait_bad(12);
+ TR3<5>::tr3();
+ TR3<{ 2+3 }>::tr3();
+ TC::<i32, 10>(5).tr3();
+ TC::<i32, 20>(5).tr3();
+ TC::<i32, i32>(5).tr3();
+ TC::<i32, { 7 + 3 }>(5).tr3();
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_impl_trait() {
+ check_no_mismatches(
+ r#"
+ //- minicore: from
+
+ struct Foo<T, const M: usize>;
+
+ trait Tr<T> {
+ fn f(T) -> Self;
+ }
+
+ impl<T, const M: usize> Tr<[T; M]> for Foo<T, M> {
+ fn f(_: [T; M]) -> Self {
+ Self
+ }
+ }
+
+ fn test() {
+ Foo::f([1, 2, 7, 10]);
+ }
+ "#,
+ );
+}
+
+#[test]
+fn nalgebra_factorial() {
+ check_no_mismatches(
+ r#"
+ const FACTORIAL: [u128; 4] = [1, 1, 2, 6];
+
+ fn factorial(n: usize) -> u128 {
+ match FACTORIAL.get(n) {
+ Some(f) => *f,
+ None => panic!("{}! is greater than u128::MAX", n),
+ }
+ }
+ "#,
+ )
+}
+
+#[test]
+fn regression_11688_1() {
+ check_no_mismatches(
+ r#"
+ pub struct Buffer<T>(T);
+ type Writer = Buffer<u8>;
+ impl<T> Buffer<T> {
+ fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+ loop {}
+ }
+ }
+ trait Encode<S> {
+ fn encode(self, w: &mut Writer, s: &mut S);
+ }
+ impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_2() {
+ check_types(
+ r#"
+ union MaybeUninit<T> {
+ uninit: (),
+ value: T,
+ }
+
+ impl<T> MaybeUninit<T> {
+ fn uninit_array<const LEN: usize>() -> [Self; LEN] {
+ loop {}
+ }
+ }
+
+ fn main() {
+ let x = MaybeUninit::<i32>::uninit_array::<1>();
+ //^ [MaybeUninit<i32>; 1]
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_3() {
+ check_types(
+ r#"
+ //- minicore: iterator
+ struct Ar<T, const N: u8>(T);
+ fn f<const LEN: usize, T, const BASE: u8>(
+ num_zeros: usize,
+ ) -> dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
+ loop {}
+ }
+ fn dynamic_programming() {
+ for board in f::<9, u8, 7>(1) {
+ //^^^^^ [Ar<u8, 7>; 9]
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_4() {
+ check_types(
+ r#"
+ trait Bar<const C: usize> {
+ fn baz(&self) -> [i32; C];
+ }
+
+ fn foo(x: &dyn Bar<2>) {
+ x.baz();
+ //^^^^^^^ [i32; 2]
+ }
+ "#,
+ )
+}
+
+#[test]
+fn gat_crash_1() {
+ cov_mark::check!(ignore_gats);
+ check_no_mismatches(
+ r#"
+trait ATrait {}
+
+trait Crash {
+ type Member<const N: usize>: ATrait;
+ fn new<const N: usize>() -> Self::Member<N>;
+}
+
+fn test<T: Crash>() {
+ T::new();
+}
+"#,
+ );
+}
+
+#[test]
+fn gat_crash_2() {
+ check_no_mismatches(
+ r#"
+pub struct InlineStorage {}
+
+pub struct InlineStorageHandle<T: ?Sized> {}
+
+pub unsafe trait Storage {
+ type Handle<T: ?Sized>;
+ fn create<T: ?Sized>() -> Self::Handle<T>;
+}
+
+unsafe impl Storage for InlineStorage {
+ type Handle<T: ?Sized> = InlineStorageHandle<T>;
+}
+"#,
+ );
+}
+
+#[test]
+fn cfgd_out_self_param() {
+ cov_mark::check!(cfgd_out_self_param);
+ check_no_mismatches(
+ r#"
+struct S;
+impl S {
+ fn f(#[cfg(never)] &self) {}
+}
+
+fn f(s: S) {
+ s.f();
+}
+"#,
+ );
+}
+
+#[test]
+fn rust_161_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Drop + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+pub trait Destruct {}
+
+impl<T: ?Sized> const Destruct for T {}
+"#,
+ );
+}
+
+#[test]
+fn rust_162_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+#[lang = "destruct"]
+pub trait Destruct {}
+"#,
+ );
+}
+
+#[test]
+fn tuple_struct_pattern_with_unmatched_args_crash() {
+ check_infer(
+ r#"
+struct S(usize);
+fn main() {
+ let S(.., a, b) = S(1);
+ let (.., a, b) = (1,);
+}
+ "#,
+ expect![[r#"
+ 27..85 '{ ...1,); }': ()
+ 37..48 'S(.., a, b)': S
+ 43..44 'a': usize
+ 46..47 'b': {unknown}
+ 51..52 'S': S(usize) -> S
+ 51..55 'S(1)': S
+ 53..54 '1': usize
+ 65..75 '(.., a, b)': (i32, {unknown})
+ 70..71 'a': i32
+ 73..74 'b': {unknown}
+ 78..82 '(1,)': (i32,)
+ 79..80 '1': i32
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
new file mode 100644
index 000000000..5b08f5521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -0,0 +1,3072 @@
+use expect_test::expect;
+
+use super::{check, check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_box() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; 1]>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod prelude {}
+
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized> {
+ inner: *mut T,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_box_with_allocator() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; 1], {unknown}>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized, A: Allocator> {
+ inner: *mut T,
+ allocator: A,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_adt_self() {
+ check_types(
+ r#"
+enum Nat { Succ(Self), Demo(Nat), Zero }
+
+fn test() {
+ let foo: Nat = Nat::Zero;
+ if let Nat::Succ(x) = foo {
+ x;
+ } //^ Nat
+}
+"#,
+ );
+}
+
+#[test]
+fn self_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ impl S<u32> {
+ fn foo() {
+ Self { x: 1 };
+ }
+ }
+ "#,
+ expect![[r#"
+ 49..79 '{ ... }': ()
+ 59..72 'Self { x: 1 }': S<u32>
+ 69..70 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn type_alias_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ type SS = S<u32>;
+
+ fn foo() {
+ SS { x: 1 };
+ }
+ "#,
+ expect![[r#"
+ 50..70 '{ ...1 }; }': ()
+ 56..67 'SS { x: 1 }': S<u32>
+ 64..65 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_ranges() {
+ check_types(
+ r#"
+//- minicore: range
+fn test() {
+ let a = ..;
+ let b = 1..;
+ let c = ..2u32;
+ let d = 1..2usize;
+ let e = ..=10;
+ let f = 'a'..='z';
+
+ let t = (a, b, c, d, e, f);
+ t;
+} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
+"#,
+ );
+}
+
+#[test]
+fn infer_while_let() {
+ check_types(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(x) = foo {
+ x;
+ } //^ f32
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_basics() {
+ check_infer(
+ r#"
+fn test(a: u32, b: isize, c: !, d: &str) {
+ a;
+ b;
+ c;
+ d;
+ 1usize;
+ 1isize;
+ "test";
+ 1.0f32;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': u32
+ 16..17 'b': isize
+ 26..27 'c': !
+ 32..33 'd': &str
+ 41..120 '{ ...f32; }': ()
+ 47..48 'a': u32
+ 54..55 'b': isize
+ 61..62 'c': !
+ 68..69 'd': &str
+ 75..81 '1usize': usize
+ 87..93 '1isize': isize
+ 99..105 '"test"': &str
+ 111..117 '1.0f32': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_let() {
+ check_infer(
+ r#"
+fn test() {
+ let a = 1isize;
+ let b: usize = 1;
+ let c = b;
+ let d: u32;
+ let e;
+ let f: i32 = e;
+}
+"#,
+ expect![[r#"
+ 10..117 '{ ...= e; }': ()
+ 20..21 'a': isize
+ 24..30 '1isize': isize
+ 40..41 'b': usize
+ 51..52 '1': usize
+ 62..63 'c': usize
+ 66..67 'b': usize
+ 77..78 'd': u32
+ 93..94 'e': i32
+ 104..105 'f': i32
+ 113..114 'e': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paths() {
+ check_infer(
+ r#"
+fn a() -> u32 { 1 }
+
+mod b {
+ fn c() -> u32 { 1 }
+}
+
+fn test() {
+ a();
+ b::c();
+}
+"#,
+ expect![[r#"
+ 14..19 '{ 1 }': u32
+ 16..17 '1': u32
+ 47..52 '{ 1 }': u32
+ 49..50 '1': u32
+ 66..90 '{ ...c(); }': ()
+ 72..73 'a': fn a() -> u32
+ 72..75 'a()': u32
+ 81..85 'b::c': fn c() -> u32
+ 81..87 'b::c()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_type() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn foo() -> i32 { 1 }
+}
+
+fn test() {
+ S::foo();
+ <S>::foo();
+}
+"#,
+ expect![[r#"
+ 40..45 '{ 1 }': i32
+ 42..43 '1': i32
+ 59..92 '{ ...o(); }': ()
+ 65..71 'S::foo': fn foo() -> i32
+ 65..73 'S::foo()': i32
+ 79..87 '<S>::foo': fn foo() -> i32
+ 79..89 '<S>::foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+ c: C,
+}
+struct B;
+struct C(usize);
+
+fn test() {
+ let c = C(1);
+ B;
+ let a: A = A { b: B, c: C(1) };
+ a.b;
+ a.c;
+}
+"#,
+ expect![[r#"
+ 71..153 '{ ...a.c; }': ()
+ 81..82 'c': C
+ 85..86 'C': C(usize) -> C
+ 85..89 'C(1)': C
+ 87..88 '1': usize
+ 95..96 'B': B
+ 106..107 'a': A
+ 113..132 'A { b:...C(1) }': A
+ 120..121 'B': B
+ 126..127 'C': C(usize) -> C
+ 126..130 'C(1)': C
+ 128..129 '1': usize
+ 138..139 'a': A
+ 138..141 'a.b': B
+ 147..148 'a': A
+ 147..150 'a.c': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_enum() {
+ check_infer(
+ r#"
+enum E {
+ V1 { field: u32 },
+ V2
+}
+fn test() {
+ E::V1 { field: 1 };
+ E::V2;
+}
+"#,
+ expect![[r#"
+ 51..89 '{ ...:V2; }': ()
+ 57..75 'E::V1 ...d: 1 }': E
+ 72..73 '1': u32
+ 81..86 'E::V2': E
+ "#]],
+ );
+}
+
+#[test]
+fn infer_union() {
+ check_infer(
+ r#"
+union MyUnion {
+ foo: u32,
+ bar: f32,
+}
+
+fn test() {
+ let u = MyUnion { foo: 0 };
+ unsafe { baz(u); }
+ let u = MyUnion { bar: 0.0 };
+ unsafe { baz(u); }
+}
+
+unsafe fn baz(u: MyUnion) {
+ let inner = u.foo;
+ let inner = u.bar;
+}
+"#,
+ expect![[r#"
+ 57..172 '{ ...); } }': ()
+ 67..68 'u': MyUnion
+ 71..89 'MyUnio...o: 0 }': MyUnion
+ 86..87 '0': u32
+ 95..113 'unsafe...(u); }': ()
+ 95..113 'unsafe...(u); }': ()
+ 104..107 'baz': fn baz(MyUnion)
+ 104..110 'baz(u)': ()
+ 108..109 'u': MyUnion
+ 122..123 'u': MyUnion
+ 126..146 'MyUnio... 0.0 }': MyUnion
+ 141..144 '0.0': f32
+ 152..170 'unsafe...(u); }': ()
+ 152..170 'unsafe...(u); }': ()
+ 161..164 'baz': fn baz(MyUnion)
+ 161..167 'baz(u)': ()
+ 165..166 'u': MyUnion
+ 188..189 'u': MyUnion
+ 200..249 '{ ...bar; }': ()
+ 210..215 'inner': u32
+ 218..219 'u': MyUnion
+ 218..223 'u.foo': u32
+ 233..238 'inner': f32
+ 241..242 'u': MyUnion
+ 241..246 'u.bar': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_refs() {
+ check_infer(
+ r#"
+fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
+ a;
+ *a;
+ &a;
+ &mut a;
+ b;
+ *b;
+ &b;
+ c;
+ *c;
+ d;
+ *d;
+}
+ "#,
+ expect![[r#"
+ 8..9 'a': &u32
+ 17..18 'b': &mut u32
+ 30..31 'c': *const u32
+ 45..46 'd': *mut u32
+ 58..149 '{ ... *d; }': ()
+ 64..65 'a': &u32
+ 71..73 '*a': u32
+ 72..73 'a': &u32
+ 79..81 '&a': &&u32
+ 80..81 'a': &u32
+ 87..93 '&mut a': &mut &u32
+ 92..93 'a': &u32
+ 99..100 'b': &mut u32
+ 106..108 '*b': u32
+ 107..108 'b': &mut u32
+ 114..116 '&b': &&mut u32
+ 115..116 'b': &mut u32
+ 122..123 'c': *const u32
+ 129..131 '*c': u32
+ 130..131 'c': *const u32
+ 137..138 'd': *mut u32
+ 144..146 '*d': u32
+ 145..146 'd': *mut u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_raw_ref() {
+ check_infer(
+ r#"
+fn test(a: i32) {
+ &raw mut a;
+ &raw const a;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': i32
+ 16..53 '{ ...t a; }': ()
+ 22..32 '&raw mut a': *mut i32
+ 31..32 'a': i32
+ 38..50 '&raw const a': *const i32
+ 49..50 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literals() {
+ check_infer(
+ r##"
+ fn test() {
+ 5i32;
+ 5f32;
+ 5f64;
+ "hello";
+ b"bytes";
+ 'c';
+ b'b';
+ 3.14;
+ 5000;
+ false;
+ true;
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#;
+ br#"yolo"#;
+ let a = b"a\x20b\
+ c";
+ let b = br"g\
+h";
+ let c = br#"x"\"yb"#;
+ }
+ "##,
+ expect![[r##"
+ 18..478 '{ ... }': ()
+ 32..36 '5i32': i32
+ 50..54 '5f32': f32
+ 68..72 '5f64': f64
+ 86..93 '"hello"': &str
+ 107..115 'b"bytes"': &[u8; 5]
+ 129..132 ''c'': char
+ 146..150 'b'b'': u8
+ 164..168 '3.14': f64
+ 182..186 '5000': i32
+ 200..205 'false': bool
+ 219..223 'true': bool
+ 237..333 'r#" ... "#': &str
+ 347..357 'br#"yolo"#': &[u8; 4]
+ 375..376 'a': &[u8; 4]
+ 379..403 'b"a\x2... c"': &[u8; 4]
+ 421..422 'b': &[u8; 4]
+ 425..433 'br"g\ h"': &[u8; 4]
+ 451..452 'c': &[u8; 6]
+ 455..467 'br#"x"\"yb"#': &[u8; 6]
+ "##]],
+ );
+}
+
+#[test]
+fn infer_unary_op() {
+ check_infer(
+ r#"
+enum SomeType {}
+
+fn test(x: SomeType) {
+ let b = false;
+ let c = !b;
+ let a = 100;
+ let d: i128 = -a;
+ let e = -100;
+ let f = !!!true;
+ let g = !42;
+ let h = !10u32;
+ let j = !a;
+ -3.14;
+ !3;
+ -x;
+ !x;
+ -"hello";
+ !"hello";
+}
+"#,
+ expect![[r#"
+ 26..27 'x': SomeType
+ 39..271 '{ ...lo"; }': ()
+ 49..50 'b': bool
+ 53..58 'false': bool
+ 68..69 'c': bool
+ 72..74 '!b': bool
+ 73..74 'b': bool
+ 84..85 'a': i128
+ 88..91 '100': i128
+ 101..102 'd': i128
+ 111..113 '-a': i128
+ 112..113 'a': i128
+ 123..124 'e': i32
+ 127..131 '-100': i32
+ 128..131 '100': i32
+ 141..142 'f': bool
+ 145..152 '!!!true': bool
+ 146..152 '!!true': bool
+ 147..152 '!true': bool
+ 148..152 'true': bool
+ 162..163 'g': i32
+ 166..169 '!42': i32
+ 167..169 '42': i32
+ 179..180 'h': u32
+ 183..189 '!10u32': u32
+ 184..189 '10u32': u32
+ 199..200 'j': i128
+ 203..205 '!a': i128
+ 204..205 'a': i128
+ 211..216 '-3.14': f64
+ 212..216 '3.14': f64
+ 222..224 '!3': i32
+ 223..224 '3': i32
+ 230..232 '-x': {unknown}
+ 231..232 'x': SomeType
+ 238..240 '!x': {unknown}
+ 239..240 'x': SomeType
+ 246..254 '-"hello"': {unknown}
+ 247..254 '"hello"': &str
+ 260..268 '!"hello"': {unknown}
+ 261..268 '"hello"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_backwards() {
+ check_infer(
+ r#"
+fn takes_u32(x: u32) {}
+
+struct S { i32_field: i32 }
+
+fn test() -> &mut &f64 {
+ let a = unknown_function();
+ takes_u32(a);
+ let b = unknown_function();
+ S { i32_field: b };
+ let c = unknown_function();
+ &mut &c
+}
+"#,
+ expect![[r#"
+ 13..14 'x': u32
+ 21..23 '{}': ()
+ 77..230 '{ ...t &c }': &mut &f64
+ 87..88 'a': u32
+ 91..107 'unknow...nction': {unknown}
+ 91..109 'unknow...tion()': u32
+ 115..124 'takes_u32': fn takes_u32(u32)
+ 115..127 'takes_u32(a)': ()
+ 125..126 'a': u32
+ 137..138 'b': i32
+ 141..157 'unknow...nction': {unknown}
+ 141..159 'unknow...tion()': i32
+ 165..183 'S { i3...d: b }': S
+ 180..181 'b': i32
+ 193..194 'c': f64
+ 197..213 'unknow...nction': {unknown}
+ 197..215 'unknow...tion()': f64
+ 221..228 '&mut &c': &mut &f64
+ 226..228 '&c': &f64
+ 227..228 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn test(&self) {
+ self;
+ }
+ fn test2(self: &Self) {
+ self;
+ }
+ fn test3() -> Self {
+ S {}
+ }
+ fn test4() -> Self {
+ Self {}
+ }
+}
+"#,
+ expect![[r#"
+ 33..37 'self': &S
+ 39..60 '{ ... }': ()
+ 49..53 'self': &S
+ 74..78 'self': &S
+ 87..108 '{ ... }': ()
+ 97..101 'self': &S
+ 132..152 '{ ... }': S
+ 142..146 'S {}': S
+ 176..199 '{ ... }': S
+ 186..193 'Self {}': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self_as_path() {
+ check_infer(
+ r#"
+struct S1;
+struct S2(isize);
+enum E {
+ V1,
+ V2(u32),
+}
+
+impl S1 {
+ fn test() {
+ Self;
+ }
+}
+impl S2 {
+ fn test() {
+ Self(1);
+ }
+}
+impl E {
+ fn test() {
+ Self::V1;
+ Self::V2(1);
+ }
+}
+"#,
+ expect![[r#"
+ 86..107 '{ ... }': ()
+ 96..100 'Self': S1
+ 134..158 '{ ... }': ()
+ 144..148 'Self': S2(isize) -> S2
+ 144..151 'Self(1)': S2
+ 149..150 '1': isize
+ 184..230 '{ ... }': ()
+ 194..202 'Self::V1': E
+ 212..220 'Self::V2': V2(u32) -> E
+ 212..223 'Self::V2(1)': E
+ 221..222 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_binary_op() {
+ check_infer(
+ r#"
+fn f(x: bool) -> i32 {
+ 0i32
+}
+
+fn test() -> bool {
+ let x = a && b;
+ let y = true || false;
+ let z = x == y;
+ let t = x != y;
+ let minus_forty: isize = -40isize;
+ let h = minus_forty <= CONST_2;
+ let c = f(z || y) + 5;
+ let d = b;
+ let g = minus_forty ^= i;
+ let ten: usize = 10;
+ let ten_is_eleven = ten == some_num;
+
+ ten < 3
+}
+"#,
+ expect![[r#"
+ 5..6 'x': bool
+ 21..33 '{ 0i32 }': i32
+ 27..31 '0i32': i32
+ 53..369 '{ ... < 3 }': bool
+ 63..64 'x': bool
+ 67..68 'a': bool
+ 67..73 'a && b': bool
+ 72..73 'b': bool
+ 83..84 'y': bool
+ 87..91 'true': bool
+ 87..100 'true || false': bool
+ 95..100 'false': bool
+ 110..111 'z': bool
+ 114..115 'x': bool
+ 114..120 'x == y': bool
+ 119..120 'y': bool
+ 130..131 't': bool
+ 134..135 'x': bool
+ 134..140 'x != y': bool
+ 139..140 'y': bool
+ 150..161 'minus_forty': isize
+ 171..179 '-40isize': isize
+ 172..179 '40isize': isize
+ 189..190 'h': bool
+ 193..204 'minus_forty': isize
+ 193..215 'minus_...ONST_2': bool
+ 208..215 'CONST_2': isize
+ 225..226 'c': i32
+ 229..230 'f': fn f(bool) -> i32
+ 229..238 'f(z || y)': i32
+ 229..242 'f(z || y) + 5': i32
+ 231..232 'z': bool
+ 231..237 'z || y': bool
+ 236..237 'y': bool
+ 241..242 '5': i32
+ 252..253 'd': {unknown}
+ 256..257 'b': {unknown}
+ 267..268 'g': ()
+ 271..282 'minus_forty': isize
+ 271..287 'minus_...y ^= i': ()
+ 286..287 'i': isize
+ 297..300 'ten': usize
+ 310..312 '10': usize
+ 322..335 'ten_is_eleven': bool
+ 338..341 'ten': usize
+ 338..353 'ten == some_num': bool
+ 345..353 'some_num': usize
+ 360..363 'ten': usize
+ 360..367 'ten < 3': bool
+ 366..367 '3': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_shift_op() {
+ check_infer(
+ r#"
+fn test() {
+ 1u32 << 5u8;
+ 1u32 >> 5u8;
+}
+"#,
+ expect![[r#"
+ 10..47 '{ ...5u8; }': ()
+ 16..20 '1u32': u32
+ 16..27 '1u32 << 5u8': u32
+ 24..27 '5u8': u8
+ 33..37 '1u32': u32
+ 33..44 '1u32 >> 5u8': u32
+ 41..44 '5u8': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_field_autoderef() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+}
+struct B;
+
+fn test1(a: A) {
+ let a1 = a;
+ a1.b;
+ let a2 = &a;
+ a2.b;
+ let a3 = &mut a;
+ a3.b;
+ let a4 = &&&&&&&a;
+ a4.b;
+ let a5 = &mut &&mut &&mut a;
+ a5.b;
+}
+
+fn test2(a1: *const A, a2: *mut A) {
+ a1.b;
+ a2.b;
+}
+"#,
+ expect![[r#"
+ 43..44 'a': A
+ 49..212 '{ ...5.b; }': ()
+ 59..61 'a1': A
+ 64..65 'a': A
+ 71..73 'a1': A
+ 71..75 'a1.b': B
+ 85..87 'a2': &A
+ 90..92 '&a': &A
+ 91..92 'a': A
+ 98..100 'a2': &A
+ 98..102 'a2.b': B
+ 112..114 'a3': &mut A
+ 117..123 '&mut a': &mut A
+ 122..123 'a': A
+ 129..131 'a3': &mut A
+ 129..133 'a3.b': B
+ 143..145 'a4': &&&&&&&A
+ 148..156 '&&&&&&&a': &&&&&&&A
+ 149..156 '&&&&&&a': &&&&&&A
+ 150..156 '&&&&&a': &&&&&A
+ 151..156 '&&&&a': &&&&A
+ 152..156 '&&&a': &&&A
+ 153..156 '&&a': &&A
+ 154..156 '&a': &A
+ 155..156 'a': A
+ 162..164 'a4': &&&&&&&A
+ 162..166 'a4.b': B
+ 176..178 'a5': &mut &&mut &&mut A
+ 181..199 '&mut &...&mut a': &mut &&mut &&mut A
+ 186..199 '&&mut &&mut a': &&mut &&mut A
+ 187..199 '&mut &&mut a': &mut &&mut A
+ 192..199 '&&mut a': &&mut A
+ 193..199 '&mut a': &mut A
+ 198..199 'a': A
+ 205..207 'a5': &mut &&mut &&mut A
+ 205..209 'a5.b': B
+ 223..225 'a1': *const A
+ 237..239 'a2': *mut A
+ 249..272 '{ ...2.b; }': ()
+ 255..257 'a1': *const A
+ 255..259 'a1.b': B
+ 265..267 'a2': *mut A
+ 265..269 'a2.b': B
+ "#]],
+ );
+}
+
+#[test]
+fn infer_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(T);
+
+impl<T> A<T> {
+ fn foo(&self) -> &T {
+ &self.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test() {
+ let t = A::foo(&&B(B(A(42))));
+}
+"#,
+ expect![[r#"
+ 66..70 'self': &A<T>
+ 78..101 '{ ... }': &T
+ 88..95 '&self.0': &T
+ 89..93 'self': &A<T>
+ 89..95 'self.0': T
+ 182..186 'self': &B<T>
+ 205..228 '{ ... }': &T
+ 215..222 '&self.0': &T
+ 216..220 'self': &B<T>
+ 216..222 'self.0': T
+ 242..280 '{ ...))); }': ()
+ 252..253 't': &i32
+ 256..262 'A::foo': fn foo<i32>(&A<i32>) -> &i32
+ 256..277 'A::foo...42))))': &i32
+ 263..276 '&&B(B(A(42)))': &&B<B<A<i32>>>
+ 264..276 '&B(B(A(42)))': &B<B<A<i32>>>
+ 265..266 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 265..276 'B(B(A(42)))': B<B<A<i32>>>
+ 267..268 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 267..275 'B(A(42))': B<A<i32>>
+ 269..270 'A': A<i32>(i32) -> A<i32>
+ 269..274 'A(42)': A<i32>
+ 271..273 '42': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_method_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(*mut T);
+
+impl<T> A<T> {
+ fn foo(&self, x: &A<T>) -> &T {
+ &*x.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test(a: A<i32>) {
+ let t = A(0 as *mut _).foo(&&B(B(a)));
+}
+"#,
+ expect![[r#"
+ 71..75 'self': &A<T>
+ 77..78 'x': &A<T>
+ 93..114 '{ ... }': &T
+ 103..108 '&*x.0': &T
+ 104..108 '*x.0': T
+ 105..106 'x': &A<T>
+ 105..108 'x.0': *mut T
+ 195..199 'self': &B<T>
+ 218..241 '{ ... }': &T
+ 228..235 '&self.0': &T
+ 229..233 'self': &B<T>
+ 229..235 'self.0': T
+ 253..254 'a': A<i32>
+ 264..310 '{ ...))); }': ()
+ 274..275 't': &i32
+ 278..279 'A': A<i32>(*mut i32) -> A<i32>
+ 278..292 'A(0 as *mut _)': A<i32>
+ 278..307 'A(0 as...B(a)))': &i32
+ 280..281 '0': i32
+ 280..291 '0 as *mut _': *mut i32
+ 297..306 '&&B(B(a))': &&B<B<A<i32>>>
+ 298..306 '&B(B(a))': &B<B<A<i32>>>
+ 299..300 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 299..306 'B(B(a))': B<B<A<i32>>>
+ 301..302 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 301..305 'B(a)': B<A<i32>>
+ 303..304 'a': A<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_elseif() {
+ check_infer(
+ r#"
+struct Foo { field: i32 }
+fn main(foo: Foo) {
+ if true {
+
+ } else if false {
+ foo.field
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'foo': Foo
+ 44..108 '{ ... } }': ()
+ 50..106 'if tru... }': ()
+ 53..57 'true': bool
+ 58..66 '{ }': ()
+ 72..106 'if fal... }': ()
+ 75..80 'false': bool
+ 81..106 '{ ... }': ()
+ 91..94 'foo': Foo
+ 91..100 'foo.field': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_if_match_with_return() {
+ check_infer(
+ r#"
+fn foo() {
+ let _x1 = if true {
+ 1
+ } else {
+ return;
+ };
+ let _x2 = if true {
+ 2
+ } else {
+ return
+ };
+ let _x3 = match true {
+ true => 3,
+ _ => {
+ return;
+ }
+ };
+ let _x4 = match true {
+ true => 4,
+ _ => return
+ };
+}
+"#,
+ expect![[r#"
+ 9..322 '{ ... }; }': ()
+ 19..22 '_x1': i32
+ 25..79 'if tru... }': i32
+ 28..32 'true': bool
+ 33..50 '{ ... }': i32
+ 43..44 '1': i32
+ 56..79 '{ ... }': i32
+ 66..72 'return': !
+ 89..92 '_x2': i32
+ 95..148 'if tru... }': i32
+ 98..102 'true': bool
+ 103..120 '{ ... }': i32
+ 113..114 '2': i32
+ 126..148 '{ ... }': !
+ 136..142 'return': !
+ 158..161 '_x3': i32
+ 164..246 'match ... }': i32
+ 170..174 'true': bool
+ 185..189 'true': bool
+ 185..189 'true': bool
+ 193..194 '3': i32
+ 204..205 '_': bool
+ 209..240 '{ ... }': i32
+ 223..229 'return': !
+ 256..259 '_x4': i32
+ 262..319 'match ... }': i32
+ 268..272 'true': bool
+ 283..287 'true': bool
+ 283..287 'true': bool
+ 291..292 '4': i32
+ 302..303 '_': bool
+ 307..313 'return': !
+ "#]],
+ )
+}
+
+#[test]
+fn infer_inherent_method() {
+ check_infer(
+ r#"
+ struct A;
+
+ impl A {
+ fn foo(self, x: u32) -> i32 {}
+ }
+
+ mod b {
+ impl super::A {
+ pub fn bar(&self, x: u64) -> i64 {}
+ }
+ }
+
+ fn test(a: A) {
+ a.foo(1);
+ (&a).bar(1);
+ a.bar(1);
+ }
+ "#,
+ expect![[r#"
+ 31..35 'self': A
+ 37..38 'x': u32
+ 52..54 '{}': i32
+ 106..110 'self': &A
+ 112..113 'x': u64
+ 127..129 '{}': i64
+ 147..148 'a': A
+ 153..201 '{ ...(1); }': ()
+ 159..160 'a': A
+ 159..167 'a.foo(1)': i32
+ 165..166 '1': u32
+ 173..184 '(&a).bar(1)': i64
+ 174..176 '&a': &A
+ 175..176 'a': A
+ 182..183 '1': u64
+ 190..191 'a': A
+ 190..198 'a.bar(1)': i64
+ 196..197 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inherent_method_str() {
+ check_infer(
+ r#"
+ #[lang = "str"]
+ impl str {
+ fn foo(&self) -> i32 {}
+ }
+
+ fn test() {
+ "foo".foo();
+ }
+ "#,
+ expect![[r#"
+ 39..43 'self': &str
+ 52..54 '{}': i32
+ 68..88 '{ ...o(); }': ()
+ 74..79 '"foo"': &str
+ 74..85 '"foo".foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a: (u32, &str) = (1, "a");
+ let b = (a, x);
+ let c = (y, x);
+ let d = (c, x);
+ let e = (1, "e");
+ let f = (e, "d");
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..169 '{ ...d"); }': ()
+ 37..38 'a': (u32, &str)
+ 54..62 '(1, "a")': (u32, &str)
+ 55..56 '1': u32
+ 58..61 '"a"': &str
+ 72..73 'b': ((u32, &str), &str)
+ 76..82 '(a, x)': ((u32, &str), &str)
+ 77..78 'a': (u32, &str)
+ 80..81 'x': &str
+ 92..93 'c': (isize, &str)
+ 96..102 '(y, x)': (isize, &str)
+ 97..98 'y': isize
+ 100..101 'x': &str
+ 112..113 'd': ((isize, &str), &str)
+ 116..122 '(c, x)': ((isize, &str), &str)
+ 117..118 'c': (isize, &str)
+ 120..121 'x': &str
+ 132..133 'e': (i32, &str)
+ 136..144 '(1, "e")': (i32, &str)
+ 137..138 '1': i32
+ 140..143 '"e"': &str
+ 154..155 'f': ((i32, &str), &str)
+ 158..166 '(e, "d")': ((i32, &str), &str)
+ 159..160 'e': (i32, &str)
+ 162..165 '"d"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a = [x];
+ let b = [a, a];
+ let c = [b, b];
+
+ let d = [y, 1, 2, 3];
+ let d = [1, y, 2, 3];
+ let e = [y];
+ let f = [d, d];
+ let g = [e, e];
+
+ let h = [1, 2];
+ let i = ["a", "b"];
+
+ let b = [a, ["b"]];
+ let x: [u8; 0] = [];
+ let y: [u8; 2+2] = [1,2,3,4];
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..326 '{ ...,4]; }': ()
+ 37..38 'a': [&str; 1]
+ 41..44 '[x]': [&str; 1]
+ 42..43 'x': &str
+ 54..55 'b': [[&str; 1]; 2]
+ 58..64 '[a, a]': [[&str; 1]; 2]
+ 59..60 'a': [&str; 1]
+ 62..63 'a': [&str; 1]
+ 74..75 'c': [[[&str; 1]; 2]; 2]
+ 78..84 '[b, b]': [[[&str; 1]; 2]; 2]
+ 79..80 'b': [[&str; 1]; 2]
+ 82..83 'b': [[&str; 1]; 2]
+ 95..96 'd': [isize; 4]
+ 99..111 '[y, 1, 2, 3]': [isize; 4]
+ 100..101 'y': isize
+ 103..104 '1': isize
+ 106..107 '2': isize
+ 109..110 '3': isize
+ 121..122 'd': [isize; 4]
+ 125..137 '[1, y, 2, 3]': [isize; 4]
+ 126..127 '1': isize
+ 129..130 'y': isize
+ 132..133 '2': isize
+ 135..136 '3': isize
+ 147..148 'e': [isize; 1]
+ 151..154 '[y]': [isize; 1]
+ 152..153 'y': isize
+ 164..165 'f': [[isize; 4]; 2]
+ 168..174 '[d, d]': [[isize; 4]; 2]
+ 169..170 'd': [isize; 4]
+ 172..173 'd': [isize; 4]
+ 184..185 'g': [[isize; 1]; 2]
+ 188..194 '[e, e]': [[isize; 1]; 2]
+ 189..190 'e': [isize; 1]
+ 192..193 'e': [isize; 1]
+ 205..206 'h': [i32; 2]
+ 209..215 '[1, 2]': [i32; 2]
+ 210..211 '1': i32
+ 213..214 '2': i32
+ 225..226 'i': [&str; 2]
+ 229..239 '["a", "b"]': [&str; 2]
+ 230..233 '"a"': &str
+ 235..238 '"b"': &str
+ 250..251 'b': [[&str; 1]; 2]
+ 254..264 '[a, ["b"]]': [[&str; 1]; 2]
+ 255..256 'a': [&str; 1]
+ 258..263 '["b"]': [&str; 1]
+ 259..262 '"b"': &str
+ 274..275 'x': [u8; 0]
+ 287..289 '[]': [u8; 0]
+ 299..300 'y': [u8; 4]
+ 314..323 '[1,2,3,4]': [u8; 4]
+ 315..316 '1': u8
+ 317..318 '2': u8
+ 319..320 '3': u8
+ 321..322 '4': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ fn test(a1: A<u32>, i: i32) {
+ a1.x;
+ let a2 = A { x: i };
+ a2.x;
+ let a3 = A::<i128> { x: 1 };
+ a3.x;
+ }
+ "#,
+ expect![[r#"
+ 35..37 'a1': A<u32>
+ 47..48 'i': i32
+ 55..146 '{ ...3.x; }': ()
+ 61..63 'a1': A<u32>
+ 61..65 'a1.x': u32
+ 75..77 'a2': A<i32>
+ 80..90 'A { x: i }': A<i32>
+ 87..88 'i': i32
+ 96..98 'a2': A<i32>
+ 96..100 'a2.x': i32
+ 110..112 'a3': A<i128>
+ 115..133 'A::<i1...x: 1 }': A<i128>
+ 130..131 '1': i128
+ 139..141 'a3': A<i128>
+ 139..143 'a3.x': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ A(42);
+ A(42u128);
+ Some("x");
+ Option::Some("x");
+ None;
+ let x: Option<i64> = None;
+ }
+ "#,
+ expect![[r#"
+ 75..183 '{ ...one; }': ()
+ 81..82 'A': A<i32>(i32) -> A<i32>
+ 81..86 'A(42)': A<i32>
+ 83..85 '42': i32
+ 92..93 'A': A<u128>(u128) -> A<u128>
+ 92..101 'A(42u128)': A<u128>
+ 94..100 '42u128': u128
+ 107..111 'Some': Some<&str>(&str) -> Option<&str>
+ 107..116 'Some("x")': Option<&str>
+ 112..115 '"x"': &str
+ 122..134 'Option::Some': Some<&str>(&str) -> Option<&str>
+ 122..139 'Option...e("x")': Option<&str>
+ 135..138 '"x"': &str
+ 145..149 'None': Option<{unknown}>
+ 159..160 'x': Option<i64>
+ 176..180 'None': Option<i64>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_function_generics() {
+ check_infer(
+ r#"
+ fn id<T>(t: T) -> T { t }
+
+ fn test() {
+ id(1u32);
+ id::<i128>(1);
+ let x: u64 = id(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 't': T
+ 20..25 '{ t }': T
+ 22..23 't': T
+ 37..97 '{ ...(1); }': ()
+ 43..45 'id': fn id<u32>(u32) -> u32
+ 43..51 'id(1u32)': u32
+ 46..50 '1u32': u32
+ 57..67 'id::<i128>': fn id<i128>(i128) -> i128
+ 57..70 'id::<i128>(1)': i128
+ 68..69 '1': i128
+ 80..81 'x': u64
+ 89..91 'id': fn id<u64>(u64) -> u64
+ 89..94 'id(1)': u64
+ 92..93 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_basic() {
+ check_infer(
+ r#"
+ struct A<T1, T2> {
+ x: T1,
+ y: T2,
+ }
+ impl<Y, X> A<X, Y> {
+ fn x(self) -> X {
+ self.x
+ }
+ fn y(self) -> Y {
+ self.y
+ }
+ fn z<T>(self, t: T) -> (X, Y, T) {
+ (self.x, self.y, t)
+ }
+ }
+
+ fn test() -> i128 {
+ let a = A { x: 1u64, y: 1i64 };
+ a.x();
+ a.y();
+ a.z(1i128);
+ a.z::<u128>(1);
+ }
+ "#,
+ expect![[r#"
+ 73..77 'self': A<X, Y>
+ 84..106 '{ ... }': X
+ 94..98 'self': A<X, Y>
+ 94..100 'self.x': X
+ 116..120 'self': A<X, Y>
+ 127..149 '{ ... }': Y
+ 137..141 'self': A<X, Y>
+ 137..143 'self.y': Y
+ 162..166 'self': A<X, Y>
+ 168..169 't': T
+ 187..222 '{ ... }': (X, Y, T)
+ 197..216 '(self.....y, t)': (X, Y, T)
+ 198..202 'self': A<X, Y>
+ 198..204 'self.x': X
+ 206..210 'self': A<X, Y>
+ 206..212 'self.y': Y
+ 214..215 't': T
+ 244..341 '{ ...(1); }': i128
+ 254..255 'a': A<u64, i64>
+ 258..280 'A { x:...1i64 }': A<u64, i64>
+ 265..269 '1u64': u64
+ 274..278 '1i64': i64
+ 286..287 'a': A<u64, i64>
+ 286..291 'a.x()': u64
+ 297..298 'a': A<u64, i64>
+ 297..302 'a.y()': i64
+ 308..309 'a': A<u64, i64>
+ 308..318 'a.z(1i128)': (u64, i64, i128)
+ 312..317 '1i128': i128
+ 324..325 'a': A<u64, i64>
+ 324..338 'a.z::<u128>(1)': (u64, i64, u128)
+ 336..337 '1': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_with_autoderef() {
+ check_infer(
+ r#"
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+ impl<T> Option<T> {
+ fn as_ref(&self) -> Option<&T> {}
+ }
+ fn test(o: Option<u32>) {
+ (&o).as_ref();
+ o.as_ref();
+ }
+ "#,
+ expect![[r#"
+ 77..81 'self': &Option<T>
+ 97..99 '{}': Option<&T>
+ 110..111 'o': Option<u32>
+ 126..164 '{ ...f(); }': ()
+ 132..145 '(&o).as_ref()': Option<&u32>
+ 133..135 '&o': &Option<u32>
+ 134..135 'o': Option<u32>
+ 151..152 'o': Option<u32>
+ 151..161 'o.as_ref()': Option<&u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generic_chain() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+ impl<T2> A<T2> {
+ fn x(self) -> T2 {
+ self.x
+ }
+ }
+ fn id<T>(t: T) -> T { t }
+
+ fn test() -> i128 {
+ let x = 1;
+ let y = id(x);
+ let a = A { x: id(y) };
+ let z = id(a.x);
+ let b = A { x: z };
+ b.x()
+ }
+ "#,
+ expect![[r#"
+ 52..56 'self': A<T2>
+ 64..86 '{ ... }': T2
+ 74..78 'self': A<T2>
+ 74..80 'self.x': T2
+ 98..99 't': T
+ 109..114 '{ t }': T
+ 111..112 't': T
+ 134..254 '{ ....x() }': i128
+ 144..145 'x': i128
+ 148..149 '1': i128
+ 159..160 'y': i128
+ 163..165 'id': fn id<i128>(i128) -> i128
+ 163..168 'id(x)': i128
+ 166..167 'x': i128
+ 178..179 'a': A<i128>
+ 182..196 'A { x: id(y) }': A<i128>
+ 189..191 'id': fn id<i128>(i128) -> i128
+ 189..194 'id(y)': i128
+ 192..193 'y': i128
+ 206..207 'z': i128
+ 210..212 'id': fn id<i128>(i128) -> i128
+ 210..217 'id(a.x)': i128
+ 213..214 'a': A<i128>
+ 213..216 'a.x': i128
+ 227..228 'b': A<i128>
+ 231..241 'A { x: z }': A<i128>
+ 238..239 'z': i128
+ 247..248 'b': A<i128>
+ 247..252 'b.x()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_const() {
+ check_infer(
+ r#"
+ struct Struct;
+
+ impl Struct {
+ const FOO: u32 = 1;
+ }
+
+ enum Enum {}
+
+ impl Enum {
+ const BAR: u32 = 2;
+ }
+
+ trait Trait {
+ const ID: u32;
+ }
+
+ struct TraitTest;
+
+ impl Trait for TraitTest {
+ const ID: u32 = 5;
+ }
+
+ fn test() {
+ let x = Struct::FOO;
+ let y = Enum::BAR;
+ let z = TraitTest::ID;
+ }
+ "#,
+ expect![[r#"
+ 51..52 '1': u32
+ 104..105 '2': u32
+ 212..213 '5': u32
+ 228..306 '{ ...:ID; }': ()
+ 238..239 'x': u32
+ 242..253 'Struct::FOO': u32
+ 263..264 'y': u32
+ 267..276 'Enum::BAR': u32
+ 286..287 'z': u32
+ 290..303 'TraitTest::ID': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_alias() {
+ check_infer(
+ r#"
+ struct A<X, Y> { x: X, y: Y }
+ type Foo = A<u32, i128>;
+ type Bar<T> = A<T, u128>;
+ type Baz<U, V> = A<V, U>;
+ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
+ x.x;
+ x.y;
+ y.x;
+ y.y;
+ z.x;
+ z.y;
+ }
+ mod m {
+ pub enum Enum {
+ Foo(u8),
+ }
+ pub type Alias = Enum;
+ }
+ fn f() {
+ let e = m::Alias::Foo(0);
+ let m::Alias::Foo(x) = &e;
+ }
+ "#,
+ expect![[r#"
+ 115..116 'x': A<u32, i128>
+ 123..124 'y': A<&str, u128>
+ 137..138 'z': A<u8, i8>
+ 153..210 '{ ...z.y; }': ()
+ 159..160 'x': A<u32, i128>
+ 159..162 'x.x': u32
+ 168..169 'x': A<u32, i128>
+ 168..171 'x.y': i128
+ 177..178 'y': A<&str, u128>
+ 177..180 'y.x': &str
+ 186..187 'y': A<&str, u128>
+ 186..189 'y.y': u128
+ 195..196 'z': A<u8, i8>
+ 195..198 'z.x': u8
+ 204..205 'z': A<u8, i8>
+ 204..207 'z.y': i8
+ 298..362 '{ ... &e; }': ()
+ 308..309 'e': Enum
+ 312..325 'm::Alias::Foo': Foo(u8) -> Enum
+ 312..328 'm::Ali...Foo(0)': Enum
+ 326..327 '0': u8
+ 338..354 'm::Ali...Foo(x)': Enum
+ 352..353 'x': &u8
+ 357..359 '&e': &Enum
+ 358..359 'e': Enum
+ "#]],
+ )
+}
+
+#[test]
+fn recursive_type_alias() {
+ check_infer(
+ r#"
+ struct A<X> {}
+ type Foo = Foo;
+ type Bar = A<Bar>;
+ fn test(x: Foo) {}
+ "#,
+ expect![[r#"
+ 58..59 'x': {unknown}
+ 66..68 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_type_param() {
+ check_infer(
+ r#"
+ fn id<T>(x: T) -> T {
+ x
+ }
+
+ fn clone<T>(x: &T) -> T {
+ *x
+ }
+
+ fn test() {
+ let y = 10u32;
+ id(y);
+ let x: bool = clone(z);
+ id::<i128>(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 'x': T
+ 20..29 '{ x }': T
+ 26..27 'x': T
+ 43..44 'x': &T
+ 55..65 '{ *x }': T
+ 61..63 '*x': T
+ 62..63 'x': &T
+ 77..157 '{ ...(1); }': ()
+ 87..88 'y': u32
+ 91..96 '10u32': u32
+ 102..104 'id': fn id<u32>(u32) -> u32
+ 102..107 'id(y)': u32
+ 105..106 'y': u32
+ 117..118 'x': bool
+ 127..132 'clone': fn clone<bool>(&bool) -> bool
+ 127..135 'clone(z)': bool
+ 133..134 'z': &bool
+ 141..151 'id::<i128>': fn id<i128>(i128) -> i128
+ 141..154 'id::<i128>(1)': i128
+ 152..153 '1': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const() {
+ check_infer(
+ r#"
+ struct Foo;
+ impl Foo { const ASSOC_CONST: u32 = 0; }
+ const GLOBAL_CONST: u32 = 101;
+ fn test() {
+ const LOCAL_CONST: u32 = 99;
+ let x = LOCAL_CONST;
+ let z = GLOBAL_CONST;
+ let id = Foo::ASSOC_CONST;
+ }
+ "#,
+ expect![[r#"
+ 48..49 '0': u32
+ 79..82 '101': u32
+ 94..212 '{ ...NST; }': ()
+ 137..138 'x': u32
+ 141..152 'LOCAL_CONST': u32
+ 162..163 'z': u32
+ 166..178 'GLOBAL_CONST': u32
+ 188..190 'id': u32
+ 193..209 'Foo::A..._CONST': u32
+ 125..127 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_static() {
+ check_infer(
+ r#"
+ static GLOBAL_STATIC: u32 = 101;
+ static mut GLOBAL_STATIC_MUT: u32 = 101;
+ fn test() {
+ static LOCAL_STATIC: u32 = 99;
+ static mut LOCAL_STATIC_MUT: u32 = 99;
+ let x = LOCAL_STATIC;
+ let y = LOCAL_STATIC_MUT;
+ let z = GLOBAL_STATIC;
+ let w = GLOBAL_STATIC_MUT;
+ }
+ "#,
+ expect![[r#"
+ 28..31 '101': u32
+ 69..72 '101': u32
+ 84..279 '{ ...MUT; }': ()
+ 172..173 'x': u32
+ 176..188 'LOCAL_STATIC': u32
+ 198..199 'y': u32
+ 202..218 'LOCAL_...IC_MUT': u32
+ 228..229 'z': u32
+ 232..245 'GLOBAL_STATIC': u32
+ 255..256 'w': u32
+ 259..276 'GLOBAL...IC_MUT': u32
+ 117..119 '99': u32
+ 160..162 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_primitive() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn const_eval_array_repeat_expr() {
+ check_types(
+ r#"
+fn main() {
+ const X: usize = 6 - 1;
+ let t = [(); X + 2];
+ //^ [(); 7]
+}"#,
+ );
+}
+
+#[test]
+fn shadowing_primitive_with_inner_items() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ fn inner() {}
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_primitive_by_module() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() {}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ foo();
+ //^^^^^ &str
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_module_by_primitive() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() -> u32 {0}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ str::foo();
+ //^^^^^^^^^^ u32
+}"#,
+ );
+}
+
+// This test is actually testing the shadowing behavior within hir_def. It
+// lives here because the testing infrastructure in hir_def isn't currently
+// capable of asserting the necessary conditions.
+#[test]
+fn should_be_shadowing_imports() {
+ check_types(
+ r#"
+mod a {
+ pub fn foo() -> i8 {0}
+ pub struct foo { a: i8 }
+}
+mod b { pub fn foo () -> u8 {0} }
+mod c { pub struct foo { a: u8 } }
+mod d {
+ pub use super::a::*;
+ pub use super::c::foo;
+ pub use super::b::foo;
+}
+
+fn main() {
+ d::foo();
+ //^^^^^^^^ u8
+ d::foo{a:0};
+ //^^^^^^^^^^^ foo
+}"#,
+ );
+}
+
+#[test]
+fn closure_return() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || -> usize { return 1; };
+ }
+ "#,
+ expect![[r#"
+ 16..58 '{ ...; }; }': u32
+ 26..27 'x': || -> usize
+ 30..55 '|| -> ...n 1; }': || -> usize
+ 42..55 '{ return 1; }': usize
+ 44..52 'return 1': !
+ 51..52 '1': usize
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_unit() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { return; };
+ }
+ "#,
+ expect![[r#"
+ 16..47 '{ ...; }; }': u32
+ 26..27 'x': || -> ()
+ 30..44 '|| { return; }': || -> ()
+ 33..44 '{ return; }': ()
+ 35..41 'return': !
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_inferred() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { "test" };
+ }
+ "#,
+ expect![[r#"
+ 16..46 '{ ..." }; }': u32
+ 26..27 'x': || -> &str
+ 30..43 '|| { "test" }': || -> &str
+ 33..43 '{ "test" }': &str
+ 35..41 '"test"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn fn_pointer_return() {
+ check_infer(
+ r#"
+ struct Vtable {
+ method: fn(),
+ }
+
+ fn main() {
+ let vtable = Vtable { method: || {} };
+ let m = vtable.method;
+ }
+ "#,
+ expect![[r#"
+ 47..120 '{ ...hod; }': ()
+ 57..63 'vtable': Vtable
+ 66..90 'Vtable...| {} }': Vtable
+ 83..88 '|| {}': || -> ()
+ 86..88 '{}': ()
+ 100..101 'm': fn()
+ 104..110 'vtable': Vtable
+ 104..117 'vtable.method': fn()
+ "#]],
+ );
+}
+
+#[test]
+fn block_modifiers_smoke_test() {
+ check_infer(
+ r#"
+//- minicore: future
+async fn main() {
+ let x = unsafe { 92 };
+ let y = async { async { () }.await };
+ let z = try { () };
+ let w = const { 92 };
+ let t = 'a: { 92 };
+}
+ "#,
+ expect![[r#"
+ 16..162 '{ ...2 }; }': ()
+ 26..27 'x': i32
+ 30..43 'unsafe { 92 }': i32
+ 30..43 'unsafe { 92 }': i32
+ 39..41 '92': i32
+ 53..54 'y': impl Future<Output = ()>
+ 57..85 'async ...wait }': ()
+ 57..85 'async ...wait }': impl Future<Output = ()>
+ 65..77 'async { () }': ()
+ 65..77 'async { () }': impl Future<Output = ()>
+ 65..83 'async ....await': ()
+ 73..75 '()': ()
+ 95..96 'z': {unknown}
+ 99..109 'try { () }': ()
+ 99..109 'try { () }': {unknown}
+ 105..107 '()': ()
+ 119..120 'w': i32
+ 123..135 'const { 92 }': i32
+ 123..135 'const { 92 }': i32
+ 131..133 '92': i32
+ 145..146 't': i32
+ 149..159 ''a: { 92 }': i32
+ 155..157 '92': i32
+ "#]],
+ )
+}
+#[test]
+fn async_block_early_return() {
+ check_infer(
+ r#"
+//- minicore: future, result, fn
+fn test<I, E, F: FnMut() -> Fut, Fut: core::future::Future<Output = Result<I, E>>>(f: F) {}
+
+fn main() {
+ async {
+ return Err(());
+ Ok(())
+ };
+ test(|| async {
+ return Err(());
+ Ok(())
+ });
+}
+ "#,
+ expect![[r#"
+ 83..84 'f': F
+ 89..91 '{}': ()
+ 103..231 '{ ... }); }': ()
+ 109..161 'async ... }': Result<(), ()>
+ 109..161 'async ... }': impl Future<Output = Result<(), ()>>
+ 125..139 'return Err(())': !
+ 132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 132..139 'Err(())': Result<(), ()>
+ 136..138 '()': ()
+ 149..151 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 149..155 'Ok(())': Result<(), ()>
+ 152..154 '()': ()
+ 167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
+ 167..228 'test(|... })': ()
+ 172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
+ 175..227 'async ... }': Result<(), ()>
+ 175..227 'async ... }': impl Future<Output = Result<(), ()>>
+ 191..205 'return Err(())': !
+ 198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 198..205 'Err(())': Result<(), ()>
+ 202..204 '()': ()
+ 215..217 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 215..221 'Ok(())': Result<(), ()>
+ 218..220 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_generic_from_later_assignment() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let mut end = None;
+ loop {
+ end = Some(true);
+ }
+ }
+ "#,
+ expect![[r#"
+ 59..129 '{ ... } }': ()
+ 69..76 'mut end': Option<bool>
+ 79..83 'None': Option<bool>
+ 89..127 'loop {... }': !
+ 94..127 '{ ... }': ()
+ 104..107 'end': Option<bool>
+ 104..120 'end = ...(true)': ()
+ 110..114 'Some': Some<bool>(bool) -> Option<bool>
+ 110..120 'Some(true)': Option<bool>
+ 115..119 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_with_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break None;
+ }
+
+ break Some(true);
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..168 '{ ... }; }': ()
+ 69..70 'x': Option<bool>
+ 73..165 'loop {... }': Option<bool>
+ 78..165 '{ ... }': ()
+ 88..132 'if fal... }': ()
+ 91..96 'false': bool
+ 97..132 '{ ... }': ()
+ 111..121 'break None': !
+ 117..121 'None': Option<bool>
+ 142..158 'break ...(true)': !
+ 148..152 'Some': Some<bool>(bool) -> Option<bool>
+ 148..158 'Some(true)': Option<bool>
+ 153..157 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_without_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break;
+ }
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..136 '{ ... }; }': ()
+ 69..70 'x': ()
+ 73..133 'loop {... }': ()
+ 78..133 '{ ... }': ()
+ 88..127 'if fal... }': ()
+ 91..96 'false': bool
+ 97..127 '{ ... }': ()
+ 111..116 'break': !
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_break_with_val() {
+ check_infer(
+ r#"
+ fn foo() {
+ let _x = || 'outer: loop {
+ let inner = 'inner: loop {
+ let i = Default::default();
+ if (break 'outer i) {
+ loop { break 'inner 5i8; };
+ } else if true {
+ break 'inner 6;
+ }
+ break 7;
+ };
+ break inner < 8;
+ };
+ }
+ "#,
+ expect![[r#"
+ 9..335 '{ ... }; }': ()
+ 19..21 '_x': || -> bool
+ 24..332 '|| 'ou... }': || -> bool
+ 27..332 ''outer... }': bool
+ 40..332 '{ ... }': ()
+ 54..59 'inner': i8
+ 62..300 ''inner... }': i8
+ 75..300 '{ ... }': ()
+ 93..94 'i': bool
+ 97..113 'Defaul...efault': {unknown}
+ 97..115 'Defaul...ault()': bool
+ 129..269 'if (br... }': ()
+ 133..147 'break 'outer i': !
+ 146..147 'i': bool
+ 149..208 '{ ... }': ()
+ 167..193 'loop {...5i8; }': !
+ 172..193 '{ brea...5i8; }': ()
+ 174..190 'break ...er 5i8': !
+ 187..190 '5i8': i8
+ 214..269 'if tru... }': ()
+ 217..221 'true': bool
+ 222..269 '{ ... }': ()
+ 240..254 'break 'inner 6': !
+ 253..254 '6': i8
+ 282..289 'break 7': !
+ 288..289 '7': i8
+ 310..325 'break inner < 8': !
+ 316..321 'inner': i8
+ 316..325 'inner < 8': bool
+ 324..325 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_block_break_with_val() {
+ check_infer(
+ r#"
+fn default<T>() -> T { loop {} }
+fn foo() {
+ let _x = 'outer: {
+ let inner = 'inner: {
+ let i = default();
+ if (break 'outer i) {
+ break 'inner 5i8;
+ } else if true {
+ break 'inner 6;
+ }
+ break 'inner 'innermost: { 0 };
+ 42
+ };
+ break 'outer inner < 8;
+ };
+}
+"#,
+ expect![[r#"
+ 21..32 '{ loop {} }': T
+ 23..30 'loop {}': !
+ 28..30 '{}': ()
+ 42..381 '{ ... }; }': ()
+ 52..54 '_x': bool
+ 57..378 ''outer... }': bool
+ 79..84 'inner': i8
+ 87..339 ''inner... }': i8
+ 113..114 'i': bool
+ 117..124 'default': fn default<bool>() -> bool
+ 117..126 'default()': bool
+ 140..270 'if (br... }': ()
+ 144..158 'break 'outer i': !
+ 157..158 'i': bool
+ 160..209 '{ ... }': ()
+ 178..194 'break ...er 5i8': !
+ 191..194 '5i8': i8
+ 215..270 'if tru... }': ()
+ 218..222 'true': bool
+ 223..270 '{ ... }': ()
+ 241..255 'break 'inner 6': !
+ 254..255 '6': i8
+ 283..313 'break ... { 0 }': !
+ 296..313 ''inner... { 0 }': i8
+ 310..311 '0': i8
+ 327..329 '42': i8
+ 349..371 'break ...er < 8': !
+ 362..367 'inner': i8
+ 362..371 'inner < 8': bool
+ 370..371 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test(t1: Thing, t2: OtherThing, t3: Thing<i32>, t4: OtherThing<i32>) {
+ t1.t;
+ t3.t;
+ match t2 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ match t4 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ }
+ "#,
+ expect![[r#"
+ 97..99 't1': Thing<()>
+ 108..110 't2': OtherThing<()>
+ 124..126 't3': Thing<i32>
+ 140..142 't4': OtherThing<i32>
+ 161..384 '{ ... } }': ()
+ 167..169 't1': Thing<()>
+ 167..171 't1.t': ()
+ 177..179 't3': Thing<i32>
+ 177..181 't3.t': i32
+ 187..282 'match ... }': ()
+ 193..195 't2': OtherThing<()>
+ 206..227 'OtherT... { t }': OtherThing<()>
+ 224..225 't': ()
+ 231..237 '{ t; }': ()
+ 233..234 't': ()
+ 247..265 'OtherT...Two(t)': OtherThing<()>
+ 263..264 't': ()
+ 269..275 '{ t; }': ()
+ 271..272 't': ()
+ 287..382 'match ... }': ()
+ 293..295 't4': OtherThing<i32>
+ 306..327 'OtherT... { t }': OtherThing<i32>
+ 324..325 't': i32
+ 331..337 '{ t; }': ()
+ 333..334 't': i32
+ 347..365 'OtherT...Two(t)': OtherThing<i32>
+ 363..364 't': i32
+ 369..375 '{ t; }': ()
+ 371..372 't': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_in_struct_literal() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test() {
+ let x = Thing { t: loop {} };
+ let y = Thing { t: () };
+ let z = Thing { t: 1i32 };
+ if let Thing { t } = z {
+ t;
+ }
+
+ let a = OtherThing::One { t: 1i32 };
+ let b = OtherThing::Two(1i32);
+ }
+ "#,
+ expect![[r#"
+ 99..319 '{ ...32); }': ()
+ 109..110 'x': Thing<!>
+ 113..133 'Thing ...p {} }': Thing<!>
+ 124..131 'loop {}': !
+ 129..131 '{}': ()
+ 143..144 'y': Thing<()>
+ 147..162 'Thing { t: () }': Thing<()>
+ 158..160 '()': ()
+ 172..173 'z': Thing<i32>
+ 176..193 'Thing ...1i32 }': Thing<i32>
+ 187..191 '1i32': i32
+ 199..240 'if let... }': ()
+ 202..221 'let Th... } = z': bool
+ 206..217 'Thing { t }': Thing<i32>
+ 214..215 't': i32
+ 220..221 'z': Thing<i32>
+ 222..240 '{ ... }': ()
+ 232..233 't': i32
+ 250..251 'a': OtherThing<i32>
+ 254..281 'OtherT...1i32 }': OtherThing<i32>
+ 275..279 '1i32': i32
+ 291..292 'b': OtherThing<i32>
+ 295..310 'OtherThing::Two': Two<i32>(i32) -> OtherThing<i32>
+ 295..316 'OtherT...(1i32)': OtherThing<i32>
+ 311..315 '1i32': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg() {
+ // FIXME: the {unknown} is a bug
+ check_infer(
+ r#"
+ struct Thing<T = u128, F = fn() -> T> { t: T }
+
+ fn test(t1: Thing<u32>, t2: Thing) {
+ t1;
+ t2;
+ Thing::<_> { t: 1u32 };
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<u32, fn() -> u32>
+ 72..74 't2': Thing<u128, fn() -> u128>
+ 83..130 '{ ...2 }; }': ()
+ 89..91 't1': Thing<u32, fn() -> u32>
+ 97..99 't2': Thing<u128, fn() -> u128>
+ 105..127 'Thing:...1u32 }': Thing<u32, fn() -> {unknown}>
+ 121..125 '1u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg_forward() {
+ // the {unknown} here is intentional, as defaults are not allowed to
+ // refer to type parameters coming later
+ check_infer(
+ r#"
+ struct Thing<F = fn() -> T, T = u128> { t: T }
+
+ fn test(t1: Thing) {
+ t1;
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<fn() -> {unknown}, u128>
+ 67..78 '{ t1; }': ()
+ 73..75 't1': Thing<fn() -> {unknown}, u128>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_operator_overload() {
+ check_types(
+ r#"
+//- minicore: add
+struct V2([f32; 2]);
+
+impl core::ops::Add<V2> for V2 {
+ type Output = V2;
+}
+
+fn test() {
+ let va = V2([0.0, 1.0]);
+ let vb = V2([0.0, 1.0]);
+
+ let r = va + vb;
+ // ^^^^^^^ V2
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn infer_const_params() {
+ check_infer(
+ r#"
+ fn foo<const FOO: usize>() {
+ let bar = FOO;
+ }
+ "#,
+ expect![[r#"
+ 27..49 '{ ...FOO; }': ()
+ 37..40 'bar': usize
+ 43..46 'FOO': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ struct S { field: u32 }
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ "#,
+ expect![[r#"
+ 9..89 '{ ...eld; }': ()
+ 47..48 's': S
+ 51..65 'S { field: 0 }': S
+ 62..63 '0': u32
+ 75..76 'f': u32
+ 79..80 's': S
+ 79..86 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ {
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ struct S { field: u32 }
+ }
+ "#,
+ expect![[r#"
+ 9..109 '{ ...32 } }': ()
+ 15..79 '{ ... }': ()
+ 29..30 's': S
+ 33..47 'S { field: 0 }': S
+ 44..45 '0': u32
+ 61..62 'f': u32
+ 65..66 's': S
+ 65..72 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_enum_rename() {
+ check_infer(
+ r#"
+ enum Request {
+ Info
+ }
+
+ fn f() {
+ use Request as R;
+
+ let r = R::Info;
+ match r {
+ R::Info => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 34..123 '{ ... } }': ()
+ 67..68 'r': Request
+ 71..78 'R::Info': Request
+ 84..121 'match ... }': ()
+ 90..91 'r': Request
+ 102..109 'R::Info': Request
+ 113..115 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn box_into_vec() {
+ check_infer(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+pub struct Vec<T, A: Allocator = Global> {}
+
+#[lang = "slice"]
+impl<T> [T] {}
+
+#[lang = "slice_alloc"]
+impl<T> [T] {
+ pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
+ unimplemented!()
+ }
+}
+
+fn test() {
+ let vec = <[_]>::into_vec(box [1i32]);
+ let v: Vec<Box<dyn B>> = <[_]> :: into_vec(box [box Astruct]);
+}
+
+trait B{}
+struct Astruct;
+impl B for Astruct {}
+"#,
+ expect![[r#"
+ 569..573 'self': Box<[T], A>
+ 602..634 '{ ... }': Vec<T, A>
+ 612..628 'unimpl...ted!()': Vec<T, A>
+ 648..761 '{ ...t]); }': ()
+ 658..661 'vec': Vec<i32, Global>
+ 664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 664..691 '<[_]>:...1i32])': Vec<i32, Global>
+ 680..690 'box [1i32]': Box<[i32; 1], Global>
+ 684..690 '[1i32]': [i32; 1]
+ 685..689 '1i32': i32
+ 701..702 'v': Vec<Box<dyn B, Global>, Global>
+ 722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
+ 722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
+ 740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
+ 744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
+ 745..756 'box Astruct': Box<Astruct, Global>
+ 749..756 'Astruct': Astruct
+ "#]],
+ )
+}
+
+#[test]
+fn cfgd_out_assoc_items() {
+ check_types(
+ r#"
+struct S;
+
+impl S {
+ #[cfg(FALSE)]
+ const C: S = S;
+}
+
+fn f() {
+ S::C;
+ //^^^^ {unknown}
+}
+ "#,
+ )
+}
+
+#[test]
+fn infer_missing_type() {
+ check_types(
+ r#"
+struct S;
+
+fn f() {
+ let s: = S;
+ //^ S
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_type_alias_variant() {
+ check_infer(
+ r#"
+type Qux = Foo;
+enum Foo {
+ Bar(i32),
+ Baz { baz: f32 }
+}
+
+fn f() {
+ match Foo::Bar(3) {
+ Qux::Bar(bar) => (),
+ Qux::Baz { baz } => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 72..166 '{ ... } }': ()
+ 78..164 'match ... }': ()
+ 84..92 'Foo::Bar': Bar(i32) -> Foo
+ 84..95 'Foo::Bar(3)': Foo
+ 93..94 '3': i32
+ 106..119 'Qux::Bar(bar)': Foo
+ 115..118 'bar': i32
+ 123..125 '()': ()
+ 135..151 'Qux::B... baz }': Foo
+ 146..149 'baz': f32
+ 155..157 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_boxed_self_receiver() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct Box<T>(T);
+
+impl<T> Deref for Box<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ fn get_inner<'a>(self: &'a Box<Self>) -> &'a T {}
+
+ fn get_self<'a>(self: &'a Box<Self>) -> &'a Self {}
+
+ fn into_inner(self: Box<Self>) -> Self {}
+}
+
+fn main() {
+ let boxed = Box(Foo(0_i32));
+
+ let bad1 = boxed.get_inner();
+ let good1 = Foo::get_inner(&boxed);
+
+ let bad2 = boxed.get_self();
+ let good2 = Foo::get_self(&boxed);
+
+ let inner = boxed.into_inner();
+}
+ "#,
+ expect![[r#"
+ 104..108 'self': &Box<T>
+ 188..192 'self': &Box<Foo<T>>
+ 218..220 '{}': &T
+ 242..246 'self': &Box<Foo<T>>
+ 275..277 '{}': &Foo<T>
+ 297..301 'self': Box<Foo<T>>
+ 322..324 '{}': Foo<T>
+ 338..559 '{ ...r(); }': ()
+ 348..353 'boxed': Box<Foo<i32>>
+ 356..359 'Box': Box<Foo<i32>>(Foo<i32>) -> Box<Foo<i32>>
+ 356..371 'Box(Foo(0_i32))': Box<Foo<i32>>
+ 360..363 'Foo': Foo<i32>(i32) -> Foo<i32>
+ 360..370 'Foo(0_i32)': Foo<i32>
+ 364..369 '0_i32': i32
+ 382..386 'bad1': &i32
+ 389..394 'boxed': Box<Foo<i32>>
+ 389..406 'boxed....nner()': &i32
+ 416..421 'good1': &i32
+ 424..438 'Foo::get_inner': fn get_inner<i32>(&Box<Foo<i32>>) -> &i32
+ 424..446 'Foo::g...boxed)': &i32
+ 439..445 '&boxed': &Box<Foo<i32>>
+ 440..445 'boxed': Box<Foo<i32>>
+ 457..461 'bad2': &Foo<i32>
+ 464..469 'boxed': Box<Foo<i32>>
+ 464..480 'boxed....self()': &Foo<i32>
+ 490..495 'good2': &Foo<i32>
+ 498..511 'Foo::get_self': fn get_self<i32>(&Box<Foo<i32>>) -> &Foo<i32>
+ 498..519 'Foo::g...boxed)': &Foo<i32>
+ 512..518 '&boxed': &Box<Foo<i32>>
+ 513..518 'boxed': Box<Foo<i32>>
+ 530..535 'inner': Foo<i32>
+ 538..543 'boxed': Box<Foo<i32>>
+ 538..556 'boxed....nner()': Foo<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_2015() {
+ check_types(
+ r#"
+//- /main.rs edition:2015 crate:main deps:core
+fn f() {
+ Rust;
+ //^^^^ Rust
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2015 {
+ pub struct Rust;
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn legacy_const_generics() {
+ check_no_mismatches(
+ r#"
+#[rustc_legacy_const_generics(1, 3)]
+fn mixed<const N1: &'static str, const N2: bool>(
+ a: u8,
+ b: i8,
+) {}
+
+fn f() {
+ mixed(0, "", -1, true);
+ mixed::<"", true>(0, -1);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_slice() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^usize
+ [a,] = [0usize];
+
+ let a;
+ //^usize
+ [a, ..] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a, _] = [0usize; 5];
+
+ let a;
+ //^usize
+ [_, a, ..] = [0usize; 5];
+
+ let a: &mut i64 = &mut 0;
+ [*a, ..] = [1, 2, 3];
+
+ let a: usize;
+ let b;
+ //^usize
+ [a, _, b] = [3, 4, 5];
+ //^usize
+
+ let a;
+ //^i64
+ let b;
+ //^i64
+ [[a, ..], .., [.., b]] = [[1, 2], [3i64, 4], [5, 6], [7, 8]];
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, b) = ('c', 0i64);
+
+ let a;
+ //^char
+ (a, ..) = ('c', 0i64);
+
+ let a;
+ //^i64
+ (.., a) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, .., b) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ (a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ (_, a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ (_, a, .., b) = ('c', 0i64, true, 0usize);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b: i64 = 0;
+ (a, b) = (b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple_struct() {
+ check_types(
+ r#"
+struct S2(char, i64);
+struct S3(char, i64, bool);
+struct S4(char, i64, bool, usize);
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, .., b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ S3(a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ S3(_, a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ S4(_, a, .., b) = S4('c', 0i64, true, 0usize);
+
+ struct Swap(i64, i64);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b = 0;
+ //^^^^^i64
+ Swap(a, b) = Swap(b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_struct() {
+ check_types(
+ r#"
+struct S {
+ a: usize,
+ b: char,
+}
+struct T {
+ s: S,
+ t: i64,
+}
+
+fn main() {
+ let a;
+ //^usize
+ let c;
+ //^char
+ S { a, b: c } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, .. } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, _ } = S { a: 3, b: 'b' };
+
+ let a;
+ //^usize
+ let c;
+ //^char
+ let t;
+ //^i64
+ T { s: S { a, b: c }, t } = T { s: S { a: 3, b: 'b' }, t: 0 };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_nested() {
+ check_types(
+ r#"
+struct S {
+ a: TS,
+ b: [char; 3],
+}
+struct TS(usize, i64);
+
+fn main() {
+ let a;
+ //^i32
+ let b;
+ //^bool
+ ([.., a], .., b, _) = ([0, 1, 2], true, 'c');
+
+ let a;
+ //^i32
+ let b;
+ //^i32
+ [(.., a, _), .., (b, ..)] = [(1, 2); 5];
+
+ let a;
+ //^usize
+ let b;
+ //^char
+ S { a: TS(a, ..), b: [_, b, ..] } = S { a: TS(0, 0), b: ['a'; 3] };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_unit_struct() {
+ // taken from rustc; see https://github.com/rust-lang/rust/pull/95380
+ check_no_mismatches(
+ r#"
+struct S;
+enum E { V, }
+type A = E;
+
+fn main() {
+ let mut a;
+
+ (S, a) = (S, ());
+
+ (E::V, a) = (E::V, ());
+
+ (<E>::V, a) = (E::V, ());
+ (A::V, a) = (E::V, ());
+}
+
+impl S {
+ fn check() {
+ let a;
+ (Self, a) = (S, ());
+ }
+}
+
+impl E {
+ fn check() {
+ let a;
+ (Self::V, a) = (E::V, ());
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_no_default_binding_mode() {
+ check(
+ r#"
+struct S { a: usize }
+struct TS(usize);
+fn main() {
+ let x;
+ [x,] = &[1,];
+ //^^^^expected &[i32; 1], got [{unknown}; _]
+
+ // FIXME we only want the outermost error, but this matches the current
+ // behavior of slice patterns
+ let x;
+ [(x,),] = &[(1,),];
+ // ^^^^expected {unknown}, got ({unknown},)
+ //^^^^^^^expected &[(i32,); 1], got [{unknown}; _]
+
+ let x;
+ ((x,),) = &((1,),);
+ //^^^^^^^expected &((i32,),), got (({unknown},),)
+
+ let x;
+ (x,) = &(1,);
+ //^^^^expected &(i32,), got ({unknown},)
+
+ let x;
+ (S { a: x },) = &(S { a: 42 },);
+ //^^^^^^^^^^^^^expected &(S,), got (S,)
+
+ let x;
+ S { a: x } = &S { a: 42 };
+ //^^^^^^^^^^expected &S, got S
+
+ let x;
+ TS(x) = &TS(42);
+ //^^^^^expected &TS, got TS
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_type_mismatch_on_identifier() {
+ check(
+ r#"
+struct S { v: i64 }
+struct TS(i64);
+fn main() {
+ let mut a: usize = 0;
+ (a,) = (0i64,);
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ [a,] = [0i64,];
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ S { v: a } = S { v: 0 };
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ TS(a) = TS(0);
+ //^expected i64, got usize
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
new file mode 100644
index 000000000..75802a5eb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -0,0 +1,3782 @@
+use cov_mark::check;
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches, check_types};
+
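+// Conventions in this file: `check_types` fixtures carry `//^` marker lines
+// that point at a span on the previous line and state the type expected to be
+// inferred there; `check_infer*` compares an `offset..offset 'text': type`
+// dump against the `expect![[...]]` block; `check_no_mismatches` only asserts
+// that no type mismatches are reported. A `//- minicore: ...` header pulls the
+// named items from the minimal `core` stub used by these fixtures.
+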
+#[test]
+fn infer_await() {
+ check_types(
+ r#"
+//- minicore: future
+struct IntFuture;
+
+impl core::future::Future for IntFuture {
+ type Output = u64;
+}
+
+fn test() {
+ let r = IntFuture;
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_async() {
+ check_types(
+ r#"
+//- minicore: future
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_desugar_async() {
+ check_types(
+ r#"
+//- minicore: future, sized
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ r;
+} //^ impl Future<Output = u64>
+"#,
+ );
+}
+
+#[test]
+fn infer_async_block() {
+ check_types(
+ r#"
+//- minicore: future, option
+async fn test() {
+ let a = async { 42 };
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let b = async {}.await;
+ b;
+// ^ ()
+ let c = async {
+ let y = None;
+ y
+ // ^ Option<u64>
+ };
+ let _: Option<u64> = c.await;
+ c;
+// ^ impl Future<Output = Option<u64>>
+}
+"#,
+ );
+}
+
+#[test]
+fn auto_sized_async_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: future, sized
+
+use core::future::Future;
+struct MyFut<Fut>(Fut);
+
+impl<Fut> Future for MyFut<Fut>
+where Fut: Future
+{
+ type Output = Fut::Output;
+}
+async fn reproduction() -> usize {
+ let f = async {999usize};
+ MyFut(f).await
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+//- minicore: future
+//#11815
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{
+ box async move {}
+}
+
+fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
+ box async move {}
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_try() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub trait Try {
+ type Ok;
+ type Error;
+ }
+}
+
+pub mod result {
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> crate::ops::Try for Result<O, E> {
+ type Ok = O;
+ type Error = E;
+ }
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::{result::*, ops::*};
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_try_trait_v2() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+mod ops {
+ mod try_trait {
+ pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+ }
+ pub trait FromResidual<R = <Self as Try>::Residual> {}
+ }
+
+ pub use self::try_trait::FromResidual;
+ pub use self::try_trait::Try;
+}
+
+mod convert {
+ pub trait From<T> {}
+ impl<T> From<T> for T {}
+}
+
+pub mod result {
+ use crate::convert::From;
+ use crate::ops::{Try, FromResidual};
+
+ pub enum Infallible {}
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> Try for Result<O, E> {
+ type Output = O;
+ type Error = Result<Infallible, E>;
+ }
+
+ impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::result::*;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_for_loop() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core,alloc
+#![no_std]
+use alloc::collections::Vec;
+
+fn test() {
+ let v = Vec::new();
+ v.push("foo");
+ for x in v {
+ x;
+ } //^ &str
+}
+
+//- /core.rs crate:core
+pub mod iter {
+ pub trait IntoIterator {
+ type Item;
+ }
+}
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::iter::*;
+ }
+}
+
+//- /alloc.rs crate:alloc deps:core
+#![no_std]
+pub mod collections {
+ pub struct Vec<T> {}
+ impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) { }
+ }
+
+ impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_neg() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Neg for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = -a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "neg"]
+ pub trait Neg {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_not() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Not for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = !a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "not"]
+ pub trait Not {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_1() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<T: Trait<u32>>(t: T) {}
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_2() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<U, T: Trait<U>>(t: T) -> U { loop {} }
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ let x: u32 = foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_trait() {
+ cov_mark::check!(trait_self_implements_self);
+ check(
+ r#"
+trait Trait {
+ fn foo(&self) -> i64;
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_super_trait() {
+ check(
+ r#"
+trait SuperTrait {
+ fn foo(&self) -> i64;
+}
+trait Trait: SuperTrait {
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_project_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn test<T: Iterable>() {
+ let x: <S as Iterable>::Item = 1;
+ // ^ u32
+ let y: <T as Iterable>::Item = u;
+ // ^ Iterable::Item<T>
+ let z: T::Item = u;
+ // ^ Iterable::Item<T>
+ let a: <T>::Item = u;
+ // ^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn infer_return_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn foo1<T: Iterable>(t: T) -> T::Item { loop {} }
+fn foo2<T: Iterable>(t: T) -> <T as Iterable>::Item { loop {} }
+fn foo3<T: Iterable>(t: T) -> <T>::Item { loop {} }
+fn test() {
+ foo1(S);
+ // ^^^^^^^ u32
+ foo2(S);
+ // ^^^^^^^ u32
+ foo3(S);
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_method_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S<T>;
+impl<T> S<T> {
+ fn foo(self) -> T::Item where T: Iterable { loop {} }
+}
+fn test<T: Iterable>() {
+ let s: S<T>;
+ s.foo();
+ // ^^^^^^^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_self_issue_12484() {
+ check_types(
+ r#"
+trait Bar {
+ type A;
+}
+trait Foo {
+ type A;
+ fn test(a: Self::A, _: impl Bar) {
+ a;
+ //^ Foo::A<Self>
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_associated_type_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+fn test<T: Iterable<Item=u32>>() {
+ let y: T::Item = unknown;
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn infer_const_body() {
+ // FIXME make check_types work with other bodies
+ check_infer(
+ r#"
+const A: u32 = 1 + 1;
+static B: u64 = { let x = 1; x };
+"#,
+ expect![[r#"
+ 15..16 '1': u32
+ 15..20 '1 + 1': u32
+ 19..20 '1': u32
+ 38..54 '{ let ...1; x }': u64
+ 44..45 'x': u64
+ 48..49 '1': u64
+ 51..52 'x': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_fields() {
+ check_infer(
+ r#"
+struct S(i32, u64);
+fn test() -> u64 {
+ let a = S(4, 6);
+ let b = a.0;
+ a.1
+}"#,
+ expect![[r#"
+ 37..86 '{ ... a.1 }': u64
+ 47..48 'a': S
+ 51..52 'S': S(i32, u64) -> S
+ 51..58 'S(4, 6)': S
+ 53..54 '4': i32
+ 56..57 '6': u64
+ 68..69 'b': i32
+ 72..73 'a': S
+ 72..75 'a.0': i32
+ 81..82 'a': S
+ 81..84 'a.1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_with_fn() {
+ check_infer(
+ r#"
+struct S(fn(u32) -> u64);
+fn test() -> u64 {
+ let a = S(|i| 2*i);
+ let b = a.0(4);
+ a.0(2)
+}"#,
+ expect![[r#"
+ 43..101 '{ ...0(2) }': u64
+ 53..54 'a': S
+ 57..58 'S': S(fn(u32) -> u64) -> S
+ 57..67 'S(|i| 2*i)': S
+ 59..66 '|i| 2*i': |u32| -> u64
+ 60..61 'i': u32
+ 63..64 '2': u32
+ 63..66 '2*i': u32
+ 65..66 'i': u32
+ 77..78 'b': u64
+ 81..82 'a': S
+ 81..84 'a.0': fn(u32) -> u64
+ 81..87 'a.0(4)': u64
+ 85..86 '4': u32
+ 93..94 'a': S
+ 93..96 'a.0': fn(u32) -> u64
+ 93..99 'a.0(2)': u64
+ 97..98 '2': u32
+ "#]],
+ );
+}
+
+#[test]
+fn indexing_arrays() {
+ check_infer(
+ "fn main() { &mut [9][2]; }",
+ expect![[r#"
+ 10..26 '{ &mut...[2]; }': ()
+ 12..23 '&mut [9][2]': &mut {unknown}
+ 17..20 '[9]': [i32; 1]
+ 17..23 '[9][2]': {unknown}
+ 18..19 '9': i32
+ 21..22 '2': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_ops_index() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32];
+ b;
+} //^ Foo
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+ field: u32,
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32].field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field_autoderef() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+ field: u32,
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = (&a[1u32]).field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_int() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+struct Range;
+impl core::ops::Index<Range> for Bar {
+ type Output = Bar;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1];
+ b;
+ //^ Foo
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_autoderef() {
+ check_types(
+ r#"
+//- minicore: index, slice
+fn test() {
+ let a = &[1u32, 2, 3];
+ let b = a[1];
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn deref_trait() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+fn new_arc<T: ?Sized>() -> Arc<T> { Arc }
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+fn foo(a: Arc<S>) {}
+
+fn test() {
+ let a = new_arc();
+ let b = *a;
+ //^^ S
+ foo(a);
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_infinite_recursion() {
+ check_types(
+ r#"
+//- minicore: deref
+struct S;
+
+impl core::ops::Deref for S {
+ type Target = S;
+}
+
+fn test(s: S) {
+ s.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_question_mark_size() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_implicit_sized_requirement_on_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Foo<T>;
+impl<T> core::ops::Deref for Foo<T> {
+ type Target = ();
+}
+fn test() {
+ let foo = Foo;
+ *foo;
+ //^^^^ ()
+ let _: Foo<u8> = foo;
+}
+"#,
+ )
+}
+
+#[test]
+fn obligation_from_function_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<u32> for S {}
+
+fn foo<T: Trait<U>, U>(t: T) -> U { loop {} }
+
+fn test(s: S) {
+ foo(s);
+} //^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_method_clause() {
+ check_types(
+ r#"
+//- /main.rs
+struct S;
+
+trait Trait<T> {}
+impl Trait<isize> for S {}
+
+struct O;
+impl O {
+ fn foo<T: Trait<U>, U>(&self, t: T) -> U { loop {} }
+}
+
+fn test() {
+ O.foo(S);
+} //^^^^^^^^ isize
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_self_method_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<i64> for S {}
+
+impl S {
+ fn foo<U>(&self) -> U where Self: Trait<U> { loop {} }
+}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ i64
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_impl_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<&str> for S {}
+
+struct O<T>;
+impl<U, T: Trait<U>> O<T> {
+ fn foo(&self) -> U { loop {} }
+}
+
+fn test(o: O<S>) {
+ o.foo();
+} //^^^^^^^ &str
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T: Clone>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1_not_met() {
+ check_types(
+ r#"
+//- /main.rs
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T: Trait>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2_not_met() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+trait Trait {}
+impl<T> core::ops::Deref for T where T: Trait {
+ type Target = i128;
+}
+fn test<T: Trait>(t: T) { *t; }
+ //^^ i128
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder() {
+ // inside the generic function, the associated type gets normalized to a placeholder `ApplyL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+
+pub struct RefMutL<T>;
+
+impl<T> ApplyL for RefMutL<T> {
+ type Out = <T as ApplyL>::Out;
+}
+
+fn test<T: ApplyL>() {
+ let y: <RefMutL<T> as ApplyL>::Out = no_matter;
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder_2() {
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
+
+fn test<T: ApplyL>(t: T) {
+ let y = foo(t);
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn argument_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar(x: impl Trait<u16>) {}
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
+ x;
+ y;
+ let z = S(1);
+ bar(z);
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 77..78 'x': impl Trait<u16>
+ 97..99 '{}': ()
+ 154..155 'x': impl Trait<u64>
+ 174..175 'y': &impl Trait<u32>
+ 195..323 '{ ...2(); }': ()
+ 201..202 'x': impl Trait<u64>
+ 208..209 'y': &impl Trait<u32>
+ 219..220 'z': S<u16>
+ 223..224 'S': S<u16>(u16) -> S<u16>
+ 223..227 'S(1)': S<u16>
+ 225..226 '1': u16
+ 233..236 'bar': fn bar(S<u16>)
+ 233..239 'bar(z)': ()
+ 237..238 'z': S<u16>
+ 245..246 'x': impl Trait<u64>
+ 245..252 'x.foo()': u64
+ 258..259 'y': &impl Trait<u32>
+ 258..265 'y.foo()': u32
+ 271..272 'z': S<u16>
+ 271..278 'z.foo()': u16
+ 284..285 'x': impl Trait<u64>
+ 284..292 'x.foo2()': i64
+ 298..299 'y': &impl Trait<u32>
+ 298..306 'y.foo2()': i64
+ 312..313 'z': S<u16>
+ 312..320 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+trait Foo {
+ // this function has an implicit Self param, an explicit type param,
+ // and an implicit impl Trait param!
+ fn bar<T>(x: impl Trait) -> T { loop {} }
+}
+fn foo<T>(x: impl Trait) -> T { loop {} }
+struct S;
+impl Trait for S {}
+struct F;
+impl Foo for F {}
+
+fn test() {
+ Foo::bar(S);
+ <F as Foo>::bar(S);
+ F::bar(S);
+ Foo::bar::<u32>(S);
+ <F as Foo>::bar::<u32>(S);
+
+ foo(S);
+ foo::<u32>(S);
+ foo::<u32, i32>(S); // we should ignore the extraneous i32
+}"#,
+ expect![[r#"
+ 155..156 'x': impl Trait
+ 175..186 '{ loop {} }': T
+ 177..184 'loop {}': !
+ 182..184 '{}': ()
+ 199..200 'x': impl Trait
+ 219..230 '{ loop {} }': T
+ 221..228 'loop {}': !
+ 226..228 '{}': ()
+ 300..509 '{ ... i32 }': ()
+ 306..314 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
+ 306..317 'Foo::bar(S)': {unknown}
+ 315..316 'S': S
+ 323..338 '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 323..341 '<F as ...bar(S)': {unknown}
+ 339..340 'S': S
+ 347..353 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 347..356 'F::bar(S)': {unknown}
+ 354..355 'S': S
+ 362..377 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
+ 362..380 'Foo::b...32>(S)': u32
+ 378..379 'S': S
+ 386..408 '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
+ 386..411 '<F as ...32>(S)': u32
+ 409..410 'S': S
+ 418..421 'foo': fn foo<{unknown}>(S) -> {unknown}
+ 418..424 'foo(S)': {unknown}
+ 422..423 'S': S
+ 430..440 'foo::<u32>': fn foo<u32>(S) -> u32
+ 430..443 'foo::<u32>(S)': u32
+ 441..442 'S': S
+ 449..464 'foo::<u32, i32>': fn foo<u32>(S) -> u32
+ 449..467 'foo::<...32>(S)': u32
+ 465..466 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_2() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct S;
+impl Trait for S {}
+struct F<T>;
+impl<T> F<T> {
+ fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
+}
+
+fn test() {
+ F.foo(S);
+ F::<u32>.foo(S);
+ F::<u32>.foo::<i32>(S);
+ F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
+}"#,
+ expect![[r#"
+ 87..91 'self': F<T>
+ 93..94 'x': impl Trait
+ 118..129 '{ loop {} }': (T, U)
+ 120..127 'loop {}': !
+ 125..127 '{}': ()
+ 143..283 '{ ...ored }': ()
+ 149..150 'F': F<{unknown}>
+ 149..157 'F.foo(S)': ({unknown}, {unknown})
+ 155..156 'S': S
+ 163..171 'F::<u32>': F<u32>
+ 163..178 'F::<u32>.foo(S)': (u32, {unknown})
+ 176..177 'S': S
+ 184..192 'F::<u32>': F<u32>
+ 184..206 'F::<u3...32>(S)': (u32, i32)
+ 204..205 'S': S
+ 212..220 'F::<u32>': F<u32>
+ 212..239 'F::<u3...32>(S)': (u32, i32)
+ 237..238 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_to_fn_pointer() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo(x: impl Trait) { loop {} }
+struct S;
+impl Trait for S {}
+
+fn test() {
+ let f: fn(S) -> () = foo;
+}"#,
+ expect![[r#"
+ 22..23 'x': impl Trait
+ 37..48 '{ loop {} }': ()
+ 39..46 'loop {}': !
+ 44..46 '{}': ()
+ 90..123 '{ ...foo; }': ()
+ 100..101 'f': fn(S)
+ 117..120 'foo': fn foo(S)
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> impl Trait<u64> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 98..100 '{}': ()
+ 110..111 'x': impl Trait<u64>
+ 130..131 'y': &impl Trait<u64>
+ 151..268 '{ ...2(); }': ()
+ 157..158 'x': impl Trait<u64>
+ 164..165 'y': &impl Trait<u64>
+ 175..176 'z': impl Trait<u64>
+ 179..182 'bar': fn bar() -> impl Trait<u64>
+ 179..184 'bar()': impl Trait<u64>
+ 190..191 'x': impl Trait<u64>
+ 190..197 'x.foo()': u64
+ 203..204 'y': &impl Trait<u64>
+ 203..210 'y.foo()': u64
+ 216..217 'z': impl Trait<u64>
+ 216..223 'z.foo()': u64
+ 229..230 'x': impl Trait<u64>
+ 229..237 'x.foo2()': i64
+ 243..244 'y': &impl Trait<u64>
+ 243..251 'y.foo2()': i64
+ 257..258 'z': impl Trait<u64>
+ 257..265 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn simple_return_pos_impl_trait() {
+ cov_mark::check!(lower_rpit);
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> impl Trait<u64> { loop {} }
+
+fn test() {
+ let a = bar();
+ a.foo();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 71..82 '{ loop {} }': !
+ 73..80 'loop {}': !
+ 78..80 '{}': ()
+ 94..129 '{ ...o(); }': ()
+ 104..105 'a': impl Trait<u64>
+ 108..111 'bar': fn bar() -> impl Trait<u64>
+ 108..113 'bar()': impl Trait<u64>
+ 119..120 'a': impl Trait<u64>
+ 119..126 'a.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn more_return_pos_impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Iterator {
+ type Item;
+ fn next(&mut self) -> Self::Item;
+}
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>) { loop {} }
+fn baz<T>(t: T) -> (impl Iterator<Item = impl Trait<T>>, impl Trait<T>) { loop {} }
+
+fn test() {
+ let (a, b) = bar();
+ a.next().foo();
+ b.foo();
+ let (c, d) = baz(1u128);
+ c.next().foo();
+ d.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &mut Self
+ 101..105 'self': &Self
+ 184..195 '{ loop {} }': ({unknown}, {unknown})
+ 186..193 'loop {}': !
+ 191..193 '{}': ()
+ 206..207 't': T
+ 268..279 '{ loop {} }': ({unknown}, {unknown})
+ 270..277 'loop {}': !
+ 275..277 '{}': ()
+ 291..413 '{ ...o(); }': ()
+ 301..307 '(a, b)': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 302..303 'a': impl Iterator<Item = impl Trait<u32>>
+ 305..306 'b': impl Trait<u64>
+ 310..313 'bar': fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 310..315 'bar()': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 321..322 'a': impl Iterator<Item = impl Trait<u32>>
+ 321..329 'a.next()': impl Trait<u32>
+ 321..335 'a.next().foo()': u32
+ 341..342 'b': impl Trait<u64>
+ 341..348 'b.foo()': u64
+ 358..364 '(c, d)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 359..360 'c': impl Iterator<Item = impl Trait<u128>>
+ 362..363 'd': impl Trait<u128>
+ 367..370 'baz': fn baz<u128>(u128) -> (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 367..377 'baz(1u128)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 371..376 '1u128': u128
+ 383..384 'c': impl Iterator<Item = impl Trait<u128>>
+ 383..391 'c.next()': impl Trait<u128>
+ 383..397 'c.next().foo()': u128
+ 403..404 'd': impl Trait<u128>
+ 403..410 'd.foo()': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_from_return_pos_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, sized
+trait Trait<T> {}
+struct Bar<T>(T);
+impl<T> Trait<T> for Bar<T> {}
+fn foo<const C: u8, T>() -> (impl FnOnce(&str, T), impl Trait<u8>) {
+ (|input, t| {}, Bar(C))
+}
+"#,
+ expect![[r#"
+ 134..165 '{ ...(C)) }': (|&str, T| -> (), Bar<u8>)
+ 140..163 '(|inpu...ar(C))': (|&str, T| -> (), Bar<u8>)
+ 141..154 '|input, t| {}': |&str, T| -> ()
+ 142..147 'input': &str
+ 149..150 't': T
+ 152..154 '{}': ()
+ 156..159 'Bar': Bar<u8>(u8) -> Bar<u8>
+ 156..162 'Bar(C)': Bar<u8>
+ 160..161 'C': u8
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> dyn Trait<u64> {}
+
+fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 97..99 '{}': dyn Trait<u64>
+ 109..110 'x': dyn Trait<u64>
+ 128..129 'y': &dyn Trait<u64>
+ 148..265 '{ ...2(); }': ()
+ 154..155 'x': dyn Trait<u64>
+ 161..162 'y': &dyn Trait<u64>
+ 172..173 'z': dyn Trait<u64>
+ 176..179 'bar': fn bar() -> dyn Trait<u64>
+ 176..181 'bar()': dyn Trait<u64>
+ 187..188 'x': dyn Trait<u64>
+ 187..194 'x.foo()': u64
+ 200..201 'y': &dyn Trait<u64>
+ 200..207 'y.foo()': u64
+ 213..214 'z': dyn Trait<u64>
+ 213..220 'z.foo()': u64
+ 226..227 'x': dyn Trait<u64>
+ 226..234 'x.foo2()': i64
+ 240..241 'y': &dyn Trait<u64>
+ 240..248 'y.foo2()': i64
+ 254..255 'z': dyn Trait<u64>
+ 254..262 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_in_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T, U> {
+ fn foo(&self) -> (T, U);
+}
+struct S<T, U> {}
+impl<T, U> S<T, U> {
+ fn bar(&self) -> &dyn Trait<T, U> { loop {} }
+}
+trait Trait2<T, U> {
+ fn baz(&self) -> (T, U);
+}
+impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
+
+fn test(s: S<u32, i32>) {
+ s.bar().baz();
+}"#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 102..106 'self': &S<T, U>
+ 128..139 '{ loop {} }': &dyn Trait<T, U>
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 175..179 'self': &Self
+ 251..252 's': S<u32, i32>
+ 267..289 '{ ...z(); }': ()
+ 273..274 's': S<u32, i32>
+ 273..280 's.bar()': &dyn Trait<u32, i32>
+ 273..286 's.bar().baz()': (u32, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_bare() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ fn foo(&self) -> u64;
+}
+fn bar() -> Trait {}
+
+fn test(x: Trait, y: &Trait) -> u64 {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+}"#,
+ expect![[r#"
+ 26..30 'self': &Self
+ 60..62 '{}': dyn Trait
+ 72..73 'x': dyn Trait
+ 82..83 'y': &dyn Trait
+ 100..175 '{ ...o(); }': u64
+ 106..107 'x': dyn Trait
+ 113..114 'y': &dyn Trait
+ 124..125 'z': dyn Trait
+ 128..131 'bar': fn bar() -> dyn Trait
+ 128..133 'bar()': dyn Trait
+ 139..140 'x': dyn Trait
+ 139..146 'x.foo()': u64
+ 152..153 'y': &dyn Trait
+ 152..159 'y.foo()': u64
+ 165..166 'z': dyn Trait
+ 165..172 'z.foo()': u64
+ "#]],
+ );
+}
+
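+// `weird_bounds` checks that unusual bound syntax (`impl Trait + 'lifetime`,
+// a bare `impl 'lifetime`, parenthesized bounds, `impl ?Sized`) still lowers
+// to a usable `impl ...` type instead of failing outright.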
+#[test]
+fn weird_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn test(
+ a: impl Trait + 'lifetime,
+ b: impl 'lifetime,
+ c: impl (Trait),
+ d: impl ('lifetime),
+ e: impl ?Sized,
+ f: impl Trait + ?Sized
+) {}
+"#,
+ expect![[r#"
+ 28..29 'a': impl Trait
+ 59..60 'b': impl Sized
+ 82..83 'c': impl Trait
+ 103..104 'd': impl Sized
+ 128..129 'e': impl ?Sized
+ 148..149 'f': impl Trait + ?Sized
+ 173..175 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn error_bound_chalk() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self) -> u32 { 0 }
+}
+
+fn test(x: (impl Trait + UnknownTrait)) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn assoc_type_bindings() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ type Type;
+}
+
+fn get<T: Trait>(t: T) -> <T as Trait>::Type {}
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> Trait for S<T> { type Type = T; }
+
+fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
+ get(x);
+ get2(x);
+ get(y);
+ get2(y);
+ get(set(S));
+ get2(set(S));
+ get2(S::<str>);
+}"#,
+ expect![[r#"
+ 49..50 't': T
+ 77..79 '{}': Trait::Type<T>
+ 111..112 't': T
+ 122..124 '{}': U
+ 154..155 't': T
+ 165..168 '{t}': T
+ 166..167 't': T
+ 256..257 'x': T
+ 262..263 'y': impl Trait<Type = i64>
+ 289..397 '{ ...r>); }': ()
+ 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
+ 295..301 'get(x)': u32
+ 299..300 'x': T
+ 307..311 'get2': fn get2<u32, T>(T) -> u32
+ 307..314 'get2(x)': u32
+ 312..313 'x': T
+ 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
+ 320..326 'get(y)': i64
+ 324..325 'y': impl Trait<Type = i64>
+ 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ 332..339 'get2(y)': i64
+ 337..338 'y': impl Trait<Type = i64>
+ 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
+ 345..356 'get(set(S))': u64
+ 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 349..355 'set(S)': S<u64>
+ 353..354 'S': S<u64>
+ 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 362..374 'get2(set(S))': u64
+ 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 367..373 'set(S)': S<u64>
+ 371..372 'S': S<u64>
+ 380..384 'get2': fn get2<str, S<str>>(S<str>) -> str
+ 380..394 'get2(S::<str>)': str
+ 385..393 'S::<str>': S<str>
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait_assoc_binding_projection_bug() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub trait Language {
+ type Kind;
+}
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+}
+struct SyntaxNode<L> {}
+fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
+
+trait Clone {
+ fn clone(&self) -> Self;
+}
+
+fn api_walkthrough() {
+ for node in foo() {
+ node.clone();
+ } //^^^^^^^^^^^^ {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn projection_eq_within_chalk() {
+ check_infer(
+ r#"
+trait Trait1 {
+ type Type;
+}
+trait Trait2<T> {
+ fn foo(self) -> T;
+}
+impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {}
+
+fn test<T: Trait1<Type = u32>>(x: T) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 163..164 'x': T
+ 169..185 '{ ...o(); }': ()
+ 175..176 'x': T
+ 175..182 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn where_clause_trait_in_scope_for_method_resolution() {
+ check_types(
+ r#"
+mod foo {
+ trait Trait {
+ fn foo(&self) -> u32 { 0 }
+ }
+}
+
+fn test<T: foo::Trait>(x: T) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn super_trait_method_resolution() {
+ check_infer(
+ r#"
+mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+trait Trait2 where Self: foo::SuperTrait {}
+
+fn test<T: Trait1, U: Trait2>(x: T, y: U) {
+ x.foo();
+ y.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': u32
+ 181..182 'x': T
+ 187..188 'y': U
+ 193..222 '{ ...o(); }': ()
+ 199..200 'x': T
+ 199..206 'x.foo()': u32
+ 212..213 'y': U
+ 212..219 'y.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_impl_trait_method_resolution() {
+ check_infer(
+ r#"
+//- minicore: sized
+mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+
+fn test(x: &impl Trait1) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': u32
+ 115..116 'x': &impl Trait1
+ 132..148 '{ ...o(); }': ()
+ 138..139 'x': &impl Trait1
+ 138..145 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_cycle() {
+ // This just needs to not crash
+ check_infer(
+ r#"
+ trait A: B {}
+ trait B: A {}
+
+ fn test<T: A>(x: T) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 43..44 'x': T
+ 49..65 '{ ...o(); }': ()
+ 55..56 'x': T
+ 55..62 'x.foo()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_assoc_type_bounds() {
+ check_infer(
+ r#"
+trait SuperTrait { type Type; }
+trait Trait where Self: SuperTrait {}
+
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> SuperTrait for S<T> { type Type = T; }
+impl<T> Trait for S<T> {}
+
+fn test() {
+ get2(set(S));
+}"#,
+ expect![[r#"
+ 102..103 't': T
+ 113..115 '{}': U
+ 145..146 't': T
+ 156..159 '{t}': T
+ 157..158 't': T
+ 258..279 '{ ...S)); }': ()
+ 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 264..276 'get2(set(S))': u64
+ 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 269..275 'set(S)': S<u64>
+ 273..274 'S': S<u64>
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait() {
+ check_infer_with_mismatches(
+ r#"
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
+}
+
+fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
+ f.call_once((1, 2));
+}"#,
+ expect![[r#"
+ 56..60 'self': Self
+ 62..66 'args': Args
+ 149..150 'f': F
+ 155..183 '{ ...2)); }': ()
+ 161..162 'f': F
+ 161..180 'f.call...1, 2))': u128
+ 173..179 '(1, 2)': (u32, u64)
+ 174..175 '1': u32
+ 177..178 '2': u64
+ "#]],
+ );
+}
+
+#[test]
+fn fn_ptr_and_item() {
+ check_infer_with_mismatches(
+ r#"
+#[lang="fn_once"]
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> Self::Output;
+}
+
+trait Foo<T> {
+ fn foo(&self) -> T;
+}
+
+struct Bar<T>(T);
+
+impl<A1, R, F: FnOnce(A1) -> R> Foo<(A1, R)> for Bar<F> {
+ fn foo(&self) -> (A1, R) { loop {} }
+}
+
+enum Opt<T> { None, Some(T) }
+impl<T> Opt<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Opt<U> { loop {} }
+}
+
+fn test() {
+ let bar: Bar<fn(u8) -> u32>;
+ bar.foo();
+
+ let opt: Opt<u8>;
+ let f: fn(u8) -> u32;
+ opt.map(f);
+}"#,
+ expect![[r#"
+ 74..78 'self': Self
+ 80..84 'args': Args
+ 139..143 'self': &Self
+ 243..247 'self': &Bar<F>
+ 260..271 '{ loop {} }': (A1, R)
+ 262..269 'loop {}': !
+ 267..269 '{}': ()
+ 355..359 'self': Opt<T>
+ 361..362 'f': F
+ 377..388 '{ loop {} }': Opt<U>
+ 379..386 'loop {}': !
+ 384..386 '{}': ()
+ 402..518 '{ ...(f); }': ()
+ 412..415 'bar': Bar<fn(u8) -> u32>
+ 441..444 'bar': Bar<fn(u8) -> u32>
+ 441..450 'bar.foo()': (u8, u32)
+ 461..464 'opt': Opt<u8>
+ 483..484 'f': fn(u8) -> u32
+ 505..508 'opt': Opt<u8>
+ 505..515 'opt.map(f)': Opt<u32>
+ 513..514 'f': fn(u8) -> u32
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait_deref_with_ty_default() {
+ check_infer(
+ r#"
+//- minicore: deref, fn
+struct Foo;
+
+impl Foo {
+ fn foo(&self) -> usize {}
+}
+
+struct Lazy<T, F = fn() -> T>(F);
+
+impl<T, F> Lazy<T, F> {
+ pub fn new(f: F) -> Lazy<T, F> {}
+}
+
+impl<T, F: FnOnce() -> T> core::ops::Deref for Lazy<T, F> {
+ type Target = T;
+}
+
+fn test() {
+ let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
+ let r1 = lazy1.foo();
+
+ fn make_foo_fn() -> Foo {}
+ let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
+ let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
+ let r2 = lazy2.foo();
+}"#,
+ expect![[r#"
+ 36..40 'self': &Foo
+ 51..53 '{}': usize
+ 131..132 'f': F
+ 151..153 '{}': Lazy<T, F>
+ 251..497 '{ ...o(); }': ()
+ 261..266 'lazy1': Lazy<Foo, || -> Foo>
+ 283..292 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
+ 283..300 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
+ 293..299 '|| Foo': || -> Foo
+ 296..299 'Foo': Foo
+ 310..312 'r1': usize
+ 315..320 'lazy1': Lazy<Foo, || -> Foo>
+ 315..326 'lazy1.foo()': usize
+ 368..383 'make_foo_fn_ptr': fn() -> Foo
+ 399..410 'make_foo_fn': fn make_foo_fn() -> Foo
+ 420..425 'lazy2': Lazy<Foo, fn() -> Foo>
+ 442..451 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
+ 442..468 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
+ 452..467 'make_foo_fn_ptr': fn() -> Foo
+ 478..480 'r2': usize
+ 483..488 'lazy2': Lazy<Foo, fn() -> Foo>
+ 483..494 'lazy2.foo()': usize
+ 357..359 '{}': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn closure_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+enum Option<T> { Some(T), None }
+impl<T> Option<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> { loop {} }
+}
+
+fn test() {
+ let x = Option::Some(1u32);
+ x.map(|v| v + 1);
+ x.map(|_v| 1u64);
+ let y: Option<i64> = x.map(|_v| 1);
+}"#,
+ expect![[r#"
+ 86..90 'self': Option<T>
+ 92..93 'f': F
+ 111..122 '{ loop {} }': Option<U>
+ 113..120 'loop {}': !
+ 118..120 '{}': ()
+ 136..255 '{ ... 1); }': ()
+ 146..147 'x': Option<u32>
+ 150..162 'Option::Some': Some<u32>(u32) -> Option<u32>
+ 150..168 'Option...(1u32)': Option<u32>
+ 163..167 '1u32': u32
+ 174..175 'x': Option<u32>
+ 174..190 'x.map(...v + 1)': Option<u32>
+ 180..189 '|v| v + 1': |u32| -> u32
+ 181..182 'v': u32
+ 184..185 'v': u32
+ 184..189 'v + 1': u32
+ 188..189 '1': u32
+ 196..197 'x': Option<u32>
+ 196..212 'x.map(... 1u64)': Option<u64>
+ 202..211 '|_v| 1u64': |u32| -> u64
+ 203..205 '_v': u32
+ 207..211 '1u64': u64
+ 222..223 'y': Option<i64>
+ 239..240 'x': Option<u32>
+ 239..252 'x.map(|_v| 1)': Option<i64>
+ 245..251 '|_v| 1': |u32| -> i64
+ 246..248 '_v': u32
+ 250..251 '1': i64
+ "#]],
+ );
+}
+
+#[test]
+fn closure_2() {
+ check_types(
+ r#"
+//- minicore: add, fn
+
+impl core::ops::Add for u64 {
+ type Output = Self;
+ fn add(self, rhs: u64) -> Self::Output {0}
+}
+
+impl core::ops::Add for u128 {
+ type Output = Self;
+ fn add(self, rhs: u128) -> Self::Output {0}
+}
+
+fn test<F: FnOnce(u32) -> u64>(f: F) {
+ f(1);
+ // ^ u32
+ //^^^^ u64
+ let g = |v| v + 1;
+ //^^^^^ u64
+ //^^^^^^^^^ |u64| -> u64
+ g(1u64);
+ //^^^^^^^ u64
+ let h = |v| 1u128 + v;
+ //^^^^^^^^^^^^^ |u128| -> u128
+}"#,
+ );
+}
+
+#[test]
+fn closure_as_argument_inference_order() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn foo1<T, U, F: FnOnce(T) -> U>(x: T, f: F) -> U { loop {} }
+fn foo2<T, U, F: FnOnce(T) -> U>(f: F, x: T) -> U { loop {} }
+
+struct S;
+impl S {
+ fn method(self) -> u64;
+
+ fn foo1<T, U, F: FnOnce(T) -> U>(self, x: T, f: F) -> U { loop {} }
+ fn foo2<T, U, F: FnOnce(T) -> U>(self, f: F, x: T) -> U { loop {} }
+}
+
+fn test() {
+ let x1 = foo1(S, |s| s.method());
+ let x2 = foo2(|s| s.method(), S);
+ let x3 = S.foo1(S, |s| s.method());
+ let x4 = S.foo2(|s| s.method(), S);
+}"#,
+ expect![[r#"
+ 33..34 'x': T
+ 39..40 'f': F
+ 50..61 '{ loop {} }': U
+ 52..59 'loop {}': !
+ 57..59 '{}': ()
+ 95..96 'f': F
+ 101..102 'x': T
+ 112..123 '{ loop {} }': U
+ 114..121 'loop {}': !
+ 119..121 '{}': ()
+ 158..162 'self': S
+ 210..214 'self': S
+ 216..217 'x': T
+ 222..223 'f': F
+ 233..244 '{ loop {} }': U
+ 235..242 'loop {}': !
+ 240..242 '{}': ()
+ 282..286 'self': S
+ 288..289 'f': F
+ 294..295 'x': T
+ 305..316 '{ loop {} }': U
+ 307..314 'loop {}': !
+ 312..314 '{}': ()
+ 330..489 '{ ... S); }': ()
+ 340..342 'x1': u64
+ 345..349 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
+ 345..368 'foo1(S...hod())': u64
+ 350..351 'S': S
+ 353..367 '|s| s.method()': |S| -> u64
+ 354..355 's': S
+ 357..358 's': S
+ 357..367 's.method()': u64
+ 378..380 'x2': u64
+ 383..387 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
+ 383..406 'foo2(|...(), S)': u64
+ 388..402 '|s| s.method()': |S| -> u64
+ 389..390 's': S
+ 392..393 's': S
+ 392..402 's.method()': u64
+ 404..405 'S': S
+ 416..418 'x3': u64
+ 421..422 'S': S
+ 421..446 'S.foo1...hod())': u64
+ 428..429 'S': S
+ 431..445 '|s| s.method()': |S| -> u64
+ 432..433 's': S
+ 435..436 's': S
+ 435..445 's.method()': u64
+ 456..458 'x4': u64
+ 461..462 'S': S
+ 461..486 'S.foo2...(), S)': u64
+ 468..482 '|s| s.method()': |S| -> u64
+ 469..470 's': S
+ 472..473 's': S
+ 472..482 's.method()': u64
+ 484..485 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn fn_item_fn_trait() {
+ check_types(
+ r#"
+//- minicore: fn
+struct S;
+
+fn foo() -> S { S }
+
+fn takes_closure<U, F: FnOnce() -> U>(f: F) -> U { f() }
+
+fn test() {
+ takes_closure(foo);
+} //^^^^^^^^^^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_1() {
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T: Trait>() where T::Item: Trait2 {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_2() {
+ check_types(
+ r#"
+trait Trait<T> {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_on_impl_self() {
+ check_infer(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+
+ fn f(&self, x: Self::Item);
+}
+
+struct S;
+
+impl Trait for S {
+ type Item = u32;
+ fn f(&self, x: Self::Item) { let y = x; }
+}
+
+struct S2;
+
+impl Trait for S2 {
+ type Item = i32;
+ fn f(&self, x: <Self>::Item) { let y = x; }
+}"#,
+ expect![[r#"
+ 40..44 'self': &Self
+ 46..47 'x': Trait::Item<Self>
+ 126..130 'self': &S
+ 132..133 'x': u32
+ 147..161 '{ let y = x; }': ()
+ 153..154 'y': u32
+ 157..158 'x': u32
+ 228..232 'self': &S2
+ 234..235 'x': i32
+ 251..265 '{ let y = x; }': ()
+ 257..258 'y': i32
+ 261..262 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn unselected_projection_on_trait_self() {
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+
+ fn f(&self) -> Self::Item { loop {} }
+}
+
+struct S;
+impl Trait for S {
+ type Item = u32;
+}
+
+fn test() {
+ S.f();
+} //^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_chalk_fold() {
+ check_types(
+ r#"
+trait Interner {}
+trait Fold<I: Interner, TI = I> {
+ type Result;
+}
+
+struct Ty<I: Interner> {}
+impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
+ type Result = Ty<TI>;
+}
+
+fn fold<I: Interner, T>(interner: &I, t: T) -> T::Result
+where
+ T: Fold<I, I>,
+{
+ loop {}
+}
+
+fn foo<I: Interner>(interner: &I, t: Ty<I>) {
+ fold(interner, t);
+} //^^^^^^^^^^^^^^^^^ Ty<I>
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty() {
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self);
+}
+
+struct S;
+
+impl Trait<Self> for S {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty_cycle() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self);
+}
+
+struct S<T>;
+
+impl Trait for S<Self> {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_1() {
+ // This is not a cycle, because the `T: Trait2<T::Item>` bound depends only on the `T: Trait`
+ // bound, not on itself (since only `Trait` can define `Item`).
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+}
+
+trait Trait2<T> {}
+
+fn test<T: Trait>() where T: Trait2<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_2() {
+ // this is a legitimate cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Trait<T> {
+ type Item;
+}
+
+fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_3() {
+ // this is a cycle for rustc; we currently accept it
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+ type OtherItem;
+}
+
+fn test<T>() where T: Trait<OtherItem = T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_no_cycle() {
+ // this is not a cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Index {
+ type Output;
+}
+
+type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+}
+
+pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+}
+
+fn test<T>(t: T) where T: UnificationStoreMut {
+ let x;
+ t.push(x);
+ let y: Key<T>;
+ (x, y);
+} //^^^^^^ (UnificationStoreBase::Key<T>, UnificationStoreBase::Key<T>)
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_1() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// workaround for Chalk assoc type normalization problems
+pub struct S<T>;
+impl<T: Iterator> Iterator for S<T> {
+ type Item = <T as Iterator>::Item;
+}
+
+fn test<I: Iterator<Item: OtherTrait<u32>>>() {
+ let x: <S<I> as Iterator>::Item;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_2() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+
+fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
+ let x: <<I as Iterator>::Item as Iterator>::Item;
+ x;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn proc_macro_server_types() {
+ check_infer(
+ r#"
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ TokenStream {
+ fn new() -> $S::TokenStream;
+ },
+ Group {
+ },
+ }
+ };
+}
+macro_rules! associated_item {
+ (type TokenStream) =>
+ (type TokenStream: 'static;);
+ (type Group) =>
+ (type Group: 'static;);
+ ($($item:tt)*) => ($($item)*;)
+}
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method($($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+
+with_api!(Self, self_, declare_server_traits);
+struct G {}
+struct T {}
+struct RustAnalyzer;
+impl Types for RustAnalyzer {
+ type TokenStream = T;
+ type Group = G;
+}
+
+fn make<T>() -> T { loop {} }
+impl TokenStream for RustAnalyzer {
+ fn new() -> Self::TokenStream {
+ let group: Self::Group = make();
+ make()
+ }
+}"#,
+ expect![[r#"
+ 1075..1086 '{ loop {} }': T
+ 1077..1084 'loop {}': !
+ 1082..1084 '{}': ()
+ 1157..1220 '{ ... }': T
+ 1171..1176 'group': G
+ 1192..1196 'make': fn make<G>() -> G
+ 1192..1198 'make()': G
+ 1208..1212 'make': fn make<T>() -> T
+ 1208..1214 'make()': T
+ "#]],
+ );
+}
+
+#[test]
+fn unify_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {}
+
+fn foo(x: impl Trait<u32>) { loop {} }
+fn bar<T>(x: impl Trait<T>) -> T { loop {} }
+
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn default<T>() -> T { loop {} }
+
+fn test() -> impl Trait<i32> {
+ let s1 = S(default());
+ foo(s1);
+ let x: i32 = bar(S(default()));
+ S(default())
+}"#,
+ expect![[r#"
+ 26..27 'x': impl Trait<u32>
+ 46..57 '{ loop {} }': ()
+ 48..55 'loop {}': !
+ 53..55 '{}': ()
+ 68..69 'x': impl Trait<T>
+ 91..102 '{ loop {} }': T
+ 93..100 'loop {}': !
+ 98..100 '{}': ()
+ 171..182 '{ loop {} }': T
+ 173..180 'loop {}': !
+ 178..180 '{}': ()
+ 213..309 '{ ...t()) }': S<i32>
+ 223..225 's1': S<u32>
+ 228..229 'S': S<u32>(u32) -> S<u32>
+ 228..240 'S(default())': S<u32>
+ 230..237 'default': fn default<u32>() -> u32
+ 230..239 'default()': u32
+ 246..249 'foo': fn foo(S<u32>)
+ 246..253 'foo(s1)': ()
+ 250..252 's1': S<u32>
+ 263..264 'x': i32
+ 272..275 'bar': fn bar<i32>(S<i32>) -> i32
+ 272..289 'bar(S(...lt()))': i32
+ 276..277 'S': S<i32>(i32) -> S<i32>
+ 276..288 'S(default())': S<i32>
+ 278..285 'default': fn default<i32>() -> i32
+ 278..287 'default()': i32
+ 295..296 'S': S<i32>(i32) -> S<i32>
+ 295..307 'S(default())': S<i32>
+ 297..304 'default': fn default<i32>() -> i32
+ 297..306 'default()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn assoc_types_from_bounds() {
+ check_infer(
+ r#"
+//- minicore: fn
+trait T {
+ type O;
+}
+
+impl T for () {
+ type O = ();
+}
+
+fn f<X, F>(_v: F)
+where
+ X: T,
+ F: FnOnce(&X::O),
+{ }
+
+fn main() {
+ f::<(), _>(|z| { z; });
+}"#,
+ expect![[r#"
+ 72..74 '_v': F
+ 117..120 '{ }': ()
+ 132..163 '{ ... }); }': ()
+ 138..148 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
+ 138..160 'f::<()... z; })': ()
+ 149..159 '|z| { z; }': |&()| -> ()
+ 150..151 'z': &()
+ 153..159 '{ z; }': ()
+ 155..156 'z': &()
+ "#]],
+ );
+}
+
+#[test]
+fn associated_type_bound() {
+ check_types(
+ r#"
+pub trait Trait {
+ type Item: OtherTrait<u32>;
+}
+pub trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// this is just a workaround for chalk#234
+pub struct S<T>;
+impl<T: Trait> Trait for S<T> {
+ type Item = <T as Trait>::Item;
+}
+
+fn test<T: Trait>() {
+ let y: <S<T> as Trait>::Item = no_matter;
+ y.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_through_chalk() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+}
+trait Trait {
+ fn foo(&self);
+}
+
+fn test(x: Box<dyn Trait>) {
+ x.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn string_to_owned() {
+ check_types(
+ r#"
+struct String {}
+pub trait ToOwned {
+ type Owned;
+ fn to_owned(&self) -> Self::Owned;
+}
+impl ToOwned for str {
+ type Owned = String;
+}
+fn test() {
+ "foo".to_owned();
+} //^^^^^^^^^^^^^^^^ String
+"#,
+ );
+}
+
+#[test]
+fn iterator_chain() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+pub trait Iterator {
+ type Item;
+
+ fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ { loop {} }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ { loop {} }
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ fn into_iter(self) -> Self::IntoIter;
+}
+
+pub struct FilterMap<I, F> { }
+impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+where
+ F: FnMut(I::Item) -> Option<B>,
+{
+ type Item = B;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+
+ fn into_iter(self) -> I {
+ self
+ }
+}
+
+struct Vec<T> {}
+impl<T> Vec<T> {
+ fn new() -> Self { loop {} }
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+}
+
+pub struct IntoIter<T> { }
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+}
+
+fn main() {
+ Vec::<i32>::new().into_iter()
+ .filter_map(|x| if x > 0 { Some(x as u32) } else { None })
+ .for_each(|y| { y; });
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 67..68 'f': F
+ 152..163 '{ loop {} }': FilterMap<Self, F>
+ 154..161 'loop {}': !
+ 159..161 '{}': ()
+ 184..188 'self': Self
+ 190..191 'f': F
+ 240..251 '{ loop {} }': ()
+ 242..249 'loop {}': !
+ 247..249 '{}': ()
+ 360..364 'self': Self
+ 689..693 'self': I
+ 700..720 '{ ... }': I
+ 710..714 'self': I
+ 779..790 '{ loop {} }': Vec<T>
+ 781..788 'loop {}': !
+ 786..788 '{}': ()
+ 977..1104 '{ ... }); }': ()
+ 983..998 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
+ 983..1000 'Vec::<...:new()': Vec<i32>
+ 983..1012 'Vec::<...iter()': IntoIter<i32>
+ 983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
+ 983..1101 'Vec::<... y; })': ()
+ 1029..1074 '|x| if...None }': |i32| -> Option<u32>
+ 1030..1031 'x': i32
+ 1033..1074 'if x >...None }': Option<u32>
+ 1036..1037 'x': i32
+ 1036..1041 'x > 0': bool
+ 1040..1041 '0': i32
+ 1042..1060 '{ Some...u32) }': Option<u32>
+ 1044..1048 'Some': Some<u32>(u32) -> Option<u32>
+ 1044..1058 'Some(x as u32)': Option<u32>
+ 1049..1050 'x': i32
+ 1049..1057 'x as u32': u32
+ 1066..1074 '{ None }': Option<u32>
+ 1068..1072 'None': Option<u32>
+ 1090..1100 '|y| { y; }': |u32| -> ()
+ 1091..1092 'y': u32
+ 1094..1100 '{ y; }': ()
+ 1096..1097 'y': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_assoc() {
+ check_types(
+ r#"
+struct Bar;
+struct Foo;
+
+trait A {
+ type OutputA;
+}
+
+impl A for Bar {
+ type OutputA = Foo;
+}
+
+trait B {
+ type Output;
+ fn foo() -> Self::Output;
+}
+
+impl<T:A> B for T {
+ type Output = T::OutputA;
+ fn foo() -> Self::Output { loop {} }
+}
+
+fn main() {
+ Bar::foo();
+} //^^^^^^^^^^ Foo
+"#,
+ );
+}
+
+#[test]
+fn trait_object_no_coercion() {
+ check_infer_with_mismatches(
+ r#"
+trait Foo {}
+
+fn foo(x: &dyn Foo) {}
+
+fn test(x: &dyn Foo) {
+ foo(x);
+}"#,
+ expect![[r#"
+ 21..22 'x': &dyn Foo
+ 34..36 '{}': ()
+ 46..47 'x': &dyn Foo
+ 59..74 '{ foo(x); }': ()
+ 65..68 'foo': fn foo(&dyn Foo)
+ 65..71 'foo(x)': ()
+ 69..70 'x': &dyn Foo
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+struct IsCopy;
+impl Copy for IsCopy {}
+struct NotCopy;
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ IsCopy.test();
+ NotCopy.test();
+ (IsCopy, IsCopy).test();
+ (IsCopy, NotCopy).test();
+}"#,
+ expect![[r#"
+ 78..82 'self': &Self
+ 134..235 '{ ...t(); }': ()
+ 140..146 'IsCopy': IsCopy
+ 140..153 'IsCopy.test()': bool
+ 159..166 'NotCopy': NotCopy
+ 159..173 'NotCopy.test()': {unknown}
+ 179..195 '(IsCop...sCopy)': (IsCopy, IsCopy)
+ 179..202 '(IsCop...test()': bool
+ 180..186 'IsCopy': IsCopy
+ 188..194 'IsCopy': IsCopy
+ 208..225 '(IsCop...tCopy)': (IsCopy, NotCopy)
+ 208..232 '(IsCop...test()': {unknown}
+ 209..215 'IsCopy': IsCopy
+ 217..224 'NotCopy': NotCopy
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_def_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+fn foo() {}
+fn bar<T: Copy>(T) -> T {}
+struct Struct(usize);
+enum Enum { Variant(usize) }
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ foo.test();
+ bar.test();
+ Struct.test();
+ Enum::Variant.test();
+}"#,
+ expect![[r#"
+ 9..11 '{}': ()
+ 28..29 'T': {unknown}
+ 36..38 '{}': T
+ 36..38: expected T, got ()
+ 113..117 'self': &Self
+ 169..249 '{ ...t(); }': ()
+ 175..178 'foo': fn foo()
+ 175..185 'foo.test()': bool
+ 191..194 'bar': fn bar<{unknown}>({unknown}) -> {unknown}
+ 191..201 'bar.test()': bool
+ 207..213 'Struct': Struct(usize) -> Struct
+ 207..220 'Struct.test()': bool
+ 226..239 'Enum::Variant': Variant(usize) -> Enum
+ 226..246 'Enum::...test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_ptr_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test(f1: fn(), f2: fn(usize) -> u8, f3: fn(u8, u8) -> &u8) {
+ f1.test();
+ f2.test();
+ f3.test();
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 76..78 'f1': fn()
+ 86..88 'f2': fn(usize) -> u8
+ 107..109 'f3': fn(u8, u8) -> &u8
+ 130..178 '{ ...t(); }': ()
+ 136..138 'f1': fn()
+ 136..145 'f1.test()': bool
+ 151..153 'f2': fn(usize) -> u8
+ 151..160 'f2.test()': bool
+ 166..168 'f3': fn(u8, u8) -> &u8
+ 166..175 'f3.test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_sized() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Test { fn test(&self) -> bool; }
+impl<T: Sized> Test for T {}
+
+fn test() {
+ 1u8.test();
+ (*"foo").test(); // not Sized
+ (1u8, 1u8).test();
+ (1u8, *"foo").test(); // not Sized
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 79..194 '{ ...ized }': ()
+ 85..88 '1u8': u8
+ 85..95 '1u8.test()': bool
+ 101..116 '(*"foo").test()': {unknown}
+ 102..108 '*"foo"': str
+ 103..108 '"foo"': &str
+ 135..145 '(1u8, 1u8)': (u8, u8)
+ 135..152 '(1u8, ...test()': bool
+ 136..139 '1u8': u8
+ 141..144 '1u8': u8
+ 158..171 '(1u8, *"foo")': (u8, str)
+ 158..178 '(1u8, ...test()': {unknown}
+ 159..162 '1u8': u8
+ 164..170 '*"foo"': str
+ 165..170 '"foo"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn integer_range_iterate() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ for x in 0..100 { x; }
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+}
+
+pub mod iter {
+ pub trait Iterator {
+ type Item;
+ }
+
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ }
+
+ impl<T> IntoIterator for T where T: Iterator {
+ type Item = <T as Iterator>::Item;
+ type IntoIter = Self;
+ }
+}
+
+trait Step {}
+impl Step for i32 {}
+impl Step for i64 {}
+
+impl<A: Step> iter::Iterator for ops::Range<A> {
+ type Item = A;
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_closure_arg() {
+ check_infer(
+ r#"
+//- /lib.rs
+
+enum Option<T> {
+ None,
+ Some(T)
+}
+
+fn foo() {
+ let s = Option::None;
+ let f = |x: Option<i32>| {};
+ (&f)(s)
+}"#,
+ expect![[r#"
+ 52..126 '{ ...)(s) }': ()
+ 62..63 's': Option<i32>
+ 66..78 'Option::None': Option<i32>
+ 88..89 'f': |Option<i32>| -> ()
+ 92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
+ 93..94 'x': Option<i32>
+ 109..111 '{}': ()
+ 117..124 '(&f)(s)': ()
+ 118..120 '&f': &|Option<i32>| -> ()
+ 119..120 'f': |Option<i32>| -> ()
+ 122..123 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_fn_param_informs_call_site_closure_signature() {
+ cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature);
+ check_types(
+ r#"
+//- minicore: fn, coerce_unsized
+struct S;
+impl S {
+ fn inherent(&self) -> u8 { 0 }
+}
+fn take_dyn_fn(f: &dyn Fn(S)) {}
+
+fn f() {
+ take_dyn_fn(&|x| { x.inherent(); });
+ //^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_fn_trait_arg() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+fn foo<F, T>(f: F) -> T
+where
+ F: Fn(Option<i32>) -> T,
+{
+ let s = None;
+ f(s)
+}
+"#,
+ expect![[r#"
+ 13..14 'f': F
+ 59..89 '{ ...f(s) }': T
+ 69..70 's': Option<i32>
+ 73..77 'None': Option<i32>
+ 83..84 'f': F
+ 83..87 'f(s)': T
+ 85..86 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_box_fn_arg() {
+ // The type mismatch is because we don't define Unsize and CoerceUnsized
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, deref, option
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+ inner: *mut T,
+}
+
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.inner
+ }
+}
+
+fn foo() {
+ let s = None;
+ let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {});
+ f(&s);
+}"#,
+ expect![[r#"
+ 154..158 'self': &Box<T>
+ 166..193 '{ ... }': &T
+ 176..187 '&self.inner': &*mut T
+ 177..181 'self': &Box<T>
+ 177..187 'self.inner': *mut T
+ 206..296 '{ ...&s); }': ()
+ 216..217 's': Option<i32>
+ 220..224 'None': Option<i32>
+ 234..235 'f': Box<dyn FnOnce(&Option<i32>)>
+ 269..282 'box (|ps| {})': Box<|&Option<i32>| -> ()>
+ 274..281 '|ps| {}': |&Option<i32>| -> ()
+ 275..277 'ps': &Option<i32>
+ 279..281 '{}': ()
+ 288..289 'f': Box<dyn FnOnce(&Option<i32>)>
+ 288..293 'f(&s)': ()
+ 290..292 '&s': &Option<i32>
+ 291..292 's': Option<i32>
+ 269..282: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|&Option<i32>| -> ()>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_dyn_fn_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: &dyn Fn() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn infer_dyn_fn_once_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: dyn FnOnce() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn variable_kinds_1() {
+ check_types(
+ r#"
+trait Trait<T> { fn get(self, t: T) -> T; }
+struct S;
+impl Trait<u128> for S {}
+impl Trait<f32> for S {}
+fn test() {
+ S.get(1);
+ //^^^^^^^^ u128
+ S.get(1.);
+ //^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn variable_kinds_2() {
+ check_types(
+ r#"
+trait Trait { fn get(self) -> Self; }
+impl Trait for u128 {}
+impl Trait for f32 {}
+fn test() {
+ 1.get();
+ //^^^^^^^ u128
+ (1.).get();
+ //^^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check_types(
+ r#"
+mod tr {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+}
+
+struct Tr;
+impl crate::tr::Tr for Tr {}
+
+use crate::tr::Tr as _;
+fn test() {
+ Tr.method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn inner_use() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_in_scope_with_inner_item() {
+ check_infer(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+use m::Tr;
+
+fn f() {
+ fn inner() {
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+}"#,
+ expect![[r#"
+ 46..50 'self': &Self
+ 58..63 '{ 0 }': u8
+ 60..61 '0': u8
+ 115..185 '{ ... } }': ()
+ 132..183 '{ ... }': ()
+ 142..144 '()': ()
+ 142..153 '().method()': u8
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_in_block() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+
+ {
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+ }
+
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+}
+ "#,
+ );
+}
+
+#[test]
+fn nested_inner_function_calling_self() {
+ check_infer(
+ r#"
+struct S;
+fn f() {
+ fn inner() -> S {
+ let s = inner();
+ }
+}"#,
+ expect![[r#"
+ 17..73 '{ ... } }': ()
+ 39..71 '{ ... }': S
+ 53..54 's': S
+ 57..62 'inner': fn inner() -> S
+ 57..64 'inner()': S
+ "#]],
+ )
+}
+
+#[test]
+fn infer_default_trait_type_parameter() {
+ check_infer(
+ r#"
+struct A;
+
+trait Op<RHS=Self> {
+ type Output;
+
+ fn do_op(self, rhs: RHS) -> Self::Output;
+}
+
+impl Op for A {
+ type Output = bool;
+
+ fn do_op(self, rhs: Self) -> Self::Output {
+ true
+ }
+}
+
+fn test() {
+ let x = A;
+ let y = A;
+ let r = x.do_op(y);
+}"#,
+ expect![[r#"
+ 63..67 'self': Self
+ 69..72 'rhs': RHS
+ 153..157 'self': A
+ 159..162 'rhs': A
+ 186..206 '{ ... }': bool
+ 196..200 'true': bool
+ 220..277 '{ ...(y); }': ()
+ 230..231 'x': A
+ 234..235 'A': A
+ 245..246 'y': A
+ 249..250 'A': A
+ 260..261 'r': bool
+ 264..265 'x': A
+ 264..274 'x.do_op(y)': bool
+ 272..273 'y': A
+ "#]],
+ )
+}
+
+#[test]
+fn qualified_path_as_qualified_trait() {
+ check_infer(
+ r#"
+mod foo {
+
+ pub trait Foo {
+ type Target;
+ }
+ pub trait Bar {
+ type Output;
+ fn boo() -> Self::Output {
+ loop {}
+ }
+ }
+}
+
+struct F;
+impl foo::Foo for F {
+ type Target = ();
+}
+impl foo::Bar for F {
+ type Output = <F as foo::Foo>::Target;
+}
+
+fn foo() {
+ use foo::Bar;
+ let x = <F as Bar>::boo();
+}"#,
+ expect![[r#"
+ 132..163 '{ ... }': Bar::Output<Self>
+ 146..153 'loop {}': !
+ 151..153 '{}': ()
+ 306..358 '{ ...o(); }': ()
+ 334..335 'x': ()
+ 338..353 '<F as Bar>::boo': fn boo<F>() -> <F as Bar>::Output
+ 338..355 '<F as ...:boo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn renamed_extern_crate_in_block() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:serde
+use serde::Deserialize;
+
+struct Foo {}
+
+const _ : () = {
+ extern crate serde as _serde;
+ impl _serde::Deserialize for Foo {
+ fn deserialize() -> u8 { 0 }
+ }
+};
+
+fn foo() {
+ Foo::deserialize();
+ //^^^^^^^^^^^^^^^^^^ u8
+}
+
+//- /serde.rs crate:serde
+
+pub trait Deserialize {
+ fn deserialize() -> u8;
+}"#,
+ );
+}
+
+#[test]
+fn bin_op_with_rhs_is_self_for_assoc_bound() {
+ check_no_mismatches(
+ r#"//- minicore: eq
+ fn repro<T>(t: T) -> bool
+where
+ T: Request,
+ T::Output: Convertable,
+{
+ let a = execute(&t).convert();
+ let b = execute(&t).convert();
+ a.eq(&b);
+ let a = execute(&t).convert2();
+ let b = execute(&t).convert2();
+ a.eq(&b)
+}
+fn execute<T>(t: &T) -> T::Output
+where
+ T: Request,
+{
+ <T as Request>::output()
+}
+trait Convertable {
+ type TraitSelf: PartialEq<Self::TraitSelf>;
+ type AssocAsDefaultSelf: PartialEq;
+ fn convert(self) -> Self::AssocAsDefaultSelf;
+ fn convert2(self) -> Self::TraitSelf;
+}
+trait Request {
+ type Output;
+ fn output() -> Self::Output;
+}
+ "#,
+ );
+}
+
+#[test]
+fn bin_op_adt_with_rhs_primitive() {
+ check_infer_with_mismatches(
+ r#"
+#[lang = "add"]
+pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+}
+
+struct Wrapper(u32);
+impl Add<u32> for Wrapper {
+ type Output = Self;
+ fn add(self, rhs: u32) -> Wrapper {
+ Wrapper(rhs)
+ }
+}
+fn main(){
+ let wrapped = Wrapper(10);
+ let num: u32 = 2;
+ let res = wrapped + num;
+
+}"#,
+ expect![[r#"
+ 72..76 'self': Self
+ 78..81 'rhs': Rhs
+ 192..196 'self': Wrapper
+ 198..201 'rhs': u32
+ 219..247 '{ ... }': Wrapper
+ 229..236 'Wrapper': Wrapper(u32) -> Wrapper
+ 229..241 'Wrapper(rhs)': Wrapper
+ 237..240 'rhs': u32
+ 259..345 '{ ...um; }': ()
+ 269..276 'wrapped': Wrapper
+ 279..286 'Wrapper': Wrapper(u32) -> Wrapper
+ 279..290 'Wrapper(10)': Wrapper
+ 287..289 '10': u32
+ 300..303 'num': u32
+ 311..312 '2': u32
+ 322..325 'res': Wrapper
+ 328..335 'wrapped': Wrapper
+ 328..341 'wrapped + num': Wrapper
+ 338..341 'num': u32
+ "#]],
+ )
+}
+
+#[test]
+fn array_length() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl T for [u8; 4] {
+ type Output = usize;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+impl T for [u8; 2] {
+ type Output = u8;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+ let v3 = [0u8; 4];
+ let v4 = v3.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 133..137 'self': &[u8; 4]
+ 155..172 '{ ... }': usize
+ 165..166 '2': usize
+ 236..240 'self': &[u8; 2]
+ 258..275 '{ ... }': u8
+ 268..269 '2': u8
+ 289..392 '{ ...g(); }': ()
+ 299..300 'v': [u8; 2]
+ 303..311 '[0u8; 2]': [u8; 2]
+ 304..307 '0u8': u8
+ 309..310 '2': usize
+ 321..323 'v2': u8
+ 326..327 'v': [u8; 2]
+ 326..338 'v.do_thing()': u8
+ 348..350 'v3': [u8; 4]
+ 353..361 '[0u8; 4]': [u8; 4]
+ 354..357 '0u8': u8
+ 359..360 '4': usize
+ 371..373 'v4': usize
+ 376..378 'v3': [u8; 4]
+ 376..389 'v3.do_thing()': usize
+ "#]],
+ )
+}
+
+#[test]
+fn const_generics() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl<const L: usize> T for [u8; L] {
+ type Output = [u8; L];
+ fn do_thing(&self) -> Self::Output {
+ *self
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 151..155 'self': &[u8; L]
+ 173..194 '{ ... }': [u8; L]
+ 183..188 '*self': [u8; L]
+ 184..188 'self': &[u8; L]
+ 208..260 '{ ...g(); }': ()
+ 218..219 'v': [u8; 2]
+ 222..230 '[0u8; 2]': [u8; 2]
+ 223..226 '0u8': u8
+ 228..229 '2': usize
+ 240..242 'v2': [u8; 2]
+ 245..246 'v': [u8; 2]
+ 245..257 'v.do_thing()': [u8; 2]
+ "#]],
+ )
+}
+
+#[test]
+fn fn_returning_unit() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn test<F: FnOnce()>(f: F) {
+ let _: () = f();
+}"#,
+ expect![[r#"
+ 21..22 'f': F
+ 27..51 '{ ...f(); }': ()
+ 37..38 '_': ()
+ 45..46 'f': F
+ 45..48 'f()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn trait_in_scope_of_trait_impl() {
+ check_infer(
+ r#"
+mod foo {
+ pub trait Foo {
+ fn foo(self);
+ fn bar(self) -> usize { 0 }
+ }
+}
+impl foo::Foo for u32 {
+ fn foo(self) {
+ let _x = self.bar();
+ }
+}
+ "#,
+ expect![[r#"
+ 45..49 'self': Self
+ 67..71 'self': Self
+ 82..87 '{ 0 }': usize
+ 84..85 '0': usize
+ 131..135 'self': u32
+ 137..173 '{ ... }': ()
+ 151..153 '_x': usize
+ 156..160 'self': u32
+ 156..166 'self.bar()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_async_ret_type() {
+ check_types(
+ r#"
+//- minicore: future, result
+struct Fooey;
+
+impl Fooey {
+ fn collect<B: Convert>(self) -> B {
+ B::new()
+ }
+}
+
+trait Convert {
+ fn new() -> Self;
+}
+impl Convert for u32 {
+ fn new() -> Self { 0 }
+}
+
+async fn get_accounts() -> Result<u32, ()> {
+ let ret = Fooey.collect();
+ // ^^^^^^^^^^^^^^^ u32
+ Ok(ret)
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_1() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S;
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_2() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+struct S;
+
+fn test() {
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ }
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_3() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S1;
+ {
+ struct S2;
+
+ impl Trait<S1> for S2 {
+ fn foo(&self) -> S1 { S1 }
+ }
+
+ S2.foo();
+ // ^^^^^^^^ S1
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn associated_type_sized_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+struct Yes;
+trait IsSized { const IS_SIZED: Yes; }
+impl<T: Sized> IsSized for T { const IS_SIZED: Yes = Yes; }
+
+trait Foo {
+ type Explicit: Sized;
+ type Implicit;
+ type Relaxed: ?Sized;
+}
+fn f<F: Foo>() {
+ F::Explicit::IS_SIZED;
+ F::Implicit::IS_SIZED;
+ F::Relaxed::IS_SIZED;
+}
+"#,
+ expect![[r#"
+ 104..107 'Yes': Yes
+ 212..295 '{ ...ZED; }': ()
+ 218..239 'F::Exp..._SIZED': Yes
+ 245..266 'F::Imp..._SIZED': Yes
+ 272..292 'F::Rel..._SIZED': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_map() {
+ check_types(
+ r#"
+pub struct Key<K, V, P = (K, V)> {}
+
+pub trait Policy {
+ type K;
+ type V;
+}
+
+impl<K, V> Policy for (K, V) {
+ type K = K;
+ type V = V;
+}
+
+pub struct KeyMap<KEY> {}
+
+impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
+ pub fn get(&self, key: &P::K) -> P::V {
+ loop {}
+ }
+}
+
+struct Fn {}
+struct FunctionId {}
+
+fn test() {
+ let key_map: &KeyMap<Key<Fn, FunctionId>> = loop {};
+ let key;
+ let result = key_map.get(key);
+ //^^^^^^ FunctionId
+}
+"#,
+ )
+}
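
The `expect![[...]]` blocks in these tests are inline snapshots of the rendered inference results. Below is a minimal sketch of that pattern, assuming the expect-test crate as a dev-dependency; `render_types` is purely illustrative and not a rust-analyzer API:

    // Inline-snapshot sketch (assumes the expect-test crate).
    use expect_test::expect;

    // Stand-in for whatever string check_infer would render.
    fn render_types() -> String {
        "17..73 '{ ... }': ()".to_string()
    }

    #[test]
    fn snapshot_example() {
        // On a mismatch, re-running with UPDATE_EXPECT=1 rewrites the literal in place.
        expect![[r#"17..73 '{ ... }': ()"#]].assert_eq(&render_types());
    }
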
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
new file mode 100644
index 000000000..547850b02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
@@ -0,0 +1,133 @@
+//! Implementation of Chalk debug helper functions using TLS.
+use std::fmt::{self, Display};
+
+use itertools::Itertools;
+
+use crate::{
+ chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
+ CallableDefId, Interner,
+};
+use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId};
+
+pub(crate) use unsafe_tls::{set_current_program, with_current_program};
+
+pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase);
+
+impl DebugContext<'_> {
+ pub(crate) fn debug_struct_id(
+ &self,
+ id: chalk_db::AdtId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let name = match id.0 {
+ AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
+ AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
+ AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
+ };
+ name.fmt(f)
+ }
+
+ pub(crate) fn debug_trait_id(
+ &self,
+ id: chalk_db::TraitId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let trait_: hir_def::TraitId = from_chalk_trait_id(id);
+ let trait_data = self.0.trait_data(trait_);
+ trait_data.name.fmt(f)
+ }
+
+ pub(crate) fn debug_assoc_type_id(
+ &self,
+ id: chalk_db::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias: TypeAliasId = from_assoc_type_id(id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
+ }
+
+ pub(crate) fn debug_projection_ty(
+ &self,
+ projection_ty: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ let params = projection_ty.substitution.as_slice(Interner);
+ write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
+ if params.len() > 1 {
+ write!(
+ fmt,
+ "<{}>",
+ &params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+ )?;
+ }
+ write!(fmt, ">::{}", type_alias_data.name)
+ }
+
+ pub(crate) fn debug_fn_def_id(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let def: CallableDefId = from_chalk(self.0, fn_def_id);
+ let name = match def {
+ CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
+ CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
+ CallableDefId::EnumVariantId(e) => {
+ let enum_data = self.0.enum_data(e.parent);
+ enum_data.variants[e.local_id].name.clone()
+ }
+ };
+ match def {
+ CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
+ write!(fmt, "{{ctor {}}}", name)
+ }
+ }
+ }
+}
+
+mod unsafe_tls {
+ use super::DebugContext;
+ use crate::db::HirDatabase;
+ use scoped_tls::scoped_thread_local;
+
+ scoped_thread_local!(static PROGRAM: DebugContext<'_>);
+
+ pub(crate) fn with_current_program<R>(
+ op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
+ ) -> R {
+ if PROGRAM.is_set() {
+ PROGRAM.with(|prog| op(Some(prog)))
+ } else {
+ op(None)
+ }
+ }
+
+ pub(crate) fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
+ where
+ OP: FnOnce() -> R,
+ {
+ let ctx = DebugContext(p);
+ // we're transmuting the lifetime in the DebugContext to static. This is
+ // fine because we only keep the reference for the lifetime of this
+ // function, *and* the only way to access the context is through
+ // `with_current_program`, which hides the lifetime through the `for`
+ // type.
+ let static_p: &DebugContext<'static> =
+ unsafe { std::mem::transmute::<&DebugContext<'_>, &DebugContext<'static>>(&ctx) };
+ PROGRAM.set(static_p, op)
+ }
+}
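
The set_current_program/with_current_program pair above is the scoped-TLS idiom: stash a reference for the duration of a closure, then read it back anywhere below that closure on the same thread. Here is a minimal, self-contained sketch using the scoped_tls crate; `Ctx` is a toy stand-in, not the real DebugContext:

    use scoped_tls::scoped_thread_local;

    struct Ctx(&'static str);

    scoped_thread_local!(static CURRENT: Ctx);

    // Mirrors with_current_program: only touch the TLS slot if it is set.
    fn with_current<R>(op: impl FnOnce(Option<&Ctx>) -> R) -> R {
        if CURRENT.is_set() {
            CURRENT.with(|ctx| op(Some(ctx)))
        } else {
            op(None)
        }
    }

    fn main() {
        let ctx = Ctx("debugging");
        // Mirrors set_current_program: the value is visible only inside the closure.
        CURRENT.set(&ctx, || {
            with_current(|c: Option<&Ctx>| assert_eq!(c.unwrap().0, "debugging"));
        });
        with_current(|c: Option<&Ctx>| assert!(c.is_none()));
    }
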
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
new file mode 100644
index 000000000..77afeb321
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -0,0 +1,187 @@
+//! Trait solving using Chalk.
+
+use std::env::var;
+
+use chalk_ir::GoalData;
+use chalk_recursive::Cache;
+use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver};
+
+use base_db::CrateId;
+use hir_def::{lang_item::LangItemTarget, TraitId};
+use stdx::panic_context;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment,
+ Interner, Solution, TraitRefExt, Ty, TyKind, WhereClause,
+};
+
+/// This controls how much 'time' we give the Chalk solver before giving up.
+const CHALK_SOLVER_FUEL: i32 = 100;
+
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct ChalkContext<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) krate: CrateId,
+}
+
+fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
+ let overflow_depth =
+ var("CHALK_OVERFLOW_DEPTH").ok().and_then(|s| s.parse().ok()).unwrap_or(500);
+ let max_size = var("CHALK_SOLVER_MAX_SIZE").ok().and_then(|s| s.parse().ok()).unwrap_or(150);
+ chalk_recursive::RecursiveSolver::new(overflow_depth, max_size, Some(Cache::new()))
+}
+
+/// A set of clauses that we assume to be true. E.g. if we are inside this function:
+/// ```rust
+/// fn foo<T: Default>(t: T) {}
+/// ```
+/// we assume that `T: Default`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TraitEnvironment {
+ pub krate: CrateId,
+ // FIXME make this a BTreeMap
+ pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>,
+ pub env: chalk_ir::Environment<Interner>,
+}
+
+impl TraitEnvironment {
+ pub fn empty(krate: CrateId) -> Self {
+ TraitEnvironment {
+ krate,
+ traits_from_clauses: Vec::new(),
+ env: chalk_ir::Environment::new(Interner),
+ }
+ }
+
+ pub fn traits_in_scope_from_clauses<'a>(
+ &'a self,
+ ty: Ty,
+ ) -> impl Iterator<Item = TraitId> + 'a {
+ self.traits_from_clauses
+ .iter()
+ .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
+ }
+}
+
+/// Solve a trait goal using Chalk.
+pub(crate) fn trait_solve_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: Canonical<InEnvironment<Goal>>,
+) -> Option<Solution> {
+ let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
+ GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
+ db.trait_data(it.hir_trait_id()).name.to_string()
+ }
+ GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
+ _ => "??".to_string(),
+ });
+ tracing::info!("trait_solve_query({:?})", goal.value.goal);
+
+ if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(projection_ty),
+ ..
+ }))) = &goal.value.goal.data(Interner)
+ {
+ if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(Interner).kind(Interner) {
+ // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
+ return Some(Solution::Ambig(Guidance::Unknown));
+ }
+ }
+
+ // We currently don't deal with universes (I think / hope they're not yet
+ // relevant for our use cases?)
+ let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 };
+ solve(db, krate, &u_canonical)
+}
+
+fn solve(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
+) -> Option<chalk_solve::Solution<Interner>> {
+ let context = ChalkContext { db, krate };
+ tracing::debug!("solve goal: {:?}", goal);
+ let mut solver = create_chalk_solver();
+
+ let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
+
+ let should_continue = || {
+ db.unwind_if_cancelled();
+ let remaining = fuel.get();
+ fuel.set(remaining - 1);
+ if remaining == 0 {
+ tracing::debug!("fuel exhausted");
+ }
+ remaining > 0
+ };
+
+ let mut solve = || {
+ let _ctx = if is_chalk_debug() || is_chalk_print() {
+ Some(panic_context::enter(format!("solving {:?}", goal)))
+ } else {
+ None
+ };
+ let solution = if is_chalk_print() {
+ let logging_db =
+ LoggingRustIrDatabaseLoggingOnDrop(LoggingRustIrDatabase::new(context));
+ solver.solve_limited(&logging_db.0, goal, &should_continue)
+ } else {
+ solver.solve_limited(&context, goal, &should_continue)
+ };
+
+ tracing::debug!("solve({:?}) => {:?}", goal, solution);
+
+ solution
+ };
+
+ // don't set the TLS for Chalk unless Chalk debugging is active, to make
+ // extra sure we only use it for debugging
+ if is_chalk_debug() {
+ crate::tls::set_current_program(db, solve)
+ } else {
+ solve()
+ }
+}
+
+struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);
+
+impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> {
+ fn drop(&mut self) {
+ eprintln!("chalk program:\n{}", self.0);
+ }
+}
+
+fn is_chalk_debug() -> bool {
+ std::env::var("CHALK_DEBUG").is_ok()
+}
+
+fn is_chalk_print() -> bool {
+ std::env::var("CHALK_PRINT").is_ok()
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum FnTrait {
+ FnOnce,
+ FnMut,
+ Fn,
+}
+
+impl FnTrait {
+ const fn lang_item_name(self) -> &'static str {
+ match self {
+ FnTrait::FnOnce => "fn_once",
+ FnTrait::FnMut => "fn_mut",
+ FnTrait::Fn => "fn",
+ }
+ }
+
+ pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
+ let target = db.lang_item(krate, SmolStr::new_inline(self.lang_item_name()))?;
+ match target {
+ LangItemTarget::TraitId(t) => Some(t),
+ _ => None,
+ }
+ }
+}
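
The fuel handed to Chalk via `should_continue` in `solve` is just a `Cell`-based countdown: each callback invocation burns one unit, and the solver is asked to stop once the budget is gone. A standalone sketch of the same shape, with toy numbers and no Chalk involved:

    use std::cell::Cell;

    fn main() {
        const FUEL: i32 = 3;
        let fuel = Cell::new(FUEL);

        // Same shape as `should_continue` above: burn one unit per call and
        // report whether any fuel was left before this call.
        let should_continue = || {
            let remaining = fuel.get();
            fuel.set(remaining - 1);
            remaining > 0
        };

        let mut steps = 0;
        while should_continue() {
            steps += 1;
        }
        assert_eq!(steps, FUEL);
    }
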
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
new file mode 100644
index 000000000..83319755d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -0,0 +1,408 @@
+//! Helper functions for working with defs, which don't need to be a separate
+//! query, but can't be computed directly from `*Data` (i.e., which need a `db`).
+
+use std::iter;
+
+use base_db::CrateId;
+use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
+use hir_def::{
+ db::DefDatabase,
+ generics::{
+ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
+ WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ resolver::{HasResolver, TypeNs},
+ type_ref::{TraitBoundModifier, TypeRef},
+ ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId,
+ TypeOrConstParamId, TypeParamId,
+};
+use hir_expand::name::{known, Name};
+use itertools::Either;
+use rustc_hash::FxHashSet;
+use smallvec::{smallvec, SmallVec};
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, ChalkTraitId, ConstData, ConstValue, GenericArgData, Interner, Substitution,
+ TraitRef, TraitRefExt, TyKind, WhereClause,
+};
+
+pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
+ [
+ db.lang_item(krate, SmolStr::new_inline("fn")),
+ db.lang_item(krate, SmolStr::new_inline("fn_mut")),
+ db.lang_item(krate, SmolStr::new_inline("fn_once")),
+ ]
+ .into_iter()
+ .flatten()
+ .flat_map(|it| it.as_trait())
+}
+
+fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+ let resolver = trait_.resolver(db);
+ // returning the iterator directly doesn't easily work because of
+ // lifetime problems, but since there usually shouldn't be more than a
+ // few direct traits this should be fine (we could even use some kind of
+ // SmallVec if performance is a concern)
+ let generic_params = db.generic_params(trait_.into());
+ let trait_self = generic_params.find_trait_self_param();
+ generic_params
+ .where_predicates
+ .iter()
+ .filter_map(|pred| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound } => {
+ let is_trait = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => match &**type_ref {
+ TypeRef::Path(p) => p.is_self_type(),
+ _ => false,
+ },
+ WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ Some(*local_id) == trait_self
+ }
+ };
+ match is_trait {
+ true => bound.as_path(),
+ false => None,
+ }
+ }
+ WherePredicate::Lifetime { .. } => None,
+ })
+ .filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None))
+ .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
+ Some(TypeNs::TraitId(t)) => Some(t),
+ _ => None,
+ })
+ .collect()
+}
+
+fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
+ // returning the iterator directly doesn't easily work because of
+ // lifetime problems, but since there usually shouldn't be more than a
+ // few direct traits this should be fine (we could even use some kind of
+ // SmallVec if performance is a concern)
+ let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
+ let trait_self = match generic_params.find_trait_self_param() {
+ Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
+ None => return Vec::new(),
+ };
+ db.generic_predicates_for_param(trait_self.parent, trait_self, None)
+ .iter()
+ .filter_map(|pred| {
+ pred.as_ref().filter_map(|pred| match pred.skip_binders() {
+ // FIXME: how to correctly handle higher-ranked bounds here?
+ WhereClause::Implemented(tr) => Some(
+ tr.clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("FIXME unexpected higher-ranked trait bound"),
+ ),
+ _ => None,
+ })
+ })
+ .map(|pred| pred.substitute(Interner, &trait_ref.substitution))
+ .collect()
+}
+
+/// Returns an iterator over the whole super trait hierarchy (including the
+/// trait itself).
+pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+    // we need to be a bit careful here to avoid infinite loops in case of cycles
+ // (i.e. if we have `trait A: B; trait B: A;`)
+
+ let mut result = smallvec![trait_];
+ let mut i = 0;
+ while let Some(&t) = result.get(i) {
+ // yeah this is quadratic, but trait hierarchies should be flat
+ // enough that this doesn't matter
+ for tt in direct_super_traits(db, t) {
+ if !result.contains(&tt) {
+ result.push(tt);
+ }
+ }
+ i += 1;
+ }
+ result
+}
+
+/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
+/// super traits. The original trait ref will be included. So the difference to
+/// `all_super_traits` is that we keep track of type parameters; for example if
+/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
+/// `Self: OtherTrait<i32>`.
+pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits<'_> {
+ SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] }
+}
+
+pub(super) struct SuperTraits<'a> {
+ db: &'a dyn HirDatabase,
+ stack: Vec<TraitRef>,
+ seen: FxHashSet<ChalkTraitId>,
+}
+
+impl<'a> SuperTraits<'a> {
+ fn elaborate(&mut self, trait_ref: &TraitRef) {
+ let mut trait_refs = direct_super_trait_refs(self.db, trait_ref);
+ trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id));
+ self.stack.extend(trait_refs);
+ }
+}
+
+impl<'a> Iterator for SuperTraits<'a> {
+ type Item = TraitRef;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(next) = self.stack.pop() {
+ self.elaborate(&next);
+ Some(next)
+ } else {
+ None
+ }
+ }
+}
+
+pub(super) fn associated_type_by_name_including_super_traits(
+ db: &dyn HirDatabase,
+ trait_ref: TraitRef,
+ name: &Name,
+) -> Option<(TraitRef, TypeAliasId)> {
+ all_super_trait_refs(db, trait_ref).find_map(|t| {
+ let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
+ Some((t, assoc_type))
+ })
+}
+
+pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
+ let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
+ if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
+ let params = db.generic_params(def);
+ let has_consts =
+ params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
+ return if has_consts {
+ // XXX: treat const generic associated types as not existing to avoid crashes (#11769)
+ //
+            // Chalk expects the inner associated type's parameters to come *before*
+            // the trait's generics, not after them as we've always done it.
+ // Adapting to this requires a larger refactoring
+ cov_mark::hit!(ignore_gats);
+ Generics { def, params: Interned::new(Default::default()), parent_generics }
+ } else {
+ Generics { def, params, parent_generics }
+ };
+ }
+ Generics { def, params: db.generic_params(def), parent_generics }
+}
+
+#[derive(Debug)]
+pub(crate) struct Generics {
+ def: GenericDefId,
+ pub(crate) params: Interned<GenericParams>,
+ parent_generics: Option<Box<Generics>>,
+}
+
+impl Generics {
+ pub(crate) fn iter_id<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + 'a {
+ self.iter().map(|(id, data)| match data {
+ TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)),
+ TypeOrConstParamData::ConstParamData(_) => {
+ Either::Right(ConstParamId::from_unchecked(id))
+ }
+ })
+ }
+
+ /// Iterator over types and const params of parent, then self.
+ pub(crate) fn iter<'a>(
+ &'a self,
+ ) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+ let to_toc_id = |it: &'a Generics| {
+ move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p)
+ };
+ self.parent_generics()
+ .into_iter()
+ .flat_map(move |it| it.params.iter().map(to_toc_id(it)))
+ .chain(self.params.iter().map(to_toc_id(self)))
+ }
+
+ /// Iterator over types and const params of parent.
+ pub(crate) fn iter_parent<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+ self.parent_generics().into_iter().flat_map(|it| {
+ let to_toc_id =
+ move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);
+ it.params.iter().map(to_toc_id)
+ })
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ let parent = self.parent_generics().map_or(0, Generics::len);
+ let child = self.params.type_or_consts.len();
+ parent + child
+ }
+
+ /// (parent total, self param, type param list, const param list, impl trait)
+ pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) {
+ let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param());
+
+ let self_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::TraitSelf).count();
+ let type_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::TypeParamList).count();
+ let impl_trait_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::ArgumentImplTrait).count();
+ let const_params = self.params.iter().filter_map(|x| x.1.const_param()).count();
+
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ (parent_len, self_params, type_params, const_params, impl_trait_params)
+ }
+
+ pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
+ Some(self.find_param(param)?.0)
+ }
+
+ fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
+ if param.parent == self.def {
+ let (idx, (_local_id, data)) = self
+ .params
+ .iter()
+ .enumerate()
+ .find(|(_, (idx, _))| *idx == param.local_id)
+ .unwrap();
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ Some((parent_len + idx, data))
+ } else {
+ self.parent_generics().and_then(|g| g.find_param(param))
+ }
+ }
+
+ fn parent_generics(&self) -> Option<&Generics> {
+ self.parent_generics.as_ref().map(|it| &**it)
+ }
+
+ /// Returns a Substitution that replaces each parameter by a bound variable.
+ pub(crate) fn bound_vars_subst(
+ &self,
+ db: &dyn HirDatabase,
+ debruijn: DebruijnIndex,
+ ) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().enumerate().map(|(idx, id)| match id {
+ Either::Left(_) => GenericArgData::Ty(
+ TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner),
+ )
+ .intern(Interner),
+ Either::Right(id) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
+ ty: db.const_param_ty(id),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ }),
+ )
+ }
+
+ /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
+ pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().map(|id| match id {
+ Either::Left(id) => GenericArgData::Ty(
+ TyKind::Placeholder(crate::to_placeholder_idx(db, id.into())).intern(Interner),
+ )
+ .intern(Interner),
+ Either::Right(id) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::Placeholder(crate::to_placeholder_idx(db, id.into())),
+ ty: db.const_param_ty(id),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ }),
+ )
+ }
+}
+
+fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
+ let container = match def {
+ GenericDefId::FunctionId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
+ GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
+ };
+
+ match container {
+ ItemContainerId::ImplId(it) => Some(it.into()),
+ ItemContainerId::TraitId(it) => Some(it.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+}
+
+pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
+ let data = db.function_data(func);
+ if data.has_unsafe_kw() {
+ return true;
+ }
+
+ match func.lookup(db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+            // Functions in an `extern` block are always unsafe to call, except when the block
+            // has the `"rust-intrinsic"` ABI, in which case a few known intrinsics are safe.
+ let id = block.lookup(db.upcast()).id;
+ !matches!(
+ id.item_tree(db.upcast())[id.value].abi.as_deref(),
+ Some("rust-intrinsic") if !is_intrinsic_fn_unsafe(&data.name)
+ )
+ }
+ _ => false,
+ }
+}
+
+/// Returns `true` if the given intrinsic is unsafe to call, or `false` otherwise.
+fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
+ // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106
+ ![
+ known::abort,
+ known::add_with_overflow,
+ known::bitreverse,
+ known::black_box,
+ known::bswap,
+ known::caller_location,
+ known::ctlz,
+ known::ctpop,
+ known::cttz,
+ known::discriminant_value,
+ known::forget,
+ known::likely,
+ known::maxnumf32,
+ known::maxnumf64,
+ known::min_align_of,
+ known::minnumf32,
+ known::minnumf64,
+ known::mul_with_overflow,
+ known::needs_drop,
+ known::ptr_guaranteed_eq,
+ known::ptr_guaranteed_ne,
+ known::rotate_left,
+ known::rotate_right,
+ known::rustc_peek,
+ known::saturating_add,
+ known::saturating_sub,
+ known::size_of,
+ known::sub_with_overflow,
+ known::type_id,
+ known::type_name,
+ known::unlikely,
+ known::variant_count,
+ known::wrapping_add,
+ known::wrapping_mul,
+ known::wrapping_sub,
+ ]
+ .contains(name)
+}
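
`all_super_traits` deduplicates while it walks, so a cyclic hierarchy such as `trait A: B; trait B: A;` still terminates. The same worklist idea on a toy string graph, illustrative only:

    use std::collections::HashMap;

    // Same worklist shape as all_super_traits: push unseen items and scan the
    // result quadratically; fine because trait hierarchies stay small.
    fn all_supers(direct: &HashMap<&str, Vec<&'static str>>, start: &'static str) -> Vec<&'static str> {
        let mut result = vec![start];
        let mut i = 0;
        while let Some(&t) = result.get(i) {
            for &s in direct.get(t).into_iter().flatten() {
                if !result.contains(&s) {
                    result.push(s);
                }
            }
            i += 1;
        }
        result
    }

    fn main() {
        // A cyclic "hierarchy": A: B, B: A. The walk still terminates.
        let direct = HashMap::from([("A", vec!["B"]), ("B", vec!["A"])]);
        assert_eq!(all_supers(&direct, "A"), vec!["A", "B"]);
    }
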
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
new file mode 100644
index 000000000..c47689455
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
@@ -0,0 +1,147 @@
+//! The `TypeWalk` trait (probably to be replaced by Chalk's `Fold` and
+//! `Visit`).
+
+use chalk_ir::interner::HasInterner;
+
+use crate::{
+ AliasEq, AliasTy, Binders, CallableSig, FnSubst, GenericArg, GenericArgData, Interner,
+ OpaqueTy, ProjectionTy, Substitution, TraitRef, Ty, TyKind, WhereClause,
+};
+
+/// This allows walking structures that contain types to do something with those
+/// types, similar to Chalk's `Fold` trait.
+pub trait TypeWalk {
+ fn walk(&self, f: &mut impl FnMut(&Ty));
+}
+
+impl TypeWalk for Ty {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self.kind(Interner) {
+ TyKind::Alias(AliasTy::Projection(p_ty)) => {
+ for t in p_ty.substitution.iter(Interner) {
+ t.walk(f);
+ }
+ }
+ TyKind::Alias(AliasTy::Opaque(o_ty)) => {
+ for t in o_ty.substitution.iter(Interner) {
+ t.walk(f);
+ }
+ }
+ TyKind::Dyn(dyn_ty) => {
+ for p in dyn_ty.bounds.skip_binders().interned().iter() {
+ p.walk(f);
+ }
+ }
+ TyKind::Slice(ty)
+ | TyKind::Array(ty, _)
+ | TyKind::Ref(_, _, ty)
+ | TyKind::Raw(_, ty) => {
+ ty.walk(f);
+ }
+ TyKind::Function(fn_pointer) => {
+ fn_pointer.substitution.0.walk(f);
+ }
+ TyKind::Adt(_, substs)
+ | TyKind::FnDef(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::OpaqueType(_, substs)
+ | TyKind::AssociatedType(_, substs)
+ | TyKind::Closure(.., substs) => {
+ substs.walk(f);
+ }
+ _ => {}
+ }
+ f(self);
+ }
+}
+
+impl<T: TypeWalk> TypeWalk for Vec<T> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self {
+ t.walk(f);
+ }
+ }
+}
+
+impl TypeWalk for OpaqueTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substitution.walk(f);
+ }
+}
+
+impl TypeWalk for ProjectionTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substitution.walk(f);
+ }
+}
+
+impl TypeWalk for AliasTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self {
+ AliasTy::Projection(it) => it.walk(f),
+ AliasTy::Opaque(it) => it.walk(f),
+ }
+ }
+}
+
+impl TypeWalk for GenericArg {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ if let GenericArgData::Ty(ty) = &self.interned() {
+ ty.walk(f);
+ }
+ }
+}
+
+impl TypeWalk for Substitution {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self.iter(Interner) {
+ t.walk(f);
+ }
+ }
+}
+
+impl<T: TypeWalk + HasInterner<Interner = Interner>> TypeWalk for Binders<T> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.skip_binders().walk(f);
+ }
+}
+
+impl TypeWalk for TraitRef {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substitution.walk(f);
+ }
+}
+
+impl TypeWalk for WhereClause {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self {
+ WhereClause::Implemented(trait_ref) => trait_ref.walk(f),
+ WhereClause::AliasEq(alias_eq) => alias_eq.walk(f),
+ _ => {}
+ }
+ }
+}
+
+impl TypeWalk for CallableSig {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self.params_and_return.iter() {
+ t.walk(f);
+ }
+ }
+}
+
+impl TypeWalk for AliasEq {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.ty.walk(f);
+ match &self.alias {
+ AliasTy::Projection(projection_ty) => projection_ty.walk(f),
+ AliasTy::Opaque(opaque) => opaque.walk(f),
+ }
+ }
+}
+
+impl TypeWalk for FnSubst<Interner> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.0.walk(f)
+ }
+}
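
`TypeWalk` is a plain post-order callback visitor: recurse into the children, then report the node itself. A toy version over a miniature type enum, assuming nothing about the real interned `Ty` representation:

    // Toy stand-in for Ty; the real rust-analyzer types are interned Chalk values.
    enum Ty {
        Unit,
        Ref(Box<Ty>),
        Tuple(Vec<Ty>),
    }

    trait TypeWalk {
        fn walk(&self, f: &mut impl FnMut(&Ty));
    }

    impl TypeWalk for Ty {
        fn walk(&self, f: &mut impl FnMut(&Ty)) {
            // Children first, then the node itself, matching the order above.
            match self {
                Ty::Unit => {}
                Ty::Ref(inner) => inner.walk(f),
                Ty::Tuple(items) => {
                    for t in items {
                        t.walk(f);
                    }
                }
            }
            f(self);
        }
    }

    fn main() {
        let ty = Ty::Tuple(vec![Ty::Unit, Ty::Ref(Box::new(Ty::Unit))]);
        let mut count = 0;
        ty.walk(&mut |_: &Ty| count += 1);
        assert_eq!(count, 4); // Unit, Unit, Ref, Tuple
    }
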
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
new file mode 100644
index 000000000..8e6a2441b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "hir"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+either = "1.7.0"
+arrayvec = "0.7.2"
+itertools = "0.10.3"
+smallvec = "1.9.0"
+once_cell = "1.12.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
new file mode 100644
index 000000000..0bd379340
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -0,0 +1,177 @@
+//! Attributes & documentation for hir types.
+
+use hir_def::{
+ attr::{AttrsWithOwner, Documentation},
+ item_scope::ItemInNs,
+ path::ModPath,
+ per_ns::PerNs,
+ resolver::HasResolver,
+ AttrDefId, GenericParamId, ModuleDefId,
+};
+use hir_expand::hygiene::Hygiene;
+use hir_ty::db::HirDatabase;
+use syntax::{ast, AstNode};
+
+use crate::{
+ Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
+ Macro, Module, ModuleDef, Static, Struct, Trait, TypeAlias, TypeParam, Union, Variant,
+};
+
+pub trait HasAttrs {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef>;
+}
+
+#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
+pub enum Namespace {
+ Types,
+ Values,
+ Macros,
+}
+
+macro_rules! impl_has_attrs {
+ ($(($def:ident, $def_id:ident),)*) => {$(
+ impl HasAttrs for $def {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ let def = AttrDefId::$def_id(self.into());
+ db.attrs(def)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ let def = AttrDefId::$def_id(self.into());
+ db.attrs(def).docs()
+ }
+ fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
+ let def = AttrDefId::$def_id(self.into());
+ resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
+ }
+ }
+ )*};
+}
+
+impl_has_attrs![
+ (Field, FieldId),
+ (Variant, EnumVariantId),
+ (Static, StaticId),
+ (Const, ConstId),
+ (Trait, TraitId),
+ (TypeAlias, TypeAliasId),
+ (Macro, MacroId),
+ (Function, FunctionId),
+ (Adt, AdtId),
+ (Module, ModuleId),
+ (GenericParam, GenericParamId),
+ (Impl, ImplId),
+];
+
+macro_rules! impl_has_attrs_enum {
+ ($($variant:ident),* for $enum:ident) => {$(
+ impl HasAttrs for $variant {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ $enum::$variant(self).attrs(db)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ $enum::$variant(self).docs(db)
+ }
+ fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
+ $enum::$variant(self).resolve_doc_path(db, link, ns)
+ }
+ }
+ )*};
+}
+
+impl_has_attrs_enum![Struct, Union, Enum for Adt];
+impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
+
+impl HasAttrs for AssocItem {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ match self {
+ AssocItem::Function(it) => it.attrs(db),
+ AssocItem::Const(it) => it.attrs(db),
+ AssocItem::TypeAlias(it) => it.attrs(db),
+ }
+ }
+
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ match self {
+ AssocItem::Function(it) => it.docs(db),
+ AssocItem::Const(it) => it.docs(db),
+ AssocItem::TypeAlias(it) => it.docs(db),
+ }
+ }
+
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef> {
+ match self {
+ AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
+ }
+ }
+}
+
+fn resolve_doc_path(
+ db: &dyn HirDatabase,
+ def: AttrDefId,
+ link: &str,
+ ns: Option<Namespace>,
+) -> Option<ModuleDefId> {
+ let resolver = match def {
+ AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
+ AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
+ AttrDefId::AdtId(it) => it.resolver(db.upcast()),
+ AttrDefId::FunctionId(it) => it.resolver(db.upcast()),
+ AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()),
+ AttrDefId::StaticId(it) => it.resolver(db.upcast()),
+ AttrDefId::ConstId(it) => it.resolver(db.upcast()),
+ AttrDefId::TraitId(it) => it.resolver(db.upcast()),
+ AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
+ AttrDefId::ImplId(it) => it.resolver(db.upcast()),
+ AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
+ AttrDefId::MacroId(it) => it.resolver(db.upcast()),
+ AttrDefId::GenericParamId(it) => match it {
+ GenericParamId::TypeParamId(it) => it.parent(),
+ GenericParamId::ConstParamId(it) => it.parent(),
+ GenericParamId::LifetimeParamId(it) => it.parent,
+ }
+ .resolver(db.upcast()),
+ };
+
+ let modpath = {
+ // FIXME: this is not how we should get a mod path here
+ let ast_path = ast::SourceFile::parse(&format!("type T = {};", link))
+ .syntax_node()
+ .descendants()
+ .find_map(ast::Path::cast)?;
+ if ast_path.to_string() != link {
+ return None;
+ }
+ ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())?
+ };
+
+ let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
+ let resolved = if resolved == PerNs::none() {
+ resolver.resolve_module_path_in_trait_assoc_items(db.upcast(), &modpath)?
+ } else {
+ resolved
+ };
+ match ns {
+ Some(Namespace::Types) => resolved.take_types(),
+ Some(Namespace::Values) => resolved.take_values(),
+ Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
+ None => resolved.iter_items().next().map(|it| match it {
+ ItemInNs::Types(it) => it,
+ ItemInNs::Values(it) => it,
+ ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
+ }),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
new file mode 100644
index 000000000..e25d86784
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -0,0 +1,16 @@
+//! Re-exports the databases of various subcrates so that the calling code can depend
+//! only on `hir`. This breaks the abstraction boundary a bit; it would be cool if
+//! we didn't do that.
+//!
+//! But we need this for at least LRU caching at the query level.
+pub use hir_def::db::*;
+pub use hir_expand::db::{
+ AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroCallQuery,
+ MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery,
+};
+pub use hir_ty::db::*;
+
+#[test]
+fn hir_database_is_object_safe() {
+ fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
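
The hir_database_is_object_safe test is a compile-time-only assertion: the body does nothing at runtime, but the `&dyn HirDatabase` parameter fails to compile if the trait ever stops being object safe. The same trick works for any trait, sketched here on a made-up `Lookup` trait:

    trait Lookup {
        fn get(&self, key: &str) -> Option<String>;
    }

    // Compiles only while `dyn Lookup` is a valid type, i.e. the trait has no
    // generic methods, doesn't take `Self` by value, and so on.
    fn _assert_object_safe(_: &dyn Lookup) {}

    fn main() {}
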
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
new file mode 100644
index 000000000..6c6c11ea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -0,0 +1,170 @@
+//! Re-export diagnostics such that clients of `hir` don't have to depend on
+//! low-level crates.
+//!
+//! This probably isn't the best way to do this -- ideally, diagnostics should
+//! be expressed in terms of hir types themselves.
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_def::path::ModPath;
+use hir_expand::{name::Name, HirFileId, InFile};
+use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
+
+use crate::{MacroKind, Type};
+
+macro_rules! diagnostics {
+ ($($diag:ident,)*) => {
+ pub enum AnyDiagnostic {$(
+ $diag(Box<$diag>),
+ )*}
+
+ $(
+ impl From<$diag> for AnyDiagnostic {
+ fn from(d: $diag) -> AnyDiagnostic {
+ AnyDiagnostic::$diag(Box::new(d))
+ }
+ }
+ )*
+ };
+}
+
+diagnostics![
+ BreakOutsideOfLoop,
+ InactiveCode,
+ IncorrectCase,
+ InvalidDeriveTarget,
+ MacroError,
+ MalformedDerive,
+ MismatchedArgCount,
+ MissingFields,
+ MissingMatchArms,
+ MissingUnsafe,
+ NoSuchField,
+ ReplaceFilterMapNextWithFindMap,
+ TypeMismatch,
+ UnimplementedBuiltinMacro,
+ UnresolvedExternCrate,
+ UnresolvedImport,
+ UnresolvedMacroCall,
+ UnresolvedModule,
+ UnresolvedProcMacro,
+];
+
+#[derive(Debug)]
+pub struct UnresolvedModule {
+ pub decl: InFile<AstPtr<ast::Module>>,
+ pub candidates: Box<[String]>,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedExternCrate {
+ pub decl: InFile<AstPtr<ast::ExternCrate>>,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedImport {
+ pub decl: InFile<AstPtr<ast::UseTree>>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UnresolvedMacroCall {
+ pub macro_call: InFile<SyntaxNodePtr>,
+ pub precise_location: Option<TextRange>,
+ pub path: ModPath,
+ pub is_bang: bool,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct InactiveCode {
+ pub node: InFile<SyntaxNodePtr>,
+ pub cfg: CfgExpr,
+ pub opts: CfgOptions,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UnresolvedProcMacro {
+ pub node: InFile<SyntaxNodePtr>,
+ /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange`
+ /// to use instead.
+ pub precise_location: Option<TextRange>,
+ pub macro_name: Option<String>,
+ pub kind: MacroKind,
+ /// The crate id of the proc-macro this macro belongs to, or `None` if the proc-macro can't be found.
+ pub krate: CrateId,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroError {
+ pub node: InFile<SyntaxNodePtr>,
+ pub precise_location: Option<TextRange>,
+ pub message: String,
+}
+
+#[derive(Debug)]
+pub struct UnimplementedBuiltinMacro {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct InvalidDeriveTarget {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct MalformedDerive {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct NoSuchField {
+ pub field: InFile<AstPtr<ast::RecordExprField>>,
+}
+
+#[derive(Debug)]
+pub struct BreakOutsideOfLoop {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
+#[derive(Debug)]
+pub struct MissingUnsafe {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
+#[derive(Debug)]
+pub struct MissingFields {
+ pub file: HirFileId,
+ pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
+ pub field_list_parent_path: Option<AstPtr<ast::Path>>,
+ pub missed_fields: Vec<Name>,
+}
+
+#[derive(Debug)]
+pub struct ReplaceFilterMapNextWithFindMap {
+ pub file: HirFileId,
+ /// This expression is the whole method chain up to and including `.filter_map(..).next()`.
+ pub next_expr: AstPtr<ast::Expr>,
+}
+
+#[derive(Debug)]
+pub struct MismatchedArgCount {
+ pub call_expr: InFile<AstPtr<ast::Expr>>,
+ pub expected: usize,
+ pub found: usize,
+}
+
+#[derive(Debug)]
+pub struct MissingMatchArms {
+ pub file: HirFileId,
+ pub match_expr: AstPtr<ast::Expr>,
+ pub uncovered_patterns: String,
+}
+
+#[derive(Debug)]
+pub struct TypeMismatch {
+ // FIXME: add mismatches in patterns as well
+ pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expected: Type,
+ pub actual: Type,
+}
+
+pub use hir_ty::diagnostics::IncorrectCase;
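
The `diagnostics!` macro boxes every payload, so `AnyDiagnostic` stays small no matter how large an individual diagnostic struct grows, and the generated `From` impls let constructors end with a plain `.into()`. A cut-down sketch of the same macro shape, with toy diagnostic structs:

    // Cut-down version of the diagnostics! macro above: each variant wraps its
    // payload in a Box, and a From impl is generated per diagnostic.
    macro_rules! diagnostics {
        ($($diag:ident,)*) => {
            pub enum AnyDiagnostic {$(
                $diag(Box<$diag>),
            )*}

            $(
                impl From<$diag> for AnyDiagnostic {
                    fn from(d: $diag) -> AnyDiagnostic {
                        AnyDiagnostic::$diag(Box::new(d))
                    }
                }
            )*
        };
    }

    pub struct UnresolvedModule { pub candidates: Vec<String> }
    pub struct MissingFields { pub missed_fields: Vec<String> }

    diagnostics![UnresolvedModule, MissingFields,];

    fn main() {
        let d: AnyDiagnostic = UnresolvedModule { candidates: vec![] }.into();
        // The enum stays small; the payload lives behind the Box.
        assert!(matches!(d, AnyDiagnostic::UnresolvedModule(_)));
    }
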
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
new file mode 100644
index 000000000..0e29c52ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -0,0 +1,530 @@
+//! HirDisplay implementations for various hir types.
+use hir_def::{
+ adt::VariantData,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ type_ref::{TypeBound, TypeRef},
+ AdtId, GenericDefId,
+};
+use hir_ty::{
+ display::{
+ write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
+ HirFormatter, SizedByDefault,
+ },
+ Interner, TraitRefExt, WhereClause,
+};
+use syntax::SmolStr;
+
+use crate::{
+ Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility,
+ LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, TypeAlias,
+ TypeOrConstParam, TypeParam, Union, Variant,
+};
+
+impl HirDisplay for Function {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = f.db.function_data(self.id);
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ if data.has_default_kw() {
+ f.write_str("default ")?;
+ }
+ if data.has_const_kw() {
+ f.write_str("const ")?;
+ }
+ if data.has_async_kw() {
+ f.write_str("async ")?;
+ }
+ if self.is_unsafe_to_call(f.db) {
+ f.write_str("unsafe ")?;
+ }
+ if let Some(abi) = &data.abi {
+ // FIXME: String escape?
+ write!(f, "extern \"{}\" ", &**abi)?;
+ }
+ write!(f, "fn {}", data.name)?;
+
+ write_generic_params(GenericDefId::FunctionId(self.id), f)?;
+
+ f.write_char('(')?;
+
+ let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
+ TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
+ TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner,TypeRef::Path(p) if p.is_self_type()) =>
+ {
+ f.write_char('&')?;
+ if let Some(lifetime) = lifetime {
+ write!(f, "{} ", lifetime.name)?;
+ }
+ if let hir_def::type_ref::Mutability::Mut = mut_ {
+ f.write_str("mut ")?;
+ }
+ f.write_str("self")
+ }
+ _ => {
+ f.write_str("self: ")?;
+ ty.hir_fmt(f)
+ }
+ };
+
+ let mut first = true;
+ for (name, type_ref) in &data.params {
+ if !first {
+ f.write_str(", ")?;
+ } else {
+ first = false;
+ if data.has_self_param() {
+ write_self_param(type_ref, f)?;
+ continue;
+ }
+ }
+ match name {
+ Some(name) => write!(f, "{}: ", name)?,
+ None => f.write_str("_: ")?,
+ }
+ // FIXME: Use resolved `param.ty` or raw `type_ref`?
+ // The former will ignore lifetime arguments currently.
+ type_ref.hir_fmt(f)?;
+ }
+
+ if data.is_varargs() {
+ f.write_str(", ...")?;
+ }
+
+ f.write_char(')')?;
+
+ // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
+ // Use ugly pattern match to strip the Future trait.
+ // Better way?
+ let ret_type = if !data.has_async_kw() {
+ &data.ret_type
+ } else {
+ match &*data.ret_type {
+ TypeRef::ImplTrait(bounds) => match bounds[0].as_ref() {
+ TypeBound::Path(path, _) => {
+ path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings
+ [0]
+ .type_ref
+ .as_ref()
+ .unwrap()
+ }
+ _ => panic!("Async fn ret_type should be impl Future"),
+ },
+ _ => panic!("Async fn ret_type should be impl Future"),
+ }
+ };
+
+ match ret_type {
+ TypeRef::Tuple(tup) if tup.is_empty() => {}
+ ty => {
+ f.write_str(" -> ")?;
+ ty.hir_fmt(f)?;
+ }
+ }
+
+ write_where_clause(GenericDefId::FunctionId(self.id), f)?;
+
+ Ok(())
+ }
+}
+
+impl HirDisplay for Adt {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ Adt::Struct(it) => it.hir_fmt(f),
+ Adt::Union(it) => it.hir_fmt(f),
+ Adt::Enum(it) => it.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for Struct {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("struct ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Enum {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("enum ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Union {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("union ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Field {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
+ write!(f, "{}: ", self.name(f.db))?;
+ self.ty(f.db).hir_fmt(f)
+ }
+}
+
+impl HirDisplay for Variant {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))?;
+ let data = self.variant_data(f.db);
+ match &*data {
+ VariantData::Unit => {}
+ VariantData::Tuple(fields) => {
+ f.write_char('(')?;
+ let mut first = true;
+ for (_, field) in fields.iter() {
+ if first {
+ first = false;
+ } else {
+ f.write_str(", ")?;
+ }
+ // Enum variant fields must be pub.
+ field.type_ref.hir_fmt(f)?;
+ }
+ f.write_char(')')?;
+ }
+ VariantData::Record(fields) => {
+ f.write_str(" {")?;
+ let mut first = true;
+ for (_, field) in fields.iter() {
+ if first {
+ first = false;
+ f.write_char(' ')?;
+ } else {
+ f.write_str(", ")?;
+ }
+ // Enum variant fields must be pub.
+ write!(f, "{}: ", field.name)?;
+ field.type_ref.hir_fmt(f)?;
+ }
+ f.write_str(" }")?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for Type {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ self.ty.hir_fmt(f)
+ }
+}
+
+impl HirDisplay for GenericParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ GenericParam::TypeParam(it) => it.hir_fmt(f),
+ GenericParam::ConstParam(it) => it.hir_fmt(f),
+ GenericParam::LifetimeParam(it) => it.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for TypeOrConstParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.split(f.db) {
+ either::Either::Left(x) => x.hir_fmt(f),
+ either::Either::Right(x) => x.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for TypeParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))?;
+ if f.omit_verbose_types() {
+ return Ok(());
+ }
+
+ let bounds = f.db.generic_predicates_for_param(self.id.parent(), self.id.into(), None);
+ let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
+ let predicates: Vec<_> =
+ bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect();
+ let krate = self.id.parent().krate(f.db).id;
+ let sized_trait =
+ f.db.lang_item(krate, SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
+ WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait,
+ _ => false,
+ });
+ let has_only_not_sized_bound = predicates.is_empty();
+ if !has_only_sized_bound || has_only_not_sized_bound {
+ let default_sized = SizedByDefault::Sized { anchor: krate };
+ write_bounds_like_dyn_trait_with_prefix(":", &predicates, default_sized, f)?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for LifetimeParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))
+ }
+}
+
+impl HirDisplay for ConstParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "const {}: ", self.name(f.db))?;
+ self.ty(f.db).hir_fmt(f)
+ }
+}
+
+fn write_generic_params(
+ def: GenericDefId,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ let params = f.db.generic_params(def);
+ if params.lifetimes.is_empty()
+ && params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
+ && params
+ .type_or_consts
+ .iter()
+ .filter_map(|x| x.1.type_param())
+ .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
+ {
+ return Ok(());
+ }
+ f.write_char('<')?;
+
+ let mut first = true;
+ let mut delim = |f: &mut HirFormatter<'_>| {
+ if first {
+ first = false;
+ Ok(())
+ } else {
+ f.write_str(", ")
+ }
+ };
+ for (_, lifetime) in params.lifetimes.iter() {
+ delim(f)?;
+ write!(f, "{}", lifetime.name)?;
+ }
+ for (_, ty) in params.type_or_consts.iter() {
+ if let Some(name) = &ty.name() {
+ match ty {
+ TypeOrConstParamData::TypeParamData(ty) => {
+ if ty.provenance != TypeParamProvenance::TypeParamList {
+ continue;
+ }
+ delim(f)?;
+ write!(f, "{}", name)?;
+ if let Some(default) = &ty.default {
+ f.write_str(" = ")?;
+ default.hir_fmt(f)?;
+ }
+ }
+ TypeOrConstParamData::ConstParamData(c) => {
+ delim(f)?;
+ write!(f, "const {}: ", name)?;
+ c.ty.hir_fmt(f)?;
+ }
+ }
+ }
+ }
+
+ f.write_char('>')?;
+ Ok(())
+}
+
+fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let params = f.db.generic_params(def);
+
+ // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
+ let is_unnamed_type_target = |target: &WherePredicateTypeTarget| match target {
+ WherePredicateTypeTarget::TypeRef(_) => false,
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ params.type_or_consts[*id].name().is_none()
+ }
+ };
+
+ let has_displayable_predicate = params
+ .where_predicates
+ .iter()
+ .any(|pred| {
+ !matches!(pred, WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target))
+ });
+
+ if !has_displayable_predicate {
+ return Ok(());
+ }
+
+ let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
+ WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ match &params.type_or_consts[*id].name() {
+ Some(name) => write!(f, "{}", name),
+ None => f.write_str("{unnamed}"),
+ }
+ }
+ };
+
+ f.write_str("\nwhere")?;
+
+ for (pred_idx, pred) in params.where_predicates.iter().enumerate() {
+ let prev_pred =
+ if pred_idx == 0 { None } else { Some(&params.where_predicates[pred_idx - 1]) };
+
+ let new_predicate = |f: &mut HirFormatter<'_>| {
+ f.write_str(if pred_idx == 0 { "\n " } else { ",\n " })
+ };
+
+ match pred {
+ WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target) => {}
+ WherePredicate::TypeBound { target, bound } => {
+ if matches!(prev_pred, Some(WherePredicate::TypeBound { target: target_, .. }) if target_ == target)
+ {
+ f.write_str(" + ")?;
+ } else {
+ new_predicate(f)?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
+ }
+ bound.hir_fmt(f)?;
+ }
+ WherePredicate::Lifetime { target, bound } => {
+ if matches!(prev_pred, Some(WherePredicate::Lifetime { target: target_, .. }) if target_ == target)
+ {
+ write!(f, " + {}", bound.name)?;
+ } else {
+ new_predicate(f)?;
+ write!(f, "{}: {}", target.name, bound.name)?;
+ }
+ }
+ WherePredicate::ForLifetime { lifetimes, target, bound } => {
+ if matches!(
+ prev_pred,
+ Some(WherePredicate::ForLifetime { lifetimes: lifetimes_, target: target_, .. })
+ if lifetimes_ == lifetimes && target_ == target,
+ ) {
+ f.write_str(" + ")?;
+ } else {
+ new_predicate(f)?;
+ f.write_str("for<")?;
+ for (idx, lifetime) in lifetimes.iter().enumerate() {
+ if idx != 0 {
+ f.write_str(", ")?;
+ }
+ write!(f, "{}", lifetime)?;
+ }
+ f.write_str("> ")?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
+ }
+ bound.hir_fmt(f)?;
+ }
+ }
+ }
+
+ // End of final predicate. There must be at least one predicate here.
+ f.write_char(',')?;
+
+ Ok(())
+}
+
+impl HirDisplay for Const {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.const_data(self.id);
+ f.write_str("const ")?;
+ match &data.name {
+ Some(name) => write!(f, "{}: ", name)?,
+ None => f.write_str("_: ")?,
+ }
+ data.type_ref.hir_fmt(f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Static {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.static_data(self.id);
+ f.write_str("static ")?;
+ if data.mutable {
+ f.write_str("mut ")?;
+ }
+ write!(f, "{}: ", &data.name)?;
+ data.type_ref.hir_fmt(f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Trait {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.trait_data(self.id);
+ if data.is_unsafe {
+ f.write_str("unsafe ")?;
+ }
+ if data.is_auto {
+ f.write_str("auto ")?;
+ }
+ write!(f, "trait {}", data.name)?;
+ let def_id = GenericDefId::TraitId(self.id);
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for TypeAlias {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.type_alias_data(self.id);
+ write!(f, "type {}", data.name)?;
+ if !data.bounds.is_empty() {
+ f.write_str(": ")?;
+ f.write_joined(&data.bounds, " + ")?;
+ }
+ if let Some(ty) = &data.type_ref {
+ f.write_str(" = ")?;
+ ty.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for Module {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ // FIXME: Module doesn't have visibility saved in data.
+ match self.name(f.db) {
+ Some(name) => write!(f, "mod {}", name),
+ None if self.is_crate_root(f.db) => match self.krate(f.db).display_name(f.db) {
+ Some(name) => write!(f, "extern crate {}", name),
+ None => f.write_str("extern crate {unknown}"),
+ },
+ None => f.write_str("mod {unnamed}"),
+ }
+ }
+}
+
+impl HirDisplay for Macro {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.id {
+ hir_def::MacroId::Macro2Id(_) => f.write_str("macro"),
+ hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
+ hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
+ }?;
+ write!(f, " {}", self.name(f.db))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
new file mode 100644
index 000000000..9c7558d19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -0,0 +1,293 @@
+//! Utility module for converting between hir_def ids and code_model wrappers.
+//!
+//! It's unclear if we need this long-term, but it's definitely useful while we
+//! are splitting the hir.
+
+use hir_def::{
+ expr::{LabelId, PatId},
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
+ ModuleDefId, VariantId,
+};
+
+use crate::{
+ Adt, AssocItem, BuiltinType, DefWithBody, Field, GenericDef, GenericParam, ItemInNs, Label,
+ Local, ModuleDef, Variant, VariantDef,
+};
+
+macro_rules! from_id {
+ ($(($id:path, $ty:path)),*) => {$(
+ impl From<$id> for $ty {
+ fn from(id: $id) -> $ty {
+ $ty { id }
+ }
+ }
+ impl From<$ty> for $id {
+ fn from(ty: $ty) -> $id {
+ ty.id
+ }
+ }
+ )*}
+}
+
+from_id![
+ (base_db::CrateId, crate::Crate),
+ (hir_def::ModuleId, crate::Module),
+ (hir_def::StructId, crate::Struct),
+ (hir_def::UnionId, crate::Union),
+ (hir_def::EnumId, crate::Enum),
+ (hir_def::TypeAliasId, crate::TypeAlias),
+ (hir_def::TraitId, crate::Trait),
+ (hir_def::StaticId, crate::Static),
+ (hir_def::ConstId, crate::Const),
+ (hir_def::FunctionId, crate::Function),
+ (hir_def::ImplId, crate::Impl),
+ (hir_def::TypeOrConstParamId, crate::TypeOrConstParam),
+ (hir_def::TypeParamId, crate::TypeParam),
+ (hir_def::ConstParamId, crate::ConstParam),
+ (hir_def::LifetimeParamId, crate::LifetimeParam),
+ (hir_def::MacroId, crate::Macro)
+];
+
+impl From<AdtId> for Adt {
+ fn from(id: AdtId) -> Self {
+ match id {
+ AdtId::StructId(it) => Adt::Struct(it.into()),
+ AdtId::UnionId(it) => Adt::Union(it.into()),
+ AdtId::EnumId(it) => Adt::Enum(it.into()),
+ }
+ }
+}
+
+impl From<Adt> for AdtId {
+ fn from(id: Adt) -> Self {
+ match id {
+ Adt::Struct(it) => AdtId::StructId(it.id),
+ Adt::Union(it) => AdtId::UnionId(it.id),
+ Adt::Enum(it) => AdtId::EnumId(it.id),
+ }
+ }
+}
+
+impl From<GenericParamId> for GenericParam {
+ fn from(id: GenericParamId) -> Self {
+ match id {
+ GenericParamId::TypeParamId(it) => GenericParam::TypeParam(it.into()),
+ GenericParamId::ConstParamId(it) => GenericParam::ConstParam(it.into()),
+ GenericParamId::LifetimeParamId(it) => GenericParam::LifetimeParam(it.into()),
+ }
+ }
+}
+
+impl From<GenericParam> for GenericParamId {
+ fn from(id: GenericParam) -> Self {
+ match id {
+ GenericParam::LifetimeParam(it) => GenericParamId::LifetimeParamId(it.id),
+ GenericParam::ConstParam(it) => GenericParamId::ConstParamId(it.id),
+ GenericParam::TypeParam(it) => GenericParamId::TypeParamId(it.id),
+ }
+ }
+}
+
+impl From<EnumVariantId> for Variant {
+ fn from(id: EnumVariantId) -> Self {
+ Variant { parent: id.parent.into(), id: id.local_id }
+ }
+}
+
+impl From<Variant> for EnumVariantId {
+ fn from(def: Variant) -> Self {
+ EnumVariantId { parent: def.parent.id, local_id: def.id }
+ }
+}
+
+impl From<ModuleDefId> for ModuleDef {
+ fn from(id: ModuleDefId) -> Self {
+ match id {
+ ModuleDefId::ModuleId(it) => ModuleDef::Module(it.into()),
+ ModuleDefId::FunctionId(it) => ModuleDef::Function(it.into()),
+ ModuleDefId::AdtId(it) => ModuleDef::Adt(it.into()),
+ ModuleDefId::EnumVariantId(it) => ModuleDef::Variant(it.into()),
+ ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()),
+ ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()),
+ ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()),
+ ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
+ ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it.into()),
+ ModuleDefId::MacroId(it) => ModuleDef::Macro(it.into()),
+ }
+ }
+}
+
+impl From<ModuleDef> for ModuleDefId {
+ fn from(id: ModuleDef) -> Self {
+ match id {
+ ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()),
+ ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()),
+ ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()),
+ ModuleDef::Variant(it) => ModuleDefId::EnumVariantId(it.into()),
+ ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()),
+ ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()),
+ ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()),
+ ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()),
+ ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()),
+ ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()),
+ }
+ }
+}
+
+impl From<DefWithBody> for DefWithBodyId {
+ fn from(def: DefWithBody) -> Self {
+ match def {
+ DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id),
+ DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
+ DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
+ }
+ }
+}
+
+impl From<DefWithBodyId> for DefWithBody {
+ fn from(def: DefWithBodyId) -> Self {
+ match def {
+ DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()),
+ DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()),
+ DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
+ }
+ }
+}
+
+impl From<AssocItemId> for AssocItem {
+ fn from(def: AssocItemId) -> Self {
+ match def {
+ AssocItemId::FunctionId(it) => AssocItem::Function(it.into()),
+ AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()),
+ AssocItemId::ConstId(it) => AssocItem::Const(it.into()),
+ }
+ }
+}
+
+impl From<GenericDef> for GenericDefId {
+ fn from(def: GenericDef) -> Self {
+ match def {
+ GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
+ GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
+ GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
+ GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
+ GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
+ GenericDef::Variant(it) => {
+ GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id })
+ }
+ GenericDef::Const(it) => GenericDefId::ConstId(it.id),
+ }
+ }
+}
+
+impl From<GenericDefId> for GenericDef {
+ fn from(def: GenericDefId) -> Self {
+ match def {
+ GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
+ GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
+ GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
+ GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
+ GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
+ GenericDefId::EnumVariantId(it) => {
+ GenericDef::Variant(Variant { parent: it.parent.into(), id: it.local_id })
+ }
+ GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
+ }
+ }
+}
+
+impl From<Adt> for GenericDefId {
+ fn from(id: Adt) -> Self {
+ match id {
+ Adt::Struct(it) => it.id.into(),
+ Adt::Union(it) => it.id.into(),
+ Adt::Enum(it) => it.id.into(),
+ }
+ }
+}
+
+impl From<VariantId> for VariantDef {
+ fn from(def: VariantId) -> Self {
+ match def {
+ VariantId::StructId(it) => VariantDef::Struct(it.into()),
+ VariantId::EnumVariantId(it) => VariantDef::Variant(it.into()),
+ VariantId::UnionId(it) => VariantDef::Union(it.into()),
+ }
+ }
+}
+
+impl From<VariantDef> for VariantId {
+ fn from(def: VariantDef) -> Self {
+ match def {
+ VariantDef::Struct(it) => VariantId::StructId(it.id),
+ VariantDef::Variant(it) => VariantId::EnumVariantId(it.into()),
+ VariantDef::Union(it) => VariantId::UnionId(it.id),
+ }
+ }
+}
+
+impl From<Field> for FieldId {
+ fn from(def: Field) -> Self {
+ FieldId { parent: def.parent.into(), local_id: def.id }
+ }
+}
+
+impl From<FieldId> for Field {
+ fn from(def: FieldId) -> Self {
+ Field { parent: def.parent.into(), id: def.local_id }
+ }
+}
+
+impl From<AssocItem> for GenericDefId {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(f) => f.id.into(),
+ AssocItem::Const(c) => c.id.into(),
+ AssocItem::TypeAlias(t) => t.id.into(),
+ }
+ }
+}
+
+impl From<(DefWithBodyId, PatId)> for Local {
+ fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self {
+ Local { parent, pat_id }
+ }
+}
+
+impl From<(DefWithBodyId, LabelId)> for Label {
+ fn from((parent, label_id): (DefWithBodyId, LabelId)) -> Self {
+ Label { parent, label_id }
+ }
+}
+
+impl From<hir_def::item_scope::ItemInNs> for ItemInNs {
+ fn from(it: hir_def::item_scope::ItemInNs) -> Self {
+ match it {
+ hir_def::item_scope::ItemInNs::Types(it) => ItemInNs::Types(it.into()),
+ hir_def::item_scope::ItemInNs::Values(it) => ItemInNs::Values(it.into()),
+ hir_def::item_scope::ItemInNs::Macros(it) => ItemInNs::Macros(it.into()),
+ }
+ }
+}
+
+impl From<ItemInNs> for hir_def::item_scope::ItemInNs {
+ fn from(it: ItemInNs) -> Self {
+ match it {
+ ItemInNs::Types(it) => Self::Types(it.into()),
+ ItemInNs::Values(it) => Self::Values(it.into()),
+ ItemInNs::Macros(it) => Self::Macros(it.into()),
+ }
+ }
+}
+
+impl From<hir_def::builtin_type::BuiltinType> for BuiltinType {
+ fn from(inner: hir_def::builtin_type::BuiltinType) -> Self {
+ Self { inner }
+ }
+}
+
+impl From<BuiltinType> for hir_def::builtin_type::BuiltinType {
+ fn from(it: BuiltinType) -> Self {
+ it.inner
+ }
+}
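
Editorial note (not part of the patch): the `from_id!` macro above generates paired `From` conversions in both directions between a hir_def id and its hir wrapper struct. A minimal, self-contained sketch of the same pattern, using hypothetical `FooId`/`Foo` types rather than the real hir_def ids:

    // Sketch of the bidirectional-conversion macro pattern used by `from_id!` above.
    // `FooId` and `Foo` are hypothetical stand-ins for a hir_def id and its hir wrapper.
    macro_rules! from_id {
        ($(($id:path, $ty:path)),*) => {$(
            impl From<$id> for $ty {
                fn from(id: $id) -> $ty {
                    $ty { id }
                }
            }
            impl From<$ty> for $id {
                fn from(ty: $ty) -> $id {
                    ty.id
                }
            }
        )*}
    }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FooId(u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Foo {
        id: FooId,
    }

    from_id![(FooId, Foo)];

    fn main() {
        let id = FooId(42);
        let wrapper: Foo = id.into(); // id -> wrapper
        let back: FooId = wrapper.into(); // wrapper -> id
        assert_eq!(id, back);
    }

The generated impls let either representation be converted with a plain `.into()`, which is why the rest of the crate can move freely between ids and wrappers.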
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
new file mode 100644
index 000000000..f8b01db3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -0,0 +1,174 @@
+//! Provides a set of implementations for hir's objects that allow getting back to their location in the source file.
+
+use either::Either;
+use hir_def::{
+ nameres::{ModuleOrigin, ModuleSource},
+ src::{HasChildSource, HasSource as _},
+ Lookup, MacroId, VariantId,
+};
+use hir_expand::InFile;
+use syntax::ast;
+
+use crate::{
+ db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam, Macro,
+ Module, Static, Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant,
+};
+
+pub trait HasSource {
+ type Ast;
+ /// Fetches the definition's source node.
+ /// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`],
+ /// as that caches the parsed file in the semantics' cache.
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
+}
+
+/// NB: Module is !HasSource, because it has two source nodes at the same time:
+/// definition and declaration.
+impl Module {
+ /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
+ pub fn definition_source(self, db: &dyn HirDatabase) -> InFile<ModuleSource> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].definition_source(db.upcast())
+ }
+
+ pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
+ let def_map = self.id.def_map(db.upcast());
+ match def_map[self.id.local_id].origin {
+ ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs,
+ _ => false,
+ }
+ }
+
+ pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].origin.is_inline()
+ }
+
+ /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
+ /// `None` for the crate root.
+ pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].declaration_source(db.upcast())
+ }
+}
+
+impl HasSource for Field {
+ type Ast = FieldSource;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let var = VariantId::from(self.parent);
+ let src = var.child_source(db.upcast());
+ let field_source = src.map(|it| match it[self.id].clone() {
+ Either::Left(it) => FieldSource::Pos(it),
+ Either::Right(it) => FieldSource::Named(it),
+ });
+ Some(field_source)
+ }
+}
+impl HasSource for Adt {
+ type Ast = ast::Adt;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self {
+ Adt::Struct(s) => Some(s.source(db)?.map(ast::Adt::Struct)),
+ Adt::Union(u) => Some(u.source(db)?.map(ast::Adt::Union)),
+ Adt::Enum(e) => Some(e.source(db)?.map(ast::Adt::Enum)),
+ }
+ }
+}
+impl HasSource for Struct {
+ type Ast = ast::Struct;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Union {
+ type Ast = ast::Union;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Enum {
+ type Ast = ast::Enum;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Variant {
+ type Ast = ast::Variant;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Variant>> {
+ Some(self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone()))
+ }
+}
+impl HasSource for Function {
+ type Ast = ast::Fn;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Const {
+ type Ast = ast::Const;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Static {
+ type Ast = ast::Static;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Trait {
+ type Ast = ast::Trait;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for TypeAlias {
+ type Ast = ast::TypeAlias;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Macro {
+ type Ast = Either<ast::Macro, ast::Fn>;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self.id {
+ MacroId::Macro2Id(it) => Some(
+ it.lookup(db.upcast())
+ .source(db.upcast())
+ .map(ast::Macro::MacroDef)
+ .map(Either::Left),
+ ),
+ MacroId::MacroRulesId(it) => Some(
+ it.lookup(db.upcast())
+ .source(db.upcast())
+ .map(ast::Macro::MacroRules)
+ .map(Either::Left),
+ ),
+ MacroId::ProcMacroId(it) => {
+ Some(it.lookup(db.upcast()).source(db.upcast()).map(Either::Right))
+ }
+ }
+ }
+}
+impl HasSource for Impl {
+ type Ast = ast::Impl;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+
+impl HasSource for TypeOrConstParam {
+ type Ast = Either<ast::TypeOrConstParam, ast::Trait>;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let child_source = self.id.parent.child_source(db.upcast());
+ Some(child_source.map(|it| it[self.id.local_id].clone()))
+ }
+}
+
+impl HasSource for LifetimeParam {
+ type Ast = ast::LifetimeParam;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let child_source = self.id.parent.child_source(db.upcast());
+ Some(child_source.map(|it| it[self.id.local_id].clone()))
+ }
+}
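
Editorial note (not part of the patch): the `HasSource` trait above pairs each hir wrapper with its AST node type through an associated type and returns that node wrapped in `InFile`, so callers also learn which file it came from. A minimal, self-contained sketch of that associated-type pattern, with toy `Db`, `InFile`, `Function`, and `AstFn` types standing in for the real `HirDatabase` machinery:

    // Sketch of the `HasSource` associated-type pattern; every type here is a toy
    // stand-in, not the real rust-analyzer API.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct InFile<T> {
        file_id: u32,
        value: T,
    }

    // Toy "database": maps a function index to (file id, source text of the fn).
    struct Db {
        fns: Vec<(u32, String)>,
    }

    #[derive(Debug, Clone, Copy)]
    struct Function {
        id: usize,
    }

    // Toy AST node for a `fn` item.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct AstFn {
        text: String,
    }

    trait HasSource {
        type Ast;
        // Fetches the definition's source node, if it is still available.
        fn source(self, db: &Db) -> Option<InFile<Self::Ast>>;
    }

    impl HasSource for Function {
        type Ast = AstFn;
        fn source(self, db: &Db) -> Option<InFile<AstFn>> {
            let (file_id, text) = db.fns.get(self.id)?;
            Some(InFile { file_id: *file_id, value: AstFn { text: text.clone() } })
        }
    }

    fn main() {
        let db = Db { fns: vec![(0, "fn main() {}".to_string())] };
        let f = Function { id: 0 };
        let src = f.source(&db).unwrap();
        assert_eq!(src.file_id, 0);
        assert_eq!(src.value.text, "fn main() {}");
    }

Each wrapper picks its own `Ast` type, so callers always get the most specific node back while sharing one trait.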
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
new file mode 100644
index 000000000..8f984210e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -0,0 +1,3639 @@
+//! HIR (previously known as descriptors) provides a high-level object-oriented
+//! access to Rust code.
+//!
+//! The principal difference between HIR and syntax trees is that HIR is bound
+//! to a particular crate instance. That is, it has cfg flags and features
+//! applied. So, the relation between syntax and HIR is many-to-one.
+//!
+//! HIR is the public API of all of the compiler logic above syntax trees.
+//! It is written in "OO" style. Each type is self-contained (as in, it knows its
+//! parents and full context). It should be "clean code".
+//!
+//! `hir_*` crates are the implementation of the compiler logic.
+//! They are written in "ECS" style, with relatively few abstractions.
+//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
+//!
+//! `hir` is what insulates the "we don't know how to actually write an incremental compiler"
+//! part from the IDE, with its completions, hovers, etc. It is a (soft, internal) boundary:
+//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![recursion_limit = "512"]
+
+mod semantics;
+mod source_analyzer;
+
+mod from_id;
+mod attrs;
+mod has_source;
+
+pub mod diagnostics;
+pub mod db;
+pub mod symbols;
+
+mod display;
+
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
+use either::Either;
+use hir_def::{
+ adt::{ReprKind, VariantData},
+ body::{BodyDiagnostic, SyntheticSyntax},
+ expr::{BindingAnnotation, LabelId, Pat, PatId},
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ item_tree::ItemTreeNode,
+ lang_item::LangItemTarget,
+ nameres::{self, diagnostics::DefDiagnostic},
+ per_ns::PerNs,
+ resolver::{HasResolver, Resolver},
+ src::HasSource as _,
+ AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+};
+use hir_expand::{name::name, MacroCallKind};
+use hir_ty::{
+ all_super_traits, autoderef,
+ consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
+ diagnostics::BodyValidationDiagnostic,
+ method_resolution::{self, TyFingerprint},
+ primitive::UintTy,
+ subst_prefix,
+ traits::FnTrait,
+ AliasEq, AliasTy, BoundVar, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast,
+ ClosureId, DebruijnIndex, GenericArgData, InEnvironment, Interner, ParamKind,
+ QuantifiedWhereClause, Scalar, Solution, Substitution, TraitEnvironment, TraitRefExt, Ty,
+ TyBuilder, TyDefId, TyExt, TyKind, TyVariableKind, WhereClause,
+};
+use itertools::Itertools;
+use nameres::diagnostics::DefDiagnosticKind;
+use once_cell::unsync::Lazy;
+use rustc_hash::FxHashSet;
+use stdx::{format_to, impl_from, never};
+use syntax::{
+ ast::{self, HasAttrs as _, HasDocComments, HasName},
+ AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
+};
+
+use crate::db::{DefDatabase, HirDatabase};
+
+pub use crate::{
+ attrs::{HasAttrs, Namespace},
+ diagnostics::{
+ AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
+ MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
+ MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch,
+ UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall,
+ UnresolvedModule, UnresolvedProcMacro,
+ },
+ has_source::HasSource,
+ semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+};
+
+// Be careful with these re-exports.
+//
+// `hir` is the boundary between the compiler and the IDE. It should try hard to
+// isolate the compiler from the IDE, to allow the two to be refactored
+// independently. Re-exporting something from the compiler is the sure way to
+// breach the boundary.
+//
+// Generally, a refactoring which *removes* a name from this list is a good
+// idea!
+pub use {
+ cfg::{CfgAtom, CfgExpr, CfgOptions},
+ hir_def::{
+ adt::StructKind,
+ attr::{Attr, Attrs, AttrsWithOwner, Documentation},
+ builtin_attr::AttributeTemplate,
+ find_path::PrefixKind,
+ import_map,
+ nameres::ModuleSource,
+ path::{ModPath, PathKind},
+ type_ref::{Mutability, TypeRef},
+ visibility::Visibility,
+ },
+ hir_expand::{
+ name::{known, Name},
+ ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ },
+ hir_ty::display::HirDisplay,
+};
+
+// These are negative re-exports: pub using these names is forbidden, they
+// should remain private to hir internals.
+#[allow(unused)]
+use {
+ hir_def::path::Path,
+ hir_expand::{hygiene::Hygiene, name::AsName},
+};
+
+/// hir::Crate describes a single crate. It's the main interface with which
+/// a crate's dependencies interact. Mostly, it should be just a proxy for the
+/// root module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Crate {
+ pub(crate) id: CrateId,
+}
+
+#[derive(Debug)]
+pub struct CrateDependency {
+ pub krate: Crate,
+ pub name: Name,
+}
+
+impl Crate {
+ pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin {
+ db.crate_graph()[self.id].origin.clone()
+ }
+
+ pub fn is_builtin(self, db: &dyn HirDatabase) -> bool {
+ matches!(self.origin(db), CrateOrigin::Lang(_))
+ }
+
+ pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
+ db.crate_graph()[self.id]
+ .dependencies
+ .iter()
+ .map(|dep| {
+ let krate = Crate { id: dep.crate_id };
+ let name = dep.as_name();
+ CrateDependency { krate, name }
+ })
+ .collect()
+ }
+
+ pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
+ let crate_graph = db.crate_graph();
+ crate_graph
+ .iter()
+ .filter(|&krate| {
+ crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
+ })
+ .map(|id| Crate { id })
+ .collect()
+ }
+
+ pub fn transitive_reverse_dependencies(
+ self,
+ db: &dyn HirDatabase,
+ ) -> impl Iterator<Item = Crate> {
+ db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
+ }
+
+ pub fn root_module(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id);
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let def_map = db.crate_def_map(self.id);
+ def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
+ }
+
+ pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
+ db.crate_graph()[self.id].root_file_id
+ }
+
+ pub fn edition(self, db: &dyn HirDatabase) -> Edition {
+ db.crate_graph()[self.id].edition
+ }
+
+ pub fn version(self, db: &dyn HirDatabase) -> Option<String> {
+ db.crate_graph()[self.id].version.clone()
+ }
+
+ pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateDisplayName> {
+ db.crate_graph()[self.id].display_name.clone()
+ }
+
+ pub fn query_external_importables(
+ self,
+ db: &dyn DefDatabase,
+ query: import_map::Query,
+ ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
+ let _p = profile::span("query_external_importables");
+ import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+ match ItemInNs::from(item) {
+ ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
+ ItemInNs::Macros(mac_id) => Either::Right(mac_id),
+ }
+ })
+ }
+
+ pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
+ db.crate_graph().iter().map(|id| Crate { id }).collect()
+ }
+
+ /// Try to get the root URL of the documentation of a crate.
+ pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
+ // Look for #![doc(html_root_url = "...")]
+ let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
+ let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
+ doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+ }
+
+ pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].cfg_options.clone()
+ }
+
+ pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].potential_cfg_options.clone()
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) id: ModuleId,
+}
+
+/// The defs which can be visible in the module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ModuleDef {
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ // Can't be directly declared, but can be imported.
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ Macro(Macro),
+}
+impl_from!(
+ Module,
+ Function,
+ Adt(Struct, Enum, Union),
+ Variant,
+ Const,
+ Static,
+ Trait,
+ TypeAlias,
+ BuiltinType,
+ Macro
+ for ModuleDef
+);
+
+impl From<VariantDef> for ModuleDef {
+ fn from(var: VariantDef) -> Self {
+ match var {
+ VariantDef::Struct(t) => Adt::from(t).into(),
+ VariantDef::Union(t) => Adt::from(t).into(),
+ VariantDef::Variant(t) => t.into(),
+ }
+ }
+}
+
+impl ModuleDef {
+ pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
+ match self {
+ ModuleDef::Module(it) => it.parent(db),
+ ModuleDef::Function(it) => Some(it.module(db)),
+ ModuleDef::Adt(it) => Some(it.module(db)),
+ ModuleDef::Variant(it) => Some(it.module(db)),
+ ModuleDef::Const(it) => Some(it.module(db)),
+ ModuleDef::Static(it) => Some(it.module(db)),
+ ModuleDef::Trait(it) => Some(it.module(db)),
+ ModuleDef::TypeAlias(it) => Some(it.module(db)),
+ ModuleDef::Macro(it) => Some(it.module(db)),
+ ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
+ let mut segments = vec![self.name(db)?];
+ for m in self.module(db)?.path_to_root(db) {
+ segments.extend(m.name(db))
+ }
+ segments.reverse();
+ Some(segments.into_iter().join("::"))
+ }
+
+ pub fn canonical_module_path(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Option<impl Iterator<Item = Module>> {
+ self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let name = match self {
+ ModuleDef::Module(it) => it.name(db)?,
+ ModuleDef::Const(it) => it.name(db)?,
+ ModuleDef::Adt(it) => it.name(db),
+ ModuleDef::Trait(it) => it.name(db),
+ ModuleDef::Function(it) => it.name(db),
+ ModuleDef::Variant(it) => it.name(db),
+ ModuleDef::TypeAlias(it) => it.name(db),
+ ModuleDef::Static(it) => it.name(db),
+ ModuleDef::Macro(it) => it.name(db),
+ ModuleDef::BuiltinType(it) => it.name(),
+ };
+ Some(name)
+ }
+
+ pub fn diagnostics(self, db: &dyn HirDatabase) -> Vec<AnyDiagnostic> {
+ let id = match self {
+ ModuleDef::Adt(it) => match it {
+ Adt::Struct(it) => it.id.into(),
+ Adt::Enum(it) => it.id.into(),
+ Adt::Union(it) => it.id.into(),
+ },
+ ModuleDef::Trait(it) => it.id.into(),
+ ModuleDef::Function(it) => it.id.into(),
+ ModuleDef::TypeAlias(it) => it.id.into(),
+ ModuleDef::Module(it) => it.id.into(),
+ ModuleDef::Const(it) => it.id.into(),
+ ModuleDef::Static(it) => it.id.into(),
+ _ => return Vec::new(),
+ };
+
+ let module = match self.module(db) {
+ Some(it) => it,
+ None => return Vec::new(),
+ };
+
+ let mut acc = Vec::new();
+
+ match self.as_def_with_body() {
+ Some(def) => {
+ def.diagnostics(db, &mut acc);
+ }
+ None => {
+ for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
+ acc.push(diag.into())
+ }
+ }
+ }
+
+ acc
+ }
+
+ pub fn as_def_with_body(self) -> Option<DefWithBody> {
+ match self {
+ ModuleDef::Function(it) => Some(it.into()),
+ ModuleDef::Const(it) => Some(it.into()),
+ ModuleDef::Static(it) => Some(it.into()),
+
+ ModuleDef::Module(_)
+ | ModuleDef::Adt(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Trait(_)
+ | ModuleDef::TypeAlias(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ Some(match self {
+ ModuleDef::Module(it) => it.attrs(db),
+ ModuleDef::Function(it) => it.attrs(db),
+ ModuleDef::Adt(it) => it.attrs(db),
+ ModuleDef::Variant(it) => it.attrs(db),
+ ModuleDef::Const(it) => it.attrs(db),
+ ModuleDef::Static(it) => it.attrs(db),
+ ModuleDef::Trait(it) => it.attrs(db),
+ ModuleDef::TypeAlias(it) => it.attrs(db),
+ ModuleDef::Macro(it) => it.attrs(db),
+ ModuleDef::BuiltinType(_) => return None,
+ })
+ }
+}
+
+impl HasVisibility for ModuleDef {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match *self {
+ ModuleDef::Module(it) => it.visibility(db),
+ ModuleDef::Function(it) => it.visibility(db),
+ ModuleDef::Adt(it) => it.visibility(db),
+ ModuleDef::Const(it) => it.visibility(db),
+ ModuleDef::Static(it) => it.visibility(db),
+ ModuleDef::Trait(it) => it.visibility(db),
+ ModuleDef::TypeAlias(it) => it.visibility(db),
+ ModuleDef::Variant(it) => it.visibility(db),
+ ModuleDef::Macro(it) => it.visibility(db),
+ ModuleDef::BuiltinType(_) => Visibility::Public,
+ }
+ }
+}
+
+impl Module {
+ /// Name of this module.
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let def_map = self.id.def_map(db.upcast());
+ let parent = def_map[self.id.local_id].parent?;
+ def_map[parent].children.iter().find_map(|(name, module_id)| {
+ if *module_id == self.id.local_id {
+ Some(name.clone())
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Returns the crate this module is part of.
+ pub fn krate(self) -> Crate {
+ Crate { id: self.id.krate() }
+ }
+
+ /// Topmost parent of this module. Every module has a `crate_root`, but some
+ /// might be missing `krate`. This can happen if a module's file is not included
+ /// in the module tree of any target in `Cargo.toml`.
+ pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id.krate());
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn is_crate_root(self, db: &dyn HirDatabase) -> bool {
+ let def_map = db.crate_def_map(self.id.krate());
+ def_map.root() == self.id.local_id
+ }
+
+ /// Iterates over all child modules.
+ pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
+ let def_map = self.id.def_map(db.upcast());
+ let children = def_map[self.id.local_id]
+ .children
+ .iter()
+ .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+ .collect::<Vec<_>>();
+ children.into_iter()
+ }
+
+ /// Finds a parent module.
+ pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
+ // FIXME: handle block expressions as modules (their parent is in a different DefMap)
+ let def_map = self.id.def_map(db.upcast());
+ let parent_id = def_map[self.id.local_id].parent?;
+ Some(Module { id: def_map.module_id(parent_id) })
+ }
+
+ pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let mut res = vec![self];
+ let mut curr = self;
+ while let Some(next) = curr.parent(db) {
+ res.push(next);
+ curr = next
+ }
+ res
+ }
+
+ /// Returns a `ModuleScope`: a set of items, visible in this module.
+ pub fn scope(
+ self,
+ db: &dyn HirDatabase,
+ visible_from: Option<Module>,
+ ) -> Vec<(Name, ScopeDef)> {
+ self.id.def_map(db.upcast())[self.id.local_id]
+ .scope
+ .entries()
+ .filter_map(|(name, def)| {
+ if let Some(m) = visible_from {
+ let filtered =
+ def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
+ if filtered.is_none() && !def.is_none() {
+ None
+ } else {
+ Some((name, filtered))
+ }
+ } else {
+ Some((name, def))
+ }
+ })
+ .flat_map(|(name, def)| {
+ ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item))
+ })
+ .collect()
+ }
+
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let _p = profile::span("Module::diagnostics").detail(|| {
+ format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))
+ });
+ let def_map = self.id.def_map(db.upcast());
+ for diag in def_map.diagnostics() {
+ if diag.in_module != self.id.local_id {
+ // FIXME: This is accidentally quadratic.
+ continue;
+ }
+ emit_def_diagnostic(db, acc, diag);
+ }
+ for decl in self.declarations(db) {
+ match decl {
+ ModuleDef::Module(m) => {
+ // Only add diagnostics from inline modules
+ if def_map[m.id.local_id].origin.is_inline() {
+ m.diagnostics(db, acc)
+ }
+ }
+ _ => acc.extend(decl.diagnostics(db)),
+ }
+ }
+
+ for impl_def in self.impl_defs(db) {
+ for item in impl_def.items(db) {
+ let def: DefWithBody = match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::TypeAlias(_) => continue,
+ };
+
+ def.diagnostics(db, acc);
+ }
+ }
+ }
+
+ pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope
+ .declarations()
+ .map(ModuleDef::from)
+ .chain(scope.unnamed_consts().map(|id| ModuleDef::Const(Const::from(id))))
+ .collect()
+ }
+
+ pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
+ }
+
+ pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].scope.impls().map(Impl::from).collect()
+ }
+
+ /// Finds a path that can be used to refer to the given item from within
+ /// this module, if possible.
+ pub fn find_use_path(self, db: &dyn DefDatabase, item: impl Into<ItemInNs>) -> Option<ModPath> {
+ hir_def::find_path::find_path(db, item.into().into(), self.into())
+ }
+
+ /// Finds a path that can be used to refer to the given item from within
+ /// this module, if possible. This is used for returning import paths for use-statements.
+ pub fn find_use_path_prefixed(
+ self,
+ db: &dyn DefDatabase,
+ item: impl Into<ItemInNs>,
+ prefix_kind: PrefixKind,
+ ) -> Option<ModPath> {
+ hir_def::find_path::find_path_prefixed(db, item.into().into(), self.into(), prefix_kind)
+ }
+}
+
+fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
+ match &diag.kind {
+ DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
+ let decl = declaration.to_node(db.upcast());
+ acc.push(
+ UnresolvedModule {
+ decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
+ candidates: candidates.clone(),
+ }
+ .into(),
+ )
+ }
+ DefDiagnosticKind::UnresolvedExternCrate { ast } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedImport { id, index } => {
+ let file_id = id.file_id();
+ let item_tree = id.item_tree(db.upcast());
+ let import = &item_tree[id.value];
+
+ let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
+ acc.push(
+ UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ InactiveCode {
+ node: ast.with_value(AstPtr::new(&item).into()),
+ cfg: cfg.clone(),
+ opts: opts.clone(),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
+ let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db);
+ acc.push(
+ UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
+ let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+ acc.push(
+ UnresolvedMacroCall {
+ macro_call: node,
+ precise_location,
+ path: path.clone(),
+ is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::MacroError { ast, message } => {
+ let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+ acc.push(MacroError { node, precise_location, message: message.clone() }.into());
+ }
+
+ DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
+ let node = ast.to_node(db.upcast());
+ // Must have a name, otherwise we wouldn't emit it.
+ let name = node.name().expect("unimplemented builtin macro with no name");
+ acc.push(
+ UnimplementedBuiltinMacro {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
+ }
+ .into(),
+ );
+ }
+ DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ InvalidDeriveTarget {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ DefDiagnosticKind::MalformedDerive { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ MalformedDerive {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ }
+}
+
+fn precise_macro_call_location(
+ ast: &MacroCallKind,
+ db: &dyn HirDatabase,
+) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
+ // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
+ // - e.g. the full attribute for macro errors, but only the name for name resolution
+ match ast {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ node.path()
+ .and_then(|it| it.segment())
+ .and_then(|it| it.name_ref())
+ .map(|it| it.syntax().text_range()),
+ node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
+ MacroKind::ProcMacro,
+ )
+ }
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+ let node = ast_id.to_node(db.upcast());
+ // Compute the precise location of the macro name's token in the derive
+ // list.
+ let token = (|| {
+ let derive_attr = node
+ .doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(Either::left)?;
+ let token_tree = derive_attr.meta()?.token_tree()?;
+ let group_by = token_tree
+ .syntax()
+ .children_with_tokens()
+ .filter_map(|elem| match elem {
+ syntax::NodeOrToken::Token(tok) => Some(tok),
+ _ => None,
+ })
+ .group_by(|t| t.kind() == T![,]);
+ let (_, mut group) = group_by
+ .into_iter()
+ .filter(|&(comma, _)| !comma)
+ .nth(*derive_index as usize)?;
+ group.find(|t| t.kind() == T![ident])
+ })();
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ token.as_ref().map(|tok| tok.text_range()),
+ token.as_ref().map(ToString::to_string),
+ MacroKind::Derive,
+ )
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ let attr = node
+ .doc_comments_and_attrs()
+ .nth((*invoc_attr_index) as usize)
+ .and_then(Either::left)
+ .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
+
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
+ Some(attr.syntax().text_range()),
+ attr.path()
+ .and_then(|path| path.segment())
+ .and_then(|seg| seg.name_ref())
+ .as_ref()
+ .map(ToString::to_string),
+ MacroKind::Attr,
+ )
+ }
+ }
+}
+
+impl HasVisibility for Module {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let def_map = self.id.def_map(db.upcast());
+ let module_data = &def_map[self.id.local_id];
+ module_data.visibility
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Field {
+ pub(crate) parent: VariantDef,
+ pub(crate) id: LocalFieldId,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum FieldSource {
+ Named(ast::RecordField),
+ Pos(ast::TupleField),
+}
+
+impl Field {
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ self.parent.variant_data(db).fields()[self.id].name.clone()
+ }
+
+ /// Returns the type as in the signature of the struct (i.e., with
+ /// placeholder types for type parameters). Only use this in the context of
+ /// the field definition.
+ pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ let var_id = self.parent.into();
+ let generic_def_id: GenericDefId = match self.parent {
+ VariantDef::Struct(it) => it.id.into(),
+ VariantDef::Union(it) => it.id.into(),
+ VariantDef::Variant(it) => it.parent.id.into(),
+ };
+ let substs = TyBuilder::placeholder_subst(db, generic_def_id);
+ let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
+ Type::new(db, var_id, ty)
+ }
+
+ pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
+ self.parent
+ }
+}
+
+impl HasVisibility for Field {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let variant_data = self.parent.variant_data(db);
+ let visibility = &variant_data.fields()[self.id].visibility;
+ let parent_id: hir_def::VariantId = self.parent.into();
+ visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) id: StructId,
+}
+
+impl Struct {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.struct_data(self.id).name.clone()
+ }
+
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ db.struct_data(self.id)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent: self.into(), id })
+ .collect()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
+ db.struct_data(self.id).repr.clone()
+ }
+
+ pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ self.variant_data(db).kind()
+ }
+
+ fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ db.struct_data(self.id).variant_data.clone()
+ }
+}
+
+impl HasVisibility for Struct {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.struct_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) id: UnionId,
+}
+
+impl Union {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.union_data(self.id).name.clone()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ db.union_data(self.id)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent: self.into(), id })
+ .collect()
+ }
+
+ fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ db.union_data(self.id).variant_data.clone()
+ }
+}
+
+impl HasVisibility for Union {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.union_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) id: EnumId,
+}
+
+impl Enum {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.enum_data(self.id).name.clone()
+ }
+
+ pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
+ db.enum_data(self.id).variants.iter().map(|(id, _)| Variant { parent: self, id }).collect()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+}
+
+impl HasVisibility for Enum {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.enum_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Variant {
+ pub(crate) parent: Enum,
+ pub(crate) id: LocalEnumVariantId,
+}
+
+impl Variant {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.parent.module(db)
+ }
+
+ pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum {
+ self.parent
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.enum_data(self.parent.id).variants[self.id].name.clone()
+ }
+
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ self.variant_data(db)
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent: self.into(), id })
+ .collect()
+ }
+
+ pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ self.variant_data(db).kind()
+ }
+
+ pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
+ }
+}
+
+/// Variants inherit visibility from the parent enum.
+impl HasVisibility for Variant {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ self.parent_enum(db).visibility(db)
+ }
+}
+
+/// A Data Type
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Struct(Struct),
+ Union(Union),
+ Enum(Enum),
+}
+impl_from!(Struct, Union, Enum for Adt);
+
+impl Adt {
+ pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+ let subst = db.generic_defaults(self.into());
+ subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+ GenericArgData::Ty(x) => x.is_unknown(),
+ _ => false,
+ })
+ }
+
+ /// Turns this ADT into a type. Any type parameters of the ADT will be
+ /// turned into unknown types, which is good for e.g. finding the most
+ /// general set of completions, but will not look very nice when printed.
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let id = AdtId::from(self);
+ Type::from_def(db, id)
+ }
+
+ /// Turns this ADT into a type with the given type parameters. This isn't
+ /// the greatest API, FIXME find a better one.
+ pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
+ let id = AdtId::from(self);
+ let mut it = args.iter().map(|t| t.ty.clone());
+ let ty = TyBuilder::def_ty(db, id.into())
+ .fill(|x| {
+ let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
+ match x {
+ ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ Type::new(db, id, ty)
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ Adt::Struct(s) => s.module(db),
+ Adt::Union(s) => s.module(db),
+ Adt::Enum(e) => e.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self {
+ Adt::Struct(s) => s.name(db),
+ Adt::Union(u) => u.name(db),
+ Adt::Enum(e) => e.name(db),
+ }
+ }
+
+ pub fn as_enum(&self) -> Option<Enum> {
+ if let Self::Enum(v) = self {
+ Some(*v)
+ } else {
+ None
+ }
+ }
+}
+
+impl HasVisibility for Adt {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match self {
+ Adt::Struct(it) => it.visibility(db),
+ Adt::Union(it) => it.visibility(db),
+ Adt::Enum(it) => it.visibility(db),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum VariantDef {
+ Struct(Struct),
+ Union(Union),
+ Variant(Variant),
+}
+impl_from!(Struct, Union, Variant for VariantDef);
+
+impl VariantDef {
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ match self {
+ VariantDef::Struct(it) => it.fields(db),
+ VariantDef::Union(it) => it.fields(db),
+ VariantDef::Variant(it) => it.fields(db),
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ VariantDef::Struct(it) => it.module(db),
+ VariantDef::Union(it) => it.module(db),
+ VariantDef::Variant(it) => it.module(db),
+ }
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ match self {
+ VariantDef::Struct(s) => s.name(db),
+ VariantDef::Union(u) => u.name(db),
+ VariantDef::Variant(e) => e.name(db),
+ }
+ }
+
+ pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ match self {
+ VariantDef::Struct(it) => it.variant_data(db),
+ VariantDef::Union(it) => it.variant_data(db),
+ VariantDef::Variant(it) => it.variant_data(db),
+ }
+ }
+}
+
+/// The defs which have a body.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBody {
+ Function(Function),
+ Static(Static),
+ Const(Const),
+}
+impl_from!(Function, Const, Static for DefWithBody);
+
+impl DefWithBody {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ DefWithBody::Const(c) => c.module(db),
+ DefWithBody::Function(f) => f.module(db),
+ DefWithBody::Static(s) => s.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ match self {
+ DefWithBody::Function(f) => Some(f.name(db)),
+ DefWithBody::Static(s) => Some(s.name(db)),
+ DefWithBody::Const(c) => c.name(db),
+ }
+ }
+
+ /// Returns the type this def's body has to evaluate to.
+ pub fn body_type(self, db: &dyn HirDatabase) -> Type {
+ match self {
+ DefWithBody::Function(it) => it.ret_type(db),
+ DefWithBody::Static(it) => it.ty(db),
+ DefWithBody::Const(it) => it.ty(db),
+ }
+ }
+
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let krate = self.module(db).id.krate();
+
+ let (body, source_map) = db.body_with_source_map(self.into());
+
+ for (_, def_map) in body.blocks(db.upcast()) {
+ for diag in def_map.diagnostics() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+
+ for diag in source_map.diagnostics() {
+ match diag {
+ BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
+ InactiveCode { node: node.clone(), cfg: cfg.clone(), opts: opts.clone() }
+ .into(),
+ ),
+ BodyDiagnostic::MacroError { node, message } => acc.push(
+ MacroError {
+ node: node.clone().map(|it| it.into()),
+ precise_location: None,
+ message: message.to_string(),
+ }
+ .into(),
+ ),
+ BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
+ UnresolvedProcMacro {
+ node: node.clone().map(|it| it.into()),
+ precise_location: None,
+ macro_name: None,
+ kind: MacroKind::ProcMacro,
+ krate: *krate,
+ }
+ .into(),
+ ),
+ BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
+ UnresolvedMacroCall {
+ macro_call: node.clone().map(|ast_ptr| ast_ptr.into()),
+ precise_location: None,
+ path: path.clone(),
+ is_bang: true,
+ }
+ .into(),
+ ),
+ }
+ }
+
+ let infer = db.infer(self.into());
+ let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
+ for d in &infer.diagnostics {
+ match d {
+ hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
+ let field = source_map.field_syntax(*expr);
+ acc.push(NoSuchField { field }.into())
+ }
+ hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
+ let expr = source_map
+ .expr_syntax(*expr)
+ .expect("break outside of loop in synthetic syntax");
+ acc.push(BreakOutsideOfLoop { expr }.into())
+ }
+ hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+ match source_map.expr_syntax(*call_expr) {
+ Ok(source_ptr) => acc.push(
+ MismatchedArgCount {
+ call_expr: source_ptr,
+ expected: *expected,
+ found: *found,
+ }
+ .into(),
+ ),
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ }
+ for (expr, mismatch) in infer.expr_type_mismatches() {
+ let expr = match source_map.expr_syntax(expr) {
+ Ok(expr) => expr,
+ Err(SyntheticSyntax) => continue,
+ };
+ acc.push(
+ TypeMismatch {
+ expr,
+ expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
+ actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
+ }
+ .into(),
+ );
+ }
+
+ for expr in hir_ty::diagnostics::missing_unsafe(db, self.into()) {
+ match source_map.expr_syntax(expr) {
+ Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
+ Err(SyntheticSyntax) => {
+                    // FIXME: Here and elsewhere in this file, the `expr` was
+                    // desugared; report or assert that this doesn't happen.
+ }
+ }
+ }
+
+ for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
+ match diagnostic {
+ BodyValidationDiagnostic::RecordMissingFields {
+ record,
+ variant,
+ missed_fields,
+ } => {
+ let variant_data = variant.variant_data(db.upcast());
+ let missed_fields = missed_fields
+ .into_iter()
+ .map(|idx| variant_data.fields()[idx].name.clone())
+ .collect();
+
+ match record {
+ Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::RecordExpr(record_expr) =
+ &source_ptr.value.to_node(&root)
+ {
+ if record_expr.record_expr_field_list().is_some() {
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: Either::Left(AstPtr::new(
+ record_expr,
+ )),
+ field_list_parent_path: record_expr
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ }
+ .into(),
+ )
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
+ Ok(source_ptr) => {
+ if let Some(expr) = source_ptr.value.as_ref().left() {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
+ if record_pat.record_pat_field_list().is_some() {
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: Either::Right(AstPtr::new(
+ &record_pat,
+ )),
+ field_list_parent_path: record_pat
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ }
+ .into(),
+ )
+ }
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ }
+ }
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
+ if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
+ acc.push(
+ ReplaceFilterMapNextWithFindMap {
+ file: next_source_ptr.file_id,
+ next_expr: next_source_ptr.value,
+ }
+ .into(),
+ );
+ }
+ }
+ BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
+ match source_map.expr_syntax(match_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::MatchExpr(match_expr) =
+ &source_ptr.value.to_node(&root)
+ {
+ if let Some(match_expr) = match_expr.expr() {
+ acc.push(
+ MissingMatchArms {
+ file: source_ptr.file_id,
+ match_expr: AstPtr::new(&match_expr),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ }
+
+ let def: ModuleDef = match self {
+ DefWithBody::Function(it) => it.into(),
+ DefWithBody::Static(it) => it.into(),
+ DefWithBody::Const(it) => it.into(),
+ };
+ for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
+ acc.push(diag.into())
+ }
+ }
+}
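+
+// Illustrative sketch, not part of the upstream source: a hypothetical driver
+// showing how `DefWithBody::diagnostics` is meant to be consumed, assuming a
+// `&dyn HirDatabase` is available. Diagnostics are pushed into the caller's
+// accumulator rather than returned directly.
+#[allow(dead_code)]
+fn collect_body_diagnostics_sketch(db: &dyn HirDatabase, def: DefWithBody) -> Vec<AnyDiagnostic> {
+    let mut acc = Vec::new();
+    def.diagnostics(db, &mut acc);
+    acc
+}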
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Function {
+ pub(crate) id: FunctionId,
+}
+
+impl Function {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.lookup(db.upcast()).module(db.upcast()).into()
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.function_data(self.id).name.clone()
+ }
+
+ /// Get this function's return type
+ pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ty = callable_sig.ret().clone();
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
+ if !self.is_async(db) {
+ return None;
+ }
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ret_ty = callable_sig.ret().clone();
+ for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
+ if let WhereClause::AliasEq(output_eq) = pred.into_value_and_skipped_binders().0 {
+ return Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into();
+ }
+ }
+ never!("Async fn ret_type should be impl Future");
+ None
+ }
+
+ pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_self_param()
+ }
+
+ pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+ self.has_self_param(db).then(|| SelfParam { func: self.id })
+ }
+
+ pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
+ pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
+ if self.self_param(db).is_none() {
+ return None;
+ }
+ Some(self.params_without_self(db))
+ }
+
+ pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .skip(skip)
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
+ pub fn is_const(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_const_kw()
+ }
+
+ pub fn is_async(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_async_kw()
+ }
+
+ pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
+ hir_ty::is_fn_unsafe_to_call(db, self.id)
+ }
+
+ /// Whether this function declaration has a definition.
+ ///
+ /// This is false in the case of required (not provided) trait methods.
+ pub fn has_body(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_body()
+ }
+
+ pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
+ let function_data = db.function_data(self.id);
+ let attrs = &function_data.attrs;
+ // FIXME: Store this in FunctionData flags?
+ if !(attrs.is_proc_macro()
+ || attrs.is_proc_macro_attribute()
+ || attrs.is_proc_macro_derive())
+ {
+ return None;
+ }
+ let loc = self.id.lookup(db.upcast());
+ let def_map = db.crate_def_map(loc.krate(db).into());
+ def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
+ }
+
+ /// A textual representation of the HIR of this function for debugging purposes.
+ pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
+ let body = db.body(self.id.into());
+
+ let mut result = String::new();
+ format_to!(result, "HIR expressions in the body of `{}`:\n", self.name(db));
+ for (id, expr) in body.exprs.iter() {
+ format_to!(result, "{:?}: {:?}\n", id, expr);
+ }
+
+ result
+ }
+}
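+
+// Illustrative sketch, not part of the upstream source: a hypothetical helper
+// summarizing a function's signature shape with the accessors above, assuming a
+// `&dyn HirDatabase`. The receiver text comes from `SelfParam::display` and is
+// `None` for free functions and associated functions without a receiver.
+#[allow(dead_code)]
+fn signature_shape_sketch(db: &dyn HirDatabase, func: Function) -> (Option<&'static str>, usize) {
+    (func.self_param(db).map(|p| p.display(db)), func.params_without_self(db).len())
+}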
+
+// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum Access {
+ Shared,
+ Exclusive,
+ Owned,
+}
+
+impl From<hir_ty::Mutability> for Access {
+ fn from(mutability: hir_ty::Mutability) -> Access {
+ match mutability {
+ hir_ty::Mutability::Not => Access::Shared,
+ hir_ty::Mutability::Mut => Access::Exclusive,
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct Param {
+ func: Function,
+    /// The index in the parameter list, including the self parameter.
+ idx: usize,
+ ty: Type,
+}
+
+impl Param {
+ pub fn ty(&self) -> &Type {
+ &self.ty
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+ db.function_data(self.func.id).params[self.idx].0.clone()
+ }
+
+ pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
+ let parent = DefWithBodyId::FunctionId(self.func.into());
+ let body = db.body(parent);
+ let pat_id = body.params[self.idx];
+ if let Pat::Bind { .. } = &body[pat_id] {
+ Some(Local { parent, pat_id: body.params[self.idx] })
+ } else {
+ None
+ }
+ }
+
+ pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
+ self.source(db).and_then(|p| p.value.pat())
+ }
+
+ pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> {
+ let InFile { file_id, value } = self.func.source(db)?;
+ let params = value.param_list()?;
+ if params.self_param().is_some() {
+ params.params().nth(self.idx.checked_sub(1)?)
+ } else {
+ params.params().nth(self.idx)
+ }
+ .map(|value| InFile { file_id, value })
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+ func: FunctionId,
+}
+
+impl SelfParam {
+ pub fn access(self, db: &dyn HirDatabase) -> Access {
+ let func_data = db.function_data(self.func);
+ func_data
+ .params
+ .first()
+ .map(|(_, param)| match &**param {
+ TypeRef::Reference(.., mutability) => match mutability {
+ hir_def::type_ref::Mutability::Shared => Access::Shared,
+ hir_def::type_ref::Mutability::Mut => Access::Exclusive,
+ },
+ _ => Access::Owned,
+ })
+ .unwrap_or(Access::Owned)
+ }
+
+ pub fn display(self, db: &dyn HirDatabase) -> &'static str {
+ match self.access(db) {
+ Access::Shared => "&self",
+ Access::Exclusive => "&mut self",
+ Access::Owned => "self",
+ }
+ }
+
+ pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
+ let InFile { file_id, value } = Function::from(self.func).source(db)?;
+ value
+ .param_list()
+ .and_then(|params| params.self_param())
+ .map(|value| InFile { file_id, value })
+ }
+
+ pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ let substs = TyBuilder::placeholder_subst(db, self.func);
+ let callable_sig =
+ db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
+ let environment = db.trait_environment(self.func.into());
+ let ty = callable_sig.params()[0].clone();
+ Type { env: environment, ty }
+ }
+}
+
+impl HasVisibility for Function {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.function_visibility(self.id)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Const {
+ pub(crate) id: ConstId,
+}
+
+impl Const {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ db.const_data(self.id).name.clone()
+ }
+
+ pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+ self.source(db)?.value.body()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let data = db.const_data(self.id);
+ let resolver = self.id.resolver(db.upcast());
+ let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+ let ty = ctx.lower_ty(&data.type_ref);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
+ db.const_eval(self.id)
+ }
+}
+
+impl HasVisibility for Const {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.const_visibility(self.id)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Static {
+ pub(crate) id: StaticId,
+}
+
+impl Static {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.static_data(self.id).name.clone()
+ }
+
+ pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+ db.static_data(self.id).mutable
+ }
+
+ pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+ self.source(db)?.value.body()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let data = db.static_data(self.id);
+ let resolver = self.id.resolver(db.upcast());
+ let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+ let ty = ctx.lower_ty(&data.type_ref);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+}
+
+impl HasVisibility for Static {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.static_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Trait {
+ pub(crate) id: TraitId,
+}
+
+impl Trait {
+ pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
+ db.lang_item(krate.into(), name.to_smol_str())
+ .and_then(LangItemTarget::as_trait)
+ .map(Into::into)
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.trait_data(self.id).name.clone()
+ }
+
+ pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+ }
+
+ pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ let traits = all_super_traits(db.upcast(), self.into());
+ traits.iter().flat_map(|tr| Trait::from(*tr).items(db)).collect()
+ }
+
+ pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
+ db.trait_data(self.id).is_auto
+ }
+
+ pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool {
+ db.trait_data(self.id).is_unsafe
+ }
+
+ pub fn type_or_const_param_count(
+ &self,
+ db: &dyn HirDatabase,
+ count_required_only: bool,
+ ) -> usize {
+ db.generic_params(GenericDefId::from(self.id))
+ .type_or_consts
+ .iter()
+ .filter(|(_, ty)| match ty {
+ TypeOrConstParamData::TypeParamData(ty)
+ if ty.provenance != TypeParamProvenance::TypeParamList =>
+ {
+ false
+ }
+ _ => true,
+ })
+ .filter(|(_, ty)| !count_required_only || !ty.has_default())
+ .count()
+ }
+}
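+
+// Illustrative sketch, not part of the upstream source: a hypothetical helper
+// contrasting the two modes of `Trait::type_or_const_param_count`, assuming a
+// `&dyn HirDatabase`. The first count includes parameters with defaults, the
+// second only those a caller is required to supply.
+#[allow(dead_code)]
+fn trait_param_counts_sketch(db: &dyn HirDatabase, trait_: Trait) -> (usize, usize) {
+    (trait_.type_or_const_param_count(db, false), trait_.type_or_const_param_count(db, true))
+}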
+
+impl HasVisibility for Trait {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.trait_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+ pub(crate) id: TypeAliasId,
+}
+
+impl TypeAlias {
+ pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+ let subst = db.generic_defaults(self.id.into());
+ subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+ GenericArgData::Ty(x) => x.is_unknown(),
+ _ => false,
+ })
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+ }
+
+ pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> {
+ db.type_alias_data(self.id).type_ref.as_deref().cloned()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.type_alias_data(self.id).name.clone()
+ }
+}
+
+impl HasVisibility for TypeAlias {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let function_data = db.type_alias_data(self.id);
+ let visibility = &function_data.visibility;
+ visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BuiltinType {
+ pub(crate) inner: hir_def::builtin_type::BuiltinType,
+}
+
+impl BuiltinType {
+ pub fn str() -> BuiltinType {
+ BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner))
+ }
+
+ pub fn name(self) -> Name {
+ self.inner.as_name()
+ }
+
+ pub fn is_int(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Int(_))
+ }
+
+ pub fn is_uint(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Uint(_))
+ }
+
+ pub fn is_float(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_))
+ }
+
+ pub fn is_char(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Char)
+ }
+
+ pub fn is_bool(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Bool)
+ }
+
+ pub fn is_str(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Str)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroKind {
+ /// `macro_rules!` or Macros 2.0 macro.
+ Declarative,
+ /// A built-in or custom derive.
+ Derive,
+ /// A built-in function-like macro.
+ BuiltIn,
+ /// A procedural attribute macro.
+ Attr,
+ /// A function-like procedural macro.
+ ProcMacro,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Macro {
+ pub(crate) id: MacroId,
+}
+
+impl Macro {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.module(db.upcast()) }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self.id {
+ MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(),
+ MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(),
+ MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(),
+ }
+ }
+
+ pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
+ matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
+ }
+
+ pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
+ match self.id {
+ MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
+ MacroExpander::Declarative => MacroKind::Declarative,
+ MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+ MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+ MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+ },
+ MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
+ MacroExpander::Declarative => MacroKind::Declarative,
+ MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+ MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+ MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+ },
+ MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
+ ProcMacroKind::CustomDerive => MacroKind::Derive,
+ ProcMacroKind::FuncLike => MacroKind::ProcMacro,
+ ProcMacroKind::Attr => MacroKind::Attr,
+ },
+ }
+ }
+
+ pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
+ match self.kind(db) {
+ MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
+ MacroKind::Attr | MacroKind::Derive => false,
+ }
+ }
+
+ pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
+ match self.id {
+ MacroId::Macro2Id(it) => {
+ matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+ }
+ MacroId::MacroRulesId(it) => {
+ matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+ }
+ MacroId::ProcMacroId(_) => false,
+ }
+ }
+
+ pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
+ matches!(self.kind(db), MacroKind::Attr)
+ }
+
+ pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
+ matches!(self.kind(db), MacroKind::Derive)
+ }
+}
+
+impl HasVisibility for Macro {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match self.id {
+ MacroId::Macro2Id(id) => {
+ let data = db.macro2_data(id);
+ let visibility = &data.visibility;
+ visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+ MacroId::MacroRulesId(_) => Visibility::Public,
+ MacroId::ProcMacroId(_) => Visibility::Public,
+ }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum ItemInNs {
+ Types(ModuleDef),
+ Values(ModuleDef),
+ Macros(Macro),
+}
+
+impl From<Macro> for ItemInNs {
+ fn from(it: Macro) -> Self {
+ Self::Macros(it)
+ }
+}
+
+impl From<ModuleDef> for ItemInNs {
+ fn from(module_def: ModuleDef) -> Self {
+ match module_def {
+ ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
+ ItemInNs::Values(module_def)
+ }
+ _ => ItemInNs::Types(module_def),
+ }
+ }
+}
+
+impl ItemInNs {
+ pub fn as_module_def(self) -> Option<ModuleDef> {
+ match self {
+ ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
+ ItemInNs::Macros(_) => None,
+ }
+ }
+
+ /// Returns the crate defining this item (or `None` if `self` is built-in).
+ pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+ match self {
+ ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate()),
+ ItemInNs::Macros(id) => Some(id.module(db).krate()),
+ }
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ match self {
+ ItemInNs::Types(it) | ItemInNs::Values(it) => it.attrs(db),
+ ItemInNs::Macros(it) => Some(it.attrs(db)),
+ }
+ }
+}
+
+/// Invariant: `inner.as_assoc_item(db).is_some()`
+/// We do not actively enforce this invariant.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ Function(Function),
+ Const(Const),
+ TypeAlias(TypeAlias),
+}
+#[derive(Debug)]
+pub enum AssocItemContainer {
+ Trait(Trait),
+ Impl(Impl),
+}
+pub trait AsAssocItem {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem>;
+}
+
+impl AsAssocItem for Function {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ as_assoc_item(db, AssocItem::Function, self.id)
+ }
+}
+impl AsAssocItem for Const {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ as_assoc_item(db, AssocItem::Const, self.id)
+ }
+}
+impl AsAssocItem for TypeAlias {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ as_assoc_item(db, AssocItem::TypeAlias, self.id)
+ }
+}
+impl AsAssocItem for ModuleDef {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ match self {
+ ModuleDef::Function(it) => it.as_assoc_item(db),
+ ModuleDef::Const(it) => it.as_assoc_item(db),
+ ModuleDef::TypeAlias(it) => it.as_assoc_item(db),
+ _ => None,
+ }
+ }
+}
+fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
+where
+ ID: Lookup<Data = AssocItemLoc<AST>>,
+ DEF: From<ID>,
+ CTOR: FnOnce(DEF) -> AssocItem,
+ AST: ItemTreeNode,
+{
+ match id.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+}
+
+impl AssocItem {
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ match self {
+ AssocItem::Function(it) => Some(it.name(db)),
+ AssocItem::Const(it) => it.name(db),
+ AssocItem::TypeAlias(it) => Some(it.name(db)),
+ }
+ }
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ AssocItem::Function(f) => f.module(db),
+ AssocItem::Const(c) => c.module(db),
+ AssocItem::TypeAlias(t) => t.module(db),
+ }
+ }
+ pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
+ let container = match self {
+ AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
+ AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
+ AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
+ };
+ match container {
+ ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
+ ItemContainerId::ImplId(id) => AssocItemContainer::Impl(id.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+ panic!("invalid AssocItem")
+ }
+ }
+ }
+
+ pub fn containing_trait(self, db: &dyn HirDatabase) -> Option<Trait> {
+ match self.container(db) {
+ AssocItemContainer::Trait(t) => Some(t),
+ _ => None,
+ }
+ }
+
+ pub fn containing_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+ match self.container(db) {
+ AssocItemContainer::Impl(i) => i.trait_(db),
+ _ => None,
+ }
+ }
+
+ pub fn containing_trait_or_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+ match self.container(db) {
+ AssocItemContainer::Trait(t) => Some(t),
+ AssocItemContainer::Impl(i) => i.trait_(db),
+ }
+ }
+}
+
+impl HasVisibility for AssocItem {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match self {
+ AssocItem::Function(f) => f.visibility(db),
+ AssocItem::Const(c) => c.visibility(db),
+ AssocItem::TypeAlias(t) => t.visibility(db),
+ }
+ }
+}
+
+impl From<AssocItem> for ModuleDef {
+ fn from(assoc: AssocItem) -> Self {
+ match assoc {
+ AssocItem::Function(it) => ModuleDef::Function(it),
+ AssocItem::Const(it) => ModuleDef::Const(it),
+ AssocItem::TypeAlias(it) => ModuleDef::TypeAlias(it),
+ }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum GenericDef {
+ Function(Function),
+ Adt(Adt),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ Impl(Impl),
+ // enum variants cannot have generics themselves, but their parent enums
+ // can, and this makes some code easier to write
+ Variant(Variant),
+ // consts can have type parameters from their parents (i.e. associated consts of traits)
+ Const(Const),
+}
+impl_from!(
+ Function,
+ Adt(Struct, Enum, Union),
+ Trait,
+ TypeAlias,
+ Impl,
+ Variant,
+ Const
+ for GenericDef
+);
+
+impl GenericDef {
+ pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
+ let generics = db.generic_params(self.into());
+ let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
+ let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
+ match toc.split(db) {
+ Either::Left(x) => GenericParam::ConstParam(x),
+ Either::Right(x) => GenericParam::TypeParam(x),
+ }
+ });
+ let lt_params = generics
+ .lifetimes
+ .iter()
+ .map(|(local_id, _)| LifetimeParam {
+ id: LifetimeParamId { parent: self.into(), local_id },
+ })
+ .map(GenericParam::LifetimeParam);
+ lt_params.chain(ty_params).collect()
+ }
+
+ pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
+ let generics = db.generic_params(self.into());
+ generics
+ .type_or_consts
+ .iter()
+ .map(|(local_id, _)| TypeOrConstParam {
+ id: TypeOrConstParamId { parent: self.into(), local_id },
+ })
+ .collect()
+ }
+}
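+
+// Illustrative sketch, not part of the upstream source: a hypothetical helper
+// splitting `GenericDef::params` (which yields lifetime parameters first, then
+// type and const parameters) into a count per kind, assuming a `&dyn HirDatabase`.
+#[allow(dead_code)]
+fn generic_param_counts_sketch(db: &dyn HirDatabase, def: GenericDef) -> (usize, usize) {
+    let params = def.params(db);
+    let lifetimes = params.iter().filter(|p| matches!(p, GenericParam::LifetimeParam(_))).count();
+    (lifetimes, params.len() - lifetimes)
+}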
+
+/// A single local definition.
+///
+/// If the definition of this is part of a "MultiLocal", that is, a local that has multiple
+/// declarations due to or-patterns, then this only references a single one of those.
+/// To retrieve the other locals you should use [`Local::associated_locals`].
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Local {
+ pub(crate) parent: DefWithBodyId,
+ pub(crate) pat_id: PatId,
+}
+
+impl Local {
+ pub fn is_param(self, db: &dyn HirDatabase) -> bool {
+ let src = self.source(db);
+ match src.value {
+ Either::Left(pat) => pat
+ .syntax()
+ .ancestors()
+ .map(|it| it.kind())
+ .take_while(|&kind| ast::Pat::can_cast(kind) || ast::Param::can_cast(kind))
+ .any(ast::Param::can_cast),
+ Either::Right(_) => true,
+ }
+ }
+
+ pub fn as_self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+ match self.parent {
+ DefWithBodyId::FunctionId(func) if self.is_self(db) => Some(SelfParam { func }),
+ _ => None,
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let body = db.body(self.parent);
+ match &body[self.pat_id] {
+ Pat::Bind { name, .. } => name.clone(),
+ _ => {
+ stdx::never!("hir::Local is missing a name!");
+ Name::missing()
+ }
+ }
+ }
+
+ pub fn is_self(self, db: &dyn HirDatabase) -> bool {
+ self.name(db) == name![self]
+ }
+
+ pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+ let body = db.body(self.parent);
+ matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
+ }
+
+ pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
+ let body = db.body(self.parent);
+ matches!(
+ &body[self.pat_id],
+ Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
+ )
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+ self.parent.into()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.parent(db).module(db)
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let def = self.parent;
+ let infer = db.infer(def);
+ let ty = infer[self.pat_id].clone();
+ Type::new(db, def, ty)
+ }
+
+ pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
+ let body = db.body(self.parent);
+ body.ident_patterns_for(&self.pat_id)
+ .iter()
+ .map(|&pat_id| Local { parent: self.parent, pat_id })
+ .collect()
+ }
+
+    /// If this local is part of a multi-local, retrieve the representative local,
+    /// that is, the local that references resolve to.
+ pub fn representative(self, db: &dyn HirDatabase) -> Local {
+ let body = db.body(self.parent);
+ Local { pat_id: body.pattern_representative(self.pat_id), ..self }
+ }
+
+ pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
+ let (_body, source_map) = db.body_with_source_map(self.parent);
+        let src = source_map.pat_syntax(self.pat_id).unwrap(); // FIXME: may panic for synthetic patterns
+ let root = src.file_syntax(db.upcast());
+ src.map(|ast| match ast {
+            // Suspicious unwrap: assumes the pattern pointer casts to an `ast::IdentPat`
+ Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
+ Either::Right(it) => Either::Right(it.to_node(&root)),
+ })
+ }
+}
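+
+// Illustrative sketch, not part of the upstream source: a hypothetical helper
+// demonstrating the multi-local API documented above, assuming a
+// `&dyn HirDatabase`. References resolve to the representative local; the other
+// or-pattern declarations are its associated locals.
+#[allow(dead_code)]
+fn multi_local_sketch(db: &dyn HirDatabase, local: Local) -> (Local, Box<[Local]>) {
+    (local.representative(db), local.associated_locals(db))
+}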
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct DeriveHelper {
+ pub(crate) derive: MacroId,
+ pub(crate) idx: usize,
+}
+
+impl DeriveHelper {
+ pub fn derive(&self) -> Macro {
+ Macro { id: self.derive.into() }
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ match self.derive {
+ MacroId::Macro2Id(_) => None,
+ MacroId::MacroRulesId(_) => None,
+ MacroId::ProcMacroId(proc_macro) => db
+ .proc_macro_data(proc_macro)
+ .helpers
+ .as_ref()
+ .and_then(|it| it.get(self.idx))
+ .cloned(),
+ }
+ .unwrap_or_else(|| Name::missing())
+ }
+}
+
+// FIXME: Wrong name? This could also be a registered attribute.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct BuiltinAttr {
+ krate: Option<CrateId>,
+ idx: usize,
+}
+
+impl BuiltinAttr {
+    // FIXME: consider crates/hir_def/src/nameres/attr_resolution.rs?
+ pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+ if let builtin @ Some(_) = Self::builtin(name) {
+ return builtin;
+ }
+ let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?;
+ Some(BuiltinAttr { krate: Some(krate.id), idx })
+ }
+
+ fn builtin(name: &str) -> Option<Self> {
+ hir_def::builtin_attr::INERT_ATTRIBUTES
+ .iter()
+ .position(|tool| tool.name == name)
+ .map(|idx| BuiltinAttr { krate: None, idx })
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+ // FIXME: Return a `Name` here
+ match self.krate {
+ Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(),
+ None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name),
+ }
+ }
+
+ pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
+ match self.krate {
+ Some(_) => None,
+ None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ToolModule {
+ krate: Option<CrateId>,
+ idx: usize,
+}
+
+impl ToolModule {
+    // FIXME: consider crates/hir_def/src/nameres/attr_resolution.rs?
+ pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+ if let builtin @ Some(_) = Self::builtin(name) {
+ return builtin;
+ }
+ let idx = db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)?;
+ Some(ToolModule { krate: Some(krate.id), idx })
+ }
+
+ fn builtin(name: &str) -> Option<Self> {
+ hir_def::builtin_attr::TOOL_MODULES
+ .iter()
+ .position(|&tool| tool == name)
+ .map(|idx| ToolModule { krate: None, idx })
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+ // FIXME: Return a `Name` here
+ match self.krate {
+ Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx].clone(),
+ None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx]),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Label {
+ pub(crate) parent: DefWithBodyId,
+ pub(crate) label_id: LabelId,
+}
+
+impl Label {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.parent(db).module(db)
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+ self.parent.into()
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let body = db.body(self.parent);
+ body[self.label_id].name.clone()
+ }
+
+ pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
+ let (_body, source_map) = db.body_with_source_map(self.parent);
+ let src = source_map.label_syntax(self.label_id);
+ let root = src.file_syntax(db.upcast());
+ src.map(|ast| ast.to_node(&root))
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+ TypeParam(TypeParam),
+ ConstParam(ConstParam),
+ LifetimeParam(LifetimeParam),
+}
+impl_from!(TypeParam, ConstParam, LifetimeParam for GenericParam);
+
+impl GenericParam {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ GenericParam::TypeParam(it) => it.module(db),
+ GenericParam::ConstParam(it) => it.module(db),
+ GenericParam::LifetimeParam(it) => it.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self {
+ GenericParam::TypeParam(it) => it.name(db),
+ GenericParam::ConstParam(it) => it.name(db),
+ GenericParam::LifetimeParam(it) => it.name(db),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+ pub(crate) id: TypeParamId,
+}
+
+impl TypeParam {
+ pub fn merge(self) -> TypeOrConstParam {
+ TypeOrConstParam { id: self.id.into() }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ self.merge().name(db)
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent().module(db.upcast()).into()
+ }
+
+    /// Is this type parameter implicitly introduced (e.g. `Self` in a trait or an `impl Trait`
+ /// argument)?
+ pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
+ let params = db.generic_params(self.id.parent());
+ let data = &params.type_or_consts[self.id.local_id()];
+ match data.type_param().unwrap().provenance {
+ hir_def::generics::TypeParamProvenance::TypeParamList => false,
+ hir_def::generics::TypeParamProvenance::TraitSelf
+ | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true,
+ }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.parent().resolver(db.upcast());
+ let ty =
+ TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ /// FIXME: this only lists trait bounds from the item defining the type
+ /// parameter, not additional bounds that might be added e.g. by a method if
+ /// the parameter comes from an impl!
+ pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
+ db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
+ .iter()
+ .filter_map(|pred| match &pred.skip_binders().skip_binders() {
+ hir_ty::WhereClause::Implemented(trait_ref) => {
+ Some(Trait::from(trait_ref.hir_trait_id()))
+ }
+ _ => None,
+ })
+ .collect()
+ }
+
+ pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
+ let params = db.generic_defaults(self.id.parent());
+ let local_idx = hir_ty::param_idx(db, self.id.into())?;
+ let resolver = self.id.parent().resolver(db.upcast());
+ let ty = params.get(local_idx)?.clone();
+ let subst = TyBuilder::placeholder_subst(db, self.id.parent());
+ let ty = ty.substitute(Interner, &subst_prefix(&subst, local_idx));
+ match ty.data(Interner) {
+ GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+ pub(crate) id: LifetimeParamId,
+}
+
+impl LifetimeParam {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent);
+ params.lifetimes[self.id.local_id].name.clone()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent.module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent.into()
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+ pub(crate) id: ConstParamId,
+}
+
+impl ConstParam {
+ pub fn merge(self) -> TypeOrConstParam {
+ TypeOrConstParam { id: self.id.into() }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent());
+ match params.type_or_consts[self.id.local_id()].name() {
+ Some(x) => x.clone(),
+ None => {
+ never!();
+ Name::missing()
+ }
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent().module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent().into()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::new(db, self.id.parent(), db.const_param_ty(self.id))
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeOrConstParam {
+ pub(crate) id: TypeOrConstParamId,
+}
+
+impl TypeOrConstParam {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent);
+ match params.type_or_consts[self.id.local_id].name() {
+ Some(n) => n.clone(),
+ _ => Name::missing(),
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent.module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent.into()
+ }
+
+ pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
+ let params = db.generic_params(self.id.parent);
+ match &params.type_or_consts[self.id.local_id] {
+ hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+ Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
+ }
+ hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+ Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) })
+ }
+ }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ match self.split(db) {
+ Either::Left(x) => x.ty(db),
+ Either::Right(x) => x.ty(db),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) id: ImplId,
+}
+
+impl Impl {
+ pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
+ let inherent = db.inherent_impls_in_crate(krate.id);
+ let trait_ = db.trait_impls_in_crate(krate.id);
+
+ inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
+ }
+
+ pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
+ let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
+ Some(def_crates) => def_crates,
+ None => return Vec::new(),
+ };
+
+ let filter = |impl_def: &Impl| {
+ let self_ty = impl_def.self_ty(db);
+ let rref = self_ty.remove_ref();
+ ty.equals_ctor(rref.as_ref().map_or(&self_ty.ty, |it| &it.ty))
+ };
+
+ let fp = TyFingerprint::for_inherent_impl(&ty);
+ let fp = match fp {
+ Some(fp) => fp,
+ None => return Vec::new(),
+ };
+
+ let mut all = Vec::new();
+ def_crates.iter().for_each(|&id| {
+ all.extend(
+ db.inherent_impls_in_crate(id)
+ .for_self_ty(&ty)
+ .iter()
+ .cloned()
+ .map(Self::from)
+ .filter(filter),
+ )
+ });
+ for id in def_crates
+ .iter()
+ .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
+ .map(|Crate { id }| id)
+ .chain(def_crates.iter().copied())
+ .unique()
+ {
+ all.extend(
+ db.trait_impls_in_crate(id)
+ .for_self_ty_without_blanket_impls(fp)
+ .map(Self::from)
+ .filter(filter),
+ );
+ }
+ all
+ }
+
+ pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
+ let krate = trait_.module(db).krate();
+ let mut all = Vec::new();
+ for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+ let impls = db.trait_impls_in_crate(id);
+ all.extend(impls.for_trait(trait_.id).map(Self::from))
+ }
+ all
+ }
+
+ // FIXME: the return type is wrong. This should be a hir version of
+ // `TraitRef` (to account for parameters and qualifiers)
+ pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
+ let trait_ref = db.impl_trait(self.id)?.skip_binders().clone();
+ let id = hir_ty::from_chalk_trait_id(trait_ref.trait_id);
+ Some(Trait { id })
+ }
+
+ pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
+ }
+
+ pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
+ db.impl_data(self.id).is_negative
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.lookup(db.upcast()).container.into()
+ }
+
+ pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ let src = self.source(db)?;
+ src.file_id.is_builtin_derive(db.upcast())
+ }
+}
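+
+// Illustrative sketch, not part of the upstream source: a hypothetical helper
+// listing the traits implemented for a type, assuming a `&dyn HirDatabase`.
+// `Impl::trait_` returns `None` for inherent impls, so those are filtered out.
+#[allow(dead_code)]
+fn traits_implemented_for_sketch(db: &dyn HirDatabase, ty: Type) -> Vec<Trait> {
+    Impl::all_for_type(db, ty).into_iter().filter_map(|imp| imp.trait_(db)).collect()
+}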
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct Type {
+ env: Arc<TraitEnvironment>,
+ ty: Ty,
+}
+
+impl Type {
+ pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
+ Type::new_with_resolver_inner(db, resolver, ty)
+ }
+
+ pub(crate) fn new_with_resolver_inner(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ ty: Ty,
+ ) -> Type {
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
+ Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ }
+
+ pub fn reference(inner: &Type, m: Mutability) -> Type {
+ inner.derived(
+ TyKind::Ref(
+ if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
+ hir_ty::static_lifetime(),
+ inner.ty.clone(),
+ )
+ .intern(Interner),
+ )
+ }
+
+ fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
+ let resolver = lexical_env.resolver(db.upcast());
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into<TyDefId>) -> Type {
+ let ty = TyBuilder::def_ty(db, def.into()).fill_with_unknown().build();
+ Type::new(db, def, ty)
+ }
+
+ pub fn new_slice(ty: Type) -> Type {
+ Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
+ }
+
+ pub fn is_unit(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Tuple(0, ..))
+ }
+
+ pub fn is_bool(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Bool))
+ }
+
+ pub fn is_never(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Never)
+ }
+
+ pub fn is_mutable_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(hir_ty::Mutability::Mut, ..))
+ }
+
+ pub fn is_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(..))
+ }
+
+ pub fn as_reference(&self) -> Option<(Type, Mutability)> {
+ let (ty, _lt, m) = self.ty.as_reference()?;
+ let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
+ Some((self.derived(ty.clone()), m))
+ }
+
+ pub fn is_slice(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Slice(..))
+ }
+
+ pub fn is_usize(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
+ }
+
+ pub fn remove_ref(&self) -> Option<Type> {
+ match &self.ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
+ _ => None,
+ }
+ }
+
+ pub fn strip_references(&self) -> Type {
+ self.derived(self.ty.strip_references().clone())
+ }
+
+ pub fn strip_reference(&self) -> Type {
+ self.derived(self.ty.strip_reference().clone())
+ }
+
+ pub fn is_unknown(&self) -> bool {
+ self.ty.is_unknown()
+ }
+
+    /// Checks whether this type implements `std::future::Future`.
+ /// This function is used in `.await` syntax completion.
+ pub fn impls_future(&self, db: &dyn HirDatabase) -> bool {
+ let std_future_trait = db
+ .lang_item(self.env.krate, SmolStr::new_inline("future_trait"))
+ .and_then(|it| it.as_trait());
+ let std_future_trait = match std_future_trait {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait)
+ }
+
+    /// Checks whether this type implements `std::ops::FnOnce`.
+    ///
+    /// This function can be used to check if a particular type is callable, since `FnOnce` is a
+    /// supertrait of `Fn` and `FnMut`, so all callable types implement at least `FnOnce`.
+ pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait_unique(
+ &canonical_ty,
+ db,
+ self.env.clone(),
+ fnonce_trait,
+ )
+ }
+
+ pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
+ let mut it = args.iter().map(|t| t.ty.clone());
+ let trait_ref = TyBuilder::trait_ref(db, trait_.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ let r = it.next().unwrap();
+ match x {
+ ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Const(ty) => {
+ // FIXME: this code is not covered in tests.
+ unknown_const_as_generic(ty.clone())
+ }
+ }
+ })
+ .build();
+
+ let goal = Canonical {
+ value: hir_ty::InEnvironment::new(&self.env.env, trait_ref.cast(Interner)),
+ binders: CanonicalVarKinds::empty(Interner),
+ };
+
+ db.trait_solve(self.env.krate, goal).is_some()
+ }
+
+ pub fn normalize_trait_assoc_type(
+ &self,
+ db: &dyn HirDatabase,
+ args: &[Type],
+ alias: TypeAlias,
+ ) -> Option<Type> {
+ let mut args = args.iter();
+ let projection = TyBuilder::assoc_type_projection(db, alias.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ // FIXME: this code is not covered in tests.
+ match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ let goal = hir_ty::make_canonical(
+ InEnvironment::new(
+ &self.env.env,
+ AliasEq {
+ alias: AliasTy::Projection(projection),
+ ty: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ }
+ .cast(Interner),
+ ),
+ [TyVariableKind::General].into_iter(),
+ );
+
+ match db.trait_solve(self.env.krate, goal)? {
+ Solution::Unique(s) => s
+ .value
+ .subst
+ .as_slice(Interner)
+ .first()
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone())),
+ Solution::Ambig(_) => None,
+ }
+ }
+
+ pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
+ let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy"));
+ let copy_trait = match lang_item {
+ Some(LangItemTarget::TraitId(it)) => it,
+ _ => return false,
+ };
+ self.impls_trait(db, copy_trait.into(), &[])
+ }
+
+ pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
+ let callee = match self.ty.kind(Interner) {
+ TyKind::Closure(id, _) => Callee::Closure(*id),
+ TyKind::Function(_) => Callee::FnPtr,
+ _ => Callee::Def(self.ty.callable_def(db)?),
+ };
+
+ let sig = self.ty.callable_sig(db)?;
+ Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
+ }
+
+ pub fn is_closure(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Closure { .. })
+ }
+
+ pub fn is_fn(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
+ }
+
+ pub fn is_array(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Array(..))
+ }
+
+ pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+ let adt_id = match *self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
+ _ => return false,
+ };
+
+ let adt = adt_id.into();
+ match adt {
+ Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
+ _ => false,
+ }
+ }
+
+ pub fn is_raw_ptr(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Raw(..))
+ }
+
+ pub fn contains_unknown(&self) -> bool {
+ return go(&self.ty);
+
+ fn go(ty: &Ty) -> bool {
+ match ty.kind(Interner) {
+ TyKind::Error => true,
+
+ TyKind::Adt(_, substs)
+ | TyKind::AssociatedType(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::OpaqueType(_, substs)
+ | TyKind::FnDef(_, substs)
+ | TyKind::Closure(_, substs) => {
+ substs.iter(Interner).filter_map(|a| a.ty(Interner)).any(go)
+ }
+
+ TyKind::Array(_ty, len) if len.is_unknown() => true,
+ TyKind::Array(ty, _)
+ | TyKind::Slice(ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Ref(_, _, ty) => go(ty),
+
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Never
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Dyn(_)
+ | TyKind::Function(_)
+ | TyKind::Alias(_)
+ | TyKind::Foreign(_)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => false,
+ }
+ }
+ }
+
+ pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
+ let (variant_id, substs) = match self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
+ TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
+ _ => return Vec::new(),
+ };
+
+ db.field_types(variant_id)
+ .iter()
+ .map(|(local_id, ty)| {
+ let def = Field { parent: variant_id.into(), id: local_id };
+ let ty = ty.clone().substitute(Interner, substs);
+ (def, self.derived(ty))
+ })
+ .collect()
+ }
+
+ pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
+ if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
+ substs
+ .iter(Interner)
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone()))
+ .collect()
+ } else {
+ Vec::new()
+ }
+ }
+
+ pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
+ self.autoderef_(db).map(move |ty| self.derived(ty))
+ }
+
+ fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Ty> + 'a {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+ let environment = self.env.clone();
+ autoderef(db, environment, canonical).map(|canonical| canonical.value)
+ }
+
+ // This would be nicer if it just returned an iterator, but that runs into
+ // lifetime problems, because we need to borrow temp `CrateImplDefs`.
+ pub fn iterate_assoc_items<T>(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let mut slot = None;
+ self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
+ slot = callback(assoc_item_id.into());
+ slot.is_some()
+ });
+ slot
+ }
+
+ fn iterate_assoc_items_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ callback: &mut dyn FnMut(AssocItemId) -> bool,
+ ) {
+ let def_crates = match method_resolution::def_crates(db, &self.ty, krate.id) {
+ Some(it) => it,
+ None => return,
+ };
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+
+ for impl_def in impls.for_self_ty(&self.ty) {
+ for &item in db.impl_data(*impl_def).items.iter() {
+ if callback(item) {
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
+ self.ty
+ .strip_references()
+ .as_adt()
+ .into_iter()
+ .flat_map(|(_, substs)| substs.iter(Interner))
+ .filter_map(|arg| arg.ty(Interner).cloned())
+ .map(move |ty| self.derived(ty))
+ }
+
+ pub fn iterate_method_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ // FIXME this can be retrieved from `scope`, except autoimport uses this
+ // to specify a different set, so the method needs to be split
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(Function) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_method_candidates");
+ let mut slot = None;
+
+ self.iterate_method_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let AssocItemId::FunctionId(func) = assoc_item_id {
+ if let Some(res) = callback(func.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_method_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_method_candidates_dyn(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ method_resolution::LookupMode::MethodCall,
+ &mut |_adj, id| callback(id),
+ );
+ }
+
+ pub fn iterate_path_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_path_candidates");
+ let mut slot = None;
+ self.iterate_path_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let Some(res) = callback(assoc_item_id.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_path_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_path_candidates(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ &mut |id| callback(id),
+ );
+ }
+
+ pub fn as_adt(&self) -> Option<Adt> {
+ let (adt, _subst) = self.ty.as_adt()?;
+ Some(adt.into())
+ }
+
+ pub fn as_builtin(&self) -> Option<BuiltinType> {
+ self.ty.as_builtin().map(|inner| BuiltinType { inner })
+ }
+
+ pub fn as_dyn_trait(&self) -> Option<Trait> {
+ self.ty.dyn_trait().map(Into::into)
+ }
+
+ /// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
+ /// or an empty iterator otherwise.
+ pub fn applicable_inherent_traits<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ ) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("applicable_inherent_traits");
+ self.autoderef_(db)
+ .filter_map(|ty| ty.dyn_trait())
+ .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
+ .map(Trait::from)
+ }
+
+ pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("env_traits");
+ self.autoderef_(db)
+ .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
+ .flat_map(|ty| {
+ self.env
+ .traits_in_scope_from_clauses(ty)
+ .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t))
+ })
+ .map(Trait::from)
+ }
+
+ pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
+ self.ty.impl_trait_bounds(db).map(|it| {
+ it.into_iter().filter_map(|pred| match pred.skip_binders() {
+ hir_ty::WhereClause::Implemented(trait_ref) => {
+ Some(Trait::from(trait_ref.hir_trait_id()))
+ }
+ _ => None,
+ })
+ })
+ }
+
+ pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
+ self.ty.associated_type_parent_trait(db).map(Into::into)
+ }
+
+ fn derived(&self, ty: Ty) -> Type {
+ Type { env: self.env.clone(), ty }
+ }
+
+ pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
+ // TypeWalk::walk for a Ty visits its parameters first, and only then the Ty itself.
+ // We need a different order here.
+
+ fn walk_substs(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ substs: &Substitution,
+ cb: &mut impl FnMut(Type),
+ ) {
+ for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+
+ fn walk_bounds(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ bounds: &[QuantifiedWhereClause],
+ cb: &mut impl FnMut(Type),
+ ) {
+ for pred in bounds {
+ if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
+ cb(type_.clone());
+ // Skip the self type; it's likely the type we just got the bounds from.
+ for ty in
+ trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
+ {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+ }
+ }
+
+ fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
+ let ty = type_.ty.strip_references();
+ match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => {
+ cb(type_.derived(ty.clone()));
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::AssociatedType(_, substs) => {
+ if ty.associated_type_parent_trait(db).is_some() {
+ cb(type_.derived(ty.clone()));
+ }
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::OpaqueType(_, subst) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, subst, cb);
+ }
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, &opaque_ty.substitution, cb);
+ }
+ TyKind::Placeholder(_) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+ }
+ TyKind::Dyn(bounds) => {
+ walk_bounds(
+ db,
+ &type_.derived(ty.clone()),
+ bounds.bounds.skip_binders().interned(),
+ cb,
+ );
+ }
+
+ TyKind::Ref(_, _, ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Array(ty, _)
+ | TyKind::Slice(ty) => {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+
+ TyKind::FnDef(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::Closure(.., substs) => {
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
+ walk_substs(db, type_, &substitution.0, cb);
+ }
+
+ _ => {}
+ }
+ }
+
+ walk_type(db, self, &mut cb);
+ }
+
+ pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
+ hir_ty::could_unify(db, self.env.clone(), &tys)
+ }
+
+ pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
+ hir_ty::could_coerce(db, self.env.clone(), &tys)
+ }
+
+ pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ match self.ty.kind(Interner) {
+ TyKind::Placeholder(p) => Some(TypeParam {
+ id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
+ }),
+ _ => None,
+ }
+ }
+}
+
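+/// A callable thing together with the signature it is called with: a function,
+/// a tuple struct or enum variant constructor, a closure, or a function pointer.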
+#[derive(Debug)]
+pub struct Callable {
+ ty: Type,
+ sig: CallableSig,
+ callee: Callee,
+ pub(crate) is_bound_method: bool,
+}
+
+#[derive(Debug)]
+enum Callee {
+ Def(CallableDefId),
+ Closure(ClosureId),
+ FnPtr,
+}
+
+pub enum CallableKind {
+ Function(Function),
+ TupleStruct(Struct),
+ TupleEnumVariant(Variant),
+ Closure,
+ FnPtr,
+}
+
+impl Callable {
+ pub fn kind(&self) -> CallableKind {
+ use Callee::*;
+ match self.callee {
+ Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
+ Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
+ Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
+ Closure(_) => CallableKind::Closure,
+ FnPtr => CallableKind::FnPtr,
+ }
+ }
+ pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
+ let func = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
+ _ => return None,
+ };
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ let param_list = src.value.param_list()?;
+ param_list.self_param()
+ }
+ pub fn n_params(&self) -> usize {
+ self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
+ }
+ pub fn params(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
+ let types = self
+ .sig
+ .params()
+ .iter()
+ .skip(if self.is_bound_method { 1 } else { 0 })
+ .map(|ty| self.ty.derived(ty.clone()));
+ let map_param = |it: ast::Param| it.pat().map(Either::Right);
+ let patterns = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(func)) => {
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ src.value.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ })
+ }
+ Callee::Closure(closure_id) => match closure_source(db, closure_id) {
+ Some(src) => src.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ }),
+ None => None,
+ },
+ _ => None,
+ };
+ patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
+ }
+ pub fn return_type(&self) -> Type {
+ self.ty.derived(self.sig.ret().clone())
+ }
+}
+
+fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
+ let (owner, expr_id) = db.lookup_intern_closure(closure.into());
+ let (_, source_map) = db.body_with_source_map(owner);
+ let ast = source_map.expr_syntax(expr_id).ok()?;
+ let root = ast.file_syntax(db.upcast());
+ let expr = ast.value.to_node(&root);
+ match expr {
+ ast::Expr::ClosureExpr(it) => Some(it),
+ _ => None,
+ }
+}
+
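+/// How a pattern binds the value it matches: by move, or by reference
+/// (shared or mutable).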
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum BindingMode {
+ Move,
+ Ref(Mutability),
+}
+
+/// For IDE only
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ScopeDef {
+ ModuleDef(ModuleDef),
+ GenericParam(GenericParam),
+ ImplSelfType(Impl),
+ AdtSelfType(Adt),
+ Local(Local),
+ Label(Label),
+ Unknown,
+}
+
+impl ScopeDef {
+ pub fn all_items(def: PerNs) -> ArrayVec<Self, 3> {
+ let mut items = ArrayVec::new();
+
+ match (def.take_types(), def.take_values()) {
+ (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())),
+ (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())),
+ (Some(m1), Some(m2)) => {
+ // Some items, like unit structs and enum variants, are
+ // returned as both a type and a value. Here we want
+ // to de-duplicate them.
+ if m1 != m2 {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ items.push(ScopeDef::ModuleDef(m2.into()));
+ } else {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ }
+ }
+ (None, None) => {}
+ };
+
+ if let Some(macro_def_id) = def.take_macros() {
+ items.push(ScopeDef::ModuleDef(ModuleDef::Macro(macro_def_id.into())));
+ }
+
+ if items.is_empty() {
+ items.push(ScopeDef::Unknown);
+ }
+
+ items
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.attrs(db),
+ ScopeDef::GenericParam(it) => Some(it.attrs(db)),
+ ScopeDef::ImplSelfType(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => None,
+ }
+ }
+
+ pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.module(db).map(|m| m.krate()),
+ ScopeDef::GenericParam(it) => Some(it.module(db).krate()),
+ ScopeDef::ImplSelfType(_) => None,
+ ScopeDef::AdtSelfType(it) => Some(it.module(db).krate()),
+ ScopeDef::Local(it) => Some(it.module(db).krate()),
+ ScopeDef::Label(it) => Some(it.module(db).krate()),
+ ScopeDef::Unknown => None,
+ }
+ }
+}
+
+impl From<ItemInNs> for ScopeDef {
+ fn from(item: ItemInNs) -> Self {
+ match item {
+ ItemInNs::Types(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Values(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Macros(id) => ScopeDef::ModuleDef(ModuleDef::Macro(id)),
+ }
+ }
+}
+
+pub trait HasVisibility {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
+ fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
+ let vis = self.visibility(db);
+ vis.is_visible_from(db.upcast(), module.id)
+ }
+}
+
+/// Trait for obtaining the defining crate of an item.
+pub trait HasCrate {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate;
+}
+
+impl<T: hir_def::HasModule> HasCrate for T {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db.upcast()).krate().into()
+ }
+}
+
+impl HasCrate for AssocItem {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Struct {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Union {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Field {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.parent_def(db).module(db).krate()
+ }
+}
+
+impl HasCrate for Variant {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Function {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Const {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for TypeAlias {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Type {
+ fn krate(&self, _db: &dyn HirDatabase) -> Crate {
+ self.env.krate.into()
+ }
+}
+
+impl HasCrate for Macro {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Trait {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Static {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Adt {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Module {
+ fn krate(&self, _: &dyn HirDatabase) -> Crate {
+ Module::krate(*self)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
new file mode 100644
index 000000000..c84318b2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -0,0 +1,1540 @@
+//! See `Semantics`.
+
+mod source_to_def;
+
+use std::{cell::RefCell, fmt, iter, ops};
+
+use base_db::{FileId, FileRange};
+use hir_def::{
+ body, macro_id_to_def_id,
+ resolver::{self, HasResolver, Resolver, TypeNs},
+ type_ref::Mutability,
+ AsMacroCall, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ db::AstDatabase,
+ name::{known, AsName},
+ ExpansionInfo, MacroCallId,
+};
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ algo::skip_trivia_token,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
+ match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+};
+
+use crate::{
+ db::HirDatabase,
+ semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
+ source_analyzer::{resolve_hir_path, SourceAnalyzer},
+ Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
+ HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+ Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+ /// An item
+ Def(ModuleDef),
+ /// A local binding (only value namespace)
+ Local(Local),
+ /// A type parameter
+ TypeParam(TypeParam),
+ /// A const parameter
+ ConstParam(ConstParam),
+ SelfType(Impl),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+ DeriveHelper(DeriveHelper),
+}
+
+impl PathResolution {
+ pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
+ match self {
+ PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
+ PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
+ Some(TypeNs::BuiltinType((*builtin).into()))
+ }
+ PathResolution::Def(
+ ModuleDef::Const(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::Function(_)
+ | ModuleDef::Module(_)
+ | ModuleDef::Static(_)
+ | ModuleDef::Trait(_),
+ ) => None,
+ PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
+ Some(TypeNs::TypeAliasId((*alias).into()))
+ }
+ PathResolution::BuiltinAttr(_)
+ | PathResolution::ToolModule(_)
+ | PathResolution::Local(_)
+ | PathResolution::DeriveHelper(_)
+ | PathResolution::ConstParam(_) => None,
+ PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
+ PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct TypeInfo {
+ /// The original type of the expression or pattern.
+ pub original: Type,
+ /// The adjusted type, if an adjustment happened.
+ pub adjusted: Option<Type>,
+}
+
+impl TypeInfo {
+ pub fn original(self) -> Type {
+ self.original
+ }
+
+ pub fn has_adjustment(&self) -> bool {
+ self.adjusted.is_some()
+ }
+
+ /// The adjusted type, or the original in case no adjustments occurred.
+ pub fn adjusted(self) -> Type {
+ self.adjusted.unwrap_or(self.original)
+ }
+}
+
+/// Primary API to get semantic information, like types, from syntax trees.
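+///
+/// A minimal usage sketch; `db` is assumed to be a reference to a concrete
+/// database type implementing [`HirDatabase`], and `file_id` a [`FileId`] for a
+/// file that is part of a crate:
+///
+/// ```ignore
+/// let sema = Semantics::new(db);
+/// let source_file = sema.parse(file_id);
+/// // Query the type of every expression in the file.
+/// for expr in source_file.syntax().descendants().filter_map(ast::Expr::cast) {
+///     if let Some(info) = sema.type_of_expr(&expr) {
+///         // `adjusted()` falls back to the original type if no adjustment happened.
+///         let _ty = info.adjusted();
+///     }
+/// }
+/// ```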
+pub struct Semantics<'db, DB> {
+ pub db: &'db DB,
+ imp: SemanticsImpl<'db>,
+}
+
+pub struct SemanticsImpl<'db> {
+ pub db: &'db dyn HirDatabase,
+ s2d_cache: RefCell<SourceToDefCache>,
+ expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+ // Root node to HirFileId cache
+ cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+ // MacroCall to its expansion's HirFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+}
+
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Semantics {{ ... }}")
+ }
+}
+
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+ pub fn new(db: &DB) -> Semantics<'_, DB> {
+ let impl_ = SemanticsImpl::new(db);
+ Semantics { db, imp: impl_ }
+ }
+
+ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ self.imp.parse(file_id)
+ }
+
+ pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ self.imp.parse_or_expand(file_id)
+ }
+
+ pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ self.imp.expand(macro_call)
+ }
+
+ /// If `item` has an attribute macro attached to it, expands it.
+ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ self.imp.expand_attr_macro(item)
+ }
+
+ pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ self.imp.expand_derive_as_pseudo_attr_macro(attr)
+ }
+
+ pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ self.imp.resolve_derive_macro(derive)
+ }
+
+ pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ self.imp.expand_derive_macro(derive)
+ }
+
+ pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ self.imp.is_attr_macro_call(item)
+ }
+
+ pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
+ self.imp.is_derive_annotated(item)
+ }
+
+ pub fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_attr_macro(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_derive_as_pseudo_attr_macro(
+ actual_macro_call,
+ speculative_args,
+ token_to_map,
+ )
+ }
+
+ /// Descend the token into macro calls to its first mapped counterpart.
+ pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_single(token)
+ }
+
+ /// Descend the token into macro calls to all its mapped counterparts.
+ pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros(token)
+ }
+
+ /// Descend the token into macro calls to all its mapped counterparts that have the same text as the input token.
+ ///
+ /// Returns the original, non-descended token if none of the mapped counterparts have the same text.
+ pub fn descend_into_macros_with_same_text(
+ &self,
+ token: SyntaxToken,
+ ) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros_with_same_text(token)
+ }
+
+ pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_with_kind_preference(token)
+ }
+
+ /// Maps a node down by mapping its first and last token down.
+ pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ self.imp.descend_node_into_attributes(node)
+ }
+
+ /// Search for a definition's source and cache its syntax tree
+ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ self.imp.source(def)
+ }
+
+ pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
+ self.imp.find_file(syntax_node).file_id
+ }
+
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ /// If upmapping is not possible, this will fall back to the range of the macro call of the
+ /// macro file the node resides in.
+ pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ self.imp.original_range(node)
+ }
+
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ self.imp.original_range_opt(node)
+ }
+
+ /// Attempts to map the node out of macro expanded files.
+ /// This only works for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.imp.original_ast_node(node)
+ }
+
+ pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
+ self.imp.diagnostics_display_range(diagnostics)
+ }
+
+ pub fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
+ }
+
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_with_macros(node)
+ }
+
+ pub fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_at_offset_with_macros(node, offset)
+ }
+
+ /// Find an AstNode by offset inside a SyntaxNode; if it is inside a *macro file*,
+ /// search upwards until a node of the target AstNode type is found.
+ pub fn find_node_at_offset_with_macros<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+ }
+
+ /// Find an AstNode by offset inside a SyntaxNode; if it is inside a *macro call*,
+ /// descend into the expansion and search again.
+ pub fn find_node_at_offset_with_descend<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+ }
+
+ /// Find an AstNode by offset inside a SyntaxNode; if it is inside a *macro call*,
+ /// descend into the expansion and search again.
+ pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+ &'slf self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = N> + 'slf {
+ self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
+ }
+
+ pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ self.imp.resolve_lifetime_param(lifetime)
+ }
+
+ pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ self.imp.resolve_label(lifetime)
+ }
+
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ self.imp.resolve_type(ty)
+ }
+
+ pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+ self.imp.resolve_trait(trait_)
+ }
+
+ // FIXME: Figure out a nice interface to inspect adjustments
+ pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.imp.is_implicit_reborrow(expr)
+ }
+
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.imp.type_of_expr(expr)
+ }
+
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.imp.type_of_pat(pat)
+ }
+
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.imp.type_of_self(param)
+ }
+
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.imp.pattern_adjustments(pat)
+ }
+
+ pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.imp.binding_mode_of_pat(pat)
+ }
+
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+ self.imp.resolve_method_call(call).map(Function::from)
+ }
+
+ pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.imp.resolve_method_call_as_callable(call)
+ }
+
+ pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.imp.resolve_field(field)
+ }
+
+ pub fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.imp.resolve_record_field(field)
+ }
+
+ pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.imp.resolve_record_pat_field(field)
+ }
+
+ pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ self.imp.resolve_macro_call(macro_call)
+ }
+
+ pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ self.imp.is_unsafe_macro_call(macro_call)
+ }
+
+ pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ self.imp.resolve_attr_macro_call(item)
+ }
+
+ pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.imp.resolve_path(path)
+ }
+
+ pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ self.imp.resolve_extern_crate(extern_crate)
+ }
+
+ pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
+ self.imp.resolve_variant(record_lit).map(VariantDef::from)
+ }
+
+ pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.imp.resolve_bind_pat_to_const(pat)
+ }
+
+ pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.imp.record_literal_missing_fields(literal)
+ }
+
+ pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.imp.record_pattern_missing_fields(pattern)
+ }
+
+ pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+ let src = self.imp.find_file(src.syntax()).with_value(src).cloned();
+ T::to_def(&self.imp, src)
+ }
+
+ pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+ self.imp.to_module_def(file).next()
+ }
+
+ pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.imp.to_module_def(file)
+ }
+
+ pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.imp.scope(node)
+ }
+
+ pub fn scope_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SemanticsScope<'db>> {
+ self.imp.scope_at_offset(node, offset)
+ }
+
+ pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ self.imp.scope_for_def(def)
+ }
+
+ pub fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.imp.assert_contains_node(node)
+ }
+
+ pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ self.imp.is_unsafe_method_call(method_call_expr)
+ }
+
+ pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ self.imp.is_unsafe_ref_expr(ref_expr)
+ }
+
+ pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ self.imp.is_unsafe_ident_pat(ident_pat)
+ }
+}
+
+impl<'db> SemanticsImpl<'db> {
+ fn new(db: &'db dyn HirDatabase) -> Self {
+ SemanticsImpl {
+ db,
+ s2d_cache: Default::default(),
+ cache: Default::default(),
+ expansion_info_cache: Default::default(),
+ macro_call_cache: Default::default(),
+ }
+ }
+
+ fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ let tree = self.db.parse(file_id).tree();
+ self.cache(tree.syntax().clone(), file_id.into());
+ tree
+ }
+
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ }
+
+ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
+ let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+ let node = self.parse_or_expand(file_id)?;
+ Some(node)
+ }
+
+ fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(item.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+ self.parse_or_expand(macro_call_id.as_file())
+ }
+
+ fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(attr.clone());
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
+ })?;
+ self.parse_or_expand(call_id.as_file())
+ }
+
+ fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ let calls = self.derive_macro_calls(attr)?;
+ self.with_ctx(|ctx| {
+ Some(
+ calls
+ .into_iter()
+ .map(|call| {
+ macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
+ })
+ .collect(),
+ )
+ })
+ }
+
+ fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ let res: Vec<_> = self
+ .derive_macro_calls(attr)?
+ .into_iter()
+ .flat_map(|call| {
+ let file_id = call?.as_file();
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ })
+ .collect();
+ Some(res)
+ }
+
+ fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, &adt);
+ let src = InFile::new(file_id, attr.clone());
+ self.with_ctx(|ctx| {
+ let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
+ Some(res.to_vec())
+ })
+ }
+
+ fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, adt);
+ self.with_ctx(|ctx| ctx.has_derives(adt))
+ }
+
+ fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ let file_id = self.find_file(item.syntax()).file_id;
+ let src = InFile::new(file_id, item.clone());
+ self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
+ }
+
+ fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let SourceAnalyzer { file_id, resolver, .. } =
+ self.analyze_no_infer(actual_macro_call.syntax())?;
+ let macro_call = InFile::new(file_id, actual_macro_call);
+ let krate = resolver.krate();
+ let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+ resolver
+ .resolve_path_as_macro(self.db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(self.db.upcast(), it))
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_attr(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let macro_call = self.wrap_node_infile(actual_macro_call.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let attr = self.wrap_node_infile(actual_macro_call.clone());
+ let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
+ let macro_call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ // This might not be the correct way to do this, but it works for now
+ fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ let mut res = smallvec![];
+ let tokens = (|| {
+ let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
+ let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
+ Some((first, last))
+ })();
+ let (first, last) = match tokens {
+ Some(it) => it,
+ None => return res,
+ };
+
+ if first == last {
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ false
+ });
+ } else {
+ // Descend first and last token, then zip them to look for the node they belong to
+ let mut scratch: SmallVec<[_; 1]> = smallvec![];
+ self.descend_into_macros_impl(first, &mut |token| {
+ scratch.push(token);
+ false
+ });
+
+ let mut scratch = scratch.into_iter();
+ self.descend_into_macros_impl(
+ last,
+ &mut |InFile { value: last, file_id: last_fid }| {
+ if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+ if first_fid == last_fid {
+ if let Some(p) = first.parent() {
+ let range = first.text_range().cover(last.text_range());
+ let node = find_root(&p)
+ .covering_element(range)
+ .ancestors()
+ .take_while(|it| it.text_range() == range)
+ .find_map(N::cast);
+ if let Some(node) = node {
+ res.push(node);
+ }
+ }
+ }
+ }
+ false
+ },
+ );
+ }
+ res
+ }
+
+ fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res.push(value);
+ false
+ });
+ res
+ }
+
+ fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let text = token.text();
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if value.text() == text {
+ res.push(value);
+ }
+ false
+ });
+ if res.is_empty() {
+ res.push(token);
+ }
+ res
+ }
+
+ fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
+ node.parent().map_or(kind, |it| it.kind())
+ }
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let preferred_kind = fetch_kind(&token);
+ let mut res = None;
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if fetch_kind(&value) == preferred_kind {
+ res = Some(value);
+ true
+ } else {
+ if res.is_none() {
+ res = Some(value)
+ }
+ false
+ }
+ });
+ res.unwrap_or(token)
+ }
+
+ fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ let mut res = token.clone();
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res = value;
+ true
+ });
+ res
+ }
+
+ fn descend_into_macros_impl(
+ &self,
+ token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ ) {
+ let _p = profile::span("descend_into_macros");
+ let parent = match token.parent() {
+ Some(it) => it,
+ None => return,
+ };
+ let sa = match self.analyze_no_infer(&parent) {
+ Some(it) => it,
+ None => return,
+ };
+ let def_map = sa.resolver.def_map();
+
+ let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
+ let mut cache = self.expansion_info_cache.borrow_mut();
+ let mut mcache = self.macro_call_cache.borrow_mut();
+
+ let mut process_expansion_for_token =
+ |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+ .as_ref()?;
+
+ {
+ let InFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id);
+ }
+
+ let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let len = stack.len();
+
+ // requeue the tokens we got from mapping our current token down
+ stack.extend(mapped_tokens);
+ // if the length changed we have found a mapping for the token
+ (stack.len() != len).then(|| ())
+ };
+
+ // Remap the next token in the queue into the macro call it's in; if it is not remapped,
+ // either because it is not inside a macro call or because it is unused, push it into the result vec;
+ // otherwise push the remapped tokens back into the queue, as they can potentially be remapped again.
+ while let Some(token) = stack.pop() {
+ self.db.unwind_if_cancelled();
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force-populate the dyn cache for items that don't have an attribute anyway
+ return None;
+ }
+ Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(item),
+ token.as_ref(),
+ );
+ }
+
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
+
+ if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall = token.with_value(macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
+ }
+ };
+ process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+ } else if let Some(meta) = ast::Meta::cast(parent.clone()) {
+ // An attribute we failed to expand earlier; this might be a derive invocation
+ // or a derive helper attribute.
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(adt.into()),
+ token.as_ref(),
+ );
+ }
+ None => Some(adt),
+ }
+ } else {
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
+ return None;
+ }
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
+ let helpers =
+ def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
+ let item = Some(adt.into());
+ let mut res = None;
+ for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_file(),
+ item.clone(),
+ token.as_ref(),
+ ));
+ }
+ res
+ } else {
+ None
+ }
+ })()
+ .is_none();
+
+ if was_not_remapped && f(token) {
+ break;
+ }
+ }
+ }
+
+ // Note: this return type is deliberate, as [`find_nodes_at_offset_with_descend`] wants to stop
+ // traversing the inner iterator when it finds a node.
+ // The outer iterator is over the token's descendants;
+ // the inner iterator is the ancestors of a descendant.
+ fn descend_node_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
+ node.token_at_offset(offset)
+ .map(move |token| self.descend_into_macros(token))
+ .map(|descendants| {
+ descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
+ })
+ // Re-order the ancestor streams from token_at_offset so that the one whose first node is smaller comes first.
+ // See algo::ancestors_at_offset, which uses the same approach.
+ .kmerge_by(|left, right| {
+ left.clone()
+ .map(|node| node.text_range().len())
+ .lt(right.clone().map(|node| node.text_range().len()))
+ })
+ }
+
+ fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ let node = self.find_file(node);
+ node.original_file_range(self.db.upcast())
+ }
+
+ fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ let node = self.find_file(node);
+ node.original_file_range_opt(self.db.upcast())
+ }
+
+ fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id);
+ value
+ },
+ )
+ }
+
+ fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ let root = self.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|it| it.to_node(&root));
+ node.as_ref().original_file_range(self.db.upcast())
+ }
+
+ fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
+ }
+
+ fn ancestors_with_macros(
+ &self,
+ node: SyntaxNode,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ let node = self.find_file(&node);
+ let db = self.db.upcast();
+ iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
+ match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ self.cache(value.clone(), file_id);
+ file_id.call_node(db)
+ }
+ }
+ })
+ .map(|it| it.value)
+ }
+
+ fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ node.token_at_offset(offset)
+ .map(|token| self.token_ancestors_with_macros(token))
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+ }
+
+ fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ let text = lifetime.text();
+ let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
+ let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
+ gpl.lifetime_params()
+ .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
+ })?;
+ let src = self.wrap_node_infile(lifetime_param);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ let text = lifetime.text();
+ let label = lifetime.syntax().ancestors().find_map(|syn| {
+ let label = match_ast! {
+ match syn {
+ ast::ForExpr(it) => it.label(),
+ ast::WhileExpr(it) => it.label(),
+ ast::LoopExpr(it) => it.label(),
+ ast::BlockExpr(it) => it.label(),
+ _ => None,
+ }
+ };
+ label.filter(|l| {
+ l.lifetime()
+ .and_then(|lt| lt.lifetime_ident_token())
+ .map_or(false, |lt| lt.text() == text)
+ })
+ })?;
+ let src = self.wrap_node_infile(label);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ let analyze = self.analyze(ty.syntax())?;
+ let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+ let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
+ .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
+ Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+ }
+
+ fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ let analyze = self.analyze(path.syntax())?;
+ let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+ let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+ match analyze
+ .resolver
+ .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
+ {
+ TypeNs::TraitId(id) => Some(Trait { id }),
+ _ => None,
+ }
+ }
+
+ fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
+ }
+
+ fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.analyze(expr.syntax())?
+ .type_of_expr(self.db, expr)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.analyze(pat.syntax())?
+ .type_of_pat(self.db, pat)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.analyze(param.syntax())?.type_of_self(self.db, param)
+ }
+
+ fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.analyze(pat.syntax())
+ .and_then(|it| it.pattern_adjustments(self.db, pat))
+ .unwrap_or_default()
+ }
+
+ fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
+ }
+
+ fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ self.analyze(call.syntax())?.resolve_method_call(self.db, call)
+ }
+
+ fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
+ }
+
+ fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_field(self.db, field)
+ }
+
+ fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.analyze(field.syntax())?.resolve_record_field(self.db, field)
+ }
+
+ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
+ }
+
+ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ let sa = self.analyze(macro_call.syntax())?;
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.resolve_macro_call(self.db, macro_call)
+ }
+
+ fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ let sa = match self.analyze(macro_call.syntax()) {
+ Some(it) => it,
+ None => return false,
+ };
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.is_unsafe_macro_call(self.db, macro_call)
+ }
+
+ fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ let item_in_file = self.wrap_node_infile(item.clone());
+ let id = self.with_ctx(|ctx| {
+ let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
+ macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+ })?;
+ Some(Macro { id })
+ }
+
+ fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.analyze(path.syntax())?.resolve_path(self.db, path)
+ }
+
+ fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ let krate = self.scope(extern_crate.syntax())?.krate();
+ let name = extern_crate.name_ref()?.as_name();
+ if name == known::SELF_PARAM {
+ return Some(krate);
+ }
+ krate
+ .dependencies(self.db)
+ .into_iter()
+ .find_map(|dep| (dep.name == name).then(|| dep.krate))
+ }
+
+ fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
+ self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
+ }
+
+ fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
+ }
+
+ fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.analyze(literal.syntax())
+ .and_then(|it| it.record_literal_missing_fields(self.db, literal))
+ .unwrap_or_default()
+ }
+
+ fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.analyze(pattern.syntax())
+ .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
+ .unwrap_or_default()
+ }
+
+ fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
+ let mut cache = self.s2d_cache.borrow_mut();
+ let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
+ f(&mut ctx)
+ }
+
+ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
+ }
+
+ fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ })
+ }
+
+ fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ self.analyze_with_offset_no_infer(node, offset).map(
+ |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ },
+ )
+ }
+
+ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ let file_id = self.db.lookup_intern_trait(def.id).id.file_id();
+ let resolver = def.id.resolver(self.db.upcast());
+ SemanticsScope { db: self.db, file_id, resolver }
+ }
+
+ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ let res = def.source(self.db)?;
+ self.cache(find_root(res.value.syntax()), res.file_id);
+ Some(res)
+ }
+
+ /// Returns `None` if the file of the node is not part of a crate.
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, true)
+ }
+
+ /// Returns `None` if the file of the node is not part of a crate.
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, false)
+ }
+
+ fn analyze_with_offset_no_infer(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, Some(offset), false)
+ }
+
+ fn analyze_impl(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ infer_body: bool,
+ ) -> Option<SourceAnalyzer> {
+ let _p = profile::span("Semantics::analyze_impl");
+ let node = self.find_file(node);
+
+ let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
+ Some(it) => it,
+ None => return None,
+ };
+
+ let resolver = match container {
+ ChildContainer::DefWithBodyId(def) => {
+ return Some(if infer_body {
+ SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ } else {
+ SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+ })
+ }
+ ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
+ };
+ Some(SourceAnalyzer::new_for_resolver(resolver, node))
+ }
+
+ fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+ assert!(root_node.parent().is_none());
+ let mut cache = self.cache.borrow_mut();
+ let prev = cache.insert(root_node, file_id);
+ assert!(prev.is_none() || prev == Some(file_id))
+ }
+
+ fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.find_file(node);
+ }
+
+ fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+ let cache = self.cache.borrow();
+ cache.get(root_node).copied()
+ }
+
+ fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
+ let InFile { file_id, .. } = self.find_file(node.syntax());
+ InFile::new(file_id, node)
+ }
+
+ /// Wraps the node in an [`InFile`] with the file id it belongs to.
+ fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
+ let root_node = find_root(node);
+ let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+ panic!(
+ "\n\nFailed to lookup {:?} in this Semantics.\n\
+ Make sure to use only query nodes, derived from this instance of Semantics.\n\
+ root node: {:?}\n\
+ known nodes: {}\n\n",
+ node,
+ root_node,
+ self.cache
+ .borrow()
+ .keys()
+ .map(|it| format!("{:?}", it))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+ InFile::new(file_id, node)
+ }
+
+ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ method_call_expr
+ .receiver()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let ty = self.type_of_expr(&field_expr.expr()?)?.original;
+ if !ty.is_packed(self.db) {
+ return None;
+ }
+
+ let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let res = match func.self_param(self.db)?.access(self.db) {
+ Access::Shared | Access::Exclusive => true,
+ Access::Owned => false,
+ };
+ Some(res)
+ })
+ .unwrap_or(false)
+ }
+
+ fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ ref_expr
+ .expr()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let expr = field_expr.expr()?;
+ self.type_of_expr(&expr)
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+
+ // FIXME This needs layout computation to be correct. It will highlight
+ // more than it should with the current implementation.
+ }
+
+ fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ if ident_pat.ref_token().is_none() {
+ return false;
+ }
+
+ ident_pat
+ .syntax()
+ .parent()
+ .and_then(|parent| {
+ // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
+ // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
+ // so this tries to look up the `IdentPat` anywhere along that structure to the
+ // `RecordPat` so we can get the containing type.
+ let record_pat = ast::RecordPatField::cast(parent.clone())
+ .and_then(|record_pat| record_pat.syntax().parent())
+ .or_else(|| Some(parent.clone()))
+ .and_then(|parent| {
+ ast::RecordPatFieldList::cast(parent)?
+ .syntax()
+ .parent()
+ .and_then(ast::RecordPat::cast)
+ });
+
+ // If this doesn't match a `RecordPat`, fall back to a `LetStmt` to see if
+ // this is initialized from a `FieldExpr`.
+ if let Some(record_pat) = record_pat {
+ self.type_of_pat(&ast::Pat::RecordPat(record_pat))
+ } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
+ let field_expr = match let_stmt.initializer()? {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+
+ self.type_of_expr(&field_expr.expr()?)
+ } else {
+ None
+ }
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+ }
+}
+
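+/// Maps a macro call back to the [`MacroId`] of the macro definition it invokes.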
+fn macro_call_to_macro_id(
+ ctx: &mut SourceToDefCtx<'_, '_>,
+ db: &dyn AstDatabase,
+ macro_call_id: MacroCallId,
+) -> Option<MacroId> {
+ let loc = db.lookup_intern_macro_call(macro_call_id);
+ match loc.def.kind {
+ hir_expand::MacroDefKind::Declarative(it)
+ | hir_expand::MacroDefKind::BuiltIn(_, it)
+ | hir_expand::MacroDefKind::BuiltInAttr(_, it)
+ | hir_expand::MacroDefKind::BuiltInDerive(_, it)
+ | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
+ ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
+ ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ }
+}
+
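+/// Maps a source-level construct to its HIR definition, e.g. an `ast::Fn` to a
+/// [`Function`] or an `ast::Module` to a [`Module`]; see the `to_def_impls!`
+/// invocation below for all supported pairs. Usually used via [`Semantics::to_def`].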
+pub trait ToDef: AstNode + Clone {
+ type Def;
+
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+}
+
+macro_rules! to_def_impls {
+ ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
+ impl ToDef for $ast {
+ type Def = $def;
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+ sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
+ }
+ }
+ )*}
+}
+
+to_def_impls![
+ (crate::Module, ast::Module, module_to_def),
+ (crate::Module, ast::SourceFile, source_file_to_def),
+ (crate::Struct, ast::Struct, struct_to_def),
+ (crate::Enum, ast::Enum, enum_to_def),
+ (crate::Union, ast::Union, union_to_def),
+ (crate::Trait, ast::Trait, trait_to_def),
+ (crate::Impl, ast::Impl, impl_to_def),
+ (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
+ (crate::Const, ast::Const, const_to_def),
+ (crate::Static, ast::Static, static_to_def),
+ (crate::Function, ast::Fn, fn_to_def),
+ (crate::Field, ast::RecordField, record_field_to_def),
+ (crate::Field, ast::TupleField, tuple_field_to_def),
+ (crate::Variant, ast::Variant, enum_variant_to_def),
+ (crate::TypeParam, ast::TypeParam, type_param_to_def),
+ (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
+ (crate::ConstParam, ast::ConstParam, const_param_to_def),
+ (crate::GenericParam, ast::GenericParam, generic_param_to_def),
+ (crate::Macro, ast::Macro, macro_to_def),
+ (crate::Local, ast::IdentPat, bind_pat_to_def),
+ (crate::Local, ast::SelfParam, self_param_to_def),
+ (crate::Label, ast::Label, label_to_def),
+ (crate::Adt, ast::Adt, adt_to_def),
+];
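+
+// For illustration only (not part of the original source): a single entry such as
+// `(crate::Struct, ast::Struct, struct_to_def)` expands to roughly the following impl:
+//
+//     impl ToDef for ast::Struct {
+//         type Def = crate::Struct;
+//         fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+//             sema.with_ctx(|ctx| ctx.struct_to_def(src)).map(<crate::Struct>::from)
+//         }
+//     }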
+
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+ node.ancestors().last().unwrap()
+}
+
+/// `SemanticsScope` encapsulates the notion of a scope (the set of visible
+/// names) at a particular program point.
+///
+/// It is a bit tricky, as scopes do not really exist inside the compiler.
+/// Rather, the compiler directly computes for each reference the definition it
+/// refers to. It might transiently compute the explicit scope map while doing
+/// so, but, generally, this is not something left after the analysis.
+///
+/// However, we do very much need explicit scopes for IDE purposes --
+/// completion, at its core, lists the contents of the current scope. The notion
+/// of scope is also useful to answer questions like "what would be the meaning
+/// of this piece of code if we inserted it into this position?".
+///
+/// So `SemanticsScope` is constructed from a specific program point (a syntax
+/// node or just a raw offset) and provides access to the set of visible names
+/// on a somewhat best-effort basis.
+///
+/// Note that if you are wondering "what does this specific existing name mean?",
+/// you'd better use the `resolve_` family of methods.
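+///
+/// A minimal usage sketch (illustrative, not part of the original docs); `scope` is
+/// assumed to have been obtained from the surrounding `Semantics` machinery:
+///
+/// ```ignore
+/// // collect every name visible at the position the scope was constructed for,
+/// // which is essentially what completion does
+/// let mut names = Vec::new();
+/// scope.process_all_names(&mut |name, _def| names.push(name.to_string()));
+/// ```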
+#[derive(Debug)]
+pub struct SemanticsScope<'a> {
+ pub db: &'a dyn HirDatabase,
+ file_id: HirFileId,
+ resolver: Resolver,
+}
+
+impl<'a> SemanticsScope<'a> {
+ pub fn module(&self) -> Module {
+ Module { id: self.resolver.module() }
+ }
+
+ pub fn krate(&self) -> Crate {
+ Crate { id: self.resolver.krate() }
+ }
+
+ pub(crate) fn resolver(&self) -> &Resolver {
+ &self.resolver
+ }
+
+    /// Note: `VisibleTraits` should be treated as an opaque type, passed into
+    /// `Type` methods that expect a set of traits in scope.
+ pub fn visible_traits(&self) -> VisibleTraits {
+ let resolver = &self.resolver;
+ VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
+ }
+
+ pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let scope = self.resolver.names_in_scope(self.db.upcast());
+ for (name, entries) in scope {
+ for entry in entries {
+ let def = match entry {
+ resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
+ resolver::ScopeDef::Unknown => ScopeDef::Unknown,
+ resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+ resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+ resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
+ resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+ None => continue,
+ },
+ resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Label(Label { parent, label_id }),
+ None => continue,
+ },
+ };
+ f(name.clone(), def)
+ }
+ }
+ }
+
+    /// Resolve a path as if it was written at the given scope. This is
+    /// necessarily a heuristic, as it doesn't take hygiene into account.
+ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
+ let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+ let path = Path::from_src(path.clone(), &ctx)?;
+ resolve_hir_path(self.db, &self.resolver, &path)
+ }
+
+ /// Iterates over associated types that may be specified after the given path (using
+ /// `Ty::Assoc` syntax).
+ pub fn assoc_type_shorthand_candidates<R>(
+ &self,
+ resolution: &PathResolution,
+ mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
+ ) -> Option<R> {
+ let def = self.resolver.generic_def()?;
+ hir_ty::associated_type_shorthand_candidates(
+ self.db,
+ def,
+ resolution.in_type_ns()?,
+ |name, _, id| cb(name, id.into()),
+ )
+ }
+}
+
+pub struct VisibleTraits(pub FxHashSet<TraitId>);
+
+impl ops::Deref for VisibleTraits {
+ type Target = FxHashSet<TraitId>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
new file mode 100644
index 000000000..ba9a1cfb6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -0,0 +1,473 @@
+//! Maps *syntax* of various definitions to their semantic ids.
+//!
+//! This is a very interesting module, and, in some sense, can be considered the
+//! heart of the IDE parts of rust-analyzer.
+//!
+//! This module solves the following problem:
+//!
+//! Given a piece of syntax, find the corresponding semantic definition (def).
+//!
+//! This problem is a part of more-or-less every IDE feature implemented. Every
+//! IDE functionality (like goto definition) conceptually starts with a
+//! specific cursor position in a file. Starting with this text offset, we first
+//! figure out what syntactic construct we are at: is this a pattern, an
+//! expression, or an item definition?
+//!
+//! Knowing only the syntax gives us relatively little info. For example,
+//! looking at the syntax of the function we can realise that it is a part of an
+//! `impl` block, but we won't be able to tell what trait function the current
+//! function overrides, and whether it does that correctly. For that, we need to
+//! go from [`ast::Fn`] to [`crate::Function`], and that's exactly what this
+//! module does.
+//!
+//! As syntax trees are values and don't know their place of origin/identity,
+//! this module also requires [`InFile`] wrappers to understand which specific
+//! real or macro-expanded file the tree comes from.
+//!
+//! The actual algorithm to resolve syntax to def is curious in two aspects:
+//!
+//! * It is recursive
+//! * It uses the inverse algorithm (what is the syntax for this def?)
+//!
+//! Specifically, the algorithm goes like this:
+//!
+//! 1. Find the syntactic container for the syntax. For example, a field's
+//!    container is the struct, and the struct's container is a module.
+//! 2. Recursively get the def corresponding to the container.
+//! 3. Ask the container def for all child defs. These child defs contain
+//!    the answer and the answer's siblings.
+//! 4. For each child def, ask for its source.
+//! 5. The child def whose source is the syntax node we've started with
+//!    is the answer.
+//!
+//! It's interesting that both Roslyn and Kotlin contain code of a very similar
+//! shape.
+//!
+//! Let's take a look at Roslyn:
+//!
+//! <https://github.com/dotnet/roslyn/blob/36a0c338d6621cc5fe34b79d414074a95a6a489c/src/Compilers/CSharp/Portable/Compilation/SyntaxTreeSemanticModel.cs#L1403-L1429>
+//! <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1403>
+//!
+//! The `GetDeclaredType` takes `Syntax` as input, and returns `Symbol` as
+//! output. First, it retrieves a `Symbol` for parent `Syntax`:
+//!
+//! * <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1423>
+//!
+//! Then, it iterates the parent symbol's children, looking for one which has
+//! the same text span as the original node:
+//!
+//! <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1786>
+//!
+//! Now, let's look at Kotlin:
+//!
+//! <https://github.com/JetBrains/kotlin/blob/a288b8b00e4754a1872b164999c6d3f3b8c8994a/idea/idea-frontend-fir/idea-fir-low-level-api/src/org/jetbrains/kotlin/idea/fir/low/level/api/FirModuleResolveStateImpl.kt#L93-L125>
+//!
+//! This function starts with a syntax node (`KtExpression` is syntax, like all
+//! `Kt` nodes), and returns a def. It uses
+//! `getNonLocalContainingOrThisDeclaration` to get syntactic container for a
+//! current node. Then, `findSourceNonLocalFirDeclaration` gets `Fir` for this
+//! parent. Finally, `findElementIn` function traverses `Fir` children to find
+//! one with the same source we originally started with.
+//!
+//! One question is left though -- where does the recursion stop? This happens
+//! when we get to the file syntax node, which doesn't have a syntactic parent.
+//! In that case, we loop through all the crates that might contain this file
+//! and look for a module whose source is the given file.
+//!
+//! Note that the logic in this module is somewhat fundamentally imprecise --
+//! due to conditional compilation and `#[path]` attributes, there's no
+//! injective mapping from syntax nodes to defs. This is not an edge case --
+//! more or less every item in a `lib.rs` is a part of two distinct crates: a
+//! library with `--cfg test` and a library without.
+//!
+//! At the moment, we don't really handle this well and return the first answer
+//! that works. Ideally, we should first let the caller pick a specific
+//! active crate for a given position, and then provide an API to resolve all
+//! syntax nodes against this specific crate.
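+//!
+//! As a rough sketch (illustrative only, not part of the original docs), the lookup for a
+//! single item boils down to the following, where `find_container`, `cache_for` and
+//! `keys::FUNCTION` are the helpers defined further down in this file:
+//!
+//! ```ignore
+//! // given `src: InFile<ast::Fn>`:
+//! // 1. find the syntactic container of the node (e.g. an `impl` block or a module)
+//! let container = ctx.find_container(src.as_ref().map(|it| it.syntax()))?;
+//! // 2./3. resolve the container to a def and ask it for all of its children, keyed by source
+//! let children = ctx.cache_for(container, src.file_id);
+//! // 4./5. the child whose source is the node we started with is the answer
+//! let def: Option<FunctionId> = children[keys::FUNCTION].get(&src.value).copied();
+//! ```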
+
+use base_db::FileId;
+use hir_def::{
+ attr::AttrId,
+ child_by_source::ChildBySource,
+ dyn_map::DynMap,
+ expr::{LabelId, PatId},
+ keys::{self, Key},
+ AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
+ GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::AsName, HirFileId, MacroCallId};
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::impl_from;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxNode,
+};
+
+use crate::{db::HirDatabase, InFile};
+
+pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>;
+
+pub(super) struct SourceToDefCtx<'a, 'b> {
+ pub(super) db: &'b dyn HirDatabase,
+ pub(super) cache: &'a mut SourceToDefCache,
+}
+
+impl SourceToDefCtx<'_, '_> {
+ pub(super) fn file_to_def(&mut self, file: FileId) -> SmallVec<[ModuleId; 1]> {
+ let _p = profile::span("SourceBinder::to_module_def");
+ let mut mods = SmallVec::new();
+ for &crate_id in self.db.relevant_crates(file).iter() {
+ // FIXME: inner items
+ let crate_def_map = self.db.crate_def_map(crate_id);
+ mods.extend(
+ crate_def_map
+ .modules_for_file(file)
+ .map(|local_id| crate_def_map.module_id(local_id)),
+ )
+ }
+ mods
+ }
+
+ pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
+ let _p = profile::span("module_to_def");
+ let parent_declaration = src
+ .syntax()
+ .ancestors_with_macros_skip_attr_item(self.db.upcast())
+ .find_map(|it| it.map(ast::Module::cast).transpose());
+
+ let parent_module = match parent_declaration {
+ Some(parent_declaration) => self.module_to_def(parent_declaration),
+ None => {
+ let file_id = src.file_id.original_file(self.db.upcast());
+ self.file_to_def(file_id).get(0).copied()
+ }
+ }?;
+
+ let child_name = src.value.name()?.as_name();
+ let def_map = parent_module.def_map(self.db.upcast());
+ let &child_id = def_map[parent_module.local_id].children.get(&child_name)?;
+ Some(def_map.module_id(child_id))
+ }
+
+ pub(super) fn source_file_to_def(&mut self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
+ let _p = profile::span("source_file_to_def");
+ let file_id = src.file_id.original_file(self.db.upcast());
+ self.file_to_def(file_id).get(0).copied()
+ }
+
+ pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
+ self.to_def(src, keys::TRAIT)
+ }
+ pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
+ self.to_def(src, keys::IMPL)
+ }
+ pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> {
+ self.to_def(src, keys::FUNCTION)
+ }
+ pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> {
+ self.to_def(src, keys::STRUCT)
+ }
+ pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> {
+ self.to_def(src, keys::ENUM)
+ }
+ pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> {
+ self.to_def(src, keys::UNION)
+ }
+ pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> {
+ self.to_def(src, keys::STATIC)
+ }
+ pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> {
+ self.to_def(src, keys::CONST)
+ }
+ pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> {
+ self.to_def(src, keys::TYPE_ALIAS)
+ }
+ pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> {
+ self.to_def(src, keys::RECORD_FIELD)
+ }
+ pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> {
+ self.to_def(src, keys::TUPLE_FIELD)
+ }
+ pub(super) fn enum_variant_to_def(
+ &mut self,
+ src: InFile<ast::Variant>,
+ ) -> Option<EnumVariantId> {
+ self.to_def(src, keys::VARIANT)
+ }
+ pub(super) fn adt_to_def(
+ &mut self,
+ InFile { file_id, value }: InFile<ast::Adt>,
+ ) -> Option<AdtId> {
+ match value {
+ ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId),
+ ast::Adt::Struct(it) => {
+ self.struct_to_def(InFile::new(file_id, it)).map(AdtId::StructId)
+ }
+ ast::Adt::Union(it) => self.union_to_def(InFile::new(file_id, it)).map(AdtId::UnionId),
+ }
+ }
+ pub(super) fn bind_pat_to_def(
+ &mut self,
+ src: InFile<ast::IdentPat>,
+ ) -> Option<(DefWithBodyId, PatId)> {
+ let container = self.find_pat_or_label_container(src.syntax())?;
+ let (body, source_map) = self.db.body_with_source_map(container);
+ let src = src.map(ast::Pat::from);
+ let pat_id = source_map.node_pat(src.as_ref())?;
+        // The pattern could resolve to a constant; verify that that is not the case.
+ if let crate::Pat::Bind { .. } = body[pat_id] {
+ Some((container, pat_id))
+ } else {
+ None
+ }
+ }
+ pub(super) fn self_param_to_def(
+ &mut self,
+ src: InFile<ast::SelfParam>,
+ ) -> Option<(DefWithBodyId, PatId)> {
+ let container = self.find_pat_or_label_container(src.syntax())?;
+ let (_body, source_map) = self.db.body_with_source_map(container);
+ let pat_id = source_map.node_self_param(src.as_ref())?;
+ Some((container, pat_id))
+ }
+ pub(super) fn label_to_def(
+ &mut self,
+ src: InFile<ast::Label>,
+ ) -> Option<(DefWithBodyId, LabelId)> {
+ let container = self.find_pat_or_label_container(src.syntax())?;
+ let (_body, source_map) = self.db.body_with_source_map(container);
+ let label_id = source_map.node_label(src.as_ref())?;
+ Some((container, label_id))
+ }
+
+ pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> {
+ let map = self.dyn_map(src.as_ref())?;
+ map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
+ }
+
+ /// (AttrId, derive attribute call id, derive call ids)
+ pub(super) fn attr_to_derive_macro_call(
+ &mut self,
+ item: InFile<&ast::Adt>,
+ src: InFile<ast::Attr>,
+ ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
+ let map = self.dyn_map(item)?;
+ map[keys::DERIVE_MACRO_CALL]
+ .get(&src.value)
+ .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
+ }
+
+ pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
+ self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty())
+ }
+
+ fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
+ &mut self,
+ src: InFile<Ast>,
+ key: Key<Ast, ID>,
+ ) -> Option<ID> {
+ self.dyn_map(src.as_ref())?[key].get(&src.value).copied()
+ }
+
+ fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
+ let container = self.find_container(src.map(|it| it.syntax()))?;
+ Some(self.cache_for(container, src.file_id))
+ }
+
+ fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
+ let db = self.db;
+ self.cache
+ .entry((container, file_id))
+ .or_insert_with(|| container.child_by_source(db, file_id))
+ }
+
+ pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let dyn_map = self.cache_for(container, src.file_id);
+ dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x))
+ }
+
+ pub(super) fn lifetime_param_to_def(
+ &mut self,
+ src: InFile<ast::LifetimeParam>,
+ ) -> Option<LifetimeParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let dyn_map = self.cache_for(container, src.file_id);
+ dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied()
+ }
+
+ pub(super) fn const_param_to_def(
+ &mut self,
+ src: InFile<ast::ConstParam>,
+ ) -> Option<ConstParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let dyn_map = self.cache_for(container, src.file_id);
+ dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x))
+ }
+
+ pub(super) fn generic_param_to_def(
+ &mut self,
+ InFile { file_id, value }: InFile<ast::GenericParam>,
+ ) -> Option<GenericParamId> {
+ match value {
+ ast::GenericParam::ConstParam(it) => {
+ self.const_param_to_def(InFile::new(file_id, it)).map(GenericParamId::ConstParamId)
+ }
+ ast::GenericParam::LifetimeParam(it) => self
+ .lifetime_param_to_def(InFile::new(file_id, it))
+ .map(GenericParamId::LifetimeParamId),
+ ast::GenericParam::TypeParam(it) => {
+ self.type_param_to_def(InFile::new(file_id, it)).map(GenericParamId::TypeParamId)
+ }
+ }
+ }
+
+ pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroId> {
+ self.dyn_map(src.as_ref()).and_then(|it| match &src.value {
+ ast::Macro::MacroRules(value) => {
+ it[keys::MACRO_RULES].get(value).copied().map(MacroId::from)
+ }
+ ast::Macro::MacroDef(value) => it[keys::MACRO2].get(value).copied().map(MacroId::from),
+ })
+ }
+
+ pub(super) fn proc_macro_to_def(&mut self, src: InFile<ast::Fn>) -> Option<MacroId> {
+ self.dyn_map(src.as_ref())
+ .and_then(|it| it[keys::PROC_MACRO].get(&src.value).copied().map(MacroId::from))
+ }
+
+ pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
+ for container in src.ancestors_with_macros_skip_attr_item(self.db.upcast()) {
+ if let Some(res) = self.container_to_def(container) {
+ return Some(res);
+ }
+ }
+
+ let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).get(0).copied()?;
+ Some(def.into())
+ }
+
+ fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
+ let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
+ match item {
+ ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
+ ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
+ ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
+ ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
+ ast::Item::TypeAlias(it) => {
+ self.type_alias_to_def(container.with_value(it))?.into()
+ }
+ ast::Item::Struct(it) => {
+ let def = self.struct_to_def(container.with_value(it))?;
+ VariantId::from(def).into()
+ }
+ ast::Item::Union(it) => {
+ let def = self.union_to_def(container.with_value(it))?;
+ VariantId::from(def).into()
+ }
+ ast::Item::Fn(it) => {
+ let def = self.fn_to_def(container.with_value(it))?;
+ DefWithBodyId::from(def).into()
+ }
+ ast::Item::Static(it) => {
+ let def = self.static_to_def(container.with_value(it))?;
+ DefWithBodyId::from(def).into()
+ }
+ ast::Item::Const(it) => {
+ let def = self.const_to_def(container.with_value(it))?;
+ DefWithBodyId::from(def).into()
+ }
+ _ => return None,
+ }
+ } else {
+ let it = ast::Variant::cast(container.value)?;
+ let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?;
+ VariantId::from(def).into()
+ };
+ Some(cont)
+ }
+
+ fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
+ let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
+ for InFile { file_id, value } in ancestors {
+ let item = match ast::Item::cast(value) {
+ Some(it) => it,
+ None => continue,
+ };
+ let res: GenericDefId = match item {
+ ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::TypeAlias(it) => {
+ self.type_alias_to_def(InFile::new(file_id, it))?.into()
+ }
+ ast::Item::Impl(it) => self.impl_to_def(InFile::new(file_id, it))?.into(),
+ _ => continue,
+ };
+ return Some(res);
+ }
+ None
+ }
+
+ fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
+ let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
+ for InFile { file_id, value } in ancestors {
+ let item = match ast::Item::cast(value) {
+ Some(it) => it,
+ None => continue,
+ };
+ let res: DefWithBodyId = match item {
+ ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Static(it) => self.static_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
+ _ => continue,
+ };
+ return Some(res);
+ }
+ None
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub(crate) enum ChildContainer {
+ DefWithBodyId(DefWithBodyId),
+ ModuleId(ModuleId),
+ TraitId(TraitId),
+ ImplId(ImplId),
+ EnumId(EnumId),
+ VariantId(VariantId),
+ TypeAliasId(TypeAliasId),
+    /// XXX: this might be the same def as, for example, an `EnumId`. However,
+    /// here the children are generic parameters, and not, e.g., enum variants.
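+    /// For instance (illustrative, not in the original comment): for `enum E<T> { A }`,
+    /// `ChildContainer::EnumId` yields the variant `A`, while `ChildContainer::GenericDefId`
+    /// for the same enum yields the type parameter `T`.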
+ GenericDefId(GenericDefId),
+}
+impl_from! {
+ DefWithBodyId,
+ ModuleId,
+ TraitId,
+ ImplId,
+ EnumId,
+ VariantId,
+ TypeAliasId,
+ GenericDefId
+ for ChildContainer
+}
+
+impl ChildContainer {
+ fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
+ let db = db.upcast();
+ match self {
+ ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
+ ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
+ ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
+ ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
+ ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
+ ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
+ ChildContainer::TypeAliasId(_) => DynMap::default(),
+ ChildContainer::GenericDefId(it) => it.child_by_source(db, file_id),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
new file mode 100644
index 000000000..1eb51b20c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -0,0 +1,915 @@
+//! Lookup hir elements using positions in the source code. This is a lossy
+//! transformation: in general, a single source might correspond to several
+//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on
+//! modules.
+//!
+//! So, this module should not be used during hir construction; it exists
+//! purely for "IDE needs".
+use std::{
+ iter::{self, once},
+ sync::Arc,
+};
+
+use hir_def::{
+ body::{
+ self,
+ scope::{ExprScopes, ScopeId},
+ Body, BodySourceMap,
+ },
+ expr::{ExprId, Pat, PatId},
+ macro_id_to_def_id,
+ path::{ModPath, Path, PathKind},
+ resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+ type_ref::Mutability,
+ AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId,
+ Lookup, ModuleDefId, VariantId,
+};
+use hir_expand::{
+ builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name::AsName, HirFileId, InFile,
+};
+use hir_ty::{
+ diagnostics::{
+ record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
+ UnsafeExpr,
+ },
+ method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution,
+ TyExt, TyKind, TyLoweringContext,
+};
+use itertools::Itertools;
+use smallvec::SmallVec;
+use syntax::{
+ ast::{self, AstNode},
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
+ BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
+ Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+};
+
+/// `SourceAnalyzer` is a convenience wrapper which exposes the HIR API in terms
+/// of original source files. It should not be used inside the HIR itself.
+#[derive(Debug)]
+pub(crate) struct SourceAnalyzer {
+ pub(crate) file_id: HirFileId,
+ pub(crate) resolver: Resolver,
+ def: Option<(DefWithBodyId, Arc<Body>, Arc<BodySourceMap>)>,
+ infer: Option<Arc<InferenceResult>>,
+}
+
+impl SourceAnalyzer {
+ pub(crate) fn new_for_body(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer {
+ resolver,
+ def: Some((def, body, source_map)),
+ infer: Some(db.infer(def)),
+ file_id,
+ }
+ }
+
+ pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+ }
+
+ pub(crate) fn new_for_resolver(
+ resolver: Resolver,
+ node: InFile<&SyntaxNode>,
+ ) -> SourceAnalyzer {
+ SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id }
+ }
+
+ fn body_source_map(&self) -> Option<&BodySourceMap> {
+ self.def.as_ref().map(|(.., source_map)| &**source_map)
+ }
+ fn body(&self) -> Option<&Body> {
+ self.def.as_ref().map(|(_, body, _)| &**body)
+ }
+
+ fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<ExprId> {
+ let src = match expr {
+ ast::Expr::MacroExpr(expr) => {
+ self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))?
+ }
+ _ => InFile::new(self.file_id, expr.clone()),
+ };
+ let sm = self.body_source_map()?;
+ sm.node_expr(src.as_ref())
+ }
+
+ fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+ // FIXME: macros, see `expr_id`
+ let src = InFile { file_id: self.file_id, value: pat };
+ self.body_source_map()?.node_pat(src)
+ }
+
+ fn expand_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: InFile<ast::MacroCall>,
+ ) -> Option<InFile<ast::Expr>> {
+ let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
+ let expanded = db.parse_or_expand(macro_file)?;
+
+ let res = match ast::MacroCall::cast(expanded.clone()) {
+ Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?,
+ _ => InFile::new(macro_file, ast::Expr::cast(expanded)?),
+ };
+ Some(res)
+ }
+
+ pub(crate) fn is_implicit_reborrow(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<Mutability> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let adjustments = infer.expr_adjustments.get(&expr_id)?;
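+        // An implicit reborrow shows up in the adjustments as a deref immediately followed
+        // by a new borrow, i.e. the `&mut *expr` (or `&*expr`) that inference inserts.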
+ adjustments.windows(2).find_map(|slice| match slice {
+ &[Adjustment {kind: Adjust::Deref(None), ..}, Adjustment {kind: Adjust::Borrow(AutoBorrow::Ref(m)), ..}] => Some(match m {
+ hir_ty::Mutability::Mut => Mutability::Mut,
+ hir_ty::Mutability::Not => Mutability::Shared,
+ }),
+ _ => None,
+ })
+ }
+
+ pub(crate) fn type_of_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<(Type, Option<Type>)> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .expr_adjustments
+ .get(&expr_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
+ let ty = infer[expr_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_pat(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<(Type, Option<Type>)> {
+ let pat_id = self.pat_id(pat)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .pat_adjustments
+ .get(&pat_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.clone()));
+ let ty = infer[pat_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_self(
+ &self,
+ db: &dyn HirDatabase,
+ param: &ast::SelfParam,
+ ) -> Option<Type> {
+ let src = InFile { file_id: self.file_id, value: param };
+ let pat_id = self.body_source_map()?.node_self_param(src)?;
+ let ty = self.infer.as_ref()?[pat_id].clone();
+ Some(Type::new_with_resolver(db, &self.resolver, ty))
+ }
+
+ pub(crate) fn binding_mode_of_pat(
+ &self,
+ _db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<BindingMode> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let infer = self.infer.as_ref()?;
+ infer.pat_binding_modes.get(&pat_id).map(|bm| match bm {
+ hir_ty::BindingMode::Move => BindingMode::Move,
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
+ BindingMode::Ref(Mutability::Shared)
+ }
+ })
+ }
+ pub(crate) fn pattern_adjustments(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<SmallVec<[Type; 1]>> {
+ let pat_id = self.pat_id(&pat)?;
+ let infer = self.infer.as_ref()?;
+ Some(
+ infer
+ .pat_adjustments
+ .get(&pat_id)?
+ .iter()
+ .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
+ .collect(),
+ )
+ }
+
+ pub(crate) fn resolve_method_call_as_callable(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let ty = db.value_ty(func.into()).substitute(Interner, &substs);
+ let ty = Type::new_with_resolver(db, &self.resolver, ty);
+ let mut res = ty.as_callable(db)?;
+ res.is_bound_method = true;
+ Some(res)
+ }
+
+ pub(crate) fn resolve_method_call(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<FunctionId> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let f_in_impl = self.resolve_impl_method(db, f_in_trait, &substs);
+ f_in_impl.or(Some(f_in_trait))
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_record_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let expr = ast::Expr::from(record_expr);
+ let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?;
+
+ let local_name = field.field_name()?.as_name();
+ let local = if field.name_ref().is_some() {
+ None
+ } else {
+ let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
+ match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::LocalBinding(pat_id)) => {
+ Some(Local { pat_id, parent: self.resolver.body_owner()? })
+ }
+ _ => None,
+ }
+ };
+ let (_, subst) = self.infer.as_ref()?.type_of_expr.get(expr_id)?.as_adt()?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_expr(expr_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
+ let field_ty =
+ db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
+ Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)))
+ }
+
+ pub(crate) fn resolve_record_pat_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordPatField,
+ ) -> Option<Field> {
+ let field_name = field.field_name()?.as_name();
+ let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let pat_id = self.pat_id(&record_pat.into())?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
+ Some(field.into())
+ }
+
+ pub(crate) fn resolve_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<Macro> {
+ let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_bind_pat_to_const(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<ModuleDef> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let body = self.body()?;
+ let path = match &body[pat_id] {
+ Pat::Path(path) => path,
+ _ => return None,
+ };
+ let res = resolve_hir_path(db, &self.resolver, path)?;
+ match res {
+ PathResolution::Def(def) => Some(def),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn HirDatabase,
+ path: &ast::Path,
+ ) -> Option<PathResolution> {
+ let parent = path.syntax().parent();
+ let parent = || parent.clone();
+
+ let mut prefer_value_ns = false;
+ let resolved = (|| {
+ if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
+ let expr_id = self.expr_id(db, &path_expr.into())?;
+ let infer = self.infer.as_ref()?;
+ if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
+ let assoc = match assoc {
+ AssocItemId::FunctionId(f_in_trait) => {
+ match infer.type_of_expr.get(expr_id) {
+ None => assoc,
+ Some(func_ty) => {
+ if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
+ self.resolve_impl_method(db, f_in_trait, subs)
+ .map(AssocItemId::FunctionId)
+ .unwrap_or(assoc)
+ } else {
+ assoc
+ }
+ }
+ }
+ }
+
+ _ => assoc,
+ };
+
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ prefer_value_ns = true;
+ } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
+ let pat_id = self.pat_id(&path_pat.into())?;
+ if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
+ let expr_id = self.expr_id(db, &rec_lit.into())?;
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else {
+ let record_pat = parent().and_then(ast::RecordPat::cast).map(ast::Pat::from);
+ let tuple_struct_pat =
+ || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
+ if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
+ let pat_id = self.pat_id(&pat)?;
+ let variant_res_for_pat =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id);
+ if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ }
+ }
+ None
+ })();
+ if let Some(_) = resolved {
+ return resolved;
+ }
+
+        // This must be a normal source file rather than a macro file.
+ let hygiene = Hygiene::new(db.upcast(), self.file_id);
+ let ctx = body::LowerCtx::with_hygiene(db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+
+ // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
+ // trying to resolve foo::bar.
+ if let Some(use_tree) = parent().and_then(ast::UseTree::cast) {
+ if use_tree.coloncolon_token().is_some() {
+ return resolve_hir_path_qualifier(db, &self.resolver, &hir_path);
+ }
+ }
+
+ let meta_path = path
+ .syntax()
+ .ancestors()
+ .take_while(|it| {
+ let kind = it.kind();
+ ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)
+ })
+ .last()
+ .and_then(ast::Meta::cast);
+
+ // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
+ // trying to resolve foo::bar.
+ if path.parent_path().is_some() {
+ return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
+ None if meta_path.is_some() => {
+ path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ })
+ }
+ res => res,
+ };
+ } else if let Some(meta_path) = meta_path {
+            // Case where we are resolving the final path segment of a path in an attribute.
+            // In this case we have to check for inert/builtin attributes and tools, and
+            // prioritize resolution of attributes over other namespaces.
+ if let Some(name_ref) = path.as_single_name_ref() {
+ let builtin =
+ BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
+ if let Some(_) = builtin {
+ return builtin.map(PathResolution::BuiltinAttr);
+ }
+
+ if let Some(attr) = meta_path.parent_attr() {
+ let adt = if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ };
+ if let Some(adt) = adt {
+ let ast_id = db.ast_id_map(self.file_id).ast_id(&adt);
+ if let Some(helpers) = self
+ .resolver
+ .def_map()
+ .derive_helpers_in_scope(InFile::new(self.file_id, ast_id))
+ {
+ // FIXME: Multiple derives can have the same helper
+ let name_ref = name_ref.as_name();
+ for (macro_id, mut helpers) in
+ helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+ {
+ if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
+ {
+ return Some(PathResolution::DeriveHelper(DeriveHelper {
+ derive: *macro_id,
+ idx,
+ }));
+ }
+ }
+ }
+ }
+ }
+ }
+ return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
+ Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))),
+                // This labels any path that starts with a tool module as the tool itself. This
+                // is technically wrong, but there is no benefit in differentiating these two
+                // cases for the time being.
+ None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ }),
+ };
+ }
+ if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) {
+ resolve_hir_path_qualifier(db, &self.resolver, &hir_path)
+ } else {
+ resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns)
+ }
+ }
+
+ pub(crate) fn record_literal_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ literal: &ast::RecordExpr,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let expr_id = self.expr_id(db, &literal.clone().into())?;
+ let substs = infer.type_of_expr[expr_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ pub(crate) fn record_pattern_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ pattern: &ast::RecordPat,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let pat_id = self.pat_id(&pattern.clone().into())?;
+ let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ fn missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ substs: &Substitution,
+ variant: VariantId,
+ missing_fields: Vec<LocalFieldId>,
+ ) -> Vec<(Field, Type)> {
+ let field_types = db.field_types(variant);
+
+ missing_fields
+ .into_iter()
+ .map(|local_id| {
+ let field = FieldId { parent: variant, local_id };
+ let ty = field_types[local_id].clone().substitute(Interner, substs);
+ (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty))
+ })
+ .collect()
+ }
+
+ pub(crate) fn expand(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<HirFileId> {
+ let krate = self.resolver.krate();
+ let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
+ self.resolver
+ .resolve_path_as_macro(db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(db.upcast(), it))
+ })?;
+ Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ }
+
+ pub(crate) fn resolve_variant(
+ &self,
+ db: &dyn HirDatabase,
+ record_lit: ast::RecordExpr,
+ ) -> Option<VariantId> {
+ let infer = self.infer.as_ref()?;
+ let expr_id = self.expr_id(db, &record_lit.into())?;
+ infer.variant_resolution_for_expr(expr_id)
+ }
+
+ pub(crate) fn is_unsafe_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> bool {
+ // check for asm/global_asm
+ if let Some(mac) = self.resolve_macro_call(db, macro_call) {
+ let ex = match mac.id {
+ hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
+ hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
+ _ => hir_def::MacroExpander::Declarative,
+ };
+ match ex {
+ hir_def::MacroExpander::BuiltIn(e)
+ if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
+ {
+ return true
+ }
+ _ => (),
+ }
+ }
+ let macro_expr = match macro_call
+ .map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
+ .transpose()
+ {
+ Some(it) => it,
+ None => return false,
+ };
+
+ if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
+ if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) {
+ let mut is_unsafe = false;
+ unsafe_expressions(
+ db,
+ infer,
+ *def,
+ body,
+ expanded_expr,
+ &mut |UnsafeExpr { inside_unsafe_block, .. }| is_unsafe |= !inside_unsafe_block,
+ );
+ return is_unsafe;
+ }
+ }
+ false
+ }
+
+ fn resolve_impl_method(
+ &self,
+ db: &dyn HirDatabase,
+ func: FunctionId,
+ substs: &Substitution,
+ ) -> Option<FunctionId> {
+ let impled_trait = match func.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(trait_id) => trait_id,
+ _ => return None,
+ };
+ if substs.is_empty(Interner) {
+ return None;
+ }
+ let self_ty = substs.at(Interner, 0).ty(Interner)?;
+ let krate = self.resolver.krate();
+ let trait_env = self.resolver.body_owner()?.as_generic_def_id().map_or_else(
+ || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
+ |d| db.trait_environment(d),
+ );
+
+ let fun_data = db.function_data(func);
+ method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
+ }
+}
+
+fn scope_for(
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ node: InFile<&SyntaxNode>,
+) -> Option<ScopeId> {
+ node.value
+ .ancestors()
+ .filter_map(ast::Expr::cast)
+ .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it)))
+ .find_map(|it| scopes.scope_for(it))
+}
+
+fn scope_for_offset(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ if from_file == file_id {
+ return Some((value.text_range(), scope));
+ }
+
+ // FIXME handle attribute expansion
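+            // Walk up the chain of macro calls that produced this file until we find the
+            // call site that lives in `from_file`, and use that call's range instead.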
+ let source = iter::successors(file_id.call_node(db.upcast()), |it| {
+ it.file_id.call_node(db.upcast())
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.value.text_range(), scope))
+ })
+ .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
+ // find containing scope
+ .min_by_key(|(expr_range, _scope)| expr_range.len())
+ .map(|(expr_range, scope)| {
+ adjust(db, scopes, source_map, expr_range, from_file, offset).unwrap_or(*scope)
+ })
+}
+
+// XXX: during completion, the cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
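+//
+// For example (illustrative, not from the original source): with `fn f() { let x = 1; $0 }`,
+// the cursor sits after the `let` statement but inside no recorded expression, so we pick
+// the innermost child scope whose range still starts before the cursor.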
+fn adjust(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ expr_range: TextRange,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ let child_scopes = scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let source = source_map.expr_syntax(*id).ok()?;
+ // FIXME: correctly handle macro expansion
+ if source.file_id != from_file {
+ return None;
+ }
+ let root = source.file_syntax(db.upcast());
+ let node = source.value.to_node(&root);
+ Some((node.syntax().text_range(), scope))
+ })
+ .filter(|&(range, _)| {
+ range.start() <= offset && expr_range.contains_range(range) && range != expr_range
+ });
+
+ child_scopes
+ .max_by(|&(r1, _), &(r2, _)| {
+ if r1.contains_range(r2) {
+ std::cmp::Ordering::Greater
+ } else if r2.contains_range(r1) {
+ std::cmp::Ordering::Less
+ } else {
+ r1.start().cmp(&r2.start())
+ }
+ })
+ .map(|(_ptr, scope)| *scope)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolve_hir_path_(db, resolver, path, false)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path_as_macro(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<Macro> {
+ resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(Into::into)
+}
+
+fn resolve_hir_path_(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+ prefer_value_ns: bool,
+) -> Option<PathResolution> {
+ let types = || {
+ let (ty, unresolved) = match path.type_anchor() {
+ Some(type_ref) => {
+ let (_, res) = TyLoweringContext::new(db, resolver).lower_ty_ext(type_ref);
+ res.map(|ty_ns| (ty_ns, path.segments().first()))
+ }
+ None => {
+ let (ty, remaining) =
+ resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
+ match remaining {
+ Some(remaining) if remaining > 1 => {
+ if remaining + 1 == path.segments().len() {
+ Some((ty, path.segments().last()))
+ } else {
+ None
+ }
+ }
+ _ => Some((ty, path.segments().get(1))),
+ }
+ }
+ }?;
+
+ // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
+ // within the trait's associated types.
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
+ if let Some(type_alias_id) =
+ db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+ {
+ return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
+ }
+ }
+
+ let res = match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ };
+ match unresolved {
+ Some(unresolved) => resolver
+ .generic_def()
+ .and_then(|def| {
+ hir_ty::associated_type_shorthand_candidates(
+ db,
+ def,
+ res.in_type_ns()?,
+ |name, _, id| (name == unresolved.name).then(|| id),
+ )
+ })
+ .map(TypeAlias::from)
+ .map(Into::into)
+ .map(PathResolution::Def),
+ None => Some(res),
+ }
+ };
+
+ let body_owner = resolver.body_owner();
+ let values = || {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(pat_id) => {
+ let var = Local { parent: body_owner?, pat_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+ };
+
+ let items = || {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ };
+
+ let macros = || {
+ resolver
+ .resolve_path_as_macro(db.upcast(), path.mod_path())
+ .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
+ };
+
+ if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
+ .or_else(items)
+ .or_else(macros)
+}
+
+/// Resolves a path where we know it is a qualifier of another path.
+///
+/// For example, if we have:
+/// ```
+/// mod my {
+/// pub mod foo {
+/// struct Bar;
+/// }
+///
+/// pub fn foo() {}
+/// }
+/// ```
+/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
+fn resolve_hir_path_qualifier(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ .map(|ty| match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ })
+ .or_else(|| {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ })
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
new file mode 100644
index 000000000..616a406c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -0,0 +1,348 @@
+//! File symbol extraction.
+
+use base_db::FileRange;
+use hir_def::{
+ item_tree::ItemTreeNode, src::HasSource, AdtId, AssocItemId, AssocItemLoc, DefWithBodyId,
+ HasModule, ImplId, ItemContainerId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId,
+};
+use hir_expand::{HirFileId, InFile};
+use hir_ty::db::HirDatabase;
+use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+
+use crate::{Module, Semantics};
+
+/// The actual data that is stored in the index. It should be as compact as
+/// possible.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FileSymbol {
+ pub name: SmolStr,
+ pub loc: DeclarationLocation,
+ pub kind: FileSymbolKind,
+ pub container_name: Option<SmolStr>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DeclarationLocation {
+ /// The file id for both the `ptr` and `name_ptr`.
+ pub hir_file_id: HirFileId,
+ /// This points to the whole syntax node of the declaration.
+ pub ptr: SyntaxNodePtr,
+ /// This points to the [`syntax::ast::Name`] identifier of the declaration.
+ pub name_ptr: SyntaxNodePtr,
+}
+
+impl DeclarationLocation {
+ pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> Option<SyntaxNode> {
+ let root = sema.parse_or_expand(self.hir_file_id)?;
+ Some(self.ptr.to_node(&root))
+ }
+
+ pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ let node = resolve_node(db, self.hir_file_id, &self.ptr)?;
+ Some(node.as_ref().original_file_range(db.upcast()))
+ }
+
+ pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ let node = resolve_node(db, self.hir_file_id, &self.name_ptr)?;
+ node.as_ref().original_file_range_opt(db.upcast())
+ }
+}
+
+fn resolve_node(
+ db: &dyn HirDatabase,
+ file_id: HirFileId,
+ ptr: &SyntaxNodePtr,
+) -> Option<InFile<SyntaxNode>> {
+ let root = db.parse_or_expand(file_id)?;
+ let node = ptr.to_node(&root);
+ Some(InFile::new(file_id, node))
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
+pub enum FileSymbolKind {
+ Const,
+ Enum,
+ Function,
+ Macro,
+ Module,
+ Static,
+ Struct,
+ Trait,
+ TypeAlias,
+ Union,
+}
+
+impl FileSymbolKind {
+ pub fn is_type(self: FileSymbolKind) -> bool {
+ matches!(
+ self,
+ FileSymbolKind::Struct
+ | FileSymbolKind::Enum
+ | FileSymbolKind::Trait
+ | FileSymbolKind::TypeAlias
+ | FileSymbolKind::Union
+ )
+ }
+}
+
+/// Represents an outstanding module that the symbol collector must collect symbols from.
+struct SymbolCollectorWork {
+ module_id: ModuleId,
+ parent: Option<DefWithBodyId>,
+}
+
+pub struct SymbolCollector<'a> {
+ db: &'a dyn HirDatabase,
+ symbols: Vec<FileSymbol>,
+ work: Vec<SymbolCollectorWork>,
+ current_container_name: Option<SmolStr>,
+}
+
+/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
+/// all symbols that should be indexed for the given module.
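+///
+/// Illustrative usage (not part of the original docs); `db` and `module` are assumed to
+/// come from the surrounding IDE machinery:
+///
+/// ```ignore
+/// let symbols: Vec<FileSymbol> = SymbolCollector::collect(db, module);
+/// ```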
+impl<'a> SymbolCollector<'a> {
+ pub fn collect(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
+ let mut symbol_collector = SymbolCollector {
+ db,
+ symbols: Default::default(),
+ current_container_name: None,
+            // The initial work is the root module we're collecting; additional work will
+            // be populated as we traverse the module's definitions.
+ work: vec![SymbolCollectorWork { module_id: module.into(), parent: None }],
+ };
+
+ while let Some(work) = symbol_collector.work.pop() {
+ symbol_collector.do_work(work);
+ }
+
+ symbol_collector.symbols
+ }
+
+ fn do_work(&mut self, work: SymbolCollectorWork) {
+ self.db.unwind_if_cancelled();
+
+ let parent_name = work.parent.and_then(|id| self.def_with_body_id_name(id));
+ self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
+ }
+
+ fn collect_from_module(&mut self, module_id: ModuleId) {
+ let def_map = module_id.def_map(self.db.upcast());
+ let scope = &def_map[module_id.local_id].scope;
+
+ for module_def_id in scope.declarations() {
+ match module_def_id {
+ ModuleDefId::ModuleId(id) => self.push_module(id),
+ ModuleDefId::FunctionId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Function);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::AdtId(AdtId::StructId(id)) => {
+ self.push_decl(id, FileSymbolKind::Struct)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, FileSymbolKind::Enum),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, FileSymbolKind::Union),
+ ModuleDefId::ConstId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Const);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::StaticId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Static);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::TraitId(id) => {
+ self.push_decl(id, FileSymbolKind::Trait);
+ self.collect_from_trait(id);
+ }
+ ModuleDefId::TypeAliasId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
+ }
+ ModuleDefId::MacroId(id) => match id {
+ MacroId::Macro2Id(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::MacroRulesId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::ProcMacroId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ },
+ // Don't index these.
+ ModuleDefId::BuiltinType(_) => {}
+ ModuleDefId::EnumVariantId(_) => {}
+ }
+ }
+
+ for impl_id in scope.impls() {
+ self.collect_from_impl(impl_id);
+ }
+
+ for const_id in scope.unnamed_consts() {
+ self.collect_from_body(const_id);
+ }
+
+ for (_, id) in scope.legacy_macros() {
+ for &id in id {
+ if id.module(self.db.upcast()) == module_id {
+ match id {
+ MacroId::Macro2Id(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::MacroRulesId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::ProcMacroId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_from_body(&mut self, body_id: impl Into<DefWithBodyId>) {
+ let body_id = body_id.into();
+ let body = self.db.body(body_id);
+
+ // Descend into the blocks and enqueue collection of all modules within.
+ for (_, def_map) in body.blocks(self.db.upcast()) {
+ for (id, _) in def_map.modules() {
+ self.work.push(SymbolCollectorWork {
+ module_id: def_map.module_id(id),
+ parent: Some(body_id),
+ });
+ }
+ }
+ }
+
+ fn collect_from_impl(&mut self, impl_id: ImplId) {
+ let impl_data = self.db.impl_data(impl_id);
+ for &assoc_item_id in &impl_data.items {
+ self.push_assoc_item(assoc_item_id)
+ }
+ }
+
+ fn collect_from_trait(&mut self, trait_id: TraitId) {
+ let trait_data = self.db.trait_data(trait_id);
+ self.with_container_name(trait_data.name.as_text(), |s| {
+ for &(_, assoc_item_id) in &trait_data.items {
+ s.push_assoc_item(assoc_item_id);
+ }
+ });
+ }
+
+ fn with_container_name(&mut self, container_name: Option<SmolStr>, f: impl FnOnce(&mut Self)) {
+ if let Some(container_name) = container_name {
+ let prev = self.current_container_name.replace(container_name);
+ f(self);
+ self.current_container_name = prev;
+ } else {
+ f(self);
+ }
+ }
+
+ fn current_container_name(&self) -> Option<SmolStr> {
+ self.current_container_name.clone()
+ }
+
+ fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
+ match body_id {
+ DefWithBodyId::FunctionId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::StaticId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::ConstId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ }
+ }
+
+ fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
+ match assoc_item_id {
+ AssocItemId::FunctionId(id) => self.push_decl_assoc(id, FileSymbolKind::Function),
+ AssocItemId::ConstId(id) => self.push_decl_assoc(id, FileSymbolKind::Const),
+ AssocItemId::TypeAliasId(id) => self.push_decl_assoc(id, FileSymbolKind::TypeAlias),
+ }
+ }
+
+ fn push_decl_assoc<L, T>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup<Data = AssocItemLoc<T>>,
+ T: ItemTreeNode,
+ <T as ItemTreeNode>::Source: HasName,
+ {
+ fn container_name(db: &dyn HirDatabase, container: ItemContainerId) -> Option<SmolStr> {
+ match container {
+ ItemContainerId::ModuleId(module_id) => {
+ let module = Module::from(module_id);
+ module.name(db).and_then(|name| name.as_text())
+ }
+ ItemContainerId::TraitId(trait_id) => {
+ let trait_data = db.trait_data(trait_id);
+ trait_data.name.as_text()
+ }
+ ItemContainerId::ImplId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+ }
+
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+ let container_name =
+ container_name(s.db, loc.container).or_else(|| s.current_container_name());
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name,
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_decl<L>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup,
+ <L as Lookup>::Data: HasSource,
+ <<L as Lookup>::Data as HasSource>::Value: HasName,
+ {
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_module(&mut self, module_id: ModuleId) {
+ self.push_file_symbol(|s| {
+ let def_map = module_id.def_map(s.db.upcast());
+ let module_data = &def_map[module_id.local_id];
+ let declaration = module_data.origin.declaration()?;
+ let module = declaration.to_node(s.db.upcast());
+ let name_node = module.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind: FileSymbolKind::Module,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: declaration.file_id,
+ ptr: SyntaxNodePtr::new(module.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_file_symbol(&mut self, f: impl FnOnce(&Self) -> Option<FileSymbol>) {
+ if let Some(file_symbol) = f(self) {
+ self.symbols.push(file_symbol);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
new file mode 100644
index 000000000..fca09d384
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "ide-assists"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+
+itertools = "0.10.3"
+either = "1.7.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+expect-test = "1.4.0"
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
new file mode 100644
index 000000000..d4d148c77
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -0,0 +1,16 @@
+//! Settings for tweaking assists.
+//!
+//! The fun thing here is `SnippetCap` -- this type can only be created in this
+//! module, and we use it to statically check that we only produce snippet
+//! assists if we are allowed to.
+
+use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
+
+use crate::AssistKind;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct AssistConfig {
+ pub snippet_cap: Option<SnippetCap>,
+ pub allowed: Option<Vec<AssistKind>>,
+ pub insert_use: InsertUseConfig,
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
new file mode 100644
index 000000000..f9b426614
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
@@ -0,0 +1,347 @@
+//! See [`AssistContext`].
+
+use std::mem;
+
+use hir::Semantics;
+use ide_db::{
+ base_db::{AnchoredPathBuf, FileId, FileRange},
+ SnippetCap,
+};
+use ide_db::{
+ label::Label,
+ source_change::{FileSystemEdit, SourceChange},
+ RootDatabase,
+};
+use syntax::{
+ algo::{self, find_node_at_offset, find_node_at_range},
+ AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
+ SyntaxToken, TextRange, TextSize, TokenAtOffset,
+};
+use text_edit::{TextEdit, TextEditBuilder};
+
+use crate::{
+ assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel,
+};
+
+/// `AssistContext` allows applying an assist or checking whether it could be applied.
+///
+/// Assists use a somewhat over-engineered approach, given the current needs.
+/// The assists workflow consists of two phases. In the first phase, a user asks
+/// for the list of available assists. In the second phase, the user picks a
+/// particular assist and it gets applied.
+///
+/// There are two peculiarities here:
+///
+/// * first, we ideally avoid computing more things than necessary to answer "is
+/// assist applicable" in the first phase.
+/// * second, when we are applying an assist, we don't have a guarantee that there
+/// weren't any changes between the point when the user asked for assists and when
+/// they applied a particular assist. So, when applying an assist, we need to do
+/// all the checks from scratch.
+///
+/// To avoid repeating the same code twice for both "check" and "apply"
+/// functions, we use an approach reminiscent of that of Django's function-based
+/// views dealing with forms. Each assist receives a runtime parameter,
+/// `resolve`. It first checks if an edit is applicable (potentially computing
+/// info required to compute the actual edit). If it is applicable, and
+/// `resolve` is `true`, it then computes the actual edit.
+///
+/// So, to implement the original assists workflow, we can first apply each edit
+/// with `resolve = false`, and then apply the selected edit again, with
+/// `resolve = true` this time.
+///
+/// Note, however, that we don't actually use such two-phase logic at the
+/// moment, because the LSP API is pretty awkward in this place, and it's much
+/// easier to just compute the edit eagerly :-)
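+///
+/// A minimal sketch of the resulting handler shape (the `noop` assist below is
+/// purely illustrative and not a real handler):
+///
+/// ```ignore
+/// pub(crate) fn noop(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+///     // Check phase: bail out cheaply if the assist does not apply here.
+///     let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?;
+///     // The closure is the "apply" part; it only runs when the edit is resolved.
+///     acc.add(
+///         AssistId("noop", AssistKind::RefactorRewrite),
+///         "Do nothing useful",
+///         let_stmt.syntax().text_range(),
+///         |builder| builder.insert(let_stmt.syntax().text_range().end(), " // noop"),
+///     )
+/// }
+/// ```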
+pub(crate) struct AssistContext<'a> {
+ pub(crate) config: &'a AssistConfig,
+ pub(crate) sema: Semantics<'a, RootDatabase>,
+ frange: FileRange,
+ trimmed_range: TextRange,
+ source_file: SourceFile,
+}
+
+impl<'a> AssistContext<'a> {
+ pub(crate) fn new(
+ sema: Semantics<'a, RootDatabase>,
+ config: &'a AssistConfig,
+ frange: FileRange,
+ ) -> AssistContext<'a> {
+ let source_file = sema.parse(frange.file_id);
+
+ let start = frange.range.start();
+ let end = frange.range.end();
+ let left = source_file.syntax().token_at_offset(start);
+ let right = source_file.syntax().token_at_offset(end);
+ let left =
+ left.right_biased().and_then(|t| algo::skip_whitespace_token(t, Direction::Next));
+ let right =
+ right.left_biased().and_then(|t| algo::skip_whitespace_token(t, Direction::Prev));
+ let left = left.map(|t| t.text_range().start().clamp(start, end));
+ let right = right.map(|t| t.text_range().end().clamp(start, end));
+
+ let trimmed_range = match (left, right) {
+ (Some(left), Some(right)) if left <= right => TextRange::new(left, right),
+ // Selection solely consists of whitespace so just fall back to the original
+ _ => frange.range,
+ };
+
+ AssistContext { config, sema, frange, source_file, trimmed_range }
+ }
+
+ pub(crate) fn db(&self) -> &RootDatabase {
+ self.sema.db
+ }
+
+ // NB, this ignores active selection.
+ pub(crate) fn offset(&self) -> TextSize {
+ self.frange.range.start()
+ }
+
+ pub(crate) fn file_id(&self) -> FileId {
+ self.frange.file_id
+ }
+
+ pub(crate) fn has_empty_selection(&self) -> bool {
+ self.trimmed_range.is_empty()
+ }
+
+ /// Returns the selected range trimmed of surrounding whitespace tokens, that is, the range is
+ /// snapped inward to the nearest enclosed tokens.
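+ ///
+ /// For example, if the user selects `  1 + 1  ` including the surrounding
+ /// spaces, the trimmed range covers only `1 + 1`.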
+ pub(crate) fn selection_trimmed(&self) -> TextRange {
+ self.trimmed_range
+ }
+
+ pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
+ self.source_file.syntax().token_at_offset(self.offset())
+ }
+ pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
+ self.token_at_offset().find(|it| it.kind() == kind)
+ }
+ pub(crate) fn find_token_at_offset<T: AstToken>(&self) -> Option<T> {
+ self.token_at_offset().find_map(T::cast)
+ }
+ pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> {
+ find_node_at_offset(self.source_file.syntax(), self.offset())
+ }
+ pub(crate) fn find_node_at_range<N: AstNode>(&self) -> Option<N> {
+ find_node_at_range(self.source_file.syntax(), self.trimmed_range)
+ }
+ pub(crate) fn find_node_at_offset_with_descend<N: AstNode>(&self) -> Option<N> {
+ self.sema.find_node_at_offset_with_descend(self.source_file.syntax(), self.offset())
+ }
+ /// Returns the element covered by the selection range; this excludes trailing whitespace in the selection.
+ pub(crate) fn covering_element(&self) -> SyntaxElement {
+ self.source_file.syntax().covering_element(self.selection_trimmed())
+ }
+}
+
+pub(crate) struct Assists {
+ file: FileId,
+ resolve: AssistResolveStrategy,
+ buf: Vec<Assist>,
+ allowed: Option<Vec<AssistKind>>,
+}
+
+impl Assists {
+ pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
+ Assists {
+ resolve,
+ file: ctx.frange.file_id,
+ buf: Vec::new(),
+ allowed: ctx.config.allowed.clone(),
+ }
+ }
+
+ pub(crate) fn finish(mut self) -> Vec<Assist> {
+ self.buf.sort_by_key(|assist| assist.target.len());
+ self.buf
+ }
+
+ pub(crate) fn add(
+ &mut self,
+ id: AssistId,
+ label: impl Into<String>,
+ target: TextRange,
+ f: impl FnOnce(&mut AssistBuilder),
+ ) -> Option<()> {
+ let mut f = Some(f);
+ self.add_impl(None, id, label.into(), target, &mut |it| f.take().unwrap()(it))
+ }
+
+ pub(crate) fn add_group(
+ &mut self,
+ group: &GroupLabel,
+ id: AssistId,
+ label: impl Into<String>,
+ target: TextRange,
+ f: impl FnOnce(&mut AssistBuilder),
+ ) -> Option<()> {
+ let mut f = Some(f);
+ self.add_impl(Some(group), id, label.into(), target, &mut |it| f.take().unwrap()(it))
+ }
+
+ fn add_impl(
+ &mut self,
+ group: Option<&GroupLabel>,
+ id: AssistId,
+ label: String,
+ target: TextRange,
+ f: &mut dyn FnMut(&mut AssistBuilder),
+ ) -> Option<()> {
+ if !self.is_allowed(&id) {
+ return None;
+ }
+
+ let mut trigger_signature_help = false;
+ let source_change = if self.resolve.should_resolve(&id) {
+ let mut builder = AssistBuilder::new(self.file);
+ f(&mut builder);
+ trigger_signature_help = builder.trigger_signature_help;
+ Some(builder.finish())
+ } else {
+ None
+ };
+
+ let label = Label::new(label);
+ let group = group.cloned();
+ self.buf.push(Assist { id, label, group, target, source_change, trigger_signature_help });
+ Some(())
+ }
+
+ fn is_allowed(&self, id: &AssistId) -> bool {
+ match &self.allowed {
+ Some(allowed) => allowed.iter().any(|kind| kind.contains(id.1)),
+ None => true,
+ }
+ }
+}
+
+pub(crate) struct AssistBuilder {
+ edit: TextEditBuilder,
+ file_id: FileId,
+ source_change: SourceChange,
+ trigger_signature_help: bool,
+
+ /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
+ mutated_tree: Option<TreeMutator>,
+}
+
+pub(crate) struct TreeMutator {
+ immutable: SyntaxNode,
+ mutable_clone: SyntaxNode,
+}
+
+impl TreeMutator {
+ pub(crate) fn new(immutable: &SyntaxNode) -> TreeMutator {
+ let immutable = immutable.ancestors().last().unwrap();
+ let mutable_clone = immutable.clone_for_update();
+ TreeMutator { immutable, mutable_clone }
+ }
+
+ pub(crate) fn make_mut<N: AstNode>(&self, node: &N) -> N {
+ N::cast(self.make_syntax_mut(node.syntax())).unwrap()
+ }
+
+ pub(crate) fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
+ let ptr = SyntaxNodePtr::new(node);
+ ptr.to_node(&self.mutable_clone)
+ }
+}
+
+impl AssistBuilder {
+ pub(crate) fn new(file_id: FileId) -> AssistBuilder {
+ AssistBuilder {
+ edit: TextEdit::builder(),
+ file_id,
+ source_change: SourceChange::default(),
+ trigger_signature_help: false,
+ mutated_tree: None,
+ }
+ }
+
+ pub(crate) fn edit_file(&mut self, file_id: FileId) {
+ self.commit();
+ self.file_id = file_id;
+ }
+
+ fn commit(&mut self) {
+ if let Some(tm) = self.mutated_tree.take() {
+ algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
+ }
+
+ let edit = mem::take(&mut self.edit).finish();
+ if !edit.is_empty() {
+ self.source_change.insert_source_edit(self.file_id, edit);
+ }
+ }
+
+ pub(crate) fn make_mut<N: AstNode>(&mut self, node: N) -> N {
+ self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
+ }
+ /// Returns a copy of the `node`, suitable for mutation.
+ ///
+ /// Syntax trees in rust-analyzer are typically immutable, and mutating
+ /// operations panic at runtime. However, it is possible to make a copy of
+ /// the tree and mutate the copy freely. Mutation is based on interior
+ /// mutability, and different nodes in the same tree see the same mutations.
+ ///
+ /// The typical pattern for an assist is to find specific nodes in the read
+ /// phase, and then get their mutable counterparts using `make_mut` in the
+ /// mutable state.
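+ ///
+ /// A minimal usage sketch of that pattern (the `ast::RecordExpr` node type is
+ /// just an illustrative choice):
+ ///
+ /// ```ignore
+ /// // Read phase: locate the node on the immutable tree.
+ /// let record = ctx.find_node_at_offset::<ast::RecordExpr>()?;
+ /// // Get its counterpart in the mutable copy and edit that one.
+ /// let record = builder.make_mut(record);
+ /// ```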
+ pub(crate) fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
+ self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
+ }
+
+ /// Remove specified `range` of text.
+ pub(crate) fn delete(&mut self, range: TextRange) {
+ self.edit.delete(range)
+ }
+ /// Insert specified `text` at the given `offset`
+ pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
+ self.edit.insert(offset, text.into())
+ }
+ /// Insert specified `snippet` at the given `offset`
+ pub(crate) fn insert_snippet(
+ &mut self,
+ _cap: SnippetCap,
+ offset: TextSize,
+ snippet: impl Into<String>,
+ ) {
+ self.source_change.is_snippet = true;
+ self.insert(offset, snippet);
+ }
+ /// Replaces specified `range` of text with a given string.
+ pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
+ self.edit.replace(range, replace_with.into())
+ }
+ /// Replaces specified `range` of text with a given `snippet`.
+ pub(crate) fn replace_snippet(
+ &mut self,
+ _cap: SnippetCap,
+ range: TextRange,
+ snippet: impl Into<String>,
+ ) {
+ self.source_change.is_snippet = true;
+ self.replace(range, snippet);
+ }
+ pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
+ algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
+ }
+ pub(crate) fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
+ let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
+ self.source_change.push_file_system_edit(file_system_edit);
+ }
+ pub(crate) fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
+ let file_system_edit = FileSystemEdit::MoveFile { src, dst };
+ self.source_change.push_file_system_edit(file_system_edit);
+ }
+ pub(crate) fn trigger_signature_help(&mut self) {
+ self.trigger_signature_help = true;
+ }
+
+ fn finish(mut self) -> SourceChange {
+ self.commit();
+ mem::take(&mut self.source_change)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
new file mode 100644
index 000000000..bfa9759ec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
@@ -0,0 +1,325 @@
+use hir::HirDisplay;
+use ide_db::syntax_helpers::node_ext::walk_ty;
+use syntax::ast::{self, AstNode, LetStmt, Param};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_explicit_type
+//
+// Specify type for a let binding.
+//
+// ```
+// fn main() {
+// let x$0 = 92;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let x: i32 = 92;
+// }
+// ```
+pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (ascribed_ty, expr, pat) = if let Some(let_stmt) = ctx.find_node_at_offset::<LetStmt>() {
+ let cursor_in_range = {
+ let eq_range = let_stmt.eq_token()?.text_range();
+ ctx.offset() < eq_range.start()
+ };
+ if !cursor_in_range {
+ cov_mark::hit!(add_explicit_type_not_applicable_if_cursor_after_equals);
+ return None;
+ }
+
+ (let_stmt.ty(), let_stmt.initializer(), let_stmt.pat()?)
+ } else if let Some(param) = ctx.find_node_at_offset::<Param>() {
+ if param.syntax().ancestors().nth(2).and_then(ast::ClosureExpr::cast).is_none() {
+ cov_mark::hit!(add_explicit_type_not_applicable_in_fn_param);
+ return None;
+ }
+ (param.ty(), None, param.pat()?)
+ } else {
+ return None;
+ };
+
+ let module = ctx.sema.scope(pat.syntax())?.module();
+ let pat_range = pat.syntax().text_range();
+
+ // Don't enable the assist if there is a type ascription without any placeholders
+ if let Some(ty) = &ascribed_ty {
+ let mut contains_infer_ty = false;
+ walk_ty(ty, &mut |ty| contains_infer_ty |= matches!(ty, ast::Type::InferType(_)));
+ if !contains_infer_ty {
+ cov_mark::hit!(add_explicit_type_not_applicable_if_ty_already_specified);
+ return None;
+ }
+ }
+
+ let ty = match (pat, expr) {
+ (ast::Pat::IdentPat(_), Some(expr)) => ctx.sema.type_of_expr(&expr)?,
+ (pat, _) => ctx.sema.type_of_pat(&pat)?,
+ }
+ .adjusted();
+
+ // Fully unresolved or unnameable types can't be annotated
+ if (ty.contains_unknown() && ty.type_arguments().count() == 0) || ty.is_closure() {
+ cov_mark::hit!(add_explicit_type_not_applicable_if_ty_not_inferred);
+ return None;
+ }
+
+ let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?;
+ acc.add(
+ AssistId("add_explicit_type", AssistKind::RefactorRewrite),
+ format!("Insert explicit type `{}`", inferred_type),
+ pat_range,
+ |builder| match ascribed_ty {
+ Some(ascribed_ty) => {
+ builder.replace(ascribed_ty.syntax().text_range(), inferred_type);
+ }
+ None => {
+ builder.insert(pat_range.end(), format!(": {}", inferred_type));
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn add_explicit_type_target() {
+ check_assist_target(add_explicit_type, r#"fn f() { let a$0 = 1; }"#, "a");
+ }
+
+ #[test]
+ fn add_explicit_type_simple() {
+ check_assist(
+ add_explicit_type,
+ r#"fn f() { let a$0 = 1; }"#,
+ r#"fn f() { let a: i32 = 1; }"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_simple_on_infer_ty() {
+ check_assist(
+ add_explicit_type,
+ r#"fn f() { let a$0: _ = 1; }"#,
+ r#"fn f() { let a: i32 = 1; }"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_simple_nested_infer_ty() {
+ check_assist(
+ add_explicit_type,
+ r#"
+//- minicore: option
+fn f() {
+ let a$0: Option<_> = Option::Some(1);
+}
+"#,
+ r#"
+fn f() {
+ let a: Option<i32> = Option::Some(1);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_macro_call_expr() {
+ check_assist(
+ add_explicit_type,
+ r"macro_rules! v { () => {0u64} } fn f() { let a$0 = v!(); }",
+ r"macro_rules! v { () => {0u64} } fn f() { let a: u64 = v!(); }",
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_for_fully_unresolved() {
+ cov_mark::check!(add_explicit_type_not_applicable_if_ty_not_inferred);
+ check_assist_not_applicable(add_explicit_type, r#"fn f() { let a$0 = None; }"#);
+ }
+
+ #[test]
+ fn add_explicit_type_applicable_for_partially_unresolved() {
+ check_assist(
+ add_explicit_type,
+ r#"
+ struct Vec<T, V> { t: T, v: V }
+ impl<T> Vec<T, Vec<ZZZ, i32>> {
+ fn new() -> Self {
+ panic!()
+ }
+ }
+ fn f() { let a$0 = Vec::new(); }"#,
+ r#"
+ struct Vec<T, V> { t: T, v: V }
+ impl<T> Vec<T, Vec<ZZZ, i32>> {
+ fn new() -> Self {
+ panic!()
+ }
+ }
+ fn f() { let a: Vec<_, Vec<_, i32>> = Vec::new(); }"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_closure_expr() {
+ check_assist_not_applicable(add_explicit_type, r#"fn f() { let a$0 = || {}; }"#);
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_ty_already_specified() {
+ cov_mark::check!(add_explicit_type_not_applicable_if_ty_already_specified);
+ check_assist_not_applicable(add_explicit_type, r#"fn f() { let a$0: i32 = 1; }"#);
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_cursor_after_equals_of_let() {
+ cov_mark::check!(add_explicit_type_not_applicable_if_cursor_after_equals);
+ check_assist_not_applicable(
+ add_explicit_type,
+ r#"fn f() {let a =$0 match 1 {2 => 3, 3 => 5};}"#,
+ )
+ }
+
+ /// https://github.com/rust-lang/rust-analyzer/issues/2922
+ #[test]
+ fn regression_issue_2922() {
+ check_assist(
+ add_explicit_type,
+ r#"
+fn main() {
+ let $0v = [0.0; 2];
+}
+"#,
+ r#"
+fn main() {
+ let v: [f64; 2] = [0.0; 2];
+}
+"#,
+ );
+ // note: this may break later if we add more consteval. it just needs to be something that our
+ // consteval engine doesn't understand
+ check_assist_not_applicable(
+ add_explicit_type,
+ r#"
+//- minicore: option
+
+fn main() {
+ let $0l = [0.0; Some(2).unwrap()];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn default_generics_should_not_be_added() {
+ check_assist(
+ add_explicit_type,
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let test$0 = Test { t: 23u8, k: 33 };
+}
+"#,
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let test: Test<i32> = Test { t: 23u8, k: 33 };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn type_should_be_added_after_pattern() {
+ // LetStmt = Attr* 'let' Pat (':' Type)? '=' initializer:Expr ';'
+ check_assist(
+ add_explicit_type,
+ r#"
+fn main() {
+ let $0test @ () = ();
+}
+"#,
+ r#"
+fn main() {
+ let test @ (): () = ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_inserts_coercions() {
+ check_assist(
+ add_explicit_type,
+ r#"
+//- minicore: coerce_unsized
+fn f() {
+ let $0x: *const [_] = &[3];
+}
+"#,
+ r#"
+fn f() {
+ let x: *const [i32] = &[3];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_not_applicable_fn_param() {
+ cov_mark::check!(add_explicit_type_not_applicable_in_fn_param);
+ check_assist_not_applicable(add_explicit_type, r#"fn f(x$0: ()) {}"#);
+ }
+
+ #[test]
+ fn add_explicit_type_ascribes_closure_param() {
+ check_assist(
+ add_explicit_type,
+ r#"
+fn f() {
+ |y$0| {
+ let x: i32 = y;
+ };
+}
+"#,
+ r#"
+fn f() {
+ |y: i32| {
+ let x: i32 = y;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_explicit_type_ascribes_closure_param_already_ascribed() {
+ check_assist(
+ add_explicit_type,
+ r#"
+//- minicore: option
+fn f() {
+ |mut y$0: Option<_>| {
+ y = Some(3);
+ };
+}
+"#,
+ r#"
+fn f() {
+ |mut y: Option<i32>| {
+ y = Some(3);
+ };
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
new file mode 100644
index 000000000..001f1e8bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
@@ -0,0 +1,164 @@
+use ide_db::syntax_helpers::node_ext::for_each_break_and_continue_expr;
+use syntax::{
+ ast::{self, AstNode, HasLoopBody},
+ T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_label_to_loop
+//
+// Adds a label to a loop.
+//
+// ```
+// fn main() {
+// loop$0 {
+// break;
+// continue;
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// 'l: loop {
+// break 'l;
+// continue 'l;
+// }
+// }
+// ```
+pub(crate) fn add_label_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let loop_kw = ctx.find_token_syntax_at_offset(T![loop])?;
+ let loop_expr = loop_kw.parent().and_then(ast::LoopExpr::cast)?;
+ if loop_expr.label().is_some() {
+ return None;
+ }
+
+ acc.add(
+ AssistId("add_label_to_loop", AssistKind::Generate),
+ "Add Label",
+ loop_expr.syntax().text_range(),
+ |builder| {
+ builder.insert(loop_kw.text_range().start(), "'l: ");
+
+ let loop_body = loop_expr.loop_body().and_then(|it| it.stmt_list());
+ for_each_break_and_continue_expr(
+ loop_expr.label(),
+ loop_body,
+ &mut |expr| match expr {
+ ast::Expr::BreakExpr(break_expr) => {
+ if let Some(break_token) = break_expr.break_token() {
+ builder.insert(break_token.text_range().end(), " 'l")
+ }
+ }
+ ast::Expr::ContinueExpr(continue_expr) => {
+ if let Some(continue_token) = continue_expr.continue_token() {
+ builder.insert(continue_token.text_range().end(), " 'l")
+ }
+ }
+ _ => {}
+ },
+ );
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_label() {
+ check_assist(
+ add_label_to_loop,
+ r#"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ }
+}"#,
+ r#"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn add_label_to_outer_loop() {
+ check_assist(
+ add_label_to_loop,
+ r#"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ loop {
+ break;
+ continue;
+ }
+ }
+}"#,
+ r#"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ loop {
+ break;
+ continue;
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn add_label_to_inner_loop() {
+ check_assist(
+ add_label_to_loop,
+ r#"
+fn main() {
+ loop {
+ break;
+ continue;
+ loop$0 {
+ break;
+ continue;
+ }
+ }
+}"#,
+ r#"
+fn main() {
+ loop {
+ break;
+ continue;
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn do_not_add_label_if_exists() {
+ check_assist_not_applicable(
+ add_label_to_loop,
+ r#"
+fn main() {
+ 'l: loop$0 {
+ break 'l;
+ continue 'l;
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
new file mode 100644
index 000000000..12213c845
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
@@ -0,0 +1,229 @@
+use syntax::ast::{self, AstNode, HasGenericParams, HasName};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_lifetime_to_type
+//
+// Adds a new lifetime to a struct, enum or union.
+//
+// ```
+// struct Point {
+// x: &$0u32,
+// y: u32,
+// }
+// ```
+// ->
+// ```
+// struct Point<'a> {
+// x: &'a u32,
+// y: u32,
+// }
+// ```
+pub(crate) fn add_lifetime_to_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ref_type_focused = ctx.find_node_at_offset::<ast::RefType>()?;
+ if ref_type_focused.lifetime().is_some() {
+ return None;
+ }
+
+ let node = ctx.find_node_at_offset::<ast::Adt>()?;
+ let has_lifetime = node
+ .generic_param_list()
+ .map_or(false, |gen_list| gen_list.lifetime_params().next().is_some());
+
+ if has_lifetime {
+ return None;
+ }
+
+ let ref_types = fetch_borrowed_types(&node)?;
+ let target = node.syntax().text_range();
+
+ acc.add(
+ AssistId("add_lifetime_to_type", AssistKind::Generate),
+ "Add lifetime",
+ target,
+ |builder| {
+ match node.generic_param_list() {
+ Some(gen_param) => {
+ if let Some(left_angle) = gen_param.l_angle_token() {
+ builder.insert(left_angle.text_range().end(), "'a, ");
+ }
+ }
+ None => {
+ if let Some(name) = node.name() {
+ builder.insert(name.syntax().text_range().end(), "<'a>");
+ }
+ }
+ }
+
+ for ref_type in ref_types {
+ if let Some(amp_token) = ref_type.amp_token() {
+ builder.insert(amp_token.text_range().end(), "'a ");
+ }
+ }
+ },
+ )
+}
+
+fn fetch_borrowed_types(node: &ast::Adt) -> Option<Vec<ast::RefType>> {
+ let ref_types: Vec<ast::RefType> = match node {
+ ast::Adt::Enum(enum_) => {
+ let variant_list = enum_.variant_list()?;
+ variant_list
+ .variants()
+ .filter_map(|variant| {
+ let field_list = variant.field_list()?;
+
+ find_ref_types_from_field_list(&field_list)
+ })
+ .flatten()
+ .collect()
+ }
+ ast::Adt::Struct(strukt) => {
+ let field_list = strukt.field_list()?;
+ find_ref_types_from_field_list(&field_list)?
+ }
+ ast::Adt::Union(un) => {
+ let record_field_list = un.record_field_list()?;
+ record_field_list
+ .fields()
+ .filter_map(|r_field| {
+ if let ast::Type::RefType(ref_type) = r_field.ty()? {
+ if ref_type.lifetime().is_none() {
+ return Some(ref_type);
+ }
+ }
+
+ None
+ })
+ .collect()
+ }
+ };
+
+ if ref_types.is_empty() {
+ None
+ } else {
+ Some(ref_types)
+ }
+}
+
+fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option<Vec<ast::RefType>> {
+ let ref_types: Vec<ast::RefType> = match field_list {
+ ast::FieldList::RecordFieldList(record_list) => record_list
+ .fields()
+ .filter_map(|f| {
+ if let ast::Type::RefType(ref_type) = f.ty()? {
+ if ref_type.lifetime().is_none() {
+ return Some(ref_type);
+ }
+ }
+
+ None
+ })
+ .collect(),
+ ast::FieldList::TupleFieldList(tuple_field_list) => tuple_field_list
+ .fields()
+ .filter_map(|f| {
+ if let ast::Type::RefType(ref_type) = f.ty()? {
+ if ref_type.lifetime().is_none() {
+ return Some(ref_type);
+ }
+ }
+
+ None
+ })
+ .collect(),
+ };
+
+ if ref_types.is_empty() {
+ None
+ } else {
+ Some(ref_types)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_lifetime_to_struct() {
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo { a: &$0i32 }"#,
+ r#"struct Foo<'a> { a: &'a i32 }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo { a: &$0i32, b: &usize }"#,
+ r#"struct Foo<'a> { a: &'a i32, b: &'a usize }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo { a: &$0i32, b: usize }"#,
+ r#"struct Foo<'a> { a: &'a i32, b: usize }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"struct Foo<T> { a: &$0T, b: usize }"#,
+ r#"struct Foo<'a, T> { a: &'a T, b: usize }"#,
+ );
+
+ check_assist_not_applicable(add_lifetime_to_type, r#"struct Foo<'a> { a: &$0'a i32 }"#);
+ check_assist_not_applicable(add_lifetime_to_type, r#"struct Foo { a: &'a$0 i32 }"#);
+ }
+
+ #[test]
+ fn add_lifetime_to_enum() {
+ check_assist(
+ add_lifetime_to_type,
+ r#"enum Foo { Bar { a: i32 }, Other, Tuple(u32, &$0u32)}"#,
+ r#"enum Foo<'a> { Bar { a: i32 }, Other, Tuple(u32, &'a u32)}"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"enum Foo { Bar { a: &$0i32 }}"#,
+ r#"enum Foo<'a> { Bar { a: &'a i32 }}"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"enum Foo<T> { Bar { a: &$0i32, b: &T }}"#,
+ r#"enum Foo<'a, T> { Bar { a: &'a i32, b: &'a T }}"#,
+ );
+
+ check_assist_not_applicable(
+ add_lifetime_to_type,
+ r#"enum Foo<'a> { Bar { a: &$0'a i32 }}"#,
+ );
+ check_assist_not_applicable(add_lifetime_to_type, r#"enum Foo { Bar, $0Misc }"#);
+ }
+
+ #[test]
+ fn add_lifetime_to_union() {
+ check_assist(
+ add_lifetime_to_type,
+ r#"union Foo { a: &$0i32 }"#,
+ r#"union Foo<'a> { a: &'a i32 }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"union Foo { a: &$0i32, b: &usize }"#,
+ r#"union Foo<'a> { a: &'a i32, b: &'a usize }"#,
+ );
+
+ check_assist(
+ add_lifetime_to_type,
+ r#"union Foo<T> { a: &$0T, b: usize }"#,
+ r#"union Foo<'a, T> { a: &'a T, b: usize }"#,
+ );
+
+ check_assist_not_applicable(add_lifetime_to_type, r#"struct Foo<'a> { a: &'a $0i32 }"#);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
new file mode 100644
index 000000000..c808c010c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -0,0 +1,1340 @@
+use hir::HasSource;
+use ide_db::{
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, traits::resolve_target_trait,
+};
+use syntax::ast::{self, make, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, render_snippet,
+ Cursor, DefaultMethods,
+ },
+ AssistId, AssistKind,
+};
+
+// Assist: add_impl_missing_members
+//
+// Adds scaffold for required impl members.
+//
+// ```
+// trait Trait<T> {
+// type X;
+// fn foo(&self) -> T;
+// fn bar(&self) {}
+// }
+//
+// impl Trait<u32> for () {$0
+//
+// }
+// ```
+// ->
+// ```
+// trait Trait<T> {
+// type X;
+// fn foo(&self) -> T;
+// fn bar(&self) {}
+// }
+//
+// impl Trait<u32> for () {
+// $0type X;
+//
+// fn foo(&self) -> u32 {
+// todo!()
+// }
+// }
+// ```
+pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ add_missing_impl_members_inner(
+ acc,
+ ctx,
+ DefaultMethods::No,
+ "add_impl_missing_members",
+ "Implement missing members",
+ )
+}
+
+// Assist: add_impl_default_members
+//
+// Adds scaffold for overriding default impl members.
+//
+// ```
+// trait Trait {
+// type X;
+// fn foo(&self);
+// fn bar(&self) {}
+// }
+//
+// impl Trait for () {
+// type X = ();
+// fn foo(&self) {}$0
+// }
+// ```
+// ->
+// ```
+// trait Trait {
+// type X;
+// fn foo(&self);
+// fn bar(&self) {}
+// }
+//
+// impl Trait for () {
+// type X = ();
+// fn foo(&self) {}
+//
+// $0fn bar(&self) {}
+// }
+// ```
+pub(crate) fn add_missing_default_members(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ add_missing_impl_members_inner(
+ acc,
+ ctx,
+ DefaultMethods::Only,
+ "add_impl_default_members",
+ "Implement default members",
+ )
+}
+
+fn add_missing_impl_members_inner(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ mode: DefaultMethods,
+ assist_id: &'static str,
+ label: &'static str,
+) -> Option<()> {
+ let _p = profile::span("add_missing_impl_members_inner");
+ let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
+ let target_scope = ctx.sema.scope(impl_def.syntax())?;
+ let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
+
+ let missing_items = filter_assoc_items(
+ &ctx.sema,
+ &ide_db::traits::get_missing_assoc_items(&ctx.sema, &impl_def),
+ mode,
+ );
+
+ if missing_items.is_empty() {
+ return None;
+ }
+
+ let target = impl_def.syntax().text_range();
+ acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| {
+ let missing_items = missing_items
+ .into_iter()
+ .map(|it| {
+ if ctx.sema.hir_file_for(it.syntax()).is_macro() {
+ if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
+ return it;
+ }
+ }
+ it.clone_for_update()
+ })
+ .collect();
+ let (new_impl_def, first_new_item) = add_trait_assoc_items_to_impl(
+ &ctx.sema,
+ missing_items,
+ trait_,
+ impl_def.clone(),
+ target_scope,
+ );
+ match ctx.config.snippet_cap {
+ None => builder.replace(target, new_impl_def.to_string()),
+ Some(cap) => {
+ let mut cursor = Cursor::Before(first_new_item.syntax());
+ let placeholder;
+ if let DefaultMethods::No = mode {
+ if let ast::AssocItem::Fn(func) = &first_new_item {
+ if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() {
+ if let Some(m) =
+ func.syntax().descendants().find_map(ast::MacroCall::cast)
+ {
+ if m.syntax().text() == "todo!()" {
+ placeholder = m;
+ cursor = Cursor::Replace(placeholder.syntax());
+ }
+ }
+ }
+ }
+ }
+ builder.replace_snippet(
+ cap,
+ target,
+ render_snippet(cap, new_impl_def.syntax(), cursor),
+ )
+ }
+ };
+ })
+}
+
+fn try_gen_trait_body(
+ ctx: &AssistContext<'_>,
+ func: &ast::Fn,
+ trait_: &hir::Trait,
+ impl_def: &ast::Impl,
+) -> Option<()> {
+ let trait_path = make::ext::ident_path(&trait_.name(ctx.db()).to_string());
+ let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
+ let adt = hir_ty.as_adt()?.source(ctx.db())?;
+ gen_trait_fn_body(func, &trait_path, &adt.value)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_add_missing_impl_members() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn foo(&self);
+ fn bar(&self);
+ fn baz(&self);
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+$0
+}"#,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn foo(&self);
+ fn bar(&self);
+ fn baz(&self);
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+
+ $0type Output;
+
+ const CONST: usize = 42;
+
+ fn foo(&self) {
+ todo!()
+ }
+
+ fn baz(&self) {
+ todo!()
+ }
+
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_copied_overriden_members() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ fn foo(&self);
+ fn bar(&self) -> bool { true }
+ fn baz(&self) -> u32 { 42 }
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+$0
+}"#,
+ r#"
+trait Foo {
+ fn foo(&self);
+ fn bar(&self) -> bool { true }
+ fn baz(&self) -> u32 { 42 }
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_empty_impl_def() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_impl_def_without_braces() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S$0"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn fill_in_type_params_1() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl Foo<u32> for S { $0 }"#,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl Foo<u32> for S {
+ fn foo(&self, t: u32) -> &u32 {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn fill_in_type_params_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl<U> Foo<U> for S { $0 }"#,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl<U> Foo<U> for S {
+ fn foo(&self, t: U) -> &U {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_cursor_after_empty_impl_def() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {}$0"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_qualify_path_1() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar;
+ trait Foo { fn foo(&self, bar: Bar); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar;
+ trait Foo { fn foo(&self, bar: Bar); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+ }
+}
+
+use foo::bar;
+
+struct S;
+impl bar::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+ }
+}
+
+use foo::bar;
+
+struct S;
+impl bar::Foo for S {
+ fn foo(&self, bar: bar::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_generic() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo { fn foo(&self, bar: Bar<u32>); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo { fn foo(&self, bar: Bar<u32>); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<u32>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_and_substitute_param() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo<T> { fn foo(&self, bar: Bar<T>); }
+}
+struct S;
+impl foo::Foo<u32> for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ trait Foo<T> { fn foo(&self, bar: Bar<T>); }
+}
+struct S;
+impl foo::Foo<u32> for S {
+ fn foo(&self, bar: foo::Bar<u32>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_substitute_param_no_qualify() {
+ // when substituting params, the substituted param should not be qualified!
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ trait Foo<T> { fn foo(&self, bar: T); }
+ pub struct Param;
+}
+struct Param;
+struct S;
+impl foo::Foo<Param> for S { $0 }"#,
+ r#"
+mod foo {
+ trait Foo<T> { fn foo(&self, bar: T); }
+ pub struct Param;
+}
+struct Param;
+struct S;
+impl foo::Foo<Param> for S {
+ fn foo(&self, bar: Param) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_associated_item() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ impl Bar<T> { type Assoc = u32; }
+ trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ impl Bar<T> { type Assoc = u32; }
+ trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<u32>::Assoc) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_nested() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub struct Baz;
+ trait Foo { fn foo(&self, bar: Bar<Baz>); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub struct Baz;
+ trait Foo { fn foo(&self, bar: Bar<Baz>); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<foo::Baz>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_fn_trait_notation() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub trait Fn<Args> { type Output; }
+ trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub trait Fn<Args> { type Output; }
+ trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: dyn Fn(u32) -> i32) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_empty_trait() {
+ check_assist_not_applicable(
+ add_missing_impl_members,
+ r#"
+trait Foo;
+struct S;
+impl Foo for S { $0 }"#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_unnamed_trait_members_and_default_methods() {
+ check_assist_not_applicable(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ fn (arg: u32);
+ fn valid(some: u32) -> bool { false }
+}
+struct S;
+impl Foo for S { $0 }"#,
+ )
+ }
+
+ #[test]
+ fn test_with_docstring_and_attrs() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+#[doc(alias = "test alias")]
+trait Foo {
+ /// doc string
+ type Output;
+
+ #[must_use]
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {}$0"#,
+ r#"
+#[doc(alias = "test alias")]
+trait Foo {
+ /// doc string
+ type Output;
+
+ #[must_use]
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {
+ $0type Output;
+
+ fn foo(&self) {
+ todo!()
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_default_methods() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn valid(some: u32) -> bool { false }
+ fn foo(some: u32) -> bool;
+}
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+
+ fn valid(some: u32) -> bool { false }
+ fn foo(some: u32) -> bool;
+}
+struct S;
+impl Foo for S {
+ $0fn valid(some: u32) -> bool { false }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_generic_single_default_parameter() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T = Self> {
+ fn bar(&self, other: &T);
+}
+
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo<T = Self> {
+ fn bar(&self, other: &T);
+}
+
+struct S;
+impl Foo for S {
+ fn bar(&self, other: &Self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_generic_default_parameter_is_second() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T1, T2 = Self> {
+ fn bar(&self, this: &T1, that: &T2);
+}
+
+struct S<T>;
+impl Foo<T> for S<T> { $0 }"#,
+ r#"
+trait Foo<T1, T2 = Self> {
+ fn bar(&self, this: &T1, that: &T2);
+}
+
+struct S<T>;
+impl Foo<T> for S<T> {
+ fn bar(&self, this: &T, that: &Self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_type_bounds_are_removed() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ type Ty: Copy + 'static;
+}
+
+impl Tr for ()$0 {
+}"#,
+ r#"
+trait Tr {
+ type Ty: Copy + 'static;
+}
+
+impl Tr for () {
+ $0type Ty;
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_whitespace_fixup_preserves_bad_tokens() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for ()$0 {
+ +++
+}"#,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for () {
+ fn foo() {
+ ${0:todo!()}
+ }
+ +++
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_whitespace_fixup_preserves_comments() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for ()$0 {
+ // very important
+}"#,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for () {
+ fn foo() {
+ ${0:todo!()}
+ }
+ // very important
+}"#,
+ )
+ }
+
+ #[test]
+ fn weird_path() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Test {
+ fn foo(&self, x: crate)
+}
+impl Test for () {
+ $0
+}
+"#,
+ r#"
+trait Test {
+ fn foo(&self, x: crate)
+}
+impl Test for () {
+ fn foo(&self, x: crate) {
+ ${0:todo!()}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn missing_generic_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<BAR> {
+ fn foo(&self, bar: BAR);
+}
+impl Foo for () {
+ $0
+}
+"#,
+ r#"
+trait Foo<BAR> {
+ fn foo(&self, bar: BAR);
+}
+impl Foo for () {
+ fn foo(&self, bar: BAR) {
+ ${0:todo!()}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn does_not_requalify_self_as_crate() {
+ check_assist(
+ add_missing_default_members,
+ r"
+struct Wrapper<T>(T);
+
+trait T {
+ fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+
+impl T for () {
+ $0
+}
+",
+ r"
+struct Wrapper<T>(T);
+
+trait T {
+ fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+
+impl T for () {
+ $0fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_default_body_generation() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+//- minicore: default
+struct Foo(usize);
+
+impl Default for Foo {
+ $0
+}
+"#,
+ r#"
+struct Foo(usize);
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self(Default::default())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_from_macro() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+macro_rules! foo {
+ () => {
+ trait FooB {
+ fn foo<'lt>(&'lt self) {}
+ }
+ }
+}
+foo!();
+struct Foo(usize);
+
+impl FooB for Foo {
+ $0
+}
+"#,
+ r#"
+macro_rules! foo {
+ () => {
+ trait FooB {
+ fn foo<'lt>(&'lt self) {}
+ }
+ }
+}
+foo!();
+struct Foo(usize);
+
+impl FooB for Foo {
+ $0fn foo< 'lt>(& 'lt self){}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_type_when_trait_with_same_name_in_scope() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl { $0 }"#,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl {
+ fn reproduce(&self, foo: <T as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_qualified() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl {
+ fn reproduce(&self, foo: <std::string::String as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_ambiguous() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_foreign() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as bar::Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_transform_path_in_path_expr() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl { $0 }"#,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl {
+ $0fn foo() -> bool {
+ match <u32 as Const>::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_default_partial_eq() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+//- minicore: eq
+struct SomeStruct {
+ data: usize,
+ field: (usize, usize),
+}
+impl PartialEq for SomeStruct {$0}
+"#,
+ r#"
+struct SomeStruct {
+ data: usize,
+ field: (usize, usize),
+}
+impl PartialEq for SomeStruct {
+ $0fn ne(&self, other: &Self) -> bool {
+ !self.eq(other)
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
new file mode 100644
index 000000000..b16f6fe03
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -0,0 +1,1709 @@
+use std::iter::{self, Peekable};
+
+use either::Either;
+use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics};
+use ide_db::RootDatabase;
+use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
+use itertools::Itertools;
+use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
+
+use crate::{
+ utils::{self, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: add_missing_match_arms
+//
+// Adds missing clauses to a `match` expression.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// $0
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// $0Action::Move { distance } => todo!(),
+// Action::Stop => todo!(),
+// }
+// }
+// ```
+pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let match_expr = ctx.find_node_at_offset_with_descend::<ast::MatchExpr>()?;
+ let match_arm_list = match_expr.match_arm_list()?;
+ let target_range = ctx.sema.original_range(match_expr.syntax()).range;
+
+    if cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list).is_none() {
+ let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
+ let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
+ if cursor_in_range {
+ cov_mark::hit!(not_applicable_outside_of_range_right);
+ return None;
+ }
+ }
+
+ let expr = match_expr.expr()?;
+
+ let mut has_catch_all_arm = false;
+
+ let top_lvl_pats: Vec<_> = match_arm_list
+ .arms()
+ .filter_map(|arm| Some((arm.pat()?, arm.guard().is_some())))
+ .flat_map(|(pat, has_guard)| {
+ match pat {
+ // Special case OrPat as separate top-level pats
+ Pat::OrPat(or_pat) => Either::Left(or_pat.pats()),
+ _ => Either::Right(iter::once(pat)),
+ }
+ .map(move |pat| (pat, has_guard))
+ })
+ .map(|(pat, has_guard)| {
+ has_catch_all_arm |= !has_guard && matches!(pat, Pat::WildcardPat(_));
+ pat
+ })
+        // Exclude top-level wildcards so that they are expanded by this assist; this retains the status quo from #8129.
+ .filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
+ .collect();
+
+ let module = ctx.sema.scope(expr.syntax())?.module();
+ let (mut missing_pats, is_non_exhaustive): (
+ Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
+ bool,
+ ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
+ let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
+
+ let variants = enum_def.variants(ctx.db());
+
+ let missing_pats = variants
+ .into_iter()
+ .filter_map(|variant| {
+ Some((
+ build_pat(ctx.db(), module, variant)?,
+ variant.should_be_hidden(ctx.db(), module.krate()),
+ ))
+ })
+ .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
+
+ let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option().map(lift_enum);
+ let missing_pats: Box<dyn Iterator<Item = _>> = if Some(enum_def) == option_enum {
+ // Match `Some` variant first.
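+        // `Option` declares `None` before `Some`, so reversing the variants puts `Some` first.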
+ cov_mark::hit!(option_order);
+ Box::new(missing_pats.rev())
+ } else {
+ Box::new(missing_pats)
+ };
+ (missing_pats.peekable(), is_non_exhaustive)
+ } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) {
+ let is_non_exhaustive =
+ enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate()));
+
+ let mut n_arms = 1;
+ let variants_of_enums: Vec<Vec<ExtendedVariant>> = enum_defs
+ .into_iter()
+ .map(|enum_def| enum_def.variants(ctx.db()))
+ .inspect(|variants| n_arms *= variants.len())
+ .collect();
+
+ // When calculating the match arms for a tuple of enums, we want
+ // to create a match arm for each possible combination of enum
+ // values. The `multi_cartesian_product` method transforms
+ // Vec<Vec<EnumVariant>> into Vec<(EnumVariant, .., EnumVariant)>
+ // where each tuple represents a proposed match arm.
+
+    // The number of arms grows very quickly even for a small tuple of large enums.
+ // We skip the assist beyond an arbitrary threshold.
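+    // For example, a pair of enums with 20 variants each would already produce 20 * 20 = 400 arms.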
+ if n_arms > 256 {
+ return None;
+ }
+ let missing_pats = variants_of_enums
+ .into_iter()
+ .multi_cartesian_product()
+ .inspect(|_| cov_mark::hit!(add_missing_match_arms_lazy_computation))
+ .map(|variants| {
+ let is_hidden = variants
+ .iter()
+ .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+ let patterns =
+ variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant));
+
+ (ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
+ })
+ .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
+ ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+ } else {
+ return None;
+ };
+
+ let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm;
+
+ if !needs_catch_all_arm && missing_pats.peek().is_none() {
+ return None;
+ }
+
+ acc.add(
+ AssistId("add_missing_match_arms", AssistKind::QuickFix),
+ "Fill match arms",
+ target_range,
+ |builder| {
+ let new_match_arm_list = match_arm_list.clone_for_update();
+ let missing_arms = missing_pats
+ .map(|(pat, hidden)| {
+ (make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden)
+ })
+ .map(|(it, hidden)| (it.clone_for_update(), hidden));
+
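+            // If an existing catch-all arm (`_ => ...`) has an empty body, remove it so the generated arms
+            // take its place; a catch-all with a non-empty body is kept as-is.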
+ let catch_all_arm = new_match_arm_list
+ .arms()
+ .find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
+ if let Some(arm) = catch_all_arm {
+ let is_empty_expr = arm.expr().map_or(true, |e| match e {
+ ast::Expr::BlockExpr(b) => {
+ b.statements().next().is_none() && b.tail_expr().is_none()
+ }
+ ast::Expr::TupleExpr(t) => t.fields().next().is_none(),
+ _ => false,
+ });
+ if is_empty_expr {
+ arm.remove();
+ } else {
+ cov_mark::hit!(add_missing_match_arms_empty_expr);
+ }
+ }
+ let mut first_new_arm = None;
+ for (arm, hidden) in missing_arms {
+ if hidden {
+ needs_catch_all_arm = !has_catch_all_arm;
+ } else {
+ first_new_arm.get_or_insert_with(|| arm.clone());
+ new_match_arm_list.add_arm(arm);
+ }
+ }
+ if needs_catch_all_arm && !has_catch_all_arm {
+ cov_mark::hit!(added_wildcard_pattern);
+ let arm = make::match_arm(
+ iter::once(make::wildcard_pat().into()),
+ None,
+ make::ext::expr_todo(),
+ )
+ .clone_for_update();
+ first_new_arm.get_or_insert_with(|| arm.clone());
+ new_match_arm_list.add_arm(arm);
+ }
+
+ let old_range = ctx.sema.original_range(match_arm_list.syntax()).range;
+ match (first_new_arm, ctx.config.snippet_cap) {
+ (Some(first_new_arm), Some(cap)) => {
+ let extend_lifetime;
+ let cursor =
+ match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast)
+ {
+ Some(it) => {
+ extend_lifetime = it.syntax().clone();
+ Cursor::Replace(&extend_lifetime)
+ }
+ None => Cursor::Before(first_new_arm.syntax()),
+ };
+ let snippet = render_snippet(cap, new_match_arm_list.syntax(), cursor);
+ builder.replace_snippet(cap, old_range, snippet);
+ }
+ _ => builder.replace(old_range, new_match_arm_list.to_string()),
+ }
+ },
+ )
+}
+
+fn cursor_at_trivial_match_arm_list(
+ ctx: &AssistContext<'_>,
+ match_expr: &MatchExpr,
+ match_arm_list: &MatchArmList,
+) -> Option<()> {
+ // match x { $0 }
+    if match_arm_list.arms().next().is_none() {
+ cov_mark::hit!(add_missing_match_arms_empty_body);
+ return Some(());
+ }
+
+ // match x {
+ // bar => baz,
+ // $0
+ // }
+ if let Some(last_arm) = match_arm_list.arms().last() {
+ let last_arm_range = last_arm.syntax().text_range();
+ let match_expr_range = match_expr.syntax().text_range();
+ if last_arm_range.end() <= ctx.offset() && ctx.offset() < match_expr_range.end() {
+ cov_mark::hit!(add_missing_match_arms_end_of_last_arm);
+ return Some(());
+ }
+ }
+
+ // match { _$0 => {...} }
+ let wild_pat = ctx.find_node_at_offset_with_descend::<ast::WildcardPat>()?;
+ let arm = wild_pat.syntax().parent().and_then(ast::MatchArm::cast)?;
+ let arm_match_expr = arm.syntax().ancestors().nth(2).and_then(ast::MatchExpr::cast)?;
+ if arm_match_expr == *match_expr {
+ cov_mark::hit!(add_missing_match_arms_trivial_arm);
+ return Some(());
+ }
+
+ None
+}
+
+fn is_variant_missing(existing_pats: &[Pat], var: &Pat) -> bool {
+ !existing_pats.iter().any(|pat| does_pat_match_variant(pat, var))
+}
+
+// FIXME: this is still somewhat limited, use hir_ty::diagnostics::match_check?
+fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool {
+ match (pat, var) {
+ (Pat::WildcardPat(_), _) => true,
+ (Pat::TuplePat(tpat), Pat::TuplePat(tvar)) => {
+ tpat.fields().zip(tvar.fields()).all(|(p, v)| does_pat_match_variant(&p, &v))
+ }
+ _ => utils::does_pat_match_variant(pat, var),
+ }
+}
+
+#[derive(Eq, PartialEq, Clone, Copy)]
+enum ExtendedEnum {
+ Bool,
+ Enum(hir::Enum),
+}
+
+#[derive(Eq, PartialEq, Clone, Copy)]
+enum ExtendedVariant {
+ True,
+ False,
+ Variant(hir::Variant),
+}
+
+impl ExtendedVariant {
+ fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
+ match self {
+ ExtendedVariant::Variant(var) => {
+ var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
+ }
+ _ => false,
+ }
+ }
+}
+
+fn lift_enum(e: hir::Enum) -> ExtendedEnum {
+ ExtendedEnum::Enum(e)
+}
+
+impl ExtendedEnum {
+ fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool {
+ match self {
+ ExtendedEnum::Enum(e) => {
+ e.attrs(db).by_key("non_exhaustive").exists() && e.module(db).krate() != krate
+ }
+ _ => false,
+ }
+ }
+
+ fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
+ match self {
+ ExtendedEnum::Enum(e) => {
+ e.variants(db).into_iter().map(ExtendedVariant::Variant).collect::<Vec<_>>()
+ }
+ ExtendedEnum::Bool => {
+ Vec::<ExtendedVariant>::from([ExtendedVariant::True, ExtendedVariant::False])
+ }
+ }
+ }
+}
+
+fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
+ sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+ Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
+ _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+ })
+}
+
+fn resolve_tuple_of_enum_def(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+) -> Option<Vec<ExtendedEnum>> {
+ sema.type_of_expr(expr)?
+ .adjusted()
+ .tuple_fields(sema.db)
+ .iter()
+ .map(|ty| {
+ ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+ Some(Adt::Enum(e)) => Some(lift_enum(e)),
+ // For now we only handle expansion for a tuple of enums. Here
+ // we map non-enum items to None and rely on `collect` to
+ // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
+ _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+ })
+ })
+ .collect()
+}
+
+fn build_pat(db: &RootDatabase, module: hir::Module, var: ExtendedVariant) -> Option<ast::Pat> {
+ match var {
+ ExtendedVariant::Variant(var) => {
+ let path = mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var))?);
+
+ // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
+ let pat: ast::Pat = match var.source(db)?.value.kind() {
+ ast::StructKind::Tuple(field_list) => {
+ let pats =
+ iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count());
+ make::tuple_struct_pat(path, pats).into()
+ }
+ ast::StructKind::Record(field_list) => {
+ let pats = field_list
+ .fields()
+ .map(|f| make::ext::simple_ident_pat(f.name().unwrap()).into());
+ make::record_pat(path, pats).into()
+ }
+ ast::StructKind::Unit => make::path_pat(path),
+ };
+
+ Some(pat)
+ }
+ ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
+ ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{
+ check_assist, check_assist_not_applicable, check_assist_target, check_assist_unresolved,
+ };
+
+ use super::add_missing_match_arms;
+
+ #[test]
+ fn all_match_arms_provided() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A {
+ As,
+ Bs{x:i32, y:Option<i32>},
+ Cs(i32, Option<i32>),
+}
+fn main() {
+ match A::As$0 {
+ A::As,
+ A::Bs{x,y:Some(_)} => {}
+ A::Cs(_, Some(_)) => {}
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_outside_of_range_left() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A { X, Y }
+
+fn foo(a: A) {
+ $0 match a {
+ A::X => { }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_outside_of_range_right() {
+ cov_mark::check!(not_applicable_outside_of_range_right);
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A { X, Y }
+
+fn foo(a: A) {
+ match a {$0
+ A::X => { }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn all_boolean_match_arms_provided() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match a$0 {
+ true => {}
+ false => {}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn tuple_of_non_enum() {
+ // for now this case is not handled, although it potentially could be
+ // in the future
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+fn main() {
+ match (0, false)$0 {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_boolean() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match a$0 {
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match a {
+ $0true => todo!(),
+ false => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn partial_fill_boolean() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match a$0 {
+ true => {}
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match a {
+ true => {}
+ $0false => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn all_boolean_tuple_arms_provided() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match (a, a)$0 {
+ (true, true) => {}
+ (true, false) => {}
+ (false, true) => {}
+ (false, false) => {}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn fill_boolean_tuple() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match (a, a)$0 {
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match (a, a) {
+ $0(true, true) => todo!(),
+ (true, false) => todo!(),
+ (false, true) => todo!(),
+ (false, false) => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn partial_fill_boolean_tuple() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(a: bool) {
+ match (a, a)$0 {
+ (false, true) => {}
+ }
+}
+"#,
+ r#"
+fn foo(a: bool) {
+ match (a, a) {
+ (false, true) => {}
+ $0(true, true) => todo!(),
+ (true, false) => todo!(),
+ (false, false) => todo!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn partial_fill_record_tuple() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A {
+ As,
+ Bs { x: i32, y: Option<i32> },
+ Cs(i32, Option<i32>),
+}
+fn main() {
+ match A::As$0 {
+ A::Bs { x, y: Some(_) } => {}
+ A::Cs(_, Some(_)) => {}
+ }
+}
+"#,
+ r#"
+enum A {
+ As,
+ Bs { x: i32, y: Option<i32> },
+ Cs(i32, Option<i32>),
+}
+fn main() {
+ match A::As {
+ A::Bs { x, y: Some(_) } => {}
+ A::Cs(_, Some(_)) => {}
+ $0A::As => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill_option() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn main() {
+ match None$0 {
+ None => {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ match None {
+ None => {}
+ Some(${0:_}) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill_or_pat() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As$0 {
+ A::Cs(_) | A::Bs => {}
+ }
+}
+"#,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As {
+ A::Cs(_) | A::Bs => {}
+ $0A::As => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs, Ds(String), Es(B) }
+enum B { Xs, Ys }
+fn main() {
+ match A::As$0 {
+ A::Bs if 0 < 1 => {}
+ A::Ds(_value) => { let x = 1; }
+ A::Es(B::Xs) => (),
+ }
+}
+"#,
+ r#"
+enum A { As, Bs, Cs, Ds(String), Es(B) }
+enum B { Xs, Ys }
+fn main() {
+ match A::As {
+ A::Bs if 0 < 1 => {}
+ A::Ds(_value) => { let x = 1; }
+ A::Es(B::Xs) => (),
+ $0A::As => todo!(),
+ A::Cs => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn partial_fill_bind_pat() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As$0 {
+ A::As(_) => {}
+ a @ A::Bs(_) => {}
+ }
+}
+"#,
+ r#"
+enum A { As, Bs, Cs(Option<i32>) }
+fn main() {
+ match A::As {
+ A::As(_) => {}
+ a @ A::Bs(_) => {}
+ A::Cs(${0:_}) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_empty_body() {
+ cov_mark::check!(add_missing_match_arms_empty_body);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } }
+
+fn main() {
+ let a = A::As;
+ match a {$0}
+}
+"#,
+ r#"
+enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } }
+
+fn main() {
+ let a = A::As;
+ match a {
+ $0A::As => todo!(),
+ A::Bs => todo!(),
+ A::Cs(_) => todo!(),
+ A::Ds(_, _) => todo!(),
+ A::Es { x, y } => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_end_of_last_arm() {
+ cov_mark::check!(add_missing_match_arms_end_of_last_arm);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ (A::Two, B::One) => {},$0
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ (A::Two, B::One) => {},
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a$0, b) {}
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::One) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_ref() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (&a$0, &b) {}
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (&a, &b) {
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::One) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_partial() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a$0, b) {
+ (A::Two, B::One) => {}
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a, b) {
+ (A::Two, B::One) => {}
+ $0(A::One, B::One) => todo!(),
+ (A::One, B::Two) => todo!(),
+ (A::Two, B::Two) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_partial_with_wildcards() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn main() {
+ let a = Some(1);
+ let b = Some(());
+ match (a$0, b) {
+ (Some(_), _) => {}
+ (None, Some(_)) => {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let a = Some(1);
+ let b = Some(());
+ match (a, b) {
+ (Some(_), _) => {}
+ (None, Some(_)) => {}
+ $0(None, None) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_partial_with_deep_pattern() {
+        // FIXME: cannot handle deep patterns
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn main() {
+ match $0Some(true) {
+ Some(true) => {}
+ None => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_tuple_of_enum_not_applicable() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+enum B { One, Two }
+
+fn main() {
+ let a = A::One;
+ let b = B::One;
+ match (a$0, b) {
+ (A::Two, B::One) => {}
+ (A::One, B::One) => {}
+ (A::One, B::Two) => {}
+ (A::Two, B::Two) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_single_element_tuple_of_enum() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+
+fn main() {
+ let a = A::One;
+ match (a$0, ) {
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+
+fn main() {
+ let a = A::One;
+ match (a, ) {
+ $0(A::One,) => todo!(),
+ (A::Two,) => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_match_arm_refs() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { As }
+
+fn foo(a: &A) {
+ match a$0 {
+ }
+}
+"#,
+ r#"
+enum A { As }
+
+fn foo(a: &A) {
+ match a {
+ $0A::As => todo!(),
+ }
+}
+"#,
+ );
+
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A {
+ Es { x: usize, y: usize }
+}
+
+fn foo(a: &mut A) {
+ match a$0 {
+ }
+}
+"#,
+ r#"
+enum A {
+ Es { x: usize, y: usize }
+}
+
+fn foo(a: &mut A) {
+ match a {
+ $0A::Es { x, y } => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_target_simple() {
+ check_assist_target(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X$0 {}
+}
+"#,
+ "match E::X {}",
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_target_complex() {
+ check_assist_target(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X$0 {
+ E::X => {}
+ }
+}
+"#,
+ "match E::X {
+ E::X => {}
+ }",
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_trivial_arm() {
+ cov_mark::check!(add_missing_match_arms_trivial_arm);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X {
+ $0_ => {}
+ }
+}
+"#,
+ r#"
+enum E { X, Y }
+
+fn main() {
+ match E::X {
+ $0E::X => todo!(),
+ E::Y => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wildcard_inside_expression_not_applicable() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+enum E { X, Y }
+
+fn foo(e : E) {
+ match e {
+ _ => {
+ println!("1");$0
+ println!("2");
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_qualifies_path() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+mod foo { pub enum E { X, Y } }
+use foo::E::X;
+
+fn main() {
+ match X {
+ $0
+ }
+}
+"#,
+ r#"
+mod foo { pub enum E { X, Y } }
+use foo::E::X;
+
+fn main() {
+ match X {
+ $0X => todo!(),
+ foo::E::Y => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_preserves_comments() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a $0 {
+ // foo bar baz
+ A::One => {}
+ // This is where the rest should be
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a {
+ // foo bar baz
+ A::One => {}
+ $0A::Two => todo!(),
+ // This is where the rest should be
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_preserves_comments_empty() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a {
+ // foo bar baz$0
+ }
+}
+"#,
+ r#"
+enum A { One, Two }
+fn foo(a: A) {
+ match a {
+ $0A::One => todo!(),
+ A::Two => todo!(),
+ // foo bar baz
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_missing_match_arms_placeholder() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two, }
+fn foo(a: A) {
+ match a$0 {
+ _ => (),
+ }
+}
+"#,
+ r#"
+enum A { One, Two, }
+fn foo(a: A) {
+ match a {
+ $0A::One => todo!(),
+ A::Two => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn option_order() {
+ cov_mark::check!(option_order);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- minicore: option
+fn foo(opt: Option<i32>) {
+ match opt$0 {
+ }
+}
+"#,
+ r#"
+fn foo(opt: Option<i32>) {
+ match opt {
+ Some(${0:_}) => todo!(),
+ None => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn works_inside_macro_call() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+macro_rules! m { ($expr:expr) => {$expr}}
+enum Test {
+ A,
+ B,
+ C,
+}
+
+fn foo(t: Test) {
+ m!(match t$0 {});
+}"#,
+ r#"
+macro_rules! m { ($expr:expr) => {$expr}}
+enum Test {
+ A,
+ B,
+ C,
+}
+
+fn foo(t: Test) {
+ m!(match t {
+ $0Test::A => todo!(),
+ Test::B => todo!(),
+ Test::C => todo!(),
+});
+}"#,
+ );
+ }
+
+ #[test]
+ fn lazy_computation() {
+ // Computing a single missing arm is enough to determine applicability of the assist.
+ cov_mark::check_count!(add_missing_match_arms_lazy_computation, 1);
+ check_assist_unresolved(
+ add_missing_match_arms,
+ r#"
+enum A { One, Two, }
+fn foo(tuple: (A, A)) {
+ match $0tuple {};
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_comma_before_new_arms() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(t: bool) {
+ match $0t {
+ true => 1 + 2
+ }
+}"#,
+ r#"
+fn foo(t: bool) {
+ match t {
+ true => 1 + 2,
+ $0false => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn does_not_add_extra_comma() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(t: bool) {
+ match $0t {
+ true => 1 + 2,
+ }
+}"#,
+ r#"
+fn foo(t: bool) {
+ match t {
+ true => 1 + 2,
+ $0false => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn does_not_remove_catch_all_with_non_empty_expr() {
+ cov_mark::check!(add_missing_match_arms_empty_expr);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+fn foo(t: bool) {
+ match $0t {
+ _ => 1 + 2,
+ }
+}"#,
+ r#"
+fn foo(t: bool) {
+ match t {
+ _ => 1 + 2,
+ $0true => todo!(),
+ false => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_hidden_variants() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+pub enum E { A, #[doc(hidden)] B, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_hidden_variants_tuple() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: (bool, ::e::E)) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+pub enum E { A, #[doc(hidden)] B, }
+"#,
+ r#"
+fn foo(t: (bool, ::e::E)) {
+ match t {
+ $0(true, e::E::A) => todo!(),
+ (false, e::E::A) => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_with_only_hidden_variants() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_wildcard_when_hidden_variants_are_explicit() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }
+"#,
+ );
+ }
+
+ // FIXME: I don't think the assist should be applicable in this case
+ #[test]
+ fn does_not_fill_wildcard_with_wildcard() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ _ => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_with_explicit_matches() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => todo!(),
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ e::E::A => todo!(),
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_without_matches() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, }
+"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_with_doc_hidden() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, #[doc(hidden)] B }"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fills_wildcard_on_non_exhaustive_with_doc_hidden_with_explicit_arms() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ e::E::A => todo!(),
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, #[doc(hidden)] B }"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ e::E::A => todo!(),
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn fill_wildcard_with_partial_wildcard() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E, b: bool) {
+ match $0t {
+ _ if b => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }"#,
+ r#"
+fn foo(t: ::e::E, b: bool) {
+ match t {
+ _ if b => todo!(),
+ ${0:_} => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E, b: bool) {
+ match $0t {
+ _ if b => todo!(),
+ _ => todo!(),
+ }
+}
+//- /e.rs crate:e
+pub enum E { #[doc(hidden)] A, }"#,
+ r#"
+fn foo(t: ::e::E, b: bool) {
+ match t {
+ _ if b => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn non_exhaustive_doc_hidden_tuple_fills_wildcard() {
+ cov_mark::check!(added_wildcard_pattern);
+ check_assist(
+ add_missing_match_arms,
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(t: ::e::E) {
+ match $0t {
+ }
+}
+//- /e.rs crate:e
+#[non_exhaustive]
+pub enum E { A, #[doc(hidden)] B, }"#,
+ r#"
+fn foo(t: ::e::E) {
+ match t {
+ $0e::E::A => todo!(),
+ _ => todo!(),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignores_doc_hidden_for_crate_local_enums() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match $0t {
+ }
+}"#,
+ r#"
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match t {
+ $0E::A => todo!(),
+ E::B => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn ignores_non_exhaustive_for_crate_local_enums() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+#[non_exhaustive]
+enum E { A, B, }
+
+fn foo(t: E) {
+ match $0t {
+ }
+}"#,
+ r#"
+#[non_exhaustive]
+enum E { A, B, }
+
+fn foo(t: E) {
+ match t {
+ $0E::A => todo!(),
+ E::B => todo!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn ignores_doc_hidden_and_non_exhaustive_for_crate_local_enums() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+#[non_exhaustive]
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match $0t {
+ }
+}"#,
+ r#"
+#[non_exhaustive]
+enum E { A, #[doc(hidden)] B, }
+
+fn foo(t: E) {
+ match t {
+ $0E::A => todo!(),
+ E::B => todo!(),
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
new file mode 100644
index 000000000..f858d7a15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
@@ -0,0 +1,447 @@
+use hir::HirDisplay;
+use syntax::{ast, match_ast, AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_return_type
+//
+// Adds the return type to a function or closure, inferred from its tail expression, if no return type is
+// specified. This assist is usable in a function's or closure's tail expression or return type position.
+//
+// ```
+// fn foo() { 4$02i32 }
+// ```
+// ->
+// ```
+// fn foo() -> i32 { 42i32 }
+// ```
+pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (fn_type, tail_expr, builder_edit_pos) = extract_tail(ctx)?;
+ let module = ctx.sema.scope(tail_expr.syntax())?.module();
+ let ty = ctx.sema.type_of_expr(&peel_blocks(tail_expr.clone()))?.original();
+ if ty.is_unit() {
+ return None;
+ }
+ let ty = ty.display_source_code(ctx.db(), module.into()).ok()?;
+
+ acc.add(
+ AssistId("add_return_type", AssistKind::RefactorRewrite),
+ match fn_type {
+ FnType::Function => "Add this function's return type",
+ FnType::Closure { .. } => "Add this closure's return type",
+ },
+ tail_expr.syntax().text_range(),
+ |builder| {
+ match builder_edit_pos {
+ InsertOrReplace::Insert(insert_pos, needs_whitespace) => {
+                    let preceding_whitespace = if needs_whitespace { " " } else { "" };
+                    builder.insert(insert_pos, &format!("{}-> {} ", preceding_whitespace, ty))
+ }
+ InsertOrReplace::Replace(text_range) => {
+ builder.replace(text_range, &format!("-> {}", ty))
+ }
+ }
+ if let FnType::Closure { wrap_expr: true } = fn_type {
+ cov_mark::hit!(wrap_closure_non_block_expr);
+ // `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block
+ builder.replace(tail_expr.syntax().text_range(), &format!("{{{}}}", tail_expr));
+ }
+ },
+ )
+}
+
+enum InsertOrReplace {
+ Insert(TextSize, bool),
+ Replace(TextRange),
+}
+
+/// Check any return type that is already specified: reject the assist for an explicit type, or turn an
+/// inferred (`_`) or absent return type into the corresponding builder action.
+fn ret_ty_to_action(
+ ret_ty: Option<ast::RetType>,
+ insert_after: SyntaxToken,
+) -> Option<InsertOrReplace> {
+ match ret_ty {
+ Some(ret_ty) => match ret_ty.ty() {
+ Some(ast::Type::InferType(_)) | None => {
+ cov_mark::hit!(existing_infer_ret_type);
+ cov_mark::hit!(existing_infer_ret_type_closure);
+ Some(InsertOrReplace::Replace(ret_ty.syntax().text_range()))
+ }
+ _ => {
+ cov_mark::hit!(existing_ret_type);
+ cov_mark::hit!(existing_ret_type_closure);
+ None
+ }
+ },
+ None => {
+ let insert_after_pos = insert_after.text_range().end();
+ let (insert_pos, needs_whitespace) = match insert_after.next_token() {
+ Some(it) if it.kind() == SyntaxKind::WHITESPACE => {
+ (insert_after_pos + TextSize::from(1), false)
+ }
+ _ => (insert_after_pos, true),
+ };
+
+ Some(InsertOrReplace::Insert(insert_pos, needs_whitespace))
+ }
+ }
+}
+
+enum FnType {
+ Function,
+ Closure { wrap_expr: bool },
+}
+
+/// If we're looking at a block that is supposed to return `()`, type inference
+/// will just tell us it has type `()`. We have to look at the tail expression
+/// to see the mismatched actual type. This 'unpeels' the various blocks to
+/// hopefully let us see the type the user intends. (This still doesn't handle
+/// all situations fully correctly; the 'ideal' way to handle this would be to
+/// run type inference on the function again, but with a variable as the return
+/// type.)
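+/// For example, in `fn foo() { if x { 1 } else { 2 } }` this peels the outer block and the `if`'s
+/// then-branch to reach the literal `1`, whose type is most likely the intended return type.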
+fn peel_blocks(mut expr: ast::Expr) -> ast::Expr {
+ loop {
+ match_ast! {
+ match (expr.syntax()) {
+ ast::BlockExpr(it) => {
+ if let Some(tail) = it.tail_expr() {
+ expr = tail.clone();
+ } else {
+ break;
+ }
+ },
+ ast::IfExpr(it) => {
+ if let Some(then_branch) = it.then_branch() {
+ expr = ast::Expr::BlockExpr(then_branch.clone());
+ } else {
+ break;
+ }
+ },
+ ast::MatchExpr(it) => {
+ if let Some(arm_expr) = it.match_arm_list().and_then(|l| l.arms().next()).and_then(|a| a.expr()) {
+ expr = arm_expr;
+ } else {
+ break;
+ }
+ },
+ _ => break,
+ }
+ }
+ }
+ expr
+}
+
+fn extract_tail(ctx: &AssistContext<'_>) -> Option<(FnType, ast::Expr, InsertOrReplace)> {
+ let (fn_type, tail_expr, return_type_range, action) =
+ if let Some(closure) = ctx.find_node_at_offset::<ast::ClosureExpr>() {
+ let rpipe = closure.param_list()?.syntax().last_token()?;
+ let rpipe_pos = rpipe.text_range().end();
+
+ let action = ret_ty_to_action(closure.ret_type(), rpipe)?;
+
+ let body = closure.body()?;
+ let body_start = body.syntax().first_token()?.text_range().start();
+ let (tail_expr, wrap_expr) = match body {
+ ast::Expr::BlockExpr(block) => (block.tail_expr()?, false),
+ body => (body, true),
+ };
+
+ let ret_range = TextRange::new(rpipe_pos, body_start);
+ (FnType::Closure { wrap_expr }, tail_expr, ret_range, action)
+ } else {
+ let func = ctx.find_node_at_offset::<ast::Fn>()?;
+
+ let rparen = func.param_list()?.r_paren_token()?;
+ let rparen_pos = rparen.text_range().end();
+ let action = ret_ty_to_action(func.ret_type(), rparen)?;
+
+ let body = func.body()?;
+ let stmt_list = body.stmt_list()?;
+ let tail_expr = stmt_list.tail_expr()?;
+
+ let ret_range_end = stmt_list.l_curly_token()?.text_range().start();
+ let ret_range = TextRange::new(rparen_pos, ret_range_end);
+ (FnType::Function, tail_expr, ret_range, action)
+ };
+ let range = ctx.selection_trimmed();
+ if return_type_range.contains_range(range) {
+ cov_mark::hit!(cursor_in_ret_position);
+ cov_mark::hit!(cursor_in_ret_position_closure);
+ } else if tail_expr.syntax().text_range().contains_range(range) {
+ cov_mark::hit!(cursor_on_tail);
+ cov_mark::hit!(cursor_on_tail_closure);
+ } else {
+ return None;
+ }
+ Some((fn_type, tail_expr, action))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn infer_return_type_specified_inferred() {
+ cov_mark::check!(existing_infer_ret_type);
+ check_assist(
+ add_return_type,
+ r#"fn foo() -> $0_ {
+ 45
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_specified_inferred_closure() {
+ cov_mark::check!(existing_infer_ret_type_closure);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ || -> _ {$045};
+}"#,
+ r#"fn foo() {
+ || -> i32 {45};
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_cursor_at_return_type_pos() {
+ cov_mark::check!(cursor_in_ret_position);
+ check_assist(
+ add_return_type,
+ r#"fn foo() $0{
+ 45
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_cursor_at_return_type_pos_closure() {
+ cov_mark::check!(cursor_in_ret_position_closure);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ || $045
+}"#,
+ r#"fn foo() {
+ || -> i32 {45}
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type() {
+ cov_mark::check!(cursor_on_tail);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ 45$0
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_no_whitespace() {
+ check_assist(
+ add_return_type,
+ r#"fn foo(){
+ 45$0
+}"#,
+ r#"fn foo() -> i32 {
+ 45
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_nested() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ if true {
+ 3$0
+ } else {
+ 5
+ }
+}"#,
+ r#"fn foo() -> i32 {
+ if true {
+ 3
+ } else {
+ 5
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_nested_match() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ match true {
+ true => { 3$0 },
+ false => { 5 },
+ }
+}"#,
+ r#"fn foo() -> i32 {
+ match true {
+ true => { 3 },
+ false => { 5 },
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_ret_type_specified() {
+ cov_mark::check!(existing_ret_type);
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() -> i32 {
+ ( 45$0 + 32 ) * 123
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_tail_expr() {
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ let x = $03;
+ ( 45 + 32 ) * 123
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_unit_return_type() {
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ ($0)
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure_block() {
+ cov_mark::check!(cursor_on_tail_closure);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32| {
+ x$0
+ };
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 {
+ x
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32| { x$0 };
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 { x };
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure_no_whitespace() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32|{ x$0 };
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 { x };
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_closure_wrap() {
+ cov_mark::check!(wrap_closure_non_block_expr);
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ |x: i32| x$0;
+}"#,
+ r#"fn foo() {
+ |x: i32| -> i32 {x};
+}"#,
+ );
+ }
+
+ #[test]
+ fn infer_return_type_nested_closure() {
+ check_assist(
+ add_return_type,
+ r#"fn foo() {
+ || {
+ if true {
+ 3$0
+ } else {
+ 5
+ }
+ }
+}"#,
+ r#"fn foo() {
+ || -> i32 {
+ if true {
+ 3
+ } else {
+ 5
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_ret_type_specified_closure() {
+ cov_mark::check!(existing_ret_type_closure);
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ || -> i32 { 3$0 }
+}"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_tail_expr_closure() {
+ check_assist_not_applicable(
+ add_return_type,
+ r#"fn foo() {
+ || -> i32 {
+ let x = 3$0;
+ 6
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
new file mode 100644
index 000000000..c0bf238db
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -0,0 +1,400 @@
+use ide_db::defs::{Definition, NameRefClass};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxKind, T};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: add_turbo_fish
+//
+// Adds `::<_>` to a call of a generic method or function.
+//
+// ```
+// fn make<T>() -> T { todo!() }
+// fn main() {
+// let x = make$0();
+// }
+// ```
+// ->
+// ```
+// fn make<T>() -> T { todo!() }
+// fn main() {
+// let x = make::<${0:_}>();
+// }
+// ```
+pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
+ let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
+ if arg_list.args().next().is_some() {
+ return None;
+ }
+ cov_mark::hit!(add_turbo_fish_after_call);
+ cov_mark::hit!(add_type_ascription_after_call);
+ arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
+ })?;
+ let next_token = ident.next_token()?;
+ if next_token.kind() == T![::] {
+ cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
+ return None;
+ }
+ let name_ref = ast::NameRef::cast(ident.parent()?)?;
+ let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { .. } => return None,
+ };
+ let fun = match def {
+ Definition::Function(it) => it,
+ _ => return None,
+ };
+ let generics = hir::GenericDef::Function(fun).params(ctx.sema.db);
+ if generics.is_empty() {
+ cov_mark::hit!(add_turbo_fish_non_generic);
+ return None;
+ }
+
+ if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
+ if let_stmt.colon_token().is_none() {
+ let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end();
+ let semi_pos = let_stmt.syntax().last_token()?.text_range().end();
+
+ acc.add(
+ AssistId("add_type_ascription", AssistKind::RefactorRewrite),
+ "Add `: _` before assignment operator",
+ ident.text_range(),
+ |builder| {
+ if let_stmt.semicolon_token().is_none() {
+ builder.insert(semi_pos, ";");
+ }
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"),
+ None => builder.insert(type_pos, ": _"),
+ }
+ },
+ )?
+ } else {
+ cov_mark::hit!(add_type_ascription_already_typed);
+ }
+ }
+
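+    // Count only type and const generic parameters; lifetime parameters are skipped
+    // because they are not written in the generated turbofish.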
+ let number_of_arguments = generics
+ .iter()
+ .filter(|param| {
+ matches!(param, hir::GenericParam::TypeParam(_) | hir::GenericParam::ConstParam(_))
+ })
+ .count();
+
+ acc.add(
+ AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
+ "Add `::<>`",
+ ident.text_range(),
+ |builder| {
+ builder.trigger_signature_help();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = format!("::<{}>", get_snippet_fish_head(number_of_arguments));
+ builder.insert_snippet(cap, ident.text_range().end(), snip)
+ }
+ None => {
+ let fish_head = std::iter::repeat("_").take(number_of_arguments).format(", ");
+ let snip = format!("::<{}>", fish_head);
+ builder.insert(ident.text_range().end(), snip);
+ }
+ }
+ },
+ )
+}
+
+/// This will create a snippet string with tabstops marked
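+/// For example, `get_snippet_fish_head(3)` produces `${1:_}, ${2:_}, ${0:_}`.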
+fn get_snippet_fish_head(number_of_arguments: usize) -> String {
+ let mut fish_head = (1..number_of_arguments)
+ .format_with("", |i, f| f(&format_args!("${{{}:_}}, ", i)))
+ .to_string();
+
+ // tabstop 0 is a special case and always the last one
+ fish_head.push_str("${0:_}");
+ fish_head
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_turbo_fish_function() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make$0();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_multiple_generic_types() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T, A>() -> T {}
+fn main() {
+ make$0();
+}
+"#,
+ r#"
+fn make<T, A>() -> T {}
+fn main() {
+ make::<${1:_}, ${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_many_generic_types() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T, A, B, C, D, E, F>() -> T {}
+fn main() {
+ make$0();
+}
+"#,
+ r#"
+fn make<T, A, B, C, D, E, F>() -> T {}
+fn main() {
+ make::<${1:_}, ${2:_}, ${3:_}, ${4:_}, ${5:_}, ${6:_}, ${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_after_call() {
+ cov_mark::check!(add_turbo_fish_after_call);
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make()$0;
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_method() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ S.make$0();
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ S.make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_one_fish_is_enough() {
+ cov_mark::check!(add_turbo_fish_one_fish_is_enough);
+ check_assist_not_applicable(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ make$0::<()>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_non_generic() {
+ cov_mark::check!(add_turbo_fish_non_generic);
+ check_assist_not_applicable(
+ add_turbo_fish,
+ r#"
+fn make() -> () {}
+fn main() {
+ make$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_function() {
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x = make$0();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: ${0:_} = make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_after_call() {
+ cov_mark::check!(add_type_ascription_after_call);
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x = make()$0;
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: ${0:_} = make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_method() {
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ let x = S.make$0();
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn make<T>(&self) -> T {}
+}
+fn main() {
+ let x: ${0:_} = S.make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_already_typed() {
+ cov_mark::check!(add_type_ascription_already_typed);
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: () = make$0();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: () = make::<${0:_}>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_type_ascription_append_semicolon() {
+ check_assist_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x = make$0()
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let x: ${0:_} = make();
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_lifetime_parameter() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<'a, T, A>(t: T, a: A) {}
+fn main() {
+ make$0(5, 2);
+}
+"#,
+ r#"
+fn make<'a, T, A>(t: T, a: A) {}
+fn main() {
+ make::<${1:_}, ${0:_}>(5, 2);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn add_turbo_fish_function_const_parameter() {
+ check_assist(
+ add_turbo_fish,
+ r#"
+fn make<T, const N: usize>(t: T) {}
+fn main() {
+ make$0(3);
+}
+"#,
+ r#"
+fn make<T, const N: usize>(t: T) {}
+fn main() {
+ make::<${1:_}, ${0:_}>(3);
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
new file mode 100644
index 000000000..2853d1d1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -0,0 +1,234 @@
+use std::collections::VecDeque;
+
+use syntax::ast::{self, AstNode};
+
+use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: apply_demorgan
+//
+// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law].
+// This transforms expressions of the form `!l || !r` into `!(l && r)`.
+// This also works with `&&`. This assist can only be applied with the cursor
+// on either `||` or `&&`.
+//
+// ```
+// fn main() {
+// if x != 4 ||$0 y < 3.14 {}
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if !(x == 4 && y >= 3.14) {}
+// }
+// ```
+pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
+ let op = expr.op_kind()?;
+ let op_range = expr.op_token()?.text_range();
+
+ let opposite_op = match op {
+ ast::BinaryOp::LogicOp(ast::LogicOp::And) => "||",
+ ast::BinaryOp::LogicOp(ast::LogicOp::Or) => "&&",
+ _ => return None,
+ };
+
+ let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+
+ let mut expr = expr;
+
+ // Walk up the tree while we have the same binary operator
+ while let Some(parent_expr) = expr.syntax().parent().and_then(ast::BinExpr::cast) {
+ match expr.op_kind() {
+ Some(parent_op) if parent_op == op => {
+ expr = parent_expr;
+ }
+ _ => break,
+ }
+ }
+
+ let mut expr_stack = vec![expr.clone()];
+ let mut terms = Vec::new();
+ let mut op_ranges = Vec::new();
+
+ // Find all the children with the same binary operator
+ while let Some(expr) = expr_stack.pop() {
+ let mut traverse_bin_expr_arm = |expr| {
+ if let ast::Expr::BinExpr(bin_expr) = expr {
+ if let Some(expr_op) = bin_expr.op_kind() {
+ if expr_op == op {
+ expr_stack.push(bin_expr);
+ } else {
+ terms.push(ast::Expr::BinExpr(bin_expr));
+ }
+ } else {
+ terms.push(ast::Expr::BinExpr(bin_expr));
+ }
+ } else {
+ terms.push(expr);
+ }
+ };
+
+ op_ranges.extend(expr.op_token().map(|t| t.text_range()));
+ traverse_bin_expr_arm(expr.lhs()?);
+ traverse_bin_expr_arm(expr.rhs()?);
+ }
+
+ acc.add(
+ AssistId("apply_demorgan", AssistKind::RefactorRewrite),
+ "Apply De Morgan's law",
+ op_range,
+ |edit| {
+ terms.sort_by_key(|t| t.syntax().text_range().start());
+ let mut terms = VecDeque::from(terms);
+
+ let paren_expr = expr.syntax().parent().and_then(ast::ParenExpr::cast);
+
+ let neg_expr = paren_expr
+ .clone()
+ .and_then(|paren_expr| paren_expr.syntax().parent())
+ .and_then(ast::PrefixExpr::cast)
+ .and_then(|prefix_expr| {
+ if prefix_expr.op_kind().unwrap() == ast::UnaryOp::Not {
+ Some(prefix_expr)
+ } else {
+ None
+ }
+ });
+
+ for op_range in op_ranges {
+ edit.replace(op_range, opposite_op);
+ }
+
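+            // If the whole chain is already wrapped in `!( .. )`, drop the outer `!` instead of negating twice;
+            // if it is only parenthesized, turn the opening `(` into `!(`. Otherwise wrap the negated
+            // terms in a new `!( .. )`.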
+ if let Some(paren_expr) = paren_expr {
+ for term in terms {
+ let range = term.syntax().text_range();
+ let not_term = invert_boolean_expression(term);
+
+ edit.replace(range, not_term.syntax().text());
+ }
+
+ if let Some(neg_expr) = neg_expr {
+ cov_mark::hit!(demorgan_double_negation);
+ edit.replace(neg_expr.op_token().unwrap().text_range(), "");
+ } else {
+ cov_mark::hit!(demorgan_double_parens);
+ edit.replace(paren_expr.l_paren_token().unwrap().text_range(), "!(");
+ }
+ } else {
+ if let Some(lhs) = terms.pop_front() {
+ let lhs_range = lhs.syntax().text_range();
+ let not_lhs = invert_boolean_expression(lhs);
+
+ edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text()));
+ }
+
+ if let Some(rhs) = terms.pop_back() {
+ let rhs_range = rhs.syntax().text_range();
+ let not_rhs = invert_boolean_expression(rhs);
+
+ edit.replace(rhs_range, format!("{})", not_rhs.syntax().text()));
+ }
+
+ for term in terms {
+ let term_range = term.syntax().text_range();
+ let not_term = invert_boolean_expression(term);
+ edit.replace(term_range, not_term.syntax().text());
+ }
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn demorgan_handles_leq() {
+ check_assist(
+ apply_demorgan,
+ r#"
+struct S;
+fn f() { S < S &&$0 S <= S }
+"#,
+ r#"
+struct S;
+fn f() { !(S >= S || S > S) }
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_handles_geq() {
+ check_assist(
+ apply_demorgan,
+ r#"
+struct S;
+fn f() { S > S &&$0 S >= S }
+"#,
+ r#"
+struct S;
+fn f() { !(S <= S || S < S) }
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_turns_and_into_or() {
+ check_assist(apply_demorgan, "fn f() { !x &&$0 !x }", "fn f() { !(x || x) }")
+ }
+
+ #[test]
+ fn demorgan_turns_or_into_and() {
+ check_assist(apply_demorgan, "fn f() { !x ||$0 !x }", "fn f() { !(x && x) }")
+ }
+
+ #[test]
+ fn demorgan_removes_inequality() {
+ check_assist(apply_demorgan, "fn f() { x != x ||$0 !x }", "fn f() { !(x == x && x) }")
+ }
+
+ #[test]
+ fn demorgan_general_case() {
+ check_assist(apply_demorgan, "fn f() { x ||$0 x }", "fn f() { !(!x && !x) }")
+ }
+
+ #[test]
+ fn demorgan_multiple_terms() {
+ check_assist(apply_demorgan, "fn f() { x ||$0 y || z }", "fn f() { !(!x && !y && !z) }");
+ check_assist(apply_demorgan, "fn f() { x || y ||$0 z }", "fn f() { !(!x && !y && !z) }");
+ }
+
+ #[test]
+ fn demorgan_doesnt_apply_with_cursor_not_on_op() {
+ check_assist_not_applicable(apply_demorgan, "fn f() { $0 !x || !x }")
+ }
+
+ #[test]
+ fn demorgan_doesnt_double_negation() {
+ cov_mark::check!(demorgan_double_negation);
+ check_assist(apply_demorgan, "fn f() { !(x ||$0 x) }", "fn f() { (!x && !x) }")
+ }
+
+ #[test]
+ fn demorgan_doesnt_double_parens() {
+ cov_mark::check!(demorgan_double_parens);
+ check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }")
+ }
+
+ // https://github.com/rust-lang/rust-analyzer/issues/10963
+ #[test]
+ fn demorgan_doesnt_hang() {
+ check_assist(
+ apply_demorgan,
+ "fn f() { 1 || 3 &&$0 4 || 5 }",
+ "fn f() { !(!1 || !3 || !4) || 5 }",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
new file mode 100644
index 000000000..949cf3167
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
@@ -0,0 +1,1292 @@
+use std::cmp::Reverse;
+
+use hir::{db::HirDatabase, Module};
+use ide_db::{
+ helpers::mod_path_to_ast,
+ imports::{
+ import_assets::{ImportAssets, ImportCandidate, LocatedImport},
+ insert_use::{insert_use, ImportScope},
+ },
+};
+use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+
+// Feature: Auto Import
+//
+// Using the `auto-import` assist it is possible to insert missing imports for unresolved items.
+// When inserting an import it will do so in a structured manner by keeping imports grouped,
+// separated by a newline in the following order:
+//
+// - `std` and `core`
+// - External Crates
+// - Current Crate, paths prefixed by `crate`
+// - Current Module, paths prefixed by `self`
+// - Super Module, paths prefixed by `super`
+//
+// Example:
+// ```rust
+// use std::fs::File;
+//
+// use itertools::Itertools;
+// use syntax::ast;
+//
+// use crate::utils::insert_use;
+//
+// use self::auto_import;
+//
+// use super::AssistContext;
+// ```
+//
+// .Import Granularity
+//
+// It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
+// It has the following configurations:
+//
+// - `crate`: Merge imports from the same crate into a single use statement. This kind of
+// nesting is only supported in Rust versions later than 1.24.
+// - `module`: Merge imports from the same module into a single use statement.
+// - `item`: Don't merge imports at all, creating one import per item.
+// - `preserve`: Do not change the granularity of any imports. For auto-import this has the same
+// effect as `item`.
+//
+// In `VS Code` the configuration for this is `rust-analyzer.imports.granularity.group`.
+//
+// .Import Prefix
+//
+// The style of imports in the same crate is configurable through the `imports.prefix` setting.
+// It has the following configurations:
+//
+// - `crate`: This setting will force paths to be always absolute, starting with the `crate`
+// prefix, unless the item is defined outside of the current crate.
+// - `self`: This setting will force paths that are relative to the current module to always
+// start with `self`. This will result in paths that always start with either `crate`, `self`,
+// `super` or an extern crate identifier.
+// - `plain`: This setting does not impose any restrictions in imports.
+//
+// In `VS Code` the configuration for this is `rust-analyzer.imports.prefix`.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif[]
+
+// Assist: auto_import
+//
+// If the name is unresolved, provides all possible imports for it.
+//
+// ```
+// fn main() {
+// let map = HashMap$0::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+// ->
+// ```
+// use std::collections::HashMap;
+//
+// fn main() {
+// let map = HashMap::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
+ let mut proposed_imports =
+ import_assets.search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind);
+ if proposed_imports.is_empty() {
+ return None;
+ }
+
+ let range = match &syntax_under_caret {
+ NodeOrToken::Node(node) => ctx.sema.original_range(node).range,
+ NodeOrToken::Token(token) => token.text_range(),
+ };
+ let group_label = group_label(import_assets.import_candidate());
+ let scope = ImportScope::find_insert_use_container(
+ &match syntax_under_caret {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) => it.parent()?,
+ },
+ &ctx.sema,
+ )?;
+
+ // we aren't interested in different namespaces
+ proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
+
+ let current_node = match ctx.covering_element() {
+ NodeOrToken::Node(node) => Some(node),
+ NodeOrToken::Token(token) => token.parent(),
+ };
+
+ let current_module =
+ current_node.as_ref().and_then(|node| ctx.sema.scope(node)).map(|scope| scope.module());
+
+ // prioritize more relevant imports
+ proposed_imports
+ .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref())));
+
+ for import in proposed_imports {
+ acc.add_group(
+ &group_label,
+ AssistId("auto_import", AssistKind::QuickFix),
+ format!("Import `{}`", import.import_path),
+ range,
+ |builder| {
+ let scope = match scope.clone() {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+ insert_use(&scope, mod_path_to_ast(&import.import_path), &ctx.config.insert_use);
+ },
+ );
+ }
+ Some(())
+}
+
+pub(super) fn find_importable_node(
+ ctx: &AssistContext<'_>,
+) -> Option<(ImportAssets, SyntaxElement)> {
+ if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::<ast::Path>() {
+ ImportAssets::for_exact_path(&path_under_caret, &ctx.sema)
+ .zip(Some(path_under_caret.syntax().clone().into()))
+ } else if let Some(method_under_caret) =
+ ctx.find_node_at_offset_with_descend::<ast::MethodCallExpr>()
+ {
+ ImportAssets::for_method_call(&method_under_caret, &ctx.sema)
+ .zip(Some(method_under_caret.syntax().clone().into()))
+ } else if let Some(pat) = ctx
+ .find_node_at_offset_with_descend::<ast::IdentPat>()
+ .filter(ast::IdentPat::is_simple_ident)
+ {
+ ImportAssets::for_ident_pat(&ctx.sema, &pat).zip(Some(pat.syntax().clone().into()))
+ } else {
+ None
+ }
+}
+
+fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
+ let name = match import_candidate {
+ ImportCandidate::Path(candidate) => format!("Import {}", candidate.name.text()),
+ ImportCandidate::TraitAssocItem(candidate) => {
+ format!("Import a trait for item {}", candidate.assoc_item_name.text())
+ }
+ ImportCandidate::TraitMethod(candidate) => {
+ format!("Import a trait for method {}", candidate.assoc_item_name.text())
+ }
+ };
+ GroupLabel(name)
+}
+
+/// Determine how relevant a given import is in the current context. Higher scores are more
+/// relevant.
+fn relevance_score(
+ ctx: &AssistContext<'_>,
+ import: &LocatedImport,
+ current_module: Option<&Module>,
+) -> i32 {
+ let mut score = 0;
+
+ let db = ctx.db();
+
+ let item_module = match import.item_to_import {
+ hir::ItemInNs::Types(item) | hir::ItemInNs::Values(item) => item.module(db),
+ hir::ItemInNs::Macros(makro) => Some(makro.module(db)),
+ };
+
+ match item_module.zip(current_module) {
+ // get the distance between the imported path and the current module
+ // (prefer items that are more local)
+ Some((item_module, current_module)) => {
+ score -= module_distance_heuristic(db, &current_module, &item_module) as i32;
+ }
+
+ // could not find relevant modules, so just use the length of the path as an estimate
+ None => return -(2 * import.import_path.len() as i32),
+ }
+
+ score
+}
+
+/// A heuristic that gives a higher score to modules that are more separated.
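+///
+/// For intuition, consider a hypothetical layout where the current module is `krate::foo::bar`
+/// and the item lives in `krate::foo::baz::qux`: the root-to-module paths share the prefix
+/// `[krate, foo]` (length 2), three modules (`bar`, `baz`, `qux`) remain distinct, and both
+/// sides are in the same crate (boundary cost 0), so the heuristic returns 3.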
+fn module_distance_heuristic(db: &dyn HirDatabase, current: &Module, item: &Module) -> usize {
+ // get the path starting from the item to the respective crate roots
+ let mut current_path = current.path_to_root(db);
+ let mut item_path = item.path_to_root(db);
+
+ // we want paths going from the root to the item
+ current_path.reverse();
+ item_path.reverse();
+
+ // length of the common prefix of the two paths
+ let prefix_length = current_path.iter().zip(&item_path).take_while(|(a, b)| a == b).count();
+
+ // how many modules differ between the two paths (all modules, removing any duplicates)
+ let distinct_length = current_path.len() + item_path.len() - 2 * prefix_length;
+
+ // cost of importing from another crate
+ let crate_boundary_cost = if current.krate() == item.krate() {
+ 0
+ } else if item.krate().is_builtin(db) {
+ 2
+ } else {
+ 4
+ };
+
+ distinct_length + crate_boundary_cost
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use hir::Semantics;
+ use ide_db::{
+ assists::AssistResolveStrategy,
+ base_db::{fixture::WithFixture, FileRange},
+ RootDatabase,
+ };
+
+ use crate::tests::{
+ check_assist, check_assist_not_applicable, check_assist_target, TEST_CONFIG,
+ };
+
+ fn check_auto_import_order(before: &str, order: &[&str]) {
+ let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
+ let frange = FileRange { file_id, range: range_or_offset.into() };
+
+ let sema = Semantics::new(&db);
+ let config = TEST_CONFIG;
+ let ctx = AssistContext::new(sema, &config, frange);
+ let mut acc = Assists::new(&ctx, AssistResolveStrategy::All);
+ auto_import(&mut acc, &ctx);
+ let assists = acc.finish();
+
+ let labels = assists.iter().map(|assist| assist.label.to_string()).collect::<Vec<_>>();
+
+ assert_eq!(labels, order);
+ }
+
+ #[test]
+ fn prefer_shorter_paths() {
+ let before = r"
+//- /main.rs crate:main deps:foo,bar
+HashMap$0::new();
+
+//- /lib.rs crate:foo
+pub mod collections { pub struct HashMap; }
+
+//- /lib.rs crate:bar
+pub mod collections { pub mod hash_map { pub struct HashMap; } }
+ ";
+
+ check_auto_import_order(
+ before,
+ &["Import `foo::collections::HashMap`", "Import `bar::collections::hash_map::HashMap`"],
+ )
+ }
+
+ #[test]
+ fn prefer_same_crate() {
+ let before = r"
+//- /main.rs crate:main deps:foo
+HashMap$0::new();
+
+mod collections {
+ pub mod hash_map {
+ pub struct HashMap;
+ }
+}
+
+//- /lib.rs crate:foo
+pub struct HashMap;
+ ";
+
+ check_auto_import_order(
+ before,
+ &["Import `collections::hash_map::HashMap`", "Import `foo::HashMap`"],
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_scope_inside_macro() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+mod bar {
+ pub struct Baz;
+}
+macro_rules! foo {
+ ($it:ident) => {
+ mod __ {
+ fn __(x: $it) {}
+ }
+ };
+}
+foo! {
+ Baz$0
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_in_attributes() {
+ check_assist(
+ auto_import,
+ r"
+//- proc_macros: identity
+#[proc_macros::identity]
+mod foo {
+ mod bar {
+ const _: Baz$0 = ();
+ }
+}
+mod baz {
+ pub struct Baz;
+}
+",
+ r"
+#[proc_macros::identity]
+mod foo {
+ mod bar {
+ use crate::baz::Baz;
+
+ const _: Baz = ();
+ }
+}
+mod baz {
+ pub struct Baz;
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import_partial() {
+ check_assist(
+ auto_import,
+ r"
+ mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+ }
+
+ use std::fmt;
+
+ $0Formatter
+ ",
+ r"
+ mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+ }
+
+ use std::fmt::{self, Formatter};
+
+ Formatter
+ ",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import() {
+ check_assist(
+ auto_import,
+ r"
+ $0PubStruct
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ r"
+ use PubMod::PubStruct;
+
+ PubStruct
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import_in_macros() {
+ check_assist(
+ auto_import,
+ r"
+ macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+ }
+ foo!(Pub$0Struct);
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ r"
+ use PubMod::PubStruct;
+
+ macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+ }
+ foo!(PubStruct);
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_multiple_imports() {
+ check_assist(
+ auto_import,
+ r"
+ PubSt$0ruct
+
+ pub mod PubMod1 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod2 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod3 {
+ pub struct PubStruct;
+ }
+ ",
+ r"
+ use PubMod3::PubStruct;
+
+ PubStruct
+
+ pub mod PubMod1 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod2 {
+ pub struct PubStruct;
+ }
+ pub mod PubMod3 {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_already_imported_types() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ use PubMod::PubStruct;
+
+ PubStruct$0
+
+ pub mod PubMod {
+ pub struct PubStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_types_with_private_paths() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ PrivateStruct$0
+
+ pub mod PubMod {
+ struct PrivateStruct;
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_when_no_imports_found() {
+ check_assist_not_applicable(
+ auto_import,
+ "
+ PubStruct$0",
+ );
+ }
+
+ #[test]
+ fn function_import() {
+ check_assist(
+ auto_import,
+ r"
+ test_function$0
+
+ pub mod PubMod {
+ pub fn test_function() {};
+ }
+ ",
+ r"
+ use PubMod::test_function;
+
+ test_function
+
+ pub mod PubMod {
+ pub fn test_function() {};
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn macro_import() {
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:crate_with_macro
+#[macro_export]
+macro_rules! foo {
+ () => ()
+}
+
+//- /main.rs crate:main deps:crate_with_macro
+fn main() {
+ foo$0
+}
+",
+ r"use crate_with_macro::foo;
+
+fn main() {
+ foo
+}
+",
+ );
+ }
+
+ #[test]
+ fn auto_import_target() {
+ check_assist_target(
+ auto_import,
+ r"
+ struct AssistInfo {
+ group_label: Option<$0GroupLabel>,
+ }
+
+ mod m { pub struct GroupLabel; }
+ ",
+ "GroupLabel",
+ )
+ }
+
+ #[test]
+ fn not_applicable_when_path_start_is_imported() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ pub mod mod1 {
+ pub mod mod2 {
+ pub mod mod3 {
+ pub struct TestStruct;
+ }
+ }
+ }
+
+ use mod1::mod2;
+ fn main() {
+ mod2::mod3::TestStruct$0
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_function() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ pub mod test_mod {
+ pub fn test_function() {}
+ }
+
+ use test_mod::test_function;
+ fn main() {
+ test_function$0
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn associated_struct_function() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+ }
+
+ fn main() {
+ TestStruct::test_function$0
+ }
+ ",
+ r"
+ use test_mod::TestStruct;
+
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+ }
+
+ fn main() {
+ TestStruct::test_function
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn associated_struct_const() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ TestStruct::TEST_CONST$0
+ }
+ ",
+ r"
+ use test_mod::TestStruct;
+
+ mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ TestStruct::TEST_CONST
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn associated_trait_function() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_function() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub trait TestTrait2 {
+ fn test_function();
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_function() {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_function() {}
+ }
+ }
+
+ use test_mod::TestTrait2;
+ fn main() {
+ test_mod::TestEnum::test_function$0;
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn associated_trait_const() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait;
+
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_const() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub trait TestTrait2 {
+ const TEST_CONST: f64;
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ const TEST_CONST: f64 = 42.0;
+ }
+ impl TestTrait for TestEnum {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ use test_mod::TestTrait2;
+ fn main() {
+ test_mod::TestEnum::TEST_CONST$0;
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn trait_method() {
+ check_assist(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ ",
+ r"
+ use test_mod::TestTrait;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn trait_method_cross_crate() {
+ check_assist(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait;
+
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn assoc_fn_cross_crate() {
+ check_assist(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::test_func$0tion
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait;
+
+ fn main() {
+ dep::test_mod::TestStruct::test_function
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn assoc_const_cross_crate() {
+ check_assist(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::CONST$0
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const CONST: bool;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const CONST: bool = true;
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait;
+
+ fn main() {
+ dep::test_mod::TestStruct::CONST
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn assoc_fn_as_method_cross_crate() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_func$0tion()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn private_trait_cross_crate() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_method() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub trait TestTrait2 {
+ fn test_method(&self);
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_method(&self) {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_method(&self) {}
+ }
+ }
+
+ use test_mod::TestTrait2;
+ fn main() {
+ let one = test_mod::TestEnum::One;
+ one.test$0_method();
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn dep_import() {
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Struct$0
+}
+",
+ r"use dep::Struct;
+
+fn main() {
+ Struct
+}
+",
+ );
+ }
+
+ #[test]
+ fn whole_segment() {
+ // Tests that only imports whose last segment matches the identifier get suggested.
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+pub mod fmt {
+ pub trait Display {}
+}
+
+pub fn panic_fmt() {}
+
+//- /main.rs crate:main deps:dep
+struct S;
+
+impl f$0mt::Display for S {}
+",
+ r"use dep::fmt;
+
+struct S;
+
+impl fmt::Display for S {}
+",
+ );
+ }
+
+ #[test]
+ fn macro_generated() {
+ // Tests that macro-generated items are suggested from external crates.
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+macro_rules! mac {
+ () => {
+ pub struct Cheese;
+ };
+}
+
+mac!();
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Cheese$0;
+}
+",
+ r"use dep::Cheese;
+
+fn main() {
+ Cheese;
+}
+",
+ );
+ }
+
+ #[test]
+ fn casing() {
+ // Tests that differently cased names don't interfere and we only suggest the matching one.
+ check_assist(
+ auto_import,
+ r"
+//- /lib.rs crate:dep
+pub struct FMT;
+pub struct fmt;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ FMT$0;
+}
+",
+ r"use dep::FMT;
+
+fn main() {
+ FMT;
+}
+",
+ );
+ }
+
+ #[test]
+ fn inner_items() {
+ check_assist(
+ auto_import,
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ fn bar() {
+ Foo$0;
+ println!("Hallo");
+ }
+}
+"#,
+ r#"
+mod baz {
+ pub struct Foo {}
+}
+
+mod bar {
+ use crate::baz::Foo;
+
+ fn bar() {
+ Foo;
+ println!("Hallo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn uses_abs_path_with_extern_crate_clash() {
+ cov_mark::check!(ambiguous_crate_start);
+ check_assist(
+ auto_import,
+ r#"
+//- /main.rs crate:main deps:foo
+mod foo {}
+
+const _: () = {
+ Foo$0
+};
+//- /foo.rs crate:foo
+pub struct Foo
+"#,
+ r#"
+use ::foo::Foo;
+
+mod foo {}
+
+const _: () = {
+ Foo
+};
+"#,
+ );
+ }
+
+ #[test]
+ fn works_on_ident_patterns() {
+ check_assist(
+ auto_import,
+ r#"
+mod foo {
+ pub struct Foo {}
+}
+fn foo() {
+ let Foo$0;
+}
+"#,
+ r#"
+use foo::Foo;
+
+mod foo {
+ pub struct Foo {}
+}
+fn foo() {
+ let Foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_derives() {
+ check_assist(
+ auto_import,
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ r#"
+use foo::Copy;
+
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(Copy)]
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_use_start() {
+ check_assist(
+ auto_import,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo$0::Foo;
+"#,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use bar::foo;
+use foo::Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_in_non_start_use() {
+ check_assist_not_applicable(
+ auto_import,
+ r"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo::Foo$0;
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
new file mode 100644
index 000000000..2b1d8f6f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
@@ -0,0 +1,216 @@
+use syntax::{
+ ast::{self, HasName, HasVisibility},
+ AstNode,
+ SyntaxKind::{
+ CONST, ENUM, FN, MACRO_DEF, MODULE, STATIC, STRUCT, TRAIT, TYPE_ALIAS, USE, VISIBILITY,
+ },
+ T,
+};
+
+use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: change_visibility
+//
+// Adds a visibility specifier, or changes an existing one.
+//
+// ```
+// $0fn frobnicate() {}
+// ```
+// ->
+// ```
+// pub(crate) fn frobnicate() {}
+// ```
+pub(crate) fn change_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() {
+ return change_vis(acc, vis);
+ }
+ add_vis(acc, ctx)
+}
+
+fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let item_keyword = ctx.token_at_offset().find(|leaf| {
+ matches!(
+ leaf.kind(),
+ T![const]
+ | T![static]
+ | T![fn]
+ | T![mod]
+ | T![struct]
+ | T![enum]
+ | T![trait]
+ | T![type]
+ | T![use]
+ | T![macro]
+ )
+ });
+
+ let (offset, target) = if let Some(keyword) = item_keyword {
+ let parent = keyword.parent()?;
+ let def_kws =
+ vec![CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT, USE, MACRO_DEF];
+ // Parent is not a definition, can't add visibility
+ if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
+ return None;
+ }
+ // Already have visibility, do nothing
+ if parent.children().any(|child| child.kind() == VISIBILITY) {
+ return None;
+ }
+ (vis_offset(&parent), keyword.text_range())
+ } else if let Some(field_name) = ctx.find_node_at_offset::<ast::Name>() {
+ let field = field_name.syntax().ancestors().find_map(ast::RecordField::cast)?;
+ if field.name()? != field_name {
+ cov_mark::hit!(change_visibility_field_false_positive);
+ return None;
+ }
+ if field.visibility().is_some() {
+ return None;
+ }
+ (vis_offset(field.syntax()), field_name.syntax().text_range())
+ } else if let Some(field) = ctx.find_node_at_offset::<ast::TupleField>() {
+ if field.visibility().is_some() {
+ return None;
+ }
+ (vis_offset(field.syntax()), field.syntax().text_range())
+ } else {
+ return None;
+ };
+
+ acc.add(
+ AssistId("change_visibility", AssistKind::RefactorRewrite),
+ "Change visibility to pub(crate)",
+ target,
+ |edit| {
+ edit.insert(offset, "pub(crate) ");
+ },
+ )
+}
+
+fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
+ if vis.syntax().text() == "pub" {
+ let target = vis.syntax().text_range();
+ return acc.add(
+ AssistId("change_visibility", AssistKind::RefactorRewrite),
+ "Change Visibility to pub(crate)",
+ target,
+ |edit| {
+ edit.replace(vis.syntax().text_range(), "pub(crate)");
+ },
+ );
+ }
+ if vis.syntax().text() == "pub(crate)" {
+ let target = vis.syntax().text_range();
+ return acc.add(
+ AssistId("change_visibility", AssistKind::RefactorRewrite),
+ "Change visibility to pub",
+ target,
+ |edit| {
+ edit.replace(vis.syntax().text_range(), "pub");
+ },
+ );
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn change_visibility_adds_pub_crate_to_items() {
+ check_assist(change_visibility, "$0fn foo() {}", "pub(crate) fn foo() {}");
+ check_assist(change_visibility, "f$0n foo() {}", "pub(crate) fn foo() {}");
+ check_assist(change_visibility, "$0struct Foo {}", "pub(crate) struct Foo {}");
+ check_assist(change_visibility, "$0mod foo {}", "pub(crate) mod foo {}");
+ check_assist(change_visibility, "$0trait Foo {}", "pub(crate) trait Foo {}");
+ check_assist(change_visibility, "m$0od {}", "pub(crate) mod {}");
+ check_assist(change_visibility, "unsafe f$0n foo() {}", "pub(crate) unsafe fn foo() {}");
+ check_assist(change_visibility, "$0macro foo() {}", "pub(crate) macro foo() {}");
+ check_assist(change_visibility, "$0use foo;", "pub(crate) use foo;");
+ }
+
+ #[test]
+ fn change_visibility_works_with_struct_fields() {
+ check_assist(
+ change_visibility,
+ r"struct S { $0field: u32 }",
+ r"struct S { pub(crate) field: u32 }",
+ );
+ check_assist(change_visibility, r"struct S ( $0u32 )", r"struct S ( pub(crate) u32 )");
+ }
+
+ #[test]
+ fn change_visibility_field_false_positive() {
+ cov_mark::check!(change_visibility_field_false_positive);
+ check_assist_not_applicable(
+ change_visibility,
+ r"struct S { field: [(); { let $0x = ();}] }",
+ )
+ }
+
+ #[test]
+ fn change_visibility_pub_to_pub_crate() {
+ check_assist(change_visibility, "$0pub fn foo() {}", "pub(crate) fn foo() {}")
+ }
+
+ #[test]
+ fn change_visibility_pub_crate_to_pub() {
+ check_assist(change_visibility, "$0pub(crate) fn foo() {}", "pub fn foo() {}")
+ }
+
+ #[test]
+ fn change_visibility_const() {
+ check_assist(change_visibility, "$0const FOO = 3u8;", "pub(crate) const FOO = 3u8;");
+ }
+
+ #[test]
+ fn change_visibility_static() {
+ check_assist(change_visibility, "$0static FOO = 3u8;", "pub(crate) static FOO = 3u8;");
+ }
+
+ #[test]
+ fn change_visibility_type_alias() {
+ check_assist(change_visibility, "$0type T = ();", "pub(crate) type T = ();");
+ }
+
+ #[test]
+ fn change_visibility_handles_comment_attrs() {
+ check_assist(
+ change_visibility,
+ r"
+ /// docs
+
+ // comments
+
+ #[derive(Debug)]
+ $0struct Foo;
+ ",
+ r"
+ /// docs
+
+ // comments
+
+ #[derive(Debug)]
+ pub(crate) struct Foo;
+ ",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_enum_variants() {
+ check_assist_not_applicable(
+ change_visibility,
+ r"mod foo { pub enum Foo {Foo1} }
+ fn main() { foo::Foo::Foo1$0 } ",
+ );
+ }
+
+ #[test]
+ fn change_visibility_target() {
+ check_assist_target(change_visibility, "$0fn foo() {}", "fn");
+ check_assist_target(change_visibility, "pub(crate)$0 fn foo() {}", "pub(crate)");
+ check_assist_target(change_visibility, "struct S { $0field: u32 }", "field");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
new file mode 100644
index 000000000..db96ad330
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -0,0 +1,575 @@
+use hir::{known, AsAssocItem, Semantics};
+use ide_db::{
+ famous_defs::FamousDefs,
+ syntax_helpers::node_ext::{
+ block_as_lone_tail, for_each_tail_expr, is_pattern_cond, preorder_expr,
+ },
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, edit::AstNodeEdit, make, HasArgList},
+ ted, AstNode, SyntaxNode,
+};
+
+use crate::{
+ utils::{invert_boolean_expression, unwrap_trivial_block},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: convert_if_to_bool_then
+//
+// Converts an if expression into a corresponding `bool::then` call.
+//
+// ```
+// # //- minicore: option
+// fn main() {
+// if$0 cond {
+// Some(val)
+// } else {
+// None
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// cond.then(|| val)
+// }
+// ```
+pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // FIXME applies to match as well
+ let expr = ctx.find_node_at_offset::<ast::IfExpr>()?;
+ if !expr.if_token()?.text_range().contains_inclusive(ctx.offset()) {
+ return None;
+ }
+
+ let cond = expr.condition().filter(|cond| !is_pattern_cond(cond.clone()))?;
+ let then = expr.then_branch()?;
+ let else_ = match expr.else_branch()? {
+ ast::ElseBranch::Block(b) => b,
+ ast::ElseBranch::IfExpr(_) => {
+ cov_mark::hit!(convert_if_to_bool_then_chain);
+ return None;
+ }
+ };
+
+ let (none_variant, some_variant) = option_variants(&ctx.sema, expr.syntax())?;
+
+ let (invert_cond, closure_body) = match (
+ block_is_none_variant(&ctx.sema, &then, none_variant),
+ block_is_none_variant(&ctx.sema, &else_, none_variant),
+ ) {
+ (invert @ true, false) => (invert, ast::Expr::BlockExpr(else_)),
+ (invert @ false, true) => (invert, ast::Expr::BlockExpr(then)),
+ _ => return None,
+ };
+
+ if is_invalid_body(&ctx.sema, some_variant, &closure_body) {
+ cov_mark::hit!(convert_if_to_bool_then_pattern_invalid_body);
+ return None;
+ }
+
+ let target = expr.syntax().text_range();
+ acc.add(
+ AssistId("convert_if_to_bool_then", AssistKind::RefactorRewrite),
+ "Convert `if` expression to `bool::then` call",
+ target,
+ |builder| {
+ let closure_body = closure_body.clone_for_update();
+ // Rewrite all `Some(e)` in tail position to `e`
+ let mut replacements = Vec::new();
+ for_each_tail_expr(&closure_body, &mut |e| {
+ let e = match e {
+ ast::Expr::BreakExpr(e) => e.expr(),
+ e @ ast::Expr::CallExpr(_) => Some(e.clone()),
+ _ => None,
+ };
+ if let Some(ast::Expr::CallExpr(call)) = e {
+ if let Some(arg_list) = call.arg_list() {
+ if let Some(arg) = arg_list.args().next() {
+ replacements.push((call.syntax().clone(), arg.syntax().clone()));
+ }
+ }
+ }
+ });
+ replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
+ let closure_body = match closure_body {
+ ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
+ e => e,
+ };
+
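+ // Without parentheses, appending `.then(..)` to these expressions would parse
+ // differently or not at all; e.g. an `a && b` condition has to become
+ // `(a && b).then(..)` rather than `a && b.then(..)`.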
+ let parenthesize = matches!(
+ cond,
+ ast::Expr::BinExpr(_)
+ | ast::Expr::BlockExpr(_)
+ | ast::Expr::BoxExpr(_)
+ | ast::Expr::BreakExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::ClosureExpr(_)
+ | ast::Expr::ContinueExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::IfExpr(_)
+ | ast::Expr::LoopExpr(_)
+ | ast::Expr::MacroExpr(_)
+ | ast::Expr::MatchExpr(_)
+ | ast::Expr::PrefixExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::ReturnExpr(_)
+ | ast::Expr::WhileExpr(_)
+ | ast::Expr::YieldExpr(_)
+ );
+ let cond = if invert_cond { invert_boolean_expression(cond) } else { cond };
+ let cond = if parenthesize { make::expr_paren(cond) } else { cond };
+ let arg_list = make::arg_list(Some(make::expr_closure(None, closure_body)));
+ let mcall = make::expr_method_call(cond, make::name_ref("then"), arg_list);
+ builder.replace(target, mcall.to_string());
+ },
+ )
+}
+
+// Assist: convert_bool_then_to_if
+//
+// Converts a `bool::then` method call to an equivalent if expression.
+//
+// ```
+// # //- minicore: bool_impl
+// fn main() {
+// (0 == 0).then$0(|| val)
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if 0 == 0 {
+// Some(val)
+// } else {
+// None
+// }
+// }
+// ```
+pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let name_ref = ctx.find_node_at_offset::<ast::NameRef>()?;
+ let mcall = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
+ let receiver = mcall.receiver()?;
+ let closure_body = mcall.arg_list()?.args().exactly_one().ok()?;
+ let closure_body = match closure_body {
+ ast::Expr::ClosureExpr(expr) => expr.body()?,
+ _ => return None,
+ };
+ // Verify that the method being called is `bool::then`.
+ let func = ctx.sema.resolve_method_call(&mcall)?;
+ if func.name(ctx.sema.db).to_string() != "then" {
+ return None;
+ }
+ let assoc = func.as_assoc_item(ctx.sema.db)?;
+ match assoc.container(ctx.sema.db) {
+ hir::AssocItemContainer::Impl(impl_) if impl_.self_ty(ctx.sema.db).is_bool() => {}
+ _ => return None,
+ }
+
+ let target = mcall.syntax().text_range();
+ acc.add(
+ AssistId("convert_bool_then_to_if", AssistKind::RefactorRewrite),
+ "Convert `bool::then` call to `if`",
+ target,
+ |builder| {
+ let closure_body = match closure_body {
+ ast::Expr::BlockExpr(block) => block,
+ e => make::block_expr(None, Some(e)),
+ };
+
+ let closure_body = closure_body.clone_for_update();
+ // Wrap all tails in `Some(...)`
+ let none_path = make::expr_path(make::ext::ident_path("None"));
+ let some_path = make::expr_path(make::ext::ident_path("Some"));
+ let mut replacements = Vec::new();
+ for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| {
+ let e = match e {
+ ast::Expr::BreakExpr(e) => e.expr(),
+ ast::Expr::ReturnExpr(e) => e.expr(),
+ _ => Some(e.clone()),
+ };
+ if let Some(expr) = e {
+ replacements.push((
+ expr.syntax().clone(),
+ make::expr_call(some_path.clone(), make::arg_list(Some(expr)))
+ .syntax()
+ .clone_for_update(),
+ ));
+ }
+ });
+ replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
+
+ let cond = match &receiver {
+ ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver),
+ _ => receiver,
+ };
+ let if_expr = make::expr_if(
+ cond,
+ closure_body.reset_indent(),
+ Some(ast::ElseBranch::Block(make::block_expr(None, Some(none_path)))),
+ )
+ .indent(mcall.indent_level());
+
+ builder.replace(target, if_expr.to_string());
+ },
+ )
+}
+
+fn option_variants(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &SyntaxNode,
+) -> Option<(hir::Variant, hir::Variant)> {
+ let fam = FamousDefs(sema, sema.scope(expr)?.krate());
+ let option_variants = fam.core_option_Option()?.variants(sema.db);
+ match &*option_variants {
+ &[variant0, variant1] => Some(if variant0.name(sema.db) == known::None {
+ (variant0, variant1)
+ } else {
+ (variant1, variant0)
+ }),
+ _ => None,
+ }
+}
+
+/// Traverses the expression, checking whether it contains `return` or `?` expressions, or whether any tail is not a `Some(expr)` expression.
+/// If any of these conditions are met, it is impossible to rewrite this as a `bool::then` call.
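+///
+/// For example (illustrative closure bodies): `{ Some(compute()) }` is accepted because its
+/// only tail expression is a `Some(..)` call, while `{ if c { return; } Some(1) }` is rejected
+/// because of the early `return`.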
+fn is_invalid_body(
+ sema: &Semantics<'_, RootDatabase>,
+ some_variant: hir::Variant,
+ expr: &ast::Expr,
+) -> bool {
+ let mut invalid = false;
+ preorder_expr(expr, &mut |e| {
+ invalid |=
+ matches!(e, syntax::WalkEvent::Enter(ast::Expr::TryExpr(_) | ast::Expr::ReturnExpr(_)));
+ invalid
+ });
+ if !invalid {
+ for_each_tail_expr(expr, &mut |e| {
+ if invalid {
+ return;
+ }
+ let e = match e {
+ ast::Expr::BreakExpr(e) => e.expr(),
+ e @ ast::Expr::CallExpr(_) => Some(e.clone()),
+ _ => None,
+ };
+ if let Some(ast::Expr::CallExpr(call)) = e {
+ if let Some(ast::Expr::PathExpr(p)) = call.expr() {
+ let res = p.path().and_then(|p| sema.resolve_path(&p));
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res {
+ return invalid |= v != some_variant;
+ }
+ }
+ }
+ invalid = true
+ });
+ }
+ invalid
+}
+
+fn block_is_none_variant(
+ sema: &Semantics<'_, RootDatabase>,
+ block: &ast::BlockExpr,
+ none_variant: hir::Variant,
+) -> bool {
+ block_as_lone_tail(block).and_then(|e| match e {
+ ast::Expr::PathExpr(pat) => match sema.resolve_path(&pat.path()?)? {
+ hir::PathResolution::Def(hir::ModuleDef::Variant(v)) => Some(v),
+ _ => None,
+ },
+ _ => None,
+ }) == Some(none_variant)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn convert_if_to_bool_then_simple() {
+ check_assist(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ r"
+fn main() {
+ true.then(|| 15)
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_invert() {
+ check_assist(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ None
+ } else {
+ Some(15)
+ }
+}
+",
+ r"
+fn main() {
+ false.then(|| 15)
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_none_none() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ None
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_some_some() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ Some(15)
+ } else {
+ Some(15)
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_mixed() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ if true {
+ Some(15)
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_chain() {
+ cov_mark::check!(convert_if_to_bool_then_chain);
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ Some(15)
+ } else if true {
+ None
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_pattern_cond() {
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 let true = true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_if_to_bool_then_pattern_invalid_body() {
+ cov_mark::check_count!(convert_if_to_bool_then_pattern_invalid_body, 2);
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn make_me_an_option() -> Option<i32> { None }
+fn main() {
+ if$0 true {
+ if true {
+ make_me_an_option()
+ } else {
+ Some(15)
+ }
+ } else {
+ None
+ }
+}
+",
+ );
+ check_assist_not_applicable(
+ convert_if_to_bool_then,
+ r"
+//- minicore:option
+fn main() {
+ if$0 true {
+ if true {
+ return;
+ }
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_bool_then_to_if_inapplicable() {
+ check_assist_not_applicable(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ 0.t$0hen(|| 15);
+}
+",
+ );
+ check_assist_not_applicable(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(15);
+}
+",
+ );
+ check_assist_not_applicable(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| 15, 15);
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_bool_then_to_if_simple() {
+ check_assist(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| 15)
+}
+",
+ r"
+fn main() {
+ if true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ check_assist(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| {
+ 15
+ })
+}
+",
+ r"
+fn main() {
+ if true {
+ Some(15)
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn convert_bool_then_to_if_tails() {
+ check_assist(
+ convert_bool_then_to_if,
+ r"
+//- minicore:bool_impl
+fn main() {
+ true.t$0hen(|| {
+ loop {
+ if false {
+ break 0;
+ }
+ break 15;
+ }
+ })
+}
+",
+ r"
+fn main() {
+ if true {
+ loop {
+ if false {
+ break Some(0);
+ }
+ break Some(15);
+ }
+ } else {
+ None
+ }
+}
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
new file mode 100644
index 000000000..f171dd81a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
@@ -0,0 +1,395 @@
+use itertools::Itertools;
+use syntax::{
+ ast::{self, edit::IndentLevel, Comment, CommentKind, CommentShape, Whitespace},
+ AstToken, Direction, SyntaxElement, TextRange,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: line_to_block
+//
+// Converts comments between block and single-line form.
+//
+// ```
+// // Multi-line$0
+// // comment
+// ```
+// ->
+// ```
+// /*
+// Multi-line
+// comment
+// */
+// ```
+pub(crate) fn convert_comment_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let comment = ctx.find_token_at_offset::<ast::Comment>()?;
+ // Only allow comments which are alone on their line
+ if let Some(prev) = comment.syntax().prev_token() {
+ if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
+ return None;
+ }
+ }
+
+ match comment.kind().shape {
+ ast::CommentShape::Block => block_to_line(acc, comment),
+ ast::CommentShape::Line => line_to_block(acc, comment),
+ }
+}
+
+fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
+ let target = comment.syntax().text_range();
+
+ acc.add(
+ AssistId("block_to_line", AssistKind::RefactorRewrite),
+ "Replace block comment with line comments",
+ target,
+ |edit| {
+ let indentation = IndentLevel::from_token(comment.syntax());
+ let line_prefix = CommentKind { shape: CommentShape::Line, ..comment.kind() }.prefix();
+
+ let text = comment.text();
+ let text = &text[comment.prefix().len()..(text.len() - "*/".len())].trim();
+
+ let lines = text.lines().peekable();
+
+ let indent_spaces = indentation.to_string();
+ let output = lines
+ .map(|l| l.trim_start_matches(&indent_spaces))
+ .map(|l| {
+ // Don't introduce trailing whitespace
+ if l.is_empty() {
+ line_prefix.to_string()
+ } else {
+ format!("{} {}", line_prefix, l.trim_start_matches(&indent_spaces))
+ }
+ })
+ .join(&format!("\n{}", indent_spaces));
+
+ edit.replace(target, output)
+ },
+ )
+}
+
+fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
+ // Find all the comments we'll be collapsing into a block
+ let comments = relevant_line_comments(&comment);
+
+ // Establish the target of our edit based on the comments we found
+ let target = TextRange::new(
+ comments[0].syntax().text_range().start(),
+ comments.last().unwrap().syntax().text_range().end(),
+ );
+
+ acc.add(
+ AssistId("line_to_block", AssistKind::RefactorRewrite),
+ "Replace line comments with a single block comment",
+ target,
+ |edit| {
+ // We pick a single indentation level for the whole block comment based on the
+ // comment where the assist was invoked. This will be prepended to the
+ // contents of each line comment when they're put into the block comment.
+ let indentation = IndentLevel::from_token(comment.syntax());
+
+ let block_comment_body =
+ comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
+
+ let block_prefix =
+ CommentKind { shape: CommentShape::Block, ..comment.kind() }.prefix();
+
+ let output = format!("{}\n{}\n{}*/", block_prefix, block_comment_body, indentation);
+
+ edit.replace(target, output)
+ },
+ )
+}
+
+/// The line -> block assist can be invoked from anywhere within a sequence of line comments.
+/// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will
+/// be joined.
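+///
+/// For example (illustrative): with the cursor on the middle of three adjacent `//` comments,
+/// all three are collected; a blank line or a comment with a different prefix (such as `///`)
+/// ends the sequence on that side.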
+fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
+ // The prefix identifies the kind of comment we're dealing with
+ let prefix = comment.prefix();
+ let same_prefix = |c: &ast::Comment| c.prefix() == prefix;
+
+ // These tokens are allowed to exist between comments
+ let skippable = |not: &SyntaxElement| {
+ not.clone()
+ .into_token()
+ .and_then(Whitespace::cast)
+ .map(|w| !w.spans_multiple_lines())
+ .unwrap_or(false)
+ };
+
+ // Find all preceding comments (in reverse order) that have the same prefix
+ let prev_comments = comment
+ .syntax()
+ .siblings_with_tokens(Direction::Prev)
+ .filter(|s| !skippable(s))
+ .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
+ .take_while(|opt_com| opt_com.is_some())
+ .flatten()
+ .skip(1); // skip the first element so we don't duplicate it in next_comments
+
+ let next_comments = comment
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter(|s| !skippable(s))
+ .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
+ .take_while(|opt_com| opt_com.is_some())
+ .flatten();
+
+ let mut comments: Vec<_> = prev_comments.collect();
+ comments.reverse();
+ comments.extend(next_comments);
+ comments
+}
+
+// Line comments usually begin with a single space character following the prefix as seen here:
+//^
+// But comments can also include indented text:
+// > Hello there
+//
+// We handle this by stripping *AT MOST* one space character from the start of the line
+// This has its own problems because it can cause alignment issues:
+//
+// /*
+// a ----> a
+//b ----> b
+// */
+//
+// But since such comments aren't idiomatic we're okay with this.
+fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
+ let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
+ let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
+
+ // Don't add the indentation if the line is empty
+ if contents.is_empty() {
+ contents.to_owned()
+ } else {
+ indentation.to_string() + contents
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn single_line_to_block() {
+ check_assist(
+ convert_comment_block,
+ r#"
+// line$0 comment
+fn main() {
+ foo();
+}
+"#,
+ r#"
+/*
+line comment
+*/
+fn main() {
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_line_to_block_indented() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ // line$0 comment
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ /*
+ line comment
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiline_to_block() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ // above
+ // line$0 comment
+ //
+ // below
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ /*
+ above
+ line comment
+
+ below
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn end_of_line_to_block() {
+ check_assist_not_applicable(
+ convert_comment_block,
+ r#"
+fn main() {
+ foo(); // end-of-line$0 comment
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_line_different_kinds() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ /// different prefix
+ // line$0 comment
+ // below
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ /// different prefix
+ /*
+ line comment
+ below
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_line_separate_chunks() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ // different chunk
+
+ // line$0 comment
+ // below
+ foo();
+}
+"#,
+ r#"
+fn main() {
+ // different chunk
+
+ /*
+ line comment
+ below
+ */
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn doc_block_comment_to_lines() {
+ check_assist(
+ convert_comment_block,
+ r#"
+/**
+ hi$0 there
+*/
+"#,
+ r#"
+/// hi there
+"#,
+ );
+ }
+
+ #[test]
+ fn block_comment_to_lines() {
+ check_assist(
+ convert_comment_block,
+ r#"
+/*
+ hi$0 there
+*/
+"#,
+ r#"
+// hi there
+"#,
+ );
+ }
+
+ #[test]
+ fn inner_doc_block_to_lines() {
+ check_assist(
+ convert_comment_block,
+ r#"
+/*!
+ hi$0 there
+*/
+"#,
+ r#"
+//! hi there
+"#,
+ );
+ }
+
+ #[test]
+ fn block_to_lines_indent() {
+ check_assist(
+ convert_comment_block,
+ r#"
+fn main() {
+ /*!
+ hi$0 there
+
+ ```
+ code_sample
+ ```
+ */
+}
+"#,
+ r#"
+fn main() {
+ //! hi there
+ //!
+ //! ```
+ //! code_sample
+ //! ```
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn end_of_line_block_to_line() {
+ check_assist_not_applicable(
+ convert_comment_block,
+ r#"
+fn main() {
+ foo(); /* end-of-line$0 comment */
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
new file mode 100644
index 000000000..9060696cd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
@@ -0,0 +1,268 @@
+use syntax::{ast, ast::Radix, AstToken};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+
+// Assist: convert_integer_literal
+//
+// Converts the base of integer literals to other bases.
+//
+// ```
+// const _: i32 = 10$0;
+// ```
+// ->
+// ```
+// const _: i32 = 0b1010;
+// ```
+pub(crate) fn convert_integer_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let literal = ctx.find_node_at_offset::<ast::Literal>()?;
+ let literal = match literal.kind() {
+ ast::LiteralKind::IntNumber(it) => it,
+ _ => return None,
+ };
+ let radix = literal.radix();
+ let value = literal.value()?;
+ let suffix = literal.suffix();
+
+ let range = literal.syntax().text_range();
+ let group_id = GroupLabel("Convert integer base".into());
+
+ for &target_radix in Radix::ALL {
+ if target_radix == radix {
+ continue;
+ }
+
+ let mut converted = match target_radix {
+ Radix::Binary => format!("0b{:b}", value),
+ Radix::Octal => format!("0o{:o}", value),
+ Radix::Decimal => value.to_string(),
+ Radix::Hexadecimal => format!("0x{:X}", value),
+ };
+
+ let label = format!("Convert {} to {}{}", literal, converted, suffix.unwrap_or_default());
+
+ // Appends the type suffix back into the new literal if it exists.
+ if let Some(suffix) = suffix {
+ converted.push_str(suffix);
+ }
+
+ acc.add_group(
+ &group_id,
+ AssistId("convert_integer_literal", AssistKind::RefactorInline),
+ label,
+ range,
+ |builder| builder.replace(range, converted),
+ );
+ }
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist_by_label, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn binary_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0b1010$0;", "0b1010");
+ }
+
+ #[test]
+ fn octal_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0o12$0;", "0o12");
+ }
+
+ #[test]
+ fn decimal_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 10$0;", "10");
+ }
+
+ #[test]
+ fn hexadecimal_target() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0xA$0;", "0xA");
+ }
+
+ #[test]
+ fn binary_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0b10_10$0;", "0b10_10");
+ }
+
+ #[test]
+ fn octal_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0o1_2$0;", "0o1_2");
+ }
+
+ #[test]
+ fn decimal_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 1_0$0;", "1_0");
+ }
+
+ #[test]
+ fn hexadecimal_target_with_underscores() {
+ check_assist_target(convert_integer_literal, "const _: i32 = 0x_A$0;", "0x_A");
+ }
+
+ #[test]
+ fn convert_decimal_integer() {
+ let before = "const _: i32 = 1000$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b1111101000;",
+ "Convert 1000 to 0b1111101000",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o1750;",
+ "Convert 1000 to 0o1750",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0x3E8;",
+ "Convert 1000 to 0x3E8",
+ );
+ }
+
+ #[test]
+ fn convert_hexadecimal_integer() {
+ let before = "const _: i32 = 0xFF$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b11111111;",
+ "Convert 0xFF to 0b11111111",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o377;",
+ "Convert 0xFF to 0o377",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 255;",
+ "Convert 0xFF to 255",
+ );
+ }
+
+ #[test]
+ fn convert_binary_integer() {
+ let before = "const _: i32 = 0b11111111$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o377;",
+ "Convert 0b11111111 to 0o377",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 255;",
+ "Convert 0b11111111 to 255",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0xFF;",
+ "Convert 0b11111111 to 0xFF",
+ );
+ }
+
+ #[test]
+ fn convert_octal_integer() {
+ let before = "const _: i32 = 0o377$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b11111111;",
+ "Convert 0o377 to 0b11111111",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 255;",
+ "Convert 0o377 to 255",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0xFF;",
+ "Convert 0o377 to 0xFF",
+ );
+ }
+
+ #[test]
+ fn convert_integer_with_underscores() {
+ let before = "const _: i32 = 1_00_0$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b1111101000;",
+ "Convert 1_00_0 to 0b1111101000",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o1750;",
+ "Convert 1_00_0 to 0o1750",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0x3E8;",
+ "Convert 1_00_0 to 0x3E8",
+ );
+ }
+
+ #[test]
+ fn convert_integer_with_suffix() {
+ let before = "const _: i32 = 1000i32$0;";
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0b1111101000i32;",
+ "Convert 1000i32 to 0b1111101000i32",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0o1750i32;",
+ "Convert 1000i32 to 0o1750i32",
+ );
+
+ check_assist_by_label(
+ convert_integer_literal,
+ before,
+ "const _: i32 = 0x3E8i32;",
+ "Convert 1000i32 to 0x3E8i32",
+ );
+ }
+
+ #[test]
+ fn convert_overflowing_literal() {
+ let before = "const _: i32 =
+ 111111111111111111111111111111111111111111111111111111111111111111111111$0;";
+ check_assist_not_applicable(convert_integer_literal, before);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
new file mode 100644
index 000000000..30f6dd41a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
@@ -0,0 +1,351 @@
+use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: this should be a diagnostic
+
+// Assist: convert_into_to_from
+//
+// Converts an Into impl to an equivalent From impl.
+//
+// ```
+// # //- minicore: from
+// impl $0Into<Thing> for usize {
+// fn into(self) -> Thing {
+// Thing {
+// b: self.to_string(),
+// a: self
+// }
+// }
+// }
+// ```
+// ->
+// ```
+// impl From<usize> for Thing {
+// fn from(val: usize) -> Self {
+// Thing {
+// b: val.to_string(),
+// a: val
+// }
+// }
+// }
+// ```
+pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_ = ctx.find_node_at_offset::<ast::Impl>()?;
+ let src_type = impl_.self_ty()?;
+ let ast_trait = impl_.trait_()?;
+
+ let module = ctx.sema.scope(impl_.syntax())?.module();
+
+ let trait_ = resolve_target_trait(&ctx.sema, &impl_)?;
+ if trait_ != FamousDefs(&ctx.sema, module.krate()).core_convert_Into()? {
+ return None;
+ }
+
+ let src_type_path = {
+ let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?;
+ let src_type_def = match ctx.sema.resolve_path(&src_type_path) {
+ Some(hir::PathResolution::Def(module_def)) => module_def,
+ _ => return None,
+ };
+
+ mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def)?)
+ };
+
+ let dest_type = match &ast_trait {
+ ast::Type::PathType(path) => {
+ path.path()?.segment()?.generic_arg_list()?.generic_args().next()?
+ }
+ _ => return None,
+ };
+
+ let into_fn = impl_.assoc_item_list()?.assoc_items().find_map(|item| {
+ if let ast::AssocItem::Fn(f) = item {
+ if f.name()?.text() == "into" {
+ return Some(f);
+ }
+ };
+ None
+ })?;
+
+ let into_fn_name = into_fn.name()?;
+ let into_fn_params = into_fn.param_list()?;
+ let into_fn_return = into_fn.ret_type()?;
+
+ let selfs = into_fn
+ .body()?
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .filter(|name| name.text() == "self" || name.text() == "Self");
+
+ acc.add(
+ AssistId("convert_into_to_from", AssistKind::RefactorRewrite),
+ "Convert Into to From",
+ impl_.syntax().text_range(),
+ |builder| {
+ builder.replace(src_type.syntax().text_range(), dest_type.to_string());
+ builder.replace(ast_trait.syntax().text_range(), format!("From<{}>", src_type));
+ builder.replace(into_fn_return.syntax().text_range(), "-> Self");
+ builder.replace(into_fn_params.syntax().text_range(), format!("(val: {})", src_type));
+ builder.replace(into_fn_name.syntax().text_range(), "from");
+
+ for s in selfs {
+ match s.text().as_ref() {
+ "self" => builder.replace(s.syntax().text_range(), "val"),
+ "Self" => builder.replace(s.syntax().text_range(), src_type_path.to_string()),
+ _ => {}
+ }
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn convert_into_to_from_converts_a_struct() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+struct Thing {
+ a: String,
+ b: usize
+}
+
+impl $0core::convert::Into<Thing> for usize {
+ fn into(self) -> Thing {
+ Thing {
+ b: self.to_string(),
+ a: self
+ }
+ }
+}
+"#,
+ r#"
+struct Thing {
+ a: String,
+ b: usize
+}
+
+impl From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_converts_enums() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+enum Thing {
+ Foo(String),
+ Bar(String)
+}
+
+impl $0core::convert::Into<String> for Thing {
+ fn into(self) -> String {
+ match self {
+ Self::Foo(s) => s,
+ Self::Bar(s) => s
+ }
+ }
+}
+"#,
+ r#"
+enum Thing {
+ Foo(String),
+ Bar(String)
+}
+
+impl From<Thing> for String {
+ fn from(val: Thing) -> Self {
+ match val {
+ Thing::Foo(s) => s,
+ Thing::Bar(s) => s
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_on_enum_with_lifetimes() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+enum Thing<'a> {
+ Foo(&'a str),
+ Bar(&'a str)
+}
+
+impl<'a> $0core::convert::Into<&'a str> for Thing<'a> {
+ fn into(self) -> &'a str {
+ match self {
+ Self::Foo(s) => s,
+ Self::Bar(s) => s
+ }
+ }
+}
+"#,
+ r#"
+enum Thing<'a> {
+ Foo(&'a str),
+ Bar(&'a str)
+}
+
+impl<'a> From<Thing<'a>> for &'a str {
+ fn from(val: Thing<'a>) -> Self {
+ match val {
+ Thing::Foo(s) => s,
+ Thing::Bar(s) => s
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_works_on_references() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+struct Thing(String);
+
+impl $0core::convert::Into<String> for &Thing {
+ fn into(self) -> Thing {
+ self.0.clone()
+ }
+}
+"#,
+ r#"
+struct Thing(String);
+
+impl From<&Thing> for String {
+ fn from(val: &Thing) -> Self {
+ val.0.clone()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_works_on_qualified_structs() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+mod things {
+ pub struct Thing(String);
+ pub struct BetterThing(String);
+}
+
+impl $0core::convert::Into<things::BetterThing> for &things::Thing {
+ fn into(self) -> Thing {
+ things::BetterThing(self.0.clone())
+ }
+}
+"#,
+ r#"
+mod things {
+ pub struct Thing(String);
+ pub struct BetterThing(String);
+}
+
+impl From<&things::Thing> for things::BetterThing {
+ fn from(val: &things::Thing) -> Self {
+ things::BetterThing(val.0.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_works_on_qualified_enums() {
+ check_assist(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+mod things {
+ pub enum Thing {
+ A(String)
+ }
+ pub struct BetterThing {
+ B(String)
+ }
+}
+
+impl $0core::convert::Into<things::BetterThing> for &things::Thing {
+ fn into(self) -> Thing {
+ match self {
+ Self::A(s) => things::BetterThing::B(s)
+ }
+ }
+}
+"#,
+ r#"
+mod things {
+ pub enum Thing {
+ A(String)
+ }
+ pub struct BetterThing {
+ B(String)
+ }
+}
+
+impl From<&things::Thing> for things::BetterThing {
+ fn from(val: &things::Thing) -> Self {
+ match val {
+ things::Thing::A(s) => things::BetterThing::B(s)
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn convert_into_to_from_not_applicable_on_any_trait_named_into() {
+ check_assist_not_applicable(
+ convert_into_to_from,
+ r#"
+//- minicore: from
+pub trait Into<T> {
+ pub fn into(self) -> T;
+}
+
+struct Thing {
+ a: String,
+}
+
+impl $0Into<Thing> for String {
+ fn into(self) -> Thing {
+ Thing {
+ a: self
+ }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
new file mode 100644
index 000000000..2cf370c09
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
@@ -0,0 +1,556 @@
+use hir::known;
+use ide_db::famous_defs::FamousDefs;
+use stdx::format_to;
+use syntax::{
+ ast::{self, edit_in_place::Indent, make, HasArgList, HasLoopBody},
+ AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_iter_for_each_to_for
+//
+// Converts an Iterator::for_each function into a for loop.
+//
+// ```
+// # //- minicore: iterators
+// # use core::iter;
+// fn main() {
+// let iter = iter::repeat((9, 2));
+// iter.for_each$0(|(x, y)| {
+// println!("x: {}, y: {}", x, y);
+// });
+// }
+// ```
+// ->
+// ```
+// # use core::iter;
+// fn main() {
+// let iter = iter::repeat((9, 2));
+// for (x, y) in iter {
+// println!("x: {}, y: {}", x, y);
+// }
+// }
+// ```
+pub(crate) fn convert_iter_for_each_to_for(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let method = ctx.find_node_at_offset::<ast::MethodCallExpr>()?;
+
+ let closure = match method.arg_list()?.args().next()? {
+ ast::Expr::ClosureExpr(expr) => expr,
+ _ => return None,
+ };
+
+ let (method, receiver) = validate_method_call_expr(ctx, method)?;
+
+ let param_list = closure.param_list()?;
+ let param = param_list.params().next()?.pat()?;
+ let body = closure.body()?;
+
+ let stmt = method.syntax().parent().and_then(ast::ExprStmt::cast);
+ let range = stmt.as_ref().map_or(method.syntax(), AstNode::syntax).text_range();
+
+ acc.add(
+ AssistId("convert_iter_for_each_to_for", AssistKind::RefactorRewrite),
+ "Replace this `Iterator::for_each` with a for loop",
+ range,
+ |builder| {
+ let indent =
+ stmt.as_ref().map_or_else(|| method.indent_level(), ast::ExprStmt::indent_level);
+
+ let block = match body {
+ ast::Expr::BlockExpr(block) => block,
+ _ => make::block_expr(Vec::new(), Some(body)),
+ }
+ .clone_for_update();
+ block.reindent_to(indent);
+
+ let expr_for_loop = make::expr_for_loop(param, receiver, block);
+ builder.replace(range, expr_for_loop.to_string())
+ },
+ )
+}
+
+// Assist: convert_for_loop_with_for_each
+//
+// Converts a for loop into a for_each loop on the Iterator.
+//
+// ```
+// fn main() {
+// let x = vec![1, 2, 3];
+// for$0 v in x {
+// let y = v * 2;
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let x = vec![1, 2, 3];
+// x.into_iter().for_each(|v| {
+// let y = v * 2;
+// });
+// }
+// ```
+pub(crate) fn convert_for_loop_with_for_each(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let for_loop = ctx.find_node_at_offset::<ast::ForExpr>()?;
+ let iterable = for_loop.iterable()?;
+ let pat = for_loop.pat()?;
+ let body = for_loop.loop_body()?;
+ if body.syntax().text_range().start() < ctx.offset() {
+ cov_mark::hit!(not_available_in_body);
+ return None;
+ }
+
+ acc.add(
+ AssistId("convert_for_loop_with_for_each", AssistKind::RefactorRewrite),
+ "Replace this for loop with `Iterator::for_each`",
+ for_loop.syntax().text_range(),
+ |builder| {
+ let mut buf = String::new();
+
+ if let Some((expr_behind_ref, method)) =
+ is_ref_and_impls_iter_method(&ctx.sema, &iterable)
+ {
+ // We have either "for x in &col", where col implements a method called iter,
+ // or "for x in &mut col", where col implements a method called iter_mut
+ format_to!(buf, "{}.{}()", expr_behind_ref, method);
+ } else if let ast::Expr::RangeExpr(..) = iterable {
+ // range expressions need to be parenthesized for the syntax to be correct
+ format_to!(buf, "({})", iterable);
+ } else if impls_core_iter(&ctx.sema, &iterable) {
+ format_to!(buf, "{}", iterable);
+ } else if let ast::Expr::RefExpr(_) = iterable {
+ format_to!(buf, "({}).into_iter()", iterable);
+ } else {
+ format_to!(buf, "{}.into_iter()", iterable);
+ }
+
+ format_to!(buf, ".for_each(|{}| {});", pat, body);
+
+ builder.replace(for_loop.syntax().text_range(), buf)
+ },
+ )
+}
+
+/// If `iterable` is a reference, and the expression behind the reference has a method named
+/// `iter` or `iter_mut` (depending on the kind of reference) that returns an Iterator, then
+/// return the expression behind the reference together with the method name.
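+///
+/// Illustrative only (`col` is a hypothetical collection): for `&col` this yields
+/// `(col, iter)`, while for `&mut col` it looks for an `iter_mut` method instead.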
+fn is_ref_and_impls_iter_method(
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ iterable: &ast::Expr,
+) -> Option<(ast::Expr, hir::Name)> {
+ let ref_expr = match iterable {
+ ast::Expr::RefExpr(r) => r,
+ _ => return None,
+ };
+ let wanted_method = if ref_expr.mut_token().is_some() { known::iter_mut } else { known::iter };
+ let expr_behind_ref = ref_expr.expr()?;
+ let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted();
+ let scope = sema.scope(iterable.syntax())?;
+ let krate = scope.krate();
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+
+ let has_wanted_method = ty
+ .iterate_method_candidates(
+ sema.db,
+ &scope,
+ &scope.visible_traits().0,
+ None,
+ Some(&wanted_method),
+ |func| {
+ if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
+ return Some(());
+ }
+ None
+ },
+ )
+ .is_some();
+ if !has_wanted_method {
+ return None;
+ }
+
+ Some((expr_behind_ref, wanted_method))
+}
+
+/// Whether iterable implements core::Iterator
+fn impls_core_iter(sema: &hir::Semantics<'_, ide_db::RootDatabase>, iterable: &ast::Expr) -> bool {
+ (|| {
+ let it_typ = sema.type_of_expr(iterable)?.adjusted();
+
+ let module = sema.scope(iterable.syntax())?.module();
+
+ let krate = module.krate();
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+ cov_mark::hit!(test_already_impls_iterator);
+ Some(it_typ.impls_trait(sema.db, iter_trait, &[]))
+ })()
+ .unwrap_or(false)
+}
+
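+/// Checks that the cursor is on the `for_each` name and that the receiver's adjusted type
+/// implements `core::iter::Iterator`; on success, returns the whole method-call expression
+/// together with its receiver.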
+fn validate_method_call_expr(
+ ctx: &AssistContext<'_>,
+ expr: ast::MethodCallExpr,
+) -> Option<(ast::Expr, ast::Expr)> {
+ let name_ref = expr.name_ref()?;
+ if !name_ref.syntax().text_range().contains_range(ctx.selection_trimmed()) {
+ cov_mark::hit!(test_for_each_not_applicable_invalid_cursor_pos);
+ return None;
+ }
+ if name_ref.text() != "for_each" {
+ return None;
+ }
+
+ let sema = &ctx.sema;
+
+ let receiver = expr.receiver()?;
+ let expr = ast::Expr::MethodCallExpr(expr);
+
+ let it_type = sema.type_of_expr(&receiver)?.adjusted();
+ let module = sema.scope(receiver.syntax())?.module();
+ let krate = module.krate();
+
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+ it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_for_each_in_method_stmt() {
+ check_assist(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ let it = core::iter::repeat(92);
+ it.$0for_each(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ });
+}
+"#,
+ r#"
+fn main() {
+ let it = core::iter::repeat(92);
+ for (x, y) in it {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_in_method() {
+ check_assist(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ let it = core::iter::repeat(92);
+ it.$0for_each(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ })
+}
+"#,
+ r#"
+fn main() {
+ let it = core::iter::repeat(92);
+ for (x, y) in it {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_without_braces_stmt() {
+ check_assist(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ let it = core::iter::repeat(92);
+ it.$0for_each(|(x, y)| println!("x: {}, y: {}", x, y));
+}
+"#,
+ r#"
+fn main() {
+ let it = core::iter::repeat(92);
+ for (x, y) in it {
+ println!("x: {}, y: {}", x, y)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_not_applicable() {
+ check_assist_not_applicable(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ ().$0for_each(|x| println!("{}", x));
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_for_each_not_applicable_invalid_cursor_pos() {
+ cov_mark::check!(test_for_each_not_applicable_invalid_cursor_pos);
+ check_assist_not_applicable(
+ convert_iter_for_each_to_for,
+ r#"
+//- minicore: iterators
+fn main() {
+ core::iter::repeat(92).for_each(|(x, y)| $0println!("x: {}, y: {}", x, y));
+}"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_not_for() {
+ check_assist_not_applicable(
+ convert_for_loop_with_for_each,
+ r"
+let mut x = vec![1, 2, 3];
+x.iter_mut().$0for_each(|v| *v *= 2);
+ ",
+ )
+ }
+
+ #[test]
+ fn each_to_for_simple_for() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ for $0v in x {
+ v *= 2;
+ }
+}",
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ x.into_iter().for_each(|v| {
+ v *= 2;
+ });
+}",
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_in_range() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: range, iterators
+impl<T> core::iter::Iterator for core::ops::Range<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ for $0x in 0..92 {
+ print!("{}", x);
+ }
+}"#,
+ r#"
+impl<T> core::iter::Iterator for core::ops::Range<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ (0..92).for_each(|x| {
+ print!("{}", x);
+ });
+}"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_not_available_in_body() {
+ cov_mark::check!(not_available_in_body);
+ check_assist_not_applicable(
+ convert_for_loop_with_for_each,
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ for v in x {
+ $0v *= 2;
+ }
+}",
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: iterators
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ for $0v in &x {
+ let a = v * 2;
+ }
+}
+"#,
+ r#"
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ x.iter().for_each(|v| {
+ let a = v * 2;
+ });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed_no_iter_method() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r"
+struct NoIterMethod;
+fn main() {
+ let x = NoIterMethod;
+ for $0v in &x {
+ let a = v * 2;
+ }
+}
+",
+ r"
+struct NoIterMethod;
+fn main() {
+ let x = NoIterMethod;
+ (&x).into_iter().for_each(|v| {
+ let a = v * 2;
+ });
+}
+",
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed_mut() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: iterators
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ for $0v in &mut x {
+ let a = v * 2;
+ }
+}
+"#,
+ r#"
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+ fn iter(&self) -> Repeat<i32> { repeat(92) }
+ fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+ let x = S;
+ x.iter_mut().for_each(|v| {
+ let a = v * 2;
+ });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn each_to_for_for_borrowed_mut_behind_var() {
+ check_assist(
+ convert_for_loop_with_for_each,
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ let y = &mut x;
+ for $0v in y {
+ *v *= 2;
+ }
+}",
+ r"
+fn main() {
+ let x = vec![1, 2, 3];
+ let y = &mut x;
+ y.into_iter().for_each(|v| {
+ *v *= 2;
+ });
+}",
+ )
+ }
+
+ #[test]
+ fn each_to_for_already_impls_iterator() {
+ cov_mark::check!(test_already_impls_iterator);
+ check_assist(
+ convert_for_loop_with_for_each,
+ r#"
+//- minicore: iterators
+fn main() {
+ for$0 a in core::iter::repeat(92).take(1) {
+ println!("{}", a);
+ }
+}
+"#,
+ r#"
+fn main() {
+ core::iter::repeat(92).take(1).for_each(|a| {
+ println!("{}", a);
+ });
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
new file mode 100644
index 000000000..00095de25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
@@ -0,0 +1,497 @@
+use hir::Semantics;
+use ide_db::RootDatabase;
+use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat};
+use syntax::T;
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+/// Gets a list of binders in a pattern, and whether they are mut.
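+///
+/// For example (illustrative), the pattern `Ok((x, mut y))` collects `[(x, false), (y, true)]`;
+/// constants, wildcards and literals contribute no binders, and macro patterns are rejected
+/// entirely by returning `None`.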
+fn binders_in_pat(
+ acc: &mut Vec<(Name, bool)>,
+ pat: &Pat,
+ sem: &Semantics<'_, RootDatabase>,
+) -> Option<()> {
+ use Pat::*;
+ match pat {
+ IdentPat(p) => {
+ let ident = p.name()?;
+ let ismut = p.ref_token().is_none() && p.mut_token().is_some();
+ // only record a binder if the pattern does not resolve to a constant
+ if sem.resolve_bind_pat_to_const(p).is_none() {
+ acc.push((ident, ismut));
+ }
+ if let Some(inner) = p.pat() {
+ binders_in_pat(acc, &inner, sem)?;
+ }
+ Some(())
+ }
+ BoxPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
+ RestPat(_) | LiteralPat(_) | PathPat(_) | WildcardPat(_) | ConstBlockPat(_) => Some(()),
+ OrPat(p) => {
+ for p in p.pats() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ ParenPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
+ RangePat(p) => {
+ if let Some(st) = p.start() {
+ binders_in_pat(acc, &st, sem)?
+ }
+ if let Some(ed) = p.end() {
+ binders_in_pat(acc, &ed, sem)?
+ }
+ Some(())
+ }
+ RecordPat(p) => {
+ for f in p.record_pat_field_list()?.fields() {
+ let pat = f.pat()?;
+ binders_in_pat(acc, &pat, sem)?;
+ }
+ Some(())
+ }
+ RefPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
+ SlicePat(p) => {
+ for p in p.pats() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ TuplePat(p) => {
+ for p in p.fields() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ TupleStructPat(p) => {
+ for p in p.fields() {
+ binders_in_pat(acc, &p, sem)?;
+ }
+ Some(())
+ }
+ // don't support macro pat yet
+ MacroPat(_) => None,
+ }
+}
+
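+/// Renders the collected binders for one side of the generated `match`: `{}` when there are
+/// no binders, the bare name for a single binder, and a tuple `(a, b, ...)` otherwise;
+/// `addmut` controls whether `mut` is prepended to mutable binders.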
+fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String {
+ let vars = binders
+ .iter()
+ .map(
+ |(ident, ismut)| {
+ if *ismut && addmut {
+ format!("mut {}", ident)
+ } else {
+ ident.to_string()
+ }
+ },
+ )
+ .collect::<Vec<_>>()
+ .join(", ");
+ if binders.is_empty() {
+ String::from("{}")
+ } else if binders.len() == 1 {
+ vars
+ } else {
+ format!("({})", vars)
+ }
+}
+
+// Assist: convert_let_else_to_match
+//
+// Converts a let-else statement into a let statement and a match expression.
+//
+// ```
+// fn main() {
+// let Ok(mut x) = f() else$0 { return };
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let mut x = match f() {
+// Ok(x) => x,
+// _ => return,
+// };
+// }
+// ```
+pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // the assist should only trigger when the cursor is on the `else` token
+ let else_token = ctx.find_token_syntax_at_offset(T![else])?;
+ let let_stmt = LetStmt::cast(else_token.parent()?.parent()?)?;
+ let let_else_block = let_stmt.let_else()?.block_expr()?;
+ let let_init = let_stmt.initializer()?;
+ if let_stmt.ty().is_some() {
+ // don't support let with type annotation
+ return None;
+ }
+ let pat = let_stmt.pat()?;
+ let mut binders = Vec::new();
+ binders_in_pat(&mut binders, &pat, &ctx.sema)?;
+
+ let target = let_stmt.syntax().text_range();
+ acc.add(
+ AssistId("convert_let_else_to_match", AssistKind::RefactorRewrite),
+ "Convert let-else to let and match",
+ target,
+ |edit| {
+ let indent_level = let_stmt.indent_level().0 as usize;
+ let indent = " ".repeat(indent_level);
+ let indent1 = " ".repeat(indent_level + 1);
+
+ let binders_str = binders_to_str(&binders, false);
+ let binders_str_mut = binders_to_str(&binders, true);
+
+ let init_expr = let_init.syntax().text();
+ let mut pat_no_mut = pat.syntax().text().to_string();
+ // remove the mut from the pattern
+ for (b, ismut) in binders.iter() {
+ if *ismut {
+ pat_no_mut = pat_no_mut.replace(&format!("mut {b}"), &b.to_string());
+ }
+ }
+
+ let only_expr = let_else_block.statements().next().is_none();
+ let branch2 = match &let_else_block.tail_expr() {
+ Some(tail) if only_expr => format!("{},", tail.syntax().text()),
+ _ => let_else_block.syntax().text().to_string(),
+ };
+ let replace = if binders.is_empty() {
+ format!(
+ "match {init_expr} {{
+{indent1}{pat_no_mut} => {binders_str}
+{indent1}_ => {branch2}
+{indent}}}"
+ )
+ } else {
+ format!(
+ "let {binders_str_mut} = match {init_expr} {{
+{indent1}{pat_no_mut} => {binders_str},
+{indent1}_ => {branch2}
+{indent}}};"
+ )
+ };
+ edit.replace(target, replace);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn convert_let_else_to_match_no_type_let() {
+ check_assist_not_applicable(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let 1: u32 = v.iter().sum() else$0 { return };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_on_else() {
+ check_assist_not_applicable(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let Ok(x) = f() else {$0 return };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_no_macropat() {
+ check_assist_not_applicable(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let m!() = g() else$0 { return };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_target() {
+ check_assist_target(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(x) = f() else$0 { continue };
+}",
+ "let Ok(x) = f() else { continue };",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_basic() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(x) = f() else$0 { continue };
+}",
+ r"
+fn main() {
+ let x = match f() {
+ Ok(x) => x,
+ _ => continue,
+ };
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_const_ref() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+fn main() {
+ let None = f() el$0se { continue };
+}",
+ r"
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+fn main() {
+ match f() {
+ None => {}
+ _ => continue,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_const_ref_const() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+const NEG1: i32 = -1;
+fn main() {
+ let NEG1 = f() el$0se { continue };
+}",
+ r"
+const NEG1: i32 = -1;
+fn main() {
+ match f() {
+ NEG1 => {}
+ _ => continue,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_mut() {
+ check_assist(
+ convert_let_else_to_match,
+ r"
+fn main() {
+ let Ok(mut x) = f() el$0se { continue };
+}",
+ r"
+fn main() {
+ let mut x = match f() {
+ Ok(x) => x,
+ _ => continue,
+ };
+}",
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_multi_binders() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let ControlFlow::Break((x, "tag", y, ..)) = f() else$0 { g(); return };
+}"#,
+ r#"
+fn main() {
+ let (x, y) = match f() {
+ ControlFlow::Break((x, "tag", y, ..)) => (x, y),
+ _ => { g(); return }
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_slice() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let [one, 1001, other] = f() else$0 { break };
+}"#,
+ r#"
+fn main() {
+ let (one, other) = match f() {
+ [one, 1001, other] => (one, other),
+ _ => break,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_struct() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let [Struct { inner: Some(it) }, 1001, other] = f() else$0 { break };
+}"#,
+ r#"
+fn main() {
+ let (it, other) = match f() {
+ [Struct { inner: Some(it) }, 1001, other] => (it, other),
+ _ => break,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_struct_ident_pat() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let [Struct { inner }, 1001, other] = f() else$0 { break };
+}"#,
+ r#"
+fn main() {
+ let (inner, other) = match f() {
+ [Struct { inner }, 1001, other] => (inner, other),
+ _ => break,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_no_binder() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let (8 | 9) = f() else$0 { panic!() };
+}"#,
+ r#"
+fn main() {
+ match f() {
+ (8 | 9) => {}
+ _ => panic!(),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_range() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let 1.. = f() e$0lse { return };
+}"#,
+ r#"
+fn main() {
+ match f() {
+ 1.. => {}
+ _ => return,
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_refpat() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let Ok(&mut x) = f(&mut 0) else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let x = match f(&mut 0) {
+ Ok(&mut x) => x,
+ _ => return,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_refmut() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let Ok(ref mut x) = f() else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let x = match f() {
+ Ok(ref mut x) => x,
+ _ => return,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_atpat() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let out @ Ok(ins) = f() else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let (out, ins) = match f() {
+ out @ Ok(ins) => (out, ins),
+ _ => return,
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_else_to_match_complex_init() {
+ check_assist(
+ convert_let_else_to_match,
+ r#"
+fn main() {
+ let v = vec![1, 2, 3];
+ let &[mut x, y, ..] = &v.iter().collect::<Vec<_>>()[..] else$0 { return };
+}"#,
+ r#"
+fn main() {
+ let v = vec![1, 2, 3];
+ let (mut x, y) = match &v.iter().collect::<Vec<_>>()[..] {
+ &[x, y, ..] => (x, y),
+ _ => return,
+ };
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
new file mode 100644
index 000000000..cb75619ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -0,0 +1,574 @@
+use std::iter::once;
+
+use ide_db::syntax_helpers::node_ext::{is_pattern_cond, single_let};
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ },
+ ted, AstNode,
+ SyntaxKind::{FN, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
+ T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::invert_boolean_expression,
+ AssistId, AssistKind,
+};
+
+// Assist: convert_to_guarded_return
+//
+// Replace a large conditional with a guarded return.
+//
+// ```
+// fn main() {
+// $0if cond {
+// foo();
+// bar();
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if !cond {
+// return;
+// }
+// foo();
+// bar();
+// }
+// ```
+pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
+ if if_expr.else_branch().is_some() {
+ return None;
+ }
+
+ let cond = if_expr.condition()?;
+
+ // Check if there is an IfLet that we can handle.
+ let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
+ let let_ = single_let(cond)?;
+ match let_.pat() {
+ Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
+ let path = pat.path()?;
+ if path.qualifier().is_some() {
+ return None;
+ }
+
+ let bound_ident = pat.fields().next().unwrap();
+ if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
+ return None;
+ }
+
+ (Some((path, bound_ident)), let_.expr()?)
+ }
+ _ => return None, // Unsupported IfLet.
+ }
+ } else {
+ (None, cond)
+ };
+
+ let then_block = if_expr.then_branch()?;
+ let then_block = then_block.stmt_list()?;
+
+ let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
+
+ if parent_block.tail_expr()? != if_expr.clone().into() {
+ return None;
+ }
+
+ // FIXME: This relies on the untyped syntax tree and casts too much. It should be
+ // rewritten to use strongly-typed APIs.
+
+ // check for early return and continue
+ let first_in_then_block = then_block.syntax().first_child()?;
+ if ast::ReturnExpr::can_cast(first_in_then_block.kind())
+ || ast::ContinueExpr::can_cast(first_in_then_block.kind())
+ || first_in_then_block
+ .children()
+ .any(|x| ast::ReturnExpr::can_cast(x.kind()) || ast::ContinueExpr::can_cast(x.kind()))
+ {
+ return None;
+ }
+
+ let parent_container = parent_block.syntax().parent()?;
+
+ let early_expression: ast::Expr = match parent_container.kind() {
+ WHILE_EXPR | LOOP_EXPR => make::expr_continue(None),
+ FN => make::expr_return(None),
+ _ => return None,
+ };
+
+ if then_block.syntax().first_child_or_token().map(|t| t.kind() == T!['{']).is_none() {
+ return None;
+ }
+
+ then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?;
+
+ let target = if_expr.syntax().text_range();
+ acc.add(
+ AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite),
+ "Convert to guarded return",
+ target,
+ |edit| {
+ let if_expr = edit.make_mut(if_expr);
+ let if_indent_level = IndentLevel::from_node(if_expr.syntax());
+ let replacement = match if_let_pat {
+ None => {
+ // If.
+ let new_expr = {
+ let then_branch =
+ make::block_expr(once(make::expr_stmt(early_expression).into()), None);
+ let cond = invert_boolean_expression(cond_expr);
+ make::expr_if(cond, then_branch, None).indent(if_indent_level)
+ };
+ new_expr.syntax().clone_for_update()
+ }
+ Some((path, bound_ident)) => {
+ // If-let.
+ let match_expr = {
+ let happy_arm = {
+ let pat = make::tuple_struct_pat(
+ path,
+ once(make::ext::simple_ident_pat(make::name("it")).into()),
+ );
+ let expr = {
+ let path = make::ext::ident_path("it");
+ make::expr_path(path)
+ };
+ make::match_arm(once(pat.into()), None, expr)
+ };
+
+ let sad_arm = make::match_arm(
+ // FIXME: would be cool to use `None` or `Err(_)` if appropriate
+ once(make::wildcard_pat().into()),
+ None,
+ early_expression,
+ );
+
+ make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm]))
+ };
+
+ let let_stmt = make::let_stmt(bound_ident, None, Some(match_expr));
+ let let_stmt = let_stmt.indent(if_indent_level);
+ let_stmt.syntax().clone_for_update()
+ }
+ };
+
+ let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update();
+
+ let end_of_then = then_block_items.syntax().last_child_or_token().unwrap();
+ let end_of_then =
+ if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) {
+ end_of_then.prev_sibling_or_token().unwrap()
+ } else {
+ end_of_then
+ };
+
+ let then_statements = replacement
+ .children_with_tokens()
+ .chain(
+ then_block_items
+ .syntax()
+ .children_with_tokens()
+ .skip(1)
+ .take_while(|i| *i != end_of_then),
+ )
+ .collect();
+
+ ted::replace_with_many(if_expr.syntax(), then_statements)
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn convert_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ bar();
+ if$0 true {
+ foo();
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main() {
+ bar();
+ if false {
+ return;
+ }
+ foo();
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ if$0 let Some(n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ let n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_if_let_result() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 let Ok(x) = Err(92) {
+ foo(x);
+ }
+}
+"#,
+ r#"
+fn main() {
+ let x = match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ };
+ foo(x);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_ok_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ if$0 let Some(n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ let n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_mut_ok_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ if$0 let Some(mut n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<String>) {
+ bar();
+ let mut n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_ref_ok_inside_fn() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main(n: Option<&str>) {
+ bar();
+ if$0 let Some(ref n) = n {
+ foo(n);
+
+ // comment
+ bar();
+ }
+}
+"#,
+ r#"
+fn main(n: Option<&str>) {
+ bar();
+ let ref n = match n {
+ Some(it) => it,
+ _ => return,
+ };
+ foo(n);
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_inside_while() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ while true {
+ if$0 true {
+ foo();
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ while true {
+ if false {
+ continue;
+ }
+ foo();
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_while() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ while true {
+ if$0 let Some(n) = n {
+ foo(n);
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ while true {
+ let n = match n {
+ Some(it) => it,
+ _ => continue,
+ };
+ foo(n);
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_inside_loop() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ loop {
+ if$0 true {
+ foo();
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if false {
+ continue;
+ }
+ foo();
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_let_inside_loop() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ loop {
+ if$0 let Some(n) = n {
+ foo(n);
+ bar();
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ let n = match n {
+ Some(it) => it,
+ _ => continue,
+ };
+ foo(n);
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_already_converted_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ return;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_already_converted_loop() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ loop {
+ if$0 true {
+ continue;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_return() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ return
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_else_branch() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ foo();
+ } else {
+ bar()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_statements_aftert_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if$0 true {
+ foo();
+ }
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_statements_inside_if() {
+ check_assist_not_applicable(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ if false {
+ if$0 true {
+ foo();
+ }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
new file mode 100644
index 000000000..4ab8e93a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -0,0 +1,840 @@
+use either::Either;
+use ide_db::defs::{Definition, NameRefClass};
+use syntax::{
+ ast::{self, AstNode, HasGenericParams, HasVisibility},
+ match_ast, SyntaxNode,
+};
+
+use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: convert_tuple_struct_to_named_struct
+//
+// Converts a tuple struct to a struct with named fields, and analogously for tuple enum variants.
+//
+// ```
+// struct Point$0(f32, f32);
+//
+// impl Point {
+// pub fn new(x: f32, y: f32) -> Self {
+// Point(x, y)
+// }
+//
+// pub fn x(&self) -> f32 {
+// self.0
+// }
+//
+// pub fn y(&self) -> f32 {
+// self.1
+// }
+// }
+// ```
+// ->
+// ```
+// struct Point { field1: f32, field2: f32 }
+//
+// impl Point {
+// pub fn new(x: f32, y: f32) -> Self {
+// Point { field1: x, field2: y }
+// }
+//
+// pub fn x(&self) -> f32 {
+// self.field1
+// }
+//
+// pub fn y(&self) -> f32 {
+// self.field2
+// }
+// }
+// ```
+pub(crate) fn convert_tuple_struct_to_named_struct(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let strukt = ctx
+ .find_node_at_offset::<ast::Struct>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+ let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+ let tuple_fields = match field_list {
+ ast::FieldList::TupleFieldList(it) => it,
+ ast::FieldList::RecordFieldList(_) => return None,
+ };
+ let strukt_def = match &strukt {
+ Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
+ Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
+ };
+ let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
+
+ acc.add(
+ AssistId("convert_tuple_struct_to_named_struct", AssistKind::RefactorRewrite),
+ "Convert to named struct",
+ target,
+ |edit| {
+ let names = generate_names(tuple_fields.fields());
+ edit_field_references(ctx, edit, tuple_fields.fields(), &names);
+ edit_struct_references(ctx, edit, strukt_def, &names);
+ edit_struct_def(ctx, edit, &strukt, tuple_fields, names);
+ },
+ )
+}
+
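+/// Rewrites the definition itself: the tuple field list becomes a record field list, an
+/// existing `where` clause is moved in front of the braces, and the trailing semicolon of a
+/// tuple struct is removed.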
+fn edit_struct_def(
+ ctx: &AssistContext<'_>,
+ edit: &mut AssistBuilder,
+ strukt: &Either<ast::Struct, ast::Variant>,
+ tuple_fields: ast::TupleFieldList,
+ names: Vec<ast::Name>,
+) {
+ let record_fields = tuple_fields
+ .fields()
+ .zip(names)
+ .filter_map(|(f, name)| Some(ast::make::record_field(f.visibility(), name, f.ty()?)));
+ let record_fields = ast::make::record_field_list(record_fields);
+ let tuple_fields_text_range = tuple_fields.syntax().text_range();
+
+ edit.edit_file(ctx.file_id());
+
+ if let Either::Left(strukt) = strukt {
+ if let Some(w) = strukt.where_clause() {
+ edit.delete(w.syntax().text_range());
+ edit.insert(
+ tuple_fields_text_range.start(),
+ ast::make::tokens::single_newline().text(),
+ );
+ edit.insert(tuple_fields_text_range.start(), w.syntax().text());
+ edit.insert(tuple_fields_text_range.start(), ",");
+ edit.insert(
+ tuple_fields_text_range.start(),
+ ast::make::tokens::single_newline().text(),
+ );
+ } else {
+ edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text());
+ }
+ if let Some(t) = strukt.semicolon_token() {
+ edit.delete(t.text_range());
+ }
+ } else {
+ edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text());
+ }
+
+ edit.replace(tuple_fields_text_range, record_fields.to_string());
+}
+
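+/// Rewrites every reference to the struct or variant (including `Self` usages): tuple-struct
+/// patterns become record patterns, and constructor calls such as `Foo(42)` become record
+/// expressions, across all files that use the item.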
+fn edit_struct_references(
+ ctx: &AssistContext<'_>,
+ edit: &mut AssistBuilder,
+ strukt: Either<hir::Struct, hir::Variant>,
+ names: &[ast::Name],
+) {
+ let strukt_def = match strukt {
+ Either::Left(s) => Definition::Adt(hir::Adt::Struct(s)),
+ Either::Right(v) => Definition::Variant(v),
+ };
+ let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
+
+ let edit_node = |edit: &mut AssistBuilder, node: SyntaxNode| -> Option<()> {
+ match_ast! {
+ match node {
+ ast::TupleStructPat(tuple_struct_pat) => {
+ edit.replace(
+ tuple_struct_pat.syntax().text_range(),
+ ast::make::record_pat_with_fields(
+ tuple_struct_pat.path()?,
+ ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map(
+ |(pat, name)| {
+ ast::make::record_pat_field(
+ ast::make::name_ref(&name.to_string()),
+ pat,
+ )
+ },
+ )),
+ )
+ .to_string(),
+ );
+ },
+ // for tuple struct creations like Foo(42)
+ ast::CallExpr(call_expr) => {
+ let path = call_expr.syntax().descendants().find_map(ast::PathExpr::cast).and_then(|expr| expr.path())?;
+
+ // this also matches associated-function calls such as Foo::new(42); those must be skipped
+ if let Some(name_ref) = path.segment().and_then(|s| s.name_ref()) {
+ match NameRefClass::classify(&ctx.sema, &name_ref) {
+ Some(NameRefClass::Definition(Definition::SelfType(_))) => {},
+ Some(NameRefClass::Definition(def)) if def == strukt_def => {},
+ _ => return None,
+ };
+ }
+
+ let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?;
+
+ edit.replace(
+ call_expr.syntax().text_range(),
+ ast::make::record_expr(
+ path,
+ ast::make::record_expr_field_list(arg_list.args().zip(names).map(
+ |(expr, name)| {
+ ast::make::record_expr_field(
+ ast::make::name_ref(&name.to_string()),
+ Some(expr),
+ )
+ },
+ )),
+ )
+ .to_string(),
+ );
+ },
+ _ => return None,
+ }
+ }
+ Some(())
+ };
+
+ for (file_id, refs) in usages {
+ edit.edit_file(file_id);
+ for r in refs {
+ for node in r.name.syntax().ancestors() {
+ if edit_node(edit, node).is_some() {
+ break;
+ }
+ }
+ }
+ }
+}
+
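+/// Rewrites field accesses: every reference to a positional field (`.0`, `.1`, ...) is
+/// replaced with the corresponding generated name (`field1`, `field2`, ...).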
+fn edit_field_references(
+ ctx: &AssistContext<'_>,
+ edit: &mut AssistBuilder,
+ fields: impl Iterator<Item = ast::TupleField>,
+ names: &[ast::Name],
+) {
+ for (field, name) in fields.zip(names) {
+ let field = match ctx.sema.to_def(&field) {
+ Some(it) => it,
+ None => continue,
+ };
+ let def = Definition::Field(field);
+ let usages = def.usages(&ctx.sema).all();
+ for (file_id, refs) in usages {
+ edit.edit_file(file_id);
+ for r in refs {
+ if let Some(name_ref) = r.name.as_name_ref() {
+ edit.replace(name_ref.syntax().text_range(), name.text());
+ }
+ }
+ }
+ }
+}
+
+fn generate_names(fields: impl Iterator<Item = ast::TupleField>) -> Vec<ast::Name> {
+ fields.enumerate().map(|(i, _)| ast::make::name(&format!("field{}", i + 1))).collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_other_than_tuple_struct() {
+ check_assist_not_applicable(
+ convert_tuple_struct_to_named_struct,
+ r#"struct Foo$0 { bar: u32 };"#,
+ );
+ check_assist_not_applicable(convert_tuple_struct_to_named_struct, r#"struct Foo$0;"#);
+ }
+
+ #[test]
+ fn convert_simple_struct() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct A$0(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { field1: inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.field1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_referenced_via_self_kw() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct A$0(Inner);
+
+impl A {
+ fn new(inner: Inner) -> Self {
+ Self(inner)
+ }
+
+ fn new_with_default() -> Self {
+ Self::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn new(inner: Inner) -> Self {
+ Self { field1: inner }
+ }
+
+ fn new_with_default() -> Self {
+ Self::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.field1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_destructured_struct() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct A$0(Inner);
+
+impl A {
+ fn into_inner(self) -> Inner {
+ let A(first) = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> Inner {
+ let Self(first) = self;
+ first
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn into_inner(self) -> Inner {
+ let A { field1: first } = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> Inner {
+ let Self { field1: first } = self;
+ first
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_visibility() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct A$0(pub u32, pub(crate) u64);
+
+impl A {
+ fn new() -> A {
+ A(42, 42)
+ }
+
+ fn into_first(self) -> u32 {
+ self.0
+ }
+
+ fn into_second(self) -> u64 {
+ self.1
+ }
+}"#,
+ r#"
+struct A { pub field1: u32, pub(crate) field2: u64 }
+
+impl A {
+ fn new() -> A {
+ A { field1: 42, field2: 42 }
+ }
+
+ fn into_first(self) -> u32 {
+ self.field1
+ }
+
+ fn into_second(self) -> u64 {
+ self.field2
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_wrapped_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner$0(u32);
+struct Outer(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner(42))
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.0).0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner(x)) = self;
+ x
+ }
+}"#,
+ r#"
+struct Inner { field1: u32 }
+struct Outer(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner { field1: 42 })
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.0).field1
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner { field1: x }) = self;
+ x
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner(u32);
+struct Outer$0(Inner);
+
+impl Outer {
+ fn new() -> Self {
+ Self(Inner(42))
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.0).0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer(Inner(x)) = self;
+ x
+ }
+}"#,
+ r#"
+struct Inner(u32);
+struct Outer { field1: Inner }
+
+impl Outer {
+ fn new() -> Self {
+ Self { field1: Inner(42) }
+ }
+
+ fn into_inner(self) -> u32 {
+ (self.field1).0
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer { field1: Inner(x) } = self;
+ x
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_multi_file_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+struct Inner;
+struct A$0(Inner);
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A(Inner);
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+struct A { field1: Inner }
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A { field1: Inner };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_struct_with_where_clause() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Wrap$0<T>(T)
+where
+ T: Display;
+"#,
+ r#"
+struct Wrap<T>
+where
+ T: Display,
+{ field1: T }
+
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_other_than_tuple_variant() {
+ check_assist_not_applicable(
+ convert_tuple_struct_to_named_struct,
+ r#"enum Enum { Variant$0 { value: usize } };"#,
+ );
+ check_assist_not_applicable(
+ convert_tuple_struct_to_named_struct,
+ r#"enum Enum { Variant$0 }"#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_variant() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum A {
+ $0Variant(usize),
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ A::Variant(value)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ A::Variant(value) => value,
+ }
+ }
+}"#,
+ r#"
+enum A {
+ Variant { field1: usize },
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ A::Variant { field1: value }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ A::Variant { field1: value } => value,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_referenced_via_self_kw() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum A {
+ $0Variant(usize),
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ Self::Variant(value)
+ }
+
+ fn new_with_default() -> A {
+ Self::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ Self::Variant(value) => value,
+ }
+ }
+}"#,
+ r#"
+enum A {
+ Variant { field1: usize },
+}
+
+impl A {
+ fn new(value: usize) -> A {
+ Self::Variant { field1: value }
+ }
+
+ fn new_with_default() -> A {
+ Self::new(Default::default())
+ }
+
+ fn value(self) -> usize {
+ match self {
+ Self::Variant { field1: value } => value,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_destructured_variant() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum A {
+ $0Variant(usize),
+}
+
+impl A {
+ fn into_inner(self) -> usize {
+ let A::Variant(first) = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> usize {
+ let Self::Variant(first) = self;
+ first
+ }
+}"#,
+ r#"
+enum A {
+ Variant { field1: usize },
+}
+
+impl A {
+ fn into_inner(self) -> usize {
+ let A::Variant { field1: first } = self;
+ first
+ }
+
+ fn into_inner_via_self(self) -> usize {
+ let Self::Variant { field1: first } = self;
+ first
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_with_wrapped_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum Inner {
+ $0Variant(usize),
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant(42))
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant(x)) = self;
+ x
+ }
+}"#,
+ r#"
+enum Inner {
+ Variant { field1: usize },
+}
+enum Outer {
+ Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant { field1: 42 })
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant { field1: x }) = self;
+ x
+ }
+}"#,
+ );
+
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ $0Variant(Inner),
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant(Inner::Variant(42))
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant(Inner::Variant(x)) = self;
+ x
+ }
+}"#,
+ r#"
+enum Inner {
+ Variant(usize),
+}
+enum Outer {
+ Variant { field1: Inner },
+}
+
+impl Outer {
+ fn new() -> Self {
+ Self::Variant { field1: Inner::Variant(42) }
+ }
+
+ fn into_inner_destructed(self) -> u32 {
+ let Outer::Variant { field1: Inner::Variant(x) } = self;
+ x
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_variant_with_multi_file_references() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ $0Variant(Inner),
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A::Variant(Inner);
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ Variant { field1: Inner },
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A, Inner};
+fn f() {
+ let a = A::Variant { field1: Inner };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_directly_used_variant() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ $0Variant(Inner),
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A::Variant, Inner};
+fn f() {
+ let a = Variant(Inner);
+}
+"#,
+ r#"
+//- /main.rs
+struct Inner;
+enum A {
+ Variant { field1: Inner },
+}
+
+mod foo;
+
+//- /foo.rs
+use crate::{A::Variant, Inner};
+fn f() {
+ let a = Variant { field1: Inner };
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
new file mode 100644
index 000000000..c34b68411
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
@@ -0,0 +1,188 @@
+use std::iter::once;
+
+use ide_db::syntax_helpers::node_ext::is_pattern_cond;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, HasLoopBody,
+ },
+ AstNode, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::invert_boolean_expression,
+ AssistId, AssistKind,
+};
+
+// Assist: convert_while_to_loop
+//
+// Replace a while with a loop.
+//
+// ```
+// fn main() {
+// $0while cond {
+// foo();
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// loop {
+// if !cond {
+// break;
+// }
+// foo();
+// }
+// }
+// ```
+pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let while_kw = ctx.find_token_syntax_at_offset(T![while])?;
+ let while_expr = while_kw.parent().and_then(ast::WhileExpr::cast)?;
+ let while_body = while_expr.loop_body()?;
+ let while_cond = while_expr.condition()?;
+
+ let target = while_expr.syntax().text_range();
+ acc.add(
+ AssistId("convert_while_to_loop", AssistKind::RefactorRewrite),
+ "Convert while to loop",
+ target,
+ |edit| {
+ let while_indent_level = IndentLevel::from_node(while_expr.syntax());
+
+ let break_block =
+ make::block_expr(once(make::expr_stmt(make::expr_break(None, None)).into()), None)
+ .indent(while_indent_level);
+ let block_expr = if is_pattern_cond(while_cond.clone()) {
+ let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
+ let stmts = once(make::expr_stmt(if_expr).into());
+ make::block_expr(stmts, None)
+ } else {
+ let if_cond = invert_boolean_expression(while_cond);
+ let if_expr = make::expr_if(if_cond, break_block, None);
+ let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements());
+ make::block_expr(stmts, while_body.tail_expr())
+ };
+
+ let replacement = make::expr_loop(block_expr.indent(while_indent_level));
+ edit.replace(target, replacement.syntax().text())
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn convert_inside_fn() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 cond {
+ foo();
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if !cond {
+ break;
+ }
+ foo();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_busy_wait() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 cond() {}
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if !cond() {
+ break;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_trailing_expr() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 cond() {
+ bar()
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if !cond() {
+ break;
+ }
+ bar()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn convert_while_let() {
+ check_assist(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while$0 let Some(_) = foo() {
+ bar();
+ }
+}
+"#,
+ r#"
+fn main() {
+ loop {
+ if let Some(_) = foo() {
+ bar();
+ } else {
+ break;
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn ignore_cursor_in_body() {
+ check_assist_not_applicable(
+ convert_while_to_loop,
+ r#"
+fn main() {
+ while cond {$0
+ bar();
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
new file mode 100644
index 000000000..c1f57532b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -0,0 +1,2147 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ search::{FileReference, SearchScope, UsageSearchResult},
+};
+use syntax::{
+ ast::{self, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
+ TextRange,
+};
+
+use crate::assist_context::{AssistBuilder, AssistContext, Assists};
+
+// Assist: destructure_tuple_binding
+//
+// Destructures a tuple binding in place.
+//
+// ```
+// fn main() {
+// let $0t = (1,2);
+// let v = t.0;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let ($0_0, _1) = (1,2);
+// let v = _0;
+// }
+// ```
+pub(crate) fn destructure_tuple_binding(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, false)
+}
+
+// And when `with_sub_pattern` is enabled (currently disabled):
+// Assist: destructure_tuple_binding_in_sub_pattern
+//
+// Destructures tuple items in sub-pattern (after `@`).
+//
+// ```
+// fn main() {
+// let $0t = (1,2);
+// let v = t.0;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let t @ ($0_0, _1) = (1,2);
+// let v = _0;
+// }
+// ```
+pub(crate) fn destructure_tuple_binding_impl(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ with_sub_pattern: bool,
+) -> Option<()> {
+ let ident_pat = ctx.find_node_at_offset::<ast::IdentPat>()?;
+ let data = collect_data(ident_pat, ctx)?;
+
+ if with_sub_pattern {
+ acc.add(
+ AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite),
+ "Destructure tuple in sub-pattern",
+ data.range,
+ |builder| {
+ edit_tuple_assignment(ctx, builder, &data, true);
+ edit_tuple_usages(&data, builder, ctx, true);
+ },
+ );
+ }
+
+ acc.add(
+ AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite),
+ if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" },
+ data.range,
+ |builder| {
+ edit_tuple_assignment(ctx, builder, &data, false);
+ edit_tuple_usages(&data, builder, ctx, false);
+ },
+ );
+
+ Some(())
+}
+
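+/// Gathers everything the rewrite needs: bails out on bindings that already carry an `@`
+/// sub-pattern and on bindings whose type is not a tuple, records whether the binding is
+/// behind a (mutable) reference, generates the `_0`, `_1`, ... field names, and collects the
+/// binding's usages within the current file.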
+fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleData> {
+ if ident_pat.at_token().is_some() {
+ // Cannot destructure pattern with sub-pattern:
+ // Only IdentPat can have sub-pattern,
+ // but not TuplePat (`(a,b)`).
+ cov_mark::hit!(destructure_tuple_subpattern);
+ return None;
+ }
+
+ let ty = ctx.sema.type_of_pat(&ident_pat.clone().into())?.adjusted();
+ let ref_type = if ty.is_mutable_reference() {
+ Some(RefType::Mutable)
+ } else if ty.is_reference() {
+ Some(RefType::ReadOnly)
+ } else {
+ None
+ };
+ // might be reference
+ let ty = ty.strip_references();
+ // must be tuple
+ let field_types = ty.tuple_fields(ctx.db());
+ if field_types.is_empty() {
+ cov_mark::hit!(destructure_tuple_no_tuple);
+ return None;
+ }
+
+ let name = ident_pat.name()?.to_string();
+ let range = ident_pat.syntax().text_range();
+
+ let usages = ctx.sema.to_def(&ident_pat).map(|def| {
+ Definition::Local(def)
+ .usages(&ctx.sema)
+ .in_scope(SearchScope::single_file(ctx.file_id()))
+ .all()
+ });
+
+ let field_names = (0..field_types.len())
+ .map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
+ .collect::<Vec<_>>();
+
+ Some(TupleData { ident_pat, range, ref_type, field_names, usages })
+}
+
+fn generate_name(
+ _ctx: &AssistContext<'_>,
+ index: usize,
+ _tuple_name: &str,
+ _ident_pat: &IdentPat,
+ _usages: &Option<UsageSearchResult>,
+) -> String {
+ // FIXME: detect if name already used
+ format!("_{}", index)
+}
+
+enum RefType {
+ ReadOnly,
+ Mutable,
+}
+struct TupleData {
+ ident_pat: IdentPat,
+ // name: String,
+ range: TextRange,
+ ref_type: Option<RefType>,
+ field_names: Vec<String>,
+ // field_types: Vec<Type>,
+ usages: Option<UsageSearchResult>,
+}
+fn edit_tuple_assignment(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ data: &TupleData,
+ in_sub_pattern: bool,
+) {
+ let tuple_pat = {
+ let original = &data.ident_pat;
+ let is_ref = original.ref_token().is_some();
+ let is_mut = original.mut_token().is_some();
+ let fields = data.field_names.iter().map(|name| {
+ ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, ast::make::name(name)))
+ });
+ ast::make::tuple_pat(fields)
+ };
+
+ let add_cursor = |text: &str| {
+ // place cursor on first tuple item
+ let first_tuple = &data.field_names[0];
+ text.replacen(first_tuple, &format!("$0{}", first_tuple), 1)
+ };
+
+ // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
+ if in_sub_pattern {
+ let text = format!(" @ {}", tuple_pat);
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = add_cursor(&text);
+ builder.insert_snippet(cap, data.range.end(), snip);
+ }
+ None => builder.insert(data.range.end(), text),
+ };
+ } else {
+ let text = tuple_pat.to_string();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = add_cursor(&text);
+ builder.replace_snippet(cap, data.range, snip);
+ }
+ None => builder.replace(data.range, text),
+ };
+ }
+}
+
+fn edit_tuple_usages(
+ data: &TupleData,
+ builder: &mut AssistBuilder,
+ ctx: &AssistContext<'_>,
+ in_sub_pattern: bool,
+) {
+ if let Some(usages) = data.usages.as_ref() {
+ for (file_id, refs) in usages.iter() {
+ builder.edit_file(*file_id);
+
+ for r in refs {
+ edit_tuple_usage(ctx, builder, r, data, in_sub_pattern);
+ }
+ }
+ }
+}
+fn edit_tuple_usage(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ usage: &FileReference,
+ data: &TupleData,
+ in_sub_pattern: bool,
+) {
+ match detect_tuple_index(usage, data) {
+ Some(index) => edit_tuple_field_usage(ctx, builder, data, index),
+ None => {
+ if in_sub_pattern {
+ cov_mark::hit!(destructure_tuple_call_with_subpattern);
+ return;
+ }
+
+ // no index access -> make the usage invalid so it requires handling by the user
+ // -> put the usage in a block comment
+ //
+ // Note: For macro invocations this might still result in valid code:
+ // when a macro accepts the tuple as argument, as well as no arguments at all,
+ // commenting out the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
+ // But this is an unlikely case. Usually the resulting macro call will become erroneous.
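+ //
+ // For example (cf. `tests::handle_function_call` below): `let $0tup = (1,2); let v = tup.into();`
+ // becomes `let ($0_0, _1) = (1,2); let v = /*tup*/.into();`, leaving the commented-out
+ // usage as a marker for manual follow-up.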
+ builder.insert(usage.range.start(), "/*");
+ builder.insert(usage.range.end(), "*/");
+ }
+ }
+}
+
+fn edit_tuple_field_usage(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ data: &TupleData,
+ index: TupleIndex,
+) {
+ let field_name = &data.field_names[index.index];
+
+ if data.ref_type.is_some() {
+ let ref_data = handle_ref_field_usage(ctx, &index.field_expr);
+ builder.replace(ref_data.range, ref_data.format(field_name));
+ } else {
+ builder.replace(index.range, field_name);
+ }
+}
+struct TupleIndex {
+ index: usize,
+ range: TextRange,
+ field_expr: FieldExpr,
+}
+fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIndex> {
+ // usage is IDENT
+ // IDENT
+ // NAME_REF
+ // PATH_SEGMENT
+ // PATH
+ // PATH_EXPR
+ // PAREN_EXPR*
+ // FIELD_EXPR
+
+ let node = usage
+ .name
+ .syntax()
+ .ancestors()
+ .skip_while(|s| !ast::PathExpr::can_cast(s.kind()))
+ .skip(1) // PATH_EXPR
+ .find(|s| !ast::ParenExpr::can_cast(s.kind()))?; // skip parentheses
+
+ if let Some(field_expr) = ast::FieldExpr::cast(node) {
+ let idx = field_expr.name_ref()?.as_tuple_field()?;
+ if idx < data.field_names.len() {
+ // special case: in macro call -> range of `field_expr` in applied macro, NOT range in actual file!
+ if field_expr.syntax().ancestors().any(|a| ast::MacroStmts::can_cast(a.kind())) {
+ cov_mark::hit!(destructure_tuple_macro_call);
+
+ // issue: cannot differentiate between a tuple index passed into the macro and a tuple index produced by the macro:
+ // ```rust
+ // macro_rules! m {
+ // ($t1:expr, $t2:expr) => { $t1; $t2.0 }
+ // }
+ // let t = (1,2);
+ // m!(t.0, t)
+ // ```
+ // -> 2 tuple index usages detected!
+ //
+ // -> only handle `t`
+ return None;
+ }
+
+ Some(TupleIndex { index: idx, range: field_expr.syntax().text_range(), field_expr })
+ } else {
+ // tuple index out of range
+ None
+ }
+ } else {
+ None
+ }
+}
+
+struct RefData {
+ range: TextRange,
+ needs_deref: bool,
+ needs_parentheses: bool,
+}
+impl RefData {
+ fn format(&self, field_name: &str) -> String {
+ match (self.needs_deref, self.needs_parentheses) {
+ (true, true) => format!("(*{})", field_name),
+ (true, false) => format!("*{}", field_name),
+ (false, true) => format!("({})", field_name),
+ (false, false) => field_name.to_string(),
+ }
+ }
+}
+fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> RefData {
+ let s = field_expr.syntax();
+ let mut ref_data =
+ RefData { range: s.text_range(), needs_deref: true, needs_parentheses: true };
+
+ let parent = match s.parent().map(ast::Expr::cast) {
+ Some(Some(parent)) => parent,
+ Some(None) => {
+ ref_data.needs_parentheses = false;
+ return ref_data;
+ }
+ None => return ref_data,
+ };
+
+ match parent {
+ ast::Expr::ParenExpr(it) => {
+ // already parens in place -> don't replace
+ ref_data.needs_parentheses = false;
+ // there might be a ref outside: `&(t.0)` -> can be removed
+ if let Some(it) = it.syntax().parent().and_then(ast::RefExpr::cast) {
+ ref_data.needs_deref = false;
+ ref_data.range = it.syntax().text_range();
+ }
+ }
+ ast::Expr::RefExpr(it) => {
+ // `&*` -> cancel each other out
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ // might be surrounded by parens -> can be removed too
+ match it.syntax().parent().and_then(ast::ParenExpr::cast) {
+ Some(parent) => ref_data.range = parent.syntax().text_range(),
+ None => ref_data.range = it.syntax().text_range(),
+ };
+ }
+ // higher precedence than deref `*`
+ // https://doc.rust-lang.org/reference/expressions.html#expression-precedence
+ // -> requires parentheses
+ ast::Expr::PathExpr(_it) => {}
+ ast::Expr::MethodCallExpr(it) => {
+ // `field_expr` is `self_param` (otherwise it would be in `ArgList`)
+
+ // test if auto-ref is already in place (`value` -> `&value`)
+ // -> no method accepting `self`, only `&self` -> no need for deref
+ //
+ // other combinations (`&value` -> `value`, `&&value` -> `&value`, `&value` -> `&&value`) might or might not be able to auto-ref/deref,
+ // and an added `&` might resolve to a different trait implementation
+ // -> ONLY handle auto-ref from `value` to `&value`
+ fn is_auto_ref(ctx: &AssistContext<'_>, call_expr: &MethodCallExpr) -> bool {
+ fn impl_(ctx: &AssistContext<'_>, call_expr: &MethodCallExpr) -> Option<bool> {
+ let rec = call_expr.receiver()?;
+ let rec_ty = ctx.sema.type_of_expr(&rec)?.original();
+ // input must be actual value
+ if rec_ty.is_reference() {
+ return Some(false);
+ }
+
+ // doesn't resolve trait impl
+ let f = ctx.sema.resolve_method_call(call_expr)?;
+ let self_param = f.self_param(ctx.db())?;
+ // self must be ref
+ match self_param.access(ctx.db()) {
+ hir::Access::Shared | hir::Access::Exclusive => Some(true),
+ hir::Access::Owned => Some(false),
+ }
+ }
+ impl_(ctx, call_expr).unwrap_or(false)
+ }
+
+ if is_auto_ref(ctx, &it) {
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ }
+ }
+ ast::Expr::FieldExpr(_it) => {
+ // `t.0.my_field`
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ }
+ ast::Expr::IndexExpr(_it) => {
+ // `t.0[1]`
+ ref_data.needs_deref = false;
+ ref_data.needs_parentheses = false;
+ }
+ ast::Expr::TryExpr(_it) => {
+ // `t.0?`
+ // requires deref and parens: `(*_0)`
+ }
+ // lower precedence than deref `*` -> no parens
+ _ => {
+ ref_data.needs_parentheses = false;
+ }
+ };
+
+ ref_data
+}
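+
+// Rough summary of the cases above for a destructured `&(...)` tuple, as exercised by
+// `tests::refs::deref_and_parentheses`:
+// `let v: i32 = t.0;`    -> `let v: i32 = *_0;`    (plain usage: deref, no parens)
+// `let v: &i32 = &t.0;`  -> `let v: &i32 = _0;`    (`&` and `*` cancel out)
+// `t.0.do_stuff();`      -> `(*_0).do_stuff();`    (method takes `self` by value: deref and parens)
+// `let v: i32 = t.3[0];` -> `let v: i32 = _3[0];`  (index expression: no deref, no parens)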
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ // Tests for direct tuple destructure:
+ // `let $0t = (1,2);` -> `let (_0, _1) = (1,2);`
+
+ fn assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, false)
+ }
+
+ #[test]
+ fn dont_trigger_on_unit() {
+ cov_mark::check!(destructure_tuple_no_tuple);
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+let $0v = ();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn dont_trigger_on_number() {
+ cov_mark::check!(destructure_tuple_no_tuple);
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+let $0v = 32;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn destructure_3_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+}
+ "#,
+ )
+ }
+ #[test]
+ fn destructure_2_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+}
+ "#,
+ )
+ }
+ #[test]
+ fn replace_indices() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+ let v1 = tup.0;
+ let v2 = tup.1;
+ let v3 = tup.2;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+ let v1 = _0;
+ let v2 = _1;
+ let v3 = _2;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn replace_usage_in_parentheses() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+ let a = (tup).1;
+ let b = ((tup)).1;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+ let a = _1;
+ let b = _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn handle_function_call() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2);
+ let v = tup.into();
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = /*tup*/.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn handle_invalid_index() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2);
+ let v = tup.3;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = /*tup*/.3;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_replace_variable_with_same_name_as_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let tup = (1,2);
+ let v = tup.1;
+ let $0tup = (1,2,3);
+ let v = tup.1;
+ let tup = (1,2,3);
+ let v = tup.1;
+}
+ "#,
+ r#"
+fn main() {
+ let tup = (1,2);
+ let v = tup.1;
+ let ($0_0, _1, _2) = (1,2,3);
+ let v = _1;
+ let tup = (1,2,3);
+ let v = tup.1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_function_call_in_tuple_item() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0t = ("3.14", 0);
+ let pi: f32 = t.0.parse().unwrap_or(0.0);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = ("3.14", 0);
+ let pi: f32 = _0.parse().unwrap_or(0.0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_type() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0t: (usize, i32) = (1,2);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1): (usize, i32) = (1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn destructure_reference() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let t = (1,2);
+ let $0t = &t;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let t = (1,2);
+ let ($0_0, _1) = &t;
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn destructure_multiple_reference() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let t = (1,2);
+ let $0t = &&t;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let t = (1,2);
+ let ($0_0, _1) = &&t;
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_reference() {
+ check_assist(
+ assist,
+ r#"
+fn foo(t: &(usize, usize)) -> usize {
+ match t {
+ &$0t => t.0
+ }
+}
+ "#,
+ r#"
+fn foo(t: &(usize, usize)) -> usize {
+ match t {
+ &($0_0, _1) => _0
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let ref $0t = (1,2);
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let (ref $0_0, ref _1) = (1,2);
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_mut() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let mut $0t = (1,2);
+ t.0 = 42;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let (mut $0_0, mut _1) = (1,2);
+ _0 = 42;
+ let v = _0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref_mut() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let ref mut $0t = (1,2);
+ t.0 = 42;
+ let v = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let (ref mut $0_0, ref mut _1) = (1,2);
+ *_0 = 42;
+ let v = *_0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_for_non_tuple_reference() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+ let v = 42;
+ let $0v = &42;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_on_static_tuple() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+static $0TUP: (usize, usize) = (1,2);
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_on_wildcard() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+ let $0_ = (1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_in_struct() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+struct S {
+ $0tup: (usize, usize),
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_in_struct_creation() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+struct S {
+ tup: (usize, usize),
+}
+fn main() {
+ let s = S {
+ $0tup: (1,2),
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_on_tuple_struct() {
+ check_assist_not_applicable(
+ assist,
+ r#"
+struct S(usize, usize);
+fn main() {
+ let $0s = S(1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn dont_trigger_when_subpattern_exists() {
+ // sub-pattern is only allowed with IdentPat (name), not other patterns (like TuplePat)
+ cov_mark::check!(destructure_tuple_subpattern);
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn sum(t: (usize, usize)) -> usize {
+ match t {
+ $0t @ (1..=3,1..=3) => t.0 + t.1,
+ _ => 0,
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_subpattern() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let t1 @ (_, $0t2) = (1, (2,3));
+ let v = t1.0 + t2.0 + t2.1;
+}
+ "#,
+ r#"
+fn main() {
+ let t1 @ (_, ($0_0, _1)) = (1, (2,3));
+ let v = t1.0 + _0 + _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_nested_tuple() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let ($0tup, v) = ((1,2),3);
+}
+ "#,
+ r#"
+fn main() {
+ let (($0_0, _1), v) = ((1,2),3);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_closure() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0tup = (1,2,3);
+ let f = |v| v + tup.1;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1, _2) = (1,2,3);
+ let f = |v| v + _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_closure_args() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let f = |$0t| t.0 + t.1;
+ let v = f((1,2));
+}
+ "#,
+ r#"
+fn main() {
+ let f = |($0_0, _1)| _0 + _1;
+ let v = f((1,2));
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_function_args() {
+ check_assist(
+ assist,
+ r#"
+fn f($0t: (usize, usize)) {
+ let v = t.0;
+}
+ "#,
+ r#"
+fn f(($0_0, _1): (usize, usize)) {
+ let v = _0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_if_let() {
+ check_assist(
+ assist,
+ r#"
+fn f(t: (usize, usize)) {
+ if let $0t = t {
+ let v = t.0;
+ }
+}
+ "#,
+ r#"
+fn f(t: (usize, usize)) {
+ if let ($0_0, _1) = t {
+ let v = _0;
+ }
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_if_let_option() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: option
+fn f(o: Option<(usize, usize)>) {
+ if let Some($0t) = o {
+ let v = t.0;
+ }
+}
+ "#,
+ r#"
+fn f(o: Option<(usize, usize)>) {
+ if let Some(($0_0, _1)) = o {
+ let v = _0;
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_match() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ match (1,2) {
+ $0t => t.1,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ match (1,2) {
+ ($0_0, _1) => _1,
+ };
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_match_option() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: option
+fn main() {
+ match Some((1,2)) {
+ Some($0t) => t.1,
+ _ => 0,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ match Some((1,2)) {
+ Some(($0_0, _1)) => _1,
+ _ => 0,
+ };
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_match_reference_option() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: option
+fn main() {
+ let t = (1,2);
+ match Some(&t) {
+ Some($0t) => t.1,
+ _ => 0,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ let t = (1,2);
+ match Some(&t) {
+ Some(($0_0, _1)) => *_1,
+ _ => 0,
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_for() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: iterators
+fn main() {
+ for $0t in core::iter::repeat((1,2)) {
+ let v = t.1;
+ }
+}
+ "#,
+ r#"
+fn main() {
+ for ($0_0, _1) in core::iter::repeat((1,2)) {
+ let v = _1;
+ }
+}
+ "#,
+ )
+ }
+ #[test]
+ fn in_for_nested() {
+ check_assist(
+ assist,
+ r#"
+//- minicore: iterators
+fn main() {
+ for (a, $0b) in core::iter::repeat((1,(2,3))) {
+ let v = b.1;
+ }
+}
+ "#,
+ r#"
+fn main() {
+ for (a, ($0_0, _1)) in core::iter::repeat((1,(2,3))) {
+ let v = _1;
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_on_tuple_usage() {
+ // Improvement: it might be reasonable to allow and implement this
+ check_assist_not_applicable(
+ assist,
+ r#"
+fn main() {
+ let t = (1,2);
+ let v = $0t.0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn replace_all() {
+ check_assist(
+ assist,
+ r#"
+fn main() {
+ let $0t = (1,2);
+ let v = t.1;
+ let s = (t.0 + t.1) / 2;
+ let f = |v| v + t.0;
+ let r = f(t.1);
+ let e = t == (9,0);
+ let m =
+ match t {
+ (_,2) if t.0 > 2 => 1,
+ _ => 0,
+ };
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = _1;
+ let s = (_0 + _1) / 2;
+ let f = |v| v + _0;
+ let r = f(_1);
+ let e = /*t*/ == (9,0);
+ let m =
+ match /*t*/ {
+ (_,2) if _0 > 2 => 1,
+ _ => 0,
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn non_trivial_tuple_assignment() {
+ check_assist(
+ assist,
+ r#"
+fn main {
+ let $0t =
+ if 1 > 2 {
+ (1,2)
+ } else {
+ (5,6)
+ };
+ let v1 = t.0;
+ let v2 =
+ if t.0 > t.1 {
+ t.0 - t.1
+ } else {
+ t.1 - t.0
+ };
+}
+ "#,
+ r#"
+fn main {
+ let ($0_0, _1) =
+ if 1 > 2 {
+ (1,2)
+ } else {
+ (5,6)
+ };
+ let v1 = _0;
+ let v2 =
+ if _0 > _1 {
+ _0 - _1
+ } else {
+ _1 - _0
+ };
+}
+ "#,
+ )
+ }
+
+ mod assist {
+ use super::*;
+ use crate::tests::check_assist_by_label;
+
+ fn assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, true)
+ }
+ fn in_place_assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, false)
+ }
+
+ pub(crate) fn check_in_place_assist(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist_by_label(
+ in_place_assist,
+ ra_fixture_before,
+ ra_fixture_after,
+ // "Destructure tuple in place",
+ "Destructure tuple",
+ );
+ }
+
+ pub(crate) fn check_sub_pattern_assist(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist_by_label(
+ assist,
+ ra_fixture_before,
+ ra_fixture_after,
+ "Destructure tuple in sub-pattern",
+ );
+ }
+
+ pub(crate) fn check_both_assists(
+ ra_fixture_before: &str,
+ ra_fixture_after_in_place: &str,
+ ra_fixture_after_in_sub_pattern: &str,
+ ) {
+ check_in_place_assist(ra_fixture_before, ra_fixture_after_in_place);
+ check_sub_pattern_assist(ra_fixture_before, ra_fixture_after_in_sub_pattern);
+ }
+ }
+
+ /// Tests for destructure of tuple in sub-pattern:
+ /// `let $0t = (1,2);` -> `let t @ (_0, _1) = (1,2);`
+ mod sub_pattern {
+ use super::assist::*;
+ use super::*;
+ use crate::tests::check_assist_by_label;
+
+ #[test]
+ fn destructure_in_sub_pattern() {
+ check_sub_pattern_assist(
+ r#"
+#![feature(bindings_after_at)]
+
+fn main() {
+ let $0t = (1,2);
+}
+ "#,
+ r#"
+#![feature(bindings_after_at)]
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn trigger_both_destructure_tuple_assists() {
+ fn assist(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ destructure_tuple_binding_impl(acc, ctx, true)
+ }
+ let text = r#"
+fn main() {
+ let $0t = (1,2);
+}
+ "#;
+ check_assist_by_label(
+ assist,
+ text,
+ r#"
+fn main() {
+ let ($0_0, _1) = (1,2);
+}
+ "#,
+ "Destructure tuple in place",
+ );
+ check_assist_by_label(
+ assist,
+ text,
+ r#"
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+}
+ "#,
+ "Destructure tuple in sub-pattern",
+ );
+ }
+
+ #[test]
+ fn replace_indices() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let $0t = (1,2);
+ let v1 = t.0;
+ let v2 = t.1;
+}
+ "#,
+ r#"
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ let v1 = _0;
+ let v2 = _1;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_function_call() {
+ cov_mark::check!(destructure_tuple_call_with_subpattern);
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let $0t = (1,2);
+ let v = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ let v = t.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn keep_type() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let $0t: (usize, i32) = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let t @ ($0_0, _1): (usize, i32) = (1,2);
+ let v = _1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn in_function_args() {
+ check_sub_pattern_assist(
+ r#"
+fn f($0t: (usize, usize)) {
+ let v = t.0;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn f(t @ ($0_0, _1): (usize, usize)) {
+ let v = _0;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let ref $0t = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let ref t @ (ref $0_0, ref _1) = (1,2);
+ let v = *_1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_mut() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let mut $0t = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let mut t @ (mut $0_0, mut _1) = (1,2);
+ let v = _1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_ref_mut() {
+ check_sub_pattern_assist(
+ r#"
+fn main() {
+ let ref mut $0t = (1,2);
+ let v = t.1;
+ let f = t.into();
+}
+ "#,
+ r#"
+fn main() {
+ let ref mut t @ (ref mut $0_0, ref mut _1) = (1,2);
+ let v = *_1;
+ let f = t.into();
+}
+ "#,
+ )
+ }
+ }
+
+ /// Tests for tuple usage in macro call:
+ /// `println!("{}", t.0)`
+ mod in_macro_call {
+ use super::assist::*;
+
+ #[test]
+ fn detect_macro_call() {
+ cov_mark::check!(destructure_tuple_macro_call);
+ check_in_place_assist(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t.0);
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/.0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_usage() {
+ check_both_assists(
+ // leading `"foo"` to ensure `$e` doesn't start at position `0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t);
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/);
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_function_usage() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t.into());
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/.into());
+}
+ "#,
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t.into());
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_index_usage() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t.0);
+}
+ "#,
+ // FIXME: replace `t.0` with `_0` (cannot detect range of tuple index in macro call)
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/.0);
+}
+ "#,
+ // FIXME: replace `t.0` with `_0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t.0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_in_parentheses_index_usage() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!((t).0);
+}
+ "#,
+ // FIXME: replace `(t).0` with `_0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!((/*t*/).0);
+}
+ "#,
+ // FIXME: replace `(t).0` with `_0`
+ r#"
+macro_rules! m {
+ ($e:expr) => { "foo"; $e };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!((t).0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn empty_macro() {
+ check_in_place_assist(
+ r#"
+macro_rules! m {
+ () => { "foo" };
+ ($e:expr) => { $e; "foo" };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t);
+}
+ "#,
+ // FIXME: the macro also accepts no argument, so the result stays valid; the assist should produce invalid code here
+ r#"
+macro_rules! m {
+ () => { "foo" };
+ ($e:expr) => { $e; "foo" };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn tuple_index_in_macro() {
+ check_both_assists(
+ r#"
+macro_rules! m {
+ ($t:expr, $i:expr) => { $t.0 + $i };
+}
+
+fn main() {
+ let $0t = (1,2);
+ m!(t, t.0);
+}
+ "#,
+ // FIXME: replace `t.0` in macro call (not IN macro) with `_0`
+ r#"
+macro_rules! m {
+ ($t:expr, $i:expr) => { $t.0 + $i };
+}
+
+fn main() {
+ let ($0_0, _1) = (1,2);
+ m!(/*t*/, /*t*/.0);
+}
+ "#,
+ // FIXME: replace `t.0` in macro call with `_0`
+ r#"
+macro_rules! m {
+ ($t:expr, $i:expr) => { $t.0 + $i };
+}
+
+fn main() {
+ let t @ ($0_0, _1) = (1,2);
+ m!(t, t.0);
+}
+ "#,
+ )
+ }
+ }
+
+ mod refs {
+ use super::assist::*;
+
+ #[test]
+ fn no_ref() {
+ check_in_place_assist(
+ r#"
+fn main() {
+ let $0t = &(1,2);
+ let v: i32 = t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = &(1,2);
+ let v: i32 = *_0;
+}
+ "#,
+ )
+ }
+ #[test]
+ fn no_ref_with_parens() {
+ check_in_place_assist(
+ r#"
+fn main() {
+ let $0t = &(1,2);
+ let v: i32 = (t.0);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = &(1,2);
+ let v: i32 = (*_0);
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_ref() {
+ check_in_place_assist(
+ r#"
+fn main() {
+ let $0t = &(1,2);
+ let v: &i32 = &t.0;
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = &(1,2);
+ let v: &i32 = _0;
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_ref_in_parens_ref() {
+ check_in_place_assist(
+ r#"
+fn main() {
+ let $0t = &(1,2);
+ let v: &i32 = &(t.0);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = &(1,2);
+ let v: &i32 = _0;
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_ref_in_ref_parens() {
+ check_in_place_assist(
+ r#"
+fn main() {
+ let $0t = &(1,2);
+ let v: &i32 = (&t.0);
+}
+ "#,
+ r#"
+fn main() {
+ let ($0_0, _1) = &(1,2);
+ let v: &i32 = _0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn deref_and_parentheses() {
+ // Operator/Expressions with higher precedence than deref (`*`):
+ // https://doc.rust-lang.org/reference/expressions.html#expression-precedence
+ // * Path
+ // * Method call
+ // * Field expression
+ // * Function calls, array indexing
+ // * `?`
+ check_in_place_assist(
+ r#"
+//- minicore: option
+fn f1(v: i32) {}
+fn f2(v: &i32) {}
+trait T {
+ fn do_stuff(self) {}
+}
+impl T for i32 {
+ fn do_stuff(self) {}
+}
+impl T for &i32 {
+ fn do_stuff(self) {}
+}
+struct S4 {
+ value: i32,
+}
+
+fn foo() -> Option<()> {
+ let $0t = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
+ let v: i32 = t.0; // deref, no parens
+ let v: &i32 = &t.0; // no deref, no parens, remove `&`
+ f1(t.0); // deref, no parens
+ f2(&t.0); // `&*` -> cancel out -> no deref, no parens
+ // https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639
+ // let v: i32 = t.1.0; // no deref, no parens
+ let v: i32 = t.4.value; // no deref, no parens
+ t.0.do_stuff(); // deref, parens
+ let v: i32 = t.2?; // deref, parens
+ let v: i32 = t.3[0]; // no deref, no parens
+ (t.0).do_stuff(); // deref, no additional parens
+ let v: i32 = *t.5; // deref (-> 2), no parens
+
+ None
+}
+ "#,
+ r#"
+fn f1(v: i32) {}
+fn f2(v: &i32) {}
+trait T {
+ fn do_stuff(self) {}
+}
+impl T for i32 {
+ fn do_stuff(self) {}
+}
+impl T for &i32 {
+ fn do_stuff(self) {}
+}
+struct S4 {
+ value: i32,
+}
+
+fn foo() -> Option<()> {
+ let ($0_0, _1, _2, _3, _4, _5) = &(0, (1,"1"), Some(2), [3;3], S4 { value: 4 }, &5);
+ let v: i32 = *_0; // deref, no parens
+ let v: &i32 = _0; // no deref, no parens, remove `&`
+ f1(*_0); // deref, no parens
+ f2(_0); // `&*` -> cancel out -> no deref, no parens
+ // https://github.com/rust-lang/rust-analyzer/issues/1109#issuecomment-658868639
+ // let v: i32 = t.1.0; // no deref, no parens
+ let v: i32 = _4.value; // no deref, no parens
+ (*_0).do_stuff(); // deref, parens
+ let v: i32 = (*_2)?; // deref, parens
+ let v: i32 = _3[0]; // no deref, no parens
+ (*_0).do_stuff(); // deref, no additional parens
+ let v: i32 = **_5; // deref (-> 2), no parens
+
+ None
+}
+ "#,
+ )
+ }
+
+ // ---------
+ // auto-ref/deref
+
+ #[test]
+ fn self_auto_ref_doesnt_need_deref() {
+ check_in_place_assist(
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn f(&self) {}
+}
+
+fn main() {
+ let $0t = &(S,2);
+ let s = t.0.f();
+}
+ "#,
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn f(&self) {}
+}
+
+fn main() {
+ let ($0_0, _1) = &(S,2);
+ let s = _0.f();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn self_owned_requires_deref() {
+ check_in_place_assist(
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn f(self) {}
+}
+
+fn main() {
+ let $0t = &(S,2);
+ let s = t.0.f();
+}
+ "#,
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn f(self) {}
+}
+
+fn main() {
+ let ($0_0, _1) = &(S,2);
+ let s = (*_0).f();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn self_auto_ref_in_trait_call_doesnt_require_deref() {
+ check_in_place_assist(
+ r#"
+trait T {
+ fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for &S {
+ fn f(self) {}
+}
+
+fn main() {
+ let $0t = &(S,2);
+ let s = t.0.f();
+}
+ "#,
+ // FIXME: doesn't need the deref and parens. But `ctx.sema.resolve_method_call` doesn't resolve trait implementations
+ r#"
+trait T {
+ fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for &S {
+ fn f(self) {}
+}
+
+fn main() {
+ let ($0_0, _1) = &(S,2);
+ let s = (*_0).f();
+}
+ "#,
+ )
+ }
+ #[test]
+ fn no_auto_deref_because_of_owned_and_ref_trait_impl() {
+ check_in_place_assist(
+ r#"
+trait T {
+ fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for S {
+ fn f(self) {}
+}
+impl T for &S {
+ fn f(self) {}
+}
+
+fn main() {
+ let $0t = &(S,2);
+ let s = t.0.f();
+}
+ "#,
+ r#"
+trait T {
+ fn f(self);
+}
+#[derive(Clone, Copy)]
+struct S;
+impl T for S {
+ fn f(self) {}
+}
+impl T for &S {
+ fn f(self) {}
+}
+
+fn main() {
+ let ($0_0, _1) = &(S,2);
+ let s = (*_0).f();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn no_outer_parens_when_ref_deref() {
+ check_in_place_assist(
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+ let $0t = &(S,&S);
+ let v = (&t.0).do_stuff();
+}
+ "#,
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+ let ($0_0, _1) = &(S,&S);
+ let v = _0.do_stuff();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn auto_ref_deref() {
+ check_in_place_assist(
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+ let $0t = &(S,&S);
+ let v = (&t.0).do_stuff(); // no deref, remove parens
+ // `t.0` gets auto-refed -> no deref needed -> no parens
+ let v = t.0.do_stuff(); // no deref, no parens
+ let v = &t.0.do_stuff(); // `&` is for result -> no deref, no parens
+ // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
+ let v = t.1.do_stuff(); // deref, parens
+}
+ "#,
+ r#"
+#[derive(Clone, Copy)]
+struct S;
+impl S {
+ fn do_stuff(&self) -> i32 { 42 }
+}
+fn main() {
+ let ($0_0, _1) = &(S,&S);
+ let v = _0.do_stuff(); // no deref, remove parens
+ // `t.0` gets auto-refed -> no deref needed -> no parens
+ let v = _0.do_stuff(); // no deref, no parens
+ let v = &_0.do_stuff(); // `&` is for result -> no deref, no parens
+ // deref: `_1` is `&&S`, but method called is on `&S` -> there might be a method accepting `&&S`
+ let v = (*_1).do_stuff(); // deref, parens
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn mutable() {
+ check_in_place_assist(
+ r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+ let $0t = &mut (1,2);
+ let v = t.0;
+ t.0 = 42;
+ f_owned(t.0);
+ f(&t.0);
+ f_mut(&mut t.0);
+}
+ "#,
+ r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+ let ($0_0, _1) = &mut (1,2);
+ let v = *_0;
+ *_0 = 42;
+ f_owned(*_0);
+ f(_0);
+ f_mut(_0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn with_ref_keyword() {
+ check_in_place_assist(
+ r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+
+fn main() {
+ let ref $0t = (1,2);
+ let v = t.0;
+ f_owned(t.0);
+ f(&t.0);
+}
+ "#,
+ r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+
+fn main() {
+ let (ref $0_0, ref _1) = (1,2);
+ let v = *_0;
+ f_owned(*_0);
+ f(_0);
+}
+ "#,
+ )
+ }
+ #[test]
+ fn with_ref_mut_keywords() {
+ check_in_place_assist(
+ r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+ let ref mut $0t = (1,2);
+ let v = t.0;
+ t.0 = 42;
+ f_owned(t.0);
+ f(&t.0);
+ f_mut(&mut t.0);
+}
+ "#,
+ r#"
+fn f_owned(v: i32) {}
+fn f(v: &i32) {}
+fn f_mut(v: &mut i32) { *v = 42; }
+
+fn main() {
+ let (ref mut $0_0, ref mut _1) = (1,2);
+ let v = *_0;
+ *_0 = 42;
+ f_owned(*_0);
+ f(_0);
+ f_mut(_0);
+}
+ "#,
+ )
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
new file mode 100644
index 000000000..87f5018fb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
@@ -0,0 +1,900 @@
+use either::Either;
+use hir::{AssocItem, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef};
+use ide_db::{
+ defs::{Definition, NameRefClass},
+ search::SearchScope,
+};
+use stdx::never;
+use syntax::{
+ ast::{self, make},
+ ted, AstNode, Direction, SyntaxNode, SyntaxToken, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: expand_glob_import
+//
+// Expands glob imports.
+//
+// ```
+// mod foo {
+// pub struct Bar;
+// pub struct Baz;
+// }
+//
+// use foo::*$0;
+//
+// fn qux(bar: Bar, baz: Baz) {}
+// ```
+// ->
+// ```
+// mod foo {
+// pub struct Bar;
+// pub struct Baz;
+// }
+//
+// use foo::{Bar, Baz};
+//
+// fn qux(bar: Bar, baz: Baz) {}
+// ```
+pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let star = ctx.find_token_syntax_at_offset(T![*])?;
+ let use_tree = star.parent().and_then(ast::UseTree::cast)?;
+ let (parent, mod_path) = find_parent_and_path(&star)?;
+ let target_module = match ctx.sema.resolve_path(&mod_path)? {
+ PathResolution::Def(ModuleDef::Module(it)) => it,
+ _ => return None,
+ };
+
+ let current_scope = ctx.sema.scope(&star.parent()?)?;
+ let current_module = current_scope.module();
+
+ let refs_in_target = find_refs_in_mod(ctx, target_module, current_module)?;
+ let imported_defs = find_imported_defs(ctx, star)?;
+
+ let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
+ acc.add(
+ AssistId("expand_glob_import", AssistKind::RefactorRewrite),
+ "Expand glob import",
+ target.text_range(),
+ |builder| {
+ let use_tree = builder.make_mut(use_tree);
+
+ let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
+ let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
+ let path = make::ext::ident_path(&n.to_string());
+ make::use_tree(path, None, None, false)
+ }))
+ .clone_for_update();
+
+ match use_tree.star_token() {
+ Some(star) => {
+ let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1;
+ if needs_braces {
+ ted::replace(star, expanded.syntax())
+ } else {
+ let without_braces = expanded
+ .syntax()
+ .children_with_tokens()
+ .filter(|child| !matches!(child.kind(), T!['{'] | T!['}']))
+ .collect();
+ ted::replace_with_many(star, without_braces)
+ }
+ }
+ None => never!(),
+ }
+ },
+ )
+}
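+
+// Note on the replacement above: with a path and more than one name to import, the star is
+// replaced by a brace list (e.g. `use foo::*` -> `use foo::{Bar, Baz, f}`); with exactly one
+// name the braces are dropped (e.g. `use foo::*` -> `use foo::Tr`, see
+// `expanding_glob_import_with_trait_method_uses` below).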
+
+fn find_parent_and_path(
+ star: &SyntaxToken,
+) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
+ return star.parent_ancestors().find_map(|n| {
+ find_use_tree_list(n.clone())
+ .map(|(u, p)| (Either::Right(u), p))
+ .or_else(|| find_use_tree(n).map(|(u, p)| (Either::Left(u), p)))
+ });
+
+ fn find_use_tree_list(n: SyntaxNode) -> Option<(ast::UseTreeList, ast::Path)> {
+ let use_tree_list = ast::UseTreeList::cast(n)?;
+ let path = use_tree_list.parent_use_tree().path()?;
+ Some((use_tree_list, path))
+ }
+
+ fn find_use_tree(n: SyntaxNode) -> Option<(ast::UseTree, ast::Path)> {
+ let use_tree = ast::UseTree::cast(n)?;
+ let path = use_tree.path()?;
+ Some((use_tree, path))
+ }
+}
+
+fn def_is_referenced_in(def: Definition, ctx: &AssistContext<'_>) -> bool {
+ let search_scope = SearchScope::single_file(ctx.file_id());
+ def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
+}
+
+#[derive(Debug, Clone)]
+struct Ref {
+ // could be alias
+ visible_name: Name,
+ def: Definition,
+}
+
+impl Ref {
+ fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> {
+ match scope_def {
+ ScopeDef::ModuleDef(def) => {
+ Some(Ref { visible_name: name, def: Definition::from(def) })
+ }
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+struct Refs(Vec<Ref>);
+
+impl Refs {
+ fn used_refs(&self, ctx: &AssistContext<'_>) -> Refs {
+ Refs(
+ self.0
+ .clone()
+ .into_iter()
+ .filter(|r| {
+ if let Definition::Trait(tr) = r.def {
+ if tr.items(ctx.db()).into_iter().any(|ai| {
+ if let AssocItem::Function(f) = ai {
+ def_is_referenced_in(Definition::Function(f), ctx)
+ } else {
+ false
+ }
+ }) {
+ return true;
+ }
+ }
+
+ def_is_referenced_in(r.def, ctx)
+ })
+ .collect(),
+ )
+ }
+
+ fn filter_out_by_defs(&self, defs: Vec<Definition>) -> Refs {
+ Refs(self.0.clone().into_iter().filter(|r| !defs.contains(&r.def)).collect())
+ }
+}
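+
+// Note: `used_refs` keeps a trait even when only its methods are referenced, so
+// `().method()` alone is enough to retain `use foo::Tr` in the expansion (see
+// `expanding_glob_import_with_trait_method_uses` below).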
+
+fn find_refs_in_mod(ctx: &AssistContext<'_>, module: Module, visible_from: Module) -> Option<Refs> {
+ if !is_mod_visible_from(ctx, module, visible_from) {
+ return None;
+ }
+
+ let module_scope = module.scope(ctx.db(), Some(visible_from));
+ let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
+ Some(Refs(refs))
+}
+
+fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
+ match module.parent(ctx.db()) {
+ Some(parent) => {
+ module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, parent, from)
+ }
+ None => true,
+ }
+}
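+
+// The recursion above checks every ancestor module, e.g. `use foo::bar::*` is rejected when
+// `bar` is private even though the items inside `bar` are `pub` (see
+// `expanding_is_not_applicable_if_target_module_is_not_accessible_from_current_scope`).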
+
+// looks for name refs in parent use block's siblings
+//
+// mod bar {
+// mod qux {
+// struct Qux;
+// }
+//
+// pub use qux::Qux;
+// }
+//
+// ↓ ---------------
+// use foo::*$0;
+// use baz::Baz;
+// ↑ ---------------
+fn find_imported_defs(ctx: &AssistContext<'_>, star: SyntaxToken) -> Option<Vec<Definition>> {
+ let parent_use_item_syntax = star.parent_ancestors().find_map(|n| {
+ if ast::Use::can_cast(n.kind()) {
+ Some(n)
+ } else {
+ None
+ }
+ })?;
+
+ Some(
+ [Direction::Prev, Direction::Next]
+ .into_iter()
+ .flat_map(|dir| {
+ parent_use_item_syntax
+ .siblings(dir.to_owned())
+ .filter(|n| ast::Use::can_cast(n.kind()))
+ })
+ .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
+ .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
+ NameRefClass::Definition(
+ def @ (Definition::Macro(_)
+ | Definition::Module(_)
+ | Definition::Function(_)
+ | Definition::Adt(_)
+ | Definition::Variant(_)
+ | Definition::Const(_)
+ | Definition::Static(_)
+ | Definition::Trait(_)
+ | Definition::TypeAlias(_)),
+ ) => Some(def),
+ _ => None,
+ })
+ .collect(),
+ )
+}
+
+fn find_names_to_import(
+ ctx: &AssistContext<'_>,
+ refs_in_target: Refs,
+ imported_defs: Vec<Definition>,
+) -> Vec<Name> {
+ let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs);
+ used_refs.0.iter().map(|r| r.visible_name.clone()).collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn expanding_glob_import() {
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::*$0;
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+}
+",
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::{Bar, Baz, f};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+}
+",
+ )
+ }
+
+ #[test]
+ fn expanding_glob_import_unused() {
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::*$0;
+
+fn qux() {}
+",
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::{};
+
+fn qux() {}
+",
+ )
+ }
+
+ #[test]
+ fn expanding_glob_import_with_existing_explicit_names() {
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::{*$0, f};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+}
+",
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::{Bar, Baz, f};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+}
+",
+ )
+ }
+
+ #[test]
+ fn expanding_glob_import_with_existing_uses_in_same_module() {
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::Bar;
+use foo::{*$0, f};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+}
+",
+ r"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+}
+
+use foo::Bar;
+use foo::{Baz, f};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+}
+",
+ )
+ }
+
+ #[test]
+ fn expanding_nested_glob_import() {
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+ }
+}
+
+use foo::{bar::{*$0, f}, baz::*};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+}
+",
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+ }
+}
+
+use foo::{bar::{Bar, Baz, f}, baz::*};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+}
+",
+ );
+
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+ }
+}
+
+use foo::{bar::{Bar, Baz, f}, baz::*$0};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+}
+",
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+ }
+}
+
+use foo::{bar::{Bar, Baz, f}, baz::g};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+}
+",
+ );
+
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+
+ pub mod qux {
+ pub fn h() {}
+ pub fn m() {}
+
+ pub mod q {
+ pub fn j() {}
+ }
+ }
+ }
+}
+
+use foo::{
+ bar::{*, f},
+ baz::{g, qux::*$0}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+ h();
+ q::j();
+}
+",
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+
+ pub mod qux {
+ pub fn h() {}
+ pub fn m() {}
+
+ pub mod q {
+ pub fn j() {}
+ }
+ }
+ }
+}
+
+use foo::{
+ bar::{*, f},
+ baz::{g, qux::{h, q}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+ h();
+ q::j();
+}
+",
+ );
+
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+
+ pub mod qux {
+ pub fn h() {}
+ pub fn m() {}
+
+ pub mod q {
+ pub fn j() {}
+ }
+ }
+ }
+}
+
+use foo::{
+ bar::{*, f},
+ baz::{g, qux::{h, q::*$0}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+ h();
+ j();
+}
+",
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+
+ pub mod qux {
+ pub fn h() {}
+ pub fn m() {}
+
+ pub mod q {
+ pub fn j() {}
+ }
+ }
+ }
+}
+
+use foo::{
+ bar::{*, f},
+ baz::{g, qux::{h, q::j}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+ h();
+ j();
+}
+",
+ );
+
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+
+ pub mod qux {
+ pub fn h() {}
+ pub fn m() {}
+
+ pub mod q {
+ pub fn j() {}
+ }
+ }
+ }
+}
+
+use foo::{
+ bar::{*, f},
+ baz::{g, qux::{q::j, *$0}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+ h();
+ j();
+}
+",
+ r"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+
+ pub fn f() {}
+ }
+
+ pub mod baz {
+ pub fn g() {}
+
+ pub mod qux {
+ pub fn h() {}
+ pub fn m() {}
+
+ pub mod q {
+ pub fn j() {}
+ }
+ }
+ }
+}
+
+use foo::{
+ bar::{*, f},
+ baz::{g, qux::{q::j, h}}
+};
+
+fn qux(bar: Bar, baz: Baz) {
+ f();
+ g();
+ h();
+ j();
+}
+",
+ );
+ }
+
+ #[test]
+ fn expanding_glob_import_with_macro_defs() {
+ check_assist(
+ expand_glob_import,
+ r#"
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! bar {
+ () => ()
+}
+
+pub fn baz() {}
+
+//- /main.rs crate:main deps:foo
+use foo::*$0;
+
+fn main() {
+ bar!();
+ baz();
+}
+"#,
+ r#"
+use foo::{bar, baz};
+
+fn main() {
+ bar!();
+ baz();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expanding_glob_import_with_trait_method_uses() {
+ check_assist(
+ expand_glob_import,
+ r"
+//- /lib.rs crate:foo
+pub trait Tr {
+ fn method(&self) {}
+}
+impl Tr for () {}
+
+//- /main.rs crate:main deps:foo
+use foo::*$0;
+
+fn main() {
+ ().method();
+}
+",
+ r"
+use foo::Tr;
+
+fn main() {
+ ().method();
+}
+",
+ );
+
+ check_assist(
+ expand_glob_import,
+ r"
+//- /lib.rs crate:foo
+pub trait Tr {
+ fn method(&self) {}
+}
+impl Tr for () {}
+
+pub trait Tr2 {
+ fn method2(&self) {}
+}
+impl Tr2 for () {}
+
+//- /main.rs crate:main deps:foo
+use foo::*$0;
+
+fn main() {
+ ().method();
+}
+",
+ r"
+use foo::Tr;
+
+fn main() {
+ ().method();
+}
+",
+ );
+ }
+
+ #[test]
+ fn expanding_is_not_applicable_if_target_module_is_not_accessible_from_current_scope() {
+ check_assist_not_applicable(
+ expand_glob_import,
+ r"
+mod foo {
+ mod bar {
+ pub struct Bar;
+ }
+}
+
+use foo::bar::*$0;
+
+fn baz(bar: Bar) {}
+",
+ );
+
+ check_assist_not_applicable(
+ expand_glob_import,
+ r"
+mod foo {
+ mod bar {
+ pub mod baz {
+ pub struct Baz;
+ }
+ }
+}
+
+use foo::bar::baz::*$0;
+
+fn qux(baz: Baz) {}
+",
+ );
+ }
+
+ #[test]
+ fn expanding_is_not_applicable_if_cursor_is_not_in_star_token() {
+ check_assist_not_applicable(
+ expand_glob_import,
+ r"
+ mod foo {
+ pub struct Bar;
+ pub struct Baz;
+ pub struct Qux;
+ }
+
+ use foo::Bar$0;
+
+ fn qux(bar: Bar, baz: Baz) {}
+ ",
+ )
+ }
+
+ #[test]
+ fn expanding_glob_import_single_nested_glob_only() {
+ check_assist(
+ expand_glob_import,
+ r"
+mod foo {
+ pub struct Bar;
+}
+
+use foo::{*$0};
+
+struct Baz {
+ bar: Bar
+}
+",
+ r"
+mod foo {
+ pub struct Bar;
+}
+
+use foo::{Bar};
+
+struct Baz {
+ bar: Bar
+}
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
new file mode 100644
index 000000000..52a55ead3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -0,0 +1,5333 @@
+use std::iter;
+
+use ast::make;
+use either::Either;
+use hir::{
+ HasSource, HirDisplay, InFile, Local, ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
+};
+use ide_db::{
+ defs::{Definition, NameRefClass},
+ famous_defs::FamousDefs,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, ReferenceCategory, SearchScope},
+ syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr},
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ AstNode, HasGenericParams,
+ },
+ match_ast, ted, SyntaxElement,
+ SyntaxKind::{self, COMMENT},
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists, TreeMutator},
+ utils::generate_impl_text,
+ AssistId,
+};
+
+// Assist: extract_function
+//
+// Extracts selected statements and comments into a new function.
+//
+// ```
+// fn main() {
+// let n = 1;
+// $0let m = n + 2;
+// // calculate
+// let k = m + n;$0
+// let g = 3;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let n = 1;
+// fun_name(n);
+// let g = 3;
+// }
+//
+// fn $0fun_name(n: i32) {
+// let m = n + 2;
+// // calculate
+// let k = m + n;
+// }
+// ```
+pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let range = ctx.selection_trimmed();
+ if range.is_empty() {
+ return None;
+ }
+
+ let node = ctx.covering_element();
+ if node.kind() == COMMENT {
+ cov_mark::hit!(extract_function_in_comment_is_not_applicable);
+ return None;
+ }
+
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ let body = extraction_target(&node, range)?;
+ let container_info = body.analyze_container(&ctx.sema)?;
+
+ let (locals_used, self_param) = body.analyze(&ctx.sema);
+
+ let anchor = if self_param.is_some() { Anchor::Method } else { Anchor::Freestanding };
+ let insert_after = node_to_insert_after(&body, anchor)?;
+ let semantics_scope = ctx.sema.scope(&insert_after)?;
+ let module = semantics_scope.module();
+
+ let ret_ty = body.return_ty(ctx)?;
+ let control_flow = body.external_control_flow(ctx, &container_info)?;
+ let ret_values = body.ret_values(ctx, node.parent().as_ref().unwrap_or(&node));
+
+ let target_range = body.text_range();
+
+ let scope = ImportScope::find_insert_use_container(&node, &ctx.sema)?;
+
+ acc.add(
+ AssistId("extract_function", crate::AssistKind::RefactorExtract),
+ "Extract into function",
+ target_range,
+ move |builder| {
+ let outliving_locals: Vec<_> = ret_values.collect();
+ if stdx::never!(!outliving_locals.is_empty() && !ret_ty.is_unit()) {
+ // We should not have variables that outlive the body if we have an expression block
+ return;
+ }
+
+ let params =
+ body.extracted_function_params(ctx, &container_info, locals_used.iter().copied());
+
+ let extracted_from_trait_impl = body.extracted_from_trait_impl();
+
+ let name = make_function_name(&semantics_scope);
+
+ let fun = Function {
+ name,
+ self_param,
+ params,
+ control_flow,
+ ret_ty,
+ body,
+ outliving_locals,
+ mods: container_info,
+ };
+
+ let new_indent = IndentLevel::from_node(&insert_after);
+ let old_indent = fun.body.indent_level();
+
+ builder.replace(target_range, make_call(ctx, &fun, old_indent));
+
+ let fn_def = match fun.self_param_adt(ctx) {
+ Some(adt) if extracted_from_trait_impl => {
+ let fn_def = format_function(ctx, module, &fun, old_indent, new_indent + 1);
+ generate_impl_text(&adt, &fn_def).replace("{\n\n", "{")
+ }
+ _ => format_function(ctx, module, &fun, old_indent, new_indent),
+ };
+
+ if fn_def.contains("ControlFlow") {
+ let scope = match scope {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+
+ let control_flow_enum =
+ FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();
+
+ if let Some(control_flow_enum) = control_flow_enum {
+ let mod_path = module.find_use_path_prefixed(
+ ctx.sema.db,
+ ModuleDef::from(control_flow_enum),
+ ctx.config.insert_use.prefix_kind,
+ );
+
+ if let Some(mod_path) = mod_path {
+ insert_use(&scope, mod_path_to_ast(&mod_path), &ctx.config.insert_use);
+ }
+ }
+ }
+
+ let insert_offset = insert_after.text_range().end();
+
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, insert_offset, fn_def),
+ None => builder.insert(insert_offset, fn_def),
+ };
+ },
+ )
+}
+
+fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
+ let mut names_in_scope = vec![];
+ semantics_scope.process_all_names(&mut |name, _| names_in_scope.push(name.to_string()));
+
+ let default_name = "fun_name";
+
+ let mut name = default_name.to_string();
+ let mut counter = 0;
+ while names_in_scope.contains(&name) {
+ counter += 1;
+ name = format!("{}{}", &default_name, counter)
+ }
+ make::name_ref(&name)
+}
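+
+// E.g. if `fun_name` is already in scope, the generated name falls back to `fun_name1`,
+// then `fun_name2`, and so on.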
+
+/// Try to guess what the user wants to extract
+///
+/// We basically have two cases:
+/// * We want a whole node, like `loop {}`, `2 + 2`, `{ let n = 1; }` exprs.
+/// Then we can use `ast::Expr`
+/// * We want a few statements for a block. E.g.
+/// ```rust,no_run
+/// fn foo() -> i32 {
+/// let m = 1;
+/// $0
+/// let n = 2;
+/// let k = 3;
+/// k + n
+/// $0
+/// }
+/// ```
+///
+fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<FunctionBody> {
+ if let Some(stmt) = ast::Stmt::cast(node.clone()) {
+ return match stmt {
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::ExprStmt(_) | ast::Stmt::LetStmt(_) => Some(FunctionBody::from_range(
+ node.parent().and_then(ast::StmtList::cast)?,
+ node.text_range(),
+ )),
+ };
+ }
+
+ // Covering element returned the parent block of one or multiple statements that have been selected
+ if let Some(stmt_list) = ast::StmtList::cast(node.clone()) {
+ if let Some(block_expr) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) {
+ if block_expr.syntax().text_range() == selection_range {
+ return FunctionBody::from_expr(block_expr.into());
+ }
+ }
+
+ // Extract the full statements.
+ return Some(FunctionBody::from_range(stmt_list, selection_range));
+ }
+
+ let expr = ast::Expr::cast(node.clone())?;
+ // A node got selected fully
+ if node.text_range() == selection_range {
+ return FunctionBody::from_expr(expr);
+ }
+
+ node.ancestors().find_map(ast::Expr::cast).and_then(FunctionBody::from_expr)
+}
+
+#[derive(Debug)]
+struct Function {
+ name: ast::NameRef,
+ self_param: Option<ast::SelfParam>,
+ params: Vec<Param>,
+ control_flow: ControlFlow,
+ ret_ty: RetType,
+ body: FunctionBody,
+ outliving_locals: Vec<OutlivedLocal>,
+ mods: ContainerInfo,
+}
+
+#[derive(Debug)]
+struct Param {
+ var: Local,
+ ty: hir::Type,
+ move_local: bool,
+ requires_mut: bool,
+ is_copy: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum ParamKind {
+ Value,
+ MutValue,
+ SharedRef,
+ MutRef,
+}
+
+#[derive(Debug, Eq, PartialEq)]
+enum FunType {
+ Unit,
+ Single(hir::Type),
+ Tuple(Vec<hir::Type>),
+}
+
+/// Where to put extracted function definition
+#[derive(Debug)]
+enum Anchor {
+ /// Extract free function and put right after current top-level function
+ Freestanding,
+ /// Extract method and put right after current function in the impl-block
+ Method,
+}
+
+// FIXME: ControlFlow and ContainerInfo both track some function modifiers, feels like these two should
+// probably be merged somehow.
+#[derive(Debug)]
+struct ControlFlow {
+ kind: Option<FlowKind>,
+ is_async: bool,
+ is_unsafe: bool,
+}
+
+/// The thing whose expression we are extracting from. Can be a function, const, static, const arg, ...
+#[derive(Clone, Debug)]
+struct ContainerInfo {
+ is_const: bool,
+ is_in_tail: bool,
+ parent_loop: Option<SyntaxNode>,
+ /// The function's return type, const's type etc.
+ ret_type: Option<hir::Type>,
+ generic_param_lists: Vec<ast::GenericParamList>,
+ where_clauses: Vec<ast::WhereClause>,
+}
+
+/// Control flow that is exported from extracted function
+///
+/// E.g.:
+/// ```rust,no_run
+/// loop {
+/// $0
+/// if 42 == 42 {
+/// break;
+/// }
+/// $0
+/// }
+/// ```
+#[derive(Debug, Clone)]
+enum FlowKind {
+ /// Return with value (`return $expr;`)
+ Return(Option<ast::Expr>),
+ Try {
+ kind: TryKind,
+ },
+ /// Break with label and value (`break 'label $expr;`)
+ Break(Option<ast::Lifetime>, Option<ast::Expr>),
+ /// Continue with label (`continue 'label;`)
+ Continue(Option<ast::Lifetime>),
+}
+
+#[derive(Debug, Clone)]
+enum TryKind {
+ Option,
+ Result { ty: hir::Type },
+}
+
+#[derive(Debug)]
+enum RetType {
+ Expr(hir::Type),
+ Stmt,
+}
+
+impl RetType {
+ fn is_unit(&self) -> bool {
+ match self {
+ RetType::Expr(ty) => ty.is_unit(),
+ RetType::Stmt => true,
+ }
+ }
+}
+
+/// Semantically the same as `ast::Expr`, but preserves identity when using only part of the Block
+/// This is the future function body, the part that is being extracted.
+#[derive(Debug)]
+enum FunctionBody {
+ Expr(ast::Expr),
+ Span { parent: ast::StmtList, text_range: TextRange },
+}
+
+#[derive(Debug)]
+struct OutlivedLocal {
+ local: Local,
+ mut_usage_outside_body: bool,
+}
+
+/// Container of local variable usages
+///
+/// Semantically the same as `UsageSearchResult`, but provides a more convenient interface
+struct LocalUsages(ide_db::search::UsageSearchResult);
+
+impl LocalUsages {
+ fn find_local_usages(ctx: &AssistContext<'_>, var: Local) -> Self {
+ Self(
+ Definition::Local(var)
+ .usages(&ctx.sema)
+ .in_scope(SearchScope::single_file(ctx.file_id()))
+ .all(),
+ )
+ }
+
+ fn iter(&self) -> impl Iterator<Item = &FileReference> + '_ {
+ self.0.iter().flat_map(|(_, rs)| rs)
+ }
+}
+
+impl Function {
+ fn return_type(&self, ctx: &AssistContext<'_>) -> FunType {
+ match &self.ret_ty {
+ RetType::Expr(ty) if ty.is_unit() => FunType::Unit,
+ RetType::Expr(ty) => FunType::Single(ty.clone()),
+ RetType::Stmt => match self.outliving_locals.as_slice() {
+ [] => FunType::Unit,
+ [var] => FunType::Single(var.local.ty(ctx.db())),
+ vars => {
+ let types = vars.iter().map(|v| v.local.ty(ctx.db())).collect();
+ FunType::Tuple(types)
+ }
+ },
+ }
+ }
+
+ fn self_param_adt(&self, ctx: &AssistContext<'_>) -> Option<ast::Adt> {
+ let self_param = self.self_param.as_ref()?;
+ let def = ctx.sema.to_def(self_param)?;
+ let adt = def.ty(ctx.db()).strip_references().as_adt()?;
+ let InFile { file_id: _, value } = adt.source(ctx.db())?;
+ Some(value)
+ }
+}
+
+impl ParamKind {
+ fn is_ref(&self) -> bool {
+ matches!(self, ParamKind::SharedRef | ParamKind::MutRef)
+ }
+}
+
+impl Param {
+ fn kind(&self) -> ParamKind {
+ match (self.move_local, self.requires_mut, self.is_copy) {
+ (false, true, _) => ParamKind::MutRef,
+ (false, false, false) => ParamKind::SharedRef,
+ (true, true, _) => ParamKind::MutValue,
+ (_, false, _) => ParamKind::Value,
+ }
+ }
+
+ fn to_arg(&self, ctx: &AssistContext<'_>) -> ast::Expr {
+ let var = path_expr_from_local(ctx, self.var);
+ match self.kind() {
+ ParamKind::Value | ParamKind::MutValue => var,
+ ParamKind::SharedRef => make::expr_ref(var, false),
+ ParamKind::MutRef => make::expr_ref(var, true),
+ }
+ }
+
+ fn to_param(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Param {
+ let var = self.var.name(ctx.db()).to_string();
+ let var_name = make::name(&var);
+ let pat = match self.kind() {
+ ParamKind::MutValue => make::ident_pat(false, true, var_name),
+ ParamKind::Value | ParamKind::SharedRef | ParamKind::MutRef => {
+ make::ext::simple_ident_pat(var_name)
+ }
+ };
+
+ let ty = make_ty(&self.ty, ctx, module);
+ let ty = match self.kind() {
+ ParamKind::Value | ParamKind::MutValue => ty,
+ ParamKind::SharedRef => make::ty_ref(ty, false),
+ ParamKind::MutRef => make::ty_ref(ty, true),
+ };
+
+ make::param(pat.into(), ty)
+ }
+}
+
+impl TryKind {
+ fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>) -> Option<TryKind> {
+ if ty.is_unknown() {
+ // We favour Result for `expr?`
+ return Some(TryKind::Result { ty });
+ }
+ let adt = ty.as_adt()?;
+ let name = adt.name(ctx.db());
+        // FIXME: use lang items to determine if it is a std type or user defined
+        // E.g. if the user happens to define a type named `Option`, we would get a false positive
+ match name.to_string().as_str() {
+ "Option" => Some(TryKind::Option),
+ "Result" => Some(TryKind::Result { ty }),
+ _ => None,
+ }
+ }
+}
+
+impl FlowKind {
+ fn make_result_handler(&self, expr: Option<ast::Expr>) -> ast::Expr {
+ match self {
+ FlowKind::Return(_) => make::expr_return(expr),
+ FlowKind::Break(label, _) => make::expr_break(label.clone(), expr),
+ FlowKind::Try { .. } => {
+ stdx::never!("cannot have result handler with try");
+ expr.unwrap_or_else(|| make::expr_return(None))
+ }
+ FlowKind::Continue(label) => {
+ stdx::always!(expr.is_none(), "continue with value is not possible");
+ make::expr_continue(label.clone())
+ }
+ }
+ }
+
+ fn expr_ty(&self, ctx: &AssistContext<'_>) -> Option<hir::Type> {
+ match self {
+ FlowKind::Return(Some(expr)) | FlowKind::Break(_, Some(expr)) => {
+ ctx.sema.type_of_expr(expr).map(TypeInfo::adjusted)
+ }
+ FlowKind::Try { .. } => {
+ stdx::never!("try does not have defined expr_ty");
+ None
+ }
+ _ => None,
+ }
+ }
+}
+
+impl FunctionBody {
+ fn parent(&self) -> Option<SyntaxNode> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().parent(),
+ FunctionBody::Span { parent, .. } => Some(parent.syntax().clone()),
+ }
+ }
+
+ fn node(&self) -> &SyntaxNode {
+ match self {
+ FunctionBody::Expr(e) => e.syntax(),
+ FunctionBody::Span { parent, .. } => parent.syntax(),
+ }
+ }
+
+    fn extracted_from_trait_impl(&self) -> bool {
+        match self.node().ancestors().find_map(ast::Impl::cast) {
+            Some(c) => c.trait_().is_some(),
+            None => false,
+        }
+    }
+
+ fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().descendants(),
+ FunctionBody::Span { parent, .. } => parent.syntax().descendants(),
+ }
+ }
+
+ fn descendant_paths(&self) -> impl Iterator<Item = ast::Path> {
+ self.descendants().filter_map(|node| {
+ match_ast! {
+ match node {
+ ast::Path(it) => Some(it),
+ _ => None
+ }
+ }
+ })
+ }
+
+ fn from_expr(expr: ast::Expr) -> Option<Self> {
+ match expr {
+ ast::Expr::BreakExpr(it) => it.expr().map(Self::Expr),
+ ast::Expr::ReturnExpr(it) => it.expr().map(Self::Expr),
+ ast::Expr::BlockExpr(it) if !it.is_standalone() => None,
+ expr => Some(Self::Expr(expr)),
+ }
+ }
+
+ fn from_range(parent: ast::StmtList, selected: TextRange) -> FunctionBody {
+ let full_body = parent.syntax().children_with_tokens();
+
+ let mut text_range = full_body
+ .filter(|it| ast::Stmt::can_cast(it.kind()) || it.kind() == COMMENT)
+ .map(|element| element.text_range())
+ .filter(|&range| selected.intersect(range).filter(|it| !it.is_empty()).is_some())
+ .reduce(|acc, stmt| acc.cover(stmt));
+
+ if let Some(tail_range) = parent
+ .tail_expr()
+ .map(|it| it.syntax().text_range())
+ .filter(|&it| selected.intersect(it).is_some())
+ {
+ text_range = Some(match text_range {
+ Some(text_range) => text_range.cover(tail_range),
+ None => tail_range,
+ });
+ }
+ Self::Span { parent, text_range: text_range.unwrap_or(selected) }
+ }
+
+ fn indent_level(&self) -> IndentLevel {
+ match &self {
+ FunctionBody::Expr(expr) => IndentLevel::from_node(expr.syntax()),
+ FunctionBody::Span { parent, .. } => IndentLevel::from_node(parent.syntax()) + 1,
+ }
+ }
+
+ fn tail_expr(&self) -> Option<ast::Expr> {
+ match &self {
+ FunctionBody::Expr(expr) => Some(expr.clone()),
+ FunctionBody::Span { parent, text_range } => {
+ let tail_expr = parent.tail_expr()?;
+ text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
+ }
+ }
+ }
+
+ fn walk_expr(&self, cb: &mut dyn FnMut(ast::Expr)) {
+ match self {
+ FunctionBody::Expr(expr) => walk_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::LetStmt(stmt) => stmt.initializer(),
+ })
+ .for_each(|expr| walk_expr(&expr, cb));
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ walk_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn preorder_expr(&self, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) {
+ match self {
+ FunctionBody::Expr(expr) => preorder_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::LetStmt(stmt) => stmt.initializer(),
+ })
+ .for_each(|expr| preorder_expr(&expr, cb));
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ preorder_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn walk_pat(&self, cb: &mut dyn FnMut(ast::Pat)) {
+ match self {
+ FunctionBody::Expr(expr) => walk_patterns_in_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .for_each(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => {
+ if let Some(expr) = expr_stmt.expr() {
+ walk_patterns_in_expr(&expr, cb)
+ }
+ }
+ ast::Stmt::Item(_) => (),
+ ast::Stmt::LetStmt(stmt) => {
+ if let Some(pat) = stmt.pat() {
+ walk_pat(&pat, cb);
+ }
+ if let Some(expr) = stmt.initializer() {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ }
+ });
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn text_range(&self) -> TextRange {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().text_range(),
+ &FunctionBody::Span { text_range, .. } => text_range,
+ }
+ }
+
+ fn contains_range(&self, range: TextRange) -> bool {
+ self.text_range().contains_range(range)
+ }
+
+ fn precedes_range(&self, range: TextRange) -> bool {
+ self.text_range().end() <= range.start()
+ }
+
+ fn contains_node(&self, node: &SyntaxNode) -> bool {
+ self.contains_range(node.text_range())
+ }
+}
+
+impl FunctionBody {
+    /// Analyzes the function body, returning the locals from the enclosing scope that are
+    /// referenced in it, as well as the `self` param if it is used.
+ fn analyze(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> (FxIndexSet<Local>, Option<ast::SelfParam>) {
+ let mut self_param = None;
+ let mut res = FxIndexSet::default();
+ let mut cb = |name_ref: Option<_>| {
+ let local_ref =
+ match name_ref.and_then(|name_ref| NameRefClass::classify(sema, &name_ref)) {
+ Some(
+ NameRefClass::Definition(Definition::Local(local_ref))
+ | NameRefClass::FieldShorthand { local_ref, field_ref: _ },
+ ) => local_ref,
+ _ => return,
+ };
+ let InFile { file_id, value } = local_ref.source(sema.db);
+ // locals defined inside macros are not relevant to us
+ if !file_id.is_macro() {
+ match value {
+ Either::Right(it) => {
+ self_param.replace(it);
+ }
+ Either::Left(_) => {
+ res.insert(local_ref);
+ }
+ }
+ }
+ };
+ self.walk_expr(&mut |expr| match expr {
+ ast::Expr::PathExpr(path_expr) => {
+ cb(path_expr.path().and_then(|it| it.as_single_name_ref()))
+ }
+ ast::Expr::ClosureExpr(closure_expr) => {
+ if let Some(body) = closure_expr.body() {
+ body.syntax().descendants().map(ast::NameRef::cast).for_each(|it| cb(it));
+ }
+ }
+ ast::Expr::MacroExpr(expr) => {
+ if let Some(tt) = expr.macro_call().and_then(|call| call.token_tree()) {
+ tt.syntax()
+ .children_with_tokens()
+ .flat_map(SyntaxElement::into_token)
+ .filter(|it| it.kind() == SyntaxKind::IDENT)
+ .flat_map(|t| sema.descend_into_macros(t))
+ .for_each(|t| cb(t.parent().and_then(ast::NameRef::cast)));
+ }
+ }
+ _ => (),
+ });
+ (res, self_param)
+ }
+
+ fn analyze_container(&self, sema: &Semantics<'_, RootDatabase>) -> Option<ContainerInfo> {
+ let mut ancestors = self.parent()?.ancestors();
+ let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
+ let mut parent_loop = None;
+ let mut set_parent_loop = |loop_: &dyn ast::HasLoopBody| {
+ if loop_
+ .loop_body()
+ .map_or(false, |it| it.syntax().text_range().contains_range(self.text_range()))
+ {
+ parent_loop.get_or_insert(loop_.syntax().clone());
+ }
+ };
+
+ let (is_const, expr, ty) = loop {
+ let anc = ancestors.next()?;
+ break match_ast! {
+ match anc {
+ ast::ClosureExpr(closure) => (false, closure.body(), infer_expr_opt(closure.body())),
+ ast::BlockExpr(block_expr) => {
+ let (constness, block) = match block_expr.modifier() {
+ Some(ast::BlockModifier::Const(_)) => (true, block_expr),
+ Some(ast::BlockModifier::Try(_)) => (false, block_expr),
+ Some(ast::BlockModifier::Label(label)) if label.lifetime().is_some() => (false, block_expr),
+ _ => continue,
+ };
+ let expr = Some(ast::Expr::BlockExpr(block));
+ (constness, expr.clone(), infer_expr_opt(expr))
+ },
+ ast::Fn(fn_) => {
+ let func = sema.to_def(&fn_)?;
+ let mut ret_ty = func.ret_type(sema.db);
+ if func.is_async(sema.db) {
+ if let Some(async_ret) = func.async_ret_type(sema.db) {
+ ret_ty = async_ret;
+ }
+ }
+ (fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty))
+ },
+ ast::Static(statik) => {
+ (true, statik.body(), Some(sema.to_def(&statik)?.ty(sema.db)))
+ },
+ ast::ConstArg(ca) => {
+ (true, ca.expr(), infer_expr_opt(ca.expr()))
+ },
+ ast::Const(konst) => {
+ (true, konst.body(), Some(sema.to_def(&konst)?.ty(sema.db)))
+ },
+ ast::ConstParam(cp) => {
+ (true, cp.default_val(), Some(sema.to_def(&cp)?.ty(sema.db)))
+ },
+ ast::ConstBlockPat(cbp) => {
+ let expr = cbp.block_expr().map(ast::Expr::BlockExpr);
+ (true, expr.clone(), infer_expr_opt(expr))
+ },
+ ast::Variant(__) => return None,
+ ast::Meta(__) => return None,
+ ast::LoopExpr(it) => {
+ set_parent_loop(&it);
+ continue;
+ },
+ ast::ForExpr(it) => {
+ set_parent_loop(&it);
+ continue;
+ },
+ ast::WhileExpr(it) => {
+ set_parent_loop(&it);
+ continue;
+ },
+ _ => continue,
+ }
+ };
+ };
+ let container_tail = match expr? {
+ ast::Expr::BlockExpr(block) => block.tail_expr(),
+ expr => Some(expr),
+ };
+ let is_in_tail =
+ container_tail.zip(self.tail_expr()).map_or(false, |(container_tail, body_tail)| {
+ container_tail.syntax().text_range().contains_range(body_tail.syntax().text_range())
+ });
+
+ let parent = self.parent()?;
+ let parents = generic_parents(&parent);
+ let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect();
+ let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect();
+
+ Some(ContainerInfo {
+ is_in_tail,
+ is_const,
+ parent_loop,
+ ret_type: ty,
+ generic_param_lists,
+ where_clauses,
+ })
+ }
+
+ fn return_ty(&self, ctx: &AssistContext<'_>) -> Option<RetType> {
+ match self.tail_expr() {
+ Some(expr) => ctx.sema.type_of_expr(&expr).map(TypeInfo::original).map(RetType::Expr),
+ None => Some(RetType::Stmt),
+ }
+ }
+
+ /// Local variables defined inside `body` that are accessed outside of it
+ fn ret_values<'a>(
+ &self,
+ ctx: &'a AssistContext<'_>,
+ parent: &SyntaxNode,
+ ) -> impl Iterator<Item = OutlivedLocal> + 'a {
+ let parent = parent.clone();
+ let range = self.text_range();
+ locals_defined_in_body(&ctx.sema, self)
+ .into_iter()
+ .filter_map(move |local| local_outlives_body(ctx, range, local, &parent))
+ }
+
+ /// Analyses the function body for external control flow.
+ fn external_control_flow(
+ &self,
+ ctx: &AssistContext<'_>,
+ container_info: &ContainerInfo,
+ ) -> Option<ControlFlow> {
+ let mut ret_expr = None;
+ let mut try_expr = None;
+ let mut break_expr = None;
+ let mut continue_expr = None;
+ let mut is_async = false;
+ let mut _is_unsafe = false;
+
+ let mut unsafe_depth = 0;
+ let mut loop_depth = 0;
+
+ self.preorder_expr(&mut |expr| {
+ let expr = match expr {
+ WalkEvent::Enter(e) => e,
+ WalkEvent::Leave(expr) => {
+ match expr {
+ ast::Expr::LoopExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::WhileExpr(_) => loop_depth -= 1,
+ ast::Expr::BlockExpr(block_expr) if block_expr.unsafe_token().is_some() => {
+ unsafe_depth -= 1
+ }
+ _ => (),
+ }
+ return false;
+ }
+ };
+ match expr {
+ ast::Expr::LoopExpr(_) | ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) => {
+ loop_depth += 1;
+ }
+ ast::Expr::BlockExpr(block_expr) if block_expr.unsafe_token().is_some() => {
+ unsafe_depth += 1
+ }
+ ast::Expr::ReturnExpr(it) => {
+ ret_expr = Some(it);
+ }
+ ast::Expr::TryExpr(it) => {
+ try_expr = Some(it);
+ }
+ ast::Expr::BreakExpr(it) if loop_depth == 0 => {
+ break_expr = Some(it);
+ }
+ ast::Expr::ContinueExpr(it) if loop_depth == 0 => {
+ continue_expr = Some(it);
+ }
+ ast::Expr::AwaitExpr(_) => is_async = true,
+ // FIXME: Do unsafe analysis on expression, sem highlighting knows this so we should be able
+ // to just lift that out of there
+ // expr if unsafe_depth ==0 && expr.is_unsafe => is_unsafe = true,
+ _ => {}
+ }
+ false
+ });
+
+ let kind = match (try_expr, ret_expr, break_expr, continue_expr) {
+ (Some(_), _, None, None) => {
+ let ret_ty = container_info.ret_type.clone()?;
+ let kind = TryKind::of_ty(ret_ty, ctx)?;
+
+ Some(FlowKind::Try { kind })
+ }
+ (Some(_), _, _, _) => {
+ cov_mark::hit!(external_control_flow_try_and_bc);
+ return None;
+ }
+ (None, Some(r), None, None) => Some(FlowKind::Return(r.expr())),
+ (None, Some(_), _, _) => {
+ cov_mark::hit!(external_control_flow_return_and_bc);
+ return None;
+ }
+ (None, None, Some(_), Some(_)) => {
+ cov_mark::hit!(external_control_flow_break_and_continue);
+ return None;
+ }
+ (None, None, Some(b), None) => Some(FlowKind::Break(b.lifetime(), b.expr())),
+ (None, None, None, Some(c)) => Some(FlowKind::Continue(c.lifetime())),
+ (None, None, None, None) => None,
+ };
+
+ Some(ControlFlow { kind, is_async, is_unsafe: _is_unsafe })
+ }
+
+ /// find variables that should be extracted as params
+ ///
+ /// Computes additional info that affects param type and mutability
+ fn extracted_function_params(
+ &self,
+ ctx: &AssistContext<'_>,
+ container_info: &ContainerInfo,
+ locals: impl Iterator<Item = Local>,
+ ) -> Vec<Param> {
+ locals
+ .map(|local| (local, local.source(ctx.db())))
+ .filter(|(_, src)| is_defined_outside_of_body(ctx, self, src))
+ .filter_map(|(local, src)| match src.value {
+ Either::Left(src) => Some((local, src)),
+ Either::Right(_) => {
+ stdx::never!(false, "Local::is_self returned false, but source is SelfParam");
+ None
+ }
+ })
+ .map(|(var, src)| {
+ let usages = LocalUsages::find_local_usages(ctx, var);
+ let ty = var.ty(ctx.db());
+
+ let defined_outside_parent_loop = container_info
+ .parent_loop
+ .as_ref()
+ .map_or(true, |it| it.text_range().contains_range(src.syntax().text_range()));
+
+ let is_copy = ty.is_copy(ctx.db());
+ let has_usages = self.has_usages_after_body(&usages);
+ let requires_mut =
+ !ty.is_mutable_reference() && has_exclusive_usages(ctx, &usages, self);
+                // We can move the value into the function call if it's not used after the call,
+                // but if the var is defined outside a loop we are extracting from, we can't move
+                // it either, as the next loop iteration would reuse it.
+ let move_local = (!has_usages && defined_outside_parent_loop) || ty.is_reference();
+ Param { var, ty, move_local, requires_mut, is_copy }
+ })
+ .collect()
+ }
+
+ fn has_usages_after_body(&self, usages: &LocalUsages) -> bool {
+ usages.iter().any(|reference| self.precedes_range(reference.range))
+ }
+}
+
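+/// An ancestor item whose generic parameters and `where` clauses may apply to the extracted code.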
+enum GenericParent {
+ Fn(ast::Fn),
+ Impl(ast::Impl),
+ Trait(ast::Trait),
+}
+
+impl GenericParent {
+ fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.generic_param_list(),
+ GenericParent::Impl(impl_) => impl_.generic_param_list(),
+ GenericParent::Trait(trait_) => trait_.generic_param_list(),
+ }
+ }
+
+ fn where_clause(&self) -> Option<ast::WhereClause> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.where_clause(),
+ GenericParent::Impl(impl_) => impl_.where_clause(),
+ GenericParent::Trait(trait_) => trait_.where_clause(),
+ }
+ }
+}
+
+/// Search `parent`'s ancestors for items with potentially applicable generic parameters
+fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
+ let mut list = Vec::new();
+ if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) {
+ match parent_item {
+ ast::Item::Fn(ref fn_) => {
+ if let Some(parent_parent) = parent_item
+ .syntax()
+ .parent()
+ .and_then(|it| it.parent())
+ .and_then(ast::Item::cast)
+ {
+ match parent_parent {
+ ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)),
+ ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)),
+ _ => (),
+ }
+ }
+ list.push(GenericParent::Fn(fn_.clone()));
+ }
+ _ => (),
+ }
+ }
+ list
+}
+
+/// checks if relevant var is used with `&mut` access inside body
+fn has_exclusive_usages(
+ ctx: &AssistContext<'_>,
+ usages: &LocalUsages,
+ body: &FunctionBody,
+) -> bool {
+ usages
+ .iter()
+ .filter(|reference| body.contains_range(reference.range))
+ .any(|reference| reference_is_exclusive(reference, body, ctx))
+}
+
+/// checks if this reference requires `&mut` access inside node
+fn reference_is_exclusive(
+ reference: &FileReference,
+ node: &dyn HasTokenAtOffset,
+ ctx: &AssistContext<'_>,
+) -> bool {
+ // we directly modify variable with set: `n = 0`, `n += 1`
+ if reference.category == Some(ReferenceCategory::Write) {
+ return true;
+ }
+
+ // we take `&mut` reference to variable: `&mut v`
+ let path = match path_element_of_reference(node, reference) {
+ Some(path) => path,
+ None => return false,
+ };
+
+ expr_require_exclusive_access(ctx, &path).unwrap_or(false)
+}
+
+/// checks if this expr requires `&mut` access, recurses on field access
+fn expr_require_exclusive_access(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<bool> {
+ if let ast::Expr::MacroExpr(_) = expr {
+ // FIXME: expand macro and check output for mutable usages of the variable?
+ return None;
+ }
+
+ let parent = expr.syntax().parent()?;
+
+ if let Some(bin_expr) = ast::BinExpr::cast(parent.clone()) {
+ if matches!(bin_expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
+ return Some(bin_expr.lhs()?.syntax() == expr.syntax());
+ }
+ return Some(false);
+ }
+
+ if let Some(ref_expr) = ast::RefExpr::cast(parent.clone()) {
+ return Some(ref_expr.mut_token().is_some());
+ }
+
+ if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
+ let func = ctx.sema.resolve_method_call(&method_call)?;
+ let self_param = func.self_param(ctx.db())?;
+ let access = self_param.access(ctx.db());
+
+ return Some(matches!(access, hir::Access::Exclusive));
+ }
+
+ if let Some(field) = ast::FieldExpr::cast(parent) {
+ return expr_require_exclusive_access(ctx, &field.into());
+ }
+
+ Some(false)
+}
+
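+/// Abstraction over `SyntaxNode` and `FunctionBody` for looking up the token at an offset;
+/// the `FunctionBody` impl only yields tokens inside the extracted range.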
+trait HasTokenAtOffset {
+ fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken>;
+}
+
+impl HasTokenAtOffset for SyntaxNode {
+ fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
+ SyntaxNode::token_at_offset(self, offset)
+ }
+}
+
+impl HasTokenAtOffset for FunctionBody {
+ fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().token_at_offset(offset),
+ FunctionBody::Span { parent, text_range } => {
+ match parent.syntax().token_at_offset(offset) {
+ TokenAtOffset::None => TokenAtOffset::None,
+ TokenAtOffset::Single(t) => {
+ if text_range.contains_range(t.text_range()) {
+ TokenAtOffset::Single(t)
+ } else {
+ TokenAtOffset::None
+ }
+ }
+ TokenAtOffset::Between(a, b) => {
+ match (
+ text_range.contains_range(a.text_range()),
+ text_range.contains_range(b.text_range()),
+ ) {
+ (true, true) => TokenAtOffset::Between(a, b),
+ (true, false) => TokenAtOffset::Single(a),
+ (false, true) => TokenAtOffset::Single(b),
+ (false, false) => TokenAtOffset::None,
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+/// find relevant `ast::Expr` for reference
+///
+/// # Preconditions
+///
+/// `node` must cover `reference`, that is `node.text_range().contains_range(reference.range)`
+fn path_element_of_reference(
+ node: &dyn HasTokenAtOffset,
+ reference: &FileReference,
+) -> Option<ast::Expr> {
+ let token = node.token_at_offset(reference.range.start()).right_biased().or_else(|| {
+ stdx::never!(false, "cannot find token at variable usage: {:?}", reference);
+ None
+ })?;
+ let path = token.parent_ancestors().find_map(ast::Expr::cast).or_else(|| {
+ stdx::never!(false, "cannot find path parent of variable usage: {:?}", token);
+ None
+ })?;
+ stdx::always!(
+ matches!(path, ast::Expr::PathExpr(_) | ast::Expr::MacroExpr(_)),
+ "unexpected expression type for variable usage: {:?}",
+ path
+ );
+ Some(path)
+}
+
+/// list local variables defined inside `body`
+fn locals_defined_in_body(
+ sema: &Semantics<'_, RootDatabase>,
+ body: &FunctionBody,
+) -> FxIndexSet<Local> {
+ // FIXME: this doesn't work well with macros
+ // see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550
+ let mut res = FxIndexSet::default();
+ body.walk_pat(&mut |pat| {
+ if let ast::Pat::IdentPat(pat) = pat {
+ if let Some(local) = sema.to_def(&pat) {
+ res.insert(local);
+ }
+ }
+ });
+ res
+}
+
+/// Returns usage details if the local variable is used after (outside of) the body
+fn local_outlives_body(
+ ctx: &AssistContext<'_>,
+ body_range: TextRange,
+ local: Local,
+ parent: &SyntaxNode,
+) -> Option<OutlivedLocal> {
+ let usages = LocalUsages::find_local_usages(ctx, local);
+ let mut has_mut_usages = false;
+ let mut any_outlives = false;
+ for usage in usages.iter() {
+ if body_range.end() <= usage.range.start() {
+ has_mut_usages |= reference_is_exclusive(usage, parent, ctx);
+ any_outlives |= true;
+ if has_mut_usages {
+ break; // no need to check more elements we have all the info we wanted
+ }
+ }
+ }
+ if !any_outlives {
+ return None;
+ }
+ Some(OutlivedLocal { local, mut_usage_outside_body: has_mut_usages })
+}
+
+/// checks if the relevant local was defined before (outside of) the body
+fn is_defined_outside_of_body(
+ ctx: &AssistContext<'_>,
+ body: &FunctionBody,
+ src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
+) -> bool {
+ src.file_id.original_file(ctx.db()) == ctx.file_id()
+ && !body.contains_node(either_syntax(&src.value))
+}
+
+fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
+ match value {
+ Either::Left(pat) => pat.syntax(),
+ Either::Right(it) => it.syntax(),
+ }
+}
+
+/// find where to put extracted function definition
+///
+/// Function should be put right after returned node
+fn node_to_insert_after(body: &FunctionBody, anchor: Anchor) -> Option<SyntaxNode> {
+ let node = body.node();
+ let mut ancestors = node.ancestors().peekable();
+ let mut last_ancestor = None;
+ while let Some(next_ancestor) = ancestors.next() {
+ match next_ancestor.kind() {
+ SyntaxKind::SOURCE_FILE => break,
+ SyntaxKind::ITEM_LIST if !matches!(anchor, Anchor::Freestanding) => continue,
+ SyntaxKind::ITEM_LIST => {
+ if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::MODULE) {
+ break;
+ }
+ }
+ SyntaxKind::ASSOC_ITEM_LIST if !matches!(anchor, Anchor::Method) => continue,
+ SyntaxKind::ASSOC_ITEM_LIST if body.extracted_from_trait_impl() => continue,
+ SyntaxKind::ASSOC_ITEM_LIST => {
+ if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::IMPL) {
+ break;
+ }
+ }
+ _ => (),
+ }
+ last_ancestor = Some(next_ancestor);
+ }
+ last_ancestor
+}
+
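+/// Renders the code that replaces the extracted range at the call site: an optional `let`
+/// binding for outliving locals, the call itself (awaited and/or wrapped by the `FlowHandler`
+/// as needed), and a trailing comma or semicolon where required.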
+fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> String {
+ let ret_ty = fun.return_type(ctx);
+
+ let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx)));
+ let name = fun.name.clone();
+ let mut call_expr = if fun.self_param.is_some() {
+ let self_arg = make::expr_path(make::ext::ident_path("self"));
+ make::expr_method_call(self_arg, name, args)
+ } else {
+ let func = make::expr_path(make::path_unqualified(make::path_segment(name)));
+ make::expr_call(func, args)
+ };
+
+ let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
+
+ if fun.control_flow.is_async {
+ call_expr = make::expr_await(call_expr);
+ }
+ let expr = handler.make_call_expr(call_expr).indent(indent);
+
+ let mut_modifier = |var: &OutlivedLocal| if var.mut_usage_outside_body { "mut " } else { "" };
+
+ let mut buf = String::new();
+ match fun.outliving_locals.as_slice() {
+ [] => {}
+ [var] => {
+ format_to!(buf, "let {}{} = ", mut_modifier(var), var.local.name(ctx.db()))
+ }
+ vars => {
+ buf.push_str("let (");
+ let bindings = vars.iter().format_with(", ", |local, f| {
+ f(&format_args!("{}{}", mut_modifier(local), local.local.name(ctx.db())))
+ });
+ format_to!(buf, "{}", bindings);
+ buf.push_str(") = ");
+ }
+ }
+
+ format_to!(buf, "{}", expr);
+ let insert_comma = fun
+ .body
+ .parent()
+ .and_then(ast::MatchArm::cast)
+ .map_or(false, |it| it.comma_token().is_none());
+ if insert_comma {
+ buf.push(',');
+ } else if fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) {
+ buf.push(';');
+ }
+ buf
+}
+
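+/// How control flow that escapes the extracted body (`return`, `break`, `continue`, `?`) is
+/// encoded in the new function's return value and decoded again at the call site, e.g. as
+/// `Option`/`Result` values or `ControlFlow::Break`.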
+enum FlowHandler {
+ None,
+ Try { kind: TryKind },
+ If { action: FlowKind },
+ IfOption { action: FlowKind },
+ MatchOption { none: FlowKind },
+ MatchResult { err: FlowKind },
+}
+
+impl FlowHandler {
+ fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
+ match &fun.control_flow.kind {
+ None => FlowHandler::None,
+ Some(flow_kind) => {
+ let action = flow_kind.clone();
+ if *ret_ty == FunType::Unit {
+ match flow_kind {
+ FlowKind::Return(None)
+ | FlowKind::Break(_, None)
+ | FlowKind::Continue(_) => FlowHandler::If { action },
+ FlowKind::Return(_) | FlowKind::Break(_, _) => {
+ FlowHandler::IfOption { action }
+ }
+ FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
+ }
+ } else {
+ match flow_kind {
+ FlowKind::Return(None)
+ | FlowKind::Break(_, None)
+ | FlowKind::Continue(_) => FlowHandler::MatchOption { none: action },
+ FlowKind::Return(_) | FlowKind::Break(_, _) => {
+ FlowHandler::MatchResult { err: action }
+ }
+ FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
+ }
+ }
+ }
+ }
+ }
+
+ fn make_call_expr(&self, call_expr: ast::Expr) -> ast::Expr {
+ match self {
+ FlowHandler::None => call_expr,
+ FlowHandler::Try { kind: _ } => make::expr_try(call_expr),
+ FlowHandler::If { action } => {
+ let action = action.make_result_handler(None);
+ let stmt = make::expr_stmt(action);
+ let block = make::block_expr(iter::once(stmt.into()), None);
+ let controlflow_break_path = make::path_from_text("ControlFlow::Break");
+ let condition = make::expr_let(
+ make::tuple_struct_pat(
+ controlflow_break_path,
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ call_expr,
+ );
+ make::expr_if(condition.into(), block, None)
+ }
+ FlowHandler::IfOption { action } => {
+ let path = make::ext::ident_path("Some");
+ let value_pat = make::ext::simple_ident_pat(make::name("value"));
+ let pattern = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let cond = make::expr_let(pattern.into(), call_expr);
+ let value = make::expr_path(make::ext::ident_path("value"));
+ let action_expr = action.make_result_handler(Some(value));
+ let action_stmt = make::expr_stmt(action_expr);
+ let then = make::block_expr(iter::once(action_stmt.into()), None);
+ make::expr_if(cond.into(), then, None)
+ }
+ FlowHandler::MatchOption { none } => {
+ let some_name = "value";
+
+ let some_arm = {
+ let path = make::ext::ident_path("Some");
+ let value_pat = make::ext::simple_ident_pat(make::name(some_name));
+ let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let value = make::expr_path(make::ext::ident_path(some_name));
+ make::match_arm(iter::once(pat.into()), None, value)
+ };
+ let none_arm = {
+ let path = make::ext::ident_path("None");
+ let pat = make::path_pat(path);
+ make::match_arm(iter::once(pat), None, none.make_result_handler(None))
+ };
+ let arms = make::match_arm_list(vec![some_arm, none_arm]);
+ make::expr_match(call_expr, arms)
+ }
+ FlowHandler::MatchResult { err } => {
+ let ok_name = "value";
+ let err_name = "value";
+
+ let ok_arm = {
+ let path = make::ext::ident_path("Ok");
+ let value_pat = make::ext::simple_ident_pat(make::name(ok_name));
+ let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let value = make::expr_path(make::ext::ident_path(ok_name));
+ make::match_arm(iter::once(pat.into()), None, value)
+ };
+ let err_arm = {
+ let path = make::ext::ident_path("Err");
+ let value_pat = make::ext::simple_ident_pat(make::name(err_name));
+ let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let value = make::expr_path(make::ext::ident_path(err_name));
+ make::match_arm(
+ iter::once(pat.into()),
+ None,
+ err.make_result_handler(Some(value)),
+ )
+ };
+ let arms = make::match_arm_list(vec![ok_arm, err_arm]);
+ make::expr_match(call_expr, arms)
+ }
+ }
+ }
+}
+
+fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local) -> ast::Expr {
+ let name = var.name(ctx.db()).to_string();
+ make::expr_path(make::ext::ident_path(&name))
+}
+
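+/// Renders the full definition of the extracted function as a string: modifiers, name
+/// (with a snippet placeholder when snippets are supported), generic params, parameter
+/// list, return type, `where` clause, and body.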
+fn format_function(
+ ctx: &AssistContext<'_>,
+ module: hir::Module,
+ fun: &Function,
+ old_indent: IndentLevel,
+ new_indent: IndentLevel,
+) -> String {
+ let mut fn_def = String::new();
+ let params = fun.make_param_list(ctx, module);
+ let ret_ty = fun.make_ret_ty(ctx, module);
+ let body = make_body(ctx, old_indent, new_indent, fun);
+ let const_kw = if fun.mods.is_const { "const " } else { "" };
+ let async_kw = if fun.control_flow.is_async { "async " } else { "" };
+ let unsafe_kw = if fun.control_flow.is_unsafe { "unsafe " } else { "" };
+ let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
+ match ctx.config.snippet_cap {
+ Some(_) => format_to!(
+ fn_def,
+ "\n\n{}{}{}{}fn $0{}",
+ new_indent,
+ const_kw,
+ async_kw,
+ unsafe_kw,
+ fun.name,
+ ),
+ None => format_to!(
+ fn_def,
+ "\n\n{}{}{}{}fn {}",
+ new_indent,
+ const_kw,
+ async_kw,
+ unsafe_kw,
+ fun.name,
+ ),
+ }
+
+ if let Some(generic_params) = generic_params {
+ format_to!(fn_def, "{}", generic_params);
+ }
+
+ format_to!(fn_def, "{}", params);
+
+ if let Some(ret_ty) = ret_ty {
+ format_to!(fn_def, " {}", ret_ty);
+ }
+
+ if let Some(where_clause) = where_clause {
+ format_to!(fn_def, " {}", where_clause);
+ }
+
+ format_to!(fn_def, " {}", body);
+
+ fn_def
+}
+
+fn make_generic_params_and_where_clause(
+ ctx: &AssistContext<'_>,
+ fun: &Function,
+) -> (Option<ast::GenericParamList>, Option<ast::WhereClause>) {
+ let used_type_params = fun.type_params(ctx);
+
+ let generic_param_list = make_generic_param_list(ctx, fun, &used_type_params);
+ let where_clause = make_where_clause(ctx, fun, &used_type_params);
+
+ (generic_param_list, where_clause)
+}
+
+fn make_generic_param_list(
+ ctx: &AssistContext<'_>,
+ fun: &Function,
+ used_type_params: &[TypeParam],
+) -> Option<ast::GenericParamList> {
+ let mut generic_params = fun
+ .mods
+ .generic_param_lists
+ .iter()
+ .flat_map(|parent_params| {
+ parent_params
+ .generic_params()
+ .filter(|param| param_is_required(ctx, param, used_type_params))
+ })
+ .peekable();
+
+ if generic_params.peek().is_some() {
+ Some(make::generic_param_list(generic_params))
+ } else {
+ None
+ }
+}
+
+fn param_is_required(
+ ctx: &AssistContext<'_>,
+ param: &ast::GenericParam,
+ used_type_params: &[TypeParam],
+) -> bool {
+ match param {
+ ast::GenericParam::ConstParam(_) | ast::GenericParam::LifetimeParam(_) => false,
+ ast::GenericParam::TypeParam(type_param) => match &ctx.sema.to_def(type_param) {
+ Some(def) => used_type_params.contains(def),
+ _ => false,
+ },
+ }
+}
+
+fn make_where_clause(
+ ctx: &AssistContext<'_>,
+ fun: &Function,
+ used_type_params: &[TypeParam],
+) -> Option<ast::WhereClause> {
+ let mut predicates = fun
+ .mods
+ .where_clauses
+ .iter()
+ .flat_map(|parent_where_clause| {
+ parent_where_clause
+ .predicates()
+ .filter(|pred| pred_is_required(ctx, pred, used_type_params))
+ })
+ .peekable();
+
+ if predicates.peek().is_some() {
+ Some(make::where_clause(predicates))
+ } else {
+ None
+ }
+}
+
+fn pred_is_required(
+ ctx: &AssistContext<'_>,
+ pred: &ast::WherePred,
+ used_type_params: &[TypeParam],
+) -> bool {
+ match resolved_type_param(ctx, pred) {
+ Some(it) => used_type_params.contains(&it),
+ None => false,
+ }
+}
+
+fn resolved_type_param(ctx: &AssistContext<'_>, pred: &ast::WherePred) -> Option<TypeParam> {
+ let path = match pred.ty()? {
+ ast::Type::PathType(path_type) => path_type.path(),
+ _ => None,
+ }?;
+
+ match ctx.sema.resolve_path(&path)? {
+ PathResolution::TypeParam(type_param) => Some(type_param),
+ _ => None,
+ }
+}
+
+impl Function {
+ /// Collect all the `TypeParam`s used in the `body` and `params`.
+ fn type_params(&self, ctx: &AssistContext<'_>) -> Vec<TypeParam> {
+ let type_params_in_descendant_paths =
+ self.body.descendant_paths().filter_map(|it| match ctx.sema.resolve_path(&it) {
+ Some(PathResolution::TypeParam(type_param)) => Some(type_param),
+ _ => None,
+ });
+ let type_params_in_params = self.params.iter().filter_map(|p| p.ty.as_type_param(ctx.db()));
+ type_params_in_descendant_paths.chain(type_params_in_params).collect()
+ }
+
+ fn make_param_list(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::ParamList {
+ let self_param = self.self_param.clone();
+ let params = self.params.iter().map(|param| param.to_param(ctx, module));
+ make::param_list(self_param, params)
+ }
+
+ fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option<ast::RetType> {
+ let fun_ty = self.return_type(ctx);
+ let handler = if self.mods.is_in_tail {
+ FlowHandler::None
+ } else {
+ FlowHandler::from_ret_ty(self, &fun_ty)
+ };
+ let ret_ty = match &handler {
+ FlowHandler::None => {
+ if matches!(fun_ty, FunType::Unit) {
+ return None;
+ }
+ fun_ty.make_ty(ctx, module)
+ }
+ FlowHandler::Try { kind: TryKind::Option } => {
+ make::ext::ty_option(fun_ty.make_ty(ctx, module))
+ }
+ FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => {
+ let handler_ty = parent_ret_ty
+ .type_arguments()
+ .nth(1)
+ .map(|ty| make_ty(&ty, ctx, module))
+ .unwrap_or_else(make::ty_placeholder);
+ make::ext::ty_result(fun_ty.make_ty(ctx, module), handler_ty)
+ }
+ FlowHandler::If { .. } => make::ty("ControlFlow<()>"),
+ FlowHandler::IfOption { action } => {
+ let handler_ty = action
+ .expr_ty(ctx)
+ .map(|ty| make_ty(&ty, ctx, module))
+ .unwrap_or_else(make::ty_placeholder);
+ make::ext::ty_option(handler_ty)
+ }
+ FlowHandler::MatchOption { .. } => make::ext::ty_option(fun_ty.make_ty(ctx, module)),
+ FlowHandler::MatchResult { err } => {
+ let handler_ty = err
+ .expr_ty(ctx)
+ .map(|ty| make_ty(&ty, ctx, module))
+ .unwrap_or_else(make::ty_placeholder);
+ make::ext::ty_result(fun_ty.make_ty(ctx, module), handler_ty)
+ }
+ };
+ Some(make::ret_type(ret_ty))
+ }
+}
+
+impl FunType {
+ fn make_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+ match self {
+ FunType::Unit => make::ty_unit(),
+ FunType::Single(ty) => make_ty(ty, ctx, module),
+ FunType::Tuple(types) => match types.as_slice() {
+ [] => {
+ stdx::never!("tuple type with 0 elements");
+ make::ty_unit()
+ }
+ [ty] => {
+ stdx::never!("tuple type with 1 element");
+ make_ty(ty, ctx, module)
+ }
+ types => {
+ let types = types.iter().map(|ty| make_ty(ty, ctx, module));
+ make::ty_tuple(types)
+ }
+ },
+ }
+ }
+}
+
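+/// Builds the block that becomes the extracted function's body, re-indenting the selected
+/// code and wrapping the tail expression according to the `FlowHandler` (e.g. in
+/// `Ok(..)`/`Some(..)`, or followed by `ControlFlow::Continue(())`).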
+fn make_body(
+ ctx: &AssistContext<'_>,
+ old_indent: IndentLevel,
+ new_indent: IndentLevel,
+ fun: &Function,
+) -> ast::BlockExpr {
+ let ret_ty = fun.return_type(ctx);
+ let handler = if fun.mods.is_in_tail {
+ FlowHandler::None
+ } else {
+ FlowHandler::from_ret_ty(fun, &ret_ty)
+ };
+
+ let block = match &fun.body {
+ FunctionBody::Expr(expr) => {
+ let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax());
+ let expr = ast::Expr::cast(expr).unwrap();
+ match expr {
+ ast::Expr::BlockExpr(block) => {
+ // If the extracted expression is itself a block, there is no need to wrap it inside another block.
+ let block = block.dedent(old_indent);
+ // Recreate the block for formatting consistency with other extracted functions.
+ make::block_expr(block.statements(), block.tail_expr())
+ }
+ _ => {
+ let expr = expr.dedent(old_indent).indent(IndentLevel(1));
+
+ make::block_expr(Vec::new(), Some(expr))
+ }
+ }
+ }
+ FunctionBody::Span { parent, text_range } => {
+ let mut elements: Vec<_> = parent
+ .syntax()
+ .children_with_tokens()
+ .filter(|it| text_range.contains_range(it.text_range()))
+ .map(|it| match &it {
+ syntax::NodeOrToken::Node(n) => syntax::NodeOrToken::Node(
+ rewrite_body_segment(ctx, &fun.params, &handler, n),
+ ),
+ _ => it,
+ })
+ .collect();
+
+ let mut tail_expr = match &elements.last() {
+ Some(syntax::NodeOrToken::Node(node)) if ast::Expr::can_cast(node.kind()) => {
+ ast::Expr::cast(node.clone())
+ }
+ _ => None,
+ };
+
+ match tail_expr {
+ Some(_) => {
+ elements.pop();
+ }
+ None => match fun.outliving_locals.as_slice() {
+ [] => {}
+ [var] => {
+ tail_expr = Some(path_expr_from_local(ctx, var.local));
+ }
+ vars => {
+ let exprs = vars.iter().map(|var| path_expr_from_local(ctx, var.local));
+ let expr = make::expr_tuple(exprs);
+ tail_expr = Some(expr);
+ }
+ },
+ };
+
+ let body_indent = IndentLevel(1);
+ let elements = elements
+ .into_iter()
+ .map(|node_or_token| match &node_or_token {
+ syntax::NodeOrToken::Node(node) => match ast::Stmt::cast(node.clone()) {
+ Some(stmt) => {
+ let indented = stmt.dedent(old_indent).indent(body_indent);
+ let ast_node = indented.syntax().clone_subtree();
+ syntax::NodeOrToken::Node(ast_node)
+ }
+ _ => node_or_token,
+ },
+ _ => node_or_token,
+ })
+ .collect::<Vec<SyntaxElement>>();
+ let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent));
+
+ make::hacky_block_expr_with_comments(elements, tail_expr)
+ }
+ };
+
+ let block = match &handler {
+ FlowHandler::None => block,
+ FlowHandler::Try { kind } => {
+ let block = with_default_tail_expr(block, make::expr_unit());
+ map_tail_expr(block, |tail_expr| {
+ let constructor = match kind {
+ TryKind::Option => "Some",
+ TryKind::Result { .. } => "Ok",
+ };
+ let func = make::expr_path(make::ext::ident_path(constructor));
+ let args = make::arg_list(iter::once(tail_expr));
+ make::expr_call(func, args)
+ })
+ }
+ FlowHandler::If { .. } => {
+ let controlflow_continue = make::expr_call(
+ make::expr_path(make::path_from_text("ControlFlow::Continue")),
+ make::arg_list(iter::once(make::expr_unit())),
+ );
+ with_tail_expr(block, controlflow_continue)
+ }
+ FlowHandler::IfOption { .. } => {
+ let none = make::expr_path(make::ext::ident_path("None"));
+ with_tail_expr(block, none)
+ }
+ FlowHandler::MatchOption { .. } => map_tail_expr(block, |tail_expr| {
+ let some = make::expr_path(make::ext::ident_path("Some"));
+ let args = make::arg_list(iter::once(tail_expr));
+ make::expr_call(some, args)
+ }),
+ FlowHandler::MatchResult { .. } => map_tail_expr(block, |tail_expr| {
+ let ok = make::expr_path(make::ext::ident_path("Ok"));
+ let args = make::arg_list(iter::once(tail_expr));
+ make::expr_call(ok, args)
+ }),
+ };
+
+ block.indent(new_indent)
+}
+
+fn map_tail_expr(block: ast::BlockExpr, f: impl FnOnce(ast::Expr) -> ast::Expr) -> ast::BlockExpr {
+ let tail_expr = match block.tail_expr() {
+ Some(tail_expr) => tail_expr,
+ None => return block,
+ };
+ make::block_expr(block.statements(), Some(f(tail_expr)))
+}
+
+fn with_default_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr {
+ match block.tail_expr() {
+ Some(_) => block,
+ None => make::block_expr(block.statements(), Some(tail_expr)),
+ }
+}
+
+fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr {
+ let stmt_tail = block.tail_expr().map(|expr| make::expr_stmt(expr).into());
+ let stmts = block.statements().chain(stmt_tail);
+ make::block_expr(stmts, Some(tail_expr))
+}
+
+fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
+ ty.display_source_code(ctx.db(), module.into()).ok().unwrap_or_else(|| "_".to_string())
+}
+
+fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+ let ty_str = format_type(ty, ctx, module);
+ make::ty(&ty_str)
+}
+
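+/// Rewrites one piece of the selected code for use inside the new function: fixes up usages
+/// of params that are now passed by reference and rewrites escaping control flow.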
+fn rewrite_body_segment(
+ ctx: &AssistContext<'_>,
+ params: &[Param],
+ handler: &FlowHandler,
+ syntax: &SyntaxNode,
+) -> SyntaxNode {
+ let syntax = fix_param_usages(ctx, params, syntax);
+ update_external_control_flow(handler, &syntax);
+ syntax
+}
+
+/// change all usages to account for added `&`/`&mut` for some params
+fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
+ let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();
+
+ let tm = TreeMutator::new(syntax);
+
+ for param in params {
+ if !param.kind().is_ref() {
+ continue;
+ }
+
+ let usages = LocalUsages::find_local_usages(ctx, param.var);
+ let usages = usages
+ .iter()
+ .filter(|reference| syntax.text_range().contains_range(reference.range))
+ .filter_map(|reference| path_element_of_reference(syntax, reference))
+ .map(|expr| tm.make_mut(&expr));
+
+ usages_for_param.push((param, usages.collect()));
+ }
+
+ let res = tm.make_syntax_mut(syntax);
+
+ for (param, usages) in usages_for_param {
+ for usage in usages {
+ match usage.syntax().ancestors().skip(1).find_map(ast::Expr::cast) {
+ Some(ast::Expr::MethodCallExpr(_) | ast::Expr::FieldExpr(_)) => {
+ // do nothing
+ }
+ Some(ast::Expr::RefExpr(node))
+ if param.kind() == ParamKind::MutRef && node.mut_token().is_some() =>
+ {
+ ted::replace(node.syntax(), node.expr().unwrap().syntax());
+ }
+ Some(ast::Expr::RefExpr(node))
+ if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() =>
+ {
+ ted::replace(node.syntax(), node.expr().unwrap().syntax());
+ }
+ Some(_) | None => {
+ let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update();
+ ted::replace(usage.syntax(), p.syntax())
+ }
+ }
+ }
+ }
+
+ res
+}
+
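+/// Walks `syntax` and rewrites `return`/`break`/`continue` expressions that refer to the
+/// enclosing function or loop (skipping ones that belong to nested items or loops defined
+/// inside the extracted code) into the encoding chosen by the `FlowHandler`.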
+fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
+ let mut nested_loop = None;
+ let mut nested_scope = None;
+ for event in syntax.preorder() {
+ match event {
+ WalkEvent::Enter(e) => match e.kind() {
+ SyntaxKind::LOOP_EXPR | SyntaxKind::WHILE_EXPR | SyntaxKind::FOR_EXPR => {
+ if nested_loop.is_none() {
+ nested_loop = Some(e.clone());
+ }
+ }
+ SyntaxKind::FN
+ | SyntaxKind::CONST
+ | SyntaxKind::STATIC
+ | SyntaxKind::IMPL
+ | SyntaxKind::MODULE => {
+ if nested_scope.is_none() {
+ nested_scope = Some(e.clone());
+ }
+ }
+ _ => {}
+ },
+ WalkEvent::Leave(e) => {
+ if nested_scope.is_none() {
+ if let Some(expr) = ast::Expr::cast(e.clone()) {
+ match expr {
+ ast::Expr::ReturnExpr(return_expr) if nested_scope.is_none() => {
+ let expr = return_expr.expr();
+ if let Some(replacement) = make_rewritten_flow(handler, expr) {
+ ted::replace(return_expr.syntax(), replacement.syntax())
+ }
+ }
+ ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => {
+ let expr = break_expr.expr();
+ if let Some(replacement) = make_rewritten_flow(handler, expr) {
+ ted::replace(break_expr.syntax(), replacement.syntax())
+ }
+ }
+ ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => {
+ if let Some(replacement) = make_rewritten_flow(handler, None) {
+ ted::replace(continue_expr.syntax(), replacement.syntax())
+ }
+ }
+ _ => {
+ // do nothing
+ }
+ }
+ }
+ }
+
+ if nested_loop.as_ref() == Some(&e) {
+ nested_loop = None;
+ }
+ if nested_scope.as_ref() == Some(&e) {
+ nested_scope = None;
+ }
+ }
+ };
+ }
+}
+
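+/// Builds the expression that replaces an escaping control-flow expression inside the new
+/// body, e.g. `return ControlFlow::Break(())`, `return Some(value)`, `return None`, or
+/// `return Err(value)`, depending on the handler; `None` means no rewrite is needed.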
+fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
+ let value = match handler {
+ FlowHandler::None | FlowHandler::Try { .. } => return None,
+ FlowHandler::If { .. } => make::expr_call(
+ make::expr_path(make::path_from_text("ControlFlow::Break")),
+ make::arg_list(iter::once(make::expr_unit())),
+ ),
+ FlowHandler::IfOption { .. } => {
+ let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
+ let args = make::arg_list(iter::once(expr));
+ make::expr_call(make::expr_path(make::ext::ident_path("Some")), args)
+ }
+ FlowHandler::MatchOption { .. } => make::expr_path(make::ext::ident_path("None")),
+ FlowHandler::MatchResult { .. } => {
+ let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
+ let args = make::arg_list(iter::once(expr));
+ make::expr_call(make::expr_path(make::ext::ident_path("Err")), args)
+ }
+ };
+ Some(make::expr_return(Some(value)).clone_for_update())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn no_args_from_binary_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ foo($01 + 1$0);
+}
+"#,
+ r#"
+fn foo() {
+ foo(fun_name());
+}
+
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_binary_expr_in_module() {
+ check_assist(
+ extract_function,
+ r#"
+mod bar {
+ fn foo() {
+ foo($01 + 1$0);
+ }
+}
+"#,
+ r#"
+mod bar {
+ fn foo() {
+ foo(fun_name());
+ }
+
+ fn $0fun_name() -> i32 {
+ 1 + 1
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_binary_expr_indented() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0{ 1 + 1 }$0;
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_stmt_with_last_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ let k = 1;
+ $0let m = 1;
+ m + 1$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let k = 1;
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ let m = 1;
+ m + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_stmt_unit() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let k = 3;
+ $0let m = 1;
+ let n = m + 1;$0
+ let g = 5;
+}
+"#,
+ r#"
+fn foo() {
+ let k = 3;
+ fun_name();
+ let g = 5;
+}
+
+fn $0fun_name() {
+ let m = 1;
+ let n = m + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0if true { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ if true { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if_else() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0if true { 1 } else { 2 }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ if true { 1 } else { 2 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if_let_else() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0if let true = false { 1 } else { 2 }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ if let true = false { 1 } else { 2 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_match() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0match true {
+ true => 1,
+ false => 2,
+ }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ match true {
+ true => 1,
+ false => 2,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_while() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0while true { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ while true { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_for() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0for v in &[0, 1] { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ for v in &[0, 1] { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_loop_unit() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0loop {
+ let m = 1;
+ }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name()
+}
+
+fn $0fun_name() -> ! {
+ loop {
+ let m = 1;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_loop_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let v = $0loop {
+ let m = 1;
+ break m;
+ }$0;
+}
+"#,
+ r#"
+fn foo() {
+ let v = fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ loop {
+ let m = 1;
+ break m;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_match() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let v: i32 = $0match Some(1) {
+ Some(x) => x,
+ None => 0,
+ }$0;
+}
+"#,
+ r#"
+fn foo() {
+ let v: i32 = fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ match Some(1) {
+ Some(x) => x,
+ None => 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_partial_block_single_line() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut v = $0n * n;$0
+ v += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut v = fun_name(n);
+ v += 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let mut v = n * n;
+ v
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_partial_block() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let m = 2;
+ let n = 1;
+ let mut v = m $0* n;
+ let mut w = 3;$0
+ v += 1;
+ w += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let m = 2;
+ let n = 1;
+ let (mut v, mut w) = fun_name(m, n);
+ v += 1;
+ w += 1;
+}
+
+fn $0fun_name(m: i32, n: i32) -> (i32, i32) {
+ let mut v = m * n;
+ let mut w = 3;
+ (v, w)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn argument_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0n+2$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ n+2
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn argument_used_twice_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0n+n$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ n+n
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn two_arguments_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ let m = 3;
+ $0n+n*m$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ let m = 3;
+ fun_name(n, m)
+}
+
+fn $0fun_name(n: u32, m: u32) -> u32 {
+ n+n*m
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn argument_and_locals() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0let m = 1;
+ n + m$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ let m = 1;
+ n + m
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn in_comment_is_not_applicable() {
+ cov_mark::check!(extract_function_in_comment_is_not_applicable);
+ check_assist_not_applicable(extract_function, r"fn main() { 1 + /* $0comment$0 */ 1; }");
+ }
+
+ #[test]
+ fn part_of_expr_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $01$0 + 1;
+}
+"#,
+ r#"
+fn foo() {
+ fun_name() + 1;
+}
+
+fn $0fun_name() -> i32 {
+ 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0bar(1 + 1)$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ bar(1 + 1)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_from_nested() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => (fun_name(), true)
+ _ => (0, false)
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_from_closure() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let lambda = |x: u32| $0x * 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| fun_name(x);
+}
+
+fn $0fun_name(x: u32) -> u32 {
+ x * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_return_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ $0return 2 + 2$0;
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ return fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_add_extra_whitespace() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+
+
+ $0return 2 + 2$0;
+}
+"#,
+ r#"
+fn foo() -> u32 {
+
+
+ return fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let result = loop {
+ $0break 2 + 2$0;
+ };
+}
+"#,
+ r#"
+fn main() {
+ let result = loop {
+ break fun_name();
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_cast() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let v = $00f32 as u32$0;
+}
+"#,
+ r#"
+fn main() {
+ let v = fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 0f32 as u32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_not_applicable() {
+ check_assist_not_applicable(extract_function, r"fn foo() { $0return$0; } ");
+ }
+
+ #[test]
+ fn method_to_freestanding() {
+ check_assist(
+ extract_function,
+ r#"
+struct S;
+
+impl S {
+ fn foo(&self) -> i32 {
+ $01+1$0
+ }
+}
+"#,
+ r#"
+struct S;
+
+impl S {
+ fn foo(&self) -> i32 {
+ fun_name()
+ }
+}
+
+fn $0fun_name() -> i32 {
+ 1+1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_reference() {
+ check_assist(
+ extract_function,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&self) -> i32 {
+ $0self.f+self.f$0
+ }
+}
+"#,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&self) -> i32 {
+ self.fun_name()
+ }
+
+ fn $0fun_name(&self) -> i32 {
+ self.f+self.f
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_mut() {
+ check_assist(
+ extract_function,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&mut self) {
+ $0self.f += 1;$0
+ }
+}
+"#,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&mut self) {
+ self.fun_name();
+ }
+
+ fn $0fun_name(&mut self) {
+ self.f += 1;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn variable_defined_inside_and_used_after_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let k = n * n;$0
+ let m = k + 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let k = fun_name(n);
+ let m = k + 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let k = n * n;
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn variable_defined_inside_and_used_after_mutably_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let mut k = n * n;$0
+ k += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut k = fun_name(n);
+ k += 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let mut k = n * n;
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn two_variables_defined_inside_and_used_after_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let k = n * n;
+ let m = k + 2;$0
+ let h = k + m;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let (k, m) = fun_name(n);
+ let h = k + m;
+}
+
+fn $0fun_name(n: i32) -> (i32, i32) {
+ let k = n * n;
+ let m = k + 2;
+ (k, m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_variables_defined_inside_and_used_after_mutably_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let mut k = n * n;
+ let mut m = k + 2;
+ let mut o = m + 3;
+ o += 1;$0
+ k += o;
+ m = 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let (mut k, mut m, o) = fun_name(n);
+ k += o;
+ m = 1;
+}
+
+fn $0fun_name(n: i32) -> (i32, i32, i32) {
+ let mut k = n * n;
+ let mut m = k + 2;
+ let mut o = m + 3;
+ o += 1;
+ (k, m, o)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nontrivial_patterns_define_variables() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter(i32);
+fn foo() {
+ $0let Counter(n) = Counter(0);$0
+ let m = n;
+}
+"#,
+ r#"
+struct Counter(i32);
+fn foo() {
+ let n = fun_name();
+ let m = n;
+}
+
+fn $0fun_name() -> i32 {
+ let Counter(n) = Counter(0);
+ n
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_with_two_fields_pattern_define_variables() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter { n: i32, m: i32 };
+fn foo() {
+ $0let Counter { n, m: k } = Counter { n: 1, m: 2 };$0
+ let h = n + k;
+}
+"#,
+ r#"
+struct Counter { n: i32, m: i32 };
+fn foo() {
+ let (n, k) = fun_name();
+ let h = n + k;
+}
+
+fn $0fun_name() -> (i32, i32) {
+ let Counter { n, m: k } = Counter { n: 1, m: 2 };
+ (n, k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_var_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0n += 1;$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_field_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+struct C { n: i32 }
+fn foo() {
+ let mut c = C { n: 0 };
+ $0c.n += 1;$0
+ let m = c.n + 1;
+}
+"#,
+ r#"
+struct C { n: i32 }
+fn foo() {
+ let mut c = C { n: 0 };
+ fun_name(&mut c);
+ let m = c.n + 1;
+}
+
+fn $0fun_name(c: &mut C) {
+ c.n += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_nested_field_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+struct P { n: i32}
+struct C { p: P }
+fn foo() {
+ let mut c = C { p: P { n: 0 } };
+ let mut v = C { p: P { n: 0 } };
+ let u = C { p: P { n: 0 } };
+ $0c.p.n += u.p.n;
+ let r = &mut v.p.n;$0
+ let m = c.p.n + v.p.n + u.p.n;
+}
+"#,
+ r#"
+struct P { n: i32}
+struct C { p: P }
+fn foo() {
+ let mut c = C { p: P { n: 0 } };
+ let mut v = C { p: P { n: 0 } };
+ let u = C { p: P { n: 0 } };
+ fun_name(&mut c, &u, &mut v);
+ let m = c.p.n + v.p.n + u.p.n;
+}
+
+fn $0fun_name(c: &mut C, u: &C, v: &mut C) {
+ c.p.n += u.p.n;
+ let r = &mut v.p.n;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_many_usages_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n += n;
+ bar(n);
+ bar(n+1);
+ bar(n*n);
+ bar(&n);
+ n.inc();
+ let v = &mut n;
+ *v = v.succ();
+ n.succ();$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += *n;
+ bar(*n);
+ bar(*n+1);
+ bar(*n**n);
+ bar(&*n);
+ n.inc();
+ let v = n;
+ *v = v.succ();
+ n.succ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_many_usages_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0{
+ n += n;
+ bar(n);
+ bar(n+1);
+ bar(n*n);
+ bar(&n);
+ n.inc();
+ let v = &mut n;
+ *v = v.succ();
+ n.succ();
+ }$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += *n;
+ bar(*n);
+ bar(*n+1);
+ bar(*n**n);
+ bar(&*n);
+ n.inc();
+ let v = n;
+ *v = v.succ();
+ n.succ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_by_value() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0n += 1;$0
+}
+"#,
+ r"
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ n += 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn mut_param_because_of_mut_ref() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0let v = &mut n;
+ *v += 1;$0
+ let k = n;
+}
+"#,
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let k = n;
+}
+
+fn $0fun_name(n: &mut i32) {
+ let v = n;
+ *v += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_by_value_because_of_mut_ref() {
+ check_assist(
+ extract_function,
+ r"
+fn foo() {
+ let mut n = 1;
+ $0let v = &mut n;
+ *v += 1;$0
+}
+",
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ let v = &mut n;
+ *v += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_method_call() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn inc(&mut self);
+}
+impl I for i32 {
+ fn inc(&mut self) { *self += 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n.inc();$0
+}
+"#,
+ r#"
+trait I {
+ fn inc(&mut self);
+}
+impl I for i32 {
+ fn inc(&mut self) { *self += 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ n.inc();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shared_method_call() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn succ(&self);
+}
+impl I for i32 {
+ fn succ(&self) { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n.succ();$0
+}
+"#,
+ r"
+trait I {
+ fn succ(&self);
+}
+impl I for i32 {
+ fn succ(&self) { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(n: i32) {
+ n.succ();
+}
+",
+ );
+ }
+
+ #[test]
+ fn mut_method_call_with_other_receiver() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn inc(&mut self, n: i32);
+}
+impl I for i32 {
+ fn inc(&mut self, n: i32) { *self += n }
+}
+fn foo() {
+ let mut n = 1;
+ $0let mut m = 2;
+ m.inc(n);$0
+}
+"#,
+ r"
+trait I {
+ fn inc(&mut self, n: i32);
+}
+impl I for i32 {
+ fn inc(&mut self, n: i32) { *self += n }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(n: i32) {
+ let mut m = 2;
+ m.inc(n);
+}
+",
+ );
+ }
+
+ #[test]
+ fn non_copy_without_usages_after() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+}
+"#,
+ r"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(c);
+}
+
+fn $0fun_name(c: Counter) {
+ let n = c.0;
+}
+",
+ );
+ }
+
+ #[test]
+ fn non_copy_used_after() {
+ check_assist(
+ extract_function,
+ r"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+ let m = c.0;
+}
+",
+ r#"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(&c);
+ let m = c.0;
+}
+
+fn $0fun_name(c: &Counter) {
+ let n = c.0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn copy_used_after() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy
+fn foo() {
+ let n = 0;
+ $0let m = n;$0
+ let k = n;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 0;
+ fun_name(n);
+ let k = n;
+}
+
+fn $0fun_name(n: i32) {
+ let m = n;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn copy_custom_used_after() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy, derive
+#[derive(Clone, Copy)]
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+ let m = c.0;
+}
+"#,
+ r#"
+#[derive(Clone, Copy)]
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(c);
+ let m = c.0;
+}
+
+fn $0fun_name(c: Counter) {
+ let n = c.0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indented_stmts() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ if true {
+ loop {
+ $0let n = 1;
+ let m = 2;$0
+ }
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if true {
+ loop {
+ fun_name();
+ }
+ }
+}
+
+fn $0fun_name() {
+ let n = 1;
+ let m = 2;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indented_stmts_inside_mod() {
+ check_assist(
+ extract_function,
+ r#"
+mod bar {
+ fn foo() {
+ if true {
+ loop {
+ $0let n = 1;
+ let m = 2;$0
+ }
+ }
+ }
+}
+"#,
+ r#"
+mod bar {
+ fn foo() {
+ if true {
+ loop {
+ fun_name();
+ }
+ }
+ }
+
+ fn $0fun_name() {
+ let n = 1;
+ let m = 2;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;$0
+ let h = 1 + k;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let k = match fun_name(n) {
+ Some(value) => value,
+ None => break,
+ };
+ let h = 1 + k;
+ }
+}
+
+fn $0fun_name(n: i32) -> Option<i32> {
+ let m = n + 1;
+ return None;
+ let k = 2;
+ Some(k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_to_parent() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy, result
+fn foo() -> i64 {
+ let n = 1;
+ $0let m = n + 1;
+ return 1;
+ let k = 2;$0
+ (n + k) as i64
+}
+"#,
+ r#"
+fn foo() -> i64 {
+ let n = 1;
+ let k = match fun_name(n) {
+ Ok(value) => value,
+ Err(value) => return value,
+ };
+ (n + k) as i64
+}
+
+fn $0fun_name(n: i32) -> Result<i32, i64> {
+ let m = n + 1;
+ return Err(1);
+ let k = 2;
+ Ok(k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_and_continue() {
+ cov_mark::check!(external_control_flow_break_and_continue);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;
+ continue;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_and_break() {
+ cov_mark::check!(external_control_flow_return_and_bc);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;
+ return;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_with_if() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ loop {
+ let mut n = 1;
+ $0let m = n + 1;
+ break;
+ n += m;$0
+ let h = 1 + n;
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ loop {
+ let mut n = 1;
+ if let ControlFlow::Break(_) = fun_name(&mut n) {
+ break;
+ }
+ let h = 1 + n;
+ }
+}
+
+fn $0fun_name(n: &mut i32) -> ControlFlow<()> {
+ let m = *n + 1;
+ return ControlFlow::Break(());
+ *n += m;
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_nested() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ loop {
+ let mut n = 1;
+ $0let m = n + 1;
+ if m == 42 {
+ break;
+ }$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ loop {
+ let mut n = 1;
+ if let ControlFlow::Break(_) = fun_name(n) {
+ break;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name(n: i32) -> ControlFlow<()> {
+ let m = n + 1;
+ if m == 42 {
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_nested_labeled() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ 'bar: loop {
+ loop {
+ $0break 'bar;$0
+ }
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ 'bar: loop {
+ loop {
+ if let ControlFlow::Break(_) = fun_name() {
+ break 'bar;
+ }
+ }
+ }
+}
+
+fn $0fun_name() -> ControlFlow<()> {
+ return ControlFlow::Break(());
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn continue_loop_nested_labeled() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ 'bar: loop {
+ loop {
+ $0continue 'bar;$0
+ }
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ 'bar: loop {
+ loop {
+ if let ControlFlow::Break(_) = fun_name() {
+ continue 'bar;
+ }
+ }
+ }
+}
+
+fn $0fun_name() -> ControlFlow<()> {
+ return ControlFlow::Break(());
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_from_nested_loop() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;$0
+ let k = 1;
+ loop {
+ return;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Some(value) => value,
+ None => return,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ loop {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_from_nested_loop() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ loop {
+ break;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = fun_name();
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> i32 {
+ let k = 1;
+ loop {
+ break;
+ }
+ let m = k + 1;
+ m
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_from_nested_and_outer_loops() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ loop {
+ break;
+ }
+ if k == 42 {
+ break;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Some(value) => value,
+ None => break,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ loop {
+ break;
+ }
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_from_nested_fn() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ fn test() {
+ return;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = fun_name();
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> i32 {
+ let k = 1;
+ fn test() {
+ return;
+ }
+ let m = k + 1;
+ m
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ loop {
+ let n = 1;
+ if let Some(value) = fun_name() {
+ break value;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ if k == 42 {
+ return Some(3);
+ }
+ let m = k + 1;
+ None
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value_and_label() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ 'bar: loop {
+ let n = 1;
+ $0let k = 1;
+ if k == 42 {
+ break 'bar 4;
+ }
+ let m = k + 1;$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ 'bar: loop {
+ let n = 1;
+ if let Some(value) = fun_name() {
+ break 'bar value;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ if k == 42 {
+ return Some(4);
+ }
+ let m = k + 1;
+ None
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value_and_return() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i64 {
+ loop {
+ let n = 1;$0
+ let k = 1;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() -> i64 {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Ok(value) => value,
+ Err(value) => break value,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = 1;
+ if k == 42 {
+ return Err(3);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn bar() -> Option<i32> { None }
+fn foo() -> Option<()> {
+ let n = bar()?;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + m;
+ Some(())
+}
+"#,
+ r#"
+fn bar() -> Option<i32> { None }
+fn foo() -> Option<()> {
+ let n = bar()?;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Some(())
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = foo()?;
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option_unit() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ let n = 1;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + n;
+ Some(())
+}
+"#,
+ r#"
+fn foo() -> Option<()> {
+ let n = 1;
+ fun_name()?;
+ let h = 1 + n;
+ Some(())
+}
+
+fn $0fun_name() -> Option<()> {
+ let k = foo()?;
+ let m = k + 1;
+ Some(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_result() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Some(())
+}
+"#,
+ r#"
+fn foo() -> Option<()> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Some(())
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = foo()?;
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_result_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return Err(1);
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ if k == 42 {
+ return Err(1);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_and_break() {
+ cov_mark::check!(external_control_flow_try_and_bc);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ loop {
+ let n = Some(1);
+ $0let m = n? + 1;
+ break;
+ let k = 2;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+ Some(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_and_return_ok() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return Ok(1);
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ if k == 42 {
+ return Ok(1);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_usage_in_macro() {
+ check_assist(
+ extract_function,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+
+fn foo() {
+ let n = 1;
+ $0let k = n * m!(n);$0
+ let m = k + 1;
+}
+"#,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+
+fn foo() {
+ let n = 1;
+ let k = fun_name(n);
+ let m = k + 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let k = n * m!(n);
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+fn main() {
+ $0some_function().await;$0
+}
+
+async fn some_function() {
+
+}
+"#,
+ r#"
+fn main() {
+ fun_name().await;
+}
+
+async fn $0fun_name() {
+ some_function().await;
+}
+
+async fn some_function() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_and_result_not_producing_match_expr() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future, result
+async fn foo() -> Result<(), ()> {
+ $0async {}.await;
+ Err(())?$0
+}
+"#,
+ r#"
+async fn foo() -> Result<(), ()> {
+ fun_name().await?
+}
+
+async fn $0fun_name() -> Result<(), ()> {
+ async {}.await;
+ Err(())?
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_and_result_producing_match_expr() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+async fn foo() -> i32 {
+ loop {
+ let n = 1;$0
+ let k = async { 1 }.await;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+async fn foo() -> i32 {
+ loop {
+ let n = 1;
+ let m = match fun_name().await {
+ Ok(value) => value,
+ Err(value) => break value,
+ };
+ let h = 1 + m;
+ }
+}
+
+async fn $0fun_name() -> Result<i32, i32> {
+ let k = async { 1 }.await;
+ if k == 42 {
+ return Err(3);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_in_args() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+fn main() {
+ $0function_call("a", some_function().await);$0
+}
+
+async fn some_function() {
+
+}
+"#,
+ r#"
+fn main() {
+ fun_name().await;
+}
+
+async fn $0fun_name() {
+ function_call("a", some_function().await);
+}
+
+async fn some_function() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_extract_standalone_blocks() {
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn main() $0{}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_adds_comma_for_match_arm() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match 6 {
+ 100 => $0{ 100 }$0
+ _ => 0,
+ };
+}
+"#,
+ r#"
+fn main() {
+ match 6 {
+ 100 => fun_name(),
+ _ => 0,
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 100
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match 6 {
+ 100 => $0{ 100 }$0,
+ _ => 0,
+ };
+}
+"#,
+ r#"
+fn main() {
+ match 6 {
+ 100 => fun_name(),
+ _ => 0,
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 100
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_tear_comments_apart() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ /*$0*/
+ foo();
+ foo();
+ /*$0*/
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ /**/
+ foo();
+ foo();
+ /**/
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_tear_body_apart() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0foo();
+}$0
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_wrap_res_in_res() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ $0Result::<i32, i64>::Ok(0)?;
+ Ok(())$0
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ fun_name()?
+}
+
+fn $0fun_name() -> Result<(), i64> {
+ Result::<i32, i64>::Ok(0)?;
+ Ok(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_knows_const() {
+ check_assist(
+ extract_function,
+ r#"
+const fn foo() {
+ $0()$0
+}
+"#,
+ r#"
+const fn foo() {
+ fun_name();
+}
+
+const fn $0fun_name() {
+ ()
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+const FOO: () = {
+ $0()$0
+};
+"#,
+ r#"
+const FOO: () = {
+ fun_name();
+};
+
+const fn $0fun_name() {
+ ()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_move_outer_loop_vars() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut x = 5;
+ for _ in 0..10 {
+ $0x += 1;$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ let mut x = 5;
+ for _ in 0..10 {
+ fun_name(&mut x);
+ }
+}
+
+fn $0fun_name(x: &mut i32) {
+ *x += 1;
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ for _ in 0..10 {
+ let mut x = 5;
+ $0x += 1;$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ for _ in 0..10 {
+ let mut x = 5;
+ fun_name(x);
+ }
+}
+
+fn $0fun_name(mut x: i32) {
+ x += 1;
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let mut x = 5;
+ for _ in 0..10 {
+ $0x += 1;$0
+ }
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let mut x = 5;
+ for _ in 0..10 {
+ fun_name(&mut x);
+ }
+ }
+}
+
+fn $0fun_name(x: &mut i32) {
+ *x += 1;
+}
+"#,
+ );
+ }
+
+ // regression test for #9822
+ #[test]
+ fn extract_mut_ref_param_has_no_mut_binding_in_loop() {
+ check_assist(
+ extract_function,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self) {}
+}
+fn foo() {
+ let mut x = Foo;
+ while false {
+ let y = &mut x;
+ $0y.foo();$0
+ }
+ let z = x;
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self) {}
+}
+fn foo() {
+ let mut x = Foo;
+ while false {
+ let y = &mut x;
+ fun_name(y);
+ }
+ let z = x;
+}
+
+fn $0fun_name(y: &mut Foo) {
+ y.foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_macro_arg() {
+ check_assist(
+ extract_function,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+fn main() {
+ let bar = "bar";
+ $0m!(bar);$0
+}
+"#,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+fn main() {
+ let bar = "bar";
+ fun_name(bar);
+}
+
+fn $0fun_name(bar: &str) {
+ m!(bar);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolveable_types_default_to_placeholder() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let a = __unresolved;
+ let _ = $0{a}$0;
+}
+"#,
+ r#"
+fn foo() {
+ let a = __unresolved;
+ let _ = fun_name(a);
+}
+
+fn $0fun_name(a: _) -> _ {
+ a
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_mutable_param_with_further_usages() {
+ check_assist(
+ extract_function,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ $0arg.field = 8;$0
+ // Simulating access after the extracted portion
+ arg.field = 16;
+}
+"#,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ fun_name(arg);
+ // Simulating access after the extracted portion
+ arg.field = 16;
+}
+
+fn $0fun_name(arg: &mut Foo) {
+ arg.field = 8;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_mutable_param_without_further_usages() {
+ check_assist(
+ extract_function,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ $0arg.field = 8;$0
+}
+"#,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ fun_name(arg);
+}
+
+fn $0fun_name(arg: &mut Foo) {
+ arg.field = 8;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_at_start() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0// comment here!
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ // comment here!
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_in_between() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;$0
+ let a = 0;
+ // comment here!
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let a = 0;
+ // comment here!
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_at_end() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let x = 0;
+ // comment here!$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let x = 0;
+ // comment here!
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_indented() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let x = 0;
+ while(true) {
+ // comment here!
+ }$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let x = 0;
+ while(true) {
+ // comment here!
+ }
+}
+"#,
+ );
+ }
+
+ // FIXME: we do want to preserve whitespace
+ #[test]
+ fn extract_function_does_not_preserve_whitespace() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let a = 0;
+
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let a = 0;
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_long_form_comment() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0/* a comment */
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ /* a comment */
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn it_should_not_generate_duplicate_function_names() {
+ check_assist(
+ extract_function,
+ r#"
+fn fun_name() {
+ $0let x = 0;$0
+}
+"#,
+ r#"
+fn fun_name() {
+ fun_name1();
+}
+
+fn $0fun_name1() {
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn should_increment_suffix_until_it_finds_space() {
+ check_assist(
+ extract_function,
+ r#"
+fn fun_name1() {
+ let y = 0;
+}
+
+fn fun_name() {
+ $0let x = 0;$0
+}
+"#,
+ r#"
+fn fun_name1() {
+ let y = 0;
+}
+
+fn fun_name() {
+ fun_name2();
+}
+
+fn $0fun_name2() {
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_method_from_trait_impl() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct(i32);
+trait Trait {
+ fn bar(&self) -> i32;
+}
+
+impl Trait for Struct {
+ fn bar(&self) -> i32 {
+ $0self.0 + 2$0
+ }
+}
+"#,
+ r#"
+struct Struct(i32);
+trait Trait {
+ fn bar(&self) -> i32;
+}
+
+impl Trait for Struct {
+ fn bar(&self) -> i32 {
+ self.fun_name()
+ }
+}
+
+impl Struct {
+ fn $0fun_name(&self) -> i32 {
+ self.0 + 2
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closure_arguments() {
+ check_assist(
+ extract_function,
+ r#"
+fn parent(factor: i32) {
+ let v = &[1, 2, 3];
+
+ $0v.iter().map(|it| it * factor);$0
+}
+"#,
+ r#"
+fn parent(factor: i32) {
+ let v = &[1, 2, 3];
+
+ fun_name(v, factor);
+}
+
+fn $0fun_name(v: &[i32; 3], factor: i32) {
+ v.iter().map(|it| it * factor);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_generics() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug>(i: T) {
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug>(i: T) {
+ fun_name(i);
+}
+
+fn $0fun_name<T: Debug>(i: T) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_generics_from_body() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Default>() -> T {
+ $0T::default()$0
+}
+"#,
+ r#"
+fn func<T: Default>() -> T {
+ fun_name()
+}
+
+fn $0fun_name<T: Default>() -> T {
+ T::default()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filter_unused_generics() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug, U: Copy>(i: T, u: U) {
+ bar(u);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug, U: Copy>(i: T, u: U) {
+ bar(u);
+ fun_name(i);
+}
+
+fn $0fun_name<T: Debug>(i: T) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn empty_generic_param_list() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug>(t: T, i: u32) {
+ bar(t);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug>(t: T, i: u32) {
+ bar(t);
+ fun_name(i);
+}
+
+fn $0fun_name(i: u32) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_where_clause() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T>(i: T) where T: Debug {
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T>(i: T) where T: Debug {
+ fun_name(i);
+}
+
+fn $0fun_name<T>(i: T) where T: Debug {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filter_unused_where_clause() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T, U>(i: T, u: U) where T: Debug, U: Copy {
+ bar(u);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T, U>(i: T, u: U) where T: Debug, U: Copy {
+ bar(u);
+ fun_name(i);
+}
+
+fn $0fun_name<T>(i: T) where T: Debug {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_generics() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T: Into<i32>>(T);
+impl <T: Into<i32> + Copy> Struct<T> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T: Into<i32>>(T);
+impl <T: Into<i32> + Copy> Struct<T> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T: Into<i32> + Copy, V: Into<i32>>(t: T, v: V) -> i32 {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filters_unused_nested_generics() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T: Into<i32>, U: Debug>(T, U);
+impl <T: Into<i32> + Copy, U: Debug> Struct<T, U> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T: Into<i32>, U: Debug>(T, U);
+impl <T: Into<i32> + Copy, U: Debug> Struct<T, U> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T: Into<i32> + Copy, V: Into<i32>>(t: T, v: V) -> i32 {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_where_clauses() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T>(T) where T: Into<i32>;
+impl <T> Struct<T> where T: Into<i32> + Copy {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T>(T) where T: Into<i32>;
+impl <T> Struct<T> where T: Into<i32> + Copy {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filters_unused_nested_where_clauses() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T, U>(T, U) where T: Into<i32>, U: Debug;
+impl <T, U> Struct<T, U> where T: Into<i32> + Copy, U: Debug {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T, U>(T, U) where T: Into<i32>, U: Debug;
+impl <T, U> Struct<T, U> where T: Into<i32> + Copy, U: Debug {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
new file mode 100644
index 000000000..b3c4d306a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -0,0 +1,1770 @@
+use std::{
+ collections::{HashMap, HashSet},
+ iter,
+};
+
+use hir::{HasSource, ModuleSource};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ search::{FileReference, SearchScope},
+};
+use stdx::format_to;
+use syntax::{
+ algo::find_node_at_range,
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, HasName, HasVisibility,
+ },
+ match_ast, ted, AstNode, SourceFile,
+ SyntaxKind::{self, WHITESPACE},
+ SyntaxNode, TextRange,
+};
+
+use crate::{AssistContext, Assists};
+
+use super::remove_unused_param::range_to_remove;
+
+// Assist: extract_module
+//
+// Extracts a selected region as a separate module. All the references, visibility and imports are
+// resolved.
+//
+// ```
+// $0fn foo(name: i32) -> i32 {
+// name + 1
+// }$0
+//
+// fn bar(name: i32) -> i32 {
+// name + 2
+// }
+// ```
+// ->
+// ```
+// mod modname {
+// pub(crate) fn foo(name: i32) -> i32 {
+// name + 1
+// }
+// }
+//
+// fn bar(name: i32) -> i32 {
+// name + 2
+// }
+// ```
+pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ return None;
+ }
+
+ let node = ctx.covering_element();
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ // If the selection is inside an impl block, we need to place the new module outside the
+ // impl block, as impl blocks cannot contain modules.
+
+ let mut impl_parent: Option<ast::Impl> = None;
+ let mut impl_child_count: usize = 0;
+ if let Some(parent_assoc_list) = node.parent() {
+ if let Some(parent_impl) = parent_assoc_list.parent() {
+ if let Some(impl_) = ast::Impl::cast(parent_impl) {
+ impl_child_count = parent_assoc_list.children().count();
+ impl_parent = Some(impl_);
+ }
+ }
+ }
+
+ let mut curr_parent_module: Option<ast::Module> = None;
+ if let Some(mod_syn_opt) = node.ancestors().find(|it| ast::Module::can_cast(it.kind())) {
+ curr_parent_module = ast::Module::cast(mod_syn_opt);
+ }
+
+ let mut module = extract_target(&node, ctx.selection_trimmed())?;
+ if module.body_items.is_empty() {
+ return None;
+ }
+
+ let old_item_indent = module.body_items[0].indent_level();
+
+ acc.add(
+ AssistId("extract_module", AssistKind::RefactorExtract),
+ "Extract Module",
+ module.text_range,
+ |builder| {
+ // This takes place in three steps:
+ //
+ // - First, update the references (usages), e.g. converting a function call bar() to
+ //   modname::bar(), and similarly for other items.
+ //
+ // - Second, change the visibility of each item inside the newly created module,
+ //   i.e. turning fn a() {} into pub(crate) fn a() {}.
+ //
+ // - Third, resolve all the imports: this includes removing paths from imports outside
+ //   the module, shifting/cloning them inside the new module, or making new import
+ //   statements.
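+ //
+ // A simplified, assumed illustration of the third step (the path below is hypothetical):
+ // an item moved into modname that previously relied on a `use some::path::Item;` declared
+ // in the parent module gets an equivalent use statement re-created inside modname (in some
+ // cases prefixed with `super::`), while the now-redundant path is removed from the outer
+ // import.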
+
+ // We collect item usages and record_fields together: record_fields are needed for
+ // change_visibility and usages for the reference-update step mentioned above.
+ let (usages_to_be_processed, record_fields) = module.get_usages_and_record_fields(ctx);
+
+ let import_paths_to_be_removed = module.resolve_imports(curr_parent_module, ctx);
+ module.change_visibility(record_fields);
+
+ let mut body_items: Vec<String> = Vec::new();
+ let mut items_to_be_processed: Vec<ast::Item> = module.body_items.clone();
+ let mut new_item_indent = old_item_indent + 1;
+
+ if impl_parent.is_some() {
+ new_item_indent = old_item_indent + 2;
+ } else {
+ items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat();
+ }
+
+ for item in items_to_be_processed {
+ let item = item.indent(IndentLevel(1));
+ let mut indented_item = String::new();
+ format_to!(indented_item, "{}{}", new_item_indent, item.to_string());
+ body_items.push(indented_item);
+ }
+
+ let mut body = body_items.join("\n\n");
+
+ if let Some(impl_) = &impl_parent {
+ let mut impl_body_def = String::new();
+
+ if let Some(self_ty) = impl_.self_ty() {
+ format_to!(
+ impl_body_def,
+ "{}impl {} {{\n{}\n{}}}",
+ old_item_indent + 1,
+ self_ty.to_string(),
+ body,
+ old_item_indent + 1
+ );
+
+ body = impl_body_def;
+
+ // Add the import for enum/struct corresponding to given impl block
+ module.make_use_stmt_of_node_with_super(self_ty.syntax());
+ for item in module.use_items {
+ let mut indented_item = String::new();
+ format_to!(indented_item, "{}{}", old_item_indent + 1, item.to_string());
+ body = format!("{}\n\n{}", indented_item, body);
+ }
+ }
+ }
+
+ let mut module_def = String::new();
+
+ format_to!(module_def, "mod {} {{\n{}\n{}}}", module.name, body, old_item_indent);
+
+ let mut usages_to_be_updated_for_curr_file = vec![];
+ for usages_to_be_updated_for_file in usages_to_be_processed {
+ if usages_to_be_updated_for_file.0 == ctx.file_id() {
+ usages_to_be_updated_for_curr_file = usages_to_be_updated_for_file.1;
+ continue;
+ }
+ builder.edit_file(usages_to_be_updated_for_file.0);
+ for usage_to_be_processed in usages_to_be_updated_for_file.1 {
+ builder.replace(usage_to_be_processed.0, usage_to_be_processed.1)
+ }
+ }
+
+ builder.edit_file(ctx.file_id());
+ for usage_to_be_processed in usages_to_be_updated_for_curr_file {
+ builder.replace(usage_to_be_processed.0, usage_to_be_processed.1)
+ }
+
+ for import_path_text_range in import_paths_to_be_removed {
+ builder.delete(import_path_text_range);
+ }
+
+ if let Some(impl_) = impl_parent {
+ // Remove the complete impl block if it has only one child (it will be empty
+ // after deleting that child)
+ let node_to_be_removed = if impl_child_count == 1 {
+ impl_.syntax()
+ } else {
+ //Remove selected node
+ &node
+ };
+
+ builder.delete(node_to_be_removed.text_range());
+ // Remove preceding indentation from node
+ if let Some(range) = indent_range_before_given_node(node_to_be_removed) {
+ builder.delete(range);
+ }
+
+ builder.insert(impl_.syntax().text_range().end(), format!("\n\n{}", module_def));
+ } else {
+ builder.replace(module.text_range, module_def)
+ }
+ },
+ )
+}
+
+#[derive(Debug)]
+struct Module {
+ text_range: TextRange,
+ name: &'static str,
+ /// All items except use items.
+ body_items: Vec<ast::Item>,
+ /// Use items are kept separately because they help when the selection is inside an impl
+ /// block: they can be taken directly and kept outside the generated impl block, but still
+ /// inside the generated module.
+ use_items: Vec<ast::Item>,
+}
+
+fn extract_target(node: &SyntaxNode, selection_range: TextRange) -> Option<Module> {
+ let selected_nodes = node
+ .children()
+ .filter(|node| selection_range.contains_range(node.text_range()))
+ .chain(iter::once(node.clone()));
+ let (use_items, body_items) = selected_nodes
+ .filter_map(ast::Item::cast)
+ .partition(|item| matches!(item, ast::Item::Use(..)));
+
+ Some(Module { text_range: selection_range, name: "modname", body_items, use_items })
+}
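+
+// A rough illustration, assumed rather than taken from real assist output: for the doc
+// example above, `extract_target` would collect `fn foo` as the single body item, leave
+// `use_items` empty, and record the trimmed selection range together with the placeholder
+// module name "modname".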
+
+impl Module {
+ fn get_usages_and_record_fields(
+ &self,
+ ctx: &AssistContext<'_>,
+ ) -> (HashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) {
+ let mut adt_fields = Vec::new();
+ let mut refs: HashMap<FileId, Vec<(TextRange, String)>> = HashMap::new();
+
+ // Impls are not included here, as each item inside an impl is tied to the type the
+ // block implements (a struct, enum, etc.). If that type is in the selected module, it
+ // gets updated by the ADT arm below; if it is not, no action is needed.
+ for item in &self.body_items {
+ match_ast! {
+ match (item.syntax()) {
+ ast::Adt(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Adt(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+
+ // Enum fields are not allowed to explicitly specify pub; it is implied
+ match it {
+ ast::Adt::Struct(x) => {
+ if let Some(field_list) = x.field_list() {
+ match field_list {
+ ast::FieldList::RecordFieldList(record_field_list) => {
+ record_field_list.fields().for_each(|record_field| {
+ adt_fields.push(record_field.syntax().clone());
+ });
+ },
+ ast::FieldList::TupleFieldList(tuple_field_list) => {
+ tuple_field_list.fields().for_each(|tuple_field| {
+ adt_fields.push(tuple_field.syntax().clone());
+ });
+ },
+ }
+ }
+ },
+ ast::Adt::Union(x) => {
+ if let Some(record_field_list) = x.record_field_list() {
+ record_field_list.fields().for_each(|record_field| {
+ adt_fields.push(record_field.syntax().clone());
+ });
+ }
+ },
+ ast::Adt::Enum(_) => {},
+ }
+ }
+ },
+ ast::TypeAlias(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::TypeAlias(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Const(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Const(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Static(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Static(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Fn(it) => {
+ if let Some( nod ) = ctx.sema.to_def(&it) {
+ let node_def = Definition::Function(nod);
+ self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+ }
+ },
+ ast::Macro(it) => {
+ if let Some(nod) = ctx.sema.to_def(&it) {
+ self.expand_and_group_usages_file_wise(ctx, Definition::Macro(nod), &mut refs);
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+
+ (refs, adt_fields)
+ }
+
+ fn expand_and_group_usages_file_wise(
+ &self,
+ ctx: &AssistContext<'_>,
+ node_def: Definition,
+ refs_in_files: &mut HashMap<FileId, Vec<(TextRange, String)>>,
+ ) {
+ for (file_id, references) in node_def.usages(&ctx.sema).all() {
+ let source_file = ctx.sema.parse(file_id);
+ let usages_in_file = references
+ .into_iter()
+ .filter_map(|usage| self.get_usage_to_be_processed(&source_file, usage));
+ refs_in_files.entry(file_id).or_default().extend(usages_in_file);
+ }
+ }
+
+ fn get_usage_to_be_processed(
+ &self,
+ source_file: &SourceFile,
+ FileReference { range, name, .. }: FileReference,
+ ) -> Option<(TextRange, String)> {
+ let path: ast::Path = find_node_at_range(source_file.syntax(), range)?;
+
+ for desc in path.syntax().descendants() {
+ if desc.to_string() == name.syntax().to_string()
+ && !self.text_range.contains_range(desc.text_range())
+ {
+ if let Some(name_ref) = ast::NameRef::cast(desc) {
+ return Some((
+ name_ref.syntax().text_range(),
+ format!("{}::{}", self.name, name_ref),
+ ));
+ }
+ }
+ }
+
+ None
+ }
+
+ fn change_visibility(&mut self, record_fields: Vec<SyntaxNode>) {
+ let (mut replacements, record_field_parents, impls) =
+ get_replacements_for_visibilty_change(&mut self.body_items, false);
+
+ let mut impl_items: Vec<ast::Item> = impls
+ .into_iter()
+ .flat_map(|impl_| impl_.syntax().descendants())
+ .filter_map(ast::Item::cast)
+ .collect();
+
+ let (mut impl_item_replacements, _, _) =
+ get_replacements_for_visibilty_change(&mut impl_items, true);
+
+ replacements.append(&mut impl_item_replacements);
+
+ for (_, field_owner) in record_field_parents {
+ for desc in field_owner.descendants().filter_map(ast::RecordField::cast) {
+ let is_record_field_present =
+ record_fields.clone().into_iter().any(|x| x.to_string() == desc.to_string());
+ if is_record_field_present {
+ replacements.push((desc.visibility(), desc.syntax().clone()));
+ }
+ }
+ }
+
+ for (vis, syntax) in replacements {
+ let item = syntax.children_with_tokens().find(|node_or_token| {
+ match node_or_token.kind() {
+ // We're skipping comments, doc comments, and attribute macros that may precede the keyword
+ // that the visibility should be placed before.
+ SyntaxKind::COMMENT | SyntaxKind::ATTR | SyntaxKind::WHITESPACE => false,
+ _ => true,
+ }
+ });
+
+ add_change_vis(vis, item);
+ }
+ }
+
+ fn resolve_imports(
+ &mut self,
+ curr_parent_module: Option<ast::Module>,
+ ctx: &AssistContext<'_>,
+ ) -> Vec<TextRange> {
+ let mut import_paths_to_be_removed: Vec<TextRange> = vec![];
+ let mut node_set: HashSet<String> = HashSet::new();
+
+ for item in self.body_items.clone() {
+ for x in item.syntax().descendants() {
+ if let Some(name) = ast::Name::cast(x.clone()) {
+ if let Some(name_classify) = NameClass::classify(&ctx.sema, &name) {
+ // Necessary to avoid processing the same name twice
+ if !node_set.contains(&name.syntax().to_string()) {
+ node_set.insert(name.syntax().to_string());
+ let def_opt: Option<Definition> = match name_classify {
+ NameClass::Definition(def) => Some(def),
+ _ => None,
+ };
+
+ if let Some(def) = def_opt {
+ if let Some(import_path) = self
+ .process_names_and_namerefs_for_import_resolve(
+ def,
+ name.syntax(),
+ &curr_parent_module,
+ ctx,
+ )
+ {
+ check_intersection_and_push(
+ &mut import_paths_to_be_removed,
+ import_path,
+ );
+ }
+ }
+ }
+ }
+ }
+
+ if let Some(name_ref) = ast::NameRef::cast(x) {
+ if let Some(name_classify) = NameRefClass::classify(&ctx.sema, &name_ref) {
+ // Necessary to avoid processing the same name twice
+ if !node_set.contains(&name_ref.syntax().to_string()) {
+ node_set.insert(name_ref.syntax().to_string());
+ let def_opt: Option<Definition> = match name_classify {
+ NameRefClass::Definition(def) => Some(def),
+ _ => None,
+ };
+
+ if let Some(def) = def_opt {
+ if let Some(import_path) = self
+ .process_names_and_namerefs_for_import_resolve(
+ def,
+ name_ref.syntax(),
+ &curr_parent_module,
+ ctx,
+ )
+ {
+ check_intersection_and_push(
+ &mut import_paths_to_be_removed,
+ import_path,
+ );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ import_paths_to_be_removed
+ }
+
+ fn process_names_and_namerefs_for_import_resolve(
+ &mut self,
+ def: Definition,
+ node_syntax: &SyntaxNode,
+ curr_parent_module: &Option<ast::Module>,
+ ctx: &AssistContext<'_>,
+ ) -> Option<TextRange> {
+ // We only need to search in the current file
+ let selection_range = ctx.selection_trimmed();
+ let curr_file_id = ctx.file_id();
+ let search_scope = SearchScope::single_file(curr_file_id);
+ let usage_res = def.usages(&ctx.sema).in_scope(search_scope).all();
+ let file = ctx.sema.parse(curr_file_id);
+
+ let mut exists_inside_sel = false;
+ let mut exists_outside_sel = false;
+ for (_, refs) in usage_res.iter() {
+ let mut non_use_nodes_itr = refs.iter().filter_map(|x| {
+ if find_node_at_range::<ast::Use>(file.syntax(), x.range).is_none() {
+ let path_opt = find_node_at_range::<ast::Path>(file.syntax(), x.range);
+ return path_opt;
+ }
+
+ None
+ });
+
+ if non_use_nodes_itr
+ .clone()
+ .any(|x| !selection_range.contains_range(x.syntax().text_range()))
+ {
+ exists_outside_sel = true;
+ }
+ if non_use_nodes_itr.any(|x| selection_range.contains_range(x.syntax().text_range())) {
+ exists_inside_sel = true;
+ }
+ }
+
+ let source_exists_outside_sel_in_same_mod = does_source_exists_outside_sel_in_same_mod(
+ def,
+ ctx,
+ curr_parent_module,
+ selection_range,
+ curr_file_id,
+ );
+
+ let use_stmt_opt: Option<ast::Use> = usage_res.into_iter().find_map(|(file_id, refs)| {
+ if file_id == curr_file_id {
+ refs.into_iter()
+ .rev()
+ .find_map(|fref| find_node_at_range(file.syntax(), fref.range))
+ } else {
+ None
+ }
+ });
+
+ let mut use_tree_str_opt: Option<Vec<ast::Path>> = None;
+ // The item exists both inside and outside the selection:
+ // - A use stmt for the item is present -> get the use_tree_str and reconstruct the path
+ //   inside the new module.
+ // - A use stmt for the item is not present -> the definition is either moved inside the
+ //   new module or it stays outside:
+ //   - Def is inside: nothing to import.
+ //   - Def is outside: import it inside with super.
+ //
+ // The item exists inside the selection but not outside -> check for its import in the
+ // original module, get the use_tree_str, and reconstruct the use stmt inside the new
+ // module.
+
+ let mut import_path_to_be_removed: Option<TextRange> = None;
+ if exists_inside_sel && exists_outside_sel {
+ //Changes to be made only inside new module
+
+ //If use_stmt exists, find the use_tree_str, reconstruct it inside new module
+ //If not, insert a use stmt with super and the given nameref
+ if let Some((use_tree_str, _)) =
+ self.process_use_stmt_for_import_resolve(use_stmt_opt, node_syntax)
+ {
+ use_tree_str_opt = Some(use_tree_str);
+ } else if source_exists_outside_sel_in_same_mod {
+ // Considered only when no use_stmt is present.
+ // source_exists_outside_sel_in_same_mod | exists_outside_sel (exists_inside_sel is
+ // true in all cases):
+ //   false | false -> do nothing
+ //   false | true  -> if the source is in the selection -> nothing to do; if the source
+ //                    is outside the mod -> use_stmt traversal
+ //   true  | false -> super import insertion
+ //   true  | true  -> super import insertion
+ self.make_use_stmt_of_node_with_super(node_syntax);
+ }
+ } else if exists_inside_sel && !exists_outside_sel {
+ //Changes to be made inside new module, and remove import from outside
+
+ if let Some((mut use_tree_str, text_range_opt)) =
+ self.process_use_stmt_for_import_resolve(use_stmt_opt, node_syntax)
+ {
+ if let Some(text_range) = text_range_opt {
+ import_path_to_be_removed = Some(text_range);
+ }
+
+ if source_exists_outside_sel_in_same_mod {
+ if let Some(first_path_in_use_tree) = use_tree_str.last() {
+ let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
+ if !first_path_in_use_tree_str.contains("super")
+ && !first_path_in_use_tree_str.contains("crate")
+ {
+ let super_path = make::ext::ident_path("super");
+ use_tree_str.push(super_path);
+ }
+ }
+ }
+
+ use_tree_str_opt = Some(use_tree_str);
+ } else if source_exists_outside_sel_in_same_mod {
+ self.make_use_stmt_of_node_with_super(node_syntax);
+ }
+ }
+
+ if let Some(use_tree_str) = use_tree_str_opt {
+ let mut use_tree_str = use_tree_str;
+ use_tree_str.reverse();
+
+ if !(!exists_outside_sel && exists_inside_sel && source_exists_outside_sel_in_same_mod)
+ {
+ if let Some(first_path_in_use_tree) = use_tree_str.first() {
+ let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
+ if first_path_in_use_tree_str.contains("super") {
+ let super_path = make::ext::ident_path("super");
+ use_tree_str.insert(0, super_path)
+ }
+ }
+ }
+
+ let use_ =
+ make::use_(None, make::use_tree(make::join_paths(use_tree_str), None, None, false));
+ let item = ast::Item::from(use_);
+ self.use_items.insert(0, item);
+ }
+
+ import_path_to_be_removed
+ }
+
+ fn make_use_stmt_of_node_with_super(&mut self, node_syntax: &SyntaxNode) -> ast::Item {
+ let super_path = make::ext::ident_path("super");
+ let node_path = make::ext::ident_path(&node_syntax.to_string());
+ let use_ = make::use_(
+ None,
+ make::use_tree(make::join_paths(vec![super_path, node_path]), None, None, false),
+ );
+
+ let item = ast::Item::from(use_);
+ self.use_items.insert(0, item.clone());
+ item
+ }
+
+ fn process_use_stmt_for_import_resolve(
+ &self,
+ use_stmt_opt: Option<ast::Use>,
+ node_syntax: &SyntaxNode,
+ ) -> Option<(Vec<ast::Path>, Option<TextRange>)> {
+ if let Some(use_stmt) = use_stmt_opt {
+ for desc in use_stmt.syntax().descendants() {
+ if let Some(path_seg) = ast::PathSegment::cast(desc) {
+ if path_seg.syntax().to_string() == node_syntax.to_string() {
+ let mut use_tree_str = vec![path_seg.parent_path()];
+ get_use_tree_paths_from_path(path_seg.parent_path(), &mut use_tree_str);
+ for ancs in path_seg.syntax().ancestors() {
+ // Here we are looking for a use_tree with the same string value as the node
+ // passed above, because the range_to_remove function looks for a comma and
+ // includes it in the text range to remove. The comma only appears at the
+ // use_tree level.
+ if let Some(use_tree) = ast::UseTree::cast(ancs) {
+ if use_tree.syntax().to_string() == node_syntax.to_string() {
+ return Some((
+ use_tree_str,
+ Some(range_to_remove(use_tree.syntax())),
+ ));
+ }
+ }
+ }
+
+ return Some((use_tree_str, None));
+ }
+ }
+ }
+ }
+
+ None
+ }
+}
+
+fn check_intersection_and_push(
+ import_paths_to_be_removed: &mut Vec<TextRange>,
+ import_path: TextRange,
+) {
+ if import_paths_to_be_removed.len() > 0 {
+ // Text ranges received here for imports are extended to the next/previous comma,
+ // which can cause them to intersect, and deleting intersecting ranges later can
+ // cause panics like the one reported in #11766. To mitigate this, we check each
+ // new range against the current members and, if it intersects one of them, merge
+ // the two text ranges into one.
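+ //
+ // Hypothetical illustration (the numbers are invented for this comment): if the list
+ // already holds the range 10..20 and a new range 18..35 arrives, the two intersect, so the
+ // existing entry is widened to 10..35 via `cover` instead of pushing a second, overlapping
+ // range.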
+ let r = import_paths_to_be_removed
+ .into_iter()
+ .position(|it| it.intersect(import_path).is_some());
+ match r {
+ Some(it) => {
+ import_paths_to_be_removed[it] = import_paths_to_be_removed[it].cover(import_path)
+ }
+ None => import_paths_to_be_removed.push(import_path),
+ }
+ } else {
+ import_paths_to_be_removed.push(import_path);
+ }
+}
+
+fn does_source_exists_outside_sel_in_same_mod(
+ def: Definition,
+ ctx: &AssistContext<'_>,
+ curr_parent_module: &Option<ast::Module>,
+ selection_range: TextRange,
+ curr_file_id: FileId,
+) -> bool {
+ let mut source_exists_outside_sel_in_same_mod = false;
+ match def {
+ Definition::Module(x) => {
+ let source = x.definition_source(ctx.db());
+ let have_same_parent;
+ if let Some(ast_module) = &curr_parent_module {
+ if let Some(hir_module) = x.parent(ctx.db()) {
+ have_same_parent =
+ compare_hir_and_ast_module(ast_module, hir_module, ctx).is_some();
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ have_same_parent = source_file_id == curr_file_id;
+ }
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ have_same_parent = source_file_id == curr_file_id;
+ }
+
+ if have_same_parent {
+ match source.value {
+ ModuleSource::Module(module_) => {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(module_.syntax().text_range());
+ }
+ _ => {}
+ }
+ }
+ }
+ Definition::Function(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Adt(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Variant(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Const(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Static(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::Trait(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ Definition::TypeAlias(x) => {
+ if let Some(source) = x.source(ctx.db()) {
+ let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+ compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
+ } else {
+ let source_file_id = source.file_id.original_file(ctx.db());
+ source_file_id == curr_file_id
+ };
+
+ if have_same_parent {
+ source_exists_outside_sel_in_same_mod =
+ !selection_range.contains_range(source.value.syntax().text_range());
+ }
+ }
+ }
+ _ => {}
+ }
+
+ source_exists_outside_sel_in_same_mod
+}
+
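+/// Collects, for every item that will be moved, the nodes whose visibility
+/// may need to be raised: plain items, structs/unions whose record fields
+/// also need adjusting, and inherent impls, which are handled separately.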
+fn get_replacements_for_visibilty_change(
+ items: &mut [ast::Item],
+ is_clone_for_updated: bool,
+) -> (
+ Vec<(Option<ast::Visibility>, SyntaxNode)>,
+ Vec<(Option<ast::Visibility>, SyntaxNode)>,
+ Vec<ast::Impl>,
+) {
+ let mut replacements = Vec::new();
+ let mut record_field_parents = Vec::new();
+ let mut impls = Vec::new();
+
+ for item in items {
+ if !is_clone_for_updated {
+ *item = item.clone_for_update();
+ }
+        // Use statements are ignored.
+ match item {
+ ast::Item::Const(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Enum(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::ExternCrate(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Fn(it) => replacements.push((it.visibility(), it.syntax().clone())),
+            // The visibility of associated items should not be changed.
+ ast::Item::Impl(it) if it.for_token().is_none() => impls.push(it.clone()),
+ ast::Item::MacroDef(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Module(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Static(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Struct(it) => {
+ replacements.push((it.visibility(), it.syntax().clone()));
+ record_field_parents.push((it.visibility(), it.syntax().clone()));
+ }
+ ast::Item::Trait(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::TypeAlias(it) => replacements.push((it.visibility(), it.syntax().clone())),
+ ast::Item::Union(it) => {
+ replacements.push((it.visibility(), it.syntax().clone()));
+ record_field_parents.push((it.visibility(), it.syntax().clone()));
+ }
+ _ => (),
+ }
+ }
+
+ (replacements, record_field_parents, impls)
+}
+
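+/// Walks up from `path` through its enclosing use trees, pushing every
+/// ancestor use-tree path into `use_tree_str`; returns `None` when `path`
+/// is not part of a use tree.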
+fn get_use_tree_paths_from_path(
+ path: ast::Path,
+ use_tree_str: &mut Vec<ast::Path>,
+) -> Option<&mut Vec<ast::Path>> {
+ path.syntax().ancestors().filter(|x| x.to_string() != path.to_string()).find_map(|x| {
+ if let Some(use_tree) = ast::UseTree::cast(x) {
+ if let Some(upper_tree_path) = use_tree.path() {
+ if upper_tree_path.to_string() != path.to_string() {
+ use_tree_str.push(upper_tree_path.clone());
+ get_use_tree_paths_from_path(upper_tree_path, use_tree_str);
+ return Some(use_tree);
+ }
+ }
+ }
+ None
+ })?;
+
+ Some(use_tree_str)
+}
+
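+/// Inserts a `pub(crate)` visibility before the given element when the item
+/// currently has no visibility modifier.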
+fn add_change_vis(vis: Option<ast::Visibility>, node_or_token_opt: Option<syntax::SyntaxElement>) {
+ if vis.is_none() {
+ if let Some(node_or_token) = node_or_token_opt {
+ let pub_crate_vis = make::visibility_pub_crate().clone_for_update();
+ ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax());
+ }
+ }
+}
+
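+/// Returns `Some(())` when the HIR module and the AST module have the same
+/// name, as a cheap check that both refer to the same module.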
+fn compare_hir_and_ast_module(
+ ast_module: &ast::Module,
+ hir_module: hir::Module,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let hir_mod_name = hir_module.name(ctx.db())?;
+ let ast_mod_name = ast_module.name()?;
+ if hir_mod_name.to_string() != ast_mod_name.to_string() {
+ return None;
+ }
+
+ Some(())
+}
+
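+/// Returns the text range of the nearest whitespace sibling preceding `node`,
+/// used to pick up the indentation in front of it.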
+fn indent_range_before_given_node(node: &SyntaxNode) -> Option<TextRange> {
+ node.siblings_with_tokens(syntax::Direction::Prev)
+ .find(|x| x.kind() == WHITESPACE)
+ .map(|x| x.text_range())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_not_applicable_without_selection() {
+ check_assist_not_applicable(
+ extract_module,
+ r"
+$0pub struct PublicStruct {
+ field: i32,
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module() {
+ check_assist(
+ extract_module,
+ r"
+ mod thirdpartycrate {
+ pub mod nest {
+ pub struct SomeType;
+ pub struct SomeType2;
+ }
+ pub struct SomeType1;
+ }
+
+ mod bar {
+ use crate::thirdpartycrate::{nest::{SomeType, SomeType2}, SomeType1};
+
+ pub struct PublicStruct {
+ field: PrivateStruct,
+ field1: SomeType1,
+ }
+
+ impl PublicStruct {
+ pub fn new() -> Self {
+ Self { field: PrivateStruct::new(), field1: SomeType1 }
+ }
+ }
+
+ fn foo() {
+ let _s = PrivateStruct::new();
+ let _a = bar();
+ }
+
+$0struct PrivateStruct {
+ inner: SomeType,
+}
+
+pub struct PrivateStruct1 {
+ pub inner: i32,
+}
+
+impl PrivateStruct {
+ fn new() -> Self {
+ PrivateStruct { inner: SomeType }
+ }
+}
+
+fn bar() -> i32 {
+ 2
+}$0
+ }
+ ",
+ r"
+ mod thirdpartycrate {
+ pub mod nest {
+ pub struct SomeType;
+ pub struct SomeType2;
+ }
+ pub struct SomeType1;
+ }
+
+ mod bar {
+ use crate::thirdpartycrate::{nest::{SomeType2}, SomeType1};
+
+ pub struct PublicStruct {
+ field: modname::PrivateStruct,
+ field1: SomeType1,
+ }
+
+ impl PublicStruct {
+ pub fn new() -> Self {
+ Self { field: modname::PrivateStruct::new(), field1: SomeType1 }
+ }
+ }
+
+ fn foo() {
+ let _s = modname::PrivateStruct::new();
+ let _a = modname::bar();
+ }
+
+mod modname {
+ use crate::thirdpartycrate::nest::SomeType;
+
+ pub(crate) struct PrivateStruct {
+ pub(crate) inner: SomeType,
+ }
+
+ pub struct PrivateStruct1 {
+ pub inner: i32,
+ }
+
+ impl PrivateStruct {
+ pub(crate) fn new() -> Self {
+ PrivateStruct { inner: SomeType }
+ }
+ }
+
+ pub(crate) fn bar() -> i32 {
+ 2
+ }
+}
+ }
+ ",
+ );
+ }
+
+ #[test]
+ fn test_extract_module_for_function_only() {
+ check_assist(
+ extract_module,
+ r"
+$0fn foo(name: i32) -> i32 {
+ name + 1
+}$0
+
+ fn bar(name: i32) -> i32 {
+ name + 2
+ }
+ ",
+ r"
+mod modname {
+ pub(crate) fn foo(name: i32) -> i32 {
+ name + 1
+ }
+}
+
+ fn bar(name: i32) -> i32 {
+ name + 2
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_impl_having_corresponding_adt_in_selection() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play {
+$0struct A {}
+
+impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+}$0
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ r"
+ mod impl_play {
+mod modname {
+ pub(crate) struct A {}
+
+ impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+ }
+}
+
+ fn a() {
+ let _a = modname::A::new_a();
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_when_its_only_inside_selection() {
+ check_assist(
+ extract_module,
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ pub struct PrivateStruct1;
+ }
+
+ mod bar {
+ use super::foo::{PrivateStruct, PrivateStruct1};
+
+$0struct Strukt {
+ field: PrivateStruct,
+}$0
+
+ struct Strukt1 {
+ field: PrivateStruct1,
+ }
+ }
+ ",
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ pub struct PrivateStruct1;
+ }
+
+ mod bar {
+ use super::foo::{PrivateStruct1};
+
+mod modname {
+ use super::super::foo::PrivateStruct;
+
+ pub(crate) struct Strukt {
+ pub(crate) field: PrivateStruct,
+ }
+}
+
+ struct Strukt1 {
+ field: PrivateStruct1,
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_when_its_inside_and_outside_selection_and_source_not_in_same_mod() {
+ check_assist(
+ extract_module,
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ }
+
+ mod bar {
+ use super::foo::PrivateStruct;
+
+$0struct Strukt {
+ field: PrivateStruct,
+}$0
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ r"
+ mod foo {
+ pub struct PrivateStruct;
+ }
+
+ mod bar {
+ use super::foo::PrivateStruct;
+
+mod modname {
+ use super::super::foo::PrivateStruct;
+
+ pub(crate) struct Strukt {
+ pub(crate) field: PrivateStruct,
+ }
+}
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_when_its_inside_and_outside_selection_and_source_is_in_same_mod() {
+ check_assist(
+ extract_module,
+ r"
+ mod bar {
+ pub struct PrivateStruct;
+
+$0struct Strukt {
+ field: PrivateStruct,
+}$0
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ r"
+ mod bar {
+ pub struct PrivateStruct;
+
+mod modname {
+ use super::PrivateStruct;
+
+ pub(crate) struct Strukt {
+ pub(crate) field: PrivateStruct,
+ }
+}
+
+ struct Strukt1 {
+ field: PrivateStruct,
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+    fn test_extract_module_for_corresponding_adt_of_impl_present_in_same_mod_but_not_in_selection() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play {
+ struct A {}
+
+$0impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+}$0
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ r"
+ mod impl_play {
+ struct A {}
+
+mod modname {
+ use super::A;
+
+ impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+ }
+}
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_impl_not_having_corresponding_adt_in_selection_and_not_in_same_mod_but_with_super(
+ ) {
+ check_assist(
+ extract_module,
+ r"
+ mod foo {
+ pub struct A {}
+ }
+ mod impl_play {
+ use super::foo::A;
+
+$0impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+}$0
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ r"
+ mod foo {
+ pub struct A {}
+ }
+ mod impl_play {
+ use super::foo::A;
+
+mod modname {
+ use super::super::foo::A;
+
+ impl A {
+ pub fn new_a() -> i32 {
+ 2
+ }
+ }
+}
+
+ fn a() {
+ let _a = A::new_a();
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_import_resolve_for_trait_bounds_on_function() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play2 {
+ trait JustATrait {}
+
+$0struct A {}
+
+fn foo<T: JustATrait>(arg: T) -> T {
+ arg
+}
+
+impl JustATrait for A {}
+
+fn bar() {
+ let a = A {};
+ foo(a);
+}$0
+ }
+ ",
+ r"
+ mod impl_play2 {
+ trait JustATrait {}
+
+mod modname {
+ use super::JustATrait;
+
+ pub(crate) struct A {}
+
+ pub(crate) fn foo<T: JustATrait>(arg: T) -> T {
+ arg
+ }
+
+ impl JustATrait for A {}
+
+ pub(crate) fn bar() {
+ let a = A {};
+ foo(a);
+ }
+}
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_for_module() {
+ check_assist(
+ extract_module,
+ r"
+ mod impl_play2 {
+$0mod impl_play {
+ pub struct A {}
+}$0
+ }
+ ",
+ r"
+ mod impl_play2 {
+mod modname {
+ pub(crate) mod impl_play {
+ pub struct A {}
+ }
+}
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_with_multiple_files() {
+ check_assist(
+ extract_module,
+ r"
+ //- /main.rs
+ mod foo;
+
+ use foo::PrivateStruct;
+
+ pub struct Strukt {
+ field: PrivateStruct,
+ }
+
+ fn main() {
+ $0struct Strukt1 {
+ field: Strukt,
+ }$0
+ }
+ //- /foo.rs
+ pub struct PrivateStruct;
+ ",
+ r"
+ mod foo;
+
+ use foo::PrivateStruct;
+
+ pub struct Strukt {
+ field: PrivateStruct,
+ }
+
+ fn main() {
+ mod modname {
+ use super::Strukt;
+
+ pub(crate) struct Strukt1 {
+ pub(crate) field: Strukt,
+ }
+ }
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_extract_module_macro_rules() {
+ check_assist(
+ extract_module,
+ r"
+$0macro_rules! m {
+ () => {};
+}$0
+m! {}
+ ",
+ r"
+mod modname {
+ macro_rules! m {
+ () => {};
+ }
+}
+modname::m! {}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_do_not_apply_visibility_modifier_to_trait_impl_items() {
+ check_assist(
+ extract_module,
+ r"
+ trait ATrait {
+ fn function();
+ }
+
+ struct A {}
+
+$0impl ATrait for A {
+ fn function() {}
+}$0
+ ",
+ r"
+ trait ATrait {
+ fn function();
+ }
+
+ struct A {}
+
+mod modname {
+ use super::A;
+
+ use super::ATrait;
+
+ impl ATrait for A {
+ fn function() {}
+ }
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_if_inside_impl_block_generate_module_outside() {
+ check_assist(
+ extract_module,
+ r"
+ struct A {}
+
+ impl A {
+$0fn foo() {}$0
+ fn bar() {}
+ }
+ ",
+ r"
+ struct A {}
+
+ impl A {
+ fn bar() {}
+ }
+
+mod modname {
+ use super::A;
+
+ impl A {
+ pub(crate) fn foo() {}
+ }
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_if_inside_impl_block_generate_module_outside_but_impl_block_having_one_child() {
+ check_assist(
+ extract_module,
+ r"
+ struct A {}
+ struct B {}
+
+ impl A {
+$0fn foo(x: B) {}$0
+ }
+ ",
+ r"
+ struct A {}
+ struct B {}
+
+mod modname {
+ use super::B;
+
+ use super::A;
+
+ impl A {
+ pub(crate) fn foo(x: B) {}
+ }
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn test_issue_11766() {
+ //https://github.com/rust-lang/rust-analyzer/issues/11766
+ check_assist(
+ extract_module,
+ r"
+ mod x {
+ pub struct Foo;
+ pub struct Bar;
+ }
+
+ use x::{Bar, Foo};
+
+ $0type A = (Foo, Bar);$0
+ ",
+ r"
+ mod x {
+ pub struct Foo;
+ pub struct Bar;
+ }
+
+ use x::{};
+
+ mod modname {
+ use super::x::Bar;
+
+ use super::x::Foo;
+
+ pub(crate) type A = (Foo, Bar);
+ }
+ ",
+ )
+ }
+
+ #[test]
+ fn test_issue_12790() {
+ check_assist(
+ extract_module,
+ r"
+ $0/// A documented function
+ fn documented_fn() {}
+
+ // A commented function with a #[] attribute macro
+ #[cfg(test)]
+ fn attribute_fn() {}
+
+ // A normally commented function
+ fn normal_fn() {}
+
+ /// A documented Struct
+ struct DocumentedStruct {
+ // Normal field
+ x: i32,
+
+ /// Documented field
+ y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ z: i32,
+ }
+
+ // A macroed Struct
+ #[cfg(test)]
+ struct MacroedStruct {
+ // Normal field
+ x: i32,
+
+ /// Documented field
+ y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ z: i32,
+ }
+
+ // A normal Struct
+ struct NormalStruct {
+ // Normal field
+ x: i32,
+
+ /// Documented field
+ y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ z: i32,
+ }
+
+ /// A documented type
+ type DocumentedType = i32;
+
+ // A macroed type
+ #[cfg(test)]
+ type MacroedType = i32;
+
+ /// A module to move
+ mod module {}
+
+ /// An impl to move
+ impl NormalStruct {
+ /// A method
+ fn new() {}
+ }
+
+ /// A documented trait
+ trait DocTrait {
+ /// Inner function
+ fn doc() {}
+ }
+
+ /// An enum
+ enum DocumentedEnum {
+ /// A variant
+ A,
+ /// Another variant
+ B { x: i32, y: i32 }
+ }
+
+ /// Documented const
+ const MY_CONST: i32 = 0;$0
+ ",
+ r"
+ mod modname {
+ /// A documented function
+ pub(crate) fn documented_fn() {}
+
+ // A commented function with a #[] attribute macro
+ #[cfg(test)]
+ pub(crate) fn attribute_fn() {}
+
+ // A normally commented function
+ pub(crate) fn normal_fn() {}
+
+ /// A documented Struct
+ pub(crate) struct DocumentedStruct {
+ // Normal field
+ pub(crate) x: i32,
+
+ /// Documented field
+ pub(crate) y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ pub(crate) z: i32,
+ }
+
+ // A macroed Struct
+ #[cfg(test)]
+ pub(crate) struct MacroedStruct {
+ // Normal field
+ pub(crate) x: i32,
+
+ /// Documented field
+ pub(crate) y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ pub(crate) z: i32,
+ }
+
+ // A normal Struct
+ pub(crate) struct NormalStruct {
+ // Normal field
+ pub(crate) x: i32,
+
+ /// Documented field
+ pub(crate) y: i32,
+
+ // Macroed field
+ #[cfg(test)]
+ pub(crate) z: i32,
+ }
+
+ /// A documented type
+ pub(crate) type DocumentedType = i32;
+
+ // A macroed type
+ #[cfg(test)]
+ pub(crate) type MacroedType = i32;
+
+ /// A module to move
+ pub(crate) mod module {}
+
+ /// An impl to move
+ impl NormalStruct {
+ /// A method
+ pub(crate) fn new() {}
+ }
+
+ /// A documented trait
+ pub(crate) trait DocTrait {
+ /// Inner function
+ fn doc() {}
+ }
+
+ /// An enum
+ pub(crate) enum DocumentedEnum {
+ /// A variant
+ A,
+ /// Another variant
+ B { x: i32, y: i32 }
+ }
+
+ /// Documented const
+ pub(crate) const MY_CONST: i32 = 0;
+ }
+ ",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
new file mode 100644
index 000000000..a93648f2d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -0,0 +1,1076 @@
+use std::iter;
+
+use either::Either;
+use hir::{Module, ModuleDef, Name, Variant};
+use ide_db::{
+ defs::Definition,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope, InsertUseConfig},
+ search::FileReference,
+ FxHashSet, RootDatabase,
+};
+use itertools::{Itertools, Position};
+use syntax::{
+ ast::{
+ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams,
+ HasName, HasVisibility,
+ },
+ match_ast, ted, SyntaxElement,
+ SyntaxKind::*,
+ SyntaxNode, T,
+};
+
+use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: extract_struct_from_enum_variant
+//
+// Extracts a struct from an enum variant.
+//
+// ```
+// enum A { $0One(u32, u32) }
+// ```
+// ->
+// ```
+// struct One(u32, u32);
+//
+// enum A { One(One) }
+// ```
+pub(crate) fn extract_struct_from_enum_variant(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let field_list = extract_field_list_if_applicable(&variant)?;
+
+ let variant_name = variant.name()?;
+ let variant_hir = ctx.sema.to_def(&variant)?;
+ if existing_definition(ctx.db(), &variant_name, &variant_hir) {
+ cov_mark::hit!(test_extract_enum_not_applicable_if_struct_exists);
+ return None;
+ }
+
+ let enum_ast = variant.parent_enum();
+ let enum_hir = ctx.sema.to_def(&enum_ast)?;
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite),
+ "Extract struct from enum variant",
+ target,
+ |builder| {
+ let variant_hir_name = variant_hir.name(ctx.db());
+ let enum_module_def = ModuleDef::from(enum_hir);
+ let usages = Definition::Variant(variant_hir).usages(&ctx.sema).all();
+
+ let mut visited_modules_set = FxHashSet::default();
+ let current_module = enum_hir.module(ctx.db());
+ visited_modules_set.insert(current_module);
+            // Record the references from the file the definition lives in separately;
+            // we only want to switch the builder to that file once.
+ let mut def_file_references = None;
+ for (file_id, references) in usages {
+ if file_id == ctx.file_id() {
+ def_file_references = Some(references);
+ continue;
+ }
+ builder.edit_file(file_id);
+ let processed = process_references(
+ ctx,
+ builder,
+ &mut visited_modules_set,
+ &enum_module_def,
+ &variant_hir_name,
+ references,
+ );
+ processed.into_iter().for_each(|(path, node, import)| {
+ apply_references(ctx.config.insert_use, path, node, import)
+ });
+ }
+ builder.edit_file(ctx.file_id());
+
+ let variant = builder.make_mut(variant.clone());
+ if let Some(references) = def_file_references {
+ let processed = process_references(
+ ctx,
+ builder,
+ &mut visited_modules_set,
+ &enum_module_def,
+ &variant_hir_name,
+ references,
+ );
+ processed.into_iter().for_each(|(path, node, import)| {
+ apply_references(ctx.config.insert_use, path, node, import)
+ });
+ }
+
+ let indent = enum_ast.indent_level();
+ let generic_params = enum_ast
+ .generic_param_list()
+ .and_then(|known_generics| extract_generic_params(&known_generics, &field_list));
+ let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
+ let def =
+ create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
+ def.reindent_to(indent);
+
+ let start_offset = &variant.parent_enum().syntax().clone();
+ ted::insert_all_raw(
+ ted::Position::before(start_offset),
+ vec![
+ def.syntax().clone().into(),
+ make::tokens::whitespace(&format!("\n\n{}", indent)).into(),
+ ],
+ );
+
+ update_variant(&variant, generic_params.map(|g| g.clone_for_update()));
+ },
+ )
+}
+
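+/// Returns the variant's field list when extraction is applicable: record
+/// variants need at least one field, tuple variants need more than one.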
+fn extract_field_list_if_applicable(
+ variant: &ast::Variant,
+) -> Option<Either<ast::RecordFieldList, ast::TupleFieldList>> {
+ match variant.kind() {
+ ast::StructKind::Record(field_list) if field_list.fields().next().is_some() => {
+ Some(Either::Left(field_list))
+ }
+ ast::StructKind::Tuple(field_list) if field_list.fields().count() > 1 => {
+ Some(Either::Right(field_list))
+ }
+ _ => None,
+ }
+}
+
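+/// Checks whether the enum's module already contains a type-namespace item
+/// with the same name as the variant, which would clash with the new struct.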
+fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Variant) -> bool {
+ variant
+ .parent_enum(db)
+ .module(db)
+ .scope(db, None)
+ .into_iter()
+ .filter(|(_, def)| match def {
+ // only check type-namespace
+ hir::ScopeDef::ModuleDef(def) => matches!(
+ def,
+ ModuleDef::Module(_)
+ | ModuleDef::Adt(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Trait(_)
+ | ModuleDef::TypeAlias(_)
+ | ModuleDef::BuiltinType(_)
+ ),
+ _ => false,
+ })
+ .any(|(name, _)| name.to_string() == variant_name.to_string())
+}
+
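+/// Narrows the enum's generic parameter list down to the parameters that the
+/// variant's fields actually mention; returns `None` when none are used.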
+fn extract_generic_params(
+ known_generics: &ast::GenericParamList,
+ field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
+) -> Option<ast::GenericParamList> {
+ let mut generics = known_generics.generic_params().map(|param| (param, false)).collect_vec();
+
+ let tagged_one = match field_list {
+ Either::Left(field_list) => field_list
+ .fields()
+ .filter_map(|f| f.ty())
+ .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
+ Either::Right(field_list) => field_list
+ .fields()
+ .filter_map(|f| f.ty())
+ .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
+ };
+
+ let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
+ tagged_one.then(|| make::generic_param_list(generics))
+}
+
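+/// Marks every generic parameter from `generics` that is referenced by `ty`
+/// and reports whether at least one parameter was tagged.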
+fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, bool)]) -> bool {
+ let mut tagged_one = false;
+
+ for token in ty.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token) {
+ for (param, tag) in generics.iter_mut().filter(|(_, tag)| !tag) {
+ match param {
+ ast::GenericParam::LifetimeParam(lt)
+ if matches!(token.kind(), T![lifetime_ident]) =>
+ {
+ if let Some(lt) = lt.lifetime() {
+ if lt.text().as_str() == token.text() {
+ *tag = true;
+ tagged_one = true;
+ break;
+ }
+ }
+ }
+ param if matches!(token.kind(), T![ident]) => {
+ if match param {
+ ast::GenericParam::ConstParam(konst) => konst
+ .name()
+ .map(|name| name.text().as_str() == token.text())
+ .unwrap_or_default(),
+ ast::GenericParam::TypeParam(ty) => ty
+ .name()
+ .map(|name| name.text().as_str() == token.text())
+ .unwrap_or_default(),
+ ast::GenericParam::LifetimeParam(lt) => lt
+ .lifetime()
+ .map(|lt| lt.text().as_str() == token.text())
+ .unwrap_or_default(),
+ } {
+ *tag = true;
+ tagged_one = true;
+ break;
+ }
+ }
+ _ => (),
+ }
+ }
+ }
+
+ tagged_one
+}
+
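+/// Builds the new struct from the variant's fields, propagating the enum's
+/// visibility to fields that have none and copying attributes and doc
+/// comments over from both the variant and the enum.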
+fn create_struct_def(
+ variant_name: ast::Name,
+ variant: &ast::Variant,
+ field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
+ generics: Option<ast::GenericParamList>,
+ enum_: &ast::Enum,
+) -> ast::Struct {
+ let enum_vis = enum_.visibility();
+
+ let insert_vis = |node: &'_ SyntaxNode, vis: &'_ SyntaxNode| {
+ let vis = vis.clone_for_update();
+ ted::insert(ted::Position::before(node), vis);
+ };
+
+    // For fields without an explicit visibility, reuse the enum's visibility.
+ let field_list: ast::FieldList = match field_list {
+ Either::Left(field_list) => {
+ let field_list = field_list.clone_for_update();
+
+ if let Some(vis) = &enum_vis {
+ field_list
+ .fields()
+ .filter(|field| field.visibility().is_none())
+ .filter_map(|field| field.name())
+ .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
+ }
+
+ field_list.into()
+ }
+ Either::Right(field_list) => {
+ let field_list = field_list.clone_for_update();
+
+ if let Some(vis) = &enum_vis {
+ field_list
+ .fields()
+ .filter(|field| field.visibility().is_none())
+ .filter_map(|field| field.ty())
+ .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
+ }
+
+ field_list.into()
+ }
+ };
+
+ field_list.reindent_to(IndentLevel::single());
+
+ let strukt = make::struct_(enum_vis, variant_name, generics, field_list).clone_for_update();
+
+ // FIXME: Consider making this an actual function somewhere (like in `AttrsOwnerEdit`) after some deliberation
+ let attrs_and_docs = |node: &SyntaxNode| {
+ let mut select_next_ws = false;
+ node.children_with_tokens().filter(move |child| {
+ let accept = match child.kind() {
+ ATTR | COMMENT => {
+ select_next_ws = true;
+ return true;
+ }
+ WHITESPACE if select_next_ws => true,
+ _ => false,
+ };
+ select_next_ws = false;
+
+ accept
+ })
+ };
+
+ // copy attributes & comments from variant
+ let variant_attrs = attrs_and_docs(variant.syntax())
+ .map(|tok| match tok.kind() {
+ WHITESPACE => make::tokens::single_newline().into(),
+ _ => tok,
+ })
+ .collect();
+ ted::insert_all(ted::Position::first_child_of(strukt.syntax()), variant_attrs);
+
+ // copy attributes from enum
+ ted::insert_all(
+ ted::Position::first_child_of(strukt.syntax()),
+ enum_.attrs().map(|it| it.syntax().clone_for_update().into()).collect(),
+ );
+ strukt
+}
+
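+/// Rewrites the enum variant into a single-field tuple variant wrapping the
+/// newly extracted struct, including the generic arguments it uses.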
+fn update_variant(variant: &ast::Variant, generics: Option<ast::GenericParamList>) -> Option<()> {
+ let name = variant.name()?;
+ let ty = generics
+ .filter(|generics| generics.generic_params().count() > 0)
+ .map(|generics| {
+ let mut generic_str = String::with_capacity(8);
+
+ for (p, more) in generics.generic_params().with_position().map(|p| match p {
+ Position::First(p) | Position::Middle(p) => (p, true),
+ Position::Last(p) | Position::Only(p) => (p, false),
+ }) {
+ match p {
+ ast::GenericParam::ConstParam(konst) => {
+ if let Some(name) = konst.name() {
+ generic_str.push_str(name.text().as_str());
+ }
+ }
+ ast::GenericParam::LifetimeParam(lt) => {
+ if let Some(lt) = lt.lifetime() {
+ generic_str.push_str(lt.text().as_str());
+ }
+ }
+ ast::GenericParam::TypeParam(ty) => {
+ if let Some(name) = ty.name() {
+ generic_str.push_str(name.text().as_str());
+ }
+ }
+ }
+ if more {
+ generic_str.push_str(", ");
+ }
+ }
+
+ make::ty(&format!("{}<{}>", &name.text(), &generic_str))
+ })
+ .unwrap_or_else(|| make::ty(&name.text()));
+
+ let tuple_field = make::tuple_field(None, ty);
+ let replacement = make::variant(
+ name,
+ Some(ast::FieldList::TupleFieldList(make::tuple_field_list(iter::once(tuple_field)))),
+ )
+ .clone_for_update();
+ ted::replace(variant.syntax(), replacement.syntax());
+ Some(())
+}
+
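+/// Rewrites one usage site: optionally inserts a `use` for the new struct and
+/// wraps the variant's payload in a call to the struct's constructor.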
+fn apply_references(
+ insert_use_cfg: InsertUseConfig,
+ segment: ast::PathSegment,
+ node: SyntaxNode,
+ import: Option<(ImportScope, hir::ModPath)>,
+) {
+ if let Some((scope, path)) = import {
+ insert_use(&scope, mod_path_to_ast(&path), &insert_use_cfg);
+ }
+    // Deep-clone the segment so inserting it does not create a cycle in the tree.
+ let path = make::path_from_segments(iter::once(segment.clone_subtree()), false);
+ ted::insert_raw(ted::Position::before(segment.syntax()), path.clone_for_update().syntax());
+ ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
+ ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+}
+
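+/// Prepares the edits for all references within one file, computing at most
+/// one new import per visited module.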
+fn process_references(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ visited_modules: &mut FxHashSet<Module>,
+ enum_module_def: &ModuleDef,
+ variant_hir_name: &Name,
+ refs: Vec<FileReference>,
+) -> Vec<(ast::PathSegment, SyntaxNode, Option<(ImportScope, hir::ModPath)>)> {
+    // We have to recollect eagerly here: the tree is about to be edited, so the
+    // changes and their corresponding nodes must be calculated up front.
+ refs.into_iter()
+ .flat_map(|reference| {
+ let (segment, scope_node, module) = reference_to_node(&ctx.sema, reference)?;
+ let segment = builder.make_mut(segment);
+ let scope_node = builder.make_syntax_mut(scope_node);
+ if !visited_modules.contains(&module) {
+ let mod_path = module.find_use_path_prefixed(
+ ctx.sema.db,
+ *enum_module_def,
+ ctx.config.insert_use.prefix_kind,
+ );
+ if let Some(mut mod_path) = mod_path {
+ mod_path.pop_segment();
+ mod_path.push_segment(variant_hir_name.clone());
+ let scope = ImportScope::find_insert_use_container(&scope_node, &ctx.sema)?;
+ visited_modules.insert(module);
+ return Some((segment, scope_node, Some((scope, mod_path))));
+ }
+ }
+ Some((segment, scope_node, None))
+ })
+ .collect()
+}
+
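+/// Maps a `FileReference` to its path segment, the enclosing expression or
+/// pattern node, and the module the reference occurs in.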
+fn reference_to_node(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ reference: FileReference,
+) -> Option<(ast::PathSegment, SyntaxNode, hir::Module)> {
+ let segment =
+ reference.name.as_name_ref()?.syntax().parent().and_then(ast::PathSegment::cast)?;
+ let parent = segment.parent_path().syntax().parent()?;
+ let expr_or_pat = match_ast! {
+ match parent {
+ ast::PathExpr(_it) => parent.parent()?,
+ ast::RecordExpr(_it) => parent,
+ ast::TupleStructPat(_it) => parent,
+ ast::RecordPat(_it) => parent,
+ _ => return None,
+ }
+ };
+ let module = sema.scope(&expr_or_pat)?.module();
+ Some((segment, expr_or_pat, module))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_extract_struct_several_fields_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(u32, u32) }",
+ r#"struct One(u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_several_fields_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One { foo: u32, bar: u32 } }",
+ r#"struct One{ foo: u32, bar: u32 }
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_one_field_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One { foo: u32 } }",
+ r#"struct One{ foo: u32 }
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_carries_over_generics() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r"enum En<T> { Var { a: T$0 } }",
+ r#"struct Var<T>{ a: T }
+
+enum En<T> { Var(Var<T>) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_carries_over_attributes() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"#[derive(Debug)]
+#[derive(Clone)]
+enum Enum { Variant{ field: u32$0 } }"#,
+ r#"#[derive(Debug)]#[derive(Clone)] struct Variant{ field: u32 }
+
+#[derive(Debug)]
+#[derive(Clone)]
+enum Enum { Variant(Variant) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_indent_to_parent_enum() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum Enum {
+ Variant {
+ field: u32$0
+ }
+}"#,
+ r#"
+struct Variant{
+ field: u32
+}
+
+enum Enum {
+ Variant(Variant)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_indent_to_parent_enum_in_mod() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+mod indenting {
+ enum Enum {
+ Variant {
+ field: u32$0
+ }
+ }
+}"#,
+ r#"
+mod indenting {
+ struct Variant{
+ field: u32
+ }
+
+ enum Enum {
+ Variant(Variant)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_one_field_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ $0One {
+ // leading comment
+ /// doc comment
+ #[an_attr]
+ foo: u32
+ // trailing comment
+ }
+}"#,
+ r#"
+struct One{
+ // leading comment
+ /// doc comment
+ #[an_attr]
+ foo: u32
+ // trailing comment
+}
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_several_fields_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ $0One {
+ // comment
+ /// doc
+ #[attr]
+ foo: u32,
+ // comment
+ #[attr]
+ /// doc
+ bar: u32
+ }
+}"#,
+ r#"
+struct One{
+ // comment
+ /// doc
+ #[attr]
+ foo: u32,
+ // comment
+ #[attr]
+ /// doc
+ bar: u32
+}
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_several_fields_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(/* comment */ #[attr] u32, /* another */ u32 /* tail */) }",
+ r#"
+struct One(/* comment */ #[attr] u32, /* another */ u32 /* tail */);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_on_variant_struct() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ /* comment */
+ // other
+ /// comment
+ #[attr]
+ $0One {
+ a: u32
+ }
+}"#,
+ r#"
+/* comment */
+// other
+/// comment
+#[attr]
+struct One{
+ a: u32
+}
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_comments_and_attrs_on_variant_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A {
+ /* comment */
+ // other
+ /// comment
+ #[attr]
+ $0One(u32, u32)
+}"#,
+ r#"
+/* comment */
+// other
+/// comment
+#[attr]
+struct One(u32, u32);
+
+enum A {
+ One(One)
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_existing_visibility_named() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One{ a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 } }",
+ r#"
+struct One{ a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 }
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keep_existing_visibility_tuple() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(u32, pub(crate) u32, pub(super) u32, u32) }",
+ r#"
+struct One(u32, pub(crate) u32, pub(super) u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_enum_variant_name_value_namespace() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"const One: () = ();
+enum A { $0One(u32, u32) }"#,
+ r#"const One: () = ();
+struct One(u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_no_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "enum A { $0One(u32, u32) }",
+ r#"
+struct One(u32, u32);
+
+enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_pub_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "pub enum A { $0One(u32, u32) }",
+ r#"
+pub struct One(pub u32, pub u32);
+
+pub enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_pub_in_mod_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "pub(in something) enum A { $0One{ a: u32, b: u32 } }",
+ r#"
+pub(in something) struct One{ pub(in something) a: u32, pub(in something) b: u32 }
+
+pub(in something) enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_pub_crate_visibility() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ "pub(crate) enum A { $0One{ a: u32, b: u32, c: u32 } }",
+ r#"
+pub(crate) struct One{ pub(crate) a: u32, pub(crate) b: u32, pub(crate) c: u32 }
+
+pub(crate) enum A { One(One) }"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_with_complex_imports() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"mod my_mod {
+ fn another_fn() {
+ let m = my_other_mod::MyEnum::MyField(1, 1);
+ }
+
+ pub mod my_other_mod {
+ fn another_fn() {
+ let m = MyEnum::MyField(1, 1);
+ }
+
+ pub enum MyEnum {
+ $0MyField(u8, u8),
+ }
+ }
+}
+
+fn another_fn() {
+ let m = my_mod::my_other_mod::MyEnum::MyField(1, 1);
+}"#,
+ r#"use my_mod::my_other_mod::MyField;
+
+mod my_mod {
+ use self::my_other_mod::MyField;
+
+ fn another_fn() {
+ let m = my_other_mod::MyEnum::MyField(MyField(1, 1));
+ }
+
+ pub mod my_other_mod {
+ fn another_fn() {
+ let m = MyEnum::MyField(MyField(1, 1));
+ }
+
+ pub struct MyField(pub u8, pub u8);
+
+ pub enum MyEnum {
+ MyField(MyField),
+ }
+ }
+}
+
+fn another_fn() {
+ let m = my_mod::my_other_mod::MyEnum::MyField(MyField(1, 1));
+}"#,
+ );
+ }
+
+ #[test]
+ fn extract_record_fix_references() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum E {
+ $0V { i: i32, j: i32 }
+}
+
+fn f() {
+ let E::V { i, j } = E::V { i: 9, j: 2 };
+}
+"#,
+ r#"
+struct V{ i: i32, j: i32 }
+
+enum E {
+ V(V)
+}
+
+fn f() {
+ let E::V(V { i, j }) = E::V(V { i: 9, j: 2 });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_record_fix_references2() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum E {
+ $0V(i32, i32)
+}
+
+fn f() {
+ let E::V(i, j) = E::V(9, 2);
+}
+"#,
+ r#"
+struct V(i32, i32);
+
+enum E {
+ V(V)
+}
+
+fn f() {
+ let E::V(V(i, j)) = E::V(V(9, 2));
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_several_files() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+//- /main.rs
+enum E {
+ $0V(i32, i32)
+}
+mod foo;
+
+//- /foo.rs
+use crate::E;
+fn f() {
+ let e = E::V(9, 2);
+}
+"#,
+ r#"
+//- /main.rs
+struct V(i32, i32);
+
+enum E {
+ V(V)
+}
+mod foo;
+
+//- /foo.rs
+use crate::{E, V};
+fn f() {
+ let e = E::V(V(9, 2));
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_several_files_record() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+//- /main.rs
+enum E {
+ $0V { i: i32, j: i32 }
+}
+mod foo;
+
+//- /foo.rs
+use crate::E;
+fn f() {
+ let e = E::V { i: 9, j: 2 };
+}
+"#,
+ r#"
+//- /main.rs
+struct V{ i: i32, j: i32 }
+
+enum E {
+ V(V)
+}
+mod foo;
+
+//- /foo.rs
+use crate::{E, V};
+fn f() {
+ let e = E::V(V { i: 9, j: 2 });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_extract_struct_record_nested_call_exp() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum A { $0One { a: u32, b: u32 } }
+
+struct B(A);
+
+fn foo() {
+ let _ = B(A::One { a: 1, b: 2 });
+}
+"#,
+ r#"
+struct One{ a: u32, b: u32 }
+
+enum A { One(One) }
+
+struct B(A);
+
+fn foo() {
+ let _ = B(A::One(One { a: 1, b: 2 }));
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_enum_not_applicable_for_element_with_no_fields() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r#"enum A { $0One }"#);
+ }
+
+ #[test]
+ fn test_extract_enum_not_applicable_if_struct_exists() {
+ cov_mark::check!(test_extract_enum_not_applicable_if_struct_exists);
+ check_assist_not_applicable(
+ extract_struct_from_enum_variant,
+ r#"
+struct One;
+enum A { $0One(u8, u32) }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_not_applicable_one_field() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0One(u32) }");
+ }
+
+ #[test]
+ fn test_extract_not_applicable_no_field_tuple() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0None() }");
+ }
+
+ #[test]
+ fn test_extract_not_applicable_no_field_named() {
+ check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0None {} }");
+ }
+
+ #[test]
+ fn test_extract_struct_only_copies_needed_generics() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum X<'a, 'b, 'x> {
+ $0A { a: &'a &'x mut () },
+ B { b: &'b () },
+ C { c: () },
+}
+"#,
+ r#"
+struct A<'a, 'x>{ a: &'a &'x mut () }
+
+enum X<'a, 'b, 'x> {
+ A(A<'a, 'x>),
+ B { b: &'b () },
+ C { c: () },
+}
+"#,
+ );
+ }
+
+ #[test]
+    fn test_extract_struct_with_lifetime_type_const() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum X<'b, T, V, const C: usize> {
+ $0A { a: T, b: X<'b>, c: [u8; C] },
+ D { d: V },
+}
+"#,
+ r#"
+struct A<'b, T, const C: usize>{ a: T, b: X<'b>, c: [u8; C] }
+
+enum X<'b, T, V, const C: usize> {
+ A(A<'b, T, C>),
+ D { d: V },
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_without_generics() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum X<'a, 'b> {
+ A { a: &'a () },
+ B { b: &'b () },
+ $0C { c: () },
+}
+"#,
+ r#"
+struct C{ c: () }
+
+enum X<'a, 'b> {
+ A { a: &'a () },
+ B { b: &'b () },
+ C(C),
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_struct_keeps_trait_bounds() {
+ check_assist(
+ extract_struct_from_enum_variant,
+ r#"
+enum En<T: TraitT, V: TraitV> {
+ $0A { a: T },
+ B { b: V },
+}
+"#,
+ r#"
+struct A<T: TraitT>{ a: T }
+
+enum En<T: TraitT, V: TraitV> {
+ A(A<T>),
+ B { b: V },
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
new file mode 100644
index 000000000..af584cdb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -0,0 +1,360 @@
+use either::Either;
+use ide_db::syntax_helpers::node_ext::walk_ty;
+use itertools::Itertools;
+use syntax::{
+ ast::{self, edit::IndentLevel, AstNode, HasGenericParams, HasName},
+ match_ast,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: extract_type_alias
+//
+// Extracts the selected type as a type alias.
+//
+// ```
+// struct S {
+// field: $0(u8, u8, u8)$0,
+// }
+// ```
+// ->
+// ```
+// type $0Type = (u8, u8, u8);
+//
+// struct S {
+// field: Type,
+// }
+// ```
+pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ return None;
+ }
+
+ let ty = ctx.find_node_at_range::<ast::Type>()?;
+ let item = ty.syntax().ancestors().find_map(ast::Item::cast)?;
+ let assoc_owner = item.syntax().ancestors().nth(2).and_then(|it| {
+ match_ast! {
+ match it {
+ ast::Trait(tr) => Some(Either::Left(tr)),
+ ast::Impl(impl_) => Some(Either::Right(impl_)),
+ _ => None,
+ }
+ }
+ });
+ let node = assoc_owner.as_ref().map_or_else(
+ || item.syntax(),
+ |impl_| impl_.as_ref().either(AstNode::syntax, AstNode::syntax),
+ );
+ let insert_pos = node.text_range().start();
+ let target = ty.syntax().text_range();
+
+ acc.add(
+ AssistId("extract_type_alias", AssistKind::RefactorExtract),
+ "Extract type as type alias",
+ target,
+ |builder| {
+ let mut known_generics = match item.generic_param_list() {
+ Some(it) => it.generic_params().collect(),
+ None => Vec::new(),
+ };
+ if let Some(it) = assoc_owner.as_ref().and_then(|it| match it {
+ Either::Left(it) => it.generic_param_list(),
+ Either::Right(it) => it.generic_param_list(),
+ }) {
+ known_generics.extend(it.generic_params());
+ }
+ let generics = collect_used_generics(&ty, &known_generics);
+
+ let replacement = if !generics.is_empty() {
+ format!(
+ "Type<{}>",
+ generics.iter().format_with(", ", |generic, f| {
+ match generic {
+ ast::GenericParam::ConstParam(cp) => f(&cp.name().unwrap()),
+ ast::GenericParam::LifetimeParam(lp) => f(&lp.lifetime().unwrap()),
+ ast::GenericParam::TypeParam(tp) => f(&tp.name().unwrap()),
+ }
+ })
+ )
+ } else {
+ String::from("Type")
+ };
+ builder.replace(target, replacement);
+
+ let indent = IndentLevel::from_node(node);
+ let generics = if !generics.is_empty() {
+ format!("<{}>", generics.iter().format(", "))
+ } else {
+ String::new()
+ };
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ builder.insert_snippet(
+ cap,
+ insert_pos,
+ format!("type $0Type{} = {};\n\n{}", generics, ty, indent),
+ );
+ }
+ None => {
+ builder.insert(
+ insert_pos,
+ format!("type Type{} = {};\n\n{}", generics, ty, indent),
+ );
+ }
+ }
+ },
+ )
+}
+
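+/// Collects the known generic parameters that `ty` actually mentions, sorted
+/// into lifetime, type, const order.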
+fn collect_used_generics<'gp>(
+ ty: &ast::Type,
+ known_generics: &'gp [ast::GenericParam],
+) -> Vec<&'gp ast::GenericParam> {
+    // Can't use a closure returning a closure here because lifetime inference fails for that.
+ fn find_lifetime(text: &str) -> impl Fn(&&ast::GenericParam) -> bool + '_ {
+ move |gp: &&ast::GenericParam| match gp {
+ ast::GenericParam::LifetimeParam(lp) => {
+ lp.lifetime().map_or(false, |lt| lt.text() == text)
+ }
+ _ => false,
+ }
+ }
+
+ let mut generics = Vec::new();
+ walk_ty(ty, &mut |ty| match ty {
+ ast::Type::PathType(ty) => {
+ if let Some(path) = ty.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ if let Some(param) = known_generics.iter().find(|gp| {
+ match gp {
+ ast::GenericParam::ConstParam(cp) => cp.name(),
+ ast::GenericParam::TypeParam(tp) => tp.name(),
+ _ => None,
+ }
+ .map_or(false, |n| n.text() == name_ref.text())
+ }) {
+ generics.push(param);
+ }
+ }
+ generics.extend(
+ path.segments()
+ .filter_map(|seg| seg.generic_arg_list())
+ .flat_map(|it| it.generic_args())
+ .filter_map(|it| match it {
+ ast::GenericArg::LifetimeArg(lt) => {
+ let lt = lt.lifetime()?;
+ known_generics.iter().find(find_lifetime(&lt.text()))
+ }
+ _ => None,
+ }),
+ );
+ }
+ }
+ ast::Type::ImplTraitType(impl_ty) => {
+ if let Some(it) = impl_ty.type_bound_list() {
+ generics.extend(
+ it.bounds()
+ .filter_map(|it| it.lifetime())
+ .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ );
+ }
+ }
+ ast::Type::DynTraitType(dyn_ty) => {
+ if let Some(it) = dyn_ty.type_bound_list() {
+ generics.extend(
+ it.bounds()
+ .filter_map(|it| it.lifetime())
+ .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ );
+ }
+ }
+ ast::Type::RefType(ref_) => generics.extend(
+ ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ ),
+ _ => (),
+ });
+    // Stable re-sort into lifetime, type, const order.
+ generics.sort_by_key(|gp| match gp {
+ ast::GenericParam::ConstParam(_) => 2,
+ ast::GenericParam::LifetimeParam(_) => 0,
+ ast::GenericParam::TypeParam(_) => 1,
+ });
+ generics
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_not_applicable_without_selection() {
+ check_assist_not_applicable(
+ extract_type_alias,
+ r"
+struct S {
+ field: $0(u8, u8, u8),
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_simple_types() {
+ check_assist(
+ extract_type_alias,
+ r"
+struct S {
+ field: $0u8$0,
+}
+ ",
+ r#"
+type $0Type = u8;
+
+struct S {
+ field: Type,
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_generic_type_arg() {
+ check_assist(
+ extract_type_alias,
+ r"
+fn generic<T>() {}
+
+fn f() {
+ generic::<$0()$0>();
+}
+ ",
+ r#"
+fn generic<T>() {}
+
+type $0Type = ();
+
+fn f() {
+ generic::<Type>();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_inner_type_arg() {
+ check_assist(
+ extract_type_alias,
+ r"
+struct Vec<T> {}
+struct S {
+ v: Vec<Vec<$0Vec<u8>$0>>,
+}
+ ",
+ r#"
+struct Vec<T> {}
+type $0Type = Vec<u8>;
+
+struct S {
+ v: Vec<Vec<Type>>,
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_extract_inner_type() {
+ check_assist(
+ extract_type_alias,
+ r"
+struct S {
+ field: ($0u8$0,),
+}
+ ",
+ r#"
+type $0Type = u8;
+
+struct S {
+ field: (Type,),
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn extract_from_impl_or_trait() {
+ // When invoked in an impl/trait, extracted type alias should be placed next to the
+ // impl/trait, not inside.
+ check_assist(
+ extract_type_alias,
+ r#"
+impl S {
+ fn f() -> $0(u8, u8)$0 {}
+}
+ "#,
+ r#"
+type $0Type = (u8, u8);
+
+impl S {
+ fn f() -> Type {}
+}
+ "#,
+ );
+ check_assist(
+ extract_type_alias,
+ r#"
+trait Tr {
+ fn f() -> $0(u8, u8)$0 {}
+}
+ "#,
+ r#"
+type $0Type = (u8, u8);
+
+trait Tr {
+ fn f() -> Type {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indentation() {
+ check_assist(
+ extract_type_alias,
+ r#"
+mod m {
+ fn f() -> $0u8$0 {}
+}
+ "#,
+ r#"
+mod m {
+ type $0Type = u8;
+
+ fn f() -> Type {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn generics() {
+ check_assist(
+ extract_type_alias,
+ r#"
+struct Struct<const C: usize>;
+impl<'outer, Outer, const OUTER: usize> () {
+ fn func<'inner, Inner, const INNER: usize>(_: $0&(Struct<INNER>, Struct<OUTER>, Outer, &'inner (), Inner, &'outer ())$0) {}
+}
+"#,
+ r#"
+struct Struct<const C: usize>;
+type $0Type<'inner, 'outer, Outer, Inner, const INNER: usize, const OUTER: usize> = &(Struct<INNER>, Struct<OUTER>, Outer, &'inner (), Inner, &'outer ());
+
+impl<'outer, Outer, const OUTER: usize> () {
+ fn func<'inner, Inner, const INNER: usize>(_: Type<'inner, 'outer, Outer, Inner, INNER, OUTER>) {}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
new file mode 100644
index 000000000..3596b6f82
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -0,0 +1,1279 @@
+use stdx::format_to;
+use syntax::{
+ ast::{self, AstNode},
+ NodeOrToken,
+ SyntaxKind::{
+ BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, MATCH_GUARD,
+ PATH_EXPR, RETURN_EXPR,
+ },
+ SyntaxNode,
+};
+
+use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: extract_variable
+//
+// Extracts a subexpression into a variable.
+//
+// ```
+// fn main() {
+// $0(1 + 2)$0 * 4;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let $0var_name = (1 + 2);
+// var_name * 4;
+// }
+// ```
+pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ return None;
+ }
+
+ let node = match ctx.covering_element() {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) if it.kind() == COMMENT => {
+ cov_mark::hit!(extract_var_in_comment_is_not_applicable);
+ return None;
+ }
+ NodeOrToken::Token(it) => it.parent()?,
+ };
+ let node = node.ancestors().take_while(|anc| anc.text_range() == node.text_range()).last()?;
+ let to_extract = node
+ .descendants()
+ .take_while(|it| ctx.selection_trimmed().contains_range(it.text_range()))
+ .find_map(valid_target_expr)?;
+
+ if let Some(ty_info) = ctx.sema.type_of_expr(&to_extract) {
+ if ty_info.adjusted().is_unit() {
+ return None;
+ }
+ }
+
+ let reference_modifier = match get_receiver_type(ctx, &to_extract) {
+ Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
+ Some(receiver_type) if receiver_type.is_reference() => "&",
+ _ => "",
+ };
+
+ let parent_ref_expr = to_extract.syntax().parent().and_then(ast::RefExpr::cast);
+ let var_modifier = match parent_ref_expr {
+ Some(expr) if expr.mut_token().is_some() => "mut ",
+ _ => "",
+ };
+
+ let anchor = Anchor::from(&to_extract)?;
+ let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
+ let target = to_extract.syntax().text_range();
+ acc.add(
+ AssistId("extract_variable", AssistKind::RefactorExtract),
+ "Extract into variable",
+ target,
+ move |edit| {
+ let field_shorthand =
+ match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) {
+ Some(field) => field.name_ref(),
+ None => None,
+ };
+
+ let mut buf = String::new();
+
+ let var_name = match &field_shorthand {
+ Some(it) => it.to_string(),
+ None => suggest_name::for_variable(&to_extract, &ctx.sema),
+ };
+ let expr_range = match &field_shorthand {
+ Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()),
+ None => to_extract.syntax().text_range(),
+ };
+
+ match anchor {
+ Anchor::Before(_) | Anchor::Replace(_) => {
+ format_to!(buf, "let {}{} = {}", var_modifier, var_name, reference_modifier)
+ }
+ Anchor::WrapInBlock(_) => {
+ format_to!(buf, "{{ let {} = {}", var_name, reference_modifier)
+ }
+ };
+ format_to!(buf, "{}", to_extract.syntax());
+
+ if let Anchor::Replace(stmt) = anchor {
+ cov_mark::hit!(test_extract_var_expr_stmt);
+ if stmt.semicolon_token().is_none() {
+ buf.push(';');
+ }
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = buf.replace(
+ &format!("let {}{}", var_modifier, var_name),
+ &format!("let {}$0{}", var_modifier, var_name),
+ );
+ edit.replace_snippet(cap, expr_range, snip)
+ }
+ None => edit.replace(expr_range, buf),
+ }
+ return;
+ }
+
+ buf.push(';');
+
+ // We want to maintain the indent level,
+ // but we do not want to duplicate possible
+ // extra newlines in the indent block
+ let text = indent.text();
+ if text.starts_with('\n') {
+ buf.push('\n');
+ buf.push_str(text.trim_start_matches('\n'));
+ } else {
+ buf.push_str(text);
+ }
+
+ edit.replace(expr_range, var_name.clone());
+ let offset = anchor.syntax().text_range().start();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snip = buf.replace(
+ &format!("let {}{}", var_modifier, var_name),
+ &format!("let {}$0{}", var_modifier, var_name),
+ );
+ edit.insert_snippet(cap, offset, snip)
+ }
+ None => edit.insert(offset, buf),
+ }
+
+ if let Anchor::WrapInBlock(_) = anchor {
+ edit.insert(anchor.syntax().text_range().end(), " }");
+ }
+ },
+ )
+}
+
+/// Check whether the node is a valid expression which can be extracted to a variable.
+/// In general that's true for any expression, but in some cases that would produce invalid code.
+fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
+ match node.kind() {
+ PATH_EXPR | LOOP_EXPR => None,
+ BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
+ RETURN_EXPR => ast::ReturnExpr::cast(node).and_then(|e| e.expr()),
+ BLOCK_EXPR => {
+ ast::BlockExpr::cast(node).filter(|it| it.is_standalone()).map(ast::Expr::from)
+ }
+ _ => ast::Expr::cast(node),
+ }
+}
+
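+/// Resolves the type of the receiver of a field access chain (`a` in
+/// `a.b.c`), used to decide whether the extracted binding should borrow with
+/// `&` or `&mut`.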
+fn get_receiver_type(ctx: &AssistContext<'_>, expression: &ast::Expr) -> Option<hir::Type> {
+ let receiver = get_receiver(expression.clone())?;
+ Some(ctx.sema.type_of_expr(&receiver)?.original())
+}
+
+/// In the expression `a.b.c.x()`, find `a`
+fn get_receiver(expression: ast::Expr) -> Option<ast::Expr> {
+ match expression {
+ ast::Expr::FieldExpr(field) if field.expr().is_some() => {
+ let nested_expression = &field.expr()?;
+ get_receiver(nested_expression.to_owned())
+ }
+ _ => Some(expression),
+ }
+}
+
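+/// Where the extracted `let` binding is placed: before an existing statement,
+/// replacing an expression statement, or wrapped in a new block (for
+/// block-less closures and match arms).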
+#[derive(Debug)]
+enum Anchor {
+ Before(SyntaxNode),
+ Replace(ast::ExprStmt),
+ WrapInBlock(SyntaxNode),
+}
+
+impl Anchor {
+ fn from(to_extract: &ast::Expr) -> Option<Anchor> {
+ to_extract
+ .syntax()
+ .ancestors()
+ .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
+ .find_map(|node| {
+ if ast::MacroCall::can_cast(node.kind()) {
+ return None;
+ }
+ if let Some(expr) =
+ node.parent().and_then(ast::StmtList::cast).and_then(|it| it.tail_expr())
+ {
+ if expr.syntax() == &node {
+ cov_mark::hit!(test_extract_var_last_expr);
+ return Some(Anchor::Before(node));
+ }
+ }
+
+ if let Some(parent) = node.parent() {
+ if parent.kind() == CLOSURE_EXPR {
+ cov_mark::hit!(test_extract_var_in_closure_no_block);
+ return Some(Anchor::WrapInBlock(node));
+ }
+ if parent.kind() == MATCH_ARM {
+ if node.kind() == MATCH_GUARD {
+ cov_mark::hit!(test_extract_var_in_match_guard);
+ } else {
+ cov_mark::hit!(test_extract_var_in_match_arm_no_block);
+ return Some(Anchor::WrapInBlock(node));
+ }
+ }
+ }
+
+ if let Some(stmt) = ast::Stmt::cast(node.clone()) {
+ if let ast::Stmt::ExprStmt(stmt) = stmt {
+ if stmt.expr().as_ref() == Some(to_extract) {
+ return Some(Anchor::Replace(stmt));
+ }
+ }
+ return Some(Anchor::Before(node));
+ }
+ None
+ })
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Anchor::Before(it) | Anchor::WrapInBlock(it) => it,
+ Anchor::Replace(stmt) => stmt.syntax(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_extract_var_simple() {
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ foo($01 + 1$0);
+}"#,
+ r#"
+fn foo() {
+ let $0var_name = 1 + 1;
+ foo(var_name);
+}"#,
+ );
+ }
+
+ #[test]
+ fn extract_var_in_comment_is_not_applicable() {
+ cov_mark::check!(extract_var_in_comment_is_not_applicable);
+ check_assist_not_applicable(extract_variable, "fn main() { 1 + /* $0comment$0 */ 1; }");
+ }
+
+ #[test]
+ fn test_extract_var_expr_stmt() {
+ cov_mark::check!(test_extract_var_expr_stmt);
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ $0 1 + 1$0;
+}"#,
+ r#"
+fn foo() {
+ let $0var_name = 1 + 1;
+}"#,
+ );
+ check_assist(
+ extract_variable,
+ r"
+fn foo() {
+ $0{ let x = 0; x }$0
+ something_else();
+}",
+ r"
+fn foo() {
+ let $0var_name = { let x = 0; x };
+ something_else();
+}",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_part_of_expr_stmt() {
+ check_assist(
+ extract_variable,
+ r"
+fn foo() {
+ $01$0 + 1;
+}",
+ r"
+fn foo() {
+ let $0var_name = 1;
+ var_name + 1;
+}",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_last_expr() {
+ cov_mark::check!(test_extract_var_last_expr);
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ bar($01 + 1$0)
+}
+"#,
+ r#"
+fn foo() {
+ let $0var_name = 1 + 1;
+ bar(var_name)
+}
+"#,
+ );
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() -> i32 {
+ $0bar(1 + 1)$0
+}
+
+fn bar(i: i32) -> i32 {
+ i
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let $0bar = bar(1 + 1);
+ bar
+}
+
+fn bar(i: i32) -> i32 {
+ i
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_extract_var_in_match_arm_no_block() {
+ cov_mark::check!(test_extract_var_in_match_arm_no_block);
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => { let $0var_name = 2 + 2; (var_name, true) }
+ _ => (0, false)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_match_arm_with_block() {
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => {
+ let y = 1;
+ ($02 + y$0, true)
+ }
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => {
+ let y = 1;
+ let $0var_name = 2 + y;
+ (var_name, true)
+ }
+ _ => (0, false)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_match_guard() {
+ cov_mark::check!(test_extract_var_in_match_guard);
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ match () {
+ () if $010 > 0$0 => 1
+ _ => 2
+ };
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = 10 > 0;
+ match () {
+ () if var_name => 1
+ _ => 2
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_closure_no_block() {
+ cov_mark::check!(test_extract_var_in_closure_no_block);
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let lambda = |x: u32| $0x * 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| { let $0var_name = x * 2; var_name };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_in_closure_with_block() {
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let lambda = |x: u32| { $0x * 2$0 };
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| { let $0var_name = x * 2; var_name };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_path_simple() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let o = $0Some(true)$0;
+}
+",
+ "
+fn main() {
+ let $0var_name = Some(true);
+ let o = var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_path_method() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let v = $0bar.foo()$0;
+}
+",
+ "
+fn main() {
+ let $0foo = bar.foo();
+ let v = foo;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_return() {
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_does_not_add_extra_whitespace() {
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+
+
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+
+
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+
+ check_assist(
+ extract_variable,
+ "
+fn foo() -> u32 {
+ let foo = 1;
+
+ // bar
+
+
+ $0return 2 + 2$0;
+}
+",
+ "
+fn foo() -> u32 {
+ let foo = 1;
+
+ // bar
+
+
+ let $0var_name = 2 + 2;
+ return var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_break() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let result = loop {
+ $0break 2 + 2$0;
+ };
+}
+",
+ "
+fn main() {
+ let result = loop {
+ let $0var_name = 2 + 2;
+ break var_name;
+ };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_for_cast() {
+ check_assist(
+ extract_variable,
+ "
+fn main() {
+ let v = $00f32 as u32$0;
+}
+",
+ "
+fn main() {
+ let $0var_name = 0f32 as u32;
+ let v = var_name;
+}
+",
+ );
+ }
+
+ #[test]
+ fn extract_var_field_shorthand() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S {
+ foo: i32
+}
+
+fn main() {
+ S { foo: $01 + 1$0 }
+}
+"#,
+ r#"
+struct S {
+ foo: i32
+}
+
+fn main() {
+ let $0foo = 1 + 1;
+ S { foo }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_type() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Test(i32);
+
+fn foo() -> Test {
+ $0{ Test(10) }$0
+}
+"#,
+ r#"
+struct Test(i32);
+
+fn foo() -> Test {
+ let $0test = { Test(10) };
+ test
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ bar(1, $01+1$0);
+}
+"#,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ let $0size = 1+1;
+ bar(1, size);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_parameter_name_has_precedence_over_type() {
+ check_assist(
+ extract_variable,
+ r#"
+struct TextSize(u32);
+fn bar(test: u32, size: TextSize)
+
+fn foo() {
+ bar(1, $0{ TextSize(1+1) }$0);
+}
+"#,
+ r#"
+struct TextSize(u32);
+fn bar(test: u32, size: TextSize)
+
+fn foo() {
+ let $0size = { TextSize(1+1) };
+ bar(1, size);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_function() {
+ check_assist(
+ extract_variable,
+ r#"
+fn is_required(test: u32, size: u32) -> bool
+
+fn foo() -> bool {
+ $0is_required(1, 2)$0
+}
+"#,
+ r#"
+fn is_required(test: u32, size: u32) -> bool
+
+fn foo() -> bool {
+ let $0is_required = is_required(1, 2);
+ is_required
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_method() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32) -> u32 { n }
+}
+
+fn foo() -> u32 {
+ $0S.bar(1)$0
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32) -> u32 { n }
+}
+
+fn foo() -> u32 {
+ let $0bar = S.bar(1);
+ bar
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_method_param() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ S.bar($01 + 1$0, 2)
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ let $0n = 1 + 1;
+ S.bar(n, 2)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_name_from_ufcs_method_param() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ S::bar(&S, $01 + 1$0, 2)
+}
+"#,
+ r#"
+struct S;
+impl S {
+ fn bar(&self, n: u32, size: u32) { n }
+}
+
+fn foo() {
+ let $0n = 1 + 1;
+ S::bar(&S, n, 2)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_var_parameter_name_has_precedence_over_function() {
+ check_assist(
+ extract_variable,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ bar(1, $0symbol_size(1, 2)$0);
+}
+"#,
+ r#"
+fn bar(test: u32, size: u32)
+
+fn foo() {
+ let $0size = symbol_size(1, 2);
+ bar(1, size);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_macro_call() {
+ check_assist(
+ extract_variable,
+ r"
+struct Vec;
+macro_rules! vec {
+ () => {Vec}
+}
+fn main() {
+ let _ = $0vec![]$0;
+}
+",
+ r"
+struct Vec;
+macro_rules! vec {
+ () => {Vec}
+}
+fn main() {
+ let $0vec = vec![];
+ let _ = vec;
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_for_return_not_applicable() {
+ check_assist_not_applicable(extract_variable, "fn foo() { $0return$0; } ");
+ }
+
+ #[test]
+ fn test_extract_var_for_break_not_applicable() {
+ check_assist_not_applicable(extract_variable, "fn main() { loop { $0break$0; }; }");
+ }
+
+ #[test]
+ fn test_extract_var_unit_expr_not_applicable() {
+ check_assist_not_applicable(
+ extract_variable,
+ r#"
+fn foo() {
+ let mut i = 3;
+ $0if i >= 0 {
+ i += 1;
+ } else {
+ i -= 1;
+ }$0
+}"#,
+ );
+ }
+
+ // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic
+ #[test]
+ fn extract_var_target() {
+ check_assist_target(extract_variable, "fn foo() -> u32 { $0return 2 + 2$0; }", "2 + 2");
+
+ check_assist_target(
+ extract_variable,
+ "
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+",
+ "2 + 2",
+ );
+ }
+
+ #[test]
+ fn extract_var_no_block_body() {
+ check_assist_not_applicable(
+ extract_variable,
+ r"
+const X: usize = $0100$0;
+",
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+ $0s.vec$0.push(0);
+}"#,
+ r#"
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+ let $0vec = &mut s.vec;
+ vec.push(0);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_parameter_deep_nesting() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Y {
+ field: X
+}
+struct X {
+ field: S
+}
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+ $0f.field.field.vec$0.push(0);
+}"#,
+ r#"
+struct Y {
+ field: X
+}
+struct X {
+ field: S
+}
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+ let $0vec = &mut f.field.field.vec;
+ vec.push(0);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ $0s.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ let $0x = &s.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_parameter_deep_nesting() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Z;
+impl Z {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct Y {
+ field: Z
+}
+
+struct X {
+ field: Y
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ $0s.sub.field.field$0.do_thing();
+}"#,
+ r#"
+struct Z;
+impl Z {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct Y {
+ field: Z
+}
+
+struct X {
+ field: Y
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ let $0z = &s.sub.field.field;
+ z.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_regular_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: S) {
+ $0s.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: S) {
+ let $0x = s.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_local() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &mut S::new();
+ $0local.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &mut S::new();
+ let $0x = &mut local.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_local() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &S::new();
+ $0local.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &S::new();
+ let $0x = &local.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_for_mutable_borrow() {
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() {
+ let v = &mut $00$0;
+}"#,
+ r#"
+fn foo() {
+ let mut $0var_name = 0;
+ let v = &mut var_name;
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
new file mode 100644
index 000000000..b33846f54
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -0,0 +1,606 @@
+use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution};
+use ide_db::base_db::FileId;
+use syntax::{
+ ast::{self, HasVisibility as _},
+ AstNode, TextRange, TextSize,
+};
+
+use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: this really should be a fix for diagnostic, rather than an assist.
+
+// Assist: fix_visibility
+//
+// Makes an inaccessible item public.
+//
+// ```
+// mod m {
+// fn frobnicate() {}
+// }
+// fn main() {
+// m::frobnicate$0() {}
+// }
+// ```
+// ->
+// ```
+// mod m {
+// $0pub(crate) fn frobnicate() {}
+// }
+// fn main() {
+// m::frobnicate() {}
+// }
+// ```
+pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ add_vis_to_referenced_module_def(acc, ctx)
+ .or_else(|| add_vis_to_referenced_record_field(acc, ctx))
+}
+
+fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path: ast::Path = ctx.find_node_at_offset()?;
+ let path_res = ctx.sema.resolve_path(&path)?;
+ let def = match path_res {
+ PathResolution::Def(def) => def,
+ _ => return None,
+ };
+
+ let current_module = ctx.sema.scope(path.syntax())?.module();
+ let target_module = def.module(ctx.db())?;
+
+ if def.visibility(ctx.db()).is_visible_from(ctx.db(), current_module.into()) {
+ return None;
+ };
+
+ let (offset, current_visibility, target, target_file, target_name) =
+ target_data_for_def(ctx.db(), def)?;
+
+ let missing_visibility =
+ if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+
+ let assist_label = match target_name {
+ None => format!("Change visibility to {}", missing_visibility),
+ Some(name) => format!("Change visibility of {} to {}", name, missing_visibility),
+ };
+
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
+ builder.edit_file(target_file);
+ match ctx.config.snippet_cap {
+ Some(cap) => match current_visibility {
+ Some(current_visibility) => builder.replace_snippet(
+ cap,
+ current_visibility.syntax().text_range(),
+ format!("$0{}", missing_visibility),
+ ),
+ None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
+ },
+ None => match current_visibility {
+ Some(current_visibility) => {
+ builder.replace(current_visibility.syntax().text_range(), missing_visibility)
+ }
+ None => builder.insert(offset, format!("{} ", missing_visibility)),
+ },
+ }
+ })
+}
+
+fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
+ let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
+
+ let current_module = ctx.sema.scope(record_field.syntax())?.module();
+ let visibility = record_field_def.visibility(ctx.db());
+ if visibility.is_visible_from(ctx.db(), current_module.into()) {
+ return None;
+ }
+
+ let parent = record_field_def.parent_def(ctx.db());
+ let parent_name = parent.name(ctx.db());
+ let target_module = parent.module(ctx.db());
+
+ let in_file_source = record_field_def.source(ctx.db())?;
+ let (offset, current_visibility, target) = match in_file_source.value {
+ hir::FieldSource::Named(it) => {
+ let s = it.syntax();
+ (vis_offset(s), it.visibility(), s.text_range())
+ }
+ hir::FieldSource::Pos(it) => {
+ let s = it.syntax();
+ (vis_offset(s), it.visibility(), s.text_range())
+ }
+ };
+
+ let missing_visibility =
+ if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let target_file = in_file_source.file_id.original_file(ctx.db());
+
+ let target_name = record_field_def.name(ctx.db());
+ let assist_label =
+ format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility);
+
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
+ builder.edit_file(target_file);
+ match ctx.config.snippet_cap {
+ Some(cap) => match current_visibility {
+ Some(current_visibility) => builder.replace_snippet(
+ cap,
+ current_visibility.syntax().text_range(),
+ format!("$0{}", missing_visibility),
+ ),
+ None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
+ },
+ None => match current_visibility {
+ Some(current_visibility) => {
+ builder.replace(current_visibility.syntax().text_range(), missing_visibility)
+ }
+ None => builder.insert(offset, format!("{} ", missing_visibility)),
+ },
+ }
+ })
+}
+
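+// For the resolved definition, returns the offset at which a visibility modifier can
+// be inserted, the existing visibility (if any), the assist target range, the file
+// containing the definition, and its name.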
+fn target_data_for_def(
+ db: &dyn HirDatabase,
+ def: hir::ModuleDef,
+) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> {
+ fn offset_target_and_file_id<S, Ast>(
+ db: &dyn HirDatabase,
+ x: S,
+ ) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId)>
+ where
+ S: HasSource<Ast = Ast>,
+ Ast: AstNode + ast::HasVisibility,
+ {
+ let source = x.source(db)?;
+ let in_file_syntax = source.syntax();
+ let file_id = in_file_syntax.file_id;
+ let syntax = in_file_syntax.value;
+ let current_visibility = source.value.visibility();
+ Some((
+ vis_offset(syntax),
+ current_visibility,
+ syntax.text_range(),
+ file_id.original_file(db.upcast()),
+ ))
+ }
+
+ let target_name;
+ let (offset, current_visibility, target, target_file) = match def {
+ hir::ModuleDef::Function(f) => {
+ target_name = Some(f.name(db));
+ offset_target_and_file_id(db, f)?
+ }
+ hir::ModuleDef::Adt(adt) => {
+ target_name = Some(adt.name(db));
+ match adt {
+ hir::Adt::Struct(s) => offset_target_and_file_id(db, s)?,
+ hir::Adt::Union(u) => offset_target_and_file_id(db, u)?,
+ hir::Adt::Enum(e) => offset_target_and_file_id(db, e)?,
+ }
+ }
+ hir::ModuleDef::Const(c) => {
+ target_name = c.name(db);
+ offset_target_and_file_id(db, c)?
+ }
+ hir::ModuleDef::Static(s) => {
+ target_name = Some(s.name(db));
+ offset_target_and_file_id(db, s)?
+ }
+ hir::ModuleDef::Trait(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
+ hir::ModuleDef::TypeAlias(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
+ hir::ModuleDef::Module(m) => {
+ target_name = m.name(db);
+ let in_file_source = m.declaration_source(db)?;
+ let file_id = in_file_source.file_id.original_file(db.upcast());
+ let syntax = in_file_source.value.syntax();
+ (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id)
+ }
+ // FIXME
+ hir::ModuleDef::Macro(_) => return None,
+        // Enum variants can't be private, and we can't modify builtin types
+ hir::ModuleDef::Variant(_) | hir::ModuleDef::BuiltinType(_) => return None,
+ };
+
+ Some((offset, current_visibility, target, target_file, target_name))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn fix_visibility_of_fn() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { fn foo() {} }
+ fn main() { foo::foo$0() } ",
+ r"mod foo { $0pub(crate) fn foo() {} }
+ fn main() { foo::foo() } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub fn foo() {} }
+ fn main() { foo::foo$0() } ",
+ )
+ }
+
+ #[test]
+ fn fix_visibility_of_adt_in_submodule() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { struct Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) struct Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"mod foo { enum Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) enum Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub enum Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"mod foo { union Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) union Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub union Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_adt_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::Foo$0 }
+
+//- /foo.rs
+struct Foo;
+",
+ r"$0pub(crate) struct Foo;
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_struct_field() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { pub struct Foo { bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ r"mod foo { pub struct Foo { $0pub(crate) bar: (), } }
+ fn main() { foo::Foo { bar: () }; } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { bar: () }
+",
+ r"pub struct Foo { $0pub(crate) bar: () }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_enum_variant_field() {
+ // Enum variants, as well as their fields, always get the enum's visibility. In fact, rustc
+ // rejects any visibility specifiers on them, so this assist should never fire on them.
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub enum Foo { Bar { bar: () } } }
+ fn main() { foo::Foo::Bar { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo::Bar { $0bar: () }; }
+//- /foo.rs
+pub enum Foo { Bar { bar: () } }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_union_field() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { pub union Foo { bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ r"mod foo { pub union Foo { $0pub(crate) bar: (), } }
+ fn main() { foo::Foo { bar: () }; } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub union Foo { bar: () }
+",
+ r"pub union Foo { $0pub(crate) bar: () }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub union Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub union Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_const() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { const FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ r"mod foo { $0pub(crate) const FOO: () = (); }
+ fn main() { foo::FOO } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub const FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_static() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { static FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ r"mod foo { $0pub(crate) static FOO: () = (); }
+ fn main() { foo::FOO } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub static FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_trait() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::$0Foo; } ",
+ r"mod foo { $0pub(crate) trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::Foo; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::Foo$0; } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_type_alias() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { type Foo = (); }
+ fn main() { let x: foo::Foo$0; } ",
+ r"mod foo { $0pub(crate) type Foo = (); }
+ fn main() { let x: foo::Foo; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub type Foo = (); }
+ fn main() { let x: foo::Foo$0; } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_module() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { mod bar { fn bar() {} } }
+ fn main() { foo::bar$0::bar(); } ",
+ r"mod foo { $0pub(crate) mod bar { fn bar() {} } }
+ fn main() { foo::bar::bar(); } ",
+ );
+
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar {
+ pub fn baz() {}
+}
+",
+ r"$0pub(crate) mod bar {
+ pub fn baz() {}
+}
+",
+ );
+
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub mod bar { pub fn bar() {} } }
+ fn main() { foo::bar$0::bar(); } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_inline_module_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar;
+//- /foo/bar.rs
+pub fn baz() {}
+",
+ r"$0pub(crate) mod bar;
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_module_declaration_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0>::baz(); }
+
+//- /foo.rs
+mod bar {
+ pub fn baz() {}
+}
+",
+ r"$0pub(crate) mod bar {
+ pub fn baz() {}
+}
+",
+ );
+ }
+
+ #[test]
+ fn adds_pub_when_target_is_in_another_crate() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+foo::Bar$0
+//- /lib.rs crate:foo
+struct Bar;
+",
+ r"$0pub struct Bar;
+",
+ )
+ }
+
+ #[test]
+ fn replaces_pub_crate_with_pub() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+foo::Bar$0
+//- /lib.rs crate:foo
+pub(crate) struct Bar;
+",
+ r"$0pub struct Bar;
+",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+fn main() {
+ foo::Foo { $0bar: () };
+}
+//- /lib.rs crate:foo
+pub struct Foo { pub(crate) bar: () }
+",
+ r"pub struct Foo { $0pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_reexport() {
+ // FIXME: broken test, this should fix visibility of the re-export
+ // rather than the struct.
+ check_assist(
+ fix_visibility,
+ r#"
+mod foo {
+ use bar::Baz;
+ mod bar { pub(super) struct Baz; }
+}
+foo::Baz$0
+"#,
+ r#"
+mod foo {
+ use bar::Baz;
+ mod bar { $0pub(crate) struct Baz; }
+}
+foo::Baz
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
new file mode 100644
index 000000000..2ea6f58fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -0,0 +1,139 @@
+use syntax::ast::{self, AstNode, BinExpr};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: flip_binexpr
+//
+// Flips operands of a binary expression.
+//
+// ```
+// fn main() {
+// let _ = 90 +$0 2;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let _ = 2 + 90;
+// }
+// ```
+pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let expr = ctx.find_node_at_offset::<BinExpr>()?;
+ let lhs = expr.lhs()?.syntax().clone();
+ let rhs = expr.rhs()?.syntax().clone();
+ let op_range = expr.op_token()?.text_range();
+ // The assist should be applied only if the cursor is on the operator
+ let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+ let action: FlipAction = expr.op_kind()?.into();
+ // The assist should not be applied for certain operators
+ if let FlipAction::DontFlip = action {
+ return None;
+ }
+
+ acc.add(
+ AssistId("flip_binexpr", AssistKind::RefactorRewrite),
+ "Flip binary expression",
+ op_range,
+ |edit| {
+ if let FlipAction::FlipAndReplaceOp(new_op) = action {
+ edit.replace(op_range, new_op);
+ }
+ edit.replace(lhs.text_range(), rhs.text());
+ edit.replace(rhs.text_range(), lhs.text());
+ },
+ )
+}
+
+enum FlipAction {
+ // Flip the expression
+ Flip,
+ // Flip the expression and replace the operator with this string
+ FlipAndReplaceOp(&'static str),
+ // Do not flip the expression
+ DontFlip,
+}
+
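+// Assignments are never flipped; ordering comparisons are flipped with the operator
+// mirrored (e.g. `<` becomes `>`); all other operators are flipped unchanged.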
+impl From<ast::BinaryOp> for FlipAction {
+ fn from(op_kind: ast::BinaryOp) -> Self {
+ match op_kind {
+ ast::BinaryOp::Assignment { .. } => FlipAction::DontFlip,
+ ast::BinaryOp::CmpOp(ast::CmpOp::Ord { ordering, strict }) => {
+ let rev_op = match (ordering, strict) {
+ (ast::Ordering::Less, true) => ">",
+ (ast::Ordering::Less, false) => ">=",
+ (ast::Ordering::Greater, true) => "<",
+ (ast::Ordering::Greater, false) => "<=",
+ };
+ FlipAction::FlipAndReplaceOp(rev_op)
+ }
+ _ => FlipAction::Flip,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn flip_binexpr_target_is_the_op() {
+ check_assist_target(flip_binexpr, "fn f() { let res = 1 ==$0 2; }", "==")
+ }
+
+ #[test]
+ fn flip_binexpr_not_applicable_for_assignment() {
+ check_assist_not_applicable(flip_binexpr, "fn f() { let mut _x = 1; _x +=$0 2 }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_eq() {
+ check_assist(flip_binexpr, "fn f() { let res = 1 ==$0 2; }", "fn f() { let res = 2 == 1; }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_gt() {
+ check_assist(flip_binexpr, "fn f() { let res = 1 >$0 2; }", "fn f() { let res = 2 < 1; }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_lteq() {
+ check_assist(flip_binexpr, "fn f() { let res = 1 <=$0 2; }", "fn f() { let res = 2 >= 1; }")
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_complex_expr() {
+ check_assist(
+ flip_binexpr,
+ "fn f() { let res = (1 + 1) ==$0 (2 + 2); }",
+ "fn f() { let res = (2 + 2) == (1 + 1); }",
+ )
+ }
+
+ #[test]
+ fn flip_binexpr_works_inside_match() {
+ check_assist(
+ flip_binexpr,
+ r#"
+ fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
+ match other.downcast_ref::<Self>() {
+ None => false,
+ Some(it) => it ==$0 self,
+ }
+ }
+ "#,
+ r#"
+ fn dyn_eq(&self, other: &dyn Diagnostic) -> bool {
+ match other.downcast_ref::<Self>() {
+ None => false,
+ Some(it) => self == it,
+ }
+ }
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
new file mode 100644
index 000000000..f40f2713a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
@@ -0,0 +1,92 @@
+use syntax::{algo::non_trivia_sibling, Direction, SyntaxKind, T};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: flip_comma
+//
+// Flips two comma-separated items.
+//
+// ```
+// fn main() {
+// ((1, 2),$0 (3, 4));
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// ((3, 4), (1, 2));
+// }
+// ```
+pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let comma = ctx.find_token_syntax_at_offset(T![,])?;
+ let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
+ let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
+
+    // Don't apply a "flip" to a trailing comma,
+    // which typically comes right before closing punctuation
+ if next.kind().is_punct() {
+ return None;
+ }
+
+    // Don't apply a "flip" inside a macro call,
+    // since macro inputs are just plain tokens
+ if comma.parent_ancestors().any(|it| it.kind() == SyntaxKind::MACRO_CALL) {
+ return None;
+ }
+
+ acc.add(
+ AssistId("flip_comma", AssistKind::RefactorRewrite),
+ "Flip comma",
+ comma.text_range(),
+ |edit| {
+ edit.replace(prev.text_range(), next.to_string());
+ edit.replace(next.text_range(), prev.to_string());
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn flip_comma_works_for_function_parameters() {
+ check_assist(
+ flip_comma,
+ r#"fn foo(x: i32,$0 y: Result<(), ()>) {}"#,
+ r#"fn foo(y: Result<(), ()>, x: i32) {}"#,
+ )
+ }
+
+ #[test]
+ fn flip_comma_target() {
+ check_assist_target(flip_comma, r#"fn foo(x: i32,$0 y: Result<(), ()>) {}"#, ",")
+ }
+
+ #[test]
+ fn flip_comma_before_punct() {
+ // See https://github.com/rust-lang/rust-analyzer/issues/1619
+ // "Flip comma" assist shouldn't be applicable to the last comma in enum or struct
+ // declaration body.
+ check_assist_not_applicable(flip_comma, "pub enum Test { A,$0 }");
+ check_assist_not_applicable(flip_comma, "pub struct Test { foo: usize,$0 }");
+ }
+
+ #[test]
+ fn flip_comma_works() {
+ check_assist(
+ flip_comma,
+ r#"fn main() {((1, 2),$0 (3, 4));}"#,
+ r#"fn main() {((3, 4), (1, 2));}"#,
+ )
+ }
+
+ #[test]
+ fn flip_comma_not_applicable_for_macro_input() {
+ // "Flip comma" assist shouldn't be applicable inside the macro call
+ // See https://github.com/rust-lang/rust-analyzer/issues/7693
+ check_assist_not_applicable(flip_comma, r#"bar!(a,$0 b)"#);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
new file mode 100644
index 000000000..e3ae4970b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
@@ -0,0 +1,121 @@
+use syntax::{
+ algo::non_trivia_sibling,
+ ast::{self, AstNode},
+ Direction, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: flip_trait_bound
+//
+// Flips two trait bounds.
+//
+// ```
+// fn foo<T: Clone +$0 Copy>() { }
+// ```
+// ->
+// ```
+// fn foo<T: Copy + Clone>() { }
+// ```
+pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // We want to replicate the behavior of `flip_binexpr` by only suggesting
+ // the assist when the cursor is on a `+`
+ let plus = ctx.find_token_syntax_at_offset(T![+])?;
+
+ // Make sure we're in a `TypeBoundList`
+ if ast::TypeBoundList::cast(plus.parent()?).is_none() {
+ return None;
+ }
+
+ let (before, after) = (
+ non_trivia_sibling(plus.clone().into(), Direction::Prev)?,
+ non_trivia_sibling(plus.clone().into(), Direction::Next)?,
+ );
+
+ let target = plus.text_range();
+ acc.add(
+ AssistId("flip_trait_bound", AssistKind::RefactorRewrite),
+ "Flip trait bounds",
+ target,
+ |edit| {
+ edit.replace(before.text_range(), after.to_string());
+ edit.replace(after.text_range(), before.to_string());
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn flip_trait_bound_assist_available() {
+ check_assist_target(flip_trait_bound, "struct S<T> where T: A $0+ B + C { }", "+")
+ }
+
+ #[test]
+ fn flip_trait_bound_not_applicable_for_single_trait_bound() {
+ check_assist_not_applicable(flip_trait_bound, "struct S<T> where T: $0A { }")
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_struct() {
+ check_assist(
+ flip_trait_bound,
+ "struct S<T> where T: A $0+ B { }",
+ "struct S<T> where T: B + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_trait_impl() {
+ check_assist(
+ flip_trait_bound,
+ "impl X for S<T> where T: A +$0 B { }",
+ "impl X for S<T> where T: B + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_fn() {
+ check_assist(flip_trait_bound, "fn f<T: A $0+ B>(t: T) { }", "fn f<T: B + A>(t: T) { }")
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_fn_where_clause() {
+ check_assist(
+ flip_trait_bound,
+ "fn f<T>(t: T) where T: A +$0 B { }",
+ "fn f<T>(t: T) where T: B + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_lifetime() {
+ check_assist(
+ flip_trait_bound,
+ "fn f<T>(t: T) where T: A $0+ 'static { }",
+ "fn f<T>(t: T) where T: 'static + A { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_complex_bounds() {
+ check_assist(
+ flip_trait_bound,
+ "struct S<T> where T: A<T> $0+ b_mod::B<T> + C<T> { }",
+ "struct S<T> where T: b_mod::B<T> + A<T> + C<T> { }",
+ )
+ }
+
+ #[test]
+ fn flip_trait_bound_works_for_long_bounds() {
+ check_assist(
+ flip_trait_bound,
+ "struct S<T> where T: A + B + C + D + E + F +$0 G + H + I + J { }",
+ "struct S<T> where T: A + B + C + D + E + G + F + H + I + J { }",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
new file mode 100644
index 000000000..eaa6de73e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
@@ -0,0 +1,255 @@
+use crate::assist_context::{AssistContext, Assists};
+use hir::{HasVisibility, HirDisplay, Module};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::{FileId, Upcast},
+ defs::{Definition, NameRefClass},
+};
+use syntax::{
+ ast::{self, edit::IndentLevel, NameRef},
+ AstNode, Direction, SyntaxKind, TextSize,
+};
+
+// Assist: generate_constant
+//
+// Generate a named constant.
+//
+// ```
+// struct S { i: usize }
+// impl S { pub fn new(n: usize) {} }
+// fn main() {
+// let v = S::new(CAPA$0CITY);
+// }
+// ```
+// ->
+// ```
+// struct S { i: usize }
+// impl S { pub fn new(n: usize) {} }
+// fn main() {
+// const CAPACITY: usize = $0;
+// let v = S::new(CAPACITY);
+// }
+// ```
+
+pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let constant_token = ctx.find_node_at_offset::<ast::NameRef>()?;
+ if constant_token.to_string().chars().any(|it| !(it.is_uppercase() || it == '_')) {
+ cov_mark::hit!(not_constant_name);
+ return None;
+ }
+ if NameRefClass::classify(&ctx.sema, &constant_token).is_some() {
+ cov_mark::hit!(already_defined);
+ return None;
+ }
+ let expr = constant_token.syntax().ancestors().find_map(ast::Expr::cast)?;
+ let statement = expr.syntax().ancestors().find_map(ast::Stmt::cast)?;
+ let ty = ctx.sema.type_of_expr(&expr)?;
+ let scope = ctx.sema.scope(statement.syntax())?;
+ let constant_module = scope.module();
+ let type_name = ty.original().display_source_code(ctx.db(), constant_module.into()).ok()?;
+ let target = statement.syntax().parent()?.text_range();
+ let path = constant_token.syntax().ancestors().find_map(ast::Path::cast)?;
+
+ let name_refs = path.segments().map(|s| s.name_ref());
+ let mut outer_exists = false;
+ let mut not_exist_name_ref = Vec::new();
+ let mut current_module = constant_module;
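+    // Walk the path segments: segments that resolve to visible modules update
+    // `current_module`; unresolved segments are collected so that nested `mod`
+    // blocks can be generated for them.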
+ for name_ref in name_refs {
+ let name_ref_value = name_ref?;
+ let name_ref_class = NameRefClass::classify(&ctx.sema, &name_ref_value);
+ match name_ref_class {
+ Some(NameRefClass::Definition(Definition::Module(m))) => {
+ if !m.visibility(ctx.sema.db).is_visible_from(ctx.sema.db, constant_module.into()) {
+ return None;
+ }
+ outer_exists = true;
+ current_module = m;
+ }
+ Some(_) => {
+ return None;
+ }
+ None => {
+ not_exist_name_ref.push(name_ref_value);
+ }
+ }
+ }
+ let (offset, indent, file_id, post_string) =
+ target_data_for_generate_constant(ctx, current_module, constant_module).unwrap_or_else(
+ || {
+ let indent = IndentLevel::from_node(statement.syntax());
+ (statement.syntax().text_range().start(), indent, None, format!("\n{}", indent))
+ },
+ );
+
+ let text = get_text_for_generate_constant(not_exist_name_ref, indent, outer_exists, type_name)?;
+ acc.add(
+ AssistId("generate_constant", AssistKind::QuickFix),
+ "Generate constant",
+ target,
+ |builder| {
+ if let Some(file_id) = file_id {
+ builder.edit_file(file_id);
+ }
+ builder.insert(offset, format!("{}{}", text, post_string));
+ },
+ )
+}
+
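+// Renders the new `const` item, wrapping it in one nested `mod` block per path
+// segment that does not exist yet and indenting it to match the insertion point.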
+fn get_text_for_generate_constant(
+ mut not_exist_name_ref: Vec<NameRef>,
+ indent: IndentLevel,
+ outer_exists: bool,
+ type_name: String,
+) -> Option<String> {
+ let constant_token = not_exist_name_ref.pop()?;
+ let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
+ let mut text = format!("{}const {}: {} = $0;", vis, constant_token, type_name);
+ while let Some(name_ref) = not_exist_name_ref.pop() {
+ let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
+ text = text.replace("\n", "\n ");
+ text = format!("{}mod {} {{{}\n}}", vis, name_ref.to_string(), text);
+ }
+ Some(text.replace("\n", &format!("\n{}", indent)))
+}
+
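+// Decides where the generated constant is inserted. `None` means "current file,
+// right before the enclosing statement"; otherwise returns the insertion offset,
+// indentation, target file and the text to append after the new item.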
+fn target_data_for_generate_constant(
+ ctx: &AssistContext<'_>,
+ current_module: Module,
+ constant_module: Module,
+) -> Option<(TextSize, IndentLevel, Option<FileId>, String)> {
+ if current_module == constant_module {
+ // insert in current file
+ return None;
+ }
+ let in_file_source = current_module.definition_source(ctx.sema.db);
+ let file_id = in_file_source.file_id.original_file(ctx.sema.db.upcast());
+ match in_file_source.value {
+ hir::ModuleSource::Module(module_node) => {
+ let indent = IndentLevel::from_node(module_node.syntax());
+ let l_curly_token = module_node.item_list()?.l_curly_token()?;
+ let offset = l_curly_token.text_range().end();
+
+ let siblings_has_newline = l_curly_token
+ .siblings_with_tokens(Direction::Next)
+ .find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains("\n"))
+ .is_some();
+ let post_string =
+ if siblings_has_newline { format!("{}", indent) } else { format!("\n{}", indent) };
+ Some((offset, indent + 1, Some(file_id), post_string))
+ }
+ _ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_trivial() {
+ check_assist(
+ generate_constant,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ let v = S::new(CAPA$0CITY);
+}"#,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ const CAPACITY: usize = $0;
+ let v = S::new(CAPACITY);
+}"#,
+ );
+ }
+ #[test]
+ fn test_wont_apply_when_defined() {
+ cov_mark::check!(already_defined);
+ check_assist_not_applicable(
+ generate_constant,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ const CAPACITY: usize = 10;
+ let v = S::new(CAPAC$0ITY);
+}"#,
+ );
+ }
+ #[test]
+ fn test_wont_apply_when_maybe_not_constant() {
+ cov_mark::check!(not_constant_name);
+ check_assist_not_applicable(
+ generate_constant,
+ r#"struct S { i: usize }
+impl S {
+ pub fn new(n: usize) {}
+}
+fn main() {
+ let v = S::new(capa$0city);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_constant_with_path() {
+ check_assist(
+ generate_constant,
+ r#"mod foo {}
+fn bar() -> i32 {
+ foo::A_CON$0STANT
+}"#,
+ r#"mod foo {
+ pub const A_CONSTANT: i32 = $0;
+}
+fn bar() -> i32 {
+ foo::A_CONSTANT
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_constant_with_longer_path() {
+ check_assist(
+ generate_constant,
+ r#"mod foo {
+ pub mod goo {}
+}
+fn bar() -> i32 {
+ foo::goo::A_CON$0STANT
+}"#,
+ r#"mod foo {
+ pub mod goo {
+ pub const A_CONSTANT: i32 = $0;
+ }
+}
+fn bar() -> i32 {
+ foo::goo::A_CONSTANT
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_constant_with_not_exist_longer_path() {
+ check_assist(
+ generate_constant,
+ r#"fn bar() -> i32 {
+ foo::goo::A_CON$0STANT
+}"#,
+ r#"mod foo {
+ pub mod goo {
+ pub const A_CONSTANT: i32 = $0;
+ }
+}
+fn bar() -> i32 {
+ foo::goo::A_CONSTANT
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
new file mode 100644
index 000000000..5e9995a98
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
@@ -0,0 +1,179 @@
+use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_default_from_enum_variant
+//
+// Adds a Default impl for an enum using a variant.
+//
+// ```
+// enum Version {
+// Undefined,
+// Minor$0,
+// Major,
+// }
+// ```
+// ->
+// ```
+// enum Version {
+// Undefined,
+// Minor,
+// Major,
+// }
+//
+// impl Default for Version {
+// fn default() -> Self {
+// Self::Minor
+// }
+// }
+// ```
+pub(crate) fn generate_default_from_enum_variant(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let enum_name = variant.parent_enum().name()?;
+ if !matches!(variant.kind(), ast::StructKind::Unit) {
+ cov_mark::hit!(test_gen_default_on_non_unit_variant_not_implemented);
+ return None;
+ }
+
+ if existing_default_impl(&ctx.sema, &variant).is_some() {
+ cov_mark::hit!(test_gen_default_impl_already_exists);
+ return None;
+ }
+
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId("generate_default_from_enum_variant", AssistKind::Generate),
+ "Generate `Default` impl from this enum variant",
+ target,
+ |edit| {
+ let start_offset = variant.parent_enum().syntax().text_range().end();
+ let buf = format!(
+ r#"
+
+impl Default for {0} {{
+ fn default() -> Self {{
+ Self::{1}
+ }}
+}}"#,
+ enum_name, variant_name
+ );
+ edit.insert(start_offset, buf);
+ },
+ )
+}
+
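+// Returns `Some(())` when the variant's parent enum already implements `Default`,
+// in which case the assist is not offered.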
+fn existing_default_impl(
+ sema: &'_ hir::Semantics<'_, RootDatabase>,
+ variant: &ast::Variant,
+) -> Option<()> {
+ let variant = sema.to_def(variant)?;
+ let enum_ = variant.parent_enum(sema.db);
+ let krate = enum_.module(sema.db).krate();
+
+ let default_trait = FamousDefs(sema, krate).core_default_Default()?;
+ let enum_type = enum_.ty(sema.db);
+
+ if enum_type.impls_trait(sema.db, default_trait, &[]) {
+ Some(())
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_default_from_variant() {
+ check_assist(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#,
+ r#"
+enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Default for Variant {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_default_already_implemented() {
+ cov_mark::check!(test_gen_default_impl_already_exists);
+ check_assist_not_applicable(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}
+
+impl Default for Variant {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_no_element() {
+ cov_mark::check!(test_gen_default_on_non_unit_variant_not_implemented);
+ check_assist_not_applicable(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant {
+ Undefined,
+ Minor(u32)$0,
+ Major,
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_default_from_variant_with_one_variant() {
+ check_assist(
+ generate_default_from_enum_variant,
+ r#"
+//- minicore: default
+enum Variant { Undefi$0ned }
+"#,
+ r#"
+enum Variant { Undefined }
+
+impl Default for Variant {
+ fn default() -> Self {
+ Self::Undefined
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
new file mode 100644
index 000000000..cbd33de19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -0,0 +1,657 @@
+use ide_db::famous_defs::FamousDefs;
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl},
+ AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId,
+};
+
+// Assist: generate_default_from_new
+//
+// Generates a default implementation from the new method.
+//
+// ```
+// struct Example { _inner: () }
+//
+// impl Example {
+// pub fn n$0ew() -> Self {
+// Self { _inner: () }
+// }
+// }
+// ```
+// ->
+// ```
+// struct Example { _inner: () }
+//
+// impl Example {
+// pub fn new() -> Self {
+// Self { _inner: () }
+// }
+// }
+//
+// impl Default for Example {
+// fn default() -> Self {
+// Self::new()
+// }
+// }
+// ```
+pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let fn_node = ctx.find_node_at_offset::<ast::Fn>()?;
+ let fn_name = fn_node.name()?;
+
+ if fn_name.text() != "new" {
+ cov_mark::hit!(other_function_than_new);
+ return None;
+ }
+
+ if fn_node.param_list()?.params().next().is_some() {
+ cov_mark::hit!(new_function_with_parameters);
+ return None;
+ }
+
+ let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
+ if is_default_implemented(ctx, &impl_) {
+ cov_mark::hit!(default_block_is_already_present);
+ cov_mark::hit!(struct_in_module_with_default);
+ return None;
+ }
+
+ let insert_location = impl_.syntax().text_range();
+
+ acc.add(
+ AssistId("generate_default_from_new", crate::AssistKind::Generate),
+ "Generate a Default impl from a new fn",
+ insert_location,
+ move |builder| {
+ let default_code = " fn default() -> Self {
+ Self::new()
+ }";
+ let code = generate_trait_impl_text_from_impl(&impl_, "Default", default_code);
+ builder.insert(insert_location.end(), code);
+ },
+ )
+}
+
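+// Builds the text of an `impl <trait_text> for Self` block containing `code`,
+// copying the generic parameters and where-clause from the existing inherent impl.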
+fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
+ let generic_params = impl_.generic_param_list();
+ let mut buf = String::with_capacity(code.len());
+ buf.push_str("\n\n");
+ buf.push_str("impl");
+
+ if let Some(generic_params) = &generic_params {
+ let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
+ let toc_params = generic_params.type_or_const_params().map(|toc_param| match toc_param {
+ ast::TypeOrConstParam::Type(type_param) => {
+ let mut buf = String::new();
+ if let Some(it) = type_param.name() {
+ format_to!(buf, "{}", it.syntax());
+ }
+ if let Some(it) = type_param.colon_token() {
+ format_to!(buf, "{} ", it);
+ }
+ if let Some(it) = type_param.type_bound_list() {
+ format_to!(buf, "{}", it.syntax());
+ }
+ buf
+ }
+ ast::TypeOrConstParam::Const(const_param) => const_param.syntax().to_string(),
+ });
+ let generics = lifetimes.chain(toc_params).format(", ");
+ format_to!(buf, "<{}>", generics);
+ }
+
+ buf.push(' ');
+ buf.push_str(trait_text);
+ buf.push_str(" for ");
+ buf.push_str(&impl_.self_ty().unwrap().syntax().text().to_string());
+
+ match impl_.where_clause() {
+ Some(where_clause) => {
+ format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
+ }
+ None => {
+ format_to!(buf, " {{\n{}\n}}", code);
+ }
+ }
+
+ buf
+}
+
+fn is_default_implemented(ctx: &AssistContext<'_>, impl_: &Impl) -> bool {
+ let db = ctx.sema.db;
+ let impl_ = ctx.sema.to_def(impl_);
+ let impl_def = match impl_ {
+ Some(value) => value,
+ None => return false,
+ };
+
+ let ty = impl_def.self_ty(db);
+ let krate = impl_def.module(db).krate();
+ let default = FamousDefs(&ctx.sema, krate).core_default_Default();
+ let default_trait = match default {
+ Some(value) => value,
+ None => return false,
+ };
+
+ ty.impls_trait(db, default_trait, &[])
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn generate_default() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+
+impl Example {
+ pub fn ne$0w() -> Self {
+ Self { _inner: () }
+ }
+}
+
+fn main() {}
+"#,
+ r#"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+fn main() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_default2() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Test { value: u32 }
+
+impl Test {
+ pub fn ne$0w() -> Self {
+ Self { value: 0 }
+ }
+}
+"#,
+ r#"
+struct Test { value: u32 }
+
+impl Test {
+ pub fn new() -> Self {
+ Self { value: 0 }
+ }
+}
+
+impl Default for Test {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generic() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T> {
+ _bar: *mut T,
+}
+
+impl<T> Foo<T> {
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T> {
+ _bar: *mut T,
+}
+
+impl<T> Foo<T> {
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T> Default for Foo<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generics() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T, B> {
+ _tars: *mut T,
+ _bar: *mut B,
+}
+
+impl<T, B> Foo<T, B> {
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T, B> {
+ _tars: *mut T,
+ _bar: *mut B,
+}
+
+impl<T, B> Foo<T, B> {
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T, B> Default for Foo<T, B> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generic_and_bound() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T> {
+ pub fn ne$0w() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+"#,
+ r#"
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T> {
+ pub fn new() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+
+impl<T: From<i32>> Default for Foo<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generics_and_bounds() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B> {
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B> {
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T: From<i32>, B: From<i64>> Default for Foo<T, B> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generic_and_where() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T>
+where
+ Option<T>: Debug
+{
+ pub fn ne$0w() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+"#,
+ r#"
+pub struct Foo<T> {
+ t: T,
+}
+
+impl<T: From<i32>> Foo<T>
+where
+ Option<T>: Debug
+{
+ pub fn new() -> Self {
+ Foo { t: 0.into() }
+ }
+}
+
+impl<T: From<i32>> Default for Foo<T>
+where
+ Option<T>: Debug
+{
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_generics_and_wheres() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B>
+where
+ Option<T>: Debug, Option<B>: Debug,
+{
+ pub fn ne$0w() -> Self {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+pub struct Foo<T, B> {
+ _tars: T,
+ _bar: B,
+}
+
+impl<T: From<i32>, B: From<i64>> Foo<T, B>
+where
+ Option<T>: Debug, Option<B>: Debug,
+{
+ pub fn new() -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T: From<i32>, B: From<i64>> Default for Foo<T, B>
+where
+ Option<T>: Debug, Option<B>: Debug,
+{
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn new_function_with_parameters() {
+ cov_mark::check!(new_function_with_parameters);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+
+impl Example {
+ pub fn $0new(value: ()) -> Self {
+ Self { _inner: value }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn other_function_than_new() {
+ cov_mark::check!(other_function_than_new);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn a$0dd() -> Self {
+ Self { _inner: () }
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn default_block_is_already_present() {
+ cov_mark::check!(default_block_is_already_present);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+
+impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn standalone_new_function() {
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+fn n$0ew() -> u32 {
+ 0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_struct_blocks() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+struct Example { _inner: () }
+struct Test { value: u32 }
+
+impl Example {
+ pub fn new$0() -> Self {
+ Self { _inner: () }
+ }
+}
+"#,
+ r#"
+struct Example { _inner: () }
+struct Test { value: u32 }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn when_struct_is_after_impl() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+impl Example {
+ pub fn $0new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+struct Example { _inner: () }
+"#,
+ r#"
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+struct Example { _inner: () }
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_in_module() {
+ check_assist(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+mod test {
+ struct Example { _inner: () }
+
+ impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+ }
+}
+"#,
+ r#"
+mod test {
+ struct Example { _inner: () }
+
+ impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+ }
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_in_module_with_default() {
+ cov_mark::check!(struct_in_module_with_default);
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+//- minicore: default
+mod test {
+ struct Example { _inner: () }
+
+ impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+ }
+
+ impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
new file mode 100644
index 000000000..85b193663
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -0,0 +1,334 @@
+use hir::{self, HasCrate, HasSource, HasVisibility};
+use syntax::ast::{self, make, AstNode, HasGenericParams, HasName, HasVisibility as _};
+
+use crate::{
+ utils::{convert_param_list_to_arg_list, find_struct_impl, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+};
+use syntax::ast::edit::AstNodeEdit;
+
+// Assist: generate_delegate_methods
+//
+// Generate delegate methods.
+//
+// ```
+// struct Age(u8);
+// impl Age {
+// fn age(&self) -> u8 {
+// self.0
+// }
+// }
+//
+// struct Person {
+// ag$0e: Age,
+// }
+// ```
+// ->
+// ```
+// struct Age(u8);
+// impl Age {
+// fn age(&self) -> u8 {
+// self.0
+// }
+// }
+//
+// struct Person {
+// age: Age,
+// }
+//
+// impl Person {
+// $0fn age(&self) -> u8 {
+// self.age.age()
+// }
+// }
+// ```
+pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let strukt_name = strukt.name()?;
+ let current_module = ctx.sema.scope(strukt.syntax())?.module();
+
+ let (field_name, field_ty, target) = match ctx.find_node_at_offset::<ast::RecordField>() {
+ Some(field) => {
+ let field_name = field.name()?;
+ let field_ty = field.ty()?;
+ (format!("{}", field_name), field_ty, field.syntax().text_range())
+ }
+ None => {
+ let field = ctx.find_node_at_offset::<ast::TupleField>()?;
+ let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
+ let field_list_index = field_list.fields().position(|it| it == field)?;
+ let field_ty = field.ty()?;
+ (format!("{}", field_list_index), field_ty, field.syntax().text_range())
+ }
+ };
+
+ let sema_field_ty = ctx.sema.resolve_type(&field_ty)?;
+ let krate = sema_field_ty.krate(ctx.db());
+ let mut methods = vec![];
+ sema_field_ty.iterate_assoc_items(ctx.db(), krate, |item| {
+ if let hir::AssocItem::Function(f) = item {
+ if f.self_param(ctx.db()).is_some() && f.is_visible_from(ctx.db(), current_module) {
+ methods.push(f)
+ }
+ }
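+        // Returning `None` from the callback keeps the iteration going, so every
+        // eligible associated function ends up in `methods`.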
+ Option::<()>::None
+ });
+
+ for method in methods {
+ let adt = ast::Adt::Struct(strukt.clone());
+ let name = method.name(ctx.db()).to_string();
+ let impl_def = find_struct_impl(ctx, &adt, &name).flatten();
+ acc.add_group(
+ &GroupLabel("Generate delegate methods…".to_owned()),
+ AssistId("generate_delegate_methods", AssistKind::Generate),
+ format!("Generate delegate for `{}.{}()`", field_name, method.name(ctx.db())),
+ target,
+ |builder| {
+ // Create the function
+ let method_source = match method.source(ctx.db()) {
+ Some(source) => source.value,
+ None => return,
+ };
+ let method_name = method.name(ctx.db());
+ let vis = method_source.visibility();
+ let name = make::name(&method.name(ctx.db()).to_string());
+ let params =
+ method_source.param_list().unwrap_or_else(|| make::param_list(None, []));
+ let type_params = method_source.generic_param_list();
+ let arg_list = match method_source.param_list() {
+ Some(list) => convert_param_list_to_arg_list(list),
+ None => make::arg_list([]),
+ };
+ let tail_expr = make::expr_method_call(
+ make::ext::field_from_idents(["self", &field_name]).unwrap(), // This unwrap is ok because we have at least 1 arg in the list
+ make::name_ref(&method_name.to_string()),
+ arg_list,
+ );
+ let body = make::block_expr([], Some(tail_expr));
+ let ret_type = method_source.ret_type();
+ let is_async = method_source.async_token().is_some();
+ let f = make::fn_(vis, name, type_params, params, body, ret_type, is_async)
+ .indent(ast::edit::IndentLevel(1))
+ .clone_for_update();
+
+ let cursor = Cursor::Before(f.syntax());
+
+ // Create or update an impl block, attach the function to it,
+ // then insert into our code.
+ match impl_def {
+ Some(impl_def) => {
+ // Remember where in our source our `impl` block lives.
+ let impl_def = impl_def.clone_for_update();
+ let old_range = impl_def.syntax().text_range();
+
+ // Attach the function to the impl block
+ let assoc_items = impl_def.get_or_create_assoc_item_list();
+ assoc_items.add_item(f.clone().into());
+
+ // Update the impl block.
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = render_snippet(cap, impl_def.syntax(), cursor);
+ builder.replace_snippet(cap, old_range, snippet);
+ }
+ None => {
+ builder.replace(old_range, impl_def.syntax().to_string());
+ }
+ }
+ }
+ None => {
+                // Create a new impl block and attach the function to it
+ let name = &strukt_name.to_string();
+ let params = strukt.generic_param_list();
+ let ty_params = params.clone();
+ let impl_def = make::impl_(make::ext::ident_path(name), params, ty_params)
+ .clone_for_update();
+ let assoc_items = impl_def.get_or_create_assoc_item_list();
+ assoc_items.add_item(f.clone().into());
+
+ // Insert the impl block.
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let offset = strukt.syntax().text_range().end();
+ let snippet = render_snippet(cap, impl_def.syntax(), cursor);
+ let snippet = format!("\n\n{}", snippet);
+ builder.insert_snippet(cap, offset, snippet);
+ }
+ None => {
+ let offset = strukt.syntax().text_range().end();
+ let snippet = format!("\n\n{}", impl_def.syntax());
+ builder.insert(offset, snippet);
+ }
+ }
+ }
+ }
+ },
+ )?;
+ }
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_delegate_create_impl_block() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}"#,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_update_impl_block() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}
+
+impl Person {}"#,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_tuple_struct() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person(A$0ge);"#,
+ r#"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person(Age);
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.0.age()
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_enable_all_attributes() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Age<T>(T);
+impl<T> Age<T> {
+ pub(crate) async fn age<J, 'a>(&'a mut self, ty: T, arg: J) -> T {
+ self.0
+ }
+}
+
+struct Person<T> {
+ ag$0e: Age<T>,
+}"#,
+ r#"
+struct Age<T>(T);
+impl<T> Age<T> {
+ pub(crate) async fn age<J, 'a>(&'a mut self, ty: T, arg: J) -> T {
+ self.0
+ }
+}
+
+struct Person<T> {
+ age: Age<T>,
+}
+
+impl<T> Person<T> {
+ $0pub(crate) async fn age<J, 'a>(&'a mut self, ty: T, arg: J) -> T {
+ self.age.age(ty, arg)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_delegate_visibility() {
+ check_assist_not_applicable(
+ generate_delegate_methods,
+ r#"
+mod m {
+ pub struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+}
+
+struct Person {
+ ag$0e: m::Age,
+}"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
new file mode 100644
index 000000000..b9637ee8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
@@ -0,0 +1,343 @@
+use std::fmt::Display;
+
+use hir::{ModPath, ModuleDef};
+use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxNode,
+};
+
+use crate::{
+ assist_context::{AssistBuilder, AssistContext, Assists},
+ utils::generate_trait_impl_text,
+ AssistId, AssistKind,
+};
+
+// Assist: generate_deref
+//
+// Generate `Deref` impl using the given struct field.
+//
+// ```
+// # //- minicore: deref, deref_mut
+// struct A;
+// struct B {
+// $0a: A
+// }
+// ```
+// ->
+// ```
+// struct A;
+// struct B {
+// a: A
+// }
+//
+// impl core::ops::Deref for B {
+// type Target = A;
+//
+// fn deref(&self) -> &Self::Target {
+// &self.a
+// }
+// }
+// ```
+pub(crate) fn generate_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ generate_record_deref(acc, ctx).or_else(|| generate_tuple_deref(acc, ctx))
+}
+
+fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+
+ let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
+ None => DerefType::Deref,
+ Some(DerefType::Deref) => DerefType::DerefMut,
+ Some(DerefType::DerefMut) => {
+ cov_mark::hit!(test_add_record_deref_impl_already_exists);
+ return None;
+ }
+ };
+
+ let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
+ let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
+ let trait_path = module.find_use_path(ctx.db(), ModuleDef::Trait(trait_))?;
+
+ let field_type = field.ty()?;
+ let field_name = field.name()?;
+ let target = field.syntax().text_range();
+ acc.add(
+ AssistId("generate_deref", AssistKind::Generate),
+ format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field_name),
+ target,
+ |edit| {
+ generate_edit(
+ edit,
+ strukt,
+ field_type.syntax(),
+ field_name.syntax(),
+ deref_type_to_generate,
+ trait_path,
+ )
+ },
+ )
+}
+
+fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::TupleField>()?;
+ let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
+ let field_list_index =
+ field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?;
+
+ let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) {
+ None => DerefType::Deref,
+ Some(DerefType::Deref) => DerefType::DerefMut,
+ Some(DerefType::DerefMut) => {
+ cov_mark::hit!(test_add_field_deref_impl_already_exists);
+ return None;
+ }
+ };
+
+ let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
+ let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
+ let trait_path = module.find_use_path(ctx.db(), ModuleDef::Trait(trait_))?;
+
+ let field_type = field.ty()?;
+ let target = field.syntax().text_range();
+ acc.add(
+ AssistId("generate_deref", AssistKind::Generate),
+ format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field.syntax()),
+ target,
+ |edit| {
+ generate_edit(
+ edit,
+ strukt,
+ field_type.syntax(),
+ field_list_index,
+ deref_type_to_generate,
+ trait_path,
+ )
+ },
+ )
+}
+
+fn generate_edit(
+ edit: &mut AssistBuilder,
+ strukt: ast::Struct,
+ field_type_syntax: &SyntaxNode,
+ field_name: impl Display,
+ deref_type: DerefType,
+ trait_path: ModPath,
+) {
+ let start_offset = strukt.syntax().text_range().end();
+ let impl_code = match deref_type {
+ DerefType::Deref => format!(
+ r#" type Target = {0};
+
+ fn deref(&self) -> &Self::Target {{
+ &self.{1}
+ }}"#,
+ field_type_syntax, field_name
+ ),
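+        // `DerefMut` reuses the `Target` associated type from the existing `Deref` impl,
+        // so only `deref_mut` needs to be generated here.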
+ DerefType::DerefMut => format!(
+ r#" fn deref_mut(&mut self) -> &mut Self::Target {{
+ &mut self.{}
+ }}"#,
+ field_name
+ ),
+ };
+ let strukt_adt = ast::Adt::Struct(strukt);
+ let deref_impl = generate_trait_impl_text(&strukt_adt, &trait_path.to_string(), &impl_code);
+ edit.insert(start_offset, deref_impl);
+}
+
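+/// Returns the "most derived" deref trait already implemented for the struct:
+/// `None` if neither, `Some(Deref)` if only `Deref`, `Some(DerefMut)` if both.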
+fn existing_deref_impl(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ strukt: &ast::Struct,
+) -> Option<DerefType> {
+ let strukt = sema.to_def(strukt)?;
+ let krate = strukt.module(sema.db).krate();
+
+ let deref_trait = FamousDefs(sema, krate).core_ops_Deref()?;
+ let deref_mut_trait = FamousDefs(sema, krate).core_ops_DerefMut()?;
+ let strukt_type = strukt.ty(sema.db);
+
+ if strukt_type.impls_trait(sema.db, deref_trait, &[]) {
+ if strukt_type.impls_trait(sema.db, deref_mut_trait, &[]) {
+ Some(DerefType::DerefMut)
+ } else {
+ Some(DerefType::Deref)
+ }
+ } else {
+ None
+ }
+}
+
+#[derive(Debug)]
+enum DerefType {
+ Deref,
+ DerefMut,
+}
+
+impl DerefType {
+ fn to_trait(
+ &self,
+ sema: &hir::Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ ) -> Option<hir::Trait> {
+ match self {
+ DerefType::Deref => FamousDefs(sema, krate).core_ops_Deref(),
+ DerefType::DerefMut => FamousDefs(sema, krate).core_ops_DerefMut(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_record_deref() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A { }
+struct B { $0a: A }"#,
+ r#"
+struct A { }
+struct B { a: A }
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_record_deref_short_path() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A { }
+struct B { $0a: A }"#,
+ r#"
+use core::ops::Deref;
+struct A { }
+struct B { a: A }
+
+impl Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_field_deref_idx_0() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A { }
+struct B($0A);"#,
+ r#"
+struct A { }
+struct B(A);
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}"#,
+ );
+ }
+ #[test]
+ fn test_generate_field_deref_idx_1() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A { }
+struct B(u8, $0A);"#,
+ r#"
+struct A { }
+struct B(u8, A);
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generates_derefmut_when_deref_present() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref, deref_mut
+struct B { $0a: u8 }
+
+impl core::ops::Deref for B {}
+"#,
+ r#"
+struct B { a: u8 }
+
+impl core::ops::DerefMut for B {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.a
+ }
+}
+
+impl core::ops::Deref for B {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_record_deref_not_applicable_if_already_impl() {
+ cov_mark::check!(test_add_record_deref_impl_already_exists);
+ check_assist_not_applicable(
+ generate_deref,
+ r#"
+//- minicore: deref, deref_mut
+struct A { }
+struct B { $0a: A }
+
+impl core::ops::Deref for B {}
+impl core::ops::DerefMut for B {}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_generate_field_deref_not_applicable_if_already_impl() {
+ cov_mark::check!(test_add_field_deref_impl_already_exists);
+ check_assist_not_applicable(
+ generate_deref,
+ r#"
+//- minicore: deref, deref_mut
+struct A { }
+struct B($0A);
+
+impl core::ops::Deref for B {}
+impl core::ops::DerefMut for B {}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
new file mode 100644
index 000000000..339245b94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
@@ -0,0 +1,132 @@
+use syntax::{
+ ast::{self, AstNode, HasAttrs},
+ SyntaxKind::{COMMENT, WHITESPACE},
+ TextSize,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_derive
+//
+// Adds a new `#[derive()]` clause to a struct or enum.
+//
+// ```
+// struct Point {
+// x: u32,
+// y: u32,$0
+// }
+// ```
+// ->
+// ```
+// #[derive($0)]
+// struct Point {
+// x: u32,
+// y: u32,
+// }
+// ```
+pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let cap = ctx.config.snippet_cap?;
+ let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
+ let node_start = derive_insertion_offset(&nominal)?;
+ let target = nominal.syntax().text_range();
+ acc.add(
+ AssistId("generate_derive", AssistKind::Generate),
+ "Add `#[derive]`",
+ target,
+ |builder| {
+ let derive_attr = nominal
+ .attrs()
+ .filter_map(|x| x.as_simple_call())
+ .filter(|(name, _arg)| name == "derive")
+ .map(|(_name, arg)| arg)
+ .next();
+ match derive_attr {
+ None => {
+ builder.insert_snippet(cap, node_start, "#[derive($0)]\n");
+ }
+ Some(tt) => {
+ // Just move the cursor.
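+                    // The snippet is placed just before the closing `)` of the existing
+                    // `#[derive(...)]` list so the user can append another trait name.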
+ builder.insert_snippet(
+ cap,
+ tt.syntax().text_range().end() - TextSize::of(')'),
+ "$0",
+ )
+ }
+ };
+ },
+ )
+}
+
+// Insert `derive` after doc comments.
+fn derive_insertion_offset(nominal: &ast::Adt) -> Option<TextSize> {
+ let non_ws_child = nominal
+ .syntax()
+ .children_with_tokens()
+ .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
+ Some(non_ws_child.text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn add_derive_new() {
+ check_assist(
+ generate_derive,
+ "struct Foo { a: i32, $0}",
+ "#[derive($0)]\nstruct Foo { a: i32, }",
+ );
+ check_assist(
+ generate_derive,
+ "struct Foo { $0 a: i32, }",
+ "#[derive($0)]\nstruct Foo { a: i32, }",
+ );
+ }
+
+ #[test]
+ fn add_derive_existing() {
+ check_assist(
+ generate_derive,
+ "#[derive(Clone)]\nstruct Foo { a: i32$0, }",
+ "#[derive(Clone$0)]\nstruct Foo { a: i32, }",
+ );
+ }
+
+ #[test]
+ fn add_derive_new_with_doc_comment() {
+ check_assist(
+ generate_derive,
+ "
+/// `Foo` is a pretty important struct.
+/// It does stuff.
+struct Foo { a: i32$0, }
+ ",
+ "
+/// `Foo` is a pretty important struct.
+/// It does stuff.
+#[derive($0)]
+struct Foo { a: i32, }
+ ",
+ );
+ }
+
+ #[test]
+ fn add_derive_target() {
+ check_assist_target(
+ generate_derive,
+ "
+struct SomeThingIrrelevant;
+/// `Foo` is a pretty important struct.
+/// It does stuff.
+struct Foo { a: i32$0, }
+struct EvenMoreIrrelevant;
+ ",
+ "/// `Foo` is a pretty important struct.
+/// It does stuff.
+struct Foo { a: i32, }",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
new file mode 100644
index 000000000..c91141f8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
@@ -0,0 +1,1328 @@
+use hir::{AsAssocItem, HasVisibility, ModuleDef, Visibility};
+use ide_db::assists::{AssistId, AssistKind};
+use itertools::Itertools;
+use stdx::{format_to, to_lower_snake_case};
+use syntax::{
+ algo::skip_whitespace_token,
+ ast::{self, edit::IndentLevel, HasDocComments, HasName},
+ match_ast, AstNode, AstToken,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: generate_documentation_template
+//
+// Adds a documentation template above a function definition / declaration.
+//
+// ```
+// pub struct S;
+// impl S {
+// pub unsafe fn set_len$0(&mut self, len: usize) -> Result<(), std::io::Error> {
+// /* ... */
+// }
+// }
+// ```
+// ->
+// ```
+// pub struct S;
+// impl S {
+// /// Sets the length of this [`S`].
+// ///
+// /// # Errors
+// ///
+// /// This function will return an error if .
+// ///
+// /// # Safety
+// ///
+// /// .
+// pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
+// /* ... */
+// }
+// }
+// ```
+pub(crate) fn generate_documentation_template(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let name = ctx.find_node_at_offset::<ast::Name>()?;
+ let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
+ if is_in_trait_impl(&ast_func, ctx) || ast_func.doc_comments().next().is_some() {
+ return None;
+ }
+
+ let parent_syntax = ast_func.syntax();
+ let text_range = parent_syntax.text_range();
+ let indent_level = IndentLevel::from_node(parent_syntax);
+
+ acc.add(
+ AssistId("generate_documentation_template", AssistKind::Generate),
+ "Generate a documentation template",
+ text_range,
+ |builder| {
+ // Introduction / short function description before the sections
+ let mut doc_lines = vec![introduction_builder(&ast_func, ctx).unwrap_or(".".into())];
+ // Then come the sections
+ for section_builder in [panics_builder, errors_builder, safety_builder] {
+ if let Some(mut lines) = section_builder(&ast_func) {
+ doc_lines.push("".into());
+ doc_lines.append(&mut lines);
+ }
+ }
+ builder.insert(text_range.start(), documentation_from_lines(doc_lines, indent_level));
+ },
+ )
+}
+
+// Assist: generate_doc_example
+//
+// Generates a rustdoc example when editing an item's documentation.
+//
+// ```
+// /// Adds two numbers.$0
+// pub fn add(a: i32, b: i32) -> i32 { a + b }
+// ```
+// ->
+// ```
+// /// Adds two numbers.
+// ///
+// /// # Examples
+// ///
+// /// ```
+// /// use test::add;
+// ///
+// /// assert_eq!(add(a, b), );
+// /// ```
+// pub fn add(a: i32, b: i32) -> i32 { a + b }
+// ```
+pub(crate) fn generate_doc_example(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let tok: ast::Comment = ctx.find_token_at_offset()?;
+ let node = tok.syntax().parent()?;
+ let last_doc_token =
+ ast::AnyHasDocComments::cast(node.clone())?.doc_comments().last()?.syntax().clone();
+ let next_token = skip_whitespace_token(last_doc_token.next_token()?, syntax::Direction::Next)?;
+
+ let example = match_ast! {
+ match node {
+ ast::Fn(it) => make_example_for_fn(&it, ctx)?,
+ _ => return None,
+ }
+ };
+
+ let mut lines = string_vec_from(&["", "# Examples", "", "```"]);
+ lines.extend(example.lines().map(String::from));
+ lines.push("```".into());
+ let indent_level = IndentLevel::from_node(&node);
+
+ acc.add(
+ AssistId("generate_doc_example", AssistKind::Generate),
+ "Generate a documentation example",
+ node.text_range(),
+ |builder| {
+ builder.insert(
+ next_token.text_range().start(),
+ documentation_from_lines(lines, indent_level),
+ );
+ },
+ )
+}
+
+fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+ if !is_public(ast_func, ctx)? {
+ // Doctests for private items can't actually name the item, so they're pretty useless.
+ return None;
+ }
+
+ if is_in_trait_def(ast_func, ctx) {
+ // This is not yet implemented.
+ return None;
+ }
+
+ let mut example = String::new();
+
+ let is_unsafe = ast_func.unsafe_token().is_some();
+ let param_list = ast_func.param_list()?;
+ let ref_mut_params = ref_mut_params(&param_list);
+ let self_name = self_name(ast_func);
+
+ format_to!(example, "use {};\n\n", build_path(ast_func, ctx)?);
+ if let Some(self_name) = &self_name {
+ if let Some(mtbl) = is_ref_mut_self(ast_func) {
+            let mtbl = if mtbl { " mut" } else { "" };
+ format_to!(example, "let{} {} = ;\n", mtbl, self_name);
+ }
+ }
+ for param_name in &ref_mut_params {
+ format_to!(example, "let mut {} = ;\n", param_name);
+ }
+ // Call the function, check result
+ let function_call = function_call(ast_func, &param_list, self_name.as_deref(), is_unsafe)?;
+ if returns_a_value(ast_func, ctx) {
+ if count_parameters(&param_list) < 3 {
+ format_to!(example, "assert_eq!({}, );\n", function_call);
+ } else {
+ format_to!(example, "let result = {};\n", function_call);
+ example.push_str("assert_eq!(result, );\n");
+ }
+ } else {
+ format_to!(example, "{};\n", function_call);
+ }
+ // Check the mutated values
+ if is_ref_mut_self(ast_func) == Some(true) {
+ format_to!(example, "assert_eq!({}, );", self_name?);
+ }
+ for param_name in &ref_mut_params {
+ format_to!(example, "assert_eq!({}, );", param_name);
+ }
+ Some(example)
+}
+
+fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+ let hir_func = ctx.sema.to_def(ast_func)?;
+ let container = hir_func.as_assoc_item(ctx.db())?.container(ctx.db());
+ if let hir::AssocItemContainer::Impl(imp) = container {
+ let ret_ty = hir_func.ret_type(ctx.db());
+ let self_ty = imp.self_ty(ctx.db());
+ let name = ast_func.name()?.to_string();
+ let linkable_self_ty = self_type_without_lifetimes(ast_func);
+ let linkable_self_ty = linkable_self_ty.as_deref();
+
+ let intro_for_new = || {
+ let is_new = name == "new";
+ if is_new && ret_ty == self_ty {
+ Some(format!("Creates a new [`{}`].", linkable_self_ty?))
+ } else {
+ None
+ }
+ };
+
+ let intro_for_getter = || match (
+ hir_func.self_param(ctx.sema.db),
+ &*hir_func.params_without_self(ctx.sema.db),
+ ) {
+ (Some(self_param), []) if self_param.access(ctx.sema.db) != hir::Access::Owned => {
+ if name.starts_with("as_") || name.starts_with("to_") || name == "get" {
+ return None;
+ }
+ let mut what = name.trim_end_matches("_mut").replace('_', " ");
+ if what == "len" {
+ what = "length".into()
+ }
+ let reference = if ret_ty.is_mutable_reference() {
+ " a mutable reference to"
+ } else if ret_ty.is_reference() {
+ " a reference to"
+ } else {
+ ""
+ };
+ Some(format!("Returns{reference} the {what} of this [`{}`].", linkable_self_ty?))
+ }
+ _ => None,
+ };
+
+ let intro_for_setter = || {
+ if !name.starts_with("set_") {
+ return None;
+ }
+
+ let mut what = name.trim_start_matches("set_").replace('_', " ");
+ if what == "len" {
+ what = "length".into()
+ };
+ Some(format!("Sets the {what} of this [`{}`].", linkable_self_ty?))
+ };
+
+ if let Some(intro) = intro_for_new() {
+ return Some(intro);
+ }
+ if let Some(intro) = intro_for_getter() {
+ return Some(intro);
+ }
+ if let Some(intro) = intro_for_setter() {
+ return Some(intro);
+ }
+ }
+ None
+}
+
+/// Builds an optional `# Panics` section
+fn panics_builder(ast_func: &ast::Fn) -> Option<Vec<String>> {
+ match can_panic(ast_func) {
+ Some(true) => Some(string_vec_from(&["# Panics", "", "Panics if ."])),
+ _ => None,
+ }
+}
+
+/// Builds an optional `# Errors` section
+fn errors_builder(ast_func: &ast::Fn) -> Option<Vec<String>> {
+ match return_type(ast_func)?.to_string().contains("Result") {
+ true => Some(string_vec_from(&["# Errors", "", "This function will return an error if ."])),
+ false => None,
+ }
+}
+
+/// Builds an optional `# Safety` section
+fn safety_builder(ast_func: &ast::Fn) -> Option<Vec<String>> {
+ let is_unsafe = ast_func.unsafe_token().is_some();
+ match is_unsafe {
+ true => Some(string_vec_from(&["# Safety", "", "."])),
+ false => None,
+ }
+}
+
+/// Checks if the function is public / exported
+fn is_public(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<bool> {
+ let hir_func = ctx.sema.to_def(ast_func)?;
+ Some(
+ hir_func.visibility(ctx.db()) == Visibility::Public
+ && all_parent_mods_public(&hir_func, ctx),
+ )
+}
+
+/// Checks that all parent modules of the function are public / exported
+fn all_parent_mods_public(hir_func: &hir::Function, ctx: &AssistContext<'_>) -> bool {
+ let mut module = hir_func.module(ctx.db());
+ loop {
+ if let Some(parent) = module.parent(ctx.db()) {
+ match ModuleDef::from(module).visibility(ctx.db()) {
+ Visibility::Public => module = parent,
+ _ => break false,
+ }
+ } else {
+ break true;
+ }
+ }
+}
+
+/// Returns the name of the current crate
+fn crate_name(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+ let krate = ctx.sema.scope(ast_func.syntax())?.krate();
+ Some(krate.display_name(ctx.db())?.to_string())
+}
+
+/// Returns `None` if the function has no body; otherwise, a bool guessing whether the function can panic
+fn can_panic(ast_func: &ast::Fn) -> Option<bool> {
+ let body = ast_func.body()?.to_string();
+ let can_panic = body.contains("panic!(")
+ // FIXME it would be better to not match `debug_assert*!` macro invocations
+ || body.contains("assert!(")
+ || body.contains(".unwrap()")
+ || body.contains(".expect(");
+ Some(can_panic)
+}
+
+/// Helper function to get the name that should be given to `self` arguments
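+/// (e.g. `my_struct` for a method implemented on `MyStruct`)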
+fn self_name(ast_func: &ast::Fn) -> Option<String> {
+ self_partial_type(ast_func).map(|name| to_lower_snake_case(&name))
+}
+
+/// Helper function to get the name of the type of `self`
+fn self_type(ast_func: &ast::Fn) -> Option<ast::Type> {
+ ast_func.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty())
+}
+
+/// Output the real name of `Self` like `MyType<T>`, without the lifetimes.
+fn self_type_without_lifetimes(ast_func: &ast::Fn) -> Option<String> {
+ let path_segment = match self_type(ast_func)? {
+ ast::Type::PathType(path_type) => path_type.path()?.segment()?,
+ _ => return None,
+ };
+ let mut name = path_segment.name_ref()?.to_string();
+ let generics = path_segment.generic_arg_list().into_iter().flat_map(|list| {
+ list.generic_args()
+ .filter(|generic| matches!(generic, ast::GenericArg::TypeArg(_)))
+ .map(|generic| generic.to_string())
+ });
+ let generics: String = generics.format(", ").to_string();
+ if !generics.is_empty() {
+ name.push('<');
+ name.push_str(&generics);
+ name.push('>');
+ }
+ Some(name)
+}
+
+/// Helper function to get the name of the type of `self` without generic arguments
+fn self_partial_type(ast_func: &ast::Fn) -> Option<String> {
+ let mut self_type = self_type(ast_func)?.to_string();
+ if let Some(idx) = self_type.find(|c| ['<', ' '].contains(&c)) {
+ self_type.truncate(idx);
+ }
+ Some(self_type)
+}
+
+/// Helper function to determine if the function is in a trait implementation
+fn is_in_trait_impl(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool {
+ ctx.sema
+ .to_def(ast_func)
+ .and_then(|hir_func| hir_func.as_assoc_item(ctx.db()))
+ .and_then(|assoc_item| assoc_item.containing_trait_impl(ctx.db()))
+ .is_some()
+}
+
+/// Helper function to determine if the function definition is in a trait definition
+fn is_in_trait_def(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool {
+ ctx.sema
+ .to_def(ast_func)
+ .and_then(|hir_func| hir_func.as_assoc_item(ctx.db()))
+ .and_then(|assoc_item| assoc_item.containing_trait(ctx.db()))
+ .is_some()
+}
+
+/// Returns `None` if there is no `self` parameter, `Some(true)` for `&mut self`, and `Some(false)` otherwise
+fn is_ref_mut_self(ast_func: &ast::Fn) -> Option<bool> {
+ let self_param = ast_func.param_list()?.self_param()?;
+ Some(self_param.mut_token().is_some() && self_param.amp_token().is_some())
+}
+
+/// Helper function to determine if a parameter is `&mut`
+fn is_a_ref_mut_param(param: &ast::Param) -> bool {
+ match param.ty() {
+ Some(ast::Type::RefType(param_ref)) => param_ref.mut_token().is_some(),
+ _ => false,
+ }
+}
+
+/// Helper function to build the list of `&mut` parameters
+fn ref_mut_params(param_list: &ast::ParamList) -> Vec<String> {
+ param_list
+ .params()
+ .filter_map(|param| match is_a_ref_mut_param(&param) {
+            // It might be better to also filter on the param name (perhaps by extracting a
+            // helper from `arguments_from_params`) to handle cases like `mut a: &mut T`.
+            // Handling most (not all) cases is enough here; the goal is just to produce a template.
+ true => Some(param.pat()?.to_string()),
+ false => None,
+ })
+ .collect()
+}
+
+/// Helper function to build the comma-separated list of arguments of the function
+fn arguments_from_params(param_list: &ast::ParamList) -> String {
+ let args_iter = param_list.params().map(|param| match param.pat() {
+        // To avoid emitting `mut` in the function call (which would be nonsense), the `Pat`
+        // cannot be written out as-is; its variants must be handled individually. Other
+        // variants (for instance `TuplePat`) could be handled later.
+ Some(ast::Pat::IdentPat(ident_pat)) => match ident_pat.name() {
+ Some(name) => match is_a_ref_mut_param(&param) {
+ true => format!("&mut {}", name),
+ false => name.to_string(),
+ },
+ None => "_".to_string(),
+ },
+ _ => "_".to_string(),
+ });
+ args_iter.format(", ").to_string()
+}
+
+/// Helper function to build a function call. Returns `None` if the expected `self_name` was not provided
+fn function_call(
+ ast_func: &ast::Fn,
+ param_list: &ast::ParamList,
+ self_name: Option<&str>,
+ is_unsafe: bool,
+) -> Option<String> {
+ let name = ast_func.name()?;
+ let arguments = arguments_from_params(param_list);
+ let function_call = if param_list.self_param().is_some() {
+ format!("{}.{}({})", self_name?, name, arguments)
+ } else if let Some(implementation) = self_partial_type(ast_func) {
+ format!("{}::{}({})", implementation, name, arguments)
+ } else {
+ format!("{}({})", name, arguments)
+ };
+ match is_unsafe {
+ true => Some(format!("unsafe {{ {} }}", function_call)),
+ false => Some(function_call),
+ }
+}
+
+/// Helper function to count the parameters including `self`
+fn count_parameters(param_list: &ast::ParamList) -> usize {
+ param_list.params().count() + if param_list.self_param().is_some() { 1 } else { 0 }
+}
+
+/// Helper function to transform lines of documentation into a Rust doc-comment block
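+///
+/// For example, `["Intro", "", "# Panics"]` at indent level 1 becomes
+/// `"/// Intro\n    ///\n    /// # Panics\n    "`; the trailing indent aligns the item the
+/// documentation is inserted above.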
+fn documentation_from_lines(doc_lines: Vec<String>, indent_level: IndentLevel) -> String {
+ let mut result = String::new();
+ for doc_line in doc_lines {
+ result.push_str("///");
+ if !doc_line.is_empty() {
+ result.push(' ');
+ result.push_str(&doc_line);
+ }
+ result.push('\n');
+ result.push_str(&indent_level.to_string());
+ }
+ result
+}
+
+/// Helper function to transform an array of borrowed strings to an owned `Vec<String>`
+fn string_vec_from(string_array: &[&str]) -> Vec<String> {
+ string_array.iter().map(|&s| s.to_owned()).collect()
+}
+
+/// Helper function to build the path of the module in which the node is located
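+/// (e.g. `test::a::b::noop` for a function `noop` in module `a::b` of crate `test`)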
+fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
+ let crate_name = crate_name(ast_func, ctx)?;
+ let leaf = self_partial_type(ast_func)
+ .or_else(|| ast_func.name().map(|n| n.to_string()))
+ .unwrap_or_else(|| "*".into());
+ let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
+ match module_def.canonical_path(ctx.db()) {
+ Some(path) => Some(format!("{}::{}::{}", crate_name, path, leaf)),
+ None => Some(format!("{}::{}", crate_name, leaf)),
+ }
+}
+
+/// Helper function to get the return type of a function
+fn return_type(ast_func: &ast::Fn) -> Option<ast::Type> {
+ ast_func.ret_type()?.ty()
+}
+
+/// Helper function to determine if the function returns some data
+fn returns_a_value(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> bool {
+ ctx.sema
+ .to_def(ast_func)
+ .map(|hir_func| hir_func.ret_type(ctx.db()))
+ .map(|ret_ty| !ret_ty.is_unit() && !ret_ty.is_never())
+ .unwrap_or(false)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_on_function_calls() {
+ check_assist_not_applicable(
+ generate_documentation_template,
+ r#"
+fn hello_world() {}
+fn calls_hello_world() {
+ hello_world$0();
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_in_trait_impl() {
+ check_assist_not_applicable(
+ generate_documentation_template,
+ r#"
+trait MyTrait {}
+struct MyStruct;
+impl MyTrait for MyStruct {
+ fn hello_world$0();
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_function_already_documented() {
+ check_assist_not_applicable(
+ generate_documentation_template,
+ r#"
+/// Some documentation here
+pub fn $0documented_function() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_noop_function() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn no$0op() {}
+"#,
+ r#"
+/// .
+pub fn noop() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn is_applicable_if_function_is_private() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+fn priv$0ate() {}
+"#,
+ r#"
+/// .
+fn private() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_doc_example_for_private_fn() {
+ check_assist_not_applicable(
+ generate_doc_example,
+ r#"
+///$0
+fn private() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_a_parameter() {
+ check_assist(
+ generate_doc_example,
+ r#"
+/// $0.
+pub fn noop_with_param(_a: i32) {}
+"#,
+ r#"
+/// .
+///
+/// # Examples
+///
+/// ```
+/// use test::noop_with_param;
+///
+/// noop_with_param(_a);
+/// ```
+pub fn noop_with_param(_a: i32) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_unsafe_function() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub unsafe fn no$0op_unsafe() {}
+"#,
+ r#"
+/// .
+///
+/// # Safety
+///
+/// .
+pub unsafe fn noop_unsafe() {}
+"#,
+ );
+ check_assist(
+ generate_doc_example,
+ r#"
+/// .
+///
+/// # Safety$0
+///
+/// .
+pub unsafe fn noop_unsafe() {}
+"#,
+ r#"
+/// .
+///
+/// # Safety
+///
+/// .
+///
+/// # Examples
+///
+/// ```
+/// use test::noop_unsafe;
+///
+/// unsafe { noop_unsafe() };
+/// ```
+pub unsafe fn noop_unsafe() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_panic_macro_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn panic$0s_if(a: bool) {
+ if a {
+ panic!();
+ }
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if(a: bool) {
+ if a {
+ panic!();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_assert_macro_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn $0panics_if_not(a: bool) {
+ assert!(a == true);
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if_not(a: bool) {
+ assert!(a == true);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_unwrap_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn $0panics_if_none(a: Option<()>) {
+ a.unwrap();
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if_none(a: Option<()>) {
+ a.unwrap();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_expect_can_panic() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn $0panics_if_none2(a: Option<()>) {
+ a.expect("Bouh!");
+}
+"#,
+ r#"
+/// .
+///
+/// # Panics
+///
+/// Panics if .
+pub fn panics_if_none2(a: Option<()>) {
+ a.expect("Bouh!");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn checks_output_in_example() {
+ check_assist(
+ generate_doc_example,
+ r#"
+///$0
+pub fn returns_a_value$0() -> i32 {
+ 0
+}
+"#,
+ r#"
+///
+///
+/// # Examples
+///
+/// ```
+/// use test::returns_a_value;
+///
+/// assert_eq!(returns_a_value(), );
+/// ```
+pub fn returns_a_value() -> i32 {
+ 0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_result_output() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub fn returns_a_result$0() -> Result<i32, std::io::Error> {
+ Ok(0)
+}
+"#,
+ r#"
+/// .
+///
+/// # Errors
+///
+/// This function will return an error if .
+pub fn returns_a_result() -> Result<i32, std::io::Error> {
+ Ok(0)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn checks_ref_mut_in_example() {
+ check_assist(
+ generate_doc_example,
+ r#"
+///$0
+pub fn modifies_a_value$0(a: &mut i32) {
+ *a = 0;
+}
+"#,
+ r#"
+///
+///
+/// # Examples
+///
+/// ```
+/// use test::modifies_a_value;
+///
+/// let mut a = ;
+/// modifies_a_value(&mut a);
+/// assert_eq!(a, );
+/// ```
+pub fn modifies_a_value(a: &mut i32) {
+ *a = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn stores_result_if_at_least_3_params() {
+ check_assist(
+ generate_doc_example,
+ r#"
+///$0
+pub fn sum3$0(a: i32, b: i32, c: i32) -> i32 {
+ a + b + c
+}
+"#,
+ r#"
+///
+///
+/// # Examples
+///
+/// ```
+/// use test::sum3;
+///
+/// let result = sum3(a, b, c);
+/// assert_eq!(result, );
+/// ```
+pub fn sum3(a: i32, b: i32, c: i32) -> i32 {
+ a + b + c
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_mods() {
+ check_assist(
+ generate_doc_example,
+ r#"
+pub mod a {
+ pub mod b {
+ ///$0
+ pub fn noop() {}
+ }
+}
+"#,
+ r#"
+pub mod a {
+ pub mod b {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::a::b::noop;
+ ///
+ /// noop();
+ /// ```
+ pub fn noop() {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_impl() {
+ check_assist(
+ generate_doc_example,
+ r#"
+pub struct MyStruct;
+impl MyStruct {
+ ///$0
+ pub fn noop() {}
+}
+"#,
+ r#"
+pub struct MyStruct;
+impl MyStruct {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::MyStruct;
+ ///
+ /// MyStruct::noop();
+ /// ```
+ pub fn noop() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_unsafe_fn_in_trait() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub trait MyTrait {
+ unsafe fn unsafe_funct$0ion_trait();
+}
+"#,
+ r#"
+pub trait MyTrait {
+ /// .
+ ///
+ /// # Safety
+ ///
+ /// .
+ unsafe fn unsafe_function_trait();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_trait_with_default_panicking() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub trait MyTrait {
+ fn function_trait_with_$0default_panicking() {
+ panic!()
+ }
+}
+"#,
+ r#"
+pub trait MyTrait {
+ /// .
+ ///
+ /// # Panics
+ ///
+ /// Panics if .
+ fn function_trait_with_default_panicking() {
+ panic!()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_fn_in_trait_returning_result() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub trait MyTrait {
+ fn function_tr$0ait_returning_result() -> Result<(), std::io::Error>;
+}
+"#,
+ r#"
+pub trait MyTrait {
+ /// .
+ ///
+ /// # Errors
+ ///
+ /// This function will return an error if .
+ fn function_trait_returning_result() -> Result<(), std::io::Error>;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_new() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct String(u8);
+impl String {
+ pub fn new$0(x: u8) -> String {
+ String(x)
+ }
+}
+"#,
+ r#"
+pub struct String(u8);
+impl String {
+ /// Creates a new [`String`].
+ pub fn new(x: u8) -> String {
+ String(x)
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct<T> {
+ pub fn new$0(x: T) -> MyGenericStruct<T> {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct<T> {
+ /// Creates a new [`MyGenericStruct<T>`].
+ pub fn new(x: T) -> MyGenericStruct<T> {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn removes_one_lifetime_from_description() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, T> {
+ pub x: &'a T,
+}
+impl<'a, T> MyGenericStruct<'a, T> {
+ pub fn new$0(x: &'a T) -> Self {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, T> {
+ pub x: &'a T,
+}
+impl<'a, T> MyGenericStruct<'a, T> {
+ /// Creates a new [`MyGenericStruct<T>`].
+ pub fn new(x: &'a T) -> Self {
+ MyGenericStruct { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn removes_all_lifetimes_from_description() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b, T> {
+ pub x: &'a T,
+ pub y: &'b T,
+}
+impl<'a, 'b, T> MyGenericStruct<'a, 'b, T> {
+ pub fn new$0(x: &'a T, y: &'b T) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b, T> {
+ pub x: &'a T,
+ pub y: &'b T,
+}
+impl<'a, 'b, T> MyGenericStruct<'a, 'b, T> {
+ /// Creates a new [`MyGenericStruct<T>`].
+ pub fn new(x: &'a T, y: &'b T) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn removes_all_lifetimes_and_brackets_from_description() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b> {
+ pub x: &'a usize,
+ pub y: &'b usize,
+}
+impl<'a, 'b> MyGenericStruct<'a, 'b> {
+ pub fn new$0(x: &'a usize, y: &'b usize) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct<'a, 'b> {
+ pub x: &'a usize,
+ pub y: &'b usize,
+}
+impl<'a, 'b> MyGenericStruct<'a, 'b> {
+ /// Creates a new [`MyGenericStruct`].
+ pub fn new(x: &'a usize, y: &'b usize) -> Self {
+ MyGenericStruct { x, y }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn detects_new_with_self() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct2<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct2<T> {
+ pub fn new$0(x: T) -> Self {
+ MyGenericStruct2 { x }
+ }
+}
+"#,
+ r#"
+#[derive(Debug, PartialEq)]
+pub struct MyGenericStruct2<T> {
+ pub x: T,
+}
+impl<T> MyGenericStruct2<T> {
+ /// Creates a new [`MyGenericStruct2<T>`].
+ pub fn new(x: T) -> Self {
+ MyGenericStruct2 { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn supports_method_call() {
+ check_assist(
+ generate_doc_example,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///$0
+ pub fn consume(self) {}
+}
+"#,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::MyGenericStruct;
+ ///
+ /// let my_generic_struct = ;
+ /// my_generic_struct.consume();
+ /// ```
+ pub fn consume(self) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn checks_modified_self_param() {
+ check_assist(
+ generate_doc_example,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///$0
+ pub fn modify(&mut self, new_value: T) {
+ self.x = new_value;
+ }
+}
+"#,
+ r#"
+impl<T> MyGenericStruct<T> {
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use test::MyGenericStruct;
+ ///
+ /// let mut my_generic_struct = ;
+ /// my_generic_struct.modify(new_value);
+ /// assert_eq!(my_generic_struct, );
+ /// ```
+ pub fn modify(&mut self, new_value: T) {
+ self.x = new_value;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generates_intro_for_getters() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn speed$0(&self) -> f32 { 0.0 }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns the speed of this [`S`].
+ pub fn speed(&self) -> f32 { 0.0 }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn data$0(&self) -> &[u8] { &[] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns a reference to the data of this [`S`].
+ pub fn data(&self) -> &[u8] { &[] }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn data$0(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns a mutable reference to the data of this [`S`].
+ pub fn data(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn data_mut$0(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Returns a mutable reference to the data of this [`S`].
+ pub fn data_mut(&mut self) -> &mut [u8] { &mut [] }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_getter_intro_for_prefixed_methods() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn as_bytes$0(&self) -> &[u8] { &[] }
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// .
+ pub fn as_bytes(&self) -> &[u8] { &[] }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generates_intro_for_setters() {
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn set_data$0(&mut self, data: Vec<u8>) {}
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Sets the data of this [`S`].
+ pub fn set_data(&mut self, data: Vec<u8>) {}
+}
+"#,
+ );
+ check_assist(
+ generate_documentation_template,
+ r#"
+pub struct S;
+impl S {
+ pub fn set_domain_name$0(&mut self, name: String) {}
+}
+"#,
+ r#"
+pub struct S;
+impl S {
+ /// Sets the domain name of this [`S`].
+ pub fn set_domain_name(&mut self, name: String) {}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
new file mode 100644
index 000000000..52d27d8a7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
@@ -0,0 +1,316 @@
+use ide_db::assists::GroupLabel;
+use stdx::to_lower_snake_case;
+use syntax::ast::HasVisibility;
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{
+ utils::{add_method_to_adt, find_struct_impl},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_enum_is_method
+//
+// Generate an `is_` method for this enum variant.
+//
+// ```
+// enum Version {
+// Undefined,
+// Minor$0,
+// Major,
+// }
+// ```
+// ->
+// ```
+// enum Version {
+// Undefined,
+// Minor,
+// Major,
+// }
+//
+// impl Version {
+// /// Returns `true` if the version is [`Minor`].
+// ///
+// /// [`Minor`]: Version::Minor
+// #[must_use]
+// fn is_minor(&self) -> bool {
+// matches!(self, Self::Minor)
+// }
+// }
+// ```
+pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let parent_enum = ast::Adt::Enum(variant.parent_enum());
+ let pattern_suffix = match variant.kind() {
+ ast::StructKind::Record(_) => " { .. }",
+ ast::StructKind::Tuple(_) => "(..)",
+ ast::StructKind::Unit => "",
+ };
+
+ let enum_name = parent_enum.name()?;
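+    // e.g. `GeneratorState` becomes "generator state" for use in the generated doc comment below.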
+ let enum_lowercase_name = to_lower_snake_case(&enum_name.to_string()).replace('_', " ");
+ let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
+
+    // Return early if a method with this name already exists
+ let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
+
+ let target = variant.syntax().text_range();
+ acc.add_group(
+        &GroupLabel("Generate an `is_`, `as_`, or `try_into_` for this enum variant".to_owned()),
+ AssistId("generate_enum_is_method", AssistKind::Generate),
+ "Generate an `is_` method for this enum variant",
+ target,
+ |builder| {
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let method = format!(
+ " /// Returns `true` if the {} is [`{variant}`].
+ ///
+ /// [`{variant}`]: {}::{variant}
+ #[must_use]
+ {}fn {}(&self) -> bool {{
+ matches!(self, Self::{variant}{})
+ }}",
+ enum_lowercase_name,
+ enum_name,
+ vis,
+ fn_name,
+ pattern_suffix,
+ variant = variant_name
+ );
+
+ add_method_to_adt(builder, &parent_enum, impl_def, &method);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_enum_is_from_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_already_implemented() {
+ check_assist_not_applicable(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}
+
+impl Variant {
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_tuple_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor(u32)$0,
+ Major,
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor(u32),
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor(..))
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_record_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor { foo: i32 }$0,
+ Major,
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor { foo: i32 },
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor { .. })
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_variant_with_one_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"enum Variant { Undefi$0ned }"#,
+ r#"
+enum Variant { Undefined }
+
+impl Variant {
+ /// Returns `true` if the variant is [`Undefined`].
+ ///
+ /// [`Undefined`]: Variant::Undefined
+ #[must_use]
+ fn is_undefined(&self) -> bool {
+ matches!(self, Self::Undefined)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_from_variant_with_visibility_marker() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+pub(crate) enum Variant {
+ Undefined,
+ Minor$0,
+ Major,
+}"#,
+ r#"pub(crate) enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ pub(crate) fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_enum_is_from_variant() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum Variant {
+ Undefined,
+ Minor,
+ Major$0,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}"#,
+ r#"enum Variant {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Variant {
+ /// Returns `true` if the variant is [`Minor`].
+ ///
+ /// [`Minor`]: Variant::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+
+ /// Returns `true` if the variant is [`Major`].
+ ///
+ /// [`Major`]: Variant::Major
+ #[must_use]
+ fn is_major(&self) -> bool {
+ matches!(self, Self::Major)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_is_variant_names() {
+ check_assist(
+ generate_enum_is_method,
+ r#"
+enum GeneratorState {
+ Yielded,
+ Complete$0,
+ Major,
+}"#,
+ r#"enum GeneratorState {
+ Yielded,
+ Complete,
+ Major,
+}
+
+impl GeneratorState {
+ /// Returns `true` if the generator state is [`Complete`].
+ ///
+ /// [`Complete`]: GeneratorState::Complete
+ #[must_use]
+ fn is_complete(&self) -> bool {
+ matches!(self, Self::Complete)
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
new file mode 100644
index 000000000..b19aa0f65
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
@@ -0,0 +1,342 @@
+use ide_db::assists::GroupLabel;
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::ast::HasVisibility;
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{
+ utils::{add_method_to_adt, find_struct_impl},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_enum_try_into_method
+//
+// Generate a `try_into_` method for this enum variant.
+//
+// ```
+// enum Value {
+// Number(i32),
+// Text(String)$0,
+// }
+// ```
+// ->
+// ```
+// enum Value {
+// Number(i32),
+// Text(String),
+// }
+//
+// impl Value {
+// fn try_into_text(self) -> Result<String, Self> {
+// if let Self::Text(v) = self {
+// Ok(v)
+// } else {
+// Err(self)
+// }
+// }
+// }
+// ```
+pub(crate) fn generate_enum_try_into_method(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ generate_enum_projection_method(
+ acc,
+ ctx,
+ "generate_enum_try_into_method",
+ "Generate a `try_into_` method for this enum variant",
+ ProjectionProps {
+ fn_name_prefix: "try_into",
+ self_param: "self",
+ return_prefix: "Result<",
+ return_suffix: ", Self>",
+ happy_case: "Ok",
+ sad_case: "Err(self)",
+ },
+ )
+}
+
+// Assist: generate_enum_as_method
+//
+// Generate an `as_` method for this enum variant.
+//
+// ```
+// enum Value {
+// Number(i32),
+// Text(String)$0,
+// }
+// ```
+// ->
+// ```
+// enum Value {
+// Number(i32),
+// Text(String),
+// }
+//
+// impl Value {
+// fn as_text(&self) -> Option<&String> {
+// if let Self::Text(v) = self {
+// Some(v)
+// } else {
+// None
+// }
+// }
+// }
+// ```
+pub(crate) fn generate_enum_as_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ generate_enum_projection_method(
+ acc,
+ ctx,
+ "generate_enum_as_method",
+ "Generate an `as_` method for this enum variant",
+ ProjectionProps {
+ fn_name_prefix: "as",
+ self_param: "&self",
+ return_prefix: "Option<&",
+ return_suffix: ">",
+ happy_case: "Some",
+ sad_case: "None",
+ },
+ )
+}
+
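+/// Textual pieces that differ between the `as_` and `try_into_` flavours of the generated method.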
+struct ProjectionProps {
+ fn_name_prefix: &'static str,
+ self_param: &'static str,
+ return_prefix: &'static str,
+ return_suffix: &'static str,
+ happy_case: &'static str,
+ sad_case: &'static str,
+}
+
+fn generate_enum_projection_method(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ assist_id: &'static str,
+ assist_description: &str,
+ props: ProjectionProps,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let parent_enum = ast::Adt::Enum(variant.parent_enum());
+
+ let (pattern_suffix, field_type, bound_name) = match variant.kind() {
+ ast::StructKind::Record(record) => {
+ let (field,) = record.fields().collect_tuple()?;
+ let name = field.name()?.to_string();
+ let ty = field.ty()?;
+ let pattern_suffix = format!(" {{ {} }}", name);
+ (pattern_suffix, ty, name)
+ }
+ ast::StructKind::Tuple(tuple) => {
+ let (field,) = tuple.fields().collect_tuple()?;
+ let ty = field.ty()?;
+ ("(v)".to_owned(), ty, "v".to_owned())
+ }
+ ast::StructKind::Unit => return None,
+ };
+
+ let fn_name =
+ format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
+
+    // Return early if a method with this name already exists
+ let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
+
+ let target = variant.syntax().text_range();
+ acc.add_group(
+ &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()),
+ AssistId(assist_id, AssistKind::Generate),
+ assist_description,
+ target,
+ |builder| {
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let method = format!(
+ " {0}fn {1}({2}) -> {3}{4}{5} {{
+ if let Self::{6}{7} = self {{
+ {8}({9})
+ }} else {{
+ {10}
+ }}
+ }}",
+ vis,
+ fn_name,
+ props.self_param,
+ props.return_prefix,
+ field_type.syntax(),
+ props.return_suffix,
+ variant_name,
+ pattern_suffix,
+ props.happy_case,
+ bound_name,
+ props.sad_case,
+ );
+
+ add_method_to_adt(builder, &parent_enum, impl_def, &method);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_enum_try_into_tuple_variant() {
+ check_assist(
+ generate_enum_try_into_method,
+ r#"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_already_implemented() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_unit_variant() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+ Unit$0,
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_record_with_multiple_fields() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+ Both { first: i32, second: String }$0,
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_tuple_with_multiple_fields() {
+ check_assist_not_applicable(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text(String, String)$0,
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_try_into_record_variant() {
+ check_assist(
+ generate_enum_try_into_method,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String }$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String },
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text { text } = self {
+ Ok(text)
+ } else {
+ Err(self)
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_as_tuple_variant() {
+ check_assist(
+ generate_enum_as_method,
+ r#"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_enum_as_record_variant() {
+ check_assist(
+ generate_enum_as_method,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String }$0,
+}"#,
+ r#"enum Value {
+ Number(i32),
+ Text { text: String },
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text { text } = self {
+ Some(text)
+ } else {
+ None
+ }
+ }
+}"#,
+ );
+ }
+}
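
To make the effect of these two assists concrete, here is a minimal, hand-written sketch of the methods they generate; the enum, the values, and the `main` function are illustrative only and do not appear in the source above:

```
enum Value {
    Number(i32),
    Text(String),
}

impl Value {
    // Shape of what `generate_enum_try_into_method` emits for the `Text` variant.
    fn try_into_text(self) -> Result<String, Self> {
        if let Self::Text(v) = self { Ok(v) } else { Err(self) }
    }

    // Shape of what `generate_enum_as_method` emits for the same variant.
    fn as_text(&self) -> Option<&String> {
        if let Self::Text(v) = self { Some(v) } else { None }
    }
}

fn main() {
    let v = Value::Text("hello".to_owned());
    assert_eq!(v.as_text().map(String::as_str), Some("hello"));
    assert_eq!(v.try_into_text().unwrap(), "hello");
    assert!(Value::Number(1).try_into_text().is_err());
}
```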
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
new file mode 100644
index 000000000..4461fbd5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -0,0 +1,227 @@
+use hir::{HasSource, InFile};
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{
+ ast::{self, edit::IndentLevel},
+ AstNode, TextSize,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: generate_enum_variant
+//
+// Adds a variant to an enum.
+//
+// ```
+// enum Countries {
+// Ghana,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho$0;
+// }
+// ```
+// ->
+// ```
+// enum Countries {
+// Ghana,
+// Lesotho,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho;
+// }
+// ```
+pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
+ let path = path_expr.path()?;
+
+ if ctx.sema.resolve_path(&path).is_some() {
+ // No need to generate anything if the path resolves
+ return None;
+ }
+
+ let name_ref = path.segment()?.name_ref()?;
+ if name_ref.text().starts_with(char::is_lowercase) {
+ // Don't suggest generating variant if the name starts with a lowercase letter
+ return None;
+ }
+
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e)))) =
+ ctx.sema.resolve_path(&path.qualifier()?)
+ {
+ let target = path.syntax().text_range();
+ return add_variant_to_accumulator(acc, ctx, target, e, &name_ref);
+ }
+
+ None
+}
+
+fn add_variant_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ target: syntax::TextRange,
+ adt: hir::Enum,
+ name_ref: &ast::NameRef,
+) -> Option<()> {
+ let db = ctx.db();
+ let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+ let enum_indent = IndentLevel::from_node(&enum_node.syntax());
+
+ let variant_list = enum_node.variant_list()?;
+ let offset = variant_list.syntax().text_range().end() - TextSize::of('}');
+ let empty_enum = variant_list.variants().next().is_none();
+
+ acc.add(
+ AssistId("generate_enum_variant", AssistKind::Generate),
+ "Generate variant",
+ target,
+ |builder| {
+ builder.edit_file(file_id.original_file(db));
+ let text = format!(
+ "{maybe_newline}{indent_1}{name},\n{enum_indent}",
+ maybe_newline = if empty_enum { "\n" } else { "" },
+ indent_1 = IndentLevel(1),
+ name = name_ref,
+ enum_indent = enum_indent
+ );
+ builder.insert(offset, text)
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn generate_basic_enum_variant_in_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_non_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Baz$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+fn main() {
+ Foo::Baz
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_different_file() {
+ check_assist(
+ generate_enum_variant,
+ r"
+//- /main.rs
+mod foo;
+use foo::Foo;
+
+fn main() {
+ Foo::Baz$0
+}
+
+//- /foo.rs
+enum Foo {
+ Bar,
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_existing_variant() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_lowercase() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::new$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn indentation_level_is_correct() {
+ check_assist(
+ generate_enum_variant,
+ r"
+mod m {
+ enum Foo {
+ Bar,
+ }
+}
+fn main() {
+ m::Foo::Baz$0
+}
+",
+ r"
+mod m {
+ enum Foo {
+ Bar,
+ Baz,
+ }
+}
+fn main() {
+ m::Foo::Baz
+}
+",
+ )
+ }
+}
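
The key detail in `add_variant_to_accumulator` is the insertion offset: the new variant is written just before the closing `}` of the variant list. A simplified, standalone sketch of that idea, using a plain string search instead of the real syntax-tree text ranges (function and strings here are hypothetical):

```
fn insertion_offset(enum_text: &str) -> Option<usize> {
    // The real assist subtracts `TextSize::of('}')` from the end of the
    // variant list's text range; rfind is enough to show the idea.
    enum_text.rfind('}')
}

fn main() {
    let src = "enum Foo {\n    Bar,\n}";
    let offset = insertion_offset(src).unwrap();
    let mut patched = src.to_string();
    patched.insert_str(offset, "    Baz,\n");
    assert_eq!(patched, "enum Foo {\n    Bar,\n    Baz,\n}");
}
```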
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
new file mode 100644
index 000000000..507ea012b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -0,0 +1,310 @@
+use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_from_impl_for_enum
+//
+// Adds a From impl for this enum variant with one tuple field.
+//
+// ```
+// enum A { $0One(u32) }
+// ```
+// ->
+// ```
+// enum A { One(u32) }
+//
+// impl From<u32> for A {
+// fn from(v: u32) -> Self {
+// Self::One(v)
+// }
+// }
+// ```
+pub(crate) fn generate_from_impl_for_enum(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let variant = ctx.find_node_at_offset::<ast::Variant>()?;
+ let variant_name = variant.name()?;
+ let enum_ = ast::Adt::Enum(variant.parent_enum());
+ let (field_name, field_type) = match variant.kind() {
+ ast::StructKind::Tuple(field_list) => {
+ if field_list.fields().count() != 1 {
+ return None;
+ }
+ (None, field_list.fields().next()?.ty()?)
+ }
+ ast::StructKind::Record(field_list) => {
+ if field_list.fields().count() != 1 {
+ return None;
+ }
+ let field = field_list.fields().next()?;
+ (Some(field.name()?), field.ty()?)
+ }
+ ast::StructKind::Unit => return None,
+ };
+
+ if existing_from_impl(&ctx.sema, &variant).is_some() {
+ cov_mark::hit!(test_add_from_impl_already_exists);
+ return None;
+ }
+
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId("generate_from_impl_for_enum", AssistKind::Generate),
+ "Generate `From` impl for this enum variant",
+ target,
+ |edit| {
+ let start_offset = variant.parent_enum().syntax().text_range().end();
+ let from_trait = format!("From<{}>", field_type.syntax());
+ let impl_code = if let Some(name) = field_name {
+ format!(
+ r#" fn from({0}: {1}) -> Self {{
+ Self::{2} {{ {0} }}
+ }}"#,
+ name.text(),
+ field_type.syntax(),
+ variant_name,
+ )
+ } else {
+ format!(
+ r#" fn from(v: {}) -> Self {{
+ Self::{}(v)
+ }}"#,
+ field_type.syntax(),
+ variant_name,
+ )
+ };
+ let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code);
+ edit.insert(start_offset, from_impl);
+ },
+ )
+}
+
+fn existing_from_impl(
+ sema: &'_ hir::Semantics<'_, RootDatabase>,
+ variant: &ast::Variant,
+) -> Option<()> {
+ let variant = sema.to_def(variant)?;
+ let enum_ = variant.parent_enum(sema.db);
+ let krate = enum_.module(sema.db).krate();
+
+ let from_trait = FamousDefs(sema, krate).core_convert_From()?;
+
+ let enum_type = enum_.ty(sema.db);
+
+ let wrapped_type = variant.fields(sema.db).get(0)?.ty(sema.db);
+
+ if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {
+ Some(())
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_from_impl_for_enum() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32) }
+"#,
+ r#"
+enum A { One(u32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_from_impl_for_enum_complicated_path() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(foo::bar::baz::Boo) }
+"#,
+ r#"
+enum A { One(foo::bar::baz::Boo) }
+
+impl From<foo::bar::baz::Boo> for A {
+ fn from(v: foo::bar::baz::Boo) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_no_element() {
+ check_assist_not_applicable(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_more_than_one_element_in_tuple() {
+ check_assist_not_applicable(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32, String) }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_struct_variant() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One { x: u32 } }
+"#,
+ r#"
+enum A { One { x: u32 } }
+
+impl From<u32> for A {
+ fn from(x: u32) -> Self {
+ Self::One { x }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_already_exists() {
+ cov_mark::check!(test_add_from_impl_already_exists);
+ check_assist_not_applicable(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32), }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_different_variant_impl_exists() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(u32), Two(String), }
+
+impl From<String> for A {
+ fn from(v: String) -> Self {
+ A::Two(v)
+ }
+}
+
+pub trait From<T> {
+ fn from(T) -> Self;
+}
+"#,
+ r#"
+enum A { One(u32), Two(String), }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+
+impl From<String> for A {
+ fn from(v: String) -> Self {
+ A::Two(v)
+ }
+}
+
+pub trait From<T> {
+ fn from(T) -> Self;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_static_str() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0One(&'static str) }
+"#,
+ r#"
+enum A { One(&'static str) }
+
+impl From<&'static str> for A {
+ fn from(v: &'static str) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_generic_enum() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum Generic<T, U: Clone> { $0One(T), Two(U) }
+"#,
+ r#"
+enum Generic<T, U: Clone> { One(T), Two(U) }
+
+impl<T, U: Clone> From<T> for Generic<T, U> {
+ fn from(v: T) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_from_impl_with_lifetime() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum Generic<'a> { $0One(&'a i32) }
+"#,
+ r#"
+enum Generic<'a> { One(&'a i32) }
+
+impl<'a> From<&'a i32> for Generic<'a> {
+ fn from(v: &'a i32) -> Self {
+ Self::One(v)
+ }
+}
+"#,
+ );
+ }
+}
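
For context, the `From` impl this assist produces is the one that makes `?`-based error conversion work. A small hand-written example of the generated shape in use (the `Error` type, file name, and `main` are illustrative, not part of this source):

```
use std::fs;
use std::io;

enum Error {
    Io(io::Error),
}

// This impl has the shape `generate_from_impl_for_enum` produces for a
// one-field tuple variant.
impl From<io::Error> for Error {
    fn from(v: io::Error) -> Self {
        Self::Io(v)
    }
}

fn read_config() -> Result<String, Error> {
    // `?` uses the `From` impl to convert `io::Error` into `Error`.
    Ok(fs::read_to_string("config.toml")?)
}

fn main() {
    match read_config() {
        Ok(text) => println!("read {} bytes", text.len()),
        Err(Error::Io(e)) => eprintln!("io error: {}", e),
    }
}
```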
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
new file mode 100644
index 000000000..d564a0540
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -0,0 +1,1787 @@
+use hir::{HasSource, HirDisplay, Module, Semantics, TypeInfo};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameRefClass},
+ famous_defs::FamousDefs,
+ FxHashMap, FxHashSet, RootDatabase, SnippetCap,
+};
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, AstNode, CallExpr, HasArgList, HasModuleItem,
+ },
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ utils::convert_reference_type,
+ utils::{find_struct_impl, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_function
+//
+// Adds a stub function with a signature matching the function under the cursor.
+//
+// ```
+// struct Baz;
+// fn baz() -> Baz { Baz }
+// fn foo() {
+// bar$0("", baz());
+// }
+//
+// ```
+// ->
+// ```
+// struct Baz;
+// fn baz() -> Baz { Baz }
+// fn foo() {
+// bar("", baz());
+// }
+//
+// fn bar(arg: &str, baz: Baz) ${0:-> _} {
+// todo!()
+// }
+//
+// ```
+pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ gen_fn(acc, ctx).or_else(|| gen_method(acc, ctx))
+}
+
+fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
+ let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?;
+ let path = path_expr.path()?;
+ let name_ref = path.segment()?.name_ref()?;
+ if ctx.sema.resolve_path(&path).is_some() {
+ // The function call already resolves, no need to add a function
+ return None;
+ }
+
+ let fn_name = &*name_ref.text();
+ let target_module;
+ let mut adt_name = None;
+
+ let (target, file, insert_offset) = match path.qualifier() {
+ Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
+ target_module = Some(module);
+ get_fn_target(ctx, &target_module, call.clone())?
+ }
+ Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
+ if let hir::Adt::Enum(_) = adt {
+ // Don't suggest generating function if the name starts with an uppercase letter
+ if name_ref.text().starts_with(char::is_uppercase) {
+ return None;
+ }
+ }
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let module = adt.module(ctx.sema.db);
+ target_module = if current_module == module { None } else { Some(module) };
+ if current_module.krate() != module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
+ let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?;
+ adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ (target, file, insert_offset)
+ }
+ _ => {
+ return None;
+ }
+ },
+ _ => {
+ target_module = None;
+ get_fn_target(ctx, &target_module, call.clone())?
+ }
+ };
+ let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let label = format!("Generate {} function", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ if ctx.sema.resolve_method_call(&call).is_some() {
+ return None;
+ }
+
+ let fn_name = call.name_ref()?;
+ let adt = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references().as_adt()?;
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let target_module = adt.module(ctx.sema.db);
+
+ if current_module.krate() != target_module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
+ let (target, insert_offset) = get_method_target(ctx, &target_module, &impl_)?;
+ let function_builder =
+ FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ let label = format!("Generate {} method", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+fn add_func_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ text_range: TextRange,
+ function_builder: FunctionBuilder,
+ insert_offset: TextSize,
+ file: FileId,
+ adt_name: Option<hir::Name>,
+ label: String,
+) -> Option<()> {
+ acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| {
+ let function_template = function_builder.render();
+ let mut func = function_template.to_string(ctx.config.snippet_cap);
+ if let Some(name) = adt_name {
+ func = format!("\nimpl {} {{\n{}\n}}", name, func);
+ }
+ builder.edit_file(file);
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, insert_offset, func),
+ None => builder.insert(insert_offset, func),
+ }
+ })
+}
+
+fn get_adt_source(
+ ctx: &AssistContext<'_>,
+ adt: &hir::Adt,
+ fn_name: &str,
+) -> Option<(Option<ast::Impl>, FileId)> {
+ let range = adt.source(ctx.sema.db)?.syntax().original_file_range(ctx.sema.db);
+ let file = ctx.sema.parse(range.file_id);
+ let adt_source =
+ ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
+ find_struct_impl(ctx, &adt_source, fn_name).map(|impl_| (impl_, range.file_id))
+}
+
+struct FunctionTemplate {
+ leading_ws: String,
+ fn_def: ast::Fn,
+ ret_type: Option<ast::RetType>,
+ should_focus_return_type: bool,
+ trailing_ws: String,
+ tail_expr: ast::Expr,
+}
+
+impl FunctionTemplate {
+ fn to_string(&self, cap: Option<SnippetCap>) -> String {
+ let f = match cap {
+ Some(cap) => {
+ let cursor = if self.should_focus_return_type {
+ // Focus the return type if there is one
+ match self.ret_type {
+ Some(ref ret_type) => ret_type.syntax(),
+ None => self.tail_expr.syntax(),
+ }
+ } else {
+ self.tail_expr.syntax()
+ };
+ render_snippet(cap, self.fn_def.syntax(), Cursor::Replace(cursor))
+ }
+ None => self.fn_def.to_string(),
+ };
+
+ format!("{}{}{}", self.leading_ws, f, self.trailing_ws)
+ }
+}
+
+struct FunctionBuilder {
+ target: GeneratedFunctionTarget,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ ret_type: Option<ast::RetType>,
+ should_focus_return_type: bool,
+ needs_pub: bool,
+ is_async: bool,
+}
+
+impl FunctionBuilder {
+ /// Prepares a generated function that matches `call`.
+ /// The function is generated in `target_module` or next to `call`
+ fn from_call(
+ ctx: &AssistContext<'_>,
+ call: &ast::CallExpr,
+ fn_name: &str,
+ target_module: Option<hir::Module>,
+ target: GeneratedFunctionTarget,
+ ) -> Option<Self> {
+ let needs_pub = target_module.is_some();
+ let target_module =
+ target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
+ let fn_name = make::name(fn_name);
+ let (type_params, params) =
+ fn_args(ctx, target_module, ast::CallableExpr::Call(call.clone()))?;
+
+ let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
+ let is_async = await_expr.is_some();
+
+ let (ret_type, should_focus_return_type) =
+ make_return_type(ctx, &ast::Expr::CallExpr(call.clone()), target_module);
+
+ Some(Self {
+ target,
+ fn_name,
+ type_params,
+ params,
+ ret_type,
+ should_focus_return_type,
+ needs_pub,
+ is_async,
+ })
+ }
+
+ fn from_method_call(
+ ctx: &AssistContext<'_>,
+ call: &ast::MethodCallExpr,
+ name: &ast::NameRef,
+ target_module: Module,
+ target: GeneratedFunctionTarget,
+ ) -> Option<Self> {
+ let needs_pub =
+ !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
+ let fn_name = make::name(&name.text());
+ let (type_params, params) =
+ fn_args(ctx, target_module, ast::CallableExpr::MethodCall(call.clone()))?;
+
+ let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
+ let is_async = await_expr.is_some();
+
+ let (ret_type, should_focus_return_type) =
+ make_return_type(ctx, &ast::Expr::MethodCallExpr(call.clone()), target_module);
+
+ Some(Self {
+ target,
+ fn_name,
+ type_params,
+ params,
+ ret_type,
+ should_focus_return_type,
+ needs_pub,
+ is_async,
+ })
+ }
+
+ fn render(self) -> FunctionTemplate {
+ let placeholder_expr = make::ext::expr_todo();
+ let fn_body = make::block_expr(vec![], Some(placeholder_expr));
+ let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
+ let mut fn_def = make::fn_(
+ visibility,
+ self.fn_name,
+ self.type_params,
+ self.params,
+ fn_body,
+ self.ret_type,
+ self.is_async,
+ );
+ let leading_ws;
+ let trailing_ws;
+
+ match self.target {
+ GeneratedFunctionTarget::BehindItem(it) => {
+ let indent = IndentLevel::from_node(&it);
+ leading_ws = format!("\n\n{}", indent);
+ fn_def = fn_def.indent(indent);
+ trailing_ws = String::new();
+ }
+ GeneratedFunctionTarget::InEmptyItemList(it) => {
+ let indent = IndentLevel::from_node(&it);
+ leading_ws = format!("\n{}", indent + 1);
+ fn_def = fn_def.indent(indent + 1);
+ trailing_ws = format!("\n{}", indent);
+ }
+ };
+
+ FunctionTemplate {
+ leading_ws,
+ ret_type: fn_def.ret_type(),
+ // PANIC: we guarantee we always create a function body with a tail expr
+ tail_expr: fn_def.body().unwrap().tail_expr().unwrap(),
+ should_focus_return_type: self.should_focus_return_type,
+ fn_def,
+ trailing_ws,
+ }
+ }
+}
+
+/// Makes an optional return type along with whether the return type should be focused by the cursor.
+/// If we cannot infer what the return type should be, we create a placeholder type.
+///
+/// The rule for whether we focus a return type or not (and thus focus the function body),
+/// is rather simple:
+/// * If we could *not* infer what the return type should be, focus it (so the user can fill in
+/// the correct return type).
+/// * If we could infer the return type, don't focus it (and thus focus the function body) so the
+/// user can change the `todo!` function body.
+fn make_return_type(
+ ctx: &AssistContext<'_>,
+ call: &ast::Expr,
+ target_module: Module,
+) -> (Option<ast::RetType>, bool) {
+ let (ret_ty, should_focus_return_type) = {
+ match ctx.sema.type_of_expr(call).map(TypeInfo::original) {
+ Some(ty) if ty.is_unknown() => (Some(make::ty_placeholder()), true),
+ None => (Some(make::ty_placeholder()), true),
+ Some(ty) if ty.is_unit() => (None, false),
+ Some(ty) => {
+ let rendered = ty.display_source_code(ctx.db(), target_module.into());
+ match rendered {
+ Ok(rendered) => (Some(make::ty(&rendered)), false),
+ Err(_) => (Some(make::ty_placeholder()), true),
+ }
+ }
+ }
+ };
+ let ret_type = ret_ty.map(make::ret_type);
+ (ret_type, should_focus_return_type)
+}
+
+fn get_fn_target(
+ ctx: &AssistContext<'_>,
+ target_module: &Option<Module>,
+ call: CallExpr,
+) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> {
+ let mut file = ctx.file_id();
+ let target = match target_module {
+ Some(target_module) => {
+ let module_source = target_module.definition_source(ctx.db());
+ let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
+ file = in_file;
+ target
+ }
+ None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
+ };
+ Some((target.clone(), file, get_insert_offset(&target)))
+}
+
+fn get_method_target(
+ ctx: &AssistContext<'_>,
+ target_module: &Module,
+ impl_: &Option<ast::Impl>,
+) -> Option<(GeneratedFunctionTarget, TextSize)> {
+ let target = match impl_ {
+ Some(impl_) => next_space_for_fn_in_impl(impl_)?,
+ None => {
+ next_space_for_fn_in_module(ctx.sema.db, &target_module.definition_source(ctx.sema.db))?
+ .1
+ }
+ };
+ Some((target.clone(), get_insert_offset(&target)))
+}
+
+fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
+ match &target {
+ GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
+ GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
+ }
+}
+
+#[derive(Clone)]
+enum GeneratedFunctionTarget {
+ BehindItem(SyntaxNode),
+ InEmptyItemList(SyntaxNode),
+}
+
+impl GeneratedFunctionTarget {
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GeneratedFunctionTarget::BehindItem(it) => it,
+ GeneratedFunctionTarget::InEmptyItemList(it) => it,
+ }
+ }
+}
+
+/// Computes the type variables and arguments required for the generated function
+fn fn_args(
+ ctx: &AssistContext<'_>,
+ target_module: hir::Module,
+ call: ast::CallableExpr,
+) -> Option<(Option<ast::GenericParamList>, ast::ParamList)> {
+ let mut arg_names = Vec::new();
+ let mut arg_types = Vec::new();
+ for arg in call.arg_list()?.args() {
+ arg_names.push(fn_arg_name(&ctx.sema, &arg));
+ arg_types.push(fn_arg_type(ctx, target_module, &arg));
+ }
+ deduplicate_arg_names(&mut arg_names);
+ let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| {
+ make::param(make::ext::simple_ident_pat(make::name(&name)).into(), make::ty(&ty))
+ });
+
+ Some((
+ None,
+ make::param_list(
+ match call {
+ ast::CallableExpr::Call(_) => None,
+ ast::CallableExpr::MethodCall(_) => Some(make::self_param()),
+ },
+ params,
+ ),
+ ))
+}
+
+/// Makes duplicate argument names unique by appending incrementing numbers.
+///
+/// ```
+/// let mut names: Vec<String> =
+/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
+/// deduplicate_arg_names(&mut names);
+/// let expected: Vec<String> =
+/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()];
+/// assert_eq!(names, expected);
+/// ```
+fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
+ let mut arg_name_counts = FxHashMap::default();
+ for name in arg_names.iter() {
+ *arg_name_counts.entry(name).or_insert(0) += 1;
+ }
+ let duplicate_arg_names: FxHashSet<String> = arg_name_counts
+ .into_iter()
+ .filter(|(_, count)| *count >= 2)
+ .map(|(name, _)| name.clone())
+ .collect();
+
+ let mut counter_per_name = FxHashMap::default();
+ for arg_name in arg_names.iter_mut() {
+ if duplicate_arg_names.contains(arg_name) {
+ let counter = counter_per_name.entry(arg_name.clone()).or_insert(1);
+ arg_name.push('_');
+ arg_name.push_str(&counter.to_string());
+ *counter += 1;
+ }
+ }
+}
+
+fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> String {
+ let name = (|| match arg_expr {
+ ast::Expr::CastExpr(cast_expr) => Some(fn_arg_name(sema, &cast_expr.expr()?)),
+ expr => {
+ let name_ref = expr
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .filter(|name| name.ident_token().is_some())
+ .last()?;
+ if let Some(NameRefClass::Definition(Definition::Const(_) | Definition::Static(_))) =
+ NameRefClass::classify(sema, &name_ref)
+ {
+ return Some(name_ref.to_string().to_lowercase());
+ };
+ Some(to_lower_snake_case(&name_ref.to_string()))
+ }
+ })();
+ match name {
+ Some(mut name) if name.starts_with(|c: char| c.is_ascii_digit()) => {
+ name.insert_str(0, "arg");
+ name
+ }
+ Some(name) => name,
+ None => "arg".to_string(),
+ }
+}
+
+fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast::Expr) -> String {
+ fn maybe_displayed_type(
+ ctx: &AssistContext<'_>,
+ target_module: hir::Module,
+ fn_arg: &ast::Expr,
+ ) -> Option<String> {
+ let ty = ctx.sema.type_of_expr(fn_arg)?.adjusted();
+ if ty.is_unknown() {
+ return None;
+ }
+
+ if ty.is_reference() || ty.is_mutable_reference() {
+ let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
+ convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
+ .map(|conversion| conversion.convert_type(ctx.db()))
+ .or_else(|| ty.display_source_code(ctx.db(), target_module.into()).ok())
+ } else {
+ ty.display_source_code(ctx.db(), target_module.into()).ok()
+ }
+ }
+
+ maybe_displayed_type(ctx, target_module, fn_arg).unwrap_or_else(|| String::from("_"))
+}
+
+/// Returns the position inside the current mod or file,
+/// directly after the current block.
+/// We want to write the generated function directly after
+/// fns, impls, or macro calls, but inside mods.
+fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<GeneratedFunctionTarget> {
+ let mut ancestors = expr.syntax().ancestors().peekable();
+ let mut last_ancestor: Option<SyntaxNode> = None;
+ while let Some(next_ancestor) = ancestors.next() {
+ match next_ancestor.kind() {
+ SyntaxKind::SOURCE_FILE => {
+ break;
+ }
+ SyntaxKind::ITEM_LIST => {
+ if ancestors.peek().map(|a| a.kind()) == Some(SyntaxKind::MODULE) {
+ break;
+ }
+ }
+ _ => {}
+ }
+ last_ancestor = Some(next_ancestor);
+ }
+ last_ancestor.map(GeneratedFunctionTarget::BehindItem)
+}
+
+fn next_space_for_fn_in_module(
+ db: &dyn hir::db::AstDatabase,
+ module_source: &hir::InFile<hir::ModuleSource>,
+) -> Option<(FileId, GeneratedFunctionTarget)> {
+ let file = module_source.file_id.original_file(db);
+ let assist_item = match &module_source.value {
+ hir::ModuleSource::SourceFile(it) => match it.items().last() {
+ Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()),
+ },
+ hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) {
+ Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()),
+ },
+ hir::ModuleSource::BlockExpr(it) => {
+ if let Some(last_item) =
+ it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()
+ {
+ GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
+ } else {
+ GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone())
+ }
+ }
+ };
+ Some((file, assist_item))
+}
+
+fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> {
+ if let Some(last_item) = impl_.assoc_item_list().and_then(|it| it.assoc_items().last()) {
+ Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()))
+ } else {
+ Some(GeneratedFunctionTarget::InEmptyItemList(impl_.assoc_item_list()?.syntax().clone()))
+ }
+}
+
+fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistContext<'_>) -> bool {
+ if module == ans {
+ return true;
+ }
+ for c in ans.children(ctx.sema.db) {
+ if module_is_descendant(module, &c, ctx) {
+ return true;
+ }
+ }
+ false
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_function_with_no_args() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ bar$0();
+}
+",
+ r"
+fn foo() {
+ bar();
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_from_method() {
+ // This ensures that the function is correctly generated
+ // in the next outer mod or file
+ check_assist(
+ generate_function,
+ r"
+impl Foo {
+ fn foo() {
+ bar$0();
+ }
+}
+",
+ r"
+impl Foo {
+ fn foo() {
+ bar();
+ }
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_directly_after_current_block() {
+ // The new fn should not be created at the end of the file or module
+ check_assist(
+ generate_function,
+ r"
+fn foo1() {
+ bar$0();
+}
+
+fn foo2() {}
+",
+ r"
+fn foo1() {
+ bar();
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+
+fn foo2() {}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_no_args_in_same_module() {
+ check_assist(
+ generate_function,
+ r"
+mod baz {
+ fn foo() {
+ bar$0();
+ }
+}
+",
+ r"
+mod baz {
+ fn foo() {
+ bar();
+ }
+
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_upper_camel_case_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct BazBaz;
+fn foo() {
+ bar$0(BazBaz);
+}
+",
+ r"
+struct BazBaz;
+fn foo() {
+ bar(BazBaz);
+}
+
+fn bar(baz_baz: BazBaz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_upper_camel_case_arg_as_cast() {
+ check_assist(
+ generate_function,
+ r"
+struct BazBaz;
+fn foo() {
+ bar$0(&BazBaz as *const BazBaz);
+}
+",
+ r"
+struct BazBaz;
+fn foo() {
+ bar(&BazBaz as *const BazBaz);
+}
+
+fn bar(baz_baz: *const BazBaz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_function_call_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+fn foo() {
+ bar$0(baz());
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+fn foo() {
+ bar(baz());
+}
+
+fn bar(baz: Baz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_method_call_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+impl Baz {
+ fn foo(&self) -> Baz {
+ ba$0r(self.baz())
+ }
+ fn baz(&self) -> Baz {
+ Baz
+ }
+}
+",
+ r"
+struct Baz;
+impl Baz {
+ fn foo(&self) -> Baz {
+ bar(self.baz())
+ }
+ fn baz(&self) -> Baz {
+ Baz
+ }
+}
+
+fn bar(baz: Baz) -> Baz {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_string_literal_arg() {
+ check_assist(
+ generate_function,
+ r#"
+fn foo() {
+ $0bar("bar")
+}
+"#,
+ r#"
+fn foo() {
+ bar("bar")
+}
+
+fn bar(arg: &str) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_with_char_literal_arg() {
+ check_assist(
+ generate_function,
+ r#"
+fn foo() {
+ $0bar('x')
+}
+"#,
+ r#"
+fn foo() {
+ bar('x')
+}
+
+fn bar(arg: char) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_with_int_literal_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42)
+}
+",
+ r"
+fn foo() {
+ bar(42)
+}
+
+fn bar(arg: i32) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_cast_int_literal_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42 as u8)
+}
+",
+ r"
+fn foo() {
+ bar(42 as u8)
+}
+
+fn bar(arg: u8) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn name_of_cast_variable_is_used() {
+ // Ensures that the name of the cast type isn't used
+ // in the generated function signature.
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let x = 42;
+ bar$0(x as u8)
+}
+",
+ r"
+fn foo() {
+ let x = 42;
+ bar(x as u8)
+}
+
+fn bar(x: u8) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_variable_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let worble = ();
+ $0bar(worble)
+}
+",
+ r"
+fn foo() {
+ let worble = ();
+ bar(worble)
+}
+
+fn bar(worble: ()) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_impl_trait_arg() {
+ check_assist(
+ generate_function,
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo() -> impl Foo {
+ todo!()
+}
+fn baz() {
+ $0bar(foo())
+}
+"#,
+ r#"
+trait Foo {}
+fn foo() -> impl Foo {
+ todo!()
+}
+fn baz() {
+ bar(foo())
+}
+
+fn bar(foo: impl Foo) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn borrowed_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+
+fn foo() {
+ bar$0(&baz())
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+
+fn foo() {
+ bar(&baz())
+}
+
+fn bar(baz: &Baz) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_qualified_path_arg() {
+ check_assist(
+ generate_function,
+ r"
+mod Baz {
+ pub struct Bof;
+ pub fn baz() -> Bof { Bof }
+}
+fn foo() {
+ $0bar(Baz::baz())
+}
+",
+ r"
+mod Baz {
+ pub struct Bof;
+ pub fn baz() -> Bof { Bof }
+}
+fn foo() {
+ bar(Baz::baz())
+}
+
+fn bar(baz: Baz::Bof) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_generic_arg() {
+ // FIXME: This is wrong, generated `bar` should include generic parameter.
+ check_assist(
+ generate_function,
+ r"
+fn foo<T>(t: T) {
+ $0bar(t)
+}
+",
+ r"
+fn foo<T>(t: T) {
+ bar(t)
+}
+
+fn bar(t: T) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_fn_arg() {
+ // FIXME: The argument in `bar` is wrong.
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+impl Baz {
+ fn new() -> Self { Baz }
+}
+fn foo() {
+ $0bar(Baz::new);
+}
+",
+ r"
+struct Baz;
+impl Baz {
+ fn new() -> Self { Baz }
+}
+fn foo() {
+ bar(Baz::new);
+}
+
+fn bar(new: fn) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_closure_arg() {
+ // FIXME: The argument in `bar` is wrong.
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let closure = |x: i64| x - 1;
+ $0bar(closure)
+}
+",
+ r"
+fn foo() {
+ let closure = |x: i64| x - 1;
+ bar(closure)
+}
+
+fn bar(closure: _) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn unresolveable_types_default_to_placeholder() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(baz)
+}
+",
+ r"
+fn foo() {
+ bar(baz)
+}
+
+fn bar(baz: _) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn arg_names_dont_overlap() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ $0bar(baz(), baz())
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar(baz(), baz())
+}
+
+fn bar(baz_1: Baz, baz_2: Baz) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn arg_name_counters_start_at_1_per_name() {
+ check_assist(
+ generate_function,
+ r#"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ $0bar(baz(), baz(), "foo", "bar")
+}
+"#,
+ r#"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar(baz(), baz(), "foo", "bar")
+}
+
+fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_in_module() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {}
+
+fn foo() {
+ bar::my_fn$0()
+}
+",
+ r"
+mod bar {
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+}
+
+fn foo() {
+ bar::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn qualified_path_uses_correct_scope() {
+ check_assist(
+ generate_function,
+ r#"
+mod foo {
+ pub struct Foo;
+}
+fn bar() {
+ use foo::Foo;
+ let foo = Foo;
+ baz$0(foo)
+}
+"#,
+ r#"
+mod foo {
+ pub struct Foo;
+}
+fn bar() {
+ use foo::Foo;
+ let foo = Foo;
+ baz(foo)
+}
+
+fn baz(foo: foo::Foo) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_in_module_containing_other_items() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {
+ fn something_else() {}
+}
+
+fn foo() {
+ bar::my_fn$0()
+}
+",
+ r"
+mod bar {
+ fn something_else() {}
+
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+}
+
+fn foo() {
+ bar::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_in_nested_module() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {
+ mod baz {}
+}
+
+fn foo() {
+ bar::baz::my_fn$0()
+}
+",
+ r"
+mod bar {
+ mod baz {
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+ }
+}
+
+fn foo() {
+ bar::baz::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_in_another_file() {
+ check_assist(
+ generate_function,
+ r"
+//- /main.rs
+mod foo;
+
+fn main() {
+ foo::bar$0()
+}
+//- /foo.rs
+",
+ r"
+
+
+pub(crate) fn bar() {
+ ${0:todo!()}
+}",
+ )
+ }
+
+ #[test]
+ fn add_function_with_return_type() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ let x: u32 = foo$0();
+}
+",
+ r"
+fn main() {
+ let x: u32 = foo();
+}
+
+fn foo() -> u32 {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_not_applicable_if_function_already_exists() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+fn foo() {
+ bar$0();
+}
+
+fn bar() {}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_not_applicable_if_unresolved_variable_in_call_is_selected() {
+ check_assist_not_applicable(
+ // bar is resolved, but baz isn't.
+ // The assist is only active if the cursor is on an unresolved path,
+ // but the assist should only be offered if the path is a function call.
+ generate_function,
+ r#"
+fn foo() {
+ bar(b$0az);
+}
+
+fn bar(baz: ()) {}
+"#,
+ )
+ }
+
+ #[test]
+ fn create_method_with_no_args() {
+ check_assist(
+ generate_function,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self.bar()$0;
+ }
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self.bar();
+ }
+
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn create_function_with_async() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42).await();
+}
+",
+ r"
+fn foo() {
+ bar(42).await();
+}
+
+async fn bar(arg: i32) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_method() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S.bar$0();}
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+
+
+fn bar(&self) ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_method_within_an_impl() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S.bar$0();}
+impl S {}
+
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_method_from_different_module() {
+ check_assist(
+ generate_function,
+ r"
+mod s {
+ pub struct S;
+}
+fn foo() {s::S.bar$0();}
+",
+ r"
+mod s {
+ pub struct S;
+impl S {
+
+
+ pub(crate) fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+}
+fn foo() {s::S.bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_method_from_descendant_module() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+mod s {
+ fn foo() {
+ super::S.bar$0();
+ }
+}
+
+",
+ r"
+struct S;
+mod s {
+ fn foo() {
+ super::S.bar();
+ }
+}
+impl S {
+
+
+fn bar(&self) ${0:-> _} {
+ todo!()
+}
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_method_with_cursor_anywhere_on_call_expresion() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {$0S.bar();}
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+
+
+fn bar(&self) ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S::bar$0();}
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_within_an_impl() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S::bar$0();}
+impl S {}
+
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_from_different_module() {
+ check_assist(
+ generate_function,
+ r"
+mod s {
+ pub struct S;
+}
+fn foo() {s::S::bar$0();}
+",
+ r"
+mod s {
+ pub struct S;
+impl S {
+
+
+ pub(crate) fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+}
+fn foo() {s::S::bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_with_cursor_anywhere_on_call_expresion() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {$0S::bar();}
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+
+ #[test]
+ fn no_panic_on_invalid_global_path() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ ::foo$0();
+}
+",
+ r"
+fn main() {
+ ::foo();
+}
+
+fn foo() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn handle_tuple_indexing() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ let a = ((),);
+ foo$0(a.0);
+}
+",
+ r"
+fn main() {
+ let a = ((),);
+ foo(a.0);
+}
+
+fn foo(a: ()) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_const_arg() {
+ check_assist(
+ generate_function,
+ r"
+const VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+const VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_static_arg() {
+ check_assist(
+ generate_function,
+ r"
+static VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+static VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_static_mut_arg() {
+ check_assist(
+ generate_function,
+ r"
+static mut VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+static mut VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_enum_variant() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true)
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_for_enum_method() {
+ check_assist(
+ generate_function,
+ r"
+enum Foo {}
+fn main() {
+ Foo::new$0();
+}
+",
+ r"
+enum Foo {}
+fn main() {
+ Foo::new();
+}
+impl Foo {
+
+
+fn new() ${0:-> _} {
+ todo!()
+}
+}
+",
+ )
+ }
+}
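
One self-contained piece of this file worth restating is the argument-name deduplication rule documented on `deduplicate_arg_names`: names that occur more than once get `_1`, `_2`, ... appended, and unique names are left alone. A standalone sketch of the same logic, using `std::collections::HashMap` where the real code uses `FxHashMap`:

```
use std::collections::HashMap;

fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
    // Count how many times each name occurs.
    let mut counts: HashMap<String, usize> = HashMap::new();
    for name in arg_names.iter() {
        *counts.entry(name.clone()).or_insert(0) += 1;
    }
    // Suffix only the names that occur at least twice, numbering per name.
    let mut counters: HashMap<String, usize> = HashMap::new();
    for name in arg_names.iter_mut() {
        if counts[name.as_str()] >= 2 {
            let counter = counters.entry(name.clone()).or_insert(1);
            name.push('_');
            name.push_str(&counter.to_string());
            *counter += 1;
        }
    }
}

fn main() {
    let mut names: Vec<String> =
        vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
    deduplicate_arg_names(&mut names);
    assert_eq!(names, vec!["foo_1", "foo_2", "bar_1", "baz", "bar_2"]);
}
```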
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
new file mode 100644
index 000000000..76fcef0ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
@@ -0,0 +1,492 @@
+use ide_db::famous_defs::FamousDefs;
+use stdx::{format_to, to_lower_snake_case};
+use syntax::ast::{self, AstNode, HasName, HasVisibility};
+
+use crate::{
+ utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text},
+ AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+};
+
+// Assist: generate_getter
+//
+// Generate a getter method.
+//
+// ```
+// # //- minicore: as_ref
+// # pub struct String;
+// # impl AsRef<str> for String {
+// # fn as_ref(&self) -> &str {
+// # ""
+// # }
+// # }
+// #
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// # pub struct String;
+// # impl AsRef<str> for String {
+// # fn as_ref(&self) -> &str {
+// # ""
+// # }
+// # }
+// #
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn $0name(&self) -> &str {
+// self.name.as_ref()
+// }
+// }
+// ```
+pub(crate) fn generate_getter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ generate_getter_impl(acc, ctx, false)
+}
+
+// Assist: generate_getter_mut
+//
+// Generate a mut getter method.
+//
+// ```
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn $0name_mut(&mut self) -> &mut String {
+// &mut self.name
+// }
+// }
+// ```
+pub(crate) fn generate_getter_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ generate_getter_impl(acc, ctx, true)
+}
+
+pub(crate) fn generate_getter_impl(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ mutable: bool,
+) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+
+ let field_name = field.name()?;
+ let field_ty = field.ty()?;
+
+ // Return early if we've found an existing fn
+ let mut fn_name = to_lower_snake_case(&field_name.to_string());
+ if mutable {
+ format_to!(fn_name, "_mut");
+ }
+ let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
+
+ let (id, label) = if mutable {
+ ("generate_getter_mut", "Generate a mut getter method")
+ } else {
+ ("generate_getter", "Generate a getter method")
+ };
+ let target = field.syntax().text_range();
+ acc.add_group(
+ &GroupLabel("Generate getter/setter".to_owned()),
+ AssistId(id, AssistKind::Generate),
+ label,
+ target,
+ |builder| {
+ let mut buf = String::with_capacity(512);
+
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let (ty, body) = if mutable {
+ (format!("&mut {}", field_ty), format!("&mut self.{}", field_name))
+ } else {
+ (|| {
+ let krate = ctx.sema.scope(field_ty.syntax())?.krate();
+ let famous_defs = &FamousDefs(&ctx.sema, krate);
+ ctx.sema
+ .resolve_type(&field_ty)
+ .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
+ .map(|conversion| {
+ cov_mark::hit!(convert_reference_type);
+ (
+ conversion.convert_type(ctx.db()),
+ conversion.getter(field_name.to_string()),
+ )
+ })
+ })()
+ .unwrap_or_else(|| (format!("&{}", field_ty), format!("&self.{}", field_name)))
+ };
+
+ format_to!(
+ buf,
+ " {}fn {}(&{}self) -> {} {{
+ {}
+ }}",
+ vis,
+ fn_name,
+ mutable.then(|| "mut ").unwrap_or_default(),
+ ty,
+ body,
+ );
+
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
+ strukt.syntax().text_range().end()
+ });
+
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ builder.insert_snippet(cap, start_offset, buf.replacen("fn ", "fn $0", 1))
+ }
+ None => builder.insert(start_offset, buf),
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_getter_from_field() {
+ check_assist(
+ generate_getter,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+"#,
+ r#"
+struct Context {
+ data: Data,
+}
+
+impl Context {
+ fn $0data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ );
+
+ check_assist(
+ generate_getter_mut,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+"#,
+ r#"
+struct Context {
+ data: Data,
+}
+
+impl Context {
+ fn $0data_mut(&mut self) -> &mut Data {
+ &mut self.data
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_getter_already_implemented() {
+ check_assist_not_applicable(
+ generate_getter,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ generate_getter_mut,
+ r#"
+struct Context {
+ dat$0a: Data,
+}
+
+impl Context {
+ fn data_mut(&mut self) -> &mut Data {
+ &mut self.data
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_getter_from_field_with_visibility_marker() {
+ check_assist(
+ generate_getter,
+ r#"
+pub(crate) struct Context {
+ dat$0a: Data,
+}
+"#,
+ r#"
+pub(crate) struct Context {
+ data: Data,
+}
+
+impl Context {
+ pub(crate) fn $0data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_getter() {
+ check_assist(
+ generate_getter,
+ r#"
+struct Context {
+ data: Data,
+ cou$0nt: usize,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+}
+"#,
+ r#"
+struct Context {
+ data: Data,
+ count: usize,
+}
+
+impl Context {
+ fn data(&self) -> &Data {
+ &self.data
+ }
+
+ fn $0count(&self) -> &usize {
+ &self.count
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_not_a_special_case() {
+ cov_mark::check_count!(convert_reference_type, 0);
+ // Fake string which doesn't implement AsRef<str>
+ check_assist(
+ generate_getter,
+ r#"
+pub struct String;
+
+struct S { foo: $0String }
+"#,
+ r#"
+pub struct String;
+
+struct S { foo: String }
+
+impl S {
+ fn $0foo(&self) -> &String {
+ &self.foo
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_convert_reference_type() {
+ cov_mark::check_count!(convert_reference_type, 6);
+
+ // Copy
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: copy
+struct S { foo: $0bool }
+"#,
+ r#"
+struct S { foo: bool }
+
+impl S {
+ fn $0foo(&self) -> bool {
+ self.foo
+ }
+}
+"#,
+ );
+
+ // AsRef<str>
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: as_ref
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct S { foo: $0String }
+"#,
+ r#"
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct S { foo: String }
+
+impl S {
+ fn $0foo(&self) -> &str {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // AsRef<T>
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: as_ref
+struct Sweets;
+
+pub struct Box<T>(T);
+impl<T> AsRef<T> for Box<T> {
+ fn as_ref(&self) -> &T {
+ &self.0
+ }
+}
+
+struct S { foo: $0Box<Sweets> }
+"#,
+ r#"
+struct Sweets;
+
+pub struct Box<T>(T);
+impl<T> AsRef<T> for Box<T> {
+ fn as_ref(&self) -> &T {
+ &self.0
+ }
+}
+
+struct S { foo: Box<Sweets> }
+
+impl S {
+ fn $0foo(&self) -> &Sweets {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // AsRef<[T]>
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: as_ref
+pub struct Vec<T>;
+impl<T> AsRef<[T]> for Vec<T> {
+ fn as_ref(&self) -> &[T] {
+ &[]
+ }
+}
+
+struct S { foo: $0Vec<()> }
+"#,
+ r#"
+pub struct Vec<T>;
+impl<T> AsRef<[T]> for Vec<T> {
+ fn as_ref(&self) -> &[T] {
+ &[]
+ }
+}
+
+struct S { foo: Vec<()> }
+
+impl S {
+ fn $0foo(&self) -> &[()] {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // Option
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: option
+struct Failure;
+
+struct S { foo: $0Option<Failure> }
+"#,
+ r#"
+struct Failure;
+
+struct S { foo: Option<Failure> }
+
+impl S {
+ fn $0foo(&self) -> Option<&Failure> {
+ self.foo.as_ref()
+ }
+}
+"#,
+ );
+
+ // Result
+ check_assist(
+ generate_getter,
+ r#"
+//- minicore: result
+struct Context {
+ dat$0a: Result<bool, i32>,
+}
+"#,
+ r#"
+struct Context {
+ data: Result<bool, i32>,
+}
+
+impl Context {
+ fn $0data(&self) -> Result<&bool, &i32> {
+ self.data.as_ref()
+ }
+}
+"#,
+ );
+ }
+}
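
A hand-written illustration of the getters this assist produces, including the `convert_reference_type` special cases exercised by the tests above (`&String` becomes `&str` via `AsRef`, and `Option<T>` is exposed as `Option<&T>`); the `Person` type and `main` are illustrative only:

```
struct Person {
    name: String,
    nickname: Option<String>,
}

impl Person {
    // Plain getter: the `AsRef<str>` conversion rewrites `&String` to `&str`.
    fn name(&self) -> &str {
        self.name.as_ref()
    }

    // Mutable getter, as produced by `generate_getter_mut`.
    fn name_mut(&mut self) -> &mut String {
        &mut self.name
    }

    // `Option<T>` fields are returned as `Option<&T>` via `as_ref()`.
    fn nickname(&self) -> Option<&String> {
        self.nickname.as_ref()
    }
}

fn main() {
    let mut p = Person { name: "Ferris".to_owned(), nickname: None };
    p.name_mut().push_str(" the crab");
    assert_eq!(p.name(), "Ferris the crab");
    assert!(p.nickname().is_none());
}
```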
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
new file mode 100644
index 000000000..68287a20b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
@@ -0,0 +1,177 @@
+use syntax::ast::{self, AstNode, HasName};
+
+use crate::{utils::generate_impl_text, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: generate_impl
+//
+// Adds a new inherent impl for a type.
+//
+// ```
+// struct Ctx<T: Clone> {
+// data: T,$0
+// }
+// ```
+// ->
+// ```
+// struct Ctx<T: Clone> {
+// data: T,
+// }
+//
+// impl<T: Clone> Ctx<T> {
+// $0
+// }
+// ```
+pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
+ let name = nominal.name()?;
+ let target = nominal.syntax().text_range();
+
+ acc.add(
+ AssistId("generate_impl", AssistKind::Generate),
+ format!("Generate impl for `{}`", name),
+ target,
+ |edit| {
+ let start_offset = nominal.syntax().text_range().end();
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = generate_impl_text(&nominal, " $0");
+ edit.insert_snippet(cap, start_offset, snippet);
+ }
+ None => {
+ let snippet = generate_impl_text(&nominal, "");
+ edit.insert(start_offset, snippet);
+ }
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_add_impl() {
+ check_assist(
+ generate_impl,
+ "struct Foo {$0}\n",
+ "struct Foo {}\n\nimpl Foo {\n $0\n}\n",
+ );
+ check_assist(
+ generate_impl,
+ "struct Foo<T: Clone> {$0}",
+ "struct Foo<T: Clone> {}\n\nimpl<T: Clone> Foo<T> {\n $0\n}",
+ );
+ check_assist(
+ generate_impl,
+ "struct Foo<'a, T: Foo<'a>> {$0}",
+ "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}",
+ );
+ check_assist(
+ generate_impl,
+ r#"
+ struct MyOwnArray<T, const S: usize> {}$0"#,
+ r#"
+ struct MyOwnArray<T, const S: usize> {}
+
+ impl<T, const S: usize> MyOwnArray<T, S> {
+ $0
+ }"#,
+ );
+ check_assist(
+ generate_impl,
+ r#"
+ #[cfg(feature = "foo")]
+ struct Foo<'a, T: Foo<'a>> {$0}"#,
+ r#"
+ #[cfg(feature = "foo")]
+ struct Foo<'a, T: Foo<'a>> {}
+
+ #[cfg(feature = "foo")]
+ impl<'a, T: Foo<'a>> Foo<'a, T> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"
+ #[cfg(not(feature = "foo"))]
+ struct Foo<'a, T: Foo<'a>> {$0}"#,
+ r#"
+ #[cfg(not(feature = "foo"))]
+ struct Foo<'a, T: Foo<'a>> {}
+
+ #[cfg(not(feature = "foo"))]
+ impl<'a, T: Foo<'a>> Foo<'a, T> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"
+ struct Defaulted<T = i32> {}$0"#,
+ r#"
+ struct Defaulted<T = i32> {}
+
+ impl<T> Defaulted<T> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"
+ struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#,
+ r#"
+ struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}
+
+ impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> {
+ $0
+ }"#,
+ );
+
+ check_assist(
+ generate_impl,
+ r#"pub trait Trait {}
+struct Struct<T>$0
+where
+ T: Trait,
+{
+ inner: T,
+}"#,
+ r#"pub trait Trait {}
+struct Struct<T>
+where
+ T: Trait,
+{
+ inner: T,
+}
+
+impl<T> Struct<T>
+where
+ T: Trait,
+{
+ $0
+}"#,
+ );
+ }
+
+ #[test]
+ fn add_impl_target() {
+ check_assist_target(
+ generate_impl,
+ "
+struct SomeThingIrrelevant;
+/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {$0}
+struct EvenMoreIrrelevant;
+",
+ "/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {}",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
new file mode 100644
index 000000000..9ce525ca3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
@@ -0,0 +1,295 @@
+use hir::{known, HasSource, Name};
+use syntax::{
+ ast::{self, HasName},
+ AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: generate_is_empty_from_len
+//
+// Generates an is_empty implementation from the len method.
+//
+// ```
+// struct MyStruct { data: Vec<String> }
+//
+// impl MyStruct {
+// #[must_use]
+// p$0ub fn len(&self) -> usize {
+// self.data.len()
+// }
+// }
+// ```
+// ->
+// ```
+// struct MyStruct { data: Vec<String> }
+//
+// impl MyStruct {
+// #[must_use]
+// pub fn len(&self) -> usize {
+// self.data.len()
+// }
+//
+// #[must_use]
+// pub fn is_empty(&self) -> bool {
+// self.len() == 0
+// }
+// }
+// ```
+pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let fn_node = ctx.find_node_at_offset::<ast::Fn>()?;
+ let fn_name = fn_node.name()?;
+
+ if fn_name.text() != "len" {
+ cov_mark::hit!(len_function_not_present);
+ return None;
+ }
+
+ if fn_node.param_list()?.params().next().is_some() {
+ cov_mark::hit!(len_function_with_parameters);
+ return None;
+ }
+
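+    // The enclosing impl's self type must have a `len` method returning `usize` and must not already provide `is_empty`.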
+ let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
+ let len_fn = get_impl_method(ctx, &impl_, &known::len)?;
+ if !len_fn.ret_type(ctx.sema.db).is_usize() {
+ cov_mark::hit!(len_fn_different_return_type);
+ return None;
+ }
+
+ if get_impl_method(ctx, &impl_, &known::is_empty).is_some() {
+ cov_mark::hit!(is_empty_already_implemented);
+ return None;
+ }
+
+ let node = len_fn.source(ctx.sema.db)?;
+ let range = node.syntax().value.text_range();
+
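+    // Anchor the assist on the `len` function's range and append the generated `is_empty` right after it.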
+ acc.add(
+ AssistId("generate_is_empty_from_len", AssistKind::Generate),
+ "Generate a is_empty impl from a len function",
+ range,
+ |builder| {
+ let code = r#"
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }"#
+ .to_string();
+ builder.insert(range.end(), code)
+ },
+ )
+}
+
+fn get_impl_method(
+ ctx: &AssistContext<'_>,
+ impl_: &ast::Impl,
+ fn_name: &Name,
+) -> Option<hir::Function> {
+ let db = ctx.sema.db;
+ let impl_def: hir::Impl = ctx.sema.to_def(impl_)?;
+
+ let scope = ctx.sema.scope(impl_.syntax())?;
+ let ty = impl_def.self_ty(db);
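+    // Search the self type's method candidates (within the traits visible in this scope) for a function with the requested name.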
+ ty.iterate_method_candidates(
+ db,
+ &scope,
+ &scope.visible_traits().0,
+ None,
+ Some(fn_name),
+ |func| Some(func),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn len_function_not_present() {
+ cov_mark::check!(len_function_not_present);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ p$0ub fn test(&self) -> usize {
+ self.data.len()
+ }
+ }
+"#,
+ );
+ }
+
+ #[test]
+ fn len_function_with_parameters() {
+ cov_mark::check!(len_function_with_parameters);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self, _i: bool) -> usize {
+ self.data.len()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn is_empty_already_implemented() {
+ cov_mark::check!(is_empty_already_implemented);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn len_fn_different_return_type() {
+ cov_mark::check!(len_fn_different_return_type);
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> u32 {
+ self.data.len()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_is_empty() {
+ check_assist(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+"#,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_functions_in_impl() {
+ check_assist(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn new() -> Self {
+ Self { data: 0 }
+ }
+
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ pub fn work(&self) -> Option<usize> {
+
+ }
+}
+"#,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn new() -> Self {
+ Self { data: 0 }
+ }
+
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub fn work(&self) -> Option<usize> {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_impls() {
+ check_assist_not_applicable(
+ generate_is_empty_from_len,
+ r#"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+
+impl MyStruct {
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
new file mode 100644
index 000000000..6c93875e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -0,0 +1,495 @@
+use ide_db::{
+ imports::import_assets::item_for_path_search, use_trivial_contructor::use_trivial_constructor,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::ast::{self, AstNode, HasName, HasVisibility, StructKind};
+
+use crate::{
+ utils::{find_impl_block_start, find_struct_impl, generate_impl_text},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_new
+//
+// Adds a `fn new` for a type.
+//
+// ```
+// struct Ctx<T: Clone> {
+// data: T,$0
+// }
+// ```
+// ->
+// ```
+// struct Ctx<T: Clone> {
+// data: T,
+// }
+//
+// impl<T: Clone> Ctx<T> {
+// fn $0new(data: T) -> Self { Self { data } }
+// }
+// ```
+pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+
+ // We want to only apply this to non-union structs with named fields
+ let field_list = match strukt.kind() {
+ StructKind::Record(named) => named,
+ _ => return None,
+ };
+
+ // Return early if we've found an existing new fn
+ let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
+
+ let current_module = ctx.sema.scope(strukt.syntax())?.module();
+
+ let target = strukt.syntax().text_range();
+ acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
+ let mut buf = String::with_capacity(512);
+
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+
+ let trivial_constructors = field_list
+ .fields()
+ .map(|f| {
+ let ty = ctx.sema.resolve_type(&f.ty()?)?;
+
+ let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
+
+ let type_path = current_module
+ .find_use_path(ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?)?;
+
+ let expr = use_trivial_constructor(
+ &ctx.sema.db,
+ ide_db::helpers::mod_path_to_ast(&type_path),
+ &ty,
+ )?;
+
+ Some(format!("{}: {}", f.name()?.syntax(), expr))
+ })
+ .collect::<Vec<_>>();
+
+ let params = field_list
+ .fields()
+ .enumerate()
+ .filter_map(|(i, f)| {
+ if trivial_constructors[i].is_none() {
+ Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax()))
+ } else {
+ None
+ }
+ })
+ .format(", ");
+
+ let fields = field_list
+ .fields()
+ .enumerate()
+ .filter_map(|(i, f)| {
+                let constructor = trivial_constructors[i].clone();
+                if constructor.is_some() {
+                    constructor
+ } else {
+ Some(f.name()?.to_string())
+ }
+ })
+ .format(", ");
+
+ format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields);
+
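+        // Insert at the start of an existing inherent impl when there is one; otherwise wrap the buffer in a freshly generated `impl` block.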
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
+ strukt.syntax().text_range().end()
+ });
+
+ match ctx.config.snippet_cap {
+ None => builder.insert(start_offset, buf),
+ Some(cap) => {
+ buf = buf.replace("fn new", "fn $0new");
+ builder.insert_snippet(cap, start_offset, buf);
+ }
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_new_with_zst_fields() {
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+struct Empty;
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new() -> Self { Self { empty: Empty } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo { baz: String, empty: Empty $0}
+"#,
+ r#"
+struct Empty;
+
+struct Foo { baz: String, empty: Empty }
+
+impl Foo {
+ fn $0new(baz: String) -> Self { Self { baz, empty: Empty } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar }
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+enum Empty { Bar }
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new() -> Self { Self { empty: Empty::Bar } }
+}
+"#,
+ );
+
+ // make sure the assist only works on unit variants
+ check_assist(
+ generate_new,
+ r#"
+struct Empty {}
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+struct Empty {}
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new(empty: Empty) -> Self { Self { empty } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo { empty: Empty $0}
+"#,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo { empty: Empty }
+
+impl Foo {
+ fn $0new(empty: Empty) -> Self { Self { empty } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_new() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<T: Clone> {$0}
+"#,
+ r#"
+struct Foo<T: Clone> {}
+
+impl<T: Clone> Foo<T> {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<'a, T: Foo<'a>> {$0}
+"#,
+ r#"
+struct Foo<'a, T: Foo<'a>> {}
+
+impl<'a, T: Foo<'a>> Foo<'a, T> {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo { baz: String $0}
+"#,
+ r#"
+struct Foo { baz: String }
+
+impl Foo {
+ fn $0new(baz: String) -> Self { Self { baz } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo { baz: String, qux: Vec<i32> $0}
+"#,
+ r#"
+struct Foo { baz: String, qux: Vec<i32> }
+
+impl Foo {
+ fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_that_visibility_modifiers_dont_get_brought_in() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo { pub baz: String, pub qux: Vec<i32> $0}
+"#,
+ r#"
+struct Foo { pub baz: String, pub qux: Vec<i32> }
+
+impl Foo {
+ fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_it_reuses_existing_impls() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn qux(&self) {}
+}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+
+ fn qux(&self) {}
+}
+"#,
+ );
+
+ check_assist(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn qux(&self) {}
+ fn baz() -> i32 {
+ 5
+ }
+}
+"#,
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn $0new() -> Self { Self { } }
+
+ fn qux(&self) {}
+ fn baz() -> i32 {
+ 5
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_visibility_of_new_fn_based_on_struct() {
+ check_assist(
+ generate_new,
+ r#"
+pub struct Foo {$0}
+"#,
+ r#"
+pub struct Foo {}
+
+impl Foo {
+ pub fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+pub(crate) struct Foo {$0}
+"#,
+ r#"
+pub(crate) struct Foo {}
+
+impl Foo {
+ pub(crate) fn $0new() -> Self { Self { } }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_not_applicable_if_fn_exists() {
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn new() -> Self {
+ Self
+ }
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo {$0}
+
+impl Foo {
+ fn New() -> Self {
+ Self
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_target() {
+ check_assist_target(
+ generate_new,
+ r#"
+struct SomeThingIrrelevant;
+/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {$0}
+struct EvenMoreIrrelevant;
+"#,
+ "/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>> {}",
+ );
+ }
+
+ #[test]
+ fn test_unrelated_new() {
+ check_assist(
+ generate_new,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T> {
+ pub file_id: HirFileId,$0
+ pub ast: T,
+}
+
+impl<T> Source<T> {
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source { file_id: self.file_id, ast: f(self.ast) }
+ }
+}
+"#,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T> {
+ pub file_id: HirFileId,
+ pub ast: T,
+}
+
+impl<T> Source<T> {
+ pub fn $0new(file_id: HirFileId, ast: T) -> Self { Self { file_id, ast } }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source { file_id: self.file_id, ast: f(self.ast) }
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
new file mode 100644
index 000000000..2a7ad6ce3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
@@ -0,0 +1,184 @@
+use stdx::{format_to, to_lower_snake_case};
+use syntax::ast::{self, AstNode, HasName, HasVisibility};
+
+use crate::{
+ utils::{find_impl_block_end, find_struct_impl, generate_impl_text},
+ AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+};
+
+// Assist: generate_setter
+//
+// Generate a setter method.
+//
+// ```
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn set_name(&mut self, name: String) {
+// self.name = name;
+// }
+// }
+// ```
+pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+
+ let field_name = field.name()?;
+ let field_ty = field.ty()?;
+
+ // Return early if we've found an existing fn
+ let fn_name = to_lower_snake_case(&field_name.to_string());
+ let impl_def = find_struct_impl(
+ ctx,
+ &ast::Adt::Struct(strukt.clone()),
+ format!("set_{}", fn_name).as_str(),
+ )?;
+
+ let target = field.syntax().text_range();
+ acc.add_group(
+ &GroupLabel("Generate getter/setter".to_owned()),
+ AssistId("generate_setter", AssistKind::Generate),
+ "Generate a setter method",
+ target,
+ |builder| {
+ let mut buf = String::with_capacity(512);
+
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ format_to!(
+ buf,
+ " {}fn set_{}(&mut self, {}: {}) {{
+ self.{} = {};
+ }}",
+ vis,
+ fn_name,
+ fn_name,
+ field_ty,
+ fn_name,
+ fn_name,
+ );
+
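+            // Append at the end of an existing impl block if present; otherwise generate a new `impl` carrying the struct's generic parameters.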
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
+ strukt.syntax().text_range().end()
+ });
+
+ builder.insert(start_offset, buf);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check_not_applicable(ra_fixture: &str) {
+ check_assist_not_applicable(generate_setter, ra_fixture)
+ }
+
+ #[test]
+ fn test_generate_setter_from_field() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_already_implemented() {
+ check_not_applicable(
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_from_field_with_visibility_marker() {
+ check_assist(
+ generate_setter,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ pub(crate) fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_setter() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ cou$0nt: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ count: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+
+ fn set_count(&mut self, count: usize) {
+ self.count = count;
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
new file mode 100644
index 000000000..80d3b9255
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -0,0 +1,1194 @@
+use ast::make;
+use either::Either;
+use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ defs::Definition,
+ imports::insert_use::remove_path_if_in_use_stmt,
+ path_transform::PathTransform,
+ search::{FileReference, SearchScope},
+ syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
+ RootDatabase,
+};
+use itertools::{izip, Itertools};
+use syntax::{
+ ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
+ ted, AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: inline_into_callers
+//
+// Inline a function or method body into all of its callers where possible, creating a `let` statement per parameter
+// unless the parameter can be inlined. A parameter is inlined either if the supplied argument is a simple local
+// or if the parameter is only accessed inside the function body once.
+// If all calls can be inlined, the function definition is removed.
+//
+// ```
+// fn print(_: &str) {}
+// fn foo$0(word: &str) {
+// if !word.is_empty() {
+// print(word);
+// }
+// }
+// fn bar() {
+// foo("안녕하세요");
+// foo("여러분");
+// }
+// ```
+// ->
+// ```
+// fn print(_: &str) {}
+//
+// fn bar() {
+// {
+// let word = "안녕하세요";
+// if !word.is_empty() {
+// print(word);
+// }
+// };
+// {
+// let word = "여러분";
+// if !word.is_empty() {
+// print(word);
+// }
+// };
+// }
+// ```
+pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let def_file = ctx.file_id();
+ let name = ctx.find_node_at_offset::<ast::Name>()?;
+ let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
+ let func_body = ast_func.body()?;
+ let param_list = ast_func.param_list()?;
+
+ let function = ctx.sema.to_def(&ast_func)?;
+
+ let params = get_fn_params(ctx.sema.db, function, &param_list)?;
+
+ let usages = Definition::Function(function).usages(&ctx.sema);
+ if !usages.at_least_one() {
+ return None;
+ }
+
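+    // A function that calls itself inside its own body cannot be inlined into its callers.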
+ let is_recursive_fn = usages
+ .clone()
+ .in_scope(SearchScope::file_range(FileRange {
+ file_id: def_file,
+ range: func_body.syntax().text_range(),
+ }))
+ .at_least_one();
+ if is_recursive_fn {
+ cov_mark::hit!(inline_into_callers_recursive);
+ return None;
+ }
+
+ acc.add(
+ AssistId("inline_into_callers", AssistKind::RefactorInline),
+ "Inline into all callers",
+ name.syntax().text_range(),
+ |builder| {
+ let mut usages = usages.all();
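+            // Usages in the defining file are split out and handled last, so the final `edit_file` targets that file before the definition is (possibly) deleted.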
+ let current_file_usage = usages.references.remove(&def_file);
+
+ let mut remove_def = true;
+ let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
+ builder.edit_file(file_id);
+ let count = refs.len();
+                // The collects are required because we would otherwise be iterating while mutating 🙅‍♀️🙅‍♂️
+ let (name_refs, name_refs_use): (Vec<_>, Vec<_>) = refs
+ .into_iter()
+ .filter_map(|file_ref| match file_ref.name {
+ ast::NameLike::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ })
+ .partition_map(|name_ref| {
+ match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
+ Some(use_tree) => Either::Right(builder.make_mut(use_tree)),
+ None => Either::Left(name_ref),
+ }
+ });
+ let call_infos: Vec<_> = name_refs
+ .into_iter()
+ .filter_map(CallInfo::from_name_ref)
+ .map(|call_info| {
+ let mut_node = builder.make_syntax_mut(call_info.node.syntax().clone());
+ (call_info, mut_node)
+ })
+ .collect();
+ let replaced = call_infos
+ .into_iter()
+ .map(|(call_info, mut_node)| {
+ let replacement =
+ inline(&ctx.sema, def_file, function, &func_body, &params, &call_info);
+ ted::replace(mut_node, replacement.syntax());
+ })
+ .count();
+ if replaced + name_refs_use.len() == count {
+ // we replaced all usages in this file, so we can remove the imports
+ name_refs_use.into_iter().for_each(|use_tree| {
+ if let Some(path) = use_tree.path() {
+ remove_path_if_in_use_stmt(&path);
+ }
+ })
+ } else {
+ remove_def = false;
+ }
+ };
+ for (file_id, refs) in usages.into_iter() {
+ inline_refs_for_file(file_id, refs);
+ }
+ match current_file_usage {
+ Some(refs) => inline_refs_for_file(def_file, refs),
+ None => builder.edit_file(def_file),
+ }
+ if remove_def {
+ builder.delete(ast_func.syntax().text_range());
+ }
+ },
+ )
+}
+
+// Assist: inline_call
+//
+// Inlines a function or method body, creating a `let` statement per parameter unless the parameter
+// can be inlined. A parameter is inlined either if the supplied argument is a simple local
+// or if the parameter is only accessed inside the function body once.
+//
+// ```
+// # //- minicore: option
+// fn foo(name: Option<&str>) {
+// let name = name.unwrap$0();
+// }
+// ```
+// ->
+// ```
+// fn foo(name: Option<&str>) {
+// let name = match name {
+// Some(val) => val,
+// None => panic!("called `Option::unwrap()` on a `None` value"),
+// };
+// }
+// ```
+pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
+ let call_info = CallInfo::from_name_ref(name_ref.clone())?;
+ let (function, label) = match &call_info.node {
+ ast::CallableExpr::Call(call) => {
+ let path = match call.expr()? {
+ ast::Expr::PathExpr(path) => path.path(),
+ _ => None,
+ }?;
+ let function = match ctx.sema.resolve_path(&path)? {
+ PathResolution::Def(hir::ModuleDef::Function(f)) => f,
+ _ => return None,
+ };
+ (function, format!("Inline `{}`", path))
+ }
+ ast::CallableExpr::MethodCall(call) => {
+ (ctx.sema.resolve_method_call(call)?, format!("Inline `{}`", name_ref))
+ }
+ };
+
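+    // Fetch the callee's source so its body and parameter list can be cloned into the call site.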
+ let fn_source = ctx.sema.source(function)?;
+ let fn_body = fn_source.value.body()?;
+ let param_list = fn_source.value.param_list()?;
+
+ let FileRange { file_id, range } = fn_source.syntax().original_file_range(ctx.sema.db);
+ if file_id == ctx.file_id() && range.contains(ctx.offset()) {
+ cov_mark::hit!(inline_call_recursive);
+ return None;
+ }
+ let params = get_fn_params(ctx.sema.db, function, &param_list)?;
+
+ if call_info.arguments.len() != params.len() {
+ // Can't inline the function because they've passed the wrong number of
+ // arguments to this function
+ cov_mark::hit!(inline_call_incorrect_number_of_arguments);
+ return None;
+ }
+
+ let syntax = call_info.node.syntax().clone();
+ acc.add(
+ AssistId("inline_call", AssistKind::RefactorInline),
+ label,
+ syntax.text_range(),
+ |builder| {
+ let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
+
+ builder.replace_ast(
+ match call_info.node {
+ ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
+ ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it),
+ },
+ replacement,
+ );
+ },
+ )
+}
+
+struct CallInfo {
+ node: ast::CallableExpr,
+ arguments: Vec<ast::Expr>,
+ generic_arg_list: Option<ast::GenericArgList>,
+}
+
+impl CallInfo {
+ fn from_name_ref(name_ref: ast::NameRef) -> Option<CallInfo> {
+ let parent = name_ref.syntax().parent()?;
+ if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) {
+ let receiver = call.receiver()?;
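+            // The method receiver is treated as the first argument, so it can later be bound to the `self`/`this` parameter.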
+ let mut arguments = vec![receiver];
+ arguments.extend(call.arg_list()?.args());
+ Some(CallInfo {
+ generic_arg_list: call.generic_arg_list(),
+ node: ast::CallableExpr::MethodCall(call),
+ arguments,
+ })
+ } else if let Some(segment) = ast::PathSegment::cast(parent) {
+ let path = segment.syntax().parent().and_then(ast::Path::cast)?;
+ let path = path.syntax().parent().and_then(ast::PathExpr::cast)?;
+ let call = path.syntax().parent().and_then(ast::CallExpr::cast)?;
+
+ Some(CallInfo {
+ arguments: call.arg_list()?.args().collect(),
+ node: ast::CallableExpr::Call(call),
+ generic_arg_list: segment.generic_arg_list(),
+ })
+ } else {
+ None
+ }
+ }
+}
+
+fn get_fn_params(
+ db: &dyn HirDatabase,
+ function: hir::Function,
+ param_list: &ast::ParamList,
+) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param)>> {
+ let mut assoc_fn_params = function.assoc_fn_params(db).into_iter();
+
+ let mut params = Vec::new();
+ if let Some(self_param) = param_list.self_param() {
+ // FIXME this should depend on the receiver as well as the self_param
+ params.push((
+ make::ident_pat(
+ self_param.amp_token().is_some(),
+ self_param.mut_token().is_some(),
+ make::name("this"),
+ )
+ .into(),
+ None,
+ assoc_fn_params.next()?,
+ ));
+ }
+ for param in param_list.params() {
+ params.push((param.pat()?, param.ty(), assoc_fn_params.next()?));
+ }
+
+ Some(params)
+}
+
+fn inline(
+ sema: &Semantics<'_, RootDatabase>,
+ function_def_file_id: FileId,
+ function: hir::Function,
+ fn_body: &ast::BlockExpr,
+ params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
+ CallInfo { node, arguments, generic_arg_list }: &CallInfo,
+) -> ast::Expr {
+ let body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
+ cov_mark::hit!(inline_call_defined_in_macro);
+ if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
+ body
+ } else {
+ fn_body.clone_for_update()
+ }
+ } else {
+ fn_body.clone_for_update()
+ };
+ let usages_for_locals = |local| {
+ Definition::Local(local)
+ .usages(sema)
+ .all()
+ .references
+ .remove(&function_def_file_id)
+ .unwrap_or_default()
+ .into_iter()
+ };
+ let param_use_nodes: Vec<Vec<_>> = params
+ .iter()
+ .map(|(pat, _, param)| {
+ if !matches!(pat, ast::Pat::IdentPat(pat) if pat.is_simple_ident()) {
+ return Vec::new();
+ }
+ // FIXME: we need to fetch all locals declared in the parameter here
+ // not only the local if it is a simple binding
+ match param.as_local(sema.db) {
+ Some(l) => usages_for_locals(l)
+ .map(|FileReference { name, range, .. }| match name {
+ ast::NameLike::NameRef(_) => body
+ .syntax()
+ .covering_element(range)
+ .ancestors()
+ .nth(3)
+ .and_then(ast::PathExpr::cast),
+ _ => None,
+ })
+ .collect::<Option<Vec<_>>>()
+ .unwrap_or_default(),
+ None => Vec::new(),
+ }
+ })
+ .collect();
+ if function.self_param(sema.db).is_some() {
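+        // Replace every usage of `self` in the cloned body with the `this` name introduced for the receiver.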
+ let this = || make::name_ref("this").syntax().clone_for_update();
+ if let Some(self_local) = params[0].2.as_local(sema.db) {
+ usages_for_locals(self_local)
+ .flat_map(|FileReference { name, range, .. }| match name {
+ ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
+ _ => None,
+ })
+ .for_each(|it| {
+ ted::replace(it, &this());
+ })
+ }
+ }
+ // Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
+ for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
+ let inline_direct = |usage, replacement: &ast::Expr| {
+ if let Some(field) = path_expr_as_record_field(usage) {
+ cov_mark::hit!(inline_call_inline_direct_field);
+ field.replace_expr(replacement.clone_for_update());
+ } else {
+ ted::replace(usage.syntax(), &replacement.syntax().clone_for_update());
+ }
+ };
+        // izip confuses RA: our current lack of hygiene info loses type information, causing incorrect errors
+ let usages: &[ast::PathExpr] = &*usages;
+ let expr: &ast::Expr = expr;
+ match usages {
+ // inline single use closure arguments
+ [usage]
+ if matches!(expr, ast::Expr::ClosureExpr(_))
+ && usage.syntax().parent().and_then(ast::Expr::cast).is_some() =>
+ {
+ cov_mark::hit!(inline_call_inline_closure);
+ let expr = make::expr_paren(expr.clone());
+ inline_direct(usage, &expr);
+ }
+ // inline single use literals
+ [usage] if matches!(expr, ast::Expr::Literal(_)) => {
+ cov_mark::hit!(inline_call_inline_literal);
+ inline_direct(usage, expr);
+ }
+ // inline direct local arguments
+ [_, ..] if expr_as_name_ref(expr).is_some() => {
+ cov_mark::hit!(inline_call_inline_locals);
+ usages.iter().for_each(|usage| inline_direct(usage, expr));
+ }
+ // can't inline, emit a let statement
+ _ => {
+ let ty =
+ sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
+ if let Some(stmt_list) = body.stmt_list() {
+ stmt_list.push_front(
+ make::let_stmt(pat.clone(), ty, Some(expr.clone()))
+ .clone_for_update()
+ .into(),
+ )
+ }
+ }
+ }
+ }
+ if let Some(generic_arg_list) = generic_arg_list.clone() {
+ if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
+ {
+ PathTransform::function_call(target, source, function, generic_arg_list)
+ .apply(body.syntax());
+ }
+ }
+
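+    // Re-indent the inlined body to the indentation level of the call expression it replaces.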
+ let original_indentation = match node {
+ ast::CallableExpr::Call(it) => it.indent_level(),
+ ast::CallableExpr::MethodCall(it) => it.indent_level(),
+ };
+ body.reindent_to(original_indentation);
+
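+    // A body that is just a tail expression is inlined directly; otherwise keep the block,
+    // parenthesizing it when it would become the left-hand side of a binary expression.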
+ match body.tail_expr() {
+ Some(expr) if body.statements().next().is_none() => expr,
+ _ => match node
+ .syntax()
+ .parent()
+ .and_then(ast::BinExpr::cast)
+ .and_then(|bin_expr| bin_expr.lhs())
+ {
+ Some(lhs) if lhs.syntax() == node.syntax() => {
+ make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update()
+ }
+ _ => ast::Expr::BlockExpr(body),
+ },
+ }
+}
+
+fn path_expr_as_record_field(usage: &PathExpr) -> Option<ast::RecordExprField> {
+ let path = usage.path()?;
+ let name_ref = path.as_single_name_ref()?;
+ ast::RecordExprField::for_name_ref(&name_ref)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn no_args_or_return_value_gets_inlined_without_block() {
+ check_assist(
+ inline_call,
+ r#"
+fn foo() { println!("Hello, World!"); }
+fn main() {
+ fo$0o();
+}
+"#,
+ r#"
+fn foo() { println!("Hello, World!"); }
+fn main() {
+ { println!("Hello, World!"); };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_when_incorrect_number_of_parameters_are_provided() {
+ cov_mark::check!(inline_call_incorrect_number_of_arguments);
+ check_assist_not_applicable(
+ inline_call,
+ r#"
+fn add(a: u32, b: u32) -> u32 { a + b }
+fn main() { let x = add$0(42); }
+"#,
+ );
+ }
+
+ #[test]
+ fn args_with_side_effects() {
+ check_assist(
+ inline_call,
+ r#"
+fn foo(name: String) {
+ println!("Hello, {}!", name);
+}
+fn main() {
+ foo$0(String::from("Michael"));
+}
+"#,
+ r#"
+fn foo(name: String) {
+ println!("Hello, {}!", name);
+}
+fn main() {
+ {
+ let name = String::from("Michael");
+ println!("Hello, {}!", name);
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_with_multiple_statements() {
+ check_assist(
+ inline_call,
+ r#"
+fn foo(a: u32, b: u32) -> u32 {
+ let x = a + b;
+ let y = x - b;
+ x * y
+}
+
+fn main() {
+ let x = foo$0(1, 2);
+}
+"#,
+ r#"
+fn foo(a: u32, b: u32) -> u32 {
+ let x = a + b;
+ let y = x - b;
+ x * y
+}
+
+fn main() {
+ let x = {
+ let b = 2;
+ let x = 1 + b;
+ let y = x - b;
+ x * y
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_with_self_param() {
+ check_assist(
+ inline_call,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add(self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = Foo::add$0(Foo(3), 2);
+}
+"#,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add(self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = {
+ let this = Foo(3);
+ Foo(this.0 + 2)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_by_val() {
+ check_assist(
+ inline_call,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add(self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = Foo(3).add$0(2);
+}
+"#,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add(self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = {
+ let this = Foo(3);
+ Foo(this.0 + 2)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_by_ref() {
+ check_assist(
+ inline_call,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add(&self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = Foo(3).add$0(2);
+}
+"#,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn add(&self, a: u32) -> Self {
+ Foo(self.0 + a)
+ }
+}
+
+fn main() {
+ let x = {
+ let ref this = Foo(3);
+ Foo(this.0 + 2)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_by_ref_mut() {
+ check_assist(
+ inline_call,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn clear(&mut self) {
+ self.0 = 0;
+ }
+}
+
+fn main() {
+ let mut foo = Foo(3);
+ foo.clear$0();
+}
+"#,
+ r#"
+struct Foo(u32);
+
+impl Foo {
+ fn clear(&mut self) {
+ self.0 = 0;
+ }
+}
+
+fn main() {
+ let mut foo = Foo(3);
+ {
+ let ref mut this = foo;
+ this.0 = 0;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_multi_use_expr_in_param() {
+ check_assist(
+ inline_call,
+ r#"
+fn square(x: u32) -> u32 {
+ x * x
+}
+fn main() {
+ let x = 51;
+ let y = square$0(10 + x);
+}
+"#,
+ r#"
+fn square(x: u32) -> u32 {
+ x * x
+}
+fn main() {
+ let x = 51;
+ let y = {
+ let x = 10 + x;
+ x * x
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_use_local_in_param() {
+ cov_mark::check!(inline_call_inline_locals);
+ check_assist(
+ inline_call,
+ r#"
+fn square(x: u32) -> u32 {
+ x * x
+}
+fn main() {
+ let local = 51;
+ let y = square$0(local);
+}
+"#,
+ r#"
+fn square(x: u32) -> u32 {
+ x * x
+}
+fn main() {
+ let local = 51;
+ let y = local * local;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_in_impl() {
+ check_assist(
+ inline_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self;
+ self;
+ }
+ fn bar(&self) {
+ self.foo$0();
+ }
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self;
+ self;
+ }
+ fn bar(&self) {
+ {
+ let ref this = self;
+ this;
+ this;
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wraps_closure_in_paren() {
+ cov_mark::check!(inline_call_inline_closure);
+ check_assist(
+ inline_call,
+ r#"
+fn foo(x: fn()) {
+ x();
+}
+
+fn main() {
+ foo$0(|| {})
+}
+"#,
+ r#"
+fn foo(x: fn()) {
+ x();
+}
+
+fn main() {
+ {
+ (|| {})();
+ }
+}
+"#,
+ );
+ check_assist(
+ inline_call,
+ r#"
+fn foo(x: fn()) {
+ x();
+}
+
+fn main() {
+ foo$0(main)
+}
+"#,
+ r#"
+fn foo(x: fn()) {
+ x();
+}
+
+fn main() {
+ {
+ main();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_single_literal_expr() {
+ cov_mark::check!(inline_call_inline_literal);
+ check_assist(
+ inline_call,
+ r#"
+fn foo(x: u32) -> u32{
+ x
+}
+
+fn main() {
+ foo$0(222);
+}
+"#,
+ r#"
+fn foo(x: u32) -> u32{
+ x
+}
+
+fn main() {
+ 222;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_emits_type_for_coercion() {
+ check_assist(
+ inline_call,
+ r#"
+fn foo(x: *const u32) -> u32 {
+ x as u32
+}
+
+fn main() {
+ foo$0(&222);
+}
+"#,
+ r#"
+fn foo(x: *const u32) -> u32 {
+ x as u32
+}
+
+fn main() {
+ {
+ let x: *const u32 = &222;
+ x as u32
+ };
+}
+"#,
+ );
+ }
+
+ // FIXME: const generics aren't being substituted, this is blocked on better support for them
+ #[test]
+ fn inline_substitutes_generics() {
+ check_assist(
+ inline_call,
+ r#"
+fn foo<T, const N: usize>() {
+ bar::<T, N>()
+}
+
+fn bar<U, const M: usize>() {}
+
+fn main() {
+ foo$0::<usize, {0}>();
+}
+"#,
+ r#"
+fn foo<T, const N: usize>() {
+ bar::<T, N>()
+}
+
+fn bar<U, const M: usize>() {}
+
+fn main() {
+ bar::<usize, N>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_callers() {
+ check_assist(
+ inline_into_callers,
+ r#"
+fn do_the_math$0(b: u32) -> u32 {
+ let foo = 10;
+ foo * b + foo
+}
+fn foo() {
+ do_the_math(0);
+ let bar = 10;
+ do_the_math(bar);
+}
+"#,
+ r#"
+
+fn foo() {
+ {
+ let foo = 10;
+ foo * 0 + foo
+ };
+ let bar = 10;
+ {
+ let foo = 10;
+ foo * bar + foo
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_callers_across_files() {
+ check_assist(
+ inline_into_callers,
+ r#"
+//- /lib.rs
+mod foo;
+fn do_the_math$0(b: u32) -> u32 {
+ let foo = 10;
+ foo * b + foo
+}
+//- /foo.rs
+use super::do_the_math;
+fn foo() {
+ do_the_math(0);
+ let bar = 10;
+ do_the_math(bar);
+}
+"#,
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+fn foo() {
+ {
+ let foo = 10;
+ foo * 0 + foo
+ };
+ let bar = 10;
+ {
+ let foo = 10;
+ foo * bar + foo
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_callers_across_files_with_def_file() {
+ check_assist(
+ inline_into_callers,
+ r#"
+//- /lib.rs
+mod foo;
+fn do_the_math$0(b: u32) -> u32 {
+ let foo = 10;
+ foo * b + foo
+}
+fn bar(a: u32, b: u32) -> u32 {
+ do_the_math(0);
+}
+//- /foo.rs
+use super::do_the_math;
+fn foo() {
+ do_the_math(0);
+}
+"#,
+ r#"
+//- /lib.rs
+mod foo;
+
+fn bar(a: u32, b: u32) -> u32 {
+ {
+ let foo = 10;
+ foo * 0 + foo
+ };
+}
+//- /foo.rs
+fn foo() {
+ {
+ let foo = 10;
+ foo * 0 + foo
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_callers_recursive() {
+ cov_mark::check!(inline_into_callers_recursive);
+ check_assist_not_applicable(
+ inline_into_callers,
+ r#"
+fn foo$0() {
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_call_recursive() {
+ cov_mark::check!(inline_call_recursive);
+ check_assist_not_applicable(
+ inline_call,
+ r#"
+fn foo() {
+ foo$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_call_field_shorthand() {
+ cov_mark::check!(inline_call_inline_direct_field);
+ check_assist(
+ inline_call,
+ r#"
+struct Foo {
+ field: u32,
+ field1: u32,
+ field2: u32,
+ field3: u32,
+}
+fn foo(field: u32, field1: u32, val2: u32, val3: u32) -> Foo {
+ Foo {
+ field,
+ field1,
+ field2: val2,
+ field3: val3,
+ }
+}
+fn main() {
+ let bar = 0;
+ let baz = 0;
+ foo$0(bar, 0, baz, 0);
+}
+"#,
+ r#"
+struct Foo {
+ field: u32,
+ field1: u32,
+ field2: u32,
+ field3: u32,
+}
+fn foo(field: u32, field1: u32, val2: u32, val3: u32) -> Foo {
+ Foo {
+ field,
+ field1,
+ field2: val2,
+ field3: val3,
+ }
+}
+fn main() {
+ let bar = 0;
+ let baz = 0;
+ Foo {
+ field: bar,
+ field1: 0,
+ field2: baz,
+ field3: 0,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_callers_wrapped_in_parentheses() {
+ check_assist(
+ inline_into_callers,
+ r#"
+fn foo$0() -> u32 {
+ let x = 0;
+ x
+}
+fn bar() -> u32 {
+ foo() + foo()
+}
+"#,
+ r#"
+
+fn bar() -> u32 {
+ ({
+ let x = 0;
+ x
+ }) + {
+ let x = 0;
+ x
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn inline_call_wrapped_in_parentheses() {
+ check_assist(
+ inline_call,
+ r#"
+fn foo() -> u32 {
+ let x = 0;
+ x
+}
+fn bar() -> u32 {
+ foo$0() + foo()
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let x = 0;
+ x
+}
+fn bar() -> u32 {
+ ({
+ let x = 0;
+ x
+ }) + foo()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn inline_call_defined_in_macro() {
+ cov_mark::check!(inline_call_defined_in_macro);
+ check_assist(
+ inline_call,
+ r#"
+macro_rules! define_foo {
+ () => { fn foo() -> u32 {
+ let x = 0;
+ x
+ } };
+}
+define_foo!();
+fn bar() -> u32 {
+ foo$0()
+}
+"#,
+ r#"
+macro_rules! define_foo {
+ () => { fn foo() -> u32 {
+ let x = 0;
+ x
+ } };
+}
+define_foo!();
+fn bar() -> u32 {
+ {
+ let x = 0;
+ x
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
new file mode 100644
index 000000000..7259d6781
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -0,0 +1,954 @@
+use either::Either;
+use hir::{PathResolution, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::Definition,
+ search::{FileReference, UsageSearchResult},
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, AstNode, AstToken, HasName},
+ SyntaxElement, TextRange,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: inline_local_variable
+//
+// Inlines a local variable.
+//
+// ```
+// fn main() {
+// let x$0 = 1 + 2;
+// x * 4;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// (1 + 2) * 4;
+// }
+// ```
+pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let file_id = ctx.file_id();
+ let range = ctx.selection_trimmed();
+ let InlineData { let_stmt, delete_let, references, target } =
+ if let Some(path_expr) = ctx.find_node_at_offset::<ast::PathExpr>() {
+ inline_usage(&ctx.sema, path_expr, range, file_id)
+ } else if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
+ inline_let(&ctx.sema, let_stmt, range, file_id)
+ } else {
+ None
+ }?;
+ let initializer_expr = let_stmt.initializer()?;
+
+ let delete_range = delete_let.then(|| {
+ if let Some(whitespace) = let_stmt
+ .syntax()
+ .next_sibling_or_token()
+ .and_then(SyntaxElement::into_token)
+ .and_then(ast::Whitespace::cast)
+ {
+ TextRange::new(
+ let_stmt.syntax().text_range().start(),
+ whitespace.syntax().text_range().end(),
+ )
+ } else {
+ let_stmt.syntax().text_range()
+ }
+ });
+
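+    // For each usage, decide whether the inlined initializer needs parentheses: atomic initializers and
+    // usages whose parent expression already delimits its operands can be substituted as-is.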
+ let wrap_in_parens = references
+ .into_iter()
+ .filter_map(|FileReference { range, name, .. }| match name {
+ ast::NameLike::NameRef(name) => Some((range, name)),
+ _ => None,
+ })
+ .map(|(range, name_ref)| {
+ if range != name_ref.syntax().text_range() {
+ // Do not rename inside macros
+ // FIXME: This feels like a bad heuristic for macros
+ return None;
+ }
+ let usage_node =
+ name_ref.syntax().ancestors().find(|it| ast::PathExpr::can_cast(it.kind()));
+ let usage_parent_option =
+ usage_node.and_then(|it| it.parent()).and_then(ast::Expr::cast);
+ let usage_parent = match usage_parent_option {
+ Some(u) => u,
+ None => return Some((range, name_ref, false)),
+ };
+ let initializer = matches!(
+ initializer_expr,
+ ast::Expr::CallExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::TryExpr(_)
+ | ast::Expr::Literal(_)
+ | ast::Expr::TupleExpr(_)
+ | ast::Expr::ArrayExpr(_)
+ | ast::Expr::ParenExpr(_)
+ | ast::Expr::PathExpr(_)
+ | ast::Expr::BlockExpr(_),
+ );
+ let parent = matches!(
+ usage_parent,
+ ast::Expr::CallExpr(_)
+ | ast::Expr::TupleExpr(_)
+ | ast::Expr::ArrayExpr(_)
+ | ast::Expr::ParenExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::WhileExpr(_)
+ | ast::Expr::BreakExpr(_)
+ | ast::Expr::ReturnExpr(_)
+ | ast::Expr::MatchExpr(_)
+ | ast::Expr::BlockExpr(_)
+ );
+ Some((range, name_ref, !(initializer || parent)))
+ })
+ .collect::<Option<Vec<_>>>()?;
+
+ let init_str = initializer_expr.syntax().text().to_string();
+ let init_in_paren = format!("({})", &init_str);
+
+ let target = match target {
+ ast::NameOrNameRef::Name(it) => it.syntax().text_range(),
+ ast::NameOrNameRef::NameRef(it) => it.syntax().text_range(),
+ };
+
+ acc.add(
+ AssistId("inline_local_variable", AssistKind::RefactorInline),
+ "Inline variable",
+ target,
+ move |builder| {
+ if let Some(range) = delete_range {
+ builder.delete(range);
+ }
+ for (range, name, should_wrap) in wrap_in_parens {
+ let replacement = if should_wrap { &init_in_paren } else { &init_str };
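+                // Field shorthand like `S { foo }` is expanded to `S { foo: <initializer> }` instead of being replaced outright.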
+ if ast::RecordExprField::for_field_name(&name).is_some() {
+ cov_mark::hit!(inline_field_shorthand);
+ builder.insert(range.end(), format!(": {}", replacement));
+ } else {
+ builder.replace(range, replacement.clone())
+ }
+ }
+ },
+ )
+}
+
+struct InlineData {
+ let_stmt: ast::LetStmt,
+ delete_let: bool,
+ target: ast::NameOrNameRef,
+ references: Vec<FileReference>,
+}
+
+fn inline_let(
+ sema: &Semantics<'_, RootDatabase>,
+ let_stmt: ast::LetStmt,
+ range: TextRange,
+ file_id: FileId,
+) -> Option<InlineData> {
+ let bind_pat = match let_stmt.pat()? {
+ ast::Pat::IdentPat(pat) => pat,
+ _ => return None,
+ };
+ if bind_pat.mut_token().is_some() {
+ cov_mark::hit!(test_not_inline_mut_variable);
+ return None;
+ }
+ if !bind_pat.syntax().text_range().contains_range(range) {
+ cov_mark::hit!(not_applicable_outside_of_bind_pat);
+ return None;
+ }
+
+ let local = sema.to_def(&bind_pat)?;
+ let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all();
+ match references.remove(&file_id) {
+ Some(references) => Some(InlineData {
+ let_stmt,
+ delete_let: true,
+ target: ast::NameOrNameRef::Name(bind_pat.name()?),
+ references,
+ }),
+ None => {
+ cov_mark::hit!(test_not_applicable_if_variable_unused);
+ None
+ }
+ }
+}
+
+fn inline_usage(
+ sema: &Semantics<'_, RootDatabase>,
+ path_expr: ast::PathExpr,
+ range: TextRange,
+ file_id: FileId,
+) -> Option<InlineData> {
+ let path = path_expr.path()?;
+ let name = path.as_single_name_ref()?;
+ if !name.syntax().text_range().contains_range(range) {
+ cov_mark::hit!(test_not_inline_selection_too_broad);
+ return None;
+ }
+
+ let local = match sema.resolve_path(&path)? {
+ PathResolution::Local(local) => local,
+ _ => return None,
+ };
+ if local.is_mut(sema.db) {
+ cov_mark::hit!(test_not_inline_mut_variable_use);
+ return None;
+ }
+
+ // FIXME: Handle multiple local definitions
+ let bind_pat = match local.source(sema.db).value {
+ Either::Left(ident) => ident,
+ _ => return None,
+ };
+
+ let let_stmt = ast::LetStmt::cast(bind_pat.syntax().parent()?)?;
+
+ let UsageSearchResult { mut references } = Definition::Local(local).usages(sema).all();
+ let mut references = references.remove(&file_id)?;
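+    // The `let` statement is removed only when the reference under the cursor is the binding's sole usage.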
+ let delete_let = references.len() == 1;
+ references.retain(|fref| fref.name.as_name_ref() == Some(&name));
+
+ Some(InlineData { let_stmt, delete_let, target: ast::NameOrNameRef::NameRef(name), references })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_inline_let_bind_literal_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn bar(a: usize) {}
+fn foo() {
+ let a$0 = 1;
+ a + 1;
+ if a > 10 {
+ }
+
+ while a > 10 {
+
+ }
+ let b = a * 10;
+ bar(a);
+}",
+ r"
+fn bar(a: usize) {}
+fn foo() {
+ 1 + 1;
+ if 1 > 10 {
+ }
+
+ while 1 > 10 {
+
+ }
+ let b = 1 * 10;
+ bar(1);
+}",
+ );
+ }
+
+ #[test]
+ fn test_inline_let_bind_bin_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn bar(a: usize) {}
+fn foo() {
+ let a$0 = 1 + 1;
+ a + 1;
+ if a > 10 {
+ }
+
+ while a > 10 {
+
+ }
+ let b = a * 10;
+ bar(a);
+}",
+ r"
+fn bar(a: usize) {}
+fn foo() {
+ (1 + 1) + 1;
+ if (1 + 1) > 10 {
+ }
+
+ while (1 + 1) > 10 {
+
+ }
+ let b = (1 + 1) * 10;
+ bar(1 + 1);
+}",
+ );
+ }
+
+ #[test]
+ fn test_inline_let_bind_function_call_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn bar(a: usize) {}
+fn foo() {
+ let a$0 = bar(1);
+ a + 1;
+ if a > 10 {
+ }
+
+ while a > 10 {
+
+ }
+ let b = a * 10;
+ bar(a);
+}",
+ r"
+fn bar(a: usize) {}
+fn foo() {
+ bar(1) + 1;
+ if bar(1) > 10 {
+ }
+
+ while bar(1) > 10 {
+
+ }
+ let b = bar(1) * 10;
+ bar(bar(1));
+}",
+ );
+ }
+
+ #[test]
+ fn test_inline_let_bind_cast_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn bar(a: usize) -> usize { a }
+fn foo() {
+ let a$0 = bar(1) as u64;
+ a + 1;
+ if a > 10 {
+ }
+
+ while a > 10 {
+
+ }
+ let b = a * 10;
+ bar(a);
+}",
+ r"
+fn bar(a: usize) -> usize { a }
+fn foo() {
+ (bar(1) as u64) + 1;
+ if (bar(1) as u64) > 10 {
+ }
+
+ while (bar(1) as u64) > 10 {
+
+ }
+ let b = (bar(1) as u64) * 10;
+ bar(bar(1) as u64);
+}",
+ );
+ }
+
+ #[test]
+ fn test_inline_let_bind_block_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = { 10 + 1 };
+ a + 1;
+ if a > 10 {
+ }
+
+ while a > 10 {
+
+ }
+ let b = a * 10;
+ bar(a);
+}",
+ r"
+fn foo() {
+ { 10 + 1 } + 1;
+ if { 10 + 1 } > 10 {
+ }
+
+ while { 10 + 1 } > 10 {
+
+ }
+ let b = { 10 + 1 } * 10;
+ bar({ 10 + 1 });
+}",
+ );
+ }
+
+ #[test]
+ fn test_inline_let_bind_paren_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = ( 10 + 1 );
+ a + 1;
+ if a > 10 {
+ }
+
+ while a > 10 {
+
+ }
+ let b = a * 10;
+ bar(a);
+}",
+ r"
+fn foo() {
+ ( 10 + 1 ) + 1;
+ if ( 10 + 1 ) > 10 {
+ }
+
+ while ( 10 + 1 ) > 10 {
+
+ }
+ let b = ( 10 + 1 ) * 10;
+ bar(( 10 + 1 ));
+}",
+ );
+ }
+
+ #[test]
+ fn test_not_inline_mut_variable() {
+ cov_mark::check!(test_not_inline_mut_variable);
+ check_assist_not_applicable(
+ inline_local_variable,
+ r"
+fn foo() {
+ let mut a$0 = 1 + 1;
+ a + 1;
+}",
+ );
+ }
+
+ #[test]
+ fn test_not_inline_mut_variable_use() {
+ cov_mark::check!(test_not_inline_mut_variable_use);
+ check_assist_not_applicable(
+ inline_local_variable,
+ r"
+fn foo() {
+ let mut a = 1 + 1;
+ a$0 + 1;
+}",
+ );
+ }
+
+ #[test]
+ fn test_call_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = bar(10 + 1);
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+fn foo() {
+ let b = bar(10 + 1) * 10;
+ let c = bar(10 + 1) as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_index_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let x = vec![1, 2, 3];
+ let a$0 = x[0];
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+fn foo() {
+ let x = vec![1, 2, 3];
+ let b = x[0] * 10;
+ let c = x[0] as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_method_call_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let bar = vec![1];
+ let a$0 = bar.len();
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+fn foo() {
+ let bar = vec![1];
+ let b = bar.len() * 10;
+ let c = bar.len() as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_field_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+struct Bar {
+ foo: usize
+}
+
+fn foo() {
+ let bar = Bar { foo: 1 };
+ let a$0 = bar.foo;
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+struct Bar {
+ foo: usize
+}
+
+fn foo() {
+ let bar = Bar { foo: 1 };
+ let b = bar.foo * 10;
+ let c = bar.foo as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_try_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() -> Option<usize> {
+ let bar = Some(1);
+ let a$0 = bar?;
+ let b = a * 10;
+ let c = a as usize;
+ None
+}",
+ r"
+fn foo() -> Option<usize> {
+ let bar = Some(1);
+ let b = bar? * 10;
+ let c = bar? as usize;
+ None
+}",
+ );
+ }
+
+ #[test]
+ fn test_ref_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let bar = 10;
+ let a$0 = &bar;
+ let b = a * 10;
+}",
+ r"
+fn foo() {
+ let bar = 10;
+ let b = (&bar) * 10;
+}",
+ );
+ }
+
+ #[test]
+ fn test_tuple_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = (10, 20);
+ let b = a[0];
+}",
+ r"
+fn foo() {
+ let b = (10, 20)[0];
+}",
+ );
+ }
+
+ #[test]
+ fn test_array_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = [1, 2, 3];
+ let b = a.len();
+}",
+ r"
+fn foo() {
+ let b = [1, 2, 3].len();
+}",
+ );
+ }
+
+ #[test]
+ fn test_paren() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = (10 + 20);
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+fn foo() {
+ let b = (10 + 20) * 10;
+ let c = (10 + 20) as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_path_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let d = 10;
+ let a$0 = d;
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+fn foo() {
+ let d = 10;
+ let b = d * 10;
+ let c = d as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_block_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = { 10 };
+ let b = a * 10;
+ let c = a as usize;
+}",
+ r"
+fn foo() {
+ let b = { 10 } * 10;
+ let c = { 10 } as usize;
+}",
+ );
+ }
+
+ #[test]
+ fn test_used_in_different_expr1() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = 10 + 20;
+ let b = a * 10;
+ let c = (a, 20);
+ let d = [a, 10];
+ let e = (a);
+}",
+ r"
+fn foo() {
+ let b = (10 + 20) * 10;
+ let c = (10 + 20, 20);
+ let d = [10 + 20, 10];
+ let e = (10 + 20);
+}",
+ );
+ }
+
+ #[test]
+ fn test_used_in_for_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = vec![10, 20];
+ for i in a {}
+}",
+ r"
+fn foo() {
+ for i in vec![10, 20] {}
+}",
+ );
+ }
+
+ #[test]
+ fn test_used_in_while_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = 1 > 0;
+ while a {}
+}",
+ r"
+fn foo() {
+ while 1 > 0 {}
+}",
+ );
+ }
+
+ #[test]
+ fn test_used_in_break_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = 1 + 1;
+ loop {
+ break a;
+ }
+}",
+ r"
+fn foo() {
+ loop {
+ break 1 + 1;
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn test_used_in_return_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = 1 > 0;
+ return a;
+}",
+ r"
+fn foo() {
+ return 1 > 0;
+}",
+ );
+ }
+
+ #[test]
+ fn test_used_in_match_expr() {
+ check_assist(
+ inline_local_variable,
+ r"
+fn foo() {
+ let a$0 = 1 > 0;
+ match a {}
+}",
+ r"
+fn foo() {
+ match 1 > 0 {}
+}",
+ );
+ }
+
+ #[test]
+ fn inline_field_shorthand() {
+ cov_mark::check!(inline_field_shorthand);
+ check_assist(
+ inline_local_variable,
+ r"
+struct S { foo: i32}
+fn main() {
+ let $0foo = 92;
+ S { foo }
+}
+",
+ r"
+struct S { foo: i32}
+fn main() {
+ S { foo: 92 }
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_not_applicable_if_variable_unused() {
+ cov_mark::check!(test_not_applicable_if_variable_unused);
+ check_assist_not_applicable(
+ inline_local_variable,
+ r"
+fn foo() {
+ let $0a = 0;
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn not_applicable_outside_of_bind_pat() {
+ cov_mark::check!(not_applicable_outside_of_bind_pat);
+ check_assist_not_applicable(
+ inline_local_variable,
+ r"
+fn main() {
+ let x = $01 + 2;
+ x * 4;
+}
+",
+ )
+ }
+
+ #[test]
+ fn works_on_local_usage() {
+ check_assist(
+ inline_local_variable,
+ r#"
+fn f() {
+ let xyz = 0;
+ xyz$0;
+}
+"#,
+ r#"
+fn f() {
+ 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_remove_let_when_multiple_usages() {
+ check_assist(
+ inline_local_variable,
+ r#"
+fn f() {
+ let xyz = 0;
+ xyz$0;
+ xyz;
+}
+"#,
+ r#"
+fn f() {
+ let xyz = 0;
+ 0;
+ xyz;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_with_non_ident_pattern() {
+ check_assist_not_applicable(
+ inline_local_variable,
+ r#"
+fn main() {
+ let (x, y) = (0, 1);
+ x$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_on_local_usage_in_macro() {
+ check_assist_not_applicable(
+ inline_local_variable,
+ r#"
+macro_rules! m {
+ ($i:ident) => { $i }
+}
+fn f() {
+ let xyz = 0;
+ m!(xyz$0); // replacing it would break the macro
+}
+"#,
+ );
+ check_assist_not_applicable(
+ inline_local_variable,
+ r#"
+macro_rules! m {
+ ($i:ident) => { $i }
+}
+fn f() {
+ let xyz$0 = 0;
+ m!(xyz); // replacing it would break the macro
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_not_inline_selection_too_broad() {
+ cov_mark::check!(test_not_inline_selection_too_broad);
+ check_assist_not_applicable(
+ inline_local_variable,
+ r#"
+fn f() {
+ let foo = 0;
+ let bar = 0;
+ $0foo + bar$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_inline_ref_in_let() {
+ check_assist(
+ inline_local_variable,
+ r#"
+fn f() {
+ let x = {
+ let y = 0;
+ y$0
+ };
+}
+"#,
+ r#"
+fn f() {
+ let x = {
+ 0
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_inline_let_unit_struct() {
+ check_assist_not_applicable(
+ inline_local_variable,
+ r#"
+struct S;
+fn f() {
+ let S$0 = S;
+ S;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
new file mode 100644
index 000000000..054663a06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -0,0 +1,838 @@
+// Some ideas for future improvements:
+// - Support replacing aliases which are used in expressions, e.g. `A::new()`.
+// - "inline_alias_to_users" assist #10881.
+// - Remove unused aliases if there are no longer any users, see inline_call.rs.
+
+use hir::{HasSource, PathResolution};
+use itertools::Itertools;
+use std::collections::HashMap;
+use syntax::{
+ ast::{self, make, HasGenericParams, HasName},
+ ted, AstNode, NodeOrToken, SyntaxNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: inline_type_alias
+//
+// Replace a type alias with its concrete type.
+//
+// ```
+// type A<T = u32> = Vec<T>;
+//
+// fn main() {
+// let a: $0A;
+// }
+// ```
+// ->
+// ```
+// type A<T = u32> = Vec<T>;
+//
+// fn main() {
+// let a: Vec<u32>;
+// }
+// ```
+pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ enum Replacement {
+ Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap },
+ Plain,
+ }
+
+ let alias_instance = ctx.find_node_at_offset::<ast::PathType>()?;
+ let concrete_type;
+ let replacement;
+ match alias_instance.path()?.as_single_name_ref() {
+ Some(nameref) if nameref.Self_token().is_some() => {
+ match ctx.sema.resolve_path(&alias_instance.path()?)? {
+ PathResolution::SelfType(imp) => {
+ concrete_type = imp.source(ctx.db())?.value.self_ty()?;
+ }
+ // FIXME: should also work in ADT definitions
+ _ => return None,
+ }
+
+ replacement = Replacement::Plain;
+ }
+ _ => {
+ let alias = get_type_alias(&ctx, &alias_instance)?;
+ concrete_type = alias.ty()?;
+
+ replacement = if let Some(alias_generics) = alias.generic_param_list() {
+ if alias_generics.generic_params().next().is_none() {
+ cov_mark::hit!(no_generics_params);
+ return None;
+ }
+
+ let instance_args =
+ alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast);
+
+ Replacement::Generic {
+ lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?,
+ const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?,
+ }
+ } else {
+ Replacement::Plain
+ };
+ }
+ }
+
+ let target = alias_instance.syntax().text_range();
+
+ acc.add(
+ AssistId("inline_type_alias", AssistKind::RefactorInline),
+ "Inline type alias",
+ target,
+ |builder| {
+ let replacement_text = match replacement {
+ Replacement::Generic { lifetime_map, const_and_type_map } => {
+ create_replacement(&lifetime_map, &const_and_type_map, &concrete_type)
+ }
+ Replacement::Plain => concrete_type.to_string(),
+ };
+
+ builder.replace(target, replacement_text);
+ },
+ )
+}
+
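+/// Maps each lifetime parameter name on the alias to the lifetime supplied at
+/// the usage site, falling back to the wildcard lifetime `'_` when it is omitted.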
+struct LifetimeMap(HashMap<String, ast::Lifetime>);
+
+impl LifetimeMap {
+ fn new(
+ instance_args: &Option<ast::GenericArgList>,
+ alias_generics: &ast::GenericParamList,
+ ) -> Option<Self> {
+ let mut inner = HashMap::new();
+
+ let wildcard_lifetime = make::lifetime("'_");
+ let lifetimes = alias_generics
+ .lifetime_params()
+ .filter_map(|lp| lp.lifetime())
+ .map(|l| l.to_string())
+ .collect_vec();
+
+ for lifetime in &lifetimes {
+ inner.insert(lifetime.to_string(), wildcard_lifetime.clone());
+ }
+
+ if let Some(instance_generic_args_list) = &instance_args {
+ for (index, lifetime) in instance_generic_args_list
+ .lifetime_args()
+ .filter_map(|arg| arg.lifetime())
+ .enumerate()
+ {
+ let key = match lifetimes.get(index) {
+ Some(key) => key,
+ None => {
+ cov_mark::hit!(too_many_lifetimes);
+ return None;
+ }
+ };
+
+ inner.insert(key.clone(), lifetime);
+ }
+ }
+
+ Some(Self(inner))
+ }
+}
+
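+/// Maps each const and type parameter name on the alias to the argument supplied
+/// at the usage site, falling back to the parameter's default when the argument
+/// is omitted.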
+struct ConstAndTypeMap(HashMap<String, SyntaxNode>);
+
+impl ConstAndTypeMap {
+ fn new(
+ instance_args: &Option<ast::GenericArgList>,
+ alias_generics: &ast::GenericParamList,
+ ) -> Option<Self> {
+ let mut inner = HashMap::new();
+ let instance_generics = generic_args_to_const_and_type_generics(instance_args);
+ let alias_generics = generic_param_list_to_const_and_type_generics(&alias_generics);
+
+ if instance_generics.len() > alias_generics.len() {
+ cov_mark::hit!(too_many_generic_args);
+ return None;
+ }
+
+ // Any declaration generics that don't have a default value must have one
+ // provided by the instance.
+ for (i, declaration_generic) in alias_generics.iter().enumerate() {
+ let key = declaration_generic.replacement_key()?;
+
+ if let Some(instance_generic) = instance_generics.get(i) {
+ inner.insert(key, instance_generic.replacement_value()?);
+ } else if let Some(value) = declaration_generic.replacement_value() {
+ inner.insert(key, value);
+ } else {
+ cov_mark::hit!(missing_replacement_param);
+ return None;
+ }
+ }
+
+ Some(Self(inner))
+ }
+}
+
+/// This doesn't attempt to ensure specified generics are compatible with those
+/// required by the type alias, other than lifetimes which must either all be
+/// specified or all omitted. It will replace TypeArgs with ConstArgs and vice
+/// versa if they're in the wrong position. It supports partially specified
+/// generics.
+///
+/// 1. Map the provided instance's generic args to the type alias's generic
+/// params:
+///
+/// ```
+/// type A<'a, const N: usize, T = u64> = &'a [T; N];
+/// ^ alias generic params
+/// let a: A<100>;
+/// ^ instance generic args
+/// ```
+///
+/// generic['a] = '_ due to omission
+/// generic[N] = 100 due to the instance arg
+/// generic[T] = u64 due to the default param
+///
+/// 2. Copy the concrete type and substitute in each found mapping:
+///
+/// &'_ [u64; 100]
+///
+/// 3. Remove wildcard lifetimes entirely:
+///
+/// &[u64; 100]
+fn create_replacement(
+ lifetime_map: &LifetimeMap,
+ const_and_type_map: &ConstAndTypeMap,
+ concrete_type: &ast::Type,
+) -> String {
+ let updated_concrete_type = concrete_type.clone_for_update();
+ let mut replacements = Vec::new();
+ let mut removals = Vec::new();
+
+ for syntax in updated_concrete_type.syntax().descendants() {
+ let syntax_string = syntax.to_string();
+ let syntax_str = syntax_string.as_str();
+
+ if let Some(old_lifetime) = ast::Lifetime::cast(syntax.clone()) {
+ if let Some(new_lifetime) = lifetime_map.0.get(&old_lifetime.to_string()) {
+ if new_lifetime.text() == "'_" {
+ removals.push(NodeOrToken::Node(syntax.clone()));
+
+ if let Some(ws) = syntax.next_sibling_or_token() {
+ removals.push(ws.clone());
+ }
+
+ continue;
+ }
+
+ replacements.push((syntax.clone(), new_lifetime.syntax().clone_for_update()));
+ }
+ } else if let Some(replacement_syntax) = const_and_type_map.0.get(syntax_str) {
+ let new_string = replacement_syntax.to_string();
+ let new = if new_string == "_" {
+ make::wildcard_pat().syntax().clone_for_update()
+ } else {
+ replacement_syntax.clone_for_update()
+ };
+
+ replacements.push((syntax.clone(), new));
+ }
+ }
+
+ for (old, new) in replacements {
+ ted::replace(old, new);
+ }
+
+ for syntax in removals {
+ ted::remove(syntax);
+ }
+
+ updated_concrete_type.to_string()
+}
+
+fn get_type_alias(ctx: &AssistContext<'_>, path: &ast::PathType) -> Option<ast::TypeAlias> {
+ let resolved_path = ctx.sema.resolve_path(&path.path()?)?;
+
+ // We need the generics in the correct order to be able to map any provided
+ // instance generics to declaration generics. The `hir::TypeAlias` doesn't
+ // keep the order, so we must get the `ast::TypeAlias` from the hir
+ // definition.
+ if let PathResolution::Def(hir::ModuleDef::TypeAlias(ta)) = resolved_path {
+ Some(ctx.sema.source(ta)?.value)
+ } else {
+ None
+ }
+}
+
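+/// A const or type generic, either an argument at the usage site or a parameter
+/// on the alias declaration.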
+enum ConstOrTypeGeneric {
+ ConstArg(ast::ConstArg),
+ TypeArg(ast::TypeArg),
+ ConstParam(ast::ConstParam),
+ TypeParam(ast::TypeParam),
+}
+
+impl ConstOrTypeGeneric {
+ fn replacement_key(&self) -> Option<String> {
+ // Only params are used as replacement keys.
+ match self {
+ ConstOrTypeGeneric::ConstParam(cp) => Some(cp.name()?.to_string()),
+ ConstOrTypeGeneric::TypeParam(tp) => Some(tp.name()?.to_string()),
+ _ => None,
+ }
+ }
+
+ fn replacement_value(&self) -> Option<SyntaxNode> {
+ Some(match self {
+ ConstOrTypeGeneric::ConstArg(ca) => ca.expr()?.syntax().clone(),
+ ConstOrTypeGeneric::TypeArg(ta) => ta.syntax().clone(),
+ ConstOrTypeGeneric::ConstParam(cp) => cp.default_val()?.syntax().clone(),
+ ConstOrTypeGeneric::TypeParam(tp) => tp.default_type()?.syntax().clone(),
+ })
+ }
+}
+
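+/// Collects the alias declaration's const and type parameters, skipping lifetime
+/// parameters.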
+fn generic_param_list_to_const_and_type_generics(
+ generics: &ast::GenericParamList,
+) -> Vec<ConstOrTypeGeneric> {
+ let mut others = Vec::new();
+
+ for param in generics.generic_params() {
+ match param {
+ ast::GenericParam::LifetimeParam(_) => {}
+ ast::GenericParam::ConstParam(cp) => {
+ others.push(ConstOrTypeGeneric::ConstParam(cp));
+ }
+ ast::GenericParam::TypeParam(tp) => others.push(ConstOrTypeGeneric::TypeParam(tp)),
+ }
+ }
+
+ others
+}
+
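+/// Collects the const and type arguments supplied at the usage site, skipping
+/// lifetimes and any other kinds of argument.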
+fn generic_args_to_const_and_type_generics(
+ generics: &Option<ast::GenericArgList>,
+) -> Vec<ConstOrTypeGeneric> {
+ let mut others = Vec::new();
+
+ // It's fine for there to be no instance generics because the declaration
+ // might have default values or they might be inferred.
+ if let Some(generics) = generics {
+ for arg in generics.generic_args() {
+ match arg {
+ ast::GenericArg::TypeArg(ta) => {
+ others.push(ConstOrTypeGeneric::TypeArg(ta));
+ }
+ ast::GenericArg::ConstArg(ca) => {
+ others.push(ConstOrTypeGeneric::ConstArg(ca));
+ }
+ _ => {}
+ }
+ }
+ }
+
+ others
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn empty_generic_params() {
+ cov_mark::check!(no_generics_params);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<> = T;
+fn main() {
+ let a: $0A<u32>;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn too_many_generic_args() {
+ cov_mark::check!(too_many_generic_args);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<T> = T;
+fn main() {
+ let a: $0A<u32, u64>;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn too_many_lifetimes() {
+ cov_mark::check!(too_many_lifetimes);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<'a> = &'a &'b u32;
+fn f<'a>() {
+ let a: $0A<'a, 'b> = 0;
+}
+"#,
+ );
+ }
+
+    // This must be supported in order to implement "inline_alias_to_users",
+    // or whatever it will be called.
+ #[test]
+ fn alias_as_expression_ignored() {
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A = Vec<u32>;
+fn main() {
+ let a: A = $0A::new();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn primitive_arg() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<T> = T;
+fn main() {
+ let a: $0A<u32> = 0;
+}
+"#,
+ r#"
+type A<T> = T;
+fn main() {
+ let a: u32 = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_generic_replacements() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = Vec<u32>;
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A = Vec<u32>;
+fn main() {
+ let a: Vec<u32>;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_expression() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize = { 1 }> = [u32; N];
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A<const N: usize = { 1 }> = [u32; N];
+fn main() {
+ let a: [u32; { 1 }];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_default_value() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize = 1> = [u32; N];
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A<const N: usize = 1> = [u32; N];
+fn main() {
+ let a: [u32; 1];
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn all_param_types() {
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Struct<const C: usize>;
+type A<'inner1, 'outer1, Outer1, const INNER1: usize, Inner1: Clone, const OUTER1: usize> = (Struct<INNER1>, Struct<OUTER1>, Outer1, &'inner1 (), Inner1, &'outer1 ());
+fn foo<'inner2, 'outer2, Outer2, const INNER2: usize, Inner2, const OUTER2: usize>() {
+ let a: $0A<'inner2, 'outer2, Outer2, INNER2, Inner2, OUTER2>;
+}
+"#,
+ r#"
+struct Struct<const C: usize>;
+type A<'inner1, 'outer1, Outer1, const INNER1: usize, Inner1: Clone, const OUTER1: usize> = (Struct<INNER1>, Struct<OUTER1>, Outer1, &'inner1 (), Inner1, &'outer1 ());
+fn foo<'inner2, 'outer2, Outer2, const INNER2: usize, Inner2, const OUTER2: usize>() {
+ let a: (Struct<INNER2>, Struct<OUTER2>, Outer2, &'inner2 (), Inner2, &'outer2 ());
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn omitted_lifetimes() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<'l, 'r> = &'l &'r u32;
+fn main() {
+ let a: $0A;
+}
+"#,
+ r#"
+type A<'l, 'r> = &'l &'r u32;
+fn main() {
+ let a: &&u32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn omitted_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let a: $0A<'_, '_>;
+}
+"#,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let a: &std::collections::HashMap<&str, u32>;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn omitted_everything() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let v = std::collections::HashMap<&str, u32>;
+ let a: $0A = &v;
+}
+"#,
+ r#"
+type A<'r, 'l, T = u32> = &'l std::collections::HashMap<&'r str, T>;
+fn main() {
+ let v = std::collections::HashMap<&str, u32>;
+ let a: &std::collections::HashMap<&str, u32> = &v;
+}
+"#,
+ );
+ }
+
+    // This doesn't actually cause the GenericArgList to contain an AssocTypeArg.
+ #[test]
+ fn arg_associated_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+trait Tra { type Assoc; fn a(); }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T> = Vec<T>;
+ let a: $0A<Self::Assoc>;
+ }
+}
+"#,
+ r#"
+trait Tra { type Assoc; fn a(); }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T> = Vec<T>;
+ let a: Vec<Self::Assoc>;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_default_associated_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+trait Tra { type Assoc; fn a() }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T = Self::Assoc> = Vec<T>;
+ let a: $0A;
+ }
+}
+"#,
+ r#"
+trait Tra { type Assoc; fn a() }
+struct Str {}
+impl Tra for Str {
+ type Assoc = u32;
+ fn a() {
+ type A<T = Self::Assoc> = Vec<T>;
+ let a: Vec<Self::Assoc>;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_pointer() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = fn(u32);
+fn foo(a: u32) {}
+fn main() {
+ let a: $0A = foo;
+}
+"#,
+ r#"
+type A = fn(u32);
+fn foo(a: u32) {}
+fn main() {
+ let a: fn(u32) = foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closure() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = Box<dyn FnOnce(u32) -> u32>;
+fn main() {
+ let a: $0A = Box::new(|_| 0);
+}
+"#,
+ r#"
+type A = Box<dyn FnOnce(u32) -> u32>;
+fn main() {
+ let a: Box<dyn FnOnce(u32) -> u32> = Box::new(|_| 0);
+}
+"#,
+ );
+ }
+
+ // Type aliases can't be used in traits, but someone might use the assist to
+ // fix the error.
+ #[test]
+ fn bounds() {
+ check_assist(
+ inline_type_alias,
+ r#"type A = std::io::Write; fn f<T>() where T: $0A {}"#,
+ r#"type A = std::io::Write; fn f<T>() where T: std::io::Write {}"#,
+ );
+ }
+
+ #[test]
+ fn function_parameter() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = std::io::Write;
+fn f(a: impl $0A) {}
+"#,
+ r#"
+type A = std::io::Write;
+fn f(a: impl std::io::Write) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn arg_expression() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: $0A<{ 1 + 1 }>;
+}
+"#,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: [u32; { 1 + 1 }];
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn alias_instance_generic_path() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: $0A<u32::MAX>;
+}
+"#,
+ r#"
+type A<const N: usize> = [u32; N];
+fn main() {
+ let a: [u32; u32::MAX];
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generic_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+type A = String;
+fn f(a: Vec<$0A>) {}
+"#,
+ r#"
+type A = String;
+fn f(a: Vec<String>) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_replacement_param() {
+ cov_mark::check!(missing_replacement_param);
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+type A<U> = Vec<T>;
+fn main() {
+ let a: $0A;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn full_path_type_is_replaced() {
+ check_assist(
+ inline_type_alias,
+ r#"
+mod foo {
+ pub type A = String;
+}
+fn main() {
+ let a: foo::$0A;
+}
+"#,
+ r#"
+mod foo {
+ pub type A = String;
+}
+fn main() {
+ let a: String;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn inline_self_type() {
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Strukt;
+
+impl Strukt {
+ fn new() -> Self$0 {}
+}
+"#,
+ r#"
+struct Strukt;
+
+impl Strukt {
+ fn new() -> Strukt {}
+}
+"#,
+ );
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+impl<T, const C: usize> Strukt<'_, T, C> {
+ fn new() -> Self$0 {}
+}
+"#,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+impl<T, const C: usize> Strukt<'_, T, C> {
+ fn new() -> Strukt<'_, T, C> {}
+}
+"#,
+ );
+ check_assist(
+ inline_type_alias,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+trait Tr<'b, T> {}
+
+impl<T, const C: usize> Tr<'static, u8> for Strukt<'_, T, C> {
+ fn new() -> Self$0 {}
+}
+"#,
+ r#"
+struct Strukt<'a, T, const C: usize>(&'a [T; C]);
+
+trait Tr<'b, T> {}
+
+impl<T, const C: usize> Tr<'static, u8> for Strukt<'_, T, C> {
+ fn new() -> Strukt<'_, T, C> {}
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ inline_type_alias,
+ r#"
+trait Tr {
+ fn new() -> Self$0;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs
new file mode 100644
index 000000000..062c816ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs
@@ -0,0 +1,144 @@
+use syntax::{
+ ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode},
+ ted,
+};
+
+use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: introduce_named_generic
+//
+// Replaces an `impl Trait` function argument with a named generic parameter.
+//
+// ```
+// fn foo(bar: $0impl Bar) {}
+// ```
+// ->
+// ```
+// fn foo<B: Bar>(bar: B) {}
+// ```
+pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?;
+ let param = impl_trait_type.syntax().parent().and_then(ast::Param::cast)?;
+ let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?;
+
+ let type_bound_list = impl_trait_type.type_bound_list()?;
+
+ let target = fn_.syntax().text_range();
+ acc.add(
+ AssistId("introduce_named_generic", AssistKind::RefactorRewrite),
+ "Replace impl trait with generic",
+ target,
+ |edit| {
+ let impl_trait_type = edit.make_mut(impl_trait_type);
+ let fn_ = edit.make_mut(fn_);
+
+ let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type);
+
+ let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list))
+ .clone_for_update();
+ let new_ty = make::ty(&type_param_name).clone_for_update();
+
+ ted::replace(impl_trait_type.syntax(), new_ty.syntax());
+ fn_.get_or_create_generic_param_list().add_generic_param(type_param.into())
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::check_assist;
+
+ #[test]
+ fn introduce_named_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<G>(bar: $0impl Bar) {}"#,
+ r#"fn foo<G, B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_without_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo(bar: $0impl Bar) {}"#,
+ r#"fn foo<B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_two_impl_trait_with_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<G>(foo: impl Foo, bar: $0impl Bar) {}"#,
+ r#"fn foo<G, B: Bar>(foo: impl Foo, bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_empty_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<>(bar: $0impl Bar) {}"#,
+ r#"fn foo<B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_empty_multiline_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"
+fn foo<
+>(bar: $0impl Bar) {}
+"#,
+ r#"
+fn foo<B: Bar
+>(bar: B) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_exist_generic_letter() {
+ // FIXME: This is wrong, we should pick a different name if the one we
+ // want is already bound.
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<B>(bar: $0impl Bar) {}"#,
+ r#"fn foo<B, B: Bar>(bar: B) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_multiline_generic_params() {
+ check_assist(
+ introduce_named_generic,
+ r#"
+fn foo<
+ G: Foo,
+ F,
+ H,
+>(bar: $0impl Bar) {}
+"#,
+ r#"
+fn foo<
+ G: Foo,
+ F,
+ H, B: Bar,
+>(bar: B) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_multiple() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo(bar: $0impl Foo + Bar) {}"#,
+ r#"fn foo<F: Foo + Bar>(bar: F) {}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
new file mode 100644
index 000000000..ce91dd237
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
@@ -0,0 +1,338 @@
+use ide_db::FxHashSet;
+use syntax::{
+ ast::{self, edit_in_place::GenericParamsOwnerEdit, make, HasGenericParams},
+ ted::{self, Position},
+ AstNode, TextRange,
+};
+
+use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};
+
+static ASSIST_NAME: &str = "introduce_named_lifetime";
+static ASSIST_LABEL: &str = "Introduce named lifetime";
+
+// Assist: introduce_named_lifetime
+//
+// Change an anonymous lifetime to a named lifetime.
+//
+// ```
+// impl Cursor<'_$0> {
+// fn node(self) -> &SyntaxNode {
+// match self {
+// Cursor::Replace(node) | Cursor::Before(node) => node,
+// }
+// }
+// }
+// ```
+// ->
+// ```
+// impl<'a> Cursor<'a> {
+// fn node(self) -> &SyntaxNode {
+// match self {
+// Cursor::Replace(node) | Cursor::Before(node) => node,
+// }
+// }
+// }
+// ```
+pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // FIXME: How can we handle renaming any one of multiple anonymous lifetimes?
+ // FIXME: should also add support for the case fun(f: &Foo) -> &$0Foo
+ let lifetime =
+ ctx.find_node_at_offset::<ast::Lifetime>().filter(|lifetime| lifetime.text() == "'_")?;
+ let lifetime_loc = lifetime.lifetime_ident_token()?.text_range();
+
+ if let Some(fn_def) = lifetime.syntax().ancestors().find_map(ast::Fn::cast) {
+ generate_fn_def_assist(acc, fn_def, lifetime_loc, lifetime)
+ } else if let Some(impl_def) = lifetime.syntax().ancestors().find_map(ast::Impl::cast) {
+ generate_impl_def_assist(acc, impl_def, lifetime_loc, lifetime)
+ } else {
+ None
+ }
+}
+
+/// Generate the assist for the fn def case
+fn generate_fn_def_assist(
+ acc: &mut Assists,
+ fn_def: ast::Fn,
+ lifetime_loc: TextRange,
+ lifetime: ast::Lifetime,
+) -> Option<()> {
+ let param_list: ast::ParamList = fn_def.param_list()?;
+ let new_lifetime_param = generate_unique_lifetime_param_name(fn_def.generic_param_list())?;
+ let self_param =
+        // use the self param if it's a reference and has no explicit lifetime
+ param_list.self_param().filter(|p| p.lifetime().is_none() && p.amp_token().is_some());
+ // compute the location which implicitly has the same lifetime as the anonymous lifetime
+ let loc_needing_lifetime = if let Some(self_param) = self_param {
+ // if we have a self reference, use that
+ Some(NeedsLifetime::SelfParam(self_param))
+ } else {
+        // otherwise, if there's a single reference parameter without a named lifetime, use that
+ let fn_params_without_lifetime: Vec<_> = param_list
+ .params()
+ .filter_map(|param| match param.ty() {
+ Some(ast::Type::RefType(ascribed_type)) if ascribed_type.lifetime().is_none() => {
+ Some(NeedsLifetime::RefType(ascribed_type))
+ }
+ _ => None,
+ })
+ .collect();
+ match fn_params_without_lifetime.len() {
+ 1 => Some(fn_params_without_lifetime.into_iter().next()?),
+ 0 => None,
+            // multiple unnamed lifetimes are invalid, so the assist is not applicable
+ _ => return None,
+ }
+ };
+ acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
+ let fn_def = builder.make_mut(fn_def);
+ let lifetime = builder.make_mut(lifetime);
+ let loc_needing_lifetime =
+ loc_needing_lifetime.and_then(|it| it.make_mut(builder).to_position());
+
+ fn_def.get_or_create_generic_param_list().add_generic_param(
+ make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
+ );
+ ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
+ if let Some(position) = loc_needing_lifetime {
+ ted::insert(position, new_lifetime_param.clone_for_update().syntax());
+ }
+ })
+}
+
+/// Generate the assist for the impl def case
+fn generate_impl_def_assist(
+ acc: &mut Assists,
+ impl_def: ast::Impl,
+ lifetime_loc: TextRange,
+ lifetime: ast::Lifetime,
+) -> Option<()> {
+ let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
+ acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
+ let impl_def = builder.make_mut(impl_def);
+ let lifetime = builder.make_mut(lifetime);
+
+ impl_def.get_or_create_generic_param_list().add_generic_param(
+ make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
+ );
+ ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
+ })
+}
+
+/// Given a type parameter list, generate a unique lifetime parameter name
+/// which is not in the list
+fn generate_unique_lifetime_param_name(
+ existing_type_param_list: Option<ast::GenericParamList>,
+) -> Option<ast::Lifetime> {
+ match existing_type_param_list {
+ Some(type_params) => {
+ let used_lifetime_params: FxHashSet<_> =
+ type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect();
+ ('a'..='z').map(|it| format!("'{}", it)).find(|it| !used_lifetime_params.contains(it))
+ }
+ None => Some("'a".to_string()),
+ }
+ .map(|it| make::lifetime(&it))
+}
+
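+/// A location (a `self` parameter or a reference type) whose elided lifetime should
+/// also receive the new named lifetime, inserted after its `&`.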
+enum NeedsLifetime {
+ SelfParam(ast::SelfParam),
+ RefType(ast::RefType),
+}
+
+impl NeedsLifetime {
+ fn make_mut(self, builder: &mut AssistBuilder) -> Self {
+ match self {
+ Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)),
+ Self::RefType(it) => Self::RefType(builder.make_mut(it)),
+ }
+ }
+
+ fn to_position(self) -> Option<Position> {
+ match self {
+ Self::SelfParam(it) => Some(Position::after(it.amp_token()?)),
+ Self::RefType(it) => Some(Position::after(it.amp_token()?)),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_example_case() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<'_$0> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+ }"#,
+ r#"impl<'a> Cursor<'a> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+ }"#,
+ );
+ }
+
+ #[test]
+ fn test_example_case_simplified() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<'_$0> {"#,
+ r#"impl<'a> Cursor<'a> {"#,
+ );
+ }
+
+ #[test]
+ fn test_example_case_cursor_after_tick() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<'$0_> {"#,
+ r#"impl<'a> Cursor<'a> {"#,
+ );
+ }
+
+ #[test]
+ fn test_impl_with_other_type_param() {
+ check_assist(
+ introduce_named_lifetime,
+ "impl<I> fmt::Display for SepByBuilder<'_$0, I>
+ where
+ I: Iterator,
+ I::Item: fmt::Display,
+ {",
+ "impl<I, 'a> fmt::Display for SepByBuilder<'a, I>
+ where
+ I: Iterator,
+ I::Item: fmt::Display,
+ {",
+ )
+ }
+
+ #[test]
+ fn test_example_case_cursor_before_tick() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl Cursor<$0'_> {"#,
+ r#"impl<'a> Cursor<'a> {"#,
+ );
+ }
+
+ #[test]
+ fn test_not_applicable_cursor_position() {
+ check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'_>$0 {"#);
+ check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor$0<'_> {"#);
+ }
+
+ #[test]
+ fn test_not_applicable_lifetime_already_name() {
+ check_assist_not_applicable(introduce_named_lifetime, r#"impl Cursor<'a$0> {"#);
+ check_assist_not_applicable(introduce_named_lifetime, r#"fn my_fun<'a>() -> X<'a$0>"#);
+ }
+
+ #[test]
+ fn test_with_type_parameter() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl<T> Cursor<T, '_$0>"#,
+ r#"impl<T, 'a> Cursor<T, 'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_with_existing_lifetime_name_conflict() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"impl<'a, 'b> Cursor<'a, 'b, '_$0>"#,
+ r#"impl<'a, 'b, 'c> Cursor<'a, 'b, 'c>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_return_value_anon_lifetime_param() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun() -> X<'_$0>"#,
+ r#"fn my_fun<'a>() -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_return_value_anon_reference_lifetime() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun() -> &'_$0 X"#,
+ r#"fn my_fun<'a>() -> &'a X"#,
+ );
+ }
+
+ #[test]
+ fn test_function_param_anon_lifetime() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun(x: X<'_$0>)"#,
+ r#"fn my_fun<'a>(x: X<'a>)"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_params() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun(f: &Foo) -> X<'_$0>"#,
+ r#"fn my_fun<'a>(f: &'a Foo) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_params_in_presence_of_other_lifetime() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun<'other>(f: &Foo, b: &'other Bar) -> X<'_$0>"#,
+ r#"fn my_fun<'other, 'a>(f: &'a Foo, b: &'other Bar) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_not_applicable_without_self_and_multiple_unnamed_param_lifetimes() {
+ // this is not permitted under lifetime elision rules
+ check_assist_not_applicable(
+ introduce_named_lifetime,
+ r#"fn my_fun(f: &Foo, b: &Bar) -> X<'_$0>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_self_ref_param() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun<'other>(&self, f: &Foo, b: &'other Bar) -> X<'_$0>"#,
+ r#"fn my_fun<'other, 'a>(&'a self, f: &Foo, b: &'other Bar) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_param_with_non_ref_self() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn my_fun<'other>(self, f: &Foo, b: &'other Bar) -> X<'_$0>"#,
+ r#"fn my_fun<'other, 'a>(self, f: &'a Foo, b: &'other Bar) -> X<'a>"#,
+ );
+ }
+
+ #[test]
+ fn test_function_add_lifetime_to_self_ref_mut() {
+ check_assist(
+ introduce_named_lifetime,
+ r#"fn foo(&mut self) -> &'_$0 ()"#,
+ r#"fn foo<'a>(&'a mut self) -> &'a ()"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
new file mode 100644
index 000000000..547158e29
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
@@ -0,0 +1,144 @@
+use ide_db::syntax_helpers::node_ext::is_pattern_cond;
+use syntax::{
+ ast::{self, AstNode},
+ T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::invert_boolean_expression,
+ AssistId, AssistKind,
+};
+
+// Assist: invert_if
+//
+// This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}`.
+// This also works with `!=`. This assist can only be applied with the cursor on `if`.
+//
+// ```
+// fn main() {
+// if$0 !y { A } else { B }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// if y { B } else { A }
+// }
+// ```
+pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let if_keyword = ctx.find_token_syntax_at_offset(T![if])?;
+ let expr = ast::IfExpr::cast(if_keyword.parent()?)?;
+ let if_range = if_keyword.text_range();
+ let cursor_in_range = if_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+
+ let cond = expr.condition()?;
+ // This assist should not apply for if-let.
+ if is_pattern_cond(cond.clone()) {
+ return None;
+ }
+
+ let then_node = expr.then_branch()?.syntax().clone();
+ let else_block = match expr.else_branch()? {
+ ast::ElseBranch::Block(it) => it,
+ ast::ElseBranch::IfExpr(_) => return None,
+ };
+
+ acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| {
+ let flip_cond = invert_boolean_expression(cond.clone());
+ edit.replace_ast(cond, flip_cond);
+
+ let else_node = else_block.syntax();
+ let else_range = else_node.text_range();
+ let then_range = then_node.text_range();
+
+ edit.replace(else_range, then_node.text());
+ edit.replace(then_range, else_node.text());
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn invert_if_composite_condition() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f x == 3 || x == 4 || x == 5 { 1 } else { 3 * 2 } }",
+ "fn f() { if !(x == 3 || x == 4 || x == 5) { 3 * 2 } else { 1 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_remove_not_parentheses() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f !(x == 3 || x == 4 || x == 5) { 3 * 2 } else { 1 } }",
+ "fn f() { if x == 3 || x == 4 || x == 5 { 1 } else { 3 * 2 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_remove_inequality() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f x != 3 { 1 } else { 3 + 2 } }",
+ "fn f() { if x == 3 { 3 + 2 } else { 1 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_remove_not() {
+ check_assist(
+ invert_if,
+ "fn f() { $0if !cond { 3 * 2 } else { 1 } }",
+ "fn f() { if cond { 1 } else { 3 * 2 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_general_case() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f cond { 3 * 2 } else { 1 } }",
+ "fn f() { if !cond { 1 } else { 3 * 2 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_doesnt_apply_with_cursor_not_on_if() {
+ check_assist_not_applicable(invert_if, "fn f() { if !$0cond { 3 * 2 } else { 1 } }")
+ }
+
+ #[test]
+ fn invert_if_doesnt_apply_with_if_let() {
+ check_assist_not_applicable(
+ invert_if,
+ "fn f() { i$0f let Some(_) = Some(1) { 1 } else { 0 } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_option_case() {
+ check_assist(
+ invert_if,
+ "fn f() { if$0 doc_style.is_some() { Class::DocComment } else { Class::Comment } }",
+ "fn f() { if doc_style.is_none() { Class::Comment } else { Class::DocComment } }",
+ )
+ }
+
+ #[test]
+ fn invert_if_result_case() {
+ check_assist(
+ invert_if,
+ "fn f() { i$0f doc_style.is_err() { Class::Err } else { Class::Ok } }",
+ "fn f() { if doc_style.is_ok() { Class::Ok } else { Class::Err } }",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
new file mode 100644
index 000000000..7e102ceba
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
@@ -0,0 +1,570 @@
+use either::Either;
+use ide_db::imports::merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior};
+use syntax::{algo::neighbor, ast, match_ast, ted, AstNode, SyntaxElement, SyntaxNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::next_prev,
+ AssistId, AssistKind,
+};
+
+use Edit::*;
+
+// Assist: merge_imports
+//
+// Merges two imports with a common prefix.
+//
+// ```
+// use std::$0fmt::Formatter;
+// use std::io;
+// ```
+// ->
+// ```
+// use std::{fmt::Formatter, io};
+// ```
+pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (target, edits) = if ctx.has_empty_selection() {
+ // Merge a neighbor
+ let tree: ast::UseTree = ctx.find_node_at_offset()?;
+ let target = tree.syntax().text_range();
+
+ let edits = if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
+ let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter();
+ use_item.try_merge_from(&mut neighbor)
+ } else {
+ let mut neighbor = next_prev().find_map(|dir| neighbor(&tree, dir)).into_iter();
+ tree.try_merge_from(&mut neighbor)
+ };
+ (target, edits?)
+ } else {
+ // Merge selected
+ let selection_range = ctx.selection_trimmed();
+ let parent_node = match ctx.covering_element() {
+ SyntaxElement::Node(n) => n,
+ SyntaxElement::Token(t) => t.parent()?,
+ };
+ let mut selected_nodes =
+ parent_node.children().filter(|it| selection_range.contains_range(it.text_range()));
+
+ let first_selected = selected_nodes.next()?;
+ let edits = match_ast! {
+ match first_selected {
+ ast::Use(use_item) => {
+ use_item.try_merge_from(&mut selected_nodes.filter_map(ast::Use::cast))
+ },
+ ast::UseTree(use_tree) => {
+ use_tree.try_merge_from(&mut selected_nodes.filter_map(ast::UseTree::cast))
+ },
+ _ => return None,
+ }
+ };
+ (selection_range, edits?)
+ };
+
+ acc.add(
+ AssistId("merge_imports", AssistKind::RefactorRewrite),
+ "Merge imports",
+ target,
+ |builder| {
+ let edits_mut: Vec<Edit> = edits
+ .into_iter()
+ .map(|it| match it {
+ Remove(Either::Left(it)) => Remove(Either::Left(builder.make_mut(it))),
+ Remove(Either::Right(it)) => Remove(Either::Right(builder.make_mut(it))),
+ Replace(old, new) => Replace(builder.make_syntax_mut(old), new),
+ })
+ .collect();
+ for edit in edits_mut {
+ match edit {
+ Remove(it) => it.as_ref().either(ast::Use::remove, ast::UseTree::remove),
+ Replace(old, new) => ted::replace(old, new),
+ }
+ }
+ },
+ )
+}
+
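+/// Shared merging logic for `use` items and use trees: every neighbor that merges
+/// successfully is recorded for removal, and the original node is recorded for
+/// replacement with the merged result.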
+trait Merge: AstNode + Clone {
+ fn try_merge_from(self, items: &mut dyn Iterator<Item = Self>) -> Option<Vec<Edit>> {
+ let mut edits = Vec::new();
+ let mut merged = self.clone();
+ while let Some(item) = items.next() {
+ merged = merged.try_merge(&item)?;
+ edits.push(Edit::Remove(item.into_either()));
+ }
+ if !edits.is_empty() {
+ edits.push(Edit::replace(self, merged));
+ Some(edits)
+ } else {
+ None
+ }
+ }
+ fn try_merge(&self, other: &Self) -> Option<Self>;
+ fn into_either(self) -> Either<ast::Use, ast::UseTree>;
+}
+
+impl Merge for ast::Use {
+ fn try_merge(&self, other: &Self) -> Option<Self> {
+ try_merge_imports(self, other, MergeBehavior::Crate)
+ }
+ fn into_either(self) -> Either<ast::Use, ast::UseTree> {
+ Either::Left(self)
+ }
+}
+
+impl Merge for ast::UseTree {
+ fn try_merge(&self, other: &Self) -> Option<Self> {
+ try_merge_trees(self, other, MergeBehavior::Crate)
+ }
+ fn into_either(self) -> Either<ast::Use, ast::UseTree> {
+ Either::Right(self)
+ }
+}
+
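+/// A pending edit: either remove a `use` item or use tree that was merged away, or
+/// replace the original node with the merged one.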
+enum Edit {
+ Remove(Either<ast::Use, ast::UseTree>),
+ Replace(SyntaxNode, SyntaxNode),
+}
+
+impl Edit {
+ fn replace(old: impl AstNode, new: impl AstNode) -> Self {
+ Edit::Replace(old.syntax().clone(), new.syntax().clone())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_merge_equal() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt$0::{Display, Debug};
+use std::fmt::{Display, Debug};
+",
+ r"
+use std::fmt::{Display, Debug};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_first() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt$0::Debug;
+use std::fmt::Display;
+",
+ r"
+use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_second() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt::Debug;
+use std::fmt$0::Display;
+",
+ r"
+use std::fmt::{Display, Debug};
+",
+ );
+ }
+
+ #[test]
+ fn merge_self1() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt$0;
+use std::fmt::Display;
+",
+ r"
+use std::fmt::{self, Display};
+",
+ );
+ }
+
+ #[test]
+ fn merge_self2() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt, $0fmt::Display};
+",
+ r"
+use std::{fmt::{Display, self}};
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub1() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+pub use std::fmt$0::Debug;
+use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub_last() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+use std::fmt$0::Debug;
+pub use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub_crate_pub() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+pub(crate) use std::fmt$0::Debug;
+pub use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn skip_pub_pub_crate() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+pub use std::fmt$0::Debug;
+pub(crate) use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn merge_pub() {
+ check_assist(
+ merge_imports,
+ r"
+pub use std::fmt$0::Debug;
+pub use std::fmt::Display;
+",
+ r"
+pub use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn merge_pub_crate() {
+ check_assist(
+ merge_imports,
+ r"
+pub(crate) use std::fmt$0::Debug;
+pub(crate) use std::fmt::Display;
+",
+ r"
+pub(crate) use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn merge_pub_in_path_crate() {
+ check_assist(
+ merge_imports,
+ r"
+pub(in this::path) use std::fmt$0::Debug;
+pub(in this::path) use std::fmt::Display;
+",
+ r"
+pub(in this::path) use std::fmt::{Debug, Display};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt$0::Debug, fmt::Display};
+",
+ r"
+use std::{fmt::{Debug, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_nested2() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt::Debug, fmt$0::Display};
+",
+ r"
+use std::{fmt::{Display, Debug}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_with_nested_self_item() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::{Write, Display}};
+use std::{fmt::{self, Debug}};
+",
+ r"
+use std::{fmt::{Write, Display, self, Debug}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_with_nested_self_item2() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::{self, Debug}};
+use std::{fmt::{Write, Display}};
+",
+ r"
+use std::{fmt::{self, Debug, Write, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_self_with_nested_self_item() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt$0::{self, Debug}, fmt::{Write, Display}};
+",
+ r"
+use std::{fmt::{self, Debug, Write, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_nested_self_and_empty() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0{bar::{self}};
+use foo::{bar};
+",
+ r"
+use foo::{bar::{self}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested_empty_and_self() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0{bar};
+use foo::{bar::{self}};
+",
+ r"
+use foo::{bar::{self}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested_list_self_and_glob() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::*};
+use std::{fmt::{self, Display}};
+",
+ r"
+use std::{fmt::{*, self, Display}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_single_wildcard_diff_prefixes() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::cell::*;
+use std::str;
+",
+ r"
+use std::{cell::*, str};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_both_wildcard_diff_prefixes() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::cell::*;
+use std::str::*;
+",
+ r"
+use std::{cell::*, str::*};
+",
+ )
+ }
+
+ #[test]
+ fn removes_just_enough_whitespace() {
+ check_assist(
+ merge_imports,
+ r"
+use foo$0::bar;
+use foo::baz;
+
+/// Doc comment
+",
+ r"
+use foo::{bar, baz};
+
+/// Doc comment
+",
+ );
+ }
+
+ #[test]
+ fn works_with_trailing_comma() {
+ check_assist(
+ merge_imports,
+ r"
+use {
+ foo$0::bar,
+ foo::baz,
+};
+",
+ r"
+use {
+ foo::{bar, baz},
+};
+",
+ );
+ check_assist(
+ merge_imports,
+ r"
+use {
+ foo::baz,
+ foo$0::bar,
+};
+",
+ r"
+use {
+ foo::{bar, baz},
+};
+",
+ );
+ }
+
+ #[test]
+ fn test_double_comma() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::bar::baz;
+use foo::$0{
+ FooBar,
+};
+",
+ r"
+use foo::{
+ FooBar, bar::baz,
+};
+",
+ )
+ }
+
+ #[test]
+ fn test_empty_use() {
+ check_assist_not_applicable(
+ merge_imports,
+ r"
+use std::$0
+fn main() {}",
+ );
+ }
+
+ #[test]
+ fn split_glob() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0*;
+use foo::bar::Baz;
+",
+ r"
+use foo::{*, bar::Baz};
+",
+ );
+ }
+
+ #[test]
+ fn merge_selection_uses() {
+ check_assist(
+ merge_imports,
+ r"
+use std::fmt::Error;
+$0use std::fmt::Display;
+use std::fmt::Debug;
+use std::fmt::Write;
+$0use std::fmt::Result;
+",
+ r"
+use std::fmt::Error;
+use std::fmt::{Display, Debug, Write};
+use std::fmt::Result;
+",
+ );
+ }
+
+ #[test]
+ fn merge_selection_use_trees() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{
+ fmt::Error,
+ $0fmt::Display,
+ fmt::Debug,
+ fmt::Write,$0
+ fmt::Result,
+};",
+ r"
+use std::{
+ fmt::Error,
+ fmt::{Display, Debug, Write},
+ fmt::Result,
+};",
+ );
+ // FIXME: Remove redundant braces. See also unnecessary-braces diagnostic.
+ check_assist(
+ merge_imports,
+ r"use std::$0{fmt::Display, fmt::Debug}$0;",
+ r"use std::{fmt::{Display, Debug}};",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
new file mode 100644
index 000000000..c24015b1c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
@@ -0,0 +1,822 @@
+use hir::TypeInfo;
+use std::{collections::HashMap, iter::successors};
+use syntax::{
+ algo::neighbor,
+ ast::{self, AstNode, HasName},
+ Direction,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange};
+
+// Assist: merge_match_arms
+//
+// Merges the current match arm with the following arms if their bodies are identical.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// $0Action::Move(..) => foo(),
+// Action::Stop => foo(),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move(..) | Action::Stop => foo(),
+// }
+// }
+// ```
+pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let current_arm = ctx.find_node_at_offset::<ast::MatchArm>()?;
+ // Don't try to handle arms with guards for now - can add support for this later
+ if current_arm.guard().is_some() {
+ return None;
+ }
+ let current_expr = current_arm.expr()?;
+ let current_text_range = current_arm.syntax().text_range();
+ let current_arm_types = get_arm_types(ctx, &current_arm);
+
+ // We check if the following match arms match this one. We could, but don't,
+ // compare to the previous match arm as well.
+ let arms_to_merge = successors(Some(current_arm), |it| neighbor(it, Direction::Next))
+ .take_while(|arm| match arm.expr() {
+ Some(expr) if arm.guard().is_none() => {
+ let same_text = expr.syntax().text() == current_expr.syntax().text();
+ if !same_text {
+ return false;
+ }
+
+ are_same_types(&current_arm_types, arm, ctx)
+ }
+ _ => false,
+ })
+ .collect::<Vec<_>>();
+
+ if arms_to_merge.len() <= 1 {
+ return None;
+ }
+
+ acc.add(
+ AssistId("merge_match_arms", AssistKind::RefactorRewrite),
+ "Merge match arms",
+ current_text_range,
+ |edit| {
+ let pats = if arms_to_merge.iter().any(contains_placeholder) {
+ "_".into()
+ } else {
+ arms_to_merge
+ .iter()
+ .filter_map(ast::MatchArm::pat)
+ .map(|x| x.syntax().to_string())
+ .collect::<Vec<String>>()
+ .join(" | ")
+ };
+
+ let arm = format!("{} => {},", pats, current_expr.syntax().text());
+
+ if let [first, .., last] = &*arms_to_merge {
+ let start = first.syntax().text_range().start();
+ let end = last.syntax().text_range().end();
+
+ edit.replace(TextRange::new(start, end), arm);
+ }
+ },
+ )
+}
+
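+/// Returns `true` if the arm's pattern is the wildcard pattern `_`.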
+fn contains_placeholder(a: &ast::MatchArm) -> bool {
+ matches!(a.pat(), Some(ast::Pat::WildcardPat(..)))
+}
+
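+/// Returns `true` if every identifier bound by `arm` has the same type as the
+/// binding with the same name in the current arm.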
+fn are_same_types(
+ current_arm_types: &HashMap<String, Option<TypeInfo>>,
+ arm: &ast::MatchArm,
+ ctx: &AssistContext<'_>,
+) -> bool {
+ let arm_types = get_arm_types(ctx, arm);
+ for (other_arm_type_name, other_arm_type) in arm_types {
+ match (current_arm_types.get(&other_arm_type_name), other_arm_type) {
+ (Some(Some(current_arm_type)), Some(other_arm_type))
+ if other_arm_type.original == current_arm_type.original => {}
+ _ => return false,
+ }
+ }
+
+ true
+}
+
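+/// Collects the name and type of every identifier bound by the arm's pattern,
+/// recursing through tuple, record, parenthesized and slice patterns.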
+fn get_arm_types(
+ context: &AssistContext<'_>,
+ arm: &ast::MatchArm,
+) -> HashMap<String, Option<TypeInfo>> {
+ let mut mapping: HashMap<String, Option<TypeInfo>> = HashMap::new();
+
+ fn recurse(
+ map: &mut HashMap<String, Option<TypeInfo>>,
+ ctx: &AssistContext<'_>,
+ pat: &Option<ast::Pat>,
+ ) {
+ if let Some(local_pat) = pat {
+ match pat {
+ Some(ast::Pat::TupleStructPat(tuple)) => {
+ for field in tuple.fields() {
+ recurse(map, ctx, &Some(field));
+ }
+ }
+ Some(ast::Pat::TuplePat(tuple)) => {
+ for field in tuple.fields() {
+ recurse(map, ctx, &Some(field));
+ }
+ }
+ Some(ast::Pat::RecordPat(record)) => {
+ if let Some(field_list) = record.record_pat_field_list() {
+ for field in field_list.fields() {
+ recurse(map, ctx, &field.pat());
+ }
+ }
+ }
+ Some(ast::Pat::ParenPat(parentheses)) => {
+ recurse(map, ctx, &parentheses.pat());
+ }
+ Some(ast::Pat::SlicePat(slice)) => {
+ for slice_pat in slice.pats() {
+ recurse(map, ctx, &Some(slice_pat));
+ }
+ }
+ Some(ast::Pat::IdentPat(ident_pat)) => {
+ if let Some(name) = ident_pat.name() {
+ let pat_type = ctx.sema.type_of_pat(local_pat);
+ map.insert(name.text().to_string(), pat_type);
+ }
+ }
+ _ => (),
+ }
+ }
+ }
+
+ recurse(&mut mapping, context, &arm.pat());
+ mapping
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn merge_match_arms_single_patterns() {
+ check_assist(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32$0 }
+ X::B => { 1i32 }
+ X::C => { 2i32 }
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B => { 1i32 },
+ X::C => { 2i32 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_multiple_patterns() {
+ check_assist(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B => {$0 1i32 },
+ X::C | X::D => { 1i32 },
+ X::E => { 2i32 },
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A | X::B | X::C | X::D => { 1i32 },
+ X::E => { 2i32 },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_placeholder_pattern() {
+ check_assist(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32 },
+ X::B => { 2i$032 },
+ _ => { 2i32 }
+ }
+}
+"#,
+ r#"
+#[derive(Debug)]
+enum X { A, B, C, D, E }
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A => { 1i32 },
+ _ => { 2i32 },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merges_all_subsequent_arms() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum X { A, B, C, D, E }
+
+fn main() {
+ match X::A {
+ X::A$0 => 92,
+ X::B => 92,
+ X::C => 92,
+ X::D => 62,
+ _ => panic!(),
+ }
+}
+"#,
+ r#"
+enum X { A, B, C, D, E }
+
+fn main() {
+ match X::A {
+ X::A | X::B | X::C => 92,
+ X::D => 62,
+ _ => panic!(),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_rejects_guards() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+#[derive(Debug)]
+enum X {
+ A(i32),
+ B,
+ C
+}
+
+fn main() {
+ let x = X::A;
+ let y = match x {
+ X::A(a) if a > 5 => { $01i32 },
+ X::B => { 1i32 },
+ X::C => { 2i32 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_different_type() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<f64, f32>::Ok(0f64) {
+ Ok(x) => $0x.classify(),
+ Err(x) => x.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_different_type_multiple_fields() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64), (f32, f32)>::Ok((0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_multiple_fields() {
+ check_assist(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64), (f64, f64)>::Ok((0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ r#"
+fn func() {
+ match Result::<(f64, f64), (f64, f64)>::Ok((0f64, 0f64)) {
+ Ok(x) | Err(x) => x.1.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_subsequent_arm_with_different_type_in_other() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f32),
+ OptionC(f64)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) => $0x.classify(),
+ MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f32),
+ OptionC(f64)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) | MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_skip_arm_with_different_type_in_between() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f64),
+ OptionC(f32)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) => $0x.classify(),
+ MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_different_number_of_fields() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64, f64), (f64, f64)>::Ok((0f64, 0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_same_destructuring_different_types() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+struct Point {
+ x: i32,
+ y: i32,
+}
+
+fn func() {
+ let p = Point { x: 0, y: 7 };
+
+ match p {
+ Point { x, y: 0 } => $0"",
+ Point { x: 0, y } => "",
+ Point { x, y } => "",
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_range() {
+ check_assist(
+ merge_match_arms,
+ r#"
+fn func() {
+ let x = 'c';
+
+ match x {
+ 'a'..='j' => $0"",
+ 'c'..='z' => "",
+ _ => "other",
+ };
+}
+"#,
+ r#"
+fn func() {
+ let x = 'c';
+
+ match x {
+ 'a'..='j' | 'c'..='z' => "",
+ _ => "other",
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_enum_without_field() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ NoField,
+ AField(u8)
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::NoField => $0"",
+ MyEnum::AField(x) => ""
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_different_types() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Write(String),
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Write(text) => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_same_types() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { x: i32, y: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Crawl { x, y } => "",
+ };
+}
+ "#,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { x: i32, y: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } | MyEnum::Crawl { x, y } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_same_types_different_name() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { a: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Crawl { a, b } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_nested_pattern_different_names() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(r, g, b)) => $0"",
+ Message::ChangeColor(Color::Hsv(h, s, v)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_nested_pattern_same_names() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(a, b, c)) => $0"",
+ Message::ChangeColor(Color::Hsv(a, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(a, b, c)) | Message::ChangeColor(Color::Hsv(a, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_with_ignore() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, a: i32 },
+ Crawl { x: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, .. } => $0"",
+ MyEnum::Crawl { x, .. } => "",
+ };
+}
+ "#,
+ r#"
+enum MyEnum {
+ Move { x: i32, a: i32 },
+ Crawl { x: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, .. } | MyEnum::Crawl { x, .. } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_nested_with_conflicting_identifier() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Move { x: i32, y: i32 },
+ ChangeColor(u8, Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(x, Color::Rgb(y, b, c)) => $0"",
+ Message::ChangeColor(y, Color::Hsv(x, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_tuple() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func() {
+ match (0, "boo") {
+ (x, y) => $0"",
+ (y, x) => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_parentheses() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func(x: i32) {
+ let variable = 2;
+ match x {
+ 1 => $0"",
+ ((((variable)))) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_refpat() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func() {
+ let name = Some(String::from(""));
+ let n = String::from("");
+ match name {
+ Some(ref n) => $0"",
+ Some(n) => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_slice() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [0x7f, b'E', b'L', b'F', ..] => $0"",
+ [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_slice_identical() {
+ check_assist(
+ merge_match_arms,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [space, 5u8] => $0"",
+ [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [space, 5u8] | [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
new file mode 100644
index 000000000..176a3bf58
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
@@ -0,0 +1,122 @@
+use syntax::{
+ ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, HasName, HasTypeBounds},
+ match_ast,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: move_bounds_to_where_clause
+//
+// Moves inline type bounds to a where clause.
+//
+// ```
+// fn apply<T, U, $0F: FnOnce(T) -> U>(f: F, x: T) -> U {
+// f(x)
+// }
+// ```
+// ->
+// ```
+// fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
+// f(x)
+// }
+// ```
+pub(crate) fn move_bounds_to_where_clause(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?;
+
+ let mut type_params = type_param_list.type_or_const_params();
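+ // The assist is only applicable if at least one type parameter carries an inline bound.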
+ if type_params.all(|p| match p {
+ ast::TypeOrConstParam::Type(t) => t.type_bound_list().is_none(),
+ ast::TypeOrConstParam::Const(_) => true,
+ }) {
+ return None;
+ }
+
+ let parent = type_param_list.syntax().parent()?;
+
+ let target = type_param_list.syntax().text_range();
+ acc.add(
+ AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite),
+ "Move to where clause",
+ target,
+ |edit| {
+ let type_param_list = edit.make_mut(type_param_list);
+ let parent = edit.make_syntax_mut(parent);
+
+ let where_clause: ast::WhereClause = match_ast! {
+ match parent {
+ ast::Fn(it) => it.get_or_create_where_clause(),
+ ast::Trait(it) => it.get_or_create_where_clause(),
+ ast::Impl(it) => it.get_or_create_where_clause(),
+ ast::Enum(it) => it.get_or_create_where_clause(),
+ ast::Struct(it) => it.get_or_create_where_clause(),
+ _ => return,
+ }
+ };
+
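+ // Move each type parameter's inline bounds into the where clause, then remove them from the parameter list.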
+ for toc_param in type_param_list.type_or_const_params() {
+ let type_param = match toc_param {
+ ast::TypeOrConstParam::Type(x) => x,
+ ast::TypeOrConstParam::Const(_) => continue,
+ };
+ if let Some(tbl) = type_param.type_bound_list() {
+ if let Some(predicate) = build_predicate(type_param) {
+ where_clause.add_predicate(predicate)
+ }
+ tbl.remove()
+ }
+ }
+ },
+ )
+}
+
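+// Builds a `Name: Bounds` where-predicate from a type parameter's name and its inline bound list.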
+fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {
+ let path = make::ext::ident_path(&param.name()?.syntax().to_string());
+ let predicate = make::where_pred(path, param.type_bound_list()?.bounds());
+ Some(predicate.clone_for_update())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::check_assist;
+
+ #[test]
+ fn move_bounds_to_where_clause_fn() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"fn foo<T: u32, $0F: FnOnce(T) -> T>() {}"#,
+ r#"fn foo<T, F>() where T: u32, F: FnOnce(T) -> T {}"#,
+ );
+ }
+
+ #[test]
+ fn move_bounds_to_where_clause_impl() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"impl<U: u32, $0T> A<U, T> {}"#,
+ r#"impl<U, T> A<U, T> where U: u32 {}"#,
+ );
+ }
+
+ #[test]
+ fn move_bounds_to_where_clause_struct() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"struct A<$0T: Iterator<Item = u32>> {}"#,
+ r#"struct A<T> where T: Iterator<Item = u32> {}"#,
+ );
+ }
+
+ #[test]
+ fn move_bounds_to_where_clause_tuple_struct() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"struct Pair<$0T: u32>(T, T);"#,
+ r#"struct Pair<T>(T, T) where T: u32;"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
new file mode 100644
index 000000000..a6c85a2b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
@@ -0,0 +1,130 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::AnchoredPathBuf,
+};
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::trimmed_text_range,
+};
+
+// Assist: move_from_mod_rs
+//
+// Moves xxx/mod.rs to xxx.rs.
+//
+// ```
+// //- /main.rs
+// mod a;
+// //- /a/mod.rs
+// $0fn t() {}$0
+// ```
+// ->
+// ```
+// fn t() {}
+// ```
+pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
+ let module = ctx.sema.to_module_def(ctx.file_id())?;
+ // Only enable this assist if the user has selected all "meaningful" content in the source file.
+ let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
+ let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
+ if !module.is_mod_rs(ctx.db()) {
+ cov_mark::hit!(not_mod_rs);
+ return None;
+ }
+ if trimmed_selected_range != trimmed_file_range {
+ cov_mark::hit!(not_all_selected);
+ return None;
+ }
+
+ let target = source_file.syntax().text_range();
+ let module_name = module.name(ctx.db())?.to_string();
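+ // The destination is anchored at the current `mod.rs`, so `../<name>.rs` ends up next to the module's directory.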
+ let path = format!("../{}.rs", module_name);
+ let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
+ acc.add(
+ AssistId("move_from_mod_rs", AssistKind::Refactor),
+ format!("Convert {}/mod.rs to {}.rs", module_name, module_name),
+ target,
+ |builder| {
+ builder.move_file(ctx.file_id(), dst);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn trivial() {
+ check_assist(
+ move_from_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}
+$0"#,
+ r#"
+//- /a.rs
+fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn must_select_all_file() {
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+fn t() {}$0
+"#,
+ );
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn$0 t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_move_not_mod_rs() {
+ cov_mark::check!(not_mod_rs);
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_downgrade_main_and_lib_rs() {
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"//- /main.rs
+$0fn t() {}$0
+"#,
+ );
+ check_assist_not_applicable(
+ move_from_mod_rs,
+ r#"//- /lib.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
new file mode 100644
index 000000000..b8f1b36de
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
@@ -0,0 +1,997 @@
+use syntax::{
+ ast::{edit::AstNodeEdit, make, AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat},
+ SyntaxKind::WHITESPACE,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: move_guard_to_arm_body
+//
+// Moves match guard into match arm body.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } $0if distance > 10 => foo(),
+// _ => (),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } => if distance > 10 {
+// foo()
+// },
+// _ => (),
+// }
+// }
+// ```
+pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let match_arm = ctx.find_node_at_offset::<MatchArm>()?;
+ let guard = match_arm.guard()?;
+ if ctx.offset() > guard.syntax().text_range().end() {
+ cov_mark::hit!(move_guard_unapplicable_in_arm_body);
+ return None;
+ }
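+ // Remember the token before the guard so the whitespace separating it from the pattern can be removed too.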
+ let space_before_guard = guard.syntax().prev_sibling_or_token();
+
+ let guard_condition = guard.condition()?;
+ let arm_expr = match_arm.expr()?;
+ let if_expr =
+ make::expr_if(guard_condition, make::block_expr(None, Some(arm_expr.clone())), None)
+ .indent(arm_expr.indent_level());
+
+ let target = guard.syntax().text_range();
+ acc.add(
+ AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite),
+ "Move guard to arm body",
+ target,
+ |edit| {
+ match space_before_guard {
+ Some(element) if element.kind() == WHITESPACE => {
+ edit.delete(element.text_range());
+ }
+ _ => (),
+ };
+
+ edit.delete(guard.syntax().text_range());
+ edit.replace_ast(arm_expr, if_expr);
+ },
+ )
+}
+
+// Assist: move_arm_cond_to_match_guard
+//
+// Moves if expression from match arm body into a guard.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } => $0if distance > 10 { foo() },
+// _ => (),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } if distance > 10 => foo(),
+// _ => (),
+// }
+// }
+// ```
+pub(crate) fn move_arm_cond_to_match_guard(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
+ let match_pat = match_arm.pat()?;
+ let arm_body = match_arm.expr()?;
+
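+ // The arm body may be the if expression itself or a block whose tail expression is one;
+ // in the block case the rewritten branches need an extra level of dedenting.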
+ let mut replace_node = None;
+ let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone()).or_else(|| {
+ let block_expr = BlockExpr::cast(arm_body.syntax().clone())?;
+ if let Expr::IfExpr(e) = block_expr.tail_expr()? {
+ replace_node = Some(block_expr.syntax().clone());
+ Some(e)
+ } else {
+ None
+ }
+ })?;
+ if ctx.offset() > if_expr.then_branch()?.syntax().text_range().start() {
+ return None;
+ }
+
+ let replace_node = replace_node.unwrap_or_else(|| if_expr.syntax().clone());
+ let needs_dedent = replace_node != *if_expr.syntax();
+ let (conds_blocks, tail) = parse_if_chain(if_expr)?;
+
+ acc.add(
+ AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite),
+ "Move condition to match guard",
+ replace_node.text_range(),
+ |edit| {
+ edit.delete(match_arm.syntax().text_range());
+ // Dedent if the if expression is wrapped in a BlockExpr
+ let dedent = if needs_dedent {
+ cov_mark::hit!(move_guard_ifelse_in_block);
+ 1
+ } else {
+ cov_mark::hit!(move_guard_ifelse_else_block);
+ 0
+ };
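+ // The rewritten arms are inserted at the end position of the original arm, reusing its indentation.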
+ let then_arm_end = match_arm.syntax().text_range().end();
+ let indent_level = match_arm.indent_level();
+ let spaces = " ".repeat(indent_level.0 as _);
+
+ let mut first = true;
+ for (cond, block) in conds_blocks {
+ if !first {
+ edit.insert(then_arm_end, format!("\n{}", spaces));
+ } else {
+ first = false;
+ }
+ let guard = format!("{} if {} => ", match_pat, cond.syntax().text());
+ edit.insert(then_arm_end, guard);
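+ // If the then-branch is just a single tail expression, emit `=> expr,`; otherwise keep the block.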
+ let only_expr = block.statements().next().is_none();
+ match &block.tail_expr() {
+ Some(then_expr) if only_expr => {
+ edit.insert(then_arm_end, then_expr.syntax().text());
+ edit.insert(then_arm_end, ",");
+ }
+ _ => {
+ let to_insert = block.dedent(dedent.into()).syntax().text();
+ edit.insert(then_arm_end, to_insert)
+ }
+ }
+ }
+ if let Some(e) = tail {
+ cov_mark::hit!(move_guard_ifelse_else_tail);
+ let guard = format!("\n{}{} => ", spaces, match_pat);
+ edit.insert(then_arm_end, guard);
+ let only_expr = e.statements().next().is_none();
+ match &e.tail_expr() {
+ Some(expr) if only_expr => {
+ cov_mark::hit!(move_guard_ifelse_expr_only);
+ edit.insert(then_arm_end, expr.syntax().text());
+ edit.insert(then_arm_end, ",");
+ }
+ _ => {
+ let to_insert = e.dedent(dedent.into()).syntax().text();
+ edit.insert(then_arm_end, to_insert)
+ }
+ }
+ } else {
+ // There's no else branch. Add a pattern without a guard, unless the following match
+ // arm is `_ => ...`
+ cov_mark::hit!(move_guard_ifelse_notail);
+ match match_arm.syntax().next_sibling().and_then(MatchArm::cast) {
+ Some(next_arm)
+ if matches!(next_arm.pat(), Some(Pat::WildcardPat(_)))
+ && next_arm.guard().is_none() =>
+ {
+ cov_mark::hit!(move_guard_ifelse_has_wildcard);
+ }
+ _ => edit.insert(then_arm_end, format!("\n{}{} => {{}}", spaces, match_pat)),
+ }
+ }
+ },
+ )
+}
+
+// Parses an if-else-if chain to get the conditions and the then branches until we encounter an else
+// branch or the end.
+fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Expr, BlockExpr)>, Option<BlockExpr>)> {
+ let mut conds_blocks = Vec::new();
+ let mut curr_if = if_expr;
+ let tail = loop {
+ let cond = curr_if.condition()?;
+ conds_blocks.push((cond, curr_if.then_branch()?));
+ match curr_if.else_branch() {
+ Some(ElseBranch::IfExpr(e)) => {
+ curr_if = e;
+ }
+ Some(ElseBranch::Block(b)) => {
+ break Some(b);
+ }
+ None => break None,
+ }
+ };
+ Some((conds_blocks, tail))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn move_guard_to_arm_body_range() {
+ cov_mark::check!(move_guard_unapplicable_in_arm_body);
+ check_assist_not_applicable(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => $0false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+ #[test]
+ fn move_guard_to_arm_body_target() {
+ check_assist_target(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ r#"if x > 10"#,
+ );
+ }
+
+ #[test]
+ fn move_guard_to_arm_body_works() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > 10 {
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_let_guard_to_arm_body_works() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if (let 1 = x) => false,
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x => if (let 1 = x) {
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_guard_to_arm_body_works_complex_match() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ $0x @ 4 | x @ 5 if x > 5 => true,
+ _ => false
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x @ 4 | x @ 5 => if x > 5 {
+ true
+ },
+ _ => false
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > 10$0 { false },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_works() {
+ cov_mark::check!(move_guard_ifelse_has_wildcard);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_no_wildcard_works() {
+ cov_mark::check_count!(move_guard_ifelse_has_wildcard, 0);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_wildcard_guard_works() {
+ cov_mark::check_count!(move_guard_ifelse_has_wildcard, 0);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ }
+ _ if x > 10 => true,
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {}
+ _ if x > 10 => true,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_add_comma_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ $0if x > 10 {
+ false
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_if_let_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if let 62 = x $0&& true { false },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if let 62 = x && true => false,
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_if_empty_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x $0> 10 { },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => { }
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_if_multiline_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if$0 x > 10 {
+ 92;
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => {
+ 92;
+ false
+ }
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_in_block_to_match_guard_if_multiline_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ if x > $010 {
+ 92;
+ false
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => {
+ 92;
+ false
+ }
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > $010 {
+ false
+ } else {
+ true
+ }
+ _ => true,
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => true,
+ _ => true,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_block_works() {
+ cov_mark::check!(move_guard_ifelse_expr_only);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ if x $0> 10 {
+ false
+ } else {
+ true
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => true,
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_else_if_empty_body_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x > $010 { } else { },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => { }
+ x => { }
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_multiline_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if$0 x > 10 {
+ 92;
+ false
+ } else {
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => {
+ 92;
+ false
+ }
+ x => true,
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_multiline_else_works() {
+ cov_mark::check!(move_guard_ifelse_else_block);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => if x $0> 10 {
+ false
+ } else {
+ 42;
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {
+ 42;
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_multiline_else_block_works() {
+ cov_mark::check!(move_guard_ifelse_in_block);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ x => {
+ if x > $010 {
+ false
+ } else {
+ 42;
+ true
+ }
+ }
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if x > 10 => false,
+ x => {
+ 42;
+ true
+ }
+ _ => true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_last_arm_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => {
+ if x > $010 {
+ false
+ } else {
+ 92;
+ true
+ }
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x => {
+ 92;
+ true
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_with_else_comma_works() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => if x > $010 {
+ false
+ } else {
+ 92;
+ true
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x => {
+ 92;
+ true
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => if x $0> 10 {
+ false
+ } else if x > 5 {
+ true
+ } else if x > 4 {
+ false
+ } else {
+ true
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x if x > 5 => true,
+ x if x > 4 => false,
+ x => true,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_in_block() {
+ cov_mark::check!(move_guard_ifelse_in_block);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x => {
+ if x > $010 {
+ false
+ } else if x > 5 {
+ true
+ } else if x > 4 {
+ false
+ } else {
+ true
+ }
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => true,
+ x if x > 10 => false,
+ x if x > 5 => true,
+ x if x > 4 => false,
+ x => true,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_chain() {
+ cov_mark::check!(move_guard_ifelse_else_tail);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x => if x $0> 10 {
+ 1
+ } else if x > 5 {
+ 2
+ } else if x > 3 {
+ 42;
+ 3
+ } else {
+ 4
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if x > 3 => {
+ 42;
+ 3
+ }
+ x => 4,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_iflet() {
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x => if x $0> 10 {
+ 1
+ } else if x > 5 {
+ 2
+ } else if let 4 = 4 {
+ 42;
+ 3
+ } else {
+ 4
+ },
+ }
+}"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if let 4 = 4 => {
+ 42;
+ 3
+ }
+ x => 4,
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn move_arm_cond_to_match_guard_elseif_notail() {
+ cov_mark::check!(move_guard_ifelse_notail);
+ check_assist(
+ move_arm_cond_to_match_guard,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x => if x > $010 {
+ 1
+ } else if x > 5 {
+ 2
+ } else if x > 4 {
+ 42;
+ 3
+ },
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if x > 4 => {
+ 42;
+ 3
+ }
+ x => {}
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
new file mode 100644
index 000000000..7468318a5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
@@ -0,0 +1,337 @@
+use std::iter;
+
+use ast::edit::IndentLevel;
+use ide_db::base_db::AnchoredPathBuf;
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{self, edit::AstNodeEdit, HasName},
+ AstNode, SmolStr, TextRange,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: move_module_to_file
+//
+// Moves an inline module's contents to a separate file.
+//
+// ```
+// mod $0foo {
+// fn t() {}
+// }
+// ```
+// ->
+// ```
+// mod foo;
+// ```
+pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let module_ast = ctx.find_node_at_offset::<ast::Module>()?;
+ let module_items = module_ast.item_list()?;
+
+ let l_curly_offset = module_items.syntax().text_range().start();
+ if l_curly_offset <= ctx.offset() {
+ cov_mark::hit!(available_before_curly);
+ return None;
+ }
+ let target = TextRange::new(module_ast.syntax().text_range().start(), l_curly_offset);
+
+ let module_name = module_ast.name()?;
+
+ // Get to the outermost module declaration so we can find the module of the file we are in.
+ let outermost_mod_decl =
+ iter::successors(Some(module_ast.clone()), |module| module.parent()).last()?;
+ let module_def = ctx.sema.to_def(&outermost_mod_decl)?;
+ let parent_module = module_def.parent(ctx.db())?;
+
+ acc.add(
+ AssistId("move_module_to_file", AssistKind::RefactorExtract),
+ "Extract module to file",
+ target,
+ |builder| {
+ let path = {
+ let mut buf = String::from("./");
+ match parent_module.name(ctx.db()) {
+ Some(name) if !parent_module.is_mod_rs(ctx.db()) => {
+ format_to!(buf, "{}/", name)
+ }
+ _ => (),
+ }
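+ // Collect the names of this module and its enclosing inline modules (innermost first),
+ // then reverse them to build the directory path.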
+ let segments = iter::successors(Some(module_ast.clone()), |module| module.parent())
+ .filter_map(|it| it.name())
+ .map(|name| SmolStr::from(name.text().trim_start_matches("r#")))
+ .collect::<Vec<_>>();
+
+ format_to!(buf, "{}", segments.into_iter().rev().format("/"));
+
+ // We need to special-case a module named `r#mod` and place its file in a subdirectory,
+ // as a sibling "mod.rs" would otherwise belong to the parent module.
+ if module_name.text() == "r#mod" {
+ format_to!(buf, "/mod.rs");
+ } else {
+ format_to!(buf, ".rs");
+ }
+ buf
+ };
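+ // The new file's contents are the module body, dedented one level and with the braces stripped.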
+ let contents = {
+ let items = module_items.dedent(IndentLevel(1)).to_string();
+ let mut items =
+ items.trim_start_matches('{').trim_end_matches('}').trim().to_string();
+ if !items.is_empty() {
+ items.push('\n');
+ }
+ items
+ };
+
+ let buf = format!("mod {};", module_name);
+
+ let replacement_start = match module_ast.mod_token() {
+ Some(mod_token) => mod_token.text_range(),
+ None => module_ast.syntax().text_range(),
+ }
+ .start();
+
+ builder.replace(
+ TextRange::new(replacement_start, module_ast.syntax().text_range().end()),
+ buf,
+ );
+
+ let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
+ builder.create_file(dst, contents);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn extract_from_root() {
+ check_assist(
+ move_module_to_file,
+ r#"
+mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_from_submodule() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod submod;
+//- /submod.rs
+$0mod inner {
+ fn f() {}
+}
+fn g() {}
+"#,
+ r#"
+//- /submod.rs
+mod inner;
+fn g() {}
+//- /submod/inner.rs
+fn f() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_from_mod_rs() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod submodule;
+//- /submodule/mod.rs
+mod inner$0 {
+ fn f() {}
+}
+fn g() {}
+"#,
+ r#"
+//- /submodule/mod.rs
+mod inner;
+fn g() {}
+//- /submodule/inner.rs
+fn f() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_public() {
+ check_assist(
+ move_module_to_file,
+ r#"
+pub mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+pub mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_public_crate() {
+ check_assist(
+ move_module_to_file,
+ r#"
+pub(crate) mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+pub(crate) mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn available_before_curly() {
+ cov_mark::check!(available_before_curly);
+ check_assist_not_applicable(move_module_to_file, r#"mod m { $0 }"#);
+ }
+
+ #[test]
+ fn keep_outer_comments_and_attributes() {
+ check_assist(
+ move_module_to_file,
+ r#"
+/// doc comment
+#[attribute]
+mod $0tests {
+ #[test] fn t() {}
+}
+"#,
+ r#"
+//- /main.rs
+/// doc comment
+#[attribute]
+mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_nested() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod bar {
+ mod baz {
+ mod qux$0 {}
+ }
+}
+"#,
+ r#"
+//- /foo.rs
+mod bar {
+ mod baz {
+ mod qux;
+ }
+}
+//- /foo/bar/baz/qux.rs
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_mod_with_raw_ident() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod $0r#static {}
+"#,
+ r#"
+//- /main.rs
+mod r#static;
+//- /static.rs
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_r_mod() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod $0r#mod {}
+"#,
+ r#"
+//- /main.rs
+mod r#mod;
+//- /mod/mod.rs
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_r_mod_from_mod_rs() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod foo;
+//- /foo/mod.rs
+mod $0r#mod {}
+"#,
+ r#"
+//- /foo/mod.rs
+mod r#mod;
+//- /foo/mod/mod.rs
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_nested_r_mod() {
+ check_assist(
+ move_module_to_file,
+ r#"
+//- /main.rs
+mod r#mod {
+ mod foo {
+ mod $0r#mod {}
+ }
+}
+"#,
+ r#"
+//- /main.rs
+mod r#mod {
+ mod foo {
+ mod r#mod;
+ }
+}
+//- /mod/foo/mod/mod.rs
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
new file mode 100644
index 000000000..a909ce8b2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
@@ -0,0 +1,151 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::AnchoredPathBuf,
+};
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::trimmed_text_range,
+};
+
+// Assist: move_to_mod_rs
+//
+// Moves xxx.rs to xxx/mod.rs.
+//
+// ```
+// //- /main.rs
+// mod a;
+// //- /a.rs
+// $0fn t() {}$0
+// ```
+// ->
+// ```
+// fn t() {}
+// ```
+pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
+ let module = ctx.sema.to_module_def(ctx.file_id())?;
+ // Only enable this assist if the user has selected all "meaningful" content in the source file.
+ let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
+ let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
+ if module.is_mod_rs(ctx.db()) {
+ cov_mark::hit!(already_mod_rs);
+ return None;
+ }
+ if trimmed_selected_range != trimmed_file_range {
+ cov_mark::hit!(not_all_selected);
+ return None;
+ }
+
+ let target = source_file.syntax().text_range();
+ let module_name = module.name(ctx.db())?.to_string();
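+ // The destination is anchored at the current file, so `./<name>/mod.rs` goes into a directory named after the module.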
+ let path = format!("./{}/mod.rs", module_name);
+ let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
+ acc.add(
+ AssistId("move_to_mod_rs", AssistKind::Refactor),
+ format!("Convert {}.rs to {}/mod.rs", module_name, module_name),
+ target,
+ |builder| {
+ builder.move_file(ctx.file_id(), dst);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn trivial() {
+ check_assist(
+ move_to_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}
+$0"#,
+ r#"
+//- /a/mod.rs
+fn t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn must_select_all_file() {
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a.rs
+fn t() {}$0
+"#,
+ );
+ cov_mark::check!(not_all_selected);
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn$0 t() {}
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_promote_mod_rs() {
+ cov_mark::check!(already_mod_rs);
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn cannot_promote_main_and_lib_rs() {
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"//- /main.rs
+$0fn t() {}$0
+"#,
+ );
+ check_assist_not_applicable(
+ move_to_mod_rs,
+ r#"//- /lib.rs
+$0fn t() {}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_mod() {
+ // note: /a/b.rs remains untouched
+ check_assist(
+ move_to_mod_rs,
+ r#"//- /main.rs
+mod a;
+//- /a.rs
+$0mod b;
+fn t() {}$0
+//- /a/b.rs
+fn t1() {}
+"#,
+ r#"
+//- /a/mod.rs
+mod b;
+fn t() {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
new file mode 100644
index 000000000..424db7437
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
@@ -0,0 +1,183 @@
+use syntax::{ast, ast::Radix, AstToken};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+
+const MIN_NUMBER_OF_DIGITS_TO_FORMAT: usize = 5;
+
+// Assist: reformat_number_literal
+//
+// Adds or removes separators from an integer literal.
+//
+// ```
+// const _: i32 = 1012345$0;
+// ```
+// ->
+// ```
+// const _: i32 = 1_012_345;
+// ```
+pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let literal = ctx.find_node_at_offset::<ast::Literal>()?;
+ let literal = match literal.kind() {
+ ast::LiteralKind::IntNumber(it) => it,
+ _ => return None,
+ };
+
+ let text = literal.text();
+ if text.contains('_') {
+ return remove_separators(acc, literal);
+ }
+
+ let (prefix, value, suffix) = literal.split_into_parts();
+ if value.len() < MIN_NUMBER_OF_DIGITS_TO_FORMAT {
+ return None;
+ }
+
+ let radix = literal.radix();
+ let mut converted = prefix.to_string();
+ converted.push_str(&add_group_separators(value, group_size(radix)));
+ converted.push_str(suffix);
+
+ let group_id = GroupLabel("Reformat number literal".into());
+ let label = format!("Convert {} to {}", literal, converted);
+ let range = literal.syntax().text_range();
+ acc.add_group(
+ &group_id,
+ AssistId("reformat_number_literal", AssistKind::RefactorInline),
+ label,
+ range,
+ |builder| builder.replace(range, converted),
+ )
+}
+
+fn remove_separators(acc: &mut Assists, literal: ast::IntNumber) -> Option<()> {
+ let group_id = GroupLabel("Reformat number literal".into());
+ let range = literal.syntax().text_range();
+ acc.add_group(
+ &group_id,
+ AssistId("reformat_number_literal", AssistKind::RefactorInline),
+ "Remove digit separators",
+ range,
+ |builder| builder.replace(range, literal.text().replace('_', "")),
+ )
+}
+
+const fn group_size(r: Radix) -> usize {
+ match r {
+ Radix::Binary => 4,
+ Radix::Octal => 3,
+ Radix::Decimal => 3,
+ Radix::Hexadecimal => 4,
+ }
+}
+
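+// Inserts a `_` after every `group_size` digits, counting from the least significant digit;
+// any existing separators are dropped first.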
+fn add_group_separators(s: &str, group_size: usize) -> String {
+ let mut chars = Vec::new();
+ for (i, ch) in s.chars().filter(|&ch| ch != '_').rev().enumerate() {
+ if i > 0 && i % group_size == 0 {
+ chars.push('_');
+ }
+ chars.push(ch);
+ }
+
+ chars.into_iter().rev().collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist_by_label, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn group_separators() {
+ let cases = vec![
+ ("", 4, ""),
+ ("1", 4, "1"),
+ ("12", 4, "12"),
+ ("123", 4, "123"),
+ ("1234", 4, "1234"),
+ ("12345", 4, "1_2345"),
+ ("123456", 4, "12_3456"),
+ ("1234567", 4, "123_4567"),
+ ("12345678", 4, "1234_5678"),
+ ("123456789", 4, "1_2345_6789"),
+ ("1234567890", 4, "12_3456_7890"),
+ ("1_2_3_4_5_6_7_8_9_0_", 4, "12_3456_7890"),
+ ("1234567890", 3, "1_234_567_890"),
+ ("1234567890", 2, "12_34_56_78_90"),
+ ("1234567890", 1, "1_2_3_4_5_6_7_8_9_0"),
+ ];
+
+ for case in cases {
+ let (input, group_size, expected) = case;
+ assert_eq!(add_group_separators(input, group_size), expected)
+ }
+ }
+
+ #[test]
+ fn good_targets() {
+ let cases = vec![
+ ("const _: i32 = 0b11111$0", "0b11111"),
+ ("const _: i32 = 0o77777$0;", "0o77777"),
+ ("const _: i32 = 10000$0;", "10000"),
+ ("const _: i32 = 0xFFFFF$0;", "0xFFFFF"),
+ ("const _: i32 = 10000i32$0;", "10000i32"),
+ ("const _: i32 = 0b_10_0i32$0;", "0b_10_0i32"),
+ ];
+
+ for case in cases {
+ check_assist_target(reformat_number_literal, case.0, case.1);
+ }
+ }
+
+ #[test]
+ fn bad_targets() {
+ let cases = vec![
+ "const _: i32 = 0b111$0",
+ "const _: i32 = 0b1111$0",
+ "const _: i32 = 0o77$0;",
+ "const _: i32 = 0o777$0;",
+ "const _: i32 = 10$0;",
+ "const _: i32 = 999$0;",
+ "const _: i32 = 0xFF$0;",
+ "const _: i32 = 0xFFFF$0;",
+ ];
+
+ for case in cases {
+ check_assist_not_applicable(reformat_number_literal, case);
+ }
+ }
+
+ #[test]
+ fn labels() {
+ let cases = vec![
+ ("const _: i32 = 10000$0", "const _: i32 = 10_000", "Convert 10000 to 10_000"),
+ (
+ "const _: i32 = 0xFF0000$0;",
+ "const _: i32 = 0xFF_0000;",
+ "Convert 0xFF0000 to 0xFF_0000",
+ ),
+ (
+ "const _: i32 = 0b11111111$0;",
+ "const _: i32 = 0b1111_1111;",
+ "Convert 0b11111111 to 0b1111_1111",
+ ),
+ (
+ "const _: i32 = 0o377211$0;",
+ "const _: i32 = 0o377_211;",
+ "Convert 0o377211 to 0o377_211",
+ ),
+ (
+ "const _: i32 = 10000i32$0;",
+ "const _: i32 = 10_000i32;",
+ "Convert 10000i32 to 10_000i32",
+ ),
+ ("const _: i32 = 1_0_0_0_i32$0;", "const _: i32 = 1000i32;", "Remove digit separators"),
+ ];
+
+ for case in cases {
+ let (before, after, label) = case;
+ check_assist_by_label(reformat_number_literal, before, after, label);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
new file mode 100644
index 000000000..cbbea6c1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -0,0 +1,221 @@
+use hir::{HirDisplay, ModuleDef, PathResolution, Semantics};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ syntax_helpers::node_ext::preorder_expr,
+ RootDatabase,
+};
+use stdx::to_upper_snake_case;
+use syntax::{
+ ast::{self, make, HasName},
+ AstNode, WalkEvent,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::{render_snippet, Cursor},
+};
+
+// Assist: promote_local_to_const
+//
+// Promotes a local variable to a const item changing its name to a `SCREAMING_SNAKE_CASE` variant
+// if the local uses no non-const expressions.
+//
+// ```
+// fn main() {
+// let foo$0 = true;
+//
+// if foo {
+// println!("It's true");
+// } else {
+// println!("It's false");
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// const $0FOO: bool = true;
+//
+// if FOO {
+// println!("It's true");
+// } else {
+// println!("It's false");
+// }
+// }
+// ```
+pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let pat = ctx.find_node_at_offset::<ast::IdentPat>()?;
+ let name = pat.name()?;
+ if !pat.is_simple_ident() {
+ cov_mark::hit!(promote_local_non_simple_ident);
+ return None;
+ }
+ let let_stmt = pat.syntax().parent().and_then(ast::LetStmt::cast)?;
+
+ let module = ctx.sema.scope(pat.syntax())?.module();
+ let local = ctx.sema.to_def(&pat)?;
+ let ty = ctx.sema.type_of_pat(&pat.into())?.original;
+
+ if ty.contains_unknown() || ty.is_closure() {
+ cov_mark::hit!(promote_local_not_applicable_if_ty_not_inferred);
+ return None;
+ }
+ let ty = ty.display_source_code(ctx.db(), module.into()).ok()?;
+
+ let initializer = let_stmt.initializer()?;
+ if !is_body_const(&ctx.sema, &initializer) {
+ cov_mark::hit!(promote_local_non_const);
+ return None;
+ }
+ let target = let_stmt.syntax().text_range();
+ acc.add(
+ AssistId("promote_local_to_const", AssistKind::Refactor),
+ "Promote local to constant",
+ target,
+ |builder| {
+ let name = to_upper_snake_case(&name.to_string());
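+ // Rewrite every usage of the local in the current file to the new constant name.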
+ let usages = Definition::Local(local).usages(&ctx.sema).all();
+ if let Some(usages) = usages.references.get(&ctx.file_id()) {
+ for usage in usages {
+ builder.replace(usage.range, &name);
+ }
+ }
+
+ let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer);
+ match ctx.config.snippet_cap.zip(item.name()) {
+ Some((cap, name)) => builder.replace_snippet(
+ cap,
+ target,
+ render_snippet(cap, item.syntax(), Cursor::Before(name.syntax())),
+ ),
+ None => builder.replace(target, item.to_string()),
+ }
+ },
+ )
+}
+
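+// Checks whether an initializer is const-evaluable: every resolved call must be a `const fn`,
+// and `box`, `for`, `return`, `?`, `yield` and `.await` expressions are rejected.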
+fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
+ let mut is_const = true;
+ preorder_expr(expr, &mut |ev| {
+ let expr = match ev {
+ WalkEvent::Enter(_) if !is_const => return true,
+ WalkEvent::Enter(expr) => expr,
+ WalkEvent::Leave(_) => return false,
+ };
+ match expr {
+ ast::Expr::CallExpr(call) => {
+ if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() {
+ if let Some(PathResolution::Def(ModuleDef::Function(func))) =
+ path_expr.path().and_then(|path| sema.resolve_path(&path))
+ {
+ is_const &= func.is_const(sema.db);
+ }
+ }
+ }
+ ast::Expr::MethodCallExpr(call) => {
+ is_const &=
+ sema.resolve_method_call(&call).map(|it| it.is_const(sema.db)).unwrap_or(true)
+ }
+ ast::Expr::BoxExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::ReturnExpr(_)
+ | ast::Expr::TryExpr(_)
+ | ast::Expr::YieldExpr(_)
+ | ast::Expr::AwaitExpr(_) => is_const = false,
+ _ => (),
+ }
+ !is_const
+ });
+ is_const
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn simple() {
+ check_assist(
+ promote_local_to_const,
+ r"
+fn foo() {
+ let x$0 = 0;
+ let y = x;
+}
+",
+ r"
+fn foo() {
+ const $0X: i32 = 0;
+ let y = X;
+}
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_const_meth_call() {
+ cov_mark::check!(promote_local_non_const);
+ check_assist_not_applicable(
+ promote_local_to_const,
+ r"
+struct Foo;
+impl Foo {
+ fn foo(self) {}
+}
+fn foo() {
+ let x$0 = Foo.foo();
+}
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_const_call() {
+ check_assist_not_applicable(
+ promote_local_to_const,
+ r"
+fn bar(self) {}
+fn foo() {
+ let x$0 = bar();
+}
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_unknown_ty() {
+ cov_mark::check!(promote_local_not_applicable_if_ty_not_inferred);
+ check_assist_not_applicable(
+ promote_local_to_const,
+ r"
+fn foo() {
+ let x$0 = bar();
+}
+",
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_simple_ident() {
+ cov_mark::check!(promote_local_non_simple_ident);
+ check_assist_not_applicable(
+ promote_local_to_const,
+ r"
+fn foo() {
+ let ref x$0 = ();
+}
+",
+ );
+ check_assist_not_applicable(
+ promote_local_to_const,
+ r"
+fn foo() {
+ let mut x$0 = ();
+}
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
new file mode 100644
index 000000000..4cfe6c99b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -0,0 +1,507 @@
+use syntax::{
+ ast::{self, make},
+ ted, AstNode,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: pull_assignment_up
+//
+// Extracts a variable assignment to outside an if or match statement.
+//
+// ```
+// fn main() {
+// let mut foo = 6;
+//
+// if true {
+// $0foo = 5;
+// } else {
+// foo = 4;
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let mut foo = 6;
+//
+// foo = if true {
+// 5
+// } else {
+// 4
+// };
+// }
+// ```
+pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let assign_expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
+
+ let op_kind = assign_expr.op_kind()?;
+ if op_kind != (ast::BinaryOp::Assignment { op: None }) {
+ cov_mark::hit!(test_cant_pull_non_assignments);
+ return None;
+ }
+
+ let mut collector = AssignmentsCollector {
+ sema: &ctx.sema,
+ common_lhs: assign_expr.lhs()?,
+ assignments: Vec::new(),
+ };
+
+ let tgt: ast::Expr = if let Some(if_expr) = ctx.find_node_at_offset::<ast::IfExpr>() {
+ collector.collect_if(&if_expr)?;
+ if_expr.into()
+ } else if let Some(match_expr) = ctx.find_node_at_offset::<ast::MatchExpr>() {
+ collector.collect_match(&match_expr)?;
+ match_expr.into()
+ } else {
+ return None;
+ };
+
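+ // Don't offer the assist if the if/match is already used as a value,
+ // e.g. on the right-hand side of an assignment or in a `let` statement.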
+ if let Some(parent) = tgt.syntax().parent() {
+ if matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) {
+ return None;
+ }
+ }
+
+ acc.add(
+ AssistId("pull_assignment_up", AssistKind::RefactorExtract),
+ "Pull assignment up",
+ tgt.syntax().text_range(),
+ move |edit| {
+ let assignments: Vec<_> = collector
+ .assignments
+ .into_iter()
+ .map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update()))
+ .collect();
+
+ let tgt = edit.make_mut(tgt);
+
+ for (stmt, rhs) in assignments {
+ let mut stmt = stmt.syntax().clone();
+ if let Some(parent) = stmt.parent() {
+ if ast::ExprStmt::cast(parent.clone()).is_some() {
+ stmt = parent.clone();
+ }
+ }
+ ted::replace(stmt, rhs.syntax());
+ }
+ let assign_expr = make::expr_assignment(collector.common_lhs, tgt.clone());
+ let assign_stmt = make::expr_stmt(assign_expr);
+
+ ted::replace(tgt.syntax(), assign_stmt.syntax().clone_for_update());
+ },
+ )
+}
+
+struct AssignmentsCollector<'a> {
+ sema: &'a hir::Semantics<'a, ide_db::RootDatabase>,
+ common_lhs: ast::Expr,
+ assignments: Vec<(ast::BinExpr, ast::Expr)>,
+}
+
+impl<'a> AssignmentsCollector<'a> {
+ fn collect_match(&mut self, match_expr: &ast::MatchExpr) -> Option<()> {
+ for arm in match_expr.match_arm_list()?.arms() {
+ match arm.expr()? {
+ ast::Expr::BlockExpr(block) => self.collect_block(&block)?,
+ ast::Expr::BinExpr(expr) => self.collect_expr(&expr)?,
+ _ => return None,
+ }
+ }
+
+ Some(())
+ }
+ fn collect_if(&mut self, if_expr: &ast::IfExpr) -> Option<()> {
+ let then_branch = if_expr.then_branch()?;
+ self.collect_block(&then_branch)?;
+
+ match if_expr.else_branch()? {
+ ast::ElseBranch::Block(block) => self.collect_block(&block),
+ ast::ElseBranch::IfExpr(expr) => {
+ cov_mark::hit!(test_pull_assignment_up_chained_if);
+ self.collect_if(&expr)
+ }
+ }
+ }
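+ // Collects an assignment from a block's tail expression or, failing that, from its last expression statement.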
+ fn collect_block(&mut self, block: &ast::BlockExpr) -> Option<()> {
+ let last_expr = block.tail_expr().or_else(|| match block.statements().last()? {
+ ast::Stmt::ExprStmt(stmt) => stmt.expr(),
+ ast::Stmt::Item(_) | ast::Stmt::LetStmt(_) => None,
+ })?;
+
+ if let ast::Expr::BinExpr(expr) = last_expr {
+ return self.collect_expr(&expr);
+ }
+
+ None
+ }
+
+ fn collect_expr(&mut self, expr: &ast::BinExpr) -> Option<()> {
+ if expr.op_kind()? == (ast::BinaryOp::Assignment { op: None })
+ && is_equivalent(self.sema, &expr.lhs()?, &self.common_lhs)
+ {
+ self.assignments.push((expr.clone(), expr.rhs()?));
+ return Some(());
+ }
+ None
+ }
+}
+
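+// Two left-hand sides are equivalent if they are field accesses resolving to the same field,
+// paths resolving to the same definition, or dereferences of equivalent expressions.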
+fn is_equivalent(
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ expr0: &ast::Expr,
+ expr1: &ast::Expr,
+) -> bool {
+ match (expr0, expr1) {
+ (ast::Expr::FieldExpr(field_expr0), ast::Expr::FieldExpr(field_expr1)) => {
+ cov_mark::hit!(test_pull_assignment_up_field_assignment);
+ sema.resolve_field(field_expr0) == sema.resolve_field(field_expr1)
+ }
+ (ast::Expr::PathExpr(path0), ast::Expr::PathExpr(path1)) => {
+ let path0 = path0.path();
+ let path1 = path1.path();
+ if let (Some(path0), Some(path1)) = (path0, path1) {
+ sema.resolve_path(&path0) == sema.resolve_path(&path1)
+ } else {
+ false
+ }
+ }
+ (ast::Expr::PrefixExpr(prefix0), ast::Expr::PrefixExpr(prefix1))
+ if prefix0.op_kind() == Some(ast::UnaryOp::Deref)
+ && prefix1.op_kind() == Some(ast::UnaryOp::Deref) =>
+ {
+ cov_mark::hit!(test_pull_assignment_up_deref);
+ if let (Some(prefix0), Some(prefix1)) = (prefix0.expr(), prefix1.expr()) {
+ is_equivalent(sema, &prefix0, &prefix1)
+ } else {
+ false
+ }
+ }
+ _ => false,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_pull_assignment_up_if() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ } else {
+ a = 3;
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = if true {
+ 2
+ } else {
+ 3
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_match() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ match 1 {
+ 1 => {
+ $0a = 2;
+ },
+ 2 => {
+ a = 3;
+ },
+ 3 => {
+ a = 4;
+ }
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = match 1 {
+ 1 => {
+ 2
+ },
+ 2 => {
+ 3
+ },
+ 3 => {
+ 4
+ }
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_assignment_expressions() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ match 1 {
+ 1 => { $0a = 2; },
+ 2 => a = 3,
+ 3 => {
+ a = 4
+ }
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = match 1 {
+ 1 => { 2 },
+ 2 => 3,
+ 3 => {
+ 4
+ }
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_not_last_not_applicable() {
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ b = a;
+ } else {
+ a = 3;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_pull_assignment_up_chained_if() {
+ cov_mark::check!(test_pull_assignment_up_chained_if);
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ } else if false {
+ a = 3;
+ } else {
+ a = 4;
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = if true {
+ 2
+ } else if false {
+ 3
+ } else {
+ 4
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_pull_assignment_up_retains_stmts() {
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ let b = 2;
+ $0a = 2;
+ } else {
+ let b = 3;
+ a = 3;
+ }
+}"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ a = if true {
+ let b = 2;
+ 2
+ } else {
+ let b = 3;
+ 3
+ };
+}"#,
+ )
+ }
+
+ #[test]
+ fn pull_assignment_up_let_stmt_not_applicable() {
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ let b = if true {
+ $0a = 2
+ } else {
+ a = 3
+ };
+}"#,
+ )
+ }
+
+ #[test]
+ fn pull_assignment_up_if_missing_assignment_not_applicable() {
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ if true {
+ $0a = 2;
+ } else {}
+}"#,
+ )
+ }
+
+ #[test]
+ fn pull_assignment_up_match_missing_assignment_not_applicable() {
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+
+ match 1 {
+ 1 => {
+ $0a = 2;
+ },
+ 2 => {
+ a = 3;
+ },
+ 3 => {},
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_pull_assignment_up_field_assignment() {
+ cov_mark::check!(test_pull_assignment_up_field_assignment);
+ check_assist(
+ pull_assignment_up,
+ r#"
+struct A(usize);
+
+fn foo() {
+ let mut a = A(1);
+
+ if true {
+ $0a.0 = 2;
+ } else {
+ a.0 = 3;
+ }
+}"#,
+ r#"
+struct A(usize);
+
+fn foo() {
+ let mut a = A(1);
+
+ a.0 = if true {
+ 2
+ } else {
+ 3
+ };
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_pull_assignment_up_deref() {
+ cov_mark::check!(test_pull_assignment_up_deref);
+ check_assist(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+ let b = &mut a;
+
+ if true {
+ $0*b = 2;
+ } else {
+ *b = 3;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ let mut a = 1;
+ let b = &mut a;
+
+ *b = if true {
+ 2
+ } else {
+ 3
+ };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_cant_pull_non_assignments() {
+ cov_mark::check!(test_cant_pull_non_assignments);
+ check_assist_not_applicable(
+ pull_assignment_up,
+ r#"
+fn foo() {
+ let mut a = 1;
+ let b = &mut a;
+
+ if true {
+ $0*b + 2;
+ } else {
+ *b + 3;
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
new file mode 100644
index 000000000..121f8b4a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -0,0 +1,548 @@
+use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef};
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{ast, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ handlers::qualify_path::QualifyCandidate,
+};
+
+// Assist: qualify_method_call
+//
+// Replaces the method call with a qualified function call.
+//
+// ```
+// struct Foo;
+// impl Foo {
+// fn foo(&self) {}
+// }
+// fn main() {
+// let foo = Foo;
+// foo.fo$0o();
+// }
+// ```
+// ->
+// ```
+// struct Foo;
+// impl Foo {
+// fn foo(&self) {}
+// }
+// fn main() {
+// let foo = Foo;
+// Foo::foo(&foo);
+// }
+// ```
+pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let name: ast::NameRef = ctx.find_node_at_offset()?;
+ let call = name.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
+
+ let ident = name.ident_token()?;
+
+ let range = call.syntax().text_range();
+ let resolved_call = ctx.sema.resolve_method_call(&call)?;
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let target_module_def = ModuleDef::from(resolved_call);
+ let item_in_ns = ItemInNs::from(target_module_def);
+ let receiver_path = current_module
+ .find_use_path(ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?)?;
+
+ let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);
+
+ acc.add(
+ AssistId("qualify_method_call", AssistKind::RefactorInline),
+ format!("Qualify `{}` method call", ident.text()),
+ range,
+ |builder| {
+ qualify_candidate.qualify(
+ |replace_with: String| builder.replace(range, replace_with),
+ &receiver_path,
+ item_in_ns,
+ )
+ },
+ );
+ Some(())
+}
+
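+// For an associated item, qualify through its trait or, for inherent impls, through the self type;
+// other items (and macros) are used for the path search as-is.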
+fn item_for_path_search(db: &dyn HirDatabase, item: ItemInNs) -> Option<ItemInNs> {
+ Some(match item {
+ ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
+ Some(assoc_item) => match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+ AssocItemContainer::Impl(impl_) => match impl_.trait_(db) {
+ None => ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)),
+ Some(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+ },
+ },
+ None => item,
+ },
+ ItemInNs::Macros(_) => item,
+ })
+}
+
+fn item_as_assoc(db: &dyn HirDatabase, item: ItemInNs) -> Option<AssocItem> {
+ item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn struct_method() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o()
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(&foo)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_multi_params() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o(9, 9u)
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(&foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_consume() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o(9, 9u)
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_exclusive() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o(9, 9u)
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self, p1: i32, p2: u32) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo(&mut foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_cross_crate() {
+ check_assist(
+ qualify_method_call,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ foo.fo$0o(9, 9u)
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub struct Foo;
+ impl Foo {
+ pub fn foo(&mut self, p1: i32, p2: u32) {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ dep::test_mod::Foo::foo(&mut foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_generic() {
+ check_assist(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo<T>(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ foo.fo$0o::<()>()
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo<T>(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ Foo::foo::<()>(&foo)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_multi_params() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(12, 32u)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(&test_struct, 12, 32u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_consume() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(12, 32u)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(test_struct, 12, 32u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_exclusive() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&mut self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&mut self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(12, 32u)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&mut self, p1: i32, p2: u32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&mut self, p1: i32, p2: u32) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ TestTrait::test_method(&mut test_struct, 12, 32u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_cross_crate() {
+ check_assist(
+ qualify_method_call,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ foo.fo$0o(9, 9u)
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub struct Foo;
+ impl Foo {
+ pub fn foo(&mut self, p1: i32, p2: u32) {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let foo = dep::test_mod::Foo {};
+ dep::test_mod::Foo::foo(&mut foo, 9, 9u)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_generic() {
+ check_assist(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = TestStruct {};
+ test_struct.test_meth$0od::<()>()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = TestStruct {};
+ TestTrait::test_method::<()>(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_method_over_struct_instance() {
+ check_assist_not_applicable(
+ qualify_method_call,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ let foo = Foo {};
+ f$0oo.foo()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_over_struct_instance() {
+ check_assist_not_applicable(
+ qualify_method_call,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::*;
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ tes$0t_struct.test_method()
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
new file mode 100644
index 000000000..0c2e9da38
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
@@ -0,0 +1,1297 @@
+use std::iter;
+
+use hir::AsAssocItem;
+use ide_db::RootDatabase;
+use ide_db::{
+ helpers::mod_path_to_ast,
+ imports::import_assets::{ImportCandidate, LocatedImport},
+};
+use syntax::{
+ ast,
+ ast::{make, HasArgList},
+ AstNode, NodeOrToken,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ handlers::auto_import::find_importable_node,
+ AssistId, AssistKind, GroupLabel,
+};
+
+// Assist: qualify_path
+//
+// If the name is unresolved, provides all possible qualified paths for it.
+//
+// ```
+// fn main() {
+// let map = HashMap$0::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+// ->
+// ```
+// fn main() {
+// let map = std::collections::HashMap::new();
+// }
+// # pub mod std { pub mod collections { pub struct HashMap { } } }
+// ```
+pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
+ let mut proposed_imports = import_assets.search_for_relative_paths(&ctx.sema);
+ if proposed_imports.is_empty() {
+ return None;
+ }
+
+ let range = match &syntax_under_caret {
+ NodeOrToken::Node(node) => ctx.sema.original_range(node).range,
+ NodeOrToken::Token(token) => token.text_range(),
+ };
+ let candidate = import_assets.import_candidate();
+ let qualify_candidate = match syntax_under_caret {
+ NodeOrToken::Node(syntax_under_caret) => match candidate {
+ ImportCandidate::Path(candidate) if candidate.qualifier.is_some() => {
+ cov_mark::hit!(qualify_path_qualifier_start);
+ let path = ast::Path::cast(syntax_under_caret)?;
+ let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?);
+ QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list())
+ }
+ ImportCandidate::Path(_) => {
+ cov_mark::hit!(qualify_path_unqualified_name);
+ let path = ast::Path::cast(syntax_under_caret)?;
+ let generics = path.segment()?.generic_arg_list();
+ QualifyCandidate::UnqualifiedName(generics)
+ }
+ ImportCandidate::TraitAssocItem(_) => {
+ cov_mark::hit!(qualify_path_trait_assoc_item);
+ let path = ast::Path::cast(syntax_under_caret)?;
+ let (qualifier, segment) = (path.qualifier()?, path.segment()?);
+ QualifyCandidate::TraitAssocItem(qualifier, segment)
+ }
+ ImportCandidate::TraitMethod(_) => {
+ cov_mark::hit!(qualify_path_trait_method);
+ let mcall_expr = ast::MethodCallExpr::cast(syntax_under_caret)?;
+ QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr)
+ }
+ },
+ // derive attribute path
+ NodeOrToken::Token(_) => QualifyCandidate::UnqualifiedName(None),
+ };
+
+ // we aren't interested in different namespaces
+ proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
+
+ let group_label = group_label(candidate);
+ for import in proposed_imports {
+ acc.add_group(
+ &group_label,
+ AssistId("qualify_path", AssistKind::QuickFix),
+ label(candidate, &import),
+ range,
+ |builder| {
+ qualify_candidate.qualify(
+ |replace_with: String| builder.replace(range, replace_with),
+ &import.import_path,
+ import.item_to_import,
+ )
+ },
+ );
+ }
+ Some(())
+}
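+
+// The kinds of usage that can be qualified. Each variant carries the syntax needed
+// to rebuild the expression with an explicit path prefix; the method variants also
+// carry the database (and, for `ImplMethod`, the resolved function) so the `self`
+// parameter can be looked up.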
+pub(crate) enum QualifyCandidate<'db> {
+ QualifierStart(ast::PathSegment, Option<ast::GenericArgList>),
+ UnqualifiedName(Option<ast::GenericArgList>),
+ TraitAssocItem(ast::Path, ast::PathSegment),
+ TraitMethod(&'db RootDatabase, ast::MethodCallExpr),
+ ImplMethod(&'db RootDatabase, ast::MethodCallExpr, hir::Function),
+}
+
+impl QualifyCandidate<'_> {
+ pub(crate) fn qualify(
+ &self,
+ mut replacer: impl FnMut(String),
+ import: &hir::ModPath,
+ item: hir::ItemInNs,
+ ) {
+ let import = mod_path_to_ast(import);
+ match self {
+ QualifyCandidate::QualifierStart(segment, generics) => {
+ let generics = generics.as_ref().map_or_else(String::new, ToString::to_string);
+ replacer(format!("{}{}::{}", import, generics, segment));
+ }
+ QualifyCandidate::UnqualifiedName(generics) => {
+ let generics = generics.as_ref().map_or_else(String::new, ToString::to_string);
+ replacer(format!("{}{}", import, generics));
+ }
+ QualifyCandidate::TraitAssocItem(qualifier, segment) => {
+ replacer(format!("<{} as {}>::{}", qualifier, import, segment));
+ }
+ QualifyCandidate::TraitMethod(db, mcall_expr) => {
+ Self::qualify_trait_method(db, mcall_expr, replacer, import, item);
+ }
+ QualifyCandidate::ImplMethod(db, mcall_expr, hir_fn) => {
+ Self::qualify_fn_call(db, mcall_expr, replacer, import, hir_fn);
+ }
+ }
+ }
+
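+ // Rewrites `receiver.method(args)` into `import::method(receiver, args)` when the
+ // resolved function takes `self`: the receiver is borrowed as `&`/`&mut` to match
+ // `&self`/`&mut self`, or passed by value for an owned `self`.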
+ fn qualify_fn_call(
+ db: &RootDatabase,
+ mcall_expr: &ast::MethodCallExpr,
+ mut replacer: impl FnMut(String),
+ import: ast::Path,
+ hir_fn: &hir::Function,
+ ) -> Option<()> {
+ let receiver = mcall_expr.receiver()?;
+ let method_name = mcall_expr.name_ref()?;
+ let generics =
+ mcall_expr.generic_arg_list().as_ref().map_or_else(String::new, ToString::to_string);
+ let arg_list = mcall_expr.arg_list().map(|arg_list| arg_list.args());
+
+ if let Some(self_access) = hir_fn.self_param(db).map(|sp| sp.access(db)) {
+ let receiver = match self_access {
+ hir::Access::Shared => make::expr_ref(receiver, false),
+ hir::Access::Exclusive => make::expr_ref(receiver, true),
+ hir::Access::Owned => receiver,
+ };
+ replacer(format!(
+ "{}::{}{}{}",
+ import,
+ method_name,
+ generics,
+ match arg_list {
+ Some(args) => make::arg_list(iter::once(receiver).chain(args)),
+ None => make::arg_list(iter::once(receiver)),
+ }
+ ));
+ }
+ Some(())
+ }
+
+ fn qualify_trait_method(
+ db: &RootDatabase,
+ mcall_expr: &ast::MethodCallExpr,
+ replacer: impl FnMut(String),
+ import: ast::Path,
+ item: hir::ItemInNs,
+ ) -> Option<()> {
+ let trait_method_name = mcall_expr.name_ref()?;
+ let trait_ = item_as_trait(db, item)?;
+ let method = find_trait_method(db, trait_, &trait_method_name)?;
+ Self::qualify_fn_call(db, mcall_expr, replacer, import, &method)
+ }
+}
+
+fn find_trait_method(
+ db: &RootDatabase,
+ trait_: hir::Trait,
+ trait_method_name: &ast::NameRef,
+) -> Option<hir::Function> {
+ if let Some(hir::AssocItem::Function(method)) =
+ trait_.items(db).into_iter().find(|item: &hir::AssocItem| {
+ item.name(db)
+ .map(|name| name.to_string() == trait_method_name.to_string())
+ .unwrap_or(false)
+ })
+ {
+ Some(method)
+ } else {
+ None
+ }
+}
+
+fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
+ let item_module_def = item.as_module_def()?;
+
+ match item_module_def {
+ hir::ModuleDef::Trait(trait_) => Some(trait_),
+ _ => item_module_def.as_assoc_item(db)?.containing_trait(db),
+ }
+}
+
+fn group_label(candidate: &ImportCandidate) -> GroupLabel {
+ let name = match candidate {
+ ImportCandidate::Path(it) => &it.name,
+ ImportCandidate::TraitAssocItem(it) | ImportCandidate::TraitMethod(it) => {
+ &it.assoc_item_name
+ }
+ }
+ .text();
+ GroupLabel(format!("Qualify {}", name))
+}
+
+fn label(candidate: &ImportCandidate, import: &LocatedImport) -> String {
+ match candidate {
+ ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => {
+ format!("Qualify as `{}`", import.import_path)
+ }
+ _ => format!("Qualify with `{}`", import.import_path),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn applicable_when_found_an_import_partial() {
+ cov_mark::check!(qualify_path_unqualified_name);
+ check_assist(
+ qualify_path,
+ r#"
+mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+}
+
+use std::fmt;
+
+$0Formatter
+"#,
+ r#"
+mod std {
+ pub mod fmt {
+ pub struct Formatter;
+ }
+}
+
+use std::fmt;
+
+fmt::Formatter
+"#,
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_an_import() {
+ check_assist(
+ qualify_path,
+ r#"
+$0PubStruct
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ r#"
+PubMod::PubStruct
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn applicable_in_macros() {
+ check_assist(
+ qualify_path,
+ r#"
+macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+}
+foo!(Pub$0Struct);
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($i:ident) => { fn foo(a: $i) {} }
+}
+foo!(PubMod::PubStruct);
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn applicable_when_found_multiple_imports() {
+ check_assist(
+ qualify_path,
+ r#"
+PubSt$0ruct
+
+pub mod PubMod1 {
+ pub struct PubStruct;
+}
+pub mod PubMod2 {
+ pub struct PubStruct;
+}
+pub mod PubMod3 {
+ pub struct PubStruct;
+}
+"#,
+ r#"
+PubMod3::PubStruct
+
+pub mod PubMod1 {
+ pub struct PubStruct;
+}
+pub mod PubMod2 {
+ pub struct PubStruct;
+}
+pub mod PubMod3 {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_already_imported_types() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+use PubMod::PubStruct;
+
+PubStruct$0
+
+pub mod PubMod {
+ pub struct PubStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_types_with_private_paths() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+PrivateStruct$0
+
+pub mod PubMod {
+ struct PrivateStruct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_when_no_imports_found() {
+ check_assist_not_applicable(qualify_path, r#"PubStruct$0"#);
+ }
+
+ #[test]
+ fn qualify_function() {
+ check_assist(
+ qualify_path,
+ r#"
+test_function$0
+
+pub mod PubMod {
+ pub fn test_function() {};
+}
+"#,
+ r#"
+PubMod::test_function
+
+pub mod PubMod {
+ pub fn test_function() {};
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn qualify_macro() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /lib.rs crate:crate_with_macro
+#[macro_export]
+macro_rules! foo {
+ () => ()
+}
+
+//- /main.rs crate:main deps:crate_with_macro
+fn main() {
+ foo$0
+}
+"#,
+ r#"
+fn main() {
+ crate_with_macro::foo
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn qualify_path_target() {
+ check_assist_target(
+ qualify_path,
+ r#"
+struct AssistInfo {
+ group_label: Option<$0GroupLabel>,
+}
+
+mod m { pub struct GroupLabel; }
+"#,
+ "GroupLabel",
+ )
+ }
+
+ #[test]
+ fn not_applicable_when_path_start_is_imported() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+pub mod mod1 {
+ pub mod mod2 {
+ pub mod mod3 {
+ pub struct TestStruct;
+ }
+ }
+}
+
+use mod1::mod2;
+fn main() {
+ mod2::mod3::TestStruct$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_function() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+pub mod test_mod {
+ pub fn test_function() {}
+}
+
+use test_mod::test_function;
+fn main() {
+ test_function$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_struct_function() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+}
+
+fn main() {
+ TestStruct::test_function$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ pub fn test_function() {}
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::test_function
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_struct_const() {
+ cov_mark::check!(qualify_path_qualifier_start);
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ TestStruct::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_struct_const_unqualified() {
+ // FIXME: non-trait assoc items completion is unsupported yet, see FIXME in the import_assets.rs for more details
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct {}
+ impl TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ TEST_CONST$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_trait_function() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::test_function$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+
+fn main() {
+ <test_mod::TestStruct as test_mod::TestTrait>::test_function
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_function() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub trait TestTrait2 {
+ fn test_function();
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_function() {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_function() {}
+ }
+}
+
+use test_mod::TestTrait2;
+fn main() {
+ test_mod::TestEnum::test_function$0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn associated_trait_const() {
+ cov_mark::check!(qualify_path_trait_assoc_item);
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ <test_mod::TestStruct as test_mod::TestTrait>::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_const() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub trait TestTrait2 {
+ const TEST_CONST: f64;
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ const TEST_CONST: f64 = 42.0;
+ }
+ impl TestTrait for TestEnum {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+use test_mod::TestTrait2;
+fn main() {
+ test_mod::TestEnum::TEST_CONST$0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn trait_method() {
+ cov_mark::check!(qualify_path_trait_method);
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_multi_params() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, test: i32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, test: i32) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od(42)
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self, test: i32);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self, test: i32) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method(&test_struct, 42)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_consume() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method(test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_cross_crate() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ dep::test_mod::TestTrait::test_method(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assoc_fn_cross_crate() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::test_func$0tion
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+"#,
+ r#"
+fn main() {
+ <dep::test_mod::TestStruct as dep::test_mod::TestTrait>::test_function
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assoc_const_cross_crate() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::CONST$0
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const CONST: bool;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const CONST: bool = true;
+ }
+}
+"#,
+ r#"
+fn main() {
+ <dep::test_mod::TestStruct as dep::test_mod::TestTrait>::CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assoc_fn_as_method_cross_crate() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_func$0tion()
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn private_trait_cross_crate() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+}
+//- /dep.rs crate:dep
+pub mod test_mod {
+ trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_imported_trait_for_method() {
+ check_assist_not_applicable(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub trait TestTrait2 {
+ fn test_method(&self);
+ }
+ pub enum TestEnum {
+ One,
+ Two,
+ }
+ impl TestTrait2 for TestEnum {
+ fn test_method(&self) {}
+ }
+ impl TestTrait for TestEnum {
+ fn test_method(&self) {}
+ }
+}
+
+use test_mod::TestTrait2;
+fn main() {
+ let one = test_mod::TestEnum::One;
+ one.test$0_method();
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn dep_import() {
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Struct$0
+}
+",
+ r"
+fn main() {
+ dep::Struct
+}
+",
+ );
+ }
+
+ #[test]
+ fn whole_segment() {
+ // Tests that only imports whose last segment matches the identifier get suggested.
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub mod fmt {
+ pub trait Display {}
+}
+
+pub fn panic_fmt() {}
+
+//- /main.rs crate:main deps:dep
+struct S;
+
+impl f$0mt::Display for S {}
+",
+ r"
+struct S;
+
+impl dep::fmt::Display for S {}
+",
+ );
+ }
+
+ #[test]
+ fn macro_generated() {
+ // Tests that macro-generated items are suggested from external crates.
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+macro_rules! mac {
+ () => {
+ pub struct Cheese;
+ };
+}
+
+mac!();
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Cheese$0;
+}
+",
+ r"
+fn main() {
+ dep::Cheese;
+}
+",
+ );
+ }
+
+ #[test]
+ fn casing() {
+ // Tests that differently cased names don't interfere and we only suggest the matching one.
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub struct FMT;
+pub struct fmt;
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ FMT$0;
+}
+",
+ r"
+fn main() {
+ dep::FMT;
+}
+",
+ );
+ }
+
+ #[test]
+ fn keep_generic_annotations() {
+ check_assist(
+ qualify_path,
+ r"
+//- /lib.rs crate:dep
+pub mod generic { pub struct Thing<'a, T>(&'a T); }
+
+//- /main.rs crate:main deps:dep
+fn foo() -> Thin$0g<'static, ()> {}
+
+fn main() {}
+",
+ r"
+fn foo() -> dep::generic::Thing<'static, ()> {}
+
+fn main() {}
+",
+ );
+ }
+
+ #[test]
+ fn keep_generic_annotations_leading_colon() {
+ check_assist(
+ qualify_path,
+ r#"
+//- /lib.rs crate:dep
+pub mod generic { pub struct Thing<'a, T>(&'a T); }
+
+//- /main.rs crate:main deps:dep
+fn foo() -> Thin$0g::<'static, ()> {}
+
+fn main() {}
+"#,
+ r"
+fn foo() -> dep::generic::Thing::<'static, ()> {}
+
+fn main() {}
+",
+ );
+ }
+
+ #[test]
+ fn associated_struct_const_generic() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub struct TestStruct<T> {}
+ impl<T> TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ TestStruct::<()>::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub struct TestStruct<T> {}
+ impl<T> TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::<()>::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_trait_const_generic() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct<T> {}
+ impl<T> TestTrait for TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ test_mod::TestStruct::<()>::TEST_CONST$0
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct<T> {}
+ impl<T> TestTrait for TestStruct<T> {
+ const TEST_CONST: u8 = 42;
+ }
+}
+
+fn main() {
+ <test_mod::TestStruct::<()> as test_mod::TestTrait>::TEST_CONST
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_method_generic() {
+ check_assist(
+ qualify_path,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od::<()>()
+}
+"#,
+ r#"
+mod test_mod {
+ pub trait TestTrait {
+ fn test_method<T>(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method<T>(&self) {}
+ }
+}
+
+fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_mod::TestTrait::test_method::<()>(&test_struct)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_derives() {
+ check_assist(
+ qualify_path,
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ r#"
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo::Copy)]
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn works_in_use_start() {
+ check_assist(
+ qualify_path,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo$0::Foo;
+"#,
+ r#"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use bar::foo::Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_in_non_start_use() {
+ check_assist_not_applicable(
+ qualify_path,
+ r"
+mod bar {
+ pub mod foo {
+ pub struct Foo;
+ }
+}
+use foo::Foo$0;
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
new file mode 100644
index 000000000..dbe8cb7bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
@@ -0,0 +1,509 @@
+use std::borrow::Cow;
+
+use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: make_raw_string
+//
+// Adds `r#` to a plain string literal.
+//
+// ```
+// fn main() {
+// "Hello,$0 World!";
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// r#"Hello, World!"#;
+// }
+// ```
+pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_at_offset::<ast::String>()?;
+ if token.is_raw() {
+ return None;
+ }
+ let value = token.value()?;
+ let target = token.syntax().text_range();
+ acc.add(
+ AssistId("make_raw_string", AssistKind::RefactorRewrite),
+ "Rewrite as raw string",
+ target,
+ |edit| {
+ let hashes = "#".repeat(required_hashes(&value).max(1));
+ if matches!(value, Cow::Borrowed(_)) {
+ // Avoid replacing the whole string to better position the cursor.
+ edit.insert(token.syntax().text_range().start(), format!("r{}", hashes));
+ edit.insert(token.syntax().text_range().end(), hashes);
+ } else {
+ edit.replace(
+ token.syntax().text_range(),
+ format!("r{}\"{}\"{}", hashes, value, hashes),
+ );
+ }
+ },
+ )
+}
+
+// Assist: make_usual_string
+//
+// Turns a raw string into a plain string.
+//
+// ```
+// fn main() {
+// r#"Hello,$0 "World!""#;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// "Hello, \"World!\"";
+// }
+// ```
+pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_at_offset::<ast::String>()?;
+ if !token.is_raw() {
+ return None;
+ }
+ let value = token.value()?;
+ let target = token.syntax().text_range();
+ acc.add(
+ AssistId("make_usual_string", AssistKind::RefactorRewrite),
+ "Rewrite as regular string",
+ target,
+ |edit| {
+ // escape characters (such as `"`) in the contents that are only valid inside a raw string
+ let escaped = value.escape_default().to_string();
+ if let Some(offsets) = token.quote_offsets() {
+ if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped {
+ edit.replace(offsets.quotes.0, "\"");
+ edit.replace(offsets.quotes.1, "\"");
+ return;
+ }
+ }
+
+ edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped));
+ },
+ )
+}
+
+// Assist: add_hash
+//
+// Adds a hash to a raw string literal.
+//
+// ```
+// fn main() {
+// r#"Hello,$0 World!"#;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// r##"Hello, World!"##;
+// }
+// ```
+pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_at_offset::<ast::String>()?;
+ if !token.is_raw() {
+ return None;
+ }
+ let text_range = token.syntax().text_range();
+ let target = text_range;
+ acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| {
+ edit.insert(text_range.start() + TextSize::of('r'), "#");
+ edit.insert(text_range.end(), "#");
+ })
+}
+
+// Assist: remove_hash
+//
+// Removes a hash from a raw string literal.
+//
+// ```
+// fn main() {
+// r#"Hello,$0 World!"#;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// r"Hello, World!";
+// }
+// ```
+pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_at_offset::<ast::String>()?;
+ if !token.is_raw() {
+ return None;
+ }
+
+ let text = token.text();
+ if !text.starts_with("r#") && text.ends_with('#') {
+ return None;
+ }
+
+ let existing_hashes = text.chars().skip(1).take_while(|&it| it == '#').count();
+
+ let text_range = token.syntax().text_range();
+ let internal_text = &text[token.text_range_between_quotes()? - text_range.start()];
+
+ if existing_hashes == required_hashes(internal_text) {
+ cov_mark::hit!(cant_remove_required_hash);
+ return None;
+ }
+
+ acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| {
+ edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#')));
+ edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end()));
+ })
+}
+
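+// Returns the minimum number of `#` needed to delimit a raw string containing `s`:
+// one more than the longest run of `#` that immediately follows a `"` inside `s`
+// (0 when `s` contains no quotes), e.g. `ab"##c` needs three hashes.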
+fn required_hashes(s: &str) -> usize {
+ let mut res = 0usize;
+ for idx in s.match_indices('"').map(|(i, _)| i) {
+ let (_, sub) = s.split_at(idx + 1);
+ let n_hashes = sub.chars().take_while(|c| *c == '#').count();
+ res = res.max(n_hashes + 1)
+ }
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_required_hashes() {
+ assert_eq!(0, required_hashes("abc"));
+ assert_eq!(0, required_hashes("###"));
+ assert_eq!(1, required_hashes("\""));
+ assert_eq!(2, required_hashes("\"#abc"));
+ assert_eq!(0, required_hashes("#abc"));
+ assert_eq!(3, required_hashes("#ab\"##c"));
+ assert_eq!(5, required_hashes("#ab\"##\"####c"));
+ }
+
+ #[test]
+ fn make_raw_string_target() {
+ check_assist_target(
+ make_raw_string,
+ r#"
+ fn f() {
+ let s = $0"random\nstring";
+ }
+ "#,
+ r#""random\nstring""#,
+ );
+ }
+
+ #[test]
+ fn make_raw_string_works() {
+ check_assist(
+ make_raw_string,
+ r#"
+fn f() {
+ let s = $0"random\nstring";
+}
+"#,
+ r##"
+fn f() {
+ let s = r#"random
+string"#;
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_works_inside_macros() {
+ check_assist(
+ make_raw_string,
+ r#"
+ fn f() {
+ format!($0"x = {}", 92)
+ }
+ "#,
+ r##"
+ fn f() {
+ format!(r#"x = {}"#, 92)
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_hashes_inside_works() {
+ check_assist(
+ make_raw_string,
+ r###"
+fn f() {
+ let s = $0"#random##\nstring";
+}
+"###,
+ r####"
+fn f() {
+ let s = r#"#random##
+string"#;
+}
+"####,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_closing_hashes_inside_works() {
+ check_assist(
+ make_raw_string,
+ r###"
+fn f() {
+ let s = $0"#random\"##\nstring";
+}
+"###,
+ r####"
+fn f() {
+ let s = r###"#random"##
+string"###;
+}
+"####,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_nothing_to_unescape_works() {
+ check_assist(
+ make_raw_string,
+ r#"
+ fn f() {
+ let s = $0"random string";
+ }
+ "#,
+ r##"
+ fn f() {
+ let s = r#"random string"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn make_raw_string_not_works_on_partial_string() {
+ check_assist_not_applicable(
+ make_raw_string,
+ r#"
+ fn f() {
+ let s = "foo$0
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_not_works_on_partial_string() {
+ check_assist_not_applicable(
+ make_usual_string,
+ r#"
+ fn main() {
+ let s = r#"bar$0
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_hash_target() {
+ check_assist_target(
+ add_hash,
+ r#"
+ fn f() {
+ let s = $0r"random string";
+ }
+ "#,
+ r#"r"random string""#,
+ );
+ }
+
+ #[test]
+ fn add_hash_works() {
+ check_assist(
+ add_hash,
+ r#"
+ fn f() {
+ let s = $0r"random string";
+ }
+ "#,
+ r##"
+ fn f() {
+ let s = r#"random string"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn add_more_hash_works() {
+ check_assist(
+ add_hash,
+ r##"
+ fn f() {
+ let s = $0r#"random"string"#;
+ }
+ "##,
+ r###"
+ fn f() {
+ let s = r##"random"string"##;
+ }
+ "###,
+ )
+ }
+
+ #[test]
+ fn add_hash_not_works() {
+ check_assist_not_applicable(
+ add_hash,
+ r#"
+ fn f() {
+ let s = $0"random string";
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn remove_hash_target() {
+ check_assist_target(
+ remove_hash,
+ r##"
+ fn f() {
+ let s = $0r#"random string"#;
+ }
+ "##,
+ r##"r#"random string"#"##,
+ );
+ }
+
+ #[test]
+ fn remove_hash_works() {
+ check_assist(
+ remove_hash,
+ r##"fn f() { let s = $0r#"random string"#; }"##,
+ r#"fn f() { let s = r"random string"; }"#,
+ )
+ }
+
+ #[test]
+ fn cant_remove_required_hash() {
+ cov_mark::check!(cant_remove_required_hash);
+ check_assist_not_applicable(
+ remove_hash,
+ r##"
+ fn f() {
+ let s = $0r#"random"str"ing"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn remove_more_hash_works() {
+ check_assist(
+ remove_hash,
+ r###"
+ fn f() {
+ let s = $0r##"random string"##;
+ }
+ "###,
+ r##"
+ fn f() {
+ let s = r#"random string"#;
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn remove_hash_doesnt_work() {
+ check_assist_not_applicable(remove_hash, r#"fn f() { let s = $0"random string"; }"#);
+ }
+
+ #[test]
+ fn remove_hash_no_hash_doesnt_work() {
+ check_assist_not_applicable(remove_hash, r#"fn f() { let s = $0r"random string"; }"#);
+ }
+
+ #[test]
+ fn make_usual_string_target() {
+ check_assist_target(
+ make_usual_string,
+ r##"
+ fn f() {
+ let s = $0r#"random string"#;
+ }
+ "##,
+ r##"r#"random string"#"##,
+ );
+ }
+
+ #[test]
+ fn make_usual_string_works() {
+ check_assist(
+ make_usual_string,
+ r##"
+ fn f() {
+ let s = $0r#"random string"#;
+ }
+ "##,
+ r#"
+ fn f() {
+ let s = "random string";
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_with_quote_works() {
+ check_assist(
+ make_usual_string,
+ r##"
+ fn f() {
+ let s = $0r#"random"str"ing"#;
+ }
+ "##,
+ r#"
+ fn f() {
+ let s = "random\"str\"ing";
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_more_hash_works() {
+ check_assist(
+ make_usual_string,
+ r###"
+ fn f() {
+ let s = $0r##"random string"##;
+ }
+ "###,
+ r##"
+ fn f() {
+ let s = "random string";
+ }
+ "##,
+ )
+ }
+
+ #[test]
+ fn make_usual_string_not_works() {
+ check_assist_not_applicable(
+ make_usual_string,
+ r#"
+ fn f() {
+ let s = $0"random string";
+ }
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
new file mode 100644
index 000000000..afaa7c933
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -0,0 +1,241 @@
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ match_ast, NodeOrToken, SyntaxElement, TextSize, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_dbg
+//
+// Removes `dbg!()` macro call.
+//
+// ```
+// fn main() {
+// $0dbg!(92);
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// 92;
+// }
+// ```
+pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?;
+ let tt = macro_call.token_tree()?;
+ let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
+ if macro_call.path()?.segment()?.name_ref()?.text() != "dbg"
+ || macro_call.excl_token().is_none()
+ {
+ return None;
+ }
+
+ let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
+ let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
+ let input_expressions = input_expressions
+ .into_iter()
+ .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+ .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+ .collect::<Option<Vec<ast::Expr>>>()?;
+
+ let macro_expr = ast::MacroExpr::cast(macro_call.syntax().parent()?)?;
+ let parent = macro_expr.syntax().parent()?;
+ let (range, text) = match &*input_expressions {
+ // dbg!()
+ [] => {
+ match_ast! {
+ match parent {
+ ast::StmtList(__) => {
+ let range = macro_expr.syntax().text_range();
+ let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
+ Some(start) => range.cover_offset(start),
+ None => range,
+ };
+ (range, String::new())
+ },
+ ast::ExprStmt(it) => {
+ let range = it.syntax().text_range();
+ let range = match whitespace_start(it.syntax().prev_sibling_or_token()) {
+ Some(start) => range.cover_offset(start),
+ None => range,
+ };
+ (range, String::new())
+ },
+ _ => (macro_call.syntax().text_range(), "()".to_owned())
+ }
+ }
+ }
+ // dbg!(expr0)
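+ // Dropping the `dbg!` wrapper can change how the inner expression binds to its
+ // parent, so it is parenthesised whenever the parent would otherwise bind more
+ // tightly, e.g. `dbg!(x as u32).foo()` must become `(x as u32).foo()`.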
+ [expr] => {
+ let wrap = match ast::Expr::cast(parent) {
+ Some(parent) => match (expr, parent) {
+ (ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false,
+ (
+ ast::Expr::BoxExpr(_) | ast::Expr::PrefixExpr(_) | ast::Expr::RefExpr(_),
+ ast::Expr::AwaitExpr(_)
+ | ast::Expr::CallExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::TryExpr(_),
+ ) => true,
+ (
+ ast::Expr::BinExpr(_) | ast::Expr::CastExpr(_) | ast::Expr::RangeExpr(_),
+ ast::Expr::AwaitExpr(_)
+ | ast::Expr::BinExpr(_)
+ | ast::Expr::CallExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::PrefixExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::TryExpr(_),
+ ) => true,
+ _ => false,
+ },
+ None => false,
+ };
+ (
+ macro_call.syntax().text_range(),
+ if wrap { format!("({})", expr) } else { expr.to_string() },
+ )
+ }
+ // dbg!(expr0, expr1, ...)
+ exprs => (macro_call.syntax().text_range(), format!("({})", exprs.iter().format(", "))),
+ };
+
+ acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", range, |builder| {
+ builder.replace(range, text);
+ })
+}
+
+fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
+ Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist(
+ remove_dbg,
+ &format!("fn main() {{\n{}\n}}", ra_fixture_before),
+ &format!("fn main() {{\n{}\n}}", ra_fixture_after),
+ );
+ }
+
+ #[test]
+ fn test_remove_dbg() {
+ check("$0dbg!(1 + 1)", "1 + 1");
+ check("dbg!$0(1 + 1)", "1 + 1");
+ check("dbg!(1 $0+ 1)", "1 + 1");
+ check("dbg![$01 + 1]", "1 + 1");
+ check("dbg!{$01 + 1}", "1 + 1");
+ }
+
+ #[test]
+ fn test_remove_dbg_not_applicable() {
+ check_assist_not_applicable(remove_dbg, "fn main() {$0vec![1, 2, 3]}");
+ check_assist_not_applicable(remove_dbg, "fn main() {$0dbg(5, 6, 7)}");
+ check_assist_not_applicable(remove_dbg, "fn main() {$0dbg!(5, 6, 7}");
+ }
+
+ #[test]
+ fn test_remove_dbg_keep_semicolon_in_let() {
+ // https://github.com/rust-lang/rust-analyzer/issues/5129#issuecomment-651399779
+ check(
+ r#"let res = $0dbg!(1 * 20); // needless comment"#,
+ r#"let res = 1 * 20; // needless comment"#,
+ );
+ check(r#"let res = $0dbg!(); // needless comment"#, r#"let res = (); // needless comment"#);
+ check(
+ r#"let res = $0dbg!(1, 2); // needless comment"#,
+ r#"let res = (1, 2); // needless comment"#,
+ );
+ }
+
+ #[test]
+ fn test_remove_dbg_cast_cast() {
+ check(r#"let res = $0dbg!(x as u32) as u32;"#, r#"let res = x as u32 as u32;"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_prefix() {
+ check(r#"let res = $0dbg!(&result).foo();"#, r#"let res = (&result).foo();"#);
+ check(r#"let res = &$0dbg!(&result);"#, r#"let res = &&result;"#);
+ check(r#"let res = $0dbg!(!result) && true;"#, r#"let res = !result && true;"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_post_expr() {
+ check(r#"let res = $0dbg!(fut.await).foo();"#, r#"let res = fut.await.foo();"#);
+ check(r#"let res = $0dbg!(result?).foo();"#, r#"let res = result?.foo();"#);
+ check(r#"let res = $0dbg!(foo as u32).foo();"#, r#"let res = (foo as u32).foo();"#);
+ check(r#"let res = $0dbg!(array[3]).foo();"#, r#"let res = array[3].foo();"#);
+ check(r#"let res = $0dbg!(tuple.3).foo();"#, r#"let res = tuple.3.foo();"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_range_expr() {
+ check(r#"let res = $0dbg!(foo..bar).foo();"#, r#"let res = (foo..bar).foo();"#);
+ check(r#"let res = $0dbg!(foo..=bar).foo();"#, r#"let res = (foo..=bar).foo();"#);
+ }
+
+ #[test]
+ fn test_remove_empty_dbg() {
+ check_assist(remove_dbg, r#"fn foo() { $0dbg!(); }"#, r#"fn foo() { }"#);
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ $0dbg!();
+}
+"#,
+ r#"
+fn foo() {
+}
+"#,
+ );
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ let test = $0dbg!();
+}"#,
+ r#"
+fn foo() {
+ let test = ();
+}"#,
+ );
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ let t = {
+ println!("Hello, world");
+ $0dbg!()
+ };
+}"#,
+ r#"
+fn foo() {
+ let t = {
+ println!("Hello, world");
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_remove_multi_dbg() {
+ check(r#"$0dbg!(0, 1)"#, r#"(0, 1)"#);
+ check(r#"$0dbg!(0, (1, 2))"#, r#"(0, (1, 2))"#);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
new file mode 100644
index 000000000..0b299e834
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
@@ -0,0 +1,37 @@
+use syntax::{SyntaxKind, TextRange, T};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_mut
+//
+// Removes the `mut` keyword.
+//
+// ```
+// impl Walrus {
+// fn feed(&mut$0 self, amount: u32) {}
+// }
+// ```
+// ->
+// ```
+// impl Walrus {
+// fn feed(&self, amount: u32) {}
+// }
+// ```
+pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let mut_token = ctx.find_token_syntax_at_offset(T![mut])?;
+ let delete_from = mut_token.text_range().start();
+ let delete_to = match mut_token.next_token() {
+ Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(),
+ _ => mut_token.text_range().end(),
+ };
+
+ let target = mut_token.text_range();
+ acc.add(
+ AssistId("remove_mut", AssistKind::Refactor),
+ "Remove `mut` keyword",
+ target,
+ |builder| {
+ builder.delete(TextRange::new(delete_from, delete_to));
+ },
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
new file mode 100644
index 000000000..59ea94ea1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
@@ -0,0 +1,409 @@
+use ide_db::{base_db::FileId, defs::Definition, search::FileReference};
+use syntax::{
+ algo::find_node_at_range,
+ ast::{self, HasArgList},
+ AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, T,
+};
+
+use SyntaxKind::WHITESPACE;
+
+use crate::{
+ assist_context::AssistBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: remove_unused_param
+//
+// Removes unused function parameter.
+//
+// ```
+// fn frobnicate(x: i32$0) {}
+//
+// fn main() {
+// frobnicate(92);
+// }
+// ```
+// ->
+// ```
+// fn frobnicate() {}
+//
+// fn main() {
+// frobnicate();
+// }
+// ```
+pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let param: ast::Param = ctx.find_node_at_offset()?;
+ let ident_pat = match param.pat()? {
+ ast::Pat::IdentPat(it) => it,
+ _ => return None,
+ };
+ let func = param.syntax().ancestors().find_map(ast::Fn::cast)?;
+ let is_self_present =
+ param.syntax().parent()?.children().find_map(ast::SelfParam::cast).is_some();
+
+ // check if fn is in impl Trait for ..
+ if func
+ .syntax()
+ .parent() // AssocItemList
+ .and_then(|x| x.parent())
+ .and_then(ast::Impl::cast)
+ .map_or(false, |imp| imp.trait_().is_some())
+ {
+ cov_mark::hit!(trait_impl);
+ return None;
+ }
+
+ let mut param_position = func.param_list()?.params().position(|it| it == param)?;
+ // param_list() does not take the self param into consideration, hence this additional check
+ // is required. For associated functions param_position is incremented here; for calls made
+ // with method syntax the increment is reverted below, in process_usage, because those calls
+ // do not pass an explicit self argument.
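+ // e.g. in `impl S { fn f(&self, x: i32) {} }` the parameter `x` has position 0 in
+ // param_list() but is argument 1 in the qualified call `S::f(&s, 92)`.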
+ if is_self_present {
+ param_position += 1;
+ }
+ let fn_def = {
+ let func = ctx.sema.to_def(&func)?;
+ Definition::Function(func)
+ };
+
+ let param_def = {
+ let local = ctx.sema.to_def(&ident_pat)?;
+ Definition::Local(local)
+ };
+ if param_def.usages(&ctx.sema).at_least_one() {
+ cov_mark::hit!(keep_used);
+ return None;
+ }
+ acc.add(
+ AssistId("remove_unused_param", AssistKind::Refactor),
+ "Remove unused parameter",
+ param.syntax().text_range(),
+ |builder| {
+ builder.delete(range_to_remove(param.syntax()));
+ for (file_id, references) in fn_def.usages(&ctx.sema).all() {
+ process_usages(ctx, builder, file_id, references, param_position, is_self_present);
+ }
+ },
+ )
+}
+
+fn process_usages(
+ ctx: &AssistContext<'_>,
+ builder: &mut AssistBuilder,
+ file_id: FileId,
+ references: Vec<FileReference>,
+ arg_to_remove: usize,
+ is_self_present: bool,
+) {
+ let source_file = ctx.sema.parse(file_id);
+ builder.edit_file(file_id);
+ let possible_ranges = references
+ .into_iter()
+ .filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present));
+
+ let mut ranges_to_delete: Vec<TextRange> = vec![];
+ for range in possible_ranges {
+ if !ranges_to_delete.iter().any(|it| it.contains_range(range)) {
+ ranges_to_delete.push(range)
+ }
+ }
+
+ for range in ranges_to_delete {
+ builder.delete(range)
+ }
+}
+
+fn process_usage(
+ source_file: &SourceFile,
+ FileReference { range, .. }: FileReference,
+ mut arg_to_remove: usize,
+ is_self_present: bool,
+) -> Option<TextRange> {
+ let call_expr_opt: Option<ast::CallExpr> = find_node_at_range(source_file.syntax(), range);
+ if let Some(call_expr) = call_expr_opt {
+ let call_expr_range = call_expr.expr()?.syntax().text_range();
+ if !call_expr_range.contains_range(range) {
+ return None;
+ }
+
+ let arg = call_expr.arg_list()?.args().nth(arg_to_remove)?;
+ return Some(range_to_remove(arg.syntax()));
+ }
+
+ let method_call_expr_opt: Option<ast::MethodCallExpr> =
+ find_node_at_range(source_file.syntax(), range);
+ if let Some(method_call_expr) = method_call_expr_opt {
+ let method_call_expr_range = method_call_expr.name_ref()?.syntax().text_range();
+ if !method_call_expr_range.contains_range(range) {
+ return None;
+ }
+
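+ // method-call syntax does not pass the receiver through arg_list(), so the
+ // offset added for the explicit self parameter is undone here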
+ if is_self_present {
+ arg_to_remove -= 1;
+ }
+
+ let arg = method_call_expr.arg_list()?.args().nth(arg_to_remove)?;
+ return Some(range_to_remove(arg.syntax()));
+ }
+
+ None
+}
+
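+// Extends the node's range over an adjacent `,` (and, when the node is not the last
+// element, the whitespace following that comma), so that deleting the second argument
+// of `foo(1, 2)` removes `, 2` and leaves `foo(1)` rather than `foo(1, )`.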
+pub(crate) fn range_to_remove(node: &SyntaxNode) -> TextRange {
+ let up_to_comma = next_prev().find_map(|dir| {
+ node.siblings_with_tokens(dir)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ .map(|it| (dir, it))
+ });
+ if let Some((dir, token)) = up_to_comma {
+ if node.next_sibling().is_some() {
+ let up_to_space = token
+ .siblings_with_tokens(dir)
+ .skip(1)
+ .take_while(|it| it.kind() == WHITESPACE)
+ .last()
+ .and_then(|it| it.into_token());
+ return node
+ .text_range()
+ .cover(up_to_space.map_or(token.text_range(), |it| it.text_range()));
+ }
+ node.text_range().cover(token.text_range())
+ } else {
+ node.text_range()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn remove_unused() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn a() { foo(9, 2) }
+fn foo(x: i32, $0y: i32) { x; }
+fn b() { foo(9, 2,) }
+"#,
+ r#"
+fn a() { foo(9) }
+fn foo(x: i32) { x; }
+fn b() { foo(9, ) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_first_param() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo($0x: i32, y: i32) { y; }
+fn a() { foo(1, 2) }
+fn b() { foo(1, 2,) }
+"#,
+ r#"
+fn foo(y: i32) { y; }
+fn a() { foo(2) }
+fn b() { foo(2,) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_single_param() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo($0x: i32) { 0; }
+fn a() { foo(1) }
+fn b() { foo(1, ) }
+"#,
+ r#"
+fn foo() { 0; }
+fn a() { foo() }
+fn b() { foo( ) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_surrounded_by_params() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo(x: i32, $0y: i32, z: i32) { x; }
+fn a() { foo(1, 2, 3) }
+fn b() { foo(1, 2, 3,) }
+"#,
+ r#"
+fn foo(x: i32, z: i32) { x; }
+fn a() { foo(1, 3) }
+fn b() { foo(1, 3,) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_qualified_call() {
+ check_assist(
+ remove_unused_param,
+ r#"
+mod bar { pub fn foo(x: i32, $0y: i32) { x; } }
+fn b() { bar::foo(9, 2) }
+"#,
+ r#"
+mod bar { pub fn foo(x: i32) { x; } }
+fn b() { bar::foo(9) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_turbofished_func() {
+ check_assist(
+ remove_unused_param,
+ r#"
+pub fn foo<T>(x: T, $0y: i32) { x; }
+fn b() { foo::<i32>(9, 2) }
+"#,
+ r#"
+pub fn foo<T>(x: T) { x; }
+fn b() { foo::<i32>(9) }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_generic_unused_param_func() {
+ check_assist(
+ remove_unused_param,
+ r#"
+pub fn foo<T>(x: i32, $0y: T) { x; }
+fn b() { foo::<i32>(9, 2) }
+fn b2() { foo(9, 2) }
+"#,
+ r#"
+pub fn foo<T>(x: i32) { x; }
+fn b() { foo::<i32>(9) }
+fn b2() { foo(9) }
+"#,
+ );
+ }
+
+ #[test]
+ fn keep_used() {
+ cov_mark::check!(keep_used);
+ check_assist_not_applicable(
+ remove_unused_param,
+ r#"
+fn foo(x: i32, $0y: i32) { y; }
+fn main() { foo(9, 2) }
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_impl() {
+ cov_mark::check!(trait_impl);
+ check_assist_not_applicable(
+ remove_unused_param,
+ r#"
+trait Trait {
+ fn foo(x: i32);
+}
+impl Trait for () {
+ fn foo($0x: i32) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_across_files() {
+ check_assist(
+ remove_unused_param,
+ r#"
+//- /main.rs
+fn foo(x: i32, $0y: i32) { x; }
+
+mod foo;
+
+//- /foo.rs
+use super::foo;
+
+fn bar() {
+ let _ = foo(1, 2);
+}
+"#,
+ r#"
+//- /main.rs
+fn foo(x: i32) { x; }
+
+mod foo;
+
+//- /foo.rs
+use super::foo;
+
+fn bar() {
+ let _ = foo(1);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_remove_method_param() {
+ check_assist(
+ remove_unused_param,
+ r#"
+struct S;
+impl S { fn f(&self, $0_unused: i32) {} }
+fn main() {
+ S.f(92);
+ S.f();
+ S.f(93, 92);
+ S::f(&S, 92);
+}
+"#,
+ r#"
+struct S;
+impl S { fn f(&self) {} }
+fn main() {
+ S.f();
+ S.f();
+ S.f(92);
+ S::f(&S);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn nested_call() {
+ check_assist(
+ remove_unused_param,
+ r#"
+fn foo(x: i32, $0y: i32) -> i32 {
+ x
+}
+
+fn bar() {
+ foo(1, foo(2, 3));
+}
+"#,
+ r#"
+fn foo(x: i32) -> i32 {
+ x
+}
+
+fn bar() {
+ foo(1);
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
new file mode 100644
index 000000000..a899c7a64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
@@ -0,0 +1,212 @@
+use either::Either;
+use ide_db::FxHashMap;
+use itertools::Itertools;
+use syntax::{ast, ted, AstNode};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: reorder_fields
+//
+// Reorders the fields of record literals and record patterns to match the order
+// in which they are declared in the definition.
+//
+// ```
+// struct Foo {foo: i32, bar: i32};
+// const test: Foo = $0Foo {bar: 0, foo: 1}
+// ```
+// ->
+// ```
+// struct Foo {foo: i32, bar: i32};
+// const test: Foo = Foo {foo: 1, bar: 0}
+// ```
+pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let record = ctx
+ .find_node_at_offset::<ast::RecordExpr>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?;
+
+ let path = record.as_ref().either(|it| it.path(), |it| it.path())?;
+ let ranks = compute_fields_ranks(&path, ctx)?;
+ let get_rank_of_field =
+ |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX);
+
+ let field_list = match &record {
+ Either::Left(it) => Either::Left(it.record_expr_field_list()?),
+ Either::Right(it) => Either::Right(it.record_pat_field_list()?),
+ };
+ let fields = match field_list {
+ Either::Left(it) => Either::Left((
+ it.fields()
+ .sorted_unstable_by_key(|field| {
+ get_rank_of_field(field.field_name().map(|it| it.to_string()))
+ })
+ .collect::<Vec<_>>(),
+ it,
+ )),
+ Either::Right(it) => Either::Right((
+ it.fields()
+ .sorted_unstable_by_key(|field| {
+ get_rank_of_field(field.field_name().map(|it| it.to_string()))
+ })
+ .collect::<Vec<_>>(),
+ it,
+ )),
+ };
+
+ let is_sorted = fields.as_ref().either(
+ |(sorted, field_list)| field_list.fields().zip(sorted).all(|(a, b)| a == *b),
+ |(sorted, field_list)| field_list.fields().zip(sorted).all(|(a, b)| a == *b),
+ );
+ if is_sorted {
+ cov_mark::hit!(reorder_sorted_fields);
+ return None;
+ }
+ let target = record.as_ref().either(AstNode::syntax, AstNode::syntax).text_range();
+ acc.add(
+ AssistId("reorder_fields", AssistKind::RefactorRewrite),
+ "Reorder record fields",
+ target,
+ |builder| match fields {
+ Either::Left((sorted, field_list)) => {
+ replace(builder.make_mut(field_list).fields(), sorted)
+ }
+ Either::Right((sorted, field_list)) => {
+ replace(builder.make_mut(field_list).fields(), sorted)
+ }
+ },
+ )
+}
+
+fn replace<T: AstNode + PartialEq>(
+ fields: impl Iterator<Item = T>,
+ sorted_fields: impl IntoIterator<Item = T>,
+) {
+ fields.zip(sorted_fields).for_each(|(field, sorted_field)| {
+ ted::replace(field.syntax(), sorted_field.syntax().clone_for_update())
+ });
+}
+
+fn compute_fields_ranks(
+ path: &ast::Path,
+ ctx: &AssistContext<'_>,
+) -> Option<FxHashMap<String, usize>> {
+ let strukt = match ctx.sema.resolve_path(path) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Struct(it)))) => it,
+ _ => return None,
+ };
+
+ let res = strukt
+ .fields(ctx.db())
+ .into_iter()
+ .enumerate()
+ .map(|(idx, field)| (field.name(ctx.db()).to_string(), idx))
+ .collect();
+
+ Some(res)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn reorder_sorted_fields() {
+ cov_mark::check!(reorder_sorted_fields);
+ check_assist_not_applicable(
+ reorder_fields,
+ r#"
+struct Foo { foo: i32, bar: i32 }
+const test: Foo = $0Foo { foo: 0, bar: 0 };
+"#,
+ )
+ }
+
+ #[test]
+ fn trivial_empty_fields() {
+ check_assist_not_applicable(
+ reorder_fields,
+ r#"
+struct Foo {}
+const test: Foo = $0Foo {};
+"#,
+ )
+ }
+
+ #[test]
+ fn reorder_struct_fields() {
+ check_assist(
+ reorder_fields,
+ r#"
+struct Foo { foo: i32, bar: i32 }
+const test: Foo = $0Foo { bar: 0, foo: 1 };
+"#,
+ r#"
+struct Foo { foo: i32, bar: i32 }
+const test: Foo = Foo { foo: 1, bar: 0 };
+"#,
+ )
+ }
+ #[test]
+ fn reorder_struct_pattern() {
+ check_assist(
+ reorder_fields,
+ r#"
+struct Foo { foo: i64, bar: i64, baz: i64 }
+
+fn f(f: Foo) {
+ match f {
+ $0Foo { baz: 0, ref mut bar, .. } => (),
+ _ => ()
+ }
+}
+"#,
+ r#"
+struct Foo { foo: i64, bar: i64, baz: i64 }
+
+fn f(f: Foo) {
+ match f {
+ Foo { ref mut bar, baz: 0, .. } => (),
+ _ => ()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn reorder_with_extra_field() {
+ check_assist(
+ reorder_fields,
+ r#"
+struct Foo { foo: String, bar: String }
+
+impl Foo {
+ fn new() -> Foo {
+ let foo = String::new();
+ $0Foo {
+ bar: foo.clone(),
+ extra: "Extra field",
+ foo,
+ }
+ }
+}
+"#,
+ r#"
+struct Foo { foo: String, bar: String }
+
+impl Foo {
+ fn new() -> Foo {
+ let foo = String::new();
+ Foo {
+ foo,
+ bar: foo.clone(),
+ extra: "Extra field",
+ }
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
new file mode 100644
index 000000000..208c3e109
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
@@ -0,0 +1,284 @@
+use hir::{PathResolution, Semantics};
+use ide_db::{FxHashMap, RootDatabase};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, HasName},
+ ted, AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: reorder_impl_items
+//
+// Reorder the items of an `impl Trait`. The items will be reordered to match
+// the order in the trait definition.
+//
+// ```
+// trait Foo {
+// type A;
+// const B: u8;
+// fn c();
+// }
+//
+// struct Bar;
+// $0impl Foo for Bar {
+// const B: u8 = 17;
+// fn c() {}
+// type A = String;
+// }
+// ```
+// ->
+// ```
+// trait Foo {
+// type A;
+// const B: u8;
+// fn c();
+// }
+//
+// struct Bar;
+// impl Foo for Bar {
+// type A = String;
+// const B: u8 = 17;
+// fn c() {}
+// }
+// ```
+pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_ast = ctx.find_node_at_offset::<ast::Impl>()?;
+ let items = impl_ast.assoc_item_list()?;
+ let assoc_items = items.assoc_items().collect::<Vec<_>>();
+
+ let path = impl_ast
+ .trait_()
+ .and_then(|t| match t {
+ ast::Type::PathType(path) => Some(path),
+ _ => None,
+ })?
+ .path()?;
+
+ let ranks = compute_item_ranks(&path, ctx)?;
+ let sorted: Vec<_> = assoc_items
+ .iter()
+ .cloned()
+ .sorted_by_key(|i| {
+ let name = match i {
+ ast::AssocItem::Const(c) => c.name(),
+ ast::AssocItem::Fn(f) => f.name(),
+ ast::AssocItem::TypeAlias(t) => t.name(),
+ ast::AssocItem::MacroCall(_) => None,
+ };
+
+ name.and_then(|n| ranks.get(&n.to_string()).copied()).unwrap_or(usize::max_value())
+ })
+ .collect();
+
+ // Don't edit already sorted methods:
+ if assoc_items == sorted {
+ cov_mark::hit!(not_applicable_if_sorted);
+ return None;
+ }
+
+ let target = items.syntax().text_range();
+ acc.add(
+ AssistId("reorder_impl_items", AssistKind::RefactorRewrite),
+ "Sort items by trait definition",
+ target,
+ |builder| {
+ let assoc_items =
+ assoc_items.into_iter().map(|item| builder.make_mut(item)).collect::<Vec<_>>();
+ assoc_items
+ .into_iter()
+ .zip(sorted)
+ .for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax()));
+ },
+ )
+}
+
+fn compute_item_ranks(
+ path: &ast::Path,
+ ctx: &AssistContext<'_>,
+) -> Option<FxHashMap<String, usize>> {
+ let td = trait_definition(path, &ctx.sema)?;
+
+ Some(
+ td.items(ctx.db())
+ .iter()
+ .flat_map(|i| i.name(ctx.db()))
+ .enumerate()
+ .map(|(idx, name)| (name.to_string(), idx))
+ .collect(),
+ )
+}
+
+fn trait_definition(path: &ast::Path, sema: &Semantics<'_, RootDatabase>) -> Option<hir::Trait> {
+ match sema.resolve_path(path)? {
+ PathResolution::Def(hir::ModuleDef::Trait(trait_)) => Some(trait_),
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_if_sorted() {
+ cov_mark::check!(not_applicable_if_sorted);
+ check_assist_not_applicable(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ type T;
+ const C: ();
+ fn a() {}
+ fn z() {}
+ fn b() {}
+}
+struct Foo;
+$0impl Bar for Foo {
+ type T = ();
+ const C: () = ();
+ fn a() {}
+ fn z() {}
+ fn b() {}
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn reorder_impl_trait_functions() {
+ check_assist(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ fn a() {}
+ fn c() {}
+ fn b() {}
+ fn d() {}
+}
+
+struct Foo;
+$0impl Bar for Foo {
+ fn d() {}
+ fn b() {}
+ fn c() {}
+ fn a() {}
+}
+"#,
+ r#"
+trait Bar {
+ fn a() {}
+ fn c() {}
+ fn b() {}
+ fn d() {}
+}
+
+struct Foo;
+impl Bar for Foo {
+ fn a() {}
+ fn c() {}
+ fn b() {}
+ fn d() {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_empty() {
+ check_assist_not_applicable(
+ reorder_impl_items,
+ r#"
+trait Bar {};
+struct Foo;
+$0impl Bar for Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn reorder_impl_trait_items() {
+ check_assist(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ fn a() {}
+ type T0;
+ fn c() {}
+ const C1: ();
+ fn b() {}
+ type T1;
+ fn d() {}
+ const C0: ();
+}
+
+struct Foo;
+$0impl Bar for Foo {
+ type T1 = ();
+ fn d() {}
+ fn b() {}
+ fn c() {}
+ const C1: () = ();
+ fn a() {}
+ type T0 = ();
+ const C0: () = ();
+}
+ "#,
+ r#"
+trait Bar {
+ fn a() {}
+ type T0;
+ fn c() {}
+ const C1: ();
+ fn b() {}
+ type T1;
+ fn d() {}
+ const C0: ();
+}
+
+struct Foo;
+impl Bar for Foo {
+ fn a() {}
+ type T0 = ();
+ fn c() {}
+ const C1: () = ();
+ fn b() {}
+ type T1 = ();
+ fn d() {}
+ const C0: () = ();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn reorder_impl_trait_items_uneven_ident_lengths() {
+ check_assist(
+ reorder_impl_items,
+ r#"
+trait Bar {
+ type Foo;
+ type Fooo;
+}
+
+struct Foo;
+impl Bar for Foo {
+ type Fooo = ();
+ type Foo = ();$0
+}"#,
+ r#"
+trait Bar {
+ type Foo;
+ type Fooo;
+}
+
+struct Foo;
+impl Bar for Foo {
+ type Foo = ();
+ type Fooo = ();
+}"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
new file mode 100644
index 000000000..bd50208da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -0,0 +1,1250 @@
+use hir::{InFile, ModuleDef};
+use ide_db::{
+ helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, HasName},
+ SyntaxKind::WHITESPACE,
+};
+
+use crate::{
+ assist_context::{AssistBuilder, AssistContext, Assists},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
+ generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
+ },
+ AssistId, AssistKind,
+};
+
+// Assist: replace_derive_with_manual_impl
+//
+// Converts a `derive` impl into a manual one.
+//
+// ```
+// # //- minicore: derive
+// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+// #[derive(Deb$0ug, Display)]
+// struct S;
+// ```
+// ->
+// ```
+// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+// #[derive(Display)]
+// struct S;
+//
+// impl Debug for S {
+// $0fn fmt(&self, f: &mut Formatter) -> Result<()> {
+// f.debug_struct("S").finish()
+// }
+// }
+// ```
+pub(crate) fn replace_derive_with_manual_impl(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
+ let path = attr.path()?;
+ let hir_file = ctx.sema.hir_file_for(attr.syntax());
+ if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
+ return None;
+ }
+
+ let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
+ if file_id.is_macro() {
+ // FIXME: make this work in macro files
+ return None;
+ }
+ // collect the derive paths from the #[derive] expansion
+ let current_derives = ctx
+ .sema
+ .parse_or_expand(hir_file)?
+ .descendants()
+ .filter_map(ast::Attr::cast)
+ .filter_map(|attr| attr.path())
+ .collect::<Vec<_>>();
+
+ let adt = value.parent().and_then(ast::Adt::cast)?;
+ let attr = ast::Attr::cast(value)?;
+ let args = attr.token_tree()?;
+
+ let current_module = ctx.sema.scope(adt.syntax())?.module();
+ let current_crate = current_module.krate();
+
+ let found_traits = items_locator::items_with_name(
+ &ctx.sema,
+ current_crate,
+ NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
+ items_locator::AssocItemSearch::Exclude,
+ Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| match item.as_module_def()? {
+ ModuleDef::Trait(trait_) => Some(trait_),
+ _ => None,
+ })
+ .flat_map(|trait_| {
+ current_module
+ .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_))
+ .as_ref()
+ .map(mod_path_to_ast)
+ .zip(Some(trait_))
+ });
+
+ let mut no_traits_found = true;
+ for (replace_trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) {
+ add_assist(
+ acc,
+ ctx,
+ &attr,
+ &current_derives,
+ &args,
+ &path,
+ &replace_trait_path,
+ Some(trait_),
+ &adt,
+ )?;
+ }
+ if no_traits_found {
+ add_assist(acc, ctx, &attr, &current_derives, &args, &path, &path, None, &adt)?;
+ }
+ Some(())
+}
+
+fn add_assist(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ attr: &ast::Attr,
+ old_derives: &[ast::Path],
+ old_tree: &ast::TokenTree,
+ old_trait_path: &ast::Path,
+ replace_trait_path: &ast::Path,
+ trait_: Option<hir::Trait>,
+ adt: &ast::Adt,
+) -> Option<()> {
+ let target = attr.syntax().text_range();
+ let annotated_name = adt.name()?;
+ let label = format!("Convert to manual `impl {} for {}`", replace_trait_path, annotated_name);
+
+ acc.add(
+ AssistId("replace_derive_with_manual_impl", AssistKind::Refactor),
+ label,
+ target,
+ |builder| {
+ let insert_pos = adt.syntax().text_range().end();
+ let impl_def_with_items =
+ impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
+ update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
+ let trait_path = replace_trait_path.to_string();
+ match (ctx.config.snippet_cap, impl_def_with_items) {
+ (None, _) => {
+ builder.insert(insert_pos, generate_trait_impl_text(adt, &trait_path, ""))
+ }
+ (Some(cap), None) => builder.insert_snippet(
+ cap,
+ insert_pos,
+ generate_trait_impl_text(adt, &trait_path, " $0"),
+ ),
+ (Some(cap), Some((impl_def, first_assoc_item))) => {
+ let mut cursor = Cursor::Before(first_assoc_item.syntax());
+ let placeholder;
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+ if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
+ {
+ if m.syntax().text() == "todo!()" {
+ placeholder = m;
+ cursor = Cursor::Replace(placeholder.syntax());
+ }
+ }
+ }
+
+ builder.insert_snippet(
+ cap,
+ insert_pos,
+ format!("\n\n{}", render_snippet(cap, impl_def.syntax(), cursor)),
+ )
+ }
+ };
+ },
+ )
+}
+
+fn impl_def_from_trait(
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ adt: &ast::Adt,
+ annotated_name: &ast::Name,
+ trait_: Option<hir::Trait>,
+ trait_path: &ast::Path,
+) -> Option<(ast::Impl, ast::AssocItem)> {
+ let trait_ = trait_?;
+ let target_scope = sema.scope(annotated_name.syntax())?;
+ let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
+ if trait_items.is_empty() {
+ return None;
+ }
+ let impl_def = {
+ use syntax::ast::Impl;
+ let text = generate_trait_impl_text(adt, trait_path.to_string().as_str(), "");
+ let parse = syntax::SourceFile::parse(&text);
+ let node = match parse.tree().syntax().descendants().find_map(Impl::cast) {
+ Some(it) => it,
+ None => {
+ panic!(
+ "Failed to make ast node `{}` from text {}",
+ std::any::type_name::<Impl>(),
+ text
+ )
+ }
+ };
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+ };
+
+ let trait_items = trait_items
+ .into_iter()
+ .map(|it| {
+ if sema.hir_file_for(it.syntax()).is_macro() {
+ if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
+ return it;
+ }
+ }
+ it.clone_for_update()
+ })
+ .collect();
+ let (impl_def, first_assoc_item) =
+ add_trait_assoc_items_to_impl(sema, trait_items, trait_, impl_def, target_scope);
+
+ // Generate a default `impl` function body for the derived trait.
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+ let _ = gen_trait_fn_body(func, trait_path, adt);
+ };
+
+ Some((impl_def, first_assoc_item))
+}
+
+fn update_attribute(
+ builder: &mut AssistBuilder,
+ old_derives: &[ast::Path],
+ old_tree: &ast::TokenTree,
+ old_trait_path: &ast::Path,
+ attr: &ast::Attr,
+) {
+ let new_derives = old_derives
+ .iter()
+ .filter(|t| t.to_string() != old_trait_path.to_string())
+ .collect::<Vec<_>>();
+ let has_more_derives = !new_derives.is_empty();
+
+ if has_more_derives {
+ let new_derives = format!("({})", new_derives.iter().format(", "));
+ builder.replace(old_tree.syntax().text_range(), new_derives);
+ } else {
+ let attr_range = attr.syntax().text_range();
+ builder.delete(attr_range);
+
+ if let Some(line_break_range) = attr
+ .syntax()
+ .next_sibling_or_token()
+ .filter(|t| t.kind() == WHITESPACE)
+ .map(|t| t.text_range())
+ {
+ builder.delete(line_break_range);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_custom_impl_debug_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo {
+ bar: String,
+}
+"#,
+ r#"
+struct Foo {
+ bar: String,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").field("bar", &self.bar).finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo(String, usize);
+"#,
+ r#"struct Foo(String, usize);
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_tuple("Foo").field(&self.0).field(&self.1).finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar => write!(f, "Bar"),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_debug_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar(usize, usize),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(usize, usize),
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar(arg0, arg1) => f.debug_tuple("Bar").field(arg0).field(arg1).finish(),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar {
+ baz: usize,
+ qux: usize,
+ },
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ baz: usize,
+ qux: usize,
+ },
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar { baz, qux } => f.debug_struct("Bar").field("baz", baz).field("qux", qux).finish(),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo {
+ foo: usize,
+}
+"#,
+ r#"
+struct Foo {
+ foo: usize,
+}
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self { foo: Default::default() }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo(usize);
+"#,
+ r#"
+struct Foo(usize);
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self(Default::default())
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ self.bin.hash(state);
+ self.bar.hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ self.0.hash(state);
+ self.1.hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ core::mem::discriminant(self).hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { bin: self.bin.clone(), bar: self.bar.clone() }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar => Self::Bar,
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar(String),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(String),
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar(arg0) => Self::Bar(arg0.clone()),
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar { bin } => Self::Bar { bin: bin.clone() },
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo {
+ bin: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+}
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ self.bin.partial_cmp(&other.bin)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_record_struct_multi_field() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+ baz: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+ baz: usize,
+}
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ match self.bin.partial_cmp(&other.bin) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ match self.bar.partial_cmp(&other.bar) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ self.baz.partial_cmp(&other.baz)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo(usize, usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize, usize);
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ match self.0.partial_cmp(&other.0) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ match self.1.partial_cmp(&other.1) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ self.2.partial_cmp(&other.2)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ self.bin == other.bin && self.bar == other.bar
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0 && self.1 == other.1
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ core::mem::discriminant(self) == core::mem::discriminant(other)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar(String),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(String),
+ Baz,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Self::Bar(l0), Self::Bar(r0)) => l0 == r0,
+ _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz {
+ qux: String,
+ fez: String,
+ },
+ Qux {},
+ Bin,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz {
+ qux: String,
+ fez: String,
+ },
+ Qux {},
+ Bin,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Self::Bar { bin: l_bin }, Self::Bar { bin: r_bin }) => l_bin == r_bin,
+ (Self::Baz { qux: l_qux, fez: l_fez }, Self::Baz { qux: r_qux, fez: r_fez }) => l_qux == r_qux && l_fez == r_fez,
+ _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_all() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+mod foo {
+ pub trait Bar {
+ type Qux;
+ const Baz: usize = 42;
+ const Fez: usize;
+ fn foo();
+ fn bar() {}
+ }
+}
+
+#[derive($0Bar)]
+struct Foo {
+ bar: String,
+}
+"#,
+ r#"
+mod foo {
+ pub trait Bar {
+ type Qux;
+ const Baz: usize = 42;
+ const Fez: usize;
+ fn foo();
+ fn bar() {}
+ }
+}
+
+struct Foo {
+ bar: String,
+}
+
+impl foo::Bar for Foo {
+ $0type Qux;
+
+ const Baz: usize = 42;
+
+ const Fez: usize;
+
+ fn foo() {
+ todo!()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_for_unique_input_unknown() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Debu$0g)]
+struct Foo {
+ bar: String,
+}
+ "#,
+ r#"
+struct Foo {
+ bar: String,
+}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_for_with_visibility_modifier() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Debug$0)]
+pub struct Foo {
+ bar: String,
+}
+ "#,
+ r#"
+pub struct Foo {
+ bar: String,
+}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_when_multiple_inputs() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Display, Debug$0, Serialize)]
+struct Foo {}
+ "#,
+ r#"
+#[derive(Display, Serialize)]
+struct Foo {}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_default_generic_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo<T, U> {
+ foo: T,
+ bar: U,
+}
+"#,
+ r#"
+struct Foo<T, U> {
+ foo: T,
+ bar: U,
+}
+
+impl<T, U> Default for Foo<T, U> {
+ $0fn default() -> Self {
+ Self { foo: Default::default(), bar: Default::default() }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_generic_tuple_struct_with_bounds() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo<T: Clone>(T, usize);
+"#,
+ r#"
+struct Foo<T: Clone>(T, usize);
+
+impl<T: Clone> Clone for Foo<T> {
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_derive_macro_without_input() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive($0)]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_if_cursor_on_param() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+#[derive$0(Debug)]
+struct Foo {}
+ "#,
+ );
+
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+#[derive(Debug)$0]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_if_not_derive() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[allow(non_camel_$0case_types)]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn works_at_start_of_file() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+$0#[derive(Debug)]
+struct S;
+ "#,
+ );
+ }
+
+ #[test]
+ fn add_custom_impl_keep_path() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(std::fmt::Debug, Clo$0ne)]
+pub struct Foo;
+"#,
+ r#"
+#[derive(std::fmt::Debug)]
+pub struct Foo;
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_replace_path() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(core::fmt::Deb$0ug, Clone)]
+pub struct Foo;
+"#,
+ r#"
+#[derive(Clone)]
+pub struct Foo;
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").finish()
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
new file mode 100644
index 000000000..484c27387
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -0,0 +1,999 @@
+use std::iter::{self, successors};
+
+use either::Either;
+use ide_db::{
+ defs::NameClass,
+ syntax_helpers::node_ext::{is_pattern_cond, single_let},
+ ty_filter::TryEnum,
+ RootDatabase,
+};
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, HasName,
+ },
+ AstNode, TextRange,
+};
+
+use crate::{
+ utils::{does_nested_pattern, does_pat_match_variant, unwrap_trivial_block},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: replace_if_let_with_match
+//
+// Replaces an `if let` expression with a `match` expression.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// $0if let Action::Move { distance } = action {
+// foo(distance)
+// } else {
+// bar()
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// match action {
+// Action::Move { distance } => foo(distance),
+// _ => bar(),
+// }
+// }
+// ```
+pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
+ let available_range = TextRange::new(
+ if_expr.syntax().text_range().start(),
+ if_expr.then_branch()?.syntax().text_range().start(),
+ );
+ let cursor_in_range = available_range.contains_range(ctx.selection_trimmed());
+ if !cursor_in_range {
+ return None;
+ }
+ let mut else_block = None;
+ let if_exprs = successors(Some(if_expr.clone()), |expr| match expr.else_branch()? {
+ ast::ElseBranch::IfExpr(expr) => Some(expr),
+ ast::ElseBranch::Block(block) => {
+ else_block = Some(block);
+ None
+ }
+ });
+ let scrutinee_to_be_expr = if_expr.condition()?;
+ let scrutinee_to_be_expr = match single_let(scrutinee_to_be_expr.clone()) {
+ Some(cond) => cond.expr()?,
+ None => scrutinee_to_be_expr,
+ };
+
+ let mut pat_seen = false;
+ let mut cond_bodies = Vec::new();
+ for if_expr in if_exprs {
+ let cond = if_expr.condition()?;
+ let cond = match single_let(cond.clone()) {
+ Some(let_) => {
+ let pat = let_.pat()?;
+ let expr = let_.expr()?;
+ // FIXME: If one `let` is wrapped in parentheses and the second is not,
+ // we'll exit here.
+ if scrutinee_to_be_expr.syntax().text() != expr.syntax().text() {
+                    // Only if all condition expressions are equal can we merge them into a match
+ return None;
+ }
+ pat_seen = true;
+ Either::Left(pat)
+ }
+ // Multiple `let`, unsupported.
+ None if is_pattern_cond(cond.clone()) => return None,
+ None => Either::Right(cond),
+ };
+ let body = if_expr.then_branch()?;
+ cond_bodies.push((cond, body));
+ }
+
+ if !pat_seen {
+ // Don't offer turning an if (chain) without patterns into a match
+ return None;
+ }
+
+ acc.add(
+ AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
+ "Replace if let with match",
+ available_range,
+ move |edit| {
+ let match_expr = {
+ let else_arm = make_else_arm(ctx, else_block, &cond_bodies);
+ let make_match_arm = |(pat, body): (_, ast::BlockExpr)| {
+ let body = body.reset_indent().indent(IndentLevel(1));
+ match pat {
+ Either::Left(pat) => {
+ make::match_arm(iter::once(pat), None, unwrap_trivial_block(body))
+ }
+ Either::Right(expr) => make::match_arm(
+ iter::once(make::wildcard_pat().into()),
+ Some(expr),
+ unwrap_trivial_block(body),
+ ),
+ }
+ };
+ let arms = cond_bodies.into_iter().map(make_match_arm).chain(iter::once(else_arm));
+ let match_expr = make::expr_match(scrutinee_to_be_expr, make::match_arm_list(arms));
+ match_expr.indent(IndentLevel::from_node(if_expr.syntax()))
+ };
+
+ let has_preceding_if_expr =
+ if_expr.syntax().parent().map_or(false, |it| ast::IfExpr::can_cast(it.kind()));
+ let expr = if has_preceding_if_expr {
+ // make sure we replace the `else if let ...` with a block so we don't end up with `else expr`
+ make::block_expr(None, Some(match_expr)).into()
+ } else {
+ match_expr
+ };
+ edit.replace_ast::<ast::Expr>(if_expr.into(), expr);
+ },
+ )
+}
+
+fn make_else_arm(
+ ctx: &AssistContext<'_>,
+ else_block: Option<ast::BlockExpr>,
+ conditionals: &[(Either<ast::Pat, ast::Expr>, ast::BlockExpr)],
+) -> ast::MatchArm {
+ if let Some(else_block) = else_block {
+ let pattern = if let [(Either::Left(pat), _)] = conditionals {
+ ctx.sema
+ .type_of_pat(pat)
+ .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
+ .zip(Some(pat))
+ } else {
+ None
+ };
+ let pattern = match pattern {
+ Some((it, pat)) => {
+ if does_pat_match_variant(pat, &it.sad_pattern()) {
+ it.happy_pattern_wildcard()
+ } else if does_nested_pattern(pat) {
+ make::wildcard_pat().into()
+ } else {
+ it.sad_pattern()
+ }
+ }
+ None => make::wildcard_pat().into(),
+ };
+ make::match_arm(iter::once(pattern), None, unwrap_trivial_block(else_block))
+ } else {
+ make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit())
+ }
+}
+
+// Assist: replace_match_with_if_let
+//
+// Replaces a two-arm `match` that has a wildcard pattern and no guards with an `if let` expression.
+//
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// $0match action {
+// Action::Move { distance } => foo(distance),
+// _ => bar(),
+// }
+// }
+// ```
+// ->
+// ```
+// enum Action { Move { distance: u32 }, Stop }
+//
+// fn handle(action: Action) {
+// if let Action::Move { distance } = action {
+// foo(distance)
+// } else {
+// bar()
+// }
+// }
+// ```
+pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let match_expr: ast::MatchExpr = ctx.find_node_at_offset()?;
+
+ let mut arms = match_expr.match_arm_list()?.arms();
+ let (first_arm, second_arm) = (arms.next()?, arms.next()?);
+ if arms.next().is_some() || first_arm.guard().is_some() || second_arm.guard().is_some() {
+ return None;
+ }
+
+ let (if_let_pat, then_expr, else_expr) = pick_pattern_and_expr_order(
+ &ctx.sema,
+ first_arm.pat()?,
+ second_arm.pat()?,
+ first_arm.expr()?,
+ second_arm.expr()?,
+ )?;
+ let scrutinee = match_expr.expr()?;
+
+ let target = match_expr.syntax().text_range();
+ acc.add(
+ AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite),
+ "Replace match with if let",
+ target,
+ move |edit| {
+ fn make_block_expr(expr: ast::Expr) -> ast::BlockExpr {
+ // Blocks with modifiers (unsafe, async, etc.) are parsed as BlockExpr, but are
+ // formatted without enclosing braces. If we encounter such block exprs,
+ // wrap them in another BlockExpr.
+ match expr {
+ ast::Expr::BlockExpr(block) if block.modifier().is_none() => block,
+ expr => make::block_expr(iter::empty(), Some(expr)),
+ }
+ }
+
+ let condition = make::expr_let(if_let_pat, scrutinee);
+ let then_block = make_block_expr(then_expr.reset_indent());
+ let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
+ let if_let_expr = make::expr_if(
+ condition.into(),
+ then_block,
+ else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
+ )
+ .indent(IndentLevel::from_node(match_expr.syntax()));
+
+ edit.replace_ast::<ast::Expr>(match_expr.into(), if_let_expr);
+ },
+ )
+}
+
+/// Pick the pattern for the `if let` condition and return the expressions for the `then` body and `else` body, in that order.
+fn pick_pattern_and_expr_order(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ pat: ast::Pat,
+ pat2: ast::Pat,
+ expr: ast::Expr,
+ expr2: ast::Expr,
+) -> Option<(ast::Pat, ast::Expr, ast::Expr)> {
+ let res = match (pat, pat2) {
+ (ast::Pat::WildcardPat(_), _) => return None,
+ (pat, ast::Pat::WildcardPat(_)) => (pat, expr, expr2),
+ (pat, _) if is_empty_expr(&expr2) => (pat, expr, expr2),
+ (_, pat) if is_empty_expr(&expr) => (pat, expr2, expr),
+ (pat, pat2) => match (binds_name(sema, &pat), binds_name(sema, &pat2)) {
+ (true, true) => return None,
+ (true, false) => (pat, expr, expr2),
+ (false, true) => (pat2, expr2, expr),
+ _ if is_sad_pat(sema, &pat) => (pat2, expr2, expr),
+ (false, false) => (pat, expr, expr2),
+ },
+ };
+ Some(res)
+}
+
+fn is_empty_expr(expr: &ast::Expr) -> bool {
+ match expr {
+ ast::Expr::BlockExpr(expr) => match expr.stmt_list() {
+ Some(it) => it.statements().next().is_none() && it.tail_expr().is_none(),
+ None => true,
+ },
+ ast::Expr::TupleExpr(expr) => expr.fields().next().is_none(),
+ _ => false,
+ }
+}
+
+fn binds_name(sema: &hir::Semantics<'_, RootDatabase>, pat: &ast::Pat) -> bool {
+ let binds_name_v = |pat| binds_name(sema, &pat);
+ match pat {
+ ast::Pat::IdentPat(pat) => !matches!(
+ pat.name().and_then(|name| NameClass::classify(sema, &name)),
+ Some(NameClass::ConstReference(_))
+ ),
+ ast::Pat::MacroPat(_) => true,
+ ast::Pat::OrPat(pat) => pat.pats().any(binds_name_v),
+ ast::Pat::SlicePat(pat) => pat.pats().any(binds_name_v),
+ ast::Pat::TuplePat(it) => it.fields().any(binds_name_v),
+ ast::Pat::TupleStructPat(it) => it.fields().any(binds_name_v),
+ ast::Pat::RecordPat(it) => it
+ .record_pat_field_list()
+ .map_or(false, |rpfl| rpfl.fields().flat_map(|rpf| rpf.pat()).any(binds_name_v)),
+ ast::Pat::RefPat(pat) => pat.pat().map_or(false, binds_name_v),
+ ast::Pat::BoxPat(pat) => pat.pat().map_or(false, binds_name_v),
+ ast::Pat::ParenPat(pat) => pat.pat().map_or(false, binds_name_v),
+ _ => false,
+ }
+}
+
+fn is_sad_pat(sema: &hir::Semantics<'_, RootDatabase>, pat: &ast::Pat) -> bool {
+ sema.type_of_pat(pat)
+ .and_then(|ty| TryEnum::from_ty(sema, &ty.adjusted()))
+ .map_or(false, |it| does_pat_match_variant(pat, &it.sad_pattern()))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn test_if_let_with_match_unapplicable_for_simple_ifs() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if $0true {} else if false {} else {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_no_else() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ if $0let VariantData::Struct(..) = *self {
+ self.foo();
+ }
+ }
+}
+"#,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ match *self {
+ VariantData::Struct(..) => {
+ self.foo();
+ }
+ _ => (),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_available_range_left() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ $0 if let VariantData::Struct(..) = *self {
+ self.foo();
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_available_range_right() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn foo(&self) {
+ if let VariantData::Struct(..) = *self {$0
+ self.foo();
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_let_chain() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if $0let true = true && let Some(1) = None {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_basic() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if $0let VariantData::Struct(..) = *self {
+ true
+ } else if let VariantData::Tuple(..) = *self {
+ false
+ } else if cond() {
+ true
+ } else {
+ bar(
+ 123
+ )
+ }
+ }
+}
+"#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ match *self {
+ VariantData::Struct(..) => true,
+ VariantData::Tuple(..) => false,
+ _ if cond() => true,
+ _ => {
+ bar(
+ 123
+ )
+ }
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_if_let_with_match_on_tail_if_let() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else if let$0 VariantData::Tuple(..) = *self {
+ false
+ } else {
+ false
+ }
+ }
+}
+"#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else {
+ match *self {
+ VariantData::Tuple(..) => false,
+ _ => false,
+ }
+}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn special_case_option() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: option
+fn foo(x: Option<i32>) {
+ $0if let Some(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ r#"
+fn foo(x: Option<i32>) {
+ match x {
+ Some(x) => println!("{}", x),
+ None => println!("none"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_inverted_option() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: option
+fn foo(x: Option<i32>) {
+ $0if let None = x {
+ println!("none")
+ } else {
+ println!("some")
+ }
+}
+"#,
+ r#"
+fn foo(x: Option<i32>) {
+ match x {
+ None => println!("none"),
+ Some(_) => println!("some"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_result() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ $0if let Ok(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ match x {
+ Ok(x) => println!("{}", x),
+ Err(_) => println!("none"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_inverted_result() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ $0if let Err(x) = x {
+ println!("{}", x)
+ } else {
+ println!("ok")
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ match x {
+ Err(x) => println!("{}", x),
+ Ok(_) => println!("ok"),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_indent() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if true {
+ $0if let Ok(rel_path) = path.strip_prefix(root_path) {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ } else {
+ None
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ match path.strip_prefix(root_path) {
+ Ok(rel_path) => {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ _ => None,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn nested_type() {
+ check_assist(
+ replace_if_let_with_match,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ let bar: Result<_, ()> = Ok(Some(1));
+ $0if let Ok(Some(_)) = bar {
+ ()
+ } else {
+ ()
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ let bar: Result<_, ()> = Ok(Some(1));
+ match bar {
+ Ok(Some(_)) => (),
+ _ => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_unwraps_simple_expressions() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ $0match *self {
+ VariantData::Struct(..) => true,
+ _ => false,
+ }
+ }
+} "#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else {
+ false
+ }
+ }
+} "#,
+ )
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_doesnt_unwrap_multiline_expressions() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ $0match a {
+ VariantData::Struct(..) => {
+ bar(
+ 123
+ )
+ }
+ _ => false,
+ }
+} "#,
+ r#"
+fn foo() {
+ if let VariantData::Struct(..) = a {
+ bar(
+ 123
+ )
+ } else {
+ false
+ }
+} "#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_target() {
+ check_assist_target(
+ replace_match_with_if_let,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ $0match *self {
+ VariantData::Struct(..) => true,
+ _ => false,
+ }
+ }
+} "#,
+ r#"match *self {
+ VariantData::Struct(..) => true,
+ _ => false,
+ }"#,
+ );
+ }
+
+ #[test]
+ fn special_case_option_match_to_if_let() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+//- minicore: option
+fn foo(x: Option<i32>) {
+ $0match x {
+ Some(x) => println!("{}", x),
+ None => println!("none"),
+ }
+}
+"#,
+ r#"
+fn foo(x: Option<i32>) {
+ if let Some(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn special_case_result_match_to_if_let() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+//- minicore: result
+fn foo(x: Result<i32, ()>) {
+ $0match x {
+ Ok(x) => println!("{}", x),
+ Err(_) => println!("none"),
+ }
+}
+"#,
+ r#"
+fn foo(x: Result<i32, ()>) {
+ if let Ok(x) = x {
+ println!("{}", x)
+ } else {
+ println!("none")
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_indent_match_to_if_let() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ if true {
+ $0match path.strip_prefix(root_path) {
+ Ok(rel_path) => {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ _ => None,
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ if let Ok(rel_path) = path.strip_prefix(root_path) {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ } else {
+ None
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_empty_wildcard_expr() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ $0match path.strip_prefix(root_path) {
+ Ok(rel_path) => println!("{}", rel_path),
+ _ => (),
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let Ok(rel_path) = path.strip_prefix(root_path) {
+ println!("{}", rel_path)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_number_body() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ $0match Ok(()) {
+ Ok(()) => {},
+ Err(_) => 0,
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let Err(_) = Ok(()) {
+ 0
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_exhaustive() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn print_source(def_source: ModuleSource) {
+ match def_so$0urce {
+ ModuleSource::SourceFile(..) => { println!("source file"); }
+ ModuleSource::Module(..) => { println!("module"); }
+ }
+}
+"#,
+ r#"
+fn print_source(def_source: ModuleSource) {
+ if let ModuleSource::SourceFile(..) = def_source { println!("source file"); } else { println!("module"); }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_match_with_if_let_prefer_name_bind() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Foo(0) {
+ Foo(_) => (),
+ Bar(bar) => println!("bar {}", bar),
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Bar(bar) = Foo(0) {
+ println!("bar {}", bar)
+ }
+}
+"#,
+ );
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Foo(0) {
+ Bar(bar) => println!("bar {}", bar),
+ Foo(_) => (),
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Bar(bar) = Foo(0) {
+ println!("bar {}", bar)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_match_with_if_let_prefer_nonempty_body() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Ok(0) {
+ Ok(value) => {},
+ Err(err) => eprintln!("{}", err),
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Err(err) = Ok(0) {
+ eprintln!("{}", err)
+ }
+}
+"#,
+ );
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Ok(0) {
+ Err(err) => eprintln!("{}", err),
+ Ok(value) => {},
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if let Err(err) = Ok(0) {
+ eprintln!("{}", err)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_match_with_if_let_rejects_double_name_bindings() {
+ check_assist_not_applicable(
+ replace_match_with_if_let,
+ r#"
+fn foo() {
+ match $0Foo(0) {
+ Foo(foo) => println!("bar {}", foo),
+ Bar(bar) => println!("bar {}", bar),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_keeps_unsafe_block() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ $0match *self {
+ VariantData::Struct(..) => true,
+ _ => unsafe { unreachable_unchecked() },
+ }
+ }
+} "#,
+ r#"
+impl VariantData {
+ pub fn is_struct(&self) -> bool {
+ if let VariantData::Struct(..) = *self {
+ true
+ } else {
+ unsafe { unreachable_unchecked() }
+ }
+ }
+} "#,
+ )
+ }
+
+ #[test]
+ fn test_replace_match_with_if_let_forces_else() {
+ check_assist(
+ replace_match_with_if_let,
+ r#"
+fn main() {
+ match$0 0 {
+ 0 => (),
+ _ => code(),
+ }
+}
+"#,
+ r#"
+fn main() {
+ if let 0 = 0 {
+ ()
+ } else {
+ code()
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
new file mode 100644
index 000000000..c2be4593b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
@@ -0,0 +1,100 @@
+use std::iter::once;
+
+use ide_db::ty_filter::TryEnum;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ },
+ AstNode, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_let_with_if_let
+//
+// Replaces `let` with an `if let`.
+//
+// ```
+// # enum Option<T> { Some(T), None }
+//
+// fn main(action: Action) {
+// $0let x = compute();
+// }
+//
+// fn compute() -> Option<i32> { None }
+// ```
+// ->
+// ```
+// # enum Option<T> { Some(T), None }
+//
+// fn main(action: Action) {
+// if let Some(x) = compute() {
+// }
+// }
+//
+// fn compute() -> Option<i32> { None }
+// ```
+pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let let_kw = ctx.find_token_syntax_at_offset(T![let])?;
+ let let_stmt = let_kw.parent().and_then(ast::LetStmt::cast)?;
+ let init = let_stmt.initializer()?;
+ let original_pat = let_stmt.pat()?;
+
+ let target = let_kw.text_range();
+ acc.add(
+ AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite),
+ "Replace let with if let",
+ target,
+ |edit| {
+ let ty = ctx.sema.type_of_expr(&init);
+ let happy_variant = ty
+ .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
+ .map(|it| it.happy_case());
+ let pat = match happy_variant {
+ None => original_pat,
+ Some(var_name) => {
+ make::tuple_struct_pat(make::ext::ident_path(var_name), once(original_pat))
+ .into()
+ }
+ };
+
+ let block =
+ make::ext::empty_block_expr().indent(IndentLevel::from_node(let_stmt.syntax()));
+ let if_ = make::expr_if(make::expr_let(pat, init).into(), block, None);
+ let stmt = make::expr_stmt(if_);
+
+ edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt));
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn replace_let_unknown_enum() {
+ check_assist(
+ replace_let_with_if_let,
+ r"
+enum E<T> { X(T), Y(T) }
+
+fn main() {
+ $0let x = E::X(92);
+}
+ ",
+ r"
+enum E<T> { X(T), Y(T) }
+
+fn main() {
+ if let x = E::X(92) {
+ }
+}
+ ",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
new file mode 100644
index 000000000..2419fa11c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -0,0 +1,438 @@
+use hir::AsAssocItem;
+use ide_db::{
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+};
+use syntax::{
+ ast::{self, make},
+ match_ast, ted, AstNode, SyntaxNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_qualified_name_with_use
+//
+// Adds a use statement for a given fully-qualified name.
+//
+// ```
+// # mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+// fn process(map: std::collections::$0HashMap<String, String>) {}
+// ```
+// ->
+// ```
+// use std::collections::HashMap;
+//
+// # mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+// fn process(map: HashMap<String, String>) {}
+// ```
+pub(crate) fn replace_qualified_name_with_use(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let path: ast::Path = ctx.find_node_at_offset()?;
+ // We don't want to mess with use statements
+ if path.syntax().ancestors().find_map(ast::UseTree::cast).is_some() {
+ cov_mark::hit!(not_applicable_in_use);
+ return None;
+ }
+
+ if path.qualifier().is_none() {
+ cov_mark::hit!(dont_import_trivial_paths);
+ return None;
+ }
+
+    // only offer replacement for non-assoc items
+ match ctx.sema.resolve_path(&path)? {
+ hir::PathResolution::Def(def) if def.as_assoc_item(ctx.sema.db).is_none() => (),
+ _ => return None,
+ }
+ // then search for an import for the first path segment of what we want to replace
+    // that way it is less likely that we import the item from a different location due to re-exports
+ let module = match ctx.sema.resolve_path(&path.first_qualifier_or_self())? {
+ hir::PathResolution::Def(module @ hir::ModuleDef::Module(_)) => module,
+ _ => return None,
+ };
+
+ let starts_with_name_ref = !matches!(
+ path.first_segment().and_then(|it| it.kind()),
+ Some(
+ ast::PathSegmentKind::CrateKw
+ | ast::PathSegmentKind::SuperKw
+ | ast::PathSegmentKind::SelfKw
+ )
+ );
+ let path_to_qualifier = starts_with_name_ref
+ .then(|| {
+ ctx.sema.scope(path.syntax())?.module().find_use_path_prefixed(
+ ctx.sema.db,
+ module,
+ ctx.config.insert_use.prefix_kind,
+ )
+ })
+ .flatten();
+
+ let scope = ImportScope::find_insert_use_container(path.syntax(), &ctx.sema)?;
+ let target = path.syntax().text_range();
+ acc.add(
+ AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),
+ "Replace qualified path with use",
+ target,
+ |builder| {
+            // Now that we've brought the name into scope, shorten all paths that could be
+            // affected (that is, all paths inside the node we added the `use` to).
+ let scope = match scope {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+ shorten_paths(scope.as_syntax_node(), &path);
+ let path = drop_generic_args(&path);
+            // Stick the found import in front of the path to be replaced.
+ let path = match path_to_qualifier.and_then(|it| mod_path_to_ast(&it).qualifier()) {
+ Some(qualifier) => make::path_concat(qualifier, path),
+ None => path,
+ };
+ insert_use(&scope, path, &ctx.config.insert_use);
+ },
+ )
+}
+
+fn drop_generic_args(path: &ast::Path) -> ast::Path {
+ let path = path.clone_for_update();
+ if let Some(segment) = path.segment() {
+ if let Some(generic_args) = segment.generic_arg_list() {
+ ted::remove(generic_args.syntax());
+ }
+ }
+ path
+}
+
+/// Mutates `node` to shorten `path` in all descendants of `node`.
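+///
+/// For example (a sketch of the effect; compare the `replaces_all_affected_paths` test below):
+/// with `path` being `std::fmt::Debug`, every `std::fmt::Debug` occurring inside `node` is
+/// shortened to just `Debug`, while `use` items and submodules are skipped.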
+fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
+ for child in node.children() {
+ match_ast! {
+ match child {
+ // Don't modify `use` items, as this can break the `use` item when injecting a new
+ // import into the use tree.
+ ast::Use(_) => continue,
+ // Don't descend into submodules, they don't have the same `use` items in scope.
+ // FIXME: This isn't true due to `super::*` imports?
+ ast::Module(_) => continue,
+ ast::Path(p) => if maybe_replace_path(p.clone(), path.clone()).is_none() {
+ shorten_paths(p.syntax(), path);
+ },
+ _ => shorten_paths(&child, path),
+ }
+ }
+ }
+}
+
+fn maybe_replace_path(path: ast::Path, target: ast::Path) -> Option<()> {
+ if !path_eq_no_generics(path.clone(), target) {
+ return None;
+ }
+
+ // Shorten `path`, leaving only its last segment.
+ if let Some(parent) = path.qualifier() {
+ ted::remove(parent.syntax());
+ }
+ if let Some(double_colon) = path.coloncolon_token() {
+ ted::remove(&double_colon);
+ }
+
+ Some(())
+}
+
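+/// Compares two paths segment by segment by the text of their segment names, ignoring any
+/// generic arguments; qualifiers are walked in lockstep, so both paths must also have the
+/// same number of segments to be considered equal.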
+fn path_eq_no_generics(lhs: ast::Path, rhs: ast::Path) -> bool {
+ let mut lhs_curr = lhs;
+ let mut rhs_curr = rhs;
+ loop {
+ match lhs_curr.segment().zip(rhs_curr.segment()) {
+ Some((lhs, rhs))
+ if lhs.coloncolon_token().is_some() == rhs.coloncolon_token().is_some()
+ && lhs
+ .name_ref()
+ .zip(rhs.name_ref())
+ .map_or(false, |(lhs, rhs)| lhs.text() == rhs.text()) => {}
+ _ => return false,
+ }
+
+ match (lhs_curr.qualifier(), rhs_curr.qualifier()) {
+ (Some(lhs), Some(rhs)) => {
+ lhs_curr = lhs;
+ rhs_curr = rhs;
+ }
+ (None, None) => return true,
+ _ => return false,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_replace_already_imported() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod fs { pub struct Path; } }
+use std::fs;
+
+fn main() {
+ std::f$0s::Path
+}",
+ r"
+mod std { pub mod fs { pub struct Path; } }
+use std::fs;
+
+fn main() {
+ fs::Path
+}",
+ )
+ }
+
+ #[test]
+ fn test_replace_add_use_no_anchor() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod fs { pub struct Path; } }
+std::fs::Path$0
+ ",
+ r"
+use std::fs::Path;
+
+mod std { pub mod fs { pub struct Path; } }
+Path
+ ",
+ );
+ }
+
+ #[test]
+ fn test_replace_add_use_no_anchor_middle_segment() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod fs { pub struct Path; } }
+std::fs$0::Path
+ ",
+ r"
+use std::fs;
+
+mod std { pub mod fs { pub struct Path; } }
+fs::Path
+ ",
+ );
+ }
+
+ #[test]
+ fn dont_import_trivial_paths() {
+ cov_mark::check!(dont_import_trivial_paths);
+ check_assist_not_applicable(replace_qualified_name_with_use, r"impl foo$0 for () {}");
+ }
+
+ #[test]
+ fn test_replace_not_applicable_in_use() {
+ cov_mark::check!(not_applicable_in_use);
+ check_assist_not_applicable(replace_qualified_name_with_use, r"use std::fmt$0;");
+ }
+
+ #[test]
+ fn replaces_all_affected_paths() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod fmt { pub trait Debug {} } }
+fn main() {
+ std::fmt::Debug$0;
+ let x: std::fmt::Debug = std::fmt::Debug;
+}
+ ",
+ r"
+use std::fmt::Debug;
+
+mod std { pub mod fmt { pub trait Debug {} } }
+fn main() {
+ Debug;
+ let x: Debug = Debug;
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn does_not_replace_in_submodules() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod fmt { pub trait Debug {} } }
+fn main() {
+ std::fmt::Debug$0;
+}
+
+mod sub {
+ fn f() {
+ std::fmt::Debug;
+ }
+}
+ ",
+ r"
+use std::fmt::Debug;
+
+mod std { pub mod fmt { pub trait Debug {} } }
+fn main() {
+ Debug;
+}
+
+mod sub {
+ fn f() {
+ std::fmt::Debug;
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn does_not_replace_in_use() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod fmt { pub trait Display {} } }
+use std::fmt::Display;
+
+fn main() {
+ std::fmt$0;
+}
+ ",
+ r"
+mod std { pub mod fmt { pub trait Display {} } }
+use std::fmt::{Display, self};
+
+fn main() {
+ fmt;
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn does_not_replace_assoc_item_path() {
+ check_assist_not_applicable(
+ replace_qualified_name_with_use,
+ r"
+pub struct Foo;
+impl Foo {
+ pub fn foo() {}
+}
+
+fn main() {
+ Foo::foo$0();
+}
+",
+ );
+ }
+
+ #[test]
+ fn replace_reuses_path_qualifier() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+pub mod foo {
+ pub struct Foo;
+}
+
+mod bar {
+ pub use super::foo::Foo as Bar;
+}
+
+fn main() {
+ foo::Foo$0;
+}
+",
+ r"
+use foo::Foo;
+
+pub mod foo {
+ pub struct Foo;
+}
+
+mod bar {
+ pub use super::foo::Foo as Bar;
+}
+
+fn main() {
+ Foo;
+}
+",
+ );
+ }
+
+ #[test]
+ fn replace_does_not_always_try_to_replace_by_full_item_path() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+use std::mem;
+
+mod std {
+ pub mod mem {
+ pub fn drop<T>(_: T) {}
+ }
+}
+
+fn main() {
+ mem::drop$0(0);
+}
+",
+ r"
+use std::mem::{self, drop};
+
+mod std {
+ pub mod mem {
+ pub fn drop<T>(_: T) {}
+ }
+}
+
+fn main() {
+ drop(0);
+}
+",
+ );
+ }
+
+ #[test]
+ fn replace_should_drop_generic_args_in_use() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std {
+ pub mod mem {
+ pub fn drop<T>(_: T) {}
+ }
+}
+
+fn main() {
+ std::mem::drop::<usize>$0(0);
+}
+",
+ r"
+use std::mem::drop;
+
+mod std {
+ pub mod mem {
+ pub fn drop<T>(_: T) {}
+ }
+}
+
+fn main() {
+ drop::<usize>(0);
+}
+",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
new file mode 100644
index 000000000..decb5fb62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
@@ -0,0 +1,307 @@
+use syntax::{
+ ast,
+ ast::IsString,
+ AstToken,
+ SyntaxKind::{CHAR, STRING},
+ TextRange, TextSize,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_string_with_char
+//
+// Replace string literal with char literal.
+//
+// ```
+// fn main() {
+// find("{$0");
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// find('{');
+// }
+// ```
+pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_syntax_at_offset(STRING).and_then(ast::String::cast)?;
+ let value = token.value()?;
+ let target = token.syntax().text_range();
+
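+    // Only a string containing exactly one character can be turned into a char literal;
+    // `take(2)` keeps the check cheap for long strings.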
+ if value.chars().take(2).count() != 1 {
+ return None;
+ }
+    let quote_offsets = token.quote_offsets()?;
+
+ acc.add(
+ AssistId("replace_string_with_char", AssistKind::RefactorRewrite),
+ "Replace string with char",
+ target,
+ |edit| {
+            let (left, right) = quote_offsets.quotes;
+ edit.replace(left, '\'');
+ edit.replace(right, '\'');
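+            // A string holding a lone single quote, `"'"`, must become `'\''`, so escape it
+            // (this is what the `replace_string_with_char_quote` test below exercises).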
+ if value == "'" {
+ edit.insert(left.end(), '\\');
+ }
+ },
+ )
+}
+
+// Assist: replace_char_with_string
+//
+// Replace a char literal with a string literal.
+//
+// ```
+// fn main() {
+// find('{$0');
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// find("{");
+// }
+// ```
+pub(crate) fn replace_char_with_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let token = ctx.find_token_syntax_at_offset(CHAR)?;
+ let target = token.text_range();
+
+ acc.add(
+ AssistId("replace_char_with_string", AssistKind::RefactorRewrite),
+ "Replace char with string",
+ target,
+ |edit| {
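+            // A char literal holding a double quote, `'"'`, needs the quote escaped in the
+            // resulting string (see the `replace_char_with_string_quote` test below); in all
+            // other cases only the delimiters are swapped.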
+ if token.text() == "'\"'" {
+ edit.replace(token.text_range(), r#""\"""#);
+ } else {
+ let len = TextSize::of('\'');
+ edit.replace(TextRange::at(target.start(), len), '"');
+ edit.replace(TextRange::at(target.end() - len, len), '"');
+ }
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn replace_string_with_char_assist() {
+ check_assist(
+ replace_string_with_char,
+ r#"
+fn f() {
+ let s = "$0c";
+}
+"#,
+ r##"
+fn f() {
+ let s = 'c';
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn replace_string_with_char_assist_with_multi_byte_char() {
+ check_assist(
+ replace_string_with_char,
+ r#"
+fn f() {
+ let s = "$0😀";
+}
+"#,
+ r##"
+fn f() {
+ let s = '😀';
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn replace_string_with_char_multiple_chars() {
+ check_assist_not_applicable(
+ replace_string_with_char,
+ r#"
+fn f() {
+ let s = "$0test";
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_string_with_char_works_inside_macros() {
+ check_assist(
+ replace_string_with_char,
+ r#"
+fn f() {
+ format!($0"x", 92)
+}
+"#,
+ r##"
+fn f() {
+ format!('x', 92)
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn replace_string_with_char_newline() {
+ check_assist(
+ replace_string_with_char,
+ r#"
+fn f() {
+ find($0"\n");
+}
+"#,
+ r##"
+fn f() {
+ find('\n');
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn replace_string_with_char_unicode_escape() {
+ check_assist(
+ replace_string_with_char,
+ r#"
+fn f() {
+ find($0"\u{7FFF}");
+}
+"#,
+ r##"
+fn f() {
+ find('\u{7FFF}');
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn replace_raw_string_with_char() {
+ check_assist(
+ replace_string_with_char,
+ r##"
+fn f() {
+ $0r#"X"#
+}
+"##,
+ r##"
+fn f() {
+ 'X'
+}
+"##,
+ )
+ }
+
+ #[test]
+ fn replace_char_with_string_assist() {
+ check_assist(
+ replace_char_with_string,
+ r"
+fn f() {
+ let s = '$0c';
+}
+",
+ r#"
+fn f() {
+ let s = "c";
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_char_with_string_assist_with_multi_byte_char() {
+ check_assist(
+ replace_char_with_string,
+ r"
+fn f() {
+ let s = '$0😀';
+}
+",
+ r#"
+fn f() {
+ let s = "😀";
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_char_with_string_newline() {
+ check_assist(
+ replace_char_with_string,
+ r"
+fn f() {
+ find($0'\n');
+}
+",
+ r#"
+fn f() {
+ find("\n");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_char_with_string_unicode_escape() {
+ check_assist(
+ replace_char_with_string,
+ r"
+fn f() {
+ find($0'\u{7FFF}');
+}
+",
+ r#"
+fn f() {
+ find("\u{7FFF}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_char_with_string_quote() {
+ check_assist(
+ replace_char_with_string,
+ r#"
+fn f() {
+ find($0'"');
+}
+"#,
+ r#"
+fn f() {
+ find("\"");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_string_with_char_quote() {
+ check_assist(
+ replace_string_with_char,
+ r#"
+fn f() {
+ find($0"'");
+}
+"#,
+ r#"
+fn f() {
+ find('\'');
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
new file mode 100644
index 000000000..38fccb338
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
@@ -0,0 +1,150 @@
+use std::iter;
+
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ ty_filter::TryEnum,
+};
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ },
+ AstNode, T,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: replace_try_expr_with_match
+//
+// Replaces a `try` expression with a `match` expression.
+//
+// ```
+// # //- minicore:option
+// fn handle() {
+// let pat = Some(true)$0?;
+// }
+// ```
+// ->
+// ```
+// fn handle() {
+// let pat = match Some(true) {
+// Some(it) => it,
+// None => return None,
+// };
+// }
+// ```
+pub(crate) fn replace_try_expr_with_match(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let qm_kw = ctx.find_token_syntax_at_offset(T![?])?;
+ let qm_kw_parent = qm_kw.parent().and_then(ast::TryExpr::cast)?;
+
+ let expr = qm_kw_parent.expr()?;
+ let expr_type_info = ctx.sema.type_of_expr(&expr)?;
+
+ let try_enum = TryEnum::from_ty(&ctx.sema, &expr_type_info.original)?;
+
+ let target = qm_kw_parent.syntax().text_range();
+ acc.add(
+ AssistId("replace_try_expr_with_match", AssistKind::RefactorRewrite),
+ "Replace try expression with match",
+ target,
+ |edit| {
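+            // Build the failure arm first: `None => return None` for `Option`, and
+            // `Err(err) => return Err(err)` for `Result`, matching the tests below.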
+ let sad_pat = match try_enum {
+ TryEnum::Option => make::path_pat(make::ext::ident_path("None")),
+ TryEnum::Result => make::tuple_struct_pat(
+ make::ext::ident_path("Err"),
+ iter::once(make::path_pat(make::ext::ident_path("err"))),
+ )
+ .into(),
+ };
+ let sad_expr = match try_enum {
+ TryEnum::Option => {
+ make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
+ }
+ TryEnum::Result => make::expr_return(Some(make::expr_call(
+ make::expr_path(make::ext::ident_path("Err")),
+ make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
+ ))),
+ };
+
+ let happy_arm = make::match_arm(
+ iter::once(
+ try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
+ ),
+ None,
+ make::expr_path(make::ext::ident_path("it")),
+ );
+ let sad_arm = make::match_arm(iter::once(sad_pat), None, sad_expr);
+
+ let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);
+
+ let expr_match = make::expr_match(expr, match_arm_list)
+ .indent(IndentLevel::from_node(qm_kw_parent.syntax()));
+ edit.replace_ast::<ast::Expr>(qm_kw_parent.into(), expr_match);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_replace_try_expr_with_match_not_applicable() {
+ check_assist_not_applicable(
+ replace_try_expr_with_match,
+ r#"
+ fn test() {
+ let pat: u32 = 25$0;
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_replace_try_expr_with_match_option() {
+ check_assist(
+ replace_try_expr_with_match,
+ r#"
+//- minicore:option
+fn test() {
+ let pat = Some(true)$0?;
+}
+ "#,
+ r#"
+fn test() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_replace_try_expr_with_match_result() {
+ check_assist(
+ replace_try_expr_with_match,
+ r#"
+//- minicore:result
+fn test() {
+ let pat = Ok(true)$0?;
+}
+ "#,
+ r#"
+fn test() {
+ let pat = match Ok(true) {
+ Ok(it) => it,
+ Err(err) => return Err(err),
+ };
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
new file mode 100644
index 000000000..6112e0945
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
@@ -0,0 +1,243 @@
+use syntax::{
+ ast::{Expr, GenericArg},
+ ast::{LetStmt, Type::InferType},
+ AstNode, TextRange,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: replace_turbofish_with_explicit_type
+//
+// Converts `::<_>` to an explicit type annotation.
+//
+// ```
+// fn make<T>() -> T {}
+// fn main() {
+// let a = make$0::<i32>();
+// }
+// ```
+// ->
+// ```
+// fn make<T>() -> T {}
+// fn main() {
+// let a: i32 = make();
+// }
+// ```
+pub(crate) fn replace_turbofish_with_explicit_type(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let let_stmt = ctx.find_node_at_offset::<LetStmt>()?;
+
+ let initializer = let_stmt.initializer()?;
+
+ let generic_args = match &initializer {
+ Expr::MethodCallExpr(ce) => ce.generic_arg_list()?,
+ Expr::CallExpr(ce) => {
+ if let Expr::PathExpr(pe) = ce.expr()? {
+ pe.path()?.segment()?.generic_arg_list()?
+ } else {
+ cov_mark::hit!(not_applicable_if_non_path_function_call);
+ return None;
+ }
+ }
+ _ => {
+ cov_mark::hit!(not_applicable_if_non_function_call_initializer);
+ return None;
+ }
+ };
+
+ // Find range of ::<_>
+ let colon2 = generic_args.coloncolon_token()?;
+ let r_angle = generic_args.r_angle_token()?;
+ let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end());
+
+ let turbofish_args: Vec<GenericArg> = generic_args.generic_args().into_iter().collect();
+
+ // Find type of ::<_>
+ if turbofish_args.len() != 1 {
+ cov_mark::hit!(not_applicable_if_not_single_arg);
+ return None;
+ }
+
+ // An improvement would be to check that this is correctly part of the return value of the
+ // function call, or sub in the actual return type.
+ let turbofish_type = &turbofish_args[0];
+
+ let initializer_start = initializer.syntax().text_range().start();
+ if ctx.offset() > turbofish_range.end() || ctx.offset() < initializer_start {
+ cov_mark::hit!(not_applicable_outside_turbofish);
+ return None;
+ }
+
+ if let None = let_stmt.colon_token() {
+ // If there's no colon in a let statement, then there is no explicit type.
+ // let x = fn::<...>();
+ let ident_range = let_stmt.pat()?.syntax().text_range();
+
+ return acc.add(
+ AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite),
+ "Replace turbofish with explicit type",
+ TextRange::new(initializer_start, turbofish_range.end()),
+ |builder| {
+ builder.insert(ident_range.end(), format!(": {}", turbofish_type));
+ builder.delete(turbofish_range);
+ },
+ );
+ } else if let Some(InferType(t)) = let_stmt.ty() {
+        // If there's a type inference underscore, we can offer to replace it with the type
+        // from the turbofish.
+ // let x: _ = fn::<...>();
+ let underscore_range = t.syntax().text_range();
+
+ return acc.add(
+ AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite),
+ "Replace `_` with turbofish type",
+ turbofish_range,
+ |builder| {
+ builder.replace(underscore_range, turbofish_type.to_string());
+ builder.delete(turbofish_range);
+ },
+ );
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ #[test]
+ fn replaces_turbofish_for_vec_string() {
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a = make$0::<Vec<String>>();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a: Vec<String> = make();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replaces_method_calls() {
+ // foo.make() is a method call which uses a different expr in the let initializer
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a = foo.make$0::<Vec<String>>();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a: Vec<String> = foo.make();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_turbofish_target() {
+ check_assist_target(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a = $0make::<Vec<String>>();
+}
+"#,
+ r#"make::<Vec<String>>"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_outside_turbofish() {
+ cov_mark::check!(not_applicable_outside_turbofish);
+ check_assist_not_applicable(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let $0a = make::<Vec<String>>();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_inferred_type_placeholder() {
+ check_assist(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a: _ = make$0::<Vec<String>>();
+}
+"#,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a: Vec<String> = make();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_constant_initializer() {
+ cov_mark::check!(not_applicable_if_non_function_call_initializer);
+ check_assist_not_applicable(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a = "foo"$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_non_path_function_call() {
+ cov_mark::check!(not_applicable_if_non_path_function_call);
+ check_assist_not_applicable(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ $0let a = (|| {})();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn non_applicable_multiple_generic_args() {
+ cov_mark::check!(not_applicable_if_not_single_arg);
+ check_assist_not_applicable(
+ replace_turbofish_with_explicit_type,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let a = make$0::<Vec<String>, i32>();
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
new file mode 100644
index 000000000..a93704b39
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
@@ -0,0 +1,588 @@
+use std::cmp::Ordering;
+
+use itertools::Itertools;
+
+use syntax::{
+ ast::{self, HasName},
+ ted, AstNode, TextRange,
+};
+
+use crate::{utils::get_methods, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: sort_items
+//
+// Sorts item members alphabetically: fields, enum variants and methods.
+//
+// ```
+// struct $0Foo$0 { second: u32, first: String }
+// ```
+// ->
+// ```
+// struct Foo { first: String, second: u32 }
+// ```
+// ---
+// ```
+// trait $0Bar$0 {
+// fn second(&self) -> u32;
+// fn first(&self) -> String;
+// }
+// ```
+// ->
+// ```
+// trait Bar {
+// fn first(&self) -> String;
+// fn second(&self) -> u32;
+// }
+// ```
+// ---
+// ```
+// struct Baz;
+// impl $0Baz$0 {
+// fn second(&self) -> u32;
+// fn first(&self) -> String;
+// }
+// ```
+// ->
+// ```
+// struct Baz;
+// impl Baz {
+// fn first(&self) -> String;
+// fn second(&self) -> u32;
+// }
+// ```
+// ---
+// There is a difference between sorting enum variants:
+//
+// ```
+// enum $0Animal$0 {
+// Dog(String, f64),
+// Cat { weight: f64, name: String },
+// }
+// ```
+// ->
+// ```
+// enum Animal {
+// Cat { weight: f64, name: String },
+// Dog(String, f64),
+// }
+// ```
+// and sorting a single enum struct variant:
+//
+// ```
+// enum Animal {
+// Dog(String, f64),
+// Cat $0{ weight: f64, name: String }$0,
+// }
+// ```
+// ->
+// ```
+// enum Animal {
+// Dog(String, f64),
+// Cat { name: String, weight: f64 },
+// }
+// ```
+pub(crate) fn sort_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if ctx.has_empty_selection() {
+ cov_mark::hit!(not_applicable_if_no_selection);
+ return None;
+ }
+
+ if let Some(trait_ast) = ctx.find_node_at_offset::<ast::Trait>() {
+ add_sort_methods_assist(acc, trait_ast.assoc_item_list()?)
+ } else if let Some(impl_ast) = ctx.find_node_at_offset::<ast::Impl>() {
+ add_sort_methods_assist(acc, impl_ast.assoc_item_list()?)
+ } else if let Some(struct_ast) = ctx.find_node_at_offset::<ast::Struct>() {
+ add_sort_field_list_assist(acc, struct_ast.field_list())
+ } else if let Some(union_ast) = ctx.find_node_at_offset::<ast::Union>() {
+ add_sort_fields_assist(acc, union_ast.record_field_list()?)
+ } else if let Some(variant_ast) = ctx.find_node_at_offset::<ast::Variant>() {
+ add_sort_field_list_assist(acc, variant_ast.field_list())
+ } else if let Some(enum_struct_variant_ast) = ctx.find_node_at_offset::<ast::RecordFieldList>()
+ {
+ // should be above enum and below struct
+ add_sort_fields_assist(acc, enum_struct_variant_ast)
+ } else if let Some(enum_ast) = ctx.find_node_at_offset::<ast::Enum>() {
+ add_sort_variants_assist(acc, enum_ast.variant_list()?)
+ } else {
+ None
+ }
+}
+
+trait AddRewrite {
+ fn add_rewrite<T: AstNode>(
+ &mut self,
+ label: &str,
+ old: Vec<T>,
+ new: Vec<T>,
+ target: TextRange,
+ ) -> Option<()>;
+}
+
+impl AddRewrite for Assists {
+ fn add_rewrite<T: AstNode>(
+ &mut self,
+ label: &str,
+ old: Vec<T>,
+ new: Vec<T>,
+ target: TextRange,
+ ) -> Option<()> {
+ self.add(AssistId("sort_items", AssistKind::RefactorRewrite), label, target, |builder| {
+ let mutable: Vec<T> = old.into_iter().map(|it| builder.make_mut(it)).collect();
+ mutable
+ .into_iter()
+ .zip(new)
+ .for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax()));
+ })
+ }
+}
+
+fn add_sort_field_list_assist(acc: &mut Assists, field_list: Option<ast::FieldList>) -> Option<()> {
+ match field_list {
+ Some(ast::FieldList::RecordFieldList(it)) => add_sort_fields_assist(acc, it),
+ _ => {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ None
+ }
+ }
+}
+
+fn add_sort_methods_assist(acc: &mut Assists, item_list: ast::AssocItemList) -> Option<()> {
+ let methods = get_methods(&item_list);
+ let sorted = sort_by_name(&methods);
+
+ if methods == sorted {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ return None;
+ }
+
+ acc.add_rewrite("Sort methods alphabetically", methods, sorted, item_list.syntax().text_range())
+}
+
+fn add_sort_fields_assist(
+ acc: &mut Assists,
+ record_field_list: ast::RecordFieldList,
+) -> Option<()> {
+ let fields: Vec<_> = record_field_list.fields().collect();
+ let sorted = sort_by_name(&fields);
+
+ if fields == sorted {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ return None;
+ }
+
+ acc.add_rewrite(
+ "Sort fields alphabetically",
+ fields,
+ sorted,
+ record_field_list.syntax().text_range(),
+ )
+}
+
+fn add_sort_variants_assist(acc: &mut Assists, variant_list: ast::VariantList) -> Option<()> {
+ let variants: Vec<_> = variant_list.variants().collect();
+ let sorted = sort_by_name(&variants);
+
+ if variants == sorted {
+ cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
+ return None;
+ }
+
+ acc.add_rewrite(
+ "Sort variants alphabetically",
+ variants,
+ sorted,
+ variant_list.syntax().text_range(),
+ )
+}
+
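+/// Sorts the items by the string form of their names; items without a name (not expected in
+/// practice, see the fallback match arms below) are ordered before named ones.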
+fn sort_by_name<T: HasName + Clone>(initial: &[T]) -> Vec<T> {
+ initial
+ .iter()
+ .cloned()
+ .sorted_by(|a, b| match (a.name(), b.name()) {
+ (Some(a), Some(b)) => Ord::cmp(&a.to_string(), &b.to_string()),
+
+ // unexpected, but just in case
+ (None, None) => Ordering::Equal,
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn not_applicable_if_no_selection() {
+ cov_mark::check!(not_applicable_if_no_selection);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+t$0rait Bar {
+ fn b();
+ fn a();
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_trait_empty() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+t$0rait Bar$0 {
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_impl_empty() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+struct Bar;
+$0impl Bar$0 {
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_struct_empty() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+$0struct Bar$0 ;
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_struct_empty2() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+$0struct Bar$0 { };
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_enum_empty() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+$0enum ZeroVariants$0 {};
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_trait_sorted() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+t$0rait Bar$0 {
+ fn a() {}
+ fn b() {}
+ fn c() {}
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_impl_sorted() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+struct Bar;
+$0impl Bar$0 {
+ fn a() {}
+ fn b() {}
+ fn c() {}
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_struct_sorted() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+$0struct Bar$0 {
+ a: u32,
+ b: u8,
+ c: u64,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_union_sorted() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+$0union Bar$0 {
+ a: u32,
+ b: u8,
+ c: u64,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_if_enum_sorted() {
+ cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
+
+ check_assist_not_applicable(
+ sort_items,
+ r#"
+$0enum Bar$0 {
+ a,
+ b,
+ c,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_trait() {
+ check_assist(
+ sort_items,
+ r#"
+$0trait Bar$0 {
+ fn a() {
+
+ }
+
+ // comment for c
+ fn c() {}
+ fn z() {}
+ fn b() {}
+}
+ "#,
+ r#"
+trait Bar {
+ fn a() {
+
+ }
+
+ fn b() {}
+ // comment for c
+ fn c() {}
+ fn z() {}
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_impl() {
+ check_assist(
+ sort_items,
+ r#"
+struct Bar;
+$0impl Bar$0 {
+ fn c() {}
+ fn a() {}
+ /// long
+ /// doc
+ /// comment
+ fn z() {}
+ fn d() {}
+}
+ "#,
+ r#"
+struct Bar;
+impl Bar {
+ fn a() {}
+ fn c() {}
+ fn d() {}
+ /// long
+ /// doc
+ /// comment
+ fn z() {}
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_struct() {
+ check_assist(
+ sort_items,
+ r#"
+$0struct Bar$0 {
+ b: u8,
+ a: u32,
+ c: u64,
+}
+ "#,
+ r#"
+struct Bar {
+ a: u32,
+ b: u8,
+ c: u64,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_generic_struct_with_lifetime() {
+ check_assist(
+ sort_items,
+ r#"
+$0struct Bar<'a,$0 T> {
+ d: &'a str,
+ b: u8,
+ a: T,
+ c: u64,
+}
+ "#,
+ r#"
+struct Bar<'a, T> {
+ a: T,
+ b: u8,
+ c: u64,
+ d: &'a str,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_struct_fields_diff_len() {
+ check_assist(
+ sort_items,
+ r#"
+$0struct Bar $0{
+ aaa: u8,
+ a: usize,
+ b: u8,
+}
+ "#,
+ r#"
+struct Bar {
+ a: usize,
+ aaa: u8,
+ b: u8,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_union() {
+ check_assist(
+ sort_items,
+ r#"
+$0union Bar$0 {
+ b: u8,
+ a: u32,
+ c: u64,
+}
+ "#,
+ r#"
+union Bar {
+ a: u32,
+ b: u8,
+ c: u64,
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_enum() {
+ check_assist(
+ sort_items,
+ r#"
+$0enum Bar $0{
+ d{ first: u32, second: usize},
+ b = 14,
+ a,
+ c(u32, usize),
+}
+ "#,
+ r#"
+enum Bar {
+ a,
+ b = 14,
+ c(u32, usize),
+ d{ first: u32, second: usize},
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_struct_enum_variant_fields() {
+ check_assist(
+ sort_items,
+ r#"
+enum Bar {
+ d$0{ second: usize, first: u32 }$0,
+ b = 14,
+ a,
+ c(u32, usize),
+}
+ "#,
+ r#"
+enum Bar {
+ d{ first: u32, second: usize },
+ b = 14,
+ a,
+ c(u32, usize),
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn sort_struct_enum_variant() {
+ check_assist(
+ sort_items,
+ r#"
+enum Bar {
+ $0d$0{ second: usize, first: u32 },
+}
+ "#,
+ r#"
+enum Bar {
+ d{ first: u32, second: usize },
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
new file mode 100644
index 000000000..775ededec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
@@ -0,0 +1,82 @@
+use syntax::{ast, AstNode, T};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: split_import
+//
+// Wraps the tail of the import into braces.
+//
+// ```
+// use std::$0collections::HashMap;
+// ```
+// ->
+// ```
+// use std::{collections::HashMap};
+// ```
+pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let colon_colon = ctx.find_token_syntax_at_offset(T![::])?;
+ let path = ast::Path::cast(colon_colon.parent()?)?.qualifier()?;
+
+ let use_tree = path.top_path().syntax().ancestors().find_map(ast::UseTree::cast)?;
+
+ let has_errors = use_tree
+ .syntax()
+ .descendants_with_tokens()
+ .any(|it| it.kind() == syntax::SyntaxKind::ERROR);
+ let last_segment = use_tree.path().and_then(|it| it.segment());
+ if has_errors || last_segment.is_none() {
+ return None;
+ }
+
+ let target = colon_colon.text_range();
+ acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| {
+ let use_tree = edit.make_mut(use_tree.clone());
+ let path = edit.make_mut(path);
+ use_tree.split_prefix(&path);
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_split_import() {
+ check_assist(
+ split_import,
+ "use crate::$0db::RootDatabase;",
+ "use crate::{db::RootDatabase};",
+ )
+ }
+
+ #[test]
+ fn split_import_works_with_trees() {
+ check_assist(
+ split_import,
+ "use crate:$0:db::{RootDatabase, FileSymbol}",
+ "use crate::{db::{RootDatabase, FileSymbol}}",
+ )
+ }
+
+ #[test]
+ fn split_import_target() {
+ check_assist_target(split_import, "use crate::$0db::{RootDatabase, FileSymbol}", "::");
+ }
+
+ #[test]
+ fn issue4044() {
+ check_assist_not_applicable(split_import, "use crate::$0:::self;")
+ }
+
+ #[test]
+ fn test_empty_use() {
+ check_assist_not_applicable(
+ split_import,
+ r"
+use std::$0
+fn main() {}",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
new file mode 100644
index 000000000..b7d57f02b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -0,0 +1,98 @@
+use syntax::{
+ ast::{self, HasAttrs},
+ AstNode, AstToken,
+};
+
+use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: toggle_ignore
+//
+// Adds the `#[ignore]` attribute to the test.
+//
+// ```
+// $0#[test]
+// fn arithmetics() {
+// assert_eq!(2 + 2, 5);
+// }
+// ```
+// ->
+// ```
+// #[test]
+// #[ignore]
+// fn arithmetics() {
+// assert_eq!(2 + 2, 5);
+// }
+// ```
+pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let attr: ast::Attr = ctx.find_node_at_offset()?;
+ let func = attr.syntax().parent().and_then(ast::Fn::cast)?;
+ let attr = test_related_attribute(&func)?;
+
+ match has_ignore_attribute(&func) {
+ None => acc.add(
+ AssistId("toggle_ignore", AssistKind::None),
+ "Ignore this test",
+ attr.syntax().text_range(),
+ |builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"),
+ ),
+ Some(ignore_attr) => acc.add(
+ AssistId("toggle_ignore", AssistKind::None),
+ "Re-enable this test",
+ ignore_attr.syntax().text_range(),
+ |builder| {
+ builder.delete(ignore_attr.syntax().text_range());
+ let whitespace = ignore_attr
+ .syntax()
+ .next_sibling_or_token()
+ .and_then(|x| x.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(whitespace) = whitespace {
+ builder.delete(whitespace.syntax().text_range());
+ }
+ },
+ ),
+ }
+}
+
+fn has_ignore_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
+ fn_def.attrs().find(|attr| attr.path().map(|it| it.syntax().text() == "ignore") == Some(true))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist;
+
+ use super::*;
+
+ #[test]
+ fn test_base_case() {
+ check_assist(
+ toggle_ignore,
+ r#"
+ #[test$0]
+ fn test() {}
+ "#,
+ r#"
+ #[test]
+ #[ignore]
+ fn test() {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_unignore() {
+ check_assist(
+ toggle_ignore,
+ r#"
+ #[test$0]
+ #[ignore]
+ fn test() {}
+ "#,
+ r#"
+ #[test]
+ fn test() {}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
new file mode 100644
index 000000000..3ce028e93
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
@@ -0,0 +1,237 @@
+use syntax::{
+ ast::{self, make, HasVisibility},
+ ted::{self, Position},
+ AstNode, SyntaxKind,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ AssistId, AssistKind,
+};
+
+// Assist: unmerge_use
+//
+// Extracts a single use item from a use list.
+//
+// ```
+// use std::fmt::{Debug, Display$0};
+// ```
+// ->
+// ```
+// use std::fmt::{Debug};
+// use std::fmt::Display;
+// ```
+pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let tree: ast::UseTree = ctx.find_node_at_offset::<ast::UseTree>()?.clone_for_update();
+
+ let tree_list = tree.syntax().parent().and_then(ast::UseTreeList::cast)?;
+ if tree_list.use_trees().count() < 2 {
+ cov_mark::hit!(skip_single_use_item);
+ return None;
+ }
+
+ let use_: ast::Use = tree_list.syntax().ancestors().find_map(ast::Use::cast)?;
+ let path = resolve_full_path(&tree)?;
+
+ let old_parent_range = use_.syntax().parent()?.text_range();
+ let new_parent = use_.syntax().parent()?;
+
+ let target = tree.syntax().text_range();
+ acc.add(
+ AssistId("unmerge_use", AssistKind::RefactorRewrite),
+ "Unmerge use",
+ target,
+ |builder| {
+ let new_use = make::use_(
+ use_.visibility(),
+ make::use_tree(
+ path,
+ tree.use_tree_list(),
+ tree.rename(),
+ tree.star_token().is_some(),
+ ),
+ )
+ .clone_for_update();
+
+ tree.remove();
+ ted::insert(Position::after(use_.syntax()), new_use.syntax());
+
+ builder.replace(old_parent_range, new_parent.to_string());
+ },
+ )
+}
+
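+/// Rebuilds the full path to `tree` by concatenating the paths of its ancestor use trees,
+/// e.g. the `qux` tree in `use foo::bar::{baz::{qux}};` resolves to `foo::bar::baz::qux`;
+/// a trailing `self` segment is dropped, so the `self` tree in
+/// `use std::process::{Command, self};` resolves to `std::process`.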
+fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {
+ let paths = tree
+ .syntax()
+ .ancestors()
+ .take_while(|n| n.kind() != SyntaxKind::USE)
+ .filter_map(ast::UseTree::cast)
+ .filter_map(|t| t.path());
+
+ let final_path = paths.reduce(|prev, next| make::path_concat(next, prev))?;
+ if final_path.segment().map_or(false, |it| it.self_token().is_some()) {
+ final_path.qualifier()
+ } else {
+ Some(final_path)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn skip_single_use_item() {
+ cov_mark::check!(skip_single_use_item);
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::Debug$0;
+",
+ );
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::{Debug$0};
+",
+ );
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::Debug as Dbg$0;
+",
+ );
+ }
+
+ #[test]
+ fn skip_single_glob_import() {
+ check_assist_not_applicable(
+ unmerge_use,
+ r"
+use std::fmt::*$0;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{Debug, Display$0};
+",
+ r"
+use std::fmt::{Debug};
+use std::fmt::Display;
+",
+ );
+
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{Debug, format$0, Display};
+",
+ r"
+use std::fmt::{Debug, Display};
+use std::fmt::format;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_glob_import() {
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{*$0, Display};
+",
+ r"
+use std::fmt::{Display};
+use std::fmt::*;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_renamed_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+use std::fmt::{Debug, Display as Disp$0};
+",
+ r"
+use std::fmt::{Debug};
+use std::fmt::Display as Disp;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_indented_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+mod format {
+ use std::fmt::{Debug, Display$0 as Disp, format};
+}
+",
+ r"
+mod format {
+ use std::fmt::{Debug, format};
+ use std::fmt::Display as Disp;
+}
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_nested_use_item() {
+ check_assist(
+ unmerge_use,
+ r"
+use foo::bar::{baz::{qux$0, foobar}, barbaz};
+",
+ r"
+use foo::bar::{baz::{foobar}, barbaz};
+use foo::bar::baz::qux;
+",
+ );
+ check_assist(
+ unmerge_use,
+ r"
+use foo::bar::{baz$0::{qux, foobar}, barbaz};
+",
+ r"
+use foo::bar::{barbaz};
+use foo::bar::baz::{qux, foobar};
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_use_item_with_visibility() {
+ check_assist(
+ unmerge_use,
+ r"
+pub use std::fmt::{Debug, Display$0};
+",
+ r"
+pub use std::fmt::{Debug};
+pub use std::fmt::Display;
+",
+ );
+ }
+
+ #[test]
+ fn unmerge_use_item_on_self() {
+ check_assist(
+ unmerge_use,
+ r"use std::process::{Command, self$0};",
+ r"use std::process::{Command};
+use std::process;",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
new file mode 100644
index 000000000..d5cd2d551
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
@@ -0,0 +1,257 @@
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ base_db::FileId,
+ defs::Definition,
+ search::FileReference,
+ syntax_helpers::node_ext::full_path_of_name_ref,
+};
+use syntax::{
+ ast::{self, NameLike, NameRef},
+ AstNode, SyntaxKind, TextRange,
+};
+
+use crate::{AssistContext, Assists};
+
+// Assist: unnecessary_async
+//
+// Removes the `async` keyword from functions that have no `.await` in their body.
+// Looks for calls to the function and removes the `.await` at the call sites.
+//
+// ```
+// pub async f$0n foo() {}
+// pub async fn bar() { foo().await }
+// ```
+// ->
+// ```
+// pub fn foo() {}
+// pub async fn bar() { foo() }
+// ```
+pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let function: ast::Fn = ctx.find_node_at_offset()?;
+
+    // Do nothing if the cursor is not on the prototype. This keeps the assist from polluting
+    // the assist list when the user asks for assists in the middle of the function body.
+ // We consider the prototype to be anything that is before the body of the function.
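+    // E.g. a cursor on `async f$0n foo() {}` is on the prototype, while a cursor inside the
+    // braces is not (see the `does_not_apply_when_not_on_prototype` test below).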
+ let cursor_position = ctx.offset();
+ if cursor_position >= function.body()?.syntax().text_range().start() {
+ return None;
+ }
+ // Do nothing if the function isn't async.
+ if let None = function.async_token() {
+ return None;
+ }
+ // Do nothing if the function has an `await` expression in its body.
+ if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() {
+ return None;
+ }
+
+ // Remove the `async` keyword plus whitespace after it, if any.
+ let async_range = {
+ let async_token = function.async_token()?;
+ let next_token = async_token.next_token()?;
+ if matches!(next_token.kind(), SyntaxKind::WHITESPACE) {
+ TextRange::new(async_token.text_range().start(), next_token.text_range().end())
+ } else {
+ async_token.text_range()
+ }
+ };
+
+ // Otherwise, we may remove the `async` keyword.
+ acc.add(
+ AssistId("unnecessary_async", AssistKind::QuickFix),
+ "Remove unnecessary async",
+ async_range,
+ |edit| {
+ // Remove async on the function definition.
+ edit.replace(async_range, "");
+
+ // Remove all `.await`s from calls to the function we remove `async` from.
+ if let Some(fn_def) = ctx.sema.to_def(&function) {
+ for await_expr in find_all_references(ctx, &Definition::Function(fn_def))
+                // Keep only references that correspond to NameRefs.
+ .filter_map(|(_, reference)| match reference.name {
+ NameLike::NameRef(nameref) => Some(nameref),
+ _ => None,
+ })
+ // Keep only references that correspond to await expressions
+ .filter_map(|nameref| find_await_expression(ctx, &nameref))
+ {
+ if let Some(await_token) = &await_expr.await_token() {
+ edit.replace(await_token.text_range(), "");
+ }
+ if let Some(dot_token) = &await_expr.dot_token() {
+ edit.replace(dot_token.text_range(), "");
+ }
+ }
+ }
+ },
+ )
+}
+
+fn find_all_references(
+ ctx: &AssistContext<'_>,
+ def: &Definition,
+) -> impl Iterator<Item = (FileId, FileReference)> {
+ def.usages(&ctx.sema).all().into_iter().flat_map(|(file_id, references)| {
+ references.into_iter().map(move |reference| (file_id, reference))
+ })
+}
+
+/// Finds the await expression for the given `NameRef`.
+/// If no await expression is found, returns None.
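+/// E.g. for `f2().await` this returns the whole `f2().await` node when `nameref` is the
+/// `f2` of the call; method calls such as `s.f2().await` are handled analogously.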
+fn find_await_expression(ctx: &AssistContext<'_>, nameref: &NameRef) -> Option<ast::AwaitExpr> {
+ // From the nameref, walk up the tree to the await expression.
+ let await_expr = if let Some(path) = full_path_of_name_ref(&nameref) {
+ // Function calls.
+ path.syntax()
+ .parent()
+ .and_then(ast::PathExpr::cast)?
+ .syntax()
+ .parent()
+ .and_then(ast::CallExpr::cast)?
+ .syntax()
+ .parent()
+ .and_then(ast::AwaitExpr::cast)
+ } else {
+ // Method calls.
+ nameref
+ .syntax()
+ .parent()
+ .and_then(ast::MethodCallExpr::cast)?
+ .syntax()
+ .parent()
+ .and_then(ast::AwaitExpr::cast)
+ };
+
+ ctx.sema.original_ast_node(await_expr?)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn applies_on_empty_function() {
+ check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
+ }
+
+ #[test]
+ fn applies_and_removes_whitespace() {
+ check_assist(unnecessary_async, "pub async f$0n f() {}", "pub fn f() {}")
+ }
+
+ #[test]
+ fn does_not_apply_on_non_async_function() {
+ check_assist_not_applicable(unnecessary_async, "pub f$0n f() {}")
+ }
+
+ #[test]
+ fn applies_on_function_with_a_non_await_expr() {
+ check_assist(unnecessary_async, "pub async f$0n f() { f2() }", "pub fn f() { f2() }")
+ }
+
+ #[test]
+ fn does_not_apply_on_function_with_an_await_expr() {
+ check_assist_not_applicable(unnecessary_async, "pub async f$0n f() { f2().await }")
+ }
+
+ #[test]
+ fn applies_and_removes_await_on_reference() {
+ check_assist(
+ unnecessary_async,
+ r#"
+pub async fn f4() { }
+pub async f$0n f2() { }
+pub async fn f() { f2().await }
+pub async fn f3() { f2().await }"#,
+ r#"
+pub async fn f4() { }
+pub fn f2() { }
+pub async fn f() { f2() }
+pub async fn f3() { f2() }"#,
+ )
+ }
+
+ #[test]
+ fn applies_and_removes_await_from_within_module() {
+ check_assist(
+ unnecessary_async,
+ r#"
+pub async fn f4() { }
+mod a { pub async f$0n f2() { } }
+pub async fn f() { a::f2().await }
+pub async fn f3() { a::f2().await }"#,
+ r#"
+pub async fn f4() { }
+mod a { pub fn f2() { } }
+pub async fn f() { a::f2() }
+pub async fn f3() { a::f2() }"#,
+ )
+ }
+
+ #[test]
+ fn applies_and_removes_await_on_inner_await() {
+ check_assist(
+ unnecessary_async,
+ // Ensure that it is the first await on the 3rd line that is removed
+ r#"
+pub async fn f() { f2().await }
+pub async f$0n f2() -> i32 { 1 }
+pub async fn f3() { f4(f2().await).await }
+pub async fn f4(i: i32) { }"#,
+ r#"
+pub async fn f() { f2() }
+pub fn f2() -> i32 { 1 }
+pub async fn f3() { f4(f2()).await }
+pub async fn f4(i: i32) { }"#,
+ )
+ }
+
+ #[test]
+ fn applies_and_removes_await_on_outer_await() {
+ check_assist(
+ unnecessary_async,
+ // Ensure that it is the second await on the 3rd line that is removed
+ r#"
+pub async fn f() { f2().await }
+pub async f$0n f2(i: i32) { }
+pub async fn f3() { f2(f4().await).await }
+pub async fn f4() -> i32 { 1 }"#,
+ r#"
+pub async fn f() { f2() }
+pub fn f2(i: i32) { }
+pub async fn f3() { f2(f4().await) }
+pub async fn f4() -> i32 { 1 }"#,
+ )
+ }
+
+ #[test]
+ fn applies_on_method_call() {
+ check_assist(
+ unnecessary_async,
+ r#"
+pub struct S { }
+impl S { pub async f$0n f2(&self) { } }
+pub async fn f(s: &S) { s.f2().await }"#,
+ r#"
+pub struct S { }
+impl S { pub fn f2(&self) { } }
+pub async fn f(s: &S) { s.f2() }"#,
+ )
+ }
+
+ #[test]
+ fn does_not_apply_on_function_with_a_nested_await_expr() {
+ check_assist_not_applicable(
+ unnecessary_async,
+ "async f$0n f() { if true { loop { f2().await } } }",
+ )
+ }
+
+ #[test]
+ fn does_not_apply_when_not_on_prototype() {
+ check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
new file mode 100644
index 000000000..7969a4918
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -0,0 +1,719 @@
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ },
+ AstNode, SyntaxKind, TextRange, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: unwrap_block
+//
+// This assist removes `if...else`, `for`, `while` and `loop` control statements, keeping just the body.
+//
+// ```
+// fn foo() {
+// if true {$0
+// println!("foo");
+// }
+// }
+// ```
+// ->
+// ```
+// fn foo() {
+// println!("foo");
+// }
+// ```
+pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite);
+ let assist_label = "Unwrap block";
+
+ let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
+ let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?;
+ let target = block.syntax().text_range();
+ let mut parent = block.syntax().parent()?;
+ if ast::MatchArm::can_cast(parent.kind()) {
+ parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))?
+ }
+
+ if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) {
+ return acc.add(assist_id, assist_label, target, |builder| {
+ builder.replace(block.syntax().text_range(), update_expr_string(block.to_string()));
+ });
+ }
+
+ let parent = ast::Expr::cast(parent)?;
+
+ match parent.clone() {
+ ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
+ ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
+ ast::Expr::IfExpr(if_expr) => {
+ let then_branch = if_expr.then_branch()?;
+ if then_branch == block {
+ if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
+ // For `else if` blocks
+ let ancestor_then_branch = ancestor.then_branch()?;
+
+ return acc.add(assist_id, assist_label, target, |edit| {
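+                    // Delete the `else if <cond>` text between the parent `if`'s then-branch
+                    // and this block's `{`, plus everything after this block up to the end of
+                    // the whole `if` expression, then splice the unwrapped body in place.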
+ let range_to_del_else_if = TextRange::new(
+ ancestor_then_branch.syntax().text_range().end(),
+ l_curly_token.text_range().start(),
+ );
+ let range_to_del_rest = TextRange::new(
+ then_branch.syntax().text_range().end(),
+ if_expr.syntax().text_range().end(),
+ );
+
+ edit.delete(range_to_del_rest);
+ edit.delete(range_to_del_else_if);
+ edit.replace(
+ target,
+ update_expr_string_without_newline(then_branch.to_string()),
+ );
+ });
+ }
+ } else {
+ return acc.add(assist_id, assist_label, target, |edit| {
+ let range_to_del = TextRange::new(
+ then_branch.syntax().text_range().end(),
+ l_curly_token.text_range().start(),
+ );
+
+ edit.delete(range_to_del);
+ edit.replace(target, update_expr_string_without_newline(block.to_string()));
+ });
+ }
+ }
+ _ => return None,
+ };
+
+ acc.add(assist_id, assist_label, target, |builder| {
+ builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
+ })
+}
+
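+/// Strips the surrounding braces and one level of indentation from the textual form of a
+/// block; as a rough sketch, `update_expr_string("{ 92 }".to_string())` yields `"92"`.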
+fn update_expr_string(expr_string: String) -> String {
+ update_expr_string_with_pat(expr_string, &[' ', '\n'])
+}
+
+fn update_expr_string_without_newline(expr_string: String) -> String {
+ update_expr_string_with_pat(expr_string, &[' '])
+}
+
+fn update_expr_string_with_pat(expr_str: String, whitespace_pat: &[char]) -> String {
+ // Remove leading whitespace, index [1..] to remove the leading '{',
+ // then continue to remove leading whitespace.
+ let expr_str =
+ expr_str.trim_start_matches(whitespace_pat)[1..].trim_start_matches(whitespace_pat);
+
+ // Remove trailing whitespace, index [..expr_str.len() - 1] to remove the trailing '}',
+ // then continue to remove trailing whitespace.
+ let expr_str = expr_str.trim_end_matches(whitespace_pat);
+ let expr_str = expr_str[..expr_str.len() - 1].trim_end_matches(whitespace_pat);
+
+ expr_str
+ .lines()
+ .map(|line| line.replacen(" ", "", 1)) // Delete indentation
+ .collect::<Vec<String>>()
+ .join("\n")
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn unwrap_tail_expr_block() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ 92
+ }
+}
+"#,
+ r#"
+fn main() {
+ 92
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unwrap_stmt_expr_block() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ 92;
+ }
+ ()
+}
+"#,
+ r#"
+fn main() {
+ 92;
+ ()
+}
+"#,
+ );
+        // Pedantically, we should add a `;` here...
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ 92
+ }
+ ()
+}
+"#,
+ r#"
+fn main() {
+ 92
+ ()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ bar();
+ if true {$0
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ r#"
+fn main() {
+ bar();
+ foo();
+
+ // comment
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ bar();
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {$0
+ println!("bar");
+ }
+}
+"#,
+ r#"
+fn main() {
+ bar();
+ if true {
+ foo();
+
+ // comment
+ bar();
+ }
+ println!("bar");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {$0
+ println!("bar");
+ } else {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ }
+ println!("bar");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if_nested() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {$0
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ }
+ println!("foo");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if_nested_else() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {
+ println!("foo");
+ } else {$0
+ println!("else");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {
+ println!("foo");
+ }
+ println!("else");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_else_if_nested_middle() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ } else if true {$0
+ println!("foo");
+ } else {
+ println!("else");
+ }
+}
+"#,
+ r#"
+fn main() {
+ // bar();
+ if true {
+ println!("true");
+
+ // comment
+ // bar();
+ } else if false {
+ println!("bar");
+ }
+ println!("foo");
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_bad_cursor_position() {
+ check_assist_not_applicable(
+ unwrap_block,
+ r#"
+fn main() {
+ bar();$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_for() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ for i in 0..5 {$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_in_for() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ for i in 0..5 {
+ if true {$0
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ for i in 0..5 {
+ foo();
+
+ // comment
+ bar();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_loop() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ loop {$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_while() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ while true {$0
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ if true {
+ foo();
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_match_arm() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ match rel_path {
+ Ok(rel_path) => {$0
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+ }
+ Err(_) => None,
+ }
+}
+"#,
+ r#"
+fn main() {
+ let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
+ Some((*id, rel_path))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_in_while_bad_cursor_position() {
+ check_assist_not_applicable(
+ unwrap_block,
+ r#"
+fn main() {
+ while true {
+ if true {
+ foo();$0
+
+ // comment
+ bar();
+ } else {
+ println!("bar");
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_single_line() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ {$0 0 }
+}
+"#,
+ r#"
+fn main() {
+ 0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_nested_block() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ $0{
+ {
+ 3
+ }
+ }
+}
+"#,
+ r#"
+fn main() {
+ {
+ 3
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_single_line() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ {$0 { println!("foo"); } }
+}
+"#,
+ r#"
+fn main() {
+ { println!("foo"); }
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ {$0 { 0 } }
+}
+"#,
+ r#"
+fn main() {
+ { 0 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_if_single_line() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ if true {$0 /* foo */ foo() } else { bar() /* bar */}
+}
+"#,
+ r#"
+fn main() {
+ /* foo */ foo()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn if_single_statement() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() {
+ if true {$0
+ return 3;
+ }
+}
+"#,
+ r#"
+fn main() {
+ return 3;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multiple_statements() {
+ check_assist(
+ unwrap_block,
+ r#"
+fn main() -> i32 {
+ if 2 > 1 {$0
+ let a = 5;
+ return 3;
+ }
+ 5
+}
+"#,
+ r#"
+fn main() -> i32 {
+ let a = 5;
+ return 3;
+ 5
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
new file mode 100644
index 000000000..9ef4ae047
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -0,0 +1,1020 @@
+use ide_db::{
+ famous_defs::FamousDefs,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, Expr},
+ match_ast, AstNode, TextRange, TextSize,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: unwrap_result_return_type
+//
+// Unwrap the function's return type.
+//
+// ```
+// # //- minicore: result
+// fn foo() -> Result<i32>$0 { Ok(42i32) }
+// ```
+// ->
+// ```
+// fn foo() -> i32 { 42i32 }
+// ```
+pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
+ let parent = ret_type.syntax().parent()?;
+ let body = match_ast! {
+ match parent {
+ ast::Fn(func) => func.body()?,
+ ast::ClosureExpr(closure) => match closure.body()? {
+ Expr::BlockExpr(block) => block,
+ // closures require a block when a return type is specified
+ _ => return None,
+ },
+ _ => return None,
+ }
+ };
+
+ let type_ref = &ret_type.ty()?;
+ let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
+ let result_enum =
+ FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
+
+ if !matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
+ return None;
+ }
+
+ acc.add(
+ AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite),
+ "Unwrap Result return type",
+ type_ref.syntax().text_range(),
+ |builder| {
+ let body = ast::Expr::BlockExpr(body);
+
+ let mut exprs_to_unwrap = Vec::new();
+ let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e);
+ walk_expr(&body, &mut |expr| {
+ if let Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&body, tail_cb);
+
+ let mut is_unit_type = false;
+ if let Some((_, inner_type)) = type_ref.to_string().split_once('<') {
+ let inner_type = match inner_type.split_once(',') {
+ Some((success_inner_type, _)) => success_inner_type,
+ None => inner_type,
+ };
+ let new_ret_type = inner_type.strip_suffix('>').unwrap_or(inner_type);
+ if new_ret_type == "()" {
+ is_unit_type = true;
+ let text_range = TextRange::new(
+ ret_type.syntax().text_range().start(),
+ ret_type.syntax().text_range().end() + TextSize::from(1u32),
+ );
+ builder.delete(text_range)
+ } else {
+ builder.replace(
+ type_ref.syntax().text_range(),
+ inner_type.strip_suffix('>').unwrap_or(inner_type),
+ )
+ }
+ }
+
+ for ret_expr_arg in exprs_to_unwrap {
+ let ret_expr_str = ret_expr_arg.to_string();
+ if ret_expr_str.starts_with("Ok(") || ret_expr_str.starts_with("Err(") {
+ let arg_list = ret_expr_arg.syntax().children().find_map(ast::ArgList::cast);
+ if let Some(arg_list) = arg_list {
+ if is_unit_type {
+ match ret_expr_arg.syntax().prev_sibling_or_token() {
+ // Useful to delete the entire line without leaving trailing whitespaces
+ Some(whitespace) => {
+ let new_range = TextRange::new(
+ whitespace.text_range().start(),
+ ret_expr_arg.syntax().text_range().end(),
+ );
+ builder.delete(new_range);
+ }
+ None => {
+ builder.delete(ret_expr_arg.syntax().text_range());
+ }
+ }
+ } else {
+ builder.replace(
+ ret_expr_arg.syntax().text_range(),
+ arg_list.args().join(", "),
+ );
+ }
+ }
+ }
+ }
+ },
+ )
+}
+
+fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
+ match e {
+ Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
+ }
+ }
+ Expr::ReturnExpr(ret_expr) => {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
+ }
+ }
+ e => acc.push(e.clone()),
+ }
+}
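A minimal sketch of the behaviour implemented above (the `early` and `cond` helpers are illustrative, not from this file): every `Ok(..)` that is reached through a `return`, a `break`, or a plain tail position is collected by the walk and later replaced with its argument, while the `Result<..>` return type is narrowed to its success type.

    fn demo() -> Result<i32, String> {
        if early() {
            return Ok(1);      // reached through `return`
        }
        loop {
            break if cond() {
                Ok(2)          // reached through `break`
            } else {
                Ok(3)          // plain tail of the `if`
            };
        }
    }
    // After the assist: `fn demo() -> i32`, with `1`, `2` and `3` in place of the `Ok(..)` calls.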
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn unwrap_result_return_type_simple() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i3$02> {
+ let test = "test";
+ return Ok(42i32);
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_unit_type() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), Box<dyn Error$0>> {
+ Ok(())
+}
+"#,
+ r#"
+fn foo() {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_ending_with_parent() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32, Box<dyn Error$0>> {
+ if true {
+ Ok(42)
+ } else {
+ foo()
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ 42
+ } else {
+ foo()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_return_type_break_split_tail() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i3$02, String> {
+ loop {
+ break if true {
+ Ok(1)
+ } else {
+ Ok(0)
+ };
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ loop {
+ break if true {
+ 1
+ } else {
+ 0
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> Result<i32$0> {
+ let test = "test";
+ return Ok(42i32);
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> i32 {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_bad_cursor() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> i32 {
+ let test = "test";$0
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_bad_cursor_closure() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32 {
+ let test = "test";$0
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_closure_non_block() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() { || -> i$032 3; }
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_already_not_result_std() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_return_type_already_not_result_closure() {
+ check_assist_not_applicable(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32$0 {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() ->$0 Result<i32> {
+ let test = "test";
+ Ok(42i32)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ 42i32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || ->$0 Result<i32, String> {
+ let test = "test";
+ Ok(42i32)
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> i32 {
+ let test = "test";
+ 42i32
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_only() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> { Ok(42i32) }
+"#,
+ r#"
+fn foo() -> i32 { 42i32 }
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32>$0 {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_without_block_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> Result<i32, String>$0 {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> i32 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_nested_if() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32>$0 {
+ if true {
+ if false {
+ Ok(1)
+ } else {
+ Ok(2)
+ }
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ if false {
+ 1
+ } else {
+ 2
+ }
+ } else {
+ 24i32
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_await() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+async fn foo() -> Result<i$032> {
+ if true {
+ if false {
+ Ok(1.await)
+ } else {
+ Ok(2.await)
+ }
+ } else {
+ Ok(24i32.await)
+ }
+}
+"#,
+ r#"
+async fn foo() -> i32 {
+ if true {
+ if false {
+ 1.await
+ } else {
+ 2.await
+ }
+ } else {
+ 24i32.await
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_array() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<[i32; 3]$0> { Ok([1, 2, 3]) }
+"#,
+ r#"
+fn foo() -> [i32; 3] { [1, 2, 3] }
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_cast() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -$0> Result<i32> {
+ if true {
+ if false {
+ Ok(1 as i32)
+ } else {
+ Ok(2 as i32)
+ }
+ } else {
+ Ok(24 as i32)
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ if true {
+ if false {
+ 1 as i32
+ } else {
+ 2 as i32
+ }
+ } else {
+ 24 as i32
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_match() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ match my_var {
+ 5 => Ok(42i32),
+ _ => Ok(24i32),
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ match my_var {
+ 5 => 42i32,
+ _ => 24i32,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_loop_with_tail() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ Ok(my_var)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ my_var
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_loop_in_let_stmt() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = let x = loop {
+ break 1;
+ };
+ Ok(my_var)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = let x = loop {
+ break 1;
+ };
+ my_var
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_match_return_expr() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32>$0 {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return Ok(24i32),
+ };
+ Ok(res)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return 24i32,
+ };
+ res
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return Ok(24i32);
+ };
+ Ok(res)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return 24i32;
+ };
+ res
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_match_deeper() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(25i32)
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return Ok(bar());
+ }
+ Ok(53i32)
+ },
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ 42i32
+ } else {
+ 25i32
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return bar();
+ }
+ 53i32
+ },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_tail_block_like_early_return() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ Ok(53i32)
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ 53i32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_closure() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ let t = None;
+
+ Ok(t.unwrap_or_else(|| the_field))
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ let t = None;
+
+ t.unwrap_or_else(|| the_field)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_result_return_type_simple_with_weird_forms() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break Ok(55);
+ }
+ i += 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break 55;
+ }
+ i += 1;
+ }
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return Ok(55u32);
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ };
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return 55u32;
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ };
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ }
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u32$0> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99)
+ } else {
+ return Ok(0)
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99
+ } else {
+ return 0
+ }
+ }
+ the_field
+}
+"#,
+ );
+
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> Result<u3$02> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
new file mode 100644
index 000000000..83446387d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
@@ -0,0 +1,980 @@
+use std::iter;
+
+use ide_db::{
+ famous_defs::FamousDefs,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+};
+use syntax::{
+ ast::{self, make, Expr},
+ match_ast, AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: wrap_return_type_in_result
+//
+// Wrap the function's return type into Result.
+//
+// ```
+// # //- minicore: result
+// fn foo() -> i32$0 { 42i32 }
+// ```
+// ->
+// ```
+// fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+// ```
+pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
+ let parent = ret_type.syntax().parent()?;
+ let body = match_ast! {
+ match parent {
+ ast::Fn(func) => func.body()?,
+ ast::ClosureExpr(closure) => match closure.body()? {
+ Expr::BlockExpr(block) => block,
+ // closures require a block when a return type is specified
+ _ => return None,
+ },
+ _ => return None,
+ }
+ };
+
+ let type_ref = &ret_type.ty()?;
+ let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
+ let result_enum =
+ FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
+
+ if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
+ cov_mark::hit!(wrap_return_type_in_result_simple_return_type_already_result);
+ return None;
+ }
+
+ acc.add(
+ AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite),
+ "Wrap return type in Result",
+ type_ref.syntax().text_range(),
+ |builder| {
+ let body = ast::Expr::BlockExpr(body);
+
+ let mut exprs_to_wrap = Vec::new();
+ let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
+ walk_expr(&body, &mut |expr| {
+ if let Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&body, tail_cb);
+
+ for ret_expr_arg in exprs_to_wrap {
+ let ok_wrapped = make::expr_call(
+ make::expr_path(make::ext::ident_path("Ok")),
+ make::arg_list(iter::once(ret_expr_arg.clone())),
+ );
+ builder.replace_ast(ret_expr_arg, ok_wrapped);
+ }
+
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = format!("Result<{}, ${{0:_}}>", type_ref);
+ builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet)
+ }
+ None => builder
+ .replace(type_ref.syntax().text_range(), format!("Result<{}, _>", type_ref)),
+ }
+ },
+ )
+}
+
+fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
+ match e {
+ Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
+ }
+ }
+ Expr::ReturnExpr(ret_expr) => {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
+ }
+ }
+ e => acc.push(e.clone()),
+ }
+}
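A small before/after sketch of the two output forms produced by the `snippet_cap` branch above: when the client supports snippets, the error type is emitted as the `${0:_}` tab-stop placeholder (so the cursor lands on it); otherwise a plain `_` is written.

    fn foo() -> u32 { 0 }
    // with snippet support:    fn foo() -> Result<u32, ${0:_}> { Ok(0) }
    // without snippet support: fn foo() -> Result<u32, _>      { Ok(0) }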
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn wrap_return_type_in_result_simple() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i3$02 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ return Ok(42i32);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_break_split_tail() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i3$02 {
+ loop {
+ break if true {
+ 1
+ } else {
+ 0
+ };
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ loop {
+ break if true {
+ Ok(1)
+ } else {
+ Ok(0)
+ };
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32$0 {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> Result<i32, ${0:_}> {
+ let test = "test";
+ return Ok(42i32);
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_bad_cursor() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32 {
+ let test = "test";$0
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_bad_cursor_closure() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32 {
+ let test = "test";$0
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_closure_non_block() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() { || -> i$032 3; }
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_already_result_std() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> core::result::Result<i32$0, String> {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_already_result() {
+ cov_mark::check!(wrap_return_type_in_result_simple_return_type_already_result);
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> Result<i32$0, String> {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_return_type_already_result_closure() {
+ check_assist_not_applicable(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> Result<i32$0, String> {
+ let test = "test";
+ return 42i32;
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_cursor() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> $0i32 {
+ let test = "test";
+ return 42i32;
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ return Ok(42i32);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() ->$0 i32 {
+ let test = "test";
+ 42i32
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ Ok(42i32)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || ->$0 i32 {
+ let test = "test";
+ 42i32
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> Result<i32, ${0:_}> {
+ let test = "test";
+ Ok(42i32)
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_only() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 { 42i32 }
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_without_block_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() {
+ || -> i32$0 {
+ if true {
+ 42i32
+ } else {
+ 24i32
+ }
+ };
+}
+"#,
+ r#"
+fn foo() {
+ || -> Result<i32, ${0:_}> {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(24i32)
+ }
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_nested_if() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ if true {
+ if false {
+ 1
+ } else {
+ 2
+ }
+ } else {
+ 24i32
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ if false {
+ Ok(1)
+ } else {
+ Ok(2)
+ }
+ } else {
+ Ok(24i32)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_await() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+async fn foo() -> i$032 {
+ if true {
+ if false {
+ 1.await
+ } else {
+ 2.await
+ }
+ } else {
+ 24i32.await
+ }
+}
+"#,
+ r#"
+async fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ if false {
+ Ok(1.await)
+ } else {
+ Ok(2.await)
+ }
+ } else {
+ Ok(24i32.await)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_array() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> [i32;$0 3] { [1, 2, 3] }
+"#,
+ r#"
+fn foo() -> Result<[i32; 3], ${0:_}> { Ok([1, 2, 3]) }
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_cast() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -$0> i32 {
+ if true {
+ if false {
+ 1 as i32
+ } else {
+ 2 as i32
+ }
+ } else {
+ 24 as i32
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if true {
+ if false {
+ Ok(1 as i32)
+ } else {
+ Ok(2 as i32)
+ }
+ } else {
+ Ok(24 as i32)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_match() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ match my_var {
+ 5 => 42i32,
+ _ => 24i32,
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ match my_var {
+ 5 => Ok(42i32),
+ _ => Ok(24i32),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_loop_with_tail() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ my_var
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ loop {
+ println!("test");
+ 5
+ }
+ Ok(my_var)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_loop_in_let_stmt() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = let x = loop {
+ break 1;
+ };
+ my_var
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = let x = loop {
+ break 1;
+ };
+ Ok(my_var)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_match_return_expr() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return 24i32,
+ };
+ res
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ let res = match my_var {
+ 5 => 42i32,
+ _ => return Ok(24i32),
+ };
+ Ok(res)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return 24i32;
+ };
+ res
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ let res = if my_var == 5 {
+ 42i32
+ } else {
+ return Ok(24i32);
+ };
+ Ok(res)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_match_deeper() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ 42i32
+ } else {
+ 25i32
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return bar();
+ }
+ 53i32
+ },
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let my_var = 5;
+ match my_var {
+ 5 => {
+ if true {
+ Ok(42i32)
+ } else {
+ Ok(25i32)
+ }
+ },
+ _ => {
+ let test = "test";
+ if test == "test" {
+ return Ok(bar());
+ }
+ Ok(53i32)
+ },
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_tail_block_like_early_return() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i$032 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ 53i32
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ Ok(53i32)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_closure() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) ->$0 u32 {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ let true_closure = || { return true; };
+ if the_field < 5 {
+ let mut i = 0;
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u32$0 {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ let t = None;
+
+ t.unwrap_or_else(|| the_field)
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ let true_closure = || {
+ return true;
+ };
+ if the_field < 5 {
+ let mut i = 0;
+
+
+ if true_closure() {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ let t = None;
+
+ Ok(t.unwrap_or_else(|| the_field))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn wrap_return_type_in_result_simple_with_weird_forms() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ let test = "test";
+ if test == "test" {
+ return 24i32;
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break 55;
+ }
+ i += 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ let test = "test";
+ if test == "test" {
+ return Ok(24i32);
+ }
+ let mut i = 0;
+ loop {
+ if i == 1 {
+ break Ok(55);
+ }
+ i += 1;
+ }
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u32$0 {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return 55u32;
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ };
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ loop {
+ if i > 5 {
+ return Ok(55u32);
+ }
+ i += 3;
+ }
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ };
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u3$02 {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return 99,
+ _ => return 0,
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ match i {
+ 5 => return Ok(99),
+ _ => return Ok(0),
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> u32$0 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99
+ } else {
+ return 0
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99)
+ } else {
+ return Ok(0)
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(the_field: u32) -> $0u32 {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return 99;
+ } else {
+ return 0;
+ }
+ }
+ the_field
+}
+"#,
+ r#"
+fn foo(the_field: u32) -> Result<u32, ${0:_}> {
+ if the_field < 5 {
+ let mut i = 0;
+ if i == 5 {
+ return Ok(99);
+ } else {
+ return Ok(0);
+ }
+ }
+ Ok(the_field)
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
new file mode 100644
index 000000000..fe87aa15f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -0,0 +1,309 @@
+//! The `assists` crate provides a bunch of code assists, also known as code actions
+//! (in LSP) or intentions (in IntelliJ).
+//!
+//! An assist is a micro-refactoring, which is automatically activated in a
+//! certain context. For example, if the cursor is over `,`, a "swap `,`" assist
+//! becomes available.
+//!
+//! ## Assists Guidelines
+//!
+//! Assists are the main mechanism to deliver advanced IDE features to the user,
+//! so we should pay extra attention to the UX.
+//!
+//! The power of assists comes from their context-awareness. The main problem
+//! with IDE features is that there are a lot of them, and it's hard to teach
+//! the user what's available. Assists solve this problem nicely: 💡 signifies
+//! that *something* is possible, and clicking on it reveals a *short* list of
+//! actions. Contrast it with Emacs `M-x`, which just spits out an infinite list of
+//! all the features.
+//!
+//! Here are some considerations when creating a new assist:
+//!
+//! * It's good to preserve semantics, and it's good to keep the code compiling,
+//! but it isn't necessary. Example: "flip binary operation" might change
+//! semantics.
+//! * An assist shouldn't necessarily make the code "better". A lot of assists come in
+//! pairs: "if let <-> match".
+//! * Assists should have as narrow a scope as possible. Each new assist greatly
+//! improves UX for cases where the user actually invokes it, but it makes UX
+//! worse for every case where the user clicks 💡 to invoke some *other*
+//! assist. So, a rarely useful assist which is always applicable can be a net
+//! negative.
+//! * Rarely useful actions are tricky. Sometimes there are features which are
+//! clearly useful to some users, but are just noise most of the time. We
+//! don't have a good solution here, our current approach is to make this
+//!   functionality available only if the assist is applicable to the whole
+//! selection. Example: `sort_items` sorts items alphabetically. Naively, it
+//! should be available more or less everywhere, which isn't useful. So
+//! instead we only show it if the user *selects* the items they want to sort.
+//! * Consider grouping related assists together (see [`Assists::add_group`]).
+//! * Make assists robust. If the assist depends on results of type-inference too
+//! much, it might only fire in fully-correct code. This makes assist less
+//! useful and (worse) less predictable. The user should have a clear
+//! intuition when each particular assist is available.
+//! * Make small assists, which compose. Example: rather than auto-importing
+//! enums in `add_missing_match_arms`, we use fully-qualified names. There's a
+//! separate assist to shorten a fully-qualified name.
+//! * Distinguish between assists and fixits for diagnostics. Internally, fixits
+//! and assists are equivalent. They have the same "show a list + invoke a
+//! single element" workflow, and both use [`Assist`] data structure. The main
+//! difference is in the UX: while 💡 looks only at the cursor position,
+//! diagnostics squigglies and fixits are calculated for the whole file and
+//! are presented to the user eagerly. So, diagnostics should be fixable
+//! errors, while assists can be just suggestions for an alternative way to do
+//! something. If something *could* be a diagnostic, it should be a
+//! diagnostic. Conversely, it might be valuable to turn a diagnostic with a
+//! lot of false errors into an assist.
+//!
+//! See also this post:
+//! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod assist_config;
+mod assist_context;
+#[cfg(test)]
+mod tests;
+pub mod utils;
+
+use hir::Semantics;
+use ide_db::{base_db::FileRange, RootDatabase};
+use syntax::TextRange;
+
+pub(crate) use crate::assist_context::{AssistContext, Assists};
+
+pub use assist_config::AssistConfig;
+pub use ide_db::assists::{
+ Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, SingleResolve,
+};
+
+/// Return all the assists applicable at the given position.
+///
+// NOTE: We don't have a `Feature: ` section for assists; they are special-cased
+// in the manual.
+pub fn assists(
+ db: &RootDatabase,
+ config: &AssistConfig,
+ resolve: AssistResolveStrategy,
+ range: FileRange,
+) -> Vec<Assist> {
+ let sema = Semantics::new(db);
+ let ctx = AssistContext::new(sema, config, range);
+ let mut acc = Assists::new(&ctx, resolve);
+ handlers::all().iter().for_each(|handler| {
+ handler(&mut acc, &ctx);
+ });
+ acc.finish()
+}
+
+mod handlers {
+ use crate::{AssistContext, Assists};
+
+ pub(crate) type Handler = fn(&mut Assists, &AssistContext<'_>) -> Option<()>;
+
+ mod add_explicit_type;
+ mod add_label_to_loop;
+ mod add_lifetime_to_type;
+ mod add_missing_impl_members;
+ mod add_turbo_fish;
+ mod apply_demorgan;
+ mod auto_import;
+ mod change_visibility;
+ mod convert_bool_then;
+ mod convert_comment_block;
+ mod convert_integer_literal;
+ mod convert_into_to_from;
+ mod convert_iter_for_each_to_for;
+ mod convert_let_else_to_match;
+ mod convert_tuple_struct_to_named_struct;
+ mod convert_to_guarded_return;
+ mod convert_while_to_loop;
+ mod destructure_tuple_binding;
+ mod expand_glob_import;
+ mod extract_function;
+ mod extract_module;
+ mod extract_struct_from_enum_variant;
+ mod extract_type_alias;
+ mod extract_variable;
+ mod add_missing_match_arms;
+ mod fix_visibility;
+ mod flip_binexpr;
+ mod flip_comma;
+ mod flip_trait_bound;
+ mod generate_constant;
+ mod generate_default_from_enum_variant;
+ mod generate_default_from_new;
+ mod generate_deref;
+ mod generate_derive;
+ mod generate_documentation_template;
+ mod generate_enum_is_method;
+ mod generate_enum_projection_method;
+ mod generate_enum_variant;
+ mod generate_from_impl_for_enum;
+ mod generate_function;
+ mod generate_getter;
+ mod generate_impl;
+ mod generate_is_empty_from_len;
+ mod generate_new;
+ mod generate_setter;
+ mod generate_delegate_methods;
+ mod add_return_type;
+ mod inline_call;
+ mod inline_local_variable;
+ mod inline_type_alias;
+ mod introduce_named_lifetime;
+ mod invert_if;
+ mod merge_imports;
+ mod merge_match_arms;
+ mod move_bounds;
+ mod move_guard;
+ mod move_module_to_file;
+ mod move_to_mod_rs;
+ mod move_from_mod_rs;
+ mod number_representation;
+ mod promote_local_to_const;
+ mod pull_assignment_up;
+ mod qualify_path;
+ mod qualify_method_call;
+ mod raw_string;
+ mod remove_dbg;
+ mod remove_mut;
+ mod remove_unused_param;
+ mod reorder_fields;
+ mod reorder_impl_items;
+ mod replace_try_expr_with_match;
+ mod replace_derive_with_manual_impl;
+ mod replace_if_let_with_match;
+ mod introduce_named_generic;
+ mod replace_let_with_if_let;
+ mod replace_qualified_name_with_use;
+ mod replace_string_with_char;
+ mod replace_turbofish_with_explicit_type;
+ mod split_import;
+ mod sort_items;
+ mod toggle_ignore;
+ mod unmerge_use;
+ mod unnecessary_async;
+ mod unwrap_block;
+ mod unwrap_result_return_type;
+ mod wrap_return_type_in_result;
+
+ pub(crate) fn all() -> &'static [Handler] {
+ &[
+ // These are alphabetic for the foolish consistency
+ add_explicit_type::add_explicit_type,
+ add_label_to_loop::add_label_to_loop,
+ add_missing_match_arms::add_missing_match_arms,
+ add_lifetime_to_type::add_lifetime_to_type,
+ add_return_type::add_return_type,
+ add_turbo_fish::add_turbo_fish,
+ apply_demorgan::apply_demorgan,
+ auto_import::auto_import,
+ change_visibility::change_visibility,
+ convert_bool_then::convert_bool_then_to_if,
+ convert_bool_then::convert_if_to_bool_then,
+ convert_comment_block::convert_comment_block,
+ convert_integer_literal::convert_integer_literal,
+ convert_into_to_from::convert_into_to_from,
+ convert_iter_for_each_to_for::convert_iter_for_each_to_for,
+ convert_iter_for_each_to_for::convert_for_loop_with_for_each,
+ convert_let_else_to_match::convert_let_else_to_match,
+ convert_to_guarded_return::convert_to_guarded_return,
+ convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct,
+ convert_while_to_loop::convert_while_to_loop,
+ destructure_tuple_binding::destructure_tuple_binding,
+ expand_glob_import::expand_glob_import,
+ extract_struct_from_enum_variant::extract_struct_from_enum_variant,
+ extract_type_alias::extract_type_alias,
+ fix_visibility::fix_visibility,
+ flip_binexpr::flip_binexpr,
+ flip_comma::flip_comma,
+ flip_trait_bound::flip_trait_bound,
+ generate_constant::generate_constant,
+ generate_default_from_enum_variant::generate_default_from_enum_variant,
+ generate_default_from_new::generate_default_from_new,
+ generate_derive::generate_derive,
+ generate_documentation_template::generate_documentation_template,
+ generate_documentation_template::generate_doc_example,
+ generate_enum_is_method::generate_enum_is_method,
+ generate_enum_projection_method::generate_enum_as_method,
+ generate_enum_projection_method::generate_enum_try_into_method,
+ generate_enum_variant::generate_enum_variant,
+ generate_from_impl_for_enum::generate_from_impl_for_enum,
+ generate_function::generate_function,
+ generate_impl::generate_impl,
+ generate_is_empty_from_len::generate_is_empty_from_len,
+ generate_new::generate_new,
+ inline_call::inline_call,
+ inline_call::inline_into_callers,
+ inline_local_variable::inline_local_variable,
+ inline_type_alias::inline_type_alias,
+ introduce_named_generic::introduce_named_generic,
+ introduce_named_lifetime::introduce_named_lifetime,
+ invert_if::invert_if,
+ merge_imports::merge_imports,
+ merge_match_arms::merge_match_arms,
+ move_bounds::move_bounds_to_where_clause,
+ move_guard::move_arm_cond_to_match_guard,
+ move_guard::move_guard_to_arm_body,
+ move_module_to_file::move_module_to_file,
+ move_to_mod_rs::move_to_mod_rs,
+ move_from_mod_rs::move_from_mod_rs,
+ number_representation::reformat_number_literal,
+ pull_assignment_up::pull_assignment_up,
+ promote_local_to_const::promote_local_to_const,
+ qualify_path::qualify_path,
+ qualify_method_call::qualify_method_call,
+ raw_string::add_hash,
+ raw_string::make_usual_string,
+ raw_string::remove_hash,
+ remove_dbg::remove_dbg,
+ remove_mut::remove_mut,
+ remove_unused_param::remove_unused_param,
+ reorder_fields::reorder_fields,
+ reorder_impl_items::reorder_impl_items,
+ replace_try_expr_with_match::replace_try_expr_with_match,
+ replace_derive_with_manual_impl::replace_derive_with_manual_impl,
+ replace_if_let_with_match::replace_if_let_with_match,
+ replace_if_let_with_match::replace_match_with_if_let,
+ replace_let_with_if_let::replace_let_with_if_let,
+ replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
+ replace_qualified_name_with_use::replace_qualified_name_with_use,
+ sort_items::sort_items,
+ split_import::split_import,
+ toggle_ignore::toggle_ignore,
+ unmerge_use::unmerge_use,
+ unnecessary_async::unnecessary_async,
+ unwrap_block::unwrap_block,
+ unwrap_result_return_type::unwrap_result_return_type,
+ wrap_return_type_in_result::wrap_return_type_in_result,
+ // These are manually sorted for better priorities. By default,
+ // priority is determined by the size of the target range (smaller
+ // target wins). If the ranges are equal, position in this list is
+ // used as a tie-breaker.
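+            // (For example, `extract_variable` and `extract_function` often target the
+            // same range; `extract_variable` is then offered first because it is listed
+            // first here, as the `assist_order_if_expr` test checks.)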
+ add_missing_impl_members::add_missing_impl_members,
+ add_missing_impl_members::add_missing_default_members,
+ //
+ replace_string_with_char::replace_string_with_char,
+ replace_string_with_char::replace_char_with_string,
+ raw_string::make_raw_string,
+ //
+ extract_variable::extract_variable,
+ extract_function::extract_function,
+ extract_module::extract_module,
+ //
+ generate_getter::generate_getter,
+ generate_getter::generate_getter_mut,
+ generate_setter::generate_setter,
+ generate_delegate_methods::generate_delegate_methods,
+ generate_deref::generate_deref,
+            // Are you sure you want to add a new assist here, and not to the
+ // sorted list above?
+ ]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
new file mode 100644
index 000000000..9cd66c6b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -0,0 +1,558 @@
+mod generated;
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen;
+
+use expect_test::expect;
+use hir::{db::DefDatabase, Semantics};
+use ide_db::{
+ base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt},
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ source_change::FileSystemEdit,
+ RootDatabase, SnippetCap,
+};
+use stdx::{format_to, trim_indent};
+use syntax::TextRange;
+use test_utils::{assert_eq_text, extract_offset};
+
+use crate::{
+ assists, handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind,
+ AssistResolveStrategy, Assists, SingleResolve,
+};
+
+pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
+ snippet_cap: SnippetCap::new(true),
+ allowed: None,
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::Plain,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+};
+
+pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
+ RootDatabase::with_single_file(text)
+}
+
+#[track_caller]
+pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), None);
+}
+
+// There is no way to choose which assist within a group you want to test against,
+// so this is here to allow you to choose.
+pub(crate) fn check_assist_by_label(
+ assist: Handler,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+ label: &str,
+) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), Some(label));
+}
+
+// FIXME: instead of having a separate function here, maybe use
+// `extract_ranges` and mark the target as `<target> </target>` in the
+// fixture?
+#[track_caller]
+pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) {
+ check(assist, ra_fixture, ExpectedResult::Target(target), None);
+}
+
+#[track_caller]
+pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
+ check(assist, ra_fixture, ExpectedResult::NotApplicable, None);
+}
+
+/// Check assist in unresolved state. Useful to check assists for lazy computation.
+#[track_caller]
+pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
+ check(assist, ra_fixture, ExpectedResult::Unresolved, None);
+}
+
+#[track_caller]
+fn check_doc_test(assist_id: &str, before: &str, after: &str) {
+ let after = trim_indent(after);
+ let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
+ let before = db.file_text(file_id).to_string();
+ let frange = FileRange { file_id, range: selection.into() };
+
+ let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange)
+ .into_iter()
+ .find(|assist| assist.id.0 == assist_id)
+ .unwrap_or_else(|| {
+ panic!(
+ "\n\nAssist is not applicable: {}\nAvailable assists: {}",
+ assist_id,
+ assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange)
+ .into_iter()
+ .map(|assist| assist.id.0)
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+
+ let actual = {
+ let source_change =
+ assist.source_change.expect("Assist did not contain any source changes");
+ let mut actual = before;
+ if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
+ source_file_edit.apply(&mut actual);
+ }
+ actual
+ };
+ assert_eq_text!(&after, &actual);
+}
+
+enum ExpectedResult<'a> {
+ NotApplicable,
+ Unresolved,
+ After(&'a str),
+ Target(&'a str),
+}
+
+#[track_caller]
+fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_label: Option<&str>) {
+ let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
+ db.set_enable_proc_attr_macros(true);
+ let text_without_caret = db.file_text(file_with_caret_id).to_string();
+
+ let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
+
+ let sema = Semantics::new(&db);
+ let config = TEST_CONFIG;
+ let ctx = AssistContext::new(sema, &config, frange);
+ let resolve = match expected {
+ ExpectedResult::Unresolved => AssistResolveStrategy::None,
+ _ => AssistResolveStrategy::All,
+ };
+ let mut acc = Assists::new(&ctx, resolve);
+ handler(&mut acc, &ctx);
+ let mut res = acc.finish();
+
+ let assist = match assist_label {
+ Some(label) => res.into_iter().find(|resolved| resolved.label == label),
+ None => res.pop(),
+ };
+
+ match (assist, expected) {
+ (Some(assist), ExpectedResult::After(after)) => {
+ let source_change =
+ assist.source_change.expect("Assist did not contain any source changes");
+ let skip_header = source_change.source_file_edits.len() == 1
+ && source_change.file_system_edits.len() == 0;
+
+ let mut buf = String::new();
+ for (file_id, edit) in source_change.source_file_edits {
+ let mut text = db.file_text(file_id).as_ref().to_owned();
+ edit.apply(&mut text);
+ if !skip_header {
+ let sr = db.file_source_root(file_id);
+ let sr = db.source_root(sr);
+ let path = sr.path_for_file(&file_id).unwrap();
+ format_to!(buf, "//- {}\n", path)
+ }
+ buf.push_str(&text);
+ }
+
+ for file_system_edit in source_change.file_system_edits {
+ let (dst, contents) = match file_system_edit {
+ FileSystemEdit::CreateFile { dst, initial_contents } => (dst, initial_contents),
+ FileSystemEdit::MoveFile { src, dst } => {
+ (dst, db.file_text(src).as_ref().to_owned())
+ }
+ FileSystemEdit::MoveDir { src, src_id, dst } => {
+ // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet.
+ (dst, format!("{:?}\n{:?}", src_id, src))
+ }
+ };
+ let sr = db.file_source_root(dst.anchor);
+ let sr = db.source_root(sr);
+ let mut base = sr.path_for_file(&dst.anchor).unwrap().clone();
+ base.pop();
+ let created_file_path = base.join(&dst.path).unwrap();
+ format_to!(buf, "//- {}\n", created_file_path);
+ buf.push_str(&contents);
+ }
+
+ assert_eq_text!(after, &buf);
+ }
+ (Some(assist), ExpectedResult::Target(target)) => {
+ let range = assist.target;
+ assert_eq_text!(&text_without_caret[range], target);
+ }
+ (Some(assist), ExpectedResult::Unresolved) => assert!(
+ assist.source_change.is_none(),
+ "unresolved assist should not contain source changes"
+ ),
+ (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"),
+ (
+ None,
+ ExpectedResult::After(_) | ExpectedResult::Target(_) | ExpectedResult::Unresolved,
+ ) => {
+ panic!("code action is not applicable")
+ }
+ (None, ExpectedResult::NotApplicable) => (),
+ };
+}
+
+fn labels(assists: &[Assist]) -> String {
+ let mut labels = assists
+ .iter()
+ .map(|assist| {
+ let mut label = match &assist.group {
+ Some(g) => g.0.clone(),
+ None => assist.label.to_string(),
+ };
+ label.push('\n');
+ label
+ })
+ .collect::<Vec<_>>();
+ labels.dedup();
+ labels.into_iter().collect::<String>()
+}
+
+#[test]
+fn assist_order_field_struct() {
+ let before = "struct Foo { $0bar: u32 }";
+ let (before_cursor_pos, before) = extract_offset(before);
+ let (db, file_id) = with_single_file(&before);
+ let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
+ let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
+ let mut assists = assists.iter();
+
+ assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)");
+ assert_eq!(assists.next().expect("expected assist").label, "Generate a getter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Generate a mut getter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Generate a setter method");
+ assert_eq!(assists.next().expect("expected assist").label, "Add `#[derive]`");
+}
+
+#[test]
+fn assist_order_if_expr() {
+ let (db, frange) = RootDatabase::with_range(
+ r#"
+pub fn test_some_range(a: int) -> bool {
+ if let 2..6 = $05$0 {
+ true
+ } else {
+ false
+ }
+}
+"#,
+ );
+
+ let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#"
+ Convert integer base
+ Extract into variable
+ Extract into function
+ Replace if let with match
+ "#]]
+ .assert_eq(&expected);
+}
+
+#[test]
+fn assist_filter_works() {
+ let (db, frange) = RootDatabase::with_range(
+ r#"
+pub fn test_some_range(a: int) -> bool {
+ if let 2..6 = $05$0 {
+ true
+ } else {
+ false
+ }
+}
+"#,
+ );
+ {
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::Refactor]);
+
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#"
+ Convert integer base
+ Extract into variable
+ Extract into function
+ Replace if let with match
+ "#]]
+ .assert_eq(&expected);
+ }
+
+ {
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#"
+ Extract into variable
+ Extract into function
+ "#]]
+ .assert_eq(&expected);
+ }
+
+ {
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::QuickFix]);
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ let expected = labels(&assists);
+
+ expect![[r#""#]].assert_eq(&expected);
+ }
+}
+
+#[test]
+fn various_resolve_strategies() {
+ let (db, frange) = RootDatabase::with_range(
+ r#"
+pub fn test_some_range(a: int) -> bool {
+ if let 2..6 = $05$0 {
+ true
+ } else {
+ false
+ }
+}
+"#,
+ );
+
+ let mut cfg = TEST_CONFIG;
+ cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
+
+ {
+ let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+
+ {
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::Single(SingleResolve {
+ assist_id: "SOMETHING_MISMATCHING".to_string(),
+ assist_kind: AssistKind::RefactorExtract,
+ }),
+ frange,
+ );
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+
+ {
+ let assists = assists(
+ &db,
+ &cfg,
+ AssistResolveStrategy::Single(SingleResolve {
+ assist_id: "extract_variable".to_string(),
+ assist_kind: AssistKind::RefactorExtract,
+ }),
+ frange,
+ );
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: true,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+
+ {
+ let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange);
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let extract_into_variable_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_variable",
+ RefactorExtract,
+ ),
+ label: "Extract into variable",
+ group: None,
+ target: 59..60,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: true,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_variable_assist);
+
+ let extract_into_function_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "extract_function",
+ RefactorExtract,
+ ),
+ label: "Extract into function",
+ group: None,
+ target: 59..60,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "fun_name()",
+ delete: 59..60,
+ },
+ Indel {
+ insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
+ delete: 110..110,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: true,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&extract_into_function_assist);
+ }
+}
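
The `various_resolve_strategies` test above exercises the three resolve strategies: `None` never computes `source_change`, `Single` computes it only for the one assist whose id and kind match, and `All` computes it for every returned assist. The following is an illustrative sketch of that dispatch, not the crate's actual code; the type and field names are taken from the test fixtures above.

```rust
// Sketch only: how a resolve strategy decides whether an assist's edit
// (`source_change`) is computed eagerly or left as `None`.
#[derive(Clone, Copy, PartialEq, Eq)]
enum AssistKind { QuickFix, Refactor, RefactorExtract }

struct AssistId(&'static str, AssistKind);

struct SingleResolve { assist_id: String, assist_kind: AssistKind }

enum AssistResolveStrategy { None, All, Single(SingleResolve) }

impl AssistResolveStrategy {
    fn should_resolve(&self, id: &AssistId) -> bool {
        match self {
            AssistResolveStrategy::None => false,
            AssistResolveStrategy::All => true,
            AssistResolveStrategy::Single(single) => {
                single.assist_id == id.0 && single.assist_kind == id.1
            }
        }
    }
}
```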
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
new file mode 100644
index 000000000..6eaab48a3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -0,0 +1,2259 @@
+//! Generated by `sourcegen_assists_docs`, do not edit by hand.
+
+use super::check_doc_test;
+
+#[test]
+fn doctest_add_explicit_type() {
+ check_doc_test(
+ "add_explicit_type",
+ r#####"
+fn main() {
+ let x$0 = 92;
+}
+"#####,
+ r#####"
+fn main() {
+ let x: i32 = 92;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_hash() {
+ check_doc_test(
+ "add_hash",
+ r#####"
+fn main() {
+ r#"Hello,$0 World!"#;
+}
+"#####,
+ r#####"
+fn main() {
+ r##"Hello, World!"##;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_impl_default_members() {
+ check_doc_test(
+ "add_impl_default_members",
+ r#####"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}$0
+}
+"#####,
+ r#####"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}
+
+ $0fn bar(&self) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_impl_missing_members() {
+ check_doc_test(
+ "add_impl_missing_members",
+ r#####"
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {$0
+
+}
+"#####,
+ r#####"
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {
+ $0type X;
+
+ fn foo(&self) -> u32 {
+ todo!()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_label_to_loop() {
+ check_doc_test(
+ "add_label_to_loop",
+ r#####"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_lifetime_to_type() {
+ check_doc_test(
+ "add_lifetime_to_type",
+ r#####"
+struct Point {
+ x: &$0u32,
+ y: u32,
+}
+"#####,
+ r#####"
+struct Point<'a> {
+ x: &'a u32,
+ y: u32,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_missing_match_arms() {
+ check_doc_test(
+ "add_missing_match_arms",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0Action::Move { distance } => todo!(),
+ Action::Stop => todo!(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_return_type() {
+ check_doc_test(
+ "add_return_type",
+ r#####"
+fn foo() { 4$02i32 }
+"#####,
+ r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_turbo_fish() {
+ check_doc_test(
+ "add_turbo_fish",
+ r#####"
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make$0();
+}
+"#####,
+ r#####"
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make::<${0:_}>();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_apply_demorgan() {
+ check_doc_test(
+ "apply_demorgan",
+ r#####"
+fn main() {
+ if x != 4 ||$0 y < 3.14 {}
+}
+"#####,
+ r#####"
+fn main() {
+ if !(x == 4 && y >= 3.14) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_auto_import() {
+ check_doc_test(
+ "auto_import",
+ r#####"
+fn main() {
+ let map = HashMap$0::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ r#####"
+use std::collections::HashMap;
+
+fn main() {
+ let map = HashMap::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_change_visibility() {
+ check_doc_test(
+ "change_visibility",
+ r#####"
+$0fn frobnicate() {}
+"#####,
+ r#####"
+pub(crate) fn frobnicate() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_bool_then_to_if() {
+ check_doc_test(
+ "convert_bool_then_to_if",
+ r#####"
+//- minicore: bool_impl
+fn main() {
+ (0 == 0).then$0(|| val)
+}
+"#####,
+ r#####"
+fn main() {
+ if 0 == 0 {
+ Some(val)
+ } else {
+ None
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_for_loop_with_for_each() {
+ check_doc_test(
+ "convert_for_loop_with_for_each",
+ r#####"
+fn main() {
+ let x = vec![1, 2, 3];
+ for$0 v in x {
+ let y = v * 2;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let x = vec![1, 2, 3];
+ x.into_iter().for_each(|v| {
+ let y = v * 2;
+ });
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_if_to_bool_then() {
+ check_doc_test(
+ "convert_if_to_bool_then",
+ r#####"
+//- minicore: option
+fn main() {
+ if$0 cond {
+ Some(val)
+ } else {
+ None
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ cond.then(|| val)
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_integer_literal() {
+ check_doc_test(
+ "convert_integer_literal",
+ r#####"
+const _: i32 = 10$0;
+"#####,
+ r#####"
+const _: i32 = 0b1010;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_into_to_from() {
+ check_doc_test(
+ "convert_into_to_from",
+ r#####"
+//- minicore: from
+impl $0Into<Thing> for usize {
+ fn into(self) -> Thing {
+ Thing {
+ b: self.to_string(),
+ a: self
+ }
+ }
+}
+"#####,
+ r#####"
+impl From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_iter_for_each_to_for() {
+ check_doc_test(
+ "convert_iter_for_each_to_for",
+ r#####"
+//- minicore: iterators
+use core::iter;
+fn main() {
+ let iter = iter::repeat((9, 2));
+ iter.for_each$0(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ });
+}
+"#####,
+ r#####"
+use core::iter;
+fn main() {
+ let iter = iter::repeat((9, 2));
+ for (x, y) in iter {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_let_else_to_match() {
+ check_doc_test(
+ "convert_let_else_to_match",
+ r#####"
+fn main() {
+ let Ok(mut x) = f() else$0 { return };
+}
+"#####,
+ r#####"
+fn main() {
+ let mut x = match f() {
+ Ok(x) => x,
+ _ => return,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_to_guarded_return() {
+ check_doc_test(
+ "convert_to_guarded_return",
+ r#####"
+fn main() {
+ $0if cond {
+ foo();
+ bar();
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ if !cond {
+ return;
+ }
+ foo();
+ bar();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_tuple_struct_to_named_struct() {
+ check_doc_test(
+ "convert_tuple_struct_to_named_struct",
+ r#####"
+struct Point$0(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+"#####,
+ r#####"
+struct Point { field1: f32, field2: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { field1: x, field2: y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.field1
+ }
+
+ pub fn y(&self) -> f32 {
+ self.field2
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_while_to_loop() {
+ check_doc_test(
+ "convert_while_to_loop",
+ r#####"
+fn main() {
+ $0while cond {
+ foo();
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ loop {
+ if !cond {
+ break;
+ }
+ foo();
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_destructure_tuple_binding() {
+ check_doc_test(
+ "destructure_tuple_binding",
+ r#####"
+fn main() {
+ let $0t = (1,2);
+ let v = t.0;
+}
+"#####,
+ r#####"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = _0;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_expand_glob_import() {
+ check_doc_test(
+ "expand_glob_import",
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::*$0;
+
+fn qux(bar: Bar, baz: Baz) {}
+"#####,
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::{Bar, Baz};
+
+fn qux(bar: Bar, baz: Baz) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_function() {
+ check_doc_test(
+ "extract_function",
+ r#####"
+fn main() {
+ let n = 1;
+ $0let m = n + 2;
+ // calculate
+ let k = m + n;$0
+ let g = 3;
+}
+"#####,
+ r#####"
+fn main() {
+ let n = 1;
+ fun_name(n);
+ let g = 3;
+}
+
+fn $0fun_name(n: i32) {
+ let m = n + 2;
+ // calculate
+ let k = m + n;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_module() {
+ check_doc_test(
+ "extract_module",
+ r#####"
+$0fn foo(name: i32) -> i32 {
+ name + 1
+}$0
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+"#####,
+ r#####"
+mod modname {
+ pub(crate) fn foo(name: i32) -> i32 {
+ name + 1
+ }
+}
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_struct_from_enum_variant() {
+ check_doc_test(
+ "extract_struct_from_enum_variant",
+ r#####"
+enum A { $0One(u32, u32) }
+"#####,
+ r#####"
+struct One(u32, u32);
+
+enum A { One(One) }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_type_alias() {
+ check_doc_test(
+ "extract_type_alias",
+ r#####"
+struct S {
+ field: $0(u8, u8, u8)$0,
+}
+"#####,
+ r#####"
+type $0Type = (u8, u8, u8);
+
+struct S {
+ field: Type,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_variable() {
+ check_doc_test(
+ "extract_variable",
+ r#####"
+fn main() {
+ $0(1 + 2)$0 * 4;
+}
+"#####,
+ r#####"
+fn main() {
+ let $0var_name = (1 + 2);
+ var_name * 4;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_fix_visibility() {
+ check_doc_test(
+ "fix_visibility",
+ r#####"
+mod m {
+ fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate$0() {}
+}
+"#####,
+ r#####"
+mod m {
+ $0pub(crate) fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate() {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_binexpr() {
+ check_doc_test(
+ "flip_binexpr",
+ r#####"
+fn main() {
+ let _ = 90 +$0 2;
+}
+"#####,
+ r#####"
+fn main() {
+ let _ = 2 + 90;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_comma() {
+ check_doc_test(
+ "flip_comma",
+ r#####"
+fn main() {
+ ((1, 2),$0 (3, 4));
+}
+"#####,
+ r#####"
+fn main() {
+ ((3, 4), (1, 2));
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_trait_bound() {
+ check_doc_test(
+ "flip_trait_bound",
+ r#####"
+fn foo<T: Clone +$0 Copy>() { }
+"#####,
+ r#####"
+fn foo<T: Copy + Clone>() { }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_constant() {
+ check_doc_test(
+ "generate_constant",
+ r#####"
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ let v = S::new(CAPA$0CITY);
+}
+"#####,
+ r#####"
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ const CAPACITY: usize = $0;
+ let v = S::new(CAPACITY);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_default_from_enum_variant() {
+ check_doc_test(
+ "generate_default_from_enum_variant",
+ r#####"
+enum Version {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#####,
+ r#####"
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Default for Version {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_default_from_new() {
+ check_doc_test(
+ "generate_default_from_new",
+ r#####"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+}
+"#####,
+ r#####"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_delegate_methods() {
+ check_doc_test(
+ "generate_delegate_methods",
+ r#####"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}
+"#####,
+ r#####"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_deref() {
+ check_doc_test(
+ "generate_deref",
+ r#####"
+//- minicore: deref, deref_mut
+struct A;
+struct B {
+ $0a: A
+}
+"#####,
+ r#####"
+struct A;
+struct B {
+ a: A
+}
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_derive() {
+ check_doc_test(
+ "generate_derive",
+ r#####"
+struct Point {
+ x: u32,
+ y: u32,$0
+}
+"#####,
+ r#####"
+#[derive($0)]
+struct Point {
+ x: u32,
+ y: u32,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_doc_example() {
+ check_doc_test(
+ "generate_doc_example",
+ r#####"
+/// Adds two numbers.$0
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+"#####,
+ r#####"
+/// Adds two numbers.
+///
+/// # Examples
+///
+/// ```
+/// use test::add;
+///
+/// assert_eq!(add(a, b), );
+/// ```
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_documentation_template() {
+ check_doc_test(
+ "generate_documentation_template",
+ r#####"
+pub struct S;
+impl S {
+ pub unsafe fn set_len$0(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+"#####,
+ r#####"
+pub struct S;
+impl S {
+ /// Sets the length of this [`S`].
+ ///
+ /// # Errors
+ ///
+ /// This function will return an error if .
+ ///
+ /// # Safety
+ ///
+ /// .
+ pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_as_method() {
+ check_doc_test(
+ "generate_enum_as_method",
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+"#####,
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_is_method() {
+ check_doc_test(
+ "generate_enum_is_method",
+ r#####"
+enum Version {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#####,
+ r#####"
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Version {
+ /// Returns `true` if the version is [`Minor`].
+ ///
+ /// [`Minor`]: Version::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_try_into_method() {
+ check_doc_test(
+ "generate_enum_try_into_method",
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+"#####,
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_variant() {
+ check_doc_test(
+ "generate_enum_variant",
+ r#####"
+enum Countries {
+ Ghana,
+}
+
+fn main() {
+ let country = Countries::Lesotho$0;
+}
+"#####,
+ r#####"
+enum Countries {
+ Ghana,
+ Lesotho,
+}
+
+fn main() {
+ let country = Countries::Lesotho;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_from_impl_for_enum() {
+ check_doc_test(
+ "generate_from_impl_for_enum",
+ r#####"
+enum A { $0One(u32) }
+"#####,
+ r#####"
+enum A { One(u32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_function() {
+ check_doc_test(
+ "generate_function",
+ r#####"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar$0("", baz());
+}
+
+"#####,
+ r#####"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar("", baz());
+}
+
+fn bar(arg: &str, baz: Baz) ${0:-> _} {
+ todo!()
+}
+
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_getter() {
+ check_doc_test(
+ "generate_getter",
+ r#####"
+//- minicore: as_ref
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn $0name(&self) -> &str {
+ self.name.as_ref()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_getter_mut() {
+ check_doc_test(
+ "generate_getter_mut",
+ r#####"
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn $0name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_impl() {
+ check_doc_test(
+ "generate_impl",
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,$0
+}
+"#####,
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ $0
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_is_empty_from_len() {
+ check_doc_test(
+ "generate_is_empty_from_len",
+ r#####"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+"#####,
+ r#####"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_new() {
+ check_doc_test(
+ "generate_new",
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,$0
+}
+"#####,
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ fn $0new(data: T) -> Self { Self { data } }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_setter() {
+ check_doc_test(
+ "generate_setter",
+ r#####"
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn set_name(&mut self, name: String) {
+ self.name = name;
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_call() {
+ check_doc_test(
+ "inline_call",
+ r#####"
+//- minicore: option
+fn foo(name: Option<&str>) {
+ let name = name.unwrap$0();
+}
+"#####,
+ r#####"
+fn foo(name: Option<&str>) {
+ let name = match name {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_into_callers() {
+ check_doc_test(
+ "inline_into_callers",
+ r#####"
+fn print(_: &str) {}
+fn foo$0(word: &str) {
+ if !word.is_empty() {
+ print(word);
+ }
+}
+fn bar() {
+ foo("안녕하세요");
+ foo("여러분");
+}
+"#####,
+ r#####"
+fn print(_: &str) {}
+
+fn bar() {
+ {
+ let word = "안녕하세요";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+ {
+ let word = "여러분";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_local_variable() {
+ check_doc_test(
+ "inline_local_variable",
+ r#####"
+fn main() {
+ let x$0 = 1 + 2;
+ x * 4;
+}
+"#####,
+ r#####"
+fn main() {
+ (1 + 2) * 4;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_type_alias() {
+ check_doc_test(
+ "inline_type_alias",
+ r#####"
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: $0A;
+}
+"#####,
+ r#####"
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: Vec<u32>;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_introduce_named_generic() {
+ check_doc_test(
+ "introduce_named_generic",
+ r#####"
+fn foo(bar: $0impl Bar) {}
+"#####,
+ r#####"
+fn foo<B: Bar>(bar: B) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_introduce_named_lifetime() {
+ check_doc_test(
+ "introduce_named_lifetime",
+ r#####"
+impl Cursor<'_$0> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+"#####,
+ r#####"
+impl<'a> Cursor<'a> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_invert_if() {
+ check_doc_test(
+ "invert_if",
+ r#####"
+fn main() {
+ if$0 !y { A } else { B }
+}
+"#####,
+ r#####"
+fn main() {
+ if y { B } else { A }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_line_to_block() {
+ check_doc_test(
+ "line_to_block",
+ r#####"
+ // Multi-line$0
+ // comment
+"#####,
+ r#####"
+ /*
+ Multi-line
+ comment
+ */
+"#####,
+ )
+}
+
+#[test]
+fn doctest_make_raw_string() {
+ check_doc_test(
+ "make_raw_string",
+ r#####"
+fn main() {
+ "Hello,$0 World!";
+}
+"#####,
+ r#####"
+fn main() {
+ r#"Hello, World!"#;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_make_usual_string() {
+ check_doc_test(
+ "make_usual_string",
+ r#####"
+fn main() {
+ r#"Hello,$0 "World!""#;
+}
+"#####,
+ r#####"
+fn main() {
+ "Hello, \"World!\"";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_merge_imports() {
+ check_doc_test(
+ "merge_imports",
+ r#####"
+use std::$0fmt::Formatter;
+use std::io;
+"#####,
+ r#####"
+use std::{fmt::Formatter, io};
+"#####,
+ )
+}
+
+#[test]
+fn doctest_merge_match_arms() {
+ check_doc_test(
+ "merge_match_arms",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) | Action::Stop => foo(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_arm_cond_to_match_guard() {
+ check_doc_test(
+ "move_arm_cond_to_match_guard",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => $0if distance > 10 { foo() },
+ _ => (),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } if distance > 10 => foo(),
+ _ => (),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_bounds_to_where_clause() {
+ check_doc_test(
+ "move_bounds_to_where_clause",
+ r#####"
+fn apply<T, U, $0F: FnOnce(T) -> U>(f: F, x: T) -> U {
+ f(x)
+}
+"#####,
+ r#####"
+fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
+ f(x)
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_from_mod_rs() {
+ check_doc_test(
+ "move_from_mod_rs",
+ r#####"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}$0
+"#####,
+ r#####"
+fn t() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_guard_to_arm_body() {
+ check_doc_test(
+ "move_guard_to_arm_body",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } $0if distance > 10 => foo(),
+ _ => (),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => if distance > 10 {
+ foo()
+ },
+ _ => (),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_module_to_file() {
+ check_doc_test(
+ "move_module_to_file",
+ r#####"
+mod $0foo {
+ fn t() {}
+}
+"#####,
+ r#####"
+mod foo;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_to_mod_rs() {
+ check_doc_test(
+ "move_to_mod_rs",
+ r#####"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}$0
+"#####,
+ r#####"
+fn t() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_promote_local_to_const() {
+ check_doc_test(
+ "promote_local_to_const",
+ r#####"
+fn main() {
+ let foo$0 = true;
+
+ if foo {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ const $0FOO: bool = true;
+
+ if FOO {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_pull_assignment_up() {
+ check_doc_test(
+ "pull_assignment_up",
+ r#####"
+fn main() {
+ let mut foo = 6;
+
+ if true {
+ $0foo = 5;
+ } else {
+ foo = 4;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let mut foo = 6;
+
+ foo = if true {
+ 5
+ } else {
+ 4
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_qualify_method_call() {
+ check_doc_test(
+ "qualify_method_call",
+ r#####"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ foo.fo$0o();
+}
+"#####,
+ r#####"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ Foo::foo(&foo);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_qualify_path() {
+ check_doc_test(
+ "qualify_path",
+ r#####"
+fn main() {
+ let map = HashMap$0::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ r#####"
+fn main() {
+ let map = std::collections::HashMap::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reformat_number_literal() {
+ check_doc_test(
+ "reformat_number_literal",
+ r#####"
+const _: i32 = 1012345$0;
+"#####,
+ r#####"
+const _: i32 = 1_012_345;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_dbg() {
+ check_doc_test(
+ "remove_dbg",
+ r#####"
+fn main() {
+ $0dbg!(92);
+}
+"#####,
+ r#####"
+fn main() {
+ 92;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_hash() {
+ check_doc_test(
+ "remove_hash",
+ r#####"
+fn main() {
+ r#"Hello,$0 World!"#;
+}
+"#####,
+ r#####"
+fn main() {
+ r"Hello, World!";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_mut() {
+ check_doc_test(
+ "remove_mut",
+ r#####"
+impl Walrus {
+ fn feed(&mut$0 self, amount: u32) {}
+}
+"#####,
+ r#####"
+impl Walrus {
+ fn feed(&self, amount: u32) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_unused_param() {
+ check_doc_test(
+ "remove_unused_param",
+ r#####"
+fn frobnicate(x: i32$0) {}
+
+fn main() {
+ frobnicate(92);
+}
+"#####,
+ r#####"
+fn frobnicate() {}
+
+fn main() {
+ frobnicate();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reorder_fields() {
+ check_doc_test(
+ "reorder_fields",
+ r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = $0Foo {bar: 0, foo: 1}
+"#####,
+ r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = Foo {foo: 1, bar: 0}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reorder_impl_items() {
+ check_doc_test(
+ "reorder_impl_items",
+ r#####"
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+$0impl Foo for Bar {
+ const B: u8 = 17;
+ fn c() {}
+ type A = String;
+}
+"#####,
+ r#####"
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+impl Foo for Bar {
+ type A = String;
+ const B: u8 = 17;
+ fn c() {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_char_with_string() {
+ check_doc_test(
+ "replace_char_with_string",
+ r#####"
+fn main() {
+ find('{$0');
+}
+"#####,
+ r#####"
+fn main() {
+ find("{");
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_derive_with_manual_impl() {
+ check_doc_test(
+ "replace_derive_with_manual_impl",
+ r#####"
+//- minicore: derive
+trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+#[derive(Deb$0ug, Display)]
+struct S;
+"#####,
+ r#####"
+trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+#[derive(Display)]
+struct S;
+
+impl Debug for S {
+ $0fn fmt(&self, f: &mut Formatter) -> Result<()> {
+ f.debug_struct("S").finish()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_if_let_with_match() {
+ check_doc_test(
+ "replace_if_let_with_match",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ $0if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_let_with_if_let() {
+ check_doc_test(
+ "replace_let_with_if_let",
+ r#####"
+enum Option<T> { Some(T), None }
+
+fn main(action: Action) {
+ $0let x = compute();
+}
+
+fn compute() -> Option<i32> { None }
+"#####,
+ r#####"
+enum Option<T> { Some(T), None }
+
+fn main(action: Action) {
+ if let Some(x) = compute() {
+ }
+}
+
+fn compute() -> Option<i32> { None }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_match_with_if_let() {
+ check_doc_test(
+ "replace_match_with_if_let",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ $0match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_qualified_name_with_use() {
+ check_doc_test(
+ "replace_qualified_name_with_use",
+ r#####"
+mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+fn process(map: std::collections::$0HashMap<String, String>) {}
+"#####,
+ r#####"
+use std::collections::HashMap;
+
+mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+fn process(map: HashMap<String, String>) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_string_with_char() {
+ check_doc_test(
+ "replace_string_with_char",
+ r#####"
+fn main() {
+ find("{$0");
+}
+"#####,
+ r#####"
+fn main() {
+ find('{');
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_try_expr_with_match() {
+ check_doc_test(
+ "replace_try_expr_with_match",
+ r#####"
+//- minicore:option
+fn handle() {
+ let pat = Some(true)$0?;
+}
+"#####,
+ r#####"
+fn handle() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_turbofish_with_explicit_type() {
+ check_doc_test(
+ "replace_turbofish_with_explicit_type",
+ r#####"
+fn make<T>() -> T { ) }
+fn main() {
+ let a = make$0::<i32>();
+}
+"#####,
+ r#####"
+fn make<T>() -> T { ) }
+fn main() {
+ let a: i32 = make();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+struct $0Foo$0 { second: u32, first: String }
+"#####,
+ r#####"
+struct Foo { first: String, second: u32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_1() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+trait $0Bar$0 {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+"#####,
+ r#####"
+trait Bar {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_2() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+struct Baz;
+impl $0Baz$0 {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+"#####,
+ r#####"
+struct Baz;
+impl Baz {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_3() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+enum $0Animal$0 {
+ Dog(String, f64),
+ Cat { weight: f64, name: String },
+}
+"#####,
+ r#####"
+enum Animal {
+ Cat { weight: f64, name: String },
+ Dog(String, f64),
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_4() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+enum Animal {
+ Dog(String, f64),
+ Cat $0{ weight: f64, name: String }$0,
+}
+"#####,
+ r#####"
+enum Animal {
+ Dog(String, f64),
+ Cat { name: String, weight: f64 },
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_split_import() {
+ check_doc_test(
+ "split_import",
+ r#####"
+use std::$0collections::HashMap;
+"#####,
+ r#####"
+use std::{collections::HashMap};
+"#####,
+ )
+}
+
+#[test]
+fn doctest_toggle_ignore() {
+ check_doc_test(
+ "toggle_ignore",
+ r#####"
+$0#[test]
+fn arithmetics {
+ assert_eq!(2 + 2, 5);
+}
+"#####,
+ r#####"
+#[test]
+#[ignore]
+fn arithmetics {
+ assert_eq!(2 + 2, 5);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unmerge_use() {
+ check_doc_test(
+ "unmerge_use",
+ r#####"
+use std::fmt::{Debug, Display$0};
+"#####,
+ r#####"
+use std::fmt::{Debug};
+use std::fmt::Display;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unnecessary_async() {
+ check_doc_test(
+ "unnecessary_async",
+ r#####"
+pub async f$0n foo() {}
+pub async fn bar() { foo().await }
+"#####,
+ r#####"
+pub fn foo() {}
+pub async fn bar() { foo() }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_block() {
+ check_doc_test(
+ "unwrap_block",
+ r#####"
+fn foo() {
+ if true {$0
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+fn foo() {
+ println!("foo");
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_result_return_type() {
+ check_doc_test(
+ "unwrap_result_return_type",
+ r#####"
+//- minicore: result
+fn foo() -> Result<i32>$0 { Ok(42i32) }
+"#####,
+ r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_wrap_return_type_in_result() {
+ check_doc_test(
+ "wrap_return_type_in_result",
+ r#####"
+//- minicore: result
+fn foo() -> i32$0 { 42i32 }
+"#####,
+ r#####"
+fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+"#####,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs
new file mode 100644
index 000000000..070b83d3c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs
@@ -0,0 +1,195 @@
+//! Generates `assists.md` documentation.
+
+use std::{fmt, fs, path::Path};
+
+use test_utils::project_root;
+
+#[test]
+fn sourcegen_assists_docs() {
+ let assists = Assist::collect();
+
+ {
+ // Generate doctests.
+
+ let mut buf = "
+use super::check_doc_test;
+"
+ .to_string();
+ for assist in assists.iter() {
+ for (idx, section) in assist.sections.iter().enumerate() {
+ let test_id =
+ if idx == 0 { assist.id.clone() } else { format!("{}_{}", &assist.id, idx) };
+ let test = format!(
+ r######"
+#[test]
+fn doctest_{}() {{
+ check_doc_test(
+ "{}",
+r#####"
+{}"#####, r#####"
+{}"#####)
+}}
+"######,
+ &test_id,
+ &assist.id,
+ reveal_hash_comments(&section.before),
+ reveal_hash_comments(&section.after)
+ );
+
+ buf.push_str(&test)
+ }
+ }
+ let buf = sourcegen::add_preamble("sourcegen_assists_docs", sourcegen::reformat(buf));
+ sourcegen::ensure_file_contents(
+ &project_root().join("crates/ide-assists/src/tests/generated.rs"),
+ &buf,
+ );
+ }
+
+ {
+ // Generate assists manual. Note that we do _not_ commit manual to the
+ // git repo. Instead, `cargo xtask release` runs this test before making
+ // a release.
+
+ let contents = sourcegen::add_preamble(
+ "sourcegen_assists_docs",
+ assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"),
+ );
+ let dst = project_root().join("docs/user/generated_assists.adoc");
+ fs::write(dst, contents).unwrap();
+ }
+}
+
+#[derive(Debug)]
+struct Section {
+ doc: String,
+ before: String,
+ after: String,
+}
+
+#[derive(Debug)]
+struct Assist {
+ id: String,
+ location: sourcegen::Location,
+ sections: Vec<Section>,
+}
+
+impl Assist {
+ fn collect() -> Vec<Assist> {
+ let handlers_dir = project_root().join("crates/ide-assists/src/handlers");
+
+ let mut res = Vec::new();
+ for path in sourcegen::list_rust_files(&handlers_dir) {
+ collect_file(&mut res, path.as_path());
+ }
+ res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
+ return res;
+
+ fn collect_file(acc: &mut Vec<Assist>, path: &Path) {
+ let text = fs::read_to_string(path).unwrap();
+ let comment_blocks = sourcegen::CommentBlock::extract("Assist", &text);
+
+ for block in comment_blocks {
+ // FIXME: doesn't support blank lines yet, need to tweak
+ // `extract_comment_blocks` for that.
+ let id = block.id;
+ assert!(
+ id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
+ "invalid assist id: {:?}",
+ id
+ );
+ let mut lines = block.contents.iter().peekable();
+ let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };
+ let mut assist = Assist { id, location, sections: Vec::new() };
+
+ while lines.peek().is_some() {
+ let doc = take_until(lines.by_ref(), "```").trim().to_string();
+ assert!(
+ (doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'))
+ || assist.sections.len() > 0,
+ "\n\n{}: assist docs should be proper sentences, with capitalization and a full stop at the end.\n\n{}\n\n",
+ &assist.id,
+ doc,
+ );
+
+ let before = take_until(lines.by_ref(), "```");
+
+ assert_eq!(lines.next().unwrap().as_str(), "->");
+ assert_eq!(lines.next().unwrap().as_str(), "```");
+ let after = take_until(lines.by_ref(), "```");
+
+ assist.sections.push(Section { doc, before, after });
+ }
+
+ acc.push(assist)
+ }
+ }
+
+ fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
+ let mut buf = Vec::new();
+ for line in lines {
+ if line == marker {
+ break;
+ }
+ buf.push(line.clone());
+ }
+ buf.join("\n")
+ }
+ }
+}
+
+impl fmt::Display for Assist {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let _ = writeln!(
+ f,
+ "[discrete]\n=== `{}`
+**Source:** {}",
+ self.id, self.location,
+ );
+
+ for section in &self.sections {
+ let before = section.before.replace("$0", "┃"); // Unicode pseudo-graphics bar
+ let after = section.after.replace("$0", "┃");
+ let _ = writeln!(
+ f,
+ "
+{}
+
+.Before
+```rust
+{}```
+
+.After
+```rust
+{}```",
+ section.doc,
+ hide_hash_comments(&before),
+ hide_hash_comments(&after)
+ );
+ }
+
+ Ok(())
+ }
+}
+
+fn hide_hash_comments(text: &str) -> String {
+ text.split('\n') // want final newline
+ .filter(|&it| !(it.starts_with("# ") || it == "#"))
+ .map(|it| format!("{}\n", it))
+ .collect()
+}
+
+fn reveal_hash_comments(text: &str) -> String {
+ text.split('\n') // want final newline
+ .map(|it| {
+ if let Some(stripped) = it.strip_prefix("# ") {
+ stripped
+ } else if it == "#" {
+ ""
+ } else {
+ it
+ }
+ })
+ .map(|it| format!("{}\n", it))
+ .collect()
+}
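
In the assist documentation snippets, lines that start with `# ` play the same role as rustdoc's hidden doctest lines: they are dropped from the rendered manual but kept, with the prefix stripped, in the generated doctests. A small hedged illustration of the two helpers above, written as a test that could sit in this module (the input string is invented):

```rust
#[test]
fn hash_comment_helpers_sketch() {
    // One hidden setup line and one visible line, with no trailing newline.
    let snippet = "# use std::fmt;\nfn main() {}";

    // The rendered manual drops the hidden line entirely (a final newline is added).
    assert_eq!(hide_hash_comments(snippet), "fn main() {}\n");

    // The generated doctest keeps it, minus the "# " prefix.
    assert_eq!(reveal_hash_comments(snippet), "use std::fmt;\nfn main() {}\n");
}
```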
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
new file mode 100644
index 000000000..3e61d0741
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -0,0 +1,703 @@
+//! Assorted functions shared by several assists.
+
+use std::ops;
+
+use itertools::Itertools;
+
+pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
+use hir::{db::HirDatabase, HirDisplay, Semantics};
+use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap};
+use stdx::format_to;
+use syntax::{
+ ast::{
+ self,
+ edit::{self, AstNodeEdit},
+ edit_in_place::AttrsOwnerEdit,
+ make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
+ },
+ ted, AstNode, AstToken, Direction, SmolStr, SourceFile,
+ SyntaxKind::*,
+ SyntaxNode, TextRange, TextSize, T,
+};
+
+use crate::assist_context::{AssistBuilder, AssistContext};
+
+pub(crate) mod suggest_name;
+mod gen_trait_fn_body;
+
+pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr {
+ extract_trivial_expression(&block_expr)
+ .filter(|expr| !expr.syntax().text().contains_char('\n'))
+ .unwrap_or_else(|| block_expr.into())
+}
+
+pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Expr> {
+ if block_expr.modifier().is_some() {
+ return None;
+ }
+ let stmt_list = block_expr.stmt_list()?;
+ let has_anything_else = |thing: &SyntaxNode| -> bool {
+ let mut non_trivial_children =
+ stmt_list.syntax().children_with_tokens().filter(|it| match it.kind() {
+ WHITESPACE | T!['{'] | T!['}'] => false,
+ _ => it.as_node() != Some(thing),
+ });
+ non_trivial_children.next().is_some()
+ };
+
+ if let Some(expr) = stmt_list.tail_expr() {
+ if has_anything_else(expr.syntax()) {
+ return None;
+ }
+ return Some(expr);
+ }
+ // Unwrap `{ continue; }`
+ let stmt = stmt_list.statements().next()?;
+ if let ast::Stmt::ExprStmt(expr_stmt) = stmt {
+ if has_anything_else(expr_stmt.syntax()) {
+ return None;
+ }
+ let expr = expr_stmt.expr()?;
+ if matches!(expr.syntax().kind(), CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR) {
+ return Some(expr);
+ }
+ }
+ None
+}
+
+/// This is a method with heuristics to support test methods annotated with custom test annotations, such as
+/// `#[test_case(...)]`, `#[tokio::test]` and similar.
+/// A regular `#[test]` annotation is also supported.
+///
+/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test,
+/// but that is better than not having runnables for the tests at all.
+pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
+ fn_def.attrs().find_map(|attr| {
+ let path = attr.path()?;
+ let text = path.syntax().text().to_string();
+ if text.starts_with("test") || text.ends_with("test") {
+ Some(attr)
+ } else {
+ None
+ }
+ })
+}
+
+#[derive(Copy, Clone, PartialEq)]
+pub enum DefaultMethods {
+ Only,
+ No,
+}
+
+pub fn filter_assoc_items(
+ sema: &Semantics<'_, RootDatabase>,
+ items: &[hir::AssocItem],
+ default_methods: DefaultMethods,
+) -> Vec<ast::AssocItem> {
+ fn has_def_name(item: &ast::AssocItem) -> bool {
+ match item {
+ ast::AssocItem::Fn(def) => def.name(),
+ ast::AssocItem::TypeAlias(def) => def.name(),
+ ast::AssocItem::Const(def) => def.name(),
+ ast::AssocItem::MacroCall(_) => None,
+ }
+ .is_some()
+ }
+
+ items
+ .iter()
+ // Note: This throws away items with no source.
+ .filter_map(|&i| {
+ let item = match i {
+ hir::AssocItem::Function(i) => ast::AssocItem::Fn(sema.source(i)?.value),
+ hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(sema.source(i)?.value),
+ hir::AssocItem::Const(i) => ast::AssocItem::Const(sema.source(i)?.value),
+ };
+ Some(item)
+ })
+ .filter(has_def_name)
+ .filter(|it| match it {
+ ast::AssocItem::Fn(def) => matches!(
+ (default_methods, def.body()),
+ (DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None)
+ ),
+ _ => default_methods == DefaultMethods::No,
+ })
+ .collect::<Vec<_>>()
+}
+
+pub fn add_trait_assoc_items_to_impl(
+ sema: &Semantics<'_, RootDatabase>,
+ items: Vec<ast::AssocItem>,
+ trait_: hir::Trait,
+ impl_: ast::Impl,
+ target_scope: hir::SemanticsScope<'_>,
+) -> (ast::Impl, ast::AssocItem) {
+ let source_scope = sema.scope_for_def(trait_);
+
+ let transform = PathTransform::trait_impl(&target_scope, &source_scope, trait_, impl_.clone());
+
+ let items = items.into_iter().map(|assoc_item| {
+ transform.apply(assoc_item.syntax());
+ assoc_item.remove_attrs_and_docs();
+ assoc_item
+ });
+
+ let res = impl_.clone_for_update();
+
+ let assoc_item_list = res.get_or_create_assoc_item_list();
+ let mut first_item = None;
+ for item in items {
+ first_item.get_or_insert_with(|| item.clone());
+ match &item {
+ ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
+ let body = make::block_expr(None, Some(make::ext::expr_todo()))
+ .indent(edit::IndentLevel(1));
+ ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
+ }
+ ast::AssocItem::TypeAlias(type_alias) => {
+ if let Some(type_bound_list) = type_alias.type_bound_list() {
+ type_bound_list.remove()
+ }
+ }
+ _ => {}
+ }
+
+ assoc_item_list.add_item(item)
+ }
+
+ (res, first_item.unwrap())
+}
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) enum Cursor<'a> {
+ Replace(&'a SyntaxNode),
+ Before(&'a SyntaxNode),
+}
+
+impl<'a> Cursor<'a> {
+ fn node(self) -> &'a SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+
+pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor<'_>) -> String {
+ assert!(cursor.node().ancestors().any(|it| it == *node));
+ let range = cursor.node().text_range() - node.text_range().start();
+ let range: ops::Range<usize> = range.into();
+
+ let mut placeholder = cursor.node().to_string();
+ escape(&mut placeholder);
+ let tab_stop = match cursor {
+ Cursor::Replace(placeholder) => format!("${{0:{}}}", placeholder),
+ Cursor::Before(placeholder) => format!("$0{}", placeholder),
+ };
+
+ let mut buf = node.to_string();
+ buf.replace_range(range, &tab_stop);
+ return buf;
+
+ fn escape(buf: &mut String) {
+ stdx::replace(buf, '{', r"\{");
+ stdx::replace(buf, '}', r"\}");
+ stdx::replace(buf, '$', r"\$");
+ }
+}
+
+pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
+ node.children_with_tokens()
+ .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
+ .map(|it| it.text_range().start())
+ .unwrap_or_else(|| node.text_range().start())
+}
+
+pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
+ invert_special_case(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr))
+}
+
+fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
+ match expr {
+ ast::Expr::BinExpr(bin) => {
+ let bin = bin.clone_for_update();
+ let op_token = bin.op_token()?;
+ let rev_token = match op_token.kind() {
+ T![==] => T![!=],
+ T![!=] => T![==],
+ T![<] => T![>=],
+ T![<=] => T![>],
+ T![>] => T![<=],
+ T![>=] => T![<],
+ // Parenthesize other expressions before prefixing `!`
+ _ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone()))),
+ };
+ ted::replace(op_token, make::token(rev_token));
+ Some(bin.into())
+ }
+ ast::Expr::MethodCallExpr(mce) => {
+ let receiver = mce.receiver()?;
+ let method = mce.name_ref()?;
+ let arg_list = mce.arg_list()?;
+
+ let method = match method.text().as_str() {
+ "is_some" => "is_none",
+ "is_none" => "is_some",
+ "is_ok" => "is_err",
+ "is_err" => "is_ok",
+ _ => return None,
+ };
+ Some(make::expr_method_call(receiver, make::name_ref(method), arg_list))
+ }
+ ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
+ ast::Expr::ParenExpr(parexpr) => parexpr.expr(),
+ _ => pe.expr(),
+ },
+ ast::Expr::Literal(lit) => match lit.kind() {
+ ast::LiteralKind::Bool(b) => match b {
+ true => Some(ast::Expr::Literal(make::expr_literal("false"))),
+ false => Some(ast::Expr::Literal(make::expr_literal("true"))),
+ },
+ _ => None,
+ },
+ _ => None,
+ }
+}
+
+pub(crate) fn next_prev() -> impl Iterator<Item = Direction> {
+ [Direction::Next, Direction::Prev].into_iter()
+}
+
+pub(crate) fn does_pat_match_variant(pat: &ast::Pat, var: &ast::Pat) -> bool {
+ let first_node_text = |pat: &ast::Pat| pat.syntax().first_child().map(|node| node.text());
+
+ let pat_head = match pat {
+ ast::Pat::IdentPat(bind_pat) => match bind_pat.pat() {
+ Some(p) => first_node_text(&p),
+ None => return pat.syntax().text() == var.syntax().text(),
+ },
+ pat => first_node_text(pat),
+ };
+
+ let var_head = first_node_text(var);
+
+ pat_head == var_head
+}
+
+pub(crate) fn does_nested_pattern(pat: &ast::Pat) -> bool {
+ let depth = calc_depth(pat, 0);
+
+ if 1 < depth {
+ return true;
+ }
+ false
+}
+
+fn calc_depth(pat: &ast::Pat, depth: usize) -> usize {
+ match pat {
+ ast::Pat::IdentPat(_)
+ | ast::Pat::BoxPat(_)
+ | ast::Pat::RestPat(_)
+ | ast::Pat::LiteralPat(_)
+ | ast::Pat::MacroPat(_)
+ | ast::Pat::OrPat(_)
+ | ast::Pat::ParenPat(_)
+ | ast::Pat::PathPat(_)
+ | ast::Pat::WildcardPat(_)
+ | ast::Pat::RangePat(_)
+ | ast::Pat::RecordPat(_)
+ | ast::Pat::RefPat(_)
+ | ast::Pat::SlicePat(_)
+ | ast::Pat::TuplePat(_)
+ | ast::Pat::ConstBlockPat(_) => depth,
+
+ // FIXME: Other patterns may also be nested. Currently it simply supports only `TupleStructPat`
+ ast::Pat::TupleStructPat(pat) => {
+ let mut max_depth = depth;
+ for p in pat.fields() {
+ let d = calc_depth(&p, depth + 1);
+ if d > max_depth {
+ max_depth = d
+ }
+ }
+ max_depth
+ }
+ }
+}
+
+// Uses a syntax-driven approach to find any impl blocks for the struct that
+// exist within the module/file.
+//
+// Returns `None` if an fn with the given name already exists in such a block
+//
+// FIXME: change the new fn checking to a more semantic approach when that's more
+// viable (e.g. we process proc macros, etc)
+// FIXME: this partially overlaps with `find_impl_block_*`
+pub(crate) fn find_struct_impl(
+ ctx: &AssistContext<'_>,
+ adt: &ast::Adt,
+ name: &str,
+) -> Option<Option<ast::Impl>> {
+ let db = ctx.db();
+ let module = adt.syntax().parent()?;
+
+ let struct_def = ctx.sema.to_def(adt)?;
+
+ let block = module.descendants().filter_map(ast::Impl::cast).find_map(|impl_blk| {
+ let blk = ctx.sema.to_def(&impl_blk)?;
+
+ // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
+ // (we currently use the wrong type parameter)
+ // also we wouldn't want to use e.g. `impl S<u32>`
+
+ let same_ty = match blk.self_ty(db).as_adt() {
+ Some(def) => def == struct_def,
+ None => false,
+ };
+ let not_trait_impl = blk.trait_(db).is_none();
+
+ if !(same_ty && not_trait_impl) {
+ None
+ } else {
+ Some(impl_blk)
+ }
+ });
+
+ if let Some(ref impl_blk) = block {
+ if has_fn(impl_blk, name) {
+ return None;
+ }
+ }
+
+ Some(block)
+}
+
+fn has_fn(imp: &ast::Impl, rhs_name: &str) -> bool {
+ if let Some(il) = imp.assoc_item_list() {
+ for item in il.assoc_items() {
+ if let ast::AssocItem::Fn(f) = item {
+ if let Some(name) = f.name() {
+ if name.text().eq_ignore_ascii_case(rhs_name) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+
+ false
+}
+
+/// Find the start of the `impl` block for the given `ast::Impl`.
+//
+// FIXME: this partially overlaps with `find_struct_impl`
+pub(crate) fn find_impl_block_start(impl_def: ast::Impl, buf: &mut String) -> Option<TextSize> {
+ buf.push('\n');
+ let start = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())?.text_range().end();
+ Some(start)
+}
+
+/// Find the end of the `impl` block for the given `ast::Impl`.
+//
+// FIXME: this partially overlaps with `find_struct_impl`
+pub(crate) fn find_impl_block_end(impl_def: ast::Impl, buf: &mut String) -> Option<TextSize> {
+ buf.push('\n');
+ let end = impl_def
+ .assoc_item_list()
+ .and_then(|it| it.r_curly_token())?
+ .prev_sibling_or_token()?
+ .text_range()
+ .end();
+ Some(end)
+}
+
+// Generates the surrounding `impl Type { <code> }` including type and lifetime
+// parameters
+pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String {
+ generate_impl_text_inner(adt, None, code)
+}
+
+// Generates the surrounding `impl <trait> for Type { <code> }` including type
+// and lifetime parameters
+pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String {
+ generate_impl_text_inner(adt, Some(trait_text), code)
+}
+
+fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String {
+ let generic_params = adt.generic_param_list();
+ let mut buf = String::with_capacity(code.len());
+ buf.push_str("\n\n");
+ adt.attrs()
+ .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false))
+ .for_each(|attr| buf.push_str(format!("{}\n", attr).as_str()));
+ buf.push_str("impl");
+ if let Some(generic_params) = &generic_params {
+ let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
+ let toc_params = generic_params.type_or_const_params().map(|toc_param| {
+ let type_param = match toc_param {
+ ast::TypeOrConstParam::Type(x) => x,
+ ast::TypeOrConstParam::Const(x) => return x.syntax().to_string(),
+ };
+ let mut buf = String::new();
+ if let Some(it) = type_param.name() {
+ format_to!(buf, "{}", it.syntax());
+ }
+ if let Some(it) = type_param.colon_token() {
+ format_to!(buf, "{} ", it);
+ }
+ if let Some(it) = type_param.type_bound_list() {
+ format_to!(buf, "{}", it.syntax());
+ }
+ buf
+ });
+ let generics = lifetimes.chain(toc_params).format(", ");
+ format_to!(buf, "<{}>", generics);
+ }
+ buf.push(' ');
+ if let Some(trait_text) = trait_text {
+ buf.push_str(trait_text);
+ buf.push_str(" for ");
+ }
+ buf.push_str(&adt.name().unwrap().text());
+ if let Some(generic_params) = generic_params {
+ let lifetime_params = generic_params
+ .lifetime_params()
+ .filter_map(|it| it.lifetime())
+ .map(|it| SmolStr::from(it.text()));
+ let toc_params = generic_params
+ .type_or_const_params()
+ .filter_map(|it| it.name())
+ .map(|it| SmolStr::from(it.text()));
+ format_to!(buf, "<{}>", lifetime_params.chain(toc_params).format(", "))
+ }
+
+ match adt.where_clause() {
+ Some(where_clause) => {
+ format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
+ }
+ None => {
+ format_to!(buf, " {{\n{}\n}}", code);
+ }
+ }
+
+ buf
+}
+
+pub(crate) fn add_method_to_adt(
+ builder: &mut AssistBuilder,
+ adt: &ast::Adt,
+ impl_def: Option<ast::Impl>,
+ method: &str,
+) {
+ let mut buf = String::with_capacity(method.len() + 2);
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+ buf.push_str(method);
+
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(adt, &buf);
+ adt.syntax().text_range().end()
+ });
+
+ builder.insert(start_offset, buf);
+}
+
+#[derive(Debug)]
+pub(crate) struct ReferenceConversion {
+ conversion: ReferenceConversionType,
+ ty: hir::Type,
+}
+
+#[derive(Debug)]
+enum ReferenceConversionType {
+ // reference can be stripped if the type is Copy
+ Copy,
+ // &String -> &str
+ AsRefStr,
+ // &Vec<T> -> &[T]
+ AsRefSlice,
+ // &Box<T> -> &T
+ Dereferenced,
+ // &Option<T> -> Option<&T>
+ Option,
+ // &Result<T, E> -> Result<&T, &E>
+ Result,
+}
+
+impl ReferenceConversion {
+ pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> String {
+ match self.conversion {
+ ReferenceConversionType::Copy => self.ty.display(db).to_string(),
+ ReferenceConversionType::AsRefStr => "&str".to_string(),
+ ReferenceConversionType::AsRefSlice => {
+ let type_argument_name =
+ self.ty.type_arguments().next().unwrap().display(db).to_string();
+ format!("&[{}]", type_argument_name)
+ }
+ ReferenceConversionType::Dereferenced => {
+ let type_argument_name =
+ self.ty.type_arguments().next().unwrap().display(db).to_string();
+ format!("&{}", type_argument_name)
+ }
+ ReferenceConversionType::Option => {
+ let type_argument_name =
+ self.ty.type_arguments().next().unwrap().display(db).to_string();
+ format!("Option<&{}>", type_argument_name)
+ }
+ ReferenceConversionType::Result => {
+ let mut type_arguments = self.ty.type_arguments();
+ let first_type_argument_name =
+ type_arguments.next().unwrap().display(db).to_string();
+ let second_type_argument_name =
+ type_arguments.next().unwrap().display(db).to_string();
+ format!("Result<&{}, &{}>", first_type_argument_name, second_type_argument_name)
+ }
+ }
+ }
+
+ pub(crate) fn getter(&self, field_name: String) -> String {
+ match self.conversion {
+ ReferenceConversionType::Copy => format!("self.{}", field_name),
+ ReferenceConversionType::AsRefStr
+ | ReferenceConversionType::AsRefSlice
+ | ReferenceConversionType::Dereferenced
+ | ReferenceConversionType::Option
+ | ReferenceConversionType::Result => format!("self.{}.as_ref()", field_name),
+ }
+ }
+}
+
+// FIXME: It should return a new hir::Type, but currently constructing new types is too cumbersome
+// and all users of this function operate on string type names, so they can do the conversion
+// themselves.
+pub(crate) fn convert_reference_type(
+ ty: hir::Type,
+ db: &RootDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversion> {
+ handle_copy(&ty, db)
+ .or_else(|| handle_as_ref_str(&ty, db, famous_defs))
+ .or_else(|| handle_as_ref_slice(&ty, db, famous_defs))
+ .or_else(|| handle_dereferenced(&ty, db, famous_defs))
+ .or_else(|| handle_option_as_ref(&ty, db, famous_defs))
+ .or_else(|| handle_result_as_ref(&ty, db, famous_defs))
+ .map(|conversion| ReferenceConversion { ty, conversion })
+}
+
+fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
+ ty.is_copy(db).then(|| ReferenceConversionType::Copy)
+}
+
+fn handle_as_ref_str(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ let str_type = hir::BuiltinType::str().ty(db);
+
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
+ .then(|| ReferenceConversionType::AsRefStr)
+}
+
+fn handle_as_ref_slice(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ let type_argument = ty.type_arguments().next()?;
+ let slice_type = hir::Type::new_slice(type_argument);
+
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
+ .then(|| ReferenceConversionType::AsRefSlice)
+}
+
+fn handle_dereferenced(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ let type_argument = ty.type_arguments().next()?;
+
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
+ .then(|| ReferenceConversionType::Dereferenced)
+}
+
+fn handle_option_as_ref(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ if ty.as_adt() == famous_defs.core_option_Option()?.ty(db).as_adt() {
+ Some(ReferenceConversionType::Option)
+ } else {
+ None
+ }
+}
+
+fn handle_result_as_ref(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ if ty.as_adt() == famous_defs.core_result_Result()?.ty(db).as_adt() {
+ Some(ReferenceConversionType::Result)
+ } else {
+ None
+ }
+}
+
+pub(crate) fn get_methods(items: &ast::AssocItemList) -> Vec<ast::Fn> {
+ items
+ .assoc_items()
+ .flat_map(|i| match i {
+ ast::AssocItem::Fn(f) => Some(f),
+ _ => None,
+ })
+ .filter(|f| f.name().is_some())
+ .collect()
+}
+
+/// Trim (i.e. remove leading and trailing whitespace from) `initial_range` in `source_file` and return the trimmed range.
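+///
+/// For example (illustrative), a selection covering `"  1 + 1  "` including the surrounding
+/// whitespace is narrowed to cover just `"1 + 1"`.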
+pub(crate) fn trimmed_text_range(source_file: &SourceFile, initial_range: TextRange) -> TextRange {
+ let mut trimmed_range = initial_range;
+ while source_file
+ .syntax()
+ .token_at_offset(trimmed_range.start())
+ .find_map(Whitespace::cast)
+ .is_some()
+ && trimmed_range.start() < trimmed_range.end()
+ {
+ let start = trimmed_range.start() + TextSize::from(1);
+ trimmed_range = TextRange::new(start, trimmed_range.end());
+ }
+ while source_file
+ .syntax()
+ .token_at_offset(trimmed_range.end())
+ .find_map(Whitespace::cast)
+ .is_some()
+ && trimmed_range.start() < trimmed_range.end()
+ {
+ let end = trimmed_range.end() - TextSize::from(1);
+ trimmed_range = TextRange::new(trimmed_range.start(), end);
+ }
+ trimmed_range
+}
+
+/// Convert a list of function params to a list of arguments that can be passed
+/// into a function call.
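+///
+/// Sketch of the transformation: a parameter list like `(x: usize, y: String)` becomes the
+/// argument list `(x, y)`; parameters whose pattern is not a plain identifier are skipped.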
+pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList {
+ let mut args = vec![];
+ for param in list.params() {
+ if let Some(ast::Pat::IdentPat(pat)) = param.pat() {
+ if let Some(name) = pat.name() {
+ let name = name.to_string();
+ let expr = make::expr_path(make::ext::ident_path(&name));
+ args.push(expr);
+ }
+ }
+ }
+ make::arg_list(args)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
new file mode 100644
index 000000000..7a0c91295
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -0,0 +1,661 @@
+//! This module contains functions to generate default trait impl function bodies where possible.
+
+use syntax::{
+ ast::{self, edit::AstNodeEdit, make, AstNode, BinaryOp, CmpOp, HasName, LogicOp},
+ ted,
+};
+
+/// Generate custom trait fn bodies where possible, rather than leaving the default `todo!()` implementation.
+///
+/// Returns `Option` so that we can use `?` rather than `if let Some`. Returning
+/// `None` means that generating a custom trait body failed, and the body will remain
+/// as `todo!` instead.
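+///
+/// For example (illustrative), for `impl Clone for Foo` with `struct Foo { x: u32 }`, the
+/// generated `clone` body becomes `Self { x: self.x.clone() }` instead of `todo!()`.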
+pub(crate) fn gen_trait_fn_body(
+ func: &ast::Fn,
+ trait_path: &ast::Path,
+ adt: &ast::Adt,
+) -> Option<()> {
+ match trait_path.segment()?.name_ref()?.text().as_str() {
+ "Clone" => gen_clone_impl(adt, func),
+ "Debug" => gen_debug_impl(adt, func),
+ "Default" => gen_default_impl(adt, func),
+ "Hash" => gen_hash_impl(adt, func),
+ "PartialEq" => gen_partial_eq(adt, func),
+ "PartialOrd" => gen_partial_ord(adt, func),
+ _ => None,
+ }
+}
+
+/// Generate a `Clone` impl based on the fields and members of the target type.
+fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "clone"));
+ fn gen_clone_call(target: ast::Expr) -> ast::Expr {
+ let method = make::name_ref("clone");
+ make::expr_method_call(target, method, make::arg_list(None))
+ }
+ let expr = match adt {
+ // `Clone` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+ ast::Adt::Enum(enum_) => {
+ let list = enum_.variant_list()?;
+ let mut arms = vec![];
+ for variant in list.variants() {
+ let name = variant.name()?;
+ let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?;
+
+ match variant.field_list() {
+ // => match self { Self::Name { x } => Self::Name { x: x.clone() } }
+ Some(ast::FieldList::RecordFieldList(list)) => {
+ let mut pats = vec![];
+ let mut fields = vec![];
+ for field in list.fields() {
+ let field_name = field.name()?;
+ let pat = make::ident_pat(false, false, field_name.clone());
+ pats.push(pat.into());
+
+ let path = make::ext::ident_path(&field_name.to_string());
+ let method_call = gen_clone_call(make::expr_path(path));
+ let name_ref = make::name_ref(&field_name.to_string());
+ let field = make::record_expr_field(name_ref, Some(method_call));
+ fields.push(field);
+ }
+ let pat = make::record_pat(variant_name.clone(), pats.into_iter());
+ let fields = make::record_expr_field_list(fields);
+ let record_expr = make::record_expr(variant_name, fields).into();
+ arms.push(make::match_arm(Some(pat.into()), None, record_expr));
+ }
+
+ // => match self { Self::Name(arg1) => Self::Name(arg1.clone()) }
+ Some(ast::FieldList::TupleFieldList(list)) => {
+ let mut pats = vec![];
+ let mut fields = vec![];
+ for (i, _) in list.fields().enumerate() {
+ let field_name = format!("arg{}", i);
+ let pat = make::ident_pat(false, false, make::name(&field_name));
+ pats.push(pat.into());
+
+ let f_path = make::expr_path(make::ext::ident_path(&field_name));
+ fields.push(gen_clone_call(f_path));
+ }
+ let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
+ let struct_name = make::expr_path(variant_name);
+ let tuple_expr = make::expr_call(struct_name, make::arg_list(fields));
+ arms.push(make::match_arm(Some(pat.into()), None, tuple_expr));
+ }
+
+ // => match self { Self::Name => Self::Name }
+ None => {
+ let pattern = make::path_pat(variant_name.clone());
+ let variant_expr = make::expr_path(variant_name);
+ arms.push(make::match_arm(Some(pattern), None, variant_expr));
+ }
+ }
+ }
+
+ let match_target = make::expr_path(make::ext::ident_path("self"));
+ let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ make::expr_match(match_target, list)
+ }
+ ast::Adt::Struct(strukt) => {
+ match strukt.field_list() {
+ // => Self { name: self.name.clone() }
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut fields = vec![];
+ for field in field_list.fields() {
+ let base = make::expr_path(make::ext::ident_path("self"));
+ let target = make::expr_field(base, &field.name()?.to_string());
+ let method_call = gen_clone_call(target);
+ let name_ref = make::name_ref(&field.name()?.to_string());
+ let field = make::record_expr_field(name_ref, Some(method_call));
+ fields.push(field);
+ }
+ let struct_name = make::ext::ident_path("Self");
+ let fields = make::record_expr_field_list(fields);
+ make::record_expr(struct_name, fields).into()
+ }
+ // => Self(self.0.clone(), self.1.clone())
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut fields = vec![];
+ for (i, _) in field_list.fields().enumerate() {
+ let f_path = make::expr_path(make::ext::ident_path("self"));
+ let target = make::expr_field(f_path, &format!("{}", i));
+ fields.push(gen_clone_call(target));
+ }
+ let struct_name = make::expr_path(make::ext::ident_path("Self"));
+ make::expr_call(struct_name, make::arg_list(fields))
+ }
+ // => Self { }
+ None => {
+ let struct_name = make::ext::ident_path("Self");
+ let fields = make::record_expr_field_list(None);
+ make::record_expr(struct_name, fields).into()
+ }
+ }
+ }
+ };
+ let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+/// Generate a `Debug` impl based on the fields and members of the target type.
+fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ let annotated_name = adt.name()?;
+ match adt {
+ // `Debug` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => None,
+
+ // => match self { Self::Variant => write!(f, "Variant") }
+ ast::Adt::Enum(enum_) => {
+ let list = enum_.variant_list()?;
+ let mut arms = vec![];
+ for variant in list.variants() {
+ let name = variant.name()?;
+ let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?;
+ let target = make::expr_path(make::ext::ident_path("f"));
+
+ match variant.field_list() {
+ Some(ast::FieldList::RecordFieldList(list)) => {
+ // => f.debug_struct(name)
+ let target = make::expr_path(make::ext::ident_path("f"));
+ let method = make::name_ref("debug_struct");
+ let struct_name = format!("\"{}\"", name);
+ let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
+ let mut expr = make::expr_method_call(target, method, args);
+
+ let mut pats = vec![];
+ for field in list.fields() {
+ let field_name = field.name()?;
+
+ // create a field pattern for use in `MyStruct { fields.. }`
+ let pat = make::ident_pat(false, false, field_name.clone());
+ pats.push(pat.into());
+
+ // => <expr>.field("field_name", field)
+ let method_name = make::name_ref("field");
+ let name = make::expr_literal(&(format!("\"{}\"", field_name))).into();
+ let path = &format!("{}", field_name);
+ let path = make::expr_path(make::ext::ident_path(path));
+ let args = make::arg_list(vec![name, path]);
+ expr = make::expr_method_call(expr, method_name, args);
+ }
+
+ // => <expr>.finish()
+ let method = make::name_ref("finish");
+ let expr = make::expr_method_call(expr, method, make::arg_list(None));
+
+ // => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(),
+ let pat = make::record_pat(variant_name.clone(), pats.into_iter());
+ arms.push(make::match_arm(Some(pat.into()), None, expr));
+ }
+ Some(ast::FieldList::TupleFieldList(list)) => {
+ // => f.debug_tuple(name)
+ let target = make::expr_path(make::ext::ident_path("f"));
+ let method = make::name_ref("debug_tuple");
+ let struct_name = format!("\"{}\"", name);
+ let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
+ let mut expr = make::expr_method_call(target, method, args);
+
+ let mut pats = vec![];
+ for (i, _) in list.fields().enumerate() {
+ let name = format!("arg{}", i);
+
+ // create a field pattern for use in `MyStruct(fields..)`
+ let field_name = make::name(&name);
+ let pat = make::ident_pat(false, false, field_name.clone());
+ pats.push(pat.into());
+
+ // => <expr>.field(field)
+ let method_name = make::name_ref("field");
+ let field_path = &name.to_string();
+ let field_path = make::expr_path(make::ext::ident_path(field_path));
+ let args = make::arg_list(vec![field_path]);
+ expr = make::expr_method_call(expr, method_name, args);
+ }
+
+ // => <expr>.finish()
+ let method = make::name_ref("finish");
+ let expr = make::expr_method_call(expr, method, make::arg_list(None));
+
+ // => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(),
+ let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
+ arms.push(make::match_arm(Some(pat.into()), None, expr));
+ }
+ None => {
+ let fmt_string = make::expr_literal(&(format!("\"{}\"", name))).into();
+ let args = make::arg_list([target, fmt_string]);
+ let macro_name = make::expr_path(make::ext::ident_path("write"));
+ let macro_call = make::expr_macro_call(macro_name, args);
+
+ let variant_name = make::path_pat(variant_name);
+ arms.push(make::match_arm(Some(variant_name), None, macro_call));
+ }
+ }
+ }
+
+ let match_target = make::expr_path(make::ext::ident_path("self"));
+ let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ let match_expr = make::expr_match(match_target, list);
+
+ let body = make::block_expr(None, Some(match_expr));
+ let body = body.indent(ast::edit::IndentLevel(1));
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+ }
+
+ ast::Adt::Struct(strukt) => {
+ let name = format!("\"{}\"", annotated_name);
+ let args = make::arg_list(Some(make::expr_literal(&name).into()));
+ let target = make::expr_path(make::ext::ident_path("f"));
+
+ let expr = match strukt.field_list() {
+ // => f.debug_struct("Name").finish()
+ None => make::expr_method_call(target, make::name_ref("debug_struct"), args),
+
+ // => f.debug_struct("Name").field("foo", &self.foo).finish()
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let method = make::name_ref("debug_struct");
+ let mut expr = make::expr_method_call(target, method, args);
+ for field in field_list.fields() {
+ let name = field.name()?;
+ let f_name = make::expr_literal(&(format!("\"{}\"", name))).into();
+ let f_path = make::expr_path(make::ext::ident_path("self"));
+ let f_path = make::expr_ref(f_path, false);
+ let f_path = make::expr_field(f_path, &format!("{}", name));
+ let args = make::arg_list([f_name, f_path]);
+ expr = make::expr_method_call(expr, make::name_ref("field"), args);
+ }
+ expr
+ }
+
+ // => f.debug_tuple("Name").field(self.0).finish()
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let method = make::name_ref("debug_tuple");
+ let mut expr = make::expr_method_call(target, method, args);
+ for (i, _) in field_list.fields().enumerate() {
+ let f_path = make::expr_path(make::ext::ident_path("self"));
+ let f_path = make::expr_ref(f_path, false);
+ let f_path = make::expr_field(f_path, &format!("{}", i));
+ let method = make::name_ref("field");
+ expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)));
+ }
+ expr
+ }
+ };
+
+ let method = make::name_ref("finish");
+ let expr = make::expr_method_call(expr, method, make::arg_list(None));
+ let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+ }
+ }
+}
+
+/// Generate a `Default` impl based on the fields and members of the target type.
+fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ fn gen_default_call() -> Option<ast::Expr> {
+ let fn_name = make::ext::path_from_idents(["Default", "default"])?;
+ Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)))
+ }
+ match adt {
+ // `Default` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => None,
+ // Generating a `Default` impl for enums is not supported here.
+ ast::Adt::Enum(_) => None,
+ ast::Adt::Struct(strukt) => {
+ let expr = match strukt.field_list() {
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut fields = vec![];
+ for field in field_list.fields() {
+ let method_call = gen_default_call()?;
+ let name_ref = make::name_ref(&field.name()?.to_string());
+ let field = make::record_expr_field(name_ref, Some(method_call));
+ fields.push(field);
+ }
+ let struct_name = make::ext::ident_path("Self");
+ let fields = make::record_expr_field_list(fields);
+ make::record_expr(struct_name, fields).into()
+ }
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let struct_name = make::expr_path(make::ext::ident_path("Self"));
+ let fields = field_list
+ .fields()
+ .map(|_| gen_default_call())
+ .collect::<Option<Vec<ast::Expr>>>()?;
+ make::expr_call(struct_name, make::arg_list(fields))
+ }
+ None => {
+ let struct_name = make::ext::ident_path("Self");
+ let fields = make::record_expr_field_list(None);
+ make::record_expr(struct_name, fields).into()
+ }
+ };
+ let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+ }
+ }
+}
+
+/// Generate a `Hash` impl based on the fields and members of the target type.
+fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "hash"));
+ fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
+ let method = make::name_ref("hash");
+ let arg = make::expr_path(make::ext::ident_path("state"));
+ let expr = make::expr_method_call(target, method, make::arg_list(Some(arg)));
+ make::expr_stmt(expr).into()
+ }
+
+ let body = match adt {
+ // `Hash` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+
+ // => std::mem::discriminant(self).hash(state);
+ ast::Adt::Enum(_) => {
+ let fn_name = make_discriminant()?;
+
+ let arg = make::expr_path(make::ext::ident_path("self"));
+ let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg)));
+ let stmt = gen_hash_call(fn_call);
+
+ make::block_expr(Some(stmt), None).indent(ast::edit::IndentLevel(1))
+ }
+ ast::Adt::Struct(strukt) => match strukt.field_list() {
+ // => self.<field>.hash(state);
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut stmts = vec![];
+ for field in field_list.fields() {
+ let base = make::expr_path(make::ext::ident_path("self"));
+ let target = make::expr_field(base, &field.name()?.to_string());
+ stmts.push(gen_hash_call(target));
+ }
+ make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
+ }
+
+ // => self.<field_index>.hash(state);
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut stmts = vec![];
+ for (i, _) in field_list.fields().enumerate() {
+ let base = make::expr_path(make::ext::ident_path("self"));
+ let target = make::expr_field(base, &format!("{}", i));
+ stmts.push(gen_hash_call(target));
+ }
+ make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
+ }
+
+ // No fields in the body means there's nothing to hash.
+ None => return None,
+ },
+ };
+
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+/// Generate a `PartialEq` impl based on the fields and members of the target type.
+fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "eq"));
+ fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
+ match expr {
+ Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
+ None => Some(cmp),
+ }
+ }
+
+ fn gen_record_pat_field(field_name: &str, pat_name: &str) -> ast::RecordPatField {
+ let pat = make::ext::simple_ident_pat(make::name(pat_name));
+ let name_ref = make::name_ref(field_name);
+ make::record_pat_field(name_ref, pat.into())
+ }
+
+ fn gen_record_pat(record_name: ast::Path, fields: Vec<ast::RecordPatField>) -> ast::RecordPat {
+ let list = make::record_pat_field_list(fields);
+ make::record_pat_with_fields(record_name, list)
+ }
+
+ fn gen_variant_path(variant: &ast::Variant) -> Option<ast::Path> {
+ make::ext::path_from_idents(["Self", &variant.name()?.to_string()])
+ }
+
+ fn gen_tuple_field(field_name: &String) -> ast::Pat {
+ ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
+ }
+
+ // FIXME: return `None` if the trait carries a generic type; we can only
+ // generate this code for `Self` for the time being.
+
+ let body = match adt {
+ // `PartialEq` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+
+ ast::Adt::Enum(enum_) => {
+ // => std::mem::discriminant(self) == std::mem::discriminant(other)
+ let lhs_name = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone())));
+ let rhs_name = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone())));
+ let eq_check =
+ make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+
+ let mut n_cases = 0;
+ let mut arms = vec![];
+ for variant in enum_.variant_list()?.variants() {
+ n_cases += 1;
+ match variant.field_list() {
+ // => (Self::Bar { bin: l_bin }, Self::Bar { bin: r_bin }) => l_bin == r_bin,
+ Some(ast::FieldList::RecordFieldList(list)) => {
+ let mut expr = None;
+ let mut l_fields = vec![];
+ let mut r_fields = vec![];
+
+ for field in list.fields() {
+ let field_name = field.name()?.to_string();
+
+ let l_name = &format!("l_{}", field_name);
+ l_fields.push(gen_record_pat_field(&field_name, l_name));
+
+ let r_name = &format!("r_{}", field_name);
+ r_fields.push(gen_record_pat_field(&field_name, r_name));
+
+ let lhs = make::expr_path(make::ext::ident_path(l_name));
+ let rhs = make::expr_path(make::ext::ident_path(r_name));
+ let cmp = make::expr_bin_op(
+ lhs,
+ BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ rhs,
+ );
+ expr = gen_eq_chain(expr, cmp);
+ }
+
+ let left = gen_record_pat(gen_variant_path(&variant)?, l_fields);
+ let right = gen_record_pat(gen_variant_path(&variant)?, r_fields);
+ let tuple = make::tuple_pat(vec![left.into(), right.into()]);
+
+ if let Some(expr) = expr {
+ arms.push(make::match_arm(Some(tuple.into()), None, expr));
+ }
+ }
+
+ Some(ast::FieldList::TupleFieldList(list)) => {
+ let mut expr = None;
+ let mut l_fields = vec![];
+ let mut r_fields = vec![];
+
+ for (i, _) in list.fields().enumerate() {
+ let field_name = format!("{}", i);
+
+ let l_name = format!("l{}", field_name);
+ l_fields.push(gen_tuple_field(&l_name));
+
+ let r_name = format!("r{}", field_name);
+ r_fields.push(gen_tuple_field(&r_name));
+
+ let lhs = make::expr_path(make::ext::ident_path(&l_name));
+ let rhs = make::expr_path(make::ext::ident_path(&r_name));
+ let cmp = make::expr_bin_op(
+ lhs,
+ BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ rhs,
+ );
+ expr = gen_eq_chain(expr, cmp);
+ }
+
+ let left = make::tuple_struct_pat(gen_variant_path(&variant)?, l_fields);
+ let right = make::tuple_struct_pat(gen_variant_path(&variant)?, r_fields);
+ let tuple = make::tuple_pat(vec![left.into(), right.into()]);
+
+ if let Some(expr) = expr {
+ arms.push(make::match_arm(Some(tuple.into()), None, expr));
+ }
+ }
+ None => continue,
+ }
+ }
+
+ let expr = match arms.len() {
+ 0 => eq_check,
+ _ => {
+ if n_cases > arms.len() {
+ let lhs = make::wildcard_pat().into();
+ arms.push(make::match_arm(Some(lhs), None, eq_check));
+ }
+
+ let match_target = make::expr_tuple(vec![lhs_name, rhs_name]);
+ let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ make::expr_match(match_target, list)
+ }
+ };
+
+ make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ }
+ ast::Adt::Struct(strukt) => match strukt.field_list() {
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut expr = None;
+ for field in field_list.fields() {
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &field.name()?.to_string());
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &field.name()?.to_string());
+ let cmp =
+ make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ expr = gen_eq_chain(expr, cmp);
+ }
+ make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
+ }
+
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut expr = None;
+ for (i, _) in field_list.fields().enumerate() {
+ let idx = format!("{}", i);
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &idx);
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &idx);
+ let cmp =
+ make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
+ expr = gen_eq_chain(expr, cmp);
+ }
+ make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
+ }
+
+ // No fields in the body means there's nothing to compare, so the result is always `true`.
+ None => {
+ let expr = make::expr_literal("true").into();
+ make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ }
+ },
+ };
+
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+ stdx::always!(func.name().map_or(false, |name| name.text() == "partial_cmp"));
+ fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
+ let mut arms = vec![];
+
+ let variant_name =
+ make::path_pat(make::ext::path_from_idents(["core", "cmp", "Ordering", "Equal"])?);
+ let lhs = make::tuple_struct_pat(make::ext::path_from_idents(["Some"])?, [variant_name]);
+ arms.push(make::match_arm(Some(lhs.into()), None, make::expr_empty_block()));
+
+ arms.push(make::match_arm(
+ [make::ident_pat(false, false, make::name("ord")).into()],
+ None,
+ make::expr_return(Some(make::expr_path(make::ext::ident_path("ord")))),
+ ));
+ let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
+ Some(make::expr_stmt(make::expr_match(match_target, list)).into())
+ }
+
+ fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+ let rhs = make::expr_ref(rhs, false);
+ let method = make::name_ref("partial_cmp");
+ make::expr_method_call(lhs, method, make::arg_list(Some(rhs)))
+ }
+
+ // FIXME: return `None` if the trait carries a generic type; we can only
+ // generate this code for `Self` for the time being.
+
+ let body = match adt {
+ // `PartialOrd` cannot be derived for unions, so no default impl can be provided.
+ ast::Adt::Union(_) => return None,
+ // `core::mem::Discriminant` does not implement `PartialOrd` in stable Rust today.
+ ast::Adt::Enum(_) => return None,
+ ast::Adt::Struct(strukt) => match strukt.field_list() {
+ Some(ast::FieldList::RecordFieldList(field_list)) => {
+ let mut exprs = vec![];
+ for field in field_list.fields() {
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &field.name()?.to_string());
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &field.name()?.to_string());
+ let ord = gen_partial_cmp_call(lhs, rhs);
+ exprs.push(ord);
+ }
+
+ let tail = exprs.pop();
+ let stmts = exprs
+ .into_iter()
+ .map(gen_partial_eq_match)
+ .collect::<Option<Vec<ast::Stmt>>>()?;
+ make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
+ }
+
+ Some(ast::FieldList::TupleFieldList(field_list)) => {
+ let mut exprs = vec![];
+ for (i, _) in field_list.fields().enumerate() {
+ let idx = format!("{}", i);
+ let lhs = make::expr_path(make::ext::ident_path("self"));
+ let lhs = make::expr_field(lhs, &idx);
+ let rhs = make::expr_path(make::ext::ident_path("other"));
+ let rhs = make::expr_field(rhs, &idx);
+ let ord = gen_partial_cmp_call(lhs, rhs);
+ exprs.push(ord);
+ }
+ let tail = exprs.pop();
+ let stmts = exprs
+ .into_iter()
+ .map(gen_partial_eq_match)
+ .collect::<Option<Vec<ast::Stmt>>>()?;
+ make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
+ }
+
+ // No fields in the body means there's nothing to compare.
+ None => {
+ let expr = make::expr_literal("true").into();
+ make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
+ }
+ },
+ };
+
+ ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
+ Some(())
+}
+
+fn make_discriminant() -> Option<ast::Expr> {
+ Some(make::expr_path(make::ext::path_from_idents(["core", "mem", "discriminant"])?))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
new file mode 100644
index 000000000..779cdbc93
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
@@ -0,0 +1,775 @@
+//! This module contains functions to suggest names for expressions, functions and other items
+
+use hir::Semantics;
+use ide_db::RootDatabase;
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{self, HasName},
+ match_ast, AstNode, SmolStr,
+};
+
+/// Trait names that will be ignored when they appear in `impl Trait` and `dyn Trait`
+const USELESS_TRAITS: &[&str] = &["Send", "Sync", "Copy", "Clone", "Eq", "PartialEq"];
+
+/// Identifier names that won't be suggested, ever
+///
+/// **NOTE**: they must all be in lower snake case
+const USELESS_NAMES: &[&str] =
+ &["new", "default", "option", "some", "none", "ok", "err", "str", "string"];
+
+/// Generic types replaced by their first argument
+///
+/// # Examples
+/// `Option<Name>` -> `Name`
+/// `Result<User, Error>` -> `User`
+const WRAPPER_TYPES: &[&str] = &["Box", "Option", "Result"];
+
+/// Prefixes to strip from method names
+///
+/// # Examples
+/// `vec.as_slice()` -> `slice`
+/// `args.into_config()` -> `config`
+/// `bytes.to_vec()` -> `vec`
+const USELESS_METHOD_PREFIXES: &[&str] = &["into_", "as_", "to_"];
+
+/// Useless methods that are stripped from the expression
+///
+/// # Examples
+/// `var.name().to_string()` -> `var.name()`
+const USELESS_METHODS: &[&str] = &[
+ "to_string",
+ "as_str",
+ "to_owned",
+ "as_ref",
+ "clone",
+ "cloned",
+ "expect",
+ "expect_none",
+ "unwrap",
+ "unwrap_none",
+ "unwrap_or",
+ "unwrap_or_default",
+ "unwrap_or_else",
+ "unwrap_unchecked",
+ "iter",
+ "into_iter",
+ "iter_mut",
+];
+
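+/// Suggest a name for a generic type parameter introduced for an `impl Trait` type: the
+/// first character of the bound's text (e.g. `I` for `impl Iterator<Item = u8>`), falling
+/// back to `T` when there is no bound to take a character from.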
+pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
+ let c = ty
+ .type_bound_list()
+ .and_then(|bounds| bounds.syntax().text().char_at(0.into()))
+ .unwrap_or('T');
+ c.encode_utf8(&mut [0; 4]).into()
+}
+
+/// Suggest name of variable for given expression
+///
+/// **NOTE**: it is the caller's responsibility to guarantee uniqueness of the name,
+/// i.e. this function does not look at the names already in scope.
+///
+/// # Current implementation
+///
+/// In the current implementation, the function tries to get the name from
+/// the following sources:
+///
+/// * if the expression is an argument to a function/method, use the parameter name
+/// * if the expression is a function/method call, use the function name
+/// * the expression's type name, if it has one (e.g. `()`, `fn() -> ()` or `!` do not have names)
+/// * fallback: `var_name`
+///
+/// It also applies heuristics to filter out less informative names
+///
+/// Currently it sticks to the first name found.
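+///
+/// For example (see the tests below), `bar.frobnicate()` suggests `frobnicate`,
+/// `Args.to_config()` suggests `config`, and an expression with no usable name falls
+/// back to `var_name`.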
+// FIXME: Microoptimize and return a `SmolStr` here.
+pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String {
+ // `from_param` does not benefit from stripping:
+ // it needs the largest context possible,
+ // so we check it first
+ if let Some(name) = from_param(expr, sema) {
+ return name;
+ }
+
+ let mut next_expr = Some(expr.clone());
+ while let Some(expr) = next_expr {
+ let name =
+ from_call(&expr).or_else(|| from_type(&expr, sema)).or_else(|| from_field_name(&expr));
+ if let Some(name) = name {
+ return name;
+ }
+
+ match expr {
+ ast::Expr::RefExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::BoxExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::AwaitExpr(inner) => next_expr = inner.expr(),
+ // ast::Expr::BlockExpr(block) => expr = block.tail_expr(),
+ ast::Expr::CastExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::MethodCallExpr(method) if is_useless_method(&method) => {
+ next_expr = method.receiver();
+ }
+ ast::Expr::ParenExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::TryExpr(inner) => next_expr = inner.expr(),
+ ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(ast::UnaryOp::Deref) => {
+ next_expr = prefix.expr()
+ }
+ _ => break,
+ }
+ }
+
+ "var_name".to_string()
+}
+
+fn normalize(name: &str) -> Option<String> {
+ let name = to_lower_snake_case(name);
+
+ if USELESS_NAMES.contains(&name.as_str()) {
+ return None;
+ }
+
+ if !is_valid_name(&name) {
+ return None;
+ }
+
+ Some(name)
+}
+
+fn is_valid_name(name: &str) -> bool {
+ match ide_db::syntax_helpers::LexedStr::single_token(name) {
+ Some((syntax::SyntaxKind::IDENT, _error)) => true,
+ _ => false,
+ }
+}
+
+fn is_useless_method(method: &ast::MethodCallExpr) -> bool {
+ let ident = method.name_ref().and_then(|it| it.ident_token());
+
+ match ident {
+ Some(ident) => USELESS_METHODS.contains(&ident.text()),
+ None => false,
+ }
+}
+
+fn from_call(expr: &ast::Expr) -> Option<String> {
+ from_func_call(expr).or_else(|| from_method_call(expr))
+}
+
+fn from_func_call(expr: &ast::Expr) -> Option<String> {
+ let call = match expr {
+ ast::Expr::CallExpr(call) => call,
+ _ => return None,
+ };
+ let func = match call.expr()? {
+ ast::Expr::PathExpr(path) => path,
+ _ => return None,
+ };
+ let ident = func.path()?.segment()?.name_ref()?.ident_token()?;
+ normalize(ident.text())
+}
+
+fn from_method_call(expr: &ast::Expr) -> Option<String> {
+ let method = match expr {
+ ast::Expr::MethodCallExpr(call) => call,
+ _ => return None,
+ };
+ let ident = method.name_ref()?.ident_token()?;
+ let mut name = ident.text();
+
+ if USELESS_METHODS.contains(&name) {
+ return None;
+ }
+
+ for prefix in USELESS_METHOD_PREFIXES {
+ if let Some(suffix) = name.strip_prefix(prefix) {
+ name = suffix;
+ break;
+ }
+ }
+
+ normalize(name)
+}
+
+fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
+ let arg_list = expr.syntax().parent().and_then(ast::ArgList::cast)?;
+ let args_parent = arg_list.syntax().parent()?;
+ let func = match_ast! {
+ match args_parent {
+ ast::CallExpr(call) => {
+ let func = call.expr()?;
+ let func_ty = sema.type_of_expr(&func)?.adjusted();
+ func_ty.as_callable(sema.db)?
+ },
+ ast::MethodCallExpr(method) => sema.resolve_method_call_as_callable(&method)?,
+ _ => return None,
+ }
+ };
+
+ let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap();
+ let (pat, _) = func.params(sema.db).into_iter().nth(idx)?;
+ let pat = match pat? {
+ either::Either::Right(pat) => pat,
+ _ => return None,
+ };
+ let name = var_name_from_pat(&pat)?;
+ normalize(&name.to_string())
+}
+
+fn var_name_from_pat(pat: &ast::Pat) -> Option<ast::Name> {
+ match pat {
+ ast::Pat::IdentPat(var) => var.name(),
+ ast::Pat::RefPat(ref_pat) => var_name_from_pat(&ref_pat.pat()?),
+ ast::Pat::BoxPat(box_pat) => var_name_from_pat(&box_pat.pat()?),
+ _ => None,
+ }
+}
+
+fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
+ let ty = sema.type_of_expr(expr)?.adjusted();
+ let ty = ty.remove_ref().unwrap_or(ty);
+
+ name_of_type(&ty, sema.db)
+}
+
+fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option<String> {
+ let name = if let Some(adt) = ty.as_adt() {
+ let name = adt.name(db).to_string();
+
+ if WRAPPER_TYPES.contains(&name.as_str()) {
+ let inner_ty = ty.type_arguments().next()?;
+ return name_of_type(&inner_ty, db);
+ }
+
+ name
+ } else if let Some(trait_) = ty.as_dyn_trait() {
+ trait_name(&trait_, db)?
+ } else if let Some(traits) = ty.as_impl_traits(db) {
+ let mut iter = traits.filter_map(|t| trait_name(&t, db));
+ let name = iter.next()?;
+ if iter.next().is_some() {
+ return None;
+ }
+ name
+ } else {
+ return None;
+ };
+ normalize(&name)
+}
+
+fn trait_name(trait_: &hir::Trait, db: &RootDatabase) -> Option<String> {
+ let name = trait_.name(db).to_string();
+ if USELESS_TRAITS.contains(&name.as_str()) {
+ return None;
+ }
+ Some(name)
+}
+
+fn from_field_name(expr: &ast::Expr) -> Option<String> {
+ let field = match expr {
+ ast::Expr::FieldExpr(field) => field,
+ _ => return None,
+ };
+ let ident = field.name_ref()?.ident_token()?;
+ normalize(ident.text())
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::{fixture::WithFixture, FileRange};
+
+ use super::*;
+
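+ // In these fixtures, the `$0 ... $0` markers are assumed to delimit the selected range
+ // that `check` resolves to an expression and passes to `for_variable`.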
+ #[track_caller]
+ fn check(ra_fixture: &str, expected: &str) {
+ let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
+ let frange = FileRange { file_id, range: range_or_offset.into() };
+
+ let sema = Semantics::new(&db);
+ let source_file = sema.parse(frange.file_id);
+ let element = source_file.syntax().covering_element(frange.range);
+ let expr =
+ element.ancestors().find_map(ast::Expr::cast).expect("selection is not an expression");
+ assert_eq!(
+ expr.syntax().text_range(),
+ frange.range,
+ "selection is not an expression(yet contained in one)"
+ );
+ let name = for_variable(&expr, &sema);
+ assert_eq!(&name, expected);
+ }
+
+ #[test]
+ fn no_args() {
+ check(r#"fn foo() { $0bar()$0 }"#, "bar");
+ check(r#"fn foo() { $0bar.frobnicate()$0 }"#, "frobnicate");
+ }
+
+ #[test]
+ fn single_arg() {
+ check(r#"fn foo() { $0bar(1)$0 }"#, "bar");
+ }
+
+ #[test]
+ fn many_args() {
+ check(r#"fn foo() { $0bar(1, 2, 3)$0 }"#, "bar");
+ }
+
+ #[test]
+ fn path() {
+ check(r#"fn foo() { $0i32::bar(1, 2, 3)$0 }"#, "bar");
+ }
+
+ #[test]
+ fn generic_params() {
+ check(r#"fn foo() { $0bar::<i32>(1, 2, 3)$0 }"#, "bar");
+ check(r#"fn foo() { $0bar.frobnicate::<i32, u32>()$0 }"#, "frobnicate");
+ }
+
+ #[test]
+ fn to_name() {
+ check(
+ r#"
+struct Args;
+struct Config;
+impl Args {
+ fn to_config(&self) -> Config {}
+}
+fn foo() {
+ $0Args.to_config()$0;
+}
+"#,
+ "config",
+ );
+ }
+
+ #[test]
+ fn plain_func() {
+ check(
+ r#"
+fn bar(n: i32, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn mut_param() {
+ check(
+ r#"
+fn bar(mut n: i32, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn func_does_not_exist() {
+ check(r#"fn foo() { bar($01$0, 2) }"#, "var_name");
+ }
+
+ #[test]
+ fn unnamed_param() {
+ check(
+ r#"
+fn bar(_: i32, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "var_name",
+ );
+ }
+
+ #[test]
+ fn tuple_pat() {
+ check(
+ r#"
+fn bar((n, k): (i32, i32), m: u32);
+fn foo() {
+ bar($0(1, 2)$0, 3)
+}
+"#,
+ "var_name",
+ );
+ }
+
+ #[test]
+ fn ref_pat() {
+ check(
+ r#"
+fn bar(&n: &i32, m: u32);
+fn foo() { bar($0&1$0, 3) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn box_pat() {
+ check(
+ r#"
+fn bar(box n: &i32, m: u32);
+fn foo() { bar($01$0, 3) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn param_out_of_index() {
+ check(
+ r#"
+fn bar(n: i32, m: u32);
+fn foo() { bar(1, 2, $03$0) }
+"#,
+ "var_name",
+ );
+ }
+
+ #[test]
+ fn generic_param_resolved() {
+ check(
+ r#"
+fn bar<T>(n: T, m: u32);
+fn foo() { bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn generic_param_unresolved() {
+ check(
+ r#"
+fn bar<T>(n: T, m: u32);
+fn foo<T>(x: T) { bar($0x$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S.bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method_on_impl_trait() {
+ check(
+ r#"
+struct S;
+trait T {
+ fn bar(&self, n: i32, m: u32);
+}
+impl T for S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S.bar($01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method_ufcs() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S::bar(&S, $01$0, 2) }
+"#,
+ "n",
+ );
+ }
+
+ #[test]
+ fn method_self() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(&self, n: i32, m: u32); }
+fn foo() { S::bar($0&S$0, 1, 2) }
+"#,
+ "s",
+ );
+ }
+
+ #[test]
+ fn method_self_named() {
+ check(
+ r#"
+struct S;
+impl S { fn bar(strukt: &Self, n: i32, m: u32); }
+fn foo() { S::bar($0&S$0, 1, 2) }
+"#,
+ "strukt",
+ );
+ }
+
+ #[test]
+ fn i32() {
+ check(r#"fn foo() { let _: i32 = $01$0; }"#, "var_name");
+ }
+
+ #[test]
+ fn u64() {
+ check(r#"fn foo() { let _: u64 = $01$0; }"#, "var_name");
+ }
+
+ #[test]
+ fn bool() {
+ check(r#"fn foo() { let _: bool = $0true$0; }"#, "var_name");
+ }
+
+ #[test]
+ fn struct_unit() {
+ check(
+ r#"
+struct Seed;
+fn foo() { let _ = $0Seed$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn struct_unit_to_snake() {
+ check(
+ r#"
+struct SeedState;
+fn foo() { let _ = $0SeedState$0; }
+"#,
+ "seed_state",
+ );
+ }
+
+ #[test]
+ fn struct_single_arg() {
+ check(
+ r#"
+struct Seed(u32);
+fn foo() { let _ = $0Seed(0)$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn struct_with_fields() {
+ check(
+ r#"
+struct Seed { value: u32 }
+fn foo() { let _ = $0Seed { value: 0 }$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn enum_() {
+ check(
+ r#"
+enum Kind { A, B }
+fn foo() { let _ = $0Kind::A$0; }
+"#,
+ "kind",
+ );
+ }
+
+ #[test]
+ fn enum_generic_resolved() {
+ check(
+ r#"
+enum Kind<T> { A { x: T }, B }
+fn foo() { let _ = $0Kind::A { x:1 }$0; }
+"#,
+ "kind",
+ );
+ }
+
+ #[test]
+ fn enum_generic_unresolved() {
+ check(
+ r#"
+enum Kind<T> { A { x: T }, B }
+fn foo<T>(x: T) { let _ = $0Kind::A { x }$0; }
+"#,
+ "kind",
+ );
+ }
+
+ #[test]
+ fn dyn_trait() {
+ check(
+ r#"
+trait DynHandler {}
+fn bar() -> dyn DynHandler {}
+fn foo() { $0(bar())$0; }
+"#,
+ "dyn_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait() {
+ check(
+ r#"
+trait StaticHandler {}
+fn bar() -> impl StaticHandler {}
+fn foo() { $0(bar())$0; }
+"#,
+ "static_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait_plus_clone() {
+ check(
+ r#"
+trait StaticHandler {}
+trait Clone {}
+fn bar() -> impl StaticHandler + Clone {}
+fn foo() { $0(bar())$0; }
+"#,
+ "static_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait_plus_lifetime() {
+ check(
+ r#"
+trait StaticHandler {}
+trait Clone {}
+fn bar<'a>(&'a i32) -> impl StaticHandler + 'a {}
+fn foo() { $0(bar(&1))$0; }
+"#,
+ "static_handler",
+ );
+ }
+
+ #[test]
+ fn impl_trait_plus_trait() {
+ check(
+ r#"
+trait Handler {}
+trait StaticHandler {}
+fn bar() -> impl StaticHandler + Handler {}
+fn foo() { $0(bar())$0; }
+"#,
+ "bar",
+ );
+ }
+
+ #[test]
+ fn ref_value() {
+ check(
+ r#"
+struct Seed;
+fn bar() -> &Seed {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn box_value() {
+ check(
+ r#"
+struct Box<T>(*const T);
+struct Seed;
+fn bar() -> Box<Seed> {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn box_generic() {
+ check(
+ r#"
+struct Box<T>(*const T);
+fn bar<T>() -> Box<T> {}
+fn foo<T>() { $0(bar::<T>())$0; }
+"#,
+ "bar",
+ );
+ }
+
+ #[test]
+ fn option_value() {
+ check(
+ r#"
+enum Option<T> { Some(T) }
+struct Seed;
+fn bar() -> Option<Seed> {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn result_value() {
+ check(
+ r#"
+enum Result<T, E> { Ok(T), Err(E) }
+struct Seed;
+struct Error;
+fn bar() -> Result<Seed, Error> {}
+fn foo() { $0(bar())$0; }
+"#,
+ "seed",
+ );
+ }
+
+ #[test]
+ fn ref_call() {
+ check(
+ r#"
+fn foo() { $0&bar(1, 3)$0 }
+"#,
+ "bar",
+ );
+ }
+
+ #[test]
+ fn name_to_string() {
+ check(
+ r#"
+fn foo() { $0function.name().to_string()$0 }
+"#,
+ "name",
+ );
+ }
+
+ #[test]
+ fn nested_useless_method() {
+ check(
+ r#"
+fn foo() { $0function.name().as_ref().unwrap().to_string()$0 }
+"#,
+ "name",
+ );
+ }
+
+ #[test]
+ fn struct_field_name() {
+ check(
+ r#"
+struct S<T> {
+ some_field: T;
+}
+fn foo<T>(some_struct: S<T>) { $0some_struct.some_field$0 }
+"#,
+ "some_field",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
new file mode 100644
index 000000000..8c9d6b228
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "ide-completion"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+
+once_cell = "1.12.0"
+smallvec = "1.9.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+# The completions crate should depend only on the top-level `hir` package. If you need
+# something from some `hir-xxx` subpackage, re-export the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+test-utils = { path = "../test-utils" }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
new file mode 100644
index 000000000..72579e602
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -0,0 +1,691 @@
+//! This module defines an accumulator for completions which are going to be presented to the user.
+
+pub(crate) mod attribute;
+pub(crate) mod dot;
+pub(crate) mod expr;
+pub(crate) mod extern_abi;
+pub(crate) mod field;
+pub(crate) mod flyimport;
+pub(crate) mod fn_param;
+pub(crate) mod format_string;
+pub(crate) mod item_list;
+pub(crate) mod keyword;
+pub(crate) mod lifetime;
+pub(crate) mod mod_;
+pub(crate) mod pattern;
+pub(crate) mod postfix;
+pub(crate) mod record;
+pub(crate) mod snippet;
+pub(crate) mod r#type;
+pub(crate) mod use_;
+pub(crate) mod vis;
+
+use std::iter;
+
+use hir::{known, ScopeDef};
+use ide_db::{imports::import_assets::LocatedImport, SymbolKind};
+use syntax::ast;
+
+use crate::{
+ context::{
+ DotAccess, ItemListKind, NameContext, NameKind, NameRefContext, NameRefKind,
+ PathCompletionCtx, PathKind, PatternContext, TypeLocation, Visible,
+ },
+ item::Builder,
+ render::{
+ const_::render_const,
+ function::{render_fn, render_method},
+ literal::{render_struct_literal, render_variant_lit},
+ macro_::render_macro,
+ pattern::{render_struct_pat, render_variant_pat},
+ render_field, render_path_resolution, render_pattern_resolution, render_tuple_field,
+ type_alias::{render_type_alias, render_type_alias_with_eq},
+ union_literal::render_union_literal,
+ RenderContext,
+ },
+ CompletionContext, CompletionItem, CompletionItemKind,
+};
+
+/// Represents an in-progress set of completions being built.
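+///
+/// Items are accumulated via the `add_*` methods below and handed out as a plain
+/// `Vec<CompletionItem>` through the `From` impl.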
+#[derive(Debug, Default)]
+pub struct Completions {
+ buf: Vec<CompletionItem>,
+}
+
+impl From<Completions> for Vec<CompletionItem> {
+ fn from(val: Completions) -> Self {
+ val.buf
+ }
+}
+
+impl Builder {
+ /// Convenience method that allows adding a freshly created completion to the accumulator
+ /// without binding it to a variable.
+ pub(crate) fn add_to(self, acc: &mut Completions) {
+ acc.add(self.build())
+ }
+}
+
+impl Completions {
+ fn add(&mut self, item: CompletionItem) {
+ self.buf.push(item)
+ }
+
+ fn add_opt(&mut self, item: Option<CompletionItem>) {
+ if let Some(item) = item {
+ self.buf.push(item)
+ }
+ }
+
+ pub(crate) fn add_all<I>(&mut self, items: I)
+ where
+ I: IntoIterator,
+ I::Item: Into<CompletionItem>,
+ {
+ items.into_iter().for_each(|item| self.add(item.into()))
+ }
+
+ pub(crate) fn add_keyword(&mut self, ctx: &CompletionContext<'_>, keyword: &'static str) {
+ let item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), keyword);
+ item.add_to(self);
+ }
+
+ pub(crate) fn add_nameref_keywords_with_colon(&mut self, ctx: &CompletionContext<'_>) {
+ ["self::", "crate::"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
+
+ if ctx.depth_from_crate_root > 0 {
+ self.add_keyword(ctx, "super::");
+ }
+ }
+
+ pub(crate) fn add_nameref_keywords(&mut self, ctx: &CompletionContext<'_>) {
+ ["self", "crate"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
+
+ if ctx.depth_from_crate_root > 0 {
+ self.add_keyword(ctx, "super");
+ }
+ }
+
+ pub(crate) fn add_super_keyword(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ super_chain_len: Option<usize>,
+ ) {
+ if let Some(len) = super_chain_len {
+ if len > 0 && len < ctx.depth_from_crate_root {
+ self.add_keyword(ctx, "super::");
+ }
+ }
+ }
+
+ pub(crate) fn add_keyword_snippet_expr(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ incomplete_let: bool,
+ kw: &str,
+ snippet: &str,
+ ) {
+ let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
+
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ if incomplete_let && snippet.ends_with('}') {
+ // complete block expression snippets with a trailing semicolon, if inside an incomplete let
+ cov_mark::hit!(let_semi);
+ item.insert_snippet(cap, format!("{};", snippet));
+ } else {
+ item.insert_snippet(cap, snippet);
+ }
+ }
+ None => {
+ item.insert_text(if snippet.contains('$') { kw } else { snippet });
+ }
+ };
+ item.add_to(self);
+ }
+
+ pub(crate) fn add_keyword_snippet(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ kw: &str,
+ snippet: &str,
+ ) {
+ let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
+
+ match ctx.config.snippet_cap {
+ Some(cap) => item.insert_snippet(cap, snippet),
+ None => item.insert_text(if snippet.contains('$') { kw } else { snippet }),
+ };
+ item.add_to(self);
+ }
+
+ pub(crate) fn add_crate_roots(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ ) {
+ ctx.process_all_names(&mut |name, res| match res {
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root(ctx.db) => {
+ self.add_module(ctx, path_ctx, m, name);
+ }
+ _ => (),
+ });
+ }
+
+ pub(crate) fn add_path_resolution(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: hir::Name,
+ resolution: hir::ScopeDef,
+ ) {
+ let is_private_editable = match ctx.def_is_visible(&resolution) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add(
+ render_path_resolution(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ path_ctx,
+ local_name,
+ resolution,
+ )
+ .build(),
+ );
+ }
+
+ pub(crate) fn add_pattern_resolution(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+ local_name: hir::Name,
+ resolution: hir::ScopeDef,
+ ) {
+ let is_private_editable = match ctx.def_is_visible(&resolution) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add(
+ render_pattern_resolution(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ pattern_ctx,
+ local_name,
+ resolution,
+ )
+ .build(),
+ );
+ }
+
+ pub(crate) fn add_enum_variants(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ e: hir::Enum,
+ ) {
+ e.variants(ctx.db)
+ .into_iter()
+ .for_each(|variant| self.add_enum_variant(ctx, path_ctx, variant, None));
+ }
+
+ pub(crate) fn add_module(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ module: hir::Module,
+ local_name: hir::Name,
+ ) {
+ self.add_path_resolution(
+ ctx,
+ path_ctx,
+ local_name,
+ hir::ScopeDef::ModuleDef(module.into()),
+ );
+ }
+
+ pub(crate) fn add_macro(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ mac: hir::Macro,
+ local_name: hir::Name,
+ ) {
+ let is_private_editable = match ctx.is_visible(&mac) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add(
+ render_macro(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ path_ctx,
+ local_name,
+ mac,
+ )
+ .build(),
+ );
+ }
+
+ pub(crate) fn add_function(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ func: hir::Function,
+ local_name: Option<hir::Name>,
+ ) {
+ let is_private_editable = match ctx.is_visible(&func) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add(
+ render_fn(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ path_ctx,
+ local_name,
+ func,
+ )
+ .build(),
+ );
+ }
+
+ pub(crate) fn add_method(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+ func: hir::Function,
+ receiver: Option<hir::Name>,
+ local_name: Option<hir::Name>,
+ ) {
+ let is_private_editable = match ctx.is_visible(&func) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add(
+ render_method(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ dot_access,
+ receiver,
+ local_name,
+ func,
+ )
+ .build(),
+ );
+ }
+
+ pub(crate) fn add_method_with_import(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+ func: hir::Function,
+ import: LocatedImport,
+ ) {
+ let is_private_editable = match ctx.is_visible(&func) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add(
+ render_method(
+ RenderContext::new(ctx)
+ .private_editable(is_private_editable)
+ .import_to_add(Some(import)),
+ dot_access,
+ None,
+ None,
+ func,
+ )
+ .build(),
+ );
+ }
+
+ pub(crate) fn add_const(&mut self, ctx: &CompletionContext<'_>, konst: hir::Const) {
+ let is_private_editable = match ctx.is_visible(&konst) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add_opt(render_const(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ konst,
+ ));
+ }
+
+ pub(crate) fn add_type_alias(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ type_alias: hir::TypeAlias,
+ ) {
+ let is_private_editable = match ctx.is_visible(&type_alias) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ self.add_opt(render_type_alias(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ type_alias,
+ ));
+ }
+
+ pub(crate) fn add_type_alias_with_eq(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ type_alias: hir::TypeAlias,
+ ) {
+ self.add_opt(render_type_alias_with_eq(RenderContext::new(ctx), type_alias));
+ }
+
+ pub(crate) fn add_qualified_enum_variant(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ variant: hir::Variant,
+ path: hir::ModPath,
+ ) {
+ if let Some(builder) =
+ render_variant_lit(RenderContext::new(ctx), path_ctx, None, variant, Some(path))
+ {
+ self.add(builder.build());
+ }
+ }
+
+ pub(crate) fn add_enum_variant(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ variant: hir::Variant,
+ local_name: Option<hir::Name>,
+ ) {
+ if let PathCompletionCtx { kind: PathKind::Pat { pat_ctx }, .. } = path_ctx {
+ cov_mark::hit!(enum_variant_pattern_path);
+ self.add_variant_pat(ctx, pat_ctx, Some(path_ctx), variant, local_name);
+ return;
+ }
+
+ if let Some(builder) =
+ render_variant_lit(RenderContext::new(ctx), path_ctx, local_name, variant, None)
+ {
+ self.add(builder.build());
+ }
+ }
+
+ pub(crate) fn add_field(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+ receiver: Option<hir::Name>,
+ field: hir::Field,
+ ty: &hir::Type,
+ ) {
+ let is_private_editable = match ctx.is_visible(&field) {
+ Visible::Yes => false,
+ Visible::Editable => true,
+ Visible::No => return,
+ };
+ let item = render_field(
+ RenderContext::new(ctx).private_editable(is_private_editable),
+ dot_access,
+ receiver,
+ field,
+ ty,
+ );
+ self.add(item);
+ }
+
+ pub(crate) fn add_struct_literal(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ strukt: hir::Struct,
+ path: Option<hir::ModPath>,
+ local_name: Option<hir::Name>,
+ ) {
+ if let Some(builder) =
+ render_struct_literal(RenderContext::new(ctx), path_ctx, strukt, path, local_name)
+ {
+ self.add(builder.build());
+ }
+ }
+
+ pub(crate) fn add_union_literal(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ un: hir::Union,
+ path: Option<hir::ModPath>,
+ local_name: Option<hir::Name>,
+ ) {
+ let item = render_union_literal(RenderContext::new(ctx), un, path, local_name);
+ self.add_opt(item);
+ }
+
+ pub(crate) fn add_tuple_field(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ receiver: Option<hir::Name>,
+ field: usize,
+ ty: &hir::Type,
+ ) {
+ let item = render_tuple_field(RenderContext::new(ctx), receiver, field, ty);
+ self.add(item);
+ }
+
+ pub(crate) fn add_lifetime(&mut self, ctx: &CompletionContext<'_>, name: hir::Name) {
+ CompletionItem::new(SymbolKind::LifetimeParam, ctx.source_range(), name.to_smol_str())
+ .add_to(self)
+ }
+
+ pub(crate) fn add_label(&mut self, ctx: &CompletionContext<'_>, name: hir::Name) {
+ CompletionItem::new(SymbolKind::Label, ctx.source_range(), name.to_smol_str()).add_to(self)
+ }
+
+ pub(crate) fn add_variant_pat(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+ path_ctx: Option<&PathCompletionCtx>,
+ variant: hir::Variant,
+ local_name: Option<hir::Name>,
+ ) {
+ self.add_opt(render_variant_pat(
+ RenderContext::new(ctx),
+ pattern_ctx,
+ path_ctx,
+ variant,
+ local_name.clone(),
+ None,
+ ));
+ }
+
+ pub(crate) fn add_qualified_variant_pat(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+ variant: hir::Variant,
+ path: hir::ModPath,
+ ) {
+ let path = Some(&path);
+ self.add_opt(render_variant_pat(
+ RenderContext::new(ctx),
+ pattern_ctx,
+ None,
+ variant,
+ None,
+ path,
+ ));
+ }
+
+ pub(crate) fn add_struct_pat(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+ strukt: hir::Struct,
+ local_name: Option<hir::Name>,
+ ) {
+ self.add_opt(render_struct_pat(RenderContext::new(ctx), pattern_ctx, strukt, local_name));
+ }
+}
+
+/// Calls the callback for each variant of the provided enum with the path to the variant.
+/// Skips variants that are visible with single segment paths.
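+///
+/// For example (illustrative), for `enum Foo { Bar, Baz }` completed from inside `impl Foo`,
+/// the callback also receives the `Self::Bar` and `Self::Baz` paths, while variants already
+/// reachable through a single-segment path are skipped.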
+fn enum_variants_with_paths(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ enum_: hir::Enum,
+ impl_: &Option<ast::Impl>,
+ cb: impl Fn(&mut Completions, &CompletionContext<'_>, hir::Variant, hir::ModPath),
+) {
+ let variants = enum_.variants(ctx.db);
+
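+    // When completing inside an `impl` block of the enum itself, additionally offer the
+    // variants as `Self::Variant` paths.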
+ if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
+ if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
+ for &variant in &variants {
+ let self_path = hir::ModPath::from_segments(
+ hir::PathKind::Plain,
+ iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
+ );
+ cb(acc, ctx, variant, self_path);
+ }
+ }
+ }
+
+ for variant in variants {
+ if let Some(path) = ctx.module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) {
+ // Variants with trivial paths are already added by the existing completion logic,
+ // so we should avoid adding these twice
+ if path.segments().len() > 1 {
+ cb(acc, ctx, variant, path);
+ }
+ }
+ }
+}
+
+pub(super) fn complete_name(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ NameContext { name, kind }: &NameContext,
+) {
+ match kind {
+ NameKind::Const => {
+ item_list::trait_impl::complete_trait_impl_const(acc, ctx, name);
+ }
+ NameKind::Function => {
+ item_list::trait_impl::complete_trait_impl_fn(acc, ctx, name);
+ }
+ NameKind::IdentPat(pattern_ctx) => {
+ if ctx.token.kind() != syntax::T![_] {
+ complete_patterns(acc, ctx, pattern_ctx)
+ }
+ }
+ NameKind::Module(mod_under_caret) => {
+ mod_::complete_mod(acc, ctx, mod_under_caret);
+ }
+ NameKind::TypeAlias => {
+ item_list::trait_impl::complete_trait_impl_type_alias(acc, ctx, name);
+ }
+ NameKind::RecordField => {
+ field::complete_field_list_record_variant(acc, ctx);
+ }
+ NameKind::ConstParam
+ | NameKind::Enum
+ | NameKind::MacroDef
+ | NameKind::MacroRules
+ | NameKind::Rename
+ | NameKind::SelfParam
+ | NameKind::Static
+ | NameKind::Struct
+ | NameKind::Trait
+ | NameKind::TypeParam
+ | NameKind::Union
+ | NameKind::Variant => (),
+ }
+}
+
+pub(super) fn complete_name_ref(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ NameRefContext { nameref, kind }: &NameRefContext,
+) {
+ match kind {
+ NameRefKind::Path(path_ctx) => {
+ flyimport::import_on_the_fly_path(acc, ctx, path_ctx);
+
+ match &path_ctx.kind {
+ PathKind::Expr { expr_ctx } => {
+ expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
+
+ dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
+ item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
+ record::complete_record_expr_func_update(acc, ctx, path_ctx, expr_ctx);
+ snippet::complete_expr_snippet(acc, ctx, path_ctx, expr_ctx);
+ }
+ PathKind::Type { location } => {
+ r#type::complete_type_path(acc, ctx, path_ctx, location);
+
+ match location {
+ TypeLocation::TupleField => {
+ field::complete_field_list_tuple_variant(acc, ctx, path_ctx);
+ }
+ TypeLocation::TypeAscription(ascription) => {
+ r#type::complete_ascribed_type(acc, ctx, path_ctx, ascription);
+ }
+ TypeLocation::GenericArgList(_)
+ | TypeLocation::TypeBound
+ | TypeLocation::ImplTarget
+ | TypeLocation::ImplTrait
+ | TypeLocation::Other => (),
+ }
+ }
+ PathKind::Attr { attr_ctx } => {
+ attribute::complete_attribute_path(acc, ctx, path_ctx, attr_ctx);
+ }
+ PathKind::Derive { existing_derives } => {
+ attribute::complete_derive_path(acc, ctx, path_ctx, existing_derives);
+ }
+ PathKind::Item { kind } => {
+ item_list::complete_item_list(acc, ctx, path_ctx, kind);
+
+ snippet::complete_item_snippet(acc, ctx, path_ctx, kind);
+ if let ItemListKind::TraitImpl(impl_) = kind {
+ item_list::trait_impl::complete_trait_impl_item_by_name(
+ acc, ctx, path_ctx, nameref, impl_,
+ );
+ }
+ }
+ PathKind::Pat { .. } => {
+ pattern::complete_pattern_path(acc, ctx, path_ctx);
+ }
+ PathKind::Vis { has_in_token } => {
+ vis::complete_vis_path(acc, ctx, path_ctx, has_in_token);
+ }
+ PathKind::Use => {
+ use_::complete_use_path(acc, ctx, path_ctx, nameref);
+ }
+ }
+ }
+ NameRefKind::DotAccess(dot_access) => {
+ flyimport::import_on_the_fly_dot(acc, ctx, dot_access);
+ dot::complete_dot(acc, ctx, dot_access);
+ postfix::complete_postfix(acc, ctx, dot_access);
+ }
+ NameRefKind::Keyword(item) => {
+ keyword::complete_for_and_where(acc, ctx, item);
+ }
+ NameRefKind::RecordExpr { dot_prefix, expr } => {
+ record::complete_record_expr_fields(acc, ctx, expr, dot_prefix);
+ }
+ NameRefKind::Pattern(pattern_ctx) => complete_patterns(acc, ctx, pattern_ctx),
+ }
+}
+
+fn complete_patterns(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+) {
+ flyimport::import_on_the_fly_pat(acc, ctx, pattern_ctx);
+ fn_param::complete_fn_param(acc, ctx, pattern_ctx);
+ pattern::complete_pattern(acc, ctx, pattern_ctx);
+ record::complete_record_pattern_fields(acc, ctx, pattern_ctx);
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
new file mode 100644
index 000000000..d9fe94cb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
@@ -0,0 +1,380 @@
+//! Completion for (built-in) attributes, derives and lints.
+//!
+//! This module uses a bit of static metadata to provide completions for built-in attributes and lints.
+
+use ide_db::{
+ generated::lints::{
+ Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS,
+ },
+ syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
+ FxHashMap, SymbolKind,
+};
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use syntax::{
+ ast::{self, AttrKind},
+ AstNode, SyntaxKind, T,
+};
+
+use crate::{
+ context::{AttrCtx, CompletionContext, PathCompletionCtx, Qualified},
+ item::CompletionItem,
+ Completions,
+};
+
+mod cfg;
+mod derive;
+mod lint;
+mod repr;
+
+pub(crate) use self::derive::complete_derive_path;
+
+/// Complete inputs to known builtin attributes as well as derive attributes
+pub(crate) fn complete_known_attribute_input(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ &colon_prefix: &bool,
+ fake_attribute_under_caret: &ast::Attr,
+) -> Option<()> {
+ let attribute = fake_attribute_under_caret;
+ let name_ref = match attribute.path() {
+ Some(p) => Some(p.as_single_name_ref()?),
+ None => None,
+ };
+ let (path, tt) = name_ref.zip(attribute.token_tree())?;
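+    // Only offer input completions once the attribute's argument list has been opened,
+    // e.g. `#[allow($0`.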
+ if tt.l_paren_token().is_none() {
+ return None;
+ }
+
+ match path.text().as_str() {
+ "repr" => repr::complete_repr(acc, ctx, tt),
+ "feature" => {
+ lint::complete_lint(acc, ctx, colon_prefix, &parse_tt_as_comma_sep_paths(tt)?, FEATURES)
+ }
+ "allow" | "warn" | "deny" | "forbid" => {
+ let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
+
+ let lints: Vec<Lint> = CLIPPY_LINT_GROUPS
+ .iter()
+ .map(|g| &g.lint)
+ .chain(DEFAULT_LINTS)
+ .chain(CLIPPY_LINTS)
+ .chain(RUSTDOC_LINTS)
+ .cloned()
+ .collect();
+
+ lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
+ }
+ "cfg" => cfg::complete_cfg(acc, ctx),
+ _ => (),
+ }
+ Some(())
+}
+
+pub(crate) fn complete_attribute_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ &AttrCtx { kind, annotated_item_kind }: &AttrCtx,
+) {
+ let is_inner = kind == AttrKind::Inner;
+
+ match qualified {
+ Qualified::With {
+ resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+ super_chain_len,
+ ..
+ } => {
+ acc.add_super_keyword(ctx, *super_chain_len);
+
+ for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
+ match def {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_attr(ctx.db) => {
+ acc.add_macro(ctx, path_ctx, m, name)
+ }
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ acc.add_module(ctx, path_ctx, m, name)
+ }
+ _ => (),
+ }
+ }
+ return;
+ }
+ // fresh use tree with leading colon2, only show crate roots
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ // only show modules in a fresh UseTree
+ Qualified::No => {
+ ctx.process_all_names(&mut |name, def| match def {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_attr(ctx.db) => {
+ acc.add_macro(ctx, path_ctx, m, name)
+ }
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ acc.add_module(ctx, path_ctx, m, name)
+ }
+ _ => (),
+ });
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ }
+
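+    // Determine the built-in attributes applicable to the annotated item kind; expressions
+    // get their own fixed set, everything else is looked up in `KIND_TO_ATTRIBUTES`.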
+ let attributes = annotated_item_kind.and_then(|kind| {
+ if ast::Expr::can_cast(kind) {
+ Some(EXPR_ATTRIBUTES)
+ } else {
+ KIND_TO_ATTRIBUTES.get(&kind).copied()
+ }
+ });
+
+ let add_completion = |attr_completion: &AttrCompletion| {
+ let mut item =
+ CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), attr_completion.label);
+
+ if let Some(lookup) = attr_completion.lookup {
+ item.lookup_by(lookup);
+ }
+
+ if let Some((snippet, cap)) = attr_completion.snippet.zip(ctx.config.snippet_cap) {
+ item.insert_snippet(cap, snippet);
+ }
+
+ if is_inner || !attr_completion.prefer_inner {
+ item.add_to(acc);
+ }
+ };
+
+ match attributes {
+ Some(applicable) => applicable
+ .iter()
+ .flat_map(|name| ATTRIBUTES.binary_search_by(|attr| attr.key().cmp(name)).ok())
+ .flat_map(|idx| ATTRIBUTES.get(idx))
+ .for_each(add_completion),
+ None if is_inner => ATTRIBUTES.iter().for_each(add_completion),
+ None => ATTRIBUTES.iter().filter(|compl| !compl.prefer_inner).for_each(add_completion),
+ }
+}
+
+struct AttrCompletion {
+ label: &'static str,
+ lookup: Option<&'static str>,
+ snippet: Option<&'static str>,
+ prefer_inner: bool,
+}
+
+impl AttrCompletion {
+ fn key(&self) -> &'static str {
+ self.lookup.unwrap_or(self.label)
+ }
+
+ const fn prefer_inner(self) -> AttrCompletion {
+ AttrCompletion { prefer_inner: true, ..self }
+ }
+}
+
+const fn attr(
+ label: &'static str,
+ lookup: Option<&'static str>,
+ snippet: Option<&'static str>,
+) -> AttrCompletion {
+ AttrCompletion { label, lookup, snippet, prefer_inner: false }
+}
+
+macro_rules! attrs {
+ // attributes applicable to all items
+ [@ { item $($tt:tt)* } {$($acc:tt)*}] => {
+ attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle" })
+ };
+ // attributes applicable to all adts
+ [@ { adt $($tt:tt)* } {$($acc:tt)*}] => {
+ attrs!(@ { $($tt)* } { $($acc)*, "derive", "repr" })
+ };
+ // attributes applicable to all linkable things aka functions/statics
+ [@ { linkable $($tt:tt)* } {$($acc:tt)*}] => {
+ attrs!(@ { $($tt)* } { $($acc)*, "export_name", "link_name", "link_section" })
+ };
+ // error fallback for nicer error message
+ [@ { $ty:ident $($tt:tt)* } {$($acc:tt)*}] => {
+ compile_error!(concat!("unknown attr subtype ", stringify!($ty)))
+ };
+ // general push down accumulation
+ [@ { $lit:literal $($tt:tt)*} {$($acc:tt)*}] => {
+ attrs!(@ { $($tt)* } { $($acc)*, $lit })
+ };
+ [@ {$($tt:tt)+} {$($tt2:tt)*}] => {
+ compile_error!(concat!("Unexpected input ", stringify!($($tt)+)))
+ };
+ // final output construction
+ [@ {} {$($tt:tt)*}] => { &[$($tt)*] as _ };
+ // starting matcher
+ [$($tt:tt),*] => {
+ attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "forbid", "warn" })
+ };
+}
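+// Example: `attrs!(item)` expands to
+// `&["allow", "cfg", "cfg_attr", "deny", "forbid", "warn", "deprecated", "doc",
+//   "dochidden", "docalias", "must_use", "no_mangle"] as _`.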
+
+#[rustfmt::skip]
+static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| {
+ use SyntaxKind::*;
+ [
+ (
+ SOURCE_FILE,
+ attrs!(
+ item,
+ "crate_name", "feature", "no_implicit_prelude", "no_main", "no_std",
+ "recursion_limit", "type_length_limit", "windows_subsystem"
+ ),
+ ),
+ (MODULE, attrs!(item, "macro_use", "no_implicit_prelude", "path")),
+ (ITEM_LIST, attrs!(item, "no_implicit_prelude")),
+ (MACRO_RULES, attrs!(item, "macro_export", "macro_use")),
+ (MACRO_DEF, attrs!(item)),
+ (EXTERN_CRATE, attrs!(item, "macro_use", "no_link")),
+ (USE, attrs!(item)),
+ (TYPE_ALIAS, attrs!(item)),
+ (STRUCT, attrs!(item, adt, "non_exhaustive")),
+ (ENUM, attrs!(item, adt, "non_exhaustive")),
+ (UNION, attrs!(item, adt)),
+ (CONST, attrs!(item)),
+ (
+ FN,
+ attrs!(
+ item, linkable,
+ "cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro",
+ "proc_macro_derive", "proc_macro_attribute", "should_panic", "target_feature",
+ "test", "track_caller"
+ ),
+ ),
+ (STATIC, attrs!(item, linkable, "global_allocator", "used")),
+ (TRAIT, attrs!(item, "must_use")),
+ (IMPL, attrs!(item, "automatically_derived")),
+ (ASSOC_ITEM_LIST, attrs!(item)),
+ (EXTERN_BLOCK, attrs!(item, "link")),
+ (EXTERN_ITEM_LIST, attrs!(item, "link")),
+ (MACRO_CALL, attrs!()),
+ (SELF_PARAM, attrs!()),
+ (PARAM, attrs!()),
+ (RECORD_FIELD, attrs!()),
+ (VARIANT, attrs!("non_exhaustive")),
+ (TYPE_PARAM, attrs!()),
+ (CONST_PARAM, attrs!()),
+ (LIFETIME_PARAM, attrs!()),
+ (LET_STMT, attrs!()),
+ (EXPR_STMT, attrs!()),
+ (LITERAL, attrs!()),
+ (RECORD_EXPR_FIELD_LIST, attrs!()),
+ (RECORD_EXPR_FIELD, attrs!()),
+ (MATCH_ARM_LIST, attrs!()),
+ (MATCH_ARM, attrs!()),
+ (IDENT_PAT, attrs!()),
+ (RECORD_PAT_FIELD, attrs!()),
+ ]
+ .into_iter()
+ .collect()
+});
+const EXPR_ATTRIBUTES: &[&str] = attrs!();
+
+/// <https://doc.rust-lang.org/reference/attributes.html#built-in-attributes-index>
+// Keep these sorted for the binary search!
+const ATTRIBUTES: &[AttrCompletion] = &[
+ attr("allow(…)", Some("allow"), Some("allow(${0:lint})")),
+ attr("automatically_derived", None, None),
+ attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")),
+ attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")),
+ attr("cold", None, None),
+ attr(r#"crate_name = """#, Some("crate_name"), Some(r#"crate_name = "${0:crate_name}""#))
+ .prefer_inner(),
+ attr("deny(…)", Some("deny"), Some("deny(${0:lint})")),
+ attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)),
+ attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)),
+ attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)),
+ attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)),
+ attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)),
+ attr(
+ r#"export_name = "…""#,
+ Some("export_name"),
+ Some(r#"export_name = "${0:exported_symbol_name}""#),
+ ),
+ attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(),
+ attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")),
+ attr("global_allocator", None, None),
+ attr(r#"ignore = "…""#, Some("ignore"), Some(r#"ignore = "${0:reason}""#)),
+ attr("inline", Some("inline"), Some("inline")),
+ attr("link", None, None),
+ attr(r#"link_name = "…""#, Some("link_name"), Some(r#"link_name = "${0:symbol_name}""#)),
+ attr(
+ r#"link_section = "…""#,
+ Some("link_section"),
+ Some(r#"link_section = "${0:section_name}""#),
+ ),
+ attr("macro_export", None, None),
+ attr("macro_use", None, None),
+ attr(r#"must_use"#, Some("must_use"), Some(r#"must_use"#)),
+ attr("no_implicit_prelude", None, None).prefer_inner(),
+ attr("no_link", None, None).prefer_inner(),
+ attr("no_main", None, None).prefer_inner(),
+ attr("no_mangle", None, None),
+ attr("no_std", None, None).prefer_inner(),
+ attr("non_exhaustive", None, None),
+ attr("panic_handler", None, None),
+    attr(r#"path = "…""#, Some("path"), Some(r#"path = "${0:path}""#)),
+ attr("proc_macro", None, None),
+ attr("proc_macro_attribute", None, None),
+ attr("proc_macro_derive(…)", Some("proc_macro_derive"), Some("proc_macro_derive(${0:Trait})")),
+ attr(
+ r#"recursion_limit = "…""#,
+ Some("recursion_limit"),
+ Some(r#"recursion_limit = "${0:128}""#),
+ )
+ .prefer_inner(),
+ attr("repr(…)", Some("repr"), Some("repr(${0:C})")),
+ attr("should_panic", Some("should_panic"), Some(r#"should_panic"#)),
+ attr(
+ r#"target_feature(enable = "…")"#,
+ Some("target_feature"),
+ Some(r#"target_feature(enable = "${0:feature}")"#),
+ ),
+ attr("test", None, None),
+ attr("track_caller", None, None),
+ attr("type_length_limit = …", Some("type_length_limit"), Some("type_length_limit = ${0:128}"))
+ .prefer_inner(),
+ attr("used", None, None),
+ attr("warn(…)", Some("warn"), Some("warn(${0:lint})")),
+ attr(
+ r#"windows_subsystem = "…""#,
+ Some("windows_subsystem"),
+ Some(r#"windows_subsystem = "${0:subsystem}""#),
+ )
+ .prefer_inner(),
+];
+
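+/// Splits the attribute input token tree on top-level commas and parses each chunk as an
+/// expression, e.g. the input of `#[repr(C, align(4))]` yields the expressions `C` and `align(4)`.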
+fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
+ let r_paren = input.r_paren_token()?;
+ let tokens = input
+ .syntax()
+ .children_with_tokens()
+ .skip(1)
+ .take_while(|it| it.as_token() != Some(&r_paren));
+ let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+ Some(
+ input_expressions
+ .into_iter()
+ .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+ .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+ .collect::<Vec<ast::Expr>>(),
+ )
+}
+
+#[test]
+fn attributes_are_sorted() {
+ let mut attrs = ATTRIBUTES.iter().map(|attr| attr.key());
+ let mut prev = attrs.next().unwrap();
+
+ attrs.for_each(|next| {
+ assert!(
+ prev < next,
+ r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#,
+ prev,
+ next
+ );
+ prev = next;
+ });
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
new file mode 100644
index 000000000..311060143
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
@@ -0,0 +1,93 @@
+//! Completion for cfg
+
+use std::iter;
+
+use ide_db::SymbolKind;
+use itertools::Itertools;
+use syntax::SyntaxKind;
+
+use crate::{completions::Completions, context::CompletionContext, CompletionItem};
+
+pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
+ let add_completion = |item: &str| {
+ let mut completion = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item);
+ completion.insert_text(format!(r#""{}""#, item));
+ acc.add(completion.build());
+ };
+
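+    // Walk back over `=` and trivia to find the cfg key (e.g. `target_os`) whose value is
+    // being completed, if any.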
+ let previous = iter::successors(ctx.original_token.prev_token(), |t| {
+ (matches!(t.kind(), SyntaxKind::EQ) || t.kind().is_trivia())
+ .then(|| t.prev_token())
+ .flatten()
+ })
+ .find(|t| matches!(t.kind(), SyntaxKind::IDENT));
+
+ match previous.as_ref().map(|p| p.text()) {
+ Some("target_arch") => KNOWN_ARCH.iter().copied().for_each(add_completion),
+ Some("target_env") => KNOWN_ENV.iter().copied().for_each(add_completion),
+ Some("target_os") => KNOWN_OS.iter().copied().for_each(add_completion),
+ Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion),
+ Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion),
+ Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
+ let insert_text = format!(r#""{}""#, s);
+ let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
+ item.insert_text(insert_text);
+
+ acc.add(item.build());
+ }),
+ None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| {
+ let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
+ acc.add(item.build());
+ }),
+ };
+}
+
+const KNOWN_ARCH: [&str; 19] = [
+ "aarch64",
+ "arm",
+ "avr",
+ "hexagon",
+ "mips",
+ "mips64",
+ "msp430",
+ "nvptx64",
+ "powerpc",
+ "powerpc64",
+ "riscv32",
+ "riscv64",
+ "s390x",
+ "sparc",
+ "sparc64",
+ "wasm32",
+ "wasm64",
+ "x86",
+ "x86_64",
+];
+
+const KNOWN_ENV: [&str; 7] = ["eabihf", "gnu", "gnueabihf", "msvc", "relibc", "sgx", "uclibc"];
+
+const KNOWN_OS: [&str; 20] = [
+ "cuda",
+ "dragonfly",
+ "emscripten",
+ "freebsd",
+ "fuchsia",
+ "haiku",
+ "hermit",
+ "illumos",
+ "l4re",
+ "linux",
+ "netbsd",
+ "none",
+ "openbsd",
+ "psp",
+ "redox",
+ "solaris",
+ "uefi",
+ "unknown",
+ "vxworks",
+ "windows",
+];
+
+const KNOWN_VENDOR: [&str; 8] =
+ ["apple", "fortanix", "nvidia", "pc", "sony", "unknown", "wrs", "uwp"];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
new file mode 100644
index 000000000..793c22630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
@@ -0,0 +1,116 @@
+//! Completion for derives
+use hir::{HasAttrs, ScopeDef};
+use ide_db::SymbolKind;
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ context::{CompletionContext, ExistingDerives, PathCompletionCtx, Qualified},
+ item::CompletionItem,
+ Completions,
+};
+
+pub(crate) fn complete_derive_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ existing_derives: &ExistingDerives,
+) {
+ let core = ctx.famous_defs().core();
+
+ match qualified {
+ Qualified::With {
+ resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+ super_chain_len,
+ ..
+ } => {
+ acc.add_super_keyword(ctx, *super_chain_len);
+
+ for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
+ match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac))
+ if !existing_derives.contains(&mac) && mac.is_derive(ctx.db) =>
+ {
+ acc.add_macro(ctx, path_ctx, mac, name)
+ }
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ acc.add_module(ctx, path_ctx, m, name)
+ }
+ _ => (),
+ }
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ // only show modules in a fresh UseTree
+ Qualified::No => {
+ ctx.process_all_names(&mut |name, def| {
+ let mac = match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac))
+ if !existing_derives.contains(&mac) && mac.is_derive(ctx.db) =>
+ {
+ mac
+ }
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ return acc.add_module(ctx, path_ctx, m, name);
+ }
+ _ => return,
+ };
+
+ match (core, mac.module(ctx.db).krate()) {
+ // show derive dependencies for `core`/`std` derives
+ (Some(core), mac_krate) if core == mac_krate => {}
+ _ => return acc.add_macro(ctx, path_ctx, mac, name),
+ };
+
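+                // Derives in `core`/`std` that require other derives (e.g. `Eq` needs
+                // `PartialEq`) get a completion that also inserts the missing dependencies.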
+ let name_ = name.to_smol_str();
+ let find = DEFAULT_DERIVE_DEPENDENCIES
+ .iter()
+ .find(|derive_completion| derive_completion.label == name_);
+
+ match find {
+ Some(derive_completion) => {
+ let mut components = vec![derive_completion.label];
+ components.extend(derive_completion.dependencies.iter().filter(
+ |&&dependency| {
+ !existing_derives
+ .iter()
+ .map(|it| it.name(ctx.db))
+ .any(|it| it.to_smol_str() == dependency)
+ },
+ ));
+ let lookup = components.join(", ");
+ let label = Itertools::intersperse(components.into_iter().rev(), ", ");
+
+ let mut item = CompletionItem::new(
+ SymbolKind::Derive,
+ ctx.source_range(),
+ SmolStr::from_iter(label),
+ );
+ if let Some(docs) = mac.docs(ctx.db) {
+ item.documentation(docs);
+ }
+ item.lookup_by(lookup);
+ item.add_to(acc);
+ }
+ None => acc.add_macro(ctx, path_ctx, mac, name),
+ }
+ });
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ }
+}
+
+struct DeriveDependencies {
+ label: &'static str,
+ dependencies: &'static [&'static str],
+}
+
+/// Standard Rust derives that have dependencies
+/// (the dependencies are needed so that the main derive doesn't break the compilation when added)
+const DEFAULT_DERIVE_DEPENDENCIES: &[DeriveDependencies] = &[
+ DeriveDependencies { label: "Copy", dependencies: &["Clone"] },
+ DeriveDependencies { label: "Eq", dependencies: &["PartialEq"] },
+ DeriveDependencies { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] },
+ DeriveDependencies { label: "PartialOrd", dependencies: &["PartialEq"] },
+];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
new file mode 100644
index 000000000..967f6ddd9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
@@ -0,0 +1,61 @@
+//! Completion for lints
+use ide_db::{generated::lints::Lint, SymbolKind};
+use syntax::ast;
+
+use crate::{context::CompletionContext, item::CompletionItem, Completions};
+
+pub(super) fn complete_lint(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ is_qualified: bool,
+ existing_lints: &[ast::Path],
+ lints_completions: &[Lint],
+) {
+ for &Lint { label, description } in lints_completions {
+ let (qual, name) = {
+ // FIXME: change `Lint`'s label to not store a path in it but split the prefix off instead?
+ let mut parts = label.split("::");
+ let ns_or_label = match parts.next() {
+ Some(it) => it,
+ None => continue,
+ };
+ let label = parts.next();
+ match label {
+ Some(label) => (Some(ns_or_label), label),
+ None => (None, ns_or_label),
+ }
+ };
+ if qual.is_none() && is_qualified {
+ // qualified completion requested, but this lint is unqualified
+ continue;
+ }
+ let lint_already_annotated = existing_lints
+ .iter()
+ .filter_map(|path| {
+ let q = path.qualifier();
+ if q.as_ref().and_then(|it| it.qualifier()).is_some() {
+ return None;
+ }
+ Some((q.and_then(|it| it.as_single_name_ref()), path.segment()?.name_ref()?))
+ })
+ .any(|(q, name_ref)| {
+ let qualifier_matches = match (q, qual) {
+ (None, None) => true,
+ (None, Some(_)) => false,
+ (Some(_), None) => false,
+ (Some(q), Some(ns)) => q.text() == ns,
+ };
+ qualifier_matches && name_ref.text() == name
+ });
+ if lint_already_annotated {
+ continue;
+ }
+ let label = match qual {
+ Some(qual) if !is_qualified => format!("{}::{}", qual, name),
+ _ => name.to_owned(),
+ };
+ let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label);
+ item.documentation(hir::Documentation::new(description.to_owned()));
+ item.add_to(acc)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
new file mode 100644
index 000000000..a29417133
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
@@ -0,0 +1,74 @@
+//! Completion for representations.
+
+use ide_db::SymbolKind;
+use syntax::ast;
+
+use crate::{context::CompletionContext, item::CompletionItem, Completions};
+
+pub(super) fn complete_repr(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ input: ast::TokenTree,
+) {
+ if let Some(existing_reprs) = super::parse_comma_sep_expr(input) {
+ for &ReprCompletion { label, snippet, lookup, collides } in REPR_COMPLETIONS {
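+            // Skip this completion if the same repr is already annotated or if a mutually
+            // exclusive one (listed in `collides`, e.g. `transparent` vs. `C`) is present.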
+ let repr_already_annotated = existing_reprs
+ .iter()
+ .filter_map(|expr| match expr {
+ ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+ ast::Expr::CallExpr(call) => match call.expr()? {
+ ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+ _ => None,
+ },
+ _ => None,
+ })
+ .any(|it| {
+ let text = it.text();
+ lookup.unwrap_or(label) == text || collides.contains(&text.as_str())
+ });
+ if repr_already_annotated {
+ continue;
+ }
+
+ let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), label);
+ if let Some(lookup) = lookup {
+ item.lookup_by(lookup);
+ }
+ if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
+ item.insert_snippet(cap, snippet);
+ }
+ item.add_to(acc);
+ }
+ }
+}
+
+struct ReprCompletion {
+ label: &'static str,
+ snippet: Option<&'static str>,
+ lookup: Option<&'static str>,
+ collides: &'static [&'static str],
+}
+
+const fn attr(label: &'static str, collides: &'static [&'static str]) -> ReprCompletion {
+ ReprCompletion { label, snippet: None, lookup: None, collides }
+}
+
+#[rustfmt::skip]
+const REPR_COMPLETIONS: &[ReprCompletion] = &[
+ ReprCompletion { label: "align($0)", snippet: Some("align($0)"), lookup: Some("align"), collides: &["transparent", "packed"] },
+ attr("packed", &["transparent", "align"]),
+ attr("transparent", &["C", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("C", &["transparent"]),
+ attr("u8", &["transparent", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u16", &["transparent", "u8", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u32", &["transparent", "u8", "u16", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u64", &["transparent", "u8", "u16", "u32", "u128", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("u128", &["transparent", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("usize", &["transparent", "u8", "u16", "u32", "u64", "u128", "i8", "i16", "i32", "i64", "i128", "isize"]),
+ attr("i8", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i16", "i32", "i64", "i128", "isize"]),
+ attr("i16", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i32", "i64", "i128", "isize"]),
+ attr("i32", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i64", "i128", "isize"]),
+ attr("i64", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i128", "isize"]),
+    attr("i128", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "isize"]),
+ attr("isize", &["transparent", "u8", "u16", "u32", "u64", "u128", "usize", "i8", "i16", "i32", "i64", "i128"]),
+];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
new file mode 100644
index 000000000..cf40ca489
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -0,0 +1,947 @@
+//! Completes references after dot (fields and method calls).
+
+use ide_db::FxHashSet;
+
+use crate::{
+ context::{CompletionContext, DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, Qualified},
+ CompletionItem, CompletionItemKind, Completions,
+};
+
+/// Complete dot accesses, i.e. fields or methods.
+pub(crate) fn complete_dot(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+) {
+ let receiver_ty = match dot_access {
+ DotAccess { receiver_ty: Some(receiver_ty), .. } => &receiver_ty.original,
+ _ => return,
+ };
+
+ // Suggest .await syntax for types that implement Future trait
+ if receiver_ty.impls_future(ctx.db) {
+ let mut item =
+ CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), "await");
+ item.detail("expr.await");
+ item.add_to(acc);
+ }
+
+ if let DotAccessKind::Method { .. } = dot_access.kind {
+ cov_mark::hit!(test_no_struct_field_completion_for_method_call);
+ } else {
+ complete_fields(
+ acc,
+ ctx,
+ &receiver_ty,
+ |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
+ |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
+ );
+ }
+ complete_methods(ctx, &receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
+}
+
+pub(crate) fn complete_undotted_self(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ expr_ctx: &ExprCtx,
+) {
+ if !ctx.config.enable_self_on_the_fly {
+ return;
+ }
+ if !path_ctx.is_trivial_path() {
+ return;
+ }
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+ if !matches!(path_ctx.qualified, Qualified::No) {
+ return;
+ }
+ let self_param = match expr_ctx {
+ ExprCtx { self_param: Some(self_param), .. } => self_param,
+ _ => return,
+ };
+
+ let ty = self_param.ty(ctx.db);
+ complete_fields(
+ acc,
+ ctx,
+ &ty,
+ |acc, field, ty| {
+ acc.add_field(
+ ctx,
+ &DotAccess {
+ receiver: None,
+ receiver_ty: None,
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false },
+ },
+ Some(hir::known::SELF_PARAM),
+ field,
+ &ty,
+ )
+ },
+ |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
+ );
+ complete_methods(ctx, &ty, |func| {
+ acc.add_method(
+ ctx,
+ &DotAccess {
+ receiver: None,
+ receiver_ty: None,
+ kind: DotAccessKind::Method { has_parens: false },
+ },
+ func,
+ Some(hir::known::SELF_PARAM),
+ None,
+ )
+ });
+}
+
+fn complete_fields(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ receiver: &hir::Type,
+ mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
+ mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
+) {
+ for receiver in receiver.autoderef(ctx.db) {
+ for (field, ty) in receiver.fields(ctx.db) {
+ named_field(acc, field, ty);
+ }
+ for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
+ // Tuple fields are always public (tuple struct fields are handled above).
+ tuple_index(acc, i, ty);
+ }
+ }
+}
+
+fn complete_methods(
+ ctx: &CompletionContext<'_>,
+ receiver: &hir::Type,
+ mut f: impl FnMut(hir::Function),
+) {
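+    // Only methods taking `self` are offered, deduplicated by name so the same method is not
+    // suggested twice (e.g. once per autoderef step); returning `None` keeps
+    // `iterate_method_candidates` going until all candidates have been seen.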
+ let mut seen_methods = FxHashSet::default();
+ receiver.iterate_method_candidates(
+ ctx.db,
+ &ctx.scope,
+ &ctx.traits_in_scope(),
+ Some(ctx.module),
+ None,
+ |func| {
+ if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) {
+ f(func);
+ }
+ None::<()>
+ },
+ );
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{
+ check_edit, completion_list_no_kw, completion_list_no_kw_with_private_editable,
+ };
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ fn check_with_private_editable(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw_with_private_editable(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn test_struct_field_and_method_completion() {
+ check(
+ r#"
+struct S { foo: u32 }
+impl S {
+ fn bar(&self) {}
+}
+fn foo(s: S) { s.$0 }
+"#,
+ expect![[r#"
+ fd foo u32
+ me bar() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_field_completion_self() {
+ check(
+ r#"
+struct S { the_field: (u32,) }
+impl S {
+ fn foo(self) { self.$0 }
+}
+"#,
+ expect![[r#"
+ fd the_field (u32,)
+ me foo() fn(self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_struct_field_completion_autoderef() {
+ check(
+ r#"
+struct A { the_field: (u32, i32) }
+impl A {
+ fn foo(&self) { self.$0 }
+}
+"#,
+ expect![[r#"
+ fd the_field (u32, i32)
+ me foo() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_no_struct_field_completion_for_method_call() {
+ cov_mark::check!(test_no_struct_field_completion_for_method_call);
+ check(
+ r#"
+struct A { the_field: u32 }
+fn foo(a: A) { a.$0() }
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn test_visibility_filtering() {
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd pub_field u32
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd pub_field u32
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A(
+ i32,
+ pub f64,
+ );
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd 1 f64
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me pub_method() fn(&self)
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me pub_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_visibility_filtering_with_private_editable_enabled() {
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd crate_field u32
+ fd private_field u32
+ fd pub_field u32
+ fd super_field u32
+ "#]],
+ );
+
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A {
+ private_field: u32,
+ pub pub_field: u32,
+ pub(crate) crate_field: u32,
+ pub(super) super_field: u32,
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd pub_field u32
+ "#]],
+ );
+
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub mod m {
+ pub struct A(
+ i32,
+ pub f64,
+ );
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::m::A) { a.$0 }
+"#,
+ expect![[r#"
+ fd 1 f64
+ "#]],
+ );
+
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:local
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me crate_method() fn(&self)
+ me private_method() fn(&self)
+ me pub_method() fn(&self)
+ "#]],
+ );
+ check_with_private_editable(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct A {}
+mod m {
+ impl super::A {
+ fn private_method(&self) {}
+ pub(crate) fn crate_method(&self) {}
+ pub fn pub_method(&self) {}
+ }
+}
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo(a: lib::A) { a.$0 }
+"#,
+ expect![[r#"
+ me pub_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_local_impls() {
+ check(
+ r#"
+//- /lib.rs crate:lib
+pub struct A {}
+mod m {
+ impl super::A {
+ pub fn pub_module_method(&self) {}
+ }
+ fn f() {
+ impl super::A {
+ pub fn pub_foreign_local_method(&self) {}
+ }
+ }
+}
+//- /main.rs crate:main deps:lib
+fn foo(a: lib::A) {
+ impl lib::A {
+ fn local_method(&self) {}
+ }
+ a.$0
+}
+"#,
+ expect![[r#"
+ me local_method() fn(&self)
+ me pub_module_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_doc_hidden_filtering() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn foo(a: dep::A) { a.$0 }
+//- /dep.rs crate:dep
+pub struct A {
+ #[doc(hidden)]
+ pub hidden_field: u32,
+ pub pub_field: u32,
+}
+
+impl A {
+ pub fn pub_method(&self) {}
+
+ #[doc(hidden)]
+ pub fn hidden_method(&self) {}
+}
+ "#,
+ expect![[r#"
+ fd pub_field u32
+ me pub_method() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_union_field_completion() {
+ check(
+ r#"
+union U { field: u8, other: u16 }
+fn foo(u: U) { u.$0 }
+"#,
+ expect![[r#"
+ fd field u8
+ fd other u16
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_method_completion_only_fitting_impls() {
+ check(
+ r#"
+struct A<T> {}
+impl A<u32> {
+ fn the_method(&self) {}
+}
+impl A<i32> {
+ fn the_other_method(&self) {}
+}
+fn foo(a: A<u32>) { a.$0 }
+"#,
+ expect![[r#"
+ me the_method() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_trait_method_completion() {
+ check(
+ r#"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl Trait for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ expect![[r#"
+ me the_method() (as Trait) fn(&self)
+ "#]],
+ );
+ check_edit(
+ "the_method",
+ r#"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl Trait for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ r#"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl Trait for A {}
+fn foo(a: A) { a.the_method()$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_trait_method_completion_deduplicated() {
+ check(
+ r"
+struct A {}
+trait Trait { fn the_method(&self); }
+impl<T> Trait for T {}
+fn foo(a: &A) { a.$0 }
+",
+ expect![[r#"
+ me the_method() (as Trait) fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_trait_method_from_other_module() {
+ check(
+ r"
+struct A {}
+mod m {
+ pub trait Trait { fn the_method(&self); }
+}
+use m::Trait;
+impl Trait for A {}
+fn foo(a: A) { a.$0 }
+",
+ expect![[r#"
+ me the_method() (as Trait) fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_no_non_self_method() {
+ check(
+ r#"
+struct A {}
+impl A {
+ fn the_method() {}
+}
+fn foo(a: A) {
+ a.$0
+}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_field_completion() {
+ check(
+ r#"
+fn foo() {
+ let b = (0, 3.14);
+ b.$0
+}
+"#,
+ expect![[r#"
+ fd 0 i32
+ fd 1 f64
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_struct_field_completion() {
+ check(
+ r#"
+struct S(i32, f64);
+fn foo() {
+ let b = S(0, 3.14);
+ b.$0
+}
+"#,
+ expect![[r#"
+ fd 0 i32
+ fd 1 f64
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_field_inference() {
+ check(
+ r#"
+pub struct S;
+impl S { pub fn blah(&self) {} }
+
+struct T(S);
+
+impl T {
+ fn foo(&self) {
+ // FIXME: This doesn't work without the trailing `a` as `0.` is a float
+ self.0.a$0
+ }
+}
+"#,
+ expect![[r#"
+ me blah() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_completion_works_in_consts() {
+ check(
+ r#"
+struct A { the_field: u32 }
+const X: u32 = {
+ A { the_field: 92 }.$0
+};
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_in_simple_macro_1() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct A { the_field: u32 }
+fn foo(a: A) {
+ m!(a.x$0)
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_in_simple_macro_2() {
+ // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct A { the_field: u32 }
+fn foo(a: A) {
+ m!(a.$0)
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_in_simple_macro_recursive_1() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct A { the_field: u32 }
+fn foo(a: A) {
+ m!(m!(m!(a.x$0)))
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expansion_resilient() {
+ check(
+ r#"
+macro_rules! d {
+ () => {};
+ ($val:expr) => {
+ match $val { tmp => { tmp } }
+ };
+ // Trailing comma with single argument is ignored
+ ($val:expr,) => { $crate::d!($val) };
+ ($($val:expr),+ $(,)?) => {
+ ($($crate::d!($val)),+,)
+ };
+}
+struct A { the_field: u32 }
+fn foo(a: A) {
+ d!(a.$0)
+}
+"#,
+ expect![[r#"
+ fd the_field u32
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_method_completion_issue_3547() {
+ check(
+ r#"
+struct HashSet<T> {}
+impl<T> HashSet<T> {
+ pub fn the_method(&self) {}
+}
+fn foo() {
+ let s: HashSet<_>;
+ s.$0
+}
+"#,
+ expect![[r#"
+ me the_method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_method_call_when_receiver_is_a_macro_call() {
+ check(
+ r#"
+struct S;
+impl S { fn foo(&self) {} }
+macro_rules! make_s { () => { S }; }
+fn main() { make_s!().f$0; }
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ "#]],
+ )
+ }
+
+ #[test]
+ fn completes_after_macro_call_in_submodule() {
+ check(
+ r#"
+macro_rules! empty {
+ () => {};
+}
+
+mod foo {
+ #[derive(Debug, Default)]
+ struct Template2 {}
+
+ impl Template2 {
+ fn private(&self) {}
+ }
+ fn baz() {
+ let goo: Template2 = Template2 {};
+ empty!();
+ goo.$0
+ }
+}
+ "#,
+ expect![[r#"
+ me private() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn issue_8931() {
+ check(
+ r#"
+//- minicore: fn
+struct S;
+
+struct Foo;
+impl Foo {
+ fn foo(&self) -> &[u8] { loop {} }
+}
+
+impl S {
+ fn indented(&mut self, f: impl FnOnce(&mut Self)) {
+ }
+
+ fn f(&mut self, v: Foo) {
+ self.indented(|this| v.$0)
+ }
+}
+ "#,
+ expect![[r#"
+ me foo() fn(&self) -> &[u8]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_bare_fields_and_methods_in_methods() {
+ check(
+ r#"
+struct Foo { field: i32 }
+
+impl Foo { fn foo(&self) { $0 } }"#,
+ expect![[r#"
+ fd self.field i32
+ lc self &Foo
+ sp Self
+ st Foo
+ bt u32
+ me self.foo() fn(&self)
+ "#]],
+ );
+ check(
+ r#"
+struct Foo(i32);
+
+impl Foo { fn foo(&mut self) { $0 } }"#,
+ expect![[r#"
+ fd self.0 i32
+ lc self &mut Foo
+ sp Self
+ st Foo
+ bt u32
+ me self.foo() fn(&mut self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_completion_after_dot() {
+ check(
+ r#"
+macro_rules! m {
+ ($e:expr) => { $e };
+}
+
+struct Completable;
+
+impl Completable {
+ fn method(&self) {}
+}
+
+fn f() {
+ let c = Completable;
+ m!(c.$0);
+}
+ "#,
+ expect![[r#"
+ me method() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_method_call_when_receiver_type_has_errors_issue_10297() {
+ check(
+ r#"
+//- minicore: iterator, sized
+struct Vec<T>;
+impl<T> IntoIterator for Vec<T> {
+ type Item = ();
+ type IntoIter = ();
+ fn into_iter(self);
+}
+fn main() {
+ let x: Vec<_>;
+ x.$0;
+}
+"#,
+ expect![[r#"
+ me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
+ "#]],
+ )
+ }
+
+ #[test]
+ fn postfix_drop_completion() {
+ cov_mark::check!(postfix_drop_completion);
+ check_edit(
+ "drop",
+ r#"
+//- minicore: drop
+struct Vec<T>(T);
+impl<T> Drop for Vec<T> {
+ fn drop(&mut self) {}
+}
+fn main() {
+ let x = Vec(0u32)
+ x.$0;
+}
+"#,
+ r"
+struct Vec<T>(T);
+impl<T> Drop for Vec<T> {
+ fn drop(&mut self) {}
+}
+fn main() {
+ let x = Vec(0u32)
+ drop($0x);
+}
+",
+ )
+ }
+
+ #[test]
+ fn issue_12484() {
+ check(
+ r#"
+//- minicore: sized
+trait SizeUser {
+ type Size;
+}
+trait Closure: SizeUser {}
+trait Encrypt: SizeUser {
+ fn encrypt(self, _: impl Closure<Size = Self::Size>);
+}
+fn test(thing: impl Encrypt) {
+ thing.$0;
+}
+ "#,
+ expect![[r#"
+ me encrypt(…) (as Encrypt) fn(self, impl Closure<Size = <Self as SizeUser>::Size>)
+ "#]],
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
new file mode 100644
index 000000000..5d0ddaaf2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -0,0 +1,280 @@
+//! Completion of names from the current scope in expression position.
+
+use hir::ScopeDef;
+
+use crate::{
+ context::{ExprCtx, PathCompletionCtx, Qualified},
+ CompletionContext, Completions,
+};
+
+pub(crate) fn complete_expr_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ expr_ctx: &ExprCtx,
+) {
+ let _p = profile::span("complete_expr_path");
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+
+ let &ExprCtx {
+ in_block_expr,
+ in_loop_body,
+ after_if_expr,
+ in_condition,
+ incomplete_let,
+ ref ref_expr_parent,
+ ref is_func_update,
+ ref innermost_ret_ty,
+ ref impl_,
+ in_match_guard,
+ ..
+ } = expr_ctx;
+
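+    // Offer the `mut` keyword only when the caret is inside a `&` expression that does not
+    // already have a `mut` token.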
+ let wants_mut_token =
+ ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false);
+
+ let scope_def_applicable = |def| match def {
+ ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_) => false,
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => mac.is_fn_like(ctx.db),
+ _ => true,
+ };
+
+ let add_assoc_item = |acc: &mut Completions, item| match item {
+ hir::AssocItem::Function(func) => acc.add_function(ctx, path_ctx, func, None),
+ hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
+ hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
+ };
+
+ match qualified {
+ Qualified::TypeAnchor { ty: None, trait_: None } => ctx
+ .traits_in_scope()
+ .iter()
+ .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db))
+ .for_each(|item| add_assoc_item(acc, item)),
+ Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
+ trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
+ }
+ Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ cov_mark::hit!(completes_variant_through_alias);
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+
+ // Iterate assoc types separately
+ ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ if let hir::AssocItem::TypeAlias(ty) = item {
+ acc.add_type_alias(ctx, ty)
+ }
+ None::<()>
+ });
+ }
+ Qualified::With { resolution: None, .. } => {}
+ Qualified::With { resolution: Some(resolution), .. } => {
+ // Add associated types on type parameters and `Self`.
+ ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
+ acc.add_type_alias(ctx, alias);
+ None::<()>
+ });
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+ let module_scope = module.scope(ctx.db, Some(ctx.module));
+ for (name, def) in module_scope {
+ if scope_def_applicable(def) {
+ acc.add_path_resolution(ctx, path_ctx, name, def);
+ }
+ }
+ }
+ hir::PathResolution::Def(
+ def @ (hir::ModuleDef::Adt(_)
+ | hir::ModuleDef::TypeAlias(_)
+ | hir::ModuleDef::BuiltinType(_)),
+ ) => {
+ let ty = match def {
+ hir::ModuleDef::Adt(adt) => adt.ty(ctx.db),
+ hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
+ hir::ModuleDef::BuiltinType(builtin) => {
+ cov_mark::hit!(completes_primitive_assoc_const);
+ builtin.ty(ctx.db)
+ }
+ _ => return,
+ };
+
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ cov_mark::hit!(completes_variant_through_alias);
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
+ // (where AssocType is defined on a trait, not an inherent impl)
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+
+ // Iterate assoc types separately
+ ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ if let hir::AssocItem::TypeAlias(ty) = item {
+ acc.add_type_alias(ctx, ty)
+ }
+ None::<()>
+ });
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Trait(t)) => {
+ // Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`.
+ for item in t.items(ctx.db) {
+ add_assoc_item(acc, item);
+ }
+ }
+ hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_) => {
+ let ty = match resolution {
+ hir::PathResolution::TypeParam(param) => param.ty(ctx.db),
+ hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db),
+ _ => return,
+ };
+
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ cov_mark::hit!(completes_variant_through_self);
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+ }
+ _ => (),
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ Qualified::No => {
+ acc.add_nameref_keywords_with_colon(ctx);
+ if let Some(adt) =
+ ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
+ {
+ let self_ty = (|| ctx.sema.to_def(impl_.as_ref()?)?.self_ty(ctx.db).as_adt())();
+ let complete_self = self_ty == Some(adt);
+
+ match adt {
+ hir::Adt::Struct(strukt) => {
+ let path = ctx
+ .module
+ .find_use_path(ctx.db, hir::ModuleDef::from(strukt))
+ .filter(|it| it.len() > 1);
+
+ acc.add_struct_literal(ctx, path_ctx, strukt, path, None);
+
+ if complete_self {
+ acc.add_struct_literal(
+ ctx,
+ path_ctx,
+ strukt,
+ None,
+ Some(hir::known::SELF_TYPE),
+ );
+ }
+ }
+ hir::Adt::Union(un) => {
+ let path = ctx
+ .module
+ .find_use_path(ctx.db, hir::ModuleDef::from(un))
+ .filter(|it| it.len() > 1);
+
+ acc.add_union_literal(ctx, un, path, None);
+ if complete_self {
+ acc.add_union_literal(ctx, un, None, Some(hir::known::SELF_TYPE));
+ }
+ }
+ hir::Adt::Enum(e) => {
+ super::enum_variants_with_paths(
+ acc,
+ ctx,
+ e,
+ impl_,
+ |acc, ctx, variant, path| {
+ acc.add_qualified_enum_variant(ctx, path_ctx, variant, path)
+ },
+ );
+ }
+ }
+ }
+ ctx.process_all_names(&mut |name, def| match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Trait(t)) => {
+ let assocs = t.items_with_supertraits(ctx.db);
+ match &*assocs {
+ // traits with no assoc items are unusable as expressions since
+ // there is no associated item path that can be constructed with them
+ [] => (),
+ // FIXME: Render the assoc item with the trait qualified
+ &[_item] => acc.add_path_resolution(ctx, path_ctx, name, def),
+ // FIXME: Append `::` to the thing here, since a trait on its own won't work
+ [..] => acc.add_path_resolution(ctx, path_ctx, name, def),
+ }
+ }
+ _ if scope_def_applicable(def) => acc.add_path_resolution(ctx, path_ctx, name, def),
+ _ => (),
+ });
+
+ if is_func_update.is_none() {
+ let mut add_keyword =
+ |kw, snippet| acc.add_keyword_snippet_expr(ctx, incomplete_let, kw, snippet);
+
+ if !in_block_expr {
+ add_keyword("unsafe", "unsafe {\n $0\n}");
+ }
+ add_keyword("match", "match $1 {\n $0\n}");
+ add_keyword("while", "while $1 {\n $0\n}");
+ add_keyword("while let", "while let $1 = $2 {\n $0\n}");
+ add_keyword("loop", "loop {\n $0\n}");
+ if in_match_guard {
+ add_keyword("if", "if $0");
+ } else {
+ add_keyword("if", "if $1 {\n $0\n}");
+ }
+ add_keyword("if let", "if let $1 = $2 {\n $0\n}");
+ add_keyword("for", "for $1 in $2 {\n $0\n}");
+ add_keyword("true", "true");
+ add_keyword("false", "false");
+
+ if in_condition || in_block_expr {
+ add_keyword("let", "let");
+ }
+
+ if after_if_expr {
+ add_keyword("else", "else {\n $0\n}");
+ add_keyword("else if", "else if $1 {\n $0\n}");
+ }
+
+ if wants_mut_token {
+ add_keyword("mut", "mut ");
+ }
+
+ if in_loop_body {
+ if in_block_expr {
+ add_keyword("continue", "continue;");
+ add_keyword("break", "break;");
+ } else {
+ add_keyword("continue", "continue");
+ add_keyword("break", "break");
+ }
+ }
+
+ if let Some(ty) = innermost_ret_ty {
+ add_keyword(
+ "return",
+ match (in_block_expr, ty.is_unit()) {
+ (true, true) => "return ;",
+                    (true, false) => "return $0;",
+                    (false, true) => "return",
+                    (false, false) => "return $0",
+ },
+ );
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
new file mode 100644
index 000000000..4e89ef696
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
@@ -0,0 +1,108 @@
+//! Completes function abi strings.
+use syntax::{
+ ast::{self, IsString},
+ AstNode, AstToken,
+};
+
+use crate::{
+ completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
+};
+
+// Most of these are feature gated; we should filter/add feature gate completions once we have them.
+const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[
+ "Rust",
+ "C",
+ "C-unwind",
+ "cdecl",
+ "stdcall",
+ "stdcall-unwind",
+ "fastcall",
+ "vectorcall",
+ "thiscall",
+ "thiscall-unwind",
+ "aapcs",
+ "win64",
+ "sysv64",
+ "ptx-kernel",
+ "msp430-interrupt",
+ "x86-interrupt",
+ "amdgpu-kernel",
+ "efiapi",
+ "avr-interrupt",
+ "avr-non-blocking-interrupt",
+ "C-cmse-nonsecure-call",
+ "wasm",
+ "system",
+ "system-unwind",
+ "rust-intrinsic",
+ "rust-call",
+ "platform-intrinsic",
+ "unadjusted",
+];
+
+pub(crate) fn complete_extern_abi(
+ acc: &mut Completions,
+ _ctx: &CompletionContext<'_>,
+ expanded: &ast::String,
+) -> Option<()> {
+ if !expanded.syntax().parent().map_or(false, |it| ast::Abi::can_cast(it.kind())) {
+ return None;
+ }
+ let abi_str = expanded;
+ let source_range = abi_str.text_range_between_quotes()?;
+ for &abi in SUPPORTED_CALLING_CONVENTIONS {
+ CompletionItem::new(CompletionItemKind::Keyword, source_range, abi).add_to(acc);
+ }
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list_no_kw};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn only_completes_in_string_literals() {
+ check(
+ r#"
+$0 fn foo {}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn requires_extern_prefix() {
+ check(
+ r#"
+"$0" fn foo {}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn works() {
+ check(
+ r#"
+extern "$0" fn foo {}
+"#,
+ expect![[]],
+ );
+ check_edit(
+ "Rust",
+ r#"
+extern "$0" fn foo {}
+"#,
+ r#"
+extern "Rust" fn foo {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
new file mode 100644
index 000000000..870df63b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
@@ -0,0 +1,43 @@
+//! Completion of field list position.
+
+use crate::{
+ context::{PathCompletionCtx, Qualified},
+ CompletionContext, Completions,
+};
+
+pub(crate) fn complete_field_list_tuple_variant(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+) {
+ if ctx.qualifier_ctx.vis_node.is_some() {
+ return;
+ }
+ match path_ctx {
+ PathCompletionCtx {
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ has_type_args: false,
+ ..
+ } => {
+ let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+ add_keyword("pub(crate)", "pub(crate)");
+ add_keyword("pub(super)", "pub(super)");
+ add_keyword("pub", "pub");
+ }
+ _ => (),
+ }
+}
+
+pub(crate) fn complete_field_list_record_variant(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+) {
+ if ctx.qualifier_ctx.vis_node.is_none() {
+ let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+ add_keyword("pub(crate)", "pub(crate)");
+ add_keyword("pub(super)", "pub(super)");
+ add_keyword("pub", "pub");
+ }
+}
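
Both functions above boil down to the same gate: visibility keywords are only offered while no visibility qualifier has been written yet. A tiny sketch of that rule, not part of the patch, using a plain boolean in place of `ctx.qualifier_ctx.vis_node`:

```rust
// Illustrative sketch only: which visibility keywords to offer at a field-list position.
fn visibility_completions(already_has_visibility: bool) -> &'static [&'static str] {
    if already_has_visibility {
        &[] // e.g. `struct S { pub $0 name: String }` gets no further suggestions
    } else {
        &["pub(crate)", "pub(super)", "pub"]
    }
}

fn main() {
    assert_eq!(visibility_completions(false), ["pub(crate)", "pub(super)", "pub"]);
    assert!(visibility_completions(true).is_empty());
}
```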
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
new file mode 100644
index 000000000..f04cc15d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -0,0 +1,407 @@
+//! See [`import_on_the_fly`].
+use hir::{ItemInNs, ModuleDef};
+use ide_db::imports::{
+ import_assets::{ImportAssets, LocatedImport},
+ insert_use::ImportScope,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self},
+ AstNode, SyntaxNode, T,
+};
+
+use crate::{
+ context::{
+ CompletionContext, DotAccess, PathCompletionCtx, PathKind, PatternContext, Qualified,
+ TypeLocation,
+ },
+ render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext},
+};
+
+use super::Completions;
+
+// Feature: Completion With Autoimport
+//
+// When completing names in the current scope, proposes additional imports from other modules or crates,
+// if they can be qualified in the scope, and their name contains all symbols from the completion input.
+//
+// To be considered applicable, the name must contain all input symbols in the given order, not necessarily adjacent.
+// If any input symbol is not lowercased, the name must contain all symbols in exact case; otherwise the match is checked case-insensitively.
+//
+// ```
+// fn main() {
+// pda$0
+// }
+// # pub mod std { pub mod marker { pub struct PhantomData { } } }
+// ```
+// ->
+// ```
+// use std::marker::PhantomData;
+//
+// fn main() {
+// PhantomData
+// }
+// # pub mod std { pub mod marker { pub struct PhantomData { } } }
+// ```
+//
+// Also completes associated items that require trait imports.
+// If any unresolved and/or partially-qualified path precedes the input, it will be taken into account.
+// Currently, only imports whose path ends with the whole qualifier will be proposed
+// (there is no fuzzy matching for the qualifier).
+//
+// ```
+// mod foo {
+// pub mod bar {
+// pub struct Item;
+//
+// impl Item {
+// pub const TEST_ASSOC: usize = 3;
+// }
+// }
+// }
+//
+// fn main() {
+// bar::Item::TEST_A$0
+// }
+// ```
+// ->
+// ```
+// use foo::bar;
+//
+// mod foo {
+// pub mod bar {
+// pub struct Item;
+//
+// impl Item {
+// pub const TEST_ASSOC: usize = 3;
+// }
+// }
+// }
+//
+// fn main() {
+// bar::Item::TEST_ASSOC
+// }
+// ```
+//
+// NOTE: currently, if an assoc item comes from a trait that is not yet imported, and it also has an unresolved and/or partially-qualified path,
+// no imports will be proposed.
+//
+// .Fuzzy search details
+//
+// To avoid returning an excessive number of results, the completion input is only matched against item names
+// (e.g. against `HashMap` in the `std::collections::HashMap` path).
+// For the same reason, no path imports are searched for inputs shorter than 2 symbols
+// (all associated items are still shown for any input length).
+//
+// .Import configuration
+//
+// It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
+// Mimics the corresponding behavior of the `Auto Import` feature.
+//
+// .LSP and performance implications
+//
+// The feature is enabled only if the LSP client supports LSP protocol version 3.16+ and reports the `additionalTextEdits`
+// (case-sensitive) resolve client capability in its client capabilities.
+// This way the server is able to defer the costly computations, doing them for a selected completion item only.
+// For clients with no such support, all edits have to be calculated on the completion request, including the fuzzy search completion ones,
+// which might be slow, so the feature is automatically disabled.
+//
+// .Feature toggle
+//
+// The feature can be forcefully turned off in the settings with the `rust-analyzer.completion.autoimport.enable` flag.
+// Note that having this flag set to `true` does not guarantee that the feature is enabled: your client needs to have the corresponding
+// capability enabled.
+pub(crate) fn import_on_the_fly_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+) -> Option<()> {
+ if !ctx.config.enable_imports_on_the_fly {
+ return None;
+ }
+ let qualified = match path_ctx {
+ PathCompletionCtx {
+ kind:
+ PathKind::Expr { .. }
+ | PathKind::Type { .. }
+ | PathKind::Attr { .. }
+ | PathKind::Derive { .. }
+ | PathKind::Item { .. }
+ | PathKind::Pat { .. },
+ qualified,
+ ..
+ } => qualified,
+ _ => return None,
+ };
+ let potential_import_name = import_name(ctx);
+ let qualifier = match qualified {
+ Qualified::With { path, .. } => Some(path.clone()),
+ _ => None,
+ };
+ let import_assets = import_assets_for_path(ctx, &potential_import_name, qualifier.clone())?;
+
+ import_on_the_fly(
+ acc,
+ ctx,
+ path_ctx,
+ import_assets,
+ qualifier.map(|it| it.syntax().clone()).or_else(|| ctx.original_token.parent())?,
+ potential_import_name,
+ )
+}
+
+pub(crate) fn import_on_the_fly_pat(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+) -> Option<()> {
+ if !ctx.config.enable_imports_on_the_fly {
+ return None;
+ }
+ if let PatternContext { record_pat: Some(_), .. } = pattern_ctx {
+ return None;
+ }
+
+ let potential_import_name = import_name(ctx);
+ let import_assets = import_assets_for_path(ctx, &potential_import_name, None)?;
+
+ import_on_the_fly_pat_(
+ acc,
+ ctx,
+ pattern_ctx,
+ import_assets,
+ ctx.original_token.parent()?,
+ potential_import_name,
+ )
+}
+
+pub(crate) fn import_on_the_fly_dot(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+) -> Option<()> {
+ if !ctx.config.enable_imports_on_the_fly {
+ return None;
+ }
+ let receiver = dot_access.receiver.as_ref()?;
+ let ty = dot_access.receiver_ty.as_ref()?;
+ let potential_import_name = import_name(ctx);
+ let import_assets = ImportAssets::for_fuzzy_method_call(
+ ctx.module,
+ ty.original.clone(),
+ potential_import_name.clone(),
+ receiver.syntax().clone(),
+ )?;
+
+ import_on_the_fly_method(
+ acc,
+ ctx,
+ dot_access,
+ import_assets,
+ receiver.syntax().clone(),
+ potential_import_name,
+ )
+}
+
+fn import_on_the_fly(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx,
+ import_assets: ImportAssets,
+ position: SyntaxNode,
+ potential_import_name: String,
+) -> Option<()> {
+ let _p = profile::span("import_on_the_fly").detail(|| potential_import_name.clone());
+
+ if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
+ return None;
+ }
+
+ let ns_filter = |import: &LocatedImport| {
+ match (kind, import.original_item) {
+ // Aren't handled in flyimport
+ (PathKind::Vis { .. } | PathKind::Use, _) => false,
+ // modules are always fair game
+ (_, ItemInNs::Types(hir::ModuleDef::Module(_))) => true,
+            // and so are macros (except for attributes)
+ (
+ PathKind::Expr { .. }
+ | PathKind::Type { .. }
+ | PathKind::Item { .. }
+ | PathKind::Pat { .. },
+ ItemInNs::Macros(mac),
+ ) => mac.is_fn_like(ctx.db),
+ (PathKind::Item { .. }, ..) => false,
+
+ (PathKind::Expr { .. }, ItemInNs::Types(_) | ItemInNs::Values(_)) => true,
+
+ (PathKind::Pat { .. }, ItemInNs::Types(_)) => true,
+ (PathKind::Pat { .. }, ItemInNs::Values(def)) => {
+ matches!(def, hir::ModuleDef::Const(_))
+ }
+
+ (PathKind::Type { location }, ItemInNs::Types(ty)) => {
+ if matches!(location, TypeLocation::TypeBound) {
+ matches!(ty, ModuleDef::Trait(_))
+ } else {
+ true
+ }
+ }
+ (PathKind::Type { .. }, ItemInNs::Values(_)) => false,
+
+ (PathKind::Attr { .. }, ItemInNs::Macros(mac)) => mac.is_attr(ctx.db),
+ (PathKind::Attr { .. }, _) => false,
+
+ (PathKind::Derive { existing_derives }, ItemInNs::Macros(mac)) => {
+ mac.is_derive(ctx.db) && !existing_derives.contains(&mac)
+ }
+ (PathKind::Derive { .. }, _) => false,
+ }
+ };
+ let user_input_lowercased = potential_import_name.to_lowercase();
+
+ acc.add_all(
+ import_assets
+ .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+ .into_iter()
+ .filter(ns_filter)
+ .filter(|import| {
+ !ctx.is_item_hidden(&import.item_to_import)
+ && !ctx.is_item_hidden(&import.original_item)
+ })
+ .sorted_by_key(|located_import| {
+ compute_fuzzy_completion_order_key(
+ &located_import.import_path,
+ &user_input_lowercased,
+ )
+ })
+ .filter_map(|import| {
+ render_resolution_with_import(RenderContext::new(ctx), path_ctx, import)
+ })
+ .map(|builder| builder.build()),
+ );
+ Some(())
+}
+
+fn import_on_the_fly_pat_(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+ import_assets: ImportAssets,
+ position: SyntaxNode,
+ potential_import_name: String,
+) -> Option<()> {
+ let _p = profile::span("import_on_the_fly_pat").detail(|| potential_import_name.clone());
+
+ if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
+ return None;
+ }
+
+ let ns_filter = |import: &LocatedImport| match import.original_item {
+ ItemInNs::Macros(mac) => mac.is_fn_like(ctx.db),
+ ItemInNs::Types(_) => true,
+ ItemInNs::Values(def) => matches!(def, hir::ModuleDef::Const(_)),
+ };
+ let user_input_lowercased = potential_import_name.to_lowercase();
+
+ acc.add_all(
+ import_assets
+ .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+ .into_iter()
+ .filter(ns_filter)
+ .filter(|import| {
+ !ctx.is_item_hidden(&import.item_to_import)
+ && !ctx.is_item_hidden(&import.original_item)
+ })
+ .sorted_by_key(|located_import| {
+ compute_fuzzy_completion_order_key(
+ &located_import.import_path,
+ &user_input_lowercased,
+ )
+ })
+ .filter_map(|import| {
+ render_resolution_with_import_pat(RenderContext::new(ctx), pattern_ctx, import)
+ })
+ .map(|builder| builder.build()),
+ );
+ Some(())
+}
+
+fn import_on_the_fly_method(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+ import_assets: ImportAssets,
+ position: SyntaxNode,
+ potential_import_name: String,
+) -> Option<()> {
+ let _p = profile::span("import_on_the_fly_method").detail(|| potential_import_name.clone());
+
+ if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
+ return None;
+ }
+
+ let user_input_lowercased = potential_import_name.to_lowercase();
+
+ import_assets
+ .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind)
+ .into_iter()
+ .filter(|import| {
+ !ctx.is_item_hidden(&import.item_to_import)
+ && !ctx.is_item_hidden(&import.original_item)
+ })
+ .sorted_by_key(|located_import| {
+ compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased)
+ })
+ .for_each(|import| match import.original_item {
+ ItemInNs::Values(hir::ModuleDef::Function(f)) => {
+ acc.add_method_with_import(ctx, dot_access, f, import);
+ }
+ _ => (),
+ });
+ Some(())
+}
+
+fn import_name(ctx: &CompletionContext<'_>) -> String {
+ let token_kind = ctx.token.kind();
+ if matches!(token_kind, T![.] | T![::]) {
+ String::new()
+ } else {
+ ctx.token.to_string()
+ }
+}
+
+fn import_assets_for_path(
+ ctx: &CompletionContext<'_>,
+ potential_import_name: &str,
+ qualifier: Option<ast::Path>,
+) -> Option<ImportAssets> {
+ let fuzzy_name_length = potential_import_name.len();
+ let mut assets_for_path = ImportAssets::for_fuzzy_path(
+ ctx.module,
+ qualifier,
+ potential_import_name.to_owned(),
+ &ctx.sema,
+ ctx.token.parent()?,
+ )?;
+ if fuzzy_name_length < 3 {
+ cov_mark::hit!(flyimport_exact_on_short_path);
+ assets_for_path.path_fuzzy_name_to_exact(false);
+ }
+ Some(assets_for_path)
+}
+
+fn compute_fuzzy_completion_order_key(
+ proposed_mod_path: &hir::ModPath,
+ user_input_lowercased: &str,
+) -> usize {
+ cov_mark::hit!(certain_fuzzy_order_test);
+ let import_name = match proposed_mod_path.segments().last() {
+ Some(name) => name.to_smol_str().to_lowercase(),
+ None => return usize::MAX,
+ };
+ match import_name.match_indices(user_input_lowercased).next() {
+ Some((first_matching_index, _)) => first_matching_index,
+ None => usize::MAX,
+ }
+}
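
The ranking step is the easiest part of this file to look at in isolation: `compute_fuzzy_completion_order_key` orders candidates by how early the lowercased input appears in the final path segment, pushing non-matching paths to the end. A standalone sketch of that key, not part of the patch, using plain `&str` paths instead of `hir::ModPath`:

```rust
// Illustrative sketch only: rank import candidates the way
// compute_fuzzy_completion_order_key does.
fn fuzzy_order_key(proposed_path: &str, user_input_lowercased: &str) -> usize {
    // Take the final path segment, e.g. `HashMap` in `std::collections::HashMap`.
    let last_segment = proposed_path.rsplit("::").next().unwrap_or("").to_lowercase();
    // Rank by the first position of the input inside that segment; misses sort last.
    last_segment.find(user_input_lowercased).unwrap_or(usize::MAX)
}

fn main() {
    let mut candidates = vec!["std::collections::BTreeMap", "std::collections::HashMap"];
    candidates.sort_by_key(|path| fuzzy_order_key(path, "map"));
    // "map" starts at index 4 in "hashmap" but at index 5 in "btreemap",
    // so `HashMap` is ranked first.
    assert_eq!(candidates, ["std::collections::HashMap", "std::collections::BTreeMap"]);
}
```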
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
new file mode 100644
index 000000000..f0ecc595a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
@@ -0,0 +1,196 @@
+//! See [`complete_fn_param`].
+
+use hir::HirDisplay;
+use ide_db::FxHashMap;
+use syntax::{
+ algo,
+ ast::{self, HasModuleItem},
+ match_ast, AstNode, Direction, SyntaxKind, TextRange, TextSize,
+};
+
+use crate::{
+ context::{ParamContext, ParamKind, PatternContext},
+ CompletionContext, CompletionItem, CompletionItemKind, Completions,
+};
+
+// FIXME: Make this a submodule of [`pattern`]
+/// Complete repeated parameters, both name and type. For example, if all
+/// functions in a file have a `spam: &mut Spam` parameter, a completion with
+/// `spam: &mut Spam` insert text/label will be suggested.
+///
+/// Also complete parameters for closures or local functions from the locals defined in the surrounding scope.
+pub(crate) fn complete_fn_param(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+) -> Option<()> {
+ let (ParamContext { param_list, kind, .. }, impl_) = match pattern_ctx {
+ PatternContext { param_ctx: Some(kind), impl_, .. } => (kind, impl_),
+ _ => return None,
+ };
+
+ let comma_wrapper = comma_wrapper(ctx);
+ let mut add_new_item_to_acc = |label: &str| {
+ let mk_item = |label: &str, range: TextRange| {
+ CompletionItem::new(CompletionItemKind::Binding, range, label)
+ };
+ let item = match &comma_wrapper {
+ Some((fmt, range)) => mk_item(&fmt(label), *range),
+ None => mk_item(label, ctx.source_range()),
+ };
+ // Completion lookup is omitted intentionally here.
+ // See the full discussion: https://github.com/rust-lang/rust-analyzer/issues/12073
+ item.add_to(acc)
+ };
+
+ match kind {
+ ParamKind::Function(function) => {
+ fill_fn_params(ctx, function, param_list, impl_, add_new_item_to_acc);
+ }
+ ParamKind::Closure(closure) => {
+ let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?;
+ params_from_stmt_list_scope(ctx, stmt_list, |name, ty| {
+ add_new_item_to_acc(&format!("{name}: {ty}"));
+ });
+ }
+ }
+
+ Some(())
+}
+
+fn fill_fn_params(
+ ctx: &CompletionContext<'_>,
+ function: &ast::Fn,
+ param_list: &ast::ParamList,
+ impl_: &Option<ast::Impl>,
+ mut add_new_item_to_acc: impl FnMut(&str),
+) {
+ let mut file_params = FxHashMap::default();
+
+ let mut extract_params = |f: ast::Fn| {
+ f.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
+ if let Some(pat) = param.pat() {
+ // FIXME: We should be able to turn these into SmolStr without having to allocate a String
+ let whole_param = param.syntax().text().to_string();
+ let binding = pat.syntax().text().to_string();
+ file_params.entry(whole_param).or_insert(binding);
+ }
+ });
+ };
+
+ for node in ctx.token.parent_ancestors() {
+ match_ast! {
+ match node {
+ ast::SourceFile(it) => it.items().filter_map(|item| match item {
+ ast::Item::Fn(it) => Some(it),
+ _ => None,
+ }).for_each(&mut extract_params),
+ ast::ItemList(it) => it.items().filter_map(|item| match item {
+ ast::Item::Fn(it) => Some(it),
+ _ => None,
+ }).for_each(&mut extract_params),
+ ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item {
+ ast::AssocItem::Fn(it) => Some(it),
+ _ => None,
+ }).for_each(&mut extract_params),
+ _ => continue,
+ }
+ };
+ }
+
+ if let Some(stmt_list) = function.syntax().parent().and_then(ast::StmtList::cast) {
+ params_from_stmt_list_scope(ctx, stmt_list, |name, ty| {
+ file_params.entry(format!("{name}: {ty}")).or_insert(name.to_string());
+ });
+ }
+ remove_duplicated(&mut file_params, param_list.params());
+ let self_completion_items = ["self", "&self", "mut self", "&mut self"];
+ if should_add_self_completions(ctx.token.text_range().start(), param_list, impl_) {
+ self_completion_items.into_iter().for_each(|self_item| add_new_item_to_acc(self_item));
+ }
+
+ file_params.keys().for_each(|whole_param| add_new_item_to_acc(whole_param));
+}
+
+fn params_from_stmt_list_scope(
+ ctx: &CompletionContext<'_>,
+ stmt_list: ast::StmtList,
+ mut cb: impl FnMut(hir::Name, String),
+) {
+ let syntax_node = match stmt_list.syntax().last_child() {
+ Some(it) => it,
+ None => return,
+ };
+ if let Some(scope) =
+ ctx.sema.scope_at_offset(stmt_list.syntax(), syntax_node.text_range().end())
+ {
+ let module = scope.module().into();
+ scope.process_all_names(&mut |name, def| {
+ if let hir::ScopeDef::Local(local) = def {
+ if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module) {
+ cb(name, ty);
+ }
+ }
+ });
+ }
+}
+
+fn remove_duplicated(
+ file_params: &mut FxHashMap<String, String>,
+ fn_params: ast::AstChildren<ast::Param>,
+) {
+ fn_params.for_each(|param| {
+ let whole_param = param.syntax().text().to_string();
+ file_params.remove(&whole_param);
+
+ match param.pat() {
+        // remove suggestions for patterns that already exist
+        // (if the type is missing, we are looking at the param currently being completed,
+        // and this check would otherwise end up removing its own suggestion)
+ Some(pattern) if param.ty().is_some() => {
+ let binding = pattern.syntax().text().to_string();
+ file_params.retain(|_, v| v != &binding);
+ }
+ _ => (),
+ }
+ })
+}
+
+fn should_add_self_completions(
+ cursor: TextSize,
+ param_list: &ast::ParamList,
+ impl_: &Option<ast::Impl>,
+) -> bool {
+ if impl_.is_none() || param_list.self_param().is_some() {
+ return false;
+ }
+ match param_list.params().next() {
+ Some(first) => first.pat().map_or(false, |pat| pat.syntax().text_range().contains(cursor)),
+ None => true,
+ }
+}
+
+fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String, TextRange)> {
+ let param = ctx.token.parent_ancestors().find(|node| node.kind() == SyntaxKind::PARAM)?;
+
+ let next_token_kind = {
+ let t = param.last_token()?.next_token()?;
+ let t = algo::skip_whitespace_token(t, Direction::Next)?;
+ t.kind()
+ };
+ let prev_token_kind = {
+ let t = param.first_token()?.prev_token()?;
+ let t = algo::skip_whitespace_token(t, Direction::Prev)?;
+ t.kind()
+ };
+
+ let has_trailing_comma =
+ matches!(next_token_kind, SyntaxKind::COMMA | SyntaxKind::R_PAREN | SyntaxKind::PIPE);
+ let trailing = if has_trailing_comma { "" } else { "," };
+
+ let has_leading_comma =
+ matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE);
+ let leading = if has_leading_comma { "" } else { ", " };
+
+ Some((move |label: &_| (format!("{}{}{}", leading, label, trailing)), param.text_range()))
+}
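
`comma_wrapper` only decides whether the inserted parameter needs a separating comma on either side, based on the tokens next to the param node. A reduced sketch of that decision, not part of the patch, with booleans standing in for the token-kind checks:

```rust
// Illustrative sketch only: add commas around a completed parameter exactly
// when a side does not already border `,`, `(`, `)` or `|`.
fn wrap_param(label: &str, has_leading_separator: bool, has_trailing_separator: bool) -> String {
    let leading = if has_leading_separator { "" } else { ", " };
    let trailing = if has_trailing_separator { "" } else { "," };
    format!("{}{}{}", leading, label, trailing)
}

fn main() {
    // `fn f($0 b: u32)`: the `(` before counts as a separator, the identifier `b` after
    // does not, so only a trailing comma is added.
    assert_eq!(wrap_param("spam: &mut Spam", true, false), "spam: &mut Spam,");
    // `fn f(a: u32, $0)`: both sides are already separated, the label goes in as-is.
    assert_eq!(wrap_param("spam: &mut Spam", true, true), "spam: &mut Spam");
}
```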
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
new file mode 100644
index 000000000..038bdb427
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
@@ -0,0 +1,130 @@
+//! Completes identifiers in format string literals.
+
+use ide_db::syntax_helpers::format_string::is_format_string;
+use itertools::Itertools;
+use syntax::{ast, AstToken, TextRange, TextSize};
+
+use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, Completions};
+
+/// Complete identifiers in format strings.
+pub(crate) fn format_string(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ original: &ast::String,
+ expanded: &ast::String,
+) {
+ if !is_format_string(&expanded) {
+ return;
+ }
+ let cursor = ctx.position.offset;
+ let lit_start = ctx.original_token.text_range().start();
+ let cursor_in_lit = cursor - lit_start;
+
+ let prefix = &original.text()[..cursor_in_lit.into()];
+ let braces = prefix.char_indices().rev().skip_while(|&(_, c)| c.is_alphanumeric()).next_tuple();
+ let brace_offset = match braces {
+ // escaped brace
+ Some(((_, '{'), (_, '{'))) => return,
+ Some(((idx, '{'), _)) => lit_start + TextSize::from(idx as u32 + 1),
+ _ => return,
+ };
+
+ let source_range = TextRange::new(brace_offset, cursor);
+ ctx.locals.iter().for_each(|(name, _)| {
+ CompletionItem::new(CompletionItemKind::Binding, source_range, name.to_smol_str())
+ .add_to(acc);
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list_no_kw};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn works_when_wrapped() {
+ check(
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+fn main() {
+ let foobar = 1;
+ print!("f$0");
+}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn no_completion_without_brace() {
+ check(
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("f$0");
+}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn completes_locals() {
+ check_edit(
+ "foobar",
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{f$0");
+}
+"#,
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{foobar");
+}
+"#,
+ );
+ check_edit(
+ "foobar",
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{$0");
+}
+"#,
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{foobar");
+}
+"#,
+ );
+ }
+}
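
The interesting bit above is the backwards scan that finds the `{` opening the format argument under the cursor and bails out on an escaped `{{`. A standalone sketch of that scan, not part of the patch, working on a plain string slice that (like the real code) still contains the opening quote of the literal:

```rust
// Illustrative sketch only: locate the `{` that opens the format argument the
// cursor is in; returns the offset just past it, or None when there is none.
fn format_arg_start(text_up_to_cursor: &str) -> Option<usize> {
    // Walk back over the partially typed identifier, then look at the
    // characters immediately before it.
    let mut rev = text_up_to_cursor
        .char_indices()
        .rev()
        .skip_while(|&(_, c)| c.is_alphanumeric());
    match (rev.next(), rev.next()) {
        // `{{` is an escaped brace, not a format argument.
        (Some((_, '{')), Some((_, '{'))) => None,
        // An unescaped `{`: the argument text starts right after it.
        (Some((idx, '{')), _) => Some(idx + 1),
        _ => None,
    }
}

fn main() {
    // `format_args!("{foo$0")` -> the identifier starts right after the `{`.
    assert_eq!(format_arg_start("\"{foo"), Some(2));
    // `format_args!("{{foo$0")` -> escaped brace, nothing to complete.
    assert_eq!(format_arg_start("\"{{foo"), None);
    // `format_args!("foo$0")` -> no brace before the identifier, nothing to complete.
    assert_eq!(format_arg_start("\"foo"), None);
}
```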
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
new file mode 100644
index 000000000..60d05ae46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
@@ -0,0 +1,133 @@
+//! Completion of paths and keywords at item list position.
+
+use crate::{
+ context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified},
+ CompletionContext, Completions,
+};
+
+pub(crate) mod trait_impl;
+
+pub(crate) fn complete_item_list_in_expr(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ expr_ctx: &ExprCtx,
+) {
+ if !expr_ctx.in_block_expr {
+ return;
+ }
+ if !path_ctx.is_trivial_path() {
+ return;
+ }
+ add_keywords(acc, ctx, None);
+}
+
+pub(crate) fn complete_item_list(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ kind: &ItemListKind,
+) {
+ let _p = profile::span("complete_item_list");
+ if path_ctx.is_trivial_path() {
+ add_keywords(acc, ctx, Some(kind));
+ }
+
+ match qualified {
+ Qualified::With {
+ resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+ super_chain_len,
+ ..
+ } => {
+ for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
+ match def {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_fn_like(ctx.db) => {
+ acc.add_macro(ctx, path_ctx, m, name)
+ }
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ acc.add_module(ctx, path_ctx, m, name)
+ }
+ _ => (),
+ }
+ }
+
+ acc.add_super_keyword(ctx, *super_chain_len);
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ Qualified::No if ctx.qualifier_ctx.none() => {
+ ctx.process_all_names(&mut |name, def| match def {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_fn_like(ctx.db) => {
+ acc.add_macro(ctx, path_ctx, m, name)
+ }
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
+ acc.add_module(ctx, path_ctx, m, name)
+ }
+ _ => (),
+ });
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::No | Qualified::With { .. } => {}
+ }
+}
+
+fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option<&ItemListKind>) {
+ let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+
+ let in_item_list = matches!(kind, Some(ItemListKind::SourceFile | ItemListKind::Module) | None);
+ let in_assoc_non_trait_impl = matches!(kind, Some(ItemListKind::Impl | ItemListKind::Trait));
+ let in_extern_block = matches!(kind, Some(ItemListKind::ExternBlock));
+ let in_trait = matches!(kind, Some(ItemListKind::Trait));
+ let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
+ let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
+ let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
+ let in_block = matches!(kind, None);
+
+ if !in_trait_impl {
+ if ctx.qualifier_ctx.unsafe_tok.is_some() {
+ if in_item_list || in_assoc_non_trait_impl {
+ add_keyword("fn", "fn $1($2) {\n $0\n}");
+ }
+ if in_item_list {
+ add_keyword("trait", "trait $1 {\n $0\n}");
+ if no_qualifiers {
+ add_keyword("impl", "impl $1 {\n $0\n}");
+ }
+ }
+ return;
+ }
+
+ if in_item_list {
+ add_keyword("enum", "enum $1 {\n $0\n}");
+ add_keyword("mod", "mod $0");
+ add_keyword("static", "static $0");
+ add_keyword("struct", "struct $0");
+ add_keyword("trait", "trait $1 {\n $0\n}");
+ add_keyword("union", "union $1 {\n $0\n}");
+ add_keyword("use", "use $0");
+ if no_qualifiers {
+ add_keyword("impl", "impl $1 {\n $0\n}");
+ }
+ }
+
+ if !in_trait && !in_block && no_qualifiers {
+ add_keyword("pub(crate)", "pub(crate)");
+ add_keyword("pub(super)", "pub(super)");
+ add_keyword("pub", "pub");
+ }
+
+ if in_extern_block {
+ add_keyword("fn", "fn $1($2);");
+ } else {
+ if !in_inherent_impl {
+ if !in_trait {
+ add_keyword("extern", "extern $0");
+ }
+ add_keyword("type", "type $0");
+ }
+
+ add_keyword("fn", "fn $1($2) {\n $0\n}");
+ add_keyword("unsafe", "unsafe");
+ add_keyword("const", "const $0");
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
new file mode 100644
index 000000000..e9256803c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -0,0 +1,1160 @@
+//! Completion for associated items in a trait implementation.
+//!
+//! This module adds the completion items related to implementing associated
+//! items within an `impl Trait for Struct` block. The current context node
+//! must be within either a `FN`, `TYPE_ALIAS`, or `CONST` node
+//! and a direct child of an `IMPL`.
+//!
+//! # Examples
+//!
+//! Considering the following trait `impl`:
+//!
+//! ```ignore
+//! trait SomeTrait {
+//! fn foo();
+//! }
+//!
+//! impl SomeTrait for () {
+//! fn f$0
+//! }
+//! ```
+//!
+//! may result in the completion of the following method:
+//!
+//! ```ignore
+//! # trait SomeTrait {
+//! # fn foo();
+//! # }
+//!
+//! impl SomeTrait for () {
+//! fn foo() {}$0
+//! }
+//! ```
+
+use hir::{self, HasAttrs};
+use ide_db::{
+ path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node,
+ traits::get_missing_assoc_items, SymbolKind,
+};
+use syntax::{
+ ast::{self, edit_in_place::AttrsOwnerEdit},
+ AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T,
+};
+use text_edit::TextEdit;
+
+use crate::{
+ context::PathCompletionCtx, CompletionContext, CompletionItem, CompletionItemKind,
+ CompletionRelevance, Completions,
+};
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum ImplCompletionKind {
+ All,
+ Fn,
+ TypeAlias,
+ Const,
+}
+
+pub(crate) fn complete_trait_impl_const(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+) -> Option<()> {
+ complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::Const)
+}
+
+pub(crate) fn complete_trait_impl_type_alias(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+) -> Option<()> {
+ complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::TypeAlias)
+}
+
+pub(crate) fn complete_trait_impl_fn(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+) -> Option<()> {
+ complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::Fn)
+}
+
+fn complete_trait_impl_name(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+ kind: ImplCompletionKind,
+) -> Option<()> {
+ let token = ctx.token.clone();
+ let item = match name {
+ Some(name) => name.syntax().parent(),
+ None => if token.kind() == SyntaxKind::WHITESPACE { token.prev_token()? } else { token }
+ .parent(),
+ }?;
+ complete_trait_impl(
+ acc,
+ ctx,
+ kind,
+ replacement_range(ctx, &item),
+ // item -> ASSOC_ITEM_LIST -> IMPL
+ &ast::Impl::cast(item.parent()?.parent()?)?,
+ );
+ Some(())
+}
+
+pub(crate) fn complete_trait_impl_item_by_name(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ name_ref: &Option<ast::NameRef>,
+ impl_: &Option<ast::Impl>,
+) {
+ if !path_ctx.is_trivial_path() {
+ return;
+ }
+ if let Some(impl_) = impl_ {
+ complete_trait_impl(
+ acc,
+ ctx,
+ ImplCompletionKind::All,
+ match name_ref {
+ Some(name) => name.syntax().text_range(),
+ None => ctx.source_range(),
+ },
+ impl_,
+ );
+ }
+}
+
+fn complete_trait_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ kind: ImplCompletionKind,
+ replacement_range: TextRange,
+ impl_def: &ast::Impl,
+) {
+ if let Some(hir_impl) = ctx.sema.to_def(impl_def) {
+ get_missing_assoc_items(&ctx.sema, impl_def).into_iter().for_each(|item| {
+ use self::ImplCompletionKind::*;
+ match (item, kind) {
+ (hir::AssocItem::Function(func), All | Fn) => {
+ add_function_impl(acc, ctx, replacement_range, func, hir_impl)
+ }
+ (hir::AssocItem::TypeAlias(type_alias), All | TypeAlias) => {
+ add_type_alias_impl(acc, ctx, replacement_range, type_alias)
+ }
+ (hir::AssocItem::Const(const_), All | Const) => {
+ add_const_impl(acc, ctx, replacement_range, const_, hir_impl)
+ }
+ _ => {}
+ }
+ });
+ }
+}
+
+fn add_function_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ replacement_range: TextRange,
+ func: hir::Function,
+ impl_def: hir::Impl,
+) {
+ let fn_name = func.name(ctx.db);
+
+ let label = format!(
+ "fn {}({})",
+ fn_name,
+ if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." }
+ );
+
+ let completion_kind = if func.has_self_param(ctx.db) {
+ CompletionItemKind::Method
+ } else {
+ CompletionItemKind::SymbolKind(SymbolKind::Function)
+ };
+
+ let mut item = CompletionItem::new(completion_kind, replacement_range, label);
+ item.lookup_by(format!("fn {}", fn_name))
+ .set_documentation(func.docs(ctx.db))
+ .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
+
+ if let Some(source) = ctx.sema.source(func) {
+ let assoc_item = ast::AssocItem::Fn(source.value);
+ if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
+ let transformed_fn = match transformed_item {
+ ast::AssocItem::Fn(func) => func,
+ _ => unreachable!(),
+ };
+
+ let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro());
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = format!("{} {{\n $0\n}}", function_decl);
+ item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
+ }
+ None => {
+ let header = format!("{} {{", function_decl);
+ item.text_edit(TextEdit::replace(replacement_range, header));
+ }
+ };
+ item.add_to(acc);
+ }
+ }
+}
+
+/// Transform a relevant associated item to inline generics from the impl, remove attrs and docs, etc.
+fn get_transformed_assoc_item(
+ ctx: &CompletionContext<'_>,
+ assoc_item: ast::AssocItem,
+ impl_def: hir::Impl,
+) -> Option<ast::AssocItem> {
+ let assoc_item = assoc_item.clone_for_update();
+ let trait_ = impl_def.trait_(ctx.db)?;
+ let source_scope = &ctx.sema.scope_for_def(trait_);
+ let target_scope = &ctx.sema.scope(ctx.sema.source(impl_def)?.syntax().value)?;
+ let transform = PathTransform::trait_impl(
+ target_scope,
+ source_scope,
+ trait_,
+ ctx.sema.source(impl_def)?.value,
+ );
+
+ transform.apply(assoc_item.syntax());
+ if let ast::AssocItem::Fn(func) = &assoc_item {
+ func.remove_attrs_and_docs();
+ }
+ Some(assoc_item)
+}
+
+fn add_type_alias_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ replacement_range: TextRange,
+ type_alias: hir::TypeAlias,
+) {
+ let alias_name = type_alias.name(ctx.db);
+ let (alias_name, escaped_name) = (alias_name.to_smol_str(), alias_name.escaped().to_smol_str());
+
+ let label = format!("type {} =", alias_name);
+ let replacement = format!("type {} = ", escaped_name);
+
+ let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label);
+ item.lookup_by(format!("type {}", alias_name))
+ .set_documentation(type_alias.docs(ctx.db))
+ .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
+ match ctx.config.snippet_cap {
+ Some(cap) => item
+ .snippet_edit(cap, TextEdit::replace(replacement_range, format!("{}$0;", replacement))),
+ None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
+ };
+ item.add_to(acc);
+}
+
+fn add_const_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ replacement_range: TextRange,
+ const_: hir::Const,
+ impl_def: hir::Impl,
+) {
+ let const_name = const_.name(ctx.db).map(|n| n.to_smol_str());
+
+ if let Some(const_name) = const_name {
+ if let Some(source) = ctx.sema.source(const_) {
+ let assoc_item = ast::AssocItem::Const(source.value);
+ if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
+ let transformed_const = match transformed_item {
+ ast::AssocItem::Const(const_) => const_,
+ _ => unreachable!(),
+ };
+
+ let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro());
+ let replacement = format!("{} ", label);
+
+ let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label);
+ item.lookup_by(format!("const {}", const_name))
+ .set_documentation(const_.docs(ctx.db))
+ .set_relevance(CompletionRelevance {
+ is_item_from_trait: true,
+ ..Default::default()
+ });
+ match ctx.config.snippet_cap {
+ Some(cap) => item.snippet_edit(
+ cap,
+ TextEdit::replace(replacement_range, format!("{}$0;", replacement)),
+ ),
+ None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
+ };
+ item.add_to(acc);
+ }
+ }
+ }
+}
+
+fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> String {
+ const_.remove_attrs_and_docs();
+ let const_ = if needs_whitespace {
+ insert_whitespace_into_node::insert_ws_into(const_.syntax().clone())
+ } else {
+ const_.syntax().clone()
+ };
+
+ let start = const_.text_range().start();
+ let const_end = const_.text_range().end();
+
+ let end = const_
+ .children_with_tokens()
+ .find(|s| s.kind() == T![;] || s.kind() == T![=])
+ .map_or(const_end, |f| f.text_range().start());
+
+ let len = end - start;
+ let range = TextRange::new(0.into(), len);
+
+ let syntax = const_.text().slice(range).to_string();
+
+ format!("{} =", syntax.trim_end())
+}
+
+fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
+ node.remove_attrs_and_docs();
+
+ let node = if needs_whitespace {
+ insert_whitespace_into_node::insert_ws_into(node.syntax().clone())
+ } else {
+ node.syntax().clone()
+ };
+
+ let start = node.text_range().start();
+ let end = node.text_range().end();
+
+ let end = node
+ .last_child_or_token()
+ .filter(|s| s.kind() == T![;] || s.kind() == SyntaxKind::BLOCK_EXPR)
+ .map_or(end, |f| f.text_range().start());
+
+ let len = end - start;
+ let range = TextRange::new(0.into(), len);
+
+ let syntax = node.text().slice(range).to_string();
+
+ syntax.trim_end().to_owned()
+}
+
+fn replacement_range(ctx: &CompletionContext<'_>, item: &SyntaxNode) -> TextRange {
+ let first_child = item
+ .children_with_tokens()
+ .find(|child| {
+ !matches!(child.kind(), SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR)
+ })
+ .unwrap_or_else(|| SyntaxElement::Node(item.clone()));
+
+ TextRange::new(first_child.text_range().start(), ctx.source_range().end())
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list_no_kw};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn no_completion_inside_fn() {
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ t$0
+ }
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ fn t$0
+ }
+}
+",
+ expect![[""]],
+ );
+
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ fn $0
+ }
+}
+",
+ expect![[""]],
+ );
+
+ // https://github.com/rust-lang/rust-analyzer/pull/5976#issuecomment-692332191
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ foo.$0
+ }
+}
+",
+ expect![[r#""#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(_: i32); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test(t$0)
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ bn &mut self
+ bn &self
+ bn mut self
+ bn self
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(_: fn()); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test(f: fn $0)
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_completion_inside_const() {
+ check(
+ r"
+trait Test { const TEST: fn(); const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: fn $0
+}
+",
+ expect![[r#""#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: T$0
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = f$0
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ t$0
+ };
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ fn $0
+ };
+}
+",
+ expect![[""]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ fn t$0
+ };
+}
+",
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn no_completion_inside_type() {
+ check(
+ r"
+trait Test { type Test; type Test2; fn test(); }
+struct T;
+
+impl Test for T {
+ type Test = T$0;
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { type Test; type Test2; fn test(); }
+struct T;
+
+impl Test for T {
+ type Test = fn $0;
+}
+",
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn name_ref_single_function() {
+ check_edit(
+ "fn test",
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ t$0
+}
+"#,
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn test() {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_function() {
+ check_edit(
+ "fn test",
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn t$0
+}
+"#,
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn test() {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generic_fn() {
+ check_edit(
+ "fn foo",
+ r#"
+trait Test {
+ fn foo<T>();
+}
+struct T;
+
+impl Test for T {
+ fn f$0
+}
+"#,
+ r#"
+trait Test {
+ fn foo<T>();
+}
+struct T;
+
+impl Test for T {
+ fn foo<T>() {
+ $0
+}
+}
+"#,
+ );
+ check_edit(
+ "fn foo",
+ r#"
+trait Test {
+ fn foo<T>() where T: Into<String>;
+}
+struct T;
+
+impl Test for T {
+ fn f$0
+}
+"#,
+ r#"
+trait Test {
+ fn foo<T>() where T: Into<String>;
+}
+struct T;
+
+impl Test for T {
+ fn foo<T>() where T: Into<String> {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_type() {
+ check_edit(
+ "type SomeType",
+ r#"
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type S$0
+}
+"#,
+ "
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type SomeType = $0;\n\
+}
+",
+ );
+ check_edit(
+ "type SomeType",
+ r#"
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type$0
+}
+"#,
+ "
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type SomeType = $0;\n\
+}
+",
+ );
+ }
+
+ #[test]
+ fn associated_const() {
+ check_edit(
+ "const SOME_CONST",
+ r#"
+trait Test {
+ const SOME_CONST: u16;
+}
+
+impl Test for () {
+ const S$0
+}
+"#,
+ "
+trait Test {
+ const SOME_CONST: u16;
+}
+
+impl Test for () {
+ const SOME_CONST: u16 = $0;\n\
+}
+",
+ );
+
+ check_edit(
+ "const SOME_CONST",
+ r#"
+trait Test {
+ const SOME_CONST: u16 = 92;
+}
+
+impl Test for () {
+ const S$0
+}
+"#,
+ "
+trait Test {
+ const SOME_CONST: u16 = 92;
+}
+
+impl Test for () {
+ const SOME_CONST: u16 = $0;\n\
+}
+",
+ );
+ }
+
+ #[test]
+ fn complete_without_name() {
+ let test = |completion: &str, hint: &str, completed: &str, next_sibling: &str| {
+ check_edit(
+ completion,
+ &format!(
+ r#"
+trait Test {{
+ type Foo;
+ const CONST: u16;
+ fn bar();
+}}
+struct T;
+
+impl Test for T {{
+ {}
+ {}
+}}
+"#,
+ hint, next_sibling
+ ),
+ &format!(
+ r#"
+trait Test {{
+ type Foo;
+ const CONST: u16;
+ fn bar();
+}}
+struct T;
+
+impl Test for T {{
+ {}
+ {}
+}}
+"#,
+ completed, next_sibling
+ ),
+ )
+ };
+
+ // Enumerate some possible next siblings.
+ for next_sibling in &[
+ "",
+ "fn other_fn() {}", // `const $0 fn` -> `const fn`
+ "type OtherType = i32;",
+ "const OTHER_CONST: i32 = 0;",
+ "async fn other_fn() {}",
+ "unsafe fn other_fn() {}",
+ "default fn other_fn() {}",
+ "default type OtherType = i32;",
+ "default const OTHER_CONST: i32 = 0;",
+ ] {
+ test("fn bar", "fn $0", "fn bar() {\n $0\n}", next_sibling);
+ test("type Foo", "type $0", "type Foo = $0;", next_sibling);
+ test("const CONST", "const $0", "const CONST: u16 = $0;", next_sibling);
+ }
+ }
+
+ #[test]
+ fn snippet_does_not_overwrite_comment_or_attr() {
+ let test = |completion: &str, hint: &str, completed: &str| {
+ check_edit(
+ completion,
+ &format!(
+ r#"
+trait Foo {{
+ type Type;
+ fn function();
+ const CONST: i32 = 0;
+}}
+struct T;
+
+impl Foo for T {{
+ // Comment
+ #[bar]
+ {}
+}}
+"#,
+ hint
+ ),
+ &format!(
+ r#"
+trait Foo {{
+ type Type;
+ fn function();
+ const CONST: i32 = 0;
+}}
+struct T;
+
+impl Foo for T {{
+ // Comment
+ #[bar]
+ {}
+}}
+"#,
+ completed
+ ),
+ )
+ };
+ test("fn function", "fn f$0", "fn function() {\n $0\n}");
+ test("type Type", "type T$0", "type Type = $0;");
+ test("const CONST", "const C$0", "const CONST: i32 = $0;");
+ }
+
+ #[test]
+ fn generics_are_inlined_in_return_type() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function() -> T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function() -> T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function() -> u32 {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_parameter() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function(bar: T);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function(bar: T);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function(bar: u32) {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_when_part_of_other_types() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function(bar: Vec<T>);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function(bar: Vec<T>);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function(bar: Vec<u32>) {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_complex() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T, U, V> {
+ fn function(bar: Vec<T>, baz: U) -> Arc<Vec<V>>;
+}
+struct Bar;
+
+impl Foo<u32, Vec<usize>, u8> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T, U, V> {
+ fn function(bar: Vec<T>, baz: U) -> Arc<Vec<V>>;
+}
+struct Bar;
+
+impl Foo<u32, Vec<usize>, u8> for Bar {
+ fn function(bar: Vec<u32>, baz: Vec<usize>) -> Arc<Vec<u8>> {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_associated_const() {
+ check_edit(
+ "const BAR",
+ r#"
+trait Foo<T> {
+ const BAR: T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ const B$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ const BAR: T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ const BAR: u32 = $0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_where_clause() {
+ check_edit(
+ "fn function",
+ r#"
+trait SomeTrait<T> {}
+
+trait Foo<T> {
+ fn function()
+ where Self: SomeTrait<T>;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait SomeTrait<T> {}
+
+trait Foo<T> {
+ fn function()
+ where Self: SomeTrait<T>;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function()
+ where Self: SomeTrait<u32> {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn works_directly_in_impl() {
+ check(
+ r#"
+trait Tr {
+ fn required();
+}
+
+impl Tr for () {
+ $0
+}
+"#,
+ expect![[r#"
+ fn fn required()
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ fn provided() {}
+ fn required();
+}
+
+impl Tr for () {
+ fn provided() {}
+ $0
+}
+"#,
+ expect![[r#"
+ fn fn required()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fixes_up_macro_generated() {
+ check_edit(
+ "fn foo",
+ r#"
+macro_rules! noop {
+ ($($item: item)*) => {
+ $($item)*
+ }
+}
+
+noop! {
+ trait Foo {
+ fn foo(&mut self, bar: i64, baz: &mut u32) -> Result<(), u32>;
+ }
+}
+
+struct Test;
+
+impl Foo for Test {
+ $0
+}
+"#,
+ r#"
+macro_rules! noop {
+ ($($item: item)*) => {
+ $($item)*
+ }
+}
+
+noop! {
+ trait Foo {
+ fn foo(&mut self, bar: i64, baz: &mut u32) -> Result<(), u32>;
+ }
+}
+
+struct Test;
+
+impl Foo for Test {
+ fn foo(&mut self,bar:i64,baz: &mut u32) -> Result<(),u32> {
+ $0
+}
+}
+"#,
+ );
+ }
+}
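
For the function case, the completion item is essentially two strings: a short label such as `fn foo(..)` for the list, and a snippet that pastes the (possibly generics-inlined) signature followed by an empty body with the cursor inside. A reduced sketch of just that formatting, not part of the patch, with the signature passed in as a plain string instead of being derived from `hir`:

```rust
// Illustrative sketch only: the label / snippet pair built for a missing trait fn.
fn function_impl_texts(name: &str, has_params: bool, declaration: &str) -> (String, String) {
    // Label shown in the completion list, e.g. `fn foo(..)` when there are parameters.
    let label = format!("fn {}({})", name, if has_params { ".." } else { "" });
    // Snippet inserted on accept; `$0` marks where the cursor ends up.
    let snippet = format!("{} {{\n    $0\n}}", declaration);
    (label, snippet)
}

fn main() {
    let (label, snippet) = function_impl_texts("foo", false, "fn foo()");
    assert_eq!(label, "fn foo()");
    assert_eq!(snippet, "fn foo() {\n    $0\n}");
}
```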
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
new file mode 100644
index 000000000..3989a451b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
@@ -0,0 +1,237 @@
+//! Completes `where` and `for` keywords.
+
+use syntax::ast::{self, Item};
+
+use crate::{CompletionContext, Completions};
+
+pub(crate) fn complete_for_and_where(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ keyword_item: &ast::Item,
+) {
+ let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+
+ match keyword_item {
+ Item::Impl(it) => {
+ if it.for_token().is_none() && it.trait_().is_none() && it.self_ty().is_some() {
+ add_keyword("for", "for");
+ }
+ add_keyword("where", "where");
+ }
+ Item::Enum(_)
+ | Item::Fn(_)
+ | Item::Struct(_)
+ | Item::Trait(_)
+ | Item::TypeAlias(_)
+ | Item::Union(_) => {
+ add_keyword("where", "where");
+ }
+ _ => (),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn test_else_edit_after_if() {
+ check_edit(
+ "else",
+ r#"fn quux() { if true { () } $0 }"#,
+ r#"fn quux() { if true { () } else {
+ $0
+} }"#,
+ );
+ }
+
+ #[test]
+ fn test_keywords_after_unsafe_in_block_expr() {
+ check(
+ r"fn my_fn() { unsafe $0 }",
+ expect![[r#"
+ kw fn
+ kw impl
+ kw trait
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_completion_await_impls_future() {
+ check(
+ r#"
+//- minicore: future
+use core::future::*;
+struct A {}
+impl Future for A {}
+fn foo(a: A) { a.$0 }
+"#,
+ expect![[r#"
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ );
+
+ check(
+ r#"
+//- minicore: future
+use std::future::*;
+fn foo() {
+ let a = async {};
+ a.$0
+}
+"#,
+ expect![[r#"
+ kw await expr.await
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+ }
+
+ #[test]
+ fn let_semi() {
+ cov_mark::check!(let_semi);
+ check_edit(
+ "match",
+ r#"
+fn main() { let x = $0 }
+"#,
+ r#"
+fn main() { let x = match $1 {
+ $0
+}; }
+"#,
+ );
+
+ check_edit(
+ "if",
+ r#"
+fn main() {
+ let x = $0
+ let y = 92;
+}
+"#,
+ r#"
+fn main() {
+ let x = if $1 {
+ $0
+};
+ let y = 92;
+}
+"#,
+ );
+
+ check_edit(
+ "loop",
+ r#"
+fn main() {
+ let x = $0
+ bar();
+}
+"#,
+ r#"
+fn main() {
+ let x = loop {
+ $0
+};
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn if_completion_in_match_guard() {
+ check_edit(
+ "if",
+ r"
+fn main() {
+ match () {
+ () $0
+ }
+}
+",
+ r"
+fn main() {
+ match () {
+ () if $0
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn if_completion_in_match_arm_expr() {
+ check_edit(
+ "if",
+ r"
+fn main() {
+ match () {
+ () => $0
+ }
+}
+",
+ r"
+fn main() {
+ match () {
+ () => if $1 {
+ $0
+}
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn if_completion_in_match_arm_expr_block() {
+ check_edit(
+ "if",
+ r"
+fn main() {
+ match () {
+ () => {
+ $0
+ }
+ }
+}
+",
+ r"
+fn main() {
+ match () {
+ () => {
+ if $1 {
+ $0
+}
+ }
+ }
+}
+",
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
new file mode 100644
index 000000000..3b79def63
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
@@ -0,0 +1,341 @@
+//! Completes lifetimes and labels.
+//!
+//! These completions work a bit differently in that they are only shown when what the user types
+//! has a `'` preceding it, as our fake syntax tree is invalid otherwise (due to us not inserting
+//! a lifetime but an ident for obvious reasons).
+//! Due to this, all the tests for lifetimes and labels live in this module for the time being, as
+//! there is no value in lifting them out into the outline test module: they would either not
+//! show up in normal completions, or they would show no completions other than lifetimes, depending
+//! on the fixture input.
+use hir::{known, ScopeDef};
+use syntax::{ast, TokenText};
+
+use crate::{
+ completions::Completions,
+ context::{CompletionContext, LifetimeContext, LifetimeKind},
+};
+
+/// Completes lifetimes.
+pub(crate) fn complete_lifetime(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ lifetime_ctx: &LifetimeContext,
+) {
+ let (lp, lifetime) = match lifetime_ctx {
+ LifetimeContext { kind: LifetimeKind::Lifetime, lifetime } => (None, lifetime),
+ LifetimeContext {
+ kind: LifetimeKind::LifetimeParam { is_decl: false, param },
+ lifetime,
+ } => (Some(param), lifetime),
+ _ => return,
+ };
+ let param_lifetime = match (lifetime, lp.and_then(|lp| lp.lifetime())) {
+ (Some(lt), Some(lp)) if lp == lt.clone() => return,
+ (Some(_), Some(lp)) => Some(lp),
+ _ => None,
+ };
+ let param_lifetime = param_lifetime.as_ref().map(ast::Lifetime::text);
+ let param_lifetime = param_lifetime.as_ref().map(TokenText::as_str);
+
+ ctx.process_all_names_raw(&mut |name, res| {
+ if matches!(
+ res,
+ ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_))
+ if param_lifetime != Some(&*name.to_smol_str())
+ ) {
+ acc.add_lifetime(ctx, name);
+ }
+ });
+ if param_lifetime.is_none() {
+ acc.add_lifetime(ctx, known::STATIC_LIFETIME);
+ }
+}
+
+/// Completes labels.
+pub(crate) fn complete_label(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ lifetime_ctx: &LifetimeContext,
+) {
+ if !matches!(lifetime_ctx, LifetimeContext { kind: LifetimeKind::LabelRef, .. }) {
+ return;
+ }
+ ctx.process_all_names_raw(&mut |name, res| {
+ if let ScopeDef::Label(_) = res {
+ acc.add_label(ctx, name);
+ }
+ });
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn check_lifetime_edit() {
+ check_edit(
+ "'lifetime",
+ r#"
+fn func<'lifetime>(foo: &'li$0) {}
+"#,
+ r#"
+fn func<'lifetime>(foo: &'lifetime) {}
+"#,
+ );
+ cov_mark::check!(completes_if_lifetime_without_idents);
+ check_edit(
+ "'lifetime",
+ r#"
+fn func<'lifetime>(foo: &'$0) {}
+"#,
+ r#"
+fn func<'lifetime>(foo: &'lifetime) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_ref() {
+ check(
+ r#"
+fn foo<'lifetime>(foo: &'a$0 usize) {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_ref_missing_ty() {
+ check(
+ r#"
+fn foo<'lifetime>(foo: &'a$0) {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+ #[test]
+ fn complete_lifetime_in_self_ref() {
+ check(
+ r#"
+struct Foo;
+impl<'impl> Foo {
+ fn foo<'func>(&'a$0 self) {}
+}
+"#,
+ expect![[r#"
+ lt 'func
+ lt 'impl
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_arg_list() {
+ check(
+ r#"
+struct Foo<'lt>;
+fn foo<'lifetime>(_: Foo<'a$0>) {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_where_pred() {
+ check(
+ r#"
+fn foo2<'lifetime, T>() where 'a$0 {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_ty_bound() {
+ check(
+ r#"
+fn foo2<'lifetime, T>() where T: 'a$0 {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ check(
+ r#"
+fn foo2<'lifetime, T>() where T: Trait<'a$0> {}
+"#,
+ expect![[r#"
+ lt 'lifetime
+ lt 'static
+ "#]],
+ );
+ }
+
+ #[test]
+ fn dont_complete_lifetime_in_assoc_ty_bound() {
+ check(
+ r#"
+fn foo2<'lifetime, T>() where T: Trait<Item = 'a$0> {}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn complete_lifetime_in_param_list() {
+ check(
+ r#"
+fn foo<'$0>() {}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+fn foo<'a$0>() {}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+fn foo<'footime, 'lifetime: 'a$0>() {}
+"#,
+ expect![[r#"
+ lt 'footime
+ "#]],
+ );
+ }
+
+ #[test]
+ fn check_label_edit() {
+ check_edit(
+ "'label",
+ r#"
+fn foo() {
+ 'label: loop {
+ break '$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ 'label: loop {
+ break 'label
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_label_in_loop() {
+ check(
+ r#"
+fn foo() {
+ 'foop: loop {
+ break '$0
+ }
+}
+"#,
+ expect![[r#"
+ lb 'foop
+ "#]],
+ );
+ check(
+ r#"
+fn foo() {
+ 'foop: loop {
+ continue '$0
+ }
+}
+"#,
+ expect![[r#"
+ lb 'foop
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_block_nested() {
+ check(
+ r#"
+fn foo() {
+ 'foop: {
+ 'baap: {
+ break '$0
+ }
+ }
+}
+"#,
+ expect![[r#"
+ lb 'baap
+ lb 'foop
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_loop_with_value() {
+ check(
+ r#"
+fn foo() {
+ 'foop: loop {
+ break '$0 i32;
+ }
+}
+"#,
+ expect![[r#"
+ lb 'foop
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_while_cond() {
+ check(
+ r#"
+fn foo() {
+ 'outer: while { 'inner: loop { break '$0 } } {}
+}
+"#,
+ expect![[r#"
+ lb 'inner
+ lb 'outer
+ "#]],
+ );
+ }
+
+ #[test]
+ fn complete_label_in_for_iterable() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in [{ 'inner: loop { break '$0 } }] {}
+}
+"#,
+ expect![[r#"
+ lb 'inner
+ "#]],
+ );
+ }
+}
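
Stripped of the `hir` plumbing, `complete_lifetime` offers every lifetime name in scope except the one belonging to the parameter being completed, and adds `'static` when it is not constrained by such a parameter. A loose standalone sketch of that selection, not part of the patch, with string names instead of `ScopeDef`s:

```rust
// Illustrative sketch only: pick the lifetimes to suggest.
fn lifetime_completions<'a>(in_scope: &[&'a str], param_lifetime: Option<&str>) -> Vec<&'a str> {
    let mut out: Vec<&'a str> = in_scope
        .iter()
        .copied()
        // never suggest the parameter's own lifetime back to it
        .filter(|lt| param_lifetime.map_or(true, |p| p != *lt))
        .collect();
    if param_lifetime.is_none() {
        out.push("'static");
    }
    out
}

fn main() {
    // `fn func<'lifetime>(foo: &'li$0) {}` -> the in-scope lifetime plus 'static.
    assert_eq!(lifetime_completions(&["'lifetime"], None), ["'lifetime", "'static"]);
    // When completing against a known param lifetime, that lifetime itself is skipped.
    assert_eq!(lifetime_completions(&["'a", "'b"], Some("'a")), ["'b"]);
}
```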
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
new file mode 100644
index 000000000..9c975b929
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -0,0 +1,354 @@
+//! Completes mod declarations.
+
+use std::iter;
+
+use hir::{Module, ModuleSource};
+use ide_db::{
+ base_db::{SourceDatabaseExt, VfsPath},
+ FxHashSet, RootDatabase, SymbolKind,
+};
+use syntax::{ast, AstNode, SyntaxKind};
+
+use crate::{context::CompletionContext, CompletionItem, Completions};
+
+/// Complete mod declaration, i.e. `mod $0;`
+pub(crate) fn complete_mod(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ mod_under_caret: &ast::Module,
+) -> Option<()> {
+ if mod_under_caret.item_list().is_some() {
+ return None;
+ }
+
+ let _p = profile::span("completion::complete_mod");
+
+ let mut current_module = ctx.module;
+    // For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself; we are
+    // still interested in its parent.
+ if ctx.original_token.kind() == SyntaxKind::IDENT {
+ if let Some(module) =
+ ctx.original_token.parent_ancestors().nth(1).and_then(ast::Module::cast)
+ {
+ match ctx.sema.to_def(&module) {
+ Some(module) if module == current_module => {
+ if let Some(parent) = current_module.parent(ctx.db) {
+ current_module = parent;
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+
+ let module_definition_file =
+ current_module.definition_source(ctx.db).file_id.original_file(ctx.db);
+ let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file));
+ let directory_to_look_for_submodules = directory_to_look_for_submodules(
+ current_module,
+ ctx.db,
+ source_root.path_for_file(&module_definition_file)?,
+ )?;
+
+ let existing_mod_declarations = current_module
+ .children(ctx.db)
+ .filter_map(|module| Some(module.name(ctx.db)?.to_string()))
+ .collect::<FxHashSet<_>>();
+
+ let module_declaration_file =
+ current_module.declaration_source(ctx.db).map(|module_declaration_source_file| {
+ module_declaration_source_file.file_id.original_file(ctx.db)
+ });
+
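+ // Propose every `.rs` file in the source root that could still become a child module
+ // of `current_module` and has not already been declared with a `mod` item.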
+ source_root
+ .iter()
+ .filter(|submodule_candidate_file| submodule_candidate_file != &module_definition_file)
+ .filter(|submodule_candidate_file| {
+ Some(submodule_candidate_file) != module_declaration_file.as_ref()
+ })
+ .filter_map(|submodule_file| {
+ let submodule_path = source_root.path_for_file(&submodule_file)?;
+ let directory_with_submodule = submodule_path.parent()?;
+ let (name, ext) = submodule_path.name_and_extension()?;
+ if ext != Some("rs") {
+ return None;
+ }
+ match name {
+ "lib" | "main" => None,
+ "mod" => {
+ if directory_with_submodule.parent()? == directory_to_look_for_submodules {
+ match directory_with_submodule.name_and_extension()? {
+ (directory_name, None) => Some(directory_name.to_owned()),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ }
+ file_name if directory_with_submodule == directory_to_look_for_submodules => {
+ Some(file_name.to_owned())
+ }
+ _ => None,
+ }
+ })
+ .filter(|name| !existing_mod_declarations.contains(name))
+ .for_each(|submodule_name| {
+ let mut label = submodule_name;
+ if mod_under_caret.semicolon_token().is_none() {
+ label.push(';');
+ }
+ let item = CompletionItem::new(SymbolKind::Module, ctx.source_range(), &label);
+ item.add_to(acc)
+ });
+
+ Some(())
+}
+
+fn directory_to_look_for_submodules(
+ module: Module,
+ db: &RootDatabase,
+ module_file_path: &VfsPath,
+) -> Option<VfsPath> {
+ let directory_with_module_path = module_file_path.parent()?;
+ let (name, ext) = module_file_path.name_and_extension()?;
+ if ext != Some("rs") {
+ return None;
+ }
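+ // For a `mod.rs`, `lib.rs` or `main.rs` file, submodules live in the same directory;
+ // for a regular `foo.rs` file they live in the sibling `foo/` directory (except for
+ // files directly under `src/bin/`, which can reference each other directly).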
+ let base_directory = match name {
+ "mod" | "lib" | "main" => Some(directory_with_module_path),
+ regular_rust_file_name => {
+ if matches!(
+ (
+ directory_with_module_path
+ .parent()
+ .as_ref()
+ .and_then(|path| path.name_and_extension()),
+ directory_with_module_path.name_and_extension(),
+ ),
+ (Some(("src", None)), Some(("bin", None)))
+ ) {
+ // files in /src/bin/ can import each other directly
+ Some(directory_with_module_path)
+ } else {
+ directory_with_module_path.join(regular_rust_file_name)
+ }
+ }
+ }?;
+
+ module_chain_to_containing_module_file(module, db)
+ .into_iter()
+ .filter_map(|module| module.name(db))
+ .try_fold(base_directory, |path, name| path.join(&name.to_smol_str()))
+}
+
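+/// Collects the chain of *inline* ancestor modules (those written as `mod name { ... }`
+/// inside the same file) starting from `current_module`, ordered outermost first.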
+fn module_chain_to_containing_module_file(
+ current_module: Module,
+ db: &RootDatabase,
+) -> Vec<Module> {
+ let mut path =
+ iter::successors(Some(current_module), |current_module| current_module.parent(db))
+ .take_while(|current_module| {
+ matches!(current_module.definition_source(db).value, ModuleSource::Module(_))
+ })
+ .collect::<Vec<_>>();
+ path.reverse();
+ path
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::completion_list;
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn lib_module_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod $0
+//- /foo.rs
+fn foo() {}
+//- /foo/ignored_foo.rs
+fn ignored_foo() {}
+//- /bar/mod.rs
+fn bar() {}
+//- /bar/ignored_bar.rs
+fn ignored_bar() {}
+"#,
+ expect![[r#"
+ md bar;
+ md foo;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_module_completion_with_module_body() {
+ check(
+ r#"
+//- /lib.rs
+mod $0 {
+
+}
+//- /foo.rs
+fn foo() {}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn main_module_completion() {
+ check(
+ r#"
+//- /main.rs
+mod $0
+//- /foo.rs
+fn foo() {}
+//- /foo/ignored_foo.rs
+fn ignored_foo() {}
+//- /bar/mod.rs
+fn bar() {}
+//- /bar/ignored_bar.rs
+fn ignored_bar() {}
+"#,
+ expect![[r#"
+ md bar;
+ md foo;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn main_test_module_completion() {
+ check(
+ r#"
+//- /main.rs
+mod tests {
+ mod $0;
+}
+//- /tests/foo.rs
+fn foo() {}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+ }
+
+ #[test]
+ fn directly_nested_module_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod $0;
+//- /foo/bar.rs
+fn bar() {}
+//- /foo/bar/ignored_bar.rs
+fn ignored_bar() {}
+//- /foo/baz/mod.rs
+fn baz() {}
+//- /foo/moar/ignored_moar.rs
+fn ignored_moar() {}
+"#,
+ expect![[r#"
+ md bar
+ md baz
+ "#]],
+ );
+ }
+
+ #[test]
+ fn nested_in_source_module_completion() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+mod bar {
+ mod $0
+}
+//- /foo/bar/baz.rs
+fn baz() {}
+"#,
+ expect![[r#"
+ md baz;
+ "#]],
+ );
+ }
+
+ // FIXME: binary modules are not properly supported in tests
+ // Binary modules are a bit special: they allow importing modules from `/src/bin`,
+ // which makes them a good place to test two things:
+ // * no cycles are allowed in mod declarations
+ // * no modules from the parent directory are proposed
+ // Unfortunately, binary module support lives in cargo, not rustc,
+ // hence the test does not work for now
+ //
+ // #[test]
+ // fn regular_bin_module_completion() {
+ // check(
+ // r#"
+ // //- /src/bin.rs
+ // fn main() {}
+ // //- /src/bin/foo.rs
+ // mod $0
+ // //- /src/bin/bar.rs
+ // fn bar() {}
+ // //- /src/bin/bar/bar_ignored.rs
+ // fn bar_ignored() {}
+ // "#,
+ // expect![[r#"
+ // md bar;
+ // "#]],foo
+ // );
+ // }
+
+ #[test]
+ fn already_declared_bin_module_completion_omitted() {
+ check(
+ r#"
+//- /src/bin.rs crate:main
+fn main() {}
+//- /src/bin/foo.rs
+mod $0
+//- /src/bin/bar.rs
+mod foo;
+fn bar() {}
+//- /src/bin/bar/bar_ignored.rs
+fn bar_ignored() {}
+"#,
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn name_partially_typed() {
+ check(
+ r#"
+//- /lib.rs
+mod f$0
+//- /foo.rs
+fn foo() {}
+//- /foo/ignored_foo.rs
+fn ignored_foo() {}
+//- /bar/mod.rs
+fn bar() {}
+//- /bar/ignored_bar.rs
+fn ignored_bar() {}
+"#,
+ expect![[r#"
+ md bar;
+ md foo;
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
new file mode 100644
index 000000000..71d2d9d43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
@@ -0,0 +1,185 @@
+//! Completes constants and paths in unqualified patterns.
+
+use hir::{db::DefDatabase, AssocItem, ScopeDef};
+use syntax::ast::Pat;
+
+use crate::{
+ context::{PathCompletionCtx, PatternContext, PatternRefutability, Qualified},
+ CompletionContext, Completions,
+};
+
+/// Completes constants and paths in unqualified patterns.
+pub(crate) fn complete_pattern(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+) {
+ match pattern_ctx.parent_pat.as_ref() {
+ Some(Pat::RangePat(_) | Pat::BoxPat(_)) => (),
+ Some(Pat::RefPat(r)) => {
+ if r.mut_token().is_none() {
+ acc.add_keyword(ctx, "mut");
+ }
+ }
+ _ => {
+ let tok = ctx.token.text_range().start();
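+ // Only suggest `ref`/`mut` where they could still be inserted: `ref` must come
+ // before an existing `mut`, and `mut` after an existing `ref`.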
+ match (pattern_ctx.ref_token.as_ref(), pattern_ctx.mut_token.as_ref()) {
+ (None, None) => {
+ acc.add_keyword(ctx, "ref");
+ acc.add_keyword(ctx, "mut");
+ }
+ (None, Some(m)) if tok < m.text_range().start() => {
+ acc.add_keyword(ctx, "ref");
+ }
+ (Some(r), None) if tok > r.text_range().end() => {
+ acc.add_keyword(ctx, "mut");
+ }
+ _ => (),
+ }
+ }
+ }
+
+ if pattern_ctx.record_pat.is_some() {
+ return;
+ }
+
+ let refutable = pattern_ctx.refutability == PatternRefutability::Refutable;
+ let single_variant_enum = |enum_: hir::Enum| ctx.db.enum_data(enum_.into()).variants.len() == 1;
+
+ if let Some(hir::Adt::Enum(e)) =
+ ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
+ {
+ if refutable || single_variant_enum(e) {
+ super::enum_variants_with_paths(
+ acc,
+ ctx,
+ e,
+ &pattern_ctx.impl_,
+ |acc, ctx, variant, path| {
+ acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path);
+ },
+ );
+ }
+ }
+
+ // FIXME: ideally, we should look at the type we are matching against and
+ // suggest variants + auto-imports
+ ctx.process_all_names(&mut |name, res| {
+ let add_simple_path = match res {
+ hir::ScopeDef::ModuleDef(def) => match def {
+ hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => {
+ acc.add_struct_pat(ctx, pattern_ctx, strukt, Some(name.clone()));
+ true
+ }
+ hir::ModuleDef::Variant(variant)
+ if refutable || single_variant_enum(variant.parent_enum(ctx.db)) =>
+ {
+ acc.add_variant_pat(ctx, pattern_ctx, None, variant, Some(name.clone()));
+ true
+ }
+ hir::ModuleDef::Adt(hir::Adt::Enum(e)) => refutable || single_variant_enum(e),
+ hir::ModuleDef::Const(..) => refutable,
+ hir::ModuleDef::Module(..) => true,
+ hir::ModuleDef::Macro(mac) => mac.is_fn_like(ctx.db),
+ _ => false,
+ },
+ hir::ScopeDef::ImplSelfType(impl_) => match impl_.self_ty(ctx.db).as_adt() {
+ Some(hir::Adt::Struct(strukt)) => {
+ acc.add_struct_pat(ctx, pattern_ctx, strukt, Some(name.clone()));
+ true
+ }
+ Some(hir::Adt::Enum(e)) => refutable || single_variant_enum(e),
+ Some(hir::Adt::Union(_)) => true,
+ _ => false,
+ },
+ ScopeDef::GenericParam(hir::GenericParam::ConstParam(_)) => true,
+ ScopeDef::GenericParam(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => false,
+ };
+ if add_simple_path {
+ acc.add_pattern_resolution(ctx, pattern_ctx, name, res);
+ }
+ });
+}
+
+pub(crate) fn complete_pattern_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+) {
+ match qualified {
+ Qualified::With { resolution: Some(resolution), super_chain_len, .. } => {
+ acc.add_super_keyword(ctx, *super_chain_len);
+
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+ let module_scope = module.scope(ctx.db, Some(ctx.module));
+ for (name, def) in module_scope {
+ let add_resolution = match def {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => {
+ mac.is_fn_like(ctx.db)
+ }
+ ScopeDef::ModuleDef(_) => true,
+ _ => false,
+ };
+
+ if add_resolution {
+ acc.add_path_resolution(ctx, path_ctx, name, def);
+ }
+ }
+ }
+ res => {
+ let ty = match res {
+ hir::PathResolution::TypeParam(param) => param.ty(ctx.db),
+ hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db),
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Struct(s))) => {
+ s.ty(ctx.db)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e))) => {
+ e.ty(ctx.db)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Union(u))) => {
+ u.ty(ctx.db)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::BuiltinType(ty)) => ty.ty(ctx.db),
+ _ => return,
+ };
+
+ if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+ acc.add_enum_variants(ctx, path_ctx, e);
+ }
+
+ ctx.iterate_path_candidates(&ty, |item| match item {
+ AssocItem::TypeAlias(ta) => acc.add_type_alias(ctx, ta),
+ AssocItem::Const(c) => acc.add_const(ctx, c),
+ _ => {}
+ });
+ }
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ Qualified::No => {
+ // this will only be hit if there are brackets or braces, otherwise this will be parsed as an ident pattern
+ ctx.process_all_names(&mut |name, res| {
+ // FIXME: we should check what kind of pattern we are in and filter accordingly
+ let add_completion = match res {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => mac.is_fn_like(ctx.db),
+ ScopeDef::ModuleDef(hir::ModuleDef::Adt(_)) => true,
+ ScopeDef::ModuleDef(hir::ModuleDef::Variant(_)) => true,
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(_)) => true,
+ ScopeDef::ImplSelfType(_) => true,
+ _ => false,
+ };
+ if add_completion {
+ acc.add_path_resolution(ctx, path_ctx, name, res);
+ }
+ });
+
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
new file mode 100644
index 000000000..9a891cea2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -0,0 +1,616 @@
+//! Postfix completions, like `Ok(10).ifl$0` => `if let Ok() = Ok(10) { $0 }`.
+
+mod format_like;
+
+use hir::{Documentation, HasAttrs};
+use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ SyntaxKind::{EXPR_STMT, STMT_LIST},
+ TextRange, TextSize,
+};
+use text_edit::TextEdit;
+
+use crate::{
+ completions::postfix::format_like::add_format_like_completions,
+ context::{CompletionContext, DotAccess, DotAccessKind},
+ item::{Builder, CompletionRelevancePostfixMatch},
+ CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope,
+};
+
+pub(crate) fn complete_postfix(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_access: &DotAccess,
+) {
+ if !ctx.config.enable_postfix_completions {
+ return;
+ }
+
+ let (dot_receiver, receiver_ty, receiver_is_ambiguous_float_literal) = match dot_access {
+ DotAccess { receiver_ty: Some(ty), receiver: Some(it), kind, .. } => (
+ it,
+ &ty.original,
+ match *kind {
+ DotAccessKind::Field { receiver_is_ambiguous_float_literal } => {
+ receiver_is_ambiguous_float_literal
+ }
+ DotAccessKind::Method { .. } => false,
+ },
+ ),
+ _ => return,
+ };
+
+ let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
+
+ let cap = match ctx.config.snippet_cap {
+ Some(it) => it,
+ None => return,
+ };
+
+ let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
+ Some(it) => it,
+ None => return,
+ };
+
+ if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() {
+ if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) {
+ if let &[hir::AssocItem::Function(drop_fn)] = &*drop_trait.items(ctx.db) {
+ cov_mark::hit!(postfix_drop_completion);
+ // FIXME: check that `drop` is in scope, use fully qualified path if it isn't/if shadowed
+ let mut item = postfix_snippet(
+ "drop",
+ "fn drop(&mut self)",
+ &format!("drop($0{})", receiver_text),
+ );
+ item.set_documentation(drop_fn.docs(ctx.db));
+ item.add_to(acc);
+ }
+ }
+ }
+
+ if !ctx.config.snippets.is_empty() {
+ add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
+ }
+
+ let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
+ if let Some(try_enum) = &try_enum {
+ match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "ifl",
+ "if let Ok {}",
+ &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+
+ postfix_snippet(
+ "while",
+ "while let Ok {}",
+ &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "ifl",
+ "if let Some {}",
+ &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+
+ postfix_snippet(
+ "while",
+ "while let Some {}",
+ &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ }
+ } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
+ postfix_snippet("if", "if expr {}", &format!("if {} {{\n $0\n}}", receiver_text))
+ .add_to(acc);
+ postfix_snippet(
+ "while",
+ "while expr {}",
+ &format!("while {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ postfix_snippet("not", "!expr", &format!("!{}", receiver_text)).add_to(acc);
+ } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
+ if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
+ postfix_snippet(
+ "for",
+ "for ele in expr {}",
+ &format!("for ele in {} {{\n $0\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ }
+
+ postfix_snippet("ref", "&expr", &format!("&{}", receiver_text)).add_to(acc);
+ postfix_snippet("refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc);
+
+ // The rest of the postfix completions create an expression that moves an argument,
+ // so it's better to consider references now to avoid breaking the compilation
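+ // (e.g. `&&42.dbg$0` should expand to `dbg!(&&42)`, not `&&dbg!(42)`)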
+ let dot_receiver = include_references(dot_receiver);
+ let receiver_text = get_receiver_text(&dot_receiver, receiver_is_ambiguous_float_literal);
+ let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) {
+ Some(it) => it,
+ None => return,
+ };
+
+ match try_enum {
+ Some(try_enum) => match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!(
+ "match {} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}",
+ receiver_text
+ ),
+ )
+ .add_to(acc);
+ }
+ },
+ None => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text),
+ )
+ .add_to(acc);
+ }
+ }
+
+ postfix_snippet("box", "Box::new(expr)", &format!("Box::new({})", receiver_text)).add_to(acc);
+ postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); // fixme
+ postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{})", receiver_text)).add_to(acc);
+ postfix_snippet("call", "function(expr)", &format!("${{1}}({})", receiver_text)).add_to(acc);
+
+ if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) {
+ if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
+ postfix_snippet("let", "let", &format!("let $0 = {};", receiver_text)).add_to(acc);
+ postfix_snippet("letm", "let mut", &format!("let mut $0 = {};", receiver_text))
+ .add_to(acc);
+ }
+ }
+
+ if let ast::Expr::Literal(literal) = dot_receiver.clone() {
+ if let Some(literal_text) = ast::String::cast(literal.token()) {
+ add_format_like_completions(acc, ctx, &dot_receiver, cap, &literal_text);
+ }
+ }
+}
+
+fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {
+ let text = if receiver_is_ambiguous_float_literal {
+ let text = receiver.syntax().text();
+ let without_dot = ..text.len() - TextSize::of('.');
+ text.slice(without_dot).to_string()
+ } else {
+ receiver.to_string()
+ };
+
+ // The receiver texts should be interpreted as-is, as they are expected to be
+ // normal Rust expressions. We escape '\' and '$' so they don't get treated as
+ // snippet-specific constructs.
+ //
+ // Note that we don't need to escape the other characters that can be escaped,
+ // because they wouldn't be treated as snippet-specific constructs without '$'.
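+ // For example (see `custom_postfix_completion` in the tests below), a receiver
+ // written as `'\\'` becomes `'\\\\'` and `"\$"` becomes `"\\\$"` in the snippet text.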
+ text.replace('\\', "\\\\").replace('$', "\\$")
+}
+
+fn include_references(initial_element: &ast::Expr) -> ast::Expr {
+ let mut resulting_element = initial_element.clone();
+ while let Some(parent_ref_element) =
+ resulting_element.syntax().parent().and_then(ast::RefExpr::cast)
+ {
+ resulting_element = ast::Expr::from(parent_ref_element);
+ }
+ resulting_element
+}
+
+fn build_postfix_snippet_builder<'ctx>(
+ ctx: &'ctx CompletionContext<'_>,
+ cap: SnippetCap,
+ receiver: &'ctx ast::Expr,
+) -> Option<impl Fn(&str, &str, &str) -> Builder + 'ctx> {
+ let receiver_syntax = receiver.syntax();
+ let receiver_range = ctx.sema.original_range_opt(receiver_syntax)?.range;
+ if ctx.source_range().end() < receiver_range.start() {
+ // This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping.
+ return None;
+ }
+ let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end());
+
+ // Wrapping impl Fn in an option ruins lifetime inference for the parameters in a way that
+ // can't be annotated for the closure, hence fix it by constructing it without the Option first
+ fn build<'ctx>(
+ ctx: &'ctx CompletionContext<'_>,
+ cap: SnippetCap,
+ delete_range: TextRange,
+ ) -> impl Fn(&str, &str, &str) -> Builder + 'ctx {
+ move |label, detail, snippet| {
+ let edit = TextEdit::replace(delete_range, snippet.to_string());
+ let mut item =
+ CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label);
+ item.detail(detail).snippet_edit(cap, edit);
+ let postfix_match = if ctx.original_token.text() == label {
+ cov_mark::hit!(postfix_exact_match_is_high_priority);
+ Some(CompletionRelevancePostfixMatch::Exact)
+ } else {
+ cov_mark::hit!(postfix_inexact_match_is_low_priority);
+ Some(CompletionRelevancePostfixMatch::NonExact)
+ };
+ let relevance = CompletionRelevance { postfix_match, ..Default::default() };
+ item.set_relevance(relevance);
+ item
+ }
+ }
+ Some(build(ctx, cap, delete_range))
+}
+
+fn add_custom_postfix_completions(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ postfix_snippet: impl Fn(&str, &str, &str) -> Builder,
+ receiver_text: &str,
+) -> Option<()> {
+ if ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema).is_none() {
+ return None;
+ }
+ ctx.config.postfix_snippets().filter(|(_, snip)| snip.scope == SnippetScope::Expr).for_each(
+ |(trigger, snippet)| {
+ let imports = match snippet.imports(ctx) {
+ Some(imports) => imports,
+ None => return,
+ };
+ let body = snippet.postfix_snippet(receiver_text);
+ let mut builder =
+ postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
+ builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
+ for import in imports.into_iter() {
+ builder.add_import(import);
+ }
+ builder.add_to(acc);
+ },
+ );
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::{
+ tests::{check_edit, check_edit_with_config, completion_list, TEST_CONFIG},
+ CompletionConfig, Snippet,
+ };
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn postfix_completion_works_for_trivial_path_expression() {
+ check(
+ r#"
+fn main() {
+ let bar = true;
+ bar.$0
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn while while expr {}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_completion_works_for_function_call() {
+ check(
+ r#"
+fn foo(elt: bool) -> bool {
+ !elt
+}
+
+fn main() {
+ let bar = true;
+ foo(bar.$0)
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn while while expr {}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_type_filtering() {
+ check(
+ r#"
+fn main() {
+ let bar: u8 = 12;
+ bar.$0
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+ }
+
+ #[test]
+ fn let_middle_block() {
+ check(
+ r#"
+fn main() {
+ baz.l$0
+ res
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn while while expr {}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn option_iflet() {
+ check_edit(
+ "ifl",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Some(true);
+ if let Some($1) = bar {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn result_match() {
+ check_edit(
+ "match",
+ r#"
+//- minicore: result
+fn main() {
+ let bar = Ok(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Ok(true);
+ match bar {
+ Ok(${1:_}) => {$2},
+ Err(${3:_}) => {$0},
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn postfix_completion_works_for_ambiguous_float_literal() {
+ check_edit("refm", r#"fn main() { 42.$0 }"#, r#"fn main() { &mut 42 }"#)
+ }
+
+ #[test]
+ fn works_in_simple_macro() {
+ check_edit(
+ "dbg",
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn main() {
+ let bar: u8 = 12;
+ m!(bar.d$0)
+}
+"#,
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn main() {
+ let bar: u8 = 12;
+ m!(dbg!(bar))
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn postfix_completion_for_references() {
+ check_edit("dbg", r#"fn main() { &&42.$0 }"#, r#"fn main() { dbg!(&&42) }"#);
+ check_edit("refm", r#"fn main() { &&42.$0 }"#, r#"fn main() { &&&mut 42 }"#);
+ check_edit(
+ "ifl",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = &Some(true);
+ bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = &Some(true);
+ if let Some($1) = bar {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn custom_postfix_completion() {
+ let config = CompletionConfig {
+ snippets: vec![Snippet::new(
+ &[],
+ &["break".into()],
+ &["ControlFlow::Break(${receiver})".into()],
+ "",
+ &["core::ops::ControlFlow".into()],
+ crate::SnippetScope::Expr,
+ )
+ .unwrap()],
+ ..TEST_CONFIG
+ };
+
+ check_edit_with_config(
+ config.clone(),
+ "break",
+ r#"
+//- minicore: try
+fn main() { 42.$0 }
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() { ControlFlow::Break(42) }
+"#,
+ );
+
+ // The receiver texts should be escaped, see comments in `get_receiver_text()`
+ // for detail.
+ //
+ // Note that the last argument is what *lsp clients would see* rather than
+ // what users would see. Unescaping happens thereafter.
+ check_edit_with_config(
+ config.clone(),
+ "break",
+ r#"
+//- minicore: try
+fn main() { '\\'.$0 }
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() { ControlFlow::Break('\\\\') }
+"#,
+ );
+
+ check_edit_with_config(
+ config.clone(),
+ "break",
+ r#"
+//- minicore: try
+fn main() {
+ match true {
+ true => "${1:placeholder}",
+ false => "\$",
+ }.$0
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() {
+ ControlFlow::Break(match true {
+ true => "\${1:placeholder}",
+ false => "\\\$",
+ })
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn postfix_completion_for_format_like_strings() {
+ check_edit(
+ "format",
+ r#"fn main() { "{some_var:?}".$0 }"#,
+ r#"fn main() { format!("{:?}", some_var) }"#,
+ );
+ check_edit(
+ "panic",
+ r#"fn main() { "Panic with {a}".$0 }"#,
+ r#"fn main() { panic!("Panic with {}", a) }"#,
+ );
+ check_edit(
+ "println",
+ r#"fn main() { "{ 2+2 } { SomeStruct { val: 1, other: 32 } :?}".$0 }"#,
+ r#"fn main() { println!("{} {:?}", 2+2, SomeStruct { val: 1, other: 32 }) }"#,
+ );
+ check_edit(
+ "loge",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::error!("{}", 2+2) }"#,
+ );
+ check_edit(
+ "logt",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::trace!("{}", 2+2) }"#,
+ );
+ check_edit(
+ "logd",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::debug!("{}", 2+2) }"#,
+ );
+ check_edit("logi", r#"fn main() { "{2+2}".$0 }"#, r#"fn main() { log::info!("{}", 2+2) }"#);
+ check_edit("logw", r#"fn main() { "{2+2}".$0 }"#, r#"fn main() { log::warn!("{}", 2+2) }"#);
+ check_edit(
+ "loge",
+ r#"fn main() { "{2+2}".$0 }"#,
+ r#"fn main() { log::error!("{}", 2+2) }"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
new file mode 100644
index 000000000..6b94347e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -0,0 +1,311 @@
+// Feature: Format String Completion
+//
+// `"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`.
+//
+// The following postfix snippets are available:
+//
+// * `format` -> `format!(...)`
+// * `panic` -> `panic!(...)`
+// * `println` -> `println!(...)`
+// * `log`:
+// ** `logd` -> `log::debug!(...)`
+// ** `logt` -> `log::trace!(...)`
+// ** `logi` -> `log::info!(...)`
+// ** `logw` -> `log::warn!(...)`
+// ** `loge` -> `log::error!(...)`
+//
+// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[]
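+//
+// For example, `"{2 + 2}".println$0` expands to `println!("{}", 2 + 2)`.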
+
+use ide_db::SnippetCap;
+use syntax::ast::{self, AstToken};
+
+use crate::{
+ completions::postfix::build_postfix_snippet_builder, context::CompletionContext, Completions,
+};
+
+/// Mapping ("postfix completion item" => "macro to use")
+static KINDS: &[(&str, &str)] = &[
+ ("format", "format!"),
+ ("panic", "panic!"),
+ ("println", "println!"),
+ ("eprintln", "eprintln!"),
+ ("logd", "log::debug!"),
+ ("logt", "log::trace!"),
+ ("logi", "log::info!"),
+ ("logw", "log::warn!"),
+ ("loge", "log::error!"),
+];
+
+pub(crate) fn add_format_like_completions(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ dot_receiver: &ast::Expr,
+ cap: SnippetCap,
+ receiver_text: &ast::String,
+) {
+ let input = match string_literal_contents(receiver_text) {
+ Some(input) => input,
+ // It's not a string literal; do not parse the input.
+ None => return,
+ };
+
+ let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, dot_receiver) {
+ Some(it) => it,
+ None => return,
+ };
+ let mut parser = FormatStrParser::new(input);
+
+ if parser.parse().is_ok() {
+ for (label, macro_name) in KINDS {
+ let snippet = parser.to_suggestion(macro_name);
+
+ postfix_snippet(label, macro_name, &snippet).add_to(acc);
+ }
+ }
+}
+
+/// Checks whether provided item is a string literal.
+fn string_literal_contents(item: &ast::String) -> Option<String> {
+ let item = item.text();
+ if item.len() >= 2 && item.starts_with('\"') && item.ends_with('\"') {
+ return Some(item[1..item.len() - 1].to_owned());
+ }
+
+ None
+}
+
+/// Parser for a format-like string. It is more permissive about the string contents,
+/// since we expect variable placeholders to be filled with expressions.
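+///
+/// A minimal usage sketch (mirroring the tests below):
+///
+/// ```text
+/// let mut parser = FormatStrParser::new("{expr} is {2 + 2}".to_owned());
+/// assert!(parser.parse().is_ok());
+/// assert_eq!(parser.to_suggestion("format!"), r#"format!("{} is {}", expr, 2 + 2)"#);
+/// ```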
+#[derive(Debug)]
+pub(crate) struct FormatStrParser {
+ input: String,
+ output: String,
+ extracted_expressions: Vec<String>,
+ state: State,
+ parsed: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+enum State {
+ NotExpr,
+ MaybeExpr,
+ Expr,
+ MaybeIncorrect,
+ FormatOpts,
+}
+
+impl FormatStrParser {
+ pub(crate) fn new(input: String) -> Self {
+ Self {
+ input,
+ output: String::new(),
+ extracted_expressions: Vec::new(),
+ state: State::NotExpr,
+ parsed: false,
+ }
+ }
+
+ pub(crate) fn parse(&mut self) -> Result<(), ()> {
+ let mut current_expr = String::new();
+
+ let mut placeholder_id = 1;
+
+ // Count of open braces inside of an expression.
+ // We assume that the user knows what they're doing, thus we treat it like a correct pattern, e.g.
+ // "{MyStruct { val_a: 0, val_b: 1 }}".
+ let mut inexpr_open_count = 0;
+
+ // We need to escape '\' and '$'. See the comments on `get_receiver_text()` for detail.
+ let mut chars = self.input.chars().peekable();
+ while let Some(chr) = chars.next() {
+ match (self.state, chr) {
+ (State::NotExpr, '{') => {
+ self.output.push(chr);
+ self.state = State::MaybeExpr;
+ }
+ (State::NotExpr, '}') => {
+ self.output.push(chr);
+ self.state = State::MaybeIncorrect;
+ }
+ (State::NotExpr, _) => {
+ if matches!(chr, '\\' | '$') {
+ self.output.push('\\');
+ }
+ self.output.push(chr);
+ }
+ (State::MaybeIncorrect, '}') => {
+ // It's okay, we met "}}".
+ self.output.push(chr);
+ self.state = State::NotExpr;
+ }
+ (State::MaybeIncorrect, _) => {
+ // Error in the string.
+ return Err(());
+ }
+ (State::MaybeExpr, '{') => {
+ self.output.push(chr);
+ self.state = State::NotExpr;
+ }
+ (State::MaybeExpr, '}') => {
+ // This is an empty sequence '{}'. Replace it with placeholder.
+ self.output.push(chr);
+ self.extracted_expressions.push(format!("${}", placeholder_id));
+ placeholder_id += 1;
+ self.state = State::NotExpr;
+ }
+ (State::MaybeExpr, _) => {
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+ self.state = State::Expr;
+ }
+ (State::Expr, '}') => {
+ if inexpr_open_count == 0 {
+ self.output.push(chr);
+ self.extracted_expressions.push(current_expr.trim().into());
+ current_expr = String::new();
+ self.state = State::NotExpr;
+ } else {
+ // We're closing one brace met before inside of the expression.
+ current_expr.push(chr);
+ inexpr_open_count -= 1;
+ }
+ }
+ (State::Expr, ':') if chars.peek().copied() == Some(':') => {
+ // path separator
+ current_expr.push_str("::");
+ chars.next();
+ }
+ (State::Expr, ':') => {
+ if inexpr_open_count == 0 {
+ // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}"
+ self.output.push(chr);
+ self.extracted_expressions.push(current_expr.trim().into());
+ current_expr = String::new();
+ self.state = State::FormatOpts;
+ } else {
+ // We're inside a braced expression, so assume that it's a struct field name/value delimiter.
+ current_expr.push(chr);
+ }
+ }
+ (State::Expr, '{') => {
+ current_expr.push(chr);
+ inexpr_open_count += 1;
+ }
+ (State::Expr, _) => {
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+ }
+ (State::FormatOpts, '}') => {
+ self.output.push(chr);
+ self.state = State::NotExpr;
+ }
+ (State::FormatOpts, _) => {
+ if matches!(chr, '\\' | '$') {
+ self.output.push('\\');
+ }
+ self.output.push(chr);
+ }
+ }
+ }
+
+ if self.state != State::NotExpr {
+ return Err(());
+ }
+
+ self.parsed = true;
+ Ok(())
+ }
+
+ pub(crate) fn to_suggestion(&self, macro_name: &str) -> String {
+ assert!(self.parsed, "Attempt to get a suggestion from not parsed expression");
+
+ let expressions_as_string = self.extracted_expressions.join(", ");
+ format!(r#"{}("{}", {})"#, macro_name, self.output, expressions_as_string)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use expect_test::{expect, Expect};
+
+ fn check(input: &str, expect: &Expect) {
+ let mut parser = FormatStrParser::new((*input).to_owned());
+ let outcome_repr = if parser.parse().is_ok() {
+ // Parsing should be OK, expected repr is "string; expr_1, expr_2".
+ if parser.extracted_expressions.is_empty() {
+ parser.output
+ } else {
+ format!("{}; {}", parser.output, parser.extracted_expressions.join(", "))
+ }
+ } else {
+ // Parsing should fail, expected repr is "-".
+ "-".to_owned()
+ };
+
+ expect.assert_eq(&outcome_repr);
+ }
+
+ #[test]
+ fn format_str_parser() {
+ let test_vector = &[
+ ("no expressions", expect![["no expressions"]]),
+ (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
+ ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]),
+ ("{expr:?}", expect![["{:?}; expr"]]),
+ ("{expr:1$}", expect![[r"{:1\$}; expr"]]),
+ ("{$0}", expect![[r"{}; \$0"]]),
+ ("{malformed", expect![["-"]]),
+ ("malformed}", expect![["-"]]),
+ ("{{correct", expect![["{{correct"]]),
+ ("correct}}", expect![["correct}}"]]),
+ ("{correct}}}", expect![["{}}}; correct"]]),
+ ("{correct}}}}}", expect![["{}}}}}; correct"]]),
+ ("{incorrect}}", expect![["-"]]),
+ ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]),
+ ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }}",
+ expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }:?}",
+ expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
+ ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
+ ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
+ ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
+ ];
+
+ for (input, output) in test_vector {
+ check(input, output)
+ }
+ }
+
+ #[test]
+ fn test_into_suggestion() {
+ let test_vector = &[
+ ("println!", "{}", r#"println!("{}", $1)"#),
+ ("eprintln!", "{}", r#"eprintln!("{}", $1)"#),
+ (
+ "log::info!",
+ "{} {expr} {} {2 + 2}",
+ r#"log::info!("{} {} {} {}", $1, expr, $2, 2 + 2)"#,
+ ),
+ ("format!", "{expr:?}", r#"format!("{:?}", expr)"#),
+ ];
+
+ for (kind, input, output) in test_vector {
+ let mut parser = FormatStrParser::new((*input).to_owned());
+ parser.parse().expect("Parsing must succeed");
+
+ assert_eq!(&parser.to_suggestion(*kind), output);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
new file mode 100644
index 000000000..1c9042390
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -0,0 +1,369 @@
+//! Complete fields in record literals and patterns.
+use ide_db::SymbolKind;
+use syntax::ast::{self, Expr};
+
+use crate::{
+ context::{DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, PatternContext, Qualified},
+ CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
+ CompletionRelevancePostfixMatch, Completions,
+};
+
+pub(crate) fn complete_record_pattern_fields(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ pattern_ctx: &PatternContext,
+) {
+ if let PatternContext { record_pat: Some(record_pat), .. } = pattern_ctx {
+ complete_fields(acc, ctx, ctx.sema.record_pattern_missing_fields(record_pat));
+ }
+}
+
+pub(crate) fn complete_record_expr_fields(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ record_expr: &ast::RecordExpr,
+ &dot_prefix: &bool,
+) {
+ let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone()));
+
+ let missing_fields = match ty.as_ref().and_then(|t| t.original.as_adt()) {
+ Some(hir::Adt::Union(un)) => {
+ // ctx.sema.record_literal_missing_fields will always return
+ // an empty Vec on a union literal. This is normally
+ // reasonable, but here we'd like to present the full list
+ // of fields if the literal is empty.
+ let were_fields_specified =
+ record_expr.record_expr_field_list().and_then(|fl| fl.fields().next()).is_some();
+
+ match were_fields_specified {
+ false => un.fields(ctx.db).into_iter().map(|f| (f, f.ty(ctx.db))).collect(),
+ true => return,
+ }
+ }
+ _ => {
+ let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+ add_default_update(acc, ctx, ty, &missing_fields);
+ if dot_prefix {
+ let mut item =
+ CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), "..");
+ item.insert_text(".");
+ item.add_to(acc);
+ return;
+ }
+ missing_fields
+ }
+ };
+ complete_fields(acc, ctx, missing_fields);
+}
+
+// FIXME: This should probably be part of complete_path_expr
+pub(crate) fn complete_record_expr_func_update(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ expr_ctx: &ExprCtx,
+) {
+ if !matches!(path_ctx.qualified, Qualified::No) {
+ return;
+ }
+ if let ExprCtx { is_func_update: Some(record_expr), .. } = expr_ctx {
+ let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone()));
+
+ match ty.as_ref().and_then(|t| t.original.as_adt()) {
+ Some(hir::Adt::Union(_)) => (),
+ _ => {
+ let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
+ add_default_update(acc, ctx, ty, &missing_fields);
+ }
+ };
+ }
+}
+
+fn add_default_update(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ ty: Option<hir::TypeInfo>,
+ missing_fields: &[(hir::Field, hir::Type)],
+) {
+ let default_trait = ctx.famous_defs().core_default_Default();
+ let impl_default_trait = default_trait
+ .zip(ty.as_ref())
+ .map_or(false, |(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[]));
+ if impl_default_trait && !missing_fields.is_empty() {
+ // FIXME: This should make use of scope_def like completions so we get all the other goodies
+ let completion_text = "..Default::default()";
+ let mut item = CompletionItem::new(SymbolKind::Field, ctx.source_range(), completion_text);
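+ // Strip whatever the user has already typed (e.g. a leading `.` or `..`) from the
+ // inserted text so the edit does not duplicate it.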
+ let completion_text =
+ completion_text.strip_prefix(ctx.token.text()).unwrap_or(completion_text);
+ item.insert_text(completion_text).set_relevance(CompletionRelevance {
+ postfix_match: Some(CompletionRelevancePostfixMatch::Exact),
+ ..Default::default()
+ });
+ item.add_to(acc);
+ }
+}
+
+fn complete_fields(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ missing_fields: Vec<(hir::Field, hir::Type)>,
+) {
+ for (field, ty) in missing_fields {
+ acc.add_field(
+ ctx,
+ &DotAccess {
+ receiver: None,
+ receiver_ty: None,
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false },
+ },
+ None,
+ field,
+ &ty,
+ );
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_edit;
+
+ #[test]
+ fn literal_struct_completion_edit() {
+ check_edit(
+ "FooDesc {…}",
+ r#"
+struct FooDesc { pub bar: bool }
+
+fn create_foo(foo_desc: &FooDesc) -> () { () }
+
+fn baz() {
+ let foo = create_foo(&$0);
+}
+ "#,
+ r#"
+struct FooDesc { pub bar: bool }
+
+fn create_foo(foo_desc: &FooDesc) -> () { () }
+
+fn baz() {
+ let foo = create_foo(&FooDesc { bar: ${1:()} }$0);
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn literal_struct_impl_self_completion() {
+ check_edit(
+ "Self {…}",
+ r#"
+struct Foo {
+ bar: u64,
+}
+
+impl Foo {
+ fn new() -> Foo {
+ Self$0
+ }
+}
+ "#,
+ r#"
+struct Foo {
+ bar: u64,
+}
+
+impl Foo {
+ fn new() -> Foo {
+ Self { bar: ${1:()} }$0
+ }
+}
+ "#,
+ );
+
+ check_edit(
+ "Self(…)",
+ r#"
+mod submod {
+ pub struct Foo(pub u64);
+}
+
+impl submod::Foo {
+ fn new() -> submod::Foo {
+ Self$0
+ }
+}
+ "#,
+ r#"
+mod submod {
+ pub struct Foo(pub u64);
+}
+
+impl submod::Foo {
+ fn new() -> submod::Foo {
+ Self(${1:()})$0
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn literal_struct_completion_from_sub_modules() {
+ check_edit(
+ "submod::Struct {…}",
+ r#"
+mod submod {
+ pub struct Struct {
+ pub a: u64,
+ }
+}
+
+fn f() -> submod::Struct {
+ Stru$0
+}
+ "#,
+ r#"
+mod submod {
+ pub struct Struct {
+ pub a: u64,
+ }
+}
+
+fn f() -> submod::Struct {
+ submod::Struct { a: ${1:()} }$0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn literal_struct_completion_module() {
+ check_edit(
+ "FooDesc {…}",
+ r#"
+mod _69latrick {
+ pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
+ pub fn create_foo(foo_desc: &FooDesc) -> () { () }
+}
+
+fn baz() {
+ use _69latrick::*;
+
+ let foo = create_foo(&$0);
+}
+ "#,
+ r#"
+mod _69latrick {
+ pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, pub bar: bool }
+ pub fn create_foo(foo_desc: &FooDesc) -> () { () }
+}
+
+fn baz() {
+ use _69latrick::*;
+
+ let foo = create_foo(&FooDesc { six: ${1:()}, neuf: ${2:()}, bar: ${3:()} }$0);
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn default_completion_edit() {
+ check_edit(
+ "..Default::default()",
+ r#"
+//- minicore: default
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+ fn default() -> Self {}
+}
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ .$0
+ };
+}
+"#,
+ r#"
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+ fn default() -> Self {}
+}
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ ..Default::default()
+ };
+}
+"#,
+ );
+ check_edit(
+ "..Default::default()",
+ r#"
+//- minicore: default
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+ fn default() -> Self {}
+}
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ $0
+ };
+}
+"#,
+ r#"
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+ fn default() -> Self {}
+}
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ ..Default::default()
+ };
+}
+"#,
+ );
+ check_edit(
+ "..Default::default()",
+ r#"
+//- minicore: default
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+ fn default() -> Self {}
+}
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ ..$0
+ };
+}
+"#,
+ r#"
+struct Struct { foo: u32, bar: usize }
+
+impl Default for Struct {
+ fn default() -> Self {}
+}
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ ..Default::default()
+ };
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
new file mode 100644
index 000000000..66adb4286
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
@@ -0,0 +1,189 @@
+//! This file provides snippet completions, like `pd` => `eprintln!(...)`.
+
+use hir::Documentation;
+use ide_db::{imports::insert_use::ImportScope, SnippetCap};
+
+use crate::{
+ context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified},
+ item::Builder,
+ CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope,
+};
+
+pub(crate) fn complete_expr_snippet(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ &ExprCtx { in_block_expr, .. }: &ExprCtx,
+) {
+ if !matches!(path_ctx.qualified, Qualified::No) {
+ return;
+ }
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+
+ let cap = match ctx.config.snippet_cap {
+ Some(it) => it,
+ None => return,
+ };
+
+ if !ctx.config.snippets.is_empty() {
+ add_custom_completions(acc, ctx, cap, SnippetScope::Expr);
+ }
+
+ if in_block_expr {
+ snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc);
+ snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc);
+ let item = snippet(
+ ctx,
+ cap,
+ "macro_rules",
+ "\
+macro_rules! $1 {
+ ($2) => {
+ $0
+ };
+}",
+ );
+ item.add_to(acc);
+ }
+}
+
+pub(crate) fn complete_item_snippet(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ kind: &ItemListKind,
+) {
+ if !matches!(path_ctx.qualified, Qualified::No) {
+ return;
+ }
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+ let cap = match ctx.config.snippet_cap {
+ Some(it) => it,
+ None => return,
+ };
+
+ if !ctx.config.snippets.is_empty() {
+ add_custom_completions(acc, ctx, cap, SnippetScope::Item);
+ }
+
+ // Test-related snippets shouldn't be shown in blocks.
+ if let ItemListKind::SourceFile | ItemListKind::Module = kind {
+ let mut item = snippet(
+ ctx,
+ cap,
+ "tmod (Test module)",
+ "\
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn ${1:test_name}() {
+ $0
+ }
+}",
+ );
+ item.lookup_by("tmod");
+ item.add_to(acc);
+
+ let mut item = snippet(
+ ctx,
+ cap,
+ "tfn (Test function)",
+ "\
+#[test]
+fn ${1:feature}() {
+ $0
+}",
+ );
+ item.lookup_by("tfn");
+ item.add_to(acc);
+
+ let item = snippet(
+ ctx,
+ cap,
+ "macro_rules",
+ "\
+macro_rules! $1 {
+ ($2) => {
+ $0
+ };
+}",
+ );
+ item.add_to(acc);
+ }
+}
+
+fn snippet(ctx: &CompletionContext<'_>, cap: SnippetCap, label: &str, snippet: &str) -> Builder {
+ let mut item = CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label);
+ item.insert_snippet(cap, snippet);
+ item
+}
+
+fn add_custom_completions(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ cap: SnippetCap,
+ scope: SnippetScope,
+) -> Option<()> {
+ if ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema).is_none() {
+ return None;
+ }
+ ctx.config.prefix_snippets().filter(|(_, snip)| snip.scope == scope).for_each(
+ |(trigger, snip)| {
+ let imports = match snip.imports(ctx) {
+ Some(imports) => imports,
+ None => return,
+ };
+ let body = snip.snippet();
+ let mut builder = snippet(ctx, cap, trigger, &body);
+ builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
+ for import in imports.into_iter() {
+ builder.add_import(import);
+ }
+ builder.set_detail(snip.description.clone());
+ builder.add_to(acc);
+ },
+ );
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_edit_with_config, TEST_CONFIG},
+ CompletionConfig, Snippet,
+ };
+
+ #[test]
+ fn custom_snippet_completion() {
+ check_edit_with_config(
+ CompletionConfig {
+ snippets: vec![Snippet::new(
+ &["break".into()],
+ &[],
+ &["ControlFlow::Break(())".into()],
+ "",
+ &["core::ops::ControlFlow".into()],
+ crate::SnippetScope::Expr,
+ )
+ .unwrap()],
+ ..TEST_CONFIG
+ },
+ "break",
+ r#"
+//- minicore: try
+fn main() { $0 }
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn main() { ControlFlow::Break(()) }
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
new file mode 100644
index 000000000..8f9db2f94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
@@ -0,0 +1,246 @@
+//! Completion of names from the current scope in type position.
+
+use hir::{HirDisplay, ScopeDef};
+use syntax::{ast, AstNode, SyntaxKind};
+
+use crate::{
+ context::{PathCompletionCtx, Qualified, TypeAscriptionTarget, TypeLocation},
+ render::render_type_inference,
+ CompletionContext, Completions,
+};
+
+pub(crate) fn complete_type_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ location: &TypeLocation,
+) {
+ let _p = profile::span("complete_type_path");
+
+ let scope_def_applicable = |def| {
+ use hir::{GenericParam::*, ModuleDef::*};
+ match def {
+ ScopeDef::GenericParam(LifetimeParam(_)) | ScopeDef::Label(_) => false,
+ // no values in type places
+ ScopeDef::ModuleDef(Function(_) | Variant(_) | Static(_)) | ScopeDef::Local(_) => false,
+ // unless it's a constant in a generic arg list position
+ ScopeDef::ModuleDef(Const(_)) | ScopeDef::GenericParam(ConstParam(_)) => {
+ matches!(location, TypeLocation::GenericArgList(_))
+ }
+ ScopeDef::ImplSelfType(_) => {
+ !matches!(location, TypeLocation::ImplTarget | TypeLocation::ImplTrait)
+ }
+ // Don't suggest attribute macros and derives.
+ ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db),
+ // Type things are fine
+ ScopeDef::ModuleDef(BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TypeAlias(_))
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Unknown
+ | ScopeDef::GenericParam(TypeParam(_)) => true,
+ }
+ };
+
+ let add_assoc_item = |acc: &mut Completions, item| match item {
+ hir::AssocItem::Const(ct) if matches!(location, TypeLocation::GenericArgList(_)) => {
+ acc.add_const(ctx, ct)
+ }
+ hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => (),
+ hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
+ };
+
+ match qualified {
+ Qualified::TypeAnchor { ty: None, trait_: None } => ctx
+ .traits_in_scope()
+ .iter()
+ .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db))
+ .for_each(|item| add_assoc_item(acc, item)),
+ Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
+ trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
+ }
+ Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+
+ // Iterate assoc types separately
+ ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ if let hir::AssocItem::TypeAlias(ty) = item {
+ acc.add_type_alias(ctx, ty)
+ }
+ None::<()>
+ });
+ }
+ Qualified::With { resolution: None, .. } => {}
+ Qualified::With { resolution: Some(resolution), .. } => {
+ // Add associated types on type parameters and `Self`.
+ ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| {
+ acc.add_type_alias(ctx, alias);
+ None::<()>
+ });
+
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+ let module_scope = module.scope(ctx.db, Some(ctx.module));
+ for (name, def) in module_scope {
+ if scope_def_applicable(def) {
+ acc.add_path_resolution(ctx, path_ctx, name, def);
+ }
+ }
+ }
+ hir::PathResolution::Def(
+ def @ (hir::ModuleDef::Adt(_)
+ | hir::ModuleDef::TypeAlias(_)
+ | hir::ModuleDef::BuiltinType(_)),
+ ) => {
+ let ty = match def {
+ hir::ModuleDef::Adt(adt) => adt.ty(ctx.db),
+ hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
+ hir::ModuleDef::BuiltinType(builtin) => builtin.ty(ctx.db),
+ _ => return,
+ };
+
+ // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
+ // (where AssocType is defined on a trait, not an inherent impl)
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+
+ // Iterate assoc types separately
+ ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ if let hir::AssocItem::TypeAlias(ty) = item {
+ acc.add_type_alias(ctx, ty)
+ }
+ None::<()>
+ });
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Trait(t)) => {
+ // Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`.
+ for item in t.items(ctx.db) {
+ add_assoc_item(acc, item);
+ }
+ }
+ hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_) => {
+ let ty = match resolution {
+ hir::PathResolution::TypeParam(param) => param.ty(ctx.db),
+ hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db),
+ _ => return,
+ };
+
+ ctx.iterate_path_candidates(&ty, |item| {
+ add_assoc_item(acc, item);
+ });
+ }
+ _ => (),
+ }
+ }
+ Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
+ Qualified::No => {
+ match location {
+ TypeLocation::TypeBound => {
+ acc.add_nameref_keywords_with_colon(ctx);
+ ctx.process_all_names(&mut |name, res| {
+ let add_resolution = match res {
+ ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => {
+ mac.is_fn_like(ctx.db)
+ }
+ ScopeDef::ModuleDef(
+ hir::ModuleDef::Trait(_) | hir::ModuleDef::Module(_),
+ ) => true,
+ _ => false,
+ };
+ if add_resolution {
+ acc.add_path_resolution(ctx, path_ctx, name, res);
+ }
+ });
+ return;
+ }
+ TypeLocation::GenericArgList(Some(arg_list)) => {
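+ // Inside `Trait<...>` in a bound position: once all required generic arguments are
+ // written, also offer the trait's associated types as `Name = ` bindings.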
+ let in_assoc_type_arg = ctx
+ .original_token
+ .parent_ancestors()
+ .any(|node| node.kind() == SyntaxKind::ASSOC_TYPE_ARG);
+
+ if !in_assoc_type_arg {
+ if let Some(path_seg) =
+ arg_list.syntax().parent().and_then(ast::PathSegment::cast)
+ {
+ if path_seg
+ .syntax()
+ .ancestors()
+ .find_map(ast::TypeBound::cast)
+ .is_some()
+ {
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Trait(
+ trait_,
+ ))) = ctx.sema.resolve_path(&path_seg.parent_path())
+ {
+ let arg_idx = arg_list
+ .generic_args()
+ .filter(|arg| {
+ arg.syntax().text_range().end()
+ < ctx.original_token.text_range().start()
+ })
+ .count();
+
+ let n_required_params =
+ trait_.type_or_const_param_count(ctx.sema.db, true);
+ if arg_idx >= n_required_params {
+ trait_
+ .items_with_supertraits(ctx.sema.db)
+ .into_iter()
+ .for_each(|it| {
+ if let hir::AssocItem::TypeAlias(alias) = it {
+ cov_mark::hit!(
+ complete_assoc_type_in_generics_list
+ );
+ acc.add_type_alias_with_eq(ctx, alias);
+ }
+ });
+
+ let n_params =
+ trait_.type_or_const_param_count(ctx.sema.db, false);
+ if arg_idx >= n_params {
+ return; // only show assoc types
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ _ => {}
+ };
+
+ acc.add_nameref_keywords_with_colon(ctx);
+ ctx.process_all_names(&mut |name, def| {
+ if scope_def_applicable(def) {
+ acc.add_path_resolution(ctx, path_ctx, name, def);
+ }
+ });
+ }
+ }
+}
+
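+/// Completes the inferred type at a type-ascription position, e.g. suggesting the
+/// inferred type (`i32`) for `let x: $0 = 92;`.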
+pub(crate) fn complete_ascribed_type(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ ascription: &TypeAscriptionTarget,
+) -> Option<()> {
+ if !path_ctx.is_trivial_path() {
+ return None;
+ }
+ let x = match ascription {
+ TypeAscriptionTarget::Let(pat) | TypeAscriptionTarget::FnParam(pat) => {
+ ctx.sema.type_of_pat(pat.as_ref()?)
+ }
+ TypeAscriptionTarget::Const(exp) | TypeAscriptionTarget::RetType(exp) => {
+ ctx.sema.type_of_expr(exp.as_ref()?)
+ }
+ }?
+ .adjusted();
+ let ty_string = x.display_source_code(ctx.db, ctx.module.into()).ok()?;
+ acc.add(render_type_inference(ty_string, ctx));
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
new file mode 100644
index 000000000..2555c34aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
@@ -0,0 +1,120 @@
+//! Completion for use trees
+
+use hir::ScopeDef;
+use ide_db::{FxHashSet, SymbolKind};
+use syntax::{ast, AstNode};
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx, Qualified},
+ item::Builder,
+ CompletionItem, CompletionItemKind, CompletionRelevance, Completions,
+};
+
+pub(crate) fn complete_use_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx,
+ name_ref: &Option<ast::NameRef>,
+) {
+ match qualified {
+ Qualified::With { path, resolution: Some(resolution), super_chain_len } => {
+ acc.add_super_keyword(ctx, *super_chain_len);
+
+ // only show `self` in a new use-tree when the qualifier doesn't end in self
+ let not_preceded_by_self = *use_tree_parent
+ && !matches!(
+ path.segment().and_then(|it| it.kind()),
+ Some(ast::PathSegmentKind::SelfKw)
+ );
+ if not_preceded_by_self {
+ acc.add_keyword(ctx, "self");
+ }
+
+ let mut already_imported_names = FxHashSet::default();
+ if let Some(list) = ctx.token.parent_ancestors().find_map(ast::UseTreeList::cast) {
+ let use_tree = list.parent_use_tree();
+ if use_tree.path().as_ref() == Some(path) {
+ for tree in list.use_trees().filter(|tree| tree.is_simple_path()) {
+ if let Some(name) = tree.path().and_then(|path| path.as_single_name_ref()) {
+ already_imported_names.insert(name.to_string());
+ }
+ }
+ }
+ }
+
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
+ let module_scope = module.scope(ctx.db, Some(ctx.module));
+ let unknown_is_current = |name: &hir::Name| {
+ matches!(
+ name_ref,
+ Some(name_ref) if name_ref.syntax().text() == name.to_smol_str().as_str()
+ )
+ };
+ for (name, def) in module_scope {
+ let is_name_already_imported = name
+ .as_text()
+ .map_or(false, |text| already_imported_names.contains(text.as_str()));
+
+ let add_resolution = match def {
+ ScopeDef::Unknown if unknown_is_current(&name) => {
+ // for `use self::foo$0`, don't suggest `foo` as a completion
+ cov_mark::hit!(dont_complete_current_use);
+ continue;
+ }
+ ScopeDef::ModuleDef(_) | ScopeDef::Unknown => true,
+ _ => false,
+ };
+
+ if add_resolution {
+ let mut builder = Builder::from_resolution(ctx, path_ctx, name, def);
+ builder.set_relevance(CompletionRelevance {
+ is_name_already_imported,
+ ..Default::default()
+ });
+ acc.add(builder.build());
+ }
+ }
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e))) => {
+ cov_mark::hit!(enum_plain_qualified_use_tree);
+ acc.add_enum_variants(ctx, path_ctx, *e);
+ }
+ _ => {}
+ }
+ }
+ // fresh use tree with leading colon2, only show crate roots
+ Qualified::Absolute => {
+ cov_mark::hit!(use_tree_crate_roots_only);
+ acc.add_crate_roots(ctx, path_ctx);
+ }
+        // only show modules and non-std enums in a fresh UseTree
+ Qualified::No => {
+ cov_mark::hit!(unqualified_path_selected_only);
+ ctx.process_all_names(&mut |name, res| {
+ match res {
+ ScopeDef::ModuleDef(hir::ModuleDef::Module(module)) => {
+ acc.add_module(ctx, path_ctx, module, name);
+ }
+ ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(e))) => {
+ // exclude prelude enum
+ let is_builtin =
+ res.krate(ctx.db).map_or(false, |krate| krate.is_builtin(ctx.db));
+
+ if !is_builtin {
+ let item = CompletionItem::new(
+ CompletionItemKind::SymbolKind(SymbolKind::Enum),
+ ctx.source_range(),
+ format!("{}::", e.name(ctx.db)),
+ );
+ acc.add(item.build());
+ }
+ }
+ _ => {}
+ };
+ });
+ acc.add_nameref_keywords_with_colon(ctx);
+ }
+ Qualified::TypeAnchor { .. } | Qualified::With { resolution: None, .. } => {}
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
new file mode 100644
index 000000000..5e6cf4bf9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
@@ -0,0 +1,41 @@
+//! Completion for visibility specifiers.
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx, Qualified},
+ Completions,
+};
+
+pub(crate) fn complete_vis_path(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ &has_in_token: &bool,
+) {
+ match qualified {
+ Qualified::With {
+ resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
+ super_chain_len,
+ ..
+ } => {
+ // Try completing next child module of the path that is still a parent of the current module
+ let next_towards_current =
+ ctx.module.path_to_root(ctx.db).into_iter().take_while(|it| it != module).last();
+ if let Some(next) = next_towards_current {
+ if let Some(name) = next.name(ctx.db) {
+ cov_mark::hit!(visibility_qualified);
+ acc.add_module(ctx, path_ctx, next, name);
+ }
+ }
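+            // e.g. completing `pub(in crate::foo::$0)` from a module nested under
+            // `crate::foo::bar` should offer `bar`, the next step back towards the
+            // current module (illustrative example).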
+
+ acc.add_super_keyword(ctx, *super_chain_len);
+ }
+ Qualified::Absolute | Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
+ Qualified::No => {
+ if !has_in_token {
+ cov_mark::hit!(kw_completion_in);
+ acc.add_keyword(ctx, "in");
+ }
+ acc.add_nameref_keywords(ctx);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
new file mode 100644
index 000000000..80d6af281
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -0,0 +1,41 @@
+//! Settings for tweaking completion.
+//!
+//! The fun thing here is `SnippetCap` -- this type can only be created in this
+//! module, and we use it to statically check that we only produce snippet
+//! completions if we are allowed to.
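+//!
+//! As an illustrative sketch (not a description of any particular call site),
+//! rendering code is expected to gate snippet output roughly like
+//! `if let Some(cap) = config.snippet_cap { /* emit snippet text */ }`, so that
+//! snippet syntax never reaches clients that did not opt in.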
+
+use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
+
+use crate::snippet::Snippet;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CompletionConfig {
+ pub enable_postfix_completions: bool,
+ pub enable_imports_on_the_fly: bool,
+ pub enable_self_on_the_fly: bool,
+ pub enable_private_editable: bool,
+ pub callable: Option<CallableSnippets>,
+ pub snippet_cap: Option<SnippetCap>,
+ pub insert_use: InsertUseConfig,
+ pub snippets: Vec<Snippet>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CallableSnippets {
+ FillArguments,
+ AddParentheses,
+}
+
+impl CompletionConfig {
+ pub fn postfix_snippets(&self) -> impl Iterator<Item = (&str, &Snippet)> {
+ self.snippets
+ .iter()
+ .flat_map(|snip| snip.postfix_triggers.iter().map(move |trigger| (&**trigger, snip)))
+ }
+
+ pub fn prefix_snippets(&self) -> impl Iterator<Item = (&str, &Snippet)> {
+ self.snippets
+ .iter()
+ .flat_map(|snip| snip.prefix_triggers.iter().map(move |trigger| (&**trigger, snip)))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
new file mode 100644
index 000000000..e35f79d2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -0,0 +1,639 @@
+//! See `CompletionContext` structure.
+
+mod analysis;
+#[cfg(test)]
+mod tests;
+
+use std::iter;
+
+use base_db::SourceDatabaseExt;
+use hir::{
+ HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
+};
+use ide_db::{
+ base_db::{FilePosition, SourceDatabase},
+ famous_defs::FamousDefs,
+ FxHashMap, FxHashSet, RootDatabase,
+};
+use syntax::{
+ ast::{self, AttrKind, NameOrNameRef},
+ AstNode,
+ SyntaxKind::{self, *},
+ SyntaxToken, TextRange, TextSize,
+};
+use text_edit::Indel;
+
+use crate::CompletionConfig;
+
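+/// Placeholder identifier inserted at the cursor offset so that the speculative
+/// copy of the file parses as well-formed syntax; see `CompletionContext::new`
+/// and the `analysis` module for where it is inserted and consumed.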
+const COMPLETION_MARKER: &str = "intellijRulezz";
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum PatternRefutability {
+ Refutable,
+ Irrefutable,
+}
+
+#[derive(Debug)]
+pub(crate) enum Visible {
+ Yes,
+ Editable,
+ No,
+}
+
+/// Existing qualifiers for the thing we are currently completing.
+#[derive(Debug, Default)]
+pub(super) struct QualifierCtx {
+ pub(super) unsafe_tok: Option<SyntaxToken>,
+ pub(super) vis_node: Option<ast::Visibility>,
+}
+
+impl QualifierCtx {
+ pub(super) fn none(&self) -> bool {
+ self.unsafe_tok.is_none() && self.vis_node.is_none()
+ }
+}
+
+/// The state of the path we are currently completing.
+#[derive(Debug)]
+pub(crate) struct PathCompletionCtx {
+ /// If this is a call with () already there (or {} in case of record patterns)
+ pub(super) has_call_parens: bool,
+ /// If this has a macro call bang !
+ pub(super) has_macro_bang: bool,
+ /// The qualifier of the current path.
+ pub(super) qualified: Qualified,
+ /// The parent of the path we are completing.
+ pub(super) parent: Option<ast::Path>,
+ /// The path of which we are completing the segment
+ pub(super) path: ast::Path,
+ pub(super) kind: PathKind,
+ /// Whether the path segment has type args or not.
+ pub(super) has_type_args: bool,
+ /// Whether the qualifier comes from a use tree parent or not
+ pub(crate) use_tree_parent: bool,
+}
+
+impl PathCompletionCtx {
+ pub(super) fn is_trivial_path(&self) -> bool {
+ matches!(
+ self,
+ PathCompletionCtx {
+ has_call_parens: false,
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ has_type_args: false,
+ ..
+ }
+ )
+ }
+}
+
+/// The kind of path we are completing right now.
+#[derive(Debug, PartialEq, Eq)]
+pub(super) enum PathKind {
+ Expr {
+ expr_ctx: ExprCtx,
+ },
+ Type {
+ location: TypeLocation,
+ },
+ Attr {
+ attr_ctx: AttrCtx,
+ },
+ Derive {
+ existing_derives: ExistingDerives,
+ },
+ /// Path in item position, that is inside an (Assoc)ItemList
+ Item {
+ kind: ItemListKind,
+ },
+ Pat {
+ pat_ctx: PatternContext,
+ },
+ Vis {
+ has_in_token: bool,
+ },
+ Use,
+}
+
+pub(crate) type ExistingDerives = FxHashSet<hir::Macro>;
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) struct AttrCtx {
+ pub(crate) kind: AttrKind,
+ pub(crate) annotated_item_kind: Option<SyntaxKind>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) struct ExprCtx {
+ pub(crate) in_block_expr: bool,
+ pub(crate) in_loop_body: bool,
+ pub(crate) after_if_expr: bool,
+ /// Whether this expression is the direct condition of an if or while expression
+ pub(crate) in_condition: bool,
+ pub(crate) incomplete_let: bool,
+ pub(crate) ref_expr_parent: Option<ast::RefExpr>,
+ pub(crate) is_func_update: Option<ast::RecordExpr>,
+ pub(crate) self_param: Option<hir::SelfParam>,
+ pub(crate) innermost_ret_ty: Option<hir::Type>,
+ pub(crate) impl_: Option<ast::Impl>,
+ /// Whether this expression occurs in match arm guard position: before the
+ /// fat arrow token
+ pub(crate) in_match_guard: bool,
+}
+
+/// Original file ast nodes
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum TypeLocation {
+ TupleField,
+ TypeAscription(TypeAscriptionTarget),
+ GenericArgList(Option<ast::GenericArgList>),
+ TypeBound,
+ ImplTarget,
+ ImplTrait,
+ Other,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum TypeAscriptionTarget {
+ Let(Option<ast::Pat>),
+ FnParam(Option<ast::Pat>),
+ RetType(Option<ast::Expr>),
+ Const(Option<ast::Expr>),
+}
+
+/// The kind of item list a [`PathKind::Item`] belongs to.
+#[derive(Debug, PartialEq, Eq)]
+pub(super) enum ItemListKind {
+ SourceFile,
+ Module,
+ Impl,
+ TraitImpl(Option<ast::Impl>),
+ Trait,
+ ExternBlock,
+}
+
+#[derive(Debug)]
+pub(super) enum Qualified {
+ No,
+ With {
+ path: ast::Path,
+ resolution: Option<PathResolution>,
+ /// How many `super` segments are present in the path
+ ///
+        /// This will be `None` if the path is not solely made of
+ /// `super` segments, e.g.
+ ///
+ /// ```rust
+ /// use super::foo;
+ /// ```
+ ///
+ /// Otherwise it should be Some(count of `super`)
+ super_chain_len: Option<usize>,
+ },
+    /// A type-anchored path such as `<_>::` or `<Ty as Trait>::`
+ TypeAnchor {
+ ty: Option<hir::Type>,
+ trait_: Option<hir::Trait>,
+ },
+    /// An absolute path, i.e. a path starting with `::`
+ Absolute,
+}
+
+/// The state of the pattern we are completing.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct PatternContext {
+ pub(super) refutability: PatternRefutability,
+ pub(super) param_ctx: Option<ParamContext>,
+ pub(super) has_type_ascription: bool,
+ pub(super) parent_pat: Option<ast::Pat>,
+ pub(super) ref_token: Option<SyntaxToken>,
+ pub(super) mut_token: Option<SyntaxToken>,
+ /// The record pattern this name or ref is a field of
+ pub(super) record_pat: Option<ast::RecordPat>,
+ pub(super) impl_: Option<ast::Impl>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct ParamContext {
+ pub(super) param_list: ast::ParamList,
+ pub(super) param: ast::Param,
+ pub(super) kind: ParamKind,
+}
+
+/// The state of the lifetime we are completing.
+#[derive(Debug)]
+pub(super) struct LifetimeContext {
+ pub(super) lifetime: Option<ast::Lifetime>,
+ pub(super) kind: LifetimeKind,
+}
+
+/// The kind of lifetime we are completing.
+#[derive(Debug)]
+pub(super) enum LifetimeKind {
+ LifetimeParam { is_decl: bool, param: ast::LifetimeParam },
+ Lifetime,
+ LabelRef,
+ LabelDef,
+}
+
+/// The state of the name we are completing.
+#[derive(Debug)]
+pub(super) struct NameContext {
+ #[allow(dead_code)]
+ pub(super) name: Option<ast::Name>,
+ pub(super) kind: NameKind,
+}
+
+/// The kind of the name we are completing.
+#[derive(Debug)]
+#[allow(dead_code)]
+pub(super) enum NameKind {
+ Const,
+ ConstParam,
+ Enum,
+ Function,
+ IdentPat(PatternContext),
+ MacroDef,
+ MacroRules,
+ /// Fake node
+ Module(ast::Module),
+ RecordField,
+ Rename,
+ SelfParam,
+ Static,
+ Struct,
+ Trait,
+ TypeAlias,
+ TypeParam,
+ Union,
+ Variant,
+}
+
+/// The state of the NameRef we are completing.
+#[derive(Debug)]
+pub(super) struct NameRefContext {
+ /// NameRef syntax in the original file
+ pub(super) nameref: Option<ast::NameRef>,
+ pub(super) kind: NameRefKind,
+}
+
+/// The kind of the NameRef we are completing.
+#[derive(Debug)]
+pub(super) enum NameRefKind {
+ Path(PathCompletionCtx),
+ DotAccess(DotAccess),
+ /// Position where we are only interested in keyword completions
+ Keyword(ast::Item),
+ /// The record expression this nameref is a field of and whether a dot precedes the completion identifier.
+ RecordExpr {
+ dot_prefix: bool,
+ expr: ast::RecordExpr,
+ },
+ Pattern(PatternContext),
+}
+
+/// The identifier we are currently completing.
+#[derive(Debug)]
+pub(super) enum CompletionAnalysis {
+ Name(NameContext),
+ NameRef(NameRefContext),
+ Lifetime(LifetimeContext),
+ /// The string the cursor is currently inside
+ String {
+ /// original token
+ original: ast::String,
+ /// fake token
+ expanded: Option<ast::String>,
+ },
+    /// Set if we are currently completing in an unexpanded attribute; this usually implies a builtin attribute like `allow($0)`
+ UnexpandedAttrTT {
+ colon_prefix: bool,
+ fake_attribute_under_caret: Option<ast::Attr>,
+ },
+}
+
+/// Information about the field or method access we are completing.
+#[derive(Debug)]
+pub(super) struct DotAccess {
+ pub(super) receiver: Option<ast::Expr>,
+ pub(super) receiver_ty: Option<TypeInfo>,
+ pub(super) kind: DotAccessKind,
+}
+
+#[derive(Debug)]
+pub(super) enum DotAccessKind {
+ Field {
+ /// True if the receiver is an integer and there is no ident in the original file after it yet
+ /// like `0.$0`
+ receiver_is_ambiguous_float_literal: bool,
+ },
+ Method {
+ has_parens: bool,
+ },
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum ParamKind {
+ Function(ast::Fn),
+ Closure(ast::ClosureExpr),
+}
+
+/// `CompletionContext` is created early during completion to figure out where
+/// exactly the cursor is, syntax-wise.
+#[derive(Debug)]
+pub(crate) struct CompletionContext<'a> {
+ pub(super) sema: Semantics<'a, RootDatabase>,
+ pub(super) scope: SemanticsScope<'a>,
+ pub(super) db: &'a RootDatabase,
+ pub(super) config: &'a CompletionConfig,
+ pub(super) position: FilePosition,
+
+ /// The token before the cursor, in the original file.
+ pub(super) original_token: SyntaxToken,
+ /// The token before the cursor, in the macro-expanded file.
+ pub(super) token: SyntaxToken,
+ /// The crate of the current file.
+ pub(super) krate: hir::Crate,
+ /// The module of the `scope`.
+ pub(super) module: hir::Module,
+
+ /// The expected name of what we are completing.
+ /// This is usually the parameter name of the function argument we are completing.
+ pub(super) expected_name: Option<NameOrNameRef>,
+ /// The expected type of what we are completing.
+ pub(super) expected_type: Option<Type>,
+
+ pub(super) qualifier_ctx: QualifierCtx,
+
+ pub(super) locals: FxHashMap<Name, Local>,
+
+ /// The module depth of the current module of the cursor position.
+ /// - crate-root
+ /// - mod foo
+ /// - mod bar
+ /// Here depth will be 2
+ pub(super) depth_from_crate_root: usize,
+}
+
+impl<'a> CompletionContext<'a> {
+ /// The range of the identifier that is being completed.
+ pub(crate) fn source_range(&self) -> TextRange {
+ // check kind of macro-expanded token, but use range of original token
+ let kind = self.token.kind();
+ match kind {
+ CHAR => {
+ // assume we are completing a lifetime but the user has only typed the '
+ cov_mark::hit!(completes_if_lifetime_without_idents);
+ TextRange::at(self.original_token.text_range().start(), TextSize::from(1))
+ }
+ IDENT | LIFETIME_IDENT | UNDERSCORE => self.original_token.text_range(),
+ _ if kind.is_keyword() => self.original_token.text_range(),
+ _ => TextRange::empty(self.position.offset),
+ }
+ }
+
+ pub(crate) fn famous_defs(&self) -> FamousDefs<'_, '_> {
+ FamousDefs(&self.sema, self.krate)
+ }
+
+ /// Checks if an item is visible and not `doc(hidden)` at the completion site.
+ pub(crate) fn def_is_visible(&self, item: &ScopeDef) -> Visible {
+ match item {
+ ScopeDef::ModuleDef(def) => match def {
+ hir::ModuleDef::Module(it) => self.is_visible(it),
+ hir::ModuleDef::Function(it) => self.is_visible(it),
+ hir::ModuleDef::Adt(it) => self.is_visible(it),
+ hir::ModuleDef::Variant(it) => self.is_visible(it),
+ hir::ModuleDef::Const(it) => self.is_visible(it),
+ hir::ModuleDef::Static(it) => self.is_visible(it),
+ hir::ModuleDef::Trait(it) => self.is_visible(it),
+ hir::ModuleDef::TypeAlias(it) => self.is_visible(it),
+ hir::ModuleDef::Macro(it) => self.is_visible(it),
+ hir::ModuleDef::BuiltinType(_) => Visible::Yes,
+ },
+ ScopeDef::GenericParam(_)
+ | ScopeDef::ImplSelfType(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => Visible::Yes,
+ }
+ }
+
+ /// Checks if an item is visible and not `doc(hidden)` at the completion site.
+ pub(crate) fn is_visible<I>(&self, item: &I) -> Visible
+ where
+ I: hir::HasVisibility + hir::HasAttrs + hir::HasCrate + Copy,
+ {
+ let vis = item.visibility(self.db);
+ let attrs = item.attrs(self.db);
+ self.is_visible_impl(&vis, &attrs, item.krate(self.db))
+ }
+
+ /// Check if an item is `#[doc(hidden)]`.
+ pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool {
+ let attrs = item.attrs(self.db);
+ let krate = item.krate(self.db);
+ match (attrs, krate) {
+ (Some(attrs), Some(krate)) => self.is_doc_hidden(&attrs, krate),
+ _ => false,
+ }
+ }
+
+ /// Whether the given trait is an operator trait or not.
+ pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
+ match trait_.attrs(self.db).lang() {
+ Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
+ None => false,
+ }
+ }
+
+ /// Returns the traits in scope, with the [`Drop`] trait removed.
+ pub(crate) fn traits_in_scope(&self) -> hir::VisibleTraits {
+ let mut traits_in_scope = self.scope.visible_traits();
+ if let Some(drop) = self.famous_defs().core_ops_Drop() {
+ traits_in_scope.0.remove(&drop.into());
+ }
+ traits_in_scope
+ }
+
+ pub(crate) fn iterate_path_candidates(
+ &self,
+ ty: &hir::Type,
+ mut cb: impl FnMut(hir::AssocItem),
+ ) {
+ let mut seen = FxHashSet::default();
+ ty.iterate_path_candidates(
+ self.db,
+ &self.scope,
+ &self.traits_in_scope(),
+ Some(self.module),
+ None,
+ |item| {
+ // We might iterate candidates of a trait multiple times here, so deduplicate
+ // them.
+ if seen.insert(item) {
+ cb(item)
+ }
+ None::<()>
+ },
+ );
+ }
+
+ /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items.
+ pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let _p = profile::span("CompletionContext::process_all_names");
+ self.scope.process_all_names(&mut |name, def| {
+ if self.is_scope_def_hidden(def) {
+ return;
+ }
+
+ f(name, def);
+ });
+ }
+
+ pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let _p = profile::span("CompletionContext::process_all_names_raw");
+ self.scope.process_all_names(&mut |name, def| f(name, def));
+ }
+
+ fn is_scope_def_hidden(&self, scope_def: ScopeDef) -> bool {
+ if let (Some(attrs), Some(krate)) = (scope_def.attrs(self.db), scope_def.krate(self.db)) {
+ return self.is_doc_hidden(&attrs, krate);
+ }
+
+ false
+ }
+
+ fn is_visible_impl(
+ &self,
+ vis: &hir::Visibility,
+ attrs: &hir::Attrs,
+ defining_crate: hir::Crate,
+ ) -> Visible {
+ if !vis.is_visible_from(self.db, self.module.into()) {
+ if !self.config.enable_private_editable {
+ return Visible::No;
+ }
+ // If the definition location is editable, also show private items
+ let root_file = defining_crate.root_file(self.db);
+ let source_root_id = self.db.file_source_root(root_file);
+ let is_editable = !self.db.source_root(source_root_id).is_library;
+ return if is_editable { Visible::Editable } else { Visible::No };
+ }
+
+ if self.is_doc_hidden(attrs, defining_crate) {
+ Visible::No
+ } else {
+ Visible::Yes
+ }
+ }
+
+ fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
+ // `doc(hidden)` items are only completed within the defining crate.
+ self.krate != defining_crate && attrs.has_doc_hidden()
+ }
+}
+
+// CompletionContext construction
+impl<'a> CompletionContext<'a> {
+ pub(super) fn new(
+ db: &'a RootDatabase,
+ position @ FilePosition { file_id, offset }: FilePosition,
+ config: &'a CompletionConfig,
+ ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
+ let _p = profile::span("CompletionContext::new");
+ let sema = Semantics::new(db);
+
+ let original_file = sema.parse(file_id);
+
+ // Insert a fake ident to get a valid parse tree. We will use this file
+ // to determine context, though the original_file will be used for
+ // actual completion.
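+        // For example, with the cursor at `foo.$0`, the speculative file contains
+        // `foo.intellijRulezz` (see `COMPLETION_MARKER`), giving the parser a
+        // complete field access to work with.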
+ let file_with_fake_ident = {
+ let parse = db.parse(file_id);
+ let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
+ parse.reparse(&edit).tree()
+ };
+ let fake_ident_token =
+ file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;
+
+ let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
+ let token = sema.descend_into_macros_single(original_token.clone());
+
+        // adjust for macro input; this still fails if there is no token written yet
+ let scope_offset = if original_token == token { offset } else { token.text_range().end() };
+ let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;
+
+ let krate = scope.krate();
+ let module = scope.module();
+
+ let mut locals = FxHashMap::default();
+ scope.process_all_names(&mut |name, scope| {
+ if let ScopeDef::Local(local) = scope {
+ locals.insert(name, local);
+ }
+ });
+
+ let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
+
+ let mut ctx = CompletionContext {
+ sema,
+ scope,
+ db,
+ config,
+ position,
+ original_token,
+ token,
+ krate,
+ module,
+ expected_name: None,
+ expected_type: None,
+ qualifier_ctx: Default::default(),
+ locals,
+ depth_from_crate_root,
+ };
+ let ident_ctx = ctx.expand_and_analyze(
+ original_file.syntax().clone(),
+ file_with_fake_ident.syntax().clone(),
+ offset,
+ fake_ident_token,
+ )?;
+ Some((ctx, ident_ctx))
+ }
+}
+
+const OP_TRAIT_LANG_NAMES: &[&str] = &[
+ "add_assign",
+ "add",
+ "bitand_assign",
+ "bitand",
+ "bitor_assign",
+ "bitor",
+ "bitxor_assign",
+ "bitxor",
+ "deref_mut",
+ "deref",
+ "div_assign",
+ "div",
+ "eq",
+ "fn_mut",
+ "fn_once",
+ "fn",
+ "index_mut",
+ "index",
+ "mul_assign",
+ "mul",
+ "neg",
+ "not",
+ "partial_ord",
+ "rem_assign",
+ "rem",
+ "shl_assign",
+ "shl",
+ "shr_assign",
+ "shr",
+ "sub",
+];
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
new file mode 100644
index 000000000..22ec7cead
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -0,0 +1,1293 @@
+//! Module responsible for analyzing the code surrounding the cursor for completion.
+use std::iter;
+
+use hir::{Semantics, Type, TypeInfo};
+use ide_db::{active_parameter::ActiveParameter, RootDatabase};
+use syntax::{
+ algo::{find_node_at_offset, non_trivia_sibling},
+ ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
+ match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
+ SyntaxToken, TextRange, TextSize, T,
+};
+
+use crate::context::{
+ AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
+ ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
+ NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
+ PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
+ COMPLETION_MARKER,
+};
+
+impl<'a> CompletionContext<'a> {
+ /// Expand attributes and macro calls at the current cursor position for both the original file
+    /// and fake file repeatedly. As soon as one of the two expansions fails we stop so that the original
+ /// and speculative states stay in sync.
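+    ///
+    /// For example, with the cursor inside `m!(foo.$0)`, both the real call and the
+    /// speculative call (with the marker ident inserted) are expanded, and analysis
+    /// then proceeds on the expanded trees, assuming both expansions succeed.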
+ pub(super) fn expand_and_analyze(
+ &mut self,
+ mut original_file: SyntaxNode,
+ mut speculative_file: SyntaxNode,
+ mut offset: TextSize,
+ mut fake_ident_token: SyntaxToken,
+ ) -> Option<CompletionAnalysis> {
+ let _p = profile::span("CompletionContext::expand_and_fill");
+ let mut derive_ctx = None;
+
+ 'expansion: loop {
+ let parent_item =
+ |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let ancestor_items = iter::successors(
+ Option::zip(
+ find_node_at_offset::<ast::Item>(&original_file, offset),
+ find_node_at_offset::<ast::Item>(&speculative_file, offset),
+ ),
+ |(a, b)| parent_item(a).zip(parent_item(b)),
+ );
+
+ // first try to expand attributes as these are always the outermost macro calls
+ 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
+ match (
+ self.sema.expand_attr_macro(&actual_item),
+ self.sema.speculative_expand_attr_macro(
+ &actual_item,
+ &item_with_fake_ident,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // maybe parent items have attributes, so continue walking the ancestors
+ (None, None) => continue 'ancestors,
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
+ }
+ // exactly one expansion failed, inconsistent state so stop expanding completely
+ _ => break 'expansion,
+ }
+ }
+
+ // No attributes have been expanded, so look for macro_call! token trees or derive token trees
+ let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+ let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+
+ // Expand pseudo-derive expansion
+ if let (Some(orig_attr), Some(spec_attr)) = (
+ orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ ) {
+ if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
+ self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
+ self.sema.speculative_expand_derive_as_pseudo_attr_macro(
+ &orig_attr,
+ &spec_attr,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ derive_ctx = Some((
+ actual_expansion,
+ fake_expansion,
+ fake_mapped_token.text_range().start(),
+ orig_attr,
+ ));
+ }
+ // at this point we won't have any more successful expansions, so stop
+ break 'expansion;
+ }
+
+ // Expand fn-like macro calls
+ if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+ orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ ) {
+ let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+ let mac_call_path1 =
+ macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+
+ // inconsistent state, stop expanding
+ if mac_call_path0 != mac_call_path1 {
+ break 'expansion;
+ }
+ let speculative_args = match macro_call_with_fake_ident.token_tree() {
+ Some(tt) => tt,
+ None => break 'expansion,
+ };
+
+ match (
+ self.sema.expand(&actual_macro_call),
+ self.sema.speculative_expand(
+ &actual_macro_call,
+ &speculative_args,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
+ }
+                    // at least one expansion failed, we won't have anything to expand from this point
+ // onwards so break out
+ _ => break 'expansion,
+ }
+ }
+
+ // none of our states have changed so stop the loop
+ break 'expansion;
+ }
+
+ self.analyze(&original_file, speculative_file, offset, derive_ctx)
+ }
+
+ /// Calculate the expected type and name of the cursor position.
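+    ///
+    /// For example, in `fn f(x: i32) { f($0) }` the expected type is `i32` and the
+    /// expected name is `x`, taken from the corresponding parameter.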
+ fn expected_type_and_name(
+ &self,
+ name_like: &ast::NameLike,
+ ) -> (Option<Type>, Option<NameOrNameRef>) {
+ let mut node = match self.token.parent() {
+ Some(it) => it,
+ None => return (None, None),
+ };
+
+ let strip_refs = |mut ty: Type| match name_like {
+ ast::NameLike::NameRef(n) => {
+ let p = match n.syntax().parent() {
+ Some(it) => it,
+ None => return ty,
+ };
+ let top_syn = match_ast! {
+ match p {
+ ast::FieldExpr(e) => e
+ .syntax()
+ .ancestors()
+ .map_while(ast::FieldExpr::cast)
+ .last()
+ .map(|it| it.syntax().clone()),
+ ast::PathSegment(e) => e
+ .syntax()
+ .ancestors()
+ .skip(1)
+ .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
+ .find_map(ast::PathExpr::cast)
+ .map(|it| it.syntax().clone()),
+ _ => None
+ }
+ };
+ let top_syn = match top_syn {
+ Some(it) => it,
+ None => return ty,
+ };
+ for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
+ cov_mark::hit!(expected_type_fn_param_ref);
+ ty = ty.strip_reference();
+ }
+ ty
+ }
+ _ => ty,
+ };
+
+ loop {
+ break match_ast! {
+ match node {
+ ast::LetStmt(it) => {
+ cov_mark::hit!(expected_type_let_with_leading_char);
+ cov_mark::hit!(expected_type_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| self.sema.type_of_pat(&pat))
+ .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
+ Some(_) | None => None,
+ };
+
+ (ty, name)
+ },
+ ast::LetExpr(it) => {
+ cov_mark::hit!(expected_type_if_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| self.sema.type_of_pat(&pat))
+ .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::ArgList(_) => {
+ cov_mark::hit!(expected_type_fn_param);
+ ActiveParameter::at_token(
+ &self.sema,
+ self.token.clone(),
+ ).map(|ap| {
+ let name = ap.ident().map(NameOrNameRef::Name);
+
+ let ty = strip_refs(ap.ty);
+ (Some(ty), name)
+ })
+ .unwrap_or((None, None))
+ },
+ ast::RecordExprFieldList(it) => {
+ // wouldn't try {} be nice...
+ (|| {
+ if self.token.kind() == T![..]
+ || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
+ {
+ cov_mark::hit!(expected_type_struct_func_update);
+ let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
+ let ty = self.sema.type_of_expr(&record_expr.into())?;
+ Some((
+ Some(ty.original),
+ None
+ ))
+ } else {
+ cov_mark::hit!(expected_type_struct_field_without_leading_char);
+ let expr_field = self.token.prev_sibling_or_token()?
+ .into_node()
+ .and_then(ast::RecordExprField::cast)?;
+ let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
+ Some((
+ Some(ty),
+ expr_field.field_name().map(NameOrNameRef::NameRef),
+ ))
+ }
+ })().unwrap_or((None, None))
+ },
+ ast::RecordExprField(it) => {
+ if let Some(expr) = it.expr() {
+ cov_mark::hit!(expected_type_struct_field_with_leading_char);
+ (
+ self.sema.type_of_expr(&expr).map(TypeInfo::original),
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ } else {
+ cov_mark::hit!(expected_type_struct_field_followed_by_comma);
+ let ty = self.sema.resolve_record_field(&it)
+ .map(|(_, _, ty)| ty);
+ (
+ ty,
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ }
+ },
+ // match foo { $0 }
+ // match foo { ..., pat => $0 }
+ ast::MatchExpr(it) => {
+ let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());
+
+ let ty = if on_arrow {
+ // match foo { ..., pat => $0 }
+ cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
+ cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
+ self.sema.type_of_expr(&it.into())
+ } else {
+ // match foo { $0 }
+ cov_mark::hit!(expected_type_match_arm_without_leading_char);
+ it.expr().and_then(|e| self.sema.type_of_expr(&e))
+ }.map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IfExpr(it) => {
+ let ty = it.condition()
+ .and_then(|e| self.sema.type_of_expr(&e))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IdentPat(it) => {
+ cov_mark::hit!(expected_type_if_let_with_leading_char);
+ cov_mark::hit!(expected_type_match_arm_with_leading_char);
+ let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::Fn(it) => {
+ cov_mark::hit!(expected_type_fn_ret_with_leading_char);
+ cov_mark::hit!(expected_type_fn_ret_without_leading_char);
+ let def = self.sema.to_def(&it);
+ (def.map(|def| def.ret_type(self.db)), None)
+ },
+ ast::ClosureExpr(it) => {
+ let ty = self.sema.type_of_expr(&it.into());
+ ty.and_then(|ty| ty.original.as_callable(self.db))
+ .map(|c| (Some(c.return_type()), None))
+ .unwrap_or((None, None))
+ },
+ ast::ParamList(_) => (None, None),
+ ast::Stmt(_) => (None, None),
+ ast::Item(_) => (None, None),
+ _ => {
+ match node.parent() {
+ Some(n) => {
+ node = n;
+ continue;
+ },
+ None => (None, None),
+ }
+ },
+ }
+ };
+ }
+ }
+
+    /// Fill the completion context; this is what does the semantic reasoning about the surrounding context
+ /// of the completion location.
+ fn analyze(
+ &mut self,
+ original_file: &SyntaxNode,
+ file_with_fake_ident: SyntaxNode,
+ offset: TextSize,
+ derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
+ ) -> Option<CompletionAnalysis> {
+ let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
+ let syntax_element = NodeOrToken::Token(fake_ident_token);
+ if is_in_token_of_for_loop(syntax_element.clone()) {
+ // for pat $0
+ // there is nothing to complete here except `in` keyword
+ // don't bother populating the context
+ // FIXME: the completion calculations should end up good enough
+ // such that this special case becomes unnecessary
+ return None;
+ }
+
+ // Overwrite the path kind for derives
+ if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
+ if let Some(ast::NameLike::NameRef(name_ref)) =
+ find_node_at_offset(&file_with_fake_ident, offset)
+ {
+ let parent = name_ref.syntax().parent()?;
+ let (mut nameref_ctx, _) =
+ Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
+ if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
+ path_ctx.kind = PathKind::Derive {
+ existing_derives: self
+ .sema
+ .resolve_derive_macro(&origin_attr)
+ .into_iter()
+ .flatten()
+ .flatten()
+ .collect(),
+ };
+ }
+ return Some(CompletionAnalysis::NameRef(nameref_ctx));
+ }
+ return None;
+ }
+
+ let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
+ Some(it) => it,
+ None => {
+ let analysis =
+ if let Some(original) = ast::String::cast(self.original_token.clone()) {
+ CompletionAnalysis::String {
+ original,
+ expanded: ast::String::cast(self.token.clone()),
+ }
+ } else {
+ // Fix up trailing whitespace problem
+ // #[attr(foo = $0
+ let token =
+ syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
+ let p = token.parent()?;
+ if p.kind() == SyntaxKind::TOKEN_TREE
+ && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+ {
+ let colon_prefix = previous_non_trivia_token(self.token.clone())
+ .map_or(false, |it| T![:] == it.kind());
+ CompletionAnalysis::UnexpandedAttrTT {
+ fake_attribute_under_caret: syntax_element
+ .ancestors()
+ .find_map(ast::Attr::cast),
+ colon_prefix,
+ }
+ } else {
+ return None;
+ }
+ };
+ return Some(analysis);
+ }
+ };
+ (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
+ let analysis = match name_like {
+ ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
+ Self::classify_lifetime(&self.sema, original_file, lifetime)?,
+ ),
+ ast::NameLike::NameRef(name_ref) => {
+ let parent = name_ref.syntax().parent()?;
+ let (nameref_ctx, qualifier_ctx) =
+ Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;
+
+ self.qualifier_ctx = qualifier_ctx;
+ CompletionAnalysis::NameRef(nameref_ctx)
+ }
+ ast::NameLike::Name(name) => {
+ let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
+ CompletionAnalysis::Name(name_ctx)
+ }
+ };
+ Some(analysis)
+ }
+
+ fn classify_lifetime(
+ _sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ lifetime: ast::Lifetime,
+ ) -> Option<LifetimeContext> {
+ let parent = lifetime.syntax().parent()?;
+ if parent.kind() == SyntaxKind::ERROR {
+ return None;
+ }
+
+ let kind = match_ast! {
+ match parent {
+ ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
+ is_decl: param.lifetime().as_ref() == Some(&lifetime),
+ param
+ },
+ ast::BreakExpr(_) => LifetimeKind::LabelRef,
+ ast::ContinueExpr(_) => LifetimeKind::LabelRef,
+ ast::Label(_) => LifetimeKind::LabelDef,
+ _ => LifetimeKind::Lifetime,
+ }
+ };
+ let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
+
+ Some(LifetimeContext { lifetime, kind })
+ }
+
+ fn classify_name(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name: ast::Name,
+ ) -> Option<NameContext> {
+ let parent = name.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::Const(_) => NameKind::Const,
+ ast::ConstParam(_) => NameKind::ConstParam,
+ ast::Enum(_) => NameKind::Enum,
+ ast::Fn(_) => NameKind::Function,
+ ast::IdentPat(bind_pat) => {
+ let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
+ if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
+ pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
+ }
+
+ NameKind::IdentPat(pat_ctx)
+ },
+ ast::MacroDef(_) => NameKind::MacroDef,
+ ast::MacroRules(_) => NameKind::MacroRules,
+ ast::Module(module) => NameKind::Module(module),
+ ast::RecordField(_) => NameKind::RecordField,
+ ast::Rename(_) => NameKind::Rename,
+ ast::SelfParam(_) => NameKind::SelfParam,
+ ast::Static(_) => NameKind::Static,
+ ast::Struct(_) => NameKind::Struct,
+ ast::Trait(_) => NameKind::Trait,
+ ast::TypeAlias(_) => NameKind::TypeAlias,
+ ast::TypeParam(_) => NameKind::TypeParam,
+ ast::Union(_) => NameKind::Union,
+ ast::Variant(_) => NameKind::Variant,
+ _ => return None,
+ }
+ };
+ let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
+ Some(NameContext { name, kind })
+ }
+
+ fn classify_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name_ref: ast::NameRef,
+ parent: SyntaxNode,
+ ) -> Option<(NameRefContext, QualifierCtx)> {
+ let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
+
+ let make_res =
+ |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
+
+ if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
+ let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
+ .map_or(false, |it| T![.] == it.kind());
+
+ return find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_lit(),
+ )
+ .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
+ .map(make_res);
+ }
+ if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
+ let kind = NameRefKind::Pattern(PatternContext {
+ param_ctx: None,
+ has_type_ascription: false,
+ ref_token: None,
+ mut_token: None,
+ record_pat: find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_pat(),
+ ),
+ ..pattern_context_for(
+ sema,
+ original_file,
+ record_field.parent_record_pat().clone().into(),
+ )
+ });
+ return Some(make_res(kind));
+ }
+
+ let segment = match_ast! {
+ match parent {
+ ast::PathSegment(segment) => segment,
+ ast::FieldExpr(field) => {
+ let receiver = find_opt_node_in_file(original_file, field.expr());
+ let receiver_is_ambiguous_float_literal = match &receiver {
+ Some(ast::Expr::Literal(l)) => matches! {
+ l.kind(),
+ ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
+ },
+ _ => false,
+ };
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ ast::MethodCallExpr(method) => {
+ let receiver = find_opt_node_in_file(original_file, method.receiver());
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ _ => return None,
+ }
+ };
+
+ let path = segment.parent_path();
+ let mut path_ctx = PathCompletionCtx {
+ has_call_parens: false,
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ path: path.clone(),
+ kind: PathKind::Item { kind: ItemListKind::SourceFile },
+ has_type_args: false,
+ use_tree_parent: false,
+ };
+
+ let is_in_block = |it: &SyntaxNode| {
+ it.parent()
+ .map(|node| {
+ ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
+ })
+ .unwrap_or(false)
+ };
+ let func_update_record = |syn: &SyntaxNode| {
+ if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
+ find_node_in_file_compensated(sema, original_file, &record_expr)
+ } else {
+ None
+ }
+ };
+ let after_if_expr = |node: SyntaxNode| {
+ let prev_expr = (|| {
+ let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
+ ast::ExprStmt::cast(prev_sibling)?.expr()
+ })();
+ matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
+ };
+
+ // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
+ // ex. trait Foo $0 {}
+ // in these cases parser recovery usually kicks in for our inserted identifier, causing it
+ // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
+ // expression or an item list.
+        // The following code checks if the body is missing; if it is, then either we cut off the body
+        // from the item or it was missing in the first place
+ let inbetween_body_and_decl_check = |node: SyntaxNode| {
+ if let Some(NodeOrToken::Node(n)) =
+ syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
+ {
+ if let Some(item) = ast::Item::cast(n) {
+ let is_inbetween = match &item {
+ ast::Item::Const(it) => it.body().is_none(),
+ ast::Item::Enum(it) => it.variant_list().is_none(),
+ ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
+ ast::Item::Fn(it) => it.body().is_none(),
+ ast::Item::Impl(it) => it.assoc_item_list().is_none(),
+ ast::Item::Module(it) => it.item_list().is_none(),
+ ast::Item::Static(it) => it.body().is_none(),
+ ast::Item::Struct(it) => it.field_list().is_none(),
+ ast::Item::Trait(it) => it.assoc_item_list().is_none(),
+ ast::Item::TypeAlias(it) => it.ty().is_none(),
+ ast::Item::Union(it) => it.record_field_list().is_none(),
+ _ => false,
+ };
+ if is_inbetween {
+ return Some(item);
+ }
+ }
+ }
+ None
+ };
+
+ let type_location = |node: &SyntaxNode| {
+ let parent = node.parent()?;
+ let res = match_ast! {
+ match parent {
+ ast::Const(it) => {
+ let name = find_opt_node_in_file(original_file, it.name())?;
+ let original = ast::Const::cast(name.syntax().parent()?)?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
+ },
+ ast::RetType(it) => {
+ if it.thin_arrow_token().is_none() {
+ return None;
+ }
+ let parent = match ast::Fn::cast(parent.parent()?) {
+ Some(x) => x.param_list(),
+ None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
+ };
+
+ let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
+ match parent {
+ ast::ClosureExpr(it) => {
+ it.body()
+ },
+ ast::Fn(it) => {
+ it.body().map(ast::Expr::BlockExpr)
+ },
+ _ => return None,
+ }
+ }))
+ },
+ ast::Param(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::LetStmt(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::Impl(it) => {
+ match it.trait_() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
+ _ => match it.self_ty() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
+ _ => return None,
+ },
+ }
+ },
+ ast::TypeBound(_) => TypeLocation::TypeBound,
+ // is this case needed?
+ ast::TypeBoundList(_) => TypeLocation::TypeBound,
+ ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
+ // is this case needed?
+ ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
+ ast::TupleField(_) => TypeLocation::TupleField,
+ _ => return None,
+ }
+ };
+ Some(res)
+ };
+
+ let is_in_condition = |it: &ast::Expr| {
+ (|| {
+ let parent = it.syntax().parent()?;
+ if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
+ Some(expr.condition()? == *it)
+ } else if let Some(expr) = ast::IfExpr::cast(parent) {
+ Some(expr.condition()? == *it)
+ } else {
+ None
+ }
+ })()
+ .unwrap_or(false)
+ };
+
+ let make_path_kind_expr = |expr: ast::Expr| {
+ let it = expr.syntax();
+ let in_block_expr = is_in_block(it);
+ let in_loop_body = is_in_loop_body(it);
+ let after_if_expr = after_if_expr(it.clone());
+ let ref_expr_parent =
+ path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
+ let (innermost_ret_ty, self_param) = {
+ let find_ret_ty = |it: SyntaxNode| {
+ if let Some(item) = ast::Item::cast(it.clone()) {
+ match item {
+ ast::Item::Fn(f) => {
+ Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
+ }
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
+ }
+ } else {
+ let expr = ast::Expr::cast(it)?;
+ let callable = match expr {
+ // FIXME
+ // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
+ ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
+ _ => return None,
+ };
+ Some(
+ callable
+ .and_then(|c| c.adjusted().as_callable(sema.db))
+ .map(|it| it.return_type()),
+ )
+ }
+ };
+ let find_fn_self_param = |it| match it {
+ ast::Item::Fn(fn_) => {
+ Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
+ }
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
+ };
+
+ match find_node_in_file_compensated(sema, original_file, &expr) {
+ Some(it) => {
+ let innermost_ret_ty = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .find_map(find_ret_ty)
+ .flatten();
+
+ let self_param = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .filter_map(ast::Item::cast)
+ .find_map(find_fn_self_param)
+ .flatten();
+ (innermost_ret_ty, self_param)
+ }
+ None => (None, None),
+ }
+ };
+ let is_func_update = func_update_record(it);
+ let in_condition = is_in_condition(&expr);
+ let incomplete_let = it
+ .parent()
+ .and_then(ast::LetStmt::cast)
+ .map_or(false, |it| it.semicolon_token().is_none());
+ let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
+
+ let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
+ Some(arm) => arm
+ .fat_arrow_token()
+ .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
+ None => false,
+ };
+
+ PathKind::Expr {
+ expr_ctx: ExprCtx {
+ in_block_expr,
+ in_loop_body,
+ after_if_expr,
+ in_condition,
+ ref_expr_parent,
+ is_func_update,
+ innermost_ret_ty,
+ self_param,
+ incomplete_let,
+ impl_,
+ in_match_guard,
+ },
+ }
+ };
+ let make_path_kind_type = |ty: ast::Type| {
+ let location = type_location(ty.syntax());
+ PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
+ };
+
+ let mut kind_macro_call = |it: ast::MacroCall| {
+ path_ctx.has_macro_bang = it.excl_token().is_some();
+ let parent = it.syntax().parent()?;
+ // Any path in an item list will be treated as a macro call by the parser
+ let kind = match_ast! {
+ match parent {
+ ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
+ ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
+ ast::MacroType(ty) => make_path_kind_type(ty.into()),
+ ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
+ ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
+ Some(it) => match_ast! {
+ match it {
+ ast::Trait(_) => ItemListKind::Trait,
+ ast::Impl(it) => if it.trait_().is_some() {
+ ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
+ } else {
+ ItemListKind::Impl
+ },
+ _ => return None
+ }
+ },
+ None => return None,
+ } },
+ ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
+ ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
+ _ => return None,
+ }
+ };
+ Some(kind)
+ };
+ let make_path_kind_attr = |meta: ast::Meta| {
+ let attr = meta.parent_attr()?;
+ let kind = attr.kind();
+ let attached = attr.syntax().parent()?;
+ let is_trailing_outer_attr = kind != AttrKind::Inner
+ && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
+ .is_none();
+ let annotated_item_kind =
+ if is_trailing_outer_attr { None } else { Some(attached.kind()) };
+ Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
+ };
+
+ // Infer the path kind
+ let parent = path.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ if let Some(p) = it.syntax().parent() {
+ if ast::ExprStmt::can_cast(p.kind()) {
+ if let Some(kind) = inbetween_body_and_decl_check(p) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+ }
+ }
+
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ // A macro call in this position is usually a result of parsing recovery, so check that
+ if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ // completing inside a qualifier
+ ast::Path(parent) => {
+ path_ctx.parent = Some(parent.clone());
+ let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ }
+ },
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ };
+
+ path_ctx.kind = kind;
+ path_ctx.has_type_args = segment.generic_arg_list().is_some();
+
+ // calculate the qualifier context
+ if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
+ path_ctx.use_tree_parent = use_tree_parent;
+ if !use_tree_parent && segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ } else {
+ let qualifier = qualifier
+ .segment()
+ .and_then(|it| find_node_in_file(original_file, &it))
+ .map(|it| it.parent_path());
+ if let Some(qualifier) = qualifier {
+ let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
+ Some(ast::PathSegmentKind::Type {
+ type_ref: Some(type_ref),
+ trait_ref,
+ }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
+ _ => None,
+ };
+
+ path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
+ let ty = match ty {
+ ast::Type::InferType(_) => None,
+ ty => sema.resolve_type(&ty),
+ };
+ let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
+ Qualified::TypeAnchor { ty, trait_ }
+ } else {
+ let res = sema.resolve_path(&qualifier);
+
+                        // For understanding how and why super_chain_len is calculated the way it
+                        // is, check the documentation at its definition
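+                        // e.g. a qualifier of `super::super` gives segment_count == super_count == 2,
+                        // so `Some(2)`; any non-`super` segment makes the counts differ and yields `None`.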
+ let mut segment_count = 0;
+ let super_count =
+ iter::successors(Some(qualifier.clone()), |p| p.qualifier())
+ .take_while(|p| {
+ p.segment()
+ .and_then(|s| {
+ segment_count += 1;
+ s.super_token()
+ })
+ .is_some()
+ })
+ .count();
+
+ let super_chain_len =
+ if segment_count > super_count { None } else { Some(super_count) };
+
+ Qualified::With { path: qualifier, resolution: res, super_chain_len }
+ }
+ };
+ }
+ } else if let Some(segment) = path.segment() {
+ if segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ }
+ }
+
+ let mut qualifier_ctx = QualifierCtx::default();
+ if path_ctx.is_trivial_path() {
+ // fetch the full expression that may have qualifiers attached to it
+ let top_node = match path_ctx.kind {
+ PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
+ parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
+ let parent = p.parent()?;
+ if ast::StmtList::can_cast(parent.kind()) {
+ Some(p)
+ } else if ast::ExprStmt::can_cast(parent.kind()) {
+ Some(parent)
+ } else {
+ None
+ }
+ })
+ }
+ PathKind::Item { .. } => {
+ parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
+ }
+ _ => None,
+ };
+ if let Some(top) = top_node {
+ if let Some(NodeOrToken::Node(error_node)) =
+ syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
+ {
+ if error_node.kind() == SyntaxKind::ERROR {
+ qualifier_ctx.unsafe_tok = error_node
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .find(|it| it.kind() == T![unsafe]);
+ qualifier_ctx.vis_node =
+ error_node.children().find_map(ast::Visibility::cast);
+ }
+ }
+
+ if let PathKind::Item { .. } = path_ctx.kind {
+ if qualifier_ctx.none() {
+ if let Some(t) = top.first_token() {
+ if let Some(prev) = t
+ .prev_token()
+ .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
+ {
+ if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
+ // This was inferred to be an item position path, but it seems
+ // to be part of some other broken node which leaked into an item
+ // list
+ return None;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
+ }
+}
+
+fn pattern_context_for(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ pat: ast::Pat,
+) -> PatternContext {
+ let mut param_ctx = None;
+ let (refutability, has_type_ascription) =
+ pat
+ .syntax()
+ .ancestors()
+ .skip_while(|it| ast::Pat::can_cast(it.kind()))
+ .next()
+ .map_or((PatternRefutability::Irrefutable, false), |node| {
+ let refutability = match_ast! {
+ match node {
+ ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
+ ast::Param(param) => {
+ let has_type_ascription = param.ty().is_some();
+ param_ctx = (|| {
+ let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
+ let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
+ let param_list_owner = param_list.syntax().parent()?;
+ let kind = match_ast! {
+ match param_list_owner {
+ ast::ClosureExpr(closure) => ParamKind::Closure(closure),
+ ast::Fn(fn_) => ParamKind::Function(fn_),
+ _ => return None,
+ }
+ };
+ Some(ParamContext {
+ param_list, param, kind
+ })
+ })();
+ return (PatternRefutability::Irrefutable, has_type_ascription)
+ },
+ ast::MatchArm(_) => PatternRefutability::Refutable,
+ ast::LetExpr(_) => PatternRefutability::Refutable,
+ ast::ForExpr(_) => PatternRefutability::Irrefutable,
+ _ => PatternRefutability::Irrefutable,
+ }
+ };
+ (refutability, false)
+ });
+ let (ref_token, mut_token) = match &pat {
+ ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
+ _ => (None, None),
+ };
+
+ PatternContext {
+ refutability,
+ param_ctx,
+ has_type_ascription,
+ parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
+ mut_token,
+ ref_token,
+ record_pat: None,
+ impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
+ }
+}
+
+fn fetch_immediate_impl(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ node: &SyntaxNode,
+) -> Option<ast::Impl> {
+ let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
+ .filter_map(ast::Item::cast)
+ .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
+
+ match ancestors.next()? {
+ ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
+ ast::Item::Impl(it) => return Some(it),
+ _ => return None,
+ }
+ match ancestors.next()? {
+ ast::Item::Impl(it) => Some(it),
+ _ => None,
+ }
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
+/// If the fake identifier has been inserted after this node or inside of this node, use the `_compensated` version instead.
+fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
+ find_node_in_file(syntax, &node?)
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
+/// If the fake identifier has been inserted after this node or inside of this node, use the `_compensated` version instead.
+fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
+ let syntax_range = syntax.text_range();
+ let range = node.syntax().text_range();
+ let intersection = range.intersect(syntax_range)?;
+ syntax.covering_element(intersection).ancestors().find_map(N::cast)
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+fn find_node_in_file_compensated<N: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ in_file: &SyntaxNode,
+ node: &N,
+) -> Option<N> {
+ ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
+}
+
+fn ancestors_in_file_compensated<'sema>(
+ sema: &'sema Semantics<'_, RootDatabase>,
+ in_file: &SyntaxNode,
+ node: &SyntaxNode,
+) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
+ let syntax_range = in_file.text_range();
+ let range = node.text_range();
+ let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
+ if end < range.start() {
+ return None;
+ }
+ let range = TextRange::new(range.start(), end);
+ // our inserted ident could cause `range` to go outside of the original syntax, so cap it
+ let intersection = range.intersect(syntax_range)?;
+ let node = match in_file.covering_element(intersection) {
+ NodeOrToken::Node(node) => node,
+ NodeOrToken::Token(tok) => tok.parent()?,
+ };
+ Some(sema.ancestors_with_macros(node))
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+fn find_opt_node_in_file_compensated<N: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ node: Option<N>,
+) -> Option<N> {
+ find_node_in_file_compensated(sema, syntax, &node?)
+}
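The `_compensated` helpers above all reduce to the same range arithmetic: shrink the node's end offset by the length of the inserted completion marker before mapping it back onto the original file, and give up if that would move the end before the start. A standalone sketch of that arithmetic, using made-up offsets and a hypothetical marker (the real marker text and constants live elsewhere in this crate):

```rust
fn main() {
    // Hypothetical marker inserted at the cursor so the speculative file still parses.
    let marker_len = "fake_ident".len() as u32;

    // Range of a node in the speculative file (the file with the marker inserted).
    let (start, end) = (10u32, 30u32);

    // Drop the marker's length from the end so the range lines up with the original file,
    // bailing out if the node is shorter than the marker itself.
    let compensated = end.checked_sub(marker_len).filter(|&e| e >= start).map(|e| (start, e));
    assert_eq!(compensated, Some((10, 20)));
}
```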
+
+fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
+ if let Some(qual) = path.qualifier() {
+ return Some((qual, false));
+ }
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
+ let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
+ Some((use_tree.path()?, true))
+}
+
+pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
+ // oh my ...
+ (|| {
+ let syntax_token = element.into_token()?;
+ let range = syntax_token.text_range();
+ let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
+
+ // check if the current token is the `in` token of a for loop
+ if let Some(token) = for_expr.in_token() {
+ return Some(syntax_token == token);
+ }
+ let pat = for_expr.pat()?;
+ if range.end() < pat.syntax().text_range().end() {
+ // if we are inside or before the pattern we can't be at the `in` token position
+ return None;
+ }
+ let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
+ Some(match next_sibl {
+ // the loop body is some node; if our token is at its start we are at the `in` position,
+ // otherwise we could be in a recovered expression and we don't want to ruin completions there
+ syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
+ // the loop body consists of a single token; if we are that token we are certainly at the `in` token position
+ syntax::NodeOrToken::Token(t) => t == syntax_token,
+ })
+ })()
+ .unwrap_or(false)
+}
+
+#[test]
+fn test_for_is_prev2() {
+ crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
+}
+
+pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
+ node.ancestors()
+ .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
+ .find_map(|it| {
+ let loop_body = match_ast! {
+ match it {
+ ast::ForExpr(it) => it.loop_body(),
+ ast::WhileExpr(it) => it.loop_body(),
+ ast::LoopExpr(it) => it.loop_body(),
+ _ => None,
+ }
+ };
+ loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
+ })
+ .is_some()
+}
+
+fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
+ let mut token = match e.into() {
+ SyntaxElement::Node(n) => n.first_token()?,
+ SyntaxElement::Token(t) => t,
+ }
+ .prev_token();
+ while let Some(inner) = token {
+ if !inner.kind().is_trivia() {
+ return Some(inner);
+ } else {
+ token = inner.prev_token();
+ }
+ }
+ None
+}
+
+fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
+ let mut e = ele.next_sibling_or_token();
+ while let Some(inner) = e {
+ if !inner.kind().is_trivia() {
+ return Some(inner);
+ } else {
+ e = inner.next_sibling_or_token();
+ }
+ }
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
new file mode 100644
index 000000000..50845b388
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
@@ -0,0 +1,413 @@
+use expect_test::{expect, Expect};
+use hir::HirDisplay;
+
+use crate::{
+ context::CompletionContext,
+ tests::{position, TEST_CONFIG},
+};
+
+fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) {
+ let (db, pos) = position(ra_fixture);
+ let config = TEST_CONFIG;
+ let (completion_context, _analysis) = CompletionContext::new(&db, pos, &config).unwrap();
+
+ let ty = completion_context
+ .expected_type
+ .map(|t| t.display_test(&db).to_string())
+ .unwrap_or("?".to_owned());
+
+ let name =
+ completion_context.expected_name.map_or_else(|| "?".to_owned(), |name| name.to_string());
+
+ expect.assert_eq(&format!("ty: {}, name: {}", ty, name));
+}
+
+#[test]
+fn expected_type_let_without_leading_char() {
+ cov_mark::check!(expected_type_let_without_leading_char);
+ check_expected_type_and_name(
+ r#"
+fn foo() {
+ let x: u32 = $0;
+}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+}
+
+#[test]
+fn expected_type_let_with_leading_char() {
+ cov_mark::check!(expected_type_let_with_leading_char);
+ check_expected_type_and_name(
+ r#"
+fn foo() {
+ let x: u32 = c$0;
+}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+}
+
+#[test]
+fn expected_type_let_pat() {
+ check_expected_type_and_name(
+ r#"
+fn foo() {
+ let x$0 = 0u32;
+}
+"#,
+ expect![[r#"ty: u32, name: ?"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+fn foo() {
+ let $0 = 0u32;
+}
+"#,
+ expect![[r#"ty: u32, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_fn_param() {
+ cov_mark::check!(expected_type_fn_param);
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar($0); }
+fn bar(x: u32) {}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar(c$0); }
+fn bar(x: u32) {}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+}
+
+#[test]
+fn expected_type_fn_param_ref() {
+ cov_mark::check!(expected_type_fn_param_ref);
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar(&$0); }
+fn bar(x: &u32) {}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar(&mut $0); }
+fn bar(x: &mut u32) {}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar(& c$0); }
+fn bar(x: &u32) {}
+ "#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar(&mut c$0); }
+fn bar(x: &mut u32) {}
+"#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+fn foo() { bar(&c$0); }
+fn bar(x: &u32) {}
+ "#,
+ expect![[r#"ty: u32, name: x"#]],
+ );
+}
+
+#[test]
+fn expected_type_struct_field_without_leading_char() {
+ cov_mark::check!(expected_type_struct_field_without_leading_char);
+ check_expected_type_and_name(
+ r#"
+struct Foo { a: u32 }
+fn foo() {
+ Foo { a: $0 };
+}
+"#,
+ expect![[r#"ty: u32, name: a"#]],
+ )
+}
+
+#[test]
+fn expected_type_struct_field_followed_by_comma() {
+ cov_mark::check!(expected_type_struct_field_followed_by_comma);
+ check_expected_type_and_name(
+ r#"
+struct Foo { a: u32 }
+fn foo() {
+ Foo { a: $0, };
+}
+"#,
+ expect![[r#"ty: u32, name: a"#]],
+ )
+}
+
+#[test]
+fn expected_type_generic_struct_field() {
+ check_expected_type_and_name(
+ r#"
+struct Foo<T> { a: T }
+fn foo() -> Foo<u32> {
+ Foo { a: $0 }
+}
+"#,
+ expect![[r#"ty: u32, name: a"#]],
+ )
+}
+
+#[test]
+fn expected_type_struct_field_with_leading_char() {
+ cov_mark::check!(expected_type_struct_field_with_leading_char);
+ check_expected_type_and_name(
+ r#"
+struct Foo { a: u32 }
+fn foo() {
+ Foo { a: c$0 };
+}
+"#,
+ expect![[r#"ty: u32, name: a"#]],
+ );
+}
+
+#[test]
+fn expected_type_match_arm_without_leading_char() {
+ cov_mark::check!(expected_type_match_arm_without_leading_char);
+ check_expected_type_and_name(
+ r#"
+enum E { X }
+fn foo() {
+ match E::X { $0 }
+}
+"#,
+ expect![[r#"ty: E, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_match_arm_with_leading_char() {
+ cov_mark::check!(expected_type_match_arm_with_leading_char);
+ check_expected_type_and_name(
+ r#"
+enum E { X }
+fn foo() {
+ match E::X { c$0 }
+}
+"#,
+ expect![[r#"ty: E, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_match_arm_body_without_leading_char() {
+ cov_mark::check!(expected_type_match_arm_body_without_leading_char);
+ check_expected_type_and_name(
+ r#"
+struct Foo;
+enum E { X }
+fn foo() -> Foo {
+ match E::X { E::X => $0 }
+}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_match_body_arm_with_leading_char() {
+ cov_mark::check!(expected_type_match_arm_body_with_leading_char);
+ check_expected_type_and_name(
+ r#"
+struct Foo;
+enum E { X }
+fn foo() -> Foo {
+ match E::X { E::X => c$0 }
+}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_if_let_without_leading_char() {
+ cov_mark::check!(expected_type_if_let_without_leading_char);
+ check_expected_type_and_name(
+ r#"
+enum Foo { Bar, Baz, Quux }
+
+fn foo() {
+ let f = Foo::Quux;
+ if let $0 = f { }
+}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ )
+}
+
+#[test]
+fn expected_type_if_let_with_leading_char() {
+ cov_mark::check!(expected_type_if_let_with_leading_char);
+ check_expected_type_and_name(
+ r#"
+enum Foo { Bar, Baz, Quux }
+
+fn foo() {
+ let f = Foo::Quux;
+ if let c$0 = f { }
+}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ )
+}
+
+#[test]
+fn expected_type_fn_ret_without_leading_char() {
+ cov_mark::check!(expected_type_fn_ret_without_leading_char);
+ check_expected_type_and_name(
+ r#"
+fn foo() -> u32 {
+ $0
+}
+"#,
+ expect![[r#"ty: u32, name: ?"#]],
+ )
+}
+
+#[test]
+fn expected_type_fn_ret_with_leading_char() {
+ cov_mark::check!(expected_type_fn_ret_with_leading_char);
+ check_expected_type_and_name(
+ r#"
+fn foo() -> u32 {
+ c$0
+}
+"#,
+ expect![[r#"ty: u32, name: ?"#]],
+ )
+}
+
+#[test]
+fn expected_type_fn_ret_fn_ref_fully_typed() {
+ check_expected_type_and_name(
+ r#"
+fn foo() -> u32 {
+ foo$0
+}
+"#,
+ expect![[r#"ty: u32, name: ?"#]],
+ )
+}
+
+#[test]
+fn expected_type_closure_param_return() {
+ // FIXME: make this work with `|| $0`
+ check_expected_type_and_name(
+ r#"
+//- minicore: fn
+fn foo() {
+ bar(|| a$0);
+}
+
+fn bar(f: impl FnOnce() -> u32) {}
+"#,
+ expect![[r#"ty: u32, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_generic_function() {
+ check_expected_type_and_name(
+ r#"
+fn foo() {
+ bar::<u32>($0);
+}
+
+fn bar<T>(t: T) {}
+"#,
+ expect![[r#"ty: u32, name: t"#]],
+ );
+}
+
+#[test]
+fn expected_type_generic_method() {
+ check_expected_type_and_name(
+ r#"
+fn foo() {
+ S(1u32).bar($0);
+}
+
+struct S<T>(T);
+impl<T> S<T> {
+ fn bar(self, t: T) {}
+}
+"#,
+ expect![[r#"ty: u32, name: t"#]],
+ );
+}
+
+#[test]
+fn expected_type_functional_update() {
+ cov_mark::check!(expected_type_struct_func_update);
+ check_expected_type_and_name(
+ r#"
+struct Foo { field: u32 }
+fn foo() {
+ Foo {
+ ..$0
+ }
+}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_param_pat() {
+ check_expected_type_and_name(
+ r#"
+struct Foo { field: u32 }
+fn foo(a$0: Foo) {}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ );
+ check_expected_type_and_name(
+ r#"
+struct Foo { field: u32 }
+fn foo($0: Foo) {}
+"#,
+ // FIXME make this work, currently fails due to pattern recovery eating the `:`
+ expect![[r#"ty: ?, name: ?"#]],
+ );
+}
+
+#[test]
+fn expected_type_ref_prefix_on_field() {
+ check_expected_type_and_name(
+ r#"
+fn foo(_: &mut i32) {}
+struct S {
+ field: i32,
+}
+
+fn main() {
+ let s = S {
+ field: 100,
+ };
+ foo(&mut s.f$0);
+}
+"#,
+ expect!["ty: i32, name: ?"],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
new file mode 100644
index 000000000..27c3ccb35
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -0,0 +1,637 @@
+//! See `CompletionItem` structure.
+
+use std::fmt;
+
+use hir::{Documentation, Mutability};
+use ide_db::{imports::import_assets::LocatedImport, SnippetCap, SymbolKind};
+use smallvec::SmallVec;
+use stdx::{impl_from, never};
+use syntax::{SmolStr, TextRange, TextSize};
+use text_edit::TextEdit;
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx},
+ render::{render_path_resolution, RenderContext},
+};
+
+/// `CompletionItem` describes a single completion variant in the editor pop-up.
+/// It is basically a POD with various properties. To construct a
+/// `CompletionItem`, use the `new` method and the `Builder` struct.
+#[derive(Clone)]
+pub struct CompletionItem {
+ /// Label in the completion pop up which identifies completion.
+ label: SmolStr,
+ /// Range of identifier that is being completed.
+ ///
+ /// It should be used primarily for UI, but we also use this to convert
+ /// generic TextEdit into LSP's completion edit (see conv.rs).
+ ///
+ /// `source_range` must contain the completion offset. `text_edit` should
+ /// start with what `source_range` points to, or VSCode will filter out the
+ /// completion silently.
+ source_range: TextRange,
+ /// What happens when user selects this item.
+ ///
+ /// Typically, replaces `source_range` with new identifier.
+ text_edit: TextEdit,
+ is_snippet: bool,
+
+ /// What item (struct, function, etc) are we completing.
+ kind: CompletionItemKind,
+
+ /// Lookup is used to check if the completion item can indeed complete the
+ /// current ident.
+ ///
+ /// That is, in `foo.bar$0` a lookup of `abracadabra` will be accepted (it
+ /// contains the `bar` subsequence), and `quux` will be rejected.
+ lookup: Option<SmolStr>,
+
+ /// Additional info to show in the UI pop up.
+ detail: Option<String>,
+ documentation: Option<Documentation>,
+
+ /// Whether this item is marked as deprecated
+ deprecated: bool,
+
+ /// If completing a function call, ask the editor to show parameter popup
+ /// after completion.
+ trigger_call_info: bool,
+
+ /// We use this to sort completions. Relevance records facts like "do the
+ /// types align precisely?". We can't sort by relevances directly; they are
+ /// only partially ordered.
+ ///
+ /// Note that Relevance ignores fuzzy match score. We compute Relevance for
+ /// all possible items, and then separately build an ordered completion list
+ /// based on relevance and fuzzy matching with the already typed identifier.
+ relevance: CompletionRelevance,
+
+ /// Indicates that a reference or mutable reference to this variable is a
+ /// possible match.
+ ref_match: Option<(Mutability, TextSize)>,
+
+ /// The import data to add to completion's edits.
+ import_to_add: SmallVec<[LocatedImport; 1]>,
+}
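As the doc comment above says, a `CompletionItem` is assembled through `new` plus the crate-private `Builder`. A minimal, hedged construction sketch as it might appear inside this crate (the label, detail, and `0..4` range are illustrative, not taken from real completion logic):

```rust
use ide_db::SymbolKind;
use syntax::{TextRange, TextSize};

// Crate-internal sketch: `CompletionItem::new` and the `Builder` methods are `pub(crate)`.
fn example_item() -> CompletionItem {
    let mut builder = CompletionItem::new(
        SymbolKind::Function,                                 // anything `Into<CompletionItemKind>`
        TextRange::new(TextSize::from(0), TextSize::from(4)), // range of the ident being completed
        "spam",                                               // label shown in the pop-up
    );
    builder.detail("fn spam()").lookup_by("spam");
    // With no explicit edit set, `build` replaces `source_range` with the insert text (the label).
    builder.build()
}
```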
+
+// We use custom debug for CompletionItem to make snapshot tests more readable.
+impl fmt::Debug for CompletionItem {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut s = f.debug_struct("CompletionItem");
+ s.field("label", &self.label()).field("source_range", &self.source_range());
+ if self.text_edit().len() == 1 {
+ let atom = &self.text_edit().iter().next().unwrap();
+ s.field("delete", &atom.delete);
+ s.field("insert", &atom.insert);
+ } else {
+ s.field("text_edit", &self.text_edit);
+ }
+ s.field("kind", &self.kind());
+ if self.lookup() != self.label() {
+ s.field("lookup", &self.lookup());
+ }
+ if let Some(detail) = self.detail() {
+ s.field("detail", &detail);
+ }
+ if let Some(documentation) = self.documentation() {
+ s.field("documentation", &documentation);
+ }
+ if self.deprecated {
+ s.field("deprecated", &true);
+ }
+
+ if self.relevance != CompletionRelevance::default() {
+ s.field("relevance", &self.relevance);
+ }
+
+ if let Some((mutability, offset)) = &self.ref_match {
+ s.field("ref_match", &format!("&{}@{offset:?}", mutability.as_keyword_for_ref()));
+ }
+ if self.trigger_call_info {
+ s.field("trigger_call_info", &true);
+ }
+ s.finish()
+ }
+}
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+pub struct CompletionRelevance {
+ /// This is set in cases like these:
+ ///
+ /// ```
+ /// fn f(spam: String) {}
+ /// fn main() {
+ /// let spam = 92;
+ /// f($0) // name of local matches the name of param
+ /// }
+ /// ```
+ pub exact_name_match: bool,
+ /// See CompletionRelevanceTypeMatch doc comments for cases where this is set.
+ pub type_match: Option<CompletionRelevanceTypeMatch>,
+ /// This is set in cases like these:
+ ///
+ /// ```
+ /// fn foo(a: u32) {
+ /// let b = 0;
+ /// $0 // `a` and `b` are local
+ /// }
+ /// ```
+ pub is_local: bool,
+ /// This is set when trait items are completed in an impl of that trait.
+ pub is_item_from_trait: bool,
+ /// This is set when an import is suggested whose name is already imported.
+ pub is_name_already_imported: bool,
+ /// This is set for completions that will insert a `use` item.
+ pub requires_import: bool,
+ /// Set for method completions of the `core::ops` and `core::cmp` family.
+ pub is_op_method: bool,
+ /// Set for item completions that are private but in the workspace.
+ pub is_private_editable: bool,
+ /// Set for postfix snippet item completions
+ pub postfix_match: Option<CompletionRelevancePostfixMatch>,
+ /// This is set for type inference results
+ pub is_definite: bool,
+}
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum CompletionRelevanceTypeMatch {
+ /// This is set in cases like these:
+ ///
+ /// ```
+ /// enum Option<T> { Some(T), None }
+ /// fn f(a: Option<u32>) {}
+ /// fn main() {
+ /// f(Option::N$0) // type `Option<T>` could unify with `Option<u32>`
+ /// }
+ /// ```
+ CouldUnify,
+ /// This is set in cases like these:
+ ///
+ /// ```
+ /// fn f(spam: String) {}
+ /// fn main() {
+ /// let foo = String::new();
+ /// f($0) // type of local matches the type of param
+ /// }
+ /// ```
+ Exact,
+}
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum CompletionRelevancePostfixMatch {
+ /// Set in cases when the item is a postfix completion, but not an exact match
+ NonExact,
+ /// This is set in cases like these:
+ ///
+ /// ```
+ /// (a > b).not$0
+ /// ```
+ ///
+ /// Basically, we want to guarantee that postfix snippets always take
+ /// precedence over everything else.
+ Exact,
+}
+
+impl CompletionRelevance {
+ /// Provides a relevance score. Higher values are more relevant.
+ ///
+ /// The absolute value of the relevance score is not meaningful, for
+ /// example a value of 0 doesn't mean "not relevant", rather
+ /// it means "least relevant". The score value should only be used
+ /// for relative ordering.
+ ///
+ /// See `is_relevant` if you need to make some judgement about the score
+ /// in an absolute sense.
+ pub fn score(self) -> u32 {
+ let mut score = 0;
+ let CompletionRelevance {
+ exact_name_match,
+ type_match,
+ is_local,
+ is_item_from_trait,
+ is_name_already_imported,
+ requires_import,
+ is_op_method,
+ is_private_editable,
+ postfix_match,
+ is_definite,
+ } = self;
+
+ // lower rank private things
+ if !is_private_editable {
+ score += 1;
+ }
+ // lower rank trait op methods
+ if !is_op_method {
+ score += 10;
+ }
+ // lower rank for conflicting import names
+ if !is_name_already_imported {
+ score += 1;
+ }
+ // lower rank for items that don't need an import
+ if !requires_import {
+ score += 1;
+ }
+ if exact_name_match {
+ score += 10;
+ }
+ score += match postfix_match {
+ Some(CompletionRelevancePostfixMatch::Exact) => 100,
+ Some(CompletionRelevancePostfixMatch::NonExact) => 0,
+ None => 3,
+ };
+ score += match type_match {
+ Some(CompletionRelevanceTypeMatch::Exact) => 8,
+ Some(CompletionRelevanceTypeMatch::CouldUnify) => 3,
+ None => 0,
+ };
+ // slightly prefer locals
+ if is_local {
+ score += 1;
+ }
+ if is_item_from_trait {
+ score += 1;
+ }
+ if is_definite {
+ score += 10;
+ }
+ score
+ }
+
+ /// Returns true when the score for this item is above
+ /// some threshold such that we think it is especially likely
+ /// to be relevant.
+ pub fn is_relevant(&self) -> bool {
+ self.score() > 0
+ }
+}
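Because `score` is only meaningful for relative ordering, callers compare scores rather than interpret them. A hedged sketch (the field combination is arbitrary; the exact numbers produced by `score` are an implementation detail):

```rust
fn local_exact_name_outranks_default() {
    let plain = CompletionRelevance::default();
    let local_with_matching_name = CompletionRelevance {
        exact_name_match: true,
        is_local: true,
        ..CompletionRelevance::default()
    };
    // Only the ordering is asserted, never an absolute score.
    assert!(local_with_matching_name.score() > plain.score());
}
```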
+
+/// The type of the completion item.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum CompletionItemKind {
+ SymbolKind(SymbolKind),
+ Binding,
+ BuiltinType,
+ InferredType,
+ Keyword,
+ Method,
+ Snippet,
+ UnresolvedReference,
+}
+
+impl_from!(SymbolKind for CompletionItemKind);
+
+impl CompletionItemKind {
+ #[cfg(test)]
+ pub(crate) fn tag(&self) -> &'static str {
+ match self {
+ CompletionItemKind::SymbolKind(kind) => match kind {
+ SymbolKind::Attribute => "at",
+ SymbolKind::BuiltinAttr => "ba",
+ SymbolKind::Const => "ct",
+ SymbolKind::ConstParam => "cp",
+ SymbolKind::Derive => "de",
+ SymbolKind::DeriveHelper => "dh",
+ SymbolKind::Enum => "en",
+ SymbolKind::Field => "fd",
+ SymbolKind::Function => "fn",
+ SymbolKind::Impl => "im",
+ SymbolKind::Label => "lb",
+ SymbolKind::LifetimeParam => "lt",
+ SymbolKind::Local => "lc",
+ SymbolKind::Macro => "ma",
+ SymbolKind::Module => "md",
+ SymbolKind::SelfParam => "sp",
+ SymbolKind::SelfType => "sy",
+ SymbolKind::Static => "sc",
+ SymbolKind::Struct => "st",
+ SymbolKind::ToolModule => "tm",
+ SymbolKind::Trait => "tt",
+ SymbolKind::TypeAlias => "ta",
+ SymbolKind::TypeParam => "tp",
+ SymbolKind::Union => "un",
+ SymbolKind::ValueParam => "vp",
+ SymbolKind::Variant => "ev",
+ },
+ CompletionItemKind::Binding => "bn",
+ CompletionItemKind::BuiltinType => "bt",
+ CompletionItemKind::InferredType => "it",
+ CompletionItemKind::Keyword => "kw",
+ CompletionItemKind::Method => "me",
+ CompletionItemKind::Snippet => "sn",
+ CompletionItemKind::UnresolvedReference => "??",
+ }
+ }
+}
+
+impl CompletionItem {
+ pub(crate) fn new(
+ kind: impl Into<CompletionItemKind>,
+ source_range: TextRange,
+ label: impl Into<SmolStr>,
+ ) -> Builder {
+ let label = label.into();
+ Builder {
+ source_range,
+ label,
+ insert_text: None,
+ is_snippet: false,
+ trait_name: None,
+ detail: None,
+ documentation: None,
+ lookup: None,
+ kind: kind.into(),
+ text_edit: None,
+ deprecated: false,
+ trigger_call_info: false,
+ relevance: CompletionRelevance::default(),
+ ref_match: None,
+ imports_to_add: Default::default(),
+ }
+ }
+
+ /// What user sees in pop-up in the UI.
+ pub fn label(&self) -> &str {
+ &self.label
+ }
+ pub fn source_range(&self) -> TextRange {
+ self.source_range
+ }
+
+ pub fn text_edit(&self) -> &TextEdit {
+ &self.text_edit
+ }
+ /// Whether `text_edit` is a snippet (contains `$0` markers).
+ pub fn is_snippet(&self) -> bool {
+ self.is_snippet
+ }
+
+ /// Short one-line additional information, like a type
+ pub fn detail(&self) -> Option<&str> {
+ self.detail.as_deref()
+ }
+ /// A doc-comment
+ pub fn documentation(&self) -> Option<Documentation> {
+ self.documentation.clone()
+ }
+ /// What string is used for filtering.
+ pub fn lookup(&self) -> &str {
+ self.lookup.as_deref().unwrap_or(&self.label)
+ }
+
+ pub fn kind(&self) -> CompletionItemKind {
+ self.kind
+ }
+
+ pub fn deprecated(&self) -> bool {
+ self.deprecated
+ }
+
+ pub fn relevance(&self) -> CompletionRelevance {
+ self.relevance
+ }
+
+ pub fn trigger_call_info(&self) -> bool {
+ self.trigger_call_info
+ }
+
+ pub fn ref_match(&self) -> Option<(Mutability, TextSize, CompletionRelevance)> {
+ // Relevance of the ref match should be the same as the original
+ // match, but with exact type match set because self.ref_match
+ // is only set if there is an exact type match.
+ let mut relevance = self.relevance;
+ relevance.type_match = Some(CompletionRelevanceTypeMatch::Exact);
+
+ self.ref_match.map(|(mutability, offset)| (mutability, offset, relevance))
+ }
+
+ pub fn imports_to_add(&self) -> &[LocatedImport] {
+ &self.import_to_add
+ }
+}
+
+/// A helper to make `CompletionItem`s.
+#[must_use]
+#[derive(Clone)]
+pub(crate) struct Builder {
+ source_range: TextRange,
+ imports_to_add: SmallVec<[LocatedImport; 1]>,
+ trait_name: Option<SmolStr>,
+ label: SmolStr,
+ insert_text: Option<String>,
+ is_snippet: bool,
+ detail: Option<String>,
+ documentation: Option<Documentation>,
+ lookup: Option<SmolStr>,
+ kind: CompletionItemKind,
+ text_edit: Option<TextEdit>,
+ deprecated: bool,
+ trigger_call_info: bool,
+ relevance: CompletionRelevance,
+ ref_match: Option<(Mutability, TextSize)>,
+}
+
+impl Builder {
+ pub(crate) fn from_resolution(
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: hir::Name,
+ resolution: hir::ScopeDef,
+ ) -> Self {
+ render_path_resolution(RenderContext::new(ctx), path_ctx, local_name, resolution)
+ }
+
+ pub(crate) fn build(self) -> CompletionItem {
+ let _p = profile::span("item::Builder::build");
+
+ let mut label = self.label;
+ let mut lookup = self.lookup;
+ let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
+
+ if let [import_edit] = &*self.imports_to_add {
+ // snippets can have multiple imports, but normal completions only have up to one
+ if let Some(original_path) = import_edit.original_path.as_ref() {
+ lookup = lookup.or_else(|| Some(label.clone()));
+ label = SmolStr::from(format!("{} (use {})", label, original_path));
+ }
+ } else if let Some(trait_name) = self.trait_name {
+ label = SmolStr::from(format!("{} (as {})", label, trait_name));
+ }
+
+ let text_edit = match self.text_edit {
+ Some(it) => it,
+ None => TextEdit::replace(self.source_range, insert_text),
+ };
+
+ CompletionItem {
+ source_range: self.source_range,
+ label,
+ text_edit,
+ is_snippet: self.is_snippet,
+ detail: self.detail,
+ documentation: self.documentation,
+ lookup,
+ kind: self.kind,
+ deprecated: self.deprecated,
+ trigger_call_info: self.trigger_call_info,
+ relevance: self.relevance,
+ ref_match: self.ref_match,
+ import_to_add: self.imports_to_add,
+ }
+ }
+ pub(crate) fn lookup_by(&mut self, lookup: impl Into<SmolStr>) -> &mut Builder {
+ self.lookup = Some(lookup.into());
+ self
+ }
+ pub(crate) fn label(&mut self, label: impl Into<SmolStr>) -> &mut Builder {
+ self.label = label.into();
+ self
+ }
+ pub(crate) fn trait_name(&mut self, trait_name: SmolStr) -> &mut Builder {
+ self.trait_name = Some(trait_name);
+ self
+ }
+ pub(crate) fn insert_text(&mut self, insert_text: impl Into<String>) -> &mut Builder {
+ self.insert_text = Some(insert_text.into());
+ self
+ }
+ pub(crate) fn insert_snippet(
+ &mut self,
+ cap: SnippetCap,
+ snippet: impl Into<String>,
+ ) -> &mut Builder {
+ let _ = cap;
+ self.is_snippet = true;
+ self.insert_text(snippet)
+ }
+ pub(crate) fn text_edit(&mut self, edit: TextEdit) -> &mut Builder {
+ self.text_edit = Some(edit);
+ self
+ }
+ pub(crate) fn snippet_edit(&mut self, _cap: SnippetCap, edit: TextEdit) -> &mut Builder {
+ self.is_snippet = true;
+ self.text_edit(edit)
+ }
+ pub(crate) fn detail(&mut self, detail: impl Into<String>) -> &mut Builder {
+ self.set_detail(Some(detail))
+ }
+ pub(crate) fn set_detail(&mut self, detail: Option<impl Into<String>>) -> &mut Builder {
+ self.detail = detail.map(Into::into);
+ if let Some(detail) = &self.detail {
+ if never!(detail.contains('\n'), "multiline detail:\n{}", detail) {
+ self.detail = Some(detail.splitn(2, '\n').next().unwrap().to_string());
+ }
+ }
+ self
+ }
+ #[allow(unused)]
+ pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder {
+ self.set_documentation(Some(docs))
+ }
+ pub(crate) fn set_documentation(&mut self, docs: Option<Documentation>) -> &mut Builder {
+ self.documentation = docs.map(Into::into);
+ self
+ }
+ pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder {
+ self.deprecated = deprecated;
+ self
+ }
+ pub(crate) fn set_relevance(&mut self, relevance: CompletionRelevance) -> &mut Builder {
+ self.relevance = relevance;
+ self
+ }
+ pub(crate) fn trigger_call_info(&mut self) -> &mut Builder {
+ self.trigger_call_info = true;
+ self
+ }
+ pub(crate) fn add_import(&mut self, import_to_add: LocatedImport) -> &mut Builder {
+ self.imports_to_add.push(import_to_add);
+ self
+ }
+ pub(crate) fn ref_match(&mut self, mutability: Mutability, offset: TextSize) -> &mut Builder {
+ self.ref_match = Some((mutability, offset));
+ self
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use itertools::Itertools;
+ use test_utils::assert_eq_text;
+
+ use super::{
+ CompletionRelevance, CompletionRelevancePostfixMatch, CompletionRelevanceTypeMatch,
+ };
+
+ /// Check that these `CompletionRelevance` values are sorted in ascending order
+ /// by their relevance score.
+ ///
+ /// We want to avoid making assertions about the absolute score of any
+ /// item, but we do want to assert whether each is >, <, or == to the
+ /// others.
+ ///
+ /// If provided `vec![vec![a], vec![b, c], vec![d]]`, then this will assert:
+ /// `a.score < b.score == c.score < d.score`
+ fn check_relevance_score_ordered(expected_relevance_order: Vec<Vec<CompletionRelevance>>) {
+ let expected = format!("{:#?}", &expected_relevance_order);
+
+ let actual_relevance_order = expected_relevance_order
+ .into_iter()
+ .flatten()
+ .map(|r| (r.score(), r))
+ .sorted_by_key(|(score, _r)| *score)
+ .fold(
+ (u32::MIN, vec![vec![]]),
+ |(mut currently_collecting_score, mut out), (score, r)| {
+ if currently_collecting_score == score {
+ out.last_mut().unwrap().push(r);
+ } else {
+ currently_collecting_score = score;
+ out.push(vec![r]);
+ }
+ (currently_collecting_score, out)
+ },
+ )
+ .1;
+
+ let actual = format!("{:#?}", &actual_relevance_order);
+
+ assert_eq_text!(&expected, &actual);
+ }
+
+ #[test]
+ fn relevance_score() {
+ use CompletionRelevance as Cr;
+ let default = Cr::default();
+ // This test asserts that the relevance score for these items is ascending, and
+ // that any items in the same vec have the same score.
+ let expected_relevance_order = vec![
+ vec![],
+ vec![Cr { is_op_method: true, is_private_editable: true, ..default }],
+ vec![Cr { is_op_method: true, ..default }],
+ vec![Cr { postfix_match: Some(CompletionRelevancePostfixMatch::NonExact), ..default }],
+ vec![Cr { is_private_editable: true, ..default }],
+ vec![default],
+ vec![Cr { is_local: true, ..default }],
+ vec![Cr { type_match: Some(CompletionRelevanceTypeMatch::CouldUnify), ..default }],
+ vec![Cr { type_match: Some(CompletionRelevanceTypeMatch::Exact), ..default }],
+ vec![Cr { exact_name_match: true, ..default }],
+ vec![Cr { exact_name_match: true, is_local: true, ..default }],
+ vec![Cr {
+ exact_name_match: true,
+ type_match: Some(CompletionRelevanceTypeMatch::Exact),
+ ..default
+ }],
+ vec![Cr {
+ exact_name_match: true,
+ type_match: Some(CompletionRelevanceTypeMatch::Exact),
+ is_local: true,
+ ..default
+ }],
+ vec![Cr { postfix_match: Some(CompletionRelevancePostfixMatch::Exact), ..default }],
+ ];
+
+ check_relevance_score_ordered(expected_relevance_order);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
new file mode 100644
index 000000000..ae1a440d0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -0,0 +1,247 @@
+//! The `completions` crate provides utilities for generating completions of user input.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod completions;
+mod config;
+mod context;
+mod item;
+mod render;
+
+#[cfg(test)]
+mod tests;
+mod snippet;
+
+use ide_db::{
+ base_db::FilePosition,
+ helpers::mod_path_to_ast,
+ imports::{
+ import_assets::NameToImport,
+ insert_use::{self, ImportScope},
+ },
+ items_locator, RootDatabase,
+};
+use syntax::algo;
+use text_edit::TextEdit;
+
+use crate::{
+ completions::Completions,
+ context::{
+ CompletionAnalysis, CompletionContext, NameRefContext, NameRefKind, PathCompletionCtx,
+ PathKind,
+ },
+};
+
+pub use crate::{
+ config::{CallableSnippets, CompletionConfig},
+ item::{
+ CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
+ },
+ snippet::{Snippet, SnippetScope},
+};
+
+//FIXME: split the following feature into fine-grained features.
+
+// Feature: Magic Completions
+//
+// In addition to usual reference completion, rust-analyzer provides some ✨magic✨
+// completions as well:
+//
+// Keywords like `if`, `else`, `while`, `loop` are completed with braces, and the cursor
+// is placed at the appropriate position. Even though `if` is easy to type, you
+// still want to complete it, to get ` { }` for free! `return` is inserted with a
+// space or `;` depending on the return type of the function.
+//
+// When completing a function call, `()` are automatically inserted. If a function
+// takes arguments, the cursor is positioned inside the parentheses.
+//
+// There are postfix completions, which can be triggered by typing something like
+// `foo().if`. The word after `.` determines postfix completion. Possible variants are:
+//
+// - `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result`
+// - `expr.match` -> `match expr {}`
+// - `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result`
+// - `expr.ref` -> `&expr`
+// - `expr.refm` -> `&mut expr`
+// - `expr.let` -> `let $0 = expr;`
+// - `expr.letm` -> `let mut $0 = expr;`
+// - `expr.not` -> `!expr`
+// - `expr.dbg` -> `dbg!(expr)`
+// - `expr.dbgr` -> `dbg!(&expr)`
+// - `expr.call` -> `(expr)`
+//
+// There are also snippet completions:
+//
+// .Expressions
+// - `pd` -> `eprintln!(" = {:?}", );`
+// - `ppd` -> `eprintln!(" = {:#?}", );`
+//
+// .Items
+// - `tfn` -> `#[test] fn feature(){}`
+// - `tmod` ->
+// ```rust
+// #[cfg(test)]
+// mod tests {
+// use super::*;
+//
+// #[test]
+// fn test_name() {}
+// }
+// ```
+//
+// And the auto import completions, enabled with the `rust-analyzer.completion.autoimport.enable` setting and the corresponding LSP client capabilities.
+// Those are additional completion options that add the required `use` import automatically, drawn from all importable items in the project
+// and fuzzy matched against the completion input.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif[]
+
+/// Main entry point for completion. We run completion as a two-phase process.
+///
+/// First, we look at the position and collect a so-called `CompletionContext`.
+/// This is a somewhat messy process, because, during completion, the syntax tree is
+/// incomplete and can look really weird.
+///
+/// Once the context is collected, we run a series of completion routines which
+/// look at the context and produce completion items. One subtlety about this
+/// phase is that the completion engine should not filter by the substring which is
+/// already present; it should give all possible variants for the identifier at
+/// the caret. In other words, for
+///
+/// ```no_run
+/// fn f() {
+/// let foo = 92;
+/// let _ = bar$0
+/// }
+/// ```
+///
+/// `foo` *should* be present among the completion variants. Filtering by
+/// identifier prefix/fuzzy match should be done higher in the stack, together
+/// with ordering of completions (currently this is done by the client).
+///
+/// # Speculative Completion Problem
+///
+/// There's a curious unsolved problem in the current implementation. Often, you
+/// want to compute completions on a *slightly different* text document.
+///
+/// In the simplest case, when the code looks like `let x = `, you want to
+/// insert a fake identifier to get a better syntax tree: `let x = complete_me`.
+///
+/// We do this in `CompletionContext`, and it works OK-enough for *syntax*
+/// analysis. However, we might want to, eg, ask for the type of the `complete_me`
+/// variable, and that's where our current infrastructure breaks down. salsa
+/// doesn't allow such "phantom" inputs.
+///
+/// Another case where this would be instrumental is macro expansion. We want to
+/// insert a fake ident and re-expand code. There's `expand_speculative` as a
+/// work-around for this.
+///
+/// A different use-case is completion of injection (examples and links in doc
+/// comments). When computing completion for a path in a doc-comment, you want
+/// to inject a fake path expression into the item being documented and complete
+/// that.
+///
+/// IntelliJ has CodeFragment/Context infrastructure for that. You can create a
+/// temporary PSI node, and say that the context ("parent") of this node is some
+/// existing node. Asking for, eg, type of this `CodeFragment` node works
+/// correctly, as the underlying infrastructure makes use of contexts to do
+/// analysis.
+pub fn completions(
+ db: &RootDatabase,
+ config: &CompletionConfig,
+ position: FilePosition,
+ trigger_character: Option<char>,
+) -> Option<Vec<CompletionItem>> {
+ let (ctx, analysis) = &CompletionContext::new(db, position, config)?;
+ let mut completions = Completions::default();
+
+ // prevent `(` from triggering unwanted completion noise
+ if trigger_character == Some('(') {
+ if let CompletionAnalysis::NameRef(NameRefContext { kind, .. }) = &analysis {
+ if let NameRefKind::Path(
+ path_ctx @ PathCompletionCtx { kind: PathKind::Vis { has_in_token }, .. },
+ ) = kind
+ {
+ completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token);
+ }
+ }
+ // prevent `(` from triggering unwanted completion noise
+ return Some(completions.into());
+ }
+
+ {
+ let acc = &mut completions;
+
+ match &analysis {
+ CompletionAnalysis::Name(name_ctx) => completions::complete_name(acc, ctx, name_ctx),
+ CompletionAnalysis::NameRef(name_ref_ctx) => {
+ completions::complete_name_ref(acc, ctx, name_ref_ctx)
+ }
+ CompletionAnalysis::Lifetime(lifetime_ctx) => {
+ completions::lifetime::complete_label(acc, ctx, lifetime_ctx);
+ completions::lifetime::complete_lifetime(acc, ctx, lifetime_ctx);
+ }
+ CompletionAnalysis::String { original, expanded: Some(expanded) } => {
+ completions::extern_abi::complete_extern_abi(acc, ctx, expanded);
+ completions::format_string::format_string(acc, ctx, original, expanded);
+ }
+ CompletionAnalysis::UnexpandedAttrTT {
+ colon_prefix,
+ fake_attribute_under_caret: Some(attr),
+ } => {
+ completions::attribute::complete_known_attribute_input(
+ acc,
+ ctx,
+ colon_prefix,
+ attr,
+ );
+ }
+ CompletionAnalysis::UnexpandedAttrTT { .. } | CompletionAnalysis::String { .. } => (),
+ }
+ }
+
+ Some(completions.into())
+}
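A hedged sketch of driving this entry point from outside the crate; `db`, `config`, and `position` are assumed to come from the host IDE layer (e.g. the `ide` crate), so this is not a standalone program:

```rust
use ide_completion::{completions, CompletionConfig, CompletionItem};
use ide_db::{base_db::FilePosition, RootDatabase};

fn completions_at(
    db: &RootDatabase,
    config: &CompletionConfig,
    position: FilePosition,
) -> Vec<CompletionItem> {
    // Passing `None` as the trigger character means completion was requested explicitly,
    // so the `(`-specific early return above does not apply.
    completions(db, config, position, None).unwrap_or_default()
}
```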
+
+/// Resolves additional completion data at the position given.
+/// This is used for import insertion done via completions like flyimport and custom user snippets.
+pub fn resolve_completion_edits(
+ db: &RootDatabase,
+ config: &CompletionConfig,
+ FilePosition { file_id, offset }: FilePosition,
+ imports: impl IntoIterator<Item = (String, String)>,
+) -> Option<Vec<TextEdit>> {
+ let _p = profile::span("resolve_completion_edits");
+ let sema = hir::Semantics::new(db);
+
+ let original_file = sema.parse(file_id);
+ let original_token =
+ syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?;
+ let position_for_import = &original_token.parent()?;
+ let scope = ImportScope::find_insert_use_container(position_for_import, &sema)?;
+
+ let current_module = sema.scope(position_for_import)?.module();
+ let current_crate = current_module.krate();
+ let new_ast = scope.clone_for_update();
+ let mut import_insert = TextEdit::builder();
+
+ imports.into_iter().for_each(|(full_import_path, imported_name)| {
+ let items_with_name = items_locator::items_with_name(
+ &sema,
+ current_crate,
+ NameToImport::exact_case_sensitive(imported_name),
+ items_locator::AssocItemSearch::Include,
+ Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ );
+ let import = items_with_name
+ .filter_map(|candidate| {
+ current_module.find_use_path_prefixed(db, candidate, config.insert_use.prefix_kind)
+ })
+ .find(|mod_path| mod_path.to_string() == full_import_path);
+ if let Some(import_path) = import {
+ insert_use::insert_use(&new_ast, mod_path_to_ast(&import_path), &config.insert_use);
+ }
+ });
+
+ algo::diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert);
+ Some(vec![import_insert.finish()])
+}
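Similarly, a hedged sketch of the lazy-resolution path above, asking for the `use` edit of a flyimport-style completion (the import pair is illustrative):

```rust
use ide_completion::{resolve_completion_edits, CompletionConfig};
use ide_db::{base_db::FilePosition, RootDatabase};
use text_edit::TextEdit;

fn resolve_import_edit(
    db: &RootDatabase,
    config: &CompletionConfig,
    position: FilePosition,
) -> Vec<TextEdit> {
    resolve_completion_edits(
        db,
        config,
        position,
        // (full path the item was suggested under, name that was completed)
        [("std::collections::HashMap".to_owned(), "HashMap".to_owned())],
    )
    .unwrap_or_default()
}
```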
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
new file mode 100644
index 000000000..946134b0f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -0,0 +1,1910 @@
+//! The `render` module provides utilities for rendering completion suggestions
+//! into code pieces that will be presented to the user.
+
+pub(crate) mod macro_;
+pub(crate) mod function;
+pub(crate) mod const_;
+pub(crate) mod pattern;
+pub(crate) mod type_alias;
+pub(crate) mod variant;
+pub(crate) mod union_literal;
+pub(crate) mod literal;
+
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
+use ide_db::{
+ helpers::item_name, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind,
+};
+use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
+
+use crate::{
+ context::{DotAccess, PathCompletionCtx, PathKind, PatternContext},
+ item::{Builder, CompletionRelevanceTypeMatch},
+ render::{
+ function::render_fn,
+ literal::render_variant_lit,
+ macro_::{render_macro, render_macro_pat},
+ },
+ CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
+};
+/// Interface for data and methods required for rendering items.
+#[derive(Debug, Clone)]
+pub(crate) struct RenderContext<'a> {
+ completion: &'a CompletionContext<'a>,
+ is_private_editable: bool,
+ import_to_add: Option<LocatedImport>,
+}
+
+impl<'a> RenderContext<'a> {
+ pub(crate) fn new(completion: &'a CompletionContext<'a>) -> RenderContext<'a> {
+ RenderContext { completion, is_private_editable: false, import_to_add: None }
+ }
+
+ pub(crate) fn private_editable(mut self, private_editable: bool) -> Self {
+ self.is_private_editable = private_editable;
+ self
+ }
+
+ pub(crate) fn import_to_add(mut self, import_to_add: Option<LocatedImport>) -> Self {
+ self.import_to_add = import_to_add;
+ self
+ }
+
+ fn snippet_cap(&self) -> Option<SnippetCap> {
+ self.completion.config.snippet_cap
+ }
+
+ fn db(&self) -> &'a RootDatabase {
+ self.completion.db
+ }
+
+ fn source_range(&self) -> TextRange {
+ self.completion.source_range()
+ }
+
+ fn completion_relevance(&self) -> CompletionRelevance {
+ CompletionRelevance {
+ is_private_editable: self.is_private_editable,
+ requires_import: self.import_to_add.is_some(),
+ ..Default::default()
+ }
+ }
+
+ fn is_immediately_after_macro_bang(&self) -> bool {
+ self.completion.token.kind() == SyntaxKind::BANG
+ && self
+ .completion
+ .token
+ .parent()
+ .map_or(false, |it| it.kind() == SyntaxKind::MACRO_CALL)
+ }
+
+ fn is_deprecated(&self, def: impl HasAttrs) -> bool {
+ let attrs = def.attrs(self.db());
+ attrs.by_key("deprecated").exists()
+ }
+
+ fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
+ let db = self.db();
+ let assoc = match as_assoc_item.as_assoc_item(db) {
+ Some(assoc) => assoc,
+ None => return false,
+ };
+
+ let is_assoc_deprecated = match assoc {
+ hir::AssocItem::Function(it) => self.is_deprecated(it),
+ hir::AssocItem::Const(it) => self.is_deprecated(it),
+ hir::AssocItem::TypeAlias(it) => self.is_deprecated(it),
+ };
+ is_assoc_deprecated
+ || assoc
+ .containing_trait_or_trait_impl(db)
+ .map(|trait_| self.is_deprecated(trait_))
+ .unwrap_or(false)
+ }
+
+ // FIXME: remove this
+ fn docs(&self, def: impl HasAttrs) -> Option<hir::Documentation> {
+ def.docs(self.db())
+ }
+}
+
+pub(crate) fn render_field(
+ ctx: RenderContext<'_>,
+ dot_access: &DotAccess,
+ receiver: Option<hir::Name>,
+ field: hir::Field,
+ ty: &hir::Type,
+) -> CompletionItem {
+ let is_deprecated = ctx.is_deprecated(field);
+ let name = field.name(ctx.db());
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let mut item = CompletionItem::new(
+ SymbolKind::Field,
+ ctx.source_range(),
+ field_with_receiver(receiver.as_ref(), &name),
+ );
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(ctx.completion, ty),
+ exact_name_match: compute_exact_name_match(ctx.completion, name.as_str()),
+ ..CompletionRelevance::default()
+ });
+ item.detail(ty.display(ctx.db()).to_string())
+ .set_documentation(field.docs(ctx.db()))
+ .set_deprecated(is_deprecated)
+ .lookup_by(name.clone());
+ item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name));
+ if let Some(receiver) = &dot_access.receiver {
+ if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
+ if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
+ item.ref_match(ref_match, original.syntax().text_range().start());
+ }
+ }
+ }
+ item.build()
+}
+
+fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr {
+ receiver
+ .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into())
+}
+
+pub(crate) fn render_tuple_field(
+ ctx: RenderContext<'_>,
+ receiver: Option<hir::Name>,
+ field: usize,
+ ty: &hir::Type,
+) -> CompletionItem {
+ let mut item = CompletionItem::new(
+ SymbolKind::Field,
+ ctx.source_range(),
+ field_with_receiver(receiver.as_ref(), &field.to_string()),
+ );
+ item.detail(ty.display(ctx.db()).to_string()).lookup_by(field.to_string());
+ item.build()
+}
+
+pub(crate) fn render_type_inference(
+ ty_string: String,
+ ctx: &CompletionContext<'_>,
+) -> CompletionItem {
+ let mut builder =
+ CompletionItem::new(CompletionItemKind::InferredType, ctx.source_range(), ty_string);
+ builder.set_relevance(CompletionRelevance { is_definite: true, ..Default::default() });
+ builder.build()
+}
+
+pub(crate) fn render_path_resolution(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: hir::Name,
+ resolution: ScopeDef,
+) -> Builder {
+ render_resolution_path(ctx, path_ctx, local_name, None, resolution)
+}
+
+pub(crate) fn render_pattern_resolution(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ local_name: hir::Name,
+ resolution: ScopeDef,
+) -> Builder {
+ render_resolution_pat(ctx, pattern_ctx, local_name, None, resolution)
+}
+
+pub(crate) fn render_resolution_with_import(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ import_edit: LocatedImport,
+) -> Option<Builder> {
+ let resolution = ScopeDef::from(import_edit.original_item);
+ let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
+
+ Some(render_resolution_path(ctx, path_ctx, local_name, Some(import_edit), resolution))
+}
+
+pub(crate) fn render_resolution_with_import_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ import_edit: LocatedImport,
+) -> Option<Builder> {
+ let resolution = ScopeDef::from(import_edit.original_item);
+ let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
+ Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
+}
+
+fn scope_def_to_name(
+ resolution: ScopeDef,
+ ctx: &RenderContext<'_>,
+ import_edit: &LocatedImport,
+) -> Option<hir::Name> {
+ Some(match resolution {
+ ScopeDef::ModuleDef(hir::ModuleDef::Function(f)) => f.name(ctx.completion.db),
+ ScopeDef::ModuleDef(hir::ModuleDef::Const(c)) => c.name(ctx.completion.db)?,
+ ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(t)) => t.name(ctx.completion.db),
+ _ => item_name(ctx.db(), import_edit.original_item)?,
+ })
+}
+
+fn render_resolution_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ local_name: hir::Name,
+ import_to_add: Option<LocatedImport>,
+ resolution: ScopeDef,
+) -> Builder {
+ let _p = profile::span("render_resolution");
+ use hir::ModuleDef::*;
+
+ match resolution {
+ ScopeDef::ModuleDef(Macro(mac)) => {
+ let ctx = ctx.import_to_add(import_to_add);
+ return render_macro_pat(ctx, pattern_ctx, local_name, mac);
+ }
+ _ => (),
+ }
+
+ render_resolution_simple_(ctx, &local_name, import_to_add, resolution)
+}
+
+fn render_resolution_path(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: hir::Name,
+ import_to_add: Option<LocatedImport>,
+ resolution: ScopeDef,
+) -> Builder {
+ let _p = profile::span("render_resolution");
+ use hir::ModuleDef::*;
+
+ match resolution {
+ ScopeDef::ModuleDef(Macro(mac)) => {
+ let ctx = ctx.import_to_add(import_to_add);
+ return render_macro(ctx, path_ctx, local_name, mac);
+ }
+ ScopeDef::ModuleDef(Function(func)) => {
+ let ctx = ctx.import_to_add(import_to_add);
+ return render_fn(ctx, path_ctx, Some(local_name), func);
+ }
+ ScopeDef::ModuleDef(Variant(var)) => {
+ let ctx = ctx.clone().import_to_add(import_to_add.clone());
+ if let Some(item) =
+ render_variant_lit(ctx, path_ctx, Some(local_name.clone()), var, None)
+ {
+ return item;
+ }
+ }
+ _ => (),
+ }
+
+ let completion = ctx.completion;
+ let cap = ctx.snippet_cap();
+ let db = completion.db;
+ let config = completion.config;
+
+ let name = local_name.to_smol_str();
+ let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
+ if local_name.escaped().is_escaped() {
+ item.insert_text(local_name.escaped().to_smol_str());
+ }
+ // Add `<>` for generic types
+ let type_path_no_ty_args = matches!(
+ path_ctx,
+ PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. }
+ ) && config.callable.is_some();
+ if type_path_no_ty_args {
+ if let Some(cap) = cap {
+ let has_non_default_type_params = match resolution {
+ ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db),
+ ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => {
+ it.has_non_default_type_params(db)
+ }
+ _ => false,
+ };
+
+ if has_non_default_type_params {
+ cov_mark::hit!(inserts_angle_brackets_for_generics);
+ item.lookup_by(name.clone())
+ .label(SmolStr::from_iter([&name, "<…>"]))
+ .trigger_call_info()
+ .insert_snippet(cap, format!("{}<$0>", local_name.escaped()));
+ }
+ }
+ }
+ if let ScopeDef::Local(local) = resolution {
+ let ty = local.ty(db);
+ if !ty.is_unknown() {
+ item.detail(ty.display(db).to_string());
+ }
+
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(completion, &ty),
+ exact_name_match: compute_exact_name_match(completion, &name),
+ is_local: true,
+ ..CompletionRelevance::default()
+ });
+
+ if let Some(ref_match) = compute_ref_match(completion, &ty) {
+ item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
+ }
+ };
+ item
+}
+
+fn render_resolution_simple_(
+ ctx: RenderContext<'_>,
+ local_name: &hir::Name,
+ import_to_add: Option<LocatedImport>,
+ resolution: ScopeDef,
+) -> Builder {
+ let _p = profile::span("render_resolution");
+
+ let db = ctx.db();
+ let ctx = ctx.import_to_add(import_to_add);
+ let kind = res_to_kind(resolution);
+
+ let mut item = CompletionItem::new(kind, ctx.source_range(), local_name.to_smol_str());
+ item.set_relevance(ctx.completion_relevance())
+ .set_documentation(scope_def_docs(db, resolution))
+ .set_deprecated(scope_def_is_deprecated(&ctx, resolution));
+
+ if let Some(import_to_add) = ctx.import_to_add {
+ item.add_import(import_to_add);
+ }
+ item
+}
+
+fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
+ use hir::ModuleDef::*;
+ match resolution {
+ ScopeDef::Unknown => CompletionItemKind::UnresolvedReference,
+ ScopeDef::ModuleDef(Function(_)) => CompletionItemKind::SymbolKind(SymbolKind::Function),
+ ScopeDef::ModuleDef(Variant(_)) => CompletionItemKind::SymbolKind(SymbolKind::Variant),
+ ScopeDef::ModuleDef(Macro(_)) => CompletionItemKind::SymbolKind(SymbolKind::Macro),
+ ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::SymbolKind(SymbolKind::Module),
+ ScopeDef::ModuleDef(Adt(adt)) => CompletionItemKind::SymbolKind(match adt {
+ hir::Adt::Struct(_) => SymbolKind::Struct,
+ hir::Adt::Union(_) => SymbolKind::Union,
+ hir::Adt::Enum(_) => SymbolKind::Enum,
+ }),
+ ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::SymbolKind(SymbolKind::Const),
+ ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::SymbolKind(SymbolKind::Static),
+ ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::SymbolKind(SymbolKind::Trait),
+ ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::SymbolKind(SymbolKind::TypeAlias),
+ ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType,
+ ScopeDef::GenericParam(param) => CompletionItemKind::SymbolKind(match param {
+ hir::GenericParam::TypeParam(_) => SymbolKind::TypeParam,
+ hir::GenericParam::ConstParam(_) => SymbolKind::ConstParam,
+ hir::GenericParam::LifetimeParam(_) => SymbolKind::LifetimeParam,
+ }),
+ ScopeDef::Local(..) => CompletionItemKind::SymbolKind(SymbolKind::Local),
+ ScopeDef::Label(..) => CompletionItemKind::SymbolKind(SymbolKind::Label),
+ ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => {
+ CompletionItemKind::SymbolKind(SymbolKind::SelfParam)
+ }
+ }
+}
+
+fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<hir::Documentation> {
+ use hir::ModuleDef::*;
+ match resolution {
+ ScopeDef::ModuleDef(Module(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Adt(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Variant(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Const(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Static(it)) => it.docs(db),
+ ScopeDef::ModuleDef(Trait(it)) => it.docs(db),
+ ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(db),
+ _ => None,
+ }
+}
+
+fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> bool {
+ match resolution {
+ ScopeDef::ModuleDef(it) => ctx.is_deprecated_assoc_item(it),
+ ScopeDef::GenericParam(it) => ctx.is_deprecated(it),
+ ScopeDef::AdtSelfType(it) => ctx.is_deprecated(it),
+ _ => false,
+ }
+}
+
+fn compute_type_match(
+ ctx: &CompletionContext<'_>,
+ completion_ty: &hir::Type,
+) -> Option<CompletionRelevanceTypeMatch> {
+ let expected_type = ctx.expected_type.as_ref()?;
+
+    // We never consider the unit type an exact type match, since that is
+    // almost never meaningful to the user.
+ if expected_type.is_unit() {
+ return None;
+ }
+
+ if completion_ty == expected_type {
+ Some(CompletionRelevanceTypeMatch::Exact)
+ } else if expected_type.could_unify_with(ctx.db, completion_ty) {
+ Some(CompletionRelevanceTypeMatch::CouldUnify)
+ } else {
+ None
+ }
+}
+
+fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
+ ctx.expected_name.as_ref().map_or(false, |name| name.text() == completion_name)
+}
+
+fn compute_ref_match(
+ ctx: &CompletionContext<'_>,
+ completion_ty: &hir::Type,
+) -> Option<hir::Mutability> {
+ let expected_type = ctx.expected_type.as_ref()?;
+ if completion_ty != expected_type {
+ let expected_type_without_ref = expected_type.remove_ref()?;
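+        // If the completion's type autoderefs to the expected type's pointee,
+        // offer a `&`/`&mut` borrow of the completion instead of discarding the match.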
+ if completion_ty.autoderef(ctx.db).any(|deref_ty| deref_ty == expected_type_without_ref) {
+ cov_mark::hit!(suggest_ref);
+ let mutability = if expected_type.is_mutable_reference() {
+ hir::Mutability::Mut
+ } else {
+ hir::Mutability::Shared
+ };
+ return Some(mutability);
+ };
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use std::cmp;
+
+ use expect_test::{expect, Expect};
+ use ide_db::SymbolKind;
+ use itertools::Itertools;
+
+ use crate::{
+ item::CompletionRelevanceTypeMatch,
+ tests::{check_edit, do_completion, get_all_items, TEST_CONFIG},
+ CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
+ };
+
+ #[track_caller]
+ fn check(ra_fixture: &str, kind: impl Into<CompletionItemKind>, expect: Expect) {
+ let actual = do_completion(ra_fixture, kind.into());
+ expect.assert_debug_eq(&actual);
+ }
+
+ #[track_caller]
+ fn check_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
+ let actual: Vec<_> =
+ kinds.iter().flat_map(|&kind| do_completion(ra_fixture, kind)).collect();
+ expect.assert_debug_eq(&actual);
+ }
+
+ #[track_caller]
+ fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
+ let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
+ actual.retain(|it| kinds.contains(&it.kind()));
+ actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
+ check_relevance_(actual, expect);
+ }
+
+ #[track_caller]
+ fn check_relevance(ra_fixture: &str, expect: Expect) {
+ let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
+ actual.retain(|it| it.kind() != CompletionItemKind::Snippet);
+ actual.retain(|it| it.kind() != CompletionItemKind::Keyword);
+ actual.retain(|it| it.kind() != CompletionItemKind::BuiltinType);
+ actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
+ check_relevance_(actual, expect);
+ }
+
+ #[track_caller]
+ fn check_relevance_(actual: Vec<CompletionItem>, expect: Expect) {
+ let actual = actual
+ .into_iter()
+ .flat_map(|it| {
+ let mut items = vec![];
+
+ let tag = it.kind().tag();
+ let relevance = display_relevance(it.relevance());
+ items.push(format!("{} {} {}\n", tag, it.label(), relevance));
+
+ if let Some((mutability, _offset, relevance)) = it.ref_match() {
+ let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label());
+ let relevance = display_relevance(relevance);
+
+ items.push(format!("{} {} {}\n", tag, label, relevance));
+ }
+
+ items
+ })
+ .collect::<String>();
+
+ expect.assert_eq(&actual);
+
+ fn display_relevance(relevance: CompletionRelevance) -> String {
+ let relevance_factors = vec![
+ (relevance.type_match == Some(CompletionRelevanceTypeMatch::Exact), "type"),
+ (
+ relevance.type_match == Some(CompletionRelevanceTypeMatch::CouldUnify),
+ "type_could_unify",
+ ),
+ (relevance.exact_name_match, "name"),
+ (relevance.is_local, "local"),
+ (
+ relevance.postfix_match == Some(CompletionRelevancePostfixMatch::Exact),
+ "snippet",
+ ),
+ (relevance.is_op_method, "op_method"),
+ (relevance.requires_import, "requires_import"),
+ ]
+ .into_iter()
+ .filter_map(|(cond, desc)| if cond { Some(desc) } else { None })
+ .join("+");
+
+ format!("[{}]", relevance_factors)
+ }
+ }
+
+ #[test]
+ fn enum_detail_includes_record_fields() {
+ check(
+ r#"
+enum Foo { Foo { x: i32, y: i32 } }
+
+fn main() { Foo::Fo$0 }
+"#,
+ SymbolKind::Variant,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "Foo {…}",
+ source_range: 54..56,
+ delete: 54..56,
+ insert: "Foo { x: ${1:()}, y: ${2:()} }$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "Foo { x: i32, y: i32 }",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_detail_includes_tuple_fields() {
+ check(
+ r#"
+enum Foo { Foo (i32, i32) }
+
+fn main() { Foo::Fo$0 }
+"#,
+ SymbolKind::Variant,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "Foo(…)",
+ source_range: 46..48,
+ delete: 46..48,
+ insert: "Foo(${1:()}, ${2:()})$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "Foo(i32, i32)",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fn_detail_includes_args_and_return_type() {
+ check(
+ r#"
+fn foo<T>(a: u32, b: u32, t: T) -> (u32, T) { (a, t) }
+
+fn main() { fo$0 }
+"#,
+ SymbolKind::Function,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "foo(…)",
+ source_range: 68..70,
+ delete: 68..70,
+ insert: "foo(${1:a}, ${2:b}, ${3:t})$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "foo",
+ detail: "fn(u32, u32, T) -> (u32, T)",
+ trigger_call_info: true,
+ },
+ CompletionItem {
+ label: "main()",
+ source_range: 68..70,
+ delete: 68..70,
+ insert: "main()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "main",
+ detail: "fn()",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_detail_just_name_for_unit() {
+ check(
+ r#"
+enum Foo { Foo }
+
+fn main() { Foo::Fo$0 }
+"#,
+ SymbolKind::Variant,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "Foo",
+ source_range: 35..37,
+ delete: 35..37,
+ insert: "Foo$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "Foo",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn lookup_enums_by_two_qualifiers() {
+ check_kinds(
+ r#"
+mod m {
+ pub enum Spam { Foo, Bar(i32) }
+}
+fn main() { let _: m::Spam = S$0 }
+"#,
+ &[
+ CompletionItemKind::SymbolKind(SymbolKind::Function),
+ CompletionItemKind::SymbolKind(SymbolKind::Module),
+ CompletionItemKind::SymbolKind(SymbolKind::Variant),
+ ],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "main()",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "main()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "main",
+ detail: "fn()",
+ },
+ CompletionItem {
+ label: "m",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "m",
+ kind: SymbolKind(
+ Module,
+ ),
+ },
+ CompletionItem {
+ label: "m::Spam::Bar(…)",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "m::Spam::Bar(${1:()})$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ lookup: "Spam::Bar(…)",
+ detail: "m::Spam::Bar(i32)",
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ Exact,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ CompletionItem {
+ label: "m::Spam::Foo",
+ source_range: 75..76,
+ delete: 75..76,
+ insert: "m::Spam::Foo$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ lookup: "Spam::Foo",
+ detail: "m::Spam::Foo",
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ Exact,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
+ fn sets_deprecated_flag_in_items() {
+ check(
+ r#"
+#[deprecated]
+fn something_deprecated() {}
+
+fn main() { som$0 }
+"#,
+ SymbolKind::Function,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "main()",
+ source_range: 56..59,
+ delete: 56..59,
+ insert: "main()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "main",
+ detail: "fn()",
+ },
+ CompletionItem {
+ label: "something_deprecated()",
+ source_range: 56..59,
+ delete: 56..59,
+ insert: "something_deprecated()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "something_deprecated",
+ detail: "fn()",
+ deprecated: true,
+ },
+ ]
+ "#]],
+ );
+
+ check(
+ r#"
+struct A { #[deprecated] the_field: u32 }
+fn foo() { A { the$0 } }
+"#,
+ SymbolKind::Field,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "the_field",
+ source_range: 57..60,
+ delete: 57..60,
+ insert: "the_field",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "u32",
+ deprecated: true,
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ CouldUnify,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn renders_docs() {
+ check_kinds(
+ r#"
+struct S {
+ /// Field docs
+ foo:
+}
+impl S {
+ /// Method docs
+ fn bar(self) { self.$0 }
+}"#,
+ &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "bar()",
+ source_range: 94..94,
+ delete: 94..94,
+ insert: "bar()$0",
+ kind: Method,
+ lookup: "bar",
+ detail: "fn(self)",
+ documentation: Documentation(
+ "Method docs",
+ ),
+ },
+ CompletionItem {
+ label: "foo",
+ source_range: 94..94,
+ delete: 94..94,
+ insert: "foo",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "{unknown}",
+ documentation: Documentation(
+ "Field docs",
+ ),
+ },
+ ]
+ "#]],
+ );
+
+ check_kinds(
+ r#"
+use self::my$0;
+
+/// mod docs
+mod my { }
+
+/// enum docs
+enum E {
+ /// variant docs
+ V
+}
+use self::E::*;
+"#,
+ &[
+ CompletionItemKind::SymbolKind(SymbolKind::Module),
+ CompletionItemKind::SymbolKind(SymbolKind::Variant),
+ CompletionItemKind::SymbolKind(SymbolKind::Enum),
+ ],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "my",
+ source_range: 10..12,
+ delete: 10..12,
+ insert: "my",
+ kind: SymbolKind(
+ Module,
+ ),
+ documentation: Documentation(
+ "mod docs",
+ ),
+ },
+ CompletionItem {
+ label: "V",
+ source_range: 10..12,
+ delete: 10..12,
+ insert: "V$0",
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: "V",
+ documentation: Documentation(
+ "variant docs",
+ ),
+ },
+ CompletionItem {
+ label: "E",
+ source_range: 10..12,
+ delete: 10..12,
+ insert: "E",
+ kind: SymbolKind(
+ Enum,
+ ),
+ documentation: Documentation(
+ "enum docs",
+ ),
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
+ fn dont_render_attrs() {
+ check(
+ r#"
+struct S;
+impl S {
+ #[inline]
+ fn the_method(&self) { }
+}
+fn foo(s: S) { s.$0 }
+"#,
+ CompletionItemKind::Method,
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "the_method()",
+ source_range: 81..81,
+ delete: 81..81,
+ insert: "the_method()$0",
+ kind: Method,
+ lookup: "the_method",
+ detail: "fn(&self)",
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
+ fn no_call_parens_if_fn_ptr_needed() {
+ cov_mark::check!(no_call_parens_if_fn_ptr_needed);
+ check_edit(
+ "foo",
+ r#"
+fn foo(foo: u8, bar: u8) {}
+struct ManualVtable { f: fn(u8, u8) }
+
+fn main() -> ManualVtable {
+ ManualVtable { f: f$0 }
+}
+"#,
+ r#"
+fn foo(foo: u8, bar: u8) {}
+struct ManualVtable { f: fn(u8, u8) }
+
+fn main() -> ManualVtable {
+ ManualVtable { f: foo }
+}
+"#,
+ );
+ check_edit(
+ "type",
+ r#"
+struct RawIdentTable { r#type: u32 }
+
+fn main() -> RawIdentTable {
+ RawIdentTable { t$0: 42 }
+}
+"#,
+ r#"
+struct RawIdentTable { r#type: u32 }
+
+fn main() -> RawIdentTable {
+ RawIdentTable { r#type: 42 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_parens_in_use_item() {
+ check_edit(
+ "foo",
+ r#"
+mod m { pub fn foo() {} }
+use crate::m::f$0;
+"#,
+ r#"
+mod m { pub fn foo() {} }
+use crate::m::foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn no_parens_in_call() {
+ check_edit(
+ "foo",
+ r#"
+fn foo(x: i32) {}
+fn main() { f$0(); }
+"#,
+ r#"
+fn foo(x: i32) {}
+fn main() { foo(); }
+"#,
+ );
+ check_edit(
+ "foo",
+ r#"
+struct Foo;
+impl Foo { fn foo(&self){} }
+fn f(foo: &Foo) { foo.f$0(); }
+"#,
+ r#"
+struct Foo;
+impl Foo { fn foo(&self){} }
+fn f(foo: &Foo) { foo.foo(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn inserts_angle_brackets_for_generics() {
+ cov_mark::check!(inserts_angle_brackets_for_generics);
+ check_edit(
+ "Vec",
+ r#"
+struct Vec<T> {}
+fn foo(xs: Ve$0)
+"#,
+ r#"
+struct Vec<T> {}
+fn foo(xs: Vec<$0>)
+"#,
+ );
+ check_edit(
+ "Vec",
+ r#"
+type Vec<T> = (T,);
+fn foo(xs: Ve$0)
+"#,
+ r#"
+type Vec<T> = (T,);
+fn foo(xs: Vec<$0>)
+"#,
+ );
+ check_edit(
+ "Vec",
+ r#"
+struct Vec<T = i128> {}
+fn foo(xs: Ve$0)
+"#,
+ r#"
+struct Vec<T = i128> {}
+fn foo(xs: Vec)
+"#,
+ );
+ check_edit(
+ "Vec",
+ r#"
+struct Vec<T> {}
+fn foo(xs: Ve$0<i128>)
+"#,
+ r#"
+struct Vec<T> {}
+fn foo(xs: Vec<i128>)
+"#,
+ );
+ }
+
+ #[test]
+ fn active_param_relevance() {
+ check_relevance(
+ r#"
+struct S { foo: i64, bar: u32, baz: u32 }
+fn test(bar: u32) { }
+fn foo(s: S) { test(s.$0) }
+"#,
+ expect![[r#"
+ fd bar [type+name]
+ fd baz [type]
+ fd foo []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn record_field_relevances() {
+ check_relevance(
+ r#"
+struct A { foo: i64, bar: u32, baz: u32 }
+struct B { x: (), y: f32, bar: u32 }
+fn foo(a: A) { B { bar: a.$0 }; }
+"#,
+ expect![[r#"
+ fd bar [type+name]
+ fd baz [type]
+ fd foo []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn record_field_and_call_relevances() {
+ check_relevance(
+ r#"
+struct A { foo: i64, bar: u32, baz: u32 }
+struct B { x: (), y: f32, bar: u32 }
+fn f(foo: i64) { }
+fn foo(a: A) { B { bar: f(a.$0) }; }
+"#,
+ expect![[r#"
+ fd foo [type+name]
+ fd bar []
+ fd baz []
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct A { foo: i64, bar: u32, baz: u32 }
+struct B { x: (), y: f32, bar: u32 }
+fn f(foo: i64) { }
+fn foo(a: A) { f(B { bar: a.$0 }); }
+"#,
+ expect![[r#"
+ fd bar [type+name]
+ fd baz [type]
+ fd foo []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn prioritize_exact_ref_match() {
+ check_relevance(
+ r#"
+struct WorldSnapshot { _f: () };
+fn go(world: &WorldSnapshot) { go(w$0) }
+"#,
+ expect![[r#"
+ lc world [type+name+local]
+ st WorldSnapshot {…} []
+ st &WorldSnapshot {…} [type]
+ st WorldSnapshot []
+ fn go(…) []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn too_many_arguments() {
+ cov_mark::check!(too_many_arguments);
+ check_relevance(
+ r#"
+struct Foo;
+fn f(foo: &Foo) { f(foo, w$0) }
+"#,
+ expect![[r#"
+ lc foo [local]
+ st Foo []
+ fn f(…) []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn score_fn_type_and_name_match() {
+ check_relevance(
+ r#"
+struct A { bar: u8 }
+fn baz() -> u8 { 0 }
+fn bar() -> u8 { 0 }
+fn f() { A { bar: b$0 }; }
+"#,
+ expect![[r#"
+ fn bar() [type+name]
+ fn baz() [type]
+ st A []
+ fn f() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn score_method_type_and_name_match() {
+ check_relevance(
+ r#"
+fn baz(aaa: u32){}
+struct Foo;
+impl Foo {
+fn aaa(&self) -> u32 { 0 }
+fn bbb(&self) -> u32 { 0 }
+fn ccc(&self) -> u64 { 0 }
+}
+fn f() {
+ baz(Foo.$0
+}
+"#,
+ expect![[r#"
+ me aaa() [type+name]
+ me bbb() [type]
+ me ccc() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn score_method_name_match_only() {
+ check_relevance(
+ r#"
+fn baz(aaa: u32){}
+struct Foo;
+impl Foo {
+fn aaa(&self) -> u64 { 0 }
+}
+fn f() {
+ baz(Foo.$0
+}
+"#,
+ expect![[r#"
+ me aaa() [name]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn suggest_ref_mut() {
+ cov_mark::check!(suggest_ref);
+ check_relevance(
+ r#"
+struct S;
+fn foo(s: &mut S) {}
+fn main() {
+ let mut s = S;
+ foo($0);
+}
+ "#,
+ expect![[r#"
+ lc s [name+local]
+ lc &mut s [type+name+local]
+ st S []
+ st &mut S [type]
+ st S []
+ fn foo(…) []
+ fn main() []
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct S;
+fn foo(s: &mut S) {}
+fn main() {
+ let mut s = S;
+ foo(&mut $0);
+}
+ "#,
+ expect![[r#"
+ lc s [type+name+local]
+ st S [type]
+ st S []
+ fn foo(…) []
+ fn main() []
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct S;
+fn foo(s: &mut S) {}
+fn main() {
+ let mut ssss = S;
+ foo(&mut s$0);
+}
+ "#,
+ expect![[r#"
+ lc ssss [type+local]
+ st S [type]
+ st S []
+ fn foo(…) []
+ fn main() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn suggest_deref() {
+ check_relevance(
+ r#"
+//- minicore: deref
+struct S;
+struct T(S);
+
+impl core::ops::Deref for T {
+ type Target = S;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn foo(s: &S) {}
+
+fn main() {
+ let t = T(S);
+ let m = 123;
+
+ foo($0);
+}
+ "#,
+ expect![[r#"
+ lc m [local]
+ lc t [local]
+ lc &t [type+local]
+ st S []
+ st &S [type]
+ st S []
+ st T []
+ fn foo(…) []
+ fn main() []
+ md core []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn suggest_deref_mut() {
+ check_relevance(
+ r#"
+//- minicore: deref_mut
+struct S;
+struct T(S);
+
+impl core::ops::Deref for T {
+ type Target = S;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl core::ops::DerefMut for T {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+
+fn foo(s: &mut S) {}
+
+fn main() {
+ let t = T(S);
+ let m = 123;
+
+ foo($0);
+}
+ "#,
+ expect![[r#"
+ lc m [local]
+ lc t [local]
+ lc &mut t [type+local]
+ st S []
+ st &mut S [type]
+ st S []
+ st T []
+ fn foo(…) []
+ fn main() []
+ md core []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn locals() {
+ check_relevance(
+ r#"
+fn foo(bar: u32) {
+ let baz = 0;
+
+ f$0
+}
+"#,
+ expect![[r#"
+ lc baz [local]
+ lc bar [local]
+ fn foo(…) []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_owned() {
+ check_relevance(
+ r#"
+enum Foo { A, B }
+fn foo() {
+ bar($0);
+}
+fn bar(t: Foo) {}
+"#,
+ expect![[r#"
+ ev Foo::A [type]
+ ev Foo::B [type]
+ en Foo []
+ fn bar(…) []
+ fn foo() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn enum_ref() {
+ check_relevance(
+ r#"
+enum Foo { A, B }
+fn foo() {
+ bar($0);
+}
+fn bar(t: &Foo) {}
+"#,
+ expect![[r#"
+ ev Foo::A []
+ ev &Foo::A [type]
+ ev Foo::B []
+ ev &Foo::B [type]
+ en Foo []
+ fn bar(…) []
+ fn foo() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn suggest_deref_fn_ret() {
+ check_relevance(
+ r#"
+//- minicore: deref
+struct S;
+struct T(S);
+
+impl core::ops::Deref for T {
+ type Target = S;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn foo(s: &S) {}
+fn bar() -> T {}
+
+fn main() {
+ foo($0);
+}
+"#,
+ expect![[r#"
+ st S []
+ st &S [type]
+ st S []
+ st T []
+ fn bar() []
+ fn &bar() [type]
+ fn foo(…) []
+ fn main() []
+ md core []
+ "#]],
+ )
+ }
+
+ #[test]
+ fn op_function_relevances() {
+ check_relevance(
+ r#"
+#[lang = "sub"]
+trait Sub {
+ fn sub(self, other: Self) -> Self { self }
+}
+impl Sub for u32 {}
+fn foo(a: u32) { a.$0 }
+"#,
+ expect![[r#"
+ me sub(…) (as Sub) [op_method]
+ "#]],
+ );
+ check_relevance(
+ r#"
+struct Foo;
+impl Foo {
+ fn new() -> Self {}
+}
+#[lang = "eq"]
+pub trait PartialEq<Rhs: ?Sized = Self> {
+ fn eq(&self, other: &Rhs) -> bool;
+ fn ne(&self, other: &Rhs) -> bool;
+}
+
+impl PartialEq for Foo {}
+fn main() {
+ Foo::$0
+}
+"#,
+ expect![[r#"
+ fn new() []
+ me eq(…) (as PartialEq) [op_method]
+ me ne(…) (as PartialEq) [op_method]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn struct_field_method_ref() {
+ check_kinds(
+ r#"
+struct Foo { bar: u32 }
+impl Foo { fn baz(&self) -> u32 { 0 } }
+
+fn foo(f: Foo) { let _: &u32 = f.b$0 }
+"#,
+ &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "baz()",
+ source_range: 98..99,
+ delete: 98..99,
+ insert: "baz()$0",
+ kind: Method,
+ lookup: "baz",
+ detail: "fn(&self) -> u32",
+ ref_match: "&@96",
+ },
+ CompletionItem {
+ label: "bar",
+ source_range: 98..99,
+ delete: 98..99,
+ insert: "bar",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "u32",
+ ref_match: "&@96",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn qualified_path_ref() {
+ check_kinds(
+ r#"
+struct S;
+
+struct T;
+impl T {
+ fn foo() -> S {}
+}
+
+fn bar(s: &S) {}
+
+fn main() {
+ bar(T::$0);
+}
+"#,
+ &[CompletionItemKind::SymbolKind(SymbolKind::Function)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "foo()",
+ source_range: 95..95,
+ delete: 95..95,
+ insert: "foo()$0",
+ kind: SymbolKind(
+ Function,
+ ),
+ lookup: "foo",
+ detail: "fn() -> S",
+ ref_match: "&@92",
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_enum() {
+ check_relevance(
+ r#"
+enum Foo<T> { A(T), B }
+// bar() should not be an exact type match
+// because the generic parameters are different
+fn bar() -> Foo<u8> { Foo::B }
+// FIXME baz() should be an exact type match
+// because the types could unify, but it currently
+// is not. This is due to the T here being
+// TyKind::Placeholder rather than TyKind::Missing.
+fn baz<T>() -> Foo<T> { Foo::B }
+fn foo() {
+ let foo: Foo<u32> = Foo::B;
+ let _: Foo<u32> = f$0;
+}
+"#,
+ expect![[r#"
+ lc foo [type+local]
+ ev Foo::A(…) [type_could_unify]
+ ev Foo::B [type_could_unify]
+ fn foo() []
+ en Foo []
+ fn bar() []
+ fn baz() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_exact_match_is_high_priority() {
+ cov_mark::check!(postfix_exact_match_is_high_priority);
+ check_relevance_for_kinds(
+ r#"
+mod ops {
+ pub trait Not {
+ type Output;
+ fn not(self) -> Self::Output;
+ }
+
+ impl Not for bool {
+ type Output = bool;
+ fn not(self) -> bool { if self { false } else { true }}
+ }
+}
+
+fn main() {
+ let _: bool = (9 > 2).not$0;
+}
+ "#,
+ &[CompletionItemKind::Snippet, CompletionItemKind::Method],
+ expect![[r#"
+ sn not [snippet]
+ me not() (use ops::Not) [type_could_unify+requires_import]
+ sn if []
+ sn while []
+ sn ref []
+ sn refm []
+ sn match []
+ sn box []
+ sn dbg []
+ sn dbgr []
+ sn call []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn postfix_inexact_match_is_low_priority() {
+ cov_mark::check!(postfix_inexact_match_is_low_priority);
+ check_relevance_for_kinds(
+ r#"
+struct S;
+impl S {
+ fn f(&self) {}
+}
+fn main() {
+ S.$0
+}
+ "#,
+ &[CompletionItemKind::Snippet, CompletionItemKind::Method],
+ expect![[r#"
+ me f() []
+ sn ref []
+ sn refm []
+ sn match []
+ sn box []
+ sn dbg []
+ sn dbgr []
+ sn call []
+ sn let []
+ sn letm []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn flyimport_reduced_relevance() {
+ check_relevance(
+ r#"
+mod std {
+ pub mod io {
+ pub trait BufRead {}
+ pub struct BufReader;
+ pub struct BufWriter;
+ }
+}
+struct Buffer;
+
+fn f() {
+ Buf$0
+}
+"#,
+ expect![[r#"
+ st Buffer []
+ fn f() []
+ md std []
+ tt BufRead (use std::io::BufRead) [requires_import]
+ st BufReader (use std::io::BufReader) [requires_import]
+ st BufWriter (use std::io::BufWriter) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn completes_struct_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+mod m { pub struct r#type {} }
+fn main() {
+ let r#type = m::t$0;
+}
+"#,
+ r#"
+mod m { pub struct r#type {} }
+fn main() {
+ let r#type = m::r#type;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_fn_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+mod m { pub fn r#type {} }
+fn main() {
+ m::t$0
+}
+"#,
+ r#"
+mod m { pub fn r#type {} }
+fn main() {
+ m::r#type()$0
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_macro_with_raw_identifier() {
+ check_edit(
+ "let!",
+ r#"
+macro_rules! r#let { () => {} }
+fn main() {
+ $0
+}
+"#,
+ r#"
+macro_rules! r#let { () => {} }
+fn main() {
+ r#let!($0)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_variant_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+enum A { r#type }
+fn main() {
+ let a = A::t$0
+}
+"#,
+ r#"
+enum A { r#type }
+fn main() {
+ let a = A::r#type$0
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_field_with_raw_identifier() {
+ check_edit(
+ "fn",
+ r#"
+mod r#type {
+ pub struct r#struct {
+ pub r#fn: u32
+ }
+}
+
+fn main() {
+ let a = r#type::r#struct {};
+ a.$0
+}
+"#,
+ r#"
+mod r#type {
+ pub struct r#struct {
+ pub r#fn: u32
+ }
+}
+
+fn main() {
+ let a = r#type::r#struct {};
+ a.r#fn
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_const_with_raw_identifier() {
+ check_edit(
+ "type",
+ r#"
+struct r#struct {}
+impl r#struct { pub const r#type: u8 = 1; }
+fn main() {
+ r#struct::t$0
+}
+"#,
+ r#"
+struct r#struct {}
+impl r#struct { pub const r#type: u8 = 1; }
+fn main() {
+ r#struct::r#type
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_type_alias_with_raw_identifier() {
+ check_edit(
+ "type type",
+ r#"
+struct r#struct {}
+trait r#trait { type r#type; }
+impl r#trait for r#struct { type t$0 }
+"#,
+ r#"
+struct r#struct {}
+trait r#trait { type r#type; }
+impl r#trait for r#struct { type r#type = $0; }
+"#,
+ )
+ }
+
+ #[test]
+ fn field_access_includes_self() {
+ check_edit(
+ "length",
+ r#"
+struct S {
+ length: i32
+}
+
+impl S {
+ fn some_fn(&self) {
+ let l = len$0
+ }
+}
+"#,
+ r#"
+struct S {
+ length: i32
+}
+
+impl S {
+ fn some_fn(&self) {
+ let l = self.length
+ }
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
new file mode 100644
index 000000000..a810eef18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs
@@ -0,0 +1,33 @@
+//! Renderer for `const` items.
+
+use hir::{AsAssocItem, HirDisplay};
+use ide_db::SymbolKind;
+
+use crate::{item::CompletionItem, render::RenderContext};
+
+pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
+ let _p = profile::span("render_const");
+ render(ctx, const_)
+}
+
+fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option<CompletionItem> {
+ let db = ctx.db();
+ let name = const_.name(db)?;
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let detail = const_.display(db).to_string();
+
+ let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name.clone());
+ item.set_documentation(ctx.docs(const_))
+ .set_deprecated(ctx.is_deprecated(const_) || ctx.is_deprecated_assoc_item(const_))
+ .detail(detail)
+ .set_relevance(ctx.completion_relevance());
+
+ if let Some(actm) = const_.as_assoc_item(db) {
+ if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
+ item.trait_name(trt.name(db).to_smol_str());
+ }
+ }
+ item.insert_text(escaped_name);
+
+ Some(item.build())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
new file mode 100644
index 000000000..4b5535718
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -0,0 +1,671 @@
+//! Renderer for function calls.
+
+use hir::{db::HirDatabase, AsAssocItem, HirDisplay};
+use ide_db::{SnippetCap, SymbolKind};
+use itertools::Itertools;
+use stdx::{format_to, to_lower_snake_case};
+use syntax::{AstNode, SmolStr};
+
+use crate::{
+ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
+ item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance},
+ render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext},
+ CallableSnippets,
+};
+
+#[derive(Debug)]
+enum FuncKind<'ctx> {
+ Function(&'ctx PathCompletionCtx),
+ Method(&'ctx DotAccess, Option<hir::Name>),
+}
+
+pub(crate) fn render_fn(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: Option<hir::Name>,
+ func: hir::Function,
+) -> Builder {
+ let _p = profile::span("render_fn");
+ render(ctx, local_name, func, FuncKind::Function(path_ctx))
+}
+
+pub(crate) fn render_method(
+ ctx: RenderContext<'_>,
+ dot_access: &DotAccess,
+ receiver: Option<hir::Name>,
+ local_name: Option<hir::Name>,
+ func: hir::Function,
+) -> Builder {
+ let _p = profile::span("render_method");
+ render(ctx, local_name, func, FuncKind::Method(dot_access, receiver))
+}
+
+fn render(
+ ctx @ RenderContext { completion, .. }: RenderContext<'_>,
+ local_name: Option<hir::Name>,
+ func: hir::Function,
+ func_kind: FuncKind<'_>,
+) -> Builder {
+ let db = completion.db;
+
+ let name = local_name.unwrap_or_else(|| func.name(db));
+
+ let (call, escaped_call) = match &func_kind {
+ FuncKind::Method(_, Some(receiver)) => (
+ format!("{}.{}", receiver, &name).into(),
+ format!("{}.{}", receiver.escaped(), name.escaped()).into(),
+ ),
+ _ => (name.to_smol_str(), name.escaped().to_smol_str()),
+ };
+ let mut item = CompletionItem::new(
+ if func.self_param(db).is_some() {
+ CompletionItemKind::Method
+ } else {
+ CompletionItemKind::SymbolKind(SymbolKind::Function)
+ },
+ ctx.source_range(),
+ call.clone(),
+ );
+
+ let ret_type = func.ret_type(db);
+ let is_op_method = func
+ .as_assoc_item(ctx.db())
+ .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
+ .map_or(false, |trait_| completion.is_ops_trait(trait_));
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(completion, &ret_type),
+ exact_name_match: compute_exact_name_match(completion, &call),
+ is_op_method,
+ ..ctx.completion_relevance()
+ });
+
+ if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
+ match func_kind {
+ FuncKind::Function(path_ctx) => {
+ item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
+ }
+ FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
+ if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
+ item.ref_match(ref_match, original_expr.syntax().text_range().start());
+ }
+ }
+ _ => (),
+ }
+ }
+
+ item.set_documentation(ctx.docs(func))
+ .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
+ .detail(detail(db, func))
+ .lookup_by(name.to_smol_str());
+
+ match ctx.completion.config.snippet_cap {
+ Some(cap) => {
+ let complete_params = match func_kind {
+ FuncKind::Function(PathCompletionCtx {
+ kind: PathKind::Expr { .. },
+ has_call_parens: false,
+ ..
+ }) => Some(false),
+ FuncKind::Method(
+ DotAccess {
+ kind:
+ DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. },
+ ..
+ },
+ _,
+ ) => Some(true),
+ _ => None,
+ };
+ if let Some(has_dot_receiver) = complete_params {
+ if let Some((self_param, params)) =
+ params(ctx.completion, func, &func_kind, has_dot_receiver)
+ {
+ add_call_parens(
+ &mut item,
+ completion,
+ cap,
+ call,
+ escaped_call,
+ self_param,
+ params,
+ );
+ }
+ }
+ }
+ _ => (),
+ };
+
+ match ctx.import_to_add {
+ Some(import_to_add) => {
+ item.add_import(import_to_add);
+ }
+ None => {
+ if let Some(actm) = func.as_assoc_item(db) {
+ if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
+ item.trait_name(trt.name(db).to_smol_str());
+ }
+ }
+ }
+ }
+ item
+}
+
+pub(super) fn add_call_parens<'b>(
+ builder: &'b mut Builder,
+ ctx: &CompletionContext<'_>,
+ cap: SnippetCap,
+ name: SmolStr,
+ escaped_name: SmolStr,
+ self_param: Option<hir::SelfParam>,
+ params: Vec<hir::Param>,
+) -> &'b mut Builder {
+ cov_mark::hit!(inserts_parens_for_function_calls);
+
+ let (snippet, label_suffix) = if self_param.is_none() && params.is_empty() {
+ (format!("{}()$0", escaped_name), "()")
+ } else {
+ builder.trigger_call_info();
+ let snippet = if let Some(CallableSnippets::FillArguments) = ctx.config.callable {
+ let offset = if self_param.is_some() { 2 } else { 1 };
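+            // Tab stop 1 is reserved for the self param when present, so the remaining params start at 2.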
+ let function_params_snippet =
+ params.iter().enumerate().format_with(", ", |(index, param), f| {
+ match param.name(ctx.db) {
+ Some(n) => {
+ let smol_str = n.to_smol_str();
+ let text = smol_str.as_str().trim_start_matches('_');
+ let ref_ = ref_of_param(ctx, text, param.ty());
+ f(&format_args!("${{{}:{}{}}}", index + offset, ref_, text))
+ }
+ None => {
+ let name = match param.ty().as_adt() {
+ None => "_".to_string(),
+ Some(adt) => adt
+ .name(ctx.db)
+ .as_text()
+ .map(|s| to_lower_snake_case(s.as_str()))
+ .unwrap_or_else(|| "_".to_string()),
+ };
+ f(&format_args!("${{{}:{}}}", index + offset, name))
+ }
+ }
+ });
+ match self_param {
+ Some(self_param) => {
+ format!(
+ "{}(${{1:{}}}{}{})$0",
+ escaped_name,
+ self_param.display(ctx.db),
+ if params.is_empty() { "" } else { ", " },
+ function_params_snippet
+ )
+ }
+ None => {
+ format!("{}({})$0", escaped_name, function_params_snippet)
+ }
+ }
+ } else {
+ cov_mark::hit!(suppress_arg_snippets);
+ format!("{}($0)", escaped_name)
+ };
+
+ (snippet, "(…)")
+ };
+ builder.label(SmolStr::from_iter([&name, label_suffix])).insert_snippet(cap, snippet)
+}
+
+fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str {
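+    // If a local with the same name as the parameter is in scope and its type matches the
+    // dereferenced parameter type, prefill the argument snippet with `&` or `&mut `.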
+ if let Some(derefed_ty) = ty.remove_ref() {
+ for (name, local) in ctx.locals.iter() {
+ if name.as_text().as_deref() == Some(arg) {
+ return if local.ty(ctx.db) == derefed_ty {
+ if ty.is_mutable_reference() {
+ "&mut "
+ } else {
+ "&"
+ }
+ } else {
+ ""
+ };
+ }
+ }
+ }
+ ""
+}
+
+fn detail(db: &dyn HirDatabase, func: hir::Function) -> String {
+ let mut ret_ty = func.ret_type(db);
+ let mut detail = String::new();
+
+ if func.is_const(db) {
+ format_to!(detail, "const ");
+ }
+ if func.is_async(db) {
+ format_to!(detail, "async ");
+ if let Some(async_ret) = func.async_ret_type(db) {
+ ret_ty = async_ret;
+ }
+ }
+ if func.is_unsafe_to_call(db) {
+ format_to!(detail, "unsafe ");
+ }
+
+ format_to!(detail, "fn({})", params_display(db, func));
+ if !ret_ty.is_unit() {
+ format_to!(detail, " -> {}", ret_ty.display(db));
+ }
+ detail
+}
+
+fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String {
+ if let Some(self_param) = func.self_param(db) {
+ let assoc_fn_params = func.assoc_fn_params(db);
+ let params = assoc_fn_params
+ .iter()
+ .skip(1) // skip the self param because we are manually handling that
+ .map(|p| p.ty().display(db));
+ format!(
+ "{}{}",
+ self_param.display(db),
+ params.format_with("", |display, f| {
+ f(&", ")?;
+ f(&display)
+ })
+ )
+ } else {
+ let assoc_fn_params = func.assoc_fn_params(db);
+ assoc_fn_params.iter().map(|p| p.ty().display(db)).join(", ")
+ }
+}
+
+fn params(
+ ctx: &CompletionContext<'_>,
+ func: hir::Function,
+ func_kind: &FuncKind<'_>,
+ has_dot_receiver: bool,
+) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
+ if ctx.config.callable.is_none() {
+ return None;
+ }
+
+ // Don't add parentheses if the expected type is some function reference.
+ if let Some(ty) = &ctx.expected_type {
+ // FIXME: check signature matches?
+ if ty.is_fn() {
+ cov_mark::hit!(no_call_parens_if_fn_ptr_needed);
+ return None;
+ }
+ }
+
+ let self_param = if has_dot_receiver || matches!(func_kind, FuncKind::Method(_, Some(_))) {
+ None
+ } else {
+ func.self_param(ctx.db)
+ };
+ Some((self_param, func.params_without_self(ctx.db)))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_edit, check_edit_with_config, TEST_CONFIG},
+ CallableSnippets, CompletionConfig,
+ };
+
+ #[test]
+ fn inserts_parens_for_function_calls() {
+ cov_mark::check!(inserts_parens_for_function_calls);
+ check_edit(
+ "no_args",
+ r#"
+fn no_args() {}
+fn main() { no_$0 }
+"#,
+ r#"
+fn no_args() {}
+fn main() { no_args()$0 }
+"#,
+ );
+
+ check_edit(
+ "with_args",
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_$0 }
+"#,
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_args(${1:x}, ${2:y})$0 }
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn bar(s: &S) { s.f$0 }
+"#,
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn bar(s: &S) { s.foo()$0 }
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {}
+}
+fn bar(s: &S) {
+ s.f$0
+}
+"#,
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {}
+}
+fn bar(s: &S) {
+ s.foo(${1:x})$0
+}
+"#,
+ );
+
+ check_edit(
+ "foo",
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {
+ $0
+ }
+}
+"#,
+ r#"
+struct S {}
+impl S {
+ fn foo(&self, x: i32) {
+ self.foo(${1:x})$0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn parens_for_method_call_as_assoc_fn() {
+ check_edit(
+ "foo",
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn main() { S::f$0 }
+"#,
+ r#"
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn main() { S::foo(${1:&self})$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn suppress_arg_snippets() {
+ cov_mark::check!(suppress_arg_snippets);
+ check_edit_with_config(
+ CompletionConfig { callable: Some(CallableSnippets::AddParentheses), ..TEST_CONFIG },
+ "with_args",
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_$0 }
+"#,
+ r#"
+fn with_args(x: i32, y: String) {}
+fn main() { with_args($0) }
+"#,
+ );
+ }
+
+ #[test]
+ fn strips_underscores_from_args() {
+ check_edit(
+ "foo",
+ r#"
+fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {}
+fn main() { f$0 }
+"#,
+ r#"
+fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {}
+fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn insert_ref_when_matching_local_in_scope() {
+ check_edit(
+ "ref_arg",
+ r#"
+struct Foo {}
+fn ref_arg(x: &Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_ar$0
+}
+"#,
+ r#"
+struct Foo {}
+fn ref_arg(x: &Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_arg(${1:&x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn insert_mut_ref_when_matching_local_in_scope() {
+ check_edit(
+ "ref_arg",
+ r#"
+struct Foo {}
+fn ref_arg(x: &mut Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_ar$0
+}
+"#,
+ r#"
+struct Foo {}
+fn ref_arg(x: &mut Foo) {}
+fn main() {
+ let x = Foo {};
+ ref_arg(${1:&mut x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn insert_ref_when_matching_local_in_scope_for_method() {
+ check_edit(
+ "apply_foo",
+ r#"
+struct Foo {}
+struct Bar {}
+impl Bar {
+ fn apply_foo(&self, x: &Foo) {}
+}
+
+fn main() {
+ let x = Foo {};
+ let y = Bar {};
+ y.$0
+}
+"#,
+ r#"
+struct Foo {}
+struct Bar {}
+impl Bar {
+ fn apply_foo(&self, x: &Foo) {}
+}
+
+fn main() {
+ let x = Foo {};
+ let y = Bar {};
+ y.apply_foo(${1:&x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trim_mut_keyword_in_func_completion() {
+ check_edit(
+ "take_mutably",
+ r#"
+fn take_mutably(mut x: &i32) {}
+
+fn main() {
+ take_m$0
+}
+"#,
+ r#"
+fn take_mutably(mut x: &i32) {}
+
+fn main() {
+ take_mutably(${1:x})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_pattern_args_with_type_name_if_adt() {
+ check_edit(
+ "qux",
+ r#"
+struct Foo {
+ bar: i32
+}
+
+fn qux(Foo { bar }: Foo) {
+ println!("{}", bar);
+}
+
+fn main() {
+ qu$0
+}
+"#,
+ r#"
+struct Foo {
+ bar: i32
+}
+
+fn qux(Foo { bar }: Foo) {
+ println!("{}", bar);
+}
+
+fn main() {
+ qux(${1:foo})$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_fn_param() {
+ // has mut kw
+ check_edit(
+ "mut bar: u32",
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: (), mut ba$0)
+"#,
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: (), mut bar: u32)
+"#,
+ );
+
+ // has type param
+ check_edit(
+ "mut bar: u32",
+ r#"
+fn g(foo: (), mut ba$0: u32)
+fn f(foo: (), mut bar: u32) {}
+"#,
+ r#"
+fn g(foo: (), mut bar: u32)
+fn f(foo: (), mut bar: u32) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_fn_mut_param_add_comma() {
+ // add leading and trailing comma
+ check_edit(
+ ", mut bar: u32,",
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: ()mut ba$0 baz: ())
+"#,
+ r#"
+fn f(foo: (), mut bar: u32) {}
+fn g(foo: (), mut bar: u32, baz: ())
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_fn_mut_param_has_attribute() {
+ check_edit(
+ r#"#[baz = "qux"] mut bar: u32"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), mut ba$0)
+"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut bar: u32)
+"#,
+ );
+
+ check_edit(
+ r#"#[baz = "qux"] mut bar: u32"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut ba$0)
+"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut bar: u32)
+"#,
+ );
+
+ check_edit(
+ r#", #[baz = "qux"] mut bar: u32"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: ()#[baz = "qux"] mut ba$0)
+"#,
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut bar: u32)
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
new file mode 100644
index 000000000..91a253f8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
@@ -0,0 +1,191 @@
+//! Renderer for `enum` variant and `struct` literals.
+
+use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
+use ide_db::SymbolKind;
+use syntax::AstNode;
+
+use crate::{
+ context::{CompletionContext, PathCompletionCtx, PathKind},
+ item::{Builder, CompletionItem},
+ render::{
+ compute_ref_match, compute_type_match,
+ variant::{
+ format_literal_label, render_record_lit, render_tuple_lit, visible_fields,
+ RenderedLiteral,
+ },
+ RenderContext,
+ },
+ CompletionItemKind, CompletionRelevance,
+};
+
+pub(crate) fn render_variant_lit(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ local_name: Option<hir::Name>,
+ variant: hir::Variant,
+ path: Option<hir::ModPath>,
+) -> Option<Builder> {
+ let _p = profile::span("render_enum_variant");
+ let db = ctx.db();
+
+ let name = local_name.unwrap_or_else(|| variant.name(db));
+ render(ctx, path_ctx, Variant::EnumVariant(variant), name, path)
+}
+
+pub(crate) fn render_struct_literal(
+ ctx: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ strukt: hir::Struct,
+ path: Option<hir::ModPath>,
+ local_name: Option<hir::Name>,
+) -> Option<Builder> {
+ let _p = profile::span("render_struct_literal");
+ let db = ctx.db();
+
+ let name = local_name.unwrap_or_else(|| strukt.name(db));
+ render(ctx, path_ctx, Variant::Struct(strukt), name, path)
+}
+
+fn render(
+ ctx @ RenderContext { completion, .. }: RenderContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ thing: Variant,
+ name: hir::Name,
+ path: Option<hir::ModPath>,
+) -> Option<Builder> {
+ let db = completion.db;
+ let mut kind = thing.kind(db);
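+    // Skip the literal parens/braces when the call parens are already written or we are completing in a use/type path.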
+ let should_add_parens = match &path_ctx {
+ PathCompletionCtx { has_call_parens: true, .. } => false,
+ PathCompletionCtx { kind: PathKind::Use | PathKind::Type { .. }, .. } => false,
+ _ => true,
+ };
+
+ let fields = thing.fields(completion)?;
+ let (qualified_name, short_qualified_name, qualified) = match path {
+ Some(path) => {
+ let short = hir::ModPath::from_segments(
+ hir::PathKind::Plain,
+ path.segments().iter().skip(path.segments().len().saturating_sub(2)).cloned(),
+ );
+ (path, short, true)
+ }
+ None => (name.clone().into(), name.into(), false),
+ };
+ let (qualified_name, escaped_qualified_name) =
+ (qualified_name.to_string(), qualified_name.escaped().to_string());
+ let snippet_cap = ctx.snippet_cap();
+
+ let mut rendered = match kind {
+ StructKind::Tuple if should_add_parens => {
+ render_tuple_lit(db, snippet_cap, &fields, &escaped_qualified_name)
+ }
+ StructKind::Record if should_add_parens => {
+ render_record_lit(db, snippet_cap, &fields, &escaped_qualified_name)
+ }
+ _ => RenderedLiteral {
+ literal: escaped_qualified_name.clone(),
+ detail: escaped_qualified_name.clone(),
+ },
+ };
+
+ if snippet_cap.is_some() {
+ rendered.literal.push_str("$0");
+ }
+
+ // only show name in label if not adding parens
+ if !should_add_parens {
+ kind = StructKind::Unit;
+ }
+
+ let mut item = CompletionItem::new(
+ CompletionItemKind::SymbolKind(thing.symbol_kind()),
+ ctx.source_range(),
+ format_literal_label(&qualified_name, kind),
+ );
+
+ item.detail(rendered.detail);
+
+ match snippet_cap {
+ Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal),
+ None => item.insert_text(rendered.literal),
+ };
+
+ if qualified {
+ item.lookup_by(format_literal_label(&short_qualified_name.to_string(), kind));
+ }
+ item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx));
+
+ let ty = thing.ty(db);
+ item.set_relevance(CompletionRelevance {
+ type_match: compute_type_match(ctx.completion, &ty),
+ ..ctx.completion_relevance()
+ });
+ if let Some(ref_match) = compute_ref_match(completion, &ty) {
+ item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
+ }
+
+ if let Some(import_to_add) = ctx.import_to_add {
+ item.add_import(import_to_add);
+ }
+ Some(item)
+}
+
+#[derive(Clone, Copy)]
+enum Variant {
+ Struct(hir::Struct),
+ EnumVariant(hir::Variant),
+}
+
+impl Variant {
+ fn fields(self, ctx: &CompletionContext<'_>) -> Option<Vec<hir::Field>> {
+ let fields = match self {
+ Variant::Struct(it) => it.fields(ctx.db),
+ Variant::EnumVariant(it) => it.fields(ctx.db),
+ };
+ let (visible_fields, fields_omitted) = match self {
+ Variant::Struct(it) => visible_fields(ctx, &fields, it)?,
+ Variant::EnumVariant(it) => visible_fields(ctx, &fields, it)?,
+ };
+ if !fields_omitted {
+ Some(visible_fields)
+ } else {
+ None
+ }
+ }
+
+ fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ match self {
+ Variant::Struct(it) => it.kind(db),
+ Variant::EnumVariant(it) => it.kind(db),
+ }
+ }
+
+ fn symbol_kind(self) -> SymbolKind {
+ match self {
+ Variant::Struct(_) => SymbolKind::Struct,
+ Variant::EnumVariant(_) => SymbolKind::Variant,
+ }
+ }
+
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ match self {
+ Variant::Struct(it) => it.docs(db),
+ Variant::EnumVariant(it) => it.docs(db),
+ }
+ }
+
+ fn is_deprecated(self, ctx: &RenderContext<'_>) -> bool {
+ match self {
+ Variant::Struct(it) => ctx.is_deprecated(it),
+ Variant::EnumVariant(it) => ctx.is_deprecated(it),
+ }
+ }
+
+ fn ty(self, db: &dyn HirDatabase) -> hir::Type {
+ match self {
+ Variant::Struct(it) => it.ty(db),
+ Variant::EnumVariant(it) => it.parent_enum(db).ty(db),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
new file mode 100644
index 000000000..ca2269f13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
@@ -0,0 +1,270 @@
+//! Renderer for macro invocations.
+
+use hir::{Documentation, HirDisplay};
+use ide_db::SymbolKind;
+use syntax::SmolStr;
+
+use crate::{
+ context::{PathCompletionCtx, PathKind, PatternContext},
+ item::{Builder, CompletionItem},
+ render::RenderContext,
+};
+
+pub(crate) fn render_macro(
+ ctx: RenderContext<'_>,
+ PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx,
+ name: hir::Name,
+ macro_: hir::Macro,
+) -> Builder {
+ let _p = profile::span("render_macro");
+ render(ctx, *kind == PathKind::Use, *has_macro_bang, *has_call_parens, name, macro_)
+}
+
+pub(crate) fn render_macro_pat(
+ ctx: RenderContext<'_>,
+ _pattern_ctx: &PatternContext,
+ name: hir::Name,
+ macro_: hir::Macro,
+) -> Builder {
+ let _p = profile::span("render_macro");
+ render(ctx, false, false, false, name, macro_)
+}
+
+fn render(
+ ctx @ RenderContext { completion, .. }: RenderContext<'_>,
+ is_use_path: bool,
+ has_macro_bang: bool,
+ has_call_parens: bool,
+ name: hir::Name,
+ macro_: hir::Macro,
+) -> Builder {
+ let source_range = if ctx.is_immediately_after_macro_bang() {
+ cov_mark::hit!(completes_macro_call_if_cursor_at_bang_token);
+ completion.token.parent().map_or_else(|| ctx.source_range(), |it| it.text_range())
+ } else {
+ ctx.source_range()
+ };
+
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let docs = ctx.docs(macro_);
+ let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default();
+ let is_fn_like = macro_.is_fn_like(completion.db);
+ let (bra, ket) = if is_fn_like { guess_macro_braces(&name, docs_str) } else { ("", "") };
+
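+    // Only fn-like macros get a `!` appended, and only when not completing in a `use` path and the `!` is not already typed.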
+ let needs_bang = is_fn_like && !is_use_path && !has_macro_bang;
+
+ let mut item = CompletionItem::new(
+ SymbolKind::from(macro_.kind(completion.db)),
+ source_range,
+ label(&ctx, needs_bang, bra, ket, &name),
+ );
+ item.set_deprecated(ctx.is_deprecated(macro_))
+ .detail(macro_.display(completion.db).to_string())
+ .set_documentation(docs)
+ .set_relevance(ctx.completion_relevance());
+
+ match ctx.snippet_cap() {
+ Some(cap) if needs_bang && !has_call_parens => {
+ let snippet = format!("{}!{}$0{}", escaped_name, bra, ket);
+ let lookup = banged_name(&name);
+ item.insert_snippet(cap, snippet).lookup_by(lookup);
+ }
+ _ if needs_bang => {
+ item.insert_text(banged_name(&escaped_name)).lookup_by(banged_name(&name));
+ }
+ _ => {
+ cov_mark::hit!(dont_insert_macro_call_parens_unncessary);
+ item.insert_text(escaped_name);
+ }
+ };
+ if let Some(import_to_add) = ctx.import_to_add {
+ item.add_import(import_to_add);
+ }
+
+ item
+}
+
+fn label(
+ ctx: &RenderContext<'_>,
+ needs_bang: bool,
+ bra: &str,
+ ket: &str,
+ name: &SmolStr,
+) -> SmolStr {
+ if needs_bang {
+ if ctx.snippet_cap().is_some() {
+ SmolStr::from_iter([&*name, "!", bra, "…", ket])
+ } else {
+ banged_name(name)
+ }
+ } else {
+ name.clone()
+ }
+}
+
+fn banged_name(name: &str) -> SmolStr {
+ SmolStr::from_iter([name, "!"])
+}
+
+fn guess_macro_braces(macro_name: &str, docs: &str) -> (&'static str, &'static str) {
+ let mut votes = [0, 0, 0];
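+    // Tally how often the docs invoke the macro with `{}`, `[]`, or `()` and pick the most common delimiter.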
+ for (idx, s) in docs.match_indices(&macro_name) {
+ let (before, after) = (&docs[..idx], &docs[idx + s.len()..]);
+        // Make sure we match the full word
+ if after.starts_with('!')
+ && !before.ends_with(|c: char| c == '_' || c.is_ascii_alphanumeric())
+ {
+            // There may be spaces before the braces, as in `foo! {}`
+ match after[1..].chars().find(|&c| !c.is_whitespace()) {
+ Some('{') => votes[0] += 1,
+ Some('[') => votes[1] += 1,
+ Some('(') => votes[2] += 1,
+ _ => {}
+ }
+ }
+ }
+
+ // Insert a space before `{}`.
+    // When votes are tied, prefer the last option.
+ let (_vote, (bra, ket)) = votes
+ .iter()
+ .zip(&[(" {", "}"), ("[", "]"), ("(", ")")])
+ .max_by_key(|&(&vote, _)| vote)
+ .unwrap();
+ (*bra, *ket)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_edit;
+
+ #[test]
+ fn dont_insert_macro_call_parens_unncessary() {
+ cov_mark::check!(dont_insert_macro_call_parens_unncessary);
+ check_edit(
+ "frobnicate",
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::$0;
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! frobnicate { () => () }
+"#,
+ r#"
+use foo::frobnicate;
+"#,
+ );
+
+ check_edit(
+ "frobnicate",
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() { frob$0!(); }
+"#,
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() { frobnicate!(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn add_bang_to_parens() {
+ check_edit(
+ "frobnicate!",
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() {
+ frob$0()
+}
+"#,
+ r#"
+macro_rules! frobnicate { () => () }
+fn main() {
+ frobnicate!()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn guesses_macro_braces() {
+ check_edit(
+ "vec!",
+ r#"
+/// Creates a [`Vec`] containing the arguments.
+///
+/// ```
+/// let v = vec![1, 2, 3];
+/// assert_eq!(v[0], 1);
+/// assert_eq!(v[1], 2);
+/// assert_eq!(v[2], 3);
+/// ```
+macro_rules! vec { () => {} }
+
+fn main() { v$0 }
+"#,
+ r#"
+/// Creates a [`Vec`] containing the arguments.
+///
+/// ```
+/// let v = vec![1, 2, 3];
+/// assert_eq!(v[0], 1);
+/// assert_eq!(v[1], 2);
+/// assert_eq!(v[2], 3);
+/// ```
+macro_rules! vec { () => {} }
+
+fn main() { vec![$0] }
+"#,
+ );
+
+ check_edit(
+ "foo!",
+ r#"
+/// Foo
+///
+/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`,
+/// call as `let _=foo! { hello world };`
+macro_rules! foo { () => {} }
+fn main() { $0 }
+"#,
+ r#"
+/// Foo
+///
+/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`,
+/// call as `let _=foo! { hello world };`
+macro_rules! foo { () => {} }
+fn main() { foo! {$0} }
+"#,
+ )
+ }
+
+ #[test]
+ fn completes_macro_call_if_cursor_at_bang_token() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/9904
+ cov_mark::check!(completes_macro_call_if_cursor_at_bang_token);
+ check_edit(
+ "foo!",
+ r#"
+macro_rules! foo {
+ () => {}
+}
+
+fn main() {
+ foo!$0
+}
+"#,
+ r#"
+macro_rules! foo {
+ () => {}
+}
+
+fn main() {
+ foo!($0)
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
new file mode 100644
index 000000000..34a384f2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -0,0 +1,193 @@
+//! Renderer for patterns.
+
+use hir::{db::HirDatabase, HasAttrs, Name, StructKind};
+use ide_db::SnippetCap;
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
+ render::{
+ variant::{format_literal_label, visible_fields},
+ RenderContext,
+ },
+ CompletionItem, CompletionItemKind,
+};
+
+pub(crate) fn render_struct_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ strukt: hir::Struct,
+ local_name: Option<Name>,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_struct_pat");
+
+ let fields = strukt.fields(ctx.db());
+ let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, strukt)?;
+
+ if visible_fields.is_empty() {
+        // Matching a struct without matching any of its fields is pointless, unlike matching an enum variant without its fields.
+ return None;
+ }
+
+ let name = local_name.unwrap_or_else(|| strukt.name(ctx.db()));
+ let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str());
+ let kind = strukt.kind(ctx.db());
+ let label = format_literal_label(name.as_str(), kind);
+ let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
+
+ Some(build_completion(ctx, label, pat, strukt))
+}
+
+pub(crate) fn render_variant_pat(
+ ctx: RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ path_ctx: Option<&PathCompletionCtx>,
+ variant: hir::Variant,
+ local_name: Option<Name>,
+ path: Option<&hir::ModPath>,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_variant_pat");
+
+ let fields = variant.fields(ctx.db());
+ let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
+
+ let (name, escaped_name) = match path {
+ Some(path) => (path.to_string().into(), path.escaped().to_string().into()),
+ None => {
+ let name = local_name.unwrap_or_else(|| variant.name(ctx.db()));
+ (name.to_smol_str(), name.escaped().to_smol_str())
+ }
+ };
+
+ let (label, pat) = match path_ctx {
+ Some(PathCompletionCtx { has_call_parens: true, .. }) => (name, escaped_name.to_string()),
+ _ => {
+ let kind = variant.kind(ctx.db());
+ let label = format_literal_label(name.as_str(), kind);
+ let pat = render_pat(
+ &ctx,
+ pattern_ctx,
+ &escaped_name,
+ kind,
+ &visible_fields,
+ fields_omitted,
+ )?;
+ (label, pat)
+ }
+ };
+
+ Some(build_completion(ctx, label, pat, variant))
+}
+
+fn build_completion(
+ ctx: RenderContext<'_>,
+ label: SmolStr,
+ pat: String,
+ def: impl HasAttrs + Copy,
+) -> CompletionItem {
+ let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label);
+ item.set_documentation(ctx.docs(def))
+ .set_deprecated(ctx.is_deprecated(def))
+ .detail(&pat)
+ .set_relevance(ctx.completion_relevance());
+ match ctx.snippet_cap() {
+ Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
+ None => item.insert_text(pat),
+ };
+ item.build()
+}
+
+fn render_pat(
+ ctx: &RenderContext<'_>,
+ pattern_ctx: &PatternContext,
+ name: &str,
+ kind: StructKind,
+ fields: &[hir::Field],
+ fields_omitted: bool,
+) -> Option<String> {
+ let mut pat = match kind {
+ StructKind::Tuple => render_tuple_as_pat(ctx.snippet_cap(), fields, name, fields_omitted),
+ StructKind::Record => {
+ render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted)
+ }
+ StructKind::Unit => name.to_string(),
+ };
+
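+    // A function parameter pattern without an explicit type gets `: <name>` appended as its type ascription.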
+ let needs_ascription = matches!(
+ pattern_ctx,
+ PatternContext {
+ param_ctx: Some(ParamContext { kind: ParamKind::Function(_), .. }),
+ has_type_ascription: false,
+ ..
+ }
+ );
+ if needs_ascription {
+ pat.push(':');
+ pat.push(' ');
+ pat.push_str(name);
+ }
+ if ctx.snippet_cap().is_some() {
+ pat.push_str("$0");
+ }
+ Some(pat)
+}
+
+fn render_record_as_pat(
+ db: &dyn HirDatabase,
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ name: &str,
+ fields_omitted: bool,
+) -> String {
+ let fields = fields.iter();
+ match snippet_cap {
+ Some(_) => {
+ format!(
+ "{name} {{ {}{} }}",
+ fields.enumerate().format_with(", ", |(idx, field), f| {
+ f(&format_args!("{}${}", field.name(db).escaped(), idx + 1))
+ }),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ None => {
+ format!(
+ "{name} {{ {}{} }}",
+ fields.map(|field| field.name(db).escaped().to_smol_str()).format(", "),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ }
+}
+
+fn render_tuple_as_pat(
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ name: &str,
+ fields_omitted: bool,
+) -> String {
+ let fields = fields.iter();
+ match snippet_cap {
+ Some(_) => {
+ format!(
+ "{name}({}{})",
+ fields
+ .enumerate()
+ .format_with(", ", |(idx, _), f| { f(&format_args!("${}", idx + 1)) }),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ None => {
+ format!(
+ "{name}({}{})",
+ fields.enumerate().map(|(idx, _)| idx).format(", "),
+ if fields_omitted { ", .." } else { "" },
+ name = name
+ )
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
new file mode 100644
index 000000000..f1b23c76e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs
@@ -0,0 +1,57 @@
+//! Renderer for type aliases.
+
+use hir::{AsAssocItem, HirDisplay};
+use ide_db::SymbolKind;
+use syntax::SmolStr;
+
+use crate::{item::CompletionItem, render::RenderContext};
+
+pub(crate) fn render_type_alias(
+ ctx: RenderContext<'_>,
+ type_alias: hir::TypeAlias,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_type_alias");
+ render(ctx, type_alias, false)
+}
+
+pub(crate) fn render_type_alias_with_eq(
+ ctx: RenderContext<'_>,
+ type_alias: hir::TypeAlias,
+) -> Option<CompletionItem> {
+ let _p = profile::span("render_type_alias_with_eq");
+ render(ctx, type_alias, true)
+}
+
+fn render(
+ ctx: RenderContext<'_>,
+ type_alias: hir::TypeAlias,
+ with_eq: bool,
+) -> Option<CompletionItem> {
+ let db = ctx.db();
+
+ let name = type_alias.name(db);
+ let (name, escaped_name) = if with_eq {
+ (
+ SmolStr::from_iter([&name.to_smol_str(), " = "]),
+ SmolStr::from_iter([&name.escaped().to_smol_str(), " = "]),
+ )
+ } else {
+ (name.to_smol_str(), name.escaped().to_smol_str())
+ };
+ let detail = type_alias.display(db).to_string();
+
+ let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name.clone());
+ item.set_documentation(ctx.docs(type_alias))
+ .set_deprecated(ctx.is_deprecated(type_alias) || ctx.is_deprecated_assoc_item(type_alias))
+ .detail(detail)
+ .set_relevance(ctx.completion_relevance());
+
+ if let Some(actm) = type_alias.as_assoc_item(db) {
+ if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
+ item.trait_name(trt.name(db).to_smol_str());
+ }
+ }
+ item.insert_text(escaped_name);
+
+ Some(item.build())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
new file mode 100644
index 000000000..9c9540a9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
@@ -0,0 +1,77 @@
+//! Renderer for `union` literals.
+
+use hir::{HirDisplay, Name, StructKind};
+use ide_db::SymbolKind;
+use itertools::Itertools;
+
+use crate::{
+ render::{
+ variant::{format_literal_label, visible_fields},
+ RenderContext,
+ },
+ CompletionItem, CompletionItemKind,
+};
+
+pub(crate) fn render_union_literal(
+ ctx: RenderContext<'_>,
+ un: hir::Union,
+ path: Option<hir::ModPath>,
+ local_name: Option<Name>,
+) -> Option<CompletionItem> {
+ let name = local_name.unwrap_or_else(|| un.name(ctx.db()));
+
+ let (qualified_name, escaped_qualified_name) = match path {
+ Some(p) => (p.to_string(), p.escaped().to_string()),
+ None => (name.to_string(), name.escaped().to_string()),
+ };
+
+ let mut item = CompletionItem::new(
+ CompletionItemKind::SymbolKind(SymbolKind::Union),
+ ctx.source_range(),
+ format_literal_label(&name.to_smol_str(), StructKind::Record),
+ );
+
+ let fields = un.fields(ctx.db());
+ let (fields, fields_omitted) = visible_fields(ctx.completion, &fields, un)?;
+
+ if fields.is_empty() {
+ return None;
+ }
+
+ let literal = if ctx.snippet_cap().is_some() {
+ format!(
+ "{} {{ ${{1|{}|}}: ${{2:()}} }}$0",
+ escaped_qualified_name,
+ fields.iter().map(|field| field.name(ctx.db()).escaped().to_smol_str()).format(",")
+ )
+ } else {
+ format!(
+ "{} {{ {} }}",
+ escaped_qualified_name,
+ fields.iter().format_with(", ", |field, f| {
+ f(&format_args!("{}: ()", field.name(ctx.db()).escaped()))
+ })
+ )
+ };
+
+ let detail = format!(
+ "{} {{ {}{} }}",
+ qualified_name,
+ fields.iter().format_with(", ", |field, f| {
+ f(&format_args!("{}: {}", field.name(ctx.db()), field.ty(ctx.db()).display(ctx.db())))
+ }),
+ if fields_omitted { ", .." } else { "" }
+ );
+
+ item.set_documentation(ctx.docs(un))
+ .set_deprecated(ctx.is_deprecated(un))
+ .detail(&detail)
+ .set_relevance(ctx.completion_relevance());
+
+ match ctx.snippet_cap() {
+ Some(snippet_cap) => item.insert_snippet(snippet_cap, literal),
+ None => item.insert_text(literal),
+ };
+
+ Some(item.build())
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
new file mode 100644
index 000000000..003a0c11e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
@@ -0,0 +1,96 @@
+//! Code common to structs, unions, and enum variants.
+
+use crate::context::CompletionContext;
+use hir::{db::HirDatabase, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
+use ide_db::SnippetCap;
+use itertools::Itertools;
+use syntax::SmolStr;
+
+/// A rendered struct, union, or enum variant, split into fields for actual
+/// auto-completion (`literal`, using `field: ()`) and display in the
+/// completions menu (`detail`, using `field: type`).
+pub(crate) struct RenderedLiteral {
+ pub(crate) literal: String,
+ pub(crate) detail: String,
+}
+
+/// Render a record type (or sub-type) to a `RenderedLiteral`, using `path` as the prefix
+/// (e.g. the struct, union, or variant name) of the rendered literal.
+pub(crate) fn render_record_lit(
+ db: &dyn HirDatabase,
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ path: &str,
+) -> RenderedLiteral {
+ let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| {
+ if snippet_cap.is_some() {
+ f(&format_args!("{}: ${{{}:()}}", field.name(db).escaped(), idx + 1))
+ } else {
+ f(&format_args!("{}: ()", field.name(db).escaped()))
+ }
+ });
+
+ let types = fields.iter().format_with(", ", |field, f| {
+ f(&format_args!("{}: {}", field.name(db), field.ty(db).display(db)))
+ });
+
+ RenderedLiteral {
+ literal: format!("{} {{ {} }}", path, completions),
+ detail: format!("{} {{ {} }}", path, types),
+ }
+}
+
+/// Render a tuple type (or sub-type) to a `RenderedLiteral`, using `path` as the prefix
+/// (e.g. the struct or variant name) of the rendered literal.
+pub(crate) fn render_tuple_lit(
+ db: &dyn HirDatabase,
+ snippet_cap: Option<SnippetCap>,
+ fields: &[hir::Field],
+ path: &str,
+) -> RenderedLiteral {
+ let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| {
+ if snippet_cap.is_some() {
+ f(&format_args!("${{{}:()}}", idx + 1))
+ } else {
+ f(&format_args!("()"))
+ }
+ });
+
+ let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db)));
+
+ RenderedLiteral {
+ literal: format!("{}({})", path, completions),
+ detail: format!("{}({})", path, types),
+ }
+}
+
+/// Find all the visible fields in a given list. Returns the list of visible fields, plus a
+/// boolean that is `true` if any fields were omitted because they are not visible from the
+/// current module or because the item is a foreign `#[non_exhaustive]` item.
+pub(crate) fn visible_fields(
+ ctx: &CompletionContext<'_>,
+ fields: &[hir::Field],
+ item: impl HasAttrs + HasCrate + Copy,
+) -> Option<(Vec<hir::Field>, bool)> {
+ let module = ctx.module;
+ let n_fields = fields.len();
+ let fields = fields
+ .iter()
+ .filter(|field| field.is_visible_from(ctx.db, module))
+ .copied()
+ .collect::<Vec<_>>();
+ let has_invisible_field = n_fields - fields.len() > 0;
+ let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key("non_exhaustive").exists()
+ && item.krate(ctx.db) != module.krate();
+ let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
+ Some((fields, fields_omitted))
+}
+
+/// Format a struct, etc. literal option for display in the completions menu.
+pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr {
+ match kind {
+ StructKind::Tuple => SmolStr::from_iter([name, "(…)"]),
+ StructKind::Record => SmolStr::from_iter([name, " {…}"]),
+ StructKind::Unit => name.into(),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
new file mode 100644
index 000000000..dc1039fa6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
@@ -0,0 +1,214 @@
+//! User (postfix)-snippet definitions.
+//!
+//! Actual logic is implemented in [`crate::completions::postfix`] and [`crate::completions::snippet`] respectively.
+
+// Feature: User Snippet Completions
+//
+// rust-analyzer allows the user to define custom (postfix)-snippets that may require certain items to be accessible in the current scope in order to be applicable.
+//
+// A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets.custom` object.
+//
+// [source,json]
+// ----
+// {
+// "rust-analyzer.completion.snippets.custom": {
+// "thread spawn": {
+// "prefix": ["spawn", "tspawn"],
+// "body": [
+// "thread::spawn(move || {",
+// "\t$0",
+// "});",
+// ],
+// "description": "Insert a thread::spawn call",
+// "requires": "std::thread",
+// "scope": "expr",
+// }
+// }
+// }
+// ----
+//
+// In the example above:
+//
+// * `"thread spawn"` is the name of the snippet.
+//
+// * `prefix` defines one or more trigger words that will trigger the snippet completion.
+// Using `postfix` will instead create a postfix snippet.
+//
+// * `body` is one or more lines of content joined via newlines for the final output.
+//
+// * `description` is an optional description of the snippet; if unset, the snippet name will be used.
+//
+// * `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered.
+// If any of the paths fails to resolve, the snippet won't be applicable; otherwise, inserting the snippet will also insert imports for
+// the items if they aren't yet in scope.
+//
+// * `scope` is an optional filter for when the snippet should be applicable. Possible values are:
+// ** for Snippet-Scopes: `expr`, `item` (default: `item`)
+// ** for Postfix-Snippet-Scopes: `expr`, `type` (default: `expr`)
+//
+// The `body` field also has access to placeholders as visible in the example as `$0`.
+// These placeholders take the form of `$number` or `${number:placeholder_text}`, which can be traversed as tabstops in ascending order starting from 1,
+// with `$0` being a special case that always comes last.
+//
+// There is also a special placeholder, `${receiver}`, which will be replaced by the receiver expression for postfix snippets, or a `$0` tabstop in case of normal snippets.
+// This replacement for normal snippets allows you to reuse a single snippet definition for both prefix and postfix use, as shown below.
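+//
+// As an illustration (this is a made-up example, not one of the shipped defaults), a single hypothetical
+// definition could serve as both a prefix snippet (triggered by `boxnew`) and a postfix snippet (triggered by `box`):
+// [source,json]
+// ----
+// {
+// "Box::new": {
+// "prefix": "boxnew",
+// "postfix": "box",
+// "body": "Box::new(${receiver})",
+// "description": "Put the expression into a `Box`",
+// "scope": "expr"
+// }
+// }
+// ----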
+//
+// For the VSCode editor, rust-analyzer also ships with a small set of defaults which can be removed
+// by overwriting the settings object mentioned above; the defaults are:
+// [source,json]
+// ----
+// {
+// "Arc::new": {
+// "postfix": "arc",
+// "body": "Arc::new(${receiver})",
+// "requires": "std::sync::Arc",
+// "description": "Put the expression into an `Arc`",
+// "scope": "expr"
+// },
+// "Rc::new": {
+// "postfix": "rc",
+// "body": "Rc::new(${receiver})",
+// "requires": "std::rc::Rc",
+// "description": "Put the expression into an `Rc`",
+// "scope": "expr"
+// },
+// "Box::pin": {
+// "postfix": "pinbox",
+// "body": "Box::pin(${receiver})",
+// "requires": "std::boxed::Box",
+// "description": "Put the expression into a pinned `Box`",
+// "scope": "expr"
+// },
+// "Ok": {
+// "postfix": "ok",
+// "body": "Ok(${receiver})",
+// "description": "Wrap the expression in a `Result::Ok`",
+// "scope": "expr"
+// },
+// "Err": {
+// "postfix": "err",
+// "body": "Err(${receiver})",
+// "description": "Wrap the expression in a `Result::Err`",
+// "scope": "expr"
+// },
+// "Some": {
+// "postfix": "some",
+// "body": "Some(${receiver})",
+// "description": "Wrap the expression in an `Option::Some`",
+// "scope": "expr"
+// }
+// }
+// ----
+
+use ide_db::imports::import_assets::LocatedImport;
+use itertools::Itertools;
+use syntax::{ast, AstNode, GreenNode, SyntaxNode};
+
+use crate::context::CompletionContext;
+
+/// A snippet scope describing where a snippet may apply to.
+/// These may differ slightly in meaning depending on the snippet trigger.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SnippetScope {
+ Item,
+ Expr,
+ Type,
+}
+
+/// A user supplied snippet.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Snippet {
+ pub postfix_triggers: Box<[Box<str>]>,
+ pub prefix_triggers: Box<[Box<str>]>,
+ pub scope: SnippetScope,
+ pub description: Option<Box<str>>,
+ snippet: String,
+ // These are `ast::Path`s, but because SyntaxNodes are not Send we store their green nodes
+ // and reconstruct the paths on demand instead. This is cheaper than reparsing them
+ // from strings.
+ requires: Box<[GreenNode]>,
+}
+
+impl Snippet {
+ pub fn new(
+ prefix_triggers: &[String],
+ postfix_triggers: &[String],
+ snippet: &[String],
+ description: &str,
+ requires: &[String],
+ scope: SnippetScope,
+ ) -> Option<Self> {
+ if prefix_triggers.is_empty() && postfix_triggers.is_empty() {
+ return None;
+ }
+ let (requires, snippet, description) = validate_snippet(snippet, description, requires)?;
+ Some(Snippet {
+ // Box::into doesn't work as that has a Copy bound 😒
+ postfix_triggers: postfix_triggers.iter().map(String::as_str).map(Into::into).collect(),
+ prefix_triggers: prefix_triggers.iter().map(String::as_str).map(Into::into).collect(),
+ scope,
+ snippet,
+ description,
+ requires,
+ })
+ }
+
+ /// Returns [`None`] if the required items do not resolve.
+ pub(crate) fn imports(&self, ctx: &CompletionContext<'_>) -> Option<Vec<LocatedImport>> {
+ import_edits(ctx, &self.requires)
+ }
+
+ pub fn snippet(&self) -> String {
+ self.snippet.replace("${receiver}", "$0")
+ }
+
+ pub fn postfix_snippet(&self, receiver: &str) -> String {
+ self.snippet.replace("${receiver}", receiver)
+ }
+}
+
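+/// Resolves each required path against the completion scope, returning `None` if any of
+/// them fails to resolve.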
+fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> {
+ let resolve = |import: &GreenNode| {
+ let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?;
+ let item = match ctx.scope.speculative_resolve(&path)? {
+ hir::PathResolution::Def(def) => def.into(),
+ _ => return None,
+ };
+ let path =
+ ctx.module.find_use_path_prefixed(ctx.db, item, ctx.config.insert_use.prefix_kind)?;
+ Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
+ };
+ let mut res = Vec::with_capacity(requires.len());
+ for import in requires {
+ match resolve(import) {
+ Some(first) => res.extend(first),
+ None => return None,
+ }
+ }
+ Some(res)
+}
+
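+/// Parses every `requires` entry as a full path, joins the snippet body lines with newlines,
+/// and truncates the description to its first line, returning `None` if any path fails to parse.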
+fn validate_snippet(
+ snippet: &[String],
+ description: &str,
+ requires: &[String],
+) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
+ let mut imports = Vec::with_capacity(requires.len());
+ for path in requires.iter() {
+ let use_path = ast::SourceFile::parse(&format!("use {};", path))
+ .syntax_node()
+ .descendants()
+ .find_map(ast::Path::cast)?;
+ if use_path.syntax().text() != path.as_str() {
+ return None;
+ }
+ let green = use_path.syntax().green().into_owned();
+ imports.push(green);
+ }
+ let snippet = snippet.iter().join("\n");
+ let description = (!description.is_empty())
+ .then(|| description.split_once('\n').map_or(description, |(it, _)| it))
+ .map(ToOwned::to_owned)
+ .map(Into::into);
+ Some((imports.into_boxed_slice(), snippet, description))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
new file mode 100644
index 000000000..cf826648d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -0,0 +1,305 @@
+//! Tests and test utilities for completions.
+//!
+//! Most tests live in this module or its submodules. The tests in these submodules are "location"
+//! oriented, that is, they try to check completions for something like type position, param position,
+//! etc.
+//! Tests that are more oriented towards specific completion types like visibility checks of path
+//! completions or `check_edit` tests usually live in their respective completion modules instead.
+//! The main purpose of this test module and its submodules is therefore to give the developer an
+//! overview of what's being completed where, not how.
+
+mod attribute;
+mod expression;
+mod flyimport;
+mod fn_param;
+mod item_list;
+mod item;
+mod pattern;
+mod predicate;
+mod proc_macros;
+mod record;
+mod special;
+mod type_pos;
+mod use_tree;
+mod visibility;
+
+use hir::{db::DefDatabase, PrefixKind, Semantics};
+use ide_db::{
+ base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ RootDatabase, SnippetCap,
+};
+use itertools::Itertools;
+use stdx::{format_to, trim_indent};
+use syntax::{AstNode, NodeOrToken, SyntaxElement};
+use test_utils::assert_eq_text;
+
+use crate::{
+ resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionItem,
+ CompletionItemKind,
+};
+
+/// Lots of basic item definitions
+const BASE_ITEMS_FIXTURE: &str = r#"
+enum Enum { TupleV(u32), RecordV { field: u32 }, UnitV }
+use self::Enum::TupleV;
+mod module {}
+
+trait Trait {}
+static STATIC: Unit = Unit;
+const CONST: Unit = Unit;
+struct Record { field: u32 }
+struct Tuple(u32);
+struct Unit;
+#[macro_export]
+macro_rules! makro {}
+#[rustc_builtin_macro]
+pub macro Clone {}
+fn function() {}
+union Union { field: i32 }
+"#;
+
+pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: false,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: PrefixKind::Plain,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+};
+
+pub(crate) fn completion_list(ra_fixture: &str) -> String {
+ completion_list_with_config(TEST_CONFIG, ra_fixture, true, None)
+}
+
+pub(crate) fn completion_list_no_kw(ra_fixture: &str) -> String {
+ completion_list_with_config(TEST_CONFIG, ra_fixture, false, None)
+}
+
+pub(crate) fn completion_list_no_kw_with_private_editable(ra_fixture: &str) -> String {
+ let mut config = TEST_CONFIG.clone();
+ config.enable_private_editable = true;
+ completion_list_with_config(config, ra_fixture, false, None)
+}
+
+pub(crate) fn completion_list_with_trigger_character(
+ ra_fixture: &str,
+ trigger_character: Option<char>,
+) -> String {
+ completion_list_with_config(TEST_CONFIG, ra_fixture, true, trigger_character)
+}
+
+fn completion_list_with_config(
+ config: CompletionConfig,
+ ra_fixture: &str,
+ include_keywords: bool,
+ trigger_character: Option<char>,
+) -> String {
+ // filter out all but one builtin type completion for smaller test outputs
+ let items = get_all_items(config, ra_fixture, trigger_character);
+ let items = items
+ .into_iter()
+ .filter(|it| it.kind() != CompletionItemKind::BuiltinType || it.label() == "u32")
+ .filter(|it| include_keywords || it.kind() != CompletionItemKind::Keyword)
+ .filter(|it| include_keywords || it.kind() != CompletionItemKind::Snippet)
+ .sorted_by_key(|it| (it.kind(), it.label().to_owned(), it.detail().map(ToOwned::to_owned)))
+ .collect();
+ render_completion_list(items)
+}
+
+/// Creates analysis from a multi-file fixture and returns the position marked with $0.
+pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ let mut database = RootDatabase::default();
+ database.set_enable_proc_attr_macros(true);
+ database.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (database, FilePosition { file_id, offset })
+}
+
+pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec<CompletionItem> {
+ do_completion_with_config(TEST_CONFIG, code, kind)
+}
+
+pub(crate) fn do_completion_with_config(
+ config: CompletionConfig,
+ code: &str,
+ kind: CompletionItemKind,
+) -> Vec<CompletionItem> {
+ get_all_items(config, code, None)
+ .into_iter()
+ .filter(|c| c.kind() == kind)
+ .sorted_by(|l, r| l.label().cmp(r.label()))
+ .collect()
+}
+
+fn render_completion_list(completions: Vec<CompletionItem>) -> String {
+ fn monospace_width(s: &str) -> usize {
+ s.chars().count()
+ }
+ let label_width =
+ completions.iter().map(|it| monospace_width(it.label())).max().unwrap_or_default().min(22);
+ completions
+ .into_iter()
+ .map(|it| {
+ let tag = it.kind().tag();
+ let var_name = format!("{} {}", tag, it.label());
+ let mut buf = var_name;
+ if let Some(detail) = it.detail() {
+ let width = label_width.saturating_sub(monospace_width(it.label()));
+ format_to!(buf, "{:width$} {}", "", detail, width = width);
+ }
+ if it.deprecated() {
+ format_to!(buf, " DEPRECATED");
+ }
+ format_to!(buf, "\n");
+ buf
+ })
+ .collect()
+}
+
+#[track_caller]
+pub(crate) fn check_edit(what: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_edit_with_config(TEST_CONFIG, what, ra_fixture_before, ra_fixture_after)
+}
+
+#[track_caller]
+pub(crate) fn check_edit_with_config(
+ config: CompletionConfig,
+ what: &str,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ let (db, position) = position(ra_fixture_before);
+ let completions: Vec<CompletionItem> =
+ crate::completions(&db, &config, position, None).unwrap().into();
+ let (completion,) = completions
+ .iter()
+ .filter(|it| it.lookup() == what)
+ .collect_tuple()
+ .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions));
+ let mut actual = db.file_text(position.file_id).to_string();
+
+ let mut combined_edit = completion.text_edit().to_owned();
+
+ resolve_completion_edits(
+ &db,
+ &config,
+ position,
+ completion.imports_to_add().iter().filter_map(|import_edit| {
+ let import_path = &import_edit.import_path;
+ let import_name = import_path.segments().last()?;
+ Some((import_path.to_string(), import_name.to_string()))
+ }),
+ )
+ .into_iter()
+ .flatten()
+ .for_each(|text_edit| {
+ combined_edit.union(text_edit).expect(
+ "Failed to apply completion resolve changes: change ranges overlap, but should not",
+ )
+ });
+
+ combined_edit.apply(&mut actual);
+ assert_eq_text!(&ra_fixture_after, &actual)
+}
+
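+/// Asserts that the token immediately to the left of the cursor position satisfies the given predicate.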
+pub(crate) fn check_pattern_is_applicable(code: &str, check: impl FnOnce(SyntaxElement) -> bool) {
+ let (db, pos) = position(code);
+
+ let sema = Semantics::new(&db);
+ let original_file = sema.parse(pos.file_id);
+ let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap();
+ assert!(check(NodeOrToken::Token(token)));
+}
+
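+/// Computes all completions for the given fixture and asserts that every item's source range
+/// contains the offset of the completion request.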
+pub(crate) fn get_all_items(
+ config: CompletionConfig,
+ code: &str,
+ trigger_character: Option<char>,
+) -> Vec<CompletionItem> {
+ let (db, position) = position(code);
+ let res = crate::completions(&db, &config, position, trigger_character)
+ .map_or_else(Vec::default, Into::into);
+ // validate
+ res.iter().for_each(|it| {
+ let sr = it.source_range();
+ assert!(
+ sr.contains_inclusive(position.offset),
+ "source range {sr:?} does not contain the offset {:?} of the completion request: {it:?}",
+ position.offset
+ );
+ });
+ res
+}
+
+#[test]
+fn test_no_completions_required() {
+ assert_eq!(completion_list(r#"fn foo() { for i i$0 }"#), String::new());
+}
+
+#[test]
+fn regression_10042() {
+ completion_list(
+ r#"
+macro_rules! preset {
+ ($($x:ident)&&*) => {
+ {
+ let mut v = Vec::new();
+ $(
+ v.push($x.into());
+ )*
+ v
+ }
+ };
+}
+
+fn foo() {
+ preset!(foo$0);
+}
+"#,
+ );
+}
+
+#[test]
+fn no_completions_in_comments() {
+ assert_eq!(
+ completion_list(
+ r#"
+fn test() {
+let x = 2; // A comment$0
+}
+"#,
+ ),
+ String::new(),
+ );
+ assert_eq!(
+ completion_list(
+ r#"
+/*
+Some multi-line comment$0
+*/
+"#,
+ ),
+ String::new(),
+ );
+ assert_eq!(
+ completion_list(
+ r#"
+/// Some doc comment
+/// let test$0 = 1
+"#,
+ ),
+ String::new(),
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
new file mode 100644
index 000000000..1578ba2c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
@@ -0,0 +1,1016 @@
+//! Completion tests for attributes.
+use expect_test::{expect, Expect};
+
+use crate::tests::{check_edit, completion_list};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn proc_macros() {
+ check(
+ r#"
+//- proc_macros: identity
+#[$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at repr(…)
+ at warn(…)
+ md proc_macros
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn proc_macros_on_comment() {
+ check(
+ r#"
+//- proc_macros: identity
+/// $0
+#[proc_macros::identity]
+struct Foo;
+"#,
+ expect![[r#""#]],
+ )
+}
+
+#[test]
+fn proc_macros_qualified() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at identity proc_macro identity
+ "#]],
+ )
+}
+
+#[test]
+fn inside_nested_attr() {
+ check(r#"#[cfg($0)]"#, expect![[]])
+}
+
+#[test]
+fn with_existing_attr() {
+ check(
+ r#"#[no_mangle] #[$0] mcall!();"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at forbid(…)
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn attr_on_source_file() {
+ check(
+ r#"#![$0]"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at crate_name = ""
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at feature(…)
+ at forbid(…)
+ at must_use
+ at no_implicit_prelude
+ at no_main
+ at no_mangle
+ at no_std
+ at recursion_limit = "…"
+ at type_length_limit = …
+ at warn(…)
+ at windows_subsystem = "…"
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_module() {
+ check(
+ r#"#[$0] mod foo;"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at macro_use
+ at must_use
+ at no_mangle
+ at path = "…"
+ at warn(…)
+ kw crate::
+ kw self::
+ kw super::
+ "#]],
+ );
+ check(
+ r#"mod foo {#![$0]}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_implicit_prelude
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ kw super::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_macro_rules() {
+ check(
+ r#"#[$0] macro_rules! foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at macro_export
+ at macro_use
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_macro_def() {
+ check(
+ r#"#[$0] macro foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_extern_crate() {
+ check(
+ r#"#[$0] extern crate foo;"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at macro_use
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_use() {
+ check(
+ r#"#[$0] use foo;"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_type_alias() {
+ check(
+ r#"#[$0] type foo = ();"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_struct() {
+ check(
+ r#"
+//- minicore:derive
+#[$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at derive macro derive
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at repr(…)
+ at warn(…)
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_enum() {
+ check(
+ r#"#[$0] enum Foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at repr(…)
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_const() {
+ check(
+ r#"#[$0] const FOO: () = ();"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_static() {
+ check(
+ r#"#[$0] static FOO: () = ()"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at export_name = "…"
+ at forbid(…)
+ at global_allocator
+ at link_name = "…"
+ at link_section = "…"
+ at must_use
+ at no_mangle
+ at used
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_trait() {
+ check(
+ r#"#[$0] trait Foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_impl() {
+ check(
+ r#"#[$0] impl () {}"#,
+ expect![[r#"
+ at allow(…)
+ at automatically_derived
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"impl () {#![$0]}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_extern_block() {
+ check(
+ r#"#[$0] extern {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at link
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"extern {#![$0]}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at forbid(…)
+ at link
+ at must_use
+ at no_mangle
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_variant() {
+ check(
+ r#"enum Foo { #[$0] Bar }"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at forbid(…)
+ at non_exhaustive
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_on_fn() {
+ check(
+ r#"#[$0] fn main() {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at cold
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at export_name = "…"
+ at forbid(…)
+ at ignore = "…"
+ at inline
+ at link_name = "…"
+ at link_section = "…"
+ at must_use
+ at must_use
+ at no_mangle
+ at panic_handler
+ at proc_macro
+ at proc_macro_attribute
+ at proc_macro_derive(…)
+ at should_panic
+ at target_feature(enable = "…")
+ at test
+ at track_caller
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn attr_in_source_file_end() {
+ check(
+ r#"#[$0]"#,
+ expect![[r#"
+ at allow(…)
+ at automatically_derived
+ at cfg(…)
+ at cfg_attr(…)
+ at cold
+ at deny(…)
+ at deprecated
+ at derive(…)
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at export_name = "…"
+ at forbid(…)
+ at global_allocator
+ at ignore = "…"
+ at inline
+ at link
+ at link_name = "…"
+ at link_section = "…"
+ at macro_export
+ at macro_use
+ at must_use
+ at no_mangle
+ at non_exhaustive
+ at panic_handler
+ at path = "…"
+ at proc_macro
+ at proc_macro_attribute
+ at proc_macro_derive(…)
+ at repr(…)
+ at should_panic
+ at target_feature(enable = "…")
+ at test
+ at track_caller
+ at used
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+mod cfg {
+ use super::*;
+
+ #[test]
+ fn cfg_target_endian() {
+ check(
+ r#"#[cfg(target_endian = $0"#,
+ expect![[r#"
+ ba big
+ ba little
+ "#]],
+ );
+ }
+}
+
+mod derive {
+ use super::*;
+
+ fn check_derive(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn no_completion_for_incorrect_derive() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive{$0)] struct Test;
+"#,
+ expect![[]],
+ )
+ }
+
+ #[test]
+ fn empty_derive() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive($0)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de PartialEq macro PartialEq
+ de PartialEq, Eq
+ de PartialEq, Eq, PartialOrd, Ord
+ de PartialEq, PartialOrd
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_with_input_before() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive(serde::Serialize, PartialEq, $0)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de Eq
+ de Eq, PartialOrd, Ord
+ de PartialOrd
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ )
+ }
+
+ #[test]
+ fn derive_with_input_after() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive($0 serde::Serialize, PartialEq)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de Eq
+ de Eq, PartialOrd, Ord
+ de PartialOrd
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_with_existing_derives() {
+ check_derive(
+ r#"
+//- minicore: derive, copy, clone, ord, eq, default, fmt
+#[derive(PartialEq, Eq, Or$0)] struct Test;
+"#,
+ expect![[r#"
+ de Clone macro Clone
+ de Clone, Copy
+ de Default macro Default
+ de PartialOrd
+ de PartialOrd, Ord
+ md core
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_flyimport() {
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+#[derive(der$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity (use proc_macros::DeriveIdentity) proc_macro DeriveIdentity
+ md core
+ md proc_macros
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+use proc_macros::DeriveIdentity;
+#[derive(der$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity proc_macro DeriveIdentity
+ md core
+ md proc_macros
+ kw crate::
+ kw self::
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive_flyimport_edit() {
+ check_edit(
+ "DeriveIdentity",
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+#[derive(der$0)] struct Test;
+"#,
+ r#"
+use proc_macros::DeriveIdentity;
+
+#[derive(DeriveIdentity)] struct Test;
+"#,
+ );
+ }
+
+ #[test]
+ fn qualified() {
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive, copy, clone
+#[derive(proc_macros::$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity proc_macro DeriveIdentity
+ "#]],
+ );
+ check_derive(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive, copy, clone
+#[derive(proc_macros::C$0)] struct Test;
+"#,
+ expect![[r#"
+ de DeriveIdentity proc_macro DeriveIdentity
+ "#]],
+ );
+ }
+}
+
+mod lint {
+ use super::*;
+
+ #[test]
+ fn lint_empty() {
+ check_edit(
+ "deprecated",
+ r#"#[allow($0)] struct Test;"#,
+ r#"#[allow(deprecated)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_with_existing() {
+ check_edit(
+ "deprecated",
+ r#"#[allow(keyword_idents, $0)] struct Test;"#,
+ r#"#[allow(keyword_idents, deprecated)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_qualified() {
+ check_edit(
+ "deprecated",
+ r#"#[allow(keyword_idents, $0)] struct Test;"#,
+ r#"#[allow(keyword_idents, deprecated)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_feature() {
+ check_edit(
+ "box_syntax",
+ r#"#[feature(box_$0)] struct Test;"#,
+ r#"#[feature(box_syntax)] struct Test;"#,
+ )
+ }
+
+ #[test]
+ fn lint_clippy_unqualified() {
+ check_edit(
+ "clippy::as_conversions",
+ r#"#[allow($0)] struct Test;"#,
+ r#"#[allow(clippy::as_conversions)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_clippy_qualified() {
+ check_edit(
+ "as_conversions",
+ r#"#[allow(clippy::$0)] struct Test;"#,
+ r#"#[allow(clippy::as_conversions)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_rustdoc_unqualified() {
+ check_edit(
+ "rustdoc::bare_urls",
+ r#"#[allow($0)] struct Test;"#,
+ r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_rustdoc_qualified() {
+ check_edit(
+ "bare_urls",
+ r#"#[allow(rustdoc::$0)] struct Test;"#,
+ r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
+ );
+ }
+
+ #[test]
+ fn lint_unclosed() {
+ check_edit(
+ "deprecated",
+ r#"#[allow(dep$0 struct Test;"#,
+ r#"#[allow(deprecated struct Test;"#,
+ );
+ check_edit(
+ "bare_urls",
+ r#"#[allow(rustdoc::$0 struct Test;"#,
+ r#"#[allow(rustdoc::bare_urls struct Test;"#,
+ );
+ }
+}
+
+mod repr {
+ use super::*;
+
+ fn check_repr(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn no_completion_for_incorrect_repr() {
+ check_repr(r#"#[repr{$0)] struct Test;"#, expect![[]])
+ }
+
+ #[test]
+ fn empty() {
+ check_repr(
+ r#"#[repr($0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba align($0)
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba packed
+ ba transparent
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn transparent() {
+ check_repr(r#"#[repr(transparent, $0)] struct Test;"#, expect![[r#""#]]);
+ }
+
+ #[test]
+ fn align() {
+ check_repr(
+ r#"#[repr(align(1), $0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba transparent
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn packed() {
+ check_repr(
+ r#"#[repr(packed, $0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba transparent
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn c() {
+ check_repr(
+ r#"#[repr(C, $0)] struct Test;"#,
+ expect![[r#"
+ ba align($0)
+ ba i16
+ ba i28
+ ba i32
+ ba i64
+ ba i8
+ ba isize
+ ba packed
+ ba u128
+ ba u16
+ ba u32
+ ba u64
+ ba u8
+ ba usize
+ "#]],
+ );
+ }
+
+ #[test]
+ fn prim() {
+ check_repr(
+ r#"#[repr(usize, $0)] struct Test;"#,
+ expect![[r#"
+ ba C
+ ba align($0)
+ ba packed
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
new file mode 100644
index 000000000..925081ebf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -0,0 +1,672 @@
+//! Completion tests for expressions.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+fn check_empty(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn complete_literal_struct_with_a_private_field() {
+ // `FooDesc.bar` is private, so the completion should not be triggered.
+ check(
+ r#"
+mod _69latrick {
+ pub struct FooDesc { pub six: bool, pub neuf: Vec<String>, bar: bool }
+ pub fn create_foo(foo_desc: &FooDesc) -> () { () }
+}
+
+fn baz() {
+ use _69latrick::*;
+
+ let foo = create_foo(&$0);
+}
+ "#,
+ // This should not contain `FooDesc {…}`.
+ expect![[r#"
+ ct CONST
+ en Enum
+ fn baz() fn()
+ fn create_foo(…) fn(&FooDesc)
+ fn function() fn()
+ ma makro!(…) macro_rules! makro
+ md _69latrick
+ md module
+ sc STATIC
+ st FooDesc
+ st Record
+ st Tuple
+ st Unit
+ un Union
+ ev TupleV(…) TupleV(u32)
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw mut
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ )
+}
+
+#[test]
+fn completes_various_bindings() {
+ check_empty(
+ r#"
+fn func(param0 @ (param1, param2): (i32, i32)) {
+ let letlocal = 92;
+ if let ifletlocal = 100 {
+ match 0 {
+ matcharm => 1 + $0,
+ otherwise => (),
+ }
+ }
+ let letlocal2 = 44;
+}
+"#,
+ expect![[r#"
+ fn func(…) fn((i32, i32))
+ lc ifletlocal i32
+ lc letlocal i32
+ lc matcharm i32
+ lc param0 (i32, i32)
+ lc param1 i32
+ lc param2 i32
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn completes_all_the_things_in_fn_body() {
+ check(
+ r#"
+use non_existant::Unresolved;
+mod qualified { pub enum Enum { Variant } }
+
+impl Unit {
+ fn foo<'lifetime, TypeParam, const CONST_PARAM: usize>(self) {
+ fn local_func() {}
+ $0
+ }
+}
+"#,
+ // `self` is in here twice, once as the module, once as the local
+ expect![[r#"
+ ct CONST
+ cp CONST_PARAM
+ en Enum
+ fn function() fn()
+ fn local_func() fn()
+ lc self Unit
+ ma makro!(…) macro_rules! makro
+ md module
+ md qualified
+ sp Self
+ sc STATIC
+ st Record
+ st Tuple
+ st Unit
+ tp TypeParam
+ un Union
+ ev TupleV(…) TupleV(u32)
+ bt u32
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ me self.foo() fn(self)
+ sn macro_rules
+ sn pd
+ sn ppd
+ ?? Unresolved
+ "#]],
+ );
+ check(
+ r#"
+use non_existant::Unresolved;
+mod qualified { pub enum Enum { Variant } }
+
+impl Unit {
+ fn foo<'lifetime, TypeParam, const CONST_PARAM: usize>(self) {
+ fn local_func() {}
+ self::$0
+ }
+}
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ fn function() fn()
+ ma makro!(…) macro_rules! makro
+ md module
+ md qualified
+ sc STATIC
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ ev TupleV(…) TupleV(u32)
+ ?? Unresolved
+ "#]],
+ );
+}
+
+#[test]
+fn complete_in_block() {
+ check_empty(
+ r#"
+ fn foo() {
+ if true {
+ $0
+ }
+ }
+"#,
+ expect![[r#"
+ fn foo() fn()
+ bt u32
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ )
+}
+
+#[test]
+fn complete_after_if_expr() {
+ check_empty(
+ r#"
+ fn foo() {
+ if true {}
+ $0
+ }
+"#,
+ expect![[r#"
+ fn foo() fn()
+ bt u32
+ kw const
+ kw crate::
+ kw else
+ kw else if
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ )
+}
+
+#[test]
+fn complete_in_match_arm() {
+ check_empty(
+ r#"
+ fn foo() {
+ match () {
+ () => $0
+ }
+ }
+"#,
+ expect![[r#"
+ fn foo() fn()
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ )
+}
+
+#[test]
+fn completes_in_loop_ctx() {
+ check_empty(
+ r"fn my() { loop { $0 } }",
+ expect![[r#"
+ fn my() fn()
+ bt u32
+ kw break
+ kw const
+ kw continue
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ );
+}
+
+#[test]
+fn completes_in_let_initializer() {
+ check_empty(
+ r#"fn main() { let _ = $0 }"#,
+ expect![[r#"
+ fn main() fn()
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ )
+}
+
+#[test]
+fn struct_initializer_field_expr() {
+ check_empty(
+ r#"
+struct Foo {
+ pub f: i32,
+}
+fn foo() {
+ Foo {
+ f: $0
+ }
+}
+"#,
+ expect![[r#"
+ fn foo() fn()
+ st Foo
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_shows_single_completion() {
+ cov_mark::check!(shadowing_shows_single_completion);
+
+ check_empty(
+ r#"
+fn foo() {
+ let bar = 92;
+ {
+ let bar = 62;
+ drop($0)
+ }
+}
+"#,
+ expect![[r#"
+ fn foo() fn()
+ lc bar i32
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
+
+#[test]
+fn in_macro_expr_frag() {
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn quux(x: i32) {
+ m!($0);
+}
+"#,
+ expect![[r#"
+ fn quux(…) fn(i32)
+ lc x i32
+ ma m!(…) macro_rules! m
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+ check_empty(
+ r"
+macro_rules! m { ($e:expr) => { $e } }
+fn quux(x: i32) {
+ m!(x$0);
+}
+",
+ expect![[r#"
+ fn quux(…) fn(i32)
+ lc x i32
+ ma m!(…) macro_rules! m
+ bt u32
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw return
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn quux(x: i32) {
+ let y = 92;
+ m!(x$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn enum_qualified() {
+ check(
+ r#"
+impl Enum {
+ type AssocType = ();
+ const ASSOC_CONST: () = ();
+ fn assoc_fn() {}
+}
+fn func() {
+ Enum::$0
+}
+"#,
+ expect![[r#"
+ ct ASSOC_CONST const ASSOC_CONST: ()
+ fn assoc_fn() fn()
+ ta AssocType type AssocType = ()
+ ev RecordV {…} RecordV { field: u32 }
+ ev TupleV(…) TupleV(u32)
+ ev UnitV UnitV
+ "#]],
+ );
+}
+
+#[test]
+fn ty_qualified_no_drop() {
+ check_empty(
+ r#"
+//- minicore: drop
+struct Foo;
+impl Drop for Foo {
+ fn drop(&mut self) {}
+}
+fn func() {
+ Foo::$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn with_parens() {
+ check_empty(
+ r#"
+enum Enum {
+ Variant()
+}
+impl Enum {
+ fn variant() -> Self { Enum::Variant() }
+}
+fn func() {
+ Enum::$0()
+}
+"#,
+ expect![[r#"
+ fn variant fn() -> Enum
+ ev Variant Variant
+ "#]],
+ );
+}
+
+#[test]
+fn detail_impl_trait_in_return_position() {
+ check_empty(
+ r"
+//- minicore: sized
+trait Trait<T> {}
+fn foo<U>() -> impl Trait<U> {}
+fn main() {
+ self::$0
+}
+",
+ expect![[r#"
+ fn foo() fn() -> impl Trait<U>
+ fn main() fn()
+ tt Trait
+ "#]],
+ );
+}
+
+#[test]
+fn detail_async_fn() {
+ check_empty(
+ r#"
+//- minicore: future, sized
+trait Trait<T> {}
+async fn foo() -> u8 {}
+async fn bar<U>() -> impl Trait<U> {}
+fn main() {
+ self::$0
+}
+"#,
+ expect![[r#"
+ fn bar() async fn() -> impl Trait<U>
+ fn foo() async fn() -> u8
+ fn main() fn()
+ tt Trait
+ "#]],
+ );
+}
+
+#[test]
+fn detail_impl_trait_in_argument_position() {
+ check_empty(
+ r"
+//- minicore: sized
+trait Trait<T> {}
+struct Foo;
+impl Foo {
+ fn bar<U>(_: impl Trait<U>) {}
+}
+fn main() {
+ Foo::$0
+}
+",
+ expect![[r"
+ fn bar(…) fn(impl Trait<U>)
+ "]],
+ );
+}
+
+#[test]
+fn complete_record_expr_path() {
+ check(
+ r#"
+struct Zulu;
+impl Zulu {
+ fn test() -> Self { }
+}
+fn boi(val: Zulu) { }
+fn main() {
+ boi(Zulu:: $0 {});
+}
+"#,
+ expect![[r#"
+ fn test() fn() -> Zulu
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
new file mode 100644
index 000000000..0bba7f245
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -0,0 +1,1232 @@
+use expect_test::{expect, Expect};
+
+use crate::{
+ context::{CompletionAnalysis, NameContext, NameKind, NameRefKind},
+ tests::{check_edit, check_edit_with_config, TEST_CONFIG},
+};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let config = TEST_CONFIG;
+ let (db, position) = crate::tests::position(ra_fixture);
+ let (ctx, analysis) = crate::context::CompletionContext::new(&db, position, &config).unwrap();
+
+ let mut acc = crate::completions::Completions::default();
+ if let CompletionAnalysis::Name(NameContext { kind: NameKind::IdentPat(pat_ctx), .. }) =
+ &analysis
+ {
+ crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pat_ctx);
+ }
+ if let CompletionAnalysis::NameRef(name_ref_ctx) = &analysis {
+ match &name_ref_ctx.kind {
+ NameRefKind::Path(path) => {
+ crate::completions::flyimport::import_on_the_fly_path(&mut acc, &ctx, path);
+ }
+ NameRefKind::DotAccess(dot_access) => {
+ crate::completions::flyimport::import_on_the_fly_dot(&mut acc, &ctx, dot_access);
+ }
+ NameRefKind::Pattern(pattern) => {
+ crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pattern);
+ }
+ _ => (),
+ }
+ }
+
+ expect.assert_eq(&super::render_completion_list(Vec::from(acc)));
+}
+
+#[test]
+fn function_fuzzy_completion() {
+ check_edit(
+ "stdin",
+ r#"
+//- /lib.rs crate:dep
+pub mod io {
+ pub fn stdin() {}
+};
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ stdi$0
+}
+"#,
+ r#"
+use dep::io::stdin;
+
+fn main() {
+ stdin()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn macro_fuzzy_completion() {
+ check_edit(
+ "macro_with_curlies!",
+ r#"
+//- /lib.rs crate:dep
+/// Please call me as macro_with_curlies! {}
+#[macro_export]
+macro_rules! macro_with_curlies {
+ () => {}
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ curli$0
+}
+"#,
+ r#"
+use dep::macro_with_curlies;
+
+fn main() {
+ macro_with_curlies! {$0}
+}
+"#,
+ );
+}
+
+#[test]
+fn struct_fuzzy_completion() {
+ check_edit(
+ "ThirdStruct",
+ r#"
+//- /lib.rs crate:dep
+pub struct FirstStruct;
+pub mod some_module {
+ pub struct SecondStruct;
+ pub struct ThirdStruct;
+}
+
+//- /main.rs crate:main deps:dep
+use dep::{FirstStruct, some_module::SecondStruct};
+
+fn main() {
+ this$0
+}
+"#,
+ r#"
+use dep::{FirstStruct, some_module::{SecondStruct, ThirdStruct}};
+
+fn main() {
+ ThirdStruct
+}
+"#,
+ );
+}
+
+#[test]
+fn short_paths_are_ignored() {
+ cov_mark::check!(flyimport_exact_on_short_path);
+
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct Bar;
+pub struct Rcar;
+pub struct Rc;
+pub mod some_module {
+ pub struct Bar;
+ pub struct Rcar;
+ pub struct Rc;
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ rc$0
+}
+"#,
+ expect![[r#"
+ st Rc (use dep::Rc)
+ st Rc (use dep::some_module::Rc)
+ "#]],
+ );
+}
+
+#[test]
+fn fuzzy_completions_come_in_specific_order() {
+ cov_mark::check!(certain_fuzzy_order_test);
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct FirstStruct;
+pub mod some_module {
+ // already imported, omitted
+ pub struct SecondStruct;
+ // does not contain all letters from the query, omitted
+ pub struct UnrelatedOne;
+ // contains all letters from the query, but not in sequence, displayed last
+ pub struct ThiiiiiirdStruct;
+ // contains all letters from the query, but not in the beginning, displayed second
+ pub struct AfterThirdStruct;
+ // contains all letters from the query in the beginning, displayed first
+ pub struct ThirdStruct;
+}
+
+//- /main.rs crate:main deps:dep
+use dep::{FirstStruct, some_module::SecondStruct};
+
+fn main() {
+ hir$0
+}
+"#,
+ expect![[r#"
+ st ThirdStruct (use dep::some_module::ThirdStruct)
+ st AfterThirdStruct (use dep::some_module::AfterThirdStruct)
+ st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct)
+ "#]],
+ );
+}
+
+#[test]
+fn trait_function_fuzzy_completion() {
+ let fixture = r#"
+ //- /lib.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+ }
+
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::wei$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ fn weird_function() (use dep::test_mod::TestTrait) fn()
+ "#]],
+ );
+
+ check_edit(
+ "weird_function",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestStruct::weird_function()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_const_fuzzy_completion() {
+ let fixture = r#"
+ //- /lib.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+ }
+
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::spe$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait)
+ "#]],
+ );
+
+ check_edit(
+ "SPECIAL_CONST",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestStruct::SPECIAL_CONST
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_method_fuzzy_completion() {
+ let fixture = r#"
+ //- /lib.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+ }
+
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.ran$0
+ }
+ "#;
+
+ check(
+ fixture,
+ expect![[r#"
+ me random_method() (use dep::test_mod::TestTrait) fn(&self)
+ "#]],
+ );
+
+ check_edit(
+ "random_method",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.random_method()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_method_from_alias() {
+ let fixture = r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ fn random_method();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn random_method() {}
+ }
+ pub type TestAlias = TestStruct;
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestAlias::ran$0
+}
+"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ fn random_method() (use dep::test_mod::TestTrait) fn()
+ "#]],
+ );
+
+ check_edit(
+ "random_method",
+ fixture,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestAlias::random_method()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn no_trait_type_fuzzy_completion() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::hum$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn does_not_propose_names_in_scope() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+use dep::test_mod::TestStruct;
+fn main() {
+ TestSt$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn does_not_propose_traits_in_scope() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+use dep::test_mod::{TestStruct, TestTrait};
+fn main() {
+ dep::test_mod::TestStruct::hum$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn blanket_trait_impl_import() {
+ check_edit(
+ "another_function",
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ pub struct TestStruct {}
+ pub trait TestTrait {
+ fn another_function();
+ }
+ impl<T> TestTrait for T {
+ fn another_function() {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::ano$0
+}
+"#,
+ r#"
+use dep::test_mod::TestTrait;
+
+fn main() {
+ dep::test_mod::TestStruct::another_function()$0
+}
+"#,
+ );
+}
+
+#[test]
+fn zero_input_deprecated_assoc_item_completion() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ #[deprecated]
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.$0
+}
+ "#,
+ expect![[r#"
+ me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
+ "#]],
+ );
+
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod test_mod {
+ #[deprecated]
+ pub trait TestTrait {
+ const SPECIAL_CONST: u8;
+ type HumbleType;
+ fn weird_function();
+ fn random_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const SPECIAL_CONST: u8 = 42;
+ type HumbleType = ();
+ fn weird_function() {}
+ fn random_method(&self) {}
+ }
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ dep::test_mod::TestStruct::$0
+}
+"#,
+ expect![[r#"
+ fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
+ "#]],
+ );
+}
+
+#[test]
+fn no_completions_in_use_statements() {
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub mod io {
+ pub fn stdin() {}
+};
+
+//- /main.rs crate:main deps:dep
+use stdi$0
+
+fn main() {}
+"#,
+ expect![[]],
+ );
+}
+
+#[test]
+fn prefix_config_usage() {
+ let fixture = r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use crate::foo::bar;
+
+fn main() {
+ Ite$0
+}"#;
+ let mut config = TEST_CONFIG;
+
+ config.insert_use.prefix_kind = hir::PrefixKind::ByCrate;
+ check_edit_with_config(
+ config.clone(),
+ "Item",
+ fixture,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use crate::foo::bar::{self, Item};
+
+fn main() {
+ Item
+}"#,
+ );
+
+ config.insert_use.prefix_kind = hir::PrefixKind::BySelf;
+ check_edit_with_config(
+ config.clone(),
+ "Item",
+ fixture,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use crate::foo::bar;
+
+use self::foo::bar::Item;
+
+fn main() {
+ Item
+}"#,
+ );
+
+ config.insert_use.prefix_kind = hir::PrefixKind::Plain;
+ check_edit_with_config(
+ config,
+ "Item",
+ fixture,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+ }
+}
+
+use foo::bar::Item;
+
+use crate::foo::bar;
+
+fn main() {
+ Item
+}"#,
+ );
+}
+
+#[test]
+fn unresolved_qualifier() {
+ let fixture = r#"
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
+ }
+ }
+}
+
+fn main() {
+ bar::baz::Ite$0
+}"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ st Item (use foo::bar::baz::Item)
+ "#]],
+ );
+
+ check_edit(
+ "Item",
+ fixture,
+ r#"
+ use foo::bar;
+
+ mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
+ }
+ }
+ }
+
+ fn main() {
+ bar::baz::Item
+ }"#,
+ );
+}
+
+#[test]
+fn unresolved_assoc_item_container() {
+ let fixture = r#"
+mod foo {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+}
+
+fn main() {
+ Item::TEST_A$0
+}"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ ct TEST_ASSOC (use foo::Item)
+ "#]],
+ );
+
+ check_edit(
+ "TEST_ASSOC",
+ fixture,
+ r#"
+use foo::Item;
+
+mod foo {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+}
+
+fn main() {
+ Item::TEST_ASSOC
+}"#,
+ );
+}
+
+#[test]
+fn unresolved_assoc_item_container_with_path() {
+ let fixture = r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::Item::TEST_A$0
+}"#;
+
+ check(
+ fixture,
+ expect![[r#"
+ ct TEST_ASSOC (use foo::bar::Item)
+ "#]],
+ );
+
+ check_edit(
+ "TEST_ASSOC",
+ fixture,
+ r#"
+use foo::bar;
+
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::Item::TEST_ASSOC
+}"#,
+ );
+}
+
+#[test]
+fn fuzzy_unresolved_path() {
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Item;
+
+ impl Item {
+ pub const TEST_ASSOC: usize = 3;
+ }
+ }
+}
+
+fn main() {
+ bar::ASS$0
+}"#,
+ expect![[]],
+ )
+}
+
+#[test]
+fn unqualified_assoc_items_are_omitted() {
+ check(
+ r#"
+mod something {
+ pub trait BaseTrait {
+ fn test_function() -> i32;
+ }
+
+ pub struct Item1;
+ pub struct Item2;
+
+ impl BaseTrait for Item1 {
+ fn test_function() -> i32 {
+ 1
+ }
+ }
+
+ impl BaseTrait for Item2 {
+ fn test_function() -> i32 {
+ 2
+ }
+ }
+}
+
+fn main() {
+ test_f$0
+}"#,
+ expect![[]],
+ )
+}
+
+#[test]
+fn case_matters() {
+ check(
+ r#"
+mod foo {
+ pub const TEST_CONST: usize = 3;
+ pub fn test_function() -> i32 {
+ 4
+ }
+}
+
+fn main() {
+ TES$0
+}"#,
+ expect![[r#"
+ ct TEST_CONST (use foo::TEST_CONST)
+ "#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ pub const TEST_CONST: usize = 3;
+ pub fn test_function() -> i32 {
+ 4
+ }
+}
+
+fn main() {
+ tes$0
+}"#,
+ expect![[r#"
+ ct TEST_CONST (use foo::TEST_CONST)
+ fn test_function() (use foo::test_function) fn() -> i32
+ "#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ pub const TEST_CONST: usize = 3;
+ pub fn test_function() -> i32 {
+ 4
+ }
+}
+
+fn main() {
+ Te$0
+}"#,
+ expect![[]],
+ );
+}
+
+#[test]
+fn no_fuzzy_during_fields_of_record_lit_syntax() {
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo {
+ some_field: i32,
+}
+fn main() {
+ let _ = Foo { so$0 };
+}
+"#,
+ expect![[]],
+ );
+}
+
+#[test]
+fn fuzzy_after_fields_of_record_lit_syntax() {
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo {
+ some_field: i32,
+}
+fn main() {
+ let _ = Foo { some_field: som$0 };
+}
+"#,
+ expect![[r#"
+ fn some_fn() (use m::some_fn) fn() -> i32
+ "#]],
+ );
+}
+
+#[test]
+fn no_flyimports_in_traits_and_impl_declarations() {
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+trait Foo {
+ som$0
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo;
+impl Foo {
+ som$0
+}
+"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+mod m {
+ pub fn some_fn() -> i32 {
+ 42
+ }
+}
+struct Foo;
+trait Bar {}
+impl Bar for Foo {
+ som$0
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn no_inherent_candidates_proposed() {
+ check(
+ r#"
+mod baz {
+ pub trait DefDatabase {
+ fn method1(&self);
+ }
+ pub trait HirDatabase: DefDatabase {
+ fn method2(&self);
+ }
+}
+
+mod bar {
+ fn test(db: &dyn crate::baz::HirDatabase) {
+ db.metho$0
+ }
+}
+ "#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+mod baz {
+ pub trait DefDatabase {
+ fn method1(&self);
+ }
+ pub trait HirDatabase: DefDatabase {
+ fn method2(&self);
+ }
+}
+
+mod bar {
+ fn test(db: &impl crate::baz::HirDatabase) {
+ db.metho$0
+ }
+}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+mod baz {
+ pub trait DefDatabase {
+ fn method1(&self);
+ }
+ pub trait HirDatabase: DefDatabase {
+ fn method2(&self);
+ }
+}
+
+mod bar {
+ fn test<T: crate::baz::HirDatabase>(db: T) {
+ db.metho$0
+ }
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn f() {
+ ().fro$0
+}
+
+//- /dep.rs crate:dep
+#[doc(hidden)]
+pub trait Private {
+ fn frob(&self) {}
+}
+
+impl<T> Private for T {}
+ "#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn f() {
+ ().fro$0
+}
+
+//- /dep.rs crate:dep
+pub trait Private {
+ #[doc(hidden)]
+ fn frob(&self) {}
+}
+
+impl<T> Private for T {}
+ "#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn regression_9760() {
+ check(
+ r#"
+struct Struct;
+fn main() {}
+
+mod mud {
+ fn func() {
+ let struct_instance = Stru$0
+ }
+}
+"#,
+ expect![[r#"
+ st Struct (use crate::Struct)
+ "#]],
+ );
+}
+
+#[test]
+fn flyimport_pattern() {
+ check(
+ r#"
+mod module {
+ pub struct FooStruct {}
+ pub const FooConst: () = ();
+ pub fn foo_fun() {}
+}
+fn function() {
+ let foo$0
+}
+"#,
+ expect![[r#"
+ ct FooConst (use module::FooConst)
+ st FooStruct (use module::FooStruct)
+ "#]],
+ );
+}
+
+#[test]
+fn flyimport_item_name() {
+ check(
+ r#"
+mod module {
+ pub struct Struct;
+}
+struct Str$0
+ "#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn flyimport_rename() {
+ check(
+ r#"
+mod module {
+ pub struct Struct;
+}
+use self as Str$0;
+ "#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn flyimport_enum_variant() {
+ check(
+ r#"
+mod foo {
+ pub struct Barbara;
+}
+
+enum Foo {
+ Barba$0()
+}
+}"#,
+ expect![[r#""#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ pub struct Barbara;
+}
+
+enum Foo {
+ Barba(Barba$0)
+}
+}"#,
+ expect![[r#"
+ st Barbara (use foo::Barbara)
+ "#]],
+ )
+}
+
+#[test]
+fn flyimport_attribute() {
+ check(
+ r#"
+//- proc_macros:identity
+#[ide$0]
+struct Foo;
+"#,
+ expect![[r#"
+ at identity (use proc_macros::identity) proc_macro identity
+ "#]],
+ );
+ check_edit(
+ "identity",
+ r#"
+//- proc_macros:identity
+#[ide$0]
+struct Foo;
+"#,
+ r#"
+use proc_macros::identity;
+
+#[identity]
+struct Foo;
+"#,
+ );
+}
+
+#[test]
+fn flyimport_in_type_bound_omits_types() {
+ check(
+ r#"
+mod module {
+ pub struct CompletemeStruct;
+ pub type CompletemeType = ();
+ pub enum CompletemeEnum {}
+ pub trait CompletemeTrait {}
+}
+
+fn f<T>() where T: Comp$0
+"#,
+ expect![[r#"
+ tt CompletemeTrait (use module::CompletemeTrait)
+ "#]],
+ );
+}
+
+#[test]
+fn flyimport_source_file() {
+ check(
+ r#"
+//- /main.rs crate:main deps:dep
+def$0
+//- /lib.rs crate:dep
+#[macro_export]
+macro_rules! define_struct {
+ () => {
+ pub struct Foo;
+ };
+}
+"#,
+ expect![[r#"
+ ma define_struct!(…) (use dep::define_struct) macro_rules! define_struct
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs
new file mode 100644
index 000000000..cce74604c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/fn_param.rs
@@ -0,0 +1,274 @@
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, completion_list_with_trigger_character};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+fn check_with_trigger_character(ra_fixture: &str, trigger_character: char, expect: Expect) {
+ let actual = completion_list_with_trigger_character(ra_fixture, Some(trigger_character));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn only_param() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: usize) {}
+fn baz(file$0) {}
+"#,
+ expect![[r#"
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn last_param() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: usize) {}
+fn baz(foo: (), file$0) {}
+"#,
+ expect![[r#"
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn first_param() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: usize) {}
+fn baz(file$0 id: u32) {}
+"#,
+ expect![[r#"
+ bn file_id: usize,
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn repeated_param_name() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: u32, $0) {}
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ );
+
+ check(
+ r#"
+fn f(#[foo = "bar"] baz: u32,) {}
+fn g(baz: (), ba$0)
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn trait_param() {
+ check(
+ r#"
+pub(crate) trait SourceRoot {
+ pub fn contains(file_id: usize) -> bool;
+ pub fn syntax(file$0)
+}
+"#,
+ expect![[r#"
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn in_inner_function() {
+ check(
+ r#"
+fn outer(text: &str) {
+ fn inner($0)
+}
+"#,
+ expect![[r#"
+ bn text: &str
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn trigger_by_l_paren() {
+ check_with_trigger_character(
+ r#"
+fn foo($0)
+"#,
+ '(',
+ expect![[]],
+ )
+}
+
+#[test]
+fn shows_non_ident_pat_param() {
+ check(
+ r#"
+struct Bar { bar: u32 }
+fn foo(Bar { bar }: Bar) {}
+fn foo2($0) {}
+"#,
+ expect![[r#"
+ st Bar
+ bn Bar { bar }: Bar
+ bn Bar {…} Bar { bar$1 }: Bar$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_only_param() {
+ check(
+ r#"
+struct A {}
+
+impl A {
+ fn foo(file_id: usize) {}
+ fn new($0) {}
+}
+"#,
+ expect![[r#"
+ sp Self
+ st A
+ bn &mut self
+ bn &self
+ bn file_id: usize
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_after_self() {
+ check(
+ r#"
+struct A {}
+
+impl A {
+ fn foo(file_id: usize) {}
+ fn new(self, $0) {}
+}
+"#,
+ expect![[r#"
+ sp Self
+ st A
+ bn file_id: usize
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+// Doesn't complete `qux` because there is no expression after it;
+// see the comment on `source_analyzer::adjust`.
+#[test]
+fn local_fn_shows_locals_for_params() {
+ check(
+ r#"
+fn outer() {
+ let foo = 3;
+ {
+ let bar = 3;
+ fn inner($0) {}
+ let baz = 3;
+ let qux = 3;
+ }
+ let fez = 3;
+}
+"#,
+ expect![[r#"
+ bn bar: i32
+ bn baz: i32
+ bn foo: i32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn closure_shows_locals_for_params() {
+ check(
+ r#"
+fn outer() {
+ let foo = 3;
+ {
+ let bar = 3;
+ |$0| {};
+ let baz = 3;
+ let qux = 3;
+ }
+ let fez = 3;
+}
+"#,
+ expect![[r#"
+ bn bar: i32
+ bn baz: i32
+ bn foo: i32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_fully_equal() {
+ check(
+ r#"
+fn foo(bar: u32) {}
+fn bar(bar$0) {}
+"#,
+ expect![[r#"
+ bn bar: u32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_for_params_with_attributes() {
+ check(
+ r#"
+fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
+fn g(foo: (), #[baz = "qux"] mut ba$0)
+"#,
+ expect![[r##"
+ bn #[baz = "qux"] mut bar: u32
+ "##]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
new file mode 100644
index 000000000..409413c1d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
@@ -0,0 +1,154 @@
+//! Completion tests for item specifics overall.
+//!
+//! Except for use items, which are tested in [super::use_tree], and mod declarations, which are tested
+//! in [crate::completions::mod_].
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn target_type_or_trait_in_impl_block() {
+ check(
+ r#"
+impl Tra$0
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn target_type_in_trait_impl_block() {
+ check(
+ r#"
+impl Trait for Str$0
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn after_trait_name_in_trait_def() {
+ check(
+ r"trait A $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn after_target_name_in_impl() {
+ check(
+ r"impl Trait $0",
+ expect![[r#"
+ kw for
+ kw where
+ "#]],
+ );
+ check(
+ r"impl Trait f$0",
+ expect![[r#"
+ kw for
+ kw where
+ "#]],
+ );
+ check(
+ r"impl Trait for Type $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn completes_where() {
+ check(
+ r"struct Struct $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"struct Struct $0 {}",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ // FIXME: This shouldn't be completed here
+ check(
+ r"struct Struct $0 ()",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"fn func() $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"enum Enum $0",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"enum Enum $0 {}",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+ check(
+ r"trait Trait $0 {}",
+ expect![[r#"
+ kw where
+ "#]],
+ );
+}
+
+#[test]
+fn before_record_field() {
+ check(
+ r#"
+struct Foo {
+ $0
+ pub f: i32,
+}
+"#,
+ expect![[r#"
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
new file mode 100644
index 000000000..5076c6e86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
@@ -0,0 +1,247 @@
+//! Completion tests for item list position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn in_mod_item_list() {
+ check(
+ r#"mod tests { $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw super::
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_source_file_item_list() {
+ check(
+ r#"$0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_item_list_after_attr() {
+ check(
+ r#"#[attr] $0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_qualified_path() {
+ check(
+ r#"crate::$0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ "#]],
+ )
+}
+
+#[test]
+fn after_unsafe_token() {
+ check(
+ r#"unsafe $0"#,
+ expect![[r#"
+ kw fn
+ kw impl
+ kw trait
+ "#]],
+ );
+}
+
+#[test]
+fn after_visibility() {
+ check(
+ r#"pub $0"#,
+ expect![[r#"
+ kw const
+ kw enum
+ kw extern
+ kw fn
+ kw mod
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ "#]],
+ );
+}
+
+#[test]
+fn after_visibility_unsafe() {
+ check(
+ r#"pub unsafe $0"#,
+ expect![[r#"
+ kw fn
+ kw trait
+ "#]],
+ );
+}
+
+#[test]
+fn in_impl_assoc_item_list() {
+ check(
+ r#"impl Struct { $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw unsafe
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_assoc_item_list_after_attr() {
+ check(
+ r#"impl Struct { #[attr] $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw unsafe
+ "#]],
+ )
+}
+
+#[test]
+fn in_trait_assoc_item_list() {
+ check(
+ r"trait Foo { $0 }",
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw self::
+ kw type
+ kw unsafe
+ "#]],
+ );
+}
+
+#[test]
+fn in_trait_impl_assoc_item_list() {
+ check(
+ r#"
+trait Test {
+ type Type0;
+ type Type1;
+ const CONST0: ();
+ const CONST1: ();
+ fn function0();
+ fn function1();
+}
+
+impl Test for () {
+ type Type0 = ();
+ const CONST0: () = ();
+ fn function0() {}
+ $0
+}
+"#,
+ expect![[r#"
+ ct const CONST1: () =
+ fn fn function1()
+ ma makro!(…) macro_rules! makro
+ md module
+ ta type Type1 =
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
new file mode 100644
index 000000000..30ddbe2dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
@@ -0,0 +1,716 @@
+//! Completion tests for pattern position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE};
+
+fn check_empty(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn wildcard() {
+ check(
+ r#"
+fn quux() {
+ let _$0
+}
+"#,
+ expect![""],
+ );
+}
+
+#[test]
+fn ident_rebind_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let en$0 @ x
+}
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn ident_ref_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let ref en$0
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+ check_empty(
+ r#"
+fn quux() {
+ let ref en$0 @ x
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+}
+
+#[test]
+fn ident_ref_mut_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let ref mut en$0
+}
+"#,
+ expect![[r#""#]],
+ );
+ check_empty(
+ r#"
+fn quux() {
+ let ref mut en$0 @ x
+}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn ref_pat() {
+ check_empty(
+ r#"
+fn quux() {
+ let &en$0
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+ check_empty(
+ r#"
+fn quux() {
+ let &mut en$0
+}
+"#,
+ expect![[r#""#]],
+ );
+ check_empty(
+ r#"
+fn foo() {
+ for &$0 in () {}
+}
+"#,
+ expect![[r#"
+ kw mut
+ "#]],
+ );
+}
+
+#[test]
+fn refutable() {
+ check(
+ r#"
+fn foo() {
+ if let a$0
+}
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ ev TupleV
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ bn TupleV(…) TupleV($1)$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn irrefutable() {
+ check(
+ r#"
+enum SingleVariantEnum {
+ Variant
+}
+use SingleVariantEnum::Variant;
+fn foo() {
+ let a$0
+}
+"#,
+ expect![[r#"
+ en SingleVariantEnum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ ev Variant
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ bn Variant Variant$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn in_param() {
+ check(
+ r#"
+fn foo(a$0) {
+}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ bn Record {…} Record { field$1 }: Record$0
+ bn Tuple(…) Tuple($1): Tuple$0
+ kw mut
+ kw ref
+ "#]],
+ );
+ check(
+ r#"
+fn foo(a$0: Tuple) {
+}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ bn Record {…} Record { field$1 }$0
+ bn Tuple(…) Tuple($1)$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn only_fn_like_macros() {
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+
+#[rustc_builtin_macro]
+macro Clone {}
+
+fn foo() {
+ let x$0
+}
+"#,
+ expect![[r#"
+ ma m!(…) macro_rules! m
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn in_simple_macro_call() {
+ check_empty(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+enum E { X }
+
+fn foo() {
+ m!(match E::X { a$0 })
+}
+"#,
+ expect![[r#"
+ en E
+ ma m!(…) macro_rules! m
+ bn E::X E::X$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn omits_private_fields_pat() {
+ check_empty(
+ r#"
+mod foo {
+ pub struct Record { pub field: i32, _field: i32 }
+ pub struct Tuple(pub u32, u32);
+ pub struct Invisible(u32, u32);
+}
+use foo::*;
+
+fn outer() {
+ if let a$0
+}
+"#,
+ expect![[r#"
+ md foo
+ st Invisible
+ st Record
+ st Tuple
+ bn Record {…} Record { field$1, .. }$0
+ bn Tuple(…) Tuple($1, ..)$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_self_pats() {
+ check_empty(
+ r#"
+struct Foo(i32);
+impl Foo {
+ fn foo() {
+ match Foo(0) {
+ a$0
+ }
+ }
+}
+ "#,
+ expect![[r#"
+ sp Self
+ st Foo
+ bn Foo(…) Foo($1)$0
+ bn Self(…) Self($1)$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn enum_qualified() {
+ check(
+ r#"
+impl Enum {
+ type AssocType = ();
+ const ASSOC_CONST: () = ();
+ fn assoc_fn() {}
+}
+fn func() {
+ if let Enum::$0 = unknown {}
+}
+"#,
+ expect![[r#"
+ ct ASSOC_CONST const ASSOC_CONST: ()
+ bn RecordV {…} RecordV { field$1 }$0
+ bn TupleV(…) TupleV($1)$0
+ bn UnitV UnitV$0
+ "#]],
+ );
+}
+
+#[test]
+fn completes_in_record_field_pat() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn outer(Foo { bar: $0 }: Foo) {}
+"#,
+ expect![[r#"
+ st Bar
+ st Foo
+ bn Bar(…) Bar($1)$0
+ bn Foo {…} Foo { bar$1 }$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn skips_in_record_field_pat_name() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn outer(Foo { bar$0 }: Foo) {}
+"#,
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_in_fn_param() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn foo($0) {}
+"#,
+ expect![[r#"
+ st Bar
+ st Foo
+ bn Bar(…) Bar($1): Bar$0
+ bn Foo {…} Foo { bar$1 }: Foo$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_in_closure_param() {
+ check_empty(
+ r#"
+struct Foo { bar: Bar }
+struct Bar(u32);
+fn foo() {
+ |$0| {};
+}
+"#,
+ expect![[r#"
+ st Bar
+ st Foo
+ bn Bar(…) Bar($1)$0
+ bn Foo {…} Foo { bar$1 }$0
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
+fn completes_no_delims_if_existing() {
+ check_empty(
+ r#"
+struct Bar(u32);
+fn foo() {
+ match Bar(0) {
+ B$0(b) => {}
+ }
+}
+"#,
+ expect![[r#"
+ st Bar
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Foo { bar: u32 }
+fn foo() {
+ match (Foo { bar: 0 }) {
+ F$0 { bar } => {}
+ }
+}
+"#,
+ expect![[r#"
+ st Foo
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_empty(
+ r#"
+enum Enum {
+ TupleVariant(u32)
+}
+fn foo() {
+ match Enum::TupleVariant(0) {
+ Enum::T$0(b) => {}
+ }
+}
+"#,
+ expect![[r#"
+ bn TupleVariant TupleVariant
+ "#]],
+ );
+ check_empty(
+ r#"
+enum Enum {
+ RecordVariant { field: u32 }
+}
+fn foo() {
+ match (Enum::RecordVariant { field: 0 }) {
+ Enum::RecordV$0 { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ bn RecordVariant RecordVariant
+ "#]],
+ );
+}
+
+#[test]
+fn completes_enum_variant_pat() {
+ cov_mark::check!(enum_variant_pattern_path);
+ check_edit(
+ "RecordVariant {…}",
+ r#"
+enum Enum {
+ RecordVariant { field: u32 }
+}
+fn foo() {
+ match (Enum::RecordVariant { field: 0 }) {
+ Enum::RecordV$0
+ }
+}
+"#,
+ r#"
+enum Enum {
+ RecordVariant { field: u32 }
+}
+fn foo() {
+ match (Enum::RecordVariant { field: 0 }) {
+ Enum::RecordVariant { field$1 }$0
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn completes_enum_variant_pat_escape() {
+ cov_mark::check!(enum_variant_pattern_path);
+ check_empty(
+ r#"
+enum Enum {
+ A,
+ B { r#type: i32 },
+ r#type,
+ r#struct { r#type: i32 },
+}
+fn foo() {
+ match (Enum::A) {
+ $0
+ }
+}
+"#,
+ expect![[r#"
+ en Enum
+ bn Enum::A Enum::A$0
+ bn Enum::B {…} Enum::B { r#type$1 }$0
+ bn Enum::struct {…} Enum::r#struct { r#type$1 }$0
+ bn Enum::type Enum::r#type$0
+ kw mut
+ kw ref
+ "#]],
+ );
+
+ check_empty(
+ r#"
+enum Enum {
+ A,
+ B { r#type: i32 },
+ r#type,
+ r#struct { r#type: i32 },
+}
+fn foo() {
+ match (Enum::A) {
+ Enum::$0
+ }
+}
+"#,
+ expect![[r#"
+ bn A A$0
+ bn B {…} B { r#type$1 }$0
+ bn struct {…} r#struct { r#type$1 }$0
+ bn type r#type$0
+ "#]],
+ );
+}
+
+#[test]
+fn completes_associated_const() {
+ check_empty(
+ r#"
+#[derive(PartialEq, Eq)]
+struct Ty(u8);
+
+impl Ty {
+ const ABC: Self = Self(0);
+}
+
+fn f(t: Ty) {
+ match t {
+ Ty::$0 => {}
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+ ct ABC const ABC: Self
+ "#]],
+ );
+
+ check_empty(
+ r#"
+enum MyEnum {}
+
+impl MyEnum {
+ pub const A: i32 = 123;
+ pub const B: i32 = 456;
+}
+
+fn f(e: MyEnum) {
+ match e {
+ MyEnum::$0 => {}
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+ ct A pub const A: i32
+ ct B pub const B: i32
+ "#]],
+ );
+
+ check_empty(
+ r#"
+union U {
+ i: i32,
+ f: f32,
+}
+
+impl U {
+ pub const C: i32 = 123;
+ pub const D: i32 = 456;
+}
+
+fn f(u: U) {
+ match u {
+ U::$0 => {}
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+ ct C pub const C: i32
+ ct D pub const D: i32
+ "#]],
+ );
+
+ check_empty(
+ r#"
+#[lang = "u32"]
+impl u32 {
+ pub const MIN: Self = 0;
+}
+
+fn f(v: u32) {
+ match v {
+ u32::$0
+ }
+}
+ "#,
+ expect![[r#"
+ ct MIN pub const MIN: Self
+ "#]],
+ );
+}
+
+#[test]
+fn in_method_param() {
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo($0)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn &mut self
+ bn &self
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo(s$0)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn &mut self
+ bn &self
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo(s$0, foo: u8)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn &mut self
+ bn &self
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ bn mut self
+ bn self
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_empty(
+ r#"
+struct Ty(u8);
+
+impl Ty {
+ fn foo(foo: u8, b$0)
+}
+"#,
+ expect![[r#"
+ sp Self
+ st Ty
+ bn Self(…) Self($1): Self$0
+ bn Ty(…) Ty($1): Ty$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
new file mode 100644
index 000000000..a8676e2f2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
@@ -0,0 +1,131 @@
+//! Completion tests for predicates and bounds.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn predicate_start() {
+ // FIXME: `for` kw
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where $0 {}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn bound_for_type_pred() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where T: $0 {}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ tt Trait
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn bound_for_lifetime_pred() {
+    // FIXME: should only show lifetimes here; that is, we shouldn't get any completions here when not
+    // typing a `'`
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where 'lt: $0 {}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ tt Trait
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn bound_for_for_pred() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where for<'a> T: $0 {}
+"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ tt Trait
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn param_list_for_for_pred() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn pred_on_fn_in_impl() {
+ check(
+ r#"
+impl Record {
+ fn method(self) where $0 {}
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
new file mode 100644
index 000000000..9eae6f849
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
@@ -0,0 +1,133 @@
+//! Completion tests for proc macros.
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn complete_dot_in_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+ Foo.$0
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
+
+#[test]
+fn complete_dot_in_attr2() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+ Foo.f$0
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
+
+#[test]
+fn complete_dot_in_attr_input() {
+ check(
+ r#"
+//- proc_macros: input_replace
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::input_replace(
+    fn surprise() {
+ Foo.$0
+ }
+)]
+fn main() {}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
+
+#[test]
+fn complete_dot_in_attr_input2() {
+ check(
+ r#"
+//- proc_macros: input_replace
+pub struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+#[proc_macros::input_replace(
+    fn surprise() {
+ Foo.f$0
+ }
+)]
+fn main() {}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
new file mode 100644
index 000000000..f6accc68e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -0,0 +1,229 @@
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn without_default_impl() {
+ check(
+ r#"
+struct Struct { foo: u32, bar: usize }
+
+fn foo() {
+ let other = Struct {
+ foo: 5,
+ $0
+ };
+}
+"#,
+ expect![[r#"
+ fd bar usize
+ "#]],
+ );
+}
+
+#[test]
+fn record_pattern_field() {
+ check(
+ r#"
+struct Struct { foo: u32, bar: u32 }
+
+fn foo(s: Struct) {
+ match s {
+ Struct { foo, $0: 92 } => (),
+ }
+}
+"#,
+ expect![[r#"
+ fd bar u32
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_enum_variant() {
+ check(
+ r#"
+enum Enum { Variant { foo: u32, bar: u32 } }
+fn foo(e: Enum) {
+ match e {
+ Enum::Variant { foo, $0 } => (),
+ }
+}
+"#,
+ expect![[r#"
+ fd bar u32
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn record_literal_field_in_macro() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+struct Struct { field: u32 }
+fn foo() {
+ m!(Struct { fie$0 })
+}
+"#,
+ expect![[r#"
+ fd field u32
+ "#]],
+ );
+}
+
+#[test]
+fn record_pattern_field_in_macro() {
+ check(
+ r"
+macro_rules! m { ($e:expr) => { $e } }
+struct Struct { field: u32 }
+
+fn foo(f: Struct) {
+ m!(match f {
+ Struct { f$0: 92 } => (),
+ })
+}
+",
+ expect![[r#"
+ fd field u32
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
+fn functional_update() {
+ // FIXME: This should filter out all completions that do not have the type `Foo`
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, $0 }
+}
+"#,
+ expect![[r#"
+ fd ..Default::default()
+ fd foo1 u32
+ fd foo2 u32
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, .$0 }
+}
+"#,
+ expect![[r#"
+ fd ..Default::default()
+ sn ..
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, ..$0 }
+}
+"#,
+ expect![[r#"
+ fd ..Default::default()
+ fn main() fn()
+ lc foo Foo
+ lc thing i32
+ md core
+ st Foo
+ st Foo {…} Foo { foo1: u32, foo2: u32 }
+ tt Default
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:default
+struct Foo { foo1: u32, foo2: u32 }
+impl Default for Foo {
+ fn default() -> Self { loop {} }
+}
+
+fn main() {
+ let thing = 1;
+ let foo = Foo { foo1: 0, foo2: 0 };
+ let foo2 = Foo { thing, ..Default::$0 }
+}
+"#,
+ expect![[r#"
+ fn default() (as Default) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn empty_union_literal() {
+ check(
+ r#"
+union Union { foo: u32, bar: f32 }
+
+fn foo() {
+ let other = Union {
+ $0
+ };
+}
+ "#,
+ expect![[r#"
+ fd bar f32
+ fd foo u32
+ "#]],
+ )
+}
+
+#[test]
+fn dont_suggest_additional_union_fields() {
+ check(
+ r#"
+union Union { foo: u32, bar: f32 }
+
+fn foo() {
+ let other = Union {
+ foo: 1,
+ $0
+ };
+}
+ "#,
+ expect![[r#""#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
new file mode 100644
index 000000000..033dc99c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -0,0 +1,895 @@
+//! Tests that don't fit into a specific category.
+
+use expect_test::{expect, Expect};
+
+use crate::tests::{check_edit, completion_list_no_kw};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn completes_if_prefix_is_keyword() {
+ check_edit(
+ "wherewolf",
+ r#"
+fn main() {
+ let wherewolf = 92;
+ drop(where$0)
+}
+"#,
+ r#"
+fn main() {
+ let wherewolf = 92;
+ drop(wherewolf)
+}
+"#,
+ )
+}
+
+/// Regression test for issue #6091.
+#[test]
+fn correctly_completes_module_items_prefixed_with_underscore() {
+ check_edit(
+ "_alpha",
+ r#"
+fn main() {
+ _$0
+}
+fn _alpha() {}
+"#,
+ r#"
+fn main() {
+ _alpha()$0
+}
+fn _alpha() {}
+"#,
+ )
+}
+
+#[test]
+fn completes_prelude() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn foo() { let x: $0 }
+
+//- /std/lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Option;
+ }
+}
+"#,
+ expect![[r#"
+ md std
+ st Option
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn completes_prelude_macros() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() {$0}
+
+//- /std/lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::concat;
+ }
+}
+
+mod macros {
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat { }
+}
+"#,
+ expect![[r#"
+ fn f() fn()
+ ma concat!(…) macro_rules! concat
+ md std
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn completes_std_prelude_if_core_is_defined() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core,std
+fn foo() { let x: $0 }
+
+//- /core/lib.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Option;
+ }
+}
+
+//- /std/lib.rs crate:std deps:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct String;
+ }
+}
+"#,
+ expect![[r#"
+ md core
+ md std
+ st String
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:std
+fn f() {
+ format_$0
+}
+
+//- /std.rs crate:std
+#[doc(hidden)]
+#[macro_export]
+macro_rules! format_args_nl {
+ () => {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {}
+}
+ "#,
+ expect![[r#"
+ fn f() fn()
+ md std
+ bt u32
+ "#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden_in_assoc_item_list() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:std
+struct S;
+impl S {
+ format_$0
+}
+
+//- /std.rs crate:std
+#[doc(hidden)]
+#[macro_export]
+macro_rules! format_args_nl {
+ () => {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {}
+}
+ "#,
+ expect![[r#"
+ md std
+ "#]],
+ );
+}
+
+#[test]
+fn associated_item_visibility() {
+ check(
+ r#"
+//- /lib.rs crate:lib new_source_root:library
+pub struct S;
+
+impl S {
+ pub fn public_method() { }
+ fn private_method() { }
+ pub type PublicType = u32;
+ type PrivateType = u32;
+ pub const PUBLIC_CONST: u32 = 1;
+ const PRIVATE_CONST: u32 = 1;
+}
+
+//- /main.rs crate:main deps:lib new_source_root:local
+fn foo() { let _ = lib::S::$0 }
+"#,
+ expect![[r#"
+ ct PUBLIC_CONST pub const PUBLIC_CONST: u32
+ fn public_method() fn()
+ ta PublicType pub type PublicType = u32
+ "#]],
+ );
+}
+
+#[test]
+fn completes_union_associated_method() {
+ check(
+ r#"
+union U {};
+impl U { fn m() { } }
+
+fn foo() { let _ = U::$0 }
+"#,
+ expect![[r#"
+ fn m() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_trait_associated_method_1() {
+ check(
+ r#"
+trait Trait { fn m(); }
+
+fn foo() { let _ = Trait::$0 }
+"#,
+ expect![[r#"
+ fn m() (as Trait) fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_trait_associated_method_2() {
+ check(
+ r#"
+trait Trait { fn m(); }
+
+struct S;
+impl Trait for S {}
+
+fn foo() { let _ = S::$0 }
+"#,
+ expect![[r#"
+ fn m() (as Trait) fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_trait_associated_method_3() {
+ check(
+ r#"
+trait Trait { fn m(); }
+
+struct S;
+impl Trait for S {}
+
+fn foo() { let _ = <S as Trait>::$0 }
+"#,
+ expect![[r#"
+ fn m() (as Trait) fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_ty_param_assoc_ty() {
+ check(
+ r#"
+trait Super {
+ type Ty;
+ const CONST: u8;
+ fn func() {}
+ fn method(&self) {}
+}
+
+trait Sub: Super {
+ type SubTy;
+ const C2: ();
+ fn subfunc() {}
+ fn submethod(&self) {}
+}
+
+fn foo<T: Sub>() { T::$0 }
+"#,
+ expect![[r#"
+ ct C2 (as Sub) const C2: ()
+ ct CONST (as Super) const CONST: u8
+ fn func() (as Super) fn()
+ fn subfunc() (as Sub) fn()
+ ta SubTy (as Sub) type SubTy
+ ta Ty (as Super) type Ty
+ me method(…) (as Super) fn(&self)
+ me submethod(…) (as Sub) fn(&self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_self_param_assoc_ty() {
+ check(
+ r#"
+trait Super {
+ type Ty;
+ const CONST: u8 = 0;
+ fn func() {}
+ fn method(&self) {}
+}
+
+trait Sub: Super {
+ type SubTy;
+ const C2: () = ();
+ fn subfunc() {}
+ fn submethod(&self) {}
+}
+
+struct Wrap<T>(T);
+impl<T> Super for Wrap<T> {}
+impl<T> Sub for Wrap<T> {
+ fn subfunc() {
+ // Should be able to assume `Self: Sub + Super`
+ Self::$0
+ }
+}
+"#,
+ expect![[r#"
+ ct C2 (as Sub) const C2: ()
+ ct CONST (as Super) const CONST: u8
+ fn func() (as Super) fn()
+ fn subfunc() (as Sub) fn()
+ ta SubTy (as Sub) type SubTy
+ ta Ty (as Super) type Ty
+ me method(…) (as Super) fn(&self)
+ me submethod(…) (as Sub) fn(&self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_type_alias() {
+ check(
+ r#"
+struct S;
+impl S { fn foo() {} }
+type T = S;
+impl T { fn bar() {} }
+
+fn main() { T::$0; }
+"#,
+ expect![[r#"
+ fn bar() fn()
+ fn foo() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_qualified_macros() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! foo { () => {} }
+
+fn main() { let _ = crate::$0 }
+"#,
+ expect![[r#"
+ fn main() fn()
+ ma foo!(…) macro_rules! foo
+ "#]],
+ );
+}
+
+#[test]
+fn does_not_complete_non_fn_macros() {
+ check(
+ r#"
+mod m {
+ #[rustc_builtin_macro]
+ pub macro Clone {}
+}
+
+fn f() {m::$0}
+"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+mod m {
+ #[rustc_builtin_macro]
+ pub macro bench {}
+}
+
+fn f() {m::$0}
+"#,
+ expect![[r#""#]],
+ );
+}
+
+#[test]
+fn completes_reexported_items_under_correct_name() {
+ check(
+ r#"
+fn foo() { self::m::$0 }
+
+mod m {
+ pub use super::p::wrong_fn as right_fn;
+ pub use super::p::WRONG_CONST as RIGHT_CONST;
+ pub use super::p::WrongType as RightType;
+}
+mod p {
+ pub fn wrong_fn() {}
+ pub const WRONG_CONST: u32 = 1;
+ pub struct WrongType {};
+}
+"#,
+ expect![[r#"
+ ct RIGHT_CONST
+ fn right_fn() fn()
+ st RightType
+ "#]],
+ );
+
+ check_edit(
+ "RightType",
+ r#"
+fn foo() { self::m::$0 }
+
+mod m {
+ pub use super::p::wrong_fn as right_fn;
+ pub use super::p::WRONG_CONST as RIGHT_CONST;
+ pub use super::p::WrongType as RightType;
+}
+mod p {
+ pub fn wrong_fn() {}
+ pub const WRONG_CONST: u32 = 1;
+ pub struct WrongType {};
+}
+"#,
+ r#"
+fn foo() { self::m::RightType }
+
+mod m {
+ pub use super::p::wrong_fn as right_fn;
+ pub use super::p::WRONG_CONST as RIGHT_CONST;
+ pub use super::p::WrongType as RightType;
+}
+mod p {
+ pub fn wrong_fn() {}
+ pub const WRONG_CONST: u32 = 1;
+ pub struct WrongType {};
+}
+"#,
+ );
+}
+
+#[test]
+fn completes_in_simple_macro_call() {
+ check(
+ r#"
+macro_rules! m { ($e:expr) => { $e } }
+fn main() { m!(self::f$0); }
+fn foo() {}
+"#,
+ expect![[r#"
+ fn foo() fn()
+ fn main() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn function_mod_share_name() {
+ check(
+ r#"
+fn foo() { self::m::$0 }
+
+mod m {
+ pub mod z {}
+ pub fn z() {}
+}
+"#,
+ expect![[r#"
+ fn z() fn()
+ md z
+ "#]],
+ );
+}
+
+#[test]
+fn completes_hashmap_new() {
+ check(
+ r#"
+struct RandomState;
+struct HashMap<K, V, S = RandomState> {}
+
+impl<K, V> HashMap<K, V, RandomState> {
+ pub fn new() -> HashMap<K, V, RandomState> { }
+}
+fn foo() {
+ HashMap::$0
+}
+"#,
+ expect![[r#"
+ fn new() fn() -> HashMap<K, V, RandomState>
+ "#]],
+ );
+}
+
+#[test]
+fn completes_variant_through_self() {
+ cov_mark::check!(completes_variant_through_self);
+ check(
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Foo {
+ fn foo(self) {
+ Self::$0
+ }
+}
+"#,
+ expect![[r#"
+ ev Bar Bar
+ ev Baz Baz
+ me foo(…) fn(self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_non_exhaustive_variant_within_the_defining_crate() {
+ check(
+ r#"
+enum Foo {
+ #[non_exhaustive]
+ Bar,
+ Baz,
+}
+
+fn foo(self) {
+ Foo::$0
+}
+"#,
+ expect![[r#"
+ ev Bar Bar
+ ev Baz Baz
+ "#]],
+ );
+
+ check(
+ r#"
+//- /main.rs crate:main deps:e
+fn foo(self) {
+ e::Foo::$0
+}
+
+//- /e.rs crate:e
+enum Foo {
+ #[non_exhaustive]
+ Bar,
+ Baz,
+}
+"#,
+ expect![[r#"
+ ev Baz Baz
+ "#]],
+ );
+}
+
+#[test]
+fn completes_primitive_assoc_const() {
+ cov_mark::check!(completes_primitive_assoc_const);
+ check(
+ r#"
+//- /lib.rs crate:lib deps:core
+fn f() {
+ u8::$0
+}
+
+//- /core.rs crate:core
+#[lang = "u8"]
+impl u8 {
+ pub const MAX: Self = 255;
+
+ pub fn func(self) {}
+}
+"#,
+ expect![[r#"
+ ct MAX pub const MAX: Self
+ me func(…) fn(self)
+ "#]],
+ );
+}
+
+#[test]
+fn completes_variant_through_alias() {
+ cov_mark::check!(completes_variant_through_alias);
+ check(
+ r#"
+enum Foo {
+ Bar
+}
+type Foo2 = Foo;
+fn main() {
+ Foo2::$0
+}
+"#,
+ expect![[r#"
+ ev Bar Bar
+ "#]],
+ );
+}
+
+#[test]
+fn respects_doc_hidden2() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep
+fn f() {
+ dep::$0
+}
+
+//- /dep.rs crate:dep
+#[doc(hidden)]
+#[macro_export]
+macro_rules! m {
+ () => {}
+}
+
+#[doc(hidden)]
+pub fn f() {}
+
+#[doc(hidden)]
+pub struct S;
+
+#[doc(hidden)]
+pub mod m {}
+ "#,
+ expect![[r#""#]],
+ )
+}
+
+#[test]
+fn type_anchor_empty() {
+ check(
+ r#"
+trait Foo {
+ fn foo() -> Self;
+}
+struct Bar;
+impl Foo for Bar {
+ fn foo() -> {
+ Bar
+ }
+}
+fn bar() -> Bar {
+ <_>::$0
+}
+"#,
+ expect![[r#"
+ fn foo() (as Foo) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn type_anchor_type() {
+ check(
+ r#"
+trait Foo {
+ fn foo() -> Self;
+}
+struct Bar;
+impl Bar {
+ fn bar() {}
+}
+impl Foo for Bar {
+ fn foo() -> {
+ Bar
+ }
+}
+fn bar() -> Bar {
+ <Bar>::$0
+}
+"#,
+ expect![[r#"
+ fn bar() fn()
+ fn foo() (as Foo) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn type_anchor_type_trait() {
+ check(
+ r#"
+trait Foo {
+ fn foo() -> Self;
+}
+struct Bar;
+impl Bar {
+ fn bar() {}
+}
+impl Foo for Bar {
+ fn foo() -> {
+ Bar
+ }
+}
+fn bar() -> Bar {
+ <Bar as Foo>::$0
+}
+"#,
+ expect![[r#"
+ fn foo() (as Foo) fn() -> Self
+ "#]],
+ );
+}
+
+#[test]
+fn completes_fn_in_pub_trait_generated_by_macro() {
+ check(
+ r#"
+mod other_mod {
+ macro_rules! make_method {
+ ($name:ident) => {
+ fn $name(&self) {}
+ };
+ }
+
+ pub trait MyTrait {
+ make_method! { by_macro }
+ fn not_by_macro(&self) {}
+ }
+
+ pub struct Foo {}
+
+ impl MyTrait for Foo {}
+}
+
+fn main() {
+ use other_mod::{Foo, MyTrait};
+ let f = Foo {};
+ f.$0
+}
+"#,
+ expect![[r#"
+ me by_macro() (as MyTrait) fn(&self)
+ me not_by_macro() (as MyTrait) fn(&self)
+ "#]],
+ )
+}
+
+#[test]
+fn completes_fn_in_pub_trait_generated_by_recursive_macro() {
+ check(
+ r#"
+mod other_mod {
+ macro_rules! make_method {
+ ($name:ident) => {
+ fn $name(&self) {}
+ };
+ }
+
+ macro_rules! make_trait {
+ () => {
+ pub trait MyTrait {
+ make_method! { by_macro }
+ fn not_by_macro(&self) {}
+ }
+ }
+ }
+
+ make_trait!();
+
+ pub struct Foo {}
+
+ impl MyTrait for Foo {}
+}
+
+fn main() {
+ use other_mod::{Foo, MyTrait};
+ let f = Foo {};
+ f.$0
+}
+"#,
+ expect![[r#"
+ me by_macro() (as MyTrait) fn(&self)
+ me not_by_macro() (as MyTrait) fn(&self)
+ "#]],
+ )
+}
+
+#[test]
+fn completes_const_in_pub_trait_generated_by_macro() {
+ check(
+ r#"
+mod other_mod {
+ macro_rules! make_const {
+ ($name:ident) => {
+ const $name: u8 = 1;
+ };
+ }
+
+ pub trait MyTrait {
+ make_const! { by_macro }
+ }
+
+ pub struct Foo {}
+
+ impl MyTrait for Foo {}
+}
+
+fn main() {
+ use other_mod::{Foo, MyTrait};
+ let f = Foo {};
+ Foo::$0
+}
+"#,
+ expect![[r#"
+ ct by_macro (as MyTrait) pub const by_macro: u8
+ "#]],
+ )
+}
+
+#[test]
+fn completes_locals_from_macros() {
+ check(
+ r#"
+
+macro_rules! x {
+ ($x:ident, $expr:expr) => {
+ let $x = 0;
+ $expr
+ };
+}
+fn main() {
+ x! {
+ foobar, {
+ f$0
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() fn()
+ lc foobar i32
+ ma x!(…) macro_rules! x
+ bt u32
+ "#]],
+ )
+}
+
+#[test]
+fn regression_12644() {
+ check(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
+}
+macro_rules! vec {
+ ($elem:expr) => {
+ __rust_force_expr!($elem)
+ };
+}
+
+struct Struct;
+impl Struct {
+ fn foo(self) {}
+}
+
+fn f() {
+ vec![Struct].$0;
+}
+"#,
+ expect![[r#"
+ me foo() fn(self)
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
new file mode 100644
index 000000000..f0b7726c5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
@@ -0,0 +1,671 @@
+//! Completion tests for type position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn record_field_ty() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize> {
+ f: $0
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn tuple_struct_field() {
+ check(
+ r#"
+struct Foo<'lt, T, const C: usize>(f$0);
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn fn_return_type() {
+ check(
+ r#"
+fn x<'lt, T, const C: usize>() -> $0
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn fn_return_type_no_local_items() {
+ check(
+ r#"
+fn foo() -> B$0 {
+ struct Bar;
+ enum Baz {}
+ union Bax {
+ i: i32,
+ f: f32
+ }
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it ()
+ kw crate::
+ kw self::
+ "#]],
+ )
+}
+
+#[test]
+fn inferred_type_const() {
+ check(
+ r#"
+struct Foo<T>(T);
+const FOO: $0 = Foo(2);
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it Foo<i32>
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_closure_param() {
+ check(
+ r#"
+fn f1(f: fn(i32) -> i32) {}
+fn f2() {
+ f1(|x: $0);
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it i32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_closure_return() {
+ check(
+ r#"
+fn f1(f: fn(u64) -> u64) {}
+fn f2() {
+ f1(|x| -> $0 {
+ x + 5
+ });
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it u64
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_fn_return() {
+ check(
+ r#"
+fn f2(x: u64) -> $0 {
+ x + 5
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it u64
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_fn_param() {
+ check(
+ r#"
+fn f1(x: i32) {}
+fn f2(x: $0) {
+ f1(x);
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ bt u32
+ it i32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_not_in_the_scope() {
+ check(
+ r#"
+mod a {
+ pub struct Foo<T>(T);
+ pub fn x() -> Foo<Foo<i32>> {
+ Foo(Foo(2))
+ }
+}
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let foo: $0 = a::x();
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md a
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ it a::Foo<a::Foo<i32>>
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn inferred_type_let() {
+ check(
+ r#"
+struct Foo<T>(T);
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let foo: $0 = Foo(2);
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo<…>
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ it Foo<i32>
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn body_type_pos() {
+ check(
+ r#"
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let _: $0;
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"
+fn foo<'lt, T, const C: usize>() {
+ let local = ();
+ let _: self::$0;
+}
+"#,
+ expect![[r#"
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ un Union
+ "#]],
+ );
+}
+
+#[test]
+fn completes_types_and_const_in_arg_list() {
+ cov_mark::check!(complete_assoc_type_in_generics_list);
+ check(
+ r#"
+trait Trait1 {
+ type Super;
+}
+trait Trait2: Trait1 {
+ type Foo;
+}
+
+fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
+"#,
+ expect![[r#"
+ ta Foo = (as Trait2) type Foo
+ ta Super = (as Trait1) type Super
+ "#]],
+ );
+ check(
+ r#"
+trait Trait1 {
+ type Super;
+}
+trait Trait2<T>: Trait1 {
+ type Foo;
+}
+
+fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
+"#,
+ expect![[r#"
+ ct CONST
+ cp CONST_PARAM
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tt Trait1
+ tt Trait2
+ tp T
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check(
+ r#"
+trait Trait2 {
+ type Foo;
+}
+
+fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {}
+ "#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt Trait
+ tt Trait2
+ un Union
+ "#]],
+ );
+}
+
+#[test]
+fn no_assoc_completion_outside_type_bounds() {
+ check(
+ r#"
+struct S;
+trait Tr<T> {
+ type Ty;
+}
+
+impl Tr<$0
+ "#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ sp Self
+ st Record
+ st S
+ st Tuple
+ st Unit
+ tt Tr
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn enum_qualified() {
+ check(
+ r#"
+impl Enum {
+ type AssocType = ();
+ const ASSOC_CONST: () = ();
+ fn assoc_fn() {}
+}
+fn func(_: Enum::$0) {}
+"#,
+ expect![[r#"
+ ta AssocType type AssocType = ()
+ "#]],
+ );
+}
+
+#[test]
+fn completes_type_parameter_or_associated_type() {
+ check(
+ r#"
+trait MyTrait<T, U> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u8, I$0
+"#,
+ expect![[r#"
+ ta Item1 = (as MyTrait) type Item1
+ ta Item2 = (as MyTrait) type Item2
+ "#]],
+ );
+}
+
+#[test]
+fn completes_type_parameter_or_associated_type_with_default_value() {
+ check(
+ r#"
+trait MyTrait<T, U = u8> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U = u8> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u$0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ ta Item1 = (as MyTrait) type Item1
+ ta Item2 = (as MyTrait) type Item2
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait<T, U = u8> {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<u8, u8, I$0
+"#,
+ expect![[r#"
+ ta Item1 = (as MyTrait) type Item1
+ ta Item2 = (as MyTrait) type Item2
+ "#]],
+ );
+}
+
+#[test]
+fn completes_types_after_associated_type() {
+ check(
+ r#"
+trait MyTrait {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<Item1 = $0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+
+ check(
+ r#"
+trait MyTrait {
+ type Item1;
+ type Item2;
+};
+
+fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
+"#,
+ expect![[r#"
+ ct CONST
+ en Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record
+ st Tuple
+ st Unit
+ tt MyTrait
+ tt Trait
+ un Union
+ bt u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
new file mode 100644
index 000000000..037d7dce5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
@@ -0,0 +1,384 @@
+//! Completion tests for use trees.
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn use_tree_start() {
+ cov_mark::check!(unqualified_path_selected_only);
+ check(
+ r#"
+//- /lib.rs crate:main deps:other_crate
+use f$0
+
+struct Foo;
+enum FooBar {
+ Foo,
+ Bar
+}
+mod foo {}
+//- /other_crate/lib.rs crate:other_crate
+// nothing here
+"#,
+ expect![[r#"
+ en FooBar::
+ md foo
+ md other_crate
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn use_tree_start_abs() {
+ cov_mark::check!(use_tree_crate_roots_only);
+ check(
+ r#"
+//- /lib.rs crate:main deps:other_crate
+use ::f$0
+
+struct Foo;
+mod foo {}
+//- /other_crate/lib.rs crate:other_crate
+// nothing here
+"#,
+ expect![[r#"
+ md other_crate
+ "#]],
+ );
+}
+
+#[test]
+fn dont_complete_current_use() {
+ cov_mark::check!(dont_complete_current_use);
+ check(r#"use self::foo$0;"#, expect![[r#""#]]);
+ check(
+ r#"
+mod foo { pub struct S; }
+use self::{foo::*, bar$0};
+"#,
+ expect![[r#"
+ md foo
+ st S
+ "#]],
+ );
+}
+
+#[test]
+fn nested_use_tree() {
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct FooBar;
+ }
+}
+use foo::{bar::$0}
+"#,
+ expect![[r#"
+ st FooBar
+ "#]],
+ );
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct FooBar;
+ }
+}
+use foo::{$0}
+"#,
+ expect![[r#"
+ md bar
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn deeply_nested_use_tree() {
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct FooBarBaz;
+ }
+ }
+}
+use foo::{bar::{baz::$0}}
+"#,
+ expect![[r#"
+ st FooBarBaz
+ "#]],
+ );
+ check(
+ r#"
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct FooBarBaz;
+ }
+ }
+}
+use foo::{bar::{$0}}
+"#,
+ expect![[r#"
+ md baz
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn plain_qualified_use_tree() {
+ check(
+ r#"
+use foo::$0
+
+mod foo {
+ struct Private;
+ pub struct Foo;
+ macro_rules! foo_ { {} => {} }
+ pub use foo_ as foo;
+}
+struct Bar;
+"#,
+ expect![[r#"
+ ma foo macro_rules! foo_
+ st Foo
+ "#]],
+ );
+}
+
+#[test]
+fn enum_plain_qualified_use_tree() {
+ cov_mark::check!(enum_plain_qualified_use_tree);
+ check(
+ r#"
+use Foo::$0
+
+enum Foo {
+ UnitVariant,
+ TupleVariant(),
+ RecordVariant {},
+}
+impl Foo {
+ const CONST: () = ()
+ fn func() {}
+}
+"#,
+ expect![[r#"
+ ev RecordVariant RecordVariant
+ ev TupleVariant TupleVariant
+ ev UnitVariant UnitVariant
+ "#]],
+ );
+}
+
+#[test]
+fn self_qualified_use_tree() {
+ check(
+ r#"
+use self::$0
+
+mod foo {}
+struct Bar;
+"#,
+ expect![[r#"
+ md foo
+ st Bar
+ "#]],
+ );
+}
+
+#[test]
+fn super_qualified_use_tree() {
+ check(
+ r#"
+mod bar {
+ use super::$0
+}
+
+mod foo {}
+struct Bar;
+"#,
+ expect![[r#"
+ md bar
+ md foo
+ st Bar
+ "#]],
+ );
+}
+
+#[test]
+fn super_super_qualified_use_tree() {
+ check(
+ r#"
+mod a {
+ const A: usize = 0;
+ mod b {
+ const B: usize = 0;
+ mod c { use super::super::$0 }
+ }
+}
+"#,
+ expect![[r#"
+ ct A
+ md b
+ kw super::
+ "#]],
+ );
+}
+
+#[test]
+fn crate_qualified_use_tree() {
+ check(
+ r#"
+use crate::$0
+
+mod foo {}
+struct Bar;
+"#,
+ expect![[r#"
+ md foo
+ st Bar
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_qualified_use_tree() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:other_crate
+use other_crate::$0
+//- /other_crate/lib.rs crate:other_crate
+pub struct Foo;
+pub mod foo {}
+"#,
+ expect![[r#"
+ md foo
+ st Foo
+ "#]],
+ );
+}
+
+#[test]
+fn pub_use_tree() {
+ check(
+ r#"
+pub struct X;
+pub mod bar {}
+pub use $0;
+"#,
+ expect![[r#"
+ md bar
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn pub_suggest_use_tree_super_acc_to_depth_in_tree() {
+ // https://github.com/rust-lang/rust-analyzer/issues/12439
+ // Check discussion in https://github.com/rust-lang/rust-analyzer/pull/12447
+
+ check(
+ r#"
+mod foo {
+ mod bar {
+ pub use super::$0;
+ }
+}
+"#,
+ expect![[r#"
+ md bar
+ kw super::
+ "#]],
+ );
+
+ // Don't suggest super when at the crate root
+ check(
+ r#"
+mod foo {
+ mod bar {
+ pub use super::super::$0;
+ }
+}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+
+ check(
+ r#"
+mod foo {
+ use $0;
+}
+"#,
+ expect![[r#"
+ kw crate::
+ kw self::
+ kw super::
+ "#]],
+ );
+
+ // Don't suggest super after another segment in the path (here it is foo1)
+ check(
+ r#"
+mod foo {
+ mod bar {
+ use super::super::foo1::$0;
+ }
+}
+
+mod foo1 {
+ pub mod bar1 {}
+}
+"#,
+ expect![[r#"
+ md bar1
+ "#]],
+ );
+}
+
+#[test]
+fn use_tree_braces_at_start() {
+ check(
+ r#"
+struct X;
+mod bar {}
+use {$0};
+"#,
+ expect![[r#"
+ md bar
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
+fn impl_prefix_does_not_add_fn_snippet() {
+ // regression test for 7222
+ check(
+ r#"
+mod foo {
+ pub fn bar(x: u32) {}
+}
+use self::foo::impl$0
+"#,
+ expect![[r#"
+ fn bar fn(u32)
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs
new file mode 100644
index 000000000..c18d6e66d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/visibility.rs
@@ -0,0 +1,90 @@
+//! Completion tests for visibility modifiers.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, completion_list_with_trigger_character};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(ra_fixture);
+ expect.assert_eq(&actual)
+}
+
+fn check_with_trigger_character(ra_fixture: &str, trigger_character: char, expect: Expect) {
+ let actual = completion_list_with_trigger_character(ra_fixture, Some(trigger_character));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn empty_pub() {
+ cov_mark::check!(kw_completion_in);
+ check_with_trigger_character(
+ r#"
+pub($0)
+"#,
+ '(',
+ expect![[r#"
+ kw crate
+ kw in
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn after_in_kw() {
+ check(
+ r#"
+pub(in $0)
+"#,
+ expect![[r#"
+ kw crate
+ kw self
+ "#]],
+ );
+}
+
+#[test]
+fn qualified() {
+ cov_mark::check!(visibility_qualified);
+ check(
+ r#"
+mod foo {
+ pub(in crate::$0)
+}
+
+mod bar {}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+ check(
+ r#"
+mod qux {
+ mod foo {
+ pub(in crate::$0)
+ }
+ mod baz {}
+}
+
+mod bar {}
+"#,
+ expect![[r#"
+ md qux
+ "#]],
+ );
+ check(
+ r#"
+mod qux {
+ mod foo {
+ pub(in crate::qux::$0)
+ }
+ mod baz {}
+}
+
+mod bar {}
+"#,
+ expect![[r#"
+ md foo
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
new file mode 100644
index 000000000..a1b0bd6cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -0,0 +1,39 @@
+[package]
+name = "ide-db"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+tracing = "0.1.35"
+rayon = "1.5.3"
+fst = { version = "0.4.7", default-features = false }
+rustc-hash = "1.1.0"
+once_cell = "1.12.0"
+either = "1.7.0"
+itertools = "0.10.3"
+arrayvec = "0.7.2"
+indexmap = "1.9.1"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+# ide should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+xshell = "0.2.2"
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
new file mode 100644
index 000000000..7303ef8b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
@@ -0,0 +1,78 @@
+//! This module provides functionality for querying callable information about a token.
+
+use either::Either;
+use hir::{Semantics, Type};
+use syntax::{
+ ast::{self, HasArgList, HasName},
+ AstNode, SyntaxToken,
+};
+
+use crate::RootDatabase;
+
+#[derive(Debug)]
+pub struct ActiveParameter {
+ pub ty: Type,
+ pub pat: Either<ast::SelfParam, ast::Pat>,
+}
+
+impl ActiveParameter {
+ /// Returns information about the call argument this token is part of.
+ pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Self> {
+ let (signature, active_parameter) = callable_for_token(sema, token)?;
+
+ let idx = active_parameter?;
+ let mut params = signature.params(sema.db);
+ if !(idx < params.len()) {
+ cov_mark::hit!(too_many_arguments);
+ return None;
+ }
+ let (pat, ty) = params.swap_remove(idx);
+ pat.map(|pat| ActiveParameter { ty, pat })
+ }
+
+ pub fn ident(&self) -> Option<ast::Name> {
+ self.pat.as_ref().right().and_then(|param| match param {
+ ast::Pat::IdentPat(ident) => ident.name(),
+ _ => None,
+ })
+ }
+}
+
+/// Returns the [`hir::Callable`] this token is a part of, along with the argument index within said callable.
+pub fn callable_for_token(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<(hir::Callable, Option<usize>)> {
+ // Find the calling expression and its NameRef
+ let parent = token.parent()?;
+ let calling_node = parent.ancestors().filter_map(ast::CallableExpr::cast).find(|it| {
+ it.arg_list()
+ .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()))
+ })?;
+
+ callable_for_node(sema, &calling_node, &token)
+}
+
+pub fn callable_for_node(
+ sema: &Semantics<'_, RootDatabase>,
+ calling_node: &ast::CallableExpr,
+ token: &SyntaxToken,
+) -> Option<(hir::Callable, Option<usize>)> {
+ let callable = match &calling_node {
+ ast::CallableExpr::Call(call) => {
+ let expr = call.expr()?;
+ sema.type_of_expr(&expr)?.adjusted().as_callable(sema.db)
+ }
+ ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
+ }?;
+ let active_param = if let Some(arg_list) = calling_node.arg_list() {
+ let param = arg_list
+ .args()
+ .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
+ .count();
+ Some(param)
+ } else {
+ None
+ };
+ Some((callable, active_param))
+}
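+
+// Illustrative sketch of how the active-parameter index falls out of
+// `callable_for_node` above: the index is simply the number of arguments whose
+// text range ends before the cursor token starts. The fixture below is
+// hypothetical and only demonstrates the counting rule.
+//
+//     fn take(a: u32, b: u32, c: u32) {}
+//     fn caller() {
+//         take(1, 2, /* cursor here */);
+//     }
+//
+// With the cursor after the second comma, the two preceding arguments (`1` and
+// `2`) end before the cursor token, so `take_while(..).count()` yields 2, i.e.
+// the third parameter `c` is reported as the active one.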
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
new file mode 100644
index 000000000..98b0e9c94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -0,0 +1,163 @@
+//! Applies changes to the IDE state transactionally.
+
+use std::sync::Arc;
+
+use base_db::{
+ salsa::{Database, Durability},
+ Change, SourceRootId,
+};
+use profile::{memory_usage, Bytes};
+use rustc_hash::FxHashSet;
+
+use crate::{symbol_index::SymbolsDatabase, RootDatabase};
+
+impl RootDatabase {
+ pub fn request_cancellation(&mut self) {
+ let _p = profile::span("RootDatabase::request_cancellation");
+ self.salsa_runtime_mut().synthetic_write(Durability::LOW);
+ }
+
+ pub fn apply_change(&mut self, change: Change) {
+ let _p = profile::span("RootDatabase::apply_change");
+ self.request_cancellation();
+ tracing::info!("apply_change {:?}", change);
+ if let Some(roots) = &change.roots {
+ let mut local_roots = FxHashSet::default();
+ let mut library_roots = FxHashSet::default();
+ for (idx, root) in roots.iter().enumerate() {
+ let root_id = SourceRootId(idx as u32);
+ if root.is_library {
+ library_roots.insert(root_id);
+ } else {
+ local_roots.insert(root_id);
+ }
+ }
+ self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+ self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH);
+ }
+ change.apply(self);
+ }
+
+ // Feature: Memory Usage
+ //
+ // Clears rust-analyzer's internal database and prints memory usage statistics.
+ //
+ // |===
+ // | Editor | Action Name
+ //
+ // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)**
+ // |===
+ // image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
+ let mut acc: Vec<(String, Bytes)> = vec![];
+ macro_rules! purge_each_query {
+ ($($q:path)*) => {$(
+ let before = memory_usage().allocated;
+ $q.in_db(self).purge();
+ let after = memory_usage().allocated;
+ let q: $q = Default::default();
+ let name = format!("{:?}", q);
+ acc.push((name, before - after));
+ )*}
+ }
+ purge_each_query![
+ // SourceDatabase
+ base_db::ParseQuery
+ base_db::CrateGraphQuery
+
+ // SourceDatabaseExt
+ base_db::FileTextQuery
+ base_db::FileSourceRootQuery
+ base_db::SourceRootQuery
+ base_db::SourceRootCratesQuery
+
+ // AstDatabase
+ hir::db::AstIdMapQuery
+ hir::db::MacroArgTextQuery
+ hir::db::MacroDefQuery
+ hir::db::ParseMacroExpansionQuery
+ hir::db::MacroExpandQuery
+ hir::db::HygieneFrameQuery
+ hir::db::InternMacroCallQuery
+
+ // DefDatabase
+ hir::db::FileItemTreeQuery
+ hir::db::BlockDefMapQuery
+ hir::db::CrateDefMapQueryQuery
+ hir::db::FieldsAttrsQuery
+ hir::db::VariantsAttrsQuery
+ hir::db::FieldsAttrsSourceMapQuery
+ hir::db::VariantsAttrsSourceMapQuery
+ hir::db::StructDataQuery
+ hir::db::UnionDataQuery
+ hir::db::EnumDataQuery
+ hir::db::ImplDataQuery
+ hir::db::TraitDataQuery
+ hir::db::TypeAliasDataQuery
+ hir::db::FunctionDataQuery
+ hir::db::ConstDataQuery
+ hir::db::StaticDataQuery
+ hir::db::BodyWithSourceMapQuery
+ hir::db::BodyQuery
+ hir::db::ExprScopesQuery
+ hir::db::GenericParamsQuery
+ hir::db::AttrsQuery
+ hir::db::CrateLangItemsQuery
+ hir::db::LangItemQuery
+ hir::db::ImportMapQuery
+
+ // HirDatabase
+ hir::db::InferQueryQuery
+ hir::db::TyQuery
+ hir::db::ValueTyQuery
+ hir::db::ImplSelfTyQuery
+ hir::db::ImplTraitQuery
+ hir::db::FieldTypesQuery
+ hir::db::CallableItemSignatureQuery
+ hir::db::GenericPredicatesForParamQuery
+ hir::db::GenericPredicatesQuery
+ hir::db::GenericDefaultsQuery
+ hir::db::InherentImplsInCrateQuery
+ hir::db::TraitEnvironmentQuery
+ hir::db::TraitImplsInCrateQuery
+ hir::db::TraitImplsInDepsQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::TraitDatumQuery
+ hir::db::StructDatumQuery
+ hir::db::ImplDatumQuery
+ hir::db::FnDefDatumQuery
+ hir::db::ReturnTypeImplTraitsQuery
+ hir::db::InternCallableDefQuery
+ hir::db::InternTypeOrConstParamIdQuery
+ hir::db::InternImplTraitIdQuery
+ hir::db::InternClosureQuery
+ hir::db::AssociatedTyValueQuery
+ hir::db::TraitSolveQueryQuery
+ hir::db::InternTypeOrConstParamIdQuery
+
+ // SymbolsDatabase
+ crate::symbol_index::ModuleSymbolsQuery
+ crate::symbol_index::LibrarySymbolsQuery
+ crate::symbol_index::LocalRootsQuery
+ crate::symbol_index::LibraryRootsQuery
+
+ // LineIndexDatabase
+ crate::LineIndexQuery
+
+ // InternDatabase
+ hir::db::InternFunctionQuery
+ hir::db::InternStructQuery
+ hir::db::InternUnionQuery
+ hir::db::InternEnumQuery
+ hir::db::InternConstQuery
+ hir::db::InternStaticQuery
+ hir::db::InternTraitQuery
+ hir::db::InternTypeAliasQuery
+ hir::db::InternImplQuery
+ ];
+
+ acc.sort_by_key(|it| std::cmp::Reverse(it.1));
+ acc
+ }
+}
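+
+// Illustrative sketch of what one arm of the `purge_each_query!` invocation
+// above roughly expands to, using `base_db::ParseQuery` as the example query:
+//
+//     let before = memory_usage().allocated;
+//     base_db::ParseQuery.in_db(self).purge();
+//     let after = memory_usage().allocated;
+//     let q: base_db::ParseQuery = Default::default();
+//     let name = format!("{:?}", q);
+//     acc.push((name, before - after));
+//
+// Each listed query is purged in turn and the freed bytes are attributed to
+// that query's name in the returned statistics.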
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
new file mode 100644
index 000000000..da23763dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
@@ -0,0 +1,137 @@
+//! This module defines the `Assist` data structure. The actual assists live in
+//! the `ide_assists` downstream crate. We want to define the data structures in
+//! this low-level crate though, because `ide_diagnostics` also needs them
+//! (fixits for diagnostics and assists are the same thing under the hood). We
+//! want to compile `ide_assists` and `ide_diagnostics` in parallel though, so
+//! we pull the common definitions upstream, to this crate.
+
+use std::str::FromStr;
+
+use syntax::TextRange;
+
+use crate::{label::Label, source_change::SourceChange};
+
+#[derive(Debug, Clone)]
+pub struct Assist {
+ pub id: AssistId,
+ /// Short description of the assist, as shown in the UI.
+ pub label: Label,
+ pub group: Option<GroupLabel>,
+ /// Target ranges are used to sort assists: the smaller the target range,
+ /// the more specific the assist is, and so it should be sorted first.
+ pub target: TextRange,
+ /// Computing the source change is sometimes much more costly than computing the
+ /// other fields. Additionally, the actual change is not required to show
+ /// the lightbulb UI; it is only needed when the user tries to apply an
+ /// assist. So, we compute it lazily: the API allows requesting assists with
+ /// or without source change. We could (and in fact, used to) distinguish
+ /// between resolved and unresolved assists at the type level, but this is
+ /// cumbersome, especially if you want to embed an assist into another data
+ /// structure, such as a diagnostic.
+ pub source_change: Option<SourceChange>,
+ pub trigger_signature_help: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum AssistKind {
+ // FIXME: does the None variant make sense? Probably not.
+ None,
+
+ QuickFix,
+ Generate,
+ Refactor,
+ RefactorExtract,
+ RefactorInline,
+ RefactorRewrite,
+}
+
+impl AssistKind {
+ pub fn contains(self, other: AssistKind) -> bool {
+ if self == other {
+ return true;
+ }
+
+ match self {
+ AssistKind::None | AssistKind::Generate => true,
+ AssistKind::Refactor => matches!(
+ other,
+ AssistKind::RefactorExtract
+ | AssistKind::RefactorInline
+ | AssistKind::RefactorRewrite
+ ),
+ _ => false,
+ }
+ }
+
+ pub fn name(&self) -> &str {
+ match self {
+ AssistKind::None => "None",
+ AssistKind::QuickFix => "QuickFix",
+ AssistKind::Generate => "Generate",
+ AssistKind::Refactor => "Refactor",
+ AssistKind::RefactorExtract => "RefactorExtract",
+ AssistKind::RefactorInline => "RefactorInline",
+ AssistKind::RefactorRewrite => "RefactorRewrite",
+ }
+ }
+}
+
+impl FromStr for AssistKind {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "None" => Ok(AssistKind::None),
+ "QuickFix" => Ok(AssistKind::QuickFix),
+ "Generate" => Ok(AssistKind::Generate),
+ "Refactor" => Ok(AssistKind::Refactor),
+ "RefactorExtract" => Ok(AssistKind::RefactorExtract),
+ "RefactorInline" => Ok(AssistKind::RefactorInline),
+ "RefactorRewrite" => Ok(AssistKind::RefactorRewrite),
+ unknown => Err(format!("Unknown AssistKind: '{}'", unknown)),
+ }
+ }
+}
+
+/// Unique identifier of the assist, should not be shown to the user
+/// directly.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct AssistId(pub &'static str, pub AssistKind);
+
+/// A way to control how many assists to resolve during assist resolution.
+/// When an assist is resolved, its edits are calculated, which might be costly to always do by default.
+#[derive(Debug)]
+pub enum AssistResolveStrategy {
+ /// No assists should be resolved.
+ None,
+ /// All assists should be resolved.
+ All,
+ /// Only a certain assist should be resolved.
+ Single(SingleResolve),
+}
+
+/// Holds the [`AssistId`] data of a certain assist to resolve.
+/// The original id object cannot be used due to its `'static` lifetime
+/// and the requirement to construct this struct dynamically during resolve handling.
+#[derive(Debug)]
+pub struct SingleResolve {
+ /// The id of the assist.
+ pub assist_id: String,
+ /// The kind of the assist.
+ pub assist_kind: AssistKind,
+}
+
+impl AssistResolveStrategy {
+ pub fn should_resolve(&self, id: &AssistId) -> bool {
+ match self {
+ AssistResolveStrategy::None => false,
+ AssistResolveStrategy::All => true,
+ AssistResolveStrategy::Single(single_resolve) => {
+ single_resolve.assist_id == id.0 && single_resolve.assist_kind == id.1
+ }
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct GroupLabel(pub String);
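+
+// Illustrative sketch of the containment and resolution rules defined above;
+// the assist id used here is just an example value, not a claim about which
+// assists actually exist.
+//
+//     assert!(AssistKind::Refactor.contains(AssistKind::RefactorInline));
+//     assert!(!AssistKind::QuickFix.contains(AssistKind::Refactor));
+//
+//     let id = AssistId("inline_call", AssistKind::RefactorInline);
+//     let strategy = AssistResolveStrategy::Single(SingleResolve {
+//         assist_id: "inline_call".to_string(),
+//         assist_kind: AssistKind::RefactorInline,
+//     });
+//     assert!(strategy.should_resolve(&id));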
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
new file mode 100644
index 000000000..aeaca00ec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -0,0 +1,545 @@
+//! `NameDefinition` keeps information about the element we want to search references for.
+//! The element is represented by `NameKind`. It's located inside some `container` and
+//! has a `visibility`, which defines a search scope.
+//! Note that not all of the classified items support reference search.
+
+// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
+
+use arrayvec::ArrayVec;
+use hir::{
+ Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
+ Function, GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef,
+ Name, PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility,
+};
+use stdx::impl_from;
+use syntax::{
+ ast::{self, AstNode},
+ match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+use crate::RootDatabase;
+
+// FIXME: a more precise name would probably be `Symbol`?
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
+pub enum Definition {
+ Macro(Macro),
+ Field(Field),
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ SelfType(Impl),
+ Local(Local),
+ GenericParam(GenericParam),
+ Label(Label),
+ DeriveHelper(DeriveHelper),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+}
+
+impl Definition {
+ pub fn canonical_module_path(&self, db: &RootDatabase) -> Option<impl Iterator<Item = Module>> {
+ self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn krate(&self, db: &RootDatabase) -> Option<Crate> {
+ Some(match self {
+ Definition::Module(m) => m.krate(),
+ _ => self.module(db)?.krate(),
+ })
+ }
+
+ pub fn module(&self, db: &RootDatabase) -> Option<Module> {
+ let module = match self {
+ Definition::Macro(it) => it.module(db),
+ Definition::Module(it) => it.parent(db)?,
+ Definition::Field(it) => it.parent_def(db).module(db),
+ Definition::Function(it) => it.module(db),
+ Definition::Adt(it) => it.module(db),
+ Definition::Const(it) => it.module(db),
+ Definition::Static(it) => it.module(db),
+ Definition::Trait(it) => it.module(db),
+ Definition::TypeAlias(it) => it.module(db),
+ Definition::Variant(it) => it.module(db),
+ Definition::SelfType(it) => it.module(db),
+ Definition::Local(it) => it.module(db),
+ Definition::GenericParam(it) => it.module(db),
+ Definition::Label(it) => it.module(db),
+ Definition::DeriveHelper(it) => it.derive().module(db),
+ Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
+ return None
+ }
+ };
+ Some(module)
+ }
+
+ pub fn visibility(&self, db: &RootDatabase) -> Option<Visibility> {
+ let vis = match self {
+ Definition::Field(sf) => sf.visibility(db),
+ Definition::Module(it) => it.visibility(db),
+ Definition::Function(it) => it.visibility(db),
+ Definition::Adt(it) => it.visibility(db),
+ Definition::Const(it) => it.visibility(db),
+ Definition::Static(it) => it.visibility(db),
+ Definition::Trait(it) => it.visibility(db),
+ Definition::TypeAlias(it) => it.visibility(db),
+ Definition::Variant(it) => it.visibility(db),
+ Definition::BuiltinType(_) => Visibility::Public,
+ Definition::Macro(_) => return None,
+ Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ | Definition::SelfType(_)
+ | Definition::Local(_)
+ | Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_) => return None,
+ };
+ Some(vis)
+ }
+
+ pub fn name(&self, db: &RootDatabase) -> Option<Name> {
+ let name = match self {
+ Definition::Macro(it) => it.name(db),
+ Definition::Field(it) => it.name(db),
+ Definition::Module(it) => it.name(db)?,
+ Definition::Function(it) => it.name(db),
+ Definition::Adt(it) => it.name(db),
+ Definition::Variant(it) => it.name(db),
+ Definition::Const(it) => it.name(db)?,
+ Definition::Static(it) => it.name(db),
+ Definition::Trait(it) => it.name(db),
+ Definition::TypeAlias(it) => it.name(db),
+ Definition::BuiltinType(it) => it.name(),
+ Definition::SelfType(_) => return None,
+ Definition::Local(it) => it.name(db),
+ Definition::GenericParam(it) => it.name(db),
+ Definition::Label(it) => it.name(db),
+ Definition::BuiltinAttr(_) => return None, // FIXME
+ Definition::ToolModule(_) => return None, // FIXME
+ Definition::DeriveHelper(it) => it.name(db),
+ };
+ Some(name)
+ }
+}
+
+#[derive(Debug)]
+pub enum IdentClass {
+ NameClass(NameClass),
+ NameRefClass(NameRefClass),
+}
+
+impl IdentClass {
+ pub fn classify_node(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ ) -> Option<IdentClass> {
+ match_ast! {
+ match node {
+ ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
+ ast::NameRef(name_ref) => NameRefClass::classify(sema, &name_ref).map(IdentClass::NameRefClass),
+ ast::Lifetime(lifetime) => {
+ NameClass::classify_lifetime(sema, &lifetime)
+ .map(IdentClass::NameClass)
+ .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
+ },
+ _ => None,
+ }
+ }
+ }
+
+ pub fn classify_token(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+ ) -> Option<IdentClass> {
+ let parent = token.parent()?;
+ Self::classify_node(sema, &parent)
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<IdentClass> {
+ NameRefClass::classify_lifetime(sema, lifetime)
+ .map(IdentClass::NameRefClass)
+ .or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
+ }
+
+ pub fn definitions(self) -> ArrayVec<Definition, 2> {
+ let mut res = ArrayVec::new();
+ match self {
+ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+ res.push(it)
+ }
+ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+ res.push(Definition::Local(local_def));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+ res.push(Definition::Local(local_ref));
+ res.push(Definition::Field(field_ref));
+ }
+ }
+ res
+ }
+}
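+
+// Illustrative sketch of why `definitions()` returns an `ArrayVec` with
+// capacity 2: field shorthand is the one case where a single identifier names
+// two definitions at once. The snippet below is hypothetical.
+//
+//     struct S { field: u32 }
+//     fn f(s: S) {
+//         // In the pattern, `field` both introduces a local binding and refers
+//         // to the struct field, so classifying it yields two `Definition`s.
+//         let S { field } = s;
+//         let _ = field;
+//     }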
+
+/// At first blush, a single `ast::Name` defines a single definition at some
+/// scope. That is, by just looking at the syntactic category, we can
+/// unambiguously determine the semantic category.
+///
+/// Sadly, that's not 100% true; there are special cases. To make sure that
+/// callers handle all the special cases correctly via exhaustive matching, we
+/// add a [`NameClass`] enum which lists all of them!
+///
+/// A model special case is the `None` constant in a pattern.
+#[derive(Debug)]
+pub enum NameClass {
+ Definition(Definition),
+ /// `None` in `if let None = Some(82) {}`.
+ /// Syntactically, it is a name, but semantically it is a reference.
+ ConstReference(Definition),
+ /// `field` in `if let Foo { field } = foo`. Here, `ast::Name` both introduces
+ /// a definition into a local scope, and refers to an existing definition.
+ PatFieldShorthand {
+ local_def: Local,
+ field_ref: Field,
+ },
+}
+
+impl NameClass {
+ /// `Definition` defined by this name.
+ pub fn defined(self) -> Option<Definition> {
+ let res = match self {
+ NameClass::Definition(it) => it,
+ NameClass::ConstReference(_) => return None,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ };
+ Some(res)
+ }
+
+ pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
+ let _p = profile::span("classify_name");
+
+ let parent = name.syntax().parent()?;
+
+ let definition = match_ast! {
+ match parent {
+ ast::Item(it) => classify_item(sema, it)?,
+ ast::IdentPat(it) => return classify_ident_pat(sema, it),
+ ast::Rename(it) => classify_rename(sema, it)?,
+ ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?),
+ ast::RecordField(it) => Definition::Field(sema.to_def(&it)?),
+ ast::Variant(it) => Definition::Variant(sema.to_def(&it)?),
+ ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()),
+ ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()),
+ _ => return None,
+ }
+ };
+ return Some(NameClass::Definition(definition));
+
+ fn classify_item(
+ sema: &Semantics<'_, RootDatabase>,
+ item: ast::Item,
+ ) -> Option<Definition> {
+ let definition = match item {
+ ast::Item::MacroRules(it) => {
+ Definition::Macro(sema.to_def(&ast::Macro::MacroRules(it))?)
+ }
+ ast::Item::MacroDef(it) => {
+ Definition::Macro(sema.to_def(&ast::Macro::MacroDef(it))?)
+ }
+ ast::Item::Const(it) => Definition::Const(sema.to_def(&it)?),
+ ast::Item::Fn(it) => {
+ let def = sema.to_def(&it)?;
+ def.as_proc_macro(sema.db)
+ .map(Definition::Macro)
+ .unwrap_or(Definition::Function(def))
+ }
+ ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?),
+ ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?),
+ ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?),
+ ast::Item::TypeAlias(it) => Definition::TypeAlias(sema.to_def(&it)?),
+ ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
+ ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
+ ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)),
+ _ => return None,
+ };
+ Some(definition)
+ }
+
+ fn classify_ident_pat(
+ sema: &Semantics<'_, RootDatabase>,
+ ident_pat: ast::IdentPat,
+ ) -> Option<NameClass> {
+ if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) {
+ return Some(NameClass::ConstReference(Definition::from(def)));
+ }
+
+ let local = sema.to_def(&ident_pat)?;
+ let pat_parent = ident_pat.syntax().parent();
+ if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) {
+ if record_pat_field.name_ref().is_none() {
+ if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
+ return Some(NameClass::PatFieldShorthand {
+ local_def: local,
+ field_ref: field,
+ });
+ }
+ }
+ }
+ Some(NameClass::Definition(Definition::Local(local)))
+ }
+
+ fn classify_rename(
+ sema: &Semantics<'_, RootDatabase>,
+ rename: ast::Rename,
+ ) -> Option<Definition> {
+ if let Some(use_tree) = rename.syntax().parent().and_then(ast::UseTree::cast) {
+ let path = use_tree.path()?;
+ sema.resolve_path(&path).map(Definition::from)
+ } else {
+ let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?;
+ let krate = sema.resolve_extern_crate(&extern_crate)?;
+ let root_module = krate.root_module(sema.db);
+ Some(Definition::Module(root_module))
+ }
+ }
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<NameClass> {
+ let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string());
+ let parent = lifetime.syntax().parent()?;
+
+ if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
+ sema.to_def(&it).map(Into::into).map(Definition::GenericParam)
+ } else if let Some(it) = ast::Label::cast(parent) {
+ sema.to_def(&it).map(Definition::Label)
+ } else {
+ None
+ }
+ .map(NameClass::Definition)
+ }
+}
+
+/// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than
+/// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a
+/// reference most of the time, but there are a couple of annoying exceptions.
+///
+/// A model special case is field shorthand syntax, which uses a single
+/// reference to point to two different defs.
+#[derive(Debug)]
+pub enum NameRefClass {
+ Definition(Definition),
+ FieldShorthand { local_ref: Local, field_ref: Field },
+}
+
+impl NameRefClass {
+ // Note: we don't have unit-tests for this rather important function.
+ // It is primarily exercised via goto definition tests in `ide`.
+ pub fn classify(
+ sema: &Semantics<'_, RootDatabase>,
+ name_ref: &ast::NameRef,
+ ) -> Option<NameRefClass> {
+ let _p = profile::span("classify_name_ref").detail(|| name_ref.to_string());
+
+ let parent = name_ref.syntax().parent()?;
+
+ if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) {
+ if let Some((field, local, _)) = sema.resolve_record_field(&record_field) {
+ let res = match local {
+ None => NameRefClass::Definition(Definition::Field(field)),
+ Some(local) => {
+ NameRefClass::FieldShorthand { field_ref: field, local_ref: local }
+ }
+ };
+ return Some(res);
+ }
+ }
+
+ if let Some(path) = ast::PathSegment::cast(parent.clone()).map(|it| it.parent_path()) {
+ if path.parent_path().is_none() {
+ if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
+ // Only use this to resolve last segments to macro calls, as qualifiers resolve
+ // to modules below.
+ if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
+ return Some(NameRefClass::Definition(Definition::Macro(macro_def)));
+ }
+ }
+ }
+ return sema.resolve_path(&path).map(Into::into).map(NameRefClass::Definition);
+ }
+
+ match_ast! {
+ match parent {
+ ast::MethodCallExpr(method_call) => {
+ sema.resolve_method_call(&method_call)
+ .map(Definition::Function)
+ .map(NameRefClass::Definition)
+ },
+ ast::FieldExpr(field_expr) => {
+ sema.resolve_field(&field_expr)
+ .map(Definition::Field)
+ .map(NameRefClass::Definition)
+ },
+ ast::RecordPatField(record_pat_field) => {
+ sema.resolve_record_pat_field(&record_pat_field)
+ .map(Definition::Field)
+ .map(NameRefClass::Definition)
+ },
+ ast::AssocTypeArg(_) => {
+ // `Trait<Assoc = Ty>`
+ // ^^^^^
+ let containing_path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
+ let resolved = sema.resolve_path(&containing_path)?;
+ if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved {
+ if let Some(ty) = tr
+ .items_with_supertraits(sema.db)
+ .iter()
+ .filter_map(|&assoc| match assoc {
+ hir::AssocItem::TypeAlias(it) => Some(it),
+ _ => None,
+ })
+ .find(|alias| alias.name(sema.db).to_smol_str() == name_ref.text().as_str())
+ {
+ return Some(NameRefClass::Definition(Definition::TypeAlias(ty)));
+ }
+ }
+ None
+ },
+ ast::ExternCrate(extern_crate) => {
+ let krate = sema.resolve_extern_crate(&extern_crate)?;
+ let root_module = krate.root_module(sema.db);
+ Some(NameRefClass::Definition(Definition::Module(root_module)))
+ },
+ _ => None
+ }
+ }
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<NameRefClass> {
+ let _p = profile::span("classify_lifetime_ref").detail(|| lifetime.to_string());
+ let parent = lifetime.syntax().parent()?;
+ match parent.kind() {
+ SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => {
+ sema.resolve_label(lifetime).map(Definition::Label).map(NameRefClass::Definition)
+ }
+ SyntaxKind::LIFETIME_ARG
+ | SyntaxKind::SELF_PARAM
+ | SyntaxKind::TYPE_BOUND
+ | SyntaxKind::WHERE_PRED
+ | SyntaxKind::REF_TYPE => sema
+ .resolve_lifetime_param(lifetime)
+ .map(GenericParam::LifetimeParam)
+ .map(Definition::GenericParam)
+ .map(NameRefClass::Definition),
+ // Lifetime bounds, such as the 'b in 'a: 'b, aren't wrapped in TypeBound nodes, so we have to check
+ // whether our lifetime is in a LifetimeParam without being the constrained lifetime.
+ _ if ast::LifetimeParam::cast(parent).and_then(|param| param.lifetime()).as_ref()
+ != Some(lifetime) =>
+ {
+ sema.resolve_lifetime_param(lifetime)
+ .map(GenericParam::LifetimeParam)
+ .map(Definition::GenericParam)
+ .map(NameRefClass::Definition)
+ }
+ _ => None,
+ }
+ }
+}
+
+impl_from!(
+ Field, Module, Function, Adt, Variant, Const, Static, Trait, TypeAlias, BuiltinType, Local,
+ GenericParam, Label, Macro
+ for Definition
+);
+
+impl From<Impl> for Definition {
+ fn from(impl_: Impl) -> Self {
+ Definition::SelfType(impl_)
+ }
+}
+
+impl AsAssocItem for Definition {
+ fn as_assoc_item(self, db: &dyn hir::db::HirDatabase) -> Option<AssocItem> {
+ match self {
+ Definition::Function(it) => it.as_assoc_item(db),
+ Definition::Const(it) => it.as_assoc_item(db),
+ Definition::TypeAlias(it) => it.as_assoc_item(db),
+ _ => None,
+ }
+ }
+}
+
+impl From<AssocItem> for Definition {
+ fn from(assoc_item: AssocItem) -> Self {
+ match assoc_item {
+ AssocItem::Function(it) => Definition::Function(it),
+ AssocItem::Const(it) => Definition::Const(it),
+ AssocItem::TypeAlias(it) => Definition::TypeAlias(it),
+ }
+ }
+}
+
+impl From<PathResolution> for Definition {
+ fn from(path_resolution: PathResolution) -> Self {
+ match path_resolution {
+ PathResolution::Def(def) => def.into(),
+ PathResolution::Local(local) => Definition::Local(local),
+ PathResolution::TypeParam(par) => Definition::GenericParam(par.into()),
+ PathResolution::ConstParam(par) => Definition::GenericParam(par.into()),
+ PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
+ PathResolution::BuiltinAttr(attr) => Definition::BuiltinAttr(attr),
+ PathResolution::ToolModule(tool) => Definition::ToolModule(tool),
+ PathResolution::DeriveHelper(helper) => Definition::DeriveHelper(helper),
+ }
+ }
+}
+
+impl From<ModuleDef> for Definition {
+ fn from(def: ModuleDef) -> Self {
+ match def {
+ ModuleDef::Module(it) => Definition::Module(it),
+ ModuleDef::Function(it) => Definition::Function(it),
+ ModuleDef::Adt(it) => Definition::Adt(it),
+ ModuleDef::Variant(it) => Definition::Variant(it),
+ ModuleDef::Const(it) => Definition::Const(it),
+ ModuleDef::Static(it) => Definition::Static(it),
+ ModuleDef::Trait(it) => Definition::Trait(it),
+ ModuleDef::TypeAlias(it) => Definition::TypeAlias(it),
+ ModuleDef::Macro(it) => Definition::Macro(it),
+ ModuleDef::BuiltinType(it) => Definition::BuiltinType(it),
+ }
+ }
+}
+
+impl From<Definition> for Option<ItemInNs> {
+ fn from(def: Definition) -> Self {
+ let item = match def {
+ Definition::Module(it) => ModuleDef::Module(it),
+ Definition::Function(it) => ModuleDef::Function(it),
+ Definition::Adt(it) => ModuleDef::Adt(it),
+ Definition::Variant(it) => ModuleDef::Variant(it),
+ Definition::Const(it) => ModuleDef::Const(it),
+ Definition::Static(it) => ModuleDef::Static(it),
+ Definition::Trait(it) => ModuleDef::Trait(it),
+ Definition::TypeAlias(it) => ModuleDef::TypeAlias(it),
+ Definition::BuiltinType(it) => ModuleDef::BuiltinType(it),
+ _ => return None,
+ };
+ Some(ItemInNs::from(item))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
new file mode 100644
index 000000000..c8341fed1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -0,0 +1,185 @@
+//! See [`FamousDefs`].
+
+use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase};
+use hir::{Crate, Enum, Macro, Module, ScopeDef, Semantics, Trait};
+
+use crate::RootDatabase;
+
+/// Helps with finding well-known things inside the standard library. This is
+/// somewhat similar to the known-paths infra inside hir, but it is different; we
+/// want to make sure that IDE-specific paths don't become interesting inside
+/// the compiler itself as well.
+///
+/// Note that, by default, rust-analyzer tests **do not** include core or std
+/// libraries. If you are writing tests for functionality using [`FamousDefs`],
+/// you'll want to include a minicore declaration (see `test_utils::MiniCore`) at
+/// the start of your tests:
+///
+/// ```
+/// //- minicore: iterator, ord, derive
+/// ```
+pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Crate);
+
+#[allow(non_snake_case)]
+impl FamousDefs<'_, '_> {
+ pub fn std(&self) -> Option<Crate> {
+ self.find_lang_crate(LangCrateOrigin::Std)
+ }
+
+ pub fn core(&self) -> Option<Crate> {
+ self.find_lang_crate(LangCrateOrigin::Core)
+ }
+
+ pub fn alloc(&self) -> Option<Crate> {
+ self.find_lang_crate(LangCrateOrigin::Alloc)
+ }
+
+ pub fn test(&self) -> Option<Crate> {
+ self.find_lang_crate(LangCrateOrigin::Test)
+ }
+
+ pub fn proc_macro(&self) -> Option<Crate> {
+ self.find_lang_crate(LangCrateOrigin::ProcMacro)
+ }
+
+ pub fn core_cmp_Ord(&self) -> Option<Trait> {
+ self.find_trait("core:cmp:Ord")
+ }
+
+ pub fn core_convert_From(&self) -> Option<Trait> {
+ self.find_trait("core:convert:From")
+ }
+
+ pub fn core_convert_Into(&self) -> Option<Trait> {
+ self.find_trait("core:convert:Into")
+ }
+
+ pub fn core_option_Option(&self) -> Option<Enum> {
+ self.find_enum("core:option:Option")
+ }
+
+ pub fn core_result_Result(&self) -> Option<Enum> {
+ self.find_enum("core:result:Result")
+ }
+
+ pub fn core_default_Default(&self) -> Option<Trait> {
+ self.find_trait("core:default:Default")
+ }
+
+ pub fn core_iter_Iterator(&self) -> Option<Trait> {
+ self.find_trait("core:iter:traits:iterator:Iterator")
+ }
+
+ pub fn core_iter_IntoIterator(&self) -> Option<Trait> {
+ self.find_trait("core:iter:traits:collect:IntoIterator")
+ }
+
+ pub fn core_iter(&self) -> Option<Module> {
+ self.find_module("core:iter")
+ }
+
+ pub fn core_ops_Deref(&self) -> Option<Trait> {
+ self.find_trait("core:ops:Deref")
+ }
+
+ pub fn core_ops_DerefMut(&self) -> Option<Trait> {
+ self.find_trait("core:ops:DerefMut")
+ }
+
+ pub fn core_convert_AsRef(&self) -> Option<Trait> {
+ self.find_trait("core:convert:AsRef")
+ }
+
+ pub fn core_ops_ControlFlow(&self) -> Option<Enum> {
+ self.find_enum("core:ops:ControlFlow")
+ }
+
+ pub fn core_ops_Drop(&self) -> Option<Trait> {
+ self.find_trait("core:ops:Drop")
+ }
+
+ pub fn core_marker_Copy(&self) -> Option<Trait> {
+ self.find_trait("core:marker:Copy")
+ }
+
+ pub fn core_macros_builtin_derive(&self) -> Option<Macro> {
+ self.find_macro("core:macros:builtin:derive")
+ }
+
+ pub fn builtin_crates(&self) -> impl Iterator<Item = Crate> {
+ IntoIterator::into_iter([
+ self.std(),
+ self.core(),
+ self.alloc(),
+ self.test(),
+ self.proc_macro(),
+ ])
+ .flatten()
+ }
+
+ fn find_trait(&self, path: &str) -> Option<Trait> {
+ match self.find_def(path)? {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ fn find_macro(&self, path: &str) -> Option<Macro> {
+ match self.find_def(path)? {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ fn find_enum(&self, path: &str) -> Option<Enum> {
+ match self.find_def(path)? {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(it))) => Some(it),
+ _ => None,
+ }
+ }
+
+ fn find_module(&self, path: &str) -> Option<Module> {
+ match self.find_def(path)? {
+ hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(it)) => Some(it),
+ _ => None,
+ }
+ }
+
+ fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option<Crate> {
+ let krate = self.1;
+ let db = self.0.db;
+ let crate_graph = self.0.db.crate_graph();
+ let res = krate
+ .dependencies(db)
+ .into_iter()
+ .find(|dep| crate_graph[dep.krate.into()].origin == CrateOrigin::Lang(origin))?
+ .krate;
+ Some(res)
+ }
+
+ fn find_def(&self, path: &str) -> Option<ScopeDef> {
+ let db = self.0.db;
+ let mut path = path.split(':');
+ let trait_ = path.next_back()?;
+ let lang_crate = path.next()?;
+ let lang_crate = match LangCrateOrigin::from(lang_crate) {
+ LangCrateOrigin::Other => return None,
+ lang_crate => lang_crate,
+ };
+ let std_crate = self.find_lang_crate(lang_crate)?;
+ let mut module = std_crate.root_module(db);
+ for segment in path {
+ module = module.children(db).find_map(|child| {
+ let name = child.name(db)?;
+ if name.to_smol_str() == segment {
+ Some(child)
+ } else {
+ None
+ }
+ })?;
+ }
+ let def =
+ module.scope(db, None).into_iter().find(|(name, _def)| name.to_smol_str() == trait_)?.1;
+ Some(def)
+ }
+}
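+
+// Illustrative sketch of the path format `find_def` expects. Paths are
+// colon-separated: the first segment names a lang crate, the last segment names
+// the item, and everything in between is walked as nested modules. The variable
+// name below is hypothetical.
+//
+//     // "core:iter:traits:iterator:Iterator"
+//     //  ^crate ^module ^module  ^module  ^item
+//     let iterator = famous_defs.core_iter_Iterator();
+//
+// This resolves the `core` crate, descends through `iter::traits::iterator`,
+// and finally looks up `Iterator` in that module's scope.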
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
new file mode 100644
index 000000000..64dd2bb5f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -0,0 +1,7682 @@
+//! Generated by `sourcegen_lints`, do not edit by hand.
+
+#[derive(Clone)]
+pub struct Lint {
+ pub label: &'static str,
+ pub description: &'static str,
+}
+pub struct LintGroup {
+ pub lint: Lint,
+ pub children: &'static [&'static str],
+}
+pub const DEFAULT_LINTS: &[Lint] = &[
+ Lint {
+ label: "absolute_paths_not_starting_with_crate",
+ description: r##"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"##,
+ },
+ Lint { label: "ambiguous_associated_items", description: r##"ambiguous associated items"## },
+ Lint { label: "anonymous_parameters", description: r##"detects anonymous parameters"## },
+ Lint { label: "arithmetic_overflow", description: r##"arithmetic operation overflows"## },
+ Lint {
+ label: "array_into_iter",
+ description: r##"detects calling `into_iter` on arrays in Rust 2015 and 2018"##,
+ },
+ Lint {
+ label: "asm_sub_register",
+ description: r##"using only a subset of a register for inline asm inputs"##,
+ },
+ Lint { label: "bad_asm_style", description: r##"incorrect use of inline assembly"## },
+ Lint {
+ label: "bare_trait_objects",
+ description: r##"suggest using `dyn Trait` for trait objects"##,
+ },
+ Lint {
+ label: "bindings_with_variant_name",
+ description: r##"detects pattern bindings with the same name as one of the matched variants"##,
+ },
+ Lint { label: "box_pointers", description: r##"use of owned (Box type) heap memory"## },
+ Lint {
+ label: "break_with_label_and_loop",
+ description: r##"`break` expression with label and unlabeled loop as value expression"##,
+ },
+ Lint {
+ label: "cenum_impl_drop_cast",
+ description: r##"a C-like enum implementing Drop is cast"##,
+ },
+ Lint {
+ label: "clashing_extern_declarations",
+ description: r##"detects when an extern fn has been declared with the same name but different types"##,
+ },
+ Lint {
+ label: "coherence_leak_check",
+ description: r##"distinct impls distinguished only by the leak-check code"##,
+ },
+ Lint {
+ label: "conflicting_repr_hints",
+ description: r##"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"##,
+ },
+ Lint {
+ label: "confusable_idents",
+ description: r##"detects visually confusable pairs between identifiers"##,
+ },
+ Lint {
+ label: "const_err",
+ description: r##"constant evaluation encountered erroneous expression"##,
+ },
+ Lint {
+ label: "const_evaluatable_unchecked",
+ description: r##"detects a generic constant is used in a type without a emitting a warning"##,
+ },
+ Lint {
+ label: "const_item_mutation",
+ description: r##"detects attempts to mutate a `const` item"##,
+ },
+ Lint { label: "dead_code", description: r##"detect unused, unexported items"## },
+ Lint { label: "deprecated", description: r##"detects use of deprecated items"## },
+ Lint {
+ label: "deprecated_in_future",
+ description: r##"detects use of items that will be deprecated in a future version"##,
+ },
+ Lint {
+ label: "deref_into_dyn_supertrait",
+ description: r##"`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future"##,
+ },
+ Lint {
+ label: "deref_nullptr",
+ description: r##"detects when an null pointer is dereferenced"##,
+ },
+ Lint {
+ label: "drop_bounds",
+ description: r##"bounds of the form `T: Drop` are most likely incorrect"##,
+ },
+ Lint {
+ label: "dyn_drop",
+ description: r##"trait objects of the form `dyn Drop` are useless"##,
+ },
+ Lint {
+ label: "elided_lifetimes_in_paths",
+ description: r##"hidden lifetime parameters in types are deprecated"##,
+ },
+ Lint {
+ label: "ellipsis_inclusive_range_patterns",
+ description: r##"`...` range patterns are deprecated"##,
+ },
+ Lint {
+ label: "enum_intrinsics_non_enums",
+ description: r##"detects calls to `core::mem::discriminant` and `core::mem::variant_count` with non-enum types"##,
+ },
+ Lint {
+ label: "explicit_outlives_requirements",
+ description: r##"outlives requirements can be inferred"##,
+ },
+ Lint {
+ label: "exported_private_dependencies",
+ description: r##"public interface leaks type from a private dependency"##,
+ },
+ Lint { label: "forbidden_lint_groups", description: r##"applying forbid to lint-groups"## },
+ Lint {
+ label: "function_item_references",
+ description: r##"suggest casting to a function pointer when attempting to take references to function items"##,
+ },
+ Lint {
+ label: "future_incompatible",
+ description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ },
+ Lint {
+ label: "ill_formed_attribute_input",
+ description: r##"ill-formed attribute inputs that were previously accepted and used in practice"##,
+ },
+ Lint {
+ label: "illegal_floating_point_literal_pattern",
+ description: r##"floating-point literals cannot be used in patterns"##,
+ },
+ Lint {
+ label: "improper_ctypes",
+ description: r##"proper use of libc types in foreign modules"##,
+ },
+ Lint {
+ label: "improper_ctypes_definitions",
+ description: r##"proper use of libc types in foreign item definitions"##,
+ },
+ Lint {
+ label: "incomplete_features",
+ description: r##"incomplete features that may function improperly in some or all cases"##,
+ },
+ Lint { label: "incomplete_include", description: r##"trailing content in included file"## },
+ Lint {
+ label: "indirect_structural_match",
+ description: r##"constant used in pattern contains value of non-structural-match type in a field or a variant"##,
+ },
+ Lint {
+ label: "ineffective_unstable_trait_impl",
+ description: r##"detects `#[unstable]` on stable trait implementations for stable types"##,
+ },
+ Lint {
+ label: "inline_no_sanitize",
+ description: r##"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"##,
+ },
+ Lint {
+ label: "invalid_atomic_ordering",
+ description: r##"usage of invalid atomic ordering in atomic operations and memory fences"##,
+ },
+ Lint {
+ label: "invalid_doc_attributes",
+ description: r##"detects invalid `#[doc(...)]` attributes"##,
+ },
+ Lint {
+ label: "invalid_type_param_default",
+ description: r##"type parameter default erroneously allowed in invalid location"##,
+ },
+ Lint {
+ label: "invalid_value",
+ description: r##"an invalid value is being created (such as a null reference)"##,
+ },
+ Lint {
+ label: "irrefutable_let_patterns",
+ description: r##"detects irrefutable patterns in `if let` and `while let` statements"##,
+ },
+ Lint {
+ label: "keyword_idents",
+ description: r##"detects edition keywords being used as an identifier"##,
+ },
+ Lint { label: "large_assignments", description: r##"detects large moves or copies"## },
+ Lint {
+ label: "late_bound_lifetime_arguments",
+ description: r##"detects generic lifetime arguments in path segments with late bound lifetime parameters"##,
+ },
+ Lint {
+ label: "legacy_derive_helpers",
+ description: r##"detects derive helper attributes that are used before they are introduced"##,
+ },
+ Lint {
+ label: "macro_expanded_macro_exports_accessed_by_absolute_paths",
+ description: r##"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"##,
+ },
+ Lint {
+ label: "macro_use_extern_crate",
+ description: r##"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"##,
+ },
+ Lint {
+ label: "meta_variable_misuse",
+ description: r##"possible meta-variable misuse at macro definition"##,
+ },
+ Lint { label: "missing_abi", description: r##"No declared ABI for extern declaration"## },
+ Lint {
+ label: "missing_copy_implementations",
+ description: r##"detects potentially-forgotten implementations of `Copy`"##,
+ },
+ Lint {
+ label: "missing_debug_implementations",
+ description: r##"detects missing implementations of Debug"##,
+ },
+ Lint {
+ label: "missing_docs",
+ description: r##"detects missing documentation for public members"##,
+ },
+ Lint {
+ label: "missing_fragment_specifier",
+ description: r##"detects missing fragment specifiers in unused `macro_rules!` patterns"##,
+ },
+ Lint {
+ label: "mixed_script_confusables",
+ description: r##"detects Unicode scripts whose mixed script confusables codepoints are solely used"##,
+ },
+ Lint {
+ label: "must_not_suspend",
+ description: r##"use of a `#[must_not_suspend]` value across a yield point"##,
+ },
+ Lint {
+ label: "mutable_borrow_reservation_conflict",
+ description: r##"reservation of a two-phased borrow conflicts with other shared borrows"##,
+ },
+ Lint {
+ label: "mutable_transmutes",
+ description: r##"mutating transmuted &mut T from &T may cause undefined behavior"##,
+ },
+ Lint { label: "named_asm_labels", description: r##"named labels in inline assembly"## },
+ Lint {
+ label: "no_mangle_const_items",
+ description: r##"const items will not have their symbols exported"##,
+ },
+ Lint { label: "no_mangle_generic_items", description: r##"generic items must be mangled"## },
+ Lint { label: "non_ascii_idents", description: r##"detects non-ASCII identifiers"## },
+ Lint {
+ label: "non_camel_case_types",
+ description: r##"types, variants, traits and type parameters should have camel case names"##,
+ },
+ Lint {
+ label: "non_exhaustive_omitted_patterns",
+ description: r##"detect when patterns of types marked `non_exhaustive` are missed"##,
+ },
+ Lint {
+ label: "non_fmt_panics",
+ description: r##"detect single-argument panic!() invocations in which the argument is not a format string"##,
+ },
+ Lint {
+ label: "non_shorthand_field_patterns",
+ description: r##"using `Struct { x: x }` instead of `Struct { x }` in a pattern"##,
+ },
+ Lint {
+ label: "non_snake_case",
+ description: r##"variables, methods, functions, lifetime parameters and modules should have snake case names"##,
+ },
+ Lint {
+ label: "non_upper_case_globals",
+ description: r##"static constants should have uppercase identifiers"##,
+ },
+ Lint {
+ label: "nonstandard_style",
+ description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
+ },
+ Lint {
+ label: "nontrivial_structural_match",
+ description: r##"constant used in pattern of non-structural-match type and the constant's initializer expression contains values of non-structural-match types"##,
+ },
+ Lint {
+ label: "noop_method_call",
+ description: r##"detects the use of well-known noop methods"##,
+ },
+ Lint {
+ label: "order_dependent_trait_objects",
+ description: r##"trait-object types were treated as different depending on marker-trait order"##,
+ },
+ Lint { label: "overflowing_literals", description: r##"literal out of range for its type"## },
+ Lint {
+ label: "overlapping_range_endpoints",
+ description: r##"detects range patterns with overlapping endpoints"##,
+ },
+ Lint { label: "path_statements", description: r##"path statements with no effect"## },
+ Lint {
+ label: "patterns_in_fns_without_body",
+ description: r##"patterns in functions without body were erroneously allowed"##,
+ },
+ Lint {
+ label: "pointer_structural_match",
+ description: r##"pointers are not structural-match"##,
+ },
+ Lint {
+ label: "private_in_public",
+ description: r##"detect private items in public interfaces not caught by the old implementation"##,
+ },
+ Lint {
+ label: "proc_macro_back_compat",
+ description: r##"detects usage of old versions of certain proc-macro crates"##,
+ },
+ Lint {
+ label: "proc_macro_derive_resolution_fallback",
+ description: r##"detects proc macro derives using inaccessible names from parent modules"##,
+ },
+ Lint {
+ label: "pub_use_of_private_extern_crate",
+ description: r##"detect public re-exports of private extern crates"##,
+ },
+ Lint {
+ label: "redundant_semicolons",
+ description: r##"detects unnecessary trailing semicolons"##,
+ },
+ Lint {
+ label: "renamed_and_removed_lints",
+ description: r##"lints that have been renamed or removed"##,
+ },
+ Lint {
+ label: "rust_2018_compatibility",
+ description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ },
+ Lint {
+ label: "rust_2018_idioms",
+ description: r##"lint group for: bare-trait-objects, unused-extern-crates, ellipsis-inclusive-range-patterns, elided-lifetimes-in-paths, explicit-outlives-requirements"##,
+ },
+ Lint {
+ label: "rust_2021_compatibility",
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ },
+ Lint {
+ label: "rust_2021_incompatible_closure_captures",
+ description: r##"detects closures affected by Rust 2021 changes"##,
+ },
+ Lint {
+ label: "rust_2021_incompatible_or_patterns",
+ description: r##"detects usage of old versions of or-patterns"##,
+ },
+ Lint {
+ label: "rust_2021_prefixes_incompatible_syntax",
+ description: r##"identifiers that will be parsed as a prefix in Rust 2021"##,
+ },
+ Lint {
+ label: "rust_2021_prelude_collisions",
+ description: r##"detects the usage of trait methods which are ambiguous with traits added to the prelude in future editions"##,
+ },
+ Lint {
+ label: "semicolon_in_expressions_from_macros",
+ description: r##"trailing semicolon in macro body used as expression"##,
+ },
+ Lint {
+ label: "single_use_lifetimes",
+ description: r##"detects lifetime parameters that are only used once"##,
+ },
+ Lint {
+ label: "soft_unstable",
+ description: r##"a feature gate that doesn't break dependent crates"##,
+ },
+ Lint {
+ label: "stable_features",
+ description: r##"stable features found in `#[feature]` directive"##,
+ },
+ Lint {
+ label: "temporary_cstring_as_ptr",
+ description: r##"detects getting the inner pointer of a temporary `CString`"##,
+ },
+ Lint {
+ label: "text_direction_codepoint_in_comment",
+ description: r##"invisible directionality-changing codepoints in comment"##,
+ },
+ Lint {
+ label: "text_direction_codepoint_in_literal",
+ description: r##"detect special Unicode codepoints that affect the visual representation of text on screen, changing the direction in which text flows"##,
+ },
+ Lint {
+ label: "trivial_bounds",
+        description: r##"these bounds don't depend on any type parameters"##,
+ },
+ Lint {
+ label: "trivial_casts",
+ description: r##"detects trivial casts which could be removed"##,
+ },
+ Lint {
+ label: "trivial_numeric_casts",
+ description: r##"detects trivial casts of numeric types which could be removed"##,
+ },
+ Lint {
+ label: "type_alias_bounds",
+ description: r##"bounds in type aliases are not enforced"##,
+ },
+ Lint {
+ label: "tyvar_behind_raw_pointer",
+ description: r##"raw pointer to an inference variable"##,
+ },
+ Lint {
+ label: "unaligned_references",
+ description: r##"detects unaligned references to fields of packed structs"##,
+ },
+ Lint {
+ label: "uncommon_codepoints",
+ description: r##"detects uncommon Unicode codepoints in identifiers"##,
+ },
+ Lint {
+ label: "unconditional_panic",
+ description: r##"operation will cause a panic at runtime"##,
+ },
+ Lint {
+ label: "unconditional_recursion",
+ description: r##"functions that cannot return without calling themselves"##,
+ },
+ Lint { label: "uninhabited_static", description: r##"uninhabited static"## },
+ Lint {
+ label: "unknown_crate_types",
+ description: r##"unknown crate type found in `#[crate_type]` directive"##,
+ },
+ Lint { label: "unknown_lints", description: r##"unrecognized lint attribute"## },
+ Lint {
+ label: "unnameable_test_items",
+ description: r##"detects an item that cannot be named being marked as `#[test_case]`"##,
+ },
+ Lint { label: "unreachable_code", description: r##"detects unreachable code paths"## },
+ Lint { label: "unreachable_patterns", description: r##"detects unreachable patterns"## },
+ Lint {
+ label: "unreachable_pub",
+ description: r##"`pub` items not reachable from crate root"##,
+ },
+ Lint { label: "unsafe_code", description: r##"usage of `unsafe` code"## },
+ Lint {
+ label: "unsafe_op_in_unsafe_fn",
+ description: r##"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"##,
+ },
+ Lint {
+ label: "unstable_features",
+ description: r##"enabling unstable features (deprecated. do not use)"##,
+ },
+ Lint {
+ label: "unstable_name_collisions",
+ description: r##"detects name collision with an existing but unstable method"##,
+ },
+ Lint {
+ label: "unsupported_calling_conventions",
+ description: r##"use of unsupported calling convention"##,
+ },
+ Lint {
+ label: "unsupported_naked_functions",
+ description: r##"unsupported naked function definitions"##,
+ },
+ Lint {
+ label: "unused",
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ },
+ Lint {
+ label: "unused_allocation",
+ description: r##"detects unnecessary allocations that can be eliminated"##,
+ },
+ Lint {
+ label: "unused_assignments",
+ description: r##"detect assignments that will never be read"##,
+ },
+ Lint {
+ label: "unused_attributes",
+ description: r##"detects attributes that were not used by the compiler"##,
+ },
+ Lint { label: "unused_braces", description: r##"unnecessary braces around an expression"## },
+ Lint {
+ label: "unused_comparisons",
+ description: r##"comparisons made useless by limits of the types involved"##,
+ },
+ Lint {
+ label: "unused_crate_dependencies",
+ description: r##"crate dependencies that are never used"##,
+ },
+ Lint {
+ label: "unused_doc_comments",
+ description: r##"detects doc comments that aren't used by rustdoc"##,
+ },
+ Lint { label: "unused_extern_crates", description: r##"extern crates that are never used"## },
+ Lint {
+ label: "unused_features",
+ description: r##"unused features found in crate-level `#[feature]` directives"##,
+ },
+ Lint {
+ label: "unused_import_braces",
+ description: r##"unnecessary braces around an imported item"##,
+ },
+ Lint { label: "unused_imports", description: r##"imports that are never used"## },
+ Lint { label: "unused_labels", description: r##"detects labels that are never used"## },
+ Lint {
+ label: "unused_lifetimes",
+ description: r##"detects lifetime parameters that are never used"##,
+ },
+ Lint { label: "unused_macros", description: r##"detects macros that were not used"## },
+ Lint {
+ label: "unused_must_use",
+ description: r##"unused result of a type flagged as `#[must_use]`"##,
+ },
+ Lint {
+ label: "unused_mut",
+ description: r##"detect mut variables which don't need to be mutable"##,
+ },
+ Lint {
+ label: "unused_parens",
+ description: r##"`if`, `match`, `while` and `return` do not need parentheses"##,
+ },
+ Lint {
+ label: "unused_qualifications",
+ description: r##"detects unnecessarily qualified names"##,
+ },
+ Lint {
+ label: "unused_results",
+ description: r##"unused result of an expression in a statement"##,
+ },
+ Lint { label: "unused_unsafe", description: r##"unnecessary use of an `unsafe` block"## },
+ Lint {
+ label: "unused_variables",
+ description: r##"detect variables which are not used in any way"##,
+ },
+ Lint {
+ label: "useless_deprecated",
+ description: r##"detects deprecation attributes with no effect"##,
+ },
+ Lint {
+ label: "variant_size_differences",
+ description: r##"detects enums with widely varying variant sizes"##,
+ },
+ Lint {
+ label: "warnings",
+ description: r##"mass-change the level for lints which produce warnings"##,
+ },
+ Lint {
+ label: "warnings",
+ description: r##"lint group for: all lints that are set to issue warnings"##,
+ },
+ Lint {
+ label: "where_clauses_object_safety",
+ description: r##"checks the object safety of where clauses"##,
+ },
+ Lint {
+ label: "while_true",
+ description: r##"suggest using `loop { }` instead of `while true { }`"##,
+ },
+];
+pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
+ LintGroup {
+ lint: Lint {
+ label: "future_incompatible",
+ description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ },
+ children: &[
+ "forbidden_lint_groups",
+ "illegal_floating_point_literal_pattern",
+ "private_in_public",
+ "pub_use_of_private_extern_crate",
+ "invalid_type_param_default",
+ "const_err",
+ "unaligned_references",
+ "patterns_in_fns_without_body",
+ "missing_fragment_specifier",
+ "late_bound_lifetime_arguments",
+ "order_dependent_trait_objects",
+ "coherence_leak_check",
+ "unstable_name_collisions",
+ "where_clauses_object_safety",
+ "proc_macro_derive_resolution_fallback",
+ "macro_expanded_macro_exports_accessed_by_absolute_paths",
+ "ill_formed_attribute_input",
+ "conflicting_repr_hints",
+ "ambiguous_associated_items",
+ "mutable_borrow_reservation_conflict",
+ "indirect_structural_match",
+ "pointer_structural_match",
+ "nontrivial_structural_match",
+ "soft_unstable",
+ "cenum_impl_drop_cast",
+ "const_evaluatable_unchecked",
+ "uninhabited_static",
+ "unsupported_naked_functions",
+ "invalid_doc_attributes",
+ "semicolon_in_expressions_from_macros",
+ "legacy_derive_helpers",
+ "proc_macro_back_compat",
+ "unsupported_calling_conventions",
+ "deref_into_dyn_supertrait",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "nonstandard_style",
+ description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
+ },
+ children: &["non_camel_case_types", "non_snake_case", "non_upper_case_globals"],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "rust_2018_compatibility",
+ description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ },
+ children: &[
+ "keyword_idents",
+ "anonymous_parameters",
+ "tyvar_behind_raw_pointer",
+ "absolute_paths_not_starting_with_crate",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "rust_2018_idioms",
+ description: r##"lint group for: bare-trait-objects, unused-extern-crates, ellipsis-inclusive-range-patterns, elided-lifetimes-in-paths, explicit-outlives-requirements"##,
+ },
+ children: &[
+ "bare_trait_objects",
+ "unused_extern_crates",
+ "ellipsis_inclusive_range_patterns",
+ "elided_lifetimes_in_paths",
+ "explicit_outlives_requirements",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "rust_2021_compatibility",
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ },
+ children: &[
+ "ellipsis_inclusive_range_patterns",
+ "bare_trait_objects",
+ "rust_2021_incompatible_closure_captures",
+ "rust_2021_incompatible_or_patterns",
+ "rust_2021_prelude_collisions",
+ "rust_2021_prefixes_incompatible_syntax",
+ "array_into_iter",
+ "non_fmt_panics",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "unused",
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ },
+ children: &[
+ "unused_imports",
+ "unused_variables",
+ "unused_assignments",
+ "dead_code",
+ "unused_mut",
+ "unreachable_code",
+ "unreachable_patterns",
+ "unused_must_use",
+ "unused_unsafe",
+ "path_statements",
+ "unused_attributes",
+ "unused_macros",
+ "unused_allocation",
+ "unused_doc_comments",
+ "unused_extern_crates",
+ "unused_features",
+ "unused_labels",
+ "unused_parens",
+ "unused_braces",
+ "redundant_semicolons",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "warnings",
+ description: r##"lint group for: all lints that are set to issue warnings"##,
+ },
+ children: &[],
+ },
+];
+
+pub const RUSTDOC_LINTS: &[Lint] = &[
+ Lint {
+ label: "rustdoc::all",
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ },
+ Lint { label: "rustdoc::bare_urls", description: r##"detects URLs that are not hyperlinks"## },
+ Lint {
+ label: "rustdoc::broken_intra_doc_links",
+ description: r##"failures in resolving intra-doc link targets"##,
+ },
+ Lint {
+ label: "rustdoc::invalid_codeblock_attributes",
+ description: r##"codeblock attribute looks a lot like a known one"##,
+ },
+ Lint {
+ label: "rustdoc::invalid_html_tags",
+ description: r##"detects invalid HTML tags in doc comments"##,
+ },
+ Lint {
+ label: "rustdoc::invalid_rust_codeblocks",
+ description: r##"codeblock could not be parsed as valid Rust or is empty"##,
+ },
+ Lint {
+ label: "rustdoc::missing_crate_level_docs",
+ description: r##"detects crates with no crate-level documentation"##,
+ },
+ Lint {
+ label: "rustdoc::missing_doc_code_examples",
+ description: r##"detects publicly-exported items without code samples in their documentation"##,
+ },
+ Lint {
+ label: "rustdoc::private_doc_tests",
+ description: r##"detects code samples in docs of private items not documented by rustdoc"##,
+ },
+ Lint {
+ label: "rustdoc::private_intra_doc_links",
+ description: r##"linking from a public item to a private one"##,
+ },
+];
+pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &[LintGroup {
+ lint: Lint {
+ label: "rustdoc::all",
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ },
+ children: &[
+ "rustdoc::broken_intra_doc_links",
+ "rustdoc::private_intra_doc_links",
+ "rustdoc::missing_doc_code_examples",
+ "rustdoc::private_doc_tests",
+ "rustdoc::invalid_codeblock_attributes",
+ "rustdoc::invalid_rust_codeblocks",
+ "rustdoc::invalid_html_tags",
+ "rustdoc::bare_urls",
+ "rustdoc::missing_crate_level_docs",
+ ],
+}];
+
+pub const FEATURES: &[Lint] = &[
+ Lint {
+ label: "abi_c_cmse_nonsecure_call",
+ description: r##"# `abi_c_cmse_nonsecure_call`
+
+The tracking issue for this feature is: [#81391]
+
+[#81391]: https://github.com/rust-lang/rust/issues/81391
+
+------------------------
+
+The [TrustZone-M
+feature](https://developer.arm.com/documentation/100690/latest/) is available
+for targets with the Armv8-M architecture profile (`thumbv8m` in their target
+name).
+LLVM, the Rust compiler and the linker are providing
+[support](https://developer.arm.com/documentation/ecm0359818/latest/) for the
+TrustZone-M feature.
+
+One of the things provided, with this unstable feature, is the
+`C-cmse-nonsecure-call` function ABI. This ABI is used on function pointers to
+non-secure code to mark a non-secure function call (see [section
+5.5](https://developer.arm.com/documentation/ecm0359818/latest/) for details).
+
+With this ABI, the compiler will do the following to perform the call:
+* save registers needed after the call to Secure memory
+* clear all registers that might contain confidential information
+* clear the Least Significant Bit of the function address
+* branches using the BLXNS instruction
+
+To avoid using the non-secure stack, the compiler will constrain the number and
+type of parameters/return value.
+
+The `extern "C-cmse-nonsecure-call"` ABI is otherwise equivalent to the
+`extern "C"` ABI.
+
+<!-- NOTE(ignore) this example is specific to thumbv8m targets -->
+
+``` rust,ignore
+#![no_std]
+#![feature(abi_c_cmse_nonsecure_call)]
+
+#[no_mangle]
+pub fn call_nonsecure_function(addr: usize) -> u32 {
+ let non_secure_function =
+ unsafe { core::mem::transmute::<usize, extern "C-cmse-nonsecure-call" fn() -> u32>(addr) };
+ non_secure_function()
+}
+```
+
+``` text
+$ rustc --emit asm --crate-type lib --target thumbv8m.main-none-eabi function.rs
+
+call_nonsecure_function:
+ .fnstart
+ .save {r7, lr}
+ push {r7, lr}
+ .setfp r7, sp
+ mov r7, sp
+ .pad #16
+ sub sp, #16
+ str r0, [sp, #12]
+ ldr r0, [sp, #12]
+ str r0, [sp, #8]
+ b .LBB0_1
+.LBB0_1:
+ ldr r0, [sp, #8]
+ push.w {r4, r5, r6, r7, r8, r9, r10, r11}
+ bic r0, r0, #1
+ mov r1, r0
+ mov r2, r0
+ mov r3, r0
+ mov r4, r0
+ mov r5, r0
+ mov r6, r0
+ mov r7, r0
+ mov r8, r0
+ mov r9, r0
+ mov r10, r0
+ mov r11, r0
+ mov r12, r0
+ msr apsr_nzcvq, r0
+ blxns r0
+ pop.w {r4, r5, r6, r7, r8, r9, r10, r11}
+ str r0, [sp, #4]
+ b .LBB0_2
+.LBB0_2:
+ ldr r0, [sp, #4]
+ add sp, #16
+ pop {r7, pc}
+```
+"##,
+ },
+ Lint {
+ label: "abi_msp430_interrupt",
+ description: r##"# `abi_msp430_interrupt`
+
+The tracking issue for this feature is: [#38487]
+
+[#38487]: https://github.com/rust-lang/rust/issues/38487
+
+------------------------
+
+In the MSP430 architecture, interrupt handlers have a special calling
+convention. You can use the `"msp430-interrupt"` ABI to make the compiler apply
+the right calling convention to the interrupt handlers you define.
+
+<!-- NOTE(ignore) this example is specific to the msp430 target -->
+
+``` rust,ignore
+#![feature(abi_msp430_interrupt)]
+#![no_std]
+
+// Place the interrupt handler at the appropriate memory address
+// (Alternatively, you can use `#[used]` and remove `pub` and `#[no_mangle]`)
+#[link_section = "__interrupt_vector_10"]
+#[no_mangle]
+pub static TIM0_VECTOR: extern "msp430-interrupt" fn() = tim0;
+
+// The interrupt handler
+extern "msp430-interrupt" fn tim0() {
+ // ..
+}
+```
+
+``` text
+$ msp430-elf-objdump -CD ./target/msp430/release/app
+Disassembly of section __interrupt_vector_10:
+
+0000fff2 <TIM0_VECTOR>:
+ fff2: 00 c0 interrupt service routine at 0xc000
+
+Disassembly of section .text:
+
+0000c000 <int::tim0>:
+ c000: 00 13 reti
+```
+"##,
+ },
+ Lint {
+ label: "abi_ptx",
+ description: r##"# `abi_ptx`
+
+The tracking issue for this feature is: [#38788]
+
+[#38788]: https://github.com/rust-lang/rust/issues/38788
+
+------------------------
+
+When emitting PTX code, all vanilla Rust functions (`fn`) get translated to
+"device" functions. These functions are *not* callable from the host via the
+CUDA API so a crate with only device functions is not too useful!
+
+OTOH, "global" functions *can* be called by the host; you can think of them
+as the real public API of your crate. To produce a global function use the
+`"ptx-kernel"` ABI.
+
+<!-- NOTE(ignore) this example is specific to the nvptx targets -->
+
+``` rust,ignore
+#![feature(abi_ptx)]
+#![no_std]
+
+pub unsafe extern "ptx-kernel" fn global_function() {
+ device_function();
+}
+
+pub fn device_function() {
+ // ..
+}
+```
+
+``` text
+$ xargo rustc --target nvptx64-nvidia-cuda --release -- --emit=asm
+
+$ cat $(find -name '*.s')
+//
+// Generated by LLVM NVPTX Back-End
+//
+
+.version 3.2
+.target sm_20
+.address_size 64
+
+ // .globl _ZN6kernel15global_function17h46111ebe6516b382E
+
+.visible .entry _ZN6kernel15global_function17h46111ebe6516b382E()
+{
+
+
+ ret;
+}
+
+ // .globl _ZN6kernel15device_function17hd6a0e4993bbf3f78E
+.visible .func _ZN6kernel15device_function17hd6a0e4993bbf3f78E()
+{
+
+
+ ret;
+}
+```
+"##,
+ },
+ Lint {
+ label: "abi_thiscall",
+ description: r##"# `abi_thiscall`
+
+The tracking issue for this feature is: [#42202]
+
+[#42202]: https://github.com/rust-lang/rust/issues/42202
+
+------------------------
+
+The MSVC ABI on x86 Windows uses the `thiscall` calling convention for C++
+instance methods by default; it is identical to the usual (C) calling
+convention on x86 Windows except that the first parameter of the method,
+the `this` pointer, is passed in the ECX register.
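+
+A minimal sketch of declaring such a method from Rust (the `Widget` type and
+`widget_get_id` symbol are hypothetical):
+
+<!-- NOTE(ignore) this example is specific to x86 Windows targets -->
+
+``` rust,ignore
+#![feature(abi_thiscall)]
+
+#[repr(C)]
+pub struct Widget { id: u32 }
+
+extern "thiscall" {
+    // The `this` pointer is passed in the ECX register, matching the
+    // MSVC C++ instance-method ABI.
+    fn widget_get_id(this: *const Widget) -> u32;
+}
+```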
+"##,
+ },
+ Lint {
+ label: "allocator_api",
+ description: r##"# `allocator_api`
+
+The tracking issue for this feature is [#32838]
+
+[#32838]: https://github.com/rust-lang/rust/issues/32838
+
+------------------------
+
+Sometimes you want the memory for one collection to use a different
+allocator than the memory for another collection. In this case,
+replacing the global allocator is not a workable option. Instead,
+you need to pass in an instance of an `AllocRef` to each collection
+for which you want a custom allocator.
+
+TBD
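+
+A minimal sketch of the idea (assuming a nightly toolchain; the `*_in`
+constructors take the allocator instance as an extra argument):
+
+```rust
+#![feature(allocator_api)]
+
+use std::alloc::System;
+
+fn main() {
+    // This `Vec` draws its memory from the `System` allocator rather than
+    // the global allocator.
+    let mut v = Vec::new_in(System);
+    v.push(1);
+    assert_eq!(v.pop(), Some(1));
+}
+```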
+"##,
+ },
+ Lint {
+ label: "allocator_internals",
+ description: r##"# `allocator_internals`
+
+This feature does not have a tracking issue, it is an unstable implementation
+detail of the `global_allocator` feature not intended for use outside the
+compiler.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "arbitrary_enum_discriminant",
+ description: r##"# `arbitrary_enum_discriminant`
+
+The tracking issue for this feature is: [#60553]
+
+[#60553]: https://github.com/rust-lang/rust/issues/60553
+
+------------------------
+
+The `arbitrary_enum_discriminant` feature permits tuple-like and
+struct-like enum variants with `#[repr(<int-type>)]` to have explicit discriminants.
+
+## Examples
+
+```rust
+#![feature(arbitrary_enum_discriminant)]
+
+#[allow(dead_code)]
+#[repr(u8)]
+enum Enum {
+ Unit = 3,
+ Tuple(u16) = 2,
+ Struct {
+ a: u8,
+ b: u16,
+ } = 1,
+}
+
+impl Enum {
+ fn tag(&self) -> u8 {
+ unsafe { *(self as *const Self as *const u8) }
+ }
+}
+
+assert_eq!(3, Enum::Unit.tag());
+assert_eq!(2, Enum::Tuple(5).tag());
+assert_eq!(1, Enum::Struct{a: 7, b: 11}.tag());
+```
+"##,
+ },
+ Lint {
+ label: "asm_const",
+ description: r##"# `asm_const`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature adds a `const <expr>` operand type to `asm!` and `global_asm!`.
+- `<expr>` must be an integer constant expression.
+- The value of the expression is formatted as a string and substituted directly into the asm template string.
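+
+A small sketch (x86_64-specific) showing how a `const` operand is spliced into
+the template as literal text:
+
+<!-- NOTE(ignore) this example is specific to x86_64 targets -->
+
+``` rust,ignore
+#![feature(asm_const)]
+
+use std::arch::asm;
+
+fn five() -> u64 {
+    let out: u64;
+    // `const 5` is formatted as the string "5", so the assembled instruction
+    // is effectively `mov <reg>, 5`.
+    unsafe { asm!("mov {}, {}", out(reg) out, const 5) };
+    out
+}
+```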
+"##,
+ },
+ Lint {
+ label: "asm_experimental_arch",
+ description: r##"# `asm_experimental_arch`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature tracks `asm!` and `global_asm!` support for the following architectures:
+- NVPTX
+- PowerPC
+- Hexagon
+- MIPS32r2 and MIPS64r2
+- wasm32
+- BPF
+- SPIR-V
+- AVR
+
+## Register classes
+
+| Architecture | Register class | Registers | LLVM constraint code |
+| ------------ | -------------- | ---------------------------------- | -------------------- |
+| MIPS | `reg` | `$[2-25]` | `r` |
+| MIPS | `freg` | `$f[0-31]` | `f` |
+| NVPTX | `reg16` | None\* | `h` |
+| NVPTX | `reg32` | None\* | `r` |
+| NVPTX | `reg64` | None\* | `l` |
+| Hexagon | `reg` | `r[0-28]` | `r` |
+| PowerPC | `reg` | `r[0-31]` | `r` |
+| PowerPC | `reg_nonzero` | `r[1-31]` | `b` |
+| PowerPC | `freg` | `f[0-31]` | `f` |
+| PowerPC | `cr` | `cr[0-7]`, `cr` | Only clobbers |
+| PowerPC | `xer` | `xer` | Only clobbers |
+| wasm32 | `local` | None\* | `r` |
+| BPF | `reg` | `r[0-10]` | `r` |
+| BPF | `wreg` | `w[0-10]` | `w` |
+| AVR | `reg` | `r[2-25]`, `XH`, `XL`, `ZH`, `ZL` | `r` |
+| AVR | `reg_upper` | `r[16-25]`, `XH`, `XL`, `ZH`, `ZL` | `d` |
+| AVR | `reg_pair` | `r3r2` .. `r25r24`, `X`, `Z` | `r` |
+| AVR | `reg_iw` | `r25r24`, `X`, `Z` | `w` |
+| AVR | `reg_ptr` | `X`, `Z` | `e` |
+
+> **Notes**:
+> - NVPTX doesn't have a fixed register set, so named registers are not supported.
+>
+> - WebAssembly doesn't have registers, so named registers are not supported.
+
+# Register class supported types
+
+| Architecture | Register class | Target feature | Allowed types |
+| ------------ | ------------------------------- | -------------- | --------------------------------------- |
+| MIPS32 | `reg` | None | `i8`, `i16`, `i32`, `f32` |
+| MIPS32 | `freg` | None | `f32`, `f64` |
+| MIPS64 | `reg` | None | `i8`, `i16`, `i32`, `i64`, `f32`, `f64` |
+| MIPS64 | `freg` | None | `f32`, `f64` |
+| NVPTX | `reg16` | None | `i8`, `i16` |
+| NVPTX | `reg32` | None | `i8`, `i16`, `i32`, `f32` |
+| NVPTX | `reg64` | None | `i8`, `i16`, `i32`, `f32`, `i64`, `f64` |
+| Hexagon | `reg` | None | `i8`, `i16`, `i32`, `f32` |
+| PowerPC | `reg` | None | `i8`, `i16`, `i32` |
+| PowerPC | `reg_nonzero` | None | `i8`, `i16`, `i32` |
+| PowerPC | `freg` | None | `f32`, `f64` |
+| PowerPC | `cr` | N/A | Only clobbers |
+| PowerPC | `xer` | N/A | Only clobbers |
+| wasm32 | `local` | None | `i8` `i16` `i32` `i64` `f32` `f64` |
+| BPF | `reg` | None | `i8` `i16` `i32` `i64` |
+| BPF | `wreg` | `alu32` | `i8` `i16` `i32` |
+| AVR | `reg`, `reg_upper` | None | `i8` |
+| AVR | `reg_pair`, `reg_iw`, `reg_ptr` | None | `i16` |
+
+## Register aliases
+
+| Architecture | Base register | Aliases |
+| ------------ | ------------- | --------- |
+| Hexagon | `r29` | `sp` |
+| Hexagon | `r30` | `fr` |
+| Hexagon | `r31` | `lr` |
+| BPF | `r[0-10]` | `w[0-10]` |
+| AVR | `XH` | `r27` |
+| AVR | `XL` | `r26` |
+| AVR | `ZH` | `r31` |
+| AVR | `ZL` | `r30` |
+
+## Unsupported registers
+
+| Architecture | Unsupported register | Reason |
+| ------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| All | `sp` | The stack pointer must be restored to its original value at the end of an asm code block. |
+| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR) | The frame pointer cannot be used as an input or output. |
+| All | `r19` (Hexagon) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. |
+| MIPS | `$0` or `$zero` | This is a constant zero register which can't be modified. |
+| MIPS | `$1` or `$at` | Reserved for assembler. |
+| MIPS | `$26`/`$k0`, `$27`/`$k1` | OS-reserved registers. |
+| MIPS | `$28`/`$gp` | Global pointer cannot be used as inputs or outputs. |
+| MIPS | `$ra` | Return address cannot be used as inputs or outputs. |
+| Hexagon | `lr` | This is the link register which cannot be used as an input or output. |
+| AVR | `r0`, `r1`, `r1r0` | Due to an issue in LLVM, the `r0` and `r1` registers cannot be used as inputs or outputs. If modified, they must be restored to their original values before the end of the block. |
+
+## Template modifiers
+
+| Architecture | Register class | Modifier | Example output | LLVM modifier |
+| ------------ | -------------- | -------- | -------------- | ------------- |
+| MIPS | `reg` | None | `$2` | None |
+| MIPS | `freg` | None | `$f0` | None |
+| NVPTX | `reg16` | None | `rs0` | None |
+| NVPTX | `reg32` | None | `r0` | None |
+| NVPTX | `reg64` | None | `rd0` | None |
+| Hexagon | `reg` | None | `r0` | None |
+| PowerPC | `reg` | None | `0` | None |
+| PowerPC | `reg_nonzero` | None | `3` | `b` |
+| PowerPC | `freg` | None | `0` | None |
+
+# Flags covered by `preserves_flags`
+
+These flags registers must be restored upon exiting the asm block if the `preserves_flags` option is set:
+- AVR
+ - The status register `SREG`.
+"##,
+ },
+ Lint {
+ label: "asm_sym",
+ description: r##"# `asm_sym`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature adds a `sym <path>` operand type to `asm!` and `global_asm!`.
+- `<path>` must refer to a `fn` or `static`.
+- A mangled symbol name referring to the item is substituted into the asm template string.
+- The substituted string does not include any modifiers (e.g. GOT, PLT, relocations, etc).
+- `<path>` is allowed to point to a `#[thread_local]` static, in which case the asm code can combine the symbol with relocations (e.g. `@plt`, `@TPOFF`) to read from thread-local data.
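+
+A small sketch (x86_64-specific) that calls a Rust function through its
+mangled symbol name:
+
+<!-- NOTE(ignore) this example is specific to x86_64 targets -->
+
+``` rust,ignore
+#![feature(asm_sym)]
+
+use std::arch::asm;
+
+extern "C" fn callee() {}
+
+fn caller() {
+    unsafe {
+        // `sym callee` substitutes the mangled symbol name of `callee` into
+        // the template; `clobber_abi("C")` declares the call's clobbers.
+        asm!("call {}", sym callee, clobber_abi("C"));
+    }
+}
+```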
+"##,
+ },
+ Lint {
+ label: "asm_unwind",
+ description: r##"# `asm_unwind`
+
+The tracking issue for this feature is: [#72016]
+
+[#72016]: https://github.com/rust-lang/rust/issues/72016
+
+------------------------
+
+This feature adds a `may_unwind` option to `asm!` which allows an `asm` block to unwind the stack and be part of the stack unwinding process. This option is currently only supported by the LLVM backend.
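+
+A minimal sketch of the syntax (assuming a target where `asm!` is supported;
+the `nop` itself cannot unwind, the point is only the option):
+
+```rust
+#![feature(asm_unwind)]
+
+use std::arch::asm;
+
+fn main() {
+    unsafe {
+        // `options(may_unwind)` marks this block as potentially taking part
+        // in stack unwinding.
+        asm!("nop", options(may_unwind));
+    }
+}
+```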
+"##,
+ },
+ Lint {
+ label: "auto_traits",
+ description: r##"# `auto_traits`
+
+The tracking issue for this feature is [#13231]
+
+[#13231]: https://github.com/rust-lang/rust/issues/13231
+
+----
+
+The `auto_traits` feature gate allows you to define auto traits.
+
+Auto traits, like [`Send`] or [`Sync`] in the standard library, are marker traits
+that are automatically implemented for every type, unless the type, or a type it contains,
+has explicitly opted out via a negative impl. (Negative impls are separately controlled
+by the `negative_impls` feature.)
+
+[`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
+[`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+
+```rust,ignore (partial-example)
+impl !Trait for Type {}
+```
+
+Example:
+
+```rust
+#![feature(negative_impls)]
+#![feature(auto_traits)]
+
+auto trait Valid {}
+
+struct True;
+struct False;
+
+impl !Valid for False {}
+
+struct MaybeValid<T>(T);
+
+fn must_be_valid<T: Valid>(_t: T) { }
+
+fn main() {
+ // works
+ must_be_valid( MaybeValid(True) );
+
+ // compiler error - trait bound not satisfied
+ // must_be_valid( MaybeValid(False) );
+}
+```
+
+## Automatic trait implementations
+
+When a type is declared as an `auto trait`, we will automatically
+create impls for every struct/enum/union, unless an explicit impl is
+provided. These automatic impls contain a where clause for each field
+of the form `T: AutoTrait`, where `T` is the type of the field and
+`AutoTrait` is the auto trait in question. As an example, consider the
+struct `List` and the auto trait `Send`:
+
+```rust
+struct List<T> {
+ data: T,
+ next: Option<Box<List<T>>>,
+}
+```
+
+Presuming that there is no explicit impl of `Send` for `List`, the
+compiler will supply an automatic impl of the form:
+
+```rust
+struct List<T> {
+ data: T,
+ next: Option<Box<List<T>>>,
+}
+
+unsafe impl<T> Send for List<T>
+where
+ T: Send, // from the field `data`
+ Option<Box<List<T>>>: Send, // from the field `next`
+{ }
+```
+
+Explicit impls may be either positive or negative. They take the form:
+
+```rust,ignore (partial-example)
+impl<...> AutoTrait for StructName<..> { }
+impl<...> !AutoTrait for StructName<..> { }
+```
+
+## Coinduction: Auto traits permit cyclic matching
+
+Unlike ordinary trait matching, auto traits are **coinductive**. This
+means, in short, that cycles which occur in trait matching are
+considered ok. As an example, consider the recursive struct `List`
+introduced in the previous section. In attempting to determine whether
+`List: Send`, we would wind up in a cycle: to apply the impl, we must
+show that `Option<Box<List>>: Send`, which will in turn require
+`Box<List>: Send` and then finally `List: Send` again. Under ordinary
+trait matching, this cycle would be an error, but for an auto trait it
+is considered a successful match.
+
+## Items
+
+Auto traits cannot have any trait items, such as methods or associated types. This ensures that we can generate default implementations.
+
+## Supertraits
+
+Auto traits cannot have supertraits. This is for soundness reasons, as the interaction of coinduction with implied bounds is difficult to reconcile.
+"##,
+ },
+ Lint {
+ label: "box_patterns",
+ description: r##"# `box_patterns`
+
+The tracking issue for this feature is: [#29641]
+
+[#29641]: https://github.com/rust-lang/rust/issues/29641
+
+See also [`box_syntax`](box-syntax.md)
+
+------------------------
+
+Box patterns let you match on `Box<T>`s:
+
+
+```rust
+#![feature(box_patterns)]
+
+fn main() {
+ let b = Some(Box::new(5));
+ match b {
+ Some(box n) if n < 0 => {
+ println!("Box contains negative number {}", n);
+ },
+ Some(box n) if n >= 0 => {
+ println!("Box contains non-negative number {}", n);
+ },
+ None => {
+ println!("No box");
+ },
+ _ => unreachable!()
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "box_syntax",
+ description: r##"# `box_syntax`
+
+The tracking issue for this feature is: [#49733]
+
+[#49733]: https://github.com/rust-lang/rust/issues/49733
+
+See also [`box_patterns`](box-patterns.md)
+
+------------------------
+
+Currently the only stable way to create a `Box` is via the `Box::new` method.
+Also it is not possible in stable Rust to destructure a `Box` in a match
+pattern. The unstable `box` keyword can be used to create a `Box`. An example
+usage would be:
+
+```rust
+#![feature(box_syntax)]
+
+fn main() {
+ let b = box 5;
+}
+```
+"##,
+ },
+ Lint {
+ label: "c_unwind",
+ description: r##"# `c_unwind`
+
+The tracking issue for this feature is: [#74990]
+
+[#74990]: https://github.com/rust-lang/rust/issues/74990
+
+------------------------
+
+Introduces four new ABI strings: "C-unwind", "stdcall-unwind",
+"thiscall-unwind", and "system-unwind". These enable unwinding from other
+languages (such as C++) into Rust frames and from Rust into other languages.
+
+See [RFC 2945] for more information.
+
+[RFC 2945]: https://github.com/rust-lang/rfcs/blob/master/text/2945-c-unwind-abi.md
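+
+A minimal sketch of declaring a foreign function that may unwind across the
+FFI boundary (`cpp_may_throw` is a hypothetical C++ function):
+
+```rust
+#![feature(c_unwind)]
+
+extern "C-unwind" {
+    // An exception thrown by this call may propagate into the Rust caller
+    // instead of being undefined behavior.
+    fn cpp_may_throw();
+}
+```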
+"##,
+ },
+ Lint {
+ label: "c_variadic",
+ description: r##"# `c_variadic`
+
+The tracking issue for this feature is: [#44930]
+
+[#44930]: https://github.com/rust-lang/rust/issues/44930
+
+------------------------
+
+The `c_variadic` language feature enables C-variadic functions to be
+defined in Rust. They may be called both from within Rust and via FFI.
+
+## Examples
+
+```rust
+#![feature(c_variadic)]
+
+pub unsafe extern "C" fn add(n: usize, mut args: ...) -> usize {
+ let mut sum = 0;
+ for _ in 0..n {
+ sum += args.arg::<usize>();
+ }
+ sum
+}
+```
+"##,
+ },
+ Lint {
+ label: "c_variadic",
+ description: r##"# `c_variadic`
+
+The tracking issue for this feature is: [#44930]
+
+[#44930]: https://github.com/rust-lang/rust/issues/44930
+
+------------------------
+
+The `c_variadic` library feature exposes the `VaList` structure,
+Rust's analogue of C's `va_list` type.
+
+## Examples
+
+```rust
+#![feature(c_variadic)]
+
+use std::ffi::VaList;
+
+pub unsafe extern "C" fn vadd(n: usize, mut args: VaList) -> usize {
+ let mut sum = 0;
+ for _ in 0..n {
+ sum += args.arg::<usize>();
+ }
+ sum
+}
+```
+"##,
+ },
+ Lint {
+ label: "c_void_variant",
+ description: r##"# `c_void_variant`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_panic",
+ description: r##"# `cfg_panic`
+
+The tracking issue for this feature is: [#77443]
+
+[#77443]: https://github.com/rust-lang/rust/issues/77443
+
+------------------------
+
+The `cfg_panic` feature makes it possible to execute different code
+depending on the panic strategy.
+
+Possible values at the moment are `"unwind"` or `"abort"`, although
+it is possible that new panic strategies may be added to Rust in the
+future.
+
+## Examples
+
+```rust
+#![feature(cfg_panic)]
+
+#[cfg(panic = "unwind")]
+fn a() {
+ // ...
+}
+
+#[cfg(not(panic = "unwind"))]
+fn a() {
+ // ...
+}
+
+fn b() {
+ if cfg!(panic = "abort") {
+ // ...
+ } else {
+ // ...
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "cfg_sanitize",
+ description: r##"# `cfg_sanitize`
+
+The tracking issue for this feature is: [#39699]
+
+[#39699]: https://github.com/rust-lang/rust/issues/39699
+
+------------------------
+
+The `cfg_sanitize` feature makes it possible to execute different code
+depending on whether a particular sanitizer is enabled or not.
+
+## Examples
+
+```rust
+#![feature(cfg_sanitize)]
+
+#[cfg(sanitize = "thread")]
+fn a() {
+ // ...
+}
+
+#[cfg(not(sanitize = "thread"))]
+fn a() {
+ // ...
+}
+
+fn b() {
+ if cfg!(sanitize = "leak") {
+ // ...
+ } else {
+ // ...
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "cfg_version",
+ description: r##"# `cfg_version`
+
+The tracking issue for this feature is: [#64796]
+
+[#64796]: https://github.com/rust-lang/rust/issues/64796
+
+------------------------
+
+The `cfg_version` feature makes it possible to execute different code
+depending on the compiler version. The condition evaluates to true if the
+compiler version is greater than or equal to the specified version.
+
+## Examples
+
+```rust
+#![feature(cfg_version)]
+
+#[cfg(version("1.42"))] // 1.42 and above
+fn a() {
+ // ...
+}
+
+#[cfg(not(version("1.42")))] // 1.41 and below
+fn a() {
+ // ...
+}
+
+fn b() {
+ if cfg!(version("1.42")) {
+ // ...
+ } else {
+ // ...
+ }
+}
+```
+"##,
+ },
+ Lint {
+ label: "char_error_internals",
+ description: r##"# `char_error_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "closure_track_caller",
+ description: r##"# `closure_track_caller`
+
+The tracking issue for this feature is: [#87417]
+
+[#87417]: https://github.com/rust-lang/rust/issues/87417
+
+------------------------
+
+Allows using the `#[track_caller]` attribute on closures and generators.
+Calls made to the closure or generator will have caller information
+available through `std::panic::Location::caller()`, just like using
+`#[track_caller]` on a function.
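+
+A minimal sketch (placing an attribute on a closure expression additionally
+requires the `stmt_expr_attributes` feature in this example):
+
+```rust
+#![feature(closure_track_caller)]
+#![feature(stmt_expr_attributes)]
+
+fn main() {
+    // The closure reports the location of its caller, not of its own body.
+    let whereami = #[track_caller] || std::panic::Location::caller();
+    println!("called from {}", whereami());
+}
+```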
+"##,
+ },
+ Lint {
+ label: "cmse_nonsecure_entry",
+ description: r##"# `cmse_nonsecure_entry`
+
+The tracking issue for this feature is: [#75835]
+
+[#75835]: https://github.com/rust-lang/rust/issues/75835
+
+------------------------
+
+The [TrustZone-M
+feature](https://developer.arm.com/documentation/100690/latest/) is available
+for targets with the Armv8-M architecture profile (`thumbv8m` in their target
+name).
+LLVM, the Rust compiler and the linker are providing
+[support](https://developer.arm.com/documentation/ecm0359818/latest/) for the
+TrustZone-M feature.
+
+One of the things provided, with this unstable feature, is the
+`cmse_nonsecure_entry` attribute. This attribute marks a Secure function as an
+entry function (see [section
+5.4](https://developer.arm.com/documentation/ecm0359818/latest/) for details).
+With this attribute, the compiler will do the following:
+* add a special symbol on the function which is the `__acle_se_` prefix and the
+ standard function name
+* constrain the number of parameters to avoid using the Non-Secure stack
+* before returning from the function, clear registers that might contain Secure
+ information
+* use the `BXNS` instruction to return
+
+Because the stack cannot be used to pass parameters, there will be compilation
+errors if:
+* the total size of all parameters is too big (for example more than four
+  32-bit integers)
+* the entry function is not using a C ABI
+
+The special symbol `__acle_se_` will be used by the linker to generate a secure
+gateway veneer.
+
+<!-- NOTE(ignore) this example is specific to thumbv8m targets -->
+
+``` rust,ignore
+#![feature(cmse_nonsecure_entry)]
+
+#[no_mangle]
+#[cmse_nonsecure_entry]
+pub extern "C" fn entry_function(input: u32) -> u32 {
+ input + 6
+}
+```
+
+``` text
+$ rustc --emit obj --crate-type lib --target thumbv8m.main-none-eabi function.rs
+$ arm-none-eabi-objdump -D function.o
+
+00000000 <entry_function>:
+ 0: b580 push {r7, lr}
+ 2: 466f mov r7, sp
+ 4: b082 sub sp, #8
+ 6: 9001 str r0, [sp, #4]
+ 8: 1d81 adds r1, r0, #6
+ a: 460a mov r2, r1
+ c: 4281 cmp r1, r0
+ e: 9200 str r2, [sp, #0]
+ 10: d30b bcc.n 2a <entry_function+0x2a>
+ 12: e7ff b.n 14 <entry_function+0x14>
+ 14: 9800 ldr r0, [sp, #0]
+ 16: b002 add sp, #8
+ 18: e8bd 4080 ldmia.w sp!, {r7, lr}
+ 1c: 4671 mov r1, lr
+ 1e: 4672 mov r2, lr
+ 20: 4673 mov r3, lr
+ 22: 46f4 mov ip, lr
+ 24: f38e 8800 msr CPSR_f, lr
+ 28: 4774 bxns lr
+ 2a: f240 0000 movw r0, #0
+ 2e: f2c0 0000 movt r0, #0
+ 32: f240 0200 movw r2, #0
+ 36: f2c0 0200 movt r2, #0
+ 3a: 211c movs r1, #28
+ 3c: f7ff fffe bl 0 <_ZN4core9panicking5panic17h5c028258ca2fb3f5E>
+ 40: defe udf #254 ; 0xfe
+```
+"##,
+ },
+ Lint {
+ label: "compiler_builtins",
+ description: r##"# `compiler_builtins`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "concat_idents",
+ description: r##"# `concat_idents`
+
+The tracking issue for this feature is: [#29599]
+
+[#29599]: https://github.com/rust-lang/rust/issues/29599
+
+------------------------
+
+The `concat_idents` feature adds a macro for concatenating multiple identifiers
+into one identifier.
+
+## Examples
+
+```rust
+#![feature(concat_idents)]
+
+fn main() {
+ fn foobar() -> u32 { 23 }
+ let f = concat_idents!(foo, bar);
+ assert_eq!(f(), 23);
+}
+```
+"##,
+ },
+ Lint {
+ label: "const_eval_limit",
+ description: r##"# `const_eval_limit`
+
+The tracking issue for this feature is: [#67217]
+
+[#67217]: https://github.com/rust-lang/rust/issues/67217
+
+The `const_eval_limit` attribute allows limiting the number of evaluation steps CTFE undertakes to evaluate a `const fn`.
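+
+A minimal sketch of raising the limit for a crate whose constants need many
+CTFE steps (the constant below is only illustrative):
+
+```rust
+#![feature(const_eval_limit)]
+#![const_eval_limit = "100000000"]
+
+const fn count_down(mut n: u32) -> u32 {
+    while n > 0 {
+        n -= 1;
+    }
+    n
+}
+
+// A long-running evaluation like this could otherwise hit the default step
+// limit and fail to compile.
+const ZERO: u32 = count_down(1_000_000);
+
+fn main() {
+    assert_eq!(ZERO, 0);
+}
+```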
+"##,
+ },
+ Lint {
+ label: "core_intrinsics",
+ description: r##"# `core_intrinsics`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "core_panic",
+ description: r##"# `core_panic`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "core_private_bignum",
+ description: r##"# `core_private_bignum`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "core_private_diy_float",
+ description: r##"# `core_private_diy_float`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "crate_visibility_modifier",
+ description: r##"# `crate_visibility_modifier`
+
+The tracking issue for this feature is: [#53120]
+
+[#53120]: https://github.com/rust-lang/rust/issues/53120
+
+-----
+
+The `crate_visibility_modifier` feature allows the `crate` keyword to be used
+as a visibility modifier synonymous to `pub(crate)`, indicating that a type
+(function, _&c._) is to be visible to the entire enclosing crate, but not to
+other crates.
+
+```rust
+#![feature(crate_visibility_modifier)]
+
+crate struct Foo {
+ bar: usize,
+}
+```
+"##,
+ },
+ Lint {
+ label: "custom_test_frameworks",
+ description: r##"# `custom_test_frameworks`
+
+The tracking issue for this feature is: [#50297]
+
+[#50297]: https://github.com/rust-lang/rust/issues/50297
+
+------------------------
+
+The `custom_test_frameworks` feature allows the use of `#[test_case]` and `#![test_runner]`.
+Any function, const, or static can be annotated with `#[test_case]` causing it to be aggregated (like `#[test]`)
+and be passed to the test runner determined by the `#![test_runner]` crate attribute.
+
+```rust
+#![feature(custom_test_frameworks)]
+#![test_runner(my_runner)]
+
+fn my_runner(tests: &[&i32]) {
+ for t in tests {
+ if **t == 0 {
+ println!("PASSED");
+ } else {
+ println!("FAILED");
+ }
+ }
+}
+
+#[test_case]
+const WILL_PASS: i32 = 0;
+
+#[test_case]
+const WILL_FAIL: i32 = 4;
+```
+"##,
+ },
+ Lint {
+ label: "dec2flt",
+ description: r##"# `dec2flt`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "default_free_fn",
+ description: r##"# `default_free_fn`
+
+The tracking issue for this feature is: [#73014]
+
+[#73014]: https://github.com/rust-lang/rust/issues/73014
+
+------------------------
+
+Adds a free `default()` function to the `std::default` module. This function
+just forwards to [`Default::default()`], but may remove repetition of the word
+"default" from the call site.
+
+[`Default::default()`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html#tymethod.default
+
+Here is an example:
+
+```rust
+#![feature(default_free_fn)]
+use std::default::default;
+
+#[derive(Default)]
+struct AppConfig {
+ foo: FooConfig,
+ bar: BarConfig,
+}
+
+#[derive(Default)]
+struct FooConfig {
+ foo: i32,
+}
+
+#[derive(Default)]
+struct BarConfig {
+ bar: f32,
+ baz: u8,
+}
+
+fn main() {
+ let options = AppConfig {
+ foo: default(),
+ bar: BarConfig {
+ bar: 10.1,
+ ..default()
+ },
+ };
+}
+```
+"##,
+ },
+ Lint {
+ label: "derive_clone_copy",
+ description: r##"# `derive_clone_copy`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "derive_eq",
+ description: r##"# `derive_eq`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "doc_cfg",
+ description: r##"# `doc_cfg`
+
+The tracking issue for this feature is: [#43781]
+
+------
+
+The `doc_cfg` feature allows an API to be documented as only available on some specific platforms.
+This attribute has two effects:
+
+1. In the annotated item's documentation, there will be a message saying "This is supported on
+ (platform) only".
+
+2. The item's doc-tests will only run on the specific platform.
+
+In addition to allowing the use of the `#[doc(cfg)]` attribute, this feature enables the use of a
+special conditional compilation flag, `#[cfg(doc)]`, set whenever building documentation on your
+crate.
+
+This feature was introduced as part of PR [#43348] to allow the platform-specific parts of the
+standard library to be documented.
+
+```rust
+#![feature(doc_cfg)]
+
+#[cfg(any(windows, doc))]
+#[doc(cfg(windows))]
+/// The application's icon in the notification area (a.k.a. system tray).
+///
+/// # Examples
+///
+/// ```no_run
+/// extern crate my_awesome_ui_library;
+/// use my_awesome_ui_library::current_app;
+/// use my_awesome_ui_library::windows::notification;
+///
+/// let icon = current_app().get::<notification::Icon>();
+/// icon.show();
+/// icon.show_message("Hello");
+/// ```
+pub struct Icon {
+ // ...
+}
+```
+
+[#43781]: https://github.com/rust-lang/rust/issues/43781
+[#43348]: https://github.com/rust-lang/rust/issues/43348
+"##,
+ },
+ Lint {
+ label: "doc_masked",
+ description: r##"# `doc_masked`
+
+The tracking issue for this feature is: [#44027]
+
+-----
+
+The `doc_masked` feature allows a crate to exclude types from a given crate from appearing in lists
+of trait implementations. The specifics of the feature are as follows:
+
+1. When rustdoc encounters an `extern crate` statement annotated with a `#[doc(masked)]` attribute,
+ it marks the crate as being masked.
+
+2. When listing traits a given type implements, rustdoc ensures that traits from masked crates are
+ not emitted into the documentation.
+
+3. When listing types that implement a given trait, rustdoc ensures that types from masked crates
+ are not emitted into the documentation.
+
+This feature was introduced in PR [#44026] to ensure that compiler-internal and
+implementation-specific types and traits were not included in the standard library's documentation.
+Such types would introduce broken links into the documentation.
+
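+A minimal sketch (`internal_ffi` is a hypothetical dependency whose types
+should not appear in trait-implementation listings):
+
+```rust,ignore
+#![feature(doc_masked)]
+
+// Trait impls that mention types from this crate are omitted from the
+// generated documentation.
+#[doc(masked)]
+extern crate internal_ffi;
+```
+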
+[#44026]: https://github.com/rust-lang/rust/pull/44026
+[#44027]: https://github.com/rust-lang/rust/pull/44027
+"##,
+ },
+ Lint {
+ label: "doc_notable_trait",
+ description: r##"# `doc_notable_trait`
+
+The tracking issue for this feature is: [#45040]
+
+The `doc_notable_trait` feature allows the use of the `#[doc(notable_trait)]`
+attribute, which will display the trait in a "Notable traits" dialog for
+functions returning types that implement the trait. For example, this attribute
+is applied to the `Iterator`, `Future`, `io::Read`, and `io::Write` traits in
+the standard library.
+
+You can do this on your own traits like so:
+
+```
+#![feature(doc_notable_trait)]
+
+#[doc(notable_trait)]
+pub trait MyTrait {}
+
+pub struct MyStruct;
+impl MyTrait for MyStruct {}
+
+/// The docs for this function will have a button that displays a dialog about
+/// `MyStruct` implementing `MyTrait`.
+pub fn my_fn() -> MyStruct { MyStruct }
+```
+
+This feature was originally implemented in PR [#45039].
+
+See also its documentation in [the rustdoc book][rustdoc-book-notable_trait].
+
+[#45040]: https://github.com/rust-lang/rust/issues/45040
+[#45039]: https://github.com/rust-lang/rust/pull/45039
+[rustdoc-book-notable_trait]: ../../rustdoc/unstable-features.html#adding-your-trait-to-the-notable-traits-dialog
+"##,
+ },
+ Lint {
+ label: "exclusive_range_pattern",
+ description: r##"# `exclusive_range_pattern`
+
+The tracking issue for this feature is: [#37854].
+
+
+[#67264]: https://github.com/rust-lang/rust/issues/67264
+[#37854]: https://github.com/rust-lang/rust/issues/37854
+-----
+
+The `exclusive_range_pattern` feature allows non-inclusive range
+patterns (`0..10`) to be used in appropriate pattern matching
+contexts. It can also be combined with `#![feature(half_open_range_patterns)]`
+to use RangeTo patterns (`..10`).
+
+It also enabled RangeFrom patterns but that has since been
+stabilized.
+
+```rust
+#![feature(exclusive_range_pattern)]
+ let x = 5;
+ match x {
+ 0..10 => println!("single digit"),
+ 10 => println!("ten isn't part of the above range"),
+ _ => println!("nor is everything else.")
+ }
+```
+"##,
+ },
+ Lint {
+ label: "explicit_generic_args_with_impl_trait",
+ description: r##"# `explicit_generic_args_with_impl_trait`
+
+The tracking issue for this feature is: [#83701]
+
+[#83701]: https://github.com/rust-lang/rust/issues/83701
+
+------------------------
+
+The `explicit_generic_args_with_impl_trait` feature gate lets you specify generic arguments even
+when `impl Trait` is used in argument position.
+
+A simple example is:
+
+```rust
+#![feature(explicit_generic_args_with_impl_trait)]
+
+fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
+
+fn main() {
+ foo::<str>("".to_string());
+}
+```
+
+This is currently rejected:
+
+```text
+error[E0632]: cannot provide explicit generic arguments when `impl Trait` is used in argument position
+ --> src/main.rs:6:11
+ |
+6 | foo::<str>("".to_string());
+ | ^^^ explicit generic argument not allowed
+
+```
+
+However, it would compile if `explicit_generic_args_with_impl_trait` is enabled.
+
+Note that the synthetic type parameters from `impl Trait` are still implicit and you
+cannot explicitly specify these:
+
+```rust,compile_fail
+#![feature(explicit_generic_args_with_impl_trait)]
+
+fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
+fn bar<T: ?Sized, F: AsRef<T>>(_f: F) {}
+
+fn main() {
+ bar::<str, _>("".to_string()); // Okay
+ bar::<str, String>("".to_string()); // Okay
+
+ foo::<str>("".to_string()); // Okay
+ foo::<str, String>("".to_string()); // Error, you cannot specify `impl Trait` explicitly
+}
+```
+"##,
+ },
+ Lint {
+ label: "fd",
+ description: r##"# `fd`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fd_read",
+ description: r##"# `fd_read`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ffi_const",
+ description: r##"# `ffi_const`
+
+The tracking issue for this feature is: [#58328]
+
+------
+
+The `#[ffi_const]` attribute applies clang's `const` attribute to foreign
+function declarations.
+
+That is, `#[ffi_const]` functions shall have no effects except for their return
+value, which can only depend on the values of the function parameters and is
+not affected by changes to the observable state of the program.
+
+Applying the `#[ffi_const]` attribute to a function that violates these
+requirements is undefined behaviour.
+
+This attribute enables Rust to perform common optimizations, like sub-expression
+elimination, and it can avoid emitting some calls in repeated invocations of the
+function with the same argument values regardless of other operations being
+performed in between these functions calls (as opposed to `#[ffi_pure]`
+functions).
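+
+A minimal sketch of annotating a foreign declaration (`table_lookup` is a
+hypothetical C function whose result depends only on its argument):
+
+```rust,ignore
+#![feature(ffi_const)]
+
+extern "C" {
+    // Repeated calls with the same `index` may be merged by the optimizer.
+    #[ffi_const]
+    fn table_lookup(index: u32) -> u32;
+}
+```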
+
+## Pitfalls
+
+A `#[ffi_const]` function can only read global memory that would not affect
+its return value for the whole execution of the program (e.g. immutable global
+memory). `#[ffi_const]` functions are referentially-transparent and therefore
+more strict than `#[ffi_pure]` functions.
+
+A common pitfall involves applying the `#[ffi_const]` attribute to a
+function that reads memory through pointer arguments which do not necessarily
+point to immutable global memory.
+
+A `#[ffi_const]` function that returns unit has no effect on the abstract
+machine's state, and a `#[ffi_const]` function cannot be `#[ffi_pure]`.
+
+A `#[ffi_const]` function must not diverge, neither via a side effect (e.g. a
+call to `abort`) nor by infinite loops.
+
+When translating C headers to Rust FFI, it is worth verifying for which targets
+the `const` attribute is enabled in those headers, and using the appropriate
+`cfg` macros on the Rust side to match those definitions. While the semantics of
+`const` are implemented identically by many C and C++ compilers, e.g., clang,
+[GCC], [ARM C/C++ compiler], [IBM ILE C/C++], etc., they are not necessarily
+implemented in this way on all of them. It is therefore also worth verifying
+that the semantics of the C toolchain used to compile the binary being linked
+against are compatible with those of `#[ffi_const]`.
+
+[#58328]: https://github.com/rust-lang/rust/issues/58328
+[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacgigch.html
+[GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-const-function-attribute
+[IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_const.htm
+"##,
+ },
+ Lint {
+ label: "ffi_pure",
+ description: r##"# `ffi_pure`
+
+The tracking issue for this feature is: [#58329]
+
+------
+
+The `#[ffi_pure]` attribute applies clang's `pure` attribute to foreign
+function declarations.
+
+That is, `#[ffi_pure]` functions shall have no effects except for their return
+value, which shall not change across two consecutive function calls with
+the same parameters.
+
+Applying the `#[ffi_pure]` attribute to a function that violates these
+requirements is undefined behavior.
+
+This attribute enables Rust to perform common optimizations, like sub-expression
+elimination and loop optimizations. Some common examples of pure functions are
+`strlen` or `memcmp`.
+
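+A minimal sketch, using `strlen` (mentioned above) as the illustration:
+
+```rust
+#![feature(ffi_pure)]
+
+use std::os::raw::c_char;
+
+extern "C" {
+    // `strlen` only reads the string it is given and has no other effects,
+    // which is why C headers commonly mark it `pure`.
+    #[ffi_pure]
+    fn strlen(s: *const c_char) -> usize;
+}
+
+fn main() {
+    let s = b"hello\0";
+    assert_eq!(unsafe { strlen(s.as_ptr().cast()) }, 5);
+}
+```
+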
+These optimizations are only applicable when the compiler can prove that no
+program state observable by the `#[ffi_pure]` function has changed between calls
+of the function, which could alter the result. See also the `#[ffi_const]`
+attribute, which provides stronger guarantees regarding the allowable behavior
+of a function, enabling further optimization.
+
+## Pitfalls
+
+A `#[ffi_pure]` function can read global memory through the function
+parameters (e.g. pointers), globals, etc. `#[ffi_pure]` functions are not
+referentially-transparent, and are therefore more relaxed than `#[ffi_const]`
+functions.
+
+However, accessing global memory through volatile or atomic reads can violate the
+requirement that two consecutive function calls shall return the same value.
+
+A `pure` function that returns unit has no effect on the abstract machine's
+state.
+
+A `#[ffi_pure]` function must not diverge, neither via a side effect (e.g. a
+call to `abort`) nor by infinite loops.
+
+When translating C headers to Rust FFI, it is worth verifying for which targets
+the `pure` attribute is enabled in those headers, and using the appropriate
+`cfg` macros on the Rust side to match those definitions. While the semantics of
+`pure` are implemented identically by many C and C++ compilers, e.g., clang,
+[GCC], [ARM C/C++ compiler], [IBM ILE C/C++], etc., they are not necessarily
+implemented in this way on all of them. It is therefore also worth verifying
+that the semantics of the C toolchain used to compile the binary being linked
+against are compatible with those of `#[ffi_pure]`.
+
+
+[#58329]: https://github.com/rust-lang/rust/issues/58329
+[ARM C/C++ compiler]: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0491c/Cacigdac.html
+[GCC]: https://gcc.gnu.org/onlinedocs/gcc/Common-Function-Attributes.html#index-pure-function-attribute
+[IBM ILE C/C++]: https://www.ibm.com/support/knowledgecenter/fr/ssw_ibm_i_71/rzarg/fn_attrib_pure.htm
+"##,
+ },
+ Lint {
+ label: "flt2dec",
+ description: r##"# `flt2dec`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fmt_internals",
+ description: r##"# `fmt_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fn_traits",
+ description: r##"# `fn_traits`
+
+The tracking issue for this feature is [#29625]
+
+See Also: [`unboxed_closures`](../language-features/unboxed-closures.md)
+
+[#29625]: https://github.com/rust-lang/rust/issues/29625
+
+----
+
+The `fn_traits` feature allows for implementation of the [`Fn*`] traits
+for creating custom closure-like types.
+
+[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+
+```rust
+#![feature(unboxed_closures)]
+#![feature(fn_traits)]
+
+struct Adder {
+ a: u32
+}
+
+impl FnOnce<(u32, )> for Adder {
+ type Output = u32;
+ extern "rust-call" fn call_once(self, b: (u32, )) -> Self::Output {
+ self.a + b.0
+ }
+}
+
+fn main() {
+ let adder = Adder { a: 3 };
+ assert_eq!(adder(2), 5);
+}
+```
+"##,
+ },
+ Lint {
+ label: "generators",
+ description: r##"# `generators`
+
+The tracking issue for this feature is: [#43122]
+
+[#43122]: https://github.com/rust-lang/rust/issues/43122
+
+------------------------
+
+The `generators` feature gate in Rust allows you to define generator or
+coroutine literals. A generator is a "resumable function" that syntactically
+resembles a closure but compiles to very different semantics in the compiler
+itself. The primary feature of a generator is that it can be suspended during
+execution to be resumed at a later date. Generators use the `yield` keyword to
+"return", and then the caller can `resume` a generator to resume execution just
+after the `yield` keyword.
+
+Generators are an extra-unstable feature in the compiler right now. Added in
+[RFC 2033], they're mostly intended right now as an information/constraint
+gathering phase. The intent is that experimentation can happen on the nightly
+compiler before actual stabilization. A further RFC will be required to
+stabilize generators/coroutines and will likely contain at least a few small
+tweaks to the overall design.
+
+[RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033
+
+A syntactical example of a generator is:
+
+```rust
+#![feature(generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+use std::pin::Pin;
+
+fn main() {
+ let mut generator = || {
+ yield 1;
+ return "foo"
+ };
+
+ match Pin::new(&mut generator).resume(()) {
+ GeneratorState::Yielded(1) => {}
+ _ => panic!("unexpected value from resume"),
+ }
+ match Pin::new(&mut generator).resume(()) {
+ GeneratorState::Complete("foo") => {}
+ _ => panic!("unexpected value from resume"),
+ }
+}
+```
+
+Generators are closure-like literals which can contain a `yield` statement. The
+`yield` statement takes an optional expression of a value to yield out of the
+generator. All generator literals implement the `Generator` trait in the
+`std::ops` module. The `Generator` trait has one main method, `resume`, which
+resumes execution of the generator at the previous suspension point.
+
+An example of the control flow of generators is that the following example
+prints all numbers in order:
+
+```rust
+#![feature(generators, generator_trait)]
+
+use std::ops::Generator;
+use std::pin::Pin;
+
+fn main() {
+ let mut generator = || {
+ println!("2");
+ yield;
+ println!("4");
+ };
+
+ println!("1");
+ Pin::new(&mut generator).resume(());
+ println!("3");
+ Pin::new(&mut generator).resume(());
+ println!("5");
+}
+```
+
+At this time the main intended use case of generators is an implementation
+primitive for async/await syntax, but generators will likely be extended to
+ergonomic implementations of iterators and other primitives in the future.
+Feedback on the design and usage is always appreciated!
+
+### The `Generator` trait
+
+The `Generator` trait in `std::ops` currently looks like:
+
+```rust
+# #![feature(arbitrary_self_types, generator_trait)]
+# use std::ops::GeneratorState;
+# use std::pin::Pin;
+
+pub trait Generator<R = ()> {
+ type Yield;
+ type Return;
+ fn resume(self: Pin<&mut Self>, resume: R) -> GeneratorState<Self::Yield, Self::Return>;
+}
+```
+
+The `Generator::Yield` type is the type of values that can be yielded with the
+`yield` statement. The `Generator::Return` type is the returned type of the
+generator. This is typically the last expression in a generator's definition or
+any value passed to `return` in a generator. The `resume` function is the entry
+point for executing the `Generator` itself.
+
+The return value of `resume`, `GeneratorState`, looks like:
+
+```rust
+pub enum GeneratorState<Y, R> {
+ Yielded(Y),
+ Complete(R),
+}
+```
+
+The `Yielded` variant indicates that the generator can later be resumed. This
+corresponds to a `yield` point in a generator. The `Complete` variant indicates
+that the generator is complete and cannot be resumed again. Calling `resume`
+after a generator has returned `Complete` will likely result in a panic of the
+program.
+
+### Closure-like semantics
+
+The closure-like syntax for generators alludes to the fact that they also have
+closure-like semantics. Namely:
+
+* When created, a generator executes no code. A closure literal does not
+ actually execute any of the closure's code on construction, and similarly a
+ generator literal does not execute any code inside the generator when
+ constructed.
+
+* Generators can capture outer variables by reference or by move, and this can
+ be tweaked with the `move` keyword at the beginning of the closure. Like
+  closures, all generators have an implicit environment which is inferred by
+ the compiler. Outer variables can be moved into a generator for use as the
+ generator progresses.
+
+* Generator literals produce a value with a unique type which implements the
+ `std::ops::Generator` trait. This allows actual execution of the generator
+ through the `Generator::resume` method as well as also naming it in return
+ types and such.
+
+* Traits like `Send` and `Sync` are automatically implemented for a `Generator`
+ depending on the captured variables of the environment. Unlike closures,
+  generators also depend on the variables that are live across suspension points.
+  This means that although the ambient environment may be `Send` or `Sync`, the
+  generator itself may not be, because variables that are live across `yield`
+  points may themselves be not-`Send` or not-`Sync`. Note that generators do
+ not implement traits like `Copy` or `Clone` automatically.
+
+* Whenever a generator is dropped it will drop all captured environment
+ variables.
+
+### Generators as state machines
+
+In the compiler, generators are currently compiled as state machines. Each
+`yield` expression will correspond to a different state that stores all live
+variables over that suspension point. Resumption of a generator will dispatch on
+the current state and then execute internally until a `yield` is reached, at
+which point all state is saved off in the generator and a value is returned.
+
+Let's take a look at an example to see what's going on here:
+
+```rust
+#![feature(generators, generator_trait)]
+
+use std::ops::Generator;
+use std::pin::Pin;
+
+fn main() {
+ let ret = "foo";
+ let mut generator = move || {
+ yield 1;
+ return ret
+ };
+
+ Pin::new(&mut generator).resume(());
+ Pin::new(&mut generator).resume(());
+}
+```
+
+This generator literal will compile down to something similar to:
+
+```rust
+#![feature(arbitrary_self_types, generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+use std::pin::Pin;
+
+fn main() {
+ let ret = "foo";
+ let mut generator = {
+ enum __Generator {
+ Start(&'static str),
+ Yield1(&'static str),
+ Done,
+ }
+
+ impl Generator for __Generator {
+ type Yield = i32;
+ type Return = &'static str;
+
+ fn resume(mut self: Pin<&mut Self>, resume: ()) -> GeneratorState<i32, &'static str> {
+ use std::mem;
+ match mem::replace(&mut *self, __Generator::Done) {
+ __Generator::Start(s) => {
+ *self = __Generator::Yield1(s);
+ GeneratorState::Yielded(1)
+ }
+
+ __Generator::Yield1(s) => {
+ *self = __Generator::Done;
+ GeneratorState::Complete(s)
+ }
+
+ __Generator::Done => {
+ panic!("generator resumed after completion")
+ }
+ }
+ }
+ }
+
+ __Generator::Start(ret)
+ };
+
+ Pin::new(&mut generator).resume(());
+ Pin::new(&mut generator).resume(());
+}
+```
+
+Notably here we can see that the compiler is generating a fresh type,
+`__Generator` in this case. This type has a number of states (represented here
+as an `enum`) corresponding to each of the conceptual states of the generator.
+At the beginning we're closing over our outer variable `ret` and then that
+variable is also live over the `yield` point, so it's stored in both states.
+
+When the generator starts it'll immediately yield 1, but it saves off its state
+just before it does so indicating that it has reached the yield point. Upon
+resuming again we'll execute the `return ret` which returns the `Complete`
+state.
+
+Here we can also note that the `Done` state, if resumed, panics immediately as
+it's invalid to resume a completed generator. It's also worth noting that this
+is just a rough desugaring, not a normative specification for what the compiler
+does.
+"##,
+ },
+ Lint {
+ label: "half_open_range_patterns",
+ description: r##"# `half_open_range_patterns`
+
+The tracking issue for this feature is: [#67264]
+It is part of the `exclusive_range_pattern` feature,
+tracked at [#37854].
+
+[#67264]: https://github.com/rust-lang/rust/issues/67264
+[#37854]: https://github.com/rust-lang/rust/issues/37854
+-----
+
+The `half_open_range_patterns` feature allows RangeTo patterns
+(`..10`) to be used in appropriate pattern matching contexts.
+This also requires enabling the `exclusive_range_pattern` feature.
+
+It also enabled RangeFrom patterns, but those have since been
+stabilized.
+
+```rust
+#![feature(half_open_range_patterns)]
+#![feature(exclusive_range_pattern)]
+ let x = 5;
+ match x {
+ ..0 => println!("negative!"), // "RangeTo" pattern. Unstable.
+ 0 => println!("zero!"),
+ 1.. => println!("positive!"), // "RangeFrom" pattern. Stable.
+ }
+```
+"##,
+ },
+ Lint {
+ label: "infer_static_outlives_requirements",
+ description: r##"# `infer_static_outlives_requirements`
+
+The tracking issue for this feature is: [#54185]
+
+[#54185]: https://github.com/rust-lang/rust/issues/54185
+
+------------------------
+The `infer_static_outlives_requirements` feature indicates that certain
+`'static` outlives requirements can be inferred by the compiler rather than
+stating them explicitly.
+
+Note: It is an accompanying feature to `infer_outlives_requirements`,
+which must be enabled to infer outlives requirements.
+
+For example, generic struct definitions that contain
+references currently require where-clauses of the form `T: 'static`. By using
+this feature, the outlives predicates will be inferred, although
+they may still be written explicitly.
+
+```rust,ignore (pseudo-Rust)
+struct Foo<U> where U: 'static { // <-- currently required
+ bar: Bar<U>
+}
+struct Bar<T: 'static> {
+ x: T,
+}
+```
+
+
+## Examples
+
+```rust,ignore (pseudo-Rust)
+#![feature(infer_outlives_requirements)]
+#![feature(infer_static_outlives_requirements)]
+
+#[rustc_outlives]
+// Implicitly infer U: 'static
+struct Foo<U> {
+ bar: Bar<U>
+}
+struct Bar<T: 'static> {
+ x: T,
+}
+```
+"##,
+ },
+ Lint {
+ label: "inline_const",
+ description: r##"# `inline_const`
+
+The tracking issue for this feature is: [#76001]
+
+See also [`inline_const_pat`](inline-const-pat.md)
+
+------
+
+This feature allows you to use inline constant expressions. For example, you can
+turn this code:
+
+```rust
+# fn add_one(x: i32) -> i32 { x + 1 }
+const MY_COMPUTATION: i32 = 1 + 2 * 3 / 4;
+
+fn main() {
+ let x = add_one(MY_COMPUTATION);
+}
+```
+
+into this code:
+
+```rust
+#![feature(inline_const)]
+
+# fn add_one(x: i32) -> i32 { x + 1 }
+fn main() {
+ let x = add_one(const { 1 + 2 * 3 / 4 });
+}
+```
+
+[#76001]: https://github.com/rust-lang/rust/issues/76001
+"##,
+ },
+ Lint {
+ label: "inline_const_pat",
+ description: r##"# `inline_const_pat`
+
+The tracking issue for this feature is: [#76001]
+
+See also [`inline_const`](inline-const.md)
+
+------
+
+This feature allows you to use inline constant expressions in pattern position:
+
+```rust
+#![feature(inline_const_pat)]
+
+const fn one() -> i32 { 1 }
+
+let some_int = 3;
+match some_int {
+ const { 1 + 2 } => println!("Matched 1 + 2"),
+ const { one() } => println!("Matched const fn returning 1"),
+ _ => println!("Didn't match anything :("),
+}
+```
+
+[#76001]: https://github.com/rust-lang/rust/issues/76001
+"##,
+ },
+ Lint {
+ label: "int_error_internals",
+ description: r##"# `int_error_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "internal_output_capture",
+ description: r##"# `internal_output_capture`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "intra_doc_pointers",
+ description: r##"# `intra-doc-pointers`
+
+The tracking issue for this feature is: [#80896]
+
+[#80896]: https://github.com/rust-lang/rust/issues/80896
+
+------------------------
+
+Rustdoc does not currently allow disambiguating between `*const` and `*mut`, and
+raw pointers in intra-doc links are unstable until it does.
+
+```rust
+#![feature(intra_doc_pointers)]
+//! [pointer::add]
+```
+"##,
+ },
+ Lint {
+ label: "intrinsics",
+ description: r##"# `intrinsics`
+
+The tracking issue for this feature is: None.
+
+Intrinsics are never intended to be stable directly, but intrinsics are often
+exported in some sort of stable manner. Prefer using the stable interfaces to
+the intrinsic directly when you can.
+
+------------------------
+
+
+These are imported as if they were FFI functions, with the special
+`rust-intrinsic` ABI. For example, if one were in a freestanding
+context, but wished to be able to `transmute` between types, and
+perform efficient pointer arithmetic, one would import those functions
+via a declaration like
+
+```rust
+#![feature(intrinsics)]
+# fn main() {}
+
+extern "rust-intrinsic" {
+ fn transmute<T, U>(x: T) -> U;
+
+ fn offset<T>(dst: *const T, offset: isize) -> *const T;
+}
+```
+
+As with any other FFI functions, these are always `unsafe` to call.
+"##,
+ },
+ Lint {
+ label: "is_sorted",
+ description: r##"# `is_sorted`
+
+The tracking issue for this feature is: [#53485]
+
+[#53485]: https://github.com/rust-lang/rust/issues/53485
+
+------------------------
+
+Add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to `[T]`;
+add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to
+`Iterator`.
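+
+A brief usage sketch:
+
+```rust
+#![feature(is_sorted)]
+
+fn main() {
+    // Slice methods.
+    assert!([1, 2, 2, 9].is_sorted());
+    assert!(![1, 3, 2].is_sorted());
+    assert!([1_i64, -2, 3].is_sorted_by_key(|x| x.abs()));
+
+    // Iterator method.
+    assert!((1..100).is_sorted());
+}
+```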
+"##,
+ },
+ Lint {
+ label: "lang_items",
+ description: r##"# `lang_items`
+
+The tracking issue for this feature is: None.
+
+------------------------
+
+The `rustc` compiler has certain pluggable operations, that is,
+functionality that isn't hard-coded into the language, but is
+implemented in libraries, with a special marker to tell the compiler
+it exists. The marker is the attribute `#[lang = "..."]` and there are
+various different values of `...`, i.e. various different 'lang
+items'.
+
+For example, `Box` pointers require two lang items, one for allocation
+and one for deallocation. A freestanding program that uses the `Box`
+sugar for dynamic allocations via `malloc` and `free`:
+
+```rust,ignore (libc-is-finicky)
+#![feature(lang_items, box_syntax, start, libc, core_intrinsics, rustc_private)]
+#![no_std]
+use core::intrinsics;
+use core::panic::PanicInfo;
+
+extern crate libc;
+
+#[lang = "owned_box"]
+pub struct Box<T>(*mut T);
+
+#[lang = "exchange_malloc"]
+unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
+ let p = libc::malloc(size as libc::size_t) as *mut u8;
+
+ // Check if `malloc` failed:
+ if p as usize == 0 {
+ intrinsics::abort();
+ }
+
+ p
+}
+
+#[lang = "box_free"]
+unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
+ libc::free(ptr as *mut libc::c_void)
+}
+
+#[start]
+fn main(_argc: isize, _argv: *const *const u8) -> isize {
+ let _x = box 1;
+
+ 0
+}
+
+#[lang = "eh_personality"] extern fn rust_eh_personality() {}
+#[lang = "panic_impl"] extern fn rust_begin_panic(info: &PanicInfo) -> ! { unsafe { intrinsics::abort() } }
+#[no_mangle] pub extern fn rust_eh_register_frames () {}
+#[no_mangle] pub extern fn rust_eh_unregister_frames () {}
+```
+
+Note the use of `abort`: the `exchange_malloc` lang item is assumed to
+return a valid pointer, and so needs to do the check internally.
+
+Other features provided by lang items include:
+
+- overloadable operators via traits: the traits corresponding to the
+ `==`, `<`, dereferencing (`*`) and `+` (etc.) operators are all
+ marked with lang items; those specific four are `eq`, `ord`,
+ `deref`, and `add` respectively.
+- stack unwinding and general failure; the `eh_personality`,
+ `panic` and `panic_bounds_check` lang items.
+- the traits in `std::marker` used to indicate types of
+ various kinds; lang items `send`, `sync` and `copy`.
+- the marker types and variance indicators found in
+ `std::marker`; lang items `covariant_type`,
+ `contravariant_lifetime`, etc.
+
+Lang items are loaded lazily by the compiler; e.g. if one never uses
+`Box` then there is no need to define functions for `exchange_malloc`
+and `box_free`. `rustc` will emit an error when an item is needed
+but not found in the current crate or any that it depends on.
+
+Most lang items are defined by `libcore`, but if you're trying to build
+an executable without the standard library, you'll run into the need
+for lang items. The rest of this page focuses on this use-case, even though
+lang items are a bit broader than that.
+
+### Using libc
+
+In order to build a `#[no_std]` executable we will need libc as a dependency.
+We can specify this using our `Cargo.toml` file:
+
+```toml
+[dependencies]
+libc = { version = "0.2.14", default-features = false }
+```
+
+Note that the default features have been disabled. This is a critical step -
+**the default features of libc include the standard library and so must be
+disabled.**
+
+### Writing an executable without stdlib
+
+Controlling the entry point is possible in two ways: the `#[start]` attribute,
+or overriding the default shim for the C `main` function with your own.
+
+The function marked `#[start]` is passed the command line parameters
+in the same format as C:
+
+```rust,ignore (libc-is-finicky)
+#![feature(lang_items, core_intrinsics, rustc_private)]
+#![feature(start)]
+#![no_std]
+use core::intrinsics;
+use core::panic::PanicInfo;
+
+// Pull in the system libc library for what crt0.o likely requires.
+extern crate libc;
+
+// Entry point for this program.
+#[start]
+fn start(_argc: isize, _argv: *const *const u8) -> isize {
+ 0
+}
+
+// These functions are used by the compiler, but not
+// for a bare-bones hello world. These are normally
+// provided by libstd.
+#[lang = "eh_personality"]
+#[no_mangle]
+pub extern fn rust_eh_personality() {
+}
+
+#[lang = "panic_impl"]
+#[no_mangle]
+pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
+ unsafe { intrinsics::abort() }
+}
+```
+
+To override the compiler-inserted `main` shim, one has to disable it
+with `#![no_main]` and then create the appropriate symbol with the
+correct ABI and the correct name, which requires overriding the
+compiler's name mangling too:
+
+```rust,ignore (libc-is-finicky)
+#![feature(lang_items, core_intrinsics, rustc_private)]
+#![feature(start)]
+#![no_std]
+#![no_main]
+use core::intrinsics;
+use core::panic::PanicInfo;
+
+// Pull in the system libc library for what crt0.o likely requires.
+extern crate libc;
+
+// Entry point for this program.
+#[no_mangle] // ensure that this symbol is called `main` in the output
+pub extern fn main(_argc: i32, _argv: *const *const u8) -> i32 {
+ 0
+}
+
+// These functions are used by the compiler, but not
+// for a bare-bones hello world. These are normally
+// provided by libstd.
+#[lang = "eh_personality"]
+#[no_mangle]
+pub extern fn rust_eh_personality() {
+}
+
+#[lang = "panic_impl"]
+#[no_mangle]
+pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
+ unsafe { intrinsics::abort() }
+}
+```
+
+In many cases, you may need to manually link to the `compiler_builtins` crate
+when building a `no_std` binary. You may observe this via linker error messages
+such as "```undefined reference to `__rust_probestack'```".
+
+## More about the language items
+
+The compiler currently makes a few assumptions about symbols which are
+available in the executable to call. Normally these functions are provided by
+the standard library, but without it you must define your own. These symbols
+are called "language items", and each has an internal name and a
+signature that an implementation must conform to.
+
+The first of these functions, `rust_eh_personality`, is used by the failure
+mechanisms of the compiler. This is often mapped to GCC's personality function
+(see the [libstd implementation][unwind] for more information), but crates
+which do not trigger a panic can be assured that this function is never
+called. The language item's name is `eh_personality`.
+
+[unwind]: https://github.com/rust-lang/rust/blob/master/library/panic_unwind/src/gcc.rs
+
+The second function, `rust_begin_panic`, is also used by the failure mechanisms of the
+compiler. When a panic happens, this controls the message that's displayed on
+the screen. While the language item's name is `panic_impl`, the symbol name is
+`rust_begin_panic`.
+
+Finally, a `eh_catch_typeinfo` static is needed for certain targets which
+implement Rust panics on top of C++ exceptions.
+
+## List of all language items
+
+This is a list of all language items in Rust along with where they are located in
+the source code.
+
+- Primitives
+ - `i8`: `libcore/num/mod.rs`
+ - `i16`: `libcore/num/mod.rs`
+ - `i32`: `libcore/num/mod.rs`
+ - `i64`: `libcore/num/mod.rs`
+ - `i128`: `libcore/num/mod.rs`
+ - `isize`: `libcore/num/mod.rs`
+ - `u8`: `libcore/num/mod.rs`
+ - `u16`: `libcore/num/mod.rs`
+ - `u32`: `libcore/num/mod.rs`
+ - `u64`: `libcore/num/mod.rs`
+ - `u128`: `libcore/num/mod.rs`
+ - `usize`: `libcore/num/mod.rs`
+ - `f32`: `libstd/f32.rs`
+ - `f64`: `libstd/f64.rs`
+ - `char`: `libcore/char.rs`
+ - `slice`: `liballoc/slice.rs`
+ - `str`: `liballoc/str.rs`
+ - `const_ptr`: `libcore/ptr.rs`
+ - `mut_ptr`: `libcore/ptr.rs`
+ - `unsafe_cell`: `libcore/cell.rs`
+- Runtime
+ - `start`: `libstd/rt.rs`
+ - `eh_personality`: `libpanic_unwind/emcc.rs` (EMCC)
+ - `eh_personality`: `libpanic_unwind/gcc.rs` (GNU)
+ - `eh_personality`: `libpanic_unwind/seh.rs` (SEH)
+ - `eh_catch_typeinfo`: `libpanic_unwind/emcc.rs` (EMCC)
+ - `panic`: `libcore/panicking.rs`
+ - `panic_bounds_check`: `libcore/panicking.rs`
+ - `panic_impl`: `libcore/panicking.rs`
+ - `panic_impl`: `libstd/panicking.rs`
+- Allocations
+ - `owned_box`: `liballoc/boxed.rs`
+ - `exchange_malloc`: `liballoc/heap.rs`
+ - `box_free`: `liballoc/heap.rs`
+- Operands
+ - `not`: `libcore/ops/bit.rs`
+ - `bitand`: `libcore/ops/bit.rs`
+ - `bitor`: `libcore/ops/bit.rs`
+ - `bitxor`: `libcore/ops/bit.rs`
+ - `shl`: `libcore/ops/bit.rs`
+ - `shr`: `libcore/ops/bit.rs`
+ - `bitand_assign`: `libcore/ops/bit.rs`
+ - `bitor_assign`: `libcore/ops/bit.rs`
+ - `bitxor_assign`: `libcore/ops/bit.rs`
+ - `shl_assign`: `libcore/ops/bit.rs`
+ - `shr_assign`: `libcore/ops/bit.rs`
+ - `deref`: `libcore/ops/deref.rs`
+ - `deref_mut`: `libcore/ops/deref.rs`
+ - `index`: `libcore/ops/index.rs`
+ - `index_mut`: `libcore/ops/index.rs`
+ - `add`: `libcore/ops/arith.rs`
+ - `sub`: `libcore/ops/arith.rs`
+ - `mul`: `libcore/ops/arith.rs`
+ - `div`: `libcore/ops/arith.rs`
+ - `rem`: `libcore/ops/arith.rs`
+ - `neg`: `libcore/ops/arith.rs`
+ - `add_assign`: `libcore/ops/arith.rs`
+ - `sub_assign`: `libcore/ops/arith.rs`
+ - `mul_assign`: `libcore/ops/arith.rs`
+ - `div_assign`: `libcore/ops/arith.rs`
+ - `rem_assign`: `libcore/ops/arith.rs`
+ - `eq`: `libcore/cmp.rs`
+ - `ord`: `libcore/cmp.rs`
+- Functions
+ - `fn`: `libcore/ops/function.rs`
+ - `fn_mut`: `libcore/ops/function.rs`
+ - `fn_once`: `libcore/ops/function.rs`
+ - `generator_state`: `libcore/ops/generator.rs`
+ - `generator`: `libcore/ops/generator.rs`
+- Other
+ - `coerce_unsized`: `libcore/ops/unsize.rs`
+ - `drop`: `libcore/ops/drop.rs`
+ - `drop_in_place`: `libcore/ptr.rs`
+ - `clone`: `libcore/clone.rs`
+ - `copy`: `libcore/marker.rs`
+ - `send`: `libcore/marker.rs`
+ - `sized`: `libcore/marker.rs`
+ - `unsize`: `libcore/marker.rs`
+ - `sync`: `libcore/marker.rs`
+ - `phantom_data`: `libcore/marker.rs`
+ - `discriminant_kind`: `libcore/marker.rs`
+ - `freeze`: `libcore/marker.rs`
+ - `debug_trait`: `libcore/fmt/mod.rs`
+ - `non_zero`: `libcore/nonzero.rs`
+ - `arc`: `liballoc/sync.rs`
+ - `rc`: `liballoc/rc.rs`
+"##,
+ },
+ Lint {
+ label: "libstd_sys_internals",
+ description: r##"# `libstd_sys_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "libstd_thread_internals",
+ description: r##"# `libstd_thread_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "link_cfg",
+ description: r##"# `link_cfg`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "llvm_asm",
+ description: r##"# `llvm_asm`
+
+The tracking issue for this feature is: [#70173]
+
+[#70173]: https://github.com/rust-lang/rust/issues/70173
+
+------------------------
+
+For extremely low-level manipulations and performance reasons, one
+might wish to control the CPU directly. Rust supports using inline
+assembly to do this via the `llvm_asm!` macro.
+
+```rust,ignore (pseudo-code)
+llvm_asm!(assembly template
+ : output operands
+ : input operands
+ : clobbers
+ : options
+ );
+```
+
+Any use of `llvm_asm` is feature gated (requires `#![feature(llvm_asm)]` on the
+crate to allow) and of course requires an `unsafe` block.
+
+> **Note**: the examples here are given in x86/x86-64 assembly, but
+> all platforms are supported.
+
+## Assembly template
+
+The `assembly template` is the only required parameter and must be a
+literal string (i.e. `""`).
+
+```rust
+#![feature(llvm_asm)]
+
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+fn foo() {
+ unsafe {
+ llvm_asm!("NOP");
+ }
+}
+
+// Other platforms:
+#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+fn foo() { /* ... */ }
+
+fn main() {
+ // ...
+ foo();
+ // ...
+}
+```
+
+(The `feature(llvm_asm)` and `#[cfg]`s are omitted from now on.)
+
+Output operands, input operands, clobbers and options are all optional
+but you must add the right number of `:` if you skip them:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() { unsafe {
+llvm_asm!("xor %eax, %eax"
+ :
+ :
+ : "eax"
+ );
+# } }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+Whitespace also doesn't matter:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() { unsafe {
+llvm_asm!("xor %eax, %eax" ::: "eax");
+# } }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+## Operands
+
+Input and output operands follow the same format: `:
+"constraints1"(expr1), "constraints2"(expr2), ..."`. Output operand
+expressions must be mutable place, or not yet assigned:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+fn add(a: i32, b: i32) -> i32 {
+ let c: i32;
+ unsafe {
+ llvm_asm!("add $2, $0"
+ : "=r"(c)
+ : "0"(a), "r"(b)
+ );
+ }
+ c
+}
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn add(a: i32, b: i32) -> i32 { a + b }
+
+fn main() {
+ assert_eq!(add(3, 14159), 14162)
+}
+```
+
+If you would like to use real operands in this position, however,
+you are required to put curly braces `{}` around the register that
+you want, and you are required to put the specific size of the
+operand. This is useful for very low level programming, where
+which register you use is important:
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# unsafe fn read_byte_in(port: u16) -> u8 {
+let result: u8;
+llvm_asm!("in %dx, %al" : "={al}"(result) : "{dx}"(port));
+result
+# }
+```
+
+## Clobbers
+
+Some instructions modify registers which might otherwise have held
+different values so we use the clobbers list to indicate to the
+compiler not to assume any values loaded into those registers will
+stay valid.
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() { unsafe {
+// Put the value 0x200 in eax:
+llvm_asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
+# } }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+Input and output registers need not be listed since that information
+is already communicated by the given constraints. Otherwise, any other
+registers used either implicitly or explicitly should be listed.
+
+If the assembly changes the condition code register `cc` should be
+specified as one of the clobbers. Similarly, if the assembly modifies
+memory, `memory` should also be specified.
+
+## Options
+
+The last section, `options` is specific to Rust. The format is comma
+separated literal strings (i.e. `:"foo", "bar", "baz"`). It's used to
+specify some extra info about the inline assembly:
+
+Current valid options are:
+
+1. `volatile` - specifying this is analogous to
+ `__asm__ __volatile__ (...)` in gcc/clang.
+2. `alignstack` - certain instructions expect the stack to be
+   aligned a certain way (e.g. SSE) and specifying this tells
+   the compiler to insert its usual stack alignment code.
+3. `intel` - use Intel syntax instead of the default AT&T.
+
+```rust
+# #![feature(llvm_asm)]
+# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+# fn main() {
+let result: i32;
+unsafe {
+ llvm_asm!("mov eax, 2" : "={eax}"(result) : : : "intel")
+}
+println!("eax is currently {}", result);
+# }
+# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+# fn main() {}
+```
+
+## More Information
+
+The current implementation of the `llvm_asm!` macro is a direct binding to [LLVM's
+inline assembler expressions][llvm-docs], so be sure to check out [their
+documentation as well][llvm-docs] for more information about clobbers,
+constraints, etc.
+
+[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
+"##,
+ },
+ Lint {
+ label: "marker_trait_attr",
+ description: r##"# `marker_trait_attr`
+
+The tracking issue for this feature is: [#29864]
+
+[#29864]: https://github.com/rust-lang/rust/issues/29864
+
+------------------------
+
+Normally, Rust keeps you from adding trait implementations that could
+overlap with each other, as it would be ambiguous which to use. This
+feature, however, carves out an exception to that rule: a trait can
+opt in to having overlapping implementations, at the cost that those
+implementations are not allowed to override anything (and thus the
+trait itself cannot have any associated items, as they're pointless
+when they'd need to do the same thing for every type anyway).
+
+```rust
+#![feature(marker_trait_attr)]
+
+#[marker] trait CheapToClone: Clone {}
+
+impl<T: Copy> CheapToClone for T {}
+
+// These could potentially overlap with the blanket implementation above,
+// so are only allowed because CheapToClone is a marker trait.
+impl<T: CheapToClone, U: CheapToClone> CheapToClone for (T, U) {}
+impl<T: CheapToClone> CheapToClone for std::ops::Range<T> {}
+
+fn cheap_clone<T: CheapToClone>(t: T) -> T {
+ t.clone()
+}
+```
+
+This is expected to replace the unstable `overlapping_marker_traits`
+feature, which applied to all empty traits (without needing an opt-in).
+"##,
+ },
+ Lint {
+ label: "more_qualified_paths",
+ description: r##"# `more_qualified_paths`
+
+The `more_qualified_paths` feature enables the
+use of qualified paths in patterns.
+
+## Example
+
+```rust
+#![feature(more_qualified_paths)]
+
+fn main() {
+ // destructure through a qualified path
+ let <Foo as A>::Assoc { br } = StructStruct { br: 2 };
+}
+
+struct StructStruct {
+ br: i8,
+}
+
+struct Foo;
+
+trait A {
+ type Assoc;
+}
+
+impl A for Foo {
+ type Assoc = StructStruct;
+}
+```
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers",
+ description: r##"# `native_link_modifiers`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers` feature allows you to use the `modifiers` syntax with the `#[link(..)]` attribute.
+
+Modifiers are specified as a comma-delimited string with each modifier prefixed with either a `+` or `-` to indicate that the modifier is enabled or disabled, respectively. The last boolean value specified for a given modifier wins.
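+
+A sketch of what this looks like on a `#[link]` attribute (the library name here
+is hypothetical, and each individual modifier still needs its own feature gate):
+
+```rust,ignore (requires the named native library)
+#![feature(native_link_modifiers)]
+#![feature(native_link_modifiers_whole_archive)]
+#![feature(native_link_modifiers_bundle)]
+
+// Enable `whole-archive` and disable `bundle` for this one static library.
+#[link(name = "mylib", kind = "static", modifiers = "+whole-archive,-bundle")]
+extern "C" {
+    fn mylib_init();
+}
+
+fn main() {
+    unsafe { mylib_init() };
+}
+```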
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_as_needed",
+ description: r##"# `native_link_modifiers_as_needed`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_as_needed` feature allows you to use the `as-needed` modifier.
+
+`as-needed` is only compatible with the `dynamic` and `framework` linking kinds. Using any other kind will result in a compiler error.
+
+`+as-needed` means that the library will actually be linked only if it satisfies some undefined symbols at the point at which it is specified on the command line, making it similar to static libraries in this regard.
+
+This modifier translates to `--as-needed` for ld-like linkers, and to `-dead_strip_dylibs` / `-needed_library` / `-needed_framework` for ld64.
+The modifier does nothing for linkers that don't support it (e.g. `link.exe`).
+
+The default for this modifier is unclear: some targets currently specify it as `+as-needed`, some do not. We may want to try making `+as-needed` the default for all targets.
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_bundle",
+ description: r##"# `native_link_modifiers_bundle`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_bundle` feature allows you to use the `bundle` modifier.
+
+Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
+
+`+bundle` means objects from the static library are bundled into the produced crate (a rlib, for example) and are used from this crate later during linking of the final binary.
+
+`-bundle` means the static library is included in the produced rlib "by name", and object files from it are included only during linking of the final binary; the file search by that name is also performed during final linking.
+
+This modifier is supposed to supersede the `static-nobundle` linking kind defined by [RFC 1717](https://github.com/rust-lang/rfcs/pull/1717).
+
+The default for this modifier is currently `+bundle`, but it could be changed later on some future edition boundary.
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_verbatim",
+ description: r##"# `native_link_modifiers_verbatim`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_verbatim` feature allows you to use the `verbatim` modifier.
+
+`+verbatim` means that rustc itself won't add any target-specified library prefixes or suffixes (like `lib` or `.a`) to the library name, and will try its best to ask for the same thing from the linker.
+
+For `ld`-like linkers rustc will use the `-l:filename` syntax (note the colon) when passing the library, so the linker won't add any prefixes or suffixes either.
+See [`-l namespec`](https://sourceware.org/binutils/docs/ld/Options.html) in ld documentation for more details.
+For linkers not supporting any verbatim modifiers (e.g. `link.exe` or `ld64`) the library name will be passed as is.
+
+The default for this modifier is `-verbatim`.
+
+This feature changes the behavior of the `raw-dylib` linking kind specified by [RFC 2627](https://github.com/rust-lang/rfcs/pull/2627). The `.dll` suffix (or other target-specified suffixes for other targets) is now added automatically.
+If your DLL doesn't have the `.dll` suffix, it can be specified with `+verbatim`.
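+
+A sketch of how this might look (the file name is hypothetical):
+
+```rust,ignore (requires the named native library)
+#![feature(native_link_modifiers)]
+#![feature(native_link_modifiers_verbatim)]
+
+// With `+verbatim` the name is passed to the linker exactly as written,
+// so no `lib` prefix or `.a` suffix is added or assumed.
+#[link(name = "libfoo_v2.a", kind = "static", modifiers = "+verbatim")]
+extern "C" {}
+
+fn main() {}
+```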
+"##,
+ },
+ Lint {
+ label: "native_link_modifiers_whole_archive",
+ description: r##"# `native_link_modifiers_whole_archive`
+
+The tracking issue for this feature is: [#81490]
+
+[#81490]: https://github.com/rust-lang/rust/issues/81490
+
+------------------------
+
+The `native_link_modifiers_whole_archive` feature allows you to use the `whole-archive` modifier.
+
+Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
+
+`+whole-archive` means that the static library is linked as a whole archive without throwing any object files away.
+
+This modifier translates to `--whole-archive` for `ld`-like linkers, to `/WHOLEARCHIVE` for `link.exe`, and to `-force_load` for `ld64`.
+The modifier does nothing for linkers that don't support it.
+
+The default for this modifier is `-whole-archive`.
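+
+A sketch of enabling the modifier for one library (the library name is
+hypothetical):
+
+```rust,ignore (requires the named native library)
+#![feature(native_link_modifiers)]
+#![feature(native_link_modifiers_whole_archive)]
+
+// Keep every object file from `ctors`, e.g. to preserve static constructors
+// that are never referenced by name.
+#[link(name = "ctors", kind = "static", modifiers = "+whole-archive")]
+extern "C" {}
+
+fn main() {}
+```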
+"##,
+ },
+ Lint {
+ label: "negative_impls",
+ description: r##"# `negative_impls`
+
+The tracking issue for this feature is [#68318].
+
+[#68318]: https://github.com/rust-lang/rust/issues/68318
+
+----
+
+With the feature gate `negative_impls`, you can write negative impls as well as positive ones:
+
+```rust
+#![feature(negative_impls)]
+trait DerefMut { }
+impl<T: ?Sized> !DerefMut for &T { }
+```
+
+Negative impls indicate a semver guarantee that the given trait will not be implemented for the given types. Negative impls play an additional purpose for auto traits, described below.
+
+Negative impls have the following characteristics:
+
+* They do not have any items.
+* They must obey the orphan rules as if they were a positive impl.
+* They cannot "overlap" with any positive impls.
+
+## Semver interaction
+
+It is a breaking change to remove a negative impl. Negative impls are a commitment not to implement the given trait for the named types.
+
+## Orphan and overlap rules
+
+Negative impls must obey the same orphan rules as a positive impl. This implies you cannot add a negative impl for types defined in upstream crates and so forth.
+
+Similarly, negative impls cannot overlap with positive impls, again using the same "overlap" check that we ordinarily use to determine if two impls overlap. (Note that positive impls typically cannot overlap with one another either, except as permitted by specialization.)
+
+## Interaction with auto traits
+
+Declaring a negative impl `impl !SomeAutoTrait for SomeType` for an
+auto-trait serves two purposes:
+
+* as with any trait, it declares that `SomeType` will never implement `SomeAutoTrait`;
+* it disables the automatic `SomeType: SomeAutoTrait` impl that would otherwise have been generated.
+
+Note that, at present, there is no way to indicate that a given type
+does not implement an auto trait *but that it may do so in the
+future*. For ordinary types, this is done by simply not declaring any
+impl at all, but that is not an option for auto traits. A workaround
+is that one could embed a marker type as one of the fields, where the
+marker type is `!AutoTrait`.
+
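+A minimal sketch of that workaround (the type names are hypothetical):
+
+```rust
+#![feature(negative_impls)]
+
+// Today this marker keeps the outer type from being `Send`, while leaving
+// open the option of making it `Send` in a future version (unlike a negative
+// impl on the outer type itself, which would be a commitment).
+struct NotSendMarker;
+impl !Send for NotSendMarker {}
+
+pub struct MyType {
+    // Embedding the marker suppresses the automatic `Send` impl for `MyType`.
+    _not_send: NotSendMarker,
+}
+```
+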
+## Immediate uses
+
+Negative impls are used to declare that `&T: !DerefMut` and `&mut T: !Clone`, as required to fix the soundness of `Pin` described in [#66544](https://github.com/rust-lang/rust/issues/66544).
+
+This serves two purposes:
+
+* For proving the correctness of unsafe code, we can use that impl as evidence that no `DerefMut` or `Clone` impl exists.
+* It prevents downstream crates from creating such impls.
+"##,
+ },
+ Lint {
+ label: "no_coverage",
+ description: r##"# `no_coverage`
+
+The tracking issue for this feature is: [#84605]
+
+[#84605]: https://github.com/rust-lang/rust/issues/84605
+
+---
+
+The `no_coverage` attribute can be used to selectively disable coverage
+instrumentation in an annotated function. This might be useful to:
+
+- Avoid instrumentation overhead in a performance critical function
+- Avoid generating coverage for a function that is not meant to be executed,
+ but still target 100% coverage for the rest of the program.
+
+## Example
+
+```rust
+#![feature(no_coverage)]
+
+// `foo()` will get coverage instrumentation (by default)
+fn foo() {
+ // ...
+}
+
+#[no_coverage]
+fn bar() {
+ // ...
+}
+```
+"##,
+ },
+ Lint {
+ label: "no_sanitize",
+ description: r##"# `no_sanitize`
+
+The tracking issue for this feature is: [#39699]
+
+[#39699]: https://github.com/rust-lang/rust/issues/39699
+
+------------------------
+
+The `no_sanitize` attribute can be used to selectively disable sanitizer
+instrumentation in an annotated function. This might be useful to: avoid
+instrumentation overhead in a performance-critical function, or avoid
+instrumenting code that contains constructs unsupported by a given sanitizer.
+
+The precise effect of this annotation depends on the particular sanitizer in use.
+For example, with `no_sanitize(thread)`, the thread sanitizer will no longer
+instrument non-atomic store / load operations, but it will instrument atomic
+operations to avoid reporting false positives and provide meaningful stack
+traces.
+
+## Examples
+
+```rust
+#![feature(no_sanitize)]
+
+#[no_sanitize(address)]
+fn foo() {
+ // ...
+}
+```
+"##,
+ },
+ Lint {
+ label: "plugin",
+ description: r##"# `plugin`
+
+The tracking issue for this feature is: [#29597]
+
+[#29597]: https://github.com/rust-lang/rust/issues/29597
+
+
+This feature is part of "compiler plugins." It will often be used with the
+`rustc_private` feature.
+
+------------------------
+
+`rustc` can load compiler plugins, which are user-provided libraries that
+extend the compiler's behavior with new lint checks, etc.
+
+A plugin is a dynamic library crate with a designated *registrar* function that
+registers extensions with `rustc`. Other crates can load these extensions using
+the crate attribute `#![plugin(...)]`. See the
+`rustc_driver::plugin` documentation for more about the
+mechanics of defining and loading a plugin.
+
+In the vast majority of cases, a plugin should *only* be used through
+`#![plugin]` and not through an `extern crate` item. Linking a plugin would
+pull in all of librustc_ast and librustc as dependencies of your crate. This is
+generally unwanted unless you are building another plugin.
+
+The usual practice is to put compiler plugins in their own crate, separate from
+any `macro_rules!` macros or ordinary Rust code meant to be used by consumers
+of a library.
+
+# Lint plugins
+
+Plugins can extend [Rust's lint
+infrastructure](../../reference/attributes/diagnostics.md#lint-check-attributes) with
+additional checks for code style, safety, etc. Now let's write a plugin
+[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/src/test/ui-fulldeps/auxiliary/lint-plugin-test.rs)
+that warns about any item named `lintme`.
+
+```rust,ignore (requires-stage-2)
+#![feature(box_syntax, rustc_private)]
+
+extern crate rustc_ast;
+
+// Load rustc as a plugin to get macros
+extern crate rustc_driver;
+#[macro_use]
+extern crate rustc_lint;
+#[macro_use]
+extern crate rustc_session;
+
+use rustc_driver::plugin::Registry;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintArray, LintContext, LintPass};
+use rustc_ast::ast;
+declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
+
+declare_lint_pass!(Pass => [TEST_LINT]);
+
+impl EarlyLintPass for Pass {
+ fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
+ if it.ident.name.as_str() == "lintme" {
+ cx.lint(TEST_LINT, |lint| {
+ lint.build("item is named 'lintme'").set_span(it.span).emit()
+ });
+ }
+ }
+}
+
+#[no_mangle]
+fn __rustc_plugin_registrar(reg: &mut Registry) {
+ reg.lint_store.register_lints(&[&TEST_LINT]);
+ reg.lint_store.register_early_pass(|| box Pass);
+}
+```
+
+Then code like
+
+```rust,ignore (requires-plugin)
+#![feature(plugin)]
+#![plugin(lint_plugin_test)]
+
+fn lintme() { }
+```
+
+will produce a compiler warning:
+
+```txt
+foo.rs:4:1: 4:16 warning: item is named 'lintme', #[warn(test_lint)] on by default
+foo.rs:4 fn lintme() { }
+ ^~~~~~~~~~~~~~~
+```
+
+The components of a lint plugin are:
+
+* one or more `declare_lint!` invocations, which define static `Lint` structs;
+
+* a struct holding any state needed by the lint pass (here, none);
+
+* a `LintPass`
+ implementation defining how to check each syntax element. A single
+ `LintPass` may call `span_lint` for several different `Lint`s, but should
+ register them all through the `get_lints` method.
+
+Lint passes are syntax traversals, but they run at a late stage of compilation
+where type information is available. `rustc`'s [built-in
+lints](https://github.com/rust-lang/rust/blob/master/src/librustc_session/lint/builtin.rs)
+mostly use the same infrastructure as lint plugins, and provide examples of how
+to access type information.
+
+Lints defined by plugins are controlled by the usual [attributes and compiler
+flags](../../reference/attributes/diagnostics.md#lint-check-attributes), e.g.
+`#[allow(test_lint)]` or `-A test-lint`. These identifiers are derived from the
+first argument to `declare_lint!`, with appropriate case and punctuation
+conversion.
+
+You can run `rustc -W help foo.rs` to see a list of lints known to `rustc`,
+including those provided by plugins loaded by `foo.rs`.
+"##,
+ },
+ Lint {
+ label: "print_internals",
+ description: r##"# `print_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "profiler_runtime",
+ description: r##"# `profiler_runtime`
+
+The tracking issue for this feature is: [#42524](https://github.com/rust-lang/rust/issues/42524).
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "profiler_runtime_lib",
+ description: r##"# `profiler_runtime_lib`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_dylib",
+ description: r##"# `raw_dylib`
+
+The tracking issue for this feature is: [#58713]
+
+[#58713]: https://github.com/rust-lang/rust/issues/58713
+
+------------------------
+
+The `raw_dylib` feature allows you, on Windows, to link against the implementations of functions
+in an `extern` block without linking against an import library.
+
+```rust,ignore (partial-example)
+#![feature(raw_dylib)]
+
+#[link(name="library", kind="raw-dylib")]
+extern {
+ fn extern_function(x: i32);
+}
+
+fn main() {
+ unsafe {
+ extern_function(14);
+ }
+}
+```
+
+## Limitations
+
+Currently, this feature is only supported on `-windows-msvc` targets. Non-Windows platforms don't have import
+libraries, and an incompatibility between LLVM and the BFD linker means that it is not currently supported on
+`-windows-gnu` targets.
+
+On the `i686-pc-windows-msvc` target, this feature supports only the `cdecl`, `stdcall`, `system`, and `fastcall`
+calling conventions.
+"##,
+ },
+ Lint {
+ label: "repr128",
+ description: r##"# `repr128`
+
+The tracking issue for this feature is: [#56071]
+
+[#56071]: https://github.com/rust-lang/rust/issues/56071
+
+------------------------
+
+The `repr128` feature adds support for `#[repr(u128)]` on `enum`s.
+
+```rust
+#![feature(repr128)]
+
+#[repr(u128)]
+enum Foo {
+ Bar(u64),
+}
+```
+"##,
+ },
+ Lint {
+ label: "rt",
+ description: r##"# `rt`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustc_attrs",
+ description: r##"# `rustc_attrs`
+
+This feature has no tracking issue, and is therefore internal to
+the compiler and not intended for general use.
+
+Note: `rustc_attrs` enables many rustc-internal attributes and this page
+only discusses a few of them.
+
+------------------------
+
+The `rustc_attrs` feature allows debugging rustc type layouts by using
+`#[rustc_layout(...)]` to debug layout at compile time (it even works
+with `cargo check`) as an alternative to `rustc -Z print-type-sizes`
+that is way more verbose.
+
+Options provided by `#[rustc_layout(...)]` are `debug`, `size`, `align`,
+`abi`. Note that it only works on sized types without generics.
+
+## Examples
+
+```rust,compile_fail
+#![feature(rustc_attrs)]
+
+#[rustc_layout(abi, size)]
+pub enum X {
+ Y(u8, u8, u8),
+ Z(isize),
+}
+```
+
+When that is compiled, the compiler will error with something like
+
+```text
+error: abi: Aggregate { sized: true }
+ --> src/lib.rs:4:1
+ |
+4 | / pub enum X {
+5 | | Y(u8, u8, u8),
+6 | | Z(isize),
+7 | | }
+ | |_^
+
+error: size: Size { raw: 16 }
+ --> src/lib.rs:4:1
+ |
+4 | / pub enum X {
+5 | | Y(u8, u8, u8),
+6 | | Z(isize),
+7 | | }
+ | |_^
+
+error: aborting due to 2 previous errors
+```
+"##,
+ },
+ Lint {
+ label: "sort_internals",
+ description: r##"# `sort_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "str_internals",
+ description: r##"# `str_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "test",
+ description: r##"# `test`
+
+The tracking issue for this feature is: None.
+
+------------------------
+
+The internals of the `test` crate are unstable, behind the `test` flag. The
+most widely used part of the `test` crate is benchmark tests, which can test
+the performance of your code. Let's make our `src/lib.rs` look like this
+(comments elided):
+
+```rust,no_run
+#![feature(test)]
+
+extern crate test;
+
+pub fn add_two(a: i32) -> i32 {
+ a + 2
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use test::Bencher;
+
+ #[test]
+ fn it_works() {
+ assert_eq!(4, add_two(2));
+ }
+
+ #[bench]
+ fn bench_add_two(b: &mut Bencher) {
+ b.iter(|| add_two(2));
+ }
+}
+```
+
+Note the `test` feature gate, which enables this unstable feature.
+
+We've imported the `test` crate, which contains our benchmarking support.
+We have a new function as well, with the `bench` attribute. Unlike regular
+tests, which take no arguments, benchmark tests take a `&mut Bencher`. This
+`Bencher` provides an `iter` method, which takes a closure. This closure
+contains the code we'd like to benchmark.
+
+We can run benchmark tests with `cargo bench`:
+
+```bash
+$ cargo bench
+ Compiling adder v0.0.1 (file:///home/steve/tmp/adder)
+ Running target/release/adder-91b3e234d4ed382a
+
+running 2 tests
+test tests::it_works ... ignored
+test tests::bench_add_two ... bench: 1 ns/iter (+/- 0)
+
+test result: ok. 0 passed; 0 failed; 1 ignored; 1 measured
+```
+
+Our non-benchmark test was ignored. You may have noticed that `cargo bench`
+takes a bit longer than `cargo test`. This is because Rust runs our benchmark
+a number of times, and then takes the average. Because we're doing so little
+work in this example, we have a `1 ns/iter (+/- 0)`, but this would show
+the variance if there were one.
+
+Advice on writing benchmarks:
+
+* Move setup code outside the `iter` loop; only put the part you want to measure inside
+* Make the code do "the same thing" on each iteration; do not accumulate or change state
+* Make the outer function idempotent too; the benchmark runner is likely to run
+ it many times
+* Make the inner `iter` loop short and fast so benchmark runs are fast and the
+ calibrator can adjust the run-length at fine resolution
+* Make the code in the `iter` loop do something simple, to assist in pinpointing
+ performance improvements (or regressions)
+
+## Gotcha: optimizations
+
+There's another tricky part to writing benchmarks: benchmarks compiled with
+optimizations activated can be dramatically changed by the optimizer so that
+the benchmark is no longer benchmarking what one expects. For example, the
+compiler might recognize that some calculation has no external effects and
+remove it entirely.
+
+```rust,no_run
+#![feature(test)]
+
+extern crate test;
+use test::Bencher;
+
+#[bench]
+fn bench_xor_1000_ints(b: &mut Bencher) {
+ b.iter(|| {
+ (0..1000).fold(0, |old, new| old ^ new);
+ });
+}
+```
+
+gives the following results
+
+```text
+running 1 test
+test bench_xor_1000_ints ... bench: 0 ns/iter (+/- 0)
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
+```
+
+The benchmarking runner offers two ways to avoid this. Either, the closure that
+the `iter` method receives can return an arbitrary value which forces the
+optimizer to consider the result used and ensures it cannot remove the
+computation entirely. This could be done for the example above by adjusting the
+`b.iter` call to
+
+```rust
+# struct X;
+# impl X { fn iter<T, F>(&self, _: F) where F: FnMut() -> T {} } let b = X;
+b.iter(|| {
+ // Note lack of `;` (could also use an explicit `return`).
+ (0..1000).fold(0, |old, new| old ^ new)
+});
+```
+
+Second, you can call the generic `test::black_box` function, which
+is an opaque "black box" to the optimizer and so forces it to consider any
+argument as used.
+
+```rust
+#![feature(test)]
+
+extern crate test;
+
+# fn main() {
+# struct X;
+# impl X { fn iter<T, F>(&self, _: F) where F: FnMut() -> T {} } let b = X;
+b.iter(|| {
+ let n = test::black_box(1000);
+
+ (0..n).fold(0, |a, b| a ^ b)
+})
+# }
+```
+
+Neither of these reads or modifies the value, and both are very cheap for small values.
+Larger values can be passed indirectly to reduce overhead (e.g.
+`black_box(&huge_struct)`).
+
+Performing either of the above changes gives the following benchmarking results
+
+```text
+running 1 test
+test bench_xor_1000_ints ... bench: 131 ns/iter (+/- 3)
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured
+```
+
+However, the optimizer can still modify a testcase in an undesirable manner
+even when using either of the above.
+"##,
+ },
+ Lint {
+ label: "thread_local_internals",
+ description: r##"# `thread_local_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trace_macros",
+ description: r##"# `trace_macros`
+
+The tracking issue for this feature is [#29598].
+
+[#29598]: https://github.com/rust-lang/rust/issues/29598
+
+------------------------
+
+With `trace_macros` you can trace the expansion of macros in your code.
+
+## Examples
+
+```rust
+#![feature(trace_macros)]
+
+fn main() {
+ trace_macros!(true);
+ println!("Hello, Rust!");
+ trace_macros!(false);
+}
+```
+
+The `cargo build` output:
+
+```txt
+note: trace_macro
+ --> src/main.rs:5:5
+ |
+5 | println!("Hello, Rust!");
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: expanding `println! { "Hello, Rust!" }`
+ = note: to `print ! ( concat ! ( "Hello, Rust!" , "\n" ) )`
+ = note: expanding `print! { concat ! ( "Hello, Rust!" , "\n" ) }`
+ = note: to `$crate :: io :: _print ( format_args ! ( concat ! ( "Hello, Rust!" , "\n" ) )
+ )`
+
+ Finished dev [unoptimized + debuginfo] target(s) in 0.60 secs
+```
+"##,
+ },
+ Lint {
+ label: "trait_alias",
+ description: r##"# `trait_alias`
+
+The tracking issue for this feature is: [#41517]
+
+[#41517]: https://github.com/rust-lang/rust/issues/41517
+
+------------------------
+
+The `trait_alias` feature adds support for trait aliases. These allow aliases
+to be created for one or more traits (currently just a single regular trait plus
+any number of auto-traits), and used wherever traits would normally be used as
+either bounds or trait objects.
+
+```rust
+#![feature(trait_alias)]
+
+trait Foo = std::fmt::Debug + Send;
+trait Bar = Foo + Sync;
+
+// Use trait alias as bound on type parameter.
+fn foo<T: Foo>(v: &T) {
+ println!("{:?}", v);
+}
+
+pub fn main() {
+ foo(&1);
+
+ // Use trait alias for trait objects.
+ let a: &Bar = &123;
+ println!("{:?}", a);
+ let b = Box::new(456) as Box<dyn Foo>;
+ println!("{:?}", b);
+}
+```
+"##,
+ },
+ Lint {
+ label: "trait_upcasting",
+ description: r##"# `trait_upcasting`
+
+The tracking issue for this feature is: [#65991]
+
+[#65991]: https://github.com/rust-lang/rust/issues/65991
+
+------------------------
+
+The `trait_upcasting` feature adds support for trait upcasting coercion. This allows a
+trait object of type `dyn Bar` to be cast to a trait object of type `dyn Foo`
+so long as `Bar: Foo`.
+
+```rust,edition2018
+#![feature(trait_upcasting)]
+#![allow(incomplete_features)]
+
+trait Foo {}
+
+trait Bar: Foo {}
+
+impl Foo for i32 {}
+
+impl<T: Foo + ?Sized> Bar for T {}
+
+let bar: &dyn Bar = &123;
+let foo: &dyn Foo = bar;
+```
+"##,
+ },
+ Lint {
+ label: "transparent_unions",
+ description: r##"# `transparent_unions`
+
+The tracking issue for this feature is [#60405]
+
+[#60405]: https://github.com/rust-lang/rust/issues/60405
+
+----
+
+The `transparent_unions` feature allows you to mark `union`s as
+`#[repr(transparent)]`. A `union` may be `#[repr(transparent)]` in exactly the
+same conditions in which a `struct` may be `#[repr(transparent)]` (generally,
+this means the `union` must have exactly one non-zero-sized field). Some
+concrete illustrations follow.
+
+```rust
+#![feature(transparent_unions)]
+
+// This union has the same representation as `f32`.
+#[repr(transparent)]
+union SingleFieldUnion {
+ field: f32,
+}
+
+// This union has the same representation as `usize`.
+#[repr(transparent)]
+union MultiFieldUnion {
+ field: usize,
+ nothing: (),
+}
+```
+
+For consistency with transparent `struct`s, `union`s must have exactly one
+non-zero-sized field. If all fields are zero-sized, the `union` must not be
+`#[repr(transparent)]`:
+
+```rust
+#![feature(transparent_unions)]
+
+// This (non-transparent) union is already valid in stable Rust:
+pub union GoodUnion {
+ pub nothing: (),
+}
+
+// Error: transparent union needs exactly one non-zero-sized field, but has 0
+// #[repr(transparent)]
+// pub union BadUnion {
+// pub nothing: (),
+// }
+```
+
+The one exception is if the `union` is generic over `T` and has a field of type
+`T`, it may be `#[repr(transparent)]` even if `T` is a zero-sized type:
+
+```rust
+#![feature(transparent_unions)]
+
+// This union has the same representation as `T`.
+#[repr(transparent)]
+pub union GenericUnion<T: Copy> { // Unions with non-`Copy` fields are unstable.
+ pub field: T,
+ pub nothing: (),
+}
+
+// This is okay even though `()` is a zero-sized type.
+pub const THIS_IS_OKAY: GenericUnion<()> = GenericUnion { field: () };
+```
+
+Like transparent `struct`s, a transparent `union` of type `U` has the same
+layout, size, and ABI as its single non-ZST field. If it is generic over a type
+`T`, and all its fields are ZSTs except for exactly one field of type `T`, then
+it has the same layout and ABI as `T` (even if `T` is a ZST when monomorphized).
+
+Like transparent `struct`s, transparent `union`s are FFI-safe if and only if
+their underlying representation type is also FFI-safe.
+
+A `union` may not be eligible for the same nonnull-style optimizations that a
+`struct` or `enum` (with the same fields) is eligible for. Adding
+`#[repr(transparent)]` to `union` does not change this. To give a more concrete
+example, it is unspecified whether `size_of::<T>()` is equal to
+`size_of::<Option<T>>()`, where `T` is a `union` (regardless of whether or not
+it is transparent). The Rust compiler is free to perform this optimization if
+possible, but is not required to, and different compiler versions may differ in
+their application of these optimizations.
+"##,
+ },
+ Lint {
+ label: "try_blocks",
+ description: r##"# `try_blocks`
+
+The tracking issue for this feature is: [#31436]
+
+[#31436]: https://github.com/rust-lang/rust/issues/31436
+
+------------------------
+
+The `try_blocks` feature adds support for `try` blocks. A `try`
+block creates a new scope one can use the `?` operator in.
+
+```rust,edition2018
+#![feature(try_blocks)]
+
+use std::num::ParseIntError;
+
+let result: Result<i32, ParseIntError> = try {
+ "1".parse::<i32>()?
+ + "2".parse::<i32>()?
+ + "3".parse::<i32>()?
+};
+assert_eq!(result, Ok(6));
+
+let result: Result<i32, ParseIntError> = try {
+ "1".parse::<i32>()?
+ + "foo".parse::<i32>()?
+ + "3".parse::<i32>()?
+};
+assert!(result.is_err());
+```
+"##,
+ },
+ Lint {
+ label: "type_changing_struct_update",
+ description: r##"# `type_changing_struct_update`
+
+The tracking issue for this feature is: [#86555]
+
+[#86555]: https://github.com/rust-lang/rust/issues/86555
+
+------------------------
+
+This implements [RFC2528]. When turned on, you can create instances of the same struct
+that have different generic type or lifetime parameters.
+
+[RFC2528]: https://github.com/rust-lang/rfcs/blob/master/text/2528-type-changing-struct-update-syntax.md
+
+```rust
+#![allow(unused_variables, dead_code)]
+#![feature(type_changing_struct_update)]
+
+fn main () {
+ struct Foo<T, U> {
+ field1: T,
+ field2: U,
+ }
+
+ let base: Foo<String, i32> = Foo {
+ field1: String::from("hello"),
+ field2: 1234,
+ };
+ let updated: Foo<f64, i32> = Foo {
+ field1: 3.14,
+ ..base
+ };
+}
+```
+"##,
+ },
+ Lint {
+ label: "unboxed_closures",
+ description: r##"# `unboxed_closures`
+
+The tracking issue for this feature is [#29625]
+
+See Also: [`fn_traits`](../library-features/fn-traits.md)
+
+[#29625]: https://github.com/rust-lang/rust/issues/29625
+
+----
+
+The `unboxed_closures` feature allows you to write functions using the `"rust-call"` ABI,
+required for implementing the [`Fn*`] family of traits. `"rust-call"` functions must have
+exactly one (non self) argument, a tuple representing the argument list.
+
+[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+
+```rust
+#![feature(unboxed_closures)]
+
+extern "rust-call" fn add_args(args: (u32, u32)) -> u32 {
+ args.0 + args.1
+}
+
+fn main() {}
+```
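+
+Together with the companion `fn_traits` feature (linked above), this ABI is what lets you
+implement the `Fn*` traits for your own types. A minimal sketch, with the `Adder` type
+purely illustrative:
+
+```rust
+#![feature(unboxed_closures, fn_traits)]
+
+struct Adder {
+    amount: u32,
+}
+
+// `FnOnce` is the most general of the `Fn*` traits; its single argument is the
+// tuple of call arguments, as required by the `"rust-call"` ABI.
+impl FnOnce<(u32,)> for Adder {
+    type Output = u32;
+
+    extern "rust-call" fn call_once(self, args: (u32,)) -> u32 {
+        self.amount + args.0
+    }
+}
+
+fn main() {
+    let add_three = Adder { amount: 3 };
+    assert_eq!(add_three(4), 7);
+}
+```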
+"##,
+ },
+ Lint {
+ label: "unsized_locals",
+ description: r##"# `unsized_locals`
+
+The tracking issue for this feature is: [#48055]
+
+[#48055]: https://github.com/rust-lang/rust/issues/48055
+
+------------------------
+
+This implements [RFC1909]. When turned on, you can have unsized arguments and locals:
+
+[RFC1909]: https://github.com/rust-lang/rfcs/blob/master/text/1909-unsized-rvalues.md
+
+```rust
+#![allow(incomplete_features)]
+#![feature(unsized_locals, unsized_fn_params)]
+
+use std::any::Any;
+
+fn main() {
+ let x: Box<dyn Any> = Box::new(42);
+ let x: dyn Any = *x;
+ // ^ unsized local variable
+ // ^^ unsized temporary
+ foo(x);
+}
+
+fn foo(_: dyn Any) {}
+// ^^^^^^ unsized argument
+```
+
+The RFC still forbids the following unsized expressions:
+
+```rust,compile_fail
+#![feature(unsized_locals)]
+
+use std::any::Any;
+
+struct MyStruct<T: ?Sized> {
+ content: T,
+}
+
+struct MyTupleStruct<T: ?Sized>(T);
+
+fn answer() -> Box<dyn Any> {
+ Box::new(42)
+}
+
+fn main() {
+ // You CANNOT have unsized statics.
+ static X: dyn Any = *answer(); // ERROR
+ const Y: dyn Any = *answer(); // ERROR
+
+ // You CANNOT have struct initialized unsized.
+ MyStruct { content: *answer() }; // ERROR
+ MyTupleStruct(*answer()); // ERROR
+ (42, *answer()); // ERROR
+
+ // You CANNOT have unsized return types.
+ fn my_function() -> dyn Any { *answer() } // ERROR
+
+ // You CAN have unsized local variables...
+ let mut x: dyn Any = *answer(); // OK
+ // ...but you CANNOT reassign to them.
+ x = *answer(); // ERROR
+
+ // You CANNOT even initialize them separately.
+ let y: dyn Any; // OK
+ y = *answer(); // ERROR
+
+ // Not mentioned in the RFC, but by-move captured variables are also Sized.
+ let x: dyn Any = *answer();
+ (move || { // ERROR
+ let y = x;
+ })();
+
+ // You CAN create a closure with unsized arguments,
+ // but you CANNOT call it.
+ // This is an implementation detail and may be changed in the future.
+ let f = |x: dyn Any| {};
+ f(*answer()); // ERROR
+}
+```
+
+## By-value trait objects
+
+With this feature, you can have by-value `self` arguments without `Self: Sized` bounds.
+
+```rust
+#![feature(unsized_fn_params)]
+
+trait Foo {
+ fn foo(self) {}
+}
+
+impl<T: ?Sized> Foo for T {}
+
+fn main() {
+ let slice: Box<[i32]> = Box::new([1, 2, 3]);
+ <[i32] as Foo>::foo(*slice);
+}
+```
+
+And `Foo` will also be object-safe.
+
+```rust
+#![feature(unsized_fn_params)]
+
+trait Foo {
+ fn foo(self) {}
+}
+
+impl<T: ?Sized> Foo for T {}
+
+fn main () {
+ let slice: Box<dyn Foo> = Box::new([1, 2, 3]);
+ // doesn't compile yet
+ <dyn Foo as Foo>::foo(*slice);
+}
+```
+
+One of the objectives of this feature is to allow `Box<dyn FnOnce>`.
+
+## Variable length arrays
+
+The RFC also describes an extension to the array literal syntax: `[e; dyn n]`. In the syntax, `n` isn't necessarily a constant expression. The array is dynamically allocated on the stack and has the type of `[T]`, instead of `[T; n]`.
+
+```rust,ignore (not-yet-implemented)
+#![feature(unsized_locals)]
+
+fn mergesort<T: Ord>(a: &mut [T]) {
+ let mut tmp = [T; dyn a.len()];
+ // ...
+}
+
+fn main() {
+ let mut a = [3, 1, 5, 6];
+ mergesort(&mut a);
+ assert_eq!(a, [1, 3, 5, 6]);
+}
+```
+
+VLAs are not implemented yet. The syntax isn't final, either. We may need an alternative syntax for Rust 2015 because, in Rust 2015, expressions like `[e; dyn(1)]` would be ambiguous. One possible alternative proposed in the RFC is `[e; n]`: if `n` captures one or more local variables, then it is considered as `[e; dyn n]`.
+
+## Advisory on stack usage
+
+It's advised not to casually use the `#![feature(unsized_locals)]` feature. Typical use-cases are:
+
+- When you need by-value trait objects.
+- When you really need a fast allocation of small temporary arrays.
+
+Another pitfall is repetitive allocation and temporaries. Currently the compiler simply extends the stack frame every time it encounters an unsized assignment. So for example, the code
+
+```rust
+#![feature(unsized_locals)]
+
+fn main() {
+ let x: Box<[i32]> = Box::new([1, 2, 3, 4, 5]);
+ let _x = {{{{{{{{{{*x}}}}}}}}}};
+}
+```
+
+and the code
+
+```rust
+#![feature(unsized_locals)]
+
+fn main() {
+ for _ in 0..10 {
+ let x: Box<[i32]> = Box::new([1, 2, 3, 4, 5]);
+ let _x = *x;
+ }
+}
+```
+
+will unnecessarily extend the stack frame.
+"##,
+ },
+ Lint {
+ label: "unsized_tuple_coercion",
+ description: r##"# `unsized_tuple_coercion`
+
+The tracking issue for this feature is: [#42877]
+
+[#42877]: https://github.com/rust-lang/rust/issues/42877
+
+------------------------
+
+This is a part of [RFC0401]. According to the RFC, there should be an implementation like this:
+
+```rust,ignore (partial-example)
+impl<..., T, U: ?Sized> Unsized<(..., U)> for (..., T) where T: Unsized<U> {}
+```
+
+This implementation is currently gated behind `#![feature(unsized_tuple_coercion)]` to avoid insta-stability. Therefore you can use it like this:
+
+```rust
+#![feature(unsized_tuple_coercion)]
+
+fn main() {
+ let x : ([i32; 3], [i32; 3]) = ([1, 2, 3], [4, 5, 6]);
+ let y : &([i32; 3], [i32]) = &x;
+ assert_eq!(y.1[0], 4);
+}
+```
+
+[RFC0401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md
+"##,
+ },
+ Lint {
+ label: "update_panic_count",
+ description: r##"# `update_panic_count`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_c",
+ description: r##"# `windows_c`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_handle",
+ description: r##"# `windows_handle`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_net",
+ description: r##"# `windows_net`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_stdio",
+ description: r##"# `windows_stdio`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+"##,
+ },
+];
+
+pub const CLIPPY_LINTS: &[Lint] = &[
+ Lint {
+ label: "clippy::absurd_extreme_comparisons",
+ description: r##"Checks for comparisons where one side of the relation is
+either the minimum or maximum value for its type and warns if it involves a
+case that is always true or always false. Only integer and boolean types are
+checked."##,
+ },
+ Lint {
+ label: "clippy::almost_swapped",
+ description: r##"Checks for `foo = bar; bar = foo` sequences."##,
+ },
+ Lint {
+ label: "clippy::approx_constant",
+ description: r##"Checks for floating point literals that approximate
+constants which are defined in
+[`std::f32::consts`](https://doc.rust-lang.org/stable/std/f32/consts/#constants)
+or
+[`std::f64::consts`](https://doc.rust-lang.org/stable/std/f64/consts/#constants),
+respectively, suggesting to use the predefined constant."##,
+ },
+ Lint {
+ label: "clippy::as_conversions",
+ description: r##"Checks for usage of `as` conversions.
+
+Note that this lint is specialized in linting *every single* use of `as`
+regardless of whether good alternatives exist or not.
+If you want more precise lints for `as`, please consider using these separate lints:
+`unnecessary_cast`, `cast_lossless/possible_truncation/possible_wrap/precision_loss/sign_loss`,
+`fn_to_numeric_cast(_with_truncation)`, `char_lit_as_u8`, `ref_to_mut` and `ptr_as_ptr`.
+There is a good explanation of why this lint works this way and how it is useful
+[in this issue](https://github.com/rust-lang/rust-clippy/issues/5122)."##,
+ },
+ Lint {
+ label: "clippy::assertions_on_constants",
+ description: r##"Checks for `assert!(true)` and `assert!(false)` calls."##,
+ },
+ Lint {
+ label: "clippy::assign_op_pattern",
+ description: r##"Checks for `a = a op b` or `a = b commutative_op a`
+patterns.
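+
+A small illustrative sketch of the pattern and the rewrite the lint suggests (identifiers are arbitrary):
+
+```rust
+let mut a = 5;
+let b = 2;
+// Flagged: `a = a op b` pattern.
+a = a + b;
+// Preferred: compound assignment operator.
+a += b;
+```
+"##,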
+ },
+ Lint {
+ label: "clippy::assign_ops",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::async_yields_async",
+ description: r##"Checks for async blocks that yield values of types
+that can themselves be awaited."##,
+ },
+ Lint {
+ label: "clippy::await_holding_lock",
+ description: r##"Checks for calls to await while holding a
+non-async-aware MutexGuard."##,
+ },
+ Lint {
+ label: "clippy::await_holding_refcell_ref",
+ description: r##"Checks for calls to await while holding a
+`RefCell` `Ref` or `RefMut`."##,
+ },
+ Lint {
+ label: "clippy::bad_bit_mask",
+ description: r##"Checks for incompatible bit masks in comparisons.
+
+The formula for detecting if an expression of the type `_ <bit_op> m
+<cmp_op> c` (where `<bit_op>` is one of {`&`, `|`} and `<cmp_op>` is one of
+{`==`, `!=`, `<`, `<=`, `>`, `>=`}) can be determined from the following
+table:
+
+|Comparison |Bit Op|Example |is always|Formula |
+|------------|------|------------|---------|----------------------|
+|`==` or `!=`| `&` |`x & 2 == 3`|`false` |`c & m != c` |
+|`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
+|`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
+|`==` or `!=`| `|` |`x | 1 == 0`|`false` |`c | m != c` |
+|`<` or `>=`| `|` |`x | 1 < 1` |`false` |`m >= c` |
+|`<=` or `>` | `|` |`x | 1 > 0` |`true` |`m > c` |"##,
+ },
+ Lint {
+ label: "clippy::bind_instead_of_map",
+ description: r##"Checks for usage of `_.and_then(|x| Some(y))`, `_.and_then(|x| Ok(y))` or
+`_.or_else(|x| Err(y))`.
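+
+A small illustrative sketch of the pattern and the rewrite the lint suggests (identifiers are arbitrary):
+
+```rust
+let opt = Some(1);
+// Flagged: `and_then` that always wraps the result in `Some`.
+let _ = opt.and_then(|x| Some(x + 1));
+// Preferred: `map` expresses the same thing directly.
+let _ = opt.map(|x| x + 1);
+```
+"##,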
+ },
+ Lint {
+ label: "clippy::blacklisted_name",
+ description: r##"Checks for usage of blacklisted names for variables, such
+as `foo`."##,
+ },
+ Lint {
+ label: "clippy::blanket_clippy_restriction_lints",
+ description: r##"Checks for `warn`/`deny`/`forbid` attributes targeting the whole clippy::restriction category."##,
+ },
+ Lint {
+ label: "clippy::blocks_in_if_conditions",
+ description: r##"Checks for `if` conditions that use blocks containing an
+expression, statements or conditions that use closures with blocks."##,
+ },
+ Lint {
+ label: "clippy::bool_assert_comparison",
+ description: r##"This lint warns about boolean comparisons in assert-like macros."##,
+ },
+ Lint {
+ label: "clippy::bool_comparison",
+ description: r##"Checks for expressions of the form `x == true`,
+`x != true` and order comparisons such as `x < true` (or vice versa) and
+suggest using the variable directly."##,
+ },
+ Lint {
+ label: "clippy::borrow_interior_mutable_const",
+ description: r##"Checks if `const` items which is interior mutable (e.g.,
+contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.) has been borrowed directly."##,
+ },
+ Lint {
+ label: "clippy::borrowed_box",
+ description: r##"Checks for use of `&Box<T>` anywhere in the code.
+Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
+ },
+ Lint {
+ label: "clippy::box_collection",
+ description: r##"Checks for use of `Box<T>` where T is a collection such as Vec anywhere in the code.
+Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
+ },
+ Lint {
+ label: "clippy::boxed_local",
+ description: r##"Checks for usage of `Box<T>` where an unboxed `T` would
+work fine."##,
+ },
+ Lint {
+ label: "clippy::branches_sharing_code",
+ description: r##"Checks if the `if` and `else` block contain shared code that can be
+moved out of the blocks."##,
+ },
+ Lint {
+ label: "clippy::builtin_type_shadow",
+ description: r##"Warns if a generic shadows a built-in type."##,
+ },
+ Lint {
+ label: "clippy::bytes_nth",
+ description: r##"Checks for the use of `.bytes().nth()`."##,
+ },
+ Lint {
+ label: "clippy::cargo_common_metadata",
+ description: r##"Checks to see if all common metadata is defined in
+`Cargo.toml`. See: https://rust-lang-nursery.github.io/api-guidelines/documentation.html#cargotoml-includes-all-common-metadata-c-metadata"##,
+ },
+ Lint {
+ label: "clippy::case_sensitive_file_extension_comparisons",
+ description: r##"Checks for calls to `ends_with` with possible file extensions
+and suggests to use a case-insensitive approach instead."##,
+ },
+ Lint {
+ label: "clippy::cast_lossless",
+ description: r##"Checks for casts between numerical types that may
+be replaced by safe conversion functions."##,
+ },
+ Lint {
+ label: "clippy::cast_possible_truncation",
+ description: r##"Checks for casts between numerical types that may
+truncate large values. This is expected behavior, so the cast is `Allow` by
+default."##,
+ },
+ Lint {
+ label: "clippy::cast_possible_wrap",
+ description: r##"Checks for casts from an unsigned type to a signed type of
+the same size. Performing such a cast is a 'no-op' for the compiler,
+i.e., nothing is changed at the bit level, and the binary representation of
+the value is reinterpreted. This can cause wrapping if the value is too big
+for the target signed type. However, the cast works as defined, so this lint
+is `Allow` by default."##,
+ },
+ Lint {
+ label: "clippy::cast_precision_loss",
+ description: r##"Checks for casts from any numerical to a float type where
+the receiving type cannot store all values from the original type without
+rounding errors. This possible rounding is to be expected, so this lint is
+`Allow` by default.
+
+Basically, this warns on casting any integer with 32 or more bits to `f32`
+or any 64-bit integer to `f64`."##,
+ },
+ Lint {
+ label: "clippy::cast_ptr_alignment",
+ description: r##"Checks for casts, using `as` or `pointer::cast`,
+from a less-strictly-aligned pointer to a more-strictly-aligned pointer"##,
+ },
+ Lint {
+ label: "clippy::cast_ref_to_mut",
+ description: r##"Checks for casts of `&T` to `&mut T` anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::cast_sign_loss",
+ description: r##"Checks for casts from a signed to an unsigned numerical
+type. In this case, negative values wrap around to large positive values,
+which can be quite surprising in practice. However, as the cast works as
+defined, this lint is `Allow` by default."##,
+ },
+ Lint {
+ label: "clippy::char_lit_as_u8",
+ description: r##"Checks for expressions where a character literal is cast
+to `u8` and suggests using a byte literal instead."##,
+ },
+ Lint {
+ label: "clippy::chars_last_cmp",
+ description: r##"Checks for usage of `_.chars().last()` or
+`_.chars().next_back()` on a `str` to check if it ends with a given char."##,
+ },
+ Lint {
+ label: "clippy::chars_next_cmp",
+ description: r##"Checks for usage of `.chars().next()` on a `str` to check
+if it starts with a given char."##,
+ },
+ Lint {
+ label: "clippy::checked_conversions",
+ description: r##"Checks for explicit bounds checking when casting."##,
+ },
+ Lint {
+ label: "clippy::clone_double_ref",
+ description: r##"Checks for usage of `.clone()` on an `&&T`."##,
+ },
+ Lint {
+ label: "clippy::clone_on_copy",
+ description: r##"Checks for usage of `.clone()` on a `Copy` type."##,
+ },
+ Lint {
+ label: "clippy::clone_on_ref_ptr",
+ description: r##"Checks for usage of `.clone()` on a ref-counted pointer,
+(`Rc`, `Arc`, `rc::Weak`, or `sync::Weak`), and suggests calling Clone via unified
+function syntax instead (e.g., `Rc::clone(&foo)`)."##,
+ },
+ Lint {
+ label: "clippy::cloned_instead_of_copied",
+ description: r##"Checks for usages of `cloned()` on an `Iterator` or `Option` where
+`copied()` could be used instead."##,
+ },
+ Lint { label: "clippy::cmp_nan", description: r##"Checks for comparisons to NaN."## },
+ Lint {
+ label: "clippy::cmp_null",
+ description: r##"This lint checks for equality comparisons with `ptr::null`"##,
+ },
+ Lint {
+ label: "clippy::cmp_owned",
+ description: r##"Checks for conversions to owned values just for the sake
+of a comparison."##,
+ },
+ Lint {
+ label: "clippy::cognitive_complexity",
+ description: r##"Checks for methods with high cognitive complexity."##,
+ },
+ Lint {
+ label: "clippy::collapsible_else_if",
+ description: r##"Checks for collapsible `else { if ... }` expressions
+that can be collapsed to `else if ...`."##,
+ },
+ Lint {
+ label: "clippy::collapsible_if",
+ description: r##"Checks for nested `if` statements which can be collapsed
+by `&&`-combining their conditions."##,
+ },
+ Lint {
+ label: "clippy::collapsible_match",
+ description: r##"Finds nested `match` or `if let` expressions where the patterns may be collapsed together
+without adding any branches.
+
+Note that this lint is not intended to find _all_ cases where nested match patterns can be merged, but only
+cases where merging would most likely make the code more readable."##,
+ },
+ Lint {
+ label: "clippy::comparison_chain",
+ description: r##"Checks comparison chains written with `if` that can be
+rewritten with `match` and `cmp`."##,
+ },
+ Lint {
+ label: "clippy::comparison_to_empty",
+ description: r##"Checks for comparing to an empty slice such as `` or `[]`,
+and suggests using `.is_empty()` where applicable."##,
+ },
+ Lint {
+ label: "clippy::copy_iterator",
+ description: r##"Checks for types that implement `Copy` as well as
+`Iterator`."##,
+ },
+ Lint {
+ label: "clippy::create_dir",
+ description: r##"Checks usage of `std::fs::create_dir` and suggest using `std::fs::create_dir_all` instead."##,
+ },
+ Lint {
+ label: "clippy::crosspointer_transmute",
+ description: r##"Checks for transmutes between a type `T` and `*T`."##,
+ },
+ Lint { label: "clippy::dbg_macro", description: r##"Checks for usage of dbg!() macro."## },
+ Lint {
+ label: "clippy::debug_assert_with_mut_call",
+ description: r##"Checks for function/method calls with a mutable
+parameter in `debug_assert!`, `debug_assert_eq!` and `debug_assert_ne!` macros."##,
+ },
+ Lint {
+ label: "clippy::decimal_literal_representation",
+ description: r##"Warns if there is a better representation for a numeric literal."##,
+ },
+ Lint {
+ label: "clippy::declare_interior_mutable_const",
+ description: r##"Checks for declaration of `const` items which is interior
+mutable (e.g., contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.)."##,
+ },
+ Lint {
+ label: "clippy::default_numeric_fallback",
+ description: r##"Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
+inference.
+
+Default numeric fallback means that if numeric types have not yet been bound to concrete
+types at the end of type inference, then integer type is bound to `i32`, and similarly
+floating type is bound to `f64`.
+
+See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-int-fallback.md) for more information about the fallback.
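+
+A small illustrative sketch of what the lint flags (identifiers are arbitrary):
+
+```rust
+// Flagged: the literal is unconstrained, so it silently falls back to `i32`.
+let i = 10;
+// Preferred: state the intended type explicitly.
+let i = 10_i32;
+```
+"##,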
+ },
+ Lint {
+ label: "clippy::default_trait_access",
+ description: r##"Checks for literal calls to `Default::default()`."##,
+ },
+ Lint {
+ label: "clippy::deprecated_cfg_attr",
+ description: r##"Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
+with `#[rustfmt::skip]`."##,
+ },
+ Lint {
+ label: "clippy::deprecated_semver",
+ description: r##"Checks for `#[deprecated]` annotations with a `since`
+field that is not a valid semantic version."##,
+ },
+ Lint {
+ label: "clippy::deref_addrof",
+ description: r##"Checks for usage of `*&` and `*&mut` in expressions."##,
+ },
+ Lint {
+ label: "clippy::derivable_impls",
+ description: r##"Detects manual `std::default::Default` implementations that are identical to a derived implementation."##,
+ },
+ Lint {
+ label: "clippy::derive_hash_xor_eq",
+ description: r##"Checks for deriving `Hash` but implementing `PartialEq`
+explicitly or vice versa."##,
+ },
+ Lint {
+ label: "clippy::derive_ord_xor_partial_ord",
+ description: r##"Checks for deriving `Ord` but implementing `PartialOrd`
+explicitly or vice versa."##,
+ },
+ Lint {
+ label: "clippy::disallowed_methods",
+ description: r##"Denies the configured methods and functions in clippy.toml"##,
+ },
+ Lint {
+ label: "clippy::disallowed_script_idents",
+ description: r##"Checks for usage of unicode scripts other than those explicitly allowed
+by the lint config.
+
+This lint doesn't take into account non-text scripts such as `Unknown` and `Linear_A`.
+It also ignores the `Common` script type.
+While configuring, be sure to use official script name [aliases] from
+[the list of supported scripts][supported_scripts].
+
+See also: [`non_ascii_idents`].
+
+[aliases]: http://www.unicode.org/reports/tr24/tr24-31.html#Script_Value_Aliases
+[supported_scripts]: https://www.unicode.org/iso15924/iso15924-codes.html"##,
+ },
+ Lint {
+ label: "clippy::disallowed_types",
+ description: r##"Denies the configured types in clippy.toml."##,
+ },
+ Lint {
+ label: "clippy::diverging_sub_expression",
+ description: r##"Checks for diverging calls that are not match arms or
+statements."##,
+ },
+ Lint {
+ label: "clippy::doc_markdown",
+ description: r##"Checks for the presence of `_`, `::` or camel-case words
+outside ticks in documentation."##,
+ },
+ Lint {
+ label: "clippy::double_comparisons",
+ description: r##"Checks for double comparisons that could be simplified to a single expression."##,
+ },
+ Lint {
+ label: "clippy::double_must_use",
+ description: r##"Checks for a `#[must_use]` attribute without
+further information on functions and methods that return a type already
+marked as `#[must_use]`."##,
+ },
+ Lint {
+ label: "clippy::double_neg",
+ description: r##"Detects expressions of the form `--x`."##,
+ },
+ Lint {
+ label: "clippy::double_parens",
+ description: r##"Checks for unnecessary double parentheses."##,
+ },
+ Lint {
+ label: "clippy::drop_copy",
+ description: r##"Checks for calls to `std::mem::drop` with a value
+that derives the Copy trait"##,
+ },
+ Lint {
+ label: "clippy::drop_ref",
+ description: r##"Checks for calls to `std::mem::drop` with a reference
+instead of an owned value."##,
+ },
+ Lint {
+ label: "clippy::duplicate_underscore_argument",
+ description: r##"Checks for function arguments having the similar names
+differing by an underscore."##,
+ },
+ Lint {
+ label: "clippy::duration_subsec",
+ description: r##"Checks for calculation of subsecond microseconds or milliseconds
+from other `Duration` methods."##,
+ },
+ Lint {
+ label: "clippy::else_if_without_else",
+ description: r##"Checks for usage of if expressions with an `else if` branch,
+but without a final `else` branch."##,
+ },
+ Lint {
+ label: "clippy::empty_enum",
+ description: r##"Checks for `enum`s with no variants.
+
+As of this writing, the `never_type` is still a
+nightly-only experimental API. Therefore, this lint is only triggered
+if the `never_type` is enabled."##,
+ },
+ Lint {
+ label: "clippy::empty_line_after_outer_attr",
+ description: r##"Checks for empty lines after outer attributes"##,
+ },
+ Lint { label: "clippy::empty_loop", description: r##"Checks for empty `loop` expressions."## },
+ Lint {
+ label: "clippy::enum_clike_unportable_variant",
+ description: r##"Checks for C-like enumerations that are
+`repr(isize/usize)` and have values that don't fit into an `i32`."##,
+ },
+ Lint { label: "clippy::enum_glob_use", description: r##"Checks for `use Enum::*`."## },
+ Lint {
+ label: "clippy::enum_variant_names",
+ description: r##"Detects enumeration variants that are prefixed or suffixed
+by the same characters."##,
+ },
+ Lint {
+ label: "clippy::eq_op",
+ description: r##"Checks for equal operands to comparison, logical and
+bitwise, difference and division binary operators (`==`, `>`, etc., `&&`,
+`||`, `&`, `|`, `^`, `-` and `/`)."##,
+ },
+ Lint {
+ label: "clippy::equatable_if_let",
+ description: r##"Checks for pattern matchings that can be expressed using equality."##,
+ },
+ Lint {
+ label: "clippy::erasing_op",
+ description: r##"Checks for erasing operations, e.g., `x * 0`."##,
+ },
+ Lint {
+ label: "clippy::eval_order_dependence",
+ description: r##"Checks for a read and a write to the same variable where
+whether the read occurs before or after the write depends on the evaluation
+order of sub-expressions."##,
+ },
+ Lint {
+ label: "clippy::excessive_precision",
+ description: r##"Checks for float literals with a precision greater
+than that supported by the underlying type."##,
+ },
+ Lint {
+ label: "clippy::exhaustive_enums",
+ description: r##"Warns on any exported `enum`s that are not tagged `#[non_exhaustive]`"##,
+ },
+ Lint {
+ label: "clippy::exhaustive_structs",
+ description: r##"Warns on any exported `structs`s that are not tagged `#[non_exhaustive]`"##,
+ },
+ Lint {
+ label: "clippy::exit",
+ description: r##"`exit()` terminates the program and doesn't provide a
+stack trace."##,
+ },
+ Lint {
+ label: "clippy::expect_fun_call",
+ description: r##"Checks for calls to `.expect(&format!(...))`, `.expect(foo(..))`,
+etc., and suggests to use `unwrap_or_else` instead"##,
+ },
+ Lint {
+ label: "clippy::expect_used",
+ description: r##"Checks for `.expect()` calls on `Option`s and `Result`s."##,
+ },
+ Lint {
+ label: "clippy::expl_impl_clone_on_copy",
+ description: r##"Checks for explicit `Clone` implementations for `Copy`
+types."##,
+ },
+ Lint {
+ label: "clippy::explicit_counter_loop",
+ description: r##"Checks `for` loops over slices with an explicit counter
+and suggests the use of `.enumerate()`."##,
+ },
+ Lint {
+ label: "clippy::explicit_deref_methods",
+ description: r##"Checks for explicit `deref()` or `deref_mut()` method calls."##,
+ },
+ Lint {
+ label: "clippy::explicit_into_iter_loop",
+ description: r##"Checks for loops on `y.into_iter()` where `y` will do, and
+suggests the latter."##,
+ },
+ Lint {
+ label: "clippy::explicit_iter_loop",
+ description: r##"Checks for loops on `x.iter()` where `&x` will do, and
+suggests the latter."##,
+ },
+ Lint {
+ label: "clippy::explicit_write",
+ description: r##"Checks for usage of `write!()` / `writeln()!` which can be
+replaced with `(e)print!()` / `(e)println!()`"##,
+ },
+ Lint {
+ label: "clippy::extend_from_slice",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::extend_with_drain",
+ description: r##"Checks for occurrences where one vector gets extended instead of append"##,
+ },
+ Lint {
+ label: "clippy::extra_unused_lifetimes",
+ description: r##"Checks for lifetimes in generics that are never used
+anywhere else."##,
+ },
+ Lint {
+ label: "clippy::fallible_impl_from",
+ description: r##"Checks for impls of `From<..>` that contain `panic!()` or `unwrap()`"##,
+ },
+ Lint {
+ label: "clippy::field_reassign_with_default",
+ description: r##"Checks for immediate reassignment of fields initialized
+with Default::default()."##,
+ },
+ Lint {
+ label: "clippy::filetype_is_file",
+ description: r##"Checks for `FileType::is_file()`."##,
+ },
+ Lint {
+ label: "clippy::filter_map",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::filter_map_identity",
+ description: r##"Checks for usage of `filter_map(|x| x)`."##,
+ },
+ Lint {
+ label: "clippy::filter_map_next",
+ description: r##"Checks for usage of `_.filter_map(_).next()`."##,
+ },
+ Lint {
+ label: "clippy::filter_next",
+ description: r##"Checks for usage of `_.filter(_).next()`."##,
+ },
+ Lint { label: "clippy::find_map", description: r##"Nothing. This lint has been deprecated."## },
+ Lint {
+ label: "clippy::flat_map_identity",
+ description: r##"Checks for usage of `flat_map(|x| x)`."##,
+ },
+ Lint {
+ label: "clippy::flat_map_option",
+ description: r##"Checks for usages of `Iterator::flat_map()` where `filter_map()` could be
+used instead."##,
+ },
+ Lint { label: "clippy::float_arithmetic", description: r##"Checks for float arithmetic."## },
+ Lint {
+ label: "clippy::float_cmp",
+ description: r##"Checks for (in-)equality comparisons on floating-point
+values (apart from zero), except in functions called `*eq*` (which probably
+implement equality for a type involving floats)."##,
+ },
+ Lint {
+ label: "clippy::float_cmp_const",
+ description: r##"Checks for (in-)equality comparisons on floating-point
+value and constant, except in functions called `*eq*` (which probably
+implement equality for a type involving floats)."##,
+ },
+ Lint {
+ label: "clippy::float_equality_without_abs",
+ description: r##"Checks for statements of the form `(a - b) < f32::EPSILON` or
+`(a - b) < f64::EPSILON`. Notes the missing `.abs()`."##,
+ },
+ Lint {
+ label: "clippy::fn_address_comparisons",
+ description: r##"Checks for comparisons with an address of a function item."##,
+ },
+ Lint {
+ label: "clippy::fn_params_excessive_bools",
+ description: r##"Checks for excessive use of
+bools in function definitions."##,
+ },
+ Lint {
+ label: "clippy::fn_to_numeric_cast",
+ description: r##"Checks for casts of function pointers to something other than usize"##,
+ },
+ Lint {
+ label: "clippy::fn_to_numeric_cast_any",
+ description: r##"Checks for casts of a function pointer to any integer type."##,
+ },
+ Lint {
+ label: "clippy::fn_to_numeric_cast_with_truncation",
+ description: r##"Checks for casts of a function pointer to a numeric type not wide enough to
+store address."##,
+ },
+ Lint {
+ label: "clippy::for_kv_map",
+ description: r##"Checks for iterating a map (`HashMap` or `BTreeMap`) and
+ignoring either the keys or values."##,
+ },
+ Lint {
+ label: "clippy::for_loops_over_fallibles",
+ description: r##"Checks for `for` loops over `Option` or `Result` values."##,
+ },
+ Lint {
+ label: "clippy::forget_copy",
+ description: r##"Checks for calls to `std::mem::forget` with a value that
+derives the Copy trait"##,
+ },
+ Lint {
+ label: "clippy::forget_ref",
+ description: r##"Checks for calls to `std::mem::forget` with a reference
+instead of an owned value."##,
+ },
+ Lint {
+ label: "clippy::format_in_format_args",
+ description: r##"Detects `format!` within the arguments of another macro that does
+formatting such as `format!` itself, `write!` or `println!`. Suggests
+inlining the `format!` call."##,
+ },
+ Lint {
+ label: "clippy::from_iter_instead_of_collect",
+ description: r##"Checks for `from_iter()` function calls on types that implement the `FromIterator`
+trait."##,
+ },
+ Lint {
+ label: "clippy::from_over_into",
+ description: r##"Searches for implementations of the `Into<..>` trait and suggests to implement `From<..>` instead."##,
+ },
+ Lint {
+ label: "clippy::from_str_radix_10",
+ description: r##"Checks for function invocations of the form `primitive::from_str_radix(s, 10)`"##,
+ },
+ Lint {
+ label: "clippy::future_not_send",
+ description: r##"This lint requires Future implementations returned from
+functions and methods to implement the `Send` marker trait. It is mostly
+used by library authors (public and internal) that target an audience where
+multithreaded executors are likely to be used for running these Futures."##,
+ },
+ Lint {
+ label: "clippy::get_last_with_len",
+ description: r##"Checks for using `x.get(x.len() - 1)` instead of
+`x.last()`."##,
+ },
+ Lint {
+ label: "clippy::get_unwrap",
+ description: r##"Checks for use of `.get().unwrap()` (or
+`.get_mut().unwrap()`) on a standard library type which implements `Index`"##,
+ },
+ Lint {
+ label: "clippy::identity_op",
+ description: r##"Checks for identity operations, e.g., `x + 0`."##,
+ },
+ Lint {
+ label: "clippy::if_let_mutex",
+ description: r##"Checks for `Mutex::lock` calls in `if let` expression
+with lock calls in any of the else blocks."##,
+ },
+ Lint {
+ label: "clippy::if_let_redundant_pattern_matching",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::if_not_else",
+ description: r##"Checks for usage of `!` or `!=` in an if condition with an
+else branch."##,
+ },
+ Lint {
+ label: "clippy::if_same_then_else",
+ description: r##"Checks for `if/else` with the same body as the *then* part
+and the *else* part."##,
+ },
+ Lint {
+ label: "clippy::if_then_some_else_none",
+ description: r##"Checks for if-else that could be written to `bool::then`."##,
+ },
+ Lint {
+ label: "clippy::ifs_same_cond",
+ description: r##"Checks for consecutive `if`s with the same condition."##,
+ },
+ Lint {
+ label: "clippy::implicit_clone",
+ description: r##"Checks for the usage of `_.to_owned()`, `vec.to_vec()`, or similar when calling `_.clone()` would be clearer."##,
+ },
+ Lint {
+ label: "clippy::implicit_hasher",
+ description: r##"Checks for public `impl` or `fn` missing generalization
+over different hashers and implicitly defaulting to the default hashing
+algorithm (`SipHash`)."##,
+ },
+ Lint {
+ label: "clippy::implicit_return",
+ description: r##"Checks for missing return statements at the end of a block."##,
+ },
+ Lint {
+ label: "clippy::implicit_saturating_sub",
+ description: r##"Checks for implicit saturating subtraction."##,
+ },
+ Lint {
+ label: "clippy::imprecise_flops",
+ description: r##"Looks for floating-point expressions that
+can be expressed using built-in methods to improve accuracy
+at the cost of performance."##,
+ },
+ Lint {
+ label: "clippy::inconsistent_digit_grouping",
+ description: r##"Warns if an integral or floating-point constant is
+grouped inconsistently with underscores."##,
+ },
+ Lint {
+ label: "clippy::inconsistent_struct_constructor",
+ description: r##"Checks for struct constructors where all fields are shorthand and
+the order of the field init shorthand in the constructor is inconsistent
+with the order in the struct definition."##,
+ },
+ Lint {
+ label: "clippy::index_refutable_slice",
+ description: r##"The lint checks for slice bindings in patterns that are only used to
+access individual slice values."##,
+ },
+ Lint {
+ label: "clippy::indexing_slicing",
+ description: r##"Checks for usage of indexing or slicing. Arrays are special cases, this lint
+does report on arrays if we can tell that slicing operations are in bounds and does not
+lint on constant `usize` indexing on arrays because that is handled by rustc's `const_err` lint."##,
+ },
+ Lint {
+ label: "clippy::ineffective_bit_mask",
+ description: r##"Checks for bit masks in comparisons which can be removed
+without changing the outcome. The basic structure can be seen in the
+following table:
+
+|Comparison| Bit Op |Example |equals |
+|----------|---------|-----------|-------|
+|`>` / `<=`|`|` / `^`|`x | 2 > 3`|`x > 3`|
+|`<` / `>=`|`|` / `^`|`x ^ 1 < 4`|`x < 4`|"##,
+ },
+ Lint {
+ label: "clippy::inefficient_to_string",
+ description: r##"Checks for usage of `.to_string()` on an `&&T` where
+`T` implements `ToString` directly (like `&&str` or `&&String`)."##,
+ },
+ Lint {
+ label: "clippy::infallible_destructuring_match",
+ description: r##"Checks for matches being used to destructure a single-variant enum
+or tuple struct where a `let` will suffice."##,
+ },
+ Lint {
+ label: "clippy::infinite_iter",
+ description: r##"Checks for iteration that is guaranteed to be infinite."##,
+ },
+ Lint {
+ label: "clippy::inherent_to_string",
+ description: r##"Checks for the definition of inherent methods with a signature of `to_string(&self) -> String`."##,
+ },
+ Lint {
+ label: "clippy::inherent_to_string_shadow_display",
+ description: r##"Checks for the definition of inherent methods with a signature of `to_string(&self) -> String` and if the type implementing this method also implements the `Display` trait."##,
+ },
+ Lint {
+ label: "clippy::inline_always",
+ description: r##"Checks for items annotated with `#[inline(always)]`,
+unless the annotated function is empty or simply panics."##,
+ },
+ Lint {
+ label: "clippy::inline_asm_x86_att_syntax",
+ description: r##"Checks for usage of AT&T x86 assembly syntax."##,
+ },
+ Lint {
+ label: "clippy::inline_asm_x86_intel_syntax",
+ description: r##"Checks for usage of Intel x86 assembly syntax."##,
+ },
+ Lint {
+ label: "clippy::inline_fn_without_body",
+ description: r##"Checks for `#[inline]` on trait methods without bodies"##,
+ },
+ Lint {
+ label: "clippy::inspect_for_each",
+ description: r##"Checks for usage of `inspect().for_each()`."##,
+ },
+ Lint {
+ label: "clippy::int_plus_one",
+ description: r##"Checks for usage of `x >= y + 1` or `x - 1 >= y` (and `<=`) in a block"##,
+ },
+ Lint {
+ label: "clippy::integer_arithmetic",
+ description: r##"Checks for integer arithmetic operations which could overflow or panic.
+
+Specifically, checks for any operators (`+`, `-`, `*`, `<<`, etc) which are capable
+of overflowing according to the [Rust
+Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
+or which can panic (`/`, `%`). No bounds analysis or sophisticated reasoning is
+attempted."##,
+ },
+ Lint { label: "clippy::integer_division", description: r##"Checks for division of integers"## },
+ Lint {
+ label: "clippy::into_iter_on_ref",
+ description: r##"Checks for `into_iter` calls on references which should be replaced by `iter`
+or `iter_mut`."##,
+ },
+ Lint {
+ label: "clippy::invalid_null_ptr_usage",
+ description: r##"This lint checks for invalid usages of `ptr::null`."##,
+ },
+ Lint {
+ label: "clippy::invalid_regex",
+ description: r##"Checks [regex](https://crates.io/crates/regex) creation
+(with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`) for correct
+regex syntax."##,
+ },
+ Lint {
+ label: "clippy::invalid_upcast_comparisons",
+ description: r##"Checks for comparisons where the relation is always either
+true or false, but where one side has been upcast so that the comparison is
+necessary. Only integer types are checked."##,
+ },
+ Lint {
+ label: "clippy::invisible_characters",
+ description: r##"Checks for invisible Unicode characters in the code."##,
+ },
+ Lint {
+ label: "clippy::items_after_statements",
+ description: r##"Checks for items declared after some statement in a block."##,
+ },
+ Lint {
+ label: "clippy::iter_cloned_collect",
+ description: r##"Checks for the use of `.cloned().collect()` on slice to
+create a `Vec`."##,
+ },
+ Lint {
+ label: "clippy::iter_count",
+ description: r##"Checks for the use of `.iter().count()`."##,
+ },
+ Lint { label: "clippy::iter_next_loop", description: r##"Checks for loops on `x.next()`."## },
+ Lint {
+ label: "clippy::iter_next_slice",
+ description: r##"Checks for usage of `iter().next()` on a Slice or an Array"##,
+ },
+ Lint {
+ label: "clippy::iter_not_returning_iterator",
+ description: r##"Detects methods named `iter` or `iter_mut` that do not have a return type that implements `Iterator`."##,
+ },
+ Lint {
+ label: "clippy::iter_nth",
+ description: r##"Checks for use of `.iter().nth()` (and the related
+`.iter_mut().nth()`) on standard library types with *O*(1) element access."##,
+ },
+ Lint {
+ label: "clippy::iter_nth_zero",
+ description: r##"Checks for the use of `iter.nth(0)`."##,
+ },
+ Lint {
+ label: "clippy::iter_skip_next",
+ description: r##"Checks for use of `.skip(x).next()` on iterators."##,
+ },
+ Lint {
+ label: "clippy::iterator_step_by_zero",
+ description: r##"Checks for calling `.step_by(0)` on iterators which panics."##,
+ },
+ Lint {
+ label: "clippy::just_underscores_and_digits",
+ description: r##"Checks if you have variables whose name consists of just
+underscores and digits."##,
+ },
+ Lint {
+ label: "clippy::large_const_arrays",
+ description: r##"Checks for large `const` arrays that should
+be defined as `static` instead."##,
+ },
+ Lint {
+ label: "clippy::large_digit_groups",
+ description: r##"Warns if the digits of an integral or floating-point
+constant are grouped into groups that
+are too large."##,
+ },
+ Lint {
+ label: "clippy::large_enum_variant",
+ description: r##"Checks for large size differences between variants on
+`enum`s."##,
+ },
+ Lint {
+ label: "clippy::large_stack_arrays",
+ description: r##"Checks for local arrays that may be too large."##,
+ },
+ Lint {
+ label: "clippy::large_types_passed_by_value",
+ description: r##"Checks for functions taking arguments by value, where
+the argument type is `Copy` and large enough to be worth considering
+passing by reference. Does not trigger if the function is being exported
+(because that might induce API breakage), if the parameter is declared as mutable,
+or if the argument is `self`."##,
+ },
+ Lint {
+ label: "clippy::len_without_is_empty",
+ description: r##"Checks for items that implement `.len()` but not
+`.is_empty()`."##,
+ },
+ Lint {
+ label: "clippy::len_zero",
+ description: r##"Checks for getting the length of something via `.len()`
+just to compare to zero, and suggests using `.is_empty()` where applicable."##,
+ },
+ Lint {
+ label: "clippy::let_and_return",
+ description: r##"Checks for `let`-bindings, which are subsequently
+returned."##,
+ },
+ Lint {
+ label: "clippy::let_underscore_drop",
+ description: r##"Checks for `let _ = <expr>`
+where expr has a type that implements `Drop`"##,
+ },
+ Lint {
+ label: "clippy::let_underscore_lock",
+ description: r##"Checks for `let _ = sync_lock`.
+This supports `mutex` and `rwlock` in `std::sync` and `parking_lot`."##,
+ },
+ Lint {
+ label: "clippy::let_underscore_must_use",
+ description: r##"Checks for `let _ = <expr>` where expr is `#[must_use]`"##,
+ },
+ Lint { label: "clippy::let_unit_value", description: r##"Checks for binding a unit value."## },
+ Lint {
+ label: "clippy::linkedlist",
+ description: r##"Checks for usage of any `LinkedList`, suggesting to use a
+`Vec` or a `VecDeque` (formerly called `RingBuf`)."##,
+ },
+ Lint {
+ label: "clippy::logic_bug",
+ description: r##"Checks for boolean expressions that contain terminals that
+can be eliminated."##,
+ },
+ Lint {
+ label: "clippy::lossy_float_literal",
+ description: r##"Checks for whole number float literals that
+cannot be represented as the underlying type without loss."##,
+ },
+ Lint {
+ label: "clippy::macro_use_imports",
+ description: r##"Checks for `#[macro_use] use...`."##,
+ },
+ Lint {
+ label: "clippy::main_recursion",
+ description: r##"Checks for recursion using the entrypoint."##,
+ },
+ Lint {
+ label: "clippy::manual_assert",
+ description: r##"Detects `if`-then-`panic!` that can be replaced with `assert!`."##,
+ },
+ Lint {
+ label: "clippy::manual_async_fn",
+ description: r##"It checks for manual implementations of `async` functions."##,
+ },
+ Lint {
+ label: "clippy::manual_filter_map",
+ description: r##"Checks for usage of `_.filter(_).map(_)` that can be written more simply
+as `filter_map(_)`."##,
+ },
+ Lint {
+ label: "clippy::manual_find_map",
+ description: r##"Checks for usage of `_.find(_).map(_)` that can be written more simply
+as `find_map(_)`."##,
+ },
+ Lint {
+ label: "clippy::manual_flatten",
+ description: r##"Check for unnecessary `if let` usage in a for loop
+where only the `Some` or `Ok` variant of the iterator element is used."##,
+ },
+ Lint {
+ label: "clippy::manual_map",
+ description: r##"Checks for usages of `match` which could be implemented using `map`"##,
+ },
+ Lint {
+ label: "clippy::manual_memcpy",
+ description: r##"Checks for for-loops that manually copy items between
+slices that could be optimized by having a memcpy.
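+
+A small illustrative sketch of the pattern and the rewrite the lint suggests (identifiers are arbitrary):
+
+```rust
+let src = [1u8, 2, 3, 4];
+let mut dst = [0u8; 4];
+// Flagged: element-by-element copy in a for loop.
+for i in 0..src.len() {
+    dst[i] = src[i];
+}
+// Preferred: a single slice copy, which can compile down to a memcpy.
+dst.copy_from_slice(&src);
+```
+"##,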
+ },
+ Lint {
+ label: "clippy::manual_non_exhaustive",
+ description: r##"Checks for manual implementations of the non-exhaustive pattern."##,
+ },
+ Lint {
+ label: "clippy::manual_ok_or",
+ description: r##"Finds patterns that reimplement `Option::ok_or`."##,
+ },
+ Lint {
+ label: "clippy::manual_range_contains",
+ description: r##"Checks for expressions like `x >= 3 && x < 8` that could
+be more readably expressed as `(3..8).contains(&x)`."##,
+ },
+ Lint {
+ label: "clippy::manual_saturating_arithmetic",
+ description: r##"Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`."##,
+ },
+ Lint {
+ label: "clippy::manual_split_once",
+ description: r##"Checks for usages of `str::splitn(2, _)`"##,
+ },
+ Lint {
+ label: "clippy::manual_str_repeat",
+ description: r##"Checks for manual implementations of `str::repeat`"##,
+ },
+ Lint {
+ label: "clippy::manual_strip",
+ description: r##"Suggests using `strip_{prefix,suffix}` over `str::{starts,ends}_with` and slicing using
+the pattern's length.
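+
+A small illustrative sketch of the pattern and the rewrite the lint suggests (identifiers are arbitrary):
+
+```rust
+let s = "hello, world";
+// Flagged: check with `starts_with`, then slice by the pattern's length.
+if s.starts_with("hello, ") {
+    let rest = &s["hello, ".len()..];
+    println!("{}", rest);
+}
+// Preferred: `strip_prefix` checks and slices in one step.
+if let Some(rest) = s.strip_prefix("hello, ") {
+    println!("{}", rest);
+}
+```
+"##,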
+ },
+ Lint { label: "clippy::manual_swap", description: r##"Checks for manual swapping."## },
+ Lint {
+ label: "clippy::manual_unwrap_or",
+ description: r##"Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`."##,
+ },
+ Lint {
+ label: "clippy::many_single_char_names",
+ description: r##"Checks for too many variables whose name consists of a
+single character."##,
+ },
+ Lint {
+ label: "clippy::map_clone",
+ description: r##"Checks for usage of `map(|x| x.clone())` or
+dereferencing closures for `Copy` types, on `Iterator` or `Option`,
+and suggests `cloned()` or `copied()` instead"##,
+ },
+ Lint {
+ label: "clippy::map_collect_result_unit",
+ description: r##"Checks for usage of `_.map(_).collect::<Result<(), _>()`."##,
+ },
+ Lint {
+ label: "clippy::map_entry",
+ description: r##"Checks for uses of `contains_key` + `insert` on `HashMap`
+or `BTreeMap`."##,
+ },
+ Lint {
+ label: "clippy::map_err_ignore",
+ description: r##"Checks for instances of `map_err(|_| Some::Enum)`"##,
+ },
+ Lint {
+ label: "clippy::map_flatten",
+ description: r##"Checks for usage of `_.map(_).flatten(_)` on `Iterator` and `Option`"##,
+ },
+ Lint {
+ label: "clippy::map_identity",
+ description: r##"Checks for instances of `map(f)` where `f` is the identity function."##,
+ },
+ Lint {
+ label: "clippy::map_unwrap_or",
+ description: r##"Checks for usage of `option.map(_).unwrap_or(_)` or `option.map(_).unwrap_or_else(_)` or
+`result.map(_).unwrap_or_else(_)`.
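+
+A small illustrative sketch of the pattern and the rewrite the lint suggests (identifiers are arbitrary):
+
+```rust
+let opt: Option<u32> = Some(1);
+// Flagged: `map` followed by `unwrap_or`.
+let _ = opt.map(|x| x + 1).unwrap_or(0);
+// Preferred: a single `map_or` call.
+let _ = opt.map_or(0, |x| x + 1);
+```
+"##,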
+ },
+ Lint {
+ label: "clippy::match_as_ref",
+ description: r##"Checks for match which is used to add a reference to an
+`Option` value."##,
+ },
+ Lint {
+ label: "clippy::match_bool",
+ description: r##"Checks for matches where match expression is a `bool`. It
+suggests to replace the expression with an `if...else` block."##,
+ },
+ Lint {
+ label: "clippy::match_like_matches_macro",
+ description: r##"Checks for `match` or `if let` expressions producing a
+`bool` that could be written using `matches!`
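+
+A small illustrative sketch of the pattern and the rewrite the lint suggests (identifiers are arbitrary):
+
+```rust
+let x = Some(5);
+// Flagged: a `match` that only produces a `bool`.
+let _is_some = match x {
+    Some(_) => true,
+    None => false,
+};
+// Preferred: the `matches!` macro.
+let _is_some = matches!(x, Some(_));
+```
+"##,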
+ },
+ Lint {
+ label: "clippy::match_on_vec_items",
+ description: r##"Checks for `match vec[idx]` or `match vec[n..m]`."##,
+ },
+ Lint {
+ label: "clippy::match_overlapping_arm",
+ description: r##"Checks for overlapping match arms."##,
+ },
+ Lint {
+ label: "clippy::match_ref_pats",
+ description: r##"Checks for matches where all arms match a reference,
+suggesting to remove the reference and deref the matched expression
+instead. It also checks for `if let &foo = bar` blocks."##,
+ },
+ Lint {
+ label: "clippy::match_result_ok",
+ description: r##"Checks for unnecessary `ok()` in `while let`."##,
+ },
+ Lint {
+ label: "clippy::match_same_arms",
+ description: r##"Checks for `match` with identical arm bodies."##,
+ },
+ Lint {
+ label: "clippy::match_single_binding",
+ description: r##"Checks for useless match that binds to only one value."##,
+ },
+ Lint {
+ label: "clippy::match_str_case_mismatch",
+ description: r##"Checks for `match` expressions modifying the case of a string with non-compliant arms"##,
+ },
+ Lint {
+ label: "clippy::match_wild_err_arm",
+ description: r##"Checks for arm which matches all errors with `Err(_)`
+and take drastic actions like `panic!`."##,
+ },
+ Lint {
+ label: "clippy::match_wildcard_for_single_variants",
+ description: r##"Checks for wildcard enum matches for a single variant."##,
+ },
+ Lint {
+ label: "clippy::maybe_infinite_iter",
+ description: r##"Checks for iteration that may be infinite."##,
+ },
+ Lint {
+ label: "clippy::mem_forget",
+ description: r##"Checks for usage of `std::mem::forget(t)` where `t` is
+`Drop`."##,
+ },
+ Lint {
+ label: "clippy::mem_replace_option_with_none",
+ description: r##"Checks for `mem::replace()` on an `Option` with
+`None`."##,
+ },
+ Lint {
+ label: "clippy::mem_replace_with_default",
+ description: r##"Checks for `std::mem::replace` on a value of type
+`T` with `T::default()`."##,
+ },
+ Lint {
+ label: "clippy::mem_replace_with_uninit",
+ description: r##"Checks for `mem::replace(&mut _, mem::uninitialized())`
+and `mem::replace(&mut _, mem::zeroed())`."##,
+ },
+ Lint {
+ label: "clippy::min_max",
+ description: r##"Checks for expressions where `std::cmp::min` and `max` are
+used to clamp values, but switched so that the result is constant."##,
+ },
+ Lint {
+ label: "clippy::misaligned_transmute",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::mismatched_target_os",
+ description: r##"Checks for cfg attributes having operating systems used in target family position."##,
+ },
+ Lint {
+ label: "clippy::misrefactored_assign_op",
+ description: r##"Checks for `a op= a op b` or `a op= b op a` patterns."##,
+ },
+ Lint {
+ label: "clippy::missing_const_for_fn",
+ description: r##"Suggests the use of `const` in functions and methods where possible."##,
+ },
+ Lint {
+ label: "clippy::missing_docs_in_private_items",
+ description: r##"Warns if there is missing doc for any documentable item
+(public or private)."##,
+ },
+ Lint {
+ label: "clippy::missing_enforced_import_renames",
+ description: r##"Checks for imports that do not rename the item as specified
+in the `enforce-import-renames` config option."##,
+ },
+ Lint {
+ label: "clippy::missing_errors_doc",
+ description: r##"Checks the doc comments of publicly visible functions that
+return a `Result` type and warns if there is no `# Errors` section."##,
+ },
+ Lint {
+ label: "clippy::missing_inline_in_public_items",
+ description: r##"It lints if an exported function, method, trait method with default impl,
+or trait method impl is not `#[inline]`."##,
+ },
+ Lint {
+ label: "clippy::missing_panics_doc",
+ description: r##"Checks the doc comments of publicly visible functions that
+may panic and warns if there is no `# Panics` section."##,
+ },
+ Lint {
+ label: "clippy::missing_safety_doc",
+ description: r##"Checks for the doc comments of publicly visible
+unsafe functions and warns if there is no `# Safety` section."##,
+ },
+ Lint {
+ label: "clippy::mistyped_literal_suffixes",
+ description: r##"Warns for mistyped suffix in literals"##,
+ },
+ Lint {
+ label: "clippy::mixed_case_hex_literals",
+ description: r##"Warns on hexadecimal literals with mixed-case letter
+digits."##,
+ },
+ Lint {
+ label: "clippy::mod_module_files",
+ description: r##"Checks that module layout uses only self named module files, bans mod.rs files."##,
+ },
+ Lint {
+ label: "clippy::module_inception",
+ description: r##"Checks for modules that have the same name as their
+parent module"##,
+ },
+ Lint {
+ label: "clippy::module_name_repetitions",
+ description: r##"Detects type names that are prefixed or suffixed by the
+containing module's name."##,
+ },
+ Lint { label: "clippy::modulo_arithmetic", description: r##"Checks for modulo arithmetic."## },
+ Lint {
+ label: "clippy::modulo_one",
+ description: r##"Checks for getting the remainder of a division by one or minus
+one."##,
+ },
+ Lint {
+ label: "clippy::multiple_crate_versions",
+ description: r##"Checks to see if multiple versions of a crate are being
+used."##,
+ },
+ Lint {
+ label: "clippy::multiple_inherent_impl",
+ description: r##"Checks for multiple inherent implementations of a struct"##,
+ },
+ Lint {
+ label: "clippy::must_use_candidate",
+ description: r##"Checks for public functions that have no
+`#[must_use]` attribute, but return something not already marked
+must-use, have no mutable arg and mutate no statics."##,
+ },
+ Lint {
+ label: "clippy::must_use_unit",
+ description: r##"Checks for a `#[must_use]` attribute on
+unit-returning functions and methods."##,
+ },
+ Lint {
+ label: "clippy::mut_from_ref",
+ description: r##"This lint checks for functions that take immutable
+references and return mutable ones."##,
+ },
+ Lint {
+ label: "clippy::mut_mut",
+ description: r##"Checks for instances of `mut mut` references."##,
+ },
+ Lint {
+ label: "clippy::mut_mutex_lock",
+ description: r##"Checks for `&mut Mutex::lock` calls"##,
+ },
+ Lint {
+ label: "clippy::mut_range_bound",
+ description: r##"Checks for loops which have a range bound that is a mutable variable"##,
+ },
+ Lint {
+ label: "clippy::mutable_key_type",
+ description: r##"Checks for sets/maps with mutable key types."##,
+ },
+ Lint {
+ label: "clippy::mutex_atomic",
+ description: r##"Checks for usages of `Mutex<X>` where an atomic will do."##,
+ },
+ Lint {
+ label: "clippy::mutex_integer",
+ description: r##"Checks for usages of `Mutex<X>` where `X` is an integral
+type."##,
+ },
+ Lint { label: "clippy::naive_bytecount", description: r##"Checks for naive byte counts"## },
+ Lint {
+ label: "clippy::needless_arbitrary_self_type",
+ description: r##"The lint checks for `self` in fn parameters that
+specify the `Self`-type explicitly"##,
+ },
+ Lint {
+ label: "clippy::needless_bitwise_bool",
+ description: r##"Checks for uses of bitwise and/or operators between booleans, where performance may be improved by using
+a lazy `&&` or `||` instead."##,
+ },
+ Lint {
+ label: "clippy::needless_bool",
+ description: r##"Checks for expressions of the form `if c { true } else {
+false }` (or vice versa) and suggests using the condition directly."##,
+ },
+ Lint {
+ label: "clippy::needless_borrow",
+ description: r##"Checks for address of operations (`&`) that are going to
+be dereferenced immediately by the compiler."##,
+ },
+ Lint {
+ label: "clippy::needless_borrowed_reference",
+ description: r##"Checks for bindings that destructure a reference and borrow the inner
+value with `&ref`."##,
+ },
+ Lint {
+ label: "clippy::needless_collect",
+ description: r##"Checks for functions collecting an iterator when collect
+is not needed."##,
+ },
+ Lint {
+ label: "clippy::needless_continue",
+ description: r##"The lint checks for `if`-statements appearing in loops
+that contain a `continue` statement in either their main blocks or their
+`else`-blocks, when omitting the `else`-block possibly with some
+rearrangement of code can make the code easier to understand."##,
+ },
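+ // Illustrative sketch (not part of the generated lint data): inside a loop,
+ // `if done { continue; } else { work(); }` can be written as
+ // `if done { continue; } work();`, which is the rewrite `needless_continue` suggests.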
+ Lint {
+ label: "clippy::needless_doctest_main",
+ description: r##"Checks for `fn main() { .. }` in doctests"##,
+ },
+ Lint {
+ label: "clippy::needless_for_each",
+ description: r##"Checks for usage of `for_each` that would be more simply written as a
+`for` loop."##,
+ },
+ Lint {
+ label: "clippy::needless_late_init",
+ description: r##"Checks for late initializations that can be replaced by a `let` statement
+with an initializer."##,
+ },
+ Lint {
+ label: "clippy::needless_lifetimes",
+ description: r##"Checks for lifetime annotations which can be removed by
+relying on lifetime elision."##,
+ },
+ Lint {
+ label: "clippy::needless_option_as_deref",
+ description: r##"Checks for no-op uses of Option::{as_deref,as_deref_mut},
+for example, `Option<&T>::as_deref()` returns the same type."##,
+ },
+ Lint {
+ label: "clippy::needless_pass_by_value",
+ description: r##"Checks for functions taking arguments by value, but not
+consuming them in its
+body."##,
+ },
+ Lint {
+ label: "clippy::needless_question_mark",
+ description: r##"Suggests alternatives for useless applications of `?` in terminating expressions"##,
+ },
+ Lint {
+ label: "clippy::needless_range_loop",
+ description: r##"Checks for looping over the range of `0..len` of some
+collection just to get the values by index."##,
+ },
+ Lint {
+ label: "clippy::needless_return",
+ description: r##"Checks for return statements at the end of a block."##,
+ },
+ Lint {
+ label: "clippy::needless_splitn",
+ description: r##"Checks for usages of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same."##,
+ },
+ Lint {
+ label: "clippy::needless_update",
+ description: r##"Checks for needlessly including a base struct on update
+when all fields are changed anyway.
+
+This lint is not applied to structs marked with
+[non_exhaustive](https://doc.rust-lang.org/reference/attributes/type_system.html)."##,
+ },
+ Lint {
+ label: "clippy::neg_cmp_op_on_partial_ord",
+ description: r##"Checks for the usage of negated comparison operators on types which only implement
+`PartialOrd` (e.g., `f64`)."##,
+ },
+ Lint {
+ label: "clippy::neg_multiply",
+ description: r##"Checks for multiplication by -1 as a form of negation."##,
+ },
+ Lint {
+ label: "clippy::negative_feature_names",
+ description: r##"Checks for negative feature names with prefix `no-` or `not-`"##,
+ },
+ Lint {
+ label: "clippy::never_loop",
+ description: r##"Checks for loops that will always `break`, `return` or
+`continue` an outer loop."##,
+ },
+ Lint {
+ label: "clippy::new_ret_no_self",
+ description: r##"Checks for `new` not returning a type that contains `Self`."##,
+ },
+ Lint {
+ label: "clippy::new_without_default",
+ description: r##"Checks for types with a `fn new() -> Self` method and no
+implementation of
+[`Default`](https://doc.rust-lang.org/std/default/trait.Default.html)."##,
+ },
+ Lint {
+ label: "clippy::no_effect",
+ description: r##"Checks for statements which have no effect."##,
+ },
+ Lint {
+ label: "clippy::no_effect_underscore_binding",
+ description: r##"Checks for binding to underscore prefixed variable without side-effects."##,
+ },
+ Lint {
+ label: "clippy::non_ascii_literal",
+ description: r##"Checks for non-ASCII characters in string and char literals."##,
+ },
+ Lint {
+ label: "clippy::non_octal_unix_permissions",
+ description: r##"Checks for non-octal values used to set Unix file permissions."##,
+ },
+ Lint {
+ label: "clippy::non_send_fields_in_send_ty",
+ description: r##"This lint warns about a `Send` implementation for a type that
+contains fields that are not safe to be sent across threads.
+It tries to detect fields that can cause a soundness issue
+when sent to another thread (e.g., `Rc`) while allowing `!Send` fields
+that are expected to exist in a `Send` type, such as raw pointers."##,
+ },
+ Lint {
+ label: "clippy::nonminimal_bool",
+ description: r##"Checks for boolean expressions that can be written more
+concisely."##,
+ },
+ Lint {
+ label: "clippy::nonsensical_open_options",
+ description: r##"Checks for duplicate open options as well as combinations
+that make no sense."##,
+ },
+ Lint {
+ label: "clippy::nonstandard_macro_braces",
+ description: r##"Checks that common macros are used with consistent bracing."##,
+ },
+ Lint {
+ label: "clippy::not_unsafe_ptr_arg_deref",
+ description: r##"Checks for public functions that dereference raw pointer
+arguments but are not marked `unsafe`."##,
+ },
+ Lint {
+ label: "clippy::octal_escapes",
+ description: r##"Checks for `\\0` escapes in string and byte literals that look like octal
+character escapes in C."##,
+ },
+ Lint { label: "clippy::ok_expect", description: r##"Checks for usage of `ok().expect(..)`."## },
+ Lint {
+ label: "clippy::op_ref",
+ description: r##"Checks for arguments to `==` which have their address
+taken to satisfy a bound
+and suggests dereferencing the other argument instead"##,
+ },
+ Lint {
+ label: "clippy::option_as_ref_deref",
+ description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or it's aliases (such as String::as_str)."##,
+ },
+ Lint {
+ label: "clippy::option_env_unwrap",
+ description: r##"Checks for usage of `option_env!(...).unwrap()` and
+suggests usage of the `env!` macro."##,
+ },
+ Lint {
+ label: "clippy::option_filter_map",
+ description: r##"Checks for indirect collection of populated `Option`"##,
+ },
+ Lint {
+ label: "clippy::option_if_let_else",
+ description: r##"Lints usage of `if let Some(v) = ... { y } else { x }` which is more
+idiomatically done with `Option::map_or` (if the else bit is a pure
+expression) or `Option::map_or_else` (if the else bit is an impure
+expression)."##,
+ },
+ Lint {
+ label: "clippy::option_map_or_none",
+ description: r##"Checks for usage of `_.map_or(None, _)`."##,
+ },
+ Lint {
+ label: "clippy::option_map_unit_fn",
+ description: r##"Checks for usage of `option.map(f)` where f is a function
+or closure that returns the unit type `()`."##,
+ },
+ Lint {
+ label: "clippy::option_option",
+ description: r##"Checks for use of `Option<Option<_>>` in function signatures and type
+definitions"##,
+ },
+ Lint {
+ label: "clippy::or_fun_call",
+ description: r##"Checks for calls to `.or(foo(..))`, `.unwrap_or(foo(..))`,
+etc., and suggests using `or_else`, `unwrap_or_else`, etc., or
+`unwrap_or_default` instead."##,
+ },
+ Lint {
+ label: "clippy::out_of_bounds_indexing",
+ description: r##"Checks for out of bounds array indexing with a constant
+index."##,
+ },
+ Lint {
+ label: "clippy::overflow_check_conditional",
+ description: r##"Detects classic underflow/overflow checks."##,
+ },
+ Lint { label: "clippy::panic", description: r##"Checks for usage of `panic!`."## },
+ Lint {
+ label: "clippy::panic_in_result_fn",
+ description: r##"Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result."##,
+ },
+ Lint {
+ label: "clippy::panicking_unwrap",
+ description: r##"Checks for calls of `unwrap[_err]()` that will always fail."##,
+ },
+ Lint {
+ label: "clippy::partialeq_ne_impl",
+ description: r##"Checks for manual re-implementations of `PartialEq::ne`."##,
+ },
+ Lint {
+ label: "clippy::path_buf_push_overwrite",
+ description: r##"* Checks for [push](https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.push)
+calls on `PathBuf` that can cause overwrites."##,
+ },
+ Lint {
+ label: "clippy::pattern_type_mismatch",
+ description: r##"Checks for patterns that aren't exact representations of the types
+they are applied to.
+
+To satisfy this lint, you will have to adjust either the expression that is matched
+against or the pattern itself, as well as the bindings that are introduced by the
+adjusted patterns. For matching you will have to either dereference the expression
+with the `*` operator, or amend the patterns to explicitly match against `&<pattern>`
+or `&mut <pattern>` depending on the reference mutability. For the bindings you need
+to use the inverse. You can leave them as plain bindings if you wish for the value
+to be copied, but you must use `ref mut <variable>` or `ref <variable>` to construct
+a reference into the matched structure.
+
+If you are looking for a way to learn about ownership semantics in more detail, it
+is recommended to look at IDE options available to you to highlight types, lifetimes
+and reference semantics in your code. The available tooling would expose these things
+in a general way even outside of the various pattern matching mechanics. Of course
+this lint can still be used to highlight areas of interest and ensure a good understanding
+of ownership semantics."##,
+ },
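+ // Illustrative sketch (not part of the generated lint data): with `val: &Option<i32>`,
+ // `match val { Some(x) => *x, None => 0 }` relies on match ergonomics and would trigger
+ // `pattern_type_mismatch`; matching explicitly as `match val { &Some(x) => x, &None => 0 }`
+ // (or dereferencing with `match *val { .. }`) satisfies the lint.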
+ Lint {
+ label: "clippy::possible_missing_comma",
+ description: r##"Checks for possible missing comma in an array. It lints if
+an array element is a binary operator expression and it lies on two lines."##,
+ },
+ Lint {
+ label: "clippy::precedence",
+ description: r##"Checks for operations where precedence may be unclear
+and suggests to add parentheses. Currently it catches the following:
+* mixed usage of arithmetic and bit shifting/combining operators without
+parentheses
+* a negative numeric literal (which is really a unary `-` followed by a
+numeric literal)
+ followed by a method call"##,
+ },
+ Lint {
+ label: "clippy::print_literal",
+ description: r##"This lint warns about the use of literals as `print!`/`println!` args."##,
+ },
+ Lint {
+ label: "clippy::print_stderr",
+ description: r##"Checks for printing on *stderr*. The purpose of this lint
+is to catch debugging remnants."##,
+ },
+ Lint {
+ label: "clippy::print_stdout",
+ description: r##"Checks for printing on *stdout*. The purpose of this lint
+is to catch debugging remnants."##,
+ },
+ Lint {
+ label: "clippy::print_with_newline",
+ description: r##"This lint warns when you use `print!()` with a format
+string that ends in a newline."##,
+ },
+ Lint {
+ label: "clippy::println_empty_string",
+ description: r##"This lint warns when you use `println!()` to
+print a newline."##,
+ },
+ Lint {
+ label: "clippy::ptr_arg",
+ description: r##"This lint checks for function arguments of type `&String`
+or `&Vec` unless the references are mutable. It will also suggest you
+replace `.clone()` calls with the appropriate `.to_owned()`/`to_string()`
+calls."##,
+ },
+ Lint {
+ label: "clippy::ptr_as_ptr",
+ description: r##"Checks for `as` casts between raw pointers without changing its mutability,
+namely `*const T` to `*const U` and `*mut T` to `*mut U`."##,
+ },
+ Lint { label: "clippy::ptr_eq", description: r##"Use `std::ptr::eq` when applicable"## },
+ Lint {
+ label: "clippy::ptr_offset_with_cast",
+ description: r##"Checks for usage of the `offset` pointer method with a `usize` casted to an
+`isize`."##,
+ },
+ Lint {
+ label: "clippy::pub_enum_variant_names",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::question_mark",
+ description: r##"Checks for expressions that could be replaced by the question mark operator."##,
+ },
+ Lint {
+ label: "clippy::range_minus_one",
+ description: r##"Checks for inclusive ranges where 1 is subtracted from
+the upper bound, e.g., `x..=(y-1)`."##,
+ },
+ Lint {
+ label: "clippy::range_plus_one",
+ description: r##"Checks for exclusive ranges where 1 is added to the
+upper bound, e.g., `x..(y+1)`."##,
+ },
+ Lint {
+ label: "clippy::range_step_by_zero",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::range_zip_with_len",
+ description: r##"Checks for zipping a collection with the range of
+`0.._.len()`."##,
+ },
+ Lint {
+ label: "clippy::rc_buffer",
+ description: r##"Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`."##,
+ },
+ Lint { label: "clippy::rc_mutex", description: r##"Checks for `Rc<Mutex<T>>`."## },
+ Lint {
+ label: "clippy::redundant_allocation",
+ description: r##"Checks for use of redundant allocations anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::redundant_clone",
+ description: r##"Checks for a redundant `clone()` (and its relatives) which clones an owned
+value that is going to be dropped without further use."##,
+ },
+ Lint {
+ label: "clippy::redundant_closure",
+ description: r##"Checks for closures which just call another function where
+the function can be called directly. `unsafe` functions or calls where types
+get adjusted are ignored."##,
+ },
+ Lint {
+ label: "clippy::redundant_closure_call",
+ description: r##"Detects closures called in the same expression where they
+are defined."##,
+ },
+ Lint {
+ label: "clippy::redundant_closure_for_method_calls",
+ description: r##"Checks for closures which only invoke a method on the closure
+argument and can be replaced by referencing the method directly."##,
+ },
+ Lint {
+ label: "clippy::redundant_else",
+ description: r##"Checks for `else` blocks that can be removed without changing semantics."##,
+ },
+ Lint {
+ label: "clippy::redundant_feature_names",
+ description: r##"Checks for feature names with prefix `use-`, `with-` or suffix `-support`"##,
+ },
+ Lint {
+ label: "clippy::redundant_field_names",
+ description: r##"Checks for fields in struct literals where shorthands
+could be used."##,
+ },
+ Lint {
+ label: "clippy::redundant_pattern",
+ description: r##"Checks for patterns in the form `name @ _`."##,
+ },
+ Lint {
+ label: "clippy::redundant_pattern_matching",
+ description: r##"Lint for redundant pattern matching over `Result`, `Option`,
+`std::task::Poll` or `std::net::IpAddr`"##,
+ },
+ Lint {
+ label: "clippy::redundant_pub_crate",
+ description: r##"Checks for items declared `pub(crate)` that are not crate visible because they
+are inside a private module."##,
+ },
+ Lint {
+ label: "clippy::redundant_slicing",
+ description: r##"Checks for redundant slicing expressions which use the full range, and
+do not change the type."##,
+ },
+ Lint {
+ label: "clippy::redundant_static_lifetimes",
+ description: r##"Checks for constants and statics with an explicit `'static` lifetime."##,
+ },
+ Lint {
+ label: "clippy::ref_binding_to_reference",
+ description: r##"Checks for `ref` bindings which create a reference to a reference."##,
+ },
+ Lint {
+ label: "clippy::ref_in_deref",
+ description: r##"Checks for references in expressions that use
+auto dereference."##,
+ },
+ Lint {
+ label: "clippy::ref_option_ref",
+ description: r##"Checks for usage of `&Option<&T>`."##,
+ },
+ Lint {
+ label: "clippy::regex_macro",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::repeat_once",
+ description: r##"Checks for usage of `.repeat(1)` and suggest the following method for each types.
+- `.to_string()` for `str`
+- `.clone()` for `String`
+- `.to_vec()` for `slice`
+
+The lint will evaluate constant expressions and values as arguments of `.repeat(..)` and emit a message if
+they are equivalent to `1`. (Related discussion in [rust-clippy#7306](https://github.com/rust-lang/rust-clippy/issues/7306))"##,
+ },
+ Lint {
+ label: "clippy::replace_consts",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::rest_pat_in_fully_bound_structs",
+ description: r##"Checks for unnecessary '..' pattern binding on struct when all fields are explicitly matched."##,
+ },
+ Lint {
+ label: "clippy::result_map_or_into_option",
+ description: r##"Checks for usage of `_.map_or(None, Some)`."##,
+ },
+ Lint {
+ label: "clippy::result_map_unit_fn",
+ description: r##"Checks for usage of `result.map(f)` where f is a function
+or closure that returns the unit type `()`."##,
+ },
+ Lint {
+ label: "clippy::result_unit_err",
+ description: r##"Checks for public functions that return a `Result`
+with an `Err` type of `()`. It suggests using a custom type that
+implements `std::error::Error`."##,
+ },
+ Lint {
+ label: "clippy::return_self_not_must_use",
+ description: r##"This lint warns when a method returning `Self` doesn't have the `#[must_use]` attribute."##,
+ },
+ Lint {
+ label: "clippy::reversed_empty_ranges",
+ description: r##"Checks for range expressions `x..y` where both `x` and `y`
+are constant and `x` is greater or equal to `y`."##,
+ },
+ Lint {
+ label: "clippy::same_functions_in_if_condition",
+ description: r##"Checks for consecutive `if`s with the same function call."##,
+ },
+ Lint {
+ label: "clippy::same_item_push",
+ description: r##"Checks whether a for loop is being used to push a constant
+value into a Vec."##,
+ },
+ Lint {
+ label: "clippy::same_name_method",
+ description: r##"It lints if a struct has two methods with the same name:
+one from a trait, another not from a trait."##,
+ },
+ Lint {
+ label: "clippy::search_is_some",
+ description: r##"Checks for an iterator or string search (such as `find()`,
+`position()`, or `rposition()`) followed by a call to `is_some()` or `is_none()`."##,
+ },
+ Lint {
+ label: "clippy::self_assignment",
+ description: r##"Checks for explicit self-assignments."##,
+ },
+ Lint {
+ label: "clippy::self_named_constructors",
+ description: r##"Warns when constructors have the same name as their types."##,
+ },
+ Lint {
+ label: "clippy::self_named_module_files",
+ description: r##"Checks that module layout uses only mod.rs files."##,
+ },
+ Lint {
+ label: "clippy::semicolon_if_nothing_returned",
+ description: r##"Looks for blocks of expressions and fires if the last expression returns
+`()` but is not followed by a semicolon."##,
+ },
+ Lint {
+ label: "clippy::separated_literal_suffix",
+ description: r##"Warns if literal suffixes are separated by an underscore.
+To enforce separated literal suffix style,
+see the `unseparated_literal_suffix` lint."##,
+ },
+ Lint {
+ label: "clippy::serde_api_misuse",
+ description: r##"Checks for mis-uses of the serde API."##,
+ },
+ Lint {
+ label: "clippy::shadow_reuse",
+ description: r##"Checks for bindings that shadow other bindings already in
+scope, while reusing the original value."##,
+ },
+ Lint {
+ label: "clippy::shadow_same",
+ description: r##"Checks for bindings that shadow other bindings already in
+scope, while just changing reference level or mutability."##,
+ },
+ Lint {
+ label: "clippy::shadow_unrelated",
+ description: r##"Checks for bindings that shadow other bindings already in
+scope, either without an initialization or with one that does not even use
+the original value."##,
+ },
+ Lint {
+ label: "clippy::short_circuit_statement",
+ description: r##"Checks for the use of short circuit boolean conditions as
+a
+statement."##,
+ },
+ Lint {
+ label: "clippy::should_assert_eq",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::should_implement_trait",
+ description: r##"Checks for methods that should live in a trait
+implementation of a `std` trait (see [llogiq's blog
+post](http://llogiq.github.io/2015/07/30/traits.html) for further
+information) instead of an inherent implementation."##,
+ },
+ Lint {
+ label: "clippy::similar_names",
+ description: r##"Checks for names that are very similar and thus confusing."##,
+ },
+ Lint {
+ label: "clippy::single_char_add_str",
+ description: r##"Warns when using `push_str`/`insert_str` with a single-character string literal
+where `push`/`insert` with a `char` would work fine."##,
+ },
+ Lint {
+ label: "clippy::single_char_pattern",
+ description: r##"Checks for string methods that receive a single-character
+`str` as an argument, e.g., `_.split(x)`."##,
+ },
+ Lint {
+ label: "clippy::single_component_path_imports",
+ description: r##"Checking for imports with single component use path."##,
+ },
+ Lint {
+ label: "clippy::single_element_loop",
+ description: r##"Checks whether a for loop has a single element."##,
+ },
+ Lint {
+ label: "clippy::single_match",
+ description: r##"Checks for matches with a single arm where an `if let`
+will usually suffice."##,
+ },
+ Lint {
+ label: "clippy::single_match_else",
+ description: r##"Checks for matches with two arms where an `if let else` will
+usually suffice."##,
+ },
+ Lint {
+ label: "clippy::size_of_in_element_count",
+ description: r##"Detects expressions where
+`size_of::<T>` or `size_of_val::<T>` is used as a
+count of elements of type `T`"##,
+ },
+ Lint {
+ label: "clippy::skip_while_next",
+ description: r##"Checks for usage of `_.skip_while(condition).next()`."##,
+ },
+ Lint {
+ label: "clippy::slow_vector_initialization",
+ description: r##"Checks slow zero-filled vector initialization"##,
+ },
+ Lint {
+ label: "clippy::stable_sort_primitive",
+ description: r##"When sorting primitive values (integers, bools, chars, as well
+as arrays, slices, and tuples of such items), it is better to
+use an unstable sort than a stable sort."##,
+ },
+ Lint {
+ label: "clippy::str_to_string",
+ description: r##"This lint checks for `.to_string()` method calls on values of type `&str`."##,
+ },
+ Lint {
+ label: "clippy::string_add",
+ description: r##"Checks for all instances of `x + _` where `x` is of type
+`String`, but only if [`string_add_assign`](#string_add_assign) does *not*
+match."##,
+ },
+ Lint {
+ label: "clippy::string_add_assign",
+ description: r##"Checks for string appends of the form `x = x + y` (without
+`let`!)."##,
+ },
+ Lint {
+ label: "clippy::string_extend_chars",
+ description: r##"Checks for the use of `.extend(s.chars())` where s is a
+`&str` or `String`."##,
+ },
+ Lint {
+ label: "clippy::string_from_utf8_as_bytes",
+ description: r##"Check if the string is transformed to byte array and casted back to string."##,
+ },
+ Lint {
+ label: "clippy::string_lit_as_bytes",
+ description: r##"Checks for the `as_bytes` method called on string literals
+that contain only ASCII characters."##,
+ },
+ Lint {
+ label: "clippy::string_slice",
+ description: r##"Checks for slice operations on strings"##,
+ },
+ Lint {
+ label: "clippy::string_to_string",
+ description: r##"This lint checks for `.to_string()` method calls on values of type `String`."##,
+ },
+ Lint {
+ label: "clippy::strlen_on_c_strings",
+ description: r##"Checks for usage of `libc::strlen` on a `CString` or `CStr` value,
+and suggests calling `as_bytes().len()` or `to_bytes().len()` respectively instead."##,
+ },
+ Lint {
+ label: "clippy::struct_excessive_bools",
+ description: r##"Checks for excessive
+use of bools in structs."##,
+ },
+ Lint {
+ label: "clippy::suboptimal_flops",
+ description: r##"Looks for floating-point expressions that
+can be expressed using built-in methods to improve both
+accuracy and performance."##,
+ },
+ Lint {
+ label: "clippy::suspicious_arithmetic_impl",
+ description: r##"Lints for suspicious operations in impls of arithmetic operators, e.g.
+subtracting elements in an Add impl."##,
+ },
+ Lint {
+ label: "clippy::suspicious_assignment_formatting",
+ description: r##"Checks for use of the non-existent `=*`, `=!` and `=-`
+operators."##,
+ },
+ Lint {
+ label: "clippy::suspicious_else_formatting",
+ description: r##"Checks for formatting of `else`. It lints if the `else`
+is followed immediately by a newline or the `else` seems to be missing."##,
+ },
+ Lint {
+ label: "clippy::suspicious_map",
+ description: r##"Checks for calls to `map` followed by a `count`."##,
+ },
+ Lint {
+ label: "clippy::suspicious_op_assign_impl",
+ description: r##"Lints for suspicious operations in impls of OpAssign, e.g.
+subtracting elements in an AddAssign impl."##,
+ },
+ Lint {
+ label: "clippy::suspicious_operation_groupings",
+ description: r##"Checks for unlikely usages of binary operators that are almost
+certainly typos and/or copy/paste errors, given the other usages
+of binary operators nearby."##,
+ },
+ Lint {
+ label: "clippy::suspicious_splitn",
+ description: r##"Checks for calls to [`splitn`]
+(https://doc.rust-lang.org/std/primitive.str.html#method.splitn) and
+related functions with either zero or one splits."##,
+ },
+ Lint {
+ label: "clippy::suspicious_unary_op_formatting",
+ description: r##"Checks the formatting of a unary operator on the right hand side
+of a binary operator. It lints if there is no space between the binary and unary operators,
+but there is a space between the unary and its operand."##,
+ },
+ Lint {
+ label: "clippy::tabs_in_doc_comments",
+ description: r##"Checks doc comments for usage of tab characters."##,
+ },
+ Lint {
+ label: "clippy::temporary_assignment",
+ description: r##"Checks for construction of a structure or tuple just to
+assign a value in it."##,
+ },
+ Lint {
+ label: "clippy::to_digit_is_some",
+ description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##,
+ },
+ Lint {
+ label: "clippy::to_string_in_display",
+ description: r##"Checks for uses of `to_string()` in `Display` traits."##,
+ },
+ Lint {
+ label: "clippy::to_string_in_format_args",
+ description: r##"Checks for [`ToString::to_string`](https://doc.rust-lang.org/std/string/trait.ToString.html#tymethod.to_string)
+applied to a type that implements [`Display`](https://doc.rust-lang.org/std/fmt/trait.Display.html)
+in a macro that does formatting."##,
+ },
+ Lint { label: "clippy::todo", description: r##"Checks for usage of `todo!`."## },
+ Lint {
+ label: "clippy::too_many_arguments",
+ description: r##"Checks for functions with too many parameters."##,
+ },
+ Lint {
+ label: "clippy::too_many_lines",
+ description: r##"Checks for functions with a large amount of lines."##,
+ },
+ Lint {
+ label: "clippy::toplevel_ref_arg",
+ description: r##"Checks for function arguments and let bindings denoted as
+`ref`."##,
+ },
+ Lint {
+ label: "clippy::trailing_empty_array",
+ description: r##"Displays a warning when a struct with a trailing zero-sized array is declared without a `repr` attribute."##,
+ },
+ Lint {
+ label: "clippy::trait_duplication_in_bounds",
+ description: r##"Checks for cases where generics are being used and multiple
+syntax specifications for trait bounds are used simultaneously."##,
+ },
+ Lint {
+ label: "clippy::transmute_bytes_to_str",
+ description: r##"Checks for transmutes from a `&[u8]` to a `&str`."##,
+ },
+ Lint {
+ label: "clippy::transmute_float_to_int",
+ description: r##"Checks for transmutes from a float to an integer."##,
+ },
+ Lint {
+ label: "clippy::transmute_int_to_bool",
+ description: r##"Checks for transmutes from an integer to a `bool`."##,
+ },
+ Lint {
+ label: "clippy::transmute_int_to_char",
+ description: r##"Checks for transmutes from an integer to a `char`."##,
+ },
+ Lint {
+ label: "clippy::transmute_int_to_float",
+ description: r##"Checks for transmutes from an integer to a float."##,
+ },
+ Lint {
+ label: "clippy::transmute_num_to_bytes",
+ description: r##"Checks for transmutes from a number to an array of `u8`"##,
+ },
+ Lint {
+ label: "clippy::transmute_ptr_to_ptr",
+ description: r##"Checks for transmutes from a pointer to a pointer, or
+from a reference to a reference."##,
+ },
+ Lint {
+ label: "clippy::transmute_ptr_to_ref",
+ description: r##"Checks for transmutes from a pointer to a reference."##,
+ },
+ Lint {
+ label: "clippy::transmutes_expressible_as_ptr_casts",
+ description: r##"Checks for transmutes that could be a pointer cast."##,
+ },
+ Lint {
+ label: "clippy::transmuting_null",
+ description: r##"Checks for transmute calls which would receive a null pointer."##,
+ },
+ Lint {
+ label: "clippy::trivial_regex",
+ description: r##"Checks for trivial [regex](https://crates.io/crates/regex)
+creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`)."##,
+ },
+ Lint {
+ label: "clippy::trivially_copy_pass_by_ref",
+ description: r##"Checks for functions taking arguments by reference, where
+the argument type is `Copy` and small enough to be more efficient to always
+pass by value."##,
+ },
+ Lint { label: "clippy::try_err", description: r##"Checks for usages of `Err(x)?`."## },
+ Lint {
+ label: "clippy::type_complexity",
+ description: r##"Checks for types used in structs, parameters and `let`
+declarations above a certain complexity threshold."##,
+ },
+ Lint {
+ label: "clippy::type_repetition_in_bounds",
+ description: r##"This lint warns about unnecessary type repetitions in trait bounds"##,
+ },
+ Lint {
+ label: "clippy::undocumented_unsafe_blocks",
+ description: r##"Checks for `unsafe` blocks without a `// Safety: ` comment
+explaining why the unsafe operations performed inside
+the block are safe."##,
+ },
+ Lint {
+ label: "clippy::undropped_manually_drops",
+ description: r##"Prevents the safe `std::mem::drop` function from being called on `std::mem::ManuallyDrop`."##,
+ },
+ Lint {
+ label: "clippy::unicode_not_nfc",
+ description: r##"Checks for string literals that contain Unicode in a form
+that is not equal to its
+[NFC-recomposition](http://www.unicode.org/reports/tr15/#Norm_Forms)."##,
+ },
+ Lint {
+ label: "clippy::unimplemented",
+ description: r##"Checks for usage of `unimplemented!`."##,
+ },
+ Lint {
+ label: "clippy::uninit_assumed_init",
+ description: r##"Checks for `MaybeUninit::uninit().assume_init()`."##,
+ },
+ Lint {
+ label: "clippy::uninit_vec",
+ description: r##"Checks for `set_len()` call that creates `Vec` with uninitialized elements.
+This is commonly caused by calling `set_len()` right after allocating or
+reserving a buffer with `new()`, `default()`, `with_capacity()`, or `reserve()`."##,
+ },
+ Lint {
+ label: "clippy::unit_arg",
+ description: r##"Checks for passing a unit value as an argument to a function without using a
+unit literal (`()`)."##,
+ },
+ Lint {
+ label: "clippy::unit_cmp",
+ description: r##"Checks for comparisons to unit. This includes all binary
+comparisons (like `==` and `<`) and asserts."##,
+ },
+ Lint { label: "clippy::unit_hash", description: r##"Detects `().hash(_)`."## },
+ Lint {
+ label: "clippy::unit_return_expecting_ord",
+ description: r##"Checks for functions that expect closures of type
+Fn(...) -> Ord where the implemented closure returns the unit type.
+The lint also suggests removing the semicolon at the end of the statement if present."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_cast",
+ description: r##"Checks for casts to the same type, casts of int literals to integer types
+and casts of float literals to float types."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_filter_map",
+ description: r##"Checks for `filter_map` calls which could be replaced by `filter` or `map`.
+More specifically it checks if the closure provided is only performing one of the
+filter or map operations and suggests the appropriate option."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_fold",
+ description: r##"Checks for using `fold` when a more succinct alternative exists.
+Specifically, this checks for `fold`s which could be replaced by `any`, `all`,
+`sum` or `product`."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_lazy_evaluations",
+ description: r##"As the counterpart to `or_fun_call`, this lint looks for unnecessary
+lazily evaluated closures on `Option` and `Result`.
+
+This lint suggests changing the following functions, when eager evaluation results in
+simpler code:
+ - `unwrap_or_else` to `unwrap_or`
+ - `and_then` to `and`
+ - `or_else` to `or`
+ - `get_or_insert_with` to `get_or_insert`
+ - `ok_or_else` to `ok_or`"##,
+ },
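+ // Illustrative sketch (not part of the generated lint data): `opt.unwrap_or_else(|| 0)`
+ // gains nothing from laziness because `0` is trivial to construct, so the lint suggests
+ // `opt.unwrap_or(0)`; a genuinely expensive default, e.g. `opt.unwrap_or_else(compute)`,
+ // keeps the lazy form.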
+ Lint {
+ label: "clippy::unnecessary_mut_passed",
+ description: r##"Detects passing a mutable reference to a function that only
+requires an immutable reference."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_operation",
+ description: r##"Checks for expression statements that can be reduced to a
+sub-expression."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_self_imports",
+ description: r##"Checks for imports ending in `::{self}`."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_sort_by",
+ description: r##"Detects uses of `Vec::sort_by` passing in a closure
+which compares the two arguments, either directly or indirectly."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_to_owned",
+ description: r##"Checks for unnecessary calls to [`ToOwned::to_owned`](https://doc.rust-lang.org/std/borrow/trait.ToOwned.html#tymethod.to_owned)
+and other `to_owned`-like functions."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_unwrap",
+ description: r##"Checks for calls of `unwrap[_err]()` that cannot fail."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_wraps",
+ description: r##"Checks for private functions that only return `Ok` or `Some`."##,
+ },
+ Lint {
+ label: "clippy::unneeded_field_pattern",
+ description: r##"Checks for structure field patterns bound to wildcards."##,
+ },
+ Lint {
+ label: "clippy::unneeded_wildcard_pattern",
+ description: r##"Checks for tuple patterns with a wildcard
+pattern (`_`) is next to a rest pattern (`..`).
+
+_NOTE_: While `_, ..` means there is at least one element left, `..`
+means there are 0 or more elements left. This can make a difference
+when refactoring, but shouldn't result in errors in the refactored code,
+since the wildcard pattern isn't used anyway."##,
+ },
+ Lint {
+ label: "clippy::unnested_or_patterns",
+ description: r##"Checks for unnested or-patterns, e.g., `Some(0) | Some(2)` and
+suggests replacing the pattern with a nested one, `Some(0 | 2)`.
+
+Another way to think of this is that it rewrites patterns in
+*disjunctive normal form (DNF)* into *conjunctive normal form (CNF)*."##,
+ },
+ Lint { label: "clippy::unreachable", description: r##"Checks for usage of `unreachable!`."## },
+ Lint {
+ label: "clippy::unreadable_literal",
+ description: r##"Warns if a long integral or floating-point constant does
+not contain underscores."##,
+ },
+ Lint {
+ label: "clippy::unsafe_derive_deserialize",
+ description: r##"Checks for deriving `serde::Deserialize` on a type that
+has methods using `unsafe`."##,
+ },
+ Lint {
+ label: "clippy::unsafe_removed_from_name",
+ description: r##"Checks for imports that remove unsafe from an item's
+name."##,
+ },
+ Lint {
+ label: "clippy::unsafe_vector_initialization",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unseparated_literal_suffix",
+ description: r##"Warns if literal suffixes are not separated by an
+underscore.
+To enforce unseparated literal suffix style,
+see the `separated_literal_suffix` lint."##,
+ },
+ Lint {
+ label: "clippy::unsound_collection_transmute",
+ description: r##"Checks for transmutes between collections whose
+types have different ABI, size or alignment."##,
+ },
+ Lint {
+ label: "clippy::unstable_as_mut_slice",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unstable_as_slice",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unused_async",
+ description: r##"Checks for functions that are declared `async` but have no `.await`s inside of them."##,
+ },
+ Lint {
+ label: "clippy::unused_collect",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::unused_io_amount",
+ description: r##"Checks for unused written/read amount."##,
+ },
+ Lint {
+ label: "clippy::unused_self",
+ description: r##"Checks methods that contain a `self` argument but don't use it"##,
+ },
+ Lint {
+ label: "clippy::unused_unit",
+ description: r##"Checks for unit (`()`) expressions that can be removed."##,
+ },
+ Lint {
+ label: "clippy::unusual_byte_groupings",
+ description: r##"Warns if hexadecimal or binary literals are not grouped
+by nibble or byte."##,
+ },
+ Lint {
+ label: "clippy::unwrap_in_result",
+ description: r##"Checks for functions of type `Result` that contain `expect()` or `unwrap()`"##,
+ },
+ Lint {
+ label: "clippy::unwrap_or_else_default",
+ description: r##"Checks for usages of `_.unwrap_or_else(Default::default)` on `Option` and
+`Result` values."##,
+ },
+ Lint {
+ label: "clippy::unwrap_used",
+ description: r##"Checks for `.unwrap()` calls on `Option`s and on `Result`s."##,
+ },
+ Lint {
+ label: "clippy::upper_case_acronyms",
+ description: r##"Checks for fully capitalized names and optionally names containing a capitalized acronym."##,
+ },
+ Lint {
+ label: "clippy::use_debug",
+ description: r##"Checks for use of `Debug` formatting. The purpose of this
+lint is to catch debugging remnants."##,
+ },
+ Lint {
+ label: "clippy::use_self",
+ description: r##"Checks for unnecessary repetition of structure name when a
+replacement with `Self` is applicable."##,
+ },
+ Lint {
+ label: "clippy::used_underscore_binding",
+ description: r##"Checks for the use of bindings with a single leading
+underscore."##,
+ },
+ Lint {
+ label: "clippy::useless_asref",
+ description: r##"Checks for usage of `.as_ref()` or `.as_mut()` where the
+types before and after the call are the same."##,
+ },
+ Lint {
+ label: "clippy::useless_attribute",
+ description: r##"Checks for `extern crate` and `use` items annotated with
+lint attributes.
+
+This lint permits `#[allow(unused_imports)]`, `#[allow(deprecated)]`,
+`#[allow(unreachable_pub)]`, `#[allow(clippy::wildcard_imports)]` and
+`#[allow(clippy::enum_glob_use)]` on `use` items and `#[allow(unused_imports)]` on
+`extern crate` items with a `#[macro_use]` attribute."##,
+ },
+ Lint {
+ label: "clippy::useless_conversion",
+ description: r##"Checks for `Into`, `TryInto`, `From`, `TryFrom`, or `IntoIter` calls
+which uselessly convert to the same type."##,
+ },
+ Lint {
+ label: "clippy::useless_format",
+ description: r##"Checks for the use of `format!(string literal with no
+argument)` and `format!({}, foo)` where `foo` is a string."##,
+ },
+ Lint {
+ label: "clippy::useless_let_if_seq",
+ description: r##"Checks for variable declarations immediately followed by a
+conditional assignment."##,
+ },
+ Lint {
+ label: "clippy::useless_transmute",
+ description: r##"Checks for transmutes to the original type of the object
+and transmutes that could be a cast."##,
+ },
+ Lint {
+ label: "clippy::useless_vec",
+ description: r##"Checks for usage of `&vec![..]` when using `&[..]` would
+be possible."##,
+ },
+ Lint {
+ label: "clippy::vec_box",
+ description: r##"Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
+Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
+ },
+ Lint {
+ label: "clippy::vec_init_then_push",
+ description: r##"Checks for calls to `push` immediately after creating a new `Vec`."##,
+ },
+ Lint {
+ label: "clippy::vec_resize_to_zero",
+ description: r##"Finds occurrences of `Vec::resize(0, an_int)`"##,
+ },
+ Lint {
+ label: "clippy::verbose_bit_mask",
+ description: r##"Checks for bit masks that can be replaced by a call
+to `trailing_zeros`"##,
+ },
+ Lint {
+ label: "clippy::verbose_file_reads",
+ description: r##"Checks for use of File::read_to_end and File::read_to_string."##,
+ },
+ Lint {
+ label: "clippy::vtable_address_comparisons",
+ description: r##"Checks for comparisons with an address of a trait vtable."##,
+ },
+ Lint {
+ label: "clippy::while_immutable_condition",
+ description: r##"Checks whether variables used within while loop condition
+can be (and are) mutated in the body."##,
+ },
+ Lint {
+ label: "clippy::while_let_loop",
+ description: r##"Detects `loop + match` combinations that are easier
+written as a `while let` loop."##,
+ },
+ Lint {
+ label: "clippy::while_let_on_iterator",
+ description: r##"Checks for `while let` expressions on iterators."##,
+ },
+ Lint {
+ label: "clippy::wildcard_dependencies",
+ description: r##"Checks for wildcard dependencies in the `Cargo.toml`."##,
+ },
+ Lint {
+ label: "clippy::wildcard_enum_match_arm",
+ description: r##"Checks for wildcard enum matches using `_`."##,
+ },
+ Lint {
+ label: "clippy::wildcard_imports",
+ description: r##"Checks for wildcard imports `use _::*`."##,
+ },
+ Lint {
+ label: "clippy::wildcard_in_or_patterns",
+ description: r##"Checks for wildcard pattern used with others patterns in same match arm."##,
+ },
+ Lint {
+ label: "clippy::write_literal",
+ description: r##"This lint warns about the use of literals as `write!`/`writeln!` args."##,
+ },
+ Lint {
+ label: "clippy::write_with_newline",
+ description: r##"This lint warns when you use `write!()` with a format
+string that
+ends in a newline."##,
+ },
+ Lint {
+ label: "clippy::writeln_empty_string",
+ description: r##"This lint warns when you use `writeln!(buf, )` to
+print a newline."##,
+ },
+ Lint {
+ label: "clippy::wrong_pub_self_convention",
+ description: r##"Nothing. This lint has been deprecated."##,
+ },
+ Lint {
+ label: "clippy::wrong_self_convention",
+ description: r##"Checks for methods with certain name prefixes and which
+doesn't match how self is taken. The actual rules are:
+
+|Prefix |Postfix |`self` taken | `self` type |
+|-------|------------|-----------------------|--------------|
+|`as_` | none |`&self` or `&mut self` | any |
+|`from_`| none | none | any |
+|`into_`| none |`self` | any |
+|`is_` | none |`&self` or none | any |
+|`to_` | `_mut` |`&mut self` | any |
+|`to_` | not `_mut` |`self` | `Copy` |
+|`to_` | not `_mut` |`&self` | not `Copy` |
+
+Note: Clippy doesn't trigger on methods with the `to_` prefix in:
+- Trait definitions.
+Clippy cannot tell if a type that implements a trait is `Copy` or not.
+- Trait implementations, when `&self` is taken.
+The method signature is controlled by the trait and often `&self` is required for all types that implement the trait
+(see e.g. the `std::string::ToString` trait).
+
+Clippy allows `Pin<&Self>` and `Pin<&mut Self>` where `&self` or `&mut self` is required.
+
+Please find more info here:
+https://rust-lang.github.io/api-guidelines/naming.html#ad-hoc-conversions-follow-as_-to_-into_-conventions-c-conv"##,
+ },
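+ // Illustrative sketch (not part of the generated lint data): per the table above,
+ // `fn as_bytes(self) -> Vec<u8>` on a non-`Copy` type would trigger the lint, since an
+ // `as_` method should take `&self` or `&mut self`, whereas `fn into_bytes(self)` is fine.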
+ Lint {
+ label: "clippy::wrong_transmute",
+ description: r##"Checks for transmutes that can't ever be correct on any
+architecture."##,
+ },
+ Lint { label: "clippy::zero_divided_by_zero", description: r##"Checks for `0.0 / 0.0`."## },
+ Lint {
+ label: "clippy::zero_prefixed_literal",
+ description: r##"Warns if an integral constant literal starts with `0`."##,
+ },
+ Lint {
+ label: "clippy::zero_ptr",
+ description: r##"Catch casts from `0` to some pointer type"##,
+ },
+ Lint {
+ label: "clippy::zero_sized_map_values",
+ description: r##"Checks for maps with zero-sized value types anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::zst_offset",
+ description: r##"Checks for `offset(_)`, `wrapping_`{`add`, `sub`}, etc. on raw pointers to
+zero-sized types"##,
+ },
+];
+pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
+ LintGroup {
+ lint: Lint {
+ label: "clippy::cargo",
+ description: r##"lint group for: clippy::cargo_common_metadata, clippy::multiple_crate_versions, clippy::negative_feature_names, clippy::redundant_feature_names, clippy::wildcard_dependencies"##,
+ },
+ children: &[
+ "clippy::cargo_common_metadata",
+ "clippy::multiple_crate_versions",
+ "clippy::negative_feature_names",
+ "clippy::redundant_feature_names",
+ "clippy::wildcard_dependencies",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::complexity",
+ description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrowed_box, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::manual_filter_map, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_borrowed_reference, clippy::needless_lifetimes, clippy::needless_option_as_deref, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_closure_call, clippy::redundant_slicing, clippy::ref_in_deref, clippy::repeat_once, clippy::result_map_unit_fn, clippy::search_is_some, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##,
+ },
+ children: &[
+ "clippy::bind_instead_of_map",
+ "clippy::bool_comparison",
+ "clippy::borrowed_box",
+ "clippy::char_lit_as_u8",
+ "clippy::clone_on_copy",
+ "clippy::crosspointer_transmute",
+ "clippy::deprecated_cfg_attr",
+ "clippy::deref_addrof",
+ "clippy::derivable_impls",
+ "clippy::diverging_sub_expression",
+ "clippy::double_comparisons",
+ "clippy::double_parens",
+ "clippy::duration_subsec",
+ "clippy::explicit_counter_loop",
+ "clippy::explicit_write",
+ "clippy::extra_unused_lifetimes",
+ "clippy::filter_map_identity",
+ "clippy::filter_next",
+ "clippy::flat_map_identity",
+ "clippy::get_last_with_len",
+ "clippy::identity_op",
+ "clippy::inspect_for_each",
+ "clippy::int_plus_one",
+ "clippy::iter_count",
+ "clippy::manual_filter_map",
+ "clippy::manual_find_map",
+ "clippy::manual_flatten",
+ "clippy::manual_split_once",
+ "clippy::manual_strip",
+ "clippy::manual_swap",
+ "clippy::manual_unwrap_or",
+ "clippy::map_flatten",
+ "clippy::map_identity",
+ "clippy::match_as_ref",
+ "clippy::match_single_binding",
+ "clippy::needless_arbitrary_self_type",
+ "clippy::needless_bool",
+ "clippy::needless_borrowed_reference",
+ "clippy::needless_lifetimes",
+ "clippy::needless_option_as_deref",
+ "clippy::needless_question_mark",
+ "clippy::needless_splitn",
+ "clippy::needless_update",
+ "clippy::neg_cmp_op_on_partial_ord",
+ "clippy::no_effect",
+ "clippy::nonminimal_bool",
+ "clippy::option_as_ref_deref",
+ "clippy::option_filter_map",
+ "clippy::option_map_unit_fn",
+ "clippy::overflow_check_conditional",
+ "clippy::partialeq_ne_impl",
+ "clippy::precedence",
+ "clippy::ptr_offset_with_cast",
+ "clippy::range_zip_with_len",
+ "clippy::redundant_closure_call",
+ "clippy::redundant_slicing",
+ "clippy::ref_in_deref",
+ "clippy::repeat_once",
+ "clippy::result_map_unit_fn",
+ "clippy::search_is_some",
+ "clippy::short_circuit_statement",
+ "clippy::single_element_loop",
+ "clippy::skip_while_next",
+ "clippy::string_from_utf8_as_bytes",
+ "clippy::strlen_on_c_strings",
+ "clippy::temporary_assignment",
+ "clippy::too_many_arguments",
+ "clippy::transmute_bytes_to_str",
+ "clippy::transmute_float_to_int",
+ "clippy::transmute_int_to_bool",
+ "clippy::transmute_int_to_char",
+ "clippy::transmute_int_to_float",
+ "clippy::transmute_num_to_bytes",
+ "clippy::transmute_ptr_to_ref",
+ "clippy::transmutes_expressible_as_ptr_casts",
+ "clippy::type_complexity",
+ "clippy::unit_arg",
+ "clippy::unnecessary_cast",
+ "clippy::unnecessary_filter_map",
+ "clippy::unnecessary_operation",
+ "clippy::unnecessary_sort_by",
+ "clippy::unnecessary_unwrap",
+ "clippy::unneeded_wildcard_pattern",
+ "clippy::useless_asref",
+ "clippy::useless_conversion",
+ "clippy::useless_format",
+ "clippy::vec_box",
+ "clippy::while_let_loop",
+ "clippy::wildcard_in_or_patterns",
+ "clippy::zero_divided_by_zero",
+ "clippy::zero_prefixed_literal",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::correctness",
+ description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_ref_to_mut, clippy::clone_double_ref, clippy::cmp_nan, clippy::deprecated_semver, clippy::derive_hash_xor_eq, clippy::derive_ord_xor_partial_ord, clippy::drop_copy, clippy::drop_ref, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::forget_copy, clippy::forget_ref, clippy::if_let_mutex, clippy::if_same_then_else, clippy::ifs_same_cond, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::logic_bug, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::to_string_in_display, clippy::transmuting_null, clippy::undropped_manually_drops, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::vtable_address_comparisons, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##,
+ },
+ children: &[
+ "clippy::absurd_extreme_comparisons",
+ "clippy::almost_swapped",
+ "clippy::approx_constant",
+ "clippy::async_yields_async",
+ "clippy::bad_bit_mask",
+ "clippy::cast_ref_to_mut",
+ "clippy::clone_double_ref",
+ "clippy::cmp_nan",
+ "clippy::deprecated_semver",
+ "clippy::derive_hash_xor_eq",
+ "clippy::derive_ord_xor_partial_ord",
+ "clippy::drop_copy",
+ "clippy::drop_ref",
+ "clippy::enum_clike_unportable_variant",
+ "clippy::eq_op",
+ "clippy::erasing_op",
+ "clippy::fn_address_comparisons",
+ "clippy::forget_copy",
+ "clippy::forget_ref",
+ "clippy::if_let_mutex",
+ "clippy::if_same_then_else",
+ "clippy::ifs_same_cond",
+ "clippy::ineffective_bit_mask",
+ "clippy::infinite_iter",
+ "clippy::inherent_to_string_shadow_display",
+ "clippy::inline_fn_without_body",
+ "clippy::invalid_null_ptr_usage",
+ "clippy::invalid_regex",
+ "clippy::invisible_characters",
+ "clippy::iter_next_loop",
+ "clippy::iterator_step_by_zero",
+ "clippy::let_underscore_lock",
+ "clippy::logic_bug",
+ "clippy::match_str_case_mismatch",
+ "clippy::mem_replace_with_uninit",
+ "clippy::min_max",
+ "clippy::mismatched_target_os",
+ "clippy::mistyped_literal_suffixes",
+ "clippy::modulo_one",
+ "clippy::mut_from_ref",
+ "clippy::never_loop",
+ "clippy::non_octal_unix_permissions",
+ "clippy::nonsensical_open_options",
+ "clippy::not_unsafe_ptr_arg_deref",
+ "clippy::option_env_unwrap",
+ "clippy::out_of_bounds_indexing",
+ "clippy::panicking_unwrap",
+ "clippy::possible_missing_comma",
+ "clippy::reversed_empty_ranges",
+ "clippy::self_assignment",
+ "clippy::serde_api_misuse",
+ "clippy::size_of_in_element_count",
+ "clippy::suspicious_splitn",
+ "clippy::to_string_in_display",
+ "clippy::transmuting_null",
+ "clippy::undropped_manually_drops",
+ "clippy::uninit_assumed_init",
+ "clippy::uninit_vec",
+ "clippy::unit_cmp",
+ "clippy::unit_hash",
+ "clippy::unit_return_expecting_ord",
+ "clippy::unsound_collection_transmute",
+ "clippy::unused_io_amount",
+ "clippy::useless_attribute",
+ "clippy::vec_resize_to_zero",
+ "clippy::vtable_address_comparisons",
+ "clippy::while_immutable_condition",
+ "clippy::wrong_transmute",
+ "clippy::zst_offset",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::deprecated",
+ description: r##"lint group for: clippy::assign_ops, clippy::extend_from_slice, clippy::filter_map, clippy::find_map, clippy::if_let_redundant_pattern_matching, clippy::misaligned_transmute, clippy::pub_enum_variant_names, clippy::range_step_by_zero, clippy::regex_macro, clippy::replace_consts, clippy::should_assert_eq, clippy::unsafe_vector_initialization, clippy::unstable_as_mut_slice, clippy::unstable_as_slice, clippy::unused_collect, clippy::wrong_pub_self_convention"##,
+ },
+ children: &[
+ "clippy::assign_ops",
+ "clippy::extend_from_slice",
+ "clippy::filter_map",
+ "clippy::find_map",
+ "clippy::if_let_redundant_pattern_matching",
+ "clippy::misaligned_transmute",
+ "clippy::pub_enum_variant_names",
+ "clippy::range_step_by_zero",
+ "clippy::regex_macro",
+ "clippy::replace_consts",
+ "clippy::should_assert_eq",
+ "clippy::unsafe_vector_initialization",
+ "clippy::unstable_as_mut_slice",
+ "clippy::unstable_as_slice",
+ "clippy::unused_collect",
+ "clippy::wrong_pub_self_convention",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::nursery",
+ description: r##"lint group for: clippy::branches_sharing_code, clippy::cognitive_complexity, clippy::debug_assert_with_mut_call, clippy::disallowed_methods, clippy::disallowed_types, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::index_refutable_slice, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::path_buf_push_overwrite, clippy::redundant_pub_crate, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trivial_regex, clippy::use_self, clippy::useless_let_if_seq, clippy::useless_transmute"##,
+ },
+ children: &[
+ "clippy::branches_sharing_code",
+ "clippy::cognitive_complexity",
+ "clippy::debug_assert_with_mut_call",
+ "clippy::disallowed_methods",
+ "clippy::disallowed_types",
+ "clippy::empty_line_after_outer_attr",
+ "clippy::equatable_if_let",
+ "clippy::fallible_impl_from",
+ "clippy::future_not_send",
+ "clippy::imprecise_flops",
+ "clippy::index_refutable_slice",
+ "clippy::missing_const_for_fn",
+ "clippy::mutex_integer",
+ "clippy::non_send_fields_in_send_ty",
+ "clippy::nonstandard_macro_braces",
+ "clippy::option_if_let_else",
+ "clippy::path_buf_push_overwrite",
+ "clippy::redundant_pub_crate",
+ "clippy::string_lit_as_bytes",
+ "clippy::suboptimal_flops",
+ "clippy::suspicious_operation_groupings",
+ "clippy::trailing_empty_array",
+ "clippy::trivial_regex",
+ "clippy::use_self",
+ "clippy::useless_let_if_seq",
+ "clippy::useless_transmute",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::pedantic",
+ description: r##"lint group for: clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::implicit_clone, clippy::implicit_hasher, clippy::implicit_saturating_sub, clippy::inconsistent_struct_constructor, clippy::inefficient_to_string, clippy::inline_always, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::large_digit_groups, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::let_underscore_drop, clippy::let_unit_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_ok_or, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::no_effect_underscore_binding, clippy::option_option, clippy::ptr_as_ptr, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::similar_names, clippy::single_match_else, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::trait_duplication_in_bounds, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::type_repetition_in_bounds, clippy::unicode_not_nfc, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
+ },
+ children: &[
+ "clippy::await_holding_lock",
+ "clippy::await_holding_refcell_ref",
+ "clippy::case_sensitive_file_extension_comparisons",
+ "clippy::cast_lossless",
+ "clippy::cast_possible_truncation",
+ "clippy::cast_possible_wrap",
+ "clippy::cast_precision_loss",
+ "clippy::cast_ptr_alignment",
+ "clippy::cast_sign_loss",
+ "clippy::checked_conversions",
+ "clippy::cloned_instead_of_copied",
+ "clippy::copy_iterator",
+ "clippy::default_trait_access",
+ "clippy::doc_markdown",
+ "clippy::empty_enum",
+ "clippy::enum_glob_use",
+ "clippy::expl_impl_clone_on_copy",
+ "clippy::explicit_deref_methods",
+ "clippy::explicit_into_iter_loop",
+ "clippy::explicit_iter_loop",
+ "clippy::filter_map_next",
+ "clippy::flat_map_option",
+ "clippy::float_cmp",
+ "clippy::fn_params_excessive_bools",
+ "clippy::from_iter_instead_of_collect",
+ "clippy::if_not_else",
+ "clippy::implicit_clone",
+ "clippy::implicit_hasher",
+ "clippy::implicit_saturating_sub",
+ "clippy::inconsistent_struct_constructor",
+ "clippy::inefficient_to_string",
+ "clippy::inline_always",
+ "clippy::invalid_upcast_comparisons",
+ "clippy::items_after_statements",
+ "clippy::iter_not_returning_iterator",
+ "clippy::large_digit_groups",
+ "clippy::large_stack_arrays",
+ "clippy::large_types_passed_by_value",
+ "clippy::let_underscore_drop",
+ "clippy::let_unit_value",
+ "clippy::linkedlist",
+ "clippy::macro_use_imports",
+ "clippy::manual_assert",
+ "clippy::manual_ok_or",
+ "clippy::many_single_char_names",
+ "clippy::map_unwrap_or",
+ "clippy::match_bool",
+ "clippy::match_on_vec_items",
+ "clippy::match_same_arms",
+ "clippy::match_wild_err_arm",
+ "clippy::match_wildcard_for_single_variants",
+ "clippy::maybe_infinite_iter",
+ "clippy::missing_errors_doc",
+ "clippy::missing_panics_doc",
+ "clippy::module_name_repetitions",
+ "clippy::must_use_candidate",
+ "clippy::mut_mut",
+ "clippy::naive_bytecount",
+ "clippy::needless_bitwise_bool",
+ "clippy::needless_continue",
+ "clippy::needless_for_each",
+ "clippy::needless_pass_by_value",
+ "clippy::no_effect_underscore_binding",
+ "clippy::option_option",
+ "clippy::ptr_as_ptr",
+ "clippy::range_minus_one",
+ "clippy::range_plus_one",
+ "clippy::redundant_closure_for_method_calls",
+ "clippy::redundant_else",
+ "clippy::ref_binding_to_reference",
+ "clippy::ref_option_ref",
+ "clippy::same_functions_in_if_condition",
+ "clippy::semicolon_if_nothing_returned",
+ "clippy::similar_names",
+ "clippy::single_match_else",
+ "clippy::string_add_assign",
+ "clippy::struct_excessive_bools",
+ "clippy::too_many_lines",
+ "clippy::trait_duplication_in_bounds",
+ "clippy::transmute_ptr_to_ptr",
+ "clippy::trivially_copy_pass_by_ref",
+ "clippy::type_repetition_in_bounds",
+ "clippy::unicode_not_nfc",
+ "clippy::unnecessary_wraps",
+ "clippy::unnested_or_patterns",
+ "clippy::unreadable_literal",
+ "clippy::unsafe_derive_deserialize",
+ "clippy::unused_async",
+ "clippy::unused_self",
+ "clippy::used_underscore_binding",
+ "clippy::verbose_bit_mask",
+ "clippy::wildcard_imports",
+ "clippy::zero_sized_map_values",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::perf",
+ description: r##"lint group for: clippy::box_collection, clippy::boxed_local, clippy::cmp_owned, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_in_format_args, clippy::iter_nth, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_str_repeat, clippy::map_entry, clippy::mutex_atomic, clippy::needless_collect, clippy::or_fun_call, clippy::redundant_allocation, clippy::redundant_clone, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::stable_sort_primitive, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push"##,
+ },
+ children: &[
+ "clippy::box_collection",
+ "clippy::boxed_local",
+ "clippy::cmp_owned",
+ "clippy::expect_fun_call",
+ "clippy::extend_with_drain",
+ "clippy::format_in_format_args",
+ "clippy::iter_nth",
+ "clippy::large_const_arrays",
+ "clippy::large_enum_variant",
+ "clippy::manual_memcpy",
+ "clippy::manual_str_repeat",
+ "clippy::map_entry",
+ "clippy::mutex_atomic",
+ "clippy::needless_collect",
+ "clippy::or_fun_call",
+ "clippy::redundant_allocation",
+ "clippy::redundant_clone",
+ "clippy::single_char_pattern",
+ "clippy::slow_vector_initialization",
+ "clippy::stable_sort_primitive",
+ "clippy::to_string_in_format_args",
+ "clippy::unnecessary_to_owned",
+ "clippy::useless_vec",
+ "clippy::vec_init_then_push",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::restriction",
+ description: r##"lint group for: clippy::as_conversions, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::get_unwrap, clippy::if_then_some_else_none, clippy::implicit_return, clippy::indexing_slicing, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_arithmetic, clippy::integer_division, clippy::let_underscore_must_use, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::missing_docs_in_private_items, clippy::missing_enforced_import_renames, clippy::missing_inline_in_public_items, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::rc_buffer, clippy::rc_mutex, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::str_to_string, clippy::string_add, clippy::string_slice, clippy::string_to_string, clippy::todo, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##,
+ },
+ children: &[
+ "clippy::as_conversions",
+ "clippy::clone_on_ref_ptr",
+ "clippy::create_dir",
+ "clippy::dbg_macro",
+ "clippy::decimal_literal_representation",
+ "clippy::default_numeric_fallback",
+ "clippy::disallowed_script_idents",
+ "clippy::else_if_without_else",
+ "clippy::exhaustive_enums",
+ "clippy::exhaustive_structs",
+ "clippy::exit",
+ "clippy::expect_used",
+ "clippy::filetype_is_file",
+ "clippy::float_arithmetic",
+ "clippy::float_cmp_const",
+ "clippy::fn_to_numeric_cast_any",
+ "clippy::get_unwrap",
+ "clippy::if_then_some_else_none",
+ "clippy::implicit_return",
+ "clippy::indexing_slicing",
+ "clippy::inline_asm_x86_att_syntax",
+ "clippy::inline_asm_x86_intel_syntax",
+ "clippy::integer_arithmetic",
+ "clippy::integer_division",
+ "clippy::let_underscore_must_use",
+ "clippy::lossy_float_literal",
+ "clippy::map_err_ignore",
+ "clippy::mem_forget",
+ "clippy::missing_docs_in_private_items",
+ "clippy::missing_enforced_import_renames",
+ "clippy::missing_inline_in_public_items",
+ "clippy::mod_module_files",
+ "clippy::modulo_arithmetic",
+ "clippy::multiple_inherent_impl",
+ "clippy::non_ascii_literal",
+ "clippy::panic",
+ "clippy::panic_in_result_fn",
+ "clippy::pattern_type_mismatch",
+ "clippy::print_stderr",
+ "clippy::print_stdout",
+ "clippy::rc_buffer",
+ "clippy::rc_mutex",
+ "clippy::rest_pat_in_fully_bound_structs",
+ "clippy::same_name_method",
+ "clippy::self_named_module_files",
+ "clippy::separated_literal_suffix",
+ "clippy::shadow_reuse",
+ "clippy::shadow_same",
+ "clippy::shadow_unrelated",
+ "clippy::str_to_string",
+ "clippy::string_add",
+ "clippy::string_slice",
+ "clippy::string_to_string",
+ "clippy::todo",
+ "clippy::undocumented_unsafe_blocks",
+ "clippy::unimplemented",
+ "clippy::unnecessary_self_imports",
+ "clippy::unneeded_field_pattern",
+ "clippy::unreachable",
+ "clippy::unseparated_literal_suffix",
+ "clippy::unwrap_in_result",
+ "clippy::unwrap_used",
+ "clippy::use_debug",
+ "clippy::verbose_file_reads",
+ "clippy::wildcard_enum_match_arm",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::style",
+ description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blacklisted_name, clippy::blocks_in_if_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::into_iter_on_ref, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_map, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_doctest_main, clippy::needless_late_init, clippy::needless_range_loop, clippy::needless_return, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::try_err, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unsafe_removed_from_name, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_else_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##,
+ },
+ children: &[
+ "clippy::assertions_on_constants",
+ "clippy::assign_op_pattern",
+ "clippy::blacklisted_name",
+ "clippy::blocks_in_if_conditions",
+ "clippy::bool_assert_comparison",
+ "clippy::borrow_interior_mutable_const",
+ "clippy::builtin_type_shadow",
+ "clippy::bytes_nth",
+ "clippy::chars_last_cmp",
+ "clippy::chars_next_cmp",
+ "clippy::cmp_null",
+ "clippy::collapsible_else_if",
+ "clippy::collapsible_if",
+ "clippy::collapsible_match",
+ "clippy::comparison_chain",
+ "clippy::comparison_to_empty",
+ "clippy::declare_interior_mutable_const",
+ "clippy::double_must_use",
+ "clippy::double_neg",
+ "clippy::duplicate_underscore_argument",
+ "clippy::enum_variant_names",
+ "clippy::excessive_precision",
+ "clippy::field_reassign_with_default",
+ "clippy::fn_to_numeric_cast",
+ "clippy::fn_to_numeric_cast_with_truncation",
+ "clippy::for_kv_map",
+ "clippy::from_over_into",
+ "clippy::from_str_radix_10",
+ "clippy::inconsistent_digit_grouping",
+ "clippy::infallible_destructuring_match",
+ "clippy::inherent_to_string",
+ "clippy::into_iter_on_ref",
+ "clippy::iter_cloned_collect",
+ "clippy::iter_next_slice",
+ "clippy::iter_nth_zero",
+ "clippy::iter_skip_next",
+ "clippy::just_underscores_and_digits",
+ "clippy::len_without_is_empty",
+ "clippy::len_zero",
+ "clippy::let_and_return",
+ "clippy::main_recursion",
+ "clippy::manual_async_fn",
+ "clippy::manual_map",
+ "clippy::manual_non_exhaustive",
+ "clippy::manual_range_contains",
+ "clippy::manual_saturating_arithmetic",
+ "clippy::map_clone",
+ "clippy::map_collect_result_unit",
+ "clippy::match_like_matches_macro",
+ "clippy::match_overlapping_arm",
+ "clippy::match_ref_pats",
+ "clippy::match_result_ok",
+ "clippy::mem_replace_option_with_none",
+ "clippy::mem_replace_with_default",
+ "clippy::missing_safety_doc",
+ "clippy::mixed_case_hex_literals",
+ "clippy::module_inception",
+ "clippy::must_use_unit",
+ "clippy::mut_mutex_lock",
+ "clippy::needless_borrow",
+ "clippy::needless_doctest_main",
+ "clippy::needless_late_init",
+ "clippy::needless_range_loop",
+ "clippy::needless_return",
+ "clippy::neg_multiply",
+ "clippy::new_ret_no_self",
+ "clippy::new_without_default",
+ "clippy::ok_expect",
+ "clippy::op_ref",
+ "clippy::option_map_or_none",
+ "clippy::print_literal",
+ "clippy::print_with_newline",
+ "clippy::println_empty_string",
+ "clippy::ptr_arg",
+ "clippy::ptr_eq",
+ "clippy::question_mark",
+ "clippy::redundant_closure",
+ "clippy::redundant_field_names",
+ "clippy::redundant_pattern",
+ "clippy::redundant_pattern_matching",
+ "clippy::redundant_static_lifetimes",
+ "clippy::result_map_or_into_option",
+ "clippy::result_unit_err",
+ "clippy::same_item_push",
+ "clippy::self_named_constructors",
+ "clippy::should_implement_trait",
+ "clippy::single_char_add_str",
+ "clippy::single_component_path_imports",
+ "clippy::single_match",
+ "clippy::string_extend_chars",
+ "clippy::tabs_in_doc_comments",
+ "clippy::to_digit_is_some",
+ "clippy::toplevel_ref_arg",
+ "clippy::try_err",
+ "clippy::unnecessary_fold",
+ "clippy::unnecessary_lazy_evaluations",
+ "clippy::unnecessary_mut_passed",
+ "clippy::unsafe_removed_from_name",
+ "clippy::unused_unit",
+ "clippy::unusual_byte_groupings",
+ "clippy::unwrap_or_else_default",
+ "clippy::upper_case_acronyms",
+ "clippy::while_let_on_iterator",
+ "clippy::write_literal",
+ "clippy::write_with_newline",
+ "clippy::writeln_empty_string",
+ "clippy::wrong_self_convention",
+ "clippy::zero_ptr",
+ ],
+ },
+ LintGroup {
+ lint: Lint {
+ label: "clippy::suspicious",
+ description: r##"lint group for: clippy::blanket_clippy_restriction_lints, clippy::empty_loop, clippy::eval_order_dependence, clippy::float_equality_without_abs, clippy::for_loops_over_fallibles, clippy::misrefactored_assign_op, clippy::mut_range_bound, clippy::mutable_key_type, clippy::octal_escapes, clippy::return_self_not_must_use, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_unary_op_formatting"##,
+ },
+ children: &[
+ "clippy::blanket_clippy_restriction_lints",
+ "clippy::empty_loop",
+ "clippy::eval_order_dependence",
+ "clippy::float_equality_without_abs",
+ "clippy::for_loops_over_fallibles",
+ "clippy::misrefactored_assign_op",
+ "clippy::mut_range_bound",
+ "clippy::mutable_key_type",
+ "clippy::octal_escapes",
+ "clippy::return_self_not_must_use",
+ "clippy::suspicious_arithmetic_impl",
+ "clippy::suspicious_assignment_formatting",
+ "clippy::suspicious_else_formatting",
+ "clippy::suspicious_map",
+ "clippy::suspicious_op_assign_impl",
+ "clippy::suspicious_unary_op_formatting",
+ ],
+ },
+];
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
new file mode 100644
index 000000000..6e56efe34
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -0,0 +1,105 @@
+//! Random assortment of ide helpers for high-level ide features that don't fit in any other module.
+
+use std::collections::VecDeque;
+
+use base_db::FileId;
+use hir::{ItemInNs, ModuleDef, Name, Semantics};
+use syntax::{
+ ast::{self, make},
+ AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
+};
+
+use crate::{defs::Definition, generated, RootDatabase};
+
+pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
+ match item {
+ ItemInNs::Types(module_def_id) => module_def_id.name(db),
+ ItemInNs::Values(module_def_id) => module_def_id.name(db),
+ ItemInNs::Macros(macro_def_id) => Some(macro_def_id.name(db)),
+ }
+}
+
+/// Picks the token with the highest rank returned by the passed in function.
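+/// This is useful when a cursor offset sits on the boundary between two tokens and one of them
+/// (for example an identifier rather than adjacent punctuation) is the more interesting one to act on.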
+pub fn pick_best_token(
+ tokens: TokenAtOffset<SyntaxToken>,
+ f: impl Fn(SyntaxKind) -> usize,
+) -> Option<SyntaxToken> {
+ tokens.max_by_key(move |t| f(t.kind()))
+}
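+/// Picks the first token at the offset that can be cast to the given [`AstToken`] type.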
+pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option<T> {
+ tokens.find_map(T::cast)
+}
+
+/// Converts the mod path struct into its ast representation.
+pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
+ let _p = profile::span("mod_path_to_ast");
+
+ let mut segments = Vec::new();
+ let mut is_abs = false;
+ match path.kind {
+ hir::PathKind::Plain => {}
+ hir::PathKind::Super(0) => segments.push(make::path_segment_self()),
+ hir::PathKind::Super(n) => segments.extend((0..n).map(|_| make::path_segment_super())),
+ hir::PathKind::DollarCrate(_) | hir::PathKind::Crate => {
+ segments.push(make::path_segment_crate())
+ }
+ hir::PathKind::Abs => is_abs = true,
+ }
+
+ segments.extend(
+ path.segments()
+ .iter()
+ .map(|segment| make::path_segment(make::name_ref(&segment.to_smol_str()))),
+ );
+ make::path_from_segments(segments, is_abs)
+}
+
+/// Iterates all `ModuleDef`s and `Impl` blocks of the given file.
+pub fn visit_file_defs(
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ cb: &mut dyn FnMut(Definition),
+) {
+ let db = sema.db;
+ let module = match sema.to_module_def(file_id) {
+ Some(it) => it,
+ None => return,
+ };
+ let mut defs: VecDeque<_> = module.declarations(db).into();
+ while let Some(def) = defs.pop_front() {
+ if let ModuleDef::Module(submodule) = def {
+ if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value {
+ defs.extend(submodule.declarations(db));
+ submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
+ }
+ }
+ cb(def.into());
+ }
+ module.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
+
+ let is_root = module.is_crate_root(db);
+ module
+ .legacy_macros(db)
+ .into_iter()
+ // don't show legacy macros declared in the crate-root that were already covered in declarations earlier
+ .filter(|it| !(is_root && it.is_macro_export(db)))
+ .for_each(|mac| cb(mac.into()));
+}
+
+/// Checks whether the given lint is equal to, or contained in, the other lint, which may or may not be a lint group.
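+///
+/// For example, based on the generated clippy group tables above, where `clippy::eq_op` is listed
+/// among the children of the `clippy::correctness` group:
+///
+/// ```ignore
+/// assert!(lint_eq_or_in_group("clippy::eq_op", "clippy::correctness"));
+/// assert!(!lint_eq_or_in_group("clippy::eq_op", "clippy::style"));
+/// ```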
+pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
+ if lint == lint_is {
+ return true;
+ }
+
+ if let Some(group) = generated::lints::DEFAULT_LINT_GROUPS
+ .iter()
+ .chain(generated::lints::CLIPPY_LINT_GROUPS.iter())
+ .chain(generated::lints::RUSTDOC_LINT_GROUPS.iter())
+ .find(|&check| check.lint.label == lint_is)
+ {
+ group.children.contains(&lint)
+ } else {
+ false
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
new file mode 100644
index 000000000..26ef86155
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -0,0 +1,674 @@
+//! Look up accessible paths for items.
+use hir::{
+ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef,
+ PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type,
+};
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+use syntax::{
+ ast::{self, HasName},
+ utils::path_to_string_stripping_turbo_fish,
+ AstNode, SyntaxNode,
+};
+
+use crate::{
+ helpers::item_name,
+ items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT},
+ RootDatabase,
+};
+
+/// A candidate for import, derived during various IDE activities:
+/// * completion with imports on the fly proposals
+/// * completion edit resolve requests
+/// * assists
+/// * etc.
+#[derive(Debug)]
+pub enum ImportCandidate {
+ /// A path, qualified (`std::collections::HashMap`) or not (`HashMap`).
+ Path(PathImportCandidate),
+ /// A trait associated function (with no self parameter) or an associated constant.
+    /// For 'test_mod::TestEnum::test_function', the receiver type is the `test_mod::TestEnum` expression type
+    /// and the associated item name is `test_function`.
+ TraitAssocItem(TraitImportCandidate),
+ /// A trait method with self parameter.
+    /// For 'test_enum.test_method()', the receiver type is the `test_enum` expression type
+    /// and the associated item name is `test_method`.
+ TraitMethod(TraitImportCandidate),
+}
+
+/// A trait import needed for a given associated item access.
+/// For `some::path::SomeStruct::ASSOC_`, contains the
+/// type of `some::path::SomeStruct` and `ASSOC_` as the item name.
+#[derive(Debug)]
+pub struct TraitImportCandidate {
+    /// The type of the receiver on which the associated item is accessed.
+    pub receiver_ty: Type,
+    /// The name of the associated item that the trait to import should contain.
+ pub assoc_item_name: NameToImport,
+}
+
+/// Path import for a given name, qualified or not.
+#[derive(Debug)]
+pub struct PathImportCandidate {
+ /// Optional qualifier before name.
+ pub qualifier: Option<FirstSegmentUnresolved>,
+ /// The name the item (struct, trait, enum, etc.) should have.
+ pub name: NameToImport,
+}
+
+/// A qualifier whose first segment is unresolved.
+#[derive(Debug)]
+pub struct FirstSegmentUnresolved {
+ fist_segment: ast::NameRef,
+ full_qualifier: ast::Path,
+}
+
+/// A name that will be used during item lookups.
+#[derive(Debug, Clone)]
+pub enum NameToImport {
+    /// Requires items with names that exactly match the given string; the bool indicates case sensitivity.
+ Exact(String, bool),
+ /// Requires items with names that case-insensitively contain all letters from the string,
+ /// in the same order, but not necessary adjacent.
+ Fuzzy(String),
+}
+
+impl NameToImport {
+ pub fn exact_case_sensitive(s: String) -> NameToImport {
+ NameToImport::Exact(s, true)
+ }
+}
+
+impl NameToImport {
+ pub fn text(&self) -> &str {
+ match self {
+ NameToImport::Exact(text, _) => text.as_str(),
+ NameToImport::Fuzzy(text) => text.as_str(),
+ }
+ }
+}
+
+/// A struct to find imports in the project, given a certain name (or its part) and the context.
+#[derive(Debug)]
+pub struct ImportAssets {
+ import_candidate: ImportCandidate,
+ candidate_node: SyntaxNode,
+ module_with_candidate: Module,
+}
+
+impl ImportAssets {
+ pub fn for_method_call(
+ method_call: &ast::MethodCallExpr,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<Self> {
+ let candidate_node = method_call.syntax().clone();
+ Some(Self {
+ import_candidate: ImportCandidate::for_method_call(sema, method_call)?,
+ module_with_candidate: sema.scope(&candidate_node)?.module(),
+ candidate_node,
+ })
+ }
+
+ pub fn for_exact_path(
+ fully_qualified_path: &ast::Path,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<Self> {
+ let candidate_node = fully_qualified_path.syntax().clone();
+ if let Some(use_tree) = candidate_node.ancestors().find_map(ast::UseTree::cast) {
+            // The path is inside a use tree; only continue if it is the first segment of a use statement.
+ if use_tree.syntax().parent().and_then(ast::Use::cast).is_none()
+ || fully_qualified_path.qualifier().is_some()
+ {
+ return None;
+ }
+ }
+ Some(Self {
+ import_candidate: ImportCandidate::for_regular_path(sema, fully_qualified_path)?,
+ module_with_candidate: sema.scope(&candidate_node)?.module(),
+ candidate_node,
+ })
+ }
+
+ pub fn for_ident_pat(sema: &Semantics<'_, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
+ if !pat.is_simple_ident() {
+ return None;
+ }
+ let name = pat.name()?;
+ let candidate_node = pat.syntax().clone();
+ Some(Self {
+ import_candidate: ImportCandidate::for_name(sema, &name)?,
+ module_with_candidate: sema.scope(&candidate_node)?.module(),
+ candidate_node,
+ })
+ }
+
+ pub fn for_fuzzy_path(
+ module_with_candidate: Module,
+ qualifier: Option<ast::Path>,
+ fuzzy_name: String,
+ sema: &Semantics<'_, RootDatabase>,
+ candidate_node: SyntaxNode,
+ ) -> Option<Self> {
+ Some(Self {
+ import_candidate: ImportCandidate::for_fuzzy_path(qualifier, fuzzy_name, sema)?,
+ module_with_candidate,
+ candidate_node,
+ })
+ }
+
+ pub fn for_fuzzy_method_call(
+ module_with_method_call: Module,
+ receiver_ty: Type,
+ fuzzy_method_name: String,
+ candidate_node: SyntaxNode,
+ ) -> Option<Self> {
+ Some(Self {
+ import_candidate: ImportCandidate::TraitMethod(TraitImportCandidate {
+ receiver_ty,
+ assoc_item_name: NameToImport::Fuzzy(fuzzy_method_name),
+ }),
+ module_with_candidate: module_with_method_call,
+ candidate_node,
+ })
+ }
+}
+
+/// An import (not necessarily the only one) that corresponds to a certain given [`PathImportCandidate`].
+/// (The structure is not entirely correct, since there can be situations requiring two imports; see the FIXME below for details.)
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LocatedImport {
+ /// The path to use in the `use` statement for a given candidate to be imported.
+ pub import_path: ModPath,
+ /// An item that will be imported with the import path given.
+ pub item_to_import: ItemInNs,
+ /// The path import candidate, resolved.
+ ///
+    /// Does not necessarily match the import:
+    /// for an associated constant of a trait, accessed as `some::path::SomeStruct::ASSOC_`,
+    /// the original item is the associated constant, but the import has to be the trait that
+    /// defines this constant.
+ pub original_item: ItemInNs,
+ /// A path of the original item.
+ pub original_path: Option<ModPath>,
+}
+
+impl LocatedImport {
+ pub fn new(
+ import_path: ModPath,
+ item_to_import: ItemInNs,
+ original_item: ItemInNs,
+ original_path: Option<ModPath>,
+ ) -> Self {
+ Self { import_path, item_to_import, original_item, original_path }
+ }
+}
+
+impl ImportAssets {
+ pub fn import_candidate(&self) -> &ImportCandidate {
+ &self.import_candidate
+ }
+
+ pub fn search_for_imports(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ prefix_kind: PrefixKind,
+ ) -> Vec<LocatedImport> {
+ let _p = profile::span("import_assets::search_for_imports");
+ self.search_for(sema, Some(prefix_kind))
+ }
+
+ /// This may return non-absolute paths if a part of the returned path is already imported into scope.
+ pub fn search_for_relative_paths(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Vec<LocatedImport> {
+ let _p = profile::span("import_assets::search_for_relative_paths");
+ self.search_for(sema, None)
+ }
+
+ pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
+ if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
+ &mut self.import_candidate
+ {
+ let name = match to_import {
+ NameToImport::Fuzzy(name) => std::mem::take(name),
+ _ => return,
+ };
+ *to_import = NameToImport::Exact(name, case_sensitive);
+ }
+ }
+
+ fn search_for(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ prefixed: Option<PrefixKind>,
+ ) -> Vec<LocatedImport> {
+ let _p = profile::span("import_assets::search_for");
+
+ let scope_definitions = self.scope_definitions(sema);
+ let mod_path = |item| {
+ get_mod_path(
+ sema.db,
+ item_for_path_search(sema.db, item)?,
+ &self.module_with_candidate,
+ prefixed,
+ )
+ };
+
+ let krate = self.module_with_candidate.krate();
+ let scope = match sema.scope(&self.candidate_node) {
+ Some(it) => it,
+ None => return Vec::new(),
+ };
+
+ match &self.import_candidate {
+ ImportCandidate::Path(path_candidate) => {
+ path_applicable_imports(sema, krate, path_candidate, mod_path)
+ }
+ ImportCandidate::TraitAssocItem(trait_candidate) => {
+ trait_applicable_items(sema, krate, &scope, trait_candidate, true, mod_path)
+ }
+ ImportCandidate::TraitMethod(trait_candidate) => {
+ trait_applicable_items(sema, krate, &scope, trait_candidate, false, mod_path)
+ }
+ }
+ .into_iter()
+ .filter(|import| import.import_path.len() > 1)
+ .filter(|import| !scope_definitions.contains(&ScopeDef::from(import.item_to_import)))
+ .sorted_by(|a, b| a.import_path.cmp(&b.import_path))
+ .collect()
+ }
+
+ fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet<ScopeDef> {
+ let _p = profile::span("import_assets::scope_definitions");
+ let mut scope_definitions = FxHashSet::default();
+ if let Some(scope) = sema.scope(&self.candidate_node) {
+ scope.process_all_names(&mut |_, scope_def| {
+ scope_definitions.insert(scope_def);
+ });
+ }
+ scope_definitions
+ }
+}
+
+fn path_applicable_imports(
+ sema: &Semantics<'_, RootDatabase>,
+ current_crate: Crate,
+ path_candidate: &PathImportCandidate,
+ mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
+) -> FxHashSet<LocatedImport> {
+ let _p = profile::span("import_assets::path_applicable_imports");
+
+ match &path_candidate.qualifier {
+ None => {
+ items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ // FIXME: we could look up assoc items by the input and propose those in completion,
+ // but that requires more preparation first:
+ // * store non-trait assoc items in import_map to fully enable this lookup
+ // * ensure that does not degrade the performance (benchmark it)
+                // * write more logic to check for corresponding trait presence requirement (we're unable to flyimport multiple items right now)
+ // * improve the associated completion item matching and/or scoring to ensure no noisy completions appear
+ //
+ // see also an ignored test under FIXME comment in the qualify_path.rs module
+ AssocItemSearch::Exclude,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| {
+ let mod_path = mod_path(item)?;
+ Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path)))
+ })
+ .collect()
+ }
+ Some(first_segment_unresolved) => {
+ let unresolved_qualifier =
+ path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier);
+ let unresolved_first_segment = first_segment_unresolved.fist_segment.text();
+ items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ AssocItemSearch::Include,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| {
+ import_for_item(
+ sema.db,
+ mod_path,
+ &unresolved_first_segment,
+ &unresolved_qualifier,
+ item,
+ )
+ })
+ .collect()
+ }
+ }
+}
+
+fn import_for_item(
+ db: &RootDatabase,
+ mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
+ unresolved_first_segment: &str,
+ unresolved_qualifier: &str,
+ original_item: ItemInNs,
+) -> Option<LocatedImport> {
+ let _p = profile::span("import_assets::import_for_item");
+
+ let original_item_candidate = item_for_path_search(db, original_item)?;
+ let import_path_candidate = mod_path(original_item_candidate)?;
+ let import_path_string = import_path_candidate.to_string();
+
+ let expected_import_end = if item_as_assoc(db, original_item).is_some() {
+ unresolved_qualifier.to_string()
+ } else {
+ format!("{}::{}", unresolved_qualifier, item_name(db, original_item)?)
+ };
+ if !import_path_string.contains(unresolved_first_segment)
+ || !import_path_string.ends_with(&expected_import_end)
+ {
+ return None;
+ }
+
+ let segment_import =
+ find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
+ let trait_item_to_import = item_as_assoc(db, original_item)
+ .and_then(|assoc| assoc.containing_trait(db))
+ .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
+ Some(match (segment_import == original_item_candidate, trait_item_to_import) {
+ (true, Some(_)) => {
+ // FIXME we should be able to import both the trait and the segment,
+ // but it's unclear what to do with overlapping edits (merge imports?)
+ // especially in case of lazy completion edit resolutions.
+ return None;
+ }
+ (false, Some(trait_to_import)) => LocatedImport::new(
+ mod_path(trait_to_import)?,
+ trait_to_import,
+ original_item,
+ mod_path(original_item),
+ ),
+ (true, None) => LocatedImport::new(
+ import_path_candidate,
+ original_item_candidate,
+ original_item,
+ mod_path(original_item),
+ ),
+ (false, None) => LocatedImport::new(
+ mod_path(segment_import)?,
+ segment_import,
+ original_item,
+ mod_path(original_item),
+ ),
+ })
+}
+
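+/// Returns the item that should actually be looked up when searching for an import path:
+/// an associated item is replaced by its containing trait, or by the self type of its impl block,
+/// while modules, macros and other free-standing items are returned unchanged.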
+pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option<ItemInNs> {
+ Some(match item {
+ ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
+ Some(assoc_item) => match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+ AssocItemContainer::Impl(impl_) => {
+ ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
+ }
+ },
+ None => item,
+ },
+ ItemInNs::Macros(_) => item,
+ })
+}
+
+fn find_import_for_segment(
+ db: &RootDatabase,
+ original_item: ItemInNs,
+ unresolved_first_segment: &str,
+) -> Option<ItemInNs> {
+ let segment_is_name = item_name(db, original_item)
+ .map(|name| name.to_smol_str() == unresolved_first_segment)
+ .unwrap_or(false);
+
+ Some(if segment_is_name {
+ original_item
+ } else {
+ let matching_module =
+ module_with_segment_name(db, unresolved_first_segment, original_item)?;
+ ItemInNs::from(ModuleDef::from(matching_module))
+ })
+}
+
+fn module_with_segment_name(
+ db: &RootDatabase,
+ segment_name: &str,
+ candidate: ItemInNs,
+) -> Option<Module> {
+ let mut current_module = match candidate {
+ ItemInNs::Types(module_def_id) => module_def_id.module(db),
+ ItemInNs::Values(module_def_id) => module_def_id.module(db),
+ ItemInNs::Macros(macro_def_id) => ModuleDef::from(macro_def_id).module(db),
+ };
+ while let Some(module) = current_module {
+ if let Some(module_name) = module.name(db) {
+ if module_name.to_smol_str() == segment_name {
+ return Some(module);
+ }
+ }
+ current_module = module.parent(db);
+ }
+ None
+}
+
+fn trait_applicable_items(
+ sema: &Semantics<'_, RootDatabase>,
+ current_crate: Crate,
+ scope: &SemanticsScope<'_>,
+ trait_candidate: &TraitImportCandidate,
+ trait_assoc_item: bool,
+ mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
+) -> FxHashSet<LocatedImport> {
+ let _p = profile::span("import_assets::trait_applicable_items");
+
+ let db = sema.db;
+
+ let inherent_traits = trait_candidate.receiver_ty.applicable_inherent_traits(db);
+ let env_traits = trait_candidate.receiver_ty.env_traits(db);
+ let related_traits = inherent_traits.chain(env_traits).collect::<FxHashSet<_>>();
+
+ let mut required_assoc_items = FxHashSet::default();
+ let trait_candidates = items_locator::items_with_name(
+ sema,
+ current_crate,
+ trait_candidate.assoc_item_name.clone(),
+ AssocItemSearch::AssocItemsOnly,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|input| item_as_assoc(db, input))
+ .filter_map(|assoc| {
+ let assoc_item_trait = assoc.containing_trait(db)?;
+ if related_traits.contains(&assoc_item_trait) {
+ None
+ } else {
+ required_assoc_items.insert(assoc);
+ Some(assoc_item_trait.into())
+ }
+ })
+ .collect();
+
+ let mut located_imports = FxHashSet::default();
+
+ if trait_assoc_item {
+ trait_candidate.receiver_ty.iterate_path_candidates(
+ db,
+ scope,
+ &trait_candidates,
+ None,
+ None,
+ |assoc| {
+ if required_assoc_items.contains(&assoc) {
+ if let AssocItem::Function(f) = assoc {
+ if f.self_param(db).is_some() {
+ return None;
+ }
+ }
+ let located_trait = assoc.containing_trait(db)?;
+ let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
+ let original_item = assoc_to_item(assoc);
+ located_imports.insert(LocatedImport::new(
+ mod_path(trait_item)?,
+ trait_item,
+ original_item,
+ mod_path(original_item),
+ ));
+ }
+ None::<()>
+ },
+ )
+ } else {
+ trait_candidate.receiver_ty.iterate_method_candidates(
+ db,
+ scope,
+ &trait_candidates,
+ None,
+ None,
+ |function| {
+ let assoc = function.as_assoc_item(db)?;
+ if required_assoc_items.contains(&assoc) {
+ let located_trait = assoc.containing_trait(db)?;
+ let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
+ let original_item = assoc_to_item(assoc);
+ located_imports.insert(LocatedImport::new(
+ mod_path(trait_item)?,
+ trait_item,
+ original_item,
+ mod_path(original_item),
+ ));
+ }
+ None::<()>
+ },
+ )
+ };
+
+ located_imports
+}
+
+fn assoc_to_item(assoc: AssocItem) -> ItemInNs {
+ match assoc {
+ AssocItem::Function(f) => ItemInNs::from(ModuleDef::from(f)),
+ AssocItem::Const(c) => ItemInNs::from(ModuleDef::from(c)),
+ AssocItem::TypeAlias(t) => ItemInNs::from(ModuleDef::from(t)),
+ }
+}
+
+fn get_mod_path(
+ db: &RootDatabase,
+ item_to_search: ItemInNs,
+ module_with_candidate: &Module,
+ prefixed: Option<PrefixKind>,
+) -> Option<ModPath> {
+ if let Some(prefix_kind) = prefixed {
+ module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind)
+ } else {
+ module_with_candidate.find_use_path(db, item_to_search)
+ }
+}
+
+impl ImportCandidate {
+ fn for_method_call(
+ sema: &Semantics<'_, RootDatabase>,
+ method_call: &ast::MethodCallExpr,
+ ) -> Option<Self> {
+ match sema.resolve_method_call(method_call) {
+ Some(_) => None,
+ None => Some(Self::TraitMethod(TraitImportCandidate {
+ receiver_ty: sema.type_of_expr(&method_call.receiver()?)?.adjusted(),
+ assoc_item_name: NameToImport::exact_case_sensitive(
+ method_call.name_ref()?.to_string(),
+ ),
+ })),
+ }
+ }
+
+ fn for_regular_path(sema: &Semantics<'_, RootDatabase>, path: &ast::Path) -> Option<Self> {
+ if sema.resolve_path(path).is_some() {
+ return None;
+ }
+ path_import_candidate(
+ sema,
+ path.qualifier(),
+ NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()),
+ )
+ }
+
+ fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> {
+ if sema
+ .scope(name.syntax())?
+ .speculative_resolve(&ast::make::ext::ident_path(&name.text()))
+ .is_some()
+ {
+ return None;
+ }
+ Some(ImportCandidate::Path(PathImportCandidate {
+ qualifier: None,
+ name: NameToImport::exact_case_sensitive(name.to_string()),
+ }))
+ }
+
+ fn for_fuzzy_path(
+ qualifier: Option<ast::Path>,
+ fuzzy_name: String,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<Self> {
+ path_import_candidate(sema, qualifier, NameToImport::Fuzzy(fuzzy_name))
+ }
+}
+
+fn path_import_candidate(
+ sema: &Semantics<'_, RootDatabase>,
+ qualifier: Option<ast::Path>,
+ name: NameToImport,
+) -> Option<ImportCandidate> {
+ Some(match qualifier {
+ Some(qualifier) => match sema.resolve_path(&qualifier) {
+ None => {
+ let qualifier_start =
+ qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
+ let qualifier_start_path =
+ qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
+ if sema.resolve_path(&qualifier_start_path).is_none() {
+ ImportCandidate::Path(PathImportCandidate {
+ qualifier: Some(FirstSegmentUnresolved {
+ fist_segment: qualifier_start,
+ full_qualifier: qualifier,
+ }),
+ name,
+ })
+ } else {
+ return None;
+ }
+ }
+ Some(PathResolution::Def(ModuleDef::Adt(assoc_item_path))) => {
+ ImportCandidate::TraitAssocItem(TraitImportCandidate {
+ receiver_ty: assoc_item_path.ty(sema.db),
+ assoc_item_name: name,
+ })
+ }
+ Some(PathResolution::Def(ModuleDef::TypeAlias(alias))) => {
+ let ty = alias.ty(sema.db);
+ if ty.as_adt().is_some() {
+ ImportCandidate::TraitAssocItem(TraitImportCandidate {
+ receiver_ty: ty,
+ assoc_item_name: name,
+ })
+ } else {
+ return None;
+ }
+ }
+ Some(_) => return None,
+ },
+ None => ImportCandidate::Path(PathImportCandidate { qualifier: None, name }),
+ })
+}
+
+fn item_as_assoc(db: &RootDatabase, item: ItemInNs) -> Option<AssocItem> {
+ item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
new file mode 100644
index 000000000..c14182279
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -0,0 +1,446 @@
+//! Handle syntactic aspects of inserting a new `use` item.
+#[cfg(test)]
+mod tests;
+
+use std::cmp::Ordering;
+
+use hir::Semantics;
+use syntax::{
+ algo,
+ ast::{self, make, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind},
+ ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
+};
+
+use crate::{
+ imports::merge_imports::{
+ common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_path_cmp, MergeBehavior,
+ },
+ RootDatabase,
+};
+
+pub use hir::PrefixKind;
+
+/// How imports should be grouped into use statements.
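+///
+/// As a rough sketch, merging `use std::fmt::Debug;`, `use std::fmt::Display;` and
+/// `use std::collections::HashMap;` under each setting would give approximately:
+///
+/// * `Item`: three separate `use` statements, one per import,
+/// * `Module`: `use std::fmt::{Debug, Display};` and `use std::collections::HashMap;`,
+/// * `Crate`: `use std::{collections::HashMap, fmt::{Debug, Display}};`.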
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ImportGranularity {
+ /// Do not change the granularity of any imports and preserve the original structure written by the developer.
+ Preserve,
+ /// Merge imports from the same crate into a single use statement.
+ Crate,
+ /// Merge imports from the same module into a single use statement.
+ Module,
+ /// Flatten imports so that each has its own use statement.
+ Item,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct InsertUseConfig {
+ pub granularity: ImportGranularity,
+ pub enforce_granularity: bool,
+ pub prefix_kind: PrefixKind,
+ pub group: bool,
+ pub skip_glob_imports: bool,
+}
+
+#[derive(Debug, Clone)]
+pub enum ImportScope {
+ File(ast::SourceFile),
+ Module(ast::ItemList),
+ Block(ast::StmtList),
+}
+
+impl ImportScope {
+ // FIXME: Remove this?
+ #[cfg(test)]
+ fn from(syntax: SyntaxNode) -> Option<Self> {
+ use syntax::match_ast;
+ fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
+ attrs
+ .attrs()
+ .any(|attr| attr.as_simple_call().map_or(false, |(ident, _)| ident == "cfg"))
+ }
+ match_ast! {
+ match syntax {
+ ast::Module(module) => module.item_list().map(ImportScope::Module),
+ ast::SourceFile(file) => Some(ImportScope::File(file)),
+ ast::Fn(func) => contains_cfg_attr(&func).then(|| func.body().and_then(|it| it.stmt_list().map(ImportScope::Block))).flatten(),
+ ast::Const(konst) => contains_cfg_attr(&konst).then(|| match konst.body()? {
+ ast::Expr::BlockExpr(block) => Some(block),
+ _ => None,
+ }).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
+ ast::Static(statik) => contains_cfg_attr(&statik).then(|| match statik.body()? {
+ ast::Expr::BlockExpr(block) => Some(block),
+ _ => None,
+ }).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
+ _ => None,
+
+ }
+ }
+ }
+
+ /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
+ /// Returns the original source node inside attributes.
+ pub fn find_insert_use_container(
+ position: &SyntaxNode,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<Self> {
+ fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
+ attrs
+ .attrs()
+ .any(|attr| attr.as_simple_call().map_or(false, |(ident, _)| ident == "cfg"))
+ }
+
+ // Walk up the ancestor tree searching for a suitable node to do insertions on
+ // with special handling on cfg-gated items, in which case we want to insert imports locally
+ // or FIXME: annotate inserted imports with the same cfg
+ for syntax in sema.ancestors_with_macros(position.clone()) {
+ if let Some(file) = ast::SourceFile::cast(syntax.clone()) {
+ return Some(ImportScope::File(file));
+ } else if let Some(item) = ast::Item::cast(syntax) {
+ return match item {
+ ast::Item::Const(konst) if contains_cfg_attr(&konst) => {
+ // FIXME: Instead of bailing out with None, we should note down that
+ // this import needs an attribute added
+ match sema.original_ast_node(konst)?.body()? {
+ ast::Expr::BlockExpr(block) => block,
+ _ => return None,
+ }
+ .stmt_list()
+ .map(ImportScope::Block)
+ }
+ ast::Item::Fn(func) if contains_cfg_attr(&func) => {
+ // FIXME: Instead of bailing out with None, we should note down that
+ // this import needs an attribute added
+ sema.original_ast_node(func)?.body()?.stmt_list().map(ImportScope::Block)
+ }
+ ast::Item::Static(statik) if contains_cfg_attr(&statik) => {
+ // FIXME: Instead of bailing out with None, we should note down that
+ // this import needs an attribute added
+ match sema.original_ast_node(statik)?.body()? {
+ ast::Expr::BlockExpr(block) => block,
+ _ => return None,
+ }
+ .stmt_list()
+ .map(ImportScope::Block)
+ }
+ ast::Item::Module(module) => {
+ // early return is important here, if we can't find the original module
+ // in the input there is no way for us to insert an import anywhere.
+ sema.original_ast_node(module)?.item_list().map(ImportScope::Module)
+ }
+ _ => continue,
+ };
+ }
+ }
+ None
+ }
+
+ pub fn as_syntax_node(&self) -> &SyntaxNode {
+ match self {
+ ImportScope::File(file) => file.syntax(),
+ ImportScope::Module(item_list) => item_list.syntax(),
+ ImportScope::Block(block) => block.syntax(),
+ }
+ }
+
+ pub fn clone_for_update(&self) -> Self {
+ match self {
+ ImportScope::File(file) => ImportScope::File(file.clone_for_update()),
+ ImportScope::Module(item_list) => ImportScope::Module(item_list.clone_for_update()),
+ ImportScope::Block(block) => ImportScope::Block(block.clone_for_update()),
+ }
+ }
+}
+
+/// Inserts an import path into the given file/node. Whether and how imports are merged is determined
+/// by the configured [`ImportGranularity`] and, unless the granularity is enforced, by the import style
+/// guessed from the scope's existing imports.
+pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
+ let _p = profile::span("insert_use");
+ let mut mb = match cfg.granularity {
+ ImportGranularity::Crate => Some(MergeBehavior::Crate),
+ ImportGranularity::Module => Some(MergeBehavior::Module),
+ ImportGranularity::Item | ImportGranularity::Preserve => None,
+ };
+ if !cfg.enforce_granularity {
+ let file_granularity = guess_granularity_from_scope(scope);
+ mb = match file_granularity {
+ ImportGranularityGuess::Unknown => mb,
+ ImportGranularityGuess::Item => None,
+ ImportGranularityGuess::Module => Some(MergeBehavior::Module),
+ ImportGranularityGuess::ModuleOrItem => mb.and(Some(MergeBehavior::Module)),
+ ImportGranularityGuess::Crate => Some(MergeBehavior::Crate),
+ ImportGranularityGuess::CrateOrModule => mb.or(Some(MergeBehavior::Crate)),
+ };
+ }
+
+ let use_item =
+ make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update();
+ // merge into existing imports if possible
+ if let Some(mb) = mb {
+ let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it));
+ for existing_use in
+ scope.as_syntax_node().children().filter_map(ast::Use::cast).filter(filter)
+ {
+ if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) {
+ ted::replace(existing_use.syntax(), merged.syntax());
+ return;
+ }
+ }
+ }
+
+    // Either we weren't allowed to merge or there is no existing import that fits the merge conditions,
+    // so look for the place we have to insert at.
+ insert_use_(scope, &path, cfg.group, use_item);
+}
+
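+/// Removes the enclosing use tree (or the whole `use` item, if the tree sits directly in one)
+/// for the given path, provided the path is the complete path of a plain use tree, i.e. one
+/// without a nested tree list or a glob.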
+pub fn remove_path_if_in_use_stmt(path: &ast::Path) {
+ // FIXME: improve this
+ if path.parent_path().is_some() {
+ return;
+ }
+ if let Some(use_tree) = path.syntax().parent().and_then(ast::UseTree::cast) {
+ if use_tree.use_tree_list().is_some() || use_tree.star_token().is_some() {
+ return;
+ }
+ if let Some(use_) = use_tree.syntax().parent().and_then(ast::Use::cast) {
+ use_.remove();
+ return;
+ }
+ use_tree.remove();
+ }
+}
+
+#[derive(Eq, PartialEq, PartialOrd, Ord)]
+enum ImportGroup {
+ // the order here defines the order of new group inserts
+ Std,
+ ExternCrate,
+ ThisCrate,
+ ThisModule,
+ SuperModule,
+}
+
+impl ImportGroup {
+ fn new(path: &ast::Path) -> ImportGroup {
+ let default = ImportGroup::ExternCrate;
+
+ let first_segment = match path.first_segment() {
+ Some(it) => it,
+ None => return default,
+ };
+
+ let kind = first_segment.kind().unwrap_or(PathSegmentKind::SelfKw);
+ match kind {
+ PathSegmentKind::SelfKw => ImportGroup::ThisModule,
+ PathSegmentKind::SuperKw => ImportGroup::SuperModule,
+ PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
+ PathSegmentKind::Name(name) => match name.text().as_str() {
+ "std" => ImportGroup::Std,
+ "core" => ImportGroup::Std,
+ _ => ImportGroup::ExternCrate,
+ },
+ // these aren't valid use paths, so fall back to something random
+ PathSegmentKind::SelfTypeKw => ImportGroup::ExternCrate,
+ PathSegmentKind::Type { .. } => ImportGroup::ExternCrate,
+ }
+ }
+}
+
+#[derive(PartialEq, PartialOrd, Debug, Clone, Copy)]
+enum ImportGranularityGuess {
+ Unknown,
+ Item,
+ Module,
+ ModuleOrItem,
+ Crate,
+ CrateOrModule,
+}
+
+fn guess_granularity_from_scope(scope: &ImportScope) -> ImportGranularityGuess {
+    // The idea is simple: check each import on its own, and each import together with the one preceding it,
+    // for whether they fulfill a granularity criterion.
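+    // For example, two consecutive plain imports such as `use std::fmt::Debug;` and
+    // `use std::fmt::Display;` (same prefix, each differing in exactly one trailing segment,
+    // and no use-tree lists) are a strong hint for item-style granularity.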
+ let use_stmt = |item| match item {
+ ast::Item::Use(use_) => {
+ let use_tree = use_.use_tree()?;
+ Some((use_tree, use_.visibility(), use_.attrs()))
+ }
+ _ => None,
+ };
+ let mut use_stmts = match scope {
+ ImportScope::File(f) => f.items(),
+ ImportScope::Module(m) => m.items(),
+ ImportScope::Block(b) => b.items(),
+ }
+ .filter_map(use_stmt);
+ let mut res = ImportGranularityGuess::Unknown;
+ let (mut prev, mut prev_vis, mut prev_attrs) = match use_stmts.next() {
+ Some(it) => it,
+ None => return res,
+ };
+ loop {
+ if let Some(use_tree_list) = prev.use_tree_list() {
+ if use_tree_list.use_trees().any(|tree| tree.use_tree_list().is_some()) {
+ // Nested tree lists can only occur in crate style, or with no proper style being enforced in the file.
+ break ImportGranularityGuess::Crate;
+ } else {
+ // Could still be crate-style so continue looking.
+ res = ImportGranularityGuess::CrateOrModule;
+ }
+ }
+
+ let (curr, curr_vis, curr_attrs) = match use_stmts.next() {
+ Some(it) => it,
+ None => break res,
+ };
+ if eq_visibility(prev_vis, curr_vis.clone()) && eq_attrs(prev_attrs, curr_attrs.clone()) {
+ if let Some((prev_path, curr_path)) = prev.path().zip(curr.path()) {
+ if let Some((prev_prefix, _)) = common_prefix(&prev_path, &curr_path) {
+ if prev.use_tree_list().is_none() && curr.use_tree_list().is_none() {
+ let prefix_c = prev_prefix.qualifiers().count();
+ let curr_c = curr_path.qualifiers().count() - prefix_c;
+ let prev_c = prev_path.qualifiers().count() - prefix_c;
+ if curr_c == 1 && prev_c == 1 {
+ // Same prefix, only differing in the last segment and no use tree lists so this has to be of item style.
+ break ImportGranularityGuess::Item;
+ } else {
+ // Same prefix and no use tree list but differs in more than one segment at the end. This might be module style still.
+ res = ImportGranularityGuess::ModuleOrItem;
+ }
+ } else {
+                        // Same prefix but with use tree lists involved; this has to be module
+                        // style as it can't be crate style since the trees wouldn't share a prefix then.
+ break ImportGranularityGuess::Module;
+ }
+ }
+ }
+ }
+ prev = curr;
+ prev_vis = curr_vis;
+ prev_attrs = curr_attrs;
+ }
+}
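+
+// Illustrative guesses (not part of the upstream source), mirroring the cases exercised by the
+// `guess_*` tests in `tests.rs`:
+//
+//     use foo::bar::baz;  use foo::bar::qux;          -> Item
+//     use foo::bar::baz;  use foo::bar::{qux, quux};  -> Module
+//     use foo::{baz::{qux, quux}, bar};               -> Crate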
+
+fn insert_use_(
+ scope: &ImportScope,
+ insert_path: &ast::Path,
+ group_imports: bool,
+ use_item: ast::Use,
+) {
+ let scope_syntax = scope.as_syntax_node();
+ let group = ImportGroup::new(insert_path);
+ let path_node_iter = scope_syntax
+ .children()
+ .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node)))
+ .flat_map(|(use_, node)| {
+ let tree = use_.use_tree()?;
+ let path = tree.path()?;
+ let has_tl = tree.use_tree_list().is_some();
+ Some((path, has_tl, node))
+ });
+
+ if group_imports {
+        // Iterator that discards anything that's not in the required grouping
+        // Only the first group that fits is used, which lets users rearrange their import groups freely
+ let group_iter = path_node_iter
+ .clone()
+ .skip_while(|(path, ..)| ImportGroup::new(path) != group)
+ .take_while(|(path, ..)| ImportGroup::new(path) == group);
+
+        // track the last element we iterated over; if this is still None after the iteration, the group iterator yielded nothing
+ let mut last = None;
+ // find the element that would come directly after our new import
+ let post_insert: Option<(_, _, SyntaxNode)> = group_iter
+ .inspect(|(.., node)| last = Some(node.clone()))
+ .find(|&(ref path, has_tl, _)| {
+ use_tree_path_cmp(insert_path, false, path, has_tl) != Ordering::Greater
+ });
+
+ if let Some((.., node)) = post_insert {
+ cov_mark::hit!(insert_group);
+ // insert our import before that element
+ return ted::insert(ted::Position::before(node), use_item.syntax());
+ }
+ if let Some(node) = last {
+ cov_mark::hit!(insert_group_last);
+ // there is no element after our new import, so append it to the end of the group
+ return ted::insert(ted::Position::after(node), use_item.syntax());
+ }
+
+        // the group we were looking for doesn't exist yet, so insert a new group for it
+
+ let mut last = None;
+ // find the group that comes after where we want to insert
+ let post_group = path_node_iter
+ .inspect(|(.., node)| last = Some(node.clone()))
+ .find(|(p, ..)| ImportGroup::new(p) > group);
+ if let Some((.., node)) = post_group {
+ cov_mark::hit!(insert_group_new_group);
+ ted::insert(ted::Position::before(&node), use_item.syntax());
+ if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) {
+ ted::insert(ted::Position::after(node), make::tokens::single_newline());
+ }
+ return;
+ }
+ // there is no such group, so append after the last one
+ if let Some(node) = last {
+ cov_mark::hit!(insert_group_no_group);
+ ted::insert(ted::Position::after(&node), use_item.syntax());
+ ted::insert(ted::Position::after(node), make::tokens::single_newline());
+ return;
+ }
+ } else {
+        // grouping is disabled, so if there are any existing imports just append after the last one
+ if let Some((_, _, node)) = path_node_iter.last() {
+ cov_mark::hit!(insert_no_grouping_last);
+ ted::insert(ted::Position::after(node), use_item.syntax());
+ return;
+ }
+ }
+
+ let l_curly = match scope {
+ ImportScope::File(_) => None,
+        // don't insert the imports before the item list's opening curly brace
+ ImportScope::Module(item_list) => item_list.l_curly_token(),
+        // don't insert the imports before the block expr's opening curly brace
+ ImportScope::Block(block) => block.l_curly_token(),
+ };
+ // there are no imports in this file at all
+ // so put the import after all inner module attributes and possible license header comments
+ if let Some(last_inner_element) = scope_syntax
+ .children_with_tokens()
+ // skip the curly brace
+ .skip(l_curly.is_some() as usize)
+ .take_while(|child| match child {
+ NodeOrToken::Node(node) => is_inner_attribute(node.clone()),
+ NodeOrToken::Token(token) => {
+ [SyntaxKind::WHITESPACE, SyntaxKind::COMMENT, SyntaxKind::SHEBANG]
+ .contains(&token.kind())
+ }
+ })
+ .filter(|child| child.as_token().map_or(true, |t| t.kind() != SyntaxKind::WHITESPACE))
+ .last()
+ {
+ cov_mark::hit!(insert_empty_inner_attr);
+ ted::insert(ted::Position::after(&last_inner_element), use_item.syntax());
+ ted::insert(ted::Position::after(last_inner_element), make::tokens::single_newline());
+ } else {
+ match l_curly {
+ Some(b) => {
+ cov_mark::hit!(insert_empty_module);
+ ted::insert(ted::Position::after(&b), make::tokens::single_newline());
+ ted::insert(ted::Position::after(&b), use_item.syntax());
+ }
+ None => {
+ cov_mark::hit!(insert_empty_file);
+ ted::insert(
+ ted::Position::first_child_of(scope_syntax),
+ make::tokens::blank_line(),
+ );
+ ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax());
+ }
+ }
+ }
+}
+
+fn is_inner_attribute(node: SyntaxNode) -> bool {
+ ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
new file mode 100644
index 000000000..59673af32
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
@@ -0,0 +1,1084 @@
+use base_db::fixture::WithFixture;
+use hir::PrefixKind;
+use stdx::trim_indent;
+use test_utils::{assert_eq_text, CURSOR_MARKER};
+
+use super::*;
+
+#[test]
+fn trailing_comment_in_empty_file() {
+ check(
+ "foo::bar",
+ r#"
+struct Struct;
+// 0 = 1
+"#,
+ r#"
+use foo::bar;
+
+struct Struct;
+// 0 = 1
+"#,
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
+fn respects_cfg_attr_fn() {
+ check(
+ r"bar::Bar",
+ r#"
+#[cfg(test)]
+fn foo() {$0}
+"#,
+ r#"
+#[cfg(test)]
+fn foo() {
+ use bar::Bar;
+}
+"#,
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
+fn respects_cfg_attr_const() {
+ check(
+ r"bar::Bar",
+ r#"
+#[cfg(test)]
+const FOO: Bar = {$0};
+"#,
+ r#"
+#[cfg(test)]
+const FOO: Bar = {
+ use bar::Bar;
+};
+"#,
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
+fn insert_skips_lone_glob_imports() {
+ check(
+ "use foo::baz::A",
+ r"
+use foo::bar::*;
+",
+ r"
+use foo::bar::*;
+use foo::baz::A;
+",
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
+fn insert_not_group() {
+ cov_mark::check!(insert_no_grouping_last);
+ check_with_config(
+ "use external_crate2::bar::A",
+ r"
+use std::bar::B;
+use external_crate::bar::A;
+use crate::bar::A;
+use self::bar::A;
+use super::bar::A;",
+ r"
+use std::bar::B;
+use external_crate::bar::A;
+use crate::bar::A;
+use self::bar::A;
+use super::bar::A;
+use external_crate2::bar::A;",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Item,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: true,
+ },
+ );
+}
+
+#[test]
+fn insert_existing() {
+ check_crate("std::fs", "use std::fs;", "use std::fs;")
+}
+
+#[test]
+fn insert_start() {
+ check_none(
+ "std::bar::AA",
+ r"
+use std::bar::B;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+ r"
+use std::bar::AA;
+use std::bar::B;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+ )
+}
+
+#[test]
+fn insert_start_indent() {
+ check_none(
+ "std::bar::AA",
+ r"
+ use std::bar::B;
+ use std::bar::C;",
+ r"
+ use std::bar::AA;
+ use std::bar::B;
+ use std::bar::C;",
+ );
+}
+
+#[test]
+fn insert_middle() {
+ cov_mark::check!(insert_group);
+ check_none(
+ "std::bar::EE",
+ r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+ r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::EE;
+use std::bar::F;
+use std::bar::G;",
+ )
+}
+
+#[test]
+fn insert_middle_indent() {
+ check_none(
+ "std::bar::EE",
+ r"
+ use std::bar::A;
+ use std::bar::D;
+ use std::bar::F;
+ use std::bar::G;",
+ r"
+ use std::bar::A;
+ use std::bar::D;
+ use std::bar::EE;
+ use std::bar::F;
+ use std::bar::G;",
+ )
+}
+
+#[test]
+fn insert_end() {
+ cov_mark::check!(insert_group_last);
+ check_none(
+ "std::bar::ZZ",
+ r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;",
+ r"
+use std::bar::A;
+use std::bar::D;
+use std::bar::F;
+use std::bar::G;
+use std::bar::ZZ;",
+ )
+}
+
+#[test]
+fn insert_end_indent() {
+ check_none(
+ "std::bar::ZZ",
+ r"
+ use std::bar::A;
+ use std::bar::D;
+ use std::bar::F;
+ use std::bar::G;",
+ r"
+ use std::bar::A;
+ use std::bar::D;
+ use std::bar::F;
+ use std::bar::G;
+ use std::bar::ZZ;",
+ )
+}
+
+#[test]
+fn insert_middle_nested() {
+ check_none(
+ "std::bar::EE",
+ r"
+use std::bar::A;
+use std::bar::{D, Z}; // example of weird imports due to user
+use std::bar::F;
+use std::bar::G;",
+ r"
+use std::bar::A;
+use std::bar::EE;
+use std::bar::{D, Z}; // example of weird imports due to user
+use std::bar::F;
+use std::bar::G;",
+ )
+}
+
+#[test]
+fn insert_middle_groups() {
+ check_none(
+ "foo::bar::GG",
+ r"
+ use std::bar::A;
+ use std::bar::D;
+
+ use foo::bar::F;
+ use foo::bar::H;",
+ r"
+ use std::bar::A;
+ use std::bar::D;
+
+ use foo::bar::F;
+ use foo::bar::GG;
+ use foo::bar::H;",
+ )
+}
+
+#[test]
+fn insert_first_matching_group() {
+ check_none(
+ "foo::bar::GG",
+ r"
+ use foo::bar::A;
+ use foo::bar::D;
+
+ use std;
+
+ use foo::bar::F;
+ use foo::bar::H;",
+ r"
+ use foo::bar::A;
+ use foo::bar::D;
+ use foo::bar::GG;
+
+ use std;
+
+ use foo::bar::F;
+ use foo::bar::H;",
+ )
+}
+
+#[test]
+fn insert_missing_group_std() {
+ cov_mark::check!(insert_group_new_group);
+ check_none(
+ "std::fmt",
+ r"
+ use foo::bar::A;
+ use foo::bar::D;",
+ r"
+ use std::fmt;
+
+ use foo::bar::A;
+ use foo::bar::D;",
+ )
+}
+
+#[test]
+fn insert_missing_group_self() {
+ cov_mark::check!(insert_group_no_group);
+ check_none(
+ "self::fmt",
+ r"
+use foo::bar::A;
+use foo::bar::D;",
+ r"
+use foo::bar::A;
+use foo::bar::D;
+
+use self::fmt;",
+ )
+}
+
+#[test]
+fn insert_no_imports() {
+ check_crate(
+ "foo::bar",
+ "fn main() {}",
+ r"use foo::bar;
+
+fn main() {}",
+ )
+}
+
+#[test]
+fn insert_empty_file() {
+ cov_mark::check_count!(insert_empty_file, 2);
+
+ // Default configuration
+ // empty files will get two trailing newlines
+ // this is due to the test case insert_no_imports above
+ check_crate(
+ "foo::bar",
+ "",
+ r"use foo::bar;
+
+",
+ );
+
+ // "not group" configuration
+ check_with_config(
+ "use external_crate2::bar::A",
+ r"",
+ r"use external_crate2::bar::A;
+
+",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Item,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: true,
+ },
+ );
+}
+
+#[test]
+fn insert_empty_module() {
+ cov_mark::check_count!(insert_empty_module, 2);
+
+ // Default configuration
+ check(
+ "foo::bar",
+ r"
+mod x {$0}
+",
+ r"
+mod x {
+ use foo::bar;
+}
+",
+ ImportGranularity::Item,
+ );
+
+ // "not group" configuration
+ check_with_config(
+ "foo::bar",
+ r"mod x {$0}",
+ r"mod x {
+ use foo::bar;
+}",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Item,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: true,
+ },
+ );
+}
+
+#[test]
+fn insert_after_inner_attr() {
+ cov_mark::check_count!(insert_empty_inner_attr, 2);
+
+ // Default configuration
+ check_crate(
+ "foo::bar",
+ r"#![allow(unused_imports)]",
+ r"#![allow(unused_imports)]
+
+use foo::bar;",
+ );
+
+ // "not group" configuration
+ check_with_config(
+ "foo::bar",
+ r"#![allow(unused_imports)]",
+ r"#![allow(unused_imports)]
+
+use foo::bar;",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Item,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: true,
+ },
+ );
+}
+
+#[test]
+fn insert_after_inner_attr2() {
+ check_crate(
+ "foo::bar",
+ r"#![allow(unused_imports)]
+
+#![no_std]
+fn main() {}",
+ r"#![allow(unused_imports)]
+
+#![no_std]
+
+use foo::bar;
+fn main() {}",
+ );
+}
+
+#[test]
+fn inserts_after_single_line_inner_comments() {
+ check_none(
+ "foo::bar::Baz",
+ "//! Single line inner comments do not allow any code before them.",
+ r#"//! Single line inner comments do not allow any code before them.
+
+use foo::bar::Baz;"#,
+ );
+ check_none(
+ "foo::bar::Baz",
+ r"mod foo {
+ //! Single line inner comments do not allow any code before them.
+$0
+}",
+ r"mod foo {
+ //! Single line inner comments do not allow any code before them.
+
+ use foo::bar::Baz;
+
+}",
+ );
+}
+
+#[test]
+fn inserts_after_single_line_comments() {
+ check_none(
+ "foo::bar::Baz",
+ "// Represents a possible license header and/or general module comments",
+ r#"// Represents a possible license header and/or general module comments
+
+use foo::bar::Baz;"#,
+ );
+}
+
+#[test]
+fn inserts_after_shebang() {
+ check_none(
+ "foo::bar::Baz",
+ "#!/usr/bin/env rust",
+ r#"#!/usr/bin/env rust
+
+use foo::bar::Baz;"#,
+ );
+}
+
+#[test]
+fn inserts_after_multiple_single_line_comments() {
+ check_none(
+ "foo::bar::Baz",
+ "// Represents a possible license header and/or general module comments
+// Second single-line comment
+// Third single-line comment",
+ r#"// Represents a possible license header and/or general module comments
+// Second single-line comment
+// Third single-line comment
+
+use foo::bar::Baz;"#,
+ );
+}
+
+#[test]
+fn inserts_before_single_line_item_comments() {
+ check_none(
+ "foo::bar::Baz",
+ r#"// Represents a comment about a function
+fn foo() {}"#,
+ r#"use foo::bar::Baz;
+
+// Represents a comment about a function
+fn foo() {}"#,
+ );
+}
+
+#[test]
+fn inserts_after_single_line_header_comments_and_before_item() {
+ check_none(
+ "foo::bar::Baz",
+ r#"// Represents a possible license header
+// Line two of possible license header
+
+fn foo() {}"#,
+ r#"// Represents a possible license header
+// Line two of possible license header
+
+use foo::bar::Baz;
+
+fn foo() {}"#,
+ );
+}
+
+#[test]
+fn inserts_after_multiline_inner_comments() {
+ check_none(
+ "foo::bar::Baz",
+ r#"/*! Multiline inner comments do not allow any code before them. */
+
+/*! Still an inner comment, cannot place any code before. */
+fn main() {}"#,
+ r#"/*! Multiline inner comments do not allow any code before them. */
+
+/*! Still an inner comment, cannot place any code before. */
+
+use foo::bar::Baz;
+fn main() {}"#,
+ )
+}
+
+#[test]
+fn inserts_after_all_inner_items() {
+ check_none(
+ "foo::bar::Baz",
+ r#"#![allow(unused_imports)]
+/*! Multiline line comment 2 */
+
+
+//! Single line comment 1
+#![no_std]
+//! Single line comment 2
+fn main() {}"#,
+ r#"#![allow(unused_imports)]
+/*! Multiline line comment 2 */
+
+
+//! Single line comment 1
+#![no_std]
+//! Single line comment 2
+
+use foo::bar::Baz;
+fn main() {}"#,
+ )
+}
+
+#[test]
+fn merge_groups() {
+ check_module("std::io", r"use std::fmt;", r"use std::{fmt, io};")
+}
+
+#[test]
+fn merge_groups_last() {
+ check_module(
+ "std::io",
+ r"use std::fmt::{Result, Display};",
+ r"use std::fmt::{Result, Display};
+use std::io;",
+ )
+}
+
+#[test]
+fn merge_last_into_self() {
+ check_module("foo::bar::baz", r"use foo::bar;", r"use foo::bar::{self, baz};");
+}
+
+#[test]
+fn merge_groups_full() {
+ check_crate(
+ "std::io",
+ r"use std::fmt::{Result, Display};",
+ r"use std::{fmt::{Result, Display}, io};",
+ )
+}
+
+#[test]
+fn merge_groups_long_full() {
+ check_crate("std::foo::bar::Baz", r"use std::foo::bar::Qux;", r"use std::foo::bar::{Qux, Baz};")
+}
+
+#[test]
+fn merge_groups_long_last() {
+ check_module(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::Qux;",
+ r"use std::foo::bar::{Qux, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_full_list() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, Quux};",
+ r"use std::foo::bar::{Qux, Quux, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_last_list() {
+ check_module(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, Quux};",
+ r"use std::foo::bar::{Qux, Quux, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_full_nested() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}, Baz};",
+ )
+}
+
+#[test]
+fn merge_groups_long_last_nested() {
+ check_module(
+ "std::foo::bar::Baz",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ r"use std::foo::bar::Baz;
+use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ )
+}
+
+#[test]
+fn merge_groups_full_nested_deep() {
+ check_crate(
+ "std::foo::bar::quux::Baz",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};",
+ r"use std::foo::bar::{Qux, quux::{Fez, Fizz, Baz}};",
+ )
+}
+
+#[test]
+fn merge_groups_full_nested_long() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::{foo::bar::Qux};",
+ r"use std::{foo::bar::{Qux, Baz}};",
+ );
+}
+
+#[test]
+fn merge_groups_last_nested_long() {
+ check_crate(
+ "std::foo::bar::Baz",
+ r"use std::{foo::bar::Qux};",
+ r"use std::{foo::bar::{Qux, Baz}};",
+ );
+}
+
+#[test]
+fn merge_groups_skip_pub() {
+ check_crate(
+ "std::io",
+ r"pub use std::fmt::{Result, Display};",
+ r"pub use std::fmt::{Result, Display};
+use std::io;",
+ )
+}
+
+#[test]
+fn merge_groups_skip_pub_crate() {
+ check_crate(
+ "std::io",
+ r"pub(crate) use std::fmt::{Result, Display};",
+ r"pub(crate) use std::fmt::{Result, Display};
+use std::io;",
+ )
+}
+
+#[test]
+fn merge_groups_skip_attributed() {
+ check_crate(
+ "std::io",
+ r#"
+#[cfg(feature = "gated")] use std::fmt::{Result, Display};
+"#,
+ r#"
+#[cfg(feature = "gated")] use std::fmt::{Result, Display};
+use std::io;
+"#,
+ )
+}
+
+#[test]
+fn split_out_merge() {
+ // FIXME: This is suboptimal, we want to get `use std::fmt::{self, Result}`
+ // instead.
+ check_module(
+ "std::fmt::Result",
+ r"use std::{fmt, io};",
+ r"use std::fmt::Result;
+use std::{fmt, io};",
+ )
+}
+
+#[test]
+fn merge_into_module_import() {
+ check_crate("std::fmt::Result", r"use std::{fmt, io};", r"use std::{fmt::{self, Result}, io};")
+}
+
+#[test]
+fn merge_groups_self() {
+ check_crate("std::fmt::Debug", r"use std::fmt;", r"use std::fmt::{self, Debug};")
+}
+
+#[test]
+fn merge_mod_into_glob() {
+ check_with_config(
+ "token::TokenKind",
+ r"use token::TokenKind::*;",
+ r"use token::TokenKind::{*, self};",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: false,
+ },
+ )
+ // FIXME: have it emit `use token::TokenKind::{self, *}`?
+}
+
+#[test]
+fn merge_self_glob() {
+ check_with_config(
+ "self",
+ r"use self::*;",
+ r"use self::{*, self};",
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: false,
+ },
+ )
+ // FIXME: have it emit `use {self, *}`?
+}
+
+#[test]
+fn merge_glob() {
+ check_crate(
+ "syntax::SyntaxKind",
+ r"
+use syntax::{SyntaxKind::*};",
+ r"
+use syntax::{SyntaxKind::{*, self}};",
+ )
+}
+
+#[test]
+fn merge_glob_nested() {
+ check_crate(
+ "foo::bar::quux::Fez",
+ r"use foo::bar::{Baz, quux::*};",
+ r"use foo::bar::{Baz, quux::{*, Fez}};",
+ )
+}
+
+#[test]
+fn merge_nested_considers_first_segments() {
+ check_crate(
+ "hir_ty::display::write_bounds_like_dyn_trait",
+ r"use hir_ty::{autoderef, display::{HirDisplayError, HirFormatter}, method_resolution};",
+ r"use hir_ty::{autoderef, display::{HirDisplayError, HirFormatter, write_bounds_like_dyn_trait}, method_resolution};",
+ );
+}
+
+#[test]
+fn skip_merge_last_too_long() {
+ check_module(
+ "foo::bar",
+ r"use foo::bar::baz::Qux;",
+ r"use foo::bar;
+use foo::bar::baz::Qux;",
+ );
+}
+
+#[test]
+fn skip_merge_last_too_long2() {
+ check_module(
+ "foo::bar::baz::Qux",
+ r"use foo::bar;",
+ r"use foo::bar;
+use foo::bar::baz::Qux;",
+ );
+}
+
+#[test]
+fn insert_short_before_long() {
+ check_none(
+ "foo::bar",
+ r"use foo::bar::baz::Qux;",
+ r"use foo::bar;
+use foo::bar::baz::Qux;",
+ );
+}
+
+#[test]
+fn merge_last_fail() {
+ check_merge_only_fail(
+ r"use foo::bar::{baz::{Qux, Fez}};",
+ r"use foo::bar::{baaz::{Quux, Feez}};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn merge_last_fail1() {
+ check_merge_only_fail(
+ r"use foo::bar::{baz::{Qux, Fez}};",
+ r"use foo::bar::baaz::{Quux, Feez};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn merge_last_fail2() {
+ check_merge_only_fail(
+ r"use foo::bar::baz::{Qux, Fez};",
+ r"use foo::bar::{baaz::{Quux, Feez}};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn merge_last_fail3() {
+ check_merge_only_fail(
+ r"use foo::bar::baz::{Qux, Fez};",
+ r"use foo::bar::baaz::{Quux, Feez};",
+ MergeBehavior::Module,
+ );
+}
+
+#[test]
+fn guess_empty() {
+ check_guess("", ImportGranularityGuess::Unknown);
+}
+
+#[test]
+fn guess_single() {
+ check_guess(r"use foo::{baz::{qux, quux}, bar};", ImportGranularityGuess::Crate);
+ check_guess(r"use foo::bar;", ImportGranularityGuess::Unknown);
+ check_guess(r"use foo::bar::{baz, qux};", ImportGranularityGuess::CrateOrModule);
+}
+
+#[test]
+fn guess_unknown() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use oof::rab::xuq;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+#[test]
+fn guess_item() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use foo::bar::qux;
+",
+ ImportGranularityGuess::Item,
+ );
+}
+
+#[test]
+fn guess_module_or_item() {
+ check_guess(
+ r"
+use foo::bar::Bar;
+use foo::qux;
+",
+ ImportGranularityGuess::ModuleOrItem,
+ );
+ check_guess(
+ r"
+use foo::bar::Bar;
+use foo::bar;
+",
+ ImportGranularityGuess::ModuleOrItem,
+ );
+}
+
+#[test]
+fn guess_module() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use foo::bar::{qux, quux};
+",
+ ImportGranularityGuess::Module,
+ );
+ // this is a rather odd case, technically this file isn't following any style properly.
+ check_guess(
+ r"
+use foo::bar::baz;
+use foo::{baz::{qux, quux}, bar};
+",
+ ImportGranularityGuess::Module,
+ );
+ check_guess(
+ r"
+use foo::bar::Bar;
+use foo::baz::Baz;
+use foo::{Foo, Qux};
+",
+ ImportGranularityGuess::Module,
+ );
+}
+
+#[test]
+fn guess_crate_or_module() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use oof::bar::{qux, quux};
+",
+ ImportGranularityGuess::CrateOrModule,
+ );
+}
+
+#[test]
+fn guess_crate() {
+ check_guess(
+ r"
+use frob::bar::baz;
+use foo::{baz::{qux, quux}, bar};
+",
+ ImportGranularityGuess::Crate,
+ );
+}
+
+#[test]
+fn guess_skips_differing_vis() {
+ check_guess(
+ r"
+use foo::bar::baz;
+pub use foo::bar::qux;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+#[test]
+fn guess_skips_differing_attrs() {
+ check_guess(
+ r"
+pub use foo::bar::baz;
+#[doc(hidden)]
+pub use foo::bar::qux;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+#[test]
+fn guess_grouping_matters() {
+ check_guess(
+ r"
+use foo::bar::baz;
+use oof::bar::baz;
+use foo::bar::qux;
+",
+ ImportGranularityGuess::Unknown,
+ );
+}
+
+fn check_with_config(
+ path: &str,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+ config: &InsertUseConfig,
+) {
+ let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) {
+ let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before);
+ (db, file_id, Some(range_or_offset))
+ } else {
+ let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before);
+ (db, file_id, None)
+ };
+ let sema = &Semantics::new(&db);
+ let source_file = sema.parse(file_id);
+ let syntax = source_file.syntax().clone_for_update();
+ let file = pos
+ .and_then(|pos| syntax.token_at_offset(pos.expect_offset()).next()?.parent())
+ .and_then(|it| ImportScope::find_insert_use_container(&it, sema))
+ .or_else(|| ImportScope::from(syntax))
+ .unwrap();
+ let path = ast::SourceFile::parse(&format!("use {};", path))
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(ast::Path::cast)
+ .unwrap();
+
+ insert_use(&file, path, config);
+ let result = file.as_syntax_node().ancestors().last().unwrap().to_string();
+ assert_eq_text!(&trim_indent(ra_fixture_after), &result);
+}
+
+fn check(
+ path: &str,
+ ra_fixture_before: &str,
+ ra_fixture_after: &str,
+ granularity: ImportGranularity,
+) {
+ check_with_config(
+ path,
+ ra_fixture_before,
+ ra_fixture_after,
+ &InsertUseConfig {
+ granularity,
+ enforce_granularity: true,
+ prefix_kind: PrefixKind::Plain,
+ group: true,
+ skip_glob_imports: true,
+ },
+ )
+}
+
+fn check_crate(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check(path, ra_fixture_before, ra_fixture_after, ImportGranularity::Crate)
+}
+
+fn check_module(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check(path, ra_fixture_before, ra_fixture_after, ImportGranularity::Module)
+}
+
+fn check_none(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ check(path, ra_fixture_before, ra_fixture_after, ImportGranularity::Item)
+}
+
+fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) {
+ let use0 = ast::SourceFile::parse(ra_fixture0)
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(ast::Use::cast)
+ .unwrap();
+
+ let use1 = ast::SourceFile::parse(ra_fixture1)
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(ast::Use::cast)
+ .unwrap();
+
+ let result = try_merge_imports(&use0, &use1, mb);
+ assert_eq!(result.map(|u| u.to_string()), None);
+}
+
+fn check_guess(ra_fixture: &str, expected: ImportGranularityGuess) {
+ let syntax = ast::SourceFile::parse(ra_fixture).tree().syntax().clone();
+ let file = ImportScope::from(syntax).unwrap();
+ assert_eq!(super::guess_granularity_from_scope(&file), expected);
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
new file mode 100644
index 000000000..7fb4b90e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -0,0 +1,295 @@
+//! Handle syntactic aspects of merging UseTrees.
+use std::cmp::Ordering;
+
+use itertools::{EitherOrBoth, Itertools};
+use syntax::{
+ ast::{self, AstNode, HasAttrs, HasVisibility, PathSegmentKind},
+ ted,
+};
+
+use crate::syntax_helpers::node_ext::vis_eq;
+
+/// What type of merges are allowed.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum MergeBehavior {
+ /// Merge imports from the same crate into a single use statement.
+ Crate,
+ /// Merge imports from the same module into a single use statement.
+ Module,
+}
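+
+// Illustrative difference (not part of the upstream source), taken from the insert_use tests:
+// merging `std::io` into `use std::fmt::{Result, Display};` yields
+//
+//     Crate:  use std::{fmt::{Result, Display}, io};
+//     Module: use std::fmt::{Result, Display};
+//             use std::io;                    // nested tree lists are not merged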
+
+impl MergeBehavior {
+ fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool {
+ match self {
+ MergeBehavior::Crate => true,
+ // only simple single segment paths are allowed
+ MergeBehavior::Module => {
+ tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
+ }
+ }
+ }
+}
+
+/// Merge `rhs` into `lhs` keeping both intact.
+/// Returned AST is mutable.
+pub fn try_merge_imports(
+ lhs: &ast::Use,
+ rhs: &ast::Use,
+ merge_behavior: MergeBehavior,
+) -> Option<ast::Use> {
+ // don't merge imports with different visibilities
+ if !eq_visibility(lhs.visibility(), rhs.visibility()) {
+ return None;
+ }
+ if !eq_attrs(lhs.attrs(), rhs.attrs()) {
+ return None;
+ }
+
+ let lhs = lhs.clone_subtree().clone_for_update();
+ let rhs = rhs.clone_subtree().clone_for_update();
+ let lhs_tree = lhs.use_tree()?;
+ let rhs_tree = rhs.use_tree()?;
+ try_merge_trees_mut(&lhs_tree, &rhs_tree, merge_behavior)?;
+ Some(lhs)
+}
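+
+// Illustrative usage sketch (not part of the upstream source), modelled on the
+// `check_merge_only_fail` test helper: parse two `ast::Use` nodes and try to merge them.
+//
+//     let lhs = ast::SourceFile::parse("use std::fmt::Display;")
+//         .tree().syntax().descendants().find_map(ast::Use::cast).unwrap();
+//     let rhs = ast::SourceFile::parse("use std::fmt::Debug;")
+//         .tree().syntax().descendants().find_map(ast::Use::cast).unwrap();
+//     let merged = try_merge_imports(&lhs, &rhs, MergeBehavior::Crate);
+//     // expected: Some single `use std::fmt::{..}` import containing both Display and Debug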
+
+/// Merge `rhs` into `lhs` keeping both intact.
+/// Returned AST is mutable.
+pub fn try_merge_trees(
+ lhs: &ast::UseTree,
+ rhs: &ast::UseTree,
+ merge: MergeBehavior,
+) -> Option<ast::UseTree> {
+ let lhs = lhs.clone_subtree().clone_for_update();
+ let rhs = rhs.clone_subtree().clone_for_update();
+ try_merge_trees_mut(&lhs, &rhs, merge)?;
+ Some(lhs)
+}
+
+fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) -> Option<()> {
+ let lhs_path = lhs.path()?;
+ let rhs_path = rhs.path()?;
+
+ let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?;
+ if !(lhs.is_simple_path()
+ && rhs.is_simple_path()
+ && lhs_path == lhs_prefix
+ && rhs_path == rhs_prefix)
+ {
+ lhs.split_prefix(&lhs_prefix);
+ rhs.split_prefix(&rhs_prefix);
+ }
+ recursive_merge(lhs, rhs, merge)
+}
+
+/// Recursively merges `rhs` into `lhs`.
+#[must_use]
+fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) -> Option<()> {
+ let mut use_trees: Vec<ast::UseTree> = lhs
+ .use_tree_list()
+ .into_iter()
+ .flat_map(|list| list.use_trees())
+    // We use Option here to early return from this function (this is not the
+    // same as a `filter` op).
+ .map(|tree| merge.is_tree_allowed(&tree).then(|| tree))
+ .collect::<Option<_>>()?;
+ use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path()));
+ for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {
+ if !merge.is_tree_allowed(&rhs_t) {
+ return None;
+ }
+ let rhs_path = rhs_t.path();
+
+ match use_trees
+ .binary_search_by(|lhs_t| path_cmp_bin_search(lhs_t.path(), rhs_path.as_ref()))
+ {
+ Ok(idx) => {
+ let lhs_t = &mut use_trees[idx];
+ let lhs_path = lhs_t.path()?;
+ let rhs_path = rhs_path?;
+ let (lhs_prefix, rhs_prefix) = common_prefix(&lhs_path, &rhs_path)?;
+ if lhs_prefix == lhs_path && rhs_prefix == rhs_path {
+ let tree_is_self = |tree: &ast::UseTree| {
+ tree.path().as_ref().map(path_is_self).unwrap_or(false)
+ };
+ // Check if only one of the two trees has a tree list, and
+ // whether that then contains `self` or not. If this is the
+ // case we can skip this iteration since the path without
+ // the list is already included in the other one via `self`.
+ let tree_contains_self = |tree: &ast::UseTree| {
+ tree.use_tree_list()
+ .map(|tree_list| tree_list.use_trees().any(|it| tree_is_self(&it)))
+ // Glob imports aren't part of the use-tree lists,
+ // so they need to be handled explicitly
+ .or_else(|| tree.star_token().map(|_| false))
+ };
+ match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
+ (Some(true), None) => continue,
+ (None, Some(true)) => {
+ ted::replace(lhs_t.syntax(), rhs_t.syntax());
+ *lhs_t = rhs_t;
+ continue;
+ }
+ _ => (),
+ }
+
+ if lhs_t.is_simple_path() && rhs_t.is_simple_path() {
+ continue;
+ }
+ }
+ lhs_t.split_prefix(&lhs_prefix);
+ rhs_t.split_prefix(&rhs_prefix);
+ recursive_merge(lhs_t, &rhs_t, merge)?;
+ }
+ Err(_)
+ if merge == MergeBehavior::Module
+ && !use_trees.is_empty()
+ && rhs_t.use_tree_list().is_some() =>
+ {
+ return None
+ }
+ Err(idx) => {
+ use_trees.insert(idx, rhs_t.clone());
+ lhs.get_or_create_use_tree_list().add_use_tree(rhs_t);
+ }
+ }
+ }
+ Some(())
+}
+
+/// Traverses both paths until they differ, returning the common prefix of both.
+pub fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Path)> {
+ let mut res = None;
+ let mut lhs_curr = lhs.first_qualifier_or_self();
+ let mut rhs_curr = rhs.first_qualifier_or_self();
+ loop {
+ match (lhs_curr.segment(), rhs_curr.segment()) {
+ (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (),
+ _ => break res,
+ }
+ res = Some((lhs_curr.clone(), rhs_curr.clone()));
+
+ match lhs_curr.parent_path().zip(rhs_curr.parent_path()) {
+ Some((lhs, rhs)) => {
+ lhs_curr = lhs;
+ rhs_curr = rhs;
+ }
+ _ => break res,
+ }
+ }
+}
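+
+// Illustrative example (not part of the upstream source): for the paths `std::foo::bar::Baz` and
+// `std::foo::qux` the common prefix is `std::foo`, returned once for each input path.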
+
+/// Orders paths in the following way:
+/// the lone `self` token comes first; after that come uppercase identifiers, then lowercase identifiers
+// FIXME: rustfmt sorts lowercase idents before uppercase, in general we want to have the same ordering rustfmt has
+// which is `self` and `super` first, then identifier imports with lowercase ones first, then glob imports and at last list imports.
+// Example foo::{self, foo, baz, Baz, Qux, *, {Bar}}
+fn path_cmp_for_sort(a: Option<ast::Path>, b: Option<ast::Path>) -> Ordering {
+ match (a, b) {
+ (None, None) => Ordering::Equal,
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ (Some(ref a), Some(ref b)) => match (path_is_self(a), path_is_self(b)) {
+ (true, true) => Ordering::Equal,
+ (true, false) => Ordering::Less,
+ (false, true) => Ordering::Greater,
+ (false, false) => path_cmp_short(a, b),
+ },
+ }
+}
+
+/// Path comparison func for binary searching for merging.
+fn path_cmp_bin_search(lhs: Option<ast::Path>, rhs: Option<&ast::Path>) -> Ordering {
+ match (lhs.as_ref().and_then(ast::Path::first_segment), rhs.and_then(ast::Path::first_segment))
+ {
+ (None, None) => Ordering::Equal,
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ (Some(ref a), Some(ref b)) => path_segment_cmp(a, b),
+ }
+}
+
+/// Short-circuiting comparison: if both paths are equal up to the point where one of them ends,
+/// they are considered equal.
+fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering {
+ let a = a.segments();
+ let b = b.segments();
+    // cmp_by would be useful for us here but that is currently unstable
+    // cmp doesn't work due to the lifetimes on text's return type
+ a.zip(b)
+ .find_map(|(a, b)| match path_segment_cmp(&a, &b) {
+ Ordering::Equal => None,
+ ord => Some(ord),
+ })
+ .unwrap_or(Ordering::Equal)
+}
+
+/// Compares two paths. If one ends earlier than the other, the `has_tl` parameters decide which is
+/// greater: a path that has a tree list is considered greater, while one that just ends without
+/// a tree list is considered less.
+pub(super) fn use_tree_path_cmp(
+ a: &ast::Path,
+ a_has_tl: bool,
+ b: &ast::Path,
+ b_has_tl: bool,
+) -> Ordering {
+ let a_segments = a.segments();
+ let b_segments = b.segments();
+    // cmp_by would be useful for us here but that is currently unstable
+    // cmp doesn't work due to the lifetimes on text's return type
+ a_segments
+ .zip_longest(b_segments)
+ .find_map(|zipped| match zipped {
+ EitherOrBoth::Both(ref a, ref b) => match path_segment_cmp(a, b) {
+ Ordering::Equal => None,
+ ord => Some(ord),
+ },
+ EitherOrBoth::Left(_) if !b_has_tl => Some(Ordering::Greater),
+ EitherOrBoth::Left(_) => Some(Ordering::Less),
+ EitherOrBoth::Right(_) if !a_has_tl => Some(Ordering::Less),
+ EitherOrBoth::Right(_) => Some(Ordering::Greater),
+ })
+ .unwrap_or(Ordering::Equal)
+}
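+
+// Illustrative ordering (not part of the upstream source), as read from the `zip_longest` arms
+// above, when comparing `foo` against `foo::bar`:
+//
+//     use_tree_path_cmp(foo, false, foo::bar, _) == Ordering::Less
+//     use_tree_path_cmp(foo, true,  foo::bar, _) == Ordering::Greater
+//
+// i.e. a shorter path only sorts before a longer one when it has no tree list.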
+
+fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
+ let a = a.kind().and_then(|kind| match kind {
+ PathSegmentKind::Name(name_ref) => Some(name_ref),
+ _ => None,
+ });
+ let b = b.kind().and_then(|kind| match kind {
+ PathSegmentKind::Name(name_ref) => Some(name_ref),
+ _ => None,
+ });
+ a.as_ref().map(ast::NameRef::text).cmp(&b.as_ref().map(ast::NameRef::text))
+}
+
+pub fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool {
+ match (vis0, vis1) {
+ (None, None) => true,
+ (Some(vis0), Some(vis1)) => vis_eq(&vis0, &vis1),
+ _ => false,
+ }
+}
+
+pub fn eq_attrs(
+ attrs0: impl Iterator<Item = ast::Attr>,
+ attrs1: impl Iterator<Item = ast::Attr>,
+) -> bool {
+ // FIXME order of attributes should not matter
+ let attrs0 = attrs0
+ .flat_map(|attr| attr.syntax().descendants_with_tokens())
+ .flat_map(|it| it.into_token());
+ let attrs1 = attrs1
+ .flat_map(|attr| attr.syntax().descendants_with_tokens())
+ .flat_map(|it| it.into_token());
+ stdx::iter_eq_by(attrs0, attrs1, |tok, tok2| tok.text() == tok2.text())
+}
+
+fn path_is_self(path: &ast::Path) -> bool {
+ path.segment().and_then(|seg| seg.self_token()).is_some() && path.qualifier().is_none()
+}
+
+fn path_len(path: ast::Path) -> usize {
+ path.segments().count()
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
new file mode 100644
index 000000000..07a57c883
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -0,0 +1,151 @@
+//! This module provides the functionality to search the project and its dependencies for an item
+//! by its name and a few other criteria.
+//! The main reason for this module to exist is that the project's items and the dependencies' items
+//! are located in different caches, with different APIs.
+use either::Either;
+use hir::{
+ import_map::{self, ImportKind},
+ symbols::FileSymbol,
+ AsAssocItem, Crate, ItemInNs, Semantics,
+};
+use limit::Limit;
+use syntax::{ast, AstNode, SyntaxKind::NAME};
+
+use crate::{
+ defs::{Definition, NameClass},
+ imports::import_assets::NameToImport,
+ symbol_index, RootDatabase,
+};
+
+/// A value to use when uncertain which limit to pick.
+pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
+
+/// Three possible ways to search for the name in associated and/or other items.
+#[derive(Debug, Clone, Copy)]
+pub enum AssocItemSearch {
+ /// Search for the name in both associated and other items.
+ Include,
+ /// Search for the name in other items only.
+ Exclude,
+ /// Search for the name in the associated items only.
+ AssocItemsOnly,
+}
+
+/// Searches for importable items with the given name in the crate and its dependencies.
+pub fn items_with_name<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ krate: Crate,
+ name: NameToImport,
+ assoc_item_search: AssocItemSearch,
+ limit: Option<usize>,
+) -> impl Iterator<Item = ItemInNs> + 'a {
+ let _p = profile::span("items_with_name").detail(|| {
+ format!(
+ "Name: {}, crate: {:?}, assoc items: {:?}, limit: {:?}",
+ name.text(),
+ assoc_item_search,
+ krate.display_name(sema.db).map(|name| name.to_string()),
+ limit,
+ )
+ });
+
+ let (mut local_query, mut external_query) = match name {
+ NameToImport::Exact(exact_name, case_sensitive) => {
+ let mut local_query = symbol_index::Query::new(exact_name.clone());
+ local_query.exact();
+
+ let external_query = import_map::Query::new(exact_name)
+ .name_only()
+ .search_mode(import_map::SearchMode::Equals);
+
+ (
+ local_query,
+ if case_sensitive { external_query.case_sensitive() } else { external_query },
+ )
+ }
+ NameToImport::Fuzzy(fuzzy_search_string) => {
+ let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
+
+ let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
+ .search_mode(import_map::SearchMode::Fuzzy)
+ .name_only();
+ match assoc_item_search {
+ AssocItemSearch::Include => {}
+ AssocItemSearch::Exclude => {
+ external_query = external_query.exclude_import_kind(ImportKind::AssociatedItem);
+ }
+ AssocItemSearch::AssocItemsOnly => {
+ external_query = external_query.assoc_items_only();
+ }
+ }
+
+ if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
+ local_query.case_sensitive();
+ external_query = external_query.case_sensitive();
+ }
+
+ (local_query, external_query)
+ }
+ };
+
+ if let Some(limit) = limit {
+ external_query = external_query.limit(limit);
+ local_query.limit(limit);
+ }
+
+ find_items(sema, krate, assoc_item_search, local_query, external_query)
+}
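+
+// Illustrative usage sketch (not part of the upstream source): given a `Semantics` and a `Crate`
+// in scope, an exact, case-sensitive lookup for `Display` outside associated items might look like
+//
+//     let hits: Vec<ItemInNs> = items_with_name(
+//         &sema,
+//         krate,
+//         NameToImport::Exact("Display".to_string(), true),
+//         AssocItemSearch::Exclude,
+//         Some(40),
+//     )
+//     .collect();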
+
+fn find_items<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ krate: Crate,
+ assoc_item_search: AssocItemSearch,
+ local_query: symbol_index::Query,
+ external_query: import_map::Query,
+) -> impl Iterator<Item = ItemInNs> + 'a {
+ let _p = profile::span("find_items");
+ let db = sema.db;
+
+ let external_importables =
+ krate.query_external_importables(db, external_query).map(|external_importable| {
+ match external_importable {
+ Either::Left(module_def) => ItemInNs::from(module_def),
+ Either::Right(macro_def) => ItemInNs::from(macro_def),
+ }
+ });
+
+ // Query the local crate using the symbol index.
+ let local_results = symbol_index::crate_symbols(db, krate, local_query)
+ .into_iter()
+ .filter_map(move |local_candidate| get_name_definition(sema, &local_candidate))
+ .filter_map(|name_definition_to_import| match name_definition_to_import {
+ Definition::Macro(macro_def) => Some(ItemInNs::from(macro_def)),
+ def => <Option<_>>::from(def),
+ });
+
+ external_importables.chain(local_results).filter(move |&item| match assoc_item_search {
+ AssocItemSearch::Include => true,
+ AssocItemSearch::Exclude => !is_assoc_item(item, sema.db),
+ AssocItemSearch::AssocItemsOnly => is_assoc_item(item, sema.db),
+ })
+}
+
+fn get_name_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ import_candidate: &FileSymbol,
+) -> Option<Definition> {
+ let _p = profile::span("get_name_definition");
+
+ let candidate_node = import_candidate.loc.syntax(sema)?;
+ let candidate_name_node = if candidate_node.kind() != NAME {
+ candidate_node.children().find(|it| it.kind() == NAME)?
+ } else {
+ candidate_node
+ };
+ let name = ast::Name::cast(candidate_name_node)?;
+ NameClass::classify(sema, &name)?.defined()
+}
+
+fn is_assoc_item(item: ItemInNs, db: &RootDatabase) -> bool {
+ item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)).is_some()
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/label.rs b/src/tools/rust-analyzer/crates/ide-db/src/label.rs
new file mode 100644
index 000000000..4b6d54b5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/label.rs
@@ -0,0 +1,48 @@
+//! See [`Label`]
+use std::fmt;
+
+/// A type to specify a UI label, like an entry in the list of assists. Enforces
+/// proper casing:
+///
+/// Frobnicate bar
+///
+/// Note the upper-case first letter and the absence of `.` at the end.
+#[derive(Clone)]
+pub struct Label(String);
+
+impl PartialEq<str> for Label {
+ fn eq(&self, other: &str) -> bool {
+ self.0 == other
+ }
+}
+
+impl PartialEq<&'_ str> for Label {
+ fn eq(&self, other: &&str) -> bool {
+ self == *other
+ }
+}
+
+impl From<Label> for String {
+ fn from(label: Label) -> String {
+ label.0
+ }
+}
+
+impl Label {
+ pub fn new(label: String) -> Label {
+ assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.'));
+ Label(label)
+ }
+}
+
+impl fmt::Display for Label {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
+
+impl fmt::Debug for Label {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
new file mode 100644
index 000000000..966bba616
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -0,0 +1,246 @@
+//! This crate defines the core data structure representing IDE state -- `RootDatabase`.
+//!
+//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod apply_change;
+
+pub mod active_parameter;
+pub mod assists;
+pub mod defs;
+pub mod famous_defs;
+pub mod helpers;
+pub mod items_locator;
+pub mod label;
+pub mod line_index;
+pub mod path_transform;
+pub mod rename;
+pub mod rust_doc;
+pub mod search;
+pub mod source_change;
+pub mod symbol_index;
+pub mod traits;
+pub mod ty_filter;
+pub mod use_trivial_contructor;
+
+pub mod imports {
+ pub mod import_assets;
+ pub mod insert_use;
+ pub mod merge_imports;
+}
+
+pub mod generated {
+ pub mod lints;
+}
+
+pub mod syntax_helpers {
+ pub mod node_ext;
+ pub mod insert_whitespace_into_node;
+ pub mod format_string;
+
+ pub use parser::LexedStr;
+}
+
+use std::{fmt, mem::ManuallyDrop, sync::Arc};
+
+use base_db::{
+ salsa::{self, Durability},
+ AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+};
+use hir::{
+ db::{AstDatabase, DefDatabase, HirDatabase},
+ symbols::FileSymbolKind,
+};
+
+use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
+pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
+
+/// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
+pub use base_db;
+
+pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+pub type FxIndexMap<K, V> =
+ indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+
+#[salsa::database(
+ base_db::SourceDatabaseExtStorage,
+ base_db::SourceDatabaseStorage,
+ hir::db::AstDatabaseStorage,
+ hir::db::DefDatabaseStorage,
+ hir::db::HirDatabaseStorage,
+ hir::db::InternDatabaseStorage,
+ LineIndexDatabaseStorage,
+ symbol_index::SymbolsDatabaseStorage
+)]
+pub struct RootDatabase {
+ // We use `ManuallyDrop` here because every codegen unit that contains a
+ // `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable,
+ // which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times, which makes
+ // compile times of all `ide_*` and downstream crates suffer greatly.
+ storage: ManuallyDrop<salsa::Storage<RootDatabase>>,
+}
+
+impl Drop for RootDatabase {
+ fn drop(&mut self) {
+ unsafe { ManuallyDrop::drop(&mut self.storage) };
+ }
+}
+
+impl fmt::Debug for RootDatabase {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("RootDatabase").finish()
+ }
+}
+
+impl Upcast<dyn AstDatabase> for RootDatabase {
+ fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+ &*self
+ }
+}
+
+impl Upcast<dyn DefDatabase> for RootDatabase {
+ fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+ &*self
+ }
+}
+
+impl Upcast<dyn HirDatabase> for RootDatabase {
+ fn upcast(&self) -> &(dyn HirDatabase + 'static) {
+ &*self
+ }
+}
+
+impl FileLoader for RootDatabase {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ FileLoaderDelegate(self).file_text(file_id)
+ }
+ fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ FileLoaderDelegate(self).resolve_path(path)
+ }
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ FileLoaderDelegate(self).relevant_crates(file_id)
+ }
+}
+
+impl salsa::Database for RootDatabase {}
+
+impl Default for RootDatabase {
+ fn default() -> RootDatabase {
+ RootDatabase::new(None)
+ }
+}
+
+impl RootDatabase {
+ pub fn new(lru_capacity: Option<usize>) -> RootDatabase {
+ let mut db = RootDatabase { storage: ManuallyDrop::new(salsa::Storage::default()) };
+ db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
+ db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
+ db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
+ db.set_enable_proc_attr_macros(false);
+ db.update_lru_capacity(lru_capacity);
+ db
+ }
+
+ pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
+ let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP);
+ base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+ hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+ hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+ }
+}
+
+impl salsa::ParallelDatabase for RootDatabase {
+ fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
+ salsa::Snapshot::new(RootDatabase { storage: ManuallyDrop::new(self.storage.snapshot()) })
+ }
+}
+
+#[salsa::query_group(LineIndexDatabaseStorage)]
+pub trait LineIndexDatabase: base_db::SourceDatabase {
+ fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
+}
+
+fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
+ let text = db.file_text(file_id);
+ Arc::new(LineIndex::new(&*text))
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum SymbolKind {
+ Attribute,
+ BuiltinAttr,
+ Const,
+ ConstParam,
+ Derive,
+ DeriveHelper,
+ Enum,
+ Field,
+ Function,
+ Impl,
+ Label,
+ LifetimeParam,
+ Local,
+ Macro,
+ Module,
+ SelfParam,
+ SelfType,
+ Static,
+ Struct,
+ ToolModule,
+ Trait,
+ TypeAlias,
+ TypeParam,
+ Union,
+ ValueParam,
+ Variant,
+}
+
+impl From<hir::MacroKind> for SymbolKind {
+ fn from(it: hir::MacroKind) -> Self {
+ match it {
+ hir::MacroKind::Declarative | hir::MacroKind::BuiltIn | hir::MacroKind::ProcMacro => {
+ SymbolKind::Macro
+ }
+ hir::MacroKind::Derive => SymbolKind::Derive,
+ hir::MacroKind::Attr => SymbolKind::Attribute,
+ }
+ }
+}
+
+impl From<FileSymbolKind> for SymbolKind {
+ fn from(it: FileSymbolKind) -> Self {
+ match it {
+ FileSymbolKind::Const => SymbolKind::Const,
+ FileSymbolKind::Enum => SymbolKind::Enum,
+ FileSymbolKind::Function => SymbolKind::Function,
+ FileSymbolKind::Macro => SymbolKind::Macro,
+ FileSymbolKind::Module => SymbolKind::Module,
+ FileSymbolKind::Static => SymbolKind::Static,
+ FileSymbolKind::Struct => SymbolKind::Struct,
+ FileSymbolKind::Trait => SymbolKind::Trait,
+ FileSymbolKind::TypeAlias => SymbolKind::TypeAlias,
+ FileSymbolKind::Union => SymbolKind::Union,
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct SnippetCap {
+ _private: (),
+}
+
+impl SnippetCap {
+ pub const fn new(allow_snippets: bool) -> Option<SnippetCap> {
+ if allow_snippets {
+ Some(SnippetCap { _private: () })
+ } else {
+ None
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ mod sourcegen_lints;
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
new file mode 100644
index 000000000..68ad07ee8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
@@ -0,0 +1,300 @@
+//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
+//! representation.
+use std::{iter, mem};
+
+use rustc_hash::FxHashMap;
+use syntax::{TextRange, TextSize};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct LineIndex {
+    /// Offset of the beginning of each line, zero-based
+ pub(crate) newlines: Vec<TextSize>,
+ /// List of non-ASCII characters on each line
+ pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineColUtf16 {
+ /// Zero-based
+ pub line: u32,
+ /// Zero-based
+ pub col: u32,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineCol {
+ /// Zero-based
+ pub line: u32,
+    /// Zero-based UTF-8 offset
+ pub col: u32,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub(crate) struct Utf16Char {
+ /// Start offset of a character inside a line, zero-based
+ pub(crate) start: TextSize,
+ /// End offset of a character inside a line, zero-based
+ pub(crate) end: TextSize,
+}
+
+impl Utf16Char {
+ /// Returns the length in 8-bit UTF-8 code units.
+ fn len(&self) -> TextSize {
+ self.end - self.start
+ }
+
+ /// Returns the length in 16-bit UTF-16 code units.
+ fn len_utf16(&self) -> usize {
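+        // A character that needs four bytes in UTF-8 lies outside the Basic Multilingual Plane
+        // and is encoded as a surrogate pair (two code units) in UTF-16; every other char fits
+        // in a single UTF-16 code unit.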
+ if self.len() == TextSize::from(4) {
+ 2
+ } else {
+ 1
+ }
+ }
+}
+
+impl LineIndex {
+ pub fn new(text: &str) -> LineIndex {
+ let mut utf16_lines = FxHashMap::default();
+ let mut utf16_chars = Vec::new();
+
+ let mut newlines = vec![0.into()];
+ let mut curr_row @ mut curr_col = 0.into();
+ let mut line = 0;
+ for c in text.chars() {
+ let c_len = TextSize::of(c);
+ curr_row += c_len;
+ if c == '\n' {
+ newlines.push(curr_row);
+
+ // Save any utf-16 characters seen in the previous line
+ if !utf16_chars.is_empty() {
+ utf16_lines.insert(line, mem::take(&mut utf16_chars));
+ }
+
+ // Prepare for processing the next line
+ curr_col = 0.into();
+ line += 1;
+ continue;
+ }
+
+ if !c.is_ascii() {
+ utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len });
+ }
+
+ curr_col += c_len;
+ }
+
+ // Save any utf-16 characters seen in the last line
+ if !utf16_chars.is_empty() {
+ utf16_lines.insert(line, utf16_chars);
+ }
+
+ LineIndex { newlines, utf16_lines }
+ }
+
+ pub fn line_col(&self, offset: TextSize) -> LineCol {
+ let line = self.newlines.partition_point(|&it| it <= offset) - 1;
+ let line_start_offset = self.newlines[line];
+ let col = offset - line_start_offset;
+ LineCol { line: line as u32, col: col.into() }
+ }
+
+ pub fn offset(&self, line_col: LineCol) -> Option<TextSize> {
+ self.newlines
+ .get(line_col.line as usize)
+ .map(|offset| offset + TextSize::from(line_col.col))
+ }
+
+ pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
+ let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into());
+ LineColUtf16 { line: line_col.line, col: col as u32 }
+ }
+
+ pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
+ let col = self.utf16_to_utf8_col(line_col.line, line_col.col);
+ LineCol { line: line_col.line, col: col.into() }
+ }
+
+ pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+ let lo = self.newlines.partition_point(|&it| it < range.start());
+ let hi = self.newlines.partition_point(|&it| it <= range.end());
+ let all = iter::once(range.start())
+ .chain(self.newlines[lo..hi].iter().copied())
+ .chain(iter::once(range.end()));
+
+ all.clone()
+ .zip(all.skip(1))
+ .map(|(lo, hi)| TextRange::new(lo, hi))
+ .filter(|it| !it.is_empty())
+ }
+
+ fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
+ let mut res: usize = col.into();
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ for c in utf16_chars {
+ if c.end <= col {
+ res -= usize::from(c.len()) - c.len_utf16();
+ } else {
+ // From here on, all utf16 characters come *after* the character we are mapping,
+ // so we don't need to take them into account
+ break;
+ }
+ }
+ }
+ res
+ }
+
+ fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ for c in utf16_chars {
+ if col > u32::from(c.start) {
+ col += u32::from(c.len()) - c.len_utf16() as u32;
+ } else {
+ // From here on, all utf16 characters come *after* the character we are mapping,
+ // so we don't need to take them into account
+ break;
+ }
+ }
+ }
+
+ col.into()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_line_index() {
+ let text = "hello\nworld";
+ let table = [
+ (00, 0, 0),
+ (01, 0, 1),
+ (05, 0, 5),
+ (06, 1, 0),
+ (07, 1, 1),
+ (08, 1, 2),
+ (10, 1, 4),
+ (11, 1, 5),
+ (12, 1, 6),
+ ];
+
+ let index = LineIndex::new(text);
+ for &(offset, line, col) in &table {
+ assert_eq!(index.line_col(offset.into()), LineCol { line, col });
+ }
+
+ let text = "\nhello\nworld";
+ let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
+ let index = LineIndex::new(text);
+ for &(offset, line, col) in &table {
+ assert_eq!(index.line_col(offset.into()), LineCol { line, col });
+ }
+ }
+
+ #[test]
+ fn test_char_len() {
+ assert_eq!('メ'.len_utf8(), 3);
+ assert_eq!('メ'.len_utf16(), 1);
+ }
+
+ #[test]
+ fn test_empty_index() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'x';
+",
+ );
+ assert_eq!(col_index.utf16_lines.len(), 0);
+ }
+
+ #[test]
+ fn test_single_char() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'メ';
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 1);
+ assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+
+ // UTF-8 to UTF-16, no changes
+ assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+ // UTF-8 to UTF-16
+ assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
+
+ // UTF-16 to UTF-8, no changes
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
+
+ let col_index = LineIndex::new("a𐐏b");
+ assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
+ }
+
+ #[test]
+ fn test_string() {
+ let col_index = LineIndex::new(
+ "
+const C: char = \"メ メ\";
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 2);
+ assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+ assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });
+
+ // UTF-8 to UTF-16
+ assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+ assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
+ assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
+
+ assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+ // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
+ assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
+ assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
+
+ assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
+ }
+
+ #[test]
+ fn test_splitlines() {
+ fn r(lo: u32, hi: u32) -> TextRange {
+ TextRange::new(lo.into(), hi.into())
+ }
+
+ let text = "a\nbb\nccc\n";
+ let line_index = LineIndex::new(text);
+
+ let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
+ let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
+ assert_eq!(actual, expected);
+
+ let text = "";
+ let line_index = LineIndex::new(text);
+
+ let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
+ let expected = vec![];
+ assert_eq!(actual, expected);
+
+ let text = "\n";
+ let line_index = LineIndex::new(text);
+
+ let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
+ let expected = vec![r(0, 1)];
+ assert_eq!(actual, expected)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
new file mode 100644
index 000000000..40af9e6fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -0,0 +1,287 @@
+//! See [`PathTransform`].
+
+use crate::helpers::mod_path_to_ast;
+use either::Either;
+use hir::{AsAssocItem, HirDisplay, SemanticsScope};
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, AstNode},
+ ted, SyntaxNode,
+};
+
+/// `PathTransform` substitutes paths in SyntaxNodes in bulk.
+///
+/// This is mostly useful for IDE code generation. If you paste some existing
+/// code into a new context (for example, to add method overrides to an `impl`
+/// block), you generally want to appropriately qualify the names, and sometimes
+/// you might want to substitute generic parameters as well:
+///
+/// ```
+/// mod x {
+/// pub struct A<V>;
+/// pub trait T<U> { fn foo(&self, _: U) -> A<U>; }
+/// }
+///
+/// mod y {
+/// use x::T;
+///
+/// impl T<()> for () {
+/// // If we invoke **Add Missing Members** here, we want to copy-paste `foo`.
+/// // But we want a slightly-modified version of it:
+/// fn foo(&self, _: ()) -> x::A<()> {}
+/// }
+/// }
+/// ```
+pub struct PathTransform<'a> {
+ generic_def: hir::GenericDef,
+ substs: Vec<ast::Type>,
+ target_scope: &'a SemanticsScope<'a>,
+ source_scope: &'a SemanticsScope<'a>,
+}
+
+impl<'a> PathTransform<'a> {
+ pub fn trait_impl(
+ target_scope: &'a SemanticsScope<'a>,
+ source_scope: &'a SemanticsScope<'a>,
+ trait_: hir::Trait,
+ impl_: ast::Impl,
+ ) -> PathTransform<'a> {
+ PathTransform {
+ source_scope,
+ target_scope,
+ generic_def: trait_.into(),
+ substs: get_syntactic_substs(impl_).unwrap_or_default(),
+ }
+ }
+
+ pub fn function_call(
+ target_scope: &'a SemanticsScope<'a>,
+ source_scope: &'a SemanticsScope<'a>,
+ function: hir::Function,
+ generic_arg_list: ast::GenericArgList,
+ ) -> PathTransform<'a> {
+ PathTransform {
+ source_scope,
+ target_scope,
+ generic_def: function.into(),
+ substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(),
+ }
+ }
+
+ pub fn apply(&self, syntax: &SyntaxNode) {
+ self.build_ctx().apply(syntax)
+ }
+
+ fn build_ctx(&self) -> Ctx<'a> {
+ let db = self.source_scope.db;
+ let target_module = self.target_scope.module();
+ let source_module = self.source_scope.module();
+ let skip = match self.generic_def {
+ // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
+ hir::GenericDef::Trait(_) => 1,
+ _ => 0,
+ };
+ let substs_by_param: FxHashMap<_, _> = self
+ .generic_def
+ .type_params(db)
+ .into_iter()
+ .skip(skip)
+ // The actual list of trait type parameters may be longer than the one
+ // used in the `impl` block due to trailing default type parameters.
+ // For that case we extend the `substs` with an empty iterator so we
+ // can still hit those trailing values and check if they actually have
+ // a default type. If they do, convert that type from `hir` to `ast` so
+ // the resulting change can be applied correctly.
+ .zip(self.substs.iter().map(Some).chain(std::iter::repeat(None)))
+ .filter_map(|(k, v)| match k.split(db) {
+ Either::Left(_) => None,
+ Either::Right(t) => match v {
+ Some(v) => Some((k, v.clone())),
+ None => {
+ let default = t.default(db)?;
+ Some((
+ k,
+ ast::make::ty(
+ &default.display_source_code(db, source_module.into()).ok()?,
+ ),
+ ))
+ }
+ },
+ })
+ .collect();
+ Ctx { substs: substs_by_param, target_module, source_scope: self.source_scope }
+ }
+}
+
+struct Ctx<'a> {
+ substs: FxHashMap<hir::TypeOrConstParam, ast::Type>,
+ target_module: hir::Module,
+ source_scope: &'a SemanticsScope<'a>,
+}
+
+impl<'a> Ctx<'a> {
+ fn apply(&self, item: &SyntaxNode) {
+ // `transform_path` may update a node's parent and that would break the
+ // tree traversal. Thus all paths in the tree are collected into a vec
+ // so that such operation is safe.
+ let paths = item
+ .preorder()
+ .filter_map(|event| match event {
+ syntax::WalkEvent::Enter(_) => None,
+ syntax::WalkEvent::Leave(node) => Some(node),
+ })
+ .filter_map(ast::Path::cast)
+ .collect::<Vec<_>>();
+
+ for path in paths {
+ self.transform_path(path);
+ }
+ }
+ fn transform_path(&self, path: ast::Path) -> Option<()> {
+ if path.qualifier().is_some() {
+ return None;
+ }
+ if path.segment().map_or(false, |s| {
+ s.param_list().is_some() || (s.self_token().is_some() && path.parent_path().is_none())
+ }) {
+ // don't try to qualify `Fn(Foo) -> Bar` paths, they are in the prelude anyway
+ // don't try to qualify a sole `self` either; it is usually a local, but can be returned as a module due to namespace clashing
+ return None;
+ }
+
+ let resolution = self.source_scope.speculative_resolve(&path)?;
+
+ match resolution {
+ hir::PathResolution::TypeParam(tp) => {
+ if let Some(subst) = self.substs.get(&tp.merge()) {
+ let parent = path.syntax().parent()?;
+ if let Some(parent) = ast::Path::cast(parent.clone()) {
+ // Path inside path means that there is an associated
+ // type/constant on the type parameter. It is necessary
+ // to fully qualify the type with `as Trait`. Even
+ // though it might be unnecessary if `subst` is generic
+ // type, always fully qualifying the path is safer
+ // because of potential clash of associated types from
+ // multiple traits
+
+ let trait_ref = find_trait_for_assoc_item(
+ self.source_scope,
+ tp,
+ parent.segment()?.name_ref()?,
+ )
+ .and_then(|trait_ref| {
+ let found_path = self.target_module.find_use_path(
+ self.source_scope.db.upcast(),
+ hir::ModuleDef::Trait(trait_ref),
+ )?;
+ match ast::make::ty_path(mod_path_to_ast(&found_path)) {
+ ast::Type::PathType(path_ty) => Some(path_ty),
+ _ => None,
+ }
+ });
+
+ let segment = ast::make::path_segment_ty(subst.clone(), trait_ref);
+ let qualified =
+ ast::make::path_from_segments(std::iter::once(segment), false);
+ ted::replace(path.syntax(), qualified.clone_for_update().syntax());
+ } else if let Some(path_ty) = ast::PathType::cast(parent) {
+ ted::replace(
+ path_ty.syntax(),
+ subst.clone_subtree().clone_for_update().syntax(),
+ );
+ } else {
+ ted::replace(
+ path.syntax(),
+ subst.clone_subtree().clone_for_update().syntax(),
+ );
+ }
+ }
+ }
+ hir::PathResolution::Def(def) if def.as_assoc_item(self.source_scope.db).is_none() => {
+ if let hir::ModuleDef::Trait(_) = def {
+ if matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. }) {
+ // `speculative_resolve` resolves segments like `<T as
+ // Trait>` into `Trait`, but just the trait name should
+ // not be used as the replacement of the original
+ // segment.
+ return None;
+ }
+ }
+
+ let found_path =
+ self.target_module.find_use_path(self.source_scope.db.upcast(), def)?;
+ let res = mod_path_to_ast(&found_path).clone_for_update();
+ if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
+ if let Some(segment) = res.segment() {
+ let old = segment.get_or_create_generic_arg_list();
+ ted::replace(old.syntax(), args.clone_subtree().syntax().clone_for_update())
+ }
+ }
+ ted::replace(path.syntax(), res.syntax())
+ }
+ hir::PathResolution::Local(_)
+ | hir::PathResolution::ConstParam(_)
+ | hir::PathResolution::SelfType(_)
+ | hir::PathResolution::Def(_)
+ | hir::PathResolution::BuiltinAttr(_)
+ | hir::PathResolution::ToolModule(_)
+ | hir::PathResolution::DeriveHelper(_) => (),
+ }
+ Some(())
+ }
+}
+
+// FIXME: It would probably be nicer if we could get this via HIR (i.e. get the
+// trait ref, and then go from the types in the substs back to the syntax).
+fn get_syntactic_substs(impl_def: ast::Impl) -> Option<Vec<ast::Type>> {
+ let target_trait = impl_def.trait_()?;
+ let path_type = match target_trait {
+ ast::Type::PathType(path) => path,
+ _ => return None,
+ };
+ let generic_arg_list = path_type.path()?.segment()?.generic_arg_list()?;
+
+ get_type_args_from_arg_list(generic_arg_list)
+}
+
+fn get_type_args_from_arg_list(generic_arg_list: ast::GenericArgList) -> Option<Vec<ast::Type>> {
+ let mut result = Vec::new();
+ for generic_arg in generic_arg_list.generic_args() {
+ if let ast::GenericArg::TypeArg(type_arg) = generic_arg {
+ result.push(type_arg.ty()?)
+ }
+ }
+
+ Some(result)
+}
+
+fn find_trait_for_assoc_item(
+ scope: &SemanticsScope<'_>,
+ type_param: hir::TypeParam,
+ assoc_item: ast::NameRef,
+) -> Option<hir::Trait> {
+ let db = scope.db;
+ let trait_bounds = type_param.trait_bounds(db);
+
+ let assoc_item_name = assoc_item.text();
+
+ for trait_ in trait_bounds {
+ let names = trait_.items(db).into_iter().filter_map(|item| match item {
+ hir::AssocItem::TypeAlias(ta) => Some(ta.name(db)),
+ hir::AssocItem::Const(cst) => cst.name(db),
+ _ => None,
+ });
+
+ for name in names {
+ if assoc_item_name.as_str() == name.as_text()?.as_str() {
+ // It is fine to return the first match because in case of
+ // multiple possibilities, the exact trait must be disambiguated
+ // in the definition of trait being implemented, so this search
+ // should not be needed.
+ return Some(trait_);
+ }
+ }
+ }
+
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
new file mode 100644
index 000000000..517fe3f24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -0,0 +1,540 @@
+//! Rename infrastructure for rust-analyzer. It is used primarily for the
+//! literal "rename" in the ide (look for tests there), but it is also available
+//! as a general-purpose service. For example, it is used by the fix for the
+//! "incorrect case" diagnostic.
+//!
+//! It leverages the [`crate::search`] functionality to find what needs to be
+//! renamed. The actual renames are tricky -- field shorthands need special
+//! attention, and, when renaming modules, you also want to rename files on the
+//! file system.
+//!
+//! Another can of worms are macros:
+//!
+//! ```ignore
+//! macro_rules! m { () => { fn f() {} } }
+//! m!();
+//! fn main() {
+//! f() // <- rename me
+//! }
+//! ```
+//!
+//! The correct behavior in such cases is probably to show a dialog to the user.
+//! Our current behavior is ¯\_(ツ)_/¯.
+use std::fmt;
+
+use base_db::{AnchoredPathBuf, FileId, FileRange};
+use either::Either;
+use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics};
+use stdx::never;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxKind, TextRange, T,
+};
+use text_edit::{TextEdit, TextEditBuilder};
+
+use crate::{
+ defs::Definition,
+ search::FileReference,
+ source_change::{FileSystemEdit, SourceChange},
+ syntax_helpers::node_ext::expr_as_name_ref,
+ traits::convert_to_def_in_trait,
+ RootDatabase,
+};
+
+pub type Result<T, E = RenameError> = std::result::Result<T, E>;
+
+#[derive(Debug)]
+pub struct RenameError(pub String);
+
+impl fmt::Display for RenameError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
+
+#[macro_export]
+macro_rules! _format_err {
+ ($fmt:expr) => { RenameError(format!($fmt)) };
+ ($fmt:expr, $($arg:tt)+) => { RenameError(format!($fmt, $($arg)+)) }
+}
+pub use _format_err as format_err;
+
+#[macro_export]
+macro_rules! _bail {
+ ($($tokens:tt)*) => { return Err(format_err!($($tokens)*)) }
+}
+pub use _bail as bail;
+
+impl Definition {
+ pub fn rename(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ new_name: &str,
+ ) -> Result<SourceChange> {
+ match *self {
+ Definition::Module(module) => rename_mod(sema, module, new_name),
+ Definition::BuiltinType(_) => {
+ bail!("Cannot rename builtin type")
+ }
+ Definition::SelfType(_) => bail!("Cannot rename `Self`"),
+ def => rename_reference(sema, def, new_name),
+ }
+ }
+
+ /// Textual range of the identifier which will change when renaming this
+ /// `Definition`. Note that some definitions, like builtin types, can't be
+ /// renamed.
+ pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
+ let res = match self {
+ Definition::Macro(mac) => {
+ let src = mac.source(sema.db)?;
+ let name = match &src.value {
+ Either::Left(it) => it.name()?,
+ Either::Right(it) => it.name()?,
+ };
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ Definition::Field(field) => {
+ let src = field.source(sema.db)?;
+ match &src.value {
+ FieldSource::Named(record_field) => {
+ let name = record_field.name()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ FieldSource::Pos(_) => None,
+ }
+ }
+ Definition::Module(module) => {
+ let src = module.declaration_source(sema.db)?;
+ let name = src.value.name()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ Definition::Function(it) => name_range(it, sema),
+ Definition::Adt(adt) => match adt {
+ hir::Adt::Struct(it) => name_range(it, sema),
+ hir::Adt::Union(it) => name_range(it, sema),
+ hir::Adt::Enum(it) => name_range(it, sema),
+ },
+ Definition::Variant(it) => name_range(it, sema),
+ Definition::Const(it) => name_range(it, sema),
+ Definition::Static(it) => name_range(it, sema),
+ Definition::Trait(it) => name_range(it, sema),
+ Definition::TypeAlias(it) => name_range(it, sema),
+ Definition::Local(local) => {
+ let src = local.source(sema.db);
+ let name = match &src.value {
+ Either::Left(bind_pat) => bind_pat.name()?,
+ Either::Right(_) => return None,
+ };
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ Definition::GenericParam(generic_param) => match generic_param {
+ hir::GenericParam::LifetimeParam(lifetime_param) => {
+ let src = lifetime_param.source(sema.db)?;
+ src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db)
+ }
+ _ => {
+ let x = match generic_param {
+ hir::GenericParam::TypeParam(it) => it.merge(),
+ hir::GenericParam::ConstParam(it) => it.merge(),
+ hir::GenericParam::LifetimeParam(_) => return None,
+ };
+ let src = x.source(sema.db)?;
+ let name = match &src.value {
+ Either::Left(x) => x.name()?,
+ Either::Right(_) => return None,
+ };
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ },
+ Definition::Label(label) => {
+ let src = label.source(sema.db);
+ let lifetime = src.value.lifetime()?;
+ src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
+ }
+ Definition::BuiltinType(_) => return None,
+ Definition::SelfType(_) => return None,
+ Definition::BuiltinAttr(_) => return None,
+ Definition::ToolModule(_) => return None,
+ // FIXME: This should be doable in theory
+ Definition::DeriveHelper(_) => return None,
+ };
+ return res;
+
+ fn name_range<D>(def: D, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange>
+ where
+ D: HasSource,
+ D::Ast: ast::HasName,
+ {
+ let src = def.source(sema.db)?;
+ let name = src.value.name()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ }
+}
+
+fn rename_mod(
+ sema: &Semantics<'_, RootDatabase>,
+ module: hir::Module,
+ new_name: &str,
+) -> Result<SourceChange> {
+ if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
+ bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
+ }
+
+ let mut source_change = SourceChange::default();
+
+ if module.is_crate_root(sema.db) {
+ return Ok(source_change);
+ }
+
+ let InFile { file_id, value: def_source } = module.definition_source(sema.db);
+ if let ModuleSource::SourceFile(..) = def_source {
+ let anchor = file_id.original_file(sema.db);
+
+ let is_mod_rs = module.is_mod_rs(sema.db);
+ let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db));
+
+ // Module exists in a named file
+ if !is_mod_rs {
+ let path = format!("{}.rs", new_name);
+ let dst = AnchoredPathBuf { anchor, path };
+ source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst })
+ }
+
+ // Rename the dir if:
+ // - Module source is in mod.rs
+ // - Module has submodules defined in separate files
+ let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) {
+ // Go up one level since the anchor is inside the dir we're trying to rename
+ (true, _, Some(mod_name)) => {
+ Some((format!("../{}", mod_name), format!("../{}", new_name)))
+ }
+ // The anchor is on the same level as target dir
+ (false, true, Some(mod_name)) => Some((mod_name.to_string(), new_name.to_string())),
+ _ => None,
+ };
+
+ if let Some((src, dst)) = dir_paths {
+ let src = AnchoredPathBuf { anchor, path: src };
+ let dst = AnchoredPathBuf { anchor, path: dst };
+ source_change.push_file_system_edit(FileSystemEdit::MoveDir {
+ src,
+ src_id: anchor,
+ dst,
+ })
+ }
+ }
+
+ if let Some(src) = module.declaration_source(sema.db) {
+ let file_id = src.file_id.original_file(sema.db);
+ match src.value.name() {
+ Some(name) => {
+ if let Some(file_range) =
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ {
+ source_change.insert_source_edit(
+ file_id,
+ TextEdit::replace(file_range.range, new_name.to_string()),
+ )
+ };
+ }
+ _ => never!("Module source node is missing a name"),
+ }
+ }
+
+ let def = Definition::Module(module);
+ let usages = def.usages(sema).all();
+ let ref_edits = usages.iter().map(|(&file_id, references)| {
+ (file_id, source_edit_from_references(references, def, new_name))
+ });
+ source_change.extend(ref_edits);
+
+ Ok(source_change)
+}
+
+fn rename_reference(
+ sema: &Semantics<'_, RootDatabase>,
+ def: Definition,
+ new_name: &str,
+) -> Result<SourceChange> {
+ let ident_kind = IdentifierKind::classify(new_name)?;
+
+ if matches!(
+ def,
+ Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_)
+ ) {
+ match ident_kind {
+ IdentifierKind::Ident | IdentifierKind::Underscore => {
+ cov_mark::hit!(rename_not_a_lifetime_ident_ref);
+ bail!("Invalid name `{}`: not a lifetime identifier", new_name);
+ }
+ IdentifierKind::Lifetime => cov_mark::hit!(rename_lifetime),
+ }
+ } else {
+ match ident_kind {
+ IdentifierKind::Lifetime => {
+ cov_mark::hit!(rename_not_an_ident_ref);
+ bail!("Invalid name `{}`: not an identifier", new_name);
+ }
+ IdentifierKind::Ident => cov_mark::hit!(rename_non_local),
+ IdentifierKind::Underscore => (),
+ }
+ }
+
+ let def = convert_to_def_in_trait(sema.db, def);
+ let usages = def.usages(sema).all();
+
+ if !usages.is_empty() && ident_kind == IdentifierKind::Underscore {
+ cov_mark::hit!(rename_underscore_multiple);
+ bail!("Cannot rename reference to `_` as it is being referenced multiple times");
+ }
+ let mut source_change = SourceChange::default();
+ source_change.extend(usages.iter().map(|(&file_id, references)| {
+ (file_id, source_edit_from_references(references, def, new_name))
+ }));
+
+ let mut insert_def_edit = |def| {
+ let (file_id, edit) = source_edit_from_def(sema, def, new_name)?;
+ source_change.insert_source_edit(file_id, edit);
+ Ok(())
+ };
+ match def {
+ Definition::Local(l) => l
+ .associated_locals(sema.db)
+ .iter()
+ .try_for_each(|&local| insert_def_edit(Definition::Local(local))),
+ def => insert_def_edit(def),
+ }?;
+ Ok(source_change)
+}
+
+pub fn source_edit_from_references(
+ references: &[FileReference],
+ def: Definition,
+ new_name: &str,
+) -> TextEdit {
+ let mut edit = TextEdit::builder();
+ // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
+ let mut edited_ranges = Vec::new();
+ for &FileReference { range, ref name, .. } in references {
+ let name_range = name.syntax().text_range();
+ if name_range.len() != range.len() {
+ // This usage comes from a different token kind that was downmapped to a NameLike in a macro
+ // Renaming this will most likely break things syntax-wise
+ continue;
+ }
+ let has_emitted_edit = match name {
+ // if the ranges differ then the node is inside a macro call, we can't really attempt
+ // to make special rewrites like shorthand syntax and such, so just rename the node in
+ // the macro input
+ ast::NameLike::NameRef(name_ref) if name_range == range => {
+ source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
+ }
+ ast::NameLike::Name(name) if name_range == range => {
+ source_edit_from_name(&mut edit, name, new_name)
+ }
+ _ => false,
+ };
+ if !has_emitted_edit {
+ if !edited_ranges.contains(&range.start()) {
+ edit.replace(range, new_name.to_string());
+ edited_ranges.push(range.start());
+ }
+ }
+ }
+
+ edit.finish()
+}
+
+fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name: &str) -> bool {
+ if ast::RecordPatField::for_field_name(name).is_some() {
+ if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
+ cov_mark::hit!(rename_record_pat_field_name_split);
+ // Foo { ref mut field } -> Foo { new_name: ref mut field }
+ // ^ insert `new_name: `
+
+ // FIXME: instead of splitting the shorthand, recursively trigger a rename of the
+ // other name https://github.com/rust-lang/rust-analyzer/issues/6547
+ edit.insert(ident_pat.syntax().text_range().start(), format!("{}: ", new_name));
+ return true;
+ }
+ }
+
+ false
+}
+
+fn source_edit_from_name_ref(
+ edit: &mut TextEditBuilder,
+ name_ref: &ast::NameRef,
+ new_name: &str,
+ def: Definition,
+) -> bool {
+ if name_ref.super_token().is_some() {
+ return true;
+ }
+
+ if let Some(record_field) = ast::RecordExprField::for_name_ref(name_ref) {
+ let rcf_name_ref = record_field.name_ref();
+ let rcf_expr = record_field.expr();
+ match &(rcf_name_ref, rcf_expr.and_then(|it| expr_as_name_ref(&it))) {
+ // field: init-expr, check if we can use a field init shorthand
+ (Some(field_name), Some(init)) => {
+ if field_name == name_ref {
+ if init.text() == new_name {
+ cov_mark::hit!(test_rename_field_put_init_shorthand);
+ // Foo { field: local } -> Foo { local }
+ // ^^^^^^^ delete this
+
+ // same names, we can use a shorthand here instead.
+ // we do not want to erase attributes hence this range start
+ let s = field_name.syntax().text_range().start();
+ let e = init.syntax().text_range().start();
+ edit.delete(TextRange::new(s, e));
+ return true;
+ }
+ } else if init == name_ref {
+ if field_name.text() == new_name {
+ cov_mark::hit!(test_rename_local_put_init_shorthand);
+ // Foo { field: local } -> Foo { field }
+ // ^^^^^^^ delete this
+
+ // same names, we can use a shorthand here instead.
+ // we do not want to erase attributes hence this range start
+ let s = field_name.syntax().text_range().end();
+ let e = init.syntax().text_range().end();
+ edit.delete(TextRange::new(s, e));
+ return true;
+ }
+ }
+ }
+ // init shorthand
+ (None, Some(_)) if matches!(def, Definition::Field(_)) => {
+ cov_mark::hit!(test_rename_field_in_field_shorthand);
+ // Foo { field } -> Foo { new_name: field }
+ // ^ insert `new_name: `
+ let offset = name_ref.syntax().text_range().start();
+ edit.insert(offset, format!("{}: ", new_name));
+ return true;
+ }
+ (None, Some(_)) if matches!(def, Definition::Local(_)) => {
+ cov_mark::hit!(test_rename_local_in_field_shorthand);
+ // Foo { field } -> Foo { field: new_name }
+ // ^ insert `: new_name`
+ let offset = name_ref.syntax().text_range().end();
+ edit.insert(offset, format!(": {}", new_name));
+ return true;
+ }
+ _ => (),
+ }
+ } else if let Some(record_field) = ast::RecordPatField::for_field_name_ref(name_ref) {
+ let rcf_name_ref = record_field.name_ref();
+ let rcf_pat = record_field.pat();
+ match (rcf_name_ref, rcf_pat) {
+ // field: rename
+ (Some(field_name), Some(ast::Pat::IdentPat(pat)))
+ if field_name == *name_ref && pat.at_token().is_none() =>
+ {
+ // field name is being renamed
+ if let Some(name) = pat.name() {
+ if name.text() == new_name {
+ cov_mark::hit!(test_rename_field_put_init_shorthand_pat);
+ // Foo { field: ref mut local } -> Foo { ref mut field }
+ // ^^^^^^^ delete this
+ // ^^^^^ replace this with `field`
+
+ // same names, we can use a shorthand here instead.
+ // we do not want to erase attributes hence this range start
+ let s = field_name.syntax().text_range().start();
+ let e = pat.syntax().text_range().start();
+ edit.delete(TextRange::new(s, e));
+ edit.replace(name.syntax().text_range(), new_name.to_string());
+ return true;
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+ false
+}
+
+fn source_edit_from_def(
+ sema: &Semantics<'_, RootDatabase>,
+ def: Definition,
+ new_name: &str,
+) -> Result<(FileId, TextEdit)> {
+ let FileRange { file_id, range } = def
+ .range_for_rename(sema)
+ .ok_or_else(|| format_err!("No identifier available to rename"))?;
+
+ let mut edit = TextEdit::builder();
+ if let Definition::Local(local) = def {
+ if let Either::Left(pat) = local.source(sema.db).value {
+ // special cases required for renaming fields/locals in Record patterns
+ if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) {
+ let name_range = pat.name().unwrap().syntax().text_range();
+ if let Some(name_ref) = pat_field.name_ref() {
+ if new_name == name_ref.text() && pat.at_token().is_none() {
+ // Foo { field: ref mut local } -> Foo { ref mut field }
+ // ^^^^^^ delete this
+ // ^^^^^ replace this with `field`
+ cov_mark::hit!(test_rename_local_put_init_shorthand_pat);
+ edit.delete(
+ name_ref
+ .syntax()
+ .text_range()
+ .cover_offset(pat.syntax().text_range().start()),
+ );
+ edit.replace(name_range, name_ref.text().to_string());
+ } else {
+ // Foo { field: ref mut local @ local2 } -> Foo { field: ref mut new_name @ local2 }
+ // Foo { field: ref mut local } -> Foo { field: ref mut new_name }
+ // ^^^^^ replace this with `new_name`
+ edit.replace(name_range, new_name.to_string());
+ }
+ } else {
+ // Foo { ref mut field } -> Foo { field: ref mut new_name }
+ // ^ insert `field: `
+ // ^^^^^ replace this with `new_name`
+ edit.insert(
+ pat.syntax().text_range().start(),
+ format!("{}: ", pat_field.field_name().unwrap()),
+ );
+ edit.replace(name_range, new_name.to_string());
+ }
+ }
+ }
+ }
+ if edit.is_empty() {
+ edit.replace(range, new_name.to_string());
+ }
+ Ok((file_id, edit.finish()))
+}
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum IdentifierKind {
+ Ident,
+ Lifetime,
+ Underscore,
+}
+
+impl IdentifierKind {
+ pub fn classify(new_name: &str) -> Result<IdentifierKind> {
+ match parser::LexedStr::single_token(new_name) {
+ Some(res) => match res {
+ (SyntaxKind::IDENT, _) => Ok(IdentifierKind::Ident),
+ (T![_], _) => Ok(IdentifierKind::Underscore),
+ (SyntaxKind::LIFETIME_IDENT, _) if new_name != "'static" && new_name != "'_" => {
+ Ok(IdentifierKind::Lifetime)
+ }
+ (SyntaxKind::LIFETIME_IDENT, _) => {
+ bail!("Invalid name `{}`: not a lifetime identifier", new_name)
+ }
+ (_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error),
+ (_, None) => bail!("Invalid name `{}`: not an identifier", new_name),
+ },
+ None => bail!("Invalid name `{}`: not an identifier", new_name),
+ }
+ }
+}
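+
+// Illustrative checks of how `IdentifierKind::classify` sorts candidate
+// rename targets into identifier kinds; this is a sketch exercising only the
+// classification behavior defined above.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn classify_identifier_kinds() {
+        assert_eq!(IdentifierKind::classify("foo").unwrap(), IdentifierKind::Ident);
+        assert_eq!(IdentifierKind::classify("_").unwrap(), IdentifierKind::Underscore);
+        assert_eq!(IdentifierKind::classify("'a").unwrap(), IdentifierKind::Lifetime);
+        // `'static` and multi-token inputs are rejected.
+        assert!(IdentifierKind::classify("'static").is_err());
+        assert!(IdentifierKind::classify("foo bar").is_err());
+    }
+}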
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
new file mode 100644
index 000000000..e27e23867
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
@@ -0,0 +1,34 @@
+//! Rustdoc specific doc comment handling
+
+// stripped down version of https://github.com/rust-lang/rust/blob/392ba2ba1a7d6c542d2459fb8133bebf62a4a423/src/librustdoc/html/markdown.rs#L810-L933
+pub fn is_rust_fence(s: &str) -> bool {
+ let mut seen_rust_tags = false;
+ let mut seen_other_tags = false;
+
+ let tokens = s
+ .trim()
+ .split(|c| c == ',' || c == ' ' || c == '\t')
+ .map(str::trim)
+ .filter(|t| !t.is_empty());
+
+ for token in tokens {
+ match token {
+ "should_panic" | "no_run" | "ignore" | "allow_fail" => {
+ seen_rust_tags = !seen_other_tags
+ }
+ "rust" => seen_rust_tags = true,
+ "test_harness" | "compile_fail" => seen_rust_tags = !seen_other_tags || seen_rust_tags,
+ x if x.starts_with("edition") => {}
+ x if x.starts_with('E') && x.len() == 5 => {
+ if x[1..].parse::<u32>().is_ok() {
+ seen_rust_tags = !seen_other_tags || seen_rust_tags;
+ } else {
+ seen_other_tags = true;
+ }
+ }
+ _ => seen_other_tags = true,
+ }
+ }
+
+ !seen_other_tags || seen_rust_tags
+}
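+
+// Illustrative sketch of how the fence-info parsing above behaves; it only
+// exercises the `is_rust_fence` function defined in this module.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn classifies_fence_info_strings() {
+        // An empty or rust-tagged fence is treated as Rust code.
+        assert!(is_rust_fence(""));
+        assert!(is_rust_fence("rust"));
+        assert!(is_rust_fence("ignore,no_run"));
+        // Error-code tags such as `E0119` also count as Rust.
+        assert!(is_rust_fence("E0119"));
+        // Fences tagged with another language are not Rust.
+        assert!(!is_rust_fence("text"));
+        assert!(!is_rust_fence("python,ignore"));
+    }
+}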
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
new file mode 100644
index 000000000..bd038cdaa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -0,0 +1,785 @@
+//! Implementation of find-usages functionality.
+//!
+//! It is based on the standard IDE trick: first, we run a fast text search to
+//! get a super-set of matches. Then, we confirm each match using precise
+//! name resolution.
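+//!
+//! A hedged usage sketch (the `sema` semantics instance, the `def`
+//! [`Definition`] and `file_id` are assumed to come from the caller):
+//!
+//! ```ignore
+//! let usages = def.usages(&sema).in_scope(SearchScope::single_file(file_id)).all();
+//! for (file_id, references) in usages.iter() {
+//!     // each `FileReference` carries the range and name node of one usage
+//! }
+//! ```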
+
+use std::{convert::TryInto, mem, sync::Arc};
+
+use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
+use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
+use once_cell::unsync::Lazy;
+use rustc_hash::FxHashMap;
+use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
+
+use crate::{
+ defs::{Definition, NameClass, NameRefClass},
+ traits::{as_trait_assoc_def, convert_to_def_in_trait},
+ RootDatabase,
+};
+
+#[derive(Debug, Default, Clone)]
+pub struct UsageSearchResult {
+ pub references: FxHashMap<FileId, Vec<FileReference>>,
+}
+
+impl UsageSearchResult {
+ pub fn is_empty(&self) -> bool {
+ self.references.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.references.len()
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = (&FileId, &[FileReference])> + '_ {
+ self.references.iter().map(|(file_id, refs)| (file_id, &**refs))
+ }
+
+ pub fn file_ranges(&self) -> impl Iterator<Item = FileRange> + '_ {
+ self.references.iter().flat_map(|(&file_id, refs)| {
+ refs.iter().map(move |&FileReference { range, .. }| FileRange { file_id, range })
+ })
+ }
+}
+
+impl IntoIterator for UsageSearchResult {
+ type Item = (FileId, Vec<FileReference>);
+ type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.references.into_iter()
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct FileReference {
+ /// The range of the reference in the original file
+ pub range: TextRange,
+ /// The node of the reference in the (macro-)file
+ pub name: ast::NameLike,
+ pub category: Option<ReferenceCategory>,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum ReferenceCategory {
+ // FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
+ // Create
+ Write,
+ Read,
+ // FIXME: Some day should be able to search in doc comments. Would probably
+ // need to switch from enum to bitflags then?
+ // DocComment
+}
+
+/// Generally, `search_scope` returns files that might contain references for the element.
+/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependent crates.
+/// In some cases, the location of the references is known to within a `TextRange`,
+/// e.g. for things like local variables.
+#[derive(Clone, Debug)]
+pub struct SearchScope {
+ entries: FxHashMap<FileId, Option<TextRange>>,
+}
+
+impl SearchScope {
+ fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
+ SearchScope { entries }
+ }
+
+ /// Build a search scope spanning the entire crate graph of files.
+ fn crate_graph(db: &RootDatabase) -> SearchScope {
+ let mut entries = FxHashMap::default();
+
+ let graph = db.crate_graph();
+ for krate in graph.iter() {
+ let root_file = graph[krate].root_file_id;
+ let source_root_id = db.file_source_root(root_file);
+ let source_root = db.source_root(source_root_id);
+ entries.extend(source_root.iter().map(|id| (id, None)));
+ }
+ SearchScope { entries }
+ }
+
+ /// Build a search scope spanning all the reverse dependencies of the given crate.
+ fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
+ let mut entries = FxHashMap::default();
+ for rev_dep in of.transitive_reverse_dependencies(db) {
+ let root_file = rev_dep.root_file(db);
+ let source_root_id = db.file_source_root(root_file);
+ let source_root = db.source_root(source_root_id);
+ entries.extend(source_root.iter().map(|id| (id, None)));
+ }
+ SearchScope { entries }
+ }
+
+ /// Build a search scope spanning the given crate.
+ fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
+ let root_file = of.root_file(db);
+ let source_root_id = db.file_source_root(root_file);
+ let source_root = db.source_root(source_root_id);
+ SearchScope {
+ entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
+ }
+ }
+
+ /// Build a search scope spanning the given module and all its submodules.
+ fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
+ let mut entries = FxHashMap::default();
+
+ let (file_id, range) = {
+ let InFile { file_id, value } = module.definition_source(db);
+ if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ (file_id, Some(call_source.text_range()))
+ } else {
+ (
+ file_id.original_file(db),
+ match value {
+ ModuleSource::SourceFile(_) => None,
+ ModuleSource::Module(it) => Some(it.syntax().text_range()),
+ ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
+ },
+ )
+ }
+ };
+ entries.insert(file_id, range);
+
+ let mut to_visit: Vec<_> = module.children(db).collect();
+ while let Some(module) = to_visit.pop() {
+ if let InFile { file_id, value: ModuleSource::SourceFile(_) } =
+ module.definition_source(db)
+ {
+ entries.insert(file_id.original_file(db), None);
+ }
+ to_visit.extend(module.children(db));
+ }
+ SearchScope { entries }
+ }
+
+ /// Build an empty search scope.
+ pub fn empty() -> SearchScope {
+ SearchScope::new(FxHashMap::default())
+ }
+
+ /// Build a search scope spanning the given file.
+ pub fn single_file(file: FileId) -> SearchScope {
+ SearchScope::new(std::iter::once((file, None)).collect())
+ }
+
+ /// Build a search scope spanning the text range of the given file.
+ pub fn file_range(range: FileRange) -> SearchScope {
+ SearchScope::new(std::iter::once((range.file_id, Some(range.range))).collect())
+ }
+
+ /// Build a search scope spanning the given files.
+ pub fn files(files: &[FileId]) -> SearchScope {
+ SearchScope::new(files.iter().map(|f| (*f, None)).collect())
+ }
+
+ pub fn intersection(&self, other: &SearchScope) -> SearchScope {
+ let (mut small, mut large) = (&self.entries, &other.entries);
+ if small.len() > large.len() {
+ mem::swap(&mut small, &mut large)
+ }
+
+ let intersect_ranges =
+ |r1: Option<TextRange>, r2: Option<TextRange>| -> Option<Option<TextRange>> {
+ match (r1, r2) {
+ (None, r) | (r, None) => Some(r),
+ (Some(r1), Some(r2)) => r1.intersect(r2).map(Some),
+ }
+ };
+ let res = small
+ .iter()
+ .filter_map(|(&file_id, &r1)| {
+ let &r2 = large.get(&file_id)?;
+ let r = intersect_ranges(r1, r2)?;
+ Some((file_id, r))
+ })
+ .collect();
+
+ SearchScope::new(res)
+ }
+}
+
+impl IntoIterator for SearchScope {
+ type Item = (FileId, Option<TextRange>);
+ type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.entries.into_iter()
+ }
+}
+
+impl Definition {
+ fn search_scope(&self, db: &RootDatabase) -> SearchScope {
+ let _p = profile::span("search_scope");
+
+ if let Definition::BuiltinType(_) = self {
+ return SearchScope::crate_graph(db);
+ }
+
+ // def is crate root
+ // FIXME: We don't do searches for crates currently, as a crate does not actually have a single name
+ if let &Definition::Module(module) = self {
+ if module.is_crate_root(db) {
+ return SearchScope::reverse_dependencies(db, module.krate());
+ }
+ }
+
+ let module = match self.module(db) {
+ Some(it) => it,
+ None => return SearchScope::empty(),
+ };
+ let InFile { file_id, value: module_source } = module.definition_source(db);
+ let file_id = file_id.original_file(db);
+
+ if let Definition::Local(var) = self {
+ let def = match var.parent(db) {
+ DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()),
+ DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
+ DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
+ };
+ return match def {
+ Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
+ None => SearchScope::single_file(file_id),
+ };
+ }
+
+ if let Definition::SelfType(impl_) = self {
+ return match impl_.source(db).map(|src| src.syntax().cloned()) {
+ Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
+ None => SearchScope::single_file(file_id),
+ };
+ }
+
+ if let Definition::GenericParam(hir::GenericParam::LifetimeParam(param)) = self {
+ let def = match param.parent(db) {
+ hir::GenericDef::Function(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::Adt(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::Trait(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::Variant(it) => it.source(db).map(|src| src.syntax().cloned()),
+ hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
+ };
+ return match def {
+ Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
+ None => SearchScope::single_file(file_id),
+ };
+ }
+
+ if let Definition::Macro(macro_def) = self {
+ return match macro_def.kind(db) {
+ hir::MacroKind::Declarative => {
+ if macro_def.attrs(db).by_key("macro_export").exists() {
+ SearchScope::reverse_dependencies(db, module.krate())
+ } else {
+ SearchScope::krate(db, module.krate())
+ }
+ }
+ hir::MacroKind::BuiltIn => SearchScope::crate_graph(db),
+ hir::MacroKind::Derive | hir::MacroKind::Attr | hir::MacroKind::ProcMacro => {
+ SearchScope::reverse_dependencies(db, module.krate())
+ }
+ };
+ }
+
+ if let Definition::DeriveHelper(_) = self {
+ return SearchScope::reverse_dependencies(db, module.krate());
+ }
+
+ let vis = self.visibility(db);
+ if let Some(Visibility::Public) = vis {
+ return SearchScope::reverse_dependencies(db, module.krate());
+ }
+ if let Some(Visibility::Module(module)) = vis {
+ return SearchScope::module_and_children(db, module.into());
+ }
+
+ let range = match module_source {
+ ModuleSource::Module(m) => Some(m.syntax().text_range()),
+ ModuleSource::BlockExpr(b) => Some(b.syntax().text_range()),
+ ModuleSource::SourceFile(_) => None,
+ };
+ match range {
+ Some(range) => SearchScope::file_range(FileRange { file_id, range }),
+ None => SearchScope::single_file(file_id),
+ }
+ }
+
+ pub fn usages<'a>(self, sema: &'a Semantics<'_, RootDatabase>) -> FindUsages<'a> {
+ FindUsages {
+ local_repr: match self {
+ Definition::Local(local) => Some(local.representative(sema.db)),
+ _ => None,
+ },
+ def: self,
+ trait_assoc_def: as_trait_assoc_def(sema.db, self),
+ sema,
+ scope: None,
+ include_self_kw_refs: None,
+ search_self_mod: false,
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct FindUsages<'a> {
+ def: Definition,
+ /// If def is an assoc item from a trait or trait impl, this is the corresponding item of the trait definition
+ trait_assoc_def: Option<Definition>,
+ sema: &'a Semantics<'a, RootDatabase>,
+ scope: Option<SearchScope>,
+ include_self_kw_refs: Option<hir::Type>,
+ local_repr: Option<hir::Local>,
+ search_self_mod: bool,
+}
+
+impl<'a> FindUsages<'a> {
+ /// Enable searching for `Self` when the definition is a type or `self` for modules.
+ pub fn include_self_refs(mut self) -> FindUsages<'a> {
+ self.include_self_kw_refs = def_to_ty(self.sema, &self.def);
+ self.search_self_mod = true;
+ self
+ }
+
+ /// Limit the search to a given [`SearchScope`].
+ pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> {
+ self.set_scope(Some(scope))
+ }
+
+ /// Limit the search to a given [`SearchScope`].
+ pub fn set_scope(mut self, scope: Option<SearchScope>) -> FindUsages<'a> {
+ assert!(self.scope.is_none());
+ self.scope = scope;
+ self
+ }
+
+ pub fn at_least_one(&self) -> bool {
+ let mut found = false;
+ self.search(&mut |_, _| {
+ found = true;
+ true
+ });
+ found
+ }
+
+ pub fn all(self) -> UsageSearchResult {
+ let mut res = UsageSearchResult::default();
+ self.search(&mut |file_id, reference| {
+ res.references.entry(file_id).or_default().push(reference);
+ false
+ });
+ res
+ }
+
+ fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
+ let _p = profile::span("FindUsages:search");
+ let sema = self.sema;
+
+ let search_scope = {
+ let base = self.trait_assoc_def.unwrap_or(self.def).search_scope(sema.db);
+ match &self.scope {
+ None => base,
+ Some(scope) => base.intersection(scope),
+ }
+ };
+
+ let name = match self.def {
+ // special case crate modules as these do not have a proper name
+ Definition::Module(module) if module.is_crate_root(self.sema.db) => {
+ // FIXME: This assumes the crate name is always equal to its display name when it really isn't
+ module
+ .krate()
+ .display_name(self.sema.db)
+ .map(|crate_name| crate_name.crate_name().as_smol_str().clone())
+ }
+ _ => {
+ let self_kw_refs = || {
+ self.include_self_kw_refs.as_ref().and_then(|ty| {
+ ty.as_adt()
+ .map(|adt| adt.name(self.sema.db))
+ .or_else(|| ty.as_builtin().map(|builtin| builtin.name()))
+ })
+ };
+ self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.to_smol_str())
+ }
+ };
+ let name = match &name {
+ Some(s) => s.as_str(),
+ None => return,
+ };
+
+ // these can't be closures because rust infers the lifetimes wrong ...
+ fn match_indices<'a>(
+ text: &'a str,
+ name: &'a str,
+ search_range: TextRange,
+ ) -> impl Iterator<Item = TextSize> + 'a {
+ text.match_indices(name).filter_map(move |(idx, _)| {
+ let offset: TextSize = idx.try_into().unwrap();
+ if !search_range.contains_inclusive(offset) {
+ return None;
+ }
+ Some(offset)
+ })
+ }
+
+ fn scope_files<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ scope: &'a SearchScope,
+ ) -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a {
+ scope.entries.iter().map(|(&file_id, &search_range)| {
+ let text = sema.db.file_text(file_id);
+ let search_range =
+ search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
+
+ (text, file_id, search_range)
+ })
+ }
+
+ // FIXME: There should be optimization potential here
+ // Currently we try to descend everything we find which
+ // means we call `Semantics::descend_into_macros` on
+ // every textual hit. That function is notoriously
+ // expensive even for things that do not get down mapped
+ // into macros.
+ for (text, file_id, search_range) in scope_files(sema, &search_scope) {
+ let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
+
+ // Search for occurrences of the items name
+ for offset in match_indices(&text, name, search_range) {
+ for name in sema.find_nodes_at_offset_with_descend(&tree, offset) {
+ if match name {
+ ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
+ ast::NameLike::Name(name) => self.found_name(&name, sink),
+ ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink),
+ } {
+ return;
+ }
+ }
+ }
+ // Search for occurrences of the `Self` referring to our type
+ if let Some(self_ty) = &self.include_self_kw_refs {
+ for offset in match_indices(&text, "Self", search_range) {
+ for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
+ if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ // Search for `super` and `crate` resolving to our module
+ match self.def {
+ Definition::Module(module) => {
+ let scope = search_scope
+ .intersection(&SearchScope::module_and_children(self.sema.db, module));
+
+ let is_crate_root = module.is_crate_root(self.sema.db);
+
+ for (text, file_id, search_range) in scope_files(sema, &scope) {
+ let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
+
+ for offset in match_indices(&text, "super", search_range) {
+ for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
+ if self.found_name_ref(&name_ref, sink) {
+ return;
+ }
+ }
+ }
+ if is_crate_root {
+ for offset in match_indices(&text, "crate", search_range) {
+ for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
+ if self.found_name_ref(&name_ref, sink) {
+ return;
+ }
+ }
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+
+ // search for module `self` references in our module's definition source
+ match self.def {
+ Definition::Module(module) if self.search_self_mod => {
+ let src = module.definition_source(sema.db);
+ let file_id = src.file_id.original_file(sema.db);
+ let (file_id, search_range) = match src.value {
+ ModuleSource::Module(m) => (file_id, Some(m.syntax().text_range())),
+ ModuleSource::BlockExpr(b) => (file_id, Some(b.syntax().text_range())),
+ ModuleSource::SourceFile(_) => (file_id, None),
+ };
+
+ let search_range = if let Some(&range) = search_scope.entries.get(&file_id) {
+ match (range, search_range) {
+ (None, range) | (range, None) => range,
+ (Some(range), Some(search_range)) => match range.intersect(search_range) {
+ Some(range) => Some(range),
+ None => return,
+ },
+ }
+ } else {
+ return;
+ };
+
+ let text = sema.db.file_text(file_id);
+ let search_range =
+ search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
+
+ let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
+
+ for offset in match_indices(&text, "self", search_range) {
+ for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
+ if self.found_self_module_name_ref(&name_ref, sink) {
+ return;
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+
+ fn found_self_ty_name_ref(
+ &self,
+ self_ty: &hir::Type,
+ name_ref: &ast::NameRef,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match NameRefClass::classify(self.sema, name_ref) {
+ Some(NameRefClass::Definition(Definition::SelfType(impl_)))
+ if impl_.self_ty(self.sema.db) == *self_ty =>
+ {
+ let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::NameRef(name_ref.clone()),
+ category: None,
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+
+ fn found_self_module_name_ref(
+ &self,
+ name_ref: &ast::NameRef,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match NameRefClass::classify(self.sema, name_ref) {
+ Some(NameRefClass::Definition(def @ Definition::Module(_))) if def == self.def => {
+ let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::NameRef(name_ref.clone()),
+ category: None,
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+
+ fn found_lifetime(
+ &self,
+ lifetime: &ast::Lifetime,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match NameRefClass::classify_lifetime(self.sema, lifetime) {
+ Some(NameRefClass::Definition(def)) if def == self.def => {
+ let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::Lifetime(lifetime.clone()),
+ category: None,
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+
+ fn found_name_ref(
+ &self,
+ name_ref: &ast::NameRef,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match NameRefClass::classify(self.sema, name_ref) {
+ Some(NameRefClass::Definition(def @ Definition::Local(local)))
+ if matches!(
+ self.local_repr, Some(repr) if repr == local.representative(self.sema.db)
+ ) =>
+ {
+ let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::NameRef(name_ref.clone()),
+ category: ReferenceCategory::new(&def, name_ref),
+ };
+ sink(file_id, reference)
+ }
+ Some(NameRefClass::Definition(def))
+ if match self.trait_assoc_def {
+ Some(trait_assoc_def) => {
+ // we have a trait assoc item, so force resolve all assoc items to their trait version
+ convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
+ }
+ None => self.def == def,
+ } =>
+ {
+ let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::NameRef(name_ref.clone()),
+ category: ReferenceCategory::new(&def, name_ref),
+ };
+ sink(file_id, reference)
+ }
+ Some(NameRefClass::Definition(def)) if self.include_self_kw_refs.is_some() => {
+ if self.include_self_kw_refs == def_to_ty(self.sema, &def) {
+ let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::NameRef(name_ref.clone()),
+ category: ReferenceCategory::new(&def, name_ref),
+ };
+ sink(file_id, reference)
+ } else {
+ false
+ }
+ }
+ Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => {
+ let field = Definition::Field(field);
+ let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+ let access = match self.def {
+ Definition::Field(_) if field == self.def => {
+ ReferenceCategory::new(&field, name_ref)
+ }
+ Definition::Local(_) if matches!(self.local_repr, Some(repr) if repr == local.representative(self.sema.db)) => {
+ ReferenceCategory::new(&Definition::Local(local), name_ref)
+ }
+ _ => return false,
+ };
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::NameRef(name_ref.clone()),
+ category: access,
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+
+ fn found_name(
+ &self,
+ name: &ast::Name,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match NameClass::classify(self.sema, name) {
+ Some(NameClass::PatFieldShorthand { local_def: _, field_ref })
+ if matches!(
+ self.def, Definition::Field(_) if Definition::Field(field_ref) == self.def
+ ) =>
+ {
+ let FileRange { file_id, range } = self.sema.original_range(name.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::Name(name.clone()),
+ // FIXME: mutable patterns should have `Write` access
+ category: Some(ReferenceCategory::Read),
+ };
+ sink(file_id, reference)
+ }
+ Some(NameClass::ConstReference(def)) if self.def == def => {
+ let FileRange { file_id, range } = self.sema.original_range(name.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::Name(name.clone()),
+ category: None,
+ };
+ sink(file_id, reference)
+ }
+ Some(NameClass::Definition(def @ Definition::Local(local))) if def != self.def => {
+ if matches!(
+ self.local_repr,
+ Some(repr) if local.representative(self.sema.db) == repr
+ ) {
+ let FileRange { file_id, range } = self.sema.original_range(name.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::Name(name.clone()),
+ category: None,
+ };
+ return sink(file_id, reference);
+ }
+ false
+ }
+ Some(NameClass::Definition(def)) if def != self.def => {
+ // if the def we are looking for is a trait (impl) assoc item, we'll have to resolve the items to trait definition assoc item
+ if !matches!(
+ self.trait_assoc_def,
+ Some(trait_assoc_def)
+ if convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
+ ) {
+ return false;
+ }
+ let FileRange { file_id, range } = self.sema.original_range(name.syntax());
+ let reference = FileReference {
+ range,
+ name: ast::NameLike::Name(name.clone()),
+ category: None,
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+}
+
+fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir::Type> {
+ match def {
+ Definition::Adt(adt) => Some(adt.ty(sema.db)),
+ Definition::TypeAlias(it) => Some(it.ty(sema.db)),
+ Definition::BuiltinType(it) => Some(it.ty(sema.db)),
+ Definition::SelfType(it) => Some(it.self_ty(sema.db)),
+ _ => None,
+ }
+}
+
+impl ReferenceCategory {
+ fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
+ // Only Locals and Fields have accesses for now.
+ if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
+ return None;
+ }
+
+ let mode = r.syntax().ancestors().find_map(|node| {
+ match_ast! {
+ match node {
+ ast::BinExpr(expr) => {
+ if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
+ // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals).
+ // FIXME: This is not terribly accurate.
+ if let Some(lhs) = expr.lhs() {
+ if lhs.syntax().text_range().end() == r.syntax().text_range().end() {
+ return Some(ReferenceCategory::Write);
+ }
+ }
+ }
+ Some(ReferenceCategory::Read)
+ },
+ _ => None
+ }
+ }
+ });
+
+ // Default Locals and Fields to read
+ mode.or(Some(ReferenceCategory::Read))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
new file mode 100644
index 000000000..8132c73ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -0,0 +1,99 @@
+//! This module defines types to represent changes to the source code that flow
+//! from the server to the client.
+//!
+//! It can be viewed as a dual for `Change`.
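+//!
+//! A minimal sketch of how edits are accumulated (hedged: `file_id`, `range`
+//! and `dst` are assumed to be provided by the caller):
+//!
+//! ```ignore
+//! let mut change = SourceChange::default();
+//! change.insert_source_edit(file_id, TextEdit::replace(range, "new_name".to_string()));
+//! change.push_file_system_edit(FileSystemEdit::MoveFile { src: file_id, dst });
+//! ```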
+
+use std::{collections::hash_map::Entry, iter};
+
+use base_db::{AnchoredPathBuf, FileId};
+use rustc_hash::FxHashMap;
+use stdx::never;
+use text_edit::TextEdit;
+
+#[derive(Default, Debug, Clone)]
+pub struct SourceChange {
+ pub source_file_edits: FxHashMap<FileId, TextEdit>,
+ pub file_system_edits: Vec<FileSystemEdit>,
+ pub is_snippet: bool,
+}
+
+impl SourceChange {
+ /// Creates a new SourceChange from the given edits.
+ pub fn from_edits(
+ source_file_edits: FxHashMap<FileId, TextEdit>,
+ file_system_edits: Vec<FileSystemEdit>,
+ ) -> Self {
+ SourceChange { source_file_edits, file_system_edits, is_snippet: false }
+ }
+
+ pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
+ SourceChange {
+ source_file_edits: iter::once((file_id, edit)).collect(),
+ ..Default::default()
+ }
+ }
+
+ /// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
+ /// edits for a file if some already exist.
+ pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
+ match self.source_file_edits.entry(file_id) {
+ Entry::Occupied(mut entry) => {
+ never!(entry.get_mut().union(edit).is_err(), "overlapping edits for same file");
+ }
+ Entry::Vacant(entry) => {
+ entry.insert(edit);
+ }
+ }
+ }
+
+ pub fn push_file_system_edit(&mut self, edit: FileSystemEdit) {
+ self.file_system_edits.push(edit);
+ }
+
+ pub fn get_source_edit(&self, file_id: FileId) -> Option<&TextEdit> {
+ self.source_file_edits.get(&file_id)
+ }
+
+ pub fn merge(mut self, other: SourceChange) -> SourceChange {
+ self.extend(other.source_file_edits);
+ self.extend(other.file_system_edits);
+ self.is_snippet |= other.is_snippet;
+ self
+ }
+}
+
+impl Extend<(FileId, TextEdit)> for SourceChange {
+ fn extend<T: IntoIterator<Item = (FileId, TextEdit)>>(&mut self, iter: T) {
+ iter.into_iter().for_each(|(file_id, edit)| self.insert_source_edit(file_id, edit));
+ }
+}
+
+impl Extend<FileSystemEdit> for SourceChange {
+ fn extend<T: IntoIterator<Item = FileSystemEdit>>(&mut self, iter: T) {
+ iter.into_iter().for_each(|edit| self.push_file_system_edit(edit));
+ }
+}
+
+impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
+ fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
+ SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum FileSystemEdit {
+ CreateFile { dst: AnchoredPathBuf, initial_contents: String },
+ MoveFile { src: FileId, dst: AnchoredPathBuf },
+ MoveDir { src: AnchoredPathBuf, src_id: FileId, dst: AnchoredPathBuf },
+}
+
+impl From<FileSystemEdit> for SourceChange {
+ fn from(edit: FileSystemEdit) -> SourceChange {
+ SourceChange {
+ source_file_edits: Default::default(),
+ file_system_edits: vec![edit],
+ is_snippet: false,
+ }
+ }
+}
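
A minimal usage sketch of the API defined above (illustrative, not part of the patch; the function name and the inputs are made up):

```rust
use base_db::{AnchoredPathBuf, FileId};
use syntax::{TextRange, TextSize};
use text_edit::TextEdit;

// Compose a SourceChange from two text edits plus a file-system edit.
fn example(file_a: FileId, file_b: FileId, dst: AnchoredPathBuf) -> SourceChange {
    // Start from a single edit for one file...
    let mut change = SourceChange::from_text_edit(
        file_a,
        TextEdit::insert(TextSize::from(0), "// header\n".to_string()),
    );
    // ...add an edit for another file (routed through `insert_source_edit`, which
    // merges edits that target the same file)...
    change.insert_source_edit(
        file_b,
        TextEdit::replace(TextRange::new(TextSize::from(0), TextSize::from(3)), "new".to_string()),
    );
    // ...and record a file creation alongside the text edits.
    change.push_file_system_edit(FileSystemEdit::CreateFile { dst, initial_contents: String::new() });
    change
}
```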
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
new file mode 100644
index 000000000..bfb003127
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -0,0 +1,429 @@
+//! This module handles fuzzy-searching of functions, structs and other symbols
+//! by name across the whole workspace and dependencies.
+//!
+//! It works by building an incrementally-updated text-search index of all
+//! symbols. The backbone of the index is the **awesome** `fst` crate by
+//! @BurntSushi.
+//!
+//! In a nutshell, you give a set of strings to `fst`, and it builds a
+//! finite state machine describing this set of strings. The strings which
+//! could fuzzy-match a pattern can also be described by a finite state machine.
+//! What is freaking cool is that you can now traverse both state machines in
+//! lock-step to enumerate the strings which are both in the input set and
+//! fuzzy-match the query. Or, more formally, given two languages described by
+//! FSTs, one can build a product FST which describes the intersection of the
+//! languages.
+//!
+//! `fst` does not support cheap updating of the index, but it supports unioning
+//! of state machines. So, to account for changing source code, we build an FST
+//! for each library (which is assumed to never change) and an FST for each Rust
+//! file in the current workspace, and run a query against the union of all
+//! those FSTs.
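
For readers unfamiliar with `fst`, the core idiom the index builds on looks roughly like this (a minimal, self-contained sketch using the same `fst` APIs the code below relies on; it is not part of the patch):

```rust
use fst::{automaton::Subsequence, IntoStreamer, Streamer};

fn sketch() -> Result<(), Box<dyn std::error::Error>> {
    // Keys must be inserted in lexicographic order; values are arbitrary u64 payloads.
    let mut builder = fst::MapBuilder::memory();
    builder.insert("foo_bar", 0)?;
    builder.insert("spam", 1)?;
    let map = fst::Map::new(builder.into_inner()?)?;

    // A Subsequence automaton matches every key that contains the query characters
    // in order ("fb" matches "foo_bar"), which is the fuzzy matching described above.
    let mut stream = map.search(Subsequence::new("fb")).into_stream();
    while let Some((key, value)) = stream.next() {
        println!("{} -> {}", String::from_utf8_lossy(key), value);
    }
    Ok(())
}
```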
+
+use std::{
+ cmp::Ordering,
+ fmt,
+ hash::{Hash, Hasher},
+ mem,
+ sync::Arc,
+};
+
+use base_db::{
+ salsa::{self, ParallelDatabase},
+ SourceDatabaseExt, SourceRootId, Upcast,
+};
+use fst::{self, Streamer};
+use hir::{
+ db::HirDatabase,
+ symbols::{FileSymbol, SymbolCollector},
+ Crate, Module,
+};
+use rayon::prelude::*;
+use rustc_hash::FxHashSet;
+
+use crate::RootDatabase;
+
+#[derive(Debug)]
+pub struct Query {
+ query: String,
+ lowercased: String,
+ only_types: bool,
+ libs: bool,
+ exact: bool,
+ case_sensitive: bool,
+ limit: usize,
+}
+
+impl Query {
+ pub fn new(query: String) -> Query {
+ let lowercased = query.to_lowercase();
+ Query {
+ query,
+ lowercased,
+ only_types: false,
+ libs: false,
+ exact: false,
+ case_sensitive: false,
+ limit: usize::max_value(),
+ }
+ }
+
+ pub fn only_types(&mut self) {
+ self.only_types = true;
+ }
+
+ pub fn libs(&mut self) {
+ self.libs = true;
+ }
+
+ pub fn exact(&mut self) {
+ self.exact = true;
+ }
+
+ pub fn case_sensitive(&mut self) {
+ self.case_sensitive = true;
+ }
+
+ pub fn limit(&mut self, limit: usize) {
+ self.limit = limit
+ }
+}
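
The setters above mutate the query in place rather than returning `Self`, so a caller configures a query imperatively; roughly (illustrative, not part of the patch):

```rust
// Illustrative: configuring a workspace-symbol query with the setters above.
fn configure() -> Query {
    let mut query = Query::new("FooBar".to_string());
    query.only_types(); // only match type-like symbols
    query.libs();       // search library roots instead of the local workspace
    query.limit(128);   // cap the number of returned symbols
    query
}
```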
+
+#[salsa::query_group(SymbolsDatabaseStorage)]
+pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> {
+ /// The symbol index for a given module. These modules should only be in source roots that
+ /// are inside local_roots.
+ fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
+
+ /// The symbol index for a given source root within library_roots.
+ fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>;
+
+ /// The set of "local" (that is, from the current workspace) roots.
+ /// Files in local roots are assumed to change frequently.
+ #[salsa::input]
+ fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
+
+ /// The set of roots for crates.io libraries.
+ /// Files in libraries are assumed to never change.
+ #[salsa::input]
+ fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
+}
+
+fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> {
+ let _p = profile::span("library_symbols");
+
+ // todo: this could be parallelized, once I figure out how to do that...
+ let symbols = db
+ .source_root_crates(source_root_id)
+ .iter()
+ .flat_map(|&krate| Crate::from(krate).modules(db.upcast()))
+        // We specifically avoid calling `SymbolsDatabase::module_symbols` here, even though it does
+        // the same thing, because the index for a library is essentially never going to change, and we
+        // do not want to store each module's index in salsa.
+ .flat_map(|module| SymbolCollector::collect(db.upcast(), module))
+ .collect();
+
+ Arc::new(SymbolIndex::new(symbols))
+}
+
+fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
+ let _p = profile::span("module_symbols");
+ let symbols = SymbolCollector::collect(db.upcast(), module);
+ Arc::new(SymbolIndex::new(symbols))
+}
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Snap<salsa::Snapshot<DB>> {
+ fn new(db: &DB) -> Self {
+ Self(db.snapshot())
+ }
+}
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+ fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+ Snap(self.0.snapshot())
+ }
+}
+impl<DB> std::ops::Deref for Snap<DB> {
+ type Target = DB;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+// Feature: Workspace Symbol
+//
+// Uses fuzzy-search to find types, modules and functions by name across your
+// project and dependencies. This is **the** most useful feature, which improves code
+// navigation tremendously. It mostly works on top of the built-in LSP
+// functionality; however, the `#` and `*` symbols can be used to narrow down the
+// search. Specifically,
+//
+// - `Foo` searches for `Foo` type in the current workspace
+// - `foo#` searches for `foo` function in the current workspace
+// - `Foo*` searches for `Foo` type among dependencies, including `stdlib`
+// - `foo#*` searches for `foo` function among dependencies
+//
+// That is, `#` switches from "types" to all symbols, `*` switches from the current
+// workspace to dependencies.
+//
+// Note that filtering does not currently work in VSCode due to the editor never
+// sending the special symbols to the language server. Instead, you can configure
+// the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
+// `rust-analyzer.workspace.symbol.search.kind` settings.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+T]
+// |===
+pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
+ let _p = profile::span("world_symbols").detail(|| query.query.clone());
+
+ let indices: Vec<_> = if query.libs {
+ db.library_roots()
+ .par_iter()
+ .map_with(Snap::new(db), |snap, &root| snap.library_symbols(root))
+ .collect()
+ } else {
+ let mut modules = Vec::new();
+
+ for &root in db.local_roots().iter() {
+ let crates = db.source_root_crates(root);
+ for &krate in crates.iter() {
+ modules.extend(Crate::from(krate).modules(db));
+ }
+ }
+
+ modules
+ .par_iter()
+ .map_with(Snap::new(db), |snap, &module| snap.module_symbols(module))
+ .collect()
+ };
+
+ query.search(&indices)
+}
+
+pub fn crate_symbols(db: &RootDatabase, krate: Crate, query: Query) -> Vec<FileSymbol> {
+ let _p = profile::span("crate_symbols").detail(|| format!("{:?}", query));
+
+ let modules = krate.modules(db);
+ let indices: Vec<_> = modules
+ .par_iter()
+ .map_with(Snap::new(db), |snap, &module| snap.module_symbols(module))
+ .collect();
+
+ query.search(&indices)
+}
+
+#[derive(Default)]
+pub struct SymbolIndex {
+ symbols: Vec<FileSymbol>,
+ map: fst::Map<Vec<u8>>,
+}
+
+impl fmt::Debug for SymbolIndex {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish()
+ }
+}
+
+impl PartialEq for SymbolIndex {
+ fn eq(&self, other: &SymbolIndex) -> bool {
+ self.symbols == other.symbols
+ }
+}
+
+impl Eq for SymbolIndex {}
+
+impl Hash for SymbolIndex {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.symbols.hash(hasher)
+ }
+}
+
+impl SymbolIndex {
+ fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
+ fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering {
+ let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase());
+ let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase());
+ lhs_chars.cmp(rhs_chars)
+ }
+
+ symbols.par_sort_by(cmp);
+
+ let mut builder = fst::MapBuilder::memory();
+
+ let mut last_batch_start = 0;
+
+ for idx in 0..symbols.len() {
+ if let Some(next_symbol) = symbols.get(idx + 1) {
+ if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal {
+ continue;
+ }
+ }
+
+ let start = last_batch_start;
+ let end = idx + 1;
+ last_batch_start = end;
+
+ let key = symbols[start].name.as_str().to_ascii_lowercase();
+ let value = SymbolIndex::range_to_map_value(start, end);
+
+ builder.insert(key, value).unwrap();
+ }
+
+ let map = fst::Map::new(builder.into_inner().unwrap()).unwrap();
+ SymbolIndex { symbols, map }
+ }
+
+ pub fn len(&self) -> usize {
+ self.symbols.len()
+ }
+
+ pub fn memory_size(&self) -> usize {
+ self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>()
+ }
+
+ fn range_to_map_value(start: usize, end: usize) -> u64 {
+ debug_assert![start <= (std::u32::MAX as usize)];
+ debug_assert![end <= (std::u32::MAX as usize)];
+
+ ((start as u64) << 32) | end as u64
+ }
+
+ fn map_value_to_range(value: u64) -> (usize, usize) {
+ let end = value as u32 as usize;
+ let start = (value >> 32) as usize;
+ (start, end)
+ }
+}
+
+impl Query {
+ pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> {
+ let _p = profile::span("symbol_index::Query::search");
+ let mut op = fst::map::OpBuilder::new();
+ for file_symbols in indices.iter() {
+ let automaton = fst::automaton::Subsequence::new(&self.lowercased);
+ op = op.add(file_symbols.map.search(automaton))
+ }
+ let mut stream = op.union();
+ let mut res = Vec::new();
+ while let Some((_, indexed_values)) = stream.next() {
+ for indexed_value in indexed_values {
+ let symbol_index = &indices[indexed_value.index];
+ let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value);
+
+ for symbol in &symbol_index.symbols[start..end] {
+ if self.only_types && !symbol.kind.is_type() {
+ continue;
+ }
+ if self.exact {
+ if symbol.name != self.query {
+ continue;
+ }
+ } else if self.case_sensitive {
+ if self.query.chars().any(|c| !symbol.name.contains(c)) {
+ continue;
+ }
+ }
+
+ res.push(symbol.clone());
+ if res.len() >= self.limit {
+ return res;
+ }
+ }
+ }
+ }
+ res
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use base_db::fixture::WithFixture;
+ use expect_test::expect_file;
+ use hir::symbols::SymbolCollector;
+
+ use super::*;
+
+ #[test]
+ fn test_symbol_index_collection() {
+ let (db, _) = RootDatabase::with_many_files(
+ r#"
+//- /main.rs
+
+macro_rules! macro_rules_macro {
+ () => {}
+};
+
+macro_rules! define_struct {
+ () => {
+ struct StructFromMacro;
+ }
+};
+
+define_struct!();
+
+macro Macro { }
+
+struct Struct;
+enum Enum {
+ A, B
+}
+union Union {}
+
+impl Struct {
+ fn impl_fn() {}
+}
+
+trait Trait {
+ fn trait_fn(&self);
+}
+
+fn main() {
+ struct StructInFn;
+}
+
+const CONST: u32 = 1;
+static STATIC: &'static str = "2";
+type Alias = Struct;
+
+mod a_mod {
+ struct StructInModA;
+}
+
+const _: () = {
+ struct StructInUnnamedConst;
+
+ ()
+};
+
+const CONST_WITH_INNER: () = {
+ struct StructInNamedConst;
+
+ ()
+};
+
+mod b_mod;
+
+//- /b_mod.rs
+struct StructInModB;
+ "#,
+ );
+
+ let symbols: Vec<_> = Crate::from(db.test_crate())
+ .modules(&db)
+ .into_iter()
+ .map(|module_id| {
+ let mut symbols = SymbolCollector::collect(&db, module_id);
+ symbols.sort_by_key(|it| it.name.clone());
+ (module_id, symbols)
+ })
+ .collect();
+
+ expect_file!["./test_data/test_symbol_index_collection.txt"].assert_debug_eq(&symbols);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
new file mode 100644
index 000000000..f48a57008
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
@@ -0,0 +1,308 @@
+//! Tools to work with format string literals for the `format_args!` family of macros.
+use syntax::{
+ ast::{self, IsString},
+ AstNode, AstToken, TextRange, TextSize,
+};
+
+pub fn is_format_string(string: &ast::String) -> bool {
+ // Check if `string` is a format string argument of a macro invocation.
+ // `string` is a string literal, mapped down into the innermost macro expansion.
+ // Since `format_args!` etc. remove the format string when expanding, but place all arguments
+ // in the expanded output, we know that the string token is (part of) the format string if it
+ // appears in `format_args!` (otherwise it would have been mapped down further).
+ //
+ // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
+ // strings. It still fails for `concat!("{", "}")`, but that is rare.
+ (|| {
+ let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+
+ if !matches!(
+ name.text().as_str(),
+ "format_args" | "format_args_nl" | "const_format_args" | "panic_2015" | "panic_2021"
+ ) {
+ return None;
+ }
+
+ // NB: we match against `panic_2015`/`panic_2021` here because they have a special-cased arm for
+ // `"{}"`, which otherwise wouldn't get highlighted.
+
+ Some(())
+ })()
+ .is_some()
+}
+
+#[derive(Debug)]
+pub enum FormatSpecifier {
+ Open,
+ Close,
+ Integer,
+ Identifier,
+ Colon,
+ Fill,
+ Align,
+ Sign,
+ NumberSign,
+ Zero,
+ DollarSign,
+ Dot,
+ Asterisk,
+ QuestionMark,
+ Escape,
+}
+
+pub fn lex_format_specifiers(
+ string: &ast::String,
+ mut callback: &mut dyn FnMut(TextRange, FormatSpecifier),
+) {
+ let mut char_ranges = Vec::new();
+ string.escaped_char_ranges(&mut |range, res| char_ranges.push((range, res)));
+ let mut chars = char_ranges
+ .iter()
+ .filter_map(|(range, res)| Some((*range, *res.as_ref().ok()?)))
+ .peekable();
+
+ while let Some((range, first_char)) = chars.next() {
+ if let '{' = first_char {
+ // Format specifier, see syntax at https://doc.rust-lang.org/std/fmt/index.html#syntax
+ if let Some((_, '{')) = chars.peek() {
+ // Escaped format specifier, `{{`
+ read_escaped_format_specifier(&mut chars, &mut callback);
+ continue;
+ }
+
+ callback(range, FormatSpecifier::Open);
+
+ // check for integer/identifier
+ let (_, int_char) = chars.peek().copied().unwrap_or_default();
+ match int_char {
+ // integer
+ '0'..='9' => read_integer(&mut chars, &mut callback),
+ // identifier
+ c if c == '_' || c.is_alphabetic() => read_identifier(&mut chars, &mut callback),
+ _ => {}
+ }
+
+ if let Some((_, ':')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Colon, &mut callback);
+
+ // check for fill/align
+ let mut cloned = chars.clone().take(2);
+ let (_, first) = cloned.next().unwrap_or_default();
+ let (_, second) = cloned.next().unwrap_or_default();
+ match second {
+ '<' | '^' | '>' => {
+                        // alignment specifier; the first char specifies the fill character
+ skip_char_and_emit(&mut chars, FormatSpecifier::Fill, &mut callback);
+ skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
+ }
+ _ => {
+ if let '<' | '^' | '>' = first {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
+ }
+ }
+ }
+
+ // check for sign
+ match chars.peek().copied().unwrap_or_default().1 {
+ '+' | '-' => {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Sign, &mut callback);
+ }
+ _ => {}
+ }
+
+ // check for `#`
+ if let Some((_, '#')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::NumberSign, &mut callback);
+ }
+
+ // check for `0`
+ let mut cloned = chars.clone().take(2);
+ let first = cloned.next().map(|next| next.1);
+ let second = cloned.next().map(|next| next.1);
+
+ if first == Some('0') && second != Some('$') {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Zero, &mut callback);
+ }
+
+ // width
+ match chars.peek().copied().unwrap_or_default().1 {
+ '0'..='9' => {
+ read_integer(&mut chars, &mut callback);
+ if let Some((_, '$')) = chars.peek() {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+
+ if chars.peek().map(|&(_, c)| c) == Some('?') {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+
+                        // can be either a width (indicated by a dollar sign) or a type (in which
+                        // case the next char has to be `}`)
+ let next = chars.peek().map(|&(_, c)| c);
+
+ match next {
+ Some('$') => skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ ),
+ Some('}') => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::Close,
+ &mut callback,
+ );
+ continue;
+ }
+ _ => continue,
+ };
+ }
+ _ => {}
+ }
+
+ // precision
+ if let Some((_, '.')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Dot, &mut callback);
+
+ match chars.peek().copied().unwrap_or_default().1 {
+ '*' => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::Asterisk,
+ &mut callback,
+ );
+ }
+ '0'..='9' => {
+ read_integer(&mut chars, &mut callback);
+ if let Some((_, '$')) = chars.peek() {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+ if chars.peek().map(|&(_, c)| c) != Some('$') {
+ continue;
+ }
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ _ => {
+ continue;
+ }
+ }
+ }
+
+ // type
+ match chars.peek().copied().unwrap_or_default().1 {
+ '?' => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+
+ if chars.peek().map(|&(_, c)| c) == Some('?') {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+ }
+ _ => {}
+ }
+ }
+
+ if let Some((_, '}')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback);
+ }
+ continue;
+ } else if let '}' = first_char {
+ if let Some((_, '}')) = chars.peek() {
+ // Escaped format specifier, `}}`
+ read_escaped_format_specifier(&mut chars, &mut callback);
+ }
+ }
+ }
+
+ fn skip_char_and_emit<I, F>(
+ chars: &mut std::iter::Peekable<I>,
+ emit: FormatSpecifier,
+ callback: &mut F,
+ ) where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (range, _) = chars.next().unwrap();
+ callback(range, emit);
+ }
+
+ fn read_integer<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (mut range, c) = chars.next().unwrap();
+ assert!(c.is_ascii_digit());
+ while let Some(&(r, next_char)) = chars.peek() {
+ if next_char.is_ascii_digit() {
+ chars.next();
+ range = range.cover(r);
+ } else {
+ break;
+ }
+ }
+ callback(range, FormatSpecifier::Integer);
+ }
+
+ fn read_identifier<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (mut range, c) = chars.next().unwrap();
+ assert!(c.is_alphabetic() || c == '_');
+ while let Some(&(r, next_char)) = chars.peek() {
+ if next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() {
+ chars.next();
+ range = range.cover(r);
+ } else {
+ break;
+ }
+ }
+ callback(range, FormatSpecifier::Identifier);
+ }
+
+ fn read_escaped_format_specifier<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (range, _) = chars.peek().unwrap();
+ let offset = TextSize::from(1);
+ callback(TextRange::new(range.start() - offset, range.end()), FormatSpecifier::Escape);
+ chars.next();
+ }
+}
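
As a hand-traced illustration of the lexer above (not a test from this file), the format literal `"{value:>8.2$}"` would produce roughly this sequence of specifiers:

```rust
// {       -> Open
// value   -> Identifier
// :       -> Colon
// >       -> Align
// 8       -> Integer     (width)
// .       -> Dot
// 2       -> Integer     (precision argument)
// $       -> DollarSign
// }       -> Close
```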
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
new file mode 100644
index 000000000..f54ae6c92
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs
@@ -0,0 +1,136 @@
+//! Utilities for formatting macro expanded nodes until we get a proper formatter.
+use syntax::{
+ ast::make,
+ ted::{self, Position},
+ NodeOrToken,
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, WalkEvent, T,
+};
+
+// FIXME: It would also be cool to share logic here and in the mbe tests,
+// which are pretty unreadable at the moment.
+/// Renders a [`SyntaxNode`] with whitespace inserted between tokens that require it.
+pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
+ let mut indent = 0;
+ let mut last: Option<SyntaxKind> = None;
+ let mut mods = Vec::new();
+ let syn = syn.clone_subtree().clone_for_update();
+
+ let before = Position::before;
+ let after = Position::after;
+
+ let do_indent = |pos: fn(_) -> Position, token: &SyntaxToken, indent| {
+ (pos(token.clone()), make::tokens::whitespace(&" ".repeat(2 * indent)))
+ };
+ let do_ws = |pos: fn(_) -> Position, token: &SyntaxToken| {
+ (pos(token.clone()), make::tokens::single_space())
+ };
+ let do_nl = |pos: fn(_) -> Position, token: &SyntaxToken| {
+ (pos(token.clone()), make::tokens::single_newline())
+ };
+
+ for event in syn.preorder_with_tokens() {
+ let token = match event {
+ WalkEvent::Enter(NodeOrToken::Token(token)) => token,
+ WalkEvent::Leave(NodeOrToken::Node(node))
+ if matches!(
+ node.kind(),
+ ATTR | MATCH_ARM | STRUCT | ENUM | UNION | FN | IMPL | MACRO_RULES
+ ) =>
+ {
+ if indent > 0 {
+ mods.push((
+ Position::after(node.clone()),
+ make::tokens::whitespace(&" ".repeat(2 * indent)),
+ ));
+ }
+ if node.parent().is_some() {
+ mods.push((Position::after(node), make::tokens::single_newline()));
+ }
+ continue;
+ }
+ _ => continue,
+ };
+ let tok = &token;
+
+ let is_next = |f: fn(SyntaxKind) -> bool, default| -> bool {
+ tok.next_token().map(|it| f(it.kind())).unwrap_or(default)
+ };
+ let is_last =
+ |f: fn(SyntaxKind) -> bool, default| -> bool { last.map(f).unwrap_or(default) };
+
+ match tok.kind() {
+ k if is_text(k) && is_next(|it| !it.is_punct() || it == UNDERSCORE, false) => {
+ mods.push(do_ws(after, tok));
+ }
+ L_CURLY if is_next(|it| it != R_CURLY, true) => {
+ indent += 1;
+ if is_last(is_text, false) {
+ mods.push(do_ws(before, tok));
+ }
+
+ mods.push(do_indent(after, tok, indent));
+ mods.push(do_nl(after, tok));
+ }
+ R_CURLY if is_last(|it| it != L_CURLY, true) => {
+ indent = indent.saturating_sub(1);
+
+ if indent > 0 {
+ mods.push(do_indent(before, tok, indent));
+ }
+ mods.push(do_nl(before, tok));
+ }
+ R_CURLY => {
+ if indent > 0 {
+ mods.push(do_indent(after, tok, indent));
+ }
+ mods.push(do_nl(after, tok));
+ }
+ LIFETIME_IDENT if is_next(is_text, true) => {
+ mods.push(do_ws(after, tok));
+ }
+ MUT_KW if is_next(|it| it == SELF_KW, false) => {
+ mods.push(do_ws(after, tok));
+ }
+ AS_KW | DYN_KW | IMPL_KW | CONST_KW => {
+ mods.push(do_ws(after, tok));
+ }
+ T![;] => {
+ if indent > 0 {
+ mods.push(do_indent(after, tok, indent));
+ }
+ mods.push(do_nl(after, tok));
+ }
+ T![=] if is_next(|it| it == T![>], false) => {
+ // FIXME: this branch is for `=>` in macro_rules!, which is currently parsed as
+ // two separate symbols.
+ mods.push(do_ws(before, tok));
+ mods.push(do_ws(after, &tok.next_token().unwrap()));
+ }
+ T![->] | T![=] | T![=>] => {
+ mods.push(do_ws(before, tok));
+ mods.push(do_ws(after, tok));
+ }
+ T![!] if is_last(|it| it == MACRO_RULES_KW, false) && is_next(is_text, false) => {
+ mods.push(do_ws(after, tok));
+ }
+ _ => (),
+ }
+
+ last = Some(tok.kind());
+ }
+
+ for (pos, insert) in mods {
+ ted::insert(pos, insert);
+ }
+
+ if let Some(it) = syn.last_token().filter(|it| it.kind() == SyntaxKind::WHITESPACE) {
+ ted::remove(it);
+ }
+
+ syn
+}
+
+fn is_text(k: SyntaxKind) -> bool {
+ k.is_keyword() || k.is_literal() || k == IDENT || k == UNDERSCORE
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
new file mode 100644
index 000000000..84bde4d44
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -0,0 +1,460 @@
+//! Various helper functions to work with SyntaxNodes.
+use itertools::Itertools;
+use parser::T;
+use syntax::{
+ ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind},
+ AstNode, Preorder, RustLanguage, WalkEvent,
+};
+
+pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
+ if let ast::Expr::PathExpr(expr) = expr {
+ let path = expr.path()?;
+ path.as_single_name_ref()
+ } else {
+ None
+ }
+}
+
+pub fn full_path_of_name_ref(name_ref: &ast::NameRef) -> Option<ast::Path> {
+ let mut ancestors = name_ref.syntax().ancestors();
+ let _ = ancestors.next()?; // skip self
+ let _ = ancestors.next().filter(|it| ast::PathSegment::can_cast(it.kind()))?; // skip self
+ ancestors.take_while(|it| ast::Path::can_cast(it.kind())).last().and_then(ast::Path::cast)
+}
+
+pub fn block_as_lone_tail(block: &ast::BlockExpr) -> Option<ast::Expr> {
+ block.statements().next().is_none().then(|| block.tail_expr()).flatten()
+}
+
+/// Preorder walk all the expression's child expressions.
+pub fn walk_expr(expr: &ast::Expr, cb: &mut dyn FnMut(ast::Expr)) {
+ preorder_expr(expr, &mut |ev| {
+ if let WalkEvent::Enter(expr) = ev {
+ cb(expr);
+ }
+ false
+ })
+}
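
A small usage sketch (not part of the patch; it assumes the usual `syntax::SourceFile` parsing entry point): counting every expression reachable from the first expression found in a snippet.

```rust
use syntax::{ast, AstNode, SourceFile};

fn count_exprs(code: &str) -> usize {
    let file = SourceFile::parse(code).tree();
    let expr = file.syntax().descendants().find_map(ast::Expr::cast).expect("no expression found");
    let mut count = 0;
    walk_expr(&expr, &mut |_| count += 1);
    count
}
```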
+
+/// Preorder walk all the expression's child expressions preserving events.
+/// If the callback returns true on a [`WalkEvent::Enter`], the subtree of the expression will be skipped.
+/// Note that the subtree may already be skipped due to the context analysis this function does.
+pub fn preorder_expr(start: &ast::Expr, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) {
+ let mut preorder = start.syntax().preorder();
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ WalkEvent::Enter(node) => node,
+ WalkEvent::Leave(node) => {
+ if let Some(expr) = ast::Expr::cast(node) {
+ cb(WalkEvent::Leave(expr));
+ }
+ continue;
+ }
+ };
+ if let Some(let_stmt) = node.parent().and_then(ast::LetStmt::cast) {
+ if Some(node.clone()) != let_stmt.initializer().map(|it| it.syntax().clone()) {
+ // skipping potential const pat expressions in let statements
+ preorder.skip_subtree();
+ continue;
+ }
+ }
+
+ match ast::Stmt::cast(node.clone()) {
+ // Don't skip subtree since we want to process the expression child next
+ Some(ast::Stmt::ExprStmt(_)) | Some(ast::Stmt::LetStmt(_)) => (),
+ // skip inner items which might have their own expressions
+ Some(ast::Stmt::Item(_)) => preorder.skip_subtree(),
+ None => {
+ // skip const args, those expressions are a different context
+ if ast::GenericArg::can_cast(node.kind()) {
+ preorder.skip_subtree();
+ } else if let Some(expr) = ast::Expr::cast(node) {
+ let is_different_context = match &expr {
+ ast::Expr::BlockExpr(block_expr) => {
+ matches!(
+ block_expr.modifier(),
+ Some(
+ ast::BlockModifier::Async(_)
+ | ast::BlockModifier::Try(_)
+ | ast::BlockModifier::Const(_)
+ )
+ )
+ }
+ ast::Expr::ClosureExpr(_) => true,
+ _ => false,
+ } && expr.syntax() != start.syntax();
+ let skip = cb(WalkEvent::Enter(expr));
+ if skip || is_different_context {
+ preorder.skip_subtree();
+ }
+ }
+ }
+ }
+ }
+}
+
+/// Preorder walk all the expression's child patterns.
+pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) {
+ let mut preorder = start.syntax().preorder();
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ WalkEvent::Enter(node) => node,
+ WalkEvent::Leave(_) => continue,
+ };
+ match ast::Stmt::cast(node.clone()) {
+ Some(ast::Stmt::LetStmt(l)) => {
+ if let Some(pat) = l.pat() {
+ walk_pat(&pat, cb);
+ }
+ if let Some(expr) = l.initializer() {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ preorder.skip_subtree();
+ }
+ // Don't skip subtree since we want to process the expression child next
+ Some(ast::Stmt::ExprStmt(_)) => (),
+ // skip inner items which might have their own patterns
+ Some(ast::Stmt::Item(_)) => preorder.skip_subtree(),
+ None => {
+ // skip const args, those are a different context
+ if ast::GenericArg::can_cast(node.kind()) {
+ preorder.skip_subtree();
+ } else if let Some(expr) = ast::Expr::cast(node.clone()) {
+ let is_different_context = match &expr {
+ ast::Expr::BlockExpr(block_expr) => {
+ matches!(
+ block_expr.modifier(),
+ Some(
+ ast::BlockModifier::Async(_)
+ | ast::BlockModifier::Try(_)
+ | ast::BlockModifier::Const(_)
+ )
+ )
+ }
+ ast::Expr::ClosureExpr(_) => true,
+ _ => false,
+ } && expr.syntax() != start.syntax();
+ if is_different_context {
+ preorder.skip_subtree();
+ }
+ } else if let Some(pat) = ast::Pat::cast(node) {
+ preorder.skip_subtree();
+ walk_pat(&pat, cb);
+ }
+ }
+ }
+ }
+}
+
+/// Preorder walk all the pattern's sub patterns.
+pub fn walk_pat(pat: &ast::Pat, cb: &mut dyn FnMut(ast::Pat)) {
+ let mut preorder = pat.syntax().preorder();
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ WalkEvent::Enter(node) => node,
+ WalkEvent::Leave(_) => continue,
+ };
+ let kind = node.kind();
+ match ast::Pat::cast(node) {
+ Some(pat @ ast::Pat::ConstBlockPat(_)) => {
+ preorder.skip_subtree();
+ cb(pat);
+ }
+ Some(pat) => {
+ cb(pat);
+ }
+ // skip const args
+ None if ast::GenericArg::can_cast(kind) => {
+ preorder.skip_subtree();
+ }
+ None => (),
+ }
+ }
+}
+
+/// Preorder walk all the type's sub types.
+pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) {
+ let mut preorder = ty.syntax().preorder();
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ WalkEvent::Enter(node) => node,
+ WalkEvent::Leave(_) => continue,
+ };
+ let kind = node.kind();
+ match ast::Type::cast(node) {
+ Some(ty @ ast::Type::MacroType(_)) => {
+ preorder.skip_subtree();
+ cb(ty)
+ }
+ Some(ty) => {
+ cb(ty);
+ }
+ // skip const args
+ None if ast::ConstArg::can_cast(kind) => {
+ preorder.skip_subtree();
+ }
+ None => (),
+ }
+ }
+}
+
+pub fn vis_eq(this: &ast::Visibility, other: &ast::Visibility) -> bool {
+ match (this.kind(), other.kind()) {
+ (VisibilityKind::In(this), VisibilityKind::In(other)) => {
+ stdx::iter_eq_by(this.segments(), other.segments(), |lhs, rhs| {
+ lhs.kind().zip(rhs.kind()).map_or(false, |it| match it {
+ (PathSegmentKind::CrateKw, PathSegmentKind::CrateKw)
+ | (PathSegmentKind::SelfKw, PathSegmentKind::SelfKw)
+ | (PathSegmentKind::SuperKw, PathSegmentKind::SuperKw) => true,
+ (PathSegmentKind::Name(lhs), PathSegmentKind::Name(rhs)) => {
+ lhs.text() == rhs.text()
+ }
+ _ => false,
+ })
+ })
+ }
+ (VisibilityKind::PubSelf, VisibilityKind::PubSelf)
+ | (VisibilityKind::PubSuper, VisibilityKind::PubSuper)
+ | (VisibilityKind::PubCrate, VisibilityKind::PubCrate)
+ | (VisibilityKind::Pub, VisibilityKind::Pub) => true,
+ _ => false,
+ }
+}
+
+/// Returns the `let` only if there is exactly one (that is, `let pat = expr`
+/// or `((let pat = expr))`, but not `let pat = expr && expr` or `non_let_expr`).
+pub fn single_let(expr: ast::Expr) -> Option<ast::LetExpr> {
+ match expr {
+ ast::Expr::ParenExpr(expr) => expr.expr().and_then(single_let),
+ ast::Expr::LetExpr(expr) => Some(expr),
+ _ => None,
+ }
+}
+
+pub fn is_pattern_cond(expr: ast::Expr) -> bool {
+ match expr {
+ ast::Expr::BinExpr(expr)
+ if expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) =>
+ {
+ expr.lhs()
+ .map(is_pattern_cond)
+ .or_else(|| expr.rhs().map(is_pattern_cond))
+ .unwrap_or(false)
+ }
+ ast::Expr::ParenExpr(expr) => expr.expr().map_or(false, is_pattern_cond),
+ ast::Expr::LetExpr(_) => true,
+ _ => false,
+ }
+}
+
+/// Calls `cb` on each expression inside `expr` that is at "tail position".
+/// Does not walk into `break` or `return` expressions.
+/// Note that modifying the tree while iterating it will cause undefined iteration, which might
+/// result in an out-of-bounds panic.
+pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
+ match expr {
+ ast::Expr::BlockExpr(b) => {
+ match b.modifier() {
+ Some(
+ ast::BlockModifier::Async(_)
+ | ast::BlockModifier::Try(_)
+ | ast::BlockModifier::Const(_),
+ ) => return cb(expr),
+
+ Some(ast::BlockModifier::Label(label)) => {
+ for_each_break_expr(Some(label), b.stmt_list(), &mut |b| {
+ cb(&ast::Expr::BreakExpr(b))
+ });
+ }
+ Some(ast::BlockModifier::Unsafe(_)) => (),
+ None => (),
+ }
+ if let Some(stmt_list) = b.stmt_list() {
+ if let Some(e) = stmt_list.tail_expr() {
+ for_each_tail_expr(&e, cb);
+ }
+ }
+ }
+ ast::Expr::IfExpr(if_) => {
+ let mut if_ = if_.clone();
+ loop {
+ if let Some(block) = if_.then_branch() {
+ for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
+ }
+ match if_.else_branch() {
+ Some(ast::ElseBranch::IfExpr(it)) => if_ = it,
+ Some(ast::ElseBranch::Block(block)) => {
+ for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
+ break;
+ }
+ None => break,
+ }
+ }
+ }
+ ast::Expr::LoopExpr(l) => {
+ for_each_break_expr(l.label(), l.loop_body().and_then(|it| it.stmt_list()), &mut |b| {
+ cb(&ast::Expr::BreakExpr(b))
+ })
+ }
+ ast::Expr::MatchExpr(m) => {
+ if let Some(arms) = m.match_arm_list() {
+ arms.arms().filter_map(|arm| arm.expr()).for_each(|e| for_each_tail_expr(&e, cb));
+ }
+ }
+ ast::Expr::ArrayExpr(_)
+ | ast::Expr::AwaitExpr(_)
+ | ast::Expr::BinExpr(_)
+ | ast::Expr::BoxExpr(_)
+ | ast::Expr::BreakExpr(_)
+ | ast::Expr::CallExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::ClosureExpr(_)
+ | ast::Expr::ContinueExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::Literal(_)
+ | ast::Expr::MacroExpr(_)
+ | ast::Expr::MacroStmts(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::ParenExpr(_)
+ | ast::Expr::PathExpr(_)
+ | ast::Expr::PrefixExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::RecordExpr(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::ReturnExpr(_)
+ | ast::Expr::TryExpr(_)
+ | ast::Expr::TupleExpr(_)
+ | ast::Expr::WhileExpr(_)
+ | ast::Expr::LetExpr(_)
+ | ast::Expr::UnderscoreExpr(_)
+ | ast::Expr::YieldExpr(_) => cb(expr),
+ }
+}
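
Similarly, a hedged sketch of how `for_each_tail_expr` can be used (not part of the patch): collecting the text of each tail expression of the first block found in a snippet.

```rust
use syntax::{ast, AstNode, SourceFile};

fn tail_expr_texts(code: &str) -> Vec<String> {
    let file = SourceFile::parse(code).tree();
    let block = file.syntax().descendants().find_map(ast::BlockExpr::cast).expect("no block found");
    let mut tails = Vec::new();
    for_each_tail_expr(&ast::Expr::BlockExpr(block), &mut |e| {
        tails.push(e.syntax().text().to_string())
    });
    tails
}

// e.g. tail_expr_texts("fn f(x: bool) -> i32 { if x { 1 } else { 2 } }") yields ["1", "2"].
```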
+
+pub fn for_each_break_and_continue_expr(
+ label: Option<ast::Label>,
+ body: Option<ast::StmtList>,
+ cb: &mut dyn FnMut(ast::Expr),
+) {
+ let label = label.and_then(|lbl| lbl.lifetime());
+ if let Some(b) = body {
+ let tree_depth_iterator = TreeWithDepthIterator::new(b);
+ for (expr, depth) in tree_depth_iterator {
+ match expr {
+ ast::Expr::BreakExpr(b)
+ if (depth == 0 && b.lifetime().is_none())
+ || eq_label_lt(&label, &b.lifetime()) =>
+ {
+ cb(ast::Expr::BreakExpr(b));
+ }
+ ast::Expr::ContinueExpr(c)
+ if (depth == 0 && c.lifetime().is_none())
+ || eq_label_lt(&label, &c.lifetime()) =>
+ {
+ cb(ast::Expr::ContinueExpr(c));
+ }
+ _ => (),
+ }
+ }
+ }
+}
+
+fn for_each_break_expr(
+ label: Option<ast::Label>,
+ body: Option<ast::StmtList>,
+ cb: &mut dyn FnMut(ast::BreakExpr),
+) {
+ let label = label.and_then(|lbl| lbl.lifetime());
+ if let Some(b) = body {
+ let tree_depth_iterator = TreeWithDepthIterator::new(b);
+ for (expr, depth) in tree_depth_iterator {
+ match expr {
+ ast::Expr::BreakExpr(b)
+ if (depth == 0 && b.lifetime().is_none())
+ || eq_label_lt(&label, &b.lifetime()) =>
+ {
+ cb(b);
+ }
+ _ => (),
+ }
+ }
+ }
+}
+
+fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
+ lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
+}
+
+struct TreeWithDepthIterator {
+ preorder: Preorder<RustLanguage>,
+ depth: u32,
+}
+
+impl TreeWithDepthIterator {
+ fn new(body: ast::StmtList) -> Self {
+ let preorder = body.syntax().preorder();
+ Self { preorder, depth: 0 }
+ }
+}
+
+impl Iterator for TreeWithDepthIterator {
+ type Item = (ast::Expr, u32);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ while let Some(event) = self.preorder.find_map(|ev| match ev {
+ WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter),
+ WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave),
+ }) {
+ match event {
+ WalkEvent::Enter(
+ ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+ ) => {
+ self.depth += 1;
+ }
+ WalkEvent::Leave(
+ ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+ ) => {
+ self.depth -= 1;
+ }
+ WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+ self.depth += 1;
+ }
+ WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+ self.depth -= 1;
+ }
+ WalkEvent::Enter(expr) => return Some((expr, self.depth)),
+ _ => (),
+ }
+ }
+ None
+ }
+}
+
+/// Parses the input token tree as comma separated plain paths.
+pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+ let r_paren = input.r_paren_token();
+ let tokens =
+ input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+ // seeing a keyword means the attribute is unclosed so stop parsing here
+ Some(tok) if tok.kind().is_keyword() => None,
+ // don't include the right token tree parenthesis if it exists
+ tok @ Some(_) if tok == r_paren => None,
+            // the only nodes we can encounter here are other token trees; those are unexpected in this parse, so stop
+ None => None,
+ Some(tok) => Some(tok),
+ });
+ let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+ let paths = input_expressions
+ .into_iter()
+ .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+ .filter_map(|mut tokens| {
+ syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+ ast::Expr::PathExpr(it) => it.path(),
+ _ => None,
+ })
+ })
+ .collect();
+ Some(paths)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
new file mode 100644
index 000000000..2f531ca0c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -0,0 +1,533 @@
+[
+ (
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(0),
+ },
+ },
+ [
+ FileSymbol {
+ name: "Alias",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: TYPE_ALIAS,
+ range: 397..417,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 402..407,
+ },
+ },
+ kind: TypeAlias,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "CONST",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: CONST,
+ range: 340..361,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 346..351,
+ },
+ },
+ kind: Const,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "CONST_WITH_INNER",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: CONST,
+ range: 520..592,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 526..542,
+ },
+ },
+ kind: Const,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Enum",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: ENUM,
+ range: 185..207,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 190..194,
+ },
+ },
+ kind: Enum,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Macro",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MACRO_DEF,
+ range: 153..168,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 159..164,
+ },
+ },
+ kind: Macro,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "STATIC",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STATIC,
+ range: 362..396,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 369..375,
+ },
+ },
+ kind: Static,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Struct",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 170..184,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 177..183,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "StructFromMacro",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ MacroFile(
+ MacroFile {
+ macro_call_id: MacroCallId(
+ 0,
+ ),
+ },
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 0..22,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 6..21,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "StructInFn",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 318..336,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 325..335,
+ },
+ },
+ kind: Struct,
+ container_name: Some(
+ "main",
+ ),
+ },
+ FileSymbol {
+ name: "StructInNamedConst",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 555..581,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 562..580,
+ },
+ },
+ kind: Struct,
+ container_name: Some(
+ "CONST_WITH_INNER",
+ ),
+ },
+ FileSymbol {
+ name: "StructInUnnamedConst",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 479..507,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 486..506,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Trait",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: TRAIT,
+ range: 261..300,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 267..272,
+ },
+ },
+ kind: Trait,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "Union",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: UNION,
+ range: 208..222,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 214..219,
+ },
+ },
+ kind: Union,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "a_mod",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MODULE,
+ range: 419..457,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 423..428,
+ },
+ },
+ kind: Module,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "b_mod",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MODULE,
+ range: 594..604,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 598..603,
+ },
+ },
+ kind: Module,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "define_struct",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MACRO_RULES,
+ range: 51..131,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 64..77,
+ },
+ },
+ kind: Macro,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "impl_fn",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 242..257,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 245..252,
+ },
+ },
+ kind: Function,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "macro_rules_macro",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: MACRO_RULES,
+ range: 1..48,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 14..31,
+ },
+ },
+ kind: Macro,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "main",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 302..338,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 305..309,
+ },
+ },
+ kind: Function,
+ container_name: None,
+ },
+ FileSymbol {
+ name: "trait_fn",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 279..298,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 282..290,
+ },
+ },
+ kind: Function,
+ container_name: Some(
+ "Trait",
+ ),
+ },
+ ],
+ ),
+ (
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(1),
+ },
+ },
+ [
+ FileSymbol {
+ name: "StructInModA",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 435..455,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 442..454,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ ],
+ ),
+ (
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(2),
+ },
+ },
+ [
+ FileSymbol {
+ name: "StructInModB",
+ loc: DeclarationLocation {
+ hir_file_id: HirFileId(
+ FileId(
+ FileId(
+ 1,
+ ),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 0..20,
+ },
+ name_ptr: SyntaxNodePtr {
+ kind: NAME,
+ range: 7..19,
+ },
+ },
+ kind: Struct,
+ container_name: None,
+ },
+ ],
+ ),
+]
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
new file mode 100644
index 000000000..5042f6d81
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
@@ -0,0 +1,284 @@
+//! Generates the descriptor structures for unstable features from the Unstable Book.
+use std::{borrow::Cow, fs, path::Path};
+
+use itertools::Itertools;
+use stdx::format_to;
+use test_utils::project_root;
+use xshell::{cmd, Shell};
+
+/// This clones the rustc repo, so it is not worth keeping up-to-date. We update it
+/// manually by un-ignoring the test from time to time.
+#[test]
+#[ignore]
+fn sourcegen_lint_completions() {
+ let sh = &Shell::new().unwrap();
+
+ let rust_repo = project_root().join("./target/rust");
+ if !rust_repo.exists() {
+ cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}")
+ .run()
+ .unwrap();
+ }
+
+ let mut contents = String::from(
+ r"
+#[derive(Clone)]
+pub struct Lint {
+ pub label: &'static str,
+ pub description: &'static str,
+}
+pub struct LintGroup {
+ pub lint: Lint,
+ pub children: &'static [&'static str],
+}
+",
+ );
+
+ generate_lint_descriptor(sh, &mut contents);
+ contents.push('\n');
+
+ generate_feature_descriptor(&mut contents, &rust_repo.join("src/doc/unstable-book/src"));
+ contents.push('\n');
+
+ let lints_json = project_root().join("./target/clippy_lints.json");
+ cmd!(
+ sh,
+ "curl https://rust-lang.github.io/rust-clippy/master/lints.json --output {lints_json}"
+ )
+ .run()
+ .unwrap();
+ generate_descriptor_clippy(&mut contents, &lints_json);
+
+ let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
+
+ let destination = project_root().join("crates/ide_db/src/generated/lints.rs");
+ sourcegen::ensure_file_contents(destination.as_path(), &contents);
+}
+
+fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
+ // FIXME: rustdoc currently requires an input file for -Whelp cc https://github.com/rust-lang/rust/pull/88831
+ let file = project_root().join(file!());
+ let stdout = cmd!(sh, "rustdoc -W help {file}").read().unwrap();
+ let start_lints = stdout.find("---- ------- -------").unwrap();
+ let start_lint_groups = stdout.find("---- ---------").unwrap();
+ let start_lints_rustdoc =
+ stdout.find("Lint checks provided by plugins loaded by this crate:").unwrap();
+ let start_lint_groups_rustdoc =
+ stdout.find("Lint groups provided by plugins loaded by this crate:").unwrap();
+
+ buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
+ buf.push('\n');
+
+ let lints = stdout[start_lints..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
+ let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+ let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
+ (name.trim(), Cow::Borrowed(description.trim()), vec![])
+ });
+ let lint_groups =
+ stdout[start_lint_groups..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
+ let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+ (
+ name.trim(),
+ format!("lint group for: {}", lints.trim()).into(),
+ lints
+ .split_ascii_whitespace()
+ .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+ .collect(),
+ )
+ });
+
+ let lints = lints
+ .chain(lint_groups)
+ .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
+ .collect::<Vec<_>>();
+ for (name, description, ..) in &lints {
+ push_lint_completion(buf, &name.replace('-', "_"), description);
+ }
+ buf.push_str("];\n");
+ buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
+ for (name, description, children) in &lints {
+ if !children.is_empty() {
+ // HACK: warnings is emitted with a general description, not with its members
+ if name == &"warnings" {
+ push_lint_group(buf, name, description, &Vec::new());
+ continue;
+ }
+ push_lint_group(buf, &name.replace('-', "_"), description, children);
+ }
+ }
+ buf.push('\n');
+ buf.push_str("];\n");
+
+ // rustdoc
+
+ buf.push('\n');
+ buf.push_str(r#"pub const RUSTDOC_LINTS: &[Lint] = &["#);
+ buf.push('\n');
+
+ let lints_rustdoc =
+ stdout[start_lints_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(|line| {
+ let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+ let (_default_level, description) =
+ rest.trim().split_once(char::is_whitespace).unwrap();
+ (name.trim(), Cow::Borrowed(description.trim()), vec![])
+ });
+ let lint_groups_rustdoc =
+ stdout[start_lint_groups_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(
+ |line| {
+ let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+ (
+ name.trim(),
+ format!("lint group for: {}", lints.trim()).into(),
+ lints
+ .split_ascii_whitespace()
+ .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+ .collect(),
+ )
+ },
+ );
+
+ let lints_rustdoc = lints_rustdoc
+ .chain(lint_groups_rustdoc)
+ .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
+ .collect::<Vec<_>>();
+
+ for (name, description, ..) in &lints_rustdoc {
+ push_lint_completion(buf, &name.replace('-', "_"), description)
+ }
+ buf.push_str("];\n");
+
+ buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
+ for (name, description, children) in &lints_rustdoc {
+ if !children.is_empty() {
+ push_lint_group(buf, &name.replace('-', "_"), description, children);
+ }
+ }
+ buf.push('\n');
+ buf.push_str("];\n");
+}
+
+fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
+ let mut features = ["language-features", "library-features"]
+ .into_iter()
+ .flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
+ .filter(|path| {
+            // Get all `.md` files
+ path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
+ })
+ .map(|path| {
+ let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace('-', "_");
+ let doc = fs::read_to_string(path).unwrap();
+ (feature_ident, doc)
+ })
+ .collect::<Vec<_>>();
+ features.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
+
+ buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
+ for (feature_ident, doc) in features.into_iter() {
+ push_lint_completion(buf, &feature_ident, &doc)
+ }
+ buf.push('\n');
+ buf.push_str("];\n");
+}
+
+#[derive(Default)]
+struct ClippyLint {
+ help: String,
+ id: String,
+}
+
+fn unescape(s: &str) -> String {
+ s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
+}
+
+fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
+ let file_content = std::fs::read_to_string(path).unwrap();
+ let mut clippy_lints: Vec<ClippyLint> = Vec::new();
+ let mut clippy_groups: std::collections::BTreeMap<String, Vec<String>> = Default::default();
+
+ for line in file_content.lines().map(|line| line.trim()) {
+ if let Some(line) = line.strip_prefix(r#""id": ""#) {
+ let clippy_lint = ClippyLint {
+ id: line.strip_suffix(r#"","#).expect("should be suffixed by comma").into(),
+ help: String::new(),
+ };
+ clippy_lints.push(clippy_lint)
+ } else if let Some(line) = line.strip_prefix(r#""group": ""#) {
+ if let Some(group) = line.strip_suffix("\",") {
+ clippy_groups
+ .entry(group.to_owned())
+ .or_default()
+ .push(clippy_lints.last().unwrap().id.clone());
+ }
+ } else if let Some(line) = line.strip_prefix(r#""docs": ""#) {
+ let prefix_to_strip = r#" ### What it does"#;
+ let line = match line.strip_prefix(prefix_to_strip) {
+ Some(line) => line,
+ None => {
+ eprintln!("unexpected clippy prefix for {}", clippy_lints.last().unwrap().id);
+ continue;
+ }
+ };
+            // Only take the description; anything more would be a lot of additional data to embed in
+            // the exe, which seems unnecessary.
+ let up_to = line.find(r#"###"#).expect("no second section found?");
+ let line = &line[..up_to];
+
+ let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
+ clippy_lint.help = unescape(line).trim().to_string();
+ }
+ }
+ clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
+
+ buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
+ buf.push('\n');
+ for clippy_lint in clippy_lints.into_iter() {
+ let lint_ident = format!("clippy::{}", clippy_lint.id);
+ let doc = clippy_lint.help;
+ push_lint_completion(buf, &lint_ident, &doc);
+ }
+ buf.push_str("];\n");
+
+ buf.push_str(r#"pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &["#);
+ for (id, children) in clippy_groups {
+ let children = children.iter().map(|id| format!("clippy::{}", id)).collect::<Vec<_>>();
+ if !children.is_empty() {
+ let lint_ident = format!("clippy::{}", id);
+ let description = format!("lint group for: {}", children.iter().join(", "));
+ push_lint_group(buf, &lint_ident, &description, &children);
+ }
+ }
+ buf.push('\n');
+ buf.push_str("];\n");
+}
+
+fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
+ format_to!(
+ buf,
+ r###" Lint {{
+ label: "{}",
+ description: r##"{}"##,
+ }},"###,
+ label,
+ description,
+ );
+}
+
+fn push_lint_group(buf: &mut String, label: &str, description: &str, children: &[String]) {
+ buf.push_str(
+ r###" LintGroup {
+ lint:
+ "###,
+ );
+
+ push_lint_completion(buf, label, description);
+
+ let children = format!("&[{}]", children.iter().map(|it| format!("\"{}\"", it)).join(", "));
+ format_to!(
+ buf,
+ r###"
+ children: {},
+ }},"###,
+ children,
+ );
+}
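
For reference, the generated `lints.rs` entries produced by the two helpers above have roughly this shape (the concrete label, description, and children here are illustrative, not taken from a real run):

```rust
Lint {
    label: "clippy::needless_return",
    description: r##"checks for `return` statements at the end of a block"##,
},
LintGroup {
    lint:
        Lint {
            label: "clippy::style",
            description: r##"lint group for: clippy::needless_return, ..."##,
        },
    children: &["clippy::needless_return"],
},
```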
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
new file mode 100644
index 000000000..6a7ea7c19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
@@ -0,0 +1,273 @@
+//! Functionality for obtaining data related to traits from the DB.
+
+use crate::{defs::Definition, RootDatabase};
+use hir::{db::HirDatabase, AsAssocItem, Semantics};
+use rustc_hash::FxHashSet;
+use syntax::{ast, AstNode};
+
+/// Given the `impl` block, attempts to find the trait this `impl` corresponds to.
+pub fn resolve_target_trait(
+ sema: &Semantics<'_, RootDatabase>,
+ impl_def: &ast::Impl,
+) -> Option<hir::Trait> {
+ let ast_path =
+ impl_def.trait_().map(|it| it.syntax().clone()).and_then(ast::PathType::cast)?.path()?;
+
+ match sema.resolve_path(&ast_path) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
+ _ => None,
+ }
+}
+
+/// Given the `impl` block, returns the list of associated items (e.g. functions or types) that are
+/// missing in this `impl` block.
+pub fn get_missing_assoc_items(
+ sema: &Semantics<'_, RootDatabase>,
+ impl_def: &ast::Impl,
+) -> Vec<hir::AssocItem> {
+ let imp = match sema.to_def(impl_def) {
+ Some(it) => it,
+ None => return vec![],
+ };
+
+ // Names must be unique between constants and functions. However, type aliases
+ // may share the same name as a function or constant.
+ let mut impl_fns_consts = FxHashSet::default();
+ let mut impl_type = FxHashSet::default();
+
+ for item in imp.items(sema.db) {
+ match item {
+ hir::AssocItem::Function(it) => {
+ impl_fns_consts.insert(it.name(sema.db).to_string());
+ }
+ hir::AssocItem::Const(it) => {
+ if let Some(name) = it.name(sema.db) {
+ impl_fns_consts.insert(name.to_string());
+ }
+ }
+ hir::AssocItem::TypeAlias(it) => {
+ impl_type.insert(it.name(sema.db).to_string());
+ }
+ }
+ }
+
+ resolve_target_trait(sema, impl_def).map_or(vec![], |target_trait| {
+ target_trait
+ .items(sema.db)
+ .into_iter()
+ .filter(|i| match i {
+ hir::AssocItem::Function(f) => {
+ !impl_fns_consts.contains(&f.name(sema.db).to_string())
+ }
+ hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()),
+ hir::AssocItem::Const(c) => c
+ .name(sema.db)
+ .map(|n| !impl_fns_consts.contains(&n.to_string()))
+ .unwrap_or_default(),
+ })
+ .collect()
+ })
+}
+
+/// Converts associated trait impl items to their trait definition counterpart
+pub(crate) fn convert_to_def_in_trait(db: &dyn HirDatabase, def: Definition) -> Definition {
+ (|| {
+ let assoc = def.as_assoc_item(db)?;
+ let trait_ = assoc.containing_trait_impl(db)?;
+ assoc_item_of_trait(db, assoc, trait_)
+ })()
+ .unwrap_or(def)
+}
+
+/// If this is a trait (impl) assoc item, returns the assoc item of the corresponding trait definition.
+pub(crate) fn as_trait_assoc_def(db: &dyn HirDatabase, def: Definition) -> Option<Definition> {
+ let assoc = def.as_assoc_item(db)?;
+ let trait_ = match assoc.container(db) {
+ hir::AssocItemContainer::Trait(_) => return Some(def),
+ hir::AssocItemContainer::Impl(i) => i.trait_(db),
+ }?;
+ assoc_item_of_trait(db, assoc, trait_)
+}
+
+fn assoc_item_of_trait(
+ db: &dyn HirDatabase,
+ assoc: hir::AssocItem,
+ trait_: hir::Trait,
+) -> Option<Definition> {
+ use hir::AssocItem::*;
+ let name = match assoc {
+ Function(it) => it.name(db),
+ Const(it) => it.name(db)?,
+ TypeAlias(it) => it.name(db),
+ };
+ let item = trait_.items(db).into_iter().find(|it| match (it, assoc) {
+ (Function(trait_func), Function(_)) => trait_func.name(db) == name,
+ (Const(trait_konst), Const(_)) => trait_konst.name(db).map_or(false, |it| it == name),
+ (TypeAlias(trait_type_alias), TypeAlias(_)) => trait_type_alias.name(db) == name,
+ _ => false,
+ })?;
+ Some(Definition::from(item))
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::ChangeFixture, FilePosition};
+ use expect_test::{expect, Expect};
+ use hir::Semantics;
+ use syntax::ast::{self, AstNode};
+
+ use crate::RootDatabase;
+
+    /// Creates analysis from a multi-file fixture and returns the position marked with `$0`.
+ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ let mut database = RootDatabase::default();
+ database.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) =
+ change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (database, FilePosition { file_id, offset })
+ }
+
+ fn check_trait(ra_fixture: &str, expect: Expect) {
+ let (db, position) = position(ra_fixture);
+ let sema = Semantics::new(&db);
+ let file = sema.parse(position.file_id);
+ let impl_block: ast::Impl =
+ sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
+ let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block);
+ let actual = match trait_ {
+ Some(trait_) => trait_.name(&db).to_string(),
+ None => String::new(),
+ };
+ expect.assert_eq(&actual);
+ }
+
+ fn check_missing_assoc(ra_fixture: &str, expect: Expect) {
+ let (db, position) = position(ra_fixture);
+ let sema = Semantics::new(&db);
+ let file = sema.parse(position.file_id);
+ let impl_block: ast::Impl =
+ sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
+ let items = crate::traits::get_missing_assoc_items(&sema, &impl_block);
+ let actual = items
+ .into_iter()
+ .map(|item| item.name(&db).unwrap().to_string())
+ .collect::<Vec<_>>()
+ .join("\n");
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn resolve_trait() {
+ check_trait(
+ r#"
+pub trait Foo {
+ fn bar();
+}
+impl Foo for u8 {
+ $0
+}
+ "#,
+ expect![["Foo"]],
+ );
+ check_trait(
+ r#"
+pub trait Foo {
+ fn bar();
+}
+impl Foo for u8 {
+ fn bar() {
+ fn baz() {
+ $0
+ }
+ baz();
+ }
+}
+ "#,
+ expect![["Foo"]],
+ );
+ check_trait(
+ r#"
+pub trait Foo {
+ fn bar();
+}
+pub struct Bar;
+impl Bar {
+ $0
+}
+ "#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn missing_assoc_items() {
+ check_missing_assoc(
+ r#"
+pub trait Foo {
+ const FOO: u8;
+ fn bar();
+}
+impl Foo for u8 {
+ $0
+}"#,
+ expect![[r#"
+ FOO
+ bar"#]],
+ );
+
+ check_missing_assoc(
+ r#"
+pub trait Foo {
+ const FOO: u8;
+ fn bar();
+}
+impl Foo for u8 {
+ const FOO: u8 = 10;
+ $0
+}"#,
+ expect![[r#"
+ bar"#]],
+ );
+
+ check_missing_assoc(
+ r#"
+pub trait Foo {
+ const FOO: u8;
+ fn bar();
+}
+impl Foo for u8 {
+ const FOO: u8 = 10;
+ fn bar() {$0}
+}"#,
+ expect![[r#""#]],
+ );
+
+ check_missing_assoc(
+ r#"
+pub struct Foo;
+impl Foo {
+ fn bar() {$0}
+}"#,
+ expect![[r#""#]],
+ );
+
+ check_missing_assoc(
+ r#"
+trait Tr {
+ fn required();
+}
+macro_rules! m {
+ () => { fn required() {} };
+}
+impl Tr for () {
+ m!();
+ $0
+}
+
+ "#,
+ expect![[r#""#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
new file mode 100644
index 000000000..46f47f258
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
@@ -0,0 +1,86 @@
+//! This module contains structures for filtering the expected types.
+//! The typical use case for the structures in this module is, for example, a situation where
+//! you need to process only certain `Enum`s.
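+//!
+//! Illustrative sketch (an assumption about typical usage, not from the upstream docs;
+//! `sema: &Semantics<'_, RootDatabase>` and `ty: &hir::Type` are assumed to be in scope):
+//!
+//! ```ignore
+//! if let Some(try_enum) = TryEnum::from_ty(sema, &ty) {
+//!     let ctor = try_enum.happy_case(); // "Ok" for Result, "Some" for Option
+//! }
+//! ```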
+
+use std::iter;
+
+use hir::Semantics;
+use syntax::ast::{self, make, Pat};
+
+use crate::RootDatabase;
+
+/// Enum types that implement `std::ops::Try` trait.
+#[derive(Clone, Copy)]
+pub enum TryEnum {
+ Result,
+ Option,
+}
+
+impl TryEnum {
+ const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result];
+
+ /// Returns `Some(..)` if the provided type is an enum that implements `std::ops::Try`.
+ pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type) -> Option<TryEnum> {
+ let enum_ = match ty.as_adt() {
+ Some(hir::Adt::Enum(it)) => it,
+ _ => return None,
+ };
+ TryEnum::ALL.iter().find_map(|&var| {
+ if enum_.name(sema.db).to_smol_str() == var.type_name() {
+ return Some(var);
+ }
+ None
+ })
+ }
+
+ pub fn happy_case(self) -> &'static str {
+ match self {
+ TryEnum::Result => "Ok",
+ TryEnum::Option => "Some",
+ }
+ }
+
+ pub fn sad_pattern(self) -> ast::Pat {
+ match self {
+ TryEnum::Result => make::tuple_struct_pat(
+ make::ext::ident_path("Err"),
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ TryEnum::Option => make::ext::simple_ident_pat(make::name("None")).into(),
+ }
+ }
+
+ pub fn happy_pattern(self, pat: Pat) -> ast::Pat {
+ match self {
+ TryEnum::Result => {
+ make::tuple_struct_pat(make::ext::ident_path("Ok"), iter::once(pat)).into()
+ }
+ TryEnum::Option => {
+ make::tuple_struct_pat(make::ext::ident_path("Some"), iter::once(pat)).into()
+ }
+ }
+ }
+
+ pub fn happy_pattern_wildcard(self) -> ast::Pat {
+ match self {
+ TryEnum::Result => make::tuple_struct_pat(
+ make::ext::ident_path("Ok"),
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ TryEnum::Option => make::tuple_struct_pat(
+ make::ext::ident_path("Some"),
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ }
+ }
+
+ fn type_name(self) -> &'static str {
+ match self {
+ TryEnum::Result => "Result",
+ TryEnum::Option => "Option",
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs
new file mode 100644
index 000000000..39431bed3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_contructor.rs
@@ -0,0 +1,34 @@
+//! Functionality for generating trivial constructors
+
+use hir::StructKind;
+use syntax::ast;
+
+/// Given a type, returns the trivial constructor (if one exists).
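+///
+/// Illustrative sketch (not from the upstream docs): for `enum Foo { Bar }` this returns the
+/// path expression `Foo::Bar`; for a unit struct `struct S;` it returns `S`; for any other
+/// shape it returns `None`.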
+pub fn use_trivial_constructor(
+ db: &crate::RootDatabase,
+ path: ast::Path,
+ ty: &hir::Type,
+) -> Option<ast::Expr> {
+ match ty.as_adt() {
+ Some(hir::Adt::Enum(x)) => {
+ if let &[variant] = &*x.variants(db) {
+ if variant.kind(db) == hir::StructKind::Unit {
+ let path = ast::make::path_qualified(
+ path,
+ syntax::ast::make::path_segment(ast::make::name_ref(
+ &variant.name(db).to_smol_str(),
+ )),
+ );
+
+ return Some(syntax::ast::make::expr_path(path));
+ }
+ }
+ }
+ Some(hir::Adt::Struct(x)) if x.kind(db) == StructKind::Unit => {
+ return Some(syntax::ast::make::expr_path(path));
+ }
+ _ => {}
+ }
+
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
new file mode 100644
index 000000000..e221425ed
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "ide-diagnostics"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+
+
+either = "1.7.0"
+
+profile = { path = "../profile", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
new file mode 100644
index 000000000..d12594a4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -0,0 +1,30 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: break-outside-of-loop
+//
+// This diagnostic is triggered if the `break` keyword is used outside of a loop.
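+//
+// Example (an illustrative sketch mirroring the test below):
+//
+// ```rust
+// fn foo() { break; } // error: break outside of loop
+// ```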
+pub(crate) fn break_outside_of_loop(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::BreakOutsideOfLoop,
+) -> Diagnostic {
+ Diagnostic::new(
+ "break-outside-of-loop",
+ "break outside of loop",
+ ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn break_outside_of_loop() {
+ check_diagnostics(
+ r#"
+fn foo() { break; }
+ //^^^^^ error: break outside of loop
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
new file mode 100644
index 000000000..2b7105362
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -0,0 +1,203 @@
+//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
+//! expressions and patterns.
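+//!
+//! Illustrative sketch (not from the upstream docs; assumes a struct `Foo { field: i32 }`
+//! and a value `it`):
+//!
+//! ```ignore
+//! let field = 92;
+//! Foo { field: field }; // the diagnostic offers: Foo { field }
+//! let Foo { field: field } = it; // and: let Foo { field } = it
+//! ```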
+
+use ide_db::{base_db::FileId, source_change::SourceChange};
+use syntax::{ast, match_ast, AstNode, SyntaxNode};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, Severity};
+
+pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) {
+ match_ast! {
+ match node {
+ ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it),
+ ast::RecordPat(it) => check_pat_field_shorthand(acc, file_id, it),
+ _ => ()
+ }
+ };
+}
+
+fn check_expr_field_shorthand(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ record_expr: ast::RecordExpr,
+) {
+ let record_field_list = match record_expr.record_expr_field_list() {
+ Some(it) => it,
+ None => return,
+ };
+ for record_field in record_field_list.fields() {
+ let (name_ref, expr) = match record_field.name_ref().zip(record_field.expr()) {
+ Some(it) => it,
+ None => continue,
+ };
+
+ let field_name = name_ref.syntax().text().to_string();
+ let field_expr = expr.syntax().text().to_string();
+ let field_name_is_tup_index = name_ref.as_tuple_field().is_some();
+ if field_name != field_expr || field_name_is_tup_index {
+ continue;
+ }
+
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(record_field.syntax().text_range());
+ edit_builder.insert(record_field.syntax().text_range().start(), field_name);
+ let edit = edit_builder.finish();
+
+ let field_range = record_field.syntax().text_range();
+ acc.push(
+ Diagnostic::new("use-field-shorthand", "Shorthand struct initialization", field_range)
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "use_expr_field_shorthand",
+ "Use struct shorthand initialization",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
+ );
+ }
+}
+
+fn check_pat_field_shorthand(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ record_pat: ast::RecordPat,
+) {
+ let record_pat_field_list = match record_pat.record_pat_field_list() {
+ Some(it) => it,
+ None => return,
+ };
+ for record_pat_field in record_pat_field_list.fields() {
+ let (name_ref, pat) = match record_pat_field.name_ref().zip(record_pat_field.pat()) {
+ Some(it) => it,
+ None => continue,
+ };
+
+ let field_name = name_ref.syntax().text().to_string();
+ let field_pat = pat.syntax().text().to_string();
+ let field_name_is_tup_index = name_ref.as_tuple_field().is_some();
+ if field_name != field_pat || field_name_is_tup_index {
+ continue;
+ }
+
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(record_pat_field.syntax().text_range());
+ edit_builder.insert(record_pat_field.syntax().text_range().start(), field_name);
+ let edit = edit_builder.finish();
+
+ let field_range = record_pat_field.syntax().text_range();
+ acc.push(
+ Diagnostic::new("use-field-shorthand", "Shorthand struct pattern", field_range)
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "use_pat_field_shorthand",
+ "Use struct field shorthand",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
+ );
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn test_check_expr_field_shorthand() {
+ check_diagnostics(
+ r#"
+struct A { a: &'static str }
+fn main() { A { a: "hello" }; }
+"#,
+ );
+ check_diagnostics(
+ r#"
+struct A(usize);
+fn main() { A { 0: 0 }; }
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str }
+fn main() {
+ let a = "haha";
+ A { a$0: a };
+}
+"#,
+ r#"
+struct A { a: &'static str }
+fn main() {
+ let a = "haha";
+ A { a };
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn main() {
+ let a = "haha";
+ let b = "bb";
+ A { a$0: a, b };
+}
+"#,
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn main() {
+ let a = "haha";
+ let b = "bb";
+ A { a, b };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_check_pat_field_shorthand() {
+ check_diagnostics(
+ r#"
+struct A { a: &'static str }
+fn f(a: A) { let A { a: hello } = a; }
+"#,
+ );
+ check_diagnostics(
+ r#"
+struct A(usize);
+fn f(a: A) { let A { 0: 0 } = a; }
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str }
+fn f(a: A) {
+ let A { a$0: a } = a;
+}
+"#,
+ r#"
+struct A { a: &'static str }
+fn f(a: A) {
+ let A { a } = a;
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn f(a: A) {
+ let A { a$0: a, b } = a;
+}
+"#,
+ r#"
+struct A { a: &'static str, b: &'static str }
+fn f(a: A) {
+ let A { a, b } = a;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
new file mode 100644
index 000000000..97ea5c456
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -0,0 +1,144 @@
+use cfg::DnfExpr;
+use stdx::format_to;
+
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: inactive-code
+//
+// This diagnostic is shown for code with inactive `#[cfg]` attributes.
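+//
+// Example (an illustrative sketch mirroring the tests below; `no` is assumed to be a cfg
+// option that is not enabled):
+//
+// ```rust
+// #[cfg(no)] fn f() {} // weak: code is inactive due to #[cfg] directives: no is disabled
+// ```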
+pub(crate) fn inactive_code(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::InactiveCode,
+) -> Option<Diagnostic> {
+ // If there's inactive code somewhere in a macro, don't propagate to the call-site.
+ if d.node.file_id.is_macro() {
+ return None;
+ }
+
+ let inactive = DnfExpr::new(d.cfg.clone()).why_inactive(&d.opts);
+ let mut message = "code is inactive due to #[cfg] directives".to_string();
+
+ if let Some(inactive) = inactive {
+ let inactive_reasons = inactive.to_string();
+
+ if inactive_reasons.is_empty() {
+ format_to!(message);
+ } else {
+ format_to!(message, ": {}", inactive);
+ }
+ }
+
+ let res = Diagnostic::new(
+ "inactive-code",
+ message,
+ ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_unused(true);
+ Some(res)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig};
+
+ pub(crate) fn check(ra_fixture: &str) {
+ let config = DiagnosticsConfig::default();
+ check_diagnostics_with_config(config, ra_fixture)
+ }
+
+ #[test]
+ fn cfg_diagnostics() {
+ check(
+ r#"
+fn f() {
+ // The three g̶e̶n̶d̶e̶r̶s̶ statements:
+
+ #[cfg(a)] fn f() {} // Item statement
+ //^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ #[cfg(a)] {} // Expression statement
+ //^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ #[cfg(a)] let x = 0; // let statement
+ //^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+
+ abc(#[cfg(a)] 0);
+ //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ let x = Struct {
+ #[cfg(a)] f: 0,
+ //^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ };
+ match () {
+ () => (),
+ #[cfg(a)] () => (),
+ //^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+ }
+
+ #[cfg(a)] 0 // Trailing expression of block
+ //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn inactive_item() {
+ // Additional tests in `cfg` crate. This only tests disabled cfgs.
+
+ check(
+ r#"
+ #[cfg(no)] pub fn f() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+
+ #[cfg(no)] #[cfg(no2)] mod m;
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
+
+ #[cfg(all(not(a), b))] enum E {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
+
+ #[cfg(feature = "std")] use std;
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: feature = "std" is disabled
+
+ #[cfg(any())] pub fn f() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
+"#,
+ );
+ }
+
+ #[test]
+ fn inactive_assoc_item() {
+ // FIXME these currently don't work, hence the *
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ #[cfg(any())] pub fn f() {}
+ //*************************** weak: code is inactive due to #[cfg] directives
+}
+
+trait Bar {
+ #[cfg(any())] pub fn f() {}
+ //*************************** weak: code is inactive due to #[cfg] directives
+}
+"#,
+ );
+ }
+
+    /// Tests that `cfg` attributes behind `cfg_attr` are handled properly.
+ #[test]
+ fn inactive_via_cfg_attr() {
+ cov_mark::check!(cfg_attr_active);
+ check(
+ r#"
+ #[cfg_attr(not(never), cfg(no))] fn f() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+
+ #[cfg_attr(not(never), cfg(not(no)))] fn f() {}
+
+ #[cfg_attr(never, cfg(no))] fn g() {}
+
+ #[cfg_attr(not(never), inline, cfg(no))] fn h() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
new file mode 100644
index 000000000..6a78c08d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -0,0 +1,486 @@
+use hir::{db::AstDatabase, InFile};
+use ide_db::{assists::Assist, defs::NameClass};
+use syntax::AstNode;
+
+use crate::{
+ // references::rename::rename_with_semantics,
+ unresolved_fix,
+ Diagnostic,
+ DiagnosticsContext,
+ Severity,
+};
+
+// Diagnostic: incorrect-ident-case
+//
+// This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention].
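+//
+// Example (an illustrative sketch mirroring the tests below):
+//
+// ```rust
+// fn NonSnakeCaseName() {} // weak: should have snake_case name, e.g. `non_snake_case_name`
+// ```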
+pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic {
+ Diagnostic::new(
+ "incorrect-ident-case",
+ format!(
+ "{} `{}` should have {} name, e.g. `{}`",
+ d.ident_type, d.ident_text, d.expected_case, d.suggested_text
+ ),
+ ctx.sema.diagnostics_display_range(InFile::new(d.file, d.ident.clone().into())).range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.file)?;
+ let name_node = d.ident.to_node(&root);
+ let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?;
+
+ let name_node = InFile::new(d.file, name_node.syntax());
+ let frange = name_node.original_file_range(ctx.sema.db);
+
+ let label = format!("Rename to {}", d.suggested_text);
+ let mut res = unresolved_fix("change_case", &label, frange.range);
+ if ctx.resolve.should_resolve(&res.id) {
+ let source_change = def.rename(&ctx.sema, &d.suggested_text);
+ res.source_change = Some(source_change.ok().unwrap_or_default());
+ }
+
+ Some(vec![res])
+}
+
+#[cfg(test)]
+mod change_case {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn test_rename_incorrect_case() {
+ check_fix(
+ r#"
+pub struct test_struct$0 { one: i32 }
+
+pub fn some_fn(val: test_struct) -> test_struct {
+ test_struct { one: val.one + 1 }
+}
+"#,
+ r#"
+pub struct TestStruct { one: i32 }
+
+pub fn some_fn(val: TestStruct) -> TestStruct {
+ TestStruct { one: val.one + 1 }
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+pub fn some_fn(NonSnakeCase$0: u8) -> u8 {
+ NonSnakeCase
+}
+"#,
+ r#"
+pub fn some_fn(non_snake_case: u8) -> u8 {
+ non_snake_case
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+pub fn SomeFn$0(val: u8) -> u8 {
+ if val != 0 { SomeFn(val - 1) } else { val }
+}
+"#,
+ r#"
+pub fn some_fn(val: u8) -> u8 {
+ if val != 0 { some_fn(val - 1) } else { val }
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+fn some_fn() {
+ let whatAWeird_Formatting$0 = 10;
+ another_func(whatAWeird_Formatting);
+}
+"#,
+ r#"
+fn some_fn() {
+ let what_aweird_formatting = 10;
+ another_func(what_aweird_formatting);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_uppercase_const_no_diagnostics() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ const ANOTHER_ITEM: &str = "some_item";
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_incorrect_case_struct_method() {
+ check_fix(
+ r#"
+pub struct TestStruct;
+
+impl TestStruct {
+ pub fn SomeFn$0() -> TestStruct {
+ TestStruct
+ }
+}
+"#,
+ r#"
+pub struct TestStruct;
+
+impl TestStruct {
+ pub fn some_fn() -> TestStruct {
+ TestStruct
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() {
+ check_diagnostics(
+ r#"
+fn FOO() {}
+// ^^^ 💡 weak: Function `FOO` should have snake_case name, e.g. `foo`
+"#,
+ );
+ check_fix(r#"fn FOO$0() {}"#, r#"fn foo() {}"#);
+ }
+
+ #[test]
+ fn incorrect_function_name() {
+ check_diagnostics(
+ r#"
+fn NonSnakeCaseName() {}
+// ^^^^^^^^^^^^^^^^ 💡 weak: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_function_params() {
+ check_diagnostics(
+ r#"
+fn foo(SomeParam: u8) {}
+ // ^^^^^^^^^ 💡 weak: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
+
+fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
+ // ^^^^^^^^^^ 💡 weak: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_variable_names() {
+ check_diagnostics(
+ r#"
+fn foo() {
+ let SOME_VALUE = 10;
+ // ^^^^^^^^^^ 💡 weak: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
+ let AnotherValue = 20;
+ // ^^^^^^^^^^^^ 💡 weak: Variable `AnotherValue` should have snake_case name, e.g. `another_value`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_struct_names() {
+ check_diagnostics(
+ r#"
+struct non_camel_case_name {}
+ // ^^^^^^^^^^^^^^^^^^^ 💡 weak: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+
+struct SCREAMING_CASE {}
+ // ^^^^^^^^^^^^^^ 💡 weak: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+"#,
+ );
+ }
+
+ #[test]
+ fn no_diagnostic_for_camel_cased_acronyms_in_struct_name() {
+ check_diagnostics(
+ r#"
+struct AABB {}
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_struct_field() {
+ check_diagnostics(
+ r#"
+struct SomeStruct { SomeField: u8 }
+ // ^^^^^^^^^ 💡 weak: Field `SomeField` should have snake_case name, e.g. `some_field`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_enum_names() {
+ check_diagnostics(
+ r#"
+enum some_enum { Val(u8) }
+ // ^^^^^^^^^ 💡 weak: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+
+enum SOME_ENUM {}
+ // ^^^^^^^^^ 💡 weak: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+"#,
+ );
+ }
+
+ #[test]
+ fn no_diagnostic_for_camel_cased_acronyms_in_enum_name() {
+ check_diagnostics(
+ r#"
+enum AABB {}
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_enum_variant_name() {
+ check_diagnostics(
+ r#"
+enum SomeEnum { SOME_VARIANT(u8) }
+ // ^^^^^^^^^^^^ 💡 weak: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_const_name() {
+ check_diagnostics(
+ r#"
+const some_weird_const: u8 = 10;
+ // ^^^^^^^^^^^^^^^^ 💡 weak: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+"#,
+ );
+ }
+
+ #[test]
+ fn incorrect_static_name() {
+ check_diagnostics(
+ r#"
+static some_weird_const: u8 = 10;
+ // ^^^^^^^^^^^^^^^^ 💡 weak: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+"#,
+ );
+ }
+
+ #[test]
+ fn fn_inside_impl_struct() {
+ check_diagnostics(
+ r#"
+struct someStruct;
+ // ^^^^^^^^^^ 💡 weak: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+
+impl someStruct {
+ fn SomeFunc(&self) {
+ // ^^^^^^^^ 💡 weak: Function `SomeFunc` should have snake_case name, e.g. `some_func`
+ let WHY_VAR_IS_CAPS = 10;
+ // ^^^^^^^^^^^^^^^ 💡 weak: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+    fn no_diagnostic_for_enum_variants() {
+ check_diagnostics(
+ r#"
+enum Option { Some, None }
+
+fn main() {
+ match Option::None {
+ None => (),
+ Some => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn non_let_bind() {
+ check_diagnostics(
+ r#"
+enum Option { Some, None }
+
+fn main() {
+ match Option::None {
+ SOME_VAR @ None => (),
+ // ^^^^^^^^ 💡 weak: Variable `SOME_VAR` should have snake_case name, e.g. `some_var`
+ Some => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn allow_attributes_crate_attr() {
+ check_diagnostics(
+ r#"
+#![allow(non_snake_case)]
+#![allow(non_camel_case_types)]
+
+struct S {
+ fooBar: bool,
+}
+
+enum E {
+ fooBar,
+}
+
+mod F {
+ fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn complex_ignore() {
+ // FIXME: this should trigger errors for the second case.
+ check_diagnostics(
+ r#"
+trait T { fn a(); }
+struct U {}
+impl T for U {
+ fn a() {
+ #[allow(non_snake_case)]
+ trait __BitFlagsOk {
+ const HiImAlsoBad: u8 = 2;
+ fn Dirty(&self) -> bool { false }
+ }
+
+ trait __BitFlagsBad {
+ const HiImAlsoBad: u8 = 2;
+ fn Dirty(&self) -> bool { false }
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn infinite_loop_inner_items() {
+ check_diagnostics(
+ r#"
+fn qualify() {
+ mod foo {
+ use super::*;
+ }
+}
+ "#,
+ )
+ }
+
+ #[test] // Issue #8809.
+ fn parenthesized_parameter() {
+ check_diagnostics(r#"fn f((O): _) {}"#)
+ }
+
+ #[test]
+ fn ignores_extern_items() {
+ cov_mark::check!(extern_func_incorrect_case_ignored);
+ cov_mark::check!(extern_static_incorrect_case_ignored);
+ check_diagnostics(
+ r#"
+extern {
+ fn NonSnakeCaseName(SOME_VAR: u8) -> u8;
+ pub static SomeStatic: u8 = 10;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn ignores_extern_items_from_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ () => {
+ fn NonSnakeCaseName(SOME_VAR: u8) -> u8;
+ pub static SomeStatic: u8 = 10;
+ }
+}
+
+extern {
+ m!();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn bug_traits_arent_checked() {
+ // FIXME: Traits and functions in traits aren't currently checked by
+ // r-a, even though rustc will complain about them.
+ check_diagnostics(
+ r#"
+trait BAD_TRAIT {
+ fn BAD_FUNCTION();
+ fn BadFunction();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn allow_attributes() {
+ check_diagnostics(
+ r#"
+#[allow(non_snake_case)]
+fn NonSnakeCaseName(SOME_VAR: u8) -> u8{
+ // cov_flags generated output from elsewhere in this file
+ extern "C" {
+ #[no_mangle]
+ static lower_case: u8;
+ }
+
+ let OtherVar = SOME_VAR + 1;
+ OtherVar
+}
+
+#[allow(nonstandard_style)]
+mod CheckNonstandardStyle {
+ fn HiImABadFnName() {}
+}
+
+#[allow(bad_style)]
+mod CheckBadStyle {
+ fn HiImABadFnName() {}
+}
+
+mod F {
+ #![allow(non_snake_case)]
+ fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {}
+}
+
+#[allow(non_snake_case, non_camel_case_types)]
+pub struct some_type {
+ SOME_FIELD: u8,
+ SomeField: u16,
+}
+
+#[allow(non_upper_case_globals)]
+pub const some_const: u8 = 10;
+
+#[allow(non_upper_case_globals)]
+pub static SomeStatic: u8 = 10;
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
new file mode 100644
index 000000000..c779266bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -0,0 +1,38 @@
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: invalid-derive-target
+//
+// This diagnostic is shown when the derive attribute is used on an item other than a `struct`,
+// `enum` or `union`.
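+//
+// Example (an illustrative sketch; applying `derive` to a function triggers it):
+//
+// ```rust
+// #[derive(Clone)] // error: `derive` may only be applied to `struct`s, `enum`s and `union`s
+// fn main() {}
+// ```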
+pub(crate) fn invalid_derive_target(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::InvalidDeriveTarget,
+) -> Diagnostic {
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+
+ Diagnostic::new(
+ "invalid-derive-target",
+ "`derive` may only be applied to `struct`s, `enum`s and `union`s",
+ display_range,
+ )
+ .severity(Severity::Error)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn fails_on_function() {
+ check_diagnostics(
+ r#"
+//- minicore:derive
+mod __ {
+ #[derive()]
+ //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
+ fn main() {}
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
new file mode 100644
index 000000000..d6a66dc15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -0,0 +1,218 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: macro-error
+//
+// This diagnostic is shown for macro expansion errors.
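+//
+// Example (an illustrative sketch mirroring the tests below):
+//
+// ```rust
+// include!("doesntexist"); // error: failed to load file `doesntexist`
+// ```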
+pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
+ // Use more accurate position if available.
+ let display_range = d
+ .precise_location
+ .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()).range);
+
+ Diagnostic::new("macro-error", d.message.clone(), display_range).experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
+
+ #[test]
+ fn builtin_macro_fails_expansion() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[rustc_builtin_macro]
+macro_rules! compile_error { () => {} }
+
+ include!("doesntexist");
+//^^^^^^^ error: failed to load file `doesntexist`
+
+ compile_error!("compile_error macro works");
+//^^^^^^^^^^^^^ error: compile_error macro works
+ "#,
+ );
+ }
+
+ #[test]
+ fn eager_macro_concat() {
+ // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
+ // See: https://github.com/rust-lang/rust-analyzer/issues/10300
+
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::{panic, concat};
+
+mod private {
+ pub use core::concat;
+}
+
+macro_rules! m {
+ () => {
+ panic!(concat!($crate::private::concat!("")));
+ };
+}
+
+fn f() {
+ m!();
+ //^^^^ error: unresolved macro `$crate::private::concat!`
+}
+
+//- /core.rs crate:core
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! concat { () => {} }
+
+pub macro panic {
+ ($msg:expr) => (
+ $crate::panicking::panic_str($msg)
+ ),
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn include_macro_should_allow_empty_content() {
+ let mut config = DiagnosticsConfig::default();
+
+        // FIXME: This is a false positive; the file is actually linked in via
+        // the `include!` macro.
+ config.disabled.insert("unlinked-file".to_string());
+
+ check_diagnostics_with_config(
+ config,
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+include!("foo/bar.rs");
+//- /foo/bar.rs
+// empty
+"#,
+ );
+ }
+
+ #[test]
+ fn good_out_dir_diagnostic() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+#[rustc_builtin_macro]
+macro_rules! env { () => {} }
+#[rustc_builtin_macro]
+macro_rules! concat { () => {} }
+
+ include!(concat!(env!("OUT_DIR"), "/out.rs"));
+//^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+"#,
+ );
+ }
+
+ #[test]
+ fn register_attr_and_tool() {
+ cov_mark::check!(register_attr);
+ cov_mark::check!(register_tool);
+ check_diagnostics(
+ r#"
+#![register_tool(tool)]
+#![register_attr(attr)]
+
+#[tool::path]
+#[attr]
+struct S;
+"#,
+ );
+ // NB: we don't currently emit diagnostics here
+ }
+
+ #[test]
+ fn macro_diag_builtin() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! env {}
+
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+#[rustc_builtin_macro]
+macro_rules! compile_error {}
+
+#[rustc_builtin_macro]
+macro_rules! format_args { () => {} }
+
+fn main() {
+ // Test a handful of built-in (eager) macros:
+
+ include!(invalid);
+ //^^^^^^^ error: could not convert tokens
+ include!("does not exist");
+ //^^^^^^^ error: failed to load file `does not exist`
+
+ env!(invalid);
+ //^^^ error: could not convert tokens
+
+ env!("OUT_DIR");
+ //^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+
+ compile_error!("compile_error works");
+ //^^^^^^^^^^^^^ error: compile_error works
+
+ // Lazy:
+
+ format_args!();
+ //^^^^^^^^^^^ error: no rule matches input tokens
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn macro_rules_diag() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ () => {};
+}
+fn f() {
+ m!();
+
+ m!(hi);
+ //^ error: leftover tokens
+}
+ "#,
+ );
+ }
+ #[test]
+ fn dollar_crate_in_builtin_macro() {
+ check_diagnostics(
+ r#"
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! format_args {}
+
+#[macro_export]
+macro_rules! arg { () => {} }
+
+#[macro_export]
+macro_rules! outer {
+ () => {
+ $crate::format_args!( "", $crate::arg!(1) )
+ };
+}
+
+fn f() {
+ outer!();
+} //^^^^^^^^ error: leftover tokens
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
new file mode 100644
index 000000000..cd48bdba0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -0,0 +1,37 @@
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: malformed-derive
+//
+// This diagnostic is shown when the derive attribute has invalid input.
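+//
+// Example (an illustrative sketch mirroring the test below):
+//
+// ```rust
+// #[derive = "Clone"] // error: malformed derive input
+// struct S;
+// ```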
+pub(crate) fn malformed_derive(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MalformedDerive,
+) -> Diagnostic {
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+
+ Diagnostic::new(
+ "malformed-derive",
+ "malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`",
+ display_range,
+ )
+ .severity(Severity::Error)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn invalid_input() {
+ check_diagnostics(
+ r#"
+//- minicore:derive
+mod __ {
+ #[derive = "aaaa"]
+ //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
+ struct Foo;
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
new file mode 100644
index 000000000..5f8b3e543
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -0,0 +1,334 @@
+use syntax::{
+ ast::{self, HasArgList},
+ AstNode, TextRange,
+};
+
+use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: mismatched-arg-count
+//
+// This diagnostic is triggered if a function is invoked with an incorrect number of arguments.
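+//
+// Example (an illustrative sketch mirroring the tests below):
+//
+// ```rust
+// fn one(arg: u8) {}
+// fn f() { one(); } // error: expected 1 argument, found 0
+// ```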
+pub(crate) fn mismatched_arg_count(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MismatchedArgCount,
+) -> Diagnostic {
+ let s = if d.expected == 1 { "" } else { "s" };
+ let message = format!("expected {} argument{}, found {}", d.expected, s, d.found);
+ Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d))
+}
+
+fn invalid_args_range(ctx: &DiagnosticsContext<'_>, d: &hir::MismatchedArgCount) -> TextRange {
+ adjusted_display_range::<ast::Expr>(ctx, d.call_expr.clone().map(|it| it.into()), &|expr| {
+ let arg_list = match expr {
+ ast::Expr::CallExpr(call) => call.arg_list()?,
+ ast::Expr::MethodCallExpr(call) => call.arg_list()?,
+ _ => return None,
+ };
+ if d.found < d.expected {
+ if d.found == 0 {
+ return Some(arg_list.syntax().text_range());
+ }
+ if let Some(r_paren) = arg_list.r_paren_token() {
+ return Some(r_paren.text_range());
+ }
+ }
+ if d.expected < d.found {
+ if d.expected == 0 {
+ return Some(arg_list.syntax().text_range());
+ }
+ let zip = arg_list.args().nth(d.expected).zip(arg_list.r_paren_token());
+ if let Some((arg, r_paren)) = zip {
+ return Some(arg.syntax().text_range().cover(r_paren.text_range()));
+ }
+ }
+
+ None
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn simple_free_fn_zero() {
+ check_diagnostics(
+ r#"
+fn zero() {}
+fn f() { zero(1); }
+ //^^^ error: expected 0 arguments, found 1
+"#,
+ );
+
+ check_diagnostics(
+ r#"
+fn zero() {}
+fn f() { zero(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn simple_free_fn_one() {
+ check_diagnostics(
+ r#"
+fn one(arg: u8) {}
+fn f() { one(); }
+ //^^ error: expected 1 argument, found 0
+"#,
+ );
+
+ check_diagnostics(
+ r#"
+fn one(arg: u8) {}
+fn f() { one(1); }
+"#,
+ );
+ }
+
+ #[test]
+ fn method_as_fn() {
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self) {} }
+
+fn f() {
+ S::method();
+} //^^ error: expected 1 argument, found 0
+"#,
+ );
+
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self) {} }
+
+fn f() {
+ S::method(&S);
+ S.method();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_arg() {
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self, arg: u8) {} }
+
+ fn f() {
+ S.method();
+ } //^^ error: expected 1 argument, found 0
+ "#,
+ );
+
+ check_diagnostics(
+ r#"
+struct S;
+impl S { fn method(&self, arg: u8) {} }
+
+fn f() {
+ S::method(&S, 0);
+ S.method(1);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_unknown_receiver() {
+        // Note: this is incorrect code, so other errors may be emitted here in the
+        // future, but we shouldn't emit an argument-count diagnostic here.
+ check_diagnostics(
+ r#"
+trait Foo { fn method(&self, arg: usize) {} }
+
+fn f() {
+ let x;
+ x.method();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_struct() {
+ check_diagnostics(
+ r#"
+struct Tup(u8, u16);
+fn f() {
+ Tup(0);
+} //^ error: expected 2 arguments, found 1
+"#,
+ )
+ }
+
+ #[test]
+ fn enum_variant() {
+ check_diagnostics(
+ r#"
+enum En { Variant(u8, u16), }
+fn f() {
+ En::Variant(0);
+} //^ error: expected 2 arguments, found 1
+"#,
+ )
+ }
+
+ #[test]
+ fn enum_variant_type_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! Type {
+ () => { u32 };
+}
+enum Foo {
+ Bar(Type![])
+}
+impl Foo {
+ fn new() {
+ Foo::Bar(0);
+ Foo::Bar(0, 1);
+ //^^ error: expected 1 argument, found 2
+ Foo::Bar();
+ //^^ error: expected 1 argument, found 0
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn varargs() {
+ check_diagnostics(
+ r#"
+extern "C" {
+ fn fixed(fixed: u8);
+ fn varargs(fixed: u8, ...);
+ fn varargs2(...);
+}
+
+fn f() {
+ unsafe {
+ fixed(0);
+ fixed(0, 1);
+ //^^ error: expected 1 argument, found 2
+ varargs(0);
+ varargs(0, 1);
+ varargs2();
+ varargs2(0);
+ varargs2(0, 1);
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn arg_count_lambda() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let f = |()| ();
+ f();
+ //^^ error: expected 1 argument, found 0
+ f(());
+ f((), ());
+ //^^^ error: expected 1 argument, found 2
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn cfgd_out_call_arguments() {
+ check_diagnostics(
+ r#"
+struct C(#[cfg(FALSE)] ());
+impl C {
+ fn new() -> Self {
+ Self(
+ #[cfg(FALSE)]
+ (),
+ )
+ }
+
+ fn method(&self) {}
+}
+
+fn main() {
+ C::new().method(#[cfg(FALSE)] 0);
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn cfgd_out_fn_params() {
+ check_diagnostics(
+ r#"
+fn foo(#[cfg(NEVER)] x: ()) {}
+
+struct S;
+
+impl S {
+ fn method(#[cfg(NEVER)] self) {}
+ fn method2(#[cfg(NEVER)] self, arg: u8) {}
+ fn method3(self, #[cfg(NEVER)] arg: u8) {}
+}
+
+extern "C" {
+ fn fixed(fixed: u8, #[cfg(NEVER)] ...);
+ fn varargs(#[cfg(not(NEVER))] ...);
+}
+
+fn main() {
+ foo();
+ S::method();
+ S::method2(0);
+ S::method3(S);
+ S.method3();
+ unsafe {
+ fixed(0);
+ varargs(1, 2, 3);
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn legacy_const_generics() {
+ check_diagnostics(
+ r#"
+#[rustc_legacy_const_generics(1, 3)]
+fn mixed<const N1: &'static str, const N2: bool>(
+ a: u8,
+ b: i8,
+) {}
+
+fn f() {
+ mixed(0, "", -1, true);
+ mixed::<"", true>(0, -1);
+}
+
+#[rustc_legacy_const_generics(1, 3)]
+fn b<const N1: u8, const N2: u8>(
+ a: u8,
+ b: u8,
+) {}
+
+fn g() {
+ b(0, 1, 2, 3);
+ b::<1, 3>(0, 2);
+
+ b(0, 1, 2);
+ //^ error: expected 4 arguments, found 3
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
new file mode 100644
index 000000000..edb1fc091
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -0,0 +1,837 @@
+use either::Either;
+use hir::{
+ db::{AstDatabase, HirDatabase},
+ known, AssocItem, HirDisplay, InFile, Type,
+};
+use ide_db::{
+ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
+ source_change::SourceChange, use_trivial_contructor::use_trivial_constructor, FxHashMap,
+};
+use stdx::format_to;
+use syntax::{
+ algo,
+ ast::{self, make},
+ AstNode, SyntaxNode, SyntaxNodePtr,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: missing-fields
+//
+// This diagnostic is triggered if a record lacks some fields that exist in the corresponding structure.
+//
+// Example:
+//
+// ```rust
+// struct A { a: u8, b: u8 }
+//
+// let a = A { a: 10 };
+// ```
+pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Diagnostic {
+ let mut message = String::from("missing structure fields:\n");
+ for field in &d.missed_fields {
+ format_to!(message, "- {}\n", field);
+ }
+
+ let ptr = InFile::new(
+ d.file,
+ d.field_list_parent_path
+ .clone()
+ .map(SyntaxNodePtr::from)
+ .unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
+ );
+
+ Diagnostic::new("missing-fields", message, ctx.sema.diagnostics_display_range(ptr).range)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Assist>> {
+    // Note that although we could offer a fix that fills in the missing tuple fields, e.g.:
+    // `struct A(usize);`
+    // `let a = A { 0: () }`
+    // this is uncommon usage and should not be encouraged.
+ if d.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
+ return None;
+ }
+
+ let root = ctx.sema.db.parse_or_expand(d.file)?;
+
+ let current_module = match &d.field_list_parent {
+ Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
+ Either::Right(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
+ };
+
+ let build_text_edit = |parent_syntax, new_syntax: &SyntaxNode, old_syntax| {
+ let edit = {
+ let mut builder = TextEdit::builder();
+ if d.file.is_macro() {
+                // Unfortunately we can't map the diff up into the macro input, as the macro
+                // loses all whitespace information, so the diff wouldn't be applicable no matter what.
+                // The downside of applying the edit without a diff is that the cursor will be moved
+                // inside macros, but that is a trade-off we can make.
+ // FIXME: this also currently discards a lot of whitespace in the input... we really need a formatter here
+ let range = ctx.sema.original_range_opt(old_syntax)?;
+ builder.replace(range.range, new_syntax.to_string());
+ } else {
+ algo::diff(old_syntax, new_syntax).into_text_edit(&mut builder);
+ }
+ builder.finish()
+ };
+ Some(vec![fix(
+ "fill_missing_fields",
+ "Fill struct fields",
+ SourceChange::from_text_edit(d.file.original_file(ctx.sema.db), edit),
+ ctx.sema.original_range(parent_syntax).range,
+ )])
+ };
+
+ match &d.field_list_parent {
+ Either::Left(record_expr) => {
+ let field_list_parent = record_expr.to_node(&root);
+ let missing_fields = ctx.sema.record_literal_missing_fields(&field_list_parent);
+
+ let mut locals = FxHashMap::default();
+ ctx.sema.scope(field_list_parent.syntax())?.process_all_names(&mut |name, def| {
+ if let hir::ScopeDef::Local(local) = def {
+ locals.insert(name, local);
+ }
+ });
+
+ let generate_fill_expr = |ty: &Type| match ctx.config.expr_fill_default {
+ crate::ExprFillDefaultMode::Todo => make::ext::expr_todo(),
+ crate::ExprFillDefaultMode::Default => {
+ get_default_constructor(ctx, d, ty).unwrap_or_else(|| make::ext::expr_todo())
+ }
+ };
+
+ let old_field_list = field_list_parent.record_expr_field_list()?;
+ let new_field_list = old_field_list.clone_for_update();
+ for (f, ty) in missing_fields.iter() {
+ let field_expr = if let Some(local_candidate) = locals.get(&f.name(ctx.sema.db)) {
+ cov_mark::hit!(field_shorthand);
+ let candidate_ty = local_candidate.ty(ctx.sema.db);
+ if ty.could_unify_with(ctx.sema.db, &candidate_ty) {
+ None
+ } else {
+ Some(generate_fill_expr(ty))
+ }
+ } else {
+ let expr = (|| -> Option<ast::Expr> {
+ let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
+
+ let type_path = current_module?.find_use_path(
+ ctx.sema.db,
+ item_for_path_search(ctx.sema.db, item_in_ns)?,
+ )?;
+
+ use_trivial_constructor(
+ &ctx.sema.db,
+ ide_db::helpers::mod_path_to_ast(&type_path),
+ &ty,
+ )
+ })();
+
+ if expr.is_some() {
+ expr
+ } else {
+ Some(generate_fill_expr(ty))
+ }
+ };
+ let field = make::record_expr_field(
+ make::name_ref(&f.name(ctx.sema.db).to_smol_str()),
+ field_expr,
+ );
+ new_field_list.add_field(field.clone_for_update());
+ }
+ build_text_edit(
+ field_list_parent.syntax(),
+ new_field_list.syntax(),
+ old_field_list.syntax(),
+ )
+ }
+ Either::Right(record_pat) => {
+ let field_list_parent = record_pat.to_node(&root);
+ let missing_fields = ctx.sema.record_pattern_missing_fields(&field_list_parent);
+
+ let old_field_list = field_list_parent.record_pat_field_list()?;
+ let new_field_list = old_field_list.clone_for_update();
+ for (f, _) in missing_fields.iter() {
+ let field = make::record_pat_field_shorthand(make::name_ref(
+ &f.name(ctx.sema.db).to_smol_str(),
+ ));
+ new_field_list.add_field(field.clone_for_update());
+ }
+ build_text_edit(
+ field_list_parent.syntax(),
+ new_field_list.syntax(),
+ old_field_list.syntax(),
+ )
+ }
+ }
+}
+
+fn make_ty(ty: &hir::Type, db: &dyn HirDatabase, module: hir::Module) -> ast::Type {
+ let ty_str = match ty.as_adt() {
+ Some(adt) => adt.name(db).to_string(),
+ None => ty.display_source_code(db, module.into()).ok().unwrap_or_else(|| "_".to_string()),
+ };
+
+ make::ty(&ty_str)
+}
+
+fn get_default_constructor(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MissingFields,
+ ty: &Type,
+) -> Option<ast::Expr> {
+ if let Some(builtin_ty) = ty.as_builtin() {
+ if builtin_ty.is_int() || builtin_ty.is_uint() {
+ return Some(make::ext::zero_number());
+ }
+ if builtin_ty.is_float() {
+ return Some(make::ext::zero_float());
+ }
+ if builtin_ty.is_char() {
+ return Some(make::ext::empty_char());
+ }
+ if builtin_ty.is_str() {
+ return Some(make::ext::empty_str());
+ }
+ if builtin_ty.is_bool() {
+ return Some(make::ext::default_bool());
+ }
+ }
+
+ let krate = ctx.sema.to_module_def(d.file.original_file(ctx.sema.db))?.krate();
+ let module = krate.root_module(ctx.sema.db);
+
+ // Look for a ::new() associated function
+ let has_new_func = ty
+ .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| {
+ if let AssocItem::Function(func) = assoc_item {
+ if func.name(ctx.sema.db) == known::new
+ && func.assoc_fn_params(ctx.sema.db).is_empty()
+ {
+ return Some(());
+ }
+ }
+
+ None
+ })
+ .is_some();
+
+ let famous_defs = FamousDefs(&ctx.sema, krate);
+ if has_new_func {
+ Some(make::ext::expr_ty_new(&make_ty(ty, ctx.sema.db, module)))
+ } else if ty.as_adt() == famous_defs.core_option_Option()?.ty(ctx.sema.db).as_adt() {
+ Some(make::ext::option_none())
+ } else if !ty.is_array()
+ && ty.impls_trait(ctx.sema.db, famous_defs.core_default_Default()?, &[])
+ {
+ Some(make::ext::expr_ty_default(&make_ty(ty, ctx.sema.db, module)))
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn missing_record_pat_field_diagnostic() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+fn baz(s: S) {
+ let S { foo: _ } = s;
+ //^ 💡 error: missing structure fields:
+ //| - bar
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
+ check_diagnostics(
+ r"
+struct S { foo: i32, bar: () }
+fn baz(s: S) -> i32 {
+ match s {
+ S { foo, .. } => foo,
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn missing_record_pat_field_box() {
+ check_diagnostics(
+ r"
+struct S { s: Box<u32> }
+fn x(a: S) {
+ let S { box s } = a;
+}
+",
+ )
+ }
+
+ #[test]
+ fn missing_record_pat_field_ref() {
+ check_diagnostics(
+ r"
+struct S { s: u32 }
+fn x(a: S) {
+ let S { ref s } = a;
+}
+",
+ )
+ }
+
+ #[test]
+ fn missing_record_expr_in_assignee_expr() {
+ check_diagnostics(
+ r"
+struct S { s: usize, t: usize }
+struct S2 { s: S, t: () }
+struct T(S);
+fn regular(a: S) {
+ let s;
+ S { s, .. } = a;
+}
+fn nested(a: S2) {
+ let s;
+ S2 { s: S { s, .. }, .. } = a;
+}
+fn in_tuple(a: (S,)) {
+ let s;
+ (S { s, .. },) = a;
+}
+fn in_array(a: [S;1]) {
+ let s;
+ [S { s, .. },] = a;
+}
+fn in_tuple_struct(a: T) {
+ let s;
+ T(S { s, .. }) = a;
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn range_mapping_out_of_macros() {
+ check_fix(
+ r#"
+fn some() {}
+fn items() {}
+fn here() {}
+
+macro_rules! id { ($($tt:tt)*) => { $($tt)*}; }
+
+fn main() {
+ let _x = id![Foo { a: $042 }];
+}
+
+pub struct Foo { pub a: i32, pub b: i32 }
+"#,
+ r#"
+fn some() {}
+fn items() {}
+fn here() {}
+
+macro_rules! id { ($($tt:tt)*) => { $($tt)*}; }
+
+fn main() {
+ let _x = id![Foo {a:42, b: 0 }];
+}
+
+pub struct Foo { pub a: i32, pub b: i32 }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_empty() {
+ check_fix(
+ r#"
+//- minicore: option
+struct TestStruct { one: i32, two: i64, three: Option<i32>, four: bool }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+struct TestStruct { one: i32, two: i64, three: Option<i32>, four: bool }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: 0, three: None, four: false };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_zst_fields() {
+ check_fix(
+ r#"
+struct Empty;
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+struct Empty;
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: Empty };
+}
+"#,
+ );
+ check_fix(
+ r#"
+enum Empty { Foo };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+enum Empty { Foo };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: Empty::Foo };
+}
+"#,
+ );
+
+    // Make sure the assist doesn't fill in non-unit variants.
+ check_fix(
+ r#"
+struct Empty {};
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+struct Empty {};
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: todo!() };
+}
+"#,
+ );
+ check_fix(
+ r#"
+enum Empty { Foo {} };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct {$0};
+}
+"#,
+ r#"
+enum Empty { Foo {} };
+
+struct TestStruct { one: i32, two: Empty }
+
+fn test_fn() {
+ let s = TestStruct { one: 0, two: todo!() };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_self() {
+ check_fix(
+ r#"
+struct TestStruct { one: i32 }
+
+impl TestStruct {
+ fn test_fn() { let s = Self {$0}; }
+}
+"#,
+ r#"
+struct TestStruct { one: i32 }
+
+impl TestStruct {
+ fn test_fn() { let s = Self { one: 0 }; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_enum() {
+ check_fix(
+ r#"
+enum Expr {
+ Bin { lhs: Box<Expr>, rhs: Box<Expr> }
+}
+
+impl Expr {
+ fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
+ Expr::Bin {$0 }
+ }
+}
+"#,
+ r#"
+enum Expr {
+ Bin { lhs: Box<Expr>, rhs: Box<Expr> }
+}
+
+impl Expr {
+ fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
+ Expr::Bin { lhs, rhs }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_partial() {
+ check_fix(
+ r#"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let s = TestStruct{ two: 2$0 };
+}
+"#,
+ r"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let s = TestStruct{ two: 2, one: 0 };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_new() {
+ check_fix(
+ r#"
+struct TestWithNew(usize);
+impl TestWithNew {
+ pub fn new() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithNew }
+
+fn test_fn() {
+ let s = TestStruct{ $0 };
+}
+"#,
+ r"
+struct TestWithNew(usize);
+impl TestWithNew {
+ pub fn new() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithNew }
+
+fn test_fn() {
+ let s = TestStruct{ one: 0, two: TestWithNew::new() };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_default() {
+ check_fix(
+ r#"
+//- minicore: default, option
+struct TestWithDefault(usize);
+impl Default for TestWithDefault {
+ pub fn default() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithDefault }
+
+fn test_fn() {
+ let s = TestStruct{ $0 };
+}
+"#,
+ r"
+struct TestWithDefault(usize);
+impl Default for TestWithDefault {
+ pub fn default() -> Self {
+ Self(0)
+ }
+}
+struct TestStruct { one: i32, two: TestWithDefault }
+
+fn test_fn() {
+ let s = TestStruct{ one: 0, two: TestWithDefault::default() };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_raw_ident() {
+ check_fix(
+ r#"
+struct TestStruct { r#type: u8 }
+
+fn test_fn() {
+ TestStruct { $0 };
+}
+"#,
+ r"
+struct TestStruct { r#type: u8 }
+
+fn test_fn() {
+ TestStruct { r#type: 0 };
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_no_diagnostic() {
+ check_diagnostics(
+ r#"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let one = 1;
+ let s = TestStruct{ one, two: 2 };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_no_diagnostic_on_spread() {
+ check_diagnostics(
+ r#"
+struct TestStruct { one: i32, two: i64 }
+
+fn test_fn() {
+ let one = 1;
+ let s = TestStruct{ ..a };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_blank_line() {
+ check_fix(
+ r#"
+struct S { a: (), b: () }
+
+fn f() {
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: (), b: () }
+
+fn f() {
+ S {
+ a: todo!(),
+ b: todo!(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_shorthand() {
+ cov_mark::check!(field_shorthand);
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_shorthand_ty_mismatch() {
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1usize;
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let a = "hello";
+ let b = 1usize;
+ S {
+ a,
+ b: 0,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_fields_shorthand_unifies() {
+ check_fix(
+ r#"
+struct S<T> { a: &'static str, b: T }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S<T> { a: &'static str, b: T }
+
+fn f() {
+ let a = "hello";
+ let b = 1i32;
+ S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_pat_fields() {
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ $0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fill_struct_pat_fields_partial() {
+ check_fix(
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ a,$0
+ };
+}
+"#,
+ r#"
+struct S { a: &'static str, b: i32 }
+
+fn f() {
+ let S {
+ a,
+ b,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn import_extern_crate_clash_with_inner_item() {
+ // This is more of a resolver test, but doesn't really work with the hir_def testsuite.
+
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:lib deps:jwt
+mod permissions;
+
+use permissions::jwt;
+
+fn f() {
+ fn inner() {}
+ jwt::Claims {}; // should resolve to the local one with 0 fields, and not get a diagnostic
+}
+
+//- /permissions.rs
+pub mod jwt {
+ pub struct Claims {}
+}
+
+//- /jwt/lib.rs crate:jwt
+pub struct Claims {
+ field: u8,
+}
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
new file mode 100644
index 000000000..9e66fbfb7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -0,0 +1,1012 @@
+use hir::InFile;
+
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: missing-match-arm
+//
+// This diagnostic is triggered if a `match` block is missing one or more match arms.
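+//
+// A minimal sketch of code that would trigger it (illustrative only; the enum and
+// function names are placeholders):
+//
+// ```rust
+// enum Direction { North, South }
+//
+// fn handle(d: Direction) {
+//     match d {
+//         Direction::North => (),
+//     } // error: missing match arm: `South` not covered
+// }
+// ```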
+pub(crate) fn missing_match_arms(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MissingMatchArms,
+) -> Diagnostic {
+ Diagnostic::new(
+ "missing-match-arm",
+ format!("missing match arm: {}", d.uncovered_patterns),
+ ctx.sema.diagnostics_display_range(InFile::new(d.file, d.match_expr.clone().into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ fn check_diagnostics_no_bails(ra_fixture: &str) {
+ cov_mark::check_count!(validate_match_bailed_out, 0);
+ crate::tests::check_diagnostics(ra_fixture)
+ }
+
+ #[test]
+ fn empty_tuple() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match () { }
+ //^^ error: missing match arm: type `()` is non-empty
+ match (()) { }
+ //^^^^ error: missing match arm: type `()` is non-empty
+
+ match () { _ => (), }
+ match () { () => (), }
+ match (()) { (()) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_two_empty_tuple() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match ((), ()) { }
+ //^^^^^^^^ error: missing match arm: type `((), ())` is non-empty
+
+ match ((), ()) { ((), ()) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn boolean() {
+ check_diagnostics_no_bails(
+ r#"
+fn test_main() {
+ match false { }
+ //^^^^^ error: missing match arm: type `bool` is non-empty
+ match false { true => (), }
+ //^^^^^ error: missing match arm: `false` not covered
+ match (false, true) {}
+ //^^^^^^^^^^^^^ error: missing match arm: type `(bool, bool)` is non-empty
+ match (false, true) { (true, true) => (), }
+ //^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+ match (false, true) {
+ //^^^^^^^^^^^^^ error: missing match arm: `(true, true)` not covered
+ (false, true) => (),
+ (false, false) => (),
+ (true, false) => (),
+ }
+ match (false, true) { (true, _x) => (), }
+ //^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+
+ match false { true => (), false => (), }
+ match (false, true) {
+ (false, _) => (),
+ (true, false) => (),
+ (_, true) => (),
+ }
+ match (false, true) {
+ (true, true) => (),
+ (true, false) => (),
+ (false, true) => (),
+ (false, false) => (),
+ }
+ match (false, true) {
+ (true, _x) => (),
+ (false, true) => (),
+ (false, false) => (),
+ }
+ match (false, true, false) {
+ (false, ..) => (),
+ (true, ..) => (),
+ }
+ match (false, true, false) {
+ (.., false) => (),
+ (.., true) => (),
+ }
+ match (false, true, false) { (..) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_tuple_and_bools() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, ((), false)) {}
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: type `(bool, ((), bool))` is non-empty
+ match (false, ((), false)) { (true, ((), true)) => (), }
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+ match (false, ((), false)) { (true, _) => (), }
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(false, _)` not covered
+
+ match (false, ((), false)) {
+ (true, ((), true)) => (),
+ (true, ((), false)) => (),
+ (false, ((), true)) => (),
+ (false, ((), false)) => (),
+ }
+ match (false, ((), false)) {
+ (true, ((), true)) => (),
+ (true, ((), false)) => (),
+ (false, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enums() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A, B, }
+
+fn main() {
+ match Either::A { }
+ //^^^^^^^^^ error: missing match arm: `A` and `B` not covered
+ match Either::B { Either::A => (), }
+ //^^^^^^^^^ error: missing match arm: `B` not covered
+
+ match &Either::B {
+ //^^^^^^^^^^ error: missing match arm: `&B` not covered
+ Either::A => (),
+ }
+
+ match Either::B {
+ Either::A => (), Either::B => (),
+ }
+ match &Either::B {
+ Either::A => (), Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_containing_bool() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A(bool), B }
+
+fn main() {
+ match Either::B { }
+ //^^^^^^^^^ error: missing match arm: `A(_)` and `B` not covered
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `A(false)` not covered
+ Either::A(true) => (), Either::B => ()
+ }
+
+ match Either::B {
+ Either::A(true) => (),
+ Either::A(false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ Either::B => (),
+ _ => (),
+ }
+ match Either::B {
+ Either::A(_) => (),
+ Either::B => (),
+ }
+
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn enum_different_sizes() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A(bool), B(bool, bool) }
+
+fn main() {
+ match Either::A(false) {
+ //^^^^^^^^^^^^^^^^ error: missing match arm: `B(true, _)` not covered
+ Either::A(_) => (),
+ Either::B(false, _) => (),
+ }
+
+ match Either::A(false) {
+ Either::A(_) => (),
+ Either::B(true, _) => (),
+ Either::B(false, _) => (),
+ }
+ match Either::A(false) {
+ Either::A(true) | Either::A(false) => (),
+ Either::B(true, _) => (),
+ Either::B(false, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_enum_no_diagnostic() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A(bool), B(bool, bool) }
+enum Either2 { C, D }
+
+fn main() {
+ match (Either::A(false), Either2::C) {
+ (Either::A(true), _) | (Either::A(false), _) => (),
+ (Either::B(true, _), Either2::C) => (),
+ (Either::B(false, _), Either2::C) => (),
+ (Either::B(_, _), Either2::D) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn or_pattern_no_diagnostic() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {A, B}
+
+fn main() {
+ match (Either::A, Either::B) {
+ (Either::A | Either::B, _) => (),
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn mismatched_types() {
+ cov_mark::check_count!(validate_match_bailed_out, 4);
+ // Match expressions whose arms have patterns that don't match the type of the
+ // scrutinee expression do not fire this diagnostic.
+ check_diagnostics(
+ r#"
+enum Either { A, B }
+enum Either2 { C, D }
+
+fn main() {
+ match Either::A {
+ Either2::C => (),
+ Either2::D => (),
+ }
+ match (true, false) {
+ (true, false, true) => (),
+ (true) => (),
+ // ^^^^ error: expected (bool, bool), found bool
+ }
+ match (true, false) { (true,) => {} }
+ match (0) { () => () }
+ match Unresolved::Bar { Unresolved::Baz => () }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn mismatched_types_in_or_patterns() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+ check_diagnostics(
+ r#"
+fn main() {
+ match false { true | () => {} }
+ match (false,) { (true | (),) => {} }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn malformed_match_arm_tuple_enum_missing_pattern() {
+ // We are testing to be sure we don't panic here when the match
+ // arm `Either::B` is missing its pattern.
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A, B(u32) }
+
+fn main() {
+ match Either::A {
+ Either::A => (),
+ Either::B() => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn malformed_match_arm_extra_fields() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+ check_diagnostics(
+ r#"
+enum A { B(isize, isize), C }
+fn main() {
+ match A::B(1, 2) {
+ A::B(_, _, _) => (),
+ }
+ match A::B(1, 2) {
+ A::C(_) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expr_diverges() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+ check_diagnostics(
+ r#"
+enum Either { A, B }
+
+fn main() {
+ match loop {} {
+ Either::A => (),
+ Either::B => (),
+ }
+ match loop {} {
+ Either::A => (),
+ }
+ match loop { break Foo::A } {
+ //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered
+ Either::A => (),
+ }
+ match loop { break Foo::A } {
+ Either::A => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expr_partially_diverges() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either<T> { A(T), B }
+
+fn foo() -> Either<!> { Either::B }
+fn main() -> u32 {
+ match foo() {
+ Either::A(val) => val,
+ Either::B => 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either { A { foo: bool }, B }
+
+fn main() {
+ let a = Either::A { foo: true };
+ match a { }
+ //^ error: missing match arm: `A { .. }` and `B` not covered
+ match a { Either::A { foo: true } => () }
+ //^ error: missing match arm: `B` not covered
+ match a {
+ Either::A { } => (),
+ //^^^^^^^^^ 💡 error: missing structure fields:
+ // | - foo
+ Either::B => (),
+ }
+ match a {
+ //^ error: missing match arm: `B` not covered
+ Either::A { } => (),
+ } //^^^^^^^^^ 💡 error: missing structure fields:
+ // | - foo
+
+ match a {
+ Either::A { foo: true } => (),
+ Either::A { foo: false } => (),
+ Either::B => (),
+ }
+ match a {
+ Either::A { foo: _ } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record_fields_out_of_order() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {
+ A { foo: bool, bar: () },
+ B,
+}
+
+fn main() {
+ let a = Either::A { foo: true, bar: () };
+ match a {
+ //^ error: missing match arm: `B` not covered
+ Either::A { bar: (), foo: false } => (),
+ Either::A { foo: true, bar: () } => (),
+ }
+
+ match a {
+ Either::A { bar: (), foo: false } => (),
+ Either::A { foo: true, bar: () } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record_ellipsis() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {
+ A { foo: bool, bar: bool },
+ B,
+}
+
+fn main() {
+ let a = Either::B;
+ match a {
+ //^ error: missing match arm: `A { foo: false, .. }` not covered
+ Either::A { foo: true, .. } => (),
+ Either::B => (),
+ }
+ match a {
+ //^ error: missing match arm: `B` not covered
+ Either::A { .. } => (),
+ }
+
+ match a {
+ Either::A { foo: true, .. } => (),
+ Either::A { foo: false, .. } => (),
+ Either::B => (),
+ }
+
+ match a {
+ Either::A { .. } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_tuple_partial_ellipsis() {
+ check_diagnostics_no_bails(
+ r#"
+enum Either {
+ A(bool, bool, bool, bool),
+ B,
+}
+
+fn main() {
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `A(false, _, _, true)` not covered
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(false, .., false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `A(false, _, _, false)` not covered
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(.., true) => (),
+ Either::B => (),
+ }
+
+ match Either::B {
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(false, .., true) => (),
+ Either::A(false, .., false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(.., true) => (),
+ Either::A(.., false) => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn never() {
+ check_diagnostics_no_bails(
+ r#"
+enum Never {}
+
+fn enum_(never: Never) {
+ match never {}
+}
+fn enum_ref(never: &Never) {
+ match never {}
+ //^^^^^ error: missing match arm: type `&Never` is non-empty
+}
+fn bang(never: !) {
+ match never {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unknown_type() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn main() {
+ // `Never` is deliberately not defined so that it's an uninferred type.
+ match Option::<Never>::None {
+ None => (),
+ Some(never) => match never {},
+ }
+ match Option::<Never>::None {
+ //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `None` not covered
+ Option::Some(_never) => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, true, false) {
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(true, _, _)` not covered
+ (false, ..) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, true, false) {
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(_, _, true)` not covered
+ (.., false) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_in_middle_missing_arm() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match (false, true, false) {
+ //^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `(false, _, _)` not covered
+ (true, .., false) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn record_struct() {
+ check_diagnostics_no_bails(
+ r#"struct Foo { a: bool }
+fn main(f: Foo) {
+ match f {}
+ //^ error: missing match arm: type `Foo` is non-empty
+ match f { Foo { a: true } => () }
+ //^ error: missing match arm: `Foo { a: false }` not covered
+ match &f { Foo { a: true } => () }
+ //^^ error: missing match arm: `&Foo { a: false }` not covered
+ match f { Foo { a: _ } => () }
+ match f {
+ Foo { a: true } => (),
+ Foo { a: false } => (),
+ }
+ match &f {
+ Foo { a: true } => (),
+ Foo { a: false } => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_struct() {
+ check_diagnostics_no_bails(
+ r#"struct Foo(bool);
+fn main(f: Foo) {
+ match f {}
+ //^ error: missing match arm: type `Foo` is non-empty
+ match f { Foo(true) => () }
+ //^ error: missing match arm: `Foo(false)` not covered
+ match f {
+ Foo(true) => (),
+ Foo(false) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unit_struct() {
+ check_diagnostics_no_bails(
+ r#"struct Foo;
+fn main(f: Foo) {
+ match f {}
+ //^ error: missing match arm: type `Foo` is non-empty
+ match f { Foo => () }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn record_struct_ellipsis() {
+ check_diagnostics_no_bails(
+ r#"struct Foo { foo: bool, bar: bool }
+fn main(f: Foo) {
+ match f { Foo { foo: true, .. } => () }
+ //^ error: missing match arm: `Foo { foo: false, .. }` not covered
+ match f {
+ //^ error: missing match arm: `Foo { foo: false, bar: true }` not covered
+ Foo { foo: true, .. } => (),
+ Foo { bar: false, .. } => ()
+ }
+ match f { Foo { .. } => () }
+ match f {
+ Foo { foo: true, .. } => (),
+ Foo { foo: false, .. } => ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn internal_or() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ enum Either { A(bool), B }
+ match Either::B {
+ //^^^^^^^^^ error: missing match arm: `B` not covered
+ Either::A(true | false) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_panic_at_unimplemented_subpattern_type() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+struct S { a: char}
+fn main(v: S) {
+ match v { S{ a } => {} }
+ match v { S{ a: _x } => {} }
+ match v { S{ a: 'a' } => {} }
+ match v { S{..} => {} }
+ match v { _ => {} }
+ match v { }
+ //^ error: missing match arm: type `S` is non-empty
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn binding() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match true {
+ _x @ true => {}
+ false => {}
+ }
+ match true { _x @ true => {} }
+ //^^^^ error: missing match arm: `false` not covered
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn binding_ref_has_correct_type() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ // Asserts `PatKind::Binding(ref _x): bool`, not &bool.
+ // If that's not true, match checking will panic with "incompatible constructors".
+ // FIXME: make facilities to test this directly like `tests::check_infer(..)`
+ check_diagnostics(
+ r#"
+enum Foo { A }
+fn main() {
+ // FIXME: this should not bail out, but the current behavior matches the old algorithm.
+ // ExprValidator::validate_match(..) checks the types of top-level patterns incorrectly.
+ match Foo::A {
+ ref _x => {}
+ Foo::A => {}
+ }
+ match (true,) {
+ (ref _x,) => {}
+ (true,) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_non_exhaustive() {
+ check_diagnostics_no_bails(
+ r#"
+//- /lib.rs crate:lib
+#[non_exhaustive]
+pub enum E { A, B }
+fn _local() {
+ match E::A { _ => {} }
+ match E::A {
+ E::A => {}
+ E::B => {}
+ }
+ match E::A {
+ E::A | E::B => {}
+ }
+}
+
+//- /main.rs crate:main deps:lib
+use lib::E;
+fn main() {
+ match E::A { _ => {} }
+ match E::A {
+ //^^^^ error: missing match arm: `_` not covered
+ E::A => {}
+ E::B => {}
+ }
+ match E::A {
+ //^^^^ error: missing match arm: `_` not covered
+ E::A | E::B => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn match_guard() {
+ check_diagnostics_no_bails(
+ r#"
+fn main() {
+ match true {
+ true if false => {}
+ true => {}
+ false => {}
+ }
+ match true {
+ //^^^^ error: missing match arm: `true` not covered
+ true if false => {}
+ false => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn pattern_type_is_of_substitution() {
+ check_diagnostics_no_bails(
+ r#"
+struct Foo<T>(T);
+struct Bar;
+fn main() {
+ match Foo(Bar) {
+ _ | Foo(Bar) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn record_struct_no_such_field() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+struct Foo { }
+fn main(f: Foo) {
+ match f { Foo { bar } => () }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn match_ergonomics_issue_9095() {
+ check_diagnostics_no_bails(
+ r#"
+enum Foo<T> { A(T) }
+fn main() {
+ match &Foo::A(true) {
+ _ => {}
+ Foo::A(_) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn normalize_field_ty() {
+ check_diagnostics_no_bails(
+ r"
+trait Trait { type Projection; }
+enum E {Foo, Bar}
+struct A;
+impl Trait for A { type Projection = E; }
+struct Next<T: Trait>(T::Projection);
+static __: () = {
+ let n: Next<A> = Next(E::Foo);
+ match n { Next(E::Foo) => {} }
+ // ^ error: missing match arm: `Next(Bar)` not covered
+ match n { Next(E::Foo | E::Bar) => {} }
+ match n { Next(E::Foo | _ ) => {} }
+ match n { Next(_ | E::Bar) => {} }
+ match n { _ | Next(E::Bar) => {} }
+ match &n { Next(E::Foo | E::Bar) => {} }
+ match &n { _ | Next(E::Bar) => {} }
+};",
+ );
+ }
+
+ #[test]
+ fn binding_mode_by_ref() {
+ check_diagnostics_no_bails(
+ r"
+enum E{ A, B }
+fn foo() {
+ match &E::A {
+ E::A => {}
+ x => {}
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn macro_or_pat() {
+ check_diagnostics_no_bails(
+ r#"
+macro_rules! m {
+ () => {
+ Enum::Type1 | Enum::Type2
+ };
+}
+
+enum Enum {
+ Type1,
+ Type2,
+ Type3,
+}
+
+fn f(ty: Enum) {
+ match ty {
+ //^^ error: missing match arm: `Type3` not covered
+ m!() => (),
+ }
+
+ match ty {
+ m!() | Enum::Type3 => ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unexpected_ty_fndef() {
+ cov_mark::check!(validate_match_bailed_out);
+ check_diagnostics(
+ r"
+enum Exp {
+ Tuple(()),
+}
+fn f() {
+ match __unknown {
+ Exp::Tuple => {}
+ }
+}",
+ );
+ }
+
+ mod false_negatives {
+ //! The implementation of match checking here is a work in progress. As we roll this out, we
+ //! prefer false negatives to false positives (ideally there would be no false positives). This
+ //! test module should document known false negatives. Eventually we will have a complete
+ //! implementation of match checking and this module will be empty.
+ //!
+ //! The reasons for documenting known false negatives:
+ //!
+ //! 1. It acts as a backlog of work that can be done to improve the behavior of the system.
+ //! 2. It ensures the code doesn't panic when handling these cases.
+ use super::*;
+
+ #[test]
+ fn integers() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ // We don't currently check integer exhaustiveness.
+ check_diagnostics(
+ r#"
+fn main() {
+ match 5 {
+ 10 => (),
+ 11..20 => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_patterns_at_top_level() {
+ cov_mark::check_count!(validate_match_bailed_out, 1);
+
+ check_diagnostics(
+ r#"
+fn main() {
+ match &false {
+ &true => {}
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn reference_patterns_in_fields() {
+ cov_mark::check_count!(validate_match_bailed_out, 2);
+
+ check_diagnostics(
+ r#"
+fn main() {
+ match (&false,) {
+ (true,) => {}
+ }
+ match (&false,) {
+ (&true,) => {}
+ }
+}
+ "#,
+ );
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
new file mode 100644
index 000000000..7acd9228a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -0,0 +1,101 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: missing-unsafe
+//
+// This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
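+//
+// A minimal sketch of code that would trigger it (illustrative only; names are placeholders):
+//
+// ```rust
+// unsafe fn dangerous() {}
+//
+// fn caller() {
+//     dangerous(); // error: this operation is unsafe and requires an unsafe function or block
+//     unsafe { dangerous(); } // ok
+// }
+// ```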
+pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic {
+ Diagnostic::new(
+ "missing-unsafe",
+ "this operation is unsafe and requires an unsafe function or block",
+ ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_raw_ptr() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let x = &5 as *const usize;
+ unsafe { let y = *x; }
+ let z = *x;
+} //^^ error: this operation is unsafe and requires an unsafe function or block
+"#,
+ )
+ }
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_unsafe_call() {
+ check_diagnostics(
+ r#"
+struct HasUnsafe;
+
+impl HasUnsafe {
+ unsafe fn unsafe_fn(&self) {
+ let x = &5 as *const usize;
+ let y = *x;
+ }
+}
+
+unsafe fn unsafe_fn() {
+ let x = &5 as *const usize;
+ let y = *x;
+}
+
+fn main() {
+ unsafe_fn();
+ //^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ HasUnsafe.unsafe_fn();
+ //^^^^^^^^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ unsafe {
+ unsafe_fn();
+ HasUnsafe.unsafe_fn();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_static_mut() {
+ check_diagnostics(
+ r#"
+struct Ty {
+ a: u8,
+}
+
+static mut STATIC_MUT: Ty = Ty { a: 0 };
+
+fn main() {
+ let x = STATIC_MUT.a;
+ //^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+ unsafe {
+ let x = STATIC_MUT.a;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_missing_unsafe_diagnostic_with_safe_intrinsic() {
+ check_diagnostics(
+ r#"
+extern "rust-intrinsic" {
+ pub fn bitreverse(x: u32) -> u32; // Safe intrinsic
+ pub fn floorf32(x: f32) -> f32; // Unsafe intrinsic
+}
+
+fn main() {
+ let _ = bitreverse(12);
+ let _ = floorf32(12.0);
+ //^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
new file mode 100644
index 000000000..e032c578f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -0,0 +1,283 @@
+use hir::{db::AstDatabase, HasSource, HirDisplay, Semantics};
+use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
+use syntax::{
+ ast::{self, edit::IndentLevel, make},
+ AstNode,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: no-such-field
+//
+// This diagnostic is triggered if a record literal provides a field that the created structure does not have.
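+//
+// A minimal sketch of code that would trigger it (illustrative only; names are placeholders):
+//
+// ```rust
+// struct S { foo: i32 }
+//
+// fn f() {
+//     let _ = S { foo: 92, bar: 62 }; // error: no such field (`bar` does not exist on `S`)
+// }
+// ```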
+pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
+ Diagnostic::new(
+ "no-such-field",
+ "no such field",
+ ctx.sema.diagnostics_display_range(d.field.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.field.file_id)?;
+ missing_record_expr_field_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ &d.field.value.to_node(&root),
+ )
+}
+
+fn missing_record_expr_field_fixes(
+ sema: &Semantics<'_, RootDatabase>,
+ usage_file_id: FileId,
+ record_expr_field: &ast::RecordExprField,
+) -> Option<Vec<Assist>> {
+ let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;
+ let def_id = sema.resolve_variant(record_lit)?;
+ let module;
+ let def_file_id;
+ let record_fields = match def_id {
+ hir::VariantDef::Struct(s) => {
+ module = s.module(sema.db);
+ let source = s.source(sema.db)?;
+ def_file_id = source.file_id;
+ let fields = source.value.field_list()?;
+ record_field_list(fields)?
+ }
+ hir::VariantDef::Union(u) => {
+ module = u.module(sema.db);
+ let source = u.source(sema.db)?;
+ def_file_id = source.file_id;
+ source.value.record_field_list()?
+ }
+ hir::VariantDef::Variant(e) => {
+ module = e.module(sema.db);
+ let source = e.source(sema.db)?;
+ def_file_id = source.file_id;
+ let fields = source.value.field_list()?;
+ record_field_list(fields)?
+ }
+ };
+ let def_file_id = def_file_id.original_file(sema.db);
+
+ let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?.adjusted();
+ if new_field_type.is_unknown() {
+ return None;
+ }
+ let new_field = make::record_field(
+ None,
+ make::name(&record_expr_field.field_name()?.text()),
+ make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
+ );
+
+ let last_field = record_fields.fields().last()?;
+ let last_field_syntax = last_field.syntax();
+ let indent = IndentLevel::from_node(last_field_syntax);
+
+ let mut new_field = new_field.to_string();
+ if usage_file_id != def_file_id {
+ new_field = format!("pub(crate) {}", new_field);
+ }
+ new_field = format!("\n{}{}", indent, new_field);
+
+ let needs_comma = !last_field_syntax.to_string().ends_with(',');
+ if needs_comma {
+ new_field = format!(",{}", new_field);
+ }
+
+ let source_change = SourceChange::from_text_edit(
+ def_file_id,
+ TextEdit::insert(last_field_syntax.text_range().end(), new_field),
+ );
+
+ return Some(vec![fix(
+ "create_field",
+ "Create field",
+ source_change,
+ record_expr_field.syntax().text_range(),
+ )]);
+
+ fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
+ match field_def_list {
+ ast::FieldList::RecordFieldList(it) => Some(it),
+ ast::FieldList::TupleFieldList(_) => None,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn no_such_field_diagnostics() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+impl S {
+ fn new() -> S {
+ S {
+ //^ 💡 error: missing structure fields:
+ //| - bar
+ foo: 92,
+ baz: 62,
+ //^^^^^^^ 💡 error: no such field
+ }
+ }
+}
+"#,
+ );
+ }
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct MyStruct {
+ my_val: usize,
+ #[cfg(feature = "foo")]
+ bar: bool,
+}
+
+impl MyStruct {
+ #[cfg(feature = "foo")]
+ pub(crate) fn new(my_val: usize, bar: bool) -> Self {
+ Self { my_val, bar }
+ }
+ #[cfg(not(feature = "foo"))]
+ pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
+ Self { my_val }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_enum_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+enum Foo {
+ #[cfg(not(feature = "foo"))]
+ Buz,
+ #[cfg(feature = "foo")]
+ Bar,
+ Baz
+}
+
+fn test_fn(f: Foo) {
+ match f {
+ Foo::Bar => {},
+ Foo::Baz => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {
+ #[cfg(feature = "foo")]
+ foo: u32,
+ #[cfg(not(feature = "foo"))]
+ bar: u32,
+}
+
+impl S {
+ #[cfg(feature = "foo")]
+ fn new(foo: u32) -> Self {
+ Self { foo }
+ }
+ #[cfg(not(feature = "foo"))]
+ fn new(bar: u32) -> Self {
+ Self { bar }
+ }
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { foo: bar } }
+ #[cfg(not(feature = "foo"))]
+ { Self { bar } }
+ }
+ fn new2(val: u32) -> Self {
+ Self {
+ #[cfg(feature = "foo")]
+ foo: val,
+ #[cfg(not(feature = "foo"))]
+ bar: val,
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_type_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! Type { () => { u32 }; }
+struct Foo { bar: Type![] }
+
+impl Foo {
+ fn new() -> Self {
+ Foo { bar: 0 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_field_from_usage() {
+ check_fix(
+ r"
+fn main() {
+ Foo { bar: 3, baz$0: false};
+}
+struct Foo {
+ bar: i32
+}
+",
+ r"
+fn main() {
+ Foo { bar: 3, baz: false};
+}
+struct Foo {
+ bar: i32,
+ baz: bool
+}
+",
+ )
+ }
+
+ #[test]
+ fn test_add_field_in_other_file_from_usage() {
+ check_fix(
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ foo::Foo { bar: 3, $0baz: false};
+}
+//- /foo.rs
+struct Foo {
+ bar: i32
+}
+"#,
+ r#"
+struct Foo {
+ bar: i32,
+ pub(crate) baz: bool
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
new file mode 100644
index 000000000..9826e1c70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -0,0 +1,131 @@
+use hir::{db::AstDatabase, InFile};
+use ide_db::source_change::SourceChange;
+use syntax::{
+ ast::{self, HasArgList},
+ AstNode, TextRange,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: replace-filter-map-next-with-find-map
+//
+// This diagnostic is triggered when `.filter_map(..).next()` is used, rather than the more concise `.find_map(..)`.
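+//
+// A minimal sketch (illustrative only):
+//
+// ```rust
+// fn f() {
+//     // weak warning: replace filter_map(..).next() with find_map(..)
+//     let _ = (0..10).filter_map(|n| if n % 2 == 0 { Some(n * n) } else { None }).next();
+//
+//     // preferred form:
+//     let _ = (0..10).find_map(|n| if n % 2 == 0 { Some(n * n) } else { None });
+// }
+// ```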
+pub(crate) fn replace_filter_map_next_with_find_map(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::ReplaceFilterMapNextWithFindMap,
+) -> Diagnostic {
+ Diagnostic::new(
+ "replace-filter-map-next-with-find-map",
+ "replace filter_map(..).next() with find_map(..)",
+ ctx.sema.diagnostics_display_range(InFile::new(d.file, d.next_expr.clone().into())).range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::ReplaceFilterMapNextWithFindMap,
+) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.file)?;
+ let next_expr = d.next_expr.to_node(&root);
+ let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;
+
+ let filter_map_call = ast::MethodCallExpr::cast(next_call.receiver()?.syntax().clone())?;
+ let filter_map_name_range = filter_map_call.name_ref()?.ident_token()?.text_range();
+ let filter_map_args = filter_map_call.arg_list()?;
+
+ let range_to_replace =
+ TextRange::new(filter_map_name_range.start(), next_expr.syntax().text_range().end());
+ let replacement = format!("find_map{}", filter_map_args.syntax().text());
+ let trigger_range = next_expr.syntax().text_range();
+
+ let edit = TextEdit::replace(range_to_replace, replacement);
+
+ let source_change = SourceChange::from_text_edit(d.file.original_file(ctx.sema.db), edit);
+
+ Some(vec![fix(
+ "replace_with_find_map",
+ "Replace filter_map(..).next() with find_map()",
+ source_change,
+ trigger_range,
+ )])
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn replace_filter_map_next_with_find_map2() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_filter_map_next_with_find_map_no_diagnostic_without_next() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(())
+ .filter_map(|()| Some(92))
+ .count();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_filter_map_next_with_find_map_no_diagnostic_with_intervening_methods() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(())
+ .filter_map(|()| Some(92))
+ .map(|x| x + 2)
+ .next();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_filter_map_next_with_find_map_no_diagnostic_if_not_in_chain() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(())
+ .filter_map(|()| Some(92));
+ let n = m.next();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_with_find_map() {
+ check_fix(
+ r#"
+//- minicore: iterators
+fn foo() {
+ let m = core::iter::repeat(()).$0filter_map(|()| Some(92)).next();
+}
+"#,
+ r#"
+fn foo() {
+ let m = core::iter::repeat(()).find_map(|()| Some(92));
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
new file mode 100644
index 000000000..6bf90e645
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -0,0 +1,573 @@
+use hir::{db::AstDatabase, HirDisplay, Type};
+use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
+use syntax::{
+ ast::{self, BlockExpr, ExprStmt},
+ AstNode,
+};
+use text_edit::TextEdit;
+
+use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: type-mismatch
+//
+// This diagnostic is triggered when the type of an expression does not match
+// the expected type.
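+//
+// A minimal sketch of code that would trigger it (illustrative only; names are placeholders):
+//
+// ```rust
+// fn takes_ref(x: &i32) {}
+//
+// fn f() {
+//     takes_ref(92); // error: expected &i32, found i32 (a quick fix offers to insert `&`)
+// }
+// ```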
+pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
+ let display_range = adjusted_display_range::<ast::BlockExpr>(
+ ctx,
+ d.expr.clone().map(|it| it.into()),
+ &|block| {
+ let r_curly_range = block.stmt_list()?.r_curly_token()?.text_range();
+ cov_mark::hit!(type_mismatch_on_block);
+ Some(r_curly_range)
+ },
+ );
+
+ let mut diag = Diagnostic::new(
+ "type-mismatch",
+ format!(
+ "expected {}, found {}",
+ d.expected.display(ctx.sema.db),
+ d.actual.display(ctx.sema.db)
+ ),
+ display_range,
+ )
+ .with_fixes(fixes(ctx, d));
+ if diag.fixes.is_none() {
+ diag.experimental = true;
+ }
+ diag
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
+ let mut fixes = Vec::new();
+
+ add_reference(ctx, d, &mut fixes);
+ add_missing_ok_or_some(ctx, d, &mut fixes);
+ remove_semicolon(ctx, d, &mut fixes);
+ str_ref_to_owned(ctx, d, &mut fixes);
+
+ if fixes.is_empty() {
+ None
+ } else {
+ Some(fixes)
+ }
+}
+
+fn add_reference(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch,
+ acc: &mut Vec<Assist>,
+) -> Option<()> {
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr_node = d.expr.value.to_node(&root);
+
+ let range = ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range;
+
+ let (_, mutability) = d.expected.as_reference()?;
+ let actual_with_ref = Type::reference(&d.actual, mutability);
+ if !actual_with_ref.could_coerce_to(ctx.sema.db, &d.expected) {
+ return None;
+ }
+
+ let ampersands = format!("&{}", mutability.as_keyword_for_ref());
+
+ let edit = TextEdit::insert(expr_node.syntax().text_range().start(), ampersands);
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ acc.push(fix("add_reference_here", "Add reference here", source_change, range));
+ Some(())
+}
+
+fn add_missing_ok_or_some(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch,
+ acc: &mut Vec<Assist>,
+) -> Option<()> {
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr = d.expr.value.to_node(&root);
+ let expr_range = expr.syntax().text_range();
+ let scope = ctx.sema.scope(expr.syntax())?;
+
+ let expected_adt = d.expected.as_adt()?;
+ let expected_enum = expected_adt.as_enum()?;
+
+ let famous_defs = FamousDefs(&ctx.sema, scope.krate());
+ let core_result = famous_defs.core_result_Result();
+ let core_option = famous_defs.core_option_Option();
+
+ if Some(expected_enum) != core_result && Some(expected_enum) != core_option {
+ return None;
+ }
+
+ let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" };
+
+ let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]);
+
+ if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) {
+ return None;
+ }
+
+ let mut builder = TextEdit::builder();
+ builder.insert(expr.syntax().text_range().start(), format!("{}(", variant_name));
+ builder.insert(expr.syntax().text_range().end(), ")".to_string());
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), builder.finish());
+ let name = format!("Wrap in {}", variant_name);
+ acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
+ Some(())
+}
+
+fn remove_semicolon(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch,
+ acc: &mut Vec<Assist>,
+) -> Option<()> {
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr = d.expr.value.to_node(&root);
+ if !d.actual.is_unit() {
+ return None;
+ }
+ let block = BlockExpr::cast(expr.syntax().clone())?;
+ let expr_before_semi =
+ block.statements().last().and_then(|s| ExprStmt::cast(s.syntax().clone()))?;
+ let type_before_semi = ctx.sema.type_of_expr(&expr_before_semi.expr()?)?.original();
+ if !type_before_semi.could_coerce_to(ctx.sema.db, &d.expected) {
+ return None;
+ }
+ let semicolon_range = expr_before_semi.semicolon_token()?.text_range();
+
+ let edit = TextEdit::delete(semicolon_range);
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+
+ acc.push(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon_range));
+ Some(())
+}
+
+fn str_ref_to_owned(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TypeMismatch,
+ acc: &mut Vec<Assist>,
+) -> Option<()> {
+ let expected = d.expected.display(ctx.sema.db);
+ let actual = d.actual.display(ctx.sema.db);
+
+ if expected.to_string() != "String" || actual.to_string() != "&str" {
+ return None;
+ }
+
+ let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
+ let expr = d.expr.value.to_node(&root);
+ let expr_range = expr.syntax().text_range();
+
+ let to_owned = format!(".to_owned()");
+
+ let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
+ let source_change =
+ SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
+ acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range));
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix, check_no_fix};
+
+ #[test]
+ fn missing_reference() {
+ check_diagnostics(
+ r#"
+fn main() {
+ test(123);
+ //^^^ 💡 error: expected &i32, found i32
+}
+fn test(arg: &i32) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_reference_to_int() {
+ check_fix(
+ r#"
+fn main() {
+ test(123$0);
+}
+fn test(arg: &i32) {}
+ "#,
+ r#"
+fn main() {
+ test(&123);
+}
+fn test(arg: &i32) {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_mutable_reference_to_int() {
+ check_fix(
+ r#"
+fn main() {
+ test($0123);
+}
+fn test(arg: &mut i32) {}
+ "#,
+ r#"
+fn main() {
+ test(&mut 123);
+}
+fn test(arg: &mut i32) {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_reference_to_array() {
+ check_fix(
+ r#"
+//- minicore: coerce_unsized
+fn main() {
+ test($0[1, 2, 3]);
+}
+fn test(arg: &[i32]) {}
+ "#,
+ r#"
+fn main() {
+ test(&[1, 2, 3]);
+}
+fn test(arg: &[i32]) {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_reference_with_autoderef() {
+ check_fix(
+ r#"
+//- minicore: coerce_unsized, deref
+struct Foo;
+struct Bar;
+impl core::ops::Deref for Foo {
+ type Target = Bar;
+}
+
+fn main() {
+ test($0Foo);
+}
+fn test(arg: &Bar) {}
+ "#,
+ r#"
+struct Foo;
+struct Bar;
+impl core::ops::Deref for Foo {
+ type Target = Bar;
+}
+
+fn main() {
+ test(&Foo);
+}
+fn test(arg: &Bar) {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_reference_to_method_call() {
+ check_fix(
+ r#"
+fn main() {
+ Test.call_by_ref($0123);
+}
+struct Test;
+impl Test {
+ fn call_by_ref(&self, arg: &i32) {}
+}
+ "#,
+ r#"
+fn main() {
+ Test.call_by_ref(&123);
+}
+struct Test;
+impl Test {
+ fn call_by_ref(&self, arg: &i32) {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_reference_to_let_stmt() {
+ check_fix(
+ r#"
+fn main() {
+ let test: &i32 = $0123;
+}
+ "#,
+ r#"
+fn main() {
+ let test: &i32 = &123;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_mutable_reference_to_let_stmt() {
+ check_fix(
+ r#"
+fn main() {
+ let test: &mut i32 = $0123;
+}
+ "#,
+ r#"
+fn main() {
+ let test: &mut i32 = &mut 123;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type_option() {
+ check_fix(
+ r#"
+//- minicore: option, result
+fn div(x: i32, y: i32) -> Option<i32> {
+ if y == 0 {
+ return None;
+ }
+ x / y$0
+}
+"#,
+ r#"
+fn div(x: i32, y: i32) -> Option<i32> {
+ if y == 0 {
+ return None;
+ }
+ Some(x / y)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn const_generic_type_mismatch() {
+ check_diagnostics(
+ r#"
+ pub struct Rate<const N: u32>;
+ fn f<const N: u64>() -> Rate<N> { // FIXME: add some error
+ loop {}
+ }
+ fn run(t: Rate<5>) {
+ }
+ fn main() {
+ run(f()) // FIXME: remove this error
+ //^^^ error: expected Rate<5>, found Rate<_>
+ }
+"#,
+ );
+ }
+
+ #[test]
+ fn const_generic_unknown() {
+ check_diagnostics(
+ r#"
+ pub struct Rate<T, const NOM: u32, const DENOM: u32>(T);
+ fn run(t: Rate<u32, 1, 1>) {
+ }
+ fn main() {
+ run(Rate::<_, _, _>(5));
+ }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type_option_tails() {
+ check_fix(
+ r#"
+//- minicore: option, result
+fn div(x: i32, y: i32) -> Option<i32> {
+ if y == 0 {
+ Some(0)
+ } else if true {
+ 100$0
+ } else {
+ None
+ }
+}
+"#,
+ r#"
+fn div(x: i32, y: i32) -> Option<i32> {
+ if y == 0 {
+ Some(0)
+ } else if true {
+ Some(100)
+ } else {
+ None
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type() {
+ check_fix(
+ r#"
+//- minicore: option, result
+fn div(x: i32, y: i32) -> Result<i32, ()> {
+ if y == 0 {
+ return Err(());
+ }
+ x / y$0
+}
+"#,
+ r#"
+fn div(x: i32, y: i32) -> Result<i32, ()> {
+ if y == 0 {
+ return Err(());
+ }
+ Ok(x / y)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type_handles_generic_functions() {
+ check_fix(
+ r#"
+//- minicore: option, result
+fn div<T>(x: T) -> Result<T, i32> {
+ if x == 0 {
+ return Err(7);
+ }
+ $0x
+}
+"#,
+ r#"
+fn div<T>(x: T) -> Result<T, i32> {
+ if x == 0 {
+ return Err(7);
+ }
+ Ok(x)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type_handles_type_aliases() {
+ check_fix(
+ r#"
+//- minicore: option, result
+type MyResult<T> = Result<T, ()>;
+
+fn div(x: i32, y: i32) -> MyResult<i32> {
+ if y == 0 {
+ return Err(());
+ }
+ x $0/ y
+}
+"#,
+ r#"
+type MyResult<T> = Result<T, ()>;
+
+fn div(x: i32, y: i32) -> MyResult<i32> {
+ if y == 0 {
+ return Err(());
+ }
+ Ok(x / y)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_in_const_and_static() {
+ check_fix(
+ r#"
+//- minicore: option, result
+static A: Option<()> = {($0)};
+ "#,
+ r#"
+static A: Option<()> = {Some(())};
+ "#,
+ );
+ check_fix(
+ r#"
+//- minicore: option, result
+const _: Option<()> = {($0)};
+ "#,
+ r#"
+const _: Option<()> = {Some(())};
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() {
+ check_no_fix(
+ r#"
+//- minicore: option, result
+fn foo() -> Result<(), i32> { 0$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_wrap_return_type_not_applicable_when_return_type_is_not_result_or_option() {
+ check_no_fix(
+ r#"
+//- minicore: option, result
+enum SomeOtherEnum { Ok(i32), Err(String) }
+
+fn foo() -> SomeOtherEnum { 0$0 }
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_semicolon() {
+ check_fix(r#"fn f() -> i32 { 92$0; }"#, r#"fn f() -> i32 { 92 }"#);
+ }
+
+ #[test]
+ fn str_ref_to_owned() {
+ check_fix(
+ r#"
+struct String;
+
+fn test() -> String {
+ "a"$0
+}
+ "#,
+ r#"
+struct String;
+
+fn test() -> String {
+ "a".to_owned()
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn type_mismatch_on_block() {
+ cov_mark::check!(type_mismatch_on_block);
+ check_diagnostics(
+ r#"
+fn f() -> i32 {
+ let x = 1;
+ let y = 2;
+ let _ = x + y;
+ }
+//^ error: expected i32, found ()
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
new file mode 100644
index 000000000..e879de75c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
@@ -0,0 +1,16 @@
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: unimplemented-builtin-macro
+//
+// This diagnostic is shown for built-in macros that are not yet implemented by rust-analyzer.
+pub(crate) fn unimplemented_builtin_macro(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnimplementedBuiltinMacro,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unimplemented-builtin-macro",
+ "unimplemented built-in macro".to_string(),
+ ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ )
+ .severity(Severity::WeakWarning)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
new file mode 100644
index 000000000..c626932f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -0,0 +1,336 @@
+//! Diagnostic emitted for files that aren't part of any crate.
+
+use hir::db::DefDatabase;
+use ide_db::{
+ base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
+ source_change::SourceChange,
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, HasModuleItem, HasName},
+ AstNode, TextRange, TextSize,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: unlinked-file
+//
+// This diagnostic is shown for files that are not included in any crate, or files that are part of
+// crates rust-analyzer failed to discover. The file will not have IDE features available.
+pub(crate) fn unlinked_file(
+ ctx: &DiagnosticsContext<'_>,
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+) {
+ // Limit the diagnostic to the first few characters of the file. This renders the same
+ // way VS Code renders the full span, but is less invasive in other editors.
+ let range = ctx.sema.db.parse(file_id).syntax_node().text_range();
+ // FIXME: This is wrong if one of the first three characters is not ascii: `//Ы`.
+ let range = range.intersect(TextRange::up_to(TextSize::of("..."))).unwrap_or(range);
+
+ acc.push(
+ Diagnostic::new("unlinked-file", "file not included in module tree", range)
+ .severity(Severity::WeakWarning)
+ .with_fixes(fixes(ctx, file_id)),
+ );
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
+ // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file,
+ // suggest that as a fix.
+
+ let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id));
+ let our_path = source_root.path_for_file(&file_id)?;
+ let (mut module_name, _) = our_path.name_and_extension()?;
+
+ // Candidates to look for:
+ // - `mod.rs`, `main.rs` and `lib.rs` in the same folder
+ // - `$dir.rs` in the parent folder, where `$dir` is the directory containing `self.file_id`
+ let parent = our_path.parent()?;
+ let paths = {
+ let parent = if module_name == "mod" {
+ // for mod.rs we need to actually look up one higher
+ // and take the parent as our to be module name
+ let (name, _) = parent.name_and_extension()?;
+ module_name = name;
+ parent.parent()?
+ } else {
+ parent
+ };
+ let mut paths =
+ vec![parent.join("mod.rs")?, parent.join("lib.rs")?, parent.join("main.rs")?];
+
+ // `submod/bla.rs` -> `submod.rs`
+ let parent_mod = (|| {
+ let (name, _) = parent.name_and_extension()?;
+ parent.parent()?.join(&format!("{}.rs", name))
+ })();
+ paths.extend(parent_mod);
+ paths
+ };
+
+ for &parent_id in paths.iter().filter_map(|path| source_root.file_for_path(path)) {
+ for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
+ let crate_def_map = ctx.sema.db.crate_def_map(krate);
+ for (_, module) in crate_def_map.modules() {
+ if module.origin.is_inline() {
+ // We don't handle inline `mod parent {}`s, they use different paths.
+ continue;
+ }
+
+ if module.origin.file_id() == Some(parent_id) {
+ return make_fixes(ctx.sema.db, parent_id, module_name, file_id);
+ }
+ }
+ }
+ }
+
+ None
+}
+
+fn make_fixes(
+ db: &RootDatabase,
+ parent_file_id: FileId,
+ new_mod_name: &str,
+ added_file_id: FileId,
+) -> Option<Vec<Assist>> {
+ fn is_outline_mod(item: &ast::Item) -> bool {
+ matches!(item, ast::Item::Module(m) if m.item_list().is_none())
+ }
+
+ let mod_decl = format!("mod {};", new_mod_name);
+ let pub_mod_decl = format!("pub mod {};", new_mod_name);
+
+ let ast: ast::SourceFile = db.parse(parent_file_id).tree();
+
+ let mut mod_decl_builder = TextEdit::builder();
+ let mut pub_mod_decl_builder = TextEdit::builder();
+
+ // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's
+ // probably `#[cfg]`d out).
+ for item in ast.items() {
+ if let ast::Item::Module(m) = item {
+ if let Some(name) = m.name() {
+ if m.item_list().is_none() && name.to_string() == new_mod_name {
+ cov_mark::hit!(unlinked_file_skip_fix_when_mod_already_exists);
+ return None;
+ }
+ }
+ }
+ }
+
+ // If there are existing `mod m;` items, append after them (after the first group of them, rather).
+ match ast.items().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() {
+ Some(last) => {
+ cov_mark::hit!(unlinked_file_append_to_existing_mods);
+ let offset = last.syntax().text_range().end();
+ mod_decl_builder.insert(offset, format!("\n{}", mod_decl));
+ pub_mod_decl_builder.insert(offset, format!("\n{}", pub_mod_decl));
+ }
+ None => {
+ // Prepend before the first item in the file.
+ match ast.items().next() {
+ Some(item) => {
+ cov_mark::hit!(unlinked_file_prepend_before_first_item);
+ let offset = item.syntax().text_range().start();
+ mod_decl_builder.insert(offset, format!("{}\n\n", mod_decl));
+ pub_mod_decl_builder.insert(offset, format!("{}\n\n", pub_mod_decl));
+ }
+ None => {
+ // No items in the file, so just append at the end.
+ cov_mark::hit!(unlinked_file_empty_file);
+ let offset = ast.syntax().text_range().end();
+ mod_decl_builder.insert(offset, format!("{}\n", mod_decl));
+ pub_mod_decl_builder.insert(offset, format!("{}\n", pub_mod_decl));
+ }
+ }
+ }
+ }
+
+ let trigger_range = db.parse(added_file_id).tree().syntax().text_range();
+ Some(vec![
+ fix(
+ "add_mod_declaration",
+ &format!("Insert `{}`", mod_decl),
+ SourceChange::from_text_edit(parent_file_id, mod_decl_builder.finish()),
+ trigger_range,
+ ),
+ fix(
+ "add_pub_mod_declaration",
+ &format!("Insert `{}`", pub_mod_decl),
+ SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()),
+ trigger_range,
+ ),
+ ])
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::tests::{check_diagnostics, check_fix, check_fixes, check_no_fix};
+
+ #[test]
+ fn unlinked_file_prepend_first_item() {
+ cov_mark::check!(unlinked_file_prepend_before_first_item);
+ // Only tests the first one for `pub mod` since the rest are the same
+ check_fixes(
+ r#"
+//- /main.rs
+fn f() {}
+//- /foo.rs
+$0
+"#,
+ vec![
+ r#"
+mod foo;
+
+fn f() {}
+"#,
+ r#"
+pub mod foo;
+
+fn f() {}
+"#,
+ ],
+ );
+ }
+
+ #[test]
+ fn unlinked_file_append_mod() {
+ cov_mark::check!(unlinked_file_append_to_existing_mods);
+ check_fix(
+ r#"
+//- /main.rs
+//! Comment on top
+
+mod preexisting;
+
+mod preexisting2;
+
+struct S;
+
+mod preexisting_bottom;)
+//- /foo.rs
+$0
+"#,
+ r#"
+//! Comment on top
+
+mod preexisting;
+
+mod preexisting2;
+mod foo;
+
+struct S;
+
+mod preexisting_bottom;)
+"#,
+ );
+ }
+
+ #[test]
+ fn unlinked_file_insert_in_empty_file() {
+ cov_mark::check!(unlinked_file_empty_file);
+ check_fix(
+ r#"
+//- /main.rs
+//- /foo.rs
+$0
+"#,
+ r#"
+mod foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn unlinked_file_insert_in_empty_file_mod_file() {
+ check_fix(
+ r#"
+//- /main.rs
+//- /foo/mod.rs
+$0
+"#,
+ r#"
+mod foo;
+"#,
+ );
+ check_fix(
+ r#"
+//- /main.rs
+mod bar;
+//- /bar.rs
+// bar module
+//- /bar/foo/mod.rs
+$0
+"#,
+ r#"
+// bar module
+mod foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn unlinked_file_old_style_modrs() {
+ check_fix(
+ r#"
+//- /main.rs
+mod submod;
+//- /submod/mod.rs
+// in mod.rs
+//- /submod/foo.rs
+$0
+"#,
+ r#"
+// in mod.rs
+mod foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn unlinked_file_new_style_mod() {
+ check_fix(
+ r#"
+//- /main.rs
+mod submod;
+//- /submod.rs
+//- /submod/foo.rs
+$0
+"#,
+ r#"
+mod foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn unlinked_file_with_cfg_off() {
+ cov_mark::check!(unlinked_file_skip_fix_when_mod_already_exists);
+ check_no_fix(
+ r#"
+//- /main.rs
+#[cfg(never)]
+mod foo;
+
+//- /foo.rs
+$0
+"#,
+ );
+ }
+
+ #[test]
+ fn unlinked_file_with_cfg_on() {
+ check_diagnostics(
+ r#"
+//- /main.rs
+#[cfg(not(never))]
+mod foo;
+
+//- /foo.rs
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
new file mode 100644
index 000000000..74e4a69c6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -0,0 +1,49 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-extern-crate
+//
+// This diagnostic is triggered if rust-analyzer is unable to discover the referenced extern crate.
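+//
+// A minimal sketch (illustrative only; the crate name is a placeholder):
+//
+// ```rust
+// extern crate does_not_exist; // error: unresolved extern crate
+// ```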
+pub(crate) fn unresolved_extern_crate(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedExternCrate,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unresolved-extern-crate",
+ "unresolved extern crate",
+ ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn unresolved_extern_crate() {
+ check_diagnostics(
+ r#"
+//- /main.rs crate:main deps:core
+extern crate core;
+ extern crate doesnotexist;
+//^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
+//- /lib.rs crate:core
+"#,
+ );
+ }
+
+ #[test]
+ fn extern_crate_self_as() {
+ cov_mark::check!(extern_crate_self_as);
+ check_diagnostics(
+ r#"
+//- /lib.rs
+ extern crate doesnotexist;
+//^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
+// Should not error.
+extern crate self as foo;
+struct Foo;
+use foo::Foo as Bar;
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
new file mode 100644
index 000000000..e52a88459
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
@@ -0,0 +1,90 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-import
+//
+// This diagnostic is triggered if rust-analyzer is unable to resolve a path in
+// a `use` declaration.
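+//
+// A minimal sketch (illustrative only; the path is a placeholder):
+//
+// ```rust
+// use does_not_exist::Thing; // error: unresolved import
+// ```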
+pub(crate) fn unresolved_import(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedImport,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unresolved-import",
+ "unresolved import",
+ ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ )
+ // This currently results in false positives in the following cases:
+ // - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly)
+ // - `core::arch` (we don't handle `#[path = "../<path>"]` correctly)
+ // - proc macros and/or proc macro generated code
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn unresolved_import() {
+ check_diagnostics(
+ r#"
+use does_exist;
+use does_not_exist;
+ //^^^^^^^^^^^^^^ error: unresolved import
+
+mod does_exist {}
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolved_import_in_use_tree() {
+ // Only the relevant part of a nested `use` item should be highlighted.
+ check_diagnostics(
+ r#"
+use does_exist::{Exists, DoesntExist};
+ //^^^^^^^^^^^ error: unresolved import
+
+use {does_not_exist::*, does_exist};
+ //^^^^^^^^^^^^^^^^^ error: unresolved import
+
+use does_not_exist::{
+ a,
+ //^ error: unresolved import
+ b,
+ //^ error: unresolved import
+ c,
+ //^ error: unresolved import
+};
+
+mod does_exist {
+ pub struct Exists;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn dedup_unresolved_import_from_unresolved_crate() {
+ check_diagnostics(
+ r#"
+//- /main.rs crate:main
+mod a {
+ extern crate doesnotexist;
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
+
+ // Should not error, since we already errored for the missing crate.
+ use doesnotexist::{self, bla, *};
+
+ use crate::doesnotexist;
+ //^^^^^^^^^^^^^^^^^^^ error: unresolved import
+}
+
+mod m {
+ use super::doesnotexist;
+ //^^^^^^^^^^^^^^^^^^^ error: unresolved import
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
new file mode 100644
index 000000000..4b4312475
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -0,0 +1,76 @@
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-macro-call
+//
+// This diagnostic is triggered if rust-analyzer is unable to resolve the path
+// to a macro in a macro invocation.
+pub(crate) fn unresolved_macro_call(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMacroCall,
+) -> Diagnostic {
+ // Use more accurate position if available.
+ let display_range = d
+ .precise_location
+ .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.macro_call.clone()).range);
+
+ let bang = if d.is_bang { "!" } else { "" };
+ Diagnostic::new(
+ "unresolved-macro-call",
+ format!("unresolved macro `{}{}`", d.path, bang),
+ display_range,
+ )
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn unresolved_macro_diag() {
+ check_diagnostics(
+ r#"
+fn f() {
+ m!();
+} //^ error: unresolved macro `m!`
+
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unresolved_macro_range() {
+ check_diagnostics(
+ r#"
+foo::bar!(92);
+ //^^^ error: unresolved macro `foo::bar!`
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolved_legacy_scope_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! m { () => {} }
+
+m!(); m2!();
+ //^^ error: unresolved macro `m2!`
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolved_module_scope_macro() {
+ check_diagnostics(
+ r#"
+mod mac {
+#[macro_export]
+macro_rules! m { () => {} } }
+
+self::m!(); self::m2!();
+ //^^ error: unresolved macro `self::m2!`
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
new file mode 100644
index 000000000..b8f2a9e94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -0,0 +1,156 @@
+use hir::db::AstDatabase;
+use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
+use itertools::Itertools;
+use syntax::AstNode;
+
+use crate::{fix, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-module
+//
+// This diagnostic is triggered if rust-analyzer is unable to discover the referred module.
+pub(crate) fn unresolved_module(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedModule,
+) -> Diagnostic {
+ Diagnostic::new(
+ "unresolved-module",
+ match &*d.candidates {
+ [] => "unresolved module".to_string(),
+ [candidate] => format!("unresolved module, can't find module file: {}", candidate),
+ [candidates @ .., last] => {
+ format!(
+ "unresolved module, can't find module file: {}, or {}",
+ candidates.iter().format(", "),
+ last
+ )
+ }
+ },
+ ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.decl.file_id)?;
+ let unresolved_module = d.decl.value.to_node(&root);
+ Some(
+ d.candidates
+ .iter()
+ .map(|candidate| {
+ fix(
+ "create_module",
+ &format!("Create module at `{candidate}`"),
+ FileSystemEdit::CreateFile {
+ dst: AnchoredPathBuf {
+ anchor: d.decl.file_id.original_file(ctx.sema.db),
+ path: candidate.clone(),
+ },
+ initial_contents: "".to_string(),
+ }
+ .into(),
+ unresolved_module.syntax().text_range(),
+ )
+ })
+ .collect(),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::expect;
+
+ use crate::tests::{check_diagnostics, check_expect};
+
+ #[test]
+ fn unresolved_module() {
+ check_diagnostics(
+ r#"
+//- /lib.rs
+mod foo;
+ mod bar;
+//^^^^^^^^ 💡 error: unresolved module, can't find module file: bar.rs, or bar/mod.rs
+mod baz {}
+//- /foo.rs
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unresolved_module_diagnostic() {
+ check_expect(
+ r#"mod foo;"#,
+ expect![[r#"
+ [
+ Diagnostic {
+ code: DiagnosticCode(
+ "unresolved-module",
+ ),
+ message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
+ range: 0..8,
+ severity: Error,
+ unused: false,
+ experimental: false,
+ fixes: Some(
+ [
+ Assist {
+ id: AssistId(
+ "create_module",
+ QuickFix,
+ ),
+ label: "Create module at `foo.rs`",
+ group: None,
+ target: 0..8,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {},
+ file_system_edits: [
+ CreateFile {
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 0,
+ ),
+ path: "foo.rs",
+ },
+ initial_contents: "",
+ },
+ ],
+ is_snippet: false,
+ },
+ ),
+ trigger_signature_help: false,
+ },
+ Assist {
+ id: AssistId(
+ "create_module",
+ QuickFix,
+ ),
+ label: "Create module at `foo/mod.rs`",
+ group: None,
+ target: 0..8,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {},
+ file_system_edits: [
+ CreateFile {
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 0,
+ ),
+ path: "foo/mod.rs",
+ },
+ initial_contents: "",
+ },
+ ],
+ is_snippet: false,
+ },
+ ),
+ trigger_signature_help: false,
+ },
+ ],
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
new file mode 100644
index 000000000..760f51f90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
@@ -0,0 +1,62 @@
+use hir::db::DefDatabase;
+use syntax::NodeOrToken;
+
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: unresolved-proc-macro
+//
+// This diagnostic is shown when a procedural macro cannot be found. This usually means that
+// procedural macro support is simply disabled (and hence the diagnostic is only a weak hint instead of an error),
+// but can also indicate project setup problems.
+//
+// If you are seeing a lot of "proc macro not expanded" warnings, you can add this diagnostic to the
+// `rust-analyzer.diagnostics.disabled` list to prevent them from being shown. Alternatively, you can
+// enable support for procedural macros (see `rust-analyzer.procMacro.attributes.enable`).
+pub(crate) fn unresolved_proc_macro(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedProcMacro,
+ proc_macros_enabled: bool,
+ proc_attr_macros_enabled: bool,
+) -> Diagnostic {
+ // Use more accurate position if available.
+ let display_range = (|| {
+ let precise_location = d.precise_location?;
+ let root = ctx.sema.parse_or_expand(d.node.file_id)?;
+ match root.covering_element(precise_location) {
+ NodeOrToken::Node(it) => Some(ctx.sema.original_range(&it)),
+ NodeOrToken::Token(it) => d.node.with_value(it).original_file_range_opt(ctx.sema.db),
+ }
+ })()
+ .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()))
+ .range;
+
+ let config_enabled = match d.kind {
+ hir::MacroKind::Attr => proc_macros_enabled && proc_attr_macros_enabled,
+ _ => proc_macros_enabled,
+ };
+
+ let message = match &d.macro_name {
+ Some(name) => format!("proc macro `{}` not expanded", name),
+ None => "proc macro not expanded".to_string(),
+ };
+ let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
+ let def_map = ctx.sema.db.crate_def_map(d.krate);
+ let message = format!(
+ "{message}: {}",
+ if config_enabled {
+ match def_map.proc_macro_loading_error() {
+ Some(e) => e,
+ None => "proc macro not found in the built dylib",
+ }
+ } else {
+ match d.kind {
+ hir::MacroKind::Attr if proc_macros_enabled => {
+ "attribute macro expansion is disabled"
+ }
+ _ => "proc-macro expansion is disabled",
+ }
+ },
+ );
+
+ Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
new file mode 100644
index 000000000..8b9330e04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -0,0 +1,148 @@
+use ide_db::{base_db::FileId, source_change::SourceChange};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, Severity};
+
+// Diagnostic: unnecessary-braces
+//
+// Diagnostic for unnecessary braces in `use` items.
+pub(crate) fn useless_braces(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ node: &SyntaxNode,
+) -> Option<()> {
+ let use_tree_list = ast::UseTreeList::cast(node.clone())?;
+ if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
+ // If there is a comment inside the bracketed `use`,
+ // assume it is a commented-out module path and don't show the diagnostic.
+ if use_tree_list.has_inner_comment() {
+ return Some(());
+ }
+
+ let use_range = use_tree_list.syntax().text_range();
+ let edit = remove_braces(&single_use_tree).unwrap_or_else(|| {
+ let to_replace = single_use_tree.syntax().text().to_string();
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(use_range);
+ edit_builder.insert(use_range.start(), to_replace);
+ edit_builder.finish()
+ });
+
+ acc.push(
+ Diagnostic::new(
+ "unnecessary-braces",
+ "Unnecessary braces in use statement".to_string(),
+ use_range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "remove_braces",
+ "Remove unnecessary braces",
+ SourceChange::from_text_edit(file_id, edit),
+ use_range,
+ )])),
+ );
+ }
+
+ Some(())
+}
+
+fn remove_braces(single_use_tree: &ast::UseTree) -> Option<TextEdit> {
+ let use_tree_list_node = single_use_tree.syntax().parent()?;
+ if single_use_tree.path()?.segment()?.self_token().is_some() {
+ let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
+ let end = use_tree_list_node.text_range().end();
+ return Some(TextEdit::delete(TextRange::new(start, end)));
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn test_check_unnecessary_braces_in_use_statement() {
+ check_diagnostics(
+ r#"
+use a;
+use a::{c, d::e};
+
+mod a {
+ mod c {}
+ mod d {
+ mod e {}
+ }
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+use a;
+use a::{
+ c,
+ // d::e
+};
+
+mod a {
+ mod c {}
+ mod d {
+ mod e {}
+ }
+}
+"#,
+ );
+ check_fix(
+ r#"
+mod b {}
+use {$0b};
+"#,
+ r#"
+mod b {}
+use b;
+"#,
+ );
+ check_fix(
+ r#"
+mod b {}
+use {b$0};
+"#,
+ r#"
+mod b {}
+use b;
+"#,
+ );
+ check_fix(
+ r#"
+mod a { mod c {} }
+use a::{c$0};
+"#,
+ r#"
+mod a { mod c {} }
+use a::c;
+"#,
+ );
+ check_fix(
+ r#"
+mod a {}
+use a::{self$0};
+"#,
+ r#"
+mod a {}
+use a;
+"#,
+ );
+ check_fix(
+ r#"
+mod a { mod c {} mod d { mod e {} } }
+use a::{c, d::{e$0}};
+"#,
+ r#"
+mod a { mod c {} mod d { mod e {} } }
+use a::{c, d::e};
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
new file mode 100644
index 000000000..41abaa836
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -0,0 +1,260 @@
+//! Diagnostics rendering and fixits.
+//!
+//! Most of the diagnostics originate from the dark depth of the compiler, and
+//! are originally expressed in term of IR. When we emit the diagnostic, we are
+//! usually not in the position to decide how to best "render" it in terms of
+//! user-authored source code. We are especially not in the position to offer
+//! fixits, as the compiler completely lacks the infrastructure to edit the
+//! source code.
+//!
+//! Instead, we "bubble up" raw, structured diagnostics until the `hir` crate,
+//! where we "cook" them so that each diagnostic is formulated in terms of `hir`
+//! types. Well, at least that's the aspiration, the "cooking" is somewhat
+//! ad-hoc at the moment. Anyways, we get a bunch of ide-friendly diagnostic
+//! structs from hir, and we want to render them to unified serializable
+//! representation (span, level, message) here. If we can, we also provide
+//! fixits. By the way, that's why we want to keep diagnostics structured
+//! internally -- so that we have all the info to make fixes.
+//!
+//! We have one "handler" module per diagnostic code. Such a module contains
+//! rendering, optional fixes and tests. It's OK if some low-level compiler
+//! functionality ends up being tested via a diagnostic.
+//!
+//! There are also a couple of ad-hoc diagnostics implemented directly here, we
+//! don't yet have a great pattern for how to do them properly.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod handlers {
+ pub(crate) mod break_outside_of_loop;
+ pub(crate) mod inactive_code;
+ pub(crate) mod incorrect_case;
+ pub(crate) mod invalid_derive_target;
+ pub(crate) mod macro_error;
+ pub(crate) mod malformed_derive;
+ pub(crate) mod mismatched_arg_count;
+ pub(crate) mod missing_fields;
+ pub(crate) mod missing_match_arms;
+ pub(crate) mod missing_unsafe;
+ pub(crate) mod no_such_field;
+ pub(crate) mod replace_filter_map_next_with_find_map;
+ pub(crate) mod type_mismatch;
+ pub(crate) mod unimplemented_builtin_macro;
+ pub(crate) mod unresolved_extern_crate;
+ pub(crate) mod unresolved_import;
+ pub(crate) mod unresolved_macro_call;
+ pub(crate) mod unresolved_module;
+ pub(crate) mod unresolved_proc_macro;
+
+ // The handlers below are unusual: they implement the diagnostics as well.
+ pub(crate) mod field_shorthand;
+ pub(crate) mod useless_braces;
+ pub(crate) mod unlinked_file;
+}
+
+#[cfg(test)]
+mod tests;
+
+use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
+use ide_db::{
+ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
+ base_db::{FileId, FileRange, SourceDatabase},
+ label::Label,
+ source_change::SourceChange,
+ FxHashSet, RootDatabase,
+};
+use syntax::{algo::find_node_at_range, ast::AstNode, SyntaxNodePtr, TextRange};
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub struct DiagnosticCode(pub &'static str);
+
+impl DiagnosticCode {
+ pub fn as_str(&self) -> &str {
+ self.0
+ }
+}
+
+#[derive(Debug)]
+pub struct Diagnostic {
+ pub code: DiagnosticCode,
+ pub message: String,
+ pub range: TextRange,
+ pub severity: Severity,
+ pub unused: bool,
+ pub experimental: bool,
+ pub fixes: Option<Vec<Assist>>,
+}
+
+impl Diagnostic {
+ fn new(code: &'static str, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ let message = message.into();
+ Diagnostic {
+ code: DiagnosticCode(code),
+ message,
+ range,
+ severity: Severity::Error,
+ unused: false,
+ experimental: false,
+ fixes: None,
+ }
+ }
+
+ fn experimental(mut self) -> Diagnostic {
+ self.experimental = true;
+ self
+ }
+
+ fn severity(mut self, severity: Severity) -> Diagnostic {
+ self.severity = severity;
+ self
+ }
+
+ fn with_fixes(mut self, fixes: Option<Vec<Assist>>) -> Diagnostic {
+ self.fixes = fixes;
+ self
+ }
+
+ fn with_unused(mut self, unused: bool) -> Diagnostic {
+ self.unused = unused;
+ self
+ }
+}
+
+#[derive(Debug, Copy, Clone)]
+pub enum Severity {
+ Error,
+ // We don't actually emit this one yet, but we should at some point.
+ // Warning,
+ WeakWarning,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ExprFillDefaultMode {
+ Todo,
+ Default,
+}
+impl Default for ExprFillDefaultMode {
+ fn default() -> Self {
+ Self::Todo
+ }
+}
+
+#[derive(Default, Debug, Clone)]
+pub struct DiagnosticsConfig {
+ pub proc_macros_enabled: bool,
+ pub proc_attr_macros_enabled: bool,
+ pub disable_experimental: bool,
+ pub disabled: FxHashSet<String>,
+ pub expr_fill_default: ExprFillDefaultMode,
+}
+
+struct DiagnosticsContext<'a> {
+ config: &'a DiagnosticsConfig,
+ sema: Semantics<'a, RootDatabase>,
+ resolve: &'a AssistResolveStrategy,
+}
+
+pub fn diagnostics(
+ db: &RootDatabase,
+ config: &DiagnosticsConfig,
+ resolve: &AssistResolveStrategy,
+ file_id: FileId,
+) -> Vec<Diagnostic> {
+ let _p = profile::span("diagnostics");
+ let sema = Semantics::new(db);
+ let parse = db.parse(file_id);
+ let mut res = Vec::new();
+
+ // [#34344] Only take the first 128 errors, to avoid slowing down the editor/IDE; the number 128 is chosen arbitrarily.
+ res.extend(
+ parse.errors().iter().take(128).map(|err| {
+ Diagnostic::new("syntax-error", format!("Syntax Error: {}", err), err.range())
+ }),
+ );
+
+ for node in parse.tree().syntax().descendants() {
+ handlers::useless_braces::useless_braces(&mut res, file_id, &node);
+ handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
+ }
+
+ let module = sema.to_module_def(file_id);
+
+ let ctx = DiagnosticsContext { config, sema, resolve };
+ if module.is_none() {
+ handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id);
+ }
+
+ let mut diags = Vec::new();
+ if let Some(m) = module {
+ m.diagnostics(db, &mut diags)
+ }
+
+ for diag in diags {
+ #[rustfmt::skip]
+ let d = match diag {
+ AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
+ AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
+ AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
+ AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d),
+ AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d),
+ AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d),
+ AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d),
+ AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d),
+ AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d),
+ AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
+ AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
+ AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d),
+ AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d),
+ AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d),
+ AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d),
+ AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
+ AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
+ AnyDiagnostic::InvalidDeriveTarget(d) => handlers::invalid_derive_target::invalid_derive_target(&ctx, &d),
+
+ AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) {
+ Some(it) => it,
+ None => continue,
+ }
+ };
+ res.push(d)
+ }
+
+ res.retain(|d| {
+ !ctx.config.disabled.contains(d.code.as_str())
+ && !(ctx.config.disable_experimental && d.experimental)
+ });
+
+ res
+}
+
+fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
+ let mut res = unresolved_fix(id, label, target);
+ res.source_change = Some(source_change);
+ res
+}
+
+fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
+ assert!(!id.contains(' '));
+ Assist {
+ id: AssistId(id, AssistKind::QuickFix),
+ label: Label::new(label.to_string()),
+ group: None,
+ target,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+}
+
+fn adjusted_display_range<N: AstNode>(
+ ctx: &DiagnosticsContext<'_>,
+ diag_ptr: InFile<SyntaxNodePtr>,
+ adj: &dyn Fn(N) -> Option<TextRange>,
+) -> TextRange {
+ let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);
+
+ let source_file = ctx.sema.db.parse(file_id);
+ find_node_at_range::<N>(&source_file.syntax_node(), range)
+ .filter(|it| it.syntax().text_range() == range)
+ .and_then(adj)
+ .unwrap_or(range)
+}
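As a rough orientation, a minimal sketch of how a caller might drive the API above (the real entry points live in the `ide` crate; `db` and `file_id` are assumed to come from an already-loaded workspace):

use ide_db::assists::AssistResolveStrategy;

fn collect_errors(db: &RootDatabase, file_id: FileId) -> Vec<String> {
    // Hypothetical helper: gather error-level diagnostics for a single file.
    let mut config = DiagnosticsConfig::default();
    // Diagnostics can be switched off by code, exactly as `disabled` is consulted above.
    config.disabled.insert("inactive-code".to_string());
    diagnostics(db, &config, &AssistResolveStrategy::All, file_id)
        .into_iter()
        .filter(|d| matches!(d.severity, Severity::Error))
        .map(|d| format!("{}: {}", d.code.as_str(), d.message))
        .collect()
}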
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
new file mode 100644
index 000000000..7312bca32
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -0,0 +1,145 @@
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen;
+
+use expect_test::Expect;
+use ide_db::{
+ assists::AssistResolveStrategy,
+ base_db::{fixture::WithFixture, SourceDatabaseExt},
+ RootDatabase,
+};
+use stdx::trim_indent;
+use test_utils::{assert_eq_text, extract_annotations};
+
+use crate::{DiagnosticsConfig, ExprFillDefaultMode, Severity};
+
+/// Takes a multi-file input fixture with annotated cursor positions,
+/// and checks that:
+/// * a diagnostic is produced
+/// * the first diagnostic fix trigger range touches the input cursor position
+/// * the contents of the file containing the cursor match `after` once the diagnostic fix is applied
+#[track_caller]
+pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_nth_fix(0, ra_fixture_before, ra_fixture_after);
+}
+/// Takes a multi-file input fixture with annotated cursor positions,
+/// and checks that:
+/// * a diagnostic is produced
+/// * every diagnostic fix's trigger range touches the input cursor position
+/// * the contents of the file containing the cursor match `after` once each diagnostic fix is applied
+pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
+ for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
+ check_nth_fix(i, ra_fixture_before, ra_fixture_after)
+ }
+}
+
+#[track_caller]
+fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
+ let after = trim_indent(ra_fixture_after);
+
+ let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
+ let mut conf = DiagnosticsConfig::default();
+ conf.expr_fill_default = ExprFillDefaultMode::Default;
+ let diagnostic =
+ super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
+ .pop()
+ .expect("no diagnostics");
+ let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
+ let actual = {
+ let source_change = fix.source_change.as_ref().unwrap();
+ let file_id = *source_change.source_file_edits.keys().next().unwrap();
+ let mut actual = db.file_text(file_id).to_string();
+
+ for edit in source_change.source_file_edits.values() {
+ edit.apply(&mut actual);
+ }
+ actual
+ };
+
+ assert!(
+ fix.target.contains_inclusive(file_position.offset),
+ "diagnostic fix range {:?} does not touch cursor position {:?}",
+ fix.target,
+ file_position.offset
+ );
+ assert_eq_text!(&after, &actual);
+}
+
+/// Checks that there's a diagnostic *without* fix at `$0`.
+pub(crate) fn check_no_fix(ra_fixture: &str) {
+ let (db, file_position) = RootDatabase::with_position(ra_fixture);
+ let diagnostic = super::diagnostics(
+ &db,
+ &DiagnosticsConfig::default(),
+ &AssistResolveStrategy::All,
+ file_position.file_id,
+ )
+ .pop()
+ .unwrap();
+ assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
+}
+
+pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
+ let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
+ let diagnostics = super::diagnostics(
+ &db,
+ &DiagnosticsConfig::default(),
+ &AssistResolveStrategy::All,
+ file_id,
+ );
+ expect.assert_debug_eq(&diagnostics)
+}
+
+#[track_caller]
+pub(crate) fn check_diagnostics(ra_fixture: &str) {
+ let mut config = DiagnosticsConfig::default();
+ config.disabled.insert("inactive-code".to_string());
+ check_diagnostics_with_config(config, ra_fixture)
+}
+
+#[track_caller]
+pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
+ let (db, files) = RootDatabase::with_many_files(ra_fixture);
+ for file_id in files {
+ let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+
+ let expected = extract_annotations(&*db.file_text(file_id));
+ let mut actual = diagnostics
+ .into_iter()
+ .map(|d| {
+ let mut annotation = String::new();
+ if let Some(fixes) = &d.fixes {
+ assert!(!fixes.is_empty());
+ annotation.push_str("💡 ")
+ }
+ annotation.push_str(match d.severity {
+ Severity::Error => "error",
+ Severity::WeakWarning => "weak",
+ });
+ annotation.push_str(": ");
+ annotation.push_str(&d.message);
+ (d.range, annotation)
+ })
+ .collect::<Vec<_>>();
+ actual.sort_by_key(|(range, _)| range.start());
+ assert_eq!(expected, actual);
+ }
+}
+
+#[test]
+fn test_disabled_diagnostics() {
+ let mut config = DiagnosticsConfig::default();
+ config.disabled.insert("unresolved-module".into());
+
+ let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
+
+ let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+ assert!(diagnostics.is_empty());
+
+ let diagnostics = super::diagnostics(
+ &db,
+ &DiagnosticsConfig::default(),
+ &AssistResolveStrategy::All,
+ file_id,
+ );
+ assert!(!diagnostics.is_empty());
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs
new file mode 100644
index 000000000..ec6558a46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests/sourcegen.rs
@@ -0,0 +1,73 @@
+//! Generates the diagnostics documentation (`docs/user/generated_diagnostic.adoc`).
+
+use std::{fmt, fs, io, path::PathBuf};
+
+use sourcegen::project_root;
+
+#[test]
+fn sourcegen_diagnostic_docs() {
+ let diagnostics = Diagnostic::collect().unwrap();
+ let contents =
+ diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
+ let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
+ let dst = project_root().join("docs/user/generated_diagnostic.adoc");
+ fs::write(&dst, &contents).unwrap();
+}
+
+#[derive(Debug)]
+struct Diagnostic {
+ id: String,
+ location: sourcegen::Location,
+ doc: String,
+}
+
+impl Diagnostic {
+ fn collect() -> io::Result<Vec<Diagnostic>> {
+ let handlers_dir = project_root().join("crates/ide-diagnostics/src/handlers");
+
+ let mut res = Vec::new();
+ for path in sourcegen::list_rust_files(&handlers_dir) {
+ collect_file(&mut res, path)?;
+ }
+ res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
+ return Ok(res);
+
+ fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> io::Result<()> {
+ let text = fs::read_to_string(&path)?;
+ let comment_blocks = sourcegen::CommentBlock::extract("Diagnostic", &text);
+
+ for block in comment_blocks {
+ let id = block.id;
+ if let Err(msg) = is_valid_diagnostic_name(&id) {
+ panic!("invalid diagnostic name: {:?}:\n {}", id, msg)
+ }
+ let doc = block.contents.join("\n");
+ let location = sourcegen::Location { file: path.clone(), line: block.line };
+ acc.push(Diagnostic { id, location, doc })
+ }
+
+ Ok(())
+ }
+ }
+}
+
+fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
+ let diagnostic = diagnostic.trim();
+ if diagnostic.find(char::is_whitespace).is_some() {
+ return Err("Diagnostic names can't contain whitespace symbols".into());
+ }
+ if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
+ return Err("Diagnostic names can't contain uppercase symbols".into());
+ }
+ if diagnostic.chars().any(|c| !c.is_ascii()) {
+ return Err("Diagnostic can't contain non-ASCII symbols".into());
+ }
+
+ Ok(())
+}
+
+impl fmt::Display for Diagnostic {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
new file mode 100644
index 000000000..d36dd02d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "ide-ssr"
+version = "0.0.0"
+description = "Structural search and replace of Rust code"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+
+itertools = "0.10.3"
+
+text-edit = { path = "../text-edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs
new file mode 100644
index 000000000..c02bacae6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/errors.rs
@@ -0,0 +1,29 @@
+//! Code relating to errors produced by SSR.
+
+/// Constructs an SsrError, taking arguments like the `format!` macro.
+macro_rules! _error {
+ ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
+ ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+
+/// Returns from the current function with an error built from the supplied arguments, as for `format!`.
+macro_rules! _bail {
+ ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
+}
+pub(crate) use _bail as bail;
+
+#[derive(Debug, PartialEq)]
+pub struct SsrError(pub(crate) String);
+
+impl std::fmt::Display for SsrError {
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ write!(f, "Parse error: {}", self.0)
+ }
+}
+
+impl SsrError {
+ pub(crate) fn new(message: impl Into<String>) -> SsrError {
+ SsrError(message.into())
+ }
+}
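A minimal sketch of how the `error!` and `bail!` macros above are typically used within this crate (the validation function here is hypothetical):

use crate::errors::{bail, error, SsrError};

fn validate_rule_text(rule: &str) -> Result<(), SsrError> {
    if rule.is_empty() {
        // `bail!` returns early from the function with an `SsrError` built like `format!`.
        bail!("rule must not be empty");
    }
    if !rule.contains("==>>") {
        // `error!` just constructs the `SsrError` value.
        return Err(error!("`{}` is missing the `==>>` separator", rule));
    }
    Ok(())
}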
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
new file mode 100644
index 000000000..503754afe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
@@ -0,0 +1,58 @@
+//! When specifying an SSR rule, you generally want to map one *kind* of thing to
+//! the same kind of thing: path to path, expression to expression, type to
+//! type.
+//!
+//! The problem is, while this *kind* is generally obvious to a human, the IDE
+//! needs to determine it somehow. We do this in a stupid way -- by pasting the
+//! SSR rule into different contexts and checking what works.
+
+use syntax::{ast, AstNode, SyntaxNode};
+
+pub(crate) fn ty(s: &str) -> Result<SyntaxNode, ()> {
+ fragment::<ast::Type>("type T = {};", s)
+}
+
+pub(crate) fn item(s: &str) -> Result<SyntaxNode, ()> {
+ fragment::<ast::Item>("{}", s)
+}
+
+pub(crate) fn pat(s: &str) -> Result<SyntaxNode, ()> {
+ fragment::<ast::Pat>("const _: () = {let {} = ();};", s)
+}
+
+pub(crate) fn expr(s: &str) -> Result<SyntaxNode, ()> {
+ fragment::<ast::Expr>("const _: () = {};", s)
+}
+
+pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
+ let template = "const _: () = { {}; };";
+ let input = template.replace("{}", s);
+ let parse = syntax::SourceFile::parse(&input);
+ if !parse.errors().is_empty() {
+ return Err(());
+ }
+ let mut node =
+ parse.tree().syntax().descendants().skip(2).find_map(ast::Stmt::cast).ok_or(())?;
+ if !s.ends_with(';') && node.to_string().ends_with(';') {
+ node = node.clone_for_update();
+ node.syntax().last_token().map(|it| it.detach());
+ }
+ if node.to_string() != s {
+ return Err(());
+ }
+ Ok(node.syntax().clone_subtree())
+}
+
+fn fragment<T: AstNode>(template: &str, s: &str) -> Result<SyntaxNode, ()> {
+ let s = s.trim();
+ let input = template.replace("{}", s);
+ let parse = syntax::SourceFile::parse(&input);
+ if !parse.errors().is_empty() {
+ return Err(());
+ }
+ let node = parse.tree().syntax().descendants().find_map(T::cast).ok_or(())?;
+ if node.syntax().text() != s {
+ return Err(());
+ }
+ Ok(node.syntax().clone_subtree())
+}
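A minimal sketch of the "paste into a template and see what parses" idea in use; the helpers above are pub(crate), and both the function name and the probing order here are only illustrative:

fn guess_kind(s: &str) -> &'static str {
    // Hypothetical helper: try each template in turn and report the first that parses.
    if expr(s).is_ok() {
        "expression" // parsed inside `const _: () = {};`
    } else if ty(s).is_ok() {
        "type" // parsed inside `type T = {};`
    } else if pat(s).is_ok() {
        "pattern" // parsed inside `const _: () = {let {} = ();};`
    } else if item(s).is_ok() {
        "item"
    } else if stmt(s).is_ok() {
        "statement"
    } else {
        "unknown"
    }
}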
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
new file mode 100644
index 000000000..5b6e01625
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
@@ -0,0 +1,35 @@
+//! This module allows building an SSR MatchFinder by parsing the SSR rule
+//! from a comment.
+
+use ide_db::{
+ base_db::{FilePosition, FileRange, SourceDatabase},
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ TextRange,
+};
+
+use crate::MatchFinder;
+
+/// Attempts to build an SSR MatchFinder from a comment at the given file
+/// range. If successful, returns the MatchFinder and a TextRange covering the
+/// comment.
+pub fn ssr_from_comment(
+ db: &RootDatabase,
+ frange: FileRange,
+) -> Option<(MatchFinder<'_>, TextRange)> {
+ let comment = {
+ let file = db.parse(frange.file_id);
+ file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
+ }?;
+ let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap();
+ let ssr_rule = comment_text_without_prefix.parse().ok()?;
+
+ let lookup_context = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let mut match_finder = MatchFinder::in_context(db, lookup_context, vec![]).ok()?;
+ match_finder.add_rule(ssr_rule).ok()?;
+
+ Some((match_finder, comment.syntax().text_range()))
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
new file mode 100644
index 000000000..a5e24daa9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -0,0 +1,358 @@
+//! Structural Search Replace
+//!
+//! Allows searching the AST for code that matches one or more patterns and then replacing that code
+//! based on a template.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+// Feature: Structural Search and Replace
+//
+// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
+// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
+// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
+// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
+//
+// All paths in both the search pattern and the replacement template must resolve in the context
+// in which this command is invoked. Paths in the search pattern will then match the code if they
+// resolve to the same item, even if they're written differently. For example if we invoke the
+// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
+// to `foo::Bar` will match.
+//
+// Paths in the replacement template will be rendered appropriately for the context in which the
+// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
+// code in the `foo` module, we'll insert just `Bar`.
+//
+// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
+// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
+// placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
+// the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror
+// whatever autoderef and autoref was happening implicitly in the matched code.
+//
+// The scope of the search / replace will be restricted to the current selection if any, otherwise
+// it will apply to the whole workspace.
+//
+// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
+//
+// Supported constraints:
+//
+// |===
+// | Constraint | Restricts placeholder
+//
+// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
+// | not(a) | Negates the constraint `a`
+// |===
+//
+// Available via the command `rust-analyzer.ssr`.
+//
+// ```rust
+// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
+//
+// // BEFORE
+// String::from(foo(y + 5, z))
+//
+// // AFTER
+// String::from((y + 5).foo(z))
+// ```
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Structural Search Replace**
+// |===
+//
+// Also available as an assist, by writing a comment containing the structural
+// search and replace rule. You will only see the assist if the comment can
+// be parsed as a valid structural search and replace rule.
+//
+// ```rust
+// // Place the cursor on the line below to see the assist 💡.
+// // foo($a, $b) ==>> ($a).foo($b)
+// ```
+
+mod from_comment;
+mod matching;
+mod nester;
+mod parsing;
+mod fragments;
+mod replacing;
+mod resolving;
+mod search;
+#[macro_use]
+mod errors;
+#[cfg(test)]
+mod tests;
+
+pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Match};
+
+use crate::{errors::bail, matching::MatchFailureReason};
+use hir::Semantics;
+use ide_db::{
+ base_db::{FileId, FilePosition, FileRange},
+ FxHashMap,
+};
+use resolving::ResolvedRule;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use text_edit::TextEdit;
+
+// A structured search and replace rule. Created by calling `parse` on a str.
+#[derive(Debug)]
+pub struct SsrRule {
+ /// A structured pattern that we're searching for.
+ pattern: parsing::RawPattern,
+ /// What we'll replace it with.
+ template: parsing::RawPattern,
+ parsed_rules: Vec<parsing::ParsedRule>,
+}
+
+#[derive(Debug)]
+pub struct SsrPattern {
+ parsed_rules: Vec<parsing::ParsedRule>,
+}
+
+#[derive(Debug, Default)]
+pub struct SsrMatches {
+ pub matches: Vec<Match>,
+}
+
+/// Searches a crate for pattern matches and possibly replaces them with something else.
+pub struct MatchFinder<'db> {
+ /// Our source of information about the user's code.
+ sema: Semantics<'db, ide_db::RootDatabase>,
+ rules: Vec<ResolvedRule>,
+ resolution_scope: resolving::ResolutionScope<'db>,
+ restrict_ranges: Vec<FileRange>,
+}
+
+impl<'db> MatchFinder<'db> {
+ /// Constructs a new instance where names will be looked up as if they appeared at
+ /// `lookup_context`.
+ pub fn in_context(
+ db: &'db ide_db::RootDatabase,
+ lookup_context: FilePosition,
+ mut restrict_ranges: Vec<FileRange>,
+ ) -> Result<MatchFinder<'db>, SsrError> {
+ restrict_ranges.retain(|range| !range.range.is_empty());
+ let sema = Semantics::new(db);
+ let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context)
+ .ok_or_else(|| SsrError("no resolution scope for file".into()))?;
+ Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges })
+ }
+
+ /// Constructs an instance using the start of the first file in `db` as the lookup context.
+ pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
+ use ide_db::base_db::SourceDatabaseExt;
+ use ide_db::symbol_index::SymbolsDatabase;
+ if let Some(first_file_id) =
+ db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
+ {
+ MatchFinder::in_context(
+ db,
+ FilePosition { file_id: first_file_id, offset: 0.into() },
+ vec![],
+ )
+ } else {
+ bail!("No files to search");
+ }
+ }
+
+ /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
+ /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
+ /// match it.
+ pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
+ for parsed_rule in rule.parsed_rules {
+ self.rules.push(ResolvedRule::new(
+ parsed_rule,
+ &self.resolution_scope,
+ self.rules.len(),
+ )?);
+ }
+ Ok(())
+ }
+
+ /// Finds matches for all added rules and returns edits for all found matches.
+ pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
+ use ide_db::base_db::SourceDatabaseExt;
+ let mut matches_by_file = FxHashMap::default();
+ for m in self.matches().matches {
+ matches_by_file
+ .entry(m.range.file_id)
+ .or_insert_with(SsrMatches::default)
+ .matches
+ .push(m);
+ }
+ matches_by_file
+ .into_iter()
+ .map(|(file_id, matches)| {
+ (
+ file_id,
+ replacing::matches_to_edit(
+ &matches,
+ &self.sema.db.file_text(file_id),
+ &self.rules,
+ ),
+ )
+ })
+ .collect()
+ }
+
+ /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
+ /// intend to do replacement, use `add_rule` instead.
+ pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
+ for parsed_rule in pattern.parsed_rules {
+ self.rules.push(ResolvedRule::new(
+ parsed_rule,
+ &self.resolution_scope,
+ self.rules.len(),
+ )?);
+ }
+ Ok(())
+ }
+
+ /// Returns matches for all added rules.
+ pub fn matches(&self) -> SsrMatches {
+ let mut matches = Vec::new();
+ let mut usage_cache = search::UsageCache::default();
+ for rule in &self.rules {
+ self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
+ }
+ nester::nest_and_remove_collisions(matches, &self.sema)
+ }
+
+ /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
+ /// them, while recording reasons why they don't match. This API is useful for command
+ /// line-based debugging where providing a range is difficult.
+ pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
+ use ide_db::base_db::SourceDatabaseExt;
+ let file = self.sema.parse(file_id);
+ let mut res = Vec::new();
+ let file_text = self.sema.db.file_text(file_id);
+ let mut remaining_text = file_text.as_str();
+ let mut base = 0;
+ let len = snippet.len() as u32;
+ while let Some(offset) = remaining_text.find(snippet) {
+ let start = base + offset as u32;
+ let end = start + len;
+ self.output_debug_for_nodes_at_range(
+ file.syntax(),
+ FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
+ &None,
+ &mut res,
+ );
+ remaining_text = &remaining_text[offset + snippet.len()..];
+ base = end;
+ }
+ res
+ }
+
+ fn output_debug_for_nodes_at_range(
+ &self,
+ node: &SyntaxNode,
+ range: FileRange,
+ restrict_range: &Option<FileRange>,
+ out: &mut Vec<MatchDebugInfo>,
+ ) {
+ for node in node.children() {
+ let node_range = self.sema.original_range(&node);
+ if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
+ {
+ continue;
+ }
+ if node_range.range == range.range {
+ for rule in &self.rules {
+ // For now we ignore rules that have a different kind than our node, otherwise
+ // we get lots of noise. If at some point we add support for restricting rules
+ // to a particular kind of thing (e.g. only match type references), then we can
+ // relax this. We special-case expressions, since function calls can match
+ // method calls.
+ if rule.pattern.node.kind() != node.kind()
+ && !(ast::Expr::can_cast(rule.pattern.node.kind())
+ && ast::Expr::can_cast(node.kind()))
+ {
+ continue;
+ }
+ out.push(MatchDebugInfo {
+ matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
+ .map_err(|e| MatchFailureReason {
+ reason: e.reason.unwrap_or_else(|| {
+ "Match failed, but no reason was given".to_owned()
+ }),
+ }),
+ pattern: rule.pattern.node.clone(),
+ node: node.clone(),
+ });
+ }
+ } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
+ if let Some(expanded) = self.sema.expand(&macro_call) {
+ if let Some(tt) = macro_call.token_tree() {
+ self.output_debug_for_nodes_at_range(
+ &expanded,
+ range,
+ &Some(self.sema.original_range(tt.syntax())),
+ out,
+ );
+ }
+ }
+ }
+ self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
+ }
+ }
+}
+
+pub struct MatchDebugInfo {
+ node: SyntaxNode,
+ /// Our search pattern parsed as an expression or item, etc
+ pattern: SyntaxNode,
+ matched: Result<Match, MatchFailureReason>,
+}
+
+impl std::fmt::Debug for MatchDebugInfo {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match &self.matched {
+ Ok(_) => writeln!(f, "Node matched")?,
+ Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
+ }
+ writeln!(
+ f,
+ "============ AST ===========\n\
+ {:#?}",
+ self.node
+ )?;
+ writeln!(f, "========= PATTERN ==========")?;
+ writeln!(f, "{:#?}", self.pattern)?;
+ writeln!(f, "============================")?;
+ Ok(())
+ }
+}
+
+impl SsrMatches {
+ /// Returns `self` with any nested matches removed and made into top-level matches.
+ pub fn flattened(self) -> SsrMatches {
+ let mut out = SsrMatches::default();
+ self.flatten_into(&mut out);
+ out
+ }
+
+ fn flatten_into(self, out: &mut SsrMatches) {
+ for mut m in self.matches {
+ for p in m.placeholder_values.values_mut() {
+ std::mem::take(&mut p.inner_matches).flatten_into(out);
+ }
+ out.matches.push(m);
+ }
+ }
+}
+
+impl Match {
+ pub fn matched_text(&self) -> String {
+ self.matched_node.text().to_string()
+ }
+}
+
+impl std::error::Error for SsrError {}
+
+#[cfg(test)]
+impl MatchDebugInfo {
+ pub(crate) fn match_failure_reason(&self) -> Option<&str> {
+ self.matched.as_ref().err().map(|r| r.reason.as_str())
+ }
+}
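A minimal sketch of driving the `MatchFinder` API above from code, assuming `db` and `position` come from an already-loaded workspace; the rule string uses the `<search_pattern> ==>> <replace_pattern>` syntax documented at the top of this file:

fn apply_rule(
    db: &ide_db::RootDatabase,
    position: FilePosition,
) -> Result<FxHashMap<FileId, TextEdit>, SsrError> {
    // `SsrRule` is created by calling `parse` on a str (see its definition above).
    let rule: SsrRule = "foo($a, $b) ==>> ($a).foo($b)".parse()?;
    let mut match_finder = MatchFinder::in_context(db, position, vec![])?;
    match_finder.add_rule(rule)?;
    // One `TextEdit` per file that contains at least one match.
    Ok(match_finder.edits())
}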
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
new file mode 100644
index 000000000..e3a837ddc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -0,0 +1,803 @@
+//! This module is responsible for matching a search pattern against a node in the AST. In the
+//! process of matching, placeholder values are recorded.
+
+use crate::{
+ parsing::{Constraint, NodeKind, Placeholder, Var},
+ resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
+ SsrMatches,
+};
+use hir::Semantics;
+use ide_db::{base_db::FileRange, FxHashMap};
+use std::{cell::Cell, iter::Peekable};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+// Creates a match error. If we're currently attempting to match some code that we thought we were
+// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
+macro_rules! match_error {
+ ($e:expr) => {{
+ MatchFailed {
+ reason: if recording_match_fail_reasons() {
+ Some(format!("{}", $e))
+ } else {
+ None
+ }
+ }
+ }};
+ ($fmt:expr, $($arg:tt)+) => {{
+ MatchFailed {
+ reason: if recording_match_fail_reasons() {
+ Some(format!($fmt, $($arg)+))
+ } else {
+ None
+ }
+ }
+ }};
+}
+
+// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
+macro_rules! fail_match {
+ ($($args:tt)*) => {return Err(match_error!($($args)*))};
+}
+
+/// Information about a match that was found.
+#[derive(Debug)]
+pub struct Match {
+ pub(crate) range: FileRange,
+ pub(crate) matched_node: SyntaxNode,
+ pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
+ pub(crate) ignored_comments: Vec<ast::Comment>,
+ pub(crate) rule_index: usize,
+ /// The depth of matched_node.
+ pub(crate) depth: usize,
+ // Each path in the template rendered for the module in which the match was found.
+ pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
+}
+
+/// Information about a placeholder bound in a match.
+#[derive(Debug)]
+pub(crate) struct PlaceholderMatch {
+ pub(crate) range: FileRange,
+ /// More matches, found within `node`.
+ pub(crate) inner_matches: SsrMatches,
+ /// How many times the code that the placeholder matched needed to be dereferenced. Will only be
+ /// non-zero if the placeholder matched the receiver of a method call.
+ pub(crate) autoderef_count: usize,
+ pub(crate) autoref_kind: ast::SelfParamKind,
+}
+
+#[derive(Debug)]
+pub(crate) struct MatchFailureReason {
+ pub(crate) reason: String,
+}
+
+/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
+#[derive(Clone)]
+pub(crate) struct MatchFailed {
+ /// The reason why we failed to match. Only present when debug_active was true in the call to
+ /// `get_match`.
+ pub(crate) reason: Option<String>,
+}
+
+/// Checks if `code` matches the search pattern in `rule`, returning information about
+/// the match, if it does. Since we only do matching in this module and searching is done by the
+/// parent module, we don't populate nested matches.
+pub(crate) fn get_match(
+ debug_active: bool,
+ rule: &ResolvedRule,
+ code: &SyntaxNode,
+ restrict_range: &Option<FileRange>,
+ sema: &Semantics<'_, ide_db::RootDatabase>,
+) -> Result<Match, MatchFailed> {
+ record_match_fails_reasons_scope(debug_active, || {
+ Matcher::try_match(rule, code, restrict_range, sema)
+ })
+}
+
+/// Checks if our search pattern matches a particular node of the AST.
+struct Matcher<'db, 'sema> {
+ sema: &'sema Semantics<'db, ide_db::RootDatabase>,
+ /// If any placeholders come from anywhere outside of this range, then the match will be
+ /// rejected.
+ restrict_range: Option<FileRange>,
+ rule: &'sema ResolvedRule,
+}
+
+/// Which phase of matching we're currently performing. We do two phases because most attempted
+/// matches will fail, so this lets us defer the more expensive checks to the second phase.
+enum Phase<'a> {
+ /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
+ First,
+ /// On the second phase, we construct the `Match`. Things like what placeholders bind to is
+ /// recorded.
+ Second(&'a mut Match),
+}
+
+impl<'db, 'sema> Matcher<'db, 'sema> {
+ fn try_match(
+ rule: &ResolvedRule,
+ code: &SyntaxNode,
+ restrict_range: &Option<FileRange>,
+ sema: &'sema Semantics<'db, ide_db::RootDatabase>,
+ ) -> Result<Match, MatchFailed> {
+ let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
+ // First pass at matching, where we check that node types and idents match.
+ match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
+ match_state.validate_range(&sema.original_range(code))?;
+ let mut the_match = Match {
+ range: sema.original_range(code),
+ matched_node: code.clone(),
+ placeholder_values: FxHashMap::default(),
+ ignored_comments: Vec::new(),
+ rule_index: rule.index,
+ depth: 0,
+ rendered_template_paths: FxHashMap::default(),
+ };
+ // Second matching pass, where we record placeholder matches, ignored comments and maybe do
+ // any other more expensive checks that we didn't want to do on the first pass.
+ match_state.attempt_match_node(
+ &mut Phase::Second(&mut the_match),
+ &rule.pattern.node,
+ code,
+ )?;
+ the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
+ if let Some(template) = &rule.template {
+ the_match.render_template_paths(template, sema)?;
+ }
+ Ok(the_match)
+ }
+
+ /// Checks that `range` is within the permitted range if any. This is applicable when we're
+ /// processing a macro expansion and we want to fail the match if we're working with a node that
+ /// didn't originate from the token tree of the macro call.
+ fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
+ if let Some(restrict_range) = &self.restrict_range {
+ if restrict_range.file_id != range.file_id
+ || !restrict_range.range.contains_range(range.range)
+ {
+ fail_match!("Node originated from a macro");
+ }
+ }
+ Ok(())
+ }
+
+ fn attempt_match_node(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: &SyntaxNode,
+ code: &SyntaxNode,
+ ) -> Result<(), MatchFailed> {
+ // Handle placeholders.
+ if let Some(placeholder) = self.get_placeholder_for_node(pattern) {
+ for constraint in &placeholder.constraints {
+ self.check_constraint(constraint, code)?;
+ }
+ if let Phase::Second(matches_out) = phase {
+ let original_range = self.sema.original_range(code);
+ // We validated the range for the node when we started the match, so the placeholder
+ // probably can't fail range validation, but just to be safe...
+ self.validate_range(&original_range)?;
+ matches_out.placeholder_values.insert(
+ placeholder.ident.clone(),
+ PlaceholderMatch::from_range(original_range),
+ );
+ }
+ return Ok(());
+ }
+ // We allow a UFCS call to match a method call, provided they resolve to the same function.
+ if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) {
+ if let Some(code) = ast::MethodCallExpr::cast(code.clone()) {
+ return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code);
+ }
+ if let Some(code) = ast::CallExpr::cast(code.clone()) {
+ return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code);
+ }
+ }
+ if pattern.kind() != code.kind() {
+ fail_match!(
+ "Pattern had `{}` ({:?}), code had `{}` ({:?})",
+ pattern.text(),
+ pattern.kind(),
+ code.text(),
+ code.kind()
+ );
+ }
+ // Some kinds of nodes have special handling. For everything else, we fall back to default
+ // matching.
+ match code.kind() {
+ SyntaxKind::RECORD_EXPR_FIELD_LIST => {
+ self.attempt_match_record_field_list(phase, pattern, code)
+ }
+ SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
+ SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
+ _ => self.attempt_match_node_children(phase, pattern, code),
+ }
+ }
+
+ fn attempt_match_node_children(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: &SyntaxNode,
+ code: &SyntaxNode,
+ ) -> Result<(), MatchFailed> {
+ self.attempt_match_sequences(
+ phase,
+ PatternIterator::new(pattern),
+ code.children_with_tokens(),
+ )
+ }
+
+ fn attempt_match_sequences(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern_it: PatternIterator,
+ mut code_it: SyntaxElementChildren,
+ ) -> Result<(), MatchFailed> {
+ let mut pattern_it = pattern_it.peekable();
+ loop {
+ match phase.next_non_trivial(&mut code_it) {
+ None => {
+ if let Some(p) = pattern_it.next() {
+ fail_match!("Part of the pattern was unmatched: {:?}", p);
+ }
+ return Ok(());
+ }
+ Some(SyntaxElement::Token(c)) => {
+ self.attempt_match_token(phase, &mut pattern_it, &c)?;
+ }
+ Some(SyntaxElement::Node(c)) => match pattern_it.next() {
+ Some(SyntaxElement::Node(p)) => {
+ self.attempt_match_node(phase, &p, &c)?;
+ }
+ Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
+ None => fail_match!("Pattern reached end, code has {}", c.text()),
+ },
+ }
+ }
+ }
+
+ fn attempt_match_token(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: &mut Peekable<PatternIterator>,
+ code: &syntax::SyntaxToken,
+ ) -> Result<(), MatchFailed> {
+ phase.record_ignored_comments(code);
+ // Ignore whitespace and comments.
+ if code.kind().is_trivia() {
+ return Ok(());
+ }
+ if let Some(SyntaxElement::Token(p)) = pattern.peek() {
+ // If the code has a comma and the pattern is about to close something, then accept the
+ // comma without advancing the pattern. i.e. ignore trailing commas.
+ if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
+ return Ok(());
+ }
+ // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
+ // pattern and continue to match the code.
+ if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
+ pattern.next();
+ }
+ }
+ // Consume an element from the pattern and make sure it matches.
+ match pattern.next() {
+ Some(SyntaxElement::Token(p)) => {
+ if p.kind() != code.kind() || p.text() != code.text() {
+ fail_match!(
+ "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
+ p.text(),
+ p.kind(),
+ code.text(),
+ code.kind()
+ )
+ }
+ }
+ Some(SyntaxElement::Node(p)) => {
+ // Not sure if this is actually reachable.
+ fail_match!(
+ "Pattern wanted {:?}, but code had token '{}' ({:?})",
+ p,
+ code.text(),
+ code.kind()
+ );
+ }
+ None => {
+ fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
+ }
+ }
+ Ok(())
+ }
+
+ fn check_constraint(
+ &self,
+ constraint: &Constraint,
+ code: &SyntaxNode,
+ ) -> Result<(), MatchFailed> {
+ match constraint {
+ Constraint::Kind(kind) => {
+ kind.matches(code)?;
+ }
+ Constraint::Not(sub) => {
+ if self.check_constraint(&*sub, code).is_ok() {
+ fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Paths are matched based on whether they refer to the same thing, even if they're written
+ /// differently.
+ fn attempt_match_path(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: &SyntaxNode,
+ code: &SyntaxNode,
+ ) -> Result<(), MatchFailed> {
+ if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
+ let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
+ let code_path = ast::Path::cast(code.clone()).unwrap();
+ if let (Some(pattern_segment), Some(code_segment)) =
+ (pattern_path.segment(), code_path.segment())
+ {
+ // Match everything within the segment except for the name-ref, which is handled
+                // separately below, by comparing what the path resolves to.
+ self.attempt_match_opt(
+ phase,
+ pattern_segment.generic_arg_list(),
+ code_segment.generic_arg_list(),
+ )?;
+ self.attempt_match_opt(
+ phase,
+ pattern_segment.param_list(),
+ code_segment.param_list(),
+ )?;
+ }
+ if matches!(phase, Phase::Second(_)) {
+ let resolution = self
+ .sema
+ .resolve_path(&code_path)
+ .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
+ if pattern_resolved.resolution != resolution {
+ fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
+ }
+ }
+ } else {
+ return self.attempt_match_node_children(phase, pattern, code);
+ }
+ Ok(())
+ }
+
+ fn attempt_match_opt<T: AstNode>(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: Option<T>,
+ code: Option<T>,
+ ) -> Result<(), MatchFailed> {
+ match (pattern, code) {
+ (Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()),
+ (None, None) => Ok(()),
+ (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
+ (None, Some(c)) => {
+ fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
+ }
+ }
+ }
+
+ /// We want to allow the records to match in any order, so we have special matching logic for
+ /// them.
+ fn attempt_match_record_field_list(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: &SyntaxNode,
+ code: &SyntaxNode,
+ ) -> Result<(), MatchFailed> {
+ // Build a map keyed by field name.
+ let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
+ for child in code.children() {
+ if let Some(record) = ast::RecordExprField::cast(child.clone()) {
+ if let Some(name) = record.field_name() {
+ fields_by_name.insert(name.text().into(), child.clone());
+ }
+ }
+ }
+ for p in pattern.children_with_tokens() {
+ if let SyntaxElement::Node(p) = p {
+ if let Some(name_element) = p.first_child_or_token() {
+ if self.get_placeholder(&name_element).is_some() {
+ // If the pattern is using placeholders for field names then order
+ // independence doesn't make sense. Fall back to regular ordered
+ // matching.
+ return self.attempt_match_node_children(phase, pattern, code);
+ }
+ if let Some(ident) = only_ident(name_element) {
+ let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
+ match_error!(
+ "Placeholder has record field '{}', but code doesn't",
+ ident
+ )
+ })?;
+ self.attempt_match_node(phase, &p, &code_record)?;
+ }
+ }
+ }
+ }
+ if let Some(unmatched_fields) = fields_by_name.keys().next() {
+ fail_match!(
+ "{} field(s) of a record literal failed to match, starting with {}",
+ fields_by_name.len(),
+ unmatched_fields
+ );
+ }
+ Ok(())
+ }
+
+ /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
+    /// tree it can match a sequence of tokens. Note that this code will only be used when the
+ /// pattern matches the macro invocation. For matches within the macro call, we'll already have
+ /// expanded the macro.
+ fn attempt_match_token_tree(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern: &SyntaxNode,
+ code: &syntax::SyntaxNode,
+ ) -> Result<(), MatchFailed> {
+ let mut pattern = PatternIterator::new(pattern).peekable();
+ let mut children = code.children_with_tokens();
+ while let Some(child) = children.next() {
+ if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
+ pattern.next();
+ let next_pattern_token = pattern
+ .peek()
+ .and_then(|p| match p {
+ SyntaxElement::Token(t) => Some(t.clone()),
+ SyntaxElement::Node(n) => n.first_token(),
+ })
+ .map(|p| p.text().to_string());
+ let first_matched_token = child.clone();
+ let mut last_matched_token = child;
+                // Read code tokens until we reach one equal to the next token from our pattern
+ // or we reach the end of the token tree.
+ for next in &mut children {
+ match &next {
+ SyntaxElement::Token(t) => {
+ if Some(t.to_string()) == next_pattern_token {
+ pattern.next();
+ break;
+ }
+ }
+ SyntaxElement::Node(n) => {
+ if let Some(first_token) = n.first_token() {
+ if Some(first_token.text()) == next_pattern_token.as_deref() {
+ if let Some(SyntaxElement::Node(p)) = pattern.next() {
+ // We have a subtree that starts with the next token in our pattern.
+ self.attempt_match_token_tree(phase, &p, n)?;
+ break;
+ }
+ }
+ }
+ }
+ };
+ last_matched_token = next;
+ }
+ if let Phase::Second(match_out) = phase {
+ match_out.placeholder_values.insert(
+ placeholder.ident.clone(),
+ PlaceholderMatch::from_range(FileRange {
+ file_id: self.sema.original_range(code).file_id,
+ range: first_matched_token
+ .text_range()
+ .cover(last_matched_token.text_range()),
+ }),
+ );
+ }
+ continue;
+ }
+ // Match literal (non-placeholder) tokens.
+ match child {
+ SyntaxElement::Token(token) => {
+ self.attempt_match_token(phase, &mut pattern, &token)?;
+ }
+ SyntaxElement::Node(node) => match pattern.next() {
+ Some(SyntaxElement::Node(p)) => {
+ self.attempt_match_token_tree(phase, &p, &node)?;
+ }
+ Some(SyntaxElement::Token(p)) => fail_match!(
+ "Pattern has token '{}', code has subtree '{}'",
+ p.text(),
+ node.text()
+ ),
+ None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
+ },
+ }
+ }
+ if let Some(p) = pattern.next() {
+ fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
+ }
+ Ok(())
+ }
+
+ fn attempt_match_ufcs_to_method_call(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern_ufcs: &UfcsCallInfo,
+ code: &ast::MethodCallExpr,
+ ) -> Result<(), MatchFailed> {
+ use ast::HasArgList;
+ let code_resolved_function = self
+ .sema
+ .resolve_method_call(code)
+ .ok_or_else(|| match_error!("Failed to resolve method call"))?;
+ if pattern_ufcs.function != code_resolved_function {
+ fail_match!("Method call resolved to a different function");
+ }
+ // Check arguments.
+ let mut pattern_args = pattern_ufcs
+ .call_expr
+ .arg_list()
+ .ok_or_else(|| match_error!("Pattern function call has no args"))?
+ .args();
+ // If the function we're calling takes a self parameter, then we store additional
+ // information on the placeholder match about autoderef and autoref. This allows us to use
+ // the placeholder in a context where autoderef and autoref don't apply.
+ if code_resolved_function.self_param(self.sema.db).is_some() {
+ if let (Some(pattern_type), Some(expr)) =
+ (&pattern_ufcs.qualifier_type, &code.receiver())
+ {
+ let deref_count = self.check_expr_type(pattern_type, expr)?;
+ let pattern_receiver = pattern_args.next();
+ self.attempt_match_opt(phase, pattern_receiver.clone(), code.receiver())?;
+ if let Phase::Second(match_out) = phase {
+ if let Some(placeholder_value) = pattern_receiver
+ .and_then(|n| self.get_placeholder_for_node(n.syntax()))
+ .and_then(|placeholder| {
+ match_out.placeholder_values.get_mut(&placeholder.ident)
+ })
+ {
+ placeholder_value.autoderef_count = deref_count;
+ placeholder_value.autoref_kind = self
+ .sema
+ .resolve_method_call_as_callable(code)
+ .and_then(|callable| callable.receiver_param(self.sema.db))
+ .map(|self_param| self_param.kind())
+ .unwrap_or(ast::SelfParamKind::Owned);
+ }
+ }
+ }
+ } else {
+ self.attempt_match_opt(phase, pattern_args.next(), code.receiver())?;
+ }
+ let mut code_args =
+ code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
+ loop {
+ match (pattern_args.next(), code_args.next()) {
+ (None, None) => return Ok(()),
+ (p, c) => self.attempt_match_opt(phase, p, c)?,
+ }
+ }
+ }
+
+ fn attempt_match_ufcs_to_ufcs(
+ &self,
+ phase: &mut Phase<'_>,
+ pattern_ufcs: &UfcsCallInfo,
+ code: &ast::CallExpr,
+ ) -> Result<(), MatchFailed> {
+ use ast::HasArgList;
+ // Check that the first argument is the expected type.
+ if let (Some(pattern_type), Some(expr)) = (
+ &pattern_ufcs.qualifier_type,
+ &code.arg_list().and_then(|code_args| code_args.args().next()),
+ ) {
+ self.check_expr_type(pattern_type, expr)?;
+ }
+ self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
+ }
+
+ /// Verifies that `expr` matches `pattern_type`, possibly after dereferencing some number of
+ /// times. Returns the number of times it needed to be dereferenced.
+ fn check_expr_type(
+ &self,
+ pattern_type: &hir::Type,
+ expr: &ast::Expr,
+ ) -> Result<usize, MatchFailed> {
+ use hir::HirDisplay;
+ let code_type = self
+ .sema
+ .type_of_expr(expr)
+ .ok_or_else(|| {
+ match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
+ })?
+ .original;
+ // Temporary needed to make the borrow checker happy.
+ let res = code_type
+ .autoderef(self.sema.db)
+ .enumerate()
+ .find(|(_, deref_code_type)| pattern_type == deref_code_type)
+ .map(|(count, _)| count)
+ .ok_or_else(|| {
+ match_error!(
+ "Pattern type `{}` didn't match code type `{}`",
+ pattern_type.display(self.sema.db),
+ code_type.display(self.sema.db)
+ )
+ });
+ res
+ }
+
+ fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {
+ self.get_placeholder(&SyntaxElement::Node(node.clone()))
+ }
+
+ fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
+ only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
+ }
+}
+
+impl Match {
+ fn render_template_paths(
+ &mut self,
+ template: &ResolvedPattern,
+ sema: &Semantics<'_, ide_db::RootDatabase>,
+ ) -> Result<(), MatchFailed> {
+ let module = sema
+ .scope(&self.matched_node)
+ .ok_or_else(|| match_error!("Matched node isn't in a module"))?
+ .module();
+ for (path, resolved_path) in &template.resolved_paths {
+ if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
+ let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
+ match_error!("Failed to render template path `{}` at match location")
+ })?;
+ self.rendered_template_paths.insert(path.clone(), mod_path);
+ }
+ }
+ Ok(())
+ }
+}
+
+impl Phase<'_> {
+ fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
+ loop {
+ let c = code_it.next();
+ if let Some(SyntaxElement::Token(t)) = &c {
+ self.record_ignored_comments(t);
+ if t.kind().is_trivia() {
+ continue;
+ }
+ }
+ return c;
+ }
+ }
+
+ fn record_ignored_comments(&mut self, token: &SyntaxToken) {
+ if token.kind() == SyntaxKind::COMMENT {
+ if let Phase::Second(match_out) = self {
+ if let Some(comment) = ast::Comment::cast(token.clone()) {
+ match_out.ignored_comments.push(comment);
+ }
+ }
+ }
+ }
+}
+
+fn is_closing_token(kind: SyntaxKind) -> bool {
+ kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
+}
+
+pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T
+where
+ F: Fn() -> T,
+{
+ RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active));
+ let res = f();
+ RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false));
+ res
+}
+
+// For performance reasons, we don't want to record the reason why every match fails, only the bit
+// of code that the user indicated they thought would match. We use a thread local to indicate when
+// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
+// of code that can make the decision to not match.
+thread_local! {
+ pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
+}
+
+fn recording_match_fail_reasons() -> bool {
+ RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
+}
+
+impl PlaceholderMatch {
+ fn from_range(range: FileRange) -> Self {
+ Self {
+ range,
+ inner_matches: SsrMatches::default(),
+ autoderef_count: 0,
+ autoref_kind: ast::SelfParamKind::Owned,
+ }
+ }
+}
+
+impl NodeKind {
+ fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
+ let ok = match self {
+ Self::Literal => {
+ cov_mark::hit!(literal_constraint);
+ ast::Literal::can_cast(node.kind())
+ }
+ };
+ if !ok {
+ fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
+ }
+ Ok(())
+ }
+}
+
+// If `node` contains nothing but an ident then return it, otherwise return None.
+fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
+ match element {
+ SyntaxElement::Token(t) => {
+ if t.kind() == SyntaxKind::IDENT {
+ return Some(t);
+ }
+ }
+ SyntaxElement::Node(n) => {
+ let mut children = n.children_with_tokens();
+ if let (Some(only_child), None) = (children.next(), children.next()) {
+ return only_ident(only_child);
+ }
+ }
+ }
+ None
+}
+
+struct PatternIterator {
+ iter: SyntaxElementChildren,
+}
+
+impl Iterator for PatternIterator {
+ type Item = SyntaxElement;
+
+ fn next(&mut self) -> Option<SyntaxElement> {
+ for element in &mut self.iter {
+ if !element.kind().is_trivia() {
+ return Some(element);
+ }
+ }
+ None
+ }
+}
+
+impl PatternIterator {
+ fn new(parent: &SyntaxNode) -> Self {
+ Self { iter: parent.children_with_tokens() }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{MatchFinder, SsrRule};
+
+ #[test]
+ fn parse_match_replace() {
+ let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
+ let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
+
+ let (db, position, selections) = crate::tests::single_file(input);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_rule(rule).unwrap();
+ let matches = match_finder.matches();
+ assert_eq!(matches.matches.len(), 1);
+ assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
+ assert_eq!(matches.matches[0].placeholder_values.len(), 1);
+
+ let edits = match_finder.edits();
+ assert_eq!(edits.len(), 1);
+ let edit = &edits[&position.file_id];
+ let mut after = input.to_string();
+ edit.apply(&mut after);
+ assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
+ }
+}
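
The trailing-comma tolerance in `attempt_match_token` can be sketched in isolation. The following is a deliberately simplified, standalone model over plain string tokens; the helper names `is_closing` and `tokens_match` are invented for illustration and are not part of rust-analyzer's API. When the code has a comma just before a closing token the matcher accepts it without advancing the pattern, and when the pattern has such a comma but the code does not, the pattern comma is skipped.

    // Stand-in for `is_closing_token`, over plain strings.
    fn is_closing(tok: &str) -> bool {
        matches!(tok, ")" | "}" | "]")
    }

    /// Returns true if `code` matches `pattern`, treating a comma before a closing
    /// token as optional on either side.
    fn tokens_match(pattern: &[&str], code: &[&str]) -> bool {
        let (mut p, mut c) = (0, 0);
        while c < code.len() {
            // Code has a trailing comma the pattern omits: accept it, keep the pattern position.
            if code[c] == "," && pattern.get(p).map_or(false, |t| is_closing(t)) {
                c += 1;
                continue;
            }
            // Pattern has a trailing comma the code omits: skip it.
            if pattern.get(p) == Some(&",") && is_closing(code[c]) {
                p += 1;
                continue;
            }
            if pattern.get(p) != Some(&code[c]) {
                return false;
            }
            p += 1;
            c += 1;
        }
        p == pattern.len()
    }

    fn main() {
        // `foo(1, 2,)` in the code still matches the pattern `foo(1, 2)`.
        assert!(tokens_match(&["foo", "(", "1", ",", "2", ")"], &["foo", "(", "1", ",", "2", ",", ")"]));
        // A pattern written with a trailing comma matches code without one.
        assert!(tokens_match(&["foo", "(", "1", ",", ")"], &["foo", "(", "1", ")"]));
        assert!(!tokens_match(&["foo", "(", "1", ")"], &["foo", "(", "2", ")"]));
    }
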
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs
new file mode 100644
index 000000000..afaaafd1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/nester.rs
@@ -0,0 +1,99 @@
+//! Converts a flat collection of matches into a nested form suitable for replacement. When there
+//! are multiple matches for a node, or matches that overlap, priority is given to the earlier
+//! rule. Nested matches are only permitted if the inner match is contained entirely within a
+//! placeholder of an outer match.
+//!
+//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
+//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
+//! middle match would take the second `foo` from the outer match.
+
+use ide_db::FxHashMap;
+use syntax::SyntaxNode;
+
+use crate::{Match, SsrMatches};
+
+pub(crate) fn nest_and_remove_collisions(
+ mut matches: Vec<Match>,
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+) -> SsrMatches {
+ // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
+ // see a match, any parent matches or conflicting matches will have already been seen. Sorting
+ // by rule_index means that if there are two matches for the same node, the rule added first
+ // will take precedence.
+ matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
+ let mut collector = MatchCollector::default();
+ for m in matches {
+ collector.add_match(m, sema);
+ }
+ collector.into()
+}
+
+#[derive(Default)]
+struct MatchCollector {
+ matches_by_node: FxHashMap<SyntaxNode, Match>,
+}
+
+impl MatchCollector {
+ /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
+    /// it is entirely within a placeholder of an existing match, then it is added as a child
+ /// match of the existing match.
+ fn add_match(&mut self, m: Match, sema: &hir::Semantics<'_, ide_db::RootDatabase>) {
+ let matched_node = m.matched_node.clone();
+ if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
+ try_add_sub_match(m, existing, sema);
+ return;
+ }
+ for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
+ if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
+ try_add_sub_match(m, existing, sema);
+ return;
+ }
+ }
+ self.matches_by_node.insert(matched_node, m);
+ }
+}
+
+/// Attempts to add `m` as a sub-match of `existing`.
+fn try_add_sub_match(
+ m: Match,
+ existing: &mut Match,
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+) {
+ for p in existing.placeholder_values.values_mut() {
+ // Note, no need to check if p.range.file is equal to m.range.file, since we
+ // already know we're within `existing`.
+ if p.range.range.contains_range(m.range.range) {
+ // Convert the inner matches in `p` into a temporary MatchCollector. When
+ // we're done, we then convert it back into an SsrMatches. If we expected
+ // lots of inner matches, it might be worthwhile keeping a MatchCollector
+            // around for each placeholder match. However we expect most placeholders
+ // will have 0 and a few will have 1. More than that should hopefully be
+ // exceptional.
+ let mut collector = MatchCollector::default();
+ for m in std::mem::take(&mut p.inner_matches.matches) {
+ collector.matches_by_node.insert(m.matched_node.clone(), m);
+ }
+ collector.add_match(m, sema);
+ p.inner_matches = collector.into();
+ break;
+ }
+ }
+}
+
+impl From<MatchCollector> for SsrMatches {
+ fn from(mut match_collector: MatchCollector) -> Self {
+ let mut matches = SsrMatches::default();
+ for (_, m) in match_collector.matches_by_node.drain() {
+ matches.matches.push(m);
+ }
+ matches.matches.sort_by(|a, b| {
+ // Order matches by file_id then by start range. This should be sufficient since ranges
+ // shouldn't be overlapping.
+ a.range
+ .file_id
+ .cmp(&b.range.file_id)
+ .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
+ });
+ matches
+ }
+}
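
The sort order used by `nest_and_remove_collisions` (depth first, then rule index) is what guarantees that a match's ancestors and competing matches are seen before it. A minimal standalone sketch of just that ordering, using a made-up `DemoMatch` struct in place of the crate's `Match`:

    struct DemoMatch {
        depth: usize,
        rule_index: usize,
        text: &'static str,
    }

    fn main() {
        // Three overlapping matches of `foo($a)` against `foo(foo(foo(42)))`, listed out of order.
        let mut matches = vec![
            DemoMatch { depth: 2, rule_index: 0, text: "foo(42)" },
            DemoMatch { depth: 0, rule_index: 0, text: "foo(foo(foo(42)))" },
            DemoMatch { depth: 1, rule_index: 0, text: "foo(foo(42))" },
        ];
        // Same comparator as `nest_and_remove_collisions`: depth first, then rule index.
        matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
        // The outermost match now comes first, so by the time the middle match is processed its
        // conflict with an already-recorded ancestor is visible, while the innermost match can
        // still be nested inside the outer placeholder.
        for m in &matches {
            println!("depth={} rule={} {}", m.depth, m.rule_index, m.text);
        }
    }
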
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
new file mode 100644
index 000000000..f6220b928
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
@@ -0,0 +1,406 @@
+//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`.
+//! We first split everything before and after the separator `==>>`. Next, both the search pattern
+//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for
+//! placeholders, which start with `$`. For replacement templates, this is the final form. For
+//! search patterns, we go further and parse the pattern as each kind of thing that we can match.
+//! e.g. expressions, type references etc.
+use ide_db::{FxHashMap, FxHashSet};
+use std::{fmt::Display, str::FromStr};
+use syntax::{SmolStr, SyntaxKind, SyntaxNode, T};
+
+use crate::errors::bail;
+use crate::{fragments, SsrError, SsrPattern, SsrRule};
+
+#[derive(Debug)]
+pub(crate) struct ParsedRule {
+ pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
+ pub(crate) pattern: SyntaxNode,
+ pub(crate) template: Option<SyntaxNode>,
+}
+
+#[derive(Debug)]
+pub(crate) struct RawPattern {
+ tokens: Vec<PatternElement>,
+}
+
+// Part of a search or replace pattern.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum PatternElement {
+ Token(Token),
+ Placeholder(Placeholder),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) struct Placeholder {
+ /// The name of this placeholder. e.g. for "$a", this would be "a"
+ pub(crate) ident: Var,
+ /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
+ stand_in_name: String,
+ pub(crate) constraints: Vec<Constraint>,
+}
+
+/// Represents a `$var` in an SSR query.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct Var(pub(crate) String);
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Constraint {
+ Kind(NodeKind),
+ Not(Box<Constraint>),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum NodeKind {
+ Literal,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct Token {
+ kind: SyntaxKind,
+ pub(crate) text: SmolStr,
+}
+
+impl ParsedRule {
+ fn new(
+ pattern: &RawPattern,
+ template: Option<&RawPattern>,
+ ) -> Result<Vec<ParsedRule>, SsrError> {
+ let raw_pattern = pattern.as_rust_code();
+ let raw_template = template.map(|t| t.as_rust_code());
+ let raw_template = raw_template.as_deref();
+ let mut builder = RuleBuilder {
+ placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
+ rules: Vec::new(),
+ };
+
+ let raw_template_stmt = raw_template.map(fragments::stmt);
+ if let raw_template_expr @ Some(Ok(_)) = raw_template.map(fragments::expr) {
+ builder.try_add(fragments::expr(&raw_pattern), raw_template_expr);
+ } else {
+ builder.try_add(fragments::expr(&raw_pattern), raw_template_stmt.clone());
+ }
+ builder.try_add(fragments::ty(&raw_pattern), raw_template.map(fragments::ty));
+ builder.try_add(fragments::item(&raw_pattern), raw_template.map(fragments::item));
+ builder.try_add(fragments::pat(&raw_pattern), raw_template.map(fragments::pat));
+ builder.try_add(fragments::stmt(&raw_pattern), raw_template_stmt);
+ builder.build()
+ }
+}
+
+struct RuleBuilder {
+ placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
+ rules: Vec<ParsedRule>,
+}
+
+impl RuleBuilder {
+ fn try_add(
+ &mut self,
+ pattern: Result<SyntaxNode, ()>,
+ template: Option<Result<SyntaxNode, ()>>,
+ ) {
+ match (pattern, template) {
+ (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
+ placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
+ pattern,
+ template: Some(template),
+ }),
+ (Ok(pattern), None) => self.rules.push(ParsedRule {
+ placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
+ pattern,
+ template: None,
+ }),
+ _ => {}
+ }
+ }
+
+ fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
+ if self.rules.is_empty() {
+ bail!("Not a valid Rust expression, type, item, path or pattern");
+ }
+ // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
+ // mix leads to strange semantics, since the path-based rules only match things where the
+ // path refers to semantically the same thing, whereas the non-path-based rules could match
+ // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
+ // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
+ // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
+ // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
+ // have to use the slow-scan search mechanism.
+ if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
+ let old_len = self.rules.len();
+ self.rules.retain(|rule| contains_path(&rule.pattern));
+ if self.rules.len() < old_len {
+ cov_mark::hit!(pattern_is_a_single_segment_path);
+ }
+ }
+ Ok(self.rules)
+ }
+}
+
+/// Returns whether there are any paths in `node`.
+fn contains_path(node: &SyntaxNode) -> bool {
+ node.kind() == SyntaxKind::PATH
+ || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
+}
+
+impl FromStr for SsrRule {
+ type Err = SsrError;
+
+ fn from_str(query: &str) -> Result<SsrRule, SsrError> {
+ let mut it = query.split("==>>");
+ let pattern = it.next().expect("at least empty string").trim();
+ let template = it
+ .next()
+ .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
+ .trim()
+ .to_string();
+ if it.next().is_some() {
+ return Err(SsrError("More than one delimiter found".into()));
+ }
+ let raw_pattern = pattern.parse()?;
+ let raw_template = template.parse()?;
+ let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
+ let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
+ validate_rule(&rule)?;
+ Ok(rule)
+ }
+}
+
+impl FromStr for RawPattern {
+ type Err = SsrError;
+
+ fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
+ Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
+ }
+}
+
+impl RawPattern {
+ /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
+ fn as_rust_code(&self) -> String {
+ let mut res = String::new();
+ for t in &self.tokens {
+ res.push_str(match t {
+ PatternElement::Token(token) => token.text.as_str(),
+ PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(),
+ });
+ }
+ res
+ }
+
+ pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
+ let mut res = FxHashMap::default();
+ for t in &self.tokens {
+ if let PatternElement::Placeholder(placeholder) = t {
+ res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone());
+ }
+ }
+ res
+ }
+}
+
+impl FromStr for SsrPattern {
+ type Err = SsrError;
+
+ fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
+ let raw_pattern = pattern_str.parse()?;
+ let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
+ Ok(SsrPattern { parsed_rules })
+ }
+}
+
+/// Returns `pattern_str` parsed as a sequence of tokens and placeholders, for use as either a
+/// search or a replace pattern. Whitespace tokens are kept at this stage.
+fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
+ let mut res = Vec::new();
+ let mut placeholder_names = FxHashSet::default();
+ let mut tokens = tokenize(pattern_str)?.into_iter();
+ while let Some(token) = tokens.next() {
+ if token.kind == T![$] {
+ let placeholder = parse_placeholder(&mut tokens)?;
+ if !placeholder_names.insert(placeholder.ident.clone()) {
+ bail!("Placeholder `{}` repeats more than once", placeholder.ident);
+ }
+ res.push(PatternElement::Placeholder(placeholder));
+ } else {
+ res.push(PatternElement::Token(token));
+ }
+ }
+ Ok(res)
+}
+
+/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search
+/// pattern didn't define.
+fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
+ let mut defined_placeholders = FxHashSet::default();
+ for p in &rule.pattern.tokens {
+ if let PatternElement::Placeholder(placeholder) = p {
+ defined_placeholders.insert(&placeholder.ident);
+ }
+ }
+ let mut undefined = Vec::new();
+ for p in &rule.template.tokens {
+ if let PatternElement::Placeholder(placeholder) = p {
+ if !defined_placeholders.contains(&placeholder.ident) {
+ undefined.push(placeholder.ident.to_string());
+ }
+ if !placeholder.constraints.is_empty() {
+ bail!("Replacement placeholders cannot have constraints");
+ }
+ }
+ }
+ if !undefined.is_empty() {
+ bail!("Replacement contains undefined placeholders: {}", undefined.join(", "));
+ }
+ Ok(())
+}
+
+fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
+ let lexed = parser::LexedStr::new(source);
+ if let Some((_, first_error)) = lexed.errors().next() {
+ bail!("Failed to parse pattern: {}", first_error);
+ }
+ let mut tokens: Vec<Token> = Vec::new();
+ for i in 0..lexed.len() {
+ tokens.push(Token { kind: lexed.kind(i), text: lexed.text(i).into() });
+ }
+ Ok(tokens)
+}
+
+fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
+ let mut name = None;
+ let mut constraints = Vec::new();
+ if let Some(token) = tokens.next() {
+ match token.kind {
+ SyntaxKind::IDENT => {
+ name = Some(token.text);
+ }
+ T!['{'] => {
+ let token =
+ tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
+ if token.kind == SyntaxKind::IDENT {
+ name = Some(token.text);
+ }
+ loop {
+ let token = tokens
+ .next()
+ .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
+ match token.kind {
+ T![:] => {
+ constraints.push(parse_constraint(tokens)?);
+ }
+ T!['}'] => break,
+ _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
+ }
+ }
+ }
+ _ => {
+ bail!("Placeholders should either be $name or ${{name:constraints}}");
+ }
+ }
+ }
+ let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
+ Ok(Placeholder::new(name, constraints))
+}
+
+fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
+ let constraint_type = tokens
+ .next()
+ .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
+ .text
+ .to_string();
+ match constraint_type.as_str() {
+ "kind" => {
+ expect_token(tokens, "(")?;
+ let t = tokens.next().ok_or_else(|| {
+ SsrError::new("Unexpected end of constraint while looking for kind")
+ })?;
+ if t.kind != SyntaxKind::IDENT {
+ bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
+ }
+ expect_token(tokens, ")")?;
+ Ok(Constraint::Kind(NodeKind::from(&t.text)?))
+ }
+ "not" => {
+ expect_token(tokens, "(")?;
+ let sub = parse_constraint(tokens)?;
+ expect_token(tokens, ")")?;
+ Ok(Constraint::Not(Box::new(sub)))
+ }
+ x => bail!("Unsupported constraint type '{}'", x),
+ }
+}
+
+fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
+ if let Some(t) = tokens.next() {
+ if t.text == expected {
+ return Ok(());
+ }
+ bail!("Expected {} found {}", expected, t.text);
+ }
+ bail!("Expected {} found end of stream", expected);
+}
+
+impl NodeKind {
+ fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
+ Ok(match name.as_str() {
+ "literal" => NodeKind::Literal,
+ _ => bail!("Unknown node kind '{}'", name),
+ })
+ }
+}
+
+impl Placeholder {
+ fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
+ Self {
+ stand_in_name: format!("__placeholder_{}", name),
+ constraints,
+ ident: Var(name.to_string()),
+ }
+ }
+}
+
+impl Display for Var {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "${}", self.0)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn parser_happy_case() {
+ fn token(kind: SyntaxKind, text: &str) -> PatternElement {
+ PatternElement::Token(Token { kind, text: SmolStr::new(text) })
+ }
+ fn placeholder(name: &str) -> PatternElement {
+ PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
+ }
+ let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
+ assert_eq!(
+ result.pattern.tokens,
+ vec![
+ token(SyntaxKind::IDENT, "foo"),
+ token(T!['('], "("),
+ placeholder("a"),
+ token(T![,], ","),
+ token(SyntaxKind::WHITESPACE, " "),
+ placeholder("b"),
+ token(T![')'], ")"),
+ ]
+ );
+ assert_eq!(
+ result.template.tokens,
+ vec![
+ token(SyntaxKind::IDENT, "bar"),
+ token(T!['('], "("),
+ placeholder("b"),
+ token(T![,], ","),
+ token(SyntaxKind::WHITESPACE, " "),
+ placeholder("a"),
+ token(T![')'], ")"),
+ ]
+ );
+ }
+}
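
The rule format handled by this parser, `pattern ==>> template` with `$name` placeholders, can be sketched with ordinary string handling. The sketch below intentionally skips the real Rust lexer and the `${name:constraint}` syntax, so it only illustrates the splitting and the placeholder scan, not the crate's actual implementation:

    // Split once on the `==>>` separator; the real parser also rejects rules with
    // more than one separator, which this sketch does not model.
    fn split_rule(rule: &str) -> Option<(&str, &str)> {
        let mut it = rule.splitn(2, "==>>");
        Some((it.next()?.trim(), it.next()?.trim()))
    }

    // Collect every `$name` in order of appearance.
    fn placeholders(side: &str) -> Vec<String> {
        let mut out = Vec::new();
        for (i, ch) in side.char_indices() {
            if ch == '$' {
                let name: String = side[i + 1..]
                    .chars()
                    .take_while(|c| c.is_ascii_alphanumeric() || *c == '_')
                    .collect();
                if !name.is_empty() {
                    out.push(name);
                }
            }
        }
        out
    }

    fn main() {
        let (pattern, template) = split_rule("foo($a, $b) ==>> bar($b, $a)").unwrap();
        assert_eq!(placeholders(pattern), ["a", "b"]);
        assert_eq!(placeholders(template), ["b", "a"]);
        println!("pattern: {}  template: {}", pattern, template);
    }
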
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
new file mode 100644
index 000000000..e27ef6e35
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
@@ -0,0 +1,242 @@
+//! Code for applying replacement templates for matches that have previously been found.
+
+use ide_db::{FxHashMap, FxHashSet};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize,
+};
+use text_edit::TextEdit;
+
+use crate::{fragments, resolving::ResolvedRule, Match, SsrMatches};
+
+/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
+/// template. Placeholders in the template will have been substituted with whatever they matched to
+/// in the original code.
+pub(crate) fn matches_to_edit(
+ matches: &SsrMatches,
+ file_src: &str,
+ rules: &[ResolvedRule],
+) -> TextEdit {
+ matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
+}
+
+fn matches_to_edit_at_offset(
+ matches: &SsrMatches,
+ file_src: &str,
+ relative_start: TextSize,
+ rules: &[ResolvedRule],
+) -> TextEdit {
+ let mut edit_builder = TextEdit::builder();
+ for m in &matches.matches {
+ edit_builder.replace(
+ m.range.range.checked_sub(relative_start).unwrap(),
+ render_replace(m, file_src, rules),
+ );
+ }
+ edit_builder.finish()
+}
+
+struct ReplacementRenderer<'a> {
+ match_info: &'a Match,
+ file_src: &'a str,
+ rules: &'a [ResolvedRule],
+ rule: &'a ResolvedRule,
+ out: String,
+ // Map from a range within `out` to a token in `template` that represents a placeholder. This is
+ // used to validate that the generated source code doesn't split any placeholder expansions (see
+ // below).
+ placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
+    // Which placeholder tokens need to be wrapped in parentheses in order to ensure that when
+    // `out` is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results
+    // in `1 + 2.to_string()` then the placeholder value `1 + 2` was split and needs parentheses.
+ placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
+}
+
+fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
+ let rule = &rules[match_info.rule_index];
+ let template = rule
+ .template
+ .as_ref()
+ .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
+ let mut renderer = ReplacementRenderer {
+ match_info,
+ file_src,
+ rules,
+ rule,
+ out: String::new(),
+ placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
+ placeholder_tokens_by_range: FxHashMap::default(),
+ };
+ renderer.render_node(&template.node);
+ renderer.maybe_rerender_with_extra_parenthesis(&template.node);
+ for comment in &match_info.ignored_comments {
+ renderer.out.push_str(&comment.syntax().to_string());
+ }
+ renderer.out
+}
+
+impl ReplacementRenderer<'_> {
+ fn render_node_children(&mut self, node: &SyntaxNode) {
+ for node_or_token in node.children_with_tokens() {
+ self.render_node_or_token(&node_or_token);
+ }
+ }
+
+ fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
+ match node_or_token {
+ SyntaxElement::Token(token) => {
+ self.render_token(token);
+ }
+ SyntaxElement::Node(child_node) => {
+ self.render_node(child_node);
+ }
+ }
+ }
+
+ fn render_node(&mut self, node: &SyntaxNode) {
+ if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) {
+ self.out.push_str(&mod_path.to_string());
+ // Emit everything except for the segment's name-ref, since we already effectively
+ // emitted that as part of `mod_path`.
+ if let Some(path) = ast::Path::cast(node.clone()) {
+ if let Some(segment) = path.segment() {
+ for node_or_token in segment.syntax().children_with_tokens() {
+ if node_or_token.kind() != SyntaxKind::NAME_REF {
+ self.render_node_or_token(&node_or_token);
+ }
+ }
+ }
+ }
+ } else {
+ self.render_node_children(node);
+ }
+ }
+
+ fn render_token(&mut self, token: &SyntaxToken) {
+ if let Some(placeholder) = self.rule.get_placeholder(token) {
+ if let Some(placeholder_value) =
+ self.match_info.placeholder_values.get(&placeholder.ident)
+ {
+ let range = &placeholder_value.range.range;
+ let mut matched_text =
+ self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
+ // If a method call is performed directly on the placeholder, then autoderef and
+ // autoref will apply, so we can just substitute whatever the placeholder matched to
+                // directly. If we're not applying a method call, then we need to explicitly add
+                // derefs and refs in order to match whatever was being done implicitly at the
+                // match site.
+ if !token_is_method_call_receiver(token)
+ && (placeholder_value.autoderef_count > 0
+ || placeholder_value.autoref_kind != ast::SelfParamKind::Owned)
+ {
+ cov_mark::hit!(replace_autoref_autoderef_capture);
+ let ref_kind = match placeholder_value.autoref_kind {
+ ast::SelfParamKind::Owned => "",
+ ast::SelfParamKind::Ref => "&",
+ ast::SelfParamKind::MutRef => "&mut ",
+ };
+ matched_text = format!(
+ "{}{}{}",
+ ref_kind,
+ "*".repeat(placeholder_value.autoderef_count),
+ matched_text
+ );
+ }
+ let edit = matches_to_edit_at_offset(
+ &placeholder_value.inner_matches,
+ self.file_src,
+ range.start(),
+ self.rules,
+ );
+ let needs_parenthesis =
+ self.placeholder_tokens_requiring_parenthesis.contains(token);
+ edit.apply(&mut matched_text);
+ if needs_parenthesis {
+ self.out.push('(');
+ }
+ self.placeholder_tokens_by_range.insert(
+ TextRange::new(
+ TextSize::of(&self.out),
+ TextSize::of(&self.out) + TextSize::of(&matched_text),
+ ),
+ token.clone(),
+ );
+ self.out.push_str(&matched_text);
+ if needs_parenthesis {
+ self.out.push(')');
+ }
+ } else {
+ // We validated that all placeholder references were valid before we
+ // started, so this shouldn't happen.
+ panic!(
+ "Internal error: replacement referenced unknown placeholder {}",
+ placeholder.ident
+ );
+ }
+ } else {
+ self.out.push_str(token.text());
+ }
+ }
+
+    // Checks that the resulting code, when parsed, doesn't split any placeholders due to a
+    // different order of operations between the search pattern and the replacement template. If
+    // any do, then we rerender the template and wrap the problematic placeholders in parentheses.
+ fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
+ if let Some(node) = parse_as_kind(&self.out, template.kind()) {
+ self.remove_node_ranges(node);
+ if self.placeholder_tokens_by_range.is_empty() {
+ return;
+ }
+ self.placeholder_tokens_requiring_parenthesis =
+ self.placeholder_tokens_by_range.values().cloned().collect();
+ self.out.clear();
+ self.render_node(template);
+ }
+ }
+
+ fn remove_node_ranges(&mut self, node: SyntaxNode) {
+ self.placeholder_tokens_by_range.remove(&node.text_range());
+ for child in node.children() {
+ self.remove_node_ranges(child);
+ }
+ }
+}
+
+/// Returns whether token is the receiver of a method call. Note, being within the receiver of a
+/// method call doesn't count. e.g. if the token is `$a`, then `$a.foo()` will return true, while
+/// `($a + $b).foo()` or `x.foo($a)` will return false.
+fn token_is_method_call_receiver(token: &SyntaxToken) -> bool {
+ // Find the first method call among the ancestors of `token`, then check if the only token
+ // within the receiver is `token`.
+ if let Some(receiver) = token
+ .parent_ancestors()
+ .find_map(ast::MethodCallExpr::cast)
+ .and_then(|call| call.receiver())
+ {
+ let tokens = receiver.syntax().descendants_with_tokens().filter_map(|node_or_token| {
+ match node_or_token {
+ SyntaxElement::Token(t) => Some(t),
+ _ => None,
+ }
+ });
+ if let Some((only_token,)) = tokens.collect_tuple() {
+ return only_token == *token;
+ }
+ }
+ false
+}
+
+fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
+ if ast::Expr::can_cast(kind) {
+ if let Ok(expr) = fragments::expr(code) {
+ return Some(expr);
+ }
+ }
+ if ast::Item::can_cast(kind) {
+ if let Ok(item) = fragments::item(code) {
+ return Some(item);
+ }
+ }
+ None
+}
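
The autoref/autoderef adjustment applied in `render_token` amounts to prefixing the captured text with `&` or `&mut ` plus one `*` per recorded deref. A standalone sketch of just that string construction; the `RefKind` enum is a local stand-in for `ast::SelfParamKind`, not the real type:

    #[derive(Clone, Copy)]
    enum RefKind {
        Owned,
        Ref,
        MutRef,
    }

    // Build the prefix that stands in for the autoref/autoderef the compiler applied
    // implicitly at the original match site.
    fn adjust_capture(matched_text: &str, autoderef_count: usize, autoref: RefKind) -> String {
        let ref_kind = match autoref {
            RefKind::Owned => "",
            RefKind::Ref => "&",
            RefKind::MutRef => "&mut ",
        };
        format!("{}{}{}", ref_kind, "*".repeat(autoderef_count), matched_text)
    }

    fn main() {
        // e.g. a pattern `Foo::bar($s)` matched against `x.bar()` where the receiver was
        // auto-referenced and auto-dereferenced once.
        assert_eq!(adjust_capture("x", 1, RefKind::Ref), "&*x");
        // No adjustment needed when the receiver was used by value.
        assert_eq!(adjust_capture("value", 0, RefKind::Owned), "value");
        println!("{}", adjust_capture("x", 2, RefKind::MutRef)); // prints `&mut **x`
    }
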
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
new file mode 100644
index 000000000..4731f14f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
@@ -0,0 +1,308 @@
+//! This module is responsible for resolving paths within rules.
+
+use hir::AsAssocItem;
+use ide_db::{base_db::FilePosition, FxHashMap};
+use parsing::Placeholder;
+use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
+
+use crate::{errors::error, parsing, SsrError};
+
+pub(crate) struct ResolutionScope<'db> {
+ scope: hir::SemanticsScope<'db>,
+ node: SyntaxNode,
+}
+
+pub(crate) struct ResolvedRule {
+ pub(crate) pattern: ResolvedPattern,
+ pub(crate) template: Option<ResolvedPattern>,
+ pub(crate) index: usize,
+}
+
+pub(crate) struct ResolvedPattern {
+ pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
+ pub(crate) node: SyntaxNode,
+ // Paths in `node` that we've resolved.
+ pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
+ pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
+ pub(crate) contains_self: bool,
+}
+
+pub(crate) struct ResolvedPath {
+ pub(crate) resolution: hir::PathResolution,
+ /// The depth of the ast::Path that was resolved within the pattern.
+ pub(crate) depth: u32,
+}
+
+pub(crate) struct UfcsCallInfo {
+ pub(crate) call_expr: ast::CallExpr,
+ pub(crate) function: hir::Function,
+ pub(crate) qualifier_type: Option<hir::Type>,
+}
+
+impl ResolvedRule {
+ pub(crate) fn new(
+ rule: parsing::ParsedRule,
+ resolution_scope: &ResolutionScope<'_>,
+ index: usize,
+ ) -> Result<ResolvedRule, SsrError> {
+ let resolver =
+ Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
+ let resolved_template = match rule.template {
+ Some(template) => Some(resolver.resolve_pattern_tree(template)?),
+ None => None,
+ };
+ Ok(ResolvedRule {
+ pattern: resolver.resolve_pattern_tree(rule.pattern)?,
+ template: resolved_template,
+ index,
+ })
+ }
+
+ pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
+ if token.kind() != SyntaxKind::IDENT {
+ return None;
+ }
+ self.pattern.placeholders_by_stand_in.get(token.text())
+ }
+}
+
+struct Resolver<'a, 'db> {
+ resolution_scope: &'a ResolutionScope<'db>,
+ placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
+}
+
+impl Resolver<'_, '_> {
+ fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
+ use syntax::ast::AstNode;
+ use syntax::{SyntaxElement, T};
+ let mut resolved_paths = FxHashMap::default();
+ self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
+ let ufcs_function_calls = resolved_paths
+ .iter()
+ .filter_map(|(path_node, resolved)| {
+ if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
+ if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) {
+ if let hir::PathResolution::Def(hir::ModuleDef::Function(function)) =
+ resolved.resolution
+ {
+ if function.as_assoc_item(self.resolution_scope.scope.db).is_some() {
+ let qualifier_type =
+ self.resolution_scope.qualifier_type(path_node);
+ return Some((
+ grandparent,
+ UfcsCallInfo { call_expr, function, qualifier_type },
+ ));
+ }
+ }
+ }
+ }
+ None
+ })
+ .collect();
+ let contains_self =
+ pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
+ SyntaxElement::Token(t) => t.kind() == T![self],
+ _ => false,
+ });
+ Ok(ResolvedPattern {
+ node: pattern,
+ resolved_paths,
+ placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
+ ufcs_function_calls,
+ contains_self,
+ })
+ }
+
+ fn resolve(
+ &self,
+ node: SyntaxNode,
+ depth: u32,
+ resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
+ ) -> Result<(), SsrError> {
+ use syntax::ast::AstNode;
+ if let Some(path) = ast::Path::cast(node.clone()) {
+ if is_self(&path) {
+ // Self cannot be resolved like other paths.
+ return Ok(());
+ }
+ // Check if this is an appropriate place in the path to resolve. If the path is
+ // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
+            // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`.
+ if !path_contains_type_arguments(path.qualifier())
+ && !self.path_contains_placeholder(&path)
+ {
+ let resolution = self
+ .resolution_scope
+ .resolve_path(&path)
+ .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
+ if self.ok_to_use_path_resolution(&resolution) {
+ resolved_paths.insert(node, ResolvedPath { resolution, depth });
+ return Ok(());
+ }
+ }
+ }
+ for node in node.children() {
+ self.resolve(node, depth + 1, resolved_paths)?;
+ }
+ Ok(())
+ }
+
+ /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
+ /// arguments.
+ fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
+ if let Some(segment) = path.segment() {
+ if let Some(name_ref) = segment.name_ref() {
+ if self.placeholders_by_stand_in.contains_key(name_ref.text().as_str()) {
+ return true;
+ }
+ }
+ }
+ if let Some(qualifier) = path.qualifier() {
+ return self.path_contains_placeholder(&qualifier);
+ }
+ false
+ }
+
+ fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
+ match resolution {
+ hir::PathResolution::Def(hir::ModuleDef::Function(function))
+ if function.as_assoc_item(self.resolution_scope.scope.db).is_some() =>
+ {
+ if function.self_param(self.resolution_scope.scope.db).is_some() {
+ // If we don't use this path resolution, then we won't be able to match method
+ // calls. e.g. `Foo::bar($s)` should match `x.bar()`.
+ true
+ } else {
+ cov_mark::hit!(replace_associated_trait_default_function_call);
+ false
+ }
+ }
+ hir::PathResolution::Def(
+ def @ (hir::ModuleDef::Const(_) | hir::ModuleDef::TypeAlias(_)),
+ ) if def.as_assoc_item(self.resolution_scope.scope.db).is_some() => {
+ // Not a function. Could be a constant or an associated type.
+ cov_mark::hit!(replace_associated_trait_constant);
+ false
+ }
+ _ => true,
+ }
+ }
+}
+
+impl<'db> ResolutionScope<'db> {
+ pub(crate) fn new(
+ sema: &hir::Semantics<'db, ide_db::RootDatabase>,
+ resolve_context: FilePosition,
+ ) -> Option<ResolutionScope<'db>> {
+ use syntax::ast::AstNode;
+ let file = sema.parse(resolve_context.file_id);
+ // Find a node at the requested position, falling back to the whole file.
+ let node = file
+ .syntax()
+ .token_at_offset(resolve_context.offset)
+ .left_biased()
+ .and_then(|token| token.parent())
+ .unwrap_or_else(|| file.syntax().clone());
+ let node = pick_node_for_resolution(node);
+ let scope = sema.scope(&node)?;
+ Some(ResolutionScope { scope, node })
+ }
+
+ /// Returns the function in which SSR was invoked, if any.
+ pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
+ self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN)
+ }
+
+ fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
+ // First try resolving the whole path. This will work for things like
+ // `std::collections::HashMap`, but will fail for things like
+ // `std::collections::HashMap::new`.
+ if let Some(resolution) = self.scope.speculative_resolve(path) {
+ return Some(resolution);
+ }
+        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap`) and if
+ // that succeeds, then iterate through the candidates on the resolved type with the provided
+ // name.
+ let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?;
+ if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
+ let name = path.segment()?.name_ref()?;
+ let module = self.scope.module();
+ adt.ty(self.scope.db).iterate_path_candidates(
+ self.scope.db,
+ &self.scope,
+ &self.scope.visible_traits().0,
+ Some(module),
+ None,
+ |assoc_item| {
+ let item_name = assoc_item.name(self.scope.db)?;
+ if item_name.to_smol_str().as_str() == name.text() {
+ Some(hir::PathResolution::Def(assoc_item.into()))
+ } else {
+ None
+ }
+ },
+ )
+ } else {
+ None
+ }
+ }
+
+ fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
+ use syntax::ast::AstNode;
+ if let Some(path) = ast::Path::cast(path.clone()) {
+ if let Some(qualifier) = path.qualifier() {
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) =
+ self.resolve_path(&qualifier)
+ {
+ return Some(adt.ty(self.scope.db));
+ }
+ }
+ }
+ None
+ }
+}
+
+fn is_self(path: &ast::Path) -> bool {
+ path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
+}
+
+/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
+/// a statement node, then we can't resolve local variables that were defined in the current scope
+/// (only in parent scopes). So we find another node, ideally a child of the statement where local
+/// variable resolution is permitted.
+fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
+ match node.kind() {
+ SyntaxKind::EXPR_STMT => {
+ if let Some(n) = node.first_child() {
+ cov_mark::hit!(cursor_after_semicolon);
+ return n;
+ }
+ }
+ SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => {
+ if let Some(next) = node.next_sibling() {
+ return pick_node_for_resolution(next);
+ }
+ }
+ SyntaxKind::NAME => {
+ if let Some(parent) = node.parent() {
+ return pick_node_for_resolution(parent);
+ }
+ }
+ _ => {}
+ }
+ node
+}
+
+/// Returns whether `path` or any of its qualifiers contains type arguments.
+fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
+ if let Some(path) = path {
+ if let Some(segment) = path.segment() {
+ if segment.generic_arg_list().is_some() {
+ cov_mark::hit!(type_arguments_within_path);
+ return true;
+ }
+ }
+ return path_contains_type_arguments(path.qualifier());
+ }
+ false
+}
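
`ResolutionScope::resolve_path` resolves the full path first and, only if that fails, resolves the qualifier and searches its items by name (the `std::collections::HashMap::new` case). A hedged sketch of the same two-step lookup over plain maps; the data and the `resolve` helper are invented for illustration and do not reflect the hir API:

    use std::collections::HashMap;

    // Resolve `path` by first looking it up as a whole, then by splitting off the last
    // segment and searching the qualifier's items.
    fn resolve(
        whole_paths: &HashMap<&str, &str>,
        items_by_type: &HashMap<&str, HashMap<&str, &str>>,
        path: &str,
    ) -> Option<String> {
        if let Some(def) = whole_paths.get(path) {
            return Some((*def).to_string());
        }
        let (qualifier, name) = path.rsplit_once("::")?;
        let ty = whole_paths.get(qualifier)?;
        items_by_type.get(ty)?.get(name).map(|def| (*def).to_string())
    }

    fn main() {
        let mut whole_paths = HashMap::new();
        whole_paths.insert("std::collections::HashMap", "struct HashMap");
        let mut items_by_type = HashMap::new();
        items_by_type.insert("struct HashMap", HashMap::from([("new", "fn new")]));
        // The full path is unknown, so the qualifier is resolved and `new` is found
        // among its items.
        assert_eq!(
            resolve(&whole_paths, &items_by_type, "std::collections::HashMap::new").as_deref(),
            Some("fn new"),
        );
    }
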
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
new file mode 100644
index 000000000..0a85569b6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -0,0 +1,289 @@
+//! Searching for matches.
+
+use crate::{
+ matching,
+ resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
+ Match, MatchFinder,
+};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ defs::Definition,
+ search::{SearchScope, UsageSearchResult},
+ FxHashSet,
+};
+use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
+
+/// A cache for the results of find_usages. This is for when we have multiple patterns that share
+/// the same path. e.g. the pattern `foo::Bar` can parse as a path, an expression, a type and a
+/// pattern. In each case the usages of `foo::Bar` are the same and we'd like to avoid finding
+/// them more than once.
+#[derive(Default)]
+pub(crate) struct UsageCache {
+ usages: Vec<(Definition, UsageSearchResult)>,
+}
+
+impl<'db> MatchFinder<'db> {
+ /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
+ /// replacement impossible, so further processing is required in order to properly nest matches
+ /// and remove overlapping matches. This is done in the `nesting` module.
+ pub(crate) fn find_matches_for_rule(
+ &self,
+ rule: &ResolvedRule,
+ usage_cache: &mut UsageCache,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if rule.pattern.contains_self {
+ // If the pattern contains `self` we restrict the scope of the search to just the
+ // current method. No other method can reference the same `self`. This makes the
+ // behavior of `self` consistent with other variables.
+ if let Some(current_function) = self.resolution_scope.current_function() {
+ self.slow_scan_node(&current_function, rule, &None, matches_out);
+ }
+ return;
+ }
+ if pick_path_for_usages(&rule.pattern).is_none() {
+ self.slow_scan(rule, matches_out);
+ return;
+ }
+ self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
+ }
+
+ fn find_matches_for_pattern_tree(
+ &self,
+ rule: &ResolvedRule,
+ pattern: &ResolvedPattern,
+ usage_cache: &mut UsageCache,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if let Some(resolved_path) = pick_path_for_usages(pattern) {
+ let definition: Definition = resolved_path.resolution.clone().into();
+ for file_range in self.find_usages(usage_cache, definition).file_ranges() {
+ for node_to_match in self.find_nodes_to_match(resolved_path, file_range) {
+ if !is_search_permitted_ancestors(&node_to_match) {
+ cov_mark::hit!(use_declaration_with_braces);
+ continue;
+ }
+ self.try_add_match(rule, &node_to_match, &None, matches_out);
+ }
+ }
+ }
+ }
+
+ fn find_nodes_to_match(
+ &self,
+ resolved_path: &ResolvedPath,
+ file_range: FileRange,
+ ) -> Vec<SyntaxNode> {
+ let file = self.sema.parse(file_range.file_id);
+ let depth = resolved_path.depth as usize;
+ let offset = file_range.range.start();
+
+ let mut paths = self
+ .sema
+ .find_nodes_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
+ .peekable();
+
+ if paths.peek().is_some() {
+ paths
+ .filter_map(|path| {
+ self.sema.ancestors_with_macros(path.syntax().clone()).nth(depth)
+ })
+ .collect::<Vec<_>>()
+ } else {
+ self.sema
+ .find_nodes_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
+ .filter_map(|path| {
+ // If the pattern contained a path and we found a reference to that path that wasn't
+ // itself a path, but was a method call, then we need to adjust how far up to try
+ // matching by how deep the path was within a CallExpr. The structure would have been
+ // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
+ // path was part of a CallExpr because if it wasn't then all that will happen is we'll
+ // fail to match, which is the desired behavior.
+ const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
+ if depth < PATH_DEPTH_IN_CALL_EXPR {
+ return None;
+ }
+ self.sema
+ .ancestors_with_macros(path.syntax().clone())
+ .nth(depth - PATH_DEPTH_IN_CALL_EXPR)
+ })
+ .collect::<Vec<_>>()
+ }
+ }
+
+ fn find_usages<'a>(
+ &self,
+ usage_cache: &'a mut UsageCache,
+ definition: Definition,
+ ) -> &'a UsageSearchResult {
+ // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
+        // extend the lifetime of the borrow, so we wouldn't be able to do the insertion on a
+ // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
+ // lookups in the case of a cache hit.
+ if usage_cache.find(&definition).is_none() {
+ let usages = definition.usages(&self.sema).in_scope(self.search_scope()).all();
+ usage_cache.usages.push((definition, usages));
+ return &usage_cache.usages.last().unwrap().1;
+ }
+ usage_cache.find(&definition).unwrap()
+ }
+
+    /// Returns the scope within which we want to search. We don't want an unrestricted search
+ /// scope, since we don't want to find references in external dependencies.
+ fn search_scope(&self) -> SearchScope {
+ // FIXME: We should ideally have a test that checks that we edit local roots and not library
+ // roots. This probably would require some changes to fixtures, since currently everything
+ // seems to get put into a single source root.
+ let mut files = Vec::new();
+ self.search_files_do(|file_id| {
+ files.push(file_id);
+ });
+ SearchScope::files(&files)
+ }
+
+ fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
+ self.search_files_do(|file_id| {
+ let file = self.sema.parse(file_id);
+ let code = file.syntax();
+ self.slow_scan_node(code, rule, &None, matches_out);
+ })
+ }
+
+ fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
+ if self.restrict_ranges.is_empty() {
+ // Unrestricted search.
+ use ide_db::base_db::SourceDatabaseExt;
+ use ide_db::symbol_index::SymbolsDatabase;
+ for &root in self.sema.db.local_roots().iter() {
+ let sr = self.sema.db.source_root(root);
+ for file_id in sr.iter() {
+ callback(file_id);
+ }
+ }
+ } else {
+ // Search is restricted, deduplicate file IDs (generally only one).
+ let mut files = FxHashSet::default();
+ for range in &self.restrict_ranges {
+ if files.insert(range.file_id) {
+ callback(range.file_id);
+ }
+ }
+ }
+ }
+
+ fn slow_scan_node(
+ &self,
+ code: &SyntaxNode,
+ rule: &ResolvedRule,
+ restrict_range: &Option<FileRange>,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if !is_search_permitted(code) {
+ return;
+ }
+ self.try_add_match(rule, code, restrict_range, matches_out);
+ // If we've got a macro call, we already tried matching it pre-expansion, which is the only
+ // way to match the whole macro, now try expanding it and matching the expansion.
+ if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
+ if let Some(expanded) = self.sema.expand(&macro_call) {
+ if let Some(tt) = macro_call.token_tree() {
+ // When matching within a macro expansion, we only want to allow matches of
+ // nodes that originated entirely from within the token tree of the macro call.
+ // i.e. we don't want to match something that came from the macro itself.
+ self.slow_scan_node(
+ &expanded,
+ rule,
+ &Some(self.sema.original_range(tt.syntax())),
+ matches_out,
+ );
+ }
+ }
+ }
+ for child in code.children() {
+ self.slow_scan_node(&child, rule, restrict_range, matches_out);
+ }
+ }
+
+ fn try_add_match(
+ &self,
+ rule: &ResolvedRule,
+ code: &SyntaxNode,
+ restrict_range: &Option<FileRange>,
+ matches_out: &mut Vec<Match>,
+ ) {
+ if !self.within_range_restrictions(code) {
+ cov_mark::hit!(replace_nonpath_within_selection);
+ return;
+ }
+ if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
+ matches_out.push(m);
+ }
+ }
+
+    /// Returns whether `code` is within one of our range restrictions, if we have any. Having no
+    /// range restrictions is considered unrestricted, so this always returns true in that case.
+ fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
+ if self.restrict_ranges.is_empty() {
+ // There is no range restriction.
+ return true;
+ }
+ let node_range = self.sema.original_range(code);
+ for range in &self.restrict_ranges {
+ if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
+ return true;
+ }
+ }
+ false
+ }
+}
+
+/// Returns whether we support matching within `node` and all of its ancestors.
+fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
+ if let Some(parent) = node.parent() {
+ if !is_search_permitted_ancestors(&parent) {
+ return false;
+ }
+ }
+ is_search_permitted(node)
+}
+
+/// Returns whether we support matching within this kind of node.
+fn is_search_permitted(node: &SyntaxNode) -> bool {
+    // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
+    // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
+    // However, we'll then replace just the part we matched, `bar`. We probably need to instead
+    // remove `bar` and insert a new use declaration.
+ node.kind() != SyntaxKind::USE
+}
+
+impl UsageCache {
+ fn find(&mut self, definition: &Definition) -> Option<&UsageSearchResult> {
+ // We expect a very small number of cache entries (generally 1), so a linear scan should be
+ // fast enough and avoids the need to implement Hash for Definition.
+ for (d, refs) in &self.usages {
+ if d == definition {
+ return Some(refs);
+ }
+ }
+ None
+ }
+}
+
+/// Returns a path that's suitable for path resolution. We exclude builtin types, since they
+/// aren't something that we can find references to. We then somewhat arbitrarily pick the longest
+/// path, since a longer path is hopefully less common, making its usages faster to find.
+fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
+ // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
+ // private to the current module, then we definitely would want to pick them over say a path
+ // from std. Possibly we should go further than this and intersect the search scopes for all
+ // resolved paths then search only in that scope.
+ pattern
+ .resolved_paths
+ .iter()
+ .filter(|(_, p)| {
+ !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
+ })
+ .map(|(node, resolved)| (node.text().len(), resolved))
+ .max_by(|(a, _), (b, _)| a.cmp(b))
+ .map(|(_, resolved)| resolved)
+}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
new file mode 100644
index 000000000..1ecb7aa9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
@@ -0,0 +1,1397 @@
+use expect_test::{expect, Expect};
+use ide_db::{
+ base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt},
+ FxHashSet,
+};
+use std::sync::Arc;
+use test_utils::RangeOrOffset;
+
+use crate::{MatchFinder, SsrRule};
+
+fn parse_error_text(query: &str) -> String {
+ format!("{}", query.parse::<SsrRule>().unwrap_err())
+}
+
+#[test]
+fn parser_empty_query() {
+ assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
+}
+
+#[test]
+fn parser_no_delimiter() {
+ assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
+}
+
+#[test]
+fn parser_two_delimiters() {
+ assert_eq!(
+ parse_error_text("foo() ==>> a ==>> b "),
+ "Parse error: More than one delimiter found"
+ );
+}
+
+#[test]
+fn parser_repeated_name() {
+ assert_eq!(
+ parse_error_text("foo($a, $a) ==>>"),
+ "Parse error: Placeholder `$a` repeats more than once"
+ );
+}
+
+#[test]
+fn parser_invalid_pattern() {
+ assert_eq!(
+ parse_error_text(" ==>> ()"),
+ "Parse error: Not a valid Rust expression, type, item, path or pattern"
+ );
+}
+
+#[test]
+fn parser_invalid_template() {
+ assert_eq!(
+ parse_error_text("() ==>> )"),
+ "Parse error: Not a valid Rust expression, type, item, path or pattern"
+ );
+}
+
+#[test]
+fn parser_undefined_placeholder_in_replacement() {
+ assert_eq!(
+ parse_error_text("42 ==>> $a"),
+ "Parse error: Replacement contains undefined placeholders: $a"
+ );
+}
+
+/// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be
+/// the start of the file. If there's a second cursor marker, then we'll also return a single
+/// selection range spanning the two markers.
+pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
+ use ide_db::base_db::fixture::WithFixture;
+ use ide_db::symbol_index::SymbolsDatabase;
+ let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
+ ide_db::RootDatabase::with_range_or_offset(code)
+ } else {
+ let (db, file_id) = ide_db::RootDatabase::with_single_file(code);
+ (db, file_id, RangeOrOffset::Offset(0.into()))
+ };
+ let selections;
+ let position;
+ match range_or_offset {
+ RangeOrOffset::Range(range) => {
+ position = FilePosition { file_id, offset: range.start() };
+ selections = vec![FileRange { file_id, range }];
+ }
+ RangeOrOffset::Offset(offset) => {
+ position = FilePosition { file_id, offset };
+ selections = vec![];
+ }
+ }
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(ide_db::base_db::fixture::WORKSPACE);
+ db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+ (db, position, selections)
+}
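+// Example (illustrative only): `single_file("fn f() { $0foo();$0 }")` returns a position at the
+// first marker and one selection covering `foo();`, while a fixture without any `$0` markers
+// returns a position at offset 0 and an empty selection list.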
+
+fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
+ assert_ssr_transforms(&[rule], input, expected);
+}
+
+fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
+ let (db, position, selections) = single_file(input);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ for rule in rules {
+ let rule: SsrRule = rule.parse().unwrap();
+ match_finder.add_rule(rule).unwrap();
+ }
+ let edits = match_finder.edits();
+ if edits.is_empty() {
+ panic!("No edits were made");
+ }
+    // Note: db.file_text is not necessarily the same as `input`, since fixture parsing strips
+    // metadata such as the `$0` cursor markers.
+ let mut actual = db.file_text(position.file_id).to_string();
+ edits[&position.file_id].apply(&mut actual);
+ expected.assert_eq(&actual);
+}
+
+fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) {
+ let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
+ println!(
+ "Match debug info: {} nodes had text exactly equal to '{}'",
+ debug_info.len(),
+ snippet
+ );
+ for (index, d) in debug_info.iter().enumerate() {
+ println!("Node #{}\n{:#?}\n", index, d);
+ }
+}
+
+fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
+ let (db, position, selections) = single_file(code);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+ let matched_strings: Vec<String> =
+ match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
+ if matched_strings != expected && !expected.is_empty() {
+ print_match_debug_info(&match_finder, position.file_id, expected[0]);
+ }
+ assert_eq!(matched_strings, expected);
+}
+
+fn assert_no_match(pattern: &str, code: &str) {
+ let (db, position, selections) = single_file(code);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+ let matches = match_finder.matches().flattened().matches;
+ if !matches.is_empty() {
+ print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
+ panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
+ }
+}
+
+fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
+ let (db, position, selections) = single_file(code);
+ let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
+ match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+ let mut reasons = Vec::new();
+ for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
+ if let Some(reason) = d.match_failure_reason() {
+ reasons.push(reason.to_owned());
+ }
+ }
+ assert_eq!(reasons, vec![expected_reason]);
+}
+
+#[test]
+fn ssr_let_stmt_in_macro_match() {
+ assert_matches(
+ "let a = 0",
+ r#"
+ macro_rules! m1 { ($a:stmt) => {$a}; }
+ fn f() {m1!{ let a = 0 };}"#,
+ // FIXME: Whitespace is not part of the matched block
+ &["leta=0"],
+ );
+}
+
+#[test]
+fn ssr_let_stmt_in_fn_match() {
+ assert_matches("let $a = 10;", "fn main() { let x = 10; x }", &["let x = 10;"]);
+ assert_matches("let $a = $b;", "fn main() { let x = 10; x }", &["let x = 10;"]);
+}
+
+#[test]
+fn ssr_block_expr_match() {
+ assert_matches("{ let $a = $b; }", "fn main() { let x = 10; }", &["{ let x = 10; }"]);
+ assert_matches("{ let $a = $b; $c }", "fn main() { let x = 10; x }", &["{ let x = 10; x }"]);
+}
+
+#[test]
+fn ssr_let_stmt_replace() {
+ // Pattern and template with trailing semicolon
+ assert_ssr_transform(
+ "let $a = $b; ==>> let $a = 11;",
+ "fn main() { let x = 10; x }",
+ expect![["fn main() { let x = 11; x }"]],
+ );
+}
+
+#[test]
+fn ssr_let_stmt_replace_expr() {
+ // Trailing semicolon should be dropped from the new expression
+ assert_ssr_transform(
+ "let $a = $b; ==>> $b",
+ "fn main() { let x = 10; }",
+ expect![["fn main() { 10 }"]],
+ );
+}
+
+#[test]
+fn ssr_blockexpr_replace_stmt_with_stmt() {
+ assert_ssr_transform(
+ "if $a() {$b;} ==>> $b;",
+ "{
+ if foo() {
+ bar();
+ }
+ Ok(())
+}",
+ expect![[r#"{
+ bar();
+ Ok(())
+}"#]],
+ );
+}
+
+#[test]
+fn ssr_blockexpr_match_trailing_expr() {
+ assert_matches(
+ "if $a() {$b;}",
+ "{
+ if foo() {
+ bar();
+ }
+}",
+ &["if foo() {
+ bar();
+ }"],
+ );
+}
+
+#[test]
+fn ssr_blockexpr_replace_trailing_expr_with_stmt() {
+ assert_ssr_transform(
+ "if $a() {$b;} ==>> $b;",
+ "{
+ if foo() {
+ bar();
+ }
+}",
+ expect![["{
+ bar();
+}"]],
+ );
+}
+
+#[test]
+fn ssr_function_to_method() {
+ assert_ssr_transform(
+ "my_function($a, $b) ==>> ($a).my_method($b)",
+ "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
+ expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
+ )
+}
+
+#[test]
+fn ssr_nested_function() {
+ assert_ssr_transform(
+ "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
+ r#"
+ //- /lib.rs crate:foo
+ fn foo() {}
+ fn bar() {}
+ fn baz() {}
+ fn main { foo (x + value.method(b), x+y-z, true && false) }
+ "#,
+ expect![[r#"
+ fn foo() {}
+ fn bar() {}
+ fn baz() {}
+ fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
+ "#]],
+ )
+}
+
+#[test]
+fn ssr_expected_spacing() {
+ assert_ssr_transform(
+ "foo($x) + bar() ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
+ );
+}
+
+#[test]
+fn ssr_with_extra_space() {
+ assert_ssr_transform(
+ "foo($x ) + bar() ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo( 5 ) +bar( ) }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
+ );
+}
+
+#[test]
+fn ssr_keeps_nested_comment() {
+ assert_ssr_transform(
+ "foo($x) ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
+ )
+}
+
+#[test]
+fn ssr_keeps_comment() {
+ assert_ssr_transform(
+ "foo($x) ==>> bar($x)",
+ "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
+ expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
+ )
+}
+
+#[test]
+fn ssr_struct_lit() {
+ assert_ssr_transform(
+ "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
+ r#"
+ struct Foo() {}
+ impl Foo { fn new() {} }
+ fn main() { Foo{b:2, a:1} }
+ "#,
+ expect![[r#"
+ struct Foo() {}
+ impl Foo { fn new() {} }
+ fn main() { Foo::new(1, 2) }
+ "#]],
+ )
+}
+
+#[test]
+fn ssr_struct_def() {
+ assert_ssr_transform(
+ "struct Foo { $f: $t } ==>> struct Foo($t);",
+ r#"struct Foo { field: i32 }"#,
+ expect![[r#"struct Foo(i32);"#]],
+ )
+}
+
+#[test]
+fn ignores_whitespace() {
+ assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]);
+ assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]);
+}
+
+#[test]
+fn no_match() {
+ assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}");
+}
+
+#[test]
+fn match_fn_definition() {
+ assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]);
+}
+
+#[test]
+fn match_struct_definition() {
+ let code = r#"
+ struct Option<T> {}
+ struct Bar {}
+ struct Foo {name: Option<String>}"#;
+ assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]);
+}
+
+#[test]
+fn match_expr() {
+ let code = r#"
+ fn foo() {}
+ fn f() -> i32 {foo(40 + 2, 42)}"#;
+ assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
+ assert_no_match("foo($a, $b, $c)", code);
+ assert_no_match("foo($a)", code);
+}
+
+#[test]
+fn match_nested_method_calls() {
+ assert_matches(
+ "$a.z().z().z()",
+ "fn f() {h().i().j().z().z().z().d().e()}",
+ &["h().i().j().z().z().z()"],
+ );
+}
+
+// Make sure that our node matching semantics don't differ within macro calls.
+#[test]
+fn match_nested_method_calls_with_macro_call() {
+ assert_matches(
+ "$a.z().z().z()",
+ r#"
+ macro_rules! m1 { ($a:expr) => {$a}; }
+ fn f() {m1!(h().i().j().z().z().z().d().e())}"#,
+ &["h().i().j().z().z().z()"],
+ );
+}
+
+#[test]
+fn match_complex_expr() {
+ let code = r#"
+ fn foo() {} fn bar() {}
+ fn f() -> i32 {foo(bar(40, 2), 42)}"#;
+ assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
+ assert_no_match("foo($a, $b, $c)", code);
+ assert_no_match("foo($a)", code);
+ assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
+}
+
+// Trailing commas in the code should be ignored.
+#[test]
+fn match_with_trailing_commas() {
+ // Code has comma, pattern doesn't.
+ assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
+ assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);
+
+ // Pattern has comma, code doesn't.
+ assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]);
+ assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
+}
+
+#[test]
+fn match_type() {
+ assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]);
+ assert_matches(
+ "Option<$a>",
+ "struct Option<T> {} fn f() -> Option<i32> {42}",
+ &["Option<i32>"],
+ );
+ assert_no_match(
+ "Option<$a>",
+ "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}",
+ );
+}
+
+#[test]
+fn match_struct_instantiation() {
+ let code = r#"
+ struct Foo {bar: i32, baz: i32}
+ fn f() {Foo {bar: 1, baz: 2}}"#;
+ assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
+ // Now with placeholders for all parts of the struct.
+ assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
+ assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
+}
+
+#[test]
+fn match_path() {
+ let code = r#"
+ mod foo {
+ pub fn bar() {}
+ }
+ fn f() {foo::bar(42)}"#;
+ assert_matches("foo::bar", code, &["foo::bar"]);
+ assert_matches("$a::bar", code, &["foo::bar"]);
+ assert_matches("foo::$b", code, &["foo::bar"]);
+}
+
+#[test]
+fn match_pattern() {
+ assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
+}
+
+// If our pattern has a full path, e.g. `a::b::c()`, and the code has `c()` where `c` resolves to
+// `a::b::c`, then we should match.
+#[test]
+fn match_fully_qualified_fn_path() {
+ let code = r#"
+ mod a {
+ pub mod b {
+ pub fn c(_: i32) {}
+ }
+ }
+ use a::b::c;
+ fn f1() {
+ c(42);
+ }
+ "#;
+ assert_matches("a::b::c($a)", code, &["c(42)"]);
+}
+
+#[test]
+fn match_resolved_type_name() {
+ let code = r#"
+ mod m1 {
+ pub mod m2 {
+ pub trait Foo<T> {}
+ }
+ }
+ mod m3 {
+ trait Foo<T> {}
+ fn f1(f: Option<&dyn Foo<bool>>) {}
+ }
+ mod m4 {
+ use crate::m1::m2::Foo;
+ fn f1(f: Option<&dyn Foo<i32>>) {}
+ }
+ "#;
+ assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
+}
+
+#[test]
+fn type_arguments_within_path() {
+ cov_mark::check!(type_arguments_within_path);
+ let code = r#"
+ mod foo {
+ pub struct Bar<T> {t: T}
+ impl<T> Bar<T> {
+ pub fn baz() {}
+ }
+ }
+ fn f1() {foo::Bar::<i32>::baz();}
+ "#;
+ assert_no_match("foo::Bar::<i64>::baz()", code);
+ assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
+}
+
+#[test]
+fn literal_constraint() {
+ cov_mark::check!(literal_constraint);
+ let code = r#"
+ enum Option<T> { Some(T), None }
+ use Option::Some;
+ fn f1() {
+ let x1 = Some(42);
+ let x2 = Some("foo");
+ let x3 = Some(x1);
+ let x4 = Some(40 + 2);
+ let x5 = Some(true);
+ }
+ "#;
+ assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
+ assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
+}
+
+#[test]
+fn match_reordered_struct_instantiation() {
+ assert_matches(
+ "Foo {aa: 1, b: 2, ccc: 3}",
+ "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
+ &["Foo {b: 2, ccc: 3, aa: 1}"],
+ );
+ assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}");
+ assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}");
+ assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}");
+ assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}");
+ assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}");
+ assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}");
+}
+
+#[test]
+fn match_macro_invocation() {
+ assert_matches(
+ "foo!($a)",
+ "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}",
+ &["foo!(foo())"],
+ );
+ assert_matches(
+ "foo!(41, $a, 43)",
+ "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}",
+ &["foo!(41, 42, 43)"],
+ );
+ assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
+ assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
+ assert_matches(
+ "foo!($a())",
+ "macro_rules! foo {() => {}} fn() {foo!(bar())}",
+ &["foo!(bar())"],
+ );
+}
+
+// When matching within a macro expansion, we only allow matches of nodes that originated from
+// the macro call, not from the macro definition.
+#[test]
+fn no_match_expression_from_macro() {
+ assert_no_match(
+ "$a.clone()",
+ r#"
+ macro_rules! m1 {
+ () => {42.clone()}
+ }
+ fn f1() {m1!()}
+ "#,
+ );
+}
+
+// We definitely don't want to allow matching of an expression where part originates from the
+// macro call (`42`) and part from the macro definition (`.clone()`).
+#[test]
+fn no_match_split_expression() {
+ assert_no_match(
+ "$a.clone()",
+ r#"
+ macro_rules! m1 {
+ ($x:expr) => {$x.clone()}
+ }
+ fn f1() {m1!(42)}
+ "#,
+ );
+}
+
+#[test]
+fn replace_function_call() {
+ // This test also makes sure that we ignore empty-ranges.
+ assert_ssr_transform(
+ "foo() ==>> bar()",
+ "fn foo() {$0$0} fn bar() {} fn f1() {foo(); foo();}",
+ expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
+ );
+}
+
+#[test]
+fn replace_function_call_with_placeholders() {
+ assert_ssr_transform(
+ "foo($a, $b) ==>> bar($b, $a)",
+ "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
+ expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
+ );
+}
+
+#[test]
+fn replace_nested_function_calls() {
+ assert_ssr_transform(
+ "foo($a) ==>> bar($a)",
+ "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
+ expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
+ );
+}
+
+#[test]
+fn replace_associated_function_call() {
+ assert_ssr_transform(
+ "Foo::new() ==>> Bar::new()",
+ r#"
+ struct Foo {}
+ impl Foo { fn new() {} }
+ struct Bar {}
+ impl Bar { fn new() {} }
+ fn f1() {Foo::new();}
+ "#,
+ expect![[r#"
+ struct Foo {}
+ impl Foo { fn new() {} }
+ struct Bar {}
+ impl Bar { fn new() {} }
+ fn f1() {Bar::new();}
+ "#]],
+ );
+}
+
+#[test]
+fn replace_associated_trait_default_function_call() {
+ cov_mark::check!(replace_associated_trait_default_function_call);
+ assert_ssr_transform(
+ "Bar2::foo() ==>> Bar2::foo2()",
+ r#"
+ trait Foo { fn foo() {} }
+ pub struct Bar {}
+ impl Foo for Bar {}
+ pub struct Bar2 {}
+ impl Foo for Bar2 {}
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::foo();
+ Bar2::foo();
+ }
+ "#,
+ expect![[r#"
+ trait Foo { fn foo() {} }
+ pub struct Bar {}
+ impl Foo for Bar {}
+ pub struct Bar2 {}
+ impl Foo for Bar2 {}
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::foo();
+ Bar2::foo2();
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_associated_trait_constant() {
+ cov_mark::check!(replace_associated_trait_constant);
+ assert_ssr_transform(
+ "Bar2::VALUE ==>> Bar2::VALUE_2222",
+ r#"
+ trait Foo { const VALUE: i32; const VALUE_2222: i32; }
+ pub struct Bar {}
+ impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ pub struct Bar2 {}
+ impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::VALUE;
+ Bar2::VALUE;
+ }
+ "#,
+ expect![[r#"
+ trait Foo { const VALUE: i32; const VALUE_2222: i32; }
+ pub struct Bar {}
+ impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ pub struct Bar2 {}
+ impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
+ impl Bar2 { fn foo2() {} }
+ fn main() {
+ Bar::VALUE;
+ Bar2::VALUE_2222;
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_path_in_different_contexts() {
+    // Note the $0 inside module a::b which marks the point where the rule is interpreted. We
+    // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
+    // foo is unqualified because of a use statement; however, the replacement needs to be fully
+    // qualified.
+ assert_ssr_transform(
+ "c::foo() ==>> c::bar()",
+ r#"
+ mod a {
+ pub mod b {$0
+ pub mod c {
+ pub fn foo() {}
+ pub fn bar() {}
+ fn f1() { foo() }
+ }
+ fn f2() { c::foo() }
+ }
+ fn f3() { b::c::foo() }
+ }
+ use a::b::c::foo;
+ fn f4() { foo() }
+ "#,
+ expect![[r#"
+ mod a {
+ pub mod b {
+ pub mod c {
+ pub fn foo() {}
+ pub fn bar() {}
+ fn f1() { bar() }
+ }
+ fn f2() { c::bar() }
+ }
+ fn f3() { b::c::bar() }
+ }
+ use a::b::c::foo;
+ fn f4() { a::b::c::bar() }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_associated_function_with_generics() {
+ assert_ssr_transform(
+ "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
+ r#"
+ mod c {
+ pub struct Foo<T> {v: T}
+ impl<T> Foo<T> { pub fn new() {} }
+ fn f1() {
+ Foo::<i32>::new();
+ }
+ }
+ mod d {
+ pub struct Bar<T> {v: T}
+ impl<T> Bar<T> { pub fn default() {} }
+ fn f1() {
+ super::c::Foo::<i32>::new();
+ }
+ }
+ "#,
+ expect![[r#"
+ mod c {
+ pub struct Foo<T> {v: T}
+ impl<T> Foo<T> { pub fn new() {} }
+ fn f1() {
+ crate::d::Bar::<i32>::default();
+ }
+ }
+ mod d {
+ pub struct Bar<T> {v: T}
+ impl<T> Bar<T> { pub fn default() {} }
+ fn f1() {
+ Bar::<i32>::default();
+ }
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_type() {
+ assert_ssr_transform(
+ "Result<(), $a> ==>> Option<$a>",
+ "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
+ expect![[
+ "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
+ ]],
+ );
+ assert_ssr_transform(
+ "dyn Trait<$a> ==>> DynTrait<$a>",
+ r#"
+trait Trait<T> {}
+struct DynTrait<T> {}
+fn f1() -> dyn Trait<Vec<Error>> {foo()}
+"#,
+ expect![[r#"
+trait Trait<T> {}
+struct DynTrait<T> {}
+fn f1() -> DynTrait<Vec<Error>> {foo()}
+"#]],
+ );
+}
+
+#[test]
+fn replace_macro_invocations() {
+ assert_ssr_transform(
+ "try!($a) ==>> $a?",
+ "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
+ expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
+ );
+ // FIXME: Figure out why this doesn't work anymore
+ // assert_ssr_transform(
+ // "foo!($a($b)) ==>> foo($b, $a)",
+ // "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
+ // expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
+ // );
+}
+
+#[test]
+fn replace_binary_op() {
+ assert_ssr_transform(
+ "$a + $b ==>> $b + $a",
+ "fn f() {2 * 3 + 4 * 5}",
+ expect![["fn f() {4 * 5 + 2 * 3}"]],
+ );
+ assert_ssr_transform(
+ "$a + $b ==>> $b + $a",
+ "fn f() {1 + 2 + 3 + 4}",
+ expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
+ );
+}
+
+#[test]
+fn match_binary_op() {
+ assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]);
+}
+
+#[test]
+fn multiple_rules() {
+ assert_ssr_transforms(
+ &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
+ "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
+ expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
+ )
+}
+
+#[test]
+fn multiple_rules_with_nested_matches() {
+ assert_ssr_transforms(
+ &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
+ r#"
+ fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
+ fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
+ "#,
+ expect![[r#"
+ fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
+ fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
+ "#]],
+ )
+}
+
+#[test]
+fn match_within_macro_invocation() {
+ let code = r#"
+ macro_rules! foo {
+ ($a:stmt; $b:expr) => {
+ $b
+ };
+ }
+ struct A {}
+ impl A {
+ fn bar() {}
+ }
+ fn f1() {
+ let aaa = A {};
+ foo!(macro_ignores_this(); aaa.bar());
+ }
+ "#;
+ assert_matches("$a.bar()", code, &["aaa.bar()"]);
+}
+
+#[test]
+fn replace_within_macro_expansion() {
+ assert_ssr_transform(
+ "$a.foo() ==>> bar($a)",
+ r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn bar() {}
+ fn f() {macro1!(5.x().foo().o2())}
+ "#,
+ expect![[r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn bar() {}
+ fn f() {macro1!(bar(5.x()).o2())}
+ "#]],
+ )
+}
+
+#[test]
+fn replace_outside_and_within_macro_expansion() {
+ assert_ssr_transform(
+ "foo($a) ==>> bar($a)",
+ r#"
+ fn foo() {} fn bar() {}
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {foo(foo(macro1!(foo(foo(42)))))}
+ "#,
+ expect![[r#"
+ fn foo() {} fn bar() {}
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {bar(bar(macro1!(bar(bar(42)))))}
+ "#]],
+ )
+}
+
+#[test]
+fn preserves_whitespace_within_macro_expansion() {
+ assert_ssr_transform(
+ "$a + $b ==>> $b - $a",
+ r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {macro1!(1 * 2 + 3 + 4)}
+ "#,
+ expect![[r#"
+ macro_rules! macro1 {
+ ($a:expr) => {$a}
+ }
+ fn f() {macro1!(4 - (3 - 1 * 2))}
+ "#]],
+ )
+}
+
+#[test]
+fn add_parenthesis_when_necessary() {
+ assert_ssr_transform(
+ "foo($a) ==>> $a.to_string()",
+ r#"
+ fn foo(_: i32) {}
+ fn bar3(v: i32) {
+ foo(1 + 2);
+ foo(-v);
+ }
+ "#,
+ expect![[r#"
+ fn foo(_: i32) {}
+ fn bar3(v: i32) {
+ (1 + 2).to_string();
+ (-v).to_string();
+ }
+ "#]],
+ )
+}
+
+#[test]
+fn match_failure_reasons() {
+ let code = r#"
+ fn bar() {}
+ macro_rules! foo {
+ ($a:expr) => {
+ 1 + $a + 2
+ };
+ }
+ fn f1() {
+ bar(1, 2);
+ foo!(5 + 43.to_string() + 5);
+ }
+ "#;
+ assert_match_failure_reason(
+ "bar($a, 3)",
+ code,
+ "bar(1, 2)",
+ r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
+ );
+ assert_match_failure_reason(
+ "42.to_string()",
+ code,
+ "43.to_string()",
+ r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
+ );
+}
+
+#[test]
+fn overlapping_possible_matches() {
+    // There are three possible matches here; however, the middle one, `foo(foo(foo(42)))`,
+    // shouldn't match because it overlaps with the outer match. The inner match is permitted
+    // since it is contained entirely within the placeholder of the outer match.
+ assert_matches(
+ "foo(foo($a))",
+ "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
+ &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
+ );
+}
+
+#[test]
+fn use_declaration_with_braces() {
+ // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
+ // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
+ // foo2::bar2}`.
+ cov_mark::check!(use_declaration_with_braces);
+ assert_ssr_transform(
+ "foo::bar ==>> foo2::bar2",
+ r#"
+ mod foo { pub fn bar() {} pub fn baz() {} }
+ mod foo2 { pub fn bar2() {} }
+ use foo::{baz, bar};
+ fn main() { bar() }
+ "#,
+ expect![["
+ mod foo { pub fn bar() {} pub fn baz() {} }
+ mod foo2 { pub fn bar2() {} }
+ use foo::{baz, bar};
+ fn main() { foo2::bar2() }
+ "]],
+ )
+}
+
+#[test]
+fn ufcs_matches_method_call() {
+ let code = r#"
+ struct Foo {}
+ impl Foo {
+ fn new(_: i32) -> Foo { Foo {} }
+ fn do_stuff(&self, _: i32) {}
+ }
+ struct Bar {}
+ impl Bar {
+ fn new(_: i32) -> Bar { Bar {} }
+ fn do_stuff(&self, v: i32) {}
+ }
+ fn main() {
+ let b = Bar {};
+ let f = Foo {};
+ b.do_stuff(1);
+ f.do_stuff(2);
+ Foo::new(4).do_stuff(3);
+ // Too many / too few args - should never match
+ f.do_stuff(2, 10);
+ f.do_stuff();
+ }
+ "#;
+ assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
+    // The arguments need special handling in the case of a function call matching a method call
+    // where the first argument is different.
+ assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
+ assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
+
+ assert_ssr_transform(
+ "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn new(_: i32) -> Foo { Foo {} }
+ fn do_stuff(&self, _: i32) {}
+ }
+ struct Bar {}
+ impl Bar {
+ fn new(_: i32) -> Bar { Bar {} }
+ fn do_stuff(&self, v: i32) {}
+ }
+ fn main() {
+ let b = Bar {};
+ let f = Foo {};
+ b.do_stuff(1);
+ f.do_stuff(2);
+ Bar::new(3).do_stuff(4);
+ // Too many / too few args - should never match
+ f.do_stuff(2, 10);
+ f.do_stuff();
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_is_a_single_segment_path() {
+ cov_mark::check!(pattern_is_a_single_segment_path);
+ // The first function should not be altered because the `foo` in scope at the cursor position is
+ // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT ->
+ // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo`
+ // in `let foo` from the first function. Whether we should match the `let foo` in the second
+ // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
+ // which isn't really what SSR is for, especially since the replacement `bar` must be able to be
+ // resolved, which means if we rename `foo` we'll get a name collision.
+ assert_ssr_transform(
+ "foo ==>> bar",
+ r#"
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ foo
+ }
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ foo$0
+ }
+ "#,
+ expect![[r#"
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ foo
+ }
+ fn f1() -> i32 {
+ let foo = 1;
+ let bar = 2;
+ bar
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_local_variable_reference() {
+ // The pattern references a local variable `foo` in the block containing the cursor. We should
+ // only replace references to this variable `foo`, not other variables that just happen to have
+ // the same name.
+ cov_mark::check!(cursor_after_semicolon);
+ assert_ssr_transform(
+ "foo + $a ==>> $a - foo",
+ r#"
+ fn bar1() -> i32 {
+ let mut res = 0;
+ let foo = 5;
+ res += foo + 1;
+ let foo = 10;
+ res += foo + 2;$0
+ res += foo + 3;
+ let foo = 15;
+ res += foo + 4;
+ res
+ }
+ "#,
+ expect![[r#"
+ fn bar1() -> i32 {
+ let mut res = 0;
+ let foo = 5;
+ res += foo + 1;
+ let foo = 10;
+ res += 2 - foo;
+ res += 3 - foo;
+ let foo = 15;
+ res += foo + 4;
+ res
+ }
+ "#]],
+ )
+}
+
+#[test]
+fn replace_path_within_selection() {
+ assert_ssr_transform(
+ "foo ==>> bar",
+ r#"
+ fn main() {
+ let foo = 41;
+ let bar = 42;
+ do_stuff(foo);
+ do_stuff(foo);$0
+ do_stuff(foo);
+ do_stuff(foo);$0
+ do_stuff(foo);
+ }"#,
+ expect![[r#"
+ fn main() {
+ let foo = 41;
+ let bar = 42;
+ do_stuff(foo);
+ do_stuff(foo);
+ do_stuff(bar);
+ do_stuff(bar);
+ do_stuff(foo);
+ }"#]],
+ );
+}
+
+#[test]
+fn replace_nonpath_within_selection() {
+ cov_mark::check!(replace_nonpath_within_selection);
+ assert_ssr_transform(
+ "$a + $b ==>> $b * $a",
+ r#"
+ fn main() {
+ let v = 1 + 2;$0
+ let v2 = 3 + 3;
+ let v3 = 4 + 5;$0
+ let v4 = 6 + 7;
+ }"#,
+ expect![[r#"
+ fn main() {
+ let v = 1 + 2;
+ let v2 = 3 * 3;
+ let v3 = 5 * 4;
+ let v4 = 6 + 7;
+ }"#]],
+ );
+}
+
+#[test]
+fn replace_self() {
+    // `foo(self)` occurs twice in the code; however, only the first occurrence is the `self`
+    // that's in scope where the rule is invoked.
+ assert_ssr_transform(
+ "foo(self) ==>> bar(self)",
+ r#"
+ struct S1 {}
+ fn foo(_: &S1) {}
+ fn bar(_: &S1) {}
+ impl S1 {
+ fn f1(&self) {
+ foo(self)$0
+ }
+ fn f2(&self) {
+ foo(self)
+ }
+ }
+ "#,
+ expect![[r#"
+ struct S1 {}
+ fn foo(_: &S1) {}
+ fn bar(_: &S1) {}
+ impl S1 {
+ fn f1(&self) {
+ bar(self)
+ }
+ fn f2(&self) {
+ foo(self)
+ }
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn match_trait_method_call() {
+ // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type
+ // matches what's in the pattern. Also checks that we handle autoderef.
+ let code = r#"
+ pub struct Bar {}
+ pub struct Bar2 {}
+ pub trait Foo {
+ fn foo(&self, _: i32) {}
+ }
+ impl Foo for Bar {}
+ impl Foo for Bar2 {}
+ fn main() {
+ let v1 = Bar {};
+ let v2 = Bar2 {};
+ let v1_ref = &v1;
+ let v2_ref = &v2;
+ v1.foo(1);
+ v2.foo(2);
+ Bar::foo(&v1, 3);
+ Bar2::foo(&v2, 4);
+ v1_ref.foo(5);
+ v2_ref.foo(6);
+ }
+ "#;
+ assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]);
+ assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]);
+}
+
+#[test]
+fn replace_autoref_autoderef_capture() {
+    // Here we have several calls to `$a.foo()`. In the first case autoref is applied; in the
+    // second, we already have a reference, so it isn't. When $a is used in a context where
+    // autoref doesn't apply, we need to prefix it with `&`. Finally, we have some cases where
+    // autoderef needs to be applied.
+ cov_mark::check!(replace_autoref_autoderef_capture);
+ let code = r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&self) {}
+ fn foo2(&self) {}
+ }
+ fn bar(_: &Foo) {}
+ fn main() {
+ let f = Foo {};
+ let fr = &f;
+ let fr2 = &fr;
+ let fr3 = &fr2;
+ f.foo();
+ fr.foo();
+ fr2.foo();
+ fr3.foo();
+ }
+ "#;
+ assert_ssr_transform(
+ "Foo::foo($a) ==>> bar($a)",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&self) {}
+ fn foo2(&self) {}
+ }
+ fn bar(_: &Foo) {}
+ fn main() {
+ let f = Foo {};
+ let fr = &f;
+ let fr2 = &fr;
+ let fr3 = &fr2;
+ bar(&f);
+ bar(&*fr);
+ bar(&**fr2);
+ bar(&***fr3);
+ }
+ "#]],
+ );
+ // If the placeholder is used as the receiver of another method call, then we don't need to
+ // explicitly autoderef or autoref.
+ assert_ssr_transform(
+ "Foo::foo($a) ==>> $a.foo2()",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&self) {}
+ fn foo2(&self) {}
+ }
+ fn bar(_: &Foo) {}
+ fn main() {
+ let f = Foo {};
+ let fr = &f;
+ let fr2 = &fr;
+ let fr3 = &fr2;
+ f.foo2();
+ fr.foo2();
+ fr2.foo2();
+ fr3.foo2();
+ }
+ "#]],
+ );
+}
+
+#[test]
+fn replace_autoref_mut() {
+ let code = r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&mut self) {}
+ }
+ fn bar(_: &mut Foo) {}
+ fn main() {
+ let mut f = Foo {};
+ f.foo();
+ let fr = &mut f;
+ fr.foo();
+ }
+ "#;
+ assert_ssr_transform(
+ "Foo::foo($a) ==>> bar($a)",
+ code,
+ expect![[r#"
+ struct Foo {}
+ impl Foo {
+ fn foo(&mut self) {}
+ }
+ fn bar(_: &mut Foo) {}
+ fn main() {
+ let mut f = Foo {};
+ bar(&mut f);
+ let fr = &mut f;
+ bar(&mut *fr);
+ }
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
new file mode 100644
index 000000000..0e9771cd2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -0,0 +1,47 @@
+[package]
+name = "ide"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+crossbeam-channel = "0.5.5"
+either = "1.7.0"
+itertools = "0.10.3"
+tracing = "0.1.35"
+oorandom = "11.1.3"
+pulldown-cmark-to-cmark = "10.0.1"
+pulldown-cmark = { version = "0.9.1", default-features = false }
+url = "2.2.2"
+dot = "0.1.4"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+ide-assists = { path = "../ide-assists", version = "0.0.0" }
+ide-diagnostics = { path = "../ide-diagnostics", version = "0.0.0" }
+ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
+ide-completion = { path = "../ide-completion", version = "0.0.0" }
+
+# ide should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+
+[target.'cfg(not(any(target_arch = "wasm32", target_os = "emscripten")))'.dependencies]
+toolchain = { path = "../toolchain", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
+
+[features]
+in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
new file mode 100644
index 000000000..210c5c7fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
@@ -0,0 +1,789 @@
+use hir::{HasSource, InFile, Semantics};
+use ide_db::{
+ base_db::{FileId, FilePosition, FileRange},
+ defs::Definition,
+ helpers::visit_file_defs,
+ RootDatabase,
+};
+use syntax::{ast::HasName, AstNode, TextRange};
+
+use crate::{
+ fn_references::find_all_methods,
+ goto_implementation::goto_implementation,
+ references::find_all_refs,
+ runnables::{runnables, Runnable},
+ NavigationTarget, RunnableKind,
+};
+
+// Feature: Annotations
+//
+// Provides the user with annotations above items for looking up references or impl blocks
+// and running/debugging binaries.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png[]
+#[derive(Debug)]
+pub struct Annotation {
+ pub range: TextRange,
+ pub kind: AnnotationKind,
+}
+
+#[derive(Debug)]
+pub enum AnnotationKind {
+ Runnable(Runnable),
+ HasImpls { file_id: FileId, data: Option<Vec<NavigationTarget>> },
+ HasReferences { file_id: FileId, data: Option<Vec<FileRange>> },
+}
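+// Note: for `HasImpls` and `HasReferences`, `data` starts out as `None` when produced by
+// `annotations` below and is only populated later by `resolve_annotation`.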
+
+pub struct AnnotationConfig {
+ pub binary_target: bool,
+ pub annotate_runnables: bool,
+ pub annotate_impls: bool,
+ pub annotate_references: bool,
+ pub annotate_method_references: bool,
+ pub annotate_enum_variant_references: bool,
+}
+
+pub(crate) fn annotations(
+ db: &RootDatabase,
+ config: &AnnotationConfig,
+ file_id: FileId,
+) -> Vec<Annotation> {
+ let mut annotations = Vec::default();
+
+ if config.annotate_runnables {
+ for runnable in runnables(db, file_id) {
+ if should_skip_runnable(&runnable.kind, config.binary_target) {
+ continue;
+ }
+
+ let range = runnable.nav.focus_or_full_range();
+
+ annotations.push(Annotation { range, kind: AnnotationKind::Runnable(runnable) });
+ }
+ }
+
+ visit_file_defs(&Semantics::new(db), file_id, &mut |def| {
+ let range = match def {
+ Definition::Const(konst) if config.annotate_references => {
+ konst.source(db).and_then(|node| name_range(db, node, file_id))
+ }
+ Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => {
+ trait_.source(db).and_then(|node| name_range(db, node, file_id))
+ }
+ Definition::Adt(adt) => match adt {
+ hir::Adt::Enum(enum_) => {
+ if config.annotate_enum_variant_references {
+ enum_
+ .variants(db)
+ .into_iter()
+ .map(|variant| {
+ variant.source(db).and_then(|node| name_range(db, node, file_id))
+ })
+ .flatten()
+ .for_each(|range| {
+ annotations.push(Annotation {
+ range,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ })
+ })
+ }
+ if config.annotate_references || config.annotate_impls {
+ enum_.source(db).and_then(|node| name_range(db, node, file_id))
+ } else {
+ None
+ }
+ }
+ _ => {
+ if config.annotate_references || config.annotate_impls {
+ adt.source(db).and_then(|node| name_range(db, node, file_id))
+ } else {
+ None
+ }
+ }
+ },
+ _ => None,
+ };
+
+ let range = match range {
+ Some(range) => range,
+ None => return,
+ };
+
+ if config.annotate_impls && !matches!(def, Definition::Const(_)) {
+ annotations
+ .push(Annotation { range, kind: AnnotationKind::HasImpls { file_id, data: None } });
+ }
+ if config.annotate_references {
+ annotations.push(Annotation {
+ range,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ });
+ }
+
+ fn name_range<T: HasName>(
+ db: &RootDatabase,
+ node: InFile<T>,
+ source_file_id: FileId,
+ ) -> Option<TextRange> {
+ if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
+ if file_id == source_file_id.into() {
+ return value.name().map(|it| it.syntax().text_range());
+ }
+ }
+ None
+ }
+ });
+
+ if config.annotate_method_references {
+ annotations.extend(find_all_methods(db, file_id).into_iter().map(
+ |FileRange { file_id, range }| Annotation {
+ range,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ },
+ ));
+ }
+
+ annotations
+}
+
+pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation {
+ match annotation.kind {
+ AnnotationKind::HasImpls { file_id, ref mut data } => {
+ *data =
+ goto_implementation(db, FilePosition { file_id, offset: annotation.range.start() })
+ .map(|range| range.info);
+ }
+ AnnotationKind::HasReferences { file_id, ref mut data } => {
+ *data = find_all_refs(
+ &Semantics::new(db),
+ FilePosition { file_id, offset: annotation.range.start() },
+ None,
+ )
+ .map(|result| {
+ result
+ .into_iter()
+ .flat_map(|res| res.references)
+ .flat_map(|(file_id, access)| {
+ access.into_iter().map(move |(range, _)| FileRange { file_id, range })
+ })
+ .collect()
+ });
+ }
+ _ => {}
+ };
+
+ annotation
+}
+
+fn should_skip_runnable(kind: &RunnableKind, binary_target: bool) -> bool {
+ match kind {
+ RunnableKind::Bin => !binary_target,
+ _ => false,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::{fixture, Annotation, AnnotationConfig};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+
+ let annotations: Vec<Annotation> = analysis
+ .annotations(
+ &AnnotationConfig {
+ binary_target: true,
+ annotate_runnables: true,
+ annotate_impls: true,
+ annotate_references: true,
+ annotate_method_references: true,
+ annotate_enum_variant_references: true,
+ },
+ file_id,
+ )
+ .unwrap()
+ .into_iter()
+ .map(|annotation| analysis.resolve_annotation(annotation).unwrap())
+ .collect();
+
+ expect.assert_debug_eq(&annotations);
+ }
+
+ #[test]
+ fn const_annotations() {
+ check(
+ r#"
+const DEMO: i32 = 123;
+
+const UNUSED: i32 = 123;
+
+fn main() {
+ let hello = DEMO;
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 53..57,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 50..85,
+ focus_range: 53..57,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 6..10,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 78..82,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 30..36,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 53..57,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn struct_references_annotations() {
+ check(
+ r#"
+struct Test;
+
+fn main() {
+ let test = Test;
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 17..21,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 14..48,
+ focus_range: 17..21,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 41..45,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 17..21,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn struct_and_trait_impls_annotations() {
+ check(
+ r#"
+struct Test;
+
+trait MyCoolTrait {}
+
+impl MyCoolTrait for Test {}
+
+fn main() {
+ let test = Test;
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 69..73,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 66..100,
+ focus_range: 69..73,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 36..64,
+ focus_range: 57..61,
+ name: "impl",
+ kind: Impl,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 57..61,
+ },
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 93..97,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 20..31,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 36..64,
+ focus_range: 57..61,
+ name: "impl",
+ kind: Impl,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 20..31,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 41..52,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 69..73,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn runnable_annotation() {
+ check(
+ r#"
+fn main() {}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 3..7,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 3..7,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 3..7,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn method_annotations() {
+ check(
+ r#"
+struct Test;
+
+impl Test {
+ fn self_by_ref(&self) {}
+}
+
+fn main() {
+ Test.self_by_ref();
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 61..65,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 58..95,
+ focus_range: 61..65,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasImpls {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 14..56,
+ focus_range: 19..23,
+ name: "impl",
+ kind: Impl,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 7..11,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 19..23,
+ },
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 74..78,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 33..44,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [
+ FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 79..90,
+ },
+ ],
+ ),
+ },
+ },
+ Annotation {
+ range: 61..65,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_annotations() {
+ check(
+ r#"
+fn main() {}
+
+mod tests {
+ #[test]
+ fn my_cool_test() {}
+}
+ "#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 3..7,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 3..7,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 18..23,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 14..64,
+ focus_range: 18..23,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 45..57,
+ kind: Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 30..62,
+ focus_range: 45..57,
+ name: "my_cool_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::my_cool_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ),
+ },
+ Annotation {
+ range: 3..7,
+ kind: HasReferences {
+ file_id: FileId(
+ 0,
+ ),
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_no_annotations_outside_module_tree() {
+ check(
+ r#"
+//- /foo.rs
+struct Foo;
+//- /lib.rs
+// this file comes last since `check` checks the first file only
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_no_annotations_macro_struct_def() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ () => {
+ struct A {}
+ };
+}
+
+m!();
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
new file mode 100644
index 000000000..a18a6bea9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -0,0 +1,460 @@
+//! Entry point for call-hierarchy
+
+use hir::Semantics;
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ search::FileReference,
+ FxIndexMap, RootDatabase,
+};
+use syntax::{ast, AstNode, SyntaxKind::NAME, TextRange};
+
+use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+#[derive(Debug, Clone)]
+pub struct CallItem {
+ pub target: NavigationTarget,
+ pub ranges: Vec<TextRange>,
+}
+
+impl CallItem {
+ #[cfg(test)]
+ pub(crate) fn debug_render(&self) -> String {
+ format!("{} : {:?}", self.target.debug_render(), self.ranges)
+ }
+}
+
+pub(crate) fn call_hierarchy(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ goto_definition::goto_definition(db, position)
+}
+
+pub(crate) fn incoming_calls(
+ db: &RootDatabase,
+ FilePosition { file_id, offset }: FilePosition,
+) -> Option<Vec<CallItem>> {
+ let sema = &Semantics::new(db);
+
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+ let mut calls = CallLocations::default();
+
+ let references = sema
+ .find_nodes_at_offset_with_descend(file, offset)
+ .filter_map(move |node| match node {
+ ast::NameLike::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
+ NameRefClass::Definition(def @ Definition::Function(_)) => Some(def),
+ _ => None,
+ },
+ ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+ NameClass::Definition(def @ Definition::Function(_)) => Some(def),
+ _ => None,
+ },
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .flat_map(|func| func.usages(sema).all());
+
+ for (_, references) in references {
+ let references = references.into_iter().map(|FileReference { name, .. }| name);
+ for name in references {
+ // This target is the containing function
+ let nav = sema.ancestors_with_macros(name.syntax().clone()).find_map(|node| {
+ let def = ast::Fn::cast(node).and_then(|fn_| sema.to_def(&fn_))?;
+ def.try_to_nav(sema.db)
+ });
+ if let Some(nav) = nav {
+ calls.add(nav, sema.original_range(name.syntax()).range);
+ }
+ }
+ }
+
+ Some(calls.into_items())
+}
+
+pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+ let sema = Semantics::new(db);
+ let file_id = position.file_id;
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+ let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ NAME => 1,
+ _ => 0,
+ })?;
+ let mut calls = CallLocations::default();
+
+ sema.descend_into_macros(token)
+ .into_iter()
+ .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
+ .filter_map(|item| match item {
+ ast::Item::Const(c) => c.body().map(|it| it.syntax().descendants()),
+ ast::Item::Fn(f) => f.body().map(|it| it.syntax().descendants()),
+ ast::Item::Static(s) => s.body().map(|it| it.syntax().descendants()),
+ _ => None,
+ })
+ .flatten()
+ .filter_map(ast::CallableExpr::cast)
+ .filter_map(|call_node| {
+ let (nav_target, range) = match call_node {
+ ast::CallableExpr::Call(call) => {
+ let expr = call.expr()?;
+ let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?;
+ match callable.kind() {
+ hir::CallableKind::Function(it) => {
+ let range = expr.syntax().text_range();
+ it.try_to_nav(db).zip(Some(range))
+ }
+ _ => None,
+ }
+ }
+ ast::CallableExpr::MethodCall(expr) => {
+ let range = expr.name_ref()?.syntax().text_range();
+ let function = sema.resolve_method_call(&expr)?;
+ function.try_to_nav(db).zip(Some(range))
+ }
+ }?;
+ Some((nav_target, range))
+ })
+ .for_each(|(nav, range)| calls.add(nav, range));
+
+ Some(calls.into_items())
+}
+
+#[derive(Default)]
+struct CallLocations {
+ funcs: FxIndexMap<NavigationTarget, Vec<TextRange>>,
+}
+
+impl CallLocations {
+ fn add(&mut self, target: NavigationTarget, range: TextRange) {
+ self.funcs.entry(target).or_default().push(range);
+ }
+
+ fn into_items(self) -> Vec<CallItem> {
+ self.funcs.into_iter().map(|(target, ranges)| CallItem { target, ranges }).collect()
+ }
+}
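+// Illustrative example: if `caller` invokes `callee` twice, `add` is called twice with the same
+// `NavigationTarget` but different ranges, and `into_items` then yields a single `CallItem` for
+// `callee` whose `ranges` contains both call sites.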
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::FilePosition;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check_hierarchy(
+ ra_fixture: &str,
+ expected: Expect,
+ expected_incoming: Expect,
+ expected_outgoing: Expect,
+ ) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+
+ let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
+ assert_eq!(navs.len(), 1);
+ let nav = navs.pop().unwrap();
+ expected.assert_eq(&nav.debug_render());
+
+ let item_pos =
+ FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
+ let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap();
+ expected_incoming
+ .assert_eq(&incoming_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+
+ let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap();
+ expected_outgoing
+ .assert_eq(&outgoing_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+ }
+
+ #[test]
+ fn test_call_hierarchy_on_ref() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller() {
+ call$0ee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_on_def() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn call$0ee() {}
+fn caller() {
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_same_fn() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller() {
+ call$0ee();
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..58 18..24 : [33..39, 47..53]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_different_fn() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller1() {
+ call$0ee();
+}
+
+fn caller2() {
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["
+ caller1 Function FileId(0) 15..45 18..25 : [34..40]
+ caller2 Function FileId(0) 47..77 50..57 : [66..72]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_tests_mod() {
+ check_hierarchy(
+ r#"
+//- /lib.rs cfg:test
+fn callee() {}
+fn caller1() {
+ call$0ee();
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_caller() {
+ callee();
+ }
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![[r#"
+ caller1 Function FileId(0) 15..45 18..25 : [34..40]
+ test_caller Function FileId(0) 95..149 110..121 : [134..140]"#]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_different_files() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::callee;
+
+fn caller() {
+ call$0ee();
+}
+
+//- /foo/mod.rs
+pub fn callee() {}
+"#,
+ expect![["callee Function FileId(1) 0..18 7..13"]],
+ expect![["caller Function FileId(0) 27..56 30..36 : [45..51]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_outgoing() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn call$0er() {
+ callee();
+ callee();
+}
+"#,
+ expect![["caller Function FileId(0) 15..58 18..24"]],
+ expect![[]],
+ expect![["callee Function FileId(0) 0..14 3..9 : [33..39, 47..53]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_outgoing_in_different_files() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::callee;
+
+fn call$0er() {
+ callee();
+}
+
+//- /foo/mod.rs
+pub fn callee() {}
+"#,
+ expect![["caller Function FileId(0) 27..56 30..36"]],
+ expect![[]],
+ expect![["callee Function FileId(1) 0..18 7..13 : [45..51]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_incoming_outgoing() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn caller1() {
+ call$0er2();
+}
+
+fn caller2() {
+ caller3();
+}
+
+fn caller3() {
+
+}
+"#,
+ expect![["caller2 Function FileId(0) 33..64 36..43"]],
+ expect![["caller1 Function FileId(0) 0..31 3..10 : [19..26]"]],
+ expect![["caller3 Function FileId(0) 66..83 69..76 : [52..59]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_issue_5103() {
+ check_hierarchy(
+ r#"
+fn a() {
+ b()
+}
+
+fn b() {}
+
+fn main() {
+ a$0()
+}
+"#,
+ expect![["a Function FileId(0) 0..18 3..4"]],
+ expect![["main Function FileId(0) 31..52 34..38 : [47..48]"]],
+ expect![["b Function FileId(0) 20..29 23..24 : [13..14]"]],
+ );
+
+ check_hierarchy(
+ r#"
+fn a() {
+ b$0()
+}
+
+fn b() {}
+
+fn main() {
+ a()
+}
+"#,
+ expect![["b Function FileId(0) 20..29 23..24"]],
+ expect![["a Function FileId(0) 0..18 3..4 : [13..14]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_macros_incoming() {
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(callee)
+fn caller() {
+ call!(call$0ee);
+}
+"#,
+ expect![[r#"callee Function FileId(0) 144..159 152..158"#]],
+ expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]],
+ expect![[]],
+ );
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(cal$0lee)
+fn caller() {
+ call!(callee);
+}
+"#,
+ expect![[r#"callee Function FileId(0) 144..159 152..158"#]],
+ expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_macros_outgoing() {
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(callee)
+fn caller$0() {
+ call!(callee);
+}
+"#,
+ expect![[r#"caller Function FileId(0) 160..194 163..169"#]],
+ expect![[]],
+ // FIXME
+ expect![[]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
new file mode 100644
index 000000000..582e9fe7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -0,0 +1,549 @@
+//! Extracts, resolves and rewrites links and intra-doc links in markdown documentation.
+
+#[cfg(test)]
+mod tests;
+
+mod intra_doc_links;
+
+use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag};
+use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions};
+use stdx::format_to;
+use url::Url;
+
+use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
+use ide_db::{
+ base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase},
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use syntax::{
+ ast::{self, IsString},
+ match_ast, AstNode, AstToken,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, TextRange, TextSize, T,
+};
+
+use crate::{
+ doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes},
+ FilePosition, Semantics,
+};
+
+/// Weblink to an item's documentation.
+pub(crate) type DocumentationLink = String;
+
+const MARKDOWN_OPTIONS: Options =
+ Options::ENABLE_FOOTNOTES.union(Options::ENABLE_TABLES).union(Options::ENABLE_TASKLISTS);
+
+/// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs)
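+///
+/// Illustrative example (mirroring the `rewrite_struct` test): in a crate `foo`, an intra-doc
+/// link `[Foo]` on `pub struct Foo` is rewritten to
+/// `[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)`.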
+pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Definition) -> String {
+ let mut cb = broken_link_clone_cb;
+ let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));
+
+ let doc = map_links(doc, |target, title| {
+        // This check is imperfect: there is some overlap between valid intra-doc links
+        // and valid URLs, so we err on the side of eagerly treating anything that looks
+        // like a URL as one.
+ if target.contains("://") {
+ (Some(LinkType::Inline), target.to_string(), title.to_string())
+ } else {
+ // Two possibilities:
+ // * path-based links: `../../module/struct.MyStruct.html`
+ // * module-based links (AKA intra-doc links): `super::super::module::MyStruct`
+ if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title) {
+ return (None, target, title);
+ }
+ if let Some(target) = rewrite_url_link(db, definition, target) {
+ return (Some(LinkType::Inline), target, title.to_string());
+ }
+
+ (None, target.to_string(), title.to_string())
+ }
+ });
+ let mut out = String::new();
+ cmark_resume_with_options(
+ doc,
+ &mut out,
+ None,
+ CMarkOptions { code_block_token_count: 3, ..Default::default() },
+ )
+ .ok();
+ out
+}
+
+/// Remove all links in markdown documentation.
+pub(crate) fn remove_links(markdown: &str) -> String {
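+    // `drop_link` is set while we are inside a link that is being removed, so that the
+    // matching `End` event is swallowed as well.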
+ let mut drop_link = false;
+
+ let mut cb = |_: BrokenLink<'_>| {
+ let empty = InlineStr::try_from("").unwrap();
+ Some((CowStr::Inlined(empty), CowStr::Inlined(empty)))
+ };
+ let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));
+ let doc = doc.filter_map(move |evt| match evt {
+ Event::Start(Tag::Link(link_type, target, title)) => {
+ if link_type == LinkType::Inline && target.contains("://") {
+ Some(Event::Start(Tag::Link(link_type, target, title)))
+ } else {
+ drop_link = true;
+ None
+ }
+ }
+ Event::End(_) if drop_link => {
+ drop_link = false;
+ None
+ }
+ _ => Some(evt),
+ });
+
+ let mut out = String::new();
+ cmark_resume_with_options(
+ doc,
+ &mut out,
+ None,
+ CMarkOptions { code_block_token_count: 3, ..Default::default() },
+ )
+ .ok();
+ out
+}
+
+/// Retrieve a link to documentation for the given symbol.
+pub(crate) fn external_docs(
+ db: &RootDatabase,
+ position: &FilePosition,
+) -> Option<DocumentationLink> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
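+    // When the cursor sits between two tokens, prefer identifiers and literals over
+    // punctuation and trivia.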
+ let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ IDENT | INT_NUMBER | T![self] => 3,
+ T!['('] | T![')'] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ let token = sema.descend_into_macros_single(token);
+
+ let node = token.parent()?;
+ let definition = match_ast! {
+ match node {
+ ast::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref: _, field_ref } => {
+ Definition::Field(field_ref)
+ }
+ },
+ ast::Name(name) => match NameClass::classify(sema, &name)? {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def: _, field_ref } => Definition::Field(field_ref),
+ },
+ _ => return None,
+ }
+ };
+
+ get_doc_link(db, definition)
+}
+
+/// Extracts all links from the given markdown text, returning the definition text range,
+/// the link text, and the namespace if known.
+pub(crate) fn extract_definitions_from_docs(
+ docs: &hir::Documentation,
+) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
+ Parser::new_with_broken_link_callback(
+ docs.as_str(),
+ MARKDOWN_OPTIONS,
+ Some(&mut broken_link_clone_cb),
+ )
+ .into_offset_iter()
+ .filter_map(|(event, range)| match event {
+ Event::Start(Tag::Link(_, target, _)) => {
+ let (link, ns) = parse_intra_doc_link(&target);
+ Some((
+ TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?),
+ link.to_string(),
+ ns,
+ ))
+ }
+ _ => None,
+ })
+ .collect()
+}
+
+pub(crate) fn resolve_doc_path_for_def(
+ db: &dyn HirDatabase,
+ def: Definition,
+ link: &str,
+ ns: Option<hir::Namespace>,
+) -> Option<Definition> {
+ match def {
+ Definition::Module(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Function(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Adt(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Variant(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Const(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Static(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Trait(it) => it.resolve_doc_path(db, link, ns),
+ Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Field(it) => it.resolve_doc_path(db, link, ns),
+ Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ | Definition::BuiltinType(_)
+ | Definition::SelfType(_)
+ | Definition::Local(_)
+ | Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_) => None,
+ }
+ .map(Definition::from)
+}
+
+pub(crate) fn doc_attributes(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+) -> Option<(hir::AttrsWithOwner, Definition)> {
+ match_ast! {
+ match node {
+ ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Module(def))),
+ ast::Module(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Module(def))),
+ ast::Fn(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Function(def))),
+ ast::Struct(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
+ ast::Union(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Union(def)))),
+ ast::Enum(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
+ ast::Variant(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Variant(def))),
+ ast::Trait(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Trait(def))),
+ ast::Static(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Static(def))),
+ ast::Const(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Const(def))),
+ ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::TypeAlias(def))),
+ ast::Impl(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::SelfType(def))),
+ ast::RecordField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
+ ast::TupleField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
+ ast::Macro(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Macro(def))),
+ // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
+ _ => None
+ }
+ }
+}
+
+pub(crate) struct DocCommentToken {
+ doc_token: SyntaxToken,
+ prefix_len: TextSize,
+}
+
+pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option<DocCommentToken> {
+ (match_ast! {
+ match doc_token {
+ ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
+ ast::String(string) => doc_token.parent_ancestors().find_map(ast::Attr::cast)
+ .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
+ _ => None,
+ }
+ }).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() })
+}
+
+impl DocCommentToken {
+ pub(crate) fn get_definition_with_descend_at<T>(
+ self,
+ sema: &Semantics<'_, RootDatabase>,
+ offset: TextSize,
+ // Definition, CommentOwner, range of intra doc link in original file
+ mut cb: impl FnMut(Definition, SyntaxNode, TextRange) -> Option<T>,
+ ) -> Option<T> {
+ let DocCommentToken { prefix_len, doc_token } = self;
+        // offset relative to the comment's contents
+ let original_start = doc_token.text_range().start();
+ let relative_comment_offset = offset - original_start - prefix_len;
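+        // Illustrative example: for `/// [Foo]` with the cursor on `Foo`, `prefix_len` is the
+        // length of `///` and `relative_comment_offset` is the cursor offset within ` [Foo]`
+        // (the text after the `///` prefix).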
+
+ sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
+ let (node, descended_prefix_len) = match_ast! {
+ match t {
+ ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
+ ast::String(string) => (t.parent_ancestors().skip_while(|n| n.kind() != ATTR).nth(1)?, string.open_quote_text_range()?.len()),
+ _ => return None,
+ }
+ };
+ let token_start = t.text_range().start();
+ let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
+
+ let (attributes, def) = doc_attributes(sema, &node)?;
+ let (docs, doc_mapping) = attributes.docs_with_rangemap(sema.db)?;
+ let (in_expansion_range, link, ns) =
+ extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
+ let mapped = doc_mapping.map(range)?;
+ (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
+ })?;
+ // get the relative range to the doc/attribute in the expansion
+ let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
+ // Apply relative range to the original input comment
+ let absolute_range = in_expansion_relative_range + original_start + prefix_len;
+ let def = resolve_doc_path_for_def(sema.db, def, &link, ns)?;
+ cb(def, node, absolute_range)
+ })
+ }
+}
+
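+/// Broken-link callback for pulldown_cmark: when a reference link has no matching definition
+/// (as with shorthand intra-doc links like `[Foo]`), reuse the reference text as both target
+/// and title so the link is still emitted as an event and can be resolved later.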
+fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> {
+ Some((/*url*/ link.reference.clone(), /*title*/ link.reference))
+}
+
+// FIXME:
+// BUG: For Option::Some
+// Returns https://doc.rust-lang.org/nightly/core/prelude/v1/enum.Option.html#variant.Some
+// Instead of https://doc.rust-lang.org/nightly/core/option/enum.Option.html
+//
+// This should cease to be a problem if RFC2988 (Stable Rustdoc URLs) is implemented
+// https://github.com/rust-lang/rfcs/pull/2988
+fn get_doc_link(db: &RootDatabase, def: Definition) -> Option<String> {
+ let (target, file, frag) = filename_and_frag_for_def(db, def)?;
+
+ let mut url = get_doc_base_url(db, target)?;
+
+ if let Some(path) = mod_path_of_def(db, target) {
+ url = url.join(&path).ok()?;
+ }
+
+ url = url.join(&file).ok()?;
+ url.set_fragment(frag.as_deref());
+
+ Some(url.into())
+}
+
+fn rewrite_intra_doc_link(
+ db: &RootDatabase,
+ def: Definition,
+ target: &str,
+ title: &str,
+) -> Option<(String, String)> {
+ let (link, ns) = parse_intra_doc_link(target);
+
+ let resolved = resolve_doc_path_for_def(db, def, link, ns)?;
+ let mut url = get_doc_base_url(db, resolved)?;
+
+ let (_, file, frag) = filename_and_frag_for_def(db, resolved)?;
+ if let Some(path) = mod_path_of_def(db, resolved) {
+ url = url.join(&path).ok()?;
+ }
+
+ url = url.join(&file).ok()?;
+ url.set_fragment(frag.as_deref());
+
+ Some((url.into(), strip_prefixes_suffixes(title).to_string()))
+}
+
+/// Try to resolve a path to local documentation via path-based links (e.g. `../gateway/struct.Shard.html`).
+fn rewrite_url_link(db: &RootDatabase, def: Definition, target: &str) -> Option<String> {
+ if !(target.contains('#') || target.contains(".html")) {
+ return None;
+ }
+
+ let mut url = get_doc_base_url(db, def)?;
+ let (def, file, frag) = filename_and_frag_for_def(db, def)?;
+
+ if let Some(path) = mod_path_of_def(db, def) {
+ url = url.join(&path).ok()?;
+ }
+
+ url = url.join(&file).ok()?;
+ url.set_fragment(frag.as_deref());
+ url.join(target).ok().map(Into::into)
+}
+
+fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option<String> {
+ def.canonical_module_path(db).map(|it| {
+ let mut path = String::new();
+ it.flat_map(|it| it.name(db)).for_each(|name| format_to!(path, "{}/", name));
+ path
+ })
+}
+
+/// Rewrites a markdown document, applying 'callback' to each link.
+fn map_links<'e>(
+ events: impl Iterator<Item = Event<'e>>,
+ callback: impl Fn(&str, &str) -> (Option<LinkType>, String, String),
+) -> impl Iterator<Item = Event<'e>> {
+ let mut in_link = false;
+    // holds the original link target on the start event and the rewritten one on the end event
+ let mut end_link_target: Option<CowStr<'_>> = None;
+    // Normally a link's type is determined by the type of the link tag in the end event;
+    // however, in some cases we want to change the link type. For example, the
+    // `Shortcut` type parsed from Start/End tags doesn't make sense for URL links.
+ let mut end_link_type: Option<LinkType> = None;
+
+ events.map(move |evt| match evt {
+ Event::Start(Tag::Link(link_type, ref target, _)) => {
+ in_link = true;
+ end_link_target = Some(target.clone());
+ end_link_type = Some(link_type);
+ evt
+ }
+ Event::End(Tag::Link(link_type, target, _)) => {
+ in_link = false;
+ Event::End(Tag::Link(
+ end_link_type.unwrap_or(link_type),
+ end_link_target.take().unwrap_or(target),
+ CowStr::Borrowed(""),
+ ))
+ }
+ Event::Text(s) if in_link => {
+ let (link_type, link_target_s, link_name) =
+ callback(&end_link_target.take().unwrap(), &s);
+ end_link_target = Some(CowStr::Boxed(link_target_s.into()));
+ if !matches!(end_link_type, Some(LinkType::Autolink)) {
+ end_link_type = link_type;
+ }
+ Event::Text(CowStr::Boxed(link_name.into()))
+ }
+ Event::Code(s) if in_link => {
+ let (link_type, link_target_s, link_name) =
+ callback(&end_link_target.take().unwrap(), &s);
+ end_link_target = Some(CowStr::Boxed(link_target_s.into()));
+ if !matches!(end_link_type, Some(LinkType::Autolink)) {
+ end_link_type = link_type;
+ }
+ Event::Code(CowStr::Boxed(link_name.into()))
+ }
+ _ => evt,
+ })
+}
+
+/// Get the root URL for the documentation of a definition.
+///
+/// ```ignore
+/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
+/// ^^^^^^^^^^^^^^^^^^^^^^^^^^
+/// ```
+fn get_doc_base_url(db: &RootDatabase, def: Definition) -> Option<Url> {
+ // special case base url of `BuiltinType` to core
+ // https://github.com/rust-lang/rust-analyzer/issues/12250
+ if let Definition::BuiltinType(..) = def {
+ return Url::parse("https://doc.rust-lang.org/nightly/core/").ok();
+ };
+
+ let krate = def.krate(db)?;
+ let display_name = krate.display_name(db)?;
+
+ let base = match db.crate_graph()[krate.into()].origin {
+        // std and co no longer specify `html_root_url`, so we have to hardcode this ourselves.
+        // FIXME: Use the toolchain's channel instead of nightly
+ CrateOrigin::Lang(
+ origin @ (LangCrateOrigin::Alloc
+ | LangCrateOrigin::Core
+ | LangCrateOrigin::ProcMacro
+ | LangCrateOrigin::Std
+ | LangCrateOrigin::Test),
+ ) => {
+ format!("https://doc.rust-lang.org/nightly/{origin}")
+ }
+ _ => {
+ krate.get_html_root_url(db).or_else(|| {
+ let version = krate.version(db);
+ // Fallback to docs.rs. This uses `display_name` and can never be
+ // correct, but that's what fallbacks are about.
+ //
+ // FIXME: clicking on the link should just open the file in the editor,
+ // instead of falling back to external urls.
+ Some(format!(
+ "https://docs.rs/{krate}/{version}/",
+ krate = display_name,
+ version = version.as_deref().unwrap_or("*")
+ ))
+ })?
+ }
+ };
+ Url::parse(&base).ok()?.join(&format!("{}/", display_name)).ok()
+}
+
+/// Get the filename and extension generated for a symbol by rustdoc.
+///
+/// ```ignore
+/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
+/// ^^^^^^^^^^^^^^^^^^^
+/// ```
+fn filename_and_frag_for_def(
+ db: &dyn HirDatabase,
+ def: Definition,
+) -> Option<(Definition, String, Option<String>)> {
+ if let Some(assoc_item) = def.as_assoc_item(db) {
+ let def = match assoc_item.container(db) {
+ AssocItemContainer::Trait(t) => t.into(),
+ AssocItemContainer::Impl(i) => i.self_ty(db).as_adt()?.into(),
+ };
+ let (_, file, _) = filename_and_frag_for_def(db, def)?;
+ let frag = get_assoc_item_fragment(db, assoc_item)?;
+ return Some((def, file, Some(frag)));
+ }
+
+ let res = match def {
+ Definition::Adt(adt) => match adt {
+ Adt::Struct(s) => format!("struct.{}.html", s.name(db)),
+ Adt::Enum(e) => format!("enum.{}.html", e.name(db)),
+ Adt::Union(u) => format!("union.{}.html", u.name(db)),
+ },
+ Definition::Module(m) => match m.name(db) {
+            // `#[doc(keyword = "...")]` is internal and used only by the Rust compiler
+ Some(name) => match m.attrs(db).by_key("doc").find_string_value_in_tt("keyword") {
+ Some(kw) => {
+ format!("keyword.{}.html", kw.trim_matches('"'))
+ }
+ None => format!("{}/index.html", name),
+ },
+ None => String::from("index.html"),
+ },
+ Definition::Trait(t) => format!("trait.{}.html", t.name(db)),
+ Definition::TypeAlias(t) => format!("type.{}.html", t.name(db)),
+ Definition::BuiltinType(t) => format!("primitive.{}.html", t.name()),
+ Definition::Function(f) => format!("fn.{}.html", f.name(db)),
+ Definition::Variant(ev) => {
+ format!("enum.{}.html#variant.{}", ev.parent_enum(db).name(db), ev.name(db))
+ }
+ Definition::Const(c) => format!("const.{}.html", c.name(db)?),
+ Definition::Static(s) => format!("static.{}.html", s.name(db)),
+ Definition::Macro(mac) => format!("macro.{}.html", mac.name(db)),
+ Definition::Field(field) => {
+ let def = match field.parent_def(db) {
+ hir::VariantDef::Struct(it) => Definition::Adt(it.into()),
+ hir::VariantDef::Union(it) => Definition::Adt(it.into()),
+ hir::VariantDef::Variant(it) => Definition::Variant(it),
+ };
+ let (_, file, _) = filename_and_frag_for_def(db, def)?;
+ return Some((def, file, Some(format!("structfield.{}", field.name(db)))));
+ }
+ Definition::SelfType(impl_) => {
+ let adt = impl_.self_ty(db).as_adt()?.into();
+ let (_, file, _) = filename_and_frag_for_def(db, adt)?;
+ // FIXME fragment numbering
+ return Some((adt, file, Some(String::from("impl"))));
+ }
+ Definition::Local(_)
+ | Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ | Definition::DeriveHelper(_) => return None,
+ };
+
+ Some((def, res, None))
+}
+
+/// Get the fragment required to link to a specific field, method, associated type, or associated constant.
+///
+/// ```ignore
+/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
+/// ^^^^^^^^^^^^^^
+/// ```
+fn get_assoc_item_fragment(db: &dyn HirDatabase, assoc_item: hir::AssocItem) -> Option<String> {
+ Some(match assoc_item {
+ AssocItem::Function(function) => {
+ let is_trait_method =
+ function.as_assoc_item(db).and_then(|assoc| assoc.containing_trait(db)).is_some();
+ // This distinction may get more complicated when specialization is available.
+ // Rustdoc makes this decision based on whether a method 'has defaultness'.
+ // Currently this is only the case for provided trait methods.
+ if is_trait_method && !function.has_body(db) {
+ format!("tymethod.{}", function.name(db))
+ } else {
+ format!("method.{}", function.name(db))
+ }
+ }
+ AssocItem::Const(constant) => format!("associatedconstant.{}", constant.name(db)?),
+ AssocItem::TypeAlias(ty) => format!("associatedtype.{}", ty.name(db)),
+ })
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
new file mode 100644
index 000000000..1df9aaae2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
@@ -0,0 +1,77 @@
+//! Helper tools for intra-doc links.
+
+const TYPES: ([&str; 9], [&str; 0]) =
+ (["type", "struct", "enum", "mod", "trait", "union", "module", "prim", "primitive"], []);
+const VALUES: ([&str; 8], [&str; 1]) =
+ (["value", "function", "fn", "method", "const", "static", "mod", "module"], ["()"]);
+const MACROS: ([&str; 2], [&str; 1]) = (["macro", "derive"], ["!"]);
+
+/// Extract the specified namespace from an intra-doc-link if one exists.
+///
+/// # Examples
+///
+/// * `struct MyStruct` -> ("MyStruct", `Namespace::Types`)
+/// * `panic!` -> ("panic", `Namespace::Macros`)
+/// * `fn@from_intra_spec` -> ("from_intra_spec", `Namespace::Values`)
+pub(super) fn parse_intra_doc_link(s: &str) -> (&str, Option<hir::Namespace>) {
+ let s = s.trim_matches('`');
+
+ [
+ (hir::Namespace::Types, (TYPES.0.iter(), TYPES.1.iter())),
+ (hir::Namespace::Values, (VALUES.0.iter(), VALUES.1.iter())),
+ (hir::Namespace::Macros, (MACROS.0.iter(), MACROS.1.iter())),
+ ]
+ .into_iter()
+ .find_map(|(ns, (mut prefixes, mut suffixes))| {
+ if let Some(prefix) = prefixes.find(|&&prefix| {
+ s.starts_with(prefix)
+ && s.chars().nth(prefix.len()).map_or(false, |c| c == '@' || c == ' ')
+ }) {
+ Some((&s[prefix.len() + 1..], ns))
+ } else {
+ suffixes.find_map(|&suffix| s.strip_suffix(suffix).zip(Some(ns)))
+ }
+ })
+ .map_or((s, None), |(s, ns)| (s, Some(ns)))
+}
+
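+/// Strip a namespace prefix (e.g. `struct `, `fn@`) or suffix (e.g. `!`, `()`) from an
+/// intra-doc-link's text, if one is present.
+///
+/// Illustrative examples: `"struct Foo"` becomes `"Foo"` and `"function()"` becomes `"function"`.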
+pub(super) fn strip_prefixes_suffixes(s: &str) -> &str {
+ [
+ (TYPES.0.iter(), TYPES.1.iter()),
+ (VALUES.0.iter(), VALUES.1.iter()),
+ (MACROS.0.iter(), MACROS.1.iter()),
+ ]
+ .into_iter()
+ .find_map(|(mut prefixes, mut suffixes)| {
+ if let Some(prefix) = prefixes.find(|&&prefix| {
+ s.starts_with(prefix)
+ && s.chars().nth(prefix.len()).map_or(false, |c| c == '@' || c == ' ')
+ }) {
+ Some(&s[prefix.len() + 1..])
+ } else {
+ suffixes.find_map(|&suffix| s.strip_suffix(suffix))
+ }
+ })
+ .unwrap_or(s)
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use super::*;
+
+ fn check(link: &str, expected: Expect) {
+ let (l, a) = parse_intra_doc_link(link);
+ let a = a.map_or_else(String::new, |a| format!(" ({:?})", a));
+ expected.assert_eq(&format!("{}{}", l, a));
+ }
+
+ #[test]
+ fn test_name() {
+ check("foo", expect![[r#"foo"#]]);
+ check("struct Struct", expect![[r#"Struct (Types)"#]]);
+ check("makro!", expect![[r#"makro (Macros)"#]]);
+ check("fn@function", expect![[r#"function (Values)"#]]);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
new file mode 100644
index 000000000..c6bfb6b9d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -0,0 +1,491 @@
+use expect_test::{expect, Expect};
+use hir::{HasAttrs, Semantics};
+use ide_db::{
+ base_db::{FilePosition, FileRange},
+ defs::Definition,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, match_ast, AstNode, SyntaxNode};
+
+use crate::{
+ doc_links::{extract_definitions_from_docs, resolve_doc_path_for_def, rewrite_links},
+ fixture, TryToNav,
+};
+
+fn check_external_docs(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let url = analysis.external_docs(position).unwrap().expect("could not find url for symbol");
+
+ expect.assert_eq(&url)
+}
+
+fn check_rewrite(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let sema = &Semantics::new(&*analysis.db);
+ let (cursor_def, docs) = def_under_cursor(sema, &position);
+ let res = rewrite_links(sema.db, docs.as_str(), cursor_def);
+ expect.assert_eq(&res)
+}
+
+fn check_doc_links(ra_fixture: &str) {
+ let key_fn = |&(FileRange { file_id, range }, _): &_| (file_id, range.start());
+
+ let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
+ expected.sort_by_key(key_fn);
+ let sema = &Semantics::new(&*analysis.db);
+ let (cursor_def, docs) = def_under_cursor(sema, &position);
+ let defs = extract_definitions_from_docs(&docs);
+ let actual: Vec<_> = defs
+ .into_iter()
+ .map(|(_, link, ns)| {
+ let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns)
+ .unwrap_or_else(|| panic!("Failed to resolve {}", link));
+ let nav_target = def.try_to_nav(sema.db).unwrap();
+ let range =
+ FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
+ (range, link)
+ })
+ .sorted_by_key(key_fn)
+ .collect();
+ assert_eq!(expected, actual);
+}
+
+fn def_under_cursor(
+ sema: &Semantics<'_, RootDatabase>,
+ position: &FilePosition,
+) -> (Definition, hir::Documentation) {
+ let (docs, def) = sema
+ .parse(position.file_id)
+ .syntax()
+ .token_at_offset(position.offset)
+ .left_biased()
+ .unwrap()
+ .parent_ancestors()
+ .find_map(|it| node_to_def(sema, &it))
+ .expect("no def found")
+ .unwrap();
+ let docs = docs.expect("no docs found for cursor def");
+ (def, docs)
+}
+
+fn node_to_def(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+) -> Option<Option<(Option<hir::Documentation>, Definition)>> {
+ Some(match_ast! {
+ match node {
+ ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),
+ ast::Module(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),
+ ast::Fn(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Function(def))),
+ ast::Struct(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
+ ast::Union(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Union(def)))),
+ ast::Enum(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
+ ast::Variant(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Variant(def))),
+ ast::Trait(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Trait(def))),
+ ast::Static(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Static(def))),
+ ast::Const(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Const(def))),
+ ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::TypeAlias(def))),
+ ast::Impl(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::SelfType(def))),
+ ast::RecordField(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Field(def))),
+ ast::TupleField(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Field(def))),
+ ast::Macro(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Macro(def))),
+ // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
+ _ => return None,
+ }
+ })
+}
+
+#[test]
+fn external_docs_doc_url_crate() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo$0::Foo;
+//- /lib.rs crate:foo
+pub struct Foo;
+"#,
+ expect![[r#"https://docs.rs/foo/*/foo/index.html"#]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_std_crate() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:std
+use self$0;
+"#,
+ expect![[r#"https://doc.rust-lang.org/nightly/std/index.html"#]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_struct() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Fo$0o;
+"#,
+ expect![[r#"https://docs.rs/foo/*/foo/struct.Foo.html"#]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_struct_field() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo {
+ field$0: ()
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#structfield.field"##]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_fn() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub fn fo$0o() {}
+"#,
+ expect![[r#"https://docs.rs/foo/*/foo/fn.foo.html"#]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_impl_assoc() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo;
+impl Foo {
+ pub fn method$0() {}
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#method.method"##]],
+ );
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo;
+impl Foo {
+ const CONST$0: () = ();
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#associatedconstant.CONST"##]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_impl_trait_assoc() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo;
+pub trait Trait {
+ fn method() {}
+}
+impl Trait for Foo {
+ pub fn method$0() {}
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#method.method"##]],
+ );
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo;
+pub trait Trait {
+ const CONST: () = ();
+}
+impl Trait for Foo {
+ const CONST$0: () = ();
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#associatedconstant.CONST"##]],
+ );
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo;
+pub trait Trait {
+ type Type;
+}
+impl Trait for Foo {
+ type Type$0 = ();
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/struct.Foo.html#associatedtype.Type"##]],
+ );
+}
+
+#[test]
+fn external_docs_doc_url_trait_assoc() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub trait Foo {
+ fn method$0();
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/trait.Foo.html#tymethod.method"##]],
+ );
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub trait Foo {
+ const CONST$0: ();
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/trait.Foo.html#associatedconstant.CONST"##]],
+ );
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub trait Foo {
+ type Type$0;
+}
+"#,
+ expect![[r##"https://docs.rs/foo/*/foo/trait.Foo.html#associatedtype.Type"##]],
+ );
+}
+
+#[test]
+fn external_docs_trait() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+trait Trait$0 {}
+"#,
+ expect![[r#"https://docs.rs/foo/*/foo/trait.Trait.html"#]],
+ )
+}
+
+#[test]
+fn external_docs_module() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub mod foo {
+ pub mod ba$0r {}
+}
+"#,
+ expect![[r#"https://docs.rs/foo/*/foo/foo/bar/index.html"#]],
+ )
+}
+
+#[test]
+fn external_docs_reexport_order() {
+ check_external_docs(
+ r#"
+//- /main.rs crate:foo
+pub mod wrapper {
+ pub use module::Item;
+
+ pub mod module {
+ pub struct Item;
+ }
+}
+
+fn foo() {
+ let bar: wrapper::It$0em;
+}
+ "#,
+ expect![[r#"https://docs.rs/foo/*/foo/wrapper/module/struct.Item.html"#]],
+ )
+}
+
+#[test]
+fn doc_links_items_simple() {
+ check_doc_links(
+ r#"
+//- /main.rs crate:main deps:krate
+/// [`krate`]
+//! [`Trait`]
+//! [`function`]
+//! [`CONST`]
+//! [`STATIC`]
+//! [`Struct`]
+//! [`Enum`]
+//! [`Union`]
+//! [`Type`]
+//! [`module`]
+use self$0;
+
+const CONST: () = ();
+ // ^^^^^ CONST
+static STATIC: () = ();
+ // ^^^^^^ STATIC
+trait Trait {
+ // ^^^^^ Trait
+}
+fn function() {}
+// ^^^^^^^^ function
+struct Struct;
+ // ^^^^^^ Struct
+enum Enum {}
+ // ^^^^ Enum
+union Union {__: ()}
+ // ^^^^^ Union
+type Type = ();
+ // ^^^^ Type
+mod module {}
+ // ^^^^^^ module
+//- /krate.rs crate:krate
+// empty
+//^file krate
+"#,
+ )
+}
+
+#[test]
+fn doc_links_inherent_impl_items() {
+ check_doc_links(
+ r#"
+// /// [`Struct::CONST`]
+// /// [`Struct::function`]
+/// FIXME #9694
+struct Struct$0;
+
+impl Struct {
+ const CONST: () = ();
+ fn function() {}
+}
+"#,
+ )
+}
+
+#[test]
+fn doc_links_trait_impl_items() {
+ check_doc_links(
+ r#"
+trait Trait {
+ type Type;
+ const CONST: usize;
+ fn function();
+}
+// /// [`Struct::Type`]
+// /// [`Struct::CONST`]
+// /// [`Struct::function`]
+/// FIXME #9694
+struct Struct$0;
+
+impl Trait for Struct {
+ type Type = ();
+ const CONST: () = ();
+ fn function() {}
+}
+"#,
+ )
+}
+
+#[test]
+fn doc_links_trait_items() {
+ check_doc_links(
+ r#"
+/// [`Trait`]
+/// [`Trait::Type`]
+/// [`Trait::CONST`]
+/// [`Trait::function`]
+trait Trait$0 {
+ // ^^^^^ Trait
+type Type;
+ // ^^^^ Trait::Type
+const CONST: usize;
+ // ^^^^^ Trait::CONST
+fn function();
+// ^^^^^^^^ Trait::function
+}
+ "#,
+ )
+}
+
+#[test]
+fn rewrite_html_root_url() {
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+#![doc(arbitrary_attribute = "test", html_root_url = "https:/example.com", arbitrary_attribute2)]
+
+pub mod foo {
+ pub struct Foo;
+}
+/// [Foo](foo::Foo)
+pub struct B$0ar
+"#,
+ expect![[r#"[Foo](https://example.com/foo/foo/struct.Foo.html)"#]],
+ );
+}
+
+#[test]
+fn rewrite_on_field() {
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+pub struct Foo {
+ /// [Foo](struct.Foo.html)
+ fie$0ld: ()
+}
+"#,
+ expect![[r#"[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
+ );
+}
+
+#[test]
+fn rewrite_struct() {
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [Foo]
+pub struct $0Foo;
+"#,
+ expect![[r#"[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
+ );
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [`Foo`]
+pub struct $0Foo;
+"#,
+ expect![[r#"[`Foo`](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
+ );
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [Foo](struct.Foo.html)
+pub struct $0Foo;
+"#,
+ expect![[r#"[Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
+ );
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [struct Foo](struct.Foo.html)
+pub struct $0Foo;
+"#,
+ expect![[r#"[struct Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
+ );
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [my Foo][foo]
+///
+/// [foo]: Foo
+pub struct $0Foo;
+"#,
+ expect![[r#"[my Foo](https://docs.rs/foo/*/foo/struct.Foo.html)"#]],
+ );
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [`foo`]
+///
+/// [`foo`]: Foo
+pub struct $0Foo;
+"#,
+ expect![["[`foo`]"]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
new file mode 100644
index 000000000..efa8551a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -0,0 +1,521 @@
+use hir::Semantics;
+use ide_db::{
+ base_db::FileId, helpers::pick_best_token,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
+};
+use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
+
+use crate::FilePosition;
+
+pub struct ExpandedMacro {
+ pub name: String,
+ pub expansion: String,
+}
+
+// Feature: Expand Macro Recursively
+//
+// Shows the full macro expansion of the macro at the current cursor position.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Expand macro recursively**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
+pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id);
+
+ let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+ SyntaxKind::IDENT => 1,
+ _ => 0,
+ })?;
+
+    // due to how Rust Analyzer works internally, we need to special case derive attributes,
+    // otherwise they might not get found, e.g. with the cursor at $0 below, `#[attr]` would be expanded instead:
+ // ```
+ // #[attr]
+ // #[derive($0Foo)]
+ // struct Bar;
+ // ```
+
+ let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+ let hir_file = sema.hir_file_for(&descended.parent()?);
+ if !hir_file.is_derive_attr_pseudo_expansion(db) {
+ return None;
+ }
+
+ let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
+        // map up out of the `#[derive]` expansion
+ let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+ let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+ let expansions = sema.expand_derive_macro(&attr)?;
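+        // The derive under the cursor is identified by counting the `,` tokens that precede it
+        // inside `#[derive(...)]`; that count indexes into the expansions returned above.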
+ let idx = attr
+ .token_tree()?
+ .token_trees_and_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .take_while(|it| it != &token)
+ .filter(|it| it.kind() == T![,])
+ .count();
+ let expansion =
+ format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
+ Some(ExpandedMacro { name, expansion })
+ });
+
+ if derive.is_some() {
+ return derive;
+ }
+
+ // FIXME: Intermix attribute and bang! expansions
+ // currently we only recursively expand one of the two types
+ let mut anc = tok.parent_ancestors();
+ let (name, expanded, kind) = loop {
+ let node = anc.next()?;
+
+ if let Some(item) = ast::Item::cast(node.clone()) {
+ if let Some(def) = sema.resolve_attr_macro_call(&item) {
+ break (
+ def.name(db).to_string(),
+ expand_attr_macro_recur(&sema, &item)?,
+ SyntaxKind::MACRO_ITEMS,
+ );
+ }
+ }
+ if let Some(mac) = ast::MacroCall::cast(node) {
+ break (
+ mac.path()?.segment()?.name_ref()?.to_string(),
+ expand_macro_recur(&sema, &mac)?,
+ mac.syntax().parent().map(|it| it.kind()).unwrap_or(SyntaxKind::MACRO_ITEMS),
+ );
+ }
+ };
+
+    // FIXME:
+    // macro expansion may lose all whitespace information.
+    // But we hope someday we can use ra_fmt for that.
+ let expansion = format(db, kind, position.file_id, expanded);
+
+ Some(ExpandedMacro { name, expansion })
+}
+
+fn expand_macro_recur(
+ sema: &Semantics<'_, RootDatabase>,
+ macro_call: &ast::MacroCall,
+) -> Option<SyntaxNode> {
+ let expanded = sema.expand(macro_call)?.clone_for_update();
+ expand(sema, expanded, ast::MacroCall::cast, expand_macro_recur)
+}
+
+fn expand_attr_macro_recur(
+ sema: &Semantics<'_, RootDatabase>,
+ item: &ast::Item,
+) -> Option<SyntaxNode> {
+ let expanded = sema.expand_attr_macro(item)?.clone_for_update();
+ expand(sema, expanded, ast::Item::cast, expand_attr_macro_recur)
+}
+
+fn expand<T: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ expanded: SyntaxNode,
+ f: impl FnMut(SyntaxNode) -> Option<T>,
+ exp: impl Fn(&Semantics<'_, RootDatabase>, &T) -> Option<SyntaxNode>,
+) -> Option<SyntaxNode> {
+ let children = expanded.descendants().filter_map(f);
+ let mut replacements = Vec::new();
+
+ for child in children {
+ if let Some(new_node) = exp(sema, &child) {
+ // check if the whole original syntax is replaced
+ if expanded == *child.syntax() {
+ return Some(new_node);
+ }
+ replacements.push((child, new_node));
+ }
+ }
+
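+    // Splice the recursively expanded children back into the expansion tree.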
+ replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
+ Some(expanded)
+}
+
+fn format(db: &RootDatabase, kind: SyntaxKind, file_id: FileId, expanded: SyntaxNode) -> String {
+ let expansion = insert_ws_into(expanded).to_string();
+
+ _format(db, kind, file_id, &expansion).unwrap_or(expansion)
+}
+
+#[cfg(any(test, target_arch = "wasm32", target_os = "emscripten"))]
+fn _format(
+ _db: &RootDatabase,
+ _kind: SyntaxKind,
+ _file_id: FileId,
+ _expansion: &str,
+) -> Option<String> {
+ None
+}
+
+#[cfg(not(any(test, target_arch = "wasm32", target_os = "emscripten")))]
+fn _format(
+ db: &RootDatabase,
+ kind: SyntaxKind,
+ file_id: FileId,
+ expansion: &str,
+) -> Option<String> {
+ use ide_db::base_db::{FileLoader, SourceDatabase};
+    // hack until we get hygiene working (same character count to preserve formatting as much as possible)
+ const DOLLAR_CRATE_REPLACE: &str = &"__r_a_";
+ let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
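+    // Fragments produced by pattern/expression/type macros are not valid stand-alone source,
+    // so wrap them in a dummy item for rustfmt; the wrapper is stripped again after formatting.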
+ let (prefix, suffix) = match kind {
+ SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),
+ SyntaxKind::MACRO_EXPR | SyntaxKind::MACRO_STMTS => ("fn __() {", "}"),
+ SyntaxKind::MACRO_TYPE => ("type __ =", ";"),
+ _ => ("", ""),
+ };
+ let expansion = format!("{prefix}{expansion}{suffix}");
+
+ let &crate_id = db.relevant_crates(file_id).iter().next()?;
+ let edition = db.crate_graph()[crate_id].edition;
+
+ let mut cmd = std::process::Command::new(toolchain::rustfmt());
+ cmd.arg("--edition");
+ cmd.arg(edition.to_string());
+
+ let mut rustfmt = cmd
+ .stdin(std::process::Stdio::piped())
+ .stdout(std::process::Stdio::piped())
+ .stderr(std::process::Stdio::piped())
+ .spawn()
+ .ok()?;
+
+ std::io::Write::write_all(&mut rustfmt.stdin.as_mut()?, expansion.as_bytes()).ok()?;
+
+ let output = rustfmt.wait_with_output().ok()?;
+ let captured_stdout = String::from_utf8(output.stdout).ok()?;
+
+ if output.status.success() && !captured_stdout.trim().is_empty() {
+ let output = captured_stdout.replace(DOLLAR_CRATE_REPLACE, "$crate");
+ let output = output.trim().strip_prefix(prefix)?;
+ let output = match kind {
+ SyntaxKind::MACRO_PAT => {
+ output.strip_suffix(suffix).or_else(|| output.strip_suffix(": u32,\n);"))?
+ }
+ _ => output.strip_suffix(suffix)?,
+ };
+ let trim_indent = stdx::trim_indent(output);
+ tracing::debug!("expand_macro: formatting succeeded");
+ Some(trim_indent)
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::fixture;
+
+ #[track_caller]
+ fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+ let expansion = analysis.expand_macro(pos).unwrap().unwrap();
+ let actual = format!("{}\n{}", expansion.name, expansion.expansion);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn macro_expand_as_keyword() {
+ check(
+ r#"
+macro_rules! bar {
+ ($i:tt) => { $i as _ }
+}
+fn main() {
+ let x: u64 = ba$0r!(5i64);
+}
+"#,
+ expect![[r#"
+ bar
+ 5i64 as _"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_underscore() {
+ check(
+ r#"
+macro_rules! bar {
+ ($i:tt) => { for _ in 0..$i {} }
+}
+fn main() {
+ ba$0r!(42);
+}
+"#,
+ expect![[r#"
+ bar
+ for _ in 0..42{}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_recursive_expansion() {
+ check(
+ r#"
+macro_rules! bar {
+ () => { fn b() {} }
+}
+macro_rules! foo {
+ () => { bar!(); }
+}
+macro_rules! baz {
+ () => { foo!(); }
+}
+f$0oo!();
+"#,
+ expect![[r#"
+ foo
+ fn b(){}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_multiple_lines() {
+ check(
+ r#"
+macro_rules! foo {
+ () => {
+ fn some_thing() -> u32 {
+ let a = 0;
+ a + 10
+ }
+ }
+}
+f$0oo!();
+ "#,
+ expect![[r#"
+ foo
+ fn some_thing() -> u32 {
+ let a = 0;
+ a+10
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_match_ast() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+ (match ($node:expr) {
+ $( ast::$ast:ident($it:ident) => $res:block, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ mat$0ch_ast! {
+ match container {
+ ast::TraitDef(it) => {},
+ ast::ImplDef(it) => {},
+ _ => { continue },
+ }
+ }
+}
+"#,
+ expect![[r#"
+ match_ast
+ {
+ if let Some(it) = ast::TraitDef::cast(container.clone()){}
+ else if let Some(it) = ast::ImplDef::cast(container.clone()){}
+ else {
+ {
+ continue
+ }
+ }
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_match_ast_inside_let_statement() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+ (match ($node:expr) {}) => {{}};
+}
+
+fn main() {
+ let p = f(|it| {
+ let res = mat$0ch_ast! { match c {}};
+ Some(res)
+ })?;
+}
+"#,
+ expect![[r#"
+ match_ast
+ {}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_inner_macro_rules() {
+ check(
+ r#"
+macro_rules! foo {
+ ($t:tt) => {{
+ macro_rules! bar {
+ () => {
+ $t
+ }
+ }
+ bar!()
+ }};
+}
+
+fn main() {
+ foo$0!(42);
+}
+ "#,
+ expect![[r#"
+ foo
+ {
+ macro_rules! bar {
+ () => {
+ 42
+ }
+ }
+ 42
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_inner_macro_fail_to_expand() {
+ check(
+ r#"
+macro_rules! bar {
+ (BAD) => {};
+}
+macro_rules! foo {
+ () => {bar!()};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_with_dollar_crate() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! bar {
+ () => {0};
+}
+macro_rules! foo {
+ () => {$crate::bar!()};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ 0"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_with_dyn_absolute_path() {
+ check(
+ r#"
+macro_rules! foo {
+ () => {fn f<T>(_: &dyn ::std::marker::Copy) {}};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ fn f<T>(_: &dyn ::std::marker::Copy){}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive() {
+ check(
+ r#"
+//- proc_macros: identity
+//- minicore: clone, derive
+
+#[proc_macros::identity]
+#[derive(C$0lone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
+ impl < >core::clone::Clone for Foo< >{}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive2() {
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Cop$0y)]
+#[derive(Clone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Copy
+ impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive_multi() {
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Cop$0y, Clone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Copy
+ impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Copy, Cl$0one)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
+ impl < >core::clone::Clone for Foo< >{}
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
new file mode 100644
index 000000000..45f1fd748
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
@@ -0,0 +1,662 @@
+use std::iter::successors;
+
+use hir::Semantics;
+use ide_db::RootDatabase;
+use syntax::{
+ algo::{self, skip_trivia_token},
+ ast::{self, AstNode, AstToken},
+ Direction, NodeOrToken,
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T,
+};
+
+use crate::FileRange;
+
+// Feature: Expand and Shrink Selection
+//
+// Extends or shrinks the current selection to the encompassing syntactic construct
+// (expression, statement, item, module, etc). It works with multiple cursors.
+//
+// This is a standard LSP feature and not a protocol extension.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Alt+Shift+→], kbd:[Alt+Shift+←]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[]
+pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
+ let sema = Semantics::new(db);
+ let src = sema.parse(frange.file_id);
+ try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
+}
+
+fn try_extend_selection(
+ sema: &Semantics<'_, RootDatabase>,
+ root: &SyntaxNode,
+ frange: FileRange,
+) -> Option<TextRange> {
+ let range = frange.range;
+
+ let string_kinds = [COMMENT, STRING, BYTE_STRING];
+ let list_kinds = [
+ RECORD_PAT_FIELD_LIST,
+ MATCH_ARM_LIST,
+ RECORD_FIELD_LIST,
+ TUPLE_FIELD_LIST,
+ RECORD_EXPR_FIELD_LIST,
+ VARIANT_LIST,
+ USE_TREE_LIST,
+ GENERIC_PARAM_LIST,
+ GENERIC_ARG_LIST,
+ TYPE_BOUND_LIST,
+ PARAM_LIST,
+ ARG_LIST,
+ ARRAY_EXPR,
+ TUPLE_EXPR,
+ TUPLE_TYPE,
+ TUPLE_PAT,
+ WHERE_CLAUSE,
+ ];
+
+ if range.is_empty() {
+ let offset = range.start();
+ let mut leaves = root.token_at_offset(offset);
+ if leaves.clone().all(|it| it.kind() == WHITESPACE) {
+ return Some(extend_ws(root, leaves.next()?, offset));
+ }
+ let leaf_range = match leaves {
+ TokenAtOffset::None => return None,
+ TokenAtOffset::Single(l) => {
+ if string_kinds.contains(&l.kind()) {
+ extend_single_word_in_comment_or_string(&l, offset)
+ .unwrap_or_else(|| l.text_range())
+ } else {
+ l.text_range()
+ }
+ }
+ TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(),
+ };
+ return Some(leaf_range);
+ };
+ let node = match root.covering_element(range) {
+ NodeOrToken::Token(token) => {
+ if token.text_range() != range {
+ return Some(token.text_range());
+ }
+ if let Some(comment) = ast::Comment::cast(token.clone()) {
+ if let Some(range) = extend_comments(comment) {
+ return Some(range);
+ }
+ }
+ token.parent()?
+ }
+ NodeOrToken::Node(node) => node,
+ };
+
+    // if we are inside a single token tree, we may be inside a macro call or an attribute
+ if node.kind() == TOKEN_TREE {
+ if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
+ if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
+ return Some(range);
+ }
+ }
+ }
+
+ if node.text_range() != range {
+ return Some(node.text_range());
+ }
+
+ let node = shallowest_node(&node);
+
+ if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
+ if let Some(range) = extend_list_item(&node) {
+ return Some(range);
+ }
+ }
+
+ node.parent().map(|it| it.text_range())
+}
+
+fn extend_tokens_from_range(
+ sema: &Semantics<'_, RootDatabase>,
+ macro_call: ast::MacroCall,
+ original_range: TextRange,
+) -> Option<TextRange> {
+ let src = macro_call.syntax().covering_element(original_range);
+ let (first_token, last_token) = match src {
+ NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?),
+ NodeOrToken::Token(it) => (it.clone(), it),
+ };
+
+ let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
+ let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
+
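+    // Shrink the token pair inward until both tokens lie within the original selection.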
+ while !original_range.contains_range(first_token.text_range()) {
+ first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
+ }
+ while !original_range.contains_range(last_token.text_range()) {
+ last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
+ }
+
+ // compute original mapped token range
+ let extended = {
+ let fst_expanded = sema.descend_into_macros_single(first_token.clone());
+ let lst_expanded = sema.descend_into_macros_single(last_token.clone());
+ let mut lca =
+ algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
+ lca = shallowest_node(&lca);
+ if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
+ lca = lca.parent()?;
+ }
+ lca
+ };
+
+ // Compute parent node range
+ let validate = |token: &SyntaxToken| -> bool {
+ let expanded = sema.descend_into_macros_single(token.clone());
+ let parent = match expanded.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
+ };
+
+ // Find the first and last text range under expanded parent
+ let first = successors(Some(first_token), |token| {
+ let token = token.prev_token()?;
+ skip_trivia_token(token, Direction::Prev)
+ })
+ .take_while(validate)
+ .last()?;
+
+ let last = successors(Some(last_token), |token| {
+ let token = token.next_token()?;
+ skip_trivia_token(token, Direction::Next)
+ })
+ .take_while(validate)
+ .last()?;
+
+ let range = first.text_range().cover(last.text_range());
+ if range.contains_range(original_range) && original_range != range {
+ Some(range)
+ } else {
+ None
+ }
+}
+
+/// Find the shallowest node with the same range, which allows us to traverse siblings.
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
+ node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
+}
+
+fn extend_single_word_in_comment_or_string(
+ leaf: &SyntaxToken,
+ offset: TextSize,
+) -> Option<TextRange> {
+ let text: &str = leaf.text();
+ let cursor_position: u32 = (offset - leaf.text_range().start()).into();
+
+ let (before, after) = text.split_at(cursor_position as usize);
+
+ fn non_word_char(c: char) -> bool {
+ !(c.is_alphanumeric() || c == '_')
+ }
+
+ let start_idx = before.rfind(non_word_char)? as u32;
+ let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
+
+ let from: TextSize = (start_idx + 1).into();
+ let to: TextSize = (cursor_position + end_idx).into();
+
+ let range = TextRange::new(from, to);
+ if range.is_empty() {
+ None
+ } else {
+ Some(range + leaf.text_range().start())
+ }
+}
+
+fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange {
+ let ws_text = ws.text();
+ let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
+ let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
+ let ws_suffix = &ws_text[suffix];
+ let ws_prefix = &ws_text[prefix];
+ if ws_text.contains('\n') && !ws_suffix.contains('\n') {
+ if let Some(node) = ws.next_sibling_or_token() {
+ let start = match ws_prefix.rfind('\n') {
+ Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32),
+ None => node.text_range().start(),
+ };
+ let end = if root.text().char_at(node.text_range().end()) == Some('\n') {
+ node.text_range().end() + TextSize::of('\n')
+ } else {
+ node.text_range().end()
+ };
+ return TextRange::new(start, end);
+ }
+ }
+ ws.text_range()
+}
+
+fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
+ return if priority(&r) > priority(&l) { r } else { l };
+ fn priority(n: &SyntaxToken) -> usize {
+ match n.kind() {
+ WHITESPACE => 0,
+ IDENT | T![self] | T![super] | T![crate] | T![Self] | LIFETIME_IDENT => 2,
+ _ => 1,
+ }
+ }
+}
+
+/// Extend list item selection to include nearby delimiter and whitespace.
+fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
+ fn is_single_line_ws(node: &SyntaxToken) -> bool {
+ node.kind() == WHITESPACE && !node.text().contains('\n')
+ }
+
+ fn nearby_delimiter(
+ delimiter_kind: SyntaxKind,
+ node: &SyntaxNode,
+ dir: Direction,
+ ) -> Option<SyntaxToken> {
+ node.siblings_with_tokens(dir)
+ .skip(1)
+ .find(|node| match node {
+ NodeOrToken::Node(_) => true,
+ NodeOrToken::Token(it) => !is_single_line_ws(it),
+ })
+ .and_then(|it| it.into_token())
+ .filter(|node| node.kind() == delimiter_kind)
+ }
+
+ let delimiter = match node.kind() {
+ TYPE_BOUND => T![+],
+ _ => T![,],
+ };
+
+ if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) {
+ // Include any following whitespace when delimiter is after list item.
+ let final_node = delimiter_node
+ .next_sibling_or_token()
+ .and_then(|it| it.into_token())
+ .filter(is_single_line_ws)
+ .unwrap_or(delimiter_node);
+
+ return Some(TextRange::new(node.text_range().start(), final_node.text_range().end()));
+ }
+ if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) {
+ return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end()));
+ }
+
+ None
+}
+
+fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
+ let prev = adj_comments(&comment, Direction::Prev);
+ let next = adj_comments(&comment, Direction::Next);
+ if prev != next {
+ Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end()))
+ } else {
+ None
+ }
+}
+
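+/// Returns the furthest comment reachable from `comment` in the given direction without
+/// crossing a blank line or a non-comment, non-whitespace token.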
+fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
+ let mut res = comment.clone();
+ for element in comment.syntax().siblings_with_tokens(dir) {
+ let token = match element.as_token() {
+ None => break,
+ Some(token) => token,
+ };
+ if let Some(c) = ast::Comment::cast(token.clone()) {
+ res = c
+ } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
+ break;
+ }
+ }
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::*;
+
+ fn do_check(before: &str, afters: &[&str]) {
+ let (analysis, position) = fixture::position(before);
+ let before = analysis.file_text(position.file_id).unwrap();
+ let range = TextRange::empty(position.offset);
+ let mut frange = FileRange { file_id: position.file_id, range };
+
+ for &after in afters {
+ frange.range = analysis.extend_selection(frange).unwrap();
+ let actual = &before[frange.range];
+ assert_eq!(after, actual);
+ }
+ }
+
+ #[test]
+ fn test_extend_selection_arith() {
+ do_check(r#"fn foo() { $01 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]);
+ }
+
+ #[test]
+ fn test_extend_selection_list() {
+ do_check(r#"fn foo($0x: i32) {}"#, &["x", "x: i32"]);
+ do_check(r#"fn foo($0x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]);
+ do_check(r#"fn foo($0x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]);
+ do_check(r#"fn foo(x: i32, $0y: i32) {}"#, &["y", "y: i32", ", y: i32"]);
+ do_check(r#"fn foo(x: i32, $0y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]);
+ do_check(r#"fn foo(x: i32,$0y: i32) {}"#, &["y", "y: i32", ",y: i32"]);
+
+ do_check(r#"const FOO: [usize; 2] = [ 22$0 , 33];"#, &["22", "22 , "]);
+ do_check(r#"const FOO: [usize; 2] = [ 22 , 33$0];"#, &["33", ", 33"]);
+ do_check(r#"const FOO: [usize; 2] = [ 22 , 33$0 ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]);
+
+ do_check(r#"fn main() { (1, 2$0) }"#, &["2", ", 2", "(1, 2)"]);
+
+ do_check(
+ r#"
+const FOO: [usize; 2] = [
+ 22,
+ $033,
+]"#,
+ &["33", "33,"],
+ );
+
+ do_check(
+ r#"
+const FOO: [usize; 2] = [
+ 22
+ , 33$0,
+]"#,
+ &["33", "33,"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_start_of_the_line() {
+ do_check(
+ r#"
+impl S {
+$0 fn foo() {
+
+ }
+}"#,
+ &[" fn foo() {\n\n }\n"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_doc_comments() {
+ do_check(
+ r#"
+struct A;
+
+/// bla
+/// bla
+struct B {
+ $0
+}
+ "#,
+ &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"],
+ )
+ }
+
+ #[test]
+ fn test_extend_selection_comments() {
+ do_check(
+ r#"
+fn bar(){}
+
+// fn foo() {
+// 1 + $01
+// }
+
+// fn foo(){}
+ "#,
+ &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"],
+ );
+
+ do_check(
+ r#"
+// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+// pub enum Direction {
+// $0 Next,
+// Prev
+// }
+"#,
+ &[
+ "// Next,",
+ "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }",
+ ],
+ );
+
+ do_check(
+ r#"
+/*
+foo
+_bar1$0*/
+"#,
+ &["_bar1", "/*\nfoo\n_bar1*/"],
+ );
+
+ do_check(r#"//!$0foo_2 bar"#, &["foo_2", "//!foo_2 bar"]);
+
+ do_check(r#"/$0/foo bar"#, &["//foo bar"]);
+ }
+
+ #[test]
+ fn test_extend_selection_prefer_idents() {
+ do_check(
+ r#"
+fn main() { foo$0+bar;}
+"#,
+ &["foo", "foo+bar"],
+ );
+ do_check(
+ r#"
+fn main() { foo+$0bar;}
+"#,
+ &["bar", "foo+bar"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_prefer_lifetimes() {
+ do_check(r#"fn foo<$0'a>() {}"#, &["'a", "<'a>"]);
+ do_check(r#"fn foo<'a$0>() {}"#, &["'a", "<'a>"]);
+ }
+
+ #[test]
+ fn test_extend_selection_select_first_word() {
+ do_check(r#"// foo bar b$0az quxx"#, &["baz", "// foo bar baz quxx"]);
+ do_check(
+ r#"
+impl S {
+fn foo() {
+// hel$0lo world
+}
+}
+"#,
+ &["hello", "// hello world"],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_string() {
+ do_check(
+ r#"
+fn bar(){}
+
+" fn f$0oo() {"
+"#,
+ &["foo", "\" fn foo() {\""],
+ );
+ }
+
+ #[test]
+ fn test_extend_trait_bounds_list_in_where_clause() {
+ do_check(
+ r#"
+fn foo<R>()
+ where
+ R: req::Request + 'static,
+ R::Params: DeserializeOwned$0 + panic::UnwindSafe + 'static,
+ R::Result: Serialize + 'static,
+"#,
+ &[
+ "DeserializeOwned",
+ "DeserializeOwned + ",
+ "DeserializeOwned + panic::UnwindSafe + 'static",
+ "R::Params: DeserializeOwned + panic::UnwindSafe + 'static",
+ "R::Params: DeserializeOwned + panic::UnwindSafe + 'static,",
+ ],
+ );
+ do_check(r#"fn foo<T>() where T: $0Copy"#, &["Copy"]);
+ do_check(r#"fn foo<T>() where T: $0Copy + Display"#, &["Copy", "Copy + "]);
+ do_check(r#"fn foo<T>() where T: $0Copy +Display"#, &["Copy", "Copy +"]);
+ do_check(r#"fn foo<T>() where T: $0Copy+Display"#, &["Copy", "Copy+"]);
+ do_check(r#"fn foo<T>() where T: Copy + $0Display"#, &["Display", "+ Display"]);
+ do_check(r#"fn foo<T>() where T: Copy + $0Display + Sync"#, &["Display", "Display + "]);
+ do_check(r#"fn foo<T>() where T: Copy +$0Display"#, &["Display", "+Display"]);
+ }
+
+ #[test]
+ fn test_extend_trait_bounds_list_inline() {
+ do_check(r#"fn foo<T: $0Copy>() {}"#, &["Copy"]);
+ do_check(r#"fn foo<T: $0Copy + Display>() {}"#, &["Copy", "Copy + "]);
+ do_check(r#"fn foo<T: $0Copy +Display>() {}"#, &["Copy", "Copy +"]);
+ do_check(r#"fn foo<T: $0Copy+Display>() {}"#, &["Copy", "Copy+"]);
+ do_check(r#"fn foo<T: Copy + $0Display>() {}"#, &["Display", "+ Display"]);
+ do_check(r#"fn foo<T: Copy + $0Display + Sync>() {}"#, &["Display", "Display + "]);
+ do_check(r#"fn foo<T: Copy +$0Display>() {}"#, &["Display", "+Display"]);
+ do_check(
+ r#"fn foo<T: Copy$0 + Display, U: Copy>() {}"#,
+ &[
+ "Copy",
+ "Copy + ",
+ "Copy + Display",
+ "T: Copy + Display",
+ "T: Copy + Display, ",
+ "<T: Copy + Display, U: Copy>",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_on_tuple_in_type() {
+ do_check(
+ r#"fn main() { let _: (krate, $0_crate_def_map, module_id) = (); }"#,
+ &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
+ );
+ // white space variations
+ do_check(
+ r#"fn main() { let _: (krate,$0_crate_def_map,module_id) = (); }"#,
+ &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
+ );
+ do_check(
+ r#"
+fn main() { let _: (
+ krate,
+ _crate$0_def_map,
+ module_id
+) = (); }"#,
+ &[
+ "_crate_def_map",
+ "_crate_def_map,",
+ "(\n krate,\n _crate_def_map,\n module_id\n)",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_on_tuple_in_rvalue() {
+ do_check(
+ r#"fn main() { let var = (krate, _crate_def_map$0, module_id); }"#,
+ &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
+ );
+ // white space variations
+ do_check(
+ r#"fn main() { let var = (krate,_crate$0_def_map,module_id); }"#,
+ &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
+ );
+ do_check(
+ r#"
+fn main() { let var = (
+ krate,
+ _crate_def_map$0,
+ module_id
+); }"#,
+ &[
+ "_crate_def_map",
+ "_crate_def_map,",
+ "(\n krate,\n _crate_def_map,\n module_id\n)",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_extend_selection_on_tuple_pat() {
+ do_check(
+ r#"fn main() { let (krate, _crate_def_map$0, module_id) = var; }"#,
+ &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"],
+ );
+ // white space variations
+ do_check(
+ r#"fn main() { let (krate,_crate$0_def_map,module_id) = var; }"#,
+ &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"],
+ );
+ do_check(
+ r#"
+fn main() { let (
+ krate,
+ _crate_def_map$0,
+ module_id
+) = var; }"#,
+ &[
+ "_crate_def_map",
+ "_crate_def_map,",
+ "(\n krate,\n _crate_def_map,\n module_id\n)",
+ ],
+ );
+ }
+
+ #[test]
+ fn extend_selection_inside_macros() {
+ do_check(
+ r#"macro_rules! foo { ($item:item) => {$item} }
+ foo!{fn hello(na$0me:usize){}}"#,
+ &[
+ "name",
+ "name:usize",
+ "(name:usize)",
+ "fn hello(name:usize){}",
+ "{fn hello(name:usize){}}",
+ "foo!{fn hello(name:usize){}}",
+ ],
+ );
+ }
+
+ #[test]
+ fn extend_selection_inside_recur_macros() {
+ do_check(
+ r#" macro_rules! foo2 { ($item:item) => {$item} }
+ macro_rules! foo { ($item:item) => {foo2!($item);} }
+ foo!{fn hello(na$0me:usize){}}"#,
+ &[
+ "name",
+ "name:usize",
+ "(name:usize)",
+ "fn hello(name:usize){}",
+ "{fn hello(name:usize){}}",
+ "foo!{fn hello(name:usize){}}",
+ ],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
new file mode 100644
index 000000000..68fd0952b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
@@ -0,0 +1,579 @@
+use ide_db::SymbolKind;
+use syntax::{
+ ast::{self, HasAttrs, HasGenericParams, HasName},
+ match_ast, AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange,
+ WalkEvent,
+};
+
+#[derive(Debug, Clone)]
+pub struct StructureNode {
+ pub parent: Option<usize>,
+ pub label: String,
+ pub navigation_range: TextRange,
+ pub node_range: TextRange,
+ pub kind: StructureNodeKind,
+ pub detail: Option<String>,
+ pub deprecated: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum StructureNodeKind {
+ SymbolKind(SymbolKind),
+ Region,
+}
+
+// Feature: File Structure
+//
+// Provides a tree of the symbols defined in the file. Can be used to
+//
+// * fuzzy search symbols in a file (super useful)
+// * draw breadcrumbs to describe the context around the cursor
+// * draw an outline of the file
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+Shift+O]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif[]
+
+pub(crate) fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
+ let mut res = Vec::new();
+ let mut stack = Vec::new();
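+    // `stack` holds indices into `res` for the symbols that are currently "open", so that
+    // nested symbols can record the index of their parent.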
+
+ for event in file.syntax().preorder_with_tokens() {
+ match event {
+ WalkEvent::Enter(NodeOrToken::Node(node)) => {
+ if let Some(mut symbol) = structure_node(&node) {
+ symbol.parent = stack.last().copied();
+ stack.push(res.len());
+ res.push(symbol);
+ }
+ }
+ WalkEvent::Leave(NodeOrToken::Node(node)) => {
+ if structure_node(&node).is_some() {
+ stack.pop().unwrap();
+ }
+ }
+ WalkEvent::Enter(NodeOrToken::Token(token)) => {
+ if let Some(mut symbol) = structure_token(token) {
+ symbol.parent = stack.last().copied();
+ stack.push(res.len());
+ res.push(symbol);
+ }
+ }
+ WalkEvent::Leave(NodeOrToken::Token(token)) => {
+ if structure_token(token).is_some() {
+ stack.pop().unwrap();
+ }
+ }
+ }
+ }
+ res
+}
+
+fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
+ fn decl<N: HasName + HasAttrs>(node: N, kind: StructureNodeKind) -> Option<StructureNode> {
+ decl_with_detail(&node, None, kind)
+ }
+
+ fn decl_with_type_ref<N: HasName + HasAttrs>(
+ node: &N,
+ type_ref: Option<ast::Type>,
+ kind: StructureNodeKind,
+ ) -> Option<StructureNode> {
+ let detail = type_ref.map(|type_ref| {
+ let mut detail = String::new();
+ collapse_ws(type_ref.syntax(), &mut detail);
+ detail
+ });
+ decl_with_detail(node, detail, kind)
+ }
+
+ fn decl_with_detail<N: HasName + HasAttrs>(
+ node: &N,
+ detail: Option<String>,
+ kind: StructureNodeKind,
+ ) -> Option<StructureNode> {
+ let name = node.name()?;
+
+ Some(StructureNode {
+ parent: None,
+ label: name.text().to_string(),
+ navigation_range: name.syntax().text_range(),
+ node_range: node.syntax().text_range(),
+ kind,
+ detail,
+ deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"),
+ })
+ }
+
+ fn collapse_ws(node: &SyntaxNode, output: &mut String) {
+ let mut can_insert_ws = false;
+ node.text().for_each_chunk(|chunk| {
+ for line in chunk.lines() {
+ let line = line.trim();
+ if line.is_empty() {
+ if can_insert_ws {
+ output.push(' ');
+ can_insert_ws = false;
+ }
+ } else {
+ output.push_str(line);
+ can_insert_ws = true;
+ }
+ }
+ })
+ }
+
+ match_ast! {
+ match node {
+ ast::Fn(it) => {
+ let mut detail = String::from("fn");
+ if let Some(type_param_list) = it.generic_param_list() {
+ collapse_ws(type_param_list.syntax(), &mut detail);
+ }
+ if let Some(param_list) = it.param_list() {
+ collapse_ws(param_list.syntax(), &mut detail);
+ }
+ if let Some(ret_type) = it.ret_type() {
+ detail.push(' ');
+ collapse_ws(ret_type.syntax(), &mut detail);
+ }
+
+ decl_with_detail(&it, Some(detail), StructureNodeKind::SymbolKind(SymbolKind::Function))
+ },
+ ast::Struct(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Struct)),
+ ast::Union(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Union)),
+ ast::Enum(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Enum)),
+ ast::Variant(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Variant)),
+ ast::Trait(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Trait)),
+ ast::Module(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Module)),
+ ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::TypeAlias)),
+ ast::RecordField(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Field)),
+ ast::Const(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Const)),
+ ast::Static(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::Static)),
+ ast::Impl(it) => {
+ let target_type = it.self_ty()?;
+ let target_trait = it.trait_();
+ let label = match target_trait {
+ None => format!("impl {}", target_type.syntax().text()),
+ Some(t) => {
+ format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),)
+ }
+ };
+
+ let node = StructureNode {
+ parent: None,
+ label,
+ navigation_range: target_type.syntax().text_range(),
+ node_range: it.syntax().text_range(),
+ kind: StructureNodeKind::SymbolKind(SymbolKind::Impl),
+ detail: None,
+ deprecated: false,
+ };
+ Some(node)
+ },
+ ast::Macro(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Macro)),
+ _ => None,
+ }
+ }
+}
+
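+/// Emits a `Region` node for `// region:` marker comments; other tokens produce nothing.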
+fn structure_token(token: SyntaxToken) -> Option<StructureNode> {
+ if let Some(comment) = ast::Comment::cast(token) {
+ let text = comment.text().trim();
+
+ if let Some(region_name) = text.strip_prefix("// region:").map(str::trim) {
+ return Some(StructureNode {
+ parent: None,
+ label: region_name.to_string(),
+ navigation_range: comment.syntax().text_range(),
+ node_range: comment.syntax().text_range(),
+ kind: StructureNodeKind::Region,
+ detail: None,
+ deprecated: false,
+ });
+ }
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use super::*;
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let file = SourceFile::parse(ra_fixture).ok().unwrap();
+ let structure = file_structure(&file);
+ expect.assert_debug_eq(&structure)
+ }
+
+ #[test]
+ fn test_file_structure() {
+ check(
+ r#"
+struct Foo {
+ x: i32
+}
+
+mod m {
+ fn bar1() {}
+ fn bar2<T>(t: T) -> T {}
+ fn bar3<A,
+ B>(a: A,
+ b: B) -> Vec<
+ u32
+ > {}
+}
+
+enum E { X, Y(i32) }
+type T = ();
+static S: i32 = 92;
+const C: i32 = 92;
+
+impl E {}
+
+impl fmt::Debug for E {}
+
+macro_rules! mc {
+ () => {}
+}
+
+#[macro_export]
+macro_rules! mcexp {
+ () => {}
+}
+
+/// Doc comment
+macro_rules! mcexp {
+ () => {}
+}
+
+#[deprecated]
+fn obsolete() {}
+
+#[deprecated(note = "for awhile")]
+fn very_obsolete() {}
+
+// region: Some region name
+// endregion
+
+// region: dontpanic
+mod m {
+fn f() {}
+// endregion
+fn g() {}
+}
+"#,
+ expect![[r#"
+ [
+ StructureNode {
+ parent: None,
+ label: "Foo",
+ navigation_range: 8..11,
+ node_range: 1..26,
+ kind: SymbolKind(
+ Struct,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 0,
+ ),
+ label: "x",
+ navigation_range: 18..19,
+ node_range: 18..24,
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: Some(
+ "i32",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "m",
+ navigation_range: 32..33,
+ node_range: 28..158,
+ kind: SymbolKind(
+ Module,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 2,
+ ),
+ label: "bar1",
+ navigation_range: 43..47,
+ node_range: 40..52,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 2,
+ ),
+ label: "bar2",
+ navigation_range: 60..64,
+ node_range: 57..81,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn<T>(t: T) -> T",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 2,
+ ),
+ label: "bar3",
+ navigation_range: 89..93,
+ node_range: 86..156,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn<A, B>(a: A, b: B) -> Vec< u32 >",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "E",
+ navigation_range: 165..166,
+ node_range: 160..180,
+ kind: SymbolKind(
+ Enum,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 6,
+ ),
+ label: "X",
+ navigation_range: 169..170,
+ node_range: 169..170,
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 6,
+ ),
+ label: "Y",
+ navigation_range: 172..173,
+ node_range: 172..178,
+ kind: SymbolKind(
+ Variant,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "T",
+ navigation_range: 186..187,
+ node_range: 181..193,
+ kind: SymbolKind(
+ TypeAlias,
+ ),
+ detail: Some(
+ "()",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "S",
+ navigation_range: 201..202,
+ node_range: 194..213,
+ kind: SymbolKind(
+ Static,
+ ),
+ detail: Some(
+ "i32",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "C",
+ navigation_range: 220..221,
+ node_range: 214..232,
+ kind: SymbolKind(
+ Const,
+ ),
+ detail: Some(
+ "i32",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "impl E",
+ navigation_range: 239..240,
+ node_range: 234..243,
+ kind: SymbolKind(
+ Impl,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "impl fmt::Debug for E",
+ navigation_range: 265..266,
+ node_range: 245..269,
+ kind: SymbolKind(
+ Impl,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "mc",
+ navigation_range: 284..286,
+ node_range: 271..303,
+ kind: SymbolKind(
+ Macro,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "mcexp",
+ navigation_range: 334..339,
+ node_range: 305..356,
+ kind: SymbolKind(
+ Macro,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "mcexp",
+ navigation_range: 387..392,
+ node_range: 358..409,
+ kind: SymbolKind(
+ Macro,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "obsolete",
+ navigation_range: 428..436,
+ node_range: 411..441,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: true,
+ },
+ StructureNode {
+ parent: None,
+ label: "very_obsolete",
+ navigation_range: 481..494,
+ node_range: 443..499,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: true,
+ },
+ StructureNode {
+ parent: None,
+ label: "Some region name",
+ navigation_range: 501..528,
+ node_range: 501..528,
+ kind: Region,
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: None,
+ label: "m",
+ navigation_range: 568..569,
+ node_range: 543..606,
+ kind: SymbolKind(
+ Module,
+ ),
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 20,
+ ),
+ label: "dontpanic",
+ navigation_range: 543..563,
+ node_range: 543..563,
+ kind: Region,
+ detail: None,
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 20,
+ ),
+ label: "f",
+ navigation_range: 575..576,
+ node_range: 572..581,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: false,
+ },
+ StructureNode {
+ parent: Some(
+ 20,
+ ),
+ label: "g",
+ navigation_range: 598..599,
+ node_range: 582..604,
+ kind: SymbolKind(
+ Function,
+ ),
+ detail: Some(
+ "fn()",
+ ),
+ deprecated: false,
+ },
+ ]
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
new file mode 100644
index 000000000..2ea6f6a9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
@@ -0,0 +1,87 @@
+//! Utilities for creating `Analysis` instances for tests.
+use hir::db::DefDatabase;
+use ide_db::base_db::fixture::ChangeFixture;
+use test_utils::{extract_annotations, RangeOrOffset};
+
+use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
+
+/// Creates analysis for a single file.
+pub(crate) fn file(ra_fixture: &str) -> (Analysis, FileId) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ (host.analysis(), change_fixture.files[0])
+}
+
+/// Creates analysis from a multi-file fixture, returns the position marked with $0.
+pub(crate) fn position(ra_fixture: &str) -> (Analysis, FilePosition) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (host.analysis(), FilePosition { file_id, offset })
+}
+
+/// Creates analysis for a single file, returns range marked with a pair of $0.
+pub(crate) fn range(ra_fixture: &str) -> (Analysis, FileRange) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let range = range_or_offset.expect_range();
+ (host.analysis(), FileRange { file_id, range })
+}
+
+/// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
+pub(crate) fn range_or_position(ra_fixture: &str) -> (Analysis, FileId, RangeOrOffset) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ (host.analysis(), file_id, range_or_offset)
+}
+
+/// Creates analysis from a multi-file fixture, returns the position marked with $0 and all annotated ranges.
+pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+
+ let annotations = change_fixture
+ .files
+ .iter()
+ .flat_map(|&file_id| {
+ let file_text = host.analysis().file_text(file_id).unwrap();
+ let annotations = extract_annotations(&file_text);
+ annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
+ })
+ .collect();
+ (host.analysis(), FilePosition { file_id, offset }, annotations)
+}
+
+/// Creates analysis from a multi-file fixture with annotations, without a $0 marker.
+pub(crate) fn annotations_without_marker(ra_fixture: &str) -> (Analysis, Vec<(FileRange, String)>) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.db.set_enable_proc_attr_macros(true);
+ host.db.apply_change(change_fixture.change);
+
+ let annotations = change_fixture
+ .files
+ .iter()
+ .flat_map(|&file_id| {
+ let file_text = host.analysis().file_text(file_id).unwrap();
+ let annotations = extract_annotations(&file_text);
+ annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
+ })
+ .collect();
+ (host.analysis(), annotations)
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/fn_references.rs
new file mode 100644
index 000000000..63fb322ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/fn_references.rs
@@ -0,0 +1,94 @@
+//! This module implements a search for methods and free functions in the specified file.
+//! We have to skip tests, so we cannot reuse the file_structure module.
+
+use hir::Semantics;
+use ide_assists::utils::test_related_attribute;
+use ide_db::RootDatabase;
+use syntax::{ast, ast::HasName, AstNode, SyntaxNode};
+
+use crate::{FileId, FileRange};
+
+pub(crate) fn find_all_methods(db: &RootDatabase, file_id: FileId) -> Vec<FileRange> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(file_id);
+ source_file.syntax().descendants().filter_map(|it| method_range(it, file_id)).collect()
+}
+
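+/// Returns the range of a function's name, skipping functions marked with test-related attributes.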
+fn method_range(item: SyntaxNode, file_id: FileId) -> Option<FileRange> {
+ ast::Fn::cast(item).and_then(|fn_def| {
+ if test_related_attribute(&fn_def).is_some() {
+ None
+ } else {
+ fn_def.name().map(|name| FileRange { file_id, range: name.syntax().text_range() })
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+ use crate::{FileRange, TextSize};
+ use std::ops::RangeInclusive;
+
+ #[test]
+ fn test_find_all_methods() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ fn private_fn() {$0}
+
+ pub fn pub_fn() {}
+
+ pub fn generic_fn<T>(arg: T) {}
+ "#,
+ );
+
+ let refs = analysis.find_all_methods(pos.file_id).unwrap();
+ check_result(&refs, &[3..=13, 27..=33, 47..=57]);
+ }
+
+ #[test]
+ fn test_find_trait_methods() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ trait Foo {
+ fn bar() {$0}
+ fn baz() {}
+ }
+ "#,
+ );
+
+ let refs = analysis.find_all_methods(pos.file_id).unwrap();
+ check_result(&refs, &[19..=22, 35..=38]);
+ }
+
+ #[test]
+ fn test_skip_tests() {
+ let (analysis, pos) = fixture::position(
+ r#"
+ //- /lib.rs
+ #[test]
+ fn foo() {$0}
+
+ pub fn pub_fn() {}
+
+ mod tests {
+ #[test]
+ fn bar() {}
+ }
+ "#,
+ );
+
+ let refs = analysis.find_all_methods(pos.file_id).unwrap();
+ check_result(&refs, &[28..=34]);
+ }
+
+ fn check_result(refs: &[FileRange], expected: &[RangeInclusive<u32>]) {
+ assert_eq!(refs.len(), expected.len());
+
+ for (i, item) in refs.iter().enumerate() {
+ let range = &expected[i];
+ assert_eq!(TextSize::from(*range.start()), item.range.start());
+ assert_eq!(TextSize::from(*range.end()), item.range.end());
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
new file mode 100755
index 000000000..c694d95d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
@@ -0,0 +1,626 @@
+use ide_db::{syntax_helpers::node_ext::vis_eq, FxHashSet};
+use syntax::{
+ ast::{self, AstNode, AstToken},
+ match_ast, Direction, NodeOrToken, SourceFile,
+ SyntaxKind::{self, *},
+ TextRange, TextSize,
+};
+
+use std::hash::Hash;
+
+const REGION_START: &str = "// region:";
+const REGION_END: &str = "// endregion";
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum FoldKind {
+ Comment,
+ Imports,
+ Mods,
+ Block,
+ ArgList,
+ Region,
+ Consts,
+ Statics,
+ Array,
+ WhereClause,
+ ReturnType,
+ MatchArm,
+}
+
+#[derive(Debug)]
+pub struct Fold {
+ pub range: TextRange,
+ pub kind: FoldKind,
+}
+
+// Feature: Folding
+//
+// Defines folding regions for curly braced blocks, runs of consecutive use, mod, const or static
+// items, and `region` / `endregion` comment markers.
+pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
+ let mut res = vec![];
+ let mut visited_comments = FxHashSet::default();
+ let mut visited_imports = FxHashSet::default();
+ let mut visited_mods = FxHashSet::default();
+ let mut visited_consts = FxHashSet::default();
+ let mut visited_statics = FxHashSet::default();
+
+    // Regions can be nested, so keep a LIFO stack of region start offsets.
+ let mut region_starts: Vec<TextSize> = vec![];
+
+ for element in file.syntax().descendants_with_tokens() {
+ // Fold items that span multiple lines
+ if let Some(kind) = fold_kind(element.kind()) {
+ let is_multiline = match &element {
+ NodeOrToken::Node(node) => node.text().contains_char('\n'),
+ NodeOrToken::Token(token) => token.text().contains('\n'),
+ };
+ if is_multiline {
+ res.push(Fold { range: element.text_range(), kind });
+ continue;
+ }
+ }
+
+ match element {
+ NodeOrToken::Token(token) => {
+ // Fold groups of comments
+ if let Some(comment) = ast::Comment::cast(token) {
+ if visited_comments.contains(&comment) {
+ continue;
+ }
+ let text = comment.text().trim_start();
+ if text.starts_with(REGION_START) {
+ region_starts.push(comment.syntax().text_range().start());
+ } else if text.starts_with(REGION_END) {
+ if let Some(region) = region_starts.pop() {
+ res.push(Fold {
+ range: TextRange::new(region, comment.syntax().text_range().end()),
+ kind: FoldKind::Region,
+ })
+ }
+ } else if let Some(range) =
+ contiguous_range_for_comment(comment, &mut visited_comments)
+ {
+ res.push(Fold { range, kind: FoldKind::Comment })
+ }
+ }
+ }
+ NodeOrToken::Node(node) => {
+ match_ast! {
+ match node {
+ ast::Module(module) => {
+ if module.item_list().is_none() {
+ if let Some(range) = contiguous_range_for_item_group(
+ module,
+ &mut visited_mods,
+ ) {
+ res.push(Fold { range, kind: FoldKind::Mods })
+ }
+ }
+ },
+ ast::Use(use_) => {
+ if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_imports) {
+ res.push(Fold { range, kind: FoldKind::Imports })
+ }
+ },
+ ast::Const(konst) => {
+ if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_consts) {
+ res.push(Fold { range, kind: FoldKind::Consts })
+ }
+ },
+ ast::Static(statik) => {
+ if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_statics) {
+ res.push(Fold { range, kind: FoldKind::Statics })
+ }
+ },
+ ast::WhereClause(where_clause) => {
+ if let Some(range) = fold_range_for_where_clause(where_clause) {
+ res.push(Fold { range, kind: FoldKind::WhereClause })
+ }
+ },
+ ast::MatchArm(match_arm) => {
+ if let Some(range) = fold_range_for_multiline_match_arm(match_arm) {
+ res.push(Fold {range, kind: FoldKind::MatchArm})
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+ }
+ }
+
+ res
+}
+
+fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
+ match kind {
+ COMMENT => Some(FoldKind::Comment),
+ ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList),
+ ARRAY_EXPR => Some(FoldKind::Array),
+ RET_TYPE => Some(FoldKind::ReturnType),
+ ASSOC_ITEM_LIST
+ | RECORD_FIELD_LIST
+ | RECORD_PAT_FIELD_LIST
+ | RECORD_EXPR_FIELD_LIST
+ | ITEM_LIST
+ | EXTERN_ITEM_LIST
+ | USE_TREE_LIST
+ | BLOCK_EXPR
+ | MATCH_ARM_LIST
+ | VARIANT_LIST
+ | TOKEN_TREE => Some(FoldKind::Block),
+ _ => None,
+ }
+}
+
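+/// Computes the fold range for a run of consecutive items of the same kind and visibility,
+/// separated only by whitespace without blank lines. Single items are not folded.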
+fn contiguous_range_for_item_group<N>(first: N, visited: &mut FxHashSet<N>) -> Option<TextRange>
+where
+ N: ast::HasVisibility + Clone + Hash + Eq,
+{
+ if !visited.insert(first.clone()) {
+ return None;
+ }
+
+ let (mut last, mut last_vis) = (first.clone(), first.visibility());
+ for element in first.syntax().siblings_with_tokens(Direction::Next) {
+ let node = match element {
+ NodeOrToken::Token(token) => {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if !ws.spans_multiple_lines() {
+ // Ignore whitespace without blank lines
+ continue;
+ }
+ }
+ // There is a blank line or another token, which means that the
+ // group ends here
+ break;
+ }
+ NodeOrToken::Node(node) => node,
+ };
+
+ if let Some(next) = N::cast(node) {
+ let next_vis = next.visibility();
+ if eq_visibility(next_vis.clone(), last_vis) {
+ visited.insert(next.clone());
+ last_vis = next_vis;
+ last = next;
+ continue;
+ }
+ }
+ // Stop if we find an item of a different kind or with a different visibility.
+ break;
+ }
+
+ if first != last {
+ Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end()))
+ } else {
+ // The group consists of only one element, therefore it cannot be folded
+ None
+ }
+}
+
+fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool {
+ match (vis0, vis1) {
+ (None, None) => true,
+ (Some(vis0), Some(vis1)) => vis_eq(&vis0, &vis1),
+ _ => false,
+ }
+}
+
+fn contiguous_range_for_comment(
+ first: ast::Comment,
+ visited: &mut FxHashSet<ast::Comment>,
+) -> Option<TextRange> {
+ visited.insert(first.clone());
+
+ // Only fold comments of the same flavor
+ let group_kind = first.kind();
+ if !group_kind.shape.is_line() {
+ return None;
+ }
+
+ let mut last = first.clone();
+ for element in first.syntax().siblings_with_tokens(Direction::Next) {
+ match element {
+ NodeOrToken::Token(token) => {
+ if let Some(ws) = ast::Whitespace::cast(token.clone()) {
+ if !ws.spans_multiple_lines() {
+ // Ignore whitespace without blank lines
+ continue;
+ }
+ }
+ if let Some(c) = ast::Comment::cast(token) {
+ if c.kind() == group_kind {
+ let text = c.text().trim_start();
+ // regions are not real comments
+ if !(text.starts_with(REGION_START) || text.starts_with(REGION_END)) {
+ visited.insert(c.clone());
+ last = c;
+ continue;
+ }
+ }
+ }
+ // The comment group ends because either:
+ // * An element of a different kind was reached
+ // * A comment of a different flavor was reached
+ break;
+ }
+ NodeOrToken::Node(_) => break,
+ };
+ }
+
+ if first != last {
+ Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end()))
+ } else {
+ // The group consists of only one element, therefore it cannot be folded
+ None
+ }
+}
+
+fn fold_range_for_where_clause(where_clause: ast::WhereClause) -> Option<TextRange> {
+ let first_where_pred = where_clause.predicates().next();
+ let last_where_pred = where_clause.predicates().last();
+
+ if first_where_pred != last_where_pred {
+ let start = where_clause.where_token()?.text_range().end();
+ let end = where_clause.syntax().text_range().end();
+ return Some(TextRange::new(start, end));
+ }
+ None
+}
+
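+/// Folds a multi-line match arm body, unless that body is itself a foldable node
+/// (e.g. a block), in which case it already gets its own fold.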
+fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> {
+ if let Some(_) = fold_kind(match_arm.expr()?.syntax().kind()) {
+ return None;
+ }
+ if match_arm.expr()?.syntax().text().contains_char('\n') {
+ return Some(match_arm.expr()?.syntax().text_range());
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::extract_tags;
+
+ use super::*;
+
+ fn check(ra_fixture: &str) {
+ let (ranges, text) = extract_tags(ra_fixture, "fold");
+
+ let parse = SourceFile::parse(&text);
+ let mut folds = folding_ranges(&parse.tree());
+ folds.sort_by_key(|fold| (fold.range.start(), fold.range.end()));
+
+ assert_eq!(
+ folds.len(),
+ ranges.len(),
+            "The number of folds differs from the expected number"
+ );
+
+ for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) {
+ assert_eq!(fold.range.start(), range.start(), "mismatched start of folding ranges");
+ assert_eq!(fold.range.end(), range.end(), "mismatched end of folding ranges");
+
+ let kind = match fold.kind {
+ FoldKind::Comment => "comment",
+ FoldKind::Imports => "imports",
+ FoldKind::Mods => "mods",
+ FoldKind::Block => "block",
+ FoldKind::ArgList => "arglist",
+ FoldKind::Region => "region",
+ FoldKind::Consts => "consts",
+ FoldKind::Statics => "statics",
+ FoldKind::Array => "array",
+ FoldKind::WhereClause => "whereclause",
+ FoldKind::ReturnType => "returntype",
+ FoldKind::MatchArm => "matcharm",
+ };
+ assert_eq!(kind, &attr.unwrap());
+ }
+ }
+
+ #[test]
+ fn test_fold_comments() {
+ check(
+ r#"
+<fold comment>// Hello
+// this is a multiline
+// comment
+//</fold>
+
+// But this is not
+
+fn main() <fold block>{
+ <fold comment>// We should
+ // also
+ // fold
+ // this one.</fold>
+ <fold comment>//! But this one is different
+ //! because it has another flavor</fold>
+ <fold comment>/* As does this
+ multiline comment */</fold>
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_imports() {
+ check(
+ r#"
+use std::<fold block>{
+ str,
+ vec,
+ io as iop
+}</fold>;
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_mods() {
+ check(
+ r#"
+
+pub mod foo;
+<fold mods>mod after_pub;
+mod after_pub_next;</fold>
+
+<fold mods>mod before_pub;
+mod before_pub_next;</fold>
+pub mod bar;
+
+mod not_folding_single;
+pub mod foobar;
+pub not_folding_single_next;
+
+<fold mods>#[cfg(test)]
+mod with_attribute;
+mod with_attribute_next;</fold>
+
+mod inline0 {}
+mod inline1 {}
+
+mod inline2 <fold block>{
+
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_import_groups() {
+ check(
+ r#"
+<fold imports>use std::str;
+use std::vec;
+use std::io as iop;</fold>
+
+<fold imports>use std::mem;
+use std::f64;</fold>
+
+<fold imports>use std::collections::HashMap;
+// Some random comment
+use std::collections::VecDeque;</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_import_and_groups() {
+ check(
+ r#"
+<fold imports>use std::str;
+use std::vec;
+use std::io as iop;</fold>
+
+<fold imports>use std::mem;
+use std::f64;</fold>
+
+use std::collections::<fold block>{
+ HashMap,
+ VecDeque,
+}</fold>;
+// Some random comment
+"#,
+ );
+ }
+
+ #[test]
+ fn test_folds_structs() {
+ check(
+ r#"
+struct Foo <fold block>{
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_folds_traits() {
+ check(
+ r#"
+trait Foo <fold block>{
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_folds_macros() {
+ check(
+ r#"
+macro_rules! foo <fold block>{
+ ($($tt:tt)*) => { $($tt)* }
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_match_arms() {
+ check(
+ r#"
+fn main() <fold block>{
+ match 0 <fold block>{
+ 0 => 0,
+ _ => 1,
+ }</fold>
+}</fold>
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fold_multiline_non_block_match_arm() {
+ check(
+ r#"
+ fn main() <fold block>{
+ match foo <fold block>{
+ block => <fold block>{
+ }</fold>,
+ matcharm => <fold matcharm>some.
+ call().
+ chain()</fold>,
+ matcharm2
+ => 0,
+ match_expr => <fold matcharm>match foo2 <fold block>{
+ bar => (),
+ }</fold></fold>,
+ array_list => <fold array>[
+ 1,
+ 2,
+ 3,
+ ]</fold>,
+ strustS => <fold matcharm>StructS <fold block>{
+ a: 31,
+ }</fold></fold>,
+ }</fold>
+ }</fold>
+ "#,
+ )
+ }
+
+ #[test]
+ fn fold_big_calls() {
+ check(
+ r#"
+fn main() <fold block>{
+ frobnicate<fold arglist>(
+ 1,
+ 2,
+ 3,
+ )</fold>
+}</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_record_literals() {
+ check(
+ r#"
+const _: S = S <fold block>{
+
+}</fold>;
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_multiline_params() {
+ check(
+ r#"
+fn foo<fold arglist>(
+ x: i32,
+ y: String,
+)</fold> {}
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_multiline_array() {
+ check(
+ r#"
+const FOO: [usize; 4] = <fold array>[
+ 1,
+ 2,
+ 3,
+ 4,
+]</fold>;
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_region() {
+ check(
+ r#"
+// 1. some normal comment
+<fold region>// region: test
+// 2. some normal comment
+<fold region>// region: inner
+fn f() {}
+// endregion</fold>
+fn f2() {}
+// endregion: test</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_consecutive_const() {
+ check(
+ r#"
+<fold consts>const FIRST_CONST: &str = "first";
+const SECOND_CONST: &str = "second";</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_consecutive_static() {
+ check(
+ r#"
+<fold statics>static FIRST_STATIC: &str = "first";
+static SECOND_STATIC: &str = "second";</fold>
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_where_clause() {
+ // fold multi-line and don't fold single line.
+ check(
+ r#"
+fn foo()
+where<fold whereclause>
+ A: Foo,
+ B: Foo,
+ C: Foo,
+ D: Foo,</fold> {}
+
+fn bar()
+where
+ A: Bar, {}
+"#,
+ )
+ }
+
+ #[test]
+ fn fold_return_type() {
+ check(
+ r#"
+fn foo()<fold returntype>-> (
+ bool,
+ bool,
+)</fold> { (true, true) }
+
+fn bar() -> (bool, bool) { (true, true) }
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
new file mode 100644
index 000000000..926292c9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -0,0 +1,112 @@
+use hir::Semantics;
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ RootDatabase,
+};
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo};
+
+// Feature: Go to Declaration
+//
+// Navigates to the declaration of an identifier.
+//
+// This is currently the same as `Go to Definition` with the exception of outline modules where it
+// will navigate to the `mod name;` item declaration.
+pub(crate) fn goto_declaration(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
+ let original_token = file
+ .token_at_offset(position.offset)
+ .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
+ let range = original_token.text_range();
+ let info: Vec<NavigationTarget> = sema
+ .descend_into_macros(original_token)
+ .iter()
+ .filter_map(|token| {
+ let parent = token.parent()?;
+ let def = match_ast! {
+ match parent {
+ ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {
+ NameRefClass::Definition(it) => Some(it),
+ _ => None
+ },
+ ast::Name(name) => match NameClass::classify(&sema, &name)? {
+ NameClass::Definition(it) => Some(it),
+ _ => None
+ },
+ _ => None
+ }
+ };
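+            // Only module definitions are mapped to their `mod name;` declaration here;
+            // all other definitions yield no declaration target.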
+ match def? {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(db, module))
+ }
+ _ => None,
+ }
+ })
+ .collect();
+
+ Some(RangeInfo::new(range, info))
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis
+ .goto_declaration(position)
+ .unwrap()
+ .expect("no declaration or definition found")
+ .info;
+ if navs.is_empty() {
+ panic!("unresolved reference")
+ }
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ #[test]
+ fn goto_decl_module_outline() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+ // ^^^
+//- /foo.rs
+use self$0;
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_decl_module_inline() {
+ check(
+ r#"
+mod foo {
+ // ^^^
+ use self$0;
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
new file mode 100644
index 000000000..d9c97751c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -0,0 +1,1634 @@
+use std::{convert::TryInto, mem::discriminant};
+
+use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use hir::{AsAssocItem, AssocItem, Semantics};
+use ide_db::{
+ base_db::{AnchoredPath, FileId, FileLoader},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+// Feature: Go to Definition
+//
+// Navigates to the definition of an identifier.
+//
+// For outline modules, this will navigate to the source file of the module.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
+pub(crate) fn goto_definition(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
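+    // Pick the most interesting token at the cursor: identifiers, keywords, numbers and
+    // comments rank highest, other tokens rank in the middle, and trivia ranks lowest.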
+ let original_token =
+ pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
+ | COMMENT => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ return doc_comment.get_definition_with_descend_at(sema, position.offset, |def, _, _| {
+ let nav = def.try_to_nav(db)?;
+ Some(RangeInfo::new(original_token.text_range(), vec![nav]))
+ });
+ }
+ let navs = sema
+ .descend_into_macros(original_token.clone())
+ .into_iter()
+ .filter_map(|token| {
+ let parent = token.parent()?;
+ if let Some(tt) = ast::TokenTree::cast(parent) {
+ if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+ {
+ return Some(vec![x]);
+ }
+ }
+ Some(
+ IdentClass::classify_token(sema, &token)?
+ .definitions()
+ .into_iter()
+ .flat_map(|def| {
+ try_filter_trait_item_definition(sema, &def)
+ .unwrap_or_else(|| def_to_nav(sema.db, def))
+ })
+ .collect(),
+ )
+ })
+ .flatten()
+ .unique()
+ .collect::<Vec<NavigationTarget>>();
+
+ Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
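+/// If the token is the string-literal argument of `include!`, `include_str!` or
+/// `include_bytes!`, resolves it to the included file and returns a navigation target
+/// spanning that whole file.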
+fn try_lookup_include_path(
+ sema: &Semantics<'_, RootDatabase>,
+ tt: ast::TokenTree,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<NavigationTarget> {
+ let token = ast::String::cast(token)?;
+ let path = token.value()?.into_owned();
+ let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+ if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
+ return None;
+ }
+ let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
+ let size = sema.db.file_text(file_id).len().try_into().ok()?;
+ Some(NavigationTarget {
+ file_id,
+ full_range: TextRange::new(0.into(), size),
+ name: path.into(),
+ focus_range: None,
+ kind: None,
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+}
+/// Finds the trait definition of an impl'd item, except functions.
+/// e.g.
+/// ```rust
+/// trait A { type a; }
+/// struct S;
+/// impl A for S { type a = i32; } // <-- on this associated type, will get the location of a in the trait
+/// ```
+fn try_filter_trait_item_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &Definition,
+) -> Option<Vec<NavigationTarget>> {
+ let db = sema.db;
+ let assoc = def.as_assoc_item(db)?;
+ match assoc {
+ AssocItem::Function(..) => None,
+ AssocItem::Const(..) | AssocItem::TypeAlias(..) => {
+ let imp = match assoc.container(db) {
+ hir::AssocItemContainer::Impl(imp) => imp,
+ _ => return None,
+ };
+ let trait_ = imp.trait_(db)?;
+ let name = def.name(db)?;
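+            // Find the trait item of the same kind (const vs. type alias) and the same name
+            // as the impl item, and navigate to its definition in the trait.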
+ let discri_value = discriminant(&assoc);
+ trait_
+ .items(db)
+ .iter()
+ .filter(|itm| discriminant(*itm) == discri_value)
+ .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten())
+ .map(|it| vec![it])
+ }
+ }
+}
+
+fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
+ def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+ if navs.is_empty() {
+ panic!("unresolved reference")
+ }
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ fn check_unresolved(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+
+ assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs)
+ }
+
+ #[test]
+ fn goto_def_if_items_same_name() {
+ check(
+ r#"
+trait Trait {
+ type A;
+ const A: i32;
+ //^
+}
+
+struct T;
+impl Trait for T {
+ type A = i32;
+ const A$0: i32 = -9;
+}"#,
+ );
+ }
+ #[test]
+ fn goto_def_in_mac_call_in_attr_invoc() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Struct {
+ // ^^^^^^
+ field: i32,
+}
+
+macro_rules! identity {
+ ($($tt:tt)*) => {$($tt)*};
+}
+
+#[proc_macros::identity]
+fn function() {
+ identity!(Struct$0 { field: 0 });
+}
+
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_renamed_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_in_items() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo$0) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_at_start_of_item() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X($0Foo) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_resolves_correct_name() {
+ check(
+ r#"
+//- /lib.rs
+use a::Foo;
+mod a;
+mod b;
+enum E { X(Foo$0) }
+
+//- /a.rs
+struct Foo;
+ //^^^
+//- /b.rs
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_module_declaration() {
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo.rs
+// empty
+//^file
+"#,
+ );
+
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo/mod.rs
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros() {
+ check(
+ r#"
+macro_rules! foo { () => { () } }
+ //^^^
+fn bar() {
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_from_other_crates() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo;
+fn bar() {
+ $0foo!();
+}
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_in_use_tree() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_with_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ ($name:ident) => (fn $name() {})
+}
+
+define_fn!(foo);
+ //^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_no_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ () => (fn foo() {})
+}
+
+ define_fn!();
+//^^^^^^^^^^^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_pattern() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+
+fn bar() {
+ match (0,1) {
+ ($0foo!(), _) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_match_arm_lhs() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+fn bar() {
+ match 0 {
+ $0foo!() => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias_foo_macro() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate(&self) { }
+ //^^^^^^^^^^
+}
+
+fn bar(foo: &Foo) {
+ foo.frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_fields() {
+ check(
+ r#"
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: &Foo) {
+ foo.spam$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar() -> Foo {
+ Foo {
+ spam$0: 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_pat_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: Foo) -> Foo {
+ let Foo { spam$0: _, } = foo
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields_macros() {
+ check(
+ r"
+macro_rules! m { () => { 92 };}
+struct Foo { spam: u32 }
+ //^^^^
+
+fn bar() -> Foo {
+ Foo { spam$0: m!() }
+}
+",
+ );
+ }
+
+ #[test]
+ fn goto_for_tuple_fields() {
+ check(
+ r#"
+struct Foo(u32);
+ //^^^
+
+fn bar() {
+ let foo = Foo(0);
+ foo.$00;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_inherent_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate() { }
+} //^^^^^^^^^^
+
+fn bar(foo: &Foo) {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_traits() {
+ check(
+ r#"
+trait Foo {
+ fn frobnicate();
+} //^^^^^^^^^^
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_self() {
+ check(
+ r#"
+struct Foo;
+trait Trait {
+ fn frobnicate();
+} //^^^^^^^^^^
+impl Trait for Foo {}
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Foo::A
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn thing(a: &Self$0) {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self_in_trait_impl() {
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_when_used_on_definition_name_itself() {
+ check(
+ r#"
+struct Foo$0 { value: u32 }
+ //^^^
+ "#,
+ );
+
+ check(
+ r#"
+struct Foo {
+ field$0: string,
+} //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+fn foo_test$0() { }
+ //^^^^^^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo$0 { Variant }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo {
+ Variant1,
+ Variant2$0,
+ //^^^^^^^^
+ Variant3,
+}
+"#,
+ );
+
+ check(
+ r#"
+static INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+const INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+type Thing$0 = Option<()>;
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+trait Foo$0 { }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+mod bar$0 { }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_from_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => { $($tt)* }
+}
+fn foo() {}
+ //^^^
+id! {
+ fn bar() {
+ fo$0o();
+ }
+}
+mod confuse_index { fn foo(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_format() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! format {
+ ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+pub mod __export {
+ pub use crate::format_args;
+ fn foo() {} // for index confusion
+}
+fn foo() -> i8 {}
+ //^^^
+fn test() {
+ format!("{}", fo$0o())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_included_file() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+ include!("foo.rs");
+//^^^^^^^^^^^^^^^^^^^
+
+fn f() {
+ foo$0();
+}
+
+mod confuse_index {
+ pub fn foo() {}
+}
+
+//- /foo.rs
+fn foo() {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_for_type_param() {
+ check(
+ r#"
+struct Foo<T: Clone> { t: $0T }
+ //^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_within_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ //^
+ id!({
+ let y = $0x;
+ let z = y;
+ });
+}
+"#,
+ );
+
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ id!({
+ let y = x;
+ //^
+ let z = $0y;
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_fn() {
+ check(
+ r#"
+fn main() {
+ fn foo() {
+ let x = 92;
+ //^
+ $0x;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_macro() {
+ check(
+ r#"
+fn bar() {
+ macro_rules! foo { () => { () } }
+ //^^^
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_field_init_shorthand() {
+ check(
+ r#"
+struct Foo { x: i32 }
+ //^
+fn main() {
+ let x = 92;
+ //^
+ Foo { x$0 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_field() {
+ check(
+ r#"
+enum Foo {
+ Bar { x: i32 }
+ //^
+}
+fn baz(foo: Foo) {
+ match foo {
+ Foo::Bar { x$0 } => x
+ //^
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) {
+ match self { Self::Bar$0 => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) -> i32 {
+ match self { Self::Bar$0 { val } => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0 {val: 4}; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_type_alias_generic_parameter() {
+ check(
+ r#"
+type Alias<T> = T$0;
+ //^
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_macro_container() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+foo::module$0::mac!();
+
+//- /foo/lib.rs crate:foo
+pub mod module {
+ //^^^^^^
+ #[macro_export]
+ macro_rules! _mac { () => { () } }
+ pub use crate::_mac as mac;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn f() -> impl Iterator<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_super_assoc_ty_in_path() {
+ check(
+ r#"
+trait Super {
+ type Item;
+ //^^^^
+}
+
+trait Sub: Super {}
+
+fn f() -> impl Sub<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn unknown_assoc_ty() {
+ check_unresolved(
+ r#"
+trait Iterator { type Item; }
+fn f() -> impl Iterator<Invalid$0 = u8> {}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn f() -> impl Iterator<A$0 = u8, B = ()> {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn f() -> impl Iterator<A = u8, B$0 = ()> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn g() -> <() as Iterator<Item$0 = ()>>::Item {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn g() -> <() as Iterator<A$0 = (), B = u8>>::B {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn g() -> <() as Iterator<A = (), B$0 = u8>>::A {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_self_param_ty_specified() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(self: &Foo) {
+ //^^^^
+ let foo = sel$0f;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_self_param_on_decl() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(&self$0) {
+ //^^^^
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_on_decl() {
+ check(
+ r#"
+fn foo<'foobar$0>(_: &'foobar ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar$0 ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl_nested() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar ()) {
+ fn foo<'foobar>(_: &'foobar$0 ()) {}
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb() {
+ // FIXME: requires the HIR to somehow track these hrtb lifetimes
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a> T: Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a$0> T: Foo<&'a (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb_for_type() {
+ // FIXME: requires ForTypes to be implemented
+ check_unresolved(
+ r#"trait Foo<T> {}
+fn foo<T>() where T: for<'a> Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_label() {
+ check(
+ r#"
+fn foo<'foo>(_: &'foo ()) {
+ 'foo: {
+ //^^^^
+ 'bar: loop {
+ break 'foo$0;
+ }
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_same_file() {
+ check(
+ r#"
+/// Blah, [`bar`](bar) .. [`foo`](foo$0) has [`bar`](bar)
+pub fn bar() { }
+
+/// You might want to see [`std::fs::read()`] too.
+pub fn foo() { }
+ //^^^
+
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_inner() {
+ check(
+ r#"
+//- /main.rs
+mod m;
+struct S;
+ //^
+
+//- /m.rs
+//! [`super::S$0`]
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_incomplete_field() {
+ check(
+ r#"
+struct A { a: u32 }
+ //^
+fn foo() { A { a$0: }; }
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_proc_macro() {
+ check(
+ r#"
+//- /main.rs crate:main deps:mac
+use mac::fn_macro;
+
+fn_macro$0!();
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro]
+fn fn_macro() {}
+ //^^^^^^^^
+ "#,
+ )
+ }
+
+ #[test]
+ fn goto_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+ //^^^^^^^
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_ident_from_pat_macro() {
+ check(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ //^^^^
+ bind$0
+ }
+ Enum::Variant2 => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_include() {
+ check(
+ r#"
+//- /main.rs
+fn main() {
+ let str = include_str!("foo.txt$0");
+}
+//- /foo.txt
+// empty
+//^file
+"#,
+ );
+ }
+ #[cfg(test)]
+ mod goto_impl_of_trait_fn {
+ use super::check;
+ #[test]
+ fn cursor_on_impl() {
+ check(
+ r#"
+trait Twait {
+ fn a();
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a$0();
+ //^
+}
+ "#,
+ );
+ }
+ #[test]
+ fn method_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn path_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ Stwuct::a$0(&s);
+}
+ "#,
+ );
+ }
+ #[test]
+ fn where_clause_can_work() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait Bound{}
+trait EA{}
+struct Gen<T>(T);
+impl <T:EA> G for Gen<T> {
+ fn g(&self) {
+ }
+}
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn wc_case_is_ok() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait BParent{}
+trait Bound: BParent{}
+struct Gen<T>(T);
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_call_defaulted() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn method_call_on_generic() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+fn f<T: Twait>(s: T) {
+ s.a$0();
+}
+ "#,
+ );
+ }
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_const() {
+ check(
+ r#"
+trait Twait {
+ const NOMS: bool;
+ // ^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ const NOMS$0: bool = true;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_type_alias() {
+ check(
+ r#"
+trait Twait {
+ type IsBad;
+ // ^^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ type IsBad$0 = !;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_derive_input() {
+ check(
+ r#"
+ //- minicore:derive
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+ #[derive(Copy$0)]
+ struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+ // ^^^^
+#[cfg_attr(feature = "false", derive)]
+#[derive(Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ // ^^^
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo$0::Copy)]
+struct Foo;
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_macro_multi() {
+ check(
+ r#"
+struct Foo {
+ foo: ()
+ //^^^
+}
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident(Foo { $ident }: Foo) {}
+ }
+}
+foo!(foo$0);
+ //^^^
+ //^^^
+"#,
+ );
+ check(
+ r#"
+fn bar() {}
+ //^^^
+struct bar;
+ //^^^
+macro_rules! foo {
+ ($ident:ident) => {
+ fn foo() {
+ let _: $ident = $ident;
+ }
+ }
+}
+
+foo!(bar$0);
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
new file mode 100644
index 000000000..04b51c839
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -0,0 +1,344 @@
+use hir::{AsAssocItem, Impl, Semantics};
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxKind::*, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+// Feature: Go to Implementation
+//
+// Navigates to the impl blocks of types.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
+pub(crate) fn goto_implementation(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax().clone();
+
+ let original_token =
+ pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
+ IDENT | T![self] => 1,
+ _ => 0,
+ })?;
+ let range = original_token.text_range();
+ let navs = sema
+ .descend_into_macros(original_token)
+ .into_iter()
+ .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
+ .filter_map(|node| match &node {
+ ast::NameLike::Name(name) => {
+ NameClass::classify(&sema, name).map(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ })
+ }
+ ast::NameLike::NameRef(name_ref) => {
+ NameRefClass::classify(&sema, name_ref).map(|class| match class {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ })
+ }
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .unique()
+ .filter_map(|def| {
+ let navs = match def {
+ Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
+ Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
+ Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
+ Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
+ Definition::Function(f) => {
+ let assoc = f.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
+ }
+ Definition::Const(c) => {
+ let assoc = c.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
+ }
+ _ => return None,
+ };
+ Some(navs)
+ })
+ .flatten()
+ .collect();
+
+ Some(RangeInfo { range, info: navs })
+}
+
+fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
+ Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect()
+}
+
+fn impls_for_trait(
+ sema: &Semantics<'_, RootDatabase>,
+ trait_: hir::Trait,
+) -> Vec<NavigationTarget> {
+ Impl::all_for_trait(sema.db, trait_)
+ .into_iter()
+ .filter_map(|imp| imp.try_to_nav(sema.db))
+ .collect()
+}
+
+fn impls_for_trait_item(
+ sema: &Semantics<'_, RootDatabase>,
+ trait_: hir::Trait,
+ fun_name: hir::Name,
+) -> Vec<NavigationTarget> {
+ Impl::all_for_trait(sema.db, trait_)
+ .into_iter()
+ .filter_map(|imp| {
+ let item = imp.items(sema.db).iter().find_map(|itm| {
+ let itm_name = itm.name(sema.db)?;
+ (itm_name == fun_name).then(|| *itm)
+ })?;
+ item.try_to_nav(sema.db)
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+
+ let navs = analysis.goto_implementation(position).unwrap().unwrap().info;
+
+ let cmp = |frange: &FileRange| (frange.file_id, frange.range.start());
+
+ let actual = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected =
+ expected.into_iter().map(|(range, _)| range).sorted_by_key(cmp).collect::<Vec<_>>();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn goto_implementation_works() {
+ check(
+ r#"
+struct Foo$0;
+impl Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_blocks() {
+ check(
+ r#"
+struct Foo$0;
+impl Foo {}
+ //^^^
+impl Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_mods() {
+ check(
+ r#"
+struct Foo$0;
+mod a {
+ impl super::Foo {}
+ //^^^^^^^^^^
+}
+mod b {
+ impl super::Foo {}
+ //^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_files() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo$0;
+mod a;
+mod b;
+//- /a.rs
+impl crate::Foo {}
+ //^^^^^^^^^^
+//- /b.rs
+impl crate::Foo {}
+ //^^^^^^^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_for_trait() {
+ check(
+ r#"
+trait T$0 {}
+struct Foo;
+impl T for Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_for_trait_multiple_files() {
+ check(
+ r#"
+//- /lib.rs
+trait T$0 {};
+struct Foo;
+mod a;
+mod b;
+//- /a.rs
+impl crate::T for crate::Foo {}
+ //^^^^^^^^^^
+//- /b.rs
+impl crate::T for crate::Foo {}
+ //^^^^^^^^^^
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_all_impls() {
+ check(
+ r#"
+//- /lib.rs
+trait T {}
+struct Foo$0;
+impl Foo {}
+ //^^^
+impl T for Foo {}
+ //^^^
+impl T for &Foo {}
+ //^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_to_builtin_derive() {
+ check(
+ r#"
+//- minicore: copy, derive
+ #[derive(Copy)]
+//^^^^^^^^^^^^^^^
+struct Foo$0;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_type_alias() {
+ check(
+ r#"
+struct Foo;
+
+type Bar$0 = Foo;
+
+impl Foo {}
+ //^^^
+impl Bar {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_adt_generic() {
+ check(
+ r#"
+struct Foo$0<T>;
+
+impl<T> Foo<T> {}
+ //^^^^^^
+impl Foo<str> {}
+ //^^^^^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_builtin() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:core
+fn foo(_: bool$0) {{}}
+//- /libcore.rs crate:core
+#[lang = "bool"]
+impl bool {}
+ //^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_trait_functions() {
+ check(
+ r#"
+trait Tr {
+ fn f$0();
+}
+
+struct S;
+
+impl Tr for S {
+ fn f() {
+ //^
+ println!("Hello, world!");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_trait_assoc_const() {
+ check(
+ r#"
+trait Tr {
+ const C$0: usize;
+}
+
+struct S;
+
+impl Tr for S {
+ const C: usize = 4;
+ //^
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
new file mode 100644
index 000000000..55cdb3200
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -0,0 +1,296 @@
+use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+// Feature: Go to Type Definition
+//
+// Navigates to the type of an identifier.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Go to Type Definition**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[]
+pub(crate) fn goto_type_definition(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = hir::Semantics::new(db);
+
+ let file: ast::SourceFile = sema.parse(position.file_id);
+ let token: SyntaxToken =
+ pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+ IDENT | INT_NUMBER | T![self] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+
+ let mut res = Vec::new();
+ let mut push = |def: Definition| {
+ if let Some(nav) = def.try_to_nav(db) {
+ if !res.contains(&nav) {
+ res.push(nav);
+ }
+ }
+ };
+ let range = token.text_range();
+ sema.descend_into_macros(token)
+ .iter()
+ .filter_map(|token| {
+ let ty = sema.token_ancestors_with_macros(token.clone()).find_map(|node| {
+ let ty = match_ast! {
+ match node {
+ ast::Expr(it) => sema.type_of_expr(&it)?.original,
+ ast::Pat(it) => sema.type_of_pat(&it)?.original,
+ ast::SelfParam(it) => sema.type_of_self(&it)?,
+ ast::Type(it) => sema.resolve_type(&it)?,
+ ast::RecordField(it) => sema.to_def(&it).map(|d| d.ty(db.upcast()))?,
+ // can't match on RecordExprField directly as `ast::Expr` will match an iteration too early otherwise
+ ast::NameRef(it) => {
+ if let Some(record_field) = ast::RecordExprField::for_name_ref(&it) {
+ let (_, _, ty) = sema.resolve_record_field(&record_field)?;
+ ty
+ } else {
+ let record_field = ast::RecordPatField::for_field_name_ref(&it)?;
+ sema.resolve_record_pat_field(&record_field)?.ty(db)
+ }
+ },
+ _ => return None,
+ }
+ };
+
+ Some(ty)
+ });
+ ty
+ })
+ .for_each(|ty| {
+ // collect from each `ty` into the `res` result vec
+ let ty = ty.strip_references();
+ ty.walk(db, |t| {
+ if let Some(adt) = t.as_adt() {
+ push(adt.into());
+ } else if let Some(trait_) = t.as_dyn_trait() {
+ push(trait_.into());
+ } else if let Some(traits) = t.as_impl_traits(db) {
+ traits.for_each(|it| push(it.into()));
+ } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+ push(trait_.into());
+ }
+ });
+ });
+ Some(RangeInfo::new(range, res))
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.goto_type_definition(position).unwrap().unwrap().info;
+ assert_ne!(navs.len(), 0);
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ #[test]
+ fn goto_type_definition_works_simple() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+fn foo() {
+ let f: Foo; f$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_record_expr_field() {
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ Foo { foo$0 }
+}
+"#,
+ );
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ Foo { foo$0: Bar }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_record_pat_field() {
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ let Foo { foo$0 };
+}
+"#,
+ );
+ check(
+ r#"
+struct Bar;
+ // ^^^
+struct Foo { foo: Bar }
+fn foo() {
+ let Foo { foo$0: bar };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_works_simple_ref() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+fn foo() {
+ let f: &Foo; f$0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_works_through_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+struct Foo {}
+ //^^^
+id! {
+ fn bar() { let f$0 = Foo {}; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_for_param() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+fn foo($0f: Foo) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_type_definition_for_tuple_field() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+struct Bar(Foo);
+fn foo() {
+ let bar = Bar(Foo);
+ bar.$00;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_self_param() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+impl Foo {
+ fn f(&self$0) {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_type_fallback() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+impl Foo$0 {}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_struct_field() {
+ check(
+ r#"
+struct Bar;
+ //^^^
+
+struct Foo {
+ bar$0: Bar,
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_struct_field() {
+ check(
+ r#"
+struct Bar;
+ //^^^
+
+enum Foo {
+ Bar {
+ bar$0: Bar
+ },
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_considers_generics() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+struct Bar<T, U>(T, U);
+ //^^^
+struct Baz<T>(T);
+ //^^^
+
+fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
new file mode 100644
index 000000000..f2d7029ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -0,0 +1,1377 @@
+use hir::Semantics;
+use ide_db::{
+ base_db::{FileId, FilePosition},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ search::{FileReference, ReferenceCategory, SearchScope},
+ syntax_helpers::node_ext::{for_each_break_and_continue_expr, for_each_tail_expr, walk_expr},
+ FxHashSet, RootDatabase,
+};
+use syntax::{
+ ast::{self, HasLoopBody},
+ match_ast, AstNode,
+ SyntaxKind::{self, IDENT, INT_NUMBER},
+ SyntaxNode, SyntaxToken, TextRange, T,
+};
+
+use crate::{references, NavigationTarget, TryToNav};
+
+#[derive(PartialEq, Eq, Hash)]
+pub struct HighlightedRange {
+ pub range: TextRange,
+ // FIXME: This needs to be more precise. Reference category makes sense only
+ // for references, but we also have defs. And things like exit points are
+ // neither.
+ pub category: Option<ReferenceCategory>,
+}
+
+#[derive(Default, Clone)]
+pub struct HighlightRelatedConfig {
+ pub references: bool,
+ pub exit_points: bool,
+ pub break_points: bool,
+ pub yield_points: bool,
+}
+
+// Feature: Highlight Related
+//
+// Highlights constructs related to the thing under the cursor:
+//
+// . if on an identifier, highlights all references to that identifier in the current file
+// . if on an `async` or `await` token, highlights all yield points for that async context
+// . if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
+// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
+//
+// Note: `?` and `->` do not currently trigger this behavior in the VSCode editor.
+pub(crate) fn highlight_related(
+ sema: &Semantics<'_, RootDatabase>,
+ config: HighlightRelatedConfig,
+ FilePosition { offset, file_id }: FilePosition,
+) -> Option<Vec<HighlightedRange>> {
+ let _p = profile::span("highlight_related");
+ let syntax = sema.parse(file_id).syntax().clone();
+
+ let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
+ T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
+ T![->] => 3,
+ kind if kind.is_keyword() => 2,
+ IDENT | INT_NUMBER => 1,
+ _ => 0,
+ })?;
+ match token.kind() {
+ T![?] if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => {
+ highlight_exit_points(sema, token)
+ }
+ T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token),
+ T![await] | T![async] if config.yield_points => highlight_yield_points(token),
+ T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => {
+ highlight_break_points(token)
+ }
+ T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
+ highlight_break_points(token)
+ }
+ _ if config.references => highlight_references(sema, &syntax, token, file_id),
+ _ => None,
+ }
+}
+
+fn highlight_references(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<Vec<HighlightedRange>> {
+ let defs = find_defs(sema, token);
+ let usages = defs
+ .iter()
+ .filter_map(|&d| {
+ d.usages(sema)
+ .set_scope(Some(SearchScope::single_file(file_id)))
+ .include_self_refs()
+ .all()
+ .references
+ .remove(&file_id)
+ })
+ .flatten()
+ .map(|FileReference { category: access, range, .. }| HighlightedRange {
+ range,
+ category: access,
+ });
+ let mut res = FxHashSet::default();
+
+ let mut def_to_hl_range = |def| {
+ let hl_range = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .filter(|decl| decl.file_id == file_id)
+ .and_then(|decl| decl.focus_range)
+ .map(|range| {
+ let category =
+ references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
+ HighlightedRange { range, category }
+ });
+ if let Some(hl_range) = hl_range {
+ res.insert(hl_range);
+ }
+ };
+ for &def in &defs {
+ match def {
+ Definition::Local(local) => local
+ .associated_locals(sema.db)
+ .iter()
+ .for_each(|&local| def_to_hl_range(Definition::Local(local))),
+ def => def_to_hl_range(def),
+ }
+ }
+
+ res.extend(usages);
+ if res.is_empty() {
+ None
+ } else {
+ Some(res.into_iter().collect())
+ }
+}
+
+fn highlight_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<Vec<HighlightedRange>> {
+ fn hl(
+ sema: &Semantics<'_, RootDatabase>,
+ body: Option<ast::Expr>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights = Vec::new();
+ let body = body?;
+ walk_expr(&body, &mut |expr| match expr {
+ ast::Expr::ReturnExpr(expr) => {
+ if let Some(token) = expr.return_token() {
+ highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ ast::Expr::TryExpr(try_) => {
+ if let Some(token) = try_.question_mark_token() {
+ highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) => {
+ if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) {
+ highlights.push(HighlightedRange {
+ category: None,
+ range: expr.syntax().text_range(),
+ });
+ }
+ }
+ _ => (),
+ });
+ let tail = match body {
+ ast::Expr::BlockExpr(b) => b.tail_expr(),
+ e => Some(e),
+ };
+
+ if let Some(tail) = tail {
+ for_each_tail_expr(&tail, &mut |tail| {
+ let range = match tail {
+ ast::Expr::BreakExpr(b) => b
+ .break_token()
+ .map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()),
+ _ => tail.syntax().text_range(),
+ };
+ highlights.push(HighlightedRange { category: None, range })
+ });
+ }
+ Some(highlights)
+ }
+ for anc in token.parent_ancestors() {
+ return match_ast! {
+ match anc {
+ ast::Fn(fn_) => hl(sema, fn_.body().map(ast::Expr::BlockExpr)),
+ ast::ClosureExpr(closure) => hl(sema, closure.body()),
+ ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
+ hl(sema, Some(block_expr.into()))
+ } else {
+ continue;
+ },
+ _ => continue,
+ }
+ };
+ }
+ None
+}
+
+fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
+ fn hl(
+ cursor_token_kind: SyntaxKind,
+ token: Option<SyntaxToken>,
+ label: Option<ast::Label>,
+ body: Option<ast::StmtList>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights = Vec::new();
+ let range = cover_range(
+ token.map(|tok| tok.text_range()),
+ label.as_ref().map(|it| it.syntax().text_range()),
+ );
+ highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ for_each_break_and_continue_expr(label, body, &mut |expr| {
+ let range: Option<TextRange> = match (cursor_token_kind, expr) {
+ (T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => {
+ cover_range(
+ break_.break_token().map(|it| it.text_range()),
+ break_.lifetime().map(|it| it.syntax().text_range()),
+ )
+ }
+ (
+ T![for] | T![while] | T![loop] | T![continue],
+ ast::Expr::ContinueExpr(continue_),
+ ) => cover_range(
+ continue_.continue_token().map(|it| it.text_range()),
+ continue_.lifetime().map(|it| it.syntax().text_range()),
+ ),
+ _ => None,
+ };
+ highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ });
+ Some(highlights)
+ }
+ let parent = token.parent()?;
+ let lbl = match_ast! {
+ match parent {
+ ast::BreakExpr(b) => b.lifetime(),
+ ast::ContinueExpr(c) => c.lifetime(),
+ ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()),
+ ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()),
+ ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()),
+ ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?),
+ _ => return None,
+ }
+ };
+ let lbl = lbl.as_ref();
+ let label_matches = |def_lbl: Option<ast::Label>| match lbl {
+ Some(lbl) => {
+ Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text())
+ }
+ None => true,
+ };
+ let token_kind = token.kind();
+ for anc in token.parent_ancestors().flat_map(ast::Expr::cast) {
+ return match anc {
+ ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl(
+ token_kind,
+ l.loop_token(),
+ l.label(),
+ l.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::ForExpr(f) if label_matches(f.label()) => hl(
+ token_kind,
+ f.for_token(),
+ f.label(),
+ f.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl(
+ token_kind,
+ w.while_token(),
+ w.label(),
+ w.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => {
+ hl(token_kind, None, e.label(), e.stmt_list())
+ }
+ _ => continue,
+ };
+ }
+ None
+}
+
+fn highlight_yield_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
+ fn hl(
+ async_token: Option<SyntaxToken>,
+ body: Option<ast::Expr>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights =
+ vec![HighlightedRange { category: None, range: async_token?.text_range() }];
+ if let Some(body) = body {
+ walk_expr(&body, &mut |expr| {
+ if let ast::Expr::AwaitExpr(expr) = expr {
+ if let Some(token) = expr.await_token() {
+ highlights
+ .push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ });
+ }
+ Some(highlights)
+ }
+ for anc in token.parent_ancestors() {
+ return match_ast! {
+ match anc {
+ ast::Fn(fn_) => hl(fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)),
+ ast::BlockExpr(block_expr) => {
+ if block_expr.async_token().is_none() {
+ continue;
+ }
+ hl(block_expr.async_token(), Some(block_expr.into()))
+ },
+ ast::ClosureExpr(closure) => hl(closure.async_token(), closure.body()),
+ _ => continue,
+ }
+ };
+ }
+ None
+}
+
+fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
+ match (r0, r1) {
+ (Some(r0), Some(r1)) => Some(r0.cover(r1)),
+ (Some(range), None) => Some(range),
+ (None, Some(range)) => Some(range),
+ (None, None) => None,
+ }
+}
+
+fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
+ sema.descend_into_macros(token)
+ .into_iter()
+ .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+ .flatten()
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::*;
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let config = HighlightRelatedConfig {
+ break_points: true,
+ exit_points: true,
+ references: true,
+ yield_points: true,
+ };
+
+ check_with_config(ra_fixture, config);
+ }
+
+ #[track_caller]
+ fn check_with_config(ra_fixture: &str, config: HighlightRelatedConfig) {
+ let (analysis, pos, annotations) = fixture::annotations(ra_fixture);
+
+ let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or_default();
+
+ let mut expected = annotations
+ .into_iter()
+ .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
+ .collect::<Vec<_>>();
+
+ let mut actual = hls
+ .into_iter()
+ .map(|hl| {
+ (
+ hl.range,
+ hl.category.map(|it| {
+ match it {
+ ReferenceCategory::Read => "read",
+ ReferenceCategory::Write => "write",
+ }
+ .to_string()
+ }),
+ )
+ })
+ .collect::<Vec<_>>();
+ actual.sort_by_key(|(range, _)| range.start());
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn test_hl_tuple_fields() {
+ check(
+ r#"
+struct Tuple(u32, u32);
+
+fn foo(t: Tuple) {
+ t.0$0;
+ // ^ read
+ t.0;
+ // ^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo$0;
+ // ^^^
+//- /foo.rs
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_self_in_crate_root() {
+ check(
+ r#"
+use crate$0;
+ //^^^^^
+use self;
+ //^^^^
+mod __ {
+ use super;
+ //^^^^^
+}
+"#,
+ );
+ check(
+ r#"
+//- /main.rs crate:main deps:lib
+use lib$0;
+ //^^^
+//- /lib.rs crate:lib
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_self_in_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+use self$0;
+ // ^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_local() {
+ check(
+ r#"
+fn foo() {
+ let mut bar = 3;
+ // ^^^ write
+ bar$0;
+ // ^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_local_in_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn foo() {
+ let mut bar = 3;
+ // ^^^ write
+ bar$0;
+ // ^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_multi_macro_usage() {
+ check(
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident() -> $ident { loop {} }
+ struct $ident;
+ }
+}
+
+foo!(bar$0);
+ // ^^^
+fn foo() {
+ let bar: bar = bar();
+ // ^^^
+ // ^^^
+}
+"#,
+ );
+ check(
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident() -> $ident { loop {} }
+ struct $ident;
+ }
+}
+
+foo!(bar);
+ // ^^^
+fn foo() {
+ let bar: bar$0 = bar();
+ // ^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_points() {
+ check(
+ r#"
+pub async fn foo() {
+ // ^^^^^
+ let x = foo()
+ .await$0
+ // ^^^^^
+ .await;
+ // ^^^^^
+ || { 0.await };
+ (async { 0.await }).await
+ // ^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_points2() {
+ check(
+ r#"
+pub async$0 fn foo() {
+ // ^^^^^
+ let x = foo()
+ .await
+ // ^^^^^
+ .await;
+ // ^^^^^
+ || { 0.await };
+ (async { 0.await }).await
+ // ^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_nested_fn() {
+ check(
+ r#"
+async fn foo() {
+ async fn foo2() {
+ // ^^^^^
+ async fn foo3() {
+ 0.await
+ }
+ 0.await$0
+ // ^^^^^
+ }
+ 0.await
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_nested_async_blocks() {
+ check(
+ r#"
+async fn foo() {
+ (async {
+ // ^^^^^
+ (async {
+ 0.await
+ }).await$0 }
+ // ^^^^^
+ ).await;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points() {
+ check(
+ r#"
+fn foo() -> u32 {
+ if true {
+ return$0 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points2() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ if true {
+ return 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points3() {
+ check(
+ r#"
+fn$0 foo() -> u32 {
+ if true {
+ return 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_prefer_ref_over_tail_exit() {
+ check(
+ r#"
+fn foo() -> u32 {
+// ^^^
+ if true {
+ return 0;
+ }
+
+ 0?;
+
+ foo$0()
+ // ^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_never_call_is_exit_point() {
+ check(
+ r#"
+struct Never;
+impl Never {
+ fn never(self) -> ! { loop {} }
+}
+macro_rules! never {
+ () => { never() }
+}
+fn never() -> ! { loop {} }
+fn foo() ->$0 u32 {
+ never();
+ // ^^^^^^^
+ never!();
+ // ^^^^^^^^
+
+ Never.never();
+ // ^^^^^^^^^^^^^
+
+ 0
+ // ^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_inner_tail_exit_points() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ if true {
+ unsafe {
+ return 5;
+ // ^^^^^^
+ 5
+ // ^
+ }
+ } else if false {
+ 0
+ // ^
+ } else {
+ match 5 {
+ 6 => 100,
+ // ^^^
+ 7 => loop {
+ break 5;
+ // ^^^^^
+ }
+ 8 => 'a: loop {
+ 'b: loop {
+ break 'a 5;
+ // ^^^^^
+ break 'b 5;
+ break 5;
+ };
+ }
+ //
+ _ => 500,
+ // ^^^
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_inner_tail_exit_points_labeled_block() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ 'foo: {
+ break 'foo 0;
+ // ^^^^^
+ loop {
+ break;
+ break 'foo 0;
+ // ^^^^^
+ }
+ 0
+ // ^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_loop() {
+ check(
+ r#"
+fn foo() {
+ 'outer: loop {
+ // ^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: loop {
+ break;
+ 'innermost: loop {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_loop2() {
+ check(
+ r#"
+fn foo() {
+ 'outer: loop {
+ break;
+ 'inner: loop {
+ // ^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'innermost: loop {
+ break 'outer;
+ break 'inner;
+ // ^^^^^^^^^^^^
+ }
+ break 'outer;
+ break$0;
+ // ^^^^^
+ }
+ break;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_for() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: for _ in () {
+ break;
+ 'innermost: for _ in () {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_for_but_not_continue() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ continue;
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'inner;
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ continue 'outer;
+ break;
+ continue;
+ }
+ break;
+ // ^^^^^
+ continue;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_continue_for_but_not_break() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ continue;
+ // ^^^^^^^^
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break 'outer;
+ continue 'inner;
+ break 'inner;
+ }
+ break 'outer;
+ continue$0 'outer;
+ // ^^^^^^^^^^^^^^^
+ break;
+ continue;
+ }
+ break;
+ continue;
+ // ^^^^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_and_continue() {
+ check(
+ r#"
+fn foo() {
+ 'outer: fo$0r _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ continue;
+ // ^^^^^^^^
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'inner;
+ break 'inner;
+ }
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break;
+ continue;
+ }
+ break;
+ // ^^^^^
+ continue;
+ // ^^^^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_while() {
+ check(
+ r#"
+fn foo() {
+ 'outer: while true {
+ // ^^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: while true {
+ break;
+ 'innermost: while true {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_labeled_block() {
+ check(
+ r#"
+fn foo() {
+ 'outer: {
+ // ^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: {
+ break;
+ 'innermost: {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_unlabeled_loop() {
+ check(
+ r#"
+fn foo() {
+ loop {
+ // ^^^^
+ break$0;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_unlabeled_block_in_loop() {
+ check(
+ r#"
+fn foo() {
+ loop {
+ // ^^^^
+ {
+ break$0;
+ // ^^^^^
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_field_shorthand() {
+ check(
+ r#"
+struct Struct { field: u32 }
+ //^^^^^
+fn function(field: u32) {
+ //^^^^^
+ Struct { field$0 }
+ //^^^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_break() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+
+ loop {
+ break;
+ }
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x = 5;
+ let y = x * 2;
+
+ loop$0 {
+// ^^^^
+ break;
+// ^^^^^
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_yield() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+async fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+
+ 0.await;
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+ async fn foo() {
+// ^^^^^
+ let x = 5;
+ let y = x * 2;
+
+ 0.await$0;
+// ^^^^^
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_exit() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() -> i32 {
+ let x$0 = 5;
+ let y = x * 2;
+
+ if true {
+ return y;
+ }
+
+ 0?
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+fn foo() ->$0 i32 {
+ let x = 5;
+ let y = x * 2;
+
+ if true {
+ return y;
+// ^^^^^^
+ }
+
+ 0?
+// ^
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_break() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: false,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ loop {
+ break$0;
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_yield() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: true,
+ exit_points: true,
+ yield_points: false,
+ };
+
+ check_with_config(
+ r#"
+async$0 fn foo() {
+ 0.await;
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_exit() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: true,
+ exit_points: false,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() ->$0 i32 {
+ if true {
+ return -1;
+ }
+
+ 42
+}"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_multi_local() {
+ check(
+ r#"
+fn foo((
+ foo$0
+ //^^^
+ | foo
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ check(
+ r#"
+fn foo((
+ foo
+ //^^^
+ | foo$0
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ check(
+ r#"
+fn foo((
+ foo
+ //^^^
+ | foo
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo$0;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_trait_impl_methods() {
+ check(
+ r#"
+trait Trait {
+ fn func$0(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func();
+ //^^^^
+}
+"#,
+ );
+ check(
+ r#"
+trait Trait {
+ fn func(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func$0(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func();
+ //^^^^
+}
+"#,
+ );
+ check(
+ r#"
+trait Trait {
+ fn func(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func$0();
+ //^^^^
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
new file mode 100644
index 000000000..59c97f2dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -0,0 +1,390 @@
+mod render;
+
+#[cfg(test)]
+mod tests;
+
+use std::iter;
+
+use either::Either;
+use hir::{HasSource, Semantics};
+use ide_db::{
+ base_db::FileRange,
+ defs::{Definition, IdentClass},
+ famous_defs::FamousDefs,
+ helpers::pick_best_token,
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T};
+
+use crate::{
+ doc_links::token_as_doc_comment,
+ markup::Markup,
+ runnables::{runnable_fn, runnable_mod},
+ FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
+};
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct HoverConfig {
+ pub links_in_hover: bool,
+ pub documentation: Option<HoverDocFormat>,
+}
+
+impl HoverConfig {
+ fn markdown(&self) -> bool {
+ matches!(self.documentation, Some(HoverDocFormat::Markdown))
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum HoverDocFormat {
+ Markdown,
+ PlainText,
+}
+
+#[derive(Debug, Clone)]
+pub enum HoverAction {
+ Runnable(Runnable),
+ Implementation(FilePosition),
+ Reference(FilePosition),
+ GoToType(Vec<HoverGotoTypeData>),
+}
+
+impl HoverAction {
+ fn goto_type_from_targets(db: &RootDatabase, targets: Vec<hir::ModuleDef>) -> Self {
+ let targets = targets
+ .into_iter()
+ .filter_map(|it| {
+ Some(HoverGotoTypeData {
+ mod_path: render::path(
+ db,
+ it.module(db)?,
+ it.name(db).map(|name| name.to_string()),
+ ),
+ nav: it.try_to_nav(db)?,
+ })
+ })
+ .collect();
+ HoverAction::GoToType(targets)
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct HoverGotoTypeData {
+ pub mod_path: String,
+ pub nav: NavigationTarget,
+}
+
+/// Contains the results when hovering over an item
+#[derive(Debug, Default)]
+pub struct HoverResult {
+ pub markup: Markup,
+ pub actions: Vec<HoverAction>,
+}
+
+// Feature: Hover
+//
+// Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
+// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[]
+pub(crate) fn hover(
+ db: &RootDatabase,
+ FileRange { file_id, range }: FileRange,
+ config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+ let sema = &hir::Semantics::new(db);
+ let file = sema.parse(file_id).syntax().clone();
+
+ if !range.is_empty() {
+ return hover_ranged(&file, range, sema, config);
+ }
+ let offset = range.start();
+
+ let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 3,
+ T!['('] | T![')'] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ cov_mark::hit!(no_highlight_on_comment_hover);
+ return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
+ let res = hover_for_definition(sema, file_id, def, &node, config)?;
+ Some(RangeInfo::new(range, res))
+ });
+ }
+
+ let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
+ let descended = if in_attr {
+ [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+ } else {
+ sema.descend_into_macros_with_same_text(original_token.clone())
+ };
+
+ // FIXME: Definition should include known lints and the like instead of having this special case here
+ let hovered_lint = descended.iter().find_map(|token| {
+ let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+ render::try_for_lint(&attr, token)
+ });
+ if let Some(res) = hovered_lint {
+ return Some(RangeInfo::new(original_token.text_range(), res));
+ }
+
+ let result = descended
+ .iter()
+ .filter_map(|token| {
+ let node = token.parent()?;
+ let class = IdentClass::classify_token(sema, token)?;
+ Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
+ })
+ .flatten()
+ .unique_by(|&(def, _)| def)
+ .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
+ .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
+ acc.actions.extend(actions);
+ acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup));
+ acc
+ });
+
+ if result.is_none() {
+ // fallbacks, show keywords or types
+
+ let res = descended.iter().find_map(|token| render::keyword(sema, config, token));
+ if let Some(res) = res {
+ return Some(RangeInfo::new(original_token.text_range(), res));
+ }
+ let res = descended
+ .iter()
+ .find_map(|token| hover_type_fallback(sema, config, token, &original_token));
+ if let Some(_) = res {
+ return res;
+ }
+ }
+ result.map(|mut res: HoverResult| {
+ res.actions = dedupe_or_merge_hover_actions(res.actions);
+ RangeInfo::new(original_token.text_range(), res)
+ })
+}
+
+pub(crate) fn hover_for_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ definition: Definition,
+ node: &SyntaxNode,
+ config: &HoverConfig,
+) -> Option<HoverResult> {
+ let famous_defs = match &definition {
+ Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+ _ => None,
+ };
+ render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
+ HoverResult {
+ markup: render::process_markup(sema.db, definition, &markup, config),
+ actions: show_implementations_action(sema.db, definition)
+ .into_iter()
+ .chain(show_fn_references_action(sema.db, definition))
+ .chain(runnable_action(sema, definition, file_id))
+ .chain(goto_type_action_for_def(sema.db, definition))
+ .collect(),
+ }
+ })
+}
+
+fn hover_ranged(
+ file: &SyntaxNode,
+ range: syntax::TextRange,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+ // FIXME: make this work in attributes
+ let expr_or_pat = file.covering_element(range).ancestors().find_map(|it| {
+ match_ast! {
+ match it {
+ ast::Expr(expr) => Some(Either::Left(expr)),
+ ast::Pat(pat) => Some(Either::Right(pat)),
+ _ => None,
+ }
+ }
+ })?;
+ let res = match &expr_or_pat {
+ Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr),
+ Either::Left(ast::Expr::PrefixExpr(prefix_expr))
+ if prefix_expr.op_kind() == Some(ast::UnaryOp::Deref) =>
+ {
+ render::deref_expr(sema, config, prefix_expr)
+ }
+ _ => None,
+ };
+ let res = res.or_else(|| render::type_info(sema, config, &expr_or_pat));
+ res.map(|it| {
+ let range = match expr_or_pat {
+ Either::Left(it) => it.syntax().text_range(),
+ Either::Right(it) => it.syntax().text_range(),
+ };
+ RangeInfo::new(range, it)
+ })
+}
+
+fn hover_type_fallback(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+ token: &SyntaxToken,
+ original_token: &SyntaxToken,
+) -> Option<RangeInfo<HoverResult>> {
+ let node = token
+ .parent_ancestors()
+ .take_while(|it| !ast::Item::can_cast(it.kind()))
+ .find(|n| ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()))?;
+
+ let expr_or_pat = match_ast! {
+ match node {
+ ast::Expr(it) => Either::Left(it),
+ ast::Pat(it) => Either::Right(it),
+ // If this node is a MACRO_CALL, it means that `descend_into_macros_many` failed to resolve.
+ // (e.g expanding a builtin macro). So we give up here.
+ ast::MacroCall(_it) => return None,
+ _ => return None,
+ }
+ };
+
+ let res = render::type_info(sema, config, &expr_or_pat)?;
+ let range = sema
+ .original_range_opt(&node)
+ .map(|frange| frange.range)
+ .unwrap_or_else(|| original_token.text_range());
+ Some(RangeInfo::new(range, res))
+}
+
+fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ fn to_action(nav_target: NavigationTarget) -> HoverAction {
+ HoverAction::Implementation(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
+ }
+
+ let adt = match def {
+ Definition::Trait(it) => return it.try_to_nav(db).map(to_action),
+ Definition::Adt(it) => Some(it),
+ Definition::SelfType(it) => it.self_ty(db).as_adt(),
+ _ => None,
+ }?;
+ adt.try_to_nav(db).map(to_action)
+}
+
+fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ match def {
+ Definition::Function(it) => it.try_to_nav(db).map(|nav_target| {
+ HoverAction::Reference(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
+ }),
+ _ => None,
+ }
+}
+
+fn runnable_action(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ def: Definition,
+ file_id: FileId,
+) -> Option<HoverAction> {
+ match def {
+ Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
+ Definition::Function(func) => {
+ let src = func.source(sema.db)?;
+ if src.file_id != file_id.into() {
+ cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
+ cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
+ return None;
+ }
+
+ runnable_fn(sema, func).map(HoverAction::Runnable)
+ }
+ _ => None,
+ }
+}
+
+fn goto_type_action_for_def(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+
+ if let Definition::GenericParam(hir::GenericParam::TypeParam(it)) = def {
+ it.trait_bounds(db).into_iter().for_each(|it| push_new_def(it.into()));
+ } else {
+ let ty = match def {
+ Definition::Local(it) => it.ty(db),
+ Definition::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
+ Definition::Field(field) => field.ty(db),
+ Definition::Function(function) => function.ret_type(db),
+ _ => return None,
+ };
+
+ walk_and_push_ty(db, &ty, &mut push_new_def);
+ }
+
+ Some(HoverAction::goto_type_from_targets(db, targets))
+}
+
+fn walk_and_push_ty(
+ db: &RootDatabase,
+ ty: &hir::Type,
+ push_new_def: &mut dyn FnMut(hir::ModuleDef),
+) {
+ ty.walk(db, |t| {
+ if let Some(adt) = t.as_adt() {
+ push_new_def(adt.into());
+ } else if let Some(trait_) = t.as_dyn_trait() {
+ push_new_def(trait_.into());
+ } else if let Some(traits) = t.as_impl_traits(db) {
+ traits.for_each(|it| push_new_def(it.into()));
+ } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+ push_new_def(trait_.into());
+ }
+ });
+}
+
+fn dedupe_or_merge_hover_actions(actions: Vec<HoverAction>) -> Vec<HoverAction> {
+ let mut deduped_actions = Vec::with_capacity(actions.len());
+ let mut go_to_type_targets = FxIndexSet::default();
+
+ let mut seen_implementation = false;
+ let mut seen_reference = false;
+ let mut seen_runnable = false;
+ for action in actions {
+ match action {
+ HoverAction::GoToType(targets) => {
+ go_to_type_targets.extend(targets);
+ }
+ HoverAction::Implementation(..) => {
+ if !seen_implementation {
+ seen_implementation = true;
+ deduped_actions.push(action);
+ }
+ }
+ HoverAction::Reference(..) => {
+ if !seen_reference {
+ seen_reference = true;
+ deduped_actions.push(action);
+ }
+ }
+ HoverAction::Runnable(..) => {
+ if !seen_runnable {
+ seen_runnable = true;
+ deduped_actions.push(action);
+ }
+ }
+ };
+ }
+
+ if !go_to_type_targets.is_empty() {
+ deduped_actions.push(HoverAction::GoToType(go_to_type_targets.into_iter().collect()));
+ }
+
+ deduped_actions
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
new file mode 100644
index 000000000..6c50a4e6a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -0,0 +1,563 @@
+//! Logic for rendering the different hover messages
+use std::fmt::Display;
+
+use either::Either;
+use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HirDisplay, Semantics, TypeInfo};
+use ide_db::{
+ base_db::SourceDatabase,
+ defs::Definition,
+ famous_defs::FamousDefs,
+ generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ algo, ast, match_ast, AstNode, Direction,
+ SyntaxKind::{LET_EXPR, LET_STMT},
+ SyntaxToken, T,
+};
+
+use crate::{
+ doc_links::{remove_links, rewrite_links},
+ hover::walk_and_push_ty,
+ markdown_remove::remove_markdown,
+ HoverAction, HoverConfig, HoverResult, Markup,
+};
+
+pub(super) fn type_info(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+ expr_or_pat: &Either<ast::Expr, ast::Pat>,
+) -> Option<HoverResult> {
+ let TypeInfo { original, adjusted } = match expr_or_pat {
+ Either::Left(expr) => sema.type_of_expr(expr)?,
+ Either::Right(pat) => sema.type_of_pat(pat)?,
+ };
+
+ let mut res = HoverResult::default();
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+ walk_and_push_ty(sema.db, &original, &mut push_new_def);
+
+ res.markup = if let Some(adjusted_ty) = adjusted {
+ walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def);
+ let original = original.display(sema.db).to_string();
+ let adjusted = adjusted_ty.display(sema.db).to_string();
+ let static_text_diff_len = "Coerced to: ".len() - "Type: ".len();
+ format!(
+ "{bt_start}Type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}",
+ original,
+ adjusted,
+ apad = static_text_diff_len + adjusted.len().max(original.len()),
+ opad = original.len(),
+ bt_start = if config.markdown() { "```text\n" } else { "" },
+ bt_end = if config.markdown() { "```\n" } else { "" }
+ )
+ .into()
+    } else if config.markdown() {
+        Markup::fenced_block(&original.display(sema.db))
+    } else {
+        original.display(sema.db).to_string().into()
+    };
+ res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+ Some(res)
+}
+
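+// Hover for the `?` operator: compares the operand's inner type with the type
+// it is propagated as in the enclosing body, special-casing `Option` and `Result`.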
+pub(super) fn try_expr(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+ try_expr: &ast::TryExpr,
+) -> Option<HoverResult> {
+ let inner_ty = sema.type_of_expr(&try_expr.expr()?)?.original;
+ let mut ancestors = try_expr.syntax().ancestors();
+ let mut body_ty = loop {
+ let next = ancestors.next()?;
+ break match_ast! {
+ match next {
+ ast::Fn(fn_) => sema.to_def(&fn_)?.ret_type(sema.db),
+ ast::Item(__) => return None,
+ ast::ClosureExpr(closure) => sema.type_of_expr(&closure.body()?)?.original,
+ ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
+ sema.type_of_expr(&block_expr.into())?.original
+ } else {
+ continue;
+ },
+ _ => continue,
+ }
+ };
+ };
+
+ if inner_ty == body_ty {
+ return None;
+ }
+
+ let mut inner_ty = inner_ty;
+ let mut s = "Try Target".to_owned();
+
+ let adts = inner_ty.as_adt().zip(body_ty.as_adt());
+ if let Some((hir::Adt::Enum(inner), hir::Adt::Enum(body))) = adts {
+ let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax())?.krate());
+        // special case for two Options; there is no value in showing them
+ if let Some(option_enum) = famous_defs.core_option_Option() {
+ if inner == option_enum && body == option_enum {
+ cov_mark::hit!(hover_try_expr_opt_opt);
+ return None;
+ }
+ }
+
+ // special case two results to show the error variants only
+ if let Some(result_enum) = famous_defs.core_result_Result() {
+ if inner == result_enum && body == result_enum {
+ let error_type_args =
+ inner_ty.type_arguments().nth(1).zip(body_ty.type_arguments().nth(1));
+ if let Some((inner, body)) = error_type_args {
+ inner_ty = inner;
+ body_ty = body;
+ s = "Try Error".to_owned();
+ }
+ }
+ }
+ }
+
+ let mut res = HoverResult::default();
+
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+ walk_and_push_ty(sema.db, &inner_ty, &mut push_new_def);
+ walk_and_push_ty(sema.db, &body_ty, &mut push_new_def);
+ res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+
+ let inner_ty = inner_ty.display(sema.db).to_string();
+ let body_ty = body_ty.display(sema.db).to_string();
+ let ty_len_max = inner_ty.len().max(body_ty.len());
+
+ let l = "Propagated as: ".len() - " Type: ".len();
+ let static_text_len_diff = l as isize - s.len() as isize;
+ let tpad = static_text_len_diff.max(0) as usize;
+ let ppad = static_text_len_diff.min(0).abs() as usize;
+
+ res.markup = format!(
+ "{bt_start}{} Type: {:>pad0$}\nPropagated as: {:>pad1$}\n{bt_end}",
+ s,
+ inner_ty,
+ body_ty,
+ pad0 = ty_len_max + tpad,
+ pad1 = ty_len_max + ppad,
+ bt_start = if config.markdown() { "```text\n" } else { "" },
+ bt_end = if config.markdown() { "```\n" } else { "" }
+ )
+ .into();
+ Some(res)
+}
+
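+// Hover for a `*expr` prefix expression: shows the type being dereferenced,
+// the resulting type and, if a coercion applies, the coerced type.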
+pub(super) fn deref_expr(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+ deref_expr: &ast::PrefixExpr,
+) -> Option<HoverResult> {
+ let inner_ty = sema.type_of_expr(&deref_expr.expr()?)?.original;
+ let TypeInfo { original, adjusted } =
+ sema.type_of_expr(&ast::Expr::from(deref_expr.clone()))?;
+
+ let mut res = HoverResult::default();
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+ walk_and_push_ty(sema.db, &inner_ty, &mut push_new_def);
+ walk_and_push_ty(sema.db, &original, &mut push_new_def);
+
+ res.markup = if let Some(adjusted_ty) = adjusted {
+ walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def);
+ let original = original.display(sema.db).to_string();
+ let adjusted = adjusted_ty.display(sema.db).to_string();
+ let inner = inner_ty.display(sema.db).to_string();
+ let type_len = "To type: ".len();
+ let coerced_len = "Coerced to: ".len();
+ let deref_len = "Dereferenced from: ".len();
+ let max_len = (original.len() + type_len)
+ .max(adjusted.len() + coerced_len)
+ .max(inner.len() + deref_len);
+ format!(
+ "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}",
+ inner,
+ original,
+ adjusted,
+ ipad = max_len - deref_len,
+ apad = max_len - type_len,
+ opad = max_len - coerced_len,
+ bt_start = if config.markdown() { "```text\n" } else { "" },
+ bt_end = if config.markdown() { "```\n" } else { "" }
+ )
+ .into()
+ } else {
+ let original = original.display(sema.db).to_string();
+ let inner = inner_ty.display(sema.db).to_string();
+ let type_len = "To type: ".len();
+ let deref_len = "Dereferenced from: ".len();
+ let max_len = (original.len() + type_len).max(inner.len() + deref_len);
+ format!(
+ "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\n{bt_end}",
+ inner,
+ original,
+ ipad = max_len - deref_len,
+ apad = max_len - type_len,
+ bt_start = if config.markdown() { "```text\n" } else { "" },
+ bt_end = if config.markdown() { "```\n" } else { "" }
+ )
+ .into()
+ };
+ res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+
+ Some(res)
+}
+
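+// Hover for keyword tokens: resolves the matching documentation module in std
+// and renders its docs.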
+pub(super) fn keyword(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+ token: &SyntaxToken,
+) -> Option<HoverResult> {
+    if !token.kind().is_keyword() || config.documentation.is_none() {
+ return None;
+ }
+ let parent = token.parent()?;
+ let famous_defs = FamousDefs(sema, sema.scope(&parent)?.krate());
+
+ let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);
+
+ let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
+ let docs = doc_owner.attrs(sema.db).docs()?;
+ let markup = process_markup(
+ sema.db,
+ Definition::Module(doc_owner),
+ &markup(Some(docs.into()), description, None)?,
+ config,
+ );
+ Some(HoverResult { markup, actions })
+}
+
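+// Hover for lint and feature names inside `#[allow]`/`#[deny]`/`#[forbid]`/
+// `#[warn]` and `#[feature]` attributes, looked up in the generated lint tables.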
+pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option<HoverResult> {
+ let (path, tt) = attr.as_simple_call()?;
+ if !tt.syntax().text_range().contains(token.text_range().start()) {
+ return None;
+ }
+ let (is_clippy, lints) = match &*path {
+ "feature" => (false, FEATURES),
+ "allow" | "deny" | "forbid" | "warn" => {
+ let is_clippy = algo::non_trivia_sibling(token.clone().into(), Direction::Prev)
+ .filter(|t| t.kind() == T![:])
+ .and_then(|t| algo::non_trivia_sibling(t, Direction::Prev))
+ .filter(|t| t.kind() == T![:])
+ .and_then(|t| algo::non_trivia_sibling(t, Direction::Prev))
+ .map_or(false, |t| {
+ t.kind() == T![ident] && t.into_token().map_or(false, |t| t.text() == "clippy")
+ });
+ if is_clippy {
+ (true, CLIPPY_LINTS)
+ } else {
+ (false, DEFAULT_LINTS)
+ }
+ }
+ _ => return None,
+ };
+
+ let tmp;
+ let needle = if is_clippy {
+ tmp = format!("clippy::{}", token.text());
+ &tmp
+ } else {
+ &*token.text()
+ };
+
+ let lint =
+ lints.binary_search_by_key(&needle, |lint| lint.label).ok().map(|idx| &lints[idx])?;
+ Some(HoverResult {
+ markup: Markup::from(format!("```\n{}\n```\n___\n\n{}", lint.label, lint.description)),
+ ..Default::default()
+ })
+}
+
+pub(super) fn process_markup(
+ db: &RootDatabase,
+ def: Definition,
+ markup: &Markup,
+ config: &HoverConfig,
+) -> Markup {
+ let markup = markup.as_str();
+ let markup = if !config.markdown() {
+ remove_markdown(markup)
+ } else if config.links_in_hover {
+ rewrite_links(db, markup, def)
+ } else {
+ remove_links(markup)
+ };
+ Markup::from(markup)
+}
+
+fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> {
+ match def {
+ Definition::Field(f) => Some(f.parent_def(db).name(db)),
+ Definition::Local(l) => l.parent(db).name(db),
+ Definition::Function(f) => match f.as_assoc_item(db)?.container(db) {
+ hir::AssocItemContainer::Trait(t) => Some(t.name(db)),
+ hir::AssocItemContainer::Impl(i) => i.self_ty(db).as_adt().map(|adt| adt.name(db)),
+ },
+ Definition::Variant(e) => Some(e.parent_enum(db).name(db)),
+ _ => None,
+ }
+ .map(|name| name.to_string())
+}
+
+pub(super) fn path(db: &RootDatabase, module: hir::Module, item_name: Option<String>) -> String {
+ let crate_name =
+ db.crate_graph()[module.krate().into()].display_name.as_ref().map(|it| it.to_string());
+ let module_path = module
+ .path_to_root(db)
+ .into_iter()
+ .rev()
+ .flat_map(|it| it.name(db).map(|name| name.to_string()));
+ crate_name.into_iter().chain(module_path).chain(item_name).join("::")
+}
+
+pub(super) fn definition(
+ db: &RootDatabase,
+ def: Definition,
+ famous_defs: Option<&FamousDefs<'_, '_>>,
+ config: &HoverConfig,
+) -> Option<Markup> {
+ let mod_path = definition_mod_path(db, &def);
+ let (label, docs) = match def {
+ Definition::Macro(it) => label_and_docs(db, it),
+ Definition::Field(it) => label_and_docs(db, it),
+ Definition::Module(it) => label_and_docs(db, it),
+ Definition::Function(it) => label_and_docs(db, it),
+ Definition::Adt(it) => label_and_docs(db, it),
+ Definition::Variant(it) => label_and_docs(db, it),
+ Definition::Const(it) => label_value_and_docs(db, it, |it| {
+ let body = it.eval(db);
+ match body {
+ Ok(x) => Some(format!("{}", x)),
+ Err(_) => it.value(db).map(|x| format!("{}", x)),
+ }
+ }),
+ Definition::Static(it) => label_value_and_docs(db, it, |it| it.value(db)),
+ Definition::Trait(it) => label_and_docs(db, it),
+ Definition::TypeAlias(it) => label_and_docs(db, it),
+ Definition::BuiltinType(it) => {
+ return famous_defs
+ .and_then(|fd| builtin(fd, it))
+ .or_else(|| Some(Markup::fenced_block(&it.name())))
+ }
+ Definition::Local(it) => return local(db, it),
+ Definition::SelfType(impl_def) => {
+ impl_def.self_ty(db).as_adt().map(|adt| label_and_docs(db, adt))?
+ }
+ Definition::GenericParam(it) => label_and_docs(db, it),
+ Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db))),
+ // FIXME: We should be able to show more info about these
+ Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
+ Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
+ Definition::DeriveHelper(it) => (format!("derive_helper {}", it.name(db)), None),
+ };
+
+ let docs = match config.documentation {
+ Some(_) => docs.or_else(|| {
+            // docs are missing; for assoc items of trait impls, try to fall back to
+            // the docs of the corresponding item in the trait definition
+ let assoc = def.as_assoc_item(db)?;
+ let trait_ = assoc.containing_trait_impl(db)?;
+ let name = Some(assoc.name(db)?);
+ let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
+ item.docs(db)
+ }),
+ None => None,
+ };
+ let docs = docs.filter(|_| config.documentation.is_some()).map(Into::into);
+ markup(docs, label, mod_path)
+}
+
+fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option<Markup> {
+ let name = attr.name(db);
+ let desc = format!("#[{}]", name);
+
+ let AttributeTemplate { word, list, name_value_str } = match attr.template(db) {
+ Some(template) => template,
+ None => return Some(Markup::fenced_block(&attr.name(db))),
+ };
+ let mut docs = "Valid forms are:".to_owned();
+ if word {
+ format_to!(docs, "\n - #\\[{}]", name);
+ }
+ if let Some(list) = list {
+ format_to!(docs, "\n - #\\[{}({})]", name, list);
+ }
+ if let Some(name_value_str) = name_value_str {
+ format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str);
+ }
+ markup(Some(docs.replace('*', "\\*")), desc, None)
+}
+
+fn label_and_docs<D>(db: &RootDatabase, def: D) -> (String, Option<hir::Documentation>)
+where
+ D: HasAttrs + HirDisplay,
+{
+ let label = def.display(db).to_string();
+ let docs = def.attrs(db).docs();
+ (label, docs)
+}
+
+fn label_value_and_docs<D, E, V>(
+ db: &RootDatabase,
+ def: D,
+ value_extractor: E,
+) -> (String, Option<hir::Documentation>)
+where
+ D: HasAttrs + HirDisplay,
+ E: Fn(&D) -> Option<V>,
+ V: Display,
+{
+ let label = if let Some(value) = value_extractor(&def) {
+ format!("{} = {}", def.display(db), value)
+ } else {
+ def.display(db).to_string()
+ };
+ let docs = def.attrs(db).docs();
+ (label, docs)
+}
+
+fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> {
+ if let Definition::GenericParam(_) = def {
+ return None;
+ }
+ def.module(db).map(|module| path(db, module, definition_owner_name(db, def)))
+}
+
+fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Option<Markup> {
+ let mut buf = String::new();
+
+ if let Some(mod_path) = mod_path {
+ if !mod_path.is_empty() {
+ format_to!(buf, "```rust\n{}\n```\n\n", mod_path);
+ }
+ }
+ format_to!(buf, "```rust\n{}\n```", desc);
+
+ if let Some(doc) = docs {
+ format_to!(buf, "\n___\n\n{}", doc);
+ }
+ Some(buf.into())
+}
+
+fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Option<Markup> {
+ // std exposes prim_{} modules with docstrings on the root to document the builtins
+ let primitive_mod = format!("prim_{}", builtin.name());
+ let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
+ let docs = doc_owner.attrs(famous_defs.0.db).docs()?;
+ markup(Some(docs.into()), builtin.name().to_string(), None)
+}
+
+fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
+ let db = famous_defs.0.db;
+ let std_crate = famous_defs.std()?;
+ let std_root_module = std_crate.root_module(db);
+ std_root_module
+ .children(db)
+ .find(|module| module.name(db).map_or(false, |module| module.to_string() == name))
+}
+
+fn local(db: &RootDatabase, it: hir::Local) -> Option<Markup> {
+ let ty = it.ty(db);
+ let ty = ty.display_truncated(db, None);
+ let is_mut = if it.is_mut(db) { "mut " } else { "" };
+ let desc = match it.source(db).value {
+ Either::Left(ident) => {
+ let name = it.name(db);
+ let let_kw = if ident
+ .syntax()
+ .parent()
+ .map_or(false, |p| p.kind() == LET_STMT || p.kind() == LET_EXPR)
+ {
+ "let "
+ } else {
+ ""
+ };
+ format!("{}{}{}: {}", let_kw, is_mut, name, ty)
+ }
+ Either::Right(_) => format!("{}self: {}", is_mut, ty),
+ };
+ markup(None, desc, None)
+}
+
+struct KeywordHint {
+ description: String,
+ keyword_mod: String,
+ actions: Vec<HoverAction>,
+}
+
+impl KeywordHint {
+ fn new(description: String, keyword_mod: String) -> Self {
+ Self { description, keyword_mod, actions: Vec::default() }
+ }
+}
+
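+// Maps a keyword token to the std `*_keyword` (or `prim_*`) module whose docs
+// should be shown, plus a type description for non-unit expression keywords.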
+fn keyword_hints(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+ parent: syntax::SyntaxNode,
+) -> KeywordHint {
+ match token.kind() {
+ T![await] | T![loop] | T![match] | T![unsafe] | T![as] | T![try] | T![if] | T![else] => {
+ let keyword_mod = format!("{}_keyword", token.text());
+
+ match ast::Expr::cast(parent).and_then(|site| sema.type_of_expr(&site)) {
+ // ignore the unit type ()
+ Some(ty) if !ty.adjusted.as_ref().unwrap_or(&ty.original).is_unit() => {
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+ walk_and_push_ty(sema.db, &ty.original, &mut push_new_def);
+
+ let ty = ty.adjusted();
+ let description = format!("{}: {}", token.text(), ty.display(sema.db));
+
+ KeywordHint {
+ description,
+ keyword_mod,
+ actions: vec![HoverAction::goto_type_from_targets(sema.db, targets)],
+ }
+ }
+ _ => KeywordHint {
+ description: token.text().to_string(),
+ keyword_mod,
+ actions: Vec::new(),
+ },
+ }
+ }
+ T![fn] => {
+ let module = match ast::FnPtrType::cast(parent) {
+ // treat fn keyword inside function pointer type as primitive
+ Some(_) => format!("prim_{}", token.text()),
+ None => format!("{}_keyword", token.text()),
+ };
+ KeywordHint::new(token.text().to_string(), module)
+ }
+ T![Self] => KeywordHint::new(token.text().to_string(), "self_upper_keyword".into()),
+ _ => KeywordHint::new(token.text().to_string(), format!("{}_keyword", token.text())),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
new file mode 100644
index 000000000..867d1f54d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -0,0 +1,5053 @@
+use expect_test::{expect, Expect};
+use ide_db::base_db::{FileLoader, FileRange};
+use syntax::TextRange;
+
+use crate::{fixture, hover::HoverDocFormat, HoverConfig};
+
+fn check_hover_no_result(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap();
+ assert!(hover.is_none(), "hover not expected but found: {:?}", hover.unwrap());
+}
+
+#[track_caller]
+fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::PlainText) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+fn check_actions(ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id, range: position.range_or_empty() },
+ )
+ .unwrap()
+ .unwrap();
+ expect.assert_debug_eq(&hover.info.actions)
+}
+
+fn check_hover_range(ra_fixture: &str, expect: Expect) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ range,
+ )
+ .unwrap()
+ .unwrap();
+ expect.assert_eq(hover.info.markup.as_str())
+}
+
+fn check_hover_range_no_results(ra_fixture: &str) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ range,
+ )
+ .unwrap();
+ assert!(hover.is_none());
+}
+
+#[test]
+fn hover_descend_macros_avoids_duplicates() {
+ check(
+ r#"
+macro_rules! dupe_use {
+ ($local:ident) => {
+ {
+ $local;
+ $local;
+ }
+ }
+}
+fn foo() {
+ let local = 0;
+ dupe_use!(local$0);
+}
+"#,
+ expect![[r#"
+ *local*
+
+ ```rust
+ let local: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_all_macro_descends() {
+ check(
+ r#"
+macro_rules! m {
+ ($name:ident) => {
+ /// Outer
+ fn $name() {}
+
+ mod module {
+ /// Inner
+ fn $name() {}
+ }
+ };
+}
+
+m!(ab$0c);
+ "#,
+ expect![[r#"
+ *abc*
+
+ ```rust
+ test::module
+ ```
+
+ ```rust
+ fn abc()
+ ```
+
+ ---
+
+ Inner
+ ---
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn abc()
+ ```
+
+ ---
+
+ Outer
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_type_of_an_expression() {
+ check(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() {
+ let foo_test = foo()$0;
+}
+"#,
+ expect![[r#"
+ *foo()*
+ ```rust
+ u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_remove_markdown_if_configured() {
+ check_hover_no_markdown(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() {
+ let foo_test = foo()$0;
+}
+"#,
+ expect![[r#"
+ *foo()*
+ u32
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_long_type_of_an_expression() {
+ check(
+ r#"
+struct Scan<A, B, C> { a: A, b: B, c: C }
+struct Iter<I> { inner: I }
+enum Option<T> { Some(T), None }
+
+struct OtherStruct<T> { i: T }
+
+fn scan<A, B, C>(a: A, b: B, c: C) -> Iter<Scan<OtherStruct<A>, B, C>> {
+ Iter { inner: Scan { a, b, c } }
+}
+
+fn main() {
+ let num: i32 = 55;
+ let closure = |memo: &mut u32, value: &u32, _another: &mut u32| -> Option<u32> {
+ Option::Some(*memo + value)
+ };
+ let number = 5u32;
+ let mut iter$0 = scan(OtherStruct { i: num }, closure, number);
+}
+"#,
+ expect![[r#"
+ *iter*
+
+ ```rust
+ let mut iter: Iter<Scan<OtherStruct<OtherStruct<i32>>, |&mut u32, &u32, &mut u32| -> Option<u32>, u32>>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature() {
+ // Single file with result
+ check(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() { let foo_test = fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo() -> u32
+ ```
+ "#]],
+ );
+
+ // Multiple candidates but results are ambiguous.
+ check(
+ r#"
+//- /a.rs
+pub fn foo() -> u32 { 1 }
+
+//- /b.rs
+pub fn foo() -> &str { "" }
+
+//- /c.rs
+pub fn foo(a: u32, b: u32) {}
+
+//- /main.rs
+mod a;
+mod b;
+mod c;
+
+fn main() { let foo_test = fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+ ```rust
+ {unknown}
+ ```
+ "#]],
+ );
+
+ // Use literal `crate` in path
+ check(
+ r#"
+pub struct X;
+
+fn foo() -> crate::X { X }
+
+fn main() { f$0oo(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo() -> crate::X
+ ```
+ "#]],
+ );
+
+ // Check `super` in path
+ check(
+ r#"
+pub struct X;
+
+mod m { pub fn foo() -> super::X { super::X } }
+
+fn main() { m::f$0oo(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::m
+ ```
+
+ ```rust
+ pub fn foo() -> super::X
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_omits_unnamed_where_preds() {
+ check(
+ r#"
+pub fn foo(bar: impl T) { }
+
+fn main() { fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(bar: impl T)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+pub fn foo<V: AsRef<str>>(bar: impl T, baz: V) { }
+
+fn main() { fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo<V>(bar: impl T, baz: V)
+ where
+ V: AsRef<str>,
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature_with_type_params() {
+ check(
+ r#"
+pub fn foo<'a, T: AsRef<str>>(b: &'a T) -> &'a str { }
+
+fn main() { let foo_test = fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo<'a, T>(b: &'a T) -> &'a str
+ where
+ T: AsRef<str>,
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature_on_fn_name() {
+ check(
+ r#"
+pub fn foo$0(a: u32, b: u32) -> u32 {}
+
+fn main() { }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(a: u32, b: u32) -> u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_doc() {
+ check(
+ r#"
+/// # Example
+/// ```
+/// # use std::path::Path;
+/// #
+/// foo(Path::new("hello, world!"))
+/// ```
+pub fn foo$0(_: &Path) {}
+
+fn main() { }
+"#,
+ expect![[r##"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(_: &Path)
+ ```
+
+ ---
+
+ # Example
+
+ ```
+ # use std::path::Path;
+ #
+ foo(Path::new("hello, world!"))
+ ```
+ "##]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_doc_attr_raw_string() {
+ check(
+ r##"
+#[doc = r#"Raw string doc attr"#]
+pub fn foo$0(_: &Path) {}
+
+fn main() { }
+"##,
+ expect![[r##"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(_: &Path)
+ ```
+
+ ---
+
+ Raw string doc attr
+ "##]],
+ );
+}
+
+#[test]
+fn hover_shows_struct_field_info() {
+ // Hovering over the field when instantiating
+ check(
+ r#"
+struct Foo { field_a: u32 }
+
+fn main() {
+ let foo = Foo { field_a$0: 0, };
+}
+"#,
+ expect![[r#"
+ *field_a*
+
+ ```rust
+ test::Foo
+ ```
+
+ ```rust
+ field_a: u32
+ ```
+ "#]],
+ );
+
+ // Hovering over the field in the definition
+ check(
+ r#"
+struct Foo { field_a$0: u32 }
+
+fn main() {
+ let foo = Foo { field_a: 0 };
+}
+"#,
+ expect![[r#"
+ *field_a*
+
+ ```rust
+ test::Foo
+ ```
+
+ ```rust
+ field_a: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_static() {
+ check(
+ r#"const foo$0: u32 = 123;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: u32 = 123 (0x7B)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+const foo$0: u32 = {
+ let x = foo();
+ x + 100
+};"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: u32 = {
+ let x = foo();
+ x + 100
+ }
+ ```
+ "#]],
+ );
+
+ check(
+ r#"static foo$0: u32 = 456;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ static foo: u32 = 456
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_default_generic_types() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz$0 = Test { t: 23u8, k: 33 };
+}"#,
+ expect![[r#"
+ *zz*
+
+ ```rust
+ let zz: Test<i32>
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let $0zz$0 = Test { t: 23u8, k: 33 };
+}"#,
+ expect![[r#"
+ ```rust
+ Test<i32, u8>
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_some() {
+ check(
+ r#"
+enum Option<T> { Some(T) }
+use Option::Some;
+
+fn main() { So$0me(12); }
+"#,
+ expect![[r#"
+ *Some*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ Some(T)
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+enum Option<T> { Some(T) }
+use Option::Some;
+
+fn main() { let b$0ar = Some(12); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ let bar: Option<i32>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_enum_variant() {
+ check(
+ r#"
+enum Option<T> {
+ /// The None variant
+ Non$0e
+}
+"#,
+ expect![[r#"
+ *None*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ None
+ ```
+
+ ---
+
+ The None variant
+ "#]],
+ );
+
+ check(
+ r#"
+enum Option<T> {
+ /// The Some variant
+ Some(T)
+}
+fn main() {
+ let s = Option::Som$0e(12);
+}
+"#,
+ expect![[r#"
+ *Some*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ Some(T)
+ ```
+
+ ---
+
+ The Some variant
+ "#]],
+ );
+}
+
+#[test]
+fn hover_for_local_variable() {
+ check(
+ r#"fn func(foo: i32) { fo$0o; }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_local_variable_pat() {
+ check(
+ r#"fn func(fo$0o: i32) {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_local_var_edge() {
+ check(
+ r#"fn func(foo: i32) { if true { $0foo; }; }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_param_edge() {
+ check(
+ r#"fn func($0foo: i32) {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_param_with_multiple_traits() {
+ check(
+ r#"
+ //- minicore: sized
+ trait Deref {
+ type Target: ?Sized;
+ }
+ trait DerefMut {
+ type Target: ?Sized;
+ }
+ fn f(_x$0: impl Deref<Target=u8> + DerefMut<Target=u8>) {}"#,
+ expect![[r#"
+ *_x*
+
+ ```rust
+ _x: impl Deref<Target = u8> + DerefMut<Target = u8>
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_method_result() {
+ check(
+ r#"
+struct Thing { x: u32 }
+
+impl Thing {
+ fn new() -> Thing { Thing { x: 0 } }
+}
+
+fn main() { let foo_$0test = Thing::new(); }
+"#,
+ expect![[r#"
+ *foo_test*
+
+ ```rust
+ let foo_test: Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_method_exact() {
+ check(
+ r#"
+mod wrapper {
+ pub struct Thing { x: u32 }
+
+ impl Thing {
+ pub fn new() -> Thing { Thing { x: 0 } }
+ }
+}
+
+fn main() { let foo_test = wrapper::Thing::new$0(); }
+"#,
+ expect![[r#"
+ *new*
+
+ ```rust
+ test::wrapper::Thing
+ ```
+
+ ```rust
+ pub fn new() -> Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_const_in_pattern() {
+ check(
+ r#"
+struct X;
+impl X {
+ const C: u32 = 1;
+}
+
+fn main() {
+ match 1 {
+ X::C$0 => {},
+ 2 => {},
+ _ => {}
+ };
+}
+"#,
+ expect![[r#"
+ *C*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const C: u32 = 1
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_self() {
+ check(
+ r#"
+struct Thing { x: u32 }
+impl Thing {
+ fn new() -> Self { Self$0 { x: 0 } }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+struct Thing { x: u32 }
+impl Thing {
+ fn new() -> Self$0 { Self { x: 0 } }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+enum Thing { A }
+impl Thing {
+ pub fn new() -> Self$0 { Thing::A }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ enum Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+ enum Thing { A }
+ impl Thing {
+ pub fn thing(a: Self$0) {}
+ }
+ "#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ enum Thing
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_shadowing_pat() {
+ check(
+ r#"
+fn x() {}
+
+fn y() {
+ let x = 0i32;
+ x$0;
+}
+"#,
+ expect![[r#"
+ *x*
+
+ ```rust
+ let x: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_macro_invocation() {
+ check(
+ r#"
+macro_rules! foo { () => {} }
+
+fn f() { fo$0o!(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro_rules! foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_macro2_invocation() {
+ check(
+ r#"
+/// foo bar
+///
+/// foo bar baz
+macro foo() {}
+
+fn f() { fo$0o!(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro foo
+ ```
+
+ ---
+
+ foo bar
+
+ foo bar baz
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_tuple_field() {
+ check(
+ r#"struct TS(String, i32$0);"#,
+ expect![[r#"
+ *i32*
+
+ ```rust
+ i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_through_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo() {}
+id! {
+ fn bar() { fo$0o(); }
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn foo$0() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_expr_in_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo(bar:u32) { let a = id!(ba$0r); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ bar: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_expr_in_macro_recursive() {
+ check(
+ r#"
+macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
+macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
+fn foo(bar:u32) { let a = id!(ba$0r); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ bar: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_func_in_macro_recursive() {
+ check(
+ r#"
+macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
+macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
+fn bar() -> u32 { 0 }
+fn foo() { let a = id!([0u32, bar($0)] ); }
+"#,
+ expect![[r#"
+ *bar()*
+ ```rust
+ u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_literal_string_in_macro() {
+ check(
+ r#"
+macro_rules! arr { ($($tt:tt)*) => { [$($tt)*] } }
+fn foo() {
+ let mastered_for_itunes = "";
+ let _ = arr!("Tr$0acks", &mastered_for_itunes);
+}
+"#,
+ expect![[r#"
+ *"Tracks"*
+ ```rust
+ &str
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_assert_macro() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! assert {}
+
+fn bar() -> bool { true }
+fn foo() {
+ assert!(ba$0r());
+}
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn bar() -> bool
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_multiple_actions() {
+ check_actions(
+ r#"
+struct Bar;
+struct Foo { bar: Bar }
+
+fn foo(Foo { b$0ar }: &Foo) {}
+ "#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Bar",
+ kind: Struct,
+ description: "struct Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_through_literal_string_in_builtin_macro() {
+ check_hover_no_result(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! format {}
+
+ fn foo() {
+ format!("hel$0lo {}", 0);
+ }
+"#,
+ );
+}
+
+#[test]
+fn test_hover_non_ascii_space_doc() {
+ check(
+ "
+/// <- `\u{3000}` here
+fn foo() { }
+
+fn bar() { fo$0o(); }
+",
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+
+ ---
+
+ \<- ` ` here
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_show_qualifiers() {
+ check(
+ r#"async fn foo$0() {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ async fn foo()
+ ```
+ "#]],
+ );
+ check(
+ r#"pub const unsafe fn foo$0() {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub const unsafe fn foo()
+ ```
+ "#]],
+ );
+    // A top-level `pub(crate)` item is displayed with no visibility qualifier.
+ check(
+ r#"mod m { pub(crate) async unsafe extern "C" fn foo$0() {} }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::m
+ ```
+
+ ```rust
+ pub(crate) async unsafe extern "C" fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_show_types() {
+ check(
+ r#"fn foo$0(a: i32, b:i32) -> i32 { 0 }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_show_identifiers() {
+ check(
+ r#"type foo$0 = fn(a: i32, b: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_no_identifier() {
+ check(
+ r#"type foo$0 = fn(i32, _: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(i32, i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_show_qualifiers() {
+ check_actions(
+ r"unsafe trait foo$0() {}",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 13,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate st$0d;
+//- /std/lib.rs crate:std
+//! Standard library for this test
+//!
+//! Printed?
+//! abc123
+"#,
+ expect![[r#"
+ *std*
+
+ ```rust
+ extern crate std
+ ```
+
+ ---
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
+ );
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as ab$0c;
+//- /std/lib.rs crate:std
+//! Standard library for this test
+//!
+//! Printed?
+//! abc123
+"#,
+ expect![[r#"
+ *abc*
+
+ ```rust
+ extern crate std
+ ```
+
+ ---
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_mod_with_same_name_as_function() {
+ check(
+ r#"
+use self::m$0y::Bar;
+mod my { pub struct Bar; }
+
+fn my() {}
+"#,
+ expect![[r#"
+ *my*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod my
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_comment() {
+ check(
+ r#"
+/// This is an example
+/// multiline doc
+///
+/// # Example
+///
+/// ```
+/// let five = 5;
+///
+/// assert_eq!(6, my_crate::add_one(5));
+/// ```
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r##"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ This is an example
+ multiline doc
+
+ # Example
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ "##]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_attr() {
+ check(
+ r#"
+#[doc = "bar docs"]
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ bar docs
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_attr_multiple_and_mixed() {
+ check(
+ r#"
+/// bar docs 0
+#[doc = "bar docs 1"]
+#[doc = "bar docs 2"]
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ bar docs 0
+ bar docs 1
+ bar docs 2
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_external_url() {
+ check(
+ r#"
+pub struct Foo;
+/// [external](https://www.google.com)
+pub struct B$0ar
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Bar
+ ```
+
+ ---
+
+ [external](https://www.google.com)
+ "#]],
+ );
+}
+
+// Check that we don't rewrite links which we can't identify
+#[test]
+fn test_hover_unknown_target() {
+ check(
+ r#"
+pub struct Foo;
+/// [baz](Baz)
+pub struct B$0ar
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Bar
+ ```
+
+ ---
+
+ [baz](Baz)
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_no_links() {
+ check_hover_no_links(
+ r#"
+/// Test cases:
+/// case 1. bare URL: https://www.example.com/
+/// case 2. inline URL with title: [example](https://www.example.com/)
+/// case 3. code reference: [`Result`]
+/// case 4. code reference but miss footnote: [`String`]
+/// case 5. autolink: <http://www.example.com/>
+/// case 6. email address: <test@example.com>
+/// case 7. reference: [example][example]
+/// case 8. collapsed link: [example][]
+/// case 9. shortcut link: [example]
+/// case 10. inline without URL: [example]()
+/// case 11. reference: [foo][foo]
+/// case 12. reference: [foo][bar]
+/// case 13. collapsed link: [foo][]
+/// case 14. shortcut link: [foo]
+/// case 15. inline without URL: [foo]()
+/// case 16. just escaped text: \[foo]
+/// case 17. inline link: [Foo](foo::Foo)
+///
+/// [`Result`]: ../../std/result/enum.Result.html
+/// [^example]: https://www.example.com/
+pub fn fo$0o() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo()
+ ```
+
+ ---
+
+ Test cases:
+ case 1. bare URL: https://www.example.com/
+ case 2. inline URL with title: [example](https://www.example.com/)
+ case 3. code reference: `Result`
+ case 4. code reference but miss footnote: `String`
+ case 5. autolink: http://www.example.com/
+ case 6. email address: test@example.com
+ case 7. reference: example
+ case 8. collapsed link: example
+ case 9. shortcut link: example
+ case 10. inline without URL: example
+ case 11. reference: foo
+ case 12. reference: foo
+ case 13. collapsed link: foo
+ case 14. shortcut link: foo
+ case 15. inline without URL: foo
+ case 16. just escaped text: \[foo\]
+ case 17. inline link: Foo
+
+ [^example]: https://www.example.com/
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_macro_generated_struct_fn_doc_comment() {
+ cov_mark::check!(hover_macro_generated_struct_fn_doc_comment);
+
+ check(
+ r#"
+macro_rules! bar {
+ () => {
+ struct Bar;
+ impl Bar {
+ /// Do the foo
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!();
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_macro_generated_struct_fn_doc_attr() {
+ cov_mark::check!(hover_macro_generated_struct_fn_doc_attr);
+
+ check(
+ r#"
+macro_rules! bar {
+ () => {
+ struct Bar;
+ impl Bar {
+ #[doc = "Do the foo"]
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!();
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_variadic_function() {
+ check(
+ r#"
+extern "C" {
+ pub fn foo(bar: i32, ...) -> i32;
+}
+
+fn main() { let foo_test = unsafe { fo$0o(1, 2, 3); } }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub unsafe fn foo(bar: i32, ...) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_has_impl_action() {
+ check_actions(
+ r#"trait foo$0() {}"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_has_impl_action() {
+ check_actions(
+ r"struct foo$0() {}",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_union_has_impl_action() {
+ check_actions(
+ r#"union foo$0() {}"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_enum_has_impl_action() {
+ check_actions(
+ r"enum foo$0() { A, B }",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 5,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_self_has_impl_action() {
+ check_actions(
+ r#"struct foo where Self$0:;"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_test_has_action() {
+ check_actions(
+ r#"
+#[test]
+fn foo_$0test() {}
+"#,
+ expect![[r#"
+ [
+ Reference(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 11,
+ },
+ ),
+ Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..24,
+ focus_range: 11..19,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_test_mod_has_action() {
+ check_actions(
+ r#"
+mod tests$0 {
+ #[test]
+ fn foo_test() {}
+}
+"#,
+ expect![[r#"
+ [
+ Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..46,
+ focus_range: 4..9,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_has_goto_type_action() {
+ check_actions(
+ r#"
+struct S{ f1: u32 }
+
+fn main() { let s$0t = S{ f1:0 }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..19,
+ focus_range: 7..8,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_struct_has_goto_type_actions() {
+ check_actions(
+ r#"
+struct Arg(u32);
+struct S<T>{ f1: T }
+
+fn main() { let s$0t = S{ f1:Arg(0) }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..37,
+ focus_range: 24..25,
+ name: "S",
+ kind: Struct,
+ description: "struct S<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Arg",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..16,
+ focus_range: 7..10,
+ name: "Arg",
+ kind: Struct,
+ description: "struct Arg",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_struct_has_flattened_goto_type_actions() {
+ check_actions(
+ r#"
+struct Arg(u32);
+struct S<T>{ f1: T }
+
+fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..37,
+ focus_range: 24..25,
+ name: "S",
+ kind: Struct,
+ description: "struct S<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Arg",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..16,
+ focus_range: 7..10,
+ name: "Arg",
+ kind: Struct,
+ description: "struct Arg",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_tuple_has_goto_type_actions() {
+ check_actions(
+ r#"
+struct A(u32);
+struct B(u32);
+mod M {
+ pub struct C(u32);
+}
+
+fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::A",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..14,
+ focus_range: 7..8,
+ name: "A",
+ kind: Struct,
+ description: "struct A",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..29,
+ focus_range: 22..23,
+ name: "B",
+ kind: Struct,
+ description: "struct B",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::M::C",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 42..60,
+ focus_range: 53..54,
+ name: "C",
+ kind: Struct,
+ description: "pub struct C",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_return_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo() -> impl Foo {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_return_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S;
+fn foo() -> impl Foo<S> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..25,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_return_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+trait Bar {}
+fn foo() -> impl Foo + Bar {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 13..25,
+ focus_range: 19..22,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_return_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+trait Bar<T> {}
+struct S1 {}
+struct S2 {}
+
+fn foo() -> impl Foo<S1> + Bar<S2> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..31,
+ focus_range: 22..25,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S1",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 32..44,
+ focus_range: 39..41,
+ name: "S1",
+ kind: Struct,
+ description: "struct S1",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S2",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 45..57,
+ focus_range: 52..54,
+ name: "S2",
+ kind: Struct,
+ description: "struct S2",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo(ar$0g: &impl Foo) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+trait Bar<T> {}
+struct S{}
+
+fn foo(ar$0g: &impl Foo + Bar<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 13..28,
+ focus_range: 19..22,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 29..39,
+ focus_range: 36..37,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_async_block_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+//- /main.rs crate:main deps:core
+// we don't use minicore here so that this test doesn't randomly fail
+// when someone edits minicore
+struct S;
+fn foo() {
+ let fo$0o = async { S };
+}
+//- /core.rs crate:core
+pub mod future {
+ #[lang = "future_trait"]
+ pub trait Future {}
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "core::future::Future",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 21..69,
+ focus_range: 60..66,
+ name: "Future",
+ kind: Trait,
+ description: "pub trait Future",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "main::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..110,
+ focus_range: 108..109,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_generic_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S {}
+fn foo(ar$0g: &impl Foo<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..27,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_dyn_return_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+struct S;
+impl Foo for S {}
+
+struct B<T>{}
+fn foo() -> B<dyn Foo> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 42..55,
+ focus_range: 49..50,
+ name: "B",
+ kind: Struct,
+ description: "struct B<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_dyn_arg_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo(ar$0g: &dyn Foo) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_dyn_arg_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S {}
+fn foo(ar$0g: &dyn Foo<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..27,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_goto_type_action_links_order() {
+ check_actions(
+ r#"
+trait ImplTrait<T> {}
+trait DynTrait<T> {}
+struct B<T> {}
+struct S {}
+
+fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::ImplTrait",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..21,
+ focus_range: 6..15,
+ name: "ImplTrait",
+ kind: Trait,
+ description: "trait ImplTrait<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 43..57,
+ focus_range: 50..51,
+ name: "B",
+ kind: Struct,
+ description: "struct B<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::DynTrait",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 22..42,
+ focus_range: 28..36,
+ name: "DynTrait",
+ kind: Trait,
+ description: "trait DynTrait<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 58..69,
+ focus_range: 65..66,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_associated_type_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {
+ type Item;
+ fn get(self) -> Self::Item {}
+}
+
+struct Bar{}
+struct S{}
+
+impl Foo for S { type Item = Bar; }
+
+fn test() -> impl Foo { S {} }
+
+fn main() { let s$0t = test().get(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..62,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_const_param_has_goto_type_action() {
+ check_actions(
+ r#"
+struct Bar;
+struct Foo<const BAR: Bar>;
+
+impl<const BAR: Bar> Foo<BAR$0> {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Bar",
+ kind: Struct,
+ description: "struct Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_type_param_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+
+fn foo<T: Foo>(t: T$0){}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_self_has_go_to_type() {
+ check_actions(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self$0) {}
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Foo",
+ kind: Struct,
+ description: "struct Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn hover_displays_normalized_crate_names() {
+ check(
+ r#"
+//- /lib.rs crate:name-with-dashes
+pub mod wrapper {
+ pub struct Thing { x: u32 }
+
+ impl Thing {
+ pub fn new() -> Thing { Thing { x: 0 } }
+ }
+}
+
+//- /main.rs crate:main deps:name-with-dashes
+fn main() { let foo_test = name_with_dashes::wrapper::Thing::new$0(); }
+"#,
+ expect![[r#"
+ *new*
+
+ ```rust
+ name_with_dashes::wrapper::Thing
+ ```
+
+ ```rust
+ pub fn new() -> Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_field_pat_shorthand_ref_match_ergonomics() {
+ check(
+ r#"
+struct S {
+ f: i32,
+}
+
+fn main() {
+ let s = S { f: 0 };
+ let S { f$0 } = &s;
+}
+"#,
+ expect![[r#"
+ *f*
+
+ ```rust
+ f: &i32
+ ```
+ ---
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ f: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_order() {
+ check(
+ r#"
+struct Foo;
+struct S$0T<const C: usize = 1, T = Foo>(T);
+"#,
+ expect![[r#"
+ *ST*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct ST<const C: usize, T = Foo>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_positive_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<1>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<1>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_zero_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<0>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<0>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_negative_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<-1>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<-1>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_bool_literal() {
+ check(
+ r#"
+struct Const<const F: bool>;
+
+fn main() {
+ let v$0alue = Const::<true>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<true>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_char_literal() {
+ check(
+ r#"
+struct Const<const C: char>;
+
+fn main() {
+ let v$0alue = Const::<'🦀'>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<'🦀'>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_param_shows_type() {
+ check(
+ r#"
+struct Foo {}
+impl Foo {
+ fn bar(&sel$0f) {}
+}
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ self: &Foo
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_param_shows_type_for_arbitrary_self_type() {
+ check(
+ r#"
+struct Arc<T>(T);
+struct Foo {}
+impl Foo {
+ fn bar(sel$0f: Arc<Foo>) {}
+}
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ self: Arc<Foo>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_doc_outer_inner() {
+ check(
+ r#"
+/// Be quick;
+mod Foo$0 {
+ //! time is mana
+
+ /// This comment belongs to the function
+ fn foo() {}
+}
+"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod Foo
+ ```
+
+ ---
+
+ Be quick;
+ time is mana
+ "#]],
+ );
+}
+
+#[test]
+fn hover_doc_outer_inner_attribute() {
+ check(
+ r#"
+#[doc = "Be quick;"]
+mod Foo$0 {
+ #![doc = "time is mana"]
+
+ #[doc = "This comment belongs to the function"]
+ fn foo() {}
+}
+"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod Foo
+ ```
+
+ ---
+
+ Be quick;
+ time is mana
+ "#]],
+ );
+}
+
+#[test]
+fn hover_doc_block_style_indented() {
+ check(
+ r#"
+/**
+ foo
+ ```rust
+ let x = 3;
+ ```
+*/
+fn foo$0() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+
+ ---
+
+ foo
+
+ ```rust
+ let x = 3;
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_comments_dont_highlight_parent() {
+ cov_mark::check!(no_highlight_on_comment_hover);
+ check_hover_no_result(
+ r#"
+fn no_hover() {
+ // no$0hover
+}
+"#,
+ );
+}
+
+#[test]
+fn hover_label() {
+ check(
+ r#"
+fn foo() {
+ 'label$0: loop {}
+}
+"#,
+ expect![[r#"
+ *'label*
+
+ ```rust
+ 'label
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_lifetime() {
+ check(
+ r#"fn foo<'lifetime>(_: &'lifetime$0 ()) {}"#,
+ expect![[r#"
+ *'lifetime*
+
+ ```rust
+ 'lifetime
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_type_param() {
+ check(
+ r#"
+//- minicore: sized
+struct Foo<T>(T);
+trait TraitA {}
+trait TraitB {}
+impl<T: TraitA + TraitB> Foo<T$0> where T: Sized {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: TraitA + TraitB
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+struct Foo<T>(T);
+impl<T> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+    // lifetime bounds aren't being tracked yet
+ check(
+ r#"
+//- minicore: sized
+struct Foo<T>(T);
+impl<T: 'static> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_type_param_sized_bounds() {
+ // implicit `: Sized` bound
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct Foo<T>(T);
+impl<T: Trait> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct Foo<T>(T);
+impl<T: Trait + ?Sized> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait + ?Sized
+ ```
+ "#]],
+ );
+}
+
+mod type_param_sized_bounds {
+ use super::*;
+
+ #[test]
+ fn single_implicit() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn single_explicit() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn single_relaxed() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: ?Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: ?Sized
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_implicit() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_explicit() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait + Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_relaxed() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait + ?Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait + ?Sized
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn mixed() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: ?Sized + Sized + Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Sized + ?Sized + Sized + Trait>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+}
+
+#[test]
+fn hover_const_generic_type_alias() {
+ check(
+ r#"
+struct Foo<const LEN: usize>;
+type Fo$0o2 = Foo<2>;
+"#,
+ expect![[r#"
+ *Foo2*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Foo2 = Foo<2>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_param() {
+ check(
+ r#"
+struct Foo<const LEN: usize>;
+impl<const LEN: usize> Foo<LEN$0> {}
+"#,
+ expect![[r#"
+ *LEN*
+
+ ```rust
+ const LEN: usize
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_eval() {
+    // don't show hex for values < 10
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = 1 << 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 8
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show hex for >10
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = (1 << 3) + (1 << 2);
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 12 (0xC)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show original body when const eval fails
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = 2 - 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 2 - 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // don't show hex for negatives
+ check(
+ r#"
+/// This is a doc
+const FOO$0: i32 = 2 - 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: i32 = -1
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ check(
+ r#"
+/// This is a doc
+const FOO$0: &str = "bar";
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: &str = "bar"
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show char literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: char = 'a';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: char = 'a'
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show escaped char literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: char = '\x61';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: char = 'a'
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show byte literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: u8 = b'a';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: u8 = 97 (0x61)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show escaped byte literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: u8 = b'\x61';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: u8 = 97 (0x61)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show float literal
+ check(
+ r#"
+ /// This is a doc
+ const FOO$0: f64 = 1.0234;
+ "#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f64 = 1.0234
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+    // show the float value of an integer-like literal (1f32)
+ check(
+ r#"
+/// This is a doc
+const FOO$0: f32 = 1f32;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f32 = 1.0
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+    // show the f64 value of a suffixed float literal (1.0f64)
+ check(
+ r#"
+/// This is a doc
+const FOO$0: f64 = 1.0f64;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f64 = 1.0
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_pat() {
+ check(
+ r#"
+/// This is a doc
+const FOO: usize = 3;
+fn foo() {
+ match 5 {
+ FOO$0 => (),
+ _ => ()
+ }
+}
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
+fn array_repeat_exp() {
+ check(
+ r#"
+fn main() {
+ let til$0e4 = [0_u32; (4 * 8 * 8) / 32];
+}
+ "#,
+ expect![[r#"
+ *tile4*
+
+ ```rust
+ let tile4: [u32; 8]
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_mod_def() {
+ check(
+ r#"
+//- /main.rs
+mod foo$0;
+//- /foo.rs
+//! For the horde!
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod foo
+ ```
+
+ ---
+
+ For the horde!
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_in_use() {
+ check(
+ r#"
+//! This should not appear
+mod foo {
+ /// But this should appear
+ pub mod bar {}
+}
+use foo::bar::{self$0};
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ test::foo
+ ```
+
+ ```rust
+ mod bar
+ ```
+
+ ---
+
+ But this should appear
+ "#]],
+ )
+}
+
+#[test]
+fn hover_keyword() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() { retur$0n; }
+//- /libstd.rs crate:std
+/// Docs for return_keyword
+mod return_keyword {}
+"#,
+ expect![[r#"
+ *return*
+
+ ```rust
+ return
+ ```
+
+ ---
+
+ Docs for return_keyword
+ "#]],
+ );
+}
+
+#[test]
+fn hover_keyword_doc() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn foo() {
+ let bar = mov$0e || {};
+}
+//- /libstd.rs crate:std
+#[doc(keyword = "move")]
+/// [closure]
+/// [closures][closure]
+/// [threads]
+/// <https://doc.rust-lang.org/nightly/book/ch13-01-closures.html>
+///
+/// [closure]: ../book/ch13-01-closures.html
+/// [threads]: ../book/ch16-01-threads.html#using-move-closures-with-threads
+mod move_keyword {}
+"#,
+ expect![[r##"
+ *move*
+
+ ```rust
+ move
+ ```
+
+ ---
+
+ [closure](https://doc.rust-lang.org/nightly/book/ch13-01-closures.html)
+ [closures](https://doc.rust-lang.org/nightly/book/ch13-01-closures.html)
+ [threads](https://doc.rust-lang.org/nightly/book/ch16-01-threads.html#using-move-closures-with-threads)
+ <https://doc.rust-lang.org/nightly/book/ch13-01-closures.html>
+ "##]],
+ );
+}
+
+#[test]
+fn hover_keyword_as_primitive() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+type F = f$0n(i32) -> i32;
+//- /libstd.rs crate:std
+/// Docs for prim_fn
+mod prim_fn {}
+"#,
+ expect![[r#"
+ *fn*
+
+ ```rust
+ fn
+ ```
+
+ ---
+
+ Docs for prim_fn
+ "#]],
+ );
+}
+
+#[test]
+fn hover_builtin() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+const _: &str$0 = "";
+
+//- /libstd.rs crate:std
+/// Docs for prim_str
+/// [`foo`](../std/keyword.foo.html)
+mod prim_str {}
+"#,
+ expect![[r#"
+ *str*
+
+ ```rust
+ str
+ ```
+
+ ---
+
+ Docs for prim_str
+ [`foo`](https://doc.rust-lang.org/nightly/std/keyword.foo.html)
+ "#]],
+ );
+}
+
+#[test]
+fn hover_macro_expanded_function() {
+ check(
+ r#"
+struct S<'a, T>(&'a T);
+trait Clone {}
+macro_rules! foo {
+ () => {
+ fn bar<'t, T: Clone + 't>(s: &mut S<'t, T>, t: u32) -> *mut u32 where
+ 't: 't + 't,
+ for<'a> T: Clone + 'a
+ { 0 as _ }
+ };
+}
+
+foo!();
+
+fn main() {
+ bar$0;
+}
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn bar<'t, T>(s: &mut S<'t, T>, t: u32) -> *mut u32
+ where
+ T: Clone + 't,
+ 't: 't + 't,
+ for<'a> T: Clone + 'a,
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ expect![[r#"
+ *[`TheItem`]*
+
+ ```rust
+ test::theitem
+ ```
+
+ ```rust
+ pub struct TheItem
+ ```
+
+ ---
+
+ This is the item. Cool!
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_assoc_typealias() {
+ check(
+ r#"
+ fn main() {}
+
+trait T1 {
+ type Bar;
+ type Baz;
+}
+
+struct Foo;
+
+mod t2 {
+ pub trait T2 {
+ type Bar;
+ }
+}
+
+use t2::T2;
+
+impl T2 for Foo {
+ type Bar = String;
+}
+
+impl T1 for Foo {
+ type Bar = <Foo as t2::T2>::Ba$0r;
+ // ^^^ unresolvedReference
+}
+ "#,
+ expect![[r#"
+*Bar*
+
+```rust
+test::t2
+```
+
+```rust
+pub type Bar
+```
+"#]],
+ );
+}
+
+#[test]
+fn hover_generic_assoc() {
+ check(
+ r#"
+fn foo<T: A>() where T::Assoc$0: {}
+
+trait A {
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+ check(
+ r#"
+fn foo<T: A>() {
+ let _: <T>::Assoc$0;
+}
+
+trait A {
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+ check(
+ r#"
+trait A where
+ Self::Assoc$0: ,
+{
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn string_shadowed_with_inner_items() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc
+
+/// Custom `String` type.
+struct String;
+
+fn f() {
+ let _: String$0;
+
+ fn inner() {}
+}
+
+//- /alloc.rs crate:alloc
+#[prelude_import]
+pub use string::*;
+
+mod string {
+ /// This is `alloc::String`.
+ pub struct String;
+}
+"#,
+ expect![[r#"
+ *String*
+
+ ```rust
+ main
+ ```
+
+ ```rust
+ struct String
+ ```
+
+ ---
+
+ Custom `String` type.
+ "#]],
+ )
+}
+
+#[test]
+fn function_doesnt_shadow_crate_in_use_tree() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo$0::{foo};
+
+//- /foo.rs crate:foo
+pub fn foo() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_feature() {
+ check(
+ r#"#![feature(box_syntax$0)]"#,
+ expect![[r##"
+ *box_syntax*
+ ```
+ box_syntax
+ ```
+ ___
+
+ # `box_syntax`
+
+ The tracking issue for this feature is: [#49733]
+
+ [#49733]: https://github.com/rust-lang/rust/issues/49733
+
+ See also [`box_patterns`](box-patterns.md)
+
+ ------------------------
+
+ Currently the only stable way to create a `Box` is via the `Box::new` method.
+ Also it is not possible in stable Rust to destructure a `Box` in a match
+ pattern. The unstable `box` keyword can be used to create a `Box`. An example
+ usage would be:
+
+ ```rust
+ #![feature(box_syntax)]
+
+ fn main() {
+ let b = box 5;
+ }
+ ```
+
+ "##]],
+ )
+}
+
+#[test]
+fn hover_lint() {
+ check(
+ r#"#![allow(arithmetic_overflow$0)]"#,
+ expect![[r#"
+ *arithmetic_overflow*
+ ```
+ arithmetic_overflow
+ ```
+ ___
+
+ arithmetic operation overflows
+ "#]],
+ )
+}
+
+#[test]
+fn hover_clippy_lint() {
+ check(
+ r#"#![allow(clippy::almost_swapped$0)]"#,
+ expect![[r#"
+ *almost_swapped*
+ ```
+ clippy::almost_swapped
+ ```
+ ___
+
+ Checks for `foo = bar; bar = foo` sequences.
+ "#]],
+ )
+}
+
+#[test]
+fn hover_attr_path_qualifier() {
+ check(
+ r#"
+//- /foo.rs crate:foo
+
+//- /lib.rs crate:main deps:foo
+#[fo$0o::bar()]
+struct Foo;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_rename() {
+ check(
+ r#"
+use self as foo$0;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+ check(
+ r#"
+mod bar {}
+use bar::{self as foo$0};
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod bar
+ ```
+ "#]],
+ );
+ check(
+ r#"
+mod bar {
+ use super as foo$0;
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+ check(
+ r#"
+use crate as foo$0;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_attribute_in_macro() {
+ check(
+ r#"
+//- minicore:derive
+macro_rules! identity {
+ ($struct:item) => {
+ $struct
+ };
+}
+#[rustc_builtin_macro]
+pub macro Copy {}
+identity!{
+ #[derive(Copy$0)]
+ struct Foo;
+}
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_derive_input() {
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test::foo
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_range_math() {
+ check_hover_range(
+ r#"
+fn f() { let expr = $01 + 2 * 3$0 }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = 1 $0+ 2 * $03 }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = 1 + $02 * 3$0 }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_arrays() {
+ check_hover_range(
+ r#"
+fn f() { let expr = $0[1, 2, 3, 4]$0 }
+"#,
+ expect![[r#"
+ ```rust
+ [i32; 4]
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = [1, 2, $03, 4]$0 }
+"#,
+ expect![[r#"
+ ```rust
+ [i32; 4]
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr = [1, 2, $03$0, 4] }
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_functions() {
+ check_hover_range(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b() { $0f$0(&[1, 2, 3, 4, 5]); }
+"#,
+ expect![[r#"
+ ```rust
+ fn f<i32>(&[i32])
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b() { f($0&[1, 2, 3, 4, 5]$0); }
+"#,
+ expect![[r#"
+ ```rust
+ &[i32; 5]
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_shows_nothing_when_invalid() {
+ check_hover_range_no_results(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b()$0 { f(&[1, 2, 3, 4, 5]); }$0
+"#,
+ );
+
+ check_hover_range_no_results(
+ r#"
+fn f<T>$0(a: &[T]) { }
+fn b() { f(&[1, 2, 3,$0 4, 5]); }
+"#,
+ );
+
+ check_hover_range_no_results(
+ r#"
+fn $0f() { let expr = [1, 2, 3, 4]$0 }
+"#,
+ );
+}
+
+#[test]
+fn hover_range_shows_unit_for_statements() {
+ check_hover_range(
+ r#"
+fn f<T>(a: &[T]) { }
+fn b() { $0f(&[1, 2, 3, 4, 5]); }$0
+"#,
+ expect![[r#"
+ ```rust
+ ()
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn f() { let expr$0 = $0[1, 2, 3, 4] }
+"#,
+ expect![[r#"
+ ```rust
+ ()
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_for_pat() {
+ check_hover_range(
+ r#"
+fn foo() {
+ let $0x$0 = 0;
+}
+"#,
+ expect![[r#"
+ ```rust
+ i32
+ ```"#]],
+ );
+
+ check_hover_range(
+ r#"
+fn foo() {
+ let $0x$0 = "";
+}
+"#,
+ expect![[r#"
+ ```rust
+ &str
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_range_shows_coercions_if_applicable_expr() {
+ check_hover_range(
+ r#"
+fn foo() {
+ let x: &u32 = $0&&&&&0$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Type: &&&&&u32
+ Coerced to: &u32
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+fn foo() {
+ let x: *const u32 = $0&0$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Type: &u32
+ Coerced to: *const u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_range_shows_type_actions() {
+ check_actions(
+ r#"
+struct Foo;
+fn foo() {
+ let x: &Foo = $0&&&&&Foo$0;
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Foo",
+ kind: Struct,
+ description: "struct Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn hover_try_expr_res() {
+ check_hover_range(
+ r#"
+//- minicore:result
+struct FooError;
+
+fn foo() -> Result<(), FooError> {
+ Ok($0Result::<(), FooError>::Ok(())?$0)
+}
+"#,
+ expect![[r#"
+ ```rust
+ ()
+ ```"#]],
+ );
+ check_hover_range(
+ r#"
+//- minicore:result
+struct FooError;
+struct BarError;
+
+fn foo() -> Result<(), FooError> {
+ Ok($0Result::<(), BarError>::Ok(())?$0)
+}
+"#,
+ expect![[r#"
+ ```text
+ Try Error Type: BarError
+ Propagated as: FooError
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_try_expr() {
+ check_hover_range(
+ r#"
+struct NotResult<T, U>(T, U);
+struct Short;
+struct Looooong;
+
+fn foo() -> NotResult<(), Looooong> {
+ $0NotResult((), Short)?$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Try Target Type: NotResult<(), Short>
+ Propagated as: NotResult<(), Looooong>
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+struct NotResult<T, U>(T, U);
+struct Short;
+struct Looooong;
+
+fn foo() -> NotResult<(), Short> {
+ $0NotResult((), Looooong)?$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Try Target Type: NotResult<(), Looooong>
+ Propagated as: NotResult<(), Short>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_try_expr_option() {
+ cov_mark::check!(hover_try_expr_opt_opt);
+ check_hover_range(
+ r#"
+//- minicore: option, try
+
+fn foo() -> Option<()> {
+ $0Some(0)?$0;
+ None
+}
+"#,
+ expect![[r#"
+ ```rust
+ <Option<i32> as Try>::Output
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_deref_expr() {
+ check_hover_range(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct DerefExample<T> {
+ value: T
+}
+
+impl<T> Deref for DerefExample<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.value
+ }
+}
+
+fn foo() {
+ let x = DerefExample { value: 0 };
+ let y: i32 = $0*x$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Dereferenced from: DerefExample<i32>
+ To type: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_deref_expr_with_coercion() {
+ check_hover_range(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct DerefExample<T> {
+ value: T
+}
+
+impl<T> Deref for DerefExample<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.value
+ }
+}
+
+fn foo() {
+ let x = DerefExample { value: &&&&&0 };
+ let y: &i32 = $0*x$0;
+}
+"#,
+ expect![[r#"
+ ```text
+ Dereferenced from: DerefExample<&&&&&i32>
+ To type: &&&&&i32
+ Coerced to: &i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_intra_in_macro() {
+ check(
+ r#"
+macro_rules! foo_macro {
+ ($(#[$attr:meta])* $name:ident) => {
+ $(#[$attr])*
+ pub struct $name;
+ }
+}
+
+foo_macro!(
+ /// Doc comment for [`Foo$0`]
+ Foo
+);
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Foo
+ ```
+
+ ---
+
+ Doc comment for [`Foo`](https://docs.rs/test/*/test/struct.Foo.html)
+ "#]],
+ );
+}
+
+#[test]
+fn hover_intra_in_attr() {
+ check(
+ r#"
+#[doc = "Doc comment for [`Foo$0`]"]
+pub struct Foo;
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Foo
+ ```
+
+ ---
+
+ Doc comment for [`Foo`](https://docs.rs/test/*/test/struct.Foo.html)
+ "#]],
+ );
+}
+
+#[test]
+fn hover_inert_attr() {
+ check(
+ r#"
+#[doc$0 = ""]
+pub struct Foo;
+"#,
+ expect![[r##"
+ *doc*
+
+ ```rust
+ #[doc]
+ ```
+
+ ---
+
+ Valid forms are:
+
+ * \#\[doc(hidden|inline|...)\]
+ * \#\[doc = string\]
+ "##]],
+ );
+ check(
+ r#"
+#[allow$0()]
+pub struct Foo;
+"#,
+ expect![[r##"
+ *allow*
+
+ ```rust
+ #[allow]
+ ```
+
+ ---
+
+ Valid forms are:
+
+ * \#\[allow(lint1, lint2, ..., /\*opt\*/ reason = "...")\]
+ "##]],
+ );
+}
+
+#[test]
+fn hover_dollar_crate() {
+ // $crate should be resolved to the right crate name.
+
+ check(
+ r#"
+//- /main.rs crate:main deps:dep
+dep::m!(KONST$0);
+//- /dep.rs crate:dep
+#[macro_export]
+macro_rules! m {
+ ( $name:ident ) => { const $name: $crate::Type = $crate::Type; };
+}
+
+pub struct Type;
+"#,
+ expect![[r#"
+ *KONST*
+
+ ```rust
+ main
+ ```
+
+ ```rust
+ const KONST: dep::Type = $crate::Type
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_record_variant() {
+ check(
+ r#"
+enum Enum {
+ RecordV$0 { field: u32 }
+}
+"#,
+ expect![[r#"
+ *RecordV*
+
+ ```rust
+ test::Enum
+ ```
+
+ ```rust
+ RecordV { field: u32 }
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_trait_impl_assoc_item_def_doc_forwarding() {
+ check(
+ r#"
+trait T {
+ /// Trait docs
+ fn func() {}
+}
+impl T for () {
+ fn func$0() {}
+}
+"#,
+ expect![[r#"
+ *func*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn func()
+ ```
+
+ ---
+
+ Trait docs
+ "#]],
+ );
+}
+
+#[test]
+fn hover_ranged_macro_call() {
+ check_hover_range(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
+}
+macro_rules! vec {
+ ($elem:expr) => {
+ __rust_force_expr!($elem)
+ };
+}
+
+struct Struct;
+impl Struct {
+ fn foo(self) {}
+}
+
+fn f() {
+ $0vec![Struct]$0;
+}
+"#,
+ expect![[r#"
+ ```rust
+ Struct
+ ```"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
new file mode 100644
index 000000000..5aae669aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -0,0 +1,2818 @@
+use either::Either;
+use hir::{known, Callable, HasVisibility, HirDisplay, Mutability, Semantics, TypeInfo};
+use ide_db::{
+ base_db::FileRange, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap,
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{self, AstNode, HasArgList, HasGenericParams, HasName, UnaryOp},
+ match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize, T,
+};
+
+use crate::FileId;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct InlayHintsConfig {
+ pub render_colons: bool,
+ pub type_hints: bool,
+ pub parameter_hints: bool,
+ pub chaining_hints: bool,
+ pub reborrow_hints: ReborrowHints,
+ pub closure_return_type_hints: ClosureReturnTypeHints,
+ pub binding_mode_hints: bool,
+ pub lifetime_elision_hints: LifetimeElisionHints,
+ pub param_names_for_lifetime_elision_hints: bool,
+ pub hide_named_constructor_hints: bool,
+ pub hide_closure_initialization_hints: bool,
+ pub max_length: Option<usize>,
+ pub closing_brace_hints_min_lines: Option<usize>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ClosureReturnTypeHints {
+ Always,
+ WithBlock,
+ Never,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum LifetimeElisionHints {
+ Always,
+ SkipTrivial,
+ Never,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ReborrowHints {
+ Always,
+ MutableOnly,
+ Never,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum InlayKind {
+ BindingModeHint,
+ ChainingHint,
+ ClosingBraceHint,
+ ClosureReturnTypeHint,
+ GenericParamListHint,
+ ImplicitReborrowHint,
+ LifetimeHint,
+ ParameterHint,
+ TypeHint,
+}
+
+#[derive(Debug)]
+pub struct InlayHint {
+ pub range: TextRange,
+ pub kind: InlayKind,
+ pub label: String,
+ pub tooltip: Option<InlayTooltip>,
+}
+
+#[derive(Debug)]
+pub enum InlayTooltip {
+ String(String),
+ HoverRanged(FileId, TextRange),
+ HoverOffset(FileId, TextSize),
+}
+
+// Feature: Inlay Hints
+//
+// rust-analyzer shows additional information inline with the source code.
+// Editors usually render this using read-only virtual text snippets interspersed with code.
+//
+// rust-analyzer by default shows hints for
+//
+// * types of local variables
+// * names of function arguments
+// * types of chained expressions
+//
+// Optionally, one can enable additional hints for
+//
+// * return types of closure expressions
+// * elided lifetimes
+// * compiler inserted reborrows
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Toggle inlay hints**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
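+//
+// As an illustrative sketch (the names are made up), with type hints enabled a line such as
+// `let len = names.iter().count();` may be rendered roughly as
+// `let len: usize = names.iter().count();`, where `: usize` is virtual text supplied by the
+// server rather than part of the source file.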
+pub(crate) fn inlay_hints(
+ db: &RootDatabase,
+ file_id: FileId,
+ range_limit: Option<FileRange>,
+ config: &InlayHintsConfig,
+) -> Vec<InlayHint> {
+ let _p = profile::span("inlay_hints");
+ let sema = Semantics::new(db);
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+
+ let mut acc = Vec::new();
+
+ if let Some(scope) = sema.scope(&file) {
+ let famous_defs = FamousDefs(&sema, scope.krate());
+
+ let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
+ match range_limit {
+ Some(FileRange { range, .. }) => match file.covering_element(range) {
+ NodeOrToken::Token(_) => return acc,
+ NodeOrToken::Node(n) => n
+ .descendants()
+ .filter(|descendant| range.intersect(descendant.text_range()).is_some())
+ .for_each(hints),
+ },
+ None => file.descendants().for_each(hints),
+ };
+ }
+
+ acc
+}
+
+fn hints(
+ hints: &mut Vec<InlayHint>,
+ famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ node: SyntaxNode,
+) {
+ closing_brace_hints(hints, sema, config, file_id, node.clone());
+ match_ast! {
+ match node {
+ ast::Expr(expr) => {
+ chaining_hints(hints, sema, &famous_defs, config, file_id, &expr);
+ match expr {
+ ast::Expr::CallExpr(it) => param_name_hints(hints, sema, config, ast::Expr::from(it)),
+ ast::Expr::MethodCallExpr(it) => {
+ param_name_hints(hints, sema, config, ast::Expr::from(it))
+ }
+ ast::Expr::ClosureExpr(it) => closure_ret_hints(hints, sema, &famous_defs, config, file_id, it),
+ // We could show reborrows for all expressions, but usually that is just noise to the user
+ // and the main point here is to show why "moving" a mutable reference doesn't necessarily move it
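+                    // (Illustrative sketch: given a hypothetical `fn take(m: &mut T)` and `r: &mut T`,
+                    // the hint renders the call roughly as `take(&mut *r)` rather than a plain `take(r)`.)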
+ ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr),
+ _ => None,
+ }
+ },
+ ast::Pat(it) => {
+ binding_mode_hints(hints, sema, config, &it);
+ if let ast::Pat::IdentPat(it) = it {
+ bind_pat_hints(hints, sema, config, file_id, &it);
+ }
+ Some(())
+ },
+ ast::Item(it) => match it {
+ // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
+ ast::Item::Impl(_) => None,
+ ast::Item::Fn(it) => fn_lifetime_fn_hints(hints, config, it),
+ // static type elisions
+ ast::Item::Static(it) => implicit_static_hints(hints, config, Either::Left(it)),
+ ast::Item::Const(it) => implicit_static_hints(hints, config, Either::Right(it)),
+ _ => None,
+ },
+ // FIXME: fn-ptr type, dyn fn type, and trait object type elisions
+ ast::Type(_) => None,
+ _ => None,
+ }
+ };
+}
+
+fn closing_brace_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ node: SyntaxNode,
+) -> Option<()> {
+ let min_lines = config.closing_brace_hints_min_lines?;
+
+ let name = |it: ast::Name| it.syntax().text_range().start();
+
+ let mut closing_token;
+ let (label, name_offset) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
+ closing_token = item_list.r_curly_token()?;
+
+ let parent = item_list.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::Impl(imp) => {
+ let imp = sema.to_def(&imp)?;
+ let ty = imp.self_ty(sema.db);
+ let trait_ = imp.trait_(sema.db);
+
+ (match trait_ {
+ Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)),
+ None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)),
+ }, None)
+ },
+ ast::Trait(tr) => {
+ (format!("trait {}", tr.name()?), tr.name().map(name))
+ },
+ _ => return None,
+ }
+ }
+ } else if let Some(list) = ast::ItemList::cast(node.clone()) {
+ closing_token = list.r_curly_token()?;
+
+ let module = ast::Module::cast(list.syntax().parent()?)?;
+ (format!("mod {}", module.name()?), module.name().map(name))
+ } else if let Some(block) = ast::BlockExpr::cast(node.clone()) {
+ closing_token = block.stmt_list()?.r_curly_token()?;
+
+ let parent = block.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::Fn(it) => {
+ // FIXME: this could include parameters, but `HirDisplay` prints too much info
+ // and doesn't respect the max length either, so the hints end up way too long
+ (format!("fn {}", it.name()?), it.name().map(name))
+ },
+ ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
+ ast::Const(it) => {
+ if it.underscore_token().is_some() {
+ ("const _".into(), None)
+ } else {
+ (format!("const {}", it.name()?), it.name().map(name))
+ }
+ },
+ _ => return None,
+ }
+ }
+ } else if let Some(mac) = ast::MacroCall::cast(node.clone()) {
+ let last_token = mac.syntax().last_token()?;
+ if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY {
+ return None;
+ }
+ closing_token = last_token;
+
+ (
+ format!("{}!", mac.path()?),
+ mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range().start()),
+ )
+ } else {
+ return None;
+ };
+
+ if let Some(mut next) = closing_token.next_token() {
+ if next.kind() == T![;] {
+ if let Some(tok) = next.next_token() {
+ closing_token = next;
+ next = tok;
+ }
+ }
+ if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) {
+ // Only display the hint if the `}` is the last token on the line
+ return None;
+ }
+ }
+
+ let mut lines = 1;
+ node.text().for_each_chunk(|s| lines += s.matches('\n').count());
+ if lines < min_lines {
+ return None;
+ }
+
+ acc.push(InlayHint {
+ range: closing_token.text_range(),
+ kind: InlayKind::ClosingBraceHint,
+ label,
+ tooltip: name_offset.map(|it| InlayTooltip::HoverOffset(file_id, it)),
+ });
+
+ None
+}
+
+fn implicit_static_hints(
+ acc: &mut Vec<InlayHint>,
+ config: &InlayHintsConfig,
+ statik_or_const: Either<ast::Static, ast::Const>,
+) -> Option<()> {
+ if config.lifetime_elision_hints != LifetimeElisionHints::Always {
+ return None;
+ }
+
+ if let Either::Right(it) = &statik_or_const {
+ if ast::AssocItemList::can_cast(
+ it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()),
+ ) {
+ return None;
+ }
+ }
+
+ if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) {
+ if ty.lifetime().is_none() {
+ let t = ty.amp_token()?;
+ acc.push(InlayHint {
+ range: t.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: "'static".to_owned(),
+ tooltip: Some(InlayTooltip::String("Elided static lifetime".into())),
+ });
+ }
+ }
+
+ Some(())
+}
+
+fn fn_lifetime_fn_hints(
+ acc: &mut Vec<InlayHint>,
+ config: &InlayHintsConfig,
+ func: ast::Fn,
+) -> Option<()> {
+ if config.lifetime_elision_hints == LifetimeElisionHints::Never {
+ return None;
+ }
+
+ let mk_lt_hint = |t: SyntaxToken, label| InlayHint {
+ range: t.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label,
+ tooltip: Some(InlayTooltip::String("Elided lifetime".into())),
+ };
+
+ let param_list = func.param_list()?;
+ let generic_param_list = func.generic_param_list();
+ let ret_type = func.ret_type();
+ let self_param = param_list.self_param().filter(|it| it.amp_token().is_some());
+
+ let is_elided = |lt: &Option<ast::Lifetime>| match lt {
+ Some(lt) => matches!(lt.text().as_str(), "'_"),
+ None => true,
+ };
+
+ let potential_lt_refs = {
+ let mut acc: Vec<_> = vec![];
+ if let Some(self_param) = &self_param {
+ let lifetime = self_param.lifetime();
+ let is_elided = is_elided(&lifetime);
+ acc.push((None, self_param.amp_token(), lifetime, is_elided));
+ }
+ param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| {
+ // FIXME: check path types
+ walk_ty(&ty, &mut |ty| match ty {
+ ast::Type::RefType(r) => {
+ let lifetime = r.lifetime();
+ let is_elided = is_elided(&lifetime);
+ acc.push((
+ pat.as_ref().and_then(|it| match it {
+ ast::Pat::IdentPat(p) => p.name(),
+ _ => None,
+ }),
+ r.amp_token(),
+ lifetime,
+ is_elided,
+ ))
+ }
+ _ => (),
+ })
+ });
+ acc
+ };
+
+ // allocate names
+ let mut gen_idx_name = {
+ let mut gen = (0u8..).map(|idx| match idx {
+ idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]),
+ idx => format!("'{idx}").into(),
+ });
+ move || gen.next().unwrap_or_default()
+ };
+ let mut allocated_lifetimes = vec![];
+
+ let mut used_names: FxHashMap<SmolStr, usize> =
+ match config.param_names_for_lifetime_elision_hints {
+ true => generic_param_list
+ .iter()
+ .flat_map(|gpl| gpl.lifetime_params())
+ .filter_map(|param| param.lifetime())
+ .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0)))
+ .collect(),
+ false => Default::default(),
+ };
+ {
+ let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided);
+ if let Some(_) = &self_param {
+ if let Some(_) = potential_lt_refs.next() {
+ allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
+ // self can't be used as a lifetime, so no need to check for collisions
+ "'self".into()
+ } else {
+ gen_idx_name()
+ });
+ }
+ }
+ potential_lt_refs.for_each(|(name, ..)| {
+ let name = match name {
+ Some(it) if config.param_names_for_lifetime_elision_hints => {
+ if let Some(c) = used_names.get_mut(it.text().as_str()) {
+ *c += 1;
+ SmolStr::from(format!("'{text}{c}", text = it.text().as_str()))
+ } else {
+ used_names.insert(it.text().as_str().into(), 0);
+ SmolStr::from_iter(["\'", it.text().as_str()])
+ }
+ }
+ _ => gen_idx_name(),
+ };
+ allocated_lifetimes.push(name);
+ });
+ }
+
+ // fetch output lifetime if elision rule applies
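+    // (e.g. for a hypothetical `fn f(x: &str) -> &str`, the single elided input lifetime is reused
+    // for the elided output lifetime, so both positions receive the same hint label)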
+ let output = match potential_lt_refs.as_slice() {
+ [(_, _, lifetime, _), ..] if self_param.is_some() || potential_lt_refs.len() == 1 => {
+ match lifetime {
+ Some(lt) => match lt.text().as_str() {
+ "'_" => allocated_lifetimes.get(0).cloned(),
+ "'static" => None,
+ name => Some(name.into()),
+ },
+ None => allocated_lifetimes.get(0).cloned(),
+ }
+ }
+ [..] => None,
+ };
+
+ if allocated_lifetimes.is_empty() && output.is_none() {
+ return None;
+ }
+
+ // apply hints
+ // apply output if required
+ let mut is_trivial = true;
+ if let (Some(output_lt), Some(r)) = (&output, ret_type) {
+ if let Some(ty) = r.ty() {
+ walk_ty(&ty, &mut |ty| match ty {
+ ast::Type::RefType(ty) if ty.lifetime().is_none() => {
+ if let Some(amp) = ty.amp_token() {
+ is_trivial = false;
+ acc.push(mk_lt_hint(amp, output_lt.to_string()));
+ }
+ }
+ _ => (),
+ })
+ }
+ }
+
+ if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial {
+ return None;
+ }
+
+ let mut a = allocated_lifetimes.iter();
+ for (_, amp_token, _, is_elided) in potential_lt_refs {
+ if is_elided {
+ let t = amp_token?;
+ let lt = a.next()?;
+ acc.push(mk_lt_hint(t, lt.to_string()));
+ }
+ }
+
+ // generate generic param list things
+ match (generic_param_list, allocated_lifetimes.as_slice()) {
+ (_, []) => (),
+ (Some(gpl), allocated_lifetimes) => {
+ let angle_tok = gpl.l_angle_token()?;
+ let is_empty = gpl.generic_params().next().is_none();
+ acc.push(InlayHint {
+ range: angle_tok.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: format!(
+ "{}{}",
+ allocated_lifetimes.iter().format(", "),
+ if is_empty { "" } else { ", " }
+ ),
+ tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+ });
+ }
+ (None, allocated_lifetimes) => acc.push(InlayHint {
+ range: func.name()?.syntax().text_range(),
+ kind: InlayKind::GenericParamListHint,
+ label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(),
+ tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+ }),
+ }
+ Some(())
+}
+
+fn closure_ret_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ closure: ast::ClosureExpr,
+) -> Option<()> {
+ if config.closure_return_type_hints == ClosureReturnTypeHints::Never {
+ return None;
+ }
+
+ if closure.ret_type().is_some() {
+ return None;
+ }
+
+ if !closure_has_block_body(&closure)
+ && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock
+ {
+ return None;
+ }
+
+ let param_list = closure.param_list()?;
+
+ let closure = sema.descend_node_into_attributes(closure.clone()).pop()?;
+ let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted();
+ let callable = ty.as_callable(sema.db)?;
+ let ty = callable.return_type();
+ if ty.is_unit() {
+ return None;
+ }
+ acc.push(InlayHint {
+ range: param_list.syntax().text_range(),
+ kind: InlayKind::ClosureReturnTypeHint,
+ label: hint_iterator(sema, &famous_defs, config, &ty)
+ .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()),
+ tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())),
+ });
+ Some(())
+}
+
+fn reborrow_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ expr: &ast::Expr,
+) -> Option<()> {
+ if config.reborrow_hints == ReborrowHints::Never {
+ return None;
+ }
+
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let desc_expr = descended.as_ref().unwrap_or(expr);
+ let mutability = sema.is_implicit_reborrow(desc_expr)?;
+ let label = match mutability {
+ hir::Mutability::Shared if config.reborrow_hints != ReborrowHints::MutableOnly => "&*",
+ hir::Mutability::Mut => "&mut *",
+ _ => return None,
+ };
+ acc.push(InlayHint {
+ range: expr.syntax().text_range(),
+ kind: InlayKind::ImplicitReborrowHint,
+ label: label.to_string(),
+ tooltip: Some(InlayTooltip::String("Compiler inserted reborrow".into())),
+ });
+ Some(())
+}
+
+fn chaining_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ expr: &ast::Expr,
+) -> Option<()> {
+ if !config.chaining_hints {
+ return None;
+ }
+
+ if matches!(expr, ast::Expr::RecordExpr(_)) {
+ return None;
+ }
+
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let desc_expr = descended.as_ref().unwrap_or(expr);
+
+ let mut tokens = expr
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(NodeOrToken::into_token)
+ .filter(|t| match t.kind() {
+ SyntaxKind::WHITESPACE if !t.text().contains('\n') => false,
+ SyntaxKind::COMMENT => false,
+ _ => true,
+ });
+
+    // Chaining can be defined as an expression whose next sibling tokens are a newline and a dot,
+    // ignoring extra whitespace and comments.
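+    //
+    // For example (sketch), in
+    //     foo(a, b)
+    //         .bar()
+    // the receiver `foo(a, b)` gets a chaining hint showing its type.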
+ let next = tokens.next()?.kind();
+ if next == SyntaxKind::WHITESPACE {
+ let mut next_next = tokens.next()?.kind();
+ while next_next == SyntaxKind::WHITESPACE {
+ next_next = tokens.next()?.kind();
+ }
+ if next_next == T![.] {
+ let ty = sema.type_of_expr(desc_expr)?.original;
+ if ty.is_unknown() {
+ return None;
+ }
+ if matches!(expr, ast::Expr::PathExpr(_)) {
+ if let Some(hir::Adt::Struct(st)) = ty.as_adt() {
+ if st.fields(sema.db).is_empty() {
+ return None;
+ }
+ }
+ }
+ acc.push(InlayHint {
+ range: expr.syntax().text_range(),
+ kind: InlayKind::ChainingHint,
+ label: hint_iterator(sema, &famous_defs, config, &ty).unwrap_or_else(|| {
+ ty.display_truncated(sema.db, config.max_length).to_string()
+ }),
+ tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())),
+ });
+ }
+ }
+ Some(())
+}
+
+fn param_name_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ expr: ast::Expr,
+) -> Option<()> {
+ if !config.parameter_hints {
+ return None;
+ }
+
+ let (callable, arg_list) = get_callable(sema, &expr)?;
+ let hints = callable
+ .params(sema.db)
+ .into_iter()
+ .zip(arg_list.args())
+ .filter_map(|((param, _ty), arg)| {
+ // Only annotate hints for expressions that exist in the original file
+ let range = sema.original_range_opt(arg.syntax())?;
+ let (param_name, name_syntax) = match param.as_ref()? {
+ Either::Left(pat) => ("self".to_string(), pat.name()),
+ Either::Right(pat) => match pat {
+ ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()),
+ _ => return None,
+ },
+ };
+ Some((name_syntax, param_name, arg, range))
+ })
+ .filter(|(_, param_name, arg, _)| {
+ !should_hide_param_name_hint(sema, &callable, param_name, arg)
+ })
+ .map(|(param, param_name, _, FileRange { range, .. })| {
+ let mut tooltip = None;
+ if let Some(name) = param {
+ if let hir::CallableKind::Function(f) = callable.kind() {
+ // assert the file is cached so we can map out of macros
+ if let Some(_) = sema.source(f) {
+ tooltip = sema.original_range_opt(name.syntax());
+ }
+ }
+ }
+
+ InlayHint {
+ range,
+ kind: InlayKind::ParameterHint,
+ label: param_name,
+ tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())),
+ }
+ });
+
+ acc.extend(hints);
+ Some(())
+}
+
+fn binding_mode_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ pat: &ast::Pat,
+) -> Option<()> {
+ if !config.binding_mode_hints {
+ return None;
+ }
+
+ let range = pat.syntax().text_range();
+ sema.pattern_adjustments(&pat).iter().for_each(|ty| {
+ let reference = ty.is_reference();
+ let mut_reference = ty.is_mutable_reference();
+ let r = match (reference, mut_reference) {
+ (true, true) => "&mut",
+ (true, false) => "&",
+ _ => return,
+ };
+ acc.push(InlayHint {
+ range,
+ kind: InlayKind::BindingModeHint,
+ label: r.to_string(),
+ tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
+ });
+ });
+ match pat {
+ ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => {
+ let bm = sema.binding_mode_of_pat(pat)?;
+ let bm = match bm {
+ hir::BindingMode::Move => return None,
+ hir::BindingMode::Ref(Mutability::Mut) => "ref mut",
+ hir::BindingMode::Ref(Mutability::Shared) => "ref",
+ };
+ acc.push(InlayHint {
+ range,
+ kind: InlayKind::BindingModeHint,
+ label: bm.to_string(),
+ tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
+ });
+ }
+ _ => (),
+ }
+
+ Some(())
+}
+
+fn bind_pat_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ pat: &ast::IdentPat,
+) -> Option<()> {
+ if !config.type_hints {
+ return None;
+ }
+
+ let descended = sema.descend_node_into_attributes(pat.clone()).pop();
+ let desc_pat = descended.as_ref().unwrap_or(pat);
+ let ty = sema.type_of_pat(&desc_pat.clone().into())?.original;
+
+ if should_not_display_type_hint(sema, config, pat, &ty) {
+ return None;
+ }
+
+ let krate = sema.scope(desc_pat.syntax())?.krate();
+ let famous_defs = FamousDefs(sema, krate);
+ let label = hint_iterator(sema, &famous_defs, config, &ty);
+
+ let label = match label {
+ Some(label) => label,
+ None => {
+ let ty_name = ty.display_truncated(sema.db, config.max_length).to_string();
+ if config.hide_named_constructor_hints
+ && is_named_constructor(sema, pat, &ty_name).is_some()
+ {
+ return None;
+ }
+ ty_name
+ }
+ };
+
+ acc.push(InlayHint {
+ range: match pat.name() {
+ Some(name) => name.syntax().text_range(),
+ None => pat.syntax().text_range(),
+ },
+ kind: InlayKind::TypeHint,
+ label,
+ tooltip: pat
+ .name()
+ .map(|it| it.syntax().text_range())
+ .map(|it| InlayTooltip::HoverRanged(file_id, it)),
+ });
+
+ Some(())
+}
+
+fn is_named_constructor(
+ sema: &Semantics<'_, RootDatabase>,
+ pat: &ast::IdentPat,
+ ty_name: &str,
+) -> Option<()> {
+ let let_node = pat.syntax().parent()?;
+ let expr = match_ast! {
+ match let_node {
+ ast::LetStmt(it) => it.initializer(),
+ ast::LetExpr(it) => it.expr(),
+ _ => None,
+ }
+ }?;
+
+ let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr);
+ // unwrap postfix expressions
+ let expr = match expr {
+ ast::Expr::TryExpr(it) => it.expr(),
+ ast::Expr::AwaitExpr(it) => it.expr(),
+ expr => Some(expr),
+ }?;
+ let expr = match expr {
+ ast::Expr::CallExpr(call) => match call.expr()? {
+ ast::Expr::PathExpr(path) => path,
+ _ => return None,
+ },
+ ast::Expr::PathExpr(path) => path,
+ _ => return None,
+ };
+ let path = expr.path()?;
+
+ let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db);
+ let callable_kind = callable.map(|it| it.kind());
+ let qual_seg = match callable_kind {
+ Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => {
+ path.qualifier()?.segment()
+ }
+ _ => path.segment(),
+ }?;
+
+ let ctor_name = match qual_seg.kind()? {
+ ast::PathSegmentKind::Name(name_ref) => {
+ match qual_seg.generic_arg_list().map(|it| it.generic_args()) {
+ Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")),
+ None => name_ref.to_string(),
+ }
+ }
+ ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
+ _ => return None,
+ };
+ (ctor_name == ty_name).then(|| ())
+}
+
+/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator<Item = Ty>`.
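+///
+/// For example, a binding of type `core::iter::Enumerate<core::slice::Iter<'_, u32>>` may be
+/// hinted as `impl Iterator<Item = (usize, &u32)>` instead.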
+fn hint_iterator(
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ ty: &hir::Type,
+) -> Option<String> {
+ let db = sema.db;
+ let strukt = ty.strip_references().as_adt()?;
+ let krate = strukt.module(db).krate();
+ if krate != famous_defs.core()? {
+ return None;
+ }
+ let iter_trait = famous_defs.core_iter_Iterator()?;
+ let iter_mod = famous_defs.core_iter()?;
+
+ // Assert that this struct comes from `core::iter`.
+ if !(strukt.visibility(db) == hir::Visibility::Public
+ && strukt.module(db).path_to_root(db).contains(&iter_mod))
+ {
+ return None;
+ }
+
+ if ty.impls_trait(db, iter_trait, &[]) {
+ let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item {
+ hir::AssocItem::TypeAlias(alias) if alias.name(db) == known::Item => Some(alias),
+ _ => None,
+ })?;
+ if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) {
+ const LABEL_START: &str = "impl Iterator<Item = ";
+ const LABEL_END: &str = ">";
+
+ let ty_display = hint_iterator(sema, famous_defs, config, &ty)
+ .map(|assoc_type_impl| assoc_type_impl.to_string())
+ .unwrap_or_else(|| {
+ ty.display_truncated(
+ db,
+ config
+ .max_length
+ .map(|len| len.saturating_sub(LABEL_START.len() + LABEL_END.len())),
+ )
+ .to_string()
+ });
+ return Some(format!("{}{}{}", LABEL_START, ty_display, LABEL_END));
+ }
+ }
+
+ None
+}
+
+fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {
+ if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() {
+ let pat_text = bind_pat.to_string();
+ enum_data
+ .variants(db)
+ .into_iter()
+ .map(|variant| variant.name(db).to_smol_str())
+ .any(|enum_name| enum_name == pat_text)
+ } else {
+ false
+ }
+}
+
+fn should_not_display_type_hint(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ bind_pat: &ast::IdentPat,
+ pat_ty: &hir::Type,
+) -> bool {
+ let db = sema.db;
+
+ if pat_ty.is_unknown() {
+ return true;
+ }
+
+ if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() {
+ if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() {
+ return true;
+ }
+ }
+
+ if config.hide_closure_initialization_hints {
+ if let Some(parent) = bind_pat.syntax().parent() {
+ if let Some(it) = ast::LetStmt::cast(parent.clone()) {
+ if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() {
+ if closure_has_block_body(&closure) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+
+ for node in bind_pat.syntax().ancestors() {
+ match_ast! {
+ match node {
+ ast::LetStmt(it) => return it.ty().is_some(),
+ // FIXME: We might wanna show type hints in parameters for non-top level patterns as well
+ ast::Param(it) => return it.ty().is_some(),
+ ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
+ ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
+ ast::IfExpr(_) => return false,
+ ast::WhileExpr(_) => return false,
+ ast::ForExpr(it) => {
+ // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit).
+ // Type of expr should be iterable.
+ return it.in_token().is_none() ||
+ it.iterable()
+ .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr))
+ .map(TypeInfo::original)
+ .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit())
+ },
+ _ => (),
+ }
+ }
+ }
+ false
+}
+
+fn closure_has_block_body(closure: &ast::ClosureExpr) -> bool {
+ matches!(closure.body(), Some(ast::Expr::BlockExpr(_)))
+}
+
+fn should_hide_param_name_hint(
+ sema: &Semantics<'_, RootDatabase>,
+ callable: &hir::Callable,
+ param_name: &str,
+ argument: &ast::Expr,
+) -> bool {
+ // These are to be tested in the `parameter_hint_heuristics` test
+ // hide when:
+ // - the parameter name is a suffix of the function's name
+ // - the argument is a qualified constructing or call expression where the qualifier is an ADT
+    // - exact argument<->parameter match (ignoring leading underscores), or the parameter is a
+    //   prefix/suffix of the argument with `_` splitting it off
+    // - param starts with `ra_fixture`
+    // - param is a well-known name in a unary function
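+    //
+    // For example (sketch), no parameter name hint is shown for `set_width(width)` (the name is
+    // both a suffix of the function name and equal to the argument) or for `iter.map(f)` (a unary
+    // call whose parameter has a single-character name).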
+
+ let param_name = param_name.trim_start_matches('_');
+ if param_name.is_empty() {
+ return true;
+ }
+
+ if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) {
+ return false;
+ }
+
+ let fn_name = match callable.kind() {
+ hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()),
+ _ => None,
+ };
+ let fn_name = fn_name.as_deref();
+ is_param_name_suffix_of_fn_name(param_name, callable, fn_name)
+ || is_argument_similar_to_param_name(argument, param_name)
+ || param_name.starts_with("ra_fixture")
+ || (callable.n_params() == 1 && is_obvious_param(param_name))
+ || is_adt_constructor_similar_to_param_name(sema, argument, param_name)
+}
+
+fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool {
+    // Check whether param_name and the argument are the same, or whether param_name is a
+    // prefix/suffix of the argument (split at `_`).
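+    // e.g. for a parameter `name`, the arguments `name`, `name_str`, and `file_name` all
+    // suppress the hint.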
+ let argument = match get_string_representation(argument) {
+ Some(argument) => argument,
+ None => return false,
+ };
+
+ // std is honestly too panic happy...
+ let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at));
+
+ let param_name = param_name.trim_start_matches('_');
+ let argument = argument.trim_start_matches('_');
+
+ match str_split_at(argument, param_name.len()) {
+ Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => {
+ return rest.is_empty() || rest.starts_with('_');
+ }
+ _ => (),
+ }
+ match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) {
+ Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => {
+ return rest.is_empty() || rest.ends_with('_');
+ }
+ _ => (),
+ }
+ false
+}
+
+/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal.
+///
+/// `fn strip_suffix(suffix)` will be hidden.
+/// `fn stripsuffix(suffix)` will not be hidden.
+fn is_param_name_suffix_of_fn_name(
+ param_name: &str,
+ callable: &Callable,
+ fn_name: Option<&str>,
+) -> bool {
+ match (callable.n_params(), fn_name) {
+ (1, Some(function)) => {
+ function == param_name
+ || function
+ .len()
+ .checked_sub(param_name.len())
+ .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at)))
+ .map_or(false, |(prefix, suffix)| {
+ suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
+ })
+ }
+ _ => false,
+ }
+}
+
+fn is_adt_constructor_similar_to_param_name(
+ sema: &Semantics<'_, RootDatabase>,
+ argument: &ast::Expr,
+ param_name: &str,
+) -> bool {
+ let path = match argument {
+ ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e {
+ ast::Expr::PathExpr(p) => p.path(),
+ _ => None,
+ }),
+ ast::Expr::PathExpr(p) => p.path(),
+ ast::Expr::RecordExpr(r) => r.path(),
+ _ => return false,
+ };
+ let path = match path {
+ Some(it) => it,
+ None => return false,
+ };
+ (|| match sema.resolve_path(&path)? {
+ hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
+ Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => {
+ if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name {
+ return Some(true);
+ }
+ let qual = path.qualifier()?;
+ match sema.resolve_path(&qual)? {
+ hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
+ Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name)
+ }
+ _ => None,
+ }
+ }
+ _ => None,
+ })()
+ .unwrap_or(false)
+}
+
+fn get_string_representation(expr: &ast::Expr) -> Option<String> {
+ match expr {
+ ast::Expr::MethodCallExpr(method_call_expr) => {
+ let name_ref = method_call_expr.name_ref()?;
+ match name_ref.text().as_str() {
+ "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()),
+ name_ref => Some(name_ref.to_owned()),
+ }
+ }
+ ast::Expr::MacroExpr(macro_expr) => {
+ Some(macro_expr.macro_call()?.path()?.segment()?.to_string())
+ }
+ ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
+ ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()),
+ ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
+ ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
+ ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?),
+ _ => None,
+ }
+}
+
+fn is_obvious_param(param_name: &str) -> bool {
+ // avoid displaying hints for common functions like map, filter, etc.
+ // or other obvious words used in std
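+    // e.g. `map(0)` and `filter(0)` in the `parameter_hint_heuristics` test below
+    // stay hint-free because `f` and `predicate` count as obvious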
+ let is_obvious_param_name =
+ matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
+ param_name.len() == 1 || is_obvious_param_name
+}
+
+fn get_callable(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+) -> Option<(hir::Callable, ast::ArgList)> {
+ match expr {
+ ast::Expr::CallExpr(expr) => {
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let expr = descended.as_ref().unwrap_or(expr);
+ sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list())
+ }
+ ast::Expr::MethodCallExpr(expr) => {
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let expr = descended.as_ref().unwrap_or(expr);
+ sema.resolve_method_call_as_callable(expr).zip(expr.arg_list())
+ }
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+ use syntax::{TextRange, TextSize};
+ use test_utils::extract_annotations;
+
+ use crate::inlay_hints::ReborrowHints;
+ use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints};
+
+ use super::ClosureReturnTypeHints;
+
+ const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ render_colons: false,
+ type_hints: false,
+ parameter_hints: false,
+ chaining_hints: false,
+ lifetime_elision_hints: LifetimeElisionHints::Never,
+ closure_return_type_hints: ClosureReturnTypeHints::Never,
+ reborrow_hints: ReborrowHints::Always,
+ binding_mode_hints: false,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ max_length: None,
+ closing_brace_hints_min_lines: None,
+ };
+ const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
+ reborrow_hints: ReborrowHints::Always,
+ closure_return_type_hints: ClosureReturnTypeHints::WithBlock,
+ binding_mode_hints: true,
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..DISABLED_CONFIG
+ };
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ check_with_config(TEST_CONFIG, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_params(ra_fixture: &str) {
+ check_with_config(
+ InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG },
+ ra_fixture,
+ );
+ }
+
+ #[track_caller]
+ fn check_types(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_chains(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ let actual = inlay_hints
+ .into_iter()
+ .map(|it| (it.range, it.label.to_string()))
+ .sorted_by_key(|(range, _)| range.start())
+ .collect::<Vec<_>>();
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[track_caller]
+ fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ expect.assert_debug_eq(&inlay_hints)
+ }
+
+ #[test]
+ fn hints_disabled() {
+ check_with_config(
+ InlayHintsConfig { render_colons: true, ..DISABLED_CONFIG },
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+}"#,
+ );
+ }
+
+ // Parameter hint tests
+
+ #[test]
+ fn param_hints_only() {
+ check_params(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ a
+ 4,
+ //^ b
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_on_closure() {
+ check_params(
+ r#"
+fn main() {
+ let clo = |a: u8, b: u8| a + b;
+ clo(
+ 1,
+ //^ a
+ 2,
+ //^ b
+ );
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name_still_hints() {
+ check_params(
+ r#"
+fn max(x: i32, y: i32) -> i32 { x + y }
+fn main() {
+ let _x = max(
+ 4,
+ //^ x
+ 4,
+ //^ y
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name() {
+ check_params(
+ r#"
+fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ check_params(
+ r#"
+fn param_with_underscore(underscore: i32) -> i32 { underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_same_as_fn_name() {
+ check_params(
+ r#"
+fn foo(foo: i32) -> i32 { foo }
+fn main() {
+ let _x = foo(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn never_hide_param_when_multiple_params() {
+ check_params(
+ r#"
+fn foo(foo: i32, bar: i32) -> i32 { foo + bar }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ foo
+ 8,
+ //^ bar
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_look_through_as_ref_and_clone() {
+ check_params(
+ r#"
+fn foo(bar: i32, baz: f32) {}
+
+fn main() {
+ let bar = 3;
+ let baz = &"baz";
+ let fez = 1.0;
+ foo(bar.clone(), bar.clone());
+ //^^^^^^^^^^^ baz
+ foo(bar.as_ref(), bar.as_ref());
+ //^^^^^^^^^^^^ baz
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn self_param_hints() {
+ check_params(
+ r#"
+struct Foo;
+
+impl Foo {
+ fn foo(self: Self) {}
+ fn bar(self: &Self) {}
+}
+
+fn main() {
+ Foo::foo(Foo);
+ //^^^ self
+ Foo::bar(&Foo);
+ //^^^^ self
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn param_name_hints_show_for_literals() {
+ check_params(
+ r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] }
+fn main() {
+ test(
+ 0xa_b,
+ //^^^^^ a
+ 0xa_b,
+ //^^^^^ b
+ );
+}"#,
+ )
+ }
+
+ #[test]
+ fn function_call_parameter_hint() {
+ check_params(
+ r#"
+//- minicore: option
+struct FileId {}
+struct SmolStr {}
+
+struct TextRange {}
+struct SyntaxKind {}
+struct NavigationTarget {}
+
+struct Test {}
+
+impl Test {
+ fn method(&self, mut param: i32) -> i32 { param * 2 }
+
+ fn from_syntax(
+ file_id: FileId,
+ name: SmolStr,
+ focus_range: Option<TextRange>,
+ full_range: TextRange,
+ kind: SyntaxKind,
+ docs: Option<String>,
+ ) -> NavigationTarget {
+ NavigationTarget {}
+ }
+}
+
+fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 {
+ foo + bar
+}
+
+fn main() {
+ let not_literal = 1;
+ let _: i32 = test_func(1, 2, "hello", 3, not_literal);
+ //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last
+ let t: Test = Test {};
+ t.method(123);
+ //^^^ param
+ Test::method(&t, 3456);
+ //^^ self ^^^^ param
+ Test::from_syntax(
+ FileId {},
+ "impl".into(),
+ //^^^^^^^^^^^^^ name
+ None,
+ //^^^^ focus_range
+ TextRange {},
+ //^^^^^^^^^^^^ full_range
+ SyntaxKind {},
+ //^^^^^^^^^^^^^ kind
+ None,
+ //^^^^ docs
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn parameter_hint_heuristics() {
+ check_params(
+ r#"
+fn check(ra_fixture_thing: &str) {}
+
+fn map(f: i32) {}
+fn filter(predicate: i32) {}
+
+fn strip_suffix(suffix: &str) {}
+fn stripsuffix(suffix: &str) {}
+fn same(same: u32) {}
+fn same2(_same2: u32) {}
+
+fn enum_matches_param_name(completion_kind: CompletionKind) {}
+
+fn foo(param: u32) {}
+fn bar(param_eter: u32) {}
+
+enum CompletionKind {
+ Keyword,
+}
+
+fn non_ident_pat((a, b): (u32, u32)) {}
+
+fn main() {
+ const PARAM: u32 = 0;
+ foo(PARAM);
+ foo(!PARAM);
+ // ^^^^^^ param
+ check("");
+
+ map(0);
+ filter(0);
+
+ strip_suffix("");
+ stripsuffix("");
+ //^^ suffix
+ same(0);
+ same2(0);
+
+ enum_matches_param_name(CompletionKind::Keyword);
+
+ let param = 0;
+ foo(param);
+ foo(param as _);
+ let param_end = 0;
+ foo(param_end);
+ let start_param = 0;
+ foo(start_param);
+ let param2 = 0;
+ foo(param2);
+ //^^^^^^ param
+
+ macro_rules! param {
+ () => {};
+ };
+ foo(param!());
+
+ let param_eter = 0;
+ bar(param_eter);
+ let param_eter_end = 0;
+ bar(param_eter_end);
+ let start_param_eter = 0;
+ bar(start_param_eter);
+ let param_eter2 = 0;
+ bar(param_eter2);
+ //^^^^^^^^^^^ param_eter
+
+ non_ident_pat((0, 0));
+}"#,
+ );
+ }
+
+ // Type-Hint tests
+
+ #[test]
+ fn type_hints_only() {
+ check_types(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn type_hints_bindings_after_at() {
+ check_types(
+ r#"
+//- minicore: option
+fn main() {
+ let ref foo @ bar @ ref mut baz = 0;
+ //^^^ &i32
+ //^^^ i32
+ //^^^ &mut i32
+ let [x @ ..] = [0];
+ //^ [i32; 1]
+ if let x @ Some(_) = Some(0) {}
+ //^ Option<i32>
+ let foo @ (bar, baz) = (3, 3);
+ //^^^ (i32, i32)
+ //^^^ i32
+ //^^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn default_generic_types_should_not_be_displayed() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz = Test { t: 23u8, k: 33 };
+ //^^ Test<i32>
+ let zz_ref = &zz;
+ //^^^^^^ &Test<i32>
+ let test = || zz;
+ //^^^^ || -> Test<i32>
+}"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterators_in_associated_params() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+pub struct SomeIter<T> {}
+
+impl<T> SomeIter<T> {
+ pub fn new() -> Self { SomeIter {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> Iterator for SomeIter<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let mut some_iter = SomeIter::new();
+ //^^^^^^^^^ SomeIter<Take<Repeat<i32>>>
+ some_iter.push(iter::repeat(2).take(2));
+ let iter_of_iters = some_iter.take(2);
+ //^^^^^^^^^^^^^ impl Iterator<Item = impl Iterator<Item = i32>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn infer_call_method_return_associated_types_with_generic() {
+ check_types(
+ r#"
+ pub trait Default {
+ fn default() -> Self;
+ }
+ pub trait Foo {
+ type Bar: Default;
+ }
+
+ pub fn quux<T: Foo>() -> T::Bar {
+ let y = Default::default();
+ //^ <T as Foo>::Bar
+
+ y
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn fn_hints() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+fn foo() -> impl Fn() { loop {} }
+fn foo1() -> impl Fn(f64) { loop {} }
+fn foo2() -> impl Fn(f64, f64) { loop {} }
+fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+fn main() {
+ let foo = foo();
+ // ^^^ impl Fn()
+ let foo = foo1();
+ // ^^^ impl Fn(f64)
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ // ^^^ &dyn Fn(f64, f64) -> u32
+ let foo = foo5();
+ // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
+ let foo = foo6();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo7();
+ // ^^^ *const impl Fn(f64, f64) -> u32
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn check_hint_range_limit() {
+ let fixture = r#"
+ //- minicore: fn, sized
+ fn foo() -> impl Fn() { loop {} }
+ fn foo1() -> impl Fn(f64) { loop {} }
+ fn foo2() -> impl Fn(f64, f64) { loop {} }
+ fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+ fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+ fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+ fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+ fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+ fn main() {
+ let foo = foo();
+ let foo = foo1();
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ let foo = foo5();
+ let foo = foo6();
+ let foo = foo7();
+ }
+ "#;
+ let (analysis, file_id) = fixture::file(fixture);
+ let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis
+ .inlay_hints(
+ &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ file_id,
+ Some(FileRange {
+ file_id,
+ range: TextRange::new(TextSize::from(500), TextSize::from(600)),
+ }),
+ )
+ .unwrap();
+ let actual =
+ inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::<Vec<_>>();
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[test]
+ fn fn_hints_ptr_rpit_fn_parentheses() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+trait Trait {}
+
+fn foo1() -> *const impl Fn() { loop {} }
+fn foo2() -> *const (impl Fn() + Sized) { loop {} }
+fn foo3() -> *const (impl Fn() + ?Sized) { loop {} }
+fn foo4() -> *const (impl Sized + Fn()) { loop {} }
+fn foo5() -> *const (impl ?Sized + Fn()) { loop {} }
+fn foo6() -> *const (impl Fn() + Trait) { loop {} }
+fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} }
+fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} }
+fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} }
+fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} }
+
+fn main() {
+ let foo = foo1();
+ // ^^^ *const impl Fn()
+ let foo = foo2();
+ // ^^^ *const impl Fn()
+ let foo = foo3();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo4();
+ // ^^^ *const impl Fn()
+ let foo = foo5();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo6();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo7();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo8();
+ // ^^^ *const (impl Fn() + Trait + ?Sized)
+ let foo = foo9();
+ // ^^^ *const (impl Fn() -> u8 + ?Sized)
+ let foo = foo10();
+ // ^^^ *const impl Fn()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unit_structs_have_no_type_hints() {
+ check_types(
+ r#"
+//- minicore: result
+struct SyntheticSyntax;
+
+fn main() {
+ match Ok(()) {
+ Ok(_) => (),
+ Err(SyntheticSyntax) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn let_statement() {
+ check_types(
+ r#"
+#[derive(PartialEq)]
+enum Option<T> { None, Some(T) }
+
+#[derive(PartialEq)]
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ struct InnerStruct {}
+
+ let test = 54;
+ //^^^^ i32
+ let test: i32 = 33;
+ let mut test = 33;
+ //^^^^ i32
+ let _ = 22;
+ let test = "test";
+ //^^^^ &str
+ let test = InnerStruct {};
+ //^^^^ InnerStruct
+
+ let test = unresolved();
+
+ let test = (42, 'a');
+ //^^^^ (i32, char)
+ let (a, (b, (c,)) = (2, (3, (9.2,));
+ //^ i32 ^ i32 ^ f64
+ let &x = &92;
+ //^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn if_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ if let None = &test {};
+ if let test = &test {};
+ //^^^^ &Option<Test>
+ if let Some(test) = &test {};
+ //^^^^ &Test
+ if let Some(Test { a, b }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: x, b: y }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+ if let Some(Test { a: None, b: y }) = &test {};
+ //^ &u8
+ if let Some(Test { b: y, .. }) = &test {};
+ //^ &u8
+ if test == None {}
+}"#,
+ );
+ }
+
+ #[test]
+ fn while_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ while let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+}"#,
+ );
+ }
+
+ #[test]
+ fn match_arm_list() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ match Some(Test { a: Some(3), b: 1 }) {
+ None => (),
+ test => (),
+ //^^^^ Option<Test>
+ Some(Test { a: Some(x), b: y }) => (),
+ //^ u32 ^ u8
+ _ => {}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn complete_for_hint() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+}
+
+fn main() {
+ let mut data = Vec::new();
+ //^^^^ Vec<&str>
+ data.push("foo");
+ for i in data {
+ //^ &str
+ let z = i;
+ //^ &str
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_dyn_trait_bounds() {
+ check_types(
+ r#"
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+}
+
+pub struct Box<T> {}
+
+trait Display {}
+trait Sync {}
+
+fn main() {
+ // The block expression wrapping disables the constructor hint hiding logic
+ let _v = { Vec::<Box<&(dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<&(dyn Display + Sync)>>
+ let _v = { Vec::<Box<*const (dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<*const (dyn Display + Sync)>>
+ let _v = { Vec::<Box<dyn Display + Sync>>::new() };
+ //^^ Vec<Box<dyn Display + Sync>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_hints() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter;
+ //^^ MyIter
+ let _x = iter::repeat(0);
+ //^^ impl Iterator<Item = i32>
+ fn generic<T: Clone>(t: T) {
+ let _x = iter::repeat(t);
+ //^^ impl Iterator<Item = T>
+ let _chained = iter::repeat(t).take(10);
+ //^^^^^^^^ impl Iterator<Item = T>
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn skip_constructor_and_enum_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_named_constructor_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: try, option
+use core::ops::ControlFlow;
+
+mod x {
+ pub mod y { pub struct Foo; }
+ pub struct Foo;
+ pub enum AnotherEnum {
+ Variant()
+ };
+}
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+enum Enum {
+ Variant(u32)
+}
+
+fn times2(value: i32) -> i32 {
+ 2 * value
+}
+
+fn main() {
+ let enumb = Enum::Variant(0);
+
+ let strukt = x::Foo;
+ let strukt = x::y::Foo;
+ let strukt = Struct;
+ let strukt = Struct::new();
+
+ let tuple_struct = TupleStruct();
+
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic(0);
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = Generic::<i32>::new();
+ let generic3 = <Generic<i32>>::new();
+ let generic4 = Generic::<i32>(0);
+
+
+ let option = Some(0);
+ // ^^^^^^ Option<i32>
+ let func = times2;
+ // ^^^^ fn times2(i32) -> i32
+ let closure = |x: i32| x * 2;
+ // ^^^^^^^ |i32| -> i32
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shows_constructor_type_hints_when_enabled() {
+ check_types(
+ r#"
+//- minicore: try
+use core::ops::ControlFlow;
+
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+fn main() {
+ let strukt = Struct::new();
+ // ^^^^^^ Struct
+ let tuple_struct = TupleStruct();
+ // ^^^^^^^^^^^^ TupleStruct
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic::<i32>::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = <Generic<i32>>::new();
+ // ^^^^^^^^ Generic<i32>
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+ // ^^^^^^ Struct
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closures() {
+ check(
+ r#"
+fn main() {
+ let mut start = 0;
+ //^^^^^ i32
+ (0..2).for_each(|increment | { start += increment; });
+ //^^^^^^^^^ i32
+
+ let multiply =
+ //^^^^^^^^ |i32, i32| -> i32
+ | a, b| a * b
+ //^ i32 ^ i32
+
+ ;
+
+ let _: i32 = multiply(1, 2);
+ //^ a ^ b
+ let multiply_ref = &multiply;
+ //^^^^^^^^^^^^ &|i32, i32| -> i32
+
+ let return_42 = || 42;
+ //^^^^^^^^^ || -> i32
+ || { 42 };
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn return_type_hints_for_closure_without_block() {
+ check_with_config(
+ InlayHintsConfig {
+ closure_return_type_hints: ClosureReturnTypeHints::Always,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn main() {
+ let a = || { 0 };
+ //^^ i32
+ let b = || 0;
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn skip_closure_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_closure_initialization_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: fn
+fn main() {
+ let multiple_2 = |x: i32| { x * 2 };
+
+ let multiple_2 = |x: i32| x * 2;
+ // ^^^^^^^^^^ |i32| -> i32
+
+ let (not) = (|x: bool| { !x });
+ // ^^^ |bool| -> bool
+
+ let (is_zero, _b) = (|x: usize| { x == 0 }, false);
+ // ^^^^^^^ |usize| -> bool
+ // ^^ bool
+
+ let plus_one = |x| { x + 1 };
+ // ^ u8
+ foo(plus_one);
+
+ let add_mul = bar(|x: u8| { x + 1 });
+ // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized
+
+ let closure = if let Some(6) = add_mul(2).checked_sub(1) {
+ // ^^^^^^^ fn(i32) -> i32
+ |x: i32| { x * 2 }
+ } else {
+ |x: i32| { x * 3 }
+ };
+}
+
+fn foo(f: impl FnOnce(u8) -> u8) {}
+
+fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 {
+ move |x: u8| f(x) * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hint_truncation() {
+ check_with_config(
+ InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
+ r#"
+struct Smol<T>(T);
+
+struct VeryLongOuterName<T>(T);
+
+fn main() {
+ let a = Smol(0u32);
+ //^ Smol<u32>
+ let b = VeryLongOuterName(0usize);
+ //^ VeryLongOuterName<…>
+ let c = Smol(Smol(0u32))
+ //^ Smol<Smol<…>>
+}"#,
+ );
+ }
+
+ // Chaining hint tests
+
+ #[test]
+ fn chaining_hints_ignore_comments() {
+ check_expect(
+ InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C))
+ .into_b() // This is a comment
+ // This is another comment
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 147..172,
+ kind: ChainingHint,
+ label: "B",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..172,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 147..154,
+ kind: ChainingHint,
+ label: "A",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..154,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn chaining_hints_without_newlines() {
+ check_chains(
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C)).into_b().into_c();
+}"#,
+ );
+ }
+
+ #[test]
+ fn struct_access_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A { pub b: B }
+struct B { pub c: C }
+struct C(pub bool);
+struct D;
+
+impl D {
+ fn foo(&self) -> i32 { 42 }
+}
+
+fn main() {
+ let x = A { b: B { c: C(true) } }
+ .b
+ .c
+ .0;
+ let x = D
+ .foo();
+}"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 143..190,
+ kind: ChainingHint,
+ label: "C",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..190,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 143..179,
+ kind: ChainingHint,
+ label: "B",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..179,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A<T>(T);
+struct B<T>(T);
+struct C<T>(T);
+struct X<T,R>(T, R);
+
+impl<T> A<T> {
+ fn new(t: T) -> Self { A(t) }
+ fn into_b(self) -> B<T> { B(self.0) }
+}
+impl<T> B<T> {
+ fn into_c(self) -> C<T> { C(self.0) }
+}
+fn main() {
+ let c = A::new(X(42, true))
+ .into_b()
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 246..283,
+ kind: ChainingHint,
+ label: "B<X<i32, bool>>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..283,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 246..265,
+ kind: ChainingHint,
+ label: "A<X<i32, bool>>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..265,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter.by_ref()
+ .take(5)
+ .by_ref()
+ .take(5)
+ .by_ref();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 174..241,
+ kind: ChainingHint,
+ label: "impl Iterator<Item = ()>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..241,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..224,
+ kind: ChainingHint,
+ label: "impl Iterator<Item = ()>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..224,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..206,
+ kind: ChainingHint,
+ label: "impl Iterator<Item = ()>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..206,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..189,
+ kind: ChainingHint,
+ label: "&mut MyIter",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..189,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn hints_in_attr_call() {
+ check_expect(
+ TEST_CONFIG,
+ r#"
+//- proc_macros: identity, input_replace
+struct Struct;
+impl Struct {
+ fn chain(self) -> Self {
+ self
+ }
+}
+#[proc_macros::identity]
+fn main() {
+ let strukt = Struct;
+ strukt
+ .chain()
+ .chain()
+ .chain();
+ Struct::chain(strukt);
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 124..130,
+ kind: TypeHint,
+ label: "Struct",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 124..130,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 145..185,
+ kind: ChainingHint,
+ label: "Struct",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 145..185,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 145..168,
+ kind: ChainingHint,
+ label: "Struct",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 145..168,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 222..228,
+ kind: ParameterHint,
+ label: "self",
+ tooltip: Some(
+ HoverOffset(
+ FileId(
+ 0,
+ ),
+ 42,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes() {
+ check(
+ r#"
+fn empty() {}
+
+fn no_gpl(a: &()) {}
+ //^^^^^^<'0>
+ // ^'0
+fn empty_gpl<>(a: &()) {}
+ // ^'0 ^'0
+fn partial<'b>(a: &(), b: &'b ()) {}
+// ^'0, $ ^'0
+fn partial<'a>(a: &'a (), b: &()) {}
+// ^'0, $ ^'0
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+// ^^^^^^^^<'0, '1>
+ // ^'0 ^'1
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+// ^^^^^^^^^<'0, '1, '2>
+ //^'0 ^'1 ^'2
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ // ^^^<'0>
+ // ^'0
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_named() {
+ check_with_config(
+ InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+ r#"
+fn nested_in<'named>(named: & &X< &()>) {}
+// ^'named1, 'named2, 'named3, $
+ //^'named1 ^'named2 ^'named3
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_trivial_skip() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::SkipTrivial,
+ ..TEST_CONFIG
+ },
+ r#"
+fn no_gpl(a: &()) {}
+fn empty_gpl<>(a: &()) {}
+fn partial<'b>(a: &(), b: &'b ()) {}
+fn partial<'a>(a: &'a (), b: &()) {}
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_static() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..TEST_CONFIG
+ },
+ r#"
+trait Trait {}
+static S: &str = "";
+// ^'static
+const C: &str = "";
+// ^'static
+const C: &dyn Trait = panic!();
+// ^'static
+
+impl () {
+ const C: &str = "";
+ const C: &dyn Trait = panic!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_implicit_reborrow() {
+ check_with_config(
+ InlayHintsConfig {
+ reborrow_hints: ReborrowHints::Always,
+ parameter_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn __() {
+ let unique = &mut ();
+ let r_mov = unique;
+ let foo: &mut _ = unique;
+ //^^^^^^ &mut *
+ ref_mut_id(unique);
+ //^^^^^^ mut_ref
+ //^^^^^^ &mut *
+ let shared = ref_id(unique);
+ //^^^^^^ shared_ref
+ //^^^^^^ &*
+ let mov = shared;
+ let r_mov: &_ = shared;
+ ref_id(shared);
+ //^^^^^^ shared_ref
+
+ identity(unique);
+ identity(shared);
+}
+fn identity<T>(t: T) -> T {
+ t
+}
+fn ref_mut_id(mut_ref: &mut ()) -> &mut () {
+ mut_ref
+ //^^^^^^^ &mut *
+}
+fn ref_id(shared_ref: &()) -> &() {
+ shared_ref
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_binding_modes() {
+ check_with_config(
+ InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG },
+ r#"
+fn __(
+ (x,): (u32,),
+ (x,): &(u32,),
+ //^^^^&
+ //^ ref
+ (x,): &mut (u32,)
+ //^^^^&mut
+ //^ ref mut
+) {
+ let (x,) = (0,);
+ let (x,) = &(0,);
+ //^^^^ &
+ //^ ref
+ let (x,) = &mut (0,);
+ //^^^^ &mut
+ //^ ref mut
+ let &mut (x,) = &mut (0,);
+ let (ref mut x,) = &mut (0,);
+ //^^^^^^^^^^^^ &mut
+ let &mut (ref mut x,) = &mut (0,);
+ let (mut x,) = &mut (0,);
+ //^^^^^^^^ &mut
+ match (0,) {
+ (x,) => ()
+ }
+ match &(0,) {
+ (x,) => ()
+ //^^^^ &
+ //^ ref
+ }
+ match &mut (0,) {
+ (x,) => ()
+ //^^^^ &mut
+ //^ ref mut
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn hints_closing_brace() {
+ check_with_config(
+ InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG },
+ r#"
+fn a() {}
+
+fn f() {
+} // no hint unless `}` is the last token on the line
+
+fn g() {
+ }
+//^ fn g
+
+fn h<T>(with: T, arguments: u8, ...) {
+ }
+//^ fn h
+
+trait Tr {
+ fn f();
+ fn g() {
+ }
+ //^ fn g
+ }
+//^ trait Tr
+impl Tr for () {
+ }
+//^ impl Tr for ()
+impl dyn Tr {
+ }
+//^ impl dyn Tr
+
+static S0: () = 0;
+static S1: () = {};
+static S2: () = {
+ };
+//^ static S2
+const _: () = {
+ };
+//^ const _
+
+mod m {
+ }
+//^ mod m
+
+m! {}
+m!();
+m!(
+ );
+//^ m!
+
+m! {
+ }
+//^ m!
+
+fn f() {
+ let v = vec![
+ ];
+ }
+//^ fn f
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
new file mode 100644
index 000000000..08621adde
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
@@ -0,0 +1,1087 @@
+use ide_assists::utils::extract_trivial_expression;
+use ide_db::syntax_helpers::node_ext::expr_as_name_ref;
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken, IsString},
+ NodeOrToken, SourceFile, SyntaxElement,
+ SyntaxKind::{self, USE_TREE, WHITESPACE},
+ SyntaxToken, TextRange, TextSize, T,
+};
+
+use text_edit::{TextEdit, TextEditBuilder};
+
+pub struct JoinLinesConfig {
+ pub join_else_if: bool,
+ pub remove_trailing_comma: bool,
+ pub unwrap_trivial_blocks: bool,
+ pub join_assignments: bool,
+}
+
+// Feature: Join Lines
+//
+// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces.
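+//
+// For example, joining the two lines of `foo(1,` and `)` produces `foo(1)`,
+// dropping the trailing comma along the way (see the tests below for more cases).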
+//
+// See
+// https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif[this gif]
+// for the cases that Join Lines handles specially.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Join lines**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[]
+pub(crate) fn join_lines(
+ config: &JoinLinesConfig,
+ file: &SourceFile,
+ range: TextRange,
+) -> TextEdit {
+ let range = if range.is_empty() {
+ let syntax = file.syntax();
+ let text = syntax.text().slice(range.start()..);
+ let pos = match text.find_char('\n') {
+ None => return TextEdit::builder().finish(),
+ Some(pos) => pos,
+ };
+ TextRange::at(range.start() + pos, TextSize::of('\n'))
+ } else {
+ range
+ };
+
+ let mut edit = TextEdit::builder();
+ match file.syntax().covering_element(range) {
+ NodeOrToken::Node(node) => {
+ for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
+ remove_newlines(config, &mut edit, &token, range)
+ }
+ }
+ NodeOrToken::Token(token) => remove_newlines(config, &mut edit, &token, range),
+ };
+ edit.finish()
+}
+
+fn remove_newlines(
+ config: &JoinLinesConfig,
+ edit: &mut TextEditBuilder,
+ token: &SyntaxToken,
+ range: TextRange,
+) {
+ let intersection = match range.intersect(token.text_range()) {
+ Some(range) => range,
+ None => return,
+ };
+
+ let range = intersection - token.text_range().start();
+ let text = token.text();
+ for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
+ let pos: TextSize = (pos as u32).into();
+ let offset = token.text_range().start() + range.start() + pos;
+ if !edit.invalidates_offset(offset) {
+ remove_newline(config, edit, token, offset);
+ }
+ }
+}
+
+fn remove_newline(
+ config: &JoinLinesConfig,
+ edit: &mut TextEditBuilder,
+ token: &SyntaxToken,
+ offset: TextSize,
+) {
+ if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
+ let n_spaces_after_line_break = {
+ let suff = &token.text()[TextRange::new(
+ offset - token.text_range().start() + TextSize::of('\n'),
+ TextSize::of(token.text()),
+ )];
+ suff.bytes().take_while(|&b| b == b' ').count()
+ };
+
+ let mut no_space = false;
+ if let Some(string) = ast::String::cast(token.clone()) {
+ if let Some(range) = string.open_quote_text_range() {
+ cov_mark::hit!(join_string_literal_open_quote);
+ no_space |= range.end() == offset;
+ }
+ if let Some(range) = string.close_quote_text_range() {
+ cov_mark::hit!(join_string_literal_close_quote);
+ no_space |= range.start()
+ == offset
+ + TextSize::of('\n')
+ + TextSize::try_from(n_spaces_after_line_break).unwrap();
+ }
+ }
+
+ let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into());
+ let replace_with = if no_space { "" } else { " " };
+ edit.replace(range, replace_with.to_string());
+ return;
+ }
+
+ // The node is between two other nodes
+ let (prev, next) = match (token.prev_sibling_or_token(), token.next_sibling_or_token()) {
+ (Some(prev), Some(next)) => (prev, next),
+ _ => return,
+ };
+
+ if config.remove_trailing_comma && prev.kind() == T![,] {
+ match next.kind() {
+ T![')'] | T![']'] => {
+ // Removes: trailing comma, newline (incl. surrounding whitespace)
+ edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end()));
+ return;
+ }
+ T!['}'] => {
+ // Removes: comma, newline (incl. surrounding whitespace)
+ let space = match prev.prev_sibling_or_token() {
+ Some(left) => compute_ws(left.kind(), next.kind()),
+ None => " ",
+ };
+ edit.replace(
+ TextRange::new(prev.text_range().start(), token.text_range().end()),
+ space.to_string(),
+ );
+ return;
+ }
+ _ => (),
+ }
+ }
+
+ if config.join_else_if {
+ if let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) {
+ match prev.else_token() {
+ Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else),
+ None => {
+ cov_mark::hit!(join_two_ifs);
+ edit.replace(token.text_range(), " else ".to_string());
+ return;
+ }
+ }
+ }
+ }
+
+ if config.join_assignments {
+ if join_assignments(edit, &prev, &next).is_some() {
+ return;
+ }
+ }
+
+ if config.unwrap_trivial_blocks {
+ // Special case that turns something like:
+ //
+ // ```
+ // my_function({$0
+ // <some-expr>
+ // })
+ // ```
+ //
+ // into `my_function(<some-expr>)`
+ if join_single_expr_block(edit, token).is_some() {
+ return;
+ }
+ // ditto for
+ //
+ // ```
+ // use foo::{$0
+ // bar
+ // };
+ // ```
+ if join_single_use_tree(edit, token).is_some() {
+ return;
+ }
+ }
+
+ if let (Some(_), Some(next)) = (
+ prev.as_token().cloned().and_then(ast::Comment::cast),
+ next.as_token().cloned().and_then(ast::Comment::cast),
+ ) {
+ // Removes: newline (incl. surrounding whitespace), start of the next comment
+ edit.delete(TextRange::new(
+ token.text_range().start(),
+ next.syntax().text_range().start() + TextSize::of(next.prefix()),
+ ));
+ return;
+ }
+
+ // Remove newline but add a computed amount of whitespace characters
+ edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string());
+}
+
+fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
+ let block_expr = ast::BlockExpr::cast(token.parent_ancestors().nth(1)?)?;
+ if !block_expr.is_standalone() {
+ return None;
+ }
+ let expr = extract_trivial_expression(&block_expr)?;
+
+ let block_range = block_expr.syntax().text_range();
+ let mut buf = expr.syntax().text().to_string();
+
+    // A block that is a match arm's body needs a trailing comma once it's unwrapped
+ if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) {
+ if match_arm.comma_token().is_none() {
+ buf.push(',');
+ }
+ }
+
+ edit.replace(block_range, buf);
+
+ Some(())
+}
+
+fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
+ let use_tree_list = ast::UseTreeList::cast(token.parent()?)?;
+ let (tree,) = use_tree_list.use_trees().collect_tuple()?;
+ edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
+ Some(())
+}
+
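+/// Joins a `let` statement that has no initializer with a directly following
+/// assignment to the same binding, e.g. `let foo;` followed by `foo = "bar";`
+/// becomes `let foo = "bar";` (see the `join_assignments` test below).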
+fn join_assignments(
+ edit: &mut TextEditBuilder,
+ prev: &SyntaxElement,
+ next: &SyntaxElement,
+) -> Option<()> {
+ let let_stmt = ast::LetStmt::cast(prev.as_node()?.clone())?;
+ if let_stmt.eq_token().is_some() {
+ cov_mark::hit!(join_assignments_already_initialized);
+ return None;
+ }
+ let let_ident_pat = match let_stmt.pat()? {
+ ast::Pat::IdentPat(it) => it,
+ _ => return None,
+ };
+
+ let expr_stmt = ast::ExprStmt::cast(next.as_node()?.clone())?;
+ let bin_expr = match expr_stmt.expr()? {
+ ast::Expr::BinExpr(it) => it,
+ _ => return None,
+ };
+ if !matches!(bin_expr.op_kind()?, ast::BinaryOp::Assignment { op: None }) {
+ return None;
+ }
+ let lhs = bin_expr.lhs()?;
+ let name_ref = expr_as_name_ref(&lhs)?;
+
+ if name_ref.to_string() != let_ident_pat.syntax().to_string() {
+ cov_mark::hit!(join_assignments_mismatch);
+ return None;
+ }
+
+ edit.delete(let_stmt.semicolon_token()?.text_range().cover(lhs.syntax().text_range()));
+ Some(())
+}
+
+fn as_if_expr(element: &SyntaxElement) -> Option<ast::IfExpr> {
+ let mut node = element.as_node()?.clone();
+ if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
+ node = stmt.expr()?.syntax().clone();
+ }
+ ast::IfExpr::cast(node)
+}
+
+fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {
+ match left {
+ T!['('] | T!['['] => return "",
+ T!['{'] => {
+ if let USE_TREE = right {
+ return "";
+ }
+ }
+ _ => (),
+ }
+ match right {
+ T![')'] | T![']'] => return "",
+ T!['}'] => {
+ if let USE_TREE = left {
+ return "";
+ }
+ }
+ T![.] => return "",
+ _ => (),
+ }
+ " "
+}
+
+#[cfg(test)]
+mod tests {
+ use syntax::SourceFile;
+ use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};
+
+ use super::*;
+
+ fn check_join_lines(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let config = JoinLinesConfig {
+ join_else_if: true,
+ remove_trailing_comma: true,
+ unwrap_trivial_blocks: true,
+ join_assignments: true,
+ };
+
+ let (before_cursor_pos, before) = extract_offset(ra_fixture_before);
+ let file = SourceFile::parse(&before).ok().unwrap();
+
+ let range = TextRange::empty(before_cursor_pos);
+ let result = join_lines(&config, &file, range);
+
+ let actual = {
+ let mut actual = before;
+ result.apply(&mut actual);
+ actual
+ };
+ let actual_cursor_pos = result
+ .apply_to_offset(before_cursor_pos)
+ .expect("cursor position is affected by the edit");
+ let actual = add_cursor(&actual, actual_cursor_pos);
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ fn check_join_lines_sel(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let config = JoinLinesConfig {
+ join_else_if: true,
+ remove_trailing_comma: true,
+ unwrap_trivial_blocks: true,
+ join_assignments: true,
+ };
+
+ let (sel, before) = extract_range(ra_fixture_before);
+ let parse = SourceFile::parse(&before);
+ let result = join_lines(&config, &parse.tree(), sel);
+ let actual = {
+ let mut actual = before;
+ result.apply(&mut actual);
+ actual
+ };
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ #[test]
+ fn test_join_lines_comma() {
+ check_join_lines(
+ r"
+fn foo() {
+ $0foo(1,
+ )
+}
+",
+ r"
+fn foo() {
+ $0foo(1)
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_lambda_block() {
+ check_join_lines(
+ r"
+pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ $0self.incremental_reparse(edit).unwrap_or_else(|| {
+ self.full_reparse(edit)
+ })
+}
+",
+ r"
+pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ $0self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ foo($0{
+ 92
+ })
+}",
+ r"
+fn foo() {
+ foo($092)
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_diverging_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ loop {
+ match x {
+ 92 => $0{
+ continue;
+ }
+ }
+ }
+}
+ ",
+ r"
+fn foo() {
+ loop {
+ match x {
+ 92 => $0continue,
+ }
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn join_lines_adds_comma_for_block_in_match_arm() {
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ }
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo(),
+ Err(v) => v,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn join_lines_multiline_in_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ match ty {
+ $0 Some(ty) => {
+ match ty {
+ _ => false,
+ }
+ }
+ _ => true,
+ }
+}
+",
+ r"
+fn foo() {
+ match ty {
+ $0 Some(ty) => match ty {
+ _ => false,
+ },
+ _ => true,
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn join_lines_keeps_comma_for_block_in_match_arm() {
+ // We already have a comma
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ },
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo(),
+ Err(v) => v,
+ }
+}",
+ );
+
+ // comma with whitespace between brace and ,
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ } ,
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo() ,
+ Err(v) => v,
+ }
+}",
+ );
+
+ // comma with newline between brace and ,
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ }
+ ,
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo()
+ ,
+ Err(v) => v,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn join_lines_keeps_comma_with_single_arg_tuple() {
+ // A single arg tuple
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ },);
+}",
+ r"
+fn foo() {
+ let x = ($04,);
+}",
+ );
+
+ // single arg tuple with whitespace between brace and comma
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ } ,);
+}",
+ r"
+fn foo() {
+ let x = ($04 ,);
+}",
+ );
+
+ // single arg tuple with newline between brace and comma
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ }
+ ,);
+}",
+ r"
+fn foo() {
+ let x = ($04
+ ,);
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_left() {
+ // No space after the '{'
+ check_join_lines(
+ r"
+$0use syntax::{
+ TextSize, TextRange,
+};",
+ r"
+$0use syntax::{TextSize, TextRange,
+};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_right() {
+ // No space after the '}'
+ check_join_lines(
+ r"
+use syntax::{
+$0 TextSize, TextRange
+};",
+ r"
+use syntax::{
+$0 TextSize, TextRange};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_right_comma() {
+ // No space after the '}'
+ check_join_lines(
+ r"
+use syntax::{
+$0 TextSize, TextRange,
+};",
+ r"
+use syntax::{
+$0 TextSize, TextRange};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_tree() {
+ check_join_lines(
+ r"
+use syntax::{
+ algo::$0{
+ find_token_at_offset,
+ },
+ ast,
+};",
+ r"
+use syntax::{
+ algo::$0find_token_at_offset,
+ ast,
+};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_normal_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ // Hello$0
+ // world!
+}
+",
+ r"
+fn foo() {
+ // Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_doc_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ /// Hello$0
+ /// world!
+}
+",
+ r"
+fn foo() {
+ /// Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_mod_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ //! Hello$0
+ //! world!
+}
+",
+ r"
+fn foo() {
+ //! Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_multiline_comments_1() {
+ check_join_lines(
+ r"
+fn foo() {
+ // Hello$0
+ /* world! */
+}
+",
+ r"
+fn foo() {
+ // Hello$0 world! */
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_multiline_comments_2() {
+ check_join_lines(
+ r"
+fn foo() {
+ // The$0
+ /* quick
+ brown
+ fox! */
+}
+",
+ r"
+fn foo() {
+ // The$0 quick
+ brown
+ fox! */
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_fn_args() {
+ check_join_lines_sel(
+ r"
+fn foo() {
+ $0foo(1,
+ 2,
+ 3,
+ $0)
+}
+ ",
+ r"
+fn foo() {
+ foo(1, 2, 3)
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_struct() {
+ check_join_lines_sel(
+ r"
+struct Foo $0{
+ f: u32,
+}$0
+ ",
+ r"
+struct Foo { f: u32 }
+ ",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_dot_chain() {
+ check_join_lines_sel(
+ r"
+fn foo() {
+ join($0type_params.type_params()
+ .filter_map(|it| it.name())
+ .map(|it| it.text())$0)
+}",
+ r"
+fn foo() {
+ join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()))
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_lambda_block_body() {
+ check_join_lines_sel(
+ r"
+pub fn handle_find_matching_brace() {
+ params.offsets
+ .map(|offset| $0{
+ world.analysis().matching_brace(&file, offset).unwrap_or(offset)
+ }$0)
+ .collect();
+}",
+ r"
+pub fn handle_find_matching_brace() {
+ params.offsets
+ .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset))
+ .collect();
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_commented_block() {
+ check_join_lines(
+ r"
+fn main() {
+ let _ = {
+ // $0foo
+ // bar
+ 92
+ };
+}
+ ",
+ r"
+fn main() {
+ let _ = {
+ // $0foo bar
+ 92
+ };
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn join_lines_mandatory_blocks_block() {
+ check_join_lines(
+ r"
+$0fn foo() {
+ 92
+}
+ ",
+ r"
+$0fn foo() { 92
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0if true {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0if true { 92
+ }
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0loop {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0loop { 92
+ }
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0unsafe {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0unsafe { 92
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn join_string_literal() {
+ {
+ cov_mark::check!(join_string_literal_open_quote);
+ check_join_lines(
+ r#"
+fn main() {
+ $0"
+hello
+";
+}
+"#,
+ r#"
+fn main() {
+ $0"hello
+";
+}
+"#,
+ );
+ }
+
+ {
+ cov_mark::check!(join_string_literal_close_quote);
+ check_join_lines(
+ r#"
+fn main() {
+ $0"hello
+";
+}
+"#,
+ r#"
+fn main() {
+ $0"hello";
+}
+"#,
+ );
+ check_join_lines(
+ r#"
+fn main() {
+ $0r"hello
+ ";
+}
+"#,
+ r#"
+fn main() {
+ $0r"hello";
+}
+"#,
+ );
+ }
+
+ check_join_lines(
+ r#"
+fn main() {
+ "
+$0hello
+world
+";
+}
+"#,
+ r#"
+fn main() {
+ "
+$0hello world
+";
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_last_line_empty() {
+ check_join_lines(
+ r#"
+fn main() {$0}
+"#,
+ r#"
+fn main() {$0}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_two_ifs() {
+ cov_mark::check!(join_two_ifs);
+ check_join_lines(
+ r#"
+fn main() {
+ if foo {
+
+ }$0
+ if bar {
+
+ }
+}
+"#,
+ r#"
+fn main() {
+ if foo {
+
+ }$0 else if bar {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_two_ifs_with_existing_else() {
+ cov_mark::check!(join_two_ifs_with_existing_else);
+ check_join_lines(
+ r#"
+fn main() {
+ if foo {
+
+ } else {
+
+ }$0
+ if bar {
+
+ }
+}
+"#,
+ r#"
+fn main() {
+ if foo {
+
+ } else {
+
+ }$0 if bar {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_assignments() {
+ check_join_lines(
+ r#"
+fn foo() {
+ $0let foo;
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ $0let foo = "bar";
+}
+"#,
+ );
+
+ cov_mark::check!(join_assignments_mismatch);
+ check_join_lines(
+ r#"
+fn foo() {
+ let foo;
+ let qux;$0
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ let foo;
+ let qux;$0 foo = "bar";
+}
+"#,
+ );
+
+ cov_mark::check!(join_assignments_already_initialized);
+ check_join_lines(
+ r#"
+fn foo() {
+ let foo = "bar";$0
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ let foo = "bar";$0 foo = "bar";
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
new file mode 100644
index 000000000..dd108fa79
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -0,0 +1,702 @@
+//! The `ide` crate provides "ide-centric" APIs for rust-analyzer. That is,
+//! it generally operates with files and text ranges, and returns results as
+//! Strings, suitable for displaying to a human.
+//!
+//! What powers this API are the `RootDatabase` struct, which defines a `salsa`
+//! database, and the `hir` crate, where the majority of the analysis happens.
+//! However, IDE-specific bits of the analysis (most notably completion) happen
+//! in this crate.
+
+// For proving that RootDatabase is RefUnwindSafe.
+#![recursion_limit = "128"]
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+#[cfg(test)]
+mod fixture;
+
+mod markup;
+mod prime_caches;
+mod navigation_target;
+
+mod annotations;
+mod call_hierarchy;
+mod signature_help;
+mod doc_links;
+mod highlight_related;
+mod expand_macro;
+mod extend_selection;
+mod file_structure;
+mod fn_references;
+mod folding_ranges;
+mod goto_declaration;
+mod goto_definition;
+mod goto_implementation;
+mod goto_type_definition;
+mod hover;
+mod inlay_hints;
+mod join_lines;
+mod markdown_remove;
+mod matching_brace;
+mod moniker;
+mod move_item;
+mod parent_module;
+mod references;
+mod rename;
+mod runnables;
+mod ssr;
+mod static_index;
+mod status;
+mod syntax_highlighting;
+mod syntax_tree;
+mod typing;
+mod view_crate_graph;
+mod view_hir;
+mod view_item_tree;
+mod shuffle_crate_graph;
+
+use std::sync::Arc;
+
+use cfg::CfgOptions;
+use ide_db::{
+ base_db::{
+ salsa::{self, ParallelDatabase},
+ CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
+ },
+ symbol_index, LineIndexDatabase,
+};
+use syntax::SourceFile;
+
+use crate::navigation_target::{ToNav, TryToNav};
+
+pub use crate::{
+ annotations::{Annotation, AnnotationConfig, AnnotationKind},
+ call_hierarchy::CallItem,
+ expand_macro::ExpandedMacro,
+ file_structure::{StructureNode, StructureNodeKind},
+ folding_ranges::{Fold, FoldKind},
+ highlight_related::{HighlightRelatedConfig, HighlightedRange},
+ hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult},
+ inlay_hints::{
+ ClosureReturnTypeHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip,
+ LifetimeElisionHints, ReborrowHints,
+ },
+ join_lines::JoinLinesConfig,
+ markup::Markup,
+ moniker::{MonikerKind, MonikerResult, PackageInformation},
+ move_item::Direction,
+ navigation_target::NavigationTarget,
+ prime_caches::ParallelPrimeCachesProgress,
+ references::ReferenceSearchResult,
+ rename::RenameError,
+ runnables::{Runnable, RunnableKind, TestId},
+ signature_help::SignatureHelp,
+ static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
+ syntax_highlighting::{
+ tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
+ HlRange,
+ },
+};
+pub use hir::{Documentation, Semantics};
+pub use ide_assists::{
+ Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
+};
+pub use ide_completion::{
+ CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
+ Snippet, SnippetScope,
+};
+pub use ide_db::{
+ base_db::{
+ Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
+ SourceRoot, SourceRootId,
+ },
+ label::Label,
+ line_index::{LineCol, LineColUtf16, LineIndex},
+ search::{ReferenceCategory, SearchScope},
+ source_change::{FileSystemEdit, SourceChange},
+ symbol_index::Query,
+ RootDatabase, SymbolKind,
+};
+pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_ssr::SsrError;
+pub use syntax::{TextRange, TextSize};
+pub use text_edit::{Indel, TextEdit};
+
+pub type Cancellable<T> = Result<T, Cancelled>;
+
+/// Info associated with a text range.
+#[derive(Debug)]
+pub struct RangeInfo<T> {
+ pub range: TextRange,
+ pub info: T,
+}
+
+impl<T> RangeInfo<T> {
+ pub fn new(range: TextRange, info: T) -> RangeInfo<T> {
+ RangeInfo { range, info }
+ }
+}
+
+/// `AnalysisHost` stores the current state of the world.
+#[derive(Debug)]
+pub struct AnalysisHost {
+ db: RootDatabase,
+}
+
+impl AnalysisHost {
+ pub fn new(lru_capacity: Option<usize>) -> AnalysisHost {
+ AnalysisHost { db: RootDatabase::new(lru_capacity) }
+ }
+
+ pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
+ self.db.update_lru_capacity(lru_capacity);
+ }
+
+ /// Returns a snapshot of the current state, which you can query for
+ /// semantic information.
+ pub fn analysis(&self) -> Analysis {
+ Analysis { db: self.db.snapshot() }
+ }
+
+ /// Applies changes to the current state of the world. If there are
+ /// outstanding snapshots, they will be canceled.
+ pub fn apply_change(&mut self, change: Change) {
+ self.db.apply_change(change)
+ }
+
+ /// NB: this clears the database
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> {
+ self.db.per_query_memory_usage()
+ }
+ pub fn request_cancellation(&mut self) {
+ self.db.request_cancellation();
+ }
+ pub fn raw_database(&self) -> &RootDatabase {
+ &self.db
+ }
+ pub fn raw_database_mut(&mut self) -> &mut RootDatabase {
+ &mut self.db
+ }
+
+ pub fn shuffle_crate_graph(&mut self) {
+ shuffle_crate_graph::shuffle_crate_graph(&mut self.db);
+ }
+}
+
+impl Default for AnalysisHost {
+ fn default() -> AnalysisHost {
+ AnalysisHost::new(None)
+ }
+}
+
+/// Analysis is a snapshot of a world state at a moment in time. It is the main
+/// entry point for asking semantic information about the world. When the world
+/// state is advanced using the `AnalysisHost::apply_change` method, all existing
+/// `Analysis` snapshots are canceled (most methods return `Err(Canceled)`).
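+///
+/// A rough usage sketch (identifiers like `change` and `file_id` assumed to be
+/// in scope, error handling elided):
+///
+/// ```ignore
+/// let mut host = AnalysisHost::default();
+/// host.apply_change(change);      // cancels any outstanding snapshots
+/// let analysis = host.analysis(); // take a fresh, consistent snapshot
+/// let text = analysis.file_text(file_id)?;
+/// ```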
+#[derive(Debug)]
+pub struct Analysis {
+ db: salsa::Snapshot<RootDatabase>,
+}
+
+// As a general design guideline, `Analysis` API are intended to be independent
+// from the language server protocol. That is, when exposing some functionality
+// we should think in terms of "what API makes most sense" and not in terms of
+// "what types LSP uses". Although currently LSP is the only consumer of the
+// API, the API should in theory be usable as a library, or via a different
+// protocol.
+impl Analysis {
+ // Creates an analysis instance for a single file, without any external
+ // dependencies, stdlib support or ability to apply changes. See
+ // `AnalysisHost` for creating a fully-featured analysis.
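+    //
+    // A minimal sketch of the intended use (single in-memory file, no project):
+    //
+    //     let (analysis, file_id) = Analysis::from_single_file("fn main() {}".to_string());
+    //     let syntax = analysis.parse(file_id).unwrap();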
+ pub fn from_single_file(text: String) -> (Analysis, FileId) {
+ let mut host = AnalysisHost::default();
+ let file_id = FileId(0);
+ let mut file_set = FileSet::default();
+ file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
+ let source_root = SourceRoot::new_local(file_set);
+
+ let mut change = Change::new();
+ change.set_roots(vec![source_root]);
+ let mut crate_graph = CrateGraph::default();
+ // FIXME: cfg options
+ // Default to enable test for single file.
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.insert_atom("test".into());
+ crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ None,
+ None,
+ cfg_options.clone(),
+ cfg_options,
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+ change.change_file(file_id, Some(Arc::new(text)));
+ change.set_crate_graph(crate_graph);
+ host.apply_change(change);
+ (host.analysis(), file_id)
+ }
+
+ /// Debug info about the current state of the analysis.
+ pub fn status(&self, file_id: Option<FileId>) -> Cancellable<String> {
+ self.with_db(|db| status::status(&*db, file_id))
+ }
+
+ pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
+ where
+ F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
+ {
+ self.with_db(move |db| prime_caches::parallel_prime_caches(db, num_worker_threads, &cb))
+ }
+
+ /// Gets the text of the source file.
+ pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
+ self.with_db(|db| db.file_text(file_id))
+ }
+
+ /// Gets the syntax tree of the file.
+ pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
+ self.with_db(|db| db.parse(file_id).tree())
+ }
+
+ /// Returns true if this file belongs to an immutable library.
+ pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
+ use ide_db::base_db::SourceDatabaseExt;
+ self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
+ }
+
+ /// Gets the file's `LineIndex`: data structure to convert between absolute
+ /// offsets and line/column representation.
+ pub fn file_line_index(&self, file_id: FileId) -> Cancellable<Arc<LineIndex>> {
+ self.with_db(|db| db.line_index(file_id))
+ }
+
+ /// Selects the next syntactic node encompassing the range.
+ pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
+ self.with_db(|db| extend_selection::extend_selection(db, frange))
+ }
+
+ /// Returns position of the matching brace (all types of braces are
+ /// supported).
+ pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
+ self.with_db(|db| {
+ let parse = db.parse(position.file_id);
+ let file = parse.tree();
+ matching_brace::matching_brace(&file, position.offset)
+ })
+ }
+
+ /// Returns a syntax tree represented as `String`, for debug purposes.
+ // FIXME: use a better name here.
+ pub fn syntax_tree(
+ &self,
+ file_id: FileId,
+ text_range: Option<TextRange>,
+ ) -> Cancellable<String> {
+ self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
+ }
+
+ pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
+ self.with_db(|db| view_hir::view_hir(db, position))
+ }
+
+ pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
+ self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
+ }
+
+ /// Renders the crate graph to GraphViz "dot" syntax.
+ pub fn view_crate_graph(&self, full: bool) -> Cancellable<Result<String, String>> {
+ self.with_db(|db| view_crate_graph::view_crate_graph(db, full))
+ }
+
+ pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
+ self.with_db(|db| expand_macro::expand_macro(db, position))
+ }
+
+ /// Returns an edit to remove all newlines in the range, cleaning up minor
+ /// stuff like trailing commas.
+ pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
+ self.with_db(|db| {
+ let parse = db.parse(frange.file_id);
+ join_lines::join_lines(config, &parse.tree(), frange.range)
+ })
+ }
+
+ /// Returns an edit which should be applied when opening a new line, fixing
+ /// up minor stuff like continuing the comment.
+ /// The edit will be a snippet (with `$0`).
+ pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
+ self.with_db(|db| typing::on_enter(db, position))
+ }
+
+ /// Returns an edit which should be applied after a character was typed.
+ ///
+ /// This is useful for some on-the-fly fixups, like adding `;` to `let =`
+ /// automatically.
+ pub fn on_char_typed(
+ &self,
+ position: FilePosition,
+ char_typed: char,
+ autoclose: bool,
+ ) -> Cancellable<Option<SourceChange>> {
+ // Fast path to not even parse the file.
+ if !typing::TRIGGER_CHARS.contains(char_typed) {
+ return Ok(None);
+ }
+ if char_typed == '<' && !autoclose {
+ return Ok(None);
+ }
+
+ self.with_db(|db| typing::on_char_typed(db, position, char_typed))
+ }
+
+ /// Returns a tree representation of symbols in the file. Useful to draw a
+ /// file outline.
+ pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
+ self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree()))
+ }
+
+ /// Returns a list of the places in the file where type hints can be displayed.
+ pub fn inlay_hints(
+ &self,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ range: Option<FileRange>,
+ ) -> Cancellable<Vec<InlayHint>> {
+ self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
+ }
+
+ /// Returns the set of folding ranges.
+ pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
+ self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree()))
+ }
+
+ /// Fuzzy searches for a symbol.
+ pub fn symbol_search(&self, query: Query) -> Cancellable<Vec<NavigationTarget>> {
+ self.with_db(|db| {
+ symbol_index::world_symbols(db, query)
+ .into_iter() // xx: should we make this a par iter?
+ .filter_map(|s| s.try_to_nav(db))
+ .collect::<Vec<_>>()
+ })
+ }
+
+ /// Returns the definitions from the symbol at `position`.
+ pub fn goto_definition(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_definition::goto_definition(db, position))
+ }
+
+ /// Returns the declaration from the symbol at `position`.
+ pub fn goto_declaration(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_declaration::goto_declaration(db, position))
+ }
+
+ /// Returns the impls from the symbol at `position`.
+ pub fn goto_implementation(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_implementation::goto_implementation(db, position))
+ }
+
+ /// Returns the type definitions for the symbol at `position`.
+ pub fn goto_type_definition(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
+ }
+
+ /// Finds all usages of the reference at point.
+ pub fn find_all_refs(
+ &self,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+ ) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
+ self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
+ }
+
+ /// Finds all methods and free functions for the file. Does not return tests!
+ pub fn find_all_methods(&self, file_id: FileId) -> Cancellable<Vec<FileRange>> {
+ self.with_db(|db| fn_references::find_all_methods(db, file_id))
+ }
+
+ /// Returns a short text describing element at position.
+ pub fn hover(
+ &self,
+ config: &HoverConfig,
+ range: FileRange,
+ ) -> Cancellable<Option<RangeInfo<HoverResult>>> {
+ self.with_db(|db| hover::hover(db, range, config))
+ }
+
+ /// Returns moniker of symbol at position.
+ pub fn moniker(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<moniker::MonikerResult>>>> {
+ self.with_db(|db| moniker::moniker(db, position))
+ }
+
+ /// Returns URL(s) for the documentation of the symbol under the cursor.
+ pub fn external_docs(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<doc_links::DocumentationLink>> {
+ self.with_db(|db| doc_links::external_docs(db, &position))
+ }
+
+ /// Computes parameter information at the given position.
+ pub fn signature_help(&self, position: FilePosition) -> Cancellable<Option<SignatureHelp>> {
+ self.with_db(|db| signature_help::signature_help(db, position))
+ }
+
+ /// Computes call hierarchy candidates for the given file position.
+ pub fn call_hierarchy(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| call_hierarchy::call_hierarchy(db, position))
+ }
+
+ /// Computes incoming calls for the given file position.
+ pub fn incoming_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
+ self.with_db(|db| call_hierarchy::incoming_calls(db, position))
+ }
+
+ /// Computes outgoing calls for the given file position.
+ pub fn outgoing_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
+ self.with_db(|db| call_hierarchy::outgoing_calls(db, position))
+ }
+
+ /// Returns a `mod name;` declaration which created the current module.
+ pub fn parent_module(&self, position: FilePosition) -> Cancellable<Vec<NavigationTarget>> {
+ self.with_db(|db| parent_module::parent_module(db, position))
+ }
+
+ /// Returns the crates this file belongs to.
+ pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| parent_module::crate_for(db, file_id))
+ }
+
+ /// Returns the edition of the given crate.
+ pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
+ self.with_db(|db| db.crate_graph()[crate_id].edition)
+ }
+
+ /// Returns the root file of the given crate.
+ pub fn crate_root(&self, crate_id: CrateId) -> Cancellable<FileId> {
+ self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
+ }
+
+ /// Returns the set of possible targets to run for the current file.
+ pub fn runnables(&self, file_id: FileId) -> Cancellable<Vec<Runnable>> {
+ self.with_db(|db| runnables::runnables(db, file_id))
+ }
+
+ /// Returns the set of tests for the given file position.
+ pub fn related_tests(
+ &self,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+ ) -> Cancellable<Vec<Runnable>> {
+ self.with_db(|db| runnables::related_tests(db, position, search_scope))
+ }
+
+ /// Computes syntax highlighting for the given file.
+ pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
+ self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
+ }
+
+ /// Computes all ranges to highlight for a given item in a file.
+ pub fn highlight_related(
+ &self,
+ config: HighlightRelatedConfig,
+ position: FilePosition,
+ ) -> Cancellable<Option<Vec<HighlightedRange>>> {
+ self.with_db(|db| {
+ highlight_related::highlight_related(&Semantics::new(db), config, position)
+ })
+ }
+
+ /// Computes syntax highlighting for the given file range.
+ pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
+ self.with_db(|db| {
+ syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
+ })
+ }
+
+ /// Computes syntax highlighting for the given file.
+ pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable<String> {
+ self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
+ }
+
+ /// Computes completions at the given position.
+ pub fn completions(
+ &self,
+ config: &CompletionConfig,
+ position: FilePosition,
+ trigger_character: Option<char>,
+ ) -> Cancellable<Option<Vec<CompletionItem>>> {
+ self.with_db(|db| {
+ ide_completion::completions(db, config, position, trigger_character).map(Into::into)
+ })
+ }
+
+ /// Resolves additional completion data at the position given.
+ pub fn resolve_completion_edits(
+ &self,
+ config: &CompletionConfig,
+ position: FilePosition,
+ imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
+ ) -> Cancellable<Vec<TextEdit>> {
+ Ok(self
+ .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?
+ .unwrap_or_default())
+ }
+
+ /// Computes the set of diagnostics for the given file.
+ pub fn diagnostics(
+ &self,
+ config: &DiagnosticsConfig,
+ resolve: AssistResolveStrategy,
+ file_id: FileId,
+ ) -> Cancellable<Vec<Diagnostic>> {
+ self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id))
+ }
+
+ /// Convenience function to return assists + quick fixes for diagnostics.
+ pub fn assists_with_fixes(
+ &self,
+ assist_config: &AssistConfig,
+ diagnostics_config: &DiagnosticsConfig,
+ resolve: AssistResolveStrategy,
+ frange: FileRange,
+ ) -> Cancellable<Vec<Assist>> {
+ let include_fixes = match &assist_config.allowed {
+ Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
+ None => true,
+ };
+
+ self.with_db(|db| {
+ let diagnostic_assists = if include_fixes {
+ ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
+ .into_iter()
+ .flat_map(|it| it.fixes.unwrap_or_default())
+ .filter(|it| it.target.intersect(frange.range).is_some())
+ .collect()
+ } else {
+ Vec::new()
+ };
+ let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
+ let assists = ide_assists::assists(db, assist_config, resolve, frange);
+
+ let mut res = diagnostic_assists;
+ res.extend(ssr_assists.into_iter());
+ res.extend(assists.into_iter());
+
+ res
+ })
+ }
+
+ /// Returns the edit required to rename reference at the position to the new
+ /// name.
+ pub fn rename(
+ &self,
+ position: FilePosition,
+ new_name: &str,
+ ) -> Cancellable<Result<SourceChange, RenameError>> {
+ self.with_db(|db| rename::rename(db, position, new_name))
+ }
+
+ pub fn prepare_rename(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Result<RangeInfo<()>, RenameError>> {
+ self.with_db(|db| rename::prepare_rename(db, position))
+ }
+
+ pub fn will_rename_file(
+ &self,
+ file_id: FileId,
+ new_name_stem: &str,
+ ) -> Cancellable<Option<SourceChange>> {
+ self.with_db(|db| rename::will_rename_file(db, file_id, new_name_stem))
+ }
+
+ pub fn structural_search_replace(
+ &self,
+ query: &str,
+ parse_only: bool,
+ resolve_context: FilePosition,
+ selections: Vec<FileRange>,
+ ) -> Cancellable<Result<SourceChange, SsrError>> {
+ self.with_db(|db| {
+ let rule: ide_ssr::SsrRule = query.parse()?;
+ let mut match_finder =
+ ide_ssr::MatchFinder::in_context(db, resolve_context, selections)?;
+ match_finder.add_rule(rule)?;
+ let edits = if parse_only { Default::default() } else { match_finder.edits() };
+ Ok(SourceChange::from(edits))
+ })
+ }
+
+ pub fn annotations(
+ &self,
+ config: &AnnotationConfig,
+ file_id: FileId,
+ ) -> Cancellable<Vec<Annotation>> {
+ self.with_db(|db| annotations::annotations(db, config, file_id))
+ }
+
+ pub fn resolve_annotation(&self, annotation: Annotation) -> Cancellable<Annotation> {
+ self.with_db(|db| annotations::resolve_annotation(db, annotation))
+ }
+
+ pub fn move_item(
+ &self,
+ range: FileRange,
+ direction: Direction,
+ ) -> Cancellable<Option<TextEdit>> {
+ self.with_db(|db| move_item::move_item(db, range, direction))
+ }
+
+ /// Performs an operation on the database that may be canceled.
+ ///
+ /// rust-analyzer needs to be able to answer semantic questions about the
+ /// code while the code is being modified. A common problem is that a
+ /// long-running query is being calculated when a new change arrives.
+ ///
+ /// We can't just apply the change immediately: this would cause the pending
+ /// query to see inconsistent state (it would lose the guarantee of
+ /// repeatable reads). So what we do instead is **cancel** all pending queries
+ /// before applying the change.
+ ///
+ /// Salsa implements cancellation by unwinding with a special value and
+ /// catching it on the API boundary.
+ fn with_db<F, T>(&self, f: F) -> Cancellable<T>
+ where
+ F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
+ {
+ Cancelled::catch(|| f(&self.db))
+ }
+}
+
+#[test]
+fn analysis_is_send() {
+ fn is_send<T: Send>() {}
+ is_send::<Analysis>();
+}
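The `AnalysisHost`/`Analysis` split above is the public surface of this crate: the host owns the mutable state, `analysis()` hands out immutable snapshots, and `apply_change` cancels any snapshot still running a query, which is why every query returns `Cancellable<T>`. A minimal usage sketch (not part of the diff), assuming this `ide` crate is available as a dependency:

use ide::Analysis;

fn main() {
    // Build a throwaway analysis over a single in-memory file; this is exactly
    // what `Analysis::from_single_file` sets up above (a local source root plus
    // a crate root with the `test` cfg enabled).
    let (analysis, file_id) = Analysis::from_single_file("fn main() {}".to_string());

    // Every query returns `Cancellable<T>`. Cancellation only happens when
    // `AnalysisHost::apply_change` runs while a query is in flight; this
    // snapshot has no host mutating it, so `unwrap` is fine here.
    let outline = analysis.file_structure(file_id).unwrap();
    println!("{} top-level item(s)", outline.len());

    let tree = analysis.syntax_tree(file_id, None).unwrap();
    println!("{tree}");
}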
diff --git a/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
new file mode 100644
index 000000000..3ec5c629e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
@@ -0,0 +1,22 @@
+//! Removes markdown from strings.
+use pulldown_cmark::{Event, Parser, Tag};
+
+/// Removes all markdown, keeping the text and code blocks.
+///
+/// Currently limited in styling, i.e. no ASCII tables or lists.
+pub(crate) fn remove_markdown(markdown: &str) -> String {
+ let mut out = String::new();
+ let parser = Parser::new(markdown);
+
+ for event in parser {
+ match event {
+ Event::Text(text) | Event::Code(text) => out.push_str(&text),
+ Event::SoftBreak | Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => {
+ out.push('\n')
+ }
+ _ => {}
+ }
+ }
+
+ out
+}
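A quick sketch of the behaviour described above (not part of the diff; it assumes the test lives next to `remove_markdown`, since the function is `pub(crate)`): heading and emphasis markers are dropped, text and code survive, and a newline is emitted where a code block ends.

#[test]
fn removes_markdown_markers() {
    let input = "# Docs\n\nSome **bold** text and `code`.\n\n```rust\nfn f() {}\n```\n";
    let output = remove_markdown(input);
    // The marker characters are gone, the words and the code are kept.
    assert!(output.contains("Docs"));
    assert!(output.contains("Some bold text and code."));
    assert!(output.contains("fn f() {}"));
    assert!(!output.contains('#') && !output.contains('*') && !output.contains('`'));
}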
diff --git a/src/tools/rust-analyzer/crates/ide/src/markup.rs b/src/tools/rust-analyzer/crates/ide/src/markup.rs
new file mode 100644
index 000000000..60c193c40
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/markup.rs
@@ -0,0 +1,38 @@
+//! Markdown formatting.
+//!
+//! Sometimes, we want to display "rich text" in the UI. At the moment, we use
+//! markdown for this purpose. It doesn't feel like the right option, but that's
+//! what LSP uses, so let's keep it simple.
+use std::fmt;
+
+#[derive(Default, Debug)]
+pub struct Markup {
+ text: String,
+}
+
+impl From<Markup> for String {
+ fn from(markup: Markup) -> Self {
+ markup.text
+ }
+}
+
+impl From<String> for Markup {
+ fn from(text: String) -> Self {
+ Markup { text }
+ }
+}
+
+impl fmt::Display for Markup {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.text, f)
+ }
+}
+
+impl Markup {
+ pub fn as_str(&self) -> &str {
+ self.text.as_str()
+ }
+ pub fn fenced_block(contents: &impl fmt::Display) -> Markup {
+ format!("```rust\n{}\n```", contents).into()
+ }
+}
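`fenced_block` is the workhorse here: anything `Display` gets wrapped in a ```rust fence so that clients render it with Rust highlighting. A small sketch (not part of the diff; it assumes `Markup` is reachable from the crate root, as in upstream rust-analyzer):

use ide::Markup;

fn hover_signature() -> String {
    let markup = Markup::fenced_block(&"pub fn len(&self) -> usize");
    // `as_str` exposes the raw markdown; `From<Markup> for String` gives ownership.
    assert_eq!(markup.as_str(), "```rust\npub fn len(&self) -> usize\n```");
    markup.into()
}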
diff --git a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
new file mode 100644
index 000000000..da70cecdd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
@@ -0,0 +1,78 @@
+use syntax::{
+ ast::{self, AstNode},
+ SourceFile, SyntaxKind, TextSize, T,
+};
+
+// Feature: Matching Brace
+//
+// If the cursor is on any brace (`<>(){}[]||`) which is part of a brace pair,
+// moves the cursor to the matching brace. It uses the actual parser to determine
+// braces, so it won't confuse generics with comparisons.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Find matching brace**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[]
+pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
+ const BRACES: &[SyntaxKind] =
+ &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
+ let (brace_token, brace_idx) = file
+ .syntax()
+ .token_at_offset(offset)
+ .filter_map(|node| {
+ let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
+ Some((node, idx))
+ })
+ .last()?;
+ let parent = brace_token.parent()?;
+ if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
+ cov_mark::hit!(pipes_not_braces);
+ return None;
+ }
+ let matching_kind = BRACES[brace_idx ^ 1];
+ let matching_node = parent
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|node| node.kind() == matching_kind && node != &brace_token)?;
+ Some(matching_node.text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{add_cursor, assert_eq_text, extract_offset};
+
+ use super::*;
+
+ #[test]
+ fn test_matching_brace() {
+ fn do_check(before: &str, after: &str) {
+ let (pos, before) = extract_offset(before);
+ let parse = SourceFile::parse(&before);
+ let new_pos = match matching_brace(&parse.tree(), pos) {
+ None => pos,
+ Some(pos) => pos,
+ };
+ let actual = add_cursor(&before, new_pos);
+ assert_eq_text!(after, &actual);
+ }
+
+ do_check("struct Foo { a: i32, }$0", "struct Foo $0{ a: i32, }");
+ do_check("fn main() { |x: i32|$0 x * 2;}", "fn main() { $0|x: i32| x * 2;}");
+ do_check("fn main() { $0|x: i32| x * 2;}", "fn main() { |x: i32$0| x * 2;}");
+ do_check(
+ "fn func(x) { return (2 * (x + 3)$0) + 5;}",
+ "fn func(x) { return $0(2 * (x + 3)) + 5;}",
+ );
+
+ {
+ cov_mark::check!(pipes_not_braces);
+ do_check(
+ "fn main() { match 92 { 1 | 2 |$0 3 => 92 } }",
+ "fn main() { match 92 { 1 | 2 |$0 3 => 92 } }",
+ );
+ }
+ }
+}
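The lookup above relies on a small trick: `BRACES` stores each pair in adjacent slots, so `brace_idx ^ 1` flips an index to its partner (and `|` pairs with itself because it appears twice). A standalone sketch of that pairing scheme (not part of the diff):

fn partner(braces: &[char], idx: usize) -> char {
    // Pairs live in adjacent slots (0<->1, 2<->3, ...), so XOR with 1 selects
    // the other half of the same pair.
    braces[idx ^ 1]
}

fn main() {
    let braces = ['{', '}', '[', ']', '(', ')', '<', '>', '|', '|'];
    assert_eq!(partner(&braces, 0), '}');
    assert_eq!(partner(&braces, 3), '[');
    assert_eq!(partner(&braces, 8), '|');
}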
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
new file mode 100644
index 000000000..6bab9fa1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -0,0 +1,342 @@
+//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
+//! for LSIF and LSP.
+
+use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
+use ide_db::{
+ base_db::{CrateOrigin, FileId, FileLoader, FilePosition, LangCrateOrigin},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{AstNode, SyntaxKind::*, T};
+
+use crate::{doc_links::token_as_doc_comment, RangeInfo};
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MonikerIdentifier {
+ crate_name: String,
+ path: Vec<Name>,
+}
+
+impl ToString for MonikerIdentifier {
+ fn to_string(&self) -> String {
+ match self {
+ MonikerIdentifier { path, crate_name } => {
+ format!("{}::{}", crate_name, path.iter().map(|x| x.to_string()).join("::"))
+ }
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum MonikerKind {
+ Import,
+ Export,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MonikerResult {
+ pub identifier: MonikerIdentifier,
+ pub kind: MonikerKind,
+ pub package_information: PackageInformation,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PackageInformation {
+ pub name: String,
+ pub repo: String,
+ pub version: String,
+}
+
+pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> {
+ for &krate in db.relevant_crates(file_id).iter() {
+ let crate_def_map = db.crate_def_map(krate);
+ for (_, data) in crate_def_map.modules() {
+ if data.origin.file_id() == Some(file_id) {
+ return Some(krate.into());
+ }
+ }
+ }
+ None
+}
+
+pub(crate) fn moniker(
+ db: &RootDatabase,
+ FilePosition { file_id, offset }: FilePosition,
+) -> Option<RangeInfo<Vec<MonikerResult>>> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(file_id).syntax().clone();
+ let current_crate = crate_for_file(db, file_id)?;
+ let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
+ | COMMENT => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, _| {
+ let m = def_to_moniker(db, def, current_crate)?;
+ Some(RangeInfo::new(original_token.text_range(), vec![m]))
+ });
+ }
+ let navs = sema
+ .descend_into_macros(original_token.clone())
+ .into_iter()
+ .filter_map(|token| {
+ IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+ it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+ })
+ })
+ .flatten()
+ .unique()
+ .collect::<Vec<_>>();
+ Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+pub(crate) fn def_to_moniker(
+ db: &RootDatabase,
+ def: Definition,
+ from_crate: Crate,
+) -> Option<MonikerResult> {
+ if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) {
+ return None;
+ }
+ let module = def.module(db)?;
+ let krate = module.krate();
+ let mut path = vec![];
+ path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db)));
+
+ // Handle associated items within a trait
+ if let Some(assoc) = def.as_assoc_item(db) {
+ let container = assoc.container(db);
+ match container {
+ AssocItemContainer::Trait(trait_) => {
+ // Because different traits can have functions with the same name,
+ // we have to include the trait name as part of the moniker for uniqueness.
+ path.push(trait_.name(db));
+ }
+ AssocItemContainer::Impl(impl_) => {
+ // Because a struct can implement multiple traits, for implementations
+ // we add both the struct name and the trait name to the path
+ if let Some(adt) = impl_.self_ty(db).as_adt() {
+ path.push(adt.name(db));
+ }
+
+ if let Some(trait_) = impl_.trait_(db) {
+ path.push(trait_.name(db));
+ }
+ }
+ }
+ }
+
+ if let Definition::Field(it) = def {
+ path.push(it.parent_def(db).name(db));
+ }
+
+ path.push(def.name(db)?);
+ Some(MonikerResult {
+ identifier: MonikerIdentifier {
+ crate_name: krate.display_name(db)?.crate_name().to_string(),
+ path,
+ },
+ kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
+ package_information: {
+ let name = krate.display_name(db)?.to_string();
+ let (repo, version) = match krate.origin(db) {
+ CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?),
+ CrateOrigin::Lang(lang) => (
+ "https://github.com/rust-lang/rust/".to_string(),
+ match lang {
+ LangCrateOrigin::Other => {
+ "https://github.com/rust-lang/rust/library/".into()
+ }
+ lang => format!("https://github.com/rust-lang/rust/library/{lang}",),
+ },
+ ),
+ };
+ PackageInformation { name, repo, version }
+ },
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::MonikerKind;
+
+ #[track_caller]
+ fn no_moniker(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ if let Some(x) = analysis.moniker(position).unwrap() {
+ assert_eq!(x.info.len(), 0, "Moniker found but no moniker expected: {:?}", x);
+ }
+ }
+
+ #[track_caller]
+ fn check_moniker(ra_fixture: &str, identifier: &str, package: &str, kind: MonikerKind) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let x = analysis.moniker(position).unwrap().expect("no moniker found").info;
+ assert_eq!(x.len(), 1);
+ let x = x.into_iter().next().unwrap();
+ assert_eq!(identifier, x.identifier.to_string());
+ assert_eq!(package, format!("{:?}", x.package_information));
+ assert_eq!(kind, x.kind);
+ }
+
+ #[test]
+ fn basic() {
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func$0();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func() {}
+}
+"#,
+ "foo::module::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Import,
+ );
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func$0() {}
+}
+"#,
+ "foo::module::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "foo::module::MyTrait::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_constant() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ const MY_CONST$0: u8;
+ }
+}
+"#,
+ "foo::module::MyTrait::MY_CONST",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_type() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ type MyType$0;
+ }
+}
+"#,
+ "foo::module::MyTrait::MyType",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_impl_function() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func() {}
+ }
+
+ struct MyStruct {}
+
+ impl MyTrait for MyStruct {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "foo::module::MyStruct::MyTrait::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_field() {
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::St;
+fn main() {
+ let x = St { a$0: 2 };
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub struct St {
+ pub a: i32,
+}
+"#,
+ "foo::St::a",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Import,
+ );
+ }
+
+ #[test]
+ fn no_moniker_for_local() {
+ no_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func() {
+ let x$0 = 2;
+ }
+}
+"#,
+ );
+ }
+}
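As `def_to_moniker` above shows, the identifier is simply the crate name followed by the `::`-joined path, where the path picks up module names, the self type and trait for impl items, and finally the item itself. A sketch of that string format (not part of the diff; it builds the string by hand rather than through `MonikerIdentifier`, whose fields are private):

fn moniker_string(crate_name: &str, path: &[&str]) -> String {
    // Mirrors `MonikerIdentifier::to_string`: crate name, then the `::`-joined path.
    format!("{}::{}", crate_name, path.join("::"))
}

fn main() {
    // The trait-impl case from `moniker_for_trait_impl_function` above.
    assert_eq!(
        moniker_string("foo", &["module", "MyStruct", "MyTrait", "func"]),
        "foo::module::MyStruct::MyTrait::func"
    );
}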
diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
new file mode 100644
index 000000000..02e9fb8b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
@@ -0,0 +1,890 @@
+use std::{iter::once, mem};
+
+use hir::Semantics;
+use ide_db::{base_db::FileRange, helpers::pick_best_token, RootDatabase};
+use itertools::Itertools;
+use syntax::{algo, ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange};
+use text_edit::{TextEdit, TextEditBuilder};
+
+#[derive(Copy, Clone, Debug)]
+pub enum Direction {
+ Up,
+ Down,
+}
+
+// Feature: Move Item
+//
+// Move item under cursor or selection up and down.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Move item up**
+// | VS Code | **Rust Analyzer: Move item down**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[]
+pub(crate) fn move_item(
+ db: &RootDatabase,
+ range: FileRange,
+ direction: Direction,
+) -> Option<TextEdit> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(range.file_id);
+
+ let item = if range.range.is_empty() {
+ SyntaxElement::Token(pick_best_token(
+ file.syntax().token_at_offset(range.range.start()),
+ |kind| match kind {
+ SyntaxKind::IDENT | SyntaxKind::LIFETIME_IDENT => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ },
+ )?)
+ } else {
+ file.syntax().covering_element(range.range)
+ };
+
+ find_ancestors(item, direction, range.range)
+}
+
+fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -> Option<TextEdit> {
+ let root = match item {
+ SyntaxElement::Node(node) => node,
+ SyntaxElement::Token(token) => token.parent()?,
+ };
+
+ let movable = [
+ SyntaxKind::ARG_LIST,
+ SyntaxKind::GENERIC_PARAM_LIST,
+ SyntaxKind::GENERIC_ARG_LIST,
+ SyntaxKind::VARIANT_LIST,
+ SyntaxKind::TYPE_BOUND_LIST,
+ SyntaxKind::MATCH_ARM,
+ SyntaxKind::PARAM,
+ SyntaxKind::LET_STMT,
+ SyntaxKind::EXPR_STMT,
+ SyntaxKind::IF_EXPR,
+ SyntaxKind::FOR_EXPR,
+ SyntaxKind::LOOP_EXPR,
+ SyntaxKind::WHILE_EXPR,
+ SyntaxKind::RETURN_EXPR,
+ SyntaxKind::MATCH_EXPR,
+ SyntaxKind::MACRO_CALL,
+ SyntaxKind::TYPE_ALIAS,
+ SyntaxKind::TRAIT,
+ SyntaxKind::IMPL,
+ SyntaxKind::MACRO_DEF,
+ SyntaxKind::STRUCT,
+ SyntaxKind::UNION,
+ SyntaxKind::ENUM,
+ SyntaxKind::FN,
+ SyntaxKind::MODULE,
+ SyntaxKind::USE,
+ SyntaxKind::STATIC,
+ SyntaxKind::CONST,
+ SyntaxKind::MACRO_RULES,
+ SyntaxKind::MACRO_DEF,
+ ];
+
+ let ancestor = once(root.clone())
+ .chain(root.ancestors())
+ .find(|ancestor| movable.contains(&ancestor.kind()))?;
+
+ move_in_direction(&ancestor, direction, range)
+}
+
+fn move_in_direction(
+ node: &SyntaxNode,
+ direction: Direction,
+ range: TextRange,
+) -> Option<TextEdit> {
+ match_ast! {
+ match node {
+ ast::ArgList(it) => swap_sibling_in_list(node, it.args(), range, direction),
+ ast::GenericParamList(it) => swap_sibling_in_list(node, it.generic_params(), range, direction),
+ ast::GenericArgList(it) => swap_sibling_in_list(node, it.generic_args(), range, direction),
+ ast::VariantList(it) => swap_sibling_in_list(node, it.variants(), range, direction),
+ ast::TypeBoundList(it) => swap_sibling_in_list(node, it.bounds(), range, direction),
+ _ => Some(replace_nodes(range, node, &match direction {
+ Direction::Up => node.prev_sibling(),
+ Direction::Down => node.next_sibling(),
+ }?))
+ }
+ }
+}
+
+fn swap_sibling_in_list<A: AstNode + Clone, I: Iterator<Item = A>>(
+ node: &SyntaxNode,
+ list: I,
+ range: TextRange,
+ direction: Direction,
+) -> Option<TextEdit> {
+ let list_lookup = list.tuple_windows().find(|(l, r)| match direction {
+ Direction::Up => r.syntax().text_range().contains_range(range),
+ Direction::Down => l.syntax().text_range().contains_range(range),
+ });
+
+ if let Some((l, r)) = list_lookup {
+ Some(replace_nodes(range, l.syntax(), r.syntax()))
+ } else {
+ // The cursor is not on any movable list item (for example, it is on a curly brace of an enum).
+ // The parent of the list is not necessarily movable (an arg list's parent is not, for example),
+ // so we have to continue the tree traversal to find a suitable node.
+ find_ancestors(SyntaxElement::Node(node.parent()?), direction, range)
+ }
+}
+
+fn replace_nodes<'a>(
+ range: TextRange,
+ mut first: &'a SyntaxNode,
+ mut second: &'a SyntaxNode,
+) -> TextEdit {
+ let cursor_offset = if range.is_empty() {
+ // FIXME: `applySnippetTextEdits` does not support non-empty selection ranges
+ if first.text_range().contains_range(range) {
+ Some(range.start() - first.text_range().start())
+ } else if second.text_range().contains_range(range) {
+ mem::swap(&mut first, &mut second);
+ Some(range.start() - first.text_range().start())
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+
+ let first_with_cursor = match cursor_offset {
+ Some(offset) => {
+ let mut item_text = first.text().to_string();
+ item_text.insert_str(offset.into(), "$0");
+ item_text
+ }
+ None => first.text().to_string(),
+ };
+
+ let mut edit = TextEditBuilder::default();
+
+ algo::diff(first, second).into_text_edit(&mut edit);
+ edit.replace(second.text_range(), first_with_cursor);
+
+ edit.finish()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+ use expect_test::{expect, Expect};
+
+ use crate::Direction;
+
+ fn check(ra_fixture: &str, expect: Expect, direction: Direction) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let edit = analysis.move_item(range, direction).unwrap().unwrap_or_default();
+ let mut file = analysis.file_text(range.file_id).unwrap().to_string();
+ edit.apply(&mut file);
+ expect.assert_eq(&file);
+ }
+
+ #[test]
+ fn test_moves_match_arm_up() {
+ check(
+ r#"
+fn main() {
+ match true {
+ true => {
+ println!("Hello, world");
+ },
+ false =>$0$0 {
+ println!("Test");
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match true {
+ false =>$0 {
+ println!("Test");
+ }
+ true => {
+ println!("Hello, world");
+ },
+ };
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_match_arm_down() {
+ check(
+ r#"
+fn main() {
+ match true {
+ true =>$0$0 {
+ println!("Hello, world");
+ },
+ false => {
+ println!("Test");
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match true {
+ false => {
+ println!("Test");
+ }
+ true =>$0 {
+ println!("Hello, world");
+ },
+ };
+ }
+ "#]],
+ Direction::Down,
+ );
+ }
+
+ #[test]
+ fn test_nowhere_to_move() {
+ check(
+ r#"
+fn main() {
+ match true {
+ true =>$0$0 {
+ println!("Hello, world");
+ },
+ false => {
+ println!("Test");
+ }
+ };
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match true {
+ true => {
+ println!("Hello, world");
+ },
+ false => {
+ println!("Test");
+ }
+ };
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_let_stmt_up() {
+ check(
+ r#"
+fn main() {
+ let test = 123;
+ let test2$0$0 = 456;
+}
+"#,
+ expect![[r#"
+ fn main() {
+ let test2$0 = 456;
+ let test = 123;
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_expr_up() {
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+ println!("All I want to say is...");$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ println!("All I want to say is...");$0
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ if true {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ if true {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ for i in 0..10 {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ for i in 0..10 {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ loop {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ loop {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ while true {
+ println!("Test");
+ }$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ while true {
+ println!("Test");
+ }$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn main() {
+ println!("Hello, world");
+
+ return 123;$0$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ return 123;$0
+
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_nowhere_to_move_stmt() {
+ check(
+ r#"
+fn main() {
+ println!("All I want to say is...");$0$0
+ println!("Hello, world");
+}
+"#,
+ expect![[r#"
+ fn main() {
+ println!("All I want to say is...");
+ println!("Hello, world");
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_move_item() {
+ check(
+ r#"
+fn main() {}
+
+fn foo() {}$0$0
+"#,
+ expect![[r#"
+ fn foo() {}$0
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_move_impl_up() {
+ check(
+ r#"
+struct Yay;
+
+trait Wow {}
+
+impl Wow for Yay $0$0{}
+"#,
+ expect![[r#"
+ struct Yay;
+
+ impl Wow for Yay $0{}
+
+ trait Wow {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_move_use_up() {
+ check(
+ r#"
+use std::vec::Vec;
+use std::collections::HashMap$0$0;
+"#,
+ expect![[r#"
+ use std::collections::HashMap$0;
+ use std::vec::Vec;
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_match_expr_up() {
+ check(
+ r#"
+fn main() {
+ let test = 123;
+
+ $0match test {
+ 456 => {},
+ _ => {}
+ };$0
+}
+"#,
+ expect![[r#"
+ fn main() {
+ match test {
+ 456 => {},
+ _ => {}
+ };
+
+ let test = 123;
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_param() {
+ check(
+ r#"
+fn test(one: i32, two$0$0: u32) {}
+
+fn main() {
+ test(123, 456);
+}
+"#,
+ expect![[r#"
+ fn test(two$0: u32, one: i32) {}
+
+ fn main() {
+ test(123, 456);
+ }
+ "#]],
+ Direction::Up,
+ );
+ check(
+ r#"
+fn f($0$0arg: u8, arg2: u16) {}
+"#,
+ expect![[r#"
+ fn f(arg2: u16, $0arg: u8) {}
+ "#]],
+ Direction::Down,
+ );
+ }
+
+ #[test]
+ fn test_moves_arg_up() {
+ check(
+ r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+ test(123, 456$0$0);
+}
+"#,
+ expect![[r#"
+ fn test(one: i32, two: u32) {}
+
+ fn main() {
+ test(456$0, 123);
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_arg_down() {
+ check(
+ r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+ test(123$0$0, 456);
+}
+"#,
+ expect![[r#"
+ fn test(one: i32, two: u32) {}
+
+ fn main() {
+ test(456, 123$0);
+ }
+ "#]],
+ Direction::Down,
+ );
+ }
+
+ #[test]
+ fn test_nowhere_to_move_arg() {
+ check(
+ r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+ test(123$0$0, 456);
+}
+"#,
+ expect![[r#"
+ fn test(one: i32, two: u32) {}
+
+ fn main() {
+ test(123, 456);
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_generic_param_up() {
+ check(
+ r#"
+struct Test<A, B$0$0>(A, B);
+
+fn main() {}
+"#,
+ expect![[r#"
+ struct Test<B$0, A>(A, B);
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_generic_arg_up() {
+ check(
+ r#"
+struct Test<A, B>(A, B);
+
+fn main() {
+ let t = Test::<i32, &str$0$0>(123, "yay");
+}
+"#,
+ expect![[r#"
+ struct Test<A, B>(A, B);
+
+ fn main() {
+ let t = Test::<&str$0, i32>(123, "yay");
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_variant_up() {
+ check(
+ r#"
+enum Hello {
+ One,
+ Two$0$0
+}
+
+fn main() {}
+"#,
+ expect![[r#"
+ enum Hello {
+ Two$0,
+ One
+ }
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_moves_type_bound_up() {
+ check(
+ r#"
+trait One {}
+
+trait Two {}
+
+fn test<T: One + Two$0$0>(t: T) {}
+
+fn main() {}
+"#,
+ expect![[r#"
+ trait One {}
+
+ trait Two {}
+
+ fn test<T: Two$0 + One>(t: T) {}
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_prioritizes_trait_items() {
+ check(
+ r#"
+struct Test;
+
+trait Yay {
+ type One;
+
+ type Two;
+
+ fn inner();
+}
+
+impl Yay for Test {
+ type One = i32;
+
+ type Two = u32;
+
+ fn inner() {$0$0
+ println!("Mmmm");
+ }
+}
+"#,
+ expect![[r#"
+ struct Test;
+
+ trait Yay {
+ type One;
+
+ type Two;
+
+ fn inner();
+ }
+
+ impl Yay for Test {
+ type One = i32;
+
+ fn inner() {$0
+ println!("Mmmm");
+ }
+
+ type Two = u32;
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_weird_nesting() {
+ check(
+ r#"
+fn test() {
+ mod hello {
+ fn inner() {}
+ }
+
+ mod hi {$0$0
+ fn inner() {}
+ }
+}
+"#,
+ expect![[r#"
+ fn test() {
+ mod hi {$0
+ fn inner() {}
+ }
+
+ mod hello {
+ fn inner() {}
+ }
+ }
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_cursor_at_item_start() {
+ check(
+ r#"
+$0$0#[derive(Debug)]
+enum FooBar {
+ Foo,
+ Bar,
+}
+
+fn main() {}
+"#,
+ expect![[r##"
+ fn main() {}
+
+ $0#[derive(Debug)]
+ enum FooBar {
+ Foo,
+ Bar,
+ }
+ "##]],
+ Direction::Down,
+ );
+ check(
+ r#"
+$0$0enum FooBar {
+ Foo,
+ Bar,
+}
+
+fn main() {}
+"#,
+ expect![[r#"
+ fn main() {}
+
+ $0enum FooBar {
+ Foo,
+ Bar,
+ }
+ "#]],
+ Direction::Down,
+ );
+ check(
+ r#"
+struct Test;
+
+trait SomeTrait {}
+
+$0$0impl SomeTrait for Test {}
+
+fn main() {}
+"#,
+ expect![[r#"
+ struct Test;
+
+ $0impl SomeTrait for Test {}
+
+ trait SomeTrait {}
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn test_cursor_at_item_end() {
+ check(
+ r#"
+enum FooBar {
+ Foo,
+ Bar,
+}$0$0
+
+fn main() {}
+"#,
+ expect![[r#"
+ fn main() {}
+
+ enum FooBar {
+ Foo,
+ Bar,
+ }$0
+ "#]],
+ Direction::Down,
+ );
+ check(
+ r#"
+struct Test;
+
+trait SomeTrait {}
+
+impl SomeTrait for Test {}$0$0
+
+fn main() {}
+"#,
+ expect![[r#"
+ struct Test;
+
+ impl SomeTrait for Test {}$0
+
+ trait SomeTrait {}
+
+ fn main() {}
+ "#]],
+ Direction::Up,
+ );
+ }
+
+ #[test]
+ fn handles_empty_file() {
+ check(r#"$0$0"#, expect![[r#""#]], Direction::Up);
+ }
+}
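From the outside this feature is reached through `Analysis::move_item`, which returns a `TextEdit` (with a `$0` snippet cursor) or `None` when there is nowhere to move. A minimal usage sketch (not part of the diff; it assumes `Analysis`, `Direction`, and `FileRange` are re-exported from the crate root, as in upstream rust-analyzer):

use ide::{Analysis, Direction, FileRange};
use syntax::{TextRange, TextSize};

fn main() {
    let text = "fn main() {}\n\nfn foo() {}\n".to_string();
    let (analysis, file_id) = Analysis::from_single_file(text.clone());

    // An empty selection inside `foo` (offset 17), like the `$0$0` markers in the tests.
    let range = FileRange { file_id, range: TextRange::empty(TextSize::from(17)) };

    if let Some(edit) = analysis.move_item(range, Direction::Up).unwrap() {
        let mut moved = text;
        edit.apply(&mut moved);
        // `fn foo() {}` now precedes `fn main() {}`, with a `$0` cursor in the snippet.
        println!("{moved}");
    }
}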
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
new file mode 100644
index 000000000..9f049e298
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -0,0 +1,623 @@
+//! See [`NavigationTarget`].
+
+use std::fmt;
+
+use either::Either;
+use hir::{
+ symbols::FileSymbol, AssocItem, Documentation, FieldSource, HasAttrs, HasSource, HirDisplay,
+ InFile, ModuleSource, Semantics,
+};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ SymbolKind,
+};
+use ide_db::{defs::Definition, RootDatabase};
+use stdx::never;
+use syntax::{
+ ast::{self, HasName},
+ match_ast, AstNode, SmolStr, SyntaxNode, TextRange,
+};
+
+/// `NavigationTarget` represents an element in the editor's UI which you can
+/// click on to navigate to a particular piece of code.
+///
+/// Typically, a `NavigationTarget` corresponds to some element in the source
+/// code, like a function or a struct, but this is not strictly required.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct NavigationTarget {
+ pub file_id: FileId,
+ /// Range which encompasses the whole element.
+ ///
+ /// Should include body, doc comments, attributes, etc.
+ ///
+ /// Clients should use this range to answer the "is the cursor inside the
+ /// element?" question.
+ pub full_range: TextRange,
+ /// A "most interesting" range within the `full_range`.
+ ///
+ /// Typically, `full_range` is the whole syntax node, including doc
+ /// comments, and `focus_range` is the range of the identifier.
+ ///
+ /// Clients should place the cursor on this range when navigating to this target.
+ pub focus_range: Option<TextRange>,
+ pub name: SmolStr,
+ pub kind: Option<SymbolKind>,
+ pub container_name: Option<SmolStr>,
+ pub description: Option<String>,
+ pub docs: Option<Documentation>,
+}
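The `full_range`/`focus_range` contract documented above is easy to pin down with a tiny sketch (not part of the diff): for `fn foo() {}` the full range covers the whole item, the focus range covers just the name, and the focus range always lies inside the full range.

use syntax::{TextRange, TextSize};

fn main() {
    // For `fn foo() {}`: the whole item spans 0..11, the name `foo` spans 3..6.
    let full_range = TextRange::new(TextSize::from(0), TextSize::from(11));
    let focus_range = TextRange::new(TextSize::from(3), TextSize::from(6));

    // Clients hit-test the cursor against `full_range` but place it on `focus_range`.
    assert!(full_range.contains_range(focus_range));
}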
+
+impl fmt::Debug for NavigationTarget {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut f = f.debug_struct("NavigationTarget");
+ macro_rules! opt {
+ ($($name:ident)*) => {$(
+ if let Some(it) = &self.$name {
+ f.field(stringify!($name), it);
+ }
+ )*}
+ }
+ f.field("file_id", &self.file_id).field("full_range", &self.full_range);
+ opt!(focus_range);
+ f.field("name", &self.name);
+ opt!(kind container_name description docs);
+ f.finish()
+ }
+}
+
+pub(crate) trait ToNav {
+ fn to_nav(&self, db: &RootDatabase) -> NavigationTarget;
+}
+
+pub(crate) trait TryToNav {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget>;
+}
+
+impl<T: TryToNav, U: TryToNav> TryToNav for Either<T, U> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ match self {
+ Either::Left(it) => it.try_to_nav(db),
+ Either::Right(it) => it.try_to_nav(db),
+ }
+ }
+}
+
+impl NavigationTarget {
+ pub fn focus_or_full_range(&self) -> TextRange {
+ self.focus_range.unwrap_or(self.full_range)
+ }
+
+ pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
+ let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
+ if let Some(src @ InFile { value, .. }) = &module.declaration_source(db) {
+ let FileRange { file_id, range: full_range } = src.syntax().original_file_range(db);
+ let focus_range =
+ value.name().and_then(|it| orig_focus_range(db, src.file_id, it.syntax()));
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ name,
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+ res.docs = module.attrs(db).docs();
+ res.description = Some(module.display(db).to_string());
+ return res;
+ }
+ module.to_nav(db)
+ }
+
+ #[cfg(test)]
+ pub(crate) fn debug_render(&self) -> String {
+ let mut buf = format!(
+ "{} {:?} {:?} {:?}",
+ self.name,
+ self.kind.unwrap(),
+ self.file_id,
+ self.full_range
+ );
+ if let Some(focus_range) = self.focus_range {
+ buf.push_str(&format!(" {:?}", focus_range))
+ }
+ if let Some(container_name) = &self.container_name {
+ buf.push_str(&format!(" {}", container_name))
+ }
+ buf
+ }
+
+ /// Allows a `NavigationTarget` to be created from an `ast::HasName` node.
+ pub(crate) fn from_named(
+ db: &RootDatabase,
+ node @ InFile { file_id, value }: InFile<&dyn ast::HasName>,
+ kind: SymbolKind,
+ ) -> NavigationTarget {
+ let name = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
+ let focus_range = value.name().and_then(|it| orig_focus_range(db, file_id, it.syntax()));
+ let FileRange { file_id, range } = node.map(|it| it.syntax()).original_file_range(db);
+
+ NavigationTarget::from_syntax(file_id, name, focus_range, range, kind)
+ }
+
+ fn from_syntax(
+ file_id: FileId,
+ name: SmolStr,
+ focus_range: Option<TextRange>,
+ full_range: TextRange,
+ kind: SymbolKind,
+ ) -> NavigationTarget {
+ NavigationTarget {
+ file_id,
+ name,
+ kind: Some(kind),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ }
+ }
+}
+
+impl TryToNav for FileSymbol {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let full_range = self.loc.original_range(db)?;
+ let name_range = self.loc.original_name_range(db)?;
+
+ Some(NavigationTarget {
+ file_id: full_range.file_id,
+ name: self.name.clone(),
+ kind: Some(self.kind.into()),
+ full_range: full_range.range,
+ focus_range: Some(name_range.range),
+ container_name: self.container_name.clone(),
+ description: description_from_symbol(db, self),
+ docs: None,
+ })
+ }
+}
+
+impl TryToNav for Definition {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ match self {
+ Definition::Local(it) => Some(it.to_nav(db)),
+ Definition::Label(it) => Some(it.to_nav(db)),
+ Definition::Module(it) => Some(it.to_nav(db)),
+ Definition::Macro(it) => it.try_to_nav(db),
+ Definition::Field(it) => it.try_to_nav(db),
+ Definition::SelfType(it) => it.try_to_nav(db),
+ Definition::GenericParam(it) => it.try_to_nav(db),
+ Definition::Function(it) => it.try_to_nav(db),
+ Definition::Adt(it) => it.try_to_nav(db),
+ Definition::Variant(it) => it.try_to_nav(db),
+ Definition::Const(it) => it.try_to_nav(db),
+ Definition::Static(it) => it.try_to_nav(db),
+ Definition::Trait(it) => it.try_to_nav(db),
+ Definition::TypeAlias(it) => it.try_to_nav(db),
+ Definition::BuiltinType(_) => None,
+ Definition::ToolModule(_) => None,
+ Definition::BuiltinAttr(_) => None,
+ // FIXME: The focus range should be set to the helper declaration
+ Definition::DeriveHelper(it) => it.derive().try_to_nav(db),
+ }
+ }
+}
+
+impl TryToNav for hir::ModuleDef {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ match self {
+ hir::ModuleDef::Module(it) => Some(it.to_nav(db)),
+ hir::ModuleDef::Function(it) => it.try_to_nav(db),
+ hir::ModuleDef::Adt(it) => it.try_to_nav(db),
+ hir::ModuleDef::Variant(it) => it.try_to_nav(db),
+ hir::ModuleDef::Const(it) => it.try_to_nav(db),
+ hir::ModuleDef::Static(it) => it.try_to_nav(db),
+ hir::ModuleDef::Trait(it) => it.try_to_nav(db),
+ hir::ModuleDef::TypeAlias(it) => it.try_to_nav(db),
+ hir::ModuleDef::Macro(it) => it.try_to_nav(db),
+ hir::ModuleDef::BuiltinType(_) => None,
+ }
+ }
+}
+
+pub(crate) trait ToNavFromAst {
+ const KIND: SymbolKind;
+}
+impl ToNavFromAst for hir::Function {
+ const KIND: SymbolKind = SymbolKind::Function;
+}
+impl ToNavFromAst for hir::Const {
+ const KIND: SymbolKind = SymbolKind::Const;
+}
+impl ToNavFromAst for hir::Static {
+ const KIND: SymbolKind = SymbolKind::Static;
+}
+impl ToNavFromAst for hir::Struct {
+ const KIND: SymbolKind = SymbolKind::Struct;
+}
+impl ToNavFromAst for hir::Enum {
+ const KIND: SymbolKind = SymbolKind::Enum;
+}
+impl ToNavFromAst for hir::Variant {
+ const KIND: SymbolKind = SymbolKind::Variant;
+}
+impl ToNavFromAst for hir::Union {
+ const KIND: SymbolKind = SymbolKind::Union;
+}
+impl ToNavFromAst for hir::TypeAlias {
+ const KIND: SymbolKind = SymbolKind::TypeAlias;
+}
+impl ToNavFromAst for hir::Trait {
+ const KIND: SymbolKind = SymbolKind::Trait;
+}
+
+impl<D> TryToNav for D
+where
+ D: HasSource + ToNavFromAst + Copy + HasAttrs + HirDisplay,
+ D::Ast: ast::HasName,
+{
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let src = self.source(db)?;
+ let mut res = NavigationTarget::from_named(
+ db,
+ src.as_ref().map(|it| it as &dyn ast::HasName),
+ D::KIND,
+ );
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ Some(res)
+ }
+}
+
+impl ToNav for hir::Module {
+ fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ let InFile { file_id, value } = self.definition_source(db);
+
+ let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
+ let (syntax, focus) = match &value {
+ ModuleSource::SourceFile(node) => (node.syntax(), None),
+ ModuleSource::Module(node) => (
+ node.syntax(),
+ node.name().and_then(|it| orig_focus_range(db, file_id, it.syntax())),
+ ),
+ ModuleSource::BlockExpr(node) => (node.syntax(), None),
+ };
+ let FileRange { file_id, range: full_range } =
+ InFile::new(file_id, syntax).original_file_range(db);
+ NavigationTarget::from_syntax(file_id, name, focus, full_range, SymbolKind::Module)
+ }
+}
+
+impl TryToNav for hir::Impl {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let InFile { file_id, value } = self.source(db)?;
+ let derive_attr = self.is_builtin_derive(db);
+
+ let focus_range = if derive_attr.is_some() {
+ None
+ } else {
+ value.self_ty().and_then(|ty| orig_focus_range(db, file_id, ty.syntax()))
+ };
+
+ let FileRange { file_id, range: full_range } = match &derive_attr {
+ Some(attr) => attr.syntax().original_file_range(db),
+ None => InFile::new(file_id, value.syntax()).original_file_range(db),
+ };
+
+ Some(NavigationTarget::from_syntax(
+ file_id,
+ "impl".into(),
+ focus_range,
+ full_range,
+ SymbolKind::Impl,
+ ))
+ }
+}
+
+impl TryToNav for hir::Field {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let src = self.source(db)?;
+
+ let field_source = match &src.value {
+ FieldSource::Named(it) => {
+ let mut res =
+ NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field);
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res
+ }
+ FieldSource::Pos(it) => {
+ let FileRange { file_id, range } =
+ src.with_value(it.syntax()).original_file_range(db);
+ NavigationTarget::from_syntax(file_id, "".into(), None, range, SymbolKind::Field)
+ }
+ };
+ Some(field_source)
+ }
+}
+
+impl TryToNav for hir::Macro {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let src = self.source(db)?;
+ let name_owner: &dyn ast::HasName = match &src.value {
+ Either::Left(it) => it,
+ Either::Right(it) => it,
+ };
+ let mut res = NavigationTarget::from_named(
+ db,
+ src.as_ref().with_value(name_owner),
+ self.kind(db).into(),
+ );
+ res.docs = self.docs(db);
+ Some(res)
+ }
+}
+
+impl TryToNav for hir::Adt {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ match self {
+ hir::Adt::Struct(it) => it.try_to_nav(db),
+ hir::Adt::Union(it) => it.try_to_nav(db),
+ hir::Adt::Enum(it) => it.try_to_nav(db),
+ }
+ }
+}
+
+impl TryToNav for hir::AssocItem {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ match self {
+ AssocItem::Function(it) => it.try_to_nav(db),
+ AssocItem::Const(it) => it.try_to_nav(db),
+ AssocItem::TypeAlias(it) => it.try_to_nav(db),
+ }
+ }
+}
+
+impl TryToNav for hir::GenericParam {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ match self {
+ hir::GenericParam::TypeParam(it) => it.try_to_nav(db),
+ hir::GenericParam::ConstParam(it) => it.try_to_nav(db),
+ hir::GenericParam::LifetimeParam(it) => it.try_to_nav(db),
+ }
+ }
+}
+
+impl ToNav for hir::Local {
+ fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ let InFile { file_id, value } = self.source(db);
+ let (node, name) = match &value {
+ Either::Left(bind_pat) => (bind_pat.syntax(), bind_pat.name()),
+ Either::Right(it) => (it.syntax(), it.name()),
+ };
+ let focus_range = name.and_then(|it| orig_focus_range(db, file_id, it.syntax()));
+ let FileRange { file_id, range: full_range } =
+ InFile::new(file_id, node).original_file_range(db);
+
+ let name = self.name(db).to_smol_str();
+ let kind = if self.is_self(db) {
+ SymbolKind::SelfParam
+ } else if self.is_param(db) {
+ SymbolKind::ValueParam
+ } else {
+ SymbolKind::Local
+ };
+ NavigationTarget {
+ file_id,
+ name,
+ kind: Some(kind),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ }
+ }
+}
+
+impl ToNav for hir::Label {
+ fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ let InFile { file_id, value } = self.source(db);
+ let name = self.name(db).to_smol_str();
+
+ let range = |syntax: &_| InFile::new(file_id, syntax).original_file_range(db);
+ let FileRange { file_id, range: full_range } = range(value.syntax());
+ let focus_range = value.lifetime().map(|lt| range(lt.syntax()).range);
+
+ NavigationTarget {
+ file_id,
+ name,
+ kind: Some(SymbolKind::Label),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ }
+ }
+}
+
+impl TryToNav for hir::TypeParam {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let InFile { file_id, value } = self.merge().source(db)?;
+ let name = self.name(db).to_smol_str();
+
+ let value = match value {
+ Either::Left(ast::TypeOrConstParam::Type(x)) => Either::Left(x),
+ Either::Left(ast::TypeOrConstParam::Const(_)) => {
+ never!();
+ return None;
+ }
+ Either::Right(x) => Either::Right(x),
+ };
+
+ let range = |syntax: &_| InFile::new(file_id, syntax).original_file_range(db);
+ let focus_range = |syntax: &_| InFile::new(file_id, syntax).original_file_range_opt(db);
+ let FileRange { file_id, range: full_range } = match &value {
+ Either::Left(type_param) => range(type_param.syntax()),
+ Either::Right(trait_) => trait_
+ .name()
+ .and_then(|name| focus_range(name.syntax()))
+ .unwrap_or_else(|| range(trait_.syntax())),
+ };
+ let focus_range = value
+ .either(|it| it.name(), |it| it.name())
+ .and_then(|it| focus_range(it.syntax()))
+ .map(|it| it.range);
+ Some(NavigationTarget {
+ file_id,
+ name,
+ kind: Some(SymbolKind::TypeParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+ }
+}
+
+impl TryToNav for hir::TypeOrConstParam {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ self.split(db).try_to_nav(db)
+ }
+}
+
+impl TryToNav for hir::LifetimeParam {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let InFile { file_id, value } = self.source(db)?;
+ let name = self.name(db).to_smol_str();
+
+ let FileRange { file_id, range: full_range } =
+ InFile::new(file_id, value.syntax()).original_file_range(db);
+ Some(NavigationTarget {
+ file_id,
+ name,
+ kind: Some(SymbolKind::LifetimeParam),
+ full_range,
+ focus_range: Some(full_range),
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+ }
+}
+
+impl TryToNav for hir::ConstParam {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let InFile { file_id, value } = self.merge().source(db)?;
+ let name = self.name(db).to_smol_str();
+
+ let value = match value {
+ Either::Left(ast::TypeOrConstParam::Const(x)) => x,
+ _ => {
+ never!();
+ return None;
+ }
+ };
+
+ let focus_range = value.name().and_then(|it| orig_focus_range(db, file_id, it.syntax()));
+ let FileRange { file_id, range: full_range } =
+ InFile::new(file_id, value.syntax()).original_file_range(db);
+ Some(NavigationTarget {
+ file_id,
+ name,
+ kind: Some(SymbolKind::ConstParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+ }
+}
+
+/// Get a description of a symbol.
+///
+/// e.g. `struct Name`, `enum Name`, `fn Name`
+pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
+ let sema = Semantics::new(db);
+ let node = symbol.loc.syntax(&sema)?;
+
+ match_ast! {
+ match node {
+ ast::Fn(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Struct(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Enum(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Trait(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Module(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::TypeAlias(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Const(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Static(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::RecordField(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Variant(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ ast::Union(it) => sema.to_def(&it).map(|it| it.display(db).to_string()),
+ _ => None,
+ }
+ }
+}
+
+fn orig_focus_range(
+ db: &RootDatabase,
+ file_id: hir::HirFileId,
+ syntax: &SyntaxNode,
+) -> Option<TextRange> {
+ InFile::new(file_id, syntax).original_file_range_opt(db).map(|it| it.range)
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::expect;
+
+ use crate::{fixture, Query};
+
+ #[test]
+ fn test_nav_for_symbol() {
+ let (analysis, _) = fixture::file(
+ r#"
+enum FooInner { }
+fn foo() { enum FooInner { } }
+"#,
+ );
+
+ let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap();
+ expect![[r#"
+ [
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..17,
+ focus_range: 5..13,
+ name: "FooInner",
+ kind: Enum,
+ description: "enum FooInner",
+ },
+ NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 29..46,
+ focus_range: 34..42,
+ name: "FooInner",
+ kind: Enum,
+ container_name: "foo",
+ description: "enum FooInner",
+ },
+ ]
+ "#]]
+ .assert_debug_eq(&navs);
+ }
+
+ #[test]
+ fn test_world_symbols_are_case_sensitive() {
+ let (analysis, _) = fixture::file(
+ r#"
+fn foo() {}
+struct Foo;
+"#,
+ );
+
+ let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap();
+ assert_eq!(navs.len(), 2)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
new file mode 100644
index 000000000..9b1f48044
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -0,0 +1,167 @@
+use hir::Semantics;
+use ide_db::{
+ base_db::{CrateId, FileId, FilePosition},
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, AstNode},
+};
+
+use crate::NavigationTarget;
+
+// Feature: Parent Module
+//
+// Navigates to the parent module of the current module.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Locate parent module**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]
+
+/// This returns `Vec` because a module may be included from several places.
+pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+
+ let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
+
+    // If the cursor is literally on `mod foo`, go to the grandparent module instead.
+ if let Some(m) = &module {
+ if !m
+ .item_list()
+ .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset))
+ {
+ cov_mark::hit!(test_resolve_parent_module_on_module_decl);
+ module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast);
+ }
+ }
+
+ match module {
+ Some(module) => sema
+ .to_def(&module)
+ .into_iter()
+ .map(|module| NavigationTarget::from_module_to_decl(db, module))
+ .collect(),
+ None => sema
+ .to_module_defs(position.file_id)
+ .map(|module| NavigationTarget::from_module_to_decl(db, module))
+ .collect(),
+ }
+}
+
+/// Returns `Vec` for the same reason as `parent_module`
+pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
+ let sema = Semantics::new(db);
+ sema.to_module_defs(file_id).map(|module| module.krate().into()).unique().collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.parent_module(position).unwrap();
+ let navs = navs
+ .iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .collect::<Vec<_>>();
+ assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::<Vec<_>>(), navs);
+ }
+
+ #[test]
+ fn test_resolve_parent_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+ //^^^
+
+//- /foo.rs
+$0// empty
+"#,
+ );
+ }
+
+ #[test]
+ fn test_resolve_parent_module_on_module_decl() {
+ cov_mark::check!(test_resolve_parent_module_on_module_decl);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+ //^^^
+//- /foo.rs
+mod $0bar;
+
+//- /foo/bar.rs
+// empty
+"#,
+ );
+ }
+
+ #[test]
+ fn test_resolve_parent_module_for_inline() {
+ check(
+ r#"
+//- /lib.rs
+mod foo {
+ mod bar {
+ mod baz { $0 }
+ } //^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_resolve_multi_parent_module() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+ //^^^
+#[path = "foo.rs"]
+mod bar;
+ //^^^
+//- /foo.rs
+$0
+"#,
+ );
+ }
+
+ #[test]
+ fn test_resolve_crate_root() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+//- /foo.rs
+$0
+//- /main.rs
+mod foo;
+"#,
+ );
+ assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1);
+ }
+
+ #[test]
+ fn test_resolve_multi_parent_crate() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+//- /baz.rs
+$0
+//- /foo.rs crate:foo
+mod baz;
+//- /bar.rs crate:bar
+mod baz;
+"#,
+ );
+ assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
new file mode 100644
index 000000000..296270036
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
@@ -0,0 +1,158 @@
+//! rust-analyzer is lazy and doesn't compute anything unless asked. This is
+//! sometimes counterproductive when, for example, the first goto-definition
+//! request takes longer than expected to compute. This module implements
+//! prepopulation of various caches; it is not very sophisticated at the moment.
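+//!
+//! A minimal illustrative sketch (not part of the upstream sources) of driving
+//! the entry point below; the worker count and callback body are hypothetical:
+//!
+//! ```ignore
+//! parallel_prime_caches(&db, 4, &|progress| {
+//!     eprintln!("indexed {}/{} crates", progress.crates_done, progress.crates_total);
+//! });
+//! ```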
+mod topologic_sort;
+
+use std::time::Duration;
+
+use hir::db::DefDatabase;
+use ide_db::{
+ base_db::{
+ salsa::{Database, ParallelDatabase, Snapshot},
+ Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
+ },
+ FxHashSet, FxIndexMap,
+};
+
+use crate::RootDatabase;
+
+/// Progress of priming caches for many crates in parallel.
+#[derive(Debug)]
+pub struct ParallelPrimeCachesProgress {
+    /// The crates that we are currently priming.
+    pub crates_currently_indexing: Vec<String>,
+    /// The total number of crates we want to prime.
+    pub crates_total: usize,
+    /// The total number of crates that have finished priming.
+ pub crates_done: usize,
+}
+
+pub(crate) fn parallel_prime_caches(
+ db: &RootDatabase,
+ num_worker_threads: u8,
+ cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync),
+) {
+ let _p = profile::span("prime_caches");
+
+ let graph = db.crate_graph();
+ let mut crates_to_prime = {
+ let crate_ids = compute_crates_to_prime(db, &graph);
+
+ let mut builder = topologic_sort::TopologicalSortIter::builder();
+
+ for &crate_id in &crate_ids {
+ let crate_data = &graph[crate_id];
+ let dependencies = crate_data
+ .dependencies
+ .iter()
+ .map(|d| d.crate_id)
+ .filter(|i| crate_ids.contains(i));
+
+ builder.add(crate_id, dependencies);
+ }
+
+ builder.build()
+ };
+
+ enum ParallelPrimeCacheWorkerProgress {
+ BeginCrate { crate_id: CrateId, crate_name: String },
+ EndCrate { crate_id: CrateId },
+ }
+
+ let (work_sender, progress_receiver) = {
+ let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
+ let (work_sender, work_receiver) = crossbeam_channel::unbounded();
+ let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
+ while let Ok((crate_id, crate_name)) = work_receiver.recv() {
+ progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
+
+ // This also computes the DefMap
+ db.import_map(crate_id);
+
+ progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+
+ for _ in 0..num_worker_threads {
+ let worker = prime_caches_worker.clone();
+ let db = db.snapshot();
+ std::thread::spawn(move || Cancelled::catch(|| worker(db)));
+ }
+
+ (work_sender, progress_receiver)
+ };
+
+ let crates_total = crates_to_prime.pending();
+ let mut crates_done = 0;
+
+    // An index map is used to preserve ordering, so we can sort the progress report in order of
+    // "longest crate to index" first.
+ let mut crates_currently_indexing =
+ FxIndexMap::with_capacity_and_hasher(num_worker_threads as _, Default::default());
+
+ while crates_done < crates_total {
+ db.unwind_if_cancelled();
+
+ for crate_id in &mut crates_to_prime {
+ work_sender
+ .send((
+ crate_id,
+ graph[crate_id].display_name.as_deref().unwrap_or_default().to_string(),
+ ))
+ .ok();
+ }
+
+        // recv_timeout is somewhat of a hack: we need a way for this thread to regularly check
+        // whether the current salsa revision has been cancelled. Workers will only exit if they are
+        // processing a task that is cancelled, or if this thread exits and closes the work channel.
+ let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) {
+ Ok(p) => p,
+ Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
+ continue;
+ }
+ Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
+ // our workers may have died from a cancelled task, so we'll check and re-raise here.
+ db.unwind_if_cancelled();
+ break;
+ }
+ };
+ match worker_progress {
+ ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => {
+ crates_currently_indexing.insert(crate_id, crate_name);
+ }
+ ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
+ crates_currently_indexing.remove(&crate_id);
+ crates_to_prime.mark_done(crate_id);
+ crates_done += 1;
+ }
+ };
+
+ let progress = ParallelPrimeCachesProgress {
+ crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
+ crates_done,
+ crates_total,
+ };
+
+ cb(progress);
+ }
+}
+
+fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
+ // We're only interested in the workspace crates and the `ImportMap`s of their direct
+ // dependencies, though in practice the latter also compute the `DefMap`s.
+ // We don't prime transitive dependencies because they're generally not visible in
+ // the current workspace.
+ graph
+ .iter()
+ .filter(|&id| {
+ let file_id = graph[id].root_file_id;
+ let root_id = db.file_source_root(file_id);
+ !db.source_root(root_id).is_library
+ })
+ .flat_map(|id| graph[id].dependencies.iter().map(|krate| krate.crate_id))
+ .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs b/src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs
new file mode 100644
index 000000000..9c3ceedbb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/prime_caches/topologic_sort.rs
@@ -0,0 +1,98 @@
+//! Helper data structure to schedule work for parallel prime caches.
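+//!
+//! A minimal illustrative sketch (not part of the upstream sources) of how the
+//! builder and iterator fit together; the numeric ids are hypothetical:
+//!
+//! ```ignore
+//! let mut builder = TopologicalSortIter::builder();
+//! builder.add(1u32, std::iter::empty()); // no predecessors, ready immediately
+//! builder.add(2u32, std::iter::once(1)); // unblocked once item 1 is marked done
+//! let mut topo = builder.build();
+//! assert_eq!(topo.next(), Some(1));
+//! topo.mark_done(1);
+//! assert_eq!(topo.next(), Some(2));
+//! ```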
+use std::{collections::VecDeque, hash::Hash};
+
+use ide_db::FxHashMap;
+
+pub(crate) struct TopologicSortIterBuilder<T> {
+ nodes: FxHashMap<T, Entry<T>>,
+}
+
+impl<T> TopologicSortIterBuilder<T>
+where
+ T: Copy + Eq + PartialEq + Hash,
+{
+ fn new() -> Self {
+ Self { nodes: Default::default() }
+ }
+
+ fn get_or_create_entry(&mut self, item: T) -> &mut Entry<T> {
+ self.nodes.entry(item).or_default()
+ }
+
+ pub(crate) fn add(&mut self, item: T, predecessors: impl IntoIterator<Item = T>) {
+ let mut num_predecessors = 0;
+
+ for predecessor in predecessors.into_iter() {
+ self.get_or_create_entry(predecessor).successors.push(item);
+ num_predecessors += 1;
+ }
+
+ let entry = self.get_or_create_entry(item);
+ entry.num_predecessors += num_predecessors;
+ }
+
+ pub(crate) fn build(self) -> TopologicalSortIter<T> {
+ let ready = self
+ .nodes
+ .iter()
+ .filter_map(
+ |(item, entry)| if entry.num_predecessors == 0 { Some(*item) } else { None },
+ )
+ .collect();
+
+ TopologicalSortIter { nodes: self.nodes, ready }
+ }
+}
+
+pub(crate) struct TopologicalSortIter<T> {
+ ready: VecDeque<T>,
+ nodes: FxHashMap<T, Entry<T>>,
+}
+
+impl<T> TopologicalSortIter<T>
+where
+ T: Copy + Eq + PartialEq + Hash,
+{
+ pub(crate) fn builder() -> TopologicSortIterBuilder<T> {
+ TopologicSortIterBuilder::new()
+ }
+
+ pub(crate) fn pending(&self) -> usize {
+ self.nodes.len()
+ }
+
+ pub(crate) fn mark_done(&mut self, item: T) {
+ let entry = self.nodes.remove(&item).expect("invariant: unknown item marked as done");
+
+ for successor in entry.successors {
+ let succ_entry = self
+ .nodes
+ .get_mut(&successor)
+ .expect("invariant: unknown successor referenced by entry");
+
+ succ_entry.num_predecessors -= 1;
+ if succ_entry.num_predecessors == 0 {
+ self.ready.push_back(successor);
+ }
+ }
+ }
+}
+
+impl<T> Iterator for TopologicalSortIter<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.ready.pop_front()
+ }
+}
+
+struct Entry<T> {
+ successors: Vec<T>,
+ num_predecessors: usize,
+}
+
+impl<T> Default for Entry<T> {
+ fn default() -> Self {
+ Self { successors: Default::default(), num_predecessors: 0 }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
new file mode 100644
index 000000000..1a6beec18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -0,0 +1,1636 @@
+//! This module implements a reference search.
+//! First, the element at the cursor position must be either an `ast::Name`
+//! or `ast::NameRef`. If it's an `ast::NameRef`, at the classification step we
+//! try to resolve the direct tree parent of this element, otherwise we
+//! already have a definition and just need to get its HIR together with
+//! some information that is needed for further steps of searching.
+//! After that, we collect files that might contain references and look
+//! for text occurrences of the identifier. If there's an `ast::NameRef`
+//! at the index that the match starts at and its tree parent is
+//! resolved to the search element definition, we get a reference.
+
+use hir::{PathResolution, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ search::{ReferenceCategory, SearchScope, UsageSearchResult},
+ FxHashMap, RootDatabase,
+};
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, HasName},
+ match_ast, AstNode,
+ SyntaxKind::*,
+ SyntaxNode, TextRange, TextSize, T,
+};
+
+use crate::{FilePosition, NavigationTarget, TryToNav};
+
+#[derive(Debug, Clone)]
+pub struct ReferenceSearchResult {
+ pub declaration: Option<Declaration>,
+ pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
+}
+
+#[derive(Debug, Clone)]
+pub struct Declaration {
+ pub nav: NavigationTarget,
+ pub is_mut: bool,
+}
+
+// Feature: Find All References
+//
+// Shows all references of the item at the cursor location
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Shift+Alt+F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif[]
+pub(crate) fn find_all_refs(
+ sema: &Semantics<'_, RootDatabase>,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+) -> Option<Vec<ReferenceSearchResult>> {
+ let _p = profile::span("find_all_refs");
+ let syntax = sema.parse(position.file_id).syntax().clone();
+ let make_searcher = |literal_search: bool| {
+ move |def: Definition| {
+ let declaration = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .map(|nav| {
+ let decl_range = nav.focus_or_full_range();
+ Declaration {
+ is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
+ nav,
+ }
+ });
+ let mut usages =
+ def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
+
+ if literal_search {
+ retain_adt_literal_usages(&mut usages, def, sema);
+ }
+
+ let references = usages
+ .into_iter()
+ .map(|(file_id, refs)| {
+ (
+ file_id,
+ refs.into_iter()
+ .map(|file_ref| (file_ref.range, file_ref.category))
+ .collect(),
+ )
+ })
+ .collect();
+
+ ReferenceSearchResult { declaration, references }
+ }
+ };
+
+ match name_for_constructor_search(&syntax, position) {
+ Some(name) => {
+ let def = match NameClass::classify(sema, &name)? {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def: _, field_ref } => {
+ Definition::Field(field_ref)
+ }
+ };
+ Some(vec![make_searcher(true)(def)])
+ }
+ None => {
+ let search = make_searcher(false);
+ Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
+ }
+ }
+}
+
+pub(crate) fn find_defs<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ offset: TextSize,
+) -> Option<impl Iterator<Item = Definition> + 'a> {
+ let token = syntax.token_at_offset(offset).find(|t| {
+ matches!(
+ t.kind(),
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ )
+ });
+ token.map(|token| {
+ sema.descend_into_macros_with_same_text(token)
+ .into_iter()
+ .filter_map(|it| ast::NameLike::cast(it.parent()?))
+ .filter_map(move |name_like| {
+ let def = match name_like {
+ ast::NameLike::NameRef(name_ref) => {
+ match NameRefClass::classify(sema, &name_ref)? {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ }
+ }
+ ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ },
+ ast::NameLike::Lifetime(lifetime) => {
+ NameRefClass::classify_lifetime(sema, &lifetime)
+ .and_then(|class| match class {
+ NameRefClass::Definition(it) => Some(it),
+ _ => None,
+ })
+ .or_else(|| {
+ NameClass::classify_lifetime(sema, &lifetime)
+ .and_then(NameClass::defined)
+ })?
+ }
+ };
+ Some(def)
+ })
+ })
+}
+
+pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
+ match def {
+ Definition::Local(_) | Definition::Field(_) => {}
+ _ => return false,
+ };
+
+ match find_node_at_offset::<ast::LetStmt>(syntax, range.start()) {
+ Some(stmt) if stmt.initializer().is_some() => match stmt.pat() {
+ Some(ast::Pat::IdentPat(it)) => it.mut_token().is_some(),
+ _ => false,
+ },
+ _ => false,
+ }
+}
+
+/// Filter out all non-literal usages for adt-defs
+fn retain_adt_literal_usages(
+ usages: &mut UsageSearchResult,
+ def: Definition,
+ sema: &Semantics<'_, RootDatabase>,
+) {
+ let refs = usages.references.values_mut();
+ match def {
+ Definition::Adt(hir::Adt::Enum(enum_)) => {
+ refs.for_each(|it| {
+ it.retain(|reference| {
+ reference
+ .name
+ .as_name_ref()
+ .map_or(false, |name_ref| is_enum_lit_name_ref(sema, enum_, name_ref))
+ })
+ });
+ usages.references.retain(|_, it| !it.is_empty());
+ }
+ Definition::Adt(_) | Definition::Variant(_) => {
+ refs.for_each(|it| {
+ it.retain(|reference| reference.name.as_name_ref().map_or(false, is_lit_name_ref))
+ });
+ usages.references.retain(|_, it| !it.is_empty());
+ }
+ _ => {}
+ }
+}
+
+/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
+fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
+ let token = syntax.token_at_offset(position.offset).right_biased()?;
+ let token_parent = token.parent()?;
+ let kind = token.kind();
+ if kind == T![;] {
+ ast::Struct::cast(token_parent)
+ .filter(|struct_| struct_.field_list().is_none())
+ .and_then(|struct_| struct_.name())
+ } else if kind == T!['{'] {
+ match_ast! {
+ match token_parent {
+ ast::RecordFieldList(rfl) => match_ast! {
+ match (rfl.syntax().parent()?) {
+ ast::Variant(it) => it.name(),
+ ast::Struct(it) => it.name(),
+ ast::Union(it) => it.name(),
+ _ => None,
+ }
+ },
+ ast::VariantList(vl) => ast::Enum::cast(vl.syntax().parent()?)?.name(),
+ _ => None,
+ }
+ }
+ } else if kind == T!['('] {
+ let tfl = ast::TupleFieldList::cast(token_parent)?;
+ match_ast! {
+ match (tfl.syntax().parent()?) {
+ ast::Variant(it) => it.name(),
+ ast::Struct(it) => it.name(),
+ _ => None,
+ }
+ }
+ } else {
+ None
+ }
+}
+
+fn is_enum_lit_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ enum_: hir::Enum,
+ name_ref: &ast::NameRef,
+) -> bool {
+ let path_is_variant_of_enum = |path: ast::Path| {
+ matches!(
+ sema.resolve_path(&path),
+ Some(PathResolution::Def(hir::ModuleDef::Variant(variant)))
+ if variant.parent_enum(sema.db) == enum_
+ )
+ };
+ name_ref
+ .syntax()
+ .ancestors()
+ .find_map(|ancestor| {
+ match_ast! {
+ match ancestor {
+ ast::PathExpr(path_expr) => path_expr.path().map(path_is_variant_of_enum),
+ ast::RecordExpr(record_expr) => record_expr.path().map(path_is_variant_of_enum),
+ _ => None,
+ }
+ }
+ })
+ .unwrap_or(false)
+}
+
+fn path_ends_with(path: Option<ast::Path>, name_ref: &ast::NameRef) -> bool {
+ path.and_then(|path| path.segment())
+ .and_then(|segment| segment.name_ref())
+ .map_or(false, |segment| segment == *name_ref)
+}
+
+fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
+ name_ref.syntax().ancestors().find_map(|ancestor| {
+ match_ast! {
+ match ancestor {
+ ast::PathExpr(path_expr) => Some(path_ends_with(path_expr.path(), name_ref)),
+ ast::RecordExpr(record_expr) => Some(path_ends_with(record_expr.path(), name_ref)),
+ _ => None,
+ }
+ }
+ }).unwrap_or(false)
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::{base_db::FileId, search::ReferenceCategory};
+ use stdx::format_to;
+
+ use crate::{fixture, SearchScope};
+
+ #[test]
+ fn test_struct_literal_after_space() {
+ check(
+ r#"
+struct Foo $0{
+ a: i32,
+}
+impl Foo {
+ fn f() -> i32 { 42 }
+}
+fn main() {
+ let f: Foo;
+ f = Foo {a: Foo::f()};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..26 7..10
+
+ FileId(0) 101..104
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_before_space() {
+ check(
+ r#"
+struct Foo$0 {}
+ fn main() {
+ let f: Foo;
+ f = Foo {};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..13 7..10
+
+ FileId(0) 41..44
+ FileId(0) 54..57
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_with_generic_type() {
+ check(
+ r#"
+struct Foo<T> $0{}
+ fn main() {
+ let f: Foo::<i32>;
+ f = Foo {};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..16 7..10
+
+ FileId(0) 64..67
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_for_tuple() {
+ check(
+ r#"
+struct Foo$0(i32);
+
+fn main() {
+ let f: Foo;
+ f = Foo(1);
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..16 7..10
+
+ FileId(0) 54..57
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_for_union() {
+ check(
+ r#"
+union Foo $0{
+ x: u32
+}
+
+fn main() {
+ let f: Foo;
+ f = Foo { x: 1 };
+}
+"#,
+ expect![[r#"
+ Foo Union FileId(0) 0..24 6..9
+
+ FileId(0) 62..65
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_after_space() {
+ check(
+ r#"
+enum Foo $0{
+ A,
+ B(),
+ C{},
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A;
+ f = Foo::B();
+ f = Foo::C{};
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..37 5..8
+
+ FileId(0) 74..77
+ FileId(0) 90..93
+ FileId(0) 108..111
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_variant_record_after_space() {
+ check(
+ r#"
+enum Foo {
+ A $0{ n: i32 },
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::B;
+ f = Foo::A { n: 92 };
+}
+"#,
+ expect![[r#"
+ A Variant FileId(0) 15..27 15..16
+
+ FileId(0) 95..96
+ "#]],
+ );
+ }
+ #[test]
+ fn test_variant_tuple_before_paren() {
+ check(
+ r#"
+enum Foo {
+ A$0(i32),
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::B;
+ f = Foo::A(92);
+}
+"#,
+ expect![[r#"
+ A Variant FileId(0) 15..21 15..16
+
+ FileId(0) 89..90
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_before_space() {
+ check(
+ r#"
+enum Foo$0 {
+ A,
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A;
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..26 5..8
+
+ FileId(0) 50..53
+ FileId(0) 63..66
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_with_generic_type() {
+ check(
+ r#"
+enum Foo<T> $0{
+ A(T),
+ B,
+}
+fn main() {
+ let f: Foo<i8>;
+ f = Foo::A(1);
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..32 5..8
+
+ FileId(0) 73..76
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_for_tuple() {
+ check(
+ r#"
+enum Foo$0{
+ A(i8),
+ B(i8),
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A(1);
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..33 5..8
+
+ FileId(0) 70..73
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_local() {
+ check(
+ r#"
+fn main() {
+ let mut i = 1;
+ let j = 1;
+ i = i$0 + j;
+
+ {
+ i = 0;
+ }
+
+ i = 5;
+}"#,
+ expect![[r#"
+ i Local FileId(0) 20..25 24..25 Write
+
+ FileId(0) 50..51 Write
+ FileId(0) 54..55 Read
+ FileId(0) 76..77 Write
+ FileId(0) 94..95 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_filters_by_range() {
+ check(
+ r#"
+fn foo() {
+ let spam$0 = 92;
+ spam + spam
+}
+fn bar() {
+ let spam = 92;
+ spam + spam
+}
+"#,
+ expect![[r#"
+ spam Local FileId(0) 19..23 19..23
+
+ FileId(0) 34..38 Read
+ FileId(0) 41..45 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_param_inside() {
+ check(
+ r#"
+fn foo(i : u32) -> u32 { i$0 }
+"#,
+ expect![[r#"
+ i ValueParam FileId(0) 7..8 7..8
+
+ FileId(0) 25..26 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_fn_param() {
+ check(
+ r#"
+fn foo(i$0 : u32) -> u32 { i }
+"#,
+ expect![[r#"
+ i ValueParam FileId(0) 7..8 7..8
+
+ FileId(0) 25..26 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_field_name() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ pub spam$0: u32,
+}
+
+fn main(s: Foo) {
+ let f = s.spam;
+}
+"#,
+ expect![[r#"
+ spam Field FileId(0) 17..30 21..25
+
+ FileId(0) 67..71 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_impl_item_name() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn f$0(&self) { }
+}
+"#,
+ expect![[r#"
+ f Function FileId(0) 27..43 30..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_name() {
+ check(
+ r#"
+enum Foo {
+ A,
+ B$0,
+ C,
+}
+"#,
+ expect![[r#"
+ B Variant FileId(0) 22..23 22..23
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_field() {
+ check(
+ r#"
+enum Foo {
+ A,
+ B { field$0: u8 },
+ C,
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 26..35 26..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_two_modules() {
+ check(
+ r#"
+//- /lib.rs
+pub mod foo;
+pub mod bar;
+
+fn f() {
+ let i = foo::Foo { n: 5 };
+}
+
+//- /foo.rs
+use crate::bar;
+
+pub struct Foo {
+ pub n: u32,
+}
+
+fn f() {
+ let i = bar::Bar { n: 5 };
+}
+
+//- /bar.rs
+use crate::foo;
+
+pub struct Bar {
+ pub n: u32,
+}
+
+fn f() {
+ let i = foo::Foo$0 { n: 5 };
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(1) 17..51 28..31
+
+ FileId(0) 53..56
+ FileId(2) 79..82
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo$0;
+
+use foo::Foo;
+
+fn f() {
+ let i = Foo { n: 5 };
+}
+
+//- /foo.rs
+pub struct Foo {
+ pub n: u32,
+}
+"#,
+ expect![[r#"
+ foo Module FileId(0) 0..8 4..7
+
+ FileId(0) 14..17
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module_on_self() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+use self$0;
+"#,
+ expect![[r#"
+ foo Module FileId(0) 0..8 4..7
+
+ FileId(1) 4..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module_on_self_crate_root() {
+ check(
+ r#"
+//- /lib.rs
+use self$0;
+"#,
+ expect![[r#"
+ Module FileId(0) 0..10
+
+ FileId(0) 4..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_super_mod_vis() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+mod some;
+use some::Foo;
+
+fn f() {
+ let i = Foo { n: 5 };
+}
+
+//- /foo/some.rs
+pub(super) struct Foo$0 {
+ pub n: u32,
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(2) 0..41 18..21
+
+ FileId(1) 20..23
+ FileId(1) 47..50
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_with_scope() {
+ let code = r#"
+ //- /lib.rs
+ mod foo;
+ mod bar;
+
+ pub fn quux$0() {}
+
+ //- /foo.rs
+ fn f() { super::quux(); }
+
+ //- /bar.rs
+ fn f() { super::quux(); }
+ "#;
+
+ check_with_scope(
+ code,
+ None,
+ expect![[r#"
+ quux Function FileId(0) 19..35 26..30
+
+ FileId(1) 16..20
+ FileId(2) 16..20
+ "#]],
+ );
+
+ check_with_scope(
+ code,
+ Some(SearchScope::single_file(FileId(2))),
+ expect![[r#"
+ quux Function FileId(0) 19..35 26..30
+
+ FileId(2) 16..20
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_macro_def() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! m1$0 { () => (()) }
+
+fn foo() {
+ m1();
+ m1();
+}
+"#,
+ expect![[r#"
+ m1 Macro FileId(0) 0..46 29..31
+
+ FileId(0) 63..65
+ FileId(0) 73..75
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_read_write() {
+ check(
+ r#"
+fn foo() {
+ let mut i$0 = 0;
+ i = i + 1;
+}
+"#,
+ expect![[r#"
+ i Local FileId(0) 19..24 23..24 Write
+
+ FileId(0) 34..35 Write
+ FileId(0) 38..39 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_field_read_write() {
+ check(
+ r#"
+struct S {
+ f: u32,
+}
+
+fn foo() {
+ let mut s = S{f: 0};
+ s.f$0 = 0;
+}
+"#,
+ expect![[r#"
+ f Field FileId(0) 15..21 15..16
+
+ FileId(0) 55..56 Read
+ FileId(0) 68..69 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_decl_no_write() {
+ check(
+ r#"
+fn foo() {
+ let i$0;
+ i = 1;
+}
+"#,
+ expect![[r#"
+ i Local FileId(0) 19..20 19..20
+
+ FileId(0) 26..27 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_struct_function_refs_outside_module() {
+ check(
+ r#"
+mod foo {
+ pub struct Foo;
+
+ impl Foo {
+ pub fn new$0() -> Foo { Foo }
+ }
+}
+
+fn main() {
+ let _f = foo::Foo::new();
+}
+"#,
+ expect![[r#"
+ new Function FileId(0) 54..81 61..64
+
+ FileId(0) 126..129
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_nested_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo { mod bar; }
+
+fn f$0() {}
+
+//- /foo/bar.rs
+use crate::f;
+
+fn g() { f(); }
+"#,
+ expect![[r#"
+ f Function FileId(0) 22..31 25..26
+
+ FileId(1) 11..12
+ FileId(1) 24..25
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_struct_pat() {
+ check(
+ r#"
+struct S {
+ field$0: u8,
+}
+
+fn f(s: S) {
+ match s {
+ S { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 15..24 15..20
+
+ FileId(0) 68..73 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_pat() {
+ check(
+ r#"
+enum En {
+ Variant {
+ field$0: u8,
+ }
+}
+
+fn f(e: En) {
+ match e {
+ En::Variant { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 32..41 32..37
+
+ FileId(0) 102..107 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_privacy() {
+ check(
+ r#"
+mod m {
+ pub enum En {
+ Variant {
+ field$0: u8,
+ }
+ }
+}
+
+fn f() -> m::En {
+ m::En::Variant { field: 0 }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 56..65 56..61
+
+ FileId(0) 125..130 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_self_refs() {
+ check(
+ r#"
+struct Foo { bar: i32 }
+
+impl Foo {
+ fn foo(self) {
+ let x = self$0.bar;
+ if true {
+ let _ = match () {
+ () => self,
+ };
+ }
+ }
+}
+"#,
+ expect![[r#"
+ self SelfParam FileId(0) 47..51 47..51
+
+ FileId(0) 71..75 Read
+ FileId(0) 152..156 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_self_refs_decl() {
+ check(
+ r#"
+struct Foo { bar: i32 }
+
+impl Foo {
+ fn foo(self$0) {
+ self;
+ }
+}
+"#,
+ expect![[r#"
+ self SelfParam FileId(0) 47..51 47..51
+
+ FileId(0) 63..67 Read
+ "#]],
+ );
+ }
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ check_with_scope(ra_fixture, None, expect)
+ }
+
+ fn check_with_scope(ra_fixture: &str, search_scope: Option<SearchScope>, expect: Expect) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+ let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
+
+ let mut actual = String::new();
+ for refs in refs {
+ actual += "\n\n";
+
+ if let Some(decl) = refs.declaration {
+ format_to!(actual, "{}", decl.nav.debug_render());
+ if decl.is_mut {
+ format_to!(actual, " {:?}", ReferenceCategory::Write)
+ }
+ actual += "\n\n";
+ }
+
+ for (file_id, references) in &refs.references {
+ for (range, access) in references {
+ format_to!(actual, "{:?} {:?}", file_id, range);
+ if let Some(access) = access {
+ format_to!(actual, " {:?}", access);
+ }
+ actual += "\n";
+ }
+ }
+
+ if refs.references.is_empty() {
+ actual += "(no references)\n";
+ }
+ }
+ expect.assert_eq(actual.trim_start())
+ }
+
+ #[test]
+ fn test_find_lifetimes_function() {
+ check(
+ r#"
+trait Foo<'a> {}
+impl<'a> Foo<'a> for &'a () {}
+fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
+ fn bar<'a>(_: &'a ()) {}
+ x
+}
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 55..57 55..57
+
+ FileId(0) 63..65
+ FileId(0) 71..73
+ FileId(0) 82..84
+ FileId(0) 95..97
+ FileId(0) 106..108
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_lifetimes_type_alias() {
+ check(
+ r#"
+type Foo<'a, T> where T: 'a$0 = &'a T;
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 9..11 9..11
+
+ FileId(0) 25..27
+ FileId(0) 31..33
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_lifetimes_trait_impl() {
+ check(
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'a> Foo<'a> for &'a () {
+ fn foo() -> &'a$0 () {
+ unimplemented!()
+ }
+}
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 47..49 47..49
+
+ FileId(0) 55..57
+ FileId(0) 64..66
+ FileId(0) 89..91
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_map_range_to_original() {
+ check(
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a$0 = "test";
+ foo!(a);
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 59..60 59..60
+
+ FileId(0) 80..81 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_map_range_to_original_ref() {
+ check(
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a = "test";
+ foo!(a$0);
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 59..60 59..60
+
+ FileId(0) 80..81 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_labels() {
+ check(
+ r#"
+fn foo<'a>() -> &'a () {
+ 'a: loop {
+ 'b: loop {
+ continue 'a$0;
+ }
+ break 'a;
+ }
+}
+"#,
+ expect![[r#"
+ 'a Label FileId(0) 29..32 29..31
+
+ FileId(0) 80..82
+ FileId(0) 108..110
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_const_param() {
+ check(
+ r#"
+fn foo<const FOO$0: usize>() -> usize {
+ FOO
+}
+"#,
+ expect![[r#"
+ FOO ConstParam FileId(0) 7..23 13..16
+
+ FileId(0) 42..45
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait() {
+ check(
+ r#"
+trait Foo$0 where Self: {}
+
+impl Foo for () {}
+"#,
+ expect![[r#"
+ Foo Trait FileId(0) 0..24 6..9
+
+ FileId(0) 31..34
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait_self() {
+ check(
+ r#"
+trait Foo where Self$0 {
+ fn f() -> Self;
+}
+
+impl Foo for () {}
+"#,
+ expect![[r#"
+ Self TypeParam FileId(0) 6..9 6..9
+
+ FileId(0) 16..20
+ FileId(0) 37..41
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_self_ty() {
+ check(
+ r#"
+ struct $0Foo;
+
+ impl Foo where Self: {
+ fn f() -> Self;
+ }
+ "#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..11 7..10
+
+ FileId(0) 18..21
+ FileId(0) 28..32
+ FileId(0) 50..54
+ "#]],
+ );
+ check(
+ r#"
+struct Foo;
+
+impl Foo where Self: {
+ fn f() -> Self$0;
+}
+"#,
+ expect![[r#"
+ impl Impl FileId(0) 13..57 18..21
+
+ FileId(0) 18..21
+ FileId(0) 28..32
+ FileId(0) 50..54
+ "#]],
+ );
+ }
+ #[test]
+ fn test_self_variant_with_payload() {
+ check(
+ r#"
+enum Foo { Bar() }
+
+impl Foo {
+ fn foo(self) {
+ match self {
+ Self::Bar$0() => (),
+ }
+ }
+}
+
+"#,
+ expect![[r#"
+ Bar Variant FileId(0) 11..16 11..14
+
+ FileId(0) 89..92
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_attr_differs_from_fn_with_same_name() {
+ check(
+ r#"
+#[test]
+fn test$0() {
+ test();
+}
+"#,
+ expect![[r#"
+ test Function FileId(0) 0..33 11..15
+
+ FileId(0) 24..28
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_const_in_pattern() {
+ check(
+ r#"
+const A$0: i32 = 42;
+
+fn main() {
+ match A {
+ A => (),
+ _ => (),
+ }
+ if let A = A {}
+}
+"#,
+ expect![[r#"
+ A Const FileId(0) 0..18 6..7
+
+ FileId(0) 42..43
+ FileId(0) 54..55
+ FileId(0) 97..98
+ FileId(0) 101..102
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_primitives() {
+ check(
+ r#"
+fn foo(_: bool) -> bo$0ol { true }
+"#,
+ expect![[r#"
+ FileId(0) 10..14
+ FileId(0) 19..23
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_transitive() {
+ check(
+ r#"
+//- /level3.rs new_source_root:local crate:level3
+pub struct Fo$0o;
+//- /level2.rs new_source_root:local crate:level2 deps:level3
+pub use level3::Foo;
+//- /level1.rs new_source_root:local crate:level1 deps:level2
+pub use level2::Foo;
+//- /level0.rs new_source_root:local crate:level0 deps:level1
+pub use level1::Foo;
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..15 11..14
+
+ FileId(1) 16..19
+ FileId(2) 16..19
+ FileId(3) 16..19
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_decl_macro_references() {
+ check(
+ r#"
+//- /lib.rs crate:lib
+#[macro_use]
+mod qux;
+mod bar;
+
+pub use self::foo;
+//- /qux.rs
+#[macro_export]
+macro_rules! foo$0 {
+ () => {struct Foo;};
+}
+//- /bar.rs
+foo!();
+//- /other.rs crate:other deps:lib new_source_root:local
+lib::foo!();
+"#,
+ expect![[r#"
+ foo Macro FileId(1) 0..61 29..32
+
+ FileId(0) 46..49
+ FileId(2) 0..3
+ FileId(3) 5..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_doesnt_reference_attribute_on_call() {
+ check(
+ r#"
+macro_rules! m {
+ () => {};
+}
+
+#[proc_macro_test::attr_noop]
+m$0!();
+
+"#,
+ expect![[r#"
+ m Macro FileId(0) 0..32 13..14
+
+ FileId(0) 64..65
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multi_def() {
+ check(
+ r#"
+macro_rules! m {
+ ($name:ident) => {
+ mod module {
+ pub fn $name() {}
+ }
+
+ pub fn $name() {}
+ }
+}
+
+m!(func$0);
+
+fn f() {
+ func();
+ module::func();
+}
+ "#,
+ expect![[r#"
+ func Function FileId(0) 137..146 140..144
+
+ FileId(0) 161..165
+
+
+ func Function FileId(0) 137..146 140..144
+
+ FileId(0) 181..185
+ "#]],
+ )
+ }
+
+ #[test]
+ fn attr_expanded() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn func$0() {
+ func();
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 25..50 28..32
+
+ FileId(0) 41..45
+ "#]],
+ )
+ }
+
+ #[test]
+ fn attr_assoc_item() {
+ check(
+ r#"
+//- proc_macros: identity
+
+trait Trait {
+ #[proc_macros::identity]
+ fn func() {
+ Self::func$0();
+ }
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 48..87 51..55
+
+ FileId(0) 74..78
+ "#]],
+ )
+ }
+
+ // FIXME: import is classified as function
+ #[test]
+ fn attr() {
+ check(
+ r#"
+//- proc_macros: identity
+use proc_macros::identity;
+
+#[proc_macros::$0identity]
+fn func() {}
+"#,
+ expect![[r#"
+ identity Attribute FileId(1) 1..107 32..40
+
+ FileId(0) 43..51
+ "#]],
+ );
+ check(
+ r#"
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+fn func$0() {}
+"#,
+ expect![[r#"
+ func Attribute FileId(0) 28..64 55..59
+
+ (no references)
+ "#]],
+ );
+ }
+
+ // FIXME: import is classified as function
+ #[test]
+ fn proc_macro() {
+ check(
+ r#"
+//- proc_macros: mirror
+use proc_macros::mirror;
+
+mirror$0! {}
+"#,
+ expect![[r#"
+ mirror Macro FileId(1) 1..77 22..28
+
+ FileId(0) 26..32
+ "#]],
+ )
+ }
+
+ #[test]
+ fn derive() {
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+use proc_macros::DeriveIdentity;
+
+#[derive(proc_macros::DeriveIdentity$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ derive_identity Derive FileId(2) 1..107 45..60
+
+ FileId(0) 17..31
+ FileId(0) 56..70
+ "#]],
+ );
+ check(
+ r#"
+#![crate_type="proc-macro"]
+#[proc_macro_derive(Derive, attributes(x))]
+pub fn deri$0ve(_stream: TokenStream) -> TokenStream {}
+"#,
+ expect![[r#"
+ derive Derive FileId(0) 28..125 79..85
+
+ (no references)
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
new file mode 100644
index 000000000..fe44856dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -0,0 +1,2252 @@
+//! Renaming functionality.
+//!
+//! This is mostly a front-end for [`ide_db::rename`], but it also includes the
+//! tests. This module also implements a couple of magic tricks, like renaming
+//! from `self` and to `self` (to switch between an associated function and a method).
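+//!
+//! A minimal illustrative sketch (hypothetical code, not from the upstream
+//! sources) of the `self` trick handled by `rename_self_to_param` below:
+//!
+//! ```ignore
+//! // before: a method taking `&self`
+//! impl S { fn f(&self) { self; } }
+//! // after renaming `self` -> `this`: an associated function taking `this: &S`
+//! impl S { fn f(this: &S) { this; } }
+//! ```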
+
+use hir::{AsAssocItem, InFile, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ rename::{bail, format_err, source_edit_from_references, IdentifierKind},
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::{always, never};
+use syntax::{ast, AstNode, SyntaxNode};
+
+use text_edit::TextEdit;
+
+use crate::{FilePosition, RangeInfo, SourceChange};
+
+pub use ide_db::rename::RenameError;
+
+type RenameResult<T> = Result<T, RenameError>;
+
+/// Prepares a rename. The sole job of this function is to return the TextRange of the thing that is
+/// being targeted for a rename.
+pub(crate) fn prepare_rename(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> RenameResult<RangeInfo<()>> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax();
+
+ let res = find_definitions(&sema, syntax, position)?
+ .map(|(name_like, def)| {
+ // ensure all ranges are valid
+
+ if def.range_for_rename(&sema).is_none() {
+ bail!("No references found at position")
+ }
+ let frange = sema.original_range(name_like.syntax());
+
+ always!(
+ frange.range.contains_inclusive(position.offset)
+ && frange.file_id == position.file_id
+ );
+ Ok(frange.range)
+ })
+ .reduce(|acc, cur| match (acc, cur) {
+ // ensure all ranges are the same
+ (Ok(acc_inner), Ok(cur_inner)) if acc_inner == cur_inner => Ok(acc_inner),
+ (Err(e), _) => Err(e),
+ _ => bail!("inconsistent text range"),
+ });
+
+ match res {
+ // ensure at least one definition was found
+ Some(res) => res.map(|range| RangeInfo::new(range, ())),
+ None => bail!("No references found at position"),
+ }
+}
+
+// Feature: Rename
+//
+// Renames the item below the cursor and all of its references
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[F2]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif[]
+pub(crate) fn rename(
+ db: &RootDatabase,
+ position: FilePosition,
+ new_name: &str,
+) -> RenameResult<SourceChange> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax();
+
+ let defs = find_definitions(&sema, syntax, position)?;
+
+ let ops: RenameResult<Vec<SourceChange>> = defs
+ .map(|(_namelike, def)| {
+ if let Definition::Local(local) = def {
+ if let Some(self_param) = local.as_self_param(sema.db) {
+ cov_mark::hit!(rename_self_to_param);
+ return rename_self_to_param(&sema, local, self_param, new_name);
+ }
+ if new_name == "self" {
+ cov_mark::hit!(rename_to_self);
+ return rename_to_self(&sema, local);
+ }
+ }
+ def.rename(&sema, new_name)
+ })
+ .collect();
+
+ ops?.into_iter()
+ .reduce(|acc, elem| acc.merge(elem))
+ .ok_or_else(|| format_err!("No references found at position"))
+}
+
+/// Called by the client when it is about to rename a file.
+pub(crate) fn will_rename_file(
+ db: &RootDatabase,
+ file_id: FileId,
+ new_name_stem: &str,
+) -> Option<SourceChange> {
+ let sema = Semantics::new(db);
+ let module = sema.to_module_def(file_id)?;
+ let def = Definition::Module(module);
+ let mut change = def.rename(&sema, new_name_stem).ok()?;
+ change.file_system_edits.clear();
+ Some(change)
+}
+
+fn find_definitions(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ position: FilePosition,
+) -> RenameResult<impl Iterator<Item = (ast::NameLike, Definition)>> {
+ let symbols = sema
+ .find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, position.offset)
+ .map(|name_like| {
+ let res = match &name_like {
+                // renaming aliases would rename the item being aliased, as the HIR doesn't track aliases yet
+ ast::NameLike::Name(name)
+ if name
+ .syntax()
+ .parent()
+ .map_or(false, |it| ast::Rename::can_cast(it.kind())) =>
+ {
+ bail!("Renaming aliases is currently unsupported")
+ }
+ ast::NameLike::Name(name) => NameClass::classify(sema, name)
+ .map(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ })
+ .map(|def| (name_like.clone(), def))
+ .ok_or_else(|| format_err!("No references found at position")),
+ ast::NameLike::NameRef(name_ref) => {
+ NameRefClass::classify(sema, name_ref)
+ .map(|class| match class {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ })
+ .ok_or_else(|| format_err!("No references found at position"))
+ .and_then(|def| {
+                            // if the name differs from the definition's name it has to be an alias
+ if def
+ .name(sema.db)
+ .map_or(false, |it| it.to_smol_str() != name_ref.text().as_str())
+ {
+ Err(format_err!("Renaming aliases is currently unsupported"))
+ } else {
+ Ok((name_like.clone(), def))
+ }
+ })
+ }
+ ast::NameLike::Lifetime(lifetime) => {
+ NameRefClass::classify_lifetime(sema, lifetime)
+ .and_then(|class| match class {
+ NameRefClass::Definition(def) => Some(def),
+ _ => None,
+ })
+ .or_else(|| {
+ NameClass::classify_lifetime(sema, lifetime).and_then(|it| match it {
+ NameClass::Definition(it) => Some(it),
+ _ => None,
+ })
+ })
+ .map(|def| (name_like, def))
+ .ok_or_else(|| format_err!("No references found at position"))
+ }
+ };
+ res
+ });
+
+ let res: RenameResult<Vec<_>> = symbols.collect();
+ match res {
+ Ok(v) => {
+ if v.is_empty() {
+ // FIXME: some semantic duplication between "empty vec" and "Err()"
+ Err(format_err!("No references found at position"))
+ } else {
+ // remove duplicates, comparing `Definition`s
+ Ok(v.into_iter().unique_by(|t| t.1))
+ }
+ }
+ Err(e) => Err(e),
+ }
+}
+
+fn rename_to_self(
+ sema: &Semantics<'_, RootDatabase>,
+ local: hir::Local,
+) -> RenameResult<SourceChange> {
+ if never!(local.is_self(sema.db)) {
+ bail!("rename_to_self invoked on self");
+ }
+
+ let fn_def = match local.parent(sema.db) {
+ hir::DefWithBody::Function(func) => func,
+ _ => bail!("Cannot rename local to self outside of function"),
+ };
+
+ if fn_def.self_param(sema.db).is_some() {
+ bail!("Method already has a self parameter");
+ }
+
+ let params = fn_def.assoc_fn_params(sema.db);
+ let first_param = params
+ .first()
+ .ok_or_else(|| format_err!("Cannot rename local to self unless it is a parameter"))?;
+ match first_param.as_local(sema.db) {
+ Some(plocal) => {
+ if plocal != local {
+ bail!("Only the first parameter may be renamed to self");
+ }
+ }
+ None => bail!("rename_to_self invoked on destructuring parameter"),
+ }
+
+ let assoc_item = fn_def
+ .as_assoc_item(sema.db)
+ .ok_or_else(|| format_err!("Cannot rename parameter to self for free function"))?;
+ let impl_ = match assoc_item.container(sema.db) {
+ hir::AssocItemContainer::Trait(_) => {
+ bail!("Cannot rename parameter to self for trait functions");
+ }
+ hir::AssocItemContainer::Impl(impl_) => impl_,
+ };
+ let first_param_ty = first_param.ty();
+ let impl_ty = impl_.self_ty(sema.db);
+ let (ty, self_param) = if impl_ty.remove_ref().is_some() {
+ // if the impl is a ref to the type we can just match the `&T` with self directly
+ (first_param_ty.clone(), "self")
+ } else {
+ first_param_ty.remove_ref().map_or((first_param_ty.clone(), "self"), |ty| {
+ (ty, if first_param_ty.is_mutable_reference() { "&mut self" } else { "&self" })
+ })
+ };
+
+ if ty != impl_ty {
+ bail!("Parameter type differs from impl block type");
+ }
+
+ let InFile { file_id, value: param_source } =
+ first_param.source(sema.db).ok_or_else(|| format_err!("No source for parameter found"))?;
+
+ let def = Definition::Local(local);
+ let usages = def.usages(sema).all();
+ let mut source_change = SourceChange::default();
+ source_change.extend(usages.iter().map(|(&file_id, references)| {
+ (file_id, source_edit_from_references(references, def, "self"))
+ }));
+ source_change.insert_source_edit(
+ file_id.original_file(sema.db),
+ TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)),
+ );
+ Ok(source_change)
+}
+
+fn rename_self_to_param(
+ sema: &Semantics<'_, RootDatabase>,
+ local: hir::Local,
+ self_param: hir::SelfParam,
+ new_name: &str,
+) -> RenameResult<SourceChange> {
+ if new_name == "self" {
+ // Let's do nothing rather than complain.
+ cov_mark::hit!(rename_self_to_self);
+ return Ok(SourceChange::default());
+ }
+
+ let identifier_kind = IdentifierKind::classify(new_name)?;
+
+ let InFile { file_id, value: self_param } =
+ self_param.source(sema.db).ok_or_else(|| format_err!("cannot find function source"))?;
+
+ let def = Definition::Local(local);
+ let usages = def.usages(sema).all();
+ let edit = text_edit_from_self_param(&self_param, new_name)
+ .ok_or_else(|| format_err!("No target type found"))?;
+ if usages.len() > 1 && identifier_kind == IdentifierKind::Underscore {
+ bail!("Cannot rename reference to `_` as it is being referenced multiple times");
+ }
+ let mut source_change = SourceChange::default();
+ source_change.insert_source_edit(file_id.original_file(sema.db), edit);
+ source_change.extend(usages.iter().map(|(&file_id, references)| {
+ (file_id, source_edit_from_references(references, def, new_name))
+ }));
+ Ok(source_change)
+}
+
+fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Option<TextEdit> {
+ fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
+ if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
+ return Some(p.path()?.segment()?.name_ref()?.text().to_string());
+ }
+ None
+ }
+
+ let impl_def = self_param.syntax().ancestors().find_map(ast::Impl::cast)?;
+ let type_name = target_type_name(&impl_def)?;
+
+ let mut replacement_text = String::from(new_name);
+ replacement_text.push_str(": ");
+ match (self_param.amp_token(), self_param.mut_token()) {
+ (Some(_), None) => replacement_text.push('&'),
+ (Some(_), Some(_)) => replacement_text.push_str("&mut "),
+ (_, _) => (),
+ };
+ replacement_text.push_str(type_name.as_str());
+
+ Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use stdx::trim_indent;
+ use test_utils::assert_eq_text;
+ use text_edit::TextEdit;
+
+ use crate::{fixture, FileId};
+
+ use super::{RangeInfo, RenameError};
+
+ #[track_caller]
+ fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
+ let ra_fixture_after = &trim_indent(ra_fixture_after);
+ let (analysis, position) = fixture::position(ra_fixture_before);
+ let rename_result = analysis
+ .rename(position, new_name)
+ .unwrap_or_else(|err| panic!("Rename to '{}' was cancelled: {}", new_name, err));
+ match rename_result {
+ Ok(source_change) => {
+ let mut text_edit_builder = TextEdit::builder();
+ let mut file_id: Option<FileId> = None;
+ for edit in source_change.source_file_edits {
+ file_id = Some(edit.0);
+ for indel in edit.1.into_iter() {
+ text_edit_builder.replace(indel.delete, indel.insert);
+ }
+ }
+ if let Some(file_id) = file_id {
+ let mut result = analysis.file_text(file_id).unwrap().to_string();
+ text_edit_builder.finish().apply(&mut result);
+ assert_eq_text!(ra_fixture_after, &*result);
+ }
+ }
+ Err(err) => {
+ if ra_fixture_after.starts_with("error:") {
+ let error_message = ra_fixture_after
+ .chars()
+ .into_iter()
+ .skip("error:".len())
+ .collect::<String>();
+ assert_eq!(error_message.trim(), err.to_string());
+ } else {
+ panic!("Rename to '{}' failed unexpectedly: {}", new_name, err)
+ }
+ }
+ };
+ }
+
+ fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let source_change =
+ analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError");
+ expect.assert_debug_eq(&source_change)
+ }
+
+ fn check_expect_will_rename_file(new_name: &str, ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let source_change = analysis
+ .will_rename_file(position.file_id, new_name)
+ .unwrap()
+ .expect("Expect returned a RenameError");
+ expect.assert_debug_eq(&source_change)
+ }
+
+ fn check_prepare(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let result = analysis
+ .prepare_rename(position)
+ .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {}", err));
+ match result {
+ Ok(RangeInfo { range, info: () }) => {
+ let source = analysis.file_text(position.file_id).unwrap();
+ expect.assert_eq(&format!("{:?}: {}", range, &source[range]))
+ }
+ Err(RenameError(err)) => expect.assert_eq(&err),
+ };
+ }
+
+ #[test]
+ fn test_prepare_rename_namelikes() {
+ check_prepare(r"fn name$0<'lifetime>() {}", expect![[r#"3..7: name"#]]);
+ check_prepare(r"fn name<'lifetime$0>() {}", expect![[r#"8..17: 'lifetime"#]]);
+ check_prepare(r"fn name<'lifetime>() { name$0(); }", expect![[r#"23..27: name"#]]);
+ }
+
+ #[test]
+ fn test_prepare_rename_in_macro() {
+ check_prepare(
+ r"macro_rules! foo {
+ ($ident:ident) => {
+ pub struct $ident;
+ }
+}
+foo!(Foo$0);",
+ expect![[r#"83..86: Foo"#]],
+ );
+ }
+
+ #[test]
+ fn test_prepare_rename_keyword() {
+ check_prepare(r"struct$0 Foo;", expect![[r#"No references found at position"#]]);
+ }
+
+ #[test]
+ fn test_prepare_rename_tuple_field() {
+ check_prepare(
+ r#"
+struct Foo(i32);
+
+fn baz() {
+ let mut x = Foo(4);
+ x.0$0 = 5;
+}
+"#,
+ expect![[r#"No references found at position"#]],
+ );
+ }
+
+ #[test]
+ fn test_prepare_rename_builtin() {
+ check_prepare(
+ r#"
+fn foo() {
+ let x: i32$0 = 0;
+}
+"#,
+ expect![[r#"No references found at position"#]],
+ );
+ }
+
+ #[test]
+ fn test_prepare_rename_self() {
+ check_prepare(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn foo(self) -> Self$0 {
+ self
+ }
+}
+"#,
+ expect![[r#"No references found at position"#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_to_underscore() {
+ check("_", r#"fn main() { let i$0 = 1; }"#, r#"fn main() { let _ = 1; }"#);
+ }
+
+ #[test]
+ fn test_rename_to_raw_identifier() {
+ check("r#fn", r#"fn main() { let i$0 = 1; }"#, r#"fn main() { let r#fn = 1; }"#);
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier1() {
+ check(
+ "invalid!",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `invalid!`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier2() {
+ check(
+ "multiple tokens",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `multiple tokens`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier3() {
+ check(
+ "let",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `let`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier_lifetime() {
+ cov_mark::check!(rename_not_an_ident_ref);
+ check(
+ "'foo",
+ r#"fn main() { let i$0 = 1; }"#,
+ "error: Invalid name `'foo`: not an identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_invalid_identifier_lifetime2() {
+ cov_mark::check!(rename_not_a_lifetime_ident_ref);
+ check(
+ "foo",
+ r#"fn main<'a>(_: &'a$0 ()) {}"#,
+ "error: Invalid name `foo`: not a lifetime identifier",
+ );
+ }
+
+ #[test]
+ fn test_rename_to_underscore_invalid() {
+ cov_mark::check!(rename_underscore_multiple);
+ check(
+ "_",
+ r#"fn main(foo$0: ()) {foo;}"#,
+ "error: Cannot rename reference to `_` as it is being referenced multiple times",
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_invalid() {
+ check(
+ "'foo",
+ r#"mod foo$0 {}"#,
+ "error: Invalid name `'foo`: cannot rename module to 'foo",
+ );
+ }
+
+ #[test]
+ fn test_rename_for_local() {
+ check(
+ "k",
+ r#"
+fn main() {
+ let mut i = 1;
+ let j = 1;
+ i = i$0 + j;
+
+ { i = 0; }
+
+ i = 5;
+}
+"#,
+ r#"
+fn main() {
+ let mut k = 1;
+ let j = 1;
+ k = k + j;
+
+ { k = 0; }
+
+ k = 5;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_unresolved_reference() {
+ check(
+ "new_name",
+ r#"fn main() { let _ = unresolved_ref$0; }"#,
+ "error: No references found at position",
+ );
+ }
+
+ #[test]
+ fn test_rename_macro_multiple_occurrences() {
+ check(
+ "Baaah",
+ r#"macro_rules! foo {
+ ($ident:ident) => {
+ const $ident: () = ();
+ struct $ident {}
+ };
+}
+
+foo!($0Foo);
+const _: () = Foo;
+const _: Foo = Foo {};
+ "#,
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ const $ident: () = ();
+ struct $ident {}
+ };
+}
+
+foo!(Baaah);
+const _: () = Baaah;
+const _: Baaah = Baaah {};
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_rename_for_macro_args() {
+ check(
+ "b",
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a$0 = "test";
+ foo!(a);
+}
+"#,
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let b = "test";
+ foo!(b);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_macro_args_rev() {
+ check(
+ "b",
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a = "test";
+ foo!(a$0);
+}
+"#,
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let b = "test";
+ foo!(b);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_macro_define_fn() {
+ check(
+ "bar",
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(foo);
+fn main() {
+ fo$0o();
+}
+"#,
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(bar);
+fn main() {
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_macro_define_fn_rev() {
+ check(
+ "bar",
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(fo$0o);
+fn main() {
+ foo();
+}
+"#,
+ r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(bar);
+fn main() {
+ bar();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_for_param_inside() {
+ check("j", r#"fn foo(i : u32) -> u32 { i$0 }"#, r#"fn foo(j : u32) -> u32 { j }"#);
+ }
+
+ #[test]
+ fn test_rename_refs_for_fn_param() {
+ check("j", r#"fn foo(i$0 : u32) -> u32 { i }"#, r#"fn foo(j : u32) -> u32 { j }"#);
+ }
+
+ #[test]
+ fn test_rename_for_mut_param() {
+ check("j", r#"fn foo(mut i$0 : u32) -> u32 { i }"#, r#"fn foo(mut j : u32) -> u32 { j }"#);
+ }
+
+ #[test]
+ fn test_rename_struct_field() {
+ check(
+ "foo",
+ r#"
+struct Foo { field$0: i32 }
+
+impl Foo {
+ fn new(i: i32) -> Self {
+ Self { field: i }
+ }
+}
+"#,
+ r#"
+struct Foo { foo: i32 }
+
+impl Foo {
+ fn new(i: i32) -> Self {
+ Self { foo: i }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_field_in_field_shorthand() {
+ cov_mark::check!(test_rename_field_in_field_shorthand);
+ check(
+ "field",
+ r#"
+struct Foo { foo$0: i32 }
+
+impl Foo {
+ fn new(foo: i32) -> Self {
+ Self { foo }
+ }
+}
+"#,
+ r#"
+struct Foo { field: i32 }
+
+impl Foo {
+ fn new(foo: i32) -> Self {
+ Self { field: foo }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_local_in_field_shorthand() {
+ cov_mark::check!(test_rename_local_in_field_shorthand);
+ check(
+ "j",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn new(i$0: i32) -> Self {
+ Self { i }
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn new(j: i32) -> Self {
+ Self { i: j }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_field_shorthand_correct_struct() {
+ check(
+ "j",
+ r#"
+struct Foo { i$0: i32 }
+struct Bar { i: i32 }
+
+impl Bar {
+ fn new(i: i32) -> Self {
+ Self { i }
+ }
+}
+"#,
+ r#"
+struct Foo { j: i32 }
+struct Bar { i: i32 }
+
+impl Bar {
+ fn new(i: i32) -> Self {
+ Self { i }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_shadow_local_for_struct_shorthand() {
+ check(
+ "j",
+ r#"
+struct Foo { i: i32 }
+
+fn baz(i$0: i32) -> Self {
+ let x = Foo { i };
+ {
+ let i = 0;
+ Foo { i }
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+fn baz(j: i32) -> Self {
+ let x = Foo { i: j };
+ {
+ let i = 0;
+ Foo { i }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_mod() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod bar;
+
+//- /bar.rs
+mod foo$0;
+
+//- /bar/foo.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 1,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 2,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 2,
+ ),
+ path: "foo2.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_in_use_tree() {
+ check_expect(
+ "quux",
+ r#"
+//- /main.rs
+pub mod foo;
+pub mod bar;
+fn main() {}
+
+//- /foo.rs
+pub struct FooContent;
+
+//- /bar.rs
+use crate::foo$0::FooContent;
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 8..11,
+ },
+ ],
+ },
+ FileId(
+ 2,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 11..14,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "quux.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_in_dir() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod fo$0o;
+//- /foo/mod.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveDir {
+ src: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "../foo",
+ },
+ src_id: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "../foo2",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_unusually_nested_mod() {
+ check_expect(
+ "bar",
+ r#"
+//- /lib.rs
+mod outer { mod fo$0o; }
+
+//- /outer/foo.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "bar",
+ delete: 16..19,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "bar.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_module_rename_in_path() {
+ check(
+ "baz",
+ r#"
+mod $0foo {
+ pub use self::bar as qux;
+ pub fn bar() {}
+}
+
+fn main() { foo::bar(); }
+"#,
+ r#"
+mod baz {
+ pub use self::bar as qux;
+ pub fn bar() {}
+}
+
+fn main() { baz::bar(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_filename_and_path() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod bar;
+fn f() {
+ bar::foo::fun()
+}
+
+//- /bar.rs
+pub mod foo$0;
+
+//- /bar/foo.rs
+// pub fn fun() {}
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 27..30,
+ },
+ ],
+ },
+ FileId(
+ 1,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 8..11,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 2,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 2,
+ ),
+ path: "foo2.rs",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_rename_mod_recursive() {
+ check_expect(
+ "foo2",
+ r#"
+//- /lib.rs
+mod foo$0;
+
+//- /foo.rs
+mod bar;
+mod corge;
+
+//- /foo/bar.rs
+mod qux;
+
+//- /foo/bar/qux.rs
+mod quux;
+
+//- /foo/bar/qux/quux/mod.rs
+// empty
+
+//- /foo/corge.rs
+// empty
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ },
+ file_system_edits: [
+ MoveFile {
+ src: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "foo2.rs",
+ },
+ },
+ MoveDir {
+ src: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "foo",
+ },
+ src_id: FileId(
+ 1,
+ ),
+ dst: AnchoredPathBuf {
+ anchor: FileId(
+ 1,
+ ),
+ path: "foo2",
+ },
+ },
+ ],
+ is_snippet: false,
+ }
+ "#]],
+ )
+ }
+ #[test]
+ fn test_rename_mod_ref_by_super() {
+ check(
+ "baz",
+ r#"
+ mod $0foo {
+ struct X;
+
+ mod bar {
+ use super::X;
+ }
+ }
+ "#,
+ r#"
+ mod baz {
+ struct X;
+
+ mod bar {
+ use super::X;
+ }
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_rename_mod_in_macro() {
+ check(
+ "bar",
+ r#"
+//- /foo.rs
+
+//- /lib.rs
+macro_rules! submodule {
+ ($name:ident) => {
+ mod $name;
+ };
+}
+
+submodule!($0foo);
+"#,
+ r#"
+macro_rules! submodule {
+ ($name:ident) => {
+ mod $name;
+ };
+}
+
+submodule!(bar);
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_mod_for_crate_root() {
+ check_expect_will_rename_file(
+ "main",
+ r#"
+//- /lib.rs
+use crate::foo as bar;
+fn foo() {}
+mod bar$0;
+"#,
+ expect![[r#"
+ SourceChange {
+ source_file_edits: {},
+ file_system_edits: [],
+ is_snippet: false,
+ }
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_enum_variant_from_module_1() {
+ cov_mark::check!(rename_non_local);
+ check(
+ "Baz",
+ r#"
+mod foo {
+ pub enum Foo { Bar$0 }
+}
+
+fn func(f: foo::Foo) {
+ match f {
+ foo::Foo::Bar => {}
+ }
+}
+"#,
+ r#"
+mod foo {
+ pub enum Foo { Baz }
+}
+
+fn func(f: foo::Foo) {
+ match f {
+ foo::Foo::Baz => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_enum_variant_from_module_2() {
+ check(
+ "baz",
+ r#"
+mod foo {
+ pub struct Foo { pub bar$0: uint }
+}
+
+fn foo(f: foo::Foo) {
+ let _ = f.bar;
+}
+"#,
+ r#"
+mod foo {
+ pub struct Foo { pub baz: uint }
+}
+
+fn foo(f: foo::Foo) {
+ let _ = f.baz;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self() {
+ cov_mark::check!(rename_to_self);
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo$0: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(&mut self) -> i32 {
+ self.i
+ }
+}
+"#,
+ );
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo$0: Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(self) -> i32 {
+ self.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self_error_no_impl() {
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+
+fn f(foo$0: &mut Foo) -> i32 {
+ foo.i
+}
+"#,
+ "error: Cannot rename parameter to self for free function",
+ );
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+struct Bar;
+
+impl Bar {
+ fn f(foo$0: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ "error: Parameter type differs from impl block type",
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self_error_not_first() {
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+impl Foo {
+ fn f(x: (), foo$0: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ "error: Only the first parameter may be renamed to self",
+ );
+ }
+
+ #[test]
+ fn test_parameter_to_self_impl_ref() {
+ check(
+ "self",
+ r#"
+struct Foo { i: i32 }
+impl &Foo {
+ fn f(foo$0: &Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+impl &Foo {
+ fn f(self) -> i32 {
+ self.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_self_to_parameter() {
+ check(
+ "foo",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(&mut $0self) -> i32 {
+ self.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo: &mut Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_owned_self_to_parameter() {
+ cov_mark::check!(rename_self_to_param);
+ check(
+ "foo",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f($0self) -> i32 {
+ self.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo: Foo) -> i32 {
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_self_in_path_to_parameter() {
+ check(
+ "foo",
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(&self) -> i32 {
+ let self_var = 1;
+ self$0.i
+ }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+impl Foo {
+ fn f(foo: &Foo) -> i32 {
+ let self_var = 1;
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_field_put_init_shorthand() {
+ cov_mark::check!(test_rename_field_put_init_shorthand);
+ check(
+ "bar",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(bar: i32) -> Foo {
+ Foo { i: bar }
+}
+"#,
+ r#"
+struct Foo { bar: i32 }
+
+fn foo(bar: i32) -> Foo {
+ Foo { bar }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_local_put_init_shorthand() {
+ cov_mark::check!(test_rename_local_put_init_shorthand);
+ check(
+ "i",
+ r#"
+struct Foo { i: i32 }
+
+fn foo(bar$0: i32) -> Foo {
+ Foo { i: bar }
+}
+"#,
+ r#"
+struct Foo { i: i32 }
+
+fn foo(i: i32) -> Foo {
+ Foo { i }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_field_pat_into_shorthand() {
+ cov_mark::check!(test_rename_field_put_init_shorthand_pat);
+ check(
+ "baz",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { i: ref baz @ qux } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { baz: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { baz: ref baz @ qux } = foo;
+ let _ = qux;
+}
+"#,
+ );
+ check(
+ "baz",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { i: ref baz } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { baz: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { ref baz } = foo;
+ let _ = qux;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_local_pat_into_shorthand() {
+ cov_mark::check!(test_rename_local_put_init_shorthand_pat);
+ check(
+ "field",
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field: qux$0 } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field } = foo;
+ let _ = field;
+}
+"#,
+ );
+ check(
+ "field",
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field: x @ qux$0 } = foo;
+ let _ = qux;
+}
+"#,
+ r#"
+struct Foo { field: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { field: x @ field } = foo;
+ let _ = field;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_binding_in_destructure_pat() {
+ let expected_fixture = r#"
+struct Foo {
+ i: i32,
+}
+
+fn foo(foo: Foo) {
+ let Foo { i: bar } = foo;
+ let _ = bar;
+}
+"#;
+ check(
+ "bar",
+ r#"
+struct Foo {
+ i: i32,
+}
+
+fn foo(foo: Foo) {
+ let Foo { i: b } = foo;
+ let _ = b$0;
+}
+"#,
+ expected_fixture,
+ );
+ check(
+ "bar",
+ r#"
+struct Foo {
+ i: i32,
+}
+
+fn foo(foo: Foo) {
+ let Foo { i } = foo;
+ let _ = i$0;
+}
+"#,
+ expected_fixture,
+ );
+ }
+
+ #[test]
+ fn test_rename_binding_in_destructure_param_pat() {
+ check(
+ "bar",
+ r#"
+struct Foo {
+ i: i32
+}
+
+fn foo(Foo { i }: Foo) -> i32 {
+ i$0
+}
+"#,
+ r#"
+struct Foo {
+ i: i32
+}
+
+fn foo(Foo { i: bar }: Foo) -> i32 {
+ bar
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_struct_field_complex_ident_pat() {
+ cov_mark::check!(rename_record_pat_field_name_split);
+ check(
+ "baz",
+ r#"
+struct Foo { i$0: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { ref i } = foo;
+}
+"#,
+ r#"
+struct Foo { baz: i32 }
+
+fn foo(foo: Foo) {
+ let Foo { baz: ref i } = foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_lifetimes() {
+ cov_mark::check!(rename_lifetime);
+ check(
+ "'yeeee",
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'a> Foo<'a> for &'a () {
+ fn foo() -> &'a$0 () {
+ unimplemented!()
+ }
+}
+"#,
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'yeeee> Foo<'yeeee> for &'yeeee () {
+ fn foo() -> &'yeeee () {
+ unimplemented!()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_bind_pat() {
+ check(
+ "new_name",
+ r#"
+fn main() {
+ enum CustomOption<T> {
+ None,
+ Some(T),
+ }
+
+ let test_variable = CustomOption::Some(22);
+
+ match test_variable {
+ CustomOption::Some(foo$0) if foo == 11 => {}
+ _ => (),
+ }
+}"#,
+ r#"
+fn main() {
+ enum CustomOption<T> {
+ None,
+ Some(T),
+ }
+
+ let test_variable = CustomOption::Some(22);
+
+ match test_variable {
+ CustomOption::Some(new_name) if new_name == 11 => {}
+ _ => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_label() {
+ check(
+ "'foo",
+ r#"
+fn foo<'a>() -> &'a () {
+ 'a: {
+ 'b: loop {
+ break 'a$0;
+ }
+ }
+}
+"#,
+ r#"
+fn foo<'a>() -> &'a () {
+ 'foo: {
+ 'b: loop {
+ break 'foo;
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_self_to_self() {
+ cov_mark::check!(rename_self_to_self);
+ check(
+ "self",
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self$0) {}
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(self) {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_field_in_pat_in_macro_doesnt_shorthand() {
+        // Ideally we would be able to make this emit a shorthand, but I doubt this is easily possible
+ check(
+ "baz",
+ r#"
+macro_rules! foo {
+ ($pattern:pat) => {
+ let $pattern = loop {};
+ };
+}
+struct Foo {
+ bar$0: u32,
+}
+fn foo() {
+ foo!(Foo { bar: baz });
+}
+"#,
+ r#"
+macro_rules! foo {
+ ($pattern:pat) => {
+ let $pattern = loop {};
+ };
+}
+struct Foo {
+ baz: u32,
+}
+fn foo() {
+ foo!(Foo { baz: baz });
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_rename_tuple_field() {
+ check(
+ "foo",
+ r#"
+struct Foo(i32);
+
+fn baz() {
+ let mut x = Foo(4);
+ x.0$0 = 5;
+}
+"#,
+ "error: No identifier available to rename",
+ );
+ }
+
+ #[test]
+ fn test_rename_builtin() {
+ check(
+ "foo",
+ r#"
+fn foo() {
+ let x: i32$0 = 0;
+}
+"#,
+ "error: Cannot rename builtin type",
+ );
+ }
+
+ #[test]
+ fn test_rename_self() {
+ check(
+ "foo",
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn foo(self) -> Self$0 {
+ self
+ }
+}
+"#,
+ "error: Cannot rename `Self`",
+ );
+ }
+
+ #[test]
+ fn test_rename_ignores_self_ty() {
+ check(
+ "Fo0",
+ r#"
+struct $0Foo;
+
+impl Foo where Self: {}
+"#,
+ r#"
+struct Fo0;
+
+impl Fo0 where Self: {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_fails_on_aliases() {
+ check(
+ "Baz",
+ r#"
+struct Foo;
+use Foo as Bar$0;
+"#,
+ "error: Renaming aliases is currently unsupported",
+ );
+ check(
+ "Baz",
+ r#"
+struct Foo;
+use Foo as Bar;
+use Bar$0;
+"#,
+ "error: Renaming aliases is currently unsupported",
+ );
+ }
+
+ #[test]
+ fn test_rename_trait_method() {
+ let res = r"
+trait Foo {
+ fn foo(&self) {
+ self.foo();
+ }
+}
+
+impl Foo for () {
+ fn foo(&self) {
+ self.foo();
+ }
+}";
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar$0(&self) {
+ self.bar();
+ }
+}
+
+impl Foo for () {
+ fn bar(&self) {
+ self.bar();
+ }
+}"#,
+ res,
+ );
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar(&self) {
+ self.bar$0();
+ }
+}
+
+impl Foo for () {
+ fn bar(&self) {
+ self.bar();
+ }
+}"#,
+ res,
+ );
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar(&self) {
+ self.bar();
+ }
+}
+
+impl Foo for () {
+ fn bar$0(&self) {
+ self.bar();
+ }
+}"#,
+ res,
+ );
+ check(
+ "foo",
+ r#"
+trait Foo {
+ fn bar(&self) {
+ self.bar();
+ }
+}
+
+impl Foo for () {
+ fn bar(&self) {
+ self.bar$0();
+ }
+}"#,
+ res,
+ );
+ }
+
+ #[test]
+ fn test_rename_trait_method_prefix_of_second() {
+ check(
+ "qux",
+ r#"
+trait Foo {
+ fn foo$0() {}
+ fn foobar() {}
+}
+"#,
+ r#"
+trait Foo {
+ fn qux() {}
+ fn foobar() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_rename_trait_const() {
+ let res = r"
+trait Foo {
+ const FOO: ();
+}
+
+impl Foo for () {
+ const FOO: ();
+}
+fn f() { <()>::FOO; }";
+ check(
+ "FOO",
+ r#"
+trait Foo {
+ const BAR$0: ();
+}
+
+impl Foo for () {
+ const BAR: ();
+}
+fn f() { <()>::BAR; }"#,
+ res,
+ );
+ check(
+ "FOO",
+ r#"
+trait Foo {
+ const BAR: ();
+}
+
+impl Foo for () {
+ const BAR$0: ();
+}
+fn f() { <()>::BAR; }"#,
+ res,
+ );
+ check(
+ "FOO",
+ r#"
+trait Foo {
+ const BAR: ();
+}
+
+impl Foo for () {
+ const BAR: ();
+}
+fn f() { <()>::BAR$0; }"#,
+ res,
+ );
+ }
+
+ #[test]
+ fn defs_from_macros_arent_renamed() {
+ check(
+ "lol",
+ r#"
+macro_rules! m { () => { fn f() {} } }
+m!();
+fn main() { f$0() }
+"#,
+ "error: No identifier available to rename",
+ )
+ }
+
+ #[test]
+ fn attributed_item() {
+ check(
+ "function",
+ r#"
+//- proc_macros: identity
+
+#[proc_macros::identity]
+fn func$0() {
+ func();
+}
+"#,
+ r#"
+
+#[proc_macros::identity]
+fn function() {
+ function();
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn in_macro_multi_mapping() {
+ check(
+ "a",
+ r#"
+fn foo() {
+ macro_rules! match_ast2 {
+ ($node:ident {
+ $( $res:expr, )*
+ }) => {{
+ $( if $node { $res } else )*
+ { loop {} }
+ }};
+ }
+ let $0d = 3;
+ match_ast2! {
+ d {
+ d,
+ d,
+ }
+ };
+}
+"#,
+ r#"
+fn foo() {
+ macro_rules! match_ast2 {
+ ($node:ident {
+ $( $res:expr, )*
+ }) => {{
+ $( if $node { $res } else )*
+ { loop {} }
+ }};
+ }
+ let a = 3;
+ match_ast2! {
+ a {
+ a,
+ a,
+ }
+ };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn rename_multi_local() {
+ check(
+ "bar",
+ r#"
+fn foo((foo$0 | foo | foo): ()) {
+ foo;
+ let foo;
+}
+"#,
+ r#"
+fn foo((bar | bar | bar): ()) {
+ bar;
+ let foo;
+}
+"#,
+ );
+ check(
+ "bar",
+ r#"
+fn foo((foo | foo$0 | foo): ()) {
+ foo;
+ let foo;
+}
+"#,
+ r#"
+fn foo((bar | bar | bar): ()) {
+ bar;
+ let foo;
+}
+"#,
+ );
+ check(
+ "bar",
+ r#"
+fn foo((foo | foo | foo): ()) {
+ foo$0;
+ let foo;
+}
+"#,
+ r#"
+fn foo((bar | bar | bar): ()) {
+ bar;
+ let foo;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
new file mode 100644
index 000000000..bec770ed9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -0,0 +1,2163 @@
+use std::fmt;
+
+use ast::HasName;
+use cfg::CfgExpr;
+use hir::{AsAssocItem, HasAttrs, HasSource, HirDisplay, Semantics};
+use ide_assists::utils::test_related_attribute;
+use ide_db::{
+ base_db::{FilePosition, FileRange},
+ defs::Definition,
+ helpers::visit_file_defs,
+ search::SearchScope,
+ FxHashMap, FxHashSet, RootDatabase, SymbolKind,
+};
+use itertools::Itertools;
+use stdx::{always, format_to};
+use syntax::{
+ ast::{self, AstNode, HasAttrs as _},
+ SmolStr, SyntaxNode,
+};
+
+use crate::{references, FileId, NavigationTarget, ToNav, TryToNav};
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct Runnable {
+ pub use_name_in_title: bool,
+ pub nav: NavigationTarget,
+ pub kind: RunnableKind,
+ pub cfg: Option<CfgExpr>,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub enum TestId {
+ Name(SmolStr),
+ Path(String),
+}
+
+impl fmt::Display for TestId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ TestId::Name(name) => name.fmt(f),
+ TestId::Path(path) => path.fmt(f),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub enum RunnableKind {
+ Test { test_id: TestId, attr: TestAttr },
+ TestMod { path: String },
+ Bench { test_id: TestId },
+ DocTest { test_id: TestId },
+ Bin,
+}
+
+#[cfg(test)]
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+enum RunnableTestKind {
+ Test,
+ TestMod,
+ DocTest,
+ Bench,
+ Bin,
+}
+
+impl Runnable {
+ // test package::module::testname
+ pub fn label(&self, target: Option<String>) -> String {
+ match &self.kind {
+ RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
+ RunnableKind::TestMod { path } => format!("test-mod {}", path),
+ RunnableKind::Bench { test_id } => format!("bench {}", test_id),
+ RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
+ RunnableKind::Bin => {
+ target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
+ }
+ }
+ }
+
+ pub fn title(&self) -> String {
+ let mut s = String::from("▶\u{fe0e} Run ");
+ if self.use_name_in_title {
+ format_to!(s, "{}", self.nav.name);
+ if !matches!(self.kind, RunnableKind::Bin) {
+ s.push(' ');
+ }
+ }
+ let suffix = match &self.kind {
+ RunnableKind::TestMod { .. } => "Tests",
+ RunnableKind::Test { .. } => "Test",
+ RunnableKind::DocTest { .. } => "Doctest",
+ RunnableKind::Bench { .. } => "Bench",
+ RunnableKind::Bin => return s,
+ };
+ s.push_str(suffix);
+ s
+ }
+
+ #[cfg(test)]
+ fn test_kind(&self) -> RunnableTestKind {
+ match &self.kind {
+ RunnableKind::TestMod { .. } => RunnableTestKind::TestMod,
+ RunnableKind::Test { .. } => RunnableTestKind::Test,
+ RunnableKind::DocTest { .. } => RunnableTestKind::DocTest,
+ RunnableKind::Bench { .. } => RunnableTestKind::Bench,
+ RunnableKind::Bin => RunnableTestKind::Bin,
+ }
+ }
+}
+
+// Feature: Run
+//
+// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor
+// location**. Super useful for repeatedly running just a single test. Do bind this
+// to a shortcut!
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Run**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
+pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
+ let sema = Semantics::new(db);
+
+ let mut res = Vec::new();
+ // Record all runnables that come from macro expansions here instead.
+    // If an expansion creates multiple runnables, we name them to avoid emitting several identically named runnables.
+ let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
+ let mut add_opt = |runnable: Option<Runnable>, def| {
+ if let Some(runnable) = runnable.filter(|runnable| {
+ always!(
+ runnable.nav.file_id == file_id,
+ "tried adding a runnable pointing to a different file: {:?} for {:?}",
+ runnable.kind,
+ file_id
+ )
+ }) {
+ if let Some(def) = def {
+ let file_id = match def {
+ Definition::Module(it) => it.declaration_source(db).map(|src| src.file_id),
+ Definition::Function(it) => it.source(db).map(|src| src.file_id),
+ _ => None,
+ };
+ if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) {
+ in_macro_expansion.entry(file_id).or_default().push(runnable);
+ return;
+ }
+ }
+ res.push(runnable);
+ }
+ };
+ visit_file_defs(&sema, file_id, &mut |def| {
+ let runnable = match def {
+ Definition::Module(it) => runnable_mod(&sema, it),
+ Definition::Function(it) => runnable_fn(&sema, it),
+ Definition::SelfType(impl_) => runnable_impl(&sema, &impl_),
+ _ => None,
+ };
+ add_opt(
+ runnable
+ .or_else(|| module_def_doctest(sema.db, def))
+ // #[macro_export] mbe macros are declared in the root, while their definition may reside in a different module
+ .filter(|it| it.nav.file_id == file_id),
+ Some(def),
+ );
+ if let Definition::SelfType(impl_) = def {
+ impl_.items(db).into_iter().for_each(|assoc| {
+ let runnable = match assoc {
+ hir::AssocItem::Function(it) => {
+ runnable_fn(&sema, it).or_else(|| module_def_doctest(sema.db, it.into()))
+ }
+ hir::AssocItem::Const(it) => module_def_doctest(sema.db, it.into()),
+ hir::AssocItem::TypeAlias(it) => module_def_doctest(sema.db, it.into()),
+ };
+ add_opt(runnable, Some(assoc.into()))
+ });
+ }
+ });
+
+ sema.to_module_defs(file_id)
+ .map(|it| runnable_mod_outline_definition(&sema, it))
+ .for_each(|it| add_opt(it, None));
+
+ res.extend(in_macro_expansion.into_iter().flat_map(|(_, runnables)| {
+ let use_name_in_title = runnables.len() != 1;
+ runnables.into_iter().map(move |mut r| {
+ r.use_name_in_title = use_name_in_title;
+ r
+ })
+ }));
+ res
+}
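
For orientation, a minimal sketch of driving this entry point the same way the tests at the end of this file do, through the `Analysis` facade and the crate-internal `fixture::position` helper; the fixture text, test name, and assertion are illustrative only.

#[test]
fn runnables_smoke_sketch() {
    use crate::fixture;
    // `$0` marks the cursor; `runnables` only needs the file id of that position.
    let (analysis, position) = fixture::position(
        r#"
$0
#[test]
fn check_it() {}
"#,
    );
    let runnables = analysis.runnables(position.file_id).unwrap();
    // A lone `#[test]` fn yields a test runnable (plus a test-mod runnable for the file).
    assert!(runnables.iter().any(|r| matches!(r.kind, RunnableKind::Test { .. })));
}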
+
+// Feature: Related Tests
+//
+// Provides a sneak peek of all tests where the current item is used.
+//
+// The simplest way to use this feature is via the context menu:
+// - Right-click on the selected item. The context menu opens.
+// - Select **Peek related tests**
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Peek related tests**
+// |===
+pub(crate) fn related_tests(
+ db: &RootDatabase,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+) -> Vec<Runnable> {
+ let sema = Semantics::new(db);
+ let mut res: FxHashSet<Runnable> = FxHashSet::default();
+ let syntax = sema.parse(position.file_id).syntax().clone();
+
+ find_related_tests(&sema, &syntax, position, search_scope, &mut res);
+
+ res.into_iter().collect()
+}
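
Similarly, a short sketch of exercising `related_tests` the way `check_tests` does further down; the fixture mirrors the `find_direct_fn_test` case below and the test name is made up for illustration.

#[test]
fn related_tests_smoke_sketch() {
    use crate::fixture;
    let (analysis, position) = fixture::position(
        r#"
fn foo$0() {}

mod tests {
    #[test]
    fn foo_test() { super::foo() }
}
"#,
    );
    // Passing `None` means no explicit search scope is imposed.
    let tests = analysis.related_tests(position, None).unwrap();
    // The single test calling `foo` is reported as a related runnable.
    assert_eq!(tests.len(), 1);
}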
+
+fn find_related_tests(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+ tests: &mut FxHashSet<Runnable>,
+) {
+    // FIXME: why is this using references::find_defs? It should use ide_db::search instead.
+ let defs = match references::find_defs(sema, syntax, position.offset) {
+ Some(defs) => defs,
+ None => return,
+ };
+ for def in defs {
+ let defs = def
+ .usages(sema)
+ .set_scope(search_scope.clone())
+ .all()
+ .references
+ .into_values()
+ .flatten();
+ for ref_ in defs {
+ let name_ref = match ref_.name {
+ ast::NameLike::NameRef(name_ref) => name_ref,
+ _ => continue,
+ };
+ if let Some(fn_def) =
+ sema.ancestors_with_macros(name_ref.syntax().clone()).find_map(ast::Fn::cast)
+ {
+ if let Some(runnable) = as_test_runnable(sema, &fn_def) {
+ // direct test
+ tests.insert(runnable);
+ } else if let Some(module) = parent_test_module(sema, &fn_def) {
+ // indirect test
+ find_related_tests_in_module(sema, syntax, &fn_def, &module, tests);
+ }
+ }
+ }
+ }
+}
+
+fn find_related_tests_in_module(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ fn_def: &ast::Fn,
+ parent_module: &hir::Module,
+ tests: &mut FxHashSet<Runnable>,
+) {
+ let fn_name = match fn_def.name() {
+ Some(it) => it,
+ _ => return,
+ };
+ let mod_source = parent_module.definition_source(sema.db);
+ let range = match &mod_source.value {
+ hir::ModuleSource::Module(m) => m.syntax().text_range(),
+ hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
+ hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
+ };
+
+ let file_id = mod_source.file_id.original_file(sema.db);
+ let mod_scope = SearchScope::file_range(FileRange { file_id, range });
+ let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() };
+ find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
+}
+
+fn as_test_runnable(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
+ if test_related_attribute(fn_def).is_some() {
+ let function = sema.to_def(fn_def)?;
+ runnable_fn(sema, function)
+ } else {
+ None
+ }
+}
+
+fn parent_test_module(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<hir::Module> {
+ fn_def.syntax().ancestors().find_map(|node| {
+ let module = ast::Module::cast(node)?;
+ let module = sema.to_def(&module)?;
+
+ if has_test_function_or_multiple_test_submodules(sema, &module) {
+ Some(module)
+ } else {
+ None
+ }
+ })
+}
+
+pub(crate) fn runnable_fn(
+ sema: &Semantics<'_, RootDatabase>,
+ def: hir::Function,
+) -> Option<Runnable> {
+ let func = def.source(sema.db)?;
+ let name = def.name(sema.db).to_smol_str();
+
+ let root = def.module(sema.db).krate().root_module(sema.db);
+
+ let kind = if name == "main" && def.module(sema.db) == root {
+ RunnableKind::Bin
+ } else {
+ let test_id = || {
+ let canonical_path = {
+ let def: hir::ModuleDef = def.into();
+ def.canonical_path(sema.db)
+ };
+ canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
+ };
+
+ if test_related_attribute(&func.value).is_some() {
+ let attr = TestAttr::from_fn(&func.value);
+ RunnableKind::Test { test_id: test_id(), attr }
+ } else if func.value.has_atom_attr("bench") {
+ RunnableKind::Bench { test_id: test_id() }
+ } else {
+ return None;
+ }
+ };
+
+ let nav = NavigationTarget::from_named(
+ sema.db,
+ func.as_ref().map(|it| it as &dyn ast::HasName),
+ SymbolKind::Function,
+ );
+ let cfg = def.attrs(sema.db).cfg();
+ Some(Runnable { use_name_in_title: false, nav, kind, cfg })
+}
+
+pub(crate) fn runnable_mod(
+ sema: &Semantics<'_, RootDatabase>,
+ def: hir::Module,
+) -> Option<Runnable> {
+ if !has_test_function_or_multiple_test_submodules(sema, &def) {
+ return None;
+ }
+ let path =
+ def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
+
+ let attrs = def.attrs(sema.db);
+ let cfg = attrs.cfg();
+ let nav = NavigationTarget::from_module_to_decl(sema.db, def);
+ Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg })
+}
+
+pub(crate) fn runnable_impl(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &hir::Impl,
+) -> Option<Runnable> {
+ let attrs = def.attrs(sema.db);
+ if !has_runnable_doc_test(&attrs) {
+ return None;
+ }
+ let cfg = attrs.cfg();
+ let nav = def.try_to_nav(sema.db)?;
+ let ty = def.self_ty(sema.db);
+ let adt_name = ty.as_adt()?.name(sema.db);
+ let mut ty_args = ty.type_arguments().peekable();
+ let params = if ty_args.peek().is_some() {
+ format!("<{}>", ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db))))
+ } else {
+ String::new()
+ };
+ let test_id = TestId::Path(format!("{}{}", adt_name, params));
+
+ Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg })
+}
+
+/// Creates a test mod runnable for outline modules at the top of their definition.
+fn runnable_mod_outline_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ def: hir::Module,
+) -> Option<Runnable> {
+ if !has_test_function_or_multiple_test_submodules(sema, &def) {
+ return None;
+ }
+ let path =
+ def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
+
+ let attrs = def.attrs(sema.db);
+ let cfg = attrs.cfg();
+ match def.definition_source(sema.db).value {
+ hir::ModuleSource::SourceFile(_) => Some(Runnable {
+ use_name_in_title: false,
+ nav: def.to_nav(sema.db),
+ kind: RunnableKind::TestMod { path },
+ cfg,
+ }),
+ _ => None,
+ }
+}
+
+fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
+ let attrs = match def {
+ Definition::Module(it) => it.attrs(db),
+ Definition::Function(it) => it.attrs(db),
+ Definition::Adt(it) => it.attrs(db),
+ Definition::Variant(it) => it.attrs(db),
+ Definition::Const(it) => it.attrs(db),
+ Definition::Static(it) => it.attrs(db),
+ Definition::Trait(it) => it.attrs(db),
+ Definition::TypeAlias(it) => it.attrs(db),
+ Definition::Macro(it) => it.attrs(db),
+ Definition::SelfType(it) => it.attrs(db),
+ _ => return None,
+ };
+ if !has_runnable_doc_test(&attrs) {
+ return None;
+ }
+ let def_name = def.name(db)?;
+ let path = (|| {
+ let mut path = String::new();
+ def.canonical_module_path(db)?
+ .flat_map(|it| it.name(db))
+ .for_each(|name| format_to!(path, "{}::", name));
+ // This probably belongs to canonical_path?
+ if let Some(assoc_item) = def.as_assoc_item(db) {
+ if let hir::AssocItemContainer::Impl(imp) = assoc_item.container(db) {
+ let ty = imp.self_ty(db);
+ if let Some(adt) = ty.as_adt() {
+ let name = adt.name(db);
+ let mut ty_args = ty.type_arguments().peekable();
+ format_to!(path, "{}", name);
+ if ty_args.peek().is_some() {
+ format_to!(
+ path,
+ "<{}>",
+ ty_args.format_with(", ", |ty, cb| cb(&ty.display(db)))
+ );
+ }
+ format_to!(path, "::{}", def_name);
+ return Some(path);
+ }
+ }
+ }
+ format_to!(path, "{}", def_name);
+ Some(path)
+ })();
+
+ let test_id = path.map_or_else(|| TestId::Name(def_name.to_smol_str()), TestId::Path);
+
+ let mut nav = match def {
+ Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def),
+ def => def.try_to_nav(db)?,
+ };
+ nav.focus_range = None;
+ nav.description = None;
+ nav.docs = None;
+ nav.kind = None;
+ let res = Runnable {
+ use_name_in_title: false,
+ nav,
+ kind: RunnableKind::DocTest { test_id },
+ cfg: attrs.cfg(),
+ };
+ Some(res)
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct TestAttr {
+ pub ignore: bool,
+}
+
+impl TestAttr {
+ fn from_fn(fn_def: &ast::Fn) -> TestAttr {
+ let ignore = fn_def
+ .attrs()
+ .filter_map(|attr| attr.simple_name())
+ .any(|attribute_text| attribute_text == "ignore");
+ TestAttr { ignore }
+ }
+}
+
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
+ &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
+
+fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
+ attrs.docs().map_or(false, |doc| {
+ let mut in_code_block = false;
+
+ for line in String::from(doc).lines() {
+ if let Some(header) =
+ RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
+ {
+ in_code_block = !in_code_block;
+
+ if in_code_block
+ && header
+ .split(',')
+ .all(|sub| RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE.contains(&sub.trim()))
+ {
+ return true;
+ }
+ }
+ }
+
+ false
+ })
+}
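
The rule encoded above is small enough to read in isolation: a doc comment counts as runnable once it contains a code fence whose comma-separated attributes are all on the runnable whitelist. Below is a self-contained sketch of just that fence classification; the constants mirror the ones above, while the function name and example inputs are made up for illustration.

fn fence_is_runnable(header_line: &str) -> bool {
    const FENCES: [&str; 2] = ["```", "~~~"];
    const RUNNABLE_ATTRS: &[&str] =
        &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
    FENCES
        .iter()
        .copied()
        .find_map(|fence| header_line.strip_prefix(fence))
        // Every attribute on the opening fence must be in the runnable list.
        .map_or(false, |attrs| attrs.split(',').all(|a| RUNNABLE_ATTRS.contains(&a.trim())))
}

fn main() {
    assert!(fence_is_runnable("```"));                  // plain fence: runnable
    assert!(fence_is_runnable("```rust,should_panic")); // whitelisted attributes
    assert!(!fence_is_runnable("```no_run"));           // not on the list
    assert!(!fence_is_runnable("~~~text"));             // plain-text block
}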
+
+// We could create runnables for modules with number_of_test_submodules > 0,
+// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already
+fn has_test_function_or_multiple_test_submodules(
+ sema: &Semantics<'_, RootDatabase>,
+ module: &hir::Module,
+) -> bool {
+ let mut number_of_test_submodules = 0;
+
+ for item in module.declarations(sema.db) {
+ match item {
+ hir::ModuleDef::Function(f) => {
+ if let Some(it) = f.source(sema.db) {
+ if test_related_attribute(&it.value).is_some() {
+ return true;
+ }
+ }
+ }
+ hir::ModuleDef::Module(submodule) => {
+ if has_test_function_or_multiple_test_submodules(sema, &submodule) {
+ number_of_test_submodules += 1;
+ }
+ }
+ _ => (),
+ }
+ }
+
+ number_of_test_submodules > 1
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::fixture;
+
+ use super::{RunnableTestKind::*, *};
+
+ fn check(
+ ra_fixture: &str,
+ // FIXME: fold this into `expect` as well
+ actions: &[RunnableTestKind],
+ expect: Expect,
+ ) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let mut runnables = analysis.runnables(position.file_id).unwrap();
+ runnables.sort_by_key(|it| (it.nav.full_range.start(), it.nav.name.clone()));
+ expect.assert_debug_eq(&runnables);
+ assert_eq!(
+ actions,
+ runnables.into_iter().map(|it| it.test_kind()).collect::<Vec<_>>().as_slice()
+ );
+ }
+
+ fn check_tests(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let tests = analysis.related_tests(position, None).unwrap();
+ expect.assert_debug_eq(&tests);
+ }
+
+ #[test]
+ fn test_runnables() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+#[test]
+fn test_foo() {}
+
+#[test]
+#[ignore]
+fn test_foo() {}
+
+#[bench]
+fn bench() {}
+
+mod not_a_root {
+ fn main() {}
+}
+"#,
+ &[TestMod, Bin, Test, Test, Bench],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..137,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..39,
+ focus_range: 26..34,
+ name: "test_foo",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 41..75,
+ focus_range: 62..70,
+ name: "test_foo",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo",
+ ),
+ attr: TestAttr {
+ ignore: true,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 77..99,
+ focus_range: 89..94,
+ name: "bench",
+ kind: Function,
+ },
+ kind: Bench {
+ test_id: Path(
+ "bench",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+/// ```
+/// let x = 5;
+/// ```
+fn should_have_runnable() {}
+
+/// ```edition2018
+/// let x = 5;
+/// ```
+fn should_have_runnable_1() {}
+
+/// ```
+/// let z = 55;
+/// ```
+///
+/// ```ignore
+/// let z = 56;
+/// ```
+fn should_have_runnable_2() {}
+
+/**
+```rust
+let z = 55;
+```
+*/
+fn should_have_no_runnable_3() {}
+
+/**
+ ```rust
+ let z = 55;
+ ```
+*/
+fn should_have_no_runnable_4() {}
+
+/// ```no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable() {}
+
+/// ```ignore
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_2() {}
+
+/// ```compile_fail
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_3() {}
+
+/// ```text
+/// arbitrary plain text
+/// ```
+fn should_have_no_runnable_4() {}
+
+/// ```text
+/// arbitrary plain text
+/// ```
+///
+/// ```sh
+/// $ shell code
+/// ```
+fn should_have_no_runnable_5() {}
+
+/// ```rust,no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_6() {}
+
+/// ```
+/// let x = 5;
+/// ```
+struct StructWithRunnable(String);
+
+/// ```
+/// let x = 5;
+/// ```
+impl StructWithRunnable {}
+
+trait Test {
+ fn test() -> usize {
+ 5usize
+ }
+}
+
+/// ```
+/// let x = 5;
+/// ```
+impl Test for StructWithRunnable {}
+"#,
+ &[Bin, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..74,
+ name: "should_have_runnable",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 76..148,
+ name: "should_have_runnable_1",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable_1",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 150..254,
+ name: "should_have_runnable_2",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable_2",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 256..320,
+ name: "should_have_no_runnable_3",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_no_runnable_3",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 322..398,
+ name: "should_have_no_runnable_4",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_no_runnable_4",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 900..965,
+ name: "StructWithRunnable",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 967..1024,
+ focus_range: 1003..1021,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1088..1154,
+ focus_range: 1133..1151,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test_in_impl() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data;
+impl Data {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 44..98,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_module() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod test_mod {
+ #[test]
+ fn test_foo1() {}
+}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..51,
+ focus_range: 5..13,
+ name: "test_mod",
+ kind: Module,
+ description: "mod test_mod",
+ },
+ kind: TestMod {
+ path: "test_mod",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 20..49,
+ focus_range: 35..44,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_mod::test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod root_tests {
+ mod nested_tests_0 {
+ mod nested_tests_1 {
+ #[test]
+ fn nested_test_11() {}
+
+ #[test]
+ fn nested_test_12() {}
+ }
+
+ mod nested_tests_2 {
+ #[test]
+ fn nested_test_2() {}
+ }
+
+ mod nested_tests_3 {}
+ }
+
+ mod nested_tests_4 {}
+}
+"#,
+ &[TestMod, TestMod, Test, Test, TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 22..323,
+ focus_range: 26..40,
+ name: "nested_tests_0",
+ kind: Module,
+ description: "mod nested_tests_0",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 51..192,
+ focus_range: 55..69,
+ name: "nested_tests_1",
+ kind: Module,
+ description: "mod nested_tests_1",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0::nested_tests_1",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 84..126,
+ focus_range: 107..121,
+ name: "nested_test_11",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_1::nested_test_11",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 140..182,
+ focus_range: 163..177,
+ name: "nested_test_12",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_1::nested_test_12",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 202..286,
+ focus_range: 206..220,
+ name: "nested_tests_2",
+ kind: Module,
+ description: "mod nested_tests_2",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0::nested_tests_2",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 235..276,
+ focus_range: 258..271,
+ name: "nested_test_2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_2::nested_test_2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_with_feature() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+$0
+#[test]
+#[cfg(feature = "foo")]
+fn test_foo1() {}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..51,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..50,
+ focus_range: 36..45,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: Some(
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "foo",
+ },
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_with_features() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo,feature=bar
+$0
+#[test]
+#[cfg(all(feature = "foo", feature = "bar"))]
+fn test_foo1() {}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..73,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..72,
+ focus_range: 58..67,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: Some(
+ All(
+ [
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "foo",
+ },
+ ),
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "bar",
+ },
+ ),
+ ],
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_no_test_function_in_module() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod test_mod {
+ fn foo1() {}
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_doc_runnables_impl_mod() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+struct Foo;$0
+impl Foo {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+ "#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 27..81,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "foo::Foo::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_in_macro() {
+ check(
+ r#"
+//- /lib.rs
+$0
+macro_rules! gen {
+ () => {
+ #[test]
+ fn foo_test() {}
+ }
+}
+macro_rules! gen2 {
+ () => {
+ mod tests2 {
+ #[test]
+ fn foo_test2() {}
+ }
+ }
+}
+mod tests {
+ gen!();
+}
+gen2!();
+"#,
+ &[TestMod, TestMod, Test, Test, TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..237,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 202..227,
+ focus_range: 206..211,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 218..225,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 228..236,
+ name: "foo_test2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests2::foo_test2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 228..236,
+ name: "tests2",
+ kind: Module,
+ description: "mod tests2",
+ },
+ kind: TestMod {
+ path: "tests2",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn big_mac() {
+ check(
+ r#"
+//- /lib.rs
+$0
+macro_rules! foo {
+ () => {
+ mod foo_tests {
+ #[test]
+ fn foo0() {}
+ #[test]
+ fn foo1() {}
+ #[test]
+ fn foo2() {}
+ }
+ };
+}
+foo!();
+"#,
+ &[Test, Test, Test, TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo_tests",
+ kind: Module,
+ description: "mod foo_tests",
+ },
+ kind: TestMod {
+ path: "foo_tests",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn dont_recurse_in_outline_submodules() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+mod tests {
+ #[test]
+ fn t() {}
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn outline_submodule1() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+#[test]
+fn t0() {}
+#[test]
+fn t1() {}
+"#,
+ &[TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..7,
+ focus_range: 5..6,
+ name: "m",
+ kind: Module,
+ description: "mod m",
+ },
+ kind: TestMod {
+ path: "m",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn outline_submodule2() {
+ check(
+ r#"
+//- /lib.rs
+mod m;
+//- /m.rs
+$0
+#[test]
+fn t0() {}
+#[test]
+fn t1() {}
+"#,
+ &[TestMod, Test, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 0..39,
+ name: "m",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "m",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 1..19,
+ focus_range: 12..14,
+ name: "t0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "m::t0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 20..38,
+ focus_range: 31..33,
+ name: "t1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "m::t1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn attributed_module() {
+ check(
+ r#"
+//- proc_macros: identity
+//- /lib.rs
+$0
+#[proc_macros::identity]
+mod module {
+ #[test]
+ fn t0() {}
+ #[test]
+ fn t1() {}
+}
+"#,
+ &[TestMod, Test, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 26..94,
+ focus_range: 30..36,
+ name: "module",
+ kind: Module,
+ description: "mod module",
+ },
+ kind: TestMod {
+ path: "module",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 43..65,
+ focus_range: 58..60,
+ name: "t0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "module::t0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 70..92,
+ focus_range: 85..87,
+ name: "t1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "module::t1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_no_tests() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_direct_fn_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ #[test]
+ fn foo_test() {
+ super::foo()
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 31..85,
+ focus_range: 46..54,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_direct_struct_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+struct Fo$0o;
+fn foo(arg: &Foo) { };
+
+mod tests {
+ use super::*;
+
+ #[test]
+ fn foo_test() {
+ foo(Foo);
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 71..122,
+ focus_range: 86..94,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_indirect_fn_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ use super::foo;
+
+ fn check1() {
+ check2()
+ }
+
+ fn check2() {
+ foo()
+ }
+
+ #[test]
+ fn foo_test() {
+ check1()
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 133..183,
+ focus_range: 148..156,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn tests_are_unique() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ use super::foo;
+
+ #[test]
+ fn foo_test() {
+ foo();
+ foo();
+ }
+
+ #[test]
+ fn foo2_test() {
+ foo();
+ foo();
+ }
+
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 52..115,
+ focus_range: 67..75,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 121..185,
+ focus_range: 136..145,
+ name: "foo2_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo2_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn doc_test_type_params() {
+ check(
+ r#"
+//- /lib.rs
+$0
+struct Foo<T, U>;
+
+impl<T, U> Foo<T, U> {
+ /// ```rust
+ /// ````
+ fn t() {}
+}
+"#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 47..85,
+ name: "t",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Foo<T, U>::t",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn doc_test_macro_export_mbe() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod foo;
+
+//- /foo.rs
+/// ```
+/// fn foo() {
+/// }
+/// ```
+#[macro_export]
+macro_rules! foo {
+ () => {
+
+ };
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs
+$0
+/// ```
+/// fn foo() {
+/// }
+/// ```
+#[macro_export]
+macro_rules! foo {
+ () => {
+
+ };
+}
+"#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..94,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+}
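The runnables tests above use the expect-test snapshot style that rust-analyzer relies on throughout: the expected `Runnable` debug output is stored inline in `expect![[...]]` and compared against the actual value, rather than asserted field by field. A minimal sketch of that pattern, assuming only the `expect-test` crate:

    use expect_test::expect;

    #[test]
    fn debug_output_is_snapshotted() {
        // The string inside expect![[...]] is the stored snapshot; running the test
        // with the environment variable UPDATE_EXPECT=1 rewrites it from the actual value.
        let actual = format!("{:?}", vec![1, 2, 3]);
        expect![["[1, 2, 3]"]].assert_eq(&actual);
    }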
diff --git a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
new file mode 100644
index 000000000..15cb89dcc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs
@@ -0,0 +1,71 @@
+use std::sync::Arc;
+
+use ide_db::{
+ base_db::{salsa::Durability, CrateGraph, SourceDatabase},
+ FxHashMap, RootDatabase,
+};
+
+// Feature: Shuffle Crate Graph
+//
+// Randomizes all crate IDs in the crate graph, for debugging.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Shuffle Crate Graph**
+// |===
+pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
+ let crate_graph = db.crate_graph();
+
+ let mut shuffled_ids = crate_graph.iter().collect::<Vec<_>>();
+ shuffle(&mut shuffled_ids);
+
+ let mut new_graph = CrateGraph::default();
+
+ let mut map = FxHashMap::default();
+ for old_id in shuffled_ids.iter().copied() {
+ let data = &crate_graph[old_id];
+ let new_id = new_graph.add_crate_root(
+ data.root_file_id,
+ data.edition,
+ data.display_name.clone(),
+ data.version.clone(),
+ data.cfg_options.clone(),
+ data.potential_cfg_options.clone(),
+ data.env.clone(),
+ data.proc_macro.clone(),
+ data.is_proc_macro,
+ data.origin.clone(),
+ );
+ map.insert(old_id, new_id);
+ }
+
+ for old_id in shuffled_ids.iter().copied() {
+ let data = &crate_graph[old_id];
+ for dep in &data.dependencies {
+ let mut new_dep = dep.clone();
+ new_dep.crate_id = map[&dep.crate_id];
+ new_graph.add_dep(map[&old_id], new_dep).unwrap();
+ }
+ }
+
+ db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH);
+}
+
+fn shuffle<T>(slice: &mut [T]) {
+ let mut rng = oorandom::Rand32::new(seed());
+
+ let mut remaining = slice.len() - 1;
+ while remaining > 0 {
+ let index = rng.rand_range(0..remaining as u32);
+ slice.swap(remaining, index as usize);
+ remaining -= 1;
+ }
+}
+
+fn seed() -> u64 {
+ use std::collections::hash_map::RandomState;
+ use std::hash::{BuildHasher, Hasher};
+
+ RandomState::new().build_hasher().finish()
+}
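`shuffle` above seeds `oorandom` from a randomly keyed `RandomState` hasher and then swaps each position with a random strictly earlier one, so every element is guaranteed to move (the Sattolo variant of Fisher-Yates), which suits a debugging command whose purpose is to perturb crate IDs. A standalone sketch of the same loop, with a simple xorshift standing in for `oorandom` so it runs without extra crates:

    use std::collections::hash_map::RandomState;
    use std::hash::{BuildHasher, Hasher};

    // Same seeding trick as the module: finish a randomly keyed hasher without feeding it anything.
    fn seed() -> u64 {
        RandomState::new().build_hasher().finish()
    }

    fn shuffle<T>(slice: &mut [T]) {
        let mut state = seed() | 1; // xorshift64 must not start at zero
        let mut next = move || {
            state ^= state << 13;
            state ^= state >> 7;
            state ^= state << 17;
            state
        };
        // Swap each position with a random strictly earlier one.
        for i in (1..slice.len()).rev() {
            let j = (next() % i as u64) as usize;
            slice.swap(i, j);
        }
    }

    fn main() {
        let mut ids: Vec<u32> = (0..8).collect();
        shuffle(&mut ids);
        println!("{:?}", ids);
    }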
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
new file mode 100644
index 000000000..fedc1a435
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -0,0 +1,1334 @@
+//! This module provides primitives for showing type and function parameter information when editing
+//! a call or use-site.
+
+use std::collections::BTreeSet;
+
+use either::Either;
+use hir::{AssocItem, GenericParam, HasAttrs, HirDisplay, Semantics, Trait};
+use ide_db::{active_parameter::callable_for_node, base_db::FilePosition};
+use stdx::format_to;
+use syntax::{
+ algo,
+ ast::{self, HasArgList},
+ match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
+};
+
+use crate::RootDatabase;
+
+/// Contains information about an item signature as seen from a use site.
+///
+/// This includes the "active parameter", which is the parameter whose value is currently being
+/// edited.
+#[derive(Debug)]
+pub struct SignatureHelp {
+ pub doc: Option<String>,
+ pub signature: String,
+ pub active_parameter: Option<usize>,
+ parameters: Vec<TextRange>,
+}
+
+impl SignatureHelp {
+ pub fn parameter_labels(&self) -> impl Iterator<Item = &str> + '_ {
+ self.parameters.iter().map(move |&it| &self.signature[it])
+ }
+
+ pub fn parameter_ranges(&self) -> &[TextRange] {
+ &self.parameters
+ }
+
+ fn push_call_param(&mut self, param: &str) {
+ self.push_param('(', param);
+ }
+
+ fn push_generic_param(&mut self, param: &str) {
+ self.push_param('<', param);
+ }
+
+ fn push_param(&mut self, opening_delim: char, param: &str) {
+ if !self.signature.ends_with(opening_delim) {
+ self.signature.push_str(", ");
+ }
+ let start = TextSize::of(&self.signature);
+ self.signature.push_str(param);
+ let end = TextSize::of(&self.signature);
+ self.parameters.push(TextRange::new(start, end))
+ }
+}
+
+/// Computes parameter information for the given position.
+pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Option<SignatureHelp> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id);
+ let file = file.syntax();
+ let token = file
+ .token_at_offset(position.offset)
+ .left_biased()
+        // If the cursor is sandwiched between two space tokens and the call is unclosed,
+        // this prevents us from leaving the CallExpression.
+ .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
+ let token = sema.descend_into_macros_single(token);
+
+ for node in token.parent_ancestors() {
+ match_ast! {
+ match node {
+ ast::ArgList(arg_list) => {
+ let cursor_outside = arg_list.r_paren_token().as_ref() == Some(&token);
+ if cursor_outside {
+ return None;
+ }
+ return signature_help_for_call(&sema, token);
+ },
+ ast::GenericArgList(garg_list) => {
+ let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token);
+ if cursor_outside {
+ return None;
+ }
+ return signature_help_for_generics(&sema, token);
+ },
+ _ => (),
+ }
+ }
+ }
+
+ None
+}
+
+fn signature_help_for_call(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ // Find the calling expression and its NameRef
+ let mut node = token.parent()?;
+ let calling_node = loop {
+ if let Some(callable) = ast::CallableExpr::cast(node.clone()) {
+ if callable
+ .arg_list()
+ .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()))
+ {
+ break callable;
+ }
+ }
+
+ // Stop at multi-line expressions, since the signature of the outer call is not very
+ // helpful inside them.
+ if let Some(expr) = ast::Expr::cast(node.clone()) {
+ if expr.syntax().text().contains_char('\n') {
+ return None;
+ }
+ }
+
+ node = node.parent()?;
+ };
+
+ let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?;
+
+ let mut res =
+ SignatureHelp { doc: None, signature: String::new(), parameters: vec![], active_parameter };
+
+ let db = sema.db;
+ let mut fn_params = None;
+ match callable.kind() {
+ hir::CallableKind::Function(func) => {
+ res.doc = func.docs(db).map(|it| it.into());
+ format_to!(res.signature, "fn {}", func.name(db));
+ fn_params = Some(match callable.receiver_param(db) {
+ Some(_self) => func.params_without_self(db),
+ None => func.assoc_fn_params(db),
+ });
+ }
+ hir::CallableKind::TupleStruct(strukt) => {
+ res.doc = strukt.docs(db).map(|it| it.into());
+ format_to!(res.signature, "struct {}", strukt.name(db));
+ }
+ hir::CallableKind::TupleEnumVariant(variant) => {
+ res.doc = variant.docs(db).map(|it| it.into());
+ format_to!(
+ res.signature,
+ "enum {}::{}",
+ variant.parent_enum(db).name(db),
+ variant.name(db)
+ );
+ }
+ hir::CallableKind::Closure | hir::CallableKind::FnPtr => (),
+ }
+
+ res.signature.push('(');
+ {
+ if let Some(self_param) = callable.receiver_param(db) {
+ format_to!(res.signature, "{}", self_param)
+ }
+ let mut buf = String::new();
+ for (idx, (pat, ty)) in callable.params(db).into_iter().enumerate() {
+ buf.clear();
+ if let Some(pat) = pat {
+ match pat {
+ Either::Left(_self) => format_to!(buf, "self: "),
+ Either::Right(pat) => format_to!(buf, "{}: ", pat),
+ }
+ }
+            // APITs (argument-position `impl Trait`s) are inferred as {unknown} while the user
+            // is still in the middle of entering call arguments.
+            // In that case, fall back to rendering the definitions of the respective parameters.
+            // This is overly conservative: we do not substitute known type vars
+            // (see FIXME in tests::impl_trait) and fall back on any unknown type.
+ match (ty.contains_unknown(), fn_params.as_deref()) {
+ (true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)),
+ _ => format_to!(buf, "{}", ty.display(db)),
+ }
+ res.push_call_param(&buf);
+ }
+ }
+ res.signature.push(')');
+
+ let mut render = |ret_type: hir::Type| {
+ if !ret_type.is_unit() {
+ format_to!(res.signature, " -> {}", ret_type.display(db));
+ }
+ };
+ match callable.kind() {
+ hir::CallableKind::Function(func) if callable.return_type().contains_unknown() => {
+ render(func.ret_type(db))
+ }
+ hir::CallableKind::Function(_) | hir::CallableKind::Closure | hir::CallableKind::FnPtr => {
+ render(callable.return_type())
+ }
+ hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {}
+ }
+ Some(res)
+}
+
+fn signature_help_for_generics(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ let parent = token.parent()?;
+ let arg_list = parent
+ .ancestors()
+ .filter_map(ast::GenericArgList::cast)
+ .find(|list| list.syntax().text_range().contains(token.text_range().start()))?;
+
+ let mut active_parameter = arg_list
+ .generic_args()
+ .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
+ .count();
+
+ let first_arg_is_non_lifetime = arg_list
+ .generic_args()
+ .next()
+ .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_)));
+
+ let mut generics_def = if let Some(path) =
+ arg_list.syntax().ancestors().find_map(ast::Path::cast)
+ {
+ let res = sema.resolve_path(&path)?;
+ let generic_def: hir::GenericDef = match res {
+ hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Const(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Macro(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Module(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None,
+ hir::PathResolution::BuiltinAttr(_)
+ | hir::PathResolution::ToolModule(_)
+ | hir::PathResolution::Local(_)
+ | hir::PathResolution::TypeParam(_)
+ | hir::PathResolution::ConstParam(_)
+ | hir::PathResolution::SelfType(_)
+ | hir::PathResolution::DeriveHelper(_) => return None,
+ };
+
+ generic_def
+ } else if let Some(method_call) = arg_list.syntax().parent().and_then(ast::MethodCallExpr::cast)
+ {
+ // recv.method::<$0>()
+ let method = sema.resolve_method_call(&method_call)?;
+ method.into()
+ } else {
+ return None;
+ };
+
+ let mut res = SignatureHelp {
+ doc: None,
+ signature: String::new(),
+ parameters: vec![],
+ active_parameter: None,
+ };
+
+ let db = sema.db;
+ match generics_def {
+ hir::GenericDef::Function(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "fn {}", it.name(db));
+ }
+ hir::GenericDef::Adt(hir::Adt::Enum(it)) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "enum {}", it.name(db));
+ }
+ hir::GenericDef::Adt(hir::Adt::Struct(it)) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "struct {}", it.name(db));
+ }
+ hir::GenericDef::Adt(hir::Adt::Union(it)) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "union {}", it.name(db));
+ }
+ hir::GenericDef::Trait(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "trait {}", it.name(db));
+ }
+ hir::GenericDef::TypeAlias(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "type {}", it.name(db));
+ }
+ hir::GenericDef::Variant(it) => {
+ // In paths, generics of an enum can be specified *after* one of its variants.
+            // e.g. `None::<u8>`
+ // We'll use the signature of the enum, but include the docs of the variant.
+ res.doc = it.docs(db).map(|it| it.into());
+ let it = it.parent_enum(db);
+ format_to!(res.signature, "enum {}", it.name(db));
+ generics_def = it.into();
+ }
+ // These don't have generic args that can be specified
+ hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None,
+ }
+
+ let params = generics_def.params(sema.db);
+ let num_lifetime_params =
+ params.iter().take_while(|param| matches!(param, GenericParam::LifetimeParam(_))).count();
+ if first_arg_is_non_lifetime {
+ // Lifetime parameters were omitted.
+ active_parameter += num_lifetime_params;
+ }
+ res.active_parameter = Some(active_parameter);
+
+ res.signature.push('<');
+ let mut buf = String::new();
+ for param in params {
+ if let hir::GenericParam::TypeParam(ty) = param {
+ if ty.is_implicit(db) {
+ continue;
+ }
+ }
+
+ buf.clear();
+ format_to!(buf, "{}", param.display(db));
+ res.push_generic_param(&buf);
+ }
+ if let hir::GenericDef::Trait(tr) = generics_def {
+ add_assoc_type_bindings(db, &mut res, tr, arg_list);
+ }
+ res.signature.push('>');
+
+ Some(res)
+}
+
+fn add_assoc_type_bindings(
+ db: &RootDatabase,
+ res: &mut SignatureHelp,
+ tr: Trait,
+ args: ast::GenericArgList,
+) {
+ if args.syntax().ancestors().find_map(ast::TypeBound::cast).is_none() {
+ // Assoc type bindings are only valid in type bound position.
+ return;
+ }
+
+ let present_bindings = args
+ .generic_args()
+ .filter_map(|arg| match arg {
+ ast::GenericArg::AssocTypeArg(arg) => arg.name_ref().map(|n| n.to_string()),
+ _ => None,
+ })
+ .collect::<BTreeSet<_>>();
+
+ let mut buf = String::new();
+ for binding in &present_bindings {
+ buf.clear();
+ format_to!(buf, "{} = …", binding);
+ res.push_generic_param(&buf);
+ }
+
+ for item in tr.items_with_supertraits(db) {
+ if let AssocItem::TypeAlias(ty) = item {
+ let name = ty.name(db).to_smol_str();
+ if !present_bindings.contains(&*name) {
+ buf.clear();
+ format_to!(buf, "{} = …", name);
+ res.push_generic_param(&buf);
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::iter;
+
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::{fixture::ChangeFixture, FilePosition};
+ use stdx::format_to;
+
+ use crate::RootDatabase;
+
+    /// Creates analysis from a multi-file fixture and returns the position marked with $0.
+ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ let mut database = RootDatabase::default();
+ database.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) =
+ change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (database, FilePosition { file_id, offset })
+ }
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ // Implicitly add `Sized` to avoid noisy `T: ?Sized` in the results.
+ let fixture = format!(
+ r#"
+#[lang = "sized"] trait Sized {{}}
+{ra_fixture}
+ "#
+ );
+ let (db, position) = position(&fixture);
+ let sig_help = crate::signature_help::signature_help(&db, position);
+ let actual = match sig_help {
+ Some(sig_help) => {
+ let mut rendered = String::new();
+ if let Some(docs) = &sig_help.doc {
+ format_to!(rendered, "{}\n------\n", docs.as_str());
+ }
+ format_to!(rendered, "{}\n", sig_help.signature);
+ let mut offset = 0;
+ for (i, range) in sig_help.parameter_ranges().iter().enumerate() {
+ let is_active = sig_help.active_parameter == Some(i);
+
+ let start = u32::from(range.start());
+ let gap = start.checked_sub(offset).unwrap_or_else(|| {
+ panic!("parameter ranges out of order: {:?}", sig_help.parameter_ranges())
+ });
+ rendered.extend(iter::repeat(' ').take(gap as usize));
+ let param_text = &sig_help.signature[*range];
+ let width = param_text.chars().count(); // …
+ let marker = if is_active { '^' } else { '-' };
+ rendered.extend(iter::repeat(marker).take(width));
+ offset += gap + u32::from(range.len());
+ }
+ if !sig_help.parameter_ranges().is_empty() {
+ format_to!(rendered, "\n");
+ }
+ rendered
+ }
+ None => String::new(),
+ };
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn test_fn_signature_two_args() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo($03, ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3$0, ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3,$0 ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ------ ^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3, $0); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ------ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_two_args_empty() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo($0); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_two_args_first_generics() {
+ check(
+ r#"
+fn foo<T, U: Copy + Display>(x: T, y: U) -> u32
+ where T: Copy + Display, U: Debug
+{ x + y }
+
+fn bar() { foo($03, ); }
+"#,
+ expect![[r#"
+ fn foo(x: i32, y: U) -> u32
+ ^^^^^^ ----
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_no_params() {
+ check(
+ r#"
+fn foo<T>() -> T where T: Copy + Display {}
+fn bar() { foo($0); }
+"#,
+ expect![[r#"
+ fn foo() -> T
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_impl() {
+ check(
+ r#"
+struct F;
+impl F { pub fn new() { } }
+fn bar() {
+ let _ : F = F::new($0);
+}
+"#,
+ expect![[r#"
+ fn new()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_self() {
+ check(
+ r#"
+struct S;
+impl S { pub fn do_it(&self) {} }
+
+fn bar() {
+ let s: S = S;
+ s.do_it($0);
+}
+"#,
+ expect![[r#"
+ fn do_it(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_with_arg() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo(&self, x: i32) {}
+}
+
+fn main() { S.foo($0); }
+"#,
+ expect![[r#"
+ fn foo(&self, x: i32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_generic_method() {
+ check(
+ r#"
+struct S<T>(T);
+impl<T> S<T> {
+ fn foo(&self, x: T) {}
+}
+
+fn main() { S(1u32).foo($0); }
+"#,
+ expect![[r#"
+ fn foo(&self, x: u32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_with_arg_as_assoc_fn() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo(&self, x: i32) {}
+}
+
+fn main() { S::foo($0); }
+"#,
+ expect![[r#"
+ fn foo(self: &S, x: i32)
+ ^^^^^^^^ ------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_simple() {
+ check(
+ r#"
+/// test
+// non-doc-comment
+fn foo(j: u32) -> u32 {
+ j
+}
+
+fn bar() {
+ let _ = foo($0);
+}
+"#,
+ expect![[r#"
+ test
+ ------
+ fn foo(j: u32) -> u32
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs() {
+ check(
+ r#"
+/// Adds one to the number given.
+///
+/// # Examples
+///
+/// ```
+/// let five = 5;
+///
+/// assert_eq!(6, my_crate::add_one(5));
+/// ```
+pub fn add_one(x: i32) -> i32 {
+ x + 1
+}
+
+pub fn do() {
+ add_one($0
+}"#,
+ expect![[r##"
+ Adds one to the number given.
+
+ # Examples
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ ------
+ fn add_one(x: i32) -> i32
+ ^^^^^^
+ "##]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_impl() {
+ check(
+ r#"
+struct addr;
+impl addr {
+ /// Adds one to the number given.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let five = 5;
+ ///
+ /// assert_eq!(6, my_crate::add_one(5));
+ /// ```
+ pub fn add_one(x: i32) -> i32 {
+ x + 1
+ }
+}
+
+pub fn do_it() {
+ addr {};
+ addr::add_one($0);
+}
+"#,
+ expect![[r##"
+ Adds one to the number given.
+
+ # Examples
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ ------
+ fn add_one(x: i32) -> i32
+ ^^^^^^
+ "##]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_from_actix() {
+ check(
+ r#"
+trait Actor {
+ /// Actor execution context type
+ type Context;
+}
+trait WriteHandler<E>
+where
+ Self: Actor
+{
+ /// Method is called when writer finishes.
+ ///
+ /// By default this method stops actor's `Context`.
+ fn finished(&mut self, ctx: &mut Self::Context) {}
+}
+
+fn foo(mut r: impl WriteHandler<()>) {
+ r.finished($0);
+}
+"#,
+ expect![[r#"
+ Method is called when writer finishes.
+
+ By default this method stops actor's `Context`.
+ ------
+ fn finished(&mut self, ctx: &mut <impl WriteHandler<()> as Actor>::Context)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn call_info_bad_offset() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo $0 (3, ); }
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn outside_of_arg_list() {
+ check(
+ r#"
+fn foo(a: u8) {}
+fn f() {
+ foo(123)$0
+}
+"#,
+ expect![[]],
+ );
+ check(
+ r#"
+fn foo<T>(a: u8) {}
+fn f() {
+ foo::<u32>$0()
+}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_nested_method_in_lambda() {
+ check(
+ r#"
+struct Foo;
+impl Foo { fn bar(&self, _: u32) { } }
+
+fn bar(_: u32) { }
+
+fn main() {
+ let foo = Foo;
+ std::thread::spawn(move || foo.bar($0));
+}
+"#,
+ expect![[r#"
+ fn bar(&self, _: u32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_for_tuple_structs() {
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32);
+fn main() {
+ let s = S(0, $0);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S(u32, i32)
+ --- ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_struct() {
+ check(
+ r#"
+struct S<T>(T);
+fn main() {
+ let s = S($0);
+}
+"#,
+ expect![[r#"
+ struct S({unknown})
+ ^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_for_enum_variants() {
+ check(
+ r#"
+enum E {
+ /// A Variant
+ A(i32),
+ /// Another
+ B,
+ /// And C
+ C { a: i32, b: i32 }
+}
+
+fn main() {
+ let a = E::A($0);
+}
+"#,
+ expect![[r#"
+ A Variant
+ ------
+ enum E::A(i32)
+ ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn cant_call_struct_record() {
+ check(
+ r#"
+struct S { x: u32, y: i32 }
+fn main() {
+ let s = S($0);
+}
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn cant_call_enum_record() {
+ check(
+ r#"
+enum E {
+ /// A Variant
+ A(i32),
+ /// Another
+ B,
+ /// And C
+ C { a: i32, b: i32 }
+}
+
+fn main() {
+ let a = E::C($0);
+}
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn fn_signature_for_call_in_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo() { }
+id! {
+ fn bar() { foo($0); }
+}
+"#,
+ expect![[r#"
+ fn foo()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn call_info_for_lambdas() {
+ check(
+ r#"
+struct S;
+fn foo(s: S) -> i32 { 92 }
+fn main() {
+ (|s| foo(s))($0)
+}
+ "#,
+ expect![[r#"
+ (s: S) -> i32
+ ^^^^
+ "#]],
+ )
+ }
+
+ #[test]
+ fn call_info_for_fn_ptr() {
+ check(
+ r#"
+fn main(f: fn(i32, f64) -> char) {
+ f(0, $0)
+}
+ "#,
+ expect![[r#"
+ (i32, f64) -> char
+ --- ^^^
+ "#]],
+ )
+ }
+
+ #[test]
+ fn call_info_for_unclosed_call() {
+ check(
+ r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+ foo($0
+}"#,
+ expect![[r#"
+ fn foo(foo: u32, bar: u32)
+ ^^^^^^^^ --------
+ "#]],
+ );
+ // check with surrounding space
+ check(
+ r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+ foo( $0
+}"#,
+ expect![[r#"
+ fn foo(foo: u32, bar: u32)
+ ^^^^^^^^ --------
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_multiline_argument() {
+ check(
+ r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+ callee(match 0 {
+ 0 => 1,$0
+ })
+}"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+ callee(match 0 {
+ 0 => 1,
+ },$0)
+}"#,
+ expect![[r#"
+ fn callee(a: u8, b: u8)
+ ----- ^^^^^
+ "#]],
+ );
+ check(
+ r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+ callee($0match 0 {
+ 0 => 1,
+ })
+}"#,
+ expect![[r#"
+ fn callee(a: u8, b: u8)
+ ^^^^^ -----
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_simple() {
+ check(
+ r#"
+/// Option docs.
+enum Option<T> {
+ Some(T),
+ None,
+}
+
+fn f() {
+ let opt: Option<$0
+}
+ "#,
+ expect![[r#"
+ Option docs.
+ ------
+ enum Option<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_on_variant() {
+ check(
+ r#"
+/// Option docs.
+enum Option<T> {
+ /// Some docs.
+ Some(T),
+ /// None docs.
+ None,
+}
+
+use Option::*;
+
+fn f() {
+ None::<$0
+}
+ "#,
+ expect![[r#"
+ None docs.
+ ------
+ enum Option<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_lots_of_generics() {
+ check(
+ r#"
+trait Tr<T> {}
+
+struct S<T>(T);
+
+impl<T> S<T> {
+ fn f<G, H>(g: G, h: impl Tr<G>) where G: Tr<()> {}
+}
+
+fn f() {
+ S::<u8>::f::<(), $0
+}
+ "#,
+ expect![[r#"
+ fn f<G: Tr<()>, H>
+ --------- ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_in_trait_ufcs() {
+ check(
+ r#"
+trait Tr {
+ fn f<T: Tr, U>() {}
+}
+
+struct S;
+
+impl Tr for S {}
+
+fn f() {
+ <S as Tr>::f::<$0
+}
+ "#,
+ expect![[r#"
+ fn f<T: Tr, U>
+ ^^^^^ -
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_in_method_call() {
+ check(
+ r#"
+struct S;
+
+impl S {
+ fn f<T>(&self) {}
+}
+
+fn f() {
+ S.f::<$0
+}
+ "#,
+ expect![[r#"
+ fn f<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generic_param_in_method_call() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn test<V>(&mut self, val: V) {}
+}
+fn sup() {
+ Foo.test($0)
+}
+"#,
+ expect![[r#"
+ fn test(&mut self, val: V)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generic_kinds() {
+ check(
+ r#"
+fn callee<'a, const A: u8, T, const C: u8>() {}
+
+fn f() {
+ callee::<'static, $0
+}
+ "#,
+ expect![[r#"
+ fn callee<'a, const A: u8, T, const C: u8>
+ -- ^^^^^^^^^^^ - -----------
+ "#]],
+ );
+ check(
+ r#"
+fn callee<'a, const A: u8, T, const C: u8>() {}
+
+fn f() {
+ callee::<NON_LIFETIME$0
+}
+ "#,
+ expect![[r#"
+ fn callee<'a, const A: u8, T, const C: u8>
+ -- ^^^^^^^^^^^ - -----------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait_assoc_types() {
+ check(
+ r#"
+trait Trait<'a, T> {
+ type Assoc;
+}
+fn f() -> impl Trait<(), $0
+ "#,
+ expect![[r#"
+ trait Trait<'a, T, Assoc = …>
+ -- - ^^^^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+trait Iterator {
+ type Item;
+}
+fn f() -> impl Iterator<$0
+ "#,
+ expect![[r#"
+ trait Iterator<Item = …>
+ ^^^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+trait Iterator {
+ type Item;
+}
+fn f() -> impl Iterator<Item = $0
+ "#,
+ expect![[r#"
+ trait Iterator<Item = …>
+ ^^^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<$0
+ "#,
+ expect![[r#"
+ trait Tr<A = …, B = …>
+ ^^^^^ -----
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<B$0
+ "#,
+ expect![[r#"
+ trait Tr<A = …, B = …>
+ ^^^^^ -----
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<B = $0
+ "#,
+ expect![[r#"
+ trait Tr<B = …, A = …>
+ ^^^^^ -----
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<B = (), $0
+ "#,
+ expect![[r#"
+ trait Tr<B = …, A = …>
+ ----- ^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_supertrait_assoc() {
+ check(
+ r#"
+trait Super {
+ type SuperTy;
+}
+trait Sub: Super + Super {
+ type SubTy;
+}
+fn f() -> impl Sub<$0
+ "#,
+ expect![[r#"
+ trait Sub<SubTy = …, SuperTy = …>
+ ^^^^^^^^^ -----------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_assoc_types_outside_type_bounds() {
+ check(
+ r#"
+trait Tr<T> {
+ type Assoc;
+}
+
+impl Tr<$0
+ "#,
+ expect![[r#"
+ trait Tr<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn impl_trait() {
+ // FIXME: Substitute type vars in impl trait (`U` -> `i8`)
+ check(
+ r#"
+trait Trait<T> {}
+struct Wrap<T>(T);
+fn foo<U>(x: Wrap<impl Trait<U>>) {}
+fn f() {
+ foo::<i8>($0)
+}
+"#,
+ expect![[r#"
+ fn foo(x: Wrap<impl Trait<U>>)
+ ^^^^^^^^^^^^^^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fully_qualified_syntax() {
+ check(
+ r#"
+fn f() {
+ trait A { fn foo(&self, other: Self); }
+ A::foo(&self$0, other);
+}
+"#,
+ expect![[r#"
+ fn foo(self: &Self, other: Self)
+ ^^^^^^^^^^^ -----------
+ "#]],
+ );
+ }
+}
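The bookkeeping that everything in this file hinges on is `SignatureHelp::push_param` near the top: each parameter's byte range is recorded as it is appended to the growing signature string, so `parameter_labels`/`parameter_ranges` can slice the exact parameter text back out (and the tests can underline the active one). A toy sketch of that invariant, using plain byte offsets instead of rowan's `TextSize`/`TextRange`:

    struct Sig {
        text: String,
        params: Vec<(usize, usize)>, // (start, end) byte offsets into `text`
    }

    impl Sig {
        fn push_param(&mut self, opening_delim: char, param: &str) {
            // Separate from the previous parameter unless we are right after `(` or `<`.
            if !self.text.ends_with(opening_delim) {
                self.text.push_str(", ");
            }
            let start = self.text.len();
            self.text.push_str(param);
            self.params.push((start, self.text.len()));
        }

        fn labels(&self) -> impl Iterator<Item = &str> + '_ {
            self.params.iter().map(move |&(s, e)| &self.text[s..e])
        }
    }

    fn main() {
        let mut sig = Sig { text: String::from("fn foo("), params: Vec::new() };
        sig.push_param('(', "x: u32");
        sig.push_param('(', "y: u32");
        sig.text.push(')');
        assert_eq!(sig.labels().collect::<Vec<_>>(), ["x: u32", "y: u32"]);
        println!("{}", sig.text); // fn foo(x: u32, y: u32)
    }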
diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
new file mode 100644
index 000000000..497eb1cc1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
@@ -0,0 +1,255 @@
+//! This module provides an SSR assist. It is not desirable to include this
+//! assist in ide_assists because that would require the ide_assists crate
+//! to depend on the ide_ssr crate.
+
+use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel};
+use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase};
+
+pub(crate) fn ssr_assists(
+ db: &RootDatabase,
+ resolve: &AssistResolveStrategy,
+ frange: FileRange,
+) -> Vec<Assist> {
+ let mut ssr_assists = Vec::with_capacity(2);
+
+ let (match_finder, comment_range) = match ide_ssr::ssr_from_comment(db, frange) {
+ Some(ssr_data) => ssr_data,
+ None => return ssr_assists,
+ };
+ let id = AssistId("ssr", AssistKind::RefactorRewrite);
+
+ let (source_change_for_file, source_change_for_workspace) = if resolve.should_resolve(&id) {
+ let edits = match_finder.edits();
+
+ let source_change_for_file = {
+ let text_edit_for_file = edits.get(&frange.file_id).cloned().unwrap_or_default();
+ SourceChange::from_text_edit(frange.file_id, text_edit_for_file)
+ };
+
+ let source_change_for_workspace = SourceChange::from(match_finder.edits());
+
+ (Some(source_change_for_file), Some(source_change_for_workspace))
+ } else {
+ (None, None)
+ };
+
+ let assists = vec![
+ ("Apply SSR in file", source_change_for_file),
+ ("Apply SSR in workspace", source_change_for_workspace),
+ ];
+
+ for (label, source_change) in assists.into_iter() {
+ let assist = Assist {
+ id,
+ label: Label::new(label.to_string()),
+ group: Some(GroupLabel("Apply SSR".into())),
+ target: comment_range,
+ source_change,
+ trigger_signature_help: false,
+ };
+
+ ssr_assists.push(assist);
+ }
+
+ ssr_assists
+}
+
+#[cfg(test)]
+mod tests {
+ use std::sync::Arc;
+
+ use expect_test::expect;
+ use ide_assists::{Assist, AssistResolveStrategy};
+ use ide_db::{
+ base_db::{fixture::WithFixture, salsa::Durability, FileRange},
+ symbol_index::SymbolsDatabase,
+ FxHashSet, RootDatabase,
+ };
+
+ use super::ssr_assists;
+
+ fn get_assists(ra_fixture: &str, resolve: AssistResolveStrategy) -> Vec<Assist> {
+ let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(ide_db::base_db::fixture::WORKSPACE);
+ db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+ ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() })
+ }
+
+ #[test]
+ fn not_applicable_comment_not_ssr() {
+ let ra_fixture = r#"
+ //- /lib.rs
+
+ // This is foo $0
+ fn foo() {}
+ "#;
+ let assists = get_assists(ra_fixture, AssistResolveStrategy::All);
+
+ assert_eq!(0, assists.len());
+ }
+
+ #[test]
+ fn resolve_edits_true() {
+ let assists = get_assists(
+ r#"
+ //- /lib.rs
+ mod bar;
+
+ // 2 ==>> 3$0
+ fn foo() { 2 }
+
+ //- /bar.rs
+ fn bar() { 2 }
+ "#,
+ AssistResolveStrategy::All,
+ );
+
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let apply_in_file_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "ssr",
+ RefactorRewrite,
+ ),
+ label: "Apply SSR in file",
+ group: Some(
+ GroupLabel(
+ "Apply SSR",
+ ),
+ ),
+ target: 10..21,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 33..34,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: false,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&apply_in_file_assist);
+
+ let apply_in_workspace_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "ssr",
+ RefactorRewrite,
+ ),
+ label: "Apply SSR in workspace",
+ group: Some(
+ GroupLabel(
+ "Apply SSR",
+ ),
+ ),
+ target: 10..21,
+ source_change: Some(
+ SourceChange {
+ source_file_edits: {
+ FileId(
+ 0,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 33..34,
+ },
+ ],
+ },
+ FileId(
+ 1,
+ ): TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 11..12,
+ },
+ ],
+ },
+ },
+ file_system_edits: [],
+ is_snippet: false,
+ },
+ ),
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&apply_in_workspace_assist);
+ }
+
+ #[test]
+ fn resolve_edits_false() {
+ let assists = get_assists(
+ r#"
+ //- /lib.rs
+ mod bar;
+
+ // 2 ==>> 3$0
+ fn foo() { 2 }
+
+ //- /bar.rs
+ fn bar() { 2 }
+ "#,
+ AssistResolveStrategy::None,
+ );
+
+ assert_eq!(2, assists.len());
+ let mut assists = assists.into_iter();
+
+ let apply_in_file_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "ssr",
+ RefactorRewrite,
+ ),
+ label: "Apply SSR in file",
+ group: Some(
+ GroupLabel(
+ "Apply SSR",
+ ),
+ ),
+ target: 10..21,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&apply_in_file_assist);
+
+ let apply_in_workspace_assist = assists.next().unwrap();
+ expect![[r#"
+ Assist {
+ id: AssistId(
+ "ssr",
+ RefactorRewrite,
+ ),
+ label: "Apply SSR in workspace",
+ group: Some(
+ GroupLabel(
+ "Apply SSR",
+ ),
+ ),
+ target: 10..21,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+ "#]]
+ .assert_debug_eq(&apply_in_workspace_assist);
+ }
+}
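For context on the fixtures above: `ide_ssr::ssr_from_comment` looks for a structural search-and-replace rule written directly in the comment under the cursor, which is what `// 2 ==>> 3$0` is. A hypothetical user-facing example (the function names are invented for illustration) using an SSR placeholder:

    // foo($a) ==>> bar($a)    <- SSR rule in a comment; `$a` is a placeholder.
    // With the cursor on this comment, the two assists built above appear:
    // "Apply SSR in file" rewrites matches in this file only, while
    // "Apply SSR in workspace" rewrites them across all editable files.
    fn foo(x: i32) -> i32 { x }
    fn bar(x: i32) -> i32 { x }

    fn main() {
        let n = foo(1); // would become `bar(1)` after applying either assist
        println!("{}", n);
    }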
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
new file mode 100644
index 000000000..d74b64041
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -0,0 +1,321 @@
+//! This module provides `StaticIndex` which is used for powering
+//! read-only code browsers and emitting LSIF
+
+use std::collections::HashMap;
+
+use hir::{db::HirDatabase, Crate, Module, Semantics};
+use ide_db::{
+ base_db::{FileId, FileRange, SourceDatabaseExt},
+ defs::{Definition, IdentClass},
+ FxHashSet, RootDatabase,
+};
+use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+use crate::{
+ hover::hover_for_definition,
+ moniker::{crate_for_file, def_to_moniker, MonikerResult},
+ Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
+ TryToNav,
+};
+
+/// A static representation of fully analyzed source code.
+///
+/// The intended use-case is powering read-only code browsers and emitting LSIF
+#[derive(Debug)]
+pub struct StaticIndex<'a> {
+ pub files: Vec<StaticIndexedFile>,
+ pub tokens: TokenStore,
+ analysis: &'a Analysis,
+ db: &'a RootDatabase,
+ def_map: HashMap<Definition, TokenId>,
+}
+
+#[derive(Debug)]
+pub struct ReferenceData {
+ pub range: FileRange,
+ pub is_definition: bool,
+}
+
+#[derive(Debug)]
+pub struct TokenStaticData {
+ pub hover: Option<HoverResult>,
+ pub definition: Option<FileRange>,
+ pub references: Vec<ReferenceData>,
+ pub moniker: Option<MonikerResult>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+impl TokenId {
+ pub fn raw(self) -> usize {
+ self.0
+ }
+}
+
+#[derive(Default, Debug)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+ pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+ let id = TokenId(self.0.len());
+ self.0.push(data);
+ id
+ }
+
+ pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+ self.0.get_mut(id.0)
+ }
+
+ pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+ self.0.get(id.0)
+ }
+
+ pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+ self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+ }
+}
+
+#[derive(Debug)]
+pub struct StaticIndexedFile {
+ pub file_id: FileId,
+ pub folds: Vec<Fold>,
+ pub inlay_hints: Vec<InlayHint>,
+ pub tokens: Vec<(TextRange, TokenId)>,
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+ let mut worklist: Vec<_> =
+ Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
+
+impl StaticIndex<'_> {
+ fn add_file(&mut self, file_id: FileId) {
+ let current_crate = crate_for_file(self.db, file_id);
+ let folds = self.analysis.folding_ranges(file_id).unwrap();
+ let inlay_hints = self
+ .analysis
+ .inlay_hints(
+ &InlayHintsConfig {
+ render_colons: true,
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
+ closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock,
+ lifetime_elision_hints: crate::LifetimeElisionHints::Never,
+ reborrow_hints: crate::ReborrowHints::Never,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ binding_mode_hints: false,
+ max_length: Some(25),
+ closing_brace_hints_min_lines: Some(25),
+ },
+ file_id,
+ None,
+ )
+ .unwrap();
+ // hovers
+ let sema = hir::Semantics::new(self.db);
+ let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+ let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+ syntax::NodeOrToken::Node(_) => None,
+ syntax::NodeOrToken::Token(x) => Some(x),
+ });
+ let hover_config =
+ HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+ let tokens = tokens.filter(|token| {
+ matches!(
+ token.kind(),
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ )
+ });
+ let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
+ for token in tokens {
+ let range = token.text_range();
+ let node = token.parent().unwrap();
+ let def = match get_definition(&sema, token.clone()) {
+ Some(x) => x,
+ None => continue,
+ };
+ let id = if let Some(x) = self.def_map.get(&def) {
+ *x
+ } else {
+ let x = self.tokens.insert(TokenStaticData {
+ hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+ definition: def
+ .try_to_nav(self.db)
+ .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+ references: vec![],
+ moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
+ });
+ self.def_map.insert(def, x);
+ x
+ };
+ let token = self.tokens.get_mut(id).unwrap();
+ token.references.push(ReferenceData {
+ range: FileRange { range, file_id },
+ is_definition: match def.try_to_nav(self.db) {
+ Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+ None => false,
+ },
+ });
+ result.tokens.push((range, id));
+ }
+ self.files.push(result);
+ }
+
+ pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
+ let db = &*analysis.db;
+ let work = all_modules(db).into_iter().filter(|module| {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ !source_root.is_library
+ });
+ let mut this = StaticIndex {
+ files: vec![],
+ tokens: Default::default(),
+ analysis,
+ db,
+ def_map: Default::default(),
+ };
+ let mut visited_files = FxHashSet::default();
+ for module in work {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ if visited_files.contains(&file_id) {
+ continue;
+ }
+ this.add_file(file_id);
+ // mark the file
+ visited_files.insert(file_id);
+ }
+ this
+ }
+}
+
+fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+ for token in sema.descend_into_macros(token) {
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+ if let Some(&[x]) = def.as_deref() {
+ return Some(x);
+ } else {
+ continue;
+ };
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{fixture, StaticIndex};
+ use ide_db::base_db::FileRange;
+ use std::collections::HashSet;
+ use syntax::TextSize;
+
+ fn check_all_ranges(ra_fixture: &str) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis);
+ let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ for f in s.files {
+ for (range, _) in f.tokens {
+ let x = FileRange { file_id: f.file_id, range };
+ if !range_set.contains(&x) {
+ panic!("additional range {:?}", x);
+ }
+ range_set.remove(&x);
+ }
+ }
+ if !range_set.is_empty() {
+ panic!("unfound ranges {:?}", range_set);
+ }
+ }
+
+ fn check_definitions(ra_fixture: &str) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis);
+ let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ for (_, t) in s.tokens.iter() {
+ if let Some(x) = t.definition {
+ if x.range.start() == TextSize::from(0) {
+                    // ignore definitions that span the whole file
+ continue;
+ }
+ if !range_set.contains(&x) {
+ panic!("additional definition {:?}", x);
+ }
+ range_set.remove(&x);
+ }
+ }
+ if !range_set.is_empty() {
+ panic!("unfound definitions {:?}", range_set);
+ }
+ }
+
+ #[test]
+ fn struct_and_enum() {
+ check_all_ranges(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo) }
+ //^ ^ ^^^
+"#,
+ );
+ check_definitions(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo) }
+ //^ ^
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_crate() {
+ check_definitions(
+ r#"
+//- /main.rs crate:main deps:foo
+
+
+use foo::func;
+
+fn main() {
+ //^^^^
+ func();
+}
+//- /foo/lib.rs crate:foo
+
+pub func() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn derives() {
+ check_all_ranges(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+//^^^^^^^^^^^^^^^^^^^
+pub macro Copy {}
+ //^^^^
+#[derive(Copy)]
+//^^^^^^ ^^^^
+struct Hello(i32);
+ //^^^^^ ^^^
+"#,
+ );
+ }
+}
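The heart of `StaticIndex::add_file` above is an interning step: each distinct `Definition` is assigned exactly one `TokenId` through `def_map`, so every later reference to the same definition reuses that id and only appends to the shared `references` list. A toy sketch of the pattern, with a plain string standing in for `Definition`:

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct Def(&'static str);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct TokenId(usize);

    #[derive(Default)]
    struct TokenStore {
        data: Vec<Def>,
        def_map: HashMap<Def, TokenId>,
    }

    impl TokenStore {
        fn intern(&mut self, def: Def) -> TokenId {
            if let Some(&id) = self.def_map.get(&def) {
                return id; // already indexed: reuse the id
            }
            let id = TokenId(self.data.len());
            self.data.push(def);
            self.def_map.insert(def, id);
            id
        }
    }

    fn main() {
        let mut store = TokenStore::default();
        let a = store.intern(Def("foo"));
        let b = store.intern(Def("bar"));
        let c = store.intern(Def("foo")); // second occurrence of `foo`
        assert_eq!(a, c);
        assert_ne!(a, b);
        println!("interned {} definitions", store.data.len()); // prints 2
    }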
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
new file mode 100644
index 000000000..3191870eb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -0,0 +1,164 @@
+use std::{fmt, iter::FromIterator, sync::Arc};
+
+use hir::{ExpandResult, MacroFile};
+use ide_db::base_db::{
+ salsa::debug::{DebugQueryTable, TableEntry},
+ CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId,
+};
+use ide_db::{
+ symbol_index::{LibrarySymbolsQuery, SymbolIndex},
+ RootDatabase,
+};
+use itertools::Itertools;
+use profile::{memory_usage, Bytes};
+use std::env;
+use stdx::format_to;
+use syntax::{ast, Parse, SyntaxNode};
+
+fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
+ ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>()
+}
+fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
+ hir::db::ParseMacroExpansionQuery.in_db(db).entries::<SyntaxTreeStats>()
+}
+
+// Feature: Status
+//
+// Shows internal statistics about the memory usage of rust-analyzer.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Status**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[]
+pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
+ let mut buf = String::new();
+ format_to!(buf, "{}\n", FileTextQuery.in_db(db).entries::<FilesStats>());
+ format_to!(buf, "{}\n", LibrarySymbolsQuery.in_db(db).entries::<LibrarySymbolsStats>());
+ format_to!(buf, "{}\n", syntax_tree_stats(db));
+ format_to!(buf, "{} (Macros)\n", macro_syntax_tree_stats(db));
+ format_to!(buf, "{} in total\n", memory_usage());
+ if env::var("RA_COUNT").is_ok() {
+ format_to!(buf, "\nCounts:\n{}", profile::countme::get_all());
+ }
+
+ if let Some(file_id) = file_id {
+ format_to!(buf, "\nFile info:\n");
+ let crates = crate::parent_module::crate_for(db, file_id);
+ if crates.is_empty() {
+ format_to!(buf, "Does not belong to any crate");
+ }
+ let crate_graph = db.crate_graph();
+ for krate in crates {
+ let display_crate = |krate: CrateId| match &crate_graph[krate].display_name {
+ Some(it) => format!("{}({:?})", it, krate),
+ None => format!("{:?}", krate),
+ };
+ format_to!(buf, "Crate: {}\n", display_crate(krate));
+ let deps = crate_graph[krate]
+ .dependencies
+ .iter()
+ .map(|dep| format!("{}={:?}", dep.name, dep.crate_id))
+ .format(", ");
+ format_to!(buf, "Dependencies: {}\n", deps);
+ }
+ }
+
+ buf.trim().to_string()
+}
+
+#[derive(Default)]
+struct FilesStats {
+ total: usize,
+ size: Bytes,
+}
+
+impl fmt::Display for FilesStats {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(fmt, "{} of files", self.size)
+ }
+}
+
+impl FromIterator<TableEntry<FileId, Arc<String>>> for FilesStats {
+ fn from_iter<T>(iter: T) -> FilesStats
+ where
+ T: IntoIterator<Item = TableEntry<FileId, Arc<String>>>,
+ {
+ let mut res = FilesStats::default();
+ for entry in iter {
+ res.total += 1;
+ res.size += entry.value.unwrap().len();
+ }
+ res
+ }
+}
+
+#[derive(Default)]
+pub(crate) struct SyntaxTreeStats {
+ total: usize,
+ pub(crate) retained: usize,
+}
+
+impl fmt::Display for SyntaxTreeStats {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(fmt, "{} trees, {} preserved", self.total, self.retained)
+ }
+}
+
+impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStats {
+ fn from_iter<T>(iter: T) -> SyntaxTreeStats
+ where
+ T: IntoIterator<Item = TableEntry<FileId, Parse<ast::SourceFile>>>,
+ {
+ let mut res = SyntaxTreeStats::default();
+ for entry in iter {
+ res.total += 1;
+ res.retained += entry.value.is_some() as usize;
+ }
+ res
+ }
+}
+
+impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>
+ for SyntaxTreeStats
+{
+ fn from_iter<T>(iter: T) -> SyntaxTreeStats
+ where
+ T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>,
+ {
+ let mut res = SyntaxTreeStats::default();
+ for entry in iter {
+ res.total += 1;
+ res.retained += entry.value.is_some() as usize;
+ }
+ res
+ }
+}
+
+#[derive(Default)]
+struct LibrarySymbolsStats {
+ total: usize,
+ size: Bytes,
+}
+
+impl fmt::Display for LibrarySymbolsStats {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(fmt, "{} of index symbols ({})", self.size, self.total)
+ }
+}
+
+impl FromIterator<TableEntry<SourceRootId, Arc<SymbolIndex>>> for LibrarySymbolsStats {
+ fn from_iter<T>(iter: T) -> LibrarySymbolsStats
+ where
+ T: IntoIterator<Item = TableEntry<SourceRootId, Arc<SymbolIndex>>>,
+ {
+ let mut res = LibrarySymbolsStats::default();
+ for entry in iter {
+ let symbols = entry.value.unwrap();
+ res.total += symbols.len();
+ res.size += symbols.memory_size();
+ }
+ res
+ }
+}
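The module above repeats one pattern: each stats struct implements `FromIterator` over the relevant salsa `TableEntry` type, so `SomeQuery.in_db(db).entries::<Stats>()` can fold every memoized entry into a single summary value. A minimal sketch of the same idea, with plain `(key, len)` pairs standing in for salsa table entries:

    use std::fmt;

    #[derive(Default)]
    struct FilesStats {
        total: usize,
        bytes: usize,
    }

    impl fmt::Display for FilesStats {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{} files, {} bytes", self.total, self.bytes)
        }
    }

    impl FromIterator<(u32, usize)> for FilesStats {
        fn from_iter<T: IntoIterator<Item = (u32, usize)>>(iter: T) -> Self {
            let mut res = FilesStats::default();
            for (_file_id, len) in iter {
                res.total += 1;
                res.bytes += len;
            }
            res
        }
    }

    fn main() {
        let entries = vec![(0u32, 120usize), (1, 450), (2, 88)];
        let stats: FilesStats = entries.into_iter().collect();
        println!("{}", stats); // 3 files, 658 bytes
    }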
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
new file mode 100644
index 000000000..3fb49b45d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -0,0 +1,449 @@
+pub(crate) mod tags;
+
+mod highlights;
+mod injector;
+
+mod highlight;
+mod format;
+mod macro_;
+mod inject;
+mod escape;
+
+mod html;
+#[cfg(test)]
+mod tests;
+
+use hir::{Name, Semantics};
+use ide_db::{FxHashMap, RootDatabase};
+use syntax::{
+ ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
+};
+
+use crate::{
+ syntax_highlighting::{
+ escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
+ macro_::MacroHighlighter, tags::Highlight,
+ },
+ FileId, HlMod, HlTag,
+};
+
+pub(crate) use html::highlight_as_html;
+
+#[derive(Debug, Clone, Copy)]
+pub struct HlRange {
+ pub range: TextRange,
+ pub highlight: Highlight,
+ pub binding_hash: Option<u64>,
+}
+
+// Feature: Semantic Syntax Highlighting
+//
+// rust-analyzer highlights the code semantically.
+// For example, `Bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait.
+// rust-analyzer does not specify colors directly; instead, it assigns a tag (like `struct`) and a set of modifiers (like `declaration`) to each token.
+// It's up to the client to map those to specific colors.
+//
+// The general rule is that a reference to an entity gets colored the same way as the entity itself.
+// We also give a special modifier to `mut` and `&mut` local variables.
+//
+//
+// .Token Tags
+//
+// Rust-analyzer currently emits the following token tags:
+//
+// - For items:
+// +
+// [horizontal]
+// attribute:: Emitted for attribute macros.
+// enum:: Emitted for enums.
+// function:: Emitted for free-standing functions.
+// derive:: Emitted for derive macros.
+// macro:: Emitted for function-like macros.
+// method:: Emitted for associated functions, also known as methods.
+// namespace:: Emitted for modules.
+// struct:: Emitted for structs.
+// trait:: Emitted for traits.
+// typeAlias:: Emitted for type aliases and `Self` in `impl`s.
+// union:: Emitted for unions.
+//
+// - For literals:
+// +
+// [horizontal]
+// boolean:: Emitted for the boolean literals `true` and `false`.
+// character:: Emitted for character literals.
+// number:: Emitted for numeric literals.
+// string:: Emitted for string literals.
+// escapeSequence:: Emitted for escaped sequences inside strings like `\n`.
+// formatSpecifier:: Emitted for format specifiers `{:?}` in `format!`-like macros.
+//
+// - For operators:
+// +
+// [horizontal]
+// operator:: Emitted for general operators.
+// arithmetic:: Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.
+// bitwise:: Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`.
+// comparison:: Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.
+// logical:: Emitted for the logical operators `||`, `&&`, `!`.
+//
+// - For punctuation:
+// +
+// [horizontal]
+// punctuation:: Emitted for general punctuation.
+// attributeBracket:: Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.
+// angle:: Emitted for `<>` angle brackets.
+// brace:: Emitted for `{}` braces.
+// bracket:: Emitted for `[]` brackets.
+// parenthesis:: Emitted for `()` parentheses.
+// colon:: Emitted for the `:` token.
+// comma:: Emitted for the `,` token.
+// dot:: Emitted for the `.` token.
+// semi:: Emitted for the `;` token.
+// macroBang:: Emitted for the `!` token in macro calls.
+//
+// //-
+//
+// [horizontal]
+// builtinAttribute:: Emitted for names of builtin attributes in attribute paths, for example the `repr` in `#[repr(u8)]`.
+// builtinType:: Emitted for builtin types like `u32`, `str` and `f32`.
+// comment:: Emitted for comments.
+// constParameter:: Emitted for const parameters.
+// deriveHelper:: Emitted for derive helper attributes.
+// enumMember:: Emitted for enum variants.
+// generic:: Emitted for generic tokens that have no mapping.
+// keyword:: Emitted for keywords.
+// label:: Emitted for labels.
+// lifetime:: Emitted for lifetimes.
+// parameter:: Emitted for non-self function parameters.
+// property:: Emitted for struct and union fields.
+// selfKeyword:: Emitted for the self function parameter and self path-specifier.
+// selfTypeKeyword:: Emitted for the Self type parameter.
+// toolModule:: Emitted for tool modules.
+// typeParameter:: Emitted for type parameters.
+// unresolvedReference:: Emitted for unresolved references, names that rust-analyzer can't find the definition of.
+// variable:: Emitted for locals, constants and statics.
+//
+//
+// .Token Modifiers
+//
+// Token modifiers allow clients to style some elements in the source code more precisely.
+//
+// Rust-analyzer currently emits the following token modifiers:
+//
+// [horizontal]
+// async:: Emitted for async functions and the `async` and `await` keywords.
+// attribute:: Emitted for tokens inside attributes.
+// callable:: Emitted for locals whose types implement one of the `Fn*` traits.
+// constant:: Emitted for consts.
+// consuming:: Emitted for locals that are being consumed when used in a function call.
+// controlFlow:: Emitted for control-flow related tokens; this includes the `?` operator.
+// crateRoot:: Emitted for crate names, like `serde` and `crate`.
+// declaration:: Emitted for names of definitions, like `foo` in `fn foo() {}`.
+// defaultLibrary:: Emitted for items from built-in crates (std, core, alloc, test and proc_macro).
+// documentation:: Emitted for documentation comments.
+// injected:: Emitted for doc-string injected highlighting like rust source blocks in documentation.
+// intraDocLink:: Emitted for intra doc links in doc-strings.
+// library:: Emitted for items that are defined outside of the current crate.
+// mutable:: Emitted for mutable locals and statics as well as functions taking `&mut self`.
+// public:: Emitted for items that are from the current crate and are `pub`.
+// reference:: Emitted for locals behind a reference and functions taking `self` by reference.
+// static:: Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.
+// trait:: Emitted for associated trait items.
+// unsafe:: Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.
+//
+//
+// image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[]
+// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
+pub(crate) fn highlight(
+ db: &RootDatabase,
+ file_id: FileId,
+ range_to_highlight: Option<TextRange>,
+ syntactic_name_ref_highlighting: bool,
+) -> Vec<HlRange> {
+ let _p = profile::span("highlight");
+ let sema = Semantics::new(db);
+
+ // Determine the root based on the given range.
+ let (root, range_to_highlight) = {
+ let source_file = sema.parse(file_id);
+ let source_file = source_file.syntax();
+ match range_to_highlight {
+ Some(range) => {
+ let node = match source_file.covering_element(range) {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) => it.parent().unwrap_or_else(|| source_file.clone()),
+ };
+ (node, range)
+ }
+ None => (source_file.clone(), source_file.text_range()),
+ }
+ };
+
+ let mut hl = highlights::Highlights::new(root.text_range());
+ let krate = match sema.scope(&root) {
+ Some(it) => it.krate(),
+ None => return hl.to_vec(),
+ };
+ traverse(
+ &mut hl,
+ &sema,
+ file_id,
+ &root,
+ krate,
+ range_to_highlight,
+ syntactic_name_ref_highlighting,
+ );
+ hl.to_vec()
+}
+
+fn traverse(
+ hl: &mut Highlights,
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ root: &SyntaxNode,
+ krate: hir::Crate,
+ range_to_highlight: TextRange,
+ syntactic_name_ref_highlighting: bool,
+) {
+ let is_unlinked = sema.to_module_def(file_id).is_none();
+ let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
+
+ enum AttrOrDerive {
+ Attr(ast::Item),
+ Derive(ast::Item),
+ }
+
+ impl AttrOrDerive {
+ fn item(&self) -> &ast::Item {
+ match self {
+ AttrOrDerive::Attr(item) | AttrOrDerive::Derive(item) => item,
+ }
+ }
+ }
+
+ let mut tt_level = 0;
+ let mut attr_or_derive_item = None;
+ let mut current_macro: Option<ast::Macro> = None;
+ let mut macro_highlighter = MacroHighlighter::default();
+ let mut inside_attribute = false;
+
+ // Walk all nodes, keeping track of whether we are inside a macro or not.
+ // If in macro, expand it first and highlight the expanded code.
+ for event in root.preorder_with_tokens() {
+ use WalkEvent::{Enter, Leave};
+
+ let range = match &event {
+ Enter(it) | Leave(it) => it.text_range(),
+ };
+
+ // Element outside of the viewport, no need to highlight
+ if range_to_highlight.intersect(range).is_none() {
+ continue;
+ }
+
+ // set macro and attribute highlighting states
+ match event.clone() {
+ Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ tt_level += 1;
+ }
+ Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ tt_level -= 1;
+ }
+ Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
+ inside_attribute = true
+ }
+ Leave(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
+ inside_attribute = false
+ }
+
+ Enter(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
+ match ast::Item::cast(node.clone()) {
+ Some(ast::Item::MacroRules(mac)) => {
+ macro_highlighter.init();
+ current_macro = Some(mac.into());
+ continue;
+ }
+ Some(ast::Item::MacroDef(mac)) => {
+ macro_highlighter.init();
+ current_macro = Some(mac.into());
+ continue;
+ }
+ Some(item) => {
+ if matches!(node.kind(), FN | CONST | STATIC) {
+ bindings_shadow_count.clear();
+ }
+
+ if attr_or_derive_item.is_none() {
+ if sema.is_attr_macro_call(&item) {
+ attr_or_derive_item = Some(AttrOrDerive::Attr(item));
+ } else {
+ let adt = match item {
+ ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
+ ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
+ ast::Item::Union(it) => Some(ast::Adt::Union(it)),
+ _ => None,
+ };
+ match adt {
+ Some(adt) if sema.is_derive_annotated(&adt) => {
+ attr_or_derive_item =
+ Some(AttrOrDerive::Derive(ast::Item::from(adt)));
+ }
+ _ => (),
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+ Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
+ match ast::Item::cast(node.clone()) {
+ Some(ast::Item::MacroRules(mac)) => {
+ assert_eq!(current_macro, Some(mac.into()));
+ current_macro = None;
+ macro_highlighter = MacroHighlighter::default();
+ }
+ Some(ast::Item::MacroDef(mac)) => {
+ assert_eq!(current_macro, Some(mac.into()));
+ current_macro = None;
+ macro_highlighter = MacroHighlighter::default();
+ }
+ Some(item)
+ if attr_or_derive_item.as_ref().map_or(false, |it| *it.item() == item) =>
+ {
+ attr_or_derive_item = None;
+ }
+ _ => (),
+ }
+ }
+ _ => (),
+ }
+
+ let element = match event {
+ Enter(NodeOrToken::Token(tok)) if tok.kind() == WHITESPACE => continue,
+ Enter(it) => it,
+ Leave(NodeOrToken::Token(_)) => continue,
+ Leave(NodeOrToken::Node(node)) => {
+                // Doc comment highlighting injection; we do this when leaving the node
+                // so that we overwrite the highlighting of the doc comment itself.
+ inject::doc_comment(hl, sema, file_id, &node);
+ continue;
+ }
+ };
+
+ if current_macro.is_some() {
+ if let Some(tok) = element.as_token() {
+ macro_highlighter.advance(tok);
+ }
+ }
+
+ let element = match element.clone() {
+ NodeOrToken::Node(n) => match ast::NameLike::cast(n) {
+ Some(n) => NodeOrToken::Node(n),
+ None => continue,
+ },
+ NodeOrToken::Token(t) => NodeOrToken::Token(t),
+ };
+ let token = element.as_token().cloned();
+
+ // Descending tokens into macros is expensive even if no descending occurs, so make sure
+ // that we actually are in a position where descending is possible.
+ let in_macro = tt_level > 0
+ || match attr_or_derive_item {
+ Some(AttrOrDerive::Attr(_)) => true,
+ Some(AttrOrDerive::Derive(_)) => inside_attribute,
+ None => false,
+ };
+ let descended_element = if in_macro {
+ // Attempt to descend tokens into macro-calls.
+ match element {
+ NodeOrToken::Token(token) if token.kind() != COMMENT => {
+ let token = match attr_or_derive_item {
+ Some(AttrOrDerive::Attr(_)) => {
+ sema.descend_into_macros_with_kind_preference(token)
+ }
+ Some(AttrOrDerive::Derive(_)) | None => {
+ sema.descend_into_macros_single(token)
+ }
+ };
+ match token.parent().and_then(ast::NameLike::cast) {
+ // Remap the token into the wrapping single token nodes
+ Some(parent) => match (token.kind(), parent.syntax().kind()) {
+ (T![self] | T![ident], NAME | NAME_REF) => NodeOrToken::Node(parent),
+ (T![self] | T![super] | T![crate] | T![Self], NAME_REF) => {
+ NodeOrToken::Node(parent)
+ }
+ (INT_NUMBER, NAME_REF) => NodeOrToken::Node(parent),
+ (LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
+ _ => NodeOrToken::Token(token),
+ },
+ None => NodeOrToken::Token(token),
+ }
+ }
+ e => e,
+ }
+ } else {
+ element
+ };
+
+ // FIXME: do proper macro def highlighting https://github.com/rust-lang/rust-analyzer/issues/6232
+        // Skip highlighting metavariables to prevent keyword highlighting inside them.
+ if descended_element.as_token().and_then(|t| macro_highlighter.highlight(t)).is_some() {
+ continue;
+ }
+
+        // String highlight injections. Note that this does not use the descended element, as
+        // proc-macros can rewrite string literals, which would invalidate our indices.
+ if let (Some(token), Some(descended_token)) = (token, descended_element.as_token()) {
+ if ast::String::can_cast(token.kind()) && ast::String::can_cast(descended_token.kind())
+ {
+ let string = ast::String::cast(token);
+ let string_to_highlight = ast::String::cast(descended_token.clone());
+ if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
+ if string.is_raw() {
+ if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() {
+ continue;
+ }
+ }
+ highlight_format_string(hl, &string, &expanded_string, range);
+ highlight_escape_string(hl, &string, range.start());
+ }
+ } else if ast::ByteString::can_cast(token.kind())
+ && ast::ByteString::can_cast(descended_token.kind())
+ {
+ if let Some(byte_string) = ast::ByteString::cast(token) {
+ highlight_escape_string(hl, &byte_string, range.start());
+ }
+ }
+ }
+
+ let element = match descended_element {
+ NodeOrToken::Node(name_like) => highlight::name_like(
+ sema,
+ krate,
+ &mut bindings_shadow_count,
+ syntactic_name_ref_highlighting,
+ name_like,
+ ),
+ NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
+ };
+ if let Some((mut highlight, binding_hash)) = element {
+ if is_unlinked && highlight.tag == HlTag::UnresolvedReference {
+ // do not emit unresolved references if the file is unlinked
+ // let the editor do its highlighting for these tokens instead
+ continue;
+ }
+ if highlight.tag == HlTag::UnresolvedReference
+ && matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute)
+ {
+ // do not emit unresolved references in derive helpers if the token mapping maps to
+ // something unresolvable. FIXME: There should be a way to prevent that
+ continue;
+ }
+ if inside_attribute {
+ highlight |= HlMod::Attribute
+ }
+
+ hl.add(HlRange { range, highlight, binding_hash });
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
new file mode 100644
index 000000000..6a1236c79
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
@@ -0,0 +1,25 @@
+//! Syntax highlighting for escape sequences
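+//!
+//! Illustrative example: in a literal like `"a\nb"`, the `\n` piece is emitted as an
+//! `EscapeSequence` range on top of the surrounding string-literal highlight, while invalid
+//! escapes are skipped (see the `char.is_err()` check below).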
+use crate::syntax_highlighting::highlights::Highlights;
+use crate::{HlRange, HlTag};
+use syntax::ast::IsString;
+use syntax::TextSize;
+
+pub(super) fn highlight_escape_string<T: IsString>(
+ stack: &mut Highlights,
+ string: &T,
+ start: TextSize,
+) {
+ string.escaped_char_ranges(&mut |piece_range, char| {
+ if char.is_err() {
+ return;
+ }
+
+ if string.text()[piece_range.start().into()..].starts_with('\\') {
+ stack.add(HlRange {
+ range: piece_range + start,
+ highlight: HlTag::EscapeSequence.into(),
+ binding_hash: None,
+ });
+ }
+ });
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
new file mode 100644
index 000000000..2ed57e201
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
@@ -0,0 +1,50 @@
+//! Syntax highlighting for format macro strings.
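+//!
+//! Illustrative example: in `format!("{count:>8}")`, the `{`, `:`, `>`, and `}` pieces are
+//! tagged `FormatSpecifier`, `8` is tagged `NumericLiteral`, and `count` is tagged as a local
+//! symbol, assuming `lex_format_specifiers` reports the usual specifier kinds for them.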
+use ide_db::{
+ syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
+ SymbolKind,
+};
+use syntax::{ast, TextRange};
+
+use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag};
+
+pub(super) fn highlight_format_string(
+ stack: &mut Highlights,
+ string: &ast::String,
+ expanded_string: &ast::String,
+ range: TextRange,
+) {
+ if !is_format_string(expanded_string) {
+ return;
+ }
+
+ lex_format_specifiers(string, &mut |piece_range, kind| {
+ if let Some(highlight) = highlight_format_specifier(kind) {
+ stack.add(HlRange {
+ range: piece_range + range.start(),
+ highlight: highlight.into(),
+ binding_hash: None,
+ });
+ }
+ });
+}
+
+fn highlight_format_specifier(kind: FormatSpecifier) -> Option<HlTag> {
+ Some(match kind {
+ FormatSpecifier::Open
+ | FormatSpecifier::Close
+ | FormatSpecifier::Colon
+ | FormatSpecifier::Fill
+ | FormatSpecifier::Align
+ | FormatSpecifier::Sign
+ | FormatSpecifier::NumberSign
+ | FormatSpecifier::DollarSign
+ | FormatSpecifier::Dot
+ | FormatSpecifier::Asterisk
+ | FormatSpecifier::QuestionMark => HlTag::FormatSpecifier,
+
+ FormatSpecifier::Integer | FormatSpecifier::Zero => HlTag::NumericLiteral,
+
+ FormatSpecifier::Identifier => HlTag::Symbol(SymbolKind::Local),
+ FormatSpecifier::Escape => HlTag::EscapeSequence,
+ })
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
new file mode 100644
index 000000000..9395e914c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -0,0 +1,690 @@
+//! Computes color for a single element.
+
+use hir::{AsAssocItem, HasVisibility, Semantics};
+use ide_db::{
+ defs::{Definition, IdentClass, NameClass, NameRefClass},
+ FxHashMap, RootDatabase, SymbolKind,
+};
+use syntax::{
+ ast, match_ast, AstNode, AstToken, NodeOrToken,
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, T,
+};
+
+use crate::{
+ syntax_highlighting::tags::{HlOperator, HlPunct},
+ Highlight, HlMod, HlTag,
+};
+
+pub(super) fn token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Highlight> {
+ if let Some(comment) = ast::Comment::cast(token.clone()) {
+ let h = HlTag::Comment;
+ return Some(match comment.kind().doc {
+ Some(_) => h | HlMod::Documentation,
+ None => h.into(),
+ });
+ }
+
+ let highlight: Highlight = match token.kind() {
+ STRING | BYTE_STRING => HlTag::StringLiteral.into(),
+ INT_NUMBER if token.parent_ancestors().nth(1).map(|it| it.kind()) == Some(FIELD_EXPR) => {
+ SymbolKind::Field.into()
+ }
+ INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(),
+ BYTE => HlTag::ByteLiteral.into(),
+ CHAR => HlTag::CharLiteral.into(),
+ IDENT if token.parent().and_then(ast::TokenTree::cast).is_some() => {
+            // From this point on we are inside a token tree; this only happens for identifiers
+            // that were not mapped down into macro invocations.
+ HlTag::None.into()
+ }
+ p if p.is_punct() => punctuation(sema, token, p),
+ k if k.is_keyword() => keyword(sema, token, k)?,
+ _ => return None,
+ };
+ Some(highlight)
+}
+
+pub(super) fn name_like(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
+ syntactic_name_ref_highlighting: bool,
+ name_like: ast::NameLike,
+) -> Option<(Highlight, Option<u64>)> {
+ let mut binding_hash = None;
+ let highlight = match name_like {
+ ast::NameLike::NameRef(name_ref) => highlight_name_ref(
+ sema,
+ krate,
+ bindings_shadow_count,
+ &mut binding_hash,
+ syntactic_name_ref_highlighting,
+ name_ref,
+ ),
+ ast::NameLike::Name(name) => {
+ highlight_name(sema, bindings_shadow_count, &mut binding_hash, krate, name)
+ }
+ ast::NameLike::Lifetime(lifetime) => match IdentClass::classify_lifetime(sema, &lifetime) {
+ Some(IdentClass::NameClass(NameClass::Definition(def))) => {
+ highlight_def(sema, krate, def) | HlMod::Definition
+ }
+ Some(IdentClass::NameRefClass(NameRefClass::Definition(def))) => {
+ highlight_def(sema, krate, def)
+ }
+ // FIXME: Fallback for 'static and '_, as we do not resolve these yet
+ _ => SymbolKind::LifetimeParam.into(),
+ },
+ };
+ Some((highlight, binding_hash))
+}
+
+fn punctuation(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+ kind: SyntaxKind,
+) -> Highlight {
+ let parent = token.parent();
+ let parent_kind = parent.as_ref().map_or(EOF, SyntaxNode::kind);
+ match (kind, parent_kind) {
+ (T![?], _) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
+ (T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
+ (T![&], _) => {
+ let h = HlTag::Operator(HlOperator::Other).into();
+ let is_unsafe = parent
+ .and_then(ast::RefExpr::cast)
+ .map(|ref_expr| sema.is_unsafe_ref_expr(&ref_expr));
+ if let Some(true) = is_unsafe {
+ h | HlMod::Unsafe
+ } else {
+ h
+ }
+ }
+ (T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] | T![.], _) => HlOperator::Other.into(),
+ (T![!], MACRO_CALL | MACRO_RULES) => HlPunct::MacroBang.into(),
+ (T![!], NEVER_TYPE) => HlTag::BuiltinType.into(),
+ (T![!], PREFIX_EXPR) => HlOperator::Logical.into(),
+ (T![*], PTR_TYPE) => HlTag::Keyword.into(),
+ (T![*], PREFIX_EXPR) => {
+ let is_raw_ptr = (|| {
+ let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?;
+ let expr = prefix_expr.expr()?;
+ sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ())
+ })();
+ if let Some(()) = is_raw_ptr {
+ HlTag::Operator(HlOperator::Other) | HlMod::Unsafe
+ } else {
+ HlOperator::Other.into()
+ }
+ }
+ (T![-], PREFIX_EXPR) => {
+ let prefix_expr = parent.and_then(ast::PrefixExpr::cast).and_then(|e| e.expr());
+ match prefix_expr {
+ Some(ast::Expr::Literal(_)) => HlTag::NumericLiteral,
+ _ => HlTag::Operator(HlOperator::Other),
+ }
+ .into()
+ }
+ (T![+] | T![-] | T![*] | T![/] | T![%], BIN_EXPR) => HlOperator::Arithmetic.into(),
+ (T![+=] | T![-=] | T![*=] | T![/=] | T![%=], BIN_EXPR) => {
+ Highlight::from(HlOperator::Arithmetic) | HlMod::Mutable
+ }
+ (T![|] | T![&] | T![!] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(),
+ (T![|=] | T![&=] | T![^=] | T![>>=] | T![<<=], BIN_EXPR) => {
+ Highlight::from(HlOperator::Bitwise) | HlMod::Mutable
+ }
+ (T![&&] | T![||], BIN_EXPR) => HlOperator::Logical.into(),
+ (T![>] | T![<] | T![==] | T![>=] | T![<=] | T![!=], BIN_EXPR) => {
+ HlOperator::Comparison.into()
+ }
+ (_, PREFIX_EXPR | BIN_EXPR | RANGE_EXPR | RANGE_PAT | REST_PAT) => HlOperator::Other.into(),
+ (_, ATTR) => HlTag::AttributeBracket.into(),
+ (kind, _) => match kind {
+ T!['['] | T![']'] => HlPunct::Bracket,
+ T!['{'] | T!['}'] => HlPunct::Brace,
+ T!['('] | T![')'] => HlPunct::Parenthesis,
+ T![<] | T![>] => HlPunct::Angle,
+ T![,] => HlPunct::Comma,
+ T![:] => HlPunct::Colon,
+ T![;] => HlPunct::Semi,
+ T![.] => HlPunct::Dot,
+ _ => HlPunct::Other,
+ }
+ .into(),
+ }
+}
+
+fn keyword(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+ kind: SyntaxKind,
+) -> Option<Highlight> {
+ let h = Highlight::new(HlTag::Keyword);
+ let h = match kind {
+ T![await] => h | HlMod::Async | HlMod::ControlFlow,
+ T![async] => h | HlMod::Async,
+ T![break]
+ | T![continue]
+ | T![else]
+ | T![if]
+ | T![in]
+ | T![loop]
+ | T![match]
+ | T![return]
+ | T![while]
+ | T![yield] => h | HlMod::ControlFlow,
+ T![for] if parent_matches::<ast::ForExpr>(&token) => h | HlMod::ControlFlow,
+ T![unsafe] => h | HlMod::Unsafe,
+ T![true] | T![false] => HlTag::BoolLiteral.into(),
+ // crate is handled just as a token if it's in an `extern crate`
+ T![crate] if parent_matches::<ast::ExternCrate>(&token) => h,
+ // self, crate, super and `Self` are handled as either a Name or NameRef already, unless they
+ // are inside unmapped token trees
+ T![self] | T![crate] | T![super] | T![Self] if parent_matches::<ast::NameRef>(&token) => {
+ return None
+ }
+ T![self] if parent_matches::<ast::Name>(&token) => return None,
+ T![ref] => match token.parent().and_then(ast::IdentPat::cast) {
+ Some(ident) if sema.is_unsafe_ident_pat(&ident) => h | HlMod::Unsafe,
+ _ => h,
+ },
+ _ => h,
+ };
+ Some(h)
+}
+
+fn highlight_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
+ binding_hash: &mut Option<u64>,
+ syntactic_name_ref_highlighting: bool,
+ name_ref: ast::NameRef,
+) -> Highlight {
+ let db = sema.db;
+ if let Some(res) = highlight_method_call_by_name_ref(sema, krate, &name_ref) {
+ return res;
+ }
+
+ let name_class = match NameRefClass::classify(sema, &name_ref) {
+ Some(name_kind) => name_kind,
+ None if syntactic_name_ref_highlighting => {
+ return highlight_name_ref_by_syntax(name_ref, sema, krate)
+ }
+ // FIXME: This is required for helper attributes used by proc-macros, as those do not map down
+ // to anything when used.
+ // We can fix this for derive attributes since derive helpers are recorded, but not for
+ // general attributes.
+ None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR) => {
+ return HlTag::Symbol(SymbolKind::Attribute).into();
+ }
+ None => return HlTag::UnresolvedReference.into(),
+ };
+ let mut h = match name_class {
+ NameRefClass::Definition(def) => {
+ if let Definition::Local(local) = &def {
+ let name = local.name(db);
+ let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
+ *binding_hash = Some(calc_binding_hash(&name, *shadow_count))
+ };
+
+ let mut h = highlight_def(sema, krate, def);
+
+ match def {
+ Definition::Local(local) if is_consumed_lvalue(name_ref.syntax(), &local, db) => {
+ h |= HlMod::Consuming;
+ }
+ Definition::Trait(trait_) if trait_.is_unsafe(db) => {
+ if ast::Impl::for_trait_name_ref(&name_ref)
+ .map_or(false, |impl_| impl_.unsafe_token().is_some())
+ {
+ h |= HlMod::Unsafe;
+ }
+ }
+ Definition::Field(field) => {
+ if let Some(parent) = name_ref.syntax().parent() {
+ if matches!(parent.kind(), FIELD_EXPR | RECORD_PAT_FIELD) {
+ if let hir::VariantDef::Union(_) = field.parent_def(db) {
+ h |= HlMod::Unsafe;
+ }
+ }
+ }
+ }
+ Definition::Macro(_) => {
+ if let Some(macro_call) =
+ ide_db::syntax_helpers::node_ext::full_path_of_name_ref(&name_ref)
+ .and_then(|it| it.syntax().parent().and_then(ast::MacroCall::cast))
+ {
+ if sema.is_unsafe_macro_call(&macro_call) {
+ h |= HlMod::Unsafe;
+ }
+ }
+ }
+ _ => (),
+ }
+
+ h
+ }
+ NameRefClass::FieldShorthand { .. } => SymbolKind::Field.into(),
+ };
+
+ h.tag = match name_ref.token_kind() {
+ T![Self] => HlTag::Symbol(SymbolKind::SelfType),
+ T![self] => HlTag::Symbol(SymbolKind::SelfParam),
+ T![super] | T![crate] => HlTag::Keyword,
+ _ => h.tag,
+ };
+ h
+}
+
+fn highlight_name(
+ sema: &Semantics<'_, RootDatabase>,
+ bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
+ binding_hash: &mut Option<u64>,
+ krate: hir::Crate,
+ name: ast::Name,
+) -> Highlight {
+ let name_kind = NameClass::classify(sema, &name);
+ if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind {
+ let name = local.name(sema.db);
+ let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
+ *shadow_count += 1;
+ *binding_hash = Some(calc_binding_hash(&name, *shadow_count))
+ };
+ match name_kind {
+ Some(NameClass::Definition(def)) => {
+ let mut h = highlight_def(sema, krate, def) | HlMod::Definition;
+ if let Definition::Trait(trait_) = &def {
+ if trait_.is_unsafe(sema.db) {
+ h |= HlMod::Unsafe;
+ }
+ }
+ h
+ }
+ Some(NameClass::ConstReference(def)) => highlight_def(sema, krate, def),
+ Some(NameClass::PatFieldShorthand { field_ref, .. }) => {
+ let mut h = HlTag::Symbol(SymbolKind::Field).into();
+ if let hir::VariantDef::Union(_) = field_ref.parent_def(sema.db) {
+ h |= HlMod::Unsafe;
+ }
+ h
+ }
+ None => highlight_name_by_syntax(name) | HlMod::Definition,
+ }
+}
+
+fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
+ fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
+ use std::{collections::hash_map::DefaultHasher, hash::Hasher};
+
+ let mut hasher = DefaultHasher::new();
+ x.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ hash((name, shadow_count))
+}
+
+fn highlight_def(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ def: Definition,
+) -> Highlight {
+ let db = sema.db;
+ let mut h = match def {
+ Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())),
+ Definition::Field(_) => Highlight::new(HlTag::Symbol(SymbolKind::Field)),
+ Definition::Module(module) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module));
+ if module.is_crate_root(db) {
+ h |= HlMod::CrateRoot;
+ }
+ h
+ }
+ Definition::Function(func) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Function));
+ if let Some(item) = func.as_assoc_item(db) {
+ h |= HlMod::Associated;
+ match func.self_param(db) {
+ Some(sp) => match sp.access(db) {
+ hir::Access::Exclusive => {
+ h |= HlMod::Mutable;
+ h |= HlMod::Reference;
+ }
+ hir::Access::Shared => h |= HlMod::Reference,
+ hir::Access::Owned => h |= HlMod::Consuming,
+ },
+ None => h |= HlMod::Static,
+ }
+
+ match item.container(db) {
+ hir::AssocItemContainer::Impl(i) => {
+ if i.trait_(db).is_some() {
+ h |= HlMod::Trait;
+ }
+ }
+ hir::AssocItemContainer::Trait(_t) => {
+ h |= HlMod::Trait;
+ }
+ }
+ }
+
+ if func.is_unsafe_to_call(db) {
+ h |= HlMod::Unsafe;
+ }
+ if func.is_async(db) {
+ h |= HlMod::Async;
+ }
+
+ h
+ }
+ Definition::Adt(adt) => {
+ let h = match adt {
+ hir::Adt::Struct(_) => HlTag::Symbol(SymbolKind::Struct),
+ hir::Adt::Enum(_) => HlTag::Symbol(SymbolKind::Enum),
+ hir::Adt::Union(_) => HlTag::Symbol(SymbolKind::Union),
+ };
+
+ Highlight::new(h)
+ }
+ Definition::Variant(_) => Highlight::new(HlTag::Symbol(SymbolKind::Variant)),
+ Definition::Const(konst) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const));
+
+ if let Some(item) = konst.as_assoc_item(db) {
+ h |= HlMod::Associated;
+ match item.container(db) {
+ hir::AssocItemContainer::Impl(i) => {
+ if i.trait_(db).is_some() {
+ h |= HlMod::Trait;
+ }
+ }
+ hir::AssocItemContainer::Trait(_t) => {
+ h |= HlMod::Trait;
+ }
+ }
+ }
+
+ h
+ }
+ Definition::Trait(_) => Highlight::new(HlTag::Symbol(SymbolKind::Trait)),
+ Definition::TypeAlias(type_) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias));
+
+ if let Some(item) = type_.as_assoc_item(db) {
+ h |= HlMod::Associated;
+ match item.container(db) {
+ hir::AssocItemContainer::Impl(i) => {
+ if i.trait_(db).is_some() {
+ h |= HlMod::Trait;
+ }
+ }
+ hir::AssocItemContainer::Trait(_t) => {
+ h |= HlMod::Trait;
+ }
+ }
+ }
+
+ h
+ }
+ Definition::BuiltinType(_) => Highlight::new(HlTag::BuiltinType),
+ Definition::Static(s) => {
+ let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Static));
+
+ if s.is_mut(db) {
+ h |= HlMod::Mutable;
+ h |= HlMod::Unsafe;
+ }
+
+ h
+ }
+ Definition::SelfType(_) => Highlight::new(HlTag::Symbol(SymbolKind::Impl)),
+ Definition::GenericParam(it) => match it {
+ hir::GenericParam::TypeParam(_) => Highlight::new(HlTag::Symbol(SymbolKind::TypeParam)),
+ hir::GenericParam::ConstParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::ConstParam))
+ }
+ hir::GenericParam::LifetimeParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam))
+ }
+ },
+ Definition::Local(local) => {
+ let tag = if local.is_self(db) {
+ HlTag::Symbol(SymbolKind::SelfParam)
+ } else if local.is_param(db) {
+ HlTag::Symbol(SymbolKind::ValueParam)
+ } else {
+ HlTag::Symbol(SymbolKind::Local)
+ };
+ let mut h = Highlight::new(tag);
+ let ty = local.ty(db);
+ if local.is_mut(db) || ty.is_mutable_reference() {
+ h |= HlMod::Mutable;
+ }
+ if local.is_ref(db) || ty.is_reference() {
+ h |= HlMod::Reference;
+ }
+ if ty.as_callable(db).is_some() || ty.impls_fnonce(db) {
+ h |= HlMod::Callable;
+ }
+ h
+ }
+ Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
+ Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)),
+ Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)),
+ Definition::DeriveHelper(_) => Highlight::new(HlTag::Symbol(SymbolKind::DeriveHelper)),
+ };
+
+ let def_crate = def.krate(db);
+ let is_from_other_crate = def_crate != Some(krate);
+ let is_from_builtin_crate = def_crate.map_or(false, |def_crate| def_crate.is_builtin(db));
+ let is_builtin_type = matches!(def, Definition::BuiltinType(_));
+ let is_public = def.visibility(db) == Some(hir::Visibility::Public);
+
+ match (is_from_other_crate, is_builtin_type, is_public) {
+ (true, false, _) => h |= HlMod::Library,
+ (false, _, true) => h |= HlMod::Public,
+ _ => {}
+ }
+
+ if is_from_builtin_crate {
+ h |= HlMod::DefaultLibrary;
+ }
+
+ h
+}
+
+fn highlight_method_call_by_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ name_ref: &ast::NameRef,
+) -> Option<Highlight> {
+ let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
+ highlight_method_call(sema, krate, &mc)
+}
+
+fn highlight_method_call(
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+ method_call: &ast::MethodCallExpr,
+) -> Option<Highlight> {
+ let func = sema.resolve_method_call(method_call)?;
+
+ let mut h = SymbolKind::Function.into();
+ h |= HlMod::Associated;
+
+ if func.is_unsafe_to_call(sema.db) || sema.is_unsafe_method_call(method_call) {
+ h |= HlMod::Unsafe;
+ }
+ if func.is_async(sema.db) {
+ h |= HlMod::Async;
+ }
+ if func
+ .as_assoc_item(sema.db)
+ .and_then(|it| it.containing_trait_or_trait_impl(sema.db))
+ .is_some()
+ {
+ h |= HlMod::Trait;
+ }
+
+ let def_crate = func.module(sema.db).krate();
+ let is_from_other_crate = def_crate != krate;
+ let is_from_builtin_crate = def_crate.is_builtin(sema.db);
+ let is_public = func.visibility(sema.db) == hir::Visibility::Public;
+
+ if is_from_other_crate {
+ h |= HlMod::Library;
+ } else if is_public {
+ h |= HlMod::Public;
+ }
+
+ if is_from_builtin_crate {
+ h |= HlMod::DefaultLibrary;
+ }
+
+ if let Some(self_param) = func.self_param(sema.db) {
+ match self_param.access(sema.db) {
+ hir::Access::Shared => h |= HlMod::Reference,
+ hir::Access::Exclusive => {
+ h |= HlMod::Mutable;
+ h |= HlMod::Reference;
+ }
+ hir::Access::Owned => {
+ if let Some(receiver_ty) =
+ method_call.receiver().and_then(|it| sema.type_of_expr(&it))
+ {
+ if !receiver_ty.adjusted().is_copy(sema.db) {
+ h |= HlMod::Consuming
+ }
+ }
+ }
+ }
+ }
+ Some(h)
+}
+
+fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
+ let default = HlTag::UnresolvedReference;
+
+ let parent = match name.syntax().parent() {
+ Some(it) => it,
+ _ => return default.into(),
+ };
+
+ let tag = match parent.kind() {
+ STRUCT => SymbolKind::Struct,
+ ENUM => SymbolKind::Enum,
+ VARIANT => SymbolKind::Variant,
+ UNION => SymbolKind::Union,
+ TRAIT => SymbolKind::Trait,
+ TYPE_ALIAS => SymbolKind::TypeAlias,
+ TYPE_PARAM => SymbolKind::TypeParam,
+ RECORD_FIELD => SymbolKind::Field,
+ MODULE => SymbolKind::Module,
+ FN => SymbolKind::Function,
+ CONST => SymbolKind::Const,
+ STATIC => SymbolKind::Static,
+ IDENT_PAT => SymbolKind::Local,
+ _ => return default.into(),
+ };
+
+ tag.into()
+}
+
+fn highlight_name_ref_by_syntax(
+ name: ast::NameRef,
+ sema: &Semantics<'_, RootDatabase>,
+ krate: hir::Crate,
+) -> Highlight {
+ let default = HlTag::UnresolvedReference;
+
+ let parent = match name.syntax().parent() {
+ Some(it) => it,
+ _ => return default.into(),
+ };
+
+ match parent.kind() {
+ METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent)
+ .and_then(|it| highlight_method_call(sema, krate, &it))
+ .unwrap_or_else(|| SymbolKind::Function.into()),
+ FIELD_EXPR => {
+ let h = HlTag::Symbol(SymbolKind::Field);
+ let is_union = ast::FieldExpr::cast(parent)
+ .and_then(|field_expr| sema.resolve_field(&field_expr))
+ .map_or(false, |field| {
+ matches!(field.parent_def(sema.db), hir::VariantDef::Union(_))
+ });
+ if is_union {
+ h | HlMod::Unsafe
+ } else {
+ h.into()
+ }
+ }
+ PATH_SEGMENT => {
+ let name_based_fallback = || {
+ if name.text().chars().next().unwrap_or_default().is_uppercase() {
+ SymbolKind::Struct.into()
+ } else {
+ SymbolKind::Module.into()
+ }
+ };
+ let path = match parent.parent().and_then(ast::Path::cast) {
+ Some(it) => it,
+ _ => return name_based_fallback(),
+ };
+ let expr = match path.syntax().parent() {
+ Some(parent) => match_ast! {
+ match parent {
+ ast::PathExpr(path) => path,
+ ast::MacroCall(_) => return SymbolKind::Macro.into(),
+ _ => return name_based_fallback(),
+ }
+ },
+ // within path, decide whether it is module or adt by checking for uppercase name
+ None => return name_based_fallback(),
+ };
+ let parent = match expr.syntax().parent() {
+ Some(it) => it,
+ None => return default.into(),
+ };
+
+ match parent.kind() {
+ CALL_EXPR => SymbolKind::Function.into(),
+ _ => if name.text().chars().next().unwrap_or_default().is_uppercase() {
+ SymbolKind::Struct
+ } else {
+ SymbolKind::Const
+ }
+ .into(),
+ }
+ }
+ _ => default.into(),
+ }
+}
+
+fn is_consumed_lvalue(node: &SyntaxNode, local: &hir::Local, db: &RootDatabase) -> bool {
+    // When lvalues are passed as arguments and they're not Copy, mark them as Consuming.
+ parents_match(node.clone().into(), &[PATH_SEGMENT, PATH, PATH_EXPR, ARG_LIST])
+ && !local.ty(db).is_copy(db)
+}
+
+/// Returns true if the parent nodes of `node` all match the `SyntaxKind`s in `kinds` exactly.
+fn parents_match(mut node: NodeOrToken<SyntaxNode, SyntaxToken>, mut kinds: &[SyntaxKind]) -> bool {
+ while let (Some(parent), [kind, rest @ ..]) = (&node.parent(), kinds) {
+ if parent.kind() != *kind {
+ return false;
+ }
+
+ // FIXME: Would be nice to get parent out of the match, but binding by-move and by-value
+ // in the same pattern is unstable: rust-lang/rust#68354.
+ node = node.parent().unwrap().into();
+ kinds = rest;
+ }
+
+ // Only true if we matched all expected kinds
+ kinds.is_empty()
+}
+
+fn parent_matches<N: AstNode>(token: &SyntaxToken) -> bool {
+ token.parent().map_or(false, |it| N::can_cast(it.kind()))
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs
new file mode 100644
index 000000000..340290eaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlights.rs
@@ -0,0 +1,92 @@
+//! Collects a tree of highlighted ranges and flattens it.
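+//!
+//! Flattening sketch (illustrative ranges): with a root range `0..10` tagged `none` and a
+//! nested child `2..5` tagged `keyword`, `to_vec` yields `0..2 none`, `2..5 keyword` and
+//! `5..10 none`, so inner highlights take precedence over the enclosing ones.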
+use std::iter;
+
+use stdx::equal_range_by;
+use syntax::TextRange;
+
+use crate::{HlRange, HlTag};
+
+pub(super) struct Highlights {
+ root: Node,
+}
+
+struct Node {
+ hl_range: HlRange,
+ nested: Vec<Node>,
+}
+
+impl Highlights {
+ pub(super) fn new(range: TextRange) -> Highlights {
+ Highlights {
+ root: Node::new(HlRange { range, highlight: HlTag::None.into(), binding_hash: None }),
+ }
+ }
+
+ pub(super) fn add(&mut self, hl_range: HlRange) {
+ self.root.add(hl_range);
+ }
+
+ pub(super) fn to_vec(&self) -> Vec<HlRange> {
+ let mut res = Vec::new();
+ self.root.flatten(&mut res);
+ res
+ }
+}
+
+impl Node {
+ fn new(hl_range: HlRange) -> Node {
+ Node { hl_range, nested: Vec::new() }
+ }
+
+ fn add(&mut self, hl_range: HlRange) {
+ assert!(self.hl_range.range.contains_range(hl_range.range));
+
+ // Fast path
+ if let Some(last) = self.nested.last_mut() {
+ if last.hl_range.range.contains_range(hl_range.range) {
+ return last.add(hl_range);
+ }
+ if last.hl_range.range.end() <= hl_range.range.start() {
+ return self.nested.push(Node::new(hl_range));
+ }
+ }
+
+ let overlapping =
+ equal_range_by(&self.nested, |n| TextRange::ordering(n.hl_range.range, hl_range.range));
+
+ if overlapping.len() == 1
+ && self.nested[overlapping.start].hl_range.range.contains_range(hl_range.range)
+ {
+ return self.nested[overlapping.start].add(hl_range);
+ }
+
+ let nested = self
+ .nested
+ .splice(overlapping.clone(), iter::once(Node::new(hl_range)))
+ .collect::<Vec<_>>();
+ self.nested[overlapping.start].nested = nested;
+ }
+
+ fn flatten(&self, acc: &mut Vec<HlRange>) {
+ let mut start = self.hl_range.range.start();
+ let mut nested = self.nested.iter();
+ loop {
+ let next = nested.next();
+ let end = next.map_or(self.hl_range.range.end(), |it| it.hl_range.range.start());
+ if start < end {
+ acc.push(HlRange {
+ range: TextRange::new(start, end),
+ highlight: self.hl_range.highlight,
+ binding_hash: self.hl_range.binding_hash,
+ });
+ }
+ start = match next {
+ Some(child) => {
+ child.flatten(acc);
+ child.hl_range.range.end()
+ }
+ None => break,
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
new file mode 100644
index 000000000..9777c014c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -0,0 +1,97 @@
+//! Renders a bit of code as HTML.
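+//!
+//! Each highlighted range becomes a `<span>` whose class list is the `Highlight` rendered
+//! with dots replaced by spaces, e.g. `<span class="function declaration">not_static</span>`
+//! (compare the snapshot files under `test_data/`).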
+
+use ide_db::base_db::SourceDatabase;
+use oorandom::Rand32;
+use stdx::format_to;
+use syntax::AstNode;
+
+use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
+
+pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
+ let parse = db.parse(file_id);
+
+ fn rainbowify(seed: u64) -> String {
+ let mut rng = Rand32::new(seed);
+ format!(
+ "hsl({h},{s}%,{l}%)",
+ h = rng.rand_range(0..361),
+ s = rng.rand_range(42..99),
+ l = rng.rand_range(40..91),
+ )
+ }
+
+ let hl_ranges = highlight(db, file_id, None, false);
+ let text = parse.tree().syntax().to_string();
+ let mut buf = String::new();
+ buf.push_str(STYLE);
+ buf.push_str("<pre><code>");
+ for r in &hl_ranges {
+ let chunk = html_escape(&text[r.range]);
+ if r.highlight.is_empty() {
+ format_to!(buf, "{}", chunk);
+ continue;
+ }
+
+ let class = r.highlight.to_string().replace('.', " ");
+ let color = match (rainbow, r.binding_hash) {
+ (true, Some(hash)) => {
+ format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
+ }
+ _ => "".into(),
+ };
+ format_to!(buf, "<span class=\"{}\"{}>{}</span>", class, color, chunk);
+ }
+ buf.push_str("</code></pre>");
+ buf
+}
+
+// FIXME: do real HTML escaping here.
+fn html_escape(text: &str) -> String {
+ text.replace('<', "&lt;").replace('>', "&gt;")
+}
+
+const STYLE: &str = "
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+";
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
new file mode 100644
index 000000000..f376f9fda
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -0,0 +1,279 @@
+//! "Recursive" Syntax highlighting for code in doctests and fixtures.
+
+use std::mem;
+
+use either::Either;
+use hir::{InFile, Semantics};
+use ide_db::{
+ active_parameter::ActiveParameter, base_db::FileId, defs::Definition, rust_doc::is_rust_fence,
+ SymbolKind,
+};
+use syntax::{
+ ast::{self, AstNode, IsString, QuoteOffsets},
+ AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
+ syntax_highlighting::{highlights::Highlights, injector::Injector},
+ Analysis, HlMod, HlRange, HlTag, RootDatabase,
+};
+
+pub(super) fn ra_fixture(
+ hl: &mut Highlights,
+ sema: &Semantics<'_, RootDatabase>,
+ literal: &ast::String,
+ expanded: &ast::String,
+) -> Option<()> {
+ let active_parameter = ActiveParameter::at_token(sema, expanded.syntax().clone())?;
+ if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
+ return None;
+ }
+ let value = literal.value()?;
+
+ if let Some(range) = literal.open_quote_text_range() {
+ hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
+ }
+
+ let mut inj = Injector::default();
+
+ let mut text = &*value;
+ let mut offset: TextSize = 0.into();
+
+ while !text.is_empty() {
+ let marker = "$0";
+ let idx = text.find(marker).unwrap_or(text.len());
+ let (chunk, next) = text.split_at(idx);
+ inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));
+
+ text = next;
+ offset += TextSize::of(chunk);
+
+ if let Some(next) = text.strip_prefix(marker) {
+ if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
+ hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
+ }
+
+ text = next;
+
+ let marker_len = TextSize::of(marker);
+ offset += marker_len;
+ }
+ }
+
+ let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
+
+ for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
+ for range in inj.map_range_up(hl_range.range) {
+ if let Some(range) = literal.map_range_up(range) {
+ hl_range.range = range;
+ hl.add(hl_range);
+ }
+ }
+ }
+
+ if let Some(range) = literal.close_quote_text_range() {
+ hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
+ }
+
+ Some(())
+}
+
+const RUSTDOC_FENCE_LENGTH: usize = 3;
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+
+/// Injection of syntax highlighting of doctests and intra doc links.
+pub(super) fn doc_comment(
+ hl: &mut Highlights,
+ sema: &Semantics<'_, RootDatabase>,
+ src_file_id: FileId,
+ node: &SyntaxNode,
+) {
+ let (attributes, def) = match doc_attributes(sema, node) {
+ Some(it) => it,
+ None => return,
+ };
+ let src_file_id = src_file_id.into();
+
+ // Extract intra-doc links and emit highlights for them.
+ if let Some((docs, doc_mapping)) = attributes.docs_with_rangemap(sema.db) {
+ extract_definitions_from_docs(&docs)
+ .into_iter()
+ .filter_map(|(range, link, ns)| {
+ doc_mapping.map(range).filter(|mapping| mapping.file_id == src_file_id).and_then(
+ |InFile { value: mapped_range, .. }| {
+ Some(mapped_range).zip(resolve_doc_path_for_def(sema.db, def, &link, ns))
+ },
+ )
+ })
+ .for_each(|(range, def)| {
+ hl.add(HlRange {
+ range,
+ highlight: module_def_to_hl_tag(def)
+ | HlMod::Documentation
+ | HlMod::Injected
+ | HlMod::IntraDocLink,
+ binding_hash: None,
+ })
+ });
+ }
+
+ // Extract doc-test sources from the docs and calculate highlighting for them.
+
+ let mut inj = Injector::default();
+ inj.add_unmapped("fn doctest() {\n");
+
+ let attrs_source_map = attributes.source_map(sema.db);
+
+ let mut is_codeblock = false;
+ let mut is_doctest = false;
+
+ let mut new_comments = Vec::new();
+ let mut string;
+
+ for attr in attributes.by_key("doc").attrs() {
+ let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
+ if file_id != src_file_id {
+ continue;
+ }
+ let (line, range) = match &src {
+ Either::Left(it) => {
+ string = match find_doc_string_in_attr(attr, it) {
+ Some(it) => it,
+ None => continue,
+ };
+ let text = string.text();
+ let text_range = string.syntax().text_range();
+ match string.quote_offsets() {
+ Some(QuoteOffsets { contents, .. }) => {
+ (&text[contents - text_range.start()], contents)
+ }
+ None => (text, text_range),
+ }
+ }
+ Either::Right(comment) => {
+ let value = comment.prefix().len();
+ let range = comment.syntax().text_range();
+ (
+ &comment.text()[value..],
+ TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
+ )
+ }
+ };
+
+ let mut range_start = range.start();
+ for line in line.split('\n') {
+ let line_len = TextSize::from(line.len() as u32);
+ let prev_range_start = {
+ let next_range_start = range_start + line_len + TextSize::from(1);
+ mem::replace(&mut range_start, next_range_start)
+ };
+ let mut pos = TextSize::from(0);
+
+ match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
+ Some(idx) => {
+ is_codeblock = !is_codeblock;
+ // Check whether code is rust by inspecting fence guards
+ let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
+ let is_rust = is_rust_fence(guards);
+ is_doctest = is_codeblock && is_rust;
+ continue;
+ }
+ None if !is_doctest => continue,
+ None => (),
+ }
+
+ // whitespace after comment is ignored
+ if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
+ pos += TextSize::of(ws);
+ }
+            // Lines marked with `#` should be ignored in the output; we skip the `#` char.
+ if line[pos.into()..].starts_with('#') {
+ pos += TextSize::of('#');
+ }
+
+ new_comments.push(TextRange::at(prev_range_start, pos));
+ inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
+ inj.add_unmapped("\n");
+ }
+ }
+
+ if new_comments.is_empty() {
+ return; // no need to run an analysis on an empty file
+ }
+
+ inj.add_unmapped("\n}");
+
+ let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
+
+ if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) {
+ for HlRange { range, highlight, binding_hash } in ranges {
+ for range in inj.map_range_up(range) {
+ hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });
+ }
+ }
+ }
+
+ for range in new_comments {
+ hl.add(HlRange {
+ range,
+ highlight: HlTag::Comment | HlMod::Documentation,
+ binding_hash: None,
+ });
+ }
+}
+
+fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
+ match it.expr() {
+ // #[doc = lit]
+ Some(ast::Expr::Literal(lit)) => match lit.kind() {
+ ast::LiteralKind::String(it) => Some(it),
+ _ => None,
+ },
+ // #[cfg_attr(..., doc = "", ...)]
+ None => {
+            // We have to hunt for the string token manually here.
+ let text = attr.string_value()?;
+ // FIXME: We just pick the first string literal that has the same text as the doc attribute
+ // This means technically we might highlight the wrong one
+ it.syntax()
+ .descendants_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .filter_map(ast::String::cast)
+ .find(|string| {
+ string.text().get(1..string.text().len() - 1).map_or(false, |it| it == text)
+ })
+ }
+ _ => None,
+ }
+}
+
+fn module_def_to_hl_tag(def: Definition) -> HlTag {
+ let symbol = match def {
+ Definition::Module(_) => SymbolKind::Module,
+ Definition::Function(_) => SymbolKind::Function,
+ Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
+ Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
+ Definition::Adt(hir::Adt::Union(_)) => SymbolKind::Union,
+ Definition::Variant(_) => SymbolKind::Variant,
+ Definition::Const(_) => SymbolKind::Const,
+ Definition::Static(_) => SymbolKind::Static,
+ Definition::Trait(_) => SymbolKind::Trait,
+ Definition::TypeAlias(_) => SymbolKind::TypeAlias,
+ Definition::BuiltinType(_) => return HlTag::BuiltinType,
+ Definition::Macro(_) => SymbolKind::Macro,
+ Definition::Field(_) => SymbolKind::Field,
+ Definition::SelfType(_) => SymbolKind::Impl,
+ Definition::Local(_) => SymbolKind::Local,
+ Definition::GenericParam(gp) => match gp {
+ hir::GenericParam::TypeParam(_) => SymbolKind::TypeParam,
+ hir::GenericParam::ConstParam(_) => SymbolKind::ConstParam,
+ hir::GenericParam::LifetimeParam(_) => SymbolKind::LifetimeParam,
+ },
+ Definition::Label(_) => SymbolKind::Label,
+ Definition::BuiltinAttr(_) => SymbolKind::BuiltinAttr,
+ Definition::ToolModule(_) => SymbolKind::ToolModule,
+ Definition::DeriveHelper(_) => SymbolKind::DeriveHelper,
+ };
+ HlTag::Symbol(symbol)
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
new file mode 100644
index 000000000..a902fd717
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
@@ -0,0 +1,81 @@
+//! Extracts a subsequence of a text document, remembering the mapping of ranges
+//! between original and extracted texts.
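+//!
+//! Minimal usage sketch: `add_unmapped` appends synthetic text with no source range, `add`
+//! appends text together with the range it came from, and `map_range_up` later translates a
+//! range in the stitched-together text back into ranges of the original document, skipping
+//! the unmapped pieces.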
+use std::ops::{self, Sub};
+
+use stdx::equal_range_by;
+use syntax::{TextRange, TextSize};
+
+#[derive(Default)]
+pub(super) struct Injector {
+ buf: String,
+ ranges: Vec<(TextRange, Option<Delta<TextSize>>)>,
+}
+
+impl Injector {
+ pub(super) fn add(&mut self, text: &str, source_range: TextRange) {
+ let len = TextSize::of(text);
+ assert_eq!(len, source_range.len());
+ self.add_impl(text, Some(source_range.start()));
+ }
+
+ pub(super) fn add_unmapped(&mut self, text: &str) {
+ self.add_impl(text, None);
+ }
+
+ fn add_impl(&mut self, text: &str, source: Option<TextSize>) {
+ let len = TextSize::of(text);
+ let target_range = TextRange::at(TextSize::of(&self.buf), len);
+ self.ranges.push((target_range, source.map(|it| Delta::new(target_range.start(), it))));
+ self.buf.push_str(text);
+ }
+
+ pub(super) fn take_text(&mut self) -> String {
+ std::mem::take(&mut self.buf)
+ }
+
+ pub(super) fn map_range_up(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+ equal_range_by(&self.ranges, |&(r, _)| TextRange::ordering(r, range)).filter_map(move |i| {
+ let (target_range, delta) = self.ranges[i];
+ let intersection = target_range.intersect(range).unwrap();
+ Some(intersection + delta?)
+ })
+ }
+}
+
+#[derive(Clone, Copy)]
+enum Delta<T> {
+ Add(T),
+ Sub(T),
+}
+
+impl<T> Delta<T> {
+ fn new(from: T, to: T) -> Delta<T>
+ where
+ T: Ord + Sub<Output = T>,
+ {
+ if to >= from {
+ Delta::Add(to - from)
+ } else {
+ Delta::Sub(from - to)
+ }
+ }
+}
+
+impl ops::Add<Delta<TextSize>> for TextSize {
+ type Output = TextSize;
+
+ fn add(self, rhs: Delta<TextSize>) -> TextSize {
+ match rhs {
+ Delta::Add(it) => self + it,
+ Delta::Sub(it) => self - it,
+ }
+ }
+}
+
+impl ops::Add<Delta<TextSize>> for TextRange {
+ type Output = TextRange;
+
+ fn add(self, rhs: Delta<TextSize>) -> TextRange {
+ TextRange::at(self.start() + rhs, self.len())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs
new file mode 100644
index 000000000..1099d9c23
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs
@@ -0,0 +1,128 @@
+//! Syntax highlighting for macro_rules!.
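+//!
+//! Rough sketch of the states walked for a single rule, assuming the usual
+//! `macro_rules! m { (matcher) => { expander }; }` layout: `None` -> `Matcher` inside the
+//! matcher parentheses, `Between` across the `=>`, `Expander` inside the expander braces, and
+//! back to `None`. While in `Matcher` or `Expander`, `$`-prefixed metavariables receive the
+//! `UnresolvedReference` tag for now.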
+use syntax::{SyntaxKind, SyntaxToken, TextRange, T};
+
+use crate::{HlRange, HlTag};
+
+#[derive(Default)]
+pub(super) struct MacroHighlighter {
+ state: Option<MacroMatcherParseState>,
+}
+
+impl MacroHighlighter {
+ pub(super) fn init(&mut self) {
+ self.state = Some(MacroMatcherParseState::default());
+ }
+
+ pub(super) fn advance(&mut self, token: &SyntaxToken) {
+ if let Some(state) = self.state.as_mut() {
+ update_macro_state(state, token);
+ }
+ }
+
+ pub(super) fn highlight(&self, token: &SyntaxToken) -> Option<HlRange> {
+ if let Some(state) = self.state.as_ref() {
+ if matches!(state.rule_state, RuleState::Matcher | RuleState::Expander) {
+ if let Some(range) = is_metavariable(token) {
+ return Some(HlRange {
+ range,
+ highlight: HlTag::UnresolvedReference.into(),
+ binding_hash: None,
+ });
+ }
+ }
+ }
+ None
+ }
+}
+
+struct MacroMatcherParseState {
+ /// Opening and corresponding closing bracket of the matcher or expander of the current rule
+ paren_ty: Option<(SyntaxKind, SyntaxKind)>,
+ paren_level: usize,
+ rule_state: RuleState,
+ /// Whether we are inside the outer `{` `}` macro block that holds the rules
+ in_invoc_body: bool,
+}
+
+impl Default for MacroMatcherParseState {
+ fn default() -> Self {
+ MacroMatcherParseState {
+ paren_ty: None,
+ paren_level: 0,
+ in_invoc_body: false,
+ rule_state: RuleState::None,
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+enum RuleState {
+ Matcher,
+ Expander,
+ Between,
+ None,
+}
+
+impl RuleState {
+ fn transition(&mut self) {
+ *self = match self {
+ RuleState::Matcher => RuleState::Between,
+ RuleState::Expander => RuleState::None,
+ RuleState::Between => RuleState::Expander,
+ RuleState::None => RuleState::Matcher,
+ };
+ }
+}
+
+fn update_macro_state(state: &mut MacroMatcherParseState, tok: &SyntaxToken) {
+ if !state.in_invoc_body {
+ if tok.kind() == T!['{'] || tok.kind() == T!['('] {
+ state.in_invoc_body = true;
+ }
+ return;
+ }
+
+ match state.paren_ty {
+ Some((open, close)) => {
+ if tok.kind() == open {
+ state.paren_level += 1;
+ } else if tok.kind() == close {
+ state.paren_level -= 1;
+ if state.paren_level == 0 {
+ state.rule_state.transition();
+ state.paren_ty = None;
+ }
+ }
+ }
+ None => {
+ match tok.kind() {
+ T!['('] => {
+ state.paren_ty = Some((T!['('], T![')']));
+ }
+ T!['{'] => {
+ state.paren_ty = Some((T!['{'], T!['}']));
+ }
+ T!['['] => {
+ state.paren_ty = Some((T!['['], T![']']));
+ }
+ _ => (),
+ }
+ if state.paren_ty.is_some() {
+ state.paren_level = 1;
+ state.rule_state.transition();
+ }
+ }
+ }
+}
+
+fn is_metavariable(token: &SyntaxToken) -> Option<TextRange> {
+ match token.kind() {
+ kind if kind == SyntaxKind::IDENT || kind.is_keyword() => {
+ if let Some(_dollar) = token.prev_token().filter(|t| t.kind() == T![$]) {
+ return Some(token.text_range());
+ }
+ }
+ _ => (),
+ };
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
new file mode 100644
index 000000000..5262770f3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -0,0 +1,340 @@
+//! Defines token tags we use for syntax highlighting.
+//! A tag is not unlike a CSS class.
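+//!
+//! A `Highlight` is a tag plus a set of modifiers and renders as e.g. `function.async.public`
+//! (see the `Display` impls below); clients such as the HTML renderer and the LSP conversion
+//! then map these strings onto their own styling.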
+
+use std::{
+ fmt::{self, Write},
+ ops,
+};
+
+use ide_db::SymbolKind;
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Highlight {
+ pub tag: HlTag,
+ pub mods: HlMods,
+}
+
+#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HlMods(u32);
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HlTag {
+ Symbol(SymbolKind),
+
+ AttributeBracket,
+ BoolLiteral,
+ BuiltinType,
+ ByteLiteral,
+ CharLiteral,
+ Comment,
+ EscapeSequence,
+ FormatSpecifier,
+ Keyword,
+ NumericLiteral,
+ Operator(HlOperator),
+ Punctuation(HlPunct),
+ StringLiteral,
+ UnresolvedReference,
+
+ // For things which don't have a specific highlight.
+ None,
+}
+
+// Don't forget to adjust the feature description in crates/ide/src/syntax_highlighting.rs.
+// Also make sure to use the LSP strings used when converting to the protocol in
+// crates/rust-analyzer/src/semantic_tokens.rs, not the names of the variants here.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[repr(u8)]
+pub enum HlMod {
+ /// Used for items in traits and impls.
+ Associated = 0,
+ /// Used with keywords like `async` and `await`.
+ Async,
+ /// Used to differentiate individual elements within attributes.
+ Attribute,
+ /// Callable item or value.
+ Callable,
+ /// Value that is being consumed in a function call
+ Consuming,
+ /// Used with keywords like `if` and `break`.
+ ControlFlow,
+ /// Used for crate names, like `serde`.
+ CrateRoot,
+ /// Used for items from built-in crates (std, core, alloc, test and proc_macro).
+ DefaultLibrary,
+ /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is
+ /// not.
+ Definition,
+ /// Doc-strings like this one.
+ Documentation,
+ /// Highlighting injection like rust code in doc strings or ra_fixture.
+ Injected,
+ /// Used for intra doc links in doc injection.
+ IntraDocLink,
+ /// Used for items from other crates.
+ Library,
+ /// Mutable binding.
+ Mutable,
+ /// Used for public items.
+ Public,
+ /// Immutable reference.
+ Reference,
+ /// Used for associated functions.
+ Static,
+ /// Used for items in traits and trait impls.
+ Trait,
+ // Keep this last!
+ /// Used for unsafe functions, unsafe traits, mutable statics, union accesses and unsafe operations.
+ Unsafe,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HlPunct {
+ /// []
+ Bracket,
+ /// {}
+ Brace,
+ /// ()
+ Parenthesis,
+ /// <>
+ Angle,
+ /// ,
+ Comma,
+ /// .
+ Dot,
+ /// :
+ Colon,
+ /// ;
+ Semi,
+ /// ! (only for macro calls)
+ MacroBang,
+    /// Any other punctuation
+ Other,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HlOperator {
+ /// |, &, !, ^, |=, &=, ^=
+ Bitwise,
+ /// +, -, *, /, +=, -=, *=, /=
+ Arithmetic,
+ /// &&, ||, !
+ Logical,
+ /// >, <, ==, >=, <=, !=
+ Comparison,
+    /// Any other operator
+ Other,
+}
+
+impl HlTag {
+ fn as_str(self) -> &'static str {
+ match self {
+ HlTag::Symbol(symbol) => match symbol {
+ SymbolKind::Attribute => "attribute",
+ SymbolKind::BuiltinAttr => "builtin_attr",
+ SymbolKind::Const => "constant",
+ SymbolKind::ConstParam => "const_param",
+ SymbolKind::Derive => "derive",
+ SymbolKind::DeriveHelper => "derive_helper",
+ SymbolKind::Enum => "enum",
+ SymbolKind::Field => "field",
+ SymbolKind::Function => "function",
+ SymbolKind::Impl => "self_type",
+ SymbolKind::Label => "label",
+ SymbolKind::LifetimeParam => "lifetime",
+ SymbolKind::Local => "variable",
+ SymbolKind::Macro => "macro",
+ SymbolKind::Module => "module",
+ SymbolKind::SelfParam => "self_keyword",
+ SymbolKind::SelfType => "self_type_keyword",
+ SymbolKind::Static => "static",
+ SymbolKind::Struct => "struct",
+ SymbolKind::ToolModule => "tool_module",
+ SymbolKind::Trait => "trait",
+ SymbolKind::TypeAlias => "type_alias",
+ SymbolKind::TypeParam => "type_param",
+ SymbolKind::Union => "union",
+ SymbolKind::ValueParam => "value_param",
+ SymbolKind::Variant => "enum_variant",
+ },
+ HlTag::AttributeBracket => "attribute_bracket",
+ HlTag::BoolLiteral => "bool_literal",
+ HlTag::BuiltinType => "builtin_type",
+ HlTag::ByteLiteral => "byte_literal",
+ HlTag::CharLiteral => "char_literal",
+ HlTag::Comment => "comment",
+ HlTag::EscapeSequence => "escape_sequence",
+ HlTag::FormatSpecifier => "format_specifier",
+ HlTag::Keyword => "keyword",
+ HlTag::Punctuation(punct) => match punct {
+ HlPunct::Bracket => "bracket",
+ HlPunct::Brace => "brace",
+ HlPunct::Parenthesis => "parenthesis",
+ HlPunct::Angle => "angle",
+ HlPunct::Comma => "comma",
+ HlPunct::Dot => "dot",
+ HlPunct::Colon => "colon",
+ HlPunct::Semi => "semicolon",
+ HlPunct::MacroBang => "macro_bang",
+ HlPunct::Other => "punctuation",
+ },
+ HlTag::NumericLiteral => "numeric_literal",
+ HlTag::Operator(op) => match op {
+ HlOperator::Bitwise => "bitwise",
+ HlOperator::Arithmetic => "arithmetic",
+ HlOperator::Logical => "logical",
+ HlOperator::Comparison => "comparison",
+ HlOperator::Other => "operator",
+ },
+ HlTag::StringLiteral => "string_literal",
+ HlTag::UnresolvedReference => "unresolved_reference",
+ HlTag::None => "none",
+ }
+ }
+}
+
+impl fmt::Display for HlTag {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl HlMod {
+ const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[
+ HlMod::Associated,
+ HlMod::Async,
+ HlMod::Attribute,
+ HlMod::Callable,
+ HlMod::Consuming,
+ HlMod::ControlFlow,
+ HlMod::CrateRoot,
+ HlMod::DefaultLibrary,
+ HlMod::Definition,
+ HlMod::Documentation,
+ HlMod::Injected,
+ HlMod::IntraDocLink,
+ HlMod::Library,
+ HlMod::Mutable,
+ HlMod::Public,
+ HlMod::Reference,
+ HlMod::Static,
+ HlMod::Trait,
+ HlMod::Unsafe,
+ ];
+
+ fn as_str(self) -> &'static str {
+ match self {
+ HlMod::Associated => "associated",
+ HlMod::Async => "async",
+ HlMod::Attribute => "attribute",
+ HlMod::Callable => "callable",
+ HlMod::Consuming => "consuming",
+ HlMod::ControlFlow => "control",
+ HlMod::CrateRoot => "crate_root",
+ HlMod::DefaultLibrary => "default_library",
+ HlMod::Definition => "declaration",
+ HlMod::Documentation => "documentation",
+ HlMod::Injected => "injected",
+ HlMod::IntraDocLink => "intra_doc_link",
+ HlMod::Library => "library",
+ HlMod::Mutable => "mutable",
+ HlMod::Public => "public",
+ HlMod::Reference => "reference",
+ HlMod::Static => "static",
+ HlMod::Trait => "trait",
+ HlMod::Unsafe => "unsafe",
+ }
+ }
+
+ fn mask(self) -> u32 {
+ 1 << (self as u32)
+ }
+}
+
+impl fmt::Display for HlMod {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+
+impl fmt::Display for Highlight {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.tag.fmt(f)?;
+ for modifier in self.mods.iter() {
+ f.write_char('.')?;
+ modifier.fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+impl From<HlTag> for Highlight {
+ fn from(tag: HlTag) -> Highlight {
+ Highlight::new(tag)
+ }
+}
+
+impl From<HlOperator> for Highlight {
+ fn from(op: HlOperator) -> Highlight {
+ Highlight::new(HlTag::Operator(op))
+ }
+}
+
+impl From<HlPunct> for Highlight {
+ fn from(punct: HlPunct) -> Highlight {
+ Highlight::new(HlTag::Punctuation(punct))
+ }
+}
+
+impl From<SymbolKind> for Highlight {
+ fn from(sym: SymbolKind) -> Highlight {
+ Highlight::new(HlTag::Symbol(sym))
+ }
+}
+
+impl Highlight {
+ pub(crate) fn new(tag: HlTag) -> Highlight {
+ Highlight { tag, mods: HlMods::default() }
+ }
+ pub fn is_empty(&self) -> bool {
+ self.tag == HlTag::None && self.mods == HlMods::default()
+ }
+}
+
+impl ops::BitOr<HlMod> for HlTag {
+ type Output = Highlight;
+
+ fn bitor(self, rhs: HlMod) -> Highlight {
+ Highlight::new(self) | rhs
+ }
+}
+
+impl ops::BitOrAssign<HlMod> for HlMods {
+ fn bitor_assign(&mut self, rhs: HlMod) {
+ self.0 |= rhs.mask();
+ }
+}
+
+impl ops::BitOrAssign<HlMod> for Highlight {
+ fn bitor_assign(&mut self, rhs: HlMod) {
+ self.mods |= rhs;
+ }
+}
+
+impl ops::BitOr<HlMod> for Highlight {
+ type Output = Highlight;
+
+ fn bitor(mut self, rhs: HlMod) -> Highlight {
+ self |= rhs;
+ self
+ }
+}
+
+impl HlMods {
+ pub fn contains(self, m: HlMod) -> bool {
+ self.0 & m.mask() == m.mask()
+ }
+
+ pub fn iter(self) -> impl Iterator<Item = HlMod> {
+ HlMod::ALL.iter().copied().filter(move |it| self.0 & it.mask() == it.mask())
+ }
+}
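For orientation only (this is an illustrative sketch, not part of the upstream diff): the operators defined above let callers build a Highlight by or-ing a tag with modifiers. The sketch below assumes the HlTag, HlMod, HlMods, Highlight, and SymbolKind definitions from earlier in this tags.rs file, and assumes SymbolKind::Function renders as "function" in the match arm above this excerpt.

    // Illustrative sketch; relies on the surrounding tags.rs definitions.
    fn example() {
        // `HlTag | HlMod` yields a Highlight; each further `| HlMod` sets another modifier bit.
        let hl: Highlight = HlTag::Symbol(SymbolKind::Function) | HlMod::Associated | HlMod::Static;

        // HlMods is a bit set keyed by HlMod::mask(); `contains` tests a single modifier.
        assert!(hl.mods.contains(HlMod::Associated));
        assert!(!hl.mods.contains(HlMod::Mutable));

        // Display joins the tag and each active modifier with '.', in HlMod::ALL order,
        // which is the dotted class string used by the HTML test fixtures below.
        assert_eq!(hl.to_string(), "function.associated.static");
    }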
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
new file mode 100644
index 000000000..e07fd3925
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
@@ -0,0 +1,62 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">not_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">struct</span> <span class="struct declaration">foo</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="struct">foo</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">t</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration static trait">t_is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">t_is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="trait">t</span> <span class="keyword">for</span> <span class="struct">foo</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static trait">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference trait">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
new file mode 100644
index 000000000..1a4398814
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -0,0 +1,58 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">allow</span><span class="parenthesis attribute">(</span><span class="none attribute">dead_code</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="tool_module attribute library">rustfmt</span><span class="operator attribute">::</span><span class="tool_module attribute library">skip</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">identity</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment documentation">/// This is a doc comment</span>
+<span class="comment">// This is a normal comment</span>
+<span class="comment documentation">/// This is a doc comment</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment">// This is another normal comment</span>
+<span class="comment documentation">/// This is another doc comment</span>
+<span class="comment">// This is another normal comment</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment">// The reason for these being here is to test AttrIds</span>
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="semicolon">;</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
new file mode 100644
index 000000000..1e4c06df7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -0,0 +1,66 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">foo</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
+
+<span class="keyword">pub</span> <span class="keyword">const</span> <span class="constant declaration public">NINETY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">92</span><span class="semicolon">;</span>
+
+<span class="keyword">use</span> <span class="module crate_root library">foo</span> <span class="keyword">as</span> <span class="module crate_root declaration library">foooo</span><span class="semicolon">;</span>
+
+<span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword crate_root public">crate</span><span class="parenthesis">)</span> <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">mod</span> <span class="module declaration">bar</span> <span class="brace">{</span>
+ <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword crate_root public">super</span><span class="parenthesis">)</span> <span class="keyword">const</span> <span class="constant declaration">FORTY_TWO</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">42</span><span class="semicolon">;</span>
+
+ <span class="keyword">mod</span> <span class="module declaration">baz</span> <span class="brace">{</span>
+ <span class="keyword">use</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword crate_root public">super</span><span class="operator">::</span><span class="constant public">NINETY_TWO</span><span class="semicolon">;</span>
+ <span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="operator">::</span><span class="module crate_root library">foooo</span><span class="operator">::</span><span class="struct library">Point</span><span class="semicolon">;</span>
+
+ <span class="keyword">pub</span><span class="parenthesis">(</span><span class="keyword control">in</span> <span class="keyword">super</span><span class="operator">::</span><span class="keyword crate_root public">super</span><span class="parenthesis">)</span> <span class="keyword">const</span> <span class="constant declaration">TWENTY_NINE</span><span class="colon">:</span> <span class="builtin_type">u8</span> <span class="operator">=</span> <span class="numeric_literal">29</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+</code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
new file mode 100644
index 000000000..5d66f832d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -0,0 +1,50 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">foo</span> <span class="operator">=</span> <span class="enum_variant default_library library">Some</span><span class="parenthesis">(</span><span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">nums</span> <span class="operator">=</span> <span class="module default_library library">iter</span><span class="operator">::</span><span class="function default_library library">repeat</span><span class="parenthesis">(</span><span class="variable">foo</span><span class="operator">.</span><span class="function associated consuming default_library library">unwrap</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
new file mode 100644
index 000000000..a747b4bc1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -0,0 +1,190 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
+<span class="comment documentation">//! ```</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! ```</span>
+
+<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
+
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
+ <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// This is an impl with a code block.</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
+ <span class="comment">// KILLER WHALE</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> Ishmael."</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="keyword">pub</span> <span class="keyword">const</span> <span class="constant associated declaration public">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="bool_literal">true</span><span class="semicolon">;</span>
+
+ <span class="comment documentation">/// Constructs a new `Foo`.</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// # Examples</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected library">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function associated declaration public static">new</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">bar</span><span class="colon">:</span> <span class="bool_literal">true</span> <span class="brace">}</span>
+ <span class="brace">}</span>
+
+ <span class="comment documentation">/// `bar` method on `Foo`.</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// # Examples</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// calls bar on foo</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">/* multi-line</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected"> comment */</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> bar</span><span class="escape_sequence injected">\n</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected"> "</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```rust,no_run</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="function injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ~~~rust,no_run</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// code block with tilde.</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="function injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">/// ~~~</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// functions</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword injected">const</span><span class="none injected"> </span><span class="const_param declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param injected">X</span><span class="semicolon injected">;</span>
+ <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="comment documentation">///</span>
+ <span class="comment documentation">/// ```sh</span>
+ <span class="comment documentation">/// echo 1</span>
+ <span class="comment documentation">/// ```</span>
+ <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
+ <span class="bool_literal">true</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[`Foo`](Foo)</span><span class="comment documentation"> is a struct</span>
+<span class="comment documentation">/// This function is &gt; </span><span class="function documentation injected intra_doc_link">[`all_the_links`](all_the_links)</span><span class="comment documentation"> &lt;</span>
+<span class="comment documentation">/// </span><span class="macro documentation injected intra_doc_link">[`noop`](noop)</span><span class="comment documentation"> is a macro below</span>
+<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[`Item`]</span><span class="comment documentation"> is a struct in the module </span><span class="module documentation injected intra_doc_link">[`module`]</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// [`Item`]: module::Item</span>
+<span class="comment documentation">/// [mix_and_match]: ThisShouldntResolve</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">all_the_links</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">module</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">struct</span> <span class="struct declaration public">Item</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr </span><span class="brace injected">}</span><span class="brace injected">}</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>expr
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// ```rust</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"false"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="attribute_bracket attribute">]</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">///</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```rust"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```ignore"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// ```</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">mix_and_match</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="comment documentation">/**
+It is beyond me why you'd use these when you got ///
+```rust
+</span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
+```
+</span><span class="function documentation injected intra_doc_link">[`block_comments2`]</span><span class="comment documentation"> tests these with indentation
+ */</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="comment documentation">/**
+ Really, I don't get it
+ ```rust
+</span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
+ ```
+ </span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
+*/</span>
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+</code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
new file mode 100644
index 000000000..af41796e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -0,0 +1,47 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">std</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">alloc</span> <span class="keyword">as</span> <span class="module crate_root declaration library">abc</span><span class="semicolon">;</span>
+</code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
new file mode 100644
index 000000000..a97802cbb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -0,0 +1,233 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">ops</span> <span class="brace">{</span>
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_once"</span><span class="attribute_bracket attribute">]</span>
+ <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnOnce</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_mut"</span><span class="attribute_bracket attribute">]</span>
+ <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnMut</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span><span class="colon">:</span> <span class="trait public">FnOnce</span><span class="angle">&lt;</span><span class="type_param">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn"</span><span class="attribute_bracket attribute">]</span>
+ <span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">Fn</span><span class="angle">&lt;</span><span class="type_param declaration">Args</span><span class="angle">&gt;</span><span class="colon">:</span> <span class="trait public">FnMut</span><span class="angle">&lt;</span><span class="type_param">Args</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
+ <span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">Bar</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="trait">Bar</span> <span class="keyword">for</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated consuming declaration">baz</span><span class="parenthesis">(</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable">self</span><span class="comma">,</span> <span class="value_param declaration">f</span><span class="colon">:</span> <span class="struct">Foo</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="value_param">f</span><span class="operator">.</span><span class="function associated consuming">baz</span><span class="parenthesis">(</span><span class="self_keyword consuming mutable">self</span><span class="parenthesis">)</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
+ <span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="operator">::</span><span class="struct">FooCopy</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="struct declaration">BarCopy</span><span class="brace">}</span><span class="semicolon">;</span>
+
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration">FooCopy</span> <span class="brace">{</span>
+ <span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="struct">FooCopy</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated consuming declaration">baz</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="comma">,</span> <span class="value_param declaration">f</span><span class="colon">:</span> <span class="struct">FooCopy</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="value_param">f</span><span class="operator">.</span><span class="function associated">baz</span><span class="parenthesis">(</span><span class="self_keyword">self</span><span class="parenthesis">)</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+
+ <span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">str</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="function">str</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="comma">,</span> <span class="type_param declaration">T</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="type_param">T</span> <span class="brace">{</span>
+ <span class="function">foo</span><span class="operator">::</span><span class="angle">&lt;</span><span class="lifetime">'a</span><span class="comma">,</span> <span class="builtin_type">i32</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">never</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">!</span> <span class="brace">{</span>
+ <span class="keyword control">loop</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">const_param</span><span class="angle">&lt;</span><span class="keyword">const</span> <span class="const_param declaration">FOO</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">usize</span> <span class="brace">{</span>
+ <span class="function">const_param</span><span class="operator">::</span><span class="angle">&lt;</span><span class="brace">{</span> <span class="const_param">FOO</span> <span class="brace">}</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="const_param">FOO</span>
+<span class="brace">}</span>
+
+<span class="keyword">use</span> <span class="module public">ops</span><span class="operator">::</span><span class="trait public">Fn</span><span class="semicolon">;</span>
+<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait public">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="value_param callable">f</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">foobar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="keyword">impl</span> <span class="trait default_library library">Copy</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">bar</span> <span class="operator">=</span> <span class="function">foobar</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="comment">// comment</span>
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> <span class="operator">=</span> <span class="numeric_literal">42</span><span class="semicolon">;</span>
+ <span class="variable mutable">x</span> <span class="arithmetic mutable">+=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration mutable reference">y</span> <span class="operator">=</span> <span class="operator">&</span><span class="keyword">mut</span> <span class="variable mutable">x</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration reference">z</span> <span class="operator">=</span> <span class="operator">&</span><span class="variable mutable reference">y</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="colon">:</span> <span class="variable declaration">z</span><span class="comma">,</span> <span class="variable declaration">y</span> <span class="brace">}</span> <span class="operator">=</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="colon">:</span> <span class="variable reference">z</span><span class="comma">,</span> <span class="variable mutable reference">y</span> <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="variable">y</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">foo</span> <span class="operator">=</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="comma">,</span> <span class="unresolved_reference">y</span><span class="colon">:</span> <span class="variable mutable">x</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">foo2</span> <span class="operator">=</span> <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">x</span><span class="comma">,</span> <span class="unresolved_reference">y</span><span class="colon">:</span> <span class="variable mutable">x</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="variable mutable">foo</span><span class="operator">.</span><span class="function associated reference">quop</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">foo</span><span class="operator">.</span><span class="function associated mutable reference">qux</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">foo</span><span class="operator">.</span><span class="function associated consuming">baz</span><span class="parenthesis">(</span><span class="variable consuming">foo2</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">copy</span> <span class="operator">=</span> <span class="struct">FooCopy</span> <span class="brace">{</span> <span class="field">x</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="variable mutable">copy</span><span class="operator">.</span><span class="function associated reference">quop</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">copy</span><span class="operator">.</span><span class="function associated mutable reference">qux</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="variable mutable">copy</span><span class="operator">.</span><span class="function associated">baz</span><span class="parenthesis">(</span><span class="variable mutable">copy</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable callable declaration">a</span> <span class="operator">=</span> <span class="punctuation">|</span><span class="value_param declaration">x</span><span class="punctuation">|</span> <span class="value_param">x</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable callable declaration">bar</span> <span class="operator">=</span> <span class="struct">Foo</span><span class="operator">::</span><span class="function associated consuming">baz</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="numeric_literal">-</span><span class="numeric_literal">42</span><span class="comma">,</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="operator">-</span><span class="variable">baz</span><span class="operator">.</span><span class="field">0</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="logical">!</span><span class="bool_literal">true</span><span class="semicolon">;</span>
+
+ <span class="label declaration">'foo</span><span class="colon">:</span> <span class="keyword control">loop</span> <span class="brace">{</span>
+ <span class="keyword control">break</span> <span class="label">'foo</span><span class="semicolon">;</span>
+ <span class="keyword control">continue</span> <span class="label">'foo</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">enum</span> <span class="enum declaration">Option</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="enum_variant declaration">Some</span><span class="parenthesis">(</span><span class="type_param">T</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="enum_variant declaration">None</span><span class="comma">,</span>
+<span class="brace">}</span>
+<span class="keyword">use</span> <span class="enum">Option</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+
+<span class="keyword">impl</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="angle">&gt;</span> <span class="enum">Option</span><span class="angle">&lt;</span><span class="type_param">T</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated consuming declaration">and</span><span class="angle">&lt;</span><span class="type_param declaration">U</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="comma">,</span> <span class="value_param declaration">other</span><span class="colon">:</span> <span class="enum">Option</span><span class="angle">&lt;</span><span class="type_param">U</span><span class="angle">&gt;</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="enum">Option</span><span class="angle">&lt;</span><span class="parenthesis">(</span><span class="type_param">T</span><span class="comma">,</span> <span class="type_param">U</span><span class="parenthesis">)</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="keyword control">match</span> <span class="value_param">other</span> <span class="brace">{</span>
+ <span class="enum_variant">None</span> <span class="operator">=&gt;</span> <span class="unresolved_reference">unimplemented</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="variable declaration">Nope</span> <span class="operator">=&gt;</span> <span class="variable">Nope</span><span class="comma">,</span>
+ <span class="brace">}</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function async declaration">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">song</span> <span class="operator">=</span> <span class="unresolved_reference">learn_song</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword async control">await</span><span class="semicolon">;</span>
+ <span class="unresolved_reference">sing_song</span><span class="parenthesis">(</span><span class="variable consuming">song</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword async control">await</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function async declaration">async_main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">f1</span> <span class="operator">=</span> <span class="function async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">f2</span> <span class="operator">=</span> <span class="unresolved_reference">dance</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="unresolved_reference">futures</span><span class="operator">::</span><span class="unresolved_reference">join</span><span class="macro_bang">!</span><span class="parenthesis">(</span>f1<span class="comma">,</span> f2<span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">use_foo_items</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">bob</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="struct library">Person</span> <span class="brace">{</span>
+ <span class="field library">name</span><span class="colon">:</span> <span class="string_literal">"Bob"</span><span class="comma">,</span>
+ <span class="field library">age</span><span class="colon">:</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="module library">consts</span><span class="operator">::</span><span class="constant library">NUMBER</span><span class="comma">,</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable declaration">control_flow</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="function library">identity</span><span class="parenthesis">(</span><span class="module crate_root library">foo</span><span class="operator">::</span><span class="enum library">ControlFlow</span><span class="operator">::</span><span class="enum_variant library">Continue</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword control">if</span> <span class="variable">control_flow</span><span class="operator">.</span><span class="function associated consuming library">should_die</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="module crate_root library">foo</span><span class="operator">::</span><span class="unresolved_reference">die</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">pub</span> <span class="keyword">enum</span> <span class="enum declaration public">Bool</span> <span class="brace">{</span> <span class="enum_variant declaration public">True</span><span class="comma">,</span> <span class="enum_variant declaration public">False</span> <span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="enum public">Bool</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function associated consuming declaration public">to_primitive</span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
+ <span class="bool_literal">true</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+<span class="keyword">const</span> <span class="constant declaration">USAGE_OF_BOOL</span><span class="colon">:</span><span class="builtin_type">bool</span> <span class="operator">=</span> <span class="enum public">Bool</span><span class="operator">::</span><span class="enum_variant public">True</span><span class="operator">.</span><span class="function associated consuming public">to_primitive</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">Baz</span> <span class="brace">{</span>
+ <span class="keyword">type</span> <span class="type_alias associated declaration trait">Qux</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="value_param declaration">t</span><span class="colon">:</span> <span class="type_param">T</span><span class="parenthesis">)</span>
+<span class="keyword">where</span>
+ <span class="type_param">T</span><span class="colon">:</span> <span class="trait">Baz</span><span class="comma">,</span>
+ <span class="angle">&lt;</span><span class="type_param">T</span> <span class="keyword">as</span> <span class="trait">Baz</span><span class="angle">&gt;</span><span class="operator">::</span><span class="type_alias associated trait">Qux</span><span class="colon">:</span> <span class="trait">Bar</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">gp_shadows_trait</span><span class="angle">&lt;</span><span class="type_param declaration">Baz</span><span class="colon">:</span> <span class="trait">Bar</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="type_param">Baz</span><span class="operator">::</span><span class="function associated reference trait">bar</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
new file mode 100644
index 000000000..ced7d22f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -0,0 +1,62 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="operator">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r#"</span>
+<span class="keyword">trait</span> <span class="trait declaration">Foo</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration static trait">foo</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="unresolved_reference">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"2 + 2 = {}"</span><span class="comma">,</span> <span class="numeric_literal">4</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span><span class="string_literal">"#</span>
+ <span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r"</span>
+<span class="keyword">fn</span> <span class="function declaration">foo</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="function">foo</span><span class="parenthesis">(</span><span class="keyword">$0</span><span class="brace">{</span>
+ <span class="numeric_literal">92</span>
+ <span class="brace">}</span><span class="keyword">$0</span><span class="parenthesis">)</span>
+<span class="brace">}</span><span class="string_literal">"</span>
+ <span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
new file mode 100644
index 000000000..66f9ede96
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
@@ -0,0 +1,58 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+
+<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
+<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<span class="keyword">mod</span> <span class="module declaration">__</span> <span class="brace">{</span>
+ <span class="keyword">use</span> <span class="keyword crate_root public">super</span><span class="operator">::</span><span class="punctuation">*</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">void</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+<span class="macro">void</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="keyword">Self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="keyword">struct</span> <span class="struct declaration">__</span> <span class="keyword">where</span> <span class="self_type_keyword">Self</span><span class="colon">:</span><span class="semicolon">;</span>
+<span class="keyword">fn</span> <span class="function declaration">__</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="unresolved_reference">Self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
new file mode 100644
index 000000000..2d85fc8c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -0,0 +1,55 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="comma">,</span> <span class="lifetime declaration">'b</span><span class="comma">,</span> <span class="lifetime declaration">'c</span><span class="angle">&gt;</span> <span class="keyword">where</span> <span class="lifetime">'a</span><span class="colon">:</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="colon">:</span> <span class="lifetime">'static</span> <span class="brace">{</span>
+ <span class="field declaration">field</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'a</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="field declaration">field2</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'static</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+<span class="brace">}</span>
+<span class="keyword">impl</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="angle">&gt;</span> <span class="struct">Foo</span><span class="angle">&lt;</span><span class="lifetime">'_</span><span class="comma">,</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="angle">&gt;</span>
+<span class="keyword">where</span>
+ <span class="lifetime">'a</span><span class="colon">:</span> <span class="lifetime">'a</span><span class="comma">,</span>
+ <span class="lifetime">'static</span><span class="colon">:</span> <span class="lifetime">'static</span>
+<span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
new file mode 100644
index 000000000..54d427952
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -0,0 +1,96 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace">{</span>
+ <span class="brace">{</span>
+ <span class="comma">,</span><span class="builtin_type">i32</span> <span class="colon">:</span><span class="field declaration public">x</span> <span class="keyword">pub</span>
+ <span class="comma">,</span><span class="builtin_type">i32</span> <span class="colon">:</span><span class="field declaration public">y</span> <span class="keyword">pub</span>
+ <span class="brace">}</span> <span class="struct declaration">Foo</span> <span class="keyword">struct</span>
+<span class="brace">}</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">def_fn</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="macro">def_fn</span><span class="macro_bang">!</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-</span><span class="operator">&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
+ <span class="numeric_literal">100</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">dont_color_me_braces</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="numeric_literal">0</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>expr
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="comment documentation">/// textually shadow previous definition</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>expr
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">keyword_frag</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>type<span class="colon">:</span>ty<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>type<span class="parenthesis">)</span>
+<span class="brace">}</span>
+
+<span class="keyword">macro</span> <span class="macro declaration">with_args</span><span class="parenthesis">(</span><span class="punctuation">$</span>i<span class="colon">:</span>ident<span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>i
+<span class="brace">}</span>
+
+<span class="keyword">macro</span> <span class="macro declaration">without_args</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>i<span class="colon">:</span>ident<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span>i
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="unresolved_reference">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello, {}!"</span><span class="comma">,</span> <span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
new file mode 100644
index 000000000..8a1d69816
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
@@ -0,0 +1,51 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
+<span class="comment documentation">//! This is an intra doc injection test for modules</span>
+<span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
+<span class="comment documentation">//! This is an intra doc injection test for modules</span>
+
+<span class="keyword">pub</span> <span class="keyword">struct</span> <span class="struct declaration public">Struct</span><span class="semicolon">;</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
new file mode 100644
index 000000000..c4c3e3dc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
@@ -0,0 +1,50 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
+<span class="comment documentation">/// This is an intra doc injection test for modules</span>
+<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
+<span class="comment documentation">/// This is an intra doc injection test for modules</span>
+<span class="keyword">mod</span> <span class="module declaration">foo</span><span class="semicolon">;</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
new file mode 100644
index 000000000..2369071ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
@@ -0,0 +1,58 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="numeric_literal">1</span> <span class="arithmetic">+</span> <span class="numeric_literal">1</span> <span class="arithmetic">-</span> <span class="numeric_literal">1</span> <span class="arithmetic">*</span> <span class="numeric_literal">1</span> <span class="arithmetic">/</span> <span class="numeric_literal">1</span> <span class="arithmetic">%</span> <span class="numeric_literal">1</span> <span class="bitwise">|</span> <span class="numeric_literal">1</span> <span class="bitwise">&</span> <span class="numeric_literal">1</span> <span class="logical">!</span> <span class="numeric_literal">1</span> <span class="bitwise">^</span> <span class="numeric_literal">1</span> <span class="bitwise">&gt;&gt;</span> <span class="numeric_literal">1</span> <span class="bitwise">&lt;&lt;</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">a</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">+=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">-=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">*=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">/=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="arithmetic mutable">%=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">|=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">&=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">^=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">&gt;&gt;=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+ <span class="variable mutable">a</span> <span class="bitwise mutable">&lt;&lt;=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
new file mode 100644
index 000000000..bff35c897
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -0,0 +1,56 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(76,47%,83%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(15,86%,51%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="794745962933817518" style="color: hsl(127,71%,87%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(90,74%,79%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="794745962933817518" style="color: hsl(127,71%,87%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
new file mode 100644
index 000000000..c627bc9b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -0,0 +1,164 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">println</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="brace">{</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>io<span class="colon">:</span><span class="colon">:</span>_print<span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>format_args_nl<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span><span class="parenthesis">)</span>
+<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">const_format_args</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args_nl</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">mod</span> <span class="module declaration">panic</span> <span class="brace">{</span>
+ <span class="keyword">pub</span> <span class="keyword">macro</span> <span class="macro declaration">panic_2015</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="comment">// Use `panic_str` instead of `panic_display::&lt;&str&gt;` for non_fmt_panic lint.</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="comment">// Special-case the single-argument case for const_panic.</span>
+ <span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="operator">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="colon">:</span>expr<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
+ <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_fmt<span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ <span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="parenthesis attribute">(</span><span class="none attribute">std_panic</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">panic</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">assert</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">asm</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">toho</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented"</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented: {}"</span><span class="comma">,</span> <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="escape_sequence">{{</span><span class="string_literal">Hello</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="comment">// from https://doc.rust-lang.org/std/fmt/index.html</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"world"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello, world!"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"The number is </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "The number is 1"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="parenthesis">(</span><span class="numeric_literal">3</span><span class="comma">,</span> <span class="numeric_literal">4</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "(3, 4)"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">value</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> value<span class="operator">=</span><span class="numeric_literal">4</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "4"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="comma">,</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "1 2"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">4</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">42</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "0042" with leading zerosV</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="comma">,</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1 1 2"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">argument</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> argument <span class="operator">=</span> <span class="string_literal">"test"</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "test"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="comma">,</span> name <span class="operator">=</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "2 1"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">a</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="variable">c</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="variable">b</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> a<span class="operator">=</span><span class="string_literal">"a"</span><span class="comma">,</span> b<span class="operator">=</span><span class="char_literal">'b'</span><span class="comma">,</span> c<span class="operator">=</span><span class="numeric_literal">3</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "a 3 b"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">2</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "{2}"</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">1</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">width</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> width <span class="operator">=</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">&lt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">-</span><span class="format_specifier">&lt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">^</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">&gt;</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">+</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">27</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">-</span><span class="numeric_literal">5</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="numeric_literal">0</span><span class="numeric_literal">10</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="numeric_literal">27</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">1</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> <span class="numeric_literal">5</span><span class="comma">,</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> is </span><span class="format_specifier">{</span><span class="variable">number</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="variable">prec</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="string_literal">"x"</span><span class="comma">,</span> prec <span class="operator">=</span> <span class="numeric_literal">5</span><span class="comma">,</span> number <span class="operator">=</span> <span class="numeric_literal">0.01</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">` has 3 fractional digits"</span><span class="comma">,</span> <span class="string_literal">"Hello"</span><span class="comma">,</span> <span class="numeric_literal">3</span><span class="comma">,</span> name<span class="operator">=</span><span class="numeric_literal">1234.56</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">` has 3 characters"</span><span class="comma">,</span> <span class="string_literal">"Hello"</span><span class="comma">,</span> <span class="numeric_literal">3</span><span class="comma">,</span> name<span class="operator">=</span><span class="string_literal">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">, `</span><span class="format_specifier">{</span><span class="variable">name</span><span class="format_specifier">:</span><span class="format_specifier">&gt;</span><span class="numeric_literal">8</span><span class="format_specifier">.</span><span class="format_specifier">*</span><span class="format_specifier">}</span><span class="string_literal">` has 3 right-aligned characters"</span><span class="comma">,</span> <span class="string_literal">"Hello"</span><span class="comma">,</span> <span class="numeric_literal">3</span><span class="comma">,</span> name<span class="operator">=</span><span class="string_literal">"1234.56"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"{}"</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"{{}}"</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="escape_sequence">{{</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal"> Hello"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello </span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal">Hello</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal"> Hello </span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal">Hello </span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">{{</span><span class="string_literal"> Hello</span><span class="escape_sequence">}}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">r"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">!"</span><span class="comma">,</span> <span class="string_literal">"world"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="comment">// escape sequences</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"Hello</span><span class="escape_sequence">\n</span><span class="string_literal">World"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="escape_sequence">\u{48}</span><span class="escape_sequence">\x65</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6F</span><span class="string_literal"> World"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> A <span class="operator">=</span> <span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> ничоси <span class="operator">=</span> <span class="numeric_literal">92</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">x</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> "</span><span class="comma">,</span> <span class="unresolved_reference">thingy</span><span class="comma">,</span> <span class="unresolved_reference">n2</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">0</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"more </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="bool_literal">true</span><span class="comma">,</span> <span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="bool_literal">true</span><span class="comma">,</span> <span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal"> asdasd"</span><span class="comma">,</span> <span class="numeric_literal">1</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">fmt"</span><span class="comma">,</span> <span class="numeric_literal">0</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis">(</span>concat<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">"</span><span class="parenthesis">)</span><span class="comma">,</span> <span class="string_literal">"{}"</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
new file mode 100644
index 000000000..0716bae75
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -0,0 +1,126 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.function.unsafe { color: #BC8383; }
+.trait.unsafe { color: #BC8383; }
+.operator.unsafe { color: #BC8383; }
+.mutable.unsafe { color: #BC8383; text-decoration: underline; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.macro.unsafe { color: #BC8383; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">id</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">unsafe_deref</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
+ <span class="punctuation">*</span><span class="parenthesis">(</span><span class="operator">&</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="keyword">as</span> <span class="punctuation">*</span><span class="keyword">const</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable unsafe">MUT_GLOBAL</span><span class="colon">:</span> <span class="struct">Struct</span> <span class="operator">=</span> <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword">static</span> <span class="static declaration">GLOBAL</span><span class="colon">:</span> <span class="struct">Struct</span> <span class="operator">=</span> <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+<span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function declaration unsafe">unsafe_fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">union</span> <span class="union declaration">Union</span> <span class="brace">{</span>
+ <span class="field declaration">a</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
+ <span class="field declaration">b</span><span class="colon">:</span> <span class="builtin_type">f32</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword">struct</span> <span class="struct declaration">Struct</span> <span class="brace">{</span> <span class="field declaration">field</span><span class="colon">:</span> <span class="builtin_type">i32</span> <span class="brace">}</span>
+<span class="keyword">impl</span> <span class="struct">Struct</span> <span class="brace">{</span>
+ <span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function associated declaration reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">repr</span><span class="parenthesis attribute">(</span><span class="none attribute">packed</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration">Packed</span> <span class="brace">{</span>
+ <span class="field declaration">a</span><span class="colon">:</span> <span class="builtin_type">u16</span><span class="comma">,</span>
+<span class="brace">}</span>
+
+<span class="keyword unsafe">unsafe</span> <span class="keyword">trait</span> <span class="trait declaration unsafe">UnsafeTrait</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="keyword unsafe">unsafe</span> <span class="keyword">impl</span> <span class="trait unsafe">UnsafeTrait</span> <span class="keyword">for</span> <span class="struct">Packed</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="keyword">impl</span> <span class="punctuation">!</span><span class="trait">UnsafeTrait</span> <span class="keyword">for</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">unsafe_trait_bound</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="colon">:</span> <span class="trait">UnsafeTrait</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="type_param">T</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="keyword">trait</span> <span class="trait declaration">DoTheAutoref</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword">impl</span> <span class="trait">DoTheAutoref</span> <span class="keyword">for</span> <span class="builtin_type">u16</span> <span class="brace">{</span>
+ <span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+ <span class="keyword">let</span> <span class="variable declaration">x</span> <span class="operator">=</span> <span class="operator">&</span><span class="numeric_literal">5</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="punctuation">_</span> <span class="keyword">as</span> <span class="keyword">*</span><span class="keyword">const</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">u</span> <span class="operator">=</span> <span class="union">Union</span> <span class="brace">{</span> <span class="field">b</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="macro">id</span><span class="macro_bang">!</span> <span class="brace">{</span>
+ <span class="keyword unsafe">unsafe</span> <span class="brace">{</span> <span class="macro unsafe">unsafe_deref</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">}</span>
+ <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="keyword unsafe">unsafe</span> <span class="brace">{</span>
+ <span class="macro unsafe">unsafe_deref</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro unsafe">id</span><span class="macro_bang">!</span> <span class="brace">{</span> <span class="macro unsafe">unsafe_deref</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">}</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe fn and method calls</span>
+ <span class="function unsafe">unsafe_fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">b</span> <span class="operator">=</span> <span class="variable">u</span><span class="operator">.</span><span class="field unsafe">b</span><span class="semicolon">;</span>
+ <span class="keyword control">match</span> <span class="variable">u</span> <span class="brace">{</span>
+ <span class="union">Union</span> <span class="brace">{</span> <span class="field unsafe">b</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span> <span class="operator">=&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="union">Union</span> <span class="brace">{</span> <span class="field unsafe">a</span> <span class="brace">}</span> <span class="operator">=&gt;</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
+ <span class="brace">}</span>
+ <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="operator">.</span><span class="function associated reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe deref</span>
+ <span class="operator unsafe">*</span><span class="variable">x</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe access to a static mut</span>
+ <span class="static mutable unsafe">MUT_GLOBAL</span><span class="operator">.</span><span class="field">field</span><span class="semicolon">;</span>
+ <span class="static">GLOBAL</span><span class="operator">.</span><span class="field">field</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe ref of packed fields</span>
+ <span class="keyword">let</span> <span class="variable declaration">packed</span> <span class="operator">=</span> <span class="struct">Packed</span> <span class="brace">{</span> <span class="field">a</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration reference">a</span> <span class="operator">=</span> <span class="operator unsafe">&</span><span class="variable">packed</span><span class="operator">.</span><span class="field">a</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="keyword unsafe">ref</span> <span class="variable declaration reference">a</span> <span class="operator">=</span> <span class="variable">packed</span><span class="operator">.</span><span class="field">a</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="struct">Packed</span> <span class="brace">{</span> <span class="keyword unsafe">ref</span> <span class="field">a</span> <span class="brace">}</span> <span class="operator">=</span> <span class="variable">packed</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="struct">Packed</span> <span class="brace">{</span> <span class="field">a</span><span class="colon">:</span> <span class="keyword unsafe">ref</span> <span class="variable declaration reference">_a</span> <span class="brace">}</span> <span class="operator">=</span> <span class="variable">packed</span><span class="semicolon">;</span>
+
+ <span class="comment">// unsafe auto ref of packed field</span>
+ <span class="variable">packed</span><span class="operator">.</span><span class="field">a</span><span class="operator">.</span><span class="function associated reference trait unsafe">calls_autoref</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="brace">}</span>
+<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
new file mode 100644
index 000000000..99be7c664
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -0,0 +1,1096 @@
+use std::time::Instant;
+
+use expect_test::{expect_file, ExpectFile};
+use ide_db::SymbolKind;
+use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
+
+use crate::{fixture, FileRange, HlTag, TextRange};
+
+#[test]
+fn attributes() {
+ check_highlighting(
+ r#"
+//- proc_macros: identity
+//- minicore: derive, copy
+#[allow(dead_code)]
+#[rustfmt::skip]
+#[proc_macros::identity]
+#[derive(Copy)]
+/// This is a doc comment
+// This is a normal comment
+/// This is a doc comment
+#[derive(Copy)]
+// This is another normal comment
+/// This is another doc comment
+// This is another normal comment
+#[derive(Copy)]
+// The reason for these being here is to test AttrIds
+struct Foo;
+"#,
+ expect_file!["./test_data/highlight_attributes.html"],
+ false,
+ );
+}
+
+#[test]
+fn macros() {
+ check_highlighting(
+ r#"
+//- proc_macros: mirror
+proc_macros::mirror! {
+ {
+ ,i32 :x pub
+ ,i32 :y pub
+ } Foo struct
+}
+macro_rules! def_fn {
+ ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+ fn bar() -> u32 {
+ 100
+ }
+}
+
+macro_rules! dont_color_me_braces {
+ () => {0}
+}
+
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+/// textually shadow previous definition
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+macro_rules! keyword_frag {
+ ($type:ty) => ($type)
+}
+
+macro with_args($i:ident) {
+ $i
+}
+
+macro without_args {
+ ($i:ident) => {
+ $i
+ }
+}
+
+fn main() {
+ println!("Hello, {}!", 92);
+ dont_color_me_braces!();
+ noop!(noop!(1));
+}
+"#,
+ expect_file!["./test_data/highlight_macros.html"],
+ false,
+ );
+}
+
+/// If what you want to test feels like a specific entity, consider making a new test instead;
+/// ideally, this test fixture should shrink over time rather than grow.
+#[test]
+fn test_highlighting() {
+ check_highlighting(
+ r#"
+//- minicore: derive, copy
+//- /main.rs crate:main deps:foo
+use inner::{self as inner_mod};
+mod inner {}
+
+pub mod ops {
+ #[lang = "fn_once"]
+ pub trait FnOnce<Args> {}
+
+ #[lang = "fn_mut"]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn"]
+ pub trait Fn<Args>: FnMut<Args> {}
+}
+
+struct Foo {
+ x: u32,
+}
+
+trait Bar {
+ fn bar(&self) -> i32;
+}
+
+impl Bar for Foo {
+ fn bar(&self) -> i32 {
+ self.x
+ }
+}
+
+impl Foo {
+ fn baz(mut self, f: Foo) -> i32 {
+ f.baz(self)
+ }
+
+ fn qux(&mut self) {
+ self.x = 0;
+ }
+
+ fn quop(&self) -> i32 {
+ self.x
+ }
+}
+
+use self::FooCopy::{self as BarCopy};
+
+#[derive(Copy)]
+struct FooCopy {
+ x: u32,
+}
+
+impl FooCopy {
+ fn baz(self, f: FooCopy) -> u32 {
+ f.baz(self)
+ }
+
+ fn qux(&mut self) {
+ self.x = 0;
+ }
+
+ fn quop(&self) -> u32 {
+ self.x
+ }
+}
+
+fn str() {
+ str();
+}
+
+fn foo<'a, T>() -> T {
+ foo::<'a, i32>()
+}
+
+fn never() -> ! {
+ loop {}
+}
+
+fn const_param<const FOO: usize>() -> usize {
+ const_param::<{ FOO }>();
+ FOO
+}
+
+use ops::Fn;
+fn baz<F: Fn() -> ()>(f: F) {
+ f()
+}
+
+fn foobar() -> impl Copy {}
+
+fn foo() {
+ let bar = foobar();
+}
+
+// comment
+fn main() {
+ let mut x = 42;
+ x += 1;
+ let y = &mut x;
+ let z = &y;
+
+ let Foo { x: z, y } = Foo { x: z, y };
+
+ y;
+
+ let mut foo = Foo { x, y: x };
+ let foo2 = Foo { x, y: x };
+ foo.quop();
+ foo.qux();
+ foo.baz(foo2);
+
+ let mut copy = FooCopy { x };
+ copy.quop();
+ copy.qux();
+ copy.baz(copy);
+
+ let a = |x| x;
+ let bar = Foo::baz;
+
+ let baz = (-42,);
+ let baz = -baz.0;
+
+ let _ = !true;
+
+ 'foo: loop {
+ break 'foo;
+ continue 'foo;
+ }
+}
+
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+
+impl<T> Option<T> {
+ fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
+ match other {
+ None => unimplemented!(),
+ Nope => Nope,
+ }
+ }
+}
+
+async fn learn_and_sing() {
+ let song = learn_song().await;
+ sing_song(song).await;
+}
+
+async fn async_main() {
+ let f1 = learn_and_sing();
+ let f2 = dance();
+ futures::join!(f1, f2);
+}
+
+fn use_foo_items() {
+ let bob = foo::Person {
+ name: "Bob",
+ age: foo::consts::NUMBER,
+ };
+
+ let control_flow = foo::identity(foo::ControlFlow::Continue);
+
+ if control_flow.should_die() {
+ foo::die!();
+ }
+}
+
+pub enum Bool { True, False }
+
+impl Bool {
+ pub const fn to_primitive(self) -> bool {
+ true
+ }
+}
+const USAGE_OF_BOOL:bool = Bool::True.to_primitive();
+
+trait Baz {
+ type Qux;
+}
+
+fn baz<T>(t: T)
+where
+ T: Baz,
+ <T as Baz>::Qux: Bar {}
+
+fn gp_shadows_trait<Baz: Bar>() {
+ Baz::bar;
+}
+
+//- /foo.rs crate:foo
+pub struct Person {
+ pub name: &'static str,
+ pub age: u8,
+}
+
+pub enum ControlFlow {
+ Continue,
+ Die,
+}
+
+impl ControlFlow {
+ pub fn should_die(self) -> bool {
+ matches!(self, ControlFlow::Die)
+ }
+}
+
+pub fn identity<T>(x: T) -> T { x }
+
+pub mod consts {
+ pub const NUMBER: i64 = 92;
+}
+
+macro_rules! die {
+ () => {
+ panic!();
+ };
+}
+"#,
+ expect_file!["./test_data/highlight_general.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_lifetime_highlighting() {
+ check_highlighting(
+ r#"
+//- minicore: derive
+
+#[derive()]
+struct Foo<'a, 'b, 'c> where 'a: 'a, 'static: 'static {
+ field: &'a (),
+ field2: &'static (),
+}
+impl<'a> Foo<'_, 'a, 'static>
+where
+ 'a: 'a,
+ 'static: 'static
+{}
+"#,
+ expect_file!["./test_data/highlight_lifetimes.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_keyword_highlighting() {
+ check_highlighting(
+ r#"
+extern crate self;
+
+use crate;
+use self;
+mod __ {
+ use super::*;
+}
+
+macro_rules! void {
+ ($($tt:tt)*) => {}
+}
+void!(Self);
+struct __ where Self:;
+fn __(_: Self) {}
+"#,
+ expect_file!["./test_data/highlight_keywords.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_string_highlighting() {
+ // The format string detection is based on macro-expansion,
+ // thus, we have to copy the macro definition from `std`
+ check_highlighting(
+ r#"
+macro_rules! println {
+ ($($arg:tt)*) => ({
+ $crate::io::_print($crate::format_args_nl!($($arg)*));
+ })
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! const_format_args {}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args_nl {}
+
+mod panic {
+ pub macro panic_2015 {
+ () => (
+ $crate::panicking::panic("explicit panic")
+ ),
+ ($msg:literal $(,)?) => (
+ $crate::panicking::panic($msg)
+ ),
+ // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
+ ($msg:expr $(,)?) => (
+ $crate::panicking::panic_str($msg)
+ ),
+ // Special-case the single-argument case for const_panic.
+ ("{}", $arg:expr $(,)?) => (
+ $crate::panicking::panic_display(&$arg)
+ ),
+ ($fmt:expr, $($arg:tt)+) => (
+ $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+))
+ ),
+ }
+}
+
+#[rustc_builtin_macro(std_panic)]
+#[macro_export]
+macro_rules! panic {}
+#[rustc_builtin_macro]
+macro_rules! assert {}
+#[rustc_builtin_macro]
+macro_rules! asm {}
+
+macro_rules! toho {
+ () => ($crate::panic!("not yet implemented"));
+ ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)));
+}
+
+fn main() {
+ println!("Hello {{Hello}}");
+ // from https://doc.rust-lang.org/std/fmt/index.html
+ println!("Hello"); // => "Hello"
+ println!("Hello, {}!", "world"); // => "Hello, world!"
+ println!("The number is {}", 1); // => "The number is 1"
+ println!("{:?}", (3, 4)); // => "(3, 4)"
+ println!("{value}", value=4); // => "4"
+ println!("{} {}", 1, 2); // => "1 2"
+ println!("{:04}", 42); // => "0042" with leading zerosV
+ println!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
+ println!("{argument}", argument = "test"); // => "test"
+ println!("{name} {}", 1, name = 2); // => "2 1"
+ println!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b"
+ println!("{{{}}}", 2); // => "{2}"
+ println!("Hello {:5}!", "x");
+ println!("Hello {:1$}!", "x", 5);
+ println!("Hello {1:0$}!", 5, "x");
+ println!("Hello {:width$}!", "x", width = 5);
+ println!("Hello {:<5}!", "x");
+ println!("Hello {:-<5}!", "x");
+ println!("Hello {:^5}!", "x");
+ println!("Hello {:>5}!", "x");
+ println!("Hello {:+}!", 5);
+ println!("{:#x}!", 27);
+ println!("Hello {:05}!", 5);
+ println!("Hello {:05}!", -5);
+ println!("{:#010x}!", 27);
+ println!("Hello {0} is {1:.5}", "x", 0.01);
+ println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+ println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+ println!("Hello {} is {:.*}", "x", 5, 0.01);
+ println!("Hello {} is {2:.*}", "x", 5, 0.01);
+ println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+ println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+ println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+ println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+
+ let _ = "{}"
+ let _ = "{{}}";
+
+ println!("Hello {{}}");
+ println!("{{ Hello");
+ println!("Hello }}");
+ println!("{{Hello}}");
+ println!("{{ Hello }}");
+ println!("{{Hello }}");
+ println!("{{ Hello}}");
+
+ println!(r"Hello, {}!", "world");
+
+ // escape sequences
+ println!("Hello\nWorld");
+ println!("\u{48}\x65\x6C\x6C\x6F World");
+
+ let _ = "\x28\x28\x00\x63\n";
+ let _ = b"\x28\x28\x00\x63\n";
+
+ println!("{\x41}", A = 92);
+ println!("{ничоси}", ничоси = 92);
+
+ println!("{:x?} {} ", thingy, n2);
+ panic!("{}", 0);
+ panic!("more {}", 1);
+ assert!(true, "{}", 1);
+ assert!(true, "{} asdasd", 1);
+ toho!("{}fmt", 0);
+ asm!("mov eax, {0}");
+ format_args!(concat!("{}"), "{}");
+}"#,
+ expect_file!["./test_data/highlight_strings.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_unsafe_highlighting() {
+ check_highlighting(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+macro_rules! unsafe_deref {
+ () => {
+ *(&() as *const ())
+ };
+}
+static mut MUT_GLOBAL: Struct = Struct { field: 0 };
+static GLOBAL: Struct = Struct { field: 0 };
+unsafe fn unsafe_fn() {}
+
+union Union {
+ a: u32,
+ b: f32,
+}
+
+struct Struct { field: i32 }
+impl Struct {
+ unsafe fn unsafe_method(&self) {}
+}
+
+#[repr(packed)]
+struct Packed {
+ a: u16,
+}
+
+unsafe trait UnsafeTrait {}
+unsafe impl UnsafeTrait for Packed {}
+impl !UnsafeTrait for () {}
+
+fn unsafe_trait_bound<T: UnsafeTrait>(_: T) {}
+
+trait DoTheAutoref {
+ fn calls_autoref(&self);
+}
+
+impl DoTheAutoref for u16 {
+ fn calls_autoref(&self) {}
+}
+
+fn main() {
+ let x = &5 as *const _ as *const usize;
+ let u = Union { b: 0 };
+
+ id! {
+ unsafe { unsafe_deref!() }
+ };
+
+ unsafe {
+ unsafe_deref!();
+ id! { unsafe_deref!() };
+
+ // unsafe fn and method calls
+ unsafe_fn();
+ let b = u.b;
+ match u {
+ Union { b: 0 } => (),
+ Union { a } => (),
+ }
+ Struct { field: 0 }.unsafe_method();
+
+ // unsafe deref
+ *x;
+
+ // unsafe access to a static mut
+ MUT_GLOBAL.field;
+ GLOBAL.field;
+
+ // unsafe ref of packed fields
+ let packed = Packed { a: 0 };
+ let a = &packed.a;
+ let ref a = packed.a;
+ let Packed { ref a } = packed;
+ let Packed { a: ref _a } = packed;
+
+ // unsafe auto ref of packed field
+ packed.a.calls_autoref();
+ }
+}
+"#,
+ expect_file!["./test_data/highlight_unsafe.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_highlight_doc_comment() {
+ check_highlighting(
+ r#"
+//- /main.rs
+//! This is a module to test doc injection.
+//! ```
+//! fn test() {}
+//! ```
+
+mod outline_module;
+
+/// ```
+/// let _ = "early doctests should not go boom";
+/// ```
+struct Foo {
+ bar: bool,
+}
+
+/// This is an impl with a code block.
+///
+/// ```
+/// fn foo() {
+///
+/// }
+/// ```
+impl Foo {
+ /// ```
+ /// let _ = "Call me
+ // KILLER WHALE
+ /// Ishmael.";
+ /// ```
+ pub const bar: bool = true;
+
+ /// Constructs a new `Foo`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// let mut foo: Foo = Foo::new();
+ /// ```
+ pub const fn new() -> Foo {
+ Foo { bar: true }
+ }
+
+ /// `bar` method on `Foo`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use x::y;
+ ///
+ /// let foo = Foo::new();
+ ///
+ /// // calls bar on foo
+ /// assert!(foo.bar());
+ ///
+ /// let bar = foo.bar || Foo::bar;
+ ///
+ /// /* multi-line
+ /// comment */
+ ///
+ /// let multi_line_string = "Foo
+ /// bar\n
+ /// ";
+ ///
+ /// ```
+ ///
+ /// ```rust,no_run
+ /// let foobar = Foo::new().bar();
+ /// ```
+ ///
+ /// ~~~rust,no_run
+ /// // code block with tilde.
+ /// let foobar = Foo::new().bar();
+ /// ~~~
+ ///
+ /// ```
+ /// // functions
+ /// fn foo<T, const X: usize>(arg: i32) {
+ /// let x: T = X;
+ /// }
+ /// ```
+ ///
+ /// ```sh
+ /// echo 1
+ /// ```
+ pub fn foo(&self) -> bool {
+ true
+ }
+}
+
+/// [`Foo`](Foo) is a struct
+/// This function is > [`all_the_links`](all_the_links) <
+/// [`noop`](noop) is a macro below
+/// [`Item`] is a struct in the module [`module`]
+///
+/// [`Item`]: module::Item
+/// [mix_and_match]: ThisShouldntResolve
+pub fn all_the_links() {}
+
+pub mod module {
+ pub struct Item;
+}
+
+/// ```
+/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// noop!(1);
+/// ```
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+/// ```rust
+/// let _ = example(&[1, 2, 3]);
+/// ```
+///
+/// ```
+/// loop {}
+#[cfg_attr(not(feature = "false"), doc = "loop {}")]
+#[doc = "loop {}"]
+/// ```
+///
+#[cfg_attr(feature = "alloc", doc = "```rust")]
+#[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
+/// let _ = example(&alloc::vec![1, 2, 3]);
+/// ```
+pub fn mix_and_match() {}
+
+/**
+It is beyond me why you'd use these when you got ///
+```rust
+let _ = example(&[1, 2, 3]);
+```
+[`block_comments2`] tests these with indentation
+ */
+pub fn block_comments() {}
+
+/**
+ Really, I don't get it
+ ```rust
+ let _ = example(&[1, 2, 3]);
+ ```
+ [`block_comments`] tests these without indentation
+*/
+pub fn block_comments2() {}
+
+//- /outline_module.rs
+//! This is an outline module whose purpose is to test that its inline attribute injection does not
+//! spill into its parent.
+//! ```
+//! fn test() {}
+//! ```
+"#,
+ expect_file!["./test_data/highlight_doctest.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_extern_crate() {
+ check_highlighting(
+ r#"
+//- /main.rs crate:main deps:std,alloc
+extern crate std;
+extern crate alloc as abc;
+//- /std/lib.rs crate:std
+pub struct S;
+//- /alloc/lib.rs crate:alloc
+pub struct A
+"#,
+ expect_file!["./test_data/highlight_extern_crate.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_crate_root() {
+ check_highlighting(
+ r#"
+//- minicore: iterators
+//- /main.rs crate:main deps:foo
+extern crate foo;
+use core::iter;
+
+pub const NINETY_TWO: u8 = 92;
+
+use foo as foooo;
+
+pub(crate) fn main() {
+ let baz = iter::repeat(92);
+}
+
+mod bar {
+ pub(in super) const FORTY_TWO: u8 = 42;
+
+ mod baz {
+ use super::super::NINETY_TWO;
+ use crate::foooo::Point;
+
+ pub(in super::super) const TWENTY_NINE: u8 = 29;
+ }
+}
+//- /foo.rs crate:foo
+struct Point {
+ x: u8,
+ y: u8,
+}
+
+mod inner {
+ pub(super) fn swap(p: crate::Point) -> crate::Point {
+ crate::Point { x: p.y, y: p.x }
+ }
+}
+"#,
+ expect_file!["./test_data/highlight_crate_root.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_default_library() {
+ check_highlighting(
+ r#"
+//- minicore: option, iterators
+use core::iter;
+
+fn main() {
+ let foo = Some(92);
+ let nums = iter::repeat(foo.unwrap());
+}
+"#,
+ expect_file!["./test_data/highlight_default_library.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_associated_function() {
+ check_highlighting(
+ r#"
+fn not_static() {}
+
+struct foo {}
+
+impl foo {
+ pub fn is_static() {}
+ pub fn is_not_static(&self) {}
+}
+
+trait t {
+ fn t_is_static() {}
+ fn t_is_not_static(&self) {}
+}
+
+impl t for foo {
+ pub fn is_static() {}
+ pub fn is_not_static(&self) {}
+}
+"#,
+ expect_file!["./test_data/highlight_assoc_functions.html"],
+ false,
+ )
+}
+
+#[test]
+fn test_injection() {
+ check_highlighting(
+ r##"
+fn fixture(ra_fixture: &str) {}
+
+fn main() {
+ fixture(r#"
+trait Foo {
+ fn foo() {
+ println!("2 + 2 = {}", 4);
+ }
+}"#
+ );
+ fixture(r"
+fn foo() {
+ foo(\$0{
+ 92
+ }\$0)
+}"
+ );
+}
+"##,
+ expect_file!["./test_data/highlight_injection.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_operators() {
+ check_highlighting(
+ r##"
+fn main() {
+ 1 + 1 - 1 * 1 / 1 % 1 | 1 & 1 ! 1 ^ 1 >> 1 << 1;
+ let mut a = 0;
+ a += 1;
+ a -= 1;
+ a *= 1;
+ a /= 1;
+ a %= 1;
+ a |= 1;
+ a &= 1;
+ a ^= 1;
+ a >>= 1;
+ a <<= 1;
+}
+"##,
+ expect_file!["./test_data/highlight_operators.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_mod_hl_injection() {
+ check_highlighting(
+ r##"
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+"##,
+ expect_file!["./test_data/highlight_module_docs_inline.html"],
+ false,
+ );
+ check_highlighting(
+ r##"
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+"##,
+ expect_file!["./test_data/highlight_module_docs_outline.html"],
+ false,
+ );
+}
+
+#[test]
+#[cfg_attr(
+ all(unix, not(target_pointer_width = "64")),
+ ignore = "depends on `DefaultHasher` outputs"
+)]
+fn test_rainbow_highlighting() {
+ check_highlighting(
+ r#"
+fn main() {
+ let hello = "hello";
+ let x = hello.to_string();
+ let y = hello.to_string();
+
+ let x = "other color please!";
+ let y = x.to_string();
+}
+
+fn bar() {
+ let mut hello = "hello";
+}
+"#,
+ expect_file!["./test_data/highlight_rainbow.html"],
+ true,
+ );
+}
+
+#[test]
+fn test_ranges() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+#[derive(Clone, Debug)]
+struct Foo {
+ pub x: i32,
+ pub y: i32,
+}
+"#,
+ );
+
+ // The "x"
+ let highlights = &analysis
+ .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
+ .unwrap();
+
+ assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
+}
+
+#[test]
+fn ranges_sorted() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+#[foo(bar = "bar")]
+macro_rules! test {}
+}"#
+ .trim(),
+ );
+ let _ = analysis.highlight(file_id).unwrap();
+}
+
+/// Highlights the code given by the `ra_fixture` argument, renders the
+/// result as HTML, and compares it with the HTML snapshot file given as `expect`.
+/// Note that the snapshot file is overwritten by the rendered HTML.
+fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) {
+ let (analysis, file_id) = fixture::file(ra_fixture.trim());
+ let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap();
+ expect.assert_eq(actual_html)
+}
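+
+// Illustrative sketch, not part of the original test suite: a further snapshot test
+// would follow the exact shape of the tests above. The fixture text and the
+// `highlight_example.html` snapshot path below are hypothetical.
+//
+// #[test]
+// fn test_example_highlighting() {
+//     check_highlighting(
+//         r#"
+// fn example() { let answer = 42; }
+// "#,
+//         expect_file!["./test_data/highlight_example.html"],
+//         false,
+//     );
+// }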
+
+#[test]
+fn benchmark_syntax_highlighting_long_struct() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let fixture = bench_fixture::big_struct();
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let hash = {
+ let _pt = bench("syntax highlighting long struct");
+ analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
+ .count()
+ };
+ assert_eq!(hash, 2001);
+}
+
+#[test]
+fn syntax_highlighting_not_quadratic() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let mut al = AssertLinear::default();
+ while al.next_round() {
+ for i in 6..=10 {
+ let n = 1 << i;
+
+ let fixture = bench_fixture::big_struct_n(n);
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let time = Instant::now();
+
+ let hash = analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
+ .count();
+ assert!(hash > n as usize);
+
+ let elapsed = time.elapsed();
+ al.sample(n as f64, elapsed.as_millis() as f64);
+ }
+ }
+}
+
+#[test]
+fn benchmark_syntax_highlighting_parser() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let fixture = bench_fixture::glorious_old_parser();
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let hash = {
+ let _pt = bench("syntax highlighting parser");
+ analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
+ .count()
+ };
+ assert_eq!(hash, 1609);
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
new file mode 100644
index 000000000..9003e7cd3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_tree.rs
@@ -0,0 +1,339 @@
+use ide_db::base_db::{FileId, SourceDatabase};
+use ide_db::RootDatabase;
+use syntax::{
+ AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize,
+};
+
+// Feature: Show Syntax Tree
+//
+// Shows the parse tree of the current file. It exists mostly for debugging
+// rust-analyzer itself.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Show Syntax Tree**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065586-068bdb80-91b1-11eb-9507-fee67f9f45a0.gif[]
+pub(crate) fn syntax_tree(
+ db: &RootDatabase,
+ file_id: FileId,
+ text_range: Option<TextRange>,
+) -> String {
+ let parse = db.parse(file_id);
+ if let Some(text_range) = text_range {
+ let node = match parse.tree().syntax().covering_element(text_range) {
+ NodeOrToken::Node(node) => node,
+ NodeOrToken::Token(token) => {
+ if let Some(tree) = syntax_tree_for_string(&token, text_range) {
+ return tree;
+ }
+ token.parent().unwrap()
+ }
+ };
+
+ format!("{:#?}", node)
+ } else {
+ format!("{:#?}", parse.tree().syntax())
+ }
+}
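+
+// Illustrative sketch, not part of the original file: through the `Analysis` API used
+// by the tests below, the same whole-file tree can be obtained as
+//
+//     let (analysis, file_id) = fixture::file("fn foo() {}");
+//     let tree = analysis.syntax_tree(file_id, None).unwrap();
+//     // `tree` holds the `{:#?}` rendering produced by `syntax_tree` above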
+
+/// Attempts to parse the selected contents of a string literal
+/// as Rust syntax and returns its syntax tree.
+fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
+ // When the range is inside a string
+ // we'll attempt parsing it as rust syntax
+ // to provide the syntax tree of the contents of the string
+ match token.kind() {
+ STRING => syntax_tree_for_token(token, text_range),
+ _ => None,
+ }
+}
+
+fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
+ // Range of the full node
+ let node_range = node.text_range();
+ let text = node.text().to_string();
+
+ // We start at some point inside the node
+ // Either we have selected the whole string
+ // or our selection is inside it
+ let start = text_range.start() - node_range.start();
+
+ // how many characters we have selected
+ let len = text_range.len();
+
+ let node_len = node_range.len();
+
+ // We want to cap our length
+ let len = len.min(node_len);
+
+ // Ensure our slice is inside the actual string
+ let end =
+ if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start };
+
+ let text = &text[TextRange::new(start, end)];
+
+ // Remove possible extra string quotes from the start
+ // and the end of the string
+ let text = text
+ .trim_start_matches('r')
+ .trim_start_matches('#')
+ .trim_start_matches('"')
+ .trim_end_matches('#')
+ .trim_end_matches('"')
+ .trim()
+ // Remove custom markers
+ .replace("$0", "");
+
+ let parsed = SourceFile::parse(&text);
+
+ // If the "file" parsed without errors,
+ // return its syntax
+ if parsed.errors().is_empty() {
+ return Some(format!("{:#?}", parsed.tree().syntax()));
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::expect;
+
+ use crate::fixture;
+
+ fn check(ra_fixture: &str, expect: expect_test::Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let syn = analysis.syntax_tree(file_id, None).unwrap();
+ expect.assert_eq(&syn)
+ }
+ fn check_range(ra_fixture: &str, expect: expect_test::Expect) {
+ let (analysis, frange) = fixture::range(ra_fixture);
+ let syn = analysis.syntax_tree(frange.file_id, Some(frange.range)).unwrap();
+ expect.assert_eq(&syn)
+ }
+
+ #[test]
+ fn test_syntax_tree_without_range() {
+ // Basic syntax
+ check(
+ r#"fn foo() {}"#,
+ expect![[r#"
+ SOURCE_FILE@0..11
+ FN@0..11
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..11
+ STMT_LIST@9..11
+ L_CURLY@9..10 "{"
+ R_CURLY@10..11 "}"
+ "#]],
+ );
+
+ check(
+ r#"
+fn test() {
+ assert!("
+ fn foo() {
+ }
+ ", "");
+}"#,
+ expect![[r#"
+ SOURCE_FILE@0..60
+ FN@0..60
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..7
+ IDENT@3..7 "test"
+ PARAM_LIST@7..9
+ L_PAREN@7..8 "("
+ R_PAREN@8..9 ")"
+ WHITESPACE@9..10 " "
+ BLOCK_EXPR@10..60
+ STMT_LIST@10..60
+ L_CURLY@10..11 "{"
+ WHITESPACE@11..16 "\n "
+ EXPR_STMT@16..58
+ MACRO_EXPR@16..57
+ MACRO_CALL@16..57
+ PATH@16..22
+ PATH_SEGMENT@16..22
+ NAME_REF@16..22
+ IDENT@16..22 "assert"
+ BANG@22..23 "!"
+ TOKEN_TREE@23..57
+ L_PAREN@23..24 "("
+ STRING@24..52 "\"\n fn foo() {\n ..."
+ COMMA@52..53 ","
+ WHITESPACE@53..54 " "
+ STRING@54..56 "\"\""
+ R_PAREN@56..57 ")"
+ SEMICOLON@57..58 ";"
+ WHITESPACE@58..59 "\n"
+ R_CURLY@59..60 "}"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_syntax_tree_with_range() {
+ check_range(
+ r#"$0fn foo() {}$0"#,
+ expect![[r#"
+ FN@0..11
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..11
+ STMT_LIST@9..11
+ L_CURLY@9..10 "{"
+ R_CURLY@10..11 "}"
+ "#]],
+ );
+
+ check_range(
+ r#"
+fn test() {
+ $0assert!("
+ fn foo() {
+ }
+ ", "");$0
+}"#,
+ expect![[r#"
+ EXPR_STMT@16..58
+ MACRO_EXPR@16..57
+ MACRO_CALL@16..57
+ PATH@16..22
+ PATH_SEGMENT@16..22
+ NAME_REF@16..22
+ IDENT@16..22 "assert"
+ BANG@22..23 "!"
+ TOKEN_TREE@23..57
+ L_PAREN@23..24 "("
+ STRING@24..52 "\"\n fn foo() {\n ..."
+ COMMA@52..53 ","
+ WHITESPACE@53..54 " "
+ STRING@54..56 "\"\""
+ R_PAREN@56..57 ")"
+ SEMICOLON@57..58 ";"
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_syntax_tree_inside_string() {
+ check_range(
+ r#"fn test() {
+ assert!("
+$0fn foo() {
+}$0
+fn bar() {
+}
+ ", "");
+}"#,
+ expect![[r#"
+ SOURCE_FILE@0..12
+ FN@0..12
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..12
+ STMT_LIST@9..12
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..11 "\n"
+ R_CURLY@11..12 "}"
+ "#]],
+ );
+
+ // With a raw string
+ check_range(
+ r###"fn test() {
+ assert!(r#"
+$0fn foo() {
+}$0
+fn bar() {
+}
+ "#, "");
+}"###,
+ expect![[r#"
+ SOURCE_FILE@0..12
+ FN@0..12
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..12
+ STMT_LIST@9..12
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..11 "\n"
+ R_CURLY@11..12 "}"
+ "#]],
+ );
+
+ // With a raw string
+ check_range(
+ r###"fn test() {
+ assert!(r$0#"
+fn foo() {
+}
+fn bar() {
+}"$0#, "");
+}"###,
+ expect![[r#"
+ SOURCE_FILE@0..25
+ FN@0..12
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..12
+ STMT_LIST@9..12
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..11 "\n"
+ R_CURLY@11..12 "}"
+ WHITESPACE@12..13 "\n"
+ FN@13..25
+ FN_KW@13..15 "fn"
+ WHITESPACE@15..16 " "
+ NAME@16..19
+ IDENT@16..19 "bar"
+ PARAM_LIST@19..21
+ L_PAREN@19..20 "("
+ R_PAREN@20..21 ")"
+ WHITESPACE@21..22 " "
+ BLOCK_EXPR@22..25
+ STMT_LIST@22..25
+ L_CURLY@22..23 "{"
+ WHITESPACE@23..24 "\n"
+ R_CURLY@24..25 "}"
+ "#]],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
new file mode 100644
index 000000000..9118f3c69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -0,0 +1,1210 @@
+//! This module handles auto-magic editing actions applied together with the user's
+//! edits. For example, if the user has typed
+//!
+//! ```text
+//! foo
+//! .bar()
+//! .baz()
+//! | // <- cursor is here
+//! ```
+//!
+//! and types `.` next, we want to indent the dot.
+//!
+//! The language server executes such typing assists synchronously. That is, they
+//! block the user's typing and should therefore be pretty fast!
+
+mod on_enter;
+
+use ide_db::{
+ base_db::{FilePosition, SourceDatabase},
+ RootDatabase,
+};
+use syntax::{
+ algo::{ancestors_at_offset, find_node_at_offset},
+ ast::{self, edit::IndentLevel, AstToken},
+ AstNode, Parse, SourceFile, SyntaxKind, TextRange, TextSize, T,
+};
+
+use text_edit::{Indel, TextEdit};
+
+use crate::SourceChange;
+
+pub(crate) use on_enter::on_enter;
+
+// Don't forget to add new trigger characters to `server_capabilities` in `caps.rs`.
+pub(crate) const TRIGGER_CHARS: &str = ".=<>{";
+
+struct ExtendedTextEdit {
+ edit: TextEdit,
+ is_snippet: bool,
+}
+
+// Feature: On Typing Assists
+//
+// Some features trigger on typing certain characters:
+//
+// - typing `let =` tries to smartly add `;` if `=` is followed by an existing expression
+// - typing `=` between two expressions adds `;` when in statement position
+// - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
+// - typing `.` in a chain method call auto-indents
+// - typing `{` in front of an expression inserts a closing `}` after the expression
+// - typing `{` in a use item adds a closing `}` in the right place
+//
+// VS Code::
+//
+// Add the following to `settings.json`:
+// [source,json]
+// ----
+// "editor.formatOnType": true,
+// ----
+//
+// image::https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif[]
+// image::https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif[]
+pub(crate) fn on_char_typed(
+ db: &RootDatabase,
+ position: FilePosition,
+ char_typed: char,
+) -> Option<SourceChange> {
+ if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) {
+ return None;
+ }
+ let file = &db.parse(position.file_id);
+ if !stdx::always!(file.tree().syntax().text().char_at(position.offset) == Some(char_typed)) {
+ return None;
+ }
+ let edit = on_char_typed_inner(file, position.offset, char_typed)?;
+ let mut sc = SourceChange::from_text_edit(position.file_id, edit.edit);
+ sc.is_snippet = edit.is_snippet;
+ Some(sc)
+}
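+
+// Illustrative sketch, not part of the original file: the tests at the bottom of this
+// file exercise the same logic through `on_char_typed_inner`; a caller holding a
+// `RootDatabase` would instead go through `on_char_typed` and apply the returned
+// `SourceChange`, e.g.
+//
+//     if let Some(change) = on_char_typed(db, position, '.') {
+//         // apply `change` to the open document
+//     }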
+
+fn on_char_typed_inner(
+ file: &Parse<SourceFile>,
+ offset: TextSize,
+ char_typed: char,
+) -> Option<ExtendedTextEdit> {
+ if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) {
+ return None;
+ }
+ return match char_typed {
+ '.' => conv(on_dot_typed(&file.tree(), offset)),
+ '=' => conv(on_eq_typed(&file.tree(), offset)),
+ '<' => on_left_angle_typed(&file.tree(), offset),
+ '>' => conv(on_right_angle_typed(&file.tree(), offset)),
+ '{' => conv(on_opening_brace_typed(file, offset)),
+ _ => return None,
+ };
+
+ fn conv(text_edit: Option<TextEdit>) -> Option<ExtendedTextEdit> {
+ Some(ExtendedTextEdit { edit: text_edit?, is_snippet: false })
+ }
+}
+
+/// Inserts a closing `}` when the user types an opening `{`, wrapping either an existing
+/// expression in a block or a part of a `use` item in braces.
+fn on_opening_brace_typed(file: &Parse<SourceFile>, offset: TextSize) -> Option<TextEdit> {
+ if !stdx::always!(file.tree().syntax().text().char_at(offset) == Some('{')) {
+ return None;
+ }
+
+ let brace_token = file.tree().syntax().token_at_offset(offset).right_biased()?;
+ if brace_token.kind() != SyntaxKind::L_CURLY {
+ return None;
+ }
+
+ // Remove the `{` to get a better parse tree, and reparse.
+ let range = brace_token.text_range();
+ if !stdx::always!(range.len() == TextSize::of('{')) {
+ return None;
+ }
+ let file = file.reparse(&Indel::delete(range));
+
+ if let Some(edit) = brace_expr(&file.tree(), offset) {
+ return Some(edit);
+ }
+
+ if let Some(edit) = brace_use_path(&file.tree(), offset) {
+ return Some(edit);
+ }
+
+ return None;
+
+ fn brace_use_path(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let segment: ast::PathSegment = find_node_at_offset(file.syntax(), offset)?;
+ if segment.syntax().text_range().start() != offset {
+ return None;
+ }
+
+ let tree: ast::UseTree = find_node_at_offset(file.syntax(), offset)?;
+
+ Some(TextEdit::insert(
+ tree.syntax().text_range().end() + TextSize::of("{"),
+ "}".to_string(),
+ ))
+ }
+
+ fn brace_expr(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let mut expr: ast::Expr = find_node_at_offset(file.syntax(), offset)?;
+ if expr.syntax().text_range().start() != offset {
+ return None;
+ }
+
+ // Enclose the outermost expression starting at `offset`
+ while let Some(parent) = expr.syntax().parent() {
+ if parent.text_range().start() != expr.syntax().text_range().start() {
+ break;
+ }
+
+ match ast::Expr::cast(parent) {
+ Some(parent) => expr = parent,
+ None => break,
+ }
+ }
+
+ // If it's a statement in a block, we don't know how many statements should be included
+ if ast::ExprStmt::can_cast(expr.syntax().parent()?.kind()) {
+ return None;
+ }
+
+ // Insert `}` right after the expression.
+ Some(TextEdit::insert(
+ expr.syntax().text_range().end() + TextSize::of("{"),
+ "}".to_string(),
+ ))
+ }
+}
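+
+// Illustrative before/after sketch, not part of the original file (`$0` marks the
+// cursor, as in the tests at the bottom of this file):
+//
+//     typing `{` at `let x = $0();`     =>  `let x = {()};`
+//     typing `{` at `use some::$0Path;` =>  `use some::{Path};`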
+
+/// Returns an edit which should be applied after `=` was typed. Primarily,
+/// this works when adding `let =`.
+// FIXME: use a snippet completion instead of this hack here.
+fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ if !stdx::always!(file.syntax().text().char_at(offset) == Some('=')) {
+ return None;
+ }
+
+ if let Some(edit) = let_stmt(file, offset) {
+ return Some(edit);
+ }
+ if let Some(edit) = assign_expr(file, offset) {
+ return Some(edit);
+ }
+ if let Some(edit) = assign_to_eq(file, offset) {
+ return Some(edit);
+ }
+
+ return None;
+
+ fn assign_expr(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let binop: ast::BinExpr = find_node_at_offset(file.syntax(), offset)?;
+ if !matches!(binop.op_kind(), Some(ast::BinaryOp::Assignment { op: None })) {
+ return None;
+ }
+
+ // Parent must be `ExprStmt` or `StmtList` for `;` to be valid.
+ if let Some(expr_stmt) = ast::ExprStmt::cast(binop.syntax().parent()?) {
+ if expr_stmt.semicolon_token().is_some() {
+ return None;
+ }
+ } else {
+ if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) {
+ return None;
+ }
+ }
+
+ let expr = binop.rhs()?;
+ let expr_range = expr.syntax().text_range();
+ if expr_range.contains(offset) && offset != expr_range.start() {
+ return None;
+ }
+ if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') {
+ return None;
+ }
+ let offset = expr.syntax().text_range().end();
+ Some(TextEdit::insert(offset, ";".to_string()))
+ }
+
+ /// `a =$0 b;` removes the semicolon if an expression is valid in this context.
+ fn assign_to_eq(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let binop: ast::BinExpr = find_node_at_offset(file.syntax(), offset)?;
+ if !matches!(binop.op_kind(), Some(ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false })))
+ {
+ return None;
+ }
+
+ let expr_stmt = ast::ExprStmt::cast(binop.syntax().parent()?)?;
+ let semi = expr_stmt.semicolon_token()?;
+
+ if expr_stmt.syntax().next_sibling().is_some() {
+ // Not the last statement in the list.
+ return None;
+ }
+
+ Some(TextEdit::delete(semi.text_range()))
+ }
+
+ fn let_stmt(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
+ if let_stmt.semicolon_token().is_some() {
+ return None;
+ }
+ let expr = let_stmt.initializer()?;
+ let expr_range = expr.syntax().text_range();
+ if expr_range.contains(offset) && offset != expr_range.start() {
+ return None;
+ }
+ if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') {
+ return None;
+ }
+ let offset = let_stmt.syntax().text_range().end();
+ Some(TextEdit::insert(offset, ";".to_string()))
+ }
+}
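+
+// Illustrative before/after sketch, not part of the original file (`$0` marks the
+// position where `=` is typed, as in the tests at the bottom of this file):
+//
+//     typing `=` at `let foo $0 1 + 1`  =>  `let foo = 1 + 1;`  // let_stmt adds `;`
+//     typing `=` at `a =$0 0;`          =>  `a == 0`            // assign_to_eq drops `;`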
+
+/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
+fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ if !stdx::always!(file.syntax().text().char_at(offset) == Some('.')) {
+ return None;
+ }
+ let whitespace =
+ file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?;
+
+ // If the prior expression is a fn call over multiple lines, don't indent;
+ // if it is a method call over multiple lines, keep that indentation.
+ let current_indent = {
+ let text = whitespace.text();
+ let (_prefix, suffix) = text.rsplit_once('\n')?;
+ suffix
+ };
+ let current_indent_len = TextSize::of(current_indent);
+
+ let parent = whitespace.syntax().parent()?;
+ // Make sure dot is a part of call chain
+ let receiver = if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
+ field_expr.expr()?
+ } else if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent.clone()) {
+ method_call_expr.receiver()?
+ } else {
+ return None;
+ };
+
+ let receiver_is_multiline = receiver.syntax().text().find_char('\n').is_some();
+ let target_indent = match (receiver, receiver_is_multiline) {
+ // if receiver is multiline field or method call, just take the previous `.` indentation
+ (ast::Expr::MethodCallExpr(expr), true) => {
+ expr.dot_token().as_ref().map(IndentLevel::from_token)
+ }
+ (ast::Expr::FieldExpr(expr), true) => {
+ expr.dot_token().as_ref().map(IndentLevel::from_token)
+ }
+ // if receiver is multiline expression, just keeps its indentation
+ (_, true) => Some(IndentLevel::from_node(&parent)),
+ _ => None,
+ };
+ let target_indent = match target_indent {
+ Some(x) => x,
+ // in all other cases, take previous indentation and indent once
+ None => IndentLevel::from_node(&parent) + 1,
+ }
+ .to_string();
+
+ if current_indent_len == TextSize::of(&target_indent) {
+ return None;
+ }
+
+ Some(TextEdit::replace(TextRange::new(offset - current_indent_len, offset), target_indent))
+}
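+
+// Illustrative sketch, not part of the original file, mirroring the
+// `indents_new_chain_call` test below: with the cursor on an otherwise blank line
+// after `xs.foo()`, typing `.` replaces the leading whitespace of that line so the
+// new `.` ends up one indent level deeper than the receiver.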
+
+/// Adds a closing `>` for generic arguments/parameters.
+fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option<ExtendedTextEdit> {
+ let file_text = file.syntax().text();
+ if !stdx::always!(file_text.char_at(offset) == Some('<')) {
+ return None;
+ }
+
+ // Find the next non-whitespace char in the line.
+ let mut next_offset = offset + TextSize::of('<');
+ while file_text.char_at(next_offset) == Some(' ') {
+ next_offset += TextSize::of(' ')
+ }
+ if file_text.char_at(next_offset) == Some('>') {
+ return None;
+ }
+
+ let range = TextRange::at(offset, TextSize::of('<'));
+ if let Some(t) = file.syntax().token_at_offset(offset).left_biased() {
+ if T![impl] == t.kind() {
+ return Some(ExtendedTextEdit {
+ edit: TextEdit::replace(range, "<$0>".to_string()),
+ is_snippet: true,
+ });
+ }
+ }
+
+ if ancestors_at_offset(file.syntax(), offset)
+ .find(|n| {
+ ast::GenericParamList::can_cast(n.kind()) || ast::GenericArgList::can_cast(n.kind())
+ })
+ .is_some()
+ {
+ return Some(ExtendedTextEdit {
+ edit: TextEdit::replace(range, "<$0>".to_string()),
+ is_snippet: true,
+ });
+ }
+
+ None
+}
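+
+// Illustrative sketch, not part of the original file (`$0` is the snippet cursor, as
+// in the tests at the bottom of this file): typing `<` after `bar::` yields
+// `bar::<$0>`, typing `<` after `fn foo` yields `fn foo<$0>() {}`, while a plain
+// comparison such as `42 <` is left untouched.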
+
+/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`
+fn on_right_angle_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let file_text = file.syntax().text();
+ if !stdx::always!(file_text.char_at(offset) == Some('>')) {
+ return None;
+ }
+ let after_arrow = offset + TextSize::of('>');
+ if file_text.char_at(after_arrow) != Some('{') {
+ return None;
+ }
+ if find_node_at_offset::<ast::RetType>(file.syntax(), offset).is_none() {
+ return None;
+ }
+
+ Some(TextEdit::insert(after_arrow, " ".to_string()))
+}
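+
+// Illustrative sketch, not part of the original file, mirroring the
+// `adds_space_after_return_type` test below: typing `>` right after the `-` in
+// `fn foo() -{ 92 }` produces `fn foo() -> { 92 }`, i.e. a space is inserted
+// between the completed arrow and the `{`.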
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{assert_eq_text, extract_offset};
+
+ use super::*;
+
+ impl ExtendedTextEdit {
+ fn apply(&self, text: &mut String) {
+ self.edit.apply(text);
+ }
+ }
+
+ fn do_type_char(char_typed: char, before: &str) -> Option<String> {
+ let (offset, mut before) = extract_offset(before);
+ let edit = TextEdit::insert(offset, char_typed.to_string());
+ edit.apply(&mut before);
+ let parse = SourceFile::parse(&before);
+ on_char_typed_inner(&parse, offset, char_typed).map(|it| {
+ it.apply(&mut before);
+ before.to_string()
+ })
+ }
+
+ fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) {
+ let actual = do_type_char(char_typed, ra_fixture_before)
+ .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed));
+
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ fn type_char_noop(char_typed: char, ra_fixture_before: &str) {
+ let file_change = do_type_char(char_typed, ra_fixture_before);
+ assert!(file_change.is_none())
+ }
+
+ #[test]
+ fn test_semi_after_let() {
+ // do_check(r"
+ // fn foo() {
+ // let foo =$0
+ // }
+ // ", r"
+ // fn foo() {
+ // let foo =;
+ // }
+ // ");
+ type_char(
+ '=',
+ r#"
+fn foo() {
+ let foo $0 1 + 1
+}
+"#,
+ r#"
+fn foo() {
+ let foo = 1 + 1;
+}
+"#,
+ );
+ // do_check(r"
+ // fn foo() {
+ // let foo =$0
+ // let bar = 1;
+ // }
+ // ", r"
+ // fn foo() {
+ // let foo =;
+ // let bar = 1;
+ // }
+ // ");
+ }
+
+ #[test]
+ fn test_semi_after_assign() {
+ type_char(
+ '=',
+ r#"
+fn f() {
+ i $0 0
+}
+"#,
+ r#"
+fn f() {
+ i = 0;
+}
+"#,
+ );
+ type_char(
+ '=',
+ r#"
+fn f() {
+ i $0 0
+ i
+}
+"#,
+ r#"
+fn f() {
+ i = 0;
+ i
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(x: u8) {
+ if x $0
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(x: u8) {
+ if x $0 {}
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(x: u8) {
+ if x $0 0 {}
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f() {
+ g(i $0 0);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn assign_to_eq() {
+ type_char(
+ '=',
+ r#"
+fn f(a: u8) {
+ a =$0 0;
+}
+"#,
+ r#"
+fn f(a: u8) {
+ a == 0
+}
+"#,
+ );
+ type_char(
+ '=',
+ r#"
+fn f(a: u8) {
+ a $0= 0;
+}
+"#,
+ r#"
+fn f(a: u8) {
+ a == 0
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(a: u8) {
+ let e = a =$0 0;
+}
+"#,
+ );
+ type_char_noop(
+ '=',
+ r#"
+fn f(a: u8) {
+ let e = a =$0 0;
+ e
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indents_new_chain_call() {
+ type_char(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ $0
+}
+ "#,
+ r#"
+fn main() {
+ xs.foo()
+ .
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn indents_new_chain_call_with_semi() {
+ type_char(
+ '.',
+ r"
+fn main() {
+ xs.foo()
+ $0;
+}
+ ",
+ r#"
+fn main() {
+ xs.foo()
+ .;
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ $0;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn indents_new_chain_call_with_let() {
+ type_char(
+ '.',
+ r#"
+fn main() {
+ let _ = foo
+ $0
+ bar()
+}
+"#,
+ r#"
+fn main() {
+ let _ = foo
+ .
+ bar()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indents_continued_chain_call() {
+ type_char(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ .first()
+ $0
+}
+ "#,
+ r#"
+fn main() {
+ xs.foo()
+ .first()
+ .
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ xs.foo()
+ .first()
+ $0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_middle_of_chain_call() {
+ type_char(
+ '.',
+ r#"
+fn source_impl() {
+ let var = enum_defvariant_list().unwrap()
+ $0
+ .nth(92)
+ .unwrap();
+}
+ "#,
+ r#"
+fn source_impl() {
+ let var = enum_defvariant_list().unwrap()
+ .
+ .nth(92)
+ .unwrap();
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn source_impl() {
+ let var = enum_defvariant_list().unwrap()
+ $0
+ .nth(92)
+ .unwrap();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn dont_indent_freestanding_dot() {
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+ $0
+}
+ "#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn main() {
+$0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn adds_space_after_return_type() {
+ type_char(
+ '>',
+ r#"
+fn foo() -$0{ 92 }
+"#,
+ r#"
+fn foo() -> { 92 }
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_brace_for_expr() {
+ type_char(
+ '{',
+ r#"
+fn f() { match () { _ => $0() } }
+ "#,
+ r#"
+fn f() { match () { _ => {()} } }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() { $0() }
+ "#,
+ r#"
+fn f() { {()} }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() { let x = $0(); }
+ "#,
+ r#"
+fn f() { let x = {()}; }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() { let x = $0a.b(); }
+ "#,
+ r#"
+fn f() { let x = {a.b()}; }
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+const S: () = $0();
+fn f() {}
+ "#,
+ r#"
+const S: () = {()};
+fn f() {}
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+const S: () = $0a.b();
+fn f() {}
+ "#,
+ r#"
+const S: () = {a.b()};
+fn f() {}
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+fn f() {
+ match x {
+ 0 => $0(),
+ 1 => (),
+ }
+}
+ "#,
+ r#"
+fn f() {
+ match x {
+ 0 => {()},
+ 1 => (),
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn noop_in_string_literal() {
+ // Regression test for #9351
+ type_char_noop(
+ '{',
+ r##"
+fn check_with(ra_fixture: &str, expect: Expect) {
+ let base = r#"
+enum E { T(), R$0, C }
+use self::E::X;
+const Z: E = E::C;
+mod m {}
+asdasdasdasdasdasda
+sdasdasdasdasdasda
+sdasdasdasdasd
+"#;
+ let actual = completion_list(&format!("{}\n{}", base, ra_fixture));
+ expect.assert_eq(&actual)
+}
+ "##,
+ );
+ }
+
+ #[test]
+ fn noop_in_item_position_with_macro() {
+ type_char_noop('{', r#"$0println!();"#);
+ type_char_noop(
+ '{',
+ r#"
+fn main() $0println!("hello");
+}"#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_brace_for_use_tree() {
+ type_char(
+ '{',
+ r#"
+use some::$0Path;
+ "#,
+ r#"
+use some::{Path};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::{Path, $0Other};
+ "#,
+ r#"
+use some::{Path, {Other}};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::{$0Path, Other};
+ "#,
+ r#"
+use some::{{Path}, Other};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::path::$0to::Item;
+ "#,
+ r#"
+use some::path::{to::Item};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::$0path::to::Item;
+ "#,
+ r#"
+use some::{path::to::Item};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use $0some::path::to::Item;
+ "#,
+ r#"
+use {some::path::to::Item};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use some::path::$0to::{Item};
+ "#,
+ r#"
+use some::path::{to::{Item}};
+ "#,
+ );
+ type_char(
+ '{',
+ r#"
+use $0Thing as _;
+ "#,
+ r#"
+use {Thing as _};
+ "#,
+ );
+
+ type_char_noop(
+ '{',
+ r#"
+use some::pa$0th::to::Item;
+ "#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_angle_bracket_for_generic_args() {
+ type_char(
+ '<',
+ r#"
+fn foo() {
+ bar::$0
+}
+ "#,
+ r#"
+fn foo() {
+ bar::<$0>
+}
+ "#,
+ );
+
+ type_char(
+ '<',
+ r#"
+fn foo(bar: &[u64]) {
+ bar.iter().collect::$0();
+}
+ "#,
+ r#"
+fn foo(bar: &[u64]) {
+ bar.iter().collect::<$0>();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn adds_closing_angle_bracket_for_generic_params() {
+ type_char(
+ '<',
+ r#"
+fn foo$0() {}
+ "#,
+ r#"
+fn foo<$0>() {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+fn foo$0
+ "#,
+ r#"
+fn foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+struct Foo$0 {}
+ "#,
+ r#"
+struct Foo<$0> {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+struct Foo$0();
+ "#,
+ r#"
+struct Foo<$0>();
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+struct Foo$0
+ "#,
+ r#"
+struct Foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+enum Foo$0
+ "#,
+ r#"
+enum Foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+trait Foo$0
+ "#,
+ r#"
+trait Foo<$0>
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+type Foo$0 = Bar;
+ "#,
+ r#"
+type Foo<$0> = Bar;
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+impl$0 Foo {}
+ "#,
+ r#"
+impl<$0> Foo {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+impl<T> Foo$0 {}
+ "#,
+ r#"
+impl<T> Foo<$0> {}
+ "#,
+ );
+ type_char(
+ '<',
+ r#"
+impl Foo$0 {}
+ "#,
+ r#"
+impl Foo<$0> {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn dont_add_closing_angle_bracket_for_comparison() {
+ type_char_noop(
+ '<',
+ r#"
+fn main() {
+ 42$0
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn main() {
+ 42 $0
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn main() {
+ let foo = 42;
+ foo $0
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn dont_add_closing_angle_bracket_if_it_is_already_there() {
+ type_char_noop(
+ '<',
+ r#"
+fn foo() {
+ bar::$0>
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn foo(bar: &[u64]) {
+ bar.iter().collect::$0 >();
+}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn foo$0>() {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+fn foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+struct Foo$0> {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+struct Foo$0>();
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+struct Foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+enum Foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+trait Foo$0>
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+type Foo$0> = Bar;
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+impl$0> Foo {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+impl<T> Foo$0> {}
+ "#,
+ );
+ type_char_noop(
+ '<',
+ r#"
+impl Foo$0> {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn regression_629() {
+ type_char_noop(
+ '.',
+ r#"
+fn foo() {
+ CompletionItem::new(
+ CompletionKind::Reference,
+ ctx.source_range(),
+ field.name().to_string(),
+ )
+ .foo()
+ $0
+}
+"#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn foo() {
+ CompletionItem::new(
+ CompletionKind::Reference,
+ ctx.source_range(),
+ field.name().to_string(),
+ )
+ $0
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
new file mode 100644
index 000000000..48c171327
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -0,0 +1,616 @@
+//! Handles the `Enter` key press. At the moment, this only continues
+//! comments, but it should handle indentation some time in the future as well.
+
+use ide_db::base_db::{FilePosition, SourceDatabase};
+use ide_db::RootDatabase;
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, edit::IndentLevel, AstToken},
+ AstNode, SmolStr, SourceFile,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset,
+};
+
+use text_edit::TextEdit;
+
+// Feature: On Enter
+//
+// rust-analyzer can override kbd:[Enter] key to make it smarter:
+//
+// - kbd:[Enter] inside triple-slash comments automatically inserts `///`
+// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//`
+// - kbd:[Enter] inside `//!` doc comments automatically inserts `//!`
+// - kbd:[Enter] after `{` indents contents and closing `}` of single-line block
+//
+// This action needs to be assigned to a shortcut explicitly.
+//
+// Note that, depending on the other installed extensions, this feature can visibly slow down typing.
+// Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work.
+// In that case, you can still press `Shift-Enter` to insert a newline.
+//
+// VS Code::
+//
+// Add the following to `keybindings.json`:
+// [source,json]
+// ----
+// {
+// "key": "Enter",
+// "command": "rust-analyzer.onEnter",
+// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
+// }
+// ----
+//
+// When using the Vim plugin:
+// [source,json]
+// ----
+// {
+// "key": "Enter",
+// "command": "rust-analyzer.onEnter",
+// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && vim.mode == 'Insert'"
+// }
+// ----
+//
+// image::https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif[]
+pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
+ let parse = db.parse(position.file_id);
+ let file = parse.tree();
+ let token = file.syntax().token_at_offset(position.offset).left_biased()?;
+
+ if let Some(comment) = ast::Comment::cast(token.clone()) {
+ return on_enter_in_comment(&comment, &file, position.offset);
+ }
+
+ if token.kind() == L_CURLY {
+ // Typing enter after the `{` of a block expression, where the `}` is on the same line
+ if let Some(edit) = find_node_at_offset(file.syntax(), position.offset - TextSize::of('{'))
+ .and_then(|block| on_enter_in_block(block, position))
+ {
+ cov_mark::hit!(indent_block_contents);
+ return Some(edit);
+ }
+
+ // Typing enter after the `{` of a use tree list.
+ if let Some(edit) = find_node_at_offset(file.syntax(), position.offset - TextSize::of('{'))
+ .and_then(|list| on_enter_in_use_tree_list(list, position))
+ {
+ cov_mark::hit!(indent_block_contents);
+ return Some(edit);
+ }
+ }
+
+ None
+}
+
+fn on_enter_in_comment(
+ comment: &ast::Comment,
+ file: &ast::SourceFile,
+ offset: TextSize,
+) -> Option<TextEdit> {
+ if comment.kind().shape.is_block() {
+ return None;
+ }
+
+ let prefix = comment.prefix();
+ let comment_range = comment.syntax().text_range();
+ if offset < comment_range.start() + TextSize::of(prefix) {
+ return None;
+ }
+
+ let mut remove_trailing_whitespace = false;
+ // Continuing single-line non-doc comments (like this one :) ) is annoying
+ if prefix == "//" && comment_range.end() == offset {
+ if comment.text().ends_with(' ') {
+ cov_mark::hit!(continues_end_of_line_comment_with_space);
+ remove_trailing_whitespace = true;
+ } else if !followed_by_comment(comment) {
+ return None;
+ }
+ }
+
+ let indent = node_indent(file, comment.syntax())?;
+ let inserted = format!("\n{}{} $0", indent, prefix);
+ let delete = if remove_trailing_whitespace {
+ let trimmed_len = comment.text().trim_end().len() as u32;
+ let trailing_whitespace_len = comment.text().len() as u32 - trimmed_len;
+ TextRange::new(offset - TextSize::from(trailing_whitespace_len), offset)
+ } else {
+ TextRange::empty(offset)
+ };
+ let edit = TextEdit::replace(delete, inserted);
+ Some(edit)
+}
+
+fn on_enter_in_block(block: ast::BlockExpr, position: FilePosition) -> Option<TextEdit> {
+ let contents = block_contents(&block)?;
+
+ if block.syntax().text().contains_char('\n') {
+ return None;
+ }
+
+ let indent = IndentLevel::from_node(block.syntax());
+ let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
+ edit.union(TextEdit::insert(contents.text_range().end(), format!("\n{}", indent))).ok()?;
+ Some(edit)
+}
+
+fn on_enter_in_use_tree_list(list: ast::UseTreeList, position: FilePosition) -> Option<TextEdit> {
+ if list.syntax().text().contains_char('\n') {
+ return None;
+ }
+
+ let indent = IndentLevel::from_node(list.syntax());
+ let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
+ edit.union(TextEdit::insert(
+ list.r_curly_token()?.text_range().start(),
+ format!("\n{}", indent),
+ ))
+ .ok()?;
+ Some(edit)
+}
+
+fn block_contents(block: &ast::BlockExpr) -> Option<SyntaxNode> {
+ let mut node = block.tail_expr().map(|e| e.syntax().clone());
+
+ for stmt in block.statements() {
+ if node.is_some() {
+ // More than 1 node in the block
+ return None;
+ }
+
+ node = Some(stmt.syntax().clone());
+ }
+
+ node
+}
+
+fn followed_by_comment(comment: &ast::Comment) -> bool {
+ let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) {
+ Some(it) => it,
+ None => return false,
+ };
+ if ws.spans_multiple_lines() {
+ return false;
+ }
+ ws.syntax().next_token().and_then(ast::Comment::cast).is_some()
+}
+
+fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
+ let ws = match file.syntax().token_at_offset(token.text_range().start()) {
+ TokenAtOffset::Between(l, r) => {
+ assert!(r == *token);
+ l
+ }
+ TokenAtOffset::Single(n) => {
+ assert!(n == *token);
+ return Some("".into());
+ }
+ TokenAtOffset::None => unreachable!(),
+ };
+ if ws.kind() != WHITESPACE {
+ return None;
+ }
+ let text = ws.text();
+ let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
+ Some(text[pos..].into())
+}
+
+#[cfg(test)]
+mod tests {
+ use stdx::trim_indent;
+ use test_utils::assert_eq_text;
+
+ use crate::fixture;
+
+ fn apply_on_enter(before: &str) -> Option<String> {
+ let (analysis, position) = fixture::position(before);
+ let result = analysis.on_enter(position).unwrap()?;
+
+ let mut actual = analysis.file_text(position.file_id).unwrap().to_string();
+ result.apply(&mut actual);
+ Some(actual)
+ }
+
+ fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let ra_fixture_after = &trim_indent(ra_fixture_after);
+ let actual = apply_on_enter(ra_fixture_before).unwrap();
+ assert_eq_text!(ra_fixture_after, &actual);
+ }
+
+ fn do_check_noop(ra_fixture_text: &str) {
+ assert!(apply_on_enter(ra_fixture_text).is_none())
+ }
+
+ #[test]
+ fn continues_doc_comment() {
+ do_check(
+ r"
+/// Some docs$0
+fn foo() {
+}
+",
+ r"
+/// Some docs
+/// $0
+fn foo() {
+}
+",
+ );
+
+ do_check(
+ r"
+impl S {
+ /// Some$0 docs.
+ fn foo() {}
+}
+",
+ r"
+impl S {
+ /// Some
+ /// $0 docs.
+ fn foo() {}
+}
+",
+ );
+
+ do_check(
+ r"
+///$0 Some docs
+fn foo() {
+}
+",
+ r"
+///
+/// $0 Some docs
+fn foo() {
+}
+",
+ );
+ }
+
+ #[test]
+ fn does_not_continue_before_doc_comment() {
+ do_check_noop(r"$0//! docz");
+ }
+
+ #[test]
+ fn continues_another_doc_comment() {
+ do_check(
+ r#"
+fn main() {
+ //! Documentation for$0 on enter
+ let x = 1 + 1;
+}
+"#,
+ r#"
+fn main() {
+ //! Documentation for
+ //! $0 on enter
+ let x = 1 + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn continues_code_comment_in_the_middle_of_line() {
+ do_check(
+ r"
+fn main() {
+ // Fix$0 me
+ let x = 1 + 1;
+}
+",
+ r"
+fn main() {
+ // Fix
+ // $0 me
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn continues_code_comment_in_the_middle_several_lines() {
+ do_check(
+ r"
+fn main() {
+ // Fix$0
+ // me
+ let x = 1 + 1;
+}
+",
+ r"
+fn main() {
+ // Fix
+ // $0
+ // me
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn does_not_continue_end_of_line_comment() {
+ do_check_noop(
+ r"
+fn main() {
+ // Fix me$0
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn continues_end_of_line_comment_with_space() {
+ cov_mark::check!(continues_end_of_line_comment_with_space);
+ do_check(
+ r#"
+fn main() {
+ // Fix me $0
+ let x = 1 + 1;
+}
+"#,
+ r#"
+fn main() {
+ // Fix me
+ // $0
+ let x = 1 + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trims_all_trailing_whitespace() {
+ do_check(
+ "
+fn main() {
+ // Fix me \t\t $0
+ let x = 1 + 1;
+}
+",
+ "
+fn main() {
+ // Fix me
+ // $0
+ let x = 1 + 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn indents_fn_body_block() {
+ cov_mark::check!(indent_block_contents);
+ do_check(
+ r#"
+fn f() {$0()}
+ "#,
+ r#"
+fn f() {
+ $0()
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_block_expr() {
+ do_check(
+ r#"
+fn f() {
+ let x = {$0()};
+}
+ "#,
+ r#"
+fn f() {
+ let x = {
+ $0()
+ };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_match_arm() {
+ do_check(
+ r#"
+fn f() {
+ match 6 {
+ 1 => {$0f()},
+ _ => (),
+ }
+}
+ "#,
+ r#"
+fn f() {
+ match 6 {
+ 1 => {
+ $0f()
+ },
+ _ => (),
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_block_with_statement() {
+ do_check(
+ r#"
+fn f() {$0a = b}
+ "#,
+ r#"
+fn f() {
+ $0a = b
+}
+ "#,
+ );
+ do_check(
+ r#"
+fn f() {$0fn f() {}}
+ "#,
+ r#"
+fn f() {
+ $0fn f() {}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_nested_blocks() {
+ do_check(
+ r#"
+fn f() {$0{}}
+ "#,
+ r#"
+fn f() {
+ $0{}
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_empty_block() {
+ do_check_noop(
+ r#"
+fn f() {$0}
+ "#,
+ );
+ do_check_noop(
+ r#"
+fn f() {{$0}}
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_block_with_too_much_content() {
+ do_check_noop(
+ r#"
+fn f() {$0 a = b; ()}
+ "#,
+ );
+ do_check_noop(
+ r#"
+fn f() {$0 a = b; a = b; }
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_multiline_block() {
+ do_check_noop(
+ r#"
+fn f() {$0
+}
+ "#,
+ );
+ do_check_noop(
+ r#"
+fn f() {$0
+
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn indents_use_tree_list() {
+ do_check(
+ r#"
+use crate::{$0};
+ "#,
+ r#"
+use crate::{
+ $0
+};
+ "#,
+ );
+ do_check(
+ r#"
+use crate::{$0Object, path::to::OtherThing};
+ "#,
+ r#"
+use crate::{
+ $0Object, path::to::OtherThing
+};
+ "#,
+ );
+ do_check(
+ r#"
+use {crate::{$0Object, path::to::OtherThing}};
+ "#,
+ r#"
+use {crate::{
+ $0Object, path::to::OtherThing
+}};
+ "#,
+ );
+ do_check(
+ r#"
+use {
+ crate::{$0Object, path::to::OtherThing}
+};
+ "#,
+ r#"
+use {
+ crate::{
+ $0Object, path::to::OtherThing
+ }
+};
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_use_tree_list_when_not_at_curly_brace() {
+ do_check_noop(
+ r#"
+use path::{Thing$0};
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_use_tree_list_without_curly_braces() {
+ do_check_noop(
+ r#"
+use path::Thing$0;
+ "#,
+ );
+ do_check_noop(
+ r#"
+use path::$0Thing;
+ "#,
+ );
+ do_check_noop(
+ r#"
+use path::Thing$0};
+ "#,
+ );
+ do_check_noop(
+ r#"
+use path::{$0Thing;
+ "#,
+ );
+ }
+
+ #[test]
+ fn does_not_indent_multiline_use_tree_list() {
+ do_check_noop(
+ r#"
+use path::{$0
+ Thing
+};
+ "#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
new file mode 100644
index 000000000..51291a645
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
@@ -0,0 +1,93 @@
+use std::sync::Arc;
+
+use dot::{Id, LabelText};
+use ide_db::{
+ base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
+ FxHashSet, RootDatabase,
+};
+
+// Feature: View Crate Graph
+//
+// Renders the currently loaded crate graph as an SVG graphic. Requires the `dot` tool, which
+// is part of graphviz, to be installed.
+//
+// Only workspace crates are included, no crates.io dependencies or sysroot crates.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: View Crate Graph**
+// |===
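+//
+// The output is plain Graphviz dot text, roughly of this shape (an illustrative
+// sketch, not verbatim output):
+//
+// ----
+// digraph rust_analyzer_crate_graph {
+//     _0[label="my_bin"][shape="box"];
+//     _1[label="my_lib"][shape="box"];
+//     _0 -> _1;
+// }
+// ----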
+pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
+ let crate_graph = db.crate_graph();
+ let crates_to_render = crate_graph
+ .iter()
+ .filter(|krate| {
+ if full {
+ true
+ } else {
+ // Only render workspace crates
+ let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
+ !db.source_root(root_id).is_library
+ }
+ })
+ .collect();
+ let graph = DotCrateGraph { graph: crate_graph, crates_to_render };
+
+ let mut dot = Vec::new();
+ dot::render(&graph, &mut dot).unwrap();
+ Ok(String::from_utf8(dot).unwrap())
+}
+
+struct DotCrateGraph {
+ graph: Arc<CrateGraph>,
+ crates_to_render: FxHashSet<CrateId>,
+}
+
+type Edge<'a> = (CrateId, &'a Dependency);
+
+impl<'a> dot::GraphWalk<'a, CrateId, Edge<'a>> for DotCrateGraph {
+ fn nodes(&'a self) -> dot::Nodes<'a, CrateId> {
+ self.crates_to_render.iter().copied().collect()
+ }
+
+ fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> {
+ self.crates_to_render
+ .iter()
+ .flat_map(|krate| {
+ self.graph[*krate]
+ .dependencies
+ .iter()
+ .filter(|dep| self.crates_to_render.contains(&dep.crate_id))
+ .map(move |dep| (*krate, dep))
+ })
+ .collect()
+ }
+
+ fn source(&'a self, edge: &Edge<'a>) -> CrateId {
+ edge.0
+ }
+
+ fn target(&'a self, edge: &Edge<'a>) -> CrateId {
+ edge.1.crate_id
+ }
+}
+
+impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph {
+ fn graph_id(&'a self) -> Id<'a> {
+ Id::new("rust_analyzer_crate_graph").unwrap()
+ }
+
+ fn node_id(&'a self, n: &CrateId) -> Id<'a> {
+ Id::new(format!("_{}", n.0)).unwrap()
+ }
+
+ fn node_shape(&'a self, _node: &CrateId) -> Option<LabelText<'a>> {
+ Some(LabelText::LabelStr("box".into()))
+ }
+
+ fn node_label(&'a self, n: &CrateId) -> LabelText<'a> {
+ let name = self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| &*name);
+ LabelText::LabelStr(name.into())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
new file mode 100644
index 000000000..7312afe53
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
@@ -0,0 +1,26 @@
+use hir::{Function, Semantics};
+use ide_db::base_db::FilePosition;
+use ide_db::RootDatabase;
+use syntax::{algo::find_node_at_offset, ast, AstNode};
+
+// Feature: View Hir
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: View Hir**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
+pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
+ body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_string())
+}
+
+fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+
+ let function = find_node_at_offset::<ast::Fn>(source_file.syntax(), position.offset)?;
+
+ let function: Function = sema.to_def(&function)?;
+ Some(function.debug_hir(db))
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
new file mode 100644
index 000000000..3dc03085d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -0,0 +1,16 @@
+use hir::db::DefDatabase;
+use ide_db::base_db::FileId;
+use ide_db::RootDatabase;
+
+// Feature: Debug ItemTree
+//
+// Displays the ItemTree of the currently open file, for debugging.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **Rust Analyzer: Debug ItemTree**
+// |===
+pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
+ db.file_item_tree(file_id.into()).pretty_print()
+}
diff --git a/src/tools/rust-analyzer/crates/limit/Cargo.toml b/src/tools/rust-analyzer/crates/limit/Cargo.toml
new file mode 100644
index 000000000..893db436d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/limit/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "limit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[features]
+tracking = []
+default = ["tracking"]
diff --git a/src/tools/rust-analyzer/crates/limit/src/lib.rs b/src/tools/rust-analyzer/crates/limit/src/lib.rs
new file mode 100644
index 000000000..d6a706a7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/limit/src/lib.rs
@@ -0,0 +1,69 @@
+//! limit defines a struct to enforce limits.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[cfg(feature = "tracking")]
+use std::sync::atomic::AtomicUsize;
+
+/// Enforces an upper bound on a numeric value.
+pub struct Limit {
+ upper_bound: usize,
+ #[cfg(feature = "tracking")]
+ max: AtomicUsize,
+}
+
+impl Limit {
+ /// Creates a new limit.
+ #[inline]
+ pub const fn new(upper_bound: usize) -> Self {
+ Self {
+ upper_bound,
+ #[cfg(feature = "tracking")]
+ max: AtomicUsize::new(0),
+ }
+ }
+
+    /// Creates a new limit with tracking enabled.
+ #[inline]
+ #[cfg(feature = "tracking")]
+ pub const fn new_tracking(upper_bound: usize) -> Self {
+ Self {
+ upper_bound,
+ #[cfg(feature = "tracking")]
+ max: AtomicUsize::new(1),
+ }
+ }
+
+ /// Gets the underlying numeric limit.
+ #[inline]
+ pub const fn inner(&self) -> usize {
+ self.upper_bound
+ }
+
+    /// Checks whether the given value is within the limit.
+    /// Returns `Ok` when `other` does not exceed the limit, and `Err` otherwise.
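+    ///
+    /// A minimal usage sketch (illustrative):
+    ///
+    /// ```
+    /// let limit = limit::Limit::new(100);
+    /// assert!(limit.check(100).is_ok());
+    /// assert!(limit.check(101).is_err());
+    /// ```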
+ #[inline]
+ pub fn check(&self, other: usize) -> Result<(), ()> {
+ if other > self.upper_bound {
+ Err(())
+ } else {
+ #[cfg(feature = "tracking")]
+ loop {
+ use std::sync::atomic::Ordering;
+ let old_max = self.max.load(Ordering::Relaxed);
+ if other <= old_max || old_max == 0 {
+ break;
+ }
+ if self
+ .max
+ .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed)
+ .is_ok()
+ {
+ eprintln!("new max: {}", other);
+ }
+ }
+
+ Ok(())
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
new file mode 100644
index 000000000..5ff3448a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "mbe"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+rustc-hash = "1.1.0"
+smallvec = "1.9.0"
+tracing = "0.1.35"
+
+syntax = { path = "../syntax", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
new file mode 100644
index 000000000..ac691578d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -0,0 +1,222 @@
+//! This module adds real-world mbe examples for benchmark tests.
+
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SmolStr,
+};
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::{
+ parser::{Op, RepeatKind, Separator},
+ syntax_node_to_token_tree, DeclarativeMacro,
+};
+
+#[test]
+fn benchmark_parse_macro_rules() {
+ if skip_slow_tests() {
+ return;
+ }
+ let rules = macro_rules_fixtures_tt();
+ let hash: usize = {
+ let _pt = bench("mbe parse macro rules");
+ rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it).unwrap().rules.len()).sum()
+ };
+ assert_eq!(hash, 1144);
+}
+
+#[test]
+fn benchmark_expand_macro_rules() {
+ if skip_slow_tests() {
+ return;
+ }
+ let rules = macro_rules_fixtures();
+ let invocations = invocation_fixtures(&rules);
+
+ let hash: usize = {
+ let _pt = bench("mbe expand macro rules");
+ invocations
+ .into_iter()
+ .map(|(id, tt)| {
+ let res = rules[&id].expand(&tt);
+ assert!(res.err.is_none());
+ res.value.token_trees.len()
+ })
+ .sum()
+ };
+ assert_eq!(hash, 69413);
+}
+
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
+ macro_rules_fixtures_tt()
+ .into_iter()
+ .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt).unwrap()))
+ .collect()
+}
+
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+ let fixture = bench_fixture::numerous_macro_rules();
+ let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
+
+ source_file
+ .syntax()
+ .descendants()
+ .filter_map(ast::MacroRules::cast)
+ .map(|rule| {
+ let id = rule.name().unwrap().to_string();
+ let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
+ (id, def_tt)
+ })
+ .collect()
+}
+
+/// Generate random invocation fixtures from rules
+fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
+ let mut seed = 123456789;
+ let mut res = Vec::new();
+
+ for (name, it) in rules {
+ for rule in &it.rules {
+ // Generate twice
+ for _ in 0..2 {
+                // The inputs are generated by filling the `Op` randomly.
+                // However, some of the generated cases are ambiguous to expand, for example:
+ // ```rust
+ // macro_rules! m {
+ // ($($t:ident),* as $ty:ident) => {}
+ // }
+ // m!(as u32); // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option.
+ // ```
+ //
+ // So we just skip any error cases and try again
+ let mut try_cnt = 0;
+ loop {
+ let mut subtree = tt::Subtree::default();
+ for op in rule.lhs.iter() {
+ collect_from_op(op, &mut subtree, &mut seed);
+ }
+ if it.expand(&subtree).err.is_none() {
+ res.push((name.clone(), subtree));
+ break;
+ }
+ try_cnt += 1;
+ if try_cnt > 100 {
+ panic!("invocaton fixture {} cannot be generated.\n", name);
+ }
+ }
+ }
+ }
+ }
+ return res;
+
+ fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+ return match op {
+ Op::Var { kind, .. } => match kind.as_ref().map(|it| it.as_str()) {
+ Some("ident") => parent.token_trees.push(make_ident("foo")),
+ Some("ty") => parent.token_trees.push(make_ident("Foo")),
+ Some("tt") => parent.token_trees.push(make_ident("foo")),
+ Some("vis") => parent.token_trees.push(make_ident("pub")),
+ Some("pat") => parent.token_trees.push(make_ident("foo")),
+ Some("path") => parent.token_trees.push(make_ident("foo")),
+ Some("literal") => parent.token_trees.push(make_literal("1")),
+ Some("expr") => parent.token_trees.push(make_ident("foo")),
+ Some("lifetime") => {
+ parent.token_trees.push(make_punct('\''));
+ parent.token_trees.push(make_ident("a"));
+ }
+ Some("block") => {
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
+ }
+ Some("item") => {
+ parent.token_trees.push(make_ident("fn"));
+ parent.token_trees.push(make_ident("foo"));
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
+ }
+ Some("meta") => {
+ parent.token_trees.push(make_ident("foo"));
+ parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+ }
+
+ None => (),
+ Some(kind) => panic!("Unhandled kind {}", kind),
+ },
+ Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
+ Op::Repeat { tokens, kind, separator } => {
+ let max = 10;
+ let cnt = match kind {
+ RepeatKind::ZeroOrMore => rand(seed) % max,
+ RepeatKind::OneOrMore => 1 + rand(seed) % max,
+ RepeatKind::ZeroOrOne => rand(seed) % 2,
+ };
+ for i in 0..cnt {
+ for it in tokens.iter() {
+ collect_from_op(it, parent, seed);
+ }
+ if i + 1 != cnt {
+ if let Some(sep) = separator {
+ match sep {
+ Separator::Literal(it) => {
+ parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
+ }
+ Separator::Ident(it) => {
+ parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
+ }
+ Separator::Puncts(puncts) => {
+ for it in puncts {
+ parent.token_trees.push(tt::Leaf::Punct(*it).into())
+ }
+ }
+ };
+ }
+ }
+ }
+ }
+ Op::Subtree { tokens, delimiter } => {
+ let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
+ tokens.iter().for_each(|it| {
+ collect_from_op(it, &mut subtree, seed);
+ });
+ parent.token_trees.push(subtree.into());
+ }
+ Op::Ignore { .. } | Op::Index { .. } => {}
+ };
+
+    // Simple linear congruential generator for deterministic results
+ fn rand(seed: &mut usize) -> usize {
+ let a = 1664525;
+ let c = 1013904223;
+ *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
+ *seed
+ }
+ fn make_ident(ident: &str) -> tt::TokenTree {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
+ .into()
+ }
+ fn make_punct(char: char) -> tt::TokenTree {
+ tt::Leaf::Punct(tt::Punct {
+ id: tt::TokenId::unspecified(),
+ char,
+ spacing: tt::Spacing::Alone,
+ })
+ .into()
+ }
+ fn make_literal(lit: &str) -> tt::TokenTree {
+ tt::Leaf::Literal(tt::Literal {
+ id: tt::TokenId::unspecified(),
+ text: SmolStr::new(lit),
+ })
+ .into()
+ }
+ fn make_subtree(
+ kind: tt::DelimiterKind,
+ token_trees: Option<Vec<tt::TokenTree>>,
+ ) -> tt::TokenTree {
+ tt::Subtree {
+ delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
+ token_trees: token_trees.unwrap_or_default(),
+ }
+ .into()
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
new file mode 100644
index 000000000..1e1bfa550
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -0,0 +1,121 @@
+//! This module takes a (parsed) definition of `macro_rules` invocation, a
+//! `tt::TokenTree` representing an argument of macro invocation, and produces a
+//! `tt::TokenTree` for the result of the expansion.
+
+mod matcher;
+mod transcriber;
+
+use rustc_hash::FxHashMap;
+use syntax::SmolStr;
+
+use crate::{ExpandError, ExpandResult};
+
+pub(crate) fn expand_rules(
+ rules: &[crate::Rule],
+ input: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
+ for rule in rules {
+ let new_match = matcher::match_(&rule.lhs, input);
+
+ if new_match.err.is_none() {
+ // If we find a rule that applies without errors, we're done.
+ // Unconditionally returning the transcription here makes the
+ // `test_repeat_bad_var` test fail.
+ let ExpandResult { value, err: transcribe_err } =
+ transcriber::transcribe(&rule.rhs, &new_match.bindings);
+ if transcribe_err.is_none() {
+ return ExpandResult::ok(value);
+ }
+ }
+        // Prefer this rule if it left fewer tokens unmatched, or bound more variables, than the previous best.
+ if let Some((prev_match, _)) = &match_ {
+ if (new_match.unmatched_tts, -(new_match.bound_count as i32))
+ < (prev_match.unmatched_tts, -(prev_match.bound_count as i32))
+ {
+ match_ = Some((new_match, rule));
+ }
+ } else {
+ match_ = Some((new_match, rule));
+ }
+ }
+ if let Some((match_, rule)) = match_ {
+ // if we got here, there was no match without errors
+ let ExpandResult { value, err: transcribe_err } =
+ transcriber::transcribe(&rule.rhs, &match_.bindings);
+ ExpandResult { value, err: match_.err.or(transcribe_err) }
+ } else {
+ ExpandResult::only_err(ExpandError::NoMatchingRule)
+ }
+}
+
+/// The actual algorithm for expansion is not too hard, but is pretty tricky.
+/// `Bindings` structure is the key to understanding what we are doing here.
+///
+/// On the high level, it stores mapping from meta variables to the bits of
+/// syntax it should be substituted with. For example, if `$e:expr` is matched
+/// with `1 + 1` by macro_rules, the `Binding` will store `$e -> 1 + 1`.
+///
+/// The tricky bit is dealing with repetitions (`$()*`). Consider this example:
+///
+/// ```not_rust
+/// macro_rules! foo {
+/// ($($ i:ident $($ e:expr),*);*) => {
+/// $(fn $ i() { $($ e);*; })*
+/// }
+/// }
+/// foo! { foo 1,2,3; bar 4,5,6 }
+/// ```
+///
+/// Here, the `$i` meta variable is matched first with `foo` and then with
+/// `bar`, and `$e` is matched in turn with `1`, `2`, `3`, `4`, `5`, `6`.
+///
+/// To represent such "multi-mappings", we use a recursive structure: we map
+/// variables not to values, but to *lists* of values or other lists (that is,
+/// to trees).
+///
+/// For the above example, the bindings would store
+///
+/// ```not_rust
+/// i -> [foo, bar]
+/// e -> [[1, 2, 3], [4, 5, 6]]
+/// ```
+///
+/// We construct `Bindings` in the `match_lhs`. The interesting case is
+/// `TokenTree::Repeat`, where we use `push_nested` to create the desired
+/// nesting structure.
+///
+/// The other side of the puzzle is `expand_subtree`, where we use the bindings
+/// to substitute meta variables in the output template. When expanding, we
+/// maintain a `nesting` stack of indices which tells us which occurrence from
+/// the `Bindings` we should take. We push to the stack when we enter a
+/// repetition.
+///
+/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
+/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
+/// many is not a plain `usize`, but a `&[usize]`.
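+///
+/// As a rough sketch of that indexing (illustrative, using the bindings above): the
+/// index path `[1, 2]` applied to `e -> [[1, 2, 3], [4, 5, 6]]` first selects the
+/// second inner list and then its third element, i.e. `6`.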
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+struct Bindings {
+ inner: FxHashMap<SmolStr, Binding>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum Binding {
+ Fragment(Fragment),
+ Nested(Vec<Binding>),
+ Empty,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum Fragment {
+ /// token fragments are just copy-pasted into the output
+ Tokens(tt::TokenTree),
+ /// Expr ast fragments are surrounded with `()` on insertion to preserve
+ /// precedence. Note that this impl is different from the one currently in
+ /// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
+ ///
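+    /// For example (an illustrative sketch): with `$e:expr` bound to `1 + 1`, a
+    /// transcription of `2 * $e` yields `2 * (1 + 1)` rather than `2 * 1 + 1`.
+    ///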
+    /// At one point in time, we tried to use "fake" delimiters here a-la
+ /// proc-macro delimiter=none. As we later discovered, "none" delimiters are
+ /// tricky to handle in the parser, and rustc doesn't handle those either.
+ Expr(tt::TokenTree),
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
new file mode 100644
index 000000000..5020e9aba
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -0,0 +1,914 @@
+//! An NFA-based parser, ported from the rustc mbe parsing code.
+//!
+//! See <https://github.com/rust-lang/rust/blob/70b18bc2cbac4712020019f5bf57c00905373205/compiler/rustc_expand/src/mbe/macro_parser.rs>
+//! Here is a quick intro to how the parser works, copied from rustc:
+//!
+//! A 'position' is a dot in the middle of a matcher, represented here as `·`.
+//! For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
+//!
+//! The parser walks through the input a character at a time, maintaining a list
+//! of threads consistent with the current position in the input string: `cur_items`.
+//!
+//! As it processes them, it fills up `eof_items` with threads that would be valid if
+//! the macro invocation is now over, `bb_items` with threads that are waiting on
+//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
+//! on a particular token. Most of the logic concerns moving the · through the
+//! repetitions indicated by Kleene stars. The rules for moving the · without
+//! consuming any input are called epsilon transitions. It only advances or calls
+//! out to the real Rust parser when no `cur_items` threads remain.
+//!
+//! Example:
+//!
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
+//! next: [· a $( a )* a b]
+//!
+//! - - - Advance over an a. - - -
+//!
+//! Remaining input: a a a b
+//! cur: [a · $( a )* a b]
+//! Descend/Skip (first item).
+//! next: [a $( · a )* a b] [a $( a )* · a b].
+//!
+//! - - - Advance over an a. - - -
+//!
+//! Remaining input: a a b
+//! cur: [a $( a · )* a b] [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
+//!
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
+//!
+//! Remaining input: a b
+//! cur: [a $( a · )* a b] [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
+//!
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
+//!
+//! Remaining input: b
+//! cur: [a $( a · )* a b] [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
+//!
+//! - - - Advance over a b. - - -
+//!
+//! Remaining input: ''
+//! eof: [a $( a )* a b ·]
+//! ```
+
+use std::rc::Rc;
+
+use smallvec::{smallvec, SmallVec};
+use syntax::SmolStr;
+
+use crate::{
+ expander::{Binding, Bindings, ExpandResult, Fragment},
+ parser::{Op, RepeatKind, Separator},
+ tt_iter::TtIter,
+ ExpandError, MetaTemplate,
+};
+
+impl Bindings {
+ fn push_optional(&mut self, name: &SmolStr) {
+        // FIXME: Do we have a better way to represent an empty token?
+        // Insert an empty subtree for an empty token.
+ let tt = tt::Subtree::default().into();
+ self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
+ }
+
+ fn push_empty(&mut self, name: &SmolStr) {
+ self.inner.insert(name.clone(), Binding::Empty);
+ }
+
+ fn bindings(&self) -> impl Iterator<Item = &Binding> {
+ self.inner.values()
+ }
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub(super) struct Match {
+ pub(super) bindings: Bindings,
+ /// We currently just keep the first error and count the rest to compare matches.
+ pub(super) err: Option<ExpandError>,
+ pub(super) err_count: usize,
+ /// How many top-level token trees were left to match.
+ pub(super) unmatched_tts: usize,
+ /// The number of bound variables
+ pub(super) bound_count: usize,
+}
+
+impl Match {
+ fn add_err(&mut self, err: ExpandError) {
+ let prev_err = self.err.take();
+ self.err = prev_err.or(Some(err));
+ self.err_count += 1;
+ }
+}
+
+/// Matching errors are added to the `Match`.
+pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
+ let mut res = match_loop(pattern, input);
+ res.bound_count = count(res.bindings.bindings());
+ return res;
+
+ fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+ bindings
+ .map(|it| match it {
+ Binding::Fragment(_) => 1,
+ Binding::Empty => 1,
+ Binding::Nested(it) => count(it.iter()),
+ })
+ .sum()
+ }
+}
+
+#[derive(Debug, Clone)]
+enum BindingKind {
+ Empty(SmolStr),
+ Optional(SmolStr),
+ Fragment(SmolStr, Fragment),
+ Nested(usize, usize),
+}
+
+#[derive(Debug, Clone)]
+struct BindingsIdx(usize, usize);
+
+#[derive(Debug, Clone)]
+enum LinkNode<T> {
+ Node(T),
+ Parent { idx: usize, len: usize },
+}
+
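+/// Builder for `Bindings`: binding entries are stored in an arena of link nodes so
+/// that in-progress match states can be copied cheaply while sharing structure.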
+#[derive(Default)]
+struct BindingsBuilder {
+ nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+ nested: Vec<Vec<LinkNode<usize>>>,
+}
+
+impl BindingsBuilder {
+ fn alloc(&mut self) -> BindingsIdx {
+ let idx = self.nodes.len();
+ self.nodes.push(Vec::new());
+ let nidx = self.nested.len();
+ self.nested.push(Vec::new());
+ BindingsIdx(idx, nidx)
+ }
+
+ fn copy(&mut self, bindings: &BindingsIdx) -> BindingsIdx {
+ let idx = copy_parent(bindings.0, &mut self.nodes);
+ let nidx = copy_parent(bindings.1, &mut self.nested);
+ return BindingsIdx(idx, nidx);
+
+ fn copy_parent<T>(idx: usize, target: &mut Vec<Vec<LinkNode<T>>>) -> usize
+ where
+ T: Clone,
+ {
+ let new_idx = target.len();
+ let len = target[idx].len();
+ if len < 4 {
+ target.push(target[idx].clone())
+ } else {
+ target.push(vec![LinkNode::Parent { idx, len }]);
+ }
+ new_idx
+ }
+ }
+
+ fn push_empty(&mut self, idx: &mut BindingsIdx, var: &SmolStr) {
+ self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Empty(var.clone()))));
+ }
+
+ fn push_optional(&mut self, idx: &mut BindingsIdx, var: &SmolStr) {
+ self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
+ }
+
+ fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
+ self.nodes[idx.0]
+ .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
+ }
+
+ fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) {
+ let BindingsIdx(idx, nidx) = self.copy(child);
+ self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx))));
+ }
+
+ fn push_default(&mut self, idx: &mut BindingsIdx) {
+ self.nested[idx.1].push(LinkNode::Node(idx.0));
+ let new_idx = self.nodes.len();
+ self.nodes.push(Vec::new());
+ idx.0 = new_idx;
+ }
+
+ fn build(self, idx: &BindingsIdx) -> Bindings {
+ let mut bindings = Bindings::default();
+ self.build_inner(&mut bindings, &self.nodes[idx.0]);
+ bindings
+ }
+
+ fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
+ let mut nodes = Vec::new();
+ self.collect_nodes(link_nodes, &mut nodes);
+
+ for cmd in nodes {
+ match &**cmd {
+ BindingKind::Empty(name) => {
+ bindings.push_empty(name);
+ }
+ BindingKind::Optional(name) => {
+ bindings.push_optional(name);
+ }
+ BindingKind::Fragment(name, fragment) => {
+ bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone()));
+ }
+ BindingKind::Nested(idx, nested_idx) => {
+ let mut nested_nodes = Vec::new();
+ self.collect_nested(*idx, *nested_idx, &mut nested_nodes);
+
+ for (idx, iter) in nested_nodes.into_iter().enumerate() {
+ for (key, value) in &iter.inner {
+ let bindings = bindings
+ .inner
+ .entry(key.clone())
+ .or_insert_with(|| Binding::Nested(Vec::new()));
+
+ if let Binding::Nested(it) = bindings {
+ // insert empty nested bindings before this one
+ while it.len() < idx {
+ it.push(Binding::Nested(Vec::new()));
+ }
+ it.push(value.clone());
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_nested_ref<'a>(
+ &'a self,
+ id: usize,
+ len: usize,
+ nested_refs: &mut Vec<&'a Vec<LinkNode<Rc<BindingKind>>>>,
+ ) {
+ self.nested[id].iter().take(len).for_each(|it| match it {
+ LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
+ LinkNode::Parent { idx, len } => self.collect_nested_ref(*idx, *len, nested_refs),
+ });
+ }
+
+ fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+ let last = &self.nodes[idx];
+ let mut nested_refs = Vec::new();
+ self.nested[nested_idx].iter().for_each(|it| match *it {
+ LinkNode::Node(idx) => nested_refs.push(&self.nodes[idx]),
+ LinkNode::Parent { idx, len } => self.collect_nested_ref(idx, len, &mut nested_refs),
+ });
+ nested_refs.push(last);
+
+ nested_refs.into_iter().for_each(|iter| {
+ let mut child_bindings = Bindings::default();
+ self.build_inner(&mut child_bindings, iter);
+ nested.push(child_bindings)
+ })
+ }
+
+ fn collect_nodes_ref<'a>(
+ &'a self,
+ id: usize,
+ len: usize,
+ nodes: &mut Vec<&'a Rc<BindingKind>>,
+ ) {
+ self.nodes[id].iter().take(len).for_each(|it| match it {
+ LinkNode::Node(it) => nodes.push(it),
+ LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
+ });
+ }
+
+ fn collect_nodes<'a>(
+ &'a self,
+ link_nodes: &'a [LinkNode<Rc<BindingKind>>],
+ nodes: &mut Vec<&'a Rc<BindingKind>>,
+ ) {
+ link_nodes.iter().for_each(|it| match it {
+ LinkNode::Node(it) => nodes.push(it),
+ LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
+ });
+ }
+}
+
+#[derive(Debug, Clone)]
+struct MatchState<'t> {
+ /// The position of the "dot" in this matcher
+ dot: OpDelimitedIter<'t>,
+
+ /// Token subtree stack
+ /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
+ /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
+ /// that where the bottom of the stack is the outermost matcher.
+ stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
+
+ /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
+ /// before we enter the repetition.
+ up: Option<Box<MatchState<'t>>>,
+
+ /// The separator if we are in a repetition.
+ sep: Option<Separator>,
+
+ /// The KleeneOp of this sequence if we are in a repetition.
+ sep_kind: Option<RepeatKind>,
+
+    /// Number of separator tokens parsed so far
+ sep_parsed: Option<usize>,
+
+ /// Matched meta variables bindings
+ bindings: BindingsIdx,
+
+ /// Cached result of meta variable parsing
+ meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
+
+    /// Whether an error occurred in this state; if so, it "poisons" the parent state.
+ is_error: bool,
+}
+
+/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
+/// produce more items in `next_items`, `eof_items`, and `bb_items`.
+///
+/// For more info about how this happens, see the module-level doc comments and the inline
+/// comments of this function.
+///
+/// # Parameters
+///
+/// - `src`: the current token of the parser.
+/// - `stack`: the "parent" frames of the token tree
+/// - `res`: the match result to store errors
+/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
+/// successful execution of this function.
+/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
+/// the function `parse`.
+/// - `eof_items`: the set of items that would be valid if this was the EOF.
+/// - `bb_items`: the set of items that are waiting for the black-box parser.
+/// - `error_items`: the set of items in errors, used for error-resilient parsing
+fn match_loop_inner<'t>(
+ src: TtIter<'t>,
+ stack: &[TtIter<'t>],
+ res: &mut Match,
+ bindings_builder: &mut BindingsBuilder,
+ cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ next_items: &mut Vec<MatchState<'t>>,
+ eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+) {
+ macro_rules! try_push {
+ ($items: expr, $it:expr) => {
+ if $it.is_error {
+ error_items.push($it);
+ } else {
+ $items.push($it);
+ }
+ };
+ }
+
+ while let Some(mut item) = cur_items.pop() {
+ while item.dot.is_eof() {
+ match item.stack.pop() {
+ Some(frame) => {
+ item.dot = frame;
+ item.dot.next();
+ }
+ None => break,
+ }
+ }
+ let op = match item.dot.peek() {
+ None => {
+ // We are at or past the end of the matcher of `item`.
+ if item.up.is_some() {
+ if item.sep_parsed.is_none() {
+ // Get the `up` matcher
+ let mut new_pos = *item.up.clone().unwrap();
+ new_pos.bindings = bindings_builder.copy(&new_pos.bindings);
+ // Add matches from this repetition to the `matches` of `up`
+ bindings_builder.push_nested(&mut new_pos.bindings, &item.bindings);
+
+ // Move the "dot" past the repetition in `up`
+ new_pos.dot.next();
+ new_pos.is_error = new_pos.is_error || item.is_error;
+ cur_items.push(new_pos);
+ }
+
+ // Check if we need a separator.
+ // We check the separator one by one
+ let sep_idx = *item.sep_parsed.as_ref().unwrap_or(&0);
+ let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count);
+ if item.sep.is_some() && sep_idx != sep_len {
+ let sep = item.sep.as_ref().unwrap();
+ if src.clone().expect_separator(sep, sep_idx) {
+ item.dot.next();
+ item.sep_parsed = Some(sep_idx + 1);
+ try_push!(next_items, item);
+ }
+ }
+ // We don't need a separator. Move the "dot" back to the beginning of the matcher
+ // and try to match again UNLESS we are only allowed to have _one_ repetition.
+ else if item.sep_kind != Some(RepeatKind::ZeroOrOne) {
+ item.dot = item.dot.reset();
+ item.sep_parsed = None;
+ bindings_builder.push_default(&mut item.bindings);
+ cur_items.push(item);
+ }
+ } else {
+ // If we are not in a repetition, then being at the end of a matcher means that we have
+ // reached the potential end of the input.
+ try_push!(eof_items, item);
+ }
+ continue;
+ }
+ Some(it) => it,
+ };
+
+ // We are in the middle of a matcher.
+ match op {
+ OpDelimited::Op(Op::Repeat { tokens, kind, separator }) => {
+ if matches!(kind, RepeatKind::ZeroOrMore | RepeatKind::ZeroOrOne) {
+ let mut new_item = item.clone();
+ new_item.bindings = bindings_builder.copy(&new_item.bindings);
+ new_item.dot.next();
+ collect_vars(
+ &mut |s| {
+ bindings_builder.push_empty(&mut new_item.bindings, &s);
+ },
+ tokens,
+ );
+ cur_items.push(new_item);
+ }
+ cur_items.push(MatchState {
+ dot: tokens.iter_delimited(None),
+ stack: Default::default(),
+ up: Some(Box::new(item)),
+ sep: separator.clone(),
+ sep_kind: Some(*kind),
+ sep_parsed: None,
+ bindings: bindings_builder.alloc(),
+ meta_result: None,
+ is_error: false,
+ })
+ }
+ OpDelimited::Op(Op::Subtree { tokens, delimiter }) => {
+ if let Ok(subtree) = src.clone().expect_subtree() {
+ if subtree.delimiter_kind() == delimiter.map(|it| it.kind) {
+ item.stack.push(item.dot);
+ item.dot = tokens.iter_delimited(delimiter.as_ref());
+ cur_items.push(item);
+ }
+ }
+ }
+ OpDelimited::Op(Op::Var { kind, name, .. }) => {
+ if let Some(kind) = kind {
+ let mut fork = src.clone();
+ let match_res = match_meta_var(kind.as_str(), &mut fork);
+ match match_res.err {
+ None => {
+ // Some meta variables are optional (e.g. vis)
+ if match_res.value.is_some() {
+ item.meta_result = Some((fork, match_res));
+ try_push!(bb_items, item);
+ } else {
+ bindings_builder.push_optional(&mut item.bindings, name);
+ item.dot.next();
+ cur_items.push(item);
+ }
+ }
+ Some(err) => {
+ res.add_err(err);
+ if let Some(fragment) = match_res.value {
+ bindings_builder.push_fragment(&mut item.bindings, name, fragment);
+ }
+ item.is_error = true;
+ error_items.push(item);
+ }
+ }
+ }
+ }
+ OpDelimited::Op(Op::Leaf(leaf)) => {
+ if let Err(err) = match_leaf(leaf, &mut src.clone()) {
+ res.add_err(err);
+ item.is_error = true;
+ } else {
+ item.dot.next();
+ }
+ try_push!(next_items, item);
+ }
+ OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. }) => {}
+ OpDelimited::Open => {
+ if matches!(src.clone().next(), Some(tt::TokenTree::Subtree(..))) {
+ item.dot.next();
+ try_push!(next_items, item);
+ }
+ }
+ OpDelimited::Close => {
+ let is_delim_closed = src.peek_n(0).is_none() && !stack.is_empty();
+ if is_delim_closed {
+ item.dot.next();
+ try_push!(next_items, item);
+ }
+ }
+ }
+ }
+}
+
+fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
+ let mut src = TtIter::new(src);
+ let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
+ let mut res = Match::default();
+ let mut error_recover_item = None;
+
+ let mut bindings_builder = BindingsBuilder::default();
+
+ let mut cur_items = smallvec![MatchState {
+ dot: pattern.iter_delimited(None),
+ stack: Default::default(),
+ up: None,
+ sep: None,
+ sep_kind: None,
+ sep_parsed: None,
+ bindings: bindings_builder.alloc(),
+ is_error: false,
+ meta_result: None,
+ }];
+
+ let mut next_items = vec![];
+
+ loop {
+ let mut bb_items = SmallVec::new();
+ let mut eof_items = SmallVec::new();
+ let mut error_items = SmallVec::new();
+
+ stdx::always!(next_items.is_empty());
+
+ match_loop_inner(
+ src.clone(),
+ &stack,
+ &mut res,
+ &mut bindings_builder,
+ &mut cur_items,
+ &mut bb_items,
+ &mut next_items,
+ &mut eof_items,
+ &mut error_items,
+ );
+ stdx::always!(cur_items.is_empty());
+
+ if !error_items.is_empty() {
+ error_recover_item = error_items.pop().map(|it| it.bindings);
+ } else if let [state, ..] = &*eof_items {
+ error_recover_item = Some(state.bindings.clone());
+ }
+
+ // We need to do some post processing after the `match_loop_inner`.
+ // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
+ // either the parse is ambiguous (which should never happen) or there is a syntax error.
+ if src.peek_n(0).is_none() && stack.is_empty() {
+ if let [state] = &*eof_items {
+                // Remove all errors, because this is the correct answer!
+ res = Match::default();
+ res.bindings = bindings_builder.build(&state.bindings);
+ } else {
+ // Error recovery
+ if let Some(item) = error_recover_item {
+ res.bindings = bindings_builder.build(&item);
+ }
+ res.add_err(ExpandError::UnexpectedToken);
+ }
+ return res;
+ }
+
+ // If there are no possible next positions AND we aren't waiting for the black-box parser,
+ // then there is a syntax error.
+ //
+ // Another possibility is that we need to call out to parse some rust nonterminal
+ // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
+ let has_leftover_tokens = (bb_items.is_empty() && next_items.is_empty())
+ || !(bb_items.is_empty() || next_items.is_empty())
+ || bb_items.len() > 1;
+ if has_leftover_tokens {
+ res.unmatched_tts += src.len();
+ while let Some(it) = stack.pop() {
+ src = it;
+ res.unmatched_tts += src.len();
+ }
+ res.add_err(ExpandError::LeftoverTokens);
+
+            if let Some(error_recover_item) = error_recover_item {
+                res.bindings = bindings_builder.build(&error_recover_item);
+ }
+ return res;
+ }
+ // Dump all possible `next_items` into `cur_items` for the next iteration.
+ else if !next_items.is_empty() {
+ // Now process the next token
+ cur_items.extend(next_items.drain(..));
+
+ match src.next() {
+ Some(tt::TokenTree::Subtree(subtree)) => {
+ stack.push(src.clone());
+ src = TtIter::new(subtree);
+ }
+ None => {
+ if let Some(iter) = stack.pop() {
+ src = iter;
+ }
+ }
+ _ => (),
+ }
+ }
+ // Finally, we have the case where we need to call the black-box parser to get some
+ // nonterminal.
+ else {
+ stdx::always!(bb_items.len() == 1);
+ let mut item = bb_items.pop().unwrap();
+
+ if let Some(OpDelimited::Op(Op::Var { name, .. })) = item.dot.peek() {
+ let (iter, match_res) = item.meta_result.take().unwrap();
+ match match_res.value {
+ Some(fragment) => {
+ bindings_builder.push_fragment(&mut item.bindings, name, fragment);
+ }
+ None if match_res.err.is_none() => {
+ bindings_builder.push_optional(&mut item.bindings, name);
+ }
+ None => {}
+ }
+ if let Some(err) = match_res.err {
+ res.add_err(err);
+ }
+ src = iter.clone();
+ item.dot.next();
+ } else {
+ unreachable!()
+ }
+ cur_items.push(item);
+ }
+ stdx::always!(!cur_items.is_empty());
+ }
+}
+
+fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter<'_>) -> Result<(), ExpandError> {
+ let rhs = src
+ .expect_leaf()
+ .map_err(|()| ExpandError::binding_error(format!("expected leaf: `{lhs}`")))?;
+ match (lhs, rhs) {
+ (
+ tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
+ tt::Leaf::Punct(tt::Punct { char: rhs, .. }),
+ ) if lhs == rhs => Ok(()),
+ (
+ tt::Leaf::Ident(tt::Ident { text: lhs, .. }),
+ tt::Leaf::Ident(tt::Ident { text: rhs, .. }),
+ ) if lhs == rhs => Ok(()),
+ (
+ tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
+ tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
+ ) if lhs == rhs => Ok(()),
+ _ => Err(ExpandError::UnexpectedToken),
+ }
+}
+
+fn match_meta_var(kind: &str, input: &mut TtIter<'_>) -> ExpandResult<Option<Fragment>> {
+ let fragment = match kind {
+ "path" => parser::PrefixEntryPoint::Path,
+ "ty" => parser::PrefixEntryPoint::Ty,
+ // FIXME: These two should actually behave differently depending on the edition.
+ //
+ // https://doc.rust-lang.org/edition-guide/rust-2021/or-patterns-macro-rules.html
+ "pat" | "pat_param" => parser::PrefixEntryPoint::Pat,
+ "stmt" => parser::PrefixEntryPoint::Stmt,
+ "block" => parser::PrefixEntryPoint::Block,
+ "meta" => parser::PrefixEntryPoint::MetaItem,
+ "item" => parser::PrefixEntryPoint::Item,
+ "vis" => parser::PrefixEntryPoint::Vis,
+ "expr" => {
+ // `expr` should not match underscores.
+ // HACK: Macro expansion should not be done using "rollback and try another alternative".
+ // rustc [explicitly checks the next token][0].
+ // [0]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576
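+            //
+            // For example (illustrative): given `macro_rules! m { ($e:expr) => {} }`,
+            // the invocation `m!(_)` must fail to match rather than binding `$e` to `_`.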
+ match input.peek_n(0) {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) if it.text == "_" => {
+ return ExpandResult::only_err(ExpandError::NoMatchingRule)
+ }
+ _ => {}
+ };
+ return input
+ .expect_fragment(parser::PrefixEntryPoint::Expr)
+ .map(|tt| tt.map(Fragment::Expr));
+ }
+ _ => {
+ let tt_result = match kind {
+ "ident" => input
+ .expect_ident()
+ .map(|ident| tt::Leaf::from(ident.clone()).into())
+ .map_err(|()| ExpandError::binding_error("expected ident")),
+ "tt" => input
+ .expect_tt()
+ .map_err(|()| ExpandError::binding_error("expected token tree")),
+ "lifetime" => input
+ .expect_lifetime()
+ .map_err(|()| ExpandError::binding_error("expected lifetime")),
+ "literal" => {
+ let neg = input.eat_char('-');
+ input
+ .expect_literal()
+ .map(|literal| {
+ let lit = literal.clone();
+ match neg {
+ None => lit.into(),
+ Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: None,
+ token_trees: vec![neg, lit.into()],
+ }),
+ }
+ })
+ .map_err(|()| ExpandError::binding_error("expected literal"))
+ }
+ _ => Err(ExpandError::UnexpectedToken),
+ };
+ return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
+ }
+ };
+ input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
+}
+
+fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
+ for op in pattern.iter() {
+ match op {
+ Op::Var { name, .. } => collector_fun(name.clone()),
+ Op::Leaf(_) => (),
+ Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens),
+ Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens),
+ Op::Ignore { .. } | Op::Index { .. } => {}
+ }
+ }
+}
+impl MetaTemplate {
+ fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
+ OpDelimitedIter { inner: &self.0, idx: 0, delimited }
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+enum OpDelimited<'a> {
+ Op(&'a Op),
+ Open,
+ Close,
+}
+
+#[derive(Debug, Clone, Copy)]
+struct OpDelimitedIter<'a> {
+ inner: &'a [Op],
+ delimited: Option<&'a tt::Delimiter>,
+ idx: usize,
+}
+
+impl<'a> OpDelimitedIter<'a> {
+ fn is_eof(&self) -> bool {
+ let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
+ self.idx >= len
+ }
+
+ fn peek(&self) -> Option<OpDelimited<'a>> {
+ match self.delimited {
+ None => self.inner.get(self.idx).map(OpDelimited::Op),
+ Some(_) => match self.idx {
+ 0 => Some(OpDelimited::Open),
+ i if i == self.inner.len() + 1 => Some(OpDelimited::Close),
+ i => self.inner.get(i - 1).map(OpDelimited::Op),
+ },
+ }
+ }
+
+ fn reset(&self) -> Self {
+ Self { inner: self.inner, idx: 0, delimited: self.delimited }
+ }
+}
+
+impl<'a> Iterator for OpDelimitedIter<'a> {
+ type Item = OpDelimited<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let res = self.peek();
+ self.idx += 1;
+ res
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
+ let remain = len.saturating_sub(self.idx);
+ (remain, Some(remain))
+ }
+}
+
+impl<'a> TtIter<'a> {
+ fn expect_separator(&mut self, separator: &Separator, idx: usize) -> bool {
+ let mut fork = self.clone();
+ let ok = match separator {
+ Separator::Ident(lhs) if idx == 0 => match fork.expect_ident_or_underscore() {
+ Ok(rhs) => rhs.text == lhs.text,
+ Err(_) => false,
+ },
+ Separator::Literal(lhs) if idx == 0 => match fork.expect_literal() {
+ Ok(rhs) => match rhs {
+ tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
+ tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
+ tt::Leaf::Punct(_) => false,
+ },
+ Err(_) => false,
+ },
+ Separator::Puncts(lhss) if idx < lhss.len() => match fork.expect_punct() {
+ Ok(rhs) => rhs.char == lhss[idx].char,
+ Err(_) => false,
+ },
+ _ => false,
+ };
+ if ok {
+ *self = fork;
+ }
+ ok
+ }
+
+ fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+ match self.peek_n(0) {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
+ return self.expect_lifetime();
+ }
+ _ => (),
+ }
+
+ let tt = self.next().ok_or(())?.clone();
+ let punct = match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
+ punct
+ }
+ _ => return Ok(tt),
+ };
+
+ let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
+ (
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))),
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))),
+ ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)),
+ (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None),
+ _ => return Ok(tt),
+ };
+
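+        // Glue joint punctuation back together (e.g. `::`, `..=`, `>>=`) so that a
+        // `$x:tt` fragment captures the whole operator as a single token tree.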
+ match (punct.char, second, third) {
+ ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => {
+ let tt2 = self.next().unwrap().clone();
+ let tt3 = self.next().unwrap().clone();
+ Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into())
+ }
+ ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
+ | ('-' | '=' | '>', '>', _)
+ | (':', ':', _)
+ | ('.', '.', _)
+ | ('&', '&', _)
+ | ('<', '<', _)
+ | ('|', '|', _) => {
+ let tt2 = self.next().unwrap().clone();
+ Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into())
+ }
+ _ => Ok(tt),
+ }
+ }
+
+ fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+ let punct = self.expect_punct()?;
+ if punct.char != '\'' {
+ return Err(());
+ }
+ let ident = self.expect_ident_or_underscore()?;
+
+ Ok(tt::Subtree {
+ delimiter: None,
+ token_trees: vec![
+ tt::Leaf::Punct(*punct).into(),
+ tt::Leaf::Ident(ident.clone()).into(),
+ ],
+ }
+ .into())
+ }
+
+ fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
+ let mut fork = self.clone();
+ match fork.expect_char(c) {
+ Ok(_) => {
+ let tt = self.next().cloned();
+ *self = fork;
+ tt
+ }
+ Err(_) => None,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
new file mode 100644
index 000000000..7bcc84740
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -0,0 +1,272 @@
+//! Transcriber takes a template, like `fn $ident() {}`, and a set of bindings like
+//! `$ident => foo`, and interpolates the variables in the template to get `fn foo() {}`.
+
+use syntax::SmolStr;
+use tt::{Delimiter, Subtree};
+
+use crate::{
+ expander::{Binding, Bindings, Fragment},
+ parser::{Op, RepeatKind, Separator},
+ ExpandError, ExpandResult, MetaTemplate,
+};
+
+impl Bindings {
+ fn contains(&self, name: &str) -> bool {
+ self.inner.contains_key(name)
+ }
+
+ fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> {
+ macro_rules! binding_err {
+ ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
+ }
+
+ let mut b: &Binding =
+ self.inner.get(name).ok_or_else(|| binding_err!("could not find binding `{name}`"))?;
+ for nesting_state in nesting.iter_mut() {
+ nesting_state.hit = true;
+ b = match b {
+ Binding::Fragment(_) => break,
+ Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| {
+ nesting_state.at_end = true;
+ binding_err!("could not find nested binding `{name}`")
+ })?,
+ Binding::Empty => {
+ nesting_state.at_end = true;
+ return Err(binding_err!("could not find empty binding `{name}`"));
+ }
+ };
+ }
+ match b {
+ Binding::Fragment(it) => Ok(it),
+ Binding::Nested(_) => {
+ Err(binding_err!("expected simple binding, found nested binding `{name}`"))
+ }
+ Binding::Empty => {
+ Err(binding_err!("expected simple binding, found empty binding `{name}`"))
+ }
+ }
+ }
+}
+
+pub(super) fn transcribe(
+ template: &MetaTemplate,
+ bindings: &Bindings,
+) -> ExpandResult<tt::Subtree> {
+ let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
+ let mut arena: Vec<tt::TokenTree> = Vec::new();
+ expand_subtree(&mut ctx, template, None, &mut arena)
+}
+
+#[derive(Debug)]
+struct NestingState {
+ idx: usize,
+ /// `hit` is currently necessary to tell `expand_repeat` if it should stop
+ /// because there is no variable in use by the current repetition
+ hit: bool,
+ /// `at_end` is currently necessary to tell `expand_repeat` if it should stop
+ /// because there is no more value available for the current repetition
+ at_end: bool,
+}
+
+#[derive(Debug)]
+struct ExpandCtx<'a> {
+ bindings: &'a Bindings,
+ nesting: Vec<NestingState>,
+}
+
+fn expand_subtree(
+ ctx: &mut ExpandCtx<'_>,
+ template: &MetaTemplate,
+ delimiter: Option<Delimiter>,
+ arena: &mut Vec<tt::TokenTree>,
+) -> ExpandResult<tt::Subtree> {
+    // Remember how many elements are in the arena now - when returning, we want to
+    // drain exactly how many elements we added. This way, the recursive uses of the
+    // arena get their own "view" of the arena, but will reuse the allocation.
+ let start_elements = arena.len();
+ let mut err = None;
+ for op in template.iter() {
+ match op {
+ Op::Leaf(tt) => arena.push(tt.clone().into()),
+ Op::Subtree { tokens, delimiter } => {
+ let ExpandResult { value: tt, err: e } =
+ expand_subtree(ctx, tokens, *delimiter, arena);
+ err = err.or(e);
+ arena.push(tt.into());
+ }
+ Op::Var { name, id, .. } => {
+ let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
+ err = err.or(e);
+ push_fragment(arena, fragment);
+ }
+ Op::Repeat { tokens: subtree, kind, separator } => {
+ let ExpandResult { value: fragment, err: e } =
+ expand_repeat(ctx, subtree, *kind, separator, arena);
+ err = err.or(e);
+ push_fragment(arena, fragment)
+ }
+ Op::Ignore { name, id } => {
+ // Expand the variable, but ignore the result. This registers the repetition count.
+ expand_var(ctx, name, *id);
+ }
+ Op::Index { depth } => {
+ let index = ctx
+ .nesting
+ .get(ctx.nesting.len() - 1 - (*depth as usize))
+ .map_or(0, |nest| nest.idx);
+ arena.push(
+ tt::Leaf::Literal(tt::Literal {
+ text: index.to_string().into(),
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ }
+ }
+ }
+ // drain the elements added in this instance of expand_subtree
+ let tts = arena.drain(start_elements..).collect();
+ ExpandResult { value: tt::Subtree { delimiter, token_trees: tts }, err }
+}
+
+fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
+    // We already handle the `$crate` case in the mbe parser.
+ debug_assert!(v != "crate");
+
+ if !ctx.bindings.contains(v) {
+ // Note that it is possible to have a `$var` inside a macro which is not bound.
+ // For example:
+ // ```
+ // macro_rules! foo {
+ // ($a:ident, $b:ident, $c:tt) => {
+ // macro_rules! bar {
+ // ($bi:ident) => {
+ // fn $bi() -> u8 {$c}
+ // }
+ // }
+ // }
+ // ```
+        // We just treat it as normal tokens.
+ let tt = tt::Subtree {
+ delimiter: None,
+ token_trees: vec![
+ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, id }).into(),
+ tt::Leaf::from(tt::Ident { text: v.clone(), id }).into(),
+ ],
+ }
+ .into();
+ ExpandResult::ok(Fragment::Tokens(tt))
+ } else {
+ ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
+ |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
+ |b| ExpandResult::ok(b.clone()),
+ )
+ }
+}
+
+fn expand_repeat(
+ ctx: &mut ExpandCtx<'_>,
+ template: &MetaTemplate,
+ kind: RepeatKind,
+ separator: &Option<Separator>,
+ arena: &mut Vec<tt::TokenTree>,
+) -> ExpandResult<Fragment> {
+ let mut buf: Vec<tt::TokenTree> = Vec::new();
+ ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
+ // Dirty hack to make macro-expansion terminate.
+ // This should be replaced by a proper macro-by-example implementation
+ let limit = 65536;
+ let mut has_seps = 0;
+ let mut counter = 0;
+
+ loop {
+ let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+ let nesting_state = ctx.nesting.last_mut().unwrap();
+ if nesting_state.at_end || !nesting_state.hit {
+ break;
+ }
+ nesting_state.idx += 1;
+ nesting_state.hit = false;
+
+ counter += 1;
+ if counter == limit {
+ tracing::warn!(
+ "expand_tt in repeat pattern exceed limit => {:#?}\n{:#?}",
+ template,
+ ctx
+ );
+ return ExpandResult {
+ value: Fragment::Tokens(Subtree::default().into()),
+ err: Some(ExpandError::LimitExceeded),
+ };
+ }
+
+ if e.is_some() {
+ continue;
+ }
+
+ t.delimiter = None;
+ push_subtree(&mut buf, t);
+
+ if let Some(sep) = separator {
+ has_seps = match sep {
+ Separator::Ident(ident) => {
+ buf.push(tt::Leaf::from(ident.clone()).into());
+ 1
+ }
+ Separator::Literal(lit) => {
+ buf.push(tt::Leaf::from(lit.clone()).into());
+ 1
+ }
+ Separator::Puncts(puncts) => {
+ for &punct in puncts {
+ buf.push(tt::Leaf::from(punct).into());
+ }
+ puncts.len()
+ }
+ };
+ }
+
+ if RepeatKind::ZeroOrOne == kind {
+ break;
+ }
+ }
+
+ ctx.nesting.pop().unwrap();
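+    // Drop the trailing separator emitted after the last repetition.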
+ for _ in 0..has_seps {
+ buf.pop();
+ }
+
+ // Check if it is a single token subtree without any delimiter
+    // e.g. `Subtree { delimiter: None, token_trees: ['>'] }`
+ let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
+
+ if RepeatKind::OneOrMore == kind && counter == 0 {
+ return ExpandResult {
+ value: Fragment::Tokens(tt),
+ err: Some(ExpandError::UnexpectedToken),
+ };
+ }
+ ExpandResult::ok(Fragment::Tokens(tt))
+}
+
+fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+ match fragment {
+ Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
+ Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
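+            // Wrap a bare expression fragment in parentheses so it stays a single unit
+            // when spliced into the output.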
+ if tt.delimiter.is_none() {
+ tt.delimiter = Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ })
+ }
+ buf.push(tt.into())
+ }
+ Fragment::Tokens(tt) | Fragment::Expr(tt) => buf.push(tt),
+ }
+}
+
+fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
+ match tt.delimiter {
+ None => buf.extend(tt.token_trees),
+ Some(_) => buf.push(tt.into()),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
new file mode 100644
index 000000000..79da84f4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -0,0 +1,352 @@
+//! The `mbe` (short for Macro By Example) crate contains code for handling
+//! `macro_rules` macros. It uses `TokenTree` (from `tt` package) as the
+//! interface, although it contains some code to bridge `SyntaxNode`s and
+//! `TokenTree`s as well!
+//!
+//! The tests for this functionality live in another crate:
+//! `hir_def::macro_expansion_tests::mbe`.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod parser;
+mod expander;
+mod syntax_bridge;
+mod tt_iter;
+mod to_parser_input;
+
+#[cfg(test)]
+mod benchmark;
+mod token_map;
+
+use std::fmt;
+
+use crate::{
+ parser::{MetaTemplate, Op},
+ tt_iter::TtIter,
+};
+
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
+pub use ::parser::TopEntryPoint;
+pub use tt::{Delimiter, DelimiterKind, Punct};
+
+pub use crate::{
+ syntax_bridge::{
+ parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+ syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
+ SyntheticTokenId,
+ },
+ token_map::TokenMap,
+};
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ParseError {
+ UnexpectedToken(Box<str>),
+ Expected(Box<str>),
+ InvalidRepeat,
+ RepetitionEmptyTokenTree,
+}
+
+impl ParseError {
+ fn expected(e: &str) -> ParseError {
+ ParseError::Expected(e.into())
+ }
+
+ fn unexpected(e: &str) -> ParseError {
+ ParseError::UnexpectedToken(e.into())
+ }
+}
+
+impl fmt::Display for ParseError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ParseError::UnexpectedToken(it) => f.write_str(it),
+ ParseError::Expected(it) => f.write_str(it),
+ ParseError::InvalidRepeat => f.write_str("invalid repeat"),
+ ParseError::RepetitionEmptyTokenTree => f.write_str("empty token tree in repetition"),
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+ BindingError(Box<Box<str>>),
+ LeftoverTokens,
+ ConversionError,
+ LimitExceeded,
+ NoMatchingRule,
+ UnexpectedToken,
+}
+
+impl ExpandError {
+ fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
+ ExpandError::BindingError(Box::new(e.into()))
+ }
+}
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
+ ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
+ ExpandError::BindingError(e) => f.write_str(e),
+ ExpandError::ConversionError => f.write_str("could not convert tokens"),
+            ExpandError::LimitExceeded => f.write_str("expansion exceeded limit"),
+ ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
+ }
+ }
+}
+
+/// This struct contains the AST for a single `macro_rules` definition. What might
+/// be very confusing is that this AST has almost exactly the same shape as
+/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
+/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DeclarativeMacro {
+ rules: Vec<Rule>,
+ /// Highest id of the token we have in TokenMap
+ shift: Shift,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+struct Rule {
+ lhs: MetaTemplate,
+ rhs: MetaTemplate,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Shift(u32);
+
+impl Shift {
+ pub fn new(tt: &tt::Subtree) -> Shift {
+        // Note that TokenId starts from zero,
+        // so we have to add 1 to prevent duplication.
+ let value = max_id(tt).map_or(0, |it| it + 1);
+ return Shift(value);
+
+ // Find the max token id inside a subtree
+ fn max_id(subtree: &tt::Subtree) -> Option<u32> {
+ let filter = |tt: &_| match tt {
+ tt::TokenTree::Subtree(subtree) => {
+ let tree_id = max_id(subtree);
+ match subtree.delimiter {
+ Some(it) if it.id != tt::TokenId::unspecified() => {
+ Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
+ }
+ _ => tree_id,
+ }
+ }
+ tt::TokenTree::Leaf(leaf) => {
+ let &(tt::Leaf::Ident(tt::Ident { id, .. })
+ | tt::Leaf::Punct(tt::Punct { id, .. })
+ | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;
+
+ (id != tt::TokenId::unspecified()).then(|| id.0)
+ }
+ };
+ subtree.token_trees.iter().filter_map(filter).max()
+ }
+ }
+
+ /// Shift given TokenTree token id
+ pub fn shift_all(self, tt: &mut tt::Subtree) {
+ for t in &mut tt.token_trees {
+ match t {
+ tt::TokenTree::Leaf(
+ tt::Leaf::Ident(tt::Ident { id, .. })
+ | tt::Leaf::Punct(tt::Punct { id, .. })
+ | tt::Leaf::Literal(tt::Literal { id, .. }),
+ ) => *id = self.shift(*id),
+ tt::TokenTree::Subtree(tt) => {
+ if let Some(it) = tt.delimiter.as_mut() {
+ it.id = self.shift(it.id);
+ }
+ self.shift_all(tt)
+ }
+ }
+ }
+ }
+
+ pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
+ if id == tt::TokenId::unspecified() {
+ id
+ } else {
+ tt::TokenId(id.0 + self.0)
+ }
+ }
+
+ pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
+ id.0.checked_sub(self.0).map(tt::TokenId)
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub enum Origin {
+ Def,
+ Call,
+}
+
+impl DeclarativeMacro {
+ /// The old, `macro_rules! m {}` flavor.
+ pub fn parse_macro_rules(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
+ // Note: this parsing can be implemented using mbe machinery itself, by
+ // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
+ // manually seems easier.
+ let mut src = TtIter::new(tt);
+ let mut rules = Vec::new();
+ while src.len() > 0 {
+ let rule = Rule::parse(&mut src, true)?;
+ rules.push(rule);
+ if let Err(()) = src.expect_char(';') {
+ if src.len() > 0 {
+ return Err(ParseError::expected("expected `;`"));
+ }
+ break;
+ }
+ }
+
+ for Rule { lhs, .. } in &rules {
+ validate(lhs)?;
+ }
+
+ Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
+ }
+
+ /// The new, unstable `macro m {}` flavor.
+ pub fn parse_macro2(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
+ let mut src = TtIter::new(tt);
+ let mut rules = Vec::new();
+
+ if Some(tt::DelimiterKind::Brace) == tt.delimiter_kind() {
+ cov_mark::hit!(parse_macro_def_rules);
+ while src.len() > 0 {
+ let rule = Rule::parse(&mut src, true)?;
+ rules.push(rule);
+ if let Err(()) = src.expect_any_char(&[';', ',']) {
+ if src.len() > 0 {
+ return Err(ParseError::expected("expected `;` or `,` to delimit rules"));
+ }
+ break;
+ }
+ }
+ } else {
+ cov_mark::hit!(parse_macro_def_simple);
+ let rule = Rule::parse(&mut src, false)?;
+ if src.len() != 0 {
+ return Err(ParseError::expected("remaining tokens in macro def"));
+ }
+ rules.push(rule);
+ }
+
+ for Rule { lhs, .. } in &rules {
+ validate(lhs)?;
+ }
+
+ Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
+ }
+
+ pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+ // apply shift
+ let mut tt = tt.clone();
+ self.shift.shift_all(&mut tt);
+ expander::expand_rules(&self.rules, &tt)
+ }
+
+ pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+ self.shift.shift(id)
+ }
+
+ pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
+ match self.shift.unshift(id) {
+ Some(id) => (id, Origin::Call),
+ None => (id, Origin::Def),
+ }
+ }
+
+ pub fn shift(&self) -> Shift {
+ self.shift
+ }
+}
+
+impl Rule {
+ fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result<Self, ParseError> {
+ let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
+ if expect_arrow {
+ src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
+ src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?;
+ }
+ let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
+
+ let lhs = MetaTemplate::parse_pattern(lhs)?;
+ let rhs = MetaTemplate::parse_template(rhs)?;
+
+ Ok(crate::Rule { lhs, rhs })
+ }
+}
+
+fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
+ for op in pattern.iter() {
+ match op {
+ Op::Subtree { tokens, .. } => validate(tokens)?,
+ Op::Repeat { tokens: subtree, separator, .. } => {
+                // Check that the repetition cannot match an empty token.
+ // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
+                let lhs_is_empty_seq = separator.is_none() && subtree.iter().all(|child_op| {
+ match child_op {
+ // vis is optional
+ Op::Var { kind: Some(kind), .. } => kind == "vis",
+ Op::Repeat {
+ kind: parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne,
+ ..
+ } => true,
+ _ => false,
+ }
+ });
+                if lhs_is_empty_seq {
+ return Err(ParseError::RepetitionEmptyTokenTree);
+ }
+ validate(subtree)?
+ }
+ _ => (),
+ }
+ }
+ Ok(())
+}
+
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ValueResult<T, E> {
+ pub value: T,
+ pub err: Option<E>,
+}
+
+impl<T, E> ValueResult<T, E> {
+ pub fn ok(value: T) -> Self {
+ Self { value, err: None }
+ }
+
+ pub fn only_err(err: E) -> Self
+ where
+ T: Default,
+ {
+ Self { value: Default::default(), err: Some(err) }
+ }
+
+ pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ValueResult<U, E> {
+ ValueResult { value: f(self.value), err: self.err }
+ }
+
+ pub fn map_err<E2>(self, f: impl FnOnce(E) -> E2) -> ValueResult<T, E2> {
+ ValueResult { value: self.value, err: self.err.map(f) }
+ }
+
+ pub fn result(self) -> Result<T, E> {
+ self.err.map_or(Ok(self.value), Err)
+ }
+}
+
+impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
+ fn from(result: Result<T, E>) -> Self {
+ result.map_or_else(Self::only_err, Self::ok)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
new file mode 100644
index 000000000..acb4be584
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -0,0 +1,261 @@
+//! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token
+//! trees.
+
+use smallvec::SmallVec;
+use syntax::SmolStr;
+
+use crate::{tt_iter::TtIter, ParseError};
+
+/// Consider
+///
+/// ```
+/// macro_rules! a_macro {
+/// ($x:expr + $y:expr) => ($y * $x)
+/// }
+/// ```
+///
+/// Stuff to the left of `=>` is a [`MetaTemplate`] pattern (which is matched
+/// with input).
+///
+/// Stuff to the right is a [`MetaTemplate`] template which is used to produce
+/// output.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) struct MetaTemplate(pub(crate) Vec<Op>);
+
+impl MetaTemplate {
+ pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ MetaTemplate::parse(pattern, Mode::Pattern)
+ }
+
+ pub(crate) fn parse_template(template: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ MetaTemplate::parse(template, Mode::Template)
+ }
+
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
+ self.0.iter()
+ }
+
+ fn parse(tt: &tt::Subtree, mode: Mode) -> Result<MetaTemplate, ParseError> {
+ let mut src = TtIter::new(tt);
+
+ let mut res = Vec::new();
+ while let Some(first) = src.next() {
+ let op = next_op(first, &mut src, mode)?;
+ res.push(op);
+ }
+
+ Ok(MetaTemplate(res))
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Op {
+ Var { name: SmolStr, kind: Option<SmolStr>, id: tt::TokenId },
+ Ignore { name: SmolStr, id: tt::TokenId },
+ Index { depth: u32 },
+ Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
+ Leaf(tt::Leaf),
+ Subtree { tokens: MetaTemplate, delimiter: Option<tt::Delimiter> },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum RepeatKind {
+ ZeroOrMore,
+ OneOrMore,
+ ZeroOrOne,
+}
+
+#[derive(Clone, Debug, Eq)]
+pub(crate) enum Separator {
+ Literal(tt::Literal),
+ Ident(tt::Ident),
+ Puncts(SmallVec<[tt::Punct; 3]>),
+}
+
+// Note that when we compare a Separator, we just care about its textual value.
+impl PartialEq for Separator {
+ fn eq(&self, other: &Separator) -> bool {
+ use Separator::*;
+
+ match (self, other) {
+ (Ident(a), Ident(b)) => a.text == b.text,
+ (Literal(a), Literal(b)) => a.text == b.text,
+ (Puncts(a), Puncts(b)) if a.len() == b.len() => {
+ let a_iter = a.iter().map(|a| a.char);
+ let b_iter = b.iter().map(|b| b.char);
+ a_iter.eq(b_iter)
+ }
+ _ => false,
+ }
+ }
+}
+
+impl Separator {
+ pub(crate) fn tt_count(&self) -> usize {
+ match self {
+ Separator::Literal(_) => 1,
+ Separator::Ident(_) => 1,
+ Separator::Puncts(it) => it.len(),
+ }
+ }
+}
+
+#[derive(Clone, Copy)]
+enum Mode {
+ Pattern,
+ Template,
+}
+
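+/// Parses a single `Op`, starting from `first` and pulling any further tokens it needs
+/// (fragment kinds, repetition operators, ...) from `src`.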
+fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ParseError> {
+ let res = match first {
+ tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. })) => {
+ // Note that the '$' itself is a valid token inside macro_rules.
+ let second = match src.next() {
+ None => return Ok(Op::Leaf(leaf.clone())),
+ Some(it) => it,
+ };
+ match second {
+ tt::TokenTree::Subtree(subtree) => match subtree.delimiter_kind() {
+ Some(tt::DelimiterKind::Parenthesis) => {
+ let (separator, kind) = parse_repeat(src)?;
+ let tokens = MetaTemplate::parse(subtree, mode)?;
+ Op::Repeat { tokens, separator, kind }
+ }
+ Some(tt::DelimiterKind::Brace) => match mode {
+ Mode::Template => {
+ parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
+ ParseError::unexpected("invalid metavariable expression")
+ })?
+ }
+ Mode::Pattern => {
+ return Err(ParseError::unexpected(
+ "`${}` metavariable expressions are not allowed in matchers",
+ ))
+ }
+ },
+ _ => {
+ return Err(ParseError::expected(
+ "expected `$()` repetition or `${}` expression",
+ ))
+ }
+ },
+ tt::TokenTree::Leaf(leaf) => match leaf {
+ tt::Leaf::Ident(ident) if ident.text == "crate" => {
+                    // We simply produce the identifier `$crate` here; it will be resolved when lowering the AST to a Path.
+ Op::Leaf(tt::Leaf::from(tt::Ident { text: "$crate".into(), id: ident.id }))
+ }
+ tt::Leaf::Ident(ident) => {
+ let kind = eat_fragment_kind(src, mode)?;
+ let name = ident.text.clone();
+ let id = ident.id;
+ Op::Var { name, kind, id }
+ }
+ tt::Leaf::Literal(lit) if is_boolean_literal(lit) => {
+ let kind = eat_fragment_kind(src, mode)?;
+ let name = lit.text.clone();
+ let id = lit.id;
+ Op::Var { name, kind, id }
+ }
+ tt::Leaf::Punct(punct @ tt::Punct { char: '$', .. }) => match mode {
+ Mode::Pattern => {
+ return Err(ParseError::unexpected(
+ "`$$` is not allowed on the pattern side",
+ ))
+ }
+ Mode::Template => Op::Leaf(tt::Leaf::Punct(*punct)),
+ },
+ tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => {
+ return Err(ParseError::expected("expected ident"))
+ }
+ },
+ }
+ }
+ tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
+ tt::TokenTree::Subtree(subtree) => {
+ let tokens = MetaTemplate::parse(subtree, mode)?;
+ Op::Subtree { tokens, delimiter: subtree.delimiter }
+ }
+ };
+ Ok(res)
+}
+
+fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
+ if let Mode::Pattern = mode {
+ src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
+ let ident = src
+ .expect_ident()
+ .map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
+ return Ok(Some(ident.text.clone()));
+ };
+ Ok(None)
+}
+
+fn is_boolean_literal(lit: &tt::Literal) -> bool {
+ matches!(lit.text.as_str(), "true" | "false")
+}
+
+fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
+ let mut separator = Separator::Puncts(SmallVec::new());
+ for tt in src {
+ let tt = match tt {
+ tt::TokenTree::Leaf(leaf) => leaf,
+ tt::TokenTree::Subtree(_) => return Err(ParseError::InvalidRepeat),
+ };
+ let has_sep = match &separator {
+ Separator::Puncts(puncts) => !puncts.is_empty(),
+ _ => true,
+ };
+ match tt {
+ tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
+ return Err(ParseError::InvalidRepeat)
+ }
+ tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
+ tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
+ tt::Leaf::Punct(punct) => {
+ let repeat_kind = match punct.char {
+ '*' => RepeatKind::ZeroOrMore,
+ '+' => RepeatKind::OneOrMore,
+ '?' => RepeatKind::ZeroOrOne,
+ _ => match &mut separator {
+ Separator::Puncts(puncts) if puncts.len() != 3 => {
+ puncts.push(*punct);
+ continue;
+ }
+ _ => return Err(ParseError::InvalidRepeat),
+ },
+ };
+ return Ok((has_sep.then(|| separator), repeat_kind));
+ }
+ }
+ }
+ Err(ParseError::InvalidRepeat)
+}
+
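+/// Parses a metavariable expression such as `ignore(ident)` or `index()`/`index(2)`,
+/// the only two forms supported here.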
+fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
+ let func = src.expect_ident()?;
+ let args = src.expect_subtree()?;
+
+ if args.delimiter_kind() != Some(tt::DelimiterKind::Parenthesis) {
+ return Err(());
+ }
+
+ let mut args = TtIter::new(args);
+
+ let op = match &*func.text {
+ "ignore" => {
+ let ident = args.expect_ident()?;
+ Op::Ignore { name: ident.text.clone(), id: ident.id }
+ }
+ "index" => {
+ let depth = if args.len() == 0 { 0 } else { args.expect_u32_literal()? };
+ Op::Index { depth }
+ }
+ _ => return Err(()),
+ };
+
+ if args.next().is_some() {
+ return Err(());
+ }
+
+ Ok(op)
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
new file mode 100644
index 000000000..aca6ecd42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -0,0 +1,844 @@
+//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
+
+use rustc_hash::FxHashMap;
+use stdx::{always, non_empty_vec::NonEmptyVec};
+use syntax::{
+ ast::{self, make::tokens::doc_comment},
+ AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
+};
+use tt::buffer::{Cursor, TokenBuffer};
+
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
+
+/// Convert the syntax node to a `TokenTree` (what the macro
+/// will consume).
+pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
+ let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
+ node,
+ Default::default(),
+ 0,
+ Default::default(),
+ Default::default(),
+ );
+ (subtree, token_map)
+}
+
+/// Convert the syntax node to a `TokenTree` (what the macro will consume)
+/// with the censored range excluded.
+pub fn syntax_node_to_token_tree_with_modifications(
+ node: &SyntaxNode,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+) -> (tt::Subtree, TokenMap, u32) {
+ let global_offset = node.text_range().start();
+ let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
+ let subtree = convert_tokens(&mut c);
+ c.id_alloc.map.shrink_to_fit();
+ always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+ always!(c.append.is_empty(), "append: {:?}", c.append);
+ (subtree, c.id_alloc.map, c.id_alloc.next_id)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+ pub kind: SyntaxKind,
+ pub text: SmolStr,
+ pub range: TextRange,
+ pub id: SyntheticTokenId,
+}
+
+// The following items are what a `rustc` macro can be parsed into:
+// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
+// * Expr(P<ast::Expr>) -> token_tree_to_expr
+// * Pat(P<ast::Pat>) -> token_tree_to_pat
+// * Ty(P<ast::Ty>) -> token_tree_to_ty
+// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts
+// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
+//
+// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
+// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
+// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>)
+
+pub fn token_tree_to_syntax_node(
+ tt: &tt::Subtree,
+ entry_point: parser::TopEntryPoint,
+) -> (Parse<SyntaxNode>, TokenMap) {
+ let buffer = match tt {
+ tt::Subtree { delimiter: None, token_trees } => {
+ TokenBuffer::from_tokens(token_trees.as_slice())
+ }
+ _ => TokenBuffer::from_subtree(tt),
+ };
+ let parser_input = to_parser_input(&buffer);
+ let parser_output = entry_point.parse(&parser_input);
+ let mut tree_sink = TtTreeSink::new(buffer.begin());
+ for event in parser_output.iter() {
+ match event {
+ parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+ tree_sink.token(kind, n_raw_tokens)
+ }
+ parser::Step::Enter { kind } => tree_sink.start_node(kind),
+ parser::Step::Exit => tree_sink.finish_node(),
+ parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
+ }
+ }
+ let (parse, range_map) = tree_sink.finish();
+ (parse, range_map)
+}
+
+/// Convert a string to a `TokenTree`
+pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+
+ let mut conv = RawConvertor {
+ lexed,
+ pos: 0,
+ id_alloc: TokenIdAlloc {
+ map: Default::default(),
+ global_offset: TextSize::default(),
+ next_id: 0,
+ },
+ };
+
+ let subtree = convert_tokens(&mut conv);
+ Some((subtree, conv.id_alloc.map))
+}
+
+/// Split a token tree of the form `$($e:expr)SEP*` into the individual expressions.
+pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+ if tt.token_trees.is_empty() {
+ return Vec::new();
+ }
+
+ let mut iter = TtIter::new(tt);
+ let mut res = Vec::new();
+
+ while iter.peek_n(0).is_some() {
+ let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
+
+ res.push(match expanded.value {
+ None => break,
+ Some(tt @ tt::TokenTree::Leaf(_)) => {
+ tt::Subtree { delimiter: None, token_trees: vec![tt] }
+ }
+ Some(tt::TokenTree::Subtree(tt)) => tt,
+ });
+
+ let mut fork = iter.clone();
+ if fork.expect_char(sep).is_err() {
+ break;
+ }
+ iter = fork;
+ }
+
+ if iter.peek_n(0).is_some() {
+ res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+ }
+
+ res
+}
+
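+/// Shared driver for the conversions above: pulls tokens from `conv` and assembles
+/// them into a `tt::Subtree`, tracking nested delimiters with an explicit stack.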
+fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
+ struct StackEntry {
+ subtree: tt::Subtree,
+ idx: usize,
+ open_range: TextRange,
+ }
+
+ let entry = StackEntry {
+ subtree: tt::Subtree { delimiter: None, ..Default::default() },
+ // never used (delimiter is `None`)
+ idx: !0,
+ open_range: TextRange::empty(TextSize::of('.')),
+ };
+ let mut stack = NonEmptyVec::new(entry);
+
+ loop {
+ let StackEntry { subtree, .. } = stack.last_mut();
+ let result = &mut subtree.token_trees;
+ let (token, range) = match conv.bump() {
+ Some(it) => it,
+ None => break,
+ };
+ let synth_id = token.synthetic_id(conv);
+
+ let kind = token.kind(conv);
+ if kind == COMMENT {
+ if let Some(tokens) = conv.convert_doc_comment(&token) {
+ // FIXME: There has to be a better way to do this
+                // Add the comment's token id to the converted doc string
+ let id = conv.id_alloc().alloc(range, synth_id);
+ result.extend(tokens.into_iter().map(|mut tt| {
+ if let tt::TokenTree::Subtree(sub) = &mut tt {
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
+ sub.token_trees.get_mut(2)
+ {
+ lit.id = id
+ }
+ }
+ tt
+ }));
+ }
+ continue;
+ }
+ let tt = if kind.is_punct() && kind != UNDERSCORE {
+ if synth_id.is_none() {
+ assert_eq!(range.len(), TextSize::of('.'));
+ }
+
+ if let Some(delim) = subtree.delimiter {
+ let expected = match delim.kind {
+ tt::DelimiterKind::Parenthesis => T![')'],
+ tt::DelimiterKind::Brace => T!['}'],
+ tt::DelimiterKind::Bracket => T![']'],
+ };
+
+ if kind == expected {
+ if let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, Some(range));
+ stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+ }
+ continue;
+ }
+ }
+
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ if let Some(kind) = delim {
+ let mut subtree = tt::Subtree::default();
+ let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
+ subtree.delimiter = Some(tt::Delimiter { id, kind });
+ stack.push(StackEntry { subtree, idx, open_range: range });
+ continue;
+ }
+
+ let spacing = match conv.peek().map(|next| next.kind(conv)) {
+ Some(kind)
+ if !kind.is_trivia()
+ && kind.is_punct()
+ && kind != T!['[']
+ && kind != T!['{']
+ && kind != T!['(']
+ && kind != UNDERSCORE =>
+ {
+ tt::Spacing::Joint
+ }
+ _ => tt::Spacing::Alone,
+ };
+ let char = match token.to_char(conv) {
+ Some(c) => c,
+ None => {
+ panic!("Token from lexer must be single char: token = {:#?}", token);
+ }
+ };
+ tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
+ .into()
+ } else {
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
+ .into()
+ };
+ }
+ let leaf: tt::Leaf = match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let char_unit = TextSize::of('\'');
+ let r = TextRange::at(range.start(), char_unit);
+ let apostrophe = tt::Leaf::from(tt::Punct {
+ char: '\'',
+ spacing: tt::Spacing::Joint,
+ id: conv.id_alloc().alloc(r, synth_id),
+ });
+ result.push(apostrophe.into());
+
+ let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
+ let ident = tt::Leaf::from(tt::Ident {
+ text: SmolStr::new(&token.to_text(conv)[1..]),
+ id: conv.id_alloc().alloc(r, synth_id),
+ });
+ result.push(ident.into());
+ continue;
+ }
+ _ => continue,
+ };
+
+ leaf.into()
+ };
+ result.push(tt);
+ }
+
+ // If we get here, we've consumed all input tokens.
+ // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
+ // Merge them so we're left with one.
+ while let Some(entry) = stack.pop() {
+ let parent = stack.last_mut();
+
+ conv.id_alloc().close_delim(entry.idx, None);
+ let leaf: tt::Leaf = tt::Punct {
+ id: conv.id_alloc().alloc(entry.open_range, None),
+ char: match entry.subtree.delimiter.unwrap().kind {
+ tt::DelimiterKind::Parenthesis => '(',
+ tt::DelimiterKind::Brace => '{',
+ tt::DelimiterKind::Bracket => '[',
+ },
+ spacing: tt::Spacing::Alone,
+ }
+ .into();
+ parent.subtree.token_trees.push(leaf.into());
+ parent.subtree.token_trees.extend(entry.subtree.token_trees);
+ }
+
+ let subtree = stack.into_last().subtree;
+ if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
+ first.clone()
+ } else {
+ subtree
+ }
+}
+
+/// Returns the textual content of a doc comment block as a quoted string.
+/// That is, it strips the leading `///` (or `/**`, etc.)
+/// and the trailing `*/`,
+/// and then quotes the string, which is needed to convert it to a `tt::Literal`.
+fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
+ let prefix_len = comment.prefix().len();
+ let mut text = &comment.text()[prefix_len..];
+
+ // Remove ending "*/"
+ if comment.kind().shape == ast::CommentShape::Block {
+ text = &text[0..text.len() - 2];
+ }
+
+ // Quote the string
+    // Note that `tt::Literal` expects an escaped string
+ let text = format!("\"{}\"", text.escape_debug());
+ text.into()
+}
+
+fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+ cov_mark::hit!(test_meta_doc_comments);
+ let comment = ast::Comment::cast(token.clone())?;
+ let doc = comment.kind().doc?;
+
+    // Make `doc = "\" Comments\""`
+ let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
+
+ // Make `#![]`
+ let mut token_trees = Vec::with_capacity(3);
+ token_trees.push(mk_punct('#'));
+ if let ast::CommentPlacement::Inner = doc {
+ token_trees.push(mk_punct('!'));
+ }
+ token_trees.push(tt::TokenTree::from(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Bracket,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: meta_tkns,
+ }));
+
+ return Some(token_trees);
+
+ // Helper functions
+ fn mk_ident(s: &str) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Ident {
+ text: s.into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ }
+
+ fn mk_punct(c: char) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+ char: c,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
+ }
+
+ fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
+ let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
+
+ tt::TokenTree::from(tt::Leaf::from(lit))
+ }
+}
+
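+/// Allocates fresh `tt::TokenId`s and records in a `TokenMap` which text range each
+/// id corresponds to.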
+struct TokenIdAlloc {
+ map: TokenMap,
+ global_offset: TextSize,
+ next_id: u32,
+}
+
+impl TokenIdAlloc {
+ fn alloc(
+ &mut self,
+ absolute_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> tt::TokenId {
+ let relative_range = absolute_range - self.global_offset;
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+ self.map.insert(token_id, relative_range);
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
+ token_id
+ }
+
+ fn open_delim(
+ &mut self,
+ open_abs_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> (tt::TokenId, usize) {
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+ let idx = self.map.insert_delim(
+ token_id,
+ open_abs_range - self.global_offset,
+ open_abs_range - self.global_offset,
+ );
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
+ (token_id, idx)
+ }
+
+ fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
+ match close_abs_range {
+ None => {
+ self.map.remove_delim(idx);
+ }
+ Some(close) => {
+ self.map.update_close_delim(idx, close - self.global_offset);
+ }
+ }
+ }
+}
+
+/// A raw token (straight from lexer) convertor
+struct RawConvertor<'a> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ id_alloc: TokenIdAlloc,
+}
+
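+/// A single source token as seen through a token convertor `Ctx`.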
+trait SrcToken<Ctx>: std::fmt::Debug {
+ fn kind(&self, ctx: &Ctx) -> SyntaxKind;
+
+ fn to_char(&self, ctx: &Ctx) -> Option<char>;
+
+ fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+ fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
+}
+
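+/// A stream of source tokens that `convert_tokens` turns into a `tt::Subtree`.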
+trait TokenConvertor: Sized {
+ type Token: SrcToken<Self>;
+
+ fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
+
+ fn peek(&self) -> Option<Self::Token>;
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+}
+
+impl<'a> SrcToken<RawConvertor<'a>> for usize {
+ fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
+ fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
+ fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
+ ctx.lexed.text(*self).into()
+ }
+
+ fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+ None
+ }
+}
+
+impl<'a> TokenConvertor for RawConvertor<'a> {
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text))
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+ &mut self.id_alloc
+ }
+}
+
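+/// Token convertor that walks a `SyntaxNode`, optionally replacing or appending
+/// synthetic tokens for specific syntax elements along the way.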
+struct Convertor {
+ id_alloc: TokenIdAlloc,
+ current: Option<SyntaxToken>,
+ current_synthetic: Vec<SyntheticToken>,
+ preorder: PreorderWithTokens,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ range: TextRange,
+ punct_offset: Option<(SyntaxToken, TextSize)>,
+}
+
+impl Convertor {
+ fn new(
+ node: &SyntaxNode,
+ global_offset: TextSize,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ ) -> Convertor {
+ let range = node.text_range();
+ let mut preorder = node.preorder_with_tokens();
+ let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
+ Convertor {
+ id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
+ current: first,
+ current_synthetic: synthetic,
+ preorder,
+ range,
+ replace,
+ append,
+ punct_offset: None,
+ }
+ }
+
+ fn next_token(
+ preorder: &mut PreorderWithTokens,
+ replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
+ while let Some(ev) = preorder.next() {
+ let ele = match ev {
+ WalkEvent::Enter(ele) => ele,
+ WalkEvent::Leave(ele) => {
+ if let Some(mut v) = append.remove(&ele) {
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ continue;
+ }
+ };
+ if let Some(mut v) = replace.remove(&ele) {
+ preorder.skip_subtree();
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ match ele {
+ SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+ _ => {}
+ }
+ }
+ (None, Vec::new())
+ }
+}
+
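+/// A token handed out by `Convertor`: a real syntax token, a single character of a
+/// multi-character punct token, or a synthetic token.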
+#[derive(Debug)]
+enum SynToken {
+ Ordinary(SyntaxToken),
+ // FIXME is this supposed to be `Punct`?
+ Punch(SyntaxToken, TextSize),
+ Synthetic(SyntheticToken),
+}
+
+impl SynToken {
+ fn token(&self) -> Option<&SyntaxToken> {
+ match self {
+ SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+ SynToken::Synthetic(_) => None,
+ }
+ }
+}
+
+impl SrcToken<Convertor> for SynToken {
+ fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+ match self {
+ SynToken::Ordinary(token) => token.kind(),
+ SynToken::Punch(token, _) => token.kind(),
+ SynToken::Synthetic(token) => token.kind,
+ }
+ }
+ fn to_char(&self, _ctx: &Convertor) -> Option<char> {
+ match self {
+ SynToken::Ordinary(_) => None,
+ SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+ SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
+ SynToken::Synthetic(_) => None,
+ }
+ }
+ fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+ match self {
+ SynToken::Ordinary(token) => token.text().into(),
+ SynToken::Punch(token, _) => token.text().into(),
+ SynToken::Synthetic(token) => token.text.clone(),
+ }
+ }
+
+ fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+ match self {
+ SynToken::Synthetic(token) => Some(token.id),
+ _ => None,
+ }
+ }
+}
+
+impl TokenConvertor for Convertor {
+ type Token = SynToken;
+ fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+ convert_doc_comment(token.token()?)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if let Some((punct, offset)) = self.punct_offset.clone() {
+ if usize::from(offset) + 1 < punct.text().len() {
+ let offset = offset + TextSize::of('.');
+ let range = punct.text_range();
+ self.punct_offset = Some((punct.clone(), offset));
+ let range = TextRange::at(range.start() + offset, TextSize::of('.'));
+ return Some((SynToken::Punch(punct, offset), range));
+ }
+ }
+
+ if let Some(synth_token) = self.current_synthetic.pop() {
+ if self.current_synthetic.is_empty() {
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ }
+ let range = synth_token.range;
+ return Some((SynToken::Synthetic(synth_token), range));
+ }
+
+ let curr = self.current.clone()?;
+ if !&self.range.contains_range(curr.text_range()) {
+ return None;
+ }
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ let token = if curr.kind().is_punct() {
+ self.punct_offset = Some((curr.clone(), 0.into()));
+ let range = curr.text_range();
+ let range = TextRange::at(range.start(), TextSize::of('.'));
+ (SynToken::Punch(curr, 0.into()), range)
+ } else {
+ self.punct_offset = None;
+ let range = curr.text_range();
+ (SynToken::Ordinary(curr), range)
+ };
+
+ Some(token)
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if let Some((punct, mut offset)) = self.punct_offset.clone() {
+ offset += TextSize::of('.');
+ if usize::from(offset) < punct.text().len() {
+ return Some(SynToken::Punch(punct, offset));
+ }
+ }
+
+ if let Some(synth_token) = self.current_synthetic.last() {
+ return Some(SynToken::Synthetic(synth_token.clone()));
+ }
+
+ let curr = self.current.clone()?;
+ if !self.range.contains_range(curr.text_range()) {
+ return None;
+ }
+
+ let token = if curr.kind().is_punct() {
+ SynToken::Punch(curr, 0.into())
+ } else {
+ SynToken::Ordinary(curr)
+ };
+ Some(token)
+ }
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+ &mut self.id_alloc
+ }
+}
+
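+/// Re-assembles a `SyntaxNode` (and the accompanying `TokenMap`) from parser output
+/// produced over a `TokenBuffer`.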
+struct TtTreeSink<'a> {
+ buf: String,
+ cursor: Cursor<'a>,
+ open_delims: FxHashMap<tt::TokenId, TextSize>,
+ text_pos: TextSize,
+ inner: SyntaxTreeBuilder,
+ token_map: TokenMap,
+}
+
+impl<'a> TtTreeSink<'a> {
+ fn new(cursor: Cursor<'a>) -> Self {
+ TtTreeSink {
+ buf: String::new(),
+ cursor,
+ open_delims: FxHashMap::default(),
+ text_pos: 0.into(),
+ inner: SyntaxTreeBuilder::default(),
+ token_map: TokenMap::default(),
+ }
+ }
+
+ fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
+ self.token_map.shrink_to_fit();
+ (self.inner.finish(), self.token_map)
+ }
+}
+
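+/// Returns the one-character text of the opening or closing half of a delimiter.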
+fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
+ let texts = match d {
+ tt::DelimiterKind::Parenthesis => "()",
+ tt::DelimiterKind::Brace => "{}",
+ tt::DelimiterKind::Bracket => "[]",
+ };
+
+ let idx = closing as usize;
+ &texts[idx..texts.len() - (1 - idx)]
+}
+
+impl<'a> TtTreeSink<'a> {
+ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+ if kind == LIFETIME_IDENT {
+ n_tokens = 2;
+ }
+
+ let mut last = self.cursor;
+ for _ in 0..n_tokens {
+ let tmp: u8;
+ if self.cursor.eof() {
+ break;
+ }
+ last = self.cursor;
+ let text: &str = loop {
+ break match self.cursor.token_tree() {
+ Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+ // Mark the range if needed
+ let (text, id) = match leaf {
+ tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
+ tt::Leaf::Punct(punct) => {
+ assert!(punct.char.is_ascii());
+ tmp = punct.char as u8;
+ (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
+ }
+ tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
+ };
+ let range = TextRange::at(self.text_pos, TextSize::of(text));
+ self.token_map.insert(id, range);
+ self.cursor = self.cursor.bump();
+ text
+ }
+ Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+ self.cursor = self.cursor.subtree().unwrap();
+ match subtree.delimiter {
+ Some(d) => {
+ self.open_delims.insert(d.id, self.text_pos);
+ delim_to_str(d.kind, false)
+ }
+ None => continue,
+ }
+ }
+ None => {
+ let parent = self.cursor.end().unwrap();
+ self.cursor = self.cursor.bump();
+ match parent.delimiter {
+ Some(d) => {
+ if let Some(open_delim) = self.open_delims.get(&d.id) {
+ let open_range = TextRange::at(*open_delim, TextSize::of('('));
+ let close_range =
+ TextRange::at(self.text_pos, TextSize::of('('));
+ self.token_map.insert_delim(d.id, open_range, close_range);
+ }
+ delim_to_str(d.kind, true)
+ }
+ None => continue,
+ }
+ }
+ };
+ };
+ self.buf += text;
+ self.text_pos += TextSize::of(text);
+ }
+
+ self.inner.token(kind, self.buf.as_str());
+ self.buf.clear();
+ // Add whitespace between adjoint puncts
+ let next = last.bump();
+ if let (
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
+ ) = (last.token_tree(), next.token_tree())
+ {
+            // Note: we always assume that a semicolon is the last token elsewhere in
+            // RA, so we don't add whitespace after it here.
+ if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
+ self.inner.token(WHITESPACE, " ");
+ self.text_pos += TextSize::of(' ');
+ }
+ }
+ }
+
+ fn start_node(&mut self, kind: SyntaxKind) {
+ self.inner.start_node(kind);
+ }
+
+ fn finish_node(&mut self) {
+ self.inner.finish_node();
+ }
+
+ fn error(&mut self, error: String) {
+ self.inner.error(error, self.text_pos)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
new file mode 100644
index 000000000..783c3ca4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
@@ -0,0 +1,99 @@
+//! Convert macro-by-example tokens which are specific to macro expansion into a
+//! format that works for our parser.
+
+use syntax::{SyntaxKind, SyntaxKind::*, T};
+use tt::buffer::TokenBuffer;
+
+pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
+ let mut res = parser::Input::default();
+
+ let mut current = buffer.begin();
+
+ while !current.eof() {
+ let cursor = current;
+ let tt = cursor.token_tree();
+
+ // Check if it is lifetime
+ if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
+ if punct.char == '\'' {
+ let next = cursor.bump();
+ match next.token_tree() {
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(_ident), _)) => {
+ res.push(LIFETIME_IDENT);
+ current = next.bump();
+ continue;
+ }
+ _ => panic!("Next token must be ident : {:#?}", next.token_tree()),
+ }
+ }
+ }
+
+ current = match tt {
+ Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ let is_negated = lit.text.starts_with('-');
+ let inner_text = &lit.text[if is_negated { 1 } else { 0 }..];
+
+ let kind = parser::LexedStr::single_token(inner_text)
+ .map(|(kind, _error)| kind)
+ .filter(|kind| {
+ kind.is_literal()
+ && (!is_negated || matches!(kind, FLOAT_NUMBER | INT_NUMBER))
+ })
+                            .unwrap_or_else(|| panic!("Failed to convert given literal {:#?}", &lit));
+
+ res.push(kind);
+ }
+ tt::Leaf::Ident(ident) => match ident.text.as_ref() {
+ "_" => res.push(T![_]),
+ i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
+ _ => match SyntaxKind::from_keyword(&ident.text) {
+ Some(kind) => res.push(kind),
+ None => {
+ let contextual_keyword =
+ SyntaxKind::from_contextual_keyword(&ident.text)
+ .unwrap_or(SyntaxKind::IDENT);
+ res.push_ident(contextual_keyword);
+ }
+ },
+ },
+ tt::Leaf::Punct(punct) => {
+ let kind = SyntaxKind::from_char(punct.char)
+ .unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct));
+ res.push(kind);
+ if punct.spacing == tt::Spacing::Joint {
+ res.was_joint();
+ }
+ }
+ }
+ cursor.bump()
+ }
+ Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+ if let Some(d) = subtree.delimiter_kind() {
+ res.push(match d {
+ tt::DelimiterKind::Parenthesis => T!['('],
+ tt::DelimiterKind::Brace => T!['{'],
+ tt::DelimiterKind::Bracket => T!['['],
+ });
+ }
+ cursor.subtree().unwrap()
+ }
+ None => match cursor.end() {
+ Some(subtree) => {
+ if let Some(d) = subtree.delimiter_kind() {
+ res.push(match d {
+ tt::DelimiterKind::Parenthesis => T![')'],
+ tt::DelimiterKind::Brace => T!['}'],
+ tt::DelimiterKind::Bracket => T![']'],
+ })
+ }
+ cursor.bump()
+ }
+ None => continue,
+ },
+ };
+ }
+
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
new file mode 100644
index 000000000..c923e7a69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -0,0 +1,113 @@
+//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
+
+use std::hash::Hash;
+
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+use crate::syntax_bridge::SyntheticTokenId;
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+enum TokenTextRange {
+ Token(TextRange),
+ Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+ fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+ match self {
+ TokenTextRange::Token(it) => Some(it),
+ TokenTextRange::Delimiter(it) => match kind {
+ T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+ T!['}'] | T![')'] | T![']'] => {
+ Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+ }
+ _ => None,
+ },
+ }
+ }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
+pub struct TokenMap {
+ /// Maps `tt::TokenId` to the *relative* source range.
+ entries: Vec<(tt::TokenId, TokenTextRange)>,
+ pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
+}
+
+impl TokenMap {
+ pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+ let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+ TokenTextRange::Token(it) => *it == relative_range,
+ TokenTextRange::Delimiter(it) => {
+ let open = TextRange::at(it.start(), 1.into());
+ let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+ open == relative_range || close == relative_range
+ }
+ })?;
+ Some(token_id)
+ }
+
+ pub fn ranges_by_token(
+ &self,
+ token_id: tt::TokenId,
+ kind: SyntaxKind,
+ ) -> impl Iterator<Item = TextRange> + '_ {
+ self.entries
+ .iter()
+ .filter(move |&&(tid, _)| tid == token_id)
+ .filter_map(move |(_, range)| range.by_kind(kind))
+ }
+
+ pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
+ self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
+ }
+
+ pub fn first_range_by_token(
+ &self,
+ token_id: tt::TokenId,
+ kind: SyntaxKind,
+ ) -> Option<TextRange> {
+ self.ranges_by_token(token_id, kind).next()
+ }
+
+ pub(crate) fn shrink_to_fit(&mut self) {
+ self.entries.shrink_to_fit();
+ self.synthetic_entries.shrink_to_fit();
+ }
+
+ pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+ self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+ }
+
+ pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
+ self.synthetic_entries.push((token_id, id));
+ }
+
+ pub(crate) fn insert_delim(
+ &mut self,
+ token_id: tt::TokenId,
+ open_relative_range: TextRange,
+ close_relative_range: TextRange,
+ ) -> usize {
+ let res = self.entries.len();
+ let cover = open_relative_range.cover(close_relative_range);
+
+ self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+ res
+ }
+
+ pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+ let (_, token_text_range) = &mut self.entries[idx];
+ if let TokenTextRange::Delimiter(dim) = token_text_range {
+ let cover = dim.cover(close_relative_range);
+ *token_text_range = TokenTextRange::Delimiter(cover);
+ }
+ }
+
+ pub(crate) fn remove_delim(&mut self, idx: usize) {
+ // FIXME: This could be accidentally quadratic
+ self.entries.remove(idx);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
new file mode 100644
index 000000000..7aceb676c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -0,0 +1,160 @@
+//! A "Parser" structure for token trees. We use this when parsing a declarative
+//! macro definition into a list of patterns and templates.
+
+use syntax::SyntaxKind;
+use tt::buffer::TokenBuffer;
+
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
+
+#[derive(Debug, Clone)]
+pub(crate) struct TtIter<'a> {
+ pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
+}
+
+impl<'a> TtIter<'a> {
+ pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
+ TtIter { inner: subtree.token_trees.iter() }
+ }
+
+ pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> {
+ match self.next() {
+ Some(&tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) if c == char => {
+ Ok(())
+ }
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> {
+ match self.next() {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. })))
+ if chars.contains(c) =>
+ {
+ Ok(())
+ }
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
+ match self.next() {
+ Some(tt::TokenTree::Subtree(it)) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
+ match self.next() {
+ Some(tt::TokenTree::Leaf(it)) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
+ match self.expect_leaf()? {
+ tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> {
+ match self.expect_leaf()? {
+ tt::Leaf::Ident(it) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
+ let it = self.expect_leaf()?;
+ match it {
+ tt::Leaf::Literal(_) => Ok(it),
+ tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_u32_literal(&mut self) -> Result<u32, ()> {
+ match self.expect_literal()? {
+ tt::Leaf::Literal(lit) => lit.text.parse().map_err(drop),
+ _ => Err(()),
+ }
+ }
+
+ pub(crate) fn expect_punct(&mut self) -> Result<&'a tt::Punct, ()> {
+ match self.expect_leaf()? {
+ tt::Leaf::Punct(it) => Ok(it),
+ _ => Err(()),
+ }
+ }
+
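+    /// Parses a prefix of the remaining tokens using the given parser entry point and
+    /// returns the consumed tokens (if any) as a single token tree, advancing `self`
+    /// past them.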
+ pub(crate) fn expect_fragment(
+ &mut self,
+ entry_point: parser::PrefixEntryPoint,
+ ) -> ExpandResult<Option<tt::TokenTree>> {
+ let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
+ let parser_input = to_parser_input(&buffer);
+ let tree_traversal = entry_point.parse(&parser_input);
+
+ let mut cursor = buffer.begin();
+ let mut error = false;
+ for step in tree_traversal.iter() {
+ match step {
+ parser::Step::Token { kind, mut n_input_tokens } => {
+ if kind == SyntaxKind::LIFETIME_IDENT {
+ n_input_tokens = 2;
+ }
+ for _ in 0..n_input_tokens {
+ cursor = cursor.bump_subtree();
+ }
+ }
+ parser::Step::Enter { .. } | parser::Step::Exit => (),
+ parser::Step::Error { .. } => error = true,
+ }
+ }
+
+ let err = if error || !cursor.is_root() {
+ Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
+ } else {
+ None
+ };
+
+ let mut curr = buffer.begin();
+ let mut res = vec![];
+
+ if cursor.is_root() {
+ while curr != cursor {
+ if let Some(token) = curr.token_tree() {
+ res.push(token);
+ }
+ curr = curr.bump();
+ }
+ }
+ self.inner = self.inner.as_slice()[res.len()..].iter();
+ let res = match res.len() {
+ 1 => Some(res[0].cloned()),
+ 0 => None,
+ _ => Some(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: None,
+ token_trees: res.into_iter().map(|it| it.cloned()).collect(),
+ })),
+ };
+ ExpandResult { value: res, err }
+ }
+
+ pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> {
+ self.inner.as_slice().get(n)
+ }
+}
+
+impl<'a> Iterator for TtIter<'a> {
+ type Item = &'a tt::TokenTree;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {}
diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml
new file mode 100644
index 000000000..a286a6bcd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "parser"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+drop_bomb = "0.1.5"
+rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+sourcegen = { path = "../sourcegen" }
diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs
new file mode 100644
index 000000000..b0e70e794
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/event.rs
@@ -0,0 +1,133 @@
+//! This module provides a way to construct a `File`.
+//! It is intended to be completely decoupled from the
+//! parser, so as to allow the tree representation and the
+//! parser algorithm to evolve independently.
+//!
+//! The `TreeSink` trait is the bridge between the parser and the
+//! tree builder: the parser produces a stream of events like
+//! `start node`, `finish node`, and `FileBuilder` converts
+//! this stream to a real tree.
+use std::mem;
+
+use crate::{
+ output::Output,
+ SyntaxKind::{self, *},
+};
+
+/// `Parser` produces a flat list of `Event`s.
+/// They are converted to a tree-structure in
+/// a separate pass, via `TreeBuilder`.
+#[derive(Debug)]
+pub(crate) enum Event {
+ /// This event signifies the start of the node.
+ /// It should be either abandoned (in which case the
+ /// `kind` is `TOMBSTONE`, and the event is ignored),
+ /// or completed via a `Finish` event.
+ ///
+ /// All tokens between a `Start` and a `Finish` would
+ /// become the children of the respective node.
+ ///
+ /// For left-recursive syntactic constructs, the parser produces
+ /// a child node before it sees a parent. `forward_parent`
+ /// saves the position of the current event's parent.
+ ///
+ /// Consider this path
+ ///
+ /// foo::bar
+ ///
+ /// The events for it would look like this:
+ ///
+ /// ```text
+ /// START(PATH) IDENT('foo') FINISH START(PATH) T![::] IDENT('bar') FINISH
+ /// | /\
+ /// | |
+ /// +------forward-parent------+
+ /// ```
+ ///
+ /// And the tree would look like this
+ ///
+ /// ```text
+ /// +--PATH---------+
+ /// | | |
+ /// | | |
+ /// | '::' 'bar'
+ /// |
+ /// PATH
+ /// |
+ /// 'foo'
+ /// ```
+ ///
+ /// See also `CompletedMarker::precede`.
+ Start {
+ kind: SyntaxKind,
+ forward_parent: Option<u32>,
+ },
+
+ /// Complete the previous `Start` event
+ Finish,
+
+ /// Produce a single leaf-element.
+ /// `n_raw_tokens` is used to glue complex contextual tokens.
+ /// For example, the lexer tokenizes `>>` as `>`, `>`, and
+ /// `n_raw_tokens = 2` is used to produce a single `>>`.
+ Token {
+ kind: SyntaxKind,
+ n_raw_tokens: u8,
+ },
+
+ Error {
+ msg: String,
+ },
+}
+
+impl Event {
+ pub(crate) fn tombstone() -> Self {
+ Event::Start { kind: TOMBSTONE, forward_parent: None }
+ }
+}
+
+/// Generates the syntax tree by replaying the flat list of events.
+pub(super) fn process(mut events: Vec<Event>) -> Output {
+ let mut res = Output::default();
+ let mut forward_parents = Vec::new();
+
+ for i in 0..events.len() {
+ match mem::replace(&mut events[i], Event::tombstone()) {
+ Event::Start { kind, forward_parent } => {
+ // For events [A, B, C], B is A's forward_parent and C is B's forward_parent.
+ // In normal control flow the parent-child relation would be `A -> B -> C`,
+ // but the forward_parent links invert it, so the nodes are entered in the order `C, B, A`.
+
+ // append `A` into parents.
+ forward_parents.push(kind);
+ let mut idx = i;
+ let mut fp = forward_parent;
+ while let Some(fwd) = fp {
+ idx += fwd as usize;
+ // append `A`'s forward_parent `B`
+ fp = match mem::replace(&mut events[idx], Event::tombstone()) {
+ Event::Start { kind, forward_parent } => {
+ forward_parents.push(kind);
+ forward_parent
+ }
+ _ => unreachable!(),
+ };
+ // append `B`'s forward_parent `C` in the next stage.
+ }
+
+ for kind in forward_parents.drain(..).rev() {
+ if kind != TOMBSTONE {
+ res.enter_node(kind);
+ }
+ }
+ }
+ Event::Finish => res.leave_node(),
+ Event::Token { kind, n_raw_tokens } => {
+ res.token(kind, n_raw_tokens);
+ }
+ Event::Error { msg } => res.error(msg),
+ }
+ }
+
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
new file mode 100644
index 000000000..b74683296
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -0,0 +1,342 @@
+//! This is the actual "grammar" of the Rust language.
+//!
+//! Each function in this module and its children corresponds
+//! to a production of the formal grammar. Submodules roughly
+//! correspond to different *areas* of the grammar. By convention,
+//! each submodule starts with a `use super::*` import and exports
+//! "public" productions via `pub(super)`.
+//!
+//! See docs for [`Parser`](super::parser::Parser) to learn about the API
+//! available to the grammar, and see docs for [`Event`](super::event::Event)
+//! to learn how this actually manages to produce parse trees.
+//!
+//! Code in this module also contains inline tests, which start with
+//! `// test name-of-the-test` comment and look like this:
+//!
+//! ```
+//! // test function_with_zero_parameters
+//! // fn foo() {}
+//! ```
+//!
+//! After adding a new inline-test, run `cargo test -p xtask` to
+//! extract it as a standalone text-fixture into
+//! `crates/syntax/test_data/parser/`, and run `cargo test` once to
+//! create the "gold" value.
+//!
+//! Coding convention: rules like `where_clause` always produce either a
+//! node or an error, while rules like `opt_where_clause` may produce nothing.
+//! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`; the
+//! caller is responsible for branching on the first token.
+
+mod attributes;
+mod expressions;
+mod items;
+mod params;
+mod paths;
+mod patterns;
+mod generic_args;
+mod generic_params;
+mod types;
+
+use crate::{
+ parser::{CompletedMarker, Marker, Parser},
+ SyntaxKind::{self, *},
+ TokenSet, T,
+};
+
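+/// Parser entry points. The `prefix` entry points parse a single fragment at
+/// the start of the input and then stop, while the `top` entry points parse
+/// the entire input, with most of them wrapping unexpected trailing tokens in
+/// an `ERROR` node.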
+pub(crate) mod entry {
+ use super::*;
+
+ pub(crate) mod prefix {
+ use super::*;
+
+ pub(crate) fn vis(p: &mut Parser<'_>) {
+ let _ = opt_visibility(p, false);
+ }
+
+ pub(crate) fn block(p: &mut Parser<'_>) {
+ expressions::block_expr(p);
+ }
+
+ pub(crate) fn stmt(p: &mut Parser<'_>) {
+ expressions::stmt(p, expressions::Semicolon::Forbidden);
+ }
+
+ pub(crate) fn pat(p: &mut Parser<'_>) {
+ patterns::pattern_single(p);
+ }
+
+ pub(crate) fn ty(p: &mut Parser<'_>) {
+ types::type_(p);
+ }
+ pub(crate) fn expr(p: &mut Parser<'_>) {
+ let _ = expressions::expr(p);
+ }
+ pub(crate) fn path(p: &mut Parser<'_>) {
+ let _ = paths::type_path(p);
+ }
+ pub(crate) fn item(p: &mut Parser<'_>) {
+ items::item_or_macro(p, true);
+ }
+ // Parses a meta item, excluding the surrounding `[]`, e.g. the `MetaItem` in `#[ MetaItem ]`.
+ pub(crate) fn meta_item(p: &mut Parser<'_>) {
+ attributes::meta(p);
+ }
+ }
+
+ pub(crate) mod top {
+ use super::*;
+
+ pub(crate) fn source_file(p: &mut Parser<'_>) {
+ let m = p.start();
+ p.eat(SHEBANG);
+ items::mod_contents(p, false);
+ m.complete(p, SOURCE_FILE);
+ }
+
+ pub(crate) fn macro_stmts(p: &mut Parser<'_>) {
+ let m = p.start();
+
+ while !p.at(EOF) {
+ expressions::stmt(p, expressions::Semicolon::Optional);
+ }
+
+ m.complete(p, MACRO_STMTS);
+ }
+
+ pub(crate) fn macro_items(p: &mut Parser<'_>) {
+ let m = p.start();
+ items::mod_contents(p, false);
+ m.complete(p, MACRO_ITEMS);
+ }
+
+ pub(crate) fn pattern(p: &mut Parser<'_>) {
+ let m = p.start();
+ patterns::pattern_top(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+
+ pub(crate) fn type_(p: &mut Parser<'_>) {
+ let m = p.start();
+ types::type_(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+
+ pub(crate) fn expr(p: &mut Parser<'_>) {
+ let m = p.start();
+ expressions::expr(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+
+ pub(crate) fn meta_item(p: &mut Parser<'_>) {
+ let m = p.start();
+ attributes::meta(p);
+ if p.at(EOF) {
+ m.abandon(p);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
+ }
+}
+
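+/// Returns the grammar function used to incrementally reparse a single node
+/// of the given kind, if that kind supports reparsing in isolation (mostly
+/// brace-delimited lists and token trees). `first_child` and `parent` are
+/// used to disambiguate kinds such as `TOKEN_TREE` and `ASSOC_ITEM_LIST`.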
+pub(crate) fn reparser(
+ node: SyntaxKind,
+ first_child: Option<SyntaxKind>,
+ parent: Option<SyntaxKind>,
+) -> Option<fn(&mut Parser<'_>)> {
+ let res = match node {
+ BLOCK_EXPR => expressions::block_expr,
+ RECORD_FIELD_LIST => items::record_field_list,
+ RECORD_EXPR_FIELD_LIST => items::record_expr_field_list,
+ VARIANT_LIST => items::variant_list,
+ MATCH_ARM_LIST => items::match_arm_list,
+ USE_TREE_LIST => items::use_tree_list,
+ EXTERN_ITEM_LIST => items::extern_item_list,
+ TOKEN_TREE if first_child? == T!['{'] => items::token_tree,
+ ASSOC_ITEM_LIST => match parent? {
+ IMPL | TRAIT => items::assoc_item_list,
+ _ => return None,
+ },
+ ITEM_LIST => items::item_list,
+ _ => return None,
+ };
+ Some(res)
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+enum BlockLike {
+ Block,
+ NotBlock,
+}
+
+impl BlockLike {
+ fn is_block(self) -> bool {
+ self == BlockLike::Block
+ }
+}
+
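+/// Parses an optional visibility qualifier (`pub`, `pub(...)`, or a bare
+/// `crate`), returning `true` if one was consumed.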
+fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
+ match p.current() {
+ T![pub] => {
+ let m = p.start();
+ p.bump(T![pub]);
+ if p.at(T!['(']) {
+ match p.nth(1) {
+ // test crate_visibility
+ // pub(crate) struct S;
+ // pub(self) struct S;
+ // pub(super) struct S;
+
+ // test pub_parens_typepath
+ // struct B(pub (super::A));
+ // struct B(pub (crate::A,));
+ T![crate] | T![self] | T![super] | T![ident] if p.nth(2) != T![:] => {
+ // If we are in a tuple struct, then the parens following `pub`
+ // might be a tuple field, not part of the visibility. So in that
+ // case we don't want to consume an identifier.
+
+ // test pub_tuple_field
+ // struct MyStruct(pub (u32, u32));
+ if !(in_tuple_field && matches!(p.nth(1), T![ident])) {
+ p.bump(T!['(']);
+ paths::use_path(p);
+ p.expect(T![')']);
+ }
+ }
+ // test crate_visibility_in
+ // pub(in super::A) struct S;
+ // pub(in crate) struct S;
+ T![in] => {
+ p.bump(T!['(']);
+ p.bump(T![in]);
+ paths::use_path(p);
+ p.expect(T![')']);
+ }
+ _ => (),
+ }
+ }
+ m.complete(p, VISIBILITY);
+ true
+ }
+ // test crate_keyword_vis
+ // crate fn main() { }
+ // struct S { crate field: u32 }
+ // struct T(crate u32);
+ T![crate] => {
+ if p.nth_at(1, T![::]) {
+ // test crate_keyword_path
+ // fn foo() { crate::foo(); }
+ return false;
+ }
+ let m = p.start();
+ p.bump(T![crate]);
+ m.complete(p, VISIBILITY);
+ true
+ }
+ _ => false,
+ }
+}
+
+fn opt_rename(p: &mut Parser<'_>) {
+ if p.at(T![as]) {
+ let m = p.start();
+ p.bump(T![as]);
+ if !p.eat(T![_]) {
+ name(p);
+ }
+ m.complete(p, RENAME);
+ }
+}
+
+fn abi(p: &mut Parser<'_>) {
+ assert!(p.at(T![extern]));
+ let abi = p.start();
+ p.bump(T![extern]);
+ p.eat(STRING);
+ abi.complete(p, ABI);
+}
+
+fn opt_ret_type(p: &mut Parser<'_>) -> bool {
+ if p.at(T![->]) {
+ let m = p.start();
+ p.bump(T![->]);
+ types::type_no_bounds(p);
+ m.complete(p, RET_TYPE);
+ true
+ } else {
+ false
+ }
+}
+
+fn name_r(p: &mut Parser<'_>, recovery: TokenSet) {
+ if p.at(IDENT) {
+ let m = p.start();
+ p.bump(IDENT);
+ m.complete(p, NAME);
+ } else {
+ p.err_recover("expected a name", recovery);
+ }
+}
+
+fn name(p: &mut Parser<'_>) {
+ name_r(p, TokenSet::EMPTY);
+}
+
+fn name_ref(p: &mut Parser<'_>) {
+ if p.at(IDENT) {
+ let m = p.start();
+ p.bump(IDENT);
+ m.complete(p, NAME_REF);
+ } else {
+ p.err_and_bump("expected identifier");
+ }
+}
+
+fn name_ref_or_index(p: &mut Parser<'_>) {
+ assert!(p.at(IDENT) || p.at(INT_NUMBER));
+ let m = p.start();
+ p.bump_any();
+ m.complete(p, NAME_REF);
+}
+
+fn lifetime(p: &mut Parser<'_>) {
+ assert!(p.at(LIFETIME_IDENT));
+ let m = p.start();
+ p.bump(LIFETIME_IDENT);
+ m.complete(p, LIFETIME);
+}
+
+fn error_block(p: &mut Parser<'_>, message: &str) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.error(message);
+ p.bump(T!['{']);
+ expressions::expr_block_contents(p);
+ p.eat(T!['}']);
+ m.complete(p, ERROR);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
new file mode 100644
index 000000000..0cf6a16f8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/attributes.rs
@@ -0,0 +1,53 @@
+use super::*;
+
+pub(super) fn inner_attrs(p: &mut Parser<'_>) {
+ while p.at(T![#]) && p.nth(1) == T![!] {
+ attr(p, true);
+ }
+}
+
+pub(super) fn outer_attrs(p: &mut Parser<'_>) {
+ while p.at(T![#]) {
+ attr(p, false);
+ }
+}
+
+fn attr(p: &mut Parser<'_>, inner: bool) {
+ assert!(p.at(T![#]));
+
+ let attr = p.start();
+ p.bump(T![#]);
+
+ if inner {
+ p.bump(T![!]);
+ }
+
+ if p.eat(T!['[']) {
+ meta(p);
+
+ if !p.eat(T![']']) {
+ p.error("expected `]`");
+ }
+ } else {
+ p.error("expected `[`");
+ }
+ attr.complete(p, ATTR);
+}
+
+pub(super) fn meta(p: &mut Parser<'_>) {
+ let meta = p.start();
+ paths::use_path(p);
+
+ match p.current() {
+ T![=] => {
+ p.bump(T![=]);
+ if !expressions::expr(p) {
+ p.error("expected expression");
+ }
+ }
+ T!['('] | T!['['] | T!['{'] => items::token_tree(p),
+ _ => {}
+ }
+
+ meta.complete(p, META);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
new file mode 100644
index 000000000..e7402104e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -0,0 +1,625 @@
+mod atom;
+
+use super::*;
+
+pub(crate) use self::atom::{block_expr, match_arm_list};
+pub(super) use self::atom::{literal, LITERAL_FIRST};
+
+#[derive(PartialEq, Eq)]
+pub(super) enum Semicolon {
+ Required,
+ Optional,
+ Forbidden,
+}
+
+const EXPR_FIRST: TokenSet = LHS_FIRST;
+
+pub(super) fn expr(p: &mut Parser<'_>) -> bool {
+ let r = Restrictions { forbid_structs: false, prefer_stmt: false };
+ expr_bp(p, None, r, 1).is_some()
+}
+
+pub(super) fn expr_stmt(
+ p: &mut Parser<'_>,
+ m: Option<Marker>,
+) -> Option<(CompletedMarker, BlockLike)> {
+ let r = Restrictions { forbid_structs: false, prefer_stmt: true };
+ expr_bp(p, m, r, 1)
+}
+
+fn expr_no_struct(p: &mut Parser<'_>) {
+ let r = Restrictions { forbid_structs: true, prefer_stmt: false };
+ expr_bp(p, None, r, 1);
+}
+
+/// Parses the expression in `let pattern = expression`.
+/// It needs to be parsed with lower precedence than `&&`, so that
+/// `if let true = true && false` is parsed as `if (let true = true) && (false)`
+/// and not `if let true = (true && false)`.
+fn expr_let(p: &mut Parser<'_>) {
+ let r = Restrictions { forbid_structs: true, prefer_stmt: false };
+ expr_bp(p, None, r, 5);
+}
+
+pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
+ if p.eat(T![;]) {
+ return;
+ }
+
+ let m = p.start();
+ // test attr_on_expr_stmt
+ // fn foo() {
+ // #[A] foo();
+ // #[B] bar!{}
+ // #[C] #[D] {}
+ // #[D] return ();
+ // }
+ attributes::outer_attrs(p);
+
+ if p.at(T![let]) {
+ let_stmt(p, m, semicolon);
+ return;
+ }
+
+ // test block_items
+ // fn a() { fn b() {} }
+ let m = match items::opt_item(p, m) {
+ Ok(()) => return,
+ Err(m) => m,
+ };
+
+ if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) {
+ if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) {
+ // test no_semi_after_block
+ // fn foo() {
+ // if true {}
+ // loop {}
+ // match () {}
+ // while true {}
+ // for _ in () {}
+ // {}
+ // {}
+ // macro_rules! test {
+ // () => {}
+ // }
+ // test!{}
+ // }
+ let m = cm.precede(p);
+ match semicolon {
+ Semicolon::Required => {
+ if blocklike.is_block() {
+ p.eat(T![;]);
+ } else {
+ p.expect(T![;]);
+ }
+ }
+ Semicolon::Optional => {
+ p.eat(T![;]);
+ }
+ Semicolon::Forbidden => (),
+ }
+ m.complete(p, EXPR_STMT);
+ }
+ }
+
+ // test let_stmt
+ // fn f() { let x: i32 = 92; }
+ fn let_stmt(p: &mut Parser<'_>, m: Marker, with_semi: Semicolon) {
+ p.bump(T![let]);
+ patterns::pattern(p);
+ if p.at(T![:]) {
+ // test let_stmt_ascription
+ // fn f() { let x: i32; }
+ types::ascription(p);
+ }
+ if p.eat(T![=]) {
+ // test let_stmt_init
+ // fn f() { let x = 92; }
+ expressions::expr(p);
+ }
+
+ if p.at(T![else]) {
+ // test let_else
+ // fn f() { let Some(x) = opt else { return }; }
+
+ let m = p.start();
+ p.bump(T![else]);
+ block_expr(p);
+ m.complete(p, LET_ELSE);
+ }
+
+ match with_semi {
+ Semicolon::Forbidden => (),
+ Semicolon::Optional => {
+ p.eat(T![;]);
+ }
+ Semicolon::Required => {
+ p.expect(T![;]);
+ }
+ }
+ m.complete(p, LET_STMT);
+ }
+}
+
+pub(super) fn expr_block_contents(p: &mut Parser<'_>) {
+ attributes::inner_attrs(p);
+
+ while !p.at(EOF) && !p.at(T!['}']) {
+ // test nocontentexpr
+ // fn foo(){
+ // ;;;some_expr();;;;{;;;};;;;Ok(())
+ // }
+
+ // test nocontentexpr_after_item
+ // fn simple_function() {
+ // enum LocalEnum {
+ // One,
+ // Two,
+ // };
+ // fn f() {};
+ // struct S {};
+ // }
+ stmt(p, Semicolon::Required);
+ }
+}
+
+#[derive(Clone, Copy)]
+struct Restrictions {
+ forbid_structs: bool,
+ prefer_stmt: bool,
+}
+
+/// Binding powers of operators for a Pratt parser.
+///
+/// See <https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html>
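+///
+/// A couple of illustrative consequences of the table below (not exhaustive):
+/// `1 + 2 * 3` parses as `1 + (2 * 3)` because `*` (11) binds tighter than
+/// `+` (10), and `x as u32 + 1` parses as `(x as u32) + 1` because `as` (12)
+/// binds tightest of all.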
+#[rustfmt::skip]
+fn current_op(p: &Parser<'_>) -> (u8, SyntaxKind) {
+ const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]);
+ match p.current() {
+ T![|] if p.at(T![||]) => (3, T![||]),
+ T![|] if p.at(T![|=]) => (1, T![|=]),
+ T![|] => (6, T![|]),
+ T![>] if p.at(T![>>=]) => (1, T![>>=]),
+ T![>] if p.at(T![>>]) => (9, T![>>]),
+ T![>] if p.at(T![>=]) => (5, T![>=]),
+ T![>] => (5, T![>]),
+ T![=] if p.at(T![=>]) => NOT_AN_OP,
+ T![=] if p.at(T![==]) => (5, T![==]),
+ T![=] => (1, T![=]),
+ T![<] if p.at(T![<=]) => (5, T![<=]),
+ T![<] if p.at(T![<<=]) => (1, T![<<=]),
+ T![<] if p.at(T![<<]) => (9, T![<<]),
+ T![<] => (5, T![<]),
+ T![+] if p.at(T![+=]) => (1, T![+=]),
+ T![+] => (10, T![+]),
+ T![^] if p.at(T![^=]) => (1, T![^=]),
+ T![^] => (7, T![^]),
+ T![%] if p.at(T![%=]) => (1, T![%=]),
+ T![%] => (11, T![%]),
+ T![&] if p.at(T![&=]) => (1, T![&=]),
+ // If you update this, remember to update `expr_let()` too.
+ T![&] if p.at(T![&&]) => (4, T![&&]),
+ T![&] => (8, T![&]),
+ T![/] if p.at(T![/=]) => (1, T![/=]),
+ T![/] => (11, T![/]),
+ T![*] if p.at(T![*=]) => (1, T![*=]),
+ T![*] => (11, T![*]),
+ T![.] if p.at(T![..=]) => (2, T![..=]),
+ T![.] if p.at(T![..]) => (2, T![..]),
+ T![!] if p.at(T![!=]) => (5, T![!=]),
+ T![-] if p.at(T![-=]) => (1, T![-=]),
+ T![-] => (10, T![-]),
+ T![as] => (12, T![as]),
+
+ _ => NOT_AN_OP
+ }
+}
+
+// Parses expression with binding power of at least bp.
+fn expr_bp(
+ p: &mut Parser<'_>,
+ m: Option<Marker>,
+ mut r: Restrictions,
+ bp: u8,
+) -> Option<(CompletedMarker, BlockLike)> {
+ let m = m.unwrap_or_else(|| {
+ let m = p.start();
+ attributes::outer_attrs(p);
+ m
+ });
+ let mut lhs = match lhs(p, r) {
+ Some((lhs, blocklike)) => {
+ let lhs = lhs.extend_to(p, m);
+ if r.prefer_stmt && blocklike.is_block() {
+ // test stmt_bin_expr_ambiguity
+ // fn f() {
+ // let _ = {1} & 2;
+ // {1} &2;
+ // }
+ return Some((lhs, BlockLike::Block));
+ }
+ lhs
+ }
+ None => {
+ m.abandon(p);
+ return None;
+ }
+ };
+
+ loop {
+ let is_range = p.at(T![..]) || p.at(T![..=]);
+ let (op_bp, op) = current_op(p);
+ if op_bp < bp {
+ break;
+ }
+ // test as_precedence
+ // fn f() { let _ = &1 as *const i32; }
+ if p.at(T![as]) {
+ lhs = cast_expr(p, lhs);
+ continue;
+ }
+ let m = lhs.precede(p);
+ p.bump(op);
+
+ // test binop_resets_statementness
+ // fn f() { v = {1}&2; }
+ r = Restrictions { prefer_stmt: false, ..r };
+
+ if is_range {
+ // test postfix_range
+ // fn foo() {
+ // let x = 1..;
+ // match 1.. { _ => () };
+ // match a.b()..S { _ => () };
+ // }
+ let has_trailing_expression =
+ p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{']));
+ if !has_trailing_expression {
+ // no RHS
+ lhs = m.complete(p, RANGE_EXPR);
+ break;
+ }
+ }
+
+ expr_bp(p, None, Restrictions { prefer_stmt: false, ..r }, op_bp + 1);
+ lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
+ }
+ Some((lhs, BlockLike::NotBlock))
+}
+
+const LHS_FIRST: TokenSet =
+ atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
+
+fn lhs(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
+ let m;
+ let kind = match p.current() {
+ // test ref_expr
+ // fn foo() {
+ // // reference operator
+ // let _ = &1;
+ // let _ = &mut &f();
+ // let _ = &raw;
+ // let _ = &raw.0;
+ // // raw reference operator
+ // let _ = &raw mut foo;
+ // let _ = &raw const foo;
+ // }
+ T![&] => {
+ m = p.start();
+ p.bump(T![&]);
+ if p.at_contextual_kw(T![raw]) && (p.nth_at(1, T![mut]) || p.nth_at(1, T![const])) {
+ p.bump_remap(T![raw]);
+ p.bump_any();
+ } else {
+ p.eat(T![mut]);
+ }
+ REF_EXPR
+ }
+ // test unary_expr
+ // fn foo() {
+ // **&1;
+ // !!true;
+ // --1;
+ // }
+ T![*] | T![!] | T![-] => {
+ m = p.start();
+ p.bump_any();
+ PREFIX_EXPR
+ }
+ _ => {
+ // test full_range_expr
+ // fn foo() { xs[..]; }
+ for op in [T![..=], T![..]] {
+ if p.at(op) {
+ m = p.start();
+ p.bump(op);
+ if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) {
+ expr_bp(p, None, r, 2);
+ }
+ let cm = m.complete(p, RANGE_EXPR);
+ return Some((cm, BlockLike::NotBlock));
+ }
+ }
+
+ // test expression_after_block
+ // fn foo() {
+ // let mut p = F{x: 5};
+ // {p}.x = 10;
+ // }
+ let (lhs, blocklike) = atom::atom_expr(p, r)?;
+ let (cm, block_like) =
+ postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block()));
+ return Some((cm, block_like));
+ }
+ };
+ // parse the interior of the unary expression
+ expr_bp(p, None, r, 255);
+ let cm = m.complete(p, kind);
+ Some((cm, BlockLike::NotBlock))
+}
+
+fn postfix_expr(
+ p: &mut Parser<'_>,
+ mut lhs: CompletedMarker,
+ // Calls are disallowed if the lhs is block-like and we prefer statements, because the call cannot be disambiguated from a tuple
+ // E.g. `while true {break}();` is parsed as
+ // `while true {break}; ();`
+ mut block_like: BlockLike,
+ mut allow_calls: bool,
+) -> (CompletedMarker, BlockLike) {
+ loop {
+ lhs = match p.current() {
+ // test stmt_postfix_expr_ambiguity
+ // fn foo() {
+ // match () {
+ // _ => {}
+ // () => {}
+ // [] => {}
+ // }
+ // }
+ T!['('] if allow_calls => call_expr(p, lhs),
+ T!['['] if allow_calls => index_expr(p, lhs),
+ T![.] => match postfix_dot_expr(p, lhs) {
+ Ok(it) => it,
+ Err(it) => {
+ lhs = it;
+ break;
+ }
+ },
+ T![?] => try_expr(p, lhs),
+ _ => break,
+ };
+ allow_calls = true;
+ block_like = BlockLike::NotBlock;
+ }
+ return (lhs, block_like);
+
+ fn postfix_dot_expr(
+ p: &mut Parser<'_>,
+ lhs: CompletedMarker,
+ ) -> Result<CompletedMarker, CompletedMarker> {
+ assert!(p.at(T![.]));
+ if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) {
+ return Ok(method_call_expr(p, lhs));
+ }
+
+ // test await_expr
+ // fn foo() {
+ // x.await;
+ // x.0.await;
+ // x.0().await?.hello();
+ // }
+ if p.nth(1) == T![await] {
+ let m = lhs.precede(p);
+ p.bump(T![.]);
+ p.bump(T![await]);
+ return Ok(m.complete(p, AWAIT_EXPR));
+ }
+
+ if p.at(T![..=]) || p.at(T![..]) {
+ return Err(lhs);
+ }
+
+ Ok(field_expr(p, lhs))
+ }
+}
+
+// test call_expr
+// fn foo() {
+// let _ = f();
+// let _ = f()(1)(1, 2,);
+// let _ = f(<Foo>::func());
+// f(<Foo as Trait>::func());
+// }
+fn call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+ assert!(p.at(T!['(']));
+ let m = lhs.precede(p);
+ arg_list(p);
+ m.complete(p, CALL_EXPR)
+}
+
+// test index_expr
+// fn foo() {
+// x[1][2];
+// }
+fn index_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+ assert!(p.at(T!['[']));
+ let m = lhs.precede(p);
+ p.bump(T!['[']);
+ expr(p);
+ p.expect(T![']']);
+ m.complete(p, INDEX_EXPR)
+}
+
+// test method_call_expr
+// fn foo() {
+// x.foo();
+// y.bar::<T>(1, 2,);
+// }
+fn method_call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+ assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])));
+ let m = lhs.precede(p);
+ p.bump_any();
+ name_ref(p);
+ generic_args::opt_generic_arg_list(p, true);
+ if p.at(T!['(']) {
+ arg_list(p);
+ }
+ m.complete(p, METHOD_CALL_EXPR)
+}
+
+// test field_expr
+// fn foo() {
+// x.foo;
+// x.0.bar;
+// x.0();
+// }
+fn field_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+ assert!(p.at(T![.]));
+ let m = lhs.precede(p);
+ p.bump(T![.]);
+ if p.at(IDENT) || p.at(INT_NUMBER) {
+ name_ref_or_index(p);
+ } else if p.at(FLOAT_NUMBER) {
+ // FIXME: How to recover and instead parse INT + T![.]?
+ p.bump_any();
+ } else {
+ p.error("expected field name or number");
+ }
+ m.complete(p, FIELD_EXPR)
+}
+
+// test try_expr
+// fn foo() {
+// x?;
+// }
+fn try_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+ assert!(p.at(T![?]));
+ let m = lhs.precede(p);
+ p.bump(T![?]);
+ m.complete(p, TRY_EXPR)
+}
+
+// test cast_expr
+// fn foo() {
+// 82 as i32;
+// 81 as i8 + 1;
+// 79 as i16 - 1;
+// 0x36 as u8 <= 0x37;
+// }
+fn cast_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
+ assert!(p.at(T![as]));
+ let m = lhs.precede(p);
+ p.bump(T![as]);
+ // Use type_no_bounds(), because cast expressions are not
+ // allowed to have bounds.
+ types::type_no_bounds(p);
+ m.complete(p, CAST_EXPR)
+}
+
+fn arg_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ while !p.at(T![')']) && !p.at(EOF) {
+ // test arg_with_attr
+ // fn main() {
+ // foo(#[attr] 92)
+ // }
+ if !expr(p) {
+ break;
+ }
+ if !p.at(T![')']) && !p.expect(T![,]) {
+ break;
+ }
+ }
+ p.eat(T![')']);
+ m.complete(p, ARG_LIST);
+}
+
+// test path_expr
+// fn foo() {
+// let _ = a;
+// let _ = a::b;
+// let _ = ::a::<b>;
+// let _ = format!();
+// }
+fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike) {
+ assert!(paths::is_path_start(p));
+ let m = p.start();
+ paths::expr_path(p);
+ match p.current() {
+ T!['{'] if !r.forbid_structs => {
+ record_expr_field_list(p);
+ (m.complete(p, RECORD_EXPR), BlockLike::NotBlock)
+ }
+ T![!] if !p.at(T![!=]) => {
+ let block_like = items::macro_call_after_excl(p);
+ (m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_EXPR), block_like)
+ }
+ _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock),
+ }
+}
+
+// test record_lit
+// fn foo() {
+// S {};
+// S { x, y: 32, };
+// S { x, y: 32, ..Default::default() };
+// TupleStruct { 0: 1 };
+// }
+pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ let m = p.start();
+ // test record_literal_field_with_attr
+ // fn main() {
+ // S { #[cfg(test)] field: 1 }
+ // }
+ attributes::outer_attrs(p);
+
+ match p.current() {
+ IDENT | INT_NUMBER => {
+ // test_err record_literal_before_ellipsis_recovery
+ // fn main() {
+ // S { field ..S::default() }
+ // }
+ if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) {
+ name_ref_or_index(p);
+ p.expect(T![:]);
+ }
+ expr(p);
+ m.complete(p, RECORD_EXPR_FIELD);
+ }
+ T![.] if p.at(T![..]) => {
+ m.abandon(p);
+ p.bump(T![..]);
+
+ // test destructuring_assignment_struct_rest_pattern
+ // fn foo() {
+ // S { .. } = S {};
+ // }
+
+ // We permit `.. }` on the left-hand side of a destructuring assignment.
+ if !p.at(T!['}']) {
+ expr(p);
+ }
+ }
+ T!['{'] => {
+ error_block(p, "expected a field");
+ m.abandon(p);
+ }
+ _ => {
+ p.err_and_bump("expected identifier");
+ m.abandon(p);
+ }
+ }
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, RECORD_EXPR_FIELD_LIST);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
new file mode 100644
index 000000000..99f42a266
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
@@ -0,0 +1,643 @@
+use super::*;
+
+// test expr_literals
+// fn foo() {
+// let _ = true;
+// let _ = false;
+// let _ = 1;
+// let _ = 2.0;
+// let _ = b'a';
+// let _ = 'b';
+// let _ = "c";
+// let _ = r"d";
+// let _ = b"e";
+// let _ = br"f";
+// }
+pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
+ T![true],
+ T![false],
+ INT_NUMBER,
+ FLOAT_NUMBER,
+ BYTE,
+ CHAR,
+ STRING,
+ BYTE_STRING,
+]);
+
+pub(crate) fn literal(p: &mut Parser<'_>) -> Option<CompletedMarker> {
+ if !p.at_ts(LITERAL_FIRST) {
+ return None;
+ }
+ let m = p.start();
+ p.bump_any();
+ Some(m.complete(p, LITERAL))
+}
+
+// E.g. after the `break` in `if break {}`, this should not match
+pub(super) const ATOM_EXPR_FIRST: TokenSet =
+ LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+ T!['('],
+ T!['{'],
+ T!['['],
+ T![|],
+ T![move],
+ T![box],
+ T![if],
+ T![while],
+ T![match],
+ T![unsafe],
+ T![return],
+ T![yield],
+ T![break],
+ T![continue],
+ T![async],
+ T![try],
+ T![const],
+ T![loop],
+ T![for],
+ LIFETIME_IDENT,
+ ]));
+
+const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]);
+
+pub(super) fn atom_expr(
+ p: &mut Parser<'_>,
+ r: Restrictions,
+) -> Option<(CompletedMarker, BlockLike)> {
+ if let Some(m) = literal(p) {
+ return Some((m, BlockLike::NotBlock));
+ }
+ if paths::is_path_start(p) {
+ return Some(path_expr(p, r));
+ }
+ let la = p.nth(1);
+ let done = match p.current() {
+ T!['('] => tuple_expr(p),
+ T!['['] => array_expr(p),
+ T![if] => if_expr(p),
+ T![let] => let_expr(p),
+ T![_] => {
+ // test destructuring_assignment_wildcard_pat
+ // fn foo() {
+ // _ = 1;
+ // Some(_) = None;
+ // }
+ let m = p.start();
+ p.bump(T![_]);
+ m.complete(p, UNDERSCORE_EXPR)
+ }
+ T![loop] => loop_expr(p, None),
+ T![box] => box_expr(p, None),
+ T![while] => while_expr(p, None),
+ T![try] => try_block_expr(p, None),
+ T![match] => match_expr(p),
+ T![return] => return_expr(p),
+ T![yield] => yield_expr(p),
+ T![continue] => continue_expr(p),
+ T![break] => break_expr(p, r),
+
+ LIFETIME_IDENT if la == T![:] => {
+ let m = p.start();
+ label(p);
+ match p.current() {
+ T![loop] => loop_expr(p, Some(m)),
+ T![for] => for_expr(p, Some(m)),
+ T![while] => while_expr(p, Some(m)),
+ // test labeled_block
+ // fn f() { 'label: {}; }
+ T!['{'] => {
+ stmt_list(p);
+ m.complete(p, BLOCK_EXPR)
+ }
+ _ => {
+ // test_err misplaced_label_err
+ // fn main() {
+ // 'loop: impl
+ // }
+ p.error("expected a loop");
+ m.complete(p, ERROR);
+ return None;
+ }
+ }
+ }
+ // test effect_blocks
+ // fn f() { unsafe { } }
+ // fn f() { const { } }
+ // fn f() { async { } }
+ // fn f() { async move { } }
+ T![const] | T![unsafe] | T![async] if la == T!['{'] => {
+ let m = p.start();
+ p.bump_any();
+ stmt_list(p);
+ m.complete(p, BLOCK_EXPR)
+ }
+ T![async] if la == T![move] && p.nth(2) == T!['{'] => {
+ let m = p.start();
+ p.bump(T![async]);
+ p.eat(T![move]);
+ stmt_list(p);
+ m.complete(p, BLOCK_EXPR)
+ }
+ T!['{'] => {
+ // test for_range_from
+ // fn foo() {
+ // for x in 0 .. {
+ // break;
+ // }
+ // }
+ let m = p.start();
+ stmt_list(p);
+ m.complete(p, BLOCK_EXPR)
+ }
+
+ T![static] | T![async] | T![move] | T![|] => closure_expr(p),
+ T![for] if la == T![<] => closure_expr(p),
+ T![for] => for_expr(p, None),
+
+ _ => {
+ p.err_recover("expected expression", EXPR_RECOVERY_SET);
+ return None;
+ }
+ };
+ let blocklike = match done.kind() {
+ IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR => BlockLike::Block,
+ _ => BlockLike::NotBlock,
+ };
+ Some((done, blocklike))
+}
+
+// test tuple_expr
+// fn foo() {
+// ();
+// (1);
+// (1,);
+// }
+fn tuple_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.expect(T!['(']);
+
+ let mut saw_comma = false;
+ let mut saw_expr = false;
+ while !p.at(EOF) && !p.at(T![')']) {
+ saw_expr = true;
+
+ // test tuple_attrs
+ // const A: (i64, i64) = (1, #[cfg(test)] 2);
+ if !expr(p) {
+ break;
+ }
+
+ if !p.at(T![')']) {
+ saw_comma = true;
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T![')']);
+ m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR })
+}
+
+// test array_expr
+// fn foo() {
+// [];
+// [1];
+// [1, 2,];
+// [1; 2];
+// }
+fn array_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['[']));
+ let m = p.start();
+
+ let mut n_exprs = 0u32;
+ let mut has_semi = false;
+
+ p.bump(T!['[']);
+ while !p.at(EOF) && !p.at(T![']']) {
+ n_exprs += 1;
+
+ // test array_attrs
+ // const A: &[i64] = &[1, #[cfg(test)] 2];
+ if !expr(p) {
+ break;
+ }
+
+ if n_exprs == 1 && p.eat(T![;]) {
+ has_semi = true;
+ continue;
+ }
+
+ if has_semi || !p.at(T![']']) && !p.expect(T![,]) {
+ break;
+ }
+ }
+ p.expect(T![']']);
+
+ m.complete(p, ARRAY_EXPR)
+}
+
+// test lambda_expr
+// fn foo() {
+// || ();
+// || -> i32 { 92 };
+// |x| x;
+// move |x: i32,| x;
+// async || {};
+// move || {};
+// async move || {};
+// static || {};
+// static move || {};
+// static async || {};
+// static async move || {};
+// for<'a> || {};
+// for<'a> move || {};
+// }
+fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(match p.current() {
+ T![static] | T![async] | T![move] | T![|] => true,
+ T![for] => p.nth(1) == T![<],
+ _ => false,
+ });
+
+ let m = p.start();
+
+ if p.at(T![for]) {
+ types::for_binder(p);
+ }
+
+ p.eat(T![static]);
+ p.eat(T![async]);
+ p.eat(T![move]);
+
+ if !p.at(T![|]) {
+ p.error("expected `|`");
+ return m.complete(p, CLOSURE_EXPR);
+ }
+ params::param_list_closure(p);
+ if opt_ret_type(p) {
+ // test lambda_ret_block
+ // fn main() { || -> i32 { 92 }(); }
+ block_expr(p);
+ } else if p.at_ts(EXPR_FIRST) {
+ expr(p);
+ } else {
+ p.error("expected expression");
+ }
+ m.complete(p, CLOSURE_EXPR)
+}
+
+// test if_expr
+// fn foo() {
+// if true {};
+// if true {} else {};
+// if true {} else if false {} else {};
+// if S {};
+// if { true } { } else { };
+// }
+fn if_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![if]));
+ let m = p.start();
+ p.bump(T![if]);
+ expr_no_struct(p);
+ block_expr(p);
+ if p.at(T![else]) {
+ p.bump(T![else]);
+ if p.at(T![if]) {
+ if_expr(p);
+ } else {
+ block_expr(p);
+ }
+ }
+ m.complete(p, IF_EXPR)
+}
+
+// test label
+// fn foo() {
+// 'a: loop {}
+// 'b: while true {}
+// 'c: for x in () {}
+// }
+fn label(p: &mut Parser<'_>) {
+ assert!(p.at(LIFETIME_IDENT) && p.nth(1) == T![:]);
+ let m = p.start();
+ lifetime(p);
+ p.bump_any();
+ m.complete(p, LABEL);
+}
+
+// test loop_expr
+// fn foo() {
+// loop {};
+// }
+fn loop_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+ assert!(p.at(T![loop]));
+ let m = m.unwrap_or_else(|| p.start());
+ p.bump(T![loop]);
+ block_expr(p);
+ m.complete(p, LOOP_EXPR)
+}
+
+// test while_expr
+// fn foo() {
+// while true {};
+// while let Some(x) = it.next() {};
+// while { true } {};
+// }
+fn while_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+ assert!(p.at(T![while]));
+ let m = m.unwrap_or_else(|| p.start());
+ p.bump(T![while]);
+ expr_no_struct(p);
+ block_expr(p);
+ m.complete(p, WHILE_EXPR)
+}
+
+// test for_expr
+// fn foo() {
+// for x in [] {};
+// }
+fn for_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+ assert!(p.at(T![for]));
+ let m = m.unwrap_or_else(|| p.start());
+ p.bump(T![for]);
+ patterns::pattern(p);
+ p.expect(T![in]);
+ expr_no_struct(p);
+ block_expr(p);
+ m.complete(p, FOR_EXPR)
+}
+
+// test let_expr
+// fn foo() {
+// if let Some(_) = None && true {}
+// while 1 == 5 && (let None = None) {}
+// }
+fn let_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ let m = p.start();
+ p.bump(T![let]);
+ patterns::pattern_top(p);
+ p.expect(T![=]);
+ expr_let(p);
+ m.complete(p, LET_EXPR)
+}
+
+// test match_expr
+// fn foo() {
+// match () { };
+// match S {};
+// match { } { _ => () };
+// match { S {} } {};
+// }
+fn match_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![match]));
+ let m = p.start();
+ p.bump(T![match]);
+ expr_no_struct(p);
+ if p.at(T!['{']) {
+ match_arm_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, MATCH_EXPR)
+}
+
+pub(crate) fn match_arm_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.eat(T!['{']);
+
+ // test match_arms_inner_attribute
+ // fn foo() {
+ // match () {
+ // #![doc("Inner attribute")]
+ // #![doc("Can be")]
+ // #![doc("Stacked")]
+ // _ => (),
+ // }
+ // }
+ attributes::inner_attrs(p);
+
+ while !p.at(EOF) && !p.at(T!['}']) {
+ if p.at(T!['{']) {
+ error_block(p, "expected match arm");
+ continue;
+ }
+ match_arm(p);
+ }
+ p.expect(T!['}']);
+ m.complete(p, MATCH_ARM_LIST);
+}
+
+// test match_arm
+// fn foo() {
+// match () {
+// _ => (),
+// _ if Test > Test{field: 0} => (),
+// X | Y if Z => (),
+// | X | Y if Z => (),
+// | X => (),
+// };
+// }
+fn match_arm(p: &mut Parser<'_>) {
+ let m = p.start();
+ // test match_arms_outer_attributes
+ // fn foo() {
+ // match () {
+ // #[cfg(feature = "some")]
+ // _ => (),
+ // #[cfg(feature = "other")]
+ // _ => (),
+ // #[cfg(feature = "many")]
+ // #[cfg(feature = "attributes")]
+ // #[cfg(feature = "before")]
+ // _ => (),
+ // }
+ // }
+ attributes::outer_attrs(p);
+
+ patterns::pattern_top_r(p, TokenSet::EMPTY);
+ if p.at(T![if]) {
+ match_guard(p);
+ }
+ p.expect(T![=>]);
+ let blocklike = match expr_stmt(p, None) {
+ Some((_, blocklike)) => blocklike,
+ None => BlockLike::NotBlock,
+ };
+
+ // test match_arms_commas
+ // fn foo() {
+ // match () {
+ // _ => (),
+ // _ => {}
+ // _ => ()
+ // }
+ // }
+ if !p.eat(T![,]) && !blocklike.is_block() && !p.at(T!['}']) {
+ p.error("expected `,`");
+ }
+ m.complete(p, MATCH_ARM);
+}
+
+// test match_guard
+// fn foo() {
+// match () {
+// _ if foo => (),
+// _ if let foo = bar => (),
+// }
+// }
+fn match_guard(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![if]));
+ let m = p.start();
+ p.bump(T![if]);
+ expr(p);
+ m.complete(p, MATCH_GUARD)
+}
+
+// test block
+// fn a() {}
+// fn b() { let _ = 1; }
+// fn c() { 1; 2; }
+// fn d() { 1; 2 }
+pub(crate) fn block_expr(p: &mut Parser<'_>) {
+ if !p.at(T!['{']) {
+ p.error("expected a block");
+ return;
+ }
+ let m = p.start();
+ stmt_list(p);
+ m.complete(p, BLOCK_EXPR);
+}
+
+fn stmt_list(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ expr_block_contents(p);
+ p.expect(T!['}']);
+ m.complete(p, STMT_LIST)
+}
+
+// test return_expr
+// fn foo() {
+// return;
+// return 92;
+// }
+fn return_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![return]));
+ let m = p.start();
+ p.bump(T![return]);
+ if p.at_ts(EXPR_FIRST) {
+ expr(p);
+ }
+ m.complete(p, RETURN_EXPR)
+}
+// test yield_expr
+// fn foo() {
+// yield;
+// yield 1;
+// }
+fn yield_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![yield]));
+ let m = p.start();
+ p.bump(T![yield]);
+ if p.at_ts(EXPR_FIRST) {
+ expr(p);
+ }
+ m.complete(p, YIELD_EXPR)
+}
+
+// test continue_expr
+// fn foo() {
+// loop {
+// continue;
+// continue 'l;
+// }
+// }
+fn continue_expr(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![continue]));
+ let m = p.start();
+ p.bump(T![continue]);
+ if p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ }
+ m.complete(p, CONTINUE_EXPR)
+}
+
+// test break_expr
+// fn foo() {
+// loop {
+// break;
+// break 'l;
+// break 92;
+// break 'l 92;
+// }
+// }
+fn break_expr(p: &mut Parser<'_>, r: Restrictions) -> CompletedMarker {
+ assert!(p.at(T![break]));
+ let m = p.start();
+ p.bump(T![break]);
+ if p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ }
+ // test break_ambiguity
+ // fn foo(){
+ // if break {}
+ // while break {}
+ // for i in break {}
+ // match break {}
+ // }
+ if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) {
+ expr(p);
+ }
+ m.complete(p, BREAK_EXPR)
+}
+
+// test try_block_expr
+// fn foo() {
+// let _ = try {};
+// }
+fn try_block_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+ assert!(p.at(T![try]));
+ let m = m.unwrap_or_else(|| p.start());
+ // Special-case `try!` as a macro.
+ // This is a hack until we do proper edition support.
+ if p.nth_at(1, T![!]) {
+ // test try_macro_fallback
+ // fn foo() { try!(Ok(())); }
+ let macro_call = p.start();
+ let path = p.start();
+ let path_segment = p.start();
+ let name_ref = p.start();
+ p.bump_remap(IDENT);
+ name_ref.complete(p, NAME_REF);
+ path_segment.complete(p, PATH_SEGMENT);
+ path.complete(p, PATH);
+ let _block_like = items::macro_call_after_excl(p);
+ macro_call.complete(p, MACRO_CALL);
+ return m.complete(p, MACRO_EXPR);
+ }
+
+ p.bump(T![try]);
+ if p.at(T!['{']) {
+ stmt_list(p);
+ } else {
+ p.error("expected a block");
+ }
+ m.complete(p, BLOCK_EXPR)
+}
+
+// test box_expr
+// fn foo() {
+// let x = box 1i32;
+// let y = (box 1i32, box 2i32);
+// let z = Foo(box 1i32, box 2i32);
+// }
+fn box_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
+ assert!(p.at(T![box]));
+ let m = m.unwrap_or_else(|| p.start());
+ p.bump(T![box]);
+ if p.at_ts(EXPR_FIRST) {
+ expr(p);
+ }
+ m.complete(p, BOX_EXPR)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
new file mode 100644
index 000000000..c438943a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
@@ -0,0 +1,131 @@
+use super::*;
+
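+/// Parses an optional generic argument list. If `colon_colon_required` is
+/// true, only the turbofish form `::<...>` is accepted; otherwise a plain
+/// `<...>` is accepted as well (but not `<=`, which is an operator).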
+pub(super) fn opt_generic_arg_list(p: &mut Parser<'_>, colon_colon_required: bool) {
+ let m;
+ if p.at(T![::]) && p.nth(2) == T![<] {
+ m = p.start();
+ p.bump(T![::]);
+ p.bump(T![<]);
+ } else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] {
+ m = p.start();
+ p.bump(T![<]);
+ } else {
+ return;
+ }
+
+ while !p.at(EOF) && !p.at(T![>]) {
+ generic_arg(p);
+ if !p.at(T![>]) && !p.expect(T![,]) {
+ break;
+ }
+ }
+ p.expect(T![>]);
+ m.complete(p, GENERIC_ARG_LIST);
+}
+
+// test generic_arg
+// type T = S<i32>;
+fn generic_arg(p: &mut Parser<'_>) {
+ match p.current() {
+ LIFETIME_IDENT => lifetime_arg(p),
+ T!['{'] | T![true] | T![false] | T![-] => const_arg(p),
+ k if k.is_literal() => const_arg(p),
+ // test associated_type_bounds
+ // fn print_all<T: Iterator<Item, Item::Item, Item::<true>, Item: Display, Item<'a> = Item>>(printables: T) {}
+
+ // test macro_inside_generic_arg
+ // type A = Foo<syn::Token![_]>;
+ IDENT if [T![<], T![=], T![:]].contains(&p.nth(1)) && !p.nth_at(1, T![::]) => {
+ let m = p.start();
+ name_ref(p);
+ opt_generic_arg_list(p, false);
+ match p.current() {
+ T![=] => {
+ p.bump_any();
+ if types::TYPE_FIRST.contains(p.current()) {
+ // test assoc_type_eq
+ // type T = StreamingIterator<Item<'a> = &'a T>;
+ types::type_(p);
+ } else {
+ // test assoc_const_eq
+ // fn foo<F: Foo<N=3>>() {}
+ // const TEST: usize = 3;
+ // fn bar<F: Foo<N={TEST}>>() {}
+ const_arg(p);
+ }
+ m.complete(p, ASSOC_TYPE_ARG);
+ }
+ // test assoc_type_bound
+ // type T = StreamingIterator<Item<'a>: Clone>;
+ T![:] if !p.at(T![::]) => {
+ generic_params::bounds(p);
+ m.complete(p, ASSOC_TYPE_ARG);
+ }
+ _ => {
+ let m = m.complete(p, PATH_SEGMENT).precede(p).complete(p, PATH);
+ let m = paths::type_path_for_qualifier(p, m);
+ m.precede(p).complete(p, PATH_TYPE).precede(p).complete(p, TYPE_ARG);
+ }
+ }
+ }
+ _ => type_arg(p),
+ }
+}
+
+// test lifetime_arg
+// type T = S<'static>;
+fn lifetime_arg(p: &mut Parser<'_>) {
+ let m = p.start();
+ lifetime(p);
+ m.complete(p, LIFETIME_ARG);
+}
+
+pub(super) fn const_arg_expr(p: &mut Parser<'_>) {
+ // The tests in here are really for `const_arg`, which wraps the content
+ // in a CONST_ARG node.
+ match p.current() {
+ // test const_arg_block
+ // type T = S<{90 + 2}>;
+ T!['{'] => {
+ expressions::block_expr(p);
+ }
+ // test const_arg_literal
+ // type T = S<"hello", 0xdeadbeef>;
+ k if k.is_literal() => {
+ expressions::literal(p);
+ }
+ // test const_arg_bool_literal
+ // type T = S<true>;
+ T![true] | T![false] => {
+ expressions::literal(p);
+ }
+ // test const_arg_negative_number
+ // type T = S<-92>;
+ T![-] => {
+ let lm = p.start();
+ p.bump(T![-]);
+ expressions::literal(p);
+ lm.complete(p, PREFIX_EXPR);
+ }
+ _ => {
+ // This shouldn't be hit by `const_arg`
+ let lm = p.start();
+ paths::use_path(p);
+ lm.complete(p, PATH_EXPR);
+ }
+ }
+}
+
+// test const_arg
+// type T = S<92>;
+pub(super) fn const_arg(p: &mut Parser<'_>) {
+ let m = p.start();
+ const_arg_expr(p);
+ m.complete(p, CONST_ARG);
+}
+
+fn type_arg(p: &mut Parser<'_>) {
+ let m = p.start();
+ types::type_(p);
+ m.complete(p, TYPE_ARG);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
new file mode 100644
index 000000000..6db28ef13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
@@ -0,0 +1,242 @@
+use super::*;
+
+pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) {
+ if p.at(T![<]) {
+ generic_param_list(p);
+ }
+}
+
+// test generic_param_list
+// fn f<T: Clone>() {}
+fn generic_param_list(p: &mut Parser<'_>) {
+ assert!(p.at(T![<]));
+ let m = p.start();
+ p.bump(T![<]);
+
+ while !p.at(EOF) && !p.at(T![>]) {
+ generic_param(p);
+ if !p.at(T![>]) && !p.expect(T![,]) {
+ break;
+ }
+ }
+ p.expect(T![>]);
+ m.complete(p, GENERIC_PARAM_LIST);
+}
+
+fn generic_param(p: &mut Parser<'_>) {
+ let m = p.start();
+ // test generic_param_attribute
+ // fn foo<#[lt_attr] 'a, #[t_attr] T>() {}
+ attributes::outer_attrs(p);
+ match p.current() {
+ LIFETIME_IDENT => lifetime_param(p, m),
+ IDENT => type_param(p, m),
+ T![const] => const_param(p, m),
+ _ => {
+ m.abandon(p);
+ p.err_and_bump("expected type parameter");
+ }
+ }
+}
+
+// test lifetime_param
+// fn f<'a: 'b>() {}
+fn lifetime_param(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at(LIFETIME_IDENT));
+ lifetime(p);
+ if p.at(T![:]) {
+ lifetime_bounds(p);
+ }
+ m.complete(p, LIFETIME_PARAM);
+}
+
+// test type_param
+// fn f<T: Clone>() {}
+fn type_param(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at(IDENT));
+ name(p);
+ if p.at(T![:]) {
+ bounds(p);
+ }
+ if p.at(T![=]) {
+ // test type_param_default
+ // struct S<T = i32>;
+ p.bump(T![=]);
+ types::type_(p);
+ }
+ m.complete(p, TYPE_PARAM);
+}
+
+// test const_param
+// struct S<const N: u32>;
+fn const_param(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![const]);
+ name(p);
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ p.error("missing type for const parameter");
+ }
+
+ if p.at(T![=]) {
+ // test const_param_default_literal
+ // struct A<const N: i32 = -1>;
+ p.bump(T![=]);
+
+ // test const_param_default_expression
+ // struct A<const N: i32 = { 1 }>;
+
+ // test const_param_default_path
+ // struct A<const N: i32 = i32::MAX>;
+ generic_args::const_arg_expr(p);
+ }
+
+ m.complete(p, CONST_PARAM);
+}
+
+fn lifetime_bounds(p: &mut Parser<'_>) {
+ assert!(p.at(T![:]));
+ p.bump(T![:]);
+ while p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ if !p.eat(T![+]) {
+ break;
+ }
+ }
+}
+
+// test type_param_bounds
+// struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
+pub(super) fn bounds(p: &mut Parser<'_>) {
+ assert!(p.at(T![:]));
+ p.bump(T![:]);
+ bounds_without_colon(p);
+}
+
+pub(super) fn bounds_without_colon(p: &mut Parser<'_>) {
+ let m = p.start();
+ bounds_without_colon_m(p, m);
+}
+
+pub(super) fn bounds_without_colon_m(p: &mut Parser<'_>, marker: Marker) -> CompletedMarker {
+ while type_bound(p) {
+ if !p.eat(T![+]) {
+ break;
+ }
+ }
+ marker.complete(p, TYPE_BOUND_LIST)
+}
+
+fn type_bound(p: &mut Parser<'_>) -> bool {
+ let m = p.start();
+ let has_paren = p.eat(T!['(']);
+ match p.current() {
+ LIFETIME_IDENT => lifetime(p),
+ T![for] => types::for_type(p, false),
+ T![?] if p.nth_at(1, T![for]) => {
+ // test question_for_type_trait_bound
+ // fn f<T>() where T: ?for<> Sized {}
+ p.bump_any();
+ types::for_type(p, false)
+ }
+ current => {
+ match current {
+ T![?] => p.bump_any(),
+ T![~] => {
+ p.bump_any();
+ p.expect(T![const]);
+ }
+ _ => (),
+ }
+ if paths::is_use_path_start(p) {
+ types::path_type_(p, false);
+ } else {
+ m.abandon(p);
+ return false;
+ }
+ }
+ }
+ if has_paren {
+ p.expect(T![')']);
+ }
+ m.complete(p, TYPE_BOUND);
+
+ true
+}
+
+// test where_clause
+// fn foo()
+// where
+// 'a: 'b + 'c,
+// T: Clone + Copy + 'static,
+// Iterator::Item: 'a,
+// <T as Iterator>::Item: 'a
+// {}
+pub(super) fn opt_where_clause(p: &mut Parser<'_>) {
+ if !p.at(T![where]) {
+ return;
+ }
+ let m = p.start();
+ p.bump(T![where]);
+
+ while is_where_predicate(p) {
+ where_predicate(p);
+
+ let comma = p.eat(T![,]);
+
+ match p.current() {
+ T!['{'] | T![;] | T![=] => break,
+ _ => (),
+ }
+
+ if !comma {
+ p.error("expected comma");
+ }
+ }
+
+ m.complete(p, WHERE_CLAUSE);
+
+ fn is_where_predicate(p: &mut Parser<'_>) -> bool {
+ match p.current() {
+ LIFETIME_IDENT => true,
+ T![impl] => false,
+ token => types::TYPE_FIRST.contains(token),
+ }
+ }
+}
+
+fn where_predicate(p: &mut Parser<'_>) {
+ let m = p.start();
+ match p.current() {
+ LIFETIME_IDENT => {
+ lifetime(p);
+ if p.at(T![:]) {
+ bounds(p);
+ } else {
+ p.error("expected colon");
+ }
+ }
+ T![impl] => {
+ p.error("expected lifetime or type");
+ }
+ _ => {
+ if p.at(T![for]) {
+ // test where_pred_for
+ // fn for_trait<F>()
+ // where
+ // for<'a> F: Fn(&'a str)
+ // { }
+ types::for_binder(p);
+ }
+
+ types::type_(p);
+
+ if p.at(T![:]) {
+ bounds(p);
+ } else {
+ p.error("expected colon");
+ }
+ }
+ }
+ m.complete(p, WHERE_PRED);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
new file mode 100644
index 000000000..5e0951bf8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -0,0 +1,465 @@
+mod consts;
+mod adt;
+mod traits;
+mod use_item;
+
+pub(crate) use self::{
+ adt::{record_field_list, variant_list},
+ expressions::{match_arm_list, record_expr_field_list},
+ traits::assoc_item_list,
+ use_item::use_tree_list,
+};
+use super::*;
+
+// test mod_contents
+// fn foo() {}
+// macro_rules! foo {}
+// foo::bar!();
+// super::baz! {}
+// struct S;
+pub(super) fn mod_contents(p: &mut Parser<'_>, stop_on_r_curly: bool) {
+ attributes::inner_attrs(p);
+ while !p.at(EOF) && !(p.at(T!['}']) && stop_on_r_curly) {
+ item_or_macro(p, stop_on_r_curly);
+ }
+}
+
+pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
+ T![fn],
+ T![struct],
+ T![enum],
+ T![impl],
+ T![trait],
+ T![const],
+ T![static],
+ T![let],
+ T![mod],
+ T![pub],
+ T![crate],
+ T![use],
+ T![macro],
+ T![;],
+]);
+
+pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
+ let m = p.start();
+ attributes::outer_attrs(p);
+
+ let m = match opt_item(p, m) {
+ Ok(()) => {
+ if p.at(T![;]) {
+ p.err_and_bump(
+ "expected item, found `;`\n\
+ consider removing this semicolon",
+ );
+ }
+ return;
+ }
+ Err(m) => m,
+ };
+
+ if paths::is_use_path_start(p) {
+ match macro_call(p) {
+ BlockLike::Block => (),
+ BlockLike::NotBlock => {
+ p.expect(T![;]);
+ }
+ }
+ m.complete(p, MACRO_CALL);
+ return;
+ }
+
+ m.abandon(p);
+ match p.current() {
+ T!['{'] => error_block(p, "expected an item"),
+ T!['}'] if !stop_on_r_curly => {
+ let e = p.start();
+ p.error("unmatched `}`");
+ p.bump(T!['}']);
+ e.complete(p, ERROR);
+ }
+ EOF | T!['}'] => p.error("expected an item"),
+ _ => p.err_and_bump("expected an item"),
+ }
+}
+
+/// Try to parse an item, completing `m` in case of success.
+pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
+ // test_err pub_expr
+ // fn foo() { pub 92; }
+ let has_visibility = opt_visibility(p, false);
+
+ let m = match opt_item_without_modifiers(p, m) {
+ Ok(()) => return Ok(()),
+ Err(m) => m,
+ };
+
+ let mut has_mods = false;
+ let mut has_extern = false;
+
+ // modifiers
+ if p.at(T![const]) && p.nth(1) != T!['{'] {
+ p.eat(T![const]);
+ has_mods = true;
+ }
+
+ // test_err async_without_semicolon
+ // fn foo() { let _ = async {} }
+ if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
+ p.eat(T![async]);
+ has_mods = true;
+ }
+
+ // test_err unsafe_block_in_mod
+ // fn foo(){} unsafe { } fn bar(){}
+ if p.at(T![unsafe]) && p.nth(1) != T!['{'] {
+ p.eat(T![unsafe]);
+ has_mods = true;
+ }
+
+ if p.at(T![extern]) {
+ has_extern = true;
+ has_mods = true;
+ abi(p);
+ }
+ if p.at_contextual_kw(T![auto]) && p.nth(1) == T![trait] {
+ p.bump_remap(T![auto]);
+ has_mods = true;
+ }
+
+ // test default_item
+ // default impl T for Foo {}
+ if p.at_contextual_kw(T![default]) {
+ match p.nth(1) {
+ T![fn] | T![type] | T![const] | T![impl] => {
+ p.bump_remap(T![default]);
+ has_mods = true;
+ }
+ // test default_unsafe_item
+ // default unsafe impl T for Foo {
+ // default unsafe fn foo() {}
+ // }
+ T![unsafe] if matches!(p.nth(2), T![impl] | T![fn]) => {
+ p.bump_remap(T![default]);
+ p.bump(T![unsafe]);
+ has_mods = true;
+ }
+ // test default_async_fn
+ // impl T for Foo {
+ // default async fn foo() {}
+ // }
+ T![async] => {
+ let mut maybe_fn = p.nth(2);
+ let is_unsafe = if matches!(maybe_fn, T![unsafe]) {
+ // test default_async_unsafe_fn
+ // impl T for Foo {
+ // default async unsafe fn foo() {}
+ // }
+ maybe_fn = p.nth(3);
+ true
+ } else {
+ false
+ };
+
+ if matches!(maybe_fn, T![fn]) {
+ p.bump_remap(T![default]);
+ p.bump(T![async]);
+ if is_unsafe {
+ p.bump(T![unsafe]);
+ }
+ has_mods = true;
+ }
+ }
+ _ => (),
+ }
+ }
+
+ // test existential_type
+ // existential type Foo: Fn() -> usize;
+ if p.at_contextual_kw(T![existential]) && p.nth(1) == T![type] {
+ p.bump_remap(T![existential]);
+ has_mods = true;
+ }
+
+ // items
+ match p.current() {
+ T![fn] => fn_(p, m),
+
+ T![const] if p.nth(1) != T!['{'] => consts::konst(p, m),
+
+ T![trait] => traits::trait_(p, m),
+ T![impl] => traits::impl_(p, m),
+
+ T![type] => type_alias(p, m),
+
+ // test extern_block
+ // unsafe extern "C" {}
+ // extern {}
+ T!['{'] if has_extern => {
+ extern_item_list(p);
+ m.complete(p, EXTERN_BLOCK);
+ }
+
+ _ if has_visibility || has_mods => {
+ if has_mods {
+ p.error("expected existential, fn, trait or impl");
+ } else {
+ p.error("expected an item");
+ }
+ m.complete(p, ERROR);
+ }
+
+ _ => return Err(m),
+ }
+ Ok(())
+}
+
+fn opt_item_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
+ let la = p.nth(1);
+ match p.current() {
+ T![extern] if la == T![crate] => extern_crate(p, m),
+ T![use] => use_item::use_(p, m),
+ T![mod] => mod_item(p, m),
+
+ T![type] => type_alias(p, m),
+ T![struct] => adt::strukt(p, m),
+ T![enum] => adt::enum_(p, m),
+ IDENT if p.at_contextual_kw(T![union]) && p.nth(1) == IDENT => adt::union(p, m),
+
+ T![macro] => macro_def(p, m),
+ IDENT if p.at_contextual_kw(T![macro_rules]) && p.nth(1) == BANG => macro_rules(p, m),
+
+ T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
+ T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m),
+
+ _ => return Err(m),
+ };
+ Ok(())
+}
+
+// test extern_crate
+// extern crate foo;
+fn extern_crate(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![extern]);
+ p.bump(T![crate]);
+
+ if p.at(T![self]) {
+ // test extern_crate_self
+ // extern crate self;
+ let m = p.start();
+ p.bump(T![self]);
+ m.complete(p, NAME_REF);
+ } else {
+ name_ref(p);
+ }
+
+ // test extern_crate_rename
+ // extern crate foo as bar;
+ opt_rename(p);
+ p.expect(T![;]);
+ m.complete(p, EXTERN_CRATE);
+}
+
+// test mod_item
+// mod a;
+pub(crate) fn mod_item(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![mod]);
+ name(p);
+ if p.at(T!['{']) {
+ // test mod_item_curly
+ // mod b { }
+ item_list(p);
+ } else if !p.eat(T![;]) {
+ p.error("expected `;` or `{`");
+ }
+ m.complete(p, MODULE);
+}
+
+// test type_alias
+// type Foo = Bar;
+fn type_alias(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![type]);
+
+ name(p);
+
+ // test type_item_type_params
+ // type Result<T> = ();
+ generic_params::opt_generic_param_list(p);
+
+ if p.at(T![:]) {
+ generic_params::bounds(p);
+ }
+
+ // test type_item_where_clause_deprecated
+ // type Foo where Foo: Copy = ();
+ generic_params::opt_where_clause(p);
+ if p.eat(T![=]) {
+ types::type_(p);
+ }
+
+ // test type_item_where_clause
+ // type Foo = () where Foo: Copy;
+ generic_params::opt_where_clause(p);
+
+ p.expect(T![;]);
+ m.complete(p, TYPE_ALIAS);
+}
+
+pub(crate) fn item_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ mod_contents(p, true);
+ p.expect(T!['}']);
+ m.complete(p, ITEM_LIST);
+}
+
+pub(crate) fn extern_item_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ mod_contents(p, true);
+ p.expect(T!['}']);
+ m.complete(p, EXTERN_ITEM_LIST);
+}
+
+fn macro_rules(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at_contextual_kw(T![macro_rules]));
+ p.bump_remap(T![macro_rules]);
+ p.expect(T![!]);
+
+ if p.at(IDENT) {
+ name(p);
+ }
+ // Special-case `macro_rules! try`.
+ // This is a hack until we do proper edition support
+
+ // test try_macro_rules
+ // macro_rules! try { () => {} }
+ if p.at(T![try]) {
+ let m = p.start();
+ p.bump_remap(IDENT);
+ m.complete(p, NAME);
+ }
+
+ match p.current() {
+ // test macro_rules_non_brace
+ // macro_rules! m ( ($i:ident) => {} );
+ // macro_rules! m [ ($i:ident) => {} ];
+ T!['['] | T!['('] => {
+ token_tree(p);
+ p.expect(T![;]);
+ }
+ T!['{'] => token_tree(p),
+ _ => p.error("expected `{`, `[`, `(`"),
+ }
+ m.complete(p, MACRO_RULES);
+}
+
+// test macro_def
+// macro m($i:ident) {}
+fn macro_def(p: &mut Parser<'_>, m: Marker) {
+ p.expect(T![macro]);
+ name_r(p, ITEM_RECOVERY_SET);
+ if p.at(T!['{']) {
+ // test macro_def_curly
+ // macro m { ($i:ident) => {} }
+ token_tree(p);
+ } else if p.at(T!['(']) {
+ let m = p.start();
+ token_tree(p);
+ match p.current() {
+ T!['{'] | T!['['] | T!['('] => token_tree(p),
+ _ => p.error("expected `{`, `[`, `(`"),
+ }
+ m.complete(p, TOKEN_TREE);
+ } else {
+ p.error("unmatched `(`");
+ }
+
+ m.complete(p, MACRO_DEF);
+}
+
+// test fn
+// fn foo() {}
+fn fn_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![fn]);
+
+ name_r(p, ITEM_RECOVERY_SET);
+ // test function_type_params
+ // fn foo<T: Clone + Copy>(){}
+ generic_params::opt_generic_param_list(p);
+
+ if p.at(T!['(']) {
+ params::param_list_fn_def(p);
+ } else {
+ p.error("expected function arguments");
+ }
+ // test function_ret_type
+ // fn foo() {}
+ // fn bar() -> () {}
+ opt_ret_type(p);
+
+ // test function_where_clause
+ // fn foo<T>() where T: Copy {}
+ generic_params::opt_where_clause(p);
+
+ if p.at(T![;]) {
+ // test fn_decl
+ // trait T { fn foo(); }
+ p.bump(T![;]);
+ } else {
+ expressions::block_expr(p);
+ }
+ m.complete(p, FN);
+}
+
+fn macro_call(p: &mut Parser<'_>) -> BlockLike {
+ assert!(paths::is_use_path_start(p));
+ paths::use_path(p);
+ macro_call_after_excl(p)
+}
+
+pub(super) fn macro_call_after_excl(p: &mut Parser<'_>) -> BlockLike {
+ p.expect(T![!]);
+
+ match p.current() {
+ T!['{'] => {
+ token_tree(p);
+ BlockLike::Block
+ }
+ T!['('] | T!['['] => {
+ token_tree(p);
+ BlockLike::NotBlock
+ }
+ _ => {
+ p.error("expected `{`, `[`, `(`");
+ BlockLike::NotBlock
+ }
+ }
+}
+
+pub(crate) fn token_tree(p: &mut Parser<'_>) {
+ let closing_paren_kind = match p.current() {
+ T!['{'] => T!['}'],
+ T!['('] => T![')'],
+ T!['['] => T![']'],
+ _ => unreachable!(),
+ };
+ let m = p.start();
+ p.bump_any();
+ while !p.at(EOF) && !p.at(closing_paren_kind) {
+ match p.current() {
+ T!['{'] | T!['('] | T!['['] => token_tree(p),
+ T!['}'] => {
+ p.error("unmatched `}`");
+ m.complete(p, TOKEN_TREE);
+ return;
+ }
+ T![')'] | T![']'] => p.err_and_bump("unmatched brace"),
+ _ => p.bump_any(),
+ }
+ }
+ p.expect(closing_paren_kind);
+ m.complete(p, TOKEN_TREE);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
new file mode 100644
index 000000000..e7d30516b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
@@ -0,0 +1,168 @@
+use super::*;
+
+// test struct_item
+// struct S {}
+pub(super) fn strukt(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![struct]);
+ struct_or_union(p, m, true);
+}
+
+// test union_item
+// union U { i: i32, f: f32 }
+pub(super) fn union(p: &mut Parser<'_>, m: Marker) {
+ assert!(p.at_contextual_kw(T![union]));
+ p.bump_remap(T![union]);
+ struct_or_union(p, m, false);
+}
+
+fn struct_or_union(p: &mut Parser<'_>, m: Marker, is_struct: bool) {
+ name_r(p, ITEM_RECOVERY_SET);
+ generic_params::opt_generic_param_list(p);
+ match p.current() {
+ T![where] => {
+ generic_params::opt_where_clause(p);
+ match p.current() {
+ T![;] => p.bump(T![;]),
+ T!['{'] => record_field_list(p),
+ _ => {
+ //FIXME: special case `(` error message
+ p.error("expected `;` or `{`");
+ }
+ }
+ }
+ T!['{'] => record_field_list(p),
+ // test unit_struct
+ // struct S;
+ T![;] if is_struct => {
+ p.bump(T![;]);
+ }
+ // test tuple_struct
+ // struct S(String, usize);
+ T!['('] if is_struct => {
+ tuple_field_list(p);
+ // test tuple_struct_where
+ // struct S<T>(T) where T: Clone;
+ generic_params::opt_where_clause(p);
+ p.expect(T![;]);
+ }
+ _ => p.error(if is_struct { "expected `;`, `{`, or `(`" } else { "expected `{`" }),
+ }
+ m.complete(p, if is_struct { STRUCT } else { UNION });
+}
+
+pub(super) fn enum_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![enum]);
+ name_r(p, ITEM_RECOVERY_SET);
+ generic_params::opt_generic_param_list(p);
+ generic_params::opt_where_clause(p);
+ if p.at(T!['{']) {
+ variant_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, ENUM);
+}
+
+pub(crate) fn variant_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ if p.at(T!['{']) {
+ error_block(p, "expected enum variant");
+ continue;
+ }
+ variant(p);
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, VARIANT_LIST);
+
+ fn variant(p: &mut Parser<'_>) {
+ let m = p.start();
+ attributes::outer_attrs(p);
+ if p.at(IDENT) {
+ name(p);
+ match p.current() {
+ T!['{'] => record_field_list(p),
+ T!['('] => tuple_field_list(p),
+ _ => (),
+ }
+
+ // test variant_discriminant
+ // enum E { X(i32) = 10 }
+ if p.eat(T![=]) {
+ expressions::expr(p);
+ }
+ m.complete(p, VARIANT);
+ } else {
+ m.abandon(p);
+ p.err_and_bump("expected enum variant");
+ }
+ }
+}
+
+// test record_field_list
+// struct S { a: i32, b: f32 }
+pub(crate) fn record_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(T!['}']) && !p.at(EOF) {
+ if p.at(T!['{']) {
+ error_block(p, "expected field");
+ continue;
+ }
+ record_field(p);
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, RECORD_FIELD_LIST);
+
+ fn record_field(p: &mut Parser<'_>) {
+ let m = p.start();
+ // test record_field_attrs
+ // struct S { #[attr] f: f32 }
+ attributes::outer_attrs(p);
+ opt_visibility(p, false);
+ if p.at(IDENT) {
+ name(p);
+ p.expect(T![:]);
+ types::type_(p);
+ m.complete(p, RECORD_FIELD);
+ } else {
+ m.abandon(p);
+ p.err_and_bump("expected field declaration");
+ }
+ }
+}
+
+fn tuple_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ while !p.at(T![')']) && !p.at(EOF) {
+ let m = p.start();
+ // test tuple_field_attrs
+ // struct S (#[attr] f32);
+ attributes::outer_attrs(p);
+ opt_visibility(p, true);
+ if !p.at_ts(types::TYPE_FIRST) {
+ p.error("expected a type");
+ m.complete(p, ERROR);
+ break;
+ }
+ types::type_(p);
+ m.complete(p, TUPLE_FIELD);
+
+ if !p.at(T![')']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T![')']);
+ m.complete(p, TUPLE_FIELD_LIST);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
new file mode 100644
index 000000000..9549ec9b4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
@@ -0,0 +1,37 @@
+use super::*;
+
+// test const_item
+// const C: u32 = 92;
+pub(super) fn konst(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![const]);
+ const_or_static(p, m, true);
+}
+
+pub(super) fn static_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![static]);
+ const_or_static(p, m, false);
+}
+
+fn const_or_static(p: &mut Parser<'_>, m: Marker, is_const: bool) {
+ p.eat(T![mut]);
+
+ if is_const && p.eat(T![_]) {
+ // test anonymous_const
+ // const _: u32 = 0;
+ } else {
+ // test_err anonymous_static
+ // static _: i32 = 5;
+ name(p);
+ }
+
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ p.error("missing type for `const` or `static`");
+ }
+ if p.eat(T![=]) {
+ expressions::expr(p);
+ }
+ p.expect(T![;]);
+ m.complete(p, if is_const { CONST } else { STATIC });
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
new file mode 100644
index 000000000..c982e2d56
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs
@@ -0,0 +1,140 @@
+use super::*;
+
+// test trait_item
+// trait T { fn new() -> Self; }
+pub(super) fn trait_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![trait]);
+ name_r(p, ITEM_RECOVERY_SET);
+
+ // test trait_item_generic_params
+ // trait X<U: Debug + Display> {}
+ generic_params::opt_generic_param_list(p);
+
+ if p.eat(T![=]) {
+ // test trait_alias
+ // trait Z<U> = T<U>;
+ generic_params::bounds_without_colon(p);
+
+ // test trait_alias_where_clause
+ // trait Z<U> = T<U> where U: Copy;
+ // trait Z<U> = where Self: T<U>;
+ generic_params::opt_where_clause(p);
+ p.expect(T![;]);
+ m.complete(p, TRAIT);
+ return;
+ }
+
+ if p.at(T![:]) {
+ // test trait_item_bounds
+ // trait T: Hash + Clone {}
+ generic_params::bounds(p);
+ }
+
+ // test trait_item_where_clause
+ // trait T where Self: Copy {}
+ generic_params::opt_where_clause(p);
+
+ if p.at(T!['{']) {
+ assoc_item_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, TRAIT);
+}
+
+// test impl_item
+// impl S {}
+pub(super) fn impl_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![impl]);
+ if p.at(T![<]) && not_a_qualified_path(p) {
+ generic_params::opt_generic_param_list(p);
+ }
+
+ // test impl_item_const
+ // impl const Send for S {}
+ p.eat(T![const]);
+
+ // FIXME: never type
+ // impl ! {}
+
+ // test impl_item_neg
+ // impl !Send for S {}
+ p.eat(T![!]);
+ impl_type(p);
+ if p.eat(T![for]) {
+ impl_type(p);
+ }
+ generic_params::opt_where_clause(p);
+ if p.at(T!['{']) {
+ assoc_item_list(p);
+ } else {
+ p.error("expected `{`");
+ }
+ m.complete(p, IMPL);
+}
+
+// test assoc_item_list
+// impl F {
+// type A = i32;
+// const B: i32 = 92;
+// fn foo() {}
+// fn bar(&self) {}
+// }
+pub(crate) fn assoc_item_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+
+ let m = p.start();
+ p.bump(T!['{']);
+ // test assoc_item_list_inner_attrs
+ // impl S { #![attr] }
+ attributes::inner_attrs(p);
+
+ while !p.at(EOF) && !p.at(T!['}']) {
+ if p.at(T!['{']) {
+ error_block(p, "expected an item");
+ continue;
+ }
+ item_or_macro(p, true);
+ }
+ p.expect(T!['}']);
+ m.complete(p, ASSOC_ITEM_LIST);
+}
+
+// test impl_type_params
+// impl<const N: u32> Bar<N> {}
+fn not_a_qualified_path(p: &Parser<'_>) -> bool {
+ // There's an ambiguity between generic parameters and qualified paths in impls.
+ // If we see `<` it may start both, so we have to inspect some following tokens.
+ // The following combinations can only start generics,
+ // but not qualified paths (with one exception):
+ // `<` `>` - empty generic parameters
+ // `<` `#` - generic parameters with attributes
+ // `<` `const` - const generic parameters
+ // `<` (LIFETIME_IDENT|IDENT) `>` - single generic parameter
+ // `<` (LIFETIME_IDENT|IDENT) `,` - first generic parameter in a list
+ // `<` (LIFETIME_IDENT|IDENT) `:` - generic parameter with bounds
+ // `<` (LIFETIME_IDENT|IDENT) `=` - generic parameter with a default
+ // The only truly ambiguous case is
+ // `<` IDENT `>` `::` IDENT ...
+ // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
+ // because this is what is almost always expected in practice; qualified paths in impls
+ // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
+ if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == T![const] {
+ return true;
+ }
+ (p.nth(1) == LIFETIME_IDENT || p.nth(1) == IDENT)
+ && (p.nth(2) == T![>] || p.nth(2) == T![,] || p.nth(2) == T![:] || p.nth(2) == T![=])
+}
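To make the lookahead rules above concrete, here are a few inputs and how this heuristic classifies them. This is an illustrative sketch added by the editor (the type names are made up); it is not an upstream inline test:

```rust
struct Foo;
struct Bar<const N: u32>;
struct Baz<T>(T);

impl<> Foo {}                    // `<` `>`            -> generic parameter list
impl<const N: u32> Bar<N> {}     // `<` `const`        -> generic parameter list
impl<'a, T: Clone> Baz<&'a T> {} // `<` LIFETIME_IDENT -> generic parameter list
// The genuinely ambiguous shape, resolved in favor of generic parameters:
// impl<T> ::absolute::Path<T> { }
```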
+
+// test_err impl_type
+// impl Type {}
+// impl Trait1 for T {}
+// impl impl NotType {}
+// impl Trait2 for impl NotType {}
+pub(crate) fn impl_type(p: &mut Parser<'_>) {
+ if p.at(T![impl]) {
+ p.error("expected trait or type");
+ return;
+ }
+ types::type_(p);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs
new file mode 100644
index 000000000..69880b794
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/use_item.rs
@@ -0,0 +1,93 @@
+use super::*;
+
+// test use_item
+// use std::collections;
+pub(super) fn use_(p: &mut Parser<'_>, m: Marker) {
+ p.bump(T![use]);
+ use_tree(p, true);
+ p.expect(T![;]);
+ m.complete(p, USE);
+}
+
+// test use_tree
+// use outer::tree::{inner::tree};
+fn use_tree(p: &mut Parser<'_>, top_level: bool) {
+ let m = p.start();
+ match p.current() {
+ // test use_tree_star
+ // use *;
+ // use std::{*};
+ T![*] => p.bump(T![*]),
+ // test use_tree_abs_star
+ // use ::*;
+ // use std::{::*};
+ T![:] if p.at(T![::]) && p.nth(2) == T![*] => {
+ p.bump(T![::]);
+ p.bump(T![*]);
+ }
+ T!['{'] => use_tree_list(p),
+ T![:] if p.at(T![::]) && p.nth(2) == T!['{'] => {
+ p.bump(T![::]);
+ use_tree_list(p);
+ }
+
+ // test use_tree_path
+ // use ::std;
+ // use std::collections;
+ //
+ // use self::m;
+ // use super::m;
+ // use crate::m;
+ _ if paths::is_use_path_start(p) => {
+ paths::use_path(p);
+ match p.current() {
+ // test use_tree_alias
+ // use std as stdlib;
+ // use Trait as _;
+ T![as] => opt_rename(p),
+ T![:] if p.at(T![::]) => {
+ p.bump(T![::]);
+ match p.current() {
+ // test use_tree_path_star
+ // use std::*;
+ T![*] => p.bump(T![*]),
+ // test use_tree_path_use_tree
+ // use std::{collections};
+ T!['{'] => use_tree_list(p),
+ _ => p.error("expected `{` or `*`"),
+ }
+ }
+ _ => (),
+ }
+ }
+ _ => {
+ m.abandon(p);
+ let msg = "expected one of `*`, `::`, `{`, `self`, `super` or an identifier";
+ if top_level {
+ p.err_recover(msg, ITEM_RECOVERY_SET);
+ } else {
+ // if we are parsing a nested tree, we have to eat a token to
+ // maintain balanced `{}`
+ p.err_and_bump(msg);
+ }
+ return;
+ }
+ }
+ m.complete(p, USE_TREE);
+}
+
+// test use_tree_list
+// use {a, b, c};
+pub(crate) fn use_tree_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ use_tree(p, false);
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, USE_TREE_LIST);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
new file mode 100644
index 000000000..20e8e95f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
@@ -0,0 +1,209 @@
+use super::*;
+
+// test param_list
+// fn a() {}
+// fn b(x: i32) {}
+// fn c(x: i32, ) {}
+// fn d(x: i32, y: ()) {}
+pub(super) fn param_list_fn_def(p: &mut Parser<'_>) {
+ list_(p, Flavor::FnDef);
+}
+
+// test param_list_opt_patterns
+// fn foo<F: FnMut(&mut Foo<'a>)>(){}
+pub(super) fn param_list_fn_trait(p: &mut Parser<'_>) {
+ list_(p, Flavor::FnTrait);
+}
+
+pub(super) fn param_list_fn_ptr(p: &mut Parser<'_>) {
+ list_(p, Flavor::FnPointer);
+}
+
+pub(super) fn param_list_closure(p: &mut Parser<'_>) {
+ list_(p, Flavor::Closure);
+}
+
+#[derive(Debug, Clone, Copy)]
+enum Flavor {
+ FnDef, // Includes trait fn params; omitted param idents are not supported
+ FnTrait, // Params for `Fn(...)`/`FnMut(...)`/`FnOnce(...)` annotations
+ FnPointer,
+ Closure,
+}
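For orientation, each `Flavor` corresponds to a different syntactic position in which a parameter list can appear. The following is an editor's sketch (not part of the patch) mapping sample source shapes to the variants above:

```rust
fn def(x: i32) -> i32 { x }           // Flavor::FnDef     -- fn items (including trait fns)
type T = Box<dyn Fn(i32, &str)>;      // Flavor::FnTrait   -- parenthesized `Fn(...)` bounds
type P = fn(x: i32, _: u8);           // Flavor::FnPointer -- function pointer types
fn main() {
    let _c = |x: i32, y: u8| (x, y);  // Flavor::Closure   -- closure parameter lists
}
```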
+
+fn list_(p: &mut Parser<'_>, flavor: Flavor) {
+ use Flavor::*;
+
+ let (bra, ket) = match flavor {
+ Closure => (T![|], T![|]),
+ FnDef | FnTrait | FnPointer => (T!['('], T![')']),
+ };
+
+ let list_marker = p.start();
+ p.bump(bra);
+
+ let mut param_marker = None;
+ if let FnDef = flavor {
+ // test self_param_outer_attr
+ // fn f(#[must_use] self) {}
+ let m = p.start();
+ attributes::outer_attrs(p);
+ match opt_self_param(p, m) {
+ Ok(()) => {}
+ Err(m) => param_marker = Some(m),
+ }
+ }
+
+ while !p.at(EOF) && !p.at(ket) {
+ // test param_outer_arg
+ // fn f(#[attr1] pat: Type) {}
+ let m = match param_marker.take() {
+ Some(m) => m,
+ None => {
+ let m = p.start();
+ attributes::outer_attrs(p);
+ m
+ }
+ };
+
+ if !p.at_ts(PARAM_FIRST) {
+ p.error("expected value parameter");
+ m.abandon(p);
+ break;
+ }
+ param(p, m, flavor);
+ if !p.at(ket) {
+ p.expect(T![,]);
+ }
+ }
+
+ if let Some(m) = param_marker {
+ m.abandon(p);
+ }
+
+ p.expect(ket);
+ list_marker.complete(p, PARAM_LIST);
+}
+
+const PARAM_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST);
+
+fn param(p: &mut Parser<'_>, m: Marker, flavor: Flavor) {
+ match flavor {
+ // test param_list_vararg
+ // extern "C" { fn printf(format: *const i8, ..., _: u8) -> i32; }
+ Flavor::FnDef | Flavor::FnPointer if p.eat(T![...]) => {}
+
+ // test fn_def_param
+ // fn foo(..., (x, y): (i32, i32)) {}
+ Flavor::FnDef => {
+ patterns::pattern(p);
+ if !variadic_param(p) {
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ // test_err missing_fn_param_type
+ // fn f(x y: i32, z, t: i32) {}
+ p.error("missing type for function parameter");
+ }
+ }
+ }
+ // test value_parameters_no_patterns
+ // type F = Box<Fn(i32, &i32, &i32, ())>;
+ Flavor::FnTrait => {
+ types::type_(p);
+ }
+ // test fn_pointer_param_ident_path
+ // type Foo = fn(Bar::Baz);
+ // type Qux = fn(baz: Bar::Baz);
+
+ // test fn_pointer_unnamed_arg
+ // type Foo = fn(_: bar);
+ Flavor::FnPointer => {
+ if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) {
+ patterns::pattern_single(p);
+ if !variadic_param(p) {
+ if p.at(T![:]) {
+ types::ascription(p);
+ } else {
+ p.error("missing type for function parameter");
+ }
+ }
+ } else {
+ types::type_(p);
+ }
+ }
+ // test closure_params
+ // fn main() {
+ // let foo = |bar, baz: Baz, qux: Qux::Quux| ();
+ // }
+ Flavor::Closure => {
+ patterns::pattern_single(p);
+ if p.at(T![:]) && !p.at(T![::]) {
+ types::ascription(p);
+ }
+ }
+ }
+ m.complete(p, PARAM);
+}
+
+fn variadic_param(p: &mut Parser<'_>) -> bool {
+ if p.at(T![:]) && p.nth_at(1, T![...]) {
+ p.bump(T![:]);
+ p.bump(T![...]);
+ true
+ } else {
+ false
+ }
+}
+
+// test self_param
+// impl S {
+// fn a(self) {}
+// fn b(&self,) {}
+// fn c(&'a self,) {}
+// fn d(&'a mut self, x: i32) {}
+// fn e(mut self) {}
+// }
+fn opt_self_param(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
+ if p.at(T![self]) || p.at(T![mut]) && p.nth(1) == T![self] {
+ p.eat(T![mut]);
+ self_as_name(p);
+ // test arb_self_types
+ // impl S {
+ // fn a(self: &Self) {}
+ // fn b(mut self: Box<Self>) {}
+ // }
+ if p.at(T![:]) {
+ types::ascription(p);
+ }
+ } else {
+ let la1 = p.nth(1);
+ let la2 = p.nth(2);
+ let la3 = p.nth(3);
+ if !matches!(
+ (p.current(), la1, la2, la3),
+ (T![&], T![self], _, _)
+ | (T![&], T![mut] | LIFETIME_IDENT, T![self], _)
+ | (T![&], LIFETIME_IDENT, T![mut], T![self])
+ ) {
+ return Err(m);
+ }
+ p.bump(T![&]);
+ if p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ }
+ p.eat(T![mut]);
+ self_as_name(p);
+ }
+ m.complete(p, SELF_PARAM);
+ if !p.at(T![')']) {
+ p.expect(T![,]);
+ }
+ Ok(())
+}
+
+fn self_as_name(p: &mut Parser<'_>) {
+ let m = p.start();
+ p.bump(T![self]);
+ m.complete(p, NAME);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
new file mode 100644
index 000000000..8de5d33a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs
@@ -0,0 +1,132 @@
+use super::*;
+
+pub(super) const PATH_FIRST: TokenSet =
+ TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![Self], T![:], T![<]]);
+
+pub(super) fn is_path_start(p: &Parser<'_>) -> bool {
+ is_use_path_start(p) || p.at(T![<]) || p.at(T![Self])
+}
+
+pub(super) fn is_use_path_start(p: &Parser<'_>) -> bool {
+ match p.current() {
+ IDENT | T![self] | T![super] | T![crate] => true,
+ T![:] if p.at(T![::]) => true,
+ _ => false,
+ }
+}
+
+pub(super) fn use_path(p: &mut Parser<'_>) {
+ path(p, Mode::Use);
+}
+
+pub(crate) fn type_path(p: &mut Parser<'_>) {
+ path(p, Mode::Type);
+}
+
+pub(super) fn expr_path(p: &mut Parser<'_>) {
+ path(p, Mode::Expr);
+}
+
+pub(crate) fn type_path_for_qualifier(
+ p: &mut Parser<'_>,
+ qual: CompletedMarker,
+) -> CompletedMarker {
+ path_for_qualifier(p, Mode::Type, qual)
+}
+
+#[derive(Clone, Copy, Eq, PartialEq)]
+enum Mode {
+ Use,
+ Type,
+ Expr,
+}
+
+fn path(p: &mut Parser<'_>, mode: Mode) {
+ let path = p.start();
+ path_segment(p, mode, true);
+ let qual = path.complete(p, PATH);
+ path_for_qualifier(p, mode, qual);
+}
+
+fn path_for_qualifier(
+ p: &mut Parser<'_>,
+ mode: Mode,
+ mut qual: CompletedMarker,
+) -> CompletedMarker {
+ loop {
+ let use_tree = mode == Mode::Use && matches!(p.nth(2), T![*] | T!['{']);
+ if p.at(T![::]) && !use_tree {
+ let path = qual.precede(p);
+ p.bump(T![::]);
+ path_segment(p, mode, false);
+ let path = path.complete(p, PATH);
+ qual = path;
+ } else {
+ return qual;
+ }
+ }
+}
+
+fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
+ let m = p.start();
+ // test qual_paths
+ // type X = <A as B>::Output;
+ // fn foo() { <usize as Default>::default(); }
+ if first && p.eat(T![<]) {
+ types::type_(p);
+ if p.eat(T![as]) {
+ if is_use_path_start(p) {
+ types::path_type(p);
+ } else {
+ p.error("expected a trait");
+ }
+ }
+ p.expect(T![>]);
+ } else {
+ let mut empty = true;
+ if first {
+ p.eat(T![::]);
+ empty = false;
+ }
+ match p.current() {
+ IDENT => {
+ name_ref(p);
+ opt_path_type_args(p, mode);
+ }
+ // test crate_path
+ // use crate::foo;
+ T![self] | T![super] | T![crate] | T![Self] => {
+ let m = p.start();
+ p.bump_any();
+ m.complete(p, NAME_REF);
+ }
+ _ => {
+ p.err_recover("expected identifier", items::ITEM_RECOVERY_SET);
+ if empty {
+ // test_err empty_segment
+ // use crate::;
+ m.abandon(p);
+ return;
+ }
+ }
+ };
+ }
+ m.complete(p, PATH_SEGMENT);
+}
+
+fn opt_path_type_args(p: &mut Parser<'_>, mode: Mode) {
+ match mode {
+ Mode::Use => {}
+ Mode::Type => {
+ // test path_fn_trait_args
+ // type F = Box<Fn(i32) -> ()>;
+ if p.at(T!['(']) {
+ params::param_list_fn_trait(p);
+ opt_ret_type(p);
+ } else {
+ generic_args::opt_generic_arg_list(p, false);
+ }
+ }
+ Mode::Expr => generic_args::opt_generic_arg_list(p, true),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
new file mode 100644
index 000000000..4cbf10306
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
@@ -0,0 +1,440 @@
+use super::*;
+
+pub(super) const PATTERN_FIRST: TokenSet =
+ expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+ T![box],
+ T![ref],
+ T![mut],
+ T!['('],
+ T!['['],
+ T![&],
+ T![_],
+ T![-],
+ T![.],
+ ]));
+
+pub(crate) fn pattern(p: &mut Parser<'_>) {
+ pattern_r(p, PAT_RECOVERY_SET);
+}
+
+/// Parses a pattern list separated by pipes `|`.
+pub(super) fn pattern_top(p: &mut Parser<'_>) {
+ pattern_top_r(p, PAT_RECOVERY_SET);
+}
+
+pub(crate) fn pattern_single(p: &mut Parser<'_>) {
+ pattern_single_r(p, PAT_RECOVERY_SET);
+}
+
+/// Parses a pattern list separated by pipes `|`
+/// using the given `recovery_set`.
+pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
+ p.eat(T![|]);
+ pattern_r(p, recovery_set);
+}
+
+/// Parses a pattern list separated by pipes `|`, with no leading `|`, using the
+/// given `recovery_set`.
+
+// test or_pattern
+// fn main() {
+// match () {
+// (_ | _) => (),
+// &(_ | _) => (),
+// (_ | _,) => (),
+// [_ | _,] => (),
+// }
+// }
+fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
+ let m = p.start();
+ pattern_single_r(p, recovery_set);
+
+ if !p.at(T![|]) {
+ m.abandon(p);
+ return;
+ }
+ while p.eat(T![|]) {
+ pattern_single_r(p, recovery_set);
+ }
+ m.complete(p, OR_PAT);
+}
+
+fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
+ if let Some(lhs) = atom_pat(p, recovery_set) {
+ // test range_pat
+ // fn main() {
+ // match 92 {
+ // 0 ... 100 => (),
+ // 101 ..= 200 => (),
+ // 200 .. 301 => (),
+ // 302 .. => (),
+ // }
+ //
+ // match Some(10 as u8) {
+ // Some(0) | None => (),
+ // Some(1..) => ()
+ // }
+ //
+ // match (10 as u8, 5 as u8) {
+ // (0, _) => (),
+ // (1.., _) => ()
+ // }
+ // }
+
+ // FIXME: support half_open_range_patterns (`..=2`),
+ // exclusive_range_pattern (`..5`) with missing lhs
+ for range_op in [T![...], T![..=], T![..]] {
+ if p.at(range_op) {
+ let m = lhs.precede(p);
+ p.bump(range_op);
+
+ // `0 .. =>` or `let 0 .. =` or `Some(0 .. )`
+ // ^ ^ ^
+ if p.at(T![=]) | p.at(T![')']) | p.at(T![,]) {
+ // test half_open_range_pat
+ // fn f() { let 0 .. = 1u32; }
+ } else {
+ atom_pat(p, recovery_set);
+ }
+ m.complete(p, RANGE_PAT);
+ return;
+ }
+ }
+ }
+}
+
+const PAT_RECOVERY_SET: TokenSet =
+ TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,], T![=]]);
+
+fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option<CompletedMarker> {
+ let m = match p.current() {
+ T![box] => box_pat(p),
+ T![ref] | T![mut] => ident_pat(p, true),
+ T![const] => const_block_pat(p),
+ IDENT => match p.nth(1) {
+ // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
+ // (T![x]).
+ T!['('] | T!['{'] | T![!] => path_or_macro_pat(p),
+ T![:] if p.nth_at(1, T![::]) => path_or_macro_pat(p),
+ _ => ident_pat(p, true),
+ },
+
+ // test type_path_in_pattern
+ // fn main() { let <_>::Foo = (); }
+ _ if paths::is_path_start(p) => path_or_macro_pat(p),
+ _ if is_literal_pat_start(p) => literal_pat(p),
+
+ T![.] if p.at(T![..]) => rest_pat(p),
+ T![_] => wildcard_pat(p),
+ T![&] => ref_pat(p),
+ T!['('] => tuple_pat(p),
+ T!['['] => slice_pat(p),
+
+ _ => {
+ p.err_recover("expected pattern", recovery_set);
+ return None;
+ }
+ };
+
+ Some(m)
+}
+
+fn is_literal_pat_start(p: &Parser<'_>) -> bool {
+ p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER)
+ || p.at_ts(expressions::LITERAL_FIRST)
+}
+
+// test literal_pattern
+// fn main() {
+// match () {
+// -1 => (),
+// 92 => (),
+// 'c' => (),
+// "hello" => (),
+// }
+// }
+fn literal_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(is_literal_pat_start(p));
+ let m = p.start();
+ if p.at(T![-]) {
+ p.bump(T![-]);
+ }
+ expressions::literal(p);
+ m.complete(p, LITERAL_PAT)
+}
+
+// test path_part
+// fn foo() {
+// let foo::Bar = ();
+// let ::Bar = ();
+// let Bar { .. } = ();
+// let Bar(..) = ();
+// }
+fn path_or_macro_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(paths::is_path_start(p));
+ let m = p.start();
+ paths::expr_path(p);
+ let kind = match p.current() {
+ T!['('] => {
+ tuple_pat_fields(p);
+ TUPLE_STRUCT_PAT
+ }
+ T!['{'] => {
+ record_pat_field_list(p);
+ RECORD_PAT
+ }
+ // test marco_pat
+ // fn main() {
+ // let m!(x) = 0;
+ // }
+ T![!] => {
+ items::macro_call_after_excl(p);
+ return m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_PAT);
+ }
+ _ => PATH_PAT,
+ };
+ m.complete(p, kind)
+}
+
+// test tuple_pat_fields
+// fn foo() {
+// let S() = ();
+// let S(_) = ();
+// let S(_,) = ();
+// let S(_, .. , x) = ();
+// }
+fn tuple_pat_fields(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ p.bump(T!['(']);
+ pat_list(p, T![')']);
+ p.expect(T![')']);
+}
+
+// test record_pat_field
+// fn foo() {
+// let S { 0: 1 } = ();
+// let S { x: 1 } = ();
+// let S { #[cfg(any())] x: 1 } = ();
+// }
+fn record_pat_field(p: &mut Parser<'_>) {
+ match p.current() {
+ IDENT | INT_NUMBER if p.nth(1) == T![:] => {
+ name_ref_or_index(p);
+ p.bump(T![:]);
+ pattern(p);
+ }
+ T![box] => {
+ // FIXME: not all box patterns should be allowed
+ box_pat(p);
+ }
+ T![ref] | T![mut] | IDENT => {
+ ident_pat(p, false);
+ }
+ _ => {
+ p.err_and_bump("expected identifier");
+ }
+ }
+}
+
+// test record_pat_field_list
+// fn foo() {
+// let S {} = ();
+// let S { f, ref mut g } = ();
+// let S { h: _, ..} = ();
+// let S { h: _, } = ();
+// let S { #[cfg(any())] .. } = ();
+// }
+fn record_pat_field_list(p: &mut Parser<'_>) {
+ assert!(p.at(T!['{']));
+ let m = p.start();
+ p.bump(T!['{']);
+ while !p.at(EOF) && !p.at(T!['}']) {
+ let m = p.start();
+ attributes::outer_attrs(p);
+
+ match p.current() {
+ // A trailing `..` is *not* treated as a REST_PAT.
+ T![.] if p.at(T![..]) => {
+ p.bump(T![..]);
+ m.complete(p, REST_PAT);
+ }
+ T!['{'] => {
+ error_block(p, "expected ident");
+ m.abandon(p);
+ }
+ _ => {
+ record_pat_field(p);
+ m.complete(p, RECORD_PAT_FIELD);
+ }
+ }
+ if !p.at(T!['}']) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T!['}']);
+ m.complete(p, RECORD_PAT_FIELD_LIST);
+}
+
+// test placeholder_pat
+// fn main() { let _ = (); }
+fn wildcard_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![_]));
+ let m = p.start();
+ p.bump(T![_]);
+ m.complete(p, WILDCARD_PAT)
+}
+
+// test dot_dot_pat
+// fn main() {
+// let .. = ();
+// //
+// // Tuples
+// //
+// let (a, ..) = ();
+// let (a, ..,) = ();
+// let Tuple(a, ..) = ();
+// let Tuple(a, ..,) = ();
+// let (.., ..) = ();
+// let Tuple(.., ..) = ();
+// let (.., a, ..) = ();
+// let Tuple(.., a, ..) = ();
+// //
+// // Slices
+// //
+// let [..] = ();
+// let [head, ..] = ();
+// let [head, tail @ ..] = ();
+// let [head, .., cons] = ();
+// let [head, mid @ .., cons] = ();
+// let [head, .., .., cons] = ();
+// let [head, .., mid, tail @ ..] = ();
+// let [head, .., mid, .., cons] = ();
+// }
+fn rest_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![..]));
+ let m = p.start();
+ p.bump(T![..]);
+ m.complete(p, REST_PAT)
+}
+
+// test ref_pat
+// fn main() {
+// let &a = ();
+// let &mut b = ();
+// }
+fn ref_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![&]));
+ let m = p.start();
+ p.bump(T![&]);
+ p.eat(T![mut]);
+ pattern_single(p);
+ m.complete(p, REF_PAT)
+}
+
+// test tuple_pat
+// fn main() {
+// let (a, b, ..) = ();
+// let (a,) = ();
+// let (..) = ();
+// let () = ();
+// }
+fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ let mut has_comma = false;
+ let mut has_pat = false;
+ let mut has_rest = false;
+ while !p.at(EOF) && !p.at(T![')']) {
+ has_pat = true;
+ if !p.at_ts(PATTERN_FIRST) {
+ p.error("expected a pattern");
+ break;
+ }
+ has_rest |= p.at(T![..]);
+
+ pattern(p);
+ if !p.at(T![')']) {
+ has_comma = true;
+ p.expect(T![,]);
+ }
+ }
+ p.expect(T![')']);
+
+ m.complete(p, if !has_comma && !has_rest && has_pat { PAREN_PAT } else { TUPLE_PAT })
+}
+
+// test slice_pat
+// fn main() {
+// let [a, b, ..] = [];
+// }
+fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T!['[']));
+ let m = p.start();
+ p.bump(T!['[']);
+ pat_list(p, T![']']);
+ p.expect(T![']']);
+ m.complete(p, SLICE_PAT)
+}
+
+fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
+ while !p.at(EOF) && !p.at(ket) {
+ if !p.at_ts(PATTERN_FIRST) {
+ p.error("expected a pattern");
+ break;
+ }
+
+ pattern(p);
+ if !p.at(ket) {
+ p.expect(T![,]);
+ }
+ }
+}
+
+// test bind_pat
+// fn main() {
+// let a = ();
+// let mut b = ();
+// let ref c = ();
+// let ref mut d = ();
+// let e @ _ = ();
+// let ref mut f @ g @ _ = ();
+// }
+fn ident_pat(p: &mut Parser<'_>, with_at: bool) -> CompletedMarker {
+ assert!(matches!(p.current(), T![ref] | T![mut] | IDENT));
+ let m = p.start();
+ p.eat(T![ref]);
+ p.eat(T![mut]);
+ name_r(p, PAT_RECOVERY_SET);
+ if with_at && p.eat(T![@]) {
+ pattern_single(p);
+ }
+ m.complete(p, IDENT_PAT)
+}
+
+// test box_pat
+// fn main() {
+// let box i = ();
+// let box Outer { box i, j: box Inner(box &x) } = ();
+// let box ref mut i = ();
+// }
+fn box_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![box]));
+ let m = p.start();
+ p.bump(T![box]);
+ pattern_single(p);
+ m.complete(p, BOX_PAT)
+}
+
+// test const_block_pat
+// fn main() {
+// let const { 15 } = ();
+// let const { foo(); bar() } = ();
+// }
+fn const_block_pat(p: &mut Parser<'_>) -> CompletedMarker {
+ assert!(p.at(T![const]));
+ let m = p.start();
+ p.bump(T![const]);
+ expressions::block_expr(p);
+ m.complete(p, CONST_BLOCK_PAT)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
new file mode 100644
index 000000000..5c6e18fee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
@@ -0,0 +1,352 @@
+use super::*;
+
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
+ T!['('],
+ T!['['],
+ T![<],
+ T![!],
+ T![*],
+ T![&],
+ T![_],
+ T![fn],
+ T![unsafe],
+ T![extern],
+ T![for],
+ T![impl],
+ T![dyn],
+ T![Self],
+]));
+
+const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
+ T![')'],
+ T![,],
+ // test_err struct_field_recover
+ // struct S { f pub g: () }
+ T![pub],
+]);
+
+pub(crate) fn type_(p: &mut Parser<'_>) {
+ type_with_bounds_cond(p, true);
+}
+
+pub(super) fn type_no_bounds(p: &mut Parser<'_>) {
+ type_with_bounds_cond(p, false);
+}
+
+fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) {
+ match p.current() {
+ T!['('] => paren_or_tuple_type(p),
+ T![!] => never_type(p),
+ T![*] => ptr_type(p),
+ T!['['] => array_or_slice_type(p),
+ T![&] => ref_type(p),
+ T![_] => infer_type(p),
+ T![fn] | T![unsafe] | T![extern] => fn_ptr_type(p),
+ T![for] => for_type(p, allow_bounds),
+ T![impl] => impl_trait_type(p),
+ T![dyn] => dyn_trait_type(p),
+ // Some path types are not allowed to have bounds (no plus)
+ T![<] => path_type_(p, allow_bounds),
+ _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds),
+ _ => {
+ p.err_recover("expected type", TYPE_RECOVERY_SET);
+ }
+ }
+}
+
+pub(super) fn ascription(p: &mut Parser<'_>) {
+ assert!(p.at(T![:]));
+ p.bump(T![:]);
+ if p.at(T![=]) {
+ // recover from `let x: = expr;`, `const X: = expr;` and similar cases
+ // hopefully no type starts with `=`
+ p.error("missing type");
+ return;
+ }
+ type_(p);
+}
+
+fn paren_or_tuple_type(p: &mut Parser<'_>) {
+ assert!(p.at(T!['(']));
+ let m = p.start();
+ p.bump(T!['(']);
+ let mut n_types: u32 = 0;
+ let mut trailing_comma: bool = false;
+ while !p.at(EOF) && !p.at(T![')']) {
+ n_types += 1;
+ type_(p);
+ if p.eat(T![,]) {
+ trailing_comma = true;
+ } else {
+ trailing_comma = false;
+ break;
+ }
+ }
+ p.expect(T![')']);
+
+ let kind = if n_types == 1 && !trailing_comma {
+ // test paren_type
+ // type T = (i32);
+ PAREN_TYPE
+ } else {
+ // test unit_type
+ // type T = ();
+
+ // test singleton_tuple_type
+ // type T = (i32,);
+ TUPLE_TYPE
+ };
+ m.complete(p, kind);
+}
+
+// test never_type
+// type Never = !;
+fn never_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![!]));
+ let m = p.start();
+ p.bump(T![!]);
+ m.complete(p, NEVER_TYPE);
+}
+
+fn ptr_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![*]));
+ let m = p.start();
+ p.bump(T![*]);
+
+ match p.current() {
+ // test pointer_type_mut
+ // type M = *mut ();
+// type C = *const ();
+ T![mut] | T![const] => p.bump_any(),
+ _ => {
+ // test_err pointer_type_no_mutability
+ // type T = *();
+ p.error(
+ "expected mut or const in raw pointer type \
+ (use `*mut T` or `*const T` as appropriate)",
+ );
+ }
+ };
+
+ type_no_bounds(p);
+ m.complete(p, PTR_TYPE);
+}
+
+fn array_or_slice_type(p: &mut Parser<'_>) {
+ assert!(p.at(T!['[']));
+ let m = p.start();
+ p.bump(T!['[']);
+
+ type_(p);
+ let kind = match p.current() {
+ // test slice_type
+ // type T = [()];
+ T![']'] => {
+ p.bump(T![']']);
+ SLICE_TYPE
+ }
+
+ // test array_type
+ // type T = [(); 92];
+ T![;] => {
+ p.bump(T![;]);
+ expressions::expr(p);
+ p.expect(T![']']);
+ ARRAY_TYPE
+ }
+ // test_err array_type_missing_semi
+ // type T = [() 92];
+ _ => {
+ p.error("expected `;` or `]`");
+ SLICE_TYPE
+ }
+ };
+ m.complete(p, kind);
+}
+
+// test reference_type
+// type A = &();
+// type B = &'static ();
+// type C = &mut ();
+fn ref_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![&]));
+ let m = p.start();
+ p.bump(T![&]);
+ if p.at(LIFETIME_IDENT) {
+ lifetime(p);
+ }
+ p.eat(T![mut]);
+ type_no_bounds(p);
+ m.complete(p, REF_TYPE);
+}
+
+// test placeholder_type
+// type Placeholder = _;
+fn infer_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![_]));
+ let m = p.start();
+ p.bump(T![_]);
+ m.complete(p, INFER_TYPE);
+}
+
+// test fn_pointer_type
+// type A = fn();
+// type B = unsafe fn();
+// type C = unsafe extern "C" fn();
+// type D = extern "C" fn ( u8 , ... ) -> u8;
+fn fn_ptr_type(p: &mut Parser<'_>) {
+ let m = p.start();
+ p.eat(T![unsafe]);
+ if p.at(T![extern]) {
+ abi(p);
+ }
+ // test_err fn_pointer_type_missing_fn
+ // type F = unsafe ();
+ if !p.eat(T![fn]) {
+ m.abandon(p);
+ p.error("expected `fn`");
+ return;
+ }
+ if p.at(T!['(']) {
+ params::param_list_fn_ptr(p);
+ } else {
+ p.error("expected parameters");
+ }
+ // test fn_pointer_type_with_ret
+ // type F = fn() -> ();
+ opt_ret_type(p);
+ m.complete(p, FN_PTR_TYPE);
+}
+
+pub(super) fn for_binder(p: &mut Parser<'_>) {
+ assert!(p.at(T![for]));
+ p.bump(T![for]);
+ if p.at(T![<]) {
+ generic_params::opt_generic_param_list(p);
+ } else {
+ p.error("expected `<`");
+ }
+}
+
+// test for_type
+// type A = for<'a> fn() -> ();
+// type B = for<'a> unsafe extern "C" fn(&'a ()) -> ();
+// type Obj = for<'a> PartialEq<&'a i32>;
+pub(super) fn for_type(p: &mut Parser<'_>, allow_bounds: bool) {
+ assert!(p.at(T![for]));
+ let m = p.start();
+ for_binder(p);
+ match p.current() {
+ T![fn] | T![unsafe] | T![extern] => {}
+ // OK: legacy trait object format
+ _ if paths::is_use_path_start(p) => {}
+ _ => {
+ p.error("expected a function pointer or path");
+ }
+ }
+ type_no_bounds(p);
+ let completed = m.complete(p, FOR_TYPE);
+
+ // test no_dyn_trait_leading_for
+ // type A = for<'a> Test<'a> + Send;
+ if allow_bounds {
+ opt_type_bounds_as_dyn_trait_type(p, completed);
+ }
+}
+
+// test impl_trait_type
+// type A = impl Iterator<Item=Foo<'a>> + 'a;
+fn impl_trait_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![impl]));
+ let m = p.start();
+ p.bump(T![impl]);
+ generic_params::bounds_without_colon(p);
+ m.complete(p, IMPL_TRAIT_TYPE);
+}
+
+// test dyn_trait_type
+// type A = dyn Iterator<Item=Foo<'a>> + 'a;
+fn dyn_trait_type(p: &mut Parser<'_>) {
+ assert!(p.at(T![dyn]));
+ let m = p.start();
+ p.bump(T![dyn]);
+ generic_params::bounds_without_colon(p);
+ m.complete(p, DYN_TRAIT_TYPE);
+}
+
+// test path_type
+// type A = Foo;
+// type B = ::Foo;
+// type C = self::Foo;
+// type D = super::Foo;
+pub(super) fn path_type(p: &mut Parser<'_>) {
+ path_type_(p, true);
+}
+
+// test macro_call_type
+// type A = foo!();
+// type B = crate::foo!();
+fn path_or_macro_type_(p: &mut Parser<'_>, allow_bounds: bool) {
+ assert!(paths::is_path_start(p));
+ let r = p.start();
+ let m = p.start();
+
+ paths::type_path(p);
+
+ let kind = if p.at(T![!]) && !p.at(T![!=]) {
+ items::macro_call_after_excl(p);
+ m.complete(p, MACRO_CALL);
+ MACRO_TYPE
+ } else {
+ m.abandon(p);
+ PATH_TYPE
+ };
+
+ let path = r.complete(p, kind);
+
+ if allow_bounds {
+ opt_type_bounds_as_dyn_trait_type(p, path);
+ }
+}
+
+pub(super) fn path_type_(p: &mut Parser<'_>, allow_bounds: bool) {
+ assert!(paths::is_path_start(p));
+ let m = p.start();
+ paths::type_path(p);
+
+ // test path_type_with_bounds
+ // fn foo() -> Box<T + 'f> {}
+ // fn foo() -> Box<dyn T + 'f> {}
+ let path = m.complete(p, PATH_TYPE);
+ if allow_bounds {
+ opt_type_bounds_as_dyn_trait_type(p, path);
+ }
+}
+
+/// This turns a parsed PATH_TYPE or FOR_TYPE optionally into a DYN_TRAIT_TYPE
+/// with a TYPE_BOUND_LIST
+fn opt_type_bounds_as_dyn_trait_type(p: &mut Parser<'_>, type_marker: CompletedMarker) {
+ assert!(matches!(
+ type_marker.kind(),
+ SyntaxKind::PATH_TYPE | SyntaxKind::FOR_TYPE | SyntaxKind::MACRO_TYPE
+ ));
+ if !p.at(T![+]) {
+ return;
+ }
+
+ // First create a TYPE_BOUND from the completed PATH_TYPE
+ let m = type_marker.precede(p).complete(p, TYPE_BOUND);
+
+ // Next setup a marker for the TYPE_BOUND_LIST
+ let m = m.precede(p);
+
+ // This gets consumed here so it gets properly set
+ // in the TYPE_BOUND_LIST
+ p.eat(T![+]);
+
+ // Parse rest of the bounds into the TYPE_BOUND_LIST
+ let m = generic_params::bounds_without_colon_m(p, m);
+
+ // Finally precede everything with DYN_TRAIT_TYPE
+ m.precede(p).complete(p, DYN_TRAIT_TYPE);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs
new file mode 100644
index 000000000..9504bd4d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/input.rs
@@ -0,0 +1,88 @@
+//! See [`Input`].
+
+use crate::SyntaxKind;
+
+#[allow(non_camel_case_types)]
+type bits = u64;
+
+/// Input for the parser -- a sequence of tokens.
+///
+/// As of now, the parser doesn't have access to the *text* of the tokens, and makes
+/// decisions based solely on their classification. Unlike `LexerToken`, the
+/// `Tokens` doesn't include whitespace and comments. This is the main input to the parser.
+///
+/// Struct of arrays internally, but this shouldn't really matter.
+#[derive(Default)]
+pub struct Input {
+ kind: Vec<SyntaxKind>,
+ joint: Vec<bits>,
+ contextual_kind: Vec<SyntaxKind>,
+}
+
+/// `pub` impl used by callers to create `Tokens`.
+impl Input {
+ #[inline]
+ pub fn push(&mut self, kind: SyntaxKind) {
+ self.push_impl(kind, SyntaxKind::EOF)
+ }
+ #[inline]
+ pub fn push_ident(&mut self, contextual_kind: SyntaxKind) {
+ self.push_impl(SyntaxKind::IDENT, contextual_kind)
+ }
+ /// Sets jointness for the last token we've pushed.
+ ///
+ /// This is a separate API rather than an argument to the `push` to make it
+ /// convenient both for textual and mbe tokens. With text, you know whether
+ /// the *previous* token was joint, with mbe, you know whether the *current*
+ /// one is joint. This API allows for both styles of usage:
+ ///
+ /// ```
+ /// // In text:
+ /// if prev_joint { tokens.was_joint(); }
+ /// tokens.push(curr);
+ ///
+ /// // In MBE:
+ /// tokens.push(curr);
+ /// if curr_joint { tokens.was_joint(); }
+ /// ```
+ #[inline]
+ pub fn was_joint(&mut self) {
+ let n = self.len() - 1;
+ let (idx, b_idx) = self.bit_index(n);
+ self.joint[idx] |= 1 << b_idx;
+ }
+ #[inline]
+ fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind) {
+ let idx = self.len();
+ if idx % (bits::BITS as usize) == 0 {
+ self.joint.push(0);
+ }
+ self.kind.push(kind);
+ self.contextual_kind.push(contextual_kind);
+ }
+}
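A minimal usage sketch of this builder API, added for illustration only. It assumes the generated `SyntaxKind::{EOF, DOT}` variants used elsewhere in the crate, and uses `EOF` as the "no contextual keyword" sentinel, matching `push_impl` above:

```rust
use parser::{Input, SyntaxKind};

fn input_for_a_dot_b() -> Input {
    // Builds tokens for the text `a.b`, which has no whitespace between tokens.
    let mut input = Input::default();
    input.push_ident(SyntaxKind::EOF); // `a`: plain identifier, no contextual keyword
    input.was_joint();                 // `a` touches the following `.`
    input.push(SyntaxKind::DOT);       // `.`
    input.was_joint();                 // `.` touches the following `b`
    input.push_ident(SyntaxKind::EOF); // `b`
    input
}
```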
+
+/// pub(crate) impl used by the parser to consume `Tokens`.
+impl Input {
+ pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
+ self.kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
+ }
+ pub(crate) fn contextual_kind(&self, idx: usize) -> SyntaxKind {
+ self.contextual_kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
+ }
+ pub(crate) fn is_joint(&self, n: usize) -> bool {
+ let (idx, b_idx) = self.bit_index(n);
+ self.joint[idx] & 1 << b_idx != 0
+ }
+}
+
+impl Input {
+ fn bit_index(&self, n: usize) -> (usize, usize) {
+ let idx = n / (bits::BITS as usize);
+ let b_idx = n % (bits::BITS as usize);
+ (idx, b_idx)
+ }
+ fn len(&self) -> usize {
+ self.kind.len()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
new file mode 100644
index 000000000..f4b9988ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -0,0 +1,300 @@
+//! Lexing `&str` into a sequence of Rust tokens.
+//!
+//! Note that strictly speaking the parser in this crate is not required to work
+//! on tokens which originated from text. Macros, e.g., can synthesize tokens out
+//! of thin air. So, ideally, the lexer should be an orthogonal crate. It is however
+//! convenient to include a text-based lexer here!
+//!
+//! Note that these tokens, unlike the tokens we feed into the parser, do
+//! include info about comments and whitespace.
+
+use std::ops;
+
+use crate::{
+ SyntaxKind::{self, *},
+ T,
+};
+
+pub struct LexedStr<'a> {
+ text: &'a str,
+ kind: Vec<SyntaxKind>,
+ start: Vec<u32>,
+ error: Vec<LexError>,
+}
+
+struct LexError {
+ msg: String,
+ token: u32,
+}
+
+impl<'a> LexedStr<'a> {
+ pub fn new(text: &'a str) -> LexedStr<'a> {
+ let mut conv = Converter::new(text);
+ if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
+ conv.res.push(SHEBANG, conv.offset);
+ conv.offset = shebang_len;
+ };
+
+ for token in rustc_lexer::tokenize(&text[conv.offset..]) {
+ let token_text = &text[conv.offset..][..token.len];
+
+ conv.extend_token(&token.kind, token_text);
+ }
+
+ conv.finalize_with_eof()
+ }
+
+ pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
+ if text.is_empty() {
+ return None;
+ }
+
+ let token = rustc_lexer::first_token(text);
+ if token.len != text.len() {
+ return None;
+ }
+
+ let mut conv = Converter::new(text);
+ conv.extend_token(&token.kind, text);
+ match &*conv.res.kind {
+ [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg.clone()))),
+ _ => None,
+ }
+ }
+
+ pub fn as_str(&self) -> &str {
+ self.text
+ }
+
+ pub fn len(&self) -> usize {
+ self.kind.len() - 1
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub fn kind(&self, i: usize) -> SyntaxKind {
+ assert!(i < self.len());
+ self.kind[i]
+ }
+
+ pub fn text(&self, i: usize) -> &str {
+ self.range_text(i..i + 1)
+ }
+ pub fn range_text(&self, r: ops::Range<usize>) -> &str {
+ assert!(r.start < r.end && r.end <= self.len());
+ let lo = self.start[r.start] as usize;
+ let hi = self.start[r.end] as usize;
+ &self.text[lo..hi]
+ }
+
+ // Naming is hard.
+ pub fn text_range(&self, i: usize) -> ops::Range<usize> {
+ assert!(i < self.len());
+ let lo = self.start[i] as usize;
+ let hi = self.start[i + 1] as usize;
+ lo..hi
+ }
+ pub fn text_start(&self, i: usize) -> usize {
+ assert!(i <= self.len());
+ self.start[i] as usize
+ }
+ pub fn text_len(&self, i: usize) -> usize {
+ assert!(i < self.len());
+ let r = self.text_range(i);
+ r.end - r.start
+ }
+
+ pub fn error(&self, i: usize) -> Option<&str> {
+ assert!(i < self.len());
+ let err = self.error.binary_search_by_key(&(i as u32), |i| i.token).ok()?;
+ Some(self.error[err].msg.as_str())
+ }
+
+ pub fn errors(&self) -> impl Iterator<Item = (usize, &str)> + '_ {
+ self.error.iter().map(|it| (it.token as usize, it.msg.as_str()))
+ }
+
+ fn push(&mut self, kind: SyntaxKind, offset: usize) {
+ self.kind.push(kind);
+ self.start.push(offset as u32);
+ }
+}
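A small sketch of how this lexer API can be driven, using only the methods defined above (an editorial illustration, not upstream code):

```rust
use parser::LexedStr;

fn dump_tokens(text: &str) {
    let lexed = LexedStr::new(text);
    for i in 0..lexed.len() {
        // `kind` and `text` index the token list; `error` reports per-token problems.
        println!("{:?} {:?}", lexed.kind(i), lexed.text(i));
        if let Some(msg) = lexed.error(i) {
            eprintln!("  error: {}", msg);
        }
    }
}
```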
+
+struct Converter<'a> {
+ res: LexedStr<'a>,
+ offset: usize,
+}
+
+impl<'a> Converter<'a> {
+ fn new(text: &'a str) -> Self {
+ Self {
+ res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
+ offset: 0,
+ }
+ }
+
+ fn finalize_with_eof(mut self) -> LexedStr<'a> {
+ self.res.push(EOF, self.offset);
+ self.res
+ }
+
+ fn push(&mut self, kind: SyntaxKind, len: usize, err: Option<&str>) {
+ self.res.push(kind, self.offset);
+ self.offset += len;
+
+ if let Some(err) = err {
+ let token = self.res.len() as u32;
+ let msg = err.to_string();
+ self.res.error.push(LexError { msg, token });
+ }
+ }
+
+ fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, token_text: &str) {
+ // A note on an intended tradeoff:
+ // We drop some useful information here (see patterns with double dots `..`)
+ // Storing that info in `SyntaxKind` is not possible due to its layout requirement of
+ // being a `u16`, which comes from `rowan::SyntaxKind`.
+ let mut err = "";
+
+ let syntax_kind = {
+ match kind {
+ rustc_lexer::TokenKind::LineComment { doc_style: _ } => COMMENT,
+ rustc_lexer::TokenKind::BlockComment { doc_style: _, terminated } => {
+ if !terminated {
+ err = "Missing trailing `*/` symbols to terminate the block comment";
+ }
+ COMMENT
+ }
+
+ rustc_lexer::TokenKind::Whitespace => WHITESPACE,
+
+ rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE,
+ rustc_lexer::TokenKind::Ident => {
+ SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
+ }
+
+ rustc_lexer::TokenKind::RawIdent => IDENT,
+ rustc_lexer::TokenKind::Literal { kind, .. } => {
+ self.extend_literal(token_text.len(), kind);
+ return;
+ }
+
+ rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+ if *starts_with_number {
+ err = "Lifetime name cannot start with a number";
+ }
+ LIFETIME_IDENT
+ }
+
+ rustc_lexer::TokenKind::Semi => T![;],
+ rustc_lexer::TokenKind::Comma => T![,],
+ rustc_lexer::TokenKind::Dot => T![.],
+ rustc_lexer::TokenKind::OpenParen => T!['('],
+ rustc_lexer::TokenKind::CloseParen => T![')'],
+ rustc_lexer::TokenKind::OpenBrace => T!['{'],
+ rustc_lexer::TokenKind::CloseBrace => T!['}'],
+ rustc_lexer::TokenKind::OpenBracket => T!['['],
+ rustc_lexer::TokenKind::CloseBracket => T![']'],
+ rustc_lexer::TokenKind::At => T![@],
+ rustc_lexer::TokenKind::Pound => T![#],
+ rustc_lexer::TokenKind::Tilde => T![~],
+ rustc_lexer::TokenKind::Question => T![?],
+ rustc_lexer::TokenKind::Colon => T![:],
+ rustc_lexer::TokenKind::Dollar => T![$],
+ rustc_lexer::TokenKind::Eq => T![=],
+ rustc_lexer::TokenKind::Bang => T![!],
+ rustc_lexer::TokenKind::Lt => T![<],
+ rustc_lexer::TokenKind::Gt => T![>],
+ rustc_lexer::TokenKind::Minus => T![-],
+ rustc_lexer::TokenKind::And => T![&],
+ rustc_lexer::TokenKind::Or => T![|],
+ rustc_lexer::TokenKind::Plus => T![+],
+ rustc_lexer::TokenKind::Star => T![*],
+ rustc_lexer::TokenKind::Slash => T![/],
+ rustc_lexer::TokenKind::Caret => T![^],
+ rustc_lexer::TokenKind::Percent => T![%],
+ rustc_lexer::TokenKind::Unknown => ERROR,
+ }
+ };
+
+ let err = if err.is_empty() { None } else { Some(err) };
+ self.push(syntax_kind, token_text.len(), err);
+ }
+
+ fn extend_literal(&mut self, len: usize, kind: &rustc_lexer::LiteralKind) {
+ let mut err = "";
+
+ let syntax_kind = match *kind {
+ rustc_lexer::LiteralKind::Int { empty_int, base: _ } => {
+ if empty_int {
+ err = "Missing digits after the integer base prefix";
+ }
+ INT_NUMBER
+ }
+ rustc_lexer::LiteralKind::Float { empty_exponent, base: _ } => {
+ if empty_exponent {
+ err = "Missing digits after the exponent symbol";
+ }
+ FLOAT_NUMBER
+ }
+ rustc_lexer::LiteralKind::Char { terminated } => {
+ if !terminated {
+ err = "Missing trailing `'` symbol to terminate the character literal";
+ }
+ CHAR
+ }
+ rustc_lexer::LiteralKind::Byte { terminated } => {
+ if !terminated {
+ err = "Missing trailing `'` symbol to terminate the byte literal";
+ }
+ BYTE
+ }
+ rustc_lexer::LiteralKind::Str { terminated } => {
+ if !terminated {
+ err = "Missing trailing `\"` symbol to terminate the string literal";
+ }
+ STRING
+ }
+ rustc_lexer::LiteralKind::ByteStr { terminated } => {
+ if !terminated {
+ err = "Missing trailing `\"` symbol to terminate the byte string literal";
+ }
+ BYTE_STRING
+ }
+ rustc_lexer::LiteralKind::RawStr { err: raw_str_err, .. } => {
+ if let Some(raw_str_err) = raw_str_err {
+ err = match raw_str_err {
+ rustc_lexer::RawStrError::InvalidStarter { .. } => "Missing `\"` symbol after `#` symbols to begin the raw string literal",
+ rustc_lexer::RawStrError::NoTerminator { expected, found, .. } => if expected == found {
+ "Missing trailing `\"` to terminate the raw string literal"
+ } else {
+ "Missing trailing `\"` with `#` symbols to terminate the raw string literal"
+ },
+ rustc_lexer::RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols",
+ };
+ };
+ STRING
+ }
+ rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => {
+ if let Some(raw_str_err) = raw_str_err {
+ err = match raw_str_err {
+ rustc_lexer::RawStrError::InvalidStarter { .. } => "Missing `\"` symbol after `#` symbols to begin the raw byte string literal",
+ rustc_lexer::RawStrError::NoTerminator { expected, found, .. } => if expected == found {
+ "Missing trailing `\"` to terminate the raw byte string literal"
+ } else {
+ "Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"
+ },
+ rustc_lexer::RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols",
+ };
+ };
+
+ BYTE_STRING
+ }
+ };
+
+ let err = if err.is_empty() { None } else { Some(err) };
+ self.push(syntax_kind, len, err);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
new file mode 100644
index 000000000..87be47927
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -0,0 +1,181 @@
+//! The Rust parser.
+//!
+//! NOTE: The crate is undergoing refactors, don't believe everything the docs
+//! say :-)
+//!
+//! The parser doesn't know about concrete representation of tokens and syntax
+//! trees. Abstract [`TokenSource`] and [`TreeSink`] traits are used instead. As
+//! a consequence, this crate does not contain a lexer.
+//!
+//! The [`Parser`] struct from the [`parser`] module is a cursor into the
+//! sequence of tokens. Parsing routines use [`Parser`] to inspect current
+//! state and advance the parsing.
+//!
+//! The actual parsing happens in the [`grammar`] module.
+//!
+//! Tests for this crate live in the `syntax` crate.
+//!
+//! [`Parser`]: crate::parser::Parser
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![allow(rustdoc::private_intra_doc_links)]
+
+mod lexed_str;
+mod token_set;
+mod syntax_kind;
+mod event;
+mod parser;
+mod grammar;
+mod input;
+mod output;
+mod shortcuts;
+
+#[cfg(test)]
+mod tests;
+
+pub(crate) use token_set::TokenSet;
+
+pub use crate::{
+ input::Input,
+ lexed_str::LexedStr,
+ output::{Output, Step},
+ shortcuts::StrStep,
+ syntax_kind::SyntaxKind,
+};
+
+/// Parse the whole of the input as a given syntactic construct.
+///
+/// This covers two main use-cases:
+///
+/// * Parsing a Rust file.
+/// * Parsing a result of macro expansion.
+///
+/// That is, for something like
+///
+/// ```
+/// quick_check! {
+/// fn prop() {}
+/// }
+/// ```
+///
+/// the input to the macro will be parsed with [`PrefixEntryPoint::Item`], and
+/// the result will be [`TopEntryPoint::MacroItems`].
+///
+/// [`TopEntryPoint::parse`] makes a guarantee that
+/// * all input is consumed
+/// * the result is a valid tree (there's one root node)
+#[derive(Debug)]
+pub enum TopEntryPoint {
+ SourceFile,
+ MacroStmts,
+ MacroItems,
+ Pattern,
+ Type,
+ Expr,
+ /// Edge case -- macros generally don't expand to attributes, with the
+ /// exception of `cfg_attr` which does!
+ MetaItem,
+}
+
+impl TopEntryPoint {
+ pub fn parse(&self, input: &Input) -> Output {
+ let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
+ TopEntryPoint::SourceFile => grammar::entry::top::source_file,
+ TopEntryPoint::MacroStmts => grammar::entry::top::macro_stmts,
+ TopEntryPoint::MacroItems => grammar::entry::top::macro_items,
+ TopEntryPoint::Pattern => grammar::entry::top::pattern,
+ TopEntryPoint::Type => grammar::entry::top::type_,
+ TopEntryPoint::Expr => grammar::entry::top::expr,
+ TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
+ };
+ let mut p = parser::Parser::new(input);
+ entry_point(&mut p);
+ let events = p.finish();
+ let res = event::process(events);
+
+ if cfg!(debug_assertions) {
+ let mut depth = 0;
+ let mut first = true;
+ for step in res.iter() {
+ assert!(depth > 0 || first);
+ first = false;
+ match step {
+ Step::Enter { .. } => depth += 1,
+ Step::Exit => depth -= 1,
+ Step::Token { .. } | Step::Error { .. } => (),
+ }
+ }
+ assert!(!first, "no tree at all");
+ }
+
+ res
+ }
+}
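A sketch of driving a top-level parse and walking the resulting traversal. It assumes an `Input` has already been built (the lexed-text-to-`Input` conversion lives in the crate's shortcuts module, which is not part of this hunk):

```rust
use parser::{Input, Output, Step, TopEntryPoint};

fn parse_source_file(input: &Input) -> Output {
    let output = TopEntryPoint::SourceFile.parse(input);
    for step in output.iter() {
        match step {
            Step::Enter { kind } => println!("enter {:?}", kind),
            Step::Exit => println!("exit"),
            Step::Token { kind, n_input_tokens } => println!("token {:?} x{}", kind, n_input_tokens),
            Step::Error { msg } => eprintln!("error: {}", msg),
        }
    }
    output
}
```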
+
+/// Parse a prefix of the input as a given syntactic construct.
+///
+/// This is used by the macro-by-example parser to implement things like `$i:item`,
+/// and the naming of variants follows the naming of macro fragments.
+///
+/// Note that this is generally non-optional -- the result is intentionally not
+/// `Option<Output>`. The way MBE works, by the time we *try* to parse `$e:expr`
+/// we have already committed to an expression. In other words, this API by design can't be
+/// used to implement "rollback and try another alternative" logic.
+#[derive(Debug)]
+pub enum PrefixEntryPoint {
+ Vis,
+ Block,
+ Stmt,
+ Pat,
+ Ty,
+ Expr,
+ Path,
+ Item,
+ MetaItem,
+}
+
+impl PrefixEntryPoint {
+ pub fn parse(&self, input: &Input) -> Output {
+ let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
+ PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
+ PrefixEntryPoint::Block => grammar::entry::prefix::block,
+ PrefixEntryPoint::Stmt => grammar::entry::prefix::stmt,
+ PrefixEntryPoint::Pat => grammar::entry::prefix::pat,
+ PrefixEntryPoint::Ty => grammar::entry::prefix::ty,
+ PrefixEntryPoint::Expr => grammar::entry::prefix::expr,
+ PrefixEntryPoint::Path => grammar::entry::prefix::path,
+ PrefixEntryPoint::Item => grammar::entry::prefix::item,
+ PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
+ };
+ let mut p = parser::Parser::new(input);
+ entry_point(&mut p);
+ let events = p.finish();
+ event::process(events)
+ }
+}
+
+/// A parsing function for a specific braced-block.
+pub struct Reparser(fn(&mut parser::Parser<'_>));
+
+impl Reparser {
+ /// If the node is a braced block, return the corresponding `Reparser`.
+ pub fn for_node(
+ node: SyntaxKind,
+ first_child: Option<SyntaxKind>,
+ parent: Option<SyntaxKind>,
+ ) -> Option<Reparser> {
+ grammar::reparser(node, first_child, parent).map(Reparser)
+ }
+
+ /// Re-parse given tokens using this `Reparser`.
+ ///
+ /// Tokens must start with `{`, end with `}` and form a valid brace
+ /// sequence.
+ pub fn parse(self, tokens: &Input) -> Output {
+ let Reparser(r) = self;
+ let mut p = parser::Parser::new(tokens);
+ r(&mut p);
+ let events = p.finish();
+ event::process(events)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/output.rs b/src/tools/rust-analyzer/crates/parser/src/output.rs
new file mode 100644
index 000000000..e9ec9822d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/output.rs
@@ -0,0 +1,77 @@
+//! See [`Output`]
+
+use crate::SyntaxKind;
+
+/// Output of the parser -- a DFS traversal of a concrete syntax tree.
+///
+/// Use the [`Output::iter`] method to iterate over traversal steps and consume
+/// a syntax tree.
+///
+/// In a sense, this is just a sequence of [`SyntaxKind`]-colored parenthesis
+/// interspersed into the original [`crate::Input`]. The output is fundamentally
+/// coordinated with the input and `n_input_tokens` refers to the number of
+/// times [`crate::Input::push`] was called.
+#[derive(Default)]
+pub struct Output {
+ /// 32-bit encoding of events. If LSB is zero, then that's an index into the
+ /// error vector. Otherwise, it's one of the three other variants, with data encoded as
+ ///
+ /// |16 bit kind|8 bit n_input_tokens|4 bit tag|4 bit leftover|
+ ///
+ event: Vec<u32>,
+ error: Vec<String>,
+}
+
+#[derive(Debug)]
+pub enum Step<'a> {
+ Token { kind: SyntaxKind, n_input_tokens: u8 },
+ Enter { kind: SyntaxKind },
+ Exit,
+ Error { msg: &'a str },
+}
+
+impl Output {
+ pub fn iter(&self) -> impl Iterator<Item = Step<'_>> {
+ self.event.iter().map(|&event| {
+ if event & 0b1 == 0 {
+ return Step::Error { msg: self.error[(event as usize) >> 1].as_str() };
+ }
+ let tag = ((event & 0x0000_00F0) >> 4) as u8;
+ match tag {
+ 0 => {
+ let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
+ let n_input_tokens = ((event & 0x0000_FF00) >> 8) as u8;
+ Step::Token { kind, n_input_tokens }
+ }
+ 1 => {
+ let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
+ Step::Enter { kind }
+ }
+ 2 => Step::Exit,
+ _ => unreachable!(),
+ }
+ })
+ }
+
+ pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+ let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1;
+ self.event.push(e)
+ }
+
+ pub(crate) fn enter_node(&mut self, kind: SyntaxKind) {
+ let e = ((kind as u16 as u32) << 16) | (1 << 4) | 1;
+ self.event.push(e)
+ }
+
+ pub(crate) fn leave_node(&mut self) {
+ let e = 2 << 4 | 1;
+ self.event.push(e)
+ }
+
+ pub(crate) fn error(&mut self, error: String) {
+ let idx = self.error.len();
+ self.error.push(error);
+ let e = (idx as u32) << 1;
+ self.event.push(e);
+ }
+}
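The bit layout is easiest to see with a worked example; this sketch encodes event words the same way the methods above do and decodes the tag the same way `Output::iter` does (the concrete `SyntaxKind` discriminants come from the generated enum later in this diff):

```rust
/// Tag bits 4..8 of an event word: 0 = token, 1 = enter, 2 = exit.
fn tag(event: u32) -> u8 {
    ((event & 0x0000_00F0) >> 4) as u8
}

fn demo(kind_as_u16: u16) {
    // What `enter_node(kind)` pushes: kind in the high 16 bits, tag 1, LSB 1.
    let enter = ((kind_as_u16 as u32) << 16) | (1 << 4) | 1;
    // What `token(kind, 2)` pushes: additionally n_input_tokens = 2 in bits 8..16.
    let token = ((kind_as_u16 as u32) << 16) | (2 << 8) | 1;
    // What the first call to `error(..)` pushes: index 0 shifted left, so LSB 0.
    let error = 0u32 << 1;

    assert_eq!(tag(enter), 1);
    assert_eq!(tag(token), 0);
    assert_eq!(error & 0b1, 0); // even words are error indices
}
```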
diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs
new file mode 100644
index 000000000..48d8350e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs
@@ -0,0 +1,340 @@
+//! See [`Parser`].
+
+use std::cell::Cell;
+
+use drop_bomb::DropBomb;
+use limit::Limit;
+
+use crate::{
+ event::Event,
+ input::Input,
+ SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
+ TokenSet, T,
+};
+
+/// The `Parser` struct provides the low-level API for
+/// navigating through the stream of tokens and
+/// constructing the parse tree. The actual parsing
+/// happens in the [`grammar`](super::grammar) module.
+///
+/// However, the result of this `Parser` is not a real
+/// tree, but rather a flat stream of events of the form
+/// "start expression, consume number literal,
+/// finish expression". See `Event` docs for more.
+pub(crate) struct Parser<'t> {
+ inp: &'t Input,
+ pos: usize,
+ events: Vec<Event>,
+ steps: Cell<u32>,
+}
+
+static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
+
+impl<'t> Parser<'t> {
+ pub(super) fn new(inp: &'t Input) -> Parser<'t> {
+ Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
+ }
+
+ pub(crate) fn finish(self) -> Vec<Event> {
+ self.events
+ }
+
+ /// Returns the kind of the current token.
+ /// If the parser has already reached the end of input,
+ /// the special `EOF` kind is returned.
+ pub(crate) fn current(&self) -> SyntaxKind {
+ self.nth(0)
+ }
+
+ /// Lookahead operation: returns the kind of the next nth
+ /// token.
+ pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
+ assert!(n <= 3);
+
+ let steps = self.steps.get();
+ assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
+ self.steps.set(steps + 1);
+
+ self.inp.kind(self.pos + n)
+ }
+
+ /// Checks if the current token is `kind`.
+ pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
+ self.nth_at(0, kind)
+ }
+
+ pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
+ match kind {
+ T![-=] => self.at_composite2(n, T![-], T![=]),
+ T![->] => self.at_composite2(n, T![-], T![>]),
+ T![::] => self.at_composite2(n, T![:], T![:]),
+ T![!=] => self.at_composite2(n, T![!], T![=]),
+ T![..] => self.at_composite2(n, T![.], T![.]),
+ T![*=] => self.at_composite2(n, T![*], T![=]),
+ T![/=] => self.at_composite2(n, T![/], T![=]),
+ T![&&] => self.at_composite2(n, T![&], T![&]),
+ T![&=] => self.at_composite2(n, T![&], T![=]),
+ T![%=] => self.at_composite2(n, T![%], T![=]),
+ T![^=] => self.at_composite2(n, T![^], T![=]),
+ T![+=] => self.at_composite2(n, T![+], T![=]),
+ T![<<] => self.at_composite2(n, T![<], T![<]),
+ T![<=] => self.at_composite2(n, T![<], T![=]),
+ T![==] => self.at_composite2(n, T![=], T![=]),
+ T![=>] => self.at_composite2(n, T![=], T![>]),
+ T![>=] => self.at_composite2(n, T![>], T![=]),
+ T![>>] => self.at_composite2(n, T![>], T![>]),
+ T![|=] => self.at_composite2(n, T![|], T![=]),
+ T![||] => self.at_composite2(n, T![|], T![|]),
+
+ T![...] => self.at_composite3(n, T![.], T![.], T![.]),
+ T![..=] => self.at_composite3(n, T![.], T![.], T![=]),
+ T![<<=] => self.at_composite3(n, T![<], T![<], T![=]),
+ T![>>=] => self.at_composite3(n, T![>], T![>], T![=]),
+
+ _ => self.inp.kind(self.pos + n) == kind,
+ }
+ }
+
+ /// Consume the next token if `kind` matches.
+ pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
+ if !self.at(kind) {
+ return false;
+ }
+ let n_raw_tokens = match kind {
+ T![-=]
+ | T![->]
+ | T![::]
+ | T![!=]
+ | T![..]
+ | T![*=]
+ | T![/=]
+ | T![&&]
+ | T![&=]
+ | T![%=]
+ | T![^=]
+ | T![+=]
+ | T![<<]
+ | T![<=]
+ | T![==]
+ | T![=>]
+ | T![>=]
+ | T![>>]
+ | T![|=]
+ | T![||] => 2,
+
+ T![...] | T![..=] | T![<<=] | T![>>=] => 3,
+ _ => 1,
+ };
+ self.do_bump(kind, n_raw_tokens);
+ true
+ }
+
+ fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
+ self.inp.kind(self.pos + n) == k1
+ && self.inp.kind(self.pos + n + 1) == k2
+ && self.inp.is_joint(self.pos + n)
+ }
+
+ fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
+ self.inp.kind(self.pos + n) == k1
+ && self.inp.kind(self.pos + n + 1) == k2
+ && self.inp.kind(self.pos + n + 2) == k3
+ && self.inp.is_joint(self.pos + n)
+ && self.inp.is_joint(self.pos + n + 1)
+ }
+
+ /// Checks if the current token is in `kinds`.
+ pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool {
+ kinds.contains(self.current())
+ }
+
+ /// Checks if the current token is the contextual keyword `kw`.
+ pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool {
+ self.inp.contextual_kind(self.pos) == kw
+ }
+
+ /// Starts a new node in the syntax tree. All nodes and tokens
+ /// consumed between the `start` and the corresponding `Marker::complete`
+ /// belong to the same node.
+ pub(crate) fn start(&mut self) -> Marker {
+ let pos = self.events.len() as u32;
+ self.push_event(Event::tombstone());
+ Marker::new(pos)
+ }
+
+ /// Consume the next token if `kind` matches.
+ pub(crate) fn bump(&mut self, kind: SyntaxKind) {
+ assert!(self.eat(kind));
+ }
+
+ /// Advances the parser by one token
+ pub(crate) fn bump_any(&mut self) {
+ let kind = self.nth(0);
+ if kind == EOF {
+ return;
+ }
+ self.do_bump(kind, 1);
+ }
+
+ /// Advances the parser by one token, remapping its kind.
+ /// This is useful for creating contextual keywords from
+ /// identifiers. For example, the lexer creates a `union`
+ /// *identifier* token, but the parser remaps it to the
+ /// `union` keyword, and the keyword is what ends up in the
+ /// final tree.
+ pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) {
+ if self.nth(0) == EOF {
+ // FIXME: panic!?
+ return;
+ }
+ self.do_bump(kind, 1);
+ }
+
+ /// Emit an error with the given `message`.
+ /// FIXME: this should be much more fancy and support
+ /// structured errors with spans and notes, like rustc
+ /// does.
+ pub(crate) fn error<T: Into<String>>(&mut self, message: T) {
+ let msg = message.into();
+ self.push_event(Event::Error { msg });
+ }
+
+ /// Consume the next token if it is `kind` or emit an error
+ /// otherwise.
+ pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
+ if self.eat(kind) {
+ return true;
+ }
+ self.error(format!("expected {:?}", kind));
+ false
+ }
+
+ /// Create an error node and consume the next token.
+ pub(crate) fn err_and_bump(&mut self, message: &str) {
+ self.err_recover(message, TokenSet::EMPTY);
+ }
+
+ /// Create an error node and consume the next token.
+ pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
+ match self.current() {
+ T!['{'] | T!['}'] => {
+ self.error(message);
+ return;
+ }
+ _ => (),
+ }
+
+ if self.at_ts(recovery) {
+ self.error(message);
+ return;
+ }
+
+ let m = self.start();
+ self.error(message);
+ self.bump_any();
+ m.complete(self, ERROR);
+ }
+
+ fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
+ self.pos += n_raw_tokens as usize;
+ self.push_event(Event::Token { kind, n_raw_tokens });
+ }
+
+ fn push_event(&mut self, event: Event) {
+ self.events.push(event);
+ }
+}
+
+/// See [`Parser::start`].
+pub(crate) struct Marker {
+ pos: u32,
+ bomb: DropBomb,
+}
+
+impl Marker {
+ fn new(pos: u32) -> Marker {
+ Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") }
+ }
+
+ /// Finishes the syntax tree node and assigns `kind` to it,
+ /// and creates a `CompletedMarker` for possible future
+ /// operations like `.precede()` to deal with `forward_parent`.
+ pub(crate) fn complete(mut self, p: &mut Parser<'_>, kind: SyntaxKind) -> CompletedMarker {
+ self.bomb.defuse();
+ let idx = self.pos as usize;
+ match &mut p.events[idx] {
+ Event::Start { kind: slot, .. } => {
+ *slot = kind;
+ }
+ _ => unreachable!(),
+ }
+ p.push_event(Event::Finish);
+ CompletedMarker::new(self.pos, kind)
+ }
+
+ /// Abandons the syntax tree node. All its children
+ /// are attached to its parent instead.
+ pub(crate) fn abandon(mut self, p: &mut Parser<'_>) {
+ self.bomb.defuse();
+ let idx = self.pos as usize;
+ if idx == p.events.len() - 1 {
+ match p.events.pop() {
+ Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (),
+ _ => unreachable!(),
+ }
+ }
+ }
+}
+
+pub(crate) struct CompletedMarker {
+ pos: u32,
+ kind: SyntaxKind,
+}
+
+impl CompletedMarker {
+ fn new(pos: u32, kind: SyntaxKind) -> Self {
+ CompletedMarker { pos, kind }
+ }
+
+ /// This method allows creating a new node which starts
+ /// *before* the current one. That is, the parser could start
+ /// node `A`, then complete it, and then, after parsing the
+ /// whole `A`, decide that it should have started some node
+ /// `B` before starting `A`. `precede` makes exactly that
+ /// possible. See also docs about
+ /// [`Event::Start::forward_parent`](crate::event::Event::Start::forward_parent).
+ ///
+ /// Given the completed events `[START, FINISH]` and the corresponding
+ /// `CompletedMarker(pos: 0, _)`, this appends a new `START` event,
+ /// producing `[START, FINISH, NEWSTART]`, and then marks `NEWSTART` as
+ /// `START`'s parent by storing the relative distance from `START` to
+ /// `NEWSTART` into its `forward_parent` field (2 in this case).
+ pub(crate) fn precede(self, p: &mut Parser<'_>) -> Marker {
+ let new_pos = p.start();
+ let idx = self.pos as usize;
+ match &mut p.events[idx] {
+ Event::Start { forward_parent, .. } => {
+ *forward_parent = Some(new_pos.pos - self.pos);
+ }
+ _ => unreachable!(),
+ }
+ new_pos
+ }
+
+ /// Extends this completed marker *to the left* up to `m`.
+ pub(crate) fn extend_to(self, p: &mut Parser<'_>, mut m: Marker) -> CompletedMarker {
+ m.bomb.defuse();
+ let idx = m.pos as usize;
+ match &mut p.events[idx] {
+ Event::Start { forward_parent, .. } => {
+ *forward_parent = Some(self.pos - m.pos);
+ }
+ _ => unreachable!(),
+ }
+ self
+ }
+
+ pub(crate) fn kind(&self) -> SyntaxKind {
+ self.kind
+ }
+}
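The grammar code that drives this API lives in the `grammar` module, which is not shown here; purely as an illustrative sketch (node kinds and structure chosen for the example, not taken from the grammar), a parsing function typically looks like this:

```rust
use crate::SyntaxKind::{FIELD_EXPR, PAREN_EXPR};
use crate::T;

fn paren_expr(p: &mut Parser<'_>) {
    let m = p.start(); // open a node
    p.bump(T!['(']); // the caller has checked that `(` is the current token
    // ... parse the inner expression ...
    p.expect(T![')']); // consume `)` or emit an error event
    let cm = m.complete(p, PAREN_EXPR);

    // If a postfix construct follows, wrap the finished node into a new parent.
    if p.at(T![.]) {
        let m = cm.precede(p);
        // ... parse the field access ...
        m.complete(p, FIELD_EXPR);
    }
}
```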
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
new file mode 100644
index 000000000..4b805fadd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -0,0 +1,215 @@
+//! Shortcuts that span lexer/parser abstraction.
+//!
+//! The way Rust works, the parser doesn't necessarily parse text, and you might
+//! tokenize text without parsing it further. So, it makes sense to keep
+//! abstract token parsing and string tokenization as completely separate
+//! layers.
+//!
+//! However, often you do parse text into syntax trees, and the glue code for
+//! that needs to live somewhere. Rather than putting it into the lexer or the
+//! parser, we use a separate shortcuts module for that.
+
+use std::mem;
+
+use crate::{
+ LexedStr, Step,
+ SyntaxKind::{self, *},
+};
+
+#[derive(Debug)]
+pub enum StrStep<'a> {
+ Token { kind: SyntaxKind, text: &'a str },
+ Enter { kind: SyntaxKind },
+ Exit,
+ Error { msg: &'a str, pos: usize },
+}
+
+impl<'a> LexedStr<'a> {
+ pub fn to_input(&self) -> crate::Input {
+ let mut res = crate::Input::default();
+ let mut was_joint = false;
+ for i in 0..self.len() {
+ let kind = self.kind(i);
+ if kind.is_trivia() {
+ was_joint = false
+ } else {
+ if kind == SyntaxKind::IDENT {
+ let token_text = self.text(i);
+ let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
+ .unwrap_or(SyntaxKind::IDENT);
+ res.push_ident(contextual_kw);
+ } else {
+ if was_joint {
+ res.was_joint();
+ }
+ res.push(kind);
+ }
+ was_joint = true;
+ }
+ }
+ res
+ }
+
+ /// NB: only valid to call with an `Output` produced by `Reparser` or `TopEntryPoint`.
+ pub fn intersperse_trivia(
+ &self,
+ output: &crate::Output,
+ sink: &mut dyn FnMut(StrStep<'_>),
+ ) -> bool {
+ let mut builder = Builder { lexed: self, pos: 0, state: State::PendingEnter, sink };
+
+ for event in output.iter() {
+ match event {
+ Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+ builder.token(kind, n_raw_tokens)
+ }
+ Step::Enter { kind } => builder.enter(kind),
+ Step::Exit => builder.exit(),
+ Step::Error { msg } => {
+ let text_pos = builder.lexed.text_start(builder.pos);
+ (builder.sink)(StrStep::Error { msg, pos: text_pos });
+ }
+ }
+ }
+
+ match mem::replace(&mut builder.state, State::Normal) {
+ State::PendingExit => {
+ builder.eat_trivias();
+ (builder.sink)(StrStep::Exit);
+ }
+ State::PendingEnter | State::Normal => unreachable!(),
+ }
+
+ let is_eof = builder.pos == builder.lexed.len();
+ is_eof
+ }
+}
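A small sketch of what `to_input` does with a contextual keyword, assuming the `Input` push methods behave as their names suggest:

```rust
fn demo() {
    let lexed = crate::LexedStr::new("union U { }");
    let _input = lexed.to_input();
    // `union` is lexed as an IDENT but pushed with its contextual kind
    // UNION_KW; `U`, `{` and `}` follow, while the whitespace trivia is
    // dropped and breaks token jointness.
}
```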
+
+struct Builder<'a, 'b> {
+ lexed: &'a LexedStr<'a>,
+ pos: usize,
+ state: State,
+ sink: &'b mut dyn FnMut(StrStep<'_>),
+}
+
+enum State {
+ PendingEnter,
+ Normal,
+ PendingExit,
+}
+
+impl Builder<'_, '_> {
+ fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+ match mem::replace(&mut self.state, State::Normal) {
+ State::PendingEnter => unreachable!(),
+ State::PendingExit => (self.sink)(StrStep::Exit),
+ State::Normal => (),
+ }
+ self.eat_trivias();
+ self.do_token(kind, n_tokens as usize);
+ }
+
+ fn enter(&mut self, kind: SyntaxKind) {
+ match mem::replace(&mut self.state, State::Normal) {
+ State::PendingEnter => {
+ (self.sink)(StrStep::Enter { kind });
+ // No need to attach trivias to previous node: there is no
+ // previous node.
+ return;
+ }
+ State::PendingExit => (self.sink)(StrStep::Exit),
+ State::Normal => (),
+ }
+
+ let n_trivias =
+ (self.pos..self.lexed.len()).take_while(|&it| self.lexed.kind(it).is_trivia()).count();
+ let leading_trivias = self.pos..self.pos + n_trivias;
+ let n_attached_trivias = n_attached_trivias(
+ kind,
+ leading_trivias.rev().map(|it| (self.lexed.kind(it), self.lexed.text(it))),
+ );
+ self.eat_n_trivias(n_trivias - n_attached_trivias);
+ (self.sink)(StrStep::Enter { kind });
+ self.eat_n_trivias(n_attached_trivias);
+ }
+
+ fn exit(&mut self) {
+ match mem::replace(&mut self.state, State::PendingExit) {
+ State::PendingEnter => unreachable!(),
+ State::PendingExit => (self.sink)(StrStep::Exit),
+ State::Normal => (),
+ }
+ }
+
+ fn eat_trivias(&mut self) {
+ while self.pos < self.lexed.len() {
+ let kind = self.lexed.kind(self.pos);
+ if !kind.is_trivia() {
+ break;
+ }
+ self.do_token(kind, 1);
+ }
+ }
+
+ fn eat_n_trivias(&mut self, n: usize) {
+ for _ in 0..n {
+ let kind = self.lexed.kind(self.pos);
+ assert!(kind.is_trivia());
+ self.do_token(kind, 1);
+ }
+ }
+
+ fn do_token(&mut self, kind: SyntaxKind, n_tokens: usize) {
+ let text = &self.lexed.range_text(self.pos..self.pos + n_tokens);
+ self.pos += n_tokens;
+ (self.sink)(StrStep::Token { kind, text });
+ }
+}
+
+fn n_attached_trivias<'a>(
+ kind: SyntaxKind,
+ trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
+) -> usize {
+ match kind {
+ CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD
+ | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => {
+ let mut res = 0;
+ let mut trivias = trivias.enumerate().peekable();
+
+ while let Some((i, (kind, text))) = trivias.next() {
+ match kind {
+ WHITESPACE if text.contains("\n\n") => {
+ // we check whether the next token is a doc-comment
+ // and skip the whitespace in this case
+ if let Some((COMMENT, peek_text)) = trivias.peek().map(|(_, pair)| pair) {
+ if is_outer(peek_text) {
+ continue;
+ }
+ }
+ break;
+ }
+ COMMENT => {
+ if is_inner(text) {
+ break;
+ }
+ res = i + 1;
+ }
+ _ => (),
+ }
+ }
+ res
+ }
+ _ => 0,
+ }
+}
+
+fn is_outer(text: &str) -> bool {
+ if text.starts_with("////") || text.starts_with("/***") {
+ return false;
+ }
+ text.starts_with("///") || text.starts_with("/**")
+}
+
+fn is_inner(text: &str) -> bool {
+ text.starts_with("//!") || text.starts_with("/*!")
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
new file mode 100644
index 000000000..0483adc77
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
@@ -0,0 +1,29 @@
+//! Defines [`SyntaxKind`] -- a fieldless enum of all possible syntactic
+//! constructs of the Rust language.
+
+mod generated;
+
+#[allow(unreachable_pub)]
+pub use self::generated::{SyntaxKind, T};
+
+impl From<u16> for SyntaxKind {
+ #[inline]
+ fn from(d: u16) -> SyntaxKind {
+ assert!(d <= (SyntaxKind::__LAST as u16));
+ unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }
+ }
+}
+
+impl From<SyntaxKind> for u16 {
+ #[inline]
+ fn from(k: SyntaxKind) -> u16 {
+ k as u16
+ }
+}
+
+impl SyntaxKind {
+ #[inline]
+ pub fn is_trivia(self) -> bool {
+ matches!(self, SyntaxKind::WHITESPACE | SyntaxKind::COMMENT)
+ }
+}
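A minimal sketch of the `u16` round-trip these impls provide, which `Output::iter` relies on when it decodes the high 16 bits of an event word:

```rust
fn roundtrip(kind: SyntaxKind) {
    let raw: u16 = kind.into();
    let back: SyntaxKind = raw.into();
    assert_eq!(back, kind); // holds for every kind up to `__LAST`
}
```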
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
new file mode 100644
index 000000000..628fa745e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -0,0 +1,390 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+#![allow(bad_style, missing_docs, unreachable_pub)]
+#[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+#[repr(u16)]
+pub enum SyntaxKind {
+ #[doc(hidden)]
+ TOMBSTONE,
+ #[doc(hidden)]
+ EOF,
+ SEMICOLON,
+ COMMA,
+ L_PAREN,
+ R_PAREN,
+ L_CURLY,
+ R_CURLY,
+ L_BRACK,
+ R_BRACK,
+ L_ANGLE,
+ R_ANGLE,
+ AT,
+ POUND,
+ TILDE,
+ QUESTION,
+ DOLLAR,
+ AMP,
+ PIPE,
+ PLUS,
+ STAR,
+ SLASH,
+ CARET,
+ PERCENT,
+ UNDERSCORE,
+ DOT,
+ DOT2,
+ DOT3,
+ DOT2EQ,
+ COLON,
+ COLON2,
+ EQ,
+ EQ2,
+ FAT_ARROW,
+ BANG,
+ NEQ,
+ MINUS,
+ THIN_ARROW,
+ LTEQ,
+ GTEQ,
+ PLUSEQ,
+ MINUSEQ,
+ PIPEEQ,
+ AMPEQ,
+ CARETEQ,
+ SLASHEQ,
+ STAREQ,
+ PERCENTEQ,
+ AMP2,
+ PIPE2,
+ SHL,
+ SHR,
+ SHLEQ,
+ SHREQ,
+ AS_KW,
+ ASYNC_KW,
+ AWAIT_KW,
+ BOX_KW,
+ BREAK_KW,
+ CONST_KW,
+ CONTINUE_KW,
+ CRATE_KW,
+ DYN_KW,
+ ELSE_KW,
+ ENUM_KW,
+ EXTERN_KW,
+ FALSE_KW,
+ FN_KW,
+ FOR_KW,
+ IF_KW,
+ IMPL_KW,
+ IN_KW,
+ LET_KW,
+ LOOP_KW,
+ MACRO_KW,
+ MATCH_KW,
+ MOD_KW,
+ MOVE_KW,
+ MUT_KW,
+ PUB_KW,
+ REF_KW,
+ RETURN_KW,
+ SELF_KW,
+ SELF_TYPE_KW,
+ STATIC_KW,
+ STRUCT_KW,
+ SUPER_KW,
+ TRAIT_KW,
+ TRUE_KW,
+ TRY_KW,
+ TYPE_KW,
+ UNSAFE_KW,
+ USE_KW,
+ WHERE_KW,
+ WHILE_KW,
+ YIELD_KW,
+ AUTO_KW,
+ DEFAULT_KW,
+ EXISTENTIAL_KW,
+ UNION_KW,
+ RAW_KW,
+ MACRO_RULES_KW,
+ INT_NUMBER,
+ FLOAT_NUMBER,
+ CHAR,
+ BYTE,
+ STRING,
+ BYTE_STRING,
+ ERROR,
+ IDENT,
+ WHITESPACE,
+ LIFETIME_IDENT,
+ COMMENT,
+ SHEBANG,
+ SOURCE_FILE,
+ STRUCT,
+ UNION,
+ ENUM,
+ FN,
+ RET_TYPE,
+ EXTERN_CRATE,
+ MODULE,
+ USE,
+ STATIC,
+ CONST,
+ TRAIT,
+ IMPL,
+ TYPE_ALIAS,
+ MACRO_CALL,
+ MACRO_RULES,
+ MACRO_ARM,
+ TOKEN_TREE,
+ MACRO_DEF,
+ PAREN_TYPE,
+ TUPLE_TYPE,
+ MACRO_TYPE,
+ NEVER_TYPE,
+ PATH_TYPE,
+ PTR_TYPE,
+ ARRAY_TYPE,
+ SLICE_TYPE,
+ REF_TYPE,
+ INFER_TYPE,
+ FN_PTR_TYPE,
+ FOR_TYPE,
+ IMPL_TRAIT_TYPE,
+ DYN_TRAIT_TYPE,
+ OR_PAT,
+ PAREN_PAT,
+ REF_PAT,
+ BOX_PAT,
+ IDENT_PAT,
+ WILDCARD_PAT,
+ REST_PAT,
+ PATH_PAT,
+ RECORD_PAT,
+ RECORD_PAT_FIELD_LIST,
+ RECORD_PAT_FIELD,
+ TUPLE_STRUCT_PAT,
+ TUPLE_PAT,
+ SLICE_PAT,
+ RANGE_PAT,
+ LITERAL_PAT,
+ MACRO_PAT,
+ CONST_BLOCK_PAT,
+ TUPLE_EXPR,
+ ARRAY_EXPR,
+ PAREN_EXPR,
+ PATH_EXPR,
+ CLOSURE_EXPR,
+ IF_EXPR,
+ WHILE_EXPR,
+ LOOP_EXPR,
+ FOR_EXPR,
+ CONTINUE_EXPR,
+ BREAK_EXPR,
+ LABEL,
+ BLOCK_EXPR,
+ STMT_LIST,
+ RETURN_EXPR,
+ YIELD_EXPR,
+ LET_EXPR,
+ UNDERSCORE_EXPR,
+ MACRO_EXPR,
+ MATCH_EXPR,
+ MATCH_ARM_LIST,
+ MATCH_ARM,
+ MATCH_GUARD,
+ RECORD_EXPR,
+ RECORD_EXPR_FIELD_LIST,
+ RECORD_EXPR_FIELD,
+ BOX_EXPR,
+ CALL_EXPR,
+ INDEX_EXPR,
+ METHOD_CALL_EXPR,
+ FIELD_EXPR,
+ AWAIT_EXPR,
+ TRY_EXPR,
+ CAST_EXPR,
+ REF_EXPR,
+ PREFIX_EXPR,
+ RANGE_EXPR,
+ BIN_EXPR,
+ EXTERN_BLOCK,
+ EXTERN_ITEM_LIST,
+ VARIANT,
+ RECORD_FIELD_LIST,
+ RECORD_FIELD,
+ TUPLE_FIELD_LIST,
+ TUPLE_FIELD,
+ VARIANT_LIST,
+ ITEM_LIST,
+ ASSOC_ITEM_LIST,
+ ATTR,
+ META,
+ USE_TREE,
+ USE_TREE_LIST,
+ PATH,
+ PATH_SEGMENT,
+ LITERAL,
+ RENAME,
+ VISIBILITY,
+ WHERE_CLAUSE,
+ WHERE_PRED,
+ ABI,
+ NAME,
+ NAME_REF,
+ LET_STMT,
+ LET_ELSE,
+ EXPR_STMT,
+ GENERIC_PARAM_LIST,
+ GENERIC_PARAM,
+ LIFETIME_PARAM,
+ TYPE_PARAM,
+ CONST_PARAM,
+ GENERIC_ARG_LIST,
+ LIFETIME,
+ LIFETIME_ARG,
+ TYPE_ARG,
+ ASSOC_TYPE_ARG,
+ CONST_ARG,
+ PARAM_LIST,
+ PARAM,
+ SELF_PARAM,
+ ARG_LIST,
+ TYPE_BOUND,
+ TYPE_BOUND_LIST,
+ MACRO_ITEMS,
+ MACRO_STMTS,
+ #[doc(hidden)]
+ __LAST,
+}
+use self::SyntaxKind::*;
+impl SyntaxKind {
+ pub fn is_keyword(self) -> bool {
+ match self {
+ AS_KW | ASYNC_KW | AWAIT_KW | BOX_KW | BREAK_KW | CONST_KW | CONTINUE_KW | CRATE_KW
+ | DYN_KW | ELSE_KW | ENUM_KW | EXTERN_KW | FALSE_KW | FN_KW | FOR_KW | IF_KW
+ | IMPL_KW | IN_KW | LET_KW | LOOP_KW | MACRO_KW | MATCH_KW | MOD_KW | MOVE_KW
+ | MUT_KW | PUB_KW | REF_KW | RETURN_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW
+ | STRUCT_KW | SUPER_KW | TRAIT_KW | TRUE_KW | TRY_KW | TYPE_KW | UNSAFE_KW | USE_KW
+ | WHERE_KW | WHILE_KW | YIELD_KW | AUTO_KW | DEFAULT_KW | EXISTENTIAL_KW | UNION_KW
+ | RAW_KW | MACRO_RULES_KW => true,
+ _ => false,
+ }
+ }
+ pub fn is_punct(self) -> bool {
+ match self {
+ SEMICOLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK
+ | L_ANGLE | R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS
+ | STAR | SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOT2 | DOT3 | DOT2EQ | COLON
+ | COLON2 | EQ | EQ2 | FAT_ARROW | BANG | NEQ | MINUS | THIN_ARROW | LTEQ | GTEQ
+ | PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ | PERCENTEQ | AMP2
+ | PIPE2 | SHL | SHR | SHLEQ | SHREQ => true,
+ _ => false,
+ }
+ }
+ pub fn is_literal(self) -> bool {
+ match self {
+ INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true,
+ _ => false,
+ }
+ }
+ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ "as" => AS_KW,
+ "async" => ASYNC_KW,
+ "await" => AWAIT_KW,
+ "box" => BOX_KW,
+ "break" => BREAK_KW,
+ "const" => CONST_KW,
+ "continue" => CONTINUE_KW,
+ "crate" => CRATE_KW,
+ "dyn" => DYN_KW,
+ "else" => ELSE_KW,
+ "enum" => ENUM_KW,
+ "extern" => EXTERN_KW,
+ "false" => FALSE_KW,
+ "fn" => FN_KW,
+ "for" => FOR_KW,
+ "if" => IF_KW,
+ "impl" => IMPL_KW,
+ "in" => IN_KW,
+ "let" => LET_KW,
+ "loop" => LOOP_KW,
+ "macro" => MACRO_KW,
+ "match" => MATCH_KW,
+ "mod" => MOD_KW,
+ "move" => MOVE_KW,
+ "mut" => MUT_KW,
+ "pub" => PUB_KW,
+ "ref" => REF_KW,
+ "return" => RETURN_KW,
+ "self" => SELF_KW,
+ "Self" => SELF_TYPE_KW,
+ "static" => STATIC_KW,
+ "struct" => STRUCT_KW,
+ "super" => SUPER_KW,
+ "trait" => TRAIT_KW,
+ "true" => TRUE_KW,
+ "try" => TRY_KW,
+ "type" => TYPE_KW,
+ "unsafe" => UNSAFE_KW,
+ "use" => USE_KW,
+ "where" => WHERE_KW,
+ "while" => WHILE_KW,
+ "yield" => YIELD_KW,
+ _ => return None,
+ };
+ Some(kw)
+ }
+ pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ "auto" => AUTO_KW,
+ "default" => DEFAULT_KW,
+ "existential" => EXISTENTIAL_KW,
+ "union" => UNION_KW,
+ "raw" => RAW_KW,
+ "macro_rules" => MACRO_RULES_KW,
+ _ => return None,
+ };
+ Some(kw)
+ }
+ pub fn from_char(c: char) -> Option<SyntaxKind> {
+ let tok = match c {
+ ';' => SEMICOLON,
+ ',' => COMMA,
+ '(' => L_PAREN,
+ ')' => R_PAREN,
+ '{' => L_CURLY,
+ '}' => R_CURLY,
+ '[' => L_BRACK,
+ ']' => R_BRACK,
+ '<' => L_ANGLE,
+ '>' => R_ANGLE,
+ '@' => AT,
+ '#' => POUND,
+ '~' => TILDE,
+ '?' => QUESTION,
+ '$' => DOLLAR,
+ '&' => AMP,
+ '|' => PIPE,
+ '+' => PLUS,
+ '*' => STAR,
+ '/' => SLASH,
+ '^' => CARET,
+ '%' => PERCENT,
+ '_' => UNDERSCORE,
+ '.' => DOT,
+ ':' => COLON,
+ '=' => EQ,
+ '!' => BANG,
+ '-' => MINUS,
+ _ => return None,
+ };
+ Some(tok)
+ }
+}
+#[macro_export]
+macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; 
[mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
+pub use T;
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs
new file mode 100644
index 000000000..735c0b3e4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs
@@ -0,0 +1,166 @@
+mod sourcegen_inline_tests;
+mod top_entries;
+mod prefix_entries;
+
+use std::{
+ fmt::Write,
+ fs,
+ path::{Path, PathBuf},
+};
+
+use expect_test::expect_file;
+
+use crate::{LexedStr, TopEntryPoint};
+
+#[test]
+fn lex_ok() {
+ for case in TestCase::list("lexer/ok") {
+ let actual = lex(&case.text);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+#[test]
+fn lex_err() {
+ for case in TestCase::list("lexer/err") {
+ let actual = lex(&case.text);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+fn lex(text: &str) -> String {
+ let lexed = LexedStr::new(text);
+
+ let mut res = String::new();
+ for i in 0..lexed.len() {
+ let kind = lexed.kind(i);
+ let text = lexed.text(i);
+ let error = lexed.error(i);
+
+ let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default();
+ writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap();
+ }
+ res
+}
+
+#[test]
+fn parse_ok() {
+ for case in TestCase::list("parser/ok") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual);
+ }
+}
+
+#[test]
+fn parse_inline_ok() {
+ for case in TestCase::list("parser/inline/ok") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual);
+ }
+}
+
+#[test]
+fn parse_err() {
+ for case in TestCase::list("parser/err") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+#[test]
+fn parse_inline_err() {
+ for case in TestCase::list("parser/inline/err") {
+ let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
+ assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+ expect_file![case.rast].assert_eq(&actual)
+ }
+}
+
+fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
+ let lexed = LexedStr::new(text);
+ let input = lexed.to_input();
+ let output = entry.parse(&input);
+
+ let mut buf = String::new();
+ let mut errors = Vec::new();
+ let mut indent = String::new();
+ let mut depth = 0;
+ let mut len = 0;
+ lexed.intersperse_trivia(&output, &mut |step| match step {
+ crate::StrStep::Token { kind, text } => {
+ assert!(depth > 0);
+ len += text.len();
+ write!(buf, "{}", indent).unwrap();
+ write!(buf, "{:?} {:?}\n", kind, text).unwrap();
+ }
+ crate::StrStep::Enter { kind } => {
+ assert!(depth > 0 || len == 0);
+ depth += 1;
+ write!(buf, "{}", indent).unwrap();
+ write!(buf, "{:?}\n", kind).unwrap();
+ indent.push_str(" ");
+ }
+ crate::StrStep::Exit => {
+ assert!(depth > 0);
+ depth -= 1;
+ indent.pop();
+ indent.pop();
+ }
+ crate::StrStep::Error { msg, pos } => {
+ assert!(depth > 0);
+ errors.push(format!("error {}: {}\n", pos, msg))
+ }
+ });
+ assert_eq!(
+ len,
+ text.len(),
+ "didn't parse all text.\nParsed:\n{}\n\nAll:\n{}\n",
+ &text[..len],
+ text
+ );
+
+ for (token, msg) in lexed.errors() {
+ let pos = lexed.text_start(token);
+ errors.push(format!("error {}: {}\n", pos, msg));
+ }
+
+ let has_errors = !errors.is_empty();
+ for e in errors {
+ buf.push_str(&e);
+ }
+ (buf, has_errors)
+}
+
+#[derive(PartialEq, Eq, PartialOrd, Ord)]
+struct TestCase {
+ rs: PathBuf,
+ rast: PathBuf,
+ text: String,
+}
+
+impl TestCase {
+ fn list(path: &'static str) -> Vec<TestCase> {
+ let crate_root_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
+ let test_data_dir = crate_root_dir.join("test_data");
+ let dir = test_data_dir.join(path);
+
+ let mut res = Vec::new();
+ let read_dir = fs::read_dir(&dir)
+ .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err));
+ for file in read_dir {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() == "rs" {
+ let rs = path;
+ let rast = rs.with_extension("rast");
+ let text = fs::read_to_string(&rs).unwrap();
+ res.push(TestCase { rs, rast, text });
+ }
+ }
+ res.sort();
+ res
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
new file mode 100644
index 000000000..e626b4f27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -0,0 +1,107 @@
+use crate::{LexedStr, PrefixEntryPoint, Step};
+
+#[test]
+fn vis() {
+ check(PrefixEntryPoint::Vis, "pub(crate) fn foo() {}", "pub(crate)");
+ check(PrefixEntryPoint::Vis, "fn foo() {}", "");
+ check(PrefixEntryPoint::Vis, "pub(fn foo() {}", "pub");
+ check(PrefixEntryPoint::Vis, "pub(crate fn foo() {}", "pub(crate");
+ check(PrefixEntryPoint::Vis, "crate fn foo() {}", "crate");
+}
+
+#[test]
+fn block() {
+ check(PrefixEntryPoint::Block, "{}, 92", "{}");
+ check(PrefixEntryPoint::Block, "{, 92)", "{, 92)");
+ check(PrefixEntryPoint::Block, "()", "");
+}
+
+#[test]
+fn stmt() {
+ check(PrefixEntryPoint::Stmt, "92; fn", "92");
+ check(PrefixEntryPoint::Stmt, "let _ = 92; 1", "let _ = 92");
+ check(PrefixEntryPoint::Stmt, "pub fn f() {} = 92", "pub fn f() {}");
+ check(PrefixEntryPoint::Stmt, "struct S;;", "struct S;");
+ check(PrefixEntryPoint::Stmt, "fn f() {};", "fn f() {}");
+ check(PrefixEntryPoint::Stmt, ";;;", ";");
+ check(PrefixEntryPoint::Stmt, "+", "+");
+ check(PrefixEntryPoint::Stmt, "@", "@");
+ check(PrefixEntryPoint::Stmt, "loop {} - 1", "loop {}");
+}
+
+#[test]
+fn pat() {
+ check(PrefixEntryPoint::Pat, "x y", "x");
+ check(PrefixEntryPoint::Pat, "fn f() {}", "fn");
+ // FIXME: This one is wrong, we should consume only one pattern.
+ check(PrefixEntryPoint::Pat, ".. ..", ".. ..");
+}
+
+#[test]
+fn ty() {
+ check(PrefixEntryPoint::Ty, "fn() foo", "fn()");
+ check(PrefixEntryPoint::Ty, "Clone + Copy + fn", "Clone + Copy +");
+ check(PrefixEntryPoint::Ty, "struct f", "struct");
+}
+
+#[test]
+fn expr() {
+ check(PrefixEntryPoint::Expr, "92 92", "92");
+ check(PrefixEntryPoint::Expr, "+1", "+");
+ check(PrefixEntryPoint::Expr, "-1", "-1");
+ check(PrefixEntryPoint::Expr, "fn foo() {}", "fn");
+ check(PrefixEntryPoint::Expr, "#[attr] ()", "#[attr] ()");
+}
+
+#[test]
+fn path() {
+ check(PrefixEntryPoint::Path, "foo::bar baz", "foo::bar");
+ check(PrefixEntryPoint::Path, "foo::<> baz", "foo::<>");
+ check(PrefixEntryPoint::Path, "foo<> baz", "foo<>");
+ check(PrefixEntryPoint::Path, "Fn() -> i32?", "Fn() -> i32");
+ // FIXME: This shouldn't be accepted as path actually.
+ check(PrefixEntryPoint::Path, "<_>::foo", "<_>::foo");
+}
+
+#[test]
+fn item() {
+ // FIXME: This shouldn't consume the semicolon.
+ check(PrefixEntryPoint::Item, "fn foo() {};", "fn foo() {};");
+ check(PrefixEntryPoint::Item, "#[attr] pub struct S {} 92", "#[attr] pub struct S {}");
+ check(PrefixEntryPoint::Item, "item!{}?", "item!{}");
+ check(PrefixEntryPoint::Item, "????", "?");
+}
+
+#[test]
+fn meta_item() {
+ check(PrefixEntryPoint::MetaItem, "attr, ", "attr");
+ check(PrefixEntryPoint::MetaItem, "attr(some token {stream});", "attr(some token {stream})");
+ check(PrefixEntryPoint::MetaItem, "path::attr = 2 * 2!", "path::attr = 2 * 2");
+}
+
+#[track_caller]
+fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
+ let lexed = LexedStr::new(input);
+ let input = lexed.to_input();
+
+ let mut n_tokens = 0;
+ for step in entry.parse(&input).iter() {
+ match step {
+ Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
+ Step::Enter { .. } | Step::Exit | Step::Error { .. } => (),
+ }
+ }
+
+ let mut i = 0;
+ loop {
+ if n_tokens == 0 {
+ break;
+ }
+ if !lexed.kind(i).is_trivia() {
+ n_tokens -= 1;
+ }
+ i += 1;
+ }
+ let buf = &lexed.as_str()[..lexed.text_start(i)];
+ assert_eq!(buf, prefix);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs
new file mode 100644
index 000000000..7b2b703de
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/sourcegen_inline_tests.rs
@@ -0,0 +1,123 @@
+//! This module greps the parser's code for specially formatted comments and turns
+//! them into tests.
+
+use std::{
+ collections::HashMap,
+ fs, iter,
+ path::{Path, PathBuf},
+};
+
+#[test]
+fn sourcegen_parser_tests() {
+ let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar"));
+ let tests = tests_from_dir(&grammar_dir);
+
+ install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok");
+ install_tests(&tests.err, "crates/parser/test_data/parser/inline/err");
+
+ fn install_tests(tests: &HashMap<String, Test>, into: &str) {
+ let tests_dir = sourcegen::project_root().join(into);
+ if !tests_dir.is_dir() {
+ fs::create_dir_all(&tests_dir).unwrap();
+ }
+ // ok is never actually read, but it needs to be specified to create a Test in existing_tests
+ let existing = existing_tests(&tests_dir, true);
+ for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
+ panic!("Test is deleted: {}", t);
+ }
+
+ let mut new_idx = existing.len() + 1;
+ for (name, test) in tests {
+ let path = match existing.get(name) {
+ Some((path, _test)) => path.clone(),
+ None => {
+ let file_name = format!("{:04}_{}.rs", new_idx, name);
+ new_idx += 1;
+ tests_dir.join(file_name)
+ }
+ };
+ sourcegen::ensure_file_contents(&path, &test.text);
+ }
+ }
+}
+
+#[derive(Debug)]
+struct Test {
+ name: String,
+ text: String,
+ ok: bool,
+}
+
+#[derive(Default, Debug)]
+struct Tests {
+ ok: HashMap<String, Test>,
+ err: HashMap<String, Test>,
+}
+
+fn collect_tests(s: &str) -> Vec<Test> {
+ let mut res = Vec::new();
+ for comment_block in sourcegen::CommentBlock::extract_untagged(s) {
+ let first_line = &comment_block.contents[0];
+ let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
+ (name.to_string(), true)
+ } else if let Some(name) = first_line.strip_prefix("test_err ") {
+ (name.to_string(), false)
+ } else {
+ continue;
+ };
+ let text: String = comment_block.contents[1..]
+ .iter()
+ .cloned()
+ .chain(iter::once(String::new()))
+ .collect::<Vec<_>>()
+ .join("\n");
+ assert!(!text.trim().is_empty() && text.ends_with('\n'));
+ res.push(Test { name, text, ok })
+ }
+ res
+}
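For illustration only (the grammar sources themselves are not shown here), a comment block that `collect_tests` would recognize looks roughly like this; the exact line splitting is done by `sourcegen::CommentBlock::extract_untagged`, whose behaviour is assumed here:

```rust
fn demo() {
    let src = "\
// test tuple_struct
// struct S(i32, String);
fn lower_tuple_struct() {}
";
    // Expected: one entry named "tuple_struct" with `ok: true` whose text is
    // the remaining comment lines plus a trailing newline; a `test_err `
    // prefix would mark it as an error-recovery test instead.
    let _tests = collect_tests(src);
}
```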
+
+fn tests_from_dir(dir: &Path) -> Tests {
+ let mut res = Tests::default();
+ for entry in sourcegen::list_rust_files(dir) {
+ process_file(&mut res, entry.as_path());
+ }
+ let grammar_rs = dir.parent().unwrap().join("grammar.rs");
+ process_file(&mut res, &grammar_rs);
+ return res;
+
+ fn process_file(res: &mut Tests, path: &Path) {
+ let text = fs::read_to_string(path).unwrap();
+
+ for test in collect_tests(&text) {
+ if test.ok {
+ if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
+ panic!("Duplicate test: {}", old_test.name);
+ }
+ } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
+ panic!("Duplicate test: {}", old_test.name);
+ }
+ }
+ }
+}
+
+fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> {
+ let mut res = HashMap::default();
+ for file in fs::read_dir(dir).unwrap() {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() != "rs" {
+ continue;
+ }
+ let name = {
+ let file_name = path.file_name().unwrap().to_str().unwrap();
+ file_name[5..file_name.len() - 3].to_string()
+ };
+ let text = fs::read_to_string(&path).unwrap();
+ let test = Test { name: name.clone(), text, ok };
+ if let Some(old) = res.insert(name, (path, test)) {
+ println!("Duplicate test: {:?}", old);
+ }
+ }
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
new file mode 100644
index 000000000..eb640dc7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs
@@ -0,0 +1,312 @@
+use expect_test::expect;
+
+use crate::TopEntryPoint;
+
+#[test]
+fn source_file() {
+ check(
+ TopEntryPoint::SourceFile,
+ "",
+ expect![[r#"
+ SOURCE_FILE
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::SourceFile,
+ "struct S;",
+ expect![[r#"
+ SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::SourceFile,
+ "@error@",
+ expect![[r#"
+ SOURCE_FILE
+ ERROR
+ AT "@"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "error"
+ ERROR
+ AT "@"
+ error 0: expected an item
+ error 6: expected BANG
+ error 6: expected `{`, `[`, `(`
+ error 6: expected SEMICOLON
+ error 6: expected an item
+ "#]],
+ );
+}
+
+#[test]
+fn macro_stmt() {
+ check(
+ TopEntryPoint::MacroStmts,
+ "",
+ expect![[r#"
+ MACRO_STMTS
+ "#]],
+ );
+ check(
+ TopEntryPoint::MacroStmts,
+ "#!/usr/bin/rust",
+ expect![[r##"
+ MACRO_STMTS
+ ERROR
+ SHEBANG "#!/usr/bin/rust"
+ error 0: expected expression
+ "##]],
+ );
+ check(
+ TopEntryPoint::MacroStmts,
+ "let x = 1 2 struct S;",
+ expect![[r#"
+ MACRO_STMTS
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ "#]],
+ );
+}
+
+#[test]
+fn macro_items() {
+ check(
+ TopEntryPoint::MacroItems,
+ "",
+ expect![[r#"
+ MACRO_ITEMS
+ "#]],
+ );
+ check(
+ TopEntryPoint::MacroItems,
+ "#!/usr/bin/rust",
+ expect![[r##"
+ MACRO_ITEMS
+ ERROR
+ SHEBANG "#!/usr/bin/rust"
+ error 0: expected an item
+ "##]],
+ );
+ check(
+ TopEntryPoint::MacroItems,
+ "struct S; foo!{}",
+ expect![[r#"
+ MACRO_ITEMS
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ "#]],
+ );
+}
+
+#[test]
+fn macro_pattern() {
+ check(
+ TopEntryPoint::Pattern,
+ "",
+ expect![[r#"
+ ERROR
+ error 0: expected pattern
+ "#]],
+ );
+ check(
+ TopEntryPoint::Pattern,
+ "Some(_)",
+ expect![[r#"
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::Pattern,
+ "None leftover tokens",
+ expect![[r#"
+ ERROR
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ IDENT "leftover"
+ WHITESPACE " "
+ IDENT "tokens"
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::Pattern,
+ "@err",
+ expect![[r#"
+ ERROR
+ ERROR
+ AT "@"
+ IDENT "err"
+ error 0: expected pattern
+ "#]],
+ );
+}
+
+#[test]
+fn type_() {
+ check(
+ TopEntryPoint::Type,
+ "",
+ expect![[r#"
+ ERROR
+ error 0: expected type
+ "#]],
+ );
+
+ check(
+ TopEntryPoint::Type,
+ "Option<!>",
+ expect![[r#"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Option"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ NEVER_TYPE
+ BANG "!"
+ R_ANGLE ">"
+ "#]],
+ );
+ check(
+ TopEntryPoint::Type,
+ "() () ()",
+ expect![[r#"
+ ERROR
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ L_PAREN "("
+ R_PAREN ")"
+ "#]],
+ );
+ check(
+ TopEntryPoint::Type,
+ "$$$",
+ expect![[r#"
+ ERROR
+ ERROR
+ DOLLAR "$"
+ DOLLAR "$"
+ DOLLAR "$"
+ error 0: expected type
+ "#]],
+ );
+}
+
+#[test]
+fn expr() {
+ check(
+ TopEntryPoint::Expr,
+ "",
+ expect![[r#"
+ ERROR
+ error 0: expected expression
+ "#]],
+ );
+ check(
+ TopEntryPoint::Expr,
+ "2 + 2 == 5",
+ expect![[r#"
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ "#]],
+ );
+ check(
+ TopEntryPoint::Expr,
+ "let _ = 0;",
+ expect![[r#"
+ ERROR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ "#]],
+ );
+}
+
+#[track_caller]
+fn check(entry: TopEntryPoint, input: &str, expect: expect_test::Expect) {
+ let (parsed, _errors) = super::parse(entry, input);
+ expect.assert_eq(&parsed)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/token_set.rs b/src/tools/rust-analyzer/crates/parser/src/token_set.rs
new file mode 100644
index 000000000..cd4894c1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/src/token_set.rs
@@ -0,0 +1,42 @@
+//! A bit-set of `SyntaxKind`s.
+
+use crate::SyntaxKind;
+
+/// A bit-set of `SyntaxKind`s
+#[derive(Clone, Copy)]
+pub(crate) struct TokenSet(u128);
+
+impl TokenSet {
+ pub(crate) const EMPTY: TokenSet = TokenSet(0);
+
+ pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
+ let mut res = 0u128;
+ let mut i = 0;
+ while i < kinds.len() {
+ res |= mask(kinds[i]);
+ i += 1;
+ }
+ TokenSet(res)
+ }
+
+ pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
+ TokenSet(self.0 | other.0)
+ }
+
+ pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
+ self.0 & mask(kind) != 0
+ }
+}
+
+const fn mask(kind: SyntaxKind) -> u128 {
+ 1u128 << (kind as usize)
+}
+
+#[test]
+fn token_set_works_for_tokens() {
+ use crate::SyntaxKind::*;
+ let ts = TokenSet::new(&[EOF, SHEBANG]);
+ assert!(ts.contains(EOF));
+ assert!(ts.contains(SHEBANG));
+ assert!(!ts.contains(PLUS));
+}
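Since both constructors are `const fn`, grammar code can build recovery sets at compile time; a small sketch (the set contents here are illustrative, not taken from the grammar):

```rust
use crate::SyntaxKind::{ENUM_KW, FN_KW, R_CURLY, STRUCT_KW};

const ITEM_RECOVERY: TokenSet = TokenSet::new(&[FN_KW, STRUCT_KW, ENUM_KW])
    .union(TokenSet::new(&[R_CURLY]));
```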
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast
new file mode 100644
index 000000000..af03d73ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rast
@@ -0,0 +1,48 @@
+FLOAT_NUMBER "0e" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "0E" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+MINUS "-"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+MINUS "-"
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs
new file mode 100644
index 000000000..286584c88
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.rs
@@ -0,0 +1,22 @@
+0e
+0E
+
+42e+
+42e-
+42E+
+42E-
+
+42.e+
+42.e-
+42.E+
+42.E-
+
+42.2e+
+42.2e-
+42.2E+
+42.2E-
+
+42.2e+f32
+42.2e-f32
+42.2E+f32
+42.2E-f32
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt
new file mode 100644
index 000000000..af03d73ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_exponent.txt
@@ -0,0 +1,48 @@
+FLOAT_NUMBER "0e" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "0E" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "e"
+MINUS "-"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+PLUS "+"
+WHITESPACE "\n"
+INT_NUMBER "42"
+DOT "."
+IDENT "E"
+MINUS "-"
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-" error: Missing digits after the exponent symbol
+WHITESPACE "\n\n"
+FLOAT_NUMBER "42.2e+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2e-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E+f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
+FLOAT_NUMBER "42.2E-f32" error: Missing digits after the exponent symbol
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast
new file mode 100644
index 000000000..7f7194f45
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rast
@@ -0,0 +1,26 @@
+INT_NUMBER "0b" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0b_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0bnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0onoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0xG" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xg" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0x_g" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_G" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs
new file mode 100644
index 000000000..aa2a9fdca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.rs
@@ -0,0 +1,17 @@
+0b
+0o
+0x
+
+0b_
+0o_
+0x_
+
+0bnoDigit
+0onoDigit
+0xnoDigit
+
+0xG
+0xg
+
+0x_g
+0x_G
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt
new file mode 100644
index 000000000..7f7194f45
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/empty_int.txt
@@ -0,0 +1,26 @@
+INT_NUMBER "0b" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0b_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0o_" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0bnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0onoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xnoDigit" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0xG" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0xg" error: Missing digits after the integer base prefix
+WHITESPACE "\n\n"
+INT_NUMBER "0x_g" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
+INT_NUMBER "0x_G" error: Missing digits after the integer base prefix
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast
new file mode 100644
index 000000000..e919bf2a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rast
@@ -0,0 +1,4 @@
+LIFETIME_IDENT "'1" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
+LIFETIME_IDENT "'1lifetime" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs
new file mode 100644
index 000000000..a7698a404
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.rs
@@ -0,0 +1,2 @@
+'1
+'1lifetime
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt
new file mode 100644
index 000000000..e919bf2a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/lifetime_starts_with_a_number.txt
@@ -0,0 +1,4 @@
+LIFETIME_IDENT "'1" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
+LIFETIME_IDENT "'1lifetime" error: Lifetime name cannot start with a number
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast
new file mode 100644
index 000000000..7d2c32976
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rast
@@ -0,0 +1 @@
+COMMENT "/*" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs
new file mode 100644
index 000000000..22e83649f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.rs
@@ -0,0 +1 @@
+/*
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt
new file mode 100644
index 000000000..7d2c32976
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_at_eof.txt
@@ -0,0 +1 @@
+COMMENT "/*" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast
new file mode 100644
index 000000000..227a20660
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rast
@@ -0,0 +1 @@
+COMMENT "/* comment\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs
new file mode 100644
index 000000000..c45c2844d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.rs
@@ -0,0 +1 @@
+/* comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt
new file mode 100644
index 000000000..227a20660
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_block_comment_with_content.txt
@@ -0,0 +1 @@
+COMMENT "/* comment\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast
new file mode 100644
index 000000000..36944dbb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rast
@@ -0,0 +1 @@
+BYTE "b'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs
new file mode 100644
index 000000000..795dc7e25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.rs
@@ -0,0 +1 @@
+b'
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt
new file mode 100644
index 000000000..36944dbb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_at_eof.txt
@@ -0,0 +1 @@
+BYTE "b'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast
new file mode 100644
index 000000000..534a3cadc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs
new file mode 100644
index 000000000..36f4f4321
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.rs
@@ -0,0 +1 @@
+b"
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt
new file mode 100644
index 000000000..534a3cadc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_at_eof.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast
new file mode 100644
index 000000000..03f61de9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\x7f" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs
new file mode 100644
index 000000000..836c112c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+b"\x7f
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt
new file mode 100644
index 000000000..03f61de9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\x7f" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast
new file mode 100644
index 000000000..e11d49d1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"🦀" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs
new file mode 100644
index 000000000..3c23a0372
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.rs
@@ -0,0 +1 @@
+b"🦀
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt
new file mode 100644
index 000000000..e11d49d1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_ferris.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"🦀" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast
new file mode 100644
index 000000000..4e374b120
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs
new file mode 100644
index 000000000..cce661538
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.rs
@@ -0,0 +1 @@
+b"\
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt
new file mode 100644
index 000000000..4e374b120
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast
new file mode 100644
index 000000000..ee1997586
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs
new file mode 100644
index 000000000..f2ff58ba9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.rs
@@ -0,0 +1 @@
+b"\"
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt
new file mode 100644
index 000000000..ee1997586
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_double_quote.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\\"" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast
new file mode 100644
index 000000000..b109d8629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\n" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs
new file mode 100644
index 000000000..5e680aabb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.rs
@@ -0,0 +1 @@
+b"\n
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt
new file mode 100644
index 000000000..b109d8629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_slash_n.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\n" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast
new file mode 100644
index 000000000..eaca94fa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\" " error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs
new file mode 100644
index 000000000..d6898541e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.rs
@@ -0,0 +1 @@
+b" 
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt
new file mode 100644
index 000000000..eaca94fa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_space.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\" " error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast
new file mode 100644
index 000000000..3b79f48bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs
new file mode 100644
index 000000000..1c6df1d00
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+b"\u{20AA}
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt
new file mode 100644
index 000000000..3b79f48bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "b\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast
new file mode 100644
index 000000000..5525376f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rast
@@ -0,0 +1 @@
+BYTE "b'\\x7f" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs
new file mode 100644
index 000000000..d146a8090
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.rs
@@ -0,0 +1 @@
+b'\x7f
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt
new file mode 100644
index 000000000..5525376f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ascii_escape.txt
@@ -0,0 +1 @@
+BYTE "b'\\x7f" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast
new file mode 100644
index 000000000..e7a8be4f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rast
@@ -0,0 +1 @@
+BYTE "b'🦀" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs
new file mode 100644
index 000000000..c9230dc24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.rs
@@ -0,0 +1 @@
+b'🦀
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt
new file mode 100644
index 000000000..e7a8be4f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_ferris.txt
@@ -0,0 +1 @@
+BYTE "b'🦀" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast
new file mode 100644
index 000000000..d9937135a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rast
@@ -0,0 +1 @@
+BYTE "b'\\" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs
new file mode 100644
index 000000000..abffa5037
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.rs
@@ -0,0 +1 @@
+b'\
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt
new file mode 100644
index 000000000..d9937135a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash.txt
@@ -0,0 +1 @@
+BYTE "b'\\" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast
new file mode 100644
index 000000000..c408cdb2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rast
@@ -0,0 +1 @@
+BYTE "b'\\n" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs
new file mode 100644
index 000000000..4f46836a9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.rs
@@ -0,0 +1 @@
+b'\n
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt
new file mode 100644
index 000000000..c408cdb2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_n.txt
@@ -0,0 +1 @@
+BYTE "b'\\n" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast
new file mode 100644
index 000000000..b331f9560
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rast
@@ -0,0 +1 @@
+BYTE "b'\\'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs
new file mode 100644
index 000000000..645b641ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.rs
@@ -0,0 +1 @@
+b'\'
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt
new file mode 100644
index 000000000..b331f9560
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_slash_single_quote.txt
@@ -0,0 +1 @@
+BYTE "b'\\'" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast
new file mode 100644
index 000000000..80c0e1c00
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rast
@@ -0,0 +1 @@
+BYTE "b' " error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs
new file mode 100644
index 000000000..93b7f9c87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.rs
@@ -0,0 +1 @@
+b' 
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt
new file mode 100644
index 000000000..80c0e1c00
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_space.txt
@@ -0,0 +1 @@
+BYTE "b' " error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast
new file mode 100644
index 000000000..e1c3dc141
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rast
@@ -0,0 +1 @@
+BYTE "b'\\u{20AA}" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs
new file mode 100644
index 000000000..a3dec7c25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.rs
@@ -0,0 +1 @@
+b'\u{20AA}
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt
new file mode 100644
index 000000000..e1c3dc141
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_byte_with_unicode_escape.txt
@@ -0,0 +1 @@
+BYTE "b'\\u{20AA}" error: Missing trailing `'` symbol to terminate the byte literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast
new file mode 100644
index 000000000..218c7a2d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rast
@@ -0,0 +1 @@
+CHAR "'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs
new file mode 100644
index 000000000..ad2823b48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.rs
@@ -0,0 +1 @@
+'
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt
new file mode 100644
index 000000000..218c7a2d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_at_eof.txt
@@ -0,0 +1 @@
+CHAR "'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast
new file mode 100644
index 000000000..a0d8e1b83
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rast
@@ -0,0 +1 @@
+CHAR "'\\x7f" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs
new file mode 100644
index 000000000..cf74b4dad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.rs
@@ -0,0 +1 @@
+'\x7f
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt
new file mode 100644
index 000000000..a0d8e1b83
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ascii_escape.txt
@@ -0,0 +1 @@
+CHAR "'\\x7f" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast
new file mode 100644
index 000000000..56f19cce0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rast
@@ -0,0 +1 @@
+CHAR "'🦀" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs
new file mode 100644
index 000000000..e264a4152
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.rs
@@ -0,0 +1 @@
+'🦀
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt
new file mode 100644
index 000000000..56f19cce0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_ferris.txt
@@ -0,0 +1 @@
+CHAR "'🦀" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast
new file mode 100644
index 000000000..cfa0e0752
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rast
@@ -0,0 +1 @@
+CHAR "'\\" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs
new file mode 100644
index 000000000..6ba258b10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.rs
@@ -0,0 +1 @@
+'\
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt
new file mode 100644
index 000000000..cfa0e0752
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash.txt
@@ -0,0 +1 @@
+CHAR "'\\" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast
new file mode 100644
index 000000000..6a42a4e22
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rast
@@ -0,0 +1 @@
+CHAR "'\\n" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs
new file mode 100644
index 000000000..78bef7e3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.rs
@@ -0,0 +1 @@
+'\n
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt
new file mode 100644
index 000000000..6a42a4e22
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_n.txt
@@ -0,0 +1 @@
+CHAR "'\\n" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast
new file mode 100644
index 000000000..1275f6aa8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rast
@@ -0,0 +1 @@
+CHAR "'\\'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs
new file mode 100644
index 000000000..a0e722065
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.rs
@@ -0,0 +1 @@
+'\'
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt
new file mode 100644
index 000000000..1275f6aa8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_slash_single_quote.txt
@@ -0,0 +1 @@
+CHAR "'\\'" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast
new file mode 100644
index 000000000..746c425c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rast
@@ -0,0 +1 @@
+CHAR "' " error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs
new file mode 100644
index 000000000..309ecfe47
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.rs
@@ -0,0 +1 @@
+' 
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt
new file mode 100644
index 000000000..746c425c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_space.txt
@@ -0,0 +1 @@
+CHAR "' " error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast
new file mode 100644
index 000000000..9abd59098
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rast
@@ -0,0 +1 @@
+CHAR "'\\u{20AA}" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs
new file mode 100644
index 000000000..50be91f68
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.rs
@@ -0,0 +1 @@
+'\u{20AA}
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt
new file mode 100644
index 000000000..9abd59098
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_char_with_unicode_escape.txt
@@ -0,0 +1 @@
+CHAR "'\\u{20AA}" error: Missing trailing `'` symbol to terminate the character literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast
new file mode 100644
index 000000000..15ce8905a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rast
@@ -0,0 +1 @@
+COMMENT "/* /* /*\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs
new file mode 100644
index 000000000..3fcfc9660
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.rs
@@ -0,0 +1 @@
+/* /* /*
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt
new file mode 100644
index 000000000..15ce8905a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_entirely.txt
@@ -0,0 +1 @@
+COMMENT "/* /* /*\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast
new file mode 100644
index 000000000..e9b74ee7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rast
@@ -0,0 +1 @@
+COMMENT "/** /*! /* comment */ */\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs
new file mode 100644
index 000000000..26c898f01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.rs
@@ -0,0 +1 @@
+/** /*! /* comment */ */
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt
new file mode 100644
index 000000000..e9b74ee7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_nested_block_comment_partially.txt
@@ -0,0 +1 @@
+COMMENT "/** /*! /* comment */ */\n" error: Missing trailing `*/` symbols to terminate the block comment
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast
new file mode 100644
index 000000000..6ec1780c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs
new file mode 100644
index 000000000..ae5bae622
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.rs
@@ -0,0 +1 @@
+br##"
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt
new file mode 100644
index 000000000..6ec1780c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_at_eof.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast
new file mode 100644
index 000000000..d65f1bb2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs
new file mode 100644
index 000000000..d50270afe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+br##"\x7f
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt
new file mode 100644
index 000000000..d65f1bb2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast
new file mode 100644
index 000000000..0f9e0a165
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs
new file mode 100644
index 000000000..9ef01207a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.rs
@@ -0,0 +1 @@
+br##"🦀
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt
new file mode 100644
index 000000000..0f9e0a165
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_ferris.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast
new file mode 100644
index 000000000..202dcd2d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs
new file mode 100644
index 000000000..0b3c015d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.rs
@@ -0,0 +1 @@
+br##"\
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt
new file mode 100644
index 000000000..202dcd2d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast
new file mode 100644
index 000000000..d45485b52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs
new file mode 100644
index 000000000..0d8b0e7ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.rs
@@ -0,0 +1 @@
+br##"\n
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt
new file mode 100644
index 000000000..d45485b52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_slash_n.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast
new file mode 100644
index 000000000..1bfabbc3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\" " error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs
new file mode 100644
index 000000000..14c602fd2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.rs
@@ -0,0 +1 @@
+br##" 
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt
new file mode 100644
index 000000000..1bfabbc3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_space.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\" " error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast
new file mode 100644
index 000000000..104ab8aae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs
new file mode 100644
index 000000000..90e299a1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+br##"\u{20AA}
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt
new file mode 100644
index 000000000..104ab8aae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_byte_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast
new file mode 100644
index 000000000..71b20fd19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rast
@@ -0,0 +1 @@
+STRING "r##\"" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs
new file mode 100644
index 000000000..557c59b62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.rs
@@ -0,0 +1 @@
+r##"
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt
new file mode 100644
index 000000000..71b20fd19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_at_eof.txt
@@ -0,0 +1 @@
+STRING "r##\"" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast
new file mode 100644
index 000000000..dc106dd24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+STRING "r##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs
new file mode 100644
index 000000000..5bec883dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+r##"\x7f
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt
new file mode 100644
index 000000000..dc106dd24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+STRING "r##\"\\x7f" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast
new file mode 100644
index 000000000..30ee029f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rast
@@ -0,0 +1 @@
+STRING "r##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs
new file mode 100644
index 000000000..bd046e4bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.rs
@@ -0,0 +1 @@
+r##"🦀
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt
new file mode 100644
index 000000000..30ee029f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_ferris.txt
@@ -0,0 +1 @@
+STRING "r##\"🦀" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast
new file mode 100644
index 000000000..8a6f6cc43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rast
@@ -0,0 +1 @@
+STRING "r##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs
new file mode 100644
index 000000000..9242077b8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.rs
@@ -0,0 +1 @@
+r##"\
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt
new file mode 100644
index 000000000..8a6f6cc43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash.txt
@@ -0,0 +1 @@
+STRING "r##\"\\" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast
new file mode 100644
index 000000000..f46eff251
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rast
@@ -0,0 +1 @@
+STRING "r##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs
new file mode 100644
index 000000000..db1c16f2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.rs
@@ -0,0 +1 @@
+r##"\n
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt
new file mode 100644
index 000000000..f46eff251
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_slash_n.txt
@@ -0,0 +1 @@
+STRING "r##\"\\n" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast
new file mode 100644
index 000000000..49b6afea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rast
@@ -0,0 +1 @@
+STRING "r##\" " error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs
new file mode 100644
index 000000000..f104bae4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.rs
@@ -0,0 +1 @@
+r##" 
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt
new file mode 100644
index 000000000..49b6afea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_space.txt
@@ -0,0 +1 @@
+STRING "r##\" " error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast
new file mode 100644
index 000000000..d10d6d8e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+STRING "r##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs
new file mode 100644
index 000000000..bf05c3913
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+r##"\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt
new file mode 100644
index 000000000..d10d6d8e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_raw_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+STRING "r##\"\\u{20AA}" error: Missing trailing `"` with `#` symbols to terminate the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast
new file mode 100644
index 000000000..3b89ce0ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rast
@@ -0,0 +1 @@
+STRING "\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs
new file mode 100644
index 000000000..9d68933c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.rs
@@ -0,0 +1 @@
+" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt
new file mode 100644
index 000000000..3b89ce0ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_at_eof.txt
@@ -0,0 +1 @@
+STRING "\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast
new file mode 100644
index 000000000..6694cf17a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rast
@@ -0,0 +1 @@
+STRING "\"\\x7f" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs
new file mode 100644
index 000000000..56186a344
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.rs
@@ -0,0 +1 @@
+"\x7f \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt
new file mode 100644
index 000000000..6694cf17a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ascii_escape.txt
@@ -0,0 +1 @@
+STRING "\"\\x7f" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast
new file mode 100644
index 000000000..5f4501c18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rast
@@ -0,0 +1 @@
+STRING "\"🦀" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs
new file mode 100644
index 000000000..d439b8d2a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.rs
@@ -0,0 +1 @@
+"🦀 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt
new file mode 100644
index 000000000..5f4501c18
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_ferris.txt
@@ -0,0 +1 @@
+STRING "\"🦀" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast
new file mode 100644
index 000000000..a8ac565ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rast
@@ -0,0 +1 @@
+STRING "\"\\" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs
new file mode 100644
index 000000000..00a258400
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.rs
@@ -0,0 +1 @@
+"\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt
new file mode 100644
index 000000000..a8ac565ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash.txt
@@ -0,0 +1 @@
+STRING "\"\\" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast
new file mode 100644
index 000000000..919183b91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rast
@@ -0,0 +1 @@
+STRING "\"\\\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs
new file mode 100644
index 000000000..403c2d6dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.rs
@@ -0,0 +1 @@
+"\" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt
new file mode 100644
index 000000000..919183b91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_double_quote.txt
@@ -0,0 +1 @@
+STRING "\"\\\"" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast
new file mode 100644
index 000000000..39e288af9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rast
@@ -0,0 +1 @@
+STRING "\"\\n" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs
new file mode 100644
index 000000000..a0c29b8cf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.rs
@@ -0,0 +1 @@
+"\n \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt
new file mode 100644
index 000000000..39e288af9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_slash_n.txt
@@ -0,0 +1 @@
+STRING "\"\\n" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast
new file mode 100644
index 000000000..dcff94d7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rast
@@ -0,0 +1 @@
+STRING "\" " error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs
new file mode 100644
index 000000000..72cdc841f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.rs
@@ -0,0 +1 @@
+" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt
new file mode 100644
index 000000000..dcff94d7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_space.txt
@@ -0,0 +1 @@
+STRING "\" " error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast
new file mode 100644
index 000000000..ac232b530
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rast
@@ -0,0 +1 @@
+STRING "\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs
new file mode 100644
index 000000000..ed24095c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.rs
@@ -0,0 +1 @@
+"\u{20AA} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt
new file mode 100644
index 000000000..ac232b530
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unclosed_string_with_unicode_escape.txt
@@ -0,0 +1 @@
+STRING "\"\\u{20AA}" error: Missing trailing `"` symbol to terminate the string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast
new file mode 100644
index 000000000..cf942c92f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rast
@@ -0,0 +1 @@
+BYTE_STRING "br##" error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs
new file mode 100644
index 000000000..7e8cadf4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.rs
@@ -0,0 +1 @@
+br## \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt
new file mode 100644
index 000000000..cf942c92f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_at_eof.txt
@@ -0,0 +1 @@
+BYTE_STRING "br##" error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast
new file mode 100644
index 000000000..042769c27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rast
@@ -0,0 +1,9 @@
+BYTE_STRING "br## " error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs
new file mode 100644
index 000000000..d9b55455a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.rs
@@ -0,0 +1 @@
+br## I lack a quote! \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt
new file mode 100644
index 000000000..042769c27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_byte_string_with_ascii.txt
@@ -0,0 +1,9 @@
+BYTE_STRING "br## " error: Missing `"` symbol after `#` symbols to begin the raw byte string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast
new file mode 100644
index 000000000..2f7c7529a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rast
@@ -0,0 +1 @@
+STRING "r##" error: Missing `"` symbol after `#` symbols to begin the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs
new file mode 100644
index 000000000..eddf8d080
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.rs
@@ -0,0 +1 @@
+r## \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt
new file mode 100644
index 000000000..2f7c7529a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_at_eof.txt
@@ -0,0 +1 @@
+STRING "r##" error: Missing `"` symbol after `#` symbols to begin the raw string literal
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast
new file mode 100644
index 000000000..4a06b0abe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rast
@@ -0,0 +1,9 @@
+STRING "r## " error: Missing `"` symbol after `#` symbols to begin the raw string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs
new file mode 100644
index 000000000..534668a9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.rs
@@ -0,0 +1 @@
+r## I lack a quote! \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt
new file mode 100644
index 000000000..4a06b0abe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unstarted_raw_string_with_ascii.txt
@@ -0,0 +1,9 @@
+STRING "r## " error: Missing `"` symbol after `#` symbols to begin the raw string literal
+IDENT "I"
+WHITESPACE " "
+IDENT "lack"
+WHITESPACE " "
+IDENT "a"
+WHITESPACE " "
+IDENT "quote"
+BANG "!"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast
new file mode 100644
index 000000000..18bb5cad8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rast
@@ -0,0 +1,6 @@
+COMMENT "/* */"
+WHITESPACE "\n"
+COMMENT "/**/"
+WHITESPACE "\n"
+COMMENT "/* /* */ */"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs
new file mode 100644
index 000000000..b880a59d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.rs
@@ -0,0 +1,3 @@
+/* */
+/**/
+/* /* */ */
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt
new file mode 100644
index 000000000..18bb5cad8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/block_comment.txt
@@ -0,0 +1,6 @@
+COMMENT "/* */"
+WHITESPACE "\n"
+COMMENT "/**/"
+WHITESPACE "\n"
+COMMENT "/* /* */ */"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
new file mode 100644
index 000000000..c848ac368
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
@@ -0,0 +1,22 @@
+BYTE "b''"
+WHITESPACE " "
+BYTE "b'x'"
+WHITESPACE " "
+BYTE_STRING "b\"foo\""
+WHITESPACE " "
+BYTE_STRING "br\"\""
+WHITESPACE "\n"
+BYTE "b''suf"
+WHITESPACE " "
+BYTE_STRING "b\"\"ix"
+WHITESPACE " "
+BYTE_STRING "br\"\"br"
+WHITESPACE "\n"
+BYTE "b'\\n'"
+WHITESPACE " "
+BYTE "b'\\\\'"
+WHITESPACE " "
+BYTE "b'\\''"
+WHITESPACE " "
+BYTE "b'hello'"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
new file mode 100644
index 000000000..b54930f5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
@@ -0,0 +1,3 @@
+b'' b'x' b"foo" br""
+b''suf b""ix br""br
+b'\n' b'\\' b'\'' b'hello'
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt
new file mode 100644
index 000000000..c848ac368
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.txt
@@ -0,0 +1,22 @@
+BYTE "b''"
+WHITESPACE " "
+BYTE "b'x'"
+WHITESPACE " "
+BYTE_STRING "b\"foo\""
+WHITESPACE " "
+BYTE_STRING "br\"\""
+WHITESPACE "\n"
+BYTE "b''suf"
+WHITESPACE " "
+BYTE_STRING "b\"\"ix"
+WHITESPACE " "
+BYTE_STRING "br\"\"br"
+WHITESPACE "\n"
+BYTE "b'\\n'"
+WHITESPACE " "
+BYTE "b'\\\\'"
+WHITESPACE " "
+BYTE "b'\\''"
+WHITESPACE " "
+BYTE "b'hello'"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
new file mode 100644
index 000000000..66e58cc29
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
@@ -0,0 +1,16 @@
+CHAR "'x'"
+WHITESPACE " "
+CHAR "' '"
+WHITESPACE " "
+CHAR "'0'"
+WHITESPACE " "
+CHAR "'hello'"
+WHITESPACE " "
+CHAR "'\\x7f'"
+WHITESPACE " "
+CHAR "'\\n'"
+WHITESPACE " "
+CHAR "'\\\\'"
+WHITESPACE " "
+CHAR "'\\''"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
new file mode 100644
index 000000000..454ee0a5f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
@@ -0,0 +1 @@
+'x' ' ' '0' 'hello' '\x7f' '\n' '\\' '\''
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt
new file mode 100644
index 000000000..66e58cc29
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.txt
@@ -0,0 +1,16 @@
+CHAR "'x'"
+WHITESPACE " "
+CHAR "' '"
+WHITESPACE " "
+CHAR "'0'"
+WHITESPACE " "
+CHAR "'hello'"
+WHITESPACE " "
+CHAR "'\\x7f'"
+WHITESPACE " "
+CHAR "'\\n'"
+WHITESPACE " "
+CHAR "'\\\\'"
+WHITESPACE " "
+CHAR "'\\''"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast
new file mode 100644
index 000000000..7f5ce9de1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rast
@@ -0,0 +1,3 @@
+IDENT "hello"
+WHITESPACE " "
+IDENT "world"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs
new file mode 100644
index 000000000..95d09f2b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.rs
@@ -0,0 +1 @@
+hello world \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt
new file mode 100644
index 000000000..7f5ce9de1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/hello.txt
@@ -0,0 +1,3 @@
+IDENT "hello"
+WHITESPACE " "
+IDENT "world"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast
new file mode 100644
index 000000000..5689644c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rast
@@ -0,0 +1,14 @@
+IDENT "foo"
+WHITESPACE " "
+IDENT "foo_"
+WHITESPACE " "
+IDENT "_foo"
+WHITESPACE " "
+UNDERSCORE "_"
+WHITESPACE " "
+IDENT "__"
+WHITESPACE " "
+IDENT "x"
+WHITESPACE " "
+IDENT "привет"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs
new file mode 100644
index 000000000..c05c9c009
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.rs
@@ -0,0 +1 @@
+foo foo_ _foo _ __ x привет
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt
new file mode 100644
index 000000000..5689644c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/ident.txt
@@ -0,0 +1,14 @@
+IDENT "foo"
+WHITESPACE " "
+IDENT "foo_"
+WHITESPACE " "
+IDENT "_foo"
+WHITESPACE " "
+UNDERSCORE "_"
+WHITESPACE " "
+IDENT "__"
+WHITESPACE " "
+IDENT "x"
+WHITESPACE " "
+IDENT "привет"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast
new file mode 100644
index 000000000..e19b1399a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rast
@@ -0,0 +1,64 @@
+ASYNC_KW "async"
+WHITESPACE " "
+FN_KW "fn"
+WHITESPACE " "
+USE_KW "use"
+WHITESPACE " "
+STRUCT_KW "struct"
+WHITESPACE " "
+TRAIT_KW "trait"
+WHITESPACE " "
+ENUM_KW "enum"
+WHITESPACE " "
+IMPL_KW "impl"
+WHITESPACE " "
+TRUE_KW "true"
+WHITESPACE " "
+FALSE_KW "false"
+WHITESPACE " "
+AS_KW "as"
+WHITESPACE " "
+EXTERN_KW "extern"
+WHITESPACE " "
+CRATE_KW "crate"
+WHITESPACE "\n"
+MOD_KW "mod"
+WHITESPACE " "
+PUB_KW "pub"
+WHITESPACE " "
+SELF_KW "self"
+WHITESPACE " "
+SUPER_KW "super"
+WHITESPACE " "
+IN_KW "in"
+WHITESPACE " "
+WHERE_KW "where"
+WHITESPACE " "
+FOR_KW "for"
+WHITESPACE " "
+LOOP_KW "loop"
+WHITESPACE " "
+WHILE_KW "while"
+WHITESPACE " "
+IF_KW "if"
+WHITESPACE " "
+MATCH_KW "match"
+WHITESPACE " "
+CONST_KW "const"
+WHITESPACE "\n"
+STATIC_KW "static"
+WHITESPACE " "
+MUT_KW "mut"
+WHITESPACE " "
+TYPE_KW "type"
+WHITESPACE " "
+REF_KW "ref"
+WHITESPACE " "
+LET_KW "let"
+WHITESPACE " "
+ELSE_KW "else"
+WHITESPACE " "
+MOVE_KW "move"
+WHITESPACE " "
+RETURN_KW "return"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs
new file mode 100644
index 000000000..1e91bff4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.rs
@@ -0,0 +1,3 @@
+async fn use struct trait enum impl true false as extern crate
+mod pub self super in where for loop while if match const
+static mut type ref let else move return
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt
new file mode 100644
index 000000000..e19b1399a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/keywords.txt
@@ -0,0 +1,64 @@
+ASYNC_KW "async"
+WHITESPACE " "
+FN_KW "fn"
+WHITESPACE " "
+USE_KW "use"
+WHITESPACE " "
+STRUCT_KW "struct"
+WHITESPACE " "
+TRAIT_KW "trait"
+WHITESPACE " "
+ENUM_KW "enum"
+WHITESPACE " "
+IMPL_KW "impl"
+WHITESPACE " "
+TRUE_KW "true"
+WHITESPACE " "
+FALSE_KW "false"
+WHITESPACE " "
+AS_KW "as"
+WHITESPACE " "
+EXTERN_KW "extern"
+WHITESPACE " "
+CRATE_KW "crate"
+WHITESPACE "\n"
+MOD_KW "mod"
+WHITESPACE " "
+PUB_KW "pub"
+WHITESPACE " "
+SELF_KW "self"
+WHITESPACE " "
+SUPER_KW "super"
+WHITESPACE " "
+IN_KW "in"
+WHITESPACE " "
+WHERE_KW "where"
+WHITESPACE " "
+FOR_KW "for"
+WHITESPACE " "
+LOOP_KW "loop"
+WHITESPACE " "
+WHILE_KW "while"
+WHITESPACE " "
+IF_KW "if"
+WHITESPACE " "
+MATCH_KW "match"
+WHITESPACE " "
+CONST_KW "const"
+WHITESPACE "\n"
+STATIC_KW "static"
+WHITESPACE " "
+MUT_KW "mut"
+WHITESPACE " "
+TYPE_KW "type"
+WHITESPACE " "
+REF_KW "ref"
+WHITESPACE " "
+LET_KW "let"
+WHITESPACE " "
+ELSE_KW "else"
+WHITESPACE " "
+MOVE_KW "move"
+WHITESPACE " "
+RETURN_KW "return"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast
new file mode 100644
index 000000000..eeb1e9541
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rast
@@ -0,0 +1,8 @@
+LIFETIME_IDENT "'a"
+WHITESPACE " "
+LIFETIME_IDENT "'foo"
+WHITESPACE " "
+LIFETIME_IDENT "'foo_bar_baz"
+WHITESPACE " "
+LIFETIME_IDENT "'_"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs
new file mode 100644
index 000000000..b764f1dce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.rs
@@ -0,0 +1 @@
+'a 'foo 'foo_bar_baz '_
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt
new file mode 100644
index 000000000..eeb1e9541
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/lifetimes.txt
@@ -0,0 +1,8 @@
+LIFETIME_IDENT "'a"
+WHITESPACE " "
+LIFETIME_IDENT "'foo"
+WHITESPACE " "
+LIFETIME_IDENT "'foo_bar_baz"
+WHITESPACE " "
+LIFETIME_IDENT "'_"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast
new file mode 100644
index 000000000..8d13c3f61
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rast
@@ -0,0 +1,57 @@
+INT_NUMBER "0"
+WHITESPACE " "
+INT_NUMBER "00"
+WHITESPACE " "
+INT_NUMBER "0_"
+WHITESPACE " "
+FLOAT_NUMBER "0."
+WHITESPACE " "
+INT_NUMBER "0z"
+WHITESPACE "\n"
+INT_NUMBER "01790"
+WHITESPACE " "
+INT_NUMBER "0b1790"
+WHITESPACE " "
+INT_NUMBER "0o1790"
+WHITESPACE " "
+INT_NUMBER "0x1790aAbBcCdDeEfF"
+WHITESPACE " "
+INT_NUMBER "001279"
+WHITESPACE " "
+INT_NUMBER "0_1279"
+WHITESPACE " "
+FLOAT_NUMBER "0.1279"
+WHITESPACE " "
+FLOAT_NUMBER "0e1279"
+WHITESPACE " "
+FLOAT_NUMBER "0E1279"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+DOT "."
+INT_NUMBER "2"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "foo"
+L_PAREN "("
+R_PAREN ")"
+WHITESPACE "\n"
+FLOAT_NUMBER "0e+1"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "e"
+PLUS "+"
+INT_NUMBER "1"
+WHITESPACE "\n"
+FLOAT_NUMBER "0.0E-2"
+WHITESPACE "\n"
+FLOAT_NUMBER "0___0.10000____0000e+111__"
+WHITESPACE "\n"
+INT_NUMBER "1i64"
+WHITESPACE " "
+FLOAT_NUMBER "92.0f32"
+WHITESPACE " "
+INT_NUMBER "11__s"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs
new file mode 100644
index 000000000..bc761c235
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.rs
@@ -0,0 +1,9 @@
+0 00 0_ 0. 0z
+01790 0b1790 0o1790 0x1790aAbBcCdDeEfF 001279 0_1279 0.1279 0e1279 0E1279
+0..2
+0.foo()
+0e+1
+0.e+1
+0.0E-2
+0___0.10000____0000e+111__
+1i64 92.0f32 11__s
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt
new file mode 100644
index 000000000..8d13c3f61
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/numbers.txt
@@ -0,0 +1,57 @@
+INT_NUMBER "0"
+WHITESPACE " "
+INT_NUMBER "00"
+WHITESPACE " "
+INT_NUMBER "0_"
+WHITESPACE " "
+FLOAT_NUMBER "0."
+WHITESPACE " "
+INT_NUMBER "0z"
+WHITESPACE "\n"
+INT_NUMBER "01790"
+WHITESPACE " "
+INT_NUMBER "0b1790"
+WHITESPACE " "
+INT_NUMBER "0o1790"
+WHITESPACE " "
+INT_NUMBER "0x1790aAbBcCdDeEfF"
+WHITESPACE " "
+INT_NUMBER "001279"
+WHITESPACE " "
+INT_NUMBER "0_1279"
+WHITESPACE " "
+FLOAT_NUMBER "0.1279"
+WHITESPACE " "
+FLOAT_NUMBER "0e1279"
+WHITESPACE " "
+FLOAT_NUMBER "0E1279"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+DOT "."
+INT_NUMBER "2"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "foo"
+L_PAREN "("
+R_PAREN ")"
+WHITESPACE "\n"
+FLOAT_NUMBER "0e+1"
+WHITESPACE "\n"
+INT_NUMBER "0"
+DOT "."
+IDENT "e"
+PLUS "+"
+INT_NUMBER "1"
+WHITESPACE "\n"
+FLOAT_NUMBER "0.0E-2"
+WHITESPACE "\n"
+FLOAT_NUMBER "0___0.10000____0000e+111__"
+WHITESPACE "\n"
+INT_NUMBER "1i64"
+WHITESPACE " "
+FLOAT_NUMBER "92.0f32"
+WHITESPACE " "
+INT_NUMBER "11__s"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast
new file mode 100644
index 000000000..fddad9982
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rast
@@ -0,0 +1,2 @@
+IDENT "r#raw_ident"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs
new file mode 100644
index 000000000..b40a1b6a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.rs
@@ -0,0 +1 @@
+r#raw_ident
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt
new file mode 100644
index 000000000..fddad9982
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_ident.txt
@@ -0,0 +1,2 @@
+IDENT "r#raw_ident"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast
new file mode 100644
index 000000000..13cf733b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rast
@@ -0,0 +1,2 @@
+STRING "r###\"this is a r##\"raw\"## string\"###"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs
new file mode 100644
index 000000000..e5ed0b693
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.rs
@@ -0,0 +1 @@
+r###"this is a r##"raw"## string"###
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt
new file mode 100644
index 000000000..13cf733b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/raw_strings.txt
@@ -0,0 +1,2 @@
+STRING "r###\"this is a r##\"raw\"## string\"###"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast
new file mode 100644
index 000000000..a7681e9f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast
@@ -0,0 +1,22 @@
+SHEBANG "#!/usr/bin/env bash"
+WHITESPACE "\n"
+COMMENT "// hello"
+WHITESPACE "\n"
+COMMENT "//! World"
+WHITESPACE "\n"
+COMMENT "//!! Inner line doc"
+WHITESPACE "\n"
+COMMENT "/// Outer line doc"
+WHITESPACE "\n"
+COMMENT "//// Just a comment"
+WHITESPACE "\n\n"
+COMMENT "//"
+WHITESPACE "\n"
+COMMENT "//!"
+WHITESPACE "\n"
+COMMENT "//!!"
+WHITESPACE "\n"
+COMMENT "///"
+WHITESPACE "\n"
+COMMENT "////"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs
new file mode 100644
index 000000000..4b6653f9c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rs
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+// hello
+//! World
+//!! Inner line doc
+/// Outer line doc
+//// Just a comment
+
+//
+//!
+//!!
+///
+////
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt
new file mode 100644
index 000000000..a7681e9f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.txt
@@ -0,0 +1,22 @@
+SHEBANG "#!/usr/bin/env bash"
+WHITESPACE "\n"
+COMMENT "// hello"
+WHITESPACE "\n"
+COMMENT "//! World"
+WHITESPACE "\n"
+COMMENT "//!! Inner line doc"
+WHITESPACE "\n"
+COMMENT "/// Outer line doc"
+WHITESPACE "\n"
+COMMENT "//// Just a comment"
+WHITESPACE "\n\n"
+COMMENT "//"
+WHITESPACE "\n"
+COMMENT "//!"
+WHITESPACE "\n"
+COMMENT "//!!"
+WHITESPACE "\n"
+COMMENT "///"
+WHITESPACE "\n"
+COMMENT "////"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast
new file mode 100644
index 000000000..ec222591b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rast
@@ -0,0 +1,8 @@
+STRING "\"hello\""
+WHITESPACE " "
+STRING "r\"world\""
+WHITESPACE " "
+STRING "\"\\n\\\"\\\\no escape\""
+WHITESPACE " "
+STRING "\"multi\nline\""
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs
new file mode 100644
index 000000000..4ddb5bffc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.rs
@@ -0,0 +1,2 @@
+"hello" r"world" "\n\"\\no escape" "multi
+line"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt
new file mode 100644
index 000000000..ec222591b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/strings.txt
@@ -0,0 +1,8 @@
+STRING "\"hello\""
+WHITESPACE " "
+STRING "r\"world\""
+WHITESPACE " "
+STRING "\"\\n\\\"\\\\no escape\""
+WHITESPACE " "
+STRING "\"multi\nline\""
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast
new file mode 100644
index 000000000..533ccff9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rast
@@ -0,0 +1,77 @@
+SEMICOLON ";"
+WHITESPACE " "
+COMMA ","
+WHITESPACE " "
+L_PAREN "("
+WHITESPACE " "
+R_PAREN ")"
+WHITESPACE " "
+L_CURLY "{"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+L_BRACK "["
+WHITESPACE " "
+R_BRACK "]"
+WHITESPACE " "
+L_ANGLE "<"
+WHITESPACE " "
+R_ANGLE ">"
+WHITESPACE " "
+AT "@"
+WHITESPACE " "
+POUND "#"
+WHITESPACE " "
+TILDE "~"
+WHITESPACE " "
+QUESTION "?"
+WHITESPACE " "
+DOLLAR "$"
+WHITESPACE " "
+AMP "&"
+WHITESPACE " "
+PIPE "|"
+WHITESPACE " "
+PLUS "+"
+WHITESPACE " "
+STAR "*"
+WHITESPACE " "
+SLASH "/"
+WHITESPACE " "
+CARET "^"
+WHITESPACE " "
+PERCENT "%"
+WHITESPACE "\n"
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+EQ "="
+WHITESPACE "\n"
+COLON ":"
+WHITESPACE " "
+COLON ":"
+COLON ":"
+WHITESPACE "\n"
+EQ "="
+WHITESPACE " "
+EQ "="
+R_ANGLE ">"
+WHITESPACE "\n"
+BANG "!"
+WHITESPACE " "
+BANG "!"
+EQ "="
+WHITESPACE "\n"
+MINUS "-"
+WHITESPACE " "
+MINUS "-"
+R_ANGLE ">"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs
new file mode 100644
index 000000000..487569b5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.rs
@@ -0,0 +1,6 @@
+; , ( ) { } [ ] < > @ # ~ ? $ & | + * / ^ %
+. .. ... ..=
+: ::
+= =>
+! !=
+- ->
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt
new file mode 100644
index 000000000..533ccff9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/symbols.txt
@@ -0,0 +1,77 @@
+SEMICOLON ";"
+WHITESPACE " "
+COMMA ","
+WHITESPACE " "
+L_PAREN "("
+WHITESPACE " "
+R_PAREN ")"
+WHITESPACE " "
+L_CURLY "{"
+WHITESPACE " "
+R_CURLY "}"
+WHITESPACE " "
+L_BRACK "["
+WHITESPACE " "
+R_BRACK "]"
+WHITESPACE " "
+L_ANGLE "<"
+WHITESPACE " "
+R_ANGLE ">"
+WHITESPACE " "
+AT "@"
+WHITESPACE " "
+POUND "#"
+WHITESPACE " "
+TILDE "~"
+WHITESPACE " "
+QUESTION "?"
+WHITESPACE " "
+DOLLAR "$"
+WHITESPACE " "
+AMP "&"
+WHITESPACE " "
+PIPE "|"
+WHITESPACE " "
+PLUS "+"
+WHITESPACE " "
+STAR "*"
+WHITESPACE " "
+SLASH "/"
+WHITESPACE " "
+CARET "^"
+WHITESPACE " "
+PERCENT "%"
+WHITESPACE "\n"
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+DOT "."
+WHITESPACE " "
+DOT "."
+DOT "."
+EQ "="
+WHITESPACE "\n"
+COLON ":"
+WHITESPACE " "
+COLON ":"
+COLON ":"
+WHITESPACE "\n"
+EQ "="
+WHITESPACE " "
+EQ "="
+R_ANGLE ">"
+WHITESPACE "\n"
+BANG "!"
+WHITESPACE " "
+BANG "!"
+EQ "="
+WHITESPACE "\n"
+MINUS "-"
+WHITESPACE " "
+MINUS "-"
+R_ANGLE ">"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast
new file mode 100644
index 000000000..8ccb79e4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rast
@@ -0,0 +1,12 @@
+IDENT "a"
+WHITESPACE " "
+IDENT "b"
+WHITESPACE " "
+IDENT "c"
+WHITESPACE "\n"
+IDENT "d"
+WHITESPACE "\n\n"
+IDENT "e"
+WHITESPACE "\t"
+IDENT "f"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs
new file mode 100644
index 000000000..08fce1418
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.rs
@@ -0,0 +1,4 @@
+a b c
+d
+
+e f
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt
new file mode 100644
index 000000000..8ccb79e4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/whitespace.txt
@@ -0,0 +1,12 @@
+IDENT "a"
+WHITESPACE " "
+IDENT "b"
+WHITESPACE " "
+IDENT "c"
+WHITESPACE "\n"
+IDENT "d"
+WHITESPACE "\n\n"
+IDENT "e"
+WHITESPACE "\t"
+IDENT "f"
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast
new file mode 100644
index 000000000..b30328c82
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
+error 21: expected COMMA
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs
new file mode 100644
index 000000000..fe5030d89
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0000_struct_field_missing_comma.rs
@@ -0,0 +1,4 @@
+struct S {
+ a: u32
+ b: u32
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast
new file mode 100644
index 000000000..959b87ebb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rast
@@ -0,0 +1,18 @@
+SOURCE_FILE
+ ERROR
+ IF_KW "if"
+ WHITESPACE " "
+ ERROR
+ MATCH_KW "match"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+error 0: expected an item
+error 3: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs
new file mode 100644
index 000000000..98f23de1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0001_item_recovery_in_file.rs
@@ -0,0 +1,3 @@
+if match
+
+struct S {} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
new file mode 100644
index 000000000..ec6c31510
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ SHEBANG "#!/use/bin/env rusti"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ ERROR
+ SLASH "/"
+ USE
+ USE_KW "use"
+ ERROR
+ SLASH "/"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bin"
+ ERROR
+ SLASH "/"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "env"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "rusti"
+ WHITESPACE "\n"
+error 23: expected `[`
+error 23: expected an item
+error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 28: expected SEMICOLON
+error 31: expected BANG
+error 31: expected `{`, `[`, `(`
+error 31: expected SEMICOLON
+error 31: expected an item
+error 35: expected BANG
+error 35: expected `{`, `[`, `(`
+error 35: expected SEMICOLON
+error 41: expected BANG
+error 41: expected `{`, `[`, `(`
+error 41: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs
new file mode 100644
index 000000000..48a3a3980
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rs
@@ -0,0 +1,2 @@
+#!/use/bin/env rusti
+#!/use/bin/env rusti
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast
new file mode 100644
index 000000000..00131bea5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ ERROR
+ SEMICOLON ";"
+error 39: expected item, found `;`
+consider removing this semicolon
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs
new file mode 100644
index 000000000..009312270
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0003_C++_semicolon.rs
@@ -0,0 +1,4 @@
+struct S {
+ a: i32,
+ b: String,
+}; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast
new file mode 100644
index 000000000..44e192a5f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rast
@@ -0,0 +1,15 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ ERROR
+ INT_NUMBER "92"
+ SEMICOLON ";"
+error 9: expected identifier
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs
new file mode 100644
index 000000000..060e65d06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0004_use_path_bad_segment.rs
@@ -0,0 +1 @@
+use foo::92; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
new file mode 100644
index 000000000..6ff072e20
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rast
@@ -0,0 +1,62 @@
+SOURCE_FILE
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "foo"
+ COMMA ","
+ WHITESPACE " "
+ PLUS "+"
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "92"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n\n"
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 53: expected R_PAREN
+error 53: expected `]`
+error 53: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs
new file mode 100644
index 000000000..de7f81628
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0005_attribute_recover.rs
@@ -0,0 +1,8 @@
+#[foo(foo, +, 92)]
+fn foo() {
+}
+
+
+#[foo(
+fn foo() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast
new file mode 100644
index 000000000..7a4aa93b2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rast
@@ -0,0 +1,74 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ ERROR
+ INT_NUMBER "92"
+ WHITESPACE "\n "
+ ERROR
+ PLUS "+"
+ WHITESPACE " "
+ ERROR
+ MINUS "-"
+ WHITESPACE " "
+ ERROR
+ STAR "*"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "z"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f64"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 31: expected field declaration
+error 33: expected COMMA
+error 38: expected field declaration
+error 39: expected COMMA
+error 40: expected field declaration
+error 41: expected COMMA
+error 42: expected field declaration
+error 43: expected COMMA
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs
new file mode 100644
index 000000000..8069c111b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0006_named_field_recovery.rs
@@ -0,0 +1,7 @@
+struct S {
+ f: u32,
+ pub 92
+ + - *
+ pub x: u32,
+ z: f64,
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast
new file mode 100644
index 000000000..5d87ff866
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 0: unmatched `}`
+error 14: unmatched `}`
+error 29: unmatched `}`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs
new file mode 100644
index 000000000..dc869fb78
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0007_stray_curly_in_file.rs
@@ -0,0 +1,9 @@
+}
+
+struct S;
+
+}
+
+fn foo(){}
+
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
new file mode 100644
index 000000000..60b2fe987
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rast
@@ -0,0 +1,80 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "baz"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 17: expected BANG
+error 19: expected SEMICOLON
+error 20: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs
new file mode 100644
index 000000000..9fcac19b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0008_item_block_recovery.rs
@@ -0,0 +1,13 @@
+fn foo() {
+}
+
+bar() {
+ if true {
+ 1
+ } else {
+ 2 + 3
+ }
+}
+
+fn baz() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast
new file mode 100644
index 000000000..a01543217
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rast
@@ -0,0 +1,56 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ ERROR
+ INT_NUMBER "90"
+ WHITESPACE " "
+ ERROR
+ PLUS "+"
+ WHITESPACE " "
+ ERROR
+ INT_NUMBER "2"
+ ERROR
+ R_ANGLE ">"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ERROR
+ COLON ":"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 9: expected type parameter
+error 11: expected COMMA
+error 11: expected R_ANGLE
+error 11: expected `;`, `{`, or `(`
+error 12: expected an item
+error 14: expected an item
+error 15: expected an item
+error 17: expected an item
+error 24: expected SEMICOLON
+error 24: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs
new file mode 100644
index 000000000..0dd30d0bd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0009_broken_struct_type_parameter.rs
@@ -0,0 +1,5 @@
+struct S<90 + 2> {
+ f: u32
+}
+
+struct T;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast
new file mode 100644
index 000000000..9427ee5c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EXPR_STMT
+ BLOCK_EXPR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 24: expected a block
+error 24: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs
new file mode 100644
index 000000000..985775282
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0010_unsafe_lambda_block.rs
@@ -0,0 +1,3 @@
+fn main() {
+ || -> () unsafe { () };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
new file mode 100644
index 000000000..bd5ec4b7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ ERROR
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 6: expected existential, fn, trait or impl
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs
new file mode 100644
index 000000000..c1bd0a2d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rs
@@ -0,0 +1 @@
+extern struct Foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast
new file mode 100644
index 000000000..f31c27633
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0012_broken_lambda.rast
@@ -0,0 +1,387 @@
+SOURCE_FILE@0..389
+ FN@0..389
+ VISIBILITY@0..10
+ PUB_KW@0..3
+ L_PAREN@3..4
+ SUPER_KW@4..9
+ R_PAREN@9..10
+ WHITESPACE@10..11
+ FN_KW@11..13
+ WHITESPACE@13..14
+ NAME@14..21
+ IDENT@14..21 "process"
+ GENERIC_PARAM_LIST@21..38
+ L_ANGLE@21..22
+ LIFETIME_PARAM@22..24
+ LIFETIME@22..24 "'a"
+ COMMA@24..25
+ WHITESPACE@25..26
+ TYPE_PARAM@26..37
+ NAME@26..27
+ IDENT@26..27 "S"
+ COLON@27..28
+ WHITESPACE@28..29
+ PATH@29..37
+ PATH_SEGMENT@29..37
+ NAME_REF@29..33
+ IDENT@29..33 "Sink"
+ GENERIC_ARG_LIST@33..37
+ L_ANGLE@33..34
+ LIFETIME_ARG@34..36
+ LIFETIME@34..36 "'a"
+ R_ANGLE@36..37
+ R_ANGLE@37..38
+ PARAM_LIST@38..93
+ L_PAREN@38..39
+ PARAM@39..54
+ IDENT_PAT@39..46
+ NAME@39..46
+ IDENT@39..46 "builder"
+ COLON@46..47
+ WHITESPACE@47..48
+ REF_TYPE@48..54
+ AMP@48..49
+ MUT_KW@49..52
+ WHITESPACE@52..53
+ PATH_TYPE@53..54
+ PATH@53..54
+ PATH_SEGMENT@53..54
+ NAME_REF@53..54
+ IDENT@53..54 "S"
+ COMMA@54..55
+ WHITESPACE@55..56
+ PARAM@56..72
+ IDENT_PAT@56..62
+ NAME@56..62
+ IDENT@56..62 "tokens"
+ COLON@62..63
+ WHITESPACE@63..64
+ REF_TYPE@64..72
+ AMP@64..65
+ SLICE_TYPE@65..72
+ L_BRACK@65..66
+ PATH_TYPE@66..71
+ PATH@66..71
+ PATH_SEGMENT@66..71
+ NAME_REF@66..71
+ IDENT@66..71 "Token"
+ R_BRACK@71..72
+ COMMA@72..73
+ WHITESPACE@73..74
+ PARAM@74..92
+ IDENT_PAT@74..80
+ NAME@74..80
+ IDENT@74..80 "events"
+ COLON@80..81
+ WHITESPACE@81..82
+ PATH_TYPE@82..92
+ PATH@82..92
+ PATH_SEGMENT@82..92
+ NAME_REF@82..85
+ IDENT@82..85 "Vec"
+ GENERIC_ARG_LIST@85..92
+ L_ANGLE@85..86
+ TYPE_ARG@86..91
+ PATH_TYPE@86..91
+ PATH@86..91
+ PATH_SEGMENT@86..91
+ NAME_REF@86..91
+ IDENT@86..91 "Event"
+ R_ANGLE@91..92
+ R_PAREN@92..93
+ WHITESPACE@93..94
+ BLOCK_EXPR@94..389
+ L_CURLY@94..95
+ WHITESPACE@95..100
+ LET_STMT@100..125
+ LET_KW@100..103
+ WHITESPACE@103..104
+ IDENT_PAT@104..120
+ MUT_KW@104..107
+ WHITESPACE@107..108
+ NAME@108..120
+ IDENT@108..120 "next_tok_idx"
+ WHITESPACE@120..121
+ EQ@121..122
+ WHITESPACE@122..123
+ LITERAL@123..124
+ INT_NUMBER@123..124 "0"
+ SEMICOLON@124..125
+ WHITESPACE@125..130
+ LET_STMT@130..389
+ LET_KW@130..133
+ WHITESPACE@133..134
+ IDENT_PAT@134..140
+ NAME@134..140
+ IDENT@134..140 "eat_ws"
+ WHITESPACE@140..141
+ EQ@141..142
+ WHITESPACE@142..143
+ CLOSURE_EXPR@143..389
+ PARAM_LIST@143..388
+ PIPE@143..144
+ PARAM@144..159
+ IDENT_PAT@144..147
+ NAME@144..147
+ IDENT@144..147 "idx"
+ COLON@147..148
+ WHITESPACE@148..149
+ REF_TYPE@149..159
+ AMP@149..150
+ MUT_KW@150..153
+ WHITESPACE@153..154
+ PATH_TYPE@154..159
+ PATH@154..159
+ PATH_SEGMENT@154..159
+ NAME_REF@154..159
+ IDENT@154..159 "usize"
+ COMMA@159..160
+ WHITESPACE@160..161
+ PARAM@161..167
+ REF_PAT@161..167
+ AMP@161..162
+ MUT_KW@162..165
+ WHITESPACE@165..166
+ err: `expected pattern`
+ ERROR@166..167
+ PIPE@166..167
+ err: `expected COMMA`
+ WHITESPACE@167..168
+ err: `expected pattern`
+ PARAM@168..169
+ ERROR@168..169
+ L_CURLY@168..169
+ err: `expected COMMA`
+ WHITESPACE@169..178
+ err: `expected pattern`
+ PARAM@178..183
+ ERROR@178..183
+ WHILE_KW@178..183
+ err: `expected COMMA`
+ WHITESPACE@183..184
+ err: `expected pattern`
+ PARAM@184..187
+ ERROR@184..187
+ LET_KW@184..187
+ err: `expected COMMA`
+ WHITESPACE@187..188
+ PARAM@188..199
+ TUPLE_STRUCT_PAT@188..199
+ PATH@188..192
+ PATH_SEGMENT@188..192
+ NAME_REF@188..192
+ IDENT@188..192 "Some"
+ L_PAREN@192..193
+ IDENT_PAT@193..198
+ NAME@193..198
+ IDENT@193..198 "token"
+ R_PAREN@198..199
+ err: `expected COMMA`
+ WHITESPACE@199..200
+ err: `expected pattern`
+ PARAM@200..201
+ ERROR@200..201
+ EQ@200..201
+ err: `expected COMMA`
+ WHITESPACE@201..202
+ PARAM@202..208
+ IDENT_PAT@202..208
+ NAME@202..208
+ IDENT@202..208 "tokens"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@208..209
+ ERROR@208..209
+ DOT@208..209
+ err: `expected COMMA`
+ PARAM@209..218
+ TUPLE_STRUCT_PAT@209..218
+ PATH@209..212
+ PATH_SEGMENT@209..212
+ NAME_REF@209..212
+ IDENT@209..212 "get"
+ L_PAREN@212..213
+ err: `expected pattern`
+ ERROR@213..214
+ STAR@213..214
+ err: `expected COMMA`
+ IDENT_PAT@214..217
+ NAME@214..217
+ IDENT@214..217 "idx"
+ R_PAREN@217..218
+ err: `expected COMMA`
+ WHITESPACE@218..219
+ err: `expected pattern`
+ PARAM@219..220
+ ERROR@219..220
+ L_CURLY@219..220
+ err: `expected COMMA`
+ WHITESPACE@220..233
+ err: `expected pattern`
+ PARAM@233..235
+ ERROR@233..235
+ IF_KW@233..235
+ err: `expected COMMA`
+ WHITESPACE@235..236
+ err: `expected pattern`
+ PARAM@236..237
+ ERROR@236..237
+ BANG@236..237
+ err: `expected COMMA`
+ PARAM@237..242
+ IDENT_PAT@237..242
+ NAME@237..242
+ IDENT@237..242 "token"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@242..243
+ ERROR@242..243
+ DOT@242..243
+ err: `expected COMMA`
+ PARAM@243..247
+ IDENT_PAT@243..247
+ NAME@243..247
+ IDENT@243..247 "kind"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@247..248
+ ERROR@247..248
+ DOT@247..248
+ err: `expected COMMA`
+ PARAM@248..259
+ TUPLE_STRUCT_PAT@248..259
+ PATH@248..257
+ PATH_SEGMENT@248..257
+ NAME_REF@248..257
+ IDENT@248..257 "is_trivia"
+ L_PAREN@257..258
+ R_PAREN@258..259
+ err: `expected COMMA`
+ WHITESPACE@259..260
+ err: `expected pattern`
+ PARAM@260..261
+ ERROR@260..261
+ L_CURLY@260..261
+ err: `expected COMMA`
+ WHITESPACE@261..278
+ PARAM@278..283
+ IDENT_PAT@278..283
+ NAME@278..283
+ IDENT@278..283 "break"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@283..284
+ ERROR@283..284
+ SEMICOLON@283..284
+ err: `expected COMMA`
+ WHITESPACE@284..297
+ err: `expected pattern`
+ PARAM@297..298
+ ERROR@297..298
+ R_CURLY@297..298
+ err: `expected COMMA`
+ WHITESPACE@298..311
+ PARAM@311..318
+ IDENT_PAT@311..318
+ NAME@311..318
+ IDENT@311..318 "builder"
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@318..319
+ ERROR@318..319
+ DOT@318..319
+ err: `expected COMMA`
+ PARAM@319..346
+ TUPLE_STRUCT_PAT@319..346
+ PATH@319..323
+ PATH_SEGMENT@319..323
+ NAME_REF@319..323
+ IDENT@319..323 "leaf"
+ L_PAREN@323..324
+ IDENT_PAT@324..329
+ NAME@324..329
+ IDENT@324..329 "token"
+ err: `expected COMMA`
+ err: `expected pattern`
+ ERROR@329..330
+ DOT@329..330
+ err: `expected COMMA`
+ IDENT_PAT@330..334
+ NAME@330..334
+ IDENT@330..334 "kind"
+ COMMA@334..335
+ WHITESPACE@335..336
+ IDENT_PAT@336..341
+ NAME@336..341
+ IDENT@336..341 "token"
+ err: `expected COMMA`
+ err: `expected pattern`
+ ERROR@341..342
+ DOT@341..342
+ err: `expected COMMA`
+ IDENT_PAT@342..345
+ NAME@342..345
+ IDENT@342..345 "len"
+ R_PAREN@345..346
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@346..347
+ ERROR@346..347
+ SEMICOLON@346..347
+ err: `expected COMMA`
+ WHITESPACE@347..360
+ err: `expected pattern`
+ PARAM@360..361
+ ERROR@360..361
+ STAR@360..361
+ err: `expected COMMA`
+ PARAM@361..364
+ IDENT_PAT@361..364
+ NAME@361..364
+ IDENT@361..364 "idx"
+ err: `expected COMMA`
+ WHITESPACE@364..365
+ err: `expected pattern`
+ PARAM@365..366
+ ERROR@365..366
+ PLUS@365..366
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@366..367
+ ERROR@366..367
+ EQ@366..367
+ err: `expected COMMA`
+ WHITESPACE@367..368
+ PARAM@368..369
+ LITERAL@368..369
+ INT_NUMBER@368..369 "1"
+ err: `expected COMMA`
+ WHITESPACE@369..378
+ err: `expected pattern`
+ PARAM@378..379
+ ERROR@378..379
+ R_CURLY@378..379
+ err: `expected COMMA`
+ WHITESPACE@379..384
+ err: `expected pattern`
+ PARAM@384..385
+ ERROR@384..385
+ R_CURLY@384..385
+ err: `expected COMMA`
+ err: `expected pattern`
+ PARAM@385..386
+ ERROR@385..386
+ SEMICOLON@385..386
+ err: `expected COMMA`
+ WHITESPACE@386..387
+ err: `expected pattern`
+ PARAM@387..388
+ ERROR@387..388
+ R_CURLY@387..388
+ err: `expected COMMA`
+ err: `expected PIPE`
+ WHITESPACE@388..389
+ err: `expected expression`
+ err: `expected SEMI`
+ err: `expected R_CURLY`
+ ERROR@389..389
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast
new file mode 100644
index 000000000..eec84a0c6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rast
@@ -0,0 +1,89 @@
+SOURCE_FILE
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Cache"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ WHITESPACE "\n "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "RefCell"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "HashMap"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ WHITESPACE "\n "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "TypeId"
+ COMMA ","
+ WHITESPACE "\n "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ ERROR
+ AT "@"
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Any"
+ ERROR
+ ERROR
+ R_ANGLE ">"
+ ERROR
+ COMMA ","
+ WHITESPACE "\n "
+ ERROR
+ R_ANGLE ">"
+ ERROR
+ R_ANGLE ">"
+ WHITESPACE "\n"
+ ERROR
+ R_PAREN ")"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+error 67: expected type
+error 68: expected COMMA
+error 68: expected R_ANGLE
+error 68: expected COMMA
+error 68: expected R_ANGLE
+error 68: expected COMMA
+error 68: expected R_ANGLE
+error 68: expected COMMA
+error 72: expected COMMA
+error 72: expected a type
+error 72: expected R_PAREN
+error 72: expected SEMICOLON
+error 72: expected an item
+error 73: expected an item
+error 79: expected an item
+error 80: expected an item
+error 82: expected an item
+error 83: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs
new file mode 100644
index 000000000..20dde3bc3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0013_invalid_type.rs
@@ -0,0 +1,7 @@
+pub struct Cache(
+ RefCell<HashMap<
+ TypeId,
+ Box<@ Any>,
+ >>
+);
+
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast
new file mode 100644
index 000000000..fd2f9ada3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 19: expected colon
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs
new file mode 100644
index 000000000..75c1d2f98
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0014_where_no_bounds.rs
@@ -0,0 +1 @@
+fn foo<T>() where T {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast
new file mode 100644
index 000000000..8e169320d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ ERROR
+ R_CURLY "}"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 7: expected value parameter
+error 7: expected R_PAREN
+error 7: expected a block
+error 7: unmatched `}`
+error 8: expected an item
+error 10: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs
new file mode 100644
index 000000000..156e70251
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0015_curly_in_params.rs
@@ -0,0 +1,2 @@
+fn foo(}) {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast
new file mode 100644
index 000000000..c48c35bf8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ WHITESPACE "\n "
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE "\n "
+ R_PAREN ")"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 38: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs
new file mode 100644
index 000000000..9ae857686
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0016_missing_semi.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ foo(
+ 1, 2
+ )
+ return 92;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast
new file mode 100644
index 000000000..807356462
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rast
@@ -0,0 +1,47 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 44: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs
new file mode 100644
index 000000000..17bd49777
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0017_incomplete_binexpr.rs
@@ -0,0 +1,4 @@
+fn foo(foo: i32) {
+ let bar = 92;
+ 1 +
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast
new file mode 100644
index 000000000..6524d8e8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rast
@@ -0,0 +1,134 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnScopes"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "new_scope"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_PAT
+ AMP "&"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "ScopeId"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "res"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ DOT "."
+ NAME_REF
+ IDENT "scopes"
+ DOT "."
+ NAME_REF
+ IDENT "len"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ METHOD_CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ DOT "."
+ NAME_REF
+ IDENT "scopes"
+ DOT "."
+ NAME_REF
+ IDENT "push"
+ ARG_LIST
+ L_PAREN "("
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "ScopeData"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "parent"
+ COLON ":"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "entries"
+ COLON ":"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "vec"
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "set_parent"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 34: expected pattern
+error 34: missing type for function parameter
+error 180: expected function arguments
+error 180: expected a block
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs
new file mode 100644
index 000000000..fe604006c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0018_incomplete_fn.rs
@@ -0,0 +1,8 @@
+impl FnScopes {
+ fn new_scope(&) -> ScopeId {
+ let res = self.scopes.len();
+ self.scopes.push(ScopeData { parent: None, entries: vec![] })
+ }
+
+ fn set_parent
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast
new file mode 100644
index 000000000..7d62e0cc1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rast
@@ -0,0 +1,107 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "11"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "baz"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 27: expected SEMICOLON
+error 52: expected pattern
+error 52: expected SEMICOLON
+error 78: expected pattern
+error 78: expected SEMICOLON
+error 101: expected pattern
+error 101: expected SEMICOLON
+error 127: expected pattern
+error 127: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs
new file mode 100644
index 000000000..5108d5a49
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0019_let_recover.rs
@@ -0,0 +1,12 @@
+fn foo() {
+ let foo = 11
+ let bar = 1;
+ let
+ let baz = 92;
+ let
+ if true {}
+ let
+ while true {}
+ let
+ loop {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast
new file mode 100644
index 000000000..56d124cb9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 2: expected a name
+error 2: expected function arguments
+error 2: expected a block
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs
new file mode 100644
index 000000000..3393b668b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0020_fn_recover.rs
@@ -0,0 +1,3 @@
+fn
+
+fn foo() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast
new file mode 100644
index 000000000..762840aa2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 16: missing type for function parameter
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs
new file mode 100644
index 000000000..7a6c264f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0021_incomplete_param.rs
@@ -0,0 +1,2 @@
+fn foo(x: i32, y) {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast
new file mode 100644
index 000000000..900394bd9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast
@@ -0,0 +1,171 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ AT "@"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ STRUCT
+ STRUCT_KW "struct"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ AT "@"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ DOT "."
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ AT "@"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ ERROR
+ R_BRACK "]"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ TRAIT
+ TRAIT_KW "trait"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 16: expected expression
+error 17: expected R_BRACK
+error 17: expected SEMICOLON
+error 17: expected expression
+error 25: expected a name
+error 26: expected `;`, `{`, or `(`
+error 30: expected pattern
+error 31: expected SEMICOLON
+error 53: expected expression
+error 54: expected SEMICOLON
+error 54: expected expression
+error 60: expected type
+error 60: expected `{`
+error 60: expected expression
+error 65: expected pattern
+error 65: expected SEMICOLON
+error 65: expected expression
+error 92: expected expression
+error 93: expected SEMICOLON
+error 93: expected expression
+error 95: expected expression
+error 96: expected expression
+error 103: expected a name
+error 104: expected `{`
+error 108: expected pattern
+error 108: expected SEMICOLON
+error 108: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs
new file mode 100644
index 000000000..cd2d493a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rs
@@ -0,0 +1,3 @@
+fn a() { [1, 2, @, struct, let] }
+fn b() { foo(1, 2, @, impl, let) }
+fn c() { foo.bar(1, 2, @, ], trait, let) }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast
new file mode 100644
index 000000000..4064a7a1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE "\n "
+ IDENT "bar"
+ COMMA ","
+ WHITESPACE " "
+ STRING "\"baz\""
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ FLOAT_NUMBER "2.0"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ COMMENT "//~ ERROR incorrect close delimiter"
+ WHITESPACE "\n"
+ ERROR
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 49: unmatched `}`
+error 92: unmatched `}`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs
new file mode 100644
index 000000000..0206d563e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0023_mismatched_paren.rs
@@ -0,0 +1,5 @@
+fn main() {
+ foo! (
+ bar, "baz", 1, 2.0
+ } //~ ERROR incorrect close delimiter
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast
new file mode 100644
index 000000000..d374f8661
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast
@@ -0,0 +1,327 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ QUESTION "?"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PAREN_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ QUESTION "?"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ ERROR
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PAREN_TYPE
+ L_PAREN "("
+ ERROR
+ QUESTION "?"
+ EXPR_STMT
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ PLUS "+"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ R_ANGLE ">"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ ERROR
+ COLON ":"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ L_ANGLE "<"
+ TUPLE_EXPR
+ L_PAREN "("
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ ERROR
+ QUESTION "?"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ R_PAREN ")"
+ R_ANGLE ">"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 88: expected COMMA
+error 88: expected R_ANGLE
+error 121: expected SEMICOLON
+error 121: expected expression
+error 140: expected type
+error 141: expected R_PAREN
+error 141: expected COMMA
+error 141: expected R_ANGLE
+error 141: expected SEMICOLON
+error 146: expected SEMICOLON
+error 146: expected expression
+error 148: expected expression
+error 158: expected `|`
+error 158: expected COMMA
+error 165: expected expression
+error 168: expected expression
+error 179: expected expression
+error 180: expected COMMA
+error 190: expected EQ
+error 190: expected expression
+error 191: expected COMMA
+error 204: expected `|`
+error 204: expected COMMA
+error 211: expected expression
+error 214: expected expression
+error 228: expected expression
+error 229: expected R_PAREN
+error 229: expected COMMA
+error 236: expected expression
+error 237: expected COMMA
+error 237: expected expression
+error 237: expected R_PAREN
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs
new file mode 100644
index 000000000..6c2e95c02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rs
@@ -0,0 +1,7 @@
+fn f<T: (Copy) + (?Sized) + (for<'a> Trait<'a>)>() {}
+
+fn main() {
+ let _: Box<(Copy) + (?Sized) + (for<'a> Trait<'a>)>;
+ let _: Box<(?Sized) + (for<'a> Trait<'a>) + (Copy)>;
+ let _: Box<(for<'a> Trait<'a>) + (Copy) + (?Sized)>;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast
new file mode 100644
index 000000000..6b49724ec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rast
@@ -0,0 +1,209 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Var1"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Var2"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Var3"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "abc"
+ COLON ":"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ R_CURLY "}"
+ ERROR
+ COMMA ","
+ WHITESPACE " "
+ COMMENT "//~ ERROR: expected type, found `{`"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ COMMENT "// recover..."
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test2"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Fine"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test3"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "StillFine"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "def"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ENUM
+ COMMENT "// fail again"
+ WHITESPACE "\n "
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "Test4"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Nope"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ ERROR
+ ERROR
+ L_CURLY "{"
+ R_CURLY "}"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ COMMENT "//~ ERROR: found `{`"
+ WHITESPACE "\n "
+ COMMENT "//~^ ERROR: found `{`"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ COMMENT "// still recover later"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE " "
+ COMMENT "//~ ERROR: expected pattern"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 95: expected type
+error 95: expected COMMA
+error 96: expected field
+error 98: expected field declaration
+error 371: expected COMMA
+error 372: expected a type
+error 372: expected R_PAREN
+error 372: expected COMMA
+error 372: expected enum variant
+error 374: expected enum variant
+error 494: expected pattern
+error 495: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs
new file mode 100644
index 000000000..c78abe80a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0025_nope.rs
@@ -0,0 +1,32 @@
+fn main() {
+ enum Test {
+ Var1,
+ Var2(String),
+ Var3 {
+ abc: {}, //~ ERROR: expected type, found `{`
+ },
+ }
+
+ // recover...
+ let a = 1;
+ enum Test2 {
+ Fine,
+ }
+
+ enum Test3 {
+ StillFine {
+ def: i32,
+ },
+ }
+
+ {
+ // fail again
+ enum Test4 {
+ Nope(i32 {}) //~ ERROR: found `{`
+ //~^ ERROR: found `{`
+ }
+ }
+ // still recover later
+ let; //~ ERROR: expected pattern
+ let _ = 0;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast
new file mode 100644
index 000000000..1068418e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "OnceCell"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 14: expected trait or type
+error 14: expected `{`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs
new file mode 100644
index 000000000..829ca1c4b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0026_imp_recovery.rs
@@ -0,0 +1,2 @@
+impl<T: Clone>
+impl<T> OnceCell<T> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast
new file mode 100644
index 000000000..674c8d536
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 26: expected type
+error 26: expected colon
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs
new file mode 100644
index 000000000..2792c2084
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplere_where_for.rs
@@ -0,0 +1,3 @@
+fn foo()
+ where for<'a>
+{}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast
new file mode 100644
index 000000000..fb037112f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ DOT "."
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 21: expected field name or number
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs
new file mode 100644
index 000000000..a7cdc17bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0029_field_completion.rs
@@ -0,0 +1,3 @@
+fn foo(a: A) {
+ a.
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
new file mode 100644
index 000000000..327bf94a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rast
@@ -0,0 +1,205 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ ERROR
+ BANG "!"
+ ARRAY_EXPR
+ L_BRACK "["
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ STRING "\"Not allowed here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ ERROR
+ BANG "!"
+ ARRAY_EXPR
+ L_BRACK "["
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ STRING "\"Nor here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ ERROR
+ BANG "!"
+ ARRAY_EXPR
+ L_BRACK "["
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ STRING "\"Nor here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 52: expected `[`
+error 52: expected pattern
+error 53: expected FAT_ARROW
+error 78: expected `,`
+error 161: expected `[`
+error 161: expected pattern
+error 162: expected FAT_ARROW
+error 232: expected `[`
+error 232: expected pattern
+error 233: expected FAT_ARROW
+error 250: expected `,`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs
new file mode 100644
index 000000000..06aa47770
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0032_match_arms_inner_attrs.rs
@@ -0,0 +1,20 @@
+fn foo() {
+ match () {
+ _ => (),
+ #![doc("Not allowed here")]
+ _ => (),
+ }
+
+ match () {
+ _ => (),
+ _ => (),
+ #![doc("Nor here")]
+ }
+
+ match () {
+ #[cfg(test)]
+ #![doc("Nor here")]
+ _ => (),
+ _ => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast
new file mode 100644
index 000000000..b5bc3d84d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rast
@@ -0,0 +1,68 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 80: expected pattern
+error 80: expected FAT_ARROW
+error 80: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs
new file mode 100644
index 000000000..4635222da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0033_match_arms_outer_attrs.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ match () {
+ _ => (),
+ _ => (),
+ #[cfg(test)]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast
new file mode 100644
index 000000000..7a2ae9103
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ ERROR
+ BOX_KW "box"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ ERROR
+ BOX_KW "box"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ ERROR
+ BOX_KW "box"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+error 24: expected a name
+error 27: expected SEMICOLON
+error 48: expected a name
+error 51: expected SEMICOLON
+error 76: expected a name
+error 79: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs
new file mode 100644
index 000000000..d3fa2e468
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0034_bad_box_pattern.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let ref box i = ();
+ let mut box i = ();
+ let ref mut box i = ();
+}
+
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast
new file mode 100644
index 000000000..f9287d42e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rast
@@ -0,0 +1,55 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 17: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 17: expected SEMICOLON
+error 37: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 37: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs
new file mode 100644
index 000000000..4a2668126
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0035_use_recover.rs
@@ -0,0 +1,5 @@
+use foo::bar;
+use
+use crate::baz;
+use
+fn f() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast
new file mode 100644
index 000000000..13e76e683
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "error"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Error"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ ERROR
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "io"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 22: expected COMMA
+error 22: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 23: expected COMMA
+error 24: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 27: expected COMMA
+error 35: expected COMMA
+error 35: expected one of `*`, `::`, `{`, `self`, `super` or an identifier
+error 36: expected COMMA
+error 36: expected R_CURLY
+error 36: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs
new file mode 100644
index 000000000..d521a5bb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0036_partial_use.rs
@@ -0,0 +1,2 @@
+use std::{error::Error;
+use std::io;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast
new file mode 100644
index 000000000..8ca160601
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rast
@@ -0,0 +1,83 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ METHOD_CALL_EXPR
+ METHOD_CALL_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ R_BRACK "]"
+ DOT "."
+ NAME_REF
+ IDENT "iter"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ DOT "."
+ NAME_REF
+ IDENT "map"
+ ARG_LIST
+ L_PAREN "("
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "it"
+ PIPE "|"
+ R_PAREN ")"
+ WHITESPACE "\n "
+ DOT "."
+ NAME_REF
+ IDENT "max"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 56: expected expression
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs
new file mode 100644
index 000000000..a2f74bd87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0039_lambda_recovery.rs
@@ -0,0 +1,5 @@
+fn foo() -> i32 {
+ [1, 2, 3].iter()
+ .map(|it|)
+ .max::<i32>();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
new file mode 100644
index 000000000..9cea337ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
@@ -0,0 +1,75 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BLOCK_EXPR
+ TRY_KW "try"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'label"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 24: expected existential, fn, trait or impl
+error 41: expected existential, fn, trait or impl
+error 56: expected a block
+error 75: expected a loop
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs
new file mode 100644
index 000000000..8fa324c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rs
@@ -0,0 +1,6 @@
+fn main() {
+ { unsafe 92 }
+ { async 92 }
+ { try 92 }
+ { 'label: 92 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
new file mode 100644
index 000000000..cb4fb1642
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
@@ -0,0 +1,256 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForRef"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForTup"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForSlice"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "ForForFn"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_for_for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 21: expected a function pointer or path
+error 52: expected a function pointer or path
+error 88: expected a function pointer or path
+error 119: expected a function pointer or path
+error 195: expected a function pointer or path
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs
new file mode 100644
index 000000000..0e9f8ccb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rs
@@ -0,0 +1,9 @@
+type ForRef = for<'a> &'a u32;
+type ForTup = for<'a> (&'a u32,);
+type ForSlice = for<'a> [u32];
+type ForForFn = for<'a> for<'b> fn(&'a i32, &'b i32);
+fn for_for_for<T>()
+where
+ for<'a> for<'b> for<'c> fn(&'a T, &'b T, &'c T): Copy,
+{
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
new file mode 100644
index 000000000..96e471a69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ ERROR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN
+ ASYNC_KW "async"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ CONST
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ ERROR
+ FN_KW "fn"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 6: expected existential, fn, trait or impl
+error 38: expected a name
+error 40: missing type for `const` or `static`
+error 40: expected SEMICOLON
+error 44: expected BANG
+error 46: expected SEMICOLON
+error 47: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs
new file mode 100644
index 000000000..731e58013
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rs
@@ -0,0 +1,2 @@
+unsafe async fn foo() {}
+unsafe const fn bar() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast
new file mode 100644
index 000000000..4b2a74036
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rast
@@ -0,0 +1,15 @@
+SOURCE_FILE
+ ERROR
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ ERROR
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE "\n"
+error 10: expected existential, fn, trait or impl
+error 21: expected existential, fn, trait or impl
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs
new file mode 100644
index 000000000..db32b98df
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repated_extern_modifier.rs
@@ -0,0 +1 @@
+extern "C" extern "C"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast
new file mode 100644
index 000000000..3a05bfee1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rast
@@ -0,0 +1,123 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lol"
+ R_ANGLE ">"
+ COLON2 "::"
+ ERROR
+ L_ANGLE "<"
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "nope"
+ SHR ">>"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lol"
+ R_ANGLE ">"
+ COLON2 "::"
+ ERROR
+ L_ANGLE "<"
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "nope"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ERROR
+ EQ "="
+ WHITESPACE " "
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 30: expected identifier
+error 31: expected COMMA
+error 31: expected R_ANGLE
+error 31: expected SEMICOLON
+error 37: expected expression
+error 75: expected identifier
+error 76: expected SEMICOLON
+error 82: expected expression
+error 83: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs
new file mode 100644
index 000000000..31c12bfff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0048_double_fish.rs
@@ -0,0 +1,7 @@
+fn f() {
+ S::<Item::<lol>::<nope>>;
+}
+
+fn g() {
+ let _: Item::<lol>::<nope> = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast
new file mode 100644
index 000000000..ed739a7e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ERROR
+ INT_NUMBER "92"
+ ERROR
+ R_BRACK "]"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 12: expected `;` or `]`
+error 12: expected SEMICOLON
+error 13: expected an item
+error 15: expected an item
+error 16: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs
new file mode 100644
index 000000000..a94851443
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0001_array_type_missing_semi.rs
@@ -0,0 +1 @@
+type T = [() 92];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast
new file mode 100644
index 000000000..56cea4b15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ERROR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'loop"
+ COLON ":"
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 22: expected a loop
+error 27: expected type
+error 27: expected `{`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs
new file mode 100644
index 000000000..a2164c510
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0002_misplaced_label_err.rs
@@ -0,0 +1,3 @@
+fn main() {
+ 'loop: impl
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast
new file mode 100644
index 000000000..354c4135a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rast
@@ -0,0 +1,17 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 10: expected mut or const in raw pointer type (use `*mut T` or `*const T` as appropriate)
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs
new file mode 100644
index 000000000..fae705131
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0003_pointer_type_no_mutability.rs
@@ -0,0 +1 @@
+type T = *();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast
new file mode 100644
index 000000000..dbeb878a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rast
@@ -0,0 +1,79 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Type"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait1"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NotType"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait2"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NotType"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 38: expected trait or type
+error 38: expected `{`
+error 70: expected trait or type
+error 70: expected `{`
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs
new file mode 100644
index 000000000..b8c7b65e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0004_impl_type.rs
@@ -0,0 +1,4 @@
+impl Type {}
+impl Trait1 for T {}
+impl impl NotType {}
+impl Trait2 for impl NotType {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast
new file mode 100644
index 000000000..eb0595293
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ERROR
+ L_PAREN "("
+ ERROR
+ R_PAREN ")"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 15: expected `fn`
+error 15: expected SEMICOLON
+error 16: expected an item
+error 17: expected an item
+error 18: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs
new file mode 100644
index 000000000..f014914ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0005_fn_pointer_type_missing_fn.rs
@@ -0,0 +1 @@
+type F = unsafe ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast
new file mode 100644
index 000000000..77c2b56ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ERROR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ERROR
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 11: expected an item
+error 18: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs
new file mode 100644
index 000000000..26141e904
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0006_unsafe_block_in_mod.rs
@@ -0,0 +1 @@
+fn foo(){} unsafe { } fn bar(){}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast
new file mode 100644
index 000000000..bf20d5fa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 27: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs
new file mode 100644
index 000000000..9a423248c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0007_async_without_semicolon.rs
@@ -0,0 +1 @@
+fn foo() { let _ = async {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast
new file mode 100644
index 000000000..0ae9f64e7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ERROR
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 14: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs
new file mode 100644
index 000000000..2976f6862
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0008_pub_expr.rs
@@ -0,0 +1 @@
+fn foo() { pub 92; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast
new file mode 100644
index 000000000..823db94f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ STATIC
+ STATIC_KW "static"
+ WHITESPACE " "
+ ERROR
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 7: expected a name
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs
new file mode 100644
index 000000000..df8cecb43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0013_anonymous_static.rs
@@ -0,0 +1 @@
+static _: i32 = 5;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast
new file mode 100644
index 000000000..f51196004
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "field"
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 25: expected COLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs
new file mode 100644
index 000000000..a4e5b2f69
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rs
@@ -0,0 +1,3 @@
+fn main() {
+ S { field ..S::default() }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast
new file mode 100644
index 000000000..458d7f4e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ WHITESPACE " "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "g"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 12: expected COLON
+error 12: expected type
+error 12: expected COMMA
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs
new file mode 100644
index 000000000..da32227ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0014_struct_field_recover.rs
@@ -0,0 +1 @@
+struct S { f pub g: () }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast
new file mode 100644
index 000000000..b03f5ad9f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rast
@@ -0,0 +1,14 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 11: expected identifier
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs
new file mode 100644
index 000000000..7510664e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_empty_segment.rs
@@ -0,0 +1 @@
+use crate::;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast
new file mode 100644
index 000000000..e72df374d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rast
@@ -0,0 +1,53 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "z"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "t"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 6: missing type for function parameter
+error 6: expected COMMA
+error 16: missing type for function parameter
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs
new file mode 100644
index 000000000..4a95b9084
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0015_missing_fn_param_type.rs
@@ -0,0 +1 @@
+fn f(x y: i32, z, t: i32) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast
new file mode 100644
index 000000000..f3b1129f2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "c"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs
new file mode 100644
index 000000000..6fa175f54
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0002_use_tree_list.rs
@@ -0,0 +1 @@
+use {a, b, c};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast
new file mode 100644
index 000000000..8407e99f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_trait"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "str"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs
new file mode 100644
index 000000000..423bc105b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0003_where_pred_for.rs
@@ -0,0 +1,4 @@
+fn for_trait<F>()
+where
+ for<'a> F: Fn(&'a str)
+{ }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast
new file mode 100644
index 000000000..902b06484
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rast
@@ -0,0 +1,60 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs
new file mode 100644
index 000000000..93636e926
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs
@@ -0,0 +1 @@
+type F = Box<Fn(i32, &i32, &i32, ())>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast
new file mode 100644
index 000000000..3858e3eed
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs
new file mode 100644
index 000000000..9df40ed39
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0005_function_type_params.rs
@@ -0,0 +1 @@
+fn foo<T: Clone + Copy>(){}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast
new file mode 100644
index 000000000..67e282363
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rast
@@ -0,0 +1,128 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "e"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs
new file mode 100644
index 000000000..80c0a43f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0006_self_param.rs
@@ -0,0 +1,7 @@
+impl S {
+ fn a(self) {}
+ fn b(&self,) {}
+ fn c(&'a self,) {}
+ fn d(&'a mut self, x: i32) {}
+ fn e(mut self) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast
new file mode 100644
index 000000000..dee860c24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rast
@@ -0,0 +1,53 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ QUESTION "?"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ TILDE "~"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Drop"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs
new file mode 100644
index 000000000..5da3083b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0007_type_param_bounds.rs
@@ -0,0 +1 @@
+struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast
new file mode 100644
index 000000000..4ccda19a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rast
@@ -0,0 +1,98 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ PATH_PAT
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ PATH_PAT
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs
new file mode 100644
index 000000000..f6e32c7c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0008_path_part.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let foo::Bar = ();
+ let ::Bar = ();
+ let Bar { .. } = ();
+ let Bar(..) = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast
new file mode 100644
index 000000000..ab3b49b0d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs
new file mode 100644
index 000000000..9f078fa48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0009_loop_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ loop {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast
new file mode 100644
index 000000000..7a3cd6a0d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs
new file mode 100644
index 000000000..bee5ac845
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0010_extern_block.rs
@@ -0,0 +1,2 @@
+unsafe extern "C" {}
+extern {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
new file mode 100644
index 000000000..8498724b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
@@ -0,0 +1,60 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FIELD_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ DOT "."
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
new file mode 100644
index 000000000..b8da2ddc3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ x.foo;
+ x.0.bar;
+ x.0();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast
new file mode 100644
index 000000000..31c87d1b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs
new file mode 100644
index 000000000..2d30e8521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs
@@ -0,0 +1 @@
+type Foo = () where Foo: Copy;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast
new file mode 100644
index 000000000..bfe7ed5b4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "M"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs
new file mode 100644
index 000000000..04b2bb9ba
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0013_pointer_type_mut.rs
@@ -0,0 +1,2 @@
+type M = *mut ();
+type C = *mut ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast
new file mode 100644
index 000000000..53dbf3999
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Never"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ NEVER_TYPE
+ BANG "!"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs
new file mode 100644
index 000000000..de399fcf4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0014_never_type.rs
@@ -0,0 +1 @@
+type Never = !;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast
new file mode 100644
index 000000000..5d80a57a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CONTINUE_EXPR
+ CONTINUE_KW "continue"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CONTINUE_EXPR
+ CONTINUE_KW "continue"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'l"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs
new file mode 100644
index 000000000..474cc3f0e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0015_continue_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ loop {
+ continue;
+ continue 'l;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
new file mode 100644
index 000000000..2a5c644d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ ARRAY_TYPE
+ L_BRACK "["
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs
new file mode 100644
index 000000000..27eb22f22
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0017_array_type.rs
@@ -0,0 +1 @@
+type T = [(); 92];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast
new file mode 100644
index 000000000..a0b562629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rast
@@ -0,0 +1,76 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs
new file mode 100644
index 000000000..6a170d5ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0018_arb_self_types.rs
@@ -0,0 +1,4 @@
+impl S {
+ fn a(self: &Self) {}
+ fn b(mut self: Box<Self>) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast
new file mode 100644
index 000000000..525b26745
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ STAR "*"
+ PREFIX_EXPR
+ STAR "*"
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ BANG "!"
+ PREFIX_EXPR
+ BANG "!"
+ LITERAL
+ TRUE_KW "true"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ MINUS "-"
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs
new file mode 100644
index 000000000..f1c3f7118
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0019_unary_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ **&1;
+ !!true;
+ --1;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast
new file mode 100644
index 000000000..def7373c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rast
@@ -0,0 +1,81 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs
new file mode 100644
index 000000000..f10851487
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0021_assoc_item_list.rs
@@ -0,0 +1,6 @@
+impl F {
+ type A = i32;
+ const B: i32 = 92;
+ fn foo() {}
+ fn bar(&self) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast
new file mode 100644
index 000000000..8738292a9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs
new file mode 100644
index 000000000..a790a485f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0022_crate_visibility.rs
@@ -0,0 +1,3 @@
+pub(crate) struct S;
+pub(self) struct S;
+pub(super) struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast
new file mode 100644
index 000000000..d9db1c34b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Placeholder"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ INFER_TYPE
+ UNDERSCORE "_"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs
new file mode 100644
index 000000000..7952dbd57
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0023_placeholder_type.rs
@@ -0,0 +1 @@
+type Placeholder = _;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast
new file mode 100644
index 000000000..235a9d7f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs
new file mode 100644
index 000000000..7955973b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let [a, b, ..] = [];
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast
new file mode 100644
index 000000000..0bcb31524
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rast
@@ -0,0 +1,17 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs
new file mode 100644
index 000000000..4da1af827
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0025_slice_type.rs
@@ -0,0 +1 @@
+type T = [()];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast
new file mode 100644
index 000000000..3cdaf32b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs
new file mode 100644
index 000000000..0dfe63629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let S() = ();
+ let S(_) = ();
+ let S(_,) = ();
+ let S(_, .. , x) = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast
new file mode 100644
index 000000000..4516fd011
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ REF_PAT
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs
new file mode 100644
index 000000000..de41f5cae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0027_ref_pat.rs
@@ -0,0 +1,4 @@
+fn main() {
+ let &a = ();
+ let &mut b = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast
new file mode 100644
index 000000000..c7478da94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ EQ "="
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs
new file mode 100644
index 000000000..54c5a7c46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0028_impl_trait_type.rs
@@ -0,0 +1 @@
+type A = impl Iterator<Item=Foo<'a>> + 'a;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast
new file mode 100644
index 000000000..d53dde538
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "82"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "81"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "79"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i16"
+ WHITESPACE " "
+ MINUS "-"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "0x36"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ LTEQ "<="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0x37"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs
new file mode 100644
index 000000000..bfe8e4b36
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0029_cast_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ 82 as i32;
+ 81 as i8 + 1;
+ 79 as i16 - 1;
+ 0x36 as u8 <= 0x37;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast
new file mode 100644
index 000000000..dcffcb1ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs
new file mode 100644
index 000000000..0131d5e33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ if let Some(_) = None && true {}
+ while 1 == 5 && (let None = None) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast
new file mode 100644
index 000000000..16c522414
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rast
@@ -0,0 +1,87 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "it"
+ DOT "."
+ NAME_REF
+ IDENT "next"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs
new file mode 100644
index 000000000..2f8188160
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0031_while_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ while true {};
+ while let Some(x) = it.next() {};
+ while { true } {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast
new file mode 100644
index 000000000..608b0be16
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rast
@@ -0,0 +1,98 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ PARAM_LIST
+ L_PAREN "("
+ WHITESPACE " "
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ DOT3 "..."
+ WHITESPACE " "
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs
new file mode 100644
index 000000000..9493da83d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0032_fn_pointer_type.rs
@@ -0,0 +1,4 @@
+type A = fn();
+type B = unsafe fn();
+type C = unsafe extern "C" fn();
+type D = extern "C" fn ( u8 , ... ) -> u8;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast
new file mode 100644
index 000000000..b5c9d7a8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs
new file mode 100644
index 000000000..3ac0badab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0033_reference_type;.rs
@@ -0,0 +1,3 @@
+type A = &();
+type B = &'static ();
+type C = &mut ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast
new file mode 100644
index 000000000..06c053d0f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'l"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'l"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs
new file mode 100644
index 000000000..1b4094636
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0034_break_expr.rs
@@ -0,0 +1,8 @@
+fn foo() {
+ loop {
+ break;
+ break 'l;
+ break 92;
+ break 'l 92;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast
new file mode 100644
index 000000000..8c66cfe59
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rast
@@ -0,0 +1,79 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Output"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Default"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs
new file mode 100644
index 000000000..d140692e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0037_qual_paths.rs
@@ -0,0 +1,2 @@
+type X = <A as B>::Output;
+fn foo() { <usize as Default>::default(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast
new file mode 100644
index 000000000..9ffc07630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ INDEX_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "xs"
+ L_BRACK "["
+ RANGE_EXPR
+ DOT2 ".."
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs
new file mode 100644
index 000000000..ae21ad94c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0038_full_range_expr.rs
@@ -0,0 +1 @@
+fn foo() { xs[..]; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
new file mode 100644
index 000000000..07b0210e4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ VISIBILITY
+ CRATE_KW "crate"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ VISIBILITY
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME
+ IDENT "field"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ CRATE_KW "crate"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
new file mode 100644
index 000000000..e2b5f2161
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
@@ -0,0 +1,3 @@
+crate fn main() { }
+struct S { crate field: u32 }
+struct T(crate u32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast
new file mode 100644
index 000000000..dd7f76eb9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "new"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs
new file mode 100644
index 000000000..dcd9a7114
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0041_trait_item.rs
@@ -0,0 +1 @@
+trait T { fn new() -> Self; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast
new file mode 100644
index 000000000..19cc8d5ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rast
@@ -0,0 +1,148 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ CALL_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "func"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "func"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs
new file mode 100644
index 000000000..ffbf46d6d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0042_call_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let _ = f();
+ let _ = f()(1)(1, 2,);
+ let _ = f(<Foo>::func());
+ f(<Foo as Trait>::func());
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast
new file mode 100644
index 000000000..2e4b515ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs
new file mode 100644
index 000000000..d9868718c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0044_block_items.rs
@@ -0,0 +1 @@
+fn a() { fn b() {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast
new file mode 100644
index 000000000..e9d93a0d0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs
new file mode 100644
index 000000000..9b93442c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0045_param_list_opt_patterns.rs
@@ -0,0 +1 @@
+fn foo<F: FnMut(&mut Foo<'a>)>(){}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast
new file mode 100644
index 000000000..0129955d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs
new file mode 100644
index 000000000..cb66bad24
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0046_singleton_tuple_type.rs
@@ -0,0 +1 @@
+type T = (i32,);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast
new file mode 100644
index 000000000..a059e124a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rast
@@ -0,0 +1,85 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'f"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'f"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs
new file mode 100644
index 000000000..4bb0f63b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0048_path_type_with_bounds.rs
@@ -0,0 +1,2 @@
+fn foo() -> Box<T + 'f> {}
+fn foo() -> Box<dyn T + 'f> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast
new file mode 100644
index 000000000..f7fac807f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs
new file mode 100644
index 000000000..c9f74f7f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0050_fn_decl.rs
@@ -0,0 +1 @@
+trait T { fn foo(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast
new file mode 100644
index 000000000..b3df31535
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rast
@@ -0,0 +1,14 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs
new file mode 100644
index 000000000..c039cf7d3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0051_unit_type.rs
@@ -0,0 +1 @@
+type T = ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast
new file mode 100644
index 000000000..d498d3721
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rast
@@ -0,0 +1,72 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs
new file mode 100644
index 000000000..bf94f32e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0052_path_type.rs
@@ -0,0 +1,4 @@
+type A = Foo;
+type B = ::Foo;
+type C = self::Foo;
+type D = super::Foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast
new file mode 100644
index 000000000..48e123ab1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rast
@@ -0,0 +1,97 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "a"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "format"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs
new file mode 100644
index 000000000..333ebabef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0053_path_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ let _ = a;
+ let _ = a::b;
+ let _ = ::a::<b>;
+ let _ = format!();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast
new file mode 100644
index 000000000..639ee0eb7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs
new file mode 100644
index 000000000..d7f0b4382
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0054_record_field_attrs.rs
@@ -0,0 +1 @@
+struct S { #[attr] f: f32 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast
new file mode 100644
index 000000000..c83ea7ade
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ CHAR "'c'"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ STRING "\"hello\""
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs
new file mode 100644
index 000000000..6dfd67b4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0055_literal_pattern.rs
@@ -0,0 +1,8 @@
+fn main() {
+ match () {
+ -1 => (),
+ 92 => (),
+ 'c' => (),
+ "hello" => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast
new file mode 100644
index 000000000..a3cbe457e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rast
@@ -0,0 +1,117 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ COMMA ","
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ COMMA ","
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs
new file mode 100644
index 000000000..19d7e571b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0056_where_clause.rs
@@ -0,0 +1,7 @@
+fn foo()
+where
+ 'a: 'b + 'c,
+ T: Clone + Copy + 'static,
+ Iterator::Item: 'a,
+ <T as Iterator>::Item: 'a
+{}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast
new file mode 100644
index 000000000..44c967e8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast
@@ -0,0 +1,251 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT3 "..."
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "100"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "101"
+ WHITESPACE " "
+ DOT2EQ "..="
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "200"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "200"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "301"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "302"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ OR_PAT
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ COMMA ","
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs
new file mode 100644
index 000000000..6c586a895
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs
@@ -0,0 +1,18 @@
+fn main() {
+ match 92 {
+ 0 ... 100 => (),
+ 101 ..= 200 => (),
+ 200 .. 301 => (),
+ 302 .. => (),
+ }
+
+ match Some(10 as u8) {
+ Some(0) | None => (),
+ Some(1..) => ()
+ }
+
+ match (10 as u8, 5 as u8) {
+ (0, _) => (),
+ (1.., _) => ()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast
new file mode 100644
index 000000000..94897c2d2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rast
@@ -0,0 +1,60 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs
new file mode 100644
index 000000000..1f25d577a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0059_match_arms_commas.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ match () {
+ _ => (),
+ _ => {}
+ _ => ()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast
new file mode 100644
index 000000000..0a660957d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rast
@@ -0,0 +1,10 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs
new file mode 100644
index 000000000..49af74e1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0060_extern_crate.rs
@@ -0,0 +1 @@
+extern crate foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast
new file mode 100644
index 000000000..9997d0ae3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast
@@ -0,0 +1,125 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "32"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "32"
+ COMMA ","
+ WHITESPACE " "
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Default"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "TupleStruct"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ INT_NUMBER "0"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs
new file mode 100644
index 000000000..6285e5549
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ S {};
+ S { x, y: 32, };
+ S { x, y: 32, ..Default::default() };
+ TupleStruct { 0: 1 };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast
new file mode 100644
index 000000000..5f60e03d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rast
@@ -0,0 +1,65 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ BANG "!"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs
new file mode 100644
index 000000000..24a15c5c5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0062_mod_contents.rs
@@ -0,0 +1,5 @@
+fn foo() {}
+macro_rules! foo {}
+foo::bar!();
+super::baz! {}
+struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast
new file mode 100644
index 000000000..805052fbc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ BANG "!"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs
new file mode 100644
index 000000000..a7bd4b048
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0063_impl_item_neg.rs
@@ -0,0 +1 @@
+impl !Send for S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast
new file mode 100644
index 000000000..e2e964e44
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rast
@@ -0,0 +1,126 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ FALSE_KW "false"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs
new file mode 100644
index 000000000..40f227ba3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0064_if_expr.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ if true {};
+ if true {} else {};
+ if true {} else if false {} else {};
+ if S {};
+ if { true } { } else { };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast
new file mode 100644
index 000000000..e37d43aac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ EQ "="
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs
new file mode 100644
index 000000000..c3ecabb99
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0065_dyn_trait_type.rs
@@ -0,0 +1 @@
+type A = dyn Iterator<Item=Foo<'a>> + 'a;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast
new file mode 100644
index 000000000..8189cf0a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rast
@@ -0,0 +1,152 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Test"
+ WHITESPACE " "
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Test"
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "field"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_CURLY "}"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ OR_PAT
+ IDENT_PAT
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "Y"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Z"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ PIPE "|"
+ WHITESPACE " "
+ OR_PAT
+ IDENT_PAT
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "Y"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Z"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ PIPE "|"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs
new file mode 100644
index 000000000..9e009e24f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0066_match_arm.rs
@@ -0,0 +1,9 @@
+fn foo() {
+ match () {
+ _ => (),
+ _ if Test > Test{field: 0} => (),
+ X | Y if Z => (),
+ | X | Y if Z => (),
+ | X => (),
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast
new file mode 100644
index 000000000..f71367ae1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs
new file mode 100644
index 000000000..1bbb5930b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0067_crate_path.rs
@@ -0,0 +1 @@
+use crate::foo;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast
new file mode 100644
index 000000000..e387e14d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rast
@@ -0,0 +1,53 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ LITERAL
+ INT_NUMBER "1"
+ R_CURLY "}"
+ WHITESPACE " "
+ AMP "&"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ LITERAL
+ INT_NUMBER "1"
+ R_CURLY "}"
+ WHITESPACE " "
+ EXPR_STMT
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs
new file mode 100644
index 000000000..7e8bd87bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0070_stmt_bin_expr_ambiguity.rs
@@ -0,0 +1,4 @@
+fn f() {
+ let _ = {1} & 2;
+ {1} &2;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast
new file mode 100644
index 000000000..0d6cd390e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs
new file mode 100644
index 000000000..c4021dc10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0071_match_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ match () { };
+ match S {};
+ match { } { _ => () };
+ match { S {} } {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast
new file mode 100644
index 000000000..62cff1220
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs
new file mode 100644
index 000000000..5733666b6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0072_return_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ return;
+ return 92;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast
new file mode 100644
index 000000000..60ac3b3c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Result"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs
new file mode 100644
index 000000000..defd110c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0073_type_item_type_params.rs
@@ -0,0 +1 @@
+type Result<T> = ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast
new file mode 100644
index 000000000..950421feb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ SLICE_PAT
+ L_BRACK "["
+ R_BRACK "]"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs
new file mode 100644
index 000000000..2edd578f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0074_stmt_postfix_expr_ambiguity.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ match () {
+ _ => {}
+ () => {}
+ [] => {}
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast
new file mode 100644
index 000000000..a23364d15
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs
new file mode 100644
index 000000000..81f44c533
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0075_block.rs
@@ -0,0 +1,4 @@
+fn a() {}
+fn b() { let _ = 1; }
+fn c() { 1; 2; }
+fn d() { 1; 2 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast
new file mode 100644
index 000000000..a000d7e59
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rast
@@ -0,0 +1,40 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs
new file mode 100644
index 000000000..f0920b2a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0076_function_where_clause.rs
@@ -0,0 +1 @@
+fn foo<T>() where T: Copy {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast
new file mode 100644
index 000000000..c3aa8c15d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ TRY_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ QUESTION "?"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs
new file mode 100644
index 000000000..8b74f7bc8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0077_try_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ x?;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast
new file mode 100644
index 000000000..c5da79974
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs
new file mode 100644
index 000000000..04c0344fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0078_type_alias.rs
@@ -0,0 +1 @@
+type Foo = Bar;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast
new file mode 100644
index 000000000..879676309
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rast
@@ -0,0 +1,14 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs
new file mode 100644
index 000000000..647799d7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0079_impl_item.rs
@@ -0,0 +1 @@
+impl S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast
new file mode 100644
index 000000000..3a59cf7b8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ RANGE_EXPR
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ DOT "."
+ NAME_REF
+ IDENT "b"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs
new file mode 100644
index 000000000..e7b7cfc6b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0080_postfix_range.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ let x = 1..;
+ match 1.. { _ => () };
+ match a.b()..S { _ => () };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast
new file mode 100644
index 000000000..7600457a9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rast
@@ -0,0 +1,117 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Obj"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "PartialEq"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs
new file mode 100644
index 000000000..8ac7b9e10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0081_for_type.rs
@@ -0,0 +1,3 @@
+type A = for<'a> fn() -> ();
+type B = for<'a> unsafe extern "C" fn(&'a ()) -> ();
+type Obj = for<'a> PartialEq<&'a i32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast
new file mode 100644
index 000000000..108b0802c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rast
@@ -0,0 +1,139 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "// reference operator"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "raw"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "raw"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "// raw reference operator"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ RAW_KW "raw"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ RAW_KW "raw"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs
new file mode 100644
index 000000000..c5262f446
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0082_ref_expr.rs
@@ -0,0 +1,10 @@
+fn foo() {
+ // reference operator
+ let _ = &1;
+ let _ = &mut &f();
+ let _ = &raw;
+ let _ = &raw.0;
+ // raw reference operator
+ let _ = &raw mut foo;
+ let _ = &raw const foo;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast
new file mode 100644
index 000000000..cdbc40fe0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0083_struct_items.rast
@@ -0,0 +1,87 @@
+SOURCE_FILE@0..106
+ STRUCT@0..11
+ STRUCT_KW@0..6 "struct"
+ WHITESPACE@6..7 " "
+ NAME@7..10
+ IDENT@7..10 "Foo"
+ SEMICOLON@10..11 ";"
+ WHITESPACE@11..12 "\n"
+ STRUCT@12..25
+ STRUCT_KW@12..18 "struct"
+ WHITESPACE@18..19 " "
+ NAME@19..22
+ IDENT@19..22 "Foo"
+ WHITESPACE@22..23 " "
+ RECORD_FIELD_LIST@23..25
+ L_CURLY@23..24 "{"
+ R_CURLY@24..25 "}"
+ WHITESPACE@25..26 "\n"
+ STRUCT@26..39
+ STRUCT_KW@26..32 "struct"
+ WHITESPACE@32..33 " "
+ NAME@33..36
+ IDENT@33..36 "Foo"
+ TUPLE_FIELD_LIST@36..38
+ L_PAREN@36..37 "("
+ R_PAREN@37..38 ")"
+ SEMICOLON@38..39 ";"
+ WHITESPACE@39..40 "\n"
+ STRUCT@40..66
+ STRUCT_KW@40..46 "struct"
+ WHITESPACE@46..47 " "
+ NAME@47..50
+ IDENT@47..50 "Foo"
+ TUPLE_FIELD_LIST@50..65
+ L_PAREN@50..51 "("
+ TUPLE_FIELD@51..57
+ PATH_TYPE@51..57
+ PATH@51..57
+ PATH_SEGMENT@51..57
+ NAME_REF@51..57
+ IDENT@51..57 "String"
+ COMMA@57..58 ","
+ WHITESPACE@58..59 " "
+ TUPLE_FIELD@59..64
+ PATH_TYPE@59..64
+ PATH@59..64
+ PATH_SEGMENT@59..64
+ NAME_REF@59..64
+ IDENT@59..64 "usize"
+ R_PAREN@64..65 ")"
+ SEMICOLON@65..66 ";"
+ WHITESPACE@66..67 "\n"
+ STRUCT@67..105
+ STRUCT_KW@67..73 "struct"
+ WHITESPACE@73..74 " "
+ NAME@74..77
+ IDENT@74..77 "Foo"
+ WHITESPACE@77..78 " "
+ RECORD_FIELD_LIST@78..105
+ L_CURLY@78..79 "{"
+ WHITESPACE@79..84 "\n "
+ RECORD_FIELD@84..90
+ NAME@84..85
+ IDENT@84..85 "a"
+ COLON@85..86 ":"
+ WHITESPACE@86..87 " "
+ PATH_TYPE@87..90
+ PATH@87..90
+ PATH_SEGMENT@87..90
+ NAME_REF@87..90
+ IDENT@87..90 "i32"
+ COMMA@90..91 ","
+ WHITESPACE@91..96 "\n "
+ RECORD_FIELD@96..102
+ NAME@96..97
+ IDENT@96..97 "b"
+ COLON@97..98 ":"
+ WHITESPACE@98..99 " "
+ PATH_TYPE@99..102
+ PATH@99..102
+ PATH_SEGMENT@99..102
+ NAME_REF@99..102
+ IDENT@99..102 "f32"
+ COMMA@102..103 ","
+ WHITESPACE@103..104 "\n"
+ R_CURLY@104..105 "}"
+ WHITESPACE@105..106 "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast
new file mode 100644
index 000000000..29995bb75
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rast
@@ -0,0 +1,19 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs
new file mode 100644
index 000000000..6e1b25101
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0084_paren_type.rs
@@ -0,0 +1 @@
+type T = (i32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast
new file mode 100644
index 000000000..403c265ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rast
@@ -0,0 +1,136 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ FALSE_KW "false"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ FLOAT_NUMBER "2.0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE "b'a'"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ CHAR "'b'"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"c\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "r\"d\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE_STRING "b\"e\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE_STRING "br\"f\""
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs
new file mode 100644
index 000000000..2e11a5a6e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs
@@ -0,0 +1,12 @@
+fn foo() {
+ let _ = true;
+ let _ = false;
+ let _ = 1;
+ let _ = 2.0;
+ let _ = b'a';
+ let _ = 'b';
+ let _ = "c";
+ let _ = r"d";
+ let _ = b"e";
+ let _ = br"f";
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast
new file mode 100644
index 000000000..6687c843f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs
new file mode 100644
index 000000000..d22d8cada
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0086_function_ret_type.rs
@@ -0,0 +1,2 @@
+fn foo() {}
+fn bar() -> () {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast
new file mode 100644
index 000000000..cbf5e84e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rast
@@ -0,0 +1,67 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs
new file mode 100644
index 000000000..560eb05b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.rs
@@ -0,0 +1,6 @@
+fn foo(){
+ if break {}
+ while break {}
+ for i in break {}
+ match break {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast
new file mode 100644
index 000000000..cf7236f62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs
new file mode 100644
index 000000000..540eacb02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0090_type_param_default.rs
@@ -0,0 +1 @@
+struct S<T = i32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast
new file mode 100644
index 000000000..372c867ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs
new file mode 100644
index 000000000..e3ba5e87f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0092_fn_pointer_type_with_ret.rs
@@ -0,0 +1 @@
+type F = fn() -> ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast
new file mode 100644
index 000000000..6969259fc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ INDEX_EXPR
+ INDEX_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs
new file mode 100644
index 000000000..b9ba78a6c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0093_index_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ x[1][2];
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast
new file mode 100644
index 000000000..d39c3df2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs
new file mode 100644
index 000000000..4d719c433
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0095_placeholder_pat.rs
@@ -0,0 +1 @@
+fn main() { let _ = (); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast
new file mode 100644
index 000000000..f89cc15e7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rast
@@ -0,0 +1,125 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "test"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "test"
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs
new file mode 100644
index 000000000..4919665cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.rs
@@ -0,0 +1,13 @@
+fn foo() {
+ if true {}
+ loop {}
+ match () {}
+ while true {}
+ for _ in () {}
+ {}
+ {}
+ macro_rules! test {
+ () => {}
+ }
+ test!{}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast
new file mode 100644
index 000000000..d240a52f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rast
@@ -0,0 +1,103 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs
new file mode 100644
index 000000000..9d55bedbb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0099_param_list.rs
@@ -0,0 +1,4 @@
+fn a() {}
+fn b(x: i32) {}
+fn c(x: i32, ) {}
+fn d(x: i32, y: ()) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast
new file mode 100644
index 000000000..6bc3c0fb0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ R_BRACK "]"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs
new file mode 100644
index 000000000..972197d2a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0100_for_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ for x in [] {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast
new file mode 100644
index 000000000..f69ae1d64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rast
@@ -0,0 +1,175 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "f"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "g"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "h"
+ COLON ":"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "h"
+ COLON ":"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ REST_PAT
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "any"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs
new file mode 100644
index 000000000..0bfaae7c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0102_record_pat_field_list.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ let S {} = ();
+ let S { f, ref mut g } = ();
+ let S { h: _, ..} = ();
+ let S { h: _, } = ();
+ let S { #[cfg(any())] .. } = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast
new file mode 100644
index 000000000..60395948c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rast
@@ -0,0 +1,55 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs
new file mode 100644
index 000000000..4dc1999d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0103_array_expr.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ [];
+ [1];
+ [1, 2,];
+ [1; 2];
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast
new file mode 100644
index 000000000..fd83daf84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs
new file mode 100644
index 000000000..17ed20e5b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs
@@ -0,0 +1 @@
+type F = Box<Fn(i32) -> ()>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast
new file mode 100644
index 000000000..c25ad8430
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rast
@@ -0,0 +1,246 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ PIPE "|"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ PIPE "|"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs
new file mode 100644
index 000000000..75516d258
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0106_lambda_expr.rs
@@ -0,0 +1,15 @@
+fn foo() {
+ || ();
+ || -> i32 { 92 };
+ |x| x;
+ move |x: i32,| x;
+ async || {};
+ move || {};
+ async move || {};
+ static || {};
+ static move || {};
+ static async || {};
+ static async move || {};
+ for<'a> || {};
+ for<'a> move || {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
new file mode 100644
index 000000000..dcbcfe123
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
@@ -0,0 +1,63 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "y"
+ DOT "."
+ NAME_REF
+ IDENT "bar"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_ANGLE ">"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
new file mode 100644
index 000000000..1a3aa35ae
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ x.foo();
+ y.bar::<T>(1, 2,);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast
new file mode 100644
index 000000000..ac5a71703
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PAREN_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs
new file mode 100644
index 000000000..e4f774280
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0108_tuple_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ ();
+ (1);
+ (1,);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast
new file mode 100644
index 000000000..48d0bde84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rast
@@ -0,0 +1,70 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ COLON ":"
+ WHITESPACE " "
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FOR_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ COLON ":"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs
new file mode 100644
index 000000000..48e83f263
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0109_label.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ 'a: loop {}
+ 'b: while true {}
+ 'c: for x in () {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast
new file mode 100644
index 000000000..cebe98c43
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs
new file mode 100644
index 000000000..ba719879d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let (a, b, ..) = ();
+ let (a,) = ();
+ let (..) = ();
+ let () = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast
new file mode 100644
index 000000000..eb1c32474
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rast
@@ -0,0 +1,128 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "e"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "g"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs
new file mode 100644
index 000000000..820a9e72c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0112_bind_pat.rs
@@ -0,0 +1,8 @@
+fn main() {
+ let a = ();
+ let mut b = ();
+ let ref c = ();
+ let ref mut d = ();
+ let e @ _ = ();
+ let ref mut f @ g @ _ = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast
new file mode 100644
index 000000000..8bd90a7f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "some_expr"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ R_CURLY "}"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Ok"
+ ARG_LIST
+ L_PAREN "("
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs
new file mode 100644
index 000000000..bbf09e367
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0113_nocontentexpr.rs
@@ -0,0 +1,3 @@
+fn foo(){
+ ;;;some_expr();;;;{;;;};;;;Ok(())
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast
new file mode 100644
index 000000000..aab774165
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs
new file mode 100644
index 000000000..a602e0018
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0114_tuple_struct_where.rs
@@ -0,0 +1 @@
+struct S<T>(T) where T: Clone;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast
new file mode 100644
index 000000000..1699602f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs
new file mode 100644
index 000000000..648ffe565
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0115_tuple_field_attrs.rs
@@ -0,0 +1 @@
+struct S (#[attr] f32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast
new file mode 100644
index 000000000..8165cb7d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rast
@@ -0,0 +1,46 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs
new file mode 100644
index 000000000..edb470c89
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0117_macro_call_type.rs
@@ -0,0 +1,2 @@
+type A = foo!();
+type B = crate::foo!();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast
new file mode 100644
index 000000000..96318b521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ MATCH_GUARD
+ IF_KW "if"
+ WHITESPACE " "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs
new file mode 100644
index 000000000..cfe05ce4e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0118_match_guard.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ match () {
+ _ if foo => (),
+ _ if let foo = bar => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast
new file mode 100644
index 000000000..6fd9f4246
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rast
@@ -0,0 +1,84 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attribute\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Can be\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Stacked\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs
new file mode 100644
index 000000000..54a67c9d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0120_match_arms_inner_attribute.rs
@@ -0,0 +1,8 @@
+fn foo() {
+ match () {
+ #![doc("Inner attribute")]
+ #![doc("Can be")]
+ #![doc("Stacked")]
+ _ => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast
new file mode 100644
index 000000000..0f7580c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rast
@@ -0,0 +1,151 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"some\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"other\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"many\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"attributes\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"before\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs
new file mode 100644
index 000000000..676db42d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0121_match_arms_outer_attributes.rs
@@ -0,0 +1,12 @@
+fn foo() {
+ match () {
+ #[cfg(feature = "some")]
+ _ => (),
+ #[cfg(feature = "other")]
+ _ => (),
+ #[cfg(feature = "many")]
+ #[cfg(feature = "attributes")]
+ #[cfg(feature = "before")]
+ _ => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast
new file mode 100644
index 000000000..338d53995
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rast
@@ -0,0 +1,62 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "printf"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "format"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ DOT3 "..."
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs
new file mode 100644
index 000000000..533096cd5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs
@@ -0,0 +1 @@
+extern "C" { fn printf(format: *const i8, ..., _: u8) -> i32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
new file mode 100644
index 000000000..8d9b61630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
new file mode 100644
index 000000000..0f454d121
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
@@ -0,0 +1 @@
+fn foo() { crate::foo(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast
new file mode 100644
index 000000000..a1df70841
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rast
@@ -0,0 +1,49 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "field"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs
new file mode 100644
index 000000000..a6c7760c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_record_literal_field_with_attr.rs
@@ -0,0 +1,3 @@
+fn main() {
+ S { #[cfg(test)] field: 1 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast
new file mode 100644
index 000000000..81b7f2b3c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_BRACK "]"
+ WHITESPACE " "
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "C"
+ R_BRACK "]"
+ WHITESPACE " "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "D"
+ R_BRACK "]"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "D"
+ R_BRACK "]"
+ WHITESPACE " "
+ RETURN_KW "return"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs
new file mode 100644
index 000000000..b28c078f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0126_attr_on_expr_stmt.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ #[A] foo();
+ #[B] bar!{}
+ #[C] #[D] {}
+ #[D] return ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast
new file mode 100644
index 000000000..cedaa9045
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ MACRO_PAT
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs
new file mode 100644
index 000000000..811181d9b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0129_marco_pat.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let m!(x) = 0;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast
new file mode 100644
index 000000000..de9d0fc19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs
new file mode 100644
index 000000000..8003999fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_let_stmt.rs
@@ -0,0 +1 @@
+fn f() { let x: i32 = 92; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast
new file mode 100644
index 000000000..aec8fbf47
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ TRY_KW "try"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs
new file mode 100644
index 000000000..0f1b41eb6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0130_try_block_expr.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ let _ = try {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast
new file mode 100644
index 000000000..b73780261
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ EXISTENTIAL_KW "existential"
+ WHITESPACE " "
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs
new file mode 100644
index 000000000..23baf7145
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs
@@ -0,0 +1 @@
+existential type Foo: Fn() -> usize;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast
new file mode 100644
index 000000000..b21f37cd8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rast
@@ -0,0 +1,90 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1i32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1i32"
+ COMMA ","
+ WHITESPACE " "
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ ARG_LIST
+ L_PAREN "("
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1i32"
+ COMMA ","
+ WHITESPACE " "
+ BOX_EXPR
+ BOX_KW "box"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs
new file mode 100644
index 000000000..fc9923b71
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ let x = box 1i32;
+ let y = (box 1i32, box 2i32);
+ let z = Foo(box 1i32, box 2i32);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast
new file mode 100644
index 000000000..f5ee12fe9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rast
@@ -0,0 +1,64 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "simple_function"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "LocalEnum"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "One"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "Two"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs
new file mode 100644
index 000000000..eadc7fffb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0134_nocontentexpr_after_item.rs
@@ -0,0 +1,8 @@
+fn simple_function() {
+ enum LocalEnum {
+ One,
+ Two,
+ };
+ fn f() {};
+ struct S {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
new file mode 100644
index 000000000..9d37ada0d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
@@ -0,0 +1,70 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ AWAIT_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ AWAIT_KW "await"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ AWAIT_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ DOT "."
+ AWAIT_KW "await"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ TRY_EXPR
+ AWAIT_EXPR
+ CALL_EXPR
+ FIELD_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ INT_NUMBER "0"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ DOT "."
+ AWAIT_KW "await"
+ QUESTION "?"
+ DOT "."
+ NAME_REF
+ IDENT "hello"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
new file mode 100644
index 000000000..d2ba89ca6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ x.await;
+ x.0.await;
+ x.0().await?.hello();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast
new file mode 100644
index 000000000..8cbc98c51
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rast
@@ -0,0 +1,111 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "print_all"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ TRUE_KW "true"
+ R_ANGLE ">"
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Display"
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "printables"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs
new file mode 100644
index 000000000..0f7a2d160
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_associated_type_bounds.rs
@@ -0,0 +1 @@
+fn print_all<T: Iterator<Item, Item::Item, Item::<true>, Item: Display, Item<'a> = Item>>(printables: T) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast
new file mode 100644
index 000000000..553ac356d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rast
@@ -0,0 +1,66 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "p"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ FIELD_EXPR
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "p"
+ R_CURLY "}"
+ DOT "."
+ NAME_REF
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "10"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs
new file mode 100644
index 000000000..76007e3ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_expression_after_block.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ let mut p = F{x: 5};
+ {p}.x = 10;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast
new file mode 100644
index 000000000..db583f7d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "must_use"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs
new file mode 100644
index 000000000..35155057a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0138_self_param_outer_attr.rs
@@ -0,0 +1 @@
+fn f(#[must_use] self) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast
new file mode 100644
index 000000000..c63ea020a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr1"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "pat"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Type"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs
new file mode 100644
index 000000000..c238be791
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0139_param_outer_arg.rs
@@ -0,0 +1 @@
+fn f(#[attr1] pat: Type) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast
new file mode 100644
index 000000000..90cf3101c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs
new file mode 100644
index 000000000..af0d40a7a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0142_for_range_from.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ for x in 0 .. {
+ break;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast
new file mode 100644
index 000000000..df22decde
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rast
@@ -0,0 +1,111 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Outer"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "j"
+ COLON ":"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Inner"
+ L_PAREN "("
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ BOX_PAT
+ BOX_KW "box"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs
new file mode 100644
index 000000000..9d458aa1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0143_box_pat.rs
@@ -0,0 +1,5 @@
+fn main() {
+ let box i = ();
+ let box Outer { box i, j: box Inner(box &x) } = ();
+ let box ref mut i = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast
new file mode 100644
index 000000000..4d4011e6b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rast
@@ -0,0 +1,456 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ COMMENT "// Tuples"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Tuple"
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ COMMENT "// Slices"
+ WHITESPACE "\n "
+ COMMENT "//"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "tail"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "mid"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "mid"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "tail"
+ WHITESPACE " "
+ AT "@"
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "head"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "mid"
+ COMMA ","
+ WHITESPACE " "
+ REST_PAT
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "cons"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs
new file mode 100644
index 000000000..3262f27e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0144_dot_dot_pat.rs
@@ -0,0 +1,25 @@
+fn main() {
+ let .. = ();
+ //
+ // Tuples
+ //
+ let (a, ..) = ();
+ let (a, ..,) = ();
+ let Tuple(a, ..) = ();
+ let Tuple(a, ..,) = ();
+ let (.., ..) = ();
+ let Tuple(.., ..) = ();
+ let (.., a, ..) = ();
+ let Tuple(.., a, ..) = ();
+ //
+ // Slices
+ //
+ let [..] = ();
+ let [head, ..] = ();
+ let [head, tail @ ..] = ();
+ let [head, .., cons] = ();
+ let [head, mid @ .., cons] = ();
+ let [head, .., .., cons] = ();
+ let [head, .., mid, tail @ ..] = ();
+ let [head, .., mid, .., cons] = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast
new file mode 100644
index 000000000..f3d2fde46
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rast
@@ -0,0 +1,123 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ INT_NUMBER "0"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ NAME_REF
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "any"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs
new file mode 100644
index 000000000..53cfdc22d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0145_record_pat_field.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ let S { 0: 1 } = ();
+ let S { x: 1 } = ();
+ let S { #[cfg(any())] x: 1 } = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast
new file mode 100644
index 000000000..4079d2a99
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CAST_EXPR
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs
new file mode 100644
index 000000000..70559c5ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0146_as_precedence.rs
@@ -0,0 +1 @@
+fn f() { let _ = &1 as *const i32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast
new file mode 100644
index 000000000..24595a1a1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rast
@@ -0,0 +1,23 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs
new file mode 100644
index 000000000..8cdb3b703
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_const_param.rs
@@ -0,0 +1 @@
+struct S<const N: u32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast
new file mode 100644
index 000000000..01de13a90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast
@@ -0,0 +1,19 @@
+SOURCE_FILE
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs
new file mode 100644
index 000000000..a014ae546
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0147_macro_def.rs
@@ -0,0 +1 @@
+macro m($i:ident) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast
new file mode 100644
index 000000000..6eb8af331
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ SLICE_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs
new file mode 100644
index 000000000..2ac310924
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_array_attrs.rs
@@ -0,0 +1 @@
+const A: &[i64] = &[1, #[cfg(test)] 2];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast
new file mode 100644
index 000000000..24977a22a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "N"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs
new file mode 100644
index 000000000..cb0a105c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0150_impl_type_params.rs
@@ -0,0 +1 @@
+impl<const N: u32> Bar<N> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast
new file mode 100644
index 000000000..a88b3393f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rast
@@ -0,0 +1,15 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs
new file mode 100644
index 000000000..8f3b7ef11
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_fn.rs
@@ -0,0 +1 @@
+fn foo() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
new file mode 100644
index 000000000..2ef66484a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Z"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs
new file mode 100644
index 000000000..71d76789f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0151_trait_alias.rs
@@ -0,0 +1 @@
+trait Z<U> = T<U>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast
new file mode 100644
index 000000000..ae1074c36
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ INT_NUMBER "92"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs
new file mode 100644
index 000000000..5daf1d7b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0152_arg_with_attr.rs
@@ -0,0 +1,3 @@
+fn main() {
+ foo(#[attr] 92)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast
new file mode 100644
index 000000000..2dede8359
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rast
@@ -0,0 +1,56 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ COMMA ","
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs
new file mode 100644
index 000000000..d4c163822
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0153_pub_parens_typepath.rs
@@ -0,0 +1,2 @@
+struct B(pub (super::A));
+struct B(pub (crate::A,));
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast
new file mode 100644
index 000000000..ee8465e6c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rast
@@ -0,0 +1,58 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Qux"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "baz"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs
new file mode 100644
index 000000000..80a1701fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_fn_pointer_param_ident_path.rs
@@ -0,0 +1,2 @@
+type Foo = fn(Bar::Baz);
+type Qux = fn(baz: Bar::Baz);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast
new file mode 100644
index 000000000..30a2842e5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Test"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs
new file mode 100644
index 000000000..47a71fd19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_no_dyn_trait_leading_for.rs
@@ -0,0 +1 @@
+type A = for<'a> Test<'a> + Send;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast
new file mode 100644
index 000000000..39857b23c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ INT_NUMBER "2"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs
new file mode 100644
index 000000000..f84b7ab31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0154_tuple_attrs.rs
@@ -0,0 +1 @@
+const A: (i64, i64) = (1, #[cfg(test)] 2);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast
new file mode 100644
index 000000000..318eb89de
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rast
@@ -0,0 +1,70 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "bar"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "baz"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "qux"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Qux"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Quux"
+ PIPE "|"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs
new file mode 100644
index 000000000..6ca8dd2d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0155_closure_params.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let foo = |bar, baz: Baz, qux: Qux::Quux| ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast
new file mode 100644
index 000000000..59de2b9f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rast
@@ -0,0 +1,79 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ CONST_BLOCK_PAT
+ CONST_KW "const"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "15"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ CONST_BLOCK_PAT
+ CONST_KW "const"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs
new file mode 100644
index 000000000..dce9defac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_const_block_pat.rs
@@ -0,0 +1,4 @@
+fn main() {
+ let const { 15 } = ();
+ let const { foo(); bar() } = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast
new file mode 100644
index 000000000..ce425a1af
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ DOT3 "..."
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs
new file mode 100644
index 000000000..7b4c62658
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs
@@ -0,0 +1 @@
+fn foo(..., (x, y): (i32, i32)) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast
new file mode 100644
index 000000000..6a2046d9e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rast
@@ -0,0 +1,112 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ PAREN_PAT
+ L_PAREN "("
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ REF_PAT
+ AMP "&"
+ PAREN_PAT
+ L_PAREN "("
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_PAT
+ L_PAREN "("
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ SLICE_PAT
+ L_BRACK "["
+ OR_PAT
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COMMA ","
+ R_BRACK "]"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs
new file mode 100644
index 000000000..a26316605
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_or_pattern.rs
@@ -0,0 +1,8 @@
+fn main() {
+ match () {
+ (_ | _) => (),
+ &(_ | _) => (),
+ (_ | _,) => (),
+ [_ | _,] => (),
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast
new file mode 100644
index 000000000..8a525c6e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
new file mode 100644
index 000000000..1ebbe5b03
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
@@ -0,0 +1 @@
+type Foo = fn(_: bar);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast
new file mode 100644
index 000000000..9f0c5a761
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ VARIANT
+ NAME
+ IDENT "X"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "10"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs
new file mode 100644
index 000000000..c8c5c0f17
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0157_variant_discriminant.rs
@@ -0,0 +1 @@
+enum E { X(i32) = 10 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast
new file mode 100644
index 000000000..f667c1972
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "v"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ LITERAL
+ INT_NUMBER "1"
+ R_CURLY "}"
+ AMP "&"
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs
new file mode 100644
index 000000000..e325e4667
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_binop_resets_statementness.rs
@@ -0,0 +1 @@
+fn f() { v = {1}&2; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast
new file mode 100644
index 000000000..93238bd8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CALL_EXPR
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs
new file mode 100644
index 000000000..061118d3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_lambda_ret_block.rs
@@ -0,0 +1 @@
+fn main() { || -> i32 { 92 }(); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast
new file mode 100644
index 000000000..45cd4d2aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rast
@@ -0,0 +1,57 @@
+SOURCE_FILE
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_BRACK "["
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs
new file mode 100644
index 000000000..6033a28cd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0158_macro_rules_non_brace.rs
@@ -0,0 +1,2 @@
+macro_rules! m ( ($i:ident) => {} );
+macro_rules! m [ ($i:ident) => {} ];
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast
new file mode 100644
index 000000000..0adb678fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "try"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "Ok"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs
new file mode 100644
index 000000000..61a6b46a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs
@@ -0,0 +1 @@
+fn foo() { try!(Ok(())); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast
new file mode 100644
index 000000000..31aa58de2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ YIELD_EXPR
+ YIELD_KW "yield"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ YIELD_EXPR
+ YIELD_KW "yield"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs
new file mode 100644
index 000000000..596e221f7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_yield_expr.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ yield;
+ yield 1;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast
new file mode 100644
index 000000000..ac45c5695
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ IN_KW "in"
+ WHITESPACE " "
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ IN_KW "in"
+ WHITESPACE " "
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ R_PAREN ")"
+ WHITESPACE " "
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs
new file mode 100644
index 000000000..2856dbd84
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_crate_visibility_in.rs
@@ -0,0 +1,2 @@
+pub(in super::A) struct S;
+pub(in crate) struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast
new file mode 100644
index 000000000..e6916ae97
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ MACRO_RULES
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "try"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs
new file mode 100644
index 000000000..2e2ab6e60
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs
@@ -0,0 +1 @@
+macro_rules! try { () => {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast
new file mode 100644
index 000000000..f7c7aaabc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs
new file mode 100644
index 000000000..3252d6f36
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_impl_item_const.rs
@@ -0,0 +1 @@
+impl const Send for S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast
new file mode 100644
index 000000000..181251d4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rast
@@ -0,0 +1,28 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'label"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs
new file mode 100644
index 000000000..18b4ff4b1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0161_labeled_block.rs
@@ -0,0 +1 @@
+fn f() { 'label: {}; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast
new file mode 100644
index 000000000..7c2f7b34c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs
new file mode 100644
index 000000000..05c20a68f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0162_default_async_unsafe_fn.rs
@@ -0,0 +1,3 @@
+impl T for Foo {
+ default async unsafe fn foo() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast
new file mode 100644
index 000000000..06b37e239
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs
new file mode 100644
index 000000000..78c3b4d85
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_async_fn.rs
@@ -0,0 +1,3 @@
+impl T for Foo {
+ default async fn foo() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast
new file mode 100644
index 000000000..b180d0b72
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs
new file mode 100644
index 000000000..96340f84a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0163_default_unsafe_item.rs
@@ -0,0 +1,3 @@
+default unsafe impl T for Foo {
+ default unsafe fn foo() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast
new file mode 100644
index 000000000..7a8e8cf1d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs
new file mode 100644
index 000000000..a6836cbd5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_default_item.rs
@@ -0,0 +1 @@
+default impl T for Foo {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast
new file mode 100644
index 000000000..297f7575c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ PATH_PAT
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ INFER_TYPE
+ UNDERSCORE "_"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs
new file mode 100644
index 000000000..ebe26834d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0164_type_path_in_pattern.rs
@@ -0,0 +1 @@
+fn main() { let <_>::Foo = (); }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast
new file mode 100644
index 000000000..3d3587a70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ RANGE_PAT
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1u32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs
new file mode 100644
index 000000000..1360eda05
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs
@@ -0,0 +1 @@
+fn f() { let 0 .. = 1u32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast
new file mode 100644
index 000000000..5a5aca96f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs
new file mode 100644
index 000000000..fc76e17dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_rename.rs
@@ -0,0 +1 @@
+extern crate foo as bar;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast
new file mode 100644
index 000000000..edea4245f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rast
@@ -0,0 +1,10 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ SELF_KW "self"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs
new file mode 100644
index 000000000..c969ed109
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0168_extern_crate_self.rs
@@ -0,0 +1 @@
+extern crate self;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast
new file mode 100644
index 000000000..4d505916c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rast
@@ -0,0 +1,8 @@
+SOURCE_FILE
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs
new file mode 100644
index 000000000..f21af614d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0169_mod_item.rs
@@ -0,0 +1 @@
+mod a;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast
new file mode 100644
index 000000000..d5e3f3493
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rast
@@ -0,0 +1,12 @@
+SOURCE_FILE
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs
new file mode 100644
index 000000000..16b1b43e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_mod_item_curly.rs
@@ -0,0 +1 @@
+mod b { }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast
new file mode 100644
index 000000000..6e5f6c2d2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rast
@@ -0,0 +1,25 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ COMMA ","
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs
new file mode 100644
index 000000000..b4e05717e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0170_tuple_struct.rs
@@ -0,0 +1 @@
+struct S(String, usize);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast
new file mode 100644
index 000000000..78f968207
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rast
@@ -0,0 +1,11 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs
new file mode 100644
index 000000000..5f1a34f49
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0171_struct_item.rs
@@ -0,0 +1 @@
+struct S {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast
new file mode 100644
index 000000000..909983c9a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs
new file mode 100644
index 000000000..6d5f5be65
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_const_item.rs
@@ -0,0 +1 @@
+const C: u32 = 92;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast
new file mode 100644
index 000000000..065d7e7e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs
new file mode 100644
index 000000000..a3bd7787d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0172_record_field_list.rs
@@ -0,0 +1 @@
+struct S { a: i32, b: f32 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast
new file mode 100644
index 000000000..d81b4ff26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rast
@@ -0,0 +1,19 @@
+SOURCE_FILE
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs
new file mode 100644
index 000000000..c1d5cdfc6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_anonymous_const.rs
@@ -0,0 +1 @@
+const _: u32 = 0;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast
new file mode 100644
index 000000000..5cf305d26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "i"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs
new file mode 100644
index 000000000..5ed0c777d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_macro_def_curly.rs
@@ -0,0 +1 @@
+macro m { ($i:ident) => {} }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast
new file mode 100644
index 000000000..af608fc4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "U"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "i"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs
new file mode 100644
index 000000000..5edf50de3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0173_union_item.rs
@@ -0,0 +1 @@
+struct U { i: i32, f: f32 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast
new file mode 100644
index 000000000..01f212e71
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rast
@@ -0,0 +1,35 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Display"
+ R_ANGLE ">"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs
new file mode 100644
index 000000000..4a51926a6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_trait_item_generic_params.rs
@@ -0,0 +1 @@
+trait X<U: Debug + Display> {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast
new file mode 100644
index 000000000..438dea6f4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rast
@@ -0,0 +1,8 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs
new file mode 100644
index 000000000..28377c276
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_unit_struct.rs
@@ -0,0 +1 @@
+struct S;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast
new file mode 100644
index 000000000..8662423f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ STAR "*"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs
new file mode 100644
index 000000000..b8c613440
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0174_use_tree_star.rs
@@ -0,0 +1,2 @@
+use *;
+use std::{*};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast
new file mode 100644
index 000000000..bab831456
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Hash"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs
new file mode 100644
index 000000000..e6ad2b56a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0175_trait_item_bounds.rs
@@ -0,0 +1 @@
+trait T: Hash + Clone {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast
new file mode 100644
index 000000000..46cd8ee66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs
new file mode 100644
index 000000000..52a6a806f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_trait_item_where_clause.rs
@@ -0,0 +1 @@
+trait T where Self: Copy {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast
new file mode 100644
index 000000000..ef0dd6ba1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "stdlib"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ UNDERSCORE "_"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs
new file mode 100644
index 000000000..19a6906a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0176_use_tree_alias.rs
@@ -0,0 +1,2 @@
+use std as stdlib;
+use Trait as _;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast
new file mode 100644
index 000000000..9cb3c8a5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs
new file mode 100644
index 000000000..915e2c932
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_assoc_item_list_inner_attrs.rs
@@ -0,0 +1 @@
+impl S { #![attr] }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
new file mode 100644
index 000000000..4443d9d14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Z"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Z"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs
new file mode 100644
index 000000000..a90d54b01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_trait_alias_where_clause.rs
@@ -0,0 +1,2 @@
+trait Z<U> = T<U> where U: Copy;
+trait Z<U> = where Self: T<U>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast
new file mode 100644
index 000000000..98231cdc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "outer"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "tree"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "inner"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "tree"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs
new file mode 100644
index 000000000..3cc394348
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree.rs
@@ -0,0 +1 @@
+use outer::tree::{inner::tree};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast
new file mode 100644
index 000000000..ede22dbaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rast
@@ -0,0 +1,72 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "std"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "collections"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "m"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs
new file mode 100644
index 000000000..5b22f8852
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0177_use_tree_path.rs
@@ -0,0 +1,6 @@
+use ::std;
+use std::collections;
+
+use self::m;
+use super::m;
+use crate::m;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast
new file mode 100644
index 000000000..ed3cafae1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rast
@@ -0,0 +1,20 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "collections"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs
new file mode 100644
index 000000000..c3086f51a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0178_use_tree_path_use_tree.rs
@@ -0,0 +1 @@
+use std::{collections};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast
new file mode 100644
index 000000000..b4dc1f25d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rast
@@ -0,0 +1,26 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs
new file mode 100644
index 000000000..caae0ba02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs
@@ -0,0 +1,2 @@
+use ::*;
+use std::{::*};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast
new file mode 100644
index 000000000..d255adb5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rast
@@ -0,0 +1,13 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs
new file mode 100644
index 000000000..dd601cffe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0180_use_tree_path_star.rs
@@ -0,0 +1 @@
+use std::*;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast
new file mode 100644
index 000000000..28a216e87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rast
@@ -0,0 +1,46 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lt_attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "t_attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs
new file mode 100644
index 000000000..0509f81da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_generic_param_attribute.rs
@@ -0,0 +1 @@
+fn foo<#[lt_attr] 'a, #[t_attr] T>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast
new file mode 100644
index 000000000..25761ed8c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "collections"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs
new file mode 100644
index 000000000..48ac87b14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0181_use_item.rs
@@ -0,0 +1 @@
+use std::collections;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast
new file mode 100644
index 000000000..c595031f3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rast
@@ -0,0 +1,25 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs
new file mode 100644
index 000000000..2bb38ece8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0182_lifetime_param.rs
@@ -0,0 +1 @@
+fn f<'a: 'b>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast
new file mode 100644
index 000000000..ea8866da2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "90"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_CURLY "}"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs
new file mode 100644
index 000000000..1c279db28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_const_arg_block.rs
@@ -0,0 +1 @@
+type T = S<{90 + 2}>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast
new file mode 100644
index 000000000..becb77e04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs
new file mode 100644
index 000000000..b250bc6bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0183_type_param.rs
@@ -0,0 +1 @@
+fn f<T: Clone>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast
new file mode 100644
index 000000000..1e0300717
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "92"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs
new file mode 100644
index 000000000..8b5e5dbe1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_const_arg.rs
@@ -0,0 +1 @@
+type T = S<92>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast
new file mode 100644
index 000000000..becb77e04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs
new file mode 100644
index 000000000..b250bc6bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0184_generic_param_list.rs
@@ -0,0 +1 @@
+fn f<T: Clone>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast
new file mode 100644
index 000000000..f2e4e0106
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "StreamingIterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs
new file mode 100644
index 000000000..daae97e4f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0185_assoc_type_bound.rs
@@ -0,0 +1 @@
+type T = StreamingIterator<Item<'a>: Clone>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast
new file mode 100644
index 000000000..dbd7ff306
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs
new file mode 100644
index 000000000..41715aa27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0186_lifetime_arg.rs
@@ -0,0 +1 @@
+type T = S<'static>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast
new file mode 100644
index 000000000..970431840
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "StreamingIterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs
new file mode 100644
index 000000000..359141747
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0187_assoc_type_eq.rs
@@ -0,0 +1 @@
+type T = StreamingIterator<Item<'a> = &'a T>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast
new file mode 100644
index 000000000..11002bf98
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "MAX"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs
new file mode 100644
index 000000000..f3da43ca0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = i32::MAX>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast
new file mode 100644
index 000000000..03d414e33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ STRING "\"hello\""
+ COMMA ","
+ WHITESPACE " "
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "0xdeadbeef"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs
new file mode 100644
index 000000000..7eacada73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0189_const_arg_literal.rs
@@ -0,0 +1 @@
+type T = S<"hello", 0xdeadbeef>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast
new file mode 100644
index 000000000..5a01f154b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rast
@@ -0,0 +1,25 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs
new file mode 100644
index 000000000..f2ccc558b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0190_generic_arg.rs
@@ -0,0 +1 @@
+type T = S<i32>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast
new file mode 100644
index 000000000..e504badbd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rast
@@ -0,0 +1,24 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "92"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs
new file mode 100644
index 000000000..d0a87bdc0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0191_const_arg_negative_number.rs
@@ -0,0 +1 @@
+type T = S<-92>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast
new file mode 100644
index 000000000..aea23e463
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ CONST_ARG
+ LITERAL
+ TRUE_KW "true"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs
new file mode 100644
index 000000000..4b92e2d48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0192_const_arg_bool_literal.rs
@@ -0,0 +1 @@
+type T = S<true>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast
new file mode 100644
index 000000000..1b6399158
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs
new file mode 100644
index 000000000..232c0db41
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0193_let_stmt_init.rs
@@ -0,0 +1 @@
+fn f() { let x = 92; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast
new file mode 100644
index 000000000..ce7f1a35e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "opt"
+ WHITESPACE " "
+ LET_ELSE
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs
new file mode 100644
index 000000000..8303de06f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_else.rs
@@ -0,0 +1 @@
+fn f() { let Some(x) = opt else { return }; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast
new file mode 100644
index 000000000..ac8e1d93c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rast
@@ -0,0 +1,31 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs
new file mode 100644
index 000000000..a94161dff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_let_stmt_ascription.rs
@@ -0,0 +1 @@
+fn f() { let x: i32; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast
new file mode 100644
index 000000000..88f8a7345
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "syn"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Token"
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ UNDERSCORE "_"
+ R_BRACK "]"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs
new file mode 100644
index 000000000..8d43a53d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0194_macro_inside_generic_arg.rs
@@ -0,0 +1 @@
+type A = Foo<syn::Token![_]>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast
new file mode 100644
index 000000000..a23ddf69f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "MyStruct"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs
new file mode 100644
index 000000000..00d8feba9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0196_pub_tuple_field.rs
@@ -0,0 +1 @@
+struct MyStruct(pub (u32, u32));
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast
new file mode 100644
index 000000000..fb8aa5acc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs
new file mode 100644
index 000000000..22a5b5f3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0197_destructuring_assignment_struct_rest_pattern.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ S { .. } = S {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast
new file mode 100644
index 000000000..5f53d3451
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs
new file mode 100644
index 000000000..91acfb3a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0198_destructuring_assignment_wildcard_pat.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ _ = 1;
+ Some(_) = None;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast
new file mode 100644
index 000000000..0607ff54f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rast
@@ -0,0 +1,34 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs
new file mode 100644
index 000000000..551bde0b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_const_param_default_expression.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = { 1 }>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast
new file mode 100644
index 000000000..f14080c90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rast
@@ -0,0 +1,95 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ CONST_KW "const"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs
new file mode 100644
index 000000000..c57d24b2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_effect_blocks.rs
@@ -0,0 +1,4 @@
+fn f() { unsafe { } }
+fn f() { const { } }
+fn f() { async { } }
+fn f() { async move { } }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast
new file mode 100644
index 000000000..7210b7389
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs
new file mode 100644
index 000000000..a602d07f0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0199_type_item_where_clause_deprecated.rs
@@ -0,0 +1 @@
+type Foo where Foo: Copy = ();
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast
new file mode 100644
index 000000000..fa2733e7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "N"
+ EQ "="
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "3"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ CONST
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "TEST"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "N"
+ EQ "="
+ CONST_ARG
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "TEST"
+ R_CURLY "}"
+ R_ANGLE ">"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs
new file mode 100644
index 000000000..b43c4e36a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_assoc_const_eq.rs
@@ -0,0 +1,3 @@
+fn foo<F: Foo<N=3>>() {}
+const TEST: usize = 3;
+fn bar<F: Foo<N={TEST}>>() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast
new file mode 100644
index 000000000..8e5231365
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs
new file mode 100644
index 000000000..879ecffa7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0200_const_param_default_literal.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = -1>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast
new file mode 100644
index 000000000..56e2d1095
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rast
@@ -0,0 +1,47 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ QUESTION "?"
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sized"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs
new file mode 100644
index 000000000..f80dd90d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0201_question_for_type_trait_bound.rs
@@ -0,0 +1 @@
+fn f<T>() where T: ?for<> Sized {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast
new file mode 100644
index 000000000..40b9ef804
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rast
@@ -0,0 +1 @@
+SOURCE_FILE
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0000_empty.rs
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast
new file mode 100644
index 000000000..0e9639f23
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rast
@@ -0,0 +1,39 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs
new file mode 100644
index 000000000..512aeb3e7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0001_struct_item.rs
@@ -0,0 +1,3 @@
+struct S<T: Copy> {
+ f: T,
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast
new file mode 100644
index 000000000..dd52e5850
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "foo"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs
new file mode 100644
index 000000000..cc3866d25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0002_struct_item_field.rs
@@ -0,0 +1,3 @@
+struct S {
+ foo: u32
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast
new file mode 100644
index 000000000..698957189
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rast
@@ -0,0 +1,2 @@
+SOURCE_FILE
+ SHEBANG "#!/use/bin/env rusti"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs
new file mode 100644
index 000000000..53dc9e617
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0004_file_shebang.rs
@@ -0,0 +1 @@
+#!/use/bin/env rusti \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast
new file mode 100644
index 000000000..756d20e4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs
new file mode 100644
index 000000000..03210551c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0005_fn_item.rs
@@ -0,0 +1,2 @@
+fn foo() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast
new file mode 100644
index 000000000..cb63ba80e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rast
@@ -0,0 +1,194 @@
+SOURCE_FILE
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "ident"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "ident"
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "100"
+ COMMA ","
+ WHITESPACE " "
+ TRUE_KW "true"
+ COMMA ","
+ WHITESPACE " "
+ STRING "\"true\""
+ COMMA ","
+ WHITESPACE " "
+ IDENT "ident"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ INT_NUMBER "100"
+ COMMA ","
+ WHITESPACE " "
+ IDENT "ident"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"hello\""
+ COMMA ","
+ WHITESPACE " "
+ IDENT "ident"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "100"
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "100"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "enabled"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "enabled"
+ TOKEN_TREE
+ L_PAREN "("
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"hello\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "repr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "C"
+ COMMA ","
+ WHITESPACE " "
+ IDENT "align"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ INT_NUMBER "4"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "repr"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "C"
+ COMMA ","
+ WHITESPACE " "
+ IDENT "align"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "4"
+ R_PAREN ")"
+ R_PAREN ")"
+ R_BRACK "]"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs
new file mode 100644
index 000000000..e81f8b1e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0006_inner_attributes.rs
@@ -0,0 +1,10 @@
+#![attr]
+#![attr(true)]
+#![attr(ident)]
+#![attr(ident, 100, true, "true", ident = 100, ident = "hello", ident(100))]
+#![attr(100)]
+#![attr(enabled = true)]
+#![enabled(true)]
+#![attr("hello")]
+#![repr(C, align = 4)]
+#![repr(C, align(4))] \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast
new file mode 100644
index 000000000..8b9259fd6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rast
@@ -0,0 +1,40 @@
+SOURCE_FILE
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ EXTERN_CRATE
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ CRATE_KW "crate"
+ WHITESPACE " "
+ NAME_REF
+ SELF_KW "self"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs
new file mode 100644
index 000000000..ab81a608c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0007_extern_crate.rs
@@ -0,0 +1,3 @@
+extern crate foo;
+extern crate foo as bar;
+extern crate self as baz;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast
new file mode 100644
index 000000000..adee67181
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "e"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ MODULE
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs
new file mode 100644
index 000000000..4ff0d9795
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0008_mod_item.rs
@@ -0,0 +1,12 @@
+mod c {
+ fn foo() {
+ }
+ struct S {}
+}
+
+mod d {
+ #![attr]
+ mod e;
+ mod f {
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast
new file mode 100644
index 000000000..04a44ef7e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs
new file mode 100644
index 000000000..05a6aff83
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0009_use_item.rs
@@ -0,0 +1,2 @@
+use foo;
+use ::bar; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast
new file mode 100644
index 000000000..ddadec817
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs
new file mode 100644
index 000000000..1e71b7a6c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0010_use_path_segments.rs
@@ -0,0 +1,2 @@
+use ::foo::bar::baz;
+use foo::bar::baz;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast
new file mode 100644
index 000000000..dbb9bc54d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rast
@@ -0,0 +1,61 @@
+SOURCE_FILE
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "test"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Ignore"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ MODULE
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "path"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"a.rs\""
+ R_BRACK "]"
+ WHITESPACE "\n"
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs
new file mode 100644
index 000000000..6f04cb171
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0011_outer_attribute.rs
@@ -0,0 +1,6 @@
+#[cfg(test)]
+#[Ignore]
+fn foo() {}
+
+#[path = "a.rs"]
+mod b;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast
new file mode 100644
index 000000000..a95bc2301
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rast
@@ -0,0 +1,133 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ MACRO_DEF
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "m"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ COLON ":"
+ IDENT "ident"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ R_PAREN ")"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ R_PAREN ")"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ L_PAREN "("
+ IN_KW "in"
+ WHITESPACE " "
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ R_PAREN ")"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "e"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs
new file mode 100644
index 000000000..129d486fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0012_visibility.rs
@@ -0,0 +1,6 @@
+fn a() {}
+pub fn b() {}
+pub macro m($:ident) {}
+pub(crate) fn c() {}
+pub(super) fn d() {}
+pub(in foo::bar::baz) fn e() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast
new file mode 100644
index 000000000..8a0149cac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ SUPER_KW "super"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs
new file mode 100644
index 000000000..9d9eb9917
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0013_use_path_self_super.rs
@@ -0,0 +1,2 @@
+use self::foo;
+use super::super::bar;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast
new file mode 100644
index 000000000..b37edc365
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rast
@@ -0,0 +1,95 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ USE_TREE_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ STAR "*"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "c"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs
new file mode 100644
index 000000000..5e4aa3a33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0014_use_tree.rs
@@ -0,0 +1,7 @@
+use *;
+use ::*;
+use ::{};
+use {};
+use foo::*;
+use foo::{};
+use ::foo::{a, b, c};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast
new file mode 100644
index 000000000..ddf8aad6f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rast
@@ -0,0 +1,65 @@
+SOURCE_FILE
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ COLON2 "::"
+ USE_TREE_LIST
+ L_CURLY "{"
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ STAR "*"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ COLON2 "::"
+ STAR "*"
+ COMMA ","
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "foo"
+ WHITESPACE " "
+ RENAME
+ AS_KW "as"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs
new file mode 100644
index 000000000..46a0783a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs
@@ -0,0 +1,2 @@
+use foo as bar;
+use foo::{a as b, *, ::*, ::foo as x};
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast
new file mode 100644
index 000000000..eb2724e2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rast
@@ -0,0 +1,93 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "E"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "y"
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs
new file mode 100644
index 000000000..69638350c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0016_struct_flavors.rs
@@ -0,0 +1,10 @@
+struct A;
+struct B {}
+struct C();
+
+struct D {
+ a: u32,
+ pub b: u32
+}
+
+struct E(pub x, y,);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast
new file mode 100644
index 000000000..7c914e254
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "a"
+ COMMA ","
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs
new file mode 100644
index 000000000..fe0a7bb97
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0017_attr_trailing_comma.rs
@@ -0,0 +1,2 @@
+#[foo(a,)]
+fn foo() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast
new file mode 100644
index 000000000..11ebc7efb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rast
@@ -0,0 +1,274 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S1"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S2"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S3"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "u"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S4"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S5"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S6"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S7"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S8"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S9"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S10"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S11"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE " "
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S12"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ PLUS "+"
+ COMMA ","
+ WHITESPACE " "
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ COMMA ","
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S13"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S14"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S15"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs
new file mode 100644
index 000000000..88c544923
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0018_struct_type_params.rs
@@ -0,0 +1,17 @@
+struct S1<T>;
+struct S2<T>(u32);
+struct S3<T> { u: u32 }
+
+struct S4<>;
+struct S5<'a>;
+struct S6<'a:>;
+struct S7<'a: 'b>;
+struct S8<'a: 'b + >;
+struct S9<'a: 'b + 'c>;
+struct S10<'a,>;
+struct S11<'a, 'b>;
+struct S12<'a: 'b+, 'b: 'c,>;
+
+struct S13<T>;
+struct S14<T, U>;
+struct S15<'a, T, U>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast
new file mode 100644
index 000000000..dd47e3aa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rast
@@ -0,0 +1,155 @@
+SOURCE_FILE
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E1"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E2"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E3"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "X"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E4"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "X"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "E5"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "A"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "B"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "C"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ WHITESPACE "\n "
+ RECORD_FIELD
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f64"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "F"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "D"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ COMMA ","
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "E"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs
new file mode 100644
index 000000000..7a1afa0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0019_enums.rs
@@ -0,0 +1,25 @@
+enum E1 {
+}
+
+enum E2<T> {
+}
+
+enum E3 {
+ X
+}
+
+enum E4 {
+ X,
+}
+
+enum E5 {
+ A,
+ B = 92,
+ C {
+ a: u32,
+ pub b: f64,
+ },
+ F {},
+ D(u32,),
+ E(),
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast
new file mode 100644
index 000000000..043a966ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rast
@@ -0,0 +1,283 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "B"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ TYPE_BOUND_LIST
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "C"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "D"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "E"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ WHITESPACE " "
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "F"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "G"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "H"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_KW "self"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "I"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ TYPE_BOUND_LIST
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "U"
+ COLON ":"
+ TYPE_BOUND_LIST
+ COMMA ","
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "K"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ COMMA ","
+ WHITESPACE " "
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'d"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Clone"
+ R_ANGLE ">"
+ SEMICOLON ";"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs
new file mode 100644
index 000000000..712898978
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0020_type_param_bounds.rs
@@ -0,0 +1,10 @@
+struct A<T>;
+struct B<T:>;
+struct C<T: 'a>;
+struct D<T: 'a + >;
+struct E<T: 'a + 'd >;
+struct F<T: 'a + 'd + Clone>;
+struct G<T: Clone + Copy>;
+struct H<T: ::Foo + self::Bar + 'a>;
+struct I<T:, U:,>;
+struct K<'a: 'd, 'd: 'a + 'b, T: 'a + 'd + Clone>; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast
new file mode 100644
index 000000000..ef2fb66dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rast
@@ -0,0 +1,21 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs
new file mode 100644
index 000000000..f5fe0e6ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0022_empty_extern_block.rs
@@ -0,0 +1,5 @@
+extern {
+}
+
+extern "C" {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast
new file mode 100644
index 000000000..b164e828e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ STATIC
+ STATIC_KW "static"
+ WHITESPACE " "
+ NAME
+ IDENT "FOO"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ STATIC
+ STATIC_KW "static"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "BAR"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs
new file mode 100644
index 000000000..5fb92ce33
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0023_static_items.rs
@@ -0,0 +1,2 @@
+static FOO: u32 = 1;
+static mut BAR: i32 = 92;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast
new file mode 100644
index 000000000..40b9ef804
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rast
@@ -0,0 +1 @@
+SOURCE_FILE
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0024_const_item.rs
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast
new file mode 100644
index 000000000..9c5f5ac64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rast
@@ -0,0 +1,33 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs
new file mode 100644
index 000000000..289809809
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0025_extern_fn_in_block.rs
@@ -0,0 +1,3 @@
+fn main() {
+ extern fn f() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast
new file mode 100644
index 000000000..ca9a3df86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rast
@@ -0,0 +1,32 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs
new file mode 100644
index 000000000..7641a3d28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0026_const_fn_in_block.rs
@@ -0,0 +1,3 @@
+fn main() {
+ const fn f() {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast
new file mode 100644
index 000000000..88ebd1095
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ BLOCK_EXPR
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs
new file mode 100644
index 000000000..f3c5ff938
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0027_unsafe_fn_in_block.rs
@@ -0,0 +1,4 @@
+fn main() {
+ unsafe fn f() {}
+ unsafe { 92 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast
new file mode 100644
index 000000000..ae08c0756
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rast
@@ -0,0 +1,186 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "binding_power"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ STAR "*"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ WHITESPACE " "
+ PERCENT "%"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "4"
+ WHITESPACE " "
+ MINUS "-"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ SLASH "/"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "6"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ STAR "*"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ SHL "<<"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AMP "&"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ SHR ">>"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ CARET "^"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ AMP "&"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ CARET "^"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PIPE "|"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ COMMENT "//1 || 2 && 2;"
+ WHITESPACE "\n "
+ COMMENT "//1 .. 2 || 3;"
+ WHITESPACE "\n "
+ COMMENT "//1 = 2 .. 3;"
+ WHITESPACE "\n "
+ COMMENT "//---&*1 - --2 * 9;"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs
new file mode 100644
index 000000000..cc9598470
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0028_operator_binding_power.rs
@@ -0,0 +1,14 @@
+fn binding_power() {
+ let x = 1 + 2 * 3 % 4 - 5 / 6;
+ 1 + 2 * 3;
+ 1 << 2 + 3;
+ 1 & 2 >> 3;
+ 1 ^ 2 & 3;
+ 1 | 2 ^ 3;
+ 1 == 2 | 3;
+ 1 && 2 == 3;
+ //1 || 2 && 2;
+ //1 .. 2 || 3;
+ //1 = 2 .. 3;
+ //---&*1 - --2 * 9;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast
new file mode 100644
index 000000000..5acc54e71
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rast
@@ -0,0 +1,152 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ DOT2 ".."
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RANGE_EXPR
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ FALSE_KW "false"
+ DOT2 ".."
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n \n "
+ EXPR_STMT
+ RANGE_EXPR
+ DOT2EQ "..="
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RANGE_EXPR
+ DOT2EQ "..="
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ FALSE_KW "false"
+ DOT2EQ "..="
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs
new file mode 100644
index 000000000..f9ff444d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs
@@ -0,0 +1,11 @@
+fn foo() {
+ ..1 + 1;
+ ..z = 2;
+ x = false..1 == 1;
+ let x = 1..;
+
+ ..=1 + 1;
+ ..=z = 2;
+ x = false..=1 == 1;
+ let x = 1..;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast
new file mode 100644
index 000000000..44211c7c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rast
@@ -0,0 +1,64 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ CHAR "'c'u32"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"string\"invalid"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE "b'b'_suff"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ BYTE_STRING "b\"bs\"invalid"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs
new file mode 100644
index 000000000..261aad1fb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_string_suffixes.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let _ = 'c'u32;
+ let _ = "string"invalid;
+ let _ = b'b'_suff;
+ let _ = b"bs"invalid;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast
new file mode 100644
index 000000000..44423581e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rast
@@ -0,0 +1,61 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Runnable"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "handler"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "TraitWithExpr"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "fn_with_expr"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ ARRAY_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs
new file mode 100644
index 000000000..ac30843ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0030_traits.rs
@@ -0,0 +1,7 @@
+trait Runnable {
+ fn handler();
+}
+
+trait TraitWithExpr {
+ fn fn_with_expr(x: [i32; 1]);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast
new file mode 100644
index 000000000..70b527808
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rast
@@ -0,0 +1,973 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "socket"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "domain"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "ty"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "protocol"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bind"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "fd"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addr"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "connect"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "listen"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "backlog"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "getsockname"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address_len"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "getsockopt"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "sockfd"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "level"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "optname"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "optval"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "optlen"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "setsockopt"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "level"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "name"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "value"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "option_len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "getpeername"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "address_len"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "sendto"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addr"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addrlen"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "send"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "recvfrom"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addr"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "sockaddr"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "addrlen"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "socklen_t"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "recv"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "socket"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "buf"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_void"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "len"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "size_t"
+ COMMA ","
+ WHITESPACE "\n "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "flags"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "c_int"
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "ssize_t"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs
new file mode 100644
index 000000000..b33ac273c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0031_extern.rs
@@ -0,0 +1,29 @@
+extern {
+ pub fn socket(domain: ::c_int, ty: ::c_int, protocol: ::c_int) -> ::c_int;
+ pub fn bind(fd: ::c_int, addr: *const sockaddr, len: socklen_t) -> ::c_int;
+ pub fn connect(socket: ::c_int, address: *const sockaddr,
+ len: socklen_t) -> ::c_int;
+ pub fn listen(socket: ::c_int, backlog: ::c_int) -> ::c_int;
+ pub fn getsockname(socket: ::c_int, address: *mut sockaddr,
+ address_len: *mut socklen_t) -> ::c_int;
+ pub fn getsockopt(sockfd: ::c_int,
+ level: ::c_int,
+ optname: ::c_int,
+ optval: *mut ::c_void,
+ optlen: *mut ::socklen_t) -> ::c_int;
+ pub fn setsockopt(socket: ::c_int, level: ::c_int, name: ::c_int,
+ value: *const ::c_void,
+ option_len: socklen_t) -> ::c_int;
+ pub fn getpeername(socket: ::c_int, address: *mut sockaddr,
+ address_len: *mut socklen_t) -> ::c_int;
+ pub fn sendto(socket: ::c_int, buf: *const ::c_void, len: ::size_t,
+ flags: ::c_int, addr: *const sockaddr,
+ addrlen: socklen_t) -> ::ssize_t;
+ pub fn send(socket: ::c_int, buf: *const ::c_void, len: ::size_t,
+ flags: ::c_int) -> ::ssize_t;
+ pub fn recvfrom(socket: ::c_int, buf: *mut ::c_void, len: ::size_t,
+ flags: ::c_int, addr: *mut ::sockaddr,
+ addrlen: *mut ::socklen_t) -> ::ssize_t;
+ pub fn recv(socket: ::c_int, buf: *mut ::c_void, len: ::size_t,
+ flags: ::c_int) -> ::ssize_t;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast
new file mode 100644
index 000000000..86f6af97c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast
@@ -0,0 +1,93 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test_serialization"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "SER"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "SER"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Serialize"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'de"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Deserialize"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'de"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "PartialEq"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "fmt"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs
new file mode 100644
index 000000000..588170fbe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rs
@@ -0,0 +1,4 @@
+fn test_serialization<SER>()
+where
+ SER: Serialize + for<'de> Deserialize<'de> + PartialEq + std::fmt::Debug,
+{}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast
new file mode 100644
index 000000000..df1acd6b8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rast
@@ -0,0 +1,223 @@
+SOURCE_FILE
+ FN
+ COMMENT "// format with label break value."
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'empty_block"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "do_thing"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "condition_not_met"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "do_next_thing"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "condition_not_met"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "do_last_thing"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "result"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ LABEL
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ COLON ":"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "// comment"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "/* comment */"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ LIFETIME
+ LIFETIME_IDENT "'block"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LITERAL
+ INT_NUMBER "3"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs
new file mode 100644
index 000000000..728d78137
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0033_label_break.rs
@@ -0,0 +1,28 @@
+// format with label break value.
+fn main() {
+ 'empty_block: {}
+
+ 'block: {
+ do_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_next_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_last_thing();
+ }
+
+ let result = 'block: {
+ if foo() {
+ // comment
+ break 'block 1;
+ }
+ if bar() {
+ /* comment */
+ break 'block 2;
+ }
+ 3
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast
new file mode 100644
index 000000000..2b3b86ebf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "make_query"
+ ARG_LIST
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ CRATE_KW "crate"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "module_map"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "module_tree"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs
new file mode 100644
index 000000000..f1ed30220
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0034_crate_path_in_call.rs
@@ -0,0 +1,3 @@
+fn main() {
+ make_query(crate::module_map::module_tree);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast
new file mode 100644
index 000000000..318d492ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rast
@@ -0,0 +1,2339 @@
+SOURCE_FILE
+ COMMENT "//! Adapted from a `rustc` test, which can be found at "
+ WHITESPACE "\n"
+ COMMENT "//! https://github.com/rust-lang/rust/blob/6d34ec18c7d7e574553f6347ecf08e1e1c45c13d/src/test/run-pass/weird-exprs.rs."
+ WHITESPACE "\n"
+ COMMENT "//! "
+ WHITESPACE "\n"
+ COMMENT "//! Reported to rust-analyzer in https://github.com/rust-lang/rust-analyzer/issues/290"
+ WHITESPACE "\n\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "non_camel_case_types"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "dead_code"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "unreachable_code"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "unused_parens"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n\n"
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "recursion_limit"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ STRING "\"128\""
+ R_BRACK "]"
+ WHITESPACE "\n\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cell"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Cell"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ USE
+ USE_KW "use"
+ WHITESPACE " "
+ USE_TREE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "mem"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "swap"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ COMMENT "// Just a grab bag of stuff that you wouldn't want to actually write."
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "strange"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "funny"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "_x"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "f"
+ ARG_LIST
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "what"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "the"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Cell"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ PREFIX_EXPR
+ BANG "!"
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "get"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "set"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ TRUE_KW "true"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Cell"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "new"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ FALSE_KW "false"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "dont"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "the"
+ ARG_LIST
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ R_PAREN ")"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "dont"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "i"
+ DOT "."
+ IDENT "get"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "zombiejesus"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE " "
+ ELSE_KW "else"
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RETURN_EXPR
+ RETURN_KW "return"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "notsure"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "_x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "isize"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "_y"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "_z"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ L_ANGLE "<"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ PLUSEQ "+="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_x"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "swap"
+ ARG_LIST
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_y"
+ COMMA ","
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_z"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "swap"
+ ARG_LIST
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_y"
+ COMMA ","
+ WHITESPACE " "
+ REF_EXPR
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "_z"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "canttouchthis"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "p"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ TRUE_KW "true"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "p"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_c"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "p"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_b"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "println"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"{}\""
+ COMMA ","
+ WHITESPACE " "
+ INT_NUMBER "0"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "angrydome"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BREAK_EXPR
+ BREAK_KW "break"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "i"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ PLUSEQ "+="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE " "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ CONTINUE_EXPR
+ CONTINUE_KW "continue"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ MATCH_ARM
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ R_CURLY "}"
+ COMMA ","
+ WHITESPACE " "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "panic"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"wat\""
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "evil_lincoln"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_evil"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "println"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"lincoln\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "dots"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "String"
+ COLON ":"
+ COLON ":"
+ IDENT "from"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"..................................................\""
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ IDENT "format"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"{:?}\""
+ COMMA ","
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE "\n "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ WHITESPACE " "
+ DOT "."
+ DOT "."
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "u8"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "u8"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ NEQ "!="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0u8"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "8u8"
+ COMMA ","
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IDENT "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ IDENT "u8"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MOD_KW "mod"
+ WHITESPACE " "
+ IDENT "u8"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ IDENT "u8"
+ L_ANGLE "<"
+ LIFETIME_IDENT "'u8"
+ COLON ":"
+ WHITESPACE " "
+ LIFETIME_IDENT "'u8"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LIFETIME_IDENT "'u8"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "u8"
+ COLON ":"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME_IDENT "'u8"
+ WHITESPACE " "
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ MINUS "-"
+ R_ANGLE ">"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME_IDENT "'u8"
+ WHITESPACE " "
+ IDENT "u8"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ STRING "\"u8\""
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ IDENT "u8"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ IDENT "u8"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_KW "let"
+ WHITESPACE " "
+ AMP "&"
+ IDENT "u8"
+ COLON ":"
+ WHITESPACE " "
+ AMP "&"
+ IDENT "u8"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ IDENT "u8"
+ COLON ":"
+ COLON ":"
+ IDENT "u8"
+ TOKEN_TREE
+ L_PAREN "("
+ AMP "&"
+ INT_NUMBER "8u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CRATE_KW "crate"
+ COLON ":"
+ COLON ":"
+ IDENT "u8"
+ TOKEN_TREE
+ L_PAREN "("
+ INT_NUMBER "0u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ IDENT "u8"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "fishy"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "String"
+ COLON ":"
+ COLON ":"
+ IDENT "from"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"><>\""
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ IDENT "String"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ COLON ":"
+ COLON ":"
+ IDENT "from"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"><>\""
+ R_PAREN ")"
+ DOT "."
+ IDENT "chars"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ DOT "."
+ IDENT "rev"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ DOT "."
+ IDENT "collect"
+ COLON ":"
+ COLON ":"
+ L_ANGLE "<"
+ IDENT "String"
+ R_ANGLE ">"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "union"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ UNION
+ UNION_KW "union"
+ WHITESPACE " "
+ NAME
+ IDENT "union"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'union"
+ R_ANGLE ">"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "union"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'union"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "union"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'union"
+ R_ANGLE ">"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "special_characters"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "val"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PREFIX_EXPR
+ BANG "!"
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ CALL_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ CLOSURE_EXPR
+ PARAM_LIST
+ PIPE "|"
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ REST_PAT
+ DOT2 ".."
+ R_PAREN ")"
+ COLON ":"
+ TUPLE_TYPE
+ L_PAREN "("
+ INFER_TYPE
+ UNDERSCORE "_"
+ COMMA ","
+ INFER_TYPE
+ UNDERSCORE "_"
+ R_PAREN ")"
+ COMMA ","
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "__"
+ AT "@"
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ PIPE "|"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "__"
+ R_PAREN ")"
+ ARG_LIST
+ L_PAREN "("
+ TUPLE_EXPR
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ PREFIX_EXPR
+ STAR "*"
+ LITERAL
+ STRING "\"\\\\\""
+ COMMA ","
+ LITERAL
+ CHAR "'🤔'"
+ R_PAREN ")"
+ COMMENT "/**/"
+ COMMA ","
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ EQ2 "=="
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ EXPR_STMT
+ REF_EXPR
+ AMP "&"
+ INDEX_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ R_BRACK "]"
+ L_BRACK "["
+ RANGE_EXPR
+ DOT2 ".."
+ R_BRACK "]"
+ SEMICOLON ";"
+ R_CURLY "}"
+ R_PAREN ")"
+ COMMENT "//"
+ WHITESPACE "\n "
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "assert"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ BANG "!"
+ IDENT "val"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "punch_card"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "fmt"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2EQ "..="
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "ktulhu"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ EXPR_STMT
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ SEMICOLON ";"
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "strange"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "funny"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "what"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "zombiejesus"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "notsure"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "canttouchthis"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "angrydome"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "evil_lincoln"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "dots"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "8u8"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "fishy"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "union"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "special_characters"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "punch_card"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "ktulhu"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs
new file mode 100644
index 000000000..fb7d706b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0035_weird_exprs.rs
@@ -0,0 +1,154 @@
+//! Adapted from a `rustc` test, which can be found at
+//! https://github.com/rust-lang/rust/blob/6d34ec18c7d7e574553f6347ecf08e1e1c45c13d/src/test/run-pass/weird-exprs.rs.
+//!
+//! Reported to rust-analyzer in https://github.com/rust-lang/rust-analyzer/issues/290
+
+#![allow(non_camel_case_types)]
+#![allow(dead_code)]
+#![allow(unreachable_code)]
+#![allow(unused_parens)]
+
+#![recursion_limit = "128"]
+
+use std::cell::Cell;
+use std::mem::swap;
+
+// Just a grab bag of stuff that you wouldn't want to actually write.
+
+fn strange() -> bool { let _x: bool = return true; }
+
+fn funny() {
+ fn f(_x: ()) { }
+ f(return);
+}
+
+fn what() {
+ fn the(x: &Cell<bool>) {
+ return while !x.get() { x.set(true); };
+ }
+ let i = &Cell::new(false);
+ let dont = {||the(i)};
+ dont();
+ assert!((i.get()));
+}
+
+fn zombiejesus() {
+ loop {
+ while (return) {
+ if (return) {
+ match (return) {
+ 1 => {
+ if (return) {
+ return
+ } else {
+ return
+ }
+ }
+ _ => { return }
+ };
+ } else if (return) {
+ return;
+ }
+ }
+ if (return) { break; }
+ }
+}
+
+fn notsure() {
+ let mut _x: isize;
+ let mut _y = (_x = 0) == (_x = 0);
+ let mut _z = (_x = 0) < (_x = 0);
+ let _a = (_x += 0) == (_x = 0);
+ let _b = swap(&mut _y, &mut _z) == swap(&mut _y, &mut _z);
+}
+
+fn canttouchthis() -> usize {
+ fn p() -> bool { true }
+ let _a = (assert!((true)) == (assert!(p())));
+ let _c = (assert!((p())) == ());
+ let _b: bool = (println!("{}", 0) == (return 0));
+}
+
+fn angrydome() {
+ loop { if break { } }
+ let mut i = 0;
+ loop { i += 1; if i == 1 { match (continue) { 1 => { }, _ => panic!("wat") } }
+ break; }
+}
+
+fn evil_lincoln() { let _evil = println!("lincoln"); }
+
+fn dots() {
+ assert_eq!(String::from(".................................................."),
+ format!("{:?}", .. .. .. .. .. .. .. .. .. .. .. .. ..
+ .. .. .. .. .. .. .. .. .. .. .. ..));
+}
+
+fn u8(u8: u8) {
+ if u8 != 0u8 {
+ assert_eq!(8u8, {
+ macro_rules! u8 {
+ (u8) => {
+ mod u8 {
+ pub fn u8<'u8: 'u8 + 'u8>(u8: &'u8 u8) -> &'u8 u8 {
+ "u8";
+ u8
+ }
+ }
+ };
+ }
+
+ u8!(u8);
+ let &u8: &u8 = u8::u8(&8u8);
+ crate::u8(0u8);
+ u8
+ });
+ }
+}
+
+fn fishy() {
+ assert_eq!(String::from("><>"),
+ String::<>::from::<>("><>").chars::<>().rev::<>().collect::<String>());
+}
+
+fn union() {
+ union union<'union> { union: &'union union<'union>, }
+}
+
+fn special_characters() {
+ let val = !((|(..):(_,_),__@_|__)((&*"\\",'🤔')/**/,{})=={&[..=..][..];})//
+ ;
+ assert!(!val);
+}
+
+fn punch_card() -> impl std::fmt::Debug {
+ ..=..=.. .. .. .. .. .. .. .. .. .. .. ..=.. ..
+ ..=.. ..=.. .. .. .. .. .. .. .. .. ..=..=..=..
+ ..=.. ..=.. ..=.. ..=.. .. ..=..=.. .. ..=.. ..
+ ..=..=.. .. ..=.. ..=.. ..=.. .. .. .. ..=.. ..
+ ..=.. ..=.. ..=.. ..=.. .. ..=.. .. .. ..=.. ..
+ ..=.. ..=.. ..=.. ..=.. .. .. ..=.. .. ..=.. ..
+ ..=.. ..=.. .. ..=..=.. ..=..=.. .. .. ..=.. ..
+}
+
+fn ktulhu() {
+ ;;;();;;;;;;;;()
+}
+
+pub fn main() {
+ strange();
+ funny();
+ what();
+ zombiejesus();
+ notsure();
+ canttouchthis();
+ angrydome();
+ evil_lincoln();
+ dots();
+ u8(8u8);
+ fishy();
+ union();
+ special_characters();
+ punch_card();
+ ktulhu();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
new file mode 100644
index 000000000..9382020e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
@@ -0,0 +1,93 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/311"
+ WHITESPACE "\n\n"
+ FN
+ VISIBILITY
+ PUB_KW "pub"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "S"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "String"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Item"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Eq"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ METHOD_CALL_EXPR
+ LITERAL
+ STRING "\"\""
+ DOT "."
+ NAME_REF
+ IDENT "to_owned"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs
new file mode 100644
index 000000000..f8a085dc7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0036_fully_qualified.rs
@@ -0,0 +1,8 @@
+// https://github.com/rust-lang/rust-analyzer/issues/311
+
+pub fn foo<S: Iterator>() -> String
+where
+ <S as Iterator>::Item: Eq,
+{
+ "".to_owned()
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast
new file mode 100644
index 000000000..b4a3fc629
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/357"
+ WHITESPACE "\n\n"
+ COMMENT "//! docs"
+ WHITESPACE "\n"
+ MODULE
+ COMMENT "// non-docs"
+ WHITESPACE "\n"
+ MOD_KW "mod"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs
new file mode 100644
index 000000000..05f6cf05c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0037_mod.rs
@@ -0,0 +1,5 @@
+// https://github.com/rust-lang/rust-analyzer/issues/357
+
+//! docs
+// non-docs
+mod foo {} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast
new file mode 100644
index 000000000..e89763042
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rast
@@ -0,0 +1,43 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs
new file mode 100644
index 000000000..8bfc341a5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0038_where_pred_type.rs
@@ -0,0 +1 @@
+fn test() where (u64, u64): Foo {} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast
new file mode 100644
index 000000000..2eeed781c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rast
@@ -0,0 +1,16 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "r#foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs
new file mode 100644
index 000000000..8380d1e79
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0039_raw_fn_item.rs
@@ -0,0 +1,2 @@
+fn r#foo() {
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast
new file mode 100644
index 000000000..ceb918420
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ RECORD_FIELD
+ NAME
+ IDENT "r#foo"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs
new file mode 100644
index 000000000..098a60a72
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0040_raw_struct_item_field.rs
@@ -0,0 +1,3 @@
+struct S {
+ r#foo: u32
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast
new file mode 100644
index 000000000..dacf0ce74
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "r#struct"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "92"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "r#trait"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "r#struct"
+ WHITESPACE " "
+ STAR "*"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs
new file mode 100644
index 000000000..d59a6d347
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0041_raw_keywords.rs
@@ -0,0 +1 @@
+fn foo() { let r#struct = 92; let r#trait = r#struct * 2; } \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
new file mode 100644
index 000000000..a536b0e88
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
@@ -0,0 +1,127 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/596"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "unimplemented"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "baz"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bool"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "baz"
+ ARG_LIST
+ L_PAREN "("
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs
new file mode 100644
index 000000000..09b18982e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rs
@@ -0,0 +1,15 @@
+// https://github.com/rust-lang/rust-analyzer/issues/596
+
+struct Foo;
+
+impl Foo {
+ fn bar() -> bool {
+ unimplemented!()
+ }
+}
+
+fn baz(_: bool) {}
+
+fn main() {
+ baz(<Foo>::bar())
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
new file mode 100644
index 000000000..3b02c3f96
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
@@ -0,0 +1,110 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/674"
+ WHITESPACE "\n\n"
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "Repr"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "raw"
+ COLON ":"
+ WHITESPACE " "
+ ARRAY_TYPE
+ L_BRACK "["
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ SEMICOLON ";"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "abc"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ INDEX_EXPR
+ FIELD_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Repr"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "raw"
+ COLON ":"
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "0"
+ R_BRACK "]"
+ WHITESPACE " "
+ R_CURLY "}"
+ DOT "."
+ NAME_REF
+ IDENT "raw"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "0"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Repr"
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "raw"
+ COLON ":"
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "0"
+ R_BRACK "]"
+ R_CURLY "}"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs
new file mode 100644
index 000000000..961dc8c7d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0043_complex_assignment.rs
@@ -0,0 +1,8 @@
+// https://github.com/rust-lang/rust-analyzer/issues/674
+
+struct Repr { raw: [u8; 1] }
+
+fn abc() {
+ Repr { raw: [0] }.raw[0] = 0;
+ Repr{raw:[0]}();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast
new file mode 100644
index 000000000..f3c20337e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rast
@@ -0,0 +1,77 @@
+SOURCE_FILE
+ FN
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/677"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "feature"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ STRING "\"backtrace\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "exit_code"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "panic"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "catch_unwind"
+ ARG_LIST
+ L_PAREN "("
+ CLOSURE_EXPR
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "main"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs
new file mode 100644
index 000000000..7d1524879
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0044_let_attrs.rs
@@ -0,0 +1,5 @@
+// https://github.com/rust-lang/rust-analyzer/issues/677
+fn main() {
+ #[cfg(feature = "backtrace")]
+ let exit_code = panic::catch_unwind(move || main());
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast
new file mode 100644
index 000000000..bef138071
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rast
@@ -0,0 +1,230 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "inner"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attributes allowed here\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ COMMENT "//! As are ModuleDoc style comments"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attributes are allowed in blocks used as statements\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Being validated is not affected by duplcates\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ COMMENT "//! As are ModuleDoc style comments"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Inner attributes are allowed in blocks when they are the last statement of another block\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n "
+ COMMENT "//! As are ModuleDoc style comments"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "outer"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BLOCK_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"Outer attributes are always allowed\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/689"
+ WHITESPACE "\n"
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Whatever"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "salsa_event"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "event_fn"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Event"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "allow"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "unused_variables"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ COMMENT "// this is `inner_attr` of the block"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs
new file mode 100644
index 000000000..f16c4566e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0045_block_attrs.rs
@@ -0,0 +1,24 @@
+fn inner() {
+ #![doc("Inner attributes allowed here")]
+ //! As are ModuleDoc style comments
+ {
+ #![doc("Inner attributes are allowed in blocks used as statements")]
+ #![doc("Being validated is not affected by duplcates")]
+ //! As are ModuleDoc style comments
+ };
+ {
+ #![doc("Inner attributes are allowed in blocks when they are the last statement of another block")]
+ //! As are ModuleDoc style comments
+ }
+}
+
+fn outer() {
+ let _ = #[doc("Outer attributes are always allowed")] {};
+}
+
+// https://github.com/rust-lang/rust-analyzer/issues/689
+impl Whatever {
+ fn salsa_event(&self, event_fn: impl Fn() -> Event<Self>) {
+ #![allow(unused_variables)] // this is `inner_attr` of the block
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast
new file mode 100644
index 000000000..4eb51cfdf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ COMMENT "//! This is a doc comment"
+ WHITESPACE "\n "
+ ATTR
+ POUND "#"
+ BANG "!"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "doc"
+ TOKEN_TREE
+ L_PAREN "("
+ STRING "\"This is also a doc comment\""
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs
new file mode 100644
index 000000000..fe67e2df4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0046_extern_inner_attributes.rs
@@ -0,0 +1,4 @@
+extern "C" {
+ //! This is a doc comment
+ #![doc("This is also a doc comment")]
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast
new file mode 100644
index 000000000..c7eb3687d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rast
@@ -0,0 +1,323 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/issues/972"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ TUPLE_EXPR
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ TUPLE_PAT
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ EXPR_STMT
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ ARG_LIST
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_ARM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ MATCH_ARM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ FAT_ARROW "=>"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ ENUM
+ ENUM_KW "enum"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ WHITESPACE " "
+ VARIANT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ VARIANT
+ NAME
+ IDENT "B"
+ TUPLE_FIELD_LIST
+ L_PAREN "("
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ COMMA ","
+ WHITESPACE " "
+ TUPLE_FIELD
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ RANGE_PAT
+ LITERAL_PAT
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "128"
+ DOT2EQ "..="
+ LITERAL_PAT
+ LITERAL
+ INT_NUMBER "127"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs
new file mode 100644
index 000000000..13dc46afa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.rs
@@ -0,0 +1,27 @@
+// https://github.com/rust-lang/rust-analyzer/issues/972
+
+fn main() {
+ match Some(-1) {
+ Some(-1) => (),
+ _ => (),
+ }
+
+ match Some((-1, -1)) {
+ Some((-1, -1)) => (),
+ _ => (),
+ }
+
+ match A::B(-1, -1) {
+ A::B(-1, -1) => (),
+ _ => (),
+ }
+
+ if let Some(-1) = Some(-1) {
+ }
+}
+
+enum A {
+ B(i8, i8)
+}
+
+fn foo(-128..=127: i8) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast
new file mode 100644
index 000000000..e0f163b1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rast
@@ -0,0 +1,201 @@
+SOURCE_FILE
+ COMMENT "// https://github.com/rust-lang/rust-analyzer/pull/983"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "compound_assignment"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ PLUSEQ "+="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ MINUSEQ "-="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ STAREQ "*="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ PERCENTEQ "%="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "4"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ SLASHEQ "/="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ PIPEEQ "|="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "6"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ AMPEQ "&="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "7"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ CARETEQ "^="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "8"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ LTEQ "<="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "9"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ GTEQ ">="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "10"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ SHREQ ">>="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "11"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ WHITESPACE " "
+ SHLEQ "<<="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "12"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs
new file mode 100644
index 000000000..1a6a9bdf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0048_compound_assignment.rs
@@ -0,0 +1,17 @@
+// https://github.com/rust-lang/rust-analyzer/pull/983
+
+fn compound_assignment() {
+ let mut a = 0;
+ a += 1;
+ a -= 2;
+ a *= 3;
+ a %= 4;
+ a /= 5;
+ a |= 6;
+ a &= 7;
+ a ^= 8;
+ a <= 9;
+ a >= 10;
+ a >>= 11;
+ a <<= 12;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast
new file mode 100644
index 000000000..f376821e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs
new file mode 100644
index 000000000..4781b3225
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0049_async_block.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ async {};
+ async move {};
+}
+
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast
new file mode 100644
index 000000000..53ddf35cc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rast
@@ -0,0 +1,92 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "std"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "future"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Future"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Output"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ BLOCK_EXPR
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "12"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs
new file mode 100644
index 000000000..ec4612cff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0050_async_block_as_argument.rs
@@ -0,0 +1,5 @@
+fn foo(x: impl std::future::Future<Output = i32>) {}
+
+fn main() {
+ foo(async move { 12 })
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast
new file mode 100644
index 000000000..f8b11e778
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rast
@@ -0,0 +1,548 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g1"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr1"
+ R_BRACK "]"
+ WHITESPACE " "
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr2"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "pat"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Type"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g2"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr1"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "printf"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "format"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ DOT3 "..."
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ R_PAREN ")"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ ATTR
+ POUND "#"
+ WHITESPACE " "
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "must_use"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g1"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g2"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g3"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g4"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "g5"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "d"
+ PARAM_LIST
+ L_PAREN "("
+ SELF_PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "attr"
+ R_BRACK "]"
+ WHITESPACE " "
+ NAME
+ SELF_KW "self"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Rc"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ SELF_TYPE_KW "Self"
+ R_ANGLE ">"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs
new file mode 100644
index 000000000..de350d858
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs
@@ -0,0 +1,21 @@
+fn g1(#[attr1] #[attr2] pat: Type) {}
+fn g2(#[attr1] x: u8) {}
+
+extern "C" { fn printf(format: *const i8, #[attr] ...) -> i32; }
+
+fn foo<F: FnMut(#[attr] &mut Foo<'a>)>(){}
+
+trait Foo {
+ fn bar(#[attr] _: u64, # [attr] mut x: i32);
+}
+
+impl S {
+ fn f(#[must_use] self) {}
+ fn g1(#[attr] self) {}
+ fn g2(#[attr] &self) {}
+ fn g3<'a>(#[attr] &mut self) {}
+ fn g4<'a>(#[attr] &'a self) {}
+ fn g5<'a>(#[attr] &'a mut self) {}
+ fn c(#[attr] self: Self) {}
+ fn d(#[attr] self: Rc<Self>) {}
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast
new file mode 100644
index 000000000..0c9dd432f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rast
@@ -0,0 +1,81 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "_x"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ RANGE_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_CURLY "}"
+ R_PAREN ")"
+ DOT "."
+ NAME_REF
+ IDENT "sum"
+ GENERIC_ARG_LIST
+ COLON2 "::"
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u32"
+ R_ANGLE ">"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs
new file mode 100644
index 000000000..b51b19630
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0052_for_range_block.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ for _x in 0 .. (0 .. {1 + 2}).sum::<u32>() {
+ break;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast
new file mode 100644
index 000000000..b94d43beb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ MACRO_RULES
+ COMMENT "/// Some docs"
+ WHITESPACE "\n"
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "macro_export"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ MACRO_RULES_KW "macro_rules"
+ BANG "!"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs
new file mode 100644
index 000000000..b59c23c56
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0053_outer_attribute_on_macro_rules.rs
@@ -0,0 +1,5 @@
+/// Some docs
+#[macro_export]
+macro_rules! foo {
+ () => {};
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast
new file mode 100644
index 000000000..4e1e31f37
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rast
@@ -0,0 +1,126 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Y"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "FnMut"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Y"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs
new file mode 100644
index 000000000..0d3f5722a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0054_qual_path_in_type_arg.rs
@@ -0,0 +1,5 @@
+fn a() -> Foo<bar::Baz> {}
+
+fn b(_: impl FnMut(x::Y)) {}
+
+fn c(_: impl FnMut(&x::Y)) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast
new file mode 100644
index 000000000..684f499df
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rast
@@ -0,0 +1,50 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ R_PAREN ")"
+ COLON ":"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ COLON2 "::"
+ NAME_REF
+ IDENT "X"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs
new file mode 100644
index 000000000..cd204f65e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs
@@ -0,0 +1,5 @@
+type X = ();
+
+fn main() {
+ let ():::X = ();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast
new file mode 100644
index 000000000..55ce31275
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rast
@@ -0,0 +1,65 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ CAST_EXPR
+ METHOD_CALL_EXPR
+ LITERAL
+ FLOAT_NUMBER "1.0f32"
+ DOT "."
+ NAME_REF
+ IDENT "floor"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ WHITESPACE " "
+ NEQ "!="
+ WHITESPACE " "
+ CAST_EXPR
+ METHOD_CALL_EXPR
+ LITERAL
+ FLOAT_NUMBER "1.0f32"
+ DOT "."
+ NAME_REF
+ IDENT "floor"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs
new file mode 100644
index 000000000..6210683ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0056_neq_in_type.rs
@@ -0,0 +1,3 @@
+fn main() {
+ if 1.0f32.floor() as i64 != 1.0f32.floor() as i64 {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast
new file mode 100644
index 000000000..67837e475
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rast
@@ -0,0 +1,59 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs
new file mode 100644
index 000000000..31c12522f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0057_loop_in_call.rs
@@ -0,0 +1,5 @@
+fn foo(x: i32) {}
+
+fn main() {
+ foo(loop {});
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast
new file mode 100644
index 000000000..683d5070a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rast
@@ -0,0 +1,97 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PREFIX_EXPR
+ STAR "*"
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CAST_EXPR
+ PREFIX_EXPR
+ STAR "*"
+ REF_EXPR
+ AMP "&"
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ PREFIX_EXPR
+ STAR "*"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ REF_EXPR
+ AMP "&"
+ INDEX_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ PREFIX_EXPR
+ MINUS "-"
+ LITERAL
+ INT_NUMBER "1"
+ DOT2 ".."
+ LITERAL
+ INT_NUMBER "2"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs
new file mode 100644
index 000000000..100fccc64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0058_unary_expr_precedence.rs
@@ -0,0 +1,7 @@
+fn foo() {
+ 1 + *&2 + 3;
+ *&1 as u64;
+ *x(1);
+ &x[1];
+ -1..2;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast
new file mode 100644
index 000000000..79bc7f971
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rast
@@ -0,0 +1,100 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ FOR_EXPR
+ FOR_KW "for"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ IN_KW "in"
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ R_BRACK "]"
+ DOT "."
+ NAME_REF
+ IDENT "into_iter"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ LOOP_EXPR
+ LOOP_KW "loop"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ BREAK_EXPR
+ BREAK_KW "break"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs
new file mode 100644
index 000000000..6e8b718aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0059_loops_in_parens.rs
@@ -0,0 +1,5 @@
+fn main() {
+ Some(for _ in [1].into_iter() {});
+ Some(loop { break; });
+ Some(while true {});
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast
new file mode 100644
index 000000000..81fc02b6f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rast
@@ -0,0 +1,56 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RANGE_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ CAST_EXPR
+ LITERAL
+ INT_NUMBER "2"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs
new file mode 100644
index 000000000..f063ffadb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0060_as_range.rs
@@ -0,0 +1,4 @@
+fn main() {
+ 0 as usize ..;
+ 1 + 2 as usize ..;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast
new file mode 100644
index 000000000..2f56e9041
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rast
@@ -0,0 +1,27 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MATCH_EXPR
+ MATCH_KW "match"
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ WHITESPACE " "
+ MATCH_ARM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs
new file mode 100644
index 000000000..2c4ed11e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0061_match_full_range.rs
@@ -0,0 +1,4 @@
+fn main() {
+ match .. {
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast
new file mode 100644
index 000000000..3915ed750
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rast
@@ -0,0 +1,177 @@
+SOURCE_FILE
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "parse_use_trees"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "s"
+ COLON ":"
+ IDENT "expr"
+ R_PAREN ")"
+ COMMA ","
+ STAR "*"
+ WHITESPACE " "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IDENT "vec"
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ WHITESPACE "\n "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "parse_use_tree"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "s"
+ R_PAREN ")"
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ WHITESPACE "\n "
+ R_BRACK "]"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ FN
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "test"
+ R_BRACK "]"
+ WHITESPACE "\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test_use_tree_merge"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ MACRO_DEF
+ MACRO_KW "macro"
+ WHITESPACE " "
+ NAME
+ IDENT "test_merge"
+ TOKEN_TREE
+ TOKEN_TREE
+ L_PAREN "("
+ TOKEN_TREE
+ L_BRACK "["
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "input"
+ COLON ":"
+ IDENT "expr"
+ R_PAREN ")"
+ COMMA ","
+ STAR "*"
+ WHITESPACE " "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_BRACK "]"
+ COMMA ","
+ WHITESPACE " "
+ TOKEN_TREE
+ L_BRACK "["
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "output"
+ COLON ":"
+ IDENT "expr"
+ R_PAREN ")"
+ COMMA ","
+ STAR "*"
+ WHITESPACE " "
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_BRACK "]"
+ R_PAREN ")"
+ WHITESPACE " "
+ TOKEN_TREE
+ L_CURLY "{"
+ WHITESPACE "\n "
+ IDENT "assert_eq"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ WHITESPACE "\n "
+ IDENT "merge_use_trees"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "parse_use_trees"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "input"
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_PAREN ")"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ IDENT "parse_use_trees"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ TOKEN_TREE
+ L_PAREN "("
+ DOLLAR "$"
+ IDENT "output"
+ COMMA ","
+ R_PAREN ")"
+ STAR "*"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n "
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs
new file mode 100644
index 000000000..781047ba1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0062_macro_2.0.rs
@@ -0,0 +1,15 @@
+macro parse_use_trees($($s:expr),* $(,)*) {
+ vec![
+ $(parse_use_tree($s),)*
+ ]
+}
+
+#[test]
+fn test_use_tree_merge() {
+ macro test_merge([$($input:expr),* $(,)*], [$($output:expr),* $(,)*]) {
+ assert_eq!(
+ merge_use_trees(parse_use_trees!($($input,)*)),
+ parse_use_trees!($($output,)*),
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast
new file mode 100644
index 000000000..a86b21d27
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rast
@@ -0,0 +1,198 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f1"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f2"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ R_CURLY "}"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f3"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f4"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_PAT
+ AMP "&"
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u64"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "x"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs
new file mode 100644
index 000000000..3b666af8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_trait_fn_patterns.rs
@@ -0,0 +1,7 @@
+trait T {
+ fn f1((a, b): (usize, usize)) {}
+ fn f2(S { a, b }: S) {}
+ fn f3(NewType(a): NewType) {}
+ fn f4(&&a: &&usize) {}
+ fn bar(_: u64, mut x: i32);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast
new file mode 100644
index 000000000..e36399123
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rast
@@ -0,0 +1,134 @@
+SOURCE_FILE
+ EXTERN_BLOCK
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ DOT3 "..."
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ DOT3 "..."
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ MUT_KW "mut"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "cfg"
+ TOKEN_TREE
+ L_PAREN "("
+ IDENT "never"
+ R_PAREN ")"
+ R_BRACK "]"
+ WHITESPACE " "
+ SLICE_PAT
+ L_BRACK "["
+ IDENT_PAT
+ NAME
+ IDENT "w"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "t"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "f"
+ R_BRACK "]"
+ COLON ":"
+ WHITESPACE " "
+ DOT3 "..."
+ COMMA ","
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs
new file mode 100644
index 000000000..a16afbaf3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0063_variadic_fun.rs
@@ -0,0 +1,5 @@
+extern "C" {
+ fn a(_: *mut u8, ...,);
+ fn b(_: *mut u8, _: ...);
+ fn c(_: *mut u8, #[cfg(never)] [w, t, f]: ...,);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast
new file mode 100644
index 000000000..18cecc810
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rast
@@ -0,0 +1,166 @@
+SOURCE_FILE
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "U"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f1"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ COMMA ","
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f2"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ RECORD_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_PAT_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_PAT_FIELD
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ R_CURLY "}"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f3"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "NewType"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f4"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_PAT
+ AMP "&"
+ REF_PAT
+ AMP "&"
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs
new file mode 100644
index 000000000..b49e872d7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0064_impl_fn_params.rs
@@ -0,0 +1,6 @@
+impl U {
+ fn f1((a, b): (usize, usize)) {}
+ fn f2(S { a, b }: S) {}
+ fn f3(NewType(a): NewType) {}
+ fn f4(&&a: &&usize) {}
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast
new file mode 100644
index 000000000..3ffcb48f5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rast
@@ -0,0 +1,17 @@
+SOURCE_FILE
+ FN
+ COMMENT "/// Example"
+ WHITESPACE "\n\n"
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "test"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs
new file mode 100644
index 000000000..1fafe216b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_comment_newline.rs
@@ -0,0 +1,3 @@
+/// Example
+
+fn test() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast
new file mode 100644
index 000000000..ba7b6042a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rast
@@ -0,0 +1,61 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE " "
+ WHERE_PRED
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs
new file mode 100644
index 000000000..29f3655e0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0065_plus_after_fn_trait_bound.rs
@@ -0,0 +1 @@
+fn f<T>() where T: Fn() -> u8 + Send {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast
new file mode 100644
index 000000000..a4303098a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rast
@@ -0,0 +1,222 @@
+SOURCE_FILE
+ TRAIT
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TYPE_ALIAS
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CONST
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ TYPE_ALIAS
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Bar"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ CONST
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ FN
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ FOR_KW "for"
+ WHITESPACE " "
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs
new file mode 100644
index 000000000..e443e3495
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0066_default_modifier.rs
@@ -0,0 +1,16 @@
+trait T {
+ default type T = Bar;
+ default const f: u8 = 0;
+ default fn foo() {}
+ default unsafe fn bar() {}
+}
+
+impl T for Foo {
+ default type T = Bar;
+ default const f: u8 = 0;
+ default fn foo() {}
+ default unsafe fn bar() {}
+}
+
+default impl T for () {}
+default unsafe impl T for () {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
new file mode 100644
index 000000000..136fce93d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
@@ -0,0 +1,413 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_trait"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "str"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_ref"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Debug"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_parens"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "str"
+ R_PAREN ")"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_slice"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "F"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ SLICE_TYPE
+ L_BRACK "["
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "F"
+ R_BRACK "]"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Eq"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_qpath"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "_t"
+ COLON ":"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Baz"
+ R_ANGLE ">"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "for_for_fn"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ WHERE_CLAUSE
+ WHERE_KW "where"
+ WHITESPACE "\n "
+ WHERE_PRED
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ COMMA ","
+ WHITESPACE " "
+ PARAM
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ R_PAREN ")"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ COMMA ","
+ WHITESPACE "\n"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs
new file mode 100644
index 000000000..9058c4619
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rs
@@ -0,0 +1,30 @@
+fn for_trait<F>()
+where
+ for<'a> F: Fn(&'a str),
+{
+}
+fn for_ref<F>()
+where
+ for<'a> &'a F: Debug,
+{
+}
+fn for_parens<F>()
+where
+ for<'a> (&'a F): Fn(&'a str),
+{
+}
+fn for_slice<F>()
+where
+ for<'a> [&'a F]: Eq,
+{
+}
+fn for_qpath<T>(_t: &T)
+where
+ for<'a> <&'a T as Baz>::Foo: Iterator,
+{
+}
+fn for_for_fn<T>()
+where
+ for<'a> for<'b> fn(&'a T, &'b T): Copy,
+{
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast
new file mode 100644
index 000000000..41fc5691a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rast
@@ -0,0 +1,238 @@
+SOURCE_FILE
+ FN
+ ASYNC_KW "async"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C\""
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ ASYNC_KW "async"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "bar"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ TRAIT
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ TRAIT
+ AUTO_KW "auto"
+ WHITESPACE " "
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ TRAIT
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ AUTO_KW "auto"
+ WHITESPACE " "
+ TRAIT_KW "trait"
+ WHITESPACE " "
+ NAME
+ IDENT "T"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ IMPL
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ IMPL
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ DEFAULT_KW "default"
+ WHITESPACE " "
+ IMPL_KW "impl"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ ASSOC_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n\n"
+ EXTERN_BLOCK
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ ABI
+ EXTERN_KW "extern"
+ WHITESPACE " "
+ STRING "\"C++\""
+ WHITESPACE " "
+ EXTERN_ITEM_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs
new file mode 100644
index 000000000..6d27a082c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs
@@ -0,0 +1,18 @@
+async fn foo() {}
+extern fn foo() {}
+const fn foo() {}
+const unsafe fn foo() {}
+unsafe extern "C" fn foo() {}
+unsafe fn foo() {}
+async unsafe fn foo() {}
+const unsafe fn bar() {}
+
+unsafe trait T {}
+auto trait T {}
+unsafe auto trait T {}
+
+unsafe impl Foo {}
+default impl Foo {}
+unsafe default impl Foo {}
+
+unsafe extern "C++" {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast
new file mode 100644
index 000000000..9e8f4e197
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rast
@@ -0,0 +1,204 @@
+SOURCE_FILE
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sync"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PTR_TYPE
+ STAR "*"
+ CONST_KW "const"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Sync"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ TYPE_ALIAS
+ TYPE_KW "type"
+ WHITESPACE " "
+ NAME
+ IDENT "Foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Send"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ LIFETIME
+ LIFETIME_IDENT "'static"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ CAST_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ REF_EXPR
+ AMP "&"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ REF_TYPE
+ AMP "&"
+ PAREN_TYPE
+ L_PAREN "("
+ DYN_TRAIT_TYPE
+ DYN_KW "dyn"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Add"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Other"
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Output"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Addable"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Other"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs
new file mode 100644
index 000000000..97eb79c48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0069_multi_trait_object.rs
@@ -0,0 +1,6 @@
+type Foo<'a> = &'a (dyn Send + Sync);
+type Foo = *const (dyn Send + Sync);
+type Foo = fn() -> (dyn Send + 'static);
+fn main() {
+ let b = (&a) as &(dyn Add<Other, Output = Addable> + Other);
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast
new file mode 100644
index 000000000..3d00b27ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rast
@@ -0,0 +1,59 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ PAREN_EXPR
+ L_PAREN "("
+ BIN_EXPR
+ TRY_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "lhs"
+ QUESTION "?"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ AWAIT_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "rhs"
+ DOT "."
+ AWAIT_KW "await"
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs
new file mode 100644
index 000000000..d8b7a3832
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0070_expr_attr_placement.rs
@@ -0,0 +1,3 @@
+fn f() {
+ (#[a] lhs? + #[b] rhs.await)
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast
new file mode 100644
index 000000000..1cafc775c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rast
@@ -0,0 +1,72 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BLOCK_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ R_BRACK "]"
+ WHITESPACE " "
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ TRY_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_BRACK "]"
+ WHITESPACE " "
+ MACRO_EXPR
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ BANG "!"
+ TOKEN_TREE
+ L_PAREN "("
+ R_PAREN ")"
+ QUESTION "?"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ REF_EXPR
+ ATTR
+ POUND "#"
+ L_BRACK "["
+ META
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "C"
+ R_BRACK "]"
+ WHITESPACE " "
+ AMP "&"
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs
new file mode 100644
index 000000000..b4d5204bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0071_stmt_attr_placement.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ #[A] { #[B] bar!()? }
+ #[C] &()
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast
new file mode 100644
index 000000000..e8b836dfb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rast
@@ -0,0 +1,352 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ MUT_KW "mut"
+ WHITESPACE " "
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "0"
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ RECORD_FIELD
+ NAME
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ COLON2 "::"
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "default"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ BIN_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ RANGE_EXPR
+ DOT2 ".."
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ METHOD_CALL_EXPR
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ ARG_LIST
+ L_PAREN "("
+ LITERAL
+ INT_NUMBER "0"
+ R_PAREN ")"
+ DOT "."
+ WHITESPACE "\n "
+ NAME_REF
+ IDENT "Ok"
+ ARG_LIST
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_STMT
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_PAT
+ L_PAREN "("
+ IDENT_PAT
+ NAME
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ ARRAY_EXPR
+ L_BRACK "["
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_BRACK "]"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ ARRAY_EXPR
+ L_BRACK "["
+ LITERAL
+ INT_NUMBER "1"
+ COMMA ","
+ WHITESPACE " "
+ RANGE_EXPR
+ DOT2 ".."
+ COMMA ","
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "2"
+ R_BRACK "]"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ TUPLE_EXPR
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ COMMA ","
+ WHITESPACE " "
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ PAREN_EXPR
+ L_PAREN "("
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ BIN_EXPR
+ UNDERSCORE_EXPR
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "a"
+ COMMA ","
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "b"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs
new file mode 100644
index 000000000..9d3e86603
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0072_destructuring_assignment.rs
@@ -0,0 +1,14 @@
+fn foo() {
+ let (mut a, mut b) = (0, 1);
+ (b, a, ..) = (a, b);
+ (_) = ..;
+ struct S { a: i32 }
+ S { .. } = S { ..S::default() };
+ Some(..) = Some(0).
+ Ok(_) = 0;
+ let (a, b);
+ [a, .., b] = [1, .., 2];
+ (_, _) = (a, b);
+ (_) = (a, b);
+ _ = (a, b);
+}
diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml
new file mode 100644
index 000000000..5e83de7d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "paths"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Adding this dep sadly puts a lot of rust-analyzer crates after the
+# serde-derive crate. Even though we don't activate the derive feature here,
+# someone else in the crate graph certainly does!
+# serde = "1"
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
new file mode 100644
index 000000000..025093f4a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -0,0 +1,299 @@
+//! Thin wrappers around `std::path`, distinguishing between absolute and
+//! relative paths.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ borrow::Borrow,
+ ffi::OsStr,
+ ops,
+ path::{Component, Path, PathBuf},
+};
+
+/// Wrapper around an absolute [`PathBuf`].
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct AbsPathBuf(PathBuf);
+
+impl From<AbsPathBuf> for PathBuf {
+ fn from(AbsPathBuf(path_buf): AbsPathBuf) -> PathBuf {
+ path_buf
+ }
+}
+
+impl ops::Deref for AbsPathBuf {
+ type Target = AbsPath;
+ fn deref(&self) -> &AbsPath {
+ self.as_path()
+ }
+}
+
+impl AsRef<Path> for AbsPathBuf {
+ fn as_ref(&self) -> &Path {
+ self.0.as_path()
+ }
+}
+
+impl AsRef<AbsPath> for AbsPathBuf {
+ fn as_ref(&self) -> &AbsPath {
+ self.as_path()
+ }
+}
+
+impl Borrow<AbsPath> for AbsPathBuf {
+ fn borrow(&self) -> &AbsPath {
+ self.as_path()
+ }
+}
+
+impl TryFrom<PathBuf> for AbsPathBuf {
+ type Error = PathBuf;
+ fn try_from(path_buf: PathBuf) -> Result<AbsPathBuf, PathBuf> {
+ if !path_buf.is_absolute() {
+ return Err(path_buf);
+ }
+ Ok(AbsPathBuf(path_buf))
+ }
+}
+
+impl TryFrom<&str> for AbsPathBuf {
+ type Error = PathBuf;
+ fn try_from(path: &str) -> Result<AbsPathBuf, PathBuf> {
+ AbsPathBuf::try_from(PathBuf::from(path))
+ }
+}
+
+impl PartialEq<AbsPath> for AbsPathBuf {
+ fn eq(&self, other: &AbsPath) -> bool {
+ self.as_path() == other
+ }
+}
+
+impl AbsPathBuf {
+ /// Wrap the given absolute path in `AbsPathBuf`
+ ///
+ /// # Panics
+ ///
+ /// Panics if `path` is not absolute.
+ pub fn assert(path: PathBuf) -> AbsPathBuf {
+ AbsPathBuf::try_from(path)
+ .unwrap_or_else(|path| panic!("expected absolute path, got {}", path.display()))
+ }
+
+ /// Coerces to an `AbsPath` slice.
+ ///
+ /// Equivalent of [`PathBuf::as_path`] for `AbsPathBuf`.
+ pub fn as_path(&self) -> &AbsPath {
+ AbsPath::assert(self.0.as_path())
+ }
+
+ /// Equivalent of [`PathBuf::pop`] for `AbsPathBuf`.
+ ///
+ /// Note that this won't remove the root component, so `self` will still be
+ /// absolute.
+ pub fn pop(&mut self) -> bool {
+ self.0.pop()
+ }
+}
+
+/// Wrapper around an absolute [`Path`].
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct AbsPath(Path);
+
+impl AsRef<Path> for AbsPath {
+ fn as_ref(&self) -> &Path {
+ &self.0
+ }
+}
+
+impl<'a> TryFrom<&'a Path> for &'a AbsPath {
+ type Error = &'a Path;
+ fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> {
+ if !path.is_absolute() {
+ return Err(path);
+ }
+ Ok(AbsPath::assert(path))
+ }
+}
+
+impl AbsPath {
+ /// Wrap the given absolute path in `AbsPath`
+ ///
+ /// # Panics
+ ///
+ /// Panics if `path` is not absolute.
+ pub fn assert(path: &Path) -> &AbsPath {
+ assert!(path.is_absolute());
+ unsafe { &*(path as *const Path as *const AbsPath) }
+ }
+
+ /// Equivalent of [`Path::parent`] for `AbsPath`.
+ pub fn parent(&self) -> Option<&AbsPath> {
+ self.0.parent().map(AbsPath::assert)
+ }
+
+ /// Equivalent of [`Path::join`] for `AbsPath`.
+ pub fn join(&self, path: impl AsRef<Path>) -> AbsPathBuf {
+ self.as_ref().join(path).try_into().unwrap()
+ }
+
+ /// Normalize the given path:
+ /// - Removes repeated separators: `/a//b` becomes `/a/b`
+ /// - Removes occurrences of `.` and resolves `..`.
+ /// - Removes trailing slashes: `/a/b/` becomes `/a/b`.
+ ///
+ /// # Example
+ /// ```
+ /// # use paths::AbsPathBuf;
+ /// let abs_path_buf = AbsPathBuf::assert("/a/../../b/.//c//".into());
+ /// let normalized = abs_path_buf.normalize();
+ /// assert_eq!(normalized, AbsPathBuf::assert("/b/c".into()));
+ /// ```
+ pub fn normalize(&self) -> AbsPathBuf {
+ AbsPathBuf(normalize_path(&self.0))
+ }
+
+ /// Equivalent of [`Path::to_path_buf`] for `AbsPath`.
+ pub fn to_path_buf(&self) -> AbsPathBuf {
+ AbsPathBuf::try_from(self.0.to_path_buf()).unwrap()
+ }
+
+ /// Equivalent of [`Path::strip_prefix`] for `AbsPath`.
+ ///
+ /// Returns a relative path.
+ pub fn strip_prefix(&self, base: &AbsPath) -> Option<&RelPath> {
+ self.0.strip_prefix(base).ok().map(RelPath::new_unchecked)
+ }
+ pub fn starts_with(&self, base: &AbsPath) -> bool {
+ self.0.starts_with(&base.0)
+ }
+ pub fn ends_with(&self, suffix: &RelPath) -> bool {
+ self.0.ends_with(&suffix.0)
+ }
+
+ // region:delegate-methods
+
+ // Note that we deliberately don't implement `Deref<Target = Path>` here.
+ //
+ // The problem with `Path` is that it directly exposes convenience IO-ing
+ // methods. For example, `Path::exists` delegates to `fs::metadata`.
+ //
+ // For `AbsPath`, we want to make sure that this is a POD type, and that all
+ // IO goes via `fs`. That way, it becomes easier to mock IO when we need it.
+
+ pub fn file_name(&self) -> Option<&OsStr> {
+ self.0.file_name()
+ }
+ pub fn extension(&self) -> Option<&OsStr> {
+ self.0.extension()
+ }
+ pub fn file_stem(&self) -> Option<&OsStr> {
+ self.0.file_stem()
+ }
+ pub fn as_os_str(&self) -> &OsStr {
+ self.0.as_os_str()
+ }
+ pub fn display(&self) -> std::path::Display<'_> {
+ self.0.display()
+ }
+ #[deprecated(note = "use std::fs::metadata().is_ok() instead")]
+ pub fn exists(&self) -> bool {
+ self.0.exists()
+ }
+ // endregion:delegate-methods
+}
+
+/// Wrapper around a relative [`PathBuf`].
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct RelPathBuf(PathBuf);
+
+impl From<RelPathBuf> for PathBuf {
+ fn from(RelPathBuf(path_buf): RelPathBuf) -> PathBuf {
+ path_buf
+ }
+}
+
+impl ops::Deref for RelPathBuf {
+ type Target = RelPath;
+ fn deref(&self) -> &RelPath {
+ self.as_path()
+ }
+}
+
+impl AsRef<Path> for RelPathBuf {
+ fn as_ref(&self) -> &Path {
+ self.0.as_path()
+ }
+}
+
+impl TryFrom<PathBuf> for RelPathBuf {
+ type Error = PathBuf;
+ fn try_from(path_buf: PathBuf) -> Result<RelPathBuf, PathBuf> {
+ if !path_buf.is_relative() {
+ return Err(path_buf);
+ }
+ Ok(RelPathBuf(path_buf))
+ }
+}
+
+impl TryFrom<&str> for RelPathBuf {
+ type Error = PathBuf;
+ fn try_from(path: &str) -> Result<RelPathBuf, PathBuf> {
+ RelPathBuf::try_from(PathBuf::from(path))
+ }
+}
+
+impl RelPathBuf {
+ /// Coerces to a `RelPath` slice.
+ ///
+ /// Equivalent of [`PathBuf::as_path`] for `RelPathBuf`.
+ pub fn as_path(&self) -> &RelPath {
+ RelPath::new_unchecked(self.0.as_path())
+ }
+}
+
+/// Wrapper around a relative [`Path`].
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct RelPath(Path);
+
+impl AsRef<Path> for RelPath {
+ fn as_ref(&self) -> &Path {
+ &self.0
+ }
+}
+
+impl RelPath {
+ /// Creates a new `RelPath` from `path`, without checking if it is relative.
+ pub fn new_unchecked(path: &Path) -> &RelPath {
+ unsafe { &*(path as *const Path as *const RelPath) }
+ }
+}
+
+/// Taken from <https://github.com/rust-lang/cargo/blob/79c769c3d7b4c2cf6a93781575b7f592ef974255/src/cargo/util/paths.rs#L60-L85>
+fn normalize_path(path: &Path) -> PathBuf {
+ let mut components = path.components().peekable();
+ let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() {
+ components.next();
+ PathBuf::from(c.as_os_str())
+ } else {
+ PathBuf::new()
+ };
+
+ for component in components {
+ match component {
+ Component::Prefix(..) => unreachable!(),
+ Component::RootDir => {
+ ret.push(component.as_os_str());
+ }
+ Component::CurDir => {}
+ Component::ParentDir => {
+ ret.pop();
+ }
+ Component::Normal(c) => {
+ ret.push(c);
+ }
+ }
+ }
+ ret
+}
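
As a quick orientation to the `paths` API added in the hunk above, the following is a minimal, hypothetical usage sketch; it is not part of the patch. It only uses items defined there (`AbsPathBuf::assert`, the `TryFrom` impls, `join`, `normalize`, `strip_prefix`), assumes Unix-style absolute paths, and the `demo` function name and concrete paths are purely illustrative.

use paths::AbsPathBuf;

fn demo() {
    // `assert` panics on a relative path; the `TryFrom` impls return Err instead.
    let root = AbsPathBuf::assert("/workspace/project".into());

    // `join` and `normalize` are defined on `AbsPath`; `AbsPathBuf` derefs to it.
    let file = root.join("src/./lib.rs");
    // `normalize` drops the `.` component: "/workspace/project/src/lib.rs".
    let file = file.normalize();

    // `strip_prefix` hands back a relative path ("src/lib.rs" here).
    assert!(file.strip_prefix(root.as_path()).is_some());

    // Fallible construction rejects relative input.
    assert!(AbsPathBuf::try_from("relative/path").is_err());
}
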
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
new file mode 100644
index 000000000..85a1c13fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "proc-macro-api"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = { version = "1.0.81", features = ["unbounded_depth"] }
+tracing = "0.1.35"
+memmap2 = "0.5.4"
+snap = "1.0.5"
+
+paths = { path = "../paths", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+# Intentionally do *not* depend on anything salsa-related.
+# base-db = { path = "../base-db", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
new file mode 100644
index 000000000..d7010e825
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -0,0 +1,181 @@
+//! Client-side Proc-Macro crate
+//!
+//! We separate the proc-macro expansion logic into an external program to
+//! allow for different implementations (e.g. wasm or dylib loading). This
+//! crate provides the basic infrastructure for communication between the
+//! two processes: the client (RA itself) and the server (the external program).
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod msg;
+mod process;
+mod version;
+
+use paths::AbsPathBuf;
+use std::{
+ ffi::OsStr,
+ fmt, io,
+ sync::{Arc, Mutex},
+};
+
+use serde::{Deserialize, Serialize};
+use tt::Subtree;
+
+use crate::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ process::ProcMacroProcessSrv,
+};
+
+pub use version::{read_dylib_info, read_version, RustCInfo};
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
+pub enum ProcMacroKind {
+ CustomDerive,
+ FuncLike,
+ Attr,
+}
+
+/// A handle to an external process which loads dylibs with macros (.so or .dll)
+/// and runs actual macro expansion functions.
+#[derive(Debug)]
+pub struct ProcMacroServer {
+ /// Currently, the proc macro process expands all procedural macros sequentially.
+ ///
+ /// That means that concurrent salsa requests may block each other when expanding proc macros,
+ /// which is unfortunate, but simple and good enough for the time being.
+ ///
+ /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
+ process: Arc<Mutex<ProcMacroProcessSrv>>,
+}
+
+pub struct MacroDylib {
+ path: AbsPathBuf,
+}
+
+impl MacroDylib {
+ // FIXME: this is buggy due to TOCTOU, we should check the version in the
+ // macro process instead.
+ pub fn new(path: AbsPathBuf) -> io::Result<MacroDylib> {
+ let _p = profile::span("MacroDylib::new");
+
+ let info = version::read_dylib_info(&path)?;
+ if info.version.0 < 1 || info.version.1 < 47 {
+ let msg = format!("proc-macro {} built by {:#?} is not supported by Rust Analyzer, please update your rust version.", path.display(), info);
+ return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
+ }
+
+ Ok(MacroDylib { path })
+ }
+}
+
+/// A handle to a specific macro (a `#[proc_macro]` annotated function).
+///
+/// It exists within the context of a specific [`ProcMacroProcess`] -- currently
+/// we share a single expander process for all macros.
+#[derive(Debug, Clone)]
+pub struct ProcMacro {
+ process: Arc<Mutex<ProcMacroProcessSrv>>,
+ dylib_path: AbsPathBuf,
+ name: String,
+ kind: ProcMacroKind,
+}
+
+impl Eq for ProcMacro {}
+impl PartialEq for ProcMacro {
+ fn eq(&self, other: &Self) -> bool {
+ self.name == other.name
+ && self.kind == other.kind
+ && self.dylib_path == other.dylib_path
+ && Arc::ptr_eq(&self.process, &other.process)
+ }
+}
+
+pub struct ServerError {
+ pub message: String,
+ pub io: Option<io::Error>,
+}
+
+impl fmt::Display for ServerError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.message.fmt(f)?;
+ if let Some(io) = &self.io {
+ f.write_str(": ")?;
+ io.fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+pub struct MacroPanic {
+ pub message: String,
+}
+
+impl ProcMacroServer {
+ /// Spawns an external process as the proc macro server and returns a client connected to it.
+ pub fn spawn(
+ process_path: AbsPathBuf,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> io::Result<ProcMacroServer> {
+ let process = ProcMacroProcessSrv::run(process_path, args)?;
+ Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) })
+ }
+
+ pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
+ let _p = profile::span("ProcMacroClient::by_dylib_path");
+ let macros =
+ self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
+
+ match macros {
+ Ok(macros) => Ok(macros
+ .into_iter()
+ .map(|(name, kind)| ProcMacro {
+ process: self.process.clone(),
+ name,
+ kind,
+ dylib_path: dylib.path.clone(),
+ })
+ .collect()),
+ Err(message) => Err(ServerError { message, io: None }),
+ }
+ }
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ pub fn kind(&self) -> ProcMacroKind {
+ self.kind
+ }
+
+ pub fn expand(
+ &self,
+ subtree: &Subtree,
+ attr: Option<&Subtree>,
+ env: Vec<(String, String)>,
+ ) -> Result<Result<Subtree, PanicMessage>, ServerError> {
+ let current_dir = env
+ .iter()
+ .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
+ .map(|(_, value)| value.clone());
+
+ let task = ExpandMacro {
+ macro_body: FlatTree::new(subtree),
+ macro_name: self.name.to_string(),
+ attributes: attr.map(FlatTree::new),
+ lib: self.dylib_path.to_path_buf().into(),
+ env,
+ current_dir,
+ };
+
+ let request = msg::Request::ExpandMacro(task);
+ let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+ match response {
+ msg::Response::ExpandMacro(it) => Ok(it.map(FlatTree::to_subtree)),
+ msg::Response::ListMacros { .. } => {
+ Err(ServerError { message: "unexpected response".to_string(), io: None })
+ }
+ }
+ }
+}
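
Roughly, the intended client-side flow is: spawn the server binary, load a macro dylib, then expand individual macros. Below is a sketch of that flow; the function and both paths are hypothetical (rust-analyzer derives them from the toolchain and the workspace rather than taking them as arguments like this):

```rust
use paths::AbsPathBuf;
use proc_macro_api::{MacroDylib, ProcMacroServer};

// `server_path` points at the proc-macro server binary and `dylib_path` at a
// compiled proc-macro dylib; both are assumed to be obtained elsewhere.
fn list_macros(server_path: AbsPathBuf, dylib_path: AbsPathBuf) -> std::io::Result<()> {
    // No extra arguments are passed to the server in this sketch.
    let server = ProcMacroServer::spawn(server_path, Vec::<String>::new())?;

    let dylib = MacroDylib::new(dylib_path)?;
    match server.load_dylib(dylib) {
        Ok(macros) => {
            for m in &macros {
                println!("{} ({:?})", m.name(), m.kind());
            }
        }
        Err(err) => eprintln!("failed to load proc-macro dylib: {}", err),
    }
    Ok(())
}
```

Note that `ProcMacro::expand` takes the macro body (and optional attribute input) as `tt::Subtree`s and returns a nested `Result`: the outer layer reports transport/server errors, the inner one a panic inside the macro itself.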
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
new file mode 100644
index 000000000..f9c2b9fda
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
@@ -0,0 +1,154 @@
+//! Defines messages for cross-process message passing based on the `ndjson` wire protocol
+pub(crate) mod flat;
+
+use std::{
+ io::{self, BufRead, Write},
+ path::PathBuf,
+};
+
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+
+use crate::ProcMacroKind;
+
+pub use crate::msg::flat::FlatTree;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum Request {
+ ListMacros { dylib_path: PathBuf },
+ ExpandMacro(ExpandMacro),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum Response {
+ ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
+ ExpandMacro(Result<FlatTree, PanicMessage>),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct PanicMessage(pub String);
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacro {
+ /// Argument of macro call.
+ ///
+ /// In a custom derive this is the struct or enum; in an attribute-like macro, the
+ /// item the attribute is attached to; in a function-like macro, the macro body.
+ pub macro_body: FlatTree,
+
+ /// Name of macro to expand.
+ ///
+ /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.).
+ /// In attribute-like and function-like macros it is the name of the macro itself (`show_streams`).
+ pub macro_name: String,
+
+ /// Possible attributes for the attribute-like macros.
+ pub attributes: Option<FlatTree>,
+
+ pub lib: PathBuf,
+
+ /// Environment variables to set during macro expansion.
+ pub env: Vec<(String, String)>,
+
+ pub current_dir: Option<String>,
+}
+
+pub trait Message: Serialize + DeserializeOwned {
+ fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> {
+ Ok(match read_json(inp, buf)? {
+ None => None,
+ Some(text) => {
+ let mut deserializer = serde_json::Deserializer::from_str(text);
+ // Note that some proc-macros generate very deep syntax trees,
+ // so we have to disable serde's recursion limit here.
+ deserializer.disable_recursion_limit();
+ Some(Self::deserialize(&mut deserializer)?)
+ }
+ })
+ }
+ fn write(self, out: &mut impl Write) -> io::Result<()> {
+ let text = serde_json::to_string(&self)?;
+ write_json(out, &text)
+ }
+}
+
+impl Message for Request {}
+impl Message for Response {}
+
+fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
+ loop {
+ buf.clear();
+
+ inp.read_line(buf)?;
+ buf.pop(); // Remove trailing '\n'
+
+ if buf.is_empty() {
+ return Ok(None);
+ }
+
+ // Some ill-behaved macros try to use stdout for debugging;
+ // we ignore such output here.
+ if !buf.starts_with('{') {
+ tracing::error!("proc-macro tried to print : {}", buf);
+ continue;
+ }
+
+ return Ok(Some(buf));
+ }
+}
+
+fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
+ tracing::debug!("> {}", msg);
+ out.write_all(msg.as_bytes())?;
+ out.write_all(b"\n")?;
+ out.flush()?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use tt::*;
+
+ fn fixture_token_tree() -> Subtree {
+ let mut subtree = Subtree::default();
+ subtree
+ .token_trees
+ .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into()));
+ subtree
+ .token_trees
+ .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
+ subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
+ text: "Foo".into(),
+ id: TokenId::unspecified(),
+ })));
+ subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
+ char: '@',
+ id: TokenId::unspecified(),
+ spacing: Spacing::Joint,
+ })));
+ subtree.token_trees.push(TokenTree::Subtree(Subtree {
+ delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
+ token_trees: vec![],
+ }));
+ subtree
+ }
+
+ #[test]
+ fn test_proc_macro_rpc_works() {
+ let tt = fixture_token_tree();
+ let task = ExpandMacro {
+ macro_body: FlatTree::new(&tt),
+ macro_name: Default::default(),
+ attributes: None,
+ lib: std::env::current_dir().unwrap(),
+ env: Default::default(),
+ current_dir: Default::default(),
+ };
+
+ let json = serde_json::to_string(&task).unwrap();
+ // println!("{}", json);
+ let back: ExpandMacro = serde_json::from_str(&json).unwrap();
+
+ assert_eq!(tt, back.macro_body.to_subtree());
+ }
+}
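
The wire format is one JSON object per line in each direction. A small sketch, assuming it runs somewhere `proc_macro_api::msg` is visible as a dependency:

```rust
use std::path::PathBuf;

use proc_macro_api::msg::{Message, Request};

fn main() -> std::io::Result<()> {
    let req = Request::ListMacros { dylib_path: PathBuf::from("libmacros.so") };

    // `Message::write` serializes the request as a single JSON object
    // followed by '\n', which is exactly what `read_json` expects to find
    // on the other end of the pipe.
    let mut out: Vec<u8> = Vec::new();
    req.write(&mut out)?;

    let text = String::from_utf8(out).unwrap();
    assert!(text.starts_with('{'));
    assert!(text.ends_with('\n'));
    Ok(())
}
```

An empty line (or EOF) makes `read_json` return `Ok(None)`, which the client surfaces as a "server exited" error.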
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
new file mode 100644
index 000000000..8437444e1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
@@ -0,0 +1,328 @@
+//! Serialization-friendly representation of `tt::Subtree`.
+//!
+//! It is possible to serialize `Subtree` as is, as a tree, but using
+//! arbitrary-nested trees in JSON is problematic, as they can cause the JSON
+//! parser to overflow the stack.
+//!
+//! Additionally, such implementation would be pretty verbose, and we do care
+//! about performance here a bit.
+//!
+//! So what this module does is dump a `tt::Subtree` into a bunch of flat
+//! arrays of numbers. See the test in the parent module for an example of
+//! the output.
+//!
+//! ```json
+//! {
+//! // Array of subtrees, each subtree is represented by 4 numbers:
+//! // id of delimiter, delimiter kind, index of first child in `token_tree`,
+//! // index of last child in `token_tree`
+//! "subtree":[4294967295,0,0,5,2,2,5,5],
+//! // 2 ints per literal: [token id, index into `text`]
+//! "literal":[4294967295,1],
+//! // 3 ints per punct: [token id, char, spacing]
+//! "punct":[4294967295,64,1],
+//! // 2 ints per ident: [token id, index into `text`]
+//! "ident": [0,0,1,1],
+//! // children of all subtrees, concatenated. Each child is represented as `index << 2 | tag`
+//! // where tag denotes one of subtree, literal, punct or ident.
+//! "token_tree":[3,7,1,4],
+//! // Strings shared by idents and literals
+//! "text": ["struct","Foo"]
+//! }
+//! ```
+//!
+//! We probably should replace most of the code here with bincode someday, but,
+//! as we don't have bincode in Cargo.toml yet, let's stick with serde_json for
+//! the time being.
+
+use std::{
+ collections::{HashMap, VecDeque},
+ convert::TryInto,
+};
+
+use serde::{Deserialize, Serialize};
+use tt::TokenId;
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct FlatTree {
+ subtree: Vec<u32>,
+ literal: Vec<u32>,
+ punct: Vec<u32>,
+ ident: Vec<u32>,
+ token_tree: Vec<u32>,
+ text: Vec<String>,
+}
+
+struct SubtreeRepr {
+ id: tt::TokenId,
+ kind: Option<tt::DelimiterKind>,
+ tt: [u32; 2],
+}
+
+struct LiteralRepr {
+ id: tt::TokenId,
+ text: u32,
+}
+
+struct PunctRepr {
+ id: tt::TokenId,
+ char: char,
+ spacing: tt::Spacing,
+}
+
+struct IdentRepr {
+ id: tt::TokenId,
+ text: u32,
+}
+
+impl FlatTree {
+ pub fn new(subtree: &tt::Subtree) -> FlatTree {
+ let mut w = Writer {
+ string_table: HashMap::new(),
+ work: VecDeque::new(),
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
+ };
+ w.write(subtree);
+
+ return FlatTree {
+ subtree: write_vec(w.subtree, SubtreeRepr::write),
+ literal: write_vec(w.literal, LiteralRepr::write),
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: write_vec(w.ident, IdentRepr::write),
+ token_tree: w.token_tree,
+ text: w.text,
+ };
+
+ fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
+ xs.into_iter().flat_map(f).collect()
+ }
+ }
+
+ pub fn to_subtree(self) -> tt::Subtree {
+ return Reader {
+ subtree: read_vec(self.subtree, SubtreeRepr::read),
+ literal: read_vec(self.literal, LiteralRepr::read),
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: read_vec(self.ident, IdentRepr::read),
+ token_tree: self.token_tree,
+ text: self.text,
+ }
+ .read();
+
+ fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
+ let mut chunks = xs.chunks_exact(N);
+ let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
+ assert!(chunks.remainder().is_empty());
+ res
+ }
+ }
+}
+
+impl SubtreeRepr {
+ fn write(self) -> [u32; 4] {
+ let kind = match self.kind {
+ None => 0,
+ Some(tt::DelimiterKind::Parenthesis) => 1,
+ Some(tt::DelimiterKind::Brace) => 2,
+ Some(tt::DelimiterKind::Bracket) => 3,
+ };
+ [self.id.0, kind, self.tt[0], self.tt[1]]
+ }
+ fn read([id, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
+ let kind = match kind {
+ 0 => None,
+ 1 => Some(tt::DelimiterKind::Parenthesis),
+ 2 => Some(tt::DelimiterKind::Brace),
+ 3 => Some(tt::DelimiterKind::Bracket),
+ other => panic!("bad kind {}", other),
+ };
+ SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] }
+ }
+}
+
+impl LiteralRepr {
+ fn write(self) -> [u32; 2] {
+ [self.id.0, self.text]
+ }
+ fn read([id, text]: [u32; 2]) -> LiteralRepr {
+ LiteralRepr { id: TokenId(id), text }
+ }
+}
+
+impl PunctRepr {
+ fn write(self) -> [u32; 3] {
+ let spacing = match self.spacing {
+ tt::Spacing::Alone => 0,
+ tt::Spacing::Joint => 1,
+ };
+ [self.id.0, self.char as u32, spacing]
+ }
+ fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
+ let spacing = match spacing {
+ 0 => tt::Spacing::Alone,
+ 1 => tt::Spacing::Joint,
+ other => panic!("bad spacing {}", other),
+ };
+ PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
+ }
+}
+
+impl IdentRepr {
+ fn write(self) -> [u32; 2] {
+ [self.id.0, self.text]
+ }
+ fn read(data: [u32; 2]) -> IdentRepr {
+ IdentRepr { id: TokenId(data[0]), text: data[1] }
+ }
+}
+
+struct Writer<'a> {
+ work: VecDeque<(usize, &'a tt::Subtree)>,
+ string_table: HashMap<&'a str, u32>,
+
+ subtree: Vec<SubtreeRepr>,
+ literal: Vec<LiteralRepr>,
+ punct: Vec<PunctRepr>,
+ ident: Vec<IdentRepr>,
+ token_tree: Vec<u32>,
+ text: Vec<String>,
+}
+
+impl<'a> Writer<'a> {
+ fn write(&mut self, root: &'a tt::Subtree) {
+ self.enqueue(root);
+ while let Some((idx, subtree)) = self.work.pop_front() {
+ self.subtree(idx, subtree);
+ }
+ }
+
+ fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
+ let mut first_tt = self.token_tree.len();
+ let n_tt = subtree.token_trees.len();
+ self.token_tree.resize(first_tt + n_tt, !0);
+
+ self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
+
+ for child in &subtree.token_trees {
+ let idx_tag = match child {
+ tt::TokenTree::Subtree(it) => {
+ let idx = self.enqueue(it);
+ idx << 2 | 0b00
+ }
+ tt::TokenTree::Leaf(leaf) => match leaf {
+ tt::Leaf::Literal(lit) => {
+ let idx = self.literal.len() as u32;
+ let text = self.intern(&lit.text);
+ self.literal.push(LiteralRepr { id: lit.id, text });
+ idx << 2 | 0b01
+ }
+ tt::Leaf::Punct(punct) => {
+ let idx = self.punct.len() as u32;
+ self.punct.push(PunctRepr {
+ char: punct.char,
+ spacing: punct.spacing,
+ id: punct.id,
+ });
+ idx << 2 | 0b10
+ }
+ tt::Leaf::Ident(ident) => {
+ let idx = self.ident.len() as u32;
+ let text = self.intern(&ident.text);
+ self.ident.push(IdentRepr { id: ident.id, text });
+ idx << 2 | 0b11
+ }
+ },
+ };
+ self.token_tree[first_tt] = idx_tag;
+ first_tt += 1;
+ }
+ }
+
+ fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
+ let idx = self.subtree.len();
+ let delimiter_id = subtree.delimiter.map_or(TokenId::unspecified(), |it| it.id);
+ let delimiter_kind = subtree.delimiter.map(|it| it.kind);
+ self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] });
+ self.work.push_back((idx, subtree));
+ idx as u32
+ }
+
+ pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
+ let table = &mut self.text;
+ *self.string_table.entry(text).or_insert_with(|| {
+ let idx = table.len();
+ table.push(text.to_string());
+ idx as u32
+ })
+ }
+}
+
+struct Reader {
+ subtree: Vec<SubtreeRepr>,
+ literal: Vec<LiteralRepr>,
+ punct: Vec<PunctRepr>,
+ ident: Vec<IdentRepr>,
+ token_tree: Vec<u32>,
+ text: Vec<String>,
+}
+
+impl Reader {
+ pub(crate) fn read(self) -> tt::Subtree {
+ let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
+ for i in (0..self.subtree.len()).rev() {
+ let repr = &self.subtree[i];
+ let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
+ let s = tt::Subtree {
+ delimiter: repr.kind.map(|kind| tt::Delimiter { id: repr.id, kind }),
+ token_trees: token_trees
+ .iter()
+ .copied()
+ .map(|idx_tag| {
+ let tag = idx_tag & 0b11;
+ let idx = (idx_tag >> 2) as usize;
+ match tag {
+ // XXX: we iterate subtrees in reverse to guarantee
+ // that this unwrap doesn't fire.
+ 0b00 => res[idx].take().unwrap().into(),
+ 0b01 => {
+ let repr = &self.literal[idx];
+ tt::Leaf::Literal(tt::Literal {
+ text: self.text[repr.text as usize].as_str().into(),
+ id: repr.id,
+ })
+ .into()
+ }
+ 0b10 => {
+ let repr = &self.punct[idx];
+ tt::Leaf::Punct(tt::Punct {
+ char: repr.char,
+ spacing: repr.spacing,
+ id: repr.id,
+ })
+ .into()
+ }
+ 0b11 => {
+ let repr = &self.ident[idx];
+ tt::Leaf::Ident(tt::Ident {
+ text: self.text[repr.text as usize].as_str().into(),
+ id: repr.id,
+ })
+ .into()
+ }
+ other => panic!("bad tag: {}", other),
+ }
+ })
+ .collect(),
+ };
+ res[i] = Some(s);
+ }
+
+ res[0].take().unwrap()
+ }
+}
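
Each entry of `token_tree` packs an arena index and a 2-bit tag into one `u32`, as described in the module docs. A standalone sketch of that encoding/decoding arithmetic (the tag values mirror the ones used by `Writer` and `Reader` above):

```rust
/// Which arena a packed child entry points into.
#[derive(Debug, Clone, Copy, PartialEq)]
enum ChildKind {
    Subtree, // tag 0b00
    Literal, // tag 0b01
    Punct,   // tag 0b10
    Ident,   // tag 0b11
}

fn encode_child(kind: ChildKind, idx: u32) -> u32 {
    let tag = match kind {
        ChildKind::Subtree => 0b00,
        ChildKind::Literal => 0b01,
        ChildKind::Punct => 0b10,
        ChildKind::Ident => 0b11,
    };
    idx << 2 | tag
}

fn decode_child(idx_tag: u32) -> (ChildKind, usize) {
    let kind = match idx_tag & 0b11 {
        0b00 => ChildKind::Subtree,
        0b01 => ChildKind::Literal,
        0b10 => ChildKind::Punct,
        _ => ChildKind::Ident,
    };
    (kind, (idx_tag >> 2) as usize)
}

fn main() {
    // The second entry in the `ident` arena encodes as 1 << 2 | 0b11 == 7.
    assert_eq!(encode_child(ChildKind::Ident, 1), 7);
    assert_eq!(decode_child(7), (ChildKind::Ident, 1));
}
```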
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
new file mode 100644
index 000000000..c4018d3b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -0,0 +1,107 @@
+//! Handles process lifetime and message passing for the proc-macro client
+
+use std::{
+ ffi::{OsStr, OsString},
+ io::{self, BufRead, BufReader, Write},
+ process::{Child, ChildStdin, ChildStdout, Command, Stdio},
+};
+
+use paths::{AbsPath, AbsPathBuf};
+use stdx::JodChild;
+
+use crate::{
+ msg::{Message, Request, Response},
+ ProcMacroKind, ServerError,
+};
+
+#[derive(Debug)]
+pub(crate) struct ProcMacroProcessSrv {
+ _process: Process,
+ stdin: ChildStdin,
+ stdout: BufReader<ChildStdout>,
+}
+
+impl ProcMacroProcessSrv {
+ pub(crate) fn run(
+ process_path: AbsPathBuf,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> io::Result<ProcMacroProcessSrv> {
+ let mut process = Process::run(process_path, args)?;
+ let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
+
+ let srv = ProcMacroProcessSrv { _process: process, stdin, stdout };
+
+ Ok(srv)
+ }
+
+ pub(crate) fn find_proc_macros(
+ &mut self,
+ dylib_path: &AbsPath,
+ ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
+ let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() };
+
+ let response = self.send_task(request)?;
+
+ match response {
+ Response::ListMacros(it) => Ok(it),
+ Response::ExpandMacro { .. } => {
+ Err(ServerError { message: "unexpected response".to_string(), io: None })
+ }
+ }
+ }
+
+ pub(crate) fn send_task(&mut self, req: Request) -> Result<Response, ServerError> {
+ let mut buf = String::new();
+ send_request(&mut self.stdin, &mut self.stdout, req, &mut buf)
+ }
+}
+
+#[derive(Debug)]
+struct Process {
+ child: JodChild,
+}
+
+impl Process {
+ fn run(
+ path: AbsPathBuf,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+ ) -> io::Result<Process> {
+ let args: Vec<OsString> = args.into_iter().map(|s| s.as_ref().into()).collect();
+ let child = JodChild(mk_child(&path, &args)?);
+ Ok(Process { child })
+ }
+
+ fn stdio(&mut self) -> Option<(ChildStdin, BufReader<ChildStdout>)> {
+ let stdin = self.child.stdin.take()?;
+ let stdout = self.child.stdout.take()?;
+ let read = BufReader::new(stdout);
+
+ Some((stdin, read))
+ }
+}
+
+fn mk_child(
+ path: &AbsPath,
+ args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+) -> io::Result<Child> {
+ Command::new(path.as_os_str())
+ .args(args)
+ .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::inherit())
+ .spawn()
+}
+
+fn send_request(
+ mut writer: &mut impl Write,
+ mut reader: &mut impl BufRead,
+ req: Request,
+ buf: &mut String,
+) -> Result<Response, ServerError> {
+ req.write(&mut writer)
+ .map_err(|err| ServerError { message: "failed to write request".into(), io: Some(err) })?;
+ let res = Response::read(&mut reader, buf)
+ .map_err(|err| ServerError { message: "failed to read response".into(), io: Some(err) })?;
+ res.ok_or_else(|| ServerError { message: "server exited".into(), io: None })
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
new file mode 100644
index 000000000..030531b80
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -0,0 +1,151 @@
+//! Reading proc-macro rustc version information from binary data
+
+use std::{
+ fs::File,
+ io::{self, Read},
+};
+
+use memmap2::Mmap;
+use object::read::{File as BinaryFile, Object, ObjectSection};
+use paths::AbsPath;
+use snap::read::FrameDecoder as SnapDecoder;
+
+#[derive(Debug)]
+pub struct RustCInfo {
+ pub version: (usize, usize, usize),
+ pub channel: String,
+ pub commit: Option<String>,
+ pub date: Option<String>,
+ // something like "rustc 1.58.1 (db9d1b20b 2022-01-20)"
+ pub version_string: String,
+}
+
+/// Read rustc dylib information
+pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
+ macro_rules! err {
+ ($e:literal) => {
+ io::Error::new(io::ErrorKind::InvalidData, $e)
+ };
+ }
+
+ let ver_str = read_version(dylib_path)?;
+ let mut items = ver_str.split_whitespace();
+ let tag = items.next().ok_or_else(|| err!("version format error"))?;
+ if tag != "rustc" {
+ return Err(err!("version format error (No rustc tag)"));
+ }
+
+ let version_part = items.next().ok_or_else(|| err!("no version string"))?;
+ let mut version_parts = version_part.split('-');
+ let version = version_parts.next().ok_or_else(|| err!("no version"))?;
+ let channel = version_parts.next().unwrap_or_default().to_string();
+
+ let commit = match items.next() {
+ Some(commit) => {
+ match commit.len() {
+ 0 => None,
+ _ => Some(commit[1..].to_string() /* remove ( */),
+ }
+ }
+ None => None,
+ };
+ let date = match items.next() {
+ Some(date) => {
+ match date.len() {
+ 0 => None,
+ _ => Some(date[0..date.len() - 2].to_string() /* remove ) */),
+ }
+ }
+ None => None,
+ };
+
+ let version_numbers = version
+ .split('.')
+ .map(|it| it.parse::<usize>())
+ .collect::<Result<Vec<_>, _>>()
+ .map_err(|_| err!("version number error"))?;
+
+ if version_numbers.len() != 3 {
+ return Err(err!("version number format error"));
+ }
+ let version = (version_numbers[0], version_numbers[1], version_numbers[2]);
+
+ Ok(RustCInfo { version, channel, commit, date, version_string: ver_str })
+}
+
+/// This is used inside read_version() to locate the ".rustc" section
+/// in a proc macro crate's binary file.
+fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'a [u8]> {
+ BinaryFile::parse(dylib_binary)
+ .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
+ .section_by_name(section_name)
+ .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "section read error"))?
+ .data()
+ .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
+}
+
+/// Check the version of rustc that was used to compile a proc macro crate's
+/// binary file.
+///
+/// A proc macro crate binary's ".rustc" section has the following byte layout:
+/// * the first 8 bytes are [b'r',b'u',b's',b't',0,0,0,5]
+/// * ff060000 734e6150 follows; these are the snappy format magic bytes,
+/// meaning that bytes from here on (including this sequence) are compressed
+/// in the snappy compression format. The version info is inside, so
+/// decompress this portion.
+/// The bytes obtained by decompressing the snappy portion have the
+/// following layout:
+/// * the first 8 bytes are [b'r',b'u',b's',b't',0,0,0,5] (again)
+/// * [crate root bytes] the next 4 bytes store the crate root position,
+/// according to a comment in rustc's source code
+/// * [length byte] the next byte tells us how many bytes to read next for
+/// the version string's utf8 bytes
+/// * [version string bytes encoded in utf8] <- this is what we want
+/// * [some more bytes that we don't care about, but they are still there]
+/// Check this issue for more about the bytes layout:
+/// <https://github.com/rust-lang/rust-analyzer/issues/6174>
+pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
+ let dylib_file = File::open(dylib_path)?;
+ let dylib_mmaped = unsafe { Mmap::map(&dylib_file) }?;
+
+ let dot_rustc = read_section(&dylib_mmaped, ".rustc")?;
+
+ // check if magic is valid
+ if &dot_rustc[0..4] != b"rust" {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unknown metadata magic, expected `rust`, found `{:?}`", &dot_rustc[0..4]),
+ ));
+ }
+ let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
+ // Last supported version is:
+ // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
+ match version {
+ 5 | 6 => {}
+ _ => {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidData,
+ format!("unsupported metadata version {}", version),
+ ));
+ }
+ }
+
+ let snappy_portion = &dot_rustc[8..];
+
+ let mut snappy_decoder = SnapDecoder::new(snappy_portion);
+
+ // the bytes before version string bytes, so this basically is:
+ // 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
+ // 4 bytes for [crate root bytes]
+ // 1 byte for length of version string
+ // so 13 bytes in total, and we should check the 13th byte
+ // to know the length
+ let mut bytes_before_version = [0u8; 13];
+ snappy_decoder.read_exact(&mut bytes_before_version)?;
+ let length = bytes_before_version[12];
+
+ let mut version_string_utf8 = vec![0u8; length as usize];
+ snappy_decoder.read_exact(&mut version_string_utf8)?;
+ let version_string = String::from_utf8(version_string_utf8);
+ version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
+}
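
A sketch of how a client might use the re-exported `read_dylib_info` to inspect which toolchain built a proc-macro dylib; the caller and the path it passes are hypothetical, and would normally come from the build output:

```rust
use paths::AbsPath;
use proc_macro_api::read_dylib_info;

fn print_rustc_info(dylib: &AbsPath) -> std::io::Result<()> {
    let info = read_dylib_info(dylib)?;
    println!(
        "proc-macro dylib built by rustc {}.{}.{} ({} channel)",
        info.version.0, info.version.1, info.version.2, info.channel
    );
    Ok(())
}
```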
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
new file mode 100644
index 000000000..9d0da5dee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[dependencies]
+proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" }
+
+[features]
+sysroot-abi = ["proc-macro-srv/sysroot-abi"]
+
+[[bin]]
+name = "rust-analyzer-proc-macro-srv"
+path = "src/main.rs"
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
new file mode 100644
index 000000000..ac9fa9f5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
@@ -0,0 +1,19 @@
+//! A standalone binary for `proc-macro-srv`.
+
+use proc_macro_srv::cli;
+
+fn main() -> std::io::Result<()> {
+ let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE");
+ match v.as_deref() {
+ Ok("this is unstable") => {
+ // very well, if you must
+ }
+ _ => {
+ eprintln!("If you're rust-analyzer, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE='this is unstable'.");
+ eprintln!("If not, you probably shouldn't use this tool. But do what you want: I'm an error message, not a cop.");
+ std::process::exit(122);
+ }
+ }
+
+ cli::run()
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
new file mode 100644
index 000000000..5746eac0b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -0,0 +1,36 @@
+[package]
+name = "proc-macro-srv"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+libloading = "0.7.3"
+memmap2 = "0.5.4"
+
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+crossbeam = "0.8.1"
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+# used as proc macro test targets
+proc-macro-test = { path = "../proc-macro-test" }
+
+[features]
+sysroot-abi = []
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs
new file mode 100644
index 000000000..a8c732f31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs
@@ -0,0 +1,25 @@
+//! Determine the rustc version `proc-macro-srv` (and thus the sysroot ABI) is
+//! built with and make it accessible at runtime for ABI selection.
+
+use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
+
+fn main() {
+ let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ path.push("rustc_version.rs");
+ let mut f = File::create(&path).unwrap();
+
+ let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
+ let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
+ let version_string = std::str::from_utf8(&output.stdout[..])
+ .expect("rustc --version output must be UTF-8")
+ .trim();
+
+ write!(
+ f,
+ "
+ #[allow(dead_code)]
+ pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?};
+ "
+ )
+ .unwrap();
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
new file mode 100644
index 000000000..1c91ac0fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
@@ -0,0 +1,104 @@
+//! Macro ABI for version 1.58 of rustc
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+ ra_server::TokenStream::with_subtree(attr.clone())
+ });
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..d82669d3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
@@ -0,0 +1,143 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer<T: Copy> {
+ data: *mut T,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer<T>, usize) -> Buffer<T>,
+ drop: extern "C" fn(Buffer<T>),
+}
+
+unsafe impl<T: Copy + Sync> Sync for Buffer<T> {}
+unsafe impl<T: Copy + Send> Send for Buffer<T> {}
+
+impl<T: Copy> Default for Buffer<T> {
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl<T: Copy> Deref for Buffer<T> {
+ type Target = [T];
+ fn deref(&self) -> &[T] {
+ unsafe { slice::from_raw_parts(self.data as *const T, self.len) }
+ }
+}
+
+impl<T: Copy> DerefMut for Buffer<T> {
+ fn deref_mut(&mut self) -> &mut [T] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl<T: Copy> Buffer<T> {
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ pub(super) fn extend_from_slice(&mut self, xs: &[T]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ pub(super) fn push(&mut self, v: T) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer<u8> {
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl<T: Copy> Drop for Buffer<T> {
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl<T: Copy> From<Vec<T>> for Buffer<T> {
+ fn from(mut v: Vec<T>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec<T: Copy>(b: Buffer<T>) -> Vec<T> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve<T: Copy>(b: Buffer<T>, additional: usize) -> Buffer<T> {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop<T: Copy>(b: Buffer<T>) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
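
Since the growth methods are internal to the bridge, the sketch below is written as a test one might drop into this file itself; it only shows that data round-trips through the C-ABI-friendly representation with `Vec`-like semantics:

```rust
#[cfg(test)]
mod buffer_tests {
    use super::Buffer;

    #[test]
    fn roundtrips_through_c_abi_representation() {
        // `From<Vec<T>>` hands ownership of the allocation to the Buffer,
        // recording `reserve`/`drop` function pointers from this copy of
        // the library.
        let mut b = Buffer::from(vec![1u8, 2, 3]);

        // Growing past the current capacity goes through the `reserve`
        // function pointer, which rebuilds the Vec, grows it, and re-wraps it.
        b.extend_from_slice(&[4, 5]);

        // Deref exposes the contents as a plain slice.
        assert_eq!(&*b, &[1, 2, 3, 4, 5]);
    } // Drop frees the allocation via the originating Vec.
}
```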
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..ed0e91da3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
@@ -0,0 +1,485 @@
+//! Client-side types.
+
+use super::*;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty(handle::Handle);
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty(self.0).drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.0;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty(handle::Handle::decode(r, s))
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity(handle::Handle);
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity(handle::Handle::decode(r, s))
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ TokenStreamBuilder,
+ TokenStreamIter,
+ Group,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Punct,
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special 'modes' could be listed for types in with_api
+// instead of pattern matching on methods, here and in server decl.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for TokenStreamIter {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Group {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut b = bridge.cached_buffer.take();
+
+ b.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut b, &mut ());
+ reverse_encode!(b; $($arg),*);
+
+ b = bridge.dispatch.call(b);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &b[..], &mut ());
+
+ bridge.cached_buffer = b;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+ /// The state will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+ }
+
+ fn enter<R>(self, f: impl FnOnce() -> R) -> R {
+ let force_show_panics = self.force_show_panics;
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+
+ BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
+ }
+
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+/// A client-side "global object" (usually a function pointer),
+/// which may be using a different `proc_macro` from the one
+/// used by the server, but can be interacted with compatibly.
+///
+/// N.B., `F` must have FFI-friendly memory layout (e.g., a pointer).
+/// The call ABI of function pointers used for `F` doesn't
+/// need to match between server and client, since it's only
+/// passed between them and (eventually) called by the client.
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub struct Client<F> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+ pub(super) run: extern "C" fn(Bridge<'_>, F) -> Buffer<u8>,
+ pub(super) f: F,
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ mut bridge: Bridge<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer<u8> {
+ // The initial `cached_buffer` contains the input.
+ let mut b = bridge.cached_buffer.take();
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ bridge.enter(|| {
+ let reader = &mut &b[..];
+ let input = A::decode(reader, &mut ());
+
+ // Put the `cached_buffer` back in the `Bridge`, for requests.
+ Bridge::with(|bridge| bridge.cached_buffer = b.take());
+
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ b = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ b.clear();
+ Ok::<_, ()>(output).encode(&mut b, &mut ());
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ b.clear();
+ Err::<(), _>(e).encode(&mut b, &mut ());
+ });
+ b
+}
+
+impl Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn expand1(f: fn(super::super::TokenStream) -> super::super::TokenStream) -> Self {
+ extern "C" fn run(
+ bridge: Bridge<'_>,
+ f: impl FnOnce(super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Buffer<u8> {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }
+ Client { get_handle_counters: HandleCounters::get, run, f }
+ }
+}
+
+impl Client<fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn expand2(
+ f: fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Self {
+ extern "C" fn run(
+ bridge: Bridge<'_>,
+ f: impl FnOnce(
+ super::super::TokenStream,
+ super::super::TokenStream,
+ ) -> super::super::TokenStream,
+ ) -> Buffer<u8> {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }
+ Client { get_handle_counters: HandleCounters::get, run, f }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: fn(super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub fn attr(
+ name: &'static str,
+ expand: fn(
+ super::super::TokenStream,
+ super::super::TokenStream,
+ ) -> super::super::TokenStream,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub fn bang(
+ name: &'static str,
+ expand: fn(super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..5be71cc3d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
@@ -0,0 +1,24 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(&mut Env, A) -> R,
+ env: &'a mut Env,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: &mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: unsafe { &mut *(f as *mut _ as *mut Env) } }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
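
A sketch of what the `repr(C)` closure wrapper buys, assuming it sits next to the definition above (the type is internal to the bridge): an `FnMut` is erased to a C-compatible function pointer plus an opaque environment pointer, and called back through `Closure::call`:

```rust
fn closure_demo() {
    let mut total = 0u32;
    let mut add = |n: u32| {
        total += n;
        total
    };

    // `From<&mut F>` erases the concrete closure type behind a C-compatible
    // function pointer plus an environment pointer.
    let mut c: Closure<'_, u32, u32> = Closure::from(&mut add);

    assert_eq!(c.call(2), 2);
    assert_eq!(c.call(3), 5);
}
```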
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..bcbb86812
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
@@ -0,0 +1,70 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hash;
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore { owned: OwnedStore::new(counter), interner: HashMap::new() }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..b7968c529
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
@@ -0,0 +1,429 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the argument names
+/// and argument/return types, to enable several different use cases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn new() -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
+ },
+ TokenStreamBuilder {
+ fn drop($self: $S::TokenStreamBuilder);
+ fn new() -> $S::TokenStreamBuilder;
+ fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
+ fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
+ },
+ TokenStreamIter {
+ fn drop($self: $S::TokenStreamIter);
+ fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
+ fn next(
+ $self: &mut $S::TokenStreamIter,
+ ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+ },
+ Group {
+ fn drop($self: $S::Group);
+ fn clone($self: &$S::Group) -> $S::Group;
+ fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+ fn delimiter($self: &$S::Group) -> Delimiter;
+ fn stream($self: &$S::Group) -> $S::TokenStream;
+ fn span($self: &$S::Group) -> $S::Span;
+ fn span_open($self: &$S::Group) -> $S::Span;
+ fn span_close($self: &$S::Group) -> $S::Span;
+ fn set_span($self: &mut $S::Group, span: $S::Span);
+ },
+ Punct {
+ fn new(ch: char, spacing: Spacing) -> $S::Punct;
+ fn as_char($self: $S::Punct) -> char;
+ fn spacing($self: $S::Punct) -> Spacing;
+ fn span($self: $S::Punct) -> $S::Span;
+ fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn def_site() -> $S::Span;
+ fn call_site() -> $S::Span;
+ fn mixed_site() -> $S::Span;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
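// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the
// higher-order-macro pattern that `with_api!` uses. One macro owns the API
// description and is handed the *name* of a callback macro; each callback
// turns the same description into a different artifact. The method names and
// macros below are invented for this example only.
macro_rules! with_methods {
    ($callback:ident) => {
        $callback! {
            is_empty,
            to_string,
            from_str,
        }
    };
}

// One callback turns the description into an enum of method tags...
macro_rules! declare_tags {
    ($($method:ident),* $(,)?) => {
        #[allow(non_camel_case_types, dead_code)]
        #[derive(Debug)]
        enum MethodTag {
            $($method),*
        }
    };
}

// ...another turns the very same description into a table of method names.
macro_rules! declare_names {
    ($($method:ident),* $(,)?) => {
        const METHOD_NAMES: &[&str] = &[$(stringify!($method)),*];
    };
}

with_methods!(declare_tags);
with_methods!(declare_names);

fn main() {
    assert_eq!(METHOD_NAMES, &["is_empty", "to_string", "from_str"]);
    println!("{:?}", MethodTag::is_empty);
}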
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
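// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the
// recursion trick used by `reverse_encode!`/`reverse_decode!`. Because the
// recursive call is emitted before the statement for the first argument, the
// arguments end up being processed back to front.
macro_rules! reverse_push {
    ($out:ident;) => {};
    ($out:ident; $first:expr $(, $rest:expr)*) => {
        reverse_push!($out; $($rest),*);
        $out.push($first);
    };
}

fn main() {
    let mut out = Vec::new();
    reverse_push!(out; 1, 2, 3);
    // The first listed argument is pushed last.
    assert_eq!(out, vec![3, 2, 1]);
}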
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// An active connection between a server and a client.
+/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run`
+/// field of `client::Client`. The client holds its copy of the `Bridge`
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+ /// used for making requests, but also for passing input to client.
+ cached_buffer: Buffer<u8>,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer<u8>, Buffer<u8>>,
+
+ /// If `true`, always invoke the default panic hook
+ force_show_panics: bool,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Option<T> {
+ type Unmarked = Option<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked.map(T::mark)
+ }
+}
+impl<T: Unmark> Unmark for Option<T> {
+ type Unmarked = Option<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ self.map(T::unmark)
+ }
+}
+
+impl<T: Mark, E: Mark> Mark for Result<T, E> {
+ type Unmarked = Result<T::Unmarked, E::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked.map(T::mark).map_err(E::mark)
+ }
+}
+impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
+ type Unmarked = Result<T::Unmarked, E::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ self.map(T::unmark).map_err(E::unmark)
+ }
+}
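// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): why the
// zero-sized marker in `Marked<T, M>` matters. Two impls keyed only on an
// associated type could collide when both associated types resolve to the same
// concrete type; wrapping each one with a distinct marker keeps the impls on
// distinct types. The tag types below are invented for this example.
use std::marker::PhantomData;

struct Marked<T, M> {
    value: T,
    _marker: PhantomData<M>,
}

struct FooTag;
struct BarTag;

trait Describe {
    fn describe(&self) -> String;
}

// Both wrappers carry the same inner type (`u32`), yet these impls do not
// overlap because the marker type differs.
impl Describe for Marked<u32, FooTag> {
    fn describe(&self) -> String {
        format!("Foo handle #{}", self.value)
    }
}
impl Describe for Marked<u32, BarTag> {
    fn describe(&self) -> String {
        format!("Bar handle #{}", self.value)
    }
}

fn main() {
    let foo = Marked { value: 1u32, _marker: PhantomData::<FooTag> };
    let bar = Marked { value: 1u32, _marker: PhantomData::<BarTag> };
    println!("{} / {}", foo.describe(), bar.describe());
}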
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+ Bound<usize>,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+#[derive(Clone)]
+pub enum TokenTree<G, P, I, L> {
+ Group(G),
+ Punct(P),
+ Ident(I),
+ Literal(L),
+}
+
+impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
+ type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
+ TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
+ TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
+ TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
+ }
+ }
+}
+impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
+ type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
+ TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
+ TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
+ TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
+ }
+ }
+}
+
+rpc_encode_decode!(
+ enum TokenTree<G, P, I, L> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..d50564d01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
@@ -0,0 +1,305 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::ops::Bound;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer<u8>;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident { $($field:ident),* $(,)? }) => {
+ impl<S> Encode<S> for $name {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $name {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+rpc_encode_decode!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+rpc_encode_decode!(
+ enum Option<T> {
+ None,
+ Some(x),
+ }
+);
+
+rpc_encode_decode!(
+ enum Result<T, E> {
+ Ok(x),
+ Err(e),
+ }
+);
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
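// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the wire
// format implemented above, reduced to a plain `Vec<u8>`. Integers are
// little-endian, strings are length-prefixed, and decoding advances a `&[u8]`
// reader in place. Function names here are invented for the example.
fn encode_usize(buf: &mut Vec<u8>, value: usize) {
    buf.extend_from_slice(&value.to_le_bytes());
}

fn decode_usize(reader: &mut &[u8]) -> usize {
    const N: usize = std::mem::size_of::<usize>();
    let mut bytes = [0u8; N];
    bytes.copy_from_slice(&reader[..N]);
    *reader = &reader[N..];
    usize::from_le_bytes(bytes)
}

fn encode_str(buf: &mut Vec<u8>, value: &str) {
    encode_usize(buf, value.len());
    buf.extend_from_slice(value.as_bytes());
}

fn decode_str<'a>(reader: &mut &'a [u8]) -> &'a str {
    let len = decode_usize(reader);
    let (head, tail) = reader.split_at(len);
    *reader = tail;
    std::str::from_utf8(head).unwrap()
}

fn main() {
    let mut buf = Vec::new();
    encode_str(&mut buf, "proc_macro");
    encode_usize(&mut buf, 42);

    // Decoding consumes the reader front to back, in the same order.
    let mut reader = &buf[..];
    assert_eq!(decode_str(&mut reader), "proc_macro");
    assert_eq!(decode_usize(&mut reader), 42);
    assert!(reader.is_empty());
}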
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..b0c2e5b9c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// on panic, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` panics.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
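// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the
// "put the old value back on drop" discipline that `ScopedCell::replace`
// relies on, shown for a plain `Cell<Option<T>>` and without the
// lifetime-erasing transmute. The original contents are restored whether the
// closure returns normally or unwinds.
use std::cell::Cell;

struct Scoped<T>(Cell<Option<T>>);

impl<T> Scoped<T> {
    fn new(value: T) -> Self {
        Scoped(Cell::new(Some(value)))
    }

    fn set<R>(&self, replacement: T, f: impl FnOnce() -> R) -> R {
        struct PutBack<'a, T> {
            cell: &'a Cell<Option<T>>,
            old: Option<T>,
        }
        impl<'a, T> Drop for PutBack<'a, T> {
            fn drop(&mut self) {
                // Runs on normal exit and on unwind alike.
                self.cell.set(self.old.take());
            }
        }

        let guard = PutBack { cell: &self.0, old: self.0.replace(Some(replacement)) };
        let result = f();
        drop(guard);
        result
    }
}

fn main() {
    let cell = Scoped::new(1);
    cell.set(2, || {
        // While this closure runs, the cell holds the replacement value.
    });
    // Afterwards the original value is back.
    assert_eq!(cell.0.take(), Some(1));
}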
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..06a197913
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
@@ -0,0 +1,352 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+/// Declare an associated item of one of the traits below, optionally
+/// adjusting it (i.e., adding bounds to types and default bodies to methods).
+macro_rules! associated_item {
+ (type FreeFunctions) =>
+ (type FreeFunctions: 'static;);
+ (type TokenStream) =>
+ (type TokenStream: 'static + Clone;);
+ (type TokenStreamBuilder) =>
+ (type TokenStreamBuilder: 'static;);
+ (type TokenStreamIter) =>
+ (type TokenStreamIter: 'static + Clone;);
+ (type Group) =>
+ (type Group: 'static + Clone;);
+ (type Punct) =>
+ (type Punct: 'static + Copy + Eq + Hash;);
+ (type Ident) =>
+ (type Ident: 'static + Copy + Eq + Hash;);
+ (type Literal) =>
+ (type Literal: 'static + Clone;);
+ (type SourceFile) =>
+ (type SourceFile: 'static + Clone;);
+ (type MultiSpan) =>
+ (type MultiSpan: 'static;);
+ (type Diagnostic) =>
+ (type Diagnostic: 'static;);
+ (type Span) =>
+ (type Span: 'static + Copy + Eq + Hash;);
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, b: Buffer<u8>) -> Buffer<u8>;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut b: Buffer<u8>) -> Buffer<u8> {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &b[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ b.clear();
+ r.encode(&mut b, handle_store);
+ })*
+ }),*
+ }
+ b
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8>;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ let mut dispatch = |b| dispatcher.dispatch(b);
+
+ run_client(
+ Bridge { cached_buffer: input, dispatch: (&mut dispatch).into(), force_show_panics },
+ client_data,
+ )
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided; the second one is a bit
+// faster, but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ req_tx.send(b).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(
+ Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ },
+ client_data,
+ )
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(
+ Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ },
+ client_data,
+ );
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+ D: Copy + Send + 'static,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let mut b = Buffer::new();
+ input.encode(&mut b, &mut dispatcher.handle_store);
+
+ b = strategy.run_bridge_and_client(
+ &mut dispatcher,
+ b,
+ run_client,
+ client_data,
+ force_show_panics,
+ );
+
+ Result::decode(&mut &b[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn run<S: Server>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage> {
+ let client::Client { get_handle_counters, run, f } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ f,
+ force_show_panics,
+ )
+ .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+ }
+}
+
+impl
+ client::Client<
+ fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ >
+{
+ pub fn run<S: Server>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage> {
+ let client::Client { get_handle_counters, run, f } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ f,
+ force_show_panics,
+ )
+ .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+ }
+}
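// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the
// `CrossThread1` shape in miniature. The client runs on its own thread and
// sends request buffers over a channel; the server loops over incoming
// requests, dispatches each one, and sends the response back. The loop ends
// when the client's sender is dropped. The `dispatch` stand-in below is
// invented for this example.
use std::sync::mpsc::channel;
use std::thread;

fn dispatch(request: Vec<u8>) -> Vec<u8> {
    // Stand-in for `Dispatcher::dispatch`: echo the request back, uppercased.
    request.to_ascii_uppercase()
}

fn main() {
    let (req_tx, req_rx) = channel::<Vec<u8>>();
    let (res_tx, res_rx) = channel::<Vec<u8>>();

    let client = thread::spawn(move || {
        let call = |b: Vec<u8>| {
            req_tx.send(b).unwrap();
            res_rx.recv().unwrap()
        };
        // A single "RPC" round trip; dropping `req_tx` at the end of this
        // closure is what terminates the server loop below.
        call(b"token stream".to_vec())
    });

    for request in req_rx {
        res_tx.send(dispatch(request)).unwrap();
    }

    assert_eq!(client.join().unwrap(), b"TOKEN STREAM".to_vec());
}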
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..cda239f87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copied from <https://github.com/rust-lang/rust/blob/6050e523bae6de61de4e060facc43dc512adaccd/src/libproc_macro/diagnostic.rs>,
+//! with unstable features removed.
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
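// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the
// builder-style shape that `Diagnostic` exposes, reduced to a standalone
// struct. Child messages accumulate through chained methods, which is what
// `diagnostic_child_methods!` generates for each level.
#[derive(Debug, Clone, Copy)]
enum Level {
    Error,
    Note,
    Help,
}

#[derive(Debug)]
struct Diagnostic {
    level: Level,
    message: String,
    children: Vec<Diagnostic>,
}

impl Diagnostic {
    fn new(level: Level, message: impl Into<String>) -> Self {
        Diagnostic { level, message: message.into(), children: Vec::new() }
    }

    fn note(mut self, message: impl Into<String>) -> Self {
        self.children.push(Diagnostic::new(Level::Note, message));
        self
    }

    fn help(mut self, message: impl Into<String>) -> Self {
        self.children.push(Diagnostic::new(Level::Help, message));
        self
    }
}

fn main() {
    let diag = Diagnostic::new(Level::Error, "mismatched delimiter")
        .note("the opening `(` is here")
        .help("consider adding a closing `)`");
    assert_eq!(diag.children.len(), 2);
    println!("{:?}", diag);
}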
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
new file mode 100644
index 000000000..4a07f2277
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
@@ -0,0 +1,1056 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+ bridge::Bridge::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(bridge::client::TokenStream);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(bridge::client::TokenStream::new())
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ match bridge::client::TokenStream::expand_expr(&self.0) {
+ Ok(stream) => Ok(TokenStream(stream)),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints token in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }))
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = bridge::client::TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream.0));
+ TokenStream(builder.build())
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(bridge::client::TokenStreamIter);
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.into_iter())
+ }
+ }
+}
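// ---------------------------------------------------------------------------
// Illustrative sketch (standalone, not part of the vendored file): the two
// `FromIterator` impls and the `Extend` impl above in miniature. A stream can
// be collected from individual items (each wrapped into a one-element stream
// first) or from other streams (a flattening collect), and `Extend` is written
// as `iter::once(old).chain(new).collect()`. The `Stream` type is invented for
// this example.
use std::iter::FromIterator;

#[derive(Debug, Default, PartialEq)]
struct Stream(Vec<u32>);

impl From<u32> for Stream {
    fn from(item: u32) -> Stream {
        Stream(vec![item])
    }
}

impl FromIterator<u32> for Stream {
    fn from_iter<I: IntoIterator<Item = u32>>(items: I) -> Self {
        // Wrap each item into a stream, then flatten with the impl below.
        items.into_iter().map(Stream::from).collect()
    }
}

impl FromIterator<Stream> for Stream {
    fn from_iter<I: IntoIterator<Item = Stream>>(streams: I) -> Self {
        let mut out = Vec::new();
        streams.into_iter().for_each(|s| out.extend(s.0));
        Stream(out)
    }
}

impl Extend<Stream> for Stream {
    fn extend<I: IntoIterator<Item = Stream>>(&mut self, streams: I) {
        *self = std::iter::once(std::mem::take(self)).chain(streams).collect();
    }
}

fn main() {
    let mut s: Stream = vec![1u32, 2, 3].into_iter().collect();
    s.extend([Stream(vec![4]), Stream(vec![5])]);
    assert_eq!(s, Stream(vec![1, 2, 3, 4, 5]));
}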
+
+/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
+/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
+/// the `TokenStream` `[Ident("a"), Punct('+', Alone), Ident("b")]`.
+///
+/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+/// To quote `$` itself, use `$$`.
+//pub macro quote($($t:tt)*) {
+//[> compiler built-in <]
+//}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the origin source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external macro,
+ /// this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens; it will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::client::Group);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An implicit delimiter, that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Implicit delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::client::Group::new(delimiter, stream.0))
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter()
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(self.0.stream())
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span_close())
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::client::Punct);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language;
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct(bridge::client::Punct::new(ch, spacing))
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.as_char()
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ self.0.spacing()
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
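
As a concrete reading of the `Spacing` rules above, a multi-character operator such as `+=` is emitted as two `Punct` tokens, the first `Joint` and the second `Alone`. A hedged sketch using the public `proc_macro` API (helper name illustrative):

```rust
use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

// Builds the two-token sequence for `+=`: '+' is Joint because it is glued to
// the following punct, '=' is Alone because the operator ends there.
fn plus_eq() -> TokenStream {
    [
        TokenTree::Punct(Punct::new('+', Spacing::Joint)),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}
```
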
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene,
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene, this constructor, unlike those for
+ /// other tokens, requires a `Span` to be specified at construction.
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
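
A short sketch of the two constructors above, again against the public `proc_macro` API (identifier names are illustrative):

```rust
use proc_macro::{Ident, Span, TokenStream, TokenTree};

// `value` is an ordinary identifier; `type` is a keyword and therefore has to
// be created as the raw identifier `r#type` via `new_raw`.
fn idents() -> TokenStream {
    [
        TokenTree::Ident(Ident::new("value", Span::call_site())),
        TokenTree::Ident(Ident::new_raw("type", Span::call_site())),
    ]
    .into_iter()
    .collect()
}
```
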
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integer type
+ /// (`u32` here) is appended as a suffix at the end.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated with this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. Otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
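
A hedged sketch of the constructors and the `FromStr` impl described above, using the public `proc_macro` API inside a proc-macro crate (the values are arbitrary):

```rust
use std::str::FromStr;
use proc_macro::Literal;

// All of these literals get a `Span::call_site()` span by default.
fn literal_examples() {
    let suffixed = Literal::u8_suffixed(7);        // prints as `7u8`
    let unsuffixed = Literal::f64_unsuffixed(1.0); // prints as `1.0`
    let text = Literal::string("hi\n");            // prints as an escaped string literal
    let parsed = Literal::from_str("0xffu16").expect("a single valid literal token");
    let _ = (suffixed, unsuffixed, text, parsed);
}
```
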
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieves an environment variable and adds it to the build dependency info.
+ /// The build system executing the compiler will know that the variable was accessed during
+ /// compilation and will be able to rerun the build when the value of that variable changes.
+ /// Aside from the dependency tracking, this function is equivalent to `env::var` from the
+ /// standard library, except that the argument must be valid UTF-8.
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
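
Both modules are thin wrappers over bridge free functions. A sketch of the intended usage inside a procedural macro; note that these APIs are unstable, and the variable name and file path below are placeholders:

```rust
// Requires the corresponding nightly features in a real proc-macro crate.
fn read_tracked_inputs() -> Option<String> {
    // Re-runs the build when MY_MACRO_CONFIG changes (placeholder variable name).
    let value = proc_macro::tracked_env::var("MY_MACRO_CONFIG").ok();
    // Declares a data file as a build dependency (placeholder path).
    proc_macro::tracked_path::path("assets/template.txt");
    value
}
```
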
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
new file mode 100644
index 000000000..b539ab9c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
@@ -0,0 +1,140 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (:) => { Punct::new(':', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(crate::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<crate::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(crate::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(crate::TokenTree::Punct(crate::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(crate::Spacing::Alone),
+ Spacing::Joint => quote!(crate::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(crate::TokenTree::Group(crate::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(crate::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(crate::Delimiter::Brace),
+ Delimiter::Bracket => quote!(crate::Delimiter::Bracket),
+ Delimiter::None => quote!(crate::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(crate::TokenTree::Ident(crate::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(crate::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<crate::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(crate::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<crate::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
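
To make the `(@ expr)` unquoting convention concrete, here is a hedged sketch of how code inside this module could use the bootstrap `quote!`; the helper is illustrative and not part of the file:

```rust
// Splices an existing stream into braces. The real `quote!` would be written
// as `quote!({ $inner })`; this bootstrap macro uses `(@ inner)` instead.
fn wrap_in_braces(inner: TokenStream) -> TokenStream {
    quote!({ (@ inner) })
}
```
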
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
new file mode 100644
index 000000000..ebdfca00d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
@@ -0,0 +1,819 @@
+//! Rustc proc-macro server implementation backed by `tt`
+//!
+//! Based on an idea from <https://github.com/fedochet/rust-proc-macro-expander>.
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so any
+//! `TokenStream` implementation could be plugged in.
+//! The original idea from fedochet uses proc-macro2 as the backend;
+//! we use `tt` instead for better integration with rust-analyzer.
+//!
+//! FIXME: No span or source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream { token_trees: Default::default() }
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// The rustc server `Ident` type has to be `Copy`.
+// We use an interned index as a stub to work around that.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
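
A small sketch of the interner's contract: equal identifier data always maps to the same index, which is what lets `IdentId` stay `Copy`. The helper is illustrative and assumes it lives in this module:

```rust
// Interning the same identifier twice yields the same index, and `get`
// round-trips back to the underlying tt::Ident data.
fn intern_twice(interner: &mut IdentInterner, ident: tt::Ident) -> bool {
    let first = interner.intern(&IdentData(ident.clone()));
    let second = interner.intern(&IdentData(ident));
    first == second && interner.get(first) == interner.get(second)
}
```
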
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters that do not exist in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
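
A hedged sketch of the round-trip this module enables: parse source text with `FromStr`, print it back with `ToString`. All span information is dropped along the way (illustrative helper):

```rust
// Parses source text into this TokenStream and prints it back; every token id
// comes out as tt::TokenId::unspecified().
fn roundtrip(src: &str) -> Result<String, String> {
    use std::str::FromStr;
    let stream = TokenStream::from_str(src)?;
    Ok(stream.to_string())
}
```
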
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type TokenStreamBuilder = TokenStreamBuilder;
+ type TokenStreamIter = TokenStreamIter;
+ type Group = Group;
+ type Punct = Punct;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn new(&mut self) -> Self::TokenStream {
+ Self::TokenStream::new()
+ }
+
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let leaf = tt::Leaf::from(p);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
+ let trees: Vec<TokenTree> = stream.into_iter().collect();
+ TokenStreamIter { trees: trees.into_iter() }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+}
+
+impl server::TokenStreamBuilder for RustAnalyzer {
+ fn new(&mut self) -> Self::TokenStreamBuilder {
+ Self::TokenStreamBuilder::new()
+ }
+ fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
+ builder.push(stream)
+ }
+ fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+ builder.build()
+ }
+}
+
+impl server::TokenStreamIter for RustAnalyzer {
+ fn next(
+ &mut self,
+ iter: &mut Self::TokenStreamIter,
+ ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+ iter.trees.next().map(|tree| match tree {
+ TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
+ TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
+ TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+ })
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
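
A tiny sketch of the invariant these helpers encode: only `Delimiter::None` is "transparent" and maps to the absence of a `tt::Delimiter` (illustrative):

```rust
// True exactly for bridge::Delimiter::None; the other three delimiters map to
// a concrete tt::DelimiterKind with an unspecified TokenId.
fn is_transparent(d: bridge::Delimiter) -> bool {
    delim_to_internal(d).is_none()
}
```
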
+
+impl server::Group for RustAnalyzer {
+ fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
+ Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
+ }
+ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+ delim_to_external(group.delimiter)
+ }
+
+ // NOTE: The returned token stream does not include the delimiter.
+ fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+ TokenStream { token_trees: group.token_trees.clone() }
+ }
+
+ fn span(&mut self, group: &Self::Group) -> Self::Span {
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+ if let Some(delim) = &mut group.delimiter {
+ delim.id = span;
+ }
+ }
+
+ fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+}
+
+impl server::Punct for RustAnalyzer {
+ fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+ tt::Punct {
+ char: ch,
+ spacing: spacing_to_internal(spacing),
+ id: tt::TokenId::unspecified(),
+ }
+ }
+ fn as_char(&mut self, punct: Self::Punct) -> char {
+ punct.char
+ }
+ fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+ spacing_to_external(punct.spacing)
+ }
+ fn span(&mut self, punct: Self::Punct) -> Self::Span {
+ punct.id
+ }
+ fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+ tt::Punct { id: span, ..punct }
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn def_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn call_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet in stable `proc_macro`.
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn mixed_site(&mut self) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // TODO
+ other.push(span)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
new file mode 100644
index 000000000..76e89e319
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
@@ -0,0 +1,105 @@
+//! Macro ABI for version 1.63 of rustc
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub use ra_server::TokenStream;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes =
+ attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone()));
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
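
A hedged sketch of how this ABI is driven end to end. The dylib path and registrar symbol below are placeholders; in reality rust-analyzer discovers both through `proc_macro_api` and the dylib's metadata:

```rust
// Loads a proc-macro dylib and expands a function-like macro through this ABI.
fn expand_once(body: &tt::Subtree) -> Result<tt::Subtree, PanicMessage> {
    let lib = unsafe { libloading::Library::new("libmy_macros.so") }.expect("dylib loads");
    let abi = unsafe { Abi::from_lib(&lib, "__hypothetical_registrar_symbol".to_owned()) }
        .expect("registrar symbol found");
    abi.expand("my_macro", body, None)
}
```
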
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..48030f8d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs
@@ -0,0 +1,156 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer {
+ data: *mut u8,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer, usize) -> Buffer,
+ drop: extern "C" fn(Buffer),
+}
+
+unsafe impl Sync for Buffer {}
+unsafe impl Send for Buffer {}
+
+impl Default for Buffer {
+ #[inline]
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl Deref for Buffer {
+ type Target = [u8];
+ #[inline]
+ fn deref(&self) -> &[u8] {
+ unsafe { slice::from_raw_parts(self.data as *const u8, self.len) }
+ }
+}
+
+impl DerefMut for Buffer {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [u8] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl Buffer {
+ #[inline]
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ #[inline]
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ #[inline]
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ #[inline]
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[u8; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn extend_from_slice(&mut self, xs: &[u8]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn push(&mut self, v: u8) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer {
+ #[inline]
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for Buffer {
+ #[inline]
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl From<Vec<u8>> for Buffer {
+ fn from(mut v: Vec<u8>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec(b: Buffer) -> Vec<u8> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop(b: Buffer) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
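
A short sketch of how the buffer is used as a growable byte sink. It would live inside this bridge module, since the constructors are `pub(super)`, and `Write` is already imported at the top of the file:

```rust
// Buffer behaves like a Vec<u8> whose growth and drop are routed through the
// `reserve`/`drop` function pointers, so it can safely cross the client/server boundary.
fn encode_payload() -> Buffer {
    let mut buf = Buffer::new();
    buf.extend_from_array(&[0xde, 0xad]); // small fixed-size append
    buf.write_all(b"payload").unwrap();   // uses the Write impl above
    buf
}
```
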
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..102027d14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
@@ -0,0 +1,510 @@
+//! Client-side types.
+
+use super::*;
+
+use std::marker::PhantomData;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty {
+ handle: self.handle,
+ _marker: PhantomData,
+ }.drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.handle;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ Group,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Punct,
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed of types in with_api
+// instead of pattern matching on methods, here and in server decl.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Group {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut buf = bridge.cached_buffer.take();
+
+ buf.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ());
+ reverse_encode!(buf; $($arg),*);
+
+ buf = bridge.dispatch.call(buf);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ());
+
+ bridge.cached_buffer = buf;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+    /// The state will be restored after `f` exits, even
+    /// if `f` panics, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+ }
+
+ fn enter<R>(self, f: impl FnOnce() -> R) -> R {
+ let force_show_panics = self.force_show_panics;
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+
+ BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
+ }
+
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+/// A client-side RPC entry-point, which may be using a different `proc_macro`
+/// from the one used by the server, but can be invoked compatibly.
+///
+/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters
+/// decorate the `Client<I, O>` with the RPC "interface" of the entry-point, but
+/// do not themselves participate in the ABI at all; they only facilitate type-checking.
+///
+/// E.g. `Client<TokenStream, TokenStream>` is the common proc macro interface,
+/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`,
+/// indicating that the RPC input and output will be serialized token streams,
+/// and forcing the use of APIs that take/return `S::TokenStream`, server-side.
+#[repr(C)]
+pub struct Client<I, O> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+
+ pub(super) run: extern "C" fn(Bridge<'_>) -> Buffer,
+
+ pub(super) _marker: PhantomData<fn(I) -> O>,
+}
+
+impl<I, O> Copy for Client<I, O> {}
+impl<I, O> Clone for Client<I, O> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ mut bridge: Bridge<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer {
+ // The initial `cached_buffer` contains the input.
+ let mut buf = bridge.cached_buffer.take();
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ bridge.enter(|| {
+ let reader = &mut &buf[..];
+ let input = A::decode(reader, &mut ());
+
+ // Put the `cached_buffer` back in the `Bridge`, for requests.
+ Bridge::with(|bridge| bridge.cached_buffer = buf.take());
+
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ buf = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ buf.clear();
+ Ok::<_, ()>(output).encode(&mut buf, &mut ());
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ buf.clear();
+ Err::<(), _>(e).encode(&mut buf, &mut ());
+ });
+ buf
+}
+
+impl Client<super::super::TokenStream, super::super::TokenStream> {
+ pub const fn expand1(
+ f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> {
+ pub const fn expand2(
+ f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub const fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub const fn attr(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub const fn bang(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
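+
+// A minimal sketch of how a macro crate's generated code could describe an expander
+// with the constructors above; `passthrough` is a hypothetical expander that returns
+// its input unchanged.
+#[allow(dead_code)]
+fn example_proc_macro_descriptor() -> ProcMacro {
+    fn passthrough(input: super::super::TokenStream) -> super::super::TokenStream {
+        input
+    }
+    // `bang` wraps the expander in a `Client` via `Client::expand1`.
+    ProcMacro::bang("passthrough", passthrough)
+}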
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..d371ae3ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs
@@ -0,0 +1,32 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+use std::marker::PhantomData;
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(*mut Env, A) -> R,
+ env: *mut Env,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ //
+ // The `'a` lifetime parameter represents the lifetime of `Env`.
+ _marker: PhantomData<*mut &'a mut ()>,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
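+
+// A minimal sketch of wrapping a mutable closure and invoking it through the
+// `repr(C)` shim above; the state lives in the original closure, not in `Closure`.
+#[allow(dead_code)]
+fn example_closure_roundtrip() {
+    let mut counter = 0u32;
+    let mut add = |n: u32| {
+        counter += n;
+        counter
+    };
+    let mut closure = Closure::from(&mut add);
+    assert_eq!(closure.call(2), 2);
+    assert_eq!(closure.call(3), 5);
+}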
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..c219a9465
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
@@ -0,0 +1,89 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::{BuildHasher, Hash};
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+/// A store that associates values of type `T` with numeric handles. A value can
+/// be looked up using its handle.
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
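+
+// A minimal sketch of the `OwnedStore` lifecycle: the counter is a process-wide
+// `AtomicUsize` starting at 1, so `Handle::new` (a `NonZeroU32`) never sees zero.
+#[allow(dead_code)]
+fn example_owned_store() {
+    static COUNTER: AtomicUsize = AtomicUsize::new(1);
+
+    let mut store: OwnedStore<String> = OwnedStore::new(&COUNTER);
+    let handle = store.alloc("hello".to_string());
+    assert_eq!(&store[handle], "hello");
+    // `take` removes the value; indexing with `handle` afterwards would panic.
+    assert_eq!(store.take(handle), "hello");
+}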
+
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+ type Hasher = std::collections::hash_map::DefaultHasher;
+ #[inline]
+ fn build_hasher(&self) -> Self::Hasher {
+ Self::Hasher::new()
+ }
+}
+
+/// Like `OwnedStore`, but avoids storing any value more than once.
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle, NonRandomState>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore {
+ owned: OwnedStore::new(counter),
+ interner: HashMap::with_hasher(NonRandomState),
+ }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..4967da493
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
@@ -0,0 +1,451 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the argument names
+/// and argument/return types, to enable several different use cases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn concat_trees(
+ base: Option<$S::TokenStream>,
+ trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+ ) -> $S::TokenStream;
+ fn concat_streams(
+ base: Option<$S::TokenStream>,
+ streams: Vec<$S::TokenStream>,
+ ) -> $S::TokenStream;
+ fn into_trees(
+ $self: $S::TokenStream
+ ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+ },
+ Group {
+ fn drop($self: $S::Group);
+ fn clone($self: &$S::Group) -> $S::Group;
+ fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
+ fn delimiter($self: &$S::Group) -> Delimiter;
+ fn stream($self: &$S::Group) -> $S::TokenStream;
+ fn span($self: &$S::Group) -> $S::Span;
+ fn span_open($self: &$S::Group) -> $S::Span;
+ fn span_close($self: &$S::Group) -> $S::Span;
+ fn set_span($self: &mut $S::Group, span: $S::Span);
+ },
+ Punct {
+ fn new(ch: char, spacing: Spacing) -> $S::Punct;
+ fn as_char($self: $S::Punct) -> char;
+ fn spacing($self: $S::Punct) -> Spacing;
+ fn span($self: $S::Punct) -> $S::Span;
+ fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn def_site() -> $S::Span;
+ fn call_site() -> $S::Span;
+ fn mixed_site() -> $S::Span;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to match the ordering in `reverse_decode`.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
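+
+// For example, `reverse_encode!(buf; a, b, c)` expands (conceptually) to:
+//
+//     c.encode(&mut buf, &mut ());
+//     b.encode(&mut buf, &mut ());
+//     a.encode(&mut buf, &mut ());
+//
+// and `reverse_decode!(reader, s; a: A, b: B, c: C)` emits its `decode` calls in the
+// same reversed order (`c`, then `b`, then `a`), so the two sides stay in sync.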
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[allow(unsafe_code)]
+mod selfless_reify;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// An active connection between a server and a client.
+/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run`
+/// field of `client::Client`. The client holds its copy of the `Bridge`
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+    /// used for making requests, but also for passing input to the client.
+ cached_buffer: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+    /// If `true`, always invoke the default panic hook.
+ force_show_panics: bool,
+
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ _marker: marker::PhantomData<*mut ()>,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ // Should be a no-op due to std's in-place collect optimizations.
+ unmarked.into_iter().map(T::mark).collect()
+ }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ // Should be a no-op due to std's in-place collect optimizations.
+ self.into_iter().map(T::unmark).collect()
+ }
+}
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+macro_rules! mark_compound {
+ (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Mark::mark($field)))?
+ })*
+ }
+ }
+ }
+
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Unmark::unmark($field)))?
+ })*
+ }
+ }
+ }
+ }
+}
+
+macro_rules! compound_traits {
+ ($($t:tt)*) => {
+ rpc_encode_decode!($($t)*);
+ mark_compound!($($t)*);
+ };
+}
+
+compound_traits!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+compound_traits!(
+ enum Option<T> {
+ Some(t),
+ None,
+ }
+);
+
+compound_traits!(
+ enum Result<T, E> {
+ Ok(t),
+ Err(e),
+ }
+);
+
+#[derive(Clone)]
+pub enum TokenTree<G, P, I, L> {
+ Group(G),
+ Punct(P),
+ Ident(I),
+ Literal(L),
+}
+
+compound_traits!(
+ enum TokenTree<G, P, I, L> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..e9d7a46c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
@@ -0,0 +1,304 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
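+
+// A minimal round-trip sketch for the primitive encoders above. `Writer` is the byte
+// buffer from `buffer.rs`, and the unit state `()` stands in for a client/server
+// state that these primitive impls never touch.
+#[allow(dead_code)]
+fn example_rpc_roundtrip() {
+    let mut buf = Writer::new();
+    42u32.encode(&mut buf, &mut ());
+    7u8.encode(&mut buf, &mut ());
+
+    let mut reader: Reader<'_> = &buf[..];
+    assert_eq!(u32::decode(&mut reader, &mut ()), 42);
+    assert_eq!(u8::decode(&mut reader, &mut ()), 7);
+}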
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ for x in self {
+ x.encode(w, s);
+ }
+ }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let mut vec = Vec::with_capacity(len);
+ for _ in 0..len {
+ vec.push(T::decode(r, s));
+ }
+ vec
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
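+
+// A minimal sketch of how the bridge turns a caught panic payload into a
+// `PanicMessage`; note the default panic hook still prints unless it is replaced.
+#[allow(dead_code)]
+fn example_panic_message() {
+    let payload = std::panic::catch_unwind(|| panic!("boom")).unwrap_err();
+    let msg = PanicMessage::from(payload);
+    assert_eq!(msg.as_str(), Some("boom"));
+}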
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..2cde1f65a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub const fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+    /// The old value will be restored after `f` exits, even
+    /// if `f` panics, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+        /// even if `f` panics.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
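+
+// A minimal sketch of using `ScopedCell` with a type lambda that ignores its
+// lifetime; `ConstU32` is a local helper, not part of the bridge itself.
+#[allow(dead_code)]
+fn example_scoped_cell() {
+    struct ConstU32;
+    impl<'a> ApplyL<'a> for ConstU32 {
+        type Out = u32;
+    }
+
+    let cell: ScopedCell<ConstU32> = ScopedCell::new(0);
+    cell.set(42, || {
+        // While the closure runs the cell holds 42; the old value (0) is put
+        // back afterwards, even if the closure panics.
+    });
+}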
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs
new file mode 100644
index 000000000..4ee4bb87c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs
@@ -0,0 +1,83 @@
+//! Abstraction for creating `fn` pointers from any callable that *effectively*
+//! has the equivalent of implementing `Default`, even if the compiler neither
+//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers)
+//! other than those with absolutely no captures.
+//!
+//! More specifically, for a closure-like type to be "effectively `Default`":
+//! * it must be a ZST (zero-sized type): no information contained within, so
+//! that `Default`'s return value (if it were implemented) is unambiguous
+//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar
+//! types that would make duplicating values at will unsound
+//! * combined with the ZST requirement, this confers a kind of "telecopy"
+//! ability: similar to `Copy`, but without keeping the value around, and
+//! instead "reconstructing" it (a noop given it's a ZST) when needed
+//! * it must be *provably* inhabited: no captured uninhabited types or any
+//! other types that cannot be constructed by the user of this abstraction
+//! * the proof is a value of the closure-like type itself, in a sense the
+//! "seed" for the "telecopy" process made possible by ZST + `Copy`
+//! * this requirement is the only reason an abstraction limited to a specific
+//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic
+//! at the "attempted `::default()` call" time, but that doesn't guarantee
+//! that the value can be soundly created, and attempting to use the typical
+//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type
+//! that is not proof of anything without a value (i.e. isomorphic to a
+//! newtype of the type it's trying to prove the inhabitation of)
+//!
+//! A more flexible (and safer) solution to the general problem could exist once
+//! `const`-generic parameters can have type parameters in their types:
+//!
+//! ```rust,ignore (needs future const-generics)
+//! extern "C" fn ffi_wrapper<
+//! A, R,
+//! F: Fn(A) -> R,
+//! const f: F, // <-- this `const`-generic is not yet allowed
+//! >(arg: A) -> R {
+//! f(arg)
+//! }
+//! ```
+
+use std::mem;
+
+// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
+macro_rules! define_reify_functions {
+ ($(
+ fn $name:ident $(<$($param:ident),*>)?
+ for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
+ )+) => {
+ $(pub const fn $name<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
+ // FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
+ // formatting becomes possible in `const fn`.
+ assert!(mem::size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
+
+ $(extern $abi)? fn wrapper<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >($($arg: $arg_ty),*) -> $ret_ty {
+ let f = unsafe {
+ // SAFETY: `F` satisfies all criteria for "out of thin air"
+ // reconstructability (see module-level doc comment).
+ mem::MaybeUninit::<F>::uninit().assume_init()
+ };
+ f($($arg),*)
+ }
+ let _f_proof = f;
+ wrapper::<
+ $($($param,)*)?
+ F
+ >
+ })+
+ }
+}
+
+define_reify_functions! {
+ fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
+
+ // HACK(eddyb) this abstraction is used with `for<'a> fn(Bridge<'a>) -> T`
+ // but that doesn't work with just `reify_to_extern_c_fn_unary` because of
+ // the `fn` pointer type being "higher-ranked" (i.e. the `for<'a>` binder).
+ // FIXME(eddyb) try to remove the lifetime from `Bridge`, that'd help.
+ fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::Bridge<'_>) -> R;
+}
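+
+// A minimal sketch of reifying a capture-free (zero-sized, `Copy`) closure into an
+// `extern "C"` fn pointer with the helper generated above.
+#[allow(dead_code)]
+fn example_reify() {
+    let double = |x: u32| x * 2;
+    let fn_ptr: extern "C" fn(u32) -> u32 = _reify_to_extern_c_fn_unary(double);
+    assert_eq!(fn_ptr(21), 42);
+}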
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..0fb3c6985
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
@@ -0,0 +1,332 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+pub trait Types {
+ type FreeFunctions: 'static;
+ type TokenStream: 'static + Clone;
+ type Group: 'static + Clone;
+ type Punct: 'static + Copy + Eq + Hash;
+ type Ident: 'static + Copy + Eq + Hash;
+ type Literal: 'static + Clone;
+ type SourceFile: 'static + Clone;
+ type MultiSpan: 'static;
+ type Diagnostic: 'static;
+ type Span: 'static + Copy + Eq + Hash;
+}
+
+/// Declare an associated fn of one of the traits below, adding necessary
+/// default bodies.
+macro_rules! associated_fn {
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ $(pub trait $name: Types {
+ $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, buf: Buffer) -> Buffer;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &buf[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ buf.clear();
+ r.encode(&mut buf, handle_store);
+ })*
+ }),*
+ }
+ buf
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ let mut dispatch = |buf| dispatcher.dispatch(buf);
+
+ run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided; the second one is a bit
+// faster, but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |buf| {
+ req_tx.send(buf).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ });
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let mut buf = Buffer::new();
+ input.encode(&mut buf, &mut dispatcher.handle_store);
+
+ buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
+
+ Result::decode(&mut &buf[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<super::super::TokenStream, super::super::TokenStream> {
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
+
+impl
+ client::Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >
+{
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
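+
+// A sketch of the server-side call shape, assuming some `my_server` whose type
+// implements all of the `Server` traits (a hypothetical `MyServer`); `client` is the
+// `ProcMacro::Attr { client, .. }` field loaded from the compiled macro crate:
+//
+//     let expanded = client.run(&SameThread, my_server, attr_stream, item_stream, false);
+//
+// The returned `Result<_, PanicMessage>` carries either the expanded token stream or
+// the panic raised by the macro.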
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..3fade2dc4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copied from <https://github.com/rust-lang/rust/blob/e45d9973b2665897a768312e971b82cc62633103/src/libproc_macro/diagnostic.rs>
+//! and modified to remove unstable features.
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and its associated child
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the child diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+    /// Returns an iterator over the child diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
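+
+// A minimal sketch of building and emitting a diagnostic with child messages; `span`
+// must come from an active proc-macro expansion (e.g. a token's span), since `emit`
+// goes through the bridge.
+#[allow(dead_code)]
+fn example_emit_diagnostic(span: Span) {
+    Diagnostic::spanned(span, Level::Error, "something went wrong")
+        .span_note(span, "the problem originates here")
+        .help("consider rewriting this item")
+        .emit();
+}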
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
new file mode 100644
index 000000000..c50a16bf4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
@@ -0,0 +1,1106 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+ bridge::Bridge::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(Option<bridge::client::TokenStream>);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(None)
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ let stream = self.0.as_ref().ok_or(ExpandError)?;
+ match bridge::client::TokenStream::expand_expr(stream) {
+ Ok(stream) => Ok(TokenStream(Some(stream))),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters that do not exist in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
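+///
+/// A minimal sketch (proc-macro context assumed, since `proc_macro` types cannot
+/// be constructed outside of it):
+///
+/// ```ignore (requires-proc-macro-runtime)
+/// use proc_macro::TokenStream;
+///
+/// let stream: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
+/// assert!(!stream.is_empty());
+/// ```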
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints the token stream in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+fn tree_to_bridge_tree(
+ tree: TokenTree,
+) -> bridge::TokenTree<
+ bridge::client::Group,
+ bridge::client::Punct,
+ bridge::client::Ident,
+ bridge::client::Literal,
+> {
+ match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+ }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+ streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+ fn new(capacity: usize) -> Self {
+ ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ if let Some(stream) = stream.0 {
+ self.streams.push(stream);
+ }
+ }
+
+ fn build(mut self) -> TokenStream {
+ if self.streams.len() <= 1 {
+ TokenStream(self.streams.pop())
+ } else {
+ TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+ }
+ }
+
+ fn append_to(mut self, stream: &mut TokenStream) {
+ if self.streams.is_empty() {
+ return;
+ }
+ let base = stream.0.take();
+ if base.is_none() && self.streams.len() == 1 {
+ stream.0 = self.streams.pop();
+ } else {
+ stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+ }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let iter = streams.into_iter();
+ let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+ iter.for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(
+ std::vec::IntoIter<
+ bridge::TokenTree<
+ bridge::client::Group,
+ bridge::client::Punct,
+ bridge::client::Ident,
+ bridge::client::Literal,
+ >,
+ >,
+ );
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
+ }
+ }
+}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the original source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external
+ /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens; it will simply delegate to
+ /// the `set_span` method of the contained variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::client::Group);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An invisible delimiter that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
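+ ///
+ /// A small sketch (proc-macro context assumed):
+ ///
+ /// ```ignore (requires-proc-macro-runtime)
+ /// let inner: TokenStream = "1, 2, 3".parse().unwrap();
+ /// let group = Group::new(Delimiter::Bracket, inner);
+ /// assert_eq!(group.delimiter(), Delimiter::Bracket);
+ /// ```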
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::client::Group::new(delimiter, stream.0))
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter()
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(Some(self.0.stream()))
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span_close())
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::client::Punct);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language,
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
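+ ///
+ /// A small sketch (proc-macro context assumed):
+ ///
+ /// ```ignore (requires-proc-macro-runtime)
+ /// let plus = Punct::new('+', Spacing::Alone);
+ /// assert_eq!(plus.as_char(), '+');
+ /// ```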
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct(bridge::client::Punct::new(ch, spacing))
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.as_char()
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ self.0.spacing()
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene,
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
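+ ///
+ /// A minimal sketch (proc-macro context assumed):
+ ///
+ /// ```ignore (requires-proc-macro-runtime)
+ /// let ident = Ident::new("answer", Span::call_site());
+ /// assert_eq!(ident.to_string(), "answer");
+ /// ```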
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here; they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the type suffix is
+ /// appended at the end.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated with this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
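+///
+/// A hedged sketch (proc-macro context assumed):
+///
+/// ```ignore (requires-proc-macro-runtime)
+/// use std::str::FromStr;
+///
+/// let lit = Literal::from_str("1u8").unwrap();
+/// assert_eq!(lit.to_string(), "1u8");
+/// assert!(Literal::from_str("not a literal").is_err());
+/// ```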
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// The build system executing the compiler will know that the variable was accessed during
+ /// compilation, and will be able to rerun the build when the value of that variable changes.
+ /// Aside from the dependency tracking, this function should be equivalent to `env::var` from the
+ /// standard library, except that the argument must be UTF-8.
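+ ///
+ /// A small sketch (the variable name is illustrative; proc-macro context assumed):
+ ///
+ /// ```ignore (requires-proc-macro-runtime)
+ /// if let Ok(profile) = proc_macro::tracked_env::var("PROFILE") {
+ ///     // The build will be rerun whenever `PROFILE` changes.
+ ///     let _ = profile;
+ /// }
+ /// ```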
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs
new file mode 100644
index 000000000..39309faa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs
@@ -0,0 +1,139 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
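+///
+/// A hedged sketch of how this internal bootstrap macro is used (assuming a
+/// local `extra: TokenStream` is in scope):
+///
+/// ```ignore (internal-bootstrap-macro)
+/// let ts: TokenStream = quote!(fn generated() { (@ extra) });
+/// ```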
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(super::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<super::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(super::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(super::Spacing::Alone),
+ Spacing::Joint => quote!(super::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(super::Delimiter::Brace),
+ Delimiter::Bracket => quote!(super::Delimiter::Bracket),
+ Delimiter::None => quote!(super::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<super::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(super::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<super::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
new file mode 100644
index 000000000..05a565fbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
@@ -0,0 +1,834 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on an idea from <https://github.com/fedochet/rust-proc-macro-expander>.
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so any
+//! `TokenStream` implementation could be provided.
+//! The original idea from fedochet used proc-macro2 as the backend; we use
+//! tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated child
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// The rustc server `Ident` has to be copyable (`Copy`).
+// We use an interned stub (`IdentId`) here to work around that.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type Group = Group;
+ type Punct = Punct;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let leaf = tt::Leaf::from(p);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(subtree),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Group for RustAnalyzer {
+ fn new(
+ &mut self,
+ delimiter: bridge::Delimiter,
+ stream: Option<Self::TokenStream>,
+ ) -> Self::Group {
+ Self::Group {
+ delimiter: delim_to_internal(delimiter),
+ token_trees: stream.unwrap_or_default().token_trees,
+ }
+ }
+ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+ delim_to_external(group.delimiter)
+ }
+
+ // NOTE: The returned stream does not include the delimiter.
+ fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+ TokenStream { token_trees: group.token_trees.clone() }
+ }
+
+ fn span(&mut self, group: &Self::Group) -> Self::Span {
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+ if let Some(delim) = &mut group.delimiter {
+ delim.id = span;
+ }
+ }
+
+ fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+}
+
+impl server::Punct for RustAnalyzer {
+ fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+ tt::Punct {
+ char: ch,
+ spacing: spacing_to_internal(spacing),
+ id: tt::TokenId::unspecified(),
+ }
+ }
+ fn as_char(&mut self, punct: Self::Punct) -> char {
+ punct.char
+ }
+ fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+ spacing_to_external(punct.spacing)
+ }
+ fn span(&mut self, punct: Self::Punct) -> Self::Span {
+ punct.id
+ }
+ fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+ tt::Punct { id: span, ..punct }
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn def_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn call_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+    /// Recent feature, not yet available in `proc_macro`.
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn mixed_site(&mut self) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO
+ other.push(span)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
new file mode 100644
index 000000000..9d56f0eaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
@@ -0,0 +1,105 @@
+//! Proc macro ABI.
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub use ra_server::TokenStream;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes =
+ attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone()));
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..48030f8d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs
@@ -0,0 +1,156 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer {
+ data: *mut u8,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer, usize) -> Buffer,
+ drop: extern "C" fn(Buffer),
+}
+
+unsafe impl Sync for Buffer {}
+unsafe impl Send for Buffer {}
+
+impl Default for Buffer {
+ #[inline]
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl Deref for Buffer {
+ type Target = [u8];
+ #[inline]
+ fn deref(&self) -> &[u8] {
+ unsafe { slice::from_raw_parts(self.data as *const u8, self.len) }
+ }
+}
+
+impl DerefMut for Buffer {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [u8] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl Buffer {
+ #[inline]
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ #[inline]
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ #[inline]
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ #[inline]
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[u8; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn extend_from_slice(&mut self, xs: &[u8]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn push(&mut self, v: u8) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer {
+ #[inline]
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for Buffer {
+ #[inline]
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl From<Vec<u8>> for Buffer {
+ fn from(mut v: Vec<u8>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec(b: Buffer) -> Vec<u8> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop(b: Buffer) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..22bda8ba5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs
@@ -0,0 +1,529 @@
+//! Client-side types.
+
+use super::*;
+
+use std::marker::PhantomData;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty {
+ handle: self.handle,
+ _marker: PhantomData,
+ }.drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.handle;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed for types in `with_api`
+// instead of pattern matching on methods, here and in the server decl.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Span {
+ pub(crate) fn def_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.def_site)
+ }
+
+ pub(crate) fn call_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.call_site)
+ }
+
+ pub(crate) fn mixed_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.mixed_site)
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut buf = bridge.cached_buffer.take();
+
+ buf.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ());
+ reverse_encode!(buf; $($arg),*);
+
+ buf = bridge.dispatch.call(buf);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ());
+
+ bridge.cached_buffer = buf;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+ /// used for making requests.
+ cached_buffer: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+ /// Provided globals for this macro expansion.
+ globals: ExpnGlobals<Span>,
+}
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+    /// The state will be restored after `f` exits, even
+    /// if it exits by panicking, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+}
+
+/// A client-side RPC entry-point, which may be using a different `proc_macro`
+/// from the one used by the server, but can be invoked compatibly.
+///
+/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters
+/// decorate the `Client<I, O>` with the RPC "interface" of the entry-point, but
+/// do not themselves participate in the ABI at all; they only facilitate type-checking.
+///
+/// E.g. `Client<TokenStream, TokenStream>` is the common proc macro interface,
+/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`,
+/// indicating that the RPC input and output will be serialized token streams,
+/// and forcing the use of APIs that take/return `S::TokenStream`, server-side.
+#[repr(C)]
+pub struct Client<I, O> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+
+ pub(super) run: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+
+ pub(super) _marker: PhantomData<fn(I) -> O>,
+}
+
+impl<I, O> Copy for Client<I, O> {}
+impl<I, O> Clone for Client<I, O> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+fn maybe_install_panic_hook(force_show_panics: bool) {
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ config: BridgeConfig<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer {
+ let BridgeConfig { input: mut buf, dispatch, force_show_panics, .. } = config;
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ maybe_install_panic_hook(force_show_panics);
+
+ let reader = &mut &buf[..];
+ let (globals, input) = <(ExpnGlobals<Span>, A)>::decode(reader, &mut ());
+
+ // Put the buffer we used for input back in the `Bridge` for requests.
+ let new_state =
+ BridgeState::Connected(Bridge { cached_buffer: buf.take(), dispatch, globals });
+
+ BRIDGE_STATE.with(|state| {
+ state.set(new_state, || {
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ buf = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ buf.clear();
+ Ok::<_, ()>(output).encode(&mut buf, &mut ());
+ })
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ buf.clear();
+ Err::<(), _>(e).encode(&mut buf, &mut ());
+ });
+ buf
+}
+
+impl Client<super::super::TokenStream, super::super::TokenStream> {
+ pub const fn expand1(
+ f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> {
+ pub const fn expand2(
+ f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub const fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub const fn attr(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub const fn bang(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..d371ae3ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs
@@ -0,0 +1,32 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+use std::marker::PhantomData;
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(*mut Env, A) -> R,
+ env: *mut Env,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ //
+ // The `'a` lifetime parameter represents the lifetime of `Env`.
+ _marker: PhantomData<*mut &'a mut ()>,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..c219a9465
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs
@@ -0,0 +1,89 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::{BuildHasher, Hash};
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+/// A store that associates values of type `T` with numeric handles. A value can
+/// be looked up using its handle.
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+ type Hasher = std::collections::hash_map::DefaultHasher;
+ #[inline]
+ fn build_hasher(&self) -> Self::Hasher {
+ Self::Hasher::new()
+ }
+}
+
+/// Like `OwnedStore`, but avoids storing any value more than once.
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle, NonRandomState>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore {
+ owned: OwnedStore::new(counter),
+ interner: HashMap::with_hasher(NonRandomState),
+ }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..ffd440793
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs
@@ -0,0 +1,493 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the argument names
+/// and argument/return types, to enable several different use cases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn concat_trees(
+ base: Option<$S::TokenStream>,
+ trees: Vec<TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>>,
+ ) -> $S::TokenStream;
+ fn concat_streams(
+ base: Option<$S::TokenStream>,
+ streams: Vec<$S::TokenStream>,
+ ) -> $S::TokenStream;
+ fn into_trees(
+ $self: $S::TokenStream
+ ) -> Vec<TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>>;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to match the ordering in `reverse_decode`.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[allow(unsafe_code)]
+mod selfless_reify;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// Configuration for establishing an active connection between a server and a
+/// client. The server creates the bridge config (`run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run` field
+/// of `client::Client`. The client constructs a local `Bridge` from the config
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct BridgeConfig<'a> {
+ /// Buffer used to pass initial input to the client.
+ input: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+    /// If `true`, always invoke the default panic hook.
+ force_show_panics: bool,
+
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ _marker: marker::PhantomData<*mut ()>,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ // Should be a no-op due to std's in-place collect optimizations.
+ unmarked.into_iter().map(T::mark).collect()
+ }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ // Should be a no-op due to std's in-place collect optimizations.
+ self.into_iter().map(T::unmark).collect()
+ }
+}
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ u8,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+macro_rules! mark_compound {
+ (struct $name:ident <$($T:ident),+> { $($field:ident),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ $name {
+ $($field: Mark::mark(unmarked.$field)),*
+ }
+ }
+ }
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ $name {
+ $($field: Unmark::unmark(self.$field)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Mark::mark($field)))?
+ })*
+ }
+ }
+ }
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Unmark::unmark($field)))?
+ })*
+ }
+ }
+ }
+ }
+}
+
+macro_rules! compound_traits {
+ ($($t:tt)*) => {
+ rpc_encode_decode!($($t)*);
+ mark_compound!($($t)*);
+ };
+}
+
+compound_traits!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+compound_traits!(
+ enum Option<T> {
+ Some(t),
+ None,
+ }
+);
+
+compound_traits!(
+ enum Result<T, E> {
+ Ok(t),
+ Err(e),
+ }
+);
+
+#[derive(Copy, Clone)]
+pub struct DelimSpan<Span> {
+ pub open: Span,
+ pub close: Span,
+ pub entire: Span,
+}
+
+impl<Span: Copy> DelimSpan<Span> {
+ pub fn from_single(span: Span) -> Self {
+ DelimSpan { open: span, close: span, entire: span }
+ }
+}
+
+compound_traits!(struct DelimSpan<Span> { open, close, entire });
+
+#[derive(Clone)]
+pub struct Group<TokenStream, Span> {
+ pub delimiter: Delimiter,
+ pub stream: Option<TokenStream>,
+ pub span: DelimSpan<Span>,
+}
+
+compound_traits!(struct Group<TokenStream, Span> { delimiter, stream, span });
+
+#[derive(Clone)]
+pub struct Punct<Span> {
+ pub ch: u8,
+ pub joint: bool,
+ pub span: Span,
+}
+
+compound_traits!(struct Punct<Span> { ch, joint, span });
+
+#[derive(Clone)]
+pub enum TokenTree<TokenStream, Span, Ident, Literal> {
+ Group(Group<TokenStream, Span>),
+ Punct(Punct<Span>),
+ Ident(Ident),
+ Literal(Literal),
+}
+
+compound_traits!(
+ enum TokenTree<TokenStream, Span, Ident, Literal> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
+
+/// Globals provided alongside the initial inputs for a macro expansion.
+/// Provides values such as spans which are used frequently to avoid RPC.
+#[derive(Clone)]
+pub struct ExpnGlobals<Span> {
+ pub def_site: Span,
+ pub call_site: Span,
+ pub mixed_site: Span,
+}
+
+compound_traits!(
+ struct ExpnGlobals<Span> { def_site, call_site, mixed_site }
+);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..e9d7a46c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs
@@ -0,0 +1,304 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ for x in self {
+ x.encode(w, s);
+ }
+ }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let mut vec = Vec::with_capacity(len);
+ for _ in 0..len {
+ vec.push(T::decode(r, s));
+ }
+ vec
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..2cde1f65a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub const fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+    /// The old value will be restored after `f` exits, even
+    /// if it exits by panicking, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs
new file mode 100644
index 000000000..907ad256e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs
@@ -0,0 +1,84 @@
+//! Abstraction for creating `fn` pointers from any callable that *effectively*
+//! has the equivalent of implementing `Default`, even if the compiler neither
+//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers)
+//! other than those with absolutely no captures.
+//!
+//! More specifically, for a closure-like type to be "effectively `Default`":
+//! * it must be a ZST (zero-sized type): no information contained within, so
+//! that `Default`'s return value (if it were implemented) is unambiguous
+//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar
+//! types that would make duplicating values at will unsound
+//! * combined with the ZST requirement, this confers a kind of "telecopy"
+//! ability: similar to `Copy`, but without keeping the value around, and
+//! instead "reconstructing" it (a noop given it's a ZST) when needed
+//! * it must be *provably* inhabited: no captured uninhabited types or any
+//! other types that cannot be constructed by the user of this abstraction
+//! * the proof is a value of the closure-like type itself, in a sense the
+//! "seed" for the "telecopy" process made possible by ZST + `Copy`
+//! * this requirement is the only reason an abstraction limited to a specific
+//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic
+//! at the "attempted `::default()` call" time, but that doesn't guarantee
+//! that the value can be soundly created, and attempting to use the typical
+//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type
+//! that is not proof of anything without a value (i.e. isomorphic to a
+//! newtype of the type it's trying to prove the inhabitation of)
+//!
+//! A more flexible (and safer) solution to the general problem could exist once
+//! `const`-generic parameters can have type parameters in their types:
+//!
+//! ```rust,ignore (needs future const-generics)
+//! extern "C" fn ffi_wrapper<
+//! A, R,
+//! F: Fn(A) -> R,
+//! const f: F, // <-- this `const`-generic is not yet allowed
+//! >(arg: A) -> R {
+//! f(arg)
+//! }
+//! ```
+
+use std::mem;
+
+// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
+macro_rules! define_reify_functions {
+ ($(
+ fn $name:ident $(<$($param:ident),*>)?
+ for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
+ )+) => {
+ $(pub const fn $name<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
+ // FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
+ // formatting becomes possible in `const fn`.
+ assert!(mem::size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
+
+ $(extern $abi)? fn wrapper<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >($($arg: $arg_ty),*) -> $ret_ty {
+ let f = unsafe {
+ // SAFETY: `F` satisfies all criteria for "out of thin air"
+ // reconstructability (see module-level doc comment).
+ mem::MaybeUninit::<F>::uninit().assume_init()
+ };
+ f($($arg),*)
+ }
+ let _f_proof = f;
+ wrapper::<
+ $($($param,)*)?
+ F
+ >
+ })+
+ }
+}
+
+define_reify_functions! {
+ fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
+
+ // HACK(eddyb) this abstraction is used with `for<'a> fn(BridgeConfig<'a>)
+ // -> T` but that doesn't work with just `reify_to_extern_c_fn_unary`
+ // because of the `fn` pointer type being "higher-ranked" (i.e. the
+ // `for<'a>` binder).
+ // FIXME(eddyb) try to remove the lifetime from `BridgeConfig`, that'd help.
+ fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::BridgeConfig<'_>) -> R;
+}
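+
+// Editorial sketch (not part of the upstream file): the reification trick above
+// spelled out for one concrete, non-`extern` signature. `reify_unary` is an
+// illustrative name, not an item defined by this module.
+#[cfg(test)]
+mod reify_sketch {
+    use std::mem;
+
+    const fn reify_unary<A, R, F: Fn(A) -> R + Copy>(f: F) -> fn(A) -> R {
+        assert!(mem::size_of::<F>() == 0, "closure must be zero-sized");
+        fn wrapper<A, R, F: Fn(A) -> R + Copy>(arg: A) -> R {
+            // SAFETY: `F` is a zero-sized, `Copy`, provably inhabited closure type
+            // (a value of it was passed in as proof), so materializing it "out of
+            // thin air" is sound -- see the module-level doc comment above.
+            let f = unsafe { mem::MaybeUninit::<F>::uninit().assume_init() };
+            f(arg)
+        }
+        let _f_proof = f;
+        wrapper::<A, R, F>
+    }
+
+    #[test]
+    fn reified_closure_behaves_like_fn_pointer() {
+        let double: fn(u32) -> u32 = reify_unary(|x: u32| x * 2);
+        assert_eq!(double(21), 42);
+    }
+}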
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..6e7a8d8c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs
@@ -0,0 +1,339 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+pub trait Types {
+ type FreeFunctions: 'static;
+ type TokenStream: 'static + Clone;
+ type Ident: 'static + Copy + Eq + Hash;
+ type Literal: 'static + Clone;
+ type SourceFile: 'static + Clone;
+ type MultiSpan: 'static;
+ type Diagnostic: 'static;
+ type Span: 'static + Copy + Eq + Hash;
+}
+
+/// Declare an associated fn of one of the traits below, adding necessary
+/// default bodies.
+macro_rules! associated_fn {
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ $(pub trait $name: Types {
+ $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span>;
+ }
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+impl<S: Server> Server for MarkedTypes<S> {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ <_>::mark(Server::globals(&mut self.0))
+ }
+}
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, buf: Buffer) -> Buffer;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &buf[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ buf.clear();
+ r.encode(&mut buf, handle_store);
+ })*
+ }),*
+ }
+ buf
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ let mut dispatch = |buf| dispatcher.dispatch(buf);
+
+ run_client(BridgeConfig {
+ input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided; the second one is a bit
+// faster, but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |buf| {
+ req_tx.send(buf).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(BridgeConfig {
+ input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(BridgeConfig {
+ input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ });
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let globals = dispatcher.server.globals();
+
+ let mut buf = Buffer::new();
+ (globals, input).encode(&mut buf, &mut dispatcher.handle_store);
+
+ buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
+
+ Result::decode(&mut &buf[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<super::super::TokenStream, super::super::TokenStream> {
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
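+
+// Editorial sketch (not part of the upstream file): how a caller might drive a
+// single-input expansion. `expand_same_thread`, `expander`, and `server` are
+// illustrative names; only `Client::run`, `Server` and `SameThread` come from
+// this file. Per the NOTE above, `SameThread` avoids the cross-thread overhead.
+#[allow(dead_code)]
+fn expand_same_thread<S>(
+    expander: &client::Client<super::super::TokenStream, super::super::TokenStream>,
+    server: S,
+    input: S::TokenStream,
+) -> Result<S::TokenStream, PanicMessage>
+where
+    S: Server,
+    S::TokenStream: Default,
+{
+    // `false`: don't force client panic messages to be shown.
+    expander.run(&SameThread, server, input, false)
+}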
+
+impl
+ client::Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >
+{
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..3fade2dc4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/e45d9973b2665897a768312e971b82cc62633103/src/libproc_macro/diagnostic.rs>
+//! augmented with removing unstable features
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
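+
+// Editorial sketch (not part of the upstream file): building (but not emitting)
+// a diagnostic with child messages using the API above. `span` here stands in
+// for a real `Span` obtained from a token.
+#[allow(dead_code)]
+fn unused_field_warning(span: Span) -> Diagnostic {
+    Diagnostic::spanned(span, Level::Warning, "field is never read")
+        .span_note(span, "the field is declared here")
+        .help("consider removing the field")
+}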
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs
new file mode 100644
index 000000000..be62c73ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs
@@ -0,0 +1,1125 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+ bridge::client::is_available()
+}
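+
+// Editorial sketch (not part of the upstream file): using the non-panicking
+// probe above from library code that may run outside a macro. `try_parse` is an
+// illustrative helper, not an item of this module.
+#[allow(dead_code)]
+fn try_parse(src: &str) -> Option<TokenStream> {
+    if is_available() {
+        src.parse().ok()
+    } else {
+        // Not inside a procedural macro (e.g. a build script or test binary),
+        // so calling into the bridge would panic; bail out instead.
+        None
+    }
+}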
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(Option<bridge::client::TokenStream>);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(None)
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ let stream = self.0.as_ref().ok_or(ExpandError)?;
+ match bridge::client::TokenStream::expand_expr(stream) {
+ Ok(stream) => Ok(TokenStream(Some(stream))),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints the token stream in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+fn tree_to_bridge_tree(
+ tree: TokenTree,
+) -> bridge::TokenTree<
+ bridge::client::TokenStream,
+ bridge::client::Span,
+ bridge::client::Ident,
+ bridge::client::Literal,
+> {
+ match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+ }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+ streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+ fn new(capacity: usize) -> Self {
+ ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ if let Some(stream) = stream.0 {
+ self.streams.push(stream);
+ }
+ }
+
+ fn build(mut self) -> TokenStream {
+ if self.streams.len() <= 1 {
+ TokenStream(self.streams.pop())
+ } else {
+ TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+ }
+ }
+
+ fn append_to(mut self, stream: &mut TokenStream) {
+ if self.streams.is_empty() {
+ return;
+ }
+ let base = stream.0.take();
+ if base.is_none() && self.streams.len() == 1 {
+ stream.0 = self.streams.pop();
+ } else {
+ stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+ }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let iter = streams.into_iter();
+ let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+ iter.for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
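+
+// Editorial sketch (not part of the upstream file): the `FromIterator` impls
+// above in action, first collecting individual trees into the tokens `a + b`,
+// then flattening several streams into one. Names are illustrative only.
+#[allow(dead_code)]
+fn build_sum() -> TokenStream {
+    let expr: TokenStream = vec![
+        TokenTree::Ident(Ident::new("a", Span::call_site())),
+        TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+        TokenTree::Ident(Ident::new("b", Span::call_site())),
+    ]
+    .into_iter()
+    .collect();
+
+    // `FromIterator<TokenStream>` concatenates the pieces into a single stream.
+    vec![TokenStream::new(), expr].into_iter().collect()
+}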
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(
+ std::vec::IntoIter<
+ bridge::TokenTree<
+ bridge::client::TokenStream,
+ bridge::client::Span,
+ bridge::client::Ident,
+ bridge::client::Literal,
+ >,
+ >,
+ );
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
+ }
+ }
+}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated from, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the origin source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external
+ /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens; it will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::Group<bridge::client::TokenStream, bridge::client::Span>);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An invisible delimiter, that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::Group {
+ delimiter,
+ stream: stream.0,
+ span: bridge::DelimSpan::from_single(Span::call_site().0),
+ })
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(self.0.stream.clone())
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span.entire)
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span.open)
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span.close)
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.span = bridge::DelimSpan::from_single(span.0);
+ }
+}
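+
+// Editorial sketch (not part of the upstream file): wrapping an existing stream
+// in parentheses and pointing the delimiters at a specific span, per the
+// `set_span` note above. `parenthesize` is an illustrative name.
+#[allow(dead_code)]
+fn parenthesize(stream: TokenStream, span: Span) -> Group {
+    let mut group = Group::new(Delimiter::Parenthesis, stream);
+    group.set_span(span);
+    group
+}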
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::Punct<bridge::client::Span>);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language;
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ const LEGAL_CHARS: &[char] = &[
+ '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
+ ':', '#', '$', '?', '\'',
+ ];
+ if !LEGAL_CHARS.contains(&ch) {
+ panic!("unsupported character `{:?}`", ch);
+ }
+ Punct(bridge::Punct {
+ ch: ch as u8,
+ joint: spacing == Spacing::Joint,
+ span: Span::call_site().0,
+ })
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.ch as char
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ if self.0.joint {
+ Spacing::Joint
+ } else {
+ Spacing::Alone
+ }
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span)
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.span = span.0;
+ }
+}
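+
+// Editorial sketch (not part of the upstream file): the multi-character operator
+// convention described above -- `+=` is a `Joint` '+' immediately followed by an
+// `Alone` '='.
+#[allow(dead_code)]
+fn plus_eq() -> [Punct; 2] {
+    [Punct::new('+', Spacing::Joint), Punct::new('=', Spacing::Alone)]
+}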
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
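+
+// Editorial sketch (not part of the upstream file): a normal identifier versus a
+// raw identifier for a keyword, as documented on `new_raw` above.
+#[allow(dead_code)]
+fn idents() -> (Ident, Ident) {
+    (
+        Ident::new("my_helper", Span::call_site()),
+        // Renders as `r#fn`; path-segment keywords like `self` would panic here.
+        Ident::new_raw("fn", Span::call_site()),
+    )
+}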
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integer type is
+ /// the suffix at the end.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated with this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
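+
+// Editorial sketch (not part of the upstream file): suffixed vs. unsuffixed
+// integer constructors from the macros above, plus a string literal.
+#[allow(dead_code)]
+fn literals() -> (Literal, Literal, Literal) {
+    (
+        Literal::u8_suffixed(255),   // renders as `255u8`
+        Literal::i32_unsuffixed(-1), // may split into `-` and `1` on round-trip
+        Literal::string("hello"),    // renders as `"hello"`
+    )
+}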
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// The build system executing the compiler will know that the variable was accessed during
+ /// compilation, and will be able to rerun the build when the value of that variable changes.
+ /// Besides the dependency tracking this function should be equivalent to `env::var` from the
+ /// standard library, except that the argument must be UTF-8.
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
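+
+// Editorial sketch (not part of the upstream file): a macro implementation
+// reading a configuration path from the environment while keeping the build
+// dependency tracking described above. `MY_MACRO_CONFIG` is a hypothetical
+// variable name.
+#[allow(dead_code)]
+fn config_path() -> Option<String> {
+    // Recorded as a build dependency, so changing the variable re-runs the macro.
+    tracked_env::var("MY_MACRO_CONFIG").ok()
+}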
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs
new file mode 100644
index 000000000..39309faa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs
@@ -0,0 +1,139 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
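+
+// Editorial sketch (not part of the upstream file): the `(@ expr)` unquoting
+// convention mentioned above. This builds the tokens `let x = 1 ;`, splicing in
+// a literal constructed at runtime. `let_x_equal_one` is an illustrative name.
+#[allow(dead_code)]
+fn let_x_equal_one() -> TokenStream {
+    let one = TokenTree::from(Literal::u8_unsuffixed(1));
+    quote!(let x = (@ one);)
+}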
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(super::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<super::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(super::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(super::Spacing::Alone),
+ Spacing::Joint => quote!(super::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(super::Delimiter::Brace),
+ Delimiter::Bracket => quote!(super::Delimiter::Bracket),
+ Delimiter::None => quote!(super::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<super::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(super::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<super::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
new file mode 100644
index 000000000..7e8e67856
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
@@ -0,0 +1,792 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on an idea from <https://github.com/fedochet/rust-proc-macro-expander>.
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so that
+//! any `TokenStream` implementation can be provided.
+//! The original idea from fedochet uses proc-macro2 as the backend;
+//! we use tt instead for better integration with rust-analyzer.
+//!
+//! FIXME: No span and source file information is implemented yet.
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
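+///
+/// For example (illustrative): collecting the streams for `a b` and for `c`
+/// yields a single stream whose `token_trees` are the three leaves `a`, `b` and `c`.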
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// The rustc server `Ident` type has to be `Copy`.
+// We use an interned-id stub here to satisfy that requirement.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
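+    ///
+    /// For example (illustrative): iterating over the stream parsed from `(a b) c`
+    /// yields two token trees, the parenthesized group and the ident `c`; it does
+    /// not descend into `a` and `b`.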
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
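+    ///
+    /// For example (illustrative): `TokenStream::from_str("(a)")` yields a stream
+    /// containing a single parenthesized subtree, as exercised by the
+    /// `test_ra_server_from_str` test at the bottom of this file.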
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ id: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.id,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(
+ subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+ ),
+ }),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+    /// Recent feature, not yet part of the stable `proc_macro` API.
+    ///
+    /// See PR:
+    /// <https://github.com/rust-lang/rust/pull/55780>
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO
+ other.push(span)
+ }
+}
+
+impl server::Server for RustAnalyzer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: Span::unspecified(),
+ call_site: Span::unspecified(),
+ mixed_site: Span::unspecified(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
new file mode 100644
index 000000000..44712f419
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
@@ -0,0 +1,102 @@
+//! Proc macro ABI
+
+extern crate proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+ ra_server::TokenStream::with_subtree(attr.clone())
+ });
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
new file mode 100644
index 000000000..46882845a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
@@ -0,0 +1,518 @@
+//! proc-macro server implementation
+//!
+//! Based on an idea from <https://github.com/fedochet/rust-proc-macro-expander>.
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so that
+//! any `TokenStream` implementation can be provided.
+//! The original idea from fedochet uses proc-macro2 as the backend;
+//! we use tt instead for better integration with rust-analyzer.
+//!
+//! FIXME: No span and source file information is implemented yet.
+
+use super::proc_macro::{
+ self,
+ bridge::{self, server},
+};
+
+mod token_stream;
+pub use token_stream::TokenStream;
+use token_stream::TokenStreamBuilder;
+
+mod symbol;
+pub use symbol::*;
+
+use std::{iter::FromIterator, ops::Bound};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type SourceFile = SourceFile;
+ type MultiSpan = Vec<Span>;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type Symbol = Symbol;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+
+ fn literal_from_str(
+ &mut self,
+ s: &str,
+ ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ // FIXME: keep track of LitKind and Suffix
+ Ok(bridge::Literal {
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(s),
+ suffix: None,
+ span: tt::TokenId::unspecified(),
+ })
+ }
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(ident) => {
+ // FIXME: handle raw idents
+ let text = ident.sym.text();
+ let ident: tt::Ident = tt::Ident { text, id: ident.span };
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let literal = LiteralFormatter(literal);
+ let text = literal
+ .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied()));
+
+ let literal = tt::Literal { text, id: literal.0.span };
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ id: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&ident.text),
+ // FIXME: handle raw idents
+ is_raw: false,
+ span: ident.id,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ bridge::TokenTree::Literal(bridge::Literal {
+ // FIXME: handle literal kinds
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(&lit.text),
+ // FIXME: handle suffixes
+ suffix: None,
+ span: lit.id,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.id,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(
+ subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+ ),
+ }),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: proc_macro::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
+ proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ proc_macro::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> proc_macro::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket,
+ None => proc_macro::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
+ match spacing {
+ proc_macro::Spacing::Alone => Spacing::Alone,
+ proc_macro::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
+ match spacing {
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+    /// Recent feature, not yet part of the stable `proc_macro` API.
+    ///
+    /// See PR:
+    /// <https://github.com/rust-lang/rust/pull/55780>
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn subspan(
+ &mut self,
+ span: Self::Span,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // Just return the span again, because some macros will unwrap the result.
+ Some(span)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO
+ other.push(span)
+ }
+}
+
+impl server::Symbol for RustAnalyzer {
+ fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ // FIXME: nfc-normalize and validate idents
+ Ok(<Self as server::Server>::intern_symbol(string))
+ }
+}
+
+impl server::Server for RustAnalyzer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: Span::unspecified(),
+ call_site: Span::unspecified(),
+ mixed_site: Span::unspecified(),
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ Symbol::intern(&tt::SmolStr::from(ident))
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ f(symbol.text().as_str())
+ }
+}
+
+struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>);
+
+impl LiteralFormatter {
+ /// Invokes the callback with a `&[&str]` consisting of each part of the
+ /// literal's representation. This is done to allow the `ToString` and
+    /// `Display` implementations to borrow references to symbol values, so that
+    /// both can be optimized to reduce overhead.
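+    ///
+    /// For example (illustrative): a raw string literal with one `#`, symbol `abc`
+    /// and no suffix is passed to `f` as the parts
+    /// `["r", "#", "\"", "abc", "\"", "#", ""]`.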
+ fn with_stringify_parts<R>(&self, f: impl FnOnce(&[&str]) -> R) -> R {
+ /// Returns a string containing exactly `num` '#' characters.
+ /// Uses a 256-character source string literal which is always safe to
+ /// index with a `u8` index.
+ fn get_hashes_str(num: u8) -> &'static str {
+ const HASHES: &str = "\
+ ################################################################\
+ ################################################################\
+ ################################################################\
+ ################################################################\
+ ";
+ const _: () = assert!(HASHES.len() == 256);
+ &HASHES[..num as usize]
+ }
+
+ self.with_symbol_and_suffix(|symbol, suffix| match self.0.kind {
+ bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]),
+ bridge::LitKind::Char => f(&["'", symbol, "'", suffix]),
+ bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]),
+ bridge::LitKind::StrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["r", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ bridge::LitKind::ByteStr => f(&["b\"", symbol, "\"", suffix]),
+ bridge::LitKind::ByteStrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ _ => f(&[symbol, suffix]),
+ })
+ }
+
+ fn with_symbol_and_suffix<R>(&self, f: impl FnOnce(&str, &str) -> R) -> R {
+ let symbol = self.0.symbol.text();
+ let suffix = self.0.suffix.map(|s| s.text()).unwrap_or_default();
+ f(symbol.as_str(), suffix.as_str())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
new file mode 100644
index 000000000..51dfba2ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
@@ -0,0 +1,46 @@
+//! Symbol interner for proc-macro-srv
+
+use std::{cell::RefCell, collections::HashMap};
+use tt::SmolStr;
+
+thread_local! {
+ static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default();
+}
+
+// ID for an interned symbol.
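+//
+// Illustrative usage (not part of the original source): `Symbol::intern("foo").text()`
+// returns the interned text back as a `tt::SmolStr` equal to "foo", and interning
+// the same string twice yields the same `Symbol` id.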
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct Symbol(u32);
+
+impl Symbol {
+ pub fn intern(data: &str) -> Symbol {
+ SYMBOL_INTERNER.with(|i| i.borrow_mut().intern(data))
+ }
+
+ pub fn text(&self) -> SmolStr {
+ SYMBOL_INTERNER.with(|i| i.borrow().get(self).clone())
+ }
+}
+
+#[derive(Default)]
+struct SymbolInterner {
+ idents: HashMap<SmolStr, u32>,
+ ident_data: Vec<SmolStr>,
+}
+
+impl SymbolInterner {
+ fn intern(&mut self, data: &str) -> Symbol {
+ if let Some(index) = self.idents.get(data) {
+ return Symbol(*index);
+ }
+
+ let index = self.idents.len() as u32;
+ let data = SmolStr::from(data);
+ self.ident_data.push(data.clone());
+ self.idents.insert(data, index);
+ Symbol(index)
+ }
+
+ fn get(&self, sym: &Symbol) -> &SmolStr {
+ &self.ident_data[sym.0 as usize]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
new file mode 100644
index 000000000..113bb52c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
@@ -0,0 +1,179 @@
+//! TokenStream implementation used by sysroot ABI
+
+use tt::TokenTree;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = std::vec::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ pub(super) fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ pub(super) fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ pub(super) fn build(self) -> TokenStream {
+ self.acc
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
new file mode 100644
index 000000000..bcf3f1184
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
@@ -0,0 +1,155 @@
+//! Procedural macros are implemented by compiling the macro-providing crate
+//! to a dynamic library with a particular ABI which the compiler uses to expand
+//! macros. Unfortunately this ABI is not specified and can change from version
+//! to version of the compiler. To support this we copy the ABI from the Rust
+//! compiler into submodules of this module (e.g. `proc_macro_srv::abis::abi_1_47`).
+//!
+//! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple
+//! interface the rest of rust-analyzer can use to talk to the macro
+//! provider.
+//!
+//! # Adding a new ABI
+//!
+//! To add a new ABI you'll need to copy the source of the target proc_macro
+//! crate from the source tree of the Rust compiler into this directory tree.
+//! Then you'll need to modify it:
+//! - Remove any `feature!` or other things which won't compile on stable
+//! - Change any absolute imports to relative imports within the ABI tree
+//!
+//! Then you'll need to add a branch to the `Abi` enum and an implementation of
+//! `Abi::expand`, `Abi::list_macros` and `Abi::from_lib` for the new ABI. See
+//! `proc_macro_srv/src/abis/abi_1_47/mod.rs` for an example. Finally you'll
+//! need to update the conditionals in `Abi::from_lib` to return your new ABI
+//! for the relevant versions of the Rust compiler.
+//!
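+//! A minimal sketch of those last steps (illustrative only; `abi_1_65` and
+//! `Abi_1_65` are hypothetical names for a future ABI):
+//!
+//! ```ignore
+//! pub(crate) enum Abi {
+//!     // ... existing variants ...
+//!     Abi1_65(Abi_1_65),
+//! }
+//!
+//! // In `Abi::from_lib`, route matching compiler versions to the new variant:
+//! (1, 65..) => {
+//!     let inner = unsafe { Abi_1_65::from_lib(lib, symbol_name) }?;
+//!     Ok(Abi::Abi1_65(inner))
+//! }
+//! ```
+//!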
+
+mod abi_1_58;
+mod abi_1_63;
+mod abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+mod abi_sysroot;
+
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+
+// Used by `test/utils.rs`
+#[cfg(test)]
+pub(crate) use abi_1_64::TokenStream as TestTokenStream;
+
+use super::dylib::LoadProcMacroDylibError;
+pub(crate) use abi_1_58::Abi as Abi_1_58;
+pub(crate) use abi_1_63::Abi as Abi_1_63;
+pub(crate) use abi_1_64::Abi as Abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
+use libloading::Library;
+use proc_macro_api::{ProcMacroKind, RustCInfo};
+
+pub struct PanicMessage {
+ message: Option<String>,
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<String> {
+ self.message.clone()
+ }
+}
+
+pub(crate) enum Abi {
+ Abi1_58(Abi_1_58),
+ Abi1_63(Abi_1_63),
+ Abi1_64(Abi_1_64),
+ #[cfg(feature = "sysroot-abi")]
+ AbiSysroot(Abi_Sysroot),
+}
+
+impl Abi {
+ /// Load a new ABI.
+ ///
+ /// # Arguments
+ ///
+    /// * `lib` - The dynamic library containing the macro implementations
+    /// * `symbol_name` - The name of the symbol under which the exported macros can be found
+    /// * `info` - RustCInfo about the compiler that was used to compile the
+    ///            macro crate. This is the information we use to figure out
+    ///            which ABI to return.
+ pub fn from_lib(
+ lib: &Library,
+ symbol_name: String,
+ info: RustCInfo,
+ ) -> Result<Abi, LoadProcMacroDylibError> {
+        // The sysroot ABI relies on `extern proc_macro` with unstable features,
+        // instead of a snapshot of the proc macro bridge's source code. It's only
+        // enabled if we have an exact version match.
+ #[cfg(feature = "sysroot-abi")]
+ {
+ if info.version_string == RUSTC_VERSION_STRING {
+ let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+ return Ok(Abi::AbiSysroot(inner));
+ }
+
+ // if we reached this point, versions didn't match. in testing, we
+ // want that to panic - this could mean that the format of `rustc
+ // --version` no longer matches the format of the version string
+ // stored in the `.rustc` section, and we want to catch that in-tree
+ // with `x.py test`
+ #[cfg(test)]
+ {
+ let allow_mismatch = std::env::var("PROC_MACRO_SRV_ALLOW_SYSROOT_MISMATCH");
+ if let Ok("1") = allow_mismatch.as_deref() {
+ // only used by rust-analyzer developers, when working on the
+ // sysroot ABI from the rust-analyzer repository - which should
+ // only happen pre-subtree. this can be removed later.
+ } else {
+ panic!(
+ "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}",
+ info.version_string, RUSTC_VERSION_STRING
+ );
+ }
+ }
+ }
+
+ // FIXME: this should use exclusive ranges when they're stable
+ // https://github.com/rust-lang/rust/issues/37854
+ match (info.version.0, info.version.1) {
+ (1, 58..=62) => {
+ let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_58(inner))
+ }
+ (1, 63) => {
+ let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_63(inner))
+ }
+ (1, 64..) => {
+ let inner = unsafe { Abi_1_64::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_64(inner))
+ }
+ _ => Err(LoadProcMacroDylibError::UnsupportedABI),
+ }
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ match self {
+ Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
+ }
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ match self {
+ Self::Abi1_58(abi) => abi.list_macros(),
+ Self::Abi1_63(abi) => abi.list_macros(),
+ Self::Abi1_64(abi) => abi.list_macros(),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.list_macros(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
new file mode 100644
index 000000000..f1e131c13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
@@ -0,0 +1,31 @@
+//! Driver for proc macro server
+use std::io;
+
+use proc_macro_api::msg::{self, Message};
+
+use crate::ProcMacroSrv;
+
+pub fn run() -> io::Result<()> {
+ let mut srv = ProcMacroSrv::default();
+ let mut buf = String::new();
+
+ while let Some(req) = read_request(&mut buf)? {
+ let res = match req {
+ msg::Request::ListMacros { dylib_path } => {
+ msg::Response::ListMacros(srv.list_macros(&dylib_path))
+ }
+ msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)),
+ };
+ write_response(res)?
+ }
+
+ Ok(())
+}
+
+fn read_request(buf: &mut String) -> io::Result<Option<msg::Request>> {
+ msg::Request::read(&mut io::stdin().lock(), buf)
+}
+
+fn write_response(msg: msg::Response) -> io::Result<()> {
+ msg.write(&mut io::stdout().lock())
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
new file mode 100644
index 000000000..2b6c070fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -0,0 +1,199 @@
+//! Handles dynamic library loading for proc macros
+
+use std::{
+ convert::TryInto,
+ fmt,
+ fs::File,
+ io,
+ path::{Path, PathBuf},
+};
+
+use libloading::Library;
+use memmap2::Mmap;
+use object::Object;
+use paths::AbsPath;
+use proc_macro_api::{read_dylib_info, ProcMacroKind};
+
+use super::abis::Abi;
+
+const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
+
+fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
+ io::Error::new(io::ErrorKind::InvalidData, e)
+}
+
+fn is_derive_registrar_symbol(symbol: &str) -> bool {
+ symbol.contains(NEW_REGISTRAR_SYMBOL)
+}
+
+fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> {
+ let file = File::open(file)?;
+ let buffer = unsafe { Mmap::map(&file)? };
+
+ Ok(object::File::parse(&*buffer)
+ .map_err(invalid_data_err)?
+ .exports()
+ .map_err(invalid_data_err)?
+ .into_iter()
+ .map(|export| export.name())
+ .filter_map(|sym| String::from_utf8(sym.into()).ok())
+ .find(|sym| is_derive_registrar_symbol(sym))
+ .map(|sym| {
+            // From the macOS docs:
+            // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
+            // Unlike other dyld APIs, the symbol name passed to dlsym() must NOT be
+            // prepended with an underscore.
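+            // For example (illustrative): a symbol read from the object file as
+            // "__rustc_proc_macro_decls_<hash>__" on macOS is passed to dlsym() as
+            // "_rustc_proc_macro_decls_<hash>__", without the extra leading underscore.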
+ if cfg!(target_os = "macos") && sym.starts_with('_') {
+ sym[1..].to_owned()
+ } else {
+ sym
+ }
+ }))
+}
+
+/// Loads a dynamic library in a platform-dependent manner.
+///
+/// On Unix, the RTLD_DEEPBIND flag has to be used to escape the problems described
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
+/// and [here](https://github.com/rust-lang/rust/issues/60593).
+///
+/// Usage of RTLD_DEEPBIND is shown
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1).
+///
+/// On Windows that behaviour appears to be the default, so we do nothing in that case.
+#[cfg(windows)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ unsafe { Library::new(file) }
+}
+
+#[cfg(unix)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ use libloading::os::unix::Library as UnixLibrary;
+ use std::os::raw::c_int;
+
+ const RTLD_NOW: c_int = 0x00002;
+ const RTLD_DEEPBIND: c_int = 0x00008;
+
+ unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) }
+}
+
+#[derive(Debug)]
+pub enum LoadProcMacroDylibError {
+ Io(io::Error),
+ LibLoading(libloading::Error),
+ UnsupportedABI,
+}
+
+impl fmt::Display for LoadProcMacroDylibError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Io(e) => e.fmt(f),
+ Self::UnsupportedABI => write!(f, "unsupported ABI version"),
+ Self::LibLoading(e) => e.fmt(f),
+ }
+ }
+}
+
+impl From<io::Error> for LoadProcMacroDylibError {
+ fn from(e: io::Error) -> Self {
+ LoadProcMacroDylibError::Io(e)
+ }
+}
+
+impl From<libloading::Error> for LoadProcMacroDylibError {
+ fn from(e: libloading::Error) -> Self {
+ LoadProcMacroDylibError::LibLoading(e)
+ }
+}
+
+struct ProcMacroLibraryLibloading {
+ // Hold on to the library so it doesn't unload
+ _lib: Library,
+ abi: Abi,
+}
+
+impl ProcMacroLibraryLibloading {
+ fn open(file: &Path) -> Result<Self, LoadProcMacroDylibError> {
+ let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
+ invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display()))
+ })?;
+
+ let abs_file: &AbsPath = file.try_into().map_err(|_| {
+ invalid_data_err(format!("expected an absolute path, got {}", file.display()))
+ })?;
+ let version_info = read_dylib_info(abs_file)?;
+
+ let lib = load_library(file).map_err(invalid_data_err)?;
+ let abi = Abi::from_lib(&lib, symbol_name, version_info)?;
+ Ok(ProcMacroLibraryLibloading { _lib: lib, abi })
+ }
+}
+
+pub struct Expander {
+ inner: ProcMacroLibraryLibloading,
+}
+
+impl Expander {
+ pub fn new(lib: &Path) -> Result<Expander, LoadProcMacroDylibError> {
+        // Some dynamic-loading implementations require a canonicalized path even
+        // when it is already absolute
+ let lib = lib.canonicalize()?;
+
+ let lib = ensure_file_with_lock_free_access(&lib)?;
+
+ let library = ProcMacroLibraryLibloading::open(lib.as_ref())?;
+
+ Ok(Expander { inner: library })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, String> {
+ let result = self.inner.abi.expand(macro_name, macro_body, attributes);
+ result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.inner.abi.list_macros()
+ }
+}
+
+/// Copy the dylib to temp directory to prevent locking in Windows
+#[cfg(windows)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ use std::collections::hash_map::RandomState;
+ use std::ffi::OsString;
+ use std::hash::{BuildHasher, Hasher};
+
+ if std::env::var("RA_DONT_COPY_PROC_MACRO_DLL").is_ok() {
+ return Ok(path.to_path_buf());
+ }
+
+ let mut to = std::env::temp_dir();
+
+ let file_name = path.file_name().ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("File path is invalid: {}", path.display()),
+ )
+ })?;
+
+ // Generate a unique number by abusing `HashMap`'s hasher.
+ // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd.
+ let t = RandomState::new().build_hasher().finish();
+
+ let mut unique_name = OsString::from(t.to_string());
+ unique_name.push(file_name);
+
+ to.push(unique_name);
+ std::fs::copy(path, &to).unwrap();
+ Ok(to)
+}
+
+#[cfg(unix)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ Ok(path.to_path_buf())
+}
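
The Windows-only branch above sidesteps dylib file locking by copying the library to a uniquely named file in the temp directory, using a freshly seeded `RandomState` hasher as a cheap source of uniqueness. A minimal standalone sketch of that naming trick (the helper name is illustrative, not part of the crate):

    // A freshly seeded `RandomState` hasher yields a practically unique number
    // without pulling in a `rand` dependency.
    use std::collections::hash_map::RandomState;
    use std::ffi::OsString;
    use std::hash::{BuildHasher, Hasher};
    use std::path::PathBuf;

    fn unique_temp_path(file_name: &str) -> PathBuf {
        let token = RandomState::new().build_hasher().finish();
        let mut name = OsString::from(token.to_string());
        name.push(file_name);
        let mut to = std::env::temp_dir();
        to.push(name);
        to
    }

    fn main() {
        // Prints e.g. /tmp/13027718343742520853proc_macro_test.so (the number varies per run).
        println!("{}", unique_temp_path("proc_macro_test.so").display());
    }
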
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
new file mode 100644
index 000000000..4c205b9ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -0,0 +1,160 @@
+//! RA Proc Macro Server
+//!
+//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
+//! The general idea here is based on <https://github.com/fedochet/rust-proc-macro-expander>.
+//!
+//! But we adapt it to better fit RA needs:
+//!
+//! * We use `tt` for the proc-macro `TokenStream` server, as it is easier for RA to manipulate
+//!   and interact with than a `proc-macro2` token stream.
+//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
+//!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue.)
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(
+ feature = "sysroot-abi",
+ feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
+#![allow(unreachable_pub)]
+
+mod dylib;
+mod abis;
+
+use std::{
+ collections::{hash_map::Entry, HashMap},
+ env,
+ ffi::OsString,
+ fs,
+ path::{Path, PathBuf},
+ time::SystemTime,
+};
+
+use proc_macro_api::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ ProcMacroKind,
+};
+
+#[derive(Default)]
+pub(crate) struct ProcMacroSrv {
+ expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
+}
+
+const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
+
+impl ProcMacroSrv {
+ pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
+ let expander = self.expander(task.lib.as_ref()).map_err(|err| {
+ debug_assert!(false, "should list macros before asking to expand");
+ PanicMessage(format!("failed to load macro: {}", err))
+ })?;
+
+ let prev_env = EnvSnapshot::new();
+ for (k, v) in &task.env {
+ env::set_var(k, v);
+ }
+ let prev_working_dir = match task.current_dir {
+ Some(dir) => {
+ let prev_working_dir = std::env::current_dir().ok();
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err)
+ }
+ prev_working_dir
+ }
+ None => None,
+ };
+
+ let macro_body = task.macro_body.to_subtree();
+ let attributes = task.attributes.map(|it| it.to_subtree());
+ // FIXME: replace this with std's scoped threads once they stabilize
+ // (then remove dependency on crossbeam)
+ let result = crossbeam::scope(|s| {
+ let res = match s
+ .builder()
+ .stack_size(EXPANDER_STACK_SIZE)
+ .name(task.macro_name.clone())
+ .spawn(|_| {
+ expander
+ .expand(&task.macro_name, &macro_body, attributes.as_ref())
+ .map(|it| FlatTree::new(&it))
+ }) {
+ Ok(handle) => handle.join(),
+ Err(e) => std::panic::resume_unwind(Box::new(e)),
+ };
+
+ match res {
+ Ok(res) => res,
+ Err(e) => std::panic::resume_unwind(e),
+ }
+ });
+ let result = match result {
+ Ok(result) => result,
+ Err(e) => std::panic::resume_unwind(e),
+ };
+
+ prev_env.rollback();
+
+ if let Some(dir) = prev_working_dir {
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!(
+ "Failed to set the current working dir to {}. Error: {:?}",
+ dir.display(),
+ err
+ )
+ }
+ }
+
+ result.map_err(PanicMessage)
+ }
+
+ pub(crate) fn list_macros(
+ &mut self,
+ dylib_path: &Path,
+ ) -> Result<Vec<(String, ProcMacroKind)>, String> {
+ let expander = self.expander(dylib_path)?;
+ Ok(expander.list_macros())
+ }
+
+ fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
+ let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
+ format!("Failed to get file metadata for {}: {:?}", path.display(), err)
+ })?;
+
+ Ok(match self.expanders.entry((path.to_path_buf(), time)) {
+ Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
+ format!("Cannot create expander for {}: {:?}", path.display(), err)
+ })?),
+ Entry::Occupied(e) => e.into_mut(),
+ })
+ }
+}
+
+struct EnvSnapshot {
+ vars: HashMap<OsString, OsString>,
+}
+
+impl EnvSnapshot {
+ fn new() -> EnvSnapshot {
+ EnvSnapshot { vars: env::vars_os().collect() }
+ }
+
+ fn rollback(self) {
+ let mut old_vars = self.vars;
+ for (name, value) in env::vars_os() {
+ let old_value = old_vars.remove(&name);
+ if old_value != Some(value) {
+ match old_value {
+ None => env::remove_var(name),
+ Some(old_value) => env::set_var(name, old_value),
+ }
+ }
+ }
+ for (name, old_value) in old_vars {
+ env::set_var(name, old_value)
+ }
+ }
+}
+
+pub mod cli;
+
+#[cfg(test)]
+mod tests;
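
`ProcMacroSrv::expand` above runs each expansion on a dedicated scoped thread with an enlarged stack and re-raises any panic in the caller. A standalone sketch of that pattern, assuming the same `crossbeam` dependency the crate already uses (the helper name is illustrative):

    // Run a closure on a scoped thread with an 8 MiB stack, propagating panics.
    fn run_with_big_stack<T: Send>(name: &str, f: impl FnOnce() -> T + Send) -> T {
        const STACK_SIZE: usize = 8 * 1024 * 1024;
        let result = crossbeam::scope(|s| {
            let handle = s
                .builder()
                .stack_size(STACK_SIZE)
                .name(name.to_string())
                .spawn(|_| f())
                .expect("failed to spawn thread");
            match handle.join() {
                Ok(value) => value,
                Err(panic) => std::panic::resume_unwind(panic),
            }
        });
        match result {
            Ok(value) => value,
            Err(panic) => std::panic::resume_unwind(panic),
        }
    }
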
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
new file mode 100644
index 000000000..07222907f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -0,0 +1,166 @@
+//! proc-macro tests
+
+#[macro_use]
+mod utils;
+use expect_test::expect;
+use paths::AbsPathBuf;
+use utils::*;
+
+#[test]
+fn test_derive_empty() {
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
+}
+
+#[test]
+fn test_derive_error() {
+ assert_expand(
+ "DeriveError",
+ r#"struct S;"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
+ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_noop() {
+ assert_expand(
+ "fn_like_noop",
+ r#"ident, 0, 1, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 0 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 1 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_ident_subtree() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"ident, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_raw_ident() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ "r#async",
+ expect![[r#"
+ SUBTREE $
+ IDENT async 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_literals() {
+ assert_expand(
+ "fn_like_mk_literals",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL b"byte_string" 4294967295
+ LITERAL 'c' 4294967295
+ LITERAL "string" 4294967295
+ LITERAL 3.14f64 4294967295
+ LITERAL 3.14 4294967295
+ LITERAL 123i64 4294967295
+ LITERAL 123 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_idents() {
+ // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below
+ // simply ignore `is_raw` when implementing the `Ident` interface.
+ assert_expand(
+ "fn_like_mk_idents",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ IDENT standard 4294967295
+ IDENT raw 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_literals() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL 1u16 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 2_u32 4294967295
+ PUNCH , [alone] 4294967295
+ PUNCH - [alone] 4294967295
+ LITERAL 4i64 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 3.14f32 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL "hello bridge" 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_attr_macro() {
+ // Corresponds to
+ // #[proc_macro_test::attr_error(some arguments)]
+ // mod m {}
+ assert_expand_attr(
+ "attr_error",
+ r#"mod m {}"#,
+ r#"some arguments"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
+ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+/// Tests that we find and classify all proc macros correctly.
+#[test]
+fn list_test_macros() {
+ let res = list().join("\n");
+
+ expect![[r#"
+ fn_like_noop [FuncLike]
+ fn_like_panic [FuncLike]
+ fn_like_error [FuncLike]
+ fn_like_clone_tokens [FuncLike]
+ fn_like_mk_literals [FuncLike]
+ fn_like_mk_idents [FuncLike]
+ attr_noop [Attr]
+ attr_panic [Attr]
+ attr_error [Attr]
+ DeriveEmpty [CustomDerive]
+ DerivePanic [CustomDerive]
+ DeriveError [CustomDerive]"#]]
+ .assert_eq(&res);
+}
+
+#[test]
+fn test_version_check() {
+ let path = AbsPathBuf::assert(fixtures::proc_macro_test_dylib_path());
+ let info = proc_macro_api::read_dylib_info(&path).unwrap();
+ assert!(info.version.1 >= 50);
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
new file mode 100644
index 000000000..f881fe868
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -0,0 +1,47 @@
+//! utils used in proc-macro tests
+
+use crate::dylib;
+use crate::ProcMacroSrv;
+use expect_test::Expect;
+use std::str::FromStr;
+
+pub mod fixtures {
+ pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
+ proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
+ }
+}
+
+fn parse_string(code: &str) -> Option<crate::abis::TestTokenStream> {
+    // This is a bit strange. We need to parse a string into a token stream in
+    // order to create a tt::Subtree from it in fixtures. `into_subtree` is
+    // implemented by all the ABIs we have, so we arbitrarily choose one ABI to
+    // write a `parse_string` function for and use that. The tests don't really
+    // care which ABI we're using, as the `into_subtree` function isn't part of
+    // the ABI and shouldn't change between ABI versions.
+ crate::abis::TestTokenStream::from_str(code).ok()
+}
+
+pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, None, expect);
+}
+
+pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect);
+}
+
+fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+ let path = fixtures::proc_macro_test_dylib_path();
+ let expander = dylib::Expander::new(&path).unwrap();
+ let fixture = parse_string(input).unwrap();
+ let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
+
+ let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+ expect.assert_eq(&format!("{:?}", res));
+}
+
+pub(crate) fn list() -> Vec<String> {
+ let dylib_path = fixtures::proc_macro_test_dylib_path();
+ let mut srv = ProcMacroSrv::default();
+ let res = srv.list_macros(&dylib_path).unwrap();
+ res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
new file mode 100644
index 000000000..684477191
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "proc-macro-test"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+publish = false
+
+[lib]
+doctest = false
+
+[build-dependencies]
+proc-macro-test-impl = { path = "imp", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+cargo_metadata = "0.15.0"
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-test/build.rs
new file mode 100644
index 000000000..a80c96261
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/build.rs
@@ -0,0 +1,106 @@
+//! This will build the proc macro in `imp`, and copy the resulting dylib artifact into the
+//! `OUT_DIR`.
+//!
+//! `proc-macro-test` itself contains only a path to that artifact.
+//!
+//! The `PROC_MACRO_TEST_TOOLCHAIN` environment variable can be exported to use
+//! a specific rustup toolchain: this allows testing against older ABIs (e.g.
+//! 1.58) and future ABIs (stage1, nightly)
+
+use std::{
+ env, fs,
+ path::{Path, PathBuf},
+ process::Command,
+};
+
+use cargo_metadata::Message;
+
+fn main() {
+ println!("cargo:rerun-if-changed=imp");
+ println!("cargo:rerun-if-env-changed=PROC_MACRO_TEST_TOOLCHAIN");
+
+ let out_dir = env::var_os("OUT_DIR").unwrap();
+ let out_dir = Path::new(&out_dir);
+
+ let name = "proc-macro-test-impl";
+ let version = "0.0.0";
+
+ let imp_dir = std::env::current_dir().unwrap().join("imp");
+
+ let staging_dir = out_dir.join("proc-macro-test-imp-staging");
+    // This will error out if the staging dir didn't previously exist; checking
+    // for existence first would be racy (TOCTOU), so we just do our best to
+    // wipe it and ignore errors.
+ let _ = std::fs::remove_dir_all(&staging_dir);
+
+ println!("Creating {}", staging_dir.display());
+ std::fs::create_dir_all(&staging_dir).unwrap();
+
+ let src_dir = staging_dir.join("src");
+ println!("Creating {}", src_dir.display());
+ std::fs::create_dir_all(src_dir).unwrap();
+
+ for item_els in [&["Cargo.toml"][..], &["src", "lib.rs"]] {
+ let mut src = imp_dir.clone();
+ let mut dst = staging_dir.clone();
+ for el in item_els {
+ src.push(el);
+ dst.push(el);
+ }
+ println!("Copying {} to {}", src.display(), dst.display());
+ std::fs::copy(src, dst).unwrap();
+ }
+
+ let target_dir = out_dir.join("target");
+
+ let mut cmd = if let Ok(toolchain) = std::env::var("PROC_MACRO_TEST_TOOLCHAIN") {
+ // leverage rustup to find user-specific toolchain
+ let mut cmd = Command::new("cargo");
+ cmd.arg(format!("+{toolchain}"));
+ cmd
+ } else {
+ Command::new(toolchain::cargo())
+ };
+
+ cmd.current_dir(&staging_dir)
+ .args(&["build", "-p", "proc-macro-test-impl", "--message-format", "json"])
+        // Explicitly override the target directory to avoid using the same one as the
+        // parent cargo invocation, or we'll deadlock.
+        // This can happen when `CARGO_TARGET_DIR` is set or a global config forces all
+        // cargo instances to use the same target directory.
+ .arg("--target-dir")
+ .arg(&target_dir);
+
+ println!("Running {:?}", cmd);
+
+ let output = cmd.output().unwrap();
+ if !output.status.success() {
+ println!("proc-macro-test-impl failed to build");
+ println!("============ stdout ============");
+ println!("{}", String::from_utf8_lossy(&output.stdout));
+ println!("============ stderr ============");
+ println!("{}", String::from_utf8_lossy(&output.stderr));
+ panic!("proc-macro-test-impl failed to build");
+ }
+
+ let mut artifact_path = None;
+ for message in Message::parse_stream(output.stdout.as_slice()) {
+ match message.unwrap() {
+ Message::CompilerArtifact(artifact) => {
+ if artifact.target.kind.contains(&"proc-macro".to_string()) {
+ let repr = format!("{} {}", name, version);
+ if artifact.package_id.repr.starts_with(&repr) {
+ artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
+ }
+ }
+ }
+ _ => (), // Unknown message
+ }
+ }
+
+ // This file is under `target_dir` and is already under `OUT_DIR`.
+ let artifact_path = artifact_path.expect("no dylib for proc-macro-test-impl found");
+
+ let info_path = out_dir.join("proc_macro_test_location.txt");
+ fs::write(info_path, artifact_path.to_str().unwrap()).unwrap();
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
new file mode 100644
index 000000000..2d1fc3c5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "proc-macro-test-impl"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+publish = false
+
+[lib]
+doctest = false
+proc-macro = true
+
+[workspace]
+
+[dependencies]
+# this crate should not have any dependencies, since it uses its own workspace,
+# and its own `Cargo.lock`
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
new file mode 100644
index 000000000..feeacdb64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
@@ -0,0 +1,114 @@
+//! Exports a few trivial procedural macros for testing.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+
+#[proc_macro]
+pub fn fn_like_noop(args: TokenStream) -> TokenStream {
+ args
+}
+
+#[proc_macro]
+pub fn fn_like_panic(args: TokenStream) -> TokenStream {
+ panic!("fn_like_panic!({})", args);
+}
+
+#[proc_macro]
+pub fn fn_like_error(args: TokenStream) -> TokenStream {
+ format!("compile_error!(\"fn_like_error!({})\");", args).parse().unwrap()
+}
+
+#[proc_macro]
+pub fn fn_like_clone_tokens(args: TokenStream) -> TokenStream {
+ clone_stream(args)
+}
+
+#[proc_macro]
+pub fn fn_like_mk_literals(_args: TokenStream) -> TokenStream {
+ let trees: Vec<TokenTree> = vec![
+ TokenTree::from(Literal::byte_string(b"byte_string")),
+ TokenTree::from(Literal::character('c')),
+ TokenTree::from(Literal::string("string")),
+ // as of 2022-07-21, there's no method on `Literal` to build a raw
+ // string or a raw byte string
+ TokenTree::from(Literal::f64_suffixed(3.14)),
+ TokenTree::from(Literal::f64_unsuffixed(3.14)),
+ TokenTree::from(Literal::i64_suffixed(123)),
+ TokenTree::from(Literal::i64_unsuffixed(123)),
+ ];
+ TokenStream::from_iter(trees)
+}
+
+#[proc_macro]
+pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream {
+ let trees: Vec<TokenTree> = vec![
+ TokenTree::from(Ident::new("standard", Span::call_site())),
+ TokenTree::from(Ident::new_raw("raw", Span::call_site())),
+ ];
+ TokenStream::from_iter(trees)
+}
+
+#[proc_macro_attribute]
+pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+
+#[proc_macro_attribute]
+pub fn attr_panic(args: TokenStream, item: TokenStream) -> TokenStream {
+ panic!("#[attr_panic {}] {}", args, item);
+}
+
+#[proc_macro_attribute]
+pub fn attr_error(args: TokenStream, item: TokenStream) -> TokenStream {
+ format!("compile_error!(\"#[attr_error({})] {}\");", args, item).parse().unwrap()
+}
+
+#[proc_macro_derive(DeriveEmpty)]
+pub fn derive_empty(_item: TokenStream) -> TokenStream {
+ TokenStream::new()
+}
+
+#[proc_macro_derive(DerivePanic)]
+pub fn derive_panic(item: TokenStream) -> TokenStream {
+ panic!("#[derive(DerivePanic)] {}", item);
+}
+
+#[proc_macro_derive(DeriveError)]
+pub fn derive_error(item: TokenStream) -> TokenStream {
+ format!("compile_error!(\"#[derive(DeriveError)] {}\");", item).parse().unwrap()
+}
+
+fn clone_stream(ts: TokenStream) -> TokenStream {
+ ts.into_iter().map(clone_tree).collect()
+}
+
+fn clone_tree(t: TokenTree) -> TokenTree {
+ match t {
+ TokenTree::Group(orig) => {
+ let mut new = Group::new(orig.delimiter(), clone_stream(orig.stream()));
+ new.set_span(orig.span());
+ TokenTree::Group(new)
+ }
+ TokenTree::Ident(orig) => {
+ let s = orig.to_string();
+ if let Some(rest) = s.strip_prefix("r#") {
+ TokenTree::Ident(Ident::new_raw(rest, orig.span()))
+ } else {
+ TokenTree::Ident(Ident::new(&s, orig.span()))
+ }
+ }
+ TokenTree::Punct(orig) => {
+ let mut new = Punct::new(orig.as_char(), orig.spacing());
+ new.set_span(orig.span());
+ TokenTree::Punct(new)
+ }
+ TokenTree::Literal(orig) => {
+ // this goes through `literal_from_str` as of 2022-07-18, cf.
+ // https://github.com/rust-lang/rust/commit/b34c79f8f1ef4d0149ad4bf77e1759c07a9a01a8
+ let mut new: Literal = orig.to_string().parse().unwrap();
+ new.set_span(orig.span());
+ TokenTree::Literal(new)
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
new file mode 100644
index 000000000..6d57bc81e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
@@ -0,0 +1,6 @@
+//! Exports a few trivial procedural macros for testing.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub static PROC_MACRO_TEST_LOCATION: &str =
+ include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt"));
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
new file mode 100644
index 000000000..0b78a45a2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "profile"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+once_cell = "1.12.0"
+cfg-if = "1.0.0"
+libc = "0.2.126"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+countme = { version = "3.0.1", features = ["enable"] }
+jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
+
+[target.'cfg(target_os = "linux")'.dependencies]
+perf-event = "0.4.7"
+
+[target.'cfg(windows)'.dependencies]
+winapi = { version = "0.3.9", features = ["processthreadsapi", "psapi"] }
+
+[features]
+cpu_profiler = []
+jemalloc = ["jemalloc-ctl"]
+
+# Uncomment to enable for the whole crate graph
+# default = [ "cpu_profiler" ]
diff --git a/src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs b/src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs
new file mode 100644
index 000000000..cae6caeaa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/google_cpu_profiler.rs
@@ -0,0 +1,44 @@
+//! https://github.com/gperftools/gperftools
+
+use std::{
+ ffi::CString,
+ os::raw::c_char,
+ path::Path,
+ sync::atomic::{AtomicUsize, Ordering},
+};
+
+#[link(name = "profiler")]
+#[allow(non_snake_case)]
+extern "C" {
+ fn ProfilerStart(fname: *const c_char) -> i32;
+ fn ProfilerStop();
+}
+
+const OFF: usize = 0;
+const ON: usize = 1;
+const PENDING: usize = 2;
+
+fn transition(current: usize, new: usize) -> bool {
+ static STATE: AtomicUsize = AtomicUsize::new(OFF);
+
+ STATE.compare_exchange(current, new, Ordering::SeqCst, Ordering::SeqCst).is_ok()
+}
+
+pub(crate) fn start(path: &Path) {
+ if !transition(OFF, PENDING) {
+ panic!("profiler already started");
+ }
+ let path = CString::new(path.display().to_string()).unwrap();
+ if unsafe { ProfilerStart(path.as_ptr()) } == 0 {
+ panic!("profiler failed to start")
+ }
+ assert!(transition(PENDING, ON));
+}
+
+pub(crate) fn stop() {
+ if !transition(ON, PENDING) {
+ panic!("profiler is not started")
+ }
+ unsafe { ProfilerStop() };
+ assert!(transition(PENDING, OFF));
+}
diff --git a/src/tools/rust-analyzer/crates/profile/src/hprof.rs b/src/tools/rust-analyzer/crates/profile/src/hprof.rs
new file mode 100644
index 000000000..b562c193e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/hprof.rs
@@ -0,0 +1,326 @@
+//! Simple hierarchical profiler
+use std::{
+ cell::RefCell,
+ collections::{BTreeMap, HashSet},
+ env, fmt,
+ io::{stderr, Write},
+ sync::{
+ atomic::{AtomicBool, Ordering},
+ RwLock,
+ },
+ time::{Duration, Instant},
+};
+
+use once_cell::sync::Lazy;
+
+use crate::tree::{Idx, Tree};
+
+/// Filtering syntax
+/// env RA_PROFILE=* // dump everything
+/// env RA_PROFILE=foo|bar|baz // enable only the selected entries
+/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
+pub fn init() {
+ countme::enable(env::var("RA_COUNT").is_ok());
+ let spec = env::var("RA_PROFILE").unwrap_or_default();
+ init_from(&spec);
+}
+
+pub fn init_from(spec: &str) {
+ let filter = if spec.is_empty() { Filter::disabled() } else { Filter::from_spec(spec) };
+ filter.install();
+}
+
+type Label = &'static str;
+
+/// This function starts a profiling scope in the current execution stack with a given description.
+/// It returns a `ProfileSpan` guard that measures the elapsed time between this call and the
+/// moment the guard is dropped.
+/// Nested profiling scopes are supported: when this function is invoked multiple times on the
+/// execution stack, the profiling information is nested in the output, which is printed to stderr.
+///
+/// # Example
+/// ```
+/// profile::init_from("profile1|profile2@2");
+/// profiling_function1();
+///
+/// fn profiling_function1() {
+/// let _p = profile::span("profile1");
+/// profiling_function2();
+/// }
+///
+/// fn profiling_function2() {
+/// let _p = profile::span("profile2");
+/// }
+/// ```
+/// This will print the following to stderr:
+/// ```text
+///  0ms - profile1
+/// 0ms - profile2
+/// ```
+#[inline]
+pub fn span(label: Label) -> ProfileSpan {
+ debug_assert!(!label.is_empty());
+
+ let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
+ if enabled && with_profile_stack(|stack| stack.push(label)) {
+ ProfileSpan(Some(ProfilerImpl { label, detail: None }))
+ } else {
+ ProfileSpan(None)
+ }
+}
+
+#[inline]
+pub fn heartbeat_span() -> HeartbeatSpan {
+ let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
+ HeartbeatSpan::new(enabled)
+}
+
+#[inline]
+pub fn heartbeat() {
+ let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
+ if enabled {
+ with_profile_stack(|it| it.heartbeat(1));
+ }
+}
+
+pub struct ProfileSpan(Option<ProfilerImpl>);
+
+struct ProfilerImpl {
+ label: Label,
+ detail: Option<String>,
+}
+
+impl ProfileSpan {
+ pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan {
+ if let Some(profiler) = &mut self.0 {
+ profiler.detail = Some(detail());
+ }
+ self
+ }
+}
+
+impl Drop for ProfilerImpl {
+ #[inline]
+ fn drop(&mut self) {
+ with_profile_stack(|it| it.pop(self.label, self.detail.take()));
+ }
+}
+
+pub struct HeartbeatSpan {
+ enabled: bool,
+}
+
+impl HeartbeatSpan {
+ #[inline]
+ pub fn new(enabled: bool) -> Self {
+ if enabled {
+ with_profile_stack(|it| it.heartbeats(true));
+ }
+ Self { enabled }
+ }
+}
+
+impl Drop for HeartbeatSpan {
+ fn drop(&mut self) {
+ if self.enabled {
+ with_profile_stack(|it| it.heartbeats(false));
+ }
+ }
+}
+
+static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false);
+static FILTER: Lazy<RwLock<Filter>> = Lazy::new(Default::default);
+
+fn with_profile_stack<T>(f: impl FnOnce(&mut ProfileStack) -> T) -> T {
+ thread_local!(static STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new()));
+ STACK.with(|it| f(&mut *it.borrow_mut()))
+}
+
+#[derive(Default, Clone, Debug)]
+struct Filter {
+ depth: usize,
+ allowed: HashSet<String>,
+ longer_than: Duration,
+ heartbeat_longer_than: Duration,
+ version: usize,
+}
+
+impl Filter {
+ fn disabled() -> Filter {
+ Filter::default()
+ }
+
+ fn from_spec(mut spec: &str) -> Filter {
+ let longer_than = if let Some(idx) = spec.rfind('>') {
+ let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than");
+ spec = &spec[..idx];
+ Duration::from_millis(longer_than)
+ } else {
+ Duration::new(0, 0)
+ };
+ let heartbeat_longer_than = longer_than;
+
+ let depth = if let Some(idx) = spec.rfind('@') {
+ let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth");
+ spec = &spec[..idx];
+ depth
+ } else {
+ 999
+ };
+ let allowed =
+ if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() };
+ Filter { depth, allowed, longer_than, heartbeat_longer_than, version: 0 }
+ }
+
+ fn install(mut self) {
+ PROFILING_ENABLED.store(self.depth > 0, Ordering::SeqCst);
+ let mut old = FILTER.write().unwrap();
+ self.version = old.version + 1;
+ *old = self;
+ }
+}
+
+struct ProfileStack {
+ frames: Vec<Frame>,
+ filter: Filter,
+ messages: Tree<Message>,
+ heartbeats: bool,
+}
+
+struct Frame {
+ t: Instant,
+ heartbeats: u32,
+}
+
+#[derive(Default)]
+struct Message {
+ duration: Duration,
+ label: Label,
+ detail: Option<String>,
+}
+
+impl ProfileStack {
+ fn new() -> ProfileStack {
+ ProfileStack {
+ frames: Vec::new(),
+ messages: Tree::default(),
+ filter: Default::default(),
+ heartbeats: false,
+ }
+ }
+
+ fn push(&mut self, label: Label) -> bool {
+ if self.frames.is_empty() {
+ if let Ok(f) = FILTER.try_read() {
+ if f.version > self.filter.version {
+ self.filter = f.clone();
+ }
+ };
+ }
+ if self.frames.len() > self.filter.depth {
+ return false;
+ }
+ let allowed = &self.filter.allowed;
+ if self.frames.is_empty() && !allowed.is_empty() && !allowed.contains(label) {
+ return false;
+ }
+
+ self.frames.push(Frame { t: Instant::now(), heartbeats: 0 });
+ self.messages.start();
+ true
+ }
+
+ fn pop(&mut self, label: Label, detail: Option<String>) {
+ let frame = self.frames.pop().unwrap();
+ let duration = frame.t.elapsed();
+
+ if self.heartbeats {
+ self.heartbeat(frame.heartbeats);
+ let avg_span = duration / (frame.heartbeats + 1);
+ if avg_span > self.filter.heartbeat_longer_than {
+ eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration);
+ }
+ }
+
+ self.messages.finish(Message { duration, label, detail });
+ if self.frames.is_empty() {
+ let longer_than = self.filter.longer_than;
+ // Convert to millis for comparison to avoid problems with rounding
+ // (otherwise we could print `0ms` despite user's `>0` filter when
+ // `duration` is just a few nanos).
+ if duration.as_millis() > longer_than.as_millis() {
+ if let Some(root) = self.messages.root() {
+ print(&self.messages, root, 0, longer_than, &mut stderr().lock());
+ }
+ }
+ self.messages.clear();
+ }
+ }
+
+ fn heartbeats(&mut self, yes: bool) {
+ self.heartbeats = yes;
+ }
+ fn heartbeat(&mut self, n: u32) {
+ if let Some(frame) = self.frames.last_mut() {
+ frame.heartbeats += n;
+ }
+ }
+}
+
+fn print(
+ tree: &Tree<Message>,
+ curr: Idx<Message>,
+ level: u32,
+ longer_than: Duration,
+ out: &mut impl Write,
+) {
+ let current_indent = " ".repeat(level as usize);
+ let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default();
+ writeln!(
+ out,
+ "{}{} - {}{}",
+ current_indent,
+ ms(tree[curr].duration),
+ tree[curr].label,
+ detail,
+ )
+ .expect("printing profiling info");
+
+ let mut accounted_for = Duration::default();
+ let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output.
+ for child in tree.children(curr) {
+ accounted_for += tree[child].duration;
+
+ if tree[child].duration.as_millis() > longer_than.as_millis() {
+ print(tree, child, level + 1, longer_than, out);
+ } else {
+ let (total_duration, cnt) =
+ short_children.entry(tree[child].label).or_insert((Duration::default(), 0));
+ *total_duration += tree[child].duration;
+ *cnt += 1;
+ }
+ }
+
+ for (child_msg, (duration, count)) in &short_children {
+ writeln!(out, " {}{} - {} ({} calls)", current_indent, ms(*duration), child_msg, count)
+ .expect("printing profiling info");
+ }
+
+ let unaccounted = tree[curr].duration - accounted_for;
+ if tree.children(curr).next().is_some() && unaccounted > longer_than {
+ writeln!(out, " {}{} - ???", current_indent, ms(unaccounted))
+ .expect("printing profiling info");
+ }
+}
+
+#[allow(non_camel_case_types)]
+struct ms(Duration);
+
+impl fmt::Display for ms {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.0.as_millis() {
+ 0 => f.write_str(" 0 "),
+ n => write!(f, "{:5}ms", n),
+ }
+ }
+}
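
The `RA_PROFILE` filter documented above combines an allow-list, a maximum depth, and a minimum duration, parsed right-to-left by `Filter::from_spec`. A worked example, with the decomposition derived by hand from the parser (the helper function is illustrative):

    // "foo|bar@2>10" decomposes as:
    //   longer_than = 10 ms          (suffix after the last '>')
    //   depth       = 2              (suffix after the last '@')
    //   allowed     = {"foo", "bar"} (remaining spec split on '|'; "*" means everything)
    //
    // Programmatic equivalent of running with RA_PROFILE="foo|bar@2>10",
    // assuming the `profile` crate is a dependency as elsewhere in rust-analyzer:
    fn enable_profiling_for_tests() {
        profile::init_from("foo|bar@2>10");
    }
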
diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs
new file mode 100644
index 000000000..00f7952e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs
@@ -0,0 +1,130 @@
+//! A collection of tools for profiling rust-analyzer.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod stop_watch;
+mod memory_usage;
+#[cfg(feature = "cpu_profiler")]
+mod google_cpu_profiler;
+mod hprof;
+mod tree;
+
+use std::cell::RefCell;
+
+pub use crate::{
+ hprof::{heartbeat, heartbeat_span, init, init_from, span},
+ memory_usage::{Bytes, MemoryUsage},
+ stop_watch::{StopWatch, StopWatchSpan},
+};
+
+pub use countme;
+/// Include `_c: Count<Self>` field in important structs to count them.
+///
+/// To view the counts, run with `RA_COUNT=1`. The overhead of disabled count is
+/// almost zero.
+pub use countme::Count;
+
+thread_local!(static IN_SCOPE: RefCell<bool> = RefCell::new(false));
+
+/// Allows checking whether the current code is within some dynamic scope; this can be
+/// useful during debugging to figure out why a function is called.
+pub struct Scope {
+ prev: bool,
+}
+
+impl Scope {
+ #[must_use]
+ pub fn enter() -> Scope {
+ let prev = IN_SCOPE.with(|slot| std::mem::replace(&mut *slot.borrow_mut(), true));
+ Scope { prev }
+ }
+ pub fn is_active() -> bool {
+ IN_SCOPE.with(|slot| *slot.borrow())
+ }
+}
+
+impl Drop for Scope {
+ fn drop(&mut self) {
+ IN_SCOPE.with(|slot| *slot.borrow_mut() = self.prev);
+ }
+}
+
+/// A wrapper around google_cpu_profiler.
+///
+/// Usage:
+/// 1. Install gperftools (<https://github.com/gperftools/gperftools>), probably packaged with your Linux distro.
+/// 2. Build with `cpu_profiler` feature.
+/// 3. Run the code, the *raw* output would be in the `./out.profile` file.
+/// 4. Install pprof for visualization (<https://github.com/google/pprof>).
+/// 5. Bump sampling frequency to once per ms: `export CPUPROFILE_FREQUENCY=1000`
+/// 6. Use something like `pprof -svg target/release/rust-analyzer ./out.profile` to see the results.
+///
+/// For example, here's how I run profiling on NixOS:
+///
+/// ```bash
+/// $ bat -p shell.nix
+/// with import <nixpkgs> {};
+/// mkShell {
+/// buildInputs = [ gperftools ];
+/// shellHook = ''
+/// export LD_LIBRARY_PATH="${gperftools}/lib:"
+/// '';
+/// }
+/// $ set -x CPUPROFILE_FREQUENCY 1000
+/// $ nix-shell --run 'cargo test --release --package rust-analyzer --lib -- benchmarks::benchmark_integrated_highlighting --exact --nocapture'
+/// $ pprof -svg target/release/deps/rust_analyzer-8739592dc93d63cb crates/rust-analyzer/out.profile > profile.svg
+/// ```
+///
+/// See this diff for how to profile completions:
+///
+/// <https://github.com/rust-lang/rust-analyzer/pull/5306>
+#[derive(Debug)]
+pub struct CpuSpan {
+ _private: (),
+}
+
+#[must_use]
+pub fn cpu_span() -> CpuSpan {
+ #[cfg(feature = "cpu_profiler")]
+ {
+ google_cpu_profiler::start("./out.profile".as_ref())
+ }
+
+ #[cfg(not(feature = "cpu_profiler"))]
+ {
+ eprintln!(
+ r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
+ );
+ }
+
+ CpuSpan { _private: () }
+}
+
+impl Drop for CpuSpan {
+ fn drop(&mut self) {
+ #[cfg(feature = "cpu_profiler")]
+ {
+ google_cpu_profiler::stop();
+ let profile_data = std::env::current_dir().unwrap().join("out.profile");
+ eprintln!("Profile data saved to:\n\n {}\n", profile_data.display());
+ let mut cmd = std::process::Command::new("pprof");
+ cmd.arg("-svg").arg(std::env::current_exe().unwrap()).arg(&profile_data);
+ let out = cmd.output();
+
+ match out {
+ Ok(out) if out.status.success() => {
+ let svg = profile_data.with_extension("svg");
+ std::fs::write(&svg, &out.stdout).unwrap();
+ eprintln!("Profile rendered to:\n\n {}\n", svg.display());
+ }
+ _ => {
+ eprintln!("Failed to run:\n\n {:?}\n", cmd);
+ }
+ }
+ }
+ }
+}
+
+pub fn memory_usage() -> MemoryUsage {
+ MemoryUsage::now()
+}
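
`Scope` above is an RAII marker for "is the current code inside this dynamic region?" checks. A hedged usage sketch, assuming the `profile` crate is a dependency as elsewhere in rust-analyzer (the function names are illustrative):

    fn process_file() {
        // Everything reachable from here counts as "in scope".
        let _guard = profile::Scope::enter();
        helper();
        // `_guard` drops here and restores the previous state.
    }

    fn helper() {
        if profile::Scope::is_active() {
            eprintln!("helper() was reached from within process_file's scope");
        }
    }
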
diff --git a/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
new file mode 100644
index 000000000..ee882b4cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
@@ -0,0 +1,127 @@
+//! Like [`std::time::Instant`], but for memory.
+//!
+//! Measures the total size of all currently allocated objects.
+use std::fmt;
+
+use cfg_if::cfg_if;
+
+#[derive(Copy, Clone)]
+pub struct MemoryUsage {
+ pub allocated: Bytes,
+}
+
+impl fmt::Display for MemoryUsage {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.allocated.fmt(f)
+ }
+}
+
+impl std::ops::Sub for MemoryUsage {
+ type Output = MemoryUsage;
+ fn sub(self, rhs: MemoryUsage) -> MemoryUsage {
+ MemoryUsage { allocated: self.allocated - rhs.allocated }
+ }
+}
+
+impl MemoryUsage {
+ pub fn now() -> MemoryUsage {
+ cfg_if! {
+ if #[cfg(all(feature = "jemalloc", not(target_env = "msvc")))] {
+ jemalloc_ctl::epoch::advance().unwrap();
+ MemoryUsage {
+ allocated: Bytes(jemalloc_ctl::stats::allocated::read().unwrap() as isize),
+ }
+ } else if #[cfg(all(target_os = "linux", target_env = "gnu"))] {
+ memusage_linux()
+ } else if #[cfg(windows)] {
+ // There doesn't seem to be an API for determining heap usage, so we try to
+ // approximate that by using the Commit Charge value.
+
+ use winapi::um::processthreadsapi::*;
+ use winapi::um::psapi::*;
+ use std::mem::{MaybeUninit, size_of};
+
+ let proc = unsafe { GetCurrentProcess() };
+ let mut mem_counters = MaybeUninit::uninit();
+ let cb = size_of::<PROCESS_MEMORY_COUNTERS>();
+ let ret = unsafe { GetProcessMemoryInfo(proc, mem_counters.as_mut_ptr(), cb as u32) };
+ assert!(ret != 0);
+
+ let usage = unsafe { mem_counters.assume_init().PagefileUsage };
+ MemoryUsage { allocated: Bytes(usage as isize) }
+ } else {
+ MemoryUsage { allocated: Bytes(0) }
+ }
+ }
+ }
+}
+
+#[cfg(all(target_os = "linux", target_env = "gnu", not(feature = "jemalloc")))]
+fn memusage_linux() -> MemoryUsage {
+ // Linux/glibc has 2 APIs for allocator introspection that we can use: mallinfo and mallinfo2.
+ // mallinfo uses `int` fields and cannot handle memory usage exceeding 2 GB.
+ // mallinfo2 is very recent, so its presence needs to be detected at runtime.
+ // Both are abysmally slow.
+
+ use std::ffi::CStr;
+ use std::sync::atomic::{AtomicUsize, Ordering};
+
+ static MALLINFO2: AtomicUsize = AtomicUsize::new(1);
+
+ let mut mallinfo2 = MALLINFO2.load(Ordering::Relaxed);
+ if mallinfo2 == 1 {
+ let cstr = CStr::from_bytes_with_nul(b"mallinfo2\0").unwrap();
+ mallinfo2 = unsafe { libc::dlsym(libc::RTLD_DEFAULT, cstr.as_ptr()) } as usize;
+ // NB: races don't matter here, since they'll always store the same value
+ MALLINFO2.store(mallinfo2, Ordering::Relaxed);
+ }
+
+ if mallinfo2 == 0 {
+ // mallinfo2 does not exist, use mallinfo.
+ let alloc = unsafe { libc::mallinfo() }.uordblks as isize;
+ MemoryUsage { allocated: Bytes(alloc) }
+ } else {
+ let mallinfo2: fn() -> libc::mallinfo2 = unsafe { std::mem::transmute(mallinfo2) };
+ let alloc = mallinfo2().uordblks as isize;
+ MemoryUsage { allocated: Bytes(alloc) }
+ }
+}
+
+#[derive(Default, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
+pub struct Bytes(isize);
+
+impl Bytes {
+ pub fn megabytes(self) -> isize {
+ self.0 / 1024 / 1024
+ }
+}
+
+impl fmt::Display for Bytes {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let bytes = self.0;
+ let mut value = bytes;
+ let mut suffix = "b";
+ if value.abs() > 4096 {
+ value /= 1024;
+ suffix = "kb";
+ if value.abs() > 4096 {
+ value /= 1024;
+ suffix = "mb";
+ }
+ }
+ f.pad(&format!("{}{}", value, suffix))
+ }
+}
+
+impl std::ops::AddAssign<usize> for Bytes {
+ fn add_assign(&mut self, x: usize) {
+ self.0 += x as isize;
+ }
+}
+
+impl std::ops::Sub for Bytes {
+ type Output = Bytes;
+ fn sub(self, rhs: Bytes) -> Bytes {
+ Bytes(self.0 - rhs.0)
+ }
+}
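
`MemoryUsage` values subtract like `Instant`s, which keeps before/after measurements terse. A hedged sketch using the re-exported `profile::memory_usage()` helper (the function name below is illustrative):

    fn report_allocation_growth() {
        let before = profile::memory_usage().allocated;
        let data: Vec<u64> = (0..1_000_000).collect();
        let after = profile::memory_usage().allocated;
        // `Bytes` scales its own unit, so this prints something like "7mb".
        eprintln!("allocated {} for {} elements", after - before, data.len());
    }
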
diff --git a/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs
new file mode 100644
index 000000000..625832848
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs
@@ -0,0 +1,101 @@
+//! Like `std::time::Instant`, but also measures memory & CPU cycles.
+use std::{
+ fmt,
+ time::{Duration, Instant},
+};
+
+use crate::MemoryUsage;
+
+pub struct StopWatch {
+ time: Instant,
+ #[cfg(target_os = "linux")]
+ counter: Option<perf_event::Counter>,
+ memory: Option<MemoryUsage>,
+}
+
+pub struct StopWatchSpan {
+ pub time: Duration,
+ pub instructions: Option<u64>,
+ pub memory: Option<MemoryUsage>,
+}
+
+impl StopWatch {
+ pub fn start() -> StopWatch {
+ #[cfg(target_os = "linux")]
+ let counter = {
+ // When debugging rust-analyzer using rr, the perf-related syscalls cause it to abort.
+ // We allow disabling perf by setting the env var `RA_DISABLE_PERF`.
+
+ use once_cell::sync::Lazy;
+ static PERF_ENABLED: Lazy<bool> =
+ Lazy::new(|| std::env::var_os("RA_DISABLE_PERF").is_none());
+
+ if *PERF_ENABLED {
+ let mut counter = perf_event::Builder::new()
+ .build()
+ .map_err(|err| eprintln!("Failed to create perf counter: {}", err))
+ .ok();
+ if let Some(counter) = &mut counter {
+ if let Err(err) = counter.enable() {
+ eprintln!("Failed to start perf counter: {}", err)
+ }
+ }
+ counter
+ } else {
+ None
+ }
+ };
+ let time = Instant::now();
+ StopWatch {
+ time,
+ #[cfg(target_os = "linux")]
+ counter,
+ memory: None,
+ }
+ }
+ pub fn memory(mut self, yes: bool) -> StopWatch {
+ if yes {
+ self.memory = Some(MemoryUsage::now());
+ }
+ self
+ }
+ pub fn elapsed(&mut self) -> StopWatchSpan {
+ let time = self.time.elapsed();
+
+ #[cfg(target_os = "linux")]
+ let instructions = self.counter.as_mut().and_then(|it| {
+ it.read().map_err(|err| eprintln!("Failed to read perf counter: {}", err)).ok()
+ });
+ #[cfg(not(target_os = "linux"))]
+ let instructions = None;
+
+ let memory = self.memory.map(|it| MemoryUsage::now() - it);
+ StopWatchSpan { time, instructions, memory }
+ }
+}
+
+impl fmt::Display for StopWatchSpan {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:.2?}", self.time)?;
+ if let Some(mut instructions) = self.instructions {
+ let mut prefix = "";
+ if instructions > 10000 {
+ instructions /= 1000;
+ prefix = "k";
+ }
+ if instructions > 10000 {
+ instructions /= 1000;
+ prefix = "m";
+ }
+ if instructions > 10000 {
+ instructions /= 1000;
+ prefix = "g";
+ }
+ write!(f, ", {}{}instr", instructions, prefix)?;
+ }
+ if let Some(memory) = self.memory {
+ write!(f, ", {}", memory)?;
+ }
+ Ok(())
+ }
+}
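
A hedged usage sketch for `StopWatch`: wall time is always reported, instruction counts only on Linux when perf counters are available, and memory only when requested (the function name is illustrative):

    fn time_something() {
        let mut watch = profile::StopWatch::start().memory(true);
        let total: u64 = (0..10_000_000u64).sum();
        let span = watch.elapsed();
        // Prints e.g. "4.21ms, 30minstr, 0b", depending on platform and environment.
        eprintln!("computed {} in {}", total, span);
    }
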
diff --git a/src/tools/rust-analyzer/crates/profile/src/tree.rs b/src/tools/rust-analyzer/crates/profile/src/tree.rs
new file mode 100644
index 000000000..62f0c30b5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/profile/src/tree.rs
@@ -0,0 +1,84 @@
+//! A simple tree implementation which tries to not allocate all over the place.
+use std::ops;
+
+use la_arena::Arena;
+
+#[derive(Default)]
+pub(crate) struct Tree<T> {
+ nodes: Arena<Node<T>>,
+ current_path: Vec<(Idx<T>, Option<Idx<T>>)>,
+}
+
+pub(crate) type Idx<T> = la_arena::Idx<Node<T>>;
+
+impl<T> Tree<T> {
+ pub(crate) fn start(&mut self)
+ where
+ T: Default,
+ {
+ let me = self.nodes.alloc(Node::new(T::default()));
+ if let Some((parent, last_child)) = self.current_path.last_mut() {
+ let slot = match *last_child {
+ Some(last_child) => &mut self.nodes[last_child].next_sibling,
+ None => &mut self.nodes[*parent].first_child,
+ };
+ let prev = slot.replace(me);
+ assert!(prev.is_none());
+ *last_child = Some(me);
+ }
+
+ self.current_path.push((me, None));
+ }
+
+ pub(crate) fn finish(&mut self, data: T) {
+ let (me, _last_child) = self.current_path.pop().unwrap();
+ self.nodes[me].data = data;
+ }
+
+ pub(crate) fn root(&self) -> Option<Idx<T>> {
+ self.nodes.iter().next().map(|(idx, _)| idx)
+ }
+
+ pub(crate) fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ {
+ NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child }
+ }
+ pub(crate) fn clear(&mut self) {
+ self.nodes.clear();
+ self.current_path.clear();
+ }
+}
+
+impl<T> ops::Index<Idx<T>> for Tree<T> {
+ type Output = T;
+ fn index(&self, index: Idx<T>) -> &T {
+ &self.nodes[index].data
+ }
+}
+
+pub(crate) struct Node<T> {
+ data: T,
+ first_child: Option<Idx<T>>,
+ next_sibling: Option<Idx<T>>,
+}
+
+impl<T> Node<T> {
+ fn new(data: T) -> Node<T> {
+ Node { data, first_child: None, next_sibling: None }
+ }
+}
+
+struct NodeIter<'a, T> {
+ nodes: &'a Arena<Node<T>>,
+ next: Option<Idx<T>>,
+}
+
+impl<'a, T> Iterator for NodeIter<'a, T> {
+ type Item = Idx<T>;
+
+ fn next(&mut self) -> Option<Idx<T>> {
+ self.next.map(|next| {
+ self.next = self.nodes[next].next_sibling;
+ next
+ })
+ }
+}
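
`Tree` is driven in the same push/pop order as the profiling stack in hprof.rs: `start()` opens a node under the currently open one, and `finish(data)` closes the most recently opened node. A crate-internal sketch of that protocol (illustrative only; the type is `pub(crate)`):

    use crate::tree::Tree; // only visible inside the `profile` crate

    fn build_small_tree() -> Tree<&'static str> {
        let mut tree = Tree::default();
        tree.start(); // root
        tree.start(); //   first child
        tree.finish("child A");
        tree.start(); //   second child
        tree.finish("child B");
        tree.finish("root");
        // `tree.root()` now points at "root", whose children are "child A" and "child B".
        tree
    }
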
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
new file mode 100644
index 000000000..bc75d6faa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "project-model"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+tracing = "0.1.35"
+rustc-hash = "1.1.0"
+cargo_metadata = "0.15.0"
+semver = "1.0.10"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = "1.0.81"
+anyhow = "1.0.57"
+expect-test = "1.4.0"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+
+cfg = { path = "../cfg", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
new file mode 100644
index 000000000..ee7f8339a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -0,0 +1,238 @@
+//! Workspace information we get from cargo consists of two pieces. The first is
+//! the output of `cargo metadata`. The second is the output of running
+//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc
+//! macros.
+//!
+//! This module implements this second part. We use "build script" terminology
+//! here, but it covers procedural macros as well.
+
+use std::{cell::RefCell, io, path::PathBuf, process::Command};
+
+use cargo_metadata::{camino::Utf8Path, Message};
+use la_arena::ArenaMap;
+use paths::AbsPathBuf;
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+
+use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package};
+
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+pub struct WorkspaceBuildScripts {
+ outputs: ArenaMap<Package, Option<BuildScriptOutput>>,
+ error: Option<String>,
+}
+
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub(crate) struct BuildScriptOutput {
+ /// List of config flags defined by this package's build script.
+ pub(crate) cfgs: Vec<CfgFlag>,
+ /// List of cargo-related environment variables with their value.
+ ///
+ /// If the package has a build script which defines environment variables,
+ /// they can also be found here.
+ pub(crate) envs: Vec<(String, String)>,
+ /// Directory where a build script might place its output.
+ pub(crate) out_dir: Option<AbsPathBuf>,
+ /// Path to the proc-macro library file if this package exposes proc-macros.
+ pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+}
+
+impl WorkspaceBuildScripts {
+ fn build_command(config: &CargoConfig) -> Command {
+ if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() {
+ let mut cmd = Command::new(program);
+ cmd.args(args);
+ return cmd;
+ }
+
+ let mut cmd = Command::new(toolchain::cargo());
+
+ cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
+
+ // --all-targets includes tests, benches and examples in addition to the
+ // default lib and bins. This is an independent concept from the --targets
+ // flag below.
+ cmd.arg("--all-targets");
+
+ if let Some(target) = &config.target {
+ cmd.args(&["--target", target]);
+ }
+
+ if config.all_features {
+ cmd.arg("--all-features");
+ } else {
+ if config.no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !config.features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(config.features.join(" "));
+ }
+ }
+
+ cmd
+ }
+
+ pub(crate) fn run(
+ config: &CargoConfig,
+ workspace: &CargoWorkspace,
+ progress: &dyn Fn(String),
+ ) -> io::Result<WorkspaceBuildScripts> {
+ let mut cmd = Self::build_command(config);
+
+ if config.wrap_rustc_in_build_scripts {
+ // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
+
+ cmd.current_dir(workspace.workspace_root());
+
+ let mut res = WorkspaceBuildScripts::default();
+ let outputs = &mut res.outputs;
+ // NB: Cargo.toml could have been modified between `cargo metadata` and
+ // `cargo check`. We shouldn't assume that package ids we see here are
+ // exactly those from `config`.
+ let mut by_id: FxHashMap<String, Package> = FxHashMap::default();
+ for package in workspace.packages() {
+ outputs.insert(package, None);
+ by_id.insert(workspace[package].id.clone(), package);
+ }
+
+ let errors = RefCell::new(String::new());
+ let push_err = |err: &str| {
+ let mut e = errors.borrow_mut();
+ e.push_str(err);
+ e.push('\n');
+ };
+
+ tracing::info!("Running build scripts: {:?}", cmd);
+ let output = stdx::process::spawn_with_streaming_output(
+ cmd,
+ &mut |line| {
+ // Copy-pasted from existing cargo_metadata. It seems like we
+ // should be using serde_stacker here?
+ let mut deserializer = serde_json::Deserializer::from_str(line);
+ deserializer.disable_recursion_limit();
+ let message = Message::deserialize(&mut deserializer)
+ .unwrap_or_else(|_| Message::TextLine(line.to_string()));
+
+ match message {
+ Message::BuildScriptExecuted(message) => {
+ let package = match by_id.get(&message.package_id.repr) {
+ Some(&it) => it,
+ None => return,
+ };
+ let cfgs = {
+ let mut acc = Vec::new();
+ for cfg in message.cfgs {
+ match cfg.parse::<CfgFlag>() {
+ Ok(it) => acc.push(it),
+ Err(err) => {
+ push_err(&format!(
+ "invalid cfg from cargo-metadata: {}",
+ err
+ ));
+ return;
+ }
+ };
+ }
+ acc
+ };
+ // cargo_metadata crate returns default (empty) path for
+ // older cargos, which is not absolute, so work around that.
+ let out_dir = message.out_dir.into_os_string();
+ if !out_dir.is_empty() {
+ let data = outputs[package].get_or_insert_with(Default::default);
+ data.out_dir = Some(AbsPathBuf::assert(PathBuf::from(out_dir)));
+ data.cfgs = cfgs;
+ }
+ if !message.env.is_empty() {
+ outputs[package].get_or_insert_with(Default::default).envs =
+ message.env;
+ }
+ }
+ Message::CompilerArtifact(message) => {
+ let package = match by_id.get(&message.package_id.repr) {
+ Some(it) => *it,
+ None => return,
+ };
+
+ progress(format!("metadata {}", message.target.name));
+
+ if message.target.kind.iter().any(|k| k == "proc-macro") {
+ // Skip rmeta file
+ if let Some(filename) =
+ message.filenames.iter().find(|name| is_dylib(name))
+ {
+ let filename = AbsPathBuf::assert(PathBuf::from(&filename));
+ outputs[package]
+ .get_or_insert_with(Default::default)
+ .proc_macro_dylib_path = Some(filename);
+ }
+ }
+ }
+ Message::CompilerMessage(message) => {
+ progress(message.target.name);
+
+ if let Some(diag) = message.message.rendered.as_deref() {
+ push_err(diag);
+ }
+ }
+ Message::BuildFinished(_) => {}
+ Message::TextLine(_) => {}
+ _ => {}
+ }
+ },
+ &mut |line| {
+ push_err(line);
+ },
+ )?;
+
+ for package in workspace.packages() {
+ if let Some(package_build_data) = &mut outputs[package] {
+ tracing::info!(
+ "{}: {:?}",
+ workspace[package].manifest.parent().display(),
+ package_build_data,
+ );
+ // inject_cargo_env(package, package_build_data);
+ if let Some(out_dir) = &package_build_data.out_dir {
+ // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
+ if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) {
+ package_build_data.envs.push(("OUT_DIR".to_string(), out_dir));
+ }
+ }
+ }
+ }
+
+ let mut errors = errors.into_inner();
+ if !output.status.success() {
+ if errors.is_empty() {
+ errors = "cargo check failed".to_string();
+ }
+ res.error = Some(errors);
+ }
+
+ Ok(res)
+ }
+
+ pub fn error(&self) -> Option<&str> {
+ self.error.as_deref()
+ }
+
+ pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
+ self.outputs.get(idx)?.as_ref()
+ }
+}
+
+// FIXME: Find a better way to know if it is a dylib.
+fn is_dylib(path: &Utf8Path) -> bool {
+ match path.extension().map(|e| e.to_string().to_lowercase()) {
+ None => false,
+ Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
+ }
+}
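
When no custom `run_build_script_command` is configured, `build_command` above assembles a plain `cargo check` invocation. A hedged standalone sketch of the resulting command for an explicit target and feature list (the helper name is illustrative; the real code also handles `--all-features` and `--no-default-features`):

    use std::process::Command;

    // Roughly: cargo check --quiet --workspace --message-format=json --all-targets
    //          [--target <target>] [--features "<feat> <feat> ..."]
    fn sketch_build_command(target: Option<&str>, features: &[&str]) -> Command {
        let mut cmd = Command::new("cargo");
        cmd.args(["check", "--quiet", "--workspace", "--message-format=json", "--all-targets"]);
        if let Some(target) = target {
            cmd.args(["--target", target]);
        }
        if !features.is_empty() {
            cmd.arg("--features").arg(features.join(" "));
        }
        cmd
    }
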
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
new file mode 100644
index 000000000..597880c2c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -0,0 +1,504 @@
+//! See [`CargoWorkspace`].
+
+use std::iter;
+use std::path::PathBuf;
+use std::{ops, process::Command};
+
+use anyhow::{Context, Result};
+use base_db::Edition;
+use cargo_metadata::{CargoOpt, MetadataCommand};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+use serde_json::from_value;
+
+use crate::CfgOverrides;
+use crate::{utf8_stdout, ManifestPath};
+
+/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
+/// workspace. It pretty closely mirrors `cargo metadata` output.
+///
+/// Note that internally, rust-analyzer uses a different structure:
+/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates,
+/// while this knows about `Packages` & `Targets`: purely cargo-related
+/// concepts.
+///
+/// We use absolute paths here; `cargo metadata` guarantees to always produce
+/// absolute paths.
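+///
+/// Illustrative usage sketch (editor's note, not from upstream; config setup
+/// and error handling elided):
+///
+/// ```ignore
+/// let meta = CargoWorkspace::fetch_metadata(&manifest, manifest.parent(), &CargoConfig::default(), &|_| ())?;
+/// let ws = CargoWorkspace::new(meta);
+/// for pkg in ws.packages() {
+///     println!("{} {}", ws[pkg].name, ws[pkg].version);
+/// }
+/// ```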
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct CargoWorkspace {
+ packages: Arena<PackageData>,
+ targets: Arena<TargetData>,
+ workspace_root: AbsPathBuf,
+}
+
+impl ops::Index<Package> for CargoWorkspace {
+ type Output = PackageData;
+ fn index(&self, index: Package) -> &PackageData {
+ &self.packages[index]
+ }
+}
+
+impl ops::Index<Target> for CargoWorkspace {
+ type Output = TargetData;
+ fn index(&self, index: Target) -> &TargetData {
+ &self.targets[index]
+ }
+}
+
+/// Describes how to set the rustc source directory.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum RustcSource {
+ /// Explicit path for the rustc source directory.
+ Path(AbsPathBuf),
+ /// Try to automatically detect where the rustc source directory is.
+ Discover,
+}
+
+/// Crates to disable `#[cfg(test)]` on.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum UnsetTestCrates {
+ None,
+ Only(Vec<String>),
+ All,
+}
+
+impl Default for UnsetTestCrates {
+ fn default() -> Self {
+ Self::None
+ }
+}
+
+#[derive(Default, Clone, Debug, PartialEq, Eq)]
+pub struct CargoConfig {
+ /// Do not activate the `default` feature.
+ pub no_default_features: bool,
+
+ /// Activate all available features
+ pub all_features: bool,
+
+ /// List of features to activate.
+    /// This will be ignored if `all_features` is true.
+ pub features: Vec<String>,
+
+ /// rustc target
+ pub target: Option<String>,
+
+ /// Don't load sysroot crates (`std`, `core` & friends). Might be useful
+ /// when debugging isolated issues.
+ pub no_sysroot: bool,
+
+ /// rustc private crate source
+ pub rustc_source: Option<RustcSource>,
+
+ /// crates to disable `#[cfg(test)]` on
+ pub unset_test_crates: UnsetTestCrates,
+
+ pub wrap_rustc_in_build_scripts: bool,
+
+ pub run_build_script_command: Option<Vec<String>>,
+}
+
+impl CargoConfig {
+ pub fn cfg_overrides(&self) -> CfgOverrides {
+ match &self.unset_test_crates {
+ UnsetTestCrates::None => CfgOverrides::Selective(iter::empty().collect()),
+ UnsetTestCrates::Only(unset_test_crates) => CfgOverrides::Selective(
+ unset_test_crates
+ .iter()
+ .cloned()
+ .zip(iter::repeat_with(|| {
+ cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())])
+ .unwrap()
+ }))
+ .collect(),
+ ),
+ UnsetTestCrates::All => CfgOverrides::Wildcard(
+ cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())]).unwrap(),
+ ),
+ }
+ }
+}
+
+pub type Package = Idx<PackageData>;
+
+pub type Target = Idx<TargetData>;
+
+/// Information associated with a cargo crate
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageData {
+ /// Version given in the `Cargo.toml`
+ pub version: semver::Version,
+ /// Name as given in the `Cargo.toml`
+ pub name: String,
+ /// Repository as given in the `Cargo.toml`
+ pub repository: Option<String>,
+ /// Path containing the `Cargo.toml`
+ pub manifest: ManifestPath,
+ /// Targets provided by the crate (lib, bin, example, test, ...)
+ pub targets: Vec<Target>,
+ /// Does this package come from the local filesystem (and is editable)?
+ pub is_local: bool,
+    /// Whether this package is a member of the workspace
+ pub is_member: bool,
+ /// List of packages this package depends on
+ pub dependencies: Vec<PackageDependency>,
+ /// Rust edition for this package
+ pub edition: Edition,
+ /// Features provided by the crate, mapped to the features required by that feature.
+ pub features: FxHashMap<String, Vec<String>>,
+ /// List of features enabled on this package
+ pub active_features: Vec<String>,
+ /// String representation of package id
+ pub id: String,
+ /// The contents of [package.metadata.rust-analyzer]
+ pub metadata: RustAnalyzerPackageMetaData,
+}
+
+#[derive(Deserialize, Default, Debug, Clone, Eq, PartialEq)]
+pub struct RustAnalyzerPackageMetaData {
+ pub rustc_private: bool,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageDependency {
+ pub pkg: Package,
+ pub name: String,
+ pub kind: DepKind,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub enum DepKind {
+ /// Available to the library, binary, and dev targets in the package (but not the build script).
+ Normal,
+ /// Available only to test and bench targets (and the library target, when built with `cfg(test)`).
+ Dev,
+ /// Available only to the build script target.
+ Build,
+}
+
+impl DepKind {
+ fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
+ let mut dep_kinds = Vec::new();
+ if list.is_empty() {
+ dep_kinds.push(Self::Normal);
+ }
+ for info in list {
+ let kind = match info.kind {
+ cargo_metadata::DependencyKind::Normal => Self::Normal,
+ cargo_metadata::DependencyKind::Development => Self::Dev,
+ cargo_metadata::DependencyKind::Build => Self::Build,
+ cargo_metadata::DependencyKind::Unknown => continue,
+ };
+ dep_kinds.push(kind);
+ }
+ dep_kinds.sort_unstable();
+ dep_kinds.dedup();
+ dep_kinds.into_iter()
+ }
+}
+
+/// Information associated with a package's target
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TargetData {
+ /// Package that provided this target
+ pub package: Package,
+ /// Name as given in the `Cargo.toml` or generated from the file name
+ pub name: String,
+ /// Path to the main source file of the target
+ pub root: AbsPathBuf,
+ /// Kind of target
+ pub kind: TargetKind,
+ /// Is this target a proc-macro
+ pub is_proc_macro: bool,
+ /// Required features of the target without which it won't build
+ pub required_features: Vec<String>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TargetKind {
+ Bin,
+ /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...).
+ Lib,
+ Example,
+ Test,
+ Bench,
+ BuildScript,
+ Other,
+}
+
+impl TargetKind {
+ fn new(kinds: &[String]) -> TargetKind {
+ for kind in kinds {
+ return match kind.as_str() {
+ "bin" => TargetKind::Bin,
+ "test" => TargetKind::Test,
+ "bench" => TargetKind::Bench,
+ "example" => TargetKind::Example,
+ "custom-build" => TargetKind::BuildScript,
+ "proc-macro" => TargetKind::Lib,
+ _ if kind.contains("lib") => TargetKind::Lib,
+ _ => continue,
+ };
+ }
+ TargetKind::Other
+ }
+}
+
+#[derive(Deserialize, Default)]
+// Deserialise helper for the cargo metadata
+struct PackageMetadata {
+ #[serde(rename = "rust-analyzer")]
+ rust_analyzer: Option<RustAnalyzerPackageMetaData>,
+}
+
+impl CargoWorkspace {
+ pub fn fetch_metadata(
+ cargo_toml: &ManifestPath,
+ current_dir: &AbsPath,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Result<cargo_metadata::Metadata> {
+ let target = config
+ .target
+ .clone()
+ .or_else(|| cargo_config_build_target(cargo_toml))
+ .or_else(|| rustc_discover_host_triple(cargo_toml));
+
+ let mut meta = MetadataCommand::new();
+ meta.cargo_path(toolchain::cargo());
+ meta.manifest_path(cargo_toml.to_path_buf());
+ if config.all_features {
+ meta.features(CargoOpt::AllFeatures);
+ } else {
+ if config.no_default_features {
+                // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
+ // https://github.com/oli-obk/cargo_metadata/issues/79
+ meta.features(CargoOpt::NoDefaultFeatures);
+ }
+ if !config.features.is_empty() {
+ meta.features(CargoOpt::SomeFeatures(config.features.clone()));
+ }
+ }
+ meta.current_dir(current_dir.as_os_str());
+
+ if let Some(target) = target {
+ meta.other_options(vec![String::from("--filter-platform"), target]);
+ }
+
+ // FIXME: Fetching metadata is a slow process, as it might require
+ // calling crates.io. We should be reporting progress here, but it's
+ // unclear whether cargo itself supports it.
+ progress("metadata".to_string());
+
+ let meta =
+ meta.exec().with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))?;
+
+ Ok(meta)
+ }
+
+ pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
+ let mut pkg_by_id = FxHashMap::default();
+ let mut packages = Arena::default();
+ let mut targets = Arena::default();
+
+ let ws_members = &meta.workspace_members;
+
+ meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
+ for meta_pkg in &meta.packages {
+ let cargo_metadata::Package {
+ id,
+ edition,
+ name,
+ manifest_path,
+ version,
+ metadata,
+ repository,
+ ..
+ } = meta_pkg;
+ let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default();
+ let edition = match edition {
+ cargo_metadata::Edition::E2015 => Edition::Edition2015,
+ cargo_metadata::Edition::E2018 => Edition::Edition2018,
+ cargo_metadata::Edition::E2021 => Edition::Edition2021,
+ _ => {
+ tracing::error!("Unsupported edition `{:?}`", edition);
+ Edition::CURRENT
+ }
+ };
+ // We treat packages without source as "local" packages. That includes all members of
+ // the current workspace, as well as any path dependency outside the workspace.
+ let is_local = meta_pkg.source.is_none();
+ let is_member = ws_members.contains(id);
+
+ let pkg = packages.alloc(PackageData {
+ id: id.repr.clone(),
+ name: name.clone(),
+ version: version.clone(),
+ manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)).try_into().unwrap(),
+ targets: Vec::new(),
+ is_local,
+ is_member,
+ edition,
+ repository: repository.clone(),
+ dependencies: Vec::new(),
+ features: meta_pkg.features.clone().into_iter().collect(),
+ active_features: Vec::new(),
+ metadata: meta.rust_analyzer.unwrap_or_default(),
+ });
+ let pkg_data = &mut packages[pkg];
+ pkg_by_id.insert(id, pkg);
+ for meta_tgt in &meta_pkg.targets {
+ let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"];
+ let tgt = targets.alloc(TargetData {
+ package: pkg,
+ name: meta_tgt.name.clone(),
+ root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)),
+ kind: TargetKind::new(meta_tgt.kind.as_slice()),
+ is_proc_macro,
+ required_features: meta_tgt.required_features.clone(),
+ });
+ pkg_data.targets.push(tgt);
+ }
+ }
+ let resolve = meta.resolve.expect("metadata executed with deps");
+ for mut node in resolve.nodes {
+ let source = match pkg_by_id.get(&node.id) {
+ Some(&src) => src,
+ // FIXME: replace this and a similar branch below with `.unwrap`, once
+ // https://github.com/rust-lang/cargo/issues/7841
+                // is fixed and hits stable (around 1.43-ish, probably?).
+ None => {
+                    tracing::error!("Node id does not match in cargo metadata, ignoring {}", node.id);
+ continue;
+ }
+ };
+ node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
+ for (dep_node, kind) in node
+ .deps
+ .iter()
+ .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)))
+ {
+ let pkg = match pkg_by_id.get(&dep_node.pkg) {
+ Some(&pkg) => pkg,
+ None => {
+ tracing::error!(
+                            "Dep node id does not match in cargo metadata, ignoring {}",
+ dep_node.pkg
+ );
+ continue;
+ }
+ };
+ let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
+ packages[source].dependencies.push(dep);
+ }
+ packages[source].active_features.extend(node.features);
+ }
+
+ let workspace_root =
+ AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
+
+ CargoWorkspace { packages, targets, workspace_root }
+ }
+
+ pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
+ self.packages.iter().map(|(id, _pkg)| id)
+ }
+
+ pub fn target_by_root(&self, root: &AbsPath) -> Option<Target> {
+ self.packages()
+ .filter(|&pkg| self[pkg].is_member)
+ .find_map(|pkg| self[pkg].targets.iter().find(|&&it| &self[it].root == root))
+ .copied()
+ }
+
+ pub fn workspace_root(&self) -> &AbsPath {
+ &self.workspace_root
+ }
+
+ pub fn package_flag(&self, package: &PackageData) -> String {
+ if self.is_unique(&*package.name) {
+ package.name.clone()
+ } else {
+ format!("{}:{}", package.name, package.version)
+ }
+ }
+
+ pub fn parent_manifests(&self, manifest_path: &ManifestPath) -> Option<Vec<ManifestPath>> {
+ let mut found = false;
+ let parent_manifests = self
+ .packages()
+ .filter_map(|pkg| {
+ if !found && &self[pkg].manifest == manifest_path {
+ found = true
+ }
+ self[pkg].dependencies.iter().find_map(|dep| {
+ if &self[dep.pkg].manifest == manifest_path {
+ return Some(self[pkg].manifest.clone());
+ }
+ None
+ })
+ })
+ .collect::<Vec<ManifestPath>>();
+
+        // some packages have this pkg as a dep; return their manifests
+        if !parent_manifests.is_empty() {
+ return Some(parent_manifests);
+ }
+
+        // this pkg is inside this cargo workspace, fall back to the workspace root
+ if found {
+ return Some(vec![
+ ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?
+ ]);
+ }
+
+ // not in this workspace
+ None
+ }
+
+ fn is_unique(&self, name: &str) -> bool {
+ self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
+ }
+}
+
+fn rustc_discover_host_triple(cargo_toml: &ManifestPath) -> Option<String> {
+ let mut rustc = Command::new(toolchain::rustc());
+ rustc.current_dir(cargo_toml.parent()).arg("-vV");
+ tracing::debug!("Discovering host platform by {:?}", rustc);
+ match utf8_stdout(rustc) {
+ Ok(stdout) => {
+ let field = "host: ";
+ let target = stdout.lines().find_map(|l| l.strip_prefix(field));
+ if let Some(target) = target {
+ Some(target.to_string())
+ } else {
+ // If we fail to resolve the host platform, it's not the end of the world.
+ tracing::info!("rustc -vV did not report host platform, got:\n{}", stdout);
+ None
+ }
+ }
+ Err(e) => {
+ tracing::warn!("Failed to discover host platform: {}", e);
+ None
+ }
+ }
+}
+
+fn cargo_config_build_target(cargo_toml: &ManifestPath) -> Option<String> {
+ let mut cargo_config = Command::new(toolchain::cargo());
+ cargo_config
+ .current_dir(cargo_toml.parent())
+ .args(&["-Z", "unstable-options", "config", "get", "build.target"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ // if successful we receive `build.target = "target-triple"`
+ tracing::debug!("Discovering cargo config target by {:?}", cargo_config);
+ match utf8_stdout(cargo_config) {
+ Ok(stdout) => stdout
+ .strip_prefix("build.target = \"")
+ .and_then(|stdout| stdout.strip_suffix('"'))
+ .map(ToOwned::to_owned),
+ Err(_) => None,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs b/src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs
new file mode 100644
index 000000000..f3dd8f513
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/cfg_flag.rs
@@ -0,0 +1,63 @@
+//! Parsing of CfgFlags as command line arguments, as in
+//!
+//! rustc main.rs --cfg foo --cfg 'feature="bar"'
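+//!
+//! Illustrative example (editor's note, not from upstream):
+//!
+//! ```ignore
+//! let atom: CfgFlag = "unix".parse().unwrap();           // CfgFlag::Atom("unix")
+//! let kv: CfgFlag = r#"feature="bar""#.parse().unwrap(); // KeyValue { key: "feature", value: "bar" }
+//! assert_eq!(kv.to_string(), "feature=bar");
+//! ```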
+use std::{fmt, str::FromStr};
+
+use cfg::CfgOptions;
+
+#[derive(Clone, Eq, PartialEq, Debug)]
+pub enum CfgFlag {
+ Atom(String),
+ KeyValue { key: String, value: String },
+}
+
+impl FromStr for CfgFlag {
+ type Err = String;
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let res = match s.split_once('=') {
+ Some((key, value)) => {
+ if !(value.starts_with('"') && value.ends_with('"')) {
+ return Err(format!("Invalid cfg ({:?}), value should be in quotes", s));
+ }
+ let key = key.to_string();
+ let value = value[1..value.len() - 1].to_string();
+ CfgFlag::KeyValue { key, value }
+ }
+ None => CfgFlag::Atom(s.into()),
+ };
+ Ok(res)
+ }
+}
+
+impl<'de> serde::Deserialize<'de> for CfgFlag {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
+ }
+}
+
+impl Extend<CfgFlag> for CfgOptions {
+ fn extend<T: IntoIterator<Item = CfgFlag>>(&mut self, iter: T) {
+ for cfg_flag in iter {
+ match cfg_flag {
+ CfgFlag::Atom(it) => self.insert_atom(it.into()),
+ CfgFlag::KeyValue { key, value } => self.insert_key_value(key.into(), value.into()),
+ }
+ }
+ }
+}
+
+impl fmt::Display for CfgFlag {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CfgFlag::Atom(atom) => f.write_str(atom),
+ CfgFlag::KeyValue { key, value } => {
+ f.write_str(key)?;
+ f.write_str("=")?;
+ f.write_str(value)
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
new file mode 100644
index 000000000..e3f83084a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -0,0 +1,159 @@
+//! In rust-analyzer, we maintain a strict separation between pure abstract
+//! semantic project model and a concrete model of a particular build system.
+//!
+//! The pure model is represented by [`base_db::CrateGraph`], which lives in another crate.
+//!
+//! In this crate, we are concerned with "real world" project models.
+//!
+//! Specifically, here we have a representation for a Cargo project
+//! ([`CargoWorkspace`]) and for manually specified layout ([`ProjectJson`]).
+//!
+//! Roughly, the things we do here are:
+//!
+//! * Project discovery (where's the relevant Cargo.toml for the current dir).
+//! * Custom build steps (`build.rs` code generation and compilation of
+//! procedural macros).
+//! * Lowering of the concrete model to a [`base_db::CrateGraph`]
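+//!
+//! As an illustrative sketch of the entry point (editor's note, not from
+//! upstream; error handling elided):
+//!
+//! ```ignore
+//! // Find the single Cargo.toml / rust-project.json in or above `project_dir`.
+//! let manifest = ProjectManifest::discover_single(&project_dir)?;
+//! ```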
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod manifest_path;
+mod cargo_workspace;
+mod cfg_flag;
+mod project_json;
+mod sysroot;
+mod workspace;
+mod rustc_cfg;
+mod build_scripts;
+
+#[cfg(test)]
+mod tests;
+
+use std::{
+ fs::{self, read_dir, ReadDir},
+ io,
+ process::Command,
+};
+
+use anyhow::{bail, format_err, Context, Result};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashSet;
+
+pub use crate::{
+ build_scripts::WorkspaceBuildScripts,
+ cargo_workspace::{
+ CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustcSource, Target,
+ TargetData, TargetKind, UnsetTestCrates,
+ },
+ manifest_path::ManifestPath,
+ project_json::{ProjectJson, ProjectJsonData},
+ sysroot::Sysroot,
+ workspace::{CfgOverrides, PackageRoot, ProjectWorkspace},
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub enum ProjectManifest {
+ ProjectJson(ManifestPath),
+ CargoToml(ManifestPath),
+}
+
+impl ProjectManifest {
+ pub fn from_manifest_file(path: AbsPathBuf) -> Result<ProjectManifest> {
+ let path = ManifestPath::try_from(path)
+ .map_err(|path| format_err!("bad manifest path: {}", path.display()))?;
+ if path.file_name().unwrap_or_default() == "rust-project.json" {
+ return Ok(ProjectManifest::ProjectJson(path));
+ }
+ if path.file_name().unwrap_or_default() == "Cargo.toml" {
+ return Ok(ProjectManifest::CargoToml(path));
+ }
+ bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display())
+ }
+
+ pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> {
+ let mut candidates = ProjectManifest::discover(path)?;
+ let res = match candidates.pop() {
+ None => bail!("no projects"),
+ Some(it) => it,
+ };
+
+ if !candidates.is_empty() {
+ bail!("more than one project")
+ }
+ Ok(res)
+ }
+
+ pub fn discover(path: &AbsPath) -> io::Result<Vec<ProjectManifest>> {
+ if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") {
+ return Ok(vec![ProjectManifest::ProjectJson(project_json)]);
+ }
+ return find_cargo_toml(path)
+ .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect());
+
+ fn find_cargo_toml(path: &AbsPath) -> io::Result<Vec<ManifestPath>> {
+ match find_in_parent_dirs(path, "Cargo.toml") {
+ Some(it) => Ok(vec![it]),
+ None => Ok(find_cargo_toml_in_child_dir(read_dir(path)?)),
+ }
+ }
+
+ fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> {
+ if path.file_name().unwrap_or_default() == target_file_name {
+ if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) {
+ return Some(manifest);
+ }
+ }
+
+ let mut curr = Some(path);
+
+ while let Some(path) = curr {
+ let candidate = path.join(target_file_name);
+ if fs::metadata(&candidate).is_ok() {
+ if let Ok(manifest) = ManifestPath::try_from(candidate) {
+ return Some(manifest);
+ }
+ }
+ curr = path.parent();
+ }
+
+ None
+ }
+
+ fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec<ManifestPath> {
+ // Only one level down to avoid cycles the easy way and stop a runaway scan with large projects
+ entities
+ .filter_map(Result::ok)
+ .map(|it| it.path().join("Cargo.toml"))
+ .filter(|it| it.exists())
+ .map(AbsPathBuf::assert)
+ .filter_map(|it| it.try_into().ok())
+ .collect()
+ }
+ }
+
+ pub fn discover_all(paths: &[AbsPathBuf]) -> Vec<ProjectManifest> {
+ let mut res = paths
+ .iter()
+ .filter_map(|it| ProjectManifest::discover(it.as_ref()).ok())
+ .flatten()
+ .collect::<FxHashSet<_>>()
+ .into_iter()
+ .collect::<Vec<_>>();
+ res.sort();
+ res
+ }
+}
+
+fn utf8_stdout(mut cmd: Command) -> Result<String> {
+ let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?;
+ if !output.status.success() {
+ match String::from_utf8(output.stderr) {
+ Ok(stderr) if !stderr.is_empty() => {
+ bail!("{:?} failed, {}\nstderr:\n{}", cmd, output.status, stderr)
+ }
+ _ => bail!("{:?} failed, {}", cmd, output.status),
+ }
+ }
+ let stdout = String::from_utf8(output.stdout)?;
+ Ok(stdout.trim().to_string())
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
new file mode 100644
index 000000000..4910fd3d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
@@ -0,0 +1,51 @@
+//! See [`ManifestPath`].
+use std::{ops, path::Path};
+
+use paths::{AbsPath, AbsPathBuf};
+
+/// More or less [`AbsPathBuf`] with non-None parent.
+///
+/// We use it to store the path to Cargo.toml, as we frequently use the parent
+/// dir as a working directory to spawn various commands, and it's nice to not
+/// have to `.unwrap()` everywhere.
+///
+/// This could have been named `AbsNonRootPathBuf`, as we don't enforce that
+/// this stores manifest files in particular, but we only use this for manifests
+/// at the moment in practice.
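+///
+/// Illustrative example (editor's note, not from upstream): a filesystem root
+/// has no parent, so the conversion is rejected:
+///
+/// ```ignore
+/// assert!(ManifestPath::try_from(AbsPathBuf::assert("/".into())).is_err());
+/// ```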
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ManifestPath {
+ file: AbsPathBuf,
+}
+
+impl TryFrom<AbsPathBuf> for ManifestPath {
+ type Error = AbsPathBuf;
+
+ fn try_from(file: AbsPathBuf) -> Result<Self, Self::Error> {
+ if file.parent().is_none() {
+ Err(file)
+ } else {
+ Ok(ManifestPath { file })
+ }
+ }
+}
+
+impl ManifestPath {
+ // Shadow `parent` from `Deref`.
+ pub fn parent(&self) -> &AbsPath {
+ self.file.parent().unwrap()
+ }
+}
+
+impl ops::Deref for ManifestPath {
+ type Target = AbsPath;
+
+ fn deref(&self) -> &Self::Target {
+ &*self.file
+ }
+}
+
+impl AsRef<Path> for ManifestPath {
+ fn as_ref(&self) -> &Path {
+ self.file.as_ref()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
new file mode 100644
index 000000000..63d1d0ace
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
@@ -0,0 +1,198 @@
+//! `rust-project.json` file format.
+//!
+//! This format is spiritually a serialization of [`base_db::CrateGraph`]. The
+//! idea here is that people who do not use Cargo, can instead teach their build
+//! system to generate `rust-project.json` which can be ingested by
+//! rust-analyzer.
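+//!
+//! A minimal illustrative file (editor's sketch based on the structs below;
+//! paths are hypothetical):
+//!
+//! ```json
+//! {
+//!   "sysroot_src": "/sysroot/lib/rustlib/src/rust/library",
+//!   "crates": [
+//!     { "root_module": "src/lib.rs", "edition": "2018", "deps": [] }
+//!   ]
+//! }
+//! ```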
+
+use std::path::PathBuf;
+
+use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use serde::{de, Deserialize};
+
+use crate::cfg_flag::CfgFlag;
+
+/// Roots and crates that compose this Rust project.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct ProjectJson {
+ /// e.g. `path/to/sysroot`
+ pub(crate) sysroot: Option<AbsPathBuf>,
+ /// e.g. `path/to/sysroot/lib/rustlib/src/rust`
+ pub(crate) sysroot_src: Option<AbsPathBuf>,
+ project_root: AbsPathBuf,
+ crates: Vec<Crate>,
+}
+
+/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
+/// useful in creating the crate graph.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Crate {
+ pub(crate) display_name: Option<CrateDisplayName>,
+ pub(crate) root_module: AbsPathBuf,
+ pub(crate) edition: Edition,
+ pub(crate) version: Option<String>,
+ pub(crate) deps: Vec<Dependency>,
+ pub(crate) cfg: Vec<CfgFlag>,
+ pub(crate) target: Option<String>,
+ pub(crate) env: FxHashMap<String, String>,
+ pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+ pub(crate) is_workspace_member: bool,
+ pub(crate) include: Vec<AbsPathBuf>,
+ pub(crate) exclude: Vec<AbsPathBuf>,
+ pub(crate) is_proc_macro: bool,
+ pub(crate) repository: Option<String>,
+}
+
+impl ProjectJson {
+ /// Create a new ProjectJson instance.
+ ///
+ /// # Arguments
+ ///
+ /// * `base` - The path to the workspace root (i.e. the folder containing `rust-project.json`)
+ /// * `data` - The parsed contents of `rust-project.json`, or project json that's passed via
+ /// configuration.
+ pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson {
+ ProjectJson {
+ sysroot: data.sysroot.map(|it| base.join(it)),
+ sysroot_src: data.sysroot_src.map(|it| base.join(it)),
+ project_root: base.to_path_buf(),
+ crates: data
+ .crates
+ .into_iter()
+ .map(|crate_data| {
+ let is_workspace_member = crate_data.is_workspace_member.unwrap_or_else(|| {
+ crate_data.root_module.is_relative()
+ && !crate_data.root_module.starts_with("..")
+ || crate_data.root_module.starts_with(base)
+ });
+ let root_module = base.join(crate_data.root_module).normalize();
+ let (include, exclude) = match crate_data.source {
+ Some(src) => {
+ let absolutize = |dirs: Vec<PathBuf>| {
+ dirs.into_iter()
+ .map(|it| base.join(it).normalize())
+ .collect::<Vec<_>>()
+ };
+ (absolutize(src.include_dirs), absolutize(src.exclude_dirs))
+ }
+ None => (vec![root_module.parent().unwrap().to_path_buf()], Vec::new()),
+ };
+
+ Crate {
+ display_name: crate_data
+ .display_name
+ .map(CrateDisplayName::from_canonical_name),
+ root_module,
+ edition: crate_data.edition.into(),
+ version: crate_data.version.as_ref().map(ToString::to_string),
+ deps: crate_data
+ .deps
+ .into_iter()
+ .map(|dep_data| {
+ Dependency::new(dep_data.name, CrateId(dep_data.krate as u32))
+ })
+ .collect::<Vec<_>>(),
+ cfg: crate_data.cfg,
+ target: crate_data.target,
+ env: crate_data.env,
+ proc_macro_dylib_path: crate_data
+ .proc_macro_dylib_path
+ .map(|it| base.join(it)),
+ is_workspace_member,
+ include,
+ exclude,
+ is_proc_macro: crate_data.is_proc_macro,
+ repository: crate_data.repository,
+ }
+ })
+ .collect::<Vec<_>>(),
+ }
+ }
+ /// Returns the number of crates in the project.
+ pub fn n_crates(&self) -> usize {
+ self.crates.len()
+ }
+ /// Returns an iterator over the crates in the project.
+ pub fn crates(&self) -> impl Iterator<Item = (CrateId, &Crate)> + '_ {
+ self.crates.iter().enumerate().map(|(idx, krate)| (CrateId(idx as u32), krate))
+ }
+ /// Returns the path to the project's root folder.
+ pub fn path(&self) -> &AbsPath {
+ &self.project_root
+ }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct ProjectJsonData {
+ sysroot: Option<PathBuf>,
+ sysroot_src: Option<PathBuf>,
+ crates: Vec<CrateData>,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+struct CrateData {
+ display_name: Option<String>,
+ root_module: PathBuf,
+ edition: EditionData,
+ #[serde(default)]
+ version: Option<semver::Version>,
+ deps: Vec<DepData>,
+ #[serde(default)]
+ cfg: Vec<CfgFlag>,
+ target: Option<String>,
+ #[serde(default)]
+ env: FxHashMap<String, String>,
+ proc_macro_dylib_path: Option<PathBuf>,
+ is_workspace_member: Option<bool>,
+ source: Option<CrateSource>,
+ #[serde(default)]
+ is_proc_macro: bool,
+ #[serde(default)]
+ repository: Option<String>,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename = "edition")]
+enum EditionData {
+ #[serde(rename = "2015")]
+ Edition2015,
+ #[serde(rename = "2018")]
+ Edition2018,
+ #[serde(rename = "2021")]
+ Edition2021,
+}
+
+impl From<EditionData> for Edition {
+ fn from(data: EditionData) -> Self {
+ match data {
+ EditionData::Edition2015 => Edition::Edition2015,
+ EditionData::Edition2018 => Edition::Edition2018,
+ EditionData::Edition2021 => Edition::Edition2021,
+ }
+ }
+}
+
+#[derive(Deserialize, Debug, Clone)]
+struct DepData {
+ /// Identifies a crate by position in the crates array.
+ #[serde(rename = "crate")]
+ krate: usize,
+ #[serde(deserialize_with = "deserialize_crate_name")]
+ name: CrateName,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+struct CrateSource {
+ include_dirs: Vec<PathBuf>,
+ exclude_dirs: Vec<PathBuf>,
+}
+
+fn deserialize_crate_name<'de, D>(de: D) -> Result<CrateName, D::Error>
+where
+ D: de::Deserializer<'de>,
+{
+ let name = String::deserialize(de)?;
+ CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err)))
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
new file mode 100644
index 000000000..17e244d06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
@@ -0,0 +1,60 @@
+//! Runs `rustc --print cfg` to get built-in cfg flags.
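+//!
+//! For orientation (editor's note), the output consists of one cfg per line,
+//! e.g.:
+//!
+//! ```text
+//! debug_assertions
+//! target_os="linux"
+//! unix
+//! ```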
+
+use std::process::Command;
+
+use anyhow::Result;
+
+use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath};
+
+pub(crate) fn get(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Vec<CfgFlag> {
+ let _p = profile::span("rustc_cfg::get");
+ let mut res = Vec::with_capacity(6 * 2 + 1);
+
+ // Some nightly-only cfgs, which are required for stdlib
+ res.push(CfgFlag::Atom("target_thread_local".into()));
+ for ty in ["8", "16", "32", "64", "cas", "ptr"] {
+ for key in ["target_has_atomic", "target_has_atomic_load_store"] {
+ res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() });
+ }
+ }
+
+ match get_rust_cfgs(cargo_toml, target) {
+ Ok(rustc_cfgs) => {
+ tracing::debug!(
+ "rustc cfgs found: {:?}",
+ rustc_cfgs
+ .lines()
+ .map(|it| it.parse::<CfgFlag>().map(|it| it.to_string()))
+ .collect::<Vec<_>>()
+ );
+ res.extend(rustc_cfgs.lines().filter_map(|it| it.parse().ok()));
+ }
+ Err(e) => tracing::error!("failed to get rustc cfgs: {e:?}"),
+ }
+
+ res
+}
+
+fn get_rust_cfgs(cargo_toml: Option<&ManifestPath>, target: Option<&str>) -> Result<String> {
+ if let Some(cargo_toml) = cargo_toml {
+ let mut cargo_config = Command::new(toolchain::cargo());
+ cargo_config
+ .current_dir(cargo_toml.parent())
+ .args(&["-Z", "unstable-options", "rustc", "--print", "cfg"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if let Some(target) = target {
+ cargo_config.args(&["--target", target]);
+ }
+ match utf8_stdout(cargo_config) {
+ Ok(it) => return Ok(it),
+ Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"),
+ }
+ }
+ // using unstable cargo features failed, fall back to using plain rustc
+ let mut cmd = Command::new(toolchain::rustc());
+ cmd.args(&["--print", "cfg", "-O"]);
+ if let Some(target) = target {
+ cmd.args(&["--target", target]);
+ }
+ utf8_stdout(cmd)
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
new file mode 100644
index 000000000..362bb0f5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -0,0 +1,232 @@
+//! Loads "sysroot" crate.
+//!
+//! One confusing point here is that normally sysroot is a bunch of `.rlib`s,
+//! but we can't process `.rlib` and need source code instead. The source code
+//! is typically installed with the `rustup component add rust-src` command.
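+//!
+//! For orientation (editor's note; the exact layout depends on the toolchain),
+//! the sources are expected under the sysroot, e.g.:
+//!
+//! ```text
+//! <sysroot>/lib/rustlib/src/rust/library/core/src/lib.rs
+//! ```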
+
+use std::{env, fs, iter, ops, path::PathBuf, process::Command};
+
+use anyhow::{format_err, Result};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+
+use crate::{utf8_stdout, ManifestPath};
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Sysroot {
+ root: AbsPathBuf,
+ src_root: AbsPathBuf,
+ crates: Arena<SysrootCrateData>,
+}
+
+pub(crate) type SysrootCrate = Idx<SysrootCrateData>;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct SysrootCrateData {
+ pub name: String,
+ pub root: ManifestPath,
+ pub deps: Vec<SysrootCrate>,
+}
+
+impl ops::Index<SysrootCrate> for Sysroot {
+ type Output = SysrootCrateData;
+ fn index(&self, index: SysrootCrate) -> &SysrootCrateData {
+ &self.crates[index]
+ }
+}
+
+impl Sysroot {
+    /// Returns the sysroot "root" directory, where the `bin/`, `etc/`, `lib/` and
+    /// `libexec/` subfolders live, e.g.:
+ /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu`
+ pub fn root(&self) -> &AbsPath {
+ &self.root
+ }
+
+ /// Returns the sysroot "source" directory, where stdlib sources are located, like:
+ /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
+ pub fn src_root(&self) -> &AbsPath {
+ &self.src_root
+ }
+
+ pub fn public_deps(&self) -> impl Iterator<Item = (&'static str, SysrootCrate, bool)> + '_ {
+ // core is added as a dependency before std in order to
+        // mimic rustc's dependency order
+ ["core", "alloc", "std"]
+ .into_iter()
+ .zip(iter::repeat(true))
+ .chain(iter::once(("test", false)))
+ .filter_map(move |(name, prelude)| Some((name, self.by_name(name)?, prelude)))
+ }
+
+ pub fn proc_macro(&self) -> Option<SysrootCrate> {
+ self.by_name("proc_macro")
+ }
+
+ pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a {
+ self.crates.iter().map(|(id, _data)| id)
+ }
+
+ pub fn discover(dir: &AbsPath) -> Result<Sysroot> {
+ tracing::debug!("Discovering sysroot for {}", dir.display());
+ let sysroot_dir = discover_sysroot_dir(dir)?;
+ let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir)?;
+ let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
+ Ok(res)
+ }
+
+ pub fn discover_rustc(cargo_toml: &ManifestPath) -> Option<ManifestPath> {
+ tracing::debug!("Discovering rustc source for {}", cargo_toml.display());
+ let current_dir = cargo_toml.parent();
+ discover_sysroot_dir(current_dir).ok().and_then(|sysroot_dir| get_rustc_src(&sysroot_dir))
+ }
+
+ pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
+ let mut sysroot =
+ Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };
+
+ for path in SYSROOT_CRATES.trim().lines() {
+ let name = path.split('/').last().unwrap();
+ let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)]
+ .into_iter()
+ .map(|it| sysroot.src_root.join(it))
+ .filter_map(|it| ManifestPath::try_from(it).ok())
+ .find(|it| fs::metadata(it).is_ok());
+
+ if let Some(root) = root {
+ sysroot.crates.alloc(SysrootCrateData {
+ name: name.into(),
+ root,
+ deps: Vec::new(),
+ });
+ }
+ }
+
+ if let Some(std) = sysroot.by_name("std") {
+ for dep in STD_DEPS.trim().lines() {
+ if let Some(dep) = sysroot.by_name(dep) {
+ sysroot.crates[std].deps.push(dep)
+ }
+ }
+ }
+
+ if let Some(alloc) = sysroot.by_name("alloc") {
+ if let Some(core) = sysroot.by_name("core") {
+ sysroot.crates[alloc].deps.push(core);
+ }
+ }
+
+ if let Some(proc_macro) = sysroot.by_name("proc_macro") {
+ if let Some(std) = sysroot.by_name("std") {
+ sysroot.crates[proc_macro].deps.push(std);
+ }
+ }
+
+ if sysroot.by_name("core").is_none() {
+ let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
+ " (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
+ } else {
+ ""
+ };
+ anyhow::bail!(
+ "could not find libcore in sysroot path `{}`{}",
+ sysroot.src_root.as_path().display(),
+ var_note,
+ );
+ }
+
+ Ok(sysroot)
+ }
+
+ fn by_name(&self, name: &str) -> Option<SysrootCrate> {
+ let (id, _data) = self.crates.iter().find(|(_id, data)| data.name == name)?;
+ Some(id)
+ }
+}
+
+fn discover_sysroot_dir(current_dir: &AbsPath) -> Result<AbsPathBuf> {
+ let mut rustc = Command::new(toolchain::rustc());
+ rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
+ tracing::debug!("Discovering sysroot by {:?}", rustc);
+ let stdout = utf8_stdout(rustc)?;
+ Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
+}
+
+fn discover_sysroot_src_dir(
+ sysroot_path: &AbsPathBuf,
+ current_dir: &AbsPath,
+) -> Result<AbsPathBuf> {
+ if let Ok(path) = env::var("RUST_SRC_PATH") {
+ let path = AbsPathBuf::try_from(path.as_str())
+ .map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?;
+ let core = path.join("core");
+ if fs::metadata(&core).is_ok() {
+ tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
+ return Ok(path);
+ }
+ tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
+ }
+
+ get_rust_src(sysroot_path)
+ .or_else(|| {
+ let mut rustup = Command::new(toolchain::rustup());
+ rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
+ utf8_stdout(rustup).ok()?;
+ get_rust_src(sysroot_path)
+ })
+ .ok_or_else(|| {
+ format_err!(
+ "\
+can't load standard library from sysroot
+{}
+(discovered via `rustc --print sysroot`)
+try installing the Rust source the same way you installed rustc",
+ sysroot_path.display(),
+ )
+ })
+}
+
+fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
+ let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
+ let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
+ tracing::debug!("Checking for rustc source code: {}", rustc_src.display());
+ if fs::metadata(&rustc_src).is_ok() {
+ Some(rustc_src)
+ } else {
+ None
+ }
+}
+
+fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
+ let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
+ tracing::debug!("Checking sysroot: {}", rust_src.display());
+ if fs::metadata(&rust_src).is_ok() {
+ Some(rust_src)
+ } else {
+ None
+ }
+}
+
+const SYSROOT_CRATES: &str = "
+alloc
+core
+panic_abort
+panic_unwind
+proc_macro
+profiler_builtins
+std
+stdarch/crates/std_detect
+term
+test
+unwind";
+
+const STD_DEPS: &str = "
+alloc
+core
+panic_abort
+panic_unwind
+profiler_builtins
+std_detect
+term
+test
+unwind";
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
new file mode 100644
index 000000000..e304a59c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -0,0 +1,1820 @@
+use std::{
+ ops::Deref,
+ path::{Path, PathBuf},
+};
+
+use base_db::{CrateGraph, FileId};
+use cfg::{CfgAtom, CfgDiff};
+use expect_test::{expect, Expect};
+use paths::{AbsPath, AbsPathBuf};
+use serde::de::DeserializeOwned;
+
+use crate::{
+ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
+ WorkspaceBuildScripts,
+};
+
+fn load_cargo(file: &str) -> CrateGraph {
+ load_cargo_with_overrides(file, CfgOverrides::default())
+}
+
+fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGraph {
+ let meta = get_test_json_file(file);
+ let cargo_workspace = CargoWorkspace::new(meta);
+ let project_workspace = ProjectWorkspace::Cargo {
+ cargo: cargo_workspace,
+ build_scripts: WorkspaceBuildScripts::default(),
+ sysroot: None,
+ rustc: None,
+ rustc_cfg: Vec::new(),
+ cfg_overrides,
+ };
+ to_crate_graph(project_workspace)
+}
+
+fn load_rust_project(file: &str) -> CrateGraph {
+ let data = get_test_json_file(file);
+ let project = rooted_project_json(data);
+ let sysroot = Some(get_fake_sysroot());
+ let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
+ to_crate_graph(project_workspace)
+}
+
+fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
+ let file = get_test_path(file);
+ let data = std::fs::read_to_string(file).unwrap();
+ let mut json = data.parse::<serde_json::Value>().unwrap();
+ fixup_paths(&mut json);
+ return serde_json::from_value(json).unwrap();
+
+ fn fixup_paths(val: &mut serde_json::Value) {
+ match val {
+ serde_json::Value::String(s) => replace_root(s, true),
+ serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
+ serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
+ serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
+ }
+ }
+ }
+}
+
+fn replace_root(s: &mut String, direction: bool) {
+ if direction {
+ let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
+ *s = s.replace("$ROOT$", root)
+ } else {
+ let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
+ *s = s.replace(root, "$ROOT$")
+ }
+}
+
+fn get_test_path(file: &str) -> PathBuf {
+ let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ base.join("test_data").join(file)
+}
+
+fn get_fake_sysroot() -> Sysroot {
+ let sysroot_path = get_test_path("fake-sysroot");
+ // there's no `libexec/` directory with a `proc-macro-srv` binary in that
+ // fake sysroot, so we give them both the same path:
+ let sysroot_dir = AbsPathBuf::assert(sysroot_path);
+ let sysroot_src_dir = sysroot_dir.clone();
+ Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap()
+}
+
+fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
+ let mut root = "$ROOT$".to_string();
+ replace_root(&mut root, true);
+ let path = Path::new(&root);
+ let base = AbsPath::assert(path);
+ ProjectJson::new(base, data)
+}
+
+fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
+ project_workspace.to_crate_graph(&mut |_, _| Ok(Vec::new()), &mut {
+ let mut counter = 0;
+ move |_path| {
+ counter += 1;
+ Some(FileId(counter))
+ }
+ })
+}
+
+fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) {
+ let mut crate_graph = format!("{:#?}", crate_graph);
+ replace_root(&mut crate_graph, false);
+ expect.assert_eq(&crate_graph);
+}
+
+#[test]
+fn cargo_hello_world_project_model_with_wildcard_overrides() {
+ let cfg_overrides = CfgOverrides::Wildcard(
+ CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
+ );
+ let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn cargo_hello_world_project_model_with_selective_overrides() {
+ let cfg_overrides = {
+ CfgOverrides::Selective(
+ std::iter::once((
+ "libc".to_owned(),
+ CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
+ ))
+ .collect(),
+ )
+ };
+ let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn cargo_hello_world_project_model() {
+ let crate_graph = load_cargo("hello-world-metadata.json");
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn rust_project_hello_world_project_model() {
+ let crate_graph = load_rust_project("hello-world-project.json");
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "alloc",
+ ),
+ canonical_name: "alloc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Alloc,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 10,
+ ): CrateData {
+ root_file_id: FileId(
+ 11,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "unwind",
+ ),
+ canonical_name: "unwind",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 7,
+ ): CrateData {
+ root_file_id: FileId(
+ 8,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "std_detect",
+ ),
+ canonical_name: "std_detect",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "proc_macro",
+ ),
+ canonical_name: "proc_macro",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 6,
+ ),
+ name: CrateName(
+ "std",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "core",
+ ),
+ canonical_name: "core",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Core,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 11,
+ ): CrateData {
+ root_file_id: FileId(
+ 12,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello_world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "alloc",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 6,
+ ),
+ name: CrateName(
+ "std",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 9,
+ ),
+ name: CrateName(
+ "test",
+ ),
+ prelude: false,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro dylib present",
+ ),
+ origin: CratesIo {
+ repo: None,
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 8,
+ ): CrateData {
+ root_file_id: FileId(
+ 9,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "term",
+ ),
+ canonical_name: "term",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 5,
+ ): CrateData {
+ root_file_id: FileId(
+ 6,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "profiler_builtins",
+ ),
+ canonical_name: "profiler_builtins",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "panic_abort",
+ ),
+ canonical_name: "panic_abort",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 9,
+ ): CrateData {
+ root_file_id: FileId(
+ 10,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "test",
+ ),
+ canonical_name: "test",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Test,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 6,
+ ): CrateData {
+ root_file_id: FileId(
+ 7,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "std",
+ ),
+ canonical_name: "std",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "alloc",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 2,
+ ),
+ name: CrateName(
+ "panic_abort",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 3,
+ ),
+ name: CrateName(
+ "panic_unwind",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 5,
+ ),
+ name: CrateName(
+ "profiler_builtins",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 7,
+ ),
+ name: CrateName(
+ "std_detect",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 8,
+ ),
+ name: CrateName(
+ "term",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 9,
+ ),
+ name: CrateName(
+ "test",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 10,
+ ),
+ name: CrateName(
+ "unwind",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Std,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "panic_unwind",
+ ),
+ canonical_name: "panic_unwind",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ );
+}
+
+#[test]
+fn rust_project_is_proc_macro_has_proc_macro_dep() {
+ let crate_graph = load_rust_project("is-proc-macro-project.json");
+ // Since the project only defines one crate (outside the sysroot crates),
+ // it should be the one with the biggest Id.
+ let crate_id = crate_graph.iter().max().unwrap();
+ let crate_data = &crate_graph[crate_id];
+ // Assert that the project crate with `is_proc_macro` has a dependency
+ // on the proc_macro sysroot crate.
+ crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
new file mode 100644
index 000000000..b144006b4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -0,0 +1,1032 @@
+//! Handles lowering of build-system-specific workspace information (`cargo
+//! metadata` or `rust-project.json`) into the representation stored in the
+//! salsa database -- `CrateGraph`.
+
+use std::{collections::VecDeque, fmt, fs, process::Command};
+
+use anyhow::{format_err, Context, Result};
+use base_db::{
+ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
+ FileId, LangCrateOrigin, ProcMacroLoadResult,
+};
+use cfg::{CfgDiff, CfgOptions};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::always;
+
+use crate::{
+ build_scripts::BuildScriptOutput,
+ cargo_workspace::{DepKind, PackageData, RustcSource},
+ cfg_flag::CfgFlag,
+ rustc_cfg,
+ sysroot::SysrootCrate,
+ utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, ProjectJson, ProjectManifest, Sysroot,
+ TargetKind, WorkspaceBuildScripts,
+};
+
+/// A set of cfg-overrides per crate.
+///
+/// `Wildcard(..)` is useful for, e.g., disabling `#[cfg(test)]` on all crates,
+/// without having to first obtain a list of all crates.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum CfgOverrides {
+ /// A single global set of overrides matching all crates.
+ Wildcard(CfgDiff),
+ /// A set of overrides matching specific crates.
+ Selective(FxHashMap<String, CfgDiff>),
+}
+
+impl Default for CfgOverrides {
+ fn default() -> Self {
+ Self::Selective(FxHashMap::default())
+ }
+}
+
+impl CfgOverrides {
+ pub fn len(&self) -> usize {
+ match self {
+ CfgOverrides::Wildcard(_) => 1,
+ CfgOverrides::Selective(hash_map) => hash_map.len(),
+ }
+ }
+}
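As an aside (not part of the patch): a minimal sketch of how a caller might resolve the per-crate diff out of a `CfgOverrides`, mirroring the match that `cargo_to_crate_graph` performs further down; the free function name `overrides_for` is hypothetical.

fn overrides_for<'a>(overrides: &'a CfgOverrides, crate_name: &str) -> Option<&'a CfgDiff> {
    match overrides {
        // A wildcard diff applies to every crate, regardless of name.
        CfgOverrides::Wildcard(diff) => Some(diff),
        // A selective map only yields a diff for crates it explicitly lists.
        CfgOverrides::Selective(map) => map.get(crate_name),
    }
}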
+
+/// `PackageRoot` describes a package root folder,
+/// which may be an external dependency or a member of
+/// the current workspace.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct PackageRoot {
+ /// Is from the local filesystem and may be edited
+ pub is_local: bool,
+ pub include: Vec<AbsPathBuf>,
+ pub exclude: Vec<AbsPathBuf>,
+}
+
+#[derive(Clone, Eq, PartialEq)]
+pub enum ProjectWorkspace {
+ /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
+ Cargo {
+ cargo: CargoWorkspace,
+ build_scripts: WorkspaceBuildScripts,
+ sysroot: Option<Sysroot>,
+ rustc: Option<CargoWorkspace>,
+ /// Holds cfg flags for the current target. We get those by running
+ /// `rustc --print cfg`.
+ ///
+ /// FIXME: make this a per-crate map, as, eg, build.rs might have a
+ /// different target.
+ rustc_cfg: Vec<CfgFlag>,
+ cfg_overrides: CfgOverrides,
+ },
+ /// Project workspace was manually specified using a `rust-project.json` file.
+ Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
+
+ // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
+ // That's not the end user experience we should strive for.
+ // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
+ // That needs some changes on the salsa-level though.
+ // In particular, we should split the unified CrateGraph (which currently has maximal durability) into proper crate graph, and a set of ad hoc roots (with minimal durability).
+ // Then, we need to hide the graph behind the queries such that most queries look only at the proper crate graph, and fall back to ad hoc roots only if there's no results.
+ // After this, we should be able to tweak the logic in reload.rs to add newly opened files, which don't belong to any existing crates, to the set of the detached files.
+ /// Project with a set of disjoint files, not belonging to any particular workspace.
+ /// Backed by basic sysroot crates for basic completion and highlighting.
+ DetachedFiles { files: Vec<AbsPathBuf>, sysroot: Sysroot, rustc_cfg: Vec<CfgFlag> },
+}
+
+impl fmt::Debug for ProjectWorkspace {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Make sure this isn't too verbose.
+ match self {
+ ProjectWorkspace::Cargo {
+ cargo,
+ build_scripts: _,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+ } => f
+ .debug_struct("Cargo")
+ .field("root", &cargo.workspace_root().file_name())
+ .field("n_packages", &cargo.packages().len())
+ .field("sysroot", &sysroot.is_some())
+ .field(
+ "n_rustc_compiler_crates",
+ &rustc.as_ref().map_or(0, |rc| rc.packages().len()),
+ )
+ .field("n_rustc_cfg", &rustc_cfg.len())
+ .field("n_cfg_overrides", &cfg_overrides.len())
+ .finish(),
+ ProjectWorkspace::Json { project, sysroot, rustc_cfg } => {
+ let mut debug_struct = f.debug_struct("Json");
+ debug_struct.field("n_crates", &project.n_crates());
+ if let Some(sysroot) = sysroot {
+ debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
+ }
+ debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
+ debug_struct.finish()
+ }
+ ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
+ .debug_struct("DetachedFiles")
+ .field("n_files", &files.len())
+ .field("n_sysroot_crates", &sysroot.crates().len())
+ .field("n_rustc_cfg", &rustc_cfg.len())
+ .finish(),
+ }
+ }
+}
+
+impl ProjectWorkspace {
+ pub fn load(
+ manifest: ProjectManifest,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Result<ProjectWorkspace> {
+ let res = match manifest {
+ ProjectManifest::ProjectJson(project_json) => {
+ let file = fs::read_to_string(&project_json).with_context(|| {
+ format!("Failed to read json file {}", project_json.display())
+ })?;
+ let data = serde_json::from_str(&file).with_context(|| {
+ format!("Failed to deserialize json file {}", project_json.display())
+ })?;
+ let project_location = project_json.parent().to_path_buf();
+ let project_json = ProjectJson::new(&project_location, data);
+ ProjectWorkspace::load_inline(project_json, config.target.as_deref())?
+ }
+ ProjectManifest::CargoToml(cargo_toml) => {
+ let cargo_version = utf8_stdout({
+ let mut cmd = Command::new(toolchain::cargo());
+ cmd.arg("--version");
+ cmd
+ })?;
+
+ let meta = CargoWorkspace::fetch_metadata(
+ &cargo_toml,
+ cargo_toml.parent(),
+ config,
+ progress,
+ )
+ .with_context(|| {
+ format!(
+ "Failed to read Cargo metadata from Cargo.toml file {}, {}",
+ cargo_toml.display(),
+ cargo_version
+ )
+ })?;
+ let cargo = CargoWorkspace::new(meta);
+
+ let sysroot = if config.no_sysroot {
+ None
+ } else {
+ Some(Sysroot::discover(cargo_toml.parent()).with_context(|| {
+ format!(
+ "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
+ cargo_toml.display()
+ )
+ })?)
+ };
+
+ let rustc_dir = match &config.rustc_source {
+ Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
+ Some(RustcSource::Discover) => Sysroot::discover_rustc(&cargo_toml),
+ None => None,
+ };
+
+ let rustc = match rustc_dir {
+ Some(rustc_dir) => Some({
+ let meta = CargoWorkspace::fetch_metadata(
+ &rustc_dir,
+ cargo_toml.parent(),
+ config,
+ progress,
+ )
+ .with_context(|| {
+ "Failed to read Cargo metadata for Rust sources".to_string()
+ })?;
+ CargoWorkspace::new(meta)
+ }),
+ None => None,
+ };
+
+ let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref());
+
+ let cfg_overrides = config.cfg_overrides();
+ ProjectWorkspace::Cargo {
+ cargo,
+ build_scripts: WorkspaceBuildScripts::default(),
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+ }
+ }
+ };
+
+ Ok(res)
+ }
+
+ pub fn load_inline(
+ project_json: ProjectJson,
+ target: Option<&str>,
+ ) -> Result<ProjectWorkspace> {
+ let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
+ (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
+ (Some(sysroot), None) => {
+ // assume sysroot is structured like rustup's and guess `sysroot_src`
+ let sysroot_src =
+ sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
+
+ Some(Sysroot::load(sysroot, sysroot_src)?)
+ }
+ (None, Some(sysroot_src)) => {
+ // assume sysroot is structured like rustup's and guess `sysroot`
+ let mut sysroot = sysroot_src.clone();
+ for _ in 0..5 {
+ sysroot.pop();
+ }
+ Some(Sysroot::load(sysroot, sysroot_src)?)
+ }
+ (None, None) => None,
+ };
+
+ let rustc_cfg = rustc_cfg::get(None, target);
+ Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
+ }
+
+ pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> {
+ let sysroot = Sysroot::discover(
+ detached_files
+ .first()
+ .and_then(|it| it.parent())
+ .ok_or_else(|| format_err!("No detached files to load"))?,
+ )?;
+ let rustc_cfg = rustc_cfg::get(None, None);
+ Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
+ }
+
+ pub fn run_build_scripts(
+ &self,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Result<WorkspaceBuildScripts> {
+ match self {
+ ProjectWorkspace::Cargo { cargo, .. } => {
+ WorkspaceBuildScripts::run(config, cargo, progress).with_context(|| {
+ format!("Failed to run build scripts for {}", &cargo.workspace_root().display())
+ })
+ }
+ ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
+ Ok(WorkspaceBuildScripts::default())
+ }
+ }
+ }
+
+ pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
+ match self {
+ ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs,
+ _ => {
+ always!(bs == WorkspaceBuildScripts::default());
+ }
+ }
+ }
+
+ /// Returns the roots for the current `ProjectWorkspace`.
+ /// The return type contains the path and whether or not
+ /// the root is a member of the current workspace.
+ pub fn to_roots(&self) -> Vec<PackageRoot> {
+ match self {
+ ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
+ .crates()
+ .map(|(_, krate)| PackageRoot {
+ is_local: krate.is_workspace_member,
+ include: krate.include.clone(),
+ exclude: krate.exclude.clone(),
+ })
+ .collect::<FxHashSet<_>>()
+ .into_iter()
+ .chain(sysroot.as_ref().into_iter().flat_map(|sysroot| {
+ sysroot.crates().map(move |krate| PackageRoot {
+ is_local: false,
+ include: vec![sysroot[krate].root.parent().to_path_buf()],
+ exclude: Vec::new(),
+ })
+ }))
+ .collect::<Vec<_>>(),
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg: _,
+ cfg_overrides: _,
+ build_scripts,
+ } => {
+ cargo
+ .packages()
+ .map(|pkg| {
+ let is_local = cargo[pkg].is_local;
+ let pkg_root = cargo[pkg].manifest.parent().to_path_buf();
+
+ let mut include = vec![pkg_root.clone()];
+ let out_dir =
+ build_scripts.get_output(pkg).and_then(|it| it.out_dir.clone());
+ include.extend(out_dir);
+
+ // In case the target's path is manually set in Cargo.toml to be
+ // outside the package root, add its parent as an extra include.
+ // An example of this situation looks like this:
+ //
+ // ```toml
+ // [lib]
+ // path = "../../src/lib.rs"
+ // ```
+ let extra_targets = cargo[pkg]
+ .targets
+ .iter()
+ .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib)
+ .filter_map(|&tgt| cargo[tgt].root.parent())
+ .map(|tgt| tgt.normalize().to_path_buf())
+ .filter(|path| !path.starts_with(&pkg_root));
+ include.extend(extra_targets);
+
+ let mut exclude = vec![pkg_root.join(".git")];
+ if is_local {
+ exclude.push(pkg_root.join("target"));
+ } else {
+ exclude.push(pkg_root.join("tests"));
+ exclude.push(pkg_root.join("examples"));
+ exclude.push(pkg_root.join("benches"));
+ }
+ PackageRoot { is_local, include, exclude }
+ })
+ .chain(sysroot.iter().map(|sysroot| PackageRoot {
+ is_local: false,
+ include: vec![sysroot.src_root().to_path_buf()],
+ exclude: Vec::new(),
+ }))
+ .chain(rustc.iter().flat_map(|rustc| {
+ rustc.packages().map(move |krate| PackageRoot {
+ is_local: false,
+ include: vec![rustc[krate].manifest.parent().to_path_buf()],
+ exclude: Vec::new(),
+ })
+ }))
+ .collect()
+ }
+ ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
+ .iter()
+ .map(|detached_file| PackageRoot {
+ is_local: true,
+ include: vec![detached_file.clone()],
+ exclude: Vec::new(),
+ })
+ .chain(sysroot.crates().map(|krate| PackageRoot {
+ is_local: false,
+ include: vec![sysroot[krate].root.parent().to_path_buf()],
+ exclude: Vec::new(),
+ }))
+ .collect(),
+ }
+ }
+
+ pub fn n_packages(&self) -> usize {
+ match self {
+ ProjectWorkspace::Json { project, .. } => project.n_crates(),
+ ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
+ let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len());
+ let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len());
+ cargo.packages().len() + sysroot_package_len + rustc_package_len
+ }
+ ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
+ sysroot.crates().len() + files.len()
+ }
+ }
+ }
+
+ pub fn to_crate_graph(
+ &self,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ ) -> CrateGraph {
+ let _p = profile::span("ProjectWorkspace::to_crate_graph");
+
+ let mut crate_graph = match self {
+ ProjectWorkspace::Json { project, sysroot, rustc_cfg } => project_json_to_crate_graph(
+ rustc_cfg.clone(),
+ load_proc_macro,
+ load,
+ project,
+ sysroot,
+ ),
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+ build_scripts,
+ } => cargo_to_crate_graph(
+ rustc_cfg.clone(),
+ cfg_overrides,
+ load_proc_macro,
+ load,
+ cargo,
+ build_scripts,
+ sysroot.as_ref(),
+ rustc,
+ ),
+ ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
+ detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot)
+ }
+ };
+ if crate_graph.patch_cfg_if() {
+ tracing::debug!("Patched std to depend on cfg-if")
+ } else {
+ tracing::debug!("Did not patch std to depend on cfg-if")
+ }
+ crate_graph
+ }
+}
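As an aside (not part of the patch): a hedged sketch of the intended call sequence for this type, assuming a `ProjectManifest` obtained elsewhere and using stub loader callbacks, so it only demonstrates the wiring rather than producing a populated graph.

fn build_graph(manifest: ProjectManifest, config: &CargoConfig) -> Result<CrateGraph> {
    let progress = |msg: String| eprintln!("{}", msg);
    let mut ws = ProjectWorkspace::load(manifest, config, &progress)?;
    // Build-script outputs are optional, but feeding them back in enables
    // OUT_DIR and the cfgs emitted by `build.rs`.
    let bs = ws.run_build_scripts(config, &progress)?;
    ws.set_build_scripts(bs);
    // Real callers map paths into the VFS and load proc-macro dylibs; these
    // stubs load nothing, so the resulting graph stays empty.
    let mut load_proc_macro =
        |_name: &str, _path: &AbsPath| -> ProcMacroLoadResult { Err("not loaded".into()) };
    let mut load = |_path: &AbsPath| -> Option<FileId> { None };
    Ok(ws.to_crate_graph(&mut load_proc_macro, &mut load))
}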
+
+fn project_json_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ project: &ProjectJson,
+ sysroot: &Option<Sysroot>,
+) -> CrateGraph {
+ let mut crate_graph = CrateGraph::default();
+ let sysroot_deps = sysroot
+ .as_ref()
+ .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));
+
+ let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
+ let crates: FxHashMap<CrateId, CrateId> = project
+ .crates()
+ .filter_map(|(crate_id, krate)| {
+ let file_path = &krate.root_module;
+ let file_id = load(file_path)?;
+ Some((crate_id, krate, file_id))
+ })
+ .map(|(crate_id, krate, file_id)| {
+ let env = krate.env.clone().into_iter().collect();
+ let proc_macro = match krate.proc_macro_dylib_path.clone() {
+ Some(it) => load_proc_macro(
+ krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""),
+ &it,
+ ),
+ None => Err("no proc macro dylib present".into()),
+ };
+
+ let target_cfgs = match krate.target.as_deref() {
+ Some(target) => {
+ cfg_cache.entry(target).or_insert_with(|| rustc_cfg::get(None, Some(target)))
+ }
+ None => &rustc_cfg,
+ };
+
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(target_cfgs.iter().chain(krate.cfg.iter()).cloned());
+ (
+ crate_id,
+ crate_graph.add_crate_root(
+ file_id,
+ krate.edition,
+ krate.display_name.clone(),
+ krate.version.clone(),
+ cfg_options.clone(),
+ cfg_options,
+ env,
+ proc_macro,
+ krate.is_proc_macro,
+ if krate.display_name.is_some() {
+ CrateOrigin::CratesIo { repo: krate.repository.clone() }
+ } else {
+ CrateOrigin::CratesIo { repo: None }
+ },
+ ),
+ )
+ })
+ .collect();
+
+ for (from, krate) in project.crates() {
+ if let Some(&from) = crates.get(&from) {
+ if let Some((public_deps, libproc_macro)) = &sysroot_deps {
+ public_deps.add(from, &mut crate_graph);
+ if krate.is_proc_macro {
+ if let Some(proc_macro) = libproc_macro {
+ add_dep(
+ &mut crate_graph,
+ from,
+ CrateName::new("proc_macro").unwrap(),
+ *proc_macro,
+ );
+ }
+ }
+ }
+
+ for dep in &krate.deps {
+ if let Some(&to) = crates.get(&dep.crate_id) {
+ add_dep(&mut crate_graph, from, dep.name.clone(), to)
+ }
+ }
+ }
+ }
+ crate_graph
+}
+
+fn cargo_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ override_cfg: &CfgOverrides,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ cargo: &CargoWorkspace,
+ build_scripts: &WorkspaceBuildScripts,
+ sysroot: Option<&Sysroot>,
+ rustc: &Option<CargoWorkspace>,
+) -> CrateGraph {
+ let _p = profile::span("cargo_to_crate_graph");
+ let mut crate_graph = CrateGraph::default();
+ let (public_deps, libproc_macro) = match sysroot {
+ Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
+ None => (SysrootPublicDeps::default(), None),
+ };
+
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+
+ let mut pkg_to_lib_crate = FxHashMap::default();
+
+ cfg_options.insert_atom("debug_assertions".into());
+
+ let mut pkg_crates = FxHashMap::default();
+ // Does any crate signal to rust-analyzer that it needs the rustc_private crates?
+ let mut has_private = false;
+ // Next, create crates for each (package, target) pair
+ for pkg in cargo.packages() {
+ let mut cfg_options = cfg_options.clone();
+
+ let overrides = match override_cfg {
+ CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
+ CfgOverrides::Selective(cfg_overrides) => cfg_overrides.get(&cargo[pkg].name),
+ };
+
+ // Add test cfg for local crates
+ if cargo[pkg].is_local {
+ cfg_options.insert_atom("test".into());
+ }
+
+ if let Some(overrides) = overrides {
+ // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
+ // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
+ // working on rust-lang/rust as that's the only time it appears outside sysroot).
+ //
+ // A more ideal solution might be to reanalyze crates based on where the cursor is and
+ // figure out the set of cfgs that would have to apply to make it active.
+
+ cfg_options.apply_diff(overrides.clone());
+ };
+
+ has_private |= cargo[pkg].metadata.rustc_private;
+ let mut lib_tgt = None;
+ for &tgt in cargo[pkg].targets.iter() {
+ if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member {
+ // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
+ // add any targets except the library target, since those will not work correctly if
+ // they use dev-dependencies.
+ // In fact, they can break quite badly if multiple client workspaces get merged:
+ // https://github.com/rust-lang/rust-analyzer/issues/11300
+ continue;
+ }
+
+ if let Some(file_id) = load(&cargo[tgt].root) {
+ let crate_id = add_target_crate_root(
+ &mut crate_graph,
+ &cargo[pkg],
+ build_scripts.get_output(pkg),
+ cfg_options.clone(),
+ &mut |path| load_proc_macro(&cargo[tgt].name, path),
+ file_id,
+ &cargo[tgt].name,
+ cargo[tgt].is_proc_macro,
+ );
+ if cargo[tgt].kind == TargetKind::Lib {
+ lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
+ pkg_to_lib_crate.insert(pkg, crate_id);
+ }
+ if let Some(proc_macro) = libproc_macro {
+ add_dep_with_prelude(
+ &mut crate_graph,
+ crate_id,
+ CrateName::new("proc_macro").unwrap(),
+ proc_macro,
+ cargo[tgt].is_proc_macro,
+ );
+ }
+
+ pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, cargo[tgt].kind));
+ }
+ }
+
+ // Set deps to core and std, and to the lib target of the current package
+ for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+ // Add sysroot deps first so that a lib target named `core` etc. can overwrite them.
+ public_deps.add(*from, &mut crate_graph);
+
+ if let Some((to, name)) = lib_tgt.clone() {
+ if to != *from && *kind != TargetKind::BuildScript {
+ // (a build script cannot depend on its own library target)
+
+ // For root projects with dashes in their name,
+ // cargo metadata does not do any normalization,
+ // so we do it ourselves currently
+ let name = CrateName::normalize_dashes(&name);
+ add_dep(&mut crate_graph, *from, name, to);
+ }
+ }
+ }
+ }
+
+ // Now add a dep edge from all targets of upstream to the lib
+ // target of downstream.
+ for pkg in cargo.packages() {
+ for dep in cargo[pkg].dependencies.iter() {
+ let name = CrateName::new(&dep.name).unwrap();
+ if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
+ for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+ if dep.kind == DepKind::Build && *kind != TargetKind::BuildScript {
+ // Only build scripts may depend on build dependencies.
+ continue;
+ }
+ if dep.kind != DepKind::Build && *kind == TargetKind::BuildScript {
+ // Build scripts may only depend on build dependencies.
+ continue;
+ }
+
+ add_dep(&mut crate_graph, *from, name.clone(), to)
+ }
+ }
+ }
+ }
+
+ if has_private {
+ // If the user provided a path to rustc sources, we add all the rustc_private crates
+ // and create dependencies on them for the crates which opt in to that
+ if let Some(rustc_workspace) = rustc {
+ handle_rustc_crates(
+ rustc_workspace,
+ load,
+ &mut crate_graph,
+ &cfg_options,
+ override_cfg,
+ load_proc_macro,
+ &mut pkg_to_lib_crate,
+ &public_deps,
+ cargo,
+ &pkg_crates,
+ build_scripts,
+ );
+ }
+ }
+ crate_graph
+}
+
+fn detached_files_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ detached_files: &[AbsPathBuf],
+ sysroot: &Sysroot,
+) -> CrateGraph {
+ let _p = profile::span("detached_files_to_crate_graph");
+ let mut crate_graph = CrateGraph::default();
+ let (public_deps, _libproc_macro) =
+ sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
+
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+
+ for detached_file in detached_files {
+ let file_id = match load(detached_file) {
+ Some(file_id) => file_id,
+ None => {
+ tracing::error!("Failed to load detached file {:?}", detached_file);
+ continue;
+ }
+ };
+ let display_name = detached_file
+ .file_stem()
+ .and_then(|os_str| os_str.to_str())
+ .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_string()));
+ let detached_file_crate = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ display_name,
+ None,
+ cfg_options.clone(),
+ cfg_options.clone(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None },
+ );
+
+ public_deps.add(detached_file_crate, &mut crate_graph);
+ }
+ crate_graph
+}
+
+fn handle_rustc_crates(
+ rustc_workspace: &CargoWorkspace,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ crate_graph: &mut CrateGraph,
+ cfg_options: &CfgOptions,
+ override_cfg: &CfgOverrides,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<crate::PackageData>, CrateId>,
+ public_deps: &SysrootPublicDeps,
+ cargo: &CargoWorkspace,
+ pkg_crates: &FxHashMap<la_arena::Idx<crate::PackageData>, Vec<(CrateId, TargetKind)>>,
+ build_scripts: &WorkspaceBuildScripts,
+) {
+ let mut rustc_pkg_crates = FxHashMap::default();
+ // The root package of the rustc-dev component is rustc_driver, so we match that
+ let root_pkg =
+ rustc_workspace.packages().find(|package| rustc_workspace[*package].name == "rustc_driver");
+ // The rustc workspace might be incomplete (such as if rustc-dev is not
+ // installed for the current toolchain) and `rustc_source` is set to discover.
+ if let Some(root_pkg) = root_pkg {
+ // Iterate through every crate in the dependency subtree of rustc_driver using BFS
+ let mut queue = VecDeque::new();
+ queue.push_back(root_pkg);
+ while let Some(pkg) = queue.pop_front() {
+ // Don't duplicate packages if they are depended on in a diamond pattern.
+ // N.B. if this line is omitted, we try to analyse over 4_800_000 crates,
+ // which is not ideal
+ if rustc_pkg_crates.contains_key(&pkg) {
+ continue;
+ }
+ for dep in &rustc_workspace[pkg].dependencies {
+ queue.push_back(dep.pkg);
+ }
+
+ let mut cfg_options = cfg_options.clone();
+
+ let overrides = match override_cfg {
+ CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
+ CfgOverrides::Selective(cfg_overrides) => {
+ cfg_overrides.get(&rustc_workspace[pkg].name)
+ }
+ };
+
+ if let Some(overrides) = overrides {
+ // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
+ // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
+ // working on rust-lang/rust as that's the only time it appears outside sysroot).
+ //
+ // A more ideal solution might be to reanalyze crates based on where the cursor is and
+ // figure out the set of cfgs that would have to apply to make it active.
+
+ cfg_options.apply_diff(overrides.clone());
+ };
+
+ for &tgt in rustc_workspace[pkg].targets.iter() {
+ if rustc_workspace[tgt].kind != TargetKind::Lib {
+ continue;
+ }
+ if let Some(file_id) = load(&rustc_workspace[tgt].root) {
+ let crate_id = add_target_crate_root(
+ crate_graph,
+ &rustc_workspace[pkg],
+ build_scripts.get_output(pkg),
+ cfg_options.clone(),
+ &mut |path| load_proc_macro(&rustc_workspace[tgt].name, path),
+ file_id,
+ &rustc_workspace[tgt].name,
+ rustc_workspace[tgt].is_proc_macro,
+ );
+ pkg_to_lib_crate.insert(pkg, crate_id);
+ // Add dependencies on core / std / alloc for this crate
+ public_deps.add(crate_id, crate_graph);
+ rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
+ }
+ }
+ }
+ }
+ // Now add a dep edge from all targets of upstream to the lib
+ // target of downstream.
+ for pkg in rustc_pkg_crates.keys().copied() {
+ for dep in rustc_workspace[pkg].dependencies.iter() {
+ let name = CrateName::new(&dep.name).unwrap();
+ if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
+ for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
+ add_dep(crate_graph, from, name.clone(), to);
+ }
+ }
+ }
+ }
+ // Add a dependency on the rustc_private crates for all targets of each package
+ // which opts in
+ for dep in rustc_workspace.packages() {
+ let name = CrateName::normalize_dashes(&rustc_workspace[dep].name);
+
+ if let Some(&to) = pkg_to_lib_crate.get(&dep) {
+ for pkg in cargo.packages() {
+ let package = &cargo[pkg];
+ if !package.metadata.rustc_private {
+ continue;
+ }
+ for (from, _) in pkg_crates.get(&pkg).into_iter().flatten() {
+ // Avoid creating duplicate dependencies
+ // This avoids the situation where `from` depends on e.g. `arrayvec`, but
+ // `rust_analyzer` thinks that it should use the one from the `rustc_source`
+ // instead of the one from `crates.io`
+ if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
+ add_dep(crate_graph, *from, name.clone(), to);
+ }
+ }
+ }
+ }
+ }
+}
+
+fn add_target_crate_root(
+ crate_graph: &mut CrateGraph,
+ pkg: &PackageData,
+ build_data: Option<&BuildScriptOutput>,
+ cfg_options: CfgOptions,
+ load_proc_macro: &mut dyn FnMut(&AbsPath) -> ProcMacroLoadResult,
+ file_id: FileId,
+ cargo_name: &str,
+ is_proc_macro: bool,
+) -> CrateId {
+ let edition = pkg.edition;
+ let mut potential_cfg_options = cfg_options.clone();
+ potential_cfg_options.extend(
+ pkg.features
+ .iter()
+ .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }),
+ );
+ let cfg_options = {
+ let mut opts = cfg_options;
+ for feature in pkg.active_features.iter() {
+ opts.insert_key_value("feature".into(), feature.into());
+ }
+ if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) {
+ opts.extend(cfgs.iter().cloned());
+ }
+ opts
+ };
+
+ let mut env = Env::default();
+ inject_cargo_env(pkg, &mut env);
+
+ if let Some(envs) = build_data.map(|it| &it.envs) {
+ for (k, v) in envs {
+ env.set(k, v.clone());
+ }
+ }
+
+ let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
+ Some(Some(it)) => load_proc_macro(it),
+ Some(None) => Err("no proc macro dylib present".into()),
+ None => Err("crate has not (yet) been built".into()),
+ };
+
+ let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string());
+ crate_graph.add_crate_root(
+ file_id,
+ edition,
+ Some(display_name),
+ Some(pkg.version.to_string()),
+ cfg_options,
+ potential_cfg_options,
+ env,
+ proc_macro,
+ is_proc_macro,
+ CrateOrigin::CratesIo { repo: pkg.repository.clone() },
+ )
+}
+
+#[derive(Default)]
+struct SysrootPublicDeps {
+ deps: Vec<(CrateName, CrateId, bool)>,
+}
+
+impl SysrootPublicDeps {
+ /// Makes `from` depend on the public sysroot crates.
+ fn add(&self, from: CrateId, crate_graph: &mut CrateGraph) {
+ for (name, krate, prelude) in &self.deps {
+ add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
+ }
+ }
+}
+
+fn sysroot_to_crate_graph(
+ crate_graph: &mut CrateGraph,
+ sysroot: &Sysroot,
+ rustc_cfg: Vec<CfgFlag>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+) -> (SysrootPublicDeps, Option<CrateId>) {
+ let _p = profile::span("sysroot_to_crate_graph");
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+ let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = sysroot
+ .crates()
+ .filter_map(|krate| {
+ let file_id = load(&sysroot[krate].root)?;
+
+ let env = Env::default();
+ let display_name = CrateDisplayName::from_canonical_name(sysroot[krate].name.clone());
+ let crate_id = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ Some(display_name),
+ None,
+ cfg_options.clone(),
+ cfg_options.clone(),
+ env,
+ Err("no proc macro loaded for sysroot crate".into()),
+ false,
+ CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)),
+ );
+ Some((krate, crate_id))
+ })
+ .collect();
+
+ for from in sysroot.crates() {
+ for &to in sysroot[from].deps.iter() {
+ let name = CrateName::new(&sysroot[to].name).unwrap();
+ if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) {
+ add_dep(crate_graph, from, name, to);
+ }
+ }
+ }
+
+ let public_deps = SysrootPublicDeps {
+ deps: sysroot
+ .public_deps()
+ .map(|(name, idx, prelude)| {
+ (CrateName::new(name).unwrap(), sysroot_crates[&idx], prelude)
+ })
+ .collect::<Vec<_>>(),
+ };
+
+ let libproc_macro = sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
+ (public_deps, libproc_macro)
+}
+
+fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
+ add_dep_inner(graph, from, Dependency::new(name, to))
+}
+
+fn add_dep_with_prelude(
+ graph: &mut CrateGraph,
+ from: CrateId,
+ name: CrateName,
+ to: CrateId,
+ prelude: bool,
+) {
+ add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
+}
+
+fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
+ if let Err(err) = graph.add_dep(from, dep) {
+ tracing::error!("{}", err)
+ }
+}
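As an aside (not part of the patch): a tiny illustration of the two helpers above, assuming `graph` already contains crates `a` and `b` created via `add_crate_root`; the dependency names used here are arbitrary.

fn link_example(graph: &mut CrateGraph, a: CrateId, b: CrateId) {
    // `a` depends on `b` under the name `dep_a`, injected into the extern prelude.
    add_dep(graph, a, CrateName::new("dep_a").unwrap(), b);
    // Same edge shape, but explicitly kept out of the extern prelude.
    add_dep_with_prelude(graph, a, CrateName::new("dep_b").unwrap(), b, false);
}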
+
+/// Recreates the compile-time environment variables that Cargo sets.
+///
+/// Should be synced with
+/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+///
+/// FIXME: ask Cargo to provide this data instead of re-deriving.
+fn inject_cargo_env(package: &PackageData, env: &mut Env) {
+ // FIXME: Missing variables:
+ // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
+
+ let manifest_dir = package.manifest.parent();
+ env.set("CARGO_MANIFEST_DIR", manifest_dir.as_os_str().to_string_lossy().into_owned());
+
+ // Not always right, but works for common cases.
+ env.set("CARGO", "cargo".into());
+
+ env.set("CARGO_PKG_VERSION", package.version.to_string());
+ env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
+ env.set("CARGO_PKG_VERSION_MINOR", package.version.minor.to_string());
+ env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
+ env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());
+
+ env.set("CARGO_PKG_AUTHORS", String::new());
+
+ env.set("CARGO_PKG_NAME", package.name.clone());
+ // FIXME: This isn't really correct (a package can have many crates with different names), but
+ // it's better than leaving the variable unset.
+ env.set("CARGO_CRATE_NAME", CrateName::normalize_dashes(&package.name).to_string());
+ env.set("CARGO_PKG_DESCRIPTION", String::new());
+ env.set("CARGO_PKG_HOMEPAGE", String::new());
+ env.set("CARGO_PKG_REPOSITORY", String::new());
+ env.set("CARGO_PKG_LICENSE", String::new());
+
+ env.set("CARGO_PKG_LICENSE_FILE", String::new());
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json
new file mode 100644
index 000000000..b6142eeaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-metadata.json
@@ -0,0 +1,245 @@
+{
+ "packages": [
+ {
+ "name": "hello-world",
+ "version": "0.1.0",
+ "id": "hello-world 0.1.0 (path+file://$ROOT$hello-world)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "libc",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^0.2",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "hello-world",
+ "src_path": "$ROOT$hello-world/src/lib.rs",
+ "edition": "2018",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ {
+ "kind": [
+ "bin"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "hello-world",
+ "src_path": "$ROOT$hello-world/src/main.rs",
+ "edition": "2018",
+ "doc": true,
+ "doctest": false,
+ "test": true
+ },
+ {
+ "kind": [
+ "example"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "an-example",
+ "src_path": "$ROOT$hello-world/examples/an-example.rs",
+ "edition": "2018",
+ "doc": false,
+ "doctest": false,
+ "test": false
+ },
+ {
+ "kind": [
+ "test"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "it",
+ "src_path": "$ROOT$hello-world/tests/it.rs",
+ "edition": "2018",
+ "doc": false,
+ "doctest": false,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$hello-world/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2018",
+ "links": null
+ },
+ {
+ "name": "libc",
+ "version": "0.2.98",
+ "id": "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)",
+ "license": "MIT OR Apache-2.0",
+ "license_file": null,
+ "description": "Raw FFI bindings to platform libraries like libc.\n",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "dependencies": [
+ {
+ "name": "rustc-std-workspace-core",
+ "source": "registry+https://github.com/rust-lang/crates.io-index",
+ "req": "^1.0.0",
+ "kind": null,
+ "rename": null,
+ "optional": true,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "libc",
+ "src_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/src/lib.rs",
+ "edition": "2015",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ },
+ {
+ "kind": [
+ "test"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "const_fn",
+ "src_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/tests/const_fn.rs",
+ "edition": "2015",
+ "doc": false,
+ "doctest": false,
+ "test": true
+ },
+ {
+ "kind": [
+ "custom-build"
+ ],
+ "crate_types": [
+ "bin"
+ ],
+ "name": "build-script-build",
+ "src_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/build.rs",
+ "edition": "2015",
+ "doc": false,
+ "doctest": false,
+ "test": false
+ }
+ ],
+ "features": {
+ "align": [],
+ "const-extern-fn": [],
+ "default": [
+ "std"
+ ],
+ "extra_traits": [],
+ "rustc-dep-of-std": [
+ "align",
+ "rustc-std-workspace-core"
+ ],
+ "std": [],
+ "use_std": [
+ "std"
+ ]
+ },
+ "manifest_path": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [
+ "The Rust Project Developers"
+ ],
+ "categories": [
+ "external-ffi-bindings",
+ "no-std",
+ "os"
+ ],
+ "keywords": [
+ "libc",
+ "ffi",
+ "bindings",
+ "operating",
+ "system"
+ ],
+ "readme": "README.md",
+ "repository": "https://github.com/rust-lang/libc",
+ "homepage": "https://github.com/rust-lang/libc",
+ "documentation": "https://docs.rs/libc/",
+ "edition": "2015",
+ "links": null
+ }
+ ],
+ "workspace_members": [
+ "hello-world 0.1.0 (path+file://$ROOT$hello-world)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "hello-world 0.1.0 (path+file://$ROOT$hello-world)",
+ "dependencies": [
+ "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)"
+ ],
+ "deps": [
+ {
+ "name": "libc",
+ "pkg": "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "libc 0.2.98 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dependencies": [],
+ "deps": [],
+ "features": [
+ "default",
+ "std"
+ ]
+ }
+ ],
+ "root": "hello-world 0.1.0 (path+file://$ROOT$hello-world)"
+ },
+ "target_directory": "$ROOT$hello-world/target",
+ "version": 1,
+ "workspace_root": "$ROOT$hello-world",
+ "metadata": null
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json
new file mode 100644
index 000000000..b27ab1f42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/hello-world-project.json
@@ -0,0 +1,12 @@
+{
+ "sysroot_src": null,
+ "crates": [
+ {
+ "display_name": "hello_world",
+ "root_module": "$ROOT$src/lib.rs",
+ "edition": "2018",
+ "deps": [],
+ "is_workspace_member": true
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json b/src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json
new file mode 100644
index 000000000..5d500a472
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/is-proc-macro-project.json
@@ -0,0 +1,13 @@
+{
+ "sysroot_src": null,
+ "crates": [
+ {
+ "display_name": "is_proc_macro",
+ "root_module": "$ROOT$src/lib.rs",
+ "edition": "2018",
+ "deps": [],
+ "is_workspace_member": true,
+ "is_proc_macro": true
+ }
+ ]
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
new file mode 100644
index 000000000..07771d1b3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -0,0 +1,92 @@
+[package]
+name = "rust-analyzer"
+version = "0.0.0"
+authors = ["rust-analyzer Team"]
+homepage = "https://github.com/rust-analyzer/rust-analyzer"
+description = "A language server for the Rust programming language"
+documentation = "https://rust-analyzer.github.io/manual.html"
+license = "MIT OR Apache-2.0"
+autobins = false
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[[bin]]
+name = "rust-analyzer"
+path = "src/bin/main.rs"
+
+[dependencies]
+anyhow = "1.0.57"
+crossbeam-channel = "0.5.5"
+dissimilar = "1.0.4"
+itertools = "0.10.3"
+lsp-types = { version = "0.93.0", features = ["proposed"] }
+parking_lot = "0.12.1"
+xflags = "0.2.4"
+oorandom = "11.1.3"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = { version = "1.0.81", features = ["preserve_order"] }
+threadpool = "1.8.1"
+rayon = "1.5.3"
+num_cpus = "1.13.1"
+mimalloc = { version = "0.1.29", default-features = false, optional = true }
+lsp-server = { version = "0.6.0", path = "../../lib/lsp-server" }
+tracing = "0.1.35"
+tracing-subscriber = { version = "0.3.14", default-features = false, features = [
+ "env-filter",
+ "registry",
+ "fmt",
+ "tracing-log",
+] }
+tracing-log = "0.1.3"
+tracing-tree = "0.2.1"
+always-assert = "0.1.2"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+flycheck = { path = "../flycheck", version = "0.0.0" }
+ide = { path = "../ide", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+project-model = { path = "../project-model", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
+vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+
+# This should only be used in the CLI.
+ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+proc-macro-srv = { path = "../proc-macro-srv", version = "0.0.0" }
+
+[target.'cfg(windows)'.dependencies]
+winapi = "0.3.9"
+
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = true }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+jod-thread = "0.1.2"
+xshell = "0.2.2"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+mbe = { path = "../mbe" }
+
+[features]
+jemalloc = ["jemallocator", "profile/jemalloc"]
+force-always-assert = ["always-assert/force"]
+in-rust-tree = [
+ "proc-macro-srv/sysroot-abi",
+ "sourcegen/in-rust-tree",
+ "ide/in-rust-tree",
+ "syntax/in-rust-tree"
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/build.rs b/src/tools/rust-analyzer/crates/rust-analyzer/build.rs
new file mode 100644
index 000000000..15935e2da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/build.rs
@@ -0,0 +1,50 @@
+//! Construct version in the `commit-hash date channel` format
+
+use std::{env, path::PathBuf, process::Command};
+
+fn main() {
+ set_rerun();
+ set_commit_info();
+ if option_env!("CFG_RELEASE").is_none() {
+ println!("cargo:rustc-env=POKE_RA_DEVS=1");
+ }
+}
+
+fn set_rerun() {
+ println!("cargo:rerun-if-env-changed=CFG_RELEASE");
+
+ let mut manifest_dir = PathBuf::from(
+ env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."),
+ );
+
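+    // Walk up from the manifest dir until a `.git/HEAD` is found, so the build
+    // script re-runs whenever the checked-out commit changes.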
+ while manifest_dir.parent().is_some() {
+ let head_ref = manifest_dir.join(".git/HEAD");
+ if head_ref.exists() {
+ println!("cargo:rerun-if-changed={}", head_ref.display());
+ return;
+ }
+
+ manifest_dir.pop();
+ }
+
+ println!("cargo:warning=Could not find `.git/HEAD` from manifest dir!");
+}
+
+fn set_commit_info() {
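+    // `git log --format=%H %h %cd` prints the full hash, the short hash and the
+    // commit date; the three fields are split on whitespace below and exported
+    // as the `RA_COMMIT_*` env vars.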
+ let output = match Command::new("git")
+ .arg("log")
+ .arg("-1")
+ .arg("--date=short")
+ .arg("--format=%H %h %cd")
+ .output()
+ {
+ Ok(output) if output.status.success() => output,
+ _ => return,
+ };
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let mut parts = stdout.split_whitespace();
+ let mut next = || parts.next().unwrap();
+ println!("cargo:rustc-env=RA_COMMIT_HASH={}", next());
+ println!("cargo:rustc-env=RA_COMMIT_SHORT_HASH={}", next());
+ println!("cargo:rustc-env=RA_COMMIT_DATE={}", next())
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
new file mode 100644
index 000000000..0b69f75bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
@@ -0,0 +1,155 @@
+//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
+//! filter syntax and `tracing_appender` for non-blocking output.
+
+use std::{
+ fmt,
+ fs::File,
+ io::{self, Stderr},
+ sync::Arc,
+};
+
+use rust_analyzer::Result;
+use tracing::{level_filters::LevelFilter, Event, Subscriber};
+use tracing_log::NormalizeEvent;
+use tracing_subscriber::{
+ fmt::{
+ format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields,
+ FormattedFields, MakeWriter,
+ },
+ layer::SubscriberExt,
+ registry::LookupSpan,
+ util::SubscriberInitExt,
+ EnvFilter, Registry,
+};
+use tracing_tree::HierarchicalLayer;
+
+pub(crate) struct Logger {
+ filter: EnvFilter,
+ file: Option<File>,
+}
+
+struct MakeWriterStderr;
+
+impl<'a> MakeWriter<'a> for MakeWriterStderr {
+ type Writer = Stderr;
+
+ fn make_writer(&'a self) -> Self::Writer {
+ io::stderr()
+ }
+}
+
+impl Logger {
+ pub(crate) fn new(file: Option<File>, filter: Option<&str>) -> Logger {
+ let filter = filter.map_or(EnvFilter::default(), EnvFilter::new);
+
+ Logger { filter, file }
+ }
+
+ pub(crate) fn install(self) -> Result<()> {
+        // CHALK_DEBUG is presumably meant to tell the chalk crates
+        // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
+        // logs. But since we can only have one filter, we have to merge the chalk
+        // filter into our main filter (from the RA_LOG env var).
+        //
+        // The general filter syntax is `target[span{field=value}]=level`.
+        // As the value should only affect the chalk crates, we manually specify
+        // the targets, and for simplicity CHALK_DEBUG only accepts a plain log
+        // level (like `debug` or `trace`).
+ let chalk_level_dir = std::env::var("CHALK_DEBUG")
+ .map(|val| {
+ val.parse::<LevelFilter>().expect(
+ "invalid CHALK_DEBUG value, expect right log level (like debug or trace)",
+ )
+ })
+ .ok();
+
+ let chalk_layer = HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(io::stderr);
+
+ let writer = match self.file {
+ Some(file) => BoxMakeWriter::new(Arc::new(file)),
+ None => BoxMakeWriter::new(io::stderr),
+ };
+ let ra_fmt_layer =
+ tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer);
+
+ match chalk_level_dir {
+ Some(val) => {
+ Registry::default()
+ .with(
+ self.filter
+ .add_directive(format!("chalk_solve={}", val).parse()?)
+ .add_directive(format!("chalk_ir={}", val).parse()?)
+ .add_directive(format!("chalk_recursive={}", val).parse()?),
+ )
+ .with(ra_fmt_layer)
+ .with(chalk_layer)
+ .init();
+ }
+ None => {
+ Registry::default().with(self.filter).with(ra_fmt_layer).init();
+ }
+ };
+
+ Ok(())
+ }
+}
+
+#[derive(Debug)]
+struct LoggerFormatter;
+
+impl<S, N> FormatEvent<S, N> for LoggerFormatter
+where
+ S: Subscriber + for<'a> LookupSpan<'a>,
+ N: for<'a> FormatFields<'a> + 'static,
+{
+ fn format_event(
+ &self,
+ ctx: &FmtContext<'_, S, N>,
+ mut writer: Writer<'_>,
+ event: &Event<'_>,
+ ) -> fmt::Result {
+ // Write level and target
+ let level = *event.metadata().level();
+
+        // If this event is issued from the `log` crate, then the value of target is
+        // always "log". `tracing-log` has hard-coded it for some reason, so we
+        // need to extract it using the `normalized_metadata` method, which is part
+        // of `tracing_log::NormalizeEvent`.
+ let target = match event.normalized_metadata() {
+ // This event is issued from `log` crate
+ Some(log) => log.target(),
+ None => event.metadata().target(),
+ };
+ write!(writer, "[{} {}] ", level, target)?;
+
+ // Write spans and fields of each span
+ ctx.visit_spans(|span| {
+ write!(writer, "{}", span.name())?;
+
+ let ext = span.extensions();
+
+            // `FormattedFields` is a formatted representation of the span's
+ // fields, which is stored in its extensions by the `fmt` layer's
+ // `new_span` method. The fields will have been formatted
+ // by the same field formatter that's provided to the event
+ // formatter in the `FmtContext`.
+ let fields = &ext.get::<FormattedFields<N>>().expect("will never be `None`");
+
+ if !fields.is_empty() {
+ write!(writer, "{{{}}}", fields)?;
+ }
+ write!(writer, ": ")?;
+
+ Ok(())
+ })?;
+
+ // Write fields on the event
+ ctx.field_format().format_fields(writer.by_ref(), event)?;
+
+ writeln!(writer)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
new file mode 100644
index 000000000..e9de23cb3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -0,0 +1,239 @@
+//! Driver for rust-analyzer.
+//!
+//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod logger;
+mod rustc_wrapper;
+
+use std::{env, fs, path::Path, process};
+
+use lsp_server::Connection;
+use project_model::ProjectManifest;
+use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
+use vfs::AbsPathBuf;
+
+#[cfg(feature = "mimalloc")]
+#[global_allocator]
+static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
+
+#[cfg(all(feature = "jemalloc", not(target_env = "msvc")))]
+#[global_allocator]
+static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
+
+fn main() {
+ if std::env::var("RA_RUSTC_WRAPPER").is_ok() {
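+        // Invoked as a RUSTC_WRAPPER: forward to the real rustc, or skip the build
+        // entirely for `cargo check`-style metadata invocations (see rustc_wrapper.rs).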
+ let mut args = std::env::args_os();
+ let _me = args.next().unwrap();
+ let rustc = args.next().unwrap();
+ let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) {
+ Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102),
+ Err(err) => {
+ eprintln!("{}", err);
+ 101
+ }
+ };
+ process::exit(code);
+ }
+
+ if let Err(err) = try_main() {
+ tracing::error!("Unexpected error: {}", err);
+ eprintln!("{}", err);
+ process::exit(101);
+ }
+}
+
+fn try_main() -> Result<()> {
+ let flags = flags::RustAnalyzer::from_env()?;
+
+ #[cfg(debug_assertions)]
+ if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
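+        // Spin here until a debugger attaches and changes `d` to break the loop.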
+ #[allow(unused_mut)]
+ let mut d = 4;
+ while d == 4 {
+ d = 4;
+ }
+ }
+
+ let mut log_file = flags.log_file.as_deref();
+
+ let env_log_file = env::var("RA_LOG_FILE").ok();
+ if let Some(env_log_file) = env_log_file.as_deref() {
+ log_file = Some(Path::new(env_log_file));
+ }
+
+ setup_logging(log_file)?;
+ let verbosity = flags.verbosity();
+
+ match flags.subcommand {
+ flags::RustAnalyzerCmd::LspServer(cmd) => {
+ if cmd.print_config_schema {
+ println!("{:#}", Config::json_schema());
+ return Ok(());
+ }
+ if cmd.version {
+ println!("rust-analyzer {}", rust_analyzer::version());
+ return Ok(());
+ }
+ if cmd.help {
+ println!("{}", flags::RustAnalyzer::HELP);
+ return Ok(());
+ }
+ with_extra_thread("LspServer", run_server)?;
+ }
+ flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {
+ with_extra_thread("MacroExpander", || proc_macro_srv::cli::run().map_err(Into::into))?;
+ }
+ flags::RustAnalyzerCmd::Parse(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Symbols(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Highlight(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::AnalysisStats(cmd) => cmd.run(verbosity)?,
+ flags::RustAnalyzerCmd::Diagnostics(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
+ }
+ Ok(())
+}
+
+fn setup_logging(log_file: Option<&Path>) -> Result<()> {
+ if cfg!(windows) {
+        // This is required so that Windows finds our pdb, which is placed right beside the exe.
+        // By default it doesn't look in the folder the exe resides in, only in the current working
+        // directory, which we set to the project workspace.
+ // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/general-environment-variables
+ // https://docs.microsoft.com/en-us/windows/win32/api/dbghelp/nf-dbghelp-syminitialize
+ if let Ok(path) = env::current_exe() {
+ if let Some(path) = path.parent() {
+ env::set_var("_NT_SYMBOL_PATH", path);
+ }
+ }
+ }
+ if env::var("RUST_BACKTRACE").is_err() {
+ env::set_var("RUST_BACKTRACE", "short");
+ }
+
+ let log_file = match log_file {
+ Some(path) => {
+ if let Some(parent) = path.parent() {
+ let _ = fs::create_dir_all(parent);
+ }
+ Some(fs::File::create(path)?)
+ }
+ None => None,
+ };
+ let filter = env::var("RA_LOG").ok();
+ // deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually useful
+ // information in there for debugging
+ logger::Logger::new(log_file, filter.as_deref().or(Some("error"))).install()?;
+
+ profile::init();
+
+ Ok(())
+}
+
+const STACK_SIZE: usize = 1024 * 1024 * 8;
+
+/// Parts of rust-analyzer can use a lot of stack space, and some operating systems only give us
+/// 1 MB by default (e.g. Windows), so this spawns a new thread with hopefully sufficient stack
+/// space.
+fn with_extra_thread(
+ thread_name: impl Into<String>,
+ f: impl FnOnce() -> Result<()> + Send + 'static,
+) -> Result<()> {
+ let handle =
+ std::thread::Builder::new().name(thread_name.into()).stack_size(STACK_SIZE).spawn(f)?;
+ match handle.join() {
+ Ok(res) => res,
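+        // Propagate a panic from the worker thread into the current thread.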
+ Err(panic) => std::panic::resume_unwind(panic),
+ }
+}
+
+fn run_server() -> Result<()> {
+ tracing::info!("server version {} will start", rust_analyzer::version());
+
+ let (connection, io_threads) = Connection::stdio();
+
+ let (initialize_id, initialize_params) = connection.initialize_start()?;
+ tracing::info!("InitializeParams: {}", initialize_params);
+ let initialize_params =
+ from_json::<lsp_types::InitializeParams>("InitializeParams", &initialize_params)?;
+
+ let root_path = match initialize_params
+ .root_uri
+ .and_then(|it| it.to_file_path().ok())
+ .and_then(|it| AbsPathBuf::try_from(it).ok())
+ {
+ Some(it) => it,
+ None => {
+ let cwd = env::current_dir()?;
+ AbsPathBuf::assert(cwd)
+ }
+ };
+
+ let mut config = Config::new(root_path, initialize_params.capabilities);
+ if let Some(json) = initialize_params.initialization_options {
+ if let Err(e) = config.update(json) {
+ use lsp_types::{
+ notification::{Notification, ShowMessage},
+ MessageType, ShowMessageParams,
+ };
+ let not = lsp_server::Notification::new(
+ ShowMessage::METHOD.to_string(),
+ ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() },
+ );
+ connection.sender.send(lsp_server::Message::Notification(not)).unwrap();
+ }
+ }
+
+ let server_capabilities = rust_analyzer::server_capabilities(&config);
+
+ let initialize_result = lsp_types::InitializeResult {
+ capabilities: server_capabilities,
+ server_info: Some(lsp_types::ServerInfo {
+ name: String::from("rust-analyzer"),
+ version: Some(rust_analyzer::version().to_string()),
+ }),
+ offset_encoding: if supports_utf8(config.caps()) {
+ Some("utf-8".to_string())
+ } else {
+ None
+ },
+ };
+
+ let initialize_result = serde_json::to_value(initialize_result).unwrap();
+
+ connection.initialize_finish(initialize_id, initialize_result)?;
+
+ if let Some(client_info) = initialize_params.client_info {
+ tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
+ }
+
+ if config.linked_projects().is_empty() && config.detached_files().is_empty() {
+ let workspace_roots = initialize_params
+ .workspace_folders
+ .map(|workspaces| {
+ workspaces
+ .into_iter()
+ .filter_map(|it| it.uri.to_file_path().ok())
+ .filter_map(|it| AbsPathBuf::try_from(it).ok())
+ .collect::<Vec<_>>()
+ })
+ .filter(|workspaces| !workspaces.is_empty())
+ .unwrap_or_else(|| vec![config.root_path().clone()]);
+
+ let discovered = ProjectManifest::discover_all(&workspace_roots);
+ tracing::info!("discovered projects: {:?}", discovered);
+ if discovered.is_empty() {
+ tracing::error!("failed to find any projects in {:?}", workspace_roots);
+ }
+ config.discovered_projects = Some(discovered);
+ }
+
+ rust_analyzer::main_loop(config, connection)?;
+
+ io_threads.join()?;
+ tracing::info!("server did shut down");
+ Ok(())
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs
new file mode 100644
index 000000000..2f6d4706d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/rustc_wrapper.rs
@@ -0,0 +1,46 @@
+//! We set up RUSTC_WRAPPER to point to the `rust-analyzer` binary itself during the
+//! initial `cargo check`. That way, we avoid checking the actual project, and
+//! only build proc macros and build.rs.
+//!
+//! Code taken from IntelliJ :0)
+//! https://github.com/intellij-rust/intellij-rust/blob/master/native-helper/src/main.rs
+use std::{
+ ffi::OsString,
+ io,
+ process::{Command, Stdio},
+};
+
+/// ExitCode/ExitStatus are impossible to create :(.
+pub(crate) struct ExitCode(pub(crate) Option<i32>);
+
+pub(crate) fn run_rustc_skipping_cargo_checking(
+ rustc_executable: OsString,
+ args: Vec<OsString>,
+) -> io::Result<ExitCode> {
+ let is_cargo_check = args.iter().any(|arg| {
+ let arg = arg.to_string_lossy();
+        // `cargo check` invokes `rustc` with the `--emit=metadata` argument.
+ //
+ // https://doc.rust-lang.org/rustc/command-line-arguments.html#--emit-specifies-the-types-of-output-files-to-generate
+ // link — Generates the crates specified by --crate-type. The default
+ // output filenames depend on the crate type and platform. This
+ // is the default if --emit is not specified.
+ // metadata — Generates a file containing metadata about the crate.
+ // The default output filename is CRATE_NAME.rmeta.
+ arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link")
+ });
+ if is_cargo_check {
+ return Ok(ExitCode(Some(0)));
+ }
+ run_rustc(rustc_executable, args)
+}
+
+fn run_rustc(rustc_executable: OsString, args: Vec<OsString>) -> io::Result<ExitCode> {
+ let mut child = Command::new(rustc_executable)
+ .args(args)
+ .stdin(Stdio::inherit())
+ .stdout(Stdio::inherit())
+ .stderr(Stdio::inherit())
+ .spawn()?;
+ Ok(ExitCode(child.wait()?.code()))
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
new file mode 100644
index 000000000..cda95cd86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
@@ -0,0 +1,210 @@
+//! Advertises the capabilities of the LSP Server.
+use lsp_types::{
+ CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions,
+ CodeActionProviderCapability, CodeLensOptions, CompletionOptions,
+ CompletionOptionsCompletionItem, DeclarationCapability, DocumentOnTypeFormattingOptions,
+ FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
+ FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability,
+ ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf,
+ RenameOptions, SaveOptions, SelectionRangeProviderCapability, SemanticTokensFullOptions,
+ SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions,
+ TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
+ TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+ WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
+};
+use serde_json::json;
+
+use crate::config::{Config, RustfmtConfig};
+use crate::semantic_tokens;
+
+pub fn server_capabilities(config: &Config) -> ServerCapabilities {
+ ServerCapabilities {
+ text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
+ open_close: Some(true),
+ change: Some(TextDocumentSyncKind::INCREMENTAL),
+ will_save: None,
+ will_save_wait_until: None,
+ save: Some(SaveOptions::default().into()),
+ })),
+ hover_provider: Some(HoverProviderCapability::Simple(true)),
+ completion_provider: Some(CompletionOptions {
+ resolve_provider: completions_resolve_provider(config.caps()),
+ trigger_characters: Some(vec![
+ ":".to_string(),
+ ".".to_string(),
+ "'".to_string(),
+ "(".to_string(),
+ ]),
+ all_commit_characters: None,
+ completion_item: completion_item(&config),
+ work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
+ }),
+ signature_help_provider: Some(SignatureHelpOptions {
+ trigger_characters: Some(vec!["(".to_string(), ",".to_string(), "<".to_string()]),
+ retrigger_characters: None,
+ work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
+ }),
+ declaration_provider: Some(DeclarationCapability::Simple(true)),
+ definition_provider: Some(OneOf::Left(true)),
+ type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)),
+ implementation_provider: Some(ImplementationProviderCapability::Simple(true)),
+ references_provider: Some(OneOf::Left(true)),
+ document_highlight_provider: Some(OneOf::Left(true)),
+ document_symbol_provider: Some(OneOf::Left(true)),
+ workspace_symbol_provider: Some(OneOf::Left(true)),
+ code_action_provider: Some(code_action_capabilities(config.caps())),
+ code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }),
+ document_formatting_provider: Some(OneOf::Left(true)),
+ document_range_formatting_provider: match config.rustfmt() {
+ RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } => Some(OneOf::Left(true)),
+ _ => Some(OneOf::Left(false)),
+ },
+ document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
+ first_trigger_character: "=".to_string(),
+ more_trigger_character: Some(more_trigger_character(&config)),
+ }),
+ selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
+ folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
+ rename_provider: Some(OneOf::Right(RenameOptions {
+ prepare_provider: Some(true),
+ work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
+ })),
+ linked_editing_range_provider: None,
+ document_link_provider: None,
+ color_provider: None,
+ execute_command_provider: None,
+ workspace: Some(WorkspaceServerCapabilities {
+ workspace_folders: None,
+ file_operations: Some(WorkspaceFileOperationsServerCapabilities {
+ did_create: None,
+ will_create: None,
+ did_rename: None,
+ will_rename: Some(FileOperationRegistrationOptions {
+ filters: vec![
+ FileOperationFilter {
+ scheme: Some(String::from("file")),
+ pattern: FileOperationPattern {
+ glob: String::from("**/*.rs"),
+ matches: Some(FileOperationPatternKind::File),
+ options: None,
+ },
+ },
+ FileOperationFilter {
+ scheme: Some(String::from("file")),
+ pattern: FileOperationPattern {
+ glob: String::from("**"),
+ matches: Some(FileOperationPatternKind::Folder),
+ options: None,
+ },
+ },
+ ],
+ }),
+ did_delete: None,
+ will_delete: None,
+ }),
+ }),
+ call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
+ semantic_tokens_provider: Some(
+ SemanticTokensOptions {
+ legend: SemanticTokensLegend {
+ token_types: semantic_tokens::SUPPORTED_TYPES.to_vec(),
+ token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
+ },
+
+ full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
+ range: Some(true),
+ work_done_progress_options: Default::default(),
+ }
+ .into(),
+ ),
+ moniker_provider: None,
+ inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
+ InlayHintOptions {
+ work_done_progress_options: Default::default(),
+ resolve_provider: Some(true),
+ },
+ ))),
+ experimental: Some(json!({
+ "externalDocs": true,
+ "hoverRange": true,
+ "joinLines": true,
+ "matchingBrace": true,
+ "moveItem": true,
+ "onEnter": true,
+ "openCargoToml": true,
+ "parentModule": true,
+ "runnables": {
+ "kinds": [ "cargo" ],
+ },
+ "ssr": true,
+ "workspaceSymbolScopeKindFiltering": true,
+ })),
+ }
+}
+
+fn completions_resolve_provider(client_caps: &ClientCapabilities) -> Option<bool> {
+ if completion_item_edit_resolve(client_caps) {
+ Some(true)
+ } else {
+ tracing::info!("No `additionalTextEdits` completion resolve capability was found in the client capabilities, autoimport completion is disabled");
+ None
+ }
+}
+
+/// Parses the client capabilities and returns whether they advertise the `additionalTextEdits` completion resolve capability.
+pub(crate) fn completion_item_edit_resolve(caps: &ClientCapabilities) -> bool {
+ (|| {
+ Some(
+ caps.text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .resolve_support
+ .as_ref()?
+ .properties
+ .iter()
+ .any(|cap_string| cap_string.as_str() == "additionalTextEdits"),
+ )
+ })() == Some(true)
+}
+
+fn completion_item(config: &Config) -> Option<CompletionOptionsCompletionItem> {
+ Some(CompletionOptionsCompletionItem {
+ label_details_support: Some(config.completion_label_details_support()),
+ })
+}
+
+fn code_action_capabilities(client_caps: &ClientCapabilities) -> CodeActionProviderCapability {
+ client_caps
+ .text_document
+ .as_ref()
+ .and_then(|it| it.code_action.as_ref())
+ .and_then(|it| it.code_action_literal_support.as_ref())
+ .map_or(CodeActionProviderCapability::Simple(true), |_| {
+ CodeActionProviderCapability::Options(CodeActionOptions {
+ // Advertise support for all built-in CodeActionKinds.
+ // Ideally we would base this off of the client capabilities
+ // but the client is supposed to fall back gracefully for unknown values.
+ code_action_kinds: Some(vec![
+ CodeActionKind::EMPTY,
+ CodeActionKind::QUICKFIX,
+ CodeActionKind::REFACTOR,
+ CodeActionKind::REFACTOR_EXTRACT,
+ CodeActionKind::REFACTOR_INLINE,
+ CodeActionKind::REFACTOR_REWRITE,
+ ]),
+ resolve_provider: Some(true),
+ work_done_progress_options: Default::default(),
+ })
+ })
+}
+
+fn more_trigger_character(config: &Config) -> Vec<String> {
+ let mut res = vec![".".to_string(), ">".to_string(), "{".to_string()];
+ if config.snippet_cap() {
+ res.push("<".to_string());
+ }
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
new file mode 100644
index 000000000..1c39e9391
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -0,0 +1,228 @@
+//! See `CargoTargetSpec`
+
+use std::mem;
+
+use cfg::{CfgAtom, CfgExpr};
+use ide::{FileId, RunnableKind, TestId};
+use project_model::{self, ManifestPath, TargetKind};
+use vfs::AbsPathBuf;
+
+use crate::{global_state::GlobalStateSnapshot, Result};
+
+/// Abstract representation of a Cargo target.
+///
+/// We use it to cook up the set of cli args we need to pass to Cargo to
+/// build/test/run the target.
+#[derive(Clone)]
+pub(crate) struct CargoTargetSpec {
+ pub(crate) workspace_root: AbsPathBuf,
+ pub(crate) cargo_toml: ManifestPath,
+ pub(crate) package: String,
+ pub(crate) target: String,
+ pub(crate) target_kind: TargetKind,
+ pub(crate) required_features: Vec<String>,
+}
+
+impl CargoTargetSpec {
+ pub(crate) fn runnable_args(
+ snap: &GlobalStateSnapshot,
+ spec: Option<CargoTargetSpec>,
+ kind: &RunnableKind,
+ cfg: &Option<CfgExpr>,
+ ) -> Result<(Vec<String>, Vec<String>)> {
+ let mut args = Vec::new();
+ let mut extra_args = Vec::new();
+
+ match kind {
+ RunnableKind::Test { test_id, attr } => {
+ args.push("test".to_string());
+ extra_args.push(test_id.to_string());
+ if let TestId::Path(_) = test_id {
+ extra_args.push("--exact".to_string());
+ }
+ extra_args.push("--nocapture".to_string());
+ if attr.ignore {
+ extra_args.push("--ignored".to_string());
+ }
+ }
+ RunnableKind::TestMod { path } => {
+ args.push("test".to_string());
+ extra_args.push(path.to_string());
+ extra_args.push("--nocapture".to_string());
+ }
+ RunnableKind::Bench { test_id } => {
+ args.push("bench".to_string());
+ extra_args.push(test_id.to_string());
+ if let TestId::Path(_) = test_id {
+ extra_args.push("--exact".to_string());
+ }
+ extra_args.push("--nocapture".to_string());
+ }
+ RunnableKind::DocTest { test_id } => {
+ args.push("test".to_string());
+ args.push("--doc".to_string());
+ extra_args.push(test_id.to_string());
+ extra_args.push("--nocapture".to_string());
+ }
+ RunnableKind::Bin => {
+ let subcommand = match spec {
+ Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
+ _ => "run",
+ };
+ args.push(subcommand.to_string());
+ }
+ }
+
+ let target_required_features = if let Some(mut spec) = spec {
+ let required_features = mem::take(&mut spec.required_features);
+ spec.push_to(&mut args, kind);
+ required_features
+ } else {
+ Vec::new()
+ };
+
+ let cargo_config = snap.config.cargo();
+ if cargo_config.all_features {
+ args.push("--all-features".to_string());
+
+ for feature in target_required_features {
+ args.push("--features".to_string());
+ args.push(feature);
+ }
+ } else {
+ let mut features = Vec::new();
+ if let Some(cfg) = cfg.as_ref() {
+ required_features(cfg, &mut features);
+ }
+
+ features.extend(cargo_config.features);
+ features.extend(target_required_features);
+
+ features.dedup();
+ for feature in features {
+ args.push("--features".to_string());
+ args.push(feature);
+ }
+ }
+
+ Ok((args, extra_args))
+ }
+
+ pub(crate) fn for_file(
+ global_state_snapshot: &GlobalStateSnapshot,
+ file_id: FileId,
+ ) -> Result<Option<CargoTargetSpec>> {
+ let crate_id = match &*global_state_snapshot.analysis.crate_for(file_id)? {
+ &[crate_id, ..] => crate_id,
+ _ => return Ok(None),
+ };
+ let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_crate_root(crate_id) {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ let target_data = &cargo_ws[target];
+ let package_data = &cargo_ws[target_data.package];
+ let res = CargoTargetSpec {
+ workspace_root: cargo_ws.workspace_root().to_path_buf(),
+ cargo_toml: package_data.manifest.clone(),
+ package: cargo_ws.package_flag(package_data),
+ target: target_data.name.clone(),
+ target_kind: target_data.kind,
+ required_features: target_data.required_features.clone(),
+ };
+
+ Ok(Some(res))
+ }
+
+ pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
+ buf.push("--package".to_string());
+ buf.push(self.package);
+
+ // Can't mix --doc with other target flags
+ if let RunnableKind::DocTest { .. } = kind {
+ return;
+ }
+ match self.target_kind {
+ TargetKind::Bin => {
+ buf.push("--bin".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Test => {
+ buf.push("--test".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Bench => {
+ buf.push("--bench".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Example => {
+ buf.push("--example".to_string());
+ buf.push(self.target);
+ }
+ TargetKind::Lib => {
+ buf.push("--lib".to_string());
+ }
+ TargetKind::Other | TargetKind::BuildScript => (),
+ }
+ }
+}
+
+/// Fills `features` with the minimal set of features needed to satisfy `cfg_expr`.
+fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
+ match cfg_expr {
+ CfgExpr::Atom(CfgAtom::KeyValue { key, value }) if key == "feature" => {
+ features.push(value.to_string())
+ }
+ CfgExpr::All(preds) => {
+ preds.iter().for_each(|cfg| required_features(cfg, features));
+ }
+ CfgExpr::Any(preds) => {
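+            // For `any()`, satisfying a single alternative is enough, so stop after
+            // the first predicate that contributes features.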
+ for cfg in preds {
+ let len_features = features.len();
+ required_features(cfg, features);
+ if len_features != features.len() {
+ break;
+ }
+ }
+ }
+ _ => {}
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use cfg::CfgExpr;
+ use mbe::syntax_node_to_token_tree;
+ use syntax::{
+ ast::{self, AstNode},
+ SmolStr,
+ };
+
+ fn check(cfg: &str, expected_features: &[&str]) {
+ let cfg_expr = {
+ let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let (tt, _) = syntax_node_to_token_tree(tt.syntax());
+ CfgExpr::parse(&tt)
+ };
+
+ let mut features = vec![];
+ required_features(&cfg_expr, &mut features);
+
+ let expected_features =
+ expected_features.iter().map(|&it| SmolStr::new(it)).collect::<Vec<_>>();
+
+ assert_eq!(features, expected_features);
+ }
+
+ #[test]
+ fn test_cfg_expr_minimal_features_needed() {
+ check(r#"#![cfg(feature = "baz")]"#, &["baz"]);
+ check(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#, &["baz", "foo"]);
+ check(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#, &["baz"]);
+ check(r#"#![cfg(foo)]"#, &[]);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
new file mode 100644
index 000000000..6ccdaa86d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -0,0 +1,69 @@
+//! Various batch processing tasks, intended primarily for debugging.
+
+pub mod flags;
+pub mod load_cargo;
+mod parse;
+mod symbols;
+mod highlight;
+mod analysis_stats;
+mod diagnostics;
+mod ssr;
+mod lsif;
+
+mod progress_report;
+
+use std::io::Read;
+
+use anyhow::Result;
+use ide::AnalysisHost;
+use vfs::Vfs;
+
+#[derive(Clone, Copy)]
+pub enum Verbosity {
+ Spammy,
+ Verbose,
+ Normal,
+ Quiet,
+}
+
+impl Verbosity {
+ pub fn is_verbose(self) -> bool {
+ matches!(self, Verbosity::Verbose | Verbosity::Spammy)
+ }
+ pub fn is_spammy(self) -> bool {
+ matches!(self, Verbosity::Spammy)
+ }
+}
+
+fn read_stdin() -> Result<String> {
+ let mut buff = String::new();
+ std::io::stdin().read_to_string(&mut buff)?;
+ Ok(buff)
+}
+
+fn report_metric(metric: &str, value: u64, unit: &str) {
+ if std::env::var("RA_METRICS").is_err() {
+ return;
+ }
+ println!("METRIC:{}:{}:{}", metric, value, unit)
+}
+
+fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
+ let mut mem = host.per_query_memory_usage();
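+    // Attribute the remaining memory by dropping each component in turn and
+    // measuring how much the global allocation counter shrinks.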
+
+ let before = profile::memory_usage();
+ drop(vfs);
+ let vfs = before.allocated - profile::memory_usage().allocated;
+ mem.push(("VFS".into(), vfs));
+
+ let before = profile::memory_usage();
+ drop(host);
+ mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
+
+ mem.push(("Remaining".into(), profile::memory_usage().allocated));
+
+ for (name, bytes) in mem {
+ // NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
+ eprintln!("{:>8} {}", bytes, name);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
new file mode 100644
index 000000000..f52e1e751
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -0,0 +1,447 @@
+//! Fully type-check project and print various stats, like the number of type
+//! errors.
+
+use std::{
+ env,
+ time::{SystemTime, UNIX_EPOCH},
+};
+
+use hir::{
+ db::{AstDatabase, DefDatabase, HirDatabase},
+ AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef,
+};
+use hir_def::{
+ body::{BodySourceMap, SyntheticSyntax},
+ expr::ExprId,
+ FunctionId,
+};
+use hir_ty::{TyExt, TypeWalk};
+use ide::{Analysis, AnalysisHost, LineCol, RootDatabase};
+use ide_db::base_db::{
+ salsa::{self, debug::DebugQueryTable, ParallelDatabase},
+ SourceDatabase, SourceDatabaseExt,
+};
+use itertools::Itertools;
+use oorandom::Rand32;
+use profile::{Bytes, StopWatch};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use rayon::prelude::*;
+use rustc_hash::FxHashSet;
+use stdx::format_to;
+use syntax::{AstNode, SyntaxNode};
+use vfs::{AbsPathBuf, Vfs, VfsPath};
+
+use crate::cli::{
+ flags::{self, OutputFormat},
+ load_cargo::{load_workspace, LoadCargoConfig},
+ print_memory_usage,
+ progress_report::ProgressReport,
+ report_metric, Result, Verbosity,
+};
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+ fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+ Snap(self.0.snapshot())
+ }
+}
+
+impl flags::AnalysisStats {
+ pub fn run(self, verbosity: Verbosity) -> Result<()> {
+ let mut rng = {
+ let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
+ Rand32::new(seed)
+ };
+
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.no_sysroot = self.no_sysroot;
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: !self.disable_build_scripts,
+ with_proc_macro: !self.disable_proc_macros,
+ prefill_caches: false,
+ };
+ let no_progress = &|_| ();
+
+ let mut db_load_sw = self.stop_watch();
+
+ let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path));
+ let manifest = ProjectManifest::discover_single(&path)?;
+
+ let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+ let metadata_time = db_load_sw.elapsed();
+
+ let build_scripts_time = if self.disable_build_scripts {
+ None
+ } else {
+ let mut build_scripts_sw = self.stop_watch();
+ let bs = workspace.run_build_scripts(&cargo_config, no_progress)?;
+ workspace.set_build_scripts(bs);
+ Some(build_scripts_sw.elapsed())
+ };
+
+ let (host, vfs, _proc_macro) = load_workspace(workspace, &load_cargo_config)?;
+ let db = host.raw_database();
+ eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
+ eprint!(" (metadata {}", metadata_time);
+ if let Some(build_scripts_time) = build_scripts_time {
+ eprint!("; build {}", build_scripts_time);
+ }
+ eprintln!(")");
+
+ let mut analysis_sw = self.stop_watch();
+ let mut num_crates = 0;
+ let mut visited_modules = FxHashSet::default();
+ let mut visit_queue = Vec::new();
+
+ let mut krates = Crate::all(db);
+ if self.randomize {
+ shuffle(&mut rng, &mut krates);
+ }
+ for krate in krates {
+ let module = krate.root_module(db);
+ let file_id = module.definition_source(db).file_id;
+ let file_id = file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ if !source_root.is_library || self.with_deps {
+ num_crates += 1;
+ visit_queue.push(module);
+ }
+ }
+
+ if self.randomize {
+ shuffle(&mut rng, &mut visit_queue);
+ }
+
+ eprint!(" crates: {}", num_crates);
+ let mut num_decls = 0;
+ let mut funcs = Vec::new();
+ while let Some(module) = visit_queue.pop() {
+ if visited_modules.insert(module) {
+ visit_queue.extend(module.children(db));
+
+ for decl in module.declarations(db) {
+ num_decls += 1;
+ if let ModuleDef::Function(f) = decl {
+ funcs.push(f);
+ }
+ }
+
+ for impl_def in module.impl_defs(db) {
+ for item in impl_def.items(db) {
+ num_decls += 1;
+ if let AssocItem::Function(f) = item {
+ funcs.push(f);
+ }
+ }
+ }
+ }
+ }
+ eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len());
+ eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
+
+ if self.randomize {
+ shuffle(&mut rng, &mut funcs);
+ }
+
+ if !self.skip_inference {
+ self.run_inference(&host, db, &vfs, &funcs, verbosity);
+ }
+
+ let total_span = analysis_sw.elapsed();
+ eprintln!("{:<20} {}", "Total:", total_span);
+ report_metric("total time", total_span.time.as_millis() as u64, "ms");
+ if let Some(instructions) = total_span.instructions {
+ report_metric("total instructions", instructions, "#instr");
+ }
+ if let Some(memory) = total_span.memory {
+ report_metric("total memory", memory.allocated.megabytes() as u64, "MB");
+ }
+
+ if env::var("RA_COUNT").is_ok() {
+ eprintln!("{}", profile::countme::get_all());
+ }
+
+ if self.source_stats {
+ let mut total_file_size = Bytes::default();
+ for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
+ total_file_size += syntax_len(db.parse(e.key).syntax_node())
+ }
+
+ let mut total_macro_file_size = Bytes::default();
+ for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
+ if let Some((val, _)) = db.parse_macro_expansion(e.key).value {
+ total_macro_file_size += syntax_len(val.syntax_node())
+ }
+ }
+ eprintln!("source files: {}, macro files: {}", total_file_size, total_macro_file_size);
+ }
+
+ if self.memory_usage && verbosity.is_verbose() {
+ print_memory_usage(host, vfs);
+ }
+
+ Ok(())
+ }
+
+ fn run_inference(
+ &self,
+ host: &AnalysisHost,
+ db: &RootDatabase,
+ vfs: &Vfs,
+ funcs: &[Function],
+ verbosity: Verbosity,
+ ) {
+ let mut bar = match verbosity {
+ Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+ _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
+ _ => ProgressReport::new(funcs.len() as u64),
+ };
+
+ if self.parallel {
+ let mut inference_sw = self.stop_watch();
+ let snap = Snap(db.snapshot());
+ funcs
+ .par_iter()
+ .map_with(snap, |snap, &f| {
+ let f_id = FunctionId::from(f);
+ snap.0.body(f_id.into());
+ snap.0.infer(f_id.into());
+ })
+ .count();
+ eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
+ }
+
+ let mut inference_sw = self.stop_watch();
+ bar.tick();
+ let mut num_exprs = 0;
+ let mut num_exprs_unknown = 0;
+ let mut num_exprs_partially_unknown = 0;
+ let mut num_type_mismatches = 0;
+ let analysis = host.analysis();
+ for f in funcs.iter().copied() {
+ let name = f.name(db);
+ let full_name = f
+ .module(db)
+ .path_to_root(db)
+ .into_iter()
+ .rev()
+ .filter_map(|it| it.name(db))
+ .chain(Some(f.name(db)))
+ .join("::");
+ if let Some(only_name) = self.only.as_deref() {
+ if name.to_string() != only_name && full_name != only_name {
+ continue;
+ }
+ }
+ let mut msg = format!("processing: {}", full_name);
+ if verbosity.is_verbose() {
+ if let Some(src) = f.source(db) {
+ let original_file = src.file_id.original_file(db);
+ let path = vfs.file_path(original_file);
+ let syntax_range = src.value.syntax().text_range();
+ format_to!(msg, " ({} {:?})", path, syntax_range);
+ }
+ }
+ if verbosity.is_spammy() {
+ bar.println(msg.to_string());
+ }
+ bar.set_message(&msg);
+ let f_id = FunctionId::from(f);
+ let (body, sm) = db.body_with_source_map(f_id.into());
+ let inference_result = db.infer(f_id.into());
+ let (previous_exprs, previous_unknown, previous_partially_unknown) =
+ (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
+ for (expr_id, _) in body.exprs.iter() {
+ let ty = &inference_result[expr_id];
+ num_exprs += 1;
+ let unknown_or_partial = if ty.is_unknown() {
+ num_exprs_unknown += 1;
+ if verbosity.is_spammy() {
+ if let Some((path, start, end)) =
+ expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
+ {
+ bar.println(format!(
+ "{} {}:{}-{}:{}: Unknown type",
+ path,
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ ));
+ } else {
+ bar.println(format!("{}: Unknown type", name,));
+ }
+ }
+ true
+ } else {
+ let mut is_partially_unknown = false;
+ ty.walk(&mut |ty| {
+ if ty.is_unknown() {
+ is_partially_unknown = true;
+ }
+ });
+ if is_partially_unknown {
+ num_exprs_partially_unknown += 1;
+ }
+ is_partially_unknown
+ };
+ if self.only.is_some() && verbosity.is_spammy() {
+ // in super-verbose mode for just one function, we print every single expression
+ if let Some((_, start, end)) =
+ expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
+ {
+ bar.println(format!(
+ "{}:{}-{}:{}: {}",
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ ty.display(db)
+ ));
+ } else {
+ bar.println(format!("unknown location: {}", ty.display(db)));
+ }
+ }
+ if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
+ println!(
+ r#"{},type,"{}""#,
+ location_csv(db, &analysis, vfs, &sm, expr_id),
+ ty.display(db)
+ );
+ }
+ if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
+ num_type_mismatches += 1;
+ if verbosity.is_verbose() {
+ if let Some((path, start, end)) =
+ expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
+ {
+ bar.println(format!(
+ "{} {}:{}-{}:{}: Expected {}, got {}",
+ path,
+ start.line + 1,
+ start.col,
+ end.line + 1,
+ end.col,
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ ));
+ } else {
+ bar.println(format!(
+ "{}: Expected {}, got {}",
+ name,
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ ));
+ }
+ }
+ if self.output == Some(OutputFormat::Csv) {
+ println!(
+ r#"{},mismatch,"{}","{}""#,
+ location_csv(db, &analysis, vfs, &sm, expr_id),
+ mismatch.expected.display(db),
+ mismatch.actual.display(db)
+ );
+ }
+ }
+ }
+ if verbosity.is_spammy() {
+ bar.println(format!(
+ "In {}: {} exprs, {} unknown, {} partial",
+ full_name,
+ num_exprs - previous_exprs,
+ num_exprs_unknown - previous_unknown,
+ num_exprs_partially_unknown - previous_partially_unknown
+ ));
+ }
+ bar.inc(1);
+ }
+
+ bar.finish_and_clear();
+ eprintln!(
+ " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
+ num_exprs,
+ num_exprs_unknown,
+ percentage(num_exprs_unknown, num_exprs),
+ num_exprs_partially_unknown,
+ percentage(num_exprs_partially_unknown, num_exprs),
+ num_type_mismatches
+ );
+ report_metric("unknown type", num_exprs_unknown, "#");
+ report_metric("type mismatches", num_type_mismatches, "#");
+
+ eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());
+ }
+
+ fn stop_watch(&self) -> StopWatch {
+ StopWatch::start().memory(self.memory_usage)
+ }
+}
+
+fn location_csv(
+ db: &RootDatabase,
+ analysis: &Analysis,
+ vfs: &Vfs,
+ sm: &BodySourceMap,
+ expr_id: ExprId,
+) -> String {
+ let src = match sm.expr_syntax(expr_id) {
+ Ok(s) => s,
+ Err(SyntheticSyntax) => return "synthetic,,".to_string(),
+ };
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
+ let original_range = node.as_ref().original_file_range(db);
+ let path = vfs.file_path(original_range.file_id);
+ let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let text_range = original_range.range;
+ let (start, end) =
+ (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+ format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col)
+}
+
+fn expr_syntax_range(
+ db: &RootDatabase,
+ analysis: &Analysis,
+ vfs: &Vfs,
+ sm: &BodySourceMap,
+ expr_id: ExprId,
+) -> Option<(VfsPath, LineCol, LineCol)> {
+ let src = sm.expr_syntax(expr_id);
+ if let Ok(src) = src {
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
+ let original_range = node.as_ref().original_file_range(db);
+ let path = vfs.file_path(original_range.file_id);
+ let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let text_range = original_range.range;
+ let (start, end) =
+ (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+ Some((path, start, end))
+ } else {
+ None
+ }
+}
+
+fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
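+    // In-place Fisher-Yates-style shuffle driven by the supplied `Rand32`.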
+ for i in 0..slice.len() {
+ randomize_first(rng, &mut slice[i..]);
+ }
+
+ fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
+ assert!(!slice.is_empty());
+ let idx = rng.rand_range(0..slice.len() as u32) as usize;
+ slice.swap(0, idx);
+ }
+}
+
+fn percentage(n: u64, total: u64) -> u64 {
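+    // Report 100% when `total` is zero instead of dividing by zero.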
+ (n * 100).checked_div(total).unwrap_or(100)
+}
+
+fn syntax_len(node: SyntaxNode) -> usize {
+ // Macro expanded code doesn't contain whitespace, so erase *all* whitespace
+ // to make macro and non-macro code comparable.
+ node.to_string().replace(|it: char| it.is_ascii_whitespace(), "").len()
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
new file mode 100644
index 000000000..52511ceb5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -0,0 +1,86 @@
+//! Analyze all modules in a project for diagnostics. Exits with a non-zero
+//! status code if any errors are found.
+
+use rustc_hash::FxHashSet;
+
+use hir::{db::HirDatabase, Crate, Module};
+use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
+use ide_db::base_db::SourceDatabaseExt;
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace_at, LoadCargoConfig},
+};
+
+impl flags::Diagnostics {
+ pub fn run(self) -> anyhow::Result<()> {
+ let cargo_config = Default::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: !self.disable_build_scripts,
+ with_proc_macro: !self.disable_proc_macros,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
+ let db = host.raw_database();
+ let analysis = host.analysis();
+
+ let mut found_error = false;
+ let mut visited_files = FxHashSet::default();
+
+ let work = all_modules(db).into_iter().filter(|module| {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ !source_root.is_library
+ });
+
+ for module in work {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ if !visited_files.contains(&file_id) {
+ let crate_name =
+ module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
+ println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
+ for diagnostic in analysis
+ .diagnostics(
+ &DiagnosticsConfig::default(),
+ AssistResolveStrategy::None,
+ file_id,
+ )
+ .unwrap()
+ {
+ if matches!(diagnostic.severity, Severity::Error) {
+ found_error = true;
+ }
+
+ println!("{:?}", diagnostic);
+ }
+
+ visited_files.insert(file_id);
+ }
+ }
+
+ println!();
+ println!("diagnostic scan complete");
+
+ if found_error {
+ println!();
+ anyhow::bail!("diagnostic error detected")
+ }
+
+ Ok(())
+ }
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
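+    // Walk the module tree of every crate with a simple worklist, collecting all
+    // modules reachable from the crate roots.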
+ let mut worklist: Vec<_> =
+ Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
new file mode 100644
index 000000000..19907ebdd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -0,0 +1,248 @@
+//! Grammar for the command-line arguments.
+#![allow(unreachable_pub)]
+use std::{path::PathBuf, str::FromStr};
+
+use ide_ssr::{SsrPattern, SsrRule};
+
+use crate::cli::Verbosity;
+
+xflags::xflags! {
+ src "./src/cli/flags.rs"
+
+ /// LSP server for the Rust programming language.
+ cmd rust-analyzer {
+ /// Verbosity level, can be repeated multiple times.
+ repeated -v, --verbose
+ /// Verbosity level.
+ optional -q, --quiet
+
+ /// Log to the specified file instead of stderr.
+ optional --log-file path: PathBuf
+ /// Flush log records to the file immediately.
+ optional --no-log-buffering
+
+        /// Wait until a debugger is attached (requires a debug build).
+ optional --wait-dbg
+
+ default cmd lsp-server {
+ /// Print version.
+ optional --version
+ /// Print help.
+ optional -h, --help
+
+ /// Dump a LSP config JSON schema.
+ optional --print-config-schema
+ }
+
+ /// Parse stdin.
+ cmd parse {
+ /// Suppress printing.
+ optional --no-dump
+ }
+
+ /// Parse stdin and print the list of symbols.
+ cmd symbols {}
+
+ /// Highlight stdin as html.
+ cmd highlight {
+ /// Enable rainbow highlighting of identifiers.
+ optional --rainbow
+ }
+
+        /// Batch typecheck the project and print summary statistics.
+ cmd analysis-stats
+ /// Directory with Cargo.toml.
+ required path: PathBuf
+ {
+ optional --output format: OutputFormat
+
+ /// Randomize order in which crates, modules, and items are processed.
+ optional --randomize
+ /// Run type inference in parallel.
+ optional --parallel
+ /// Collect memory usage statistics.
+ optional --memory-usage
+ /// Print the total length of all source and macro files (whitespace is not counted).
+ optional --source-stats
+
+ /// Only analyze items matching this path.
+ optional -o, --only path: String
+ /// Also analyze all dependencies.
+ optional --with-deps
+ /// Don't load sysroot crates (`std`, `core` & friends).
+ optional --no-sysroot
+
+ /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
+ optional --disable-build-scripts
+            /// Don't expand proc macros.
+ optional --disable-proc-macros
+ /// Only resolve names, don't run type inference.
+ optional --skip-inference
+ }
+
+ cmd diagnostics
+ /// Directory with Cargo.toml.
+ required path: PathBuf
+ {
+ /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
+ optional --disable-build-scripts
+            /// Don't expand proc macros.
+ optional --disable-proc-macros
+ }
+
+ cmd ssr
+ /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
+ repeated rule: SsrRule
+ {}
+
+ cmd search
+ /// A structured search replace pattern (`$a.foo($b)`)
+ repeated pattern: SsrPattern
+ {
+ /// Prints debug information for any nodes with source exactly equal to snippet.
+ optional --debug snippet: String
+ }
+
+ cmd proc-macro {}
+
+ cmd lsif
+ required path: PathBuf
+ {}
+ }
+}
+
+// generated start
+// The following code is generated by `xflags` macro.
+// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate.
+#[derive(Debug)]
+pub struct RustAnalyzer {
+ pub verbose: u32,
+ pub quiet: bool,
+ pub log_file: Option<PathBuf>,
+ pub no_log_buffering: bool,
+ pub wait_dbg: bool,
+ pub subcommand: RustAnalyzerCmd,
+}
+
+#[derive(Debug)]
+pub enum RustAnalyzerCmd {
+ LspServer(LspServer),
+ Parse(Parse),
+ Symbols(Symbols),
+ Highlight(Highlight),
+ AnalysisStats(AnalysisStats),
+ Diagnostics(Diagnostics),
+ Ssr(Ssr),
+ Search(Search),
+ ProcMacro(ProcMacro),
+ Lsif(Lsif),
+}
+
+#[derive(Debug)]
+pub struct LspServer {
+ pub version: bool,
+ pub help: bool,
+ pub print_config_schema: bool,
+}
+
+#[derive(Debug)]
+pub struct Parse {
+ pub no_dump: bool,
+}
+
+#[derive(Debug)]
+pub struct Symbols;
+
+#[derive(Debug)]
+pub struct Highlight {
+ pub rainbow: bool,
+}
+
+#[derive(Debug)]
+pub struct AnalysisStats {
+ pub path: PathBuf,
+
+ pub output: Option<OutputFormat>,
+ pub randomize: bool,
+ pub parallel: bool,
+ pub memory_usage: bool,
+ pub source_stats: bool,
+ pub only: Option<String>,
+ pub with_deps: bool,
+ pub no_sysroot: bool,
+ pub disable_build_scripts: bool,
+ pub disable_proc_macros: bool,
+ pub skip_inference: bool,
+}
+
+#[derive(Debug)]
+pub struct Diagnostics {
+ pub path: PathBuf,
+
+ pub disable_build_scripts: bool,
+ pub disable_proc_macros: bool,
+}
+
+#[derive(Debug)]
+pub struct Ssr {
+ pub rule: Vec<SsrRule>,
+}
+
+#[derive(Debug)]
+pub struct Search {
+ pub pattern: Vec<SsrPattern>,
+
+ pub debug: Option<String>,
+}
+
+#[derive(Debug)]
+pub struct ProcMacro;
+
+#[derive(Debug)]
+pub struct Lsif {
+ pub path: PathBuf,
+}
+
+impl RustAnalyzer {
+ pub const HELP: &'static str = Self::HELP_;
+
+ #[allow(dead_code)]
+ pub fn from_env() -> xflags::Result<Self> {
+ Self::from_env_()
+ }
+
+ #[allow(dead_code)]
+ pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> {
+ Self::from_vec_(args)
+ }
+}
+// generated end
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum OutputFormat {
+ Csv,
+}
+
+impl RustAnalyzer {
+ pub fn verbosity(&self) -> Verbosity {
+ if self.quiet {
+ return Verbosity::Quiet;
+ }
+ match self.verbose {
+ 0 => Verbosity::Normal,
+ 1 => Verbosity::Verbose,
+ _ => Verbosity::Spammy,
+ }
+ }
+}
+
+impl FromStr for OutputFormat {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "csv" => Ok(Self::Csv),
+ _ => Err(format!("unknown output format `{}`", s)),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs
new file mode 100644
index 000000000..4f9b362f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/highlight.rs
@@ -0,0 +1,14 @@
+//! Read Rust code on stdin, print HTML highlighted version to stdout.
+
+use ide::Analysis;
+
+use crate::cli::{flags, read_stdin};
+
+impl flags::Highlight {
+ pub fn run(self) -> anyhow::Result<()> {
+ let (analysis, file_id) = Analysis::from_single_file(read_stdin()?);
+ let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap();
+ println!("{}", html);
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
new file mode 100644
index 000000000..5d1c013c3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -0,0 +1,164 @@
+//! Loads a Cargo project into a static instance of analysis, without support
+//! for incorporating changes.
+use std::{path::Path, sync::Arc};
+
+use anyhow::Result;
+use crossbeam_channel::{unbounded, Receiver};
+use hir::db::DefDatabase;
+use ide::{AnalysisHost, Change};
+use ide_db::base_db::CrateGraph;
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use vfs::{loader::Handle, AbsPath, AbsPathBuf};
+
+use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
+
+// Note: Since this type is used by external tools that use rust-analyzer as a library,
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+pub struct LoadCargoConfig {
+ pub load_out_dirs_from_check: bool,
+ pub with_proc_macro: bool,
+ pub prefill_caches: bool,
+}
+
+// Note: Since this function is used by external tools that use rust-analyzer as a library,
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+pub fn load_workspace_at(
+ root: &Path,
+ cargo_config: &CargoConfig,
+ load_config: &LoadCargoConfig,
+ progress: &dyn Fn(String),
+) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
+ let root = ProjectManifest::discover_single(&root)?;
+ let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
+
+ if load_config.load_out_dirs_from_check {
+ let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
+ workspace.set_build_scripts(build_scripts)
+ }
+
+ load_workspace(workspace, load_config)
+}
+
+// Note: Since this function is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+//
+// The reason both `load_workspace_at` and `load_workspace` are `pub` is that some of
+// these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
+pub fn load_workspace(
+ ws: ProjectWorkspace,
+ load_config: &LoadCargoConfig,
+) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let (sender, receiver) = unbounded();
+ let mut vfs = vfs::Vfs::default();
+ let mut loader = {
+ let loader =
+ vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ Box::new(loader)
+ };
+
+ let proc_macro_client = if load_config.with_proc_macro {
+ let path = AbsPathBuf::assert(std::env::current_exe()?);
+ Ok(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap())
+ } else {
+ Err("proc macro server not started".to_owned())
+ };
+
+ let crate_graph = ws.to_crate_graph(
+ &mut |_, path: &AbsPath| {
+ load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[])
+ },
+ &mut |path: &AbsPath| {
+ let contents = loader.load_sync(path);
+ let path = vfs::VfsPath::from(path.to_path_buf());
+ vfs.set_file_contents(path.clone(), contents);
+ vfs.file_id(&path)
+ },
+ );
+
+ let project_folders = ProjectFolders::new(&[ws], &[]);
+ loader.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch: vec![],
+ version: 0,
+ });
+
+ tracing::debug!("crate graph: {:?}", crate_graph);
+ let host =
+ load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
+
+ if load_config.prefill_caches {
+ host.analysis().parallel_prime_caches(1, |_| {})?;
+ }
+ Ok((host, vfs, proc_macro_client.ok()))
+}
+
+fn load_crate_graph(
+ crate_graph: CrateGraph,
+ source_root_config: SourceRootConfig,
+ vfs: &mut vfs::Vfs,
+ receiver: &Receiver<vfs::loader::Message>,
+) -> AnalysisHost {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+ let mut host = AnalysisHost::new(lru_cap);
+ let mut analysis_change = Change::new();
+
+ host.raw_database_mut().set_enable_proc_attr_macros(true);
+
+ // wait until Vfs has loaded all roots
+ for task in receiver {
+ match task {
+ vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
+ if n_done == n_total {
+ break;
+ }
+ }
+ vfs::loader::Message::Loaded { files } => {
+ for (path, contents) in files {
+ vfs.set_file_contents(path.into(), contents);
+ }
+ }
+ }
+ }
+ let changes = vfs.take_changes();
+ for file in changes {
+ if file.exists() {
+ let contents = vfs.file_contents(file.file_id).to_vec();
+ if let Ok(text) = String::from_utf8(contents) {
+ analysis_change.change_file(file.file_id, Some(Arc::new(text)))
+ }
+ }
+ }
+ let source_roots = source_root_config.partition(vfs);
+ analysis_change.set_roots(source_roots);
+
+ analysis_change.set_crate_graph(crate_graph);
+
+ host.apply_change(analysis_change);
+ host
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use hir::Crate;
+
+ #[test]
+ fn test_loading_rust_analyzer() {
+ let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro: false,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
+
+ let n_crates = Crate::all(host.raw_database()).len();
+ // RA has quite a few crates, but the exact count doesn't matter
+ assert!(n_crates > 20);
+ }
+}
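
The notes in this file stress that `load_workspace_at`/`load_workspace` stay `pub` for external tools that embed rust-analyzer as a library. A minimal sketch of that use case, mirroring the test above (crate and item names as used in this file; error handling simplified):

```rust
use std::path::Path;

use project_model::CargoConfig;

// Load a Cargo workspace into an AnalysisHost and count its crates, the same
// way the test above does.
fn count_crates(project_root: &Path) -> anyhow::Result<usize> {
    let cargo_config = CargoConfig::default();
    let load_config = LoadCargoConfig {
        load_out_dirs_from_check: true,
        with_proc_macro: false,
        prefill_caches: false,
    };
    let (host, _vfs, _proc_macro) =
        load_workspace_at(project_root, &cargo_config, &load_config, &|msg| eprintln!("{}", msg))?;
    Ok(hir::Crate::all(host.raw_database()).len())
}
```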
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
new file mode 100644
index 000000000..491c55a04
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -0,0 +1,328 @@
+//! LSIF (language server index format) generator
+
+use std::collections::HashMap;
+use std::env;
+use std::time::Instant;
+
+use ide::{
+ Analysis, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, StaticIndex,
+ StaticIndexedFile, TokenId, TokenStaticData,
+};
+use ide_db::LineIndexDatabase;
+
+use ide_db::base_db::salsa::{self, ParallelDatabase};
+use lsp_types::{self, lsif};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use vfs::{AbsPathBuf, Vfs};
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace, LoadCargoConfig},
+ Result,
+};
+use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::to_proto;
+use crate::version::version;
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+ fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+ Snap(self.0.snapshot())
+ }
+}
+
+struct LsifManager<'a> {
+ count: i32,
+ token_map: HashMap<TokenId, Id>,
+ range_map: HashMap<FileRange, Id>,
+ file_map: HashMap<FileId, Id>,
+ package_map: HashMap<PackageInformation, Id>,
+ analysis: &'a Analysis,
+ db: &'a RootDatabase,
+ vfs: &'a Vfs,
+}
+
+#[derive(Clone, Copy)]
+struct Id(i32);
+
+impl From<Id> for lsp_types::NumberOrString {
+ fn from(Id(x): Id) -> Self {
+ lsp_types::NumberOrString::Number(x)
+ }
+}
+
+impl LsifManager<'_> {
+ fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
+ LsifManager {
+ count: 0,
+ token_map: HashMap::default(),
+ range_map: HashMap::default(),
+ file_map: HashMap::default(),
+ package_map: HashMap::default(),
+ analysis,
+ db,
+ vfs,
+ }
+ }
+
+ fn add(&mut self, data: lsif::Element) -> Id {
+ let id = Id(self.count);
+ self.emit(&serde_json::to_string(&lsif::Entry { id: id.into(), data }).unwrap());
+ self.count += 1;
+ id
+ }
+
+ fn add_vertex(&mut self, vertex: lsif::Vertex) -> Id {
+ self.add(lsif::Element::Vertex(vertex))
+ }
+
+ fn add_edge(&mut self, edge: lsif::Edge) -> Id {
+ self.add(lsif::Element::Edge(edge))
+ }
+
+ // FIXME: support file in addition to stdout here
+ fn emit(&self, data: &str) {
+ println!("{}", data);
+ }
+
+ fn get_token_id(&mut self, id: TokenId) -> Id {
+ if let Some(x) = self.token_map.get(&id) {
+ return *x;
+ }
+ let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
+ self.token_map.insert(id, result_set_id);
+ result_set_id
+ }
+
+ fn get_package_id(&mut self, package_information: PackageInformation) -> Id {
+ if let Some(x) = self.package_map.get(&package_information) {
+ return *x;
+ }
+ let pi = package_information.clone();
+ let result_set_id =
+ self.add_vertex(lsif::Vertex::PackageInformation(lsif::PackageInformation {
+ name: pi.name,
+ manager: "cargo".to_string(),
+ uri: None,
+ content: None,
+ repository: Some(lsif::Repository {
+ url: pi.repo,
+ r#type: "git".to_string(),
+ commit_id: None,
+ }),
+ version: Some(pi.version),
+ }));
+ self.package_map.insert(package_information, result_set_id);
+ result_set_id
+ }
+
+ fn get_range_id(&mut self, id: FileRange) -> Id {
+ if let Some(x) = self.range_map.get(&id) {
+ return *x;
+ }
+ let file_id = id.file_id;
+ let doc_id = self.get_file_id(file_id);
+ let line_index = self.db.line_index(file_id);
+ let line_index = LineIndex {
+ index: line_index,
+ encoding: OffsetEncoding::Utf16,
+ endings: LineEndings::Unix,
+ };
+ let range_id = self.add_vertex(lsif::Vertex::Range {
+ range: to_proto::range(&line_index, id.range),
+ tag: None,
+ });
+ self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
+ in_vs: vec![range_id.into()],
+ out_v: doc_id.into(),
+ }));
+ range_id
+ }
+
+ fn get_file_id(&mut self, id: FileId) -> Id {
+ if let Some(x) = self.file_map.get(&id) {
+ return *x;
+ }
+ let path = self.vfs.file_path(id);
+ let path = path.as_path().unwrap();
+ let doc_id = self.add_vertex(lsif::Vertex::Document(lsif::Document {
+ language_id: "rust".to_string(),
+ uri: lsp_types::Url::from_file_path(path).unwrap(),
+ }));
+ self.file_map.insert(id, doc_id);
+ doc_id
+ }
+
+ fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+ let result_set_id = self.get_token_id(id);
+ if let Some(hover) = token.hover {
+ let hover_id = self.add_vertex(lsif::Vertex::HoverResult {
+ result: lsp_types::Hover {
+ contents: lsp_types::HoverContents::Markup(to_proto::markup_content(
+ hover.markup,
+ ide::HoverDocFormat::Markdown,
+ )),
+ range: None,
+ },
+ });
+ self.add_edge(lsif::Edge::Hover(lsif::EdgeData {
+ in_v: hover_id.into(),
+ out_v: result_set_id.into(),
+ }));
+ }
+ if let Some(moniker) = token.moniker {
+ let package_id = self.get_package_id(moniker.package_information);
+ let moniker_id = self.add_vertex(lsif::Vertex::Moniker(lsp_types::Moniker {
+ scheme: "rust-analyzer".to_string(),
+ identifier: moniker.identifier.to_string(),
+ unique: lsp_types::UniquenessLevel::Scheme,
+ kind: Some(match moniker.kind {
+ MonikerKind::Import => lsp_types::MonikerKind::Import,
+ MonikerKind::Export => lsp_types::MonikerKind::Export,
+ }),
+ }));
+ self.add_edge(lsif::Edge::PackageInformation(lsif::EdgeData {
+ in_v: package_id.into(),
+ out_v: moniker_id.into(),
+ }));
+ self.add_edge(lsif::Edge::Moniker(lsif::EdgeData {
+ in_v: moniker_id.into(),
+ out_v: result_set_id.into(),
+ }));
+ }
+ if let Some(def) = token.definition {
+ let result_id = self.add_vertex(lsif::Vertex::DefinitionResult);
+ let def_vertex = self.get_range_id(def);
+ self.add_edge(lsif::Edge::Item(lsif::Item {
+ document: (*self.file_map.get(&def.file_id).unwrap()).into(),
+ property: None,
+ edge_data: lsif::EdgeDataMultiIn {
+ in_vs: vec![def_vertex.into()],
+ out_v: result_id.into(),
+ },
+ }));
+ self.add_edge(lsif::Edge::Definition(lsif::EdgeData {
+ in_v: result_id.into(),
+ out_v: result_set_id.into(),
+ }));
+ }
+ if !token.references.is_empty() {
+ let result_id = self.add_vertex(lsif::Vertex::ReferenceResult);
+ self.add_edge(lsif::Edge::References(lsif::EdgeData {
+ in_v: result_id.into(),
+ out_v: result_set_id.into(),
+ }));
+ let mut edges = token.references.iter().fold(
+ HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
+ |mut edges, x| {
+ let entry =
+ edges.entry((x.range.file_id, x.is_definition)).or_insert_with(Vec::new);
+ entry.push((*self.range_map.get(&x.range).unwrap()).into());
+ edges
+ },
+ );
+ for x in token.references {
+ if let Some(vertices) = edges.remove(&(x.range.file_id, x.is_definition)) {
+ self.add_edge(lsif::Edge::Item(lsif::Item {
+ document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
+ property: Some(if x.is_definition {
+ lsif::ItemKind::Definitions
+ } else {
+ lsif::ItemKind::References
+ }),
+ edge_data: lsif::EdgeDataMultiIn {
+ in_vs: vertices,
+ out_v: result_id.into(),
+ },
+ }));
+ }
+ }
+ }
+ }
+
+ fn add_file(&mut self, file: StaticIndexedFile) {
+ let StaticIndexedFile { file_id, tokens, folds, .. } = file;
+ let doc_id = self.get_file_id(file_id);
+ let text = self.analysis.file_text(file_id).unwrap();
+ let line_index = self.db.line_index(file_id);
+ let line_index = LineIndex {
+ index: line_index,
+ encoding: OffsetEncoding::Utf16,
+ endings: LineEndings::Unix,
+ };
+ let result = folds
+ .into_iter()
+ .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
+ .collect();
+ let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result });
+ self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData {
+ in_v: folding_id.into(),
+ out_v: doc_id.into(),
+ }));
+ let tokens_id = tokens
+ .into_iter()
+ .map(|(range, id)| {
+ let range_id = self.add_vertex(lsif::Vertex::Range {
+ range: to_proto::range(&line_index, range),
+ tag: None,
+ });
+ self.range_map.insert(FileRange { file_id, range }, range_id);
+ let result_set_id = self.get_token_id(id);
+ self.add_edge(lsif::Edge::Next(lsif::EdgeData {
+ in_v: result_set_id.into(),
+ out_v: range_id.into(),
+ }));
+ range_id.into()
+ })
+ .collect();
+ self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
+ in_vs: tokens_id,
+ out_v: doc_id.into(),
+ }));
+ }
+}
+
+impl flags::Lsif {
+ pub fn run(self) -> Result<()> {
+ eprintln!("Generating LSIF started...");
+ let now = Instant::now();
+ let cargo_config = CargoConfig::default();
+ let no_progress = &|_| ();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: true,
+ prefill_caches: false,
+ };
+ let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path));
+ let manifest = ProjectManifest::discover_single(&path)?;
+
+ let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+
+ let (host, vfs, _proc_macro) = load_workspace(workspace, &load_cargo_config)?;
+ let db = host.raw_database();
+ let analysis = host.analysis();
+
+ let si = StaticIndex::compute(&analysis);
+
+ let mut lsif = LsifManager::new(&analysis, db, &vfs);
+ lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData {
+ version: String::from("0.5.0"),
+ project_root: lsp_types::Url::from_file_path(path).unwrap(),
+ position_encoding: lsif::Encoding::Utf16,
+ tool_info: Some(lsp_types::lsif::ToolInfo {
+ name: "rust-analyzer".to_string(),
+ args: vec![],
+ version: Some(version().to_string()),
+ }),
+ }));
+ for file in si.files {
+ lsif.add_file(file);
+ }
+ for (id, token) in si.tokens.iter() {
+ lsif.add_token(id, token);
+ }
+ eprintln!("Generating LSIF finished in {:?}", now.elapsed());
+ Ok(())
+ }
+}
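
A sketch of driving the generator above from elsewhere in this crate (a hedged illustration; it assumes `crate::cli::Result` is the anyhow-based alias used throughout these cli modules). The emitted LSIF entries go to stdout, one JSON object per line, while progress messages go to stderr:

```rust
use std::path::PathBuf;

// Generate an LSIF index for the project in the current directory.
fn index_current_dir() -> anyhow::Result<()> {
    flags::Lsif { path: PathBuf::from(".") }.run()?;
    Ok(())
}
```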
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs
new file mode 100644
index 000000000..5ef8cdff4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs
@@ -0,0 +1,17 @@
+//! Read Rust code on stdin, print syntax tree on stdout.
+use syntax::{AstNode, SourceFile};
+
+use crate::cli::{flags, read_stdin};
+
+impl flags::Parse {
+ pub fn run(self) -> anyhow::Result<()> {
+ let _p = profile::span("parsing");
+ let text = read_stdin()?;
+ let file = SourceFile::parse(&text).tree();
+ if !self.no_dump {
+ println!("{:#?}", file.syntax());
+ }
+ std::mem::forget(file);
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
new file mode 100644
index 000000000..5a2dc39d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
@@ -0,0 +1,122 @@
+//! A simple progress bar
+//!
+//! A single-threaded, non-optimized progress bar.
+use std::io::{self, Write};
+
+/// A Simple ASCII Progress Bar
+pub(crate) struct ProgressReport {
+ curr: f32,
+ text: String,
+ hidden: bool,
+
+ len: u64,
+ pos: u64,
+ msg: String,
+}
+
+impl ProgressReport {
+ pub(crate) fn new(len: u64) -> ProgressReport {
+ ProgressReport {
+ curr: 0.0,
+ text: String::new(),
+ hidden: false,
+ len,
+ pos: 0,
+ msg: String::new(),
+ }
+ }
+
+ pub(crate) fn hidden() -> ProgressReport {
+ ProgressReport {
+ curr: 0.0,
+ text: String::new(),
+ hidden: true,
+ len: 0,
+ pos: 0,
+ msg: String::new(),
+ }
+ }
+
+ pub(crate) fn set_message(&mut self, msg: &str) {
+ self.msg = msg.to_string();
+ self.tick();
+ }
+
+ pub(crate) fn println<I: Into<String>>(&mut self, msg: I) {
+ self.clear();
+ println!("{}", msg.into());
+ self.tick();
+ }
+
+ pub(crate) fn inc(&mut self, delta: u64) {
+ self.pos += delta;
+ if self.len == 0 {
+ self.set_value(0.0)
+ } else {
+ self.set_value((self.pos as f32) / (self.len as f32))
+ }
+ self.tick();
+ }
+
+ pub(crate) fn finish_and_clear(&mut self) {
+ self.clear();
+ }
+
+ pub(crate) fn tick(&mut self) {
+ if self.hidden {
+ return;
+ }
+ let percent = (self.curr * 100.0) as u32;
+ let text = format!("{}/{} {:>3}% {}", self.pos, self.len, percent, self.msg);
+ self.update_text(&text);
+ }
+
+ fn update_text(&mut self, text: &str) {
+ // Get length of common portion
+ let mut common_prefix_length = 0;
+ let common_length = usize::min(self.text.len(), text.len());
+
+ while common_prefix_length < common_length
+ && text.chars().nth(common_prefix_length).unwrap()
+ == self.text.chars().nth(common_prefix_length).unwrap()
+ {
+ common_prefix_length += 1;
+ }
+
+ // Backtrack to the first differing character
+ let mut output = String::new();
+ output += &'\x08'.to_string().repeat(self.text.len() - common_prefix_length);
+ // Output new suffix
+ output += &text[common_prefix_length..text.len()];
+
+ // If the new text is shorter than the old one: delete overlapping characters
+ if let Some(overlap_count) = self.text.len().checked_sub(text.len()) {
+ if overlap_count > 0 {
+ output += &" ".repeat(overlap_count);
+ output += &"\x08".repeat(overlap_count);
+ }
+ }
+
+ let _ = io::stdout().write(output.as_bytes());
+ let _ = io::stdout().flush();
+ self.text = text.to_string();
+ }
+
+ fn set_value(&mut self, value: f32) {
+ self.curr = f32::max(0.0, f32::min(1.0, value));
+ }
+
+ fn clear(&mut self) {
+ if self.hidden {
+ return;
+ }
+
+ // Overwrite the previous text with spaces, then move the cursor back
+ let spaces = " ".repeat(self.text.len());
+ let backspaces = "\x08".repeat(self.text.len());
+ print!("{}{}{}", backspaces, spaces, backspaces);
+ let _ = io::stdout().flush();
+
+ self.text = String::new();
+ }
+}
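
A usage sketch for this crate-internal progress bar: one unit of work per item, the item name as the message, and the line cleared when done (everything other than the `ProgressReport` API itself is illustrative):

```rust
// Report progress over a list of items on stdout, then erase the bar.
fn process_all(items: &[String]) {
    let mut bar = ProgressReport::new(items.len() as u64);
    for item in items {
        bar.set_message(item);
        // ... the actual per-item work would go here ...
        bar.inc(1);
    }
    bar.finish_and_clear();
}
```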
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
new file mode 100644
index 000000000..e8291782b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -0,0 +1,86 @@
+//! Applies structural search and replace (SSR) rules from the command line.
+
+use ide_ssr::MatchFinder;
+use project_model::CargoConfig;
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace_at, LoadCargoConfig},
+ Result,
+};
+
+impl flags::Ssr {
+ pub fn run(self) -> Result<()> {
+ use ide_db::base_db::SourceDatabaseExt;
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: true,
+ prefill_caches: false,
+ };
+ let (host, vfs, _proc_macro) = load_workspace_at(
+ &std::env::current_dir()?,
+ &cargo_config,
+ &load_cargo_config,
+ &|_| {},
+ )?;
+ let db = host.raw_database();
+ let mut match_finder = MatchFinder::at_first_file(db)?;
+ for rule in self.rule {
+ match_finder.add_rule(rule)?;
+ }
+ let edits = match_finder.edits();
+ for (file_id, edit) in edits {
+ if let Some(path) = vfs.file_path(file_id).as_path() {
+ let mut contents = db.file_text(file_id).to_string();
+ edit.apply(&mut contents);
+ std::fs::write(path, contents)?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl flags::Search {
+ /// Searches for `patterns`, printing debug information for any nodes whose text exactly matches
+ /// `debug_snippet`. This is intended for debugging and probably isn't useful for much else in its
+ /// current form.
+ pub fn run(self) -> Result<()> {
+ use ide_db::base_db::SourceDatabaseExt;
+ use ide_db::symbol_index::SymbolsDatabase;
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: true,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) = load_workspace_at(
+ &std::env::current_dir()?,
+ &cargo_config,
+ &load_cargo_config,
+ &|_| {},
+ )?;
+ let db = host.raw_database();
+ let mut match_finder = MatchFinder::at_first_file(db)?;
+ for pattern in self.pattern {
+ match_finder.add_search_pattern(pattern)?;
+ }
+ if let Some(debug_snippet) = &self.debug {
+ for &root in db.local_roots().iter() {
+ let sr = db.source_root(root);
+ for file_id in sr.iter() {
+ for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) {
+ println!("{:#?}", debug_info);
+ }
+ }
+ }
+ } else {
+ for m in match_finder.matches().flattened().matches {
+ // We could possibly at some point do something more useful than just printing
+ // the matched text. For now though, that's the easiest thing to do.
+ println!("{}", m.matched_text());
+ }
+ }
+ Ok(())
+ }
+}
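
The `rule` and `pattern` flags consumed above are parsed from command-line strings; a rule has the form `search ==>> replace` with `$name` placeholders. A hedged sketch of constructing one programmatically (assuming `SsrRule` here is `ide_ssr::SsrRule` and that it implements `FromStr`, as the flag parsing implies):

```rust
use ide_ssr::SsrRule;

// Build a rule that swaps the two arguments of `foo` wherever it is called.
fn swap_args_rule() -> SsrRule {
    "foo($a, $b) ==>> foo($b, $a)".parse().expect("valid SSR rule")
}
```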
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs
new file mode 100644
index 000000000..84659b5ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs
@@ -0,0 +1,16 @@
+//! Read Rust code on stdin, print syntax tree on stdout.
+use ide::Analysis;
+
+use crate::cli::{flags, read_stdin};
+
+impl flags::Symbols {
+ pub fn run(self) -> anyhow::Result<()> {
+ let text = read_stdin()?;
+ let (analysis, file_id) = Analysis::from_single_file(text);
+ let structure = analysis.file_structure(file_id).unwrap();
+ for s in structure {
+ println!("{:?}", s);
+ }
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
new file mode 100644
index 000000000..ac0fdf85a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -0,0 +1,1985 @@
+//! Config used by the language server.
+//!
+//! We currently get this config from `initialize` LSP request, which is not the
+//! best way to do it, but was the simplest thing we could implement.
+//!
+//! Of particular interest is the `feature_flags` hash map: while other fields
+//! configure the server itself, feature flags are passed into analysis, and
+//! tweak things like automatic insertion of `()` in completions.
+
+use std::{ffi::OsString, fmt, iter, path::PathBuf};
+
+use flycheck::FlycheckConfig;
+use ide::{
+ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
+ HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig,
+ Snippet, SnippetScope,
+};
+use ide_db::{
+ imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
+ SnippetCap,
+};
+use itertools::Itertools;
+use lsp_types::{ClientCapabilities, MarkupKind};
+use project_model::{
+ CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates,
+};
+use rustc_hash::{FxHashMap, FxHashSet};
+use serde::{de::DeserializeOwned, Deserialize};
+use vfs::AbsPathBuf;
+
+use crate::{
+ caps::completion_item_edit_resolve,
+ diagnostics::DiagnosticsMapConfig,
+ line_index::OffsetEncoding,
+ lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
+};
+
+mod patch_old_style;
+
+// Conventions for configuration keys to preserve maximal extendability without breakage:
+// - Toggles (be it binary true/false or with more options in-between) should almost always be suffixed with `_enable`.
+//   This has the benefit of namespaces being extensible, and if the suffix doesn't fit later it can be changed without breakage.
+// - In general, be wary of using the namespace of something verbatim; it prevents us from adding subkeys in the future.
+// - Don't use abbreviations unless really necessary.
+// - `foo_command` overrides the subcommand, `foo_overrideCommand` allows full overwriting; extra args only apply to `foo_command`.
+
+// Defines the server-side configuration of the rust-analyzer. We generate
+// *parts* of VS Code's `package.json` config from this.
+//
+// However, editor specific config, which the server doesn't know about, should
+// be specified directly in `package.json`.
+//
+// To deprecate an option by replacing it with another name, use `new_name | old_name` so that we keep
+// parsing the old name.
+config_data! {
+ struct ConfigData {
+ /// Placeholder expression to use for missing expressions in assists.
+ assist_expressionFillDefault: ExprFillDefaultDef = "\"todo\"",
+
+ /// Warm up caches on project load.
+ cachePriming_enable: bool = "true",
+ /// How many worker threads to handle priming caches. The default `0` means to pick automatically.
+ cachePriming_numThreads: ParallelCachePrimingNumThreads = "0",
+
+ /// Automatically refresh project info via `cargo metadata` on
+ /// `Cargo.toml` or `.cargo/config.toml` changes.
+ cargo_autoreload: bool = "true",
+ /// Run build scripts (`build.rs`) for more precise code analysis.
+ cargo_buildScripts_enable: bool = "true",
+ /// Override the command rust-analyzer uses to run build scripts and
+ /// build procedural macros. The command is required to output json
+ /// and should therefore include `--message-format=json` or a similar
+ /// option.
+ ///
+ /// By default, a cargo invocation will be constructed for the configured
+ /// targets and features, with the following base command line:
+ ///
+ /// ```bash
+ /// cargo check --quiet --workspace --message-format=json --all-targets
+ /// ```
+ /// .
+ cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null",
+ /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+ /// avoid checking unnecessary things.
+ cargo_buildScripts_useRustcWrapper: bool = "true",
+ /// List of features to activate.
+ ///
+ /// Set this to `"all"` to pass `--all-features` to cargo.
+ cargo_features: CargoFeatures = "[]",
+ /// Whether to pass `--no-default-features` to cargo.
+ cargo_noDefaultFeatures: bool = "false",
+ /// Internal config for debugging, disables loading of sysroot crates.
+ cargo_noSysroot: bool = "false",
+ /// Compilation target override (target triple).
+ cargo_target: Option<String> = "null",
+ /// Unsets `#[cfg(test)]` for the specified crates.
+ cargo_unsetTest: Vec<String> = "[\"core\"]",
+
+ /// Check all targets and tests (`--all-targets`).
+ checkOnSave_allTargets: bool = "true",
+ /// Cargo command to use for `cargo check`.
+ checkOnSave_command: String = "\"check\"",
+ /// Run specified `cargo check` command for diagnostics on save.
+ checkOnSave_enable: bool = "true",
+ /// Extra arguments for `cargo check`.
+ checkOnSave_extraArgs: Vec<String> = "[]",
+ /// List of features to activate. Defaults to
+ /// `#rust-analyzer.cargo.features#`.
+ ///
+ /// Set to `"all"` to pass `--all-features` to Cargo.
+ checkOnSave_features: Option<CargoFeatures> = "null",
+ /// Whether to pass `--no-default-features` to Cargo. Defaults to
+ /// `#rust-analyzer.cargo.noDefaultFeatures#`.
+ checkOnSave_noDefaultFeatures: Option<bool> = "null",
+ /// Override the command rust-analyzer uses instead of `cargo check` for
+ /// diagnostics on save. The command is required to output json and
+ /// should therefore include `--message-format=json` or a similar option.
+ ///
+ /// If you're changing this because you're using some tool wrapping
+ /// Cargo, you might also want to change
+ /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+ ///
+ /// An example command would be:
+ ///
+ /// ```bash
+ /// cargo check --workspace --message-format=json --all-targets
+ /// ```
+ /// .
+ checkOnSave_overrideCommand: Option<Vec<String>> = "null",
+ /// Check for a specific target. Defaults to
+ /// `#rust-analyzer.cargo.target#`.
+ checkOnSave_target: Option<String> = "null",
+
+ /// Toggles the additional completions that automatically add imports when completed.
+ /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+ completion_autoimport_enable: bool = "true",
+ /// Toggles the additional completions that automatically show method calls and field accesses
+ /// with `self` prefixed to them when inside a method.
+ completion_autoself_enable: bool = "true",
+ /// Whether to add parentheses and argument snippets when completing a function.
+ completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"",
+ /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+ completion_postfix_enable: bool = "true",
+ /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+ completion_privateEditable_enable: bool = "false",
+ /// Custom completion snippets.
+ // NOTE: Keep this list in sync with the feature docs of user snippets.
+ completion_snippets_custom: FxHashMap<String, SnippetDef> = r#"{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }"#,
+
+ /// List of rust-analyzer diagnostics to disable.
+ diagnostics_disabled: FxHashSet<String> = "[]",
+ /// Whether to show native rust-analyzer diagnostics.
+ diagnostics_enable: bool = "true",
+ /// Whether to show experimental rust-analyzer diagnostics that might
+ /// have more false positives than usual.
+ diagnostics_experimental_enable: bool = "false",
+ /// Map of prefixes to be substituted when parsing diagnostic file paths.
+ /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
+ diagnostics_remapPrefix: FxHashMap<String, String> = "{}",
+ /// List of warnings that should be displayed with hint severity.
+ ///
+ /// The warnings will be indicated by faded text or three dots in code
+ /// and will not show up in the `Problems Panel`.
+ diagnostics_warningsAsHint: Vec<String> = "[]",
+ /// List of warnings that should be displayed with info severity.
+ ///
+ /// The warnings will be indicated by a blue squiggly underline in code
+ /// and a blue icon in the `Problems Panel`.
+ diagnostics_warningsAsInfo: Vec<String> = "[]",
+
+ /// These directories will be ignored by rust-analyzer. They are
+ /// relative to the workspace root, and globs are not supported. You may
+ /// also need to add the folders to Code's `files.watcherExclude`.
+ files_excludeDirs: Vec<PathBuf> = "[]",
+ /// Controls file watching implementation.
+ files_watcher: FilesWatcherDef = "\"client\"",
+
+ /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+ highlightRelated_breakPoints_enable: bool = "true",
+ /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+ highlightRelated_exitPoints_enable: bool = "true",
+ /// Enables highlighting of related references while the cursor is on any identifier.
+ highlightRelated_references_enable: bool = "true",
+ /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
+ highlightRelated_yieldPoints_enable: bool = "true",
+
+ /// Whether to show `Debug` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_debug_enable: bool = "true",
+ /// Whether to show HoverActions in Rust files.
+ hover_actions_enable: bool = "true",
+ /// Whether to show `Go to Type Definition` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_gotoTypeDef_enable: bool = "true",
+ /// Whether to show `Implementations` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_implementations_enable: bool = "true",
+ /// Whether to show `References` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_references_enable: bool = "false",
+ /// Whether to show `Run` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_run_enable: bool = "true",
+
+ /// Whether to show documentation on hover.
+ hover_documentation_enable: bool = "true",
+ /// Use markdown syntax for links in hover.
+ hover_links_enable: bool = "true",
+
+ /// Whether to enforce the import granularity setting for all files. If set to false, rust-analyzer will try to keep import styles consistent per file.
+ imports_granularity_enforce: bool = "false",
+ /// How imports should be grouped into use statements.
+ imports_granularity_group: ImportGranularityDef = "\"crate\"",
+ /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+ imports_group_enable: bool = "true",
+ /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+ imports_merge_glob: bool = "true",
+ /// The path structure for newly inserted paths to use.
+ imports_prefix: ImportPrefixDef = "\"plain\"",
+
+ /// Whether to show inlay type hints for binding modes.
+ inlayHints_bindingModeHints_enable: bool = "false",
+ /// Whether to show inlay type hints for method chains.
+ inlayHints_chainingHints_enable: bool = "true",
+ /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+ inlayHints_closingBraceHints_enable: bool = "true",
+ /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+ /// to always show them).
+ inlayHints_closingBraceHints_minLines: usize = "25",
+ /// Whether to show inlay type hints for return types of closures.
+ inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
+ /// Whether to show inlay type hints for elided lifetimes in function signatures.
+ inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
+ /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+ inlayHints_lifetimeElisionHints_useParameterNames: bool = "false",
+ /// Maximum length for inlay hints. Set to null to have an unlimited length.
+ inlayHints_maxLength: Option<usize> = "25",
+ /// Whether to show function parameter name inlay hints at the call
+ /// site.
+ inlayHints_parameterHints_enable: bool = "true",
+ /// Whether to show inlay type hints for compiler inserted reborrows.
+ inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"",
+ /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
+ inlayHints_renderColons: bool = "true",
+ /// Whether to show inlay type hints for variables.
+ inlayHints_typeHints_enable: bool = "true",
+ /// Whether to hide inlay type hints for `let` statements that initialize to a closure.
+ /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+ inlayHints_typeHints_hideClosureInitialization: bool = "false",
+ /// Whether to hide inlay type hints for constructors.
+ inlayHints_typeHints_hideNamedConstructor: bool = "false",
+
+ /// Join lines merges a consecutive declaration and initialization into one assignment.
+ joinLines_joinAssignments: bool = "true",
+ /// Join lines inserts an `else` between consecutive `if`s.
+ joinLines_joinElseIf: bool = "true",
+ /// Join lines removes trailing commas.
+ joinLines_removeTrailingComma: bool = "true",
+ /// Join lines unwraps trivial blocks.
+ joinLines_unwrapTrivialBlock: bool = "true",
+
+ /// Whether to show `Debug` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_debug_enable: bool = "true",
+ /// Whether to show CodeLens in Rust files.
+ lens_enable: bool = "true",
+ /// Internal config: use custom client-side commands even when the
+ /// client doesn't set the corresponding capability.
+ lens_forceCustomCommands: bool = "true",
+ /// Whether to show `Implementations` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_implementations_enable: bool = "true",
+ /// Whether to show `References` lens for Struct, Enum, and Union.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_adt_enable: bool = "false",
+ /// Whether to show `References` lens for Enum Variants.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_enumVariant_enable: bool = "false",
+ /// Whether to show `Method References` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_references_method_enable: bool = "false",
+ /// Whether to show `References` lens for Trait.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_trait_enable: bool = "false",
+ /// Whether to show `Run` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_run_enable: bool = "true",
+
+ /// Disable project auto-discovery in favor of explicitly specified set
+ /// of projects.
+ ///
+ /// Elements must be paths pointing to `Cargo.toml`,
+ /// `rust-project.json`, or JSON objects in `rust-project.json` format.
+ linkedProjects: Vec<ManifestOrProjectJson> = "[]",
+
+ /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+ lru_capacity: Option<usize> = "null",
+
+ /// Whether to show `can't find Cargo.toml` error message.
+ notifications_cargoTomlNotFound: bool = "true",
+
+ /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+ procMacro_attributes_enable: bool = "true",
+ /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+ procMacro_enable: bool = "true",
+ /// These proc-macros will be ignored when trying to expand them.
+ ///
+ /// This config takes a map of crate names with the exported proc-macro names to ignore as values.
+ procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = "{}",
+ /// Internal config, path to proc-macro server executable (typically,
+ /// this is rust-analyzer itself, but we override this in tests).
+ procMacro_server: Option<PathBuf> = "null",
+
+ /// Command to be executed instead of 'cargo' for runnables.
+ runnables_command: Option<String> = "null",
+ /// Additional arguments to be passed to cargo for runnables such as
+ /// tests or binaries. For example, it may be `--release`.
+ runnables_extraArgs: Vec<String> = "[]",
+
+ /// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+ /// projects, or "discover" to try to automatically find it if the `rustc-dev` component
+ /// is installed.
+ ///
+ /// Any project which uses rust-analyzer with the rustcPrivate
+ /// crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+ ///
+ /// This option does not take effect until rust-analyzer is restarted.
+ rustc_source: Option<String> = "null",
+
+ /// Additional arguments to `rustfmt`.
+ rustfmt_extraArgs: Vec<String> = "[]",
+ /// Advanced option, fully override the command rust-analyzer uses for
+ /// formatting.
+ rustfmt_overrideCommand: Option<Vec<String>> = "null",
+ /// Enables the use of rustfmt's unstable range formatting command for the
+ /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+ /// available on a nightly build.
+ rustfmt_rangeFormatting_enable: bool = "false",
+
+ /// Use semantic tokens for strings.
+ ///
+ /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+ /// By disabling semantic tokens for strings, other grammars can be used to highlight
+ /// their contents.
+ semanticHighlighting_strings_enable: bool = "true",
+
+ /// Show full signature of the callable. Only shows parameters if disabled.
+ signatureInfo_detail: SignatureDetail = "\"full\"",
+ /// Show documentation.
+ signatureInfo_documentation_enable: bool = "true",
+
+ /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+ typing_autoClosingAngleBrackets_enable: bool = "false",
+
+ /// Workspace symbol search kind.
+ workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = "\"only_types\"",
+ /// Limits the number of items returned from a workspace symbol search (defaults to 128).
+ /// Some clients, like vs-code, issue new searches on result filtering and don't require all results to be returned in the initial search.
+ /// Other clients require all results upfront and might need a higher limit.
+ workspace_symbol_search_limit: usize = "128",
+ /// Workspace symbol search scope.
+ workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = "\"workspace\"",
+ }
+}
+
+impl Default for ConfigData {
+ fn default() -> Self {
+ ConfigData::from_json(serde_json::Value::Null, &mut Vec::new())
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct Config {
+ pub discovered_projects: Option<Vec<ProjectManifest>>,
+ caps: lsp_types::ClientCapabilities,
+ root_path: AbsPathBuf,
+ data: ConfigData,
+ detached_files: Vec<AbsPathBuf>,
+ snippets: Vec<Snippet>,
+}
+
+type ParallelCachePrimingNumThreads = u8;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum LinkedProject {
+ ProjectManifest(ProjectManifest),
+ InlineJsonProject(ProjectJson),
+}
+
+impl From<ProjectManifest> for LinkedProject {
+ fn from(v: ProjectManifest) -> Self {
+ LinkedProject::ProjectManifest(v)
+ }
+}
+
+impl From<ProjectJson> for LinkedProject {
+ fn from(v: ProjectJson) -> Self {
+ LinkedProject::InlineJsonProject(v)
+ }
+}
+
+pub struct CallInfoConfig {
+ pub params_only: bool,
+ pub docs: bool,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct LensConfig {
+ // runnables
+ pub run: bool,
+ pub debug: bool,
+
+ // implementations
+ pub implementations: bool,
+
+ // references
+ pub method_refs: bool,
+ pub refs_adt: bool, // for Struct, Enum and Union
+ pub refs_trait: bool, // for Trait
+ pub enum_variant_refs: bool,
+}
+
+impl LensConfig {
+ pub fn any(&self) -> bool {
+ self.run
+ || self.debug
+ || self.implementations
+ || self.method_refs
+ || self.refs_adt
+ || self.refs_trait
+ || self.enum_variant_refs
+ }
+
+ pub fn none(&self) -> bool {
+ !self.any()
+ }
+
+ pub fn runnable(&self) -> bool {
+ self.run || self.debug
+ }
+
+ pub fn references(&self) -> bool {
+ self.method_refs || self.refs_adt || self.refs_trait || self.enum_variant_refs
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct HoverActionsConfig {
+ pub implementations: bool,
+ pub references: bool,
+ pub run: bool,
+ pub debug: bool,
+ pub goto_type_def: bool,
+}
+
+impl HoverActionsConfig {
+ pub const NO_ACTIONS: Self = Self {
+ implementations: false,
+ references: false,
+ run: false,
+ debug: false,
+ goto_type_def: false,
+ };
+
+ pub fn any(&self) -> bool {
+ self.implementations || self.references || self.runnable() || self.goto_type_def
+ }
+
+ pub fn none(&self) -> bool {
+ !self.any()
+ }
+
+ pub fn runnable(&self) -> bool {
+ self.run || self.debug
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct FilesConfig {
+ pub watcher: FilesWatcher,
+ pub exclude: Vec<AbsPathBuf>,
+}
+
+#[derive(Debug, Clone)]
+pub enum FilesWatcher {
+ Client,
+ Server,
+}
+
+#[derive(Debug, Clone)]
+pub struct NotificationsConfig {
+ pub cargo_toml_not_found: bool,
+}
+
+#[derive(Debug, Clone)]
+pub enum RustfmtConfig {
+ Rustfmt { extra_args: Vec<String>, enable_range_formatting: bool },
+ CustomCommand { command: String, args: Vec<String> },
+}
+
+/// Configuration for runnable items, such as `main` function or tests.
+#[derive(Debug, Clone)]
+pub struct RunnablesConfig {
+ /// Custom command to be executed instead of `cargo` for runnables.
+ pub override_cargo: Option<String>,
+ /// Additional arguments for the `cargo`, e.g. `--release`.
+ pub cargo_extra_args: Vec<String>,
+}
+
+/// Configuration for workspace symbol search requests.
+#[derive(Debug, Clone)]
+pub struct WorkspaceSymbolConfig {
+ /// In which scope the symbol should be searched.
+ pub search_scope: WorkspaceSymbolSearchScope,
+ /// What kind of symbol is being searched for.
+ pub search_kind: WorkspaceSymbolSearchKind,
+ /// How many items are returned at most.
+ pub search_limit: usize,
+}
+
+pub struct ClientCommandsConfig {
+ pub run_single: bool,
+ pub debug_single: bool,
+ pub show_reference: bool,
+ pub goto_location: bool,
+ pub trigger_parameter_hints: bool,
+}
+
+#[derive(Debug)]
+pub struct ConfigUpdateError {
+ errors: Vec<(String, serde_json::Error)>,
+}
+
+impl fmt::Display for ConfigUpdateError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let errors = self.errors.iter().format_with("\n", |(key, e), f| {
+ f(key)?;
+ f(&": ")?;
+ f(e)
+ });
+ write!(
+ f,
+ "rust-analyzer found {} invalid config value{}:\n{}",
+ self.errors.len(),
+ if self.errors.len() == 1 { "" } else { "s" },
+ errors
+ )
+ }
+}
+
+impl Config {
+ pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
+ Config {
+ caps,
+ data: ConfigData::default(),
+ detached_files: Vec::new(),
+ discovered_projects: None,
+ root_path,
+ snippets: Default::default(),
+ }
+ }
+
+ pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
+ tracing::info!("updating config from JSON: {:#}", json);
+ if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
+ return Ok(());
+ }
+ let mut errors = Vec::new();
+ self.detached_files =
+ get_field::<Vec<PathBuf>>(&mut json, &mut errors, "detachedFiles", None, "[]")
+ .into_iter()
+ .map(AbsPathBuf::assert)
+ .collect();
+ patch_old_style::patch_json_for_outdated_configs(&mut json);
+ self.data = ConfigData::from_json(json, &mut errors);
+ tracing::debug!("deserialized config data: {:#?}", self.data);
+ self.snippets.clear();
+ for (name, def) in self.data.completion_snippets_custom.iter() {
+ if def.prefix.is_empty() && def.postfix.is_empty() {
+ continue;
+ }
+ let scope = match def.scope {
+ SnippetScopeDef::Expr => SnippetScope::Expr,
+ SnippetScopeDef::Type => SnippetScope::Type,
+ SnippetScopeDef::Item => SnippetScope::Item,
+ };
+ match Snippet::new(
+ &def.prefix,
+ &def.postfix,
+ &def.body,
+ def.description.as_ref().unwrap_or(name),
+ &def.requires,
+ scope,
+ ) {
+ Some(snippet) => self.snippets.push(snippet),
+ None => errors.push((
+ format!("snippet {name} is invalid"),
+ <serde_json::Error as serde::de::Error>::custom(
+ "snippet path is invalid or triggers are missing",
+ ),
+ )),
+ }
+ }
+
+ self.validate(&mut errors);
+
+ if errors.is_empty() {
+ Ok(())
+ } else {
+ Err(ConfigUpdateError { errors })
+ }
+ }
+
+ fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
+ use serde::de::Error;
+ if self.data.checkOnSave_command.is_empty() {
+ error_sink.push((
+ "/checkOnSave/command".to_string(),
+ serde_json::Error::custom("expected a non-empty string"),
+ ));
+ }
+ }
+
+ pub fn json_schema() -> serde_json::Value {
+ ConfigData::json_schema()
+ }
+
+ pub fn root_path(&self) -> &AbsPathBuf {
+ &self.root_path
+ }
+
+ pub fn caps(&self) -> &lsp_types::ClientCapabilities {
+ &self.caps
+ }
+
+ pub fn detached_files(&self) -> &[AbsPathBuf] {
+ &self.detached_files
+ }
+}
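
A sketch of how a host might drive `Config::new` and `Config::update` with LSP-style settings (hedged: the nested JSON layout relies on the flattening done by the generated `ConfigData::from_json`, which the `config_data!` macro produces and which is not shown in this hunk):

```rust
use lsp_types::ClientCapabilities;
use vfs::AbsPathBuf;

// Create a config rooted at `root` and apply a small settings blob.
fn configure(root: AbsPathBuf) -> Result<Config, ConfigUpdateError> {
    let mut config = Config::new(root, ClientCapabilities::default());
    config.update(serde_json::json!({
        "cargo": { "features": "all" },
        "checkOnSave": { "command": "clippy" }
    }))?;
    Ok(config)
}
```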
+
+macro_rules! try_ {
+ ($expr:expr) => {
+ || -> _ { Some($expr) }()
+ };
+}
+macro_rules! try_or {
+ ($expr:expr, $or:expr) => {
+ try_!($expr).unwrap_or($or)
+ };
+}
+
+macro_rules! try_or_def {
+ ($expr:expr) => {
+ try_!($expr).unwrap_or_default()
+ };
+}
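
These helpers let the capability accessors below use `?` over long `Option` chains in ordinary method bodies: the expression runs inside an immediately-invoked closure and the macro supplies the fallback. Roughly, a call like the `line_folding_only` accessor further down desugars to:

```rust
// Hand-expanded equivalent of
// `try_or_def!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?)`.
fn line_folding_only_desugared(caps: &lsp_types::ClientCapabilities) -> bool {
    (|| -> Option<bool> {
        Some(caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?)
    })()
    .unwrap_or_default()
}
```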
+
+impl Config {
+ pub fn linked_projects(&self) -> Vec<LinkedProject> {
+ match self.data.linkedProjects.as_slice() {
+ [] => match self.discovered_projects.as_ref() {
+ Some(discovered_projects) => {
+ let exclude_dirs: Vec<_> = self
+ .data
+ .files_excludeDirs
+ .iter()
+ .map(|p| self.root_path.join(p))
+ .collect();
+ discovered_projects
+ .iter()
+ .filter(|p| {
+ let (ProjectManifest::ProjectJson(path)
+ | ProjectManifest::CargoToml(path)) = p;
+ !exclude_dirs.iter().any(|p| path.starts_with(p))
+ })
+ .cloned()
+ .map(LinkedProject::from)
+ .collect()
+ }
+ None => Vec::new(),
+ },
+ linked_projects => linked_projects
+ .iter()
+ .filter_map(|linked_project| match linked_project {
+ ManifestOrProjectJson::Manifest(it) => {
+ let path = self.root_path.join(it);
+ ProjectManifest::from_manifest_file(path)
+ .map_err(|e| tracing::error!("failed to load linked project: {}", e))
+ .ok()
+ .map(Into::into)
+ }
+ ManifestOrProjectJson::ProjectJson(it) => {
+ Some(ProjectJson::new(&self.root_path, it.clone()).into())
+ }
+ })
+ .collect(),
+ }
+ }
+
+ pub fn did_save_text_document_dynamic_registration(&self) -> bool {
+ let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
+ caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
+ }
+
+ pub fn did_change_watched_files_dynamic_registration(&self) -> bool {
+ try_or_def!(
+ self.caps.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration?
+ )
+ }
+
+ pub fn prefill_caches(&self) -> bool {
+ self.data.cachePriming_enable
+ }
+
+ pub fn location_link(&self) -> bool {
+ try_or_def!(self.caps.text_document.as_ref()?.definition?.link_support?)
+ }
+
+ pub fn line_folding_only(&self) -> bool {
+ try_or_def!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?)
+ }
+
+ pub fn hierarchical_symbols(&self) -> bool {
+ try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .document_symbol
+ .as_ref()?
+ .hierarchical_document_symbol_support?
+ )
+ }
+
+ pub fn code_action_literals(&self) -> bool {
+ try_!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .code_action
+ .as_ref()?
+ .code_action_literal_support
+ .as_ref()?)
+ .is_some()
+ }
+
+ pub fn work_done_progress(&self) -> bool {
+ try_or_def!(self.caps.window.as_ref()?.work_done_progress?)
+ }
+
+ pub fn will_rename(&self) -> bool {
+ try_or_def!(self.caps.workspace.as_ref()?.file_operations.as_ref()?.will_rename?)
+ }
+
+ pub fn change_annotation_support(&self) -> bool {
+ try_!(self
+ .caps
+ .workspace
+ .as_ref()?
+ .workspace_edit
+ .as_ref()?
+ .change_annotation_support
+ .as_ref()?)
+ .is_some()
+ }
+
+ pub fn code_action_resolve(&self) -> bool {
+ try_or_def!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .code_action
+ .as_ref()?
+ .resolve_support
+ .as_ref()?
+ .properties
+ .as_slice())
+ .iter()
+ .any(|it| it == "edit")
+ }
+
+ pub fn signature_help_label_offsets(&self) -> bool {
+ try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .signature_help
+ .as_ref()?
+ .signature_information
+ .as_ref()?
+ .parameter_information
+ .as_ref()?
+ .label_offset_support?
+ )
+ }
+
+ pub fn completion_label_details_support(&self) -> bool {
+ try_!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .label_details_support
+ .as_ref()?)
+ .is_some()
+ }
+
+ pub fn offset_encoding(&self) -> OffsetEncoding {
+ if supports_utf8(&self.caps) {
+ OffsetEncoding::Utf8
+ } else {
+ OffsetEncoding::Utf16
+ }
+ }
+
+ fn experimental(&self, index: &'static str) -> bool {
+ try_or_def!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?)
+ }
+
+ pub fn code_action_group(&self) -> bool {
+ self.experimental("codeActionGroup")
+ }
+
+ pub fn server_status_notification(&self) -> bool {
+ self.experimental("serverStatusNotification")
+ }
+
+ pub fn publish_diagnostics(&self) -> bool {
+ self.data.diagnostics_enable
+ }
+
+ pub fn diagnostics(&self) -> DiagnosticsConfig {
+ DiagnosticsConfig {
+ proc_attr_macros_enabled: self.expand_proc_attr_macros(),
+ proc_macros_enabled: self.data.procMacro_enable,
+ disable_experimental: !self.data.diagnostics_experimental_enable,
+ disabled: self.data.diagnostics_disabled.clone(),
+ expr_fill_default: match self.data.assist_expressionFillDefault {
+ ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
+ ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
+ },
+ }
+ }
+
+ pub fn diagnostics_map(&self) -> DiagnosticsMapConfig {
+ DiagnosticsMapConfig {
+ remap_prefix: self.data.diagnostics_remapPrefix.clone(),
+ warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(),
+ warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(),
+ }
+ }
+
+ pub fn lru_capacity(&self) -> Option<usize> {
+ self.data.lru_capacity
+ }
+
+ pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, Vec<OsString>)> {
+ if !self.data.procMacro_enable {
+ return None;
+ }
+ let path = match &self.data.procMacro_server {
+ Some(it) => self.root_path.join(it),
+ None => AbsPathBuf::assert(std::env::current_exe().ok()?),
+ };
+ Some((path, vec!["proc-macro".into()]))
+ }
+
+ pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
+ &self.data.procMacro_ignored
+ }
+
+ pub fn expand_proc_attr_macros(&self) -> bool {
+ self.data.procMacro_enable && self.data.procMacro_attributes_enable
+ }
+
+ pub fn files(&self) -> FilesConfig {
+ FilesConfig {
+ watcher: match self.data.files_watcher {
+ FilesWatcherDef::Client if self.did_change_watched_files_dynamic_registration() => {
+ FilesWatcher::Client
+ }
+ _ => FilesWatcher::Server,
+ },
+ exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
+ }
+ }
+
+ pub fn notifications(&self) -> NotificationsConfig {
+ NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound }
+ }
+
+ pub fn cargo_autoreload(&self) -> bool {
+ self.data.cargo_autoreload
+ }
+
+ pub fn run_build_scripts(&self) -> bool {
+ self.data.cargo_buildScripts_enable || self.data.procMacro_enable
+ }
+
+ pub fn cargo(&self) -> CargoConfig {
+ let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
+ if rustc_src == "discover" {
+ RustcSource::Discover
+ } else {
+ RustcSource::Path(self.root_path.join(rustc_src))
+ }
+ });
+
+ CargoConfig {
+ no_default_features: self.data.cargo_noDefaultFeatures,
+ all_features: matches!(self.data.cargo_features, CargoFeatures::All),
+ features: match &self.data.cargo_features {
+ CargoFeatures::All => vec![],
+ CargoFeatures::Listed(it) => it.clone(),
+ },
+ target: self.data.cargo_target.clone(),
+ no_sysroot: self.data.cargo_noSysroot,
+ rustc_source,
+ unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
+ wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
+ run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
+ }
+ }
+
+ pub fn rustfmt(&self) -> RustfmtConfig {
+ match &self.data.rustfmt_overrideCommand {
+ Some(args) if !args.is_empty() => {
+ let mut args = args.clone();
+ let command = args.remove(0);
+ RustfmtConfig::CustomCommand { command, args }
+ }
+ Some(_) | None => RustfmtConfig::Rustfmt {
+ extra_args: self.data.rustfmt_extraArgs.clone(),
+ enable_range_formatting: self.data.rustfmt_rangeFormatting_enable,
+ },
+ }
+ }
+
+ pub fn flycheck(&self) -> Option<FlycheckConfig> {
+ if !self.data.checkOnSave_enable {
+ return None;
+ }
+ let flycheck_config = match &self.data.checkOnSave_overrideCommand {
+ Some(args) if !args.is_empty() => {
+ let mut args = args.clone();
+ let command = args.remove(0);
+ FlycheckConfig::CustomCommand { command, args }
+ }
+ Some(_) | None => FlycheckConfig::CargoCommand {
+ command: self.data.checkOnSave_command.clone(),
+ target_triple: self
+ .data
+ .checkOnSave_target
+ .clone()
+ .or_else(|| self.data.cargo_target.clone()),
+ all_targets: self.data.checkOnSave_allTargets,
+ no_default_features: self
+ .data
+ .checkOnSave_noDefaultFeatures
+ .unwrap_or(self.data.cargo_noDefaultFeatures),
+ all_features: matches!(
+ self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
+ CargoFeatures::All
+ ),
+ features: match self
+ .data
+ .checkOnSave_features
+ .clone()
+ .unwrap_or_else(|| self.data.cargo_features.clone())
+ {
+ CargoFeatures::All => vec![],
+ CargoFeatures::Listed(it) => it,
+ },
+ extra_args: self.data.checkOnSave_extraArgs.clone(),
+ },
+ };
+ Some(flycheck_config)
+ }
+
+ pub fn runnables(&self) -> RunnablesConfig {
+ RunnablesConfig {
+ override_cargo: self.data.runnables_command.clone(),
+ cargo_extra_args: self.data.runnables_extraArgs.clone(),
+ }
+ }
+
+ pub fn inlay_hints(&self) -> InlayHintsConfig {
+ InlayHintsConfig {
+ render_colons: self.data.inlayHints_renderColons,
+ type_hints: self.data.inlayHints_typeHints_enable,
+ parameter_hints: self.data.inlayHints_parameterHints_enable,
+ chaining_hints: self.data.inlayHints_chainingHints_enable,
+ closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable {
+ ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
+ ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
+ ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
+ },
+ lifetime_elision_hints: match self.data.inlayHints_lifetimeElisionHints_enable {
+ LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
+ LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
+ LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
+ },
+ hide_named_constructor_hints: self.data.inlayHints_typeHints_hideNamedConstructor,
+ hide_closure_initialization_hints: self
+ .data
+ .inlayHints_typeHints_hideClosureInitialization,
+ reborrow_hints: match self.data.inlayHints_reborrowHints_enable {
+ ReborrowHintsDef::Always => ide::ReborrowHints::Always,
+ ReborrowHintsDef::Never => ide::ReborrowHints::Never,
+ ReborrowHintsDef::Mutable => ide::ReborrowHints::MutableOnly,
+ },
+ binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
+ param_names_for_lifetime_elision_hints: self
+ .data
+ .inlayHints_lifetimeElisionHints_useParameterNames,
+ max_length: self.data.inlayHints_maxLength,
+ closing_brace_hints_min_lines: if self.data.inlayHints_closingBraceHints_enable {
+ Some(self.data.inlayHints_closingBraceHints_minLines)
+ } else {
+ None
+ },
+ }
+ }
+
+ fn insert_use_config(&self) -> InsertUseConfig {
+ InsertUseConfig {
+ granularity: match self.data.imports_granularity_group {
+ ImportGranularityDef::Preserve => ImportGranularity::Preserve,
+ ImportGranularityDef::Item => ImportGranularity::Item,
+ ImportGranularityDef::Crate => ImportGranularity::Crate,
+ ImportGranularityDef::Module => ImportGranularity::Module,
+ },
+ enforce_granularity: self.data.imports_granularity_enforce,
+ prefix_kind: match self.data.imports_prefix {
+ ImportPrefixDef::Plain => PrefixKind::Plain,
+ ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
+ ImportPrefixDef::BySelf => PrefixKind::BySelf,
+ },
+ group: self.data.imports_group_enable,
+ skip_glob_imports: !self.data.imports_merge_glob,
+ }
+ }
+
+ pub fn completion(&self) -> CompletionConfig {
+ CompletionConfig {
+ enable_postfix_completions: self.data.completion_postfix_enable,
+ enable_imports_on_the_fly: self.data.completion_autoimport_enable
+ && completion_item_edit_resolve(&self.caps),
+ enable_self_on_the_fly: self.data.completion_autoself_enable,
+ enable_private_editable: self.data.completion_privateEditable_enable,
+ callable: match self.data.completion_callable_snippets {
+ CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
+ CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
+ CallableCompletionDef::None => None,
+ },
+ insert_use: self.insert_use_config(),
+ snippet_cap: SnippetCap::new(try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .snippet_support?
+ )),
+ snippets: self.snippets.clone(),
+ }
+ }
+
+ pub fn snippet_cap(&self) -> bool {
+ self.experimental("snippetTextEdit")
+ }
+
+ pub fn assist(&self) -> AssistConfig {
+ AssistConfig {
+ snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
+ allowed: None,
+ insert_use: self.insert_use_config(),
+ }
+ }
+
+ pub fn join_lines(&self) -> JoinLinesConfig {
+ JoinLinesConfig {
+ join_else_if: self.data.joinLines_joinElseIf,
+ remove_trailing_comma: self.data.joinLines_removeTrailingComma,
+ unwrap_trivial_blocks: self.data.joinLines_unwrapTrivialBlock,
+ join_assignments: self.data.joinLines_joinAssignments,
+ }
+ }
+
+ pub fn call_info(&self) -> CallInfoConfig {
+ CallInfoConfig {
+ params_only: matches!(self.data.signatureInfo_detail, SignatureDetail::Parameters),
+ docs: self.data.signatureInfo_documentation_enable,
+ }
+ }
+
+ pub fn lens(&self) -> LensConfig {
+ LensConfig {
+ run: self.data.lens_enable && self.data.lens_run_enable,
+ debug: self.data.lens_enable && self.data.lens_debug_enable,
+ implementations: self.data.lens_enable && self.data.lens_implementations_enable,
+ method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
+ refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
+ refs_trait: self.data.lens_enable && self.data.lens_references_trait_enable,
+ enum_variant_refs: self.data.lens_enable
+ && self.data.lens_references_enumVariant_enable,
+ }
+ }
+
+ pub fn hover_actions(&self) -> HoverActionsConfig {
+ let enable = self.experimental("hoverActions") && self.data.hover_actions_enable;
+ HoverActionsConfig {
+ implementations: enable && self.data.hover_actions_implementations_enable,
+ references: enable && self.data.hover_actions_references_enable,
+ run: enable && self.data.hover_actions_run_enable,
+ debug: enable && self.data.hover_actions_debug_enable,
+ goto_type_def: enable && self.data.hover_actions_gotoTypeDef_enable,
+ }
+ }
+
+ pub fn highlighting_strings(&self) -> bool {
+ self.data.semanticHighlighting_strings_enable
+ }
+
+ pub fn hover(&self) -> HoverConfig {
+ HoverConfig {
+ links_in_hover: self.data.hover_links_enable,
+ documentation: self.data.hover_documentation_enable.then(|| {
+ let is_markdown = try_or_def!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .hover
+ .as_ref()?
+ .content_format
+ .as_ref()?
+ .as_slice())
+ .contains(&MarkupKind::Markdown);
+ if is_markdown {
+ HoverDocFormat::Markdown
+ } else {
+ HoverDocFormat::PlainText
+ }
+ }),
+ }
+ }
+
+ pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
+ WorkspaceSymbolConfig {
+ search_scope: match self.data.workspace_symbol_search_scope {
+ WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
+ WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
+ WorkspaceSymbolSearchScope::WorkspaceAndDependencies
+ }
+ },
+ search_kind: match self.data.workspace_symbol_search_kind {
+ WorkspaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
+ WorkspaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
+ },
+ search_limit: self.data.workspace_symbol_search_limit,
+ }
+ }
+
+ pub fn semantic_tokens_refresh(&self) -> bool {
+ try_or_def!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?)
+ }
+
+ pub fn code_lens_refresh(&self) -> bool {
+ try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?)
+ }
+
+ pub fn insert_replace_support(&self) -> bool {
+ try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .insert_replace_support?
+ )
+ }
+
+ pub fn client_commands(&self) -> ClientCommandsConfig {
+ let commands =
+ try_or!(self.caps.experimental.as_ref()?.get("commands")?, &serde_json::Value::Null);
+ let commands: Option<lsp_ext::ClientCommandOptions> =
+ serde_json::from_value(commands.clone()).ok();
+ let force = commands.is_none() && self.data.lens_forceCustomCommands;
+ let commands = commands.map(|it| it.commands).unwrap_or_default();
+
+ let get = |name: &str| commands.iter().any(|it| it == name) || force;
+
+ ClientCommandsConfig {
+ run_single: get("rust-analyzer.runSingle"),
+ debug_single: get("rust-analyzer.debugSingle"),
+ show_reference: get("rust-analyzer.showReferences"),
+ goto_location: get("rust-analyzer.gotoLocation"),
+ trigger_parameter_hints: get("editor.action.triggerParameterHints"),
+ }
+ }
+
+ pub fn highlight_related(&self) -> HighlightRelatedConfig {
+ HighlightRelatedConfig {
+ references: self.data.highlightRelated_references_enable,
+ break_points: self.data.highlightRelated_breakPoints_enable,
+ exit_points: self.data.highlightRelated_exitPoints_enable,
+ yield_points: self.data.highlightRelated_yieldPoints_enable,
+ }
+ }
+
+ pub fn prime_caches_num_threads(&self) -> u8 {
+ match self.data.cachePriming_numThreads {
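+ // 0 means "auto": use the number of physical cores, saturating at u8::MAX.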
+ 0 => num_cpus::get_physical().try_into().unwrap_or(u8::MAX),
+ n => n,
+ }
+ }
+
+ pub fn typing_autoclose_angle(&self) -> bool {
+ self.data.typing_autoClosingAngleBrackets_enable
+ }
+}
+// Deserialization definitions
+
+macro_rules! create_bool_or_string_de {
+ ($ident:ident<$bool:literal, $string:literal>) => {
+ fn $ident<'de, D>(d: D) -> Result<(), D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct V;
+ impl<'de> serde::de::Visitor<'de> for V {
+ type Value = ();
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str(concat!(
+ stringify!($bool),
+ " or \"",
+ stringify!($string),
+ "\""
+ ))
+ }
+
+ fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match v {
+ $bool => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Bool(v),
+ &self,
+ )),
+ }
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match v {
+ $string => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Str(v),
+ &self,
+ )),
+ }
+ }
+
+ fn visit_enum<A>(self, a: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::EnumAccess<'de>,
+ {
+ use serde::de::VariantAccess;
+ let (variant, va) = a.variant::<&'de str>()?;
+ va.unit_variant()?;
+ match variant {
+ $string => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Str(variant),
+ &self,
+ )),
+ }
+ }
+ }
+ d.deserialize_any(V)
+ }
+ };
+}
+create_bool_or_string_de!(true_or_always<true, "always">);
+create_bool_or_string_de!(false_or_never<false, "never">);
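+// These deserializers accept a legacy boolean or its string spelling; e.g.
+// `true_or_always` accepts the JSON boolean `true` or the string "always" and
+// rejects everything else.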
+
+macro_rules! named_unit_variant {
+ ($variant:ident) => {
+ pub(super) fn $variant<'de, D>(deserializer: D) -> Result<(), D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct V;
+ impl<'de> serde::de::Visitor<'de> for V {
+ type Value = ();
+ fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str(concat!("\"", stringify!($variant), "\""))
+ }
+ fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
+ if value == stringify!($variant) {
+ Ok(())
+ } else {
+ Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
+ }
+ }
+ }
+ deserializer.deserialize_str(V)
+ }
+ };
+}
+
+mod de_unit_v {
+ named_unit_variant!(all);
+ named_unit_variant!(skip_trivial);
+ named_unit_variant!(mutable);
+ named_unit_variant!(with_block);
+}
+
+#[derive(Deserialize, Debug, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+enum SnippetScopeDef {
+ Expr,
+ Item,
+ Type,
+}
+
+impl Default for SnippetScopeDef {
+ fn default() -> Self {
+ SnippetScopeDef::Expr
+ }
+}
+
+#[derive(Deserialize, Debug, Clone, Default)]
+#[serde(default)]
+struct SnippetDef {
+ #[serde(deserialize_with = "single_or_array")]
+ prefix: Vec<String>,
+ #[serde(deserialize_with = "single_or_array")]
+ postfix: Vec<String>,
+ description: Option<String>,
+ #[serde(deserialize_with = "single_or_array")]
+ body: Vec<String>,
+ #[serde(deserialize_with = "single_or_array")]
+ requires: Vec<String>,
+ scope: SnippetScopeDef,
+}
+
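+// Accepts either a single string or an array of strings; e.g. both
+// `"prefix": "pd"` and `"prefix": ["pd", "pdbg"]` (illustrative values)
+// deserialize into a `Vec<String>`.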
+fn single_or_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ struct SingleOrVec;
+
+ impl<'de> serde::de::Visitor<'de> for SingleOrVec {
+ type Value = Vec<String>;
+
+ fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ formatter.write_str("string or array of strings")
+ }
+
+ fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(vec![value.to_owned()])
+ }
+
+ fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::SeqAccess<'de>,
+ {
+ Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
+ }
+ }
+
+ deserializer.deserialize_any(SingleOrVec)
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+enum ManifestOrProjectJson {
+ Manifest(PathBuf),
+ ProjectJson(ProjectJsonData),
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum ExprFillDefaultDef {
+ Todo,
+ Default,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum ImportGranularityDef {
+ Preserve,
+ Item,
+ Crate,
+ Module,
+}
+
+#[derive(Deserialize, Debug, Copy, Clone)]
+#[serde(rename_all = "snake_case")]
+enum CallableCompletionDef {
+ FillArguments,
+ AddParentheses,
+ None,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+enum CargoFeatures {
+ #[serde(deserialize_with = "de_unit_v::all")]
+ All,
+ Listed(Vec<String>),
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+enum LifetimeElisionDef {
+ #[serde(deserialize_with = "true_or_always")]
+ Always,
+ #[serde(deserialize_with = "false_or_never")]
+ Never,
+ #[serde(deserialize_with = "de_unit_v::skip_trivial")]
+ SkipTrivial,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+enum ClosureReturnTypeHintsDef {
+ #[serde(deserialize_with = "true_or_always")]
+ Always,
+ #[serde(deserialize_with = "false_or_never")]
+ Never,
+ #[serde(deserialize_with = "de_unit_v::with_block")]
+ WithBlock,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+enum ReborrowHintsDef {
+ #[serde(deserialize_with = "true_or_always")]
+ Always,
+ #[serde(deserialize_with = "false_or_never")]
+ Never,
+ #[serde(deserialize_with = "de_unit_v::mutable")]
+ Mutable,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum FilesWatcherDef {
+ Client,
+ Notify,
+ Server,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum ImportPrefixDef {
+ Plain,
+ #[serde(alias = "self")]
+ BySelf,
+ #[serde(alias = "crate")]
+ ByCrate,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum WorkspaceSymbolSearchScopeDef {
+ Workspace,
+ WorkspaceAndDependencies,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum SignatureDetail {
+ Full,
+ Parameters,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case")]
+enum WorkspaceSymbolSearchKindDef {
+ OnlyTypes,
+ AllSymbols,
+}
+
+macro_rules! _config_data {
+ (struct $name:ident {
+ $(
+ $(#[doc=$doc:literal])*
+ $field:ident $(| $alias:ident)*: $ty:ty = $default:expr,
+ )*
+ }) => {
+ #[allow(non_snake_case)]
+ #[derive(Debug, Clone)]
+ struct $name { $($field: $ty,)* }
+ impl $name {
+ fn from_json(mut json: serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> $name {
+ $name {$(
+ $field: get_field(
+ &mut json,
+ error_sink,
+ stringify!($field),
+ None$(.or(Some(stringify!($alias))))*,
+ $default,
+ ),
+ )*}
+ }
+
+ fn json_schema() -> serde_json::Value {
+ schema(&[
+ $({
+ let field = stringify!($field);
+ let ty = stringify!($ty);
+
+ (field, ty, &[$($doc),*], $default)
+ },)*
+ ])
+ }
+
+ #[cfg(test)]
+ fn manual() -> String {
+ manual(&[
+ $({
+ let field = stringify!($field);
+ let ty = stringify!($ty);
+
+ (field, ty, &[$($doc),*], $default)
+ },)*
+ ])
+ }
+ }
+
+ #[test]
+ fn fields_are_sorted() {
+ [$(stringify!($field)),*].windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
+ }
+ };
+}
+use _config_data as config_data;
+
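+// Config field names use `_` as a path separator; e.g. `cargo_buildScripts_enable`
+// is looked up via the JSON pointer `/cargo/buildScripts/enable`.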
+fn get_field<T: DeserializeOwned>(
+ json: &mut serde_json::Value,
+ error_sink: &mut Vec<(String, serde_json::Error)>,
+ field: &'static str,
+ alias: Option<&'static str>,
+ default: &str,
+) -> T {
+ let default = serde_json::from_str(default).unwrap();
+ // XXX: check the alias first, to work around VS Code pre-filling the
+ // defaults instead of sending an empty object.
+ alias
+ .into_iter()
+ .chain(iter::once(field))
+ .find_map(move |field| {
+ let mut pointer = field.replace('_', "/");
+ pointer.insert(0, '/');
+ json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) {
+ Ok(it) => Some(it),
+ Err(e) => {
+ tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
+ error_sink.push((pointer, e));
+ None
+ }
+ })
+ })
+ .unwrap_or(default)
+}
+
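+// Builds the `package.json` schema: each field becomes a dotted setting key,
+// e.g. `cargo_buildScripts_enable` turns into `rust-analyzer.cargo.buildScripts.enable`.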
+fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
+ for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
+ fn key(f: &str) -> &str {
+ f.splitn(2, '_').next().unwrap()
+ }
+ assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
+ }
+
+ let map = fields
+ .iter()
+ .map(|(field, ty, doc, default)| {
+ let name = field.replace('_', ".");
+ let name = format!("rust-analyzer.{}", name);
+ let props = field_props(field, ty, doc, default);
+ (name, props)
+ })
+ .collect::<serde_json::Map<_, _>>();
+ map.into()
+}
+
+fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
+ let doc = doc_comment_to_string(doc);
+ let doc = doc.trim_end_matches('\n');
+ assert!(
+ doc.ends_with('.') && doc.starts_with(char::is_uppercase),
+ "bad docs for {}: {:?}",
+ field,
+ doc
+ );
+ let default = default.parse::<serde_json::Value>().unwrap();
+
+ let mut map = serde_json::Map::default();
+ macro_rules! set {
+ ($($key:literal: $value:tt),*$(,)?) => {{$(
+ map.insert($key.into(), serde_json::json!($value));
+ )*}};
+ }
+ set!("markdownDescription": doc);
+ set!("default": default);
+
+ match ty {
+ "bool" => set!("type": "boolean"),
+ "usize" => set!("type": "integer", "minimum": 0),
+ "String" => set!("type": "string"),
+ "Vec<String>" => set! {
+ "type": "array",
+ "items": { "type": "string" },
+ },
+ "Vec<PathBuf>" => set! {
+ "type": "array",
+ "items": { "type": "string" },
+ },
+ "FxHashSet<String>" => set! {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ },
+ "FxHashMap<Box<str>, Box<[Box<str>]>>" => set! {
+ "type": "object",
+ },
+ "FxHashMap<String, SnippetDef>" => set! {
+ "type": "object",
+ },
+ "FxHashMap<String, String>" => set! {
+ "type": "object",
+ },
+ "Option<usize>" => set! {
+ "type": ["null", "integer"],
+ "minimum": 0,
+ },
+ "Option<String>" => set! {
+ "type": ["null", "string"],
+ },
+ "Option<PathBuf>" => set! {
+ "type": ["null", "string"],
+ },
+ "Option<bool>" => set! {
+ "type": ["null", "boolean"],
+ },
+ "Option<Vec<String>>" => set! {
+ "type": ["null", "array"],
+ "items": { "type": "string" },
+ },
+ "MergeBehaviorDef" => set! {
+ "type": "string",
+ "enum": ["none", "crate", "module"],
+ "enumDescriptions": [
+ "Do not merge imports at all.",
+ "Merge imports from the same crate into a single `use` statement.",
+ "Merge imports from the same module into a single `use` statement."
+ ],
+ },
+ "ExprFillDefaultDef" => set! {
+ "type": "string",
+ "enum": ["todo", "default"],
+ "enumDescriptions": [
+ "Fill missing expressions with the `todo` macro",
+ "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
+ ],
+ },
+ "ImportGranularityDef" => set! {
+ "type": "string",
+ "enum": ["preserve", "crate", "module", "item"],
+ "enumDescriptions": [
+ "Do not change the granularity of any imports and preserve the original structure written by the developer.",
+ "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+ "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+ "Flatten imports so that each has its own use statement."
+ ],
+ },
+ "ImportPrefixDef" => set! {
+ "type": "string",
+ "enum": [
+ "plain",
+ "self",
+ "crate"
+ ],
+ "enumDescriptions": [
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
+ "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
+ ],
+ },
+ "Vec<ManifestOrProjectJson>" => set! {
+ "type": "array",
+ "items": { "type": ["string", "object"] },
+ },
+ "WorkspaceSymbolSearchScopeDef" => set! {
+ "type": "string",
+ "enum": ["workspace", "workspace_and_dependencies"],
+ "enumDescriptions": [
+ "Search in current workspace only.",
+ "Search in current workspace and dependencies."
+ ],
+ },
+ "WorkspaceSymbolSearchKindDef" => set! {
+ "type": "string",
+ "enum": ["only_types", "all_symbols"],
+ "enumDescriptions": [
+ "Search for types only.",
+ "Search for all symbols kinds."
+ ],
+ },
+ "ParallelCachePrimingNumThreads" => set! {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ "LifetimeElisionDef" => set! {
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "skip_trivial"
+ ],
+ "enumDescriptions": [
+ "Always show lifetime elision hints.",
+ "Never show lifetime elision hints.",
+ "Only show lifetime elision hints if a return type is involved."
+ ]
+ },
+ "ClosureReturnTypeHintsDef" => set! {
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "with_block"
+ ],
+ "enumDescriptions": [
+ "Always show type hints for return types of closures.",
+ "Never show type hints for return types of closures.",
+ "Only show type hints for return types of closures with blocks."
+ ]
+ },
+ "ReborrowHintsDef" => set! {
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "mutable"
+ ],
+ "enumDescriptions": [
+ "Always show reborrow hints.",
+ "Never show reborrow hints.",
+ "Only show mutable reborrow hints."
+ ]
+ },
+ "CargoFeatures" => set! {
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo",
+ ]
+ },
+ {
+ "type": "array",
+ "items": { "type": "string" }
+ }
+ ],
+ },
+ "Option<CargoFeatures>" => set! {
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo",
+ ]
+ },
+ {
+ "type": "array",
+ "items": { "type": "string" }
+ },
+ { "type": "null" }
+ ],
+ },
+ "CallableCompletionDef" => set! {
+ "type": "string",
+ "enum": [
+ "fill_arguments",
+ "add_parentheses",
+ "none",
+ ],
+ "enumDescriptions": [
+ "Add call parentheses and pre-fill arguments.",
+ "Add call parentheses.",
+ "Do no snippet completions for callables."
+ ]
+ },
+ "SignatureDetail" => set! {
+ "type": "string",
+ "enum": ["full", "parameters"],
+ "enumDescriptions": [
+ "Show the entire signature.",
+ "Show only the parameters."
+ ],
+ },
+ "FilesWatcherDef" => set! {
+ "type": "string",
+ "enum": ["client", "server"],
+ "enumDescriptions": [
+ "Use the client (editor) to watch files for changes",
+ "Use server-side file watching",
+ ],
+ },
+ _ => panic!("missing entry for {}: {}", ty, default),
+ }
+
+ map.into()
+}
+
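+// Renders the asciidoc reference; e.g. a field `foo_bar` (illustrative name) with
+// default `true` becomes `[[rust-analyzer.foo.bar]]rust-analyzer.foo.bar (default: `true`)::`
+// followed by its doc comment.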
+#[cfg(test)]
+fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
+ fields
+ .iter()
+ .map(|(field, _ty, doc, default)| {
+ let name = format!("rust-analyzer.{}", field.replace('_', "."));
+ let doc = doc_comment_to_string(*doc);
+ if default.contains('\n') {
+ format!(
+ r#"[[{}]]{}::
++
+--
+Default:
+----
+{}
+----
+{}
+--
+"#,
+ name, name, default, doc
+ )
+ } else {
+ format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
+ }
+ })
+ .collect::<String>()
+}
+
+fn doc_comment_to_string(doc: &[&str]) -> String {
+ doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use std::fs;
+
+ use test_utils::{ensure_file_contents, project_root};
+
+ use super::*;
+
+ #[test]
+ fn generate_package_json_config() {
+ let s = Config::json_schema();
+ let schema = format!("{:#}", s);
+ let mut schema = schema
+ .trim_start_matches('{')
+ .trim_end_matches('}')
+ .replace(" ", " ")
+ .replace('\n', "\n ")
+ .trim_start_matches('\n')
+ .trim_end()
+ .to_string();
+ schema.push_str(",\n");
+
+ // Transform the asciidoc form link to markdown style.
+ //
+ // https://link[text] => [text](https://link)
+ let url_matches = schema.match_indices("https://");
+ let mut url_offsets = url_matches.map(|(idx, _)| idx).collect::<Vec<usize>>();
+ url_offsets.reverse();
+ for idx in url_offsets {
+ let link = &schema[idx..];
+ // matching on whitespace to ignore normal links
+ if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
+ if link.chars().nth(link_end) == Some('[') {
+ if let Some(link_text_end) = link.find(']') {
+ let link_text = link[link_end..(link_text_end + 1)].to_string();
+
+ schema.replace_range((idx + link_end)..(idx + link_text_end + 1), "");
+ schema.insert(idx, '(');
+ schema.insert(idx + link_end + 1, ')');
+ schema.insert_str(idx, &link_text);
+ }
+ }
+ }
+ }
+
+ let package_json_path = project_root().join("editors/code/package.json");
+ let mut package_json = fs::read_to_string(&package_json_path).unwrap();
+
+ let start_marker = " \"$generated-start\": {},\n";
+ let end_marker = " \"$generated-end\": {}\n";
+
+ let start = package_json.find(start_marker).unwrap() + start_marker.len();
+ let end = package_json.find(end_marker).unwrap();
+
+ let p = remove_ws(&package_json[start..end]);
+ let s = remove_ws(&schema);
+ if !p.contains(&s) {
+ package_json.replace_range(start..end, &schema);
+ ensure_file_contents(&package_json_path, &package_json)
+ }
+ }
+
+ #[test]
+ fn generate_config_documentation() {
+ let docs_path = project_root().join("docs/user/generated_config.adoc");
+ let expected = ConfigData::manual();
+ ensure_file_contents(&docs_path, &expected);
+ }
+
+ fn remove_ws(text: &str) -> String {
+ text.replace(char::is_whitespace, "")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
new file mode 100644
index 000000000..472e2e0ee
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
@@ -0,0 +1,135 @@
+//! See [`patch_json_for_outdated_configs`]
+use serde_json::{json, Value};
+
+/// This function patches the JSON config to use the new expected keys.
+/// That is, known old config keys are detected here and converted to their new equivalents.
+/// See https://github.com/rust-lang/rust-analyzer/pull/12010
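+///
+/// For example, the legacy key `{"cargo": {"runBuildScripts": true}}` is merged
+/// back into the config as `{"cargo": {"buildScripts": {"enable": true}}}`.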
+pub(super) fn patch_json_for_outdated_configs(json: &mut Value) {
+ let copy = json.clone();
+
+ macro_rules! patch {
+ ($(
+ $($src:ident).+ -> $($dst:ident).+ ;
+ )+) => { $(
+ match copy.pointer(concat!($("/", stringify!($src)),+)).cloned() {
+ Some(Value::Object(_)) | None => (),
+ Some(it) => {
+ let mut last = it;
+ for segment in [$(stringify!($dst)),+].into_iter().rev() {
+ last = Value::Object(serde_json::Map::from_iter(std::iter::once((segment.to_string(), last))));
+ }
+
+ merge(json, last);
+ },
+ }
+ )+ };
+ }
+
+ patch! {
+ assist.allowMergingIntoGlobImports -> imports.merge.glob;
+ assist.exprFillDefault -> assist.expressionFillDefault;
+ assist.importEnforceGranularity -> imports.granularity.enforce;
+ assist.importGranularity -> imports.granularity.group;
+ assist.importMergeBehavior -> imports.granularity.group;
+ assist.importMergeBehaviour -> imports.granularity.group;
+ assist.importGroup -> imports.group.enable;
+ assist.importPrefix -> imports.prefix;
+ primeCaches.enable -> cachePriming.enable;
+ cache.warmup -> cachePriming.enable;
+ cargo.loadOutDirsFromCheck -> cargo.buildScripts.enable;
+ cargo.runBuildScripts -> cargo.buildScripts.enable;
+ cargo.runBuildScriptsCommand -> cargo.buildScripts.overrideCommand;
+ cargo.useRustcWrapperForBuildScripts -> cargo.buildScripts.useRustcWrapper;
+ diagnostics.enableExperimental -> diagnostics.experimental.enable;
+ experimental.procAttrMacros -> procMacro.attributes.enable;
+ highlighting.strings -> semanticHighlighting.strings.enable;
+ highlightRelated.breakPoints -> semanticHighlighting.breakPoints.enable;
+ highlightRelated.exitPoints -> semanticHighlighting.exitPoints.enable;
+ highlightRelated.yieldPoints -> semanticHighlighting.yieldPoints.enable;
+ highlightRelated.references -> semanticHighlighting.references.enable;
+ hover.documentation -> hover.documentation.enable;
+ hover.linksInHover -> hover.links.enable;
+ hoverActions.linksInHover -> hover.links.enable;
+ hoverActions.debug -> hover.actions.debug.enable;
+ hoverActions.enable -> hover.actions.enable;
+ hoverActions.gotoTypeDef -> hover.actions.gotoTypeDef.enable;
+ hoverActions.implementations -> hover.actions.implementations.enable;
+ hoverActions.references -> hover.actions.references.enable;
+ hoverActions.run -> hover.actions.run.enable;
+ inlayHints.chainingHints -> inlayHints.chainingHints.enable;
+ inlayHints.closureReturnTypeHints -> inlayHints.closureReturnTypeHints.enable;
+ inlayHints.hideNamedConstructorHints -> inlayHints.typeHints.hideNamedConstructorHints;
+ inlayHints.parameterHints -> inlayHints.parameterHints.enable;
+ inlayHints.reborrowHints -> inlayHints.reborrowHints.enable;
+ inlayHints.typeHints -> inlayHints.typeHints.enable;
+ lruCapacity -> lru.capacity;
+ runnables.cargoExtraArgs -> runnables.extraArgs ;
+ runnables.overrideCargo -> runnables.command ;
+ rustcSource -> rustc.source;
+ rustfmt.enableRangeFormatting -> rustfmt.rangeFormatting.enable;
+ }
+
+ // completion.snippets -> completion.snippets.custom;
+ if let Some(Value::Object(obj)) = copy.pointer("/completion/snippets").cloned() {
+ if obj.len() != 1 || obj.get("custom").is_none() {
+ merge(
+ json,
+ json! {{
+ "completion": {
+ "snippets": {
+ "custom": obj
+ },
+ },
+ }},
+ );
+ }
+ }
+
+ // callInfo_full -> signatureInfo_detail, signatureInfo_documentation_enable
+ if let Some(Value::Bool(b)) = copy.pointer("/callInfo/full") {
+ let sig_info = match b {
+ true => json!({ "signatureInfo": {
+ "documentation": {"enable": true}},
+ "detail": "full"
+ }),
+ false => json!({ "signatureInfo": {
+ "documentation": {"enable": false}},
+ "detail": "parameters"
+ }),
+ };
+ merge(json, sig_info);
+ }
+
+ // cargo_allFeatures, cargo_features -> cargo_features
+ if let Some(Value::Bool(true)) = copy.pointer("/cargo/allFeatures") {
+ merge(json, json!({ "cargo": { "features": "all" } }));
+ }
+
+ // checkOnSave_allFeatures, checkOnSave_features -> checkOnSave_features
+ if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") {
+ merge(json, json!({ "checkOnSave": { "features": "all" } }));
+ }
+
+ // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets
+ let res = match (
+ copy.pointer("/completion/addCallArgumentSnippets"),
+ copy.pointer("/completion/addCallParenthesis"),
+ ) {
+ (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"),
+ (Some(Value::Bool(true)), _) => json!("add_parentheses"),
+ (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"),
+ (_, _) => return,
+ };
+ merge(json, json!({ "completion": { "callable": {"snippets": res }} }));
+}
+
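+// Recursively merges `src` into `dst`: objects are merged key by key, while any
+// non-object value in `src` overwrites the corresponding value in `dst`. For example,
+// merging `{"a": {"b": 1}}` into `{"a": {"c": 2}}` yields `{"a": {"b": 1, "c": 2}}`.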
+fn merge(dst: &mut Value, src: Value) {
+ match (dst, src) {
+ (Value::Object(dst), Value::Object(src)) => {
+ for (k, v) in src {
+ merge(dst.entry(k).or_insert(v.clone()), v)
+ }
+ }
+ (dst, src) => *dst = src,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
new file mode 100644
index 000000000..202a01adf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
@@ -0,0 +1,109 @@
+//! Bookkeeping that keeps diagnostics easily in sync with the client.
+pub(crate) mod to_proto;
+
+use std::{mem, sync::Arc};
+
+use ide::FileId;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::lsp_ext;
+
+pub(crate) type CheckFixes = Arc<FxHashMap<FileId, Vec<Fix>>>;
+
+#[derive(Debug, Default, Clone)]
+pub struct DiagnosticsMapConfig {
+ pub remap_prefix: FxHashMap<String, String>,
+ pub warnings_as_info: Vec<String>,
+ pub warnings_as_hint: Vec<String>,
+}
+
+#[derive(Debug, Default, Clone)]
+pub(crate) struct DiagnosticCollection {
+ // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
+ pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+ // FIXME: should be Vec<flycheck::Diagnostic>
+ pub(crate) check: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+ pub(crate) check_fixes: CheckFixes,
+ changes: FxHashSet<FileId>,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Fix {
+ // Fixes may be triggerable from multiple ranges.
+ pub(crate) ranges: Vec<lsp_types::Range>,
+ pub(crate) action: lsp_ext::CodeAction,
+}
+
+impl DiagnosticCollection {
+ pub(crate) fn clear_check(&mut self) {
+ Arc::make_mut(&mut self.check_fixes).clear();
+ self.changes.extend(self.check.drain().map(|(key, _value)| key))
+ }
+
+ pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
+ self.native.remove(&file_id);
+ self.changes.insert(file_id);
+ }
+
+ pub(crate) fn add_check_diagnostic(
+ &mut self,
+ file_id: FileId,
+ diagnostic: lsp_types::Diagnostic,
+ fix: Option<Fix>,
+ ) {
+ let diagnostics = self.check.entry(file_id).or_default();
+ for existing_diagnostic in diagnostics.iter() {
+ if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
+ return;
+ }
+ }
+
+ let check_fixes = Arc::make_mut(&mut self.check_fixes);
+ check_fixes.entry(file_id).or_default().extend(fix);
+ diagnostics.push(diagnostic);
+ self.changes.insert(file_id);
+ }
+
+ pub(crate) fn set_native_diagnostics(
+ &mut self,
+ file_id: FileId,
+ diagnostics: Vec<lsp_types::Diagnostic>,
+ ) {
+ if let Some(existing_diagnostics) = self.native.get(&file_id) {
+ if existing_diagnostics.len() == diagnostics.len()
+ && diagnostics
+ .iter()
+ .zip(existing_diagnostics)
+ .all(|(new, existing)| are_diagnostics_equal(new, existing))
+ {
+ return;
+ }
+ }
+
+ self.native.insert(file_id, diagnostics);
+ self.changes.insert(file_id);
+ }
+
+ pub(crate) fn diagnostics_for(
+ &self,
+ file_id: FileId,
+ ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
+ let native = self.native.get(&file_id).into_iter().flatten();
+ let check = self.check.get(&file_id).into_iter().flatten();
+ native.chain(check)
+ }
+
+ pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
+ if self.changes.is_empty() {
+ return None;
+ }
+ Some(mem::take(&mut self.changes))
+ }
+}
+
+fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagnostic) -> bool {
+ left.source == right.source
+ && left.severity == right.severity
+ && left.range == right.range
+ && left.message == right.message
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt
new file mode 100644
index 000000000..c3b540e31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/clippy_pass_by_ref.txt
@@ -0,0 +1,301 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ severity: Some(
+ Warning,
+ ),
+ code: Some(
+ String(
+ "trivially_copy_pass_by_ref",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "trivially_copy_pass_by_ref",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "this argument is passed by reference, but would be more efficient if passed by value\n#[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]\nfor further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/lib.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 8,
+ },
+ end: Position {
+ line: 0,
+ character: 19,
+ },
+ },
+ },
+ message: "lint level defined here",
+ },
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ },
+ message: "consider passing by value instead: `self`",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/lib.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 8,
+ },
+ end: Position {
+ line: 0,
+ character: 19,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "trivially_copy_pass_by_ref",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "trivially_copy_pass_by_ref",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "lint level defined here",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "trivially_copy_pass_by_ref",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "trivially_copy_pass_by_ref",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "consider passing by value instead: `self`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/mir/tagset.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 41,
+ character: 23,
+ },
+ end: Position {
+ line: 41,
+ character: 28,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt
new file mode 100644
index 000000000..989e5cf66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/handles_macro_location.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 1,
+ character: 4,
+ },
+ end: Position {
+ line: 1,
+ character: 26,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0277",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0277",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "can't compare `{integer}` with `&str`\nthe trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt
new file mode 100644
index 000000000..fe5cf9b3b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/macro_compiler_error.txt
@@ -0,0 +1,229 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 271,
+ character: 8,
+ },
+ end: Position {
+ line: 271,
+ character: 50,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: None,
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "Please register your known path in the path module",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 264,
+ character: 8,
+ },
+ end: Position {
+ line: 264,
+ character: 76,
+ },
+ },
+ },
+ message: "Exact error occurred here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/data.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 79,
+ character: 15,
+ },
+ end: Position {
+ line: 79,
+ character: 41,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: None,
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "Please register your known path in the path module",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 264,
+ character: 8,
+ },
+ end: Position {
+ line: 264,
+ character: 76,
+ },
+ },
+ },
+ message: "Exact error occurred here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 264,
+ character: 8,
+ },
+ end: Position {
+ line: 264,
+ character: 76,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: None,
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "Please register your known path in the path module",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/path.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 271,
+ character: 8,
+ },
+ end: Position {
+ line: 271,
+ character: 50,
+ },
+ },
+ },
+ message: "Error originated from macro call here",
+ },
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/hir_def/src/data.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 79,
+ character: 15,
+ },
+ end: Position {
+ line: 79,
+ character: 41,
+ },
+ },
+ },
+ message: "Error originated from macro call here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt
new file mode 100644
index 000000000..df00b330b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/reasonable_line_numbers_from_empty_file.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/bin/current.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0601",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0601",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "`main` function not found in crate `current`\nconsider adding a `main` function to `src/bin/current.rs`",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt
new file mode 100644
index 000000000..dc36aa761
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_incompatible_type_for_trait.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/list_iter.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 51,
+ character: 4,
+ },
+ end: Position {
+ line: 51,
+ character: 47,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0053",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0053",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "method `next` has an incompatible type for trait\nexpected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt
new file mode 100644
index 000000000..d557196c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_mismatched_type.txt
@@ -0,0 +1,64 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/runtime/compiler_support.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 47,
+ character: 64,
+ },
+ end: Position {
+ line: 47,
+ character: 69,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0308",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0308",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "mismatched types\nexpected usize, found u32",
+ related_information: None,
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt
new file mode 100644
index 000000000..a100fa07f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_range_map_lsp_position.txt
@@ -0,0 +1,184 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 17,
+ },
+ end: Position {
+ line: 3,
+ character: 27,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0308",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0308",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "mismatched types\nexpected `u32`, found `&str`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 11,
+ },
+ end: Position {
+ line: 3,
+ character: 14,
+ },
+ },
+ },
+ message: "expected due to this",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 11,
+ },
+ end: Position {
+ line: 3,
+ character: 14,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "E0308",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0308",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "expected due to this",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/crates/test_diagnostics/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 17,
+ },
+ end: Position {
+ line: 3,
+ character: 27,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt
new file mode 100644
index 000000000..1c5c33622
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable.txt
@@ -0,0 +1,212 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Warning,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "unused variable: `foo`\n#[warn(unused_variables)] on by default",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "consider prefixing with an underscore: `_foo`",
+ },
+ ],
+ ),
+ tags: Some(
+ [
+ Unnecessary,
+ ],
+ ),
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "consider prefixing with an underscore: `_foo`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "consider prefixing with an underscore: `_foo`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ new_text: "_foo",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt
new file mode 100644
index 000000000..3ab3412d9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_hint.txt
@@ -0,0 +1,212 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "unused variable: `foo`\n#[warn(unused_variables)] on by default",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "consider prefixing with an underscore: `_foo`",
+ },
+ ],
+ ),
+ tags: Some(
+ [
+ Unnecessary,
+ ],
+ ),
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "consider prefixing with an underscore: `_foo`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "consider prefixing with an underscore: `_foo`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ new_text: "_foo",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt
new file mode 100644
index 000000000..0702420aa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_unused_variable_as_info.txt
@@ -0,0 +1,212 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Information,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "unused variable: `foo`\n#[warn(unused_variables)] on by default",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "consider prefixing with an underscore: `_foo`",
+ },
+ ],
+ ),
+ tags: Some(
+ [
+ Unnecessary,
+ ],
+ ),
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "unused_variables",
+ ),
+ ),
+ code_description: None,
+ source: Some(
+ "rustc",
+ ),
+ message: "consider prefixing with an underscore: `_foo`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "consider prefixing with an underscore: `_foo`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/driver/subcommand/repl.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 290,
+ character: 8,
+ },
+ end: Position {
+ line: 290,
+ character: 11,
+ },
+ },
+ new_text: "_foo",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt
new file mode 100644
index 000000000..8ec92888c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/rustc_wrong_number_of_parameters.txt
@@ -0,0 +1,184 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 103,
+ character: 17,
+ },
+ end: Position {
+ line: 103,
+ character: 29,
+ },
+ },
+ severity: Some(
+ Error,
+ ),
+ code: Some(
+ String(
+ "E0061",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0061",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "this function takes 2 parameters but 3 parameters were supplied\nexpected 2 parameters",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 218,
+ character: 4,
+ },
+ end: Position {
+ line: 230,
+ character: 5,
+ },
+ },
+ },
+ message: "defined here",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 218,
+ character: 4,
+ },
+ end: Position {
+ line: 230,
+ character: 5,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "E0061",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "doc.rust-lang.org",
+ ),
+ ),
+ port: None,
+ path: "/error-index.html",
+ query: None,
+ fragment: Some(
+ "E0061",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "rustc",
+ ),
+ message: "defined here",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/compiler/ty/select.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 103,
+ character: 17,
+ },
+ end: Position {
+ line: 103,
+ character: 29,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt
new file mode 100644
index 000000000..4365e450d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/test_data/snap_multi_line_fix.txt
@@ -0,0 +1,388 @@
+[
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ severity: Some(
+ Warning,
+ ),
+ code: Some(
+ String(
+ "let_and_return",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "let_and_return",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "returning the result of a let binding from a block\n`#[warn(clippy::let_and_return)]` on by default\nfor further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ },
+ message: "unnecessary let binding",
+ },
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ },
+ message: "return the expression directly: `(0..10).collect()`",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "let_and_return",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "let_and_return",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "unnecessary let binding",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: None,
+ },
+ MappedRustDiagnostic {
+ url: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ diagnostic: Diagnostic {
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ severity: Some(
+ Hint,
+ ),
+ code: Some(
+ String(
+ "let_and_return",
+ ),
+ ),
+ code_description: Some(
+ CodeDescription {
+ href: Url {
+ scheme: "https",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: Some(
+ Domain(
+ "rust-lang.github.io",
+ ),
+ ),
+ port: None,
+ path: "/rust-clippy/master/index.html",
+ query: None,
+ fragment: Some(
+ "let_and_return",
+ ),
+ },
+ },
+ ),
+ source: Some(
+ "clippy",
+ ),
+ message: "return the expression directly: `(0..10).collect()`",
+ related_information: Some(
+ [
+ DiagnosticRelatedInformation {
+ location: Location {
+ uri: Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ },
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ },
+ message: "original diagnostic",
+ },
+ ],
+ ),
+ tags: None,
+ data: None,
+ },
+ fix: Some(
+ Fix {
+ ranges: [
+ Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ ],
+ action: CodeAction {
+ title: "return the expression directly: `(0..10).collect()`",
+ group: None,
+ kind: Some(
+ CodeActionKind(
+ "quickfix",
+ ),
+ ),
+ command: None,
+ edit: Some(
+ SnippetWorkspaceEdit {
+ changes: Some(
+ {
+ Url {
+ scheme: "file",
+ cannot_be_a_base: false,
+ username: "",
+ password: None,
+ host: None,
+ port: None,
+ path: "/test/src/main.rs",
+ query: None,
+ fragment: None,
+ }: [
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 2,
+ character: 4,
+ },
+ end: Position {
+ line: 2,
+ character: 30,
+ },
+ },
+ new_text: "",
+ },
+ TextEdit {
+ range: Range {
+ start: Position {
+ line: 3,
+ character: 4,
+ },
+ end: Position {
+ line: 3,
+ character: 5,
+ },
+ },
+ new_text: "(0..10).collect()",
+ },
+ ],
+ },
+ ),
+ document_changes: None,
+ change_annotations: None,
+ },
+ ),
+ is_preferred: Some(
+ true,
+ ),
+ data: None,
+ },
+ },
+ ),
+ },
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
new file mode 100644
index 000000000..cff4bd7f6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -0,0 +1,1843 @@
+//! This module provides the functionality needed to convert diagnostics from
+//! `cargo check` JSON format to the LSP diagnostic format.
+use std::collections::HashMap;
+
+use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
+use itertools::Itertools;
+use stdx::format_to;
+use vfs::{AbsPath, AbsPathBuf};
+
+use crate::{
+ global_state::GlobalStateSnapshot, line_index::OffsetEncoding, lsp_ext,
+ to_proto::url_from_abs_path,
+};
+
+use super::{DiagnosticsMapConfig, Fix};
+
+/// Determines the LSP severity from a diagnostic
+fn diagnostic_severity(
+ config: &DiagnosticsMapConfig,
+ level: flycheck::DiagnosticLevel,
+ code: Option<flycheck::DiagnosticCode>,
+) -> Option<lsp_types::DiagnosticSeverity> {
+ let res = match level {
+ DiagnosticLevel::Ice => lsp_types::DiagnosticSeverity::ERROR,
+ DiagnosticLevel::Error => lsp_types::DiagnosticSeverity::ERROR,
+ DiagnosticLevel::Warning => match &code {
+ // HACK: special case for `warnings` rustc lint.
+ Some(code)
+ if config.warnings_as_hint.iter().any(|lint| {
+ lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
+ }) =>
+ {
+ lsp_types::DiagnosticSeverity::HINT
+ }
+ // HACK: special case for `warnings` rustc lint.
+ Some(code)
+ if config.warnings_as_info.iter().any(|lint| {
+ lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
+ }) =>
+ {
+ lsp_types::DiagnosticSeverity::INFORMATION
+ }
+ _ => lsp_types::DiagnosticSeverity::WARNING,
+ },
+ DiagnosticLevel::Note => lsp_types::DiagnosticSeverity::INFORMATION,
+ DiagnosticLevel::Help => lsp_types::DiagnosticSeverity::HINT,
+ _ => return None,
+ };
+ Some(res)
+}
+
+/// Checks whether a file name is from a macro invocation and does not refer to an actual file.
+fn is_dummy_macro_file(file_name: &str) -> bool {
+ // FIXME: current rustc does not seem to emit `<macro file>` files anymore?
+ file_name.starts_with('<') && file_name.ends_with('>')
+}
+
+/// Converts a Rust span to an LSP location
+fn location(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> lsp_types::Location {
+ let file_name = resolve_path(config, workspace_root, &span.file_name);
+ let uri = url_from_abs_path(&file_name);
+
+ let range = {
+ let offset_encoding = snap.config.offset_encoding();
+ lsp_types::Range::new(
+ position(&offset_encoding, span, span.line_start, span.column_start),
+ position(&offset_encoding, span, span.line_end, span.column_end),
+ )
+ };
+ lsp_types::Location::new(uri, range)
+}
+
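+/// Converts a rustc 1-based line/column pair into a 0-based LSP `Position`,
+/// translating the character-based column offset into UTF-8 or UTF-16 units
+/// as dictated by the client's offset encoding.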
+fn position(
+ offset_encoding: &OffsetEncoding,
+ span: &DiagnosticSpan,
+ line_offset: usize,
+ column_offset: usize,
+) -> lsp_types::Position {
+ let line_index = line_offset - span.line_start;
+
+ let mut true_column_offset = column_offset;
+ if let Some(line) = span.text.get(line_index) {
+ if line.text.chars().count() == line.text.len() {
+ // all one byte utf-8 char
+ return lsp_types::Position {
+ line: (line_offset as u32).saturating_sub(1),
+ character: (column_offset as u32).saturating_sub(1),
+ };
+ }
+ let mut char_offset = 0;
+ let len_func = match offset_encoding {
+ OffsetEncoding::Utf8 => char::len_utf8,
+ OffsetEncoding::Utf16 => char::len_utf16,
+ };
+ for c in line.text.chars() {
+ char_offset += 1;
+ if char_offset > column_offset {
+ break;
+ }
+ true_column_offset += len_func(c) - 1;
+ }
+ }
+
+ lsp_types::Position {
+ line: (line_offset as u32).saturating_sub(1),
+ character: (true_column_offset as u32).saturating_sub(1),
+ }
+}
+
+/// Extracts a suitable "primary" location from a rustc diagnostic.
+///
+/// This takes locations pointing into the standard library, or generally outside the current
+/// workspace into account and tries to avoid those, in case macros are involved.
+fn primary_location(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> lsp_types::Location {
+ let span_stack = std::iter::successors(Some(span), |span| Some(&span.expansion.as_ref()?.span));
+ for span in span_stack.clone() {
+ let abs_path = resolve_path(config, workspace_root, &span.file_name);
+ if !is_dummy_macro_file(&span.file_name) && abs_path.starts_with(workspace_root) {
+ return location(config, workspace_root, span, snap);
+ }
+ }
+
+ // Fall back to the outermost macro invocation if no suitable span comes up.
+ let last_span = span_stack.last().unwrap();
+ location(config, workspace_root, last_span, snap)
+}
+
+/// Converts a secondary Rust span to an LSP related-information entry
+///
+/// If the span is unlabelled this will return `None`.
+fn diagnostic_related_information(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> Option<lsp_types::DiagnosticRelatedInformation> {
+ let message = span.label.clone()?;
+ let location = location(config, workspace_root, span, snap);
+ Some(lsp_types::DiagnosticRelatedInformation { location, message })
+}
+
+/// Resolves a path by applying any matching path-prefix remapping and then
+/// joining the result to the workspace root.
+fn resolve_path(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ file_name: &str,
+) -> AbsPathBuf {
+ match config
+ .remap_prefix
+ .iter()
+ .find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name)))
+ {
+ Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)),
+ None => workspace_root.join(file_name),
+ }
+}
+
+struct SubDiagnostic {
+ related: lsp_types::DiagnosticRelatedInformation,
+ suggested_fix: Option<Fix>,
+}
+
+enum MappedRustChildDiagnostic {
+ SubDiagnostic(SubDiagnostic),
+ MessageLine(String),
+}
+
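+/// Maps a child (non-root) rustc diagnostic to either a `SubDiagnostic`,
+/// optionally carrying a quickfix built from the child's suggested
+/// replacements, or to an extra message line when the child has no primary
+/// spans.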
+fn map_rust_child_diagnostic(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ rd: &flycheck::Diagnostic,
+ snap: &GlobalStateSnapshot,
+) -> MappedRustChildDiagnostic {
+ let spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
+ if spans.is_empty() {
+ // `rustc` uses these spanless children as a way to print multi-line
+ // messages
+ return MappedRustChildDiagnostic::MessageLine(rd.message.clone());
+ }
+
+ let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
+ let mut suggested_replacements = Vec::new();
+ for &span in &spans {
+ if let Some(suggested_replacement) = &span.suggested_replacement {
+ if !suggested_replacement.is_empty() {
+ suggested_replacements.push(suggested_replacement);
+ }
+ let location = location(config, workspace_root, span, snap);
+ let edit = lsp_types::TextEdit::new(location.range, suggested_replacement.clone());
+
+ // Only actually emit a quickfix if the suggestion is "valid enough".
+ // We accept both "MaybeIncorrect" and "MachineApplicable". "MaybeIncorrect" means that
+ // the suggestion is *complete* (contains no placeholders where code needs to be
+ // inserted), but might not be what the user wants, or might need minor adjustments.
+ if matches!(
+ span.suggestion_applicability,
+ None | Some(Applicability::MaybeIncorrect | Applicability::MachineApplicable)
+ ) {
+ edit_map.entry(location.uri).or_default().push(edit);
+ }
+ }
+ }
+
+ // rustc renders suggestion diagnostics by appending the suggested replacement, so do the same
+ // here, otherwise the diagnostic text is missing useful information.
+ let mut message = rd.message.clone();
+ if !suggested_replacements.is_empty() {
+ message.push_str(": ");
+ let suggestions =
+ suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", ");
+ message.push_str(&suggestions);
+ }
+
+ if edit_map.is_empty() {
+ MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic {
+ related: lsp_types::DiagnosticRelatedInformation {
+ location: location(config, workspace_root, spans[0], snap),
+ message,
+ },
+ suggested_fix: None,
+ })
+ } else {
+ MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic {
+ related: lsp_types::DiagnosticRelatedInformation {
+ location: location(config, workspace_root, spans[0], snap),
+ message: message.clone(),
+ },
+ suggested_fix: Some(Fix {
+ ranges: spans
+ .iter()
+ .map(|&span| location(config, workspace_root, span, snap).range)
+ .collect(),
+ action: lsp_ext::CodeAction {
+ title: message,
+ group: None,
+ kind: Some(lsp_types::CodeActionKind::QUICKFIX),
+ edit: Some(lsp_ext::SnippetWorkspaceEdit {
+ // FIXME: there's no good reason to use edit_map here....
+ changes: Some(edit_map),
+ document_changes: None,
+ change_annotations: None,
+ }),
+ is_preferred: Some(true),
+ data: None,
+ command: None,
+ },
+ }),
+ })
+ }
+}
+
+#[derive(Debug)]
+pub(crate) struct MappedRustDiagnostic {
+ pub(crate) url: lsp_types::Url,
+ pub(crate) diagnostic: lsp_types::Diagnostic,
+ pub(crate) fix: Option<Fix>,
+}
+
+/// Converts a Rust root diagnostic to LSP form
+///
+/// This flattens the Rust diagnostic by:
+///
+/// 1. Creating an LSP diagnostic with the root message and primary span.
+/// 2. Adding any labelled secondary spans to `relatedInformation`.
+/// 3. Categorising child diagnostics as either `SuggestedFix`es,
+///    `relatedInformation` or additional message lines.
+///
+/// If the diagnostic has no primary span this returns an empty `Vec`.
+pub(crate) fn map_rust_diagnostic_to_lsp(
+ config: &DiagnosticsMapConfig,
+ rd: &flycheck::Diagnostic,
+ workspace_root: &AbsPath,
+ snap: &GlobalStateSnapshot,
+) -> Vec<MappedRustDiagnostic> {
+ let primary_spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
+ if primary_spans.is_empty() {
+ return Vec::new();
+ }
+
+ let severity = diagnostic_severity(config, rd.level, rd.code.clone());
+
+ let mut source = String::from("rustc");
+ let mut code = rd.code.as_ref().map(|c| c.code.clone());
+ if let Some(code_val) = &code {
+ // See if this is an RFC #2103 scoped lint (e.g. from Clippy)
+ let scoped_code: Vec<&str> = code_val.split("::").collect();
+ if scoped_code.len() == 2 {
+ source = String::from(scoped_code[0]);
+ code = Some(String::from(scoped_code[1]));
+ }
+ }
+
+ let mut needs_primary_span_label = true;
+ let mut subdiagnostics = Vec::new();
+ let mut tags = Vec::new();
+
+ for secondary_span in rd.spans.iter().filter(|s| !s.is_primary) {
+ let related = diagnostic_related_information(config, workspace_root, secondary_span, snap);
+ if let Some(related) = related {
+ subdiagnostics.push(SubDiagnostic { related, suggested_fix: None });
+ }
+ }
+
+ let mut message = rd.message.clone();
+ for child in &rd.children {
+ let child = map_rust_child_diagnostic(config, workspace_root, child, snap);
+ match child {
+ MappedRustChildDiagnostic::SubDiagnostic(sub) => {
+ subdiagnostics.push(sub);
+ }
+ MappedRustChildDiagnostic::MessageLine(message_line) => {
+ format_to!(message, "\n{}", message_line);
+
+ // These secondary messages usually duplicate the content of the
+ // primary span label.
+ needs_primary_span_label = false;
+ }
+ }
+ }
+
+ if let Some(code) = &rd.code {
+ let code = code.code.as_str();
+ if matches!(
+ code,
+ "dead_code"
+ | "unknown_lints"
+ | "unreachable_code"
+ | "unused_attributes"
+ | "unused_imports"
+ | "unused_macros"
+ | "unused_variables"
+ ) {
+ tags.push(lsp_types::DiagnosticTag::UNNECESSARY);
+ }
+
+ if matches!(code, "deprecated") {
+ tags.push(lsp_types::DiagnosticTag::DEPRECATED);
+ }
+ }
+
+ let code_description = match source.as_str() {
+ "rustc" => rustc_code_description(code.as_deref()),
+ "clippy" => clippy_code_description(code.as_deref()),
+ _ => None,
+ };
+
+ primary_spans
+ .iter()
+ .flat_map(|primary_span| {
+ let primary_location = primary_location(config, workspace_root, primary_span, snap);
+
+ let mut message = message.clone();
+ if needs_primary_span_label {
+ if let Some(primary_span_label) = &primary_span.label {
+ format_to!(message, "\n{}", primary_span_label);
+ }
+ }
+
+ // Each primary diagnostic span may result in multiple LSP diagnostics.
+ let mut diagnostics = Vec::new();
+
+ let mut related_info_macro_calls = vec![];
+
+            // If the error comes from a macro expansion, add related info pointing to
+            // where the error originated.
+            // We also generate an additional diagnostic, so that the exact place inside
+            // the macro is highlighted as well as the error's origin.
+ let span_stack = std::iter::successors(Some(*primary_span), |span| {
+ Some(&span.expansion.as_ref()?.span)
+ });
+ for (i, span) in span_stack.enumerate() {
+ if is_dummy_macro_file(&span.file_name) {
+ continue;
+ }
+
+ // First span is the original diagnostic, others are macro call locations that
+ // generated that code.
+ let is_in_macro_call = i != 0;
+
+ let secondary_location = location(config, workspace_root, span, snap);
+ if secondary_location == primary_location {
+ continue;
+ }
+ related_info_macro_calls.push(lsp_types::DiagnosticRelatedInformation {
+ location: secondary_location.clone(),
+ message: if is_in_macro_call {
+ "Error originated from macro call here".to_string()
+ } else {
+ "Actual error occurred here".to_string()
+ },
+ });
+ // For the additional in-macro diagnostic we add the inverse message pointing to the error location in code.
+ let information_for_additional_diagnostic =
+ vec![lsp_types::DiagnosticRelatedInformation {
+ location: primary_location.clone(),
+ message: "Exact error occurred here".to_string(),
+ }];
+
+ let diagnostic = lsp_types::Diagnostic {
+ range: secondary_location.range,
+ // downgrade to hint if we're pointing at the macro
+ severity: Some(lsp_types::DiagnosticSeverity::HINT),
+ code: code.clone().map(lsp_types::NumberOrString::String),
+ code_description: code_description.clone(),
+ source: Some(source.clone()),
+ message: message.clone(),
+ related_information: Some(information_for_additional_diagnostic),
+ tags: if tags.is_empty() { None } else { Some(tags.clone()) },
+ data: None,
+ };
+ diagnostics.push(MappedRustDiagnostic {
+ url: secondary_location.uri,
+ diagnostic,
+ fix: None,
+ });
+ }
+
+ // Emit the primary diagnostic.
+ diagnostics.push(MappedRustDiagnostic {
+ url: primary_location.uri.clone(),
+ diagnostic: lsp_types::Diagnostic {
+ range: primary_location.range,
+ severity,
+ code: code.clone().map(lsp_types::NumberOrString::String),
+ code_description: code_description.clone(),
+ source: Some(source.clone()),
+ message,
+ related_information: {
+ let info = related_info_macro_calls
+ .iter()
+ .cloned()
+ .chain(subdiagnostics.iter().map(|sub| sub.related.clone()))
+ .collect::<Vec<_>>();
+ if info.is_empty() {
+ None
+ } else {
+ Some(info)
+ }
+ },
+ tags: if tags.is_empty() { None } else { Some(tags.clone()) },
+ data: None,
+ },
+ fix: None,
+ });
+
+ // Emit hint-level diagnostics for all `related_information` entries such as "help"s.
+ // This is useful because they will show up in the user's editor, unlike
+ // `related_information`, which just produces hard-to-read links, at least in VS Code.
+ let back_ref = lsp_types::DiagnosticRelatedInformation {
+ location: primary_location,
+ message: "original diagnostic".to_string(),
+ };
+ for sub in &subdiagnostics {
+ diagnostics.push(MappedRustDiagnostic {
+ url: sub.related.location.uri.clone(),
+ fix: sub.suggested_fix.clone(),
+ diagnostic: lsp_types::Diagnostic {
+ range: sub.related.location.range,
+ severity: Some(lsp_types::DiagnosticSeverity::HINT),
+ code: code.clone().map(lsp_types::NumberOrString::String),
+ code_description: code_description.clone(),
+ source: Some(source.clone()),
+ message: sub.related.message.clone(),
+ related_information: Some(vec![back_ref.clone()]),
+ tags: None, // don't apply modifiers again
+ data: None,
+ },
+ });
+ }
+
+ diagnostics
+ })
+ .collect()
+}
+
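+/// Builds a `CodeDescription` linking to the rustc error index, but only for
+/// codes of the form `E` followed by four ASCII digits (e.g. `E0308`).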
+fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
+ code.filter(|code| {
+ let mut chars = code.chars();
+ chars.next().map_or(false, |c| c == 'E')
+ && chars.by_ref().take(4).all(|c| c.is_ascii_digit())
+ && chars.next().is_none()
+ })
+ .and_then(|code| {
+ lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{}", code))
+ .ok()
+ .map(|href| lsp_types::CodeDescription { href })
+ })
+}
+
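+/// Builds a `CodeDescription` linking to the Clippy lint documentation for
+/// the given lint name.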
+fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
+ code.and_then(|code| {
+ lsp_types::Url::parse(&format!(
+ "https://rust-lang.github.io/rust-clippy/master/index.html#{}",
+ code
+ ))
+ .ok()
+ .map(|href| lsp_types::CodeDescription { href })
+ })
+}
+
+#[cfg(test)]
+#[cfg(not(windows))]
+mod tests {
+ use std::{convert::TryInto, path::Path};
+
+ use crate::{config::Config, global_state::GlobalState};
+
+ use super::*;
+
+ use expect_test::{expect_file, ExpectFile};
+ use lsp_types::ClientCapabilities;
+
+ fn check(diagnostics_json: &str, expect: ExpectFile) {
+ check_with_config(DiagnosticsMapConfig::default(), diagnostics_json, expect)
+ }
+
+ fn check_with_config(config: DiagnosticsMapConfig, diagnostics_json: &str, expect: ExpectFile) {
+ let diagnostic: flycheck::Diagnostic = serde_json::from_str(diagnostics_json).unwrap();
+ let workspace_root: &AbsPath = Path::new("/test/").try_into().unwrap();
+ let (sender, _) = crossbeam_channel::unbounded();
+ let state = GlobalState::new(
+ sender,
+ Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()),
+ );
+ let snap = state.snapshot();
+ let actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
+ expect.assert_debug_eq(&actual)
+ }
+
+ #[test]
+ fn rustc_incompatible_type_for_trait() {
+ check(
+ r##"{
+ "message": "method `next` has an incompatible type for trait",
+ "code": {
+ "code": "E0053",
+ "explanation": "\nThe parameters of any trait method must match between a trait implementation\nand the trait definition.\n\nHere are a couple examples of this error:\n\n```compile_fail,E0053\ntrait Foo {\n fn foo(x: u16);\n fn bar(&self);\n}\n\nstruct Bar;\n\nimpl Foo for Bar {\n // error, expected u16, found i16\n fn foo(x: i16) { }\n\n // error, types differ in mutability\n fn bar(&mut self) { }\n}\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "compiler/ty/list_iter.rs",
+ "byte_start": 1307,
+ "byte_end": 1350,
+ "line_start": 52,
+ "line_end": 52,
+ "column_start": 5,
+ "column_end": 48,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " fn next(&self) -> Option<&'list ty::Ref<M>> {",
+ "highlight_start": 5,
+ "highlight_end": 48
+ }
+ ],
+ "label": "types differ in mutability",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "expected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0053]: method `next` has an incompatible type for trait\n --> compiler/ty/list_iter.rs:52:5\n |\n52 | fn next(&self) -> Option<&'list ty::Ref<M>> {\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability\n |\n = note: expected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`\n\n"
+ }
+ "##,
+ expect_file!["./test_data/rustc_incompatible_type_for_trait.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_unused_variable() {
+ check(
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable.txt"],
+ );
+ }
+
+ #[test]
+ #[cfg(not(windows))]
+ fn rustc_unused_variable_as_info() {
+ check_with_config(
+ DiagnosticsMapConfig {
+ warnings_as_info: vec!["unused_variables".to_string()],
+ ..DiagnosticsMapConfig::default()
+ },
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable_as_info.txt"],
+ );
+ }
+
+ #[test]
+ #[cfg(not(windows))]
+ fn rustc_unused_variable_as_hint() {
+ check_with_config(
+ DiagnosticsMapConfig {
+ warnings_as_hint: vec!["unused_variables".to_string()],
+ ..DiagnosticsMapConfig::default()
+ },
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable_as_hint.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_wrong_number_of_parameters() {
+ check(
+ r##"{
+ "message": "this function takes 2 parameters but 3 parameters were supplied",
+ "code": {
+ "code": "E0061",
+ "explanation": "\nThe number of arguments passed to a function must match the number of arguments\nspecified in the function signature.\n\nFor example, a function like:\n\n```\nfn f(a: u16, b: &str) {}\n```\n\nMust always be called with exactly two arguments, e.g., `f(2, \"test\")`.\n\nNote that Rust does not have a notion of optional function arguments or\nvariadic functions (except for its C-FFI).\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "compiler/ty/select.rs",
+ "byte_start": 8787,
+ "byte_end": 9241,
+ "line_start": 219,
+ "line_end": 231,
+ "column_start": 5,
+ "column_end": 6,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " pub fn add_evidence(",
+ "highlight_start": 5,
+ "highlight_end": 25
+ },
+ {
+ "text": " &mut self,",
+ "highlight_start": 1,
+ "highlight_end": 19
+ },
+ {
+ "text": " target_poly: &ty::Ref<ty::Poly>,",
+ "highlight_start": 1,
+ "highlight_end": 41
+ },
+ {
+ "text": " evidence_poly: &ty::Ref<ty::Poly>,",
+ "highlight_start": 1,
+ "highlight_end": 43
+ },
+ {
+ "text": " ) {",
+ "highlight_start": 1,
+ "highlight_end": 8
+ },
+ {
+ "text": " match target_poly {",
+ "highlight_start": 1,
+ "highlight_end": 28
+ },
+ {
+ "text": " ty::Ref::Var(tvar, _) => self.add_var_evidence(tvar, evidence_poly),",
+ "highlight_start": 1,
+ "highlight_end": 81
+ },
+ {
+ "text": " ty::Ref::Fixed(target_ty) => {",
+ "highlight_start": 1,
+ "highlight_end": 43
+ },
+ {
+ "text": " let evidence_ty = evidence_poly.resolve_to_ty();",
+ "highlight_start": 1,
+ "highlight_end": 65
+ },
+ {
+ "text": " self.add_evidence_ty(target_ty, evidence_poly, evidence_ty)",
+ "highlight_start": 1,
+ "highlight_end": 76
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 14
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 10
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 6
+ }
+ ],
+ "label": "defined here",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "compiler/ty/select.rs",
+ "byte_start": 4045,
+ "byte_end": 4057,
+ "line_start": 104,
+ "line_end": 104,
+ "column_start": 18,
+ "column_end": 30,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " self.add_evidence(target_fixed, evidence_fixed, false);",
+ "highlight_start": 18,
+ "highlight_end": 30
+ }
+ ],
+ "label": "expected 2 parameters",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0061]: this function takes 2 parameters but 3 parameters were supplied\n --> compiler/ty/select.rs:104:18\n |\n104 | self.add_evidence(target_fixed, evidence_fixed, false);\n | ^^^^^^^^^^^^ expected 2 parameters\n...\n219 | / pub fn add_evidence(\n220 | | &mut self,\n221 | | target_poly: &ty::Ref<ty::Poly>,\n222 | | evidence_poly: &ty::Ref<ty::Poly>,\n... |\n230 | | }\n231 | | }\n | |_____- defined here\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_wrong_number_of_parameters.txt"],
+ );
+ }
+
+ #[test]
+ fn clippy_pass_by_ref() {
+ check(
+ r##"{
+ "message": "this argument is passed by reference, but would be more efficient if passed by value",
+ "code": {
+ "code": "clippy::trivially_copy_pass_by_ref",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "compiler/mir/tagset.rs",
+ "byte_start": 941,
+ "byte_end": 946,
+ "line_start": 42,
+ "line_end": 42,
+ "column_start": 24,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " pub fn is_disjoint(&self, other: Self) -> bool {",
+ "highlight_start": 24,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "lint level defined here",
+ "code": null,
+ "level": "note",
+ "spans": [
+ {
+ "file_name": "compiler/lib.rs",
+ "byte_start": 8,
+ "byte_end": 19,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 9,
+ "column_end": 20,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "#![warn(clippy::all)]",
+ "highlight_start": 9,
+ "highlight_end": 20
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "#[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref",
+ "code": null,
+ "level": "help",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider passing by value instead",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "compiler/mir/tagset.rs",
+ "byte_start": 941,
+ "byte_end": 946,
+ "line_start": 42,
+ "line_end": 42,
+ "column_start": 24,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " pub fn is_disjoint(&self, other: Self) -> bool {",
+ "highlight_start": 24,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "self",
+ "suggestion_applicability": "Unspecified",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: this argument is passed by reference, but would be more efficient if passed by value\n --> compiler/mir/tagset.rs:42:24\n |\n42 | pub fn is_disjoint(&self, other: Self) -> bool {\n | ^^^^^ help: consider passing by value instead: `self`\n |\nnote: lint level defined here\n --> compiler/lib.rs:1:9\n |\n1 | #![warn(clippy::all)]\n | ^^^^^^^^^^^\n = note: #[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref\n\n"
+ }"##,
+ expect_file!["./test_data/clippy_pass_by_ref.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_range_map_lsp_position() {
+ check(
+ r##"{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "Expected type did not match the received type.\n\nErroneous code examples:\n\n```compile_fail,E0308\nfn plus_one(x: i32) -> i32 {\n x + 1\n}\n\nplus_one(\"Not a number\");\n// ^^^^^^^^^^^^^^ expected `i32`, found `&str`\n\nif \"Not a bool\" {\n// ^^^^^^^^^^^^ expected `bool`, found `&str`\n}\n\nlet x: f32 = \"Not a float\";\n// --- ^^^^^^^^^^^^^ expected `f32`, found `&str`\n// |\n// expected due to this\n```\n\nThis error occurs when an expression was used in a place where the compiler\nexpected an expression of a different type. It can occur in several cases, the\nmost common being when calling a function and passing an argument which has a\ndifferent type than the matching type in the function declaration.\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "crates/test_diagnostics/src/main.rs",
+ "byte_start": 87,
+ "byte_end": 105,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 18,
+ "column_end": 24,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23",
+ "highlight_start": 18,
+ "highlight_end": 24
+ }
+ ],
+ "label": "expected `u32`, found `&str`",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "crates/test_diagnostics/src/main.rs",
+ "byte_start": 81,
+ "byte_end": 84,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 12,
+ "column_end": 15,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23",
+ "highlight_start": 12,
+ "highlight_end": 15
+ }
+ ],
+ "label": "expected due to this",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0308]: mismatched types\n --> crates/test_diagnostics/src/main.rs:4:18\n |\n4 | let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23\n | --- ^^^^^^ expected `u32`, found `&str`\n | |\n | expected due to this\n\n"
+ }"##,
+ expect_file!("./test_data/rustc_range_map_lsp_position.txt"),
+ )
+ }
+
+ #[test]
+ fn rustc_mismatched_type() {
+ check(
+ r##"{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "\nThis error occurs when the compiler was unable to infer the concrete type of a\nvariable. It can occur for several cases, the most common of which is a\nmismatch in the expected type that the compiler inferred for a variable's\ninitializing expression, and the actual type explicitly assigned to the\nvariable.\n\nFor example:\n\n```compile_fail,E0308\nlet x: i32 = \"I am not a number!\";\n// ~~~ ~~~~~~~~~~~~~~~~~~~~\n// | |\n// | initializing expression;\n// | compiler infers type `&str`\n// |\n// type `i32` assigned to variable `x`\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "runtime/compiler_support.rs",
+ "byte_start": 1589,
+ "byte_end": 1594,
+ "line_start": 48,
+ "line_end": 48,
+ "column_start": 65,
+ "column_end": 70,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let layout = alloc::Layout::from_size_align_unchecked(size, align);",
+ "highlight_start": 65,
+ "highlight_end": 70
+ }
+ ],
+ "label": "expected usize, found u32",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0308]: mismatched types\n --> runtime/compiler_support.rs:48:65\n |\n48 | let layout = alloc::Layout::from_size_align_unchecked(size, align);\n | ^^^^^ expected usize, found u32\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_mismatched_type.txt"],
+ );
+ }
+
+ #[test]
+ fn handles_macro_location() {
+ check(
+ r##"{
+ "rendered": "error[E0277]: can't compare `{integer}` with `&str`\n --> src/main.rs:2:5\n |\n2 | assert_eq!(1, \"love\");\n | ^^^^^^^^^^^^^^^^^^^^^^ no implementation for `{integer} == &str`\n |\n = help: the trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`\n = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)\n\n",
+ "children": [
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "the trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`",
+ "rendered": null,
+ "spans": []
+ }
+ ],
+ "code": {
+ "code": "E0277",
+ "explanation": "\nYou tried to use a type which doesn't implement some trait in a place which\nexpected that trait. Erroneous code example:\n\n```compile_fail,E0277\n// here we declare the Foo trait with a bar method\ntrait Foo {\n fn bar(&self);\n}\n\n// we now declare a function which takes an object implementing the Foo trait\nfn some_func<T: Foo>(foo: T) {\n foo.bar();\n}\n\nfn main() {\n // we now call the method with the i32 type, which doesn't implement\n // the Foo trait\n some_func(5i32); // error: the trait bound `i32 : Foo` is not satisfied\n}\n```\n\nIn order to fix this error, verify that the type you're using does implement\nthe trait. Example:\n\n```\ntrait Foo {\n fn bar(&self);\n}\n\nfn some_func<T: Foo>(foo: T) {\n foo.bar(); // we can now use this method since i32 implements the\n // Foo trait\n}\n\n// we implement the trait on the i32 type\nimpl Foo for i32 {\n fn bar(&self) {}\n}\n\nfn main() {\n some_func(5i32); // ok!\n}\n```\n\nOr in a generic context, an erroneous code example would look like:\n\n```compile_fail,E0277\nfn some_func<T>(foo: T) {\n println!(\"{:?}\", foo); // error: the trait `core::fmt::Debug` is not\n // implemented for the type `T`\n}\n\nfn main() {\n // We now call the method with the i32 type,\n // which *does* implement the Debug trait.\n some_func(5i32);\n}\n```\n\nNote that the error here is in the definition of the generic function: Although\nwe only call it with a parameter that does implement `Debug`, the compiler\nstill rejects the function: It must work with all possible input types. In\norder to make this example compile, we need to restrict the generic type we're\naccepting:\n\n```\nuse std::fmt;\n\n// Restrict the input type to types that implement Debug.\nfn some_func<T: fmt::Debug>(foo: T) {\n println!(\"{:?}\", foo);\n}\n\nfn main() {\n // Calling the method is still fine, as i32 implements Debug.\n some_func(5i32);\n\n // This would fail to compile now:\n // struct WithoutDebug;\n // some_func(WithoutDebug);\n}\n```\n\nRust only looks at the signature of the called function, as such it must\nalready specify all requirements that will be used for every type parameter.\n"
+ },
+ "level": "error",
+ "message": "can't compare `{integer}` with `&str`",
+ "spans": [
+ {
+ "byte_end": 155,
+ "byte_start": 153,
+ "column_end": 33,
+ "column_start": 31,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 940,
+ "byte_start": 0,
+ "column_end": 6,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "<::core::macros::assert_eq macros>",
+ "is_primary": false,
+ "label": null,
+ "line_end": 36,
+ "line_start": 1,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 35,
+ "highlight_start": 1,
+ "text": "($ left : expr, $ right : expr) =>"
+ },
+ {
+ "highlight_end": 3,
+ "highlight_start": 1,
+ "text": "({"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " match (& $ left, & $ right)"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 34,
+ "highlight_start": 1,
+ "text": " (left_val, right_val) =>"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 46,
+ "highlight_start": 1,
+ "text": " if ! (* left_val == * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 25,
+ "highlight_start": 1,
+ "text": " panic !"
+ },
+ {
+ "highlight_end": 57,
+ "highlight_start": 1,
+ "text": " (r#\"assertion failed: `(left == right)`"
+ },
+ {
+ "highlight_end": 16,
+ "highlight_start": 1,
+ "text": " left: `{:?}`,"
+ },
+ {
+ "highlight_end": 18,
+ "highlight_start": 1,
+ "text": " right: `{:?}`\"#,"
+ },
+ {
+ "highlight_end": 47,
+ "highlight_start": 1,
+ "text": " & * left_val, & * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 42,
+ "highlight_start": 1,
+ "text": " }) ; ($ left : expr, $ right : expr,) =>"
+ },
+ {
+ "highlight_end": 49,
+ "highlight_start": 1,
+ "text": "({ $ crate :: assert_eq ! ($ left, $ right) }) ;"
+ },
+ {
+ "highlight_end": 53,
+ "highlight_start": 1,
+ "text": "($ left : expr, $ right : expr, $ ($ arg : tt) +) =>"
+ },
+ {
+ "highlight_end": 3,
+ "highlight_start": 1,
+ "text": "({"
+ },
+ {
+ "highlight_end": 37,
+ "highlight_start": 1,
+ "text": " match (& ($ left), & ($ right))"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 34,
+ "highlight_start": 1,
+ "text": " (left_val, right_val) =>"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 46,
+ "highlight_start": 1,
+ "text": " if ! (* left_val == * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 25,
+ "highlight_start": 1,
+ "text": " panic !"
+ },
+ {
+ "highlight_end": 57,
+ "highlight_start": 1,
+ "text": " (r#\"assertion failed: `(left == right)`"
+ },
+ {
+ "highlight_end": 16,
+ "highlight_start": 1,
+ "text": " left: `{:?}`,"
+ },
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": " right: `{:?}`: {}\"#,"
+ },
+ {
+ "highlight_end": 72,
+ "highlight_start": 1,
+ "text": " & * left_val, & * right_val, $ crate :: format_args !"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " ($ ($ arg) +))"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 6,
+ "highlight_start": 1,
+ "text": " }) ;"
+ }
+ ]
+ },
+ "macro_decl_name": "assert_eq!",
+ "span": {
+ "byte_end": 38,
+ "byte_start": 16,
+ "column_end": 27,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 2,
+ "line_start": 2,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 27,
+ "highlight_start": 5,
+ "text": " assert_eq!(1, \"love\");"
+ }
+ ]
+ }
+ },
+ "file_name": "<::core::macros::assert_eq macros>",
+ "is_primary": true,
+ "label": "no implementation for `{integer} == &str`",
+ "line_end": 7,
+ "line_start": 7,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 33,
+ "highlight_start": 31,
+ "text": " if ! (* left_val == * right_val)"
+ }
+ ]
+ }
+ ]
+ }"##,
+ expect_file!["./test_data/handles_macro_location.txt"],
+ );
+ }
+
+ #[test]
+ fn macro_compiler_error() {
+ check(
+ r##"{
+ "rendered": "error: Please register your known path in the path module\n --> crates/hir_def/src/path.rs:265:9\n |\n265 | compile_error!(\"Please register your known path in the path module\")\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n | \n ::: crates/hir_def/src/data.rs:80:16\n |\n80 | let path = path![std::future::Future];\n | -------------------------- in this macro invocation\n\n",
+ "children": [],
+ "code": null,
+ "level": "error",
+ "message": "Please register your known path in the path module",
+ "spans": [
+ {
+ "byte_end": 8285,
+ "byte_start": 8217,
+ "column_end": 77,
+ "column_start": 9,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 8294,
+ "byte_start": 7858,
+ "column_end": 2,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 267,
+ "line_start": 254,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 28,
+ "highlight_start": 1,
+ "text": "macro_rules! __known_path {"
+ },
+ {
+ "highlight_end": 37,
+ "highlight_start": 1,
+ "text": " (std::iter::IntoIterator) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::result::Result) => {};"
+ },
+ {
+ "highlight_end": 29,
+ "highlight_start": 1,
+ "text": " (std::ops::Range) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeFrom) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeFull) => {};"
+ },
+ {
+ "highlight_end": 31,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeTo) => {};"
+ },
+ {
+ "highlight_end": 40,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeToInclusive) => {};"
+ },
+ {
+ "highlight_end": 38,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeInclusive) => {};"
+ },
+ {
+ "highlight_end": 27,
+ "highlight_start": 1,
+ "text": " (std::ops::Try) => {};"
+ },
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": " ($path:path) => {"
+ },
+ {
+ "highlight_end": 77,
+ "highlight_start": 1,
+ "text": " compile_error!(\"Please register your known path in the path module\")"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " };"
+ },
+ {
+ "highlight_end": 2,
+ "highlight_start": 1,
+ "text": "}"
+ }
+ ]
+ },
+ "macro_decl_name": "$crate::__known_path!",
+ "span": {
+ "byte_end": 8427,
+ "byte_start": 8385,
+ "column_end": 51,
+ "column_start": 9,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 8611,
+ "byte_start": 8312,
+ "column_end": 2,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 277,
+ "line_start": 270,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": "macro_rules! __path {"
+ },
+ {
+ "highlight_end": 43,
+ "highlight_start": 1,
+ "text": " ($start:ident $(:: $seg:ident)*) => ({"
+ },
+ {
+ "highlight_end": 51,
+ "highlight_start": 1,
+ "text": " $crate::__known_path!($start $(:: $seg)*);"
+ },
+ {
+ "highlight_end": 87,
+ "highlight_start": 1,
+ "text": " $crate::path::ModPath::from_simple_segments($crate::path::PathKind::Abs, vec!["
+ },
+ {
+ "highlight_end": 76,
+ "highlight_start": 1,
+ "text": " $crate::path::__name![$start], $($crate::path::__name![$seg],)*"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " ])"
+ },
+ {
+ "highlight_end": 8,
+ "highlight_start": 1,
+ "text": " });"
+ },
+ {
+ "highlight_end": 2,
+ "highlight_start": 1,
+ "text": "}"
+ }
+ ]
+ },
+ "macro_decl_name": "path!",
+ "span": {
+ "byte_end": 2966,
+ "byte_start": 2940,
+ "column_end": 42,
+ "column_start": 16,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/data.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 80,
+ "line_start": 80,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 42,
+ "highlight_start": 16,
+ "text": " let path = path![std::future::Future];"
+ }
+ ]
+ }
+ },
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 272,
+ "line_start": 272,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 51,
+ "highlight_start": 9,
+ "text": " $crate::__known_path!($start $(:: $seg)*);"
+ }
+ ]
+ }
+ },
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 265,
+ "line_start": 265,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 77,
+ "highlight_start": 9,
+ "text": " compile_error!(\"Please register your known path in the path module\")"
+ }
+ ]
+ }
+ ]
+ }
+ "##,
+ expect_file!["./test_data/macro_compiler_error.txt"],
+ );
+ }
+
+ #[test]
+ fn snap_multi_line_fix() {
+ check(
+ r##"{
+ "rendered": "warning: returning the result of a let binding from a block\n --> src/main.rs:4:5\n |\n3 | let a = (0..10).collect();\n | -------------------------- unnecessary let binding\n4 | a\n | ^\n |\n = note: `#[warn(clippy::let_and_return)]` on by default\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return\nhelp: return the expression directly\n |\n3 | \n4 | (0..10).collect()\n |\n\n",
+ "children": [
+ {
+ "children": [],
+ "code": null,
+ "level": "note",
+ "message": "`#[warn(clippy::let_and_return)]` on by default",
+ "rendered": null,
+ "spans": []
+ },
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return",
+ "rendered": null,
+ "spans": []
+ },
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "return the expression directly",
+ "rendered": null,
+ "spans": [
+ {
+ "byte_end": 55,
+ "byte_start": 29,
+ "column_end": 31,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 3,
+ "line_start": 3,
+ "suggested_replacement": "",
+ "suggestion_applicability": "MachineApplicable",
+ "text": [
+ {
+ "highlight_end": 31,
+ "highlight_start": 5,
+ "text": " let a = (0..10).collect();"
+ }
+ ]
+ },
+ {
+ "byte_end": 61,
+ "byte_start": 60,
+ "column_end": 6,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 4,
+ "line_start": 4,
+ "suggested_replacement": "(0..10).collect()",
+ "suggestion_applicability": "MachineApplicable",
+ "text": [
+ {
+ "highlight_end": 6,
+ "highlight_start": 5,
+ "text": " a"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "code": {
+ "code": "clippy::let_and_return",
+ "explanation": null
+ },
+ "level": "warning",
+ "message": "returning the result of a let binding from a block",
+ "spans": [
+ {
+ "byte_end": 55,
+ "byte_start": 29,
+ "column_end": 31,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": false,
+ "label": "unnecessary let binding",
+ "line_end": 3,
+ "line_start": 3,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 31,
+ "highlight_start": 5,
+ "text": " let a = (0..10).collect();"
+ }
+ ]
+ },
+ {
+ "byte_end": 61,
+ "byte_start": 60,
+ "column_end": 6,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 4,
+ "line_start": 4,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 6,
+ "highlight_start": 5,
+ "text": " a"
+ }
+ ]
+ }
+ ]
+ }
+ "##,
+ expect_file!["./test_data/snap_multi_line_fix.txt"],
+ );
+ }
+
+ #[test]
+ fn reasonable_line_numbers_from_empty_file() {
+ check(
+ r##"{
+ "message": "`main` function not found in crate `current`",
+ "code": {
+ "code": "E0601",
+ "explanation": "No `main` function was found in a binary crate.\n\nTo fix this error, add a `main` function:\n\n```\nfn main() {\n // Your program will start here.\n println!(\"Hello world!\");\n}\n```\n\nIf you don't know the basics of Rust, you can look at the\n[Rust Book][rust-book] to get started.\n\n[rust-book]: https://doc.rust-lang.org/book/\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "src/bin/current.rs",
+ "byte_start": 0,
+ "byte_end": 0,
+ "line_start": 0,
+ "line_end": 0,
+ "column_start": 1,
+ "column_end": 1,
+ "is_primary": true,
+ "text": [],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "consider adding a `main` function to `src/bin/current.rs`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0601]: `main` function not found in crate `current`\n |\n = note: consider adding a `main` function to `src/bin/current.rs`\n\n"
+ }"##,
+ expect_file!["./test_data/reasonable_line_numbers_from_empty_file.txt"],
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs
new file mode 100644
index 000000000..3fcfb4a1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs
@@ -0,0 +1,53 @@
+//! Generate minimal `TextEdit`s from different text versions
+use dissimilar::Chunk;
+use ide::{TextEdit, TextRange, TextSize};
+
+pub(crate) fn diff(left: &str, right: &str) -> TextEdit {
+ let chunks = dissimilar::diff(left, right);
+ textedit_from_chunks(chunks)
+}
+
+fn textedit_from_chunks(chunks: Vec<dissimilar::Chunk<'_>>) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ let mut pos = TextSize::default();
+
+ let mut chunks = chunks.into_iter().peekable();
+ while let Some(chunk) = chunks.next() {
+ if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) {
+ chunks.next().unwrap();
+ let deleted_len = TextSize::of(deleted);
+ builder.replace(TextRange::at(pos, deleted_len), inserted.into());
+ pos += deleted_len;
+ continue;
+ }
+
+ match chunk {
+ Chunk::Equal(text) => {
+ pos += TextSize::of(text);
+ }
+ Chunk::Delete(deleted) => {
+ let deleted_len = TextSize::of(deleted);
+ builder.delete(TextRange::at(pos, deleted_len));
+ pos += deleted_len;
+ }
+ Chunk::Insert(inserted) => {
+ builder.insert(pos, inserted.into());
+ }
+ }
+ }
+ builder.finish()
+}
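+
+// For instance, diffing "abc" against "axc" typically yields the chunks
+// [Equal("a"), Delete("b"), Insert("x"), Equal("c")]; the peeked
+// Delete/Insert pair is coalesced into a single `replace(1..2, "x")` edit
+// instead of a separate delete followed by an insert at the same position.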
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn diff_applies() {
+ let mut original = String::from("fn foo(a:u32){\n}");
+ let result = "fn foo(a: u32) {}";
+ let edit = diff(&original, result);
+ edit.apply(&mut original);
+ assert_eq!(original, result);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
new file mode 100644
index 000000000..f16559148
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
@@ -0,0 +1,266 @@
+//! See [RequestDispatcher].
+use std::{fmt, panic, thread};
+
+use ide::Cancelled;
+use lsp_server::ExtractError;
+use serde::{de::DeserializeOwned, Serialize};
+
+use crate::{
+ global_state::{GlobalState, GlobalStateSnapshot},
+ main_loop::Task,
+ version::version,
+ LspError, Result,
+};
+
+/// A visitor for routing a raw JSON request to an appropriate handler function.
+///
+/// Most requests are read-only and async and are handled on the threadpool
+/// (`on` method).
+///
+/// Some read-only requests are latency sensitive, and are immediately handled
+/// on the main loop thread (`on_sync`). These are typically typing-related
+/// requests.
+///
+/// Some requests modify the state, and are run on the main thread to get
+/// `&mut` (`on_sync_mut`).
+///
+/// Read-only requests are wrapped into `catch_unwind` -- they don't modify the
+/// state, so it's OK to recover from their failures.
+pub(crate) struct RequestDispatcher<'a> {
+ pub(crate) req: Option<lsp_server::Request>,
+ pub(crate) global_state: &'a mut GlobalState,
+}
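+
+// A sketch of typical usage (the actual wiring lives in the main loop):
+// handlers are chained fluently, and the first method whose request type
+// matches consumes the request, e.g.
+//
+//     RequestDispatcher { req: Some(req), global_state: state }
+//         .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
+//         .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
+//         .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
+//         .finish();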
+
+impl<'a> RequestDispatcher<'a> {
+ /// Dispatches the request onto the current thread, given full access to
+ /// mutable global state. Unlike all other methods here, this one isn't
+ /// guarded by `catch_unwind`, so, please, don't make bugs :-)
+ pub(crate) fn on_sync_mut<R>(
+ &mut self,
+ f: fn(&mut GlobalState, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned + panic::UnwindSafe + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+ let result = {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(self.global_state, params)
+ };
+ if let Ok(response) = result_to_response::<R>(req.id.clone(), result) {
+ self.global_state.respond(response);
+ }
+
+ self
+ }
+
+ /// Dispatches the request onto the current thread.
+ pub(crate) fn on_sync<R>(
+ &mut self,
+ f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned + panic::UnwindSafe + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+ let global_state_snapshot = self.global_state.snapshot();
+
+ let result = panic::catch_unwind(move || {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(global_state_snapshot, params)
+ });
+
+ if let Ok(response) = thread_result_to_response::<R>(req.id.clone(), result) {
+ self.global_state.respond(response);
+ }
+
+ self
+ }
+
+    /// Dispatches the request onto the thread pool.
+ pub(crate) fn on<R>(
+ &mut self,
+ f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request + 'static,
+ R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
+ R::Result: Serialize,
+ {
+ let (req, params, panic_context) = match self.parse::<R>() {
+ Some(it) => it,
+ None => return self,
+ };
+
+ self.global_state.task_pool.handle.spawn({
+ let world = self.global_state.snapshot();
+ move || {
+ let result = panic::catch_unwind(move || {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(world, params)
+ });
+ match thread_result_to_response::<R>(req.id.clone(), result) {
+ Ok(response) => Task::Response(response),
+ Err(_) => Task::Retry(req),
+ }
+ }
+ });
+
+ self
+ }
+
+ pub(crate) fn finish(&mut self) {
+ if let Some(req) = self.req.take() {
+ tracing::error!("unknown request: {:?}", req);
+ let response = lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::MethodNotFound as i32,
+ "unknown request".to_string(),
+ );
+ self.global_state.respond(response);
+ }
+ }
+
+ fn parse<R>(&mut self) -> Option<(lsp_server::Request, R::Params, String)>
+ where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned + fmt::Debug,
+ {
+ let req = match &self.req {
+ Some(req) if req.method == R::METHOD => self.req.take()?,
+ _ => return None,
+ };
+
+ let res = crate::from_json(R::METHOD, &req.params);
+ match res {
+ Ok(params) => {
+ let panic_context =
+ format!("\nversion: {}\nrequest: {} {:#?}", version(), R::METHOD, params);
+ Some((req, params, panic_context))
+ }
+ Err(err) => {
+ let response = lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::InvalidParams as i32,
+ err.to_string(),
+ );
+ self.global_state.respond(response);
+ None
+ }
+ }
+ }
+}
+
+fn thread_result_to_response<R>(
+ id: lsp_server::RequestId,
+ result: thread::Result<Result<R::Result>>,
+) -> Result<lsp_server::Response, Cancelled>
+where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned,
+ R::Result: Serialize,
+{
+ match result {
+ Ok(result) => result_to_response::<R>(id, result),
+ Err(panic) => {
+ let panic_message = panic
+ .downcast_ref::<String>()
+ .map(String::as_str)
+ .or_else(|| panic.downcast_ref::<&str>().copied());
+
+ let mut message = "request handler panicked".to_string();
+ if let Some(panic_message) = panic_message {
+ message.push_str(": ");
+ message.push_str(panic_message)
+ };
+
+ Ok(lsp_server::Response::new_err(
+ id,
+ lsp_server::ErrorCode::InternalError as i32,
+ message,
+ ))
+ }
+ }
+}
+
+fn result_to_response<R>(
+ id: lsp_server::RequestId,
+ result: Result<R::Result>,
+) -> Result<lsp_server::Response, Cancelled>
+where
+ R: lsp_types::request::Request,
+ R::Params: DeserializeOwned,
+ R::Result: Serialize,
+{
+ let res = match result {
+ Ok(resp) => lsp_server::Response::new_ok(id, &resp),
+ Err(e) => match e.downcast::<LspError>() {
+ Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message),
+ Err(e) => match e.downcast::<Cancelled>() {
+ Ok(cancelled) => return Err(*cancelled),
+ Err(e) => lsp_server::Response::new_err(
+ id,
+ lsp_server::ErrorCode::InternalError as i32,
+ e.to_string(),
+ ),
+ },
+ },
+ };
+ Ok(res)
+}
+
+pub(crate) struct NotificationDispatcher<'a> {
+ pub(crate) not: Option<lsp_server::Notification>,
+ pub(crate) global_state: &'a mut GlobalState,
+}
+
+impl<'a> NotificationDispatcher<'a> {
+ pub(crate) fn on<N>(
+ &mut self,
+ f: fn(&mut GlobalState, N::Params) -> Result<()>,
+ ) -> Result<&mut Self>
+ where
+ N: lsp_types::notification::Notification,
+ N::Params: DeserializeOwned + Send,
+ {
+ let not = match self.not.take() {
+ Some(it) => it,
+ None => return Ok(self),
+ };
+ let params = match not.extract::<N::Params>(N::METHOD) {
+ Ok(it) => it,
+ Err(ExtractError::JsonError { method, error }) => {
+ panic!("Invalid request\nMethod: {method}\n error: {error}",)
+ }
+ Err(ExtractError::MethodMismatch(not)) => {
+ self.not = Some(not);
+ return Ok(self);
+ }
+ };
+ let _pctx = stdx::panic_context::enter(format!(
+ "\nversion: {}\nnotification: {}",
+ version(),
+ N::METHOD
+ ));
+ f(self.global_state, params)?;
+ Ok(self)
+ }
+
+ pub(crate) fn finish(&mut self) {
+ if let Some(not) = &self.not {
+ if !not.method.starts_with("$/") {
+ tracing::error!("unhandled notification: {:?}", not);
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
new file mode 100644
index 000000000..7bdd34d1f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
@@ -0,0 +1,117 @@
+//! Conversion of lsp_types types to rust-analyzer specific ones.
+use anyhow::format_err;
+use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16};
+use ide_db::base_db::{FileId, FilePosition, FileRange};
+use syntax::{TextRange, TextSize};
+use vfs::AbsPathBuf;
+
+use crate::{
+ from_json,
+ global_state::GlobalStateSnapshot,
+ line_index::{LineIndex, OffsetEncoding},
+ lsp_ext,
+ lsp_utils::invalid_params_error,
+ Result,
+};
+
+pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
+ let path = url.to_file_path().map_err(|()| "url is not a file")?;
+ Ok(AbsPathBuf::try_from(path).unwrap())
+}
+
+pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
+ abs_path(url).map(vfs::VfsPath::from)
+}
+
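+/// Converts an LSP `Position` into a byte offset. Depending on the negotiated
+/// encoding, the column is either a UTF-8 byte count or a UTF-16 code-unit
+/// count; for example, a `𐐀` earlier on the line advances the UTF-16 column by
+/// two but the UTF-8 column by four, so UTF-16 columns are mapped back to
+/// UTF-8 before indexing into the line.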
+pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
+ let line_col = match line_index.encoding {
+ OffsetEncoding::Utf8 => {
+ LineCol { line: position.line as u32, col: position.character as u32 }
+ }
+ OffsetEncoding::Utf16 => {
+ let line_col =
+ LineColUtf16 { line: position.line as u32, col: position.character as u32 };
+ line_index.index.to_utf8(line_col)
+ }
+ };
+ let text_size =
+ line_index.index.offset(line_col).ok_or_else(|| format_err!("Invalid offset"))?;
+ Ok(text_size)
+}
+
+pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> {
+ let start = offset(line_index, range.start)?;
+ let end = offset(line_index, range.end)?;
+ let text_range = TextRange::new(start, end);
+ Ok(text_range)
+}
+
+pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
+ snap.url_to_file_id(url)
+}
+
+pub(crate) fn file_position(
+ snap: &GlobalStateSnapshot,
+ tdpp: lsp_types::TextDocumentPositionParams,
+) -> Result<FilePosition> {
+ let file_id = file_id(snap, &tdpp.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = offset(&line_index, tdpp.position)?;
+ Ok(FilePosition { file_id, offset })
+}
+
+pub(crate) fn file_range(
+ snap: &GlobalStateSnapshot,
+ text_document_identifier: lsp_types::TextDocumentIdentifier,
+ range: lsp_types::Range,
+) -> Result<FileRange> {
+ let file_id = file_id(snap, &text_document_identifier.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = text_range(&line_index, range)?;
+ Ok(FileRange { file_id, range })
+}
+
+pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
+ let assist_kind = match &kind {
+ k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::None,
+ k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
+ k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_INLINE => AssistKind::RefactorInline,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_REWRITE => AssistKind::RefactorRewrite,
+ _ => return None,
+ };
+
+ Some(assist_kind)
+}
+
+pub(crate) fn annotation(
+ snap: &GlobalStateSnapshot,
+ code_lens: lsp_types::CodeLens,
+) -> Result<Annotation> {
+ let data =
+ code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?;
+ let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?;
+
+ match resolve {
+ lsp_ext::CodeLensResolveData::Impls(params) => {
+ let file_id =
+ snap.url_to_file_id(&params.text_document_position_params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ Ok(Annotation {
+ range: text_range(&line_index, code_lens.range)?,
+ kind: AnnotationKind::HasImpls { file_id, data: None },
+ })
+ }
+ lsp_ext::CodeLensResolveData::References(params) => {
+ let file_id = snap.url_to_file_id(&params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ Ok(Annotation {
+ range: text_range(&line_index, code_lens.range)?,
+ kind: AnnotationKind::HasReferences { file_id, data: None },
+ })
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
new file mode 100644
index 000000000..8f881cba4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -0,0 +1,375 @@
+//! The context or environment in which the language server functions. In our
+//! server implementation this is known as the `WorldState`.
+//!
+//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
+
+use std::{sync::Arc, time::Instant};
+
+use crossbeam_channel::{unbounded, Receiver, Sender};
+use flycheck::FlycheckHandle;
+use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
+use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
+use rustc_hash::FxHashMap;
+use vfs::AnchoredPathBuf;
+
+use crate::{
+ config::Config,
+ diagnostics::{CheckFixes, DiagnosticCollection},
+ from_proto,
+ line_index::{LineEndings, LineIndex},
+ lsp_ext,
+ main_loop::Task,
+ mem_docs::MemDocs,
+ op_queue::OpQueue,
+ reload::{self, SourceRootConfig},
+ task_pool::TaskPool,
+ to_proto::url_from_abs_path,
+ Result,
+};
+
+// Enforces drop order
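+// (struct fields drop in declaration order, so `handle` is dropped before
+// `receiver`).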
+pub(crate) struct Handle<H, C> {
+ pub(crate) handle: H,
+ pub(crate) receiver: C,
+}
+
+pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
+pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
+
+/// `GlobalState` is the primary mutable state of the language server
+///
+/// The most interesting components are `vfs`, which stores a consistent
+/// snapshot of the file systems, and `analysis_host`, which stores our
+/// incremental salsa database.
+///
+/// Note that this struct has more than one impl in various modules!
+pub(crate) struct GlobalState {
+ sender: Sender<lsp_server::Message>,
+ req_queue: ReqQueue,
+ pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
+ pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
+ pub(crate) config: Arc<Config>,
+ pub(crate) analysis_host: AnalysisHost,
+ pub(crate) diagnostics: DiagnosticCollection,
+ pub(crate) mem_docs: MemDocs,
+ pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+ pub(crate) shutdown_requested: bool,
+ pub(crate) proc_macro_changed: bool,
+ pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
+ pub(crate) source_root_config: SourceRootConfig,
+ pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
+
+ pub(crate) flycheck: Vec<FlycheckHandle>,
+ pub(crate) flycheck_sender: Sender<flycheck::Message>,
+ pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
+
+ pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+ pub(crate) vfs_config_version: u32,
+ pub(crate) vfs_progress_config_version: u32,
+ pub(crate) vfs_progress_n_total: usize,
+ pub(crate) vfs_progress_n_done: usize,
+
+ /// `workspaces` field stores the data we actually use, while the `OpQueue`
+ /// stores the result of the last fetch.
+ ///
+ /// If the fetch (partially) fails, we do not update the current value.
+ ///
+ /// The handling of build data is subtle. We fetch workspace in two phases:
+ ///
+ /// *First*, we run `cargo metadata`, which gives us fast results for
+ /// initial analysis.
+ ///
+ /// *Second*, we run `cargo check` which runs build scripts and compiles
+ /// proc macros.
+ ///
+ /// We need both for the precise analysis, but we want rust-analyzer to be
+ /// at least partially available just after the first phase. That's because
+ /// first phase is much faster, and is much less likely to fail.
+ ///
+ /// This creates a complication -- by the time the second phase completes,
+    /// the results of the first phase could be invalid. That is, while we run
+ /// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
+ /// `cargo metadata` completes before `cargo check`.
+ ///
+ /// An additional complication is that we want to avoid needless work. When
+ /// the user just adds comments or whitespace to Cargo.toml, we do not want
+ /// to invalidate any salsa caches.
+ pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+ pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
+ pub(crate) fetch_build_data_queue:
+ OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
+
+ pub(crate) prime_caches_queue: OpQueue<()>,
+}
+
+/// An immutable snapshot of the world's state at a point in time.
+pub(crate) struct GlobalStateSnapshot {
+ pub(crate) config: Arc<Config>,
+ pub(crate) analysis: Analysis,
+ pub(crate) check_fixes: CheckFixes,
+ mem_docs: MemDocs,
+ pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+ vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+ pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+}
+
+impl std::panic::UnwindSafe for GlobalStateSnapshot {}
+
+impl GlobalState {
+ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState {
+ let loader = {
+ let (sender, receiver) = unbounded::<vfs::loader::Message>();
+ let handle: vfs_notify::NotifyHandle =
+ vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>;
+ Handle { handle, receiver }
+ };
+
+ let task_pool = {
+ let (sender, receiver) = unbounded();
+ let handle = TaskPool::new(sender);
+ Handle { handle, receiver }
+ };
+
+ let analysis_host = AnalysisHost::new(config.lru_capacity());
+ let (flycheck_sender, flycheck_receiver) = unbounded();
+ let mut this = GlobalState {
+ sender,
+ req_queue: ReqQueue::default(),
+ task_pool,
+ loader,
+ config: Arc::new(config.clone()),
+ analysis_host,
+ diagnostics: Default::default(),
+ mem_docs: MemDocs::default(),
+ semantic_tokens_cache: Arc::new(Default::default()),
+ shutdown_requested: false,
+ proc_macro_changed: false,
+ last_reported_status: None,
+ source_root_config: SourceRootConfig::default(),
+ proc_macro_clients: vec![],
+
+ flycheck: Vec::new(),
+ flycheck_sender,
+ flycheck_receiver,
+
+ vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
+ vfs_config_version: 0,
+ vfs_progress_config_version: 0,
+ vfs_progress_n_total: 0,
+ vfs_progress_n_done: 0,
+
+ workspaces: Arc::new(Vec::new()),
+ fetch_workspaces_queue: OpQueue::default(),
+ prime_caches_queue: OpQueue::default(),
+
+ fetch_build_data_queue: OpQueue::default(),
+ };
+ // Apply any required database inputs from the config.
+ this.update_configuration(config);
+ this
+ }
+
+ pub(crate) fn process_changes(&mut self) -> bool {
+ let _p = profile::span("GlobalState::process_changes");
+ let mut fs_changes = Vec::new();
+ // A file was added or deleted
+ let mut has_structure_changes = false;
+
+ let (change, changed_files) = {
+ let mut change = Change::new();
+ let (vfs, line_endings_map) = &mut *self.vfs.write();
+ let changed_files = vfs.take_changes();
+ if changed_files.is_empty() {
+ return false;
+ }
+
+ for file in &changed_files {
+ if let Some(path) = vfs.file_path(file.file_id).as_path() {
+ let path = path.to_path_buf();
+ if reload::should_refresh_for_change(&path, file.change_kind) {
+ self.fetch_workspaces_queue
+ .request_op(format!("vfs file change: {}", path.display()));
+ }
+ fs_changes.push((path, file.change_kind));
+ if file.is_created_or_deleted() {
+ has_structure_changes = true;
+ }
+ }
+
+ if !file.exists() {
+ self.diagnostics.clear_native_for(file.file_id);
+ }
+
+ let text = if file.exists() {
+ let bytes = vfs.file_contents(file.file_id).to_vec();
+ String::from_utf8(bytes).ok().and_then(|text| {
+ let (text, line_endings) = LineEndings::normalize(text);
+ line_endings_map.insert(file.file_id, line_endings);
+ Some(Arc::new(text))
+ })
+ } else {
+ None
+ };
+ change.change_file(file.file_id, text);
+ }
+ if has_structure_changes {
+ let roots = self.source_root_config.partition(vfs);
+ change.set_roots(roots);
+ }
+ (change, changed_files)
+ };
+
+ self.analysis_host.apply_change(change);
+
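+        // Mark proc macros as changed when any edited (not created or deleted)
+        // file belongs to a crate that is a proc macro.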
+ let raw_database = &self.analysis_host.raw_database();
+ self.proc_macro_changed =
+ changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
+ let crates = raw_database.relevant_crates(file.file_id);
+ let crate_graph = raw_database.crate_graph();
+
+ crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
+ });
+ true
+ }
+
+ pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
+ GlobalStateSnapshot {
+ config: Arc::clone(&self.config),
+ workspaces: Arc::clone(&self.workspaces),
+ analysis: self.analysis_host.analysis(),
+ vfs: Arc::clone(&self.vfs),
+ check_fixes: Arc::clone(&self.diagnostics.check_fixes),
+ mem_docs: self.mem_docs.clone(),
+ semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
+ }
+ }
+
+ pub(crate) fn send_request<R: lsp_types::request::Request>(
+ &mut self,
+ params: R::Params,
+ handler: ReqHandler,
+ ) {
+ let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler);
+ self.send(request.into());
+ }
+
+ pub(crate) fn complete_request(&mut self, response: lsp_server::Response) {
+ let handler = self
+ .req_queue
+ .outgoing
+ .complete(response.id.clone())
+ .expect("received response for unknown request");
+ handler(self, response)
+ }
+
+ pub(crate) fn send_notification<N: lsp_types::notification::Notification>(
+ &mut self,
+ params: N::Params,
+ ) {
+ let not = lsp_server::Notification::new(N::METHOD.to_string(), params);
+ self.send(not.into());
+ }
+
+ pub(crate) fn register_request(
+ &mut self,
+ request: &lsp_server::Request,
+ request_received: Instant,
+ ) {
+ self.req_queue
+ .incoming
+ .register(request.id.clone(), (request.method.clone(), request_received));
+ }
+
+ pub(crate) fn respond(&mut self, response: lsp_server::Response) {
+ if let Some((method, start)) = self.req_queue.incoming.complete(response.id.clone()) {
+ if let Some(err) = &response.error {
+ if err.message.starts_with("server panicked") {
+ self.poke_rust_analyzer_developer(format!("{}, check the log", err.message))
+ }
+ }
+
+ let duration = start.elapsed();
+ tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration);
+ self.send(response.into());
+ }
+ }
+
+ pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
+ if let Some(response) = self.req_queue.incoming.cancel(request_id) {
+ self.send(response.into());
+ }
+ }
+
+ fn send(&mut self, message: lsp_server::Message) {
+ self.sender.send(message).unwrap()
+ }
+}
+
+impl Drop for GlobalState {
+ fn drop(&mut self) {
+ self.analysis_host.request_cancellation();
+ }
+}
+
+impl GlobalStateSnapshot {
+ pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
+ url_to_file_id(&self.vfs.read().0, url)
+ }
+
+ pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
+ file_id_to_url(&self.vfs.read().0, id)
+ }
+
+ pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
+ let endings = self.vfs.read().1[&file_id];
+ let index = self.analysis.file_line_index(file_id)?;
+ let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
+ Ok(res)
+ }
+
+ pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
+ let path = from_proto::vfs_path(url).ok()?;
+ Some(self.mem_docs.get(&path)?.version)
+ }
+
+ pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
+ let mut base = self.vfs.read().0.file_path(path.anchor);
+ base.pop();
+ let path = base.join(&path.path).unwrap();
+ let path = path.as_path().unwrap();
+ url_from_abs_path(path)
+ }
+
+ pub(crate) fn cargo_target_for_crate_root(
+ &self,
+ crate_id: CrateId,
+ ) -> Option<(&CargoWorkspace, Target)> {
+ let file_id = self.analysis.crate_root(crate_id).ok()?;
+ let path = self.vfs.read().0.file_path(file_id);
+ let path = path.as_path()?;
+ self.workspaces.iter().find_map(|ws| match ws {
+ ProjectWorkspace::Cargo { cargo, .. } => {
+ cargo.target_by_root(path).map(|it| (cargo, it))
+ }
+ ProjectWorkspace::Json { .. } => None,
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ })
+ }
+}
+
+pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
+ let path = vfs.file_path(id);
+ let path = path.as_path().unwrap();
+ url_from_abs_path(path)
+}
+
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
+ let path = from_proto::vfs_path(url)?;
+ let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
+ Ok(res)
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
new file mode 100644
index 000000000..deb777c95
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
@@ -0,0 +1,1892 @@
+//! This module is responsible for implementing handlers for Language Server
+//! Protocol. The majority of requests are fulfilled by calling into the
+//! `ide` crate.
+
+use std::{
+ io::Write as _,
+ process::{self, Stdio},
+};
+
+use anyhow::Context;
+use ide::{
+ AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
+ HoverAction, HoverGotoTypeData, Query, RangeInfo, Runnable, RunnableKind, SingleResolve,
+ SourceChange, TextEdit,
+};
+use ide_db::SymbolKind;
+use lsp_server::ErrorCode;
+use lsp_types::{
+ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
+ CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
+ CodeLens, CompletionItem, Diagnostic, DiagnosticTag, DocumentFormattingParams, FoldingRange,
+ FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink,
+ NumberOrString, Position, PrepareRenameResponse, Range, RenameParams,
+ SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
+ SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
+ SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+};
+use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
+use serde_json::json;
+use stdx::{format_to, never};
+use syntax::{algo, ast, AstNode, TextRange, TextSize, T};
+use vfs::AbsPathBuf;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{RustfmtConfig, WorkspaceSymbolConfig},
+ diff::diff,
+ from_proto,
+ global_state::{GlobalState, GlobalStateSnapshot},
+ line_index::LineEndings,
+ lsp_ext::{self, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams},
+ lsp_utils::{all_edits_are_disjoint, invalid_params_error},
+ to_proto, LspError, Result,
+};
+
+pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+ state.proc_macro_clients.clear();
+ state.proc_macro_changed = false;
+ state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
+ state.fetch_build_data_queue.request_op("reload workspace request".to_string());
+ Ok(())
+}
+
+pub(crate) fn handle_analyzer_status(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::AnalyzerStatusParams,
+) -> Result<String> {
+ let _p = profile::span("handle_analyzer_status");
+
+ let mut buf = String::new();
+
+ let mut file_id = None;
+ if let Some(tdi) = params.text_document {
+ match from_proto::file_id(&snap, &tdi.uri) {
+ Ok(it) => file_id = Some(it),
+ Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
+ }
+ }
+
+ if snap.workspaces.is_empty() {
+ buf.push_str("No workspaces\n")
+ } else {
+ buf.push_str("Workspaces:\n");
+ format_to!(
+ buf,
+ "Loaded {:?} packages across {} workspace{}.\n",
+ snap.workspaces.iter().map(|w| w.n_packages()).sum::<usize>(),
+ snap.workspaces.len(),
+ if snap.workspaces.len() == 1 { "" } else { "s" }
+ );
+ }
+ buf.push_str("\nAnalysis:\n");
+ buf.push_str(
+ &snap
+ .analysis
+ .status(file_id)
+ .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()),
+ );
+ Ok(buf)
+}
+
+pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
+ let _p = profile::span("handle_memory_usage");
+ let mut mem = state.analysis_host.per_query_memory_usage();
+ mem.push(("Remaining".into(), profile::memory_usage().allocated));
+
+ let mut out = String::new();
+ for (name, bytes) in mem {
+ format_to!(out, "{:>8} {}\n", bytes, name);
+ }
+ Ok(out)
+}
+
+pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
+ state.analysis_host.shuffle_crate_graph();
+ Ok(())
+}
+
+pub(crate) fn handle_syntax_tree(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::SyntaxTreeParams,
+) -> Result<String> {
+ let _p = profile::span("handle_syntax_tree");
+ let id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(id)?;
+ let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
+ let res = snap.analysis.syntax_tree(id, text_range)?;
+ Ok(res)
+}
+
+pub(crate) fn handle_view_hir(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_hir");
+ let position = from_proto::file_position(&snap, params)?;
+ let res = snap.analysis.view_hir(position)?;
+ Ok(res)
+}
+
+pub(crate) fn handle_view_file_text(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentIdentifier,
+) -> Result<String> {
+ let file_id = from_proto::file_id(&snap, &params.uri)?;
+ Ok(snap.analysis.file_text(file_id)?.to_string())
+}
+
+pub(crate) fn handle_view_item_tree(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::ViewItemTreeParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_item_tree");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let res = snap.analysis.view_item_tree(file_id)?;
+ Ok(res)
+}
+
+pub(crate) fn handle_view_crate_graph(
+ snap: GlobalStateSnapshot,
+ params: ViewCrateGraphParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_crate_graph");
+ let dot = snap.analysis.view_crate_graph(params.full)??;
+ Ok(dot)
+}
+
+pub(crate) fn handle_expand_macro(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::ExpandMacroParams,
+) -> Result<Option<lsp_ext::ExpandedMacro>> {
+ let _p = profile::span("handle_expand_macro");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, params.position)?;
+
+ let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?;
+ Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
+}
+
+pub(crate) fn handle_selection_range(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::SelectionRangeParams,
+) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
+ let _p = profile::span("handle_selection_range");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let res: Result<Vec<lsp_types::SelectionRange>> = params
+ .positions
+ .into_iter()
+ .map(|position| {
+ let offset = from_proto::offset(&line_index, position)?;
+ let mut ranges = Vec::new();
+ {
+ let mut range = TextRange::new(offset, offset);
+ loop {
+ ranges.push(range);
+ let frange = FileRange { file_id, range };
+ let next = snap.analysis.extend_selection(frange)?;
+ if next == range {
+ break;
+ } else {
+ range = next
+ }
+ }
+ }
+ let mut range = lsp_types::SelectionRange {
+ range: to_proto::range(&line_index, *ranges.last().unwrap()),
+ parent: None,
+ };
+ for &r in ranges.iter().rev().skip(1) {
+ range = lsp_types::SelectionRange {
+ range: to_proto::range(&line_index, r),
+ parent: Some(Box::new(range)),
+ }
+ }
+ Ok(range)
+ })
+ .collect();
+
+ Ok(Some(res?))
+}
+
+pub(crate) fn handle_matching_brace(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::MatchingBraceParams,
+) -> Result<Vec<Position>> {
+ let _p = profile::span("handle_matching_brace");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ params
+ .positions
+ .into_iter()
+ .map(|position| {
+ let offset = from_proto::offset(&line_index, position);
+ offset.map(|offset| {
+ let offset = match snap.analysis.matching_brace(FilePosition { file_id, offset }) {
+ Ok(Some(matching_brace_offset)) => matching_brace_offset,
+ Err(_) | Ok(None) => offset,
+ };
+ to_proto::position(&line_index, offset)
+ })
+ })
+ .collect()
+}
+
+pub(crate) fn handle_join_lines(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::JoinLinesParams,
+) -> Result<Vec<lsp_types::TextEdit>> {
+ let _p = profile::span("handle_join_lines");
+
+ let config = snap.config.join_lines();
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut res = TextEdit::default();
+ for range in params.ranges {
+ let range = from_proto::text_range(&line_index, range)?;
+ let edit = snap.analysis.join_lines(&config, FileRange { file_id, range })?;
+ match res.union(edit) {
+ Ok(()) => (),
+ Err(_edit) => {
+ // just ignore overlapping edits
+ }
+ }
+ }
+
+ Ok(to_proto::text_edit_vec(&line_index, res))
+}
+
+pub(crate) fn handle_on_enter(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+ let _p = profile::span("handle_on_enter");
+ let position = from_proto::file_position(&snap, params)?;
+ let edit = match snap.analysis.on_enter(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let line_index = snap.file_line_index(position.file_id)?;
+ let edit = to_proto::snippet_text_edit_vec(&line_index, true, edit);
+ Ok(Some(edit))
+}
+
+pub(crate) fn handle_on_type_formatting(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentOnTypeFormattingParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+ let _p = profile::span("handle_on_type_formatting");
+ let mut position = from_proto::file_position(&snap, params.text_document_position)?;
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ // in `ide`, the `on_type` invariant is that
+ // `text.char_at(position) == typed_char`.
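+    // The client reports the position *after* the character was typed, so step
+    // back one byte (all characters handled here are single-byte ASCII).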
+ position.offset -= TextSize::of('.');
+ let char_typed = params.ch.chars().next().unwrap_or('\0');
+
+ let text = snap.analysis.file_text(position.file_id)?;
+ if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) {
+ return Ok(None);
+ }
+
+ // We have an assist that inserts ` ` after typing `->` in `fn foo() ->{`,
+ // but it requires precise cursor positioning to work, and one can't
+ // position the cursor with on_type formatting. So, let's just toggle this
+ // feature off here, hoping that we'll enable it one day, 😿.
+ if char_typed == '>' {
+ return Ok(None);
+ }
+
+ let edit =
+ snap.analysis.on_char_typed(position, char_typed, snap.config.typing_autoclose_angle())?;
+ let edit = match edit {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ // This should be a single-file edit
+ let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();
+
+ let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
+ Ok(Some(change))
+}
+
+pub(crate) fn handle_document_symbol(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentSymbolParams,
+) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
+ let _p = profile::span("handle_document_symbol");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
+
+ for symbol in snap.analysis.file_structure(file_id)? {
+ let mut tags = Vec::new();
+ if symbol.deprecated {
+ tags.push(SymbolTag::DEPRECATED)
+ };
+
+ #[allow(deprecated)]
+ let doc_symbol = lsp_types::DocumentSymbol {
+ name: symbol.label,
+ detail: symbol.detail,
+ kind: to_proto::structure_node_kind(symbol.kind),
+ tags: Some(tags),
+ deprecated: Some(symbol.deprecated),
+ range: to_proto::range(&line_index, symbol.node_range),
+ selection_range: to_proto::range(&line_index, symbol.navigation_range),
+ children: None,
+ };
+ parents.push((doc_symbol, symbol.parent));
+ }
+
+ // Builds hierarchy from a flat list, in reverse order (so that indices
+    // make sense)
+ let document_symbols = {
+ let mut acc = Vec::new();
+ while let Some((mut node, parent_idx)) = parents.pop() {
+ if let Some(children) = &mut node.children {
+ children.reverse();
+ }
+ let parent = match parent_idx {
+ None => &mut acc,
+ Some(i) => parents[i].0.children.get_or_insert_with(Vec::new),
+ };
+ parent.push(node);
+ }
+ acc.reverse();
+ acc
+ };
+
+ let res = if snap.config.hierarchical_symbols() {
+ document_symbols.into()
+ } else {
+ let url = to_proto::url(&snap, file_id);
+ let mut symbol_information = Vec::<SymbolInformation>::new();
+ for symbol in document_symbols {
+ flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
+ }
+ symbol_information.into()
+ };
+ return Ok(Some(res));
+
+ fn flatten_document_symbol(
+ symbol: &lsp_types::DocumentSymbol,
+ container_name: Option<String>,
+ url: &Url,
+ res: &mut Vec<SymbolInformation>,
+ ) {
+ let mut tags = Vec::new();
+
+ #[allow(deprecated)]
+ if let Some(true) = symbol.deprecated {
+ tags.push(SymbolTag::DEPRECATED)
+ }
+
+ #[allow(deprecated)]
+ res.push(SymbolInformation {
+ name: symbol.name.clone(),
+ kind: symbol.kind,
+ tags: Some(tags),
+ deprecated: symbol.deprecated,
+ location: Location::new(url.clone(), symbol.range),
+ container_name,
+ });
+
+ for child in symbol.children.iter().flatten() {
+ flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
+ }
+ }
+}
+
+pub(crate) fn handle_workspace_symbol(
+ snap: GlobalStateSnapshot,
+ params: WorkspaceSymbolParams,
+) -> Result<Option<Vec<SymbolInformation>>> {
+ let _p = profile::span("handle_workspace_symbol");
+
+ let config = snap.config.workspace_symbol();
+ let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
+ let limit = config.search_limit;
+
+ let query = {
+ let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
+ let mut q = Query::new(query);
+ if !all_symbols {
+ q.only_types();
+ }
+ if libs {
+ q.libs();
+ }
+ q.limit(limit);
+ q
+ };
+ let mut res = exec_query(&snap, query)?;
+ if res.is_empty() && !all_symbols {
+ let mut query = Query::new(params.query);
+ query.limit(limit);
+ res = exec_query(&snap, query)?;
+ }
+
+ return Ok(Some(res));
+
+ fn decide_search_scope_and_kind(
+ params: &WorkspaceSymbolParams,
+ config: &WorkspaceSymbolConfig,
+ ) -> (bool, bool) {
+ // Support old-style parsing of markers in the query.
+ let mut all_symbols = params.query.contains('#');
+ let mut libs = params.query.contains('*');
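+        // e.g. "Foo#" searches all symbol kinds and "Foo*" also searches
+        // dependencies; the markers themselves are stripped from the query
+        // text before the search runs.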
+
+ // If no explicit marker was set, check request params. If that's also empty
+ // use global config.
+ if !all_symbols {
+ let search_kind = match params.search_kind {
+ Some(ref search_kind) => search_kind,
+ None => &config.search_kind,
+ };
+ all_symbols = match search_kind {
+ lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,
+ lsp_ext::WorkspaceSymbolSearchKind::AllSymbols => true,
+ }
+ }
+
+ if !libs {
+ let search_scope = match params.search_scope {
+ Some(ref search_scope) => search_scope,
+ None => &config.search_scope,
+ };
+ libs = match search_scope {
+ lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,
+ lsp_ext::WorkspaceSymbolSearchScope::WorkspaceAndDependencies => true,
+ }
+ }
+
+ (all_symbols, libs)
+ }
+
+ fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
+ let mut res = Vec::new();
+ for nav in snap.analysis.symbol_search(query)? {
+ let container_name = nav.container_name.as_ref().map(|v| v.to_string());
+
+ #[allow(deprecated)]
+ let info = SymbolInformation {
+ name: nav.name.to_string(),
+ kind: nav
+ .kind
+ .map(to_proto::symbol_kind)
+ .unwrap_or(lsp_types::SymbolKind::VARIABLE),
+ tags: None,
+ location: to_proto::location_from_nav(snap, nav)?,
+ container_name,
+ deprecated: None,
+ };
+ res.push(info);
+ }
+ Ok(res)
+ }
+}
+
+pub(crate) fn handle_will_rename_files(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::RenameFilesParams,
+) -> Result<Option<lsp_types::WorkspaceEdit>> {
+ let _p = profile::span("handle_will_rename_files");
+
+ let source_changes: Vec<SourceChange> = params
+ .files
+ .into_iter()
+ .filter_map(|file_rename| {
+ let from = Url::parse(&file_rename.old_uri).ok()?;
+ let to = Url::parse(&file_rename.new_uri).ok()?;
+
+ let from_path = from.to_file_path().ok()?;
+ let to_path = to.to_file_path().ok()?;
+
+ // Limit to single-level moves for now.
+ match (from_path.parent(), to_path.parent()) {
+ (Some(p1), Some(p2)) if p1 == p2 => {
+ if from_path.is_dir() {
+ // add '/' to end of url -- from `file://path/to/folder` to `file://path/to/folder/`
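+                        // e.g. renaming the folder `src/foo` to `src/bar` is treated as
+                        // a rename of the module declared by `src/foo/mod.rs`.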
+ let mut old_folder_name = from_path.file_stem()?.to_str()?.to_string();
+ old_folder_name.push('/');
+ let from_with_trailing_slash = from.join(&old_folder_name).ok()?;
+
+ let imitate_from_url = from_with_trailing_slash.join("mod.rs").ok()?;
+ let new_file_name = to_path.file_name()?.to_str()?;
+ Some((
+ snap.url_to_file_id(&imitate_from_url).ok()?,
+ new_file_name.to_string(),
+ ))
+ } else {
+ let old_name = from_path.file_stem()?.to_str()?;
+ let new_name = to_path.file_stem()?.to_str()?;
+ match (old_name, new_name) {
+ ("mod", _) => None,
+ (_, "mod") => None,
+ _ => Some((snap.url_to_file_id(&from).ok()?, new_name.to_string())),
+ }
+ }
+ }
+ _ => None,
+ }
+ })
+ .filter_map(|(file_id, new_name)| {
+ snap.analysis.will_rename_file(file_id, &new_name).ok()?
+ })
+ .collect();
+
+ // Drop file system edits since we're just renaming things on the same level
+ let mut source_changes = source_changes.into_iter();
+ let mut source_change = source_changes.next().unwrap_or_default();
+ source_change.file_system_edits.clear();
+    // no collect here because we want to merge text edits on the same file ids
+ source_change.extend(source_changes.flat_map(|it| it.source_file_edits));
+ if source_change.source_file_edits.is_empty() {
+ Ok(None)
+ } else {
+ to_proto::workspace_edit(&snap, source_change).map(Some)
+ }
+}
+
+pub(crate) fn handle_goto_definition(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::GotoDefinitionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_goto_definition");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_definition(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_declaration(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoDeclarationParams,
+) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
+ let _p = profile::span("handle_goto_declaration");
+ let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
+ let nav_info = match snap.analysis.goto_declaration(position)? {
+ None => return handle_goto_definition(snap, params),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_implementation(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoImplementationParams,
+) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
+ let _p = profile::span("handle_goto_implementation");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_implementation(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_type_definition(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoTypeDefinitionParams,
+) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
+ let _p = profile::span("handle_goto_type_definition");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_type_definition(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_parent_module(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_parent_module");
+ if let Ok(file_path) = &params.text_document.uri.to_file_path() {
+ if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
+ // search workspaces for parent packages or fallback to workspace root
+ let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() {
+ Some(abs_path_buf) => abs_path_buf,
+ None => return Ok(None),
+ };
+
+ let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() {
+ Some(manifest_path) => manifest_path,
+ None => return Ok(None),
+ };
+
+ let links: Vec<LocationLink> = snap
+ .workspaces
+ .iter()
+ .filter_map(|ws| match ws {
+ ProjectWorkspace::Cargo { cargo, .. } => cargo.parent_manifests(&manifest_path),
+ _ => None,
+ })
+ .flatten()
+ .map(|parent_manifest_path| LocationLink {
+ origin_selection_range: None,
+ target_uri: to_proto::url_from_abs_path(&parent_manifest_path),
+ target_range: Range::default(),
+ target_selection_range: Range::default(),
+ })
+ .collect::<_>();
+ return Ok(Some(links.into()));
+ }
+
+ // check if invoked at the crate root
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let crate_id = match snap.analysis.crate_for(file_id)?.first() {
+ Some(&crate_id) => crate_id,
+ None => return Ok(None),
+ };
+ let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ if snap.analysis.crate_root(crate_id)? == file_id {
+ let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+ let res = vec![LocationLink {
+ origin_selection_range: None,
+ target_uri: cargo_toml_url,
+ target_range: Range::default(),
+ target_selection_range: Range::default(),
+ }]
+ .into();
+ return Ok(Some(res));
+ }
+ }
+
+ // locate parent module by semantics
+ let position = from_proto::file_position(&snap, params)?;
+ let navs = snap.analysis.parent_module(position)?;
+ let res = to_proto::goto_definition_response(&snap, None, navs)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_runnables(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::RunnablesParams,
+) -> Result<Vec<lsp_ext::Runnable>> {
+ let _p = profile::span("handle_runnables");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
+ let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
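+ // Figure out whether the cursor sits inside an `expect!`/`expect_file!` macro call;
+ // matching runnables below then get `expect_test` set and a " + expect" label suffix.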
+ let expect_test = match offset {
+ Some(offset) => {
+ let source_file = snap.analysis.parse(file_id)?;
+ algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset)
+ .and_then(|it| it.path()?.segment()?.name_ref())
+ .map_or(false, |it| it.text() == "expect" || it.text() == "expect_file")
+ }
+ None => false,
+ };
+
+ let mut res = Vec::new();
+ for runnable in snap.analysis.runnables(file_id)? {
+ if let Some(offset) = offset {
+ if !runnable.nav.full_range.contains_inclusive(offset) {
+ continue;
+ }
+ }
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ continue;
+ }
+ let mut runnable = to_proto::runnable(&snap, runnable)?;
+ if expect_test {
+ runnable.label = format!("{} + expect", runnable.label);
+ runnable.args.expect_test = Some(true);
+ }
+ res.push(runnable);
+ }
+
+ // Add `cargo check` and `cargo test` for all targets of the whole package
+ let config = snap.config.runnables();
+ match cargo_spec {
+ Some(spec) => {
+ for cmd in ["check", "test"] {
+ res.push(lsp_ext::Runnable {
+ label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
+ location: None,
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: Some(spec.workspace_root.clone().into()),
+ override_cargo: config.override_cargo.clone(),
+ cargo_args: vec![
+ cmd.to_string(),
+ "--package".to_string(),
+ spec.package.clone(),
+ "--all-targets".to_string(),
+ ],
+ cargo_extra_args: config.cargo_extra_args.clone(),
+ executable_args: Vec::new(),
+ expect_test: None,
+ },
+ })
+ }
+ }
+ None => {
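+ // No cargo target for this file: still offer a workspace-wide `cargo check`
+ // as long as some project is linked or has been discovered.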
+ if !snap.config.linked_projects().is_empty()
+ || !snap
+ .config
+ .discovered_projects
+ .as_ref()
+ .map(|projects| projects.is_empty())
+ .unwrap_or(true)
+ {
+ res.push(lsp_ext::Runnable {
+ label: "cargo check --workspace".to_string(),
+ location: None,
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: None,
+ override_cargo: config.override_cargo,
+ cargo_args: vec!["check".to_string(), "--workspace".to_string()],
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args: Vec::new(),
+ expect_test: None,
+ },
+ });
+ }
+ }
+ }
+ Ok(res)
+}
+
+pub(crate) fn handle_related_tests(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Vec<lsp_ext::TestInfo>> {
+ let _p = profile::span("handle_related_tests");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let tests = snap.analysis.related_tests(position, None)?;
+ let mut res = Vec::new();
+ for it in tests {
+ if let Ok(runnable) = to_proto::runnable(&snap, it) {
+ res.push(lsp_ext::TestInfo { runnable })
+ }
+ }
+
+ Ok(res)
+}
+
+pub(crate) fn handle_completion(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CompletionParams,
+) -> Result<Option<lsp_types::CompletionResponse>> {
+ let _p = profile::span("handle_completion");
+ let text_document_position = params.text_document_position.clone();
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+ let completion_trigger_character =
+ params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
+
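+ // When completion is triggered by ':', bail out if the token to the left is only a
+ // single ':' rather than a full '::' path separator.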
+ if Some(':') == completion_trigger_character {
+ let source_file = snap.analysis.parse(position.file_id)?;
+ let left_token = source_file.syntax().token_at_offset(position.offset).left_biased();
+ let completion_triggered_after_single_colon = match left_token {
+ Some(left_token) => left_token.kind() == T![:],
+ None => true,
+ };
+ if completion_triggered_after_single_colon {
+ return Ok(None);
+ }
+ }
+
+ let completion_config = &snap.config.completion();
+ let items = match snap.analysis.completions(
+ completion_config,
+ position,
+ completion_trigger_character,
+ )? {
+ None => return Ok(None),
+ Some(items) => items,
+ };
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ let items =
+ to_proto::completion_items(&snap.config, &line_index, text_document_position, items);
+
+ let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
+ Ok(Some(completion_list.into()))
+}
+
+pub(crate) fn handle_completion_resolve(
+ snap: GlobalStateSnapshot,
+ mut original_completion: CompletionItem,
+) -> Result<CompletionItem> {
+ let _p = profile::span("handle_completion_resolve");
+
+ if !all_edits_are_disjoint(&original_completion, &[]) {
+ return Err(invalid_params_error(
+ "Received a completion with overlapping edits, this is not LSP-compliant".to_string(),
+ )
+ .into());
+ }
+
+ let data = match original_completion.data.take() {
+ Some(it) => it,
+ None => return Ok(original_completion),
+ };
+
+ let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
+
+ let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
+
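+ // Compute the import edits that were deferred until resolve time and convert them into LSP text edits.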
+ let additional_edits = snap
+ .analysis
+ .resolve_completion_edits(
+ &snap.config.completion(),
+ FilePosition { file_id, offset },
+ resolve_data
+ .imports
+ .into_iter()
+ .map(|import| (import.full_import_path, import.imported_name)),
+ )?
+ .into_iter()
+ .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
+ .collect::<Vec<_>>();
+
+ if !all_edits_are_disjoint(&original_completion, &additional_edits) {
+ return Err(LspError::new(
+ ErrorCode::InternalError as i32,
+ "Import edit overlaps with the original completion edits, this is not LSP-compliant"
+ .into(),
+ )
+ .into());
+ }
+
+ if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
+ original_additional_edits.extend(additional_edits.into_iter())
+ } else {
+ original_completion.additional_text_edits = Some(additional_edits);
+ }
+
+ Ok(original_completion)
+}
+
+pub(crate) fn handle_folding_range(
+ snap: GlobalStateSnapshot,
+ params: FoldingRangeParams,
+) -> Result<Option<Vec<FoldingRange>>> {
+ let _p = profile::span("handle_folding_range");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let folds = snap.analysis.folding_ranges(file_id)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let line_folding_only = snap.config.line_folding_only();
+ let res = folds
+ .into_iter()
+ .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
+ .collect();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_signature_help(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::SignatureHelpParams,
+) -> Result<Option<lsp_types::SignatureHelp>> {
+ let _p = profile::span("handle_signature_help");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let help = match snap.analysis.signature_help(position)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+ let config = snap.config.call_info();
+ let res = to_proto::signature_help(help, config, snap.config.signature_help_label_offsets());
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_hover(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::HoverParams,
+) -> Result<Option<lsp_ext::Hover>> {
+ let _p = profile::span("handle_hover");
+ let range = match params.position {
+ PositionOrRange::Position(position) => Range::new(position, position),
+ PositionOrRange::Range(range) => range,
+ };
+
+ let file_range = from_proto::file_range(&snap, params.text_document, range)?;
+ let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+ None => return Ok(None),
+ Some(info) => info,
+ };
+
+ let line_index = snap.file_line_index(file_range.file_id)?;
+ let range = to_proto::range(&line_index, info.range);
+ let markup_kind =
+ snap.config.hover().documentation.unwrap_or(ide::HoverDocFormat::Markdown);
+ let hover = lsp_ext::Hover {
+ hover: lsp_types::Hover {
+ contents: HoverContents::Markup(to_proto::markup_content(
+ info.info.markup,
+ markup_kind,
+ )),
+ range: Some(range),
+ },
+ actions: if snap.config.hover_actions().none() {
+ Vec::new()
+ } else {
+ prepare_hover_actions(&snap, &info.info.actions)
+ },
+ };
+
+ Ok(Some(hover))
+}
+
+pub(crate) fn handle_prepare_rename(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<PrepareRenameResponse>> {
+ let _p = profile::span("handle_prepare_rename");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
+
+ let line_index = snap.file_line_index(position.file_id)?;
+ let range = to_proto::range(&line_index, change.range);
+ Ok(Some(PrepareRenameResponse::Range(range)))
+}
+
+pub(crate) fn handle_rename(
+ snap: GlobalStateSnapshot,
+ params: RenameParams,
+) -> Result<Option<WorkspaceEdit>> {
+ let _p = profile::span("handle_rename");
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+ let mut change =
+ snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
+
+ // This is kind of a hack to prevent double edits from happening when moving files.
+ // When a module gets renamed by renaming its mod declaration, the file moves, which in turn
+ // triggers a WillRenameFiles request to the server, to which we would reply with a second,
+ // identical set of renames; the client would then apply both edits, producing incorrect results.
+ // So here we clear source_file_edits; they are emitted only in the WillRenameFiles response, which performs the rename instead.
+ // See https://github.com/microsoft/vscode-languageserver-node/issues/752 for more info.
+ if !change.file_system_edits.is_empty() && snap.config.will_rename() {
+ change.source_file_edits.clear();
+ }
+ let workspace_edit = to_proto::workspace_edit(&snap, change)?;
+ Ok(Some(workspace_edit))
+}
+
+pub(crate) fn handle_references(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::ReferenceParams,
+) -> Result<Option<Vec<Location>>> {
+ let _p = profile::span("handle_references");
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+ let refs = match snap.analysis.find_all_refs(position, None)? {
+ None => return Ok(None),
+ Some(refs) => refs,
+ };
+
+ let include_declaration = params.context.include_declaration;
+ let locations = refs
+ .into_iter()
+ .flat_map(|refs| {
+ let decl = if include_declaration {
+ refs.declaration.map(|decl| FileRange {
+ file_id: decl.nav.file_id,
+ range: decl.nav.focus_or_full_range(),
+ })
+ } else {
+ None
+ };
+ refs.references
+ .into_iter()
+ .flat_map(|(file_id, refs)| {
+ refs.into_iter().map(move |(range, _)| FileRange { file_id, range })
+ })
+ .chain(decl)
+ })
+ .filter_map(|frange| to_proto::location(&snap, frange).ok())
+ .collect();
+
+ Ok(Some(locations))
+}
+
+pub(crate) fn handle_formatting(
+ snap: GlobalStateSnapshot,
+ params: DocumentFormattingParams,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let _p = profile::span("handle_formatting");
+
+ run_rustfmt(&snap, params.text_document, None)
+}
+
+pub(crate) fn handle_range_formatting(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentRangeFormattingParams,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let _p = profile::span("handle_range_formatting");
+
+ run_rustfmt(&snap, params.text_document, Some(params.range))
+}
+
+pub(crate) fn handle_code_action(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CodeActionParams,
+) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
+ let _p = profile::span("handle_code_action");
+
+ if !snap.config.code_action_literals() {
+ // We intentionally don't support command-based actions, as those require
+ // either custom client code or server-initiated edits. Server-initiated
+ // edits break causality, so we avoid those.
+ return Ok(None);
+ }
+
+ let line_index =
+ snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?;
+ let frange = from_proto::file_range(&snap, params.text_document.clone(), params.range)?;
+
+ let mut assists_config = snap.config.assist();
+ assists_config.allowed = params
+ .context
+ .only
+ .clone()
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+ let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
+
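+ // If the client supports lazy codeAction/resolve, defer computing edits; otherwise resolve all assists up front.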
+ let code_action_resolve_cap = snap.config.code_action_resolve();
+ let resolve = if code_action_resolve_cap {
+ AssistResolveStrategy::None
+ } else {
+ AssistResolveStrategy::All
+ };
+ let assists = snap.analysis.assists_with_fixes(
+ &assists_config,
+ &snap.config.diagnostics(),
+ resolve,
+ frange,
+ )?;
+ for (index, assist) in assists.into_iter().enumerate() {
+ let resolve_data =
+ if code_action_resolve_cap { Some((index, params.clone())) } else { None };
+ let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
+ res.push(code_action)
+ }
+
+ // Fixes from `cargo check`.
+ for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
+ // FIXME: this mapping is awkward and shouldn't exist. Refactor
+ // `snap.check_fixes` to not convert to LSP prematurely.
+ let intersect_fix_range = fix
+ .ranges
+ .iter()
+ .copied()
+ .filter_map(|range| from_proto::text_range(&line_index, range).ok())
+ .any(|fix_range| fix_range.intersect(frange.range).is_some());
+ if intersect_fix_range {
+ res.push(fix.action.clone());
+ }
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_code_action_resolve(
+ snap: GlobalStateSnapshot,
+ mut code_action: lsp_ext::CodeAction,
+) -> Result<lsp_ext::CodeAction> {
+ let _p = profile::span("handle_code_action_resolve");
+ let params = match code_action.data.take() {
+ Some(it) => it,
+ None => return Err(invalid_params_error("code action without data".to_string()).into()),
+ };
+
+ let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
+ let frange = FileRange { file_id, range };
+
+ let mut assists_config = snap.config.assist();
+ assists_config.allowed = params
+ .code_action_params
+ .context
+ .only
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+ let (assist_index, assist_resolve) = match parse_action_id(&params.id) {
+ Ok(parsed_data) => parsed_data,
+ Err(e) => {
+ return Err(invalid_params_error(format!(
+ "Failed to parse action id string '{}': {}",
+ params.id, e
+ ))
+ .into())
+ }
+ };
+
+ let expected_assist_id = assist_resolve.assist_id.clone();
+ let expected_kind = assist_resolve.assist_kind;
+
+ let assists = snap.analysis.assists_with_fixes(
+ &assists_config,
+ &snap.config.diagnostics(),
+ AssistResolveStrategy::Single(assist_resolve),
+ frange,
+ )?;
+
+ let assist = match assists.get(assist_index) {
+ Some(assist) => assist,
+ None => return Err(invalid_params_error(format!(
+ "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}",
+ assist_index, params.id,
+ ))
+ .into())
+ };
+ if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind {
+ return Err(invalid_params_error(format!(
+ "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.",
+ assist_index, params.id, assist.id
+ ))
+ .into());
+ }
+ let ca = to_proto::code_action(&snap, assist.clone(), None)?;
+ code_action.edit = ca.edit;
+ code_action.command = ca.command;
+ Ok(code_action)
+}
+
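+ // An assist action id has the form `<assist id>:<assist kind>:<index>`; parse it back into its parts.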
+fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
+ let id_parts = action_id.split(':').collect::<Vec<_>>();
+ match id_parts.as_slice() {
+ [assist_id_string, assist_kind_string, index_string] => {
+ let assist_kind: AssistKind = assist_kind_string.parse()?;
+ let index: usize = match index_string.parse() {
+ Ok(index) => index,
+ Err(e) => return Err(format!("Incorrect index string: {}", e)),
+ };
+ Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
+ }
+ _ => Err("Action id contains incorrect number of segments".to_string()),
+ }
+}
+
+pub(crate) fn handle_code_lens(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CodeLensParams,
+) -> Result<Option<Vec<CodeLens>>> {
+ let _p = profile::span("handle_code_lens");
+
+ let lens_config = snap.config.lens();
+ if lens_config.none() {
+ // early return before any db query!
+ return Ok(Some(Vec::default()));
+ }
+
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let cargo_target_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+ let annotations = snap.analysis.annotations(
+ &AnnotationConfig {
+ binary_target: cargo_target_spec
+ .map(|spec| {
+ matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ )
+ })
+ .unwrap_or(false),
+ annotate_runnables: lens_config.runnable(),
+ annotate_impls: lens_config.implementations,
+ annotate_references: lens_config.refs_adt,
+ annotate_method_references: lens_config.method_refs,
+ annotate_enum_variant_references: lens_config.enum_variant_refs,
+ },
+ file_id,
+ )?;
+
+ let mut res = Vec::new();
+ for a in annotations {
+ to_proto::code_lens(&mut res, &snap, a)?;
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_code_lens_resolve(
+ snap: GlobalStateSnapshot,
+ code_lens: CodeLens,
+) -> Result<CodeLens> {
+ let annotation = from_proto::annotation(&snap, code_lens.clone())?;
+ let annotation = snap.analysis.resolve_annotation(annotation)?;
+
+ let mut acc = Vec::new();
+ to_proto::code_lens(&mut acc, &snap, annotation)?;
+
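+ // Resolving a single annotation should yield exactly one lens; anything else is a bug,
+ // in which case we fall back to returning the unresolved lens unchanged.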
+ let res = match acc.pop() {
+ Some(it) if acc.is_empty() => it,
+ _ => {
+ never!();
+ code_lens
+ }
+ };
+
+ Ok(res)
+}
+
+pub(crate) fn handle_document_highlight(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentHighlightParams,
+) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
+ let _p = profile::span("handle_document_highlight");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
+ None => return Ok(None),
+ Some(refs) => refs,
+ };
+ let res = refs
+ .into_iter()
+ .map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight {
+ range: to_proto::range(&line_index, range),
+ kind: category.map(to_proto::document_highlight_kind),
+ })
+ .collect();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_ssr(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::SsrParams,
+) -> Result<lsp_types::WorkspaceEdit> {
+ let _p = profile::span("handle_ssr");
+ let selections = params
+ .selections
+ .iter()
+ .map(|range| from_proto::file_range(&snap, params.position.text_document.clone(), *range))
+ .collect::<Result<Vec<_>, _>>()?;
+ let position = from_proto::file_position(&snap, params.position)?;
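+ // The nested `??` first propagates analysis cancellation, then any error from the SSR query itself.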
+ let source_change = snap.analysis.structural_search_replace(
+ &params.query,
+ params.parse_only,
+ position,
+ selections,
+ )??;
+ to_proto::workspace_edit(&snap, source_change)
+}
+
+pub(crate) fn publish_diagnostics(
+ snap: &GlobalStateSnapshot,
+ file_id: FileId,
+) -> Result<Vec<Diagnostic>> {
+ let _p = profile::span("publish_diagnostics");
+ let line_index = snap.file_line_index(file_id)?;
+
+ let diagnostics: Vec<Diagnostic> = snap
+ .analysis
+ .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
+ .into_iter()
+ .map(|d| Diagnostic {
+ range: to_proto::range(&line_index, d.range),
+ severity: Some(to_proto::diagnostic_severity(d.severity)),
+ code: Some(NumberOrString::String(d.code.as_str().to_string())),
+ code_description: Some(lsp_types::CodeDescription {
+ href: lsp_types::Url::parse(&format!(
+ "https://rust-analyzer.github.io/manual.html#{}",
+ d.code.as_str()
+ ))
+ .unwrap(),
+ }),
+ source: Some("rust-analyzer".to_string()),
+ // https://github.com/rust-lang/rust-analyzer/issues/11404
+ message: if !d.message.is_empty() { d.message } else { " ".to_string() },
+ related_information: None,
+ tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
+ data: None,
+ })
+ .collect();
+ Ok(diagnostics)
+}
+
+pub(crate) fn handle_inlay_hints(
+ snap: GlobalStateSnapshot,
+ params: InlayHintParams,
+) -> Result<Option<Vec<InlayHint>>> {
+ let _p = profile::span("handle_inlay_hints");
+ let document_uri = &params.text_document.uri;
+ let file_id = from_proto::file_id(&snap, document_uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = from_proto::file_range(
+ &snap,
+ TextDocumentIdentifier::new(document_uri.to_owned()),
+ params.range,
+ )?;
+ let inlay_hints_config = snap.config.inlay_hints();
+ Ok(Some(
+ snap.analysis
+ .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+ .into_iter()
+ .map(|it| {
+ to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
+ })
+ .collect(),
+ ))
+}
+
+pub(crate) fn handle_inlay_hints_resolve(
+ snap: GlobalStateSnapshot,
+ mut hint: InlayHint,
+) -> Result<InlayHint> {
+ let _p = profile::span("handle_inlay_hints_resolve");
+ let data = match hint.data.take() {
+ Some(it) => it,
+ None => return Ok(hint),
+ };
+
+ let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
+
+ let file_range = from_proto::file_range(
+ &snap,
+ resolve_data.text_document,
+ match resolve_data.position {
+ PositionOrRange::Position(pos) => Range::new(pos, pos),
+ PositionOrRange::Range(range) => range,
+ },
+ )?;
+ let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+ None => return Ok(hint),
+ Some(info) => info,
+ };
+
+ let markup_kind =
+ snap.config.hover().documentation.unwrap_or(ide::HoverDocFormat::Markdown);
+
+ // FIXME: hover actions?
+ hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content(
+ info.info.markup,
+ markup_kind,
+ )));
+ Ok(hint)
+}
+
+pub(crate) fn handle_call_hierarchy_prepare(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyPrepareParams,
+) -> Result<Option<Vec<CallHierarchyItem>>> {
+ let _p = profile::span("handle_call_hierarchy_prepare");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+
+ let nav_info = match snap.analysis.call_hierarchy(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let RangeInfo { range: _, info: navs } = nav_info;
+ let res = navs
+ .into_iter()
+ .filter(|it| it.kind == Some(SymbolKind::Function))
+ .map(|it| to_proto::call_hierarchy_item(&snap, it))
+ .collect::<Result<Vec<_>>>()?;
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_call_hierarchy_incoming(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyIncomingCallsParams,
+) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
+ let _p = profile::span("handle_call_hierarchy_incoming");
+ let item = params.item;
+
+ let doc = TextDocumentIdentifier::new(item.uri);
+ let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+ let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let call_items = match snap.analysis.incoming_calls(fpos)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = vec![];
+
+ for call_item in call_items.into_iter() {
+ let file_id = call_item.target.file_id;
+ let line_index = snap.file_line_index(file_id)?;
+ let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+ res.push(CallHierarchyIncomingCall {
+ from: item,
+ from_ranges: call_item
+ .ranges
+ .into_iter()
+ .map(|it| to_proto::range(&line_index, it))
+ .collect(),
+ });
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_call_hierarchy_outgoing(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyOutgoingCallsParams,
+) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
+ let _p = profile::span("handle_call_hierarchy_outgoing");
+ let item = params.item;
+
+ let doc = TextDocumentIdentifier::new(item.uri);
+ let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+ let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let call_items = match snap.analysis.outgoing_calls(fpos)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = vec![];
+
+ for call_item in call_items.into_iter() {
+ let file_id = call_item.target.file_id;
+ let line_index = snap.file_line_index(file_id)?;
+ let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+ res.push(CallHierarchyOutgoingCall {
+ to: item,
+ from_ranges: call_item
+ .ranges
+ .into_iter()
+ .map(|it| to_proto::range(&line_index, it))
+ .collect(),
+ });
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_semantic_tokens_full(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensParams,
+) -> Result<Option<SemanticTokensResult>> {
+ let _p = profile::span("handle_semantic_tokens_full");
+
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let highlights = snap.analysis.highlight(file_id)?;
+ let highlight_strings = snap.config.highlighting_strings();
+ let semantic_tokens =
+ to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+
+ // Unconditionally cache the tokens
+ snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_full_delta(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensDeltaParams,
+) -> Result<Option<SemanticTokensFullDeltaResult>> {
+ let _p = profile::span("handle_semantic_tokens_full_delta");
+
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let highlights = snap.analysis.highlight(file_id)?;
+ let highlight_strings = snap.config.highlighting_strings();
+ let semantic_tokens =
+ to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+
+ let mut cache = snap.semantic_tokens_cache.lock();
+ let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
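+ // If the client still holds the tokens we cached for this document, send only a delta;
+ // otherwise fall through and send (and cache) the full token set again.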
+ if let Some(prev_id) = &cached_tokens.result_id {
+ if *prev_id == params.previous_result_id {
+ let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
+ *cached_tokens = semantic_tokens;
+ return Ok(Some(delta.into()));
+ }
+ }
+
+ *cached_tokens = semantic_tokens.clone();
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_range(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+ let _p = profile::span("handle_semantic_tokens_range");
+
+ let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
+ let text = snap.analysis.file_text(frange.file_id)?;
+ let line_index = snap.file_line_index(frange.file_id)?;
+
+ let highlights = snap.analysis.highlight_range(frange)?;
+ let highlight_strings = snap.config.highlighting_strings();
+ let semantic_tokens =
+ to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_open_docs(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::Url>> {
+ let _p = profile::span("handle_open_docs");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let remote = snap.analysis.external_docs(position)?;
+
+ Ok(remote.and_then(|remote| Url::parse(&remote).ok()))
+}
+
+pub(crate) fn handle_open_cargo_toml(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::OpenCargoTomlParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_open_cargo_toml");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+
+ let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+ let res: lsp_types::GotoDefinitionResponse =
+ Location::new(cargo_toml_url, Range::default()).into();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_move_item(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::MoveItemParams,
+) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
+ let _p = profile::span("handle_move_item");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let range = from_proto::file_range(&snap, params.text_document, params.range)?;
+
+ let direction = match params.direction {
+ lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
+ lsp_ext::MoveItemDirection::Down => ide::Direction::Down,
+ };
+
+ match snap.analysis.move_item(range, direction)? {
+ Some(text_edit) => {
+ let line_index = snap.file_line_index(file_id)?;
+ Ok(to_proto::snippet_text_edit_vec(&line_index, true, text_edit))
+ }
+ None => Ok(vec![]),
+ }
+}
+
+fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
+ lsp_ext::CommandLink { tooltip: Some(tooltip), command }
+}
+
+fn show_impl_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference {
+ if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = nav_data
+ .info
+ .into_iter()
+ .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok())
+ .collect();
+ let title = to_proto::implementation_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to implementations".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn show_ref_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().references && snap.config.client_commands().show_reference {
+ if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = ref_search_res
+ .into_iter()
+ .flat_map(|res| res.references)
+ .flat_map(|(file_id, ranges)| {
+ ranges.into_iter().filter_map(move |(range, _)| {
+ to_proto::location(snap, FileRange { file_id, range }).ok()
+ })
+ })
+ .collect();
+ let title = to_proto::reference_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to references".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn runnable_action_links(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ let hover_actions_config = snap.config.hover_actions();
+ if !hover_actions_config.runnable() {
+ return None;
+ }
+
+ let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ return None;
+ }
+
+ let client_commands_config = snap.config.client_commands();
+ if !(client_commands_config.run_single || client_commands_config.debug_single) {
+ return None;
+ }
+
+ let title = runnable.title();
+ let r = to_proto::runnable(snap, runnable).ok()?;
+
+ let mut group = lsp_ext::CommandLinkGroup::default();
+
+ if hover_actions_config.run && client_commands_config.run_single {
+ let run_command = to_proto::command::run_single(&r, &title);
+ group.commands.push(to_command_link(run_command, r.label.clone()));
+ }
+
+ if hover_actions_config.debug && client_commands_config.debug_single {
+ let dbg_command = to_proto::command::debug_single(&r);
+ group.commands.push(to_command_link(dbg_command, r.label));
+ }
+
+ Some(group)
+}
+
+fn goto_type_action_links(
+ snap: &GlobalStateSnapshot,
+ nav_targets: &[HoverGotoTypeData],
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if !snap.config.hover_actions().goto_type_def
+ || nav_targets.is_empty()
+ || !snap.config.client_commands().goto_location
+ {
+ return None;
+ }
+
+ Some(lsp_ext::CommandLinkGroup {
+ title: Some("Go to ".into()),
+ commands: nav_targets
+ .iter()
+ .filter_map(|it| {
+ to_proto::command::goto_location(snap, &it.nav)
+ .map(|cmd| to_command_link(cmd, it.mod_path.clone()))
+ })
+ .collect(),
+ })
+}
+
+fn prepare_hover_actions(
+ snap: &GlobalStateSnapshot,
+ actions: &[HoverAction],
+) -> Vec<lsp_ext::CommandLinkGroup> {
+ actions
+ .iter()
+ .filter_map(|it| match it {
+ HoverAction::Implementation(position) => show_impl_command_link(snap, position),
+ HoverAction::Reference(position) => show_ref_command_link(snap, position),
+ HoverAction::Runnable(r) => runnable_action_links(snap, r.clone()),
+ HoverAction::GoToType(targets) => goto_type_action_links(snap, targets),
+ })
+ .collect()
+}
+
+fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) -> bool {
+ match runnable.kind {
+ RunnableKind::Bin => {
+ // Only suggest running a binary when the cargo target can actually produce one (bin, example, or test).
+ match &cargo_spec {
+ Some(spec) => !matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ ),
+ None => true,
+ }
+ }
+ _ => false,
+ }
+}
+
+fn run_rustfmt(
+ snap: &GlobalStateSnapshot,
+ text_document: TextDocumentIdentifier,
+ range: Option<lsp_types::Range>,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let file_id = from_proto::file_id(snap, &text_document.uri)?;
+ let file = snap.analysis.file_text(file_id)?;
+ let crate_ids = snap.analysis.crate_for(file_id)?;
+
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut rustfmt = match snap.config.rustfmt() {
+ RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
+ let mut cmd = process::Command::new(toolchain::rustfmt());
+ cmd.args(extra_args);
+ // try to chdir to the file's directory so we can respect a local `rustfmt.toml`
+ // FIXME: use `rustfmt --config-path` once
+ // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
+ match text_document.uri.to_file_path() {
+ Ok(mut path) => {
+ // pop off file name
+ if path.pop() && path.is_dir() {
+ cmd.current_dir(path);
+ }
+ }
+ Err(_) => {
+ tracing::error!(
+ "Unable to get file path for {}, rustfmt.toml might be ignored",
+ text_document.uri
+ );
+ }
+ }
+ if let Some(&crate_id) = crate_ids.first() {
+ // Assume all crates are in the same edition
+ let edition = snap.analysis.crate_edition(crate_id)?;
+ cmd.arg("--edition");
+ cmd.arg(edition.to_string());
+ }
+
+ if let Some(range) = range {
+ if !enable_range_formatting {
+ return Err(LspError::new(
+ ErrorCode::InvalidRequest as i32,
+ String::from(
+ "rustfmt range formatting is unstable. \
+ Opt-in by using a nightly build of rustfmt and setting \
+ `rustfmt.rangeFormatting.enable` to true in your LSP configuration",
+ ),
+ )
+ .into());
+ }
+
+ let frange = from_proto::file_range(snap, text_document, range)?;
+ let start_line = line_index.index.line_col(frange.range.start()).line;
+ let end_line = line_index.index.line_col(frange.range.end()).line;
+
+ cmd.arg("--unstable-features");
+ cmd.arg("--file-lines");
+ cmd.arg(
+ json!([{
+ "file": "stdin",
+ "range": [start_line, end_line]
+ }])
+ .to_string(),
+ );
+ }
+
+ cmd
+ }
+ RustfmtConfig::CustomCommand { command, args } => {
+ let mut cmd = process::Command::new(command);
+ cmd.args(args);
+ cmd
+ }
+ };
+
+ let mut rustfmt = rustfmt
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context(format!("Failed to spawn {:?}", rustfmt))?;
+
+ rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
+
+ let output = rustfmt.wait_with_output()?;
+ let captured_stdout = String::from_utf8(output.stdout)?;
+ let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();
+
+ if !output.status.success() {
+ let rustfmt_not_installed =
+ captured_stderr.contains("not installed") || captured_stderr.contains("not available");
+
+ return match output.status.code() {
+ Some(1) if !rustfmt_not_installed => {
+ // While `rustfmt` doesn't have a specific exit code for parse errors, a parse error
+ // is the likely cause of it exiting with 1. Most Language Servers swallow parse errors on
+ // formatting because otherwise an error is surfaced to the user on top of the
+ // syntax error diagnostics they're already receiving. This is especially jarring
+ // if they have format on save enabled.
+ tracing::info!("rustfmt exited with status 1, assuming parse error and ignoring");
+ Ok(None)
+ }
+ _ => {
+ // Something else happened - e.g. `rustfmt` is missing or caught a signal
+ Err(LspError::new(
+ -32900,
+ format!(
+ r#"rustfmt exited with:
+ Status: {}
+ stdout: {}
+ stderr: {}"#,
+ output.status, captured_stdout, captured_stderr,
+ ),
+ )
+ .into())
+ }
+ };
+ }
+
+ let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout);
+
+ if line_index.endings != new_line_endings {
+ // If line endings are different, send the entire file.
+ // Diffing would not work here, as the line endings might be the only
+ // difference.
+ Ok(Some(to_proto::text_edit_vec(
+ &line_index,
+ TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text),
+ )))
+ } else if *file == new_text {
+ // The document is already formatted correctly -- no edits needed.
+ Ok(None)
+ } else {
+ Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
new file mode 100644
index 000000000..47cdd8dfc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -0,0 +1,196 @@
+//! Fully integrated benchmarks for rust-analyzer, which load real cargo
+//! projects.
+//!
+//! The benchmarks here are used to debug specific performance regressions. If you
+//! notice that, e.g., completion is slow in some specific case, you can modify the
+//! code here to exercise that specific completion, and thus get a fast
+//! edit/compile/test cycle.
+//!
+//! Note that the "Rust Analyzer: Run" action does not allow running a single test
+//! in release mode in VS Code. There is, however, a "Rust Analyzer: Copy Run Command Line"
+//! action, which you can use to paste the command into a terminal and add `--release` manually.
+
+use std::sync::Arc;
+
+use ide::{CallableSnippets, Change, CompletionConfig, FilePosition, TextSize};
+use ide_db::{
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ SnippetCap,
+};
+use project_model::CargoConfig;
+use test_utils::project_root;
+use vfs::{AbsPathBuf, VfsPath};
+
+use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
+
+#[test]
+fn integrated_highlighting_benchmark() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ // Load rust-analyzer itself.
+ let workspace_to_load = project_root();
+ let file = "./crates/ide-db/src/apply_change.rs";
+
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: false,
+ prefill_caches: false,
+ };
+
+ let (mut host, vfs, _proc_macro) = {
+ let _it = stdx::timeit("workspace loading");
+ load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+ };
+
+ let file_id = {
+ let file = workspace_to_load.join(file);
+ let path = VfsPath::from(AbsPathBuf::assert(file));
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ };
+
+ {
+ let _it = stdx::timeit("initial");
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+
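+ // Enable hierarchical profiling, reporting only spans that take longer than 100ms.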
+ profile::init_from("*>100");
+ // let _s = profile::heartbeat_span();
+
+ {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ text.push_str("\npub fn _dummy() {}\n");
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ }
+
+ {
+ let _it = stdx::timeit("after change");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+}
+
+#[test]
+fn integrated_completion_benchmark() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ // Load rust-analyzer itself.
+ let workspace_to_load = project_root();
+ let file = "./crates/hir/src/lib.rs";
+
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: false,
+ prefill_caches: true,
+ };
+
+ let (mut host, vfs, _proc_macro) = {
+ let _it = stdx::timeit("workspace loading");
+ load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+ };
+
+ let file_id = {
+ let file = workspace_to_load.join(file);
+ let path = VfsPath::from(AbsPathBuf::assert(file));
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ };
+
+ {
+ let _it = stdx::timeit("initial");
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+
+ profile::init_from("*>5");
+ // let _s = profile::heartbeat_span();
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ + "sel".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
+ {
+ let _p = profile::span("unqualified path completion");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
+ }
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+ + "self.".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
+ {
+ let _p = profile::span("dot completion");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
+ }
+}
+
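+ // Replaces the first occurrence of `from` with `to` in `what` and returns the byte offset at which `from` was found.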
+fn patch(what: &mut String, from: &str, to: &str) -> usize {
+ let idx = what.find(from).unwrap();
+ *what = what.replacen(from, to, 1);
+ idx
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
new file mode 100644
index 000000000..552379752
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -0,0 +1,80 @@
+//! Implementation of the LSP for rust-analyzer.
+//!
+//! This crate takes Rust-specific analysis results from the `ide` crate and
+//! translates them into LSP types.
+//!
+//! It is also the root of all state. The `global_state` module defines the bulk
+//! of the state, and the `main_loop` module defines the rules for modifying it.
+//!
+//! The `cli` submodule implements some batch-processing analysis, primarily as
+//! a debugging aid.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod cli;
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod caps;
+mod cargo_target_spec;
+mod diagnostics;
+mod diff;
+mod dispatch;
+mod from_proto;
+mod global_state;
+mod handlers;
+mod line_index;
+mod lsp_utils;
+mod main_loop;
+mod markdown;
+mod mem_docs;
+mod op_queue;
+mod reload;
+mod semantic_tokens;
+mod task_pool;
+mod to_proto;
+mod version;
+
+pub mod config;
+pub mod lsp_ext;
+
+#[cfg(test)]
+mod integrated_benchmarks;
+
+use std::fmt;
+
+use serde::de::DeserializeOwned;
+
+pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
+
+pub type Error = Box<dyn std::error::Error + Send + Sync>;
+pub type Result<T, E = Error> = std::result::Result<T, E>;
+
+pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> {
+ let res = serde_json::from_value(json.clone())
+ .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
+ Ok(res)
+}
+
+#[derive(Debug)]
+struct LspError {
+ code: i32,
+ message: String,
+}
+
+impl LspError {
+ fn new(code: i32, message: String) -> LspError {
+ LspError { code, message }
+ }
+}
+
+impl fmt::Display for LspError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Language Server request failed with {}. ({})", self.code, self.message)
+ }
+}
+
+impl std::error::Error for LspError {}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs
new file mode 100644
index 000000000..c116414da
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/line_index.rs
@@ -0,0 +1,68 @@
+//! Enhances `ide::LineIndex` with additional info required to convert offsets
+//! into lsp positions.
+//!
+//! We maintain the invariant that all internal strings use `\n` as the line separator.
+//! This module does line ending conversion and detection (so that we can
+//! convert back to `\r\n` on the way out).
+
+use std::sync::Arc;
+
+pub enum OffsetEncoding {
+ Utf8,
+ Utf16,
+}
+
+pub(crate) struct LineIndex {
+ pub(crate) index: Arc<ide::LineIndex>,
+ pub(crate) endings: LineEndings,
+ pub(crate) encoding: OffsetEncoding,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub(crate) enum LineEndings {
+ Unix,
+ Dos,
+}
+
+impl LineEndings {
+ /// Replaces `\r\n` with `\n` in-place in `src`.
+ pub(crate) fn normalize(src: String) -> (String, LineEndings) {
+ if !src.as_bytes().contains(&b'\r') {
+ return (src, LineEndings::Unix);
+ }
+
+ // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
+ // While we *can* call `as_mut_vec` and do surgery on the live string
+ // directly, let's rather steal the contents of `src`. This makes the code
+ // safe even if a panic occurs.
+
+ let mut buf = src.into_bytes();
+ let mut gap_len = 0;
+ let mut tail = buf.as_mut_slice();
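+ // `gap_len` counts the `\r` bytes dropped so far; each iteration shifts the next chunk
+ // of bytes left by that amount and then skips over one more `\r`.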
+ loop {
+ let idx = match find_crlf(&tail[gap_len..]) {
+ None => tail.len(),
+ Some(idx) => idx + gap_len,
+ };
+ tail.copy_within(gap_len..idx, 0);
+ tail = &mut tail[idx - gap_len..];
+ if tail.len() == gap_len {
+ break;
+ }
+ gap_len += 1;
+ }
+
+ // Account for removed `\r`.
+ // After `set_len`, `buf` is guaranteed to contain utf-8 again.
+ let new_len = buf.len() - gap_len;
+ let src = unsafe {
+ buf.set_len(new_len);
+ String::from_utf8_unchecked(buf)
+ };
+ return (src, LineEndings::Dos);
+
+ fn find_crlf(src: &[u8]) -> Option<usize> {
+ src.windows(2).position(|it| it == b"\r\n")
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
new file mode 100644
index 000000000..5f0e10862
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
@@ -0,0 +1,549 @@
+//! rust-analyzer extensions to the LSP.
+
+use std::{collections::HashMap, path::PathBuf};
+
+use lsp_types::request::Request;
+use lsp_types::{
+ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
+ PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
+};
+use serde::{Deserialize, Serialize};
+
+pub enum AnalyzerStatus {}
+
+impl Request for AnalyzerStatus {
+ type Params = AnalyzerStatusParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/analyzerStatus";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct AnalyzerStatusParams {
+ pub text_document: Option<TextDocumentIdentifier>,
+}
+
+pub enum MemoryUsage {}
+
+impl Request for MemoryUsage {
+ type Params = ();
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/memoryUsage";
+}
+
+pub enum ShuffleCrateGraph {}
+
+impl Request for ShuffleCrateGraph {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "rust-analyzer/shuffleCrateGraph";
+}
+
+pub enum ReloadWorkspace {}
+
+impl Request for ReloadWorkspace {
+ type Params = ();
+ type Result = ();
+ const METHOD: &'static str = "rust-analyzer/reloadWorkspace";
+}
+
+pub enum SyntaxTree {}
+
+impl Request for SyntaxTree {
+ type Params = SyntaxTreeParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/syntaxTree";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct SyntaxTreeParams {
+ pub text_document: TextDocumentIdentifier,
+ pub range: Option<Range>,
+}
+
+pub enum ViewHir {}
+
+impl Request for ViewHir {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewHir";
+}
+
+pub enum ViewFileText {}
+
+impl Request for ViewFileText {
+ type Params = lsp_types::TextDocumentIdentifier;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewFileText";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ViewCrateGraphParams {
+ /// Include *all* crates, not just crates in the workspace.
+ pub full: bool,
+}
+
+pub enum ViewCrateGraph {}
+
+impl Request for ViewCrateGraph {
+ type Params = ViewCrateGraphParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewCrateGraph";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ViewItemTreeParams {
+ pub text_document: TextDocumentIdentifier,
+}
+
+pub enum ViewItemTree {}
+
+impl Request for ViewItemTree {
+ type Params = ViewItemTreeParams;
+ type Result = String;
+ const METHOD: &'static str = "rust-analyzer/viewItemTree";
+}
+
+pub enum ExpandMacro {}
+
+impl Request for ExpandMacro {
+ type Params = ExpandMacroParams;
+ type Result = Option<ExpandedMacro>;
+ const METHOD: &'static str = "rust-analyzer/expandMacro";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ExpandMacroParams {
+ pub text_document: TextDocumentIdentifier,
+ pub position: Position,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct ExpandedMacro {
+ pub name: String,
+ pub expansion: String,
+}
+
+pub enum MatchingBrace {}
+
+impl Request for MatchingBrace {
+ type Params = MatchingBraceParams;
+ type Result = Vec<Position>;
+ const METHOD: &'static str = "experimental/matchingBrace";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct MatchingBraceParams {
+ pub text_document: TextDocumentIdentifier,
+ pub positions: Vec<Position>,
+}
+
+pub enum ParentModule {}
+
+impl Request for ParentModule {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<lsp_types::GotoDefinitionResponse>;
+ const METHOD: &'static str = "experimental/parentModule";
+}
+
+pub enum JoinLines {}
+
+impl Request for JoinLines {
+ type Params = JoinLinesParams;
+ type Result = Vec<lsp_types::TextEdit>;
+ const METHOD: &'static str = "experimental/joinLines";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct JoinLinesParams {
+ pub text_document: TextDocumentIdentifier,
+ pub ranges: Vec<Range>,
+}
+
+pub enum OnEnter {}
+
+impl Request for OnEnter {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<Vec<SnippetTextEdit>>;
+ const METHOD: &'static str = "experimental/onEnter";
+}
+
+pub enum Runnables {}
+
+impl Request for Runnables {
+ type Params = RunnablesParams;
+ type Result = Vec<Runnable>;
+ const METHOD: &'static str = "experimental/runnables";
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct RunnablesParams {
+ pub text_document: TextDocumentIdentifier,
+ pub position: Option<Position>,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct Runnable {
+ pub label: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub location: Option<lsp_types::LocationLink>,
+ pub kind: RunnableKind,
+ pub args: CargoRunnable,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "lowercase")]
+pub enum RunnableKind {
+ Cargo,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct CargoRunnable {
+ // command to be executed instead of cargo
+ pub override_cargo: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_root: Option<PathBuf>,
+ // the cargo subcommand plus target-selection args such as `--package` and `--lib`
+ pub cargo_args: Vec<String>,
+ // user-specified additional cargo args, like `--release`.
+ pub cargo_extra_args: Vec<String>,
+ // arguments passed to the executable after `--`
+ pub executable_args: Vec<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub expect_test: Option<bool>,
+}
+
+pub enum RelatedTests {}
+
+impl Request for RelatedTests {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Vec<TestInfo>;
+ const METHOD: &'static str = "rust-analyzer/relatedTests";
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+pub struct TestInfo {
+ pub runnable: Runnable,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct InlayHintsParams {
+ pub text_document: TextDocumentIdentifier,
+ pub range: Option<lsp_types::Range>,
+}
+
+pub enum Ssr {}
+
+impl Request for Ssr {
+ type Params = SsrParams;
+ type Result = lsp_types::WorkspaceEdit;
+ const METHOD: &'static str = "experimental/ssr";
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SsrParams {
+ pub query: String,
+ pub parse_only: bool,
+
+ /// File position where SSR was invoked. Paths in `query` will be resolved relative to this
+ /// position.
+ #[serde(flatten)]
+ pub position: lsp_types::TextDocumentPositionParams,
+
+ /// Current selections. Search/replace will be restricted to these if non-empty.
+ pub selections: Vec<lsp_types::Range>,
+}
+
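+/// Notification sent from the server to the client describing rust-analyzer's
+/// overall health and whether it is quiescent (no background work pending).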
+pub enum ServerStatusNotification {}
+
+impl Notification for ServerStatusNotification {
+ type Params = ServerStatusParams;
+ const METHOD: &'static str = "experimental/serverStatus";
+}
+
+#[derive(Deserialize, Serialize, PartialEq, Eq, Clone)]
+pub struct ServerStatusParams {
+ pub health: Health,
+ pub quiescent: bool,
+ pub message: Option<String>,
+}
+
+#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub enum Health {
+ Ok,
+ Warning,
+ Error,
+}
+
+pub enum CodeActionRequest {}
+
+impl Request for CodeActionRequest {
+ type Params = lsp_types::CodeActionParams;
+ type Result = Option<Vec<CodeAction>>;
+ const METHOD: &'static str = "textDocument/codeAction";
+}
+
+pub enum CodeActionResolveRequest {}
+impl Request for CodeActionResolveRequest {
+ type Params = CodeAction;
+ type Result = CodeAction;
+ const METHOD: &'static str = "codeAction/resolve";
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeAction {
+ pub title: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub group: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub kind: Option<CodeActionKind>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub command: Option<lsp_types::Command>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub edit: Option<SnippetWorkspaceEdit>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub is_preferred: Option<bool>,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<CodeActionData>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CodeActionData {
+ pub code_action_params: lsp_types::CodeActionParams,
+ pub id: String,
+}
+
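+/// Like `lsp_types::WorkspaceEdit`, but the contained edits may be snippets
+/// (see `SnippetTextEdit::insert_text_format`).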
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnippetWorkspaceEdit {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub changes: Option<HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub document_changes: Option<Vec<SnippetDocumentChangeOperation>>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub change_annotations:
+ Option<HashMap<lsp_types::ChangeAnnotationIdentifier, lsp_types::ChangeAnnotation>>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged, rename_all = "lowercase")]
+pub enum SnippetDocumentChangeOperation {
+ Op(lsp_types::ResourceOp),
+ Edit(SnippetTextDocumentEdit),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnippetTextDocumentEdit {
+ pub text_document: lsp_types::OptionalVersionedTextDocumentIdentifier,
+ pub edits: Vec<SnippetTextEdit>,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnippetTextEdit {
+ pub range: Range,
+ pub new_text: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub insert_text_format: Option<lsp_types::InsertTextFormat>,
+ /// The annotation id, if this is an annotated edit.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub annotation_id: Option<lsp_types::ChangeAnnotationIdentifier>,
+}
+
+pub enum HoverRequest {}
+
+impl Request for HoverRequest {
+ type Params = HoverParams;
+ type Result = Option<Hover>;
+ const METHOD: &'static str = "textDocument/hover";
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct HoverParams {
+ pub text_document: TextDocumentIdentifier,
+ pub position: PositionOrRange,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(untagged)]
+pub enum PositionOrRange {
+ Position(lsp_types::Position),
+ Range(lsp_types::Range),
+}
+
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+pub struct Hover {
+ #[serde(flatten)]
+ pub hover: lsp_types::Hover,
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub actions: Vec<CommandLinkGroup>,
+}
+
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct CommandLinkGroup {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub title: Option<String>,
+ pub commands: Vec<CommandLink>,
+}
+
+// LSP v3.15 `Command` does not have a `tooltip` field, but VS Code supports one.
+#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
+pub struct CommandLink {
+ #[serde(flatten)]
+ pub command: lsp_types::Command,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub tooltip: Option<String>,
+}
+
+pub enum ExternalDocs {}
+
+impl Request for ExternalDocs {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<lsp_types::Url>;
+ const METHOD: &'static str = "experimental/externalDocs";
+}
+
+pub enum OpenCargoToml {}
+
+impl Request for OpenCargoToml {
+ type Params = OpenCargoTomlParams;
+ type Result = Option<lsp_types::GotoDefinitionResponse>;
+ const METHOD: &'static str = "experimental/openCargoToml";
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct OpenCargoTomlParams {
+ pub text_document: TextDocumentIdentifier,
+}
+
+/// Information about a `CodeLens` that is yet to be resolved.
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum CodeLensResolveData {
+ Impls(lsp_types::request::GotoImplementationParams),
+ References(lsp_types::TextDocumentPositionParams),
+}
+
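+/// Returns `true` if the client advertised `"utf-8"` in the non-standard `offsetEncoding` capability.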
+pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
+ caps.offset_encoding.as_deref().unwrap_or_default().iter().any(|it| it == "utf-8")
+}
+
+pub enum MoveItem {}
+
+impl Request for MoveItem {
+ type Params = MoveItemParams;
+ type Result = Vec<SnippetTextEdit>;
+ const METHOD: &'static str = "experimental/moveItem";
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct MoveItemParams {
+ pub direction: MoveItemDirection,
+ pub text_document: TextDocumentIdentifier,
+ pub range: Range,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub enum MoveItemDirection {
+ Up,
+ Down,
+}
+
+#[derive(Debug)]
+pub enum WorkspaceSymbol {}
+
+impl Request for WorkspaceSymbol {
+ type Params = WorkspaceSymbolParams;
+ type Result = Option<Vec<lsp_types::SymbolInformation>>;
+ const METHOD: &'static str = "workspace/symbol";
+}
+
+#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WorkspaceSymbolParams {
+ #[serde(flatten)]
+ pub partial_result_params: PartialResultParams,
+
+ #[serde(flatten)]
+ pub work_done_progress_params: WorkDoneProgressParams,
+
+ /// A non-empty query string
+ pub query: String,
+
+ pub search_scope: Option<WorkspaceSymbolSearchScope>,
+
+ pub search_kind: Option<WorkspaceSymbolSearchKind>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub enum WorkspaceSymbolSearchScope {
+ Workspace,
+ WorkspaceAndDependencies,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub enum WorkspaceSymbolSearchKind {
+ OnlyTypes,
+ AllSymbols,
+}
+
+/// The document on-type-formatting request is sent from the client to
+/// the server to format parts of the document while the user is typing.
+/// This is almost the same as `lsp_types::request::OnTypeFormatting`, except
+/// that the result contains `SnippetTextEdit`s instead of `TextEdit`s.
+#[derive(Debug)]
+pub enum OnTypeFormatting {}
+
+impl Request for OnTypeFormatting {
+ type Params = DocumentOnTypeFormattingParams;
+ type Result = Option<Vec<SnippetTextEdit>>;
+ const METHOD: &'static str = "textDocument/onTypeFormatting";
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CompletionResolveData {
+ pub position: lsp_types::TextDocumentPositionParams,
+ pub imports: Vec<CompletionImport>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct InlayHintResolveData {
+ pub text_document: TextDocumentIdentifier,
+ pub position: PositionOrRange,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CompletionImport {
+ pub full_import_path: String,
+ pub imported_name: String,
+}
+
+#[derive(Debug, Deserialize, Default)]
+pub struct ClientCommandOptions {
+ pub commands: Vec<String>,
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
new file mode 100644
index 000000000..5a37cbe2e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
@@ -0,0 +1,407 @@
+//! Utilities for LSP-related boilerplate code.
+use std::{ops::Range, sync::Arc};
+
+use lsp_server::Notification;
+
+use crate::{
+ from_proto,
+ global_state::GlobalState,
+ line_index::{LineEndings, LineIndex, OffsetEncoding},
+ LspError,
+};
+
+pub(crate) fn invalid_params_error(message: String) -> LspError {
+ LspError { code: lsp_server::ErrorCode::InvalidParams as i32, message }
+}
+
+pub(crate) fn notification_is<N: lsp_types::notification::Notification>(
+ notification: &Notification,
+) -> bool {
+ notification.method == N::METHOD
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Progress {
+ Begin,
+ Report,
+ End,
+}
+
+impl Progress {
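+ /// Fraction of work done as a value in `0.0..=1.0`; a zero `total` yields `0.0` rather than dividing by zero.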
+ pub(crate) fn fraction(done: usize, total: usize) -> f64 {
+ assert!(done <= total);
+ done as f64 / total.max(1) as f64
+ }
+}
+
+impl GlobalState {
+ pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
+ self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams { typ, message },
+ )
+ }
+
+ /// Sends a notification to the client containing the error `message`.
+ /// If `additional_info` is [`Some`], appends a note to the notification telling the user to check the logs.
+ /// This will always log `message` + `additional_info` to the server's error log.
+ pub(crate) fn show_and_log_error(&mut self, message: String, additional_info: Option<String>) {
+ let mut message = message;
+ match additional_info {
+ Some(additional_info) => {
+ tracing::error!("{}\n\n{}", &message, &additional_info);
+ if tracing::enabled!(tracing::Level::ERROR) {
+ message.push_str("\n\nCheck the server logs for additional info.");
+ }
+ }
+ None => tracing::error!("{}", &message),
+ }
+
+ self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message },
+ )
+ }
+
+ /// rust-analyzer is resilient -- if it fails, this doesn't usually affect
+ /// the user experience. Part of that is that we deliberately hide panics
+ /// from the user.
+ ///
+ /// We do however want to pester rust-analyzer developers with panics and
+ /// other "you really gotta fix that" messages. The current strategy is to
+ /// be noisy for "from source" builds or when profiling is enabled.
+ ///
+ /// It's unclear if making from source `cargo xtask install` builds more
+ /// panicky is a good idea, let's see if we can keep our awesome bleeding
+ /// edge users from being upset!
+ pub(crate) fn poke_rust_analyzer_developer(&mut self, message: String) {
+ let from_source_build = option_env!("POKE_RA_DEVS").is_some();
+ let profiling_enabled = std::env::var("RA_PROFILE").is_ok();
+ if from_source_build || profiling_enabled {
+ self.show_message(lsp_types::MessageType::ERROR, message)
+ }
+ }
+
+ pub(crate) fn report_progress(
+ &mut self,
+ title: &str,
+ state: Progress,
+ message: Option<String>,
+ fraction: Option<f64>,
+ ) {
+ if !self.config.work_done_progress() {
+ return;
+ }
+ let percentage = fraction.map(|f| {
+ assert!((0.0..=1.0).contains(&f));
+ (f * 100.0) as u32
+ });
+ let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title));
+ let work_done_progress = match state {
+ Progress::Begin => {
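+ // Per the LSP spec, a server-initiated progress token must be created with a
+ // window/workDoneProgress/create request before the first Begin report uses it.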
+ self.send_request::<lsp_types::request::WorkDoneProgressCreate>(
+ lsp_types::WorkDoneProgressCreateParams { token: token.clone() },
+ |_, _| (),
+ );
+
+ lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
+ title: title.into(),
+ cancellable: None,
+ message,
+ percentage,
+ })
+ }
+ Progress::Report => {
+ lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport {
+ cancellable: None,
+ message,
+ percentage,
+ })
+ }
+ Progress::End => {
+ lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message })
+ }
+ };
+ self.send_notification::<lsp_types::notification::Progress>(lsp_types::ProgressParams {
+ token,
+ value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress),
+ });
+ }
+}
+
+pub(crate) fn apply_document_changes(
+ old_text: &mut String,
+ content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
+) {
+ let mut line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(old_text)),
+ // We don't care about line endings or offset encoding here.
+ endings: LineEndings::Unix,
+ encoding: OffsetEncoding::Utf16,
+ };
+
+ // The changes we got must be applied sequentially, but can cross lines so we
+ // have to keep our line index updated.
+ // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
+ // remember the last valid line in the index and only rebuild it if needed.
+ // The VFS will normalize the end of lines to `\n`.
+ enum IndexValid {
+ All,
+ UpToLineExclusive(u32),
+ }
+
+ impl IndexValid {
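+ /// For example, `UpToLineExclusive(5)` covers lines 0..=4 but not line 5 itself.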
+ fn covers(&self, line: u32) -> bool {
+ match *self {
+ IndexValid::UpToLineExclusive(to) => to > line,
+ _ => true,
+ }
+ }
+ }
+
+ let mut index_valid = IndexValid::All;
+ for change in content_changes {
+ match change.range {
+ Some(range) => {
+ if !index_valid.covers(range.end.line) {
+ line_index.index = Arc::new(ide::LineIndex::new(old_text));
+ }
+ index_valid = IndexValid::UpToLineExclusive(range.start.line);
+ if let Ok(range) = from_proto::text_range(&line_index, range) {
+ old_text.replace_range(Range::<usize>::from(range), &change.text);
+ }
+ }
+ None => {
+ *old_text = change.text;
+ index_valid = IndexValid::UpToLineExclusive(0);
+ }
+ }
+ }
+}
+
+/// Checks that the edits inside the completion and the additional edits do not overlap.
+/// LSP explicitly forbids the additional edits to overlap both with the main edit and themselves.
+pub(crate) fn all_edits_are_disjoint(
+ completion: &lsp_types::CompletionItem,
+ additional_edits: &[lsp_types::TextEdit],
+) -> bool {
+ let mut edit_ranges = Vec::new();
+ match completion.text_edit.as_ref() {
+ Some(lsp_types::CompletionTextEdit::Edit(edit)) => {
+ edit_ranges.push(edit.range);
+ }
+ Some(lsp_types::CompletionTextEdit::InsertAndReplace(edit)) => {
+ let replace = edit.replace;
+ let insert = edit.insert;
+ if replace.start != insert.start
+ || insert.start > insert.end
+ || insert.end > replace.end
+ {
+ // insert has to be a prefix of replace but it is not
+ return false;
+ }
+ edit_ranges.push(replace);
+ }
+ None => {}
+ }
+ if let Some(additional_changes) = completion.additional_text_edits.as_ref() {
+ edit_ranges.extend(additional_changes.iter().map(|edit| edit.range));
+ };
+ edit_ranges.extend(additional_edits.iter().map(|edit| edit.range));
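+ // Sort by start position; the ranges are pairwise disjoint iff every range ends
+ // no later than the next one starts (touching ranges are allowed).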
+ edit_ranges.sort_by_key(|range| (range.start, range.end));
+ edit_ranges
+ .iter()
+ .zip(edit_ranges.iter().skip(1))
+ .all(|(previous, next)| previous.end <= next.start)
+}
+
+#[cfg(test)]
+mod tests {
+ use lsp_types::{
+ CompletionItem, CompletionTextEdit, InsertReplaceEdit, Position, Range,
+ TextDocumentContentChangeEvent,
+ };
+
+ use super::*;
+
+ #[test]
+ fn test_apply_document_changes() {
+ macro_rules! c {
+ [$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => {
+ vec![$(TextDocumentContentChangeEvent {
+ range: Some(Range {
+ start: Position { line: $sl, character: $sc },
+ end: Position { line: $el, character: $ec },
+ }),
+ range_length: None,
+ text: String::from($text),
+ }),+]
+ };
+ }
+
+ let mut text = String::new();
+ apply_document_changes(&mut text, vec![]);
+ assert_eq!(text, "");
+ apply_document_changes(
+ &mut text,
+ vec![TextDocumentContentChangeEvent {
+ range: None,
+ range_length: None,
+ text: String::from("the"),
+ }],
+ );
+ assert_eq!(text, "the");
+ apply_document_changes(&mut text, c![0, 3; 0, 3 => " quick"]);
+ assert_eq!(text, "the quick");
+ apply_document_changes(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
+ assert_eq!(text, "quick foxes");
+ apply_document_changes(&mut text, c![0, 11; 0, 11 => "\ndream"]);
+ assert_eq!(text, "quick foxes\ndream");
+ apply_document_changes(&mut text, c![1, 0; 1, 0 => "have "]);
+ assert_eq!(text, "quick foxes\nhave dream");
+ apply_document_changes(
+ &mut text,
+ c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"],
+ );
+ assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
+ apply_document_changes(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
+ assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
+ apply_document_changes(
+ &mut text,
+ c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
+ );
+ assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
+ apply_document_changes(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
+ assert_eq!(text, "the quick \nthey have quiet dreams\n");
+
+ text = String::from("❤️");
+ apply_document_changes(&mut text, c![0, 0; 0, 0 => "a"]);
+ assert_eq!(text, "a❤️");
+
+ text = String::from("a\nb");
+ apply_document_changes(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
+ assert_eq!(text, "adcb");
+
+ text = String::from("a\nb");
+ apply_document_changes(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
+ assert_eq!(text, "ațc\ncb");
+ }
+
+ #[test]
+ fn empty_completion_disjoint_tests() {
+ let empty_completion =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+
+ let disjoint_edit_1 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(3, 3), Position::new(4, 4)),
+ "new_text".to_string(),
+ );
+
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ assert!(
+ all_edits_are_disjoint(&empty_completion, &[]),
+ "Empty completion has all its edits disjoint"
+ );
+ assert!(
+ all_edits_are_disjoint(
+ &empty_completion,
+ &[disjoint_edit_1.clone(), disjoint_edit_2.clone()]
+ ),
+ "Empty completion is disjoint to whatever disjoint extra edits added"
+ );
+
+ assert!(
+ !all_edits_are_disjoint(
+ &empty_completion,
+ &[disjoint_edit_1, disjoint_edit_2, joint_edit]
+ ),
+ "Empty completion does not prevent joint extra edits from failing the validation"
+ );
+ }
+
+ #[test]
+ fn completion_with_joint_edits_disjoint_tests() {
+ let disjoint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(2, 2)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ let mut completion_with_joint_edits =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+ completion_with_joint_edits.additional_text_edits =
+ Some(vec![disjoint_edit.clone(), joint_edit.clone()]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+
+ completion_with_joint_edits.text_edit =
+ Some(CompletionTextEdit::Edit(disjoint_edit.clone()));
+ completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit.clone()]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+
+ completion_with_joint_edits.text_edit =
+ Some(CompletionTextEdit::InsertAndReplace(InsertReplaceEdit {
+ new_text: "new_text".to_string(),
+ insert: disjoint_edit.range,
+ replace: disjoint_edit_2.range,
+ }));
+ completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+ }
+
+ #[test]
+ fn completion_with_disjoint_edits_disjoint_tests() {
+ let disjoint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(2, 2)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ let mut completion_with_disjoint_edits =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+ completion_with_disjoint_edits.text_edit = Some(CompletionTextEdit::Edit(disjoint_edit));
+ let completion_with_disjoint_edits = completion_with_disjoint_edits;
+
+ assert!(
+ all_edits_are_disjoint(&completion_with_disjoint_edits, &[]),
+ "Completion with disjoint edits is valid"
+ );
+ assert!(
+ !all_edits_are_disjoint(&completion_with_disjoint_edits, &[joint_edit]),
+ "Completion with disjoint edits and joint extra edit is invalid"
+ );
+ assert!(
+ all_edits_are_disjoint(&completion_with_disjoint_edits, &[disjoint_edit_2]),
+ "Completion with disjoint edits and joint extra edit is valid"
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
new file mode 100644
index 000000000..5845cf712
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -0,0 +1,823 @@
+//! The main loop of `rust-analyzer` responsible for dispatching LSP
+//! requests/replies and notifications back to the client.
+use std::{
+ fmt,
+ sync::Arc,
+ time::{Duration, Instant},
+};
+
+use always_assert::always;
+use crossbeam_channel::{select, Receiver};
+use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+use lsp_server::{Connection, Notification, Request};
+use lsp_types::notification::Notification as _;
+use vfs::{ChangeKind, FileId};
+
+use crate::{
+ config::Config,
+ dispatch::{NotificationDispatcher, RequestDispatcher},
+ from_proto,
+ global_state::{file_id_to_url, url_to_file_id, GlobalState},
+ handlers, lsp_ext,
+ lsp_utils::{apply_document_changes, notification_is, Progress},
+ mem_docs::DocumentData,
+ reload::{self, BuildDataProgress, ProjectWorkspaceProgress},
+ Result,
+};
+
+pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
+ tracing::info!("initial config: {:#?}", config);
+
+ // The Windows scheduler implements priority boosts: if a thread waits for an
+ // event (like a condvar) and the event fires, the thread's priority is
+ // temporarily bumped. This optimization backfires in our case: each time the
+ // `main_loop` schedules a task to run on the threadpool, the worker threads
+ // get a higher priority and (on a machine with fewer cores) displace the
+ // main loop! We work around this by marking the main loop as a
+ // higher-priority thread.
+ //
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts
+ // https://github.com/rust-lang/rust-analyzer/issues/2835
+ #[cfg(windows)]
+ unsafe {
+ use winapi::um::processthreadsapi::*;
+ let thread = GetCurrentThread();
+ let thread_priority_above_normal = 1;
+ SetThreadPriority(thread, thread_priority_above_normal);
+ }
+
+ GlobalState::new(connection.sender, config).run(connection.receiver)
+}
+
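+/// Everything the main loop reacts to in one turn: an LSP message from the client,
+/// a finished background task, a VFS loader message, or a flycheck (`cargo check`) message.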
+enum Event {
+ Lsp(lsp_server::Message),
+ Task(Task),
+ Vfs(vfs::loader::Message),
+ Flycheck(flycheck::Message),
+}
+
+#[derive(Debug)]
+pub(crate) enum Task {
+ Response(lsp_server::Response),
+ Retry(lsp_server::Request),
+ Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+ PrimeCaches(PrimeCachesProgress),
+ FetchWorkspace(ProjectWorkspaceProgress),
+ FetchBuildData(BuildDataProgress),
+}
+
+#[derive(Debug)]
+pub(crate) enum PrimeCachesProgress {
+ Begin,
+ Report(ide::ParallelPrimeCachesProgress),
+ End { cancelled: bool },
+}
+
+impl fmt::Debug for Event {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter<'_>| {
+ f.debug_struct("Notification").field("method", &not.method).finish()
+ };
+
+ match self {
+ Event::Lsp(lsp_server::Message::Notification(not)) => {
+ if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
+ || notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
+ {
+ return debug_verbose_not(not, f);
+ }
+ }
+ Event::Task(Task::Response(resp)) => {
+ return f
+ .debug_struct("Response")
+ .field("id", &resp.id)
+ .field("error", &resp.error)
+ .finish();
+ }
+ _ => (),
+ }
+ match self {
+ Event::Lsp(it) => fmt::Debug::fmt(it, f),
+ Event::Task(it) => fmt::Debug::fmt(it, f),
+ Event::Vfs(it) => fmt::Debug::fmt(it, f),
+ Event::Flycheck(it) => fmt::Debug::fmt(it, f),
+ }
+ }
+}
+
+impl GlobalState {
+ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
+ if self.config.linked_projects().is_empty()
+ && self.config.detached_files().is_empty()
+ && self.config.notifications().cargo_toml_not_found
+ {
+ self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
+ };
+
+ if self.config.did_save_text_document_dynamic_registration() {
+ let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
+ include_text: Some(false),
+ text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
+ document_selector: Some(vec![
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/*.rs".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.toml".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.lock".into()),
+ },
+ ]),
+ },
+ };
+
+ let registration = lsp_types::Registration {
+ id: "textDocument/didSave".to_string(),
+ method: "textDocument/didSave".to_string(),
+ register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ self.fetch_workspaces_queue.request_op("startup".to_string());
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+
+ while let Some(event) = self.next_event(&inbox) {
+ if let Event::Lsp(lsp_server::Message::Notification(not)) = &event {
+ if not.method == lsp_types::notification::Exit::METHOD {
+ return Ok(());
+ }
+ }
+ self.handle_event(event)?
+ }
+
+ Err("client exited without proper shutdown sequence".into())
+ }
+
+ fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
+ select! {
+ recv(inbox) -> msg =>
+ msg.ok().map(Event::Lsp),
+
+ recv(self.task_pool.receiver) -> task =>
+ Some(Event::Task(task.unwrap())),
+
+ recv(self.loader.receiver) -> task =>
+ Some(Event::Vfs(task.unwrap())),
+
+ recv(self.flycheck_receiver) -> task =>
+ Some(Event::Flycheck(task.unwrap())),
+ }
+ }
+
+ fn handle_event(&mut self, event: Event) -> Result<()> {
+ let loop_start = Instant::now();
+ // NOTE: don't count blocking select! call as a loop-turn time
+ let _p = profile::span("GlobalState::handle_event");
+
+ tracing::debug!("handle_event({:?})", event);
+ let task_queue_len = self.task_pool.handle.len();
+ if task_queue_len > 0 {
+ tracing::info!("task queue len: {}", task_queue_len);
+ }
+
+ let was_quiescent = self.is_quiescent();
+ match event {
+ Event::Lsp(msg) => match msg {
+ lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
+ lsp_server::Message::Notification(not) => {
+ self.on_notification(not)?;
+ }
+ lsp_server::Message::Response(resp) => self.complete_request(resp),
+ },
+ Event::Task(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/task");
+ let mut prime_caches_progress = Vec::new();
+ loop {
+ match task {
+ Task::Response(response) => self.respond(response),
+ Task::Retry(req) => self.on_request(req),
+ Task::Diagnostics(diagnostics_per_file) => {
+ for (file_id, diagnostics) in diagnostics_per_file {
+ self.diagnostics.set_native_diagnostics(file_id, diagnostics)
+ }
+ }
+ Task::PrimeCaches(progress) => match progress {
+ PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
+ PrimeCachesProgress::Report(_) => {
+ match prime_caches_progress.last_mut() {
+ Some(last @ PrimeCachesProgress::Report(_)) => {
+ // Coalesce subsequent update events.
+ *last = progress;
+ }
+ _ => prime_caches_progress.push(progress),
+ }
+ }
+ PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
+ },
+ Task::FetchWorkspace(progress) => {
+ let (state, msg) = match progress {
+ ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
+ ProjectWorkspaceProgress::Report(msg) => {
+ (Progress::Report, Some(msg))
+ }
+ ProjectWorkspaceProgress::End(workspaces) => {
+ self.fetch_workspaces_queue.op_completed(workspaces);
+
+ let old = Arc::clone(&self.workspaces);
+ self.switch_workspaces("fetched workspace".to_string());
+ let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
+
+ if self.config.run_build_scripts() && workspaces_updated {
+ self.fetch_build_data_queue
+ .request_op("workspace updated".to_string());
+ }
+
+ (Progress::End, None)
+ }
+ };
+
+ self.report_progress("Fetching", state, msg, None);
+ }
+ Task::FetchBuildData(progress) => {
+ let (state, msg) = match progress {
+ BuildDataProgress::Begin => (Some(Progress::Begin), None),
+ BuildDataProgress::Report(msg) => {
+ (Some(Progress::Report), Some(msg))
+ }
+ BuildDataProgress::End(build_data_result) => {
+ self.fetch_build_data_queue.op_completed(build_data_result);
+
+ self.switch_workspaces("fetched build data".to_string());
+
+ (Some(Progress::End), None)
+ }
+ };
+
+ if let Some(state) = state {
+ self.report_progress("Loading", state, msg, None);
+ }
+ }
+ }
+
+ // Coalesce multiple task events into one loop turn
+ task = match self.task_pool.receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ };
+ }
+
+ for progress in prime_caches_progress {
+ let (state, message, fraction);
+ match progress {
+ PrimeCachesProgress::Begin => {
+ state = Progress::Begin;
+ message = None;
+ fraction = 0.0;
+ }
+ PrimeCachesProgress::Report(report) => {
+ state = Progress::Report;
+
+ message = match &report.crates_currently_indexing[..] {
+ [crate_name] => Some(format!(
+ "{}/{} ({})",
+ report.crates_done, report.crates_total, crate_name
+ )),
+ [crate_name, rest @ ..] => Some(format!(
+ "{}/{} ({} + {} more)",
+ report.crates_done,
+ report.crates_total,
+ crate_name,
+ rest.len()
+ )),
+ _ => None,
+ };
+
+ fraction = Progress::fraction(report.crates_done, report.crates_total);
+ }
+ PrimeCachesProgress::End { cancelled } => {
+ state = Progress::End;
+ message = None;
+ fraction = 1.0;
+
+ self.prime_caches_queue.op_completed(());
+ if cancelled {
+ self.prime_caches_queue
+ .request_op("restart after cancellation".to_string());
+ }
+ }
+ };
+
+ self.report_progress("Indexing", state, message, Some(fraction));
+ }
+ }
+ Event::Vfs(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/vfs");
+ loop {
+ match task {
+ vfs::loader::Message::Loaded { files } => {
+ let vfs = &mut self.vfs.write().0;
+ for (path, contents) in files {
+ let path = VfsPath::from(path);
+ if !self.mem_docs.contains(&path) {
+ vfs.set_file_contents(path, contents);
+ }
+ }
+ }
+ vfs::loader::Message::Progress { n_total, n_done, config_version } => {
+ always!(config_version <= self.vfs_config_version);
+
+ self.vfs_progress_config_version = config_version;
+ self.vfs_progress_n_total = n_total;
+ self.vfs_progress_n_done = n_done;
+
+ let state = if n_done == 0 {
+ Progress::Begin
+ } else if n_done < n_total {
+ Progress::Report
+ } else {
+ assert_eq!(n_done, n_total);
+ Progress::End
+ };
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(format!("{}/{}", n_done, n_total)),
+ Some(Progress::fraction(n_done, n_total)),
+ )
+ }
+ }
+ // Coalesce many VFS events into a single loop turn
+ task = match self.loader.receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ }
+ }
+ }
+ Event::Flycheck(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/flycheck");
+ loop {
+ match task {
+ flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
+ let snap = self.snapshot();
+ let diagnostics =
+ crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
+ &self.config.diagnostics_map(),
+ &diagnostic,
+ &workspace_root,
+ &snap,
+ );
+ for diag in diagnostics {
+ match url_to_file_id(&self.vfs.read().0, &diag.url) {
+ Ok(file_id) => self.diagnostics.add_check_diagnostic(
+ file_id,
+ diag.diagnostic,
+ diag.fix,
+ ),
+ Err(err) => {
+ tracing::error!(
+ "File with cargo diagnostic not found in VFS: {}",
+ err
+ );
+ }
+ };
+ }
+ }
+
+ flycheck::Message::Progress { id, progress } => {
+ let (state, message) = match progress {
+ flycheck::Progress::DidStart => {
+ self.diagnostics.clear_check();
+ (Progress::Begin, None)
+ }
+ flycheck::Progress::DidCheckCrate(target) => {
+ (Progress::Report, Some(target))
+ }
+ flycheck::Progress::DidCancel => (Progress::End, None),
+ flycheck::Progress::DidFinish(result) => {
+ if let Err(err) = result {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ }
+ (Progress::End, None)
+ }
+ };
+
+ // When we're running multiple flychecks, we have to include a disambiguator in
+ // the title, or the editor complains. Note that this is a user-facing string.
+ let title = if self.flycheck.len() == 1 {
+ match self.config.flycheck() {
+ Some(config) => format!("{}", config),
+ None => "cargo check".to_string(),
+ }
+ } else {
+ format!("cargo check (#{})", id + 1)
+ };
+ self.report_progress(&title, state, message, None);
+ }
+ }
+ // Coalesce many flycheck updates into a single loop turn
+ task = match self.flycheck_receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ }
+ }
+ }
+ }
+
+ let state_changed = self.process_changes();
+ let memdocs_added_or_removed = self.mem_docs.take_changes();
+
+ if self.is_quiescent() {
+ if !was_quiescent {
+ for flycheck in &self.flycheck {
+ flycheck.update();
+ }
+ if self.config.prefill_caches() {
+ self.prime_caches_queue.request_op("became quiescent".to_string());
+ }
+ }
+
+ if !was_quiescent || state_changed {
+ // Refresh semantic tokens if the client supports it.
+ if self.config.semantic_tokens_refresh() {
+ self.semantic_tokens_cache.lock().clear();
+ self.send_request::<lsp_types::request::SemanticTokensRefresh>((), |_, _| ());
+ }
+
+ // Refresh code lens if the client supports it.
+ if self.config.code_lens_refresh() {
+ self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
+ }
+ }
+
+ if !was_quiescent || state_changed || memdocs_added_or_removed {
+ if self.config.publish_diagnostics() {
+ self.update_diagnostics()
+ }
+ }
+ }
+
+ if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
+ for file_id in diagnostic_changes {
+ let db = self.analysis_host.raw_database();
+ let source_root = db.file_source_root(file_id);
+ if db.source_root(source_root).is_library {
+ // Only publish diagnostics for files in the workspace, not from crates.io deps
+ // or the sysroot.
+ // While theoretically these should never have errors, we have quite a few false
+ // positives particularly in the stdlib, and those diagnostics would stay around
+ // forever if we emitted them here.
+ continue;
+ }
+
+ let url = file_id_to_url(&self.vfs.read().0, file_id);
+ let mut diagnostics =
+ self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
+ // https://github.com/rust-lang/rust-analyzer/issues/11404
+ for d in &mut diagnostics {
+ if d.message.is_empty() {
+ d.message = " ".to_string();
+ }
+ if let Some(rds) = d.related_information.as_mut() {
+ for rd in rds {
+ if rd.message.is_empty() {
+ rd.message = " ".to_string();
+ }
+ }
+ }
+ }
+ let version = from_proto::vfs_path(&url)
+ .map(|path| self.mem_docs.get(&path).map(|it| it.version))
+ .unwrap_or_default();
+
+ self.send_notification::<lsp_types::notification::PublishDiagnostics>(
+ lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version },
+ );
+ }
+ }
+
+ if self.config.cargo_autoreload() {
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+ }
+
+ if !self.fetch_workspaces_queue.op_in_progress() {
+ if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
+ self.fetch_build_data(cause);
+ }
+ }
+
+ if let Some(cause) = self.prime_caches_queue.should_start_op() {
+ tracing::debug!(%cause, "will prime caches");
+ let num_worker_threads = self.config.prime_caches_num_threads();
+
+ self.task_pool.handle.spawn_with_sender({
+ let analysis = self.snapshot().analysis;
+ move |sender| {
+ sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
+ let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
+ let report = PrimeCachesProgress::Report(progress);
+ sender.send(Task::PrimeCaches(report)).unwrap();
+ });
+ sender
+ .send(Task::PrimeCaches(PrimeCachesProgress::End {
+ cancelled: res.is_err(),
+ }))
+ .unwrap();
+ }
+ });
+ }
+
+ let status = self.current_status();
+ if self.last_reported_status.as_ref() != Some(&status) {
+ self.last_reported_status = Some(status.clone());
+
+ if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
+ self.show_message(lsp_types::MessageType::ERROR, message.clone());
+ }
+
+ if self.config.server_status_notification() {
+ self.send_notification::<lsp_ext::ServerStatusNotification>(status);
+ }
+ }
+
+ let loop_duration = loop_start.elapsed();
+ if loop_duration > Duration::from_millis(100) && was_quiescent {
+ tracing::warn!("overly long loop turn: {:?}", loop_duration);
+ self.poke_rust_analyzer_developer(format!(
+ "overly long loop turn: {:?}",
+ loop_duration
+ ));
+ }
+ Ok(())
+ }
+
+ fn on_new_request(&mut self, request_received: Instant, req: Request) {
+ self.register_request(&req, request_received);
+ self.on_request(req);
+ }
+
+ fn on_request(&mut self, req: Request) {
+ if self.shutdown_requested {
+ self.respond(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::InvalidRequest as i32,
+ "Shutdown already requested.".to_owned(),
+ ));
+ return;
+ }
+
+ // Avoid flashing a bunch of unresolved references during initial load.
+ if self.workspaces.is_empty() && !self.is_quiescent() {
+ self.respond(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::ContentModified as i32,
+ "waiting for cargo metadata or cargo check".to_owned(),
+ ));
+ return;
+ }
+
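+ // `on_sync_mut` handlers run on the main loop thread with mutable access to the
+ // global state, `on_sync` handlers run on the main loop thread against a snapshot,
+ // and `on` handlers are dispatched to the task pool with a snapshot.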
+ RequestDispatcher { req: Some(req), global_state: self }
+ .on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
+ s.shutdown_requested = true;
+ Ok(())
+ })
+ .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
+ .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
+ .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
+ .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
+ .on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
+ .on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
+ .on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
+ .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
+ .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
+ .on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
+ .on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
+ .on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
+ .on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
+ .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
+ .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)
+ .on::<lsp_ext::Runnables>(handlers::handle_runnables)
+ .on::<lsp_ext::RelatedTests>(handlers::handle_related_tests)
+ .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)
+ .on::<lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve)
+ .on::<lsp_ext::HoverRequest>(handlers::handle_hover)
+ .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs)
+ .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
+ .on::<lsp_ext::MoveItem>(handlers::handle_move_item)
+ .on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
+ .on::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
+ .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
+ .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
+ .on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
+ .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
+ .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+ .on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
+ .on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
+ .on::<lsp_types::request::Completion>(handlers::handle_completion)
+ .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
+ .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
+ .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
+ .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
+ .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)
+ .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)
+ .on::<lsp_types::request::Rename>(handlers::handle_rename)
+ .on::<lsp_types::request::References>(handlers::handle_references)
+ .on::<lsp_types::request::Formatting>(handlers::handle_formatting)
+ .on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
+ .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)
+ .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)
+ .on::<lsp_types::request::CallHierarchyIncomingCalls>(
+ handlers::handle_call_hierarchy_incoming,
+ )
+ .on::<lsp_types::request::CallHierarchyOutgoingCalls>(
+ handlers::handle_call_hierarchy_outgoing,
+ )
+ .on::<lsp_types::request::SemanticTokensFullRequest>(
+ handlers::handle_semantic_tokens_full,
+ )
+ .on::<lsp_types::request::SemanticTokensFullDeltaRequest>(
+ handlers::handle_semantic_tokens_full_delta,
+ )
+ .on::<lsp_types::request::SemanticTokensRangeRequest>(
+ handlers::handle_semantic_tokens_range,
+ )
+ .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
+ .on::<lsp_ext::Ssr>(handlers::handle_ssr)
+ .finish();
+ }
+
+ fn on_notification(&mut self, not: Notification) -> Result<()> {
+ NotificationDispatcher { not: Some(not), global_state: self }
+ .on::<lsp_types::notification::Cancel>(|this, params| {
+ let id: lsp_server::RequestId = match params.id {
+ lsp_types::NumberOrString::Number(id) => id.into(),
+ lsp_types::NumberOrString::String(id) => id.into(),
+ };
+ this.cancel(id);
+ Ok(())
+ })?
+ .on::<lsp_types::notification::WorkDoneProgressCancel>(|_this, _params| {
+ // Just ignore this. It is OK to continue sending progress
+ // notifications for this token, as the client can't know when
+ // we accepted the notification.
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
+ let already_exists = this
+ .mem_docs
+ .insert(path.clone(), DocumentData::new(params.text_document.version))
+ .is_err();
+ if already_exists {
+ tracing::error!("duplicate DidOpenTextDocument: {}", path)
+ }
+ this.vfs
+ .write()
+ .0
+ .set_file_contents(path, Some(params.text_document.text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
+ match this.mem_docs.get_mut(&path) {
+ Some(doc) => {
+ // The version passed in DidChangeTextDocument is the version after all edits are applied
+ // so we should apply it before the vfs is notified.
+ doc.version = params.text_document.version;
+ }
+ None => {
+ tracing::error!("unexpected DidChangeTextDocument: {}", path);
+ return Ok(());
+ }
+ };
+
+ let vfs = &mut this.vfs.write().0;
+ let file_id = vfs.file_id(&path).unwrap();
+ let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
+ apply_document_changes(&mut text, params.content_changes);
+
+ vfs.set_file_contents(path, Some(text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
+ if this.mem_docs.remove(&path).is_err() {
+ tracing::error!("orphan DidCloseTextDocument: {}", path);
+ }
+
+ this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
+
+ if let Some(path) = path.as_path() {
+ this.loader.handle.invalidate(path.to_path_buf());
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
+ for flycheck in &this.flycheck {
+ flycheck.update();
+ }
+ if let Ok(abs_path) = from_proto::abs_path(&params.text_document.uri) {
+ if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
+ this.fetch_workspaces_queue
+ .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
+ // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
+ // this notification's parameters should be ignored and the actual config queried separately.
+ this.send_request::<lsp_types::request::WorkspaceConfiguration>(
+ lsp_types::ConfigurationParams {
+ items: vec![lsp_types::ConfigurationItem {
+ scope_uri: None,
+ section: Some("rust-analyzer".to_string()),
+ }],
+ },
+ |this, resp| {
+ tracing::debug!("config update response: '{:?}", resp);
+ let lsp_server::Response { error, result, .. } = resp;
+
+ match (error, result) {
+ (Some(err), _) => {
+ tracing::error!("failed to fetch the server settings: {:?}", err)
+ }
+ (None, Some(mut configs)) => {
+ if let Some(json) = configs.get_mut(0) {
+ // Note that json can be null according to the spec if the client can't
+ // provide a configuration. This is handled in Config::update below.
+ let mut config = Config::clone(&*this.config);
+ if let Err(error) = config.update(json.take()) {
+ this.show_message(
+ lsp_types::MessageType::WARNING,
+ error.to_string(),
+ );
+ }
+ this.update_configuration(config);
+ }
+ }
+ (None, None) => tracing::error!(
+ "received empty server settings response from the client"
+ ),
+ }
+ },
+ );
+
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
+ for change in params.changes {
+ if let Ok(path) = from_proto::abs_path(&change.uri) {
+ this.loader.handle.invalidate(path);
+ }
+ }
+ Ok(())
+ })?
+ .finish();
+ Ok(())
+ }
+
+ fn update_diagnostics(&mut self) {
+ let subscriptions = self
+ .mem_docs
+ .iter()
+ .map(|path| self.vfs.read().0.file_id(path).unwrap())
+ .collect::<Vec<_>>();
+
+ tracing::trace!("updating notifications for {:?}", subscriptions);
+
+ let snapshot = self.snapshot();
+ self.task_pool.handle.spawn(move || {
+ let diagnostics = subscriptions
+ .into_iter()
+ .filter_map(|file_id| {
+ handlers::publish_diagnostics(&snapshot, file_id)
+ .ok()
+ .map(|diags| (file_id, diags))
+ })
+ .collect::<Vec<_>>();
+ Task::Diagnostics(diagnostics)
+ })
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs
new file mode 100644
index 000000000..912ed1e76
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/markdown.rs
@@ -0,0 +1,157 @@
+//! Transforms markdown
+use ide_db::rust_doc::is_rust_fence;
+
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+
+pub(crate) fn format_docs(src: &str) -> String {
+ let mut processed_lines = Vec::new();
+ let mut in_code_block = false;
+ let mut is_rust = false;
+
+ for mut line in src.lines() {
+ if in_code_block && is_rust && code_line_ignored_by_rustdoc(line) {
+ continue;
+ }
+
+ if let Some(header) = RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
+ {
+ in_code_block ^= true;
+
+ if in_code_block {
+ is_rust = is_rust_fence(header);
+
+ if is_rust {
+ line = "```rust";
+ }
+ }
+ }
+
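+ // Inside a code block, a leading "##" is rustdoc's escape for a literal "#";
+ // drop one "#" so the line is kept in the output as intended.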
+ if in_code_block {
+ let trimmed = line.trim_start();
+ if trimmed.starts_with("##") {
+ line = &trimmed[1..];
+ }
+ }
+
+ processed_lines.push(line);
+ }
+ processed_lines.join("\n")
+}
+
+fn code_line_ignored_by_rustdoc(line: &str) -> bool {
+ let trimmed = line.trim();
+ trimmed == "#" || trimmed.starts_with("# ") || trimmed.starts_with("#\t")
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_format_docs_adds_rust() {
+ let comment = "```\nfn some_rust() {}\n```";
+ assert_eq!(format_docs(comment), "```rust\nfn some_rust() {}\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_plain_text() {
+ let comment = "```text\nthis is plain text\n```";
+ assert_eq!(format_docs(comment), "```text\nthis is plain text\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_non_rust() {
+ let comment = "```sh\nsupposedly shell code\n```";
+ assert_eq!(format_docs(comment), "```sh\nsupposedly shell code\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_rust_alias() {
+ let comment = "```ignore\nlet z = 55;\n```";
+ assert_eq!(format_docs(comment), "```rust\nlet z = 55;\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_complex_code_block_attrs() {
+ let comment = "```rust,no_run\nlet z = 55;\n```";
+ assert_eq!(format_docs(comment), "```rust\nlet z = 55;\n```");
+ }
+
+ #[test]
+ fn test_format_docs_handles_error_codes() {
+ let comment = "```compile_fail,E0641\nlet b = 0 as *const _;\n```";
+ assert_eq!(format_docs(comment), "```rust\nlet b = 0 as *const _;\n```");
+ }
+
+ #[test]
+ fn test_format_docs_skips_comments_in_rust_block() {
+ let comment =
+ "```rust\n # skip1\n# skip2\n#stay1\nstay2\n#\n #\n # \n #\tskip3\n\t#\t\n```";
+ assert_eq!(format_docs(comment), "```rust\n#stay1\nstay2\n```");
+ }
+
+ #[test]
+ fn test_format_docs_does_not_skip_lines_if_plain_text() {
+ let comment =
+ "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```";
+ assert_eq!(
+ format_docs(comment),
+ "```text\n # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t\n```",
+ );
+ }
+
+ #[test]
+ fn test_format_docs_keeps_comments_outside_of_rust_block() {
+ let comment = " # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t";
+ assert_eq!(format_docs(comment), comment);
+ }
+
+ #[test]
+ fn test_format_docs_preserves_newlines() {
+ let comment = "this\nis\nmultiline";
+ assert_eq!(format_docs(comment), comment);
+ }
+
+ #[test]
+ fn test_code_blocks_in_comments_marked_as_rust() {
+ let comment = r#"```rust
+fn main(){}
+```
+Some comment.
+```
+let a = 1;
+```"#;
+
+ assert_eq!(
+ format_docs(comment),
+ "```rust\nfn main(){}\n```\nSome comment.\n```rust\nlet a = 1;\n```"
+ );
+ }
+
+ #[test]
+ fn test_code_blocks_in_comments_marked_as_text() {
+ let comment = r#"```text
+filler
+text
+```
+Some comment.
+```
+let a = 1;
+```"#;
+
+ assert_eq!(
+ format_docs(comment),
+ "```text\nfiller\ntext\n```\nSome comment.\n```rust\nlet a = 1;\n```"
+ );
+ }
+
+ #[test]
+ fn test_format_docs_handles_escape_double_hashes() {
+ let comment = r#"```rust
+let s = "foo
+## bar # baz";
+```"#;
+
+ assert_eq!(format_docs(comment), "```rust\nlet s = \"foo\n# bar # baz\";\n```");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs
new file mode 100644
index 000000000..f86a0f66a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/mem_docs.rs
@@ -0,0 +1,65 @@
+//! In-memory document information.
+
+use std::mem;
+
+use rustc_hash::FxHashMap;
+use vfs::VfsPath;
+
+/// Holds the set of in-memory documents.
+///
+/// For these documents, the true contents are maintained by the client; they
+/// might differ from what is on disk.
+#[derive(Default, Clone)]
+pub(crate) struct MemDocs {
+ mem_docs: FxHashMap<VfsPath, DocumentData>,
+ added_or_removed: bool,
+}
+
+impl MemDocs {
+ pub(crate) fn contains(&self, path: &VfsPath) -> bool {
+ self.mem_docs.contains_key(path)
+ }
+ pub(crate) fn insert(&mut self, path: VfsPath, data: DocumentData) -> Result<(), ()> {
+ self.added_or_removed = true;
+ match self.mem_docs.insert(path, data) {
+ Some(_) => Err(()),
+ None => Ok(()),
+ }
+ }
+ pub(crate) fn remove(&mut self, path: &VfsPath) -> Result<(), ()> {
+ self.added_or_removed = true;
+ match self.mem_docs.remove(path) {
+ Some(_) => Ok(()),
+ None => Err(()),
+ }
+ }
+ pub(crate) fn get(&self, path: &VfsPath) -> Option<&DocumentData> {
+ self.mem_docs.get(path)
+ }
+ pub(crate) fn get_mut(&mut self, path: &VfsPath) -> Option<&mut DocumentData> {
+ // NB: don't set `self.added_or_removed` here, as that purposefully only
+ // tracks changes to the key set.
+ self.mem_docs.get_mut(path)
+ }
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &VfsPath> {
+ self.mem_docs.keys()
+ }
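+ /// Reports whether any document was added or removed since the last call, and resets the flag.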
+ pub(crate) fn take_changes(&mut self) -> bool {
+ mem::replace(&mut self.added_or_removed, false)
+ }
+}
+
+/// Information about a document that the Language Client
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
+#[derive(Debug, Clone)]
+pub(crate) struct DocumentData {
+ pub(crate) version: i32,
+}
+
+impl DocumentData {
+ pub(crate) fn new(version: i32) -> Self {
+ DocumentData { version }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
new file mode 100644
index 000000000..97aca0161
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
@@ -0,0 +1,44 @@
+//! Bookkeeping to make sure only one long-running operation is being executed
+//! at a time.
+
+pub(crate) type Cause = String;
+
+pub(crate) struct OpQueue<Output> {
+ op_requested: Option<Cause>,
+ op_in_progress: bool,
+ last_op_result: Output,
+}
+
+impl<Output: Default> Default for OpQueue<Output> {
+ fn default() -> Self {
+ Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
+ }
+}
+
+impl<Output> OpQueue<Output> {
+ pub(crate) fn request_op(&mut self, reason: Cause) {
+ self.op_requested = Some(reason);
+ }
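+ /// If an op was requested and none is in progress, marks it as started and returns
+ /// its cause (exactly once); otherwise returns `None`.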
+ pub(crate) fn should_start_op(&mut self) -> Option<Cause> {
+ if self.op_in_progress {
+ return None;
+ }
+ self.op_in_progress = self.op_requested.is_some();
+ self.op_requested.take()
+ }
+ pub(crate) fn op_completed(&mut self, result: Output) {
+ assert!(self.op_in_progress);
+ self.op_in_progress = false;
+ self.last_op_result = result;
+ }
+
+ pub(crate) fn last_op_result(&self) -> &Output {
+ &self.last_op_result
+ }
+ pub(crate) fn op_in_progress(&self) -> bool {
+ self.op_in_progress
+ }
+ pub(crate) fn op_requested(&self) -> bool {
+ self.op_requested.is_some()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
new file mode 100644
index 000000000..eaab275bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -0,0 +1,705 @@
+//! Project loading & configuration updates.
+//!
+//! This is quite tricky. The main problem is time and changes -- there's no
+//! fixed "project" that rust-analyzer is working with; the "current project" is
+//! itself mutable state. For example, when the user edits `Cargo.toml` by adding
+//! a new dependency, the project model changes. What's more, switching the
+//! project model is not instantaneous -- it takes time to run `cargo metadata`
+//! and (for proc macros) `cargo check`.
+//!
+//! The main guiding principle here is, as elsewhere in rust-analyzer,
+//! robustness. We try not to assume that the project model exists or is
+//! correct. Instead, we try to provide a best-effort service. Even if the
+//! project is currently loading and we don't have a full project model, we
+//! still want to respond to various requests.
+use std::{mem, sync::Arc};
+
+use flycheck::{FlycheckConfig, FlycheckHandle};
+use hir::db::DefDatabase;
+use ide::Change;
+use ide_db::base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, SourceRoot, VfsPath,
+};
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use syntax::SmolStr;
+use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
+
+use crate::{
+ config::{Config, FilesWatcher, LinkedProject},
+ global_state::GlobalState,
+ lsp_ext,
+ main_loop::Task,
+ op_queue::Cause,
+};
+
+#[derive(Debug)]
+pub(crate) enum ProjectWorkspaceProgress {
+ Begin,
+ Report(String),
+ End(Vec<anyhow::Result<ProjectWorkspace>>),
+}
+
+#[derive(Debug)]
+pub(crate) enum BuildDataProgress {
+ Begin,
+ Report(String),
+ End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
+}
+
+impl GlobalState {
+ pub(crate) fn is_quiescent(&self) -> bool {
+ !(self.fetch_workspaces_queue.op_in_progress()
+ || self.fetch_build_data_queue.op_in_progress()
+ || self.vfs_progress_config_version < self.vfs_config_version
+ || self.vfs_progress_n_done < self.vfs_progress_n_total)
+ }
+
+ pub(crate) fn update_configuration(&mut self, config: Config) {
+ let _p = profile::span("GlobalState::update_configuration");
+ let old_config = mem::replace(&mut self.config, Arc::new(config));
+ if self.config.lru_capacity() != old_config.lru_capacity() {
+ self.analysis_host.update_lru_capacity(self.config.lru_capacity());
+ }
+ if self.config.linked_projects() != old_config.linked_projects() {
+ self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
+ } else if self.config.flycheck() != old_config.flycheck() {
+ self.reload_flycheck();
+ }
+
+ if self.analysis_host.raw_database().enable_proc_attr_macros()
+ != self.config.expand_proc_attr_macros()
+ {
+ self.analysis_host
+ .raw_database_mut()
+ .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
+ }
+ }
+
+ pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
+ let mut status = lsp_ext::ServerStatusParams {
+ health: lsp_ext::Health::Ok,
+ quiescent: self.is_quiescent(),
+ message: None,
+ };
+
+ if self.proc_macro_changed {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Reload required due to source changes of a procedural macro.".into())
+ }
+ if self.fetch_build_data_error().is_err() {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Failed to run build scripts of some packages, check the logs.".to_string());
+ }
+ if !self.config.cargo_autoreload()
+ && self.is_quiescent()
+ && self.fetch_workspaces_queue.op_requested()
+ {
+ status.health = lsp_ext::Health::Warning;
+ status.message = Some("Workspace reload required".to_string())
+ }
+
+ if let Err(error) = self.fetch_workspace_error() {
+ status.health = lsp_ext::Health::Error;
+ status.message = Some(error)
+ }
+ status
+ }
+
+ pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch workspaces");
+
+ self.task_pool.handle.spawn_with_sender({
+ let linked_projects = self.config.linked_projects();
+ let detached_files = self.config.detached_files().to_vec();
+ let cargo_config = self.config.cargo();
+
+ move |sender| {
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
+ .unwrap()
+ }
+ };
+
+ sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();
+
+ let mut workspaces = linked_projects
+ .iter()
+ .map(|project| match project {
+ LinkedProject::ProjectManifest(manifest) => {
+ project_model::ProjectWorkspace::load(
+ manifest.clone(),
+ &cargo_config,
+ &progress,
+ )
+ }
+ LinkedProject::InlineJsonProject(it) => {
+ project_model::ProjectWorkspace::load_inline(
+ it.clone(),
+ cargo_config.target.as_deref(),
+ )
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !detached_files.is_empty() {
+ workspaces
+ .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
+ }
+
+ tracing::info!("did fetch workspaces {:?}", workspaces);
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
+ .unwrap();
+ }
+ });
+ }
+
+ pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch build data");
+ let workspaces = Arc::clone(&self.workspaces);
+ let config = self.config.cargo();
+ self.task_pool.handle.spawn_with_sender(move |sender| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
+
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
+ }
+ };
+ let mut res = Vec::new();
+ for ws in workspaces.iter() {
+ res.push(ws.run_build_scripts(&config, &progress));
+ }
+ sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
+ });
+ }
+
+ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
+ let _p = profile::span("GlobalState::switch_workspaces");
+ tracing::info!(%cause, "will switch workspaces");
+
+ if let Err(error_message) = self.fetch_workspace_error() {
+ self.show_and_log_error(error_message, None);
+ if !self.workspaces.is_empty() {
+ // It only makes sense to switch to a partially broken workspace
+ // if we don't have any workspace at all yet.
+ return;
+ }
+ }
+
+ if let Err(error) = self.fetch_build_data_error() {
+ self.show_and_log_error(
+ "rust-analyzer failed to run build scripts".to_string(),
+ Some(error),
+ );
+ }
+
+ let workspaces = self
+ .fetch_workspaces_queue
+ .last_op_result()
+ .iter()
+ .filter_map(|res| res.as_ref().ok().cloned())
+ .collect::<Vec<_>>();
+
+ fn eq_ignore_build_data<'a>(
+ left: &'a ProjectWorkspace,
+ right: &'a ProjectWorkspace,
+ ) -> bool {
+ let key = |p: &'a ProjectWorkspace| match p {
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+
+ build_scripts: _,
+ } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
+ _ => None,
+ };
+ match (key(left), key(right)) {
+ (Some(lk), Some(rk)) => lk == rk,
+ _ => left == right,
+ }
+ }
+
+ let same_workspaces = workspaces.len() == self.workspaces.len()
+ && workspaces
+ .iter()
+ .zip(self.workspaces.iter())
+ .all(|(l, r)| eq_ignore_build_data(l, r));
+
+ if same_workspaces {
+ let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
+ if Arc::ptr_eq(workspaces, &self.workspaces) {
+ tracing::debug!("set build scripts to workspaces");
+
+ let workspaces = workspaces
+ .iter()
+ .cloned()
+ .zip(build_scripts)
+ .map(|(mut ws, bs)| {
+ ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
+ ws
+ })
+ .collect::<Vec<_>>();
+
+ // Workspaces are the same, but we've updated build data.
+ self.workspaces = Arc::new(workspaces);
+ } else {
+ tracing::info!("build scripts do not match the version of the active workspace");
+ // Current build scripts do not match the version of the active
+ // workspace, so there's nothing for us to update.
+ return;
+ }
+ } else {
+ tracing::debug!("abandon build scripts for workspaces");
+
+ // Here, we completely changed the workspace (Cargo.toml edit), so
+ // we don't care about build-script results, they are stale.
+ self.workspaces = Arc::new(workspaces)
+ }
+
+ if let FilesWatcher::Client = self.config.files().watcher {
+ let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
+ watchers: self
+ .workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .filter(|it| it.is_local)
+ .flat_map(|root| {
+ root.include.into_iter().flat_map(|it| {
+ [
+ format!("{}/**/*.rs", it.display()),
+ format!("{}/**/Cargo.toml", it.display()),
+ format!("{}/**/Cargo.lock", it.display()),
+ ]
+ })
+ })
+ .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
+ .collect(),
+ };
+ let registration = lsp_types::Registration {
+ id: "workspace/didChangeWatchedFiles".to_string(),
+ method: "workspace/didChangeWatchedFiles".to_string(),
+ register_options: Some(serde_json::to_value(registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ let mut change = Change::new();
+
+ let files_config = self.config.files();
+ let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
+
+ let standalone_server_name =
+ format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
+
+ if self.proc_macro_clients.is_empty() {
+ if let Some((path, args)) = self.config.proc_macro_srv() {
+ self.proc_macro_clients = self
+ .workspaces
+ .iter()
+ .map(|ws| {
+ let mut args = args.clone();
+ let mut path = path.clone();
+
+ if let ProjectWorkspace::Cargo { sysroot, .. } = ws {
+ tracing::info!("Found a cargo workspace...");
+ if let Some(sysroot) = sysroot.as_ref() {
+ tracing::info!("Found a cargo workspace with a sysroot...");
+ let server_path =
+ sysroot.root().join("libexec").join(&standalone_server_name);
+ if std::fs::metadata(&server_path).is_ok() {
+ tracing::info!(
+ "And the server exists at {}",
+ server_path.display()
+ );
+ path = server_path;
+ args = vec![];
+ } else {
+ tracing::info!(
+ "And the server does not exist at {}",
+ server_path.display()
+ );
+ }
+ }
+ }
+
+ tracing::info!(
+ "Using proc-macro server at {} with args {:?}",
+ path.display(),
+ args
+ );
+ ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
+ let error = format!(
+ "Failed to run proc_macro_srv from path {}, error: {:?}",
+ path.display(),
+ err
+ );
+ tracing::error!(error);
+ error
+ })
+ })
+ .collect();
+ }
+ }
+
+ let watch = match files_config.watcher {
+ FilesWatcher::Client => vec![],
+ FilesWatcher::Server => project_folders.watch,
+ };
+ self.vfs_config_version += 1;
+ self.loader.handle.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch,
+ version: self.vfs_config_version,
+ });
+
+ // Create crate graph from all the workspaces
+ let crate_graph = {
+ let dummy_replacements = self.config.dummy_replacements();
+
+ let vfs = &mut self.vfs.write().0;
+ let loader = &mut self.loader;
+ let mem_docs = &self.mem_docs;
+ let mut load = move |path: &AbsPath| {
+ let _p = profile::span("GlobalState::load");
+ let vfs_path = vfs::VfsPath::from(path.to_path_buf());
+ if !mem_docs.contains(&vfs_path) {
+ let contents = loader.handle.load_sync(path);
+ vfs.set_file_contents(vfs_path.clone(), contents);
+ }
+ let res = vfs.file_id(&vfs_path);
+ if res.is_none() {
+ tracing::warn!("failed to load {}", path.display())
+ }
+ res
+ };
+
+ let mut crate_graph = CrateGraph::default();
+ for (idx, ws) in self.workspaces.iter().enumerate() {
+ let proc_macro_client = match self.proc_macro_clients.get(idx) {
+ Some(res) => res.as_ref().map_err(|e| &**e),
+ None => Err("Proc macros are disabled"),
+ };
+ let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
+ load_proc_macro(
+ proc_macro_client,
+ path,
+ dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
+ )
+ };
+ crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
+ }
+ crate_graph
+ };
+ change.set_crate_graph(crate_graph);
+
+ self.source_root_config = project_folders.source_root_config;
+
+ self.analysis_host.apply_change(change);
+ self.process_changes();
+ self.reload_flycheck();
+ tracing::info!("did switch workspaces");
+ }
+
+ fn fetch_workspace_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in self.fetch_workspaces_queue.last_op_result() {
+ if let Err(err) = ws {
+ stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+ }
+ }
+
+ if buf.is_empty() {
+ return Ok(());
+ }
+
+ Err(buf)
+ }
+
+ fn fetch_build_data_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in &self.fetch_build_data_queue.last_op_result().1 {
+ match ws {
+ Ok(data) => match data.error() {
+ Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr),
+ _ => (),
+ },
+ // io errors
+ Err(err) => stdx::format_to!(buf, "{:#}\n", err),
+ }
+ }
+
+ if buf.is_empty() {
+ Ok(())
+ } else {
+ Err(buf)
+ }
+ }
+
+ fn reload_flycheck(&mut self) {
+ let _p = profile::span("GlobalState::reload_flycheck");
+ let config = match self.config.flycheck() {
+ Some(it) => it,
+ None => {
+ self.flycheck = Vec::new();
+ self.diagnostics.clear_check();
+ return;
+ }
+ };
+
+ let sender = self.flycheck_sender.clone();
+ self.flycheck = self
+ .workspaces
+ .iter()
+ .enumerate()
+ .filter_map(|(id, w)| match w {
+ ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
+ ProjectWorkspace::Json { project, .. } => {
+ // Enable flychecks for json projects if a custom flycheck command was supplied
+ // in the workspace configuration.
+ match config {
+ FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
+ _ => None,
+ }
+ }
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ })
+ .map(|(id, root)| {
+ let sender = sender.clone();
+ FlycheckHandle::spawn(
+ id,
+ Box::new(move |msg| sender.send(msg).unwrap()),
+ config.clone(),
+ root.to_path_buf(),
+ )
+ })
+ .collect();
+ }
+}
+
+#[derive(Default)]
+pub(crate) struct ProjectFolders {
+ pub(crate) load: Vec<vfs::loader::Entry>,
+ pub(crate) watch: Vec<usize>,
+ pub(crate) source_root_config: SourceRootConfig,
+}
+
+impl ProjectFolders {
+ pub(crate) fn new(
+ workspaces: &[ProjectWorkspace],
+ global_excludes: &[AbsPathBuf],
+ ) -> ProjectFolders {
+ let mut res = ProjectFolders::default();
+ let mut fsc = FileSetConfig::builder();
+ let mut local_filesets = vec![];
+
+ for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
+ let file_set_roots: Vec<VfsPath> =
+ root.include.iter().cloned().map(VfsPath::from).collect();
+
+ let entry = {
+ let mut dirs = vfs::loader::Directories::default();
+ dirs.extensions.push("rs".into());
+ dirs.include.extend(root.include);
+ dirs.exclude.extend(root.exclude);
+ for excl in global_excludes {
+ if dirs
+ .include
+ .iter()
+ .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
+ {
+ dirs.exclude.push(excl.clone());
+ }
+ }
+
+ vfs::loader::Entry::Directories(dirs)
+ };
+
+ if root.is_local {
+ res.watch.push(res.load.len());
+ }
+ res.load.push(entry);
+
+ if root.is_local {
+ local_filesets.push(fsc.len());
+ }
+ fsc.add_file_set(file_set_roots)
+ }
+
+ let fsc = fsc.build();
+ res.source_root_config = SourceRootConfig { fsc, local_filesets };
+
+ res
+ }
+}
+
+#[derive(Default, Debug)]
+pub(crate) struct SourceRootConfig {
+ pub(crate) fsc: FileSetConfig,
+ pub(crate) local_filesets: Vec<usize>,
+}
+
+impl SourceRootConfig {
+ pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
+ let _p = profile::span("SourceRootConfig::partition");
+ self.fsc
+ .partition(vfs)
+ .into_iter()
+ .enumerate()
+ .map(|(idx, file_set)| {
+ let is_local = self.local_filesets.contains(&idx);
+ if is_local {
+ SourceRoot::new_local(file_set)
+ } else {
+ SourceRoot::new_library(file_set)
+ }
+ })
+ .collect()
+ }
+}
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with an identity dummy expander.
+pub(crate) fn load_proc_macro(
+ server: Result<&ProcMacroServer, &str>,
+ path: &AbsPath,
+ dummy_replace: &[Box<str>],
+) -> ProcMacroLoadResult {
+ let res: Result<Vec<_>, String> = (|| {
+ let dylib = MacroDylib::new(path.to_path_buf())
+ .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
+ let server = server.map_err(ToOwned::to_owned)?;
+ let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ if vec.is_empty() {
+ return Err("proc macro library returned no proc macros".to_string());
+ }
+ Ok(vec
+ .into_iter()
+ .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+ .collect())
+ })();
+ return match res {
+ Ok(proc_macros) => {
+ tracing::info!(
+ "Loaded proc-macros for {}: {:?}",
+ path.display(),
+ proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
+ );
+ Ok(proc_macros)
+ }
+ Err(e) => {
+ tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
+ Err(e)
+ }
+ };
+
+ fn expander_to_proc_macro(
+ expander: proc_macro_api::ProcMacro,
+ dummy_replace: &[Box<str>],
+ ) -> ProcMacro {
+ let name = SmolStr::from(expander.name());
+ let kind = match expander.kind() {
+ proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+ proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+ proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+ };
+ let expander: Arc<dyn ProcMacroExpander> =
+ if dummy_replace.iter().any(|replace| &**replace == name) {
+ Arc::new(DummyExpander)
+ } else {
+ Arc::new(Expander(expander))
+ };
+ ProcMacro { name, kind, expander }
+ }
+
+ #[derive(Debug)]
+ struct Expander(proc_macro_api::ProcMacro);
+
+ impl ProcMacroExpander for Expander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ attrs: Option<&tt::Subtree>,
+ env: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
+ }
+ }
+
+ /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
+ #[derive(Debug)]
+ struct DummyExpander;
+
+ impl ProcMacroExpander for DummyExpander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+ }
+}
+
+pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
+ const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
+ const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
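+ // A few illustrative cases (editor addition; the paths are hypothetical):
+ // creating `src/bin/tool.rs` returns true (implicit target directory), editing
+ // the contents of `src/lib.rs` returns false (`ChangeKind::Modify` is filtered
+ // out below), and any change to `Cargo.toml` or `Cargo.lock` returns true
+ // regardless of the change kind.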
+ let file_name = path.file_name().unwrap_or_default();
+
+ if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
+ return true;
+ }
+ if change_kind == ChangeKind::Modify {
+ return false;
+ }
+ if path.extension().unwrap_or_default() != "rs" {
+ if (file_name == "config.toml" || file_name == "config")
+ && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")) == Some(true)
+ {
+ return true;
+ }
+ return false;
+ }
+ if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
+ return true;
+ }
+ let parent = match path.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ if file_name == "main.rs" {
+ let grand_parent = match parent.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ }
+ false
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
new file mode 100644
index 000000000..6c78b5df1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
@@ -0,0 +1,301 @@
+//! Semantic Tokens helpers
+
+use std::ops;
+
+use lsp_types::{
+ Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+ SemanticTokensEdit,
+};
+
+macro_rules! define_semantic_token_types {
+ ($(($ident:ident, $string:literal)),*$(,)?) => {
+ $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
+
+ pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
+ SemanticTokenType::COMMENT,
+ SemanticTokenType::KEYWORD,
+ SemanticTokenType::STRING,
+ SemanticTokenType::NUMBER,
+ SemanticTokenType::REGEXP,
+ SemanticTokenType::OPERATOR,
+ SemanticTokenType::NAMESPACE,
+ SemanticTokenType::TYPE,
+ SemanticTokenType::STRUCT,
+ SemanticTokenType::CLASS,
+ SemanticTokenType::INTERFACE,
+ SemanticTokenType::ENUM,
+ SemanticTokenType::ENUM_MEMBER,
+ SemanticTokenType::TYPE_PARAMETER,
+ SemanticTokenType::FUNCTION,
+ SemanticTokenType::METHOD,
+ SemanticTokenType::PROPERTY,
+ SemanticTokenType::MACRO,
+ SemanticTokenType::VARIABLE,
+ SemanticTokenType::PARAMETER,
+ $($ident),*
+ ];
+ };
+}
+
+define_semantic_token_types![
+ (ANGLE, "angle"),
+ (ARITHMETIC, "arithmetic"),
+ (ATTRIBUTE, "attribute"),
+ (ATTRIBUTE_BRACKET, "attributeBracket"),
+ (BITWISE, "bitwise"),
+ (BOOLEAN, "boolean"),
+ (BRACE, "brace"),
+ (BRACKET, "bracket"),
+ (BUILTIN_ATTRIBUTE, "builtinAttribute"),
+ (BUILTIN_TYPE, "builtinType"),
+ (CHAR, "character"),
+ (COLON, "colon"),
+ (COMMA, "comma"),
+ (COMPARISON, "comparison"),
+ (CONST_PARAMETER, "constParameter"),
+ (DERIVE, "derive"),
+ (DERIVE_HELPER, "deriveHelper"),
+ (DOT, "dot"),
+ (ESCAPE_SEQUENCE, "escapeSequence"),
+ (FORMAT_SPECIFIER, "formatSpecifier"),
+ (GENERIC, "generic"),
+ (LABEL, "label"),
+ (LIFETIME, "lifetime"),
+ (LOGICAL, "logical"),
+ (MACRO_BANG, "macroBang"),
+ (OPERATOR, "operator"),
+ (PARENTHESIS, "parenthesis"),
+ (PUNCTUATION, "punctuation"),
+ (SELF_KEYWORD, "selfKeyword"),
+ (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
+ (SEMICOLON, "semicolon"),
+ (TYPE_ALIAS, "typeAlias"),
+ (TOOL_MODULE, "toolModule"),
+ (UNION, "union"),
+ (UNRESOLVED_REFERENCE, "unresolvedReference"),
+];
+
+macro_rules! define_semantic_token_modifiers {
+ ($(($ident:ident, $string:literal)),*$(,)?) => {
+ $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
+
+ pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
+ SemanticTokenModifier::DOCUMENTATION,
+ SemanticTokenModifier::DECLARATION,
+ SemanticTokenModifier::DEFINITION,
+ SemanticTokenModifier::STATIC,
+ SemanticTokenModifier::ABSTRACT,
+ SemanticTokenModifier::DEPRECATED,
+ SemanticTokenModifier::READONLY,
+ SemanticTokenModifier::DEFAULT_LIBRARY,
+ $($ident),*
+ ];
+ };
+}
+
+define_semantic_token_modifiers![
+ (ASYNC, "async"),
+ (ATTRIBUTE_MODIFIER, "attribute"),
+ (CALLABLE, "callable"),
+ (CONSTANT, "constant"),
+ (CONSUMING, "consuming"),
+ (CONTROL_FLOW, "controlFlow"),
+ (CRATE_ROOT, "crateRoot"),
+ (INJECTED, "injected"),
+ (INTRA_DOC_LINK, "intraDocLink"),
+ (LIBRARY, "library"),
+ (MUTABLE, "mutable"),
+ (PUBLIC, "public"),
+ (REFERENCE, "reference"),
+ (TRAIT_MODIFIER, "trait"),
+ (UNSAFE, "unsafe"),
+];
+
+#[derive(Default)]
+pub(crate) struct ModifierSet(pub(crate) u32);
+
+impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
+ fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
+ let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
+ self.0 |= 1 << idx;
+ }
+}
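+
+// Illustrative note added by the editor, not part of the upstream file: each
+// modifier occupies one bit, ordered by its position in `SUPPORTED_MODIFIERS`.
+// `DOCUMENTATION` is index 0 and `DECLARATION` is index 1, so a set holding both
+// has the value 0b11:
+//
+//     let mut set = ModifierSet::default();
+//     set |= SemanticTokenModifier::DOCUMENTATION; // sets bit 0
+//     set |= SemanticTokenModifier::DECLARATION;   // sets bit 1
+//     assert_eq!(set.0, 0b11);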
+
+/// Tokens are encoded relative to each other.
+///
+/// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
+pub(crate) struct SemanticTokensBuilder {
+ id: String,
+ prev_line: u32,
+ prev_char: u32,
+ data: Vec<SemanticToken>,
+}
+
+impl SemanticTokensBuilder {
+ pub(crate) fn new(id: String) -> Self {
+ SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
+ }
+
+ /// Push a new token onto the builder
+ pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
+ let mut push_line = range.start.line as u32;
+ let mut push_char = range.start.character as u32;
+
+ if !self.data.is_empty() {
+ push_line -= self.prev_line;
+ if push_line == 0 {
+ push_char -= self.prev_char;
+ }
+ }
+
+ // A token cannot be multiline
+ let token_len = range.end.character - range.start.character;
+
+ let token = SemanticToken {
+ delta_line: push_line,
+ delta_start: push_char,
+ length: token_len as u32,
+ token_type: token_index,
+ token_modifiers_bitset: modifier_bitset,
+ };
+
+ self.data.push(token);
+
+ self.prev_line = range.start.line as u32;
+ self.prev_char = range.start.character as u32;
+ }
+
+ pub(crate) fn build(self) -> SemanticTokens {
+ SemanticTokens { result_id: Some(self.id), data: self.data }
+ }
+}
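+
+// Worked example added by the editor, not part of the upstream file: pushing a
+// token starting at line 3, character 5 and then another on the same line at
+// character 12 stores the deltas (3, 5, ..) and (0, 7, ..) -- the second token's
+// line delta is 0, so only its character offset relative to the previous token
+// is recorded, as the LSP semantic tokens encoding requires.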
+
+pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+ let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+ let (_, old) = old.split_at(offset);
+ let (_, new) = new.split_at(offset);
+
+ let offset_from_end =
+ new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+ let (old, _) = old.split_at(old.len() - offset_from_end);
+ let (new, _) = new.split_at(new.len() - offset_from_end);
+
+ if old.is_empty() && new.is_empty() {
+ vec![]
+ } else {
+ // The LSP edit addresses positions in the flattened `data` array of u32s,
+ // while we diff whole tokens, so `start` and `delete_count` are multiples
+ // of the number of u32s in a serialized `SemanticToken` (5).
+ vec![SemanticTokensEdit {
+ start: 5 * offset as u32,
+ delete_count: 5 * old.len() as u32,
+ data: Some(new.into()),
+ }]
+ }
+}
+
+pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
+ SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
+ SemanticToken {
+ delta_line: t.0,
+ delta_start: t.1,
+ length: t.2,
+ token_type: t.3,
+ token_modifiers_bitset: t.4,
+ }
+ }
+
+ #[test]
+ fn test_diff_insert_at_end() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(
+ edits[0],
+ SemanticTokensEdit {
+ start: 10,
+ delete_count: 0,
+ data: Some(vec![from((11, 12, 13, 14, 15))])
+ }
+ );
+ }
+
+ #[test]
+ fn test_diff_insert_at_beginning() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(
+ edits[0],
+ SemanticTokensEdit {
+ start: 0,
+ delete_count: 0,
+ data: Some(vec![from((11, 12, 13, 14, 15))])
+ }
+ );
+ }
+
+ #[test]
+ fn test_diff_insert_in_middle() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [
+ from((1, 2, 3, 4, 5)),
+ from((10, 20, 30, 40, 50)),
+ from((60, 70, 80, 90, 100)),
+ from((6, 7, 8, 9, 10)),
+ ];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(
+ edits[0],
+ SemanticTokensEdit {
+ start: 5,
+ delete_count: 0,
+ data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
+ }
+ );
+ }
+
+ #[test]
+ fn test_diff_remove_from_end() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
+ }
+
+ #[test]
+ fn test_diff_remove_from_beginning() {
+ let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
+ }
+
+ #[test]
+ fn test_diff_remove_from_middle() {
+ let before = [
+ from((1, 2, 3, 4, 5)),
+ from((10, 20, 30, 40, 50)),
+ from((60, 70, 80, 90, 100)),
+ from((6, 7, 8, 9, 10)),
+ ];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs
new file mode 100644
index 000000000..aeeb3b7c5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/task_pool.rs
@@ -0,0 +1,52 @@
+//! A thin wrapper around `ThreadPool` to make sure that we join all things
+//! properly.
+use crossbeam_channel::Sender;
+
+pub(crate) struct TaskPool<T> {
+ sender: Sender<T>,
+ inner: threadpool::ThreadPool,
+}
+
+impl<T> TaskPool<T> {
+ pub(crate) fn new(sender: Sender<T>) -> TaskPool<T> {
+ const STACK_SIZE: usize = 8 * 1024 * 1024;
+
+ let inner = threadpool::Builder::new()
+ .thread_name("Worker".into())
+ .thread_stack_size(STACK_SIZE)
+ .build();
+ TaskPool { sender, inner }
+ }
+
+ pub(crate) fn spawn<F>(&mut self, task: F)
+ where
+ F: FnOnce() -> T + Send + 'static,
+ T: Send + 'static,
+ {
+ self.inner.execute({
+ let sender = self.sender.clone();
+ move || sender.send(task()).unwrap()
+ })
+ }
+
+ pub(crate) fn spawn_with_sender<F>(&mut self, task: F)
+ where
+ F: FnOnce(Sender<T>) + Send + 'static,
+ T: Send + 'static,
+ {
+ self.inner.execute({
+ let sender = self.sender.clone();
+ move || task(sender)
+ })
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ self.inner.queued_count()
+ }
+}
+
+impl<T> Drop for TaskPool<T> {
+ fn drop(&mut self) {
+ self.inner.join()
+ }
+}
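+
+// Illustrative usage sketch added by the editor, not part of the upstream file.
+// Task results come back over the channel, and dropping the pool joins all
+// worker threads.
+//
+//     let (sender, receiver) = crossbeam_channel::unbounded();
+//     let mut pool = TaskPool::new(sender);
+//     pool.spawn(|| 2 + 2);
+//     assert_eq!(receiver.recv().unwrap(), 4);
+//     drop(pool); // joins the workers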
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
new file mode 100644
index 000000000..7f4fa57fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
@@ -0,0 +1,1397 @@
+//! Conversion of rust-analyzer specific types to lsp_types equivalents.
+use std::{
+ iter::once,
+ path,
+ sync::atomic::{AtomicU32, Ordering},
+};
+
+use ide::{
+ Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
+ CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
+ Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
+ InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity,
+ SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
+};
+use itertools::Itertools;
+use serde_json::to_value;
+use vfs::AbsPath;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{CallInfoConfig, Config},
+ global_state::GlobalStateSnapshot,
+ line_index::{LineEndings, LineIndex, OffsetEncoding},
+ lsp_ext,
+ lsp_utils::invalid_params_error,
+ semantic_tokens, Result,
+};
+
+pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
+ let line_col = line_index.index.line_col(offset);
+ match line_index.encoding {
+ OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+ OffsetEncoding::Utf16 => {
+ let line_col = line_index.index.to_utf16(line_col);
+ lsp_types::Position::new(line_col.line, line_col.col)
+ }
+ }
+}
+
+pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
+ let start = position(line_index, range.start());
+ let end = position(line_index, range.end());
+ lsp_types::Range::new(start, end)
+}
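+
+// Illustrative note added by the editor, not part of the upstream file: the
+// column sent to the client depends on the negotiated encoding. In a line
+// containing `let s = "é";`, an offset after the `é` maps to a larger column
+// under `OffsetEncoding::Utf8` than under `OffsetEncoding::Utf16`, because `é`
+// occupies two bytes but only one UTF-16 code unit.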
+
+pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
+ match symbol_kind {
+ SymbolKind::Function => lsp_types::SymbolKind::FUNCTION,
+ SymbolKind::Struct => lsp_types::SymbolKind::STRUCT,
+ SymbolKind::Enum => lsp_types::SymbolKind::ENUM,
+ SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
+ SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE,
+ SymbolKind::Macro
+ | SymbolKind::BuiltinAttr
+ | SymbolKind::Attribute
+ | SymbolKind::Derive
+ | SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
+ SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
+ SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
+ lsp_types::SymbolKind::TYPE_PARAMETER
+ }
+ SymbolKind::Field => lsp_types::SymbolKind::FIELD,
+ SymbolKind::Static => lsp_types::SymbolKind::CONSTANT,
+ SymbolKind::Const => lsp_types::SymbolKind::CONSTANT,
+ SymbolKind::ConstParam => lsp_types::SymbolKind::CONSTANT,
+ SymbolKind::Impl => lsp_types::SymbolKind::OBJECT,
+ SymbolKind::Local
+ | SymbolKind::SelfParam
+ | SymbolKind::LifetimeParam
+ | SymbolKind::ValueParam
+ | SymbolKind::Label => lsp_types::SymbolKind::VARIABLE,
+ SymbolKind::Union => lsp_types::SymbolKind::STRUCT,
+ }
+}
+
+pub(crate) fn structure_node_kind(kind: StructureNodeKind) -> lsp_types::SymbolKind {
+ match kind {
+ StructureNodeKind::SymbolKind(symbol) => symbol_kind(symbol),
+ StructureNodeKind::Region => lsp_types::SymbolKind::NAMESPACE,
+ }
+}
+
+pub(crate) fn document_highlight_kind(
+ category: ReferenceCategory,
+) -> lsp_types::DocumentHighlightKind {
+ match category {
+ ReferenceCategory::Read => lsp_types::DocumentHighlightKind::READ,
+ ReferenceCategory::Write => lsp_types::DocumentHighlightKind::WRITE,
+ }
+}
+
+pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
+ match severity {
+ Severity::Error => lsp_types::DiagnosticSeverity::ERROR,
+ Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT,
+ }
+}
+
+pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
+ let value = crate::markdown::format_docs(documentation.as_str());
+ let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
+ lsp_types::Documentation::MarkupContent(markup_content)
+}
+
+pub(crate) fn completion_item_kind(
+ completion_item_kind: CompletionItemKind,
+) -> lsp_types::CompletionItemKind {
+ match completion_item_kind {
+ CompletionItemKind::Binding => lsp_types::CompletionItemKind::VARIABLE,
+ CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::STRUCT,
+ CompletionItemKind::InferredType => lsp_types::CompletionItemKind::SNIPPET,
+ CompletionItemKind::Keyword => lsp_types::CompletionItemKind::KEYWORD,
+ CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD,
+ CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET,
+ CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE,
+ CompletionItemKind::SymbolKind(symbol) => match symbol {
+ SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
+ SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
+ SymbolKind::Field => lsp_types::CompletionItemKind::FIELD,
+ SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Impl => lsp_types::CompletionItemKind::TEXT,
+ SymbolKind::Label => lsp_types::CompletionItemKind::VARIABLE,
+ SymbolKind::LifetimeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Local => lsp_types::CompletionItemKind::VARIABLE,
+ SymbolKind::Macro => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Module => lsp_types::CompletionItemKind::MODULE,
+ SymbolKind::SelfParam => lsp_types::CompletionItemKind::VALUE,
+ SymbolKind::SelfType => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Static => lsp_types::CompletionItemKind::VALUE,
+ SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT,
+ SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE,
+ SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT,
+ SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT,
+ SymbolKind::ValueParam => lsp_types::CompletionItemKind::VALUE,
+ SymbolKind::Variant => lsp_types::CompletionItemKind::ENUM_MEMBER,
+ SymbolKind::BuiltinAttr => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::ToolModule => lsp_types::CompletionItemKind::MODULE,
+ },
+ }
+}
+
+pub(crate) fn text_edit(line_index: &LineIndex, indel: Indel) -> lsp_types::TextEdit {
+ let range = range(line_index, indel.delete);
+ let new_text = match line_index.endings {
+ LineEndings::Unix => indel.insert,
+ LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
+ };
+ lsp_types::TextEdit { range, new_text }
+}
+
+pub(crate) fn completion_text_edit(
+ line_index: &LineIndex,
+ insert_replace_support: Option<lsp_types::Position>,
+ indel: Indel,
+) -> lsp_types::CompletionTextEdit {
+ let text_edit = text_edit(line_index, indel);
+ match insert_replace_support {
+ Some(cursor_pos) => lsp_types::InsertReplaceEdit {
+ new_text: text_edit.new_text,
+ insert: lsp_types::Range { start: text_edit.range.start, end: cursor_pos },
+ replace: text_edit.range,
+ }
+ .into(),
+ None => text_edit.into(),
+ }
+}
+
+pub(crate) fn snippet_text_edit(
+ line_index: &LineIndex,
+ is_snippet: bool,
+ indel: Indel,
+) -> lsp_ext::SnippetTextEdit {
+ let text_edit = text_edit(line_index, indel);
+ let insert_text_format =
+ if is_snippet { Some(lsp_types::InsertTextFormat::SNIPPET) } else { None };
+ lsp_ext::SnippetTextEdit {
+ range: text_edit.range,
+ new_text: text_edit.new_text,
+ insert_text_format,
+ annotation_id: None,
+ }
+}
+
+pub(crate) fn text_edit_vec(
+ line_index: &LineIndex,
+ text_edit: TextEdit,
+) -> Vec<lsp_types::TextEdit> {
+ text_edit.into_iter().map(|indel| self::text_edit(line_index, indel)).collect()
+}
+
+pub(crate) fn snippet_text_edit_vec(
+ line_index: &LineIndex,
+ is_snippet: bool,
+ text_edit: TextEdit,
+) -> Vec<lsp_ext::SnippetTextEdit> {
+ text_edit
+ .into_iter()
+ .map(|indel| self::snippet_text_edit(line_index, is_snippet, indel))
+ .collect()
+}
+
+pub(crate) fn completion_items(
+ config: &Config,
+ line_index: &LineIndex,
+ tdpp: lsp_types::TextDocumentPositionParams,
+ items: Vec<CompletionItem>,
+) -> Vec<lsp_types::CompletionItem> {
+ let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default();
+ let mut res = Vec::with_capacity(items.len());
+ for item in items {
+ completion_item(&mut res, config, line_index, &tdpp, max_relevance, item)
+ }
+ res
+}
+
+fn completion_item(
+ acc: &mut Vec<lsp_types::CompletionItem>,
+ config: &Config,
+ line_index: &LineIndex,
+ tdpp: &lsp_types::TextDocumentPositionParams,
+ max_relevance: u32,
+ item: CompletionItem,
+) {
+ let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
+ let mut additional_text_edits = Vec::new();
+
+ // LSP does not allow arbitrary edits in completion, so we have to do a
+ // non-trivial mapping here.
+ let text_edit = {
+ let mut text_edit = None;
+ let source_range = item.source_range();
+ for indel in item.text_edit().iter() {
+ if indel.delete.contains_range(source_range) {
+ text_edit = Some(if indel.delete == source_range {
+ self::completion_text_edit(line_index, insert_replace_support, indel.clone())
+ } else {
+ assert!(source_range.end() == indel.delete.end());
+ let range1 = TextRange::new(indel.delete.start(), source_range.start());
+ let range2 = source_range;
+ let indel1 = Indel::replace(range1, String::new());
+ let indel2 = Indel::replace(range2, indel.insert.clone());
+ additional_text_edits.push(self::text_edit(line_index, indel1));
+ self::completion_text_edit(line_index, insert_replace_support, indel2)
+ })
+ } else {
+ assert!(source_range.intersect(indel.delete).is_none());
+ let text_edit = self::text_edit(line_index, indel.clone());
+ additional_text_edits.push(text_edit);
+ }
+ }
+ text_edit.unwrap()
+ };
+
+ let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
+ let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
+ let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
+ Some(command::trigger_parameter_hints())
+ } else {
+ None
+ };
+
+ let mut lsp_item = lsp_types::CompletionItem {
+ label: item.label().to_string(),
+ detail: item.detail().map(|it| it.to_string()),
+ filter_text: Some(item.lookup().to_string()),
+ kind: Some(completion_item_kind(item.kind())),
+ text_edit: Some(text_edit),
+ additional_text_edits: Some(additional_text_edits),
+ documentation: item.documentation().map(documentation),
+ deprecated: Some(item.deprecated()),
+ tags,
+ command,
+ insert_text_format,
+ ..Default::default()
+ };
+
+ if config.completion_label_details_support() {
+ lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
+ detail: None,
+ description: lsp_item.detail.clone(),
+ });
+ }
+
+ set_score(&mut lsp_item, max_relevance, item.relevance());
+
+ if config.completion().enable_imports_on_the_fly {
+ if let imports @ [_, ..] = item.imports_to_add() {
+ let imports: Vec<_> = imports
+ .iter()
+ .filter_map(|import_edit| {
+ let import_path = &import_edit.import_path;
+ let import_name = import_path.segments().last()?;
+ Some(lsp_ext::CompletionImport {
+ full_import_path: import_path.to_string(),
+ imported_name: import_name.to_string(),
+ })
+ })
+ .collect();
+ if !imports.is_empty() {
+ let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
+ lsp_item.data = Some(to_value(data).unwrap());
+ }
+ }
+ }
+
+ if let Some((mutability, offset, relevance)) = item.ref_match() {
+ let mut lsp_item_with_ref = lsp_item.clone();
+ set_score(&mut lsp_item_with_ref, max_relevance, relevance);
+ lsp_item_with_ref.label =
+ format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label);
+ lsp_item_with_ref.additional_text_edits.get_or_insert_with(Default::default).push(
+ self::text_edit(
+ line_index,
+ Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())),
+ ),
+ );
+
+ acc.push(lsp_item_with_ref);
+ };
+
+ acc.push(lsp_item);
+
+ fn set_score(
+ res: &mut lsp_types::CompletionItem,
+ max_relevance: u32,
+ relevance: CompletionRelevance,
+ ) {
+ if relevance.is_relevant() && relevance.score() == max_relevance {
+ res.preselect = Some(true);
+ }
+ // The relevance needs to be inverted to come up with a sort score
+ // because the client will sort ascending.
+ let sort_score = relevance.score() ^ 0xFF_FF_FF_FF;
+ // Zero pad the string to ensure values can be properly sorted
+ // by the client. Hex format is used because it is easier to
+ // visually compare very large values, which the sort text
+ // tends to be since it is the opposite of the score.
+ res.sort_text = Some(format!("{:08x}", sort_score));
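+ // Worked example (editor addition): a hypothetical score of 5 inverts to
+ // 0xFFFF_FFFA and renders as the sort text "fffffffa"; if 5 is also the
+ // maximum relevance, the item is additionally marked as preselected above.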
+ }
+}
+
+pub(crate) fn signature_help(
+ call_info: SignatureHelp,
+ config: CallInfoConfig,
+ label_offsets: bool,
+) -> lsp_types::SignatureHelp {
+ let (label, parameters) = match (config.params_only, label_offsets) {
+ (concise, false) => {
+ let params = call_info
+ .parameter_labels()
+ .map(|label| lsp_types::ParameterInformation {
+ label: lsp_types::ParameterLabel::Simple(label.to_string()),
+ documentation: None,
+ })
+ .collect::<Vec<_>>();
+ let label =
+ if concise { call_info.parameter_labels().join(", ") } else { call_info.signature };
+ (label, params)
+ }
+ (false, true) => {
+ let params = call_info
+ .parameter_ranges()
+ .iter()
+ .map(|it| {
+ let start = call_info.signature[..it.start().into()].chars().count() as u32;
+ let end = call_info.signature[..it.end().into()].chars().count() as u32;
+ [start, end]
+ })
+ .map(|label_offsets| lsp_types::ParameterInformation {
+ label: lsp_types::ParameterLabel::LabelOffsets(label_offsets),
+ documentation: None,
+ })
+ .collect::<Vec<_>>();
+ (call_info.signature, params)
+ }
+ (true, true) => {
+ let mut params = Vec::new();
+ let mut label = String::new();
+ let mut first = true;
+ for param in call_info.parameter_labels() {
+ if !first {
+ label.push_str(", ");
+ }
+ first = false;
+ let start = label.chars().count() as u32;
+ label.push_str(param);
+ let end = label.chars().count() as u32;
+ params.push(lsp_types::ParameterInformation {
+ label: lsp_types::ParameterLabel::LabelOffsets([start, end]),
+ documentation: None,
+ });
+ }
+
+ (label, params)
+ }
+ };
+
+ let documentation = call_info.doc.filter(|_| config.docs).map(|doc| {
+ lsp_types::Documentation::MarkupContent(lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::Markdown,
+ value: doc,
+ })
+ });
+
+ let active_parameter = call_info.active_parameter.map(|it| it as u32);
+
+ let signature = lsp_types::SignatureInformation {
+ label,
+ documentation,
+ parameters: Some(parameters),
+ active_parameter,
+ };
+ lsp_types::SignatureHelp {
+ signatures: vec![signature],
+ active_signature: Some(0),
+ active_parameter,
+ }
+}
+
+pub(crate) fn inlay_hint(
+ snap: &GlobalStateSnapshot,
+ line_index: &LineIndex,
+ render_colons: bool,
+ inlay_hint: InlayHint,
+) -> lsp_types::InlayHint {
+ lsp_types::InlayHint {
+ position: match inlay_hint.kind {
+ // before annotated thing
+ InlayKind::ParameterHint
+ | InlayKind::ImplicitReborrowHint
+ | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
+ // after annotated thing
+ InlayKind::ClosureReturnTypeHint
+ | InlayKind::TypeHint
+ | InlayKind::ChainingHint
+ | InlayKind::GenericParamListHint
+ | InlayKind::LifetimeHint
+ | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
+ },
+ padding_left: Some(match inlay_hint.kind {
+ InlayKind::TypeHint => !render_colons,
+ InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
+ InlayKind::BindingModeHint
+ | InlayKind::ClosureReturnTypeHint
+ | InlayKind::GenericParamListHint
+ | InlayKind::ImplicitReborrowHint
+ | InlayKind::LifetimeHint
+ | InlayKind::ParameterHint => false,
+ }),
+ padding_right: Some(match inlay_hint.kind {
+ InlayKind::ChainingHint
+ | InlayKind::ClosureReturnTypeHint
+ | InlayKind::GenericParamListHint
+ | InlayKind::ImplicitReborrowHint
+ | InlayKind::TypeHint
+ | InlayKind::ClosingBraceHint => false,
+ InlayKind::BindingModeHint => inlay_hint.label != "&",
+ InlayKind::ParameterHint | InlayKind::LifetimeHint => true,
+ }),
+ label: lsp_types::InlayHintLabel::String(match inlay_hint.kind {
+ InlayKind::ParameterHint if render_colons => format!("{}:", inlay_hint.label),
+ InlayKind::TypeHint if render_colons => format!(": {}", inlay_hint.label),
+ InlayKind::ClosureReturnTypeHint => format!(" -> {}", inlay_hint.label),
+ _ => inlay_hint.label.clone(),
+ }),
+ kind: match inlay_hint.kind {
+ InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER),
+ InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
+ Some(lsp_types::InlayHintKind::TYPE)
+ }
+ InlayKind::BindingModeHint
+ | InlayKind::GenericParamListHint
+ | InlayKind::LifetimeHint
+ | InlayKind::ImplicitReborrowHint
+ | InlayKind::ClosingBraceHint => None,
+ },
+ text_edits: None,
+ data: (|| match inlay_hint.tooltip {
+ Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => {
+ let uri = url(snap, file_id);
+ let line_index = snap.file_line_index(file_id).ok()?;
+
+ let text_document = lsp_types::TextDocumentIdentifier { uri };
+ to_value(lsp_ext::InlayHintResolveData {
+ text_document,
+ position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)),
+ })
+ .ok()
+ }
+ Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => {
+ let uri = url(snap, file_id);
+ let text_document = lsp_types::TextDocumentIdentifier { uri };
+ let line_index = snap.file_line_index(file_id).ok()?;
+ to_value(lsp_ext::InlayHintResolveData {
+ text_document,
+ position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)),
+ })
+ .ok()
+ }
+ _ => None,
+ })(),
+ tooltip: Some(match inlay_hint.tooltip {
+ Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s),
+ _ => lsp_types::InlayHintTooltip::String(inlay_hint.label),
+ }),
+ }
+}
+
+static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+
+pub(crate) fn semantic_tokens(
+ text: &str,
+ line_index: &LineIndex,
+ highlights: Vec<HlRange>,
+ highlight_strings: bool,
+) -> lsp_types::SemanticTokens {
+ let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
+ let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
+
+ for highlight_range in highlights {
+ if highlight_range.highlight.is_empty() {
+ continue;
+ }
+ let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+ if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
+ continue;
+ }
+ let token_index = semantic_tokens::type_index(ty);
+ let modifier_bitset = mods.0;
+
+ for mut text_range in line_index.index.lines(highlight_range.range) {
+ if text[text_range].ends_with('\n') {
+ text_range =
+ TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
+ }
+ let range = range(line_index, text_range);
+ builder.push(range, token_index, modifier_bitset);
+ }
+ }
+
+ builder.build()
+}
+
+pub(crate) fn semantic_token_delta(
+ previous: &lsp_types::SemanticTokens,
+ current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensDelta {
+ let result_id = current.result_id.clone();
+ let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
+ lsp_types::SemanticTokensDelta { result_id, edits }
+}
+
+fn semantic_token_type_and_modifiers(
+ highlight: Highlight,
+) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
+ let mut mods = semantic_tokens::ModifierSet::default();
+ let type_ = match highlight.tag {
+ HlTag::Symbol(symbol) => match symbol {
+ SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
+ SymbolKind::Derive => semantic_tokens::DERIVE,
+ SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
+ SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
+ SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
+ SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
+ SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
+ SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
+ SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
+ SymbolKind::Label => semantic_tokens::LABEL,
+ SymbolKind::ValueParam => lsp_types::SemanticTokenType::PARAMETER,
+ SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
+ SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
+ SymbolKind::Local => lsp_types::SemanticTokenType::VARIABLE,
+ SymbolKind::Function => {
+ if highlight.mods.contains(HlMod::Associated) {
+ lsp_types::SemanticTokenType::METHOD
+ } else {
+ lsp_types::SemanticTokenType::FUNCTION
+ }
+ }
+ SymbolKind::Const => {
+ mods |= semantic_tokens::CONSTANT;
+ mods |= lsp_types::SemanticTokenModifier::STATIC;
+ lsp_types::SemanticTokenType::VARIABLE
+ }
+ SymbolKind::Static => {
+ mods |= lsp_types::SemanticTokenModifier::STATIC;
+ lsp_types::SemanticTokenType::VARIABLE
+ }
+ SymbolKind::Struct => lsp_types::SemanticTokenType::STRUCT,
+ SymbolKind::Enum => lsp_types::SemanticTokenType::ENUM,
+ SymbolKind::Variant => lsp_types::SemanticTokenType::ENUM_MEMBER,
+ SymbolKind::Union => semantic_tokens::UNION,
+ SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
+ SymbolKind::Trait => lsp_types::SemanticTokenType::INTERFACE,
+ SymbolKind::Macro => lsp_types::SemanticTokenType::MACRO,
+ SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
+ SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
+ },
+ HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
+ HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
+ HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
+ HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER,
+ HlTag::CharLiteral => semantic_tokens::CHAR,
+ HlTag::Comment => lsp_types::SemanticTokenType::COMMENT,
+ HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
+ HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
+ HlTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
+ HlTag::None => semantic_tokens::GENERIC,
+ HlTag::Operator(op) => match op {
+ HlOperator::Bitwise => semantic_tokens::BITWISE,
+ HlOperator::Arithmetic => semantic_tokens::ARITHMETIC,
+ HlOperator::Logical => semantic_tokens::LOGICAL,
+ HlOperator::Comparison => semantic_tokens::COMPARISON,
+ HlOperator::Other => semantic_tokens::OPERATOR,
+ },
+ HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING,
+ HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
+ HlTag::Punctuation(punct) => match punct {
+ HlPunct::Bracket => semantic_tokens::BRACKET,
+ HlPunct::Brace => semantic_tokens::BRACE,
+ HlPunct::Parenthesis => semantic_tokens::PARENTHESIS,
+ HlPunct::Angle => semantic_tokens::ANGLE,
+ HlPunct::Comma => semantic_tokens::COMMA,
+ HlPunct::Dot => semantic_tokens::DOT,
+ HlPunct::Colon => semantic_tokens::COLON,
+ HlPunct::Semi => semantic_tokens::SEMICOLON,
+ HlPunct::Other => semantic_tokens::PUNCTUATION,
+ HlPunct::MacroBang => semantic_tokens::MACRO_BANG,
+ },
+ };
+
+ for modifier in highlight.mods.iter() {
+ let modifier = match modifier {
+ HlMod::Associated => continue,
+ HlMod::Async => semantic_tokens::ASYNC,
+ HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER,
+ HlMod::Callable => semantic_tokens::CALLABLE,
+ HlMod::Consuming => semantic_tokens::CONSUMING,
+ HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
+ HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
+ HlMod::DefaultLibrary => lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY,
+ HlMod::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
+ HlMod::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION,
+ HlMod::Injected => semantic_tokens::INJECTED,
+ HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
+ HlMod::Library => semantic_tokens::LIBRARY,
+ HlMod::Mutable => semantic_tokens::MUTABLE,
+ HlMod::Public => semantic_tokens::PUBLIC,
+ HlMod::Reference => semantic_tokens::REFERENCE,
+ HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
+ HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
+ HlMod::Unsafe => semantic_tokens::UNSAFE,
+ };
+ mods |= modifier;
+ }
+
+ (type_, mods)
+}
+
+pub(crate) fn folding_range(
+ text: &str,
+ line_index: &LineIndex,
+ line_folding_only: bool,
+ fold: Fold,
+) -> lsp_types::FoldingRange {
+ let kind = match fold.kind {
+ FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
+ FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
+ FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
+ FoldKind::Mods
+ | FoldKind::Block
+ | FoldKind::ArgList
+ | FoldKind::Consts
+ | FoldKind::Statics
+ | FoldKind::WhereClause
+ | FoldKind::ReturnType
+ | FoldKind::Array
+ | FoldKind::MatchArm => None,
+ };
+
+ let range = range(line_index, fold.range);
+
+ if line_folding_only {
+ // Clients with line_folding_only == true (such as VSCode) will fold the whole end line
+ // even if it contains text not in the folding range. To prevent that we exclude
+ // range.end.line from the folding region if there is more text after range.end
+ // on the same line.
+ let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
+ .chars()
+ .take_while(|it| *it != '\n')
+ .any(|it| !it.is_whitespace());
+
+ let end_line = if has_more_text_on_end_line {
+ range.end.line.saturating_sub(1)
+ } else {
+ range.end.line
+ };
+
+ lsp_types::FoldingRange {
+ start_line: range.start.line,
+ start_character: None,
+ end_line,
+ end_character: None,
+ kind,
+ }
+ } else {
+ lsp_types::FoldingRange {
+ start_line: range.start.line,
+ start_character: Some(range.start.character),
+ end_line: range.end.line,
+ end_character: Some(range.end.character),
+ kind,
+ }
+ }
+}
+
+pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url {
+ snap.file_id_to_url(file_id)
+}
+
+/// Returns a `Url` object from a given path, lowercasing the drive letter if one
+/// is present. This only happens when processing Windows paths.
+///
+/// For non-Windows paths, this is essentially the same as `Url::from_file_path`.
+pub(crate) fn url_from_abs_path(path: &AbsPath) -> lsp_types::Url {
+ let url = lsp_types::Url::from_file_path(path).unwrap();
+ match path.as_ref().components().next() {
+ Some(path::Component::Prefix(prefix))
+ if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) =>
+ {
+ // Need to lowercase the drive letter.
+ }
+ _ => return url,
+ }
+
+ let driver_letter_range = {
+ let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() {
+ Some(it) => it,
+ None => return url,
+ };
+ let start = scheme.len() + ':'.len_utf8();
+ start..(start + drive_letter.len())
+ };
+
+ // Note: lowercasing the `path` itself doesn't help, the `Url::parse`
+ // machinery *also* canonicalizes the drive letter. So, just massage the
+ // string in place.
+ let mut url: String = url.into();
+ url[driver_letter_range].make_ascii_lowercase();
+ lsp_types::Url::parse(&url).unwrap()
+}
+
+pub(crate) fn optional_versioned_text_document_identifier(
+ snap: &GlobalStateSnapshot,
+ file_id: FileId,
+) -> lsp_types::OptionalVersionedTextDocumentIdentifier {
+ let url = url(snap, file_id);
+ let version = snap.url_file_version(&url);
+ lsp_types::OptionalVersionedTextDocumentIdentifier { uri: url, version }
+}
+
+pub(crate) fn location(
+ snap: &GlobalStateSnapshot,
+ frange: FileRange,
+) -> Result<lsp_types::Location> {
+ let url = url(snap, frange.file_id);
+ let line_index = snap.file_line_index(frange.file_id)?;
+ let range = range(&line_index, frange.range);
+ let loc = lsp_types::Location::new(url, range);
+ Ok(loc)
+}
+
+/// Prefer using `location_link`, if the client has the capability.
+pub(crate) fn location_from_nav(
+ snap: &GlobalStateSnapshot,
+ nav: NavigationTarget,
+) -> Result<lsp_types::Location> {
+ let url = url(snap, nav.file_id);
+ let line_index = snap.file_line_index(nav.file_id)?;
+ let range = range(&line_index, nav.full_range);
+ let loc = lsp_types::Location::new(url, range);
+ Ok(loc)
+}
+
+pub(crate) fn location_link(
+ snap: &GlobalStateSnapshot,
+ src: Option<FileRange>,
+ target: NavigationTarget,
+) -> Result<lsp_types::LocationLink> {
+ let origin_selection_range = match src {
+ Some(src) => {
+ let line_index = snap.file_line_index(src.file_id)?;
+ let range = range(&line_index, src.range);
+ Some(range)
+ }
+ None => None,
+ };
+ let (target_uri, target_range, target_selection_range) = location_info(snap, target)?;
+ let res = lsp_types::LocationLink {
+ origin_selection_range,
+ target_uri,
+ target_range,
+ target_selection_range,
+ };
+ Ok(res)
+}
+
+fn location_info(
+ snap: &GlobalStateSnapshot,
+ target: NavigationTarget,
+) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
+ let line_index = snap.file_line_index(target.file_id)?;
+
+ let target_uri = url(snap, target.file_id);
+ let target_range = range(&line_index, target.full_range);
+ let target_selection_range =
+ target.focus_range.map(|it| range(&line_index, it)).unwrap_or(target_range);
+ Ok((target_uri, target_range, target_selection_range))
+}
+
+pub(crate) fn goto_definition_response(
+ snap: &GlobalStateSnapshot,
+ src: Option<FileRange>,
+ targets: Vec<NavigationTarget>,
+) -> Result<lsp_types::GotoDefinitionResponse> {
+ if snap.config.location_link() {
+ let links = targets
+ .into_iter()
+ .map(|nav| location_link(snap, src, nav))
+ .collect::<Result<Vec<_>>>()?;
+ Ok(links.into())
+ } else {
+ let locations = targets
+ .into_iter()
+ .map(|nav| {
+ location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ })
+ .collect::<Result<Vec<_>>>()?;
+ Ok(locations.into())
+ }
+}
+
+fn outside_workspace_annotation_id() -> String {
+ String::from("OutsideWorkspace")
+}
+
+pub(crate) fn snippet_text_document_edit(
+ snap: &GlobalStateSnapshot,
+ is_snippet: bool,
+ file_id: FileId,
+ edit: TextEdit,
+) -> Result<lsp_ext::SnippetTextDocumentEdit> {
+ let text_document = optional_versioned_text_document_identifier(snap, file_id);
+ let line_index = snap.file_line_index(file_id)?;
+ let mut edits: Vec<_> =
+ edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
+
+ if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
+ for edit in &mut edits {
+ edit.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ }
+ Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
+}
+
+pub(crate) fn snippet_text_document_ops(
+ snap: &GlobalStateSnapshot,
+ file_system_edit: FileSystemEdit,
+) -> Cancellable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
+ let mut ops = Vec::new();
+ match file_system_edit {
+ FileSystemEdit::CreateFile { dst, initial_contents } => {
+ let uri = snap.anchored_path(&dst);
+ let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile {
+ uri: uri.clone(),
+ options: None,
+ annotation_id: None,
+ });
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(create_file));
+ if !initial_contents.is_empty() {
+ let text_document =
+ lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
+ let text_edit = lsp_ext::SnippetTextEdit {
+ range: lsp_types::Range::default(),
+ new_text: initial_contents,
+ insert_text_format: Some(lsp_types::InsertTextFormat::PLAIN_TEXT),
+ annotation_id: None,
+ };
+ let edit_file =
+ lsp_ext::SnippetTextDocumentEdit { text_document, edits: vec![text_edit] };
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit_file));
+ }
+ }
+ FileSystemEdit::MoveFile { src, dst } => {
+ let old_uri = snap.file_id_to_url(src);
+ let new_uri = snap.anchored_path(&dst);
+ let mut rename_file =
+ lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
+ if snap.analysis.is_library_file(src).ok() == Some(true)
+ && snap.config.change_annotation_support()
+ {
+ rename_file.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
+ rename_file,
+ )))
+ }
+ FileSystemEdit::MoveDir { src, src_id, dst } => {
+ let old_uri = snap.anchored_path(&src);
+ let new_uri = snap.anchored_path(&dst);
+ let mut rename_file =
+ lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
+ if snap.analysis.is_library_file(src_id).ok() == Some(true)
+ && snap.config.change_annotation_support()
+ {
+ rename_file.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
+ rename_file,
+ )))
+ }
+ }
+ Ok(ops)
+}
+
+pub(crate) fn snippet_workspace_edit(
+ snap: &GlobalStateSnapshot,
+ source_change: SourceChange,
+) -> Result<lsp_ext::SnippetWorkspaceEdit> {
+ let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
+
+ for op in source_change.file_system_edits {
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
+ for (file_id, edit) in source_change.source_file_edits {
+ let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
+ document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
+ }
+ let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
+ changes: None,
+ document_changes: Some(document_changes),
+ change_annotations: None,
+ };
+ if snap.config.change_annotation_support() {
+ workspace_edit.change_annotations = Some(
+ once((
+ outside_workspace_annotation_id(),
+ lsp_types::ChangeAnnotation {
+ label: String::from("Edit outside of the workspace"),
+ needs_confirmation: Some(true),
+ description: Some(String::from(
+ "This edit lies outside of the workspace and may affect dependencies",
+ )),
+ },
+ ))
+ .collect(),
+ )
+ }
+ Ok(workspace_edit)
+}
+
+pub(crate) fn workspace_edit(
+ snap: &GlobalStateSnapshot,
+ source_change: SourceChange,
+) -> Result<lsp_types::WorkspaceEdit> {
+ assert!(!source_change.is_snippet);
+ snippet_workspace_edit(snap, source_change).map(|it| it.into())
+}
+
+impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
+ fn from(snippet_workspace_edit: lsp_ext::SnippetWorkspaceEdit) -> lsp_types::WorkspaceEdit {
+ lsp_types::WorkspaceEdit {
+ changes: None,
+ document_changes: snippet_workspace_edit.document_changes.map(|changes| {
+ lsp_types::DocumentChanges::Operations(
+ changes
+ .into_iter()
+ .map(|change| match change {
+ lsp_ext::SnippetDocumentChangeOperation::Op(op) => {
+ lsp_types::DocumentChangeOperation::Op(op)
+ }
+ lsp_ext::SnippetDocumentChangeOperation::Edit(edit) => {
+ lsp_types::DocumentChangeOperation::Edit(
+ lsp_types::TextDocumentEdit {
+ text_document: edit.text_document,
+ edits: edit.edits.into_iter().map(From::from).collect(),
+ },
+ )
+ }
+ })
+ .collect(),
+ )
+ }),
+ change_annotations: snippet_workspace_edit.change_annotations,
+ }
+ }
+}
+
+impl From<lsp_ext::SnippetTextEdit>
+ for lsp_types::OneOf<lsp_types::TextEdit, lsp_types::AnnotatedTextEdit>
+{
+ fn from(
+ lsp_ext::SnippetTextEdit { annotation_id, insert_text_format: _, new_text, range }: lsp_ext::SnippetTextEdit,
+ ) -> Self {
+ match annotation_id {
+ Some(annotation_id) => lsp_types::OneOf::Right(lsp_types::AnnotatedTextEdit {
+ text_edit: lsp_types::TextEdit { range, new_text },
+ annotation_id,
+ }),
+ None => lsp_types::OneOf::Left(lsp_types::TextEdit { range, new_text }),
+ }
+ }
+}
+
+pub(crate) fn call_hierarchy_item(
+ snap: &GlobalStateSnapshot,
+ target: NavigationTarget,
+) -> Result<lsp_types::CallHierarchyItem> {
+ let name = target.name.to_string();
+ let detail = target.description.clone();
+ let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::FUNCTION);
+ let (uri, range, selection_range) = location_info(snap, target)?;
+ Ok(lsp_types::CallHierarchyItem {
+ name,
+ kind,
+ tags: None,
+ detail,
+ uri,
+ range,
+ selection_range,
+ data: None,
+ })
+}
+
+pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind {
+ match kind {
+ AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
+ AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX,
+ AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR,
+ AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT,
+ AssistKind::RefactorInline => lsp_types::CodeActionKind::REFACTOR_INLINE,
+ AssistKind::RefactorRewrite => lsp_types::CodeActionKind::REFACTOR_REWRITE,
+ }
+}
+
+pub(crate) fn code_action(
+ snap: &GlobalStateSnapshot,
+ assist: Assist,
+ resolve_data: Option<(usize, lsp_types::CodeActionParams)>,
+) -> Result<lsp_ext::CodeAction> {
+ let mut res = lsp_ext::CodeAction {
+ title: assist.label.to_string(),
+ group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0),
+ kind: Some(code_action_kind(assist.id.1)),
+ edit: None,
+ is_preferred: None,
+ data: None,
+ command: None,
+ };
+
+ if assist.trigger_signature_help && snap.config.client_commands().trigger_parameter_hints {
+ res.command = Some(command::trigger_parameter_hints());
+ }
+
+ match (assist.source_change, resolve_data) {
+ (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
+ (None, Some((index, code_action_params))) => {
+ res.data = Some(lsp_ext::CodeActionData {
+ id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
+ code_action_params,
+ });
+ }
+ (None, None) => {
+ stdx::never!("assist should always be resolved if client can't do lazy resolving")
+ }
+ };
+ Ok(res)
+}
+
+pub(crate) fn runnable(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
+) -> Result<lsp_ext::Runnable> {
+ let config = snap.config.runnables();
+ let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
+ let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
+ let target = spec.as_ref().map(|s| s.target.clone());
+ let (cargo_args, executable_args) =
+ CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg)?;
+ let label = runnable.label(target);
+ let location = location_link(snap, None, runnable.nav)?;
+
+ Ok(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: workspace_root.map(|it| it.into()),
+ override_cargo: config.override_cargo,
+ cargo_args,
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args,
+ expect_test: None,
+ },
+ })
+}
+
+pub(crate) fn code_lens(
+ acc: &mut Vec<lsp_types::CodeLens>,
+ snap: &GlobalStateSnapshot,
+ annotation: Annotation,
+) -> Result<()> {
+ let client_commands_config = snap.config.client_commands();
+ match annotation.kind {
+ AnnotationKind::Runnable(run) => {
+ let line_index = snap.file_line_index(run.nav.file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+
+ let title = run.title();
+ let can_debug = match run.kind {
+ ide::RunnableKind::DocTest { .. } => false,
+ ide::RunnableKind::TestMod { .. }
+ | ide::RunnableKind::Test { .. }
+ | ide::RunnableKind::Bench { .. }
+ | ide::RunnableKind::Bin => true,
+ };
+ let r = runnable(snap, run)?;
+
+ let lens_config = snap.config.lens();
+ if lens_config.run && client_commands_config.run_single {
+ let command = command::run_single(&r, &title);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ if lens_config.debug && can_debug && client_commands_config.debug_single {
+ let command = command::debug_single(&r);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ }
+ AnnotationKind::HasImpls { file_id, data } => {
+ if !client_commands_config.show_reference {
+ return Ok(());
+ }
+ let line_index = snap.file_line_index(file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+ let url = url(snap, file_id);
+
+ let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
+
+ let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
+
+ let goto_params = lsp_types::request::GotoImplementationParams {
+ text_document_position_params: doc_pos,
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ };
+
+ let command = data.map(|ranges| {
+ let locations: Vec<lsp_types::Location> = ranges
+ .into_iter()
+ .filter_map(|target| {
+ location(
+ snap,
+ FileRange { file_id: target.file_id, range: target.full_range },
+ )
+ .ok()
+ })
+ .collect();
+
+ command::show_references(
+ implementation_title(locations.len()),
+ &url,
+ annotation_range.start,
+ locations,
+ )
+ });
+
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command,
+ data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
+ })
+ }
+ AnnotationKind::HasReferences { file_id, data } => {
+ if !client_commands_config.show_reference {
+ return Ok(());
+ }
+ let line_index = snap.file_line_index(file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+ let url = url(snap, file_id);
+
+ let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
+
+ let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
+
+ let command = data.map(|ranges| {
+ let locations: Vec<lsp_types::Location> =
+ ranges.into_iter().filter_map(|range| location(snap, range).ok()).collect();
+
+ command::show_references(
+ reference_title(locations.len()),
+ &url,
+ annotation_range.start,
+ locations,
+ )
+ });
+
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command,
+ data: Some(to_value(lsp_ext::CodeLensResolveData::References(doc_pos)).unwrap()),
+ })
+ }
+ }
+ Ok(())
+}
+
+pub(crate) mod command {
+ use ide::{FileRange, NavigationTarget};
+ use serde_json::to_value;
+
+ use crate::{
+ global_state::GlobalStateSnapshot,
+ lsp_ext,
+ to_proto::{location, location_link},
+ };
+
+ pub(crate) fn show_references(
+ title: String,
+ uri: &lsp_types::Url,
+ position: lsp_types::Position,
+ locations: Vec<lsp_types::Location>,
+ ) -> lsp_types::Command {
+ // We cannot use the 'editor.action.showReferences' command directly
+ // because that command requires vscode types which we convert in the handler
+ // on the client side.
+
+ lsp_types::Command {
+ title,
+ command: "rust-analyzer.showReferences".into(),
+ arguments: Some(vec![
+ to_value(uri).unwrap(),
+ to_value(position).unwrap(),
+ to_value(locations).unwrap(),
+ ]),
+ }
+ }
+
+ pub(crate) fn run_single(runnable: &lsp_ext::Runnable, title: &str) -> lsp_types::Command {
+ lsp_types::Command {
+ title: title.to_string(),
+ command: "rust-analyzer.runSingle".into(),
+ arguments: Some(vec![to_value(runnable).unwrap()]),
+ }
+ }
+
+ pub(crate) fn debug_single(runnable: &lsp_ext::Runnable) -> lsp_types::Command {
+ lsp_types::Command {
+ title: "Debug".into(),
+ command: "rust-analyzer.debugSingle".into(),
+ arguments: Some(vec![to_value(runnable).unwrap()]),
+ }
+ }
+
+ pub(crate) fn goto_location(
+ snap: &GlobalStateSnapshot,
+ nav: &NavigationTarget,
+ ) -> Option<lsp_types::Command> {
+ let value = if snap.config.location_link() {
+ let link = location_link(snap, None, nav.clone()).ok()?;
+ to_value(link).ok()?
+ } else {
+ let range = FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() };
+ let location = location(snap, range).ok()?;
+ to_value(location).ok()?
+ };
+
+ Some(lsp_types::Command {
+ title: nav.name.to_string(),
+ command: "rust-analyzer.gotoLocation".into(),
+ arguments: Some(vec![value]),
+ })
+ }
+
+ pub(crate) fn trigger_parameter_hints() -> lsp_types::Command {
+ lsp_types::Command {
+ title: "triggerParameterHints".into(),
+ command: "editor.action.triggerParameterHints".into(),
+ arguments: None,
+ }
+ }
+}
+
+pub(crate) fn implementation_title(count: usize) -> String {
+ if count == 1 {
+ "1 implementation".into()
+ } else {
+ format!("{} implementations", count)
+ }
+}
+
+pub(crate) fn reference_title(count: usize) -> String {
+ if count == 1 {
+ "1 reference".into()
+ } else {
+ format!("{} references", count)
+ }
+}
+
+pub(crate) fn markup_content(
+ markup: Markup,
+ kind: ide::HoverDocFormat,
+) -> lsp_types::MarkupContent {
+ let kind = match kind {
+ ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
+ ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
+ };
+ let value = crate::markdown::format_docs(markup.as_str());
+ lsp_types::MarkupContent { kind, value }
+}
+
+pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
+ // This is wrong, but we don't have a better alternative I suppose?
+ // https://github.com/microsoft/language-server-protocol/issues/1341
+ invalid_params_error(err.to_string())
+}
+
+#[cfg(test)]
+mod tests {
+ use std::sync::Arc;
+
+ use ide::Analysis;
+
+ use super::*;
+
+ #[test]
+ fn conv_fold_line_folding_only_fixup() {
+ let text = r#"mod a;
+mod b;
+mod c;
+
+fn main() {
+ if cond {
+ a::do_a();
+ } else {
+ b::do_b();
+ }
+}"#;
+
+ let (analysis, file_id) = Analysis::from_single_file(text.to_string());
+ let folds = analysis.folding_ranges(file_id).unwrap();
+ assert_eq!(folds.len(), 4);
+
+ let line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(text)),
+ endings: LineEndings::Unix,
+ encoding: OffsetEncoding::Utf16,
+ };
+ let converted: Vec<lsp_types::FoldingRange> =
+ folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
+
+ let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
+ assert_eq!(converted.len(), expected_lines.len());
+ for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
+ assert_eq!(folding_range.start_line, *start_line);
+ assert_eq!(folding_range.start_character, None);
+ assert_eq!(folding_range.end_line, *end_line);
+ assert_eq!(folding_range.end_character, None);
+ }
+ }
+
+ // `Url` is not able to parse windows paths on unix machines.
+ #[test]
+ #[cfg(target_os = "windows")]
+ fn test_lowercase_drive_letter() {
+ use std::{convert::TryInto, path::Path};
+
+ let url = url_from_abs_path(Path::new("C:\\Test").try_into().unwrap());
+ assert_eq!(url.to_string(), "file:///c:/Test");
+
+ let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#).try_into().unwrap());
+ assert_eq!(url.to_string(), "file://localhost/C$/my_dir");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs
new file mode 100644
index 000000000..1e829299e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs
@@ -0,0 +1,57 @@
+//! Code for representing rust-analyzer's release version number.
+
+use std::fmt;
+
+/// Information about the git repository rust-analyzer was built from.
+pub struct CommitInfo {
+ pub short_commit_hash: &'static str,
+ pub commit_hash: &'static str,
+ pub commit_date: &'static str,
+}
+
+/// rust-analyzer's version information.
+pub struct VersionInfo {
+ /// rust-analyzer's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc.
+ pub version: &'static str,
+ /// The release channel we were built for (stable/beta/nightly/dev).
+ ///
+ /// `None` if not built via rustbuild.
+ pub release_channel: Option<&'static str>,
+ /// Information about the Git repository we may have been built from.
+ ///
+ /// `None` if not built from a git repo.
+ pub commit_info: Option<CommitInfo>,
+}
+
+impl fmt::Display for VersionInfo {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.version)?;
+
+ if let Some(ci) = &self.commit_info {
+ write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
+ };
+ Ok(())
+ }
+}
+
+/// Returns information about rust-analyzer's version.
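+///
+/// A rough sketch of the rendered output, assuming illustrative values for the
+/// `CFG_RELEASE`, `RA_COMMIT_SHORT_HASH` and `RA_COMMIT_DATE` env vars:
+/// ```ignore
+/// // CFG_RELEASE="1.64.0", RA_COMMIT_SHORT_HASH="deadbeef", RA_COMMIT_DATE="2022-08-01"
+/// assert_eq!(version().to_string(), "1.64.0 (deadbeef 2022-08-01)");
+/// ```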
+pub const fn version() -> VersionInfo {
+ let version = match option_env!("CFG_RELEASE") {
+ Some(x) => x,
+ None => "0.0.0",
+ };
+
+ let release_channel = option_env!("CFG_RELEASE_CHANNEL");
+ let commit_info = match (
+ option_env!("RA_COMMIT_SHORT_HASH"),
+ option_env!("RA_COMMIT_HASH"),
+ option_env!("RA_COMMIT_DATE"),
+ ) {
+ (Some(short_commit_hash), Some(commit_hash), Some(commit_date)) => {
+ Some(CommitInfo { short_commit_hash, commit_hash, commit_date })
+ }
+ _ => None,
+ };
+
+ VersionInfo { version, release_channel, commit_info }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
new file mode 100644
index 000000000..4cc46af1b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -0,0 +1,1099 @@
+//! The most high-level integrated tests for rust-analyzer.
+//!
+//! These tests run a full LSP event loop, spawn cargo, and process the stdlib
+//! from the sysroot. For this reason, the tests here are very slow and should
+//! be avoided unless absolutely necessary.
+//!
+//! In particular, it's fine *not* to test that client & server agree on
+//! specific JSON shapes here -- there's little value in such tests, as we can't
+//! be sure without a real client anyway.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen;
+mod support;
+mod testdir;
+mod tidy;
+
+use std::{collections::HashMap, path::PathBuf, time::Instant};
+
+use expect_test::expect;
+use lsp_types::{
+ notification::DidOpenTextDocument,
+ request::{
+ CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
+ WillRenameFiles, WorkspaceSymbol,
+ },
+ CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
+ DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
+ PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
+ TextDocumentPositionParams, WorkDoneProgressParams,
+};
+use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
+use serde_json::json;
+use test_utils::skip_slow_tests;
+
+use crate::{
+ support::{project, Project},
+ testdir::TestDir,
+};
+
+const PROFILE: &str = "";
+// const PROFILE: &'static str = "*@3>100";
+
+#[test]
+fn completes_items_from_standard_library() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+use std::collections::Spam;
+"#,
+ )
+ .with_config(serde_json::json!({
+ "cargo": { "noSysroot": false }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<Completion>(CompletionParams {
+ text_document_position: TextDocumentPositionParams::new(
+ server.doc_id("src/lib.rs"),
+ Position::new(0, 23),
+ ),
+ context: None,
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ });
+ assert!(res.to_string().contains("HashMap"));
+}
+
+#[test]
+fn test_runnables_project() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /foo/src/lib.rs
+pub fn foo() {}
+
+//- /foo/tests/spam.rs
+#[test]
+fn test_eggs() {}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+
+//- /bar/src/main.rs
+fn main() {}
+"#,
+ )
+ .root("foo")
+ .root("bar")
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Runnables>(
+ RunnablesParams { text_document: server.doc_id("foo/tests/spam.rs"), position: None },
+ json!([
+ {
+ "args": {
+ "cargoArgs": ["test", "--package", "foo", "--test", "spam"],
+ "executableArgs": ["test_eggs", "--exact", "--nocapture"],
+ "cargoExtraArgs": [],
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo")
+ },
+ "kind": "cargo",
+ "label": "test test_eggs",
+ "location": {
+ "targetRange": {
+ "end": { "character": 17, "line": 1 },
+ "start": { "character": 0, "line": 0 }
+ },
+ "targetSelectionRange": {
+ "end": { "character": 12, "line": 1 },
+ "start": { "character": 3, "line": 1 }
+ },
+ "targetUri": "file:///[..]/tests/spam.rs"
+ }
+ },
+ {
+ "args": {
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo"),
+ "cargoArgs": [
+ "test",
+ "--package",
+ "foo",
+ "--test",
+ "spam"
+ ],
+ "cargoExtraArgs": [],
+ "executableArgs": [
+ "",
+ "--nocapture"
+ ]
+ },
+ "kind": "cargo",
+ "label": "test-mod ",
+ "location": {
+ "targetUri": "file:///[..]/tests/spam.rs",
+ "targetRange": {
+ "start": {
+ "line": 0,
+ "character": 0
+ },
+ "end": {
+ "line": 3,
+ "character": 0
+ }
+ },
+ "targetSelectionRange": {
+ "start": {
+ "line": 0,
+ "character": 0
+ },
+ "end": {
+ "line": 3,
+ "character": 0
+ }
+ }
+ },
+ },
+ {
+ "args": {
+ "cargoArgs": ["check", "--package", "foo", "--all-targets"],
+ "executableArgs": [],
+ "cargoExtraArgs": [],
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo")
+ },
+ "kind": "cargo",
+ "label": "cargo check -p foo --all-targets"
+ },
+ {
+ "args": {
+ "cargoArgs": ["test", "--package", "foo", "--all-targets"],
+ "executableArgs": [],
+ "cargoExtraArgs": [],
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join("foo")
+ },
+ "kind": "cargo",
+ "label": "cargo test -p foo --all-targets"
+ }
+ ]),
+ );
+}
+
+// Each package in these workspaces should be run from its own root
+#[test]
+fn test_path_dependency_runnables() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /consumer/Cargo.toml
+[package]
+name = "consumer"
+version = "0.1.0"
+[dependencies]
+dependency = { path = "../dependency" }
+
+//- /consumer/src/lib.rs
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn consumer() {}
+}
+
+//- /dependency/Cargo.toml
+[package]
+name = "dependency"
+version = "0.1.0"
+[dev-dependencies]
+devdependency = { path = "../devdependency" }
+
+//- /dependency/src/lib.rs
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn dependency() {}
+}
+
+//- /devdependency/Cargo.toml
+[package]
+name = "devdependency"
+version = "0.1.0"
+
+//- /devdependency/src/lib.rs
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn devdependency() {}
+}
+ "#,
+ )
+ .root("consumer")
+ .root("dependency")
+ .root("devdependency")
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ for runnable in ["consumer", "dependency", "devdependency"] {
+ server.request::<Runnables>(
+ RunnablesParams {
+ text_document: server.doc_id(&format!("{}/src/lib.rs", runnable)),
+ position: None,
+ },
+ json!([
+ "{...}",
+ {
+ "label": "cargo test -p [..] --all-targets",
+ "kind": "cargo",
+ "args": {
+ "overrideCargo": null,
+ "workspaceRoot": server.path().join(runnable),
+ "cargoArgs": [
+ "test",
+ "--package",
+ runnable,
+ "--all-targets"
+ ],
+ "cargoExtraArgs": [],
+ "executableArgs": []
+ },
+ },
+ "{...}",
+ "{...}"
+ ]),
+ );
+ }
+}
+
+#[test]
+fn test_format_document() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+mod bar;
+
+fn main() {
+}
+
+pub use std::collections::HashMap;
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Formatting>(
+ DocumentFormattingParams {
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ properties: HashMap::new(),
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "newText": "",
+ "range": {
+ "end": { "character": 0, "line": 3 },
+ "start": { "character": 11, "line": 2 }
+ }
+ }
+ ]),
+ );
+}
+
+#[test]
+fn test_format_document_2018() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+edition = "2018"
+
+//- /src/lib.rs
+mod bar;
+
+async fn test() {
+}
+
+fn main() {
+}
+
+pub use std::collections::HashMap;
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Formatting>(
+ DocumentFormattingParams {
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ properties: HashMap::new(),
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "newText": "",
+ "range": {
+ "end": { "character": 0, "line": 3 },
+ "start": { "character": 17, "line": 2 }
+ }
+ },
+ {
+ "newText": "",
+ "range": {
+ "end": { "character": 0, "line": 6 },
+ "start": { "character": 11, "line": 5 }
+ }
+ }
+ ]),
+ );
+}
+
+#[test]
+fn test_format_document_unchanged() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+fn main() {}
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<Formatting>(
+ DocumentFormattingParams {
+ text_document: server.doc_id("src/lib.rs"),
+ options: FormattingOptions {
+ tab_size: 4,
+ insert_spaces: false,
+ insert_final_newline: None,
+ trim_final_newlines: None,
+ trim_trailing_whitespace: None,
+ properties: HashMap::new(),
+ },
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!(null),
+ );
+}
+
+#[test]
+fn test_missing_module_code_action() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = project(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+mod bar;
+
+fn main() {}
+"#,
+ )
+ .wait_until_workspace_is_loaded();
+
+ server.request::<CodeActionRequest>(
+ CodeActionParams {
+ text_document: server.doc_id("src/lib.rs"),
+ range: Range::new(Position::new(0, 4), Position::new(0, 7)),
+ context: CodeActionContext::default(),
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "title": "Create module at `bar.rs`",
+ "kind": "quickfix",
+ "edit": {
+ "documentChanges": [
+ {
+ "kind": "create",
+ "uri": "file://[..]/src/bar.rs"
+ }
+ ]
+ }
+ },
+ {
+ "title": "Create module at `bar/mod.rs`",
+ "kind": "quickfix",
+ "edit": {
+ "documentChanges": [
+ {
+ "kind": "create",
+ "uri": "file://[..]src/bar/mod.rs"
+ }
+ ]
+ }
+ }
+ ]),
+ );
+
+ server.request::<CodeActionRequest>(
+ CodeActionParams {
+ text_document: server.doc_id("src/lib.rs"),
+ range: Range::new(Position::new(2, 8), Position::new(2, 8)),
+ context: CodeActionContext::default(),
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([]),
+ );
+}
+
+#[test]
+fn test_missing_module_code_action_in_json_project() {
+ if skip_slow_tests() {
+ // return;
+ }
+
+ let tmp_dir = TestDir::new();
+
+ let path = tmp_dir.path();
+
+ let project = json!({
+ "roots": [path],
+ "crates": [ {
+ "root_module": path.join("src/lib.rs"),
+ "deps": [],
+ "edition": "2015",
+ "cfg": [ "cfg_atom_1", "feature=\"cfg_1\""],
+ } ]
+ });
+
+ let code = format!(
+ r#"
+//- /rust-project.json
+{PROJECT}
+
+//- /src/lib.rs
+mod bar;
+
+fn main() {{}}
+"#,
+ PROJECT = project,
+ );
+
+ let server =
+ Project::with_fixture(&code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded();
+
+ server.request::<CodeActionRequest>(
+ CodeActionParams {
+ text_document: server.doc_id("src/lib.rs"),
+ range: Range::new(Position::new(0, 4), Position::new(0, 7)),
+ context: CodeActionContext::default(),
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([
+ {
+ "title": "Create module at `bar.rs`",
+ "kind": "quickfix",
+ "edit": {
+ "documentChanges": [
+ {
+ "kind": "create",
+ "uri": "file://[..]/src/bar.rs"
+ }
+ ]
+ }
+ },
+ {
+ "title": "Create module at `bar/mod.rs`",
+ "kind": "quickfix",
+ "edit": {
+ "documentChanges": [
+ {
+ "kind": "create",
+ "uri": "file://[..]src/bar/mod.rs"
+ }
+ ]
+ }
+ }
+ ]),
+ );
+
+ server.request::<CodeActionRequest>(
+ CodeActionParams {
+ text_document: server.doc_id("src/lib.rs"),
+ range: Range::new(Position::new(2, 8), Position::new(2, 8)),
+ context: CodeActionContext::default(),
+ partial_result_params: PartialResultParams::default(),
+ work_done_progress_params: WorkDoneProgressParams::default(),
+ },
+ json!([]),
+ );
+}
+
+#[test]
+fn diagnostics_dont_block_typing() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect();
+ let libs: String = (0..10).map(|i| format!("//- /src/m{}.rs\nfn foo() {{}}\n\n", i)).collect();
+ let server = Project::with_fixture(&format!(
+ r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+{}
+
+{}
+
+fn main() {{}}
+"#,
+ librs, libs
+ ))
+ .with_config(serde_json::json!({
+ "cargo": { "noSysroot": false }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ for i in 0..10 {
+ server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
+ text_document: TextDocumentItem {
+ uri: server.doc_id(&format!("src/m{}.rs", i)).uri,
+ language_id: "rust".to_string(),
+ version: 0,
+ text: "/// Docs\nfn foo() {}".to_string(),
+ },
+ });
+ }
+ let start = Instant::now();
+ server.request::<OnEnter>(
+ TextDocumentPositionParams {
+ text_document: server.doc_id("src/m0.rs"),
+ position: Position { line: 0, character: 5 },
+ },
+ json!([{
+ "insertTextFormat": 2,
+ "newText": "\n/// $0",
+ "range": {
+ "end": { "character": 5, "line": 0 },
+ "start": { "character": 5, "line": 0 }
+ }
+ }]),
+ );
+ let elapsed = start.elapsed();
+ assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed);
+}
+
+#[test]
+fn preserves_dos_line_endings() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ "
+//- /Cargo.toml
+[package]
+name = \"foo\"
+version = \"0.0.0\"
+
+//- /src/main.rs
+/// Some Docs\r\nfn main() {}
+",
+ )
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<OnEnter>(
+ TextDocumentPositionParams {
+ text_document: server.doc_id("src/main.rs"),
+ position: Position { line: 0, character: 8 },
+ },
+ json!([{
+ "insertTextFormat": 2,
+ "newText": "\r\n/// $0",
+ "range": {
+ "end": { "line": 0, "character": 8 },
+ "start": { "line": 0, "character": 8 }
+ }
+ }]),
+ );
+}
+
+#[test]
+fn out_dirs_check() {
+ if skip_slow_tests() {
+ // return;
+ }
+
+ let server = Project::with_fixture(
+ r###"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /build.rs
+use std::{env, fs, path::Path};
+
+fn main() {
+ let out_dir = env::var_os("OUT_DIR").unwrap();
+ let dest_path = Path::new(&out_dir).join("hello.rs");
+ fs::write(
+ &dest_path,
+ r#"pub fn message() -> &'static str { "Hello, World!" }"#,
+ )
+ .unwrap();
+ println!("cargo:rustc-cfg=atom_cfg");
+ println!("cargo:rustc-cfg=featlike=\"set\"");
+ println!("cargo:rerun-if-changed=build.rs");
+}
+//- /src/main.rs
+#[rustc_builtin_macro] macro_rules! include {}
+#[rustc_builtin_macro] macro_rules! include_str {}
+#[rustc_builtin_macro] macro_rules! concat {}
+#[rustc_builtin_macro] macro_rules! env {}
+
+include!(concat!(env!("OUT_DIR"), "/hello.rs"));
+
+#[cfg(atom_cfg)]
+struct A;
+#[cfg(bad_atom_cfg)]
+struct A;
+#[cfg(featlike = "set")]
+struct B;
+#[cfg(featlike = "not_set")]
+struct B;
+
+fn main() {
+ let va = A;
+ let vb = B;
+ let should_be_str = message();
+ let another_str = include_str!("main.rs");
+}
+"###,
+ )
+ .with_config(serde_json::json!({
+ "cargo": {
+ "buildScripts": {
+ "enable": true
+ },
+ "noSysroot": true,
+ }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<HoverRequest>(HoverParams {
+ text_document_position_params: TextDocumentPositionParams::new(
+ server.doc_id("src/main.rs"),
+ Position::new(19, 10),
+ ),
+ work_done_progress_params: Default::default(),
+ });
+ assert!(res.to_string().contains("&str"));
+
+ let res = server.send_request::<HoverRequest>(HoverParams {
+ text_document_position_params: TextDocumentPositionParams::new(
+ server.doc_id("src/main.rs"),
+ Position::new(20, 10),
+ ),
+ work_done_progress_params: Default::default(),
+ });
+ assert!(res.to_string().contains("&str"));
+
+ server.request::<GotoTypeDefinition>(
+ GotoDefinitionParams {
+ text_document_position_params: TextDocumentPositionParams::new(
+ server.doc_id("src/main.rs"),
+ Position::new(17, 9),
+ ),
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ },
+ json!([{
+ "originSelectionRange": {
+ "end": { "character": 10, "line": 17 },
+ "start": { "character": 8, "line": 17 }
+ },
+ "targetRange": {
+ "end": { "character": 9, "line": 8 },
+ "start": { "character": 0, "line": 7 }
+ },
+ "targetSelectionRange": {
+ "end": { "character": 8, "line": 8 },
+ "start": { "character": 7, "line": 8 }
+ },
+ "targetUri": "file:///[..]src/main.rs"
+ }]),
+ );
+
+ server.request::<GotoTypeDefinition>(
+ GotoDefinitionParams {
+ text_document_position_params: TextDocumentPositionParams::new(
+ server.doc_id("src/main.rs"),
+ Position::new(18, 9),
+ ),
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ },
+ json!([{
+ "originSelectionRange": {
+ "end": { "character": 10, "line": 18 },
+ "start": { "character": 8, "line": 18 }
+ },
+ "targetRange": {
+ "end": { "character": 9, "line": 12 },
+ "start": { "character": 0, "line":11 }
+ },
+ "targetSelectionRange": {
+ "end": { "character": 8, "line": 12 },
+ "start": { "character": 7, "line": 12 }
+ },
+ "targetUri": "file:///[..]src/main.rs"
+ }]),
+ );
+}
+
+#[test]
+fn resolve_proc_macro() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r###"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+edition = "2021"
+[dependencies]
+bar = {path = "../bar"}
+
+//- /foo/src/main.rs
+use bar::Bar;
+
+#[rustc_builtin_macro]
+macro derive($item:item) {}
+trait Bar {
+ fn bar();
+}
+#[derive(Bar)]
+struct Foo {}
+fn main() {
+ Foo::bar();
+}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+edition = "2021"
+
+[lib]
+proc-macro = true
+
+//- /bar/src/lib.rs
+extern crate proc_macro;
+use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
+macro_rules! t {
+ ($n:literal) => {
+ TokenTree::from(Ident::new($n, Span::call_site()))
+ };
+ ({}) => {
+ TokenTree::from(Group::new(Delimiter::Brace, TokenStream::new()))
+ };
+ (()) => {
+ TokenTree::from(Group::new(Delimiter::Parenthesis, TokenStream::new()))
+ };
+}
+#[proc_macro_derive(Bar)]
+pub fn foo(_input: TokenStream) -> TokenStream {
+ // We hard-code the output here to avoid using any deps
+ let mut res = TokenStream::new();
+
+ // An ill-behaved proc-macro may write to stdout;
+ // we should ignore it.
+ println!("I am bad guy");
+
+ // impl Bar for Foo { fn bar() {} }
+ let mut tokens = vec![t!("impl"), t!("Bar"), t!("for"), t!("Foo")];
+ let mut fn_stream = TokenStream::new();
+ fn_stream.extend(vec![t!("fn"), t!("bar"), t!(()), t!({})]);
+ tokens.push(Group::new(Delimiter::Brace, fn_stream).into());
+ res.extend(tokens);
+ res
+}
+
+"###,
+ )
+ .with_config(serde_json::json!({
+ "cargo": {
+ "buildScripts": {
+ "enable": true
+ },
+ "noSysroot": true,
+ },
+ "procMacro": {
+ "enable": true,
+ "server": PathBuf::from(env!("CARGO_BIN_EXE_rust-analyzer")),
+ }
+ }))
+ .root("foo")
+ .root("bar")
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ let res = server.send_request::<HoverRequest>(HoverParams {
+ text_document_position_params: TextDocumentPositionParams::new(
+ server.doc_id("foo/src/main.rs"),
+ Position::new(10, 9),
+ ),
+ work_done_progress_params: Default::default(),
+ });
+ let value = res.get("contents").unwrap().get("value").unwrap().as_str().unwrap();
+
+ expect![[r#"
+
+ ```rust
+ foo::Foo
+ ```
+
+ ```rust
+ fn bar()
+ ```"#]]
+ .assert_eq(value);
+}
+
+#[test]
+fn test_will_rename_files_same_level() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let tmp_dir = TestDir::new();
+ let tmp_dir_path = tmp_dir.path().to_owned();
+ let tmp_dir_str = tmp_dir_path.to_str().unwrap();
+ let base_path = PathBuf::from(format!("file://{}", tmp_dir_str));
+
+ let code = r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+mod old_file;
+mod from_mod;
+mod to_mod;
+mod old_folder;
+fn main() {}
+
+//- /src/old_file.rs
+
+//- /src/old_folder/mod.rs
+
+//- /src/from_mod/mod.rs
+
+//- /src/to_mod/foo.rs
+
+"#;
+ let server =
+ Project::with_fixture(code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded();
+
+ // rename a file at the same level
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/old_file.rs").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/new_file.rs").to_str().unwrap().to_string(),
+ }],
+ },
+ json!({
+ "documentChanges": [
+ {
+ "textDocument": {
+ "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
+ "version": null
+ },
+ "edits": [
+ {
+ "range": {
+ "start": {
+ "line": 0,
+ "character": 4
+ },
+ "end": {
+ "line": 0,
+ "character": 12
+ }
+ },
+ "newText": "new_file"
+ }
+ ]
+ }
+ ]
+ }),
+ );
+
+ // rename a file from mod.rs to foo.rs
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/from_mod/mod.rs").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/from_mod/foo.rs").to_str().unwrap().to_string(),
+ }],
+ },
+ json!(null),
+ );
+
+ // rename a file from foo.rs to mod.rs
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/to_mod/foo.rs").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/to_mod/mod.rs").to_str().unwrap().to_string(),
+ }],
+ },
+ json!(null),
+ );
+
+ // rename a folder
+ server.request::<WillRenameFiles>(
+ RenameFilesParams {
+ files: vec![FileRename {
+ old_uri: base_path.join("src/old_folder").to_str().unwrap().to_string(),
+ new_uri: base_path.join("src/new_folder").to_str().unwrap().to_string(),
+ }],
+ },
+ json!({
+ "documentChanges": [
+ {
+ "textDocument": {
+ "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
+ "version": null
+ },
+ "edits": [
+ {
+ "range": {
+ "start": {
+ "line": 3,
+ "character": 4
+ },
+ "end": {
+ "line": 3,
+ "character": 14
+ }
+ },
+ "newText": "new_folder"
+ }
+ ]
+ }
+ ]
+ }),
+ );
+}
+
+#[test]
+fn test_exclude_config_works() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let server = Project::with_fixture(
+ r#"
+//- /foo/Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /foo/src/lib.rs
+pub fn foo() {}
+
+//- /bar/Cargo.toml
+[package]
+name = "bar"
+version = "0.0.0"
+
+//- /bar/src/lib.rs
+pub fn bar() {}
+"#,
+ )
+ .root("foo")
+ .root("bar")
+ .with_config(json!({
+ "files": {
+ "excludeDirs": ["foo", "bar"]
+ }
+ }))
+ .server()
+ .wait_until_workspace_is_loaded();
+
+ server.request::<WorkspaceSymbol>(Default::default(), json!([]));
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs
new file mode 100644
index 000000000..e6ac018a0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/sourcegen.rs
@@ -0,0 +1,80 @@
+//! Generates the `generated_features.adoc` user documentation.
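+//!
+//! Each `// Feature: ...` comment block collected from the crates is rendered
+//! roughly as below (the exact `Location` formatting comes from the
+//! `sourcegen` crate):
+//!
+//! ```text
+//! === Feature Name
+//! **Source:** <file>:<line>
+//! documentation text
+//! ```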
+
+use std::{fmt, fs, io, path::PathBuf};
+
+#[test]
+fn sourcegen_feature_docs() {
+ let features = Feature::collect().unwrap();
+ let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
+ let contents = format!(
+ "
+// Generated file, do not edit by hand, see `sourcegen_feature_docs`.
+{}
+",
+ contents.trim()
+ );
+ let dst = sourcegen::project_root().join("docs/user/generated_features.adoc");
+ fs::write(&dst, &contents).unwrap();
+}
+
+#[derive(Debug)]
+struct Feature {
+ id: String,
+ location: sourcegen::Location,
+ doc: String,
+}
+
+impl Feature {
+ fn collect() -> io::Result<Vec<Feature>> {
+ let crates_dir = sourcegen::project_root().join("crates");
+
+ let mut res = Vec::new();
+ for path in sourcegen::list_rust_files(&crates_dir) {
+ collect_file(&mut res, path)?;
+ }
+ res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
+ return Ok(res);
+
+ fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> {
+ let text = std::fs::read_to_string(&path)?;
+ let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text);
+
+ for block in comment_blocks {
+ let id = block.id;
+ if let Err(msg) = is_valid_feature_name(&id) {
+ panic!("invalid feature name: {:?}:\n {}", id, msg)
+ }
+ let doc = block.contents.join("\n");
+ let location = sourcegen::Location { file: path.clone(), line: block.line };
+ acc.push(Feature { id, location, doc })
+ }
+
+ Ok(())
+ }
+ }
+}
+
+fn is_valid_feature_name(feature: &str) -> Result<(), String> {
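+ // For example, "Go to Definition" passes, while "go to definition" and
+ // "Go To Definition" are rejected by the checks below.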
+ 'word: for word in feature.split_whitespace() {
+ for short in ["to", "and"] {
+ if word == short {
+ continue 'word;
+ }
+ }
+ for short in ["To", "And"] {
+ if word == short {
+ return Err(format!("Don't capitalize {:?}", word));
+ }
+ }
+ if !word.starts_with(char::is_uppercase) {
+ return Err(format!("Capitalize {:?}", word));
+ }
+ }
+ Ok(())
+}
+
+impl fmt::Display for Feature {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
new file mode 100644
index 000000000..4fa88c3c6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -0,0 +1,406 @@
+use std::{
+ cell::{Cell, RefCell},
+ fs,
+ path::{Path, PathBuf},
+ sync::Once,
+ time::Duration,
+};
+
+use crossbeam_channel::{after, select, Receiver};
+use lsp_server::{Connection, Message, Notification, Request};
+use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
+use project_model::ProjectManifest;
+use rust_analyzer::{config::Config, lsp_ext, main_loop};
+use serde::Serialize;
+use serde_json::{json, to_string_pretty, Value};
+use test_utils::Fixture;
+use vfs::AbsPathBuf;
+
+use crate::testdir::TestDir;
+
+pub(crate) struct Project<'a> {
+ fixture: &'a str,
+ tmp_dir: Option<TestDir>,
+ roots: Vec<PathBuf>,
+ config: serde_json::Value,
+}
+
+impl<'a> Project<'a> {
+ pub(crate) fn with_fixture(fixture: &str) -> Project<'_> {
+ Project {
+ fixture,
+ tmp_dir: None,
+ roots: vec![],
+ config: serde_json::json!({
+ "cargo": {
+ // Loading the standard library is costly, so skip it by default
+ "noSysroot": true,
+ // Can't use test binary as rustc wrapper.
+ "buildScripts": {
+ "useRustcWrapper": false
+ },
+ }
+ }),
+ }
+ }
+
+ pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Project<'a> {
+ self.tmp_dir = Some(tmp_dir);
+ self
+ }
+
+ pub(crate) fn root(mut self, path: &str) -> Project<'a> {
+ self.roots.push(path.into());
+ self
+ }
+
+ pub(crate) fn with_config(mut self, config: serde_json::Value) -> Project<'a> {
+ fn merge(dst: &mut serde_json::Value, src: serde_json::Value) {
+ match (dst, src) {
+ (Value::Object(dst), Value::Object(src)) => {
+ for (k, v) in src {
+ merge(dst.entry(k).or_insert(v.clone()), v)
+ }
+ }
+ (dst, src) => *dst = src,
+ }
+ }
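+ // For example, passing json!({"cargo": {"noSysroot": false}}) only overrides
+ // `cargo.noSysroot` in the default config above; `cargo.buildScripts` is kept.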
+ merge(&mut self.config, config);
+ self
+ }
+
+ pub(crate) fn server(self) -> Server {
+ let tmp_dir = self.tmp_dir.unwrap_or_else(TestDir::new);
+ static INIT: Once = Once::new();
+ INIT.call_once(|| {
+ tracing_subscriber::fmt()
+ .with_test_writer()
+ .with_env_filter(tracing_subscriber::EnvFilter::from_env("RA_LOG"))
+ .init();
+ profile::init_from(crate::PROFILE);
+ });
+
+ let (mini_core, proc_macros, fixtures) = Fixture::parse(self.fixture);
+ assert!(proc_macros.is_empty());
+ assert!(mini_core.is_none());
+ for entry in fixtures {
+ let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
+ fs::create_dir_all(path.parent().unwrap()).unwrap();
+ fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
+ }
+
+ let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf());
+ let mut roots =
+ self.roots.into_iter().map(|root| tmp_dir_path.join(root)).collect::<Vec<_>>();
+ if roots.is_empty() {
+ roots.push(tmp_dir_path.clone());
+ }
+ let discovered_projects = roots
+ .into_iter()
+ .map(|it| ProjectManifest::discover_single(&it).unwrap())
+ .collect::<Vec<_>>();
+
+ let mut config = Config::new(
+ tmp_dir_path,
+ lsp_types::ClientCapabilities {
+ workspace: Some(lsp_types::WorkspaceClientCapabilities {
+ did_change_watched_files: Some(
+ lsp_types::DidChangeWatchedFilesClientCapabilities {
+ dynamic_registration: Some(true),
+ },
+ ),
+ ..Default::default()
+ }),
+ text_document: Some(lsp_types::TextDocumentClientCapabilities {
+ definition: Some(lsp_types::GotoCapability {
+ link_support: Some(true),
+ ..Default::default()
+ }),
+ code_action: Some(lsp_types::CodeActionClientCapabilities {
+ code_action_literal_support: Some(
+ lsp_types::CodeActionLiteralSupport::default(),
+ ),
+ ..Default::default()
+ }),
+ hover: Some(lsp_types::HoverClientCapabilities {
+ content_format: Some(vec![lsp_types::MarkupKind::Markdown]),
+ ..Default::default()
+ }),
+ ..Default::default()
+ }),
+ window: Some(lsp_types::WindowClientCapabilities {
+ work_done_progress: Some(false),
+ ..Default::default()
+ }),
+ experimental: Some(json!({
+ "serverStatusNotification": true,
+ })),
+ ..Default::default()
+ },
+ );
+ config.discovered_projects = Some(discovered_projects);
+ config.update(self.config).expect("invalid config");
+
+ Server::new(tmp_dir, config)
+ }
+}
+
+pub(crate) fn project(fixture: &str) -> Server {
+ Project::with_fixture(fixture).server()
+}
+
+pub(crate) struct Server {
+ req_id: Cell<i32>,
+ messages: RefCell<Vec<Message>>,
+ _thread: jod_thread::JoinHandle<()>,
+ client: Connection,
+ /// XXX: remove the tempdir last
+ dir: TestDir,
+}
+
+impl Server {
+ fn new(dir: TestDir, config: Config) -> Server {
+ let (connection, client) = Connection::memory();
+
+ let _thread = jod_thread::Builder::new()
+ .name("test server".to_string())
+ .spawn(move || main_loop(config, connection).unwrap())
+ .expect("failed to spawn a thread");
+
+ Server { req_id: Cell::new(1), dir, messages: Default::default(), client, _thread }
+ }
+
+ pub(crate) fn doc_id(&self, rel_path: &str) -> TextDocumentIdentifier {
+ let path = self.dir.path().join(rel_path);
+ TextDocumentIdentifier { uri: Url::from_file_path(path).unwrap() }
+ }
+
+ pub(crate) fn notification<N>(&self, params: N::Params)
+ where
+ N: lsp_types::notification::Notification,
+ N::Params: Serialize,
+ {
+ let r = Notification::new(N::METHOD.to_string(), params);
+ self.send_notification(r)
+ }
+
+ #[track_caller]
+ pub(crate) fn request<R>(&self, params: R::Params, expected_resp: Value)
+ where
+ R: lsp_types::request::Request,
+ R::Params: Serialize,
+ {
+ let actual = self.send_request::<R>(params);
+ if let Some((expected_part, actual_part)) = find_mismatch(&expected_resp, &actual) {
+ panic!(
+ "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+ to_string_pretty(&expected_resp).unwrap(),
+ to_string_pretty(&actual).unwrap(),
+ to_string_pretty(expected_part).unwrap(),
+ to_string_pretty(actual_part).unwrap(),
+ );
+ }
+ }
+
+ pub(crate) fn send_request<R>(&self, params: R::Params) -> Value
+ where
+ R: lsp_types::request::Request,
+ R::Params: Serialize,
+ {
+ let id = self.req_id.get();
+ self.req_id.set(id.wrapping_add(1));
+
+ let r = Request::new(id.into(), R::METHOD.to_string(), params);
+ self.send_request_(r)
+ }
+ fn send_request_(&self, r: Request) -> Value {
+ let id = r.id.clone();
+ self.client.sender.send(r.clone().into()).unwrap();
+ while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {:?}", r)) {
+ match msg {
+ Message::Request(req) => {
+ if req.method == "client/registerCapability" {
+ let params = req.params.to_string();
+ if ["workspace/didChangeWatchedFiles", "textDocument/didSave"]
+ .into_iter()
+ .any(|it| params.contains(it))
+ {
+ continue;
+ }
+ }
+ panic!("unexpected request: {:?}", req)
+ }
+ Message::Notification(_) => (),
+ Message::Response(res) => {
+ assert_eq!(res.id, id);
+ if let Some(err) = res.error {
+ panic!("error response: {:#?}", err);
+ }
+ return res.result.unwrap();
+ }
+ }
+ }
+ panic!("no response for {:?}", r);
+ }
+ pub(crate) fn wait_until_workspace_is_loaded(self) -> Server {
+ self.wait_for_message_cond(1, &|msg: &Message| match msg {
+ Message::Notification(n) if n.method == "experimental/serverStatus" => {
+ let status = n
+ .clone()
+ .extract::<lsp_ext::ServerStatusParams>("experimental/serverStatus")
+ .unwrap();
+ status.quiescent
+ }
+ _ => false,
+ })
+ .unwrap_or_else(|Timeout| panic!("timeout while waiting for ws to load"));
+ self
+ }
+ fn wait_for_message_cond(
+ &self,
+ n: usize,
+ cond: &dyn Fn(&Message) -> bool,
+ ) -> Result<(), Timeout> {
+ let mut total = 0;
+ for msg in self.messages.borrow().iter() {
+ if cond(msg) {
+ total += 1
+ }
+ }
+ while total < n {
+ let msg = self.recv()?.expect("no response");
+ if cond(&msg) {
+ total += 1;
+ }
+ }
+ Ok(())
+ }
+ fn recv(&self) -> Result<Option<Message>, Timeout> {
+ let msg = recv_timeout(&self.client.receiver)?;
+ let msg = msg.map(|msg| {
+ self.messages.borrow_mut().push(msg.clone());
+ msg
+ });
+ Ok(msg)
+ }
+ fn send_notification(&self, not: Notification) {
+ self.client.sender.send(Message::Notification(not)).unwrap();
+ }
+
+ pub(crate) fn path(&self) -> &Path {
+ self.dir.path()
+ }
+}
+
+impl Drop for Server {
+ fn drop(&mut self) {
+ self.request::<Shutdown>((), Value::Null);
+ self.notification::<Exit>(());
+ }
+}
+
+struct Timeout;
+
+fn recv_timeout(receiver: &Receiver<Message>) -> Result<Option<Message>, Timeout> {
+ let timeout =
+ if cfg!(target_os = "macos") { Duration::from_secs(300) } else { Duration::from_secs(120) };
+ select! {
+ recv(receiver) -> msg => Ok(msg.ok()),
+ recv(after(timeout)) -> _ => Err(Timeout),
+ }
+}
+
+// Comparison functionality borrowed from cargo:
+
+/// Compares JSON values for approximate equality.
+/// You can use the `[..]` wildcard in strings (useful for OS-dependent things
+/// such as paths). You can use a `"{...}"` string literal as a wildcard for
+/// arbitrary nested JSON. Arrays are compared ignoring element order.
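+///
+/// A rough illustration of the wildcards (a sketch, not compiled as a doctest):
+/// ```ignore
+/// let expected = json!(["{...}", { "uri": "file:///[..]/lib.rs" }]);
+/// let actual = json!([{ "anything": 1 }, { "uri": "file:///tmp/foo/lib.rs" }]);
+/// assert!(find_mismatch(&expected, &actual).is_none());
+/// ```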
+fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
+ match (expected, actual) {
+ (Value::Number(l), Value::Number(r)) if l == r => None,
+ (Value::Bool(l), Value::Bool(r)) if l == r => None,
+ (Value::String(l), Value::String(r)) if lines_match(l, r) => None,
+ (Value::Array(l), Value::Array(r)) => {
+ if l.len() != r.len() {
+ return Some((expected, actual));
+ }
+
+ let mut l = l.iter().collect::<Vec<_>>();
+ let mut r = r.iter().collect::<Vec<_>>();
+
+ l.retain(|l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+ Some(i) => {
+ r.remove(i);
+ false
+ }
+ None => true,
+ });
+
+ if !l.is_empty() {
+ assert!(!r.is_empty());
+ Some((l[0], r[0]))
+ } else {
+ assert_eq!(r.len(), 0);
+ None
+ }
+ }
+ (Value::Object(l), Value::Object(r)) => {
+ fn sorted_values(obj: &serde_json::Map<String, Value>) -> Vec<&Value> {
+ let mut entries = obj.iter().collect::<Vec<_>>();
+ entries.sort_by_key(|it| it.0);
+ entries.into_iter().map(|(_k, v)| v).collect::<Vec<_>>()
+ }
+
+ let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+ if !same_keys {
+ return Some((expected, actual));
+ }
+
+ let l = sorted_values(l);
+ let r = sorted_values(r);
+
+ l.into_iter().zip(r).find_map(|(l, r)| find_mismatch(l, r))
+ }
+ (Value::Null, Value::Null) => None,
+ // magic string literal "{...}" acts as wildcard for any sub-JSON
+ (Value::String(l), _) if l == "{...}" => None,
+ _ => Some((expected, actual)),
+ }
+}
+
+/// Compare a line with an expected pattern.
+/// - Use `[..]` as a wildcard to match 0 or more characters on the same line
+/// (similar to `.*` in a regex).
+fn lines_match(expected: &str, actual: &str) -> bool {
+ // Let's not deal with / vs \ (windows...)
+ // First replace backslash-escaped backslashes with forward slashes
+ // which can occur in, for example, JSON output
+ let expected = expected.replace(r"\\", "/").replace('\\', "/");
+ let mut actual: &str = &actual.replace(r"\\", "/").replace('\\', "/");
+ for (i, part) in expected.split("[..]").enumerate() {
+ match actual.find(part) {
+ Some(j) => {
+ if i == 0 && j != 0 {
+ return false;
+ }
+ actual = &actual[j + part.len()..];
+ }
+ None => return false,
+ }
+ }
+ actual.is_empty() || expected.ends_with("[..]")
+}
+
+#[test]
+fn lines_match_works() {
+ assert!(lines_match("a b", "a b"));
+ assert!(lines_match("a[..]b", "a b"));
+ assert!(lines_match("a[..]", "a b"));
+ assert!(lines_match("[..]", "a b"));
+ assert!(lines_match("[..]b", "a b"));
+
+ assert!(!lines_match("[..]b", "c"));
+ assert!(!lines_match("b", "c"));
+ assert!(!lines_match("b", "cb"));
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs
new file mode 100644
index 000000000..3bec23a91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/testdir.rs
@@ -0,0 +1,75 @@
+use std::{
+ fs, io,
+ path::{Path, PathBuf},
+ sync::atomic::{AtomicUsize, Ordering},
+};
+
+pub(crate) struct TestDir {
+ path: PathBuf,
+ keep: bool,
+}
+
+impl TestDir {
+ pub(crate) fn new() -> TestDir {
+ let temp_dir = std::env::temp_dir();
+        // On macOS builders on GitHub Actions, the temp dir is a symlink, and
+        // that causes problems down the line. Specifically:
+        // * Cargo may emit a different PackageId depending on the working directory
+        // * rust-analyzer may fail to map LSP URIs to correct paths.
+        //
+        // Work around this by canonicalizing. Note that we don't want to do this
+        // on *every* OS, as on Windows `canonicalize` itself creates problems.
+ #[cfg(target_os = "macos")]
+ let temp_dir = temp_dir.canonicalize().unwrap();
+
+ let base = temp_dir.join("testdir");
+ let pid = std::process::id();
+
+ static CNT: AtomicUsize = AtomicUsize::new(0);
+ for _ in 0..100 {
+ let cnt = CNT.fetch_add(1, Ordering::Relaxed);
+ let path = base.join(format!("{}_{}", pid, cnt));
+ if path.is_dir() {
+ continue;
+ }
+ fs::create_dir_all(&path).unwrap();
+ return TestDir { path, keep: false };
+ }
+ panic!("Failed to create a temporary directory")
+ }
+ #[allow(unused)]
+ pub(crate) fn keep(mut self) -> TestDir {
+ self.keep = true;
+ self
+ }
+ pub(crate) fn path(&self) -> &Path {
+ &self.path
+ }
+}
+
+impl Drop for TestDir {
+ fn drop(&mut self) {
+ if self.keep {
+ return;
+ }
+ remove_dir_all(&self.path).unwrap_or_else(|err| {
+ panic!("failed to remove temporary directory {}: {}", self.path.display(), err)
+ })
+ }
+}
+
+#[cfg(not(windows))]
+fn remove_dir_all(path: &Path) -> io::Result<()> {
+ fs::remove_dir_all(path)
+}
+
+#[cfg(windows)]
+fn remove_dir_all(path: &Path) -> io::Result<()> {
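+    // Deletion can fail transiently on Windows (e.g. while another process still
+    // holds a handle to a file inside the directory), so retry briefly first.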
+ for _ in 0..99 {
+ if fs::remove_dir_all(path).is_ok() {
+ return Ok(());
+ }
+ std::thread::sleep(std::time::Duration::from_millis(10))
+ }
+ fs::remove_dir_all(path)
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
new file mode 100644
index 000000000..18f95925d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -0,0 +1,473 @@
+use std::{
+ collections::HashSet,
+ path::{Path, PathBuf},
+};
+
+use xshell::Shell;
+
+#[cfg(not(feature = "in-rust-tree"))]
+use xshell::cmd;
+
+#[cfg(not(feature = "in-rust-tree"))]
+#[test]
+fn check_code_formatting() {
+ let sh = &Shell::new().unwrap();
+ sh.change_dir(sourcegen::project_root());
+ sh.set_var("RUSTUP_TOOLCHAIN", "stable");
+
+ let out = cmd!(sh, "rustfmt --version").read().unwrap();
+ if !out.contains("stable") {
+ panic!(
+ "Failed to run rustfmt from toolchain 'stable'. \
+ Please run `rustup component add rustfmt --toolchain stable` to install it.",
+ )
+ }
+
+ let res = cmd!(sh, "cargo fmt -- --check").run();
+ if res.is_err() {
+ let _ = cmd!(sh, "cargo fmt").run();
+ }
+ res.unwrap()
+}
+
+#[test]
+fn check_lsp_extensions_docs() {
+ let sh = &Shell::new().unwrap();
+
+ let expected_hash = {
+ let lsp_ext_rs = sh
+ .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp_ext.rs"))
+ .unwrap();
+ stable_hash(lsp_ext_rs.as_str())
+ };
+
+ let actual_hash = {
+ let lsp_extensions_md =
+ sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap();
+ let text = lsp_extensions_md
+ .lines()
+ .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
+ .unwrap()
+ .trim();
+ u64::from_str_radix(text, 16).unwrap()
+ };
+
+ if actual_hash != expected_hash {
+ panic!(
+ "
+lsp_ext.rs was changed without touching lsp-extensions.md.
+
+Expected hash: {:x}
+Actual hash: {:x}
+
+Please adjust docs/dev/lsp-extensions.md.
+",
+ expected_hash, actual_hash
+ )
+ }
+}
+
+#[test]
+fn files_are_tidy() {
+ let sh = &Shell::new().unwrap();
+
+ let files = sourcegen::list_files(&sourcegen::project_root().join("crates"));
+
+ let mut tidy_docs = TidyDocs::default();
+ let mut tidy_marks = TidyMarks::default();
+ for path in files {
+ let extension = path.extension().unwrap_or_default().to_str().unwrap_or_default();
+ match extension {
+ "rs" => {
+ let text = sh.read_file(&path).unwrap();
+ check_todo(&path, &text);
+ check_dbg(&path, &text);
+ check_test_attrs(&path, &text);
+ check_trailing_ws(&path, &text);
+ deny_clippy(&path, &text);
+ tidy_docs.visit(&path, &text);
+ tidy_marks.visit(&path, &text);
+ }
+ "toml" => {
+ let text = sh.read_file(&path).unwrap();
+ check_cargo_toml(&path, text);
+ }
+ _ => (),
+ }
+ }
+
+ tidy_docs.finish();
+ tidy_marks.finish();
+}
+
+fn check_cargo_toml(path: &Path, text: String) {
+ let mut section = None;
+ for (line_no, text) in text.lines().enumerate() {
+ let text = text.trim();
+ if text.starts_with('[') {
+ if !text.ends_with(']') {
+ panic!(
+ "\nplease don't add comments or trailing whitespace in section lines.\n\
+ {}:{}\n",
+ path.display(),
+ line_no + 1
+ )
+ }
+ section = Some(text);
+ continue;
+ }
+ let text: String = text.split_whitespace().collect();
+ if !text.contains("path=") {
+ continue;
+ }
+ match section {
+ Some(s) if s.contains("dev-dependencies") => {
+ if text.contains("version") {
+ panic!(
+ "\ncargo internal dev-dependencies should not have a version.\n\
+ {}:{}\n",
+ path.display(),
+ line_no + 1
+ );
+ }
+ }
+ Some(s) if s.contains("dependencies") => {
+ if !text.contains("version") {
+ panic!(
+ "\ncargo internal dependencies should have a version.\n\
+ {}:{}\n",
+ path.display(),
+ line_no + 1
+ );
+ }
+ }
+ _ => {}
+ }
+ }
+}
+
+fn deny_clippy(path: &Path, text: &str) {
+ let ignore = &[
+ // The documentation in string literals may contain anything for its own purposes
+ "ide-db/src/generated/lints.rs",
+ // The tests test clippy lint hovers
+ "ide/src/hover/tests.rs",
+ // The tests test clippy lint completions
+ "ide-completion/src/tests/attribute.rs",
+ ];
+ if ignore.iter().any(|p| path.ends_with(p)) {
+ return;
+ }
+
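+    // "allow" is spelled with a `\u{61}` escape so that this check does not
+    // trip over its own source text.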
+ if text.contains("\u{61}llow(clippy") {
+ panic!(
+ "\n\nallowing lints is forbidden: {}.
+rust-analyzer intentionally doesn't check clippy on CI.
+You can allow lints globally via `xtask clippy`.
+See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
+
+",
+ path.display()
+ )
+ }
+}
+
+#[cfg(not(feature = "in-rust-tree"))]
+#[test]
+fn check_licenses() {
+ let sh = &Shell::new().unwrap();
+
+ let expected = "
+0BSD OR MIT OR Apache-2.0
+Apache-2.0
+Apache-2.0 OR BSL-1.0
+Apache-2.0 OR MIT
+Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
+Apache-2.0/MIT
+BSD-3-Clause
+BlueOak-1.0.0 OR MIT OR Apache-2.0
+CC0-1.0 OR Artistic-2.0
+ISC
+MIT
+MIT / Apache-2.0
+MIT OR Apache-2.0
+MIT OR Apache-2.0 OR Zlib
+MIT OR Zlib OR Apache-2.0
+MIT/Apache-2.0
+Unlicense/MIT
+Zlib OR Apache-2.0 OR MIT
+"
+ .lines()
+ .filter(|it| !it.is_empty())
+ .collect::<Vec<_>>();
+
+ let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
+ let mut licenses = meta
+ .split(|c| c == ',' || c == '{' || c == '}')
+ .filter(|it| it.contains(r#""license""#))
+ .map(|it| it.trim())
+ .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
+ .collect::<Vec<_>>();
+ licenses.sort_unstable();
+ licenses.dedup();
+ if licenses != expected {
+ let mut diff = String::new();
+
+ diff.push_str("New Licenses:\n");
+ for &l in licenses.iter() {
+ if !expected.contains(&l) {
+ diff += &format!(" {}\n", l)
+ }
+ }
+
+ diff.push_str("\nMissing Licenses:\n");
+ for &l in expected.iter() {
+ if !licenses.contains(&l) {
+ diff += &format!(" {}\n", l)
+ }
+ }
+
+ panic!("different set of licenses!\n{}", diff);
+ }
+ assert_eq!(licenses, expected);
+}
+
+fn check_todo(path: &Path, text: &str) {
+ let need_todo = &[
+ // This file itself obviously needs to use todo (<- like this!).
+ "tests/tidy.rs",
+ // Some of our assists generate `todo!()`.
+ "handlers/add_turbo_fish.rs",
+ "handlers/generate_function.rs",
+ "handlers/add_missing_match_arms.rs",
+ "handlers/replace_derive_with_manual_impl.rs",
+ // To support generating `todo!()` in assists, we have `expr_todo()` in
+ // `ast::make`.
+ "ast/make.rs",
+ // The documentation in string literals may contain anything for its own purposes
+ "ide-db/src/generated/lints.rs",
+ "ide-assists/src/utils/gen_trait_fn_body.rs",
+ "ide-assists/src/tests/generated.rs",
+ // The tests for missing fields
+ "ide-diagnostics/src/handlers/missing_fields.rs",
+ ];
+ if need_todo.iter().any(|p| path.ends_with(p)) {
+ return;
+ }
+ if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
+ // Generated by an assist
+ if text.contains("${0:todo!()}") {
+ return;
+ }
+
+ panic!(
+ "\nTODO markers or todo! macros should not be committed to the master branch,\n\
+ use FIXME instead\n\
+ {}\n",
+ path.display(),
+ )
+ }
+}
+
+fn check_dbg(path: &Path, text: &str) {
+ let need_dbg = &[
+ // This file itself obviously needs to use dbg.
+ "slow-tests/tidy.rs",
+ // Assists to remove `dbg!()`
+ "handlers/remove_dbg.rs",
+ // We have .dbg postfix
+ "ide-completion/src/completions/postfix.rs",
+ "ide-completion/src/completions/keyword.rs",
+ "ide-completion/src/tests/proc_macros.rs",
+ // The documentation in string literals may contain anything for its own purposes
+ "ide-completion/src/lib.rs",
+ "ide-db/src/generated/lints.rs",
+ // test for doc test for remove_dbg
+ "src/tests/generated.rs",
+ ];
+ if need_dbg.iter().any(|p| path.ends_with(p)) {
+ return;
+ }
+ if text.contains("dbg!") {
+ panic!(
+ "\ndbg! macros should not be committed to the master branch,\n\
+ {}\n",
+ path.display(),
+ )
+ }
+}
+
+fn check_test_attrs(path: &Path, text: &str) {
+ let ignore_rule =
+ "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
+ let need_ignore: &[&str] = &[
+ // This file.
+ "slow-tests/tidy.rs",
+ // Special case to run `#[ignore]` tests.
+ "ide/src/runnables.rs",
+ // A legit test which needs to be ignored, as it takes too long to run
+ // :(
+ "hir-def/src/nameres/collector.rs",
+ // Long sourcegen test to generate lint completions.
+ "ide-db/src/tests/sourcegen_lints.rs",
+ // Obviously needs ignore.
+ "ide-assists/src/handlers/toggle_ignore.rs",
+ // See above.
+ "ide-assists/src/tests/generated.rs",
+ ];
+ if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
+ panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),)
+ }
+
+ let panic_rule =
+ "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
+ let need_panic: &[&str] = &[
+ // This file.
+ "slow-tests/tidy.rs",
+ "test-utils/src/fixture.rs",
+ ];
+ if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
+ panic!(
+ "\ndon't add `#[should_panic]` tests, see:\n\n {}\n\n {}\n",
+ panic_rule,
+ path.display(),
+ )
+ }
+}
+
+fn check_trailing_ws(path: &Path, text: &str) {
+ if is_exclude_dir(path, &["test_data"]) {
+ return;
+ }
+ for (line_number, line) in text.lines().enumerate() {
+ if line.chars().last().map(char::is_whitespace) == Some(true) {
+ panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
+ }
+ }
+}
+
+#[derive(Default)]
+struct TidyDocs {
+ missing_docs: Vec<String>,
+ contains_fixme: Vec<PathBuf>,
+}
+
+impl TidyDocs {
+ fn visit(&mut self, path: &Path, text: &str) {
+ // Tests and diagnostic fixes don't need module level comments.
+ if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
+ return;
+ }
+
+ if is_exclude_file(path) {
+ return;
+ }
+
+ let first_line = match text.lines().next() {
+ Some(it) => it,
+ None => return,
+ };
+
+ if first_line.starts_with("//!") {
+ if first_line.contains("FIXME") {
+ self.contains_fixme.push(path.to_path_buf());
+ }
+ } else {
+ if text.contains("// Feature:")
+ || text.contains("// Assist:")
+ || text.contains("// Diagnostic:")
+ {
+ return;
+ }
+ self.missing_docs.push(path.display().to_string());
+ }
+
+ fn is_exclude_file(d: &Path) -> bool {
+ let file_names = ["tests.rs", "famous_defs_fixture.rs"];
+
+ d.file_name()
+ .unwrap_or_default()
+ .to_str()
+ .map(|f_n| file_names.iter().any(|name| *name == f_n))
+ .unwrap_or(false)
+ }
+ }
+
+ fn finish(self) {
+ if !self.missing_docs.is_empty() {
+ panic!(
+ "\nMissing docs strings\n\n\
+ modules:\n{}\n\n",
+ self.missing_docs.join("\n")
+ )
+ }
+
+ for path in self.contains_fixme {
+ panic!("FIXME doc in a fully-documented crate: {}", path.display())
+ }
+ }
+}
+
+fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
+ p.strip_prefix(sourcegen::project_root())
+ .unwrap()
+ .components()
+ .rev()
+ .skip(1)
+ .filter_map(|it| it.as_os_str().to_str())
+ .any(|it| dirs_to_exclude.contains(&it))
+}
+
+#[derive(Default)]
+struct TidyMarks {
+ hits: HashSet<String>,
+ checks: HashSet<String>,
+}
+
+impl TidyMarks {
+ fn visit(&mut self, _path: &Path, text: &str) {
+ find_marks(&mut self.hits, text, "hit");
+ find_marks(&mut self.checks, text, "check");
+ find_marks(&mut self.checks, text, "check_count");
+ }
+
+ fn finish(self) {
+ assert!(!self.hits.is_empty());
+
+ let diff: Vec<_> =
+ self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
+
+ if !diff.is_empty() {
+ panic!("unpaired marks: {:?}", diff)
+ }
+ }
+}
+
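+// The deprecated `SipHasher` has a specified algorithm, so the hash stays stable
+// across Rust releases (unlike `DefaultHasher`); this matters because the value
+// is recorded in docs/dev/lsp-extensions.md.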
+#[allow(deprecated)]
+fn stable_hash(text: &str) -> u64 {
+ use std::hash::{Hash, Hasher, SipHasher};
+
+ let text = text.replace('\r', "");
+ let mut hasher = SipHasher::default();
+ text.hash(&mut hasher);
+ hasher.finish()
+}
+
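+// Scans `text` for `<mark>!(identifier)` invocations (e.g. `hit!(foo)`) and
+// records each identifier in `set`.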
+fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) {
+ let mut text = text;
+ let mut prev_text = "";
+ while text != prev_text {
+ prev_text = text;
+ if let Some(idx) = text.find(mark) {
+ text = &text[idx + mark.len()..];
+ if let Some(stripped_text) = text.strip_prefix("!(") {
+ text = stripped_text.trim_start();
+ if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) {
+ let mark_text = &text[..idx2];
+ set.insert(mark_text.to_string());
+ text = &text[idx2..];
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
new file mode 100644
index 000000000..a84110d94
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "sourcegen"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+xshell = "0.2.2"
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
new file mode 100644
index 000000000..ce0224ec7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
@@ -0,0 +1,203 @@
+//! rust-analyzer relies heavily on source code generation.
+//!
+//! Things like feature documentation or assist tests are implemented by
+//! processing rust-analyzer's own source code and generating the appropriate
+//! output. See `sourcegen_` tests in various crates.
+//!
+//! This crate contains utilities to make this kind of source-gen easy.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, fs, mem,
+ path::{Path, PathBuf},
+};
+
+use xshell::{cmd, Shell};
+
+pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
+ let mut res = list_files(dir);
+ res.retain(|it| {
+ it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs")
+ });
+ res
+}
+
+pub fn list_files(dir: &Path) -> Vec<PathBuf> {
+ let mut res = Vec::new();
+ let mut work = vec![dir.to_path_buf()];
+ while let Some(dir) = work.pop() {
+ for entry in dir.read_dir().unwrap() {
+ let entry = entry.unwrap();
+ let file_type = entry.file_type().unwrap();
+ let path = entry.path();
+ let is_hidden =
+ path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
+ if !is_hidden {
+ if file_type.is_dir() {
+ work.push(path);
+ } else if file_type.is_file() {
+ res.push(path);
+ }
+ }
+ }
+ }
+ res
+}
+
+#[derive(Clone)]
+pub struct CommentBlock {
+ pub id: String,
+ pub line: usize,
+ pub contents: Vec<String>,
+ is_doc: bool,
+}
+
+impl CommentBlock {
+ pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
+ assert!(tag.starts_with(char::is_uppercase));
+
+ let tag = format!("{}:", tag);
+ // Would be nice if we had `.retain_mut` here!
+ CommentBlock::extract_untagged(text)
+ .into_iter()
+ .filter_map(|mut block| {
+ let first = block.contents.remove(0);
+ first.strip_prefix(&tag).map(|id| {
+ if block.is_doc {
+ panic!(
+ "Use plain (non-doc) comments with tags like {}:\n {}",
+ tag, first
+ );
+ }
+
+ block.id = id.trim().to_string();
+ block
+ })
+ })
+ .collect()
+ }
+
+ pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
+ let mut res = Vec::new();
+
+ let lines = text.lines().map(str::trim_start);
+
+ let dummy_block =
+ CommentBlock { id: String::new(), line: 0, contents: Vec::new(), is_doc: false };
+ let mut block = dummy_block.clone();
+ for (line_num, line) in lines.enumerate() {
+ match line.strip_prefix("//") {
+ Some(mut contents) => {
+ if let Some('/' | '!') = contents.chars().next() {
+ contents = &contents[1..];
+ block.is_doc = true;
+ }
+ if let Some(' ') = contents.chars().next() {
+ contents = &contents[1..];
+ }
+ block.contents.push(contents.to_string());
+ }
+ None => {
+ if !block.contents.is_empty() {
+ let block = mem::replace(&mut block, dummy_block.clone());
+ res.push(block);
+ }
+ block.line = line_num + 2;
+ }
+ }
+ }
+ if !block.contents.is_empty() {
+ res.push(block);
+ }
+ res
+ }
+}
+
+#[derive(Debug)]
+pub struct Location {
+ pub file: PathBuf,
+ pub line: usize,
+}
+
+impl fmt::Display for Location {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
+ let path = path.replace('\\', "/");
+ let name = self.file.file_name().unwrap();
+ write!(
+ f,
+ "https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}[{}]",
+ path,
+ self.line,
+ name.to_str().unwrap()
+ )
+ }
+}
+
+fn ensure_rustfmt(sh: &Shell) {
+ let version = cmd!(sh, "rustfmt --version").read().unwrap_or_default();
+ if !version.contains("stable") {
+ panic!(
+ "Failed to run rustfmt from toolchain 'stable'. \
+ Please run `rustup component add rustfmt --toolchain stable` to install it.",
+ );
+ }
+}
+
+pub fn reformat(text: String) -> String {
+ let sh = Shell::new().unwrap();
+ sh.set_var("RUSTUP_TOOLCHAIN", "stable");
+ ensure_rustfmt(&sh);
+ let rustfmt_toml = project_root().join("rustfmt.toml");
+ let mut stdout = cmd!(sh, "rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
+ .stdin(text)
+ .read()
+ .unwrap();
+ if !stdout.ends_with('\n') {
+ stdout.push('\n');
+ }
+ stdout
+}
+
+pub fn add_preamble(generator: &'static str, mut text: String) -> String {
+ let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
+ text.insert_str(0, &preamble);
+ text
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+ if let Ok(old_contents) = fs::read_to_string(file) {
+ if normalize_newlines(&old_contents) == normalize_newlines(contents) {
+ // File is already up to date.
+ return;
+ }
+ }
+
+ let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+ eprintln!(
+ "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+ display_path.display()
+ );
+ if std::env::var("CI").is_ok() {
+ eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
+ }
+ if let Some(parent) = file.parent() {
+ let _ = fs::create_dir_all(parent);
+ }
+ fs::write(file, contents).unwrap();
+ panic!("some file was not up to date and has been updated, simply re-run the tests");
+}
+
+fn normalize_newlines(s: &str) -> String {
+ s.replace("\r\n", "\n")
+}
+
+pub fn project_root() -> PathBuf {
+ let dir = env!("CARGO_MANIFEST_DIR");
+ let res = PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned();
+ assert!(res.join("triagebot.toml").exists());
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
new file mode 100644
index 000000000..092b99ae5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "stdx"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+libc = "0.2.126"
+backtrace = { version = "0.3.65", optional = true }
+always-assert = { version = "0.1.2", features = ["log"] }
+# Think twice before adding anything here
+
+[target.'cfg(windows)'.dependencies]
+miow = "0.4.0"
+winapi = { version = "0.3.9", features = ["winerror"] }
+
+[features]
+# Uncomment to enable for the whole crate graph
+# default = [ "backtrace" ]
diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
new file mode 100644
index 000000000..b4d45206c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
@@ -0,0 +1,247 @@
+//! Missing batteries for standard libraries.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::process::Command;
+use std::{cmp::Ordering, ops, time::Instant};
+use std::{io as sio, iter};
+
+mod macros;
+pub mod process;
+pub mod panic_context;
+pub mod non_empty_vec;
+
+pub use always_assert::{always, never};
+
+#[inline(always)]
+pub fn is_ci() -> bool {
+ option_env!("CI").is_some()
+}
+
+#[must_use]
+pub fn timeit(label: &'static str) -> impl Drop {
+ let start = Instant::now();
+ defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
+}
+
+/// Prints backtrace to stderr, useful for debugging.
+pub fn print_backtrace() {
+ #[cfg(feature = "backtrace")]
+ eprintln!("{:?}", backtrace::Backtrace::new());
+
+ #[cfg(not(feature = "backtrace"))]
+ eprintln!(
+ r#"Enable the backtrace feature.
+Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`.
+"#
+ );
+}
+
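+// E.g. `FooBar` -> `foo_bar` (or `FOO_BAR` for the upper variant); leading
+// underscores are preserved.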
+pub fn to_lower_snake_case(s: &str) -> String {
+ to_snake_case(s, char::to_ascii_lowercase)
+}
+pub fn to_upper_snake_case(s: &str) -> String {
+ to_snake_case(s, char::to_ascii_uppercase)
+}
+
+// Code partially taken from rust/compiler/rustc_lint/src/nonstandard_style.rs
+// commit: 9626f2b
+fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String {
+ let mut words = vec![];
+
+ // Preserve leading underscores
+ s = s.trim_start_matches(|c: char| {
+ if c == '_' {
+ words.push(String::new());
+ true
+ } else {
+ false
+ }
+ });
+
+ for s in s.split('_') {
+ let mut last_upper = false;
+ let mut buf = String::new();
+
+ if s.is_empty() {
+ continue;
+ }
+
+ for ch in s.chars() {
+ if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper {
+ words.push(buf);
+ buf = String::new();
+ }
+
+ last_upper = ch.is_uppercase();
+ buf.extend(iter::once(change_case(&ch)));
+ }
+
+ words.push(buf);
+ }
+
+ words.join("_")
+}
+
+pub fn replace(buf: &mut String, from: char, to: &str) {
+ if !buf.contains(from) {
+ return;
+ }
+ // FIXME: do this in place.
+ *buf = buf.replace(from, to);
+}
+
+pub fn trim_indent(mut text: &str) -> String {
+ if text.starts_with('\n') {
+ text = &text[1..];
+ }
+ let indent = text
+ .lines()
+ .filter(|it| !it.trim().is_empty())
+ .map(|it| it.len() - it.trim_start().len())
+ .min()
+ .unwrap_or(0);
+ text.split_inclusive('\n')
+ .map(
+ |line| {
+ if line.len() <= indent {
+ line.trim_start_matches(' ')
+ } else {
+ &line[indent..]
+ }
+ },
+ )
+ .collect()
+}
+
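+/// For a slice sorted with respect to `key`, returns the contiguous range of
+/// indices whose elements compare `Ordering::Equal`.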
+pub fn equal_range_by<T, F>(slice: &[T], mut key: F) -> ops::Range<usize>
+where
+ F: FnMut(&T) -> Ordering,
+{
+ let start = slice.partition_point(|it| key(it) == Ordering::Less);
+ let len = slice[start..].partition_point(|it| key(it) == Ordering::Equal);
+ start..start + len
+}
+
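+/// Runs `f` when the returned guard is dropped (a minimal scope guard).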
+#[must_use]
+pub fn defer<F: FnOnce()>(f: F) -> impl Drop {
+ struct D<F: FnOnce()>(Option<F>);
+ impl<F: FnOnce()> Drop for D<F> {
+ fn drop(&mut self) {
+ if let Some(f) = self.0.take() {
+ f();
+ }
+ }
+ }
+ D(Some(f))
+}
+
+/// A [`std::process::Child`] wrapper that will kill the child on drop.
+#[cfg_attr(not(target_arch = "wasm32"), repr(transparent))]
+#[derive(Debug)]
+pub struct JodChild(pub std::process::Child);
+
+impl ops::Deref for JodChild {
+ type Target = std::process::Child;
+ fn deref(&self) -> &std::process::Child {
+ &self.0
+ }
+}
+
+impl ops::DerefMut for JodChild {
+ fn deref_mut(&mut self) -> &mut std::process::Child {
+ &mut self.0
+ }
+}
+
+impl Drop for JodChild {
+ fn drop(&mut self) {
+ let _ = self.0.kill();
+ let _ = self.0.wait();
+ }
+}
+
+impl JodChild {
+ pub fn spawn(mut command: Command) -> sio::Result<Self> {
+ command.spawn().map(Self)
+ }
+
+ pub fn into_inner(self) -> std::process::Child {
+ if cfg!(target_arch = "wasm32") {
+ panic!("no processes on wasm");
+ }
+ // SAFETY: repr transparent, except on WASM
+ unsafe { std::mem::transmute::<JodChild, std::process::Child>(self) }
+ }
+}
+
+// feature: iter_order_by
+// Iterator::eq_by
+pub fn iter_eq_by<I, I2, F>(this: I2, other: I, mut eq: F) -> bool
+where
+ I: IntoIterator,
+ I2: IntoIterator,
+ F: FnMut(I2::Item, I::Item) -> bool,
+{
+ let mut other = other.into_iter();
+ let mut this = this.into_iter();
+
+ loop {
+ let x = match this.next() {
+ None => return other.next().is_none(),
+ Some(val) => val,
+ };
+
+ let y = match other.next() {
+ None => return false,
+ Some(val) => val,
+ };
+
+ if !eq(x, y) {
+ return false;
+ }
+ }
+}
+
+/// Returns all final segments of the argument, longest first.
+pub fn slice_tails<T>(this: &[T]) -> impl Iterator<Item = &[T]> {
+ (0..this.len()).map(|i| &this[i..])
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_trim_indent() {
+ assert_eq!(trim_indent(""), "");
+ assert_eq!(
+ trim_indent(
+ "
+ hello
+ world
+"
+ ),
+ "hello\nworld\n"
+ );
+ assert_eq!(
+ trim_indent(
+ "
+ hello
+ world"
+ ),
+ "hello\nworld"
+ );
+ assert_eq!(trim_indent(" hello\n world\n"), "hello\nworld\n");
+ assert_eq!(
+ trim_indent(
+ "
+ fn main() {
+ return 92;
+ }
+ "
+ ),
+ "fn main() {\n return 92;\n}\n"
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/macros.rs b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
new file mode 100644
index 000000000..d91fc690c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
@@ -0,0 +1,47 @@
+//! Convenience macros.
+
+#[macro_export]
+macro_rules! eprintln {
+ ($($tt:tt)*) => {{
+ if $crate::is_ci() {
+ panic!("Forgot to remove debug-print?")
+ }
+ std::eprintln!($($tt)*)
+ }}
+}
+
+/// Appends formatted string to a `String`.
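+///
+/// E.g. `format_to!(buf, "{} items", n)`; the `write!` result is ignored, as
+/// writing to a `String` cannot fail.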
+#[macro_export]
+macro_rules! format_to {
+ ($buf:expr) => ();
+ ($buf:expr, $lit:literal $($arg:tt)*) => {
+ { use ::std::fmt::Write as _; let _ = ::std::write!($buf, $lit $($arg)*); }
+ };
+}
+
+/// Generates `From` impls for `Enum E { Foo(Foo), Bar(Bar) }` enums
+///
+/// # Example
+///
+/// ```rust
+/// impl_from!(Struct, Union, Enum for Adt);
+/// ```
+#[macro_export]
+macro_rules! impl_from {
+ ($($variant:ident $(($($sub_variant:ident),*))?),* for $enum:ident) => {
+ $(
+ impl From<$variant> for $enum {
+ fn from(it: $variant) -> $enum {
+ $enum::$variant(it)
+ }
+ }
+ $($(
+ impl From<$sub_variant> for $enum {
+ fn from(it: $sub_variant) -> $enum {
+ $enum::$variant($variant::$sub_variant(it))
+ }
+ }
+ )*)?
+ )*
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs b/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
new file mode 100644
index 000000000..342194c78
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
@@ -0,0 +1,39 @@
+//! See [`NonEmptyVec`].
+
+/// A [`Vec`] that is guaranteed to contain at least one element.
+pub struct NonEmptyVec<T> {
+ first: T,
+ rest: Vec<T>,
+}
+
+impl<T> NonEmptyVec<T> {
+ #[inline]
+ pub fn new(first: T) -> Self {
+ NonEmptyVec { first, rest: Vec::new() }
+ }
+
+ #[inline]
+ pub fn last_mut(&mut self) -> &mut T {
+ self.rest.last_mut().unwrap_or(&mut self.first)
+ }
+
+ #[inline]
+ pub fn pop(&mut self) -> Option<T> {
+ self.rest.pop()
+ }
+
+ #[inline]
+ pub fn push(&mut self, value: T) {
+ self.rest.push(value)
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ 1 + self.rest.len()
+ }
+
+ #[inline]
+ pub fn into_last(mut self) -> T {
+ self.rest.pop().unwrap_or(self.first)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
new file mode 100644
index 000000000..f8fafc5a6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
@@ -0,0 +1,49 @@
+//! A micro-crate to enhance panic messages with context info.
+//!
+//! FIXME: upstream to <https://github.com/kriomant/panic-context> ?
+
+use std::{cell::RefCell, panic, sync::Once};
+
+pub fn enter(context: String) -> PanicContext {
+ static ONCE: Once = Once::new();
+ ONCE.call_once(PanicContext::init);
+
+ with_ctx(|ctx| ctx.push(context));
+ PanicContext { _priv: () }
+}
+
+#[must_use]
+pub struct PanicContext {
+ _priv: (),
+}
+
+impl PanicContext {
+ fn init() {
+ let default_hook = panic::take_hook();
+ let hook = move |panic_info: &panic::PanicInfo<'_>| {
+ with_ctx(|ctx| {
+ if !ctx.is_empty() {
+ eprintln!("Panic context:");
+ for frame in ctx.iter() {
+ eprintln!("> {}\n", frame);
+ }
+ }
+ default_hook(panic_info);
+ });
+ };
+ panic::set_hook(Box::new(hook));
+ }
+}
+
+impl Drop for PanicContext {
+ fn drop(&mut self) {
+ with_ctx(|ctx| assert!(ctx.pop().is_some()));
+ }
+}
+
+fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
+ thread_local! {
+ static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
+ }
+ CTX.with(|ctx| f(&mut *ctx.borrow_mut()));
+}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs
new file mode 100644
index 000000000..e5aa34365
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs
@@ -0,0 +1,267 @@
+//! Read both stdout and stderr of child without deadlocks.
+//!
+//! <https://github.com/rust-lang/cargo/blob/905af549966f23a9288e9993a85d1249a5436556/crates/cargo-util/src/read2.rs>
+//! <https://github.com/rust-lang/cargo/blob/58a961314437258065e23cb6316dfc121d96fb71/crates/cargo-util/src/process_builder.rs#L231>
+
+use std::{
+ io,
+ process::{ChildStderr, ChildStdout, Command, Output, Stdio},
+};
+
+use crate::JodChild;
+
+pub fn streaming_output(
+ out: ChildStdout,
+ err: ChildStderr,
+ on_stdout_line: &mut dyn FnMut(&str),
+ on_stderr_line: &mut dyn FnMut(&str),
+) -> io::Result<(Vec<u8>, Vec<u8>)> {
+ let mut stdout = Vec::new();
+ let mut stderr = Vec::new();
+
+ imp::read2(out, err, &mut |is_out, data, eof| {
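+        // Forward only complete lines to the callbacks; on EOF, flush whatever
+        // remains in the buffer.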
+ let idx = if eof {
+ data.len()
+ } else {
+ match data.iter().rposition(|b| *b == b'\n') {
+ Some(i) => i + 1,
+ None => return,
+ }
+ };
+ {
+ // scope for new_lines
+ let new_lines = {
+ let dst = if is_out { &mut stdout } else { &mut stderr };
+ let start = dst.len();
+ let data = data.drain(..idx);
+ dst.extend(data);
+ &dst[start..]
+ };
+ for line in String::from_utf8_lossy(new_lines).lines() {
+ if is_out {
+ on_stdout_line(line);
+ } else {
+ on_stderr_line(line);
+ }
+ }
+ }
+ })?;
+
+ Ok((stdout, stderr))
+}
+
+pub fn spawn_with_streaming_output(
+ mut cmd: Command,
+ on_stdout_line: &mut dyn FnMut(&str),
+ on_stderr_line: &mut dyn FnMut(&str),
+) -> io::Result<Output> {
+ let cmd = cmd.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
+
+ let mut child = JodChild(cmd.spawn()?);
+ let (stdout, stderr) = streaming_output(
+ child.stdout.take().unwrap(),
+ child.stderr.take().unwrap(),
+ on_stdout_line,
+ on_stderr_line,
+ )?;
+ let status = child.wait()?;
+ Ok(Output { status, stdout, stderr })
+}
+
+#[cfg(unix)]
+mod imp {
+ use std::{
+ io::{self, prelude::*},
+ mem,
+ os::unix::prelude::*,
+ process::{ChildStderr, ChildStdout},
+ };
+
+ pub(crate) fn read2(
+ mut out_pipe: ChildStdout,
+ mut err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ unsafe {
+ libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK);
+ }
+
+ let mut out_done = false;
+ let mut err_done = false;
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() };
+ fds[0].fd = out_pipe.as_raw_fd();
+ fds[0].events = libc::POLLIN;
+ fds[1].fd = err_pipe.as_raw_fd();
+ fds[1].events = libc::POLLIN;
+ let mut nfds = 2;
+ let mut errfd = 1;
+
+ while nfds > 0 {
+            // wait for either pipe to become readable using `poll`
+ let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) };
+ if r == -1 {
+ let err = io::Error::last_os_error();
+ if err.kind() == io::ErrorKind::Interrupted {
+ continue;
+ }
+ return Err(err);
+ }
+
+ // Read as much as we can from each pipe, ignoring EWOULDBLOCK or
+ // EAGAIN. If we hit EOF, then this will happen because the underlying
+ // reader will return Ok(0), in which case we'll see `Ok` ourselves. In
+ // this case we flip the other fd back into blocking mode and read
+ // whatever's leftover on that file descriptor.
+ let handle = |res: io::Result<_>| match res {
+ Ok(_) => Ok(true),
+ Err(e) => {
+ if e.kind() == io::ErrorKind::WouldBlock {
+ Ok(false)
+ } else {
+ Err(e)
+ }
+ }
+ };
+ if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? {
+ err_done = true;
+ nfds -= 1;
+ }
+ data(false, &mut err, err_done);
+ if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? {
+ out_done = true;
+ fds[0].fd = err_pipe.as_raw_fd();
+ errfd = 0;
+ nfds -= 1;
+ }
+ data(true, &mut out, out_done);
+ }
+ Ok(())
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::{
+ io,
+ os::windows::prelude::*,
+ process::{ChildStderr, ChildStdout},
+ slice,
+ };
+
+ use miow::{
+ iocp::{CompletionPort, CompletionStatus},
+ pipe::NamedPipe,
+ Overlapped,
+ };
+ use winapi::shared::winerror::ERROR_BROKEN_PIPE;
+
+ struct Pipe<'a> {
+ dst: &'a mut Vec<u8>,
+ overlapped: Overlapped,
+ pipe: NamedPipe,
+ done: bool,
+ }
+
+ pub(crate) fn read2(
+ out_pipe: ChildStdout,
+ err_pipe: ChildStderr,
+ data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ let mut out = Vec::new();
+ let mut err = Vec::new();
+
+ let port = CompletionPort::new(1)?;
+ port.add_handle(0, &out_pipe)?;
+ port.add_handle(1, &err_pipe)?;
+
+ unsafe {
+ let mut out_pipe = Pipe::new(out_pipe, &mut out);
+ let mut err_pipe = Pipe::new(err_pipe, &mut err);
+
+ out_pipe.read()?;
+ err_pipe.read()?;
+
+ let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
+
+ while !out_pipe.done || !err_pipe.done {
+ for status in port.get_many(&mut status, None)? {
+ if status.token() == 0 {
+ out_pipe.complete(status);
+ data(true, out_pipe.dst, out_pipe.done);
+ out_pipe.read()?;
+ } else {
+ err_pipe.complete(status);
+ data(false, err_pipe.dst, err_pipe.done);
+ err_pipe.read()?;
+ }
+ }
+ }
+
+ Ok(())
+ }
+ }
+
+ impl<'a> Pipe<'a> {
+ unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
+ Pipe {
+ dst,
+ pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
+ overlapped: Overlapped::zero(),
+ done: false,
+ }
+ }
+
+ unsafe fn read(&mut self) -> io::Result<()> {
+ let dst = slice_to_end(self.dst);
+ match self.pipe.read_overlapped(dst, self.overlapped.raw()) {
+ Ok(_) => Ok(()),
+ Err(e) => {
+ if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) {
+ self.done = true;
+ Ok(())
+ } else {
+ Err(e)
+ }
+ }
+ }
+ }
+
+ unsafe fn complete(&mut self, status: &CompletionStatus) {
+ let prev = self.dst.len();
+ self.dst.set_len(prev + status.bytes_transferred() as usize);
+ if status.bytes_transferred() == 0 {
+ self.done = true;
+ }
+ }
+ }
+
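+    // Returns the uninitialized spare capacity of `v` as a writable slice; the
+    // caller later bumps the length via `set_len` once bytes have been written.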
+ unsafe fn slice_to_end(v: &mut Vec<u8>) -> &mut [u8] {
+ if v.capacity() == 0 {
+ v.reserve(16);
+ }
+ if v.capacity() == v.len() {
+ v.reserve(1);
+ }
+ slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len())
+ }
+}
+
+#[cfg(target_arch = "wasm32")]
+mod imp {
+ use std::{
+ io,
+ process::{ChildStderr, ChildStdout},
+ };
+
+ pub(crate) fn read2(
+ _out_pipe: ChildStdout,
+ _err_pipe: ChildStderr,
+ _data: &mut dyn FnMut(bool, &mut Vec<u8>, bool),
+ ) -> io::Result<()> {
+ panic!("no processes on wasm")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
new file mode 100644
index 000000000..0e2dec386
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -0,0 +1,39 @@
+[package]
+name = "syntax"
+version = "0.0.0"
+description = "Comment and whitespace preserving parser for the Rust language"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.3"
+rowan = "0.15.8"
+rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+rustc-hash = "1.1.0"
+once_cell = "1.12.0"
+indexmap = "1.9.1"
+smol_str = "0.1.23"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+[dev-dependencies]
+rayon = "1.5.3"
+expect-test = "1.4.0"
+proc-macro2 = "1.0.39"
+quote = "1.0.20"
+ungrammar = "1.16.1"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
new file mode 100644
index 000000000..ba2f515b0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
@@ -0,0 +1,27 @@
+
+[package]
+name = "syntax-fuzz"
+version = "0.0.1"
+publish = false
+edition = "2021"
+rust-version = "1.57"
+
+[package.metadata]
+cargo-fuzz = true
+
+[dependencies]
+syntax = { path = "..", version = "0.0.0" }
+text_edit = { path = "../../text_edit", version = "0.0.0" }
+libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" }
+
+# Prevent this from interfering with workspaces
+[workspace]
+members = ["."]
+
+[[bin]]
+name = "parser"
+path = "fuzz_targets/parser.rs"
+
+[[bin]]
+name = "reparse"
+path = "fuzz_targets/reparse.rs"
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs
new file mode 100644
index 000000000..f80e13002
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs
@@ -0,0 +1,11 @@
+//! Fuzzing for from-scratch parsing.
+
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+use syntax::fuzz::check_parser;
+
+fuzz_target!(|data: &[u8]| {
+ if let Ok(text) = std::str::from_utf8(data) {
+ check_parser(text)
+ }
+});
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs
new file mode 100644
index 000000000..f865ce8d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs
@@ -0,0 +1,11 @@
+//! Fuzzing for incremental parsing.
+
+#![no_main]
+use libfuzzer_sys::fuzz_target;
+use syntax::fuzz::CheckReparse;
+
+fuzz_target!(|data: &[u8]| {
+ if let Some(check) = CheckReparse::from_data(data) {
+ check.run();
+ }
+});
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
new file mode 100644
index 000000000..62aa47839
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -0,0 +1,667 @@
+// Rust Un-Grammar.
+//
+// This grammar specifies the structure of Rust's concrete syntax tree.
+// It does not specify parsing rules (ambiguities, precedence, etc are out of scope).
+// Tokens are processed -- contextual keywords are recognised, compound operators glued.
+//
+// Legend:
+//
+// // -- comment
+// Name = -- non-terminal definition
+// 'ident' -- token (terminal)
+// A B -- sequence
+// A | B -- alternation
+// A* -- zero or more repetition
+// A? -- zero or one repetition
+// (A) -- same as A
+// label:A -- suggested name for field of AST node
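+//
+// For example, `RetType = '->' Type` below describes a node consisting of a
+// `->` token followed by a single `Type` child.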
+
+//*************************//
+// Names, Paths and Macros //
+//*************************//
+
+Name =
+ 'ident' | 'self'
+
+NameRef =
+ 'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self'
+
+Lifetime =
+ 'lifetime_ident'
+
+Path =
+ (qualifier:Path '::')? segment:PathSegment
+
+PathSegment =
+ '::'? NameRef
+| NameRef GenericArgList?
+| NameRef ParamList RetType?
+| '<' PathType ('as' PathType)? '>'
+
+GenericArgList =
+ '::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
+
+GenericArg =
+ TypeArg
+| AssocTypeArg
+| LifetimeArg
+| ConstArg
+
+TypeArg =
+ Type
+
+AssocTypeArg =
+ NameRef GenericParamList? (':' TypeBoundList | ('=' Type | ConstArg))
+
+LifetimeArg =
+ Lifetime
+
+ConstArg =
+ Expr
+
+MacroCall =
+ Attr* Path '!' TokenTree ';'?
+
+TokenTree =
+ '(' ')'
+| '{' '}'
+| '[' ']'
+
+MacroItems =
+ Item*
+
+MacroStmts =
+ statements:Stmt*
+ Expr?
+
+//*************************//
+// Items //
+//*************************//
+
+SourceFile =
+ 'shebang'?
+ Attr*
+ Item*
+
+Item =
+ Const
+| Enum
+| ExternBlock
+| ExternCrate
+| Fn
+| Impl
+| MacroCall
+| MacroRules
+| MacroDef
+| Module
+| Static
+| Struct
+| Trait
+| TypeAlias
+| Union
+| Use
+
+MacroRules =
+ Attr* Visibility?
+ 'macro_rules' '!' Name
+ TokenTree
+
+MacroDef =
+ Attr* Visibility?
+ 'macro' Name args:TokenTree?
+ body:TokenTree
+
+Module =
+ Attr* Visibility?
+ 'mod' Name
+ (ItemList | ';')
+
+ItemList =
+ '{' Attr* Item* '}'
+
+ExternCrate =
+ Attr* Visibility?
+ 'extern' 'crate' NameRef Rename? ';'
+
+Rename =
+ 'as' (Name | '_')
+
+Use =
+ Attr* Visibility?
+ 'use' UseTree ';'
+
+UseTree =
+ (Path? '::')? ('*' | UseTreeList)
+| Path Rename?
+
+UseTreeList =
+ '{' (UseTree (',' UseTree)* ','?)? '}'
+
+Fn =
+ Attr* Visibility?
+ 'default'? 'const'? 'async'? 'unsafe'? Abi?
+ 'fn' Name GenericParamList? ParamList RetType? WhereClause?
+ (body:BlockExpr | ';')
+
+Abi =
+ 'extern' 'string'?
+
+ParamList =
+ '('(
+ SelfParam
+ | (SelfParam ',')? (Param (',' Param)* ','?)?
+ )')'
+| '|' (Param (',' Param)* ','?)? '|'
+
+SelfParam =
+ Attr* (
+ ('&' Lifetime?)? 'mut'? Name
+ | 'mut'? Name ':' Type
+ )
+
+Param =
+ Attr* (
+ Pat (':' Type)?
+ | Type
+ | '...'
+ )
+
+RetType =
+ '->' Type
+
+TypeAlias =
+ Attr* Visibility?
+ 'default'?
+ 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
+ ('=' Type)? ';'
+
+Struct =
+ Attr* Visibility?
+ 'struct' Name GenericParamList? (
+ WhereClause? (RecordFieldList | ';')
+ | TupleFieldList WhereClause? ';'
+ )
+
+RecordFieldList =
+ '{' fields:(RecordField (',' RecordField)* ','?)? '}'
+
+RecordField =
+ Attr* Visibility?
+ Name ':' Type
+
+TupleFieldList =
+ '(' fields:(TupleField (',' TupleField)* ','?)? ')'
+
+TupleField =
+ Attr* Visibility?
+ Type
+
+FieldList =
+ RecordFieldList
+| TupleFieldList
+
+Enum =
+ Attr* Visibility?
+ 'enum' Name GenericParamList? WhereClause?
+ VariantList
+
+VariantList =
+ '{' (Variant (',' Variant)* ','?)? '}'
+
+Variant =
+ Attr* Visibility?
+ Name FieldList? ('=' Expr)?
+
+Union =
+ Attr* Visibility?
+ 'union' Name GenericParamList? WhereClause?
+ RecordFieldList
+
+// A Data Type.
+//
+// Not used directly in the grammar, but handy to have anyway.
+Adt =
+ Enum
+| Struct
+| Union
+
+Const =
+ Attr* Visibility?
+ 'default'?
+ 'const' (Name | '_') ':' Type
+ ('=' body:Expr)? ';'
+
+Static =
+ Attr* Visibility?
+ 'static' 'mut'? Name ':' Type
+ ('=' body:Expr)? ';'
+
+Trait =
+ Attr* Visibility?
+ 'unsafe'? 'auto'?
+ 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
+ AssocItemList
+
+AssocItemList =
+ '{' Attr* AssocItem* '}'
+
+AssocItem =
+ Const
+| Fn
+| MacroCall
+| TypeAlias
+
+Impl =
+ Attr* Visibility?
+ 'default'? 'unsafe'?
+ 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause?
+ AssocItemList
+
+ExternBlock =
+ Attr* 'unsafe'? Abi ExternItemList
+
+ExternItemList =
+ '{' Attr* ExternItem* '}'
+
+ExternItem =
+ Fn
+| MacroCall
+| Static
+| TypeAlias
+
+GenericParamList =
+ '<' (GenericParam (',' GenericParam)* ','?)? '>'
+
+GenericParam =
+ ConstParam
+| LifetimeParam
+| TypeParam
+
+TypeParam =
+ Attr* Name (':' TypeBoundList?)?
+ ('=' default_type:Type)?
+
+ConstParam =
+ Attr* 'const' Name ':' Type
+ ('=' default_val:Expr)?
+
+LifetimeParam =
+ Attr* Lifetime (':' TypeBoundList?)?
+
+WhereClause =
+ 'where' predicates:(WherePred (',' WherePred)* ','?)
+
+WherePred =
+ ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
+
+Visibility =
+ 'pub' ('(' 'in'? Path ')')?
+
+Attr =
+ '#' '!'? '[' Meta ']'
+
+Meta =
+ Path ('=' Expr | TokenTree)?
+
+//****************************//
+// Statements and Expressions //
+//****************************//
+
+Stmt =
+ ';'
+| ExprStmt
+| Item
+| LetStmt
+
+LetStmt =
+ Attr* 'let' Pat (':' Type)?
+ '=' initializer:Expr
+ LetElse?
+ ';'
+
+LetElse =
+ 'else' BlockExpr
+
+ExprStmt =
+ Expr ';'?
+
+Expr =
+ ArrayExpr
+| AwaitExpr
+| BinExpr
+| BlockExpr
+| BoxExpr
+| BreakExpr
+| CallExpr
+| CastExpr
+| ClosureExpr
+| ContinueExpr
+| FieldExpr
+| ForExpr
+| IfExpr
+| IndexExpr
+| Literal
+| LoopExpr
+| MacroExpr
+| MacroStmts
+| MatchExpr
+| MethodCallExpr
+| ParenExpr
+| PathExpr
+| PrefixExpr
+| RangeExpr
+| RecordExpr
+| RefExpr
+| ReturnExpr
+| TryExpr
+| TupleExpr
+| WhileExpr
+| YieldExpr
+| LetExpr
+| UnderscoreExpr
+
+MacroExpr =
+ MacroCall
+
+Literal =
+ Attr* value:(
+ 'int_number' | 'float_number'
+ | 'string' | 'raw_string'
+ | 'byte_string' | 'raw_byte_string'
+ | 'true' | 'false'
+ | 'char' | 'byte'
+ )
+
+PathExpr =
+ Attr* Path
+
+StmtList =
+ '{'
+ Attr*
+ statements:Stmt*
+ tail_expr:Expr?
+ '}'
+
+RefExpr =
+ Attr* '&' ('raw' | 'mut' | 'const') Expr
+
+TryExpr =
+ Attr* Expr '?'
+
+BlockExpr =
+ Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList
+
+PrefixExpr =
+ Attr* op:('-' | '!' | '*') Expr
+
+BinExpr =
+ Attr*
+ lhs:Expr
+ op:(
+ '||' | '&&'
+ | '==' | '!=' | '<=' | '>=' | '<' | '>'
+ | '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&'
+ | '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^='
+ )
+ rhs:Expr
+
+CastExpr =
+ Attr* Expr 'as' Type
+
+ParenExpr =
+ Attr* '(' Attr* Expr ')'
+
+ArrayExpr =
+ Attr* '[' Attr* (
+ (Expr (',' Expr)* ','?)?
+ | Expr ';' Expr
+ ) ']'
+
+IndexExpr =
+ Attr* base:Expr '[' index:Expr ']'
+
+TupleExpr =
+ Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? ')'
+
+RecordExpr =
+ Path RecordExprFieldList
+
+RecordExprFieldList =
+ '{'
+ Attr*
+ fields:(RecordExprField (',' RecordExprField)* ','?)?
+ ('..' spread:Expr?)?
+ '}'
+
+RecordExprField =
+ Attr* (NameRef ':')? Expr
+
+CallExpr =
+ Attr* Expr ArgList
+
+ArgList =
+ '(' args:(Expr (',' Expr)* ','?)? ')'
+
+MethodCallExpr =
+ Attr* receiver:Expr '.' NameRef GenericArgList? ArgList
+
+FieldExpr =
+ Attr* Expr '.' NameRef
+
+ClosureExpr =
+ Attr* ('for' GenericParamList)? 'static'? 'async'? 'move'? ParamList RetType?
+ body:Expr
+
+IfExpr =
+ Attr* 'if' condition:Expr then_branch:BlockExpr
+ ('else' else_branch:(IfExpr | BlockExpr))?
+
+LoopExpr =
+ Attr* Label? 'loop'
+ loop_body:BlockExpr
+
+ForExpr =
+ Attr* Label? 'for' Pat 'in' iterable:Expr
+ loop_body:BlockExpr
+
+WhileExpr =
+ Attr* Label? 'while' condition:Expr
+ loop_body:BlockExpr
+
+Label =
+ Lifetime ':'
+
+BreakExpr =
+ Attr* 'break' Lifetime? Expr?
+
+ContinueExpr =
+ Attr* 'continue' Lifetime?
+
+RangeExpr =
+ Attr* start:Expr? op:('..' | '..=') end:Expr?
+
+MatchExpr =
+ Attr* 'match' Expr MatchArmList
+
+MatchArmList =
+ '{'
+ Attr*
+ arms:MatchArm*
+ '}'
+
+MatchArm =
+ Attr* Pat guard:MatchGuard? '=>' Expr ','?
+
+MatchGuard =
+ 'if' condition:Expr
+
+ReturnExpr =
+ Attr* 'return' Expr?
+
+YieldExpr =
+ Attr* 'yield' Expr?
+
+LetExpr =
+ Attr* 'let' Pat '=' Expr
+
+UnderscoreExpr =
+ Attr* '_'
+
+AwaitExpr =
+ Attr* Expr '.' 'await'
+
+BoxExpr =
+ Attr* 'box' Expr
+
+//*************************//
+// Types //
+//*************************//
+
+Type =
+ ArrayType
+| DynTraitType
+| FnPtrType
+| ForType
+| ImplTraitType
+| InferType
+| MacroType
+| NeverType
+| ParenType
+| PathType
+| PtrType
+| RefType
+| SliceType
+| TupleType
+
+ParenType =
+ '(' Type ')'
+
+NeverType =
+ '!'
+
+MacroType =
+ MacroCall
+
+PathType =
+ Path
+
+TupleType =
+ '(' fields:(Type (',' Type)* ','?)? ')'
+
+PtrType =
+ '*' ('const' | 'mut') Type
+
+RefType =
+ '&' Lifetime? 'mut'? Type
+
+ArrayType =
+ '[' Type ';' Expr ']'
+
+SliceType =
+ '[' Type ']'
+
+InferType =
+ '_'
+
+FnPtrType =
+ 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
+
+ForType =
+ 'for' GenericParamList Type
+
+ImplTraitType =
+ 'impl' TypeBoundList
+
+DynTraitType =
+ 'dyn' TypeBoundList
+
+TypeBoundList =
+ bounds:(TypeBound ('+' TypeBound)* '+'?)
+
+TypeBound =
+ Lifetime
+| ('?' | '~' 'const')? Type
+
+//************************//
+// Patterns //
+//************************//
+
+Pat =
+ IdentPat
+| BoxPat
+| RestPat
+| LiteralPat
+| MacroPat
+| OrPat
+| ParenPat
+| PathPat
+| WildcardPat
+| RangePat
+| RecordPat
+| RefPat
+| SlicePat
+| TuplePat
+| TupleStructPat
+| ConstBlockPat
+
+LiteralPat =
+ Literal
+
+IdentPat =
+ Attr* 'ref'? 'mut'? Name ('@' Pat)?
+
+WildcardPat =
+ '_'
+
+RangePat =
+ // 1..
+ start:Pat op:('..' | '..=')
+ // 1..2
+ | start:Pat op:('..' | '..=') end:Pat
+ // ..2
+ | op:('..' | '..=') end:Pat
+
+RefPat =
+ '&' 'mut'? Pat
+
+RecordPat =
+ Path RecordPatFieldList
+
+RecordPatFieldList =
+ '{'
+ fields:(RecordPatField (',' RecordPatField)* ','?)?
+ RestPat?
+ '}'
+
+RecordPatField =
+ Attr* (NameRef ':')? Pat
+
+TupleStructPat =
+ Path '(' fields:(Pat (',' Pat)* ','?)? ')'
+
+TuplePat =
+ '(' fields:(Pat (',' Pat)* ','?)? ')'
+
+ParenPat =
+ '(' Pat ')'
+
+SlicePat =
+ '[' (Pat (',' Pat)* ','?)? ']'
+
+PathPat =
+ Path
+
+OrPat =
+ (Pat ('|' Pat)* '|'?)
+
+BoxPat =
+ 'box' Pat
+
+RestPat =
+ Attr* '..'
+
+MacroPat =
+ MacroCall
+
+ConstBlockPat =
+ 'const' BlockExpr
diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
new file mode 100644
index 000000000..8b14789dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
@@ -0,0 +1,660 @@
+//! Collection of assorted algorithms for syntax trees.
+
+use std::hash::BuildHasherDefault;
+
+use indexmap::IndexMap;
+use itertools::Itertools;
+use rustc_hash::FxHashMap;
+use text_edit::TextEditBuilder;
+
+use crate::{
+ AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize,
+};
+
+/// Returns ancestors of the node at the offset, sorted by length. This should
+/// do the right thing at an edge, e.g. when searching for expressions at `{
+/// $0foo }` we will get the name reference instead of the whole block, which
+/// we would get if we just did `find_token_at_offset(...).flat_map(|t|
+/// t.parent().ancestors())`.
+pub fn ancestors_at_offset(
+ node: &SyntaxNode,
+ offset: TextSize,
+) -> impl Iterator<Item = SyntaxNode> {
+ node.token_at_offset(offset)
+ .map(|token| token.parent_ancestors())
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+}
+
+/// Finds a node of specific Ast type at offset. Note that this is slightly
+/// imprecise: if the cursor is strictly between two nodes of the desired type,
+/// as in
+///
+/// ```no_run
+/// struct Foo {}|struct Bar;
+/// ```
+///
+/// then the shorter node will be silently preferred.
+pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextSize) -> Option<N> {
+ ancestors_at_offset(syntax, offset).find_map(N::cast)
+}
+
+pub fn find_node_at_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
+ syntax.covering_element(range).ancestors().find_map(N::cast)
+}
+
+/// Skip to next non `trivia` token
+pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+ while token.kind().is_trivia() {
+ token = match direction {
+ Direction::Next => token.next_token()?,
+ Direction::Prev => token.prev_token()?,
+ }
+ }
+ Some(token)
+}
+/// Skip to next non `whitespace` token
+pub fn skip_whitespace_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+ while token.kind() == SyntaxKind::WHITESPACE {
+ token = match direction {
+ Direction::Next => token.next_token()?,
+ Direction::Prev => token.prev_token()?,
+ }
+ }
+ Some(token)
+}
+
+/// Finds the first sibling in the given direction which is not `trivia`
+pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
+ return match element {
+ NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+ NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
+ };
+
+ fn not_trivia(element: &SyntaxElement) -> bool {
+ match element {
+ NodeOrToken::Node(_) => true,
+ NodeOrToken::Token(token) => !token.kind().is_trivia(),
+ }
+ }
+}
+
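+/// Finds the lowest common ancestor of two nodes by trimming both ancestor
+/// chains to equal depth and walking them in lockstep until they meet.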
+pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
+ if u == v {
+ return Some(u.clone());
+ }
+
+ let u_depth = u.ancestors().count();
+ let v_depth = v.ancestors().count();
+ let keep = u_depth.min(v_depth);
+
+ let u_candidates = u.ancestors().skip(u_depth - keep);
+ let v_candidates = v.ancestors().skip(v_depth - keep);
+ let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
+ Some(res)
+}
+
+pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
+ me.syntax().siblings(direction).skip(1).find_map(T::cast)
+}
+
+pub fn has_errors(node: &SyntaxNode) -> bool {
+ node.children().any(|it| it.kind() == SyntaxKind::ERROR)
+}
+
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+
+#[derive(Debug, Hash, PartialEq, Eq)]
+enum TreeDiffInsertPos {
+ After(SyntaxElement),
+ AsFirstChild(SyntaxElement),
+}
+
+#[derive(Debug)]
+pub struct TreeDiff {
+ replacements: FxHashMap<SyntaxElement, SyntaxElement>,
+ deletions: Vec<SyntaxElement>,
+ // the vec as well as the indexmap are both here to preserve order
+ insertions: FxIndexMap<TreeDiffInsertPos, Vec<SyntaxElement>>,
+}
+
+impl TreeDiff {
+ pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
+ let _p = profile::span("into_text_edit");
+
+ for (anchor, to) in &self.insertions {
+ let offset = match anchor {
+ TreeDiffInsertPos::After(it) => it.text_range().end(),
+ TreeDiffInsertPos::AsFirstChild(it) => it.text_range().start(),
+ };
+ to.iter().for_each(|to| builder.insert(offset, to.to_string()));
+ }
+ for (from, to) in &self.replacements {
+ builder.replace(from.text_range(), to.to_string());
+ }
+ for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
+ builder.delete(text_range);
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty()
+ }
+}
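+
+// Typical pipeline (a sketch mirroring `check_diff` in the tests below, with
+// `before` and `after` being `&str` sources): compute the diff of two parsed trees,
+// lower it into a `TextEdit`, and apply that edit to the original source text.
+//
+//     let from = crate::SourceFile::parse(before).tree().syntax().clone();
+//     let to = crate::SourceFile::parse(after).tree().syntax().clone();
+//     let mut builder = TextEdit::builder();
+//     diff(&from, &to).into_text_edit(&mut builder);
+//     let mut text = before.to_owned();
+//     builder.finish().apply(&mut text);
+//     assert_eq!(text, after);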
+
+/// Finds a (potentially minimal) diff, which, applied to `from`, will result in `to`.
+///
+/// Specifically, returns a structure consisting of replacements, insertions, and
+/// deletions such that applying them to `from` will result in `to`.
+///
+/// This function tries to find a fine-grained diff.
+pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
+ let _p = profile::span("diff");
+
+ let mut diff = TreeDiff {
+ replacements: FxHashMap::default(),
+ insertions: FxIndexMap::default(),
+ deletions: Vec::new(),
+ };
+ let (from, to) = (from.clone().into(), to.clone().into());
+
+ if !syntax_element_eq(&from, &to) {
+ go(&mut diff, from, to);
+ }
+ return diff;
+
+ fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool {
+ lhs.kind() == rhs.kind()
+ && lhs.text_range().len() == rhs.text_range().len()
+ && match (&lhs, &rhs) {
+ (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
+ lhs == rhs || lhs.text() == rhs.text()
+ }
+ (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
+ _ => false,
+ }
+ }
+
+ // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly.
+ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
+ let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) {
+ Some((lhs, rhs)) => (lhs, rhs),
+ _ => {
+ cov_mark::hit!(diff_node_token_replace);
+ diff.replacements.insert(lhs, rhs);
+ return;
+ }
+ };
+
+ let mut look_ahead_scratch = Vec::default();
+
+ let mut rhs_children = rhs.children_with_tokens();
+ let mut lhs_children = lhs.children_with_tokens();
+ let mut last_lhs = None;
+ loop {
+ let lhs_child = lhs_children.next();
+ match (lhs_child.clone(), rhs_children.next()) {
+ (None, None) => break,
+ (None, Some(element)) => {
+ let insert_pos = match last_lhs.clone() {
+ Some(prev) => {
+ cov_mark::hit!(diff_insert);
+ TreeDiffInsertPos::After(prev)
+ }
+ // first iteration, insert into our parent as the first child
+ None => {
+ cov_mark::hit!(diff_insert_as_first_child);
+ TreeDiffInsertPos::AsFirstChild(lhs.clone().into())
+ }
+ };
+ diff.insertions.entry(insert_pos).or_insert_with(Vec::new).push(element);
+ }
+ (Some(element), None) => {
+ cov_mark::hit!(diff_delete);
+ diff.deletions.push(element);
+ }
+ (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {}
+ (Some(lhs_ele), Some(rhs_ele)) => {
+ // The nodes differ; look ahead for lhs_ele in rhs. If it is found, everything up
+ // to that element can be recorded as insertions. This keeps the diff minimal with
+ // respect to insertions that were actually made, which matters for inserted `use`
+ // items, where we do not want to replace the entire module node.
+ look_ahead_scratch.push(rhs_ele.clone());
+ let mut rhs_children_clone = rhs_children.clone();
+ let mut insert = false;
+ for rhs_child in &mut rhs_children_clone {
+ if syntax_element_eq(&lhs_ele, &rhs_child) {
+ cov_mark::hit!(diff_insertions);
+ insert = true;
+ break;
+ }
+ look_ahead_scratch.push(rhs_child);
+ }
+ let drain = look_ahead_scratch.drain(..);
+ if insert {
+ let insert_pos = if let Some(prev) = last_lhs.clone().filter(|_| insert) {
+ TreeDiffInsertPos::After(prev)
+ } else {
+ cov_mark::hit!(insert_first_child);
+ TreeDiffInsertPos::AsFirstChild(lhs.clone().into())
+ };
+
+ diff.insertions.entry(insert_pos).or_insert_with(Vec::new).extend(drain);
+ rhs_children = rhs_children_clone;
+ } else {
+ go(diff, lhs_ele, rhs_ele);
+ }
+ }
+ }
+ last_lhs = lhs_child.or(last_lhs);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use itertools::Itertools;
+ use parser::SyntaxKind;
+ use text_edit::TextEdit;
+
+ use crate::{AstNode, SyntaxElement};
+
+ #[test]
+ fn replace_node_token() {
+ cov_mark::check!(diff_node_token_replace);
+ check_diff(
+ r#"use node;"#,
+ r#"ident"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 0: Token(USE_KW@0..3 "use") -> ident
+
+ deletions:
+
+ Line 1: " "
+ Line 1: node
+ Line 1: ;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn replace_parent() {
+ cov_mark::check!(diff_insert_as_first_child);
+ check_diff(
+ r#""#,
+ r#"use foo::bar;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: AsFirstChild(Node(SOURCE_FILE@0..0))
+ -> use foo::bar;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_last() {
+ cov_mark::check!(diff_insert);
+ check_diff(
+ r#"
+use foo;
+use bar;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Node(USE@10..18))
+ -> "\n"
+ -> use baz;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_middle() {
+ check_diff(
+ r#"
+use foo;
+use baz;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Token(WHITESPACE@9..10 "\n"))
+ -> use bar;
+ -> "\n"
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn insert_first() {
+ check_diff(
+ r#"
+use bar;
+use baz;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: After(Token(WHITESPACE@0..1 "\n"))
+ -> use foo;
+ -> "\n"
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn first_child_insertion() {
+ cov_mark::check!(insert_first_child);
+ check_diff(
+ r#"fn main() {
+ stdi
+ }"#,
+ r#"use foo::bar;
+
+ fn main() {
+ stdi
+ }"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: AsFirstChild(Node(SOURCE_FILE@0..30))
+ -> use foo::bar;
+ -> "\n\n "
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete_last() {
+ cov_mark::check!(diff_delete);
+ check_diff(
+ r#"use foo;
+ use bar;"#,
+ r#"use foo;"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 1: "\n "
+ Line 2: use bar;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete_middle() {
+ cov_mark::check!(diff_insertions);
+ check_diff(
+ r#"
+use expect_test::{expect, Expect};
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use expect_test::{expect, Expect};
+
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 1: After(Node(USE@1..35))
+ -> "\n\n"
+ -> use crate::AstNode;
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 2: use text_edit::TextEdit;
+ Line 3: "\n\n"
+ Line 4: use crate::AstNode;
+ Line 5: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn delete_first() {
+ check_diff(
+ r#"
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 2: Token(IDENT@5..14 "text_edit") -> crate
+ Line 2: Token(IDENT@16..24 "TextEdit") -> AstNode
+ Line 2: Token(WHITESPACE@25..27 "\n\n") -> "\n"
+
+ deletions:
+
+ Line 3: use crate::AstNode;
+ Line 4: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn merge_use() {
+ check_diff(
+ r#"
+use std::{
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+};
+"#,
+ r#"
+use std::fmt;
+use std::hash::BuildHasherDefault;
+use std::ops::{self, RangeInclusive};
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Node(PATH_SEGMENT@5..8))
+ -> ::
+ -> fmt
+ Line 6: After(Token(WHITESPACE@86..87 "\n"))
+ -> use std::hash::BuildHasherDefault;
+ -> "\n"
+ -> use std::ops::{self, RangeInclusive};
+ -> "\n"
+
+ replacements:
+
+ Line 2: Token(IDENT@5..8 "std") -> std
+
+ deletions:
+
+ Line 2: ::
+ Line 2: {
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+ }
+ "#]],
+ )
+ }
+
+ #[test]
+ fn early_return_assist() {
+ check_diff(
+ r#"
+fn main() {
+ if let Ok(x) = Err(92) {
+ foo(x);
+ }
+}
+ "#,
+ r#"
+fn main() {
+ let x = match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ };
+ foo(x);
+}
+ "#,
+ expect![[r#"
+ insertions:
+
+ Line 3: After(Node(BLOCK_EXPR@40..63))
+ -> " "
+ -> match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ }
+ -> ;
+ Line 3: After(Node(IF_EXPR@17..63))
+ -> "\n "
+ -> foo(x);
+
+ replacements:
+
+ Line 3: Token(IF_KW@17..19 "if") -> let
+ Line 3: Token(LET_KW@20..23 "let") -> x
+ Line 3: Node(BLOCK_EXPR@40..63) -> =
+
+ deletions:
+
+ Line 3: " "
+ Line 3: Ok(x)
+ Line 3: " "
+ Line 3: =
+ Line 3: " "
+ Line 3: Err(92)
+ "#]],
+ )
+ }
+
+ fn check_diff(from: &str, to: &str, expected_diff: Expect) {
+ let from_node = crate::SourceFile::parse(from).tree().syntax().clone();
+ let to_node = crate::SourceFile::parse(to).tree().syntax().clone();
+ let diff = super::diff(&from_node, &to_node);
+
+ let line_number =
+ |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count();
+
+ let fmt_syntax = |syn: &SyntaxElement| match syn.kind() {
+ SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()),
+ _ => format!("{}", syn),
+ };
+
+ let insertions =
+ diff.insertions.iter().format_with("\n", |(k, v), f| -> Result<(), std::fmt::Error> {
+ f(&format!(
+ "Line {}: {:?}\n-> {}",
+ line_number(match k {
+ super::TreeDiffInsertPos::After(syn) => syn,
+ super::TreeDiffInsertPos::AsFirstChild(syn) => syn,
+ }),
+ k,
+ v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v)))
+ ))
+ });
+
+ let replacements = diff
+ .replacements
+ .iter()
+ .sorted_by_key(|(syntax, _)| syntax.text_range().start())
+ .format_with("\n", |(k, v), f| {
+ f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v)))
+ });
+
+ let deletions = diff
+ .deletions
+ .iter()
+ .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v))));
+
+ let actual = format!(
+ "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n",
+ insertions, replacements, deletions
+ );
+ expected_diff.assert_eq(&actual);
+
+ let mut from = from.to_owned();
+ let mut text_edit = TextEdit::builder();
+ diff.into_text_edit(&mut text_edit);
+ text_edit.finish().apply(&mut from);
+ assert_eq!(&*from, to, "diff did not turn `from` to `to`");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
new file mode 100644
index 000000000..4aa64d0d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -0,0 +1,367 @@
+//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
+
+mod generated;
+mod traits;
+mod token_ext;
+mod node_ext;
+mod expr_ext;
+mod operators;
+pub mod edit;
+pub mod edit_in_place;
+pub mod make;
+
+use std::marker::PhantomData;
+
+use crate::{
+ syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
+ SyntaxKind,
+};
+
+pub use self::{
+ expr_ext::{ArrayExprKind, BlockModifier, CallableExpr, ElseBranch, LiteralKind},
+ generated::{nodes::*, tokens::*},
+ node_ext::{
+ AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
+ SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+ },
+ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+ token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
+ traits::{
+ AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams,
+ HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+ },
+};
+
+/// The main trait to go from untyped `SyntaxNode` to a typed ast. The
+/// conversion itself has zero runtime cost: ast and syntax nodes have exactly
+/// the same representation: a pointer to the tree root and a pointer to the
+/// node itself.
+pub trait AstNode {
+ fn can_cast(kind: SyntaxKind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxNode;
+ fn clone_for_update(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_for_update()).unwrap()
+ }
+ fn clone_subtree(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_subtree()).unwrap()
+ }
+}
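+
+// A short sketch of going from untyped to typed (mirroring the tests below): find
+// the first node in a parsed file that casts to a typed AST node.
+//
+//     let file = SourceFile::parse("fn foo() {}").tree();
+//     let func = file.syntax().descendants().find_map(Fn::cast).unwrap();
+//     assert!(Fn::can_cast(func.syntax().kind()));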
+
+/// Like `AstNode`, but wraps tokens rather than interior nodes.
+pub trait AstToken {
+ fn can_cast(token: SyntaxKind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(syntax: SyntaxToken) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxToken;
+
+ fn text(&self) -> &str {
+ self.syntax().text()
+ }
+}
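+
+// The token-level counterpart (sketch): cast a raw `SyntaxToken` to a typed token
+// such as a comment and read its text through the `AstToken` API.
+//
+//     let file = SourceFile::parse("// hello\nfn foo() {}").tree();
+//     let comment = file
+//         .syntax()
+//         .descendants_with_tokens()
+//         .filter_map(|it| it.into_token())
+//         .find_map(Comment::cast)
+//         .unwrap();
+//     assert_eq!(comment.text(), "// hello");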
+
+/// An iterator over `SyntaxNode` children of a particular AST type.
+#[derive(Debug, Clone)]
+pub struct AstChildren<N> {
+ inner: SyntaxNodeChildren,
+ ph: PhantomData<N>,
+}
+
+impl<N> AstChildren<N> {
+ fn new(parent: &SyntaxNode) -> Self {
+ AstChildren { inner: parent.children(), ph: PhantomData }
+ }
+}
+
+impl<N: AstNode> Iterator for AstChildren<N> {
+ type Item = N;
+ fn next(&mut self) -> Option<N> {
+ self.inner.find_map(N::cast)
+ }
+}
+
+mod support {
+ use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
+
+ pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
+ parent.children().find_map(N::cast)
+ }
+
+ pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
+ AstChildren::new(parent)
+ }
+
+ pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
+ parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+ }
+}
+
+#[test]
+fn assert_ast_is_object_safe() {
+ fn _f(_: &dyn AstNode, _: &dyn HasName) {}
+}
+
+#[test]
+fn test_doc_comment_none() {
+ let file = SourceFile::parse(
+ r#"
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert!(module.doc_comments().doc_comment_text().is_none());
+}
+
+#[test]
+fn test_outer_doc_comment_of_items() {
+ let file = SourceFile::parse(
+ r#"
+ /// doc
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" doc", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_inner_doc_comment_of_items() {
+ let file = SourceFile::parse(
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert!(module.doc_comments().doc_comment_text().is_none());
+}
+
+#[test]
+fn test_doc_comment_of_statics() {
+ let file = SourceFile::parse(
+ r#"
+ /// Number of levels
+ static LEVELS: i32 = 0;
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let st = file.syntax().descendants().find_map(Static::cast).unwrap();
+ assert_eq!(" Number of levels", st.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_preserves_indents() {
+ let file = SourceFile::parse(
+ r#"
+ /// doc1
+ /// ```
+ /// fn foo() {
+ /// // ...
+ /// }
+ /// ```
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(
+ " doc1\n ```\n fn foo() {\n // ...\n }\n ```",
+ module.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_doc_comment_preserves_newlines() {
+ let file = SourceFile::parse(
+ r#"
+ /// this
+ /// is
+ /// mod
+ /// foo
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this\n is\n mod\n foo", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix() {
+ let file = SourceFile::parse(
+ r#"
+ /** this is mod foo*/
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this is mod foo", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
+ let file = SourceFile::parse(
+ r#"
+ /** this is mod foo */
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this is mod foo ", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_multi_line_block_strips_suffix() {
+ let file = SourceFile::parse(
+ r#"
+ /**
+ this
+ is
+ mod foo
+ */
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(
+ "\n this\n is\n mod foo\n ",
+ module.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_comments_preserve_trailing_whitespace() {
+ let file = SourceFile::parse(
+ "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}",
+ )
+ .ok()
+ .unwrap();
+ let def = file.syntax().descendants().find_map(Struct::cast).unwrap();
+ assert_eq!(
+ " Representation of a Realm. \n In the specification these are called Realm Records.",
+ def.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_four_slash_line_comment() {
+ let file = SourceFile::parse(
+ r#"
+ //// too many slashes to be a doc comment
+ /// doc comment
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" doc comment", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_where_predicates() {
+ fn assert_bound(text: &str, bound: Option<TypeBound>) {
+ assert_eq!(text, bound.unwrap().syntax().text().to_string());
+ }
+
+ let file = SourceFile::parse(
+ r#"
+fn foo()
+where
+ T: Clone + Copy + Debug + 'static,
+ 'a: 'b + 'c,
+ Iterator::Item: 'a + Debug,
+ Iterator::Item: Debug + 'a,
+ <T as Iterator>::Item: Debug + 'a,
+ for<'a> F: Fn(&'a str)
+{}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap();
+
+ let mut predicates = where_clause.predicates();
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert!(pred.for_token().is_none());
+ assert!(pred.generic_param_list().is_none());
+ assert_eq!("T", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Clone", bounds.next());
+ assert_bound("Copy", bounds.next());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'static", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("'a", pred.lifetime().unwrap().lifetime_ident_token().unwrap().text());
+
+ assert_bound("'b", bounds.next());
+ assert_bound("'c", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("<T as Iterator>::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert!(pred.for_token().is_some());
+ assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string());
+ assert_eq!("F", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Fn(&'a str)", bounds.next());
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
new file mode 100644
index 000000000..15805dfc8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
@@ -0,0 +1,174 @@
+//! This module contains functions for editing syntax trees. As the trees are
+//! immutable, all functions here return a fresh copy of the tree instead of
+//! modifying it in place.
+use std::{fmt, iter, ops};
+
+use crate::{
+ ast::{self, make, AstNode},
+ ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+};
+
+#[derive(Debug, Clone, Copy)]
+pub struct IndentLevel(pub u8);
+
+impl From<u8> for IndentLevel {
+ fn from(level: u8) -> IndentLevel {
+ IndentLevel(level)
+ }
+}
+
+impl fmt::Display for IndentLevel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let spaces = " ";
+ let buf;
+ let len = self.0 as usize * 4;
+ let indent = if len <= spaces.len() {
+ &spaces[..len]
+ } else {
+ buf = " ".repeat(len);
+ &buf
+ };
+ fmt::Display::fmt(indent, f)
+ }
+}
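+
+// One indent level corresponds to four spaces, so for example `IndentLevel(2)`
+// renders as eight spaces (a small illustration of the `Display` impl above):
+//
+//     assert_eq!(IndentLevel(2).to_string(), "        ");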
+
+impl ops::Add<u8> for IndentLevel {
+ type Output = IndentLevel;
+ fn add(self, rhs: u8) -> IndentLevel {
+ IndentLevel(self.0 + rhs)
+ }
+}
+
+impl IndentLevel {
+ pub fn single() -> IndentLevel {
+ IndentLevel(0)
+ }
+ pub fn is_zero(&self) -> bool {
+ self.0 == 0
+ }
+ pub fn from_element(element: &SyntaxElement) -> IndentLevel {
+ match element {
+ rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it),
+ rowan::NodeOrToken::Token(it) => IndentLevel::from_token(it),
+ }
+ }
+
+ pub fn from_node(node: &SyntaxNode) -> IndentLevel {
+ match node.first_token() {
+ Some(it) => Self::from_token(&it),
+ None => IndentLevel(0),
+ }
+ }
+
+ pub fn from_token(token: &SyntaxToken) -> IndentLevel {
+ for ws in prev_tokens(token.clone()).filter_map(ast::Whitespace::cast) {
+ let text = ws.syntax().text();
+ if let Some(pos) = text.rfind('\n') {
+ let level = text[pos + 1..].chars().count() / 4;
+ return IndentLevel(level as u8);
+ }
+ }
+ IndentLevel(0)
+ }
+
+ /// XXX: this intentionally doesn't change the indent of the very first token.
+ /// I.e., in something like
+ /// ```
+ /// fn foo() {
+ /// 92
+ /// }
+ /// ```
+ /// if you indent the block, the `{` token would stay put.
+ pub(super) fn increase_indent(self, node: &SyntaxNode) {
+ let tokens = node.preorder_with_tokens().filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ });
+ for token in tokens {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if ws.text().contains('\n') {
+ let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
+ ted::replace(ws.syntax(), &new_ws);
+ }
+ }
+ }
+ }
+
+ pub(super) fn decrease_indent(self, node: &SyntaxNode) {
+ let tokens = node.preorder_with_tokens().filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ });
+ for token in tokens {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if ws.text().contains('\n') {
+ let new_ws = make::tokens::whitespace(
+ &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
+ );
+ ted::replace(ws.syntax(), &new_ws);
+ }
+ }
+ }
+ }
+}
+
+fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
+ iter::successors(Some(token), |token| token.prev_token())
+}
+
+/// Soft-deprecated in favor of the mutable tree editing API `edit_in_place::Indent`.
+pub trait AstNodeEdit: AstNode + Clone + Sized {
+ fn indent_level(&self) -> IndentLevel {
+ IndentLevel::from_node(self.syntax())
+ }
+ #[must_use]
+ fn indent(&self, level: IndentLevel) -> Self {
+ fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
+ let res = node.clone_subtree().clone_for_update();
+ level.increase_indent(&res);
+ res.clone_subtree()
+ }
+
+ Self::cast(indent_inner(self.syntax(), level)).unwrap()
+ }
+ #[must_use]
+ fn dedent(&self, level: IndentLevel) -> Self {
+ fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
+ let res = node.clone_subtree().clone_for_update();
+ level.decrease_indent(&res);
+ res.clone_subtree()
+ }
+
+ Self::cast(dedent_inner(self.syntax(), level)).unwrap()
+ }
+ #[must_use]
+ fn reset_indent(&self) -> Self {
+ let level = IndentLevel::from_node(self.syntax());
+ self.dedent(level)
+ }
+}
+
+impl<N: AstNode + Clone> AstNodeEdit for N {}
+
+#[test]
+fn test_increase_indent() {
+ let arm_list = {
+ let arm = make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit());
+ make::match_arm_list(vec![arm.clone(), arm])
+ };
+ assert_eq!(
+ arm_list.syntax().to_string(),
+ "{
+ _ => (),
+ _ => (),
+}"
+ );
+ let indented = arm_list.indent(IndentLevel(2));
+ assert_eq!(
+ indented.syntax().to_string(),
+ "{
+ _ => (),
+ _ => (),
+ }"
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
new file mode 100644
index 000000000..e3e928aec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -0,0 +1,717 @@
+//! Structural editing for ast.
+
+use std::iter::{empty, successors};
+
+use parser::{SyntaxKind, T};
+use rowan::SyntaxElement;
+
+use crate::{
+ algo::{self, neighbor},
+ ast::{self, edit::IndentLevel, make, HasGenericParams},
+ ted::{self, Position},
+ AstNode, AstToken, Direction,
+ SyntaxKind::{ATTR, COMMENT, WHITESPACE},
+ SyntaxNode,
+};
+
+use super::HasName;
+
+pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
+ fn get_or_create_where_clause(&self) -> ast::WhereClause;
+}
+
+impl GenericParamsOwnerEdit for ast::Fn {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(fn_token) = self.fn_token() {
+ Position::after(fn_token)
+ } else if let Some(param_list) = self.param_list() {
+ Position::before(param_list.syntax)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = if let Some(ty) = self.ret_type() {
+ Position::after(ty.syntax())
+ } else if let Some(param_list) = self.param_list() {
+ Position::after(param_list.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Impl {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = match self.impl_token() {
+ Some(imp_token) => Position::after(imp_token),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.assoc_item_list() {
+ Some(items) => Position::before(items.syntax()),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Trait {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(trait_token) = self.trait_token() {
+ Position::after(trait_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.assoc_item_list() {
+ Some(items) => Position::before(items.syntax()),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Struct {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(struct_token) = self.struct_token() {
+ Position::after(struct_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let tfl = self.field_list().and_then(|fl| match fl {
+ ast::FieldList::RecordFieldList(_) => None,
+ ast::FieldList::TupleFieldList(it) => Some(it),
+ });
+ let position = if let Some(tfl) = tfl {
+ Position::after(tfl.syntax())
+ } else if let Some(gpl) = self.generic_param_list() {
+ Position::after(gpl.syntax())
+ } else if let Some(name) = self.name() {
+ Position::after(name.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Enum {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(enum_token) = self.enum_token() {
+ Position::after(enum_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = if let Some(gpl) = self.generic_param_list() {
+ Position::after(gpl.syntax())
+ } else if let Some(name) = self.name() {
+ Position::after(name.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+fn create_where_clause(position: Position) {
+ let where_clause = make::where_clause(empty()).clone_for_update();
+ ted::insert(position, where_clause.syntax());
+}
+
+fn create_generic_param_list(position: Position) -> ast::GenericParamList {
+ let gpl = make::generic_param_list(empty()).clone_for_update();
+ ted::insert_raw(position, gpl.syntax());
+ gpl
+}
+
+pub trait AttrsOwnerEdit: ast::HasAttrs {
+ fn remove_attrs_and_docs(&self) {
+ remove_attrs_and_docs(self.syntax());
+
+ fn remove_attrs_and_docs(node: &SyntaxNode) {
+ let mut remove_next_ws = false;
+ for child in node.children_with_tokens() {
+ match child.kind() {
+ ATTR | COMMENT => {
+ remove_next_ws = true;
+ child.detach();
+ continue;
+ }
+ WHITESPACE if remove_next_ws => {
+ child.detach();
+ }
+ _ => (),
+ }
+ remove_next_ws = false;
+ }
+ }
+ }
+}
+
+impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
+
+impl ast::GenericParamList {
+ pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
+ match self.generic_params().last() {
+ Some(last_param) => {
+ let position = Position::after(last_param.syntax());
+ let elements = vec![
+ make::token(T![,]).into(),
+ make::tokens::single_space().into(),
+ generic_param.syntax().clone().into(),
+ ];
+ ted::insert_all(position, elements);
+ }
+ None => {
+ let after_l_angle = Position::after(self.l_angle_token().unwrap());
+ ted::insert(after_l_angle, generic_param.syntax());
+ }
+ }
+ }
+}
+
+impl ast::WhereClause {
+ pub fn add_predicate(&self, predicate: ast::WherePred) {
+ if let Some(pred) = self.predicates().last() {
+ if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) {
+ ted::append_child_raw(self.syntax(), make::token(T![,]));
+ }
+ }
+ ted::append_child(self.syntax(), predicate.syntax());
+ }
+}
+
+impl ast::TypeBoundList {
+ pub fn remove(&self) {
+ match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) {
+ Some(colon) => ted::remove_all(colon..=self.syntax().clone().into()),
+ None => ted::remove(self.syntax()),
+ }
+ }
+}
+
+impl ast::PathSegment {
+ pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+ if self.generic_arg_list().is_none() {
+ let arg_list = make::generic_arg_list().clone_for_update();
+ ted::append_child(self.syntax(), arg_list.syntax());
+ }
+ self.generic_arg_list().unwrap()
+ }
+}
+
+impl ast::UseTree {
+ pub fn remove(&self) {
+ for dir in [Direction::Next, Direction::Prev] {
+ if let Some(next_use_tree) = neighbor(self, dir) {
+ let separators = self
+ .syntax()
+ .siblings_with_tokens(dir)
+ .skip(1)
+ .take_while(|it| it.as_node() != Some(next_use_tree.syntax()));
+ ted::remove_all_iter(separators);
+ break;
+ }
+ }
+ ted::remove(self.syntax());
+ }
+
+ pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList {
+ match self.use_tree_list() {
+ Some(it) => it,
+ None => {
+ let position = Position::last_child_of(self.syntax());
+ let use_tree_list = make::use_tree_list(empty()).clone_for_update();
+ let mut elements = Vec::with_capacity(2);
+ if self.coloncolon_token().is_none() {
+ elements.push(make::token(T![::]).into());
+ }
+ elements.push(use_tree_list.syntax().clone().into());
+ ted::insert_all_raw(position, elements);
+ use_tree_list
+ }
+ }
+ }
+
+ /// Splits off the given prefix, making it the path component of the use tree,
+ /// appending the rest of the path to all UseTreeList items.
+ ///
+ /// # Examples
+ ///
+ /// `prefix$0::suffix` -> `prefix::{suffix}`
+ ///
+ /// `prefix$0` -> `prefix::{self}`
+ ///
+ /// `prefix$0::*` -> `prefix::{*}`
+ pub fn split_prefix(&self, prefix: &ast::Path) {
+ debug_assert_eq!(self.path(), Some(prefix.top_path()));
+ let path = self.path().unwrap();
+ if &path == prefix && self.use_tree_list().is_none() {
+ if self.star_token().is_some() {
+ // path$0::* -> *
+ self.coloncolon_token().map(ted::remove);
+ ted::remove(prefix.syntax());
+ } else {
+ // path$0 -> self
+ let self_suffix =
+ make::path_unqualified(make::path_segment_self()).clone_for_update();
+ ted::replace(path.syntax(), self_suffix.syntax());
+ }
+ } else if split_path_prefix(prefix).is_none() {
+ return;
+ }
+ // At this point, prefix path is detached; _self_ use tree has suffix path.
+ // Next, transform 'suffix' use tree into 'prefix::{suffix}'
+ let subtree = self.clone_subtree().clone_for_update();
+ ted::remove_all_iter(self.syntax().children_with_tokens());
+ ted::insert(Position::first_child_of(self.syntax()), prefix.syntax());
+ self.get_or_create_use_tree_list().add_use_tree(subtree);
+
+ fn split_path_prefix(prefix: &ast::Path) -> Option<()> {
+ let parent = prefix.parent_path()?;
+ let segment = parent.segment()?;
+ if algo::has_errors(segment.syntax()) {
+ return None;
+ }
+ for p in successors(parent.parent_path(), |it| it.parent_path()) {
+ p.segment()?;
+ }
+ prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
+ ted::remove(prefix.syntax());
+ Some(())
+ }
+ }
+}
+
+impl ast::UseTreeList {
+ pub fn add_use_tree(&self, use_tree: ast::UseTree) {
+ let (position, elements) = match self.use_trees().last() {
+ Some(last_tree) => (
+ Position::after(last_tree.syntax()),
+ vec![
+ make::token(T![,]).into(),
+ make::tokens::single_space().into(),
+ use_tree.syntax.into(),
+ ],
+ ),
+ None => {
+ let position = match self.l_curly_token() {
+ Some(l_curly) => Position::after(l_curly),
+ None => Position::last_child_of(self.syntax()),
+ };
+ (position, vec![use_tree.syntax.into()])
+ }
+ };
+ ted::insert_all_raw(position, elements);
+ }
+}
+
+impl ast::Use {
+ pub fn remove(&self) {
+ let next_ws = self
+ .syntax()
+ .next_sibling_or_token()
+ .and_then(|it| it.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(next_ws) = next_ws {
+ let ws_text = next_ws.syntax().text();
+ if let Some(rest) = ws_text.strip_prefix('\n') {
+ if rest.is_empty() {
+ ted::remove(next_ws.syntax());
+ } else {
+ ted::replace(next_ws.syntax(), make::tokens::whitespace(rest));
+ }
+ }
+ }
+ ted::remove(self.syntax());
+ }
+}
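+
+// Effect sketch: with `u` being the `ast::Use` for `use foo;` in a mutable tree of
+// "use foo;\nuse bar;\n", `u.remove()` deletes both the item and the newline that
+// followed it, leaving "use bar;\n" rather than a leading blank line.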
+
+impl ast::Impl {
+ pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
+ if self.assoc_item_list().is_none() {
+ let assoc_item_list = make::assoc_item_list().clone_for_update();
+ ted::append_child(self.syntax(), assoc_item_list.syntax());
+ }
+ self.assoc_item_list().unwrap()
+ }
+}
+
+impl ast::AssocItemList {
+ pub fn add_item(&self, item: ast::AssocItem) {
+ let (indent, position, whitespace) = match self.assoc_items().last() {
+ Some(last_item) => (
+ IndentLevel::from_node(last_item.syntax()),
+ Position::after(last_item.syntax()),
+ "\n\n",
+ ),
+ None => match self.l_curly_token() {
+ Some(l_curly) => {
+ normalize_ws_between_braces(self.syntax());
+ (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
+ }
+ None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
+ },
+ };
+ let elements: Vec<SyntaxElement<_>> = vec![
+ make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
+ item.syntax().clone().into(),
+ ];
+ ted::insert_all(position, elements);
+ }
+}
+
+impl ast::Fn {
+ pub fn get_or_create_body(&self) -> ast::BlockExpr {
+ if self.body().is_none() {
+ let body = make::ext::empty_block_expr().clone_for_update();
+ match self.semicolon_token() {
+ Some(semi) => {
+ ted::replace(semi, body.syntax());
+ ted::insert(Position::before(body.syntax), make::tokens::single_space());
+ }
+ None => ted::append_child(self.syntax(), body.syntax()),
+ }
+ }
+ self.body().unwrap()
+ }
+}
+
+impl ast::MatchArm {
+ pub fn remove(&self) {
+ if let Some(sibling) = self.syntax().prev_sibling_or_token() {
+ if sibling.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(sibling);
+ }
+ }
+ if let Some(sibling) = self.syntax().next_sibling_or_token() {
+ if sibling.kind() == T![,] {
+ ted::remove(sibling);
+ }
+ }
+ ted::remove(self.syntax());
+ }
+}
+
+impl ast::MatchArmList {
+ pub fn add_arm(&self, arm: ast::MatchArm) {
+ normalize_ws_between_braces(self.syntax());
+ let mut elements = Vec::new();
+ let position = match self.arms().last() {
+ Some(last_arm) => {
+ if needs_comma(&last_arm) {
+ ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ Position::after(last_arm.syntax().clone())
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into());
+ elements.push(arm.syntax().clone().into());
+ if needs_comma(&arm) {
+ ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ ted::insert_all(position, elements);
+
+ fn needs_comma(arm: &ast::MatchArm) -> bool {
+ arm.expr().map_or(false, |e| !e.is_block_like()) && arm.comma_token().is_none()
+ }
+ }
+}
+
+impl ast::RecordExprFieldList {
+ pub fn add_field(&self, field: ast::RecordExprField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
+ let comma = match last_field
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ {
+ Some(it) => it,
+ None => {
+ let comma = ast::make::token(T![,]);
+ ted::insert(Position::after(last_field.syntax()), &comma);
+ comma
+ }
+ };
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
+
+impl ast::RecordExprField {
+ /// This will either replace the initializer or, if this field uses shorthand
+ /// syntax, convert the shorthand into `name: expr` form, inserting `expr` as the
+ /// new initializer.
+ pub fn replace_expr(&self, expr: ast::Expr) {
+ if self.name_ref().is_some() {
+ match self.expr() {
+ Some(prev) => ted::replace(prev.syntax(), expr.syntax()),
+ None => ted::append_child(self.syntax(), expr.syntax()),
+ }
+ return;
+ }
+ // this is a shorthand
+ if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() {
+ if let Some(path) = path_expr.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ path_expr.syntax().detach();
+ let children = vec![
+ name_ref.syntax().clone().into(),
+ ast::make::token(T![:]).into(),
+ ast::make::tokens::single_space().into(),
+ expr.syntax().clone().into(),
+ ];
+ ted::insert_all_raw(Position::last_child_of(self.syntax()), children);
+ }
+ }
+ }
+ }
+}
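+
+// Sketch: for the shorthand field in `Foo { bar }` (in a mutable tree) and some
+// replacement expression `new_expr`, `field.replace_expr(new_expr)` expands the
+// shorthand and yields `Foo { bar: <new_expr> }`; for a non-shorthand field only
+// the initializer expression is swapped out.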
+
+impl ast::RecordPatFieldList {
+ pub fn add_field(&self, field: ast::RecordPatField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
+ let comma = match last_field
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ {
+ Some(it) => it,
+ None => {
+ let comma = ast::make::token(T![,]);
+ ted::insert(Position::after(last_field.syntax()), &comma);
+ comma
+ }
+ };
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
+impl ast::StmtList {
+ pub fn push_front(&self, statement: ast::Stmt) {
+ ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
+ }
+}
+
+fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
+ let l = node
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T!['{'])?;
+ let r = node
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T!['}'])?;
+
+ let indent = IndentLevel::from_node(node);
+
+ match l.next_sibling_or_token() {
+ Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
+ if ws.next_sibling_or_token()?.into_token()? == r {
+ ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+ Some(ws) if ws.kind() == T!['}'] => {
+ ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ _ => (),
+ }
+ Some(())
+}
+
+pub trait Indent: AstNode + Clone + Sized {
+ fn indent_level(&self) -> IndentLevel {
+ IndentLevel::from_node(self.syntax())
+ }
+ fn indent(&self, by: IndentLevel) {
+ by.increase_indent(self.syntax());
+ }
+ fn dedent(&self, by: IndentLevel) {
+ by.decrease_indent(self.syntax());
+ }
+ fn reindent_to(&self, target_level: IndentLevel) {
+ let current_level = IndentLevel::from_node(self.syntax());
+ self.dedent(current_level);
+ self.indent(target_level);
+ }
+}
+
+impl<N: AstNode + Clone> Indent for N {}
+
+#[cfg(test)]
+mod tests {
+ use std::fmt;
+
+ use crate::SourceFile;
+
+ use super::*;
+
+ fn ast_mut_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
+ }
+
+ #[test]
+ fn test_create_generic_param_list() {
+ fn check_create_gpl<N: GenericParamsOwnerEdit + fmt::Display>(before: &str, after: &str) {
+ let gpl_owner = ast_mut_from_text::<N>(before);
+ gpl_owner.get_or_create_generic_param_list();
+ assert_eq!(gpl_owner.to_string(), after);
+ }
+
+ check_create_gpl::<ast::Fn>("fn foo", "fn foo<>");
+ check_create_gpl::<ast::Fn>("fn foo() {}", "fn foo<>() {}");
+
+ check_create_gpl::<ast::Impl>("impl", "impl<>");
+ check_create_gpl::<ast::Impl>("impl Struct {}", "impl<> Struct {}");
+ check_create_gpl::<ast::Impl>("impl Trait for Struct {}", "impl<> Trait for Struct {}");
+
+ check_create_gpl::<ast::Trait>("trait Trait<>", "trait Trait<>");
+ check_create_gpl::<ast::Trait>("trait Trait<> {}", "trait Trait<> {}");
+
+ check_create_gpl::<ast::Struct>("struct A", "struct A<>");
+ check_create_gpl::<ast::Struct>("struct A;", "struct A<>;");
+ check_create_gpl::<ast::Struct>("struct A();", "struct A<>();");
+ check_create_gpl::<ast::Struct>("struct A {}", "struct A<> {}");
+
+ check_create_gpl::<ast::Enum>("enum E", "enum E<>");
+ check_create_gpl::<ast::Enum>("enum E {", "enum E<> {");
+ }
+
+ #[test]
+ fn test_increase_indent() {
+ let arm_list = ast_mut_from_text::<ast::Fn>(
+ "fn foo() {
+ ;
+ ;
+}",
+ );
+ arm_list.indent(IndentLevel(2));
+ assert_eq!(
+ arm_list.to_string(),
+ "fn foo() {
+ ;
+ ;
+ }",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
new file mode 100644
index 000000000..db66d08a7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -0,0 +1,410 @@
+//! Various extension methods for ast Expr nodes that are hard to code-generate.
+//!
+//! These methods should only do simple, shallow tasks related to the syntax of the node itself.
+
+use crate::{
+ ast::{
+ self,
+ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+ support, AstChildren, AstNode,
+ },
+ AstToken,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, T,
+};
+
+impl ast::HasAttrs for ast::Expr {}
+
+impl ast::Expr {
+ pub fn is_block_like(&self) -> bool {
+ matches!(
+ self,
+ ast::Expr::IfExpr(_)
+ | ast::Expr::LoopExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::WhileExpr(_)
+ | ast::Expr::BlockExpr(_)
+ | ast::Expr::MatchExpr(_)
+ )
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ElseBranch {
+ Block(ast::BlockExpr),
+ IfExpr(ast::IfExpr),
+}
+
+impl From<ast::BlockExpr> for ElseBranch {
+ fn from(block_expr: ast::BlockExpr) -> Self {
+ Self::Block(block_expr)
+ }
+}
+
+impl From<ast::IfExpr> for ElseBranch {
+ fn from(if_expr: ast::IfExpr) -> Self {
+ Self::IfExpr(if_expr)
+ }
+}
+
+impl ast::IfExpr {
+ pub fn then_branch(&self) -> Option<ast::BlockExpr> {
+ self.children_after_condition().next()
+ }
+
+ pub fn else_branch(&self) -> Option<ElseBranch> {
+ let res = match self.children_after_condition().nth(1) {
+ Some(block) => ElseBranch::Block(block),
+ None => {
+ let elif = self.children_after_condition().next()?;
+ ElseBranch::IfExpr(elif)
+ }
+ };
+ Some(res)
+ }
+
+ fn children_after_condition<N: AstNode>(&self) -> impl Iterator<Item = N> {
+ self.syntax().children().skip(1).filter_map(N::cast)
+ }
+}
+
+#[test]
+fn if_block_condition() {
+ let parse = ast::SourceFile::parse(
+ r#"
+ fn test() {
+ if { true } { "if" }
+ else if { false } { "first elif" }
+ else if true { "second elif" }
+ else if (true) { "third elif" }
+ else { "else" }
+ }
+ "#,
+ );
+ let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
+ assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
+ let elif = match if_.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "first elif" }"#);
+ let elif = match elif.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "second elif" }"#);
+ let elif = match elif.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "third elif" }"#);
+ let else_ = match elif.else_branch().unwrap() {
+ ElseBranch::Block(else_) => else_,
+ ElseBranch::IfExpr(_) => panic!("should be `else`"),
+ };
+ assert_eq!(else_.syntax().text(), r#"{ "else" }"#);
+}
+
+#[test]
+fn if_condition_with_if_inside() {
+ let parse = ast::SourceFile::parse(
+ r#"
+ fn test() {
+ if if true { true } else { false } { "if" }
+ else { "else" }
+ }
+ "#,
+ );
+ let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
+ assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
+ let else_ = match if_.else_branch().unwrap() {
+ ElseBranch::Block(else_) => else_,
+ ElseBranch::IfExpr(_) => panic!("should be `else`"),
+ };
+ assert_eq!(else_.syntax().text(), r#"{ "else" }"#);
+}
+
+impl ast::PrefixExpr {
+ pub fn op_kind(&self) -> Option<UnaryOp> {
+ let res = match self.op_token()?.kind() {
+ T![*] => UnaryOp::Deref,
+ T![!] => UnaryOp::Not,
+ T![-] => UnaryOp::Neg,
+ _ => return None,
+ };
+ Some(res)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.syntax().first_child_or_token()?.into_token()
+ }
+}
+
+impl ast::BinExpr {
+ pub fn op_details(&self) -> Option<(SyntaxToken, BinaryOp)> {
+ self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| {
+ #[rustfmt::skip]
+ let bin_op = match c.kind() {
+ T![||] => BinaryOp::LogicOp(LogicOp::Or),
+ T![&&] => BinaryOp::LogicOp(LogicOp::And),
+
+ T![==] => BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ T![!=] => BinaryOp::CmpOp(CmpOp::Eq { negated: true }),
+ T![<=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }),
+ T![>=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: false }),
+ T![<] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: true }),
+ T![>] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: true }),
+
+ T![+] => BinaryOp::ArithOp(ArithOp::Add),
+ T![*] => BinaryOp::ArithOp(ArithOp::Mul),
+ T![-] => BinaryOp::ArithOp(ArithOp::Sub),
+ T![/] => BinaryOp::ArithOp(ArithOp::Div),
+ T![%] => BinaryOp::ArithOp(ArithOp::Rem),
+ T![<<] => BinaryOp::ArithOp(ArithOp::Shl),
+ T![>>] => BinaryOp::ArithOp(ArithOp::Shr),
+ T![^] => BinaryOp::ArithOp(ArithOp::BitXor),
+ T![|] => BinaryOp::ArithOp(ArithOp::BitOr),
+ T![&] => BinaryOp::ArithOp(ArithOp::BitAnd),
+
+ T![=] => BinaryOp::Assignment { op: None },
+ T![+=] => BinaryOp::Assignment { op: Some(ArithOp::Add) },
+ T![*=] => BinaryOp::Assignment { op: Some(ArithOp::Mul) },
+ T![-=] => BinaryOp::Assignment { op: Some(ArithOp::Sub) },
+ T![/=] => BinaryOp::Assignment { op: Some(ArithOp::Div) },
+ T![%=] => BinaryOp::Assignment { op: Some(ArithOp::Rem) },
+ T![<<=] => BinaryOp::Assignment { op: Some(ArithOp::Shl) },
+ T![>>=] => BinaryOp::Assignment { op: Some(ArithOp::Shr) },
+ T![^=] => BinaryOp::Assignment { op: Some(ArithOp::BitXor) },
+ T![|=] => BinaryOp::Assignment { op: Some(ArithOp::BitOr) },
+ T![&=] => BinaryOp::Assignment { op: Some(ArithOp::BitAnd) },
+
+ _ => return None,
+ };
+ Some((c, bin_op))
+ })
+ }
+
+ pub fn op_kind(&self) -> Option<BinaryOp> {
+ self.op_details().map(|t| t.1)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.op_details().map(|t| t.0)
+ }
+
+ pub fn lhs(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).next()
+ }
+
+ pub fn rhs(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).nth(1)
+ }
+
+ pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
+ let mut children = support::children(self.syntax());
+ let first = children.next();
+ let second = children.next();
+ (first, second)
+ }
+}
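+
+// Sketch, in the style of the tests in this file: for the expression `1 + 2`,
+// `op_kind()` reports an addition and `lhs()`/`rhs()` return the two literals.
+//
+//     let parse = ast::SourceFile::parse("fn f() { 1 + 2; }");
+//     let bin = parse.tree().syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
+//     assert!(matches!(bin.op_kind(), Some(BinaryOp::ArithOp(ArithOp::Add))));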
+
+impl ast::RangeExpr {
+ fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> {
+ self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| {
+ let token = child.into_token()?;
+ let bin_op = match token.kind() {
+ T![..] => RangeOp::Exclusive,
+ T![..=] => RangeOp::Inclusive,
+ _ => return None,
+ };
+ Some((ix, token, bin_op))
+ })
+ }
+
+ pub fn op_kind(&self) -> Option<RangeOp> {
+ self.op_details().map(|t| t.2)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.op_details().map(|t| t.1)
+ }
+
+ pub fn start(&self) -> Option<ast::Expr> {
+ let op_ix = self.op_details()?.0;
+ self.syntax()
+ .children_with_tokens()
+ .take(op_ix)
+ .find_map(|it| ast::Expr::cast(it.into_node()?))
+ }
+
+ pub fn end(&self) -> Option<ast::Expr> {
+ let op_ix = self.op_details()?.0;
+ self.syntax()
+ .children_with_tokens()
+ .skip(op_ix + 1)
+ .find_map(|it| ast::Expr::cast(it.into_node()?))
+ }
+}
+
+impl ast::IndexExpr {
+ pub fn base(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).next()
+ }
+ pub fn index(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).nth(1)
+ }
+}
+
+pub enum ArrayExprKind {
+ Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
+ ElementList(AstChildren<ast::Expr>),
+}
+
+impl ast::ArrayExpr {
+ pub fn kind(&self) -> ArrayExprKind {
+ if self.is_repeat() {
+ ArrayExprKind::Repeat {
+ initializer: support::children(self.syntax()).next(),
+ repeat: support::children(self.syntax()).nth(1),
+ }
+ } else {
+ ArrayExprKind::ElementList(support::children(self.syntax()))
+ }
+ }
+
+ fn is_repeat(&self) -> bool {
+ self.semicolon_token().is_some()
+ }
+}
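+
+// Sketch: `[1, 2, 3]` has no semicolon and is classified as
+// `ArrayExprKind::ElementList(..)`, while `[0; 4]` is classified as
+// `ArrayExprKind::Repeat { .. }` with `0` as the initializer and `4` as the repeat
+// count.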
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum LiteralKind {
+ String(ast::String),
+ ByteString(ast::ByteString),
+ IntNumber(ast::IntNumber),
+ FloatNumber(ast::FloatNumber),
+ Char(ast::Char),
+ Byte(ast::Byte),
+ Bool(bool),
+}
+
+impl ast::Literal {
+ pub fn token(&self) -> SyntaxToken {
+ self.syntax()
+ .children_with_tokens()
+ .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+ .and_then(|e| e.into_token())
+ .unwrap()
+ }
+
+ pub fn kind(&self) -> LiteralKind {
+ let token = self.token();
+
+ if let Some(t) = ast::IntNumber::cast(token.clone()) {
+ return LiteralKind::IntNumber(t);
+ }
+ if let Some(t) = ast::FloatNumber::cast(token.clone()) {
+ return LiteralKind::FloatNumber(t);
+ }
+ if let Some(t) = ast::String::cast(token.clone()) {
+ return LiteralKind::String(t);
+ }
+ if let Some(t) = ast::ByteString::cast(token.clone()) {
+ return LiteralKind::ByteString(t);
+ }
+ if let Some(t) = ast::Char::cast(token.clone()) {
+ return LiteralKind::Char(t);
+ }
+ if let Some(t) = ast::Byte::cast(token.clone()) {
+ return LiteralKind::Byte(t);
+ }
+
+ match token.kind() {
+ T![true] => LiteralKind::Bool(true),
+ T![false] => LiteralKind::Bool(false),
+ _ => unreachable!(),
+ }
+ }
+}
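+
+// Sketch: classifying the literal in `const X: u32 = 92;` (with `parse` obtained as
+// in the tests in this file):
+//
+//     let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+//     match lit.kind() {
+//         LiteralKind::IntNumber(n) => assert_eq!(n.text(), "92"),
+//         _ => panic!("expected an integer literal"),
+//     }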
+
+pub enum BlockModifier {
+ Async(SyntaxToken),
+ Unsafe(SyntaxToken),
+ Try(SyntaxToken),
+ Const(SyntaxToken),
+ Label(ast::Label),
+}
+
+impl ast::BlockExpr {
+ pub fn modifier(&self) -> Option<BlockModifier> {
+ self.async_token()
+ .map(BlockModifier::Async)
+ .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
+ .or_else(|| self.try_token().map(BlockModifier::Try))
+ .or_else(|| self.const_token().map(BlockModifier::Const))
+ .or_else(|| self.label().map(BlockModifier::Label))
+ }
+ /// Returns `false` if the block is an intrinsic part of the syntax and can't be
+ /// replaced with an arbitrary expression.
+ ///
+ /// ```not_rust
+ /// fn foo() { not_stand_alone }
+ /// const FOO: () = { stand_alone };
+ /// ```
+ pub fn is_standalone(&self) -> bool {
+ let parent = match self.syntax().parent() {
+ Some(it) => it,
+ None => return true,
+ };
+ !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR)
+ }
+}
+
+#[test]
+fn test_literal_with_attr() {
+ let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
+ let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+ assert_eq!(lit.token().text(), r#""Hello""#);
+}
+
+impl ast::RecordExprField {
+ pub fn parent_record_lit(&self) -> ast::RecordExpr {
+ self.syntax().ancestors().find_map(ast::RecordExpr::cast).unwrap()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum CallableExpr {
+ Call(ast::CallExpr),
+ MethodCall(ast::MethodCallExpr),
+}
+
+impl ast::HasAttrs for CallableExpr {}
+impl ast::HasArgList for CallableExpr {}
+
+impl AstNode for CallableExpr {
+ fn can_cast(kind: parser::SyntaxKind) -> bool
+ where
+ Self: Sized,
+ {
+ ast::CallExpr::can_cast(kind) || ast::MethodCallExpr::can_cast(kind)
+ }
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ if let Some(it) = ast::CallExpr::cast(syntax.clone()) {
+ Some(Self::Call(it))
+ } else {
+ ast::MethodCallExpr::cast(syntax).map(Self::MethodCall)
+ }
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Self::Call(it) => it.syntax(),
+ Self::MethodCall(it) => it.syntax(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs
new file mode 100644
index 000000000..843b43cf0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs
@@ -0,0 +1,41 @@
+//! This file is actually hand-written, but the submodules are indeed generated.
+#[rustfmt::skip]
+pub(crate) mod nodes;
+#[rustfmt::skip]
+pub(crate) mod tokens;
+
+use crate::{
+ AstNode,
+ SyntaxKind::{self, *},
+ SyntaxNode,
+};
+
+pub(crate) use nodes::*;
+
+// Stmt is the only nested enum, so it's easier to just hand-write it
+impl AstNode for Stmt {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ LET_STMT | EXPR_STMT => true,
+ _ => Item::can_cast(kind),
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ LET_STMT => Stmt::LetStmt(LetStmt { syntax }),
+ EXPR_STMT => Stmt::ExprStmt(ExprStmt { syntax }),
+ _ => {
+ let item = Item::cast(syntax)?;
+ Stmt::Item(item)
+ }
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Stmt::LetStmt(it) => &it.syntax,
+ Stmt::ExprStmt(it) => &it.syntax,
+ Stmt::Item(it) => it.syntax(),
+ }
+ }
+}
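
As a hedged sketch of how the hand-written Stmt dispatch above is typically consumed, the snippet below iterates a block's statements and matches on the three variants. It assumes the generated accessors shown later in this diff (BlockExpr::stmt_list, StmtList::statements, LetStmt::pat, ExprStmt::expr); `classify_stmts` is a hypothetical helper, not part of the crate.

use syntax::ast;

// Minimal sketch: relies on the hand-written Stmt dispatch above and on the
// generated BlockExpr::stmt_list / StmtList::statements accessors from nodes.rs.
fn classify_stmts(block: &ast::BlockExpr) {
    let stmt_list = match block.stmt_list() {
        Some(it) => it,
        None => return,
    };
    for stmt in stmt_list.statements() {
        match stmt {
            ast::Stmt::LetStmt(it) => println!("let binding: {:?}", it.pat()),
            ast::Stmt::ExprStmt(it) => println!("expression statement: {:?}", it.expr()),
            ast::Stmt::Item(item) => println!("nested item: {:?}", item),
        }
    }
}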
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
new file mode 100644
index 000000000..63309a155
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -0,0 +1,4806 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+#![allow(non_snake_case)]
+use crate::{
+ ast::{self, support, AstChildren, AstNode},
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, T,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Name {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Name {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NameRef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NameRef {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+ pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn Self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![Self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Lifetime {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Lifetime {
+ pub fn lifetime_ident_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![lifetime_ident])
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Path {
+ pub fn qualifier(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn segment(&self) -> Option<PathSegment> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathSegment {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericArgList {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_args(&self) -> AstChildren<GenericArg> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParamList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn self_param(&self) -> Option<SelfParam> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+ pub fn params(&self) -> AstChildren<Param> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RetType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RetType {
+ pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathType {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeArg {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocTypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AssocTypeArg {}
+impl AssocTypeArg {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LifetimeArg {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstArg {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericParamList {
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBoundList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBoundList {
+ pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCall {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroCall {}
+impl ast::HasDocComments for MacroCall {}
+impl MacroCall {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Attr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Attr {
+ pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn meta(&self) -> Option<Meta> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TokenTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TokenTree {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroItems {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for MacroItems {}
+impl MacroItems {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroStmts {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroStmts {
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SourceFile {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SourceFile {}
+impl ast::HasModuleItem for SourceFile {}
+impl ast::HasDocComments for SourceFile {}
+impl SourceFile {
+ pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Const {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Const {}
+impl ast::HasName for Const {}
+impl ast::HasVisibility for Const {}
+impl ast::HasDocComments for Const {}
+impl Const {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Enum {}
+impl ast::HasName for Enum {}
+impl ast::HasVisibility for Enum {}
+impl ast::HasGenericParams for Enum {}
+impl ast::HasDocComments for Enum {}
+impl Enum {
+ pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) }
+ pub fn variant_list(&self) -> Option<VariantList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternBlock {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternBlock {}
+impl ast::HasDocComments for ExternBlock {}
+impl ExternBlock {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternCrate {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternCrate {}
+impl ast::HasVisibility for ExternCrate {}
+impl ast::HasDocComments for ExternCrate {}
+impl ExternCrate {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Fn {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Fn {}
+impl ast::HasName for Fn {}
+impl ast::HasVisibility for Fn {}
+impl ast::HasGenericParams for Fn {}
+impl ast::HasDocComments for Fn {}
+impl Fn {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Impl {}
+impl ast::HasVisibility for Impl {}
+impl ast::HasGenericParams for Impl {}
+impl ast::HasDocComments for Impl {}
+impl Impl {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroRules {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroRules {}
+impl ast::HasName for MacroRules {}
+impl ast::HasVisibility for MacroRules {}
+impl ast::HasDocComments for MacroRules {}
+impl MacroRules {
+ pub fn macro_rules_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![macro_rules])
+ }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroDef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroDef {}
+impl ast::HasName for MacroDef {}
+impl ast::HasVisibility for MacroDef {}
+impl ast::HasDocComments for MacroDef {}
+impl MacroDef {
+ pub fn macro_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro]) }
+ pub fn args(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Module {}
+impl ast::HasName for Module {}
+impl ast::HasVisibility for Module {}
+impl ast::HasDocComments for Module {}
+impl Module {
+ pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) }
+ pub fn item_list(&self) -> Option<ItemList> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Static {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Static {}
+impl ast::HasName for Static {}
+impl ast::HasVisibility for Static {}
+impl ast::HasDocComments for Static {}
+impl Static {
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Struct {}
+impl ast::HasName for Struct {}
+impl ast::HasVisibility for Struct {}
+impl ast::HasGenericParams for Struct {}
+impl ast::HasDocComments for Struct {}
+impl Struct {
+ pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Trait {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Trait {}
+impl ast::HasName for Trait {}
+impl ast::HasVisibility for Trait {}
+impl ast::HasGenericParams for Trait {}
+impl ast::HasTypeBounds for Trait {}
+impl ast::HasDocComments for Trait {}
+impl Trait {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
+ pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeAlias {}
+impl ast::HasName for TypeAlias {}
+impl ast::HasVisibility for TypeAlias {}
+impl ast::HasGenericParams for TypeAlias {}
+impl ast::HasTypeBounds for TypeAlias {}
+impl ast::HasDocComments for TypeAlias {}
+impl TypeAlias {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Union {}
+impl ast::HasName for Union {}
+impl ast::HasVisibility for Union {}
+impl ast::HasGenericParams for Union {}
+impl ast::HasDocComments for Union {}
+impl Union {
+ pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
+ pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Use {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Use {}
+impl ast::HasVisibility for Use {}
+impl ast::HasDocComments for Use {}
+impl Use {
+ pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
+ pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Visibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Visibility {
+ pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ItemList {}
+impl ast::HasModuleItem for ItemList {}
+impl ItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Rename {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for Rename {}
+impl Rename {
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTree {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTreeList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTreeList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Abi {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Abi {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhereClause {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WhereClause {
+ pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
+ pub fn predicates(&self) -> AstChildren<WherePred> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BlockExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BlockExpr {}
+impl BlockExpr {
+ pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
+ pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn stmt_list(&self) -> Option<StmtList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SelfParam {}
+impl ast::HasName for SelfParam {}
+impl SelfParam {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Param {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Param {}
+impl Param {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![...]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordField> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleFieldList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<TupleField> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordField {}
+impl ast::HasName for RecordField {}
+impl ast::HasVisibility for RecordField {}
+impl ast::HasDocComments for RecordField {}
+impl RecordField {
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleField {}
+impl ast::HasVisibility for TupleField {}
+impl ast::HasDocComments for TupleField {}
+impl TupleField {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct VariantList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl VariantList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn variants(&self) -> AstChildren<Variant> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Variant {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Variant {}
+impl ast::HasName for Variant {}
+impl ast::HasVisibility for Variant {}
+impl ast::HasDocComments for Variant {}
+impl Variant {
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AssocItemList {}
+impl AssocItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternItemList {}
+impl ExternItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn extern_items(&self) -> AstChildren<ExternItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ConstParam {}
+impl ast::HasName for ConstParam {}
+impl ConstParam {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_val(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LifetimeParam {}
+impl ast::HasTypeBounds for LifetimeParam {}
+impl LifetimeParam {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeParam {}
+impl ast::HasName for TypeParam {}
+impl ast::HasTypeBounds for TypeParam {}
+impl TypeParam {
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_type(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WherePred {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for WherePred {}
+impl WherePred {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Meta {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Meta {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExprStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ExprStmt {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetStmt {}
+impl LetStmt {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn initializer(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn let_else(&self) -> Option<LetElse> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetElse {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LetElse {
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ArrayExpr {}
+impl ArrayExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AwaitExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AwaitExpr {}
+impl AwaitExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BinExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BinExpr {}
+impl BinExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BoxExpr {}
+impl BoxExpr {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BreakExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BreakExpr {}
+impl BreakExpr {
+ pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CallExpr {}
+impl ast::HasArgList for CallExpr {}
+impl CallExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CastExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CastExpr {}
+impl CastExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ClosureExpr {}
+impl ClosureExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ContinueExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ContinueExpr {}
+impl ContinueExpr {
+ pub fn continue_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![continue])
+ }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for FieldExpr {}
+impl FieldExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ForExpr {}
+impl ForExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IfExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IfExpr {}
+impl IfExpr {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IndexExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IndexExpr {}
+impl IndexExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Literal {}
+impl Literal {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LoopExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LoopExpr {}
+impl ast::HasLoopBody for LoopExpr {}
+impl LoopExpr {
+ pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroExpr {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchExpr {}
+impl MatchExpr {
+ pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn match_arm_list(&self) -> Option<MatchArmList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MethodCallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MethodCallExpr {}
+impl ast::HasArgList for MethodCallExpr {}
+impl MethodCallExpr {
+ pub fn receiver(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ParenExpr {}
+impl ParenExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PathExpr {}
+impl PathExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PrefixExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PrefixExpr {}
+impl PrefixExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangeExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RangeExpr {}
+impl RangeExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_expr_field_list(&self) -> Option<RecordExprFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RefExpr {}
+impl RefExpr {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ReturnExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ReturnExpr {}
+impl ReturnExpr {
+ pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TryExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TryExpr {}
+impl TryExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleExpr {}
+impl TupleExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhileExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for WhileExpr {}
+impl WhileExpr {
+ pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct YieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for YieldExpr {}
+impl YieldExpr {
+ pub fn yield_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yield]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetExpr {}
+impl LetExpr {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UnderscoreExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for UnderscoreExpr {}
+impl UnderscoreExpr {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct StmtList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for StmtList {}
+impl StmtList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn tail_expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Label {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Label {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprFieldList {}
+impl RecordExprFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordExprField> { support::children(&self.syntax) }
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+ pub fn spread(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprField {}
+impl RecordExprField {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArgList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn args(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArmList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArmList {}
+impl MatchArmList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn arms(&self) -> AstChildren<MatchArm> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArm {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArm {}
+impl MatchArm {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn guard(&self) -> Option<MatchGuard> { support::child(&self.syntax) }
+ pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchGuard {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MatchGuard {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArrayType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DynTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl DynTraitType {
+ pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnPtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl FnPtrType {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ForType {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ImplTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ImplTraitType {
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct InferType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl InferType {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroType {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NeverType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NeverType {
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PtrType {
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefType {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SliceType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SliceType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Type> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBound {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBound {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+ pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IdentPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IdentPat {}
+impl ast::HasName for IdentPat {}
+impl IdentPat {
+ pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![@]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl BoxPat {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RestPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RestPat {}
+impl RestPat {
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LiteralPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LiteralPat {
+ pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroPat {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct OrPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl OrPat {
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenPat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WildcardPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WildcardPat {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RangePat {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_pat_field_list(&self) -> Option<RecordPatFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefPat {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SlicePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SlicePat {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TuplePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TuplePat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleStructPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleStructPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstBlockPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstBlockPat {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPatFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordPatField> { support::children(&self.syntax) }
+ pub fn rest_pat(&self) -> Option<RestPat> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordPatField {}
+impl RecordPatField {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ TypeArg(TypeArg),
+ AssocTypeArg(AssocTypeArg),
+ LifetimeArg(LifetimeArg),
+ ConstArg(ConstArg),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Type {
+ ArrayType(ArrayType),
+ DynTraitType(DynTraitType),
+ FnPtrType(FnPtrType),
+ ForType(ForType),
+ ImplTraitType(ImplTraitType),
+ InferType(InferType),
+ MacroType(MacroType),
+ NeverType(NeverType),
+ ParenType(ParenType),
+ PathType(PathType),
+ PtrType(PtrType),
+ RefType(RefType),
+ SliceType(SliceType),
+ TupleType(TupleType),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Expr {
+ ArrayExpr(ArrayExpr),
+ AwaitExpr(AwaitExpr),
+ BinExpr(BinExpr),
+ BlockExpr(BlockExpr),
+ BoxExpr(BoxExpr),
+ BreakExpr(BreakExpr),
+ CallExpr(CallExpr),
+ CastExpr(CastExpr),
+ ClosureExpr(ClosureExpr),
+ ContinueExpr(ContinueExpr),
+ FieldExpr(FieldExpr),
+ ForExpr(ForExpr),
+ IfExpr(IfExpr),
+ IndexExpr(IndexExpr),
+ Literal(Literal),
+ LoopExpr(LoopExpr),
+ MacroExpr(MacroExpr),
+ MacroStmts(MacroStmts),
+ MatchExpr(MatchExpr),
+ MethodCallExpr(MethodCallExpr),
+ ParenExpr(ParenExpr),
+ PathExpr(PathExpr),
+ PrefixExpr(PrefixExpr),
+ RangeExpr(RangeExpr),
+ RecordExpr(RecordExpr),
+ RefExpr(RefExpr),
+ ReturnExpr(ReturnExpr),
+ TryExpr(TryExpr),
+ TupleExpr(TupleExpr),
+ WhileExpr(WhileExpr),
+ YieldExpr(YieldExpr),
+ LetExpr(LetExpr),
+ UnderscoreExpr(UnderscoreExpr),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Item {
+ Const(Const),
+ Enum(Enum),
+ ExternBlock(ExternBlock),
+ ExternCrate(ExternCrate),
+ Fn(Fn),
+ Impl(Impl),
+ MacroCall(MacroCall),
+ MacroRules(MacroRules),
+ MacroDef(MacroDef),
+ Module(Module),
+ Static(Static),
+ Struct(Struct),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ Union(Union),
+ Use(Use),
+}
+impl ast::HasAttrs for Item {}
+impl ast::HasDocComments for Item {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Stmt {
+ ExprStmt(ExprStmt),
+ Item(Item),
+ LetStmt(LetStmt),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Pat {
+ IdentPat(IdentPat),
+ BoxPat(BoxPat),
+ RestPat(RestPat),
+ LiteralPat(LiteralPat),
+ MacroPat(MacroPat),
+ OrPat(OrPat),
+ ParenPat(ParenPat),
+ PathPat(PathPat),
+ WildcardPat(WildcardPat),
+ RangePat(RangePat),
+ RecordPat(RecordPat),
+ RefPat(RefPat),
+ SlicePat(SlicePat),
+ TuplePat(TuplePat),
+ TupleStructPat(TupleStructPat),
+ ConstBlockPat(ConstBlockPat),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum FieldList {
+ RecordFieldList(RecordFieldList),
+ TupleFieldList(TupleFieldList),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Enum(Enum),
+ Struct(Struct),
+ Union(Union),
+}
+impl ast::HasAttrs for Adt {}
+impl ast::HasDocComments for Adt {}
+impl ast::HasGenericParams for Adt {}
+impl ast::HasName for Adt {}
+impl ast::HasVisibility for Adt {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ Const(Const),
+ Fn(Fn),
+ MacroCall(MacroCall),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for AssocItem {}
+impl ast::HasDocComments for AssocItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ExternItem {
+ Fn(Fn),
+ MacroCall(MacroCall),
+ Static(Static),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for ExternItem {}
+impl ast::HasDocComments for ExternItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+ ConstParam(ConstParam),
+ LifetimeParam(LifetimeParam),
+ TypeParam(TypeParam),
+}
+impl ast::HasAttrs for GenericParam {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasArgList for AnyHasArgList {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasAttrs {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AnyHasAttrs {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasDocComments {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasDocComments for AnyHasDocComments {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasGenericParams {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasGenericParams for AnyHasGenericParams {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasLoopBody {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasLoopBody for AnyHasLoopBody {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasModuleItem {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for AnyHasModuleItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasName {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for AnyHasName {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasTypeBounds {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AnyHasTypeBounds {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasVisibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasVisibility for AnyHasVisibility {}
+impl AstNode for Name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NameRef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Lifetime {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Path {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathSegment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RetType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocTypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBoundList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroCall {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Attr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TokenTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroItems {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroStmts {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SourceFile {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Const {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Enum {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternBlock {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternCrate {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Fn {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Impl {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroRules {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_RULES }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroDef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_DEF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Module {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Static {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Struct {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Trait {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeAlias {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Union {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNION }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Use {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Visibility {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Rename {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTreeList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Abi {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ABI }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhereClause {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BlockExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SelfParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Param {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for VariantList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Variant {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WherePred {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Meta {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == META }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExprStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetElse {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_ELSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AwaitExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BinExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BreakExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CastExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ClosureExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ContinueExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IfExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IndexExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Literal {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LoopExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MethodCallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PrefixExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangeExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ReturnExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TryExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhileExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for YieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == YIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UnderscoreExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNDERSCORE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for StmtList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Label {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArmList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArm {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchGuard {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for DynTraitType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FnPtrType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ImplTraitType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for InferType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NeverType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PtrType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SliceType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBound {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IdentPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RestPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LiteralPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for OrPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WildcardPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SlicePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TuplePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleStructPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstBlockPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl From<TypeArg> for GenericArg {
+ fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+ fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+ fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+ fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+impl AstNode for GenericArg {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+ ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+ LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+ CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericArg::TypeArg(it) => &it.syntax,
+ GenericArg::AssocTypeArg(it) => &it.syntax,
+ GenericArg::LifetimeArg(it) => &it.syntax,
+ GenericArg::ConstArg(it) => &it.syntax,
+ }
+ }
+}
+impl From<ArrayType> for Type {
+ fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
+}
+impl From<DynTraitType> for Type {
+ fn from(node: DynTraitType) -> Type { Type::DynTraitType(node) }
+}
+impl From<FnPtrType> for Type {
+ fn from(node: FnPtrType) -> Type { Type::FnPtrType(node) }
+}
+impl From<ForType> for Type {
+ fn from(node: ForType) -> Type { Type::ForType(node) }
+}
+impl From<ImplTraitType> for Type {
+ fn from(node: ImplTraitType) -> Type { Type::ImplTraitType(node) }
+}
+impl From<InferType> for Type {
+ fn from(node: InferType) -> Type { Type::InferType(node) }
+}
+impl From<MacroType> for Type {
+ fn from(node: MacroType) -> Type { Type::MacroType(node) }
+}
+impl From<NeverType> for Type {
+ fn from(node: NeverType) -> Type { Type::NeverType(node) }
+}
+impl From<ParenType> for Type {
+ fn from(node: ParenType) -> Type { Type::ParenType(node) }
+}
+impl From<PathType> for Type {
+ fn from(node: PathType) -> Type { Type::PathType(node) }
+}
+impl From<PtrType> for Type {
+ fn from(node: PtrType) -> Type { Type::PtrType(node) }
+}
+impl From<RefType> for Type {
+ fn from(node: RefType) -> Type { Type::RefType(node) }
+}
+impl From<SliceType> for Type {
+ fn from(node: SliceType) -> Type { Type::SliceType(node) }
+}
+impl From<TupleType> for Type {
+ fn from(node: TupleType) -> Type { Type::TupleType(node) }
+}
+impl AstNode for Type {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ARRAY_TYPE | DYN_TRAIT_TYPE | FN_PTR_TYPE | FOR_TYPE | IMPL_TRAIT_TYPE | INFER_TYPE
+ | MACRO_TYPE | NEVER_TYPE | PAREN_TYPE | PATH_TYPE | PTR_TYPE | REF_TYPE
+ | SLICE_TYPE | TUPLE_TYPE => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_TYPE => Type::ArrayType(ArrayType { syntax }),
+ DYN_TRAIT_TYPE => Type::DynTraitType(DynTraitType { syntax }),
+ FN_PTR_TYPE => Type::FnPtrType(FnPtrType { syntax }),
+ FOR_TYPE => Type::ForType(ForType { syntax }),
+ IMPL_TRAIT_TYPE => Type::ImplTraitType(ImplTraitType { syntax }),
+ INFER_TYPE => Type::InferType(InferType { syntax }),
+ MACRO_TYPE => Type::MacroType(MacroType { syntax }),
+ NEVER_TYPE => Type::NeverType(NeverType { syntax }),
+ PAREN_TYPE => Type::ParenType(ParenType { syntax }),
+ PATH_TYPE => Type::PathType(PathType { syntax }),
+ PTR_TYPE => Type::PtrType(PtrType { syntax }),
+ REF_TYPE => Type::RefType(RefType { syntax }),
+ SLICE_TYPE => Type::SliceType(SliceType { syntax }),
+ TUPLE_TYPE => Type::TupleType(TupleType { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Type::ArrayType(it) => &it.syntax,
+ Type::DynTraitType(it) => &it.syntax,
+ Type::FnPtrType(it) => &it.syntax,
+ Type::ForType(it) => &it.syntax,
+ Type::ImplTraitType(it) => &it.syntax,
+ Type::InferType(it) => &it.syntax,
+ Type::MacroType(it) => &it.syntax,
+ Type::NeverType(it) => &it.syntax,
+ Type::ParenType(it) => &it.syntax,
+ Type::PathType(it) => &it.syntax,
+ Type::PtrType(it) => &it.syntax,
+ Type::RefType(it) => &it.syntax,
+ Type::SliceType(it) => &it.syntax,
+ Type::TupleType(it) => &it.syntax,
+ }
+ }
+}
+impl From<ArrayExpr> for Expr {
+ fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
+}
+impl From<AwaitExpr> for Expr {
+ fn from(node: AwaitExpr) -> Expr { Expr::AwaitExpr(node) }
+}
+impl From<BinExpr> for Expr {
+ fn from(node: BinExpr) -> Expr { Expr::BinExpr(node) }
+}
+impl From<BlockExpr> for Expr {
+ fn from(node: BlockExpr) -> Expr { Expr::BlockExpr(node) }
+}
+impl From<BoxExpr> for Expr {
+ fn from(node: BoxExpr) -> Expr { Expr::BoxExpr(node) }
+}
+impl From<BreakExpr> for Expr {
+ fn from(node: BreakExpr) -> Expr { Expr::BreakExpr(node) }
+}
+impl From<CallExpr> for Expr {
+ fn from(node: CallExpr) -> Expr { Expr::CallExpr(node) }
+}
+impl From<CastExpr> for Expr {
+ fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
+}
+impl From<ClosureExpr> for Expr {
+ fn from(node: ClosureExpr) -> Expr { Expr::ClosureExpr(node) }
+}
+impl From<ContinueExpr> for Expr {
+ fn from(node: ContinueExpr) -> Expr { Expr::ContinueExpr(node) }
+}
+impl From<FieldExpr> for Expr {
+ fn from(node: FieldExpr) -> Expr { Expr::FieldExpr(node) }
+}
+impl From<ForExpr> for Expr {
+ fn from(node: ForExpr) -> Expr { Expr::ForExpr(node) }
+}
+impl From<IfExpr> for Expr {
+ fn from(node: IfExpr) -> Expr { Expr::IfExpr(node) }
+}
+impl From<IndexExpr> for Expr {
+ fn from(node: IndexExpr) -> Expr { Expr::IndexExpr(node) }
+}
+impl From<Literal> for Expr {
+ fn from(node: Literal) -> Expr { Expr::Literal(node) }
+}
+impl From<LoopExpr> for Expr {
+ fn from(node: LoopExpr) -> Expr { Expr::LoopExpr(node) }
+}
+impl From<MacroExpr> for Expr {
+ fn from(node: MacroExpr) -> Expr { Expr::MacroExpr(node) }
+}
+impl From<MacroStmts> for Expr {
+ fn from(node: MacroStmts) -> Expr { Expr::MacroStmts(node) }
+}
+impl From<MatchExpr> for Expr {
+ fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) }
+}
+impl From<MethodCallExpr> for Expr {
+ fn from(node: MethodCallExpr) -> Expr { Expr::MethodCallExpr(node) }
+}
+impl From<ParenExpr> for Expr {
+ fn from(node: ParenExpr) -> Expr { Expr::ParenExpr(node) }
+}
+impl From<PathExpr> for Expr {
+ fn from(node: PathExpr) -> Expr { Expr::PathExpr(node) }
+}
+impl From<PrefixExpr> for Expr {
+ fn from(node: PrefixExpr) -> Expr { Expr::PrefixExpr(node) }
+}
+impl From<RangeExpr> for Expr {
+ fn from(node: RangeExpr) -> Expr { Expr::RangeExpr(node) }
+}
+impl From<RecordExpr> for Expr {
+ fn from(node: RecordExpr) -> Expr { Expr::RecordExpr(node) }
+}
+impl From<RefExpr> for Expr {
+ fn from(node: RefExpr) -> Expr { Expr::RefExpr(node) }
+}
+impl From<ReturnExpr> for Expr {
+ fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) }
+}
+impl From<TryExpr> for Expr {
+ fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
+}
+impl From<TupleExpr> for Expr {
+ fn from(node: TupleExpr) -> Expr { Expr::TupleExpr(node) }
+}
+impl From<WhileExpr> for Expr {
+ fn from(node: WhileExpr) -> Expr { Expr::WhileExpr(node) }
+}
+impl From<YieldExpr> for Expr {
+ fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) }
+}
+impl From<LetExpr> for Expr {
+ fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) }
+}
+impl From<UnderscoreExpr> for Expr {
+ fn from(node: UnderscoreExpr) -> Expr { Expr::UnderscoreExpr(node) }
+}
+impl AstNode for Expr {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BLOCK_EXPR | BOX_EXPR | BREAK_EXPR | CALL_EXPR
+ | CAST_EXPR | CLOSURE_EXPR | CONTINUE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR
+ | INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_EXPR | MACRO_STMTS | MATCH_EXPR
+ | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR
+ | RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR
+ | YIELD_EXPR | LET_EXPR | UNDERSCORE_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_EXPR => Expr::ArrayExpr(ArrayExpr { syntax }),
+ AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
+ BIN_EXPR => Expr::BinExpr(BinExpr { syntax }),
+ BLOCK_EXPR => Expr::BlockExpr(BlockExpr { syntax }),
+ BOX_EXPR => Expr::BoxExpr(BoxExpr { syntax }),
+ BREAK_EXPR => Expr::BreakExpr(BreakExpr { syntax }),
+ CALL_EXPR => Expr::CallExpr(CallExpr { syntax }),
+ CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
+ CLOSURE_EXPR => Expr::ClosureExpr(ClosureExpr { syntax }),
+ CONTINUE_EXPR => Expr::ContinueExpr(ContinueExpr { syntax }),
+ FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
+ FOR_EXPR => Expr::ForExpr(ForExpr { syntax }),
+ IF_EXPR => Expr::IfExpr(IfExpr { syntax }),
+ INDEX_EXPR => Expr::IndexExpr(IndexExpr { syntax }),
+ LITERAL => Expr::Literal(Literal { syntax }),
+ LOOP_EXPR => Expr::LoopExpr(LoopExpr { syntax }),
+ MACRO_EXPR => Expr::MacroExpr(MacroExpr { syntax }),
+ MACRO_STMTS => Expr::MacroStmts(MacroStmts { syntax }),
+ MATCH_EXPR => Expr::MatchExpr(MatchExpr { syntax }),
+ METHOD_CALL_EXPR => Expr::MethodCallExpr(MethodCallExpr { syntax }),
+ PAREN_EXPR => Expr::ParenExpr(ParenExpr { syntax }),
+ PATH_EXPR => Expr::PathExpr(PathExpr { syntax }),
+ PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
+ RANGE_EXPR => Expr::RangeExpr(RangeExpr { syntax }),
+ RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }),
+ REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
+ RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }),
+ TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
+ TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }),
+ WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }),
+ YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }),
+ LET_EXPR => Expr::LetExpr(LetExpr { syntax }),
+ UNDERSCORE_EXPR => Expr::UnderscoreExpr(UnderscoreExpr { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Expr::ArrayExpr(it) => &it.syntax,
+ Expr::AwaitExpr(it) => &it.syntax,
+ Expr::BinExpr(it) => &it.syntax,
+ Expr::BlockExpr(it) => &it.syntax,
+ Expr::BoxExpr(it) => &it.syntax,
+ Expr::BreakExpr(it) => &it.syntax,
+ Expr::CallExpr(it) => &it.syntax,
+ Expr::CastExpr(it) => &it.syntax,
+ Expr::ClosureExpr(it) => &it.syntax,
+ Expr::ContinueExpr(it) => &it.syntax,
+ Expr::FieldExpr(it) => &it.syntax,
+ Expr::ForExpr(it) => &it.syntax,
+ Expr::IfExpr(it) => &it.syntax,
+ Expr::IndexExpr(it) => &it.syntax,
+ Expr::Literal(it) => &it.syntax,
+ Expr::LoopExpr(it) => &it.syntax,
+ Expr::MacroExpr(it) => &it.syntax,
+ Expr::MacroStmts(it) => &it.syntax,
+ Expr::MatchExpr(it) => &it.syntax,
+ Expr::MethodCallExpr(it) => &it.syntax,
+ Expr::ParenExpr(it) => &it.syntax,
+ Expr::PathExpr(it) => &it.syntax,
+ Expr::PrefixExpr(it) => &it.syntax,
+ Expr::RangeExpr(it) => &it.syntax,
+ Expr::RecordExpr(it) => &it.syntax,
+ Expr::RefExpr(it) => &it.syntax,
+ Expr::ReturnExpr(it) => &it.syntax,
+ Expr::TryExpr(it) => &it.syntax,
+ Expr::TupleExpr(it) => &it.syntax,
+ Expr::WhileExpr(it) => &it.syntax,
+ Expr::YieldExpr(it) => &it.syntax,
+ Expr::LetExpr(it) => &it.syntax,
+ Expr::UnderscoreExpr(it) => &it.syntax,
+ }
+ }
+}
+impl From<Const> for Item {
+ fn from(node: Const) -> Item { Item::Const(node) }
+}
+impl From<Enum> for Item {
+ fn from(node: Enum) -> Item { Item::Enum(node) }
+}
+impl From<ExternBlock> for Item {
+ fn from(node: ExternBlock) -> Item { Item::ExternBlock(node) }
+}
+impl From<ExternCrate> for Item {
+ fn from(node: ExternCrate) -> Item { Item::ExternCrate(node) }
+}
+impl From<Fn> for Item {
+ fn from(node: Fn) -> Item { Item::Fn(node) }
+}
+impl From<Impl> for Item {
+ fn from(node: Impl) -> Item { Item::Impl(node) }
+}
+impl From<MacroCall> for Item {
+ fn from(node: MacroCall) -> Item { Item::MacroCall(node) }
+}
+impl From<MacroRules> for Item {
+ fn from(node: MacroRules) -> Item { Item::MacroRules(node) }
+}
+impl From<MacroDef> for Item {
+ fn from(node: MacroDef) -> Item { Item::MacroDef(node) }
+}
+impl From<Module> for Item {
+ fn from(node: Module) -> Item { Item::Module(node) }
+}
+impl From<Static> for Item {
+ fn from(node: Static) -> Item { Item::Static(node) }
+}
+impl From<Struct> for Item {
+ fn from(node: Struct) -> Item { Item::Struct(node) }
+}
+impl From<Trait> for Item {
+ fn from(node: Trait) -> Item { Item::Trait(node) }
+}
+impl From<TypeAlias> for Item {
+ fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) }
+}
+impl From<Union> for Item {
+ fn from(node: Union) -> Item { Item::Union(node) }
+}
+impl From<Use> for Item {
+ fn from(node: Use) -> Item { Item::Use(node) }
+}
+impl AstNode for Item {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL | MACRO_CALL | MACRO_RULES
+ | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => Item::Const(Const { syntax }),
+ ENUM => Item::Enum(Enum { syntax }),
+ EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
+ EXTERN_CRATE => Item::ExternCrate(ExternCrate { syntax }),
+ FN => Item::Fn(Fn { syntax }),
+ IMPL => Item::Impl(Impl { syntax }),
+ MACRO_CALL => Item::MacroCall(MacroCall { syntax }),
+ MACRO_RULES => Item::MacroRules(MacroRules { syntax }),
+ MACRO_DEF => Item::MacroDef(MacroDef { syntax }),
+ MODULE => Item::Module(Module { syntax }),
+ STATIC => Item::Static(Static { syntax }),
+ STRUCT => Item::Struct(Struct { syntax }),
+ TRAIT => Item::Trait(Trait { syntax }),
+ TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }),
+ UNION => Item::Union(Union { syntax }),
+ USE => Item::Use(Use { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Item::Const(it) => &it.syntax,
+ Item::Enum(it) => &it.syntax,
+ Item::ExternBlock(it) => &it.syntax,
+ Item::ExternCrate(it) => &it.syntax,
+ Item::Fn(it) => &it.syntax,
+ Item::Impl(it) => &it.syntax,
+ Item::MacroCall(it) => &it.syntax,
+ Item::MacroRules(it) => &it.syntax,
+ Item::MacroDef(it) => &it.syntax,
+ Item::Module(it) => &it.syntax,
+ Item::Static(it) => &it.syntax,
+ Item::Struct(it) => &it.syntax,
+ Item::Trait(it) => &it.syntax,
+ Item::TypeAlias(it) => &it.syntax,
+ Item::Union(it) => &it.syntax,
+ Item::Use(it) => &it.syntax,
+ }
+ }
+}
+impl From<ExprStmt> for Stmt {
+ fn from(node: ExprStmt) -> Stmt { Stmt::ExprStmt(node) }
+}
+impl From<Item> for Stmt {
+ fn from(node: Item) -> Stmt { Stmt::Item(node) }
+}
+impl From<LetStmt> for Stmt {
+ fn from(node: LetStmt) -> Stmt { Stmt::LetStmt(node) }
+}
+impl From<IdentPat> for Pat {
+ fn from(node: IdentPat) -> Pat { Pat::IdentPat(node) }
+}
+impl From<BoxPat> for Pat {
+ fn from(node: BoxPat) -> Pat { Pat::BoxPat(node) }
+}
+impl From<RestPat> for Pat {
+ fn from(node: RestPat) -> Pat { Pat::RestPat(node) }
+}
+impl From<LiteralPat> for Pat {
+ fn from(node: LiteralPat) -> Pat { Pat::LiteralPat(node) }
+}
+impl From<MacroPat> for Pat {
+ fn from(node: MacroPat) -> Pat { Pat::MacroPat(node) }
+}
+impl From<OrPat> for Pat {
+ fn from(node: OrPat) -> Pat { Pat::OrPat(node) }
+}
+impl From<ParenPat> for Pat {
+ fn from(node: ParenPat) -> Pat { Pat::ParenPat(node) }
+}
+impl From<PathPat> for Pat {
+ fn from(node: PathPat) -> Pat { Pat::PathPat(node) }
+}
+impl From<WildcardPat> for Pat {
+ fn from(node: WildcardPat) -> Pat { Pat::WildcardPat(node) }
+}
+impl From<RangePat> for Pat {
+ fn from(node: RangePat) -> Pat { Pat::RangePat(node) }
+}
+impl From<RecordPat> for Pat {
+ fn from(node: RecordPat) -> Pat { Pat::RecordPat(node) }
+}
+impl From<RefPat> for Pat {
+ fn from(node: RefPat) -> Pat { Pat::RefPat(node) }
+}
+impl From<SlicePat> for Pat {
+ fn from(node: SlicePat) -> Pat { Pat::SlicePat(node) }
+}
+impl From<TuplePat> for Pat {
+ fn from(node: TuplePat) -> Pat { Pat::TuplePat(node) }
+}
+impl From<TupleStructPat> for Pat {
+ fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) }
+}
+impl From<ConstBlockPat> for Pat {
+ fn from(node: ConstBlockPat) -> Pat { Pat::ConstBlockPat(node) }
+}
+impl AstNode for Pat {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT
+ | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT
+ | TUPLE_PAT | TUPLE_STRUCT_PAT | CONST_BLOCK_PAT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ IDENT_PAT => Pat::IdentPat(IdentPat { syntax }),
+ BOX_PAT => Pat::BoxPat(BoxPat { syntax }),
+ REST_PAT => Pat::RestPat(RestPat { syntax }),
+ LITERAL_PAT => Pat::LiteralPat(LiteralPat { syntax }),
+ MACRO_PAT => Pat::MacroPat(MacroPat { syntax }),
+ OR_PAT => Pat::OrPat(OrPat { syntax }),
+ PAREN_PAT => Pat::ParenPat(ParenPat { syntax }),
+ PATH_PAT => Pat::PathPat(PathPat { syntax }),
+ WILDCARD_PAT => Pat::WildcardPat(WildcardPat { syntax }),
+ RANGE_PAT => Pat::RangePat(RangePat { syntax }),
+ RECORD_PAT => Pat::RecordPat(RecordPat { syntax }),
+ REF_PAT => Pat::RefPat(RefPat { syntax }),
+ SLICE_PAT => Pat::SlicePat(SlicePat { syntax }),
+ TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }),
+ TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }),
+ CONST_BLOCK_PAT => Pat::ConstBlockPat(ConstBlockPat { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Pat::IdentPat(it) => &it.syntax,
+ Pat::BoxPat(it) => &it.syntax,
+ Pat::RestPat(it) => &it.syntax,
+ Pat::LiteralPat(it) => &it.syntax,
+ Pat::MacroPat(it) => &it.syntax,
+ Pat::OrPat(it) => &it.syntax,
+ Pat::ParenPat(it) => &it.syntax,
+ Pat::PathPat(it) => &it.syntax,
+ Pat::WildcardPat(it) => &it.syntax,
+ Pat::RangePat(it) => &it.syntax,
+ Pat::RecordPat(it) => &it.syntax,
+ Pat::RefPat(it) => &it.syntax,
+ Pat::SlicePat(it) => &it.syntax,
+ Pat::TuplePat(it) => &it.syntax,
+ Pat::TupleStructPat(it) => &it.syntax,
+ Pat::ConstBlockPat(it) => &it.syntax,
+ }
+ }
+}
+impl From<RecordFieldList> for FieldList {
+ fn from(node: RecordFieldList) -> FieldList { FieldList::RecordFieldList(node) }
+}
+impl From<TupleFieldList> for FieldList {
+ fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) }
+}
+impl AstNode for FieldList {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ RECORD_FIELD_LIST | TUPLE_FIELD_LIST => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }),
+ TUPLE_FIELD_LIST => FieldList::TupleFieldList(TupleFieldList { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ FieldList::RecordFieldList(it) => &it.syntax,
+ FieldList::TupleFieldList(it) => &it.syntax,
+ }
+ }
+}
+impl From<Enum> for Adt {
+ fn from(node: Enum) -> Adt { Adt::Enum(node) }
+}
+impl From<Struct> for Adt {
+ fn from(node: Struct) -> Adt { Adt::Struct(node) }
+}
+impl From<Union> for Adt {
+ fn from(node: Union) -> Adt { Adt::Union(node) }
+}
+impl AstNode for Adt {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ENUM | STRUCT | UNION => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ENUM => Adt::Enum(Enum { syntax }),
+ STRUCT => Adt::Struct(Struct { syntax }),
+ UNION => Adt::Union(Union { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Adt::Enum(it) => &it.syntax,
+ Adt::Struct(it) => &it.syntax,
+ Adt::Union(it) => &it.syntax,
+ }
+ }
+}
+impl From<Const> for AssocItem {
+ fn from(node: Const) -> AssocItem { AssocItem::Const(node) }
+}
+impl From<Fn> for AssocItem {
+ fn from(node: Fn) -> AssocItem { AssocItem::Fn(node) }
+}
+impl From<MacroCall> for AssocItem {
+ fn from(node: MacroCall) -> AssocItem { AssocItem::MacroCall(node) }
+}
+impl From<TypeAlias> for AssocItem {
+ fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) }
+}
+impl AstNode for AssocItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | FN | MACRO_CALL | TYPE_ALIAS => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => AssocItem::Const(Const { syntax }),
+ FN => AssocItem::Fn(Fn { syntax }),
+ MACRO_CALL => AssocItem::MacroCall(MacroCall { syntax }),
+ TYPE_ALIAS => AssocItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ AssocItem::Const(it) => &it.syntax,
+ AssocItem::Fn(it) => &it.syntax,
+ AssocItem::MacroCall(it) => &it.syntax,
+ AssocItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+impl From<Fn> for ExternItem {
+ fn from(node: Fn) -> ExternItem { ExternItem::Fn(node) }
+}
+impl From<MacroCall> for ExternItem {
+ fn from(node: MacroCall) -> ExternItem { ExternItem::MacroCall(node) }
+}
+impl From<Static> for ExternItem {
+ fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
+}
+impl From<TypeAlias> for ExternItem {
+ fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) }
+}
+impl AstNode for ExternItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ FN | MACRO_CALL | STATIC | TYPE_ALIAS => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ FN => ExternItem::Fn(Fn { syntax }),
+ MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
+ STATIC => ExternItem::Static(Static { syntax }),
+ TYPE_ALIAS => ExternItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ ExternItem::Fn(it) => &it.syntax,
+ ExternItem::MacroCall(it) => &it.syntax,
+ ExternItem::Static(it) => &it.syntax,
+ ExternItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+impl From<ConstParam> for GenericParam {
+ fn from(node: ConstParam) -> GenericParam { GenericParam::ConstParam(node) }
+}
+impl From<LifetimeParam> for GenericParam {
+ fn from(node: LifetimeParam) -> GenericParam { GenericParam::LifetimeParam(node) }
+}
+impl From<TypeParam> for GenericParam {
+ fn from(node: TypeParam) -> GenericParam { GenericParam::TypeParam(node) }
+}
+impl AstNode for GenericParam {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST_PARAM => GenericParam::ConstParam(ConstParam { syntax }),
+ LIFETIME_PARAM => GenericParam::LifetimeParam(LifetimeParam { syntax }),
+ TYPE_PARAM => GenericParam::TypeParam(TypeParam { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericParam::ConstParam(it) => &it.syntax,
+ GenericParam::LifetimeParam(it) => &it.syntax,
+ GenericParam::TypeParam(it) => &it.syntax,
+ }
+ }
+}
+impl AnyHasArgList {
+ #[inline]
+ pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
+ AnyHasArgList { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasArgList {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CALL_EXPR | METHOD_CALL_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasAttrs {
+ #[inline]
+ pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs {
+ AnyHasAttrs { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasAttrs {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_CALL
+ | SOURCE_FILE
+ | CONST
+ | ENUM
+ | EXTERN_BLOCK
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ | ITEM_LIST
+ | BLOCK_EXPR
+ | SELF_PARAM
+ | PARAM
+ | RECORD_FIELD
+ | TUPLE_FIELD
+ | VARIANT
+ | ASSOC_ITEM_LIST
+ | EXTERN_ITEM_LIST
+ | CONST_PARAM
+ | LIFETIME_PARAM
+ | TYPE_PARAM
+ | LET_STMT
+ | ARRAY_EXPR
+ | AWAIT_EXPR
+ | BIN_EXPR
+ | BOX_EXPR
+ | BREAK_EXPR
+ | CALL_EXPR
+ | CAST_EXPR
+ | CLOSURE_EXPR
+ | CONTINUE_EXPR
+ | FIELD_EXPR
+ | FOR_EXPR
+ | IF_EXPR
+ | INDEX_EXPR
+ | LITERAL
+ | LOOP_EXPR
+ | MATCH_EXPR
+ | METHOD_CALL_EXPR
+ | PAREN_EXPR
+ | PATH_EXPR
+ | PREFIX_EXPR
+ | RANGE_EXPR
+ | REF_EXPR
+ | RETURN_EXPR
+ | TRY_EXPR
+ | TUPLE_EXPR
+ | WHILE_EXPR
+ | YIELD_EXPR
+ | LET_EXPR
+ | UNDERSCORE_EXPR
+ | STMT_LIST
+ | RECORD_EXPR_FIELD_LIST
+ | RECORD_EXPR_FIELD
+ | MATCH_ARM_LIST
+ | MATCH_ARM
+ | IDENT_PAT
+ | REST_PAT
+ | RECORD_PAT_FIELD => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasDocComments {
+ #[inline]
+ pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments {
+ AnyHasDocComments { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasDocComments {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_CALL | SOURCE_FILE | CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL
+ | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION
+ | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasGenericParams {
+ #[inline]
+ pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams {
+ AnyHasGenericParams { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasGenericParams {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasLoopBody {
+ #[inline]
+ pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
+ AnyHasLoopBody { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasLoopBody {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ FOR_EXPR | LOOP_EXPR | WHILE_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasModuleItem {
+ #[inline]
+ pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
+ AnyHasModuleItem { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasModuleItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasName {
+ #[inline]
+ pub fn new<T: ast::HasName>(node: T) -> AnyHasName {
+ AnyHasName { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasName {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | FN | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT
+ | TYPE_ALIAS | UNION | RENAME | SELF_PARAM | RECORD_FIELD | VARIANT | CONST_PARAM
+ | TYPE_PARAM | IDENT_PAT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasTypeBounds {
+ #[inline]
+ pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds {
+ AnyHasTypeBounds { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasTypeBounds {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasVisibility {
+ #[inline]
+ pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility {
+ AnyHasVisibility { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasVisibility {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | EXTERN_CRATE | FN | IMPL | MACRO_RULES | MACRO_DEF | MODULE | STATIC
+ | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => {
+ true
+ }
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl std::fmt::Display for GenericArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Type {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Expr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Item {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Stmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Pat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Adt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NameRef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Lifetime {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Path {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathSegment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RetType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocTypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBoundList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroCall {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Attr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroItems {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroStmts {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SourceFile {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Const {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Enum {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternBlock {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternCrate {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Fn {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Impl {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroRules {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroDef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Module {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Static {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Struct {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Trait {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeAlias {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Union {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Use {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Visibility {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Rename {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTreeList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Abi {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhereClause {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BlockExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SelfParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Param {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for VariantList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Variant {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WherePred {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Meta {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExprStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetElse {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AwaitExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BinExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BreakExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CastExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ClosureExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ContinueExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IfExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IndexExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Literal {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LoopExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MethodCallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PrefixExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangeExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ReturnExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TryExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhileExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for YieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UnderscoreExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for StmtList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Label {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArmList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArm {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchGuard {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for DynTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FnPtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ImplTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for InferType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NeverType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SliceType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBound {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IdentPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RestPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LiteralPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for OrPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WildcardPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SlicePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TuplePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleStructPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstBlockPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
new file mode 100644
index 000000000..a3209c5ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
@@ -0,0 +1,196 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+use crate::{
+ ast::AstToken,
+ SyntaxKind::{self, *},
+ SyntaxToken,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Whitespace {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Whitespace {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Whitespace {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHITESPACE }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Comment {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Comment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Comment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == COMMENT }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct String {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for String {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for String {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRING }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ByteString {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for ByteString {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for ByteString {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IntNumber {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for IntNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for IntNumber {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INT_NUMBER }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FloatNumber {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for FloatNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for FloatNumber {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FLOAT_NUMBER }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Char {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Char {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Char {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CHAR }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Byte {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Byte {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Byte {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Ident {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Ident {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
new file mode 100644
index 000000000..5908dda8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -0,0 +1,901 @@
+//! This module contains free-standing functions for creating AST fragments out
+//! of smaller pieces.
+//!
+//! Note that all functions here are intended to be stupid constructors, which
+//! just assemble a finished node from immediate children. If you want to do
+//! something smarter than that, it belongs to the `ext` submodule.
+//!
+//! Keep in mind that `from_text` functions should be kept private. The public
+//! API should require assembling every node piecewise. The trick of
+//! `parse(format!())` we use internally is an implementation detail -- long
+//! term, it will be replaced with direct tree manipulation.
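+//
+// A minimal usage sketch (illustrative only; `Foo` is an arbitrary example name)
+// using the constructors defined below: an `Option<Foo>` type node is assembled
+// piecewise instead of being parsed from a hand-written string:
+//
+//     let foo: ast::Type = ty_path(ext::ident_path("Foo"));
+//     let opt: ast::Type = ext::ty_option(foo);
+//     assert_eq!(opt.to_string(), "Option<Foo>");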
+use itertools::Itertools;
+use stdx::{format_to, never};
+
+use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
+
+/// While the parent module defines basic atomic "constructors", the `ext`
+/// module defines shortcuts for common things.
+///
+/// It's named `ext` rather than `shortcuts` just to keep it short.
+pub mod ext {
+ use super::*;
+
+ pub fn simple_ident_pat(name: ast::Name) -> ast::IdentPat {
+ return from_text(&name.text());
+
+ fn from_text(text: &str) -> ast::IdentPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+ }
+ pub fn ident_path(ident: &str) -> ast::Path {
+ path_unqualified(path_segment(name_ref(ident)))
+ }
+
+ pub fn path_from_idents<'a>(
+ parts: impl std::iter::IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Path> {
+ let mut iter = parts.into_iter();
+ let base = ext::ident_path(iter.next()?);
+ let path = iter.fold(base, |base, s| {
+ let path = ext::ident_path(s);
+ path_concat(base, path)
+ });
+ Some(path)
+ }
+
+ pub fn field_from_idents<'a>(
+ parts: impl std::iter::IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Expr> {
+ let mut iter = parts.into_iter();
+ let base = expr_path(ext::ident_path(iter.next()?));
+ let expr = iter.fold(base, expr_field);
+ Some(expr)
+ }
+
+ pub fn expr_unreachable() -> ast::Expr {
+ expr_from_text("unreachable!()")
+ }
+ pub fn expr_todo() -> ast::Expr {
+ expr_from_text("todo!()")
+ }
+ pub fn expr_ty_default(ty: &ast::Type) -> ast::Expr {
+ expr_from_text(&format!("{}::default()", ty))
+ }
+ pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr {
+ expr_from_text(&format!("{}::new()", ty))
+ }
+
+ pub fn zero_number() -> ast::Expr {
+ expr_from_text("0")
+ }
+ pub fn zero_float() -> ast::Expr {
+ expr_from_text("0.0")
+ }
+ pub fn empty_str() -> ast::Expr {
+ expr_from_text(r#""""#)
+ }
+ pub fn empty_char() -> ast::Expr {
+ expr_from_text("'\x00'")
+ }
+ pub fn default_bool() -> ast::Expr {
+ expr_from_text("false")
+ }
+ pub fn option_none() -> ast::Expr {
+ expr_from_text("None")
+ }
+ pub fn empty_block_expr() -> ast::BlockExpr {
+ block_expr(None, None)
+ }
+
+ pub fn ty_bool() -> ast::Type {
+ ty_path(ident_path("bool"))
+ }
+ pub fn ty_option(t: ast::Type) -> ast::Type {
+ ty_from_text(&format!("Option<{}>", t))
+ }
+ pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type {
+ ty_from_text(&format!("Result<{}, {}>", t, e))
+ }
+}
+
+pub fn name(text: &str) -> ast::Name {
+ ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text))
+}
+pub fn name_ref(text: &str) -> ast::NameRef {
+ ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text))
+}
+fn raw_ident_esc(ident: &str) -> &'static str {
+ let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some();
+ if is_keyword && !matches!(ident, "self" | "crate" | "super" | "Self") {
+ "r#"
+ } else {
+ ""
+ }
+}
+
+pub fn lifetime(text: &str) -> ast::Lifetime {
+ let mut text = text;
+ let tmp;
+ if never!(!text.starts_with('\'')) {
+ tmp = format!("'{}", text);
+ text = &tmp;
+ }
+ ast_from_text(&format!("fn f<{}>() {{ }}", text))
+}
+
+// FIXME: replace stringly-typed constructor with a family of typed ctors, a-la
+// `expr_xxx`.
+pub fn ty(text: &str) -> ast::Type {
+ ty_from_text(text)
+}
+pub fn ty_placeholder() -> ast::Type {
+ ty_from_text("_")
+}
+pub fn ty_unit() -> ast::Type {
+ ty_from_text("()")
+}
+pub fn ty_tuple(types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
+ let mut count: usize = 0;
+ let mut contents = types.into_iter().inspect(|_| count += 1).join(", ");
+ if count == 1 {
+ contents.push(',');
+ }
+
+ ty_from_text(&format!("({})", contents))
+}
+pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type {
+ ty_from_text(&if exclusive { format!("&mut {}", target) } else { format!("&{}", target) })
+}
+pub fn ty_path(path: ast::Path) -> ast::Type {
+ ty_from_text(&path.to_string())
+}
+fn ty_from_text(text: &str) -> ast::Type {
+ ast_from_text(&format!("type _T = {};", text))
+}
+
+pub fn assoc_item_list() -> ast::AssocItemList {
+ ast_from_text("impl C for D {}")
+}
+
+pub fn impl_(
+ ty: ast::Path,
+ params: Option<ast::GenericParamList>,
+ ty_params: Option<ast::GenericParamList>,
+) -> ast::Impl {
+ let params = match params {
+ Some(params) => params.to_string(),
+ None => String::new(),
+ };
+ let ty_params = match ty_params {
+ Some(params) => params.to_string(),
+ None => String::new(),
+ };
+ ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params))
+}
+
+pub fn impl_trait(
+ trait_: ast::Path,
+ ty: ast::Path,
+ ty_params: Option<ast::GenericParamList>,
+) -> ast::Impl {
+ let ty_params = ty_params.map_or_else(String::new, |params| params.to_string());
+ ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params))
+}
+
+pub(crate) fn generic_arg_list() -> ast::GenericArgList {
+ ast_from_text("const S: T<> = ();")
+}
+
+pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
+ ast_from_text(&format!("type __ = {};", name_ref))
+}
+
+pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option<ast::PathType>) -> ast::PathSegment {
+ let text = match trait_ref {
+ Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref),
+ None => format!("fn f(x: <{}>) {{}}", type_ref),
+ };
+ ast_from_text(&text)
+}
+
+pub fn path_segment_self() -> ast::PathSegment {
+ ast_from_text("use self;")
+}
+
+pub fn path_segment_super() -> ast::PathSegment {
+ ast_from_text("use super;")
+}
+
+pub fn path_segment_crate() -> ast::PathSegment {
+ ast_from_text("use crate;")
+}
+
+pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
+ ast_from_text(&format!("type __ = {};", segment))
+}
+
+pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
+ ast_from_text(&format!("{}::{}", qual, segment))
+}
+// FIXME: path concatenation operation doesn't make sense as AST op.
+pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path {
+ ast_from_text(&format!("type __ = {}::{};", first, second))
+}
+
+pub fn path_from_segments(
+ segments: impl IntoIterator<Item = ast::PathSegment>,
+ is_abs: bool,
+) -> ast::Path {
+ let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::");
+ ast_from_text(&if is_abs {
+ format!("fn f(x: ::{}) {{}}", segments)
+ } else {
+ format!("fn f(x: {}) {{}}", segments)
+ })
+}
+
+pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {
+ let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::");
+ ast_from_text(&format!("type __ = {};", paths))
+}
+
+// FIXME: should not be pub
+pub fn path_from_text(text: &str) -> ast::Path {
+ ast_from_text(&format!("fn main() {{ let test = {}; }}", text))
+}
+
+pub fn use_tree_glob() -> ast::UseTree {
+ ast_from_text("use *;")
+}
+pub fn use_tree(
+ path: ast::Path,
+ use_tree_list: Option<ast::UseTreeList>,
+ alias: Option<ast::Rename>,
+ add_star: bool,
+) -> ast::UseTree {
+ let mut buf = "use ".to_string();
+ buf += &path.syntax().to_string();
+ if let Some(use_tree_list) = use_tree_list {
+ format_to!(buf, "::{}", use_tree_list);
+ }
+ if add_star {
+ buf += "::*";
+ }
+
+ if let Some(alias) = alias {
+ format_to!(buf, " {}", alias);
+ }
+ ast_from_text(&buf)
+}
+
+pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
+ let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
+ ast_from_text(&format!("use {{{}}};", use_trees))
+}
+
+pub fn use_(visibility: Option<ast::Visibility>, use_tree: ast::UseTree) -> ast::Use {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("{}use {};", visibility, use_tree))
+}
+
+pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr {
+ ast_from_text(&format!("fn f() {{ {} {} }}", path, fields))
+}
+
+pub fn record_expr_field_list(
+ fields: impl IntoIterator<Item = ast::RecordExprField>,
+) -> ast::RecordExprFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields))
+}
+
+pub fn record_expr_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordExprField {
+ return match expr {
+ Some(expr) => from_text(&format!("{}: {}", name, expr)),
+ None => from_text(&name.to_string()),
+ };
+
+ fn from_text(text: &str) -> ast::RecordExprField {
+ ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text))
+ }
+}
+
+pub fn record_field(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+) -> ast::RecordField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty))
+}
+
+// TODO
+pub fn block_expr(
+ stmts: impl IntoIterator<Item = ast::Stmt>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for stmt in stmts.into_iter() {
+ format_to!(buf, " {}\n", stmt);
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
+
+/// Ideally this function wouldn't exist since it involves manual indenting.
+/// It differs from `make::block_expr` by also supporting comments.
+///
+/// FIXME: replace usages of this with the mutable syntax tree API
+pub fn hacky_block_expr_with_comments(
+ elements: impl IntoIterator<Item = crate::SyntaxElement>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for node_or_token in elements.into_iter() {
+ match node_or_token {
+ rowan::NodeOrToken::Node(n) => format_to!(buf, " {}\n", n),
+ rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => {
+ format_to!(buf, " {}\n", t)
+ }
+ _ => (),
+ }
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
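+
+// Illustrative sketch of the difference from `block_expr` (assumes an existing
+// `old_block: ast::BlockExpr` taken from parsed source): feeding the old block's
+// child elements through this constructor re-emits its comment tokens, which the
+// statement-only `block_expr` above would drop.
+//
+//     let elements = old_block.stmt_list()?.syntax().children_with_tokens();
+//     let new_block = hacky_block_expr_with_comments(elements, None);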
+
+pub fn expr_unit() -> ast::Expr {
+ expr_from_text("()")
+}
+pub fn expr_literal(text: &str) -> ast::Literal {
+ assert_eq!(text.trim(), text);
+ ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
+}
+
+pub fn expr_empty_block() -> ast::Expr {
+ expr_from_text("{}")
+}
+pub fn expr_path(path: ast::Path) -> ast::Expr {
+ expr_from_text(&path.to_string())
+}
+pub fn expr_continue(label: Option<ast::Lifetime>) -> ast::Expr {
+ match label {
+ Some(label) => expr_from_text(&format!("continue {}", label)),
+ None => expr_from_text("continue"),
+ }
+}
+// Consider `op: SyntaxKind` instead for nicer syntax at the call-site?
+pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} {} {}", lhs, op, rhs))
+}
+pub fn expr_break(label: Option<ast::Lifetime>, expr: Option<ast::Expr>) -> ast::Expr {
+ let mut s = String::from("break");
+
+ if let Some(label) = label {
+ format_to!(s, " {}", label);
+ }
+
+ if let Some(expr) = expr {
+ format_to!(s, " {}", expr);
+ }
+
+ expr_from_text(&s)
+}
+pub fn expr_return(expr: Option<ast::Expr>) -> ast::Expr {
+ match expr {
+ Some(expr) => expr_from_text(&format!("return {}", expr)),
+ None => expr_from_text("return"),
+ }
+}
+pub fn expr_try(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}?", expr))
+}
+pub fn expr_await(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}.await", expr))
+}
+pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
+ expr_from_text(&format!("match {} {}", expr, match_arm_list))
+}
+pub fn expr_if(
+ condition: ast::Expr,
+ then_branch: ast::BlockExpr,
+ else_branch: Option<ast::ElseBranch>,
+) -> ast::Expr {
+ let else_branch = match else_branch {
+ Some(ast::ElseBranch::Block(block)) => format!("else {}", block),
+ Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr),
+ None => String::new(),
+ };
+ expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch))
+}
+pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("for {} in {} {}", pat, expr, block))
+}
+
+pub fn expr_loop(block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("loop {}", block))
+}
+
+pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
+ let token = token(op);
+ expr_from_text(&format!("{}{}", token, expr))
+}
+pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}{}", f, arg_list))
+}
+pub fn expr_method_call(
+ receiver: ast::Expr,
+ method: ast::NameRef,
+ arg_list: ast::ArgList,
+) -> ast::Expr {
+ expr_from_text(&format!("{}.{}{}", receiver, method, arg_list))
+}
+pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}!{}", f, arg_list))
+}
+pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
+ expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) })
+}
+pub fn expr_closure(pats: impl IntoIterator<Item = ast::Param>, expr: ast::Expr) -> ast::Expr {
+ let params = pats.into_iter().join(", ");
+ expr_from_text(&format!("|{}| {}", params, expr))
+}
+pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr {
+ expr_from_text(&format!("{}.{}", receiver, field))
+}
+pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("({})", expr))
+}
+pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::Expr {
+ let expr = elements.into_iter().format(", ");
+ expr_from_text(&format!("({})", expr))
+}
+pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} = {}", lhs, rhs))
+}
+fn expr_from_text(text: &str) -> ast::Expr {
+ ast_from_text(&format!("const C: () = {};", text))
+}
+pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr {
+ ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
+}
+
+pub fn arg_list(args: impl IntoIterator<Item = ast::Expr>) -> ast::ArgList {
+ ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", ")))
+}
+
+pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
+ let mut s = String::from("fn f(");
+ if ref_ {
+ s.push_str("ref ");
+ }
+ if mut_ {
+ s.push_str("mut ");
+ }
+ format_to!(s, "{}", name);
+ s.push_str(": ())");
+ ast_from_text(&s)
+}
+
+pub fn wildcard_pat() -> ast::WildcardPat {
+ return from_text("_");
+
+ fn from_text(text: &str) -> ast::WildcardPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn literal_pat(lit: &str) -> ast::LiteralPat {
+ return from_text(lit);
+
+ fn from_text(text: &str) -> ast::LiteralPat {
+ ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text))
+ }
+}
+
+/// Creates a tuple of patterns from an iterator of patterns.
+///
+/// Invariant: `pats` must not be empty.
+pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
+ let mut count: usize = 0;
+ let mut pats_str = pats.into_iter().inspect(|_| count += 1).join(", ");
+ if count == 1 {
+ pats_str.push(',');
+ }
+ return from_text(&format!("({})", pats_str));
+
+ fn from_text(text: &str) -> ast::TuplePat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
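
The trailing comma added above is what keeps a one-element tuple pattern a tuple: `(_)` would re-parse as a plain parenthesized pattern. A minimal sketch, assuming the usual `syntax::ast::make` path:

    use syntax::ast::{self, make};

    fn one_element_tuple_pat() -> ast::TuplePat {
        // Renders as `(_,)`, not `(_)`.
        make::tuple_pat([ast::Pat::WildcardPat(make::wildcard_pat())])
    }
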
+
+pub fn tuple_struct_pat(
+ path: ast::Path,
+ pats: impl IntoIterator<Item = ast::Pat>,
+) -> ast::TupleStructPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{}({})", path, pats_str));
+
+ fn from_text(text: &str) -> ast::TupleStructPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{} {{ {} }}", path, pats_str));
+
+ fn from_text(text: &str) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({} {}: ()))", path, fields))
+}
+
+pub fn record_pat_field_list(
+ fields: impl IntoIterator<Item = ast::RecordPatField>,
+) -> ast::RecordPatFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields))
+}
+
+pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat))
+}
+
+pub fn record_pat_field_shorthand(name_ref: ast::NameRef) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", name_ref))
+}
+
+/// Returns an `IdentPat` if the path has just one segment, a `PathPat` otherwise.
+pub fn path_pat(path: ast::Path) -> ast::Pat {
+ return from_text(&path.to_string());
+ fn from_text(text: &str) -> ast::Pat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn match_arm(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: Option<ast::Expr>,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return match guard {
+ Some(guard) => from_text(&format!("{} if {} => {}", pats_str, guard, expr)),
+ None => from_text(&format!("{} => {}", pats_str, expr)),
+ };
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+pub fn match_arm_with_guard(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: ast::Expr,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return from_text(&format!("{} if {} => {}", pats_str, guard, expr));
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+ let arms_str = arms
+ .into_iter()
+ .map(|arm| {
+ let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+ let comma = if needs_comma { "," } else { "" };
+ format!(" {}{}\n", arm.syntax(), comma)
+ })
+ .collect::<String>();
+ return from_text(&arms_str);
+
+ fn from_text(text: &str) -> ast::MatchArmList {
+ ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))
+ }
+}
+
+pub fn where_pred(
+ path: ast::Path,
+ bounds: impl IntoIterator<Item = ast::TypeBound>,
+) -> ast::WherePred {
+ let bounds = bounds.into_iter().join(" + ");
+ return from_text(&format!("{}: {}", path, bounds));
+
+ fn from_text(text: &str) -> ast::WherePred {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
+ let preds = preds.into_iter().join(", ");
+ return from_text(preds.as_str());
+
+ fn from_text(text: &str) -> ast::WhereClause {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+pub fn let_stmt(
+ pattern: ast::Pat,
+ ty: Option<ast::Type>,
+ initializer: Option<ast::Expr>,
+) -> ast::LetStmt {
+ let mut text = String::new();
+ format_to!(text, "let {}", pattern);
+ if let Some(ty) = ty {
+ format_to!(text, ": {}", ty);
+ }
+ match initializer {
+ Some(it) => format_to!(text, " = {};", it),
+ None => format_to!(text, ";"),
+ };
+ ast_from_text(&format!("fn f() {{ {} }}", text))
+}
+pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
+ let semi = if expr.is_block_like() { "" } else { ";" };
+ ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi))
+}
+
+pub fn item_const(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+ expr: ast::Expr,
+) -> ast::Const {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("{} const {}: {} = {};", visibility, name, ty, expr))
+}
+
+pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
+ ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty))
+}
+
+pub fn self_param() -> ast::SelfParam {
+ ast_from_text("fn f(&self) { }")
+}
+
+pub fn ret_type(ty: ast::Type) -> ast::RetType {
+ ast_from_text(&format!("fn f() -> {} {{ }}", ty))
+}
+
+pub fn param_list(
+ self_param: Option<ast::SelfParam>,
+ pats: impl IntoIterator<Item = ast::Param>,
+) -> ast::ParamList {
+ let args = pats.into_iter().join(", ");
+ let list = match self_param {
+ Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param),
+ Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args),
+ None => format!("fn f({}) {{ }}", args),
+ };
+ ast_from_text(&list)
+}
+
+pub fn type_param(name: ast::Name, ty: Option<ast::TypeBoundList>) -> ast::TypeParam {
+ let bound = match ty {
+ Some(it) => format!(": {}", it),
+ None => String::new(),
+ };
+ ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound))
+}
+
+pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam {
+ ast_from_text(&format!("fn f<{}>() {{ }}", lifetime))
+}
+
+pub fn generic_param_list(
+ pats: impl IntoIterator<Item = ast::GenericParam>,
+) -> ast::GenericParamList {
+ let args = pats.into_iter().join(", ");
+ ast_from_text(&format!("fn f<{}>() {{ }}", args))
+}
+
+pub fn visibility_pub_crate() -> ast::Visibility {
+ ast_from_text("pub(crate) struct S")
+}
+
+pub fn visibility_pub() -> ast::Visibility {
+ ast_from_text("pub struct S")
+}
+
+pub fn tuple_field_list(fields: impl IntoIterator<Item = ast::TupleField>) -> ast::TupleFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f({});", fields))
+}
+
+pub fn record_field_list(
+ fields: impl IntoIterator<Item = ast::RecordField>,
+) -> ast::RecordFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f {{ {} }}", fields))
+}
+
+pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::TupleField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct f({}{});", visibility, ty))
+}
+
+pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
+ let field_list = match field_list {
+ None => String::new(),
+ Some(it) => format!("{}", it),
+ };
+ ast_from_text(&format!("enum f {{ {}{} }}", name, field_list))
+}
+
+pub fn fn_(
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+) -> ast::Fn {
+ let type_params = match type_params {
+ Some(type_params) => format!("{}", type_params),
+ None => "".into(),
+ };
+ let ret_type = match ret_type {
+ Some(ret_type) => format!("{} ", ret_type),
+ None => "".into(),
+ };
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ let async_literal = if is_async { "async " } else { "" };
+
+ ast_from_text(&format!(
+ "{}{}fn {}{}{} {}{}",
+ visibility, async_literal, fn_name, type_params, params, ret_type, body
+ ))
+}
+
+pub fn struct_(
+ visibility: Option<ast::Visibility>,
+ strukt_name: ast::Name,
+ generic_param_list: Option<ast::GenericParamList>,
+ field_list: ast::FieldList,
+) -> ast::Struct {
+ let semicolon = if matches!(field_list, ast::FieldList::TupleFieldList(_)) { ";" } else { "" };
+ let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string());
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ ast_from_text(&format!(
+ "{}struct {}{}{}{}",
+ visibility, strukt_name, type_params, field_list, semicolon
+ ))
+}
+
+#[track_caller]
+fn ast_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ let node = match parse.tree().syntax().descendants().find_map(N::cast) {
+ Some(it) => it,
+ None => {
+ panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
+ }
+ };
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+}
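
Every constructor in this module funnels through this helper: wrap the fragment in just enough synthetic context to parse, then pick the first node of the requested type out of the tree. A hypothetical extra constructor (not part of the upstream file) would follow the same shape:

    // Hypothetical example only: a `while` loop built like the constructors above.
    pub fn expr_while(condition: ast::Expr, block: ast::BlockExpr) -> ast::Expr {
        expr_from_text(&format!("while {} {}", condition, block))
    }
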
+
+pub fn token(kind: SyntaxKind) -> SyntaxToken {
+ tokens::SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == kind)
+ .unwrap_or_else(|| panic!("unhandled token: {:?}", kind))
+}
+
+pub mod tokens {
+ use once_cell::sync::Lazy;
+
+ use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+
+ pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
+ SourceFile::parse(
+ "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
+ )
+ });
+
+ pub fn single_space() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == " ")
+ .unwrap()
+ }
+
+ pub fn whitespace(text: &str) -> SyntaxToken {
+ assert!(text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn doc_comment(text: &str) -> SyntaxToken {
+ assert!(!text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn literal(text: &str) -> SyntaxToken {
+ assert_eq!(text.trim(), text);
+ let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
+ lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn single_newline() -> SyntaxToken {
+ let res = SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
+ .unwrap();
+ res.detach();
+ res
+ }
+
+ pub fn blank_line() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
+ .unwrap()
+ }
+
+ pub struct WsBuilder(SourceFile);
+
+ impl WsBuilder {
+ pub fn new(text: &str) -> WsBuilder {
+ WsBuilder(SourceFile::parse(text).ok().unwrap())
+ }
+ pub fn ws(&self) -> SyntaxToken {
+ self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+ }
+}
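
Taken together, the constructors compose into larger fragments without touching the parser directly. A rough usage sketch, assuming the module is reachable as `syntax::ast::make` and that `ast::Stmt`/`ast::Pat` wrap their node types as enum variants, as in the generated AST:

    use syntax::ast::{self, make};

    fn let_unit_block() -> ast::BlockExpr {
        // Builds `{ let _ = (); () }` piece by piece.
        let pat = ast::Pat::WildcardPat(make::wildcard_pat());
        let stmt = make::let_stmt(pat, None, Some(make::expr_unit()));
        make::block_expr([ast::Stmt::LetStmt(stmt)], Some(make::expr_unit()))
    }

The `tokens` submodule above serves a slightly different purpose: it hands out individual whitespace, comment, and literal tokens for code that splices tokens into trees manually.
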
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
new file mode 100644
index 000000000..bb92c51e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -0,0 +1,875 @@
+//! Various extension methods to ast Nodes, which are hard to code-generate.
+//! Extensions for various expressions live in a sibling `expr_extensions` module.
+//!
+//! These methods should only do simple, shallow tasks related to the syntax of the node itself.
+
+use std::{borrow::Cow, fmt, iter::successors};
+
+use itertools::Itertools;
+use parser::SyntaxKind;
+use rowan::{GreenNodeData, GreenTokenData};
+
+use crate::{
+ ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode},
+ NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
+};
+
+impl ast::Lifetime {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+}
+
+impl ast::Name {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+}
+
+impl ast::NameRef {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+
+ pub fn as_tuple_field(&self) -> Option<usize> {
+ self.text().parse().ok()
+ }
+
+ pub fn token_kind(&self) -> SyntaxKind {
+ self.syntax().first_token().map_or(SyntaxKind::ERROR, |it| it.kind())
+ }
+}
+
+fn text_of_first_token(node: &SyntaxNode) -> TokenText<'_> {
+ fn first_token(green_ref: &GreenNodeData) -> &GreenTokenData {
+ green_ref.children().next().and_then(NodeOrToken::into_token).unwrap()
+ }
+
+ match node.green() {
+ Cow::Borrowed(green_ref) => TokenText::borrowed(first_token(green_ref).text()),
+ Cow::Owned(green) => TokenText::owned(first_token(&green).to_owned()),
+ }
+}
+
+impl ast::HasModuleItem for ast::StmtList {}
+
+impl ast::BlockExpr {
+ // FIXME: remove all these methods, they belong to ast::StmtList
+ pub fn statements(&self) -> impl Iterator<Item = ast::Stmt> {
+ self.stmt_list().into_iter().flat_map(|it| it.statements())
+ }
+ pub fn tail_expr(&self) -> Option<ast::Expr> {
+ self.stmt_list()?.tail_expr()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Macro {
+ MacroRules(ast::MacroRules),
+ MacroDef(ast::MacroDef),
+}
+
+impl From<ast::MacroRules> for Macro {
+ fn from(it: ast::MacroRules) -> Self {
+ Macro::MacroRules(it)
+ }
+}
+
+impl From<ast::MacroDef> for Macro {
+ fn from(it: ast::MacroDef) -> Self {
+ Macro::MacroDef(it)
+ }
+}
+
+impl AstNode for Macro {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::MACRO_RULES | SyntaxKind::MACRO_DEF)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::MACRO_RULES => Macro::MacroRules(ast::MacroRules { syntax }),
+ SyntaxKind::MACRO_DEF => Macro::MacroDef(ast::MacroDef { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Macro::MacroRules(it) => it.syntax(),
+ Macro::MacroDef(it) => it.syntax(),
+ }
+ }
+}
+
+impl HasName for Macro {
+ fn name(&self) -> Option<ast::Name> {
+ match self {
+ Macro::MacroRules(mac) => mac.name(),
+ Macro::MacroDef(mac) => mac.name(),
+ }
+ }
+}
+
+impl HasAttrs for Macro {}
+
+impl From<ast::AssocItem> for ast::Item {
+ fn from(assoc: ast::AssocItem) -> Self {
+ match assoc {
+ ast::AssocItem::Const(it) => ast::Item::Const(it),
+ ast::AssocItem::Fn(it) => ast::Item::Fn(it),
+ ast::AssocItem::MacroCall(it) => ast::Item::MacroCall(it),
+ ast::AssocItem::TypeAlias(it) => ast::Item::TypeAlias(it),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum AttrKind {
+ Inner,
+ Outer,
+}
+
+impl AttrKind {
+ /// Returns `true` if the attr_kind is [`Inner`](Self::Inner).
+ pub fn is_inner(&self) -> bool {
+ matches!(self, Self::Inner)
+ }
+
+ /// Returns `true` if the attr_kind is [`Outer`](Self::Outer).
+ pub fn is_outer(&self) -> bool {
+ matches!(self, Self::Outer)
+ }
+}
+
+impl ast::Attr {
+ pub fn as_simple_atom(&self) -> Option<SmolStr> {
+ let meta = self.meta()?;
+ if meta.eq_token().is_some() || meta.token_tree().is_some() {
+ return None;
+ }
+ self.simple_name()
+ }
+
+ pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
+ let tt = self.meta()?.token_tree()?;
+ Some((self.simple_name()?, tt))
+ }
+
+ pub fn simple_name(&self) -> Option<SmolStr> {
+ let path = self.meta()?.path()?;
+ match (path.segment(), path.qualifier()) {
+ (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
+ _ => None,
+ }
+ }
+
+ pub fn kind(&self) -> AttrKind {
+ match self.excl_token() {
+ Some(_) => AttrKind::Inner,
+ None => AttrKind::Outer,
+ }
+ }
+
+ pub fn path(&self) -> Option<ast::Path> {
+ self.meta()?.path()
+ }
+
+ pub fn expr(&self) -> Option<ast::Expr> {
+ self.meta()?.expr()
+ }
+
+ pub fn token_tree(&self) -> Option<ast::TokenTree> {
+ self.meta()?.token_tree()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathSegmentKind {
+ Name(ast::NameRef),
+ Type { type_ref: Option<ast::Type>, trait_ref: Option<ast::PathType> },
+ SelfTypeKw,
+ SelfKw,
+ SuperKw,
+ CrateKw,
+}
+
+impl ast::PathSegment {
+ pub fn parent_path(&self) -> ast::Path {
+ self.syntax()
+ .parent()
+ .and_then(ast::Path::cast)
+ .expect("segments are always nested in paths")
+ }
+
+ pub fn crate_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.crate_token())
+ }
+
+ pub fn self_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.self_token())
+ }
+
+ pub fn self_type_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.Self_token())
+ }
+
+ pub fn super_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.super_token())
+ }
+
+ pub fn kind(&self) -> Option<PathSegmentKind> {
+ let res = if let Some(name_ref) = self.name_ref() {
+ match name_ref.token_kind() {
+ T![Self] => PathSegmentKind::SelfTypeKw,
+ T![self] => PathSegmentKind::SelfKw,
+ T![super] => PathSegmentKind::SuperKw,
+ T![crate] => PathSegmentKind::CrateKw,
+ _ => PathSegmentKind::Name(name_ref),
+ }
+ } else {
+ match self.syntax().first_child_or_token()?.kind() {
+ T![<] => {
+ // <T> or <T as Trait>
+ // T is any TypeRef, Trait has to be a PathType
+ let mut type_refs =
+ self.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
+ let type_ref = type_refs.next().and_then(ast::Type::cast);
+ let trait_ref = type_refs.next().and_then(ast::PathType::cast);
+ PathSegmentKind::Type { type_ref, trait_ref }
+ }
+ _ => return None,
+ }
+ };
+ Some(res)
+ }
+}
+
+impl ast::Path {
+ pub fn parent_path(&self) -> Option<ast::Path> {
+ self.syntax().parent().and_then(ast::Path::cast)
+ }
+
+ pub fn as_single_segment(&self) -> Option<ast::PathSegment> {
+ match self.qualifier() {
+ Some(_) => None,
+ None => self.segment(),
+ }
+ }
+
+ pub fn as_single_name_ref(&self) -> Option<ast::NameRef> {
+ match self.qualifier() {
+ Some(_) => None,
+ None => self.segment()?.name_ref(),
+ }
+ }
+
+ pub fn first_qualifier_or_self(&self) -> ast::Path {
+ successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
+ }
+
+ pub fn first_segment(&self) -> Option<ast::PathSegment> {
+ self.first_qualifier_or_self().segment()
+ }
+
+ pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+ successors(self.first_segment(), |p| {
+ p.parent_path().parent_path().and_then(|p| p.segment())
+ })
+ }
+
+ pub fn qualifiers(&self) -> impl Iterator<Item = ast::Path> + Clone {
+ successors(self.qualifier(), |p| p.qualifier())
+ }
+
+ pub fn top_path(&self) -> ast::Path {
+ let mut this = self.clone();
+ while let Some(path) = this.parent_path() {
+ this = path;
+ }
+ this
+ }
+}
+
+impl ast::Use {
+ pub fn is_simple_glob(&self) -> bool {
+ self.use_tree().map_or(false, |use_tree| {
+ use_tree.use_tree_list().is_none() && use_tree.star_token().is_some()
+ })
+ }
+}
+
+impl ast::UseTree {
+ pub fn is_simple_path(&self) -> bool {
+ self.use_tree_list().is_none() && self.star_token().is_none()
+ }
+}
+
+impl ast::UseTreeList {
+ pub fn parent_use_tree(&self) -> ast::UseTree {
+ self.syntax()
+ .parent()
+ .and_then(ast::UseTree::cast)
+ .expect("UseTreeLists are always nested in UseTrees")
+ }
+
+ pub fn has_inner_comment(&self) -> bool {
+ self.syntax()
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find_map(ast::Comment::cast)
+ .is_some()
+ }
+}
+
+impl ast::Impl {
+ pub fn self_ty(&self) -> Option<ast::Type> {
+ match self.target() {
+ (Some(t), None) | (_, Some(t)) => Some(t),
+ _ => None,
+ }
+ }
+
+ pub fn trait_(&self) -> Option<ast::Type> {
+ match self.target() {
+ (Some(t), Some(_)) => Some(t),
+ _ => None,
+ }
+ }
+
+ fn target(&self) -> (Option<ast::Type>, Option<ast::Type>) {
+ let mut types = support::children(self.syntax());
+ let first = types.next();
+ let second = types.next();
+ (first, second)
+ }
+
+ pub fn for_trait_name_ref(name_ref: &ast::NameRef) -> Option<ast::Impl> {
+ let this = name_ref.syntax().ancestors().find_map(ast::Impl::cast)?;
+ if this.trait_()?.syntax().text_range().start() == name_ref.syntax().text_range().start() {
+ Some(this)
+ } else {
+ None
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum StructKind {
+ Record(ast::RecordFieldList),
+ Tuple(ast::TupleFieldList),
+ Unit,
+}
+
+impl StructKind {
+ fn from_node<N: AstNode>(node: &N) -> StructKind {
+ if let Some(nfdl) = support::child::<ast::RecordFieldList>(node.syntax()) {
+ StructKind::Record(nfdl)
+ } else if let Some(pfl) = support::child::<ast::TupleFieldList>(node.syntax()) {
+ StructKind::Tuple(pfl)
+ } else {
+ StructKind::Unit
+ }
+ }
+}
+
+impl ast::Struct {
+ pub fn kind(&self) -> StructKind {
+ StructKind::from_node(self)
+ }
+}
+
+impl ast::RecordExprField {
+ pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordExprField> {
+ let candidate = Self::for_name_ref(field_name)?;
+ if candidate.field_name().as_ref() == Some(field_name) {
+ Some(candidate)
+ } else {
+ None
+ }
+ }
+
+ pub fn for_name_ref(name_ref: &ast::NameRef) -> Option<ast::RecordExprField> {
+ let syn = name_ref.syntax();
+ syn.parent()
+ .and_then(ast::RecordExprField::cast)
+ .or_else(|| syn.ancestors().nth(4).and_then(ast::RecordExprField::cast))
+ }
+
+ /// Deals with field init shorthand
+ pub fn field_name(&self) -> Option<ast::NameRef> {
+ if let Some(name_ref) = self.name_ref() {
+ return Some(name_ref);
+ }
+ if let ast::Expr::PathExpr(expr) = self.expr()? {
+ let path = expr.path()?;
+ let segment = path.segment()?;
+ let name_ref = segment.name_ref()?;
+ if path.qualifier().is_none() {
+ return Some(name_ref);
+ }
+ }
+ None
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum NameLike {
+ NameRef(ast::NameRef),
+ Name(ast::Name),
+ Lifetime(ast::Lifetime),
+}
+
+impl NameLike {
+ pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
+ match self {
+ NameLike::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ }
+ }
+ pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
+ match self {
+ NameLike::Lifetime(lifetime) => Some(lifetime),
+ _ => None,
+ }
+ }
+ pub fn text(&self) -> TokenText<'_> {
+ match self {
+ NameLike::NameRef(name_ref) => name_ref.text(),
+ NameLike::Name(name) => name.text(),
+ NameLike::Lifetime(lifetime) => lifetime.text(),
+ }
+ }
+}
+
+impl ast::AstNode for NameLike {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF | SyntaxKind::LIFETIME)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::NAME => NameLike::Name(ast::Name { syntax }),
+ SyntaxKind::NAME_REF => NameLike::NameRef(ast::NameRef { syntax }),
+ SyntaxKind::LIFETIME => NameLike::Lifetime(ast::Lifetime { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ NameLike::NameRef(it) => it.syntax(),
+ NameLike::Name(it) => it.syntax(),
+ NameLike::Lifetime(it) => it.syntax(),
+ }
+ }
+}
+
+const _: () = {
+ use ast::{Lifetime, Name, NameRef};
+ stdx::impl_from!(NameRef, Name, Lifetime for NameLike);
+};
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum NameOrNameRef {
+ Name(ast::Name),
+ NameRef(ast::NameRef),
+}
+
+impl fmt::Display for NameOrNameRef {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ NameOrNameRef::Name(it) => fmt::Display::fmt(it, f),
+ NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl NameOrNameRef {
+ pub fn text(&self) -> TokenText<'_> {
+ match self {
+ NameOrNameRef::Name(name) => name.text(),
+ NameOrNameRef::NameRef(name_ref) => name_ref.text(),
+ }
+ }
+}
+
+impl ast::RecordPatField {
+ pub fn for_field_name_ref(field_name: &ast::NameRef) -> Option<ast::RecordPatField> {
+ let candidate = field_name.syntax().parent().and_then(ast::RecordPatField::cast)?;
+ match candidate.field_name()? {
+ NameOrNameRef::NameRef(name_ref) if name_ref == *field_name => Some(candidate),
+ _ => None,
+ }
+ }
+
+ pub fn for_field_name(field_name: &ast::Name) -> Option<ast::RecordPatField> {
+ let candidate =
+ field_name.syntax().ancestors().nth(2).and_then(ast::RecordPatField::cast)?;
+ match candidate.field_name()? {
+ NameOrNameRef::Name(name) if name == *field_name => Some(candidate),
+ _ => None,
+ }
+ }
+
+ pub fn parent_record_pat(&self) -> ast::RecordPat {
+ self.syntax().ancestors().find_map(ast::RecordPat::cast).unwrap()
+ }
+
+ /// Deals with field init shorthand
+ pub fn field_name(&self) -> Option<NameOrNameRef> {
+ if let Some(name_ref) = self.name_ref() {
+ return Some(NameOrNameRef::NameRef(name_ref));
+ }
+ match self.pat() {
+ Some(ast::Pat::IdentPat(pat)) => {
+ let name = pat.name()?;
+ Some(NameOrNameRef::Name(name))
+ }
+ Some(ast::Pat::BoxPat(pat)) => match pat.pat() {
+ Some(ast::Pat::IdentPat(pat)) => {
+ let name = pat.name()?;
+ Some(NameOrNameRef::Name(name))
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+}
+
+impl ast::Variant {
+ pub fn parent_enum(&self) -> ast::Enum {
+ self.syntax()
+ .parent()
+ .and_then(|it| it.parent())
+ .and_then(ast::Enum::cast)
+ .expect("EnumVariants are always nested in Enums")
+ }
+ pub fn kind(&self) -> StructKind {
+ StructKind::from_node(self)
+ }
+}
+
+impl ast::Item {
+ pub fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ ast::AnyHasGenericParams::cast(self.syntax().clone())?.generic_param_list()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum FieldKind {
+ Name(ast::NameRef),
+ Index(SyntaxToken),
+}
+
+impl ast::FieldExpr {
+ pub fn index_token(&self) -> Option<SyntaxToken> {
+ self.syntax
+ .children_with_tokens()
+ // FIXME: Accepting floats here to reject them in validation later
+ .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
+ .as_ref()
+ .and_then(SyntaxElement::as_token)
+ .cloned()
+ }
+
+ pub fn field_access(&self) -> Option<FieldKind> {
+ match self.name_ref() {
+ Some(nr) => Some(FieldKind::Name(nr)),
+ None => self.index_token().map(FieldKind::Index),
+ }
+ }
+}
+
+pub struct SlicePatComponents {
+ pub prefix: Vec<ast::Pat>,
+ pub slice: Option<ast::Pat>,
+ pub suffix: Vec<ast::Pat>,
+}
+
+impl ast::SlicePat {
+ pub fn components(&self) -> SlicePatComponents {
+ let mut args = self.pats().peekable();
+ let prefix = args
+ .peeking_take_while(|p| match p {
+ ast::Pat::RestPat(_) => false,
+ ast::Pat::IdentPat(bp) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))),
+ ast::Pat::RefPat(rp) => match rp.pat() {
+ Some(ast::Pat::RestPat(_)) => false,
+ Some(ast::Pat::IdentPat(bp)) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))),
+ _ => true,
+ },
+ _ => true,
+ })
+ .collect();
+ let slice = args.next();
+ let suffix = args.collect();
+
+ SlicePatComponents { prefix, slice, suffix }
+ }
+}
+
+impl ast::IdentPat {
+ pub fn is_simple_ident(&self) -> bool {
+ self.at_token().is_none()
+ && self.mut_token().is_none()
+ && self.ref_token().is_none()
+ && self.pat().is_none()
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum SelfParamKind {
+ /// self
+ Owned,
+ /// &self
+ Ref,
+ /// &mut self
+ MutRef,
+}
+
+impl ast::SelfParam {
+ pub fn kind(&self) -> SelfParamKind {
+ if self.amp_token().is_some() {
+ if self.mut_token().is_some() {
+ SelfParamKind::MutRef
+ } else {
+ SelfParamKind::Ref
+ }
+ } else {
+ SelfParamKind::Owned
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TypeBoundKind {
+ /// Trait
+ PathType(ast::PathType),
+ /// for<'a> ...
+ ForType(ast::ForType),
+ /// 'a
+ Lifetime(ast::Lifetime),
+}
+
+impl ast::TypeBound {
+ pub fn kind(&self) -> TypeBoundKind {
+ if let Some(path_type) = support::children(self.syntax()).next() {
+ TypeBoundKind::PathType(path_type)
+ } else if let Some(for_type) = support::children(self.syntax()).next() {
+ TypeBoundKind::ForType(for_type)
+ } else if let Some(lifetime) = self.lifetime() {
+ TypeBoundKind::Lifetime(lifetime)
+ } else {
+ unreachable!()
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum TypeOrConstParam {
+ Type(ast::TypeParam),
+ Const(ast::ConstParam),
+}
+
+impl TypeOrConstParam {
+ pub fn name(&self) -> Option<ast::Name> {
+ match self {
+ TypeOrConstParam::Type(x) => x.name(),
+ TypeOrConstParam::Const(x) => x.name(),
+ }
+ }
+}
+
+pub enum VisibilityKind {
+ In(ast::Path),
+ PubCrate,
+ PubSuper,
+ PubSelf,
+ Pub,
+}
+
+impl ast::Visibility {
+ pub fn kind(&self) -> VisibilityKind {
+ match self.path() {
+ Some(path) => {
+ if let Some(segment) =
+ path.as_single_segment().filter(|it| it.coloncolon_token().is_none())
+ {
+ if segment.crate_token().is_some() {
+ return VisibilityKind::PubCrate;
+ } else if segment.super_token().is_some() {
+ return VisibilityKind::PubSuper;
+ } else if segment.self_token().is_some() {
+ return VisibilityKind::PubSelf;
+ }
+ }
+ VisibilityKind::In(path)
+ }
+ None => VisibilityKind::Pub,
+ }
+ }
+}
+
+impl ast::LifetimeParam {
+ pub fn lifetime_bounds(&self) -> impl Iterator<Item = SyntaxToken> {
+ self.syntax()
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .skip_while(|x| x.kind() != T![:])
+ .filter(|it| it.kind() == T![lifetime_ident])
+ }
+}
+
+impl ast::Module {
+ /// Returns the parent ast::Module; this differs from the semantic parent in that it only
+ /// considers parent declarations in the AST.
+ pub fn parent(&self) -> Option<ast::Module> {
+ self.syntax().ancestors().nth(2).and_then(ast::Module::cast)
+ }
+}
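
The `ancestors().nth(2)` step works because a nested module syntactically sits inside its parent's item list: `inner` -> `ItemList` -> `outer`. A quick sketch of the purely lexical behaviour, assuming `HasName` and the crate-root `SourceFile`/`AstNode` re-exports used elsewhere in this crate:

    use syntax::{ast::{self, HasName}, AstNode, SourceFile};

    fn ast_parent_demo() {
        let file = SourceFile::parse("mod outer { mod inner {} }").tree();
        let inner = file
            .syntax()
            .descendants()
            .filter_map(ast::Module::cast)
            .find(|m| m.name().map_or(false, |n| n.to_string() == "inner"))
            .unwrap();
        // Only lexical nesting counts; a `mod foo;` declared in another file has
        // no AST parent from this method's point of view.
        assert_eq!(inner.parent().unwrap().name().unwrap().to_string(), "outer");
    }
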
+
+impl ast::RangePat {
+ pub fn start(&self) -> Option<ast::Pat> {
+ self.syntax()
+ .children_with_tokens()
+ .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+ .filter_map(|it| it.into_node())
+ .find_map(ast::Pat::cast)
+ }
+
+ pub fn end(&self) -> Option<ast::Pat> {
+ self.syntax()
+ .children_with_tokens()
+ .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+ .filter_map(|it| it.into_node())
+ .find_map(ast::Pat::cast)
+ }
+}
+
+impl ast::TokenTree {
+ pub fn token_trees_and_tokens(
+ &self,
+ ) -> impl Iterator<Item = NodeOrToken<ast::TokenTree, SyntaxToken>> {
+ self.syntax().children_with_tokens().filter_map(|not| match not {
+ NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
+ NodeOrToken::Token(t) => Some(NodeOrToken::Token(t)),
+ })
+ }
+
+ pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .first_child_or_token()?
+ .into_token()
+ .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['[']))
+ }
+
+ pub fn right_delimiter_token(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .last_child_or_token()?
+ .into_token()
+ .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']']))
+ }
+
+ pub fn parent_meta(&self) -> Option<ast::Meta> {
+ self.syntax().parent().and_then(ast::Meta::cast)
+ }
+}
+
+impl ast::Meta {
+ pub fn parent_attr(&self) -> Option<ast::Attr> {
+ self.syntax().parent().and_then(ast::Attr::cast)
+ }
+}
+
+impl ast::GenericArgList {
+ pub fn lifetime_args(&self) -> impl Iterator<Item = ast::LifetimeArg> {
+ self.generic_args().filter_map(|arg| match arg {
+ ast::GenericArg::LifetimeArg(it) => Some(it),
+ _ => None,
+ })
+ }
+}
+
+impl ast::GenericParamList {
+ pub fn lifetime_params(&self) -> impl Iterator<Item = ast::LifetimeParam> {
+ self.generic_params().filter_map(|param| match param {
+ ast::GenericParam::LifetimeParam(it) => Some(it),
+ ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None,
+ })
+ }
+ pub fn type_or_const_params(&self) -> impl Iterator<Item = ast::TypeOrConstParam> {
+ self.generic_params().filter_map(|param| match param {
+ ast::GenericParam::TypeParam(it) => Some(ast::TypeOrConstParam::Type(it)),
+ ast::GenericParam::LifetimeParam(_) => None,
+ ast::GenericParam::ConstParam(it) => Some(ast::TypeOrConstParam::Const(it)),
+ })
+ }
+}
+
+impl ast::ForExpr {
+ pub fn iterable(&self) -> Option<ast::Expr> {
+ // If the iterable is a BlockExpr, check if the body is missing.
+ // If it is, assume the iterable is the expression that is missing instead.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
+ }
+}
+
+impl ast::HasLoopBody for ast::ForExpr {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ let second = exprs.next();
+ second.or(first)
+ }
+}
+
+impl ast::WhileExpr {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ // If the condition is a BlockExpr, check if the body is missing.
+ // If it is, assume the condition is the expression that is missing instead.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
+ }
+}
+
+impl ast::HasLoopBody for ast::WhileExpr {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ let second = exprs.next();
+ second.or(first)
+ }
+}
+
+impl ast::HasAttrs for ast::AnyHasDocComments {}
+
+impl From<ast::Adt> for ast::Item {
+ fn from(it: ast::Adt) -> Self {
+ match it {
+ ast::Adt::Enum(it) => ast::Item::Enum(it),
+ ast::Adt::Struct(it) => ast::Item::Struct(it),
+ ast::Adt::Union(it) => ast::Item::Union(it),
+ }
+ }
+}
+
+impl ast::IfExpr {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ support::child(&self.syntax)
+ }
+}
+
+impl ast::MatchGuard {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ support::child(&self.syntax)
+ }
+}
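
Many of the helpers above are aimed at walking paths. A small sketch of `segments()` and `as_single_name_ref()` on a freshly parsed tree; the crate-root `SourceFile`/`AstNode` re-exports are assumed to match the imports this crate uses internally:

    use syntax::{ast, AstNode, SourceFile};

    fn path_helpers_demo() {
        let file = SourceFile::parse("use foo::bar::baz;").tree();
        let path = file.syntax().descendants().find_map(ast::Path::cast).unwrap();
        // `segments()` starts at the first qualifier and walks outwards.
        let names: Vec<_> =
            path.segments().filter_map(|s| s.name_ref()).map(|n| n.to_string()).collect();
        assert_eq!(names, ["foo", "bar", "baz"]);
        // A qualified path is not a single name ref.
        assert!(path.as_single_name_ref().is_none());
    }
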
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs
new file mode 100644
index 000000000..a687ba0b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs
@@ -0,0 +1,122 @@
+//! Defines a bunch of data-less enums for unary and binary operators.
+//!
+//! Types here don't know about the AST; this allows reusing them for both the AST and
+//! HIR.
+use std::fmt;
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum RangeOp {
+ /// `..`
+ Exclusive,
+ /// `..=`
+ Inclusive,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum UnaryOp {
+ /// `*`
+ Deref,
+ /// `!`
+ Not,
+ /// `-`
+ Neg,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum BinaryOp {
+ LogicOp(LogicOp),
+ ArithOp(ArithOp),
+ CmpOp(CmpOp),
+ Assignment { op: Option<ArithOp> },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum LogicOp {
+ And,
+ Or,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum CmpOp {
+ Eq { negated: bool },
+ Ord { ordering: Ordering, strict: bool },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum Ordering {
+ Less,
+ Greater,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ArithOp {
+ Add,
+ Mul,
+ Sub,
+ Div,
+ Rem,
+ Shl,
+ Shr,
+ BitXor,
+ BitOr,
+ BitAnd,
+}
+
+impl fmt::Display for LogicOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ LogicOp::And => "&&",
+ LogicOp::Or => "||",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for ArithOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ ArithOp::Add => "+",
+ ArithOp::Mul => "*",
+ ArithOp::Sub => "-",
+ ArithOp::Div => "/",
+ ArithOp::Rem => "%",
+ ArithOp::Shl => "<<",
+ ArithOp::Shr => ">>",
+ ArithOp::BitXor => "^",
+ ArithOp::BitOr => "|",
+ ArithOp::BitAnd => "&",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for CmpOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ CmpOp::Eq { negated: false } => "==",
+ CmpOp::Eq { negated: true } => "!=",
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => "<=",
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => "<",
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => ">=",
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => ">",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for BinaryOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ BinaryOp::LogicOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::ArithOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::CmpOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::Assignment { op } => {
+ f.write_str("=")?;
+ if let Some(op) = op {
+ fmt::Display::fmt(op, f)?;
+ }
+ Ok(())
+ }
+ }
+ }
+}
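
These enums carry no syntax of their own; their `Display` impls are what helpers like `make::expr_bin_op` rely on when rendering expressions back to text. A tiny sketch, assuming the operator enums are re-exported from `syntax::ast` as in the rest of rust-analyzer:

    use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};

    fn op_display_demo() {
        assert_eq!(BinaryOp::ArithOp(ArithOp::Shl).to_string(), "<<");
        let le = BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false });
        assert_eq!(le.to_string(), "<=");
    }
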
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
new file mode 100644
index 000000000..28976d837
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -0,0 +1,472 @@
+//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+
+use std::borrow::Cow;
+
+use rustc_lexer::unescape::{unescape_byte, unescape_char, unescape_literal, Mode};
+
+use crate::{
+ ast::{self, AstToken},
+ TextRange, TextSize,
+};
+
+impl ast::Comment {
+ pub fn kind(&self) -> CommentKind {
+ CommentKind::from_text(self.text())
+ }
+
+ pub fn is_doc(&self) -> bool {
+ self.kind().doc.is_some()
+ }
+
+ pub fn is_inner(&self) -> bool {
+ self.kind().doc == Some(CommentPlacement::Inner)
+ }
+
+ pub fn is_outer(&self) -> bool {
+ self.kind().doc == Some(CommentPlacement::Outer)
+ }
+
+ pub fn prefix(&self) -> &'static str {
+ let &(prefix, _kind) = CommentKind::BY_PREFIX
+ .iter()
+ .find(|&(prefix, kind)| self.kind() == *kind && self.text().starts_with(prefix))
+ .unwrap();
+ prefix
+ }
+
+ /// Returns the textual content of a doc comment node as a single string with prefix and suffix
+ /// removed.
+ pub fn doc_comment(&self) -> Option<&str> {
+ let kind = self.kind();
+ match kind {
+ CommentKind { shape, doc: Some(_) } => {
+ let prefix = kind.prefix();
+ let text = &self.text()[prefix.len()..];
+ let text = if shape == CommentShape::Block {
+ text.strip_suffix("*/").unwrap_or(text)
+ } else {
+ text
+ };
+ Some(text)
+ }
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub struct CommentKind {
+ pub shape: CommentShape,
+ pub doc: Option<CommentPlacement>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentShape {
+ Line,
+ Block,
+}
+
+impl CommentShape {
+ pub fn is_line(self) -> bool {
+ self == CommentShape::Line
+ }
+
+ pub fn is_block(self) -> bool {
+ self == CommentShape::Block
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentPlacement {
+ Inner,
+ Outer,
+}
+
+impl CommentKind {
+ const BY_PREFIX: [(&'static str, CommentKind); 9] = [
+ ("/**/", CommentKind { shape: CommentShape::Block, doc: None }),
+ ("/***", CommentKind { shape: CommentShape::Block, doc: None }),
+ ("////", CommentKind { shape: CommentShape::Line, doc: None }),
+ ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }),
+ ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }),
+ ("/**", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Outer) }),
+ ("/*!", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Inner) }),
+ ("//", CommentKind { shape: CommentShape::Line, doc: None }),
+ ("/*", CommentKind { shape: CommentShape::Block, doc: None }),
+ ];
+
+ pub(crate) fn from_text(text: &str) -> CommentKind {
+ let &(_prefix, kind) = CommentKind::BY_PREFIX
+ .iter()
+ .find(|&(prefix, _kind)| text.starts_with(prefix))
+ .unwrap();
+ kind
+ }
+
+ pub fn prefix(&self) -> &'static str {
+ let &(prefix, _) =
+ CommentKind::BY_PREFIX.iter().rev().find(|(_, kind)| kind == self).unwrap();
+ prefix
+ }
+}
+
+impl ast::Whitespace {
+ pub fn spans_multiple_lines(&self) -> bool {
+ let text = self.text();
+ text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
+ }
+}
+
+pub struct QuoteOffsets {
+ pub quotes: (TextRange, TextRange),
+ pub contents: TextRange,
+}
+
+impl QuoteOffsets {
+ fn new(literal: &str) -> Option<QuoteOffsets> {
+ let left_quote = literal.find('"')?;
+ let right_quote = literal.rfind('"')?;
+ if left_quote == right_quote {
+ // `literal` only contains one quote
+ return None;
+ }
+
+ let start = TextSize::from(0);
+ let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
+ let right_quote = TextSize::try_from(right_quote).unwrap();
+ let end = TextSize::of(literal);
+
+ let res = QuoteOffsets {
+ quotes: (TextRange::new(start, left_quote), TextRange::new(right_quote, end)),
+ contents: TextRange::new(left_quote, right_quote),
+ };
+ Some(res)
+ }
+}
+
+pub trait IsString: AstToken {
+ fn quote_offsets(&self) -> Option<QuoteOffsets> {
+ let text = self.text();
+ let offsets = QuoteOffsets::new(text)?;
+ let o = self.syntax().text_range().start();
+ let offsets = QuoteOffsets {
+ quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
+ contents: offsets.contents + o,
+ };
+ Some(offsets)
+ }
+ fn text_range_between_quotes(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.contents)
+ }
+ fn open_quote_text_range(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.quotes.0)
+ }
+ fn close_quote_text_range(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.quotes.1)
+ }
+ fn escaped_char_ranges(
+ &self,
+ cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
+ ) {
+ let text_range_no_quotes = match self.text_range_between_quotes() {
+ Some(it) => it,
+ None => return,
+ };
+
+ let start = self.syntax().text_range().start();
+ let text = &self.text()[text_range_no_quotes - start];
+ let offset = text_range_no_quotes.start() - start;
+
+ unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ cb(text_range + offset, unescaped_char);
+ });
+ }
+}
+
+impl IsString for ast::String {}
+
+impl ast::String {
+ pub fn is_raw(&self) -> bool {
+ self.text().starts_with('r')
+ }
+ pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
+ let contents_range = self.text_range_between_quotes()?;
+ assert!(TextRange::up_to(contents_range.len()).contains_range(range));
+ Some(range + contents_range.start())
+ }
+
+ pub fn value(&self) -> Option<Cow<'_, str>> {
+ if self.is_raw() {
+ let text = self.text();
+ let text =
+ &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+ return Some(Cow::Borrowed(text));
+ }
+
+ let text = self.text();
+ let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+ let mut buf = String::new();
+ let mut text_iter = text.chars();
+ let mut has_error = false;
+ unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
+ buf.push_str(&text[..char_range.start]);
+ buf.push(c);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, true) => Some(Cow::Borrowed(text)),
+ (false, false) => Some(Cow::Owned(buf)),
+ }
+ }
+}
+
+impl IsString for ast::ByteString {}
+
+impl ast::ByteString {
+ pub fn is_raw(&self) -> bool {
+ self.text().starts_with("br")
+ }
+
+ pub fn value(&self) -> Option<Cow<'_, [u8]>> {
+ if self.is_raw() {
+ let text = self.text();
+ let text =
+ &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+ return Some(Cow::Borrowed(text.as_bytes()));
+ }
+
+ let text = self.text();
+ let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+ let mut buf: Vec<u8> = Vec::new();
+ let mut text_iter = text.chars();
+ let mut has_error = false;
+ unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c as u8),
+ (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
+ buf.extend_from_slice(text[..char_range.start].as_bytes());
+ buf.push(c as u8);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, true) => Some(Cow::Borrowed(text.as_bytes())),
+ (false, false) => Some(Cow::Owned(buf)),
+ }
+ }
+}
+
+impl ast::IntNumber {
+ pub fn radix(&self) -> Radix {
+ match self.text().get(..2).unwrap_or_default() {
+ "0b" => Radix::Binary,
+ "0o" => Radix::Octal,
+ "0x" => Radix::Hexadecimal,
+ _ => Radix::Decimal,
+ }
+ }
+
+ pub fn split_into_parts(&self) -> (&str, &str, &str) {
+ let radix = self.radix();
+ let (prefix, mut text) = self.text().split_at(radix.prefix_len());
+
+ let is_suffix_start: fn(&(usize, char)) -> bool = match radix {
+ Radix::Hexadecimal => |(_, c)| matches!(c, 'g'..='z' | 'G'..='Z'),
+ _ => |(_, c)| c.is_ascii_alphabetic(),
+ };
+
+ let mut suffix = "";
+ if let Some((suffix_start, _)) = text.char_indices().find(is_suffix_start) {
+ let (text2, suffix2) = text.split_at(suffix_start);
+ text = text2;
+ suffix = suffix2;
+ };
+
+ (prefix, text, suffix)
+ }
+
+ pub fn value(&self) -> Option<u128> {
+ let (_, text, _) = self.split_into_parts();
+ let value = u128::from_str_radix(&text.replace('_', ""), self.radix() as u32).ok()?;
+ Some(value)
+ }
+
+ pub fn suffix(&self) -> Option<&str> {
+ let (_, _, suffix) = self.split_into_parts();
+ if suffix.is_empty() {
+ None
+ } else {
+ Some(suffix)
+ }
+ }
+
+ pub fn float_value(&self) -> Option<f64> {
+ let (_, text, _) = self.split_into_parts();
+ text.parse::<f64>().ok()
+ }
+}
+
+impl ast::FloatNumber {
+ pub fn split_into_parts(&self) -> (&str, &str) {
+ let text = self.text();
+ let mut float_text = self.text();
+ let mut suffix = "";
+ let mut indices = text.char_indices();
+ if let Some((mut suffix_start, c)) = indices.by_ref().find(|(_, c)| c.is_ascii_alphabetic())
+ {
+ if c == 'e' || c == 'E' {
+ if let Some(suffix_start_tuple) = indices.find(|(_, c)| c.is_ascii_alphabetic()) {
+ suffix_start = suffix_start_tuple.0;
+
+ float_text = &text[..suffix_start];
+ suffix = &text[suffix_start..];
+ }
+ } else {
+ float_text = &text[..suffix_start];
+ suffix = &text[suffix_start..];
+ }
+ }
+
+ (float_text, suffix)
+ }
+
+ pub fn suffix(&self) -> Option<&str> {
+ let (_, suffix) = self.split_into_parts();
+ if suffix.is_empty() {
+ None
+ } else {
+ Some(suffix)
+ }
+ }
+
+ pub fn value(&self) -> Option<f64> {
+ let (text, _) = self.split_into_parts();
+ text.parse::<f64>().ok()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum Radix {
+ Binary = 2,
+ Octal = 8,
+ Decimal = 10,
+ Hexadecimal = 16,
+}
+
+impl Radix {
+ pub const ALL: &'static [Radix] =
+ &[Radix::Binary, Radix::Octal, Radix::Decimal, Radix::Hexadecimal];
+
+ const fn prefix_len(self) -> usize {
+ match self {
+ Self::Decimal => 0,
+ _ => 2,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::ast::{self, make, FloatNumber, IntNumber};
+
+ fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
+ }
+
+ fn check_int_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
+ }
+
+ #[test]
+ fn test_float_number_suffix() {
+ check_float_suffix("123.0", None);
+ check_float_suffix("123f32", "f32");
+ check_float_suffix("123.0e", None);
+ check_float_suffix("123.0e4", None);
+ check_float_suffix("123.0ef32", "f32");
+ check_float_suffix("123.0E4f32", "f32");
+ check_float_suffix("1_2_3.0_f32", "f32");
+ }
+
+ #[test]
+ fn test_int_number_suffix() {
+ check_int_suffix("123", None);
+ check_int_suffix("123i32", "i32");
+ check_int_suffix("1_0_1_l_o_l", "l_o_l");
+ check_int_suffix("0b11", None);
+ check_int_suffix("0o11", None);
+ check_int_suffix("0xff", None);
+ check_int_suffix("0b11u32", "u32");
+ check_int_suffix("0o11u32", "u32");
+ check_int_suffix("0xffu32", "u32");
+ }
+
+ fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(
+ ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
+ .value()
+ .as_deref(),
+ expected.into()
+ );
+ }
+
+ #[test]
+ fn test_string_escape() {
+ check_string_value(r"foobar", "foobar");
+ check_string_value(r"\foobar", None);
+ check_string_value(r"\nfoobar", "\nfoobar");
+ check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\");
+ }
+}
+
+impl ast::Char {
+ pub fn value(&self) -> Option<char> {
+ let mut text = self.text();
+ if text.starts_with('\'') {
+ text = &text[1..];
+ } else {
+ return None;
+ }
+ if text.ends_with('\'') {
+ text = &text[0..text.len() - 1];
+ }
+
+ unescape_char(text).ok()
+ }
+}
+
+impl ast::Byte {
+ pub fn value(&self) -> Option<u8> {
+ let mut text = self.text();
+ if text.starts_with("b\'") {
+ text = &text[2..];
+ } else {
+ return None;
+ }
+ if text.ends_with('\'') {
+ text = &text[0..text.len() - 1];
+ }
+
+ unescape_byte(text).ok()
+ }
+}
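
A short sketch of the comment classification above in action; it relies only on the prefix table and on `doc_comment()` stripping the prefix while keeping the rest of the line:

    use syntax::{ast::{self, AstToken}, AstNode, SourceFile};

    fn doc_comment_demo() {
        let file = SourceFile::parse("/// Greets the world.\nfn f() {}").tree();
        let comment = file
            .syntax()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find_map(ast::Comment::cast)
            .unwrap();
        assert!(comment.is_doc() && !comment.is_inner());
        // The `///` prefix is removed; the leading space stays.
        assert_eq!(comment.doc_comment(), Some(" Greets the world."));
    }
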
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
new file mode 100644
index 000000000..aa2b7ed5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -0,0 +1,136 @@
+//! Various traits that are implemented by ast nodes.
+//!
+//! The implementations are usually trivial, and live in generated.rs
+use itertools::Either;
+
+use crate::{
+ ast::{self, support, AstChildren, AstNode, AstToken},
+ syntax_node::SyntaxElementChildren,
+ SyntaxElement, SyntaxToken, T,
+};
+
+pub trait HasName: AstNode {
+ fn name(&self) -> Option<ast::Name> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasVisibility: AstNode {
+ fn visibility(&self) -> Option<ast::Visibility> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasLoopBody: AstNode {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ support::child(self.syntax())
+ }
+
+ fn label(&self) -> Option<ast::Label> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasArgList: AstNode {
+ fn arg_list(&self) -> Option<ast::ArgList> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasModuleItem: AstNode {
+ fn items(&self) -> AstChildren<ast::Item> {
+ support::children(self.syntax())
+ }
+}
+
+pub trait HasGenericParams: AstNode {
+ fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ support::child(self.syntax())
+ }
+
+ fn where_clause(&self) -> Option<ast::WhereClause> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasTypeBounds: AstNode {
+ fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
+ support::child(self.syntax())
+ }
+
+ fn colon_token(&self) -> Option<SyntaxToken> {
+ support::token(self.syntax(), T![:])
+ }
+}
+
+pub trait HasAttrs: AstNode {
+ fn attrs(&self) -> AstChildren<ast::Attr> {
+ support::children(self.syntax())
+ }
+ fn has_atom_attr(&self, atom: &str) -> bool {
+ self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
+ }
+}
+
+pub trait HasDocComments: HasAttrs {
+ fn doc_comments(&self) -> DocCommentIter {
+ DocCommentIter { iter: self.syntax().children_with_tokens() }
+ }
+ fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
+ AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
+ }
+}
+
+impl DocCommentIter {
+ pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> DocCommentIter {
+ DocCommentIter { iter: syntax_node.children_with_tokens() }
+ }
+
+ #[cfg(test)]
+ pub fn doc_comment_text(self) -> Option<String> {
+ let docs = itertools::Itertools::join(
+ &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
+ "\n",
+ );
+ if docs.is_empty() {
+ None
+ } else {
+ Some(docs)
+ }
+ }
+}
+
+pub struct DocCommentIter {
+ iter: SyntaxElementChildren,
+}
+
+impl Iterator for DocCommentIter {
+ type Item = ast::Comment;
+ fn next(&mut self) -> Option<ast::Comment> {
+ self.iter.by_ref().find_map(|el| {
+ el.into_token().and_then(ast::Comment::cast).filter(ast::Comment::is_doc)
+ })
+ }
+}
+
+pub struct AttrDocCommentIter {
+ iter: SyntaxElementChildren,
+}
+
+impl AttrDocCommentIter {
+ pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> AttrDocCommentIter {
+ AttrDocCommentIter { iter: syntax_node.children_with_tokens() }
+ }
+}
+
+impl Iterator for AttrDocCommentIter {
+ type Item = Either<ast::Attr, ast::Comment>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.by_ref().find_map(|el| match el {
+ SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
+ SyntaxElement::Token(tok) => {
+ ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
+ }
+ })
+ }
+}
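
These traits are implemented for the generated nodes, so most lookups are a single trait call. A small sketch, assuming the traits are re-exported from `syntax::ast` as elsewhere in rust-analyzer:

    use syntax::{ast::{self, HasAttrs, HasName}, AstNode, SourceFile};

    fn trait_lookup_demo() {
        let file = SourceFile::parse("#[inline]\nfn answer() -> i32 { 42 }").tree();
        let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
        assert_eq!(func.name().unwrap().to_string(), "answer");
        // `has_atom_attr` only matches bare attributes such as `#[inline]`.
        assert!(func.has_atom_attr("inline"));
    }
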
diff --git a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
new file mode 100644
index 000000000..256999fe0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
@@ -0,0 +1,75 @@
+//! Some infrastructure for fuzz testing.
+//!
+//! We don't normally run fuzzing, so this is hopelessly bitrotten :(
+
+use std::{
+ convert::TryInto,
+ str::{self, FromStr},
+};
+
+use text_edit::Indel;
+
+use crate::{validation, AstNode, SourceFile, TextRange};
+
+fn check_file_invariants(file: &SourceFile) {
+ let root = file.syntax();
+ validation::validate_block_structure(root);
+}
+
+pub fn check_parser(text: &str) {
+ let file = SourceFile::parse(text);
+ check_file_invariants(&file.tree());
+}
+
+#[derive(Debug, Clone)]
+pub struct CheckReparse {
+ text: String,
+ edit: Indel,
+ edited_text: String,
+}
+
+impl CheckReparse {
+ pub fn from_data(data: &[u8]) -> Option<Self> {
+ const PREFIX: &str = "fn main(){\n\t";
+ const SUFFIX: &str = "\n}";
+
+ let data = str::from_utf8(data).ok()?;
+ let mut lines = data.lines();
+ let delete_start = usize::from_str(lines.next()?).ok()? + PREFIX.len();
+ let delete_len = usize::from_str(lines.next()?).ok()?;
+ let insert = lines.next()?.to_string();
+ let text = lines.collect::<Vec<_>>().join("\n");
+ let text = format!("{}{}{}", PREFIX, text, SUFFIX);
+ text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
+ let delete =
+ TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
+ let edited_text =
+ format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
+ let edit = Indel { insert, delete };
+ Some(CheckReparse { text, edit, edited_text })
+ }
+
+ pub fn run(&self) {
+ let parse = SourceFile::parse(&self.text);
+ let new_parse = parse.reparse(&self.edit);
+ check_file_invariants(&new_parse.tree());
+ assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
+ let full_reparse = SourceFile::parse(&self.edited_text);
+ for (a, b) in
+ new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
+ {
+ if (a.kind(), a.text_range()) != (b.kind(), b.text_range()) {
+ eprint!("original:\n{:#?}", parse.tree().syntax());
+ eprint!("reparsed:\n{:#?}", new_parse.tree().syntax());
+ eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax());
+ assert_eq!(
+ format!("{:?}", a),
+ format!("{:?}", b),
+ "different syntax tree produced by the full reparse"
+ );
+ }
+ }
+ // FIXME
+ // assert_eq!(new_file.errors(), full_reparse.errors());
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
new file mode 100644
index 000000000..a047f61fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
@@ -0,0 +1,15 @@
+//! Things which exist to solve practical issues, but which shouldn't exist.
+//!
+//! Please avoid adding new usages of the functions in this module.
+
+use crate::{ast, AstNode};
+
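+/// Parses a free-standing expression by wrapping it in a dummy `const` item
+/// and extracting the first `ast::Expr` from the resulting tree.
+///
+/// Illustrative sketch (not a doctest), assuming the happy path:
+///
+/// ```ignore
+/// let expr = hacks::parse_expr_from_str("1 + 1").unwrap();
+/// assert_eq!(expr.syntax().text().to_string(), "1 + 1");
+/// ```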
+pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
+ let s = s.trim();
+ let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
+ let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
+ if expr.syntax().text() != s {
+ return None;
+ }
+ Some(expr)
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
new file mode 100644
index 000000000..7fa354c0c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -0,0 +1,358 @@
+//! Syntax Tree library used throughout rust-analyzer.
+//!
+//! Properties:
+//! - easy and fast incremental re-parsing
+//! - graceful handling of errors
+//! - full-fidelity representation (*any* text can be precisely represented as
+//! a syntax tree)
+//!
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
+//!
+//! The most interesting modules here are `syntax_node` (which defines the
+//! concrete syntax tree) and `ast` (which defines the abstract syntax tree on
+//! top of the CST). The actual parser lives in a separate `parser` crate,
+//! though the lexer lives in this crate.
+//!
+//! See `api_walkthrough` test in this file for a quick API tour!
+//!
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod syntax_node;
+mod syntax_error;
+mod parsing;
+mod validation;
+mod ptr;
+mod token_text;
+#[cfg(test)]
+mod tests;
+
+pub mod algo;
+pub mod ast;
+#[doc(hidden)]
+pub mod fuzz;
+pub mod utils;
+pub mod ted;
+pub mod hacks;
+
+use std::{marker::PhantomData, sync::Arc};
+
+use stdx::format_to;
+use text_edit::Indel;
+
+pub use crate::{
+ ast::{AstNode, AstToken},
+ ptr::{AstPtr, SyntaxNodePtr},
+ syntax_error::SyntaxError,
+ syntax_node::{
+ PreorderWithTokens, RustLanguage, SyntaxElement, SyntaxElementChildren, SyntaxNode,
+ SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
+ },
+ token_text::TokenText,
+};
+pub use parser::{SyntaxKind, T};
+pub use rowan::{
+ api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
+ TokenAtOffset, WalkEvent,
+};
+pub use smol_str::SmolStr;
+
+/// `Parse` is the result of the parsing: a syntax tree and a collection of
+/// errors.
+///
+/// Note that we always produce a syntax tree, even for completely invalid
+/// files.
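+///
+/// A minimal usage sketch (see the `api_walkthrough` test below for the full
+/// tour); this is illustrative only:
+///
+/// ```ignore
+/// let parse = SourceFile::parse("fn foo() {}");
+/// assert!(parse.errors().is_empty());
+/// let file: SourceFile = parse.tree();
+/// ```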
+#[derive(Debug, PartialEq, Eq)]
+pub struct Parse<T> {
+ green: GreenNode,
+ errors: Arc<Vec<SyntaxError>>,
+ _ty: PhantomData<fn() -> T>,
+}
+
+impl<T> Clone for Parse<T> {
+ fn clone(&self) -> Parse<T> {
+ Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
+ }
+}
+
+impl<T> Parse<T> {
+ fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+
+ pub fn syntax_node(&self) -> SyntaxNode {
+ SyntaxNode::new_root(self.green.clone())
+ }
+ pub fn errors(&self) -> &[SyntaxError] {
+ &*self.errors
+ }
+}
+
+impl<T: AstNode> Parse<T> {
+ pub fn to_syntax(self) -> Parse<SyntaxNode> {
+ Parse { green: self.green, errors: self.errors, _ty: PhantomData }
+ }
+
+ pub fn tree(&self) -> T {
+ T::cast(self.syntax_node()).unwrap()
+ }
+
+ pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
+ if self.errors.is_empty() {
+ Ok(self.tree())
+ } else {
+ Err(self.errors)
+ }
+ }
+}
+
+impl Parse<SyntaxNode> {
+ pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
+ if N::cast(self.syntax_node()).is_some() {
+ Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
+ } else {
+ None
+ }
+ }
+}
+
+impl Parse<SourceFile> {
+ pub fn debug_dump(&self) -> String {
+ let mut buf = format!("{:#?}", self.tree().syntax());
+ for err in self.errors.iter() {
+ format_to!(buf, "error {:?}: {}\n", err.range(), err);
+ }
+ buf
+ }
+
+ pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
+ }
+
+ fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
+ // FIXME: validation errors are not handled here
+ parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
+ |(green_node, errors, _reparsed_range)| Parse {
+ green: green_node,
+ errors: Arc::new(errors),
+ _ty: PhantomData,
+ },
+ )
+ }
+
+ fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ let mut text = self.tree().syntax().text().to_string();
+ indel.apply(&mut text);
+ SourceFile::parse(&text)
+ }
+}
+
+/// `SourceFile` represents a parse tree for a single Rust file.
+pub use crate::ast::SourceFile;
+
+impl SourceFile {
+ pub fn parse(text: &str) -> Parse<SourceFile> {
+ let (green, mut errors) = parsing::parse_text(text);
+ let root = SyntaxNode::new_root(green.clone());
+
+ errors.extend(validation::validate(&root));
+
+ assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+}
+
+/// Matches a `SyntaxNode` against an `ast` type.
+///
+/// # Example:
+///
+/// ```ignore
+/// match_ast! {
+/// match node {
+/// ast::CallExpr(it) => { ... },
+/// ast::MethodCallExpr(it) => { ... },
+/// ast::MacroCall(it) => { ... },
+/// _ => None,
+/// }
+/// }
+/// ```
+#[macro_export]
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( $( $path:ident )::+ ($it:pat) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = $($path::)+cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+/// This test does not assert anything and instead just shows off the crate's
+/// API.
+#[test]
+fn api_walkthrough() {
+ use ast::{HasModuleItem, HasName};
+
+ let source_code = "
+ fn foo() {
+ 1 + 1
+ }
+ ";
+ // `SourceFile` is the main entry point.
+ //
+ // The `parse` method returns a `Parse` -- a pair of a syntax tree and a list
+ // of errors. That is, a syntax tree is constructed even in the presence of errors.
+ let parse = SourceFile::parse(source_code);
+ assert!(parse.errors().is_empty());
+
+ // The `tree` method returns an owned syntax node of type `SourceFile`.
+ // Owned nodes are cheap: inside, they are `Rc` handles to the underlying data.
+ let file: SourceFile = parse.tree();
+
+ // `SourceFile` is the root of the syntax tree. We can iterate over the file's items.
+ // Let's fetch the `foo` function.
+ let mut func = None;
+ for item in file.items() {
+ match item {
+ ast::Item::Fn(f) => func = Some(f),
+ _ => unreachable!(),
+ }
+ }
+ let func: ast::Fn = func.unwrap();
+
+ // Each AST node has a bunch of getters for children. All getters return
+ // `Option`s though, to account for incomplete code. Some getters are common
+ // for several kinds of node. In this case, a trait like `ast::HasName`
+ // usually exists. By convention, all ast types should be used with the
+ // `ast::` qualifier.
+ let name: Option<ast::Name> = func.name();
+ let name = name.unwrap();
+ assert_eq!(name.text(), "foo");
+
+ // Let's get the `1 + 1` expression!
+ let body: ast::BlockExpr = func.body().unwrap();
+ let stmt_list: ast::StmtList = body.stmt_list().unwrap();
+ let expr: ast::Expr = stmt_list.tail_expr().unwrap();
+
+ // Enums are used to group related ast nodes together, and can be used for
+ // matching. However, because there are no public fields, it's possible to
+ // match only the top-level enum: that is the price we pay for increased API
+ // flexibility.
+ let bin_expr: &ast::BinExpr = match &expr {
+ ast::Expr::BinExpr(e) => e,
+ _ => unreachable!(),
+ };
+
+ // Besides the "typed" AST API, there's an untyped CST one as well.
+ // To switch from AST to CST, call the `.syntax()` method:
+ let expr_syntax: &SyntaxNode = expr.syntax();
+
+ // Note how `expr` and `bin_expr` are in fact the same node underneath:
+ assert!(expr_syntax == bin_expr.syntax());
+
+ // To go from CST to AST, the `AstNode::cast` function is used:
+ let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
+ Some(e) => e,
+ None => unreachable!(),
+ };
+
+ // The two properties each syntax node has are a `SyntaxKind`:
+ assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
+
+ // And text range:
+ assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into()));
+
+ // You can get a node's text as a `SyntaxText` object, which will traverse the
+ // tree collecting the tokens' text:
+ let text: SyntaxText = expr_syntax.text();
+ assert_eq!(text.to_string(), "1 + 1");
+
+ // There's a bunch of traversal methods on `SyntaxNode`:
+ assert_eq!(expr_syntax.parent().as_ref(), Some(stmt_list.syntax()));
+ assert_eq!(stmt_list.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
+ assert_eq!(
+ expr_syntax.next_sibling_or_token().map(|it| it.kind()),
+ Some(SyntaxKind::WHITESPACE)
+ );
+
+ // As well as some iterator helpers:
+ let f = expr_syntax.ancestors().find_map(ast::Fn::cast);
+ assert_eq!(f, Some(func));
+ assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
+ assert_eq!(
+ expr_syntax.descendants_with_tokens().count(),
+ 8, // 5 tokens `1`, ` `, `+`, ` `, `1`
+ // 2 child literal expressions: `1`, `1`
+ // 1 the node itself: `1 + 1`
+ );
+
+ // There's also a `preorder` method with more fine-grained iteration control:
+ let mut buf = String::new();
+ let mut indent = 0;
+ for event in expr_syntax.preorder_with_tokens() {
+ match event {
+ WalkEvent::Enter(node) => {
+ let text = match &node {
+ NodeOrToken::Node(it) => it.text().to_string(),
+ NodeOrToken::Token(it) => it.text().to_string(),
+ };
+ format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
+ indent += 2;
+ }
+ WalkEvent::Leave(_) => indent -= 2,
+ }
+ }
+ assert_eq!(indent, 0);
+ assert_eq!(
+ buf.trim(),
+ r#"
+"1 + 1" BIN_EXPR
+ "1" LITERAL
+ "1" INT_NUMBER
+ " " WHITESPACE
+ "+" PLUS
+ " " WHITESPACE
+ "1" LITERAL
+ "1" INT_NUMBER
+"#
+ .trim()
+ );
+
+ // To recursively process the tree, there are three approaches:
+ // 1. explicitly call getter methods on AST nodes.
+ // 2. use descendants and `AstNode::cast`.
+ // 3. use descendants and `match_ast!`.
+ //
+ // Here's what the first one looks like:
+ let exprs_cast: Vec<String> = file
+ .syntax()
+ .descendants()
+ .filter_map(ast::Expr::cast)
+ .map(|expr| expr.syntax().text().to_string())
+ .collect();
+
+ // An alternative is to use a macro.
+ let mut exprs_visit = Vec::new();
+ for node in file.syntax().descendants() {
+ match_ast! {
+ match node {
+ ast::Expr(it) => {
+ let res = it.syntax().text().to_string();
+ exprs_visit.push(res);
+ },
+ _ => (),
+ }
+ }
+ }
+ assert_eq!(exprs_cast, exprs_visit);
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
new file mode 100644
index 000000000..047e670c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
@@ -0,0 +1,46 @@
+//! Lexing, bridging to the parser (which does the actual parsing), and
+//! incremental reparsing.
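+//!
+//! The pipeline below is: `LexedStr::new` lexes the text, `to_input` prepares
+//! the trivia-free parser input, `TopEntryPoint::SourceFile.parse` produces
+//! the parser `Output`, and `build_tree` re-interleaves trivia while building
+//! the green tree and collecting lexer errors.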
+
+mod reparsing;
+
+use rowan::TextRange;
+
+use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
+
+pub(crate) use crate::parsing::reparsing::incremental_reparse;
+
+pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+ let lexed = parser::LexedStr::new(text);
+ let parser_input = lexed.to_input();
+ let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
+ let (node, errors, _eof) = build_tree(lexed, parser_output);
+ (node, errors)
+}
+
+pub(crate) fn build_tree(
+ lexed: parser::LexedStr<'_>,
+ parser_output: parser::Output,
+) -> (GreenNode, Vec<SyntaxError>, bool) {
+ let mut builder = SyntaxTreeBuilder::default();
+
+ let is_eof = lexed.intersperse_trivia(&parser_output, &mut |step| match step {
+ parser::StrStep::Token { kind, text } => builder.token(kind, text),
+ parser::StrStep::Enter { kind } => builder.start_node(kind),
+ parser::StrStep::Exit => builder.finish_node(),
+ parser::StrStep::Error { msg, pos } => {
+ builder.error(msg.to_string(), pos.try_into().unwrap())
+ }
+ });
+
+ let (node, mut errors) = builder.finish_raw();
+ for (i, err) in lexed.errors() {
+ let text_range = lexed.text_range(i);
+ let text_range = TextRange::new(
+ text_range.start.try_into().unwrap(),
+ text_range.end.try_into().unwrap(),
+ );
+ errors.push(SyntaxError::new(err, text_range))
+ }
+
+ (node, errors, is_eof)
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
new file mode 100644
index 000000000..701e6232d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -0,0 +1,441 @@
+//! Implementation of incremental re-parsing.
+//!
+//! We use two simple strategies for this:
+//! - if the edit modifies only a single token (like changing an identifier's
+//! letter), we replace only this token.
+//! - otherwise, we search for the nearest `{}` block which contains the edit
+//! and try to parse only this block.
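+//!
+//! For example, renaming the identifier in `fn foo() {}` only relexes that
+//! single token (the first strategy), while adding a statement inside the
+//! body reparses just the enclosing `{ ... }` block (the second strategy);
+//! see the tests at the bottom of this file.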
+
+use parser::Reparser;
+use text_edit::Indel;
+
+use crate::{
+ parsing::build_tree,
+ syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
+ SyntaxError,
+ SyntaxKind::*,
+ TextRange, TextSize, T,
+};
+
+pub(crate) fn incremental_reparse(
+ node: &SyntaxNode,
+ edit: &Indel,
+ errors: Vec<SyntaxError>,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
+ return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+ }
+
+ if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+ return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+ }
+ None
+}
+
+fn reparse_token(
+ root: &SyntaxNode,
+ edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ let prev_token = root.covering_element(edit.delete).as_token()?.clone();
+ let prev_token_kind = prev_token.kind();
+ match prev_token_kind {
+ WHITESPACE | COMMENT | IDENT | STRING => {
+ if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
+ // removing a newline may extend the previous token
+ let deleted_range = edit.delete - prev_token.text_range().start();
+ if prev_token.text()[deleted_range].contains('\n') {
+ return None;
+ }
+ }
+
+ let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
+ let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?;
+
+ if new_token_kind != prev_token_kind
+ || (new_token_kind == IDENT && is_contextual_kw(&new_text))
+ {
+ return None;
+ }
+
+ // Check that the edited token is not a part of a bigger token.
+ // E.g. if for the source code `bruh"str"` the user removed `ruh`, then
+ // `b` no longer remains an identifier, but becomes a part of a byte string literal.
+ if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
+ new_text.push(next_char);
+ let token_with_next_char = parser::LexedStr::single_token(&new_text);
+ if let Some((_kind, _error)) = token_with_next_char {
+ return None;
+ }
+ new_text.pop();
+ }
+
+ let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
+ let range = TextRange::up_to(TextSize::of(&new_text));
+ Some((
+ prev_token.replace_with(new_token),
+ new_err.into_iter().map(|msg| SyntaxError::new(msg, range)).collect(),
+ prev_token.text_range(),
+ ))
+ }
+ _ => None,
+ }
+}
+
+fn reparse_block(
+ root: &SyntaxNode,
+ edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ let (node, reparser) = find_reparsable_node(root, edit.delete)?;
+ let text = get_text_after_edit(node.clone().into(), edit);
+
+ let lexed = parser::LexedStr::new(text.as_str());
+ let parser_input = lexed.to_input();
+ if !is_balanced(&lexed) {
+ return None;
+ }
+
+ let tree_traversal = reparser.parse(&parser_input);
+
+ let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
+
+ Some((node.replace_with(green), new_parser_errors, node.text_range()))
+}
+
+fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String {
+ let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone());
+
+ let mut text = match element {
+ NodeOrToken::Token(token) => token.text().to_string(),
+ NodeOrToken::Node(node) => node.text().to_string(),
+ };
+ edit.apply(&mut text);
+ text
+}
+
+fn is_contextual_kw(text: &str) -> bool {
+ matches!(text, "auto" | "default" | "union")
+}
+
+fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
+ let node = node.covering_element(range);
+
+ node.ancestors().find_map(|node| {
+ let first_child = node.first_child_or_token().map(|it| it.kind());
+ let parent = node.parent().map(|it| it.kind());
+ Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
+ })
+}
+
+fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool {
+ if lexed.is_empty() || lexed.kind(0) != T!['{'] || lexed.kind(lexed.len() - 1) != T!['}'] {
+ return false;
+ }
+ let mut balance = 0usize;
+ for i in 1..lexed.len() - 1 {
+ match lexed.kind(i) {
+ T!['{'] => balance += 1,
+ T!['}'] => {
+ balance = match balance.checked_sub(1) {
+ Some(b) => b,
+ None => return false,
+ }
+ }
+ _ => (),
+ }
+ }
+ balance == 0
+}
+
+fn merge_errors(
+ old_errors: Vec<SyntaxError>,
+ new_errors: Vec<SyntaxError>,
+ range_before_reparse: TextRange,
+ edit: &Indel,
+) -> Vec<SyntaxError> {
+ let mut res = Vec::new();
+
+ for old_err in old_errors {
+ let old_err_range = old_err.range();
+ if old_err_range.end() <= range_before_reparse.start() {
+ res.push(old_err);
+ } else if old_err_range.start() >= range_before_reparse.end() {
+ let inserted_len = TextSize::of(&edit.insert);
+ res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len()));
+ // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug)
+ }
+ }
+ res.extend(new_errors.into_iter().map(|new_err| {
+ // fighting borrow checker with a variable ;)
+ let offseted_range = new_err.range() + range_before_reparse.start();
+ new_err.with_range(offseted_range)
+ }));
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{assert_eq_text, extract_range};
+
+ use super::*;
+ use crate::{AstNode, Parse, SourceFile};
+
+ fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
+ let (range, before) = extract_range(before);
+ let edit = Indel::replace(range, replace_with.to_owned());
+ let after = {
+ let mut after = before.clone();
+ edit.apply(&mut after);
+ after
+ };
+
+ let fully_reparsed = SourceFile::parse(&after);
+ let incrementally_reparsed: Parse<SourceFile> = {
+ let before = SourceFile::parse(&before);
+ let (green, new_errors, range) =
+ incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+ assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
+ Parse::new(green, new_errors)
+ };
+
+ assert_eq_text!(
+ &format!("{:#?}", fully_reparsed.tree().syntax()),
+ &format!("{:#?}", incrementally_reparsed.tree().syntax()),
+ );
+ assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors());
+ }
+
+ #[test] // FIXME: some test here actually test token reparsing
+ fn reparse_block_tests() {
+ do_check(
+ r"
+fn foo() {
+ let x = foo + $0bar$0
+}
+",
+ "baz",
+ 3,
+ );
+ do_check(
+ r"
+fn foo() {
+ let x = foo$0 + bar$0
+}
+",
+ "baz",
+ 25,
+ );
+ do_check(
+ r"
+struct Foo {
+ f: foo$0$0
+}
+",
+ ",\n g: (),",
+ 14,
+ );
+ do_check(
+ r"
+fn foo {
+ let;
+ 1 + 1;
+ $092$0;
+}
+",
+ "62",
+ 31, // FIXME: reparse only int literal here
+ );
+ do_check(
+ r"
+mod foo {
+ fn $0$0
+}
+",
+ "bar",
+ 11,
+ );
+
+ do_check(
+ r"
+trait Foo {
+ type $0Foo$0;
+}
+",
+ "Output",
+ 3,
+ );
+ do_check(
+ r"
+impl IntoIterator<Item=i32> for Foo {
+ f$0$0
+}
+",
+ "n next(",
+ 9,
+ );
+ do_check(r"use a::b::{foo,$0,bar$0};", "baz", 10);
+ do_check(
+ r"
+pub enum A {
+ Foo$0$0
+}
+",
+ "\nBar;\n",
+ 11,
+ );
+ do_check(
+ r"
+foo!{a, b$0$0 d}
+",
+ ", c[3]",
+ 8,
+ );
+ do_check(
+ r"
+fn foo() {
+ vec![$0$0]
+}
+",
+ "123",
+ 14,
+ );
+ do_check(
+ r"
+extern {
+ fn$0;$0
+}
+",
+ " exit(code: c_int)",
+ 11,
+ );
+ }
+
+ #[test]
+ fn reparse_token_tests() {
+ do_check(
+ r"$0$0
+fn foo() -> i32 { 1 }
+",
+ "\n\n\n \n",
+ 1,
+ );
+ do_check(
+ r"
+fn foo() -> $0$0 {}
+",
+ " \n",
+ 2,
+ );
+ do_check(
+ r"
+fn $0foo$0() -> i32 { 1 }
+",
+ "bar",
+ 3,
+ );
+ do_check(
+ r"
+fn foo$0$0foo() { }
+",
+ "bar",
+ 6,
+ );
+ do_check(
+ r"
+fn foo /* $0$0 */ () {}
+",
+ "some comment",
+ 6,
+ );
+ do_check(
+ r"
+fn baz $0$0 () {}
+",
+ " \t\t\n\n",
+ 2,
+ );
+ do_check(
+ r"
+fn baz $0$0 () {}
+",
+ " \t\t\n\n",
+ 2,
+ );
+ do_check(
+ r"
+/// foo $0$0omment
+mod { }
+",
+ "c",
+ 14,
+ );
+ do_check(
+ r#"
+fn -> &str { "Hello$0$0" }
+"#,
+ ", world",
+ 7,
+ );
+ do_check(
+ r#"
+fn -> &str { // "Hello$0$0"
+"#,
+ ", world",
+ 10,
+ );
+ do_check(
+ r##"
+fn -> &str { r#"Hello$0$0"#
+"##,
+ ", world",
+ 10,
+ );
+ do_check(
+ r"
+#[derive($0Copy$0)]
+enum Foo {
+
+}
+",
+ "Clone",
+ 4,
+ );
+ }
+
+ #[test]
+ fn reparse_str_token_with_error_unchanged() {
+ do_check(r#""$0Unclosed$0 string literal"#, "Still unclosed", 24);
+ }
+
+ #[test]
+ fn reparse_str_token_with_error_fixed() {
+ do_check(r#""unterinated$0$0"#, "\"", 12);
+ }
+
+ #[test]
+ fn reparse_block_with_error_in_middle_unchanged() {
+ do_check(
+ r#"fn main() {
+ if {}
+ 32 + 4$0$0
+ return
+ if {}
+ }"#,
+ "23",
+ 105,
+ )
+ }
+
+ #[test]
+ fn reparse_block_with_error_in_middle_fixed() {
+ do_check(
+ r#"fn main() {
+ if {}
+ 32 + 4$0$0
+ return
+ if {}
+ }"#,
+ ";",
+ 105,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
new file mode 100644
index 000000000..a886972ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -0,0 +1,104 @@
+//! In rust-analyzer, syntax trees are transient objects.
+//!
+//! That means that we create trees when we need them, and tear them down to
+//! save memory. In this architecture, hanging on to a particular syntax node
+//! for a long time is ill-advised, as that keeps the whole tree resident.
+//!
+//! Instead, we provide a [`SyntaxNodePtr`] type, which stores information about
+//! *location* of a particular syntax node in a tree. It's a small type which can
+//! be cheaply stored, and which can be resolved to a real [`SyntaxNode`] when
+//! necessary.
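+//!
+//! A hedged usage sketch (illustrative, not a doctest):
+//!
+//! ```ignore
+//! let file = SourceFile::parse("fn foo() {}").tree();
+//! let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
+//! let ptr = AstPtr::new(&func);                // small, tree-independent handle
+//! let func_again = ptr.to_node(file.syntax()); // resolve against a (re)built tree
+//! assert_eq!(func, func_again);
+//! ```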
+
+use std::{
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+};
+
+use rowan::TextRange;
+
+use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode};
+
+/// A "pointer" to a [`SyntaxNode`], via location in the source code.
+pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr<RustLanguage>;
+
+/// Like `SyntaxNodePtr`, but remembers the type of node.
+#[derive(Debug)]
+pub struct AstPtr<N: AstNode> {
+ raw: SyntaxNodePtr,
+ _ty: PhantomData<fn() -> N>,
+}
+
+impl<N: AstNode> Clone for AstPtr<N> {
+ fn clone(&self) -> AstPtr<N> {
+ AstPtr { raw: self.raw.clone(), _ty: PhantomData }
+ }
+}
+
+impl<N: AstNode> Eq for AstPtr<N> {}
+
+impl<N: AstNode> PartialEq for AstPtr<N> {
+ fn eq(&self, other: &AstPtr<N>) -> bool {
+ self.raw == other.raw
+ }
+}
+
+impl<N: AstNode> Hash for AstPtr<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<N: AstNode> AstPtr<N> {
+ pub fn new(node: &N) -> AstPtr<N> {
+ AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
+ }
+
+ pub fn to_node(&self, root: &SyntaxNode) -> N {
+ let syntax_node = self.raw.to_node(root);
+ N::cast(syntax_node).unwrap()
+ }
+
+ pub fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+ self.raw.clone()
+ }
+
+ pub fn text_range(&self) -> TextRange {
+ self.raw.text_range()
+ }
+
+ pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> {
+ if !U::can_cast(self.raw.kind()) {
+ return None;
+ }
+ Some(AstPtr { raw: self.raw, _ty: PhantomData })
+ }
+
+ pub fn upcast<M: AstNode>(self) -> AstPtr<M>
+ where
+ N: Into<M>,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
+
+ /// Like `SyntaxNodePtr::cast` but the trait bounds work out.
+ pub fn try_from_raw(raw: SyntaxNodePtr) -> Option<AstPtr<N>> {
+ N::can_cast(raw.kind()).then(|| AstPtr { raw, _ty: PhantomData })
+ }
+}
+
+impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
+ fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
+ ptr.raw
+ }
+}
+
+#[test]
+fn test_local_syntax_ptr() {
+ use crate::{ast, AstNode, SourceFile};
+
+ let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
+ let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
+ let ptr = SyntaxNodePtr::new(field.syntax());
+ let field_syntax = ptr.to_node(file.syntax());
+ assert_eq!(field.syntax(), &field_syntax);
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs
new file mode 100644
index 000000000..dc6130bd6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs
@@ -0,0 +1,44 @@
+//! See docs for `SyntaxError`.
+
+use std::fmt;
+
+use crate::{TextRange, TextSize};
+
+/// Represents the result of unsuccessful tokenization, parsing
+/// or tree validation.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxError(String, TextRange);
+
+// FIXME: there was an unused SyntaxErrorKind previously (before this enum was removed)
+// It was introduced in this PR: https://github.com/rust-lang/rust-analyzer/pull/846/files#diff-827da9b03b8f9faa1bade5cdd44d5dafR95
+// but it was not removed by a mistake.
+//
+// So, we need to find a place to put validation for attributes in match clauses.
+// Code before refactor:
+// InvalidMatchInnerAttr => {
+// write!(f, "Inner attributes are only allowed directly after the opening brace of the match expression")
+// }
+
+impl SyntaxError {
+ pub fn new(message: impl Into<String>, range: TextRange) -> Self {
+ Self(message.into(), range)
+ }
+ pub fn new_at_offset(message: impl Into<String>, offset: TextSize) -> Self {
+ Self(message.into(), TextRange::empty(offset))
+ }
+
+ pub fn range(&self) -> TextRange {
+ self.1
+ }
+
+ pub fn with_range(mut self, range: TextRange) -> Self {
+ self.1 = range;
+ self
+ }
+}
+
+impl fmt::Display for SyntaxError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
new file mode 100644
index 000000000..a08c01597
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
@@ -0,0 +1,75 @@
+//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer.
+//!
+//! The CST includes comments and whitespace, provides a single node type,
+//! `SyntaxNode`, and a basic traversal API (parent, children, siblings).
+//!
+//! The *real* implementation is in the (language-agnostic) `rowan` crate; this
+//! module just wraps its API.
+
+use rowan::{GreenNodeBuilder, Language};
+
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
+
+pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+
+ fn kind_from_raw(raw: rowan::SyntaxKind) -> SyntaxKind {
+ SyntaxKind::from(raw.0)
+ }
+
+ fn kind_to_raw(kind: SyntaxKind) -> rowan::SyntaxKind {
+ rowan::SyntaxKind(kind.into())
+ }
+}
+
+pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
+pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
+pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
+pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
+pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
+pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;
+
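+/// Builder used by `parsing::build_tree`: it is driven with
+/// `start_node`/`token`/`finish_node` calls while the parser output is
+/// replayed, and `finish` yields a `Parse<SyntaxNode>`.
+///
+/// A minimal, illustrative call sequence (not a doctest):
+///
+/// ```ignore
+/// let mut builder = SyntaxTreeBuilder::default();
+/// builder.start_node(SyntaxKind::SOURCE_FILE);
+/// builder.token(SyntaxKind::WHITESPACE, "\n");
+/// builder.finish_node();
+/// let _parse = builder.finish();
+/// ```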
+#[derive(Default)]
+pub struct SyntaxTreeBuilder {
+ errors: Vec<SyntaxError>,
+ inner: GreenNodeBuilder<'static>,
+}
+
+impl SyntaxTreeBuilder {
+ pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) {
+ let green = self.inner.finish();
+ (green, self.errors)
+ }
+
+ pub fn finish(self) -> Parse<SyntaxNode> {
+ let (green, errors) = self.finish_raw();
+ // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357
+ if cfg!(debug_assertions) && false {
+ let node = SyntaxNode::new_root(green.clone());
+ crate::validation::validate_block_structure(&node);
+ }
+ Parse::new(green, errors)
+ }
+
+ pub fn token(&mut self, kind: SyntaxKind, text: &str) {
+ let kind = RustLanguage::kind_to_raw(kind);
+ self.inner.token(kind, text);
+ }
+
+ pub fn start_node(&mut self, kind: SyntaxKind) {
+ let kind = RustLanguage::kind_to_raw(kind);
+ self.inner.start_node(kind);
+ }
+
+ pub fn finish_node(&mut self) {
+ self.inner.finish_node();
+ }
+
+ pub fn error(&mut self, error: String, text_pos: TextSize) {
+ self.errors.push(SyntaxError::new_at_offset(error, text_pos));
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ted.rs b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
new file mode 100644
index 000000000..a47b4b11c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
@@ -0,0 +1,206 @@
+//! Primitive tree editor, ed for trees.
+//!
+//! The `_raw`-suffixed functions insert elements as is, while the unsuffixed
+//! versions fix up elements around the edges.
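+//!
+//! A hedged usage sketch, assuming an `item_list: ast::ItemList` and a
+//! `new_fn: ast::Fn` built elsewhere (illustrative only):
+//!
+//! ```ignore
+//! // `append_child` also inserts the separating whitespace;
+//! // `append_child_raw` would splice the node in verbatim.
+//! ted::append_child(item_list.syntax(), new_fn.syntax().clone());
+//! ```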
+use std::{mem, ops::RangeInclusive};
+
+use parser::T;
+
+use crate::{
+ ast::{self, edit::IndentLevel, make, AstNode},
+ SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+/// Utility trait to allow calling `ted` functions with references or owned
+/// nodes. Do not use outside of this module.
+pub trait Element {
+ fn syntax_element(self) -> SyntaxElement;
+}
+
+impl<E: Element + Clone> Element for &'_ E {
+ fn syntax_element(self) -> SyntaxElement {
+ self.clone().syntax_element()
+ }
+}
+impl Element for SyntaxElement {
+ fn syntax_element(self) -> SyntaxElement {
+ self
+ }
+}
+impl Element for SyntaxNode {
+ fn syntax_element(self) -> SyntaxElement {
+ self.into()
+ }
+}
+impl Element for SyntaxToken {
+ fn syntax_element(self) -> SyntaxElement {
+ self.into()
+ }
+}
+
+#[derive(Debug)]
+pub struct Position {
+ repr: PositionRepr,
+}
+
+#[derive(Debug)]
+enum PositionRepr {
+ FirstChild(SyntaxNode),
+ After(SyntaxElement),
+}
+
+impl Position {
+ pub fn after(elem: impl Element) -> Position {
+ let repr = PositionRepr::After(elem.syntax_element());
+ Position { repr }
+ }
+ pub fn before(elem: impl Element) -> Position {
+ let elem = elem.syntax_element();
+ let repr = match elem.prev_sibling_or_token() {
+ Some(it) => PositionRepr::After(it),
+ None => PositionRepr::FirstChild(elem.parent().unwrap()),
+ };
+ Position { repr }
+ }
+ pub fn first_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
+ let repr = PositionRepr::FirstChild(node.clone().into());
+ Position { repr }
+ }
+ pub fn last_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
+ let node = node.clone().into();
+ let repr = match node.last_child_or_token() {
+ Some(it) => PositionRepr::After(it),
+ None => PositionRepr::FirstChild(node),
+ };
+ Position { repr }
+ }
+}
+
+pub fn insert(position: Position, elem: impl Element) {
+ insert_all(position, vec![elem.syntax_element()]);
+}
+pub fn insert_raw(position: Position, elem: impl Element) {
+ insert_all_raw(position, vec![elem.syntax_element()]);
+}
+pub fn insert_all(position: Position, mut elements: Vec<SyntaxElement>) {
+ if let Some(first) = elements.first() {
+ if let Some(ws) = ws_before(&position, first) {
+ elements.insert(0, ws.into());
+ }
+ }
+ if let Some(last) = elements.last() {
+ if let Some(ws) = ws_after(&position, last) {
+ elements.push(ws.into());
+ }
+ }
+ insert_all_raw(position, elements);
+}
+pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
+ let (parent, index) = match position.repr {
+ PositionRepr::FirstChild(parent) => (parent, 0),
+ PositionRepr::After(child) => (child.parent().unwrap(), child.index() + 1),
+ };
+ parent.splice_children(index..index, elements);
+}
+
+pub fn remove(elem: impl Element) {
+ elem.syntax_element().detach();
+}
+pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
+ replace_all(range, Vec::new());
+}
+pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
+ let mut it = range.into_iter();
+ if let Some(mut first) = it.next() {
+ match it.last() {
+ Some(mut last) => {
+ if first.index() > last.index() {
+ mem::swap(&mut first, &mut last);
+ }
+ remove_all(first..=last);
+ }
+ None => remove(first),
+ }
+ }
+}
+
+pub fn replace(old: impl Element, new: impl Element) {
+ replace_with_many(old, vec![new.syntax_element()]);
+}
+pub fn replace_with_many(old: impl Element, new: Vec<SyntaxElement>) {
+ let old = old.syntax_element();
+ replace_all(old.clone()..=old, new);
+}
+pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
+ let start = range.start().index();
+ let end = range.end().index();
+ let parent = range.start().parent().unwrap();
+ parent.splice_children(start..end + 1, new);
+}
+
+pub fn append_child(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
+ let position = Position::last_child_of(node);
+ insert(position, child);
+}
+pub fn append_child_raw(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
+ let position = Position::last_child_of(node);
+ insert_raw(position, child);
+}
+
+fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
+ let prev = match &position.repr {
+ PositionRepr::FirstChild(_) => return None,
+ PositionRepr::After(it) => it,
+ };
+
+ if prev.kind() == T!['{'] && new.kind() == SyntaxKind::USE {
+ if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) {
+ let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+
+ if prev.kind() == T!['{'] && ast::Stmt::can_cast(new.kind()) {
+ if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) {
+ let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+
+ ws_between(prev, new)
+}
+fn ws_after(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
+ let next = match &position.repr {
+ PositionRepr::FirstChild(parent) => parent.first_child_or_token()?,
+ PositionRepr::After(sibling) => sibling.next_sibling_or_token()?,
+ };
+ ws_between(new, &next)
+}
+fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken> {
+ if left.kind() == SyntaxKind::WHITESPACE || right.kind() == SyntaxKind::WHITESPACE {
+ return None;
+ }
+ if right.kind() == T![;] || right.kind() == T![,] {
+ return None;
+ }
+ if left.kind() == T![<] || right.kind() == T![>] {
+ return None;
+ }
+ if left.kind() == T![&] && right.kind() == SyntaxKind::LIFETIME {
+ return None;
+ }
+ if right.kind() == SyntaxKind::GENERIC_ARG_LIST {
+ return None;
+ }
+
+ if right.kind() == SyntaxKind::USE {
+ let mut indent = IndentLevel::from_element(left);
+ if left.kind() == SyntaxKind::USE {
+ indent.0 = IndentLevel::from_element(right).0.max(indent.0);
+ }
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ Some(make::tokens::single_space())
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
new file mode 100644
index 000000000..58fba8cfa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -0,0 +1,186 @@
+#[cfg(not(feature = "in-rust-tree"))]
+mod ast_src;
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen_ast;
+
+use std::{
+ fs,
+ path::{Path, PathBuf},
+};
+
+use ast::HasName;
+use expect_test::expect_file;
+use rayon::prelude::*;
+use test_utils::{bench, bench_fixture, project_root};
+
+use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
+
+#[test]
+fn parse_smoke_test() {
+ let code = r##"
+fn main() {
+ println!("Hello, world!")
+}
+ "##;
+
+ let parse = SourceFile::parse(code);
+ // eprintln!("{:#?}", parse.syntax_node());
+ assert!(parse.ok().is_ok());
+}
+
+#[test]
+fn benchmark_parser() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ let data = bench_fixture::glorious_old_parser();
+ let tree = {
+ let _b = bench("parsing");
+ let p = SourceFile::parse(&data);
+ assert!(p.errors.is_empty());
+ assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
+ p.tree()
+ };
+
+ {
+ let _b = bench("tree traversal");
+ let fn_names =
+ tree.syntax().descendants().filter_map(ast::Fn::cast).filter_map(|f| f.name()).count();
+ assert_eq!(fn_names, 268);
+ }
+}
+
+#[test]
+fn validation_tests() {
+ dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| {
+ let parse = SourceFile::parse(text);
+ let errors = parse.errors();
+ assert_errors_are_present(errors, path);
+ parse.debug_dump()
+ });
+}
+
+#[test]
+fn parser_fuzz_tests() {
+ for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) {
+ fuzz::check_parser(&text)
+ }
+}
+
+#[test]
+fn reparse_fuzz_tests() {
+ for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) {
+ let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
+ check.run();
+ }
+}
+
+/// Test that rust-analyzer can parse and validate its own source code
+#[test]
+fn self_hosting_parsing() {
+ let crates_dir = project_root().join("crates");
+
+ let mut files = ::sourcegen::list_rust_files(&crates_dir);
+ files.retain(|path| {
+ // Get all files which are not in the crates/syntax/test_data folder
+ !path.components().any(|component| component.as_os_str() == "test_data")
+ });
+
+ assert!(
+ files.len() > 100,
+ "self_hosting_parsing found too few files - is it running in the right directory?"
+ );
+
+ let errors = files
+ .into_par_iter()
+ .filter_map(|file| {
+ let text = read_text(&file);
+ match SourceFile::parse(&text).ok() {
+ Ok(_) => None,
+ Err(err) => Some((file, err)),
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !errors.is_empty() {
+ let errors = errors
+ .into_iter()
+ .map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
+ .collect::<String>();
+ panic!("Parsing errors:\n{}\n", errors);
+ }
+}
+
+fn test_data_dir() -> PathBuf {
+ project_root().join("crates/syntax/test_data")
+}
+
+fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
+ assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+}
+
+/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
+/// subdirectories defined by `paths`.
+///
+/// If the content of the matching output file differs from the output of `f()`
+/// the test will fail.
+///
+/// If there is no matching output file it will be created and filled with the
+/// output of `f()`, but the test will fail.
+fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
+where
+ F: Fn(&str, &Path) -> String,
+{
+ for (path, input_code) in collect_rust_files(test_data_dir, paths) {
+ let actual = f(&input_code, &path);
+ let path = path.with_extension(outfile_extension);
+ expect_file![path].assert_eq(&actual)
+ }
+}
+
+/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
+fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
+ paths
+ .iter()
+ .flat_map(|path| {
+ let path = root_dir.to_owned().join(path);
+ rust_files_in_dir(&path).into_iter()
+ })
+ .map(|path| {
+ let text = read_text(&path);
+ (path, text)
+ })
+ .collect()
+}
+
+/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
+fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
+ let mut acc = Vec::new();
+ for file in fs::read_dir(&dir).unwrap() {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() == "rs" {
+ acc.push(path);
+ }
+ }
+ acc.sort();
+ acc
+}
+
+/// Read file and normalize newlines.
+///
+/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
+///
+/// ```
+/// let s = "
+/// ";
+/// assert_eq!(s.as_bytes(), &[10]);
+/// ```
+///
+/// so this should always be correct.
+fn read_text(path: &Path) -> String {
+ fs::read_to_string(path)
+ .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
+ .replace("\r\n", "\n")
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
new file mode 100644
index 000000000..cf5be1c30
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
@@ -0,0 +1,252 @@
+//! Defines input for the code generation process.
+
+pub(crate) struct KindsSrc<'a> {
+ pub(crate) punct: &'a [(&'a str, &'a str)],
+ pub(crate) keywords: &'a [&'a str],
+ pub(crate) contextual_keywords: &'a [&'a str],
+ pub(crate) literals: &'a [&'a str],
+ pub(crate) tokens: &'a [&'a str],
+ pub(crate) nodes: &'a [&'a str],
+}
+
+pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
+ punct: &[
+ (";", "SEMICOLON"),
+ (",", "COMMA"),
+ ("(", "L_PAREN"),
+ (")", "R_PAREN"),
+ ("{", "L_CURLY"),
+ ("}", "R_CURLY"),
+ ("[", "L_BRACK"),
+ ("]", "R_BRACK"),
+ ("<", "L_ANGLE"),
+ (">", "R_ANGLE"),
+ ("@", "AT"),
+ ("#", "POUND"),
+ ("~", "TILDE"),
+ ("?", "QUESTION"),
+ ("$", "DOLLAR"),
+ ("&", "AMP"),
+ ("|", "PIPE"),
+ ("+", "PLUS"),
+ ("*", "STAR"),
+ ("/", "SLASH"),
+ ("^", "CARET"),
+ ("%", "PERCENT"),
+ ("_", "UNDERSCORE"),
+ (".", "DOT"),
+ ("..", "DOT2"),
+ ("...", "DOT3"),
+ ("..=", "DOT2EQ"),
+ (":", "COLON"),
+ ("::", "COLON2"),
+ ("=", "EQ"),
+ ("==", "EQ2"),
+ ("=>", "FAT_ARROW"),
+ ("!", "BANG"),
+ ("!=", "NEQ"),
+ ("-", "MINUS"),
+ ("->", "THIN_ARROW"),
+ ("<=", "LTEQ"),
+ (">=", "GTEQ"),
+ ("+=", "PLUSEQ"),
+ ("-=", "MINUSEQ"),
+ ("|=", "PIPEEQ"),
+ ("&=", "AMPEQ"),
+ ("^=", "CARETEQ"),
+ ("/=", "SLASHEQ"),
+ ("*=", "STAREQ"),
+ ("%=", "PERCENTEQ"),
+ ("&&", "AMP2"),
+ ("||", "PIPE2"),
+ ("<<", "SHL"),
+ (">>", "SHR"),
+ ("<<=", "SHLEQ"),
+ (">>=", "SHREQ"),
+ ],
+ keywords: &[
+ "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else",
+ "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro",
+ "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct",
+ "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield",
+ ],
+ contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"],
+ literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
+ tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],
+ nodes: &[
+ "SOURCE_FILE",
+ "STRUCT",
+ "UNION",
+ "ENUM",
+ "FN",
+ "RET_TYPE",
+ "EXTERN_CRATE",
+ "MODULE",
+ "USE",
+ "STATIC",
+ "CONST",
+ "TRAIT",
+ "IMPL",
+ "TYPE_ALIAS",
+ "MACRO_CALL",
+ "MACRO_RULES",
+ "MACRO_ARM",
+ "TOKEN_TREE",
+ "MACRO_DEF",
+ "PAREN_TYPE",
+ "TUPLE_TYPE",
+ "MACRO_TYPE",
+ "NEVER_TYPE",
+ "PATH_TYPE",
+ "PTR_TYPE",
+ "ARRAY_TYPE",
+ "SLICE_TYPE",
+ "REF_TYPE",
+ "INFER_TYPE",
+ "FN_PTR_TYPE",
+ "FOR_TYPE",
+ "IMPL_TRAIT_TYPE",
+ "DYN_TRAIT_TYPE",
+ "OR_PAT",
+ "PAREN_PAT",
+ "REF_PAT",
+ "BOX_PAT",
+ "IDENT_PAT",
+ "WILDCARD_PAT",
+ "REST_PAT",
+ "PATH_PAT",
+ "RECORD_PAT",
+ "RECORD_PAT_FIELD_LIST",
+ "RECORD_PAT_FIELD",
+ "TUPLE_STRUCT_PAT",
+ "TUPLE_PAT",
+ "SLICE_PAT",
+ "RANGE_PAT",
+ "LITERAL_PAT",
+ "MACRO_PAT",
+ "CONST_BLOCK_PAT",
+ // atoms
+ "TUPLE_EXPR",
+ "ARRAY_EXPR",
+ "PAREN_EXPR",
+ "PATH_EXPR",
+ "CLOSURE_EXPR",
+ "IF_EXPR",
+ "WHILE_EXPR",
+ "LOOP_EXPR",
+ "FOR_EXPR",
+ "CONTINUE_EXPR",
+ "BREAK_EXPR",
+ "LABEL",
+ "BLOCK_EXPR",
+ "STMT_LIST",
+ "RETURN_EXPR",
+ "YIELD_EXPR",
+ "LET_EXPR",
+ "UNDERSCORE_EXPR",
+ "MACRO_EXPR",
+ "MATCH_EXPR",
+ "MATCH_ARM_LIST",
+ "MATCH_ARM",
+ "MATCH_GUARD",
+ "RECORD_EXPR",
+ "RECORD_EXPR_FIELD_LIST",
+ "RECORD_EXPR_FIELD",
+ "BOX_EXPR",
+ // postfix
+ "CALL_EXPR",
+ "INDEX_EXPR",
+ "METHOD_CALL_EXPR",
+ "FIELD_EXPR",
+ "AWAIT_EXPR",
+ "TRY_EXPR",
+ "CAST_EXPR",
+ // unary
+ "REF_EXPR",
+ "PREFIX_EXPR",
+ "RANGE_EXPR", // just weird
+ "BIN_EXPR",
+ "EXTERN_BLOCK",
+ "EXTERN_ITEM_LIST",
+ "VARIANT",
+ "RECORD_FIELD_LIST",
+ "RECORD_FIELD",
+ "TUPLE_FIELD_LIST",
+ "TUPLE_FIELD",
+ "VARIANT_LIST",
+ "ITEM_LIST",
+ "ASSOC_ITEM_LIST",
+ "ATTR",
+ "META",
+ "USE_TREE",
+ "USE_TREE_LIST",
+ "PATH",
+ "PATH_SEGMENT",
+ "LITERAL",
+ "RENAME",
+ "VISIBILITY",
+ "WHERE_CLAUSE",
+ "WHERE_PRED",
+ "ABI",
+ "NAME",
+ "NAME_REF",
+ "LET_STMT",
+ "LET_ELSE",
+ "EXPR_STMT",
+ "GENERIC_PARAM_LIST",
+ "GENERIC_PARAM",
+ "LIFETIME_PARAM",
+ "TYPE_PARAM",
+ "CONST_PARAM",
+ "GENERIC_ARG_LIST",
+ "LIFETIME",
+ "LIFETIME_ARG",
+ "TYPE_ARG",
+ "ASSOC_TYPE_ARG",
+ "CONST_ARG",
+ "PARAM_LIST",
+ "PARAM",
+ "SELF_PARAM",
+ "ARG_LIST",
+ "TYPE_BOUND",
+ "TYPE_BOUND_LIST",
+ // macro related
+ "MACRO_ITEMS",
+ "MACRO_STMTS",
+ ],
+};
+
+#[derive(Default, Debug)]
+pub(crate) struct AstSrc {
+ pub(crate) tokens: Vec<String>,
+ pub(crate) nodes: Vec<AstNodeSrc>,
+ pub(crate) enums: Vec<AstEnumSrc>,
+}
+
+#[derive(Debug)]
+pub(crate) struct AstNodeSrc {
+ pub(crate) doc: Vec<String>,
+ pub(crate) name: String,
+ pub(crate) traits: Vec<String>,
+ pub(crate) fields: Vec<Field>,
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Field {
+ Token(String),
+ Node { name: String, ty: String, cardinality: Cardinality },
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Cardinality {
+ Optional,
+ Many,
+}
+
+#[derive(Debug)]
+pub(crate) struct AstEnumSrc {
+ pub(crate) doc: Vec<String>,
+ pub(crate) name: String,
+ pub(crate) traits: Vec<String>,
+ pub(crate) variants: Vec<String>,
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
new file mode 100644
index 000000000..6d2766225
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
@@ -0,0 +1,862 @@
+//! This module generates the AST data types used by rust-analyzer.
+//!
+//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
+//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
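+//!
+//! Generation is driven by the `rust.ungram` grammar (see `sourcegen_ast`
+//! below) together with the hand-written `KINDS_SRC` table from `ast_src`;
+//! the output is written into `crates/parser` and `crates/syntax` via
+//! `sourcegen::ensure_file_contents`.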
+
+use std::{
+ collections::{BTreeSet, HashSet},
+ fmt::Write,
+};
+
+use itertools::Itertools;
+use proc_macro2::{Punct, Spacing};
+use quote::{format_ident, quote};
+use ungrammar::{Grammar, Rule};
+
+use crate::tests::ast_src::{
+ AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
+};
+
+#[test]
+fn sourcegen_ast() {
+ let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
+ let syntax_kinds_file =
+ sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
+ sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);
+
+ let grammar =
+ include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/rust.ungram")).parse().unwrap();
+ let ast = lower(&grammar);
+
+ let ast_tokens = generate_tokens(&ast);
+ let ast_tokens_file =
+ sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
+ sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &ast_tokens);
+
+ let ast_nodes = generate_nodes(KINDS_SRC, &ast);
+ let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
+ sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &ast_nodes);
+}
+
+fn generate_tokens(grammar: &AstSrc) -> String {
+ let tokens = grammar.tokens.iter().map(|token| {
+ let name = format_ident!("{}", token);
+ let kind = format_ident!("{}", to_upper_snake_case(token));
+ quote! {
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxToken,
+ }
+ impl std::fmt::Display for #name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+ }
+ impl AstToken for #name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+ }
+ }
+ });
+
+ sourcegen::add_preamble(
+ "sourcegen_ast",
+ sourcegen::reformat(
+ quote! {
+ use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
+ #(#tokens)*
+ }
+ .to_string(),
+ ),
+ )
+ .replace("#[derive", "\n#[derive")
+}
+
+fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
+ let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .nodes
+ .iter()
+ .map(|node| {
+ let name = format_ident!("{}", node.name);
+ let kind = format_ident!("{}", to_upper_snake_case(&node.name));
+ let traits = node
+ .traits
+ .iter()
+ .filter(|trait_name| {
+ // Loops have two expressions, so this might collide; therefore we implement it manually
+ node.name != "ForExpr" && node.name != "WhileExpr"
+ || trait_name.as_str() != "HasLoopBody"
+ })
+ .map(|trait_name| {
+ let trait_name = format_ident!("{}", trait_name);
+ quote!(impl ast::#trait_name for #name {})
+ });
+
+ let methods = node.fields.iter().map(|field| {
+ let method_name = field.method_name();
+ let ty = field.ty();
+
+ if field.is_many() {
+ quote! {
+ pub fn #method_name(&self) -> AstChildren<#ty> {
+ support::children(&self.syntax)
+ }
+ }
+ } else if let Some(token_kind) = field.token_kind() {
+ quote! {
+ pub fn #method_name(&self) -> Option<#ty> {
+ support::token(&self.syntax, #token_kind)
+ }
+ }
+ } else {
+ quote! {
+ pub fn #method_name(&self) -> Option<#ty> {
+ support::child(&self.syntax)
+ }
+ }
+ }
+ });
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxNode,
+ }
+
+ #(#traits)*
+
+ impl #name {
+ #(#methods)*
+ }
+ },
+ quote! {
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ kind == #kind
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+ }
+ },
+ )
+ })
+ .unzip();
+
+ let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .enums
+ .iter()
+ .map(|en| {
+ let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
+ let name = format_ident!("{}", en.name);
+ let kinds: Vec<_> = variants
+ .iter()
+ .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
+ .collect();
+ let traits = en.traits.iter().map(|trait_name| {
+ let trait_name = format_ident!("{}", trait_name);
+ quote!(impl ast::#trait_name for #name {})
+ });
+
+ let ast_node = if en.name == "Stmt" {
+ quote! {}
+ } else {
+ quote! {
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ #(#kinds)|* => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ #(
+ #kinds => #name::#variants(#variants { syntax }),
+ )*
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ #(
+ #name::#variants(it) => &it.syntax,
+ )*
+ }
+ }
+ }
+ }
+ };
+
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub enum #name {
+ #(#variants(#variants),)*
+ }
+
+ #(#traits)*
+ },
+ quote! {
+ #(
+ impl From<#variants> for #name {
+ fn from(node: #variants) -> #name {
+ #name::#variants(node)
+ }
+ }
+ )*
+ #ast_node
+ },
+ )
+ })
+ .unzip();
+
+ let (any_node_defs, any_node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .nodes
+ .iter()
+ .flat_map(|node| node.traits.iter().map(move |t| (t, node)))
+ .into_group_map()
+ .into_iter()
+ .sorted_by_key(|(k, _)| *k)
+ .map(|(trait_name, nodes)| {
+ let name = format_ident!("Any{}", trait_name);
+ let trait_name = format_ident!("{}", trait_name);
+ let kinds: Vec<_> = nodes
+ .iter()
+ .map(|name| format_ident!("{}", to_upper_snake_case(&name.name.to_string())))
+ .collect();
+
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxNode,
+ }
+ impl ast::#trait_name for #name {}
+ },
+ quote! {
+ impl #name {
+ #[inline]
+ pub fn new<T: ast::#trait_name>(node: T) -> #name {
+ #name {
+ syntax: node.syntax().clone()
+ }
+ }
+ }
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ #(#kinds)|* => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| #name { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ &self.syntax
+ }
+ }
+ },
+ )
+ })
+ .unzip();
+
+ let enum_names = grammar.enums.iter().map(|it| &it.name);
+ let node_names = grammar.nodes.iter().map(|it| &it.name);
+
+ let display_impls =
+ enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
+ quote! {
+ impl std::fmt::Display for #name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+ }
+ }
+ });
+
+ let defined_nodes: HashSet<_> = node_names.collect();
+
+ for node in kinds
+ .nodes
+ .iter()
+ .map(|kind| to_pascal_case(kind))
+ .filter(|name| !defined_nodes.iter().any(|&it| it == name))
+ {
+ drop(node)
+ // FIXME: restore this
+ // eprintln!("Warning: node {} not defined in ast source", node);
+ }
+
+ let ast = quote! {
+ #![allow(non_snake_case)]
+ use crate::{
+ SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
+ ast::{self, AstNode, AstChildren, support},
+ T,
+ };
+
+ #(#node_defs)*
+ #(#enum_defs)*
+ #(#any_node_defs)*
+ #(#node_boilerplate_impls)*
+ #(#enum_boilerplate_impls)*
+ #(#any_node_boilerplate_impls)*
+ #(#display_impls)*
+ };
+
+ let ast = ast.to_string().replace("T ! [", "T![");
+
+ let mut res = String::with_capacity(ast.len() * 2);
+
+ let mut docs =
+ grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));
+
+ for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
+ res.push_str(chunk);
+ if let Some(doc) = docs.next() {
+ write_doc_comment(doc, &mut res);
+ }
+ }
+
+ let res = sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res));
+ res.replace("#[derive", "\n#[derive")
+}
+
+fn write_doc_comment(contents: &[String], dest: &mut String) {
+ for line in contents {
+ writeln!(dest, "///{}", line).unwrap();
+ }
+}
+
+fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
+ let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
+ .punct
+ .iter()
+ .filter(|(token, _name)| token.len() == 1)
+ .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
+ .unzip();
+
+ let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
+ if "{}[]()".contains(token) {
+ let c = token.chars().next().unwrap();
+ quote! { #c }
+ } else {
+ let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
+ quote! { #(#cs)* }
+ }
+ });
+ let punctuation =
+ grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let x = |&name| match name {
+ "Self" => format_ident!("SELF_TYPE_KW"),
+ name => format_ident!("{}_KW", to_upper_snake_case(name)),
+ };
+ let full_keywords_values = grammar.keywords;
+ let full_keywords = full_keywords_values.iter().map(x);
+
+ let contextual_keywords_values = &grammar.contextual_keywords;
+ let contextual_keywords = contextual_keywords_values.iter().map(x);
+
+ let all_keywords_values = grammar
+ .keywords
+ .iter()
+ .chain(grammar.contextual_keywords.iter())
+ .copied()
+ .collect::<Vec<_>>();
+ let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
+ let all_keywords = all_keywords_values.iter().map(x).collect::<Vec<_>>();
+
+ let literals =
+ grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let ast = quote! {
+ #![allow(bad_style, missing_docs, unreachable_pub)]
+ /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
+ #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+ #[repr(u16)]
+ pub enum SyntaxKind {
+            // Technical SyntaxKinds: they appear temporarily during parsing,
+ // but never end up in the final tree
+ #[doc(hidden)]
+ TOMBSTONE,
+ #[doc(hidden)]
+ EOF,
+ #(#punctuation,)*
+ #(#all_keywords,)*
+ #(#literals,)*
+ #(#tokens,)*
+ #(#nodes,)*
+
+ // Technical kind so that we can cast from u16 safely
+ #[doc(hidden)]
+ __LAST,
+ }
+ use self::SyntaxKind::*;
+
+ impl SyntaxKind {
+ pub fn is_keyword(self) -> bool {
+ match self {
+ #(#all_keywords)|* => true,
+ _ => false,
+ }
+ }
+
+ pub fn is_punct(self) -> bool {
+ match self {
+ #(#punctuation)|* => true,
+ _ => false,
+ }
+ }
+
+ pub fn is_literal(self) -> bool {
+ match self {
+ #(#literals)|* => true,
+ _ => false,
+ }
+ }
+
+ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ #(#full_keywords_values => #full_keywords,)*
+ _ => return None,
+ };
+ Some(kw)
+ }
+
+ pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ #(#contextual_keywords_values => #contextual_keywords,)*
+ _ => return None,
+ };
+ Some(kw)
+ }
+
+ pub fn from_char(c: char) -> Option<SyntaxKind> {
+ let tok = match c {
+ #(#single_byte_tokens_values => #single_byte_tokens,)*
+ _ => return None,
+ };
+ Some(tok)
+ }
+ }
+
+ #[macro_export]
+ macro_rules! T {
+ #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
+ #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
+ [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
+ [ident] => { $crate::SyntaxKind::IDENT };
+ [shebang] => { $crate::SyntaxKind::SHEBANG };
+ }
+ pub use T;
+ };
+
+ sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
+}
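
The `T!` macro emitted by the function above maps token spellings to `SyntaxKind` variants. As orientation, here is a hand-written miniature of that generated macro; the enum and arm set are illustrative only and are not the real generated output.

```rust
// Illustrative miniature of the generated `T!` macro (not the real output of
// `generate_syntax_kinds`, which derives its arms from `KindsSrc`).
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SyntaxKind {
    SEMICOLON,
    FN_KW,
    IDENT,
}

macro_rules! T {
    [;] => { SyntaxKind::SEMICOLON };
    [fn] => { SyntaxKind::FN_KW };
    [ident] => { SyntaxKind::IDENT };
}

fn main() {
    // Token spellings become readable aliases for the corresponding kinds.
    assert_eq!(T![;], SyntaxKind::SEMICOLON);
    assert_eq!(T![fn], SyntaxKind::FN_KW);
    assert_eq!(T![ident], SyntaxKind::IDENT);
}
```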
+
+fn to_upper_snake_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev = false;
+ for c in s.chars() {
+ if c.is_ascii_uppercase() && prev {
+ buf.push('_')
+ }
+ prev = true;
+
+ buf.push(c.to_ascii_uppercase());
+ }
+ buf
+}
+
+fn to_lower_snake_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev = false;
+ for c in s.chars() {
+ if c.is_ascii_uppercase() && prev {
+ buf.push('_')
+ }
+ prev = true;
+
+ buf.push(c.to_ascii_lowercase());
+ }
+ buf
+}
+
+fn to_pascal_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev_is_underscore = true;
+ for c in s.chars() {
+ if c == '_' {
+ prev_is_underscore = true;
+ } else if prev_is_underscore {
+ buf.push(c.to_ascii_uppercase());
+ prev_is_underscore = false;
+ } else {
+ buf.push(c.to_ascii_lowercase());
+ }
+ }
+ buf
+}
+
+fn pluralize(s: &str) -> String {
+ format!("{}s", s)
+}
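
As a quick sanity check on the four naming helpers above, here is a minimal, test-style sketch of the conversions they perform; it assumes the test sits in this same module, and the expected strings are derived from the implementations rather than from upstream tests.

```rust
#[test]
fn naming_helper_conversions() {
    // `to_upper_snake_case` inserts `_` before interior capitals and upper-cases.
    assert_eq!(to_upper_snake_case("SelfParam"), "SELF_PARAM");
    // `to_lower_snake_case` does the same split but lower-cases.
    assert_eq!(to_lower_snake_case("SelfParam"), "self_param");
    // `to_pascal_case` rebuilds PascalCase from snake_case.
    assert_eq!(to_pascal_case("self_param"), "SelfParam");
    // `pluralize` naively appends an `s`.
    assert_eq!(pluralize("field"), "fields");
}
```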
+
+impl Field {
+ fn is_many(&self) -> bool {
+ matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
+ }
+ fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
+ match self {
+ Field::Token(token) => {
+ let token: proc_macro2::TokenStream = token.parse().unwrap();
+ Some(quote! { T![#token] })
+ }
+ _ => None,
+ }
+ }
+ fn method_name(&self) -> proc_macro2::Ident {
+ match self {
+ Field::Token(name) => {
+ let name = match name.as_str() {
+ ";" => "semicolon",
+ "->" => "thin_arrow",
+ "'{'" => "l_curly",
+ "'}'" => "r_curly",
+ "'('" => "l_paren",
+ "')'" => "r_paren",
+ "'['" => "l_brack",
+ "']'" => "r_brack",
+ "<" => "l_angle",
+ ">" => "r_angle",
+ "=" => "eq",
+ "!" => "excl",
+ "*" => "star",
+ "&" => "amp",
+ "_" => "underscore",
+ "." => "dot",
+ ".." => "dotdot",
+ "..." => "dotdotdot",
+ "..=" => "dotdoteq",
+ "=>" => "fat_arrow",
+ "@" => "at",
+ ":" => "colon",
+ "::" => "coloncolon",
+ "#" => "pound",
+ "?" => "question_mark",
+ "," => "comma",
+ "|" => "pipe",
+ "~" => "tilde",
+ _ => name,
+ };
+ format_ident!("{}_token", name)
+ }
+ Field::Node { name, .. } => {
+ if name == "type" {
+ format_ident!("ty")
+ } else {
+ format_ident!("{}", name)
+ }
+ }
+ }
+ }
+ fn ty(&self) -> proc_macro2::Ident {
+ match self {
+ Field::Token(_) => format_ident!("SyntaxToken"),
+ Field::Node { ty, .. } => format_ident!("{}", ty),
+ }
+ }
+}
+
+fn lower(grammar: &Grammar) -> AstSrc {
+ let mut res = AstSrc {
+ tokens: "Whitespace Comment String ByteString IntNumber FloatNumber Char Byte Ident"
+ .split_ascii_whitespace()
+ .map(|it| it.to_string())
+ .collect::<Vec<_>>(),
+ ..Default::default()
+ };
+
+ let nodes = grammar.iter().collect::<Vec<_>>();
+
+ for &node in &nodes {
+ let name = grammar[node].name.clone();
+ let rule = &grammar[node].rule;
+ match lower_enum(grammar, rule) {
+ Some(variants) => {
+ let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
+ res.enums.push(enum_src);
+ }
+ None => {
+ let mut fields = Vec::new();
+ lower_rule(&mut fields, grammar, None, rule);
+ res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
+ }
+ }
+ }
+
+ deduplicate_fields(&mut res);
+ extract_enums(&mut res);
+ extract_struct_traits(&mut res);
+ extract_enum_traits(&mut res);
+ res
+}
+
+fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
+ let alternatives = match rule {
+ Rule::Alt(it) => it,
+ _ => return None,
+ };
+ let mut variants = Vec::new();
+ for alternative in alternatives {
+ match alternative {
+ Rule::Node(it) => variants.push(grammar[*it].name.clone()),
+ Rule::Token(it) if grammar[*it].name == ";" => (),
+ _ => return None,
+ }
+ }
+ Some(variants)
+}
+
+fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
+ if lower_comma_list(acc, grammar, label, rule) {
+ return;
+ }
+
+ match rule {
+ Rule::Node(node) => {
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
+ acc.push(field);
+ }
+ Rule::Token(token) => {
+ assert!(label.is_none());
+ let mut name = grammar[*token].name.clone();
+ if name != "int_number" && name != "string" {
+ if "[]{}()".contains(&name) {
+ name = format!("'{}'", name);
+ }
+ let field = Field::Token(name);
+ acc.push(field);
+ }
+ }
+ Rule::Rep(inner) => {
+ if let Rule::Node(node) = &**inner {
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+ acc.push(field);
+ return;
+ }
+ panic!("unhandled rule: {:?}", rule)
+ }
+ Rule::Labeled { label: l, rule } => {
+ assert!(label.is_none());
+ let manually_implemented = matches!(
+ l.as_str(),
+ "lhs"
+ | "rhs"
+ | "then_branch"
+ | "else_branch"
+ | "start"
+ | "end"
+ | "op"
+ | "index"
+ | "base"
+ | "value"
+ | "trait"
+ | "self_ty"
+ | "iterable"
+ | "condition"
+ );
+ if manually_implemented {
+ return;
+ }
+ lower_rule(acc, grammar, Some(l), rule);
+ }
+ Rule::Seq(rules) | Rule::Alt(rules) => {
+ for rule in rules {
+ lower_rule(acc, grammar, label, rule)
+ }
+ }
+ Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
+ }
+}
+
+// (T (',' T)* ','?)
+fn lower_comma_list(
+ acc: &mut Vec<Field>,
+ grammar: &Grammar,
+ label: Option<&String>,
+ rule: &Rule,
+) -> bool {
+ let rule = match rule {
+ Rule::Seq(it) => it,
+ _ => return false,
+ };
+ let (node, repeat, trailing_comma) = match rule.as_slice() {
+ [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
+ (node, repeat, trailing_comma)
+ }
+ _ => return false,
+ };
+ let repeat = match &**repeat {
+ Rule::Seq(it) => it,
+ _ => return false,
+ };
+ match repeat.as_slice() {
+ [comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
+ _ => return false,
+ }
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+ acc.push(field);
+ true
+}
+
+fn deduplicate_fields(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ let mut i = 0;
+ 'outer: while i < node.fields.len() {
+ for j in 0..i {
+ let f1 = &node.fields[i];
+ let f2 = &node.fields[j];
+ if f1 == f2 {
+ node.fields.remove(i);
+ continue 'outer;
+ }
+ }
+ i += 1;
+ }
+ }
+}
+
+fn extract_enums(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ for enm in &ast.enums {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let ty = field.ty().to_string();
+ if enm.variants.iter().any(|it| it == &ty) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == enm.variants.len() {
+ node.remove_field(to_remove);
+ let ty = enm.name.clone();
+ let name = to_lower_snake_case(&ty);
+ node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
+ }
+ }
+ }
+}
+
+fn extract_struct_traits(ast: &mut AstSrc) {
+ let traits: &[(&str, &[&str])] = &[
+ ("HasAttrs", &["attrs"]),
+ ("HasName", &["name"]),
+ ("HasVisibility", &["visibility"]),
+ ("HasGenericParams", &["generic_param_list", "where_clause"]),
+ ("HasTypeBounds", &["type_bound_list", "colon_token"]),
+ ("HasModuleItem", &["items"]),
+ ("HasLoopBody", &["label", "loop_body"]),
+ ("HasArgList", &["arg_list"]),
+ ];
+
+ for node in &mut ast.nodes {
+ for (name, methods) in traits {
+ extract_struct_trait(node, name, methods);
+ }
+ }
+
+ let nodes_with_doc_comments = [
+ "SourceFile",
+ "Fn",
+ "Struct",
+ "Union",
+ "RecordField",
+ "TupleField",
+ "Enum",
+ "Variant",
+ "Trait",
+ "Module",
+ "Static",
+ "Const",
+ "TypeAlias",
+ "Impl",
+ "ExternBlock",
+ "ExternCrate",
+ "MacroCall",
+ "MacroRules",
+ "MacroDef",
+ "Use",
+ ];
+
+ for node in &mut ast.nodes {
+ if nodes_with_doc_comments.contains(&&*node.name) {
+ node.traits.push("HasDocComments".into());
+ }
+ }
+}
+
+fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let method_name = field.method_name().to_string();
+ if methods.iter().any(|&it| it == method_name) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == methods.len() {
+ node.traits.push(trait_name.to_string());
+ node.remove_field(to_remove);
+ }
+}
+
+fn extract_enum_traits(ast: &mut AstSrc) {
+ for enm in &mut ast.enums {
+ if enm.name == "Stmt" {
+ continue;
+ }
+ let nodes = &ast.nodes;
+ let mut variant_traits = enm
+ .variants
+ .iter()
+ .map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
+ .map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());
+
+ let mut enum_traits = match variant_traits.next() {
+ Some(it) => it,
+ None => continue,
+ };
+ for traits in variant_traits {
+ enum_traits = enum_traits.intersection(&traits).cloned().collect();
+ }
+ enm.traits = enum_traits.into_iter().collect();
+ }
+}
+
+impl AstNodeSrc {
+ fn remove_field(&mut self, to_remove: Vec<usize>) {
+ to_remove.into_iter().rev().for_each(|idx| {
+ self.fields.remove(idx);
+ });
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
new file mode 100644
index 000000000..913b24d42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
@@ -0,0 +1,95 @@
+//! Yet another version of an owned string, backed by a syntax tree token.
+
+use std::{cmp::Ordering, fmt, ops};
+
+use rowan::GreenToken;
+
+pub struct TokenText<'a>(pub(crate) Repr<'a>);
+
+pub(crate) enum Repr<'a> {
+ Borrowed(&'a str),
+ Owned(GreenToken),
+}
+
+impl<'a> TokenText<'a> {
+ pub(crate) fn borrowed(text: &'a str) -> Self {
+ TokenText(Repr::Borrowed(text))
+ }
+
+ pub(crate) fn owned(green: GreenToken) -> Self {
+ TokenText(Repr::Owned(green))
+ }
+
+ pub fn as_str(&self) -> &str {
+ match &self.0 {
+ &Repr::Borrowed(it) => it,
+ Repr::Owned(green) => green.text(),
+ }
+ }
+}
+
+impl ops::Deref for TokenText<'_> {
+ type Target = str;
+
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+impl AsRef<str> for TokenText<'_> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl From<TokenText<'_>> for String {
+ fn from(token_text: TokenText<'_>) -> Self {
+ token_text.as_str().into()
+ }
+}
+
+impl PartialEq<&'_ str> for TokenText<'_> {
+ fn eq(&self, other: &&str) -> bool {
+ self.as_str() == *other
+ }
+}
+impl PartialEq<TokenText<'_>> for &'_ str {
+ fn eq(&self, other: &TokenText<'_>) -> bool {
+ other == self
+ }
+}
+impl PartialEq<String> for TokenText<'_> {
+ fn eq(&self, other: &String) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+impl PartialEq<TokenText<'_>> for String {
+ fn eq(&self, other: &TokenText<'_>) -> bool {
+ other == self
+ }
+}
+impl PartialEq for TokenText<'_> {
+ fn eq(&self, other: &TokenText<'_>) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+impl Eq for TokenText<'_> {}
+impl Ord for TokenText<'_> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+impl PartialOrd for TokenText<'_> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+impl fmt::Display for TokenText<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+impl fmt::Debug for TokenText<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(self.as_str(), f)
+ }
+}
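
Because the constructors are `pub(crate)`, `TokenText` is mostly consumed through its trait impls. The following is a crate-internal sketch of how the comparisons, `Deref`, and `From` conversions behave; the test module and its placement are assumptions for illustration, not part of the upstream crate.

```rust
#[cfg(test)]
mod token_text_sketch {
    use super::TokenText;

    #[test]
    fn behaves_like_a_str() {
        let text = TokenText::borrowed("hello");
        // PartialEq against &str and String, in both directions.
        assert_eq!(text, "hello");
        assert_eq!("hello", text);
        assert_eq!(text, String::from("hello"));
        // Deref<Target = str> exposes the usual str API.
        assert_eq!(text.len(), 5);
        assert!(text.starts_with("he"));
        // Converting into an owned String copies the text out.
        let owned: String = text.into();
        assert_eq!(owned, "hello");
    }
}
```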
diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
new file mode 100644
index 000000000..f4c02518b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
@@ -0,0 +1,43 @@
+//! A set of utility methods to reuse at other abstraction levels
+
+use itertools::Itertools;
+
+use crate::{ast, match_ast, AstNode};
+
+pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
+ path.syntax()
+ .children()
+ .filter_map(|node| {
+ match_ast! {
+ match node {
+ ast::PathSegment(it) => {
+ Some(it.name_ref()?.to_string())
+ },
+ ast::Path(it) => {
+ Some(path_to_string_stripping_turbo_fish(&it))
+ },
+ _ => None,
+ }
+ }
+ })
+ .join("::")
+}
+
+#[cfg(test)]
+mod tests {
+ use super::path_to_string_stripping_turbo_fish;
+ use crate::ast::make;
+
+ #[test]
+ fn turbofishes_are_stripped() {
+ assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
+ assert_eq!(
+ "Vec::new",
+ path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
+ );
+ assert_eq!(
+ "Vec::new",
+ path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
new file mode 100644
index 000000000..b9f2b5132
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
@@ -0,0 +1,378 @@
+//! This module implements syntax validation that the parser doesn't handle.
+//!
+//! A failed validation emits a diagnostic.
+
+mod block;
+
+use rowan::Direction;
+use rustc_lexer::unescape::{
+ self, unescape_byte, unescape_byte_literal, unescape_char, unescape_literal, Mode,
+};
+
+use crate::{
+ algo,
+ ast::{self, HasAttrs, HasVisibility},
+ match_ast, AstNode, SyntaxError,
+ SyntaxKind::{CONST, FN, INT_NUMBER, TYPE_ALIAS},
+ SyntaxNode, SyntaxToken, TextSize, T,
+};
+
+pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
+ // FIXME:
+ // * Add unescape validation of raw string literals and raw byte string literals
+    // * Add validation that doc comments are attached to nodes
+
+ let mut errors = Vec::new();
+ for node in root.descendants() {
+ match_ast! {
+ match node {
+ ast::Literal(it) => validate_literal(it, &mut errors),
+ ast::Const(it) => validate_const(it, &mut errors),
+ ast::BlockExpr(it) => block::validate_block_expr(it, &mut errors),
+ ast::FieldExpr(it) => validate_numeric_name(it.name_ref(), &mut errors),
+ ast::RecordExprField(it) => validate_numeric_name(it.name_ref(), &mut errors),
+ ast::Visibility(it) => validate_visibility(it, &mut errors),
+ ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
+ ast::PathSegment(it) => validate_path_keywords(it, &mut errors),
+ ast::RefType(it) => validate_trait_object_ref_ty(it, &mut errors),
+ ast::PtrType(it) => validate_trait_object_ptr_ty(it, &mut errors),
+ ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, &mut errors),
+ ast::MacroRules(it) => validate_macro_rules(it, &mut errors),
+ ast::LetExpr(it) => validate_let_expr(it, &mut errors),
+ _ => (),
+ }
+ }
+ }
+ errors
+}
+
+fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
+ use unescape::EscapeError as EE;
+
+ #[rustfmt::skip]
+ let err_message = match err {
+ EE::ZeroChars => {
+ "Literal must not be empty"
+ }
+ EE::MoreThanOneChar => {
+ "Literal must be one character long"
+ }
+ EE::LoneSlash => {
+ "Character must be escaped: `\\`"
+ }
+ EE::InvalidEscape => {
+ "Invalid escape"
+ }
+ EE::BareCarriageReturn | EE::BareCarriageReturnInRawString => {
+ "Character must be escaped: `\r`"
+ }
+ EE::EscapeOnlyChar => {
+ "Escape character `\\` must be escaped itself"
+ }
+ EE::TooShortHexEscape => {
+ "ASCII hex escape code must have exactly two digits"
+ }
+ EE::InvalidCharInHexEscape => {
+ "ASCII hex escape code must contain only hex characters"
+ }
+ EE::OutOfRangeHexEscape => {
+ "ASCII hex escape code must be at most 0x7F"
+ }
+ EE::NoBraceInUnicodeEscape => {
+ "Missing `{` to begin the unicode escape"
+ }
+ EE::InvalidCharInUnicodeEscape => {
+ "Unicode escape must contain only hex characters and underscores"
+ }
+ EE::EmptyUnicodeEscape => {
+ "Unicode escape must not be empty"
+ }
+ EE::UnclosedUnicodeEscape => {
+ "Missing `}` to terminate the unicode escape"
+ }
+ EE::LeadingUnderscoreUnicodeEscape => {
+ "Unicode escape code must not begin with an underscore"
+ }
+ EE::OverlongUnicodeEscape => {
+ "Unicode escape code must have at most 6 digits"
+ }
+ EE::LoneSurrogateUnicodeEscape => {
+ "Unicode escape code must not be a surrogate"
+ }
+ EE::OutOfRangeUnicodeEscape => {
+ "Unicode escape code must be at most 0x10FFFF"
+ }
+ EE::UnicodeEscapeInByte => {
+ "Byte literals must not contain unicode escapes"
+ }
+ EE::NonAsciiCharInByte | EE::NonAsciiCharInByteString => {
+ "Byte literals must not contain non-ASCII characters"
+ }
+ };
+
+ err_message
+}
+
+fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
+ // FIXME: move this function to outer scope (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366196658)
+ fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
+ text.rfind(end_delimiter).and_then(|end| text.get(prefix_len..end))
+ }
+
+ let token = literal.token();
+ let text = token.text();
+
+ // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366199205)
+ let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
+ let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
+ acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
+ };
+
+ match literal.kind() {
+ ast::LiteralKind::String(s) => {
+ if !s.is_raw() {
+ if let Some(without_quotes) = unquote(text, 1, '"') {
+ unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
+ if let Err(err) = char {
+ push_err(1, (range.start, err));
+ }
+ });
+ }
+ }
+ }
+ ast::LiteralKind::ByteString(s) => {
+ if !s.is_raw() {
+ if let Some(without_quotes) = unquote(text, 2, '"') {
+ unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
+ if let Err(err) = char {
+ push_err(2, (range.start, err));
+ }
+ });
+ }
+ }
+ }
+ ast::LiteralKind::Char(_) => {
+ if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) {
+ push_err(1, e);
+ }
+ }
+ ast::LiteralKind::Byte(_) => {
+ if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
+ push_err(2, e);
+ }
+ }
+ ast::LiteralKind::IntNumber(_)
+ | ast::LiteralKind::FloatNumber(_)
+ | ast::LiteralKind::Bool(_) => {}
+ }
+}
+
+pub(crate) fn validate_block_structure(root: &SyntaxNode) {
+ let mut stack = Vec::new();
+ for node in root.descendants_with_tokens() {
+ match node.kind() {
+ T!['{'] => stack.push(node),
+ T!['}'] => {
+ if let Some(pair) = stack.pop() {
+ assert_eq!(
+ node.parent(),
+ pair.parent(),
+ "\nunpaired curlys:\n{}\n{:#?}\n",
+ root.text(),
+ root,
+ );
+ assert!(
+ node.next_sibling_or_token().is_none()
+ && pair.prev_sibling_or_token().is_none(),
+ "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+ node,
+ root.text(),
+ node,
+ );
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+fn validate_numeric_name(name_ref: Option<ast::NameRef>, errors: &mut Vec<SyntaxError>) {
+ if let Some(int_token) = int_token(name_ref) {
+ if int_token.text().chars().any(|c| !c.is_digit(10)) {
+ errors.push(SyntaxError::new(
+ "Tuple (struct) field access is only allowed through \
+ decimal integers with no underscores or suffix",
+ int_token.text_range(),
+ ));
+ }
+ }
+
+ fn int_token(name_ref: Option<ast::NameRef>) -> Option<SyntaxToken> {
+ name_ref?.syntax().first_child_or_token()?.into_token().filter(|it| it.kind() == INT_NUMBER)
+ }
+}
+
+fn validate_visibility(vis: ast::Visibility, errors: &mut Vec<SyntaxError>) {
+ let path_without_in_token = vis.in_token().is_none()
+ && vis.path().and_then(|p| p.as_single_name_ref()).and_then(|n| n.ident_token()).is_some();
+ if path_without_in_token {
+ errors.push(SyntaxError::new("incorrect visibility restriction", vis.syntax.text_range()));
+ }
+ let parent = match vis.syntax().parent() {
+ Some(it) => it,
+ None => return,
+ };
+ match parent.kind() {
+ FN | CONST | TYPE_ALIAS => (),
+ _ => return,
+ }
+
+ let impl_def = match parent.parent().and_then(|it| it.parent()).and_then(ast::Impl::cast) {
+ Some(it) => it,
+ None => return,
+ };
+ // FIXME: disable validation if there's an attribute, since some proc macros use this syntax.
+ // ideally the validation would run only on the fully expanded code, then this wouldn't be necessary.
+ if impl_def.trait_().is_some() && impl_def.attrs().next().is_none() {
+ errors.push(SyntaxError::new("Unnecessary visibility qualifier", vis.syntax.text_range()));
+ }
+}
+
+fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
+ if expr.op_kind() == Some(ast::RangeOp::Inclusive) && expr.end().is_none() {
+ errors.push(SyntaxError::new(
+ "An inclusive range must have an end expression",
+ expr.syntax().text_range(),
+ ));
+ }
+}
+
+fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxError>) {
+ let path = segment.parent_path();
+ let is_path_start = segment.coloncolon_token().is_none() && path.qualifier().is_none();
+
+ if let Some(token) = segment.self_token() {
+ if !is_path_start {
+ errors.push(SyntaxError::new(
+ "The `self` keyword is only allowed as the first segment of a path",
+ token.text_range(),
+ ));
+ }
+ } else if let Some(token) = segment.crate_token() {
+ if !is_path_start || use_prefix(path).is_some() {
+ errors.push(SyntaxError::new(
+ "The `crate` keyword is only allowed as the first segment of a path",
+ token.text_range(),
+ ));
+ }
+ }
+
+ fn use_prefix(mut path: ast::Path) -> Option<ast::Path> {
+ for node in path.syntax().ancestors().skip(1) {
+ match_ast! {
+ match node {
+ ast::UseTree(it) => if let Some(tree_path) = it.path() {
+ // Even a top-level path exists within a `UseTree` so we must explicitly
+ // allow our path but disallow anything else
+ if tree_path != path {
+ return Some(tree_path);
+ }
+ },
+ ast::UseTreeList(_) => continue,
+ ast::Path(parent) => path = parent,
+ _ => return None,
+ }
+ };
+ }
+ None
+ }
+}
+
+fn validate_trait_object_ref_ty(ty: ast::RefType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_ptr_ty(ty: ast::PtrType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_fn_ptr_ret_ty(ty: ast::FnPtrType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ret_type().and_then(|ty| ty.ty()) {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_ty(ty: ast::DynTraitType) -> Option<SyntaxError> {
+ let tbl = ty.type_bound_list()?;
+
+ if tbl.bounds().count() > 1 {
+ let dyn_token = ty.dyn_token()?;
+ let potential_parenthesis =
+ algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+ let kind = potential_parenthesis.kind();
+ if !matches!(kind, T!['('] | T![<] | T![=]) {
+ return Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()));
+ }
+ }
+ None
+}
+
+fn validate_macro_rules(mac: ast::MacroRules, errors: &mut Vec<SyntaxError>) {
+ if let Some(vis) = mac.visibility() {
+ errors.push(SyntaxError::new(
+ "visibilities are not allowed on `macro_rules!` items",
+ vis.syntax().text_range(),
+ ));
+ }
+}
+
+fn validate_const(const_: ast::Const, errors: &mut Vec<SyntaxError>) {
+ if let Some(mut_token) = const_
+ .const_token()
+ .and_then(|t| t.next_token())
+ .and_then(|t| algo::skip_trivia_token(t, Direction::Next))
+ .filter(|t| t.kind() == T![mut])
+ {
+ errors.push(SyntaxError::new("const globals cannot be mutable", mut_token.text_range()));
+ }
+}
+
+fn validate_let_expr(let_: ast::LetExpr, errors: &mut Vec<SyntaxError>) {
+ let mut token = let_.syntax().clone();
+ loop {
+ token = match token.parent() {
+ Some(it) => it,
+ None => break,
+ };
+
+ if ast::ParenExpr::can_cast(token.kind()) {
+ continue;
+ } else if let Some(it) = ast::BinExpr::cast(token.clone()) {
+ if it.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) {
+ continue;
+ }
+ } else if ast::IfExpr::can_cast(token.kind())
+ || ast::WhileExpr::can_cast(token.kind())
+ || ast::MatchGuard::can_cast(token.kind())
+ {
+ // It must be part of the condition since the expressions are inside a block.
+ return;
+ }
+
+ break;
+ }
+ errors.push(SyntaxError::new(
+ "`let` expressions are not supported here",
+ let_.syntax().text_range(),
+ ));
+}
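
These validators are not called directly in normal use; they run as part of parsing and their diagnostics are surfaced alongside parse errors. A hedged sketch of driving them through the crate's public entry point follows (it assumes the `syntax` crate's `SourceFile::parse` / `Parse::errors` API; the input matches the `0046_mutable_const_item` test data below).

```rust
// Sketch: parsing runs the validators in this module and surfaces their errors.
use syntax::SourceFile;

fn main() {
    // `const mut` is accepted by the parser but rejected by `validate_const`.
    let parse = SourceFile::parse("const mut FOO: () = ();");
    for err in parse.errors() {
        // Prints something like: 6..9 const globals cannot be mutable
        println!("{:?} {}", err.range(), err);
    }
}
```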
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
new file mode 100644
index 000000000..8eb4a10a3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
@@ -0,0 +1,24 @@
+//! Logic for validating block expressions, i.e. `ast::BlockExpr`.
+
+use crate::{
+ ast::{self, AstNode, HasAttrs},
+ SyntaxError,
+ SyntaxKind::*,
+};
+
+pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
+ if let Some(parent) = block.syntax().parent() {
+ match parent.kind() {
+ FN | EXPR_STMT | STMT_LIST => return,
+ _ => {}
+ }
+ }
+ if let Some(stmt_list) = block.stmt_list() {
+ errors.extend(stmt_list.attrs().filter(|attr| attr.kind().is_inner()).map(|attr| {
+ SyntaxError::new(
+ "A block in this position cannot accept inner attributes",
+ attr.syntax().text_range(),
+ )
+ }));
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs
new file mode 100644
index 000000000..f977d23c4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs
@@ -0,0 +1,199 @@
+//! An experimental implementation of [Rust RFC#2256 lrs);
+ let root = SyntaxNode::new_owned(root);
+ validate_block_structure(root.borrowed());
+ File { root }
+ }
+ pub fn parse(text: &str) -> File {
+ let tokens = tokenize(&text);
+ let (green, errors) = parser_impl::parse_with::<syntax_node::GreenBuilder>(
+ text, &tokens, grammar::root,
+ );
+ File::new(green, errors)
+ }
+ pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+ }
+ pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<File> {
+ let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
+ let text = replace_range(
+ node.text().to_string(),
+ edit.delete - node.range().start(),
+ &edit.insert,
+ );
+ let tokens = tokenize(&text);
+ if !is_balanced(&tokens) {
+ return None;
+ }
+ let (green, new_errors) = parser_impl::parse_with::<syntax_node::GreenBuilder>(
+ &te2t, &tokens, reparser,
+ );
+ let green_root = node.replace_with(green);
+ let errors = merge_errors(self.errors(), new_errors, node, edit);
+ Some(File::new(green_root, errors))
+ }
+ fn full_reparse(&self, edit: &AtomTextEdit) -> File {
+ let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
+ File::parse(&text)
+ }
+ pub fn ast(&self) -> ast::Root {
+ ast::Root::cast(self.syntax()).unwrap()
+ }
+ pub fn syntax(&self) -> SyntaxNodeRef {
+ self.root.brroowed()
+ }
+ mp_tree(root),
+ );
+ assert!(
+ node.next_sibling().is_none() && pair.prev_sibling().is_none(),
+ "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+ node,
+ root.text(),
+ node.text(),
+ );
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct AtomTextEdit {
+ pub delete: TextRange,
+ pub insert: String,
+}
+
+impl AtomTextEdit {
+ pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
+ AtomTextEdit { delete: range, insert: replace_with }
+ }
+
+ pub fn delete(range: TextRange) -> AtomTextEdit {
+ AtomTextEdit::replace(range, String::new())
+ }
+
+ pub fn insert(offset: TextUnit, text: String) -> AtomTextEdit {
+ AtomTextEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+ }
+}
+
+fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
+ let node = algo::find_covering_node(node, range);
+ return algo::ancestors(node)
+ .filter_map(|node| reparser(node).map(|r| (node, r)))
+ .next();
+
+ fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
+ let res = match node.kind() {
+ BLOCK => grammar::block,
+ RECORD_FIELD_LIST => grammar::record_field_list,
+ _ => return None,
+ };
+ Some(res)
+ }
+}
+
+pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+ let start = u32::from(range.start()) as usize;
+ let end = u32::from(range.end()) as usize;
+ text.replace_range(start..end, replace_with);
+ text
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+ if tokens.is_empty()
+ || tokens.first().unwrap().kind != L_CURLY
+ || tokens.last().unwrap().kind != R_CURLY {
+ return false
+ }
+ let mut balance = 0usize;
+ for t in tokens.iter() {
+ match t.kind {
+ L_CURLYt {
+ pub delete: TextRange,
+ pub insert: String,
+}
+
+impl AtomTextEdit {
+ pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
+ AtomTextEdit { delete: range, insert: replace_with }
+ }
+
+ pub fn delete(range: TextRange) -> AtomTextEdit {
+ AtomTextEdit::replace(range, String::new())
+ }
+
+ pub fn insert(offset: TextUnit, text: String) -> AtomTextEdit {
+ AtomTextEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+ }
+}
+
+fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
+ let node = algo::find_covering_node(node, range);
+ return algo::ancestors(node)
+ .filter_map(|node| reparser(node).map(|r| (node, r)))
+ .next();
+
+ fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
+ let res = match node.kind() {
+ ;
+ let end = u32::from(range.end()) as usize;
+ text.replaT => grammar::record_field_list,
+ _ => return None,
+ };
+ Some(res)
+ }
+}
+
+pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+ let start = u32::from(range.start()) as usize;
+ let end = u32::from(range.end()) as usize;
+ text.replace_range(start..end, replace_with);
+ text
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+ if tokens.is_empty()
+ || tokens.first().unwrap().kind != L_CURLY
+ || tokens.last().unwrap().kind != R_CURLY {
+ return false
+ }
+ let mut balance = 0usize;
+ for t in tokens.iter() {
+ match t.kind {
+ L_CURLY => balance += 1,
+ R_CURLY => balance = match balance.checked_sub(1) {
+ Some(b) => b,
+ None => return false,
+ },
+ _ => (),
+ }
+ }
+ balance == 0
+}
+
+fn merge_errors(
+ old_errors: Vec<SyntaxError>,
+ new_errors: Vec<SyntaxError>,
+ old_node: SyntaxNodeRef,
+ edit: &AtomTextEdit,
+) -> Vec<SyntaxError> {
+ let mut res = Vec::new();
+ for e in old_errors {
+ if e.offset < old_node.range().start() {
+ res.push(e)
+ } else if e.offset > old_node.range().end() {
+ res.push(SyntaxError {
+ msg: e.msg,
+ offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
+ })
+ }
+ }
+ for e in new_errors {
+ res.push(SyntaxError {
+ msg: e.msg,
+ offset: e.offset + old_node.range().start(),
+ })
+ }
+ res
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs
new file mode 100644
index 000000000..f1148058e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs
@@ -0,0 +1,106 @@
+use syntax::{
+ File, TextRange, SyntaxNodeRef, TextUnit,
+ SyntaxKind::*,
+ algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node, ancestors, Direction, siblings},
+};
+
+pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {
+ let syntax = file.syntax();
+ extend(syntax.borrowed(), range)
+}
+
+pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option<TextRange> {
+ if range.is_empty() {
+ let offset = range.start();
+ let mut leaves = find_leaf_at_offset(root, offset);
+ if leaves.clone().all(|it| it.kind() == WHITESPACE) {
+ return Some(extend_ws(root, leaves.next()?, offset));
+ }
+ let leaf = match leaves {
+ LeafAtOffset::None => return None,
+ LeafAtOffset::Single(l) => l,
+ LeafAtOffset::Between(l, r) => pick_best(l, r),
+ };
+ return Some(leaf.range());
+ };
+ let node = find_covering_node(root, range);
+ if node.kind() == COMMENT && range == node.range() {
+ if let Some(range) = extend_comments(node) {
+ return Some(range);
+ }
+ }
+
+ match ancestors(node).skip_while(|n| n.range() == range).next() {
+ None => None,
+ Some(parent) => Some(parent.range()),
+ }
+}
+
+fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRange {
+ let ws_text = ws.leaf_text().unwrap();
+ let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start();
+ let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
+ let ws_suffix = &ws_text.as_str()[suffix];
+ let ws_prefix = &ws_text.as_str()[prefix];
+ if ws_text.contains("\n") && !ws_suffix.contains("\n") {
+ if let Some(node) = ws.next_sibling() {
+ let start = match ws_prefix.rfind('\n') {
+ Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
+ None => node.range().start()
+ };
+ let end = if root.text().char_at(node.range().end()) == Some('\n') {
+ node.range().end() + TextUnit::of_char('\n')
+ } else {
+ node.range().end()
+ };
+ return TextRange::from_to(start, end);
+ }
+ }
+ ws.range()
+}
+
+fn pick_best<'a>(l: SyntaxNodeRef<'a>, r: Syntd[axNodeRef<'a>) -> SyntaxNodeRef<'a> {
+ return if priority(r) > priority(l) { r } else { l };
+ fn priority(n: SyntaxNodeRef) -> usize {
+ match n.kind() {
+ WHITESPACE => 0,
+ IDENT | SELF_KW | SUPER_KW | CRATE_KW => 2,
+ _ => 1,
+ }
+ }
+}
+
+fn extend_comments(node: SyntaxNodeRef) -> Option<TextRange> {
+ let left = adj_com[ments(node, Direction::Backward);
+ let right = adj_comments(node, Direction::Forward);
+ if left != right {
+ Some(TextRange::from_to(
+ left.range().start(),
+ right.range().end(),
+ ))
+ } else {
+ None
+ }
+}
+
+fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef {
+ let mut res = node;
+ for node in siblings(node, dir) {
+ match node.kind() {
+ COMMENT => res = node,
+ WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (),
+ _ => break
+ }
+ }
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use test_utils::extract_offset;
+
+ fn do_check(before: &str, afters: &[&str]) {
+ let (cursor, before) = extract_offset(before);
+ let file = File::parse(&before);
+ let mut range = TextRange::of
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs
new file mode 100644
index 000000000..f35dc7289
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs
@@ -0,0 +1 @@
+!('\ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs
new file mode 100644
index 000000000..0f59c4722
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs
@@ -0,0 +1 @@
+if'\xɿ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs
new file mode 100644
index 000000000..003290f52
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs
@@ -0,0 +1 @@
+b"\xʿ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast
new file mode 100644
index 000000000..50057a02d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast
@@ -0,0 +1,127 @@
+SOURCE_FILE@0..350
+ FN@0..349
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..8
+ IDENT@3..8 "block"
+ PARAM_LIST@8..10
+ L_PAREN@8..9 "("
+ R_PAREN@9..10 ")"
+ WHITESPACE@10..11 " "
+ BLOCK_EXPR@11..349
+ STMT_LIST@11..349
+ L_CURLY@11..12 "{"
+ WHITESPACE@12..17 "\n "
+ LET_STMT@17..129
+ LET_KW@17..20 "let"
+ WHITESPACE@20..21 " "
+ IDENT_PAT@21..26
+ NAME@21..26
+ IDENT@21..26 "inner"
+ WHITESPACE@26..27 " "
+ EQ@27..28 "="
+ WHITESPACE@28..29 " "
+ BLOCK_EXPR@29..128
+ STMT_LIST@29..128
+ L_CURLY@29..30 "{"
+ WHITESPACE@30..39 "\n "
+ ATTR@39..83
+ POUND@39..40 "#"
+ BANG@40..41 "!"
+ L_BRACK@41..42 "["
+ META@42..82
+ PATH@42..45
+ PATH_SEGMENT@42..45
+ NAME_REF@42..45
+ IDENT@42..45 "doc"
+ TOKEN_TREE@45..82
+ L_PAREN@45..46 "("
+ STRING@46..81 "\"Inner attributes not ..."
+ R_PAREN@81..82 ")"
+ R_BRACK@82..83 "]"
+ WHITESPACE@83..92 "\n "
+ COMMENT@92..122 "//! Nor are ModuleDoc ..."
+ WHITESPACE@122..127 "\n "
+ R_CURLY@127..128 "}"
+ SEMICOLON@128..129 ";"
+ WHITESPACE@129..134 "\n "
+ EXPR_STMT@134..257
+ IF_EXPR@134..257
+ IF_KW@134..136 "if"
+ WHITESPACE@136..137 " "
+ LITERAL@137..141
+ TRUE_KW@137..141 "true"
+ WHITESPACE@141..142 " "
+ BLOCK_EXPR@142..257
+ STMT_LIST@142..257
+ L_CURLY@142..143 "{"
+ WHITESPACE@143..152 "\n "
+ ATTR@152..171
+ POUND@152..153 "#"
+ BANG@153..154 "!"
+ L_BRACK@154..155 "["
+ META@155..170
+ PATH@155..158
+ PATH_SEGMENT@155..158
+ NAME_REF@155..158
+ IDENT@155..158 "doc"
+ TOKEN_TREE@158..170
+ L_PAREN@158..159 "("
+ STRING@159..169 "\"Nor here\""
+ R_PAREN@169..170 ")"
+ R_BRACK@170..171 "]"
+ WHITESPACE@171..180 "\n "
+ ATTR@180..212
+ POUND@180..181 "#"
+ BANG@181..182 "!"
+ L_BRACK@182..183 "["
+ META@183..211
+ PATH@183..186
+ PATH_SEGMENT@183..186
+ NAME_REF@183..186
+ IDENT@183..186 "doc"
+ TOKEN_TREE@186..211
+ L_PAREN@186..187 "("
+ STRING@187..210 "\"We error on each attr\""
+ R_PAREN@210..211 ")"
+ R_BRACK@211..212 "]"
+ WHITESPACE@212..221 "\n "
+ COMMENT@221..251 "//! Nor are ModuleDoc ..."
+ WHITESPACE@251..256 "\n "
+ R_CURLY@256..257 "}"
+ WHITESPACE@257..262 "\n "
+ WHILE_EXPR@262..347
+ WHILE_KW@262..267 "while"
+ WHITESPACE@267..268 " "
+ LITERAL@268..272
+ TRUE_KW@268..272 "true"
+ WHITESPACE@272..273 " "
+ BLOCK_EXPR@273..347
+ STMT_LIST@273..347
+ L_CURLY@273..274 "{"
+ WHITESPACE@274..283 "\n "
+ ATTR@283..302
+ POUND@283..284 "#"
+ BANG@284..285 "!"
+ L_BRACK@285..286 "["
+ META@286..301
+ PATH@286..289
+ PATH_SEGMENT@286..289
+ NAME_REF@286..289
+ IDENT@286..289 "doc"
+ TOKEN_TREE@289..301
+ L_PAREN@289..290 "("
+ STRING@290..300 "\"Nor here\""
+ R_PAREN@300..301 ")"
+ R_BRACK@301..302 "]"
+ WHITESPACE@302..311 "\n "
+ COMMENT@311..341 "//! Nor are ModuleDoc ..."
+ WHITESPACE@341..346 "\n "
+ R_CURLY@346..347 "}"
+ WHITESPACE@347..348 "\n"
+ R_CURLY@348..349 "}"
+ WHITESPACE@349..350 "\n"
+error 39..83: A block in this position cannot accept inner attributes
+error 152..171: A block in this position cannot accept inner attributes
+error 180..212: A block in this position cannot accept inner attributes
+error 283..302: A block in this position cannot accept inner attributes
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs
new file mode 100644
index 000000000..6a04f2d0a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs
@@ -0,0 +1,15 @@
+fn block() {
+ let inner = {
+ #![doc("Inner attributes not allowed here")]
+ //! Nor are ModuleDoc comments
+ };
+ if true {
+ #![doc("Nor here")]
+ #![doc("We error on each attr")]
+ //! Nor are ModuleDoc comments
+ }
+ while true {
+ #![doc("Nor here")]
+ //! Nor are ModuleDoc comments
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast
new file mode 100644
index 000000000..90c258cd1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast
@@ -0,0 +1,105 @@
+SOURCE_FILE@0..118
+ IMPL@0..117
+ IMPL_KW@0..4 "impl"
+ WHITESPACE@4..5 " "
+ PATH_TYPE@5..6
+ PATH@5..6
+ PATH_SEGMENT@5..6
+ NAME_REF@5..6
+ IDENT@5..6 "T"
+ WHITESPACE@6..7 " "
+ FOR_KW@7..10 "for"
+ WHITESPACE@10..11 " "
+ TUPLE_TYPE@11..13
+ L_PAREN@11..12 "("
+ R_PAREN@12..13 ")"
+ WHITESPACE@13..14 " "
+ ASSOC_ITEM_LIST@14..117
+ L_CURLY@14..15 "{"
+ WHITESPACE@15..20 "\n "
+ FN@20..31
+ FN_KW@20..22 "fn"
+ WHITESPACE@22..23 " "
+ NAME@23..26
+ IDENT@23..26 "foo"
+ PARAM_LIST@26..28
+ L_PAREN@26..27 "("
+ R_PAREN@27..28 ")"
+ WHITESPACE@28..29 " "
+ BLOCK_EXPR@29..31
+ STMT_LIST@29..31
+ L_CURLY@29..30 "{"
+ R_CURLY@30..31 "}"
+ WHITESPACE@31..36 "\n "
+ FN@36..51
+ VISIBILITY@36..39
+ PUB_KW@36..39 "pub"
+ WHITESPACE@39..40 " "
+ FN_KW@40..42 "fn"
+ WHITESPACE@42..43 " "
+ NAME@43..46
+ IDENT@43..46 "bar"
+ PARAM_LIST@46..48
+ L_PAREN@46..47 "("
+ R_PAREN@47..48 ")"
+ WHITESPACE@48..49 " "
+ BLOCK_EXPR@49..51
+ STMT_LIST@49..51
+ L_CURLY@49..50 "{"
+ R_CURLY@50..51 "}"
+ WHITESPACE@51..56 "\n "
+ TYPE_ALIAS@56..81
+ VISIBILITY@56..66
+ PUB_KW@56..59 "pub"
+ L_PAREN@59..60 "("
+ PATH@60..65
+ PATH_SEGMENT@60..65
+ NAME_REF@60..65
+ CRATE_KW@60..65 "crate"
+ R_PAREN@65..66 ")"
+ WHITESPACE@66..67 " "
+ TYPE_KW@67..71 "type"
+ WHITESPACE@71..72 " "
+ NAME@72..75
+ IDENT@72..75 "Baz"
+ WHITESPACE@75..76 " "
+ EQ@76..77 "="
+ WHITESPACE@77..78 " "
+ TUPLE_TYPE@78..80
+ L_PAREN@78..79 "("
+ R_PAREN@79..80 ")"
+ SEMICOLON@80..81 ";"
+ WHITESPACE@81..86 "\n "
+ CONST@86..115
+ VISIBILITY@86..96
+ PUB_KW@86..89 "pub"
+ L_PAREN@89..90 "("
+ PATH@90..95
+ PATH_SEGMENT@90..95
+ NAME_REF@90..95
+ CRATE_KW@90..95 "crate"
+ R_PAREN@95..96 ")"
+ WHITESPACE@96..97 " "
+ CONST_KW@97..102 "const"
+ WHITESPACE@102..103 " "
+ NAME@103..104
+ IDENT@103..104 "C"
+ COLON@104..105 ":"
+ WHITESPACE@105..106 " "
+ PATH_TYPE@106..109
+ PATH@106..109
+ PATH_SEGMENT@106..109
+ NAME_REF@106..109
+ IDENT@106..109 "i32"
+ WHITESPACE@109..110 " "
+ EQ@110..111 "="
+ WHITESPACE@111..112 " "
+ LITERAL@112..114
+ INT_NUMBER@112..114 "92"
+ SEMICOLON@114..115 ";"
+ WHITESPACE@115..116 "\n"
+ R_CURLY@116..117 "}"
+ WHITESPACE@117..118 "\n"
+error 36..39: Unnecessary visibility qualifier
+error 56..66: Unnecessary visibility qualifier
+error 86..96: Unnecessary visibility qualifier
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs
new file mode 100644
index 000000000..a43e7ef10
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs
@@ -0,0 +1,6 @@
+impl T for () {
+ fn foo() {}
+ pub fn bar() {}
+ pub(crate) type Baz = ();
+ pub(crate) const C: i32 = 92;
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast
new file mode 100644
index 000000000..fd302fb4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE@0..33
+ FN@0..32
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..7
+ IDENT@3..7 "main"
+ PARAM_LIST@7..9
+ L_PAREN@7..8 "("
+ R_PAREN@8..9 ")"
+ WHITESPACE@9..10 " "
+ BLOCK_EXPR@10..32
+ STMT_LIST@10..32
+ L_CURLY@10..11 "{"
+ WHITESPACE@11..16 "\n "
+ EXPR_STMT@16..21
+ RANGE_EXPR@16..20
+ LITERAL@16..17
+ INT_NUMBER@16..17 "0"
+ DOT2EQ@17..20 "..="
+ SEMICOLON@20..21 ";"
+ WHITESPACE@21..26 "\n "
+ EXPR_STMT@26..30
+ RANGE_EXPR@26..29
+ DOT2EQ@26..29 "..="
+ SEMICOLON@29..30 ";"
+ WHITESPACE@30..31 "\n"
+ R_CURLY@31..32 "}"
+ WHITESPACE@32..33 "\n"
+error 16..20: An inclusive range must have an end expression
+error 26..29: An inclusive range must have an end expression
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs
new file mode 100644
index 000000000..0b4ed7a2b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs
@@ -0,0 +1,4 @@
+fn main() {
+ 0..=;
+ ..=;
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast
new file mode 100644
index 000000000..7449b5ddf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast
@@ -0,0 +1,96 @@
+SOURCE_FILE@0..98
+ USE@0..12
+ USE_KW@0..3 "use"
+ WHITESPACE@3..4 " "
+ USE_TREE@4..11
+ PATH@4..11
+ PATH_SEGMENT@4..11
+ COLON2@4..6 "::"
+ NAME_REF@6..11
+ CRATE_KW@6..11 "crate"
+ SEMICOLON@11..12 ";"
+ WHITESPACE@12..13 "\n"
+ USE@13..54
+ USE_KW@13..16 "use"
+ WHITESPACE@16..17 " "
+ USE_TREE@17..53
+ USE_TREE_LIST@17..53
+ L_CURLY@17..18 "{"
+ USE_TREE@18..23
+ PATH@18..23
+ PATH_SEGMENT@18..23
+ NAME_REF@18..23
+ CRATE_KW@18..23 "crate"
+ COMMA@23..24 ","
+ WHITESPACE@24..25 " "
+ USE_TREE@25..52
+ PATH@25..28
+ PATH_SEGMENT@25..28
+ NAME_REF@25..28
+ IDENT@25..28 "foo"
+ COLON2@28..30 "::"
+ USE_TREE_LIST@30..52
+ L_CURLY@30..31 "{"
+ USE_TREE@31..51
+ PATH@31..51
+ PATH@31..46
+ PATH@31..41
+ PATH@31..36
+ PATH_SEGMENT@31..36
+ NAME_REF@31..36
+ CRATE_KW@31..36 "crate"
+ COLON2@36..38 "::"
+ PATH_SEGMENT@38..41
+ NAME_REF@38..41
+ IDENT@38..41 "foo"
+ COLON2@41..43 "::"
+ PATH_SEGMENT@43..46
+ NAME_REF@43..46
+ IDENT@43..46 "bar"
+ COLON2@46..48 "::"
+ PATH_SEGMENT@48..51
+ NAME_REF@48..51
+ IDENT@48..51 "baz"
+ R_CURLY@51..52 "}"
+ R_CURLY@52..53 "}"
+ SEMICOLON@53..54 ";"
+ WHITESPACE@54..55 "\n"
+ USE@55..72
+ USE_KW@55..58 "use"
+ WHITESPACE@58..59 " "
+ USE_TREE@59..71
+ PATH@59..71
+ PATH@59..64
+ PATH_SEGMENT@59..64
+ NAME_REF@59..64
+ IDENT@59..64 "hello"
+ COLON2@64..66 "::"
+ PATH_SEGMENT@66..71
+ NAME_REF@66..71
+ CRATE_KW@66..71 "crate"
+ SEMICOLON@71..72 ";"
+ WHITESPACE@72..73 "\n"
+ USE@73..97
+ USE_KW@73..76 "use"
+ WHITESPACE@76..77 " "
+ USE_TREE@77..96
+ PATH@77..96
+ PATH@77..89
+ PATH@77..82
+ PATH_SEGMENT@77..82
+ NAME_REF@77..82
+ IDENT@77..82 "hello"
+ COLON2@82..84 "::"
+ PATH_SEGMENT@84..89
+ NAME_REF@84..89
+ CRATE_KW@84..89 "crate"
+ COLON2@89..91 "::"
+ PATH_SEGMENT@91..96
+ NAME_REF@91..96
+ IDENT@91..96 "there"
+ SEMICOLON@96..97 ";"
+ WHITESPACE@97..98 "\n"
+error 6..11: The `crate` keyword is only allowed as the first segment of a path
+error 31..36: The `crate` keyword is only allowed as the first segment of a path
+error 66..71: The `crate` keyword is only allowed as the first segment of a path
+error 84..89: The `crate` keyword is only allowed as the first segment of a path
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs
new file mode 100644
index 000000000..508def2c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs
@@ -0,0 +1,4 @@
+use ::crate;
+use {crate, foo::{crate::foo::bar::baz}};
+use hello::crate;
+use hello::crate::there;
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast
new file mode 100644
index 000000000..01f601091
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE@0..25
+ USE@0..11
+ USE_KW@0..3 "use"
+ WHITESPACE@3..4 " "
+ USE_TREE@4..10
+ PATH@4..10
+ PATH_SEGMENT@4..10
+ COLON2@4..6 "::"
+ NAME_REF@6..10
+ SELF_KW@6..10 "self"
+ SEMICOLON@10..11 ";"
+ WHITESPACE@11..12 "\n"
+ USE@12..24
+ USE_KW@12..15 "use"
+ WHITESPACE@15..16 " "
+ USE_TREE@16..23
+ PATH@16..23
+ PATH@16..17
+ PATH_SEGMENT@16..17
+ NAME_REF@16..17
+ IDENT@16..17 "a"
+ COLON2@17..19 "::"
+ PATH_SEGMENT@19..23
+ NAME_REF@19..23
+ SELF_KW@19..23 "self"
+ SEMICOLON@23..24 ";"
+ WHITESPACE@24..25 "\n"
+error 6..10: The `self` keyword is only allowed as the first segment of a path
+error 19..23: The `self` keyword is only allowed as the first segment of a path
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs
new file mode 100644
index 000000000..b9e1d7d8b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs
@@ -0,0 +1,2 @@
+use ::self;
+use a::self;
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast
new file mode 100644
index 000000000..d94daacdc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast
@@ -0,0 +1,196 @@
+SOURCE_FILE@0..187
+ TYPE_ALIAS@0..35
+ TYPE_KW@0..4 "type"
+ WHITESPACE@4..5 " "
+ NAME@5..8
+ IDENT@5..8 "Foo"
+ GENERIC_PARAM_LIST@8..12
+ L_ANGLE@8..9 "<"
+ LIFETIME_PARAM@9..11
+ LIFETIME@9..11
+ LIFETIME_IDENT@9..11 "'a"
+ R_ANGLE@11..12 ">"
+ WHITESPACE@12..13 " "
+ EQ@13..14 "="
+ WHITESPACE@14..15 " "
+ REF_TYPE@15..34
+ AMP@15..16 "&"
+ LIFETIME@16..18
+ LIFETIME_IDENT@16..18 "'a"
+ WHITESPACE@18..19 " "
+ DYN_TRAIT_TYPE@19..34
+ DYN_KW@19..22 "dyn"
+ WHITESPACE@22..23 " "
+ TYPE_BOUND_LIST@23..34
+ TYPE_BOUND@23..27
+ PATH_TYPE@23..27
+ PATH@23..27
+ PATH_SEGMENT@23..27
+ NAME_REF@23..27
+ IDENT@23..27 "Send"
+ WHITESPACE@27..28 " "
+ PLUS@28..29 "+"
+ WHITESPACE@29..30 " "
+ TYPE_BOUND@30..34
+ PATH_TYPE@30..34
+ PATH@30..34
+ PATH_SEGMENT@30..34
+ NAME_REF@30..34
+ IDENT@30..34 "Sync"
+ SEMICOLON@34..35 ";"
+ WHITESPACE@35..36 "\n"
+ TYPE_ALIAS@36..70
+ TYPE_KW@36..40 "type"
+ WHITESPACE@40..41 " "
+ NAME@41..44
+ IDENT@41..44 "Foo"
+ WHITESPACE@44..45 " "
+ EQ@45..46 "="
+ WHITESPACE@46..47 " "
+ PTR_TYPE@47..69
+ STAR@47..48 "*"
+ CONST_KW@48..53 "const"
+ WHITESPACE@53..54 " "
+ DYN_TRAIT_TYPE@54..69
+ DYN_KW@54..57 "dyn"
+ WHITESPACE@57..58 " "
+ TYPE_BOUND_LIST@58..69
+ TYPE_BOUND@58..62
+ PATH_TYPE@58..62
+ PATH@58..62
+ PATH_SEGMENT@58..62
+ NAME_REF@58..62
+ IDENT@58..62 "Send"
+ WHITESPACE@62..63 " "
+ PLUS@63..64 "+"
+ WHITESPACE@64..65 " "
+ TYPE_BOUND@65..69
+ PATH_TYPE@65..69
+ PATH@65..69
+ PATH_SEGMENT@65..69
+ NAME_REF@65..69
+ IDENT@65..69 "Sync"
+ SEMICOLON@69..70 ";"
+ WHITESPACE@70..71 "\n"
+ TYPE_ALIAS@71..109
+ TYPE_KW@71..75 "type"
+ WHITESPACE@75..76 " "
+ NAME@76..79
+ IDENT@76..79 "Foo"
+ WHITESPACE@79..80 " "
+ EQ@80..81 "="
+ WHITESPACE@81..82 " "
+ FN_PTR_TYPE@82..108
+ FN_KW@82..84 "fn"
+ PARAM_LIST@84..86
+ L_PAREN@84..85 "("
+ R_PAREN@85..86 ")"
+ WHITESPACE@86..87 " "
+ RET_TYPE@87..108
+ THIN_ARROW@87..89 "->"
+ WHITESPACE@89..90 " "
+ DYN_TRAIT_TYPE@90..108
+ DYN_KW@90..93 "dyn"
+ WHITESPACE@93..94 " "
+ TYPE_BOUND_LIST@94..108
+ TYPE_BOUND@94..98
+ PATH_TYPE@94..98
+ PATH@94..98
+ PATH_SEGMENT@94..98
+ NAME_REF@94..98
+ IDENT@94..98 "Send"
+ WHITESPACE@98..99 " "
+ PLUS@99..100 "+"
+ WHITESPACE@100..101 " "
+ TYPE_BOUND@101..108
+ LIFETIME@101..108
+ LIFETIME_IDENT@101..108 "'static"
+ SEMICOLON@108..109 ";"
+ WHITESPACE@109..110 "\n"
+ FN@110..186
+ FN_KW@110..112 "fn"
+ WHITESPACE@112..113 " "
+ NAME@113..117
+ IDENT@113..117 "main"
+ PARAM_LIST@117..119
+ L_PAREN@117..118 "("
+ R_PAREN@118..119 ")"
+ WHITESPACE@119..120 " "
+ BLOCK_EXPR@120..186
+ STMT_LIST@120..186
+ L_CURLY@120..121 "{"
+ WHITESPACE@121..126 "\n "
+ LET_STMT@126..184
+ LET_KW@126..129 "let"
+ WHITESPACE@129..130 " "
+ IDENT_PAT@130..131
+ NAME@130..131
+ IDENT@130..131 "b"
+ WHITESPACE@131..132 " "
+ EQ@132..133 "="
+ WHITESPACE@133..134 " "
+ CAST_EXPR@134..183
+ PAREN_EXPR@134..138
+ L_PAREN@134..135 "("
+ REF_EXPR@135..137
+ AMP@135..136 "&"
+ PATH_EXPR@136..137
+ PATH@136..137
+ PATH_SEGMENT@136..137
+ NAME_REF@136..137
+ IDENT@136..137 "a"
+ R_PAREN@137..138 ")"
+ WHITESPACE@138..139 " "
+ AS_KW@139..141 "as"
+ WHITESPACE@141..142 " "
+ REF_TYPE@142..183
+ AMP@142..143 "&"
+ DYN_TRAIT_TYPE@143..183
+ DYN_KW@143..146 "dyn"
+ WHITESPACE@146..147 " "
+ TYPE_BOUND_LIST@147..183
+ TYPE_BOUND@147..175
+ PATH_TYPE@147..175
+ PATH@147..175
+ PATH_SEGMENT@147..175
+ NAME_REF@147..150
+ IDENT@147..150 "Add"
+ GENERIC_ARG_LIST@150..175
+ L_ANGLE@150..151 "<"
+ TYPE_ARG@151..156
+ PATH_TYPE@151..156
+ PATH@151..156
+ PATH_SEGMENT@151..156
+ NAME_REF@151..156
+ IDENT@151..156 "Other"
+ COMMA@156..157 ","
+ WHITESPACE@157..158 " "
+ ASSOC_TYPE_ARG@158..174
+ NAME_REF@158..164
+ IDENT@158..164 "Output"
+ WHITESPACE@164..165 " "
+ EQ@165..166 "="
+ WHITESPACE@166..167 " "
+ PATH_TYPE@167..174
+ PATH@167..174
+ PATH_SEGMENT@167..174
+ NAME_REF@167..174
+ IDENT@167..174 "Addable"
+ R_ANGLE@174..175 ">"
+ WHITESPACE@175..176 " "
+ PLUS@176..177 "+"
+ WHITESPACE@177..178 " "
+ TYPE_BOUND@178..183
+ PATH_TYPE@178..183
+ PATH@178..183
+ PATH_SEGMENT@178..183
+ NAME_REF@178..183
+ IDENT@178..183 "Other"
+ SEMICOLON@183..184 ";"
+ WHITESPACE@184..185 "\n"
+ R_CURLY@185..186 "}"
+ WHITESPACE@186..187 "\n"
+error 19..34: ambiguous `+` in a type
+error 54..69: ambiguous `+` in a type
+error 90..108: ambiguous `+` in a type
+error 143..183: ambiguous `+` in a type
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs
new file mode 100644
index 000000000..0a5958f25
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs
@@ -0,0 +1,6 @@
+type Foo<'a> = &'a dyn Send + Sync;
+type Foo = *const dyn Send + Sync;
+type Foo = fn() -> dyn Send + 'static;
+fn main() {
+ let b = (&a) as &dyn Add<Other, Output = Addable> + Other;
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast
new file mode 100644
index 000000000..c7eb312c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast
@@ -0,0 +1,22 @@
+SOURCE_FILE@0..24
+ CONST@0..23
+ CONST_KW@0..5 "const"
+ WHITESPACE@5..6 " "
+ MUT_KW@6..9 "mut"
+ WHITESPACE@9..10 " "
+ NAME@10..13
+ IDENT@10..13 "FOO"
+ COLON@13..14 ":"
+ WHITESPACE@14..15 " "
+ TUPLE_TYPE@15..17
+ L_PAREN@15..16 "("
+ R_PAREN@16..17 ")"
+ WHITESPACE@17..18 " "
+ EQ@18..19 "="
+ WHITESPACE@19..20 " "
+ TUPLE_EXPR@20..22
+ L_PAREN@20..21 "("
+ R_PAREN@21..22 ")"
+ SEMICOLON@22..23 ";"
+ WHITESPACE@23..24 "\n"
+error 6..9: const globals cannot be mutable
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs
new file mode 100644
index 000000000..ccab6bccf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs
@@ -0,0 +1 @@
+const mut FOO: () = ();
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast
new file mode 100644
index 000000000..9e1e48864
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast
@@ -0,0 +1,216 @@
+SOURCE_FILE@0..282
+ FN@0..281
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..6
+ IDENT@3..6 "foo"
+ PARAM_LIST@6..8
+ L_PAREN@6..7 "("
+ R_PAREN@7..8 ")"
+ WHITESPACE@8..9 " "
+ BLOCK_EXPR@9..281
+ STMT_LIST@9..281
+ L_CURLY@9..10 "{"
+ WHITESPACE@10..15 "\n "
+ CONST@15..42
+ CONST_KW@15..20 "const"
+ WHITESPACE@20..21 " "
+ UNDERSCORE@21..22 "_"
+ COLON@22..23 ":"
+ WHITESPACE@23..24 " "
+ TUPLE_TYPE@24..26
+ L_PAREN@24..25 "("
+ R_PAREN@25..26 ")"
+ WHITESPACE@26..27 " "
+ EQ@27..28 "="
+ WHITESPACE@28..29 " "
+ LET_EXPR@29..41
+ LET_KW@29..32 "let"
+ WHITESPACE@32..33 " "
+ WILDCARD_PAT@33..34
+ UNDERSCORE@33..34 "_"
+ WHITESPACE@34..35 " "
+ EQ@35..36 "="
+ WHITESPACE@36..37 " "
+ PATH_EXPR@37..41
+ PATH@37..41
+ PATH_SEGMENT@37..41
+ NAME_REF@37..41
+ IDENT@37..41 "None"
+ SEMICOLON@41..42 ";"
+ WHITESPACE@42..48 "\n\n "
+ LET_STMT@48..83
+ LET_KW@48..51 "let"
+ WHITESPACE@51..52 " "
+ WILDCARD_PAT@52..53
+ UNDERSCORE@52..53 "_"
+ WHITESPACE@53..54 " "
+ EQ@54..55 "="
+ WHITESPACE@55..56 " "
+ IF_EXPR@56..82
+ IF_KW@56..58 "if"
+ WHITESPACE@58..59 " "
+ LITERAL@59..63
+ TRUE_KW@59..63 "true"
+ WHITESPACE@63..64 " "
+ BLOCK_EXPR@64..82
+ STMT_LIST@64..82
+ L_CURLY@64..65 "{"
+ WHITESPACE@65..66 " "
+ PAREN_EXPR@66..80
+ L_PAREN@66..67 "("
+ LET_EXPR@67..79
+ LET_KW@67..70 "let"
+ WHITESPACE@70..71 " "
+ WILDCARD_PAT@71..72
+ UNDERSCORE@71..72 "_"
+ WHITESPACE@72..73 " "
+ EQ@73..74 "="
+ WHITESPACE@74..75 " "
+ PATH_EXPR@75..79
+ PATH@75..79
+ PATH_SEGMENT@75..79
+ NAME_REF@75..79
+ IDENT@75..79 "None"
+ R_PAREN@79..80 ")"
+ WHITESPACE@80..81 " "
+ R_CURLY@81..82 "}"
+ SEMICOLON@82..83 ";"
+ WHITESPACE@83..89 "\n\n "
+ IF_EXPR@89..279
+ IF_KW@89..91 "if"
+ WHITESPACE@91..92 " "
+ BIN_EXPR@92..114
+ LITERAL@92..96
+ TRUE_KW@92..96 "true"
+ WHITESPACE@96..97 " "
+ AMP2@97..99 "&&"
+ WHITESPACE@99..100 " "
+ PAREN_EXPR@100..114
+ L_PAREN@100..101 "("
+ LET_EXPR@101..113
+ LET_KW@101..104 "let"
+ WHITESPACE@104..105 " "
+ WILDCARD_PAT@105..106
+ UNDERSCORE@105..106 "_"
+ WHITESPACE@106..107 " "
+ EQ@107..108 "="
+ WHITESPACE@108..109 " "
+ PATH_EXPR@109..113
+ PATH@109..113
+ PATH_SEGMENT@109..113
+ NAME_REF@109..113
+ IDENT@109..113 "None"
+ R_PAREN@113..114 ")"
+ WHITESPACE@114..115 " "
+ BLOCK_EXPR@115..279
+ STMT_LIST@115..279
+ L_CURLY@115..116 "{"
+ WHITESPACE@116..125 "\n "
+ EXPR_STMT@125..140
+ PAREN_EXPR@125..139
+ L_PAREN@125..126 "("
+ LET_EXPR@126..138
+ LET_KW@126..129 "let"
+ WHITESPACE@129..130 " "
+ WILDCARD_PAT@130..131
+ UNDERSCORE@130..131 "_"
+ WHITESPACE@131..132 " "
+ EQ@132..133 "="
+ WHITESPACE@133..134 " "
+ PATH_EXPR@134..138
+ PATH@134..138
+ PATH_SEGMENT@134..138
+ NAME_REF@134..138
+ IDENT@134..138 "None"
+ R_PAREN@138..139 ")"
+ SEMICOLON@139..140 ";"
+ WHITESPACE@140..149 "\n "
+ WHILE_EXPR@149..273
+ WHILE_KW@149..154 "while"
+ WHITESPACE@154..155 " "
+ LET_EXPR@155..167
+ LET_KW@155..158 "let"
+ WHITESPACE@158..159 " "
+ WILDCARD_PAT@159..160
+ UNDERSCORE@159..160 "_"
+ WHITESPACE@160..161 " "
+ EQ@161..162 "="
+ WHITESPACE@162..163 " "
+ PATH_EXPR@163..167
+ PATH@163..167
+ PATH_SEGMENT@163..167
+ NAME_REF@163..167
+ IDENT@163..167 "None"
+ WHITESPACE@167..168 " "
+ BLOCK_EXPR@168..273
+ STMT_LIST@168..273
+ L_CURLY@168..169 "{"
+ WHITESPACE@169..182 "\n "
+ MATCH_EXPR@182..263
+ MATCH_KW@182..187 "match"
+ WHITESPACE@187..188 " "
+ PATH_EXPR@188..192
+ PATH@188..192
+ PATH_SEGMENT@188..192
+ NAME_REF@188..192
+ IDENT@188..192 "None"
+ WHITESPACE@192..193 " "
+ MATCH_ARM_LIST@193..263
+ L_CURLY@193..194 "{"
+ WHITESPACE@194..211 "\n "
+ MATCH_ARM@211..249
+ WILDCARD_PAT@211..212
+ UNDERSCORE@211..212 "_"
+ WHITESPACE@212..213 " "
+ MATCH_GUARD@213..228
+ IF_KW@213..215 "if"
+ WHITESPACE@215..216 " "
+ LET_EXPR@216..228
+ LET_KW@216..219 "let"
+ WHITESPACE@219..220 " "
+ WILDCARD_PAT@220..221
+ UNDERSCORE@220..221 "_"
+ WHITESPACE@221..222 " "
+ EQ@222..223 "="
+ WHITESPACE@223..224 " "
+ PATH_EXPR@224..228
+ PATH@224..228
+ PATH_SEGMENT@224..228
+ NAME_REF@224..228
+ IDENT@224..228 "None"
+ WHITESPACE@228..229 " "
+ FAT_ARROW@229..231 "=>"
+ WHITESPACE@231..232 " "
+ BLOCK_EXPR@232..249
+ STMT_LIST@232..249
+ L_CURLY@232..233 "{"
+ WHITESPACE@233..234 " "
+ LET_STMT@234..247
+ LET_KW@234..237 "let"
+ WHITESPACE@237..238 " "
+ WILDCARD_PAT@238..239
+ UNDERSCORE@238..239 "_"
+ WHITESPACE@239..240 " "
+ EQ@240..241 "="
+ WHITESPACE@241..242 " "
+ PATH_EXPR@242..246
+ PATH@242..246
+ PATH_SEGMENT@242..246
+ NAME_REF@242..246
+ IDENT@242..246 "None"
+ SEMICOLON@246..247 ";"
+ WHITESPACE@247..248 " "
+ R_CURLY@248..249 "}"
+ WHITESPACE@249..262 "\n "
+ R_CURLY@262..263 "}"
+ WHITESPACE@263..272 "\n "
+ R_CURLY@272..273 "}"
+ WHITESPACE@273..278 "\n "
+ R_CURLY@278..279 "}"
+ WHITESPACE@279..280 "\n"
+ R_CURLY@280..281 "}"
+ WHITESPACE@281..282 "\n"
+error 29..41: `let` expressions are not supported here
+error 67..79: `let` expressions are not supported here
+error 126..138: `let` expressions are not supported here
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs
new file mode 100644
index 000000000..1515ae533
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs
@@ -0,0 +1,14 @@
+fn foo() {
+ const _: () = let _ = None;
+
+ let _ = if true { (let _ = None) };
+
+ if true && (let _ = None) {
+ (let _ = None);
+ while let _ = None {
+ match None {
+ _ if let _ = None => { let _ = None; }
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs
new file mode 100644
index 000000000..388eb74ed
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs
@@ -0,0 +1,6 @@
+0
+1
+
+
+
+0 \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs
new file mode 100644
index 000000000..d2d42c6f9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs
@@ -0,0 +1,4 @@
+0
+1
+
+bb" \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs
new file mode 100644
index 000000000..3fbee1548
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs
@@ -0,0 +1,4 @@
+1
+1
+
+""! \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs
new file mode 100644
index 000000000..d2757cd08
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs
Binary files differ
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs
new file mode 100644
index 000000000..481617a70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs
@@ -0,0 +1,4 @@
+0
+0
+}
+{; \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs
new file mode 100644
index 000000000..074d761c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs
@@ -0,0 +1,7 @@
+05
+1
+
+
+
+b'
+ \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
new file mode 100644
index 000000000..cceafe04e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "test-utils"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Avoid adding deps here; this crate is widely used in tests, so it should compile fast!
+dissimilar = "1.0.4"
+text-size = "1.1.0"
+rustc-hash = "1.1.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs b/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs
new file mode 100644
index 000000000..24502ddb4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/assert_linear.rs
@@ -0,0 +1,112 @@
+//! Checks that a set of measurements looks like a linear function rather than
+//! like a quadratic function. Algorithm:
+//!
+//! 1. Linearly scale input to be in [0; 1)
+//! 2. Using linear regression, compute the best linear function approximating
+//! the input.
+//! 3. Compute RMSE and maximal absolute error.
+//! 4. Check that errors are within tolerances and that the constant term is not
+//! too negative.
+//!
+//! Ideally, we should use a proper "model selection" to directly compare
+//! quadratic and linear models, but that sounds rather complicated:
+//!
+//! https://stats.stackexchange.com/questions/21844/selecting-best-model-based-on-linear-quadratic-and-cubic-fit-of-data
+//!
+//! We might get false positives on a VM, but never false negatives. So, if the
+//! first round fails, we repeat the ordeal up to three more times and fail only
+//! if every round shows a fault.
+use stdx::format_to;
+
+#[derive(Default)]
+pub struct AssertLinear {
+ rounds: Vec<Round>,
+}
+
+#[derive(Default)]
+struct Round {
+ samples: Vec<(f64, f64)>,
+ plot: String,
+ linear: bool,
+}
+
+impl AssertLinear {
+ pub fn next_round(&mut self) -> bool {
+ if let Some(round) = self.rounds.last_mut() {
+ round.finish();
+ }
+ if self.rounds.iter().any(|it| it.linear) || self.rounds.len() == 4 {
+ return false;
+ }
+ self.rounds.push(Round::default());
+ true
+ }
+
+ pub fn sample(&mut self, x: f64, y: f64) {
+ self.rounds.last_mut().unwrap().samples.push((x, y));
+ }
+}
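+
+// Illustrative driver loop (a sketch based on the API above, not part of the
+// upstream sources; `measure_work` is a hypothetical helper):
+//
+//     let mut checker = AssertLinear::default();
+//     while checker.next_round() {
+//         for n in [100, 200, 400, 800] {
+//             let seconds = measure_work(n);
+//             checker.sample(n as f64, seconds);
+//         }
+//     }
+//     // Dropping `checker` panics unless at least one round looked linear.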
+
+impl Drop for AssertLinear {
+ fn drop(&mut self) {
+ assert!(!self.rounds.is_empty());
+ if self.rounds.iter().all(|it| !it.linear) {
+ for round in &self.rounds {
+ eprintln!("\n{}", round.plot);
+ }
+ panic!("Doesn't look linear!");
+ }
+ }
+}
+
+impl Round {
+ fn finish(&mut self) {
+ let (mut xs, mut ys): (Vec<_>, Vec<_>) = self.samples.iter().copied().unzip();
+ normalize(&mut xs);
+ normalize(&mut ys);
+ let xy = xs.iter().copied().zip(ys.iter().copied());
+
+ // Linear regression: finding a and b to fit y = a + b*x.
+
+ let mean_x = mean(&xs);
+ let mean_y = mean(&ys);
+
+ let b = {
+ let mut num = 0.0;
+ let mut denom = 0.0;
+ for (x, y) in xy.clone() {
+ num += (x - mean_x) * (y - mean_y);
+ denom += (x - mean_x).powi(2);
+ }
+ num / denom
+ };
+
+ let a = mean_y - b * mean_x;
+
+ self.plot = format!("y_pred = {:.3} + {:.3} * x\n\nx y y_pred\n", a, b);
+
+ let mut se = 0.0;
+ let mut max_error = 0.0f64;
+ for (x, y) in xy {
+ let y_pred = a + b * x;
+ se += (y - y_pred).powi(2);
+ max_error = max_error.max((y_pred - y).abs());
+
+ format_to!(self.plot, "{:.3} {:.3} {:.3}\n", x, y, y_pred);
+ }
+
+ let rmse = (se / xs.len() as f64).sqrt();
+ format_to!(self.plot, "\nrmse = {:.3} max error = {:.3}", rmse, max_error);
+
+ self.linear = rmse < 0.05 && max_error < 0.1 && a > -0.1;
+
+ fn normalize(xs: &mut Vec<f64>) {
+ let max = xs.iter().copied().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap();
+ xs.iter_mut().for_each(|it| *it /= max);
+ }
+
+ fn mean(xs: &[f64]) -> f64 {
+ xs.iter().copied().sum::<f64>() / (xs.len() as f64)
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs
new file mode 100644
index 000000000..979156263
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/bench_fixture.rs
@@ -0,0 +1,45 @@
+//! Generates large snippets of Rust code for usage in the benchmarks.
+
+use std::fs;
+
+use stdx::format_to;
+
+use crate::project_root;
+
+pub fn big_struct() -> String {
+ let n = 1_000;
+ big_struct_n(n)
+}
+
+pub fn big_struct_n(n: u32) -> String {
+ let mut buf = "pub struct RegisterBlock {".to_string();
+ for i in 0..n {
+ format_to!(buf, " /// Doc comment for {}.\n", i);
+ format_to!(buf, " pub s{}: S{},\n", i, i);
+ }
+ buf.push_str("}\n\n");
+ for i in 0..n {
+ format_to!(
+ buf,
+ "
+
+#[repr(transparent)]
+struct S{} {{
+ field: u32,
+}}",
+ i
+ );
+ }
+
+ buf
+}
+
+pub fn glorious_old_parser() -> String {
+ let path = project_root().join("bench_data/glorious_old_parser");
+ fs::read_to_string(&path).unwrap()
+}
+
+pub fn numerous_macro_rules() -> String {
+ let path = project_root().join("bench_data/numerous_macro_rules");
+ fs::read_to_string(&path).unwrap()
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
new file mode 100644
index 000000000..8c806e792
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
@@ -0,0 +1,409 @@
+//! Defines `Fixture` -- a convenient way to describe the initial state of
+//! the rust-analyzer database from a single string.
+//!
+//! Fixtures are strings containing Rust source code with optional metadata.
+//! A fixture without metadata is parsed into a single source file.
+//! Use this to test functionality local to one file.
+//!
+//! Simple Example:
+//! ```
+//! r#"
+//! fn main() {
+//! println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata can be added to a fixture after a `//-` comment.
+//! The basic form is specifying filenames,
+//! which is also how to define multiple files in a single test fixture.
+//!
+//! Example using two files in the same crate:
+//! ```
+//! "
+//! //- /main.rs
+//! mod foo;
+//! fn main() {
+//! foo::bar();
+//! }
+//!
+//! //- /foo.rs
+//! pub fn bar() {}
+//! "
+//! ```
+//!
+//! Example using two crates with one file each, with one crate depending on the other:
+//! ```
+//! r#"
+//! //- /main.rs crate:a deps:b
+//! fn main() {
+//! b::foo();
+//! }
+//! //- /lib.rs crate:b
+//! pub fn b() {
+//! println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata allows specifying all settings and variables
+//! that are available in a real Rust project:
+//! - crate names via `crate:cratename`
+//! - dependencies via `deps:dep1,dep2`
+//! - configuration settings via `cfg:dbg=false,opt_level=2`
+//! - environment variables via `env:PATH=/bin,RUST_LOG=debug`
+//!
+//! Example using all available metadata:
+//! ```
+//! "
+//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+//! fn insert_source_code_here() {}
+//! "
+//! ```
+
+use rustc_hash::FxHashMap;
+use stdx::trim_indent;
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct Fixture {
+ pub path: String,
+ pub text: String,
+ pub krate: Option<String>,
+ pub deps: Vec<String>,
+ pub extern_prelude: Option<Vec<String>>,
+ pub cfg_atoms: Vec<String>,
+ pub cfg_key_values: Vec<(String, String)>,
+ pub edition: Option<String>,
+ pub env: FxHashMap<String, String>,
+ pub introduce_new_source_root: Option<String>,
+}
+
+pub struct MiniCore {
+ activated_flags: Vec<String>,
+ valid_flags: Vec<String>,
+}
+
+impl Fixture {
+ /// Parses text which looks like this:
+ ///
+ /// ```not_rust
+ /// //- some meta
+ /// line 1
+ /// line 2
+ /// //- other meta
+ /// ```
+ ///
+ /// A fixture can also start with a proc_macros and minicore declaration (in that order):
+ ///
+ /// ```
+ /// //- proc_macros: identity
+ /// //- minicore: sized
+ /// ```
+ ///
+ /// That will include predefined proc macros and a subset of `libcore` in the fixture; see
+ /// `minicore.rs` for what's available.
+ pub fn parse(ra_fixture: &str) -> (Option<MiniCore>, Vec<String>, Vec<Fixture>) {
+ let fixture = trim_indent(ra_fixture);
+ let mut fixture = fixture.as_str();
+ let mut mini_core = None;
+ let mut res: Vec<Fixture> = Vec::new();
+ let mut test_proc_macros = vec![];
+
+ if fixture.starts_with("//- proc_macros:") {
+ let first_line = fixture.split_inclusive('\n').next().unwrap();
+ test_proc_macros = first_line
+ .strip_prefix("//- proc_macros:")
+ .unwrap()
+ .split(',')
+ .map(|it| it.trim().to_string())
+ .collect();
+ fixture = &fixture[first_line.len()..];
+ }
+
+ if fixture.starts_with("//- minicore:") {
+ let first_line = fixture.split_inclusive('\n').next().unwrap();
+ mini_core = Some(MiniCore::parse(first_line));
+ fixture = &fixture[first_line.len()..];
+ }
+
+ let default = if fixture.contains("//-") { None } else { Some("//- /main.rs") };
+
+ for (ix, line) in default.into_iter().chain(fixture.split_inclusive('\n')).enumerate() {
+ if line.contains("//-") {
+ assert!(
+ line.starts_with("//-"),
+ "Metadata line {} has invalid indentation. \
+ All metadata lines need to have the same indentation.\n\
+ The offending line: {:?}",
+ ix,
+ line
+ );
+ }
+
+ if line.starts_with("//-") {
+ let meta = Fixture::parse_meta_line(line);
+ res.push(meta);
+ } else {
+ if line.starts_with("// ")
+ && line.contains(':')
+ && !line.contains("::")
+ && !line.contains('.')
+ && line.chars().all(|it| !it.is_uppercase())
+ {
+ panic!("looks like invalid metadata line: {:?}", line);
+ }
+
+ if let Some(entry) = res.last_mut() {
+ entry.text.push_str(line);
+ }
+ }
+ }
+
+ (mini_core, test_proc_macros, res)
+ }
+
+ //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+ fn parse_meta_line(meta: &str) -> Fixture {
+ assert!(meta.starts_with("//-"));
+ let meta = meta["//-".len()..].trim();
+ let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
+
+ let path = components[0].to_string();
+ assert!(path.starts_with('/'), "fixture path does not start with `/`: {:?}", path);
+
+ let mut krate = None;
+ let mut deps = Vec::new();
+ let mut extern_prelude = None;
+ let mut edition = None;
+ let mut cfg_atoms = Vec::new();
+ let mut cfg_key_values = Vec::new();
+ let mut env = FxHashMap::default();
+ let mut introduce_new_source_root = None;
+ for component in components[1..].iter() {
+ let (key, value) = component
+ .split_once(':')
+ .unwrap_or_else(|| panic!("invalid meta line: {:?}", meta));
+ match key {
+ "crate" => krate = Some(value.to_string()),
+ "deps" => deps = value.split(',').map(|it| it.to_string()).collect(),
+ "extern-prelude" => {
+ if value.is_empty() {
+ extern_prelude = Some(Vec::new());
+ } else {
+ extern_prelude =
+ Some(value.split(',').map(|it| it.to_string()).collect::<Vec<_>>());
+ }
+ }
+ "edition" => edition = Some(value.to_string()),
+ "cfg" => {
+ for entry in value.split(',') {
+ match entry.split_once('=') {
+ Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())),
+ None => cfg_atoms.push(entry.to_string()),
+ }
+ }
+ }
+ "env" => {
+ for key in value.split(',') {
+ if let Some((k, v)) = key.split_once('=') {
+ env.insert(k.into(), v.into());
+ }
+ }
+ }
+ "new_source_root" => introduce_new_source_root = Some(value.to_string()),
+ _ => panic!("bad component: {:?}", component),
+ }
+ }
+
+ for prelude_dep in extern_prelude.iter().flatten() {
+ assert!(
+ deps.contains(prelude_dep),
+ "extern-prelude {:?} must be a subset of deps {:?}",
+ extern_prelude,
+ deps
+ );
+ }
+
+ Fixture {
+ path,
+ text: String::new(),
+ krate,
+ deps,
+ extern_prelude,
+ cfg_atoms,
+ cfg_key_values,
+ edition,
+ env,
+ introduce_new_source_root,
+ }
+ }
+}
+
+impl MiniCore {
+ fn has_flag(&self, flag: &str) -> bool {
+ self.activated_flags.iter().any(|it| it == flag)
+ }
+
+ #[track_caller]
+ fn assert_valid_flag(&self, flag: &str) {
+ if !self.valid_flags.iter().any(|it| it == flag) {
+ panic!("invalid flag: {:?}, valid flags: {:?}", flag, self.valid_flags);
+ }
+ }
+
+ fn parse(line: &str) -> MiniCore {
+ let mut res = MiniCore { activated_flags: Vec::new(), valid_flags: Vec::new() };
+
+ let line = line.strip_prefix("//- minicore:").unwrap().trim();
+ for entry in line.split(", ") {
+ if res.has_flag(entry) {
+ panic!("duplicate minicore flag: {:?}", entry);
+ }
+ res.activated_flags.push(entry.to_string());
+ }
+
+ res
+ }
+
+ /// Strips parts of minicore.rs which are flagged by inactive flags.
+ ///
+ /// This is probably over-engineered to support flag dependencies.
+ pub fn source_code(mut self) -> String {
+ let mut buf = String::new();
+ let raw_mini_core = include_str!("./minicore.rs");
+ let mut lines = raw_mini_core.split_inclusive('\n');
+
+ let mut parsing_flags = false;
+ let mut implications = Vec::new();
+
+ // Parse `//!` preamble and extract flags and dependencies.
+ for line in lines.by_ref() {
+ let line = match line.strip_prefix("//!") {
+ Some(it) => it,
+ None => {
+ assert!(line.trim().is_empty());
+ break;
+ }
+ };
+
+ if parsing_flags {
+ let (flag, deps) = line.split_once(':').unwrap();
+ let flag = flag.trim();
+ self.valid_flags.push(flag.to_string());
+ for dep in deps.split(", ") {
+ let dep = dep.trim();
+ if !dep.is_empty() {
+ self.assert_valid_flag(dep);
+ implications.push((flag, dep));
+ }
+ }
+ }
+
+ if line.contains("Available flags:") {
+ parsing_flags = true;
+ }
+ }
+
+ for flag in &self.activated_flags {
+ self.assert_valid_flag(flag);
+ }
+
+ // Fixed point loop to compute transitive closure of flags.
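+ // For example, activating `iterators` first pulls in `iterator` and `fn`, and
+ // a second pass then pulls in `option` via `iterator`.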
+ loop {
+ let mut changed = false;
+ for &(u, v) in &implications {
+ if self.has_flag(u) && !self.has_flag(v) {
+ self.activated_flags.push(v.to_string());
+ changed = true;
+ }
+ }
+ if !changed {
+ break;
+ }
+ }
+
+ let mut active_regions = Vec::new();
+ let mut seen_regions = Vec::new();
+ for line in lines {
+ let trimmed = line.trim();
+ if let Some(region) = trimmed.strip_prefix("// region:") {
+ active_regions.push(region);
+ continue;
+ }
+ if let Some(region) = trimmed.strip_prefix("// endregion:") {
+ let prev = active_regions.pop().unwrap();
+ assert_eq!(prev, region);
+ continue;
+ }
+
+ let mut line_region = false;
+ if let Some(idx) = trimmed.find("// :") {
+ line_region = true;
+ active_regions.push(&trimmed[idx + "// :".len()..]);
+ }
+
+ let mut keep = true;
+ for &region in &active_regions {
+ assert!(
+ !region.starts_with(' '),
+ "region marker starts with a space: {:?}",
+ region
+ );
+ self.assert_valid_flag(region);
+ seen_regions.push(region);
+ keep &= self.has_flag(region);
+ }
+
+ if keep {
+ buf.push_str(line);
+ }
+ if line_region {
+ active_regions.pop().unwrap();
+ }
+ }
+
+ for flag in &self.valid_flags {
+ if !seen_regions.iter().any(|it| it == flag) {
+ panic!("unused minicore flag: {:?}", flag);
+ }
+ }
+ buf
+ }
+}
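+
+// Sketch of the region syntax handled by `source_code` (illustrative; see
+// `minicore.rs` itself for the real markers):
+//
+//     // region:iterator
+//     pub mod iter { /* kept only when the `iterator` flag is active */ }
+//     // endregion:iterator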
+
+#[test]
+#[should_panic]
+fn parse_fixture_checks_further_indented_metadata() {
+ Fixture::parse(
+ r"
+ //- /lib.rs
+ mod bar;
+
+ fn foo() {}
+ //- /bar.rs
+ pub fn baz() {}
+ ",
+ );
+}
+
+#[test]
+fn parse_fixture_gets_full_meta() {
+ let (mini_core, proc_macros, parsed) = Fixture::parse(
+ r#"
+//- proc_macros: identity
+//- minicore: coerce_unsized
+//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo
+mod m;
+"#,
+ );
+ assert_eq!(proc_macros, vec!["identity".to_string()]);
+ assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_string()]);
+ assert_eq!(1, parsed.len());
+
+ let meta = &parsed[0];
+ assert_eq!("mod m;\n", meta.text);
+
+ assert_eq!("foo", meta.krate.as_ref().unwrap());
+ assert_eq!("/lib.rs", meta.path);
+ assert_eq!(2, meta.env.len());
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
new file mode 100644
index 000000000..8a9cfb6c2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
@@ -0,0 +1,500 @@
+//! Assorted testing utilities.
+//!
+//! Most notable things are:
+//!
+//! * Rich text comparison, which outputs a diff.
+//! * Extracting markup (mainly, `$0` markers) out of fixture strings.
+//! * marks (see the eponymous module).
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod assert_linear;
+pub mod bench_fixture;
+mod fixture;
+
+use std::{
+ collections::BTreeMap,
+ env, fs,
+ path::{Path, PathBuf},
+};
+
+use profile::StopWatch;
+use stdx::is_ci;
+use text_size::{TextRange, TextSize};
+
+pub use dissimilar::diff as __diff;
+pub use rustc_hash::FxHashMap;
+
+pub use crate::{
+ assert_linear::AssertLinear,
+ fixture::{Fixture, MiniCore},
+};
+
+pub const CURSOR_MARKER: &str = "$0";
+pub const ESCAPED_CURSOR_MARKER: &str = "\\$0";
+
+/// Asserts that two strings are equal, otherwise displays a rich diff between them.
+///
+/// The diff shows changes from the "original" left string to the "actual" right string.
+///
+/// All arguments starting from and including the 3rd one are passed to the
+/// `eprintln!()` macro in case of text inequality.
+#[macro_export]
+macro_rules! assert_eq_text {
+ ($left:expr, $right:expr) => {
+ assert_eq_text!($left, $right,)
+ };
+ ($left:expr, $right:expr, $($tt:tt)*) => {{
+ let left = $left;
+ let right = $right;
+ if left != right {
+ if left.trim() == right.trim() {
+ std::eprintln!("Left:\n{:?}\n\nRight:\n{:?}\n\nWhitespace difference\n", left, right);
+ } else {
+ let diff = $crate::__diff(left, right);
+ std::eprintln!("Left:\n{}\n\nRight:\n{}\n\nDiff:\n{}\n", left, right, $crate::format_diff(diff));
+ }
+ std::eprintln!($($tt)*);
+ panic!("text differs");
+ }
+ }};
+}
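+
+// Typical call (illustrative, not an upstream doc test): the extra arguments
+// after the two strings are forwarded to `eprintln!` when the texts differ.
+//
+//     assert_eq_text!(expected_output, actual_output, "while checking {:?}", path);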
+
+/// Infallible version of `try_extract_offset()`.
+pub fn extract_offset(text: &str) -> (TextSize, String) {
+ match try_extract_offset(text) {
+ None => panic!("text should contain cursor marker"),
+ Some(result) => result,
+ }
+}
+
+/// Returns the offset of the first occurrence of `$0` marker and the copy of `text`
+/// without the marker.
+fn try_extract_offset(text: &str) -> Option<(TextSize, String)> {
+ let cursor_pos = text.find(CURSOR_MARKER)?;
+ let mut new_text = String::with_capacity(text.len() - CURSOR_MARKER.len());
+ new_text.push_str(&text[..cursor_pos]);
+ new_text.push_str(&text[cursor_pos + CURSOR_MARKER.len()..]);
+ let cursor_pos = TextSize::from(cursor_pos as u32);
+ Some((cursor_pos, new_text))
+}
+
+/// Infallible version of `try_extract_range()`.
+pub fn extract_range(text: &str) -> (TextRange, String) {
+ match try_extract_range(text) {
+ None => panic!("text should contain cursor marker"),
+ Some(result) => result,
+ }
+}
+
+/// Returns `TextRange` between the first two markers `$0...$0` and the copy
+/// of `text` without both of these markers.
+fn try_extract_range(text: &str) -> Option<(TextRange, String)> {
+ let (start, text) = try_extract_offset(text)?;
+ let (end, text) = try_extract_offset(&text)?;
+ Some((TextRange::new(start, end), text))
+}
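+
+// For example (illustrative), `try_extract_range("fn $0main$0() {}")` yields the
+// range covering `main` together with the cleaned text `"fn main() {}"`.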
+
+#[derive(Clone, Copy)]
+pub enum RangeOrOffset {
+ Range(TextRange),
+ Offset(TextSize),
+}
+
+impl RangeOrOffset {
+ pub fn expect_offset(self) -> TextSize {
+ match self {
+ RangeOrOffset::Offset(it) => it,
+ RangeOrOffset::Range(_) => panic!("expected an offset but got a range instead"),
+ }
+ }
+ pub fn expect_range(self) -> TextRange {
+ match self {
+ RangeOrOffset::Range(it) => it,
+ RangeOrOffset::Offset(_) => panic!("expected a range but got an offset"),
+ }
+ }
+ pub fn range_or_empty(self) -> TextRange {
+ match self {
+ RangeOrOffset::Range(range) => range,
+ RangeOrOffset::Offset(offset) => TextRange::empty(offset),
+ }
+ }
+}
+
+impl From<RangeOrOffset> for TextRange {
+ fn from(selection: RangeOrOffset) -> Self {
+ match selection {
+ RangeOrOffset::Range(it) => it,
+ RangeOrOffset::Offset(it) => TextRange::empty(it),
+ }
+ }
+}
+
+/// Extracts `TextRange` or `TextSize` depending on the number of `$0` markers
+/// found in `text`.
+///
+/// # Panics
+/// Panics if no `$0` marker is present in the `text`.
+pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) {
+ if let Some((range, text)) = try_extract_range(text) {
+ return (RangeOrOffset::Range(range), text);
+ }
+ let (offset, text) = extract_offset(text);
+ (RangeOrOffset::Offset(offset), text)
+}
+
+/// Extracts ranges marked with `<tag> </tag>` pairs from the `text`.
+pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option<String>)>, String) {
+ let open = format!("<{}", tag);
+ let close = format!("</{}>", tag);
+ let mut ranges = Vec::new();
+ let mut res = String::new();
+ let mut stack = Vec::new();
+ loop {
+ match text.find('<') {
+ None => {
+ res.push_str(text);
+ break;
+ }
+ Some(i) => {
+ res.push_str(&text[..i]);
+ text = &text[i..];
+ if text.starts_with(&open) {
+ let close_open = text.find('>').unwrap();
+ let attr = text[open.len()..close_open].trim();
+ let attr = if attr.is_empty() { None } else { Some(attr.to_string()) };
+ text = &text[close_open + '>'.len_utf8()..];
+ let from = TextSize::of(&res);
+ stack.push((from, attr));
+ } else if text.starts_with(&close) {
+ text = &text[close.len()..];
+ let (from, attr) =
+ stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
+ let to = TextSize::of(&res);
+ ranges.push((TextRange::new(from, to), attr));
+ } else {
+ res.push('<');
+ text = &text['<'.len_utf8()..];
+ }
+ }
+ }
+ }
+ assert!(stack.is_empty(), "unmatched <{}>", tag);
+ ranges.sort_by_key(|r| (r.0.start(), r.0.end()));
+ (ranges, res)
+}
+#[test]
+fn test_extract_tags() {
+ let (tags, text) = extract_tags(r#"<tag fn>fn <tag>main</tag>() {}</tag>"#, "tag");
+ let actual = tags.into_iter().map(|(range, attr)| (&text[range], attr)).collect::<Vec<_>>();
+ assert_eq!(actual, vec![("fn main() {}", Some("fn".into())), ("main", None),]);
+}
+
+/// Inserts `$0` marker into the `text` at `offset`.
+pub fn add_cursor(text: &str, offset: TextSize) -> String {
+ let offset: usize = offset.into();
+ let mut res = String::new();
+ res.push_str(&text[..offset]);
+ res.push_str("$0");
+ res.push_str(&text[offset..]);
+ res
+}
+
+/// Extracts `//^^^ some text` annotations.
+///
+/// A run of `^^^` can be arbitrarily long and points to the corresponding range
+/// in the line above.
+///
+/// The `// ^file text` syntax can be used to attach `text` to the entirety of
+/// the file.
+///
+/// Multiline string values are supported:
+///
+/// // ^^^ first line
+/// // | second line
+///
+/// Trailing whitespace is sometimes desired but usually stripped by the editor
+/// if at the end of a line, or incorrectly sized if followed by another
+/// annotation. In those cases the annotation can be explicitly ended with the
+/// `$` character.
+///
+/// // ^^^ trailing-ws-wanted $
+///
+/// Annotations point to the last preceding line that was actually long enough
+/// to contain the range, not counting annotation lines themselves. So
+/// overlapping annotations are possible:
+/// ```no_run
+/// // stuff other stuff
+/// // ^^ 'st'
+/// // ^^^^^ 'stuff'
+/// // ^^^^^^^^^^^ 'other stuff'
+/// ```
+pub fn extract_annotations(text: &str) -> Vec<(TextRange, String)> {
+ let mut res = Vec::new();
+ // map from line length to beginning of last line that had that length
+ let mut line_start_map = BTreeMap::new();
+ let mut line_start: TextSize = 0.into();
+ let mut prev_line_annotations: Vec<(TextSize, usize)> = Vec::new();
+ for line in text.split_inclusive('\n') {
+ let mut this_line_annotations = Vec::new();
+ let line_length = if let Some((prefix, suffix)) = line.split_once("//") {
+ let ss_len = TextSize::of("//");
+ let annotation_offset = TextSize::of(prefix) + ss_len;
+ for annotation in extract_line_annotations(suffix.trim_end_matches('\n')) {
+ match annotation {
+ LineAnnotation::Annotation { mut range, content, file } => {
+ range += annotation_offset;
+ this_line_annotations.push((range.end(), res.len()));
+ let range = if file {
+ TextRange::up_to(TextSize::of(text))
+ } else {
+ let line_start = line_start_map.range(range.end()..).next().unwrap();
+
+ range + line_start.1
+ };
+ res.push((range, content));
+ }
+ LineAnnotation::Continuation { mut offset, content } => {
+ offset += annotation_offset;
+ let &(_, idx) = prev_line_annotations
+ .iter()
+ .find(|&&(off, _idx)| off == offset)
+ .unwrap();
+ res[idx].1.push('\n');
+ res[idx].1.push_str(&content);
+ res[idx].1.push('\n');
+ }
+ }
+ }
+ annotation_offset
+ } else {
+ TextSize::of(line)
+ };
+
+ line_start_map = line_start_map.split_off(&line_length);
+ line_start_map.insert(line_length, line_start);
+
+ line_start += TextSize::of(line);
+
+ prev_line_annotations = this_line_annotations;
+ }
+
+ res
+}
+
+enum LineAnnotation {
+ Annotation { range: TextRange, content: String, file: bool },
+ Continuation { offset: TextSize, content: String },
+}
+
+fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
+ let mut res = Vec::new();
+ let mut offset: TextSize = 0.into();
+ let marker: fn(char) -> bool = if line.contains('^') { |c| c == '^' } else { |c| c == '|' };
+ while let Some(idx) = line.find(marker) {
+ offset += TextSize::try_from(idx).unwrap();
+ line = &line[idx..];
+
+ let mut len = line.chars().take_while(|&it| it == '^').count();
+ let mut continuation = false;
+ if len == 0 {
+ assert!(line.starts_with('|'));
+ continuation = true;
+ len = 1;
+ }
+ let range = TextRange::at(offset, len.try_into().unwrap());
+ let line_no_caret = &line[len..];
+ let end_marker = line_no_caret.find(|c| c == '$');
+ let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len);
+
+ let cond = |end_marker| {
+ end_marker < next
+ && (line_no_caret[end_marker + 1..].is_empty()
+ || line_no_caret[end_marker + 1..]
+ .strip_prefix(|c: char| c.is_whitespace() || c == '^')
+ .is_some())
+ };
+ let mut content = match end_marker {
+ Some(end_marker) if cond(end_marker) => &line_no_caret[..end_marker],
+ _ => line_no_caret[..next - len].trim_end(),
+ };
+
+ let mut file = false;
+ if !continuation && content.starts_with("file") {
+ file = true;
+ content = &content["file".len()..];
+ }
+
+ let content = content.trim_start().to_string();
+
+ let annotation = if continuation {
+ LineAnnotation::Continuation { offset: range.end(), content }
+ } else {
+ LineAnnotation::Annotation { range, content, file }
+ };
+ res.push(annotation);
+
+ line = &line[next..];
+ offset += TextSize::try_from(next).unwrap();
+ }
+
+ res
+}
+
+#[test]
+fn test_extract_annotations_1() {
+ let text = stdx::trim_indent(
+ r#"
+fn main() {
+ let (x, y) = (9, 2);
+ //^ def ^ def
+ zoo + 1
+} //^^^ type:
+ // | i32
+
+// ^file
+ "#,
+ );
+ let res = extract_annotations(&text)
+ .into_iter()
+ .map(|(range, ann)| (&text[range], ann))
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ res[..3],
+ [("x", "def".into()), ("y", "def".into()), ("zoo", "type:\ni32\n".into())]
+ );
+ assert_eq!(res[3].0.len(), 115);
+}
+
+#[test]
+fn test_extract_annotations_2() {
+ let text = stdx::trim_indent(
+ r#"
+fn main() {
+ (x, y);
+ //^ a
+ // ^ b
+ //^^^^^^^^ c
+}"#,
+ );
+ let res = extract_annotations(&text)
+ .into_iter()
+ .map(|(range, ann)| (&text[range], ann))
+ .collect::<Vec<_>>();
+
+ assert_eq!(res, [("x", "a".into()), ("y", "b".into()), ("(x, y)", "c".into())]);
+}
+
+/// Returns `false` if slow tests should not run, otherwise returns `true` and
+/// also creates a file at `./target/.slow_tests_cookie` which serves as a flag
+/// that slow tests did run.
+pub fn skip_slow_tests() -> bool {
+ let should_skip = (std::env::var("CI").is_err() && std::env::var("RUN_SLOW_TESTS").is_err())
+ || std::env::var("SKIP_SLOW_TESTS").is_ok();
+ if should_skip {
+ eprintln!("ignoring slow test");
+ } else {
+ let path = project_root().join("./target/.slow_tests_cookie");
+ fs::write(&path, ".").unwrap();
+ }
+ should_skip
+}
+
+/// Returns the path to the root directory of the `rust-analyzer` project.
+pub fn project_root() -> PathBuf {
+ let dir = env!("CARGO_MANIFEST_DIR");
+ PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
+}
+
+pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String {
+ let mut buf = String::new();
+ for chunk in chunks {
+ let formatted = match chunk {
+ dissimilar::Chunk::Equal(text) => text.into(),
+ dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text),
+ dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text),
+ };
+ buf.push_str(&formatted);
+ }
+ buf
+}
+
+/// Utility for writing benchmark tests.
+///
+/// A benchmark test looks like this:
+///
+/// ```
+/// #[test]
+/// fn benchmark_foo() {
+/// if skip_slow_tests() { return; }
+///
+/// let data = bench_fixture::some_fixture();
+/// let analysis = some_setup();
+///
+/// let hash = {
+/// let _b = bench("foo");
+/// actual_work(analysis)
+/// };
+/// assert_eq!(hash, 92);
+/// }
+/// ```
+///
+/// * We skip benchmarks by default, to save time.
+/// Ideal benchmark time is 800 -- 1500 ms in debug.
+/// * We don't count preparation as part of the benchmark
+/// * The benchmark itself returns some kind of numeric hash.
+/// The hash is used as a sanity check that some code is actually run.
+/// Otherwise, it's too easy to win the benchmark by just doing nothing.
+pub fn bench(label: &'static str) -> impl Drop {
+ struct Bencher {
+ sw: StopWatch,
+ label: &'static str,
+ }
+
+ impl Drop for Bencher {
+ fn drop(&mut self) {
+ eprintln!("{}: {}", self.label, self.sw.elapsed());
+ }
+ }
+
+ Bencher { sw: StopWatch::start(), label }
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+#[track_caller]
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+ if let Err(()) = try_ensure_file_contents(file, contents) {
+ panic!("Some files were not up-to-date");
+ }
+}
+
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and returns an error.
+pub fn try_ensure_file_contents(file: &Path, contents: &str) -> Result<(), ()> {
+ match std::fs::read_to_string(file) {
+ Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
+ return Ok(());
+ }
+ _ => (),
+ }
+ let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+ eprintln!(
+ "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+ display_path.display()
+ );
+ if is_ci() {
+ eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
+ }
+ if let Some(parent) = file.parent() {
+ let _ = std::fs::create_dir_all(parent);
+ }
+ std::fs::write(file, contents).unwrap();
+ Err(())
+}
+
+fn normalize_newlines(s: &str) -> String {
+ s.replace("\r\n", "\n")
+}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
new file mode 100644
index 000000000..f48d1ec66
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -0,0 +1,669 @@
+//! This is a fixture we use for tests that need lang items.
+//!
+//! We want to include the minimal subset of core for each test, so this file
+//! supports "conditional compilation". Tests use the following syntax to include minicore:
+//!
+//! //- minicore: flag1, flag2
+//!
+//! We then strip all the code marked with other flags.
+//!
+//! Available flags:
+//! sized:
+//! unsize: sized
+//! coerce_unsized: unsize
+//! slice:
+//! range:
+//! deref: sized
+//! deref_mut: deref
+//! index: sized
+//! fn:
+//! try:
+//! pin:
+//! future: pin
+//! option:
+//! result:
+//! iterator: option
+//! iterators: iterator, fn
+//! default: sized
+//! hash:
+//! clone: sized
+//! copy: clone
+//! from: sized
+//! eq: sized
+//! ord: eq, option
+//! derive:
+//! fmt: result
+//! bool_impl: option, fn
+//! add:
+//! as_ref: sized
+//! drop:
+
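+// Illustrative fixture header (not part of minicore itself): a test that needs
+// closures and `Iterator` starts with
+//
+//     //- minicore: iterators
+//
+// which, via the flag dependencies listed above, also activates `iterator`,
+// `option` and `fn`.
+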
+pub mod marker {
+ // region:sized
+ #[lang = "sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ pub trait Sized {}
+ // endregion:sized
+
+ // region:unsize
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ // endregion:unsize
+
+ // region:copy
+ #[lang = "copy"]
+ pub trait Copy: Clone {}
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Copy($item:item) {}
+ // endregion:derive
+
+ mod copy_impls {
+ use super::Copy;
+
+ macro_rules! impl_copy {
+ ($($t:ty)*) => {
+ $(
+ impl Copy for $t {}
+ )*
+ }
+ }
+
+ impl_copy! {
+ usize u8 u16 u32 u64 u128
+ isize i8 i16 i32 i64 i128
+ f32 f64
+ bool char
+ }
+
+ impl<T: ?Sized> Copy for *const T {}
+ impl<T: ?Sized> Copy for *mut T {}
+ impl<T: ?Sized> Copy for &T {}
+ }
+ // endregion:copy
+}
+
+// region:default
+pub mod default {
+ pub trait Default: Sized {
+ fn default() -> Self;
+ }
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Default($item:item) {}
+ // endregion:derive
+}
+// endregion:default
+
+// region:hash
+pub mod hash {
+ pub trait Hasher {}
+
+ pub trait Hash {
+ fn hash<H: Hasher>(&self, state: &mut H);
+ }
+}
+// endregion:hash
+
+// region:clone
+pub mod clone {
+ #[lang = "clone"]
+ pub trait Clone: Sized {
+ fn clone(&self) -> Self;
+ }
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro Clone($item:item) {}
+ // endregion:derive
+}
+// endregion:clone
+
+pub mod convert {
+ // region:from
+ pub trait From<T>: Sized {
+ fn from(_: T) -> Self;
+ }
+ pub trait Into<T>: Sized {
+ fn into(self) -> T;
+ }
+
+ impl<T, U> Into<U> for T
+ where
+ U: From<T>,
+ {
+ fn into(self) -> U {
+ U::from(self)
+ }
+ }
+
+ impl<T> From<T> for T {
+ fn from(t: T) -> T {
+ t
+ }
+ }
+ // endregion:from
+
+ // region:as_ref
+ pub trait AsRef<T: ?Sized> {
+ fn as_ref(&self) -> &T;
+ }
+ // endregion:as_ref
+}
+
+pub mod ops {
+ // region:coerce_unsized
+ mod unsize {
+ use crate::marker::Unsize;
+
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T: ?Sized> {}
+
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}
+
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
+ }
+ pub use self::unsize::CoerceUnsized;
+ // endregion:coerce_unsized
+
+ // region:deref
+ mod deref {
+ #[lang = "deref"]
+ pub trait Deref {
+ #[lang = "deref_target"]
+ type Target: ?Sized;
+ fn deref(&self) -> &Self::Target;
+ }
+ // region:deref_mut
+ #[lang = "deref_mut"]
+ pub trait DerefMut: Deref {
+ fn deref_mut(&mut self) -> &mut Self::Target;
+ }
+ // endregion:deref_mut
+ }
+ pub use self::deref::{
+ Deref,
+ DerefMut, // :deref_mut
+ };
+ // endregion:deref
+
+ // region:drop
+ #[lang = "drop"]
+ pub trait Drop {
+ fn drop(&mut self);
+ }
+ // endregion:drop
+
+ // region:index
+ mod index {
+ #[lang = "index"]
+ pub trait Index<Idx: ?Sized> {
+ type Output: ?Sized;
+ fn index(&self, index: Idx) -> &Self::Output;
+ }
+ #[lang = "index_mut"]
+ pub trait IndexMut<Idx: ?Sized>: Index<Idx> {
+ fn index_mut(&mut self, index: Idx) -> &mut Self::Output;
+ }
+
+ // region:slice
+ impl<T, I> Index<I> for [T]
+ where
+ I: SliceIndex<[T]>,
+ {
+ type Output = I::Output;
+ fn index(&self, index: I) -> &I::Output {
+ loop {}
+ }
+ }
+ impl<T, I> IndexMut<I> for [T]
+ where
+ I: SliceIndex<[T]>,
+ {
+ fn index_mut(&mut self, index: I) -> &mut I::Output {
+ loop {}
+ }
+ }
+
+ pub unsafe trait SliceIndex<T: ?Sized> {
+ type Output: ?Sized;
+ }
+ unsafe impl<T> SliceIndex<[T]> for usize {
+ type Output = T;
+ }
+ // endregion:slice
+ }
+ pub use self::index::{Index, IndexMut};
+ // endregion:index
+
+ // region:drop
+ pub mod mem {
+ pub fn drop<T>(_x: T) {}
+ }
+ // endregion:drop
+
+ // region:range
+ mod range {
+ #[lang = "RangeFull"]
+ pub struct RangeFull;
+
+ #[lang = "Range"]
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+
+ #[lang = "RangeFrom"]
+ pub struct RangeFrom<Idx> {
+ pub start: Idx,
+ }
+
+ #[lang = "RangeTo"]
+ pub struct RangeTo<Idx> {
+ pub end: Idx,
+ }
+
+ #[lang = "RangeInclusive"]
+ pub struct RangeInclusive<Idx> {
+ pub(crate) start: Idx,
+ pub(crate) end: Idx,
+ pub(crate) exhausted: bool,
+ }
+
+ #[lang = "RangeToInclusive"]
+ pub struct RangeToInclusive<Idx> {
+ pub end: Idx,
+ }
+ }
+ pub use self::range::{Range, RangeFrom, RangeFull, RangeTo};
+ pub use self::range::{RangeInclusive, RangeToInclusive};
+ // endregion:range
+
+ // region:fn
+ mod function {
+ #[lang = "fn"]
+ #[fundamental]
+ pub trait Fn<Args>: FnMut<Args> {}
+
+ #[lang = "fn_mut"]
+ #[fundamental]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn_once"]
+ #[fundamental]
+ pub trait FnOnce<Args> {
+ #[lang = "fn_once_output"]
+ type Output;
+ }
+ }
+ pub use self::function::{Fn, FnMut, FnOnce};
+ // endregion:fn
+ // region:try
+ mod try_ {
+ pub enum ControlFlow<B, C = ()> {
+ Continue(C),
+ Break(B),
+ }
+ pub trait FromResidual<R = Self::Residual> {
+ #[lang = "from_residual"]
+ fn from_residual(residual: R) -> Self;
+ }
+ #[lang = "try"]
+ pub trait Try: FromResidual<Self::Residual> {
+ type Output;
+ type Residual;
+ #[lang = "from_output"]
+ fn from_output(output: Self::Output) -> Self;
+ #[lang = "branch"]
+ fn branch(self) -> ControlFlow<Self::Residual, Self::Output>;
+ }
+
+ impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, convert::Infallible>;
+ fn from_output(output: Self::Output) -> Self {}
+ fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {}
+ }
+
+ impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {}
+ }
+ }
+ pub use self::try_::{ControlFlow, FromResidual, Try};
+ // endregion:try
+
+ // region:add
+ #[lang = "add"]
+ pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+ }
+ // endregion:add
+}
+
+// region:eq
+pub mod cmp {
+ #[lang = "eq"]
+ pub trait PartialEq<Rhs: ?Sized = Self> {
+ fn eq(&self, other: &Rhs) -> bool;
+ fn ne(&self, other: &Rhs) -> bool {
+ !self.eq(other)
+ }
+ }
+
+ pub trait Eq: PartialEq<Self> {}
+
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro PartialEq($item:item) {}
+ #[rustc_builtin_macro]
+ pub macro Eq($item:item) {}
+ // endregion:derive
+
+ // region:ord
+ #[lang = "partial_ord"]
+ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
+ fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
+ }
+
+ pub trait Ord: Eq + PartialOrd<Self> {
+ fn cmp(&self, other: &Self) -> Ordering;
+ }
+
+ pub enum Ordering {
+ Less = -1,
+ Equal = 0,
+ Greater = 1,
+ }
+
+ // region:derive
+ #[rustc_builtin_macro]
+ pub macro PartialOrd($item:item) {}
+ #[rustc_builtin_macro]
+ pub macro Ord($item:item) {}
+ // endregion:derive
+
+ // endregion:ord
+}
+// endregion:eq
+
+// region:fmt
+pub mod fmt {
+ pub struct Error;
+ pub type Result = Result<(), Error>;
+ pub struct Formatter<'a>;
+ pub trait Debug {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result;
+ }
+}
+// endregion:fmt
+
+// region:slice
+pub mod slice {
+ #[lang = "slice"]
+ impl<T> [T] {
+ pub fn len(&self) -> usize {
+ loop {}
+ }
+ }
+}
+// endregion:slice
+
+// region:option
+pub mod option {
+ pub enum Option<T> {
+ #[lang = "None"]
+ None,
+ #[lang = "Some"]
+ Some(T),
+ }
+
+ impl<T> Option<T> {
+ pub const fn unwrap(self) -> T {
+ match self {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ }
+ }
+ }
+}
+// endregion:option
+
+// region:result
+pub mod result {
+ pub enum Result<T, E> {
+ #[lang = "Ok"]
+ Ok(T),
+ #[lang = "Err"]
+ Err(E),
+ }
+}
+// endregion:result
+
+// region:pin
+pub mod pin {
+ #[lang = "pin"]
+ #[fundamental]
+ pub struct Pin<P> {
+ pointer: P,
+ }
+}
+// endregion:pin
+
+// region:future
+pub mod future {
+ use crate::{
+ pin::Pin,
+ task::{Context, Poll},
+ };
+
+ #[lang = "future_trait"]
+ pub trait Future {
+ type Output;
+ #[lang = "poll"]
+ fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output>;
+ }
+}
+pub mod task {
+ pub enum Poll<T> {
+ #[lang = "Ready"]
+ Ready(T),
+ #[lang = "Pending"]
+ Pending,
+ }
+
+ pub struct Context<'a> {
+ waker: &'a (),
+ }
+}
+// endregion:future
+
+// region:iterator
+pub mod iter {
+ // region:iterators
+ mod adapters {
+ pub struct Take<I> {
+ iter: I,
+ n: usize,
+ }
+ impl<I> Iterator for Take<I>
+ where
+ I: Iterator,
+ {
+ type Item = <I as Iterator>::Item;
+
+ fn next(&mut self) -> Option<<I as Iterator>::Item> {
+ loop {}
+ }
+ }
+
+ pub struct FilterMap<I, F> {
+ iter: I,
+ f: F,
+ }
+ impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+ where
+ F: FnMut(I::Item) -> Option<B>,
+ {
+ type Item = B;
+
+ #[inline]
+ fn next(&mut self) -> Option<B> {
+ loop {}
+ }
+ }
+ }
+ pub use self::adapters::{Take, FilterMap};
+
+ mod sources {
+ mod repeat {
+ pub fn repeat<T>(elt: T) -> Repeat<T> {
+ loop {}
+ }
+
+ pub struct Repeat<A> {
+ element: A,
+ }
+
+ impl<A> Iterator for Repeat<A> {
+ type Item = A;
+
+ fn next(&mut self) -> Option<A> {
+ loop {}
+ }
+ }
+ }
+ pub use self::repeat::{repeat, Repeat};
+ }
+ pub use self::sources::{repeat, Repeat};
+ // endregion:iterators
+
+ mod traits {
+ mod iterator {
+ use super::super::Take;
+
+ pub trait Iterator {
+ type Item;
+ #[lang = "next"]
+ fn next(&mut self) -> Option<Self::Item>;
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ loop {}
+ }
+ fn by_ref(&mut self) -> &mut Self
+ where
+ Self: Sized,
+ {
+ self
+ }
+ // region:iterators
+ fn take(self, n: usize) -> crate::iter::Take<Self> {
+ loop {}
+ }
+ fn filter_map<B, F>(self, f: F) -> crate::iter::FilterMap<Self, F>
+ where
+ Self: Sized,
+ F: FnMut(Self::Item) -> Option<B>,
+ {
+ loop {}
+ }
+ // endregion:iterators
+ }
+ impl<I: Iterator + ?Sized> Iterator for &mut I {
+ type Item = I::Item;
+ fn next(&mut self) -> Option<I::Item> {
+ (**self).next()
+ }
+ }
+ }
+ pub use self::iterator::Iterator;
+
+ mod collect {
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ #[lang = "into_iter"]
+ fn into_iter(self) -> Self::IntoIter;
+ }
+ impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+ fn into_iter(self) -> I {
+ self
+ }
+ }
+ }
+ pub use self::collect::IntoIterator;
+ }
+ pub use self::traits::{IntoIterator, Iterator};
+}
+// endregion:iterator
+
+// region:derive
+mod macros {
+ pub(crate) mod builtin {
+ #[rustc_builtin_macro]
+ pub macro derive($item:item) {
+ /* compiler built-in */
+ }
+ }
+}
+// endregion:derive
+
+// region:bool_impl
+#[lang = "bool"]
+impl bool {
+ pub fn then<T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
+ if self {
+ Some(f())
+ } else {
+ None
+ }
+ }
+}
+// endregion:bool_impl
+
+pub mod prelude {
+ pub mod v1 {
+ pub use crate::{
+ clone::Clone, // :clone
+ cmp::{Eq, PartialEq}, // :eq
+ cmp::{Ord, PartialOrd}, // :ord
+ convert::AsRef, // :as_ref
+ convert::{From, Into}, // :from
+ default::Default, // :default
+ iter::{IntoIterator, Iterator}, // :iterator
+ macros::builtin::derive, // :derive
+ marker::Copy, // :copy
+ marker::Sized, // :sized
+ mem::drop, // :drop
+ ops::Drop, // :drop
+ ops::{Fn, FnMut, FnOnce}, // :fn
+ option::Option::{self, None, Some}, // :option
+ result::Result::{self, Err, Ok}, // :result
+ };
+ }
+
+ pub mod rust_2015 {
+ pub use super::v1::*;
+ }
+
+ pub mod rust_2018 {
+ pub use super::v1::*;
+ }
+
+ pub mod rust_2021 {
+ pub use super::v1::*;
+ }
+}
+
+#[prelude_import]
+#[allow(unused)]
+use prelude::v1::*;
diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
new file mode 100644
index 000000000..cf14bbd3c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "text-edit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+itertools = "0.10.3"
+text-size = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
new file mode 100644
index 000000000..9bb4271b6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
@@ -0,0 +1,264 @@
+//! Representation of a `TextEdit`.
+//!
+//! `rust-analyzer` never mutates text itself and only sends diffs to clients,
+//! so `TextEdit` is the ultimate representation of the work done by
+//! rust-analyzer.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use itertools::Itertools;
+use std::cmp::max;
+pub use text_size::{TextRange, TextSize};
+
+/// `InsertDelete` -- a single "atomic" change to text.
+///
+/// Must not overlap with other `Indel`s.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Indel {
+ pub insert: String,
+ /// Refers to offsets in the original text
+ pub delete: TextRange,
+}
+
+#[derive(Default, Debug, Clone)]
+pub struct TextEdit {
+ /// Invariant: disjoint and sorted by `delete`.
+ indels: Vec<Indel>,
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct TextEditBuilder {
+ indels: Vec<Indel>,
+}
+
+impl Indel {
+ pub fn insert(offset: TextSize, text: String) -> Indel {
+ Indel::replace(TextRange::empty(offset), text)
+ }
+ pub fn delete(range: TextRange) -> Indel {
+ Indel::replace(range, String::new())
+ }
+ pub fn replace(range: TextRange, replace_with: String) -> Indel {
+ Indel { delete: range, insert: replace_with }
+ }
+
+ pub fn apply(&self, text: &mut String) {
+ let start: usize = self.delete.start().into();
+ let end: usize = self.delete.end().into();
+ text.replace_range(start..end, &self.insert);
+ }
+}
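+
+// Minimal illustration (an assumption, not an upstream doc test):
+//
+//     let mut text = String::from("hello world");
+//     Indel::replace(TextRange::new(0.into(), 5.into()), "goodbye".into()).apply(&mut text);
+//     assert_eq!(text, "goodbye world");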
+
+impl TextEdit {
+ pub fn builder() -> TextEditBuilder {
+ TextEditBuilder::default()
+ }
+
+ pub fn insert(offset: TextSize, text: String) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ builder.insert(offset, text);
+ builder.finish()
+ }
+
+ pub fn delete(range: TextRange) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ builder.delete(range);
+ builder.finish()
+ }
+
+ pub fn replace(range: TextRange, replace_with: String) -> TextEdit {
+ let mut builder = TextEdit::builder();
+ builder.replace(range, replace_with);
+ builder.finish()
+ }
+
+ pub fn len(&self) -> usize {
+ self.indels.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.indels.is_empty()
+ }
+
+ pub fn iter(&self) -> std::slice::Iter<'_, Indel> {
+ self.into_iter()
+ }
+
+ pub fn apply(&self, text: &mut String) {
+ match self.len() {
+ 0 => return,
+ 1 => {
+ self.indels[0].apply(text);
+ return;
+ }
+ _ => (),
+ }
+
+ let text_size = TextSize::of(&*text);
+ let mut total_len = text_size;
+ let mut max_total_len = text_size;
+ for indel in &self.indels {
+ total_len += TextSize::of(&indel.insert);
+ total_len -= indel.delete.len();
+ max_total_len = max(max_total_len, total_len);
+ }
+
+ if let Some(additional) = max_total_len.checked_sub(text_size) {
+ text.reserve(additional.into());
+ }
+
+ for indel in self.indels.iter().rev() {
+ indel.apply(text);
+ }
+
+ assert_eq!(TextSize::of(&*text), total_len);
+ }
+
+ pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> {
+ let iter_merge =
+ self.iter().merge_by(other.iter(), |l, r| l.delete.start() <= r.delete.start());
+ if !check_disjoint(&mut iter_merge.clone()) {
+ return Err(other);
+ }
+
+ // Only dedup deletions and replacements, keep all insertions
+ self.indels = iter_merge.dedup_by(|a, b| a == b && !a.delete.is_empty()).cloned().collect();
+ Ok(())
+ }
+
+ pub fn apply_to_offset(&self, offset: TextSize) -> Option<TextSize> {
+ let mut res = offset;
+ for indel in &self.indels {
+ if indel.delete.start() >= offset {
+ break;
+ }
+ if offset < indel.delete.end() {
+ return None;
+ }
+ res += TextSize::of(&indel.insert);
+ res -= indel.delete.len();
+ }
+ Some(res)
+ }
+}
+
+impl IntoIterator for TextEdit {
+ type Item = Indel;
+ type IntoIter = std::vec::IntoIter<Indel>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.indels.into_iter()
+ }
+}
+
+impl<'a> IntoIterator for &'a TextEdit {
+ type Item = &'a Indel;
+ type IntoIter = std::slice::Iter<'a, Indel>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.indels.iter()
+ }
+}
+
+impl TextEditBuilder {
+ pub fn is_empty(&self) -> bool {
+ self.indels.is_empty()
+ }
+ pub fn replace(&mut self, range: TextRange, replace_with: String) {
+ self.indel(Indel::replace(range, replace_with));
+ }
+ pub fn delete(&mut self, range: TextRange) {
+ self.indel(Indel::delete(range));
+ }
+ pub fn insert(&mut self, offset: TextSize, text: String) {
+ self.indel(Indel::insert(offset, text));
+ }
+ pub fn finish(self) -> TextEdit {
+ let mut indels = self.indels;
+ assert_disjoint_or_equal(&mut indels);
+ TextEdit { indels }
+ }
+ pub fn invalidates_offset(&self, offset: TextSize) -> bool {
+ self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset))
+ }
+ fn indel(&mut self, indel: Indel) {
+ self.indels.push(indel);
+ if self.indels.len() <= 16 {
+ assert_disjoint_or_equal(&mut self.indels);
+ }
+ }
+}
+
+fn assert_disjoint_or_equal(indels: &mut [Indel]) {
+ assert!(check_disjoint_and_sort(indels));
+}
+
+fn check_disjoint_and_sort(indels: &mut [Indel]) -> bool {
+ indels.sort_by_key(|indel| (indel.delete.start(), indel.delete.end()));
+ check_disjoint(&mut indels.iter())
+}
+
+fn check_disjoint<'a, I>(indels: &mut I) -> bool
+where
+ I: std::iter::Iterator<Item = &'a Indel> + Clone,
+{
+ indels.clone().zip(indels.skip(1)).all(|(l, r)| l.delete.end() <= r.delete.start() || l == r)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{TextEdit, TextEditBuilder, TextRange};
+
+ fn range(start: u32, end: u32) -> TextRange {
+ TextRange::new(start.into(), end.into())
+ }
+
+ #[test]
+ fn test_apply() {
+ let mut text = "_11h1_2222_xx3333_4444_6666".to_string();
+ let mut builder = TextEditBuilder::default();
+ builder.replace(range(3, 4), "1".to_string());
+ builder.delete(range(11, 13));
+ builder.insert(22.into(), "_5555".to_string());
+
+ let text_edit = builder.finish();
+ text_edit.apply(&mut text);
+
+ assert_eq!(text, "_1111_2222_3333_4444_5555_6666")
+ }
+
+ #[test]
+ fn test_union() {
+ let mut edit1 = TextEdit::delete(range(7, 11));
+ let mut builder = TextEditBuilder::default();
+ builder.delete(range(1, 5));
+ builder.delete(range(13, 17));
+
+ let edit2 = builder.finish();
+ assert!(edit1.union(edit2).is_ok());
+ assert_eq!(edit1.indels.len(), 3);
+ }
+
+ #[test]
+ fn test_union_with_duplicates() {
+ let mut builder1 = TextEditBuilder::default();
+ builder1.delete(range(7, 11));
+ builder1.delete(range(13, 17));
+
+ let mut builder2 = TextEditBuilder::default();
+ builder2.delete(range(1, 5));
+ builder2.delete(range(13, 17));
+
+ let mut edit1 = builder1.finish();
+ let edit2 = builder2.finish();
+ assert!(edit1.union(edit2).is_ok());
+ assert_eq!(edit1.indels.len(), 3);
+ }
+
+ #[test]
+ fn test_union_panics() {
+ let mut edit1 = TextEdit::delete(range(7, 11));
+ let edit2 = TextEdit::delete(range(9, 13));
+ assert!(edit1.union(edit2).is_err());
+ }
+}
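
A minimal usage sketch of the `TextEdit` API above (assuming the `text-edit` and `text-size` crates as dependencies); all ranges refer to offsets in the original text, and the edit is applied back to front:

use text_edit::{TextEdit, TextRange, TextSize};

fn text_edit_example() {
    let mut text = String::from("fn foo() {}");

    // All ranges and offsets refer to the *original* text.
    let mut builder = TextEdit::builder();
    builder.replace(TextRange::new(3.into(), 6.into()), "bar".to_string());
    builder.insert(TextSize::of("fn foo() {"), "42".to_string());
    let edit = builder.finish();

    // `apply` walks the indels back to front so earlier offsets stay valid.
    edit.apply(&mut text);
    assert_eq!(text, "fn bar() {42}");
}
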
diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
new file mode 100644
index 000000000..7d3b9e09e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "toolchain"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+home = "0.5.3"
diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
new file mode 100644
index 000000000..b05da7691
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
@@ -0,0 +1,69 @@
+//! Discovery of `cargo` & `rustc` executables.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{env, iter, path::PathBuf};
+
+pub fn cargo() -> PathBuf {
+ get_path_for_executable("cargo")
+}
+
+pub fn rustc() -> PathBuf {
+ get_path_for_executable("rustc")
+}
+
+pub fn rustup() -> PathBuf {
+ get_path_for_executable("rustup")
+}
+
+pub fn rustfmt() -> PathBuf {
+ get_path_for_executable("rustfmt")
+}
+
+/// Return a `PathBuf` to use for the given executable.
+///
+/// E.g., `get_path_for_executable("cargo")` may return just `cargo` if that
+/// gives a valid Cargo executable; or it may return a full path to a valid
+/// Cargo.
+fn get_path_for_executable(executable_name: &'static str) -> PathBuf {
+ // The current implementation checks three places for an executable to use:
+ // 1) Appropriate environment variable (erroring if this is set but not a usable executable)
+ // example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
+ // 2) `<executable_name>`
+ // example: for cargo, this tries just `cargo`, which will succeed if `cargo` is on the $PATH
+ // 3) `~/.cargo/bin/<executable_name>`
+ // example: for cargo, this tries ~/.cargo/bin/cargo
+ // It seems that this is a reasonable place to try for cargo, rustc, and rustup
+ let env_var = executable_name.to_ascii_uppercase();
+ if let Some(path) = env::var_os(&env_var) {
+ return path.into();
+ }
+
+ if lookup_in_path(executable_name) {
+ return executable_name.into();
+ }
+
+ if let Some(mut path) = home::home_dir() {
+ path.push(".cargo");
+ path.push("bin");
+ path.push(executable_name);
+ if let Some(path) = probe(path) {
+ return path;
+ }
+ }
+
+ executable_name.into()
+}
+
+fn lookup_in_path(exec: &str) -> bool {
+ let paths = env::var_os("PATH").unwrap_or_default();
+ env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some()
+}
+
+fn probe(path: PathBuf) -> Option<PathBuf> {
+ let with_extension = match env::consts::EXE_EXTENSION {
+ "" => None,
+ it => Some(path.with_extension(it)),
+ };
+ iter::once(path).chain(with_extension).find(|it| it.is_file())
+}
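
A minimal sketch of using the discovery helpers above to invoke the located Cargo binary; the lookup order is the environment variable, then `$PATH`, then `~/.cargo/bin`, falling back to the bare name. The `cargo_version` helper is illustrative:

use std::process::Command;

// Run `cargo --version` with whichever Cargo binary was discovered.
fn cargo_version() -> std::io::Result<String> {
    let output = Command::new(toolchain::cargo()).arg("--version").output()?;
    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
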
diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml
new file mode 100644
index 000000000..52dfb8608
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "tt"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+smol_str = "0.1.23"
+
+stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
new file mode 100644
index 000000000..69226bd4c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
@@ -0,0 +1,231 @@
+//! Stateful iteration over token trees.
+//!
+//! We use this as the source of tokens for the parser.
+use crate::{Leaf, Subtree, TokenTree};
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+struct EntryId(usize);
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+struct EntryPtr(EntryId, usize);
+
+/// Internal type which is used instead of `TokenTree` to represent a token tree
+/// within a `TokenBuffer`.
+#[derive(Debug)]
+enum Entry<'t> {
+ // Mimicking types from proc-macro.
+ Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
+ Leaf(&'t TokenTree),
+ // End entries contain a pointer to the entry from the containing
+ // token tree, or None if this is the outermost level.
+ End(Option<EntryPtr>),
+}
+
+/// A token tree buffer.
+/// The safe version of `syn`'s [`TokenBuffer`](https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L41)
+#[derive(Debug)]
+pub struct TokenBuffer<'t> {
+ buffers: Vec<Box<[Entry<'t>]>>,
+}
+
+trait TokenList<'a> {
+ fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
+}
+
+impl<'a> TokenList<'a> for &'a [TokenTree] {
+ fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+ // Must contain everything in tokens and then the Entry::End
+ let start_capacity = self.len() + 1;
+ let mut entries = Vec::with_capacity(start_capacity);
+ let mut children = vec![];
+ for (idx, tt) in self.iter().enumerate() {
+ match tt {
+ TokenTree::Leaf(_) => {
+ entries.push(Entry::Leaf(tt));
+ }
+ TokenTree::Subtree(subtree) => {
+ entries.push(Entry::End(None));
+ children.push((idx, (subtree, Some(tt))));
+ }
+ }
+ }
+ (children, entries)
+ }
+}
+
+impl<'a> TokenList<'a> for &'a Subtree {
+ fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+ // Must contain everything in tokens and then the Entry::End
+ let mut entries = vec![];
+ let mut children = vec![];
+ entries.push(Entry::End(None));
+ children.push((0usize, (*self, None)));
+ (children, entries)
+ }
+}
+
+impl<'t> TokenBuffer<'t> {
+ pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
+ Self::new(tokens)
+ }
+
+ pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
+ Self::new(subtree)
+ }
+
+ fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
+ let mut buffers = vec![];
+ let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
+ assert_eq!(idx, 0);
+ TokenBuffer { buffers }
+ }
+
+ fn new_inner<T: TokenList<'t>>(
+ tokens: T,
+ buffers: &mut Vec<Box<[Entry<'t>]>>,
+ next: Option<EntryPtr>,
+ ) -> usize {
+ let (children, mut entries) = tokens.entries();
+
+ entries.push(Entry::End(next));
+ let res = buffers.len();
+ buffers.push(entries.into_boxed_slice());
+
+ for (child_idx, (subtree, tt)) in children {
+ let idx = TokenBuffer::new_inner(
+ subtree.token_trees.as_slice(),
+ buffers,
+ Some(EntryPtr(EntryId(res), child_idx + 1)),
+ );
+ buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
+ }
+
+ res
+ }
+
+ /// Creates a cursor referencing the first token in the buffer and able to
+ /// traverse until the end of the buffer.
+ pub fn begin(&self) -> Cursor<'_> {
+ Cursor::create(self, EntryPtr(EntryId(0), 0))
+ }
+
+ fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_>> {
+ let id = ptr.0;
+ self.buffers[id.0].get(ptr.1)
+ }
+}
+
+#[derive(Debug)]
+pub enum TokenTreeRef<'a> {
+ Subtree(&'a Subtree, Option<&'a TokenTree>),
+ Leaf(&'a Leaf, &'a TokenTree),
+}
+
+impl<'a> TokenTreeRef<'a> {
+ pub fn cloned(&self) -> TokenTree {
+ match &self {
+ TokenTreeRef::Subtree(subtree, tt) => match tt {
+ Some(it) => (*it).clone(),
+ None => (*subtree).clone().into(),
+ },
+ TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
+ }
+ }
+}
+
+/// A safe version of `Cursor` from `syn` crate <https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125>
+#[derive(Copy, Clone, Debug)]
+pub struct Cursor<'a> {
+ buffer: &'a TokenBuffer<'a>,
+ ptr: EntryPtr,
+}
+
+impl<'a> PartialEq for Cursor<'a> {
+ fn eq(&self, other: &Cursor<'_>) -> bool {
+ self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer)
+ }
+}
+
+impl<'a> Eq for Cursor<'a> {}
+
+impl<'a> Cursor<'a> {
+ /// Check whether the cursor is at EOF.
+ pub fn eof(self) -> bool {
+ matches!(self.buffer.entry(&self.ptr), None | Some(Entry::End(None)))
+ }
+
+ /// If the cursor is pointing at the end of a subtree, returns
+ /// the parent subtree
+ pub fn end(self) -> Option<&'a Subtree> {
+ match self.entry() {
+ Some(Entry::End(Some(ptr))) => {
+ let idx = ptr.1;
+ if let Some(Entry::Subtree(_, subtree, _)) =
+ self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
+ {
+ return Some(subtree);
+ }
+ None
+ }
+ _ => None,
+ }
+ }
+
+ fn entry(self) -> Option<&'a Entry<'a>> {
+ self.buffer.entry(&self.ptr)
+ }
+
+ /// If the cursor is pointing at a `Subtree`, returns
+ /// a cursor into that subtree
+ pub fn subtree(self) -> Option<Cursor<'a>> {
+ match self.entry() {
+ Some(Entry::Subtree(_, _, entry_id)) => {
+ Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
+ }
+ _ => None,
+ }
+ }
+
+ /// If the cursor is pointing at a `TokenTree`, returns it
+ pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
+ match self.entry() {
+ Some(Entry::Leaf(tt)) => match tt {
+ TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
+ TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
+ },
+ Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
+ Some(Entry::End(_)) | None => None,
+ }
+ }
+
+ fn create(buffer: &'a TokenBuffer<'_>, ptr: EntryPtr) -> Cursor<'a> {
+ Cursor { buffer, ptr }
+ }
+
+ /// Bump the cursor
+ pub fn bump(self) -> Cursor<'a> {
+ if let Some(Entry::End(exit)) = self.buffer.entry(&self.ptr) {
+ match exit {
+ Some(exit) => Cursor::create(self.buffer, *exit),
+ None => self,
+ }
+ } else {
+ Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1))
+ }
+ }
+
+ /// If the cursor is pointing at a `Subtree`, returns a cursor into that
+ /// subtree; otherwise bumps the cursor.
+ pub fn bump_subtree(self) -> Cursor<'a> {
+ match self.entry() {
+ Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
+ _ => self.bump(),
+ }
+ }
+
+ /// Check whether the cursor is at the top level.
+ pub fn is_root(&self) -> bool {
+ let entry_id = self.ptr.0;
+ entry_id.0 == 0
+ }
+}
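
A minimal sketch of traversing a token tree with the cursor API above (assuming the `tt` crate as a dependency); `leaf_texts` is an illustrative helper collecting the text of every leaf, depth first:

use tt::buffer::{TokenBuffer, TokenTreeRef};
use tt::Subtree;

fn leaf_texts(subtree: &Subtree) -> Vec<String> {
    let buffer = TokenBuffer::from_subtree(subtree);
    let mut cursor = buffer.begin();
    let mut out = Vec::new();
    while !cursor.eof() {
        if let Some(TokenTreeRef::Leaf(leaf, _)) = cursor.token_tree() {
            out.push(leaf.to_string());
        }
        // `bump_subtree` descends into subtrees; at an `End` entry, `bump`
        // follows the stored pointer back out to the containing tree.
        cursor = cursor.bump_subtree();
    }
    out
}
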
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
new file mode 100644
index 000000000..a54861de9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -0,0 +1,322 @@
+//! The `tt` crate defines a `TokenTree` data structure: this is the interface (both
+//! input and output) of macros. It closely mirrors `proc_macro` crate's
+//! `TokenTree`.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::fmt;
+
+use stdx::impl_from;
+
+pub use smol_str::SmolStr;
+
+/// Represents identity of the token.
+///
+/// For hygiene purposes, we need to track which expanded tokens originated from
+//! which source tokens. We do it by assigning a distinct identity to each
+/// source token and making sure that identities are preserved during macro
+/// expansion.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl TokenId {
+ pub const fn unspecified() -> TokenId {
+ TokenId(!0)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TokenTree {
+ Leaf(Leaf),
+ Subtree(Subtree),
+}
+impl_from!(Leaf, Subtree for TokenTree);
+
+impl TokenTree {
+ pub fn empty() -> Self {
+ TokenTree::Subtree(Subtree::default())
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Leaf {
+ Literal(Literal),
+ Punct(Punct),
+ Ident(Ident),
+}
+impl_from!(Literal, Punct, Ident for Leaf);
+
+#[derive(Clone, PartialEq, Eq, Hash, Default)]
+pub struct Subtree {
+ pub delimiter: Option<Delimiter>,
+ pub token_trees: Vec<TokenTree>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Delimiter {
+ pub id: TokenId,
+ pub kind: DelimiterKind,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum DelimiterKind {
+ Parenthesis,
+ Brace,
+ Bracket,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+ pub text: SmolStr,
+ pub id: TokenId,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Punct {
+ pub char: char,
+ pub spacing: Spacing,
+ pub id: TokenId,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum Spacing {
+ Alone,
+ Joint,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+ pub text: SmolStr,
+ pub id: TokenId,
+}
+
+impl Leaf {
+ pub fn id(&self) -> TokenId {
+ match self {
+ Leaf::Literal(l) => l.id,
+ Leaf::Punct(p) => p.id,
+ Leaf::Ident(i) => i.id,
+ }
+ }
+}
+
+fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result {
+ let align = " ".repeat(level);
+
+ let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) {
+ None => "$".to_string(),
+ Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id),
+ Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id),
+ Some((DelimiterKind::Bracket, id)) => format!("[] {}", id),
+ };
+
+ if subtree.token_trees.is_empty() {
+ write!(f, "{}SUBTREE {}", align, aux)?;
+ } else {
+ writeln!(f, "{}SUBTREE {}", align, aux)?;
+ for (idx, child) in subtree.token_trees.iter().enumerate() {
+ print_debug_token(f, child, level + 1)?;
+ if idx != subtree.token_trees.len() - 1 {
+ writeln!(f)?;
+ }
+ }
+ }
+
+ Ok(())
+}
+
+fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) -> fmt::Result {
+ let align = " ".repeat(level);
+
+ match tkn {
+ TokenTree::Leaf(leaf) => match leaf {
+ Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?,
+ Leaf::Punct(punct) => write!(
+ f,
+ "{}PUNCH {} [{}] {}",
+ align,
+ punct.char,
+ if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
+ punct.id.0
+ )?,
+ Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?,
+ },
+ TokenTree::Subtree(subtree) => {
+ print_debug_subtree(f, subtree, level)?;
+ }
+ }
+
+ Ok(())
+}
+
+impl fmt::Debug for Subtree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ print_debug_subtree(f, self, 0)
+ }
+}
+
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
+ TokenTree::Subtree(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl fmt::Display for Subtree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let (l, r) = match self.delimiter_kind() {
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
+ None => ("", ""),
+ };
+ f.write_str(l)?;
+ let mut needs_space = false;
+ for tt in &self.token_trees {
+ if needs_space {
+ f.write_str(" ")?;
+ }
+ needs_space = true;
+ match tt {
+ TokenTree::Leaf(Leaf::Punct(p)) => {
+ needs_space = p.spacing == Spacing::Alone;
+ fmt::Display::fmt(p, f)?;
+ }
+ tt => fmt::Display::fmt(tt, f)?,
+ }
+ }
+ f.write_str(r)?;
+ Ok(())
+ }
+}
+
+impl fmt::Display for Leaf {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Leaf::Ident(it) => fmt::Display::fmt(it, f),
+ Leaf::Literal(it) => fmt::Display::fmt(it, f),
+ Leaf::Punct(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.text, f)
+ }
+}
+
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.text, f)
+ }
+}
+
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.char, f)
+ }
+}
+
+impl Subtree {
+ /// Count the number of tokens recursively
+ pub fn count(&self) -> usize {
+ let children_count = self
+ .token_trees
+ .iter()
+ .map(|c| match c {
+ TokenTree::Subtree(c) => c.count(),
+ TokenTree::Leaf(_) => 0,
+ })
+ .sum::<usize>();
+
+ self.token_trees.len() + children_count
+ }
+
+ pub fn delimiter_kind(&self) -> Option<DelimiterKind> {
+ self.delimiter.map(|it| it.kind)
+ }
+}
+
+impl Subtree {
+ /// A simple line string used for debugging
+ pub fn as_debug_string(&self) -> String {
+ let delim = match self.delimiter_kind() {
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ None => (" ", " "),
+ };
+
+ let mut res = String::new();
+ res.push_str(delim.0);
+ let mut last = None;
+ for child in &self.token_trees {
+ let s = match child {
+ TokenTree::Leaf(it) => {
+ let s = match it {
+ Leaf::Literal(it) => it.text.to_string(),
+ Leaf::Punct(it) => it.char.to_string(),
+ Leaf::Ident(it) => it.text.to_string(),
+ };
+ match (it, last) {
+ (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => {
+ " ".to_string() + &s
+ }
+ (Leaf::Punct(_), Some(&TokenTree::Leaf(Leaf::Punct(punct)))) => {
+ if punct.spacing == Spacing::Alone {
+ " ".to_string() + &s
+ } else {
+ s
+ }
+ }
+ _ => s,
+ }
+ }
+ TokenTree::Subtree(it) => it.as_debug_string(),
+ };
+ res.push_str(&s);
+ last = Some(child);
+ }
+
+ res.push_str(delim.1);
+ res
+ }
+}
+
+pub mod buffer;
+
+pub fn pretty(tkns: &[TokenTree]) -> String {
+ fn tokentree_to_text(tkn: &TokenTree) -> String {
+ match tkn {
+ TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(),
+ TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(),
+ TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char),
+ TokenTree::Subtree(subtree) => {
+ let content = pretty(&subtree.token_trees);
+ let (open, close) = match subtree.delimiter.map(|it| it.kind) {
+ None => ("", ""),
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
+ };
+ format!("{}{}{}", open, content, close)
+ }
+ }
+ }
+
+ tkns.iter()
+ .fold((String::new(), true), |(last, last_to_joint), tkn| {
+ let s = [last, tokentree_to_text(tkn)].join(if last_to_joint { "" } else { " " });
+ let mut is_joint = false;
+ if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn {
+ if punct.spacing == Spacing::Joint {
+ is_joint = true;
+ }
+ }
+ (s, is_joint)
+ })
+ .0
+}
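
A minimal sketch of building and rendering a small token tree with the types above, assuming the `tt` crate as a dependency. The tree renders as `foo (1)` because `Display` inserts a space after non-punct leaves:

use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Subtree, TokenId, TokenTree};

fn token_tree_example() {
    let args = Subtree {
        delimiter: Some(Delimiter { id: TokenId::unspecified(), kind: DelimiterKind::Parenthesis }),
        token_trees: vec![TokenTree::Leaf(Leaf::Literal(Literal {
            text: "1".into(),
            id: TokenId::unspecified(),
        }))],
    };
    let call = Subtree {
        delimiter: None,
        token_trees: vec![
            TokenTree::Leaf(Leaf::Ident(Ident { text: "foo".into(), id: TokenId::unspecified() })),
            TokenTree::Subtree(args),
        ],
    };

    // `Display` re-inserts spaces based on `Spacing`; `count` is recursive.
    assert_eq!(call.to_string(), "foo (1)");
    assert_eq!(call.count(), 3);
}
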
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
new file mode 100644
index 000000000..9ee4415dc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "vfs-notify"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+tracing = "0.1.35"
+jod-thread = "0.1.2"
+walkdir = "2.3.2"
+crossbeam-channel = "0.5.5"
+notify = "=5.0.0-pre.15"
+
+vfs = { path = "../vfs", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
new file mode 100644
index 000000000..4d33a9afb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
@@ -0,0 +1,234 @@
+//! An implementation of `loader::Handle`, based on `walkdir` and `notify`.
+//!
+//! The file watching bits here are untested and quite probably buggy. For this
+//! reason, by default we don't watch files and rely on the editor's file watching
+//! capabilities.
+//!
+//! Hopefully, one day a reliable file watching/walking crate appears on
+//! crates.io, and we can reduce this to trivial glue code.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::fs;
+
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use notify::{RecommendedWatcher, RecursiveMode, Watcher};
+use paths::{AbsPath, AbsPathBuf};
+use vfs::loader;
+use walkdir::WalkDir;
+
+#[derive(Debug)]
+pub struct NotifyHandle {
+ // Relative order of fields below is significant.
+ sender: Sender<Message>,
+ _thread: jod_thread::JoinHandle,
+}
+
+#[derive(Debug)]
+enum Message {
+ Config(loader::Config),
+ Invalidate(AbsPathBuf),
+}
+
+impl loader::Handle for NotifyHandle {
+ fn spawn(sender: loader::Sender) -> NotifyHandle {
+ let actor = NotifyActor::new(sender);
+ let (sender, receiver) = unbounded::<Message>();
+ let thread = jod_thread::Builder::new()
+ .name("VfsLoader".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
+ NotifyHandle { sender, _thread: thread }
+ }
+ fn set_config(&mut self, config: loader::Config) {
+ self.sender.send(Message::Config(config)).unwrap();
+ }
+ fn invalidate(&mut self, path: AbsPathBuf) {
+ self.sender.send(Message::Invalidate(path)).unwrap();
+ }
+ fn load_sync(&mut self, path: &AbsPath) -> Option<Vec<u8>> {
+ read(path)
+ }
+}
+
+type NotifyEvent = notify::Result<notify::Event>;
+
+struct NotifyActor {
+ sender: loader::Sender,
+ watched_entries: Vec<loader::Entry>,
+ // Drop order is significant.
+ watcher: Option<(RecommendedWatcher, Receiver<NotifyEvent>)>,
+}
+
+#[derive(Debug)]
+enum Event {
+ Message(Message),
+ NotifyEvent(NotifyEvent),
+}
+
+impl NotifyActor {
+ fn new(sender: loader::Sender) -> NotifyActor {
+ NotifyActor { sender, watched_entries: Vec::new(), watcher: None }
+ }
+ fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> {
+ let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver);
+ select! {
+ recv(receiver) -> it => it.ok().map(Event::Message),
+ recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())),
+ }
+ }
+ fn run(mut self, inbox: Receiver<Message>) {
+ while let Some(event) = self.next_event(&inbox) {
+ tracing::debug!("vfs-notify event: {:?}", event);
+ match event {
+ Event::Message(msg) => match msg {
+ Message::Config(config) => {
+ self.watcher = None;
+ if !config.watch.is_empty() {
+ let (watcher_sender, watcher_receiver) = unbounded();
+ let watcher = log_notify_error(RecommendedWatcher::new(move |event| {
+ watcher_sender.send(event).unwrap();
+ }));
+ self.watcher = watcher.map(|it| (it, watcher_receiver));
+ }
+
+ let config_version = config.version;
+
+ let n_total = config.load.len();
+ self.send(loader::Message::Progress { n_total, n_done: 0, config_version });
+
+ self.watched_entries.clear();
+
+ for (i, entry) in config.load.into_iter().enumerate() {
+ let watch = config.watch.contains(&i);
+ if watch {
+ self.watched_entries.push(entry.clone());
+ }
+ let files = self.load_entry(entry, watch);
+ self.send(loader::Message::Loaded { files });
+ self.send(loader::Message::Progress {
+ n_total,
+ n_done: i + 1,
+ config_version,
+ });
+ }
+ }
+ Message::Invalidate(path) => {
+ let contents = read(path.as_path());
+ let files = vec![(path, contents)];
+ self.send(loader::Message::Loaded { files });
+ }
+ },
+ Event::NotifyEvent(event) => {
+ if let Some(event) = log_notify_error(event) {
+ let files = event
+ .paths
+ .into_iter()
+ .map(|path| AbsPathBuf::try_from(path).unwrap())
+ .filter_map(|path| {
+ let meta = fs::metadata(&path).ok()?;
+ if meta.file_type().is_dir()
+ && self
+ .watched_entries
+ .iter()
+ .any(|entry| entry.contains_dir(&path))
+ {
+ self.watch(path);
+ return None;
+ }
+
+ if !meta.file_type().is_file() {
+ return None;
+ }
+ if !self
+ .watched_entries
+ .iter()
+ .any(|entry| entry.contains_file(&path))
+ {
+ return None;
+ }
+
+ let contents = read(&path);
+ Some((path, contents))
+ })
+ .collect();
+ self.send(loader::Message::Loaded { files });
+ }
+ }
+ }
+ }
+ }
+ fn load_entry(
+ &mut self,
+ entry: loader::Entry,
+ watch: bool,
+ ) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> {
+ match entry {
+ loader::Entry::Files(files) => files
+ .into_iter()
+ .map(|file| {
+ if watch {
+ self.watch(file.clone());
+ }
+ let contents = read(file.as_path());
+ (file, contents)
+ })
+ .collect::<Vec<_>>(),
+ loader::Entry::Directories(dirs) => {
+ let mut res = Vec::new();
+
+ for root in &dirs.include {
+ let walkdir =
+ WalkDir::new(root).follow_links(true).into_iter().filter_entry(|entry| {
+ if !entry.file_type().is_dir() {
+ return true;
+ }
+ let path = AbsPath::assert(entry.path());
+ root == path
+ || dirs.exclude.iter().chain(&dirs.include).all(|it| it != path)
+ });
+
+ let files = walkdir.filter_map(|it| it.ok()).filter_map(|entry| {
+ let is_dir = entry.file_type().is_dir();
+ let is_file = entry.file_type().is_file();
+ let abs_path = AbsPathBuf::assert(entry.into_path());
+ if is_dir && watch {
+ self.watch(abs_path.clone());
+ }
+ if !is_file {
+ return None;
+ }
+ let ext = abs_path.extension().unwrap_or_default();
+ if dirs.extensions.iter().all(|it| it.as_str() != ext) {
+ return None;
+ }
+ Some(abs_path)
+ });
+
+ res.extend(files.map(|file| {
+ let contents = read(file.as_path());
+ (file, contents)
+ }));
+ }
+ res
+ }
+ }
+ }
+
+ fn watch(&mut self, path: AbsPathBuf) {
+ if let Some((watcher, _)) = &mut self.watcher {
+ log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive));
+ }
+ }
+ fn send(&mut self, msg: loader::Message) {
+ (self.sender)(msg);
+ }
+}
+
+fn read(path: &AbsPath) -> Option<Vec<u8>> {
+ std::fs::read(path).ok()
+}
+
+fn log_notify_error<T>(res: notify::Result<T>) -> Option<T> {
+ res.map_err(|err| tracing::warn!("notify error: {}", err)).ok()
+}
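
A minimal sketch of driving `NotifyHandle` through the `loader::Handle` trait (defined in the `vfs` crate below), assuming `vfs`, `vfs-notify` and `crossbeam-channel` as dependencies; `project_root` is an illustrative argument:

use crossbeam_channel::unbounded;
use vfs::loader::{self, Handle};
use vfs_notify::NotifyHandle;

fn load_project(project_root: vfs::AbsPathBuf) {
    let (sender, receiver) = unbounded::<loader::Message>();
    let mut handle = NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));

    handle.set_config(loader::Config {
        version: 0,
        load: vec![loader::Entry::local_cargo_package(project_root)],
        watch: vec![0], // watch the single entry at index 0
    });

    // Progress messages are interleaved with `Loaded` batches of (path, contents).
    for msg in &receiver {
        if let loader::Message::Loaded { files } = msg {
            eprintln!("loaded {} files", files.len());
            break;
        }
    }
}
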
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
new file mode 100644
index 000000000..c63773487
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "vfs"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+fst = "0.4.7"
+
+paths = { path = "../paths", version = "0.0.0" }
+indexmap = "1.9.1"
diff --git a/src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs b/src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs
new file mode 100644
index 000000000..db15a2a21
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/anchored_path.rs
@@ -0,0 +1,49 @@
+//! Analysis-level representation of file-system paths.
+//!
+//! The primary goal of this is to losslessly represent paths like
+//!
+//! ```
+//! #[path = "./bar.rs"]
+//! mod foo;
+//! ```
+//!
+//! The first approach one might reach for is to use `PathBuf`. The problem here
+//! is that `PathBuf` depends on the host target (Windows or Linux), but
+//! rust-analyzer should be able to process `#[path = r"C:\bar.rs"]` on Unix.
+//!
+//! The second try is to use a `String`. This also fails, however. Consider a
+//! hypothetical scenario, where rust-analyzer operates in a
+//! networked/distributed mode. There's one global instance of rust-analyzer,
+//! which processes requests from different machines. Now, the semantics of
+//! `#[path = "/abs/path.rs"]` actually depends on which file-system we are at!
+//! That is, even absolute paths exist relative to a file system!
+//!
+//! A more realistic scenario here is virtual VFS paths we use for testing. More
+//! generally, there can be separate "universes" of VFS paths.
+//!
+//! That's why we use an anchored representation -- each path carries info about
+//! the file this path originates from. We can then fetch fs/"universe" information
+//! from the anchor.
+use crate::FileId;
+
+/// Path relative to a file.
+///
+/// Owned version of [`AnchoredPath`].
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct AnchoredPathBuf {
+ /// File that this path is relative to.
+ pub anchor: FileId,
+ /// Path relative to `anchor`'s containing directory.
+ pub path: String,
+}
+
+/// Path relative to a file.
+///
+/// Borrowed version of [`AnchoredPathBuf`].
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub struct AnchoredPath<'a> {
+ /// File that this path is relative to.
+ pub anchor: FileId,
+ /// Path relative to `anchor`'s containing directory.
+ pub path: &'a str,
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/file_set.rs b/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
new file mode 100644
index 000000000..6a89263e5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
@@ -0,0 +1,218 @@
+//! Partitions a list of files into disjoint subsets.
+//!
+//! Files which do not belong to any explicitly configured `FileSet` belong to
+//! the default `FileSet`.
+use std::fmt;
+
+use fst::{IntoStreamer, Streamer};
+use rustc_hash::FxHashMap;
+
+use crate::{AnchoredPath, FileId, Vfs, VfsPath};
+
+/// A set of [`VfsPath`]s identified by [`FileId`]s.
+#[derive(Default, Clone, Eq, PartialEq)]
+pub struct FileSet {
+ files: FxHashMap<VfsPath, FileId>,
+ paths: FxHashMap<FileId, VfsPath>,
+}
+
+impl FileSet {
+ /// Returns the number of stored paths.
+ pub fn len(&self) -> usize {
+ self.files.len()
+ }
+
+ /// Get the id of the file corresponding to `path`.
+ ///
+ /// If either `path`'s [`anchor`](AnchoredPath::anchor) or the resolved path is not in
+ /// the set, returns [`None`].
+ pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+ let mut base = self.paths[&path.anchor].clone();
+ base.pop();
+ let path = base.join(path.path)?;
+ self.files.get(&path).copied()
+ }
+
+ /// Get the id corresponding to `path` if it exists in the set.
+ pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
+ self.files.get(path)
+ }
+
+ /// Get the path corresponding to `file` if it exists in the set.
+ pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
+ self.paths.get(file)
+ }
+
+ /// Insert the `file_id, path` pair into the set.
+ ///
+ /// # Note
+ /// Multiple [`FileId`]s can be mapped to the same [`VfsPath`], and vice versa.
+ pub fn insert(&mut self, file_id: FileId, path: VfsPath) {
+ self.files.insert(path.clone(), file_id);
+ self.paths.insert(file_id, path);
+ }
+
+ /// Iterate over this set's ids.
+ pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
+ self.paths.keys().copied()
+ }
+}
+
+impl fmt::Debug for FileSet {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("FileSet").field("n_files", &self.files.len()).finish()
+ }
+}
+
+/// This contains path prefixes to partition a [`Vfs`] into [`FileSet`]s.
+///
+/// # Example
+/// ```rust
+/// # use vfs::{file_set::FileSetConfigBuilder, VfsPath, Vfs};
+/// let mut builder = FileSetConfigBuilder::default();
+/// builder.add_file_set(vec![VfsPath::new_virtual_path("/src".to_string())]);
+/// let config = builder.build();
+/// let mut file_system = Vfs::default();
+/// file_system.set_file_contents(VfsPath::new_virtual_path("/src/main.rs".to_string()), Some(vec![]));
+/// file_system.set_file_contents(VfsPath::new_virtual_path("/src/lib.rs".to_string()), Some(vec![]));
+/// file_system.set_file_contents(VfsPath::new_virtual_path("/build.rs".to_string()), Some(vec![]));
+/// // contains the sets:
+/// // { "/src/main.rs", "/src/lib.rs" }
+/// // { "/build.rs" }
+/// let sets = config.partition(&file_system);
+/// ```
+#[derive(Debug)]
+pub struct FileSetConfig {
+ /// Number of sets that `self` can partition a [`Vfs`] into.
+ ///
+ /// This should be the number of sets in `self.map` + 1 for files that don't fit in any
+ /// defined set.
+ n_file_sets: usize,
+ /// Map from encoded paths to the set they belong to.
+ map: fst::Map<Vec<u8>>,
+}
+
+impl Default for FileSetConfig {
+ fn default() -> Self {
+ FileSetConfig::builder().build()
+ }
+}
+
+impl FileSetConfig {
+ /// Returns a builder for `FileSetConfig`.
+ pub fn builder() -> FileSetConfigBuilder {
+ FileSetConfigBuilder::default()
+ }
+
+ /// Partition `vfs` into `FileSet`s.
+ ///
+ /// Creates a new [`FileSet`] for every set of prefixes in `self`.
+ pub fn partition(&self, vfs: &Vfs) -> Vec<FileSet> {
+ let mut scratch_space = Vec::new();
+ let mut res = vec![FileSet::default(); self.len()];
+ for (file_id, path) in vfs.iter() {
+ let root = self.classify(path, &mut scratch_space);
+ res[root].insert(file_id, path.clone());
+ }
+ res
+ }
+
+ /// Number of sets that `self` can partition a [`Vfs`] into.
+ fn len(&self) -> usize {
+ self.n_file_sets
+ }
+
+ /// Returns the set index for the given `path`.
+ ///
+ /// `scratch_space` is used as a buffer and will be entirely replaced.
+ fn classify(&self, path: &VfsPath, scratch_space: &mut Vec<u8>) -> usize {
+ scratch_space.clear();
+ path.encode(scratch_space);
+ let automaton = PrefixOf::new(scratch_space.as_slice());
+ let mut longest_prefix = self.len() - 1;
+ let mut stream = self.map.search(automaton).into_stream();
+ while let Some((_, v)) = stream.next() {
+ longest_prefix = v as usize;
+ }
+ longest_prefix
+ }
+}
+
+/// Builder for [`FileSetConfig`].
+pub struct FileSetConfigBuilder {
+ roots: Vec<Vec<VfsPath>>,
+}
+
+impl Default for FileSetConfigBuilder {
+ fn default() -> Self {
+ FileSetConfigBuilder { roots: Vec::new() }
+ }
+}
+
+impl FileSetConfigBuilder {
+ /// Returns the number of sets currently held.
+ pub fn len(&self) -> usize {
+ self.roots.len()
+ }
+
+ /// Add a new set of path prefixes.
+ pub fn add_file_set(&mut self, roots: Vec<VfsPath>) {
+ self.roots.push(roots);
+ }
+
+ /// Build the `FileSetConfig`.
+ pub fn build(self) -> FileSetConfig {
+ let n_file_sets = self.roots.len() + 1;
+ let map = {
+ let mut entries = Vec::new();
+ for (i, paths) in self.roots.into_iter().enumerate() {
+ for p in paths {
+ let mut buf = Vec::new();
+ p.encode(&mut buf);
+ entries.push((buf, i as u64));
+ }
+ }
+ entries.sort();
+ entries.dedup_by(|(a, _), (b, _)| a == b);
+ fst::Map::from_iter(entries).unwrap()
+ };
+ FileSetConfig { n_file_sets, map }
+ }
+}
+
+/// Implements [`fst::Automaton`]
+///
+/// It will match if `prefix_of` is a prefix of the given data.
+struct PrefixOf<'a> {
+ prefix_of: &'a [u8],
+}
+
+impl<'a> PrefixOf<'a> {
+ /// Creates a new `PrefixOf` from the given slice.
+ fn new(prefix_of: &'a [u8]) -> Self {
+ Self { prefix_of }
+ }
+}
+
+impl fst::Automaton for PrefixOf<'_> {
+ type State = usize;
+ fn start(&self) -> usize {
+ 0
+ }
+ fn is_match(&self, &state: &usize) -> bool {
+ state != !0
+ }
+ fn can_match(&self, &state: &usize) -> bool {
+ state != !0
+ }
+ fn accept(&self, &state: &usize, byte: u8) -> usize {
+ if self.prefix_of.get(state) == Some(&byte) {
+ state + 1
+ } else {
+ !0
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests;
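
A minimal sketch of resolving a file-relative path (as a `#[path = "..."]` attribute would) inside a `FileSet`, assuming the `vfs` crate as a dependency and illustrative virtual paths:

use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};

fn resolve_sibling_example() {
    let mut set = FileSet::default();
    set.insert(FileId(0), VfsPath::new_virtual_path("/foo/main.rs".to_string()));
    set.insert(FileId(1), VfsPath::new_virtual_path("/foo/bar.rs".to_string()));

    // "bar.rs" is resolved relative to the directory containing the anchor file.
    let resolved = set.resolve_path(AnchoredPath { anchor: FileId(0), path: "bar.rs" });
    assert_eq!(resolved, Some(FileId(1)));
}
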
diff --git a/src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs b/src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs
new file mode 100644
index 000000000..2146df185
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/file_set/tests.rs
@@ -0,0 +1,42 @@
+use super::*;
+
+#[test]
+fn path_prefix() {
+ let mut file_set = FileSetConfig::builder();
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo".into())]);
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo/bar/baz".into())]);
+ let file_set = file_set.build();
+
+ let mut vfs = Vfs::default();
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/src/lib.rs".into()), Some(Vec::new()));
+ vfs.set_file_contents(
+ VfsPath::new_virtual_path("/foo/src/bar/baz/lib.rs".into()),
+ Some(Vec::new()),
+ );
+ vfs.set_file_contents(
+ VfsPath::new_virtual_path("/foo/bar/baz/lib.rs".into()),
+ Some(Vec::new()),
+ );
+ vfs.set_file_contents(VfsPath::new_virtual_path("/quux/lib.rs".into()), Some(Vec::new()));
+
+ let partition = file_set.partition(&vfs).into_iter().map(|it| it.len()).collect::<Vec<_>>();
+ assert_eq!(partition, vec![2, 1, 1]);
+}
+
+#[test]
+fn name_prefix() {
+ let mut file_set = FileSetConfig::builder();
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo".into())]);
+ file_set.add_file_set(vec![VfsPath::new_virtual_path("/foo-things".into())]);
+ let file_set = file_set.build();
+
+ let mut vfs = Vfs::default();
+ vfs.set_file_contents(VfsPath::new_virtual_path("/foo/src/lib.rs".into()), Some(Vec::new()));
+ vfs.set_file_contents(
+ VfsPath::new_virtual_path("/foo-things/src/lib.rs".into()),
+ Some(Vec::new()),
+ );
+
+ let partition = file_set.partition(&vfs).into_iter().map(|it| it.len()).collect::<Vec<_>>();
+ assert_eq!(partition, vec![1, 1, 0]);
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
new file mode 100644
index 000000000..10fae41d0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
@@ -0,0 +1,221 @@
+//! # Virtual File System
+//!
+//! VFS stores all files read by rust-analyzer. Reading file contents from VFS
+//! always returns the same contents, unless VFS was explicitly modified with
+//! [`set_file_contents`]. All changes to VFS are logged, and can be retrieved via
+//! [`take_changes`] method. The pack of changes is then pushed to `salsa` and
+//! triggers incremental recomputation.
+//!
+//! Files in VFS are identified with [`FileId`]s -- interned paths. The notion of
+//! the path, [`VfsPath`] is somewhat abstract: at the moment, it is represented
+//! as an [`std::path::PathBuf`] internally, but this is an implementation detail.
+//!
+//! VFS doesn't do IO or file watching itself. For that, see the [`loader`]
+//! module. [`loader::Handle`] is an object-safe trait which abstracts both file
+//! loading and file watching. [`Handle`] is dynamically configured with a set of
+//! directory entries which should be scanned and watched. [`Handle`] then
+//! asynchronously pushes file changes. Directory entries are configured
+//! free-form via a list of globs; it's up to the [`Handle`] to interpret the globs
+//! in any specific way.
+//!
+//! VFS stores a flat list of files. [`file_set::FileSet`] can partition this list
+//! of files into disjoint sets of files. Traversal-like operations (including
+//! getting the neighbor file by the relative path) are handled by the [`FileSet`].
+//! [`FileSet`]s are also pushed to salsa and cause it to re-check `mod foo;`
+//! declarations when files are created or deleted.
+//!
+//! [`FileSet`] and [`loader::Entry`] play similar, but different roles.
+//! Both specify the "set of paths/files"; one is geared towards file watching,
+//! the other towards salsa changes. In particular, a single [`FileSet`]
+//! may correspond to several [`loader::Entry`] values. For example, a crate from
+//! crates.io which uses code generation would have two [`Entries`] -- for sources
+//! in `~/.cargo`, and for generated code in `./target/debug/build`. It will
+//! have a single [`FileSet`] which unions the two sources.
+//!
+//! [`set_file_contents`]: Vfs::set_file_contents
+//! [`take_changes`]: Vfs::take_changes
+//! [`FileSet`]: file_set::FileSet
+//! [`Handle`]: loader::Handle
+//! [`Entries`]: loader::Entry
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod anchored_path;
+pub mod file_set;
+pub mod loader;
+mod path_interner;
+mod vfs_path;
+
+use std::{fmt, mem};
+
+use crate::path_interner::PathInterner;
+
+pub use crate::{
+ anchored_path::{AnchoredPath, AnchoredPathBuf},
+ vfs_path::VfsPath,
+};
+pub use paths::{AbsPath, AbsPathBuf};
+
+/// Handle to a file in [`Vfs`]
+///
+/// Most functions in rust-analyzer use this when they need to refer to a file.
+#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct FileId(pub u32);
+
+/// Storage for all files read by rust-analyzer.
+///
+/// For more information, see the [crate-level](crate) documentation.
+#[derive(Default)]
+pub struct Vfs {
+ interner: PathInterner,
+ data: Vec<Option<Vec<u8>>>,
+ changes: Vec<ChangedFile>,
+}
+
+/// Changed file in the [`Vfs`].
+pub struct ChangedFile {
+ /// Id of the changed file
+ pub file_id: FileId,
+ /// Kind of change
+ pub change_kind: ChangeKind,
+}
+
+impl ChangedFile {
+ /// Returns `true` if the change is not [`Delete`](ChangeKind::Delete).
+ pub fn exists(&self) -> bool {
+ self.change_kind != ChangeKind::Delete
+ }
+
+ /// Returns `true` if the change is [`Create`](ChangeKind::Create) or
+ /// [`Delete`](ChangeKind::Delete).
+ pub fn is_created_or_deleted(&self) -> bool {
+ matches!(self.change_kind, ChangeKind::Create | ChangeKind::Delete)
+ }
+}
+
+/// Kind of [file change](ChangedFile).
+#[derive(Eq, PartialEq, Copy, Clone, Debug)]
+pub enum ChangeKind {
+ /// The file was (re-)created
+ Create,
+ /// The file was modified
+ Modify,
+ /// The file was deleted
+ Delete,
+}
+
+impl Vfs {
+ /// Number of files currently stored.
+ ///
+ /// Note that this includes deleted files.
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ /// Id of the given path if it exists in the `Vfs` and is not deleted.
+ pub fn file_id(&self, path: &VfsPath) -> Option<FileId> {
+ self.interner.get(path).filter(|&it| self.get(it).is_some())
+ }
+
+ /// File path corresponding to the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the id is not present in the `Vfs`.
+ pub fn file_path(&self, file_id: FileId) -> VfsPath {
+ self.interner.lookup(file_id).clone()
+ }
+
+ /// File content corresponding to the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the id is not present in the `Vfs`, or if the corresponding file is
+ /// deleted.
+ pub fn file_contents(&self, file_id: FileId) -> &[u8] {
+ self.get(file_id).as_deref().unwrap()
+ }
+
+ /// Returns an iterator over the stored ids and their corresponding paths.
+ ///
+ /// This will skip deleted files.
+ pub fn iter(&self) -> impl Iterator<Item = (FileId, &VfsPath)> + '_ {
+ (0..self.data.len())
+ .map(|it| FileId(it as u32))
+ .filter(move |&file_id| self.get(file_id).is_some())
+ .map(move |file_id| {
+ let path = self.interner.lookup(file_id);
+ (file_id, path)
+ })
+ }
+
+ /// Update the `path` with the given `contents`. `None` means the file was deleted.
+ ///
+ /// Returns `true` if the file was modified, and saves the [change](ChangedFile).
+ ///
+ /// If the path does not currently exist in the `Vfs`, allocates a new
+ /// [`FileId`] for it.
+ pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool {
+ let file_id = self.alloc_file_id(path);
+ let change_kind = match (&self.get(file_id), &contents) {
+ (None, None) => return false,
+ (None, Some(_)) => ChangeKind::Create,
+ (Some(_), None) => ChangeKind::Delete,
+ (Some(old), Some(new)) if old == new => return false,
+ (Some(_), Some(_)) => ChangeKind::Modify,
+ };
+
+ *self.get_mut(file_id) = contents;
+ self.changes.push(ChangedFile { file_id, change_kind });
+ true
+ }
+
+ /// Returns `true` if the `Vfs` contains [changes](ChangedFile).
+ pub fn has_changes(&self) -> bool {
+ !self.changes.is_empty()
+ }
+
+ /// Drains and returns all the changes in the `Vfs`.
+ pub fn take_changes(&mut self) -> Vec<ChangedFile> {
+ mem::take(&mut self.changes)
+ }
+
+ /// Returns the id associated with `path`
+ ///
+ /// - If `path` does not exist in the `Vfs`, allocates a new id for it, associated with a
+ /// deleted file;
+ /// - Else, returns `path`'s id.
+ ///
+ /// Does not record a change.
+ fn alloc_file_id(&mut self, path: VfsPath) -> FileId {
+ let file_id = self.interner.intern(path);
+ let idx = file_id.0 as usize;
+ let len = self.data.len().max(idx + 1);
+ self.data.resize_with(len, || None);
+ file_id
+ }
+
+ /// Returns the content associated with the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if no file is associated to that id.
+ fn get(&self, file_id: FileId) -> &Option<Vec<u8>> {
+ &self.data[file_id.0 as usize]
+ }
+
+ /// Mutably returns the content associated with the given `file_id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if no file is associated to that id.
+ fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> {
+ &mut self.data[file_id.0 as usize]
+ }
+}
+
+impl fmt::Debug for Vfs {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Vfs").field("n_files", &self.data.len()).finish()
+ }
+}
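
A minimal sketch of the change-log workflow described in the module docs above, assuming the `vfs` crate as a dependency: writes are recorded, identical writes are no-ops, and the accumulated changes are drained as a batch.

use vfs::{ChangeKind, Vfs, VfsPath};

fn vfs_changes_example() {
    let mut vfs = Vfs::default();
    let path = VfsPath::new_virtual_path("/foo/lib.rs".to_string());

    // The first write creates the file; an identical second write is a no-op.
    assert!(vfs.set_file_contents(path.clone(), Some(b"fn main() {}".to_vec())));
    assert!(!vfs.set_file_contents(path.clone(), Some(b"fn main() {}".to_vec())));

    // Drain the accumulated changes, as the salsa-facing consumer would.
    let changes = vfs.take_changes();
    assert_eq!(changes.len(), 1);
    assert_eq!(changes[0].change_kind, ChangeKind::Create);

    let file_id = vfs.file_id(&path).unwrap();
    assert_eq!(vfs.file_contents(file_id), b"fn main() {}");
}
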
diff --git a/src/tools/rust-analyzer/crates/vfs/src/loader.rs b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
new file mode 100644
index 000000000..e2d74782a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
@@ -0,0 +1,215 @@
+//! Object safe interface for file watching and reading.
+use std::fmt;
+
+use paths::{AbsPath, AbsPathBuf};
+
+/// A set of files on the file system.
+#[derive(Debug, Clone)]
+pub enum Entry {
+ /// The `Entry` is represented by a raw set of files.
+ Files(Vec<AbsPathBuf>),
+ /// The `Entry` is represented by `Directories`.
+ Directories(Directories),
+}
+
+/// Specifies a set of files on the file system.
+///
+/// A file is included if:
+/// * it has an included extension
+/// * it is under an `include` path
+/// * it is not under an `exclude` path
+///
+/// If many include/exclude paths match, the longest one wins.
+///
+/// If a path is in both `include` and `exclude`, the `exclude` one wins.
+#[derive(Debug, Clone, Default)]
+pub struct Directories {
+ pub extensions: Vec<String>,
+ pub include: Vec<AbsPathBuf>,
+ pub exclude: Vec<AbsPathBuf>,
+}
+
+/// [`Handle`]'s configuration.
+#[derive(Debug)]
+pub struct Config {
+ /// Version number to associate progress updates to the right config
+ /// version.
+ pub version: u32,
+ /// Set of initially loaded files.
+ pub load: Vec<Entry>,
+ /// Index of watched entries in `load`.
+ ///
+ /// If a path in a watched entry is modified, the [`Handle`] should notify it.
+ pub watch: Vec<usize>,
+}
+
+/// Message about an action taken by a [`Handle`].
+pub enum Message {
+ /// Indicates gradual progress.
+ ///
+ /// This is supposed to be the number of loaded files.
+ Progress { n_total: usize, n_done: usize, config_version: u32 },
+ /// The handle loaded the following files' content.
+ Loaded { files: Vec<(AbsPathBuf, Option<Vec<u8>>)> },
+}
+
+/// Type that will receive [`Messages`](Message) from a [`Handle`].
+pub type Sender = Box<dyn Fn(Message) + Send>;
+
+/// Interface for reading and watching files.
+pub trait Handle: fmt::Debug {
+ /// Spawn a new handle with the given `sender`.
+ fn spawn(sender: Sender) -> Self
+ where
+ Self: Sized;
+
+ /// Set this handle's configuration.
+ fn set_config(&mut self, config: Config);
+
+ /// The file's content at `path` has been modified, and should be reloaded.
+ fn invalidate(&mut self, path: AbsPathBuf);
+
+ /// Load the content of the given file, returning [`None`] if it does not
+ /// exist.
+ fn load_sync(&mut self, path: &AbsPath) -> Option<Vec<u8>>;
+}
+
+impl Entry {
+ /// Returns:
+ /// ```text
+ /// Entry::Directories(Directories {
+ /// extensions: ["rs"],
+ /// include: [base],
+ /// exclude: [base/.git],
+ /// })
+ /// ```
+ pub fn rs_files_recursively(base: AbsPathBuf) -> Entry {
+ Entry::Directories(dirs(base, &[".git"]))
+ }
+
+ /// Returns:
+ /// ```text
+ /// Entry::Directories(Directories {
+ /// extensions: ["rs"],
+ /// include: [base],
+ /// exclude: [base/.git, base/target],
+ /// })
+ /// ```
+ pub fn local_cargo_package(base: AbsPathBuf) -> Entry {
+ Entry::Directories(dirs(base, &[".git", "target"]))
+ }
+
+ /// Returns:
+ /// ```text
+ /// Entry::Directories(Directories {
+ /// extensions: ["rs"],
+ /// include: [base],
+ /// exclude: [base/.git, /tests, /examples, /benches],
+ /// })
+ /// ```
+ pub fn cargo_package_dependency(base: AbsPathBuf) -> Entry {
+ Entry::Directories(dirs(base, &[".git", "/tests", "/examples", "/benches"]))
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// See [`Directories::contains_file`].
+ pub fn contains_file(&self, path: &AbsPath) -> bool {
+ match self {
+ Entry::Files(files) => files.iter().any(|it| it == path),
+ Entry::Directories(dirs) => dirs.contains_file(path),
+ }
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// - If `self` is `Entry::Files`, returns `false`
+ /// - Else, see [`Directories::contains_dir`].
+ pub fn contains_dir(&self, path: &AbsPath) -> bool {
+ match self {
+ Entry::Files(_) => false,
+ Entry::Directories(dirs) => dirs.contains_dir(path),
+ }
+ }
+}
+
+impl Directories {
+ /// Returns `true` if `path` is included in `self`.
+ pub fn contains_file(&self, path: &AbsPath) -> bool {
+ // First, check the file extension...
+ let ext = path.extension().unwrap_or_default();
+ if self.extensions.iter().all(|it| it.as_str() != ext) {
+ return false;
+ }
+
+ // Then, check for path inclusion...
+ self.includes_path(path)
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// Since `path` is supposed to be a directory, this will not take extension
+ /// into account.
+ pub fn contains_dir(&self, path: &AbsPath) -> bool {
+ self.includes_path(path)
+ }
+
+ /// Returns `true` if `path` is included in `self`.
+ ///
+ /// It is included if
+ /// - An element in `self.include` is a prefix of `path`.
+ /// - That `include` element is longer than any element in `self.exclude` that is a prefix
+ /// of `path`. In case of equality, exclusion wins.
+ fn includes_path(&self, path: &AbsPath) -> bool {
+ let mut include: Option<&AbsPathBuf> = None;
+ for incl in &self.include {
+ if path.starts_with(incl) {
+ include = Some(match include {
+ Some(prev) if prev.starts_with(incl) => prev,
+ _ => incl,
+ });
+ }
+ }
+
+ let include = match include {
+ Some(it) => it,
+ None => return false,
+ };
+
+ !self.exclude.iter().any(|excl| path.starts_with(excl) && excl.starts_with(include))
+ }
+}
+
+/// Returns:
+/// ```text
+/// Directories {
+/// extensions: ["rs"],
+/// include: [base],
+/// exclude: [base/<exclude>],
+/// }
+/// ```
+fn dirs(base: AbsPathBuf, exclude: &[&str]) -> Directories {
+ let exclude = exclude.iter().map(|it| base.join(it)).collect::<Vec<_>>();
+ Directories { extensions: vec!["rs".to_string()], include: vec![base], exclude }
+}
+
+impl fmt::Debug for Message {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Message::Loaded { files } => {
+ f.debug_struct("Loaded").field("n_files", &files.len()).finish()
+ }
+ Message::Progress { n_total, n_done, config_version } => f
+ .debug_struct("Progress")
+ .field("n_total", n_total)
+ .field("n_done", n_done)
+ .field("config_version", config_version)
+ .finish(),
+ }
+ }
+}
+
+#[test]
+fn handle_is_object_safe() {
+ fn _assert(_: &dyn Handle) {}
+}
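
A minimal sketch of how the include/exclude/extension rules above combine for a `local_cargo_package` entry, assuming the `vfs` crate as a dependency and illustrative Unix paths:

use std::path::Path;

use vfs::loader::Entry;
use vfs::{AbsPath, AbsPathBuf};

fn entry_filter_example() {
    // "rs" files under /proj, excluding /proj/.git and /proj/target.
    let entry = Entry::local_cargo_package(AbsPathBuf::assert("/proj".into()));

    assert!(entry.contains_file(AbsPath::assert(Path::new("/proj/src/lib.rs"))));
    assert!(!entry.contains_file(AbsPath::assert(Path::new("/proj/target/gen.rs"))));
    assert!(!entry.contains_file(AbsPath::assert(Path::new("/proj/README.md"))));
}
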
diff --git a/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs b/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs
new file mode 100644
index 000000000..6e049f0d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs
@@ -0,0 +1,48 @@
+//! Maps paths to compact integer ids. We don't care about clearing paths which
+//! no longer exist -- the assumption is that the total size of the paths we ever
+//! look at is not too big.
+use std::hash::BuildHasherDefault;
+
+use indexmap::IndexSet;
+use rustc_hash::FxHasher;
+
+use crate::{FileId, VfsPath};
+
+/// Structure to map between [`VfsPath`] and [`FileId`].
+pub(crate) struct PathInterner {
+ map: IndexSet<VfsPath, BuildHasherDefault<FxHasher>>,
+}
+
+impl Default for PathInterner {
+ fn default() -> Self {
+ Self { map: IndexSet::default() }
+ }
+}
+
+impl PathInterner {
+ /// Get the id corresponding to `path`.
+ ///
+ /// If `path` does not exist in `self`, returns [`None`].
+ pub(crate) fn get(&self, path: &VfsPath) -> Option<FileId> {
+ self.map.get_index_of(path).map(|i| FileId(i as u32))
+ }
+
+ /// Insert `path` in `self`.
+ ///
+ /// - If `path` already exists in `self`, returns its associated id;
+ /// - Else, returns a newly allocated id.
+ pub(crate) fn intern(&mut self, path: VfsPath) -> FileId {
+ let (id, _added) = self.map.insert_full(path);
+ assert!(id < u32::MAX as usize);
+ FileId(id as u32)
+ }
+
+ /// Returns the path corresponding to `id`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `id` does not exist in `self`.
+ pub(crate) fn lookup(&self, id: FileId) -> &VfsPath {
+ self.map.get_index(id.0 as usize).unwrap()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
new file mode 100644
index 000000000..668c7320d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
@@ -0,0 +1,406 @@
+//! Abstract-ish representation of paths for VFS.
+use std::fmt;
+
+use paths::{AbsPath, AbsPathBuf};
+
+/// Path in [`Vfs`].
+///
+/// Long-term, we want to support files which do not reside in the file-system,
+/// so we treat `VfsPath`s as opaque identifiers.
+///
+/// [`Vfs`]: crate::Vfs
+#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct VfsPath(VfsPathRepr);
+
+impl VfsPath {
+ /// Creates an "in-memory" path from a `/`-separated string.
+ ///
+ /// This is most useful for testing, to avoid Windows/Linux differences.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `path` does not start with `'/'`.
+ pub fn new_virtual_path(path: String) -> VfsPath {
+ assert!(path.starts_with('/'));
+ VfsPath(VfsPathRepr::VirtualPath(VirtualPath(path)))
+ }
+
+ /// Create a path from a string. The input should be a string representation of
+ /// an absolute path inside the filesystem.
+ pub fn new_real_path(path: String) -> VfsPath {
+ VfsPath::from(AbsPathBuf::assert(path.into()))
+ }
+
+ /// Returns the `AbsPath` representation of `self` if `self` is on the file system.
+ pub fn as_path(&self) -> Option<&AbsPath> {
+ match &self.0 {
+ VfsPathRepr::PathBuf(it) => Some(it.as_path()),
+ VfsPathRepr::VirtualPath(_) => None,
+ }
+ }
+
+ /// Creates a new `VfsPath` with `path` adjoined to `self`.
+ pub fn join(&self, path: &str) -> Option<VfsPath> {
+ match &self.0 {
+ VfsPathRepr::PathBuf(it) => {
+ let res = it.join(path).normalize();
+ Some(VfsPath(VfsPathRepr::PathBuf(res)))
+ }
+ VfsPathRepr::VirtualPath(it) => {
+ let res = it.join(path)?;
+ Some(VfsPath(VfsPathRepr::VirtualPath(res)))
+ }
+ }
+ }
+
+ /// Remove the last component of `self` if there is one.
+ ///
+ /// If `self` has no component, returns `false`; else returns `true`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use vfs::{AbsPathBuf, VfsPath};
+ /// let mut path = VfsPath::from(AbsPathBuf::assert("/foo/bar".into()));
+ /// assert!(path.pop());
+ /// assert_eq!(path, VfsPath::from(AbsPathBuf::assert("/foo".into())));
+ /// assert!(path.pop());
+ /// assert_eq!(path, VfsPath::from(AbsPathBuf::assert("/".into())));
+ /// assert!(!path.pop());
+ /// ```
+ pub fn pop(&mut self) -> bool {
+ match &mut self.0 {
+ VfsPathRepr::PathBuf(it) => it.pop(),
+ VfsPathRepr::VirtualPath(it) => it.pop(),
+ }
+ }
+
+ /// Returns `true` if `other` is a prefix of `self`.
+ pub fn starts_with(&self, other: &VfsPath) -> bool {
+ match (&self.0, &other.0) {
+ (VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.starts_with(rhs),
+ (VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.starts_with(rhs),
+ (VfsPathRepr::PathBuf(_) | VfsPathRepr::VirtualPath(_), _) => false,
+ }
+ }
+
+ /// Returns the `VfsPath` without its final component, if there is one.
+ ///
+ /// Returns [`None`] if the path is a root or prefix.
+ pub fn parent(&self) -> Option<VfsPath> {
+ let mut parent = self.clone();
+ if parent.pop() {
+ Some(parent)
+ } else {
+ None
+ }
+ }
+
+ /// Returns `self`'s base name and file extension.
+ pub fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
+ match &self.0 {
+ VfsPathRepr::PathBuf(p) => Some((
+ p.file_stem()?.to_str()?,
+ p.extension().and_then(|extension| extension.to_str()),
+ )),
+ VfsPathRepr::VirtualPath(p) => p.name_and_extension(),
+ }
+ }
+
+ /// **Don't make this `pub`**
+ ///
+ /// Encode the path in the given buffer.
+ ///
+ /// The encoding will be `0` if [`AbsPathBuf`], `1` if [`VirtualPath`], followed
+ /// by `self`'s representation.
+ ///
+ /// Note that this encoding is dependent on the operating system.
+ pub(crate) fn encode(&self, buf: &mut Vec<u8>) {
+ let tag = match &self.0 {
+ VfsPathRepr::PathBuf(_) => 0,
+ VfsPathRepr::VirtualPath(_) => 1,
+ };
+ buf.push(tag);
+ match &self.0 {
+ VfsPathRepr::PathBuf(path) => {
+ #[cfg(windows)]
+ {
+ use windows_paths::Encode;
+ let path: &std::path::Path = path.as_ref();
+ let components = path.components();
+ let mut add_sep = false;
+ for component in components {
+ if add_sep {
+ windows_paths::SEP.encode(buf);
+ }
+ let len_before = buf.len();
+ match component {
+ std::path::Component::Prefix(prefix) => {
+ // kind() returns a normalized and comparable path prefix.
+ prefix.kind().encode(buf);
+ }
+ std::path::Component::RootDir => {
+ if !add_sep {
+ component.as_os_str().encode(buf);
+ }
+ }
+ _ => component.as_os_str().encode(buf),
+ }
+
+ // some components may be encoded empty
+ add_sep = len_before != buf.len();
+ }
+ }
+ #[cfg(unix)]
+ {
+ use std::os::unix::ffi::OsStrExt;
+ buf.extend(path.as_os_str().as_bytes());
+ }
+ #[cfg(not(any(windows, unix)))]
+ {
+ buf.extend(path.as_os_str().to_string_lossy().as_bytes());
+ }
+ }
+ VfsPathRepr::VirtualPath(VirtualPath(s)) => buf.extend(s.as_bytes()),
+ }
+ }
+}
+
+#[cfg(windows)]
+mod windows_paths {
+ pub(crate) trait Encode {
+ fn encode(&self, buf: &mut Vec<u8>);
+ }
+
+ impl Encode for std::ffi::OsStr {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ use std::os::windows::ffi::OsStrExt;
+ for wchar in self.encode_wide() {
+ buf.extend(wchar.to_le_bytes().iter().copied());
+ }
+ }
+ }
+
+ impl Encode for u8 {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ let wide = *self as u16;
+ buf.extend(wide.to_le_bytes().iter().copied())
+ }
+ }
+
+ impl Encode for &str {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ debug_assert!(self.is_ascii());
+ for b in self.as_bytes() {
+ b.encode(buf)
+ }
+ }
+ }
+
+ pub(crate) const SEP: &str = "\\";
+ const VERBATIM: &str = "\\\\?\\";
+ const UNC: &str = "UNC";
+ const DEVICE: &str = "\\\\.\\";
+ const COLON: &str = ":";
+
+ impl Encode for std::path::Prefix<'_> {
+ fn encode(&self, buf: &mut Vec<u8>) {
+ match self {
+ std::path::Prefix::Verbatim(c) => {
+ VERBATIM.encode(buf);
+ c.encode(buf);
+ }
+ std::path::Prefix::VerbatimUNC(server, share) => {
+ VERBATIM.encode(buf);
+ UNC.encode(buf);
+ SEP.encode(buf);
+ server.encode(buf);
+ SEP.encode(buf);
+ share.encode(buf);
+ }
+ std::path::Prefix::VerbatimDisk(d) => {
+ VERBATIM.encode(buf);
+ d.encode(buf);
+ COLON.encode(buf);
+ }
+ std::path::Prefix::DeviceNS(device) => {
+ DEVICE.encode(buf);
+ device.encode(buf);
+ }
+ std::path::Prefix::UNC(server, share) => {
+ SEP.encode(buf);
+ SEP.encode(buf);
+ server.encode(buf);
+ SEP.encode(buf);
+ share.encode(buf);
+ }
+ std::path::Prefix::Disk(d) => {
+ d.encode(buf);
+ COLON.encode(buf);
+ }
+ }
+ }
+ }
+ #[test]
+ fn paths_encoding() {
+ // drive letter casing agnostic
+ test_eq("C:/x.rs", "c:/x.rs");
+ // separator agnostic
+ test_eq("C:/x/y.rs", "C:\\x\\y.rs");
+
+ fn test_eq(a: &str, b: &str) {
+ let mut b1 = Vec::new();
+ let mut b2 = Vec::new();
+ vfs(a).encode(&mut b1);
+ vfs(b).encode(&mut b2);
+ assert_eq!(b1, b2);
+ }
+ }
+
+ #[test]
+ fn test_sep_root_dir_encoding() {
+ let mut buf = Vec::new();
+ vfs("C:/x/y").encode(&mut buf);
+ assert_eq!(&buf, &[0, 67, 0, 58, 0, 92, 0, 120, 0, 92, 0, 121, 0])
+ }
+
+ #[cfg(test)]
+ fn vfs(str: &str) -> super::VfsPath {
+ use super::{AbsPathBuf, VfsPath};
+ VfsPath::from(AbsPathBuf::try_from(str).unwrap())
+ }
+}
+
+/// Internal, private representation of [`VfsPath`].
+#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+enum VfsPathRepr {
+ PathBuf(AbsPathBuf),
+ VirtualPath(VirtualPath),
+}
+
+impl From<AbsPathBuf> for VfsPath {
+ fn from(v: AbsPathBuf) -> Self {
+ VfsPath(VfsPathRepr::PathBuf(v.normalize()))
+ }
+}
+
+impl fmt::Display for VfsPath {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match &self.0 {
+ VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl fmt::Debug for VfsPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
+
+impl fmt::Debug for VfsPathRepr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self {
+ VfsPathRepr::PathBuf(it) => fmt::Debug::fmt(&it.display(), f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Debug::fmt(&it, f),
+ }
+ }
+}
+
+/// `/`-separated virtual path.
+///
+/// This is used to describe files that do not reside on the file system.
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+struct VirtualPath(String);
+
+impl VirtualPath {
+ /// Returns `true` if `other` is a prefix of `self` (as strings).
+ fn starts_with(&self, other: &VirtualPath) -> bool {
+ self.0.starts_with(&other.0)
+ }
+
+ /// Remove the last component of `self`.
+ ///
+ /// This will find the last `'/'` in `self`, and remove everything after it,
+ /// including the `'/'`.
+ ///
+ /// If `self` contains no `'/'`, returns `false`; else returns `true`.
+ ///
+ /// # Example
+ ///
+ /// ```rust,ignore
+ /// let mut path = VirtualPath("/foo/bar".to_string());
+ /// path.pop();
+ /// assert_eq!(path.0, "/foo");
+ /// path.pop();
+ /// assert_eq!(path.0, "");
+ /// ```
+ fn pop(&mut self) -> bool {
+ let pos = match self.0.rfind('/') {
+ Some(pos) => pos,
+ None => return false,
+ };
+ self.0 = self.0[..pos].to_string();
+ true
+ }
+
+ /// Append the given *relative* path `path` to `self`.
+ ///
+ /// This will resolve any leading `"../"` in `path` before appending it.
+ ///
+ /// Returns [`None`] if `path` has more leading `"../"` than the number of
+ /// components in `self`.
+ ///
+ /// # Notes
+ ///
+ /// In practice, appending here means `self/path` as strings.
+ fn join(&self, mut path: &str) -> Option<VirtualPath> {
+ let mut res = self.clone();
+ while path.starts_with("../") {
+ if !res.pop() {
+ return None;
+ }
+ path = &path["../".len()..];
+ }
+ path = path.trim_start_matches("./");
+ res.0 = format!("{}/{}", res.0, path);
+ Some(res)
+ }
+
+ /// Returns `self`'s base name and file extension.
+ ///
+ /// # Returns
+ /// - `None` if `self` ends with `"//"`.
+ /// - `Some((name, None))` if `self`'s base contains no `.`, or only one `.` at
+ /// the start.
+ /// - `Some((name, Some(extension)))` otherwise.
+ ///
+ /// # Note
+ /// The extension will not contain `.`. This means `"/foo/bar.baz.rs"` will
+ /// return `Some(("bar.baz", Some("rs")))`.
+ fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
+ let file_path = if self.0.ends_with('/') { &self.0[..&self.0.len() - 1] } else { &self.0 };
+ let file_name = match file_path.rfind('/') {
+ Some(position) => &file_path[position + 1..],
+ None => file_path,
+ };
+
+ if file_name.is_empty() {
+ None
+ } else {
+ let mut file_stem_and_extension = file_name.rsplitn(2, '.');
+ let extension = file_stem_and_extension.next();
+ let file_stem = file_stem_and_extension.next();
+
+ match (file_stem, extension) {
+ (None, None) => None,
+ (None | Some(""), Some(_)) => Some((file_name, None)),
+ (Some(file_stem), extension) => Some((file_stem, extension)),
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs
new file mode 100644
index 000000000..510e021e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path/tests.rs
@@ -0,0 +1,30 @@
+use super::*;
+
+#[test]
+fn virtual_path_extensions() {
+ assert_eq!(VirtualPath("/".to_string()).name_and_extension(), None);
+ assert_eq!(
+ VirtualPath("/directory".to_string()).name_and_extension(),
+ Some(("directory", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/".to_string()).name_and_extension(),
+ Some(("directory", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/file".to_string()).name_and_extension(),
+ Some(("file", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/.file".to_string()).name_and_extension(),
+ Some((".file", None))
+ );
+ assert_eq!(
+ VirtualPath("/directory/.file.rs".to_string()).name_and_extension(),
+ Some((".file", Some("rs")))
+ );
+ assert_eq!(
+ VirtualPath("/directory/file.rs".to_string()).name_and_extension(),
+ Some(("file", Some("rs")))
+ );
+}
diff --git a/src/tools/rust-analyzer/docs/dev/README.md b/src/tools/rust-analyzer/docs/dev/README.md
new file mode 100644
index 000000000..76bbd1e91
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/README.md
@@ -0,0 +1,266 @@
+# Contributing Quick Start
+
+rust-analyzer is an ordinary Rust project, which is organized as a Cargo workspace, builds on stable and doesn't depend on C libraries.
+So, just
+
+```
+$ cargo test
+```
+
+should be enough to get you started!
+
+To learn more about how rust-analyzer works, see [./architecture.md](./architecture.md).
+It also explains the high-level layout of the source code.
+Do skim through that document.
+
+We also publish rustdoc docs to pages: https://rust-lang.github.io/rust-analyzer/ide/.
+Note though, that the internal documentation is very incomplete.
+
+Various organizational and process issues are discussed in this document.
+
+# Getting in Touch
+
+rust-analyzer is a part of the [RLS-2.0 working
+group](https://github.com/rust-lang/compiler-team/tree/6a769c13656c0a6959ebc09e7b1f7c09b86fb9c0/working-groups/rls-2.0).
+Discussion happens in this Zulip stream:
+
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+
+# Issue Labels
+
+* [good-first-issue](https://github.com/rust-lang/rust-analyzer/labels/good%20first%20issue)
+ are good issues to get into the project.
+* [E-has-instructions](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-has-instructions)
+ issues have links to the code in question and tests.
+* [Broken Window](https://github.com/rust-lang/rust-analyzer/issues?q=is:issue+is:open+label:%22Broken+Window%22)
+ are issues which are not necessarily critical by themselves, but which should be fixed ASAP regardless, to avoid accumulation of technical debt.
+* [E-easy](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-easy),
+ [E-medium](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-medium),
+ [E-hard](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-hard),
+ [E-unknown](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-unknown),
+ labels are *estimates* for how hard it would be to write a fix. Each triaged issue should have one of these labels.
+* [S-actionable](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AS-actionable) and
+ [S-unactionable](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AS-unactionable)
+ specify if there are concrete steps to resolve or advance an issue. Roughly, actionable issues need only work to be fixed,
+ while unactionable ones are blocked either on user feedback (providing a reproducible example), or on larger architectural
+ work or decisions. This classification is descriptive, not prescriptive, and might be wrong: Any unactionable issue might have a simple fix that we missed.
+ Each triaged issue should have one of these labels.
+* [fun](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3Afun)
+ is for cool, but probably hard stuff.
+* [Design](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3ADesign)
+ is for moderate/large scale architecture discussion.
+ Also a kind of fun.
+ These issues should generally include a link to a Zulip discussion thread.
+
+# Code Style & Review Process
+
+Do see [./style.md](./style.md).
+
+# Cookbook
+
+## CI
+
+We use GitHub Actions for CI.
+Most of the things, including formatting, are checked by `cargo test`.
+If `cargo test` passes locally, that's a good sign that CI will be green as well.
+The only exception is that some long-running tests are skipped locally by default.
+Use `env RUN_SLOW_TESTS=1 cargo test` to run the full suite.
+
+We use bors to enforce the [not rocket science](https://graydon2.dreamwidth.org/1597.html) rule.
+
+## Launching rust-analyzer
+
+Debugging the language server can be tricky.
+LSP is rather chatty, so driving it from the command line is not really feasible, and driving it via VS Code requires interacting with two processes.
+
+For this reason, the best way to see how rust-analyzer works is to **find a relevant test and execute it**.
+VS Code & Emacs include an action for running a single test.
+
+Launching a VS Code instance with a locally built language server is also possible.
+There's **"Run Extension (Debug Build)"** launch configuration for this in VS Code.
+
+In general, I use one of the following workflows for fixing bugs and implementing features:
+
+If the problem concerns only internal parts of rust-analyzer (i.e. I don't need to touch the `rust-analyzer` crate or TypeScript code), there is a unit-test for it.
+So, I use **Rust Analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging.
+As a sanity check after I'm done, I use `cargo xtask install --server` and **Reload Window** action in VS Code to verify that the thing works as I expect.
+
+If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
+Notably, this uses the usual `rust-analyzer` binary from `PATH`.
+For this, it is important to have the following in your `settings.json` file:
+```json
+{
+ "rust-analyzer.server.path": "rust-analyzer"
+}
+```
+After I am done with the fix, I use `cargo xtask install --client` to try the new extension for real.
+
+If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
+I usually just `cargo xtask install --server` and poke changes from my live environment.
+Note that this uses `--release`, which is usually faster overall, because loading stdlib into debug version of rust-analyzer takes a lot of time.
+To speed things up, sometimes I open a temporary hello-world project which has `"rust-analyzer.cargo.noSysroot": true` in `.vscode/settings.json`.
+This flag causes rust-analyzer to skip loading the sysroot, which greatly reduces the amount of things rust-analyzer needs to do, and makes printf's more useful.
+Note that you should only use the `eprint!` family of macros for debugging: stdout is used for LSP communication, and `print!` would break it.
+
+If I need to fix something simultaneously in the server and in the client, I feel even more sad.
+I don't have a specific workflow for this case.
+
+Additionally, I use `cargo run --release -p rust-analyzer -- analysis-stats path/to/some/rust/crate` to run a batch analysis.
+This is primarily useful for performance optimizations, or for bug minimization.
+
+## TypeScript Tests
+
+If you change files under `editors/code` and would like to run the tests and linter, install npm and run:
+
+```bash
+cd editors/code
+npm ci
+npm run lint
+```
+## How to ...
+
+* ... add an assist? [#7535](https://github.com/rust-lang/rust-analyzer/pull/7535)
+* ... add a new protocol extension? [#4569](https://github.com/rust-lang/rust-analyzer/pull/4569)
+* ... add a new configuration option? [#7451](https://github.com/rust-lang/rust-analyzer/pull/7451)
+* ... add a new completion? [#6964](https://github.com/rust-lang/rust-analyzer/pull/6964)
+* ... allow new syntax in the parser? [#7338](https://github.com/rust-lang/rust-analyzer/pull/7338)
+
+## Logging
+
+Logging is done by both rust-analyzer and VS Code, so it might be tricky to figure out where logs go.
+
+Inside rust-analyzer, we use the [`tracing`](https://docs.rs/tracing/) crate for logging,
+and [`tracing-subscriber`](https://docs.rs/tracing-subscriber) as the logging frontend.
+By default, logs go to stderr, but stderr itself is processed by VS Code.
+The `--log-file <PATH>` CLI argument allows logging to a file.
+Setting the `RA_LOG_FILE=<PATH>` environment variable also logs to a file and overrides `--log-file`.
+
+To see stderr in the running VS Code instance, go to the "Output" tab of the panel and select `rust-analyzer`.
+This shows `eprintln!` as well.
+Note that `stdout` is used for the actual protocol, so `println!` will break things.
+
+To log all communication between the server and the client, there are two choices:
+
+* You can log on the server side, by running something like
+ ```
+ env RA_LOG=lsp_server=debug code .
+ ```
+* You can log on the client side, by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
+ These logs are shown in a separate tab in the output and can be used with the LSP inspector.
+ Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
+
+
+There are also several VS Code commands which might be of interest:
+
+* `Rust Analyzer: Status` shows some memory-usage statistics.
+
+* `Rust Analyzer: Syntax Tree` shows syntax tree of the current file/selection.
+
+* `Rust Analyzer: View Hir` shows the HIR expressions within the function containing the cursor.
+
+ You can hover over syntax nodes in the opened text file to see the appropriate
+ rust code that it refers to and the rust editor will also highlight the proper
+ text range.
+
+ If you trigger Go to Definition in the inspected Rust source file,
+ the syntax tree read-only editor should scroll to and select the
+ appropriate syntax node token.
+
+ ![demo](https://user-images.githubusercontent.com/36276403/78225773-6636a480-74d3-11ea-9d9f-1c9d42da03b0.png)
+
+## Profiling
+
+We have a built-in hierarchical profiler; you can enable it by setting the `RA_PROFILE` env var:
+
+```
+RA_PROFILE=* // dump everything
+RA_PROFILE=foo|bar|baz // enable only selected entries
+RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
+```
+
+In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+
+We also have a "counting" profiler which counts the number of instances of popular structs.
+It is enabled by `RA_COUNT=1`.
+
+To measure time for from-scratch analysis, use something like this:
+
+```
+$ cargo run --release -p rust-analyzer -- analysis-stats ../chalk/
+```
+
+For measuring time of incremental analysis, use either of these:
+
+```
+$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
+$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
+```
+
+Look for `fn benchmark_xxx` tests for a quick way to reproduce performance problems.
+
+## Release Process
+
+Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one.
+
+`release` assumes that you have checkouts of `rust-analyzer`, `rust-analyzer.github.io`, and `rust-lang/rust` in the same directory:
+
+```
+./rust-analyzer
+./rust-analyzer.github.io
+./rust-rust-analyzer # Note the name!
+```
+
+The remote for `rust-analyzer` must be called `upstream` (I use `origin` to point to my fork).
+In addition, for `xtask promote` (see below), `rust-rust-analyzer` must have a `rust-analyzer` remote pointing to this repository on GitHub.
+
+`release` makes GitHub API calls to scrape pull request comments and categorize them in the changelog.
+This step uses the `curl` and `jq` applications, which need to be available in `PATH`.
+Finally, you need to obtain a GitHub personal access token and set the `GITHUB_TOKEN` environment variable.
+
+Release steps:
+
+1. Set the `GITHUB_TOKEN` environment variable.
+2. Inside rust-analyzer, run `cargo xtask release`. This will:
+ * checkout the `release` branch
+ * reset it to `upstream/nightly`
+ * push it to `upstream`. This triggers GitHub Actions which:
+ * runs `cargo xtask dist` to package binaries and VS Code extension
+ * makes a GitHub release
+ * publishes the VS Code extension to the marketplace
+ * call the GitHub API for PR details
+ * create a new changelog in `rust-analyzer.github.io`
+3. While the release is in progress, fill in the changelog
+4. Commit & push the changelog
+5. Tweet
+6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree.
+ Self-approve the PR.
+
+If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.
+If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over.
+Make sure to remove the new changelog post created when running `cargo xtask release` a second time.
+
+We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week.
+
+We don't do "patch" releases, unless something truly egregious comes up.
+To do a patch release, cherry-pick the fix on top of the current `release` branch and push the branch.
+There's no need to write a changelog for a patch release, it's OK to include the notes about the fix into the next weekly one.
+Note: we tag releases by date; releasing a patch release on the same day should work (by overwriting a tag), but I am not 100% sure.
+
+## Permissions
+
+There are three sets of people with extra permissions:
+
+* rust-analyzer GitHub organization [**admins**](https://github.com/orgs/rust-analyzer/people?query=role:owner) (which include current t-compiler leads).
+ Admins have full access to the org.
+* [**review**](https://github.com/orgs/rust-analyzer/teams/review) team in the organization.
+ Reviewers have `r+` access to all of the organization's repositories and publish rights on crates.io.
+ They also have direct commit access, but all changes should go via the bors queue.
+ It's ok to self-approve if you think you know what you are doing!
+ bors should automatically sync the permissions.
+ Feel free to request a review or assign any PR to a reviewer with the relevant expertise to bring the work to their attention.
+ Don't feel pressured to review assigned PRs though.
+ If you don't feel like reviewing for whatever reason, someone else will pick the review up!
+* [**triage**](https://github.com/orgs/rust-analyzer/teams/triage) team in the organization.
+ This team can label and close issues.
+
+Note that for the time being you need to be a member of the org yourself to view the links.
diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md
new file mode 100644
index 000000000..ea4035baf
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/architecture.md
@@ -0,0 +1,497 @@
+# Architecture
+
+This document describes the high-level architecture of rust-analyzer.
+If you want to familiarize yourself with the code base, you are just in the right place!
+
+You might also enjoy ["Explaining Rust Analyzer"](https://www.youtube.com/playlist?list=PLhb66M_x9UmrqXhQuIpWC5VgTdrGxMx3y) series on YouTube.
+It goes deeper than what is covered in this document, but will take some time to watch.
+
+See also these implementation-related blog posts:
+
+* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
+* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
+* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
+* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
+* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
+
+For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
+
+
+## Bird's Eye View
+
+![](https://user-images.githubusercontent.com/4789492/107129398-0ab70f00-687a-11eb-9bfc-d4eb023aec06.png)
+
+On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
+
+More specifically, input data consists of a set of text files (`(PathBuf, String)` pairs) and information about project structure, captured in the so-called `CrateGraph`.
+The crate graph specifies which files are crate roots, which cfg flags are specified for each crate and what dependencies exist between the crates.
+This is the input (ground) state.
+The analyzer keeps all this input data in memory and never does any IO.
+Because the input data is source code, which typically measures in tens of megabytes at most, keeping everything in memory is OK.
+
+A "structured semantic model" is basically an object-oriented representation of modules, functions and types which appear in the source code.
+This representation is fully "resolved": all expressions have types, all references are bound to declarations, etc.
+This is derived state.
+
+The client can submit a small delta of input data (typically, a change to a single file) and get a fresh code model which accounts for changes.
+
+The underlying engine makes sure that the model is computed lazily (on-demand) and can be quickly updated for small modifications.
+
+## Entry Points
+
+`crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
+This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
+
+`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+
+`Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
+
+## Code Map
+
+This section talks briefly about various important directories and data structures.
+Pay attention to the **Architecture Invariant** sections.
+They often talk about things which are deliberately absent in the source code.
+
+Note also which crates are **API Boundaries**.
+Remember, [rules at the boundary are different](https://www.tedinski.com/2018/02/06/system-boundaries.html).
+
+### `xtask`
+
+This is rust-analyzer's "build system".
+We use cargo to compile rust code, but there are also various other tasks, like release management or local installation.
+They are handled by Rust code in the xtask directory.
+
+### `editors/code`
+
+VS Code plugin.
+
+### `lib/`
+
+rust-analyzer independent libraries which we publish to crates.io.
+It's not heavily utilized at the moment.
+
+### `crates/parser`
+
+It is a hand-written recursive descent parser, which produces a sequence of events like "start node X", "finish node Y".
+It works similarly to
+[kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java),
+which is a good source of inspiration for dealing with syntax errors and incomplete input.
+Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) is what we use for the definition of the Rust language.
+`TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees.
+
+**Architecture Invariant:** the parser is independent of the particular tree structure and particular representation of the tokens.
+It transforms one flat stream of events into another flat stream of events.
+Token independence allows us to parse out both text-based source code and `tt`-based macro input.
+Tree independence allows us to more easily vary the syntax tree implementation.
+It should also unlock efficient light-parsing approaches.
+For example, you can extract the set of names defined in a file (for typo correction) without building a syntax tree.
+
+**Architecture Invariant:** parsing never fails, the parser produces `(T, Vec<Error>)` rather than `Result<T, Error>`.
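+
+As an illustration of this shape (not the actual parser interface; the names below are hypothetical), a flat event stream with collected errors could look like this:
+
+```rust
+// Hypothetical sketch of an event-producing, never-failing parser.
+#[derive(Debug)]
+enum Event {
+    StartNode { kind: &'static str },
+    Token { kind: &'static str, text: String },
+    FinishNode,
+}
+
+#[derive(Debug)]
+struct ParseError(String);
+
+// "Parsing never fails": even for broken input we return a full event stream
+// plus a list of errors, instead of a `Result` that throws the tree away.
+fn parse(tokens: &[&str]) -> (Vec<Event>, Vec<ParseError>) {
+    let mut events = vec![Event::StartNode { kind: "SOURCE_FILE" }];
+    let mut errors = Vec::new();
+    for &tok in tokens {
+        if tok.chars().all(|c| c.is_alphanumeric() || c == '_') {
+            events.push(Event::Token { kind: "IDENT", text: tok.to_string() });
+        } else {
+            errors.push(ParseError(format!("unexpected token `{}`", tok)));
+            events.push(Event::Token { kind: "ERROR", text: tok.to_string() });
+        }
+    }
+    events.push(Event::FinishNode);
+    (events, errors)
+}
+```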
+
+### `crates/syntax`
+
+Rust syntax tree structure and parser.
+See [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes.
+
+- [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees.
+- `ast` provides a type safe API on top of the raw `rowan` tree.
+- `ungrammar` description of the grammar, which is used to generate `syntax_kinds` and `ast` modules, using `cargo test -p xtask` command.
+
+Tests for ra_syntax are mostly data-driven.
+`test_data/parser` contains subdirectories with a bunch of `.rs` (test vectors) and `.txt` files with corresponding syntax trees.
+During testing, we check `.rs` against `.txt`.
+If the `.txt` file is missing, it is created (this is how you update tests).
+Additionally, running the xtask test suite with `cargo test -p xtask` will walk the grammar module and collect all `// test test_name` comments into files inside `test_data/parser/inline` directory.
+
+To update test data, run with `UPDATE_EXPECT` variable:
+
+```bash
+env UPDATE_EXPECT=1 cargo qt
+```
+
+After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above.
+
+Note [`api_walkthrough`](https://github.com/rust-lang/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
+in particular: it shows off various methods of working with syntax tree.
+
+See [#93](https://github.com/rust-lang/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar.
+
+**Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP.
+This is important because it is possible to make useful tooling using only the syntax tree.
+Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust.
+See also https://web.stanford.edu/~mlfbrown/paper.pdf.
+You can view the `syntax` crate as an entry point to rust-analyzer.
+`syntax` crate is an **API Boundary**.
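+
+For a feel of what "using only the syntax tree" looks like, here is a minimal sketch; it assumes the crate's `SourceFile::parse` entry point and the `rowan`-style `descendants` iterator, and is not part of the real codebase:
+
+```rust
+// Minimal sketch: parse a snippet with the `syntax` crate alone (no salsa, no LSP)
+// and dump every node kind with its text range. Assumes `SourceFile::parse`,
+// `AstNode::syntax` and `SyntaxNode::descendants` as provided by `syntax`/`rowan`.
+use syntax::{AstNode, SourceFile};
+
+fn dump_tree(text: &str) {
+    // Parsing never fails: we always get a tree, plus a list of errors on the side.
+    let parse = SourceFile::parse(text);
+    let file = parse.tree();
+    for node in file.syntax().descendants() {
+        println!("{:?} @ {:?}", node.kind(), node.text_range());
+    }
+}
+```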
+
+**Architecture Invariant:** syntax tree is a value type.
+The tree is fully determined by the contents of its syntax nodes, it doesn't need global context (like an interner) and doesn't store semantic info.
+Using the tree as a store for semantic info is convenient in traditional compilers, but doesn't work nicely in the IDE.
+Specifically, assists and refactors require transforming syntax trees, and that becomes awkward if you need to do something with the semantic info.
+
+**Architecture Invariant:** syntax tree is built for a single file.
+This is to enable parallel parsing of all files.
+
+**Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness.
+If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar.
+
+### `crates/base_db`
+
+We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation.
+Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions.
+The `base_db` crate provides basic infrastructure for interacting with salsa.
+Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer.
+Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs.
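+
+To make the "key-value store that can also compute derived values" mental model concrete, here is a purely illustrative, self-contained sketch; it is not the salsa API, just hand-rolled memoization with manual invalidation:
+
+```rust
+use std::collections::HashMap;
+
+// Toy "database": inputs are set explicitly, derived values are memoized.
+#[derive(Default)]
+struct Db {
+    file_text: HashMap<String, String>,       // input query
+    line_count_cache: HashMap<String, usize>, // memoized derived query
+}
+
+impl Db {
+    fn set_file_text(&mut self, path: &str, text: String) {
+        self.file_text.insert(path.to_string(), text);
+        // Invalidate derived data that depended on this input.
+        // Real salsa tracks dependencies and revisions automatically.
+        self.line_count_cache.remove(path);
+    }
+
+    fn line_count(&mut self, path: &str) -> usize {
+        if let Some(&n) = self.line_count_cache.get(path) {
+            return n; // reuse the previously computed derived value
+        }
+        let n = self.file_text.get(path).map_or(0, |t| t.lines().count());
+        self.line_count_cache.insert(path.to_string(), n);
+        n
+    }
+}
+```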
+
+**Architecture Invariant:** particularities of the build system are *not* the part of the ground state.
+In particular, `base_db` knows nothing about cargo.
+For example, `cfg` flags are a part of `base_db`, but `feature`s are not.
+A `foo` feature is a Cargo-level concept, which is lowered by Cargo to `--cfg feature=foo` argument on the command line.
+The `CrateGraph` structure is used to represent the dependencies between the crates abstractly.
+
+**Architecture Invariant:** `base_db` doesn't know about file system and file paths.
+Files are represented with an opaque `FileId`; there's no operation to get an `std::path::Path` out of the `FileId`.
+
+### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty`
+
+These crates are the *brain* of rust-analyzer.
+This is the compiler part of the IDE.
+
+`hir_xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database.
+There's little abstraction here.
+These crates integrate deeply with salsa and chalk.
+
+Name resolution, macro expansion and type inference all happen here.
+These crates also define various intermediate representations of the core.
+
+`ItemTree` condenses a single `SyntaxTree` into a "summary" data structure, which is stable over modifications to function bodies.
+
+`DefMap` contains the module tree of a crate and stores module scopes.
+
+`Body` stores information about expressions.
+
+**Architecture Invariant:** these crates are not, and will never be, an api boundary.
+
+**Architecture Invariant:** these crates explicitly care about being incremental.
+The core invariant we maintain is "typing inside a function's body never invalidates global derived data".
+i.e., if you change the body of `foo`, all facts about `bar` should remain intact.
+
+**Architecture Invariant:** hir exists only in the context of a particular crate instance with specific CFG flags.
+The same syntax may produce several instances of HIR if the crate participates in the crate graph more than once.
+
+### `crates/hir`
+
+The top-level `hir` crate is an **API Boundary**.
+If you think about "using rust-analyzer as a library", `hir` crate is most likely the façade you'll be talking to.
+
+It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call).
+
+**Architecture Invariant:** `hir` provides a static, fully resolved view of the code.
+While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure.
+
+`hir` also handles the delicate task of going from syntax to the corresponding `hir`.
+Remember that the mapping here is one-to-many.
+See `Semantics` type and `source_to_def` module.
+
+Note in particular a curious recursive structure in `source_to_def`.
+We first resolve the parent _syntax_ node to the parent _hir_ element.
+Then we ask the _hir_ parent what _syntax_ children it has.
+Then we look for our node in the set of children.
+
+This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor.
+This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well.
+
+### `crates/ide`
+
+The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition.
+It is an **API Boundary**.
+If you want to use IDE parts of rust-analyzer via LSP, custom flatbuffers-based protocol or just as a library in your text editor, this is the right API.
+
+**Architecture Invariant:** the `ide` crate's API is built out of POD types with public fields.
+The API uses the editor's terminology; it talks about offsets and string labels rather than definitions or types.
+It is effectively the view in MVC and viewmodel in [MVVM](https://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93viewmodel).
+All arguments and return types are conceptually serializable.
+In particular, syntax trees and hir types are generally absent from the API (but are used heavily in the implementation).
+Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at.
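+
+As a purely hypothetical illustration of that invariant (the type and field names below are invented, not taken from the real `ide` API), a result type in this style is just plain data in editor terms:
+
+```rust
+// Hypothetical `ide`-style result: public fields, offsets and strings only,
+// no syntax trees or hir types leaking through the API.
+pub struct NavInfo {
+    pub file: String,      // which file the editor should open
+    pub range: (u32, u32), // byte offsets into that file
+    pub label: String,     // a human-readable string label
+}
+```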
+
+`ide` is also the first crate which has the notion of change over time.
+`AnalysisHost` is a state to which you can transactionally `apply_change`.
+`Analysis` is an immutable snapshot of the state.
+
+Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features.
+`ide_db` implements common IDE functionality (notably, reference search is implemented here).
+The `ide` crate contains a public API/façade, as well as the implementation for a plethora of smaller features.
+
+**Architecture Invariant:** `ide` crate strives to provide a _perfect_ API.
+Although at the moment it has only one consumer, the LSP server, LSP *does not* influence its API design.
+Instead, we keep in mind a hypothetical _ideal_ client -- an IDE tailored specifically for rust, every nook and cranny of which is packed with Rust-specific goodies.
+
+### `crates/rust-analyzer`
+
+This crate defines the `rust-analyzer` binary, so it is the **entry point**.
+It implements the language server.
+
+**Architecture Invariant:** `rust-analyzer` is the only crate that knows about LSP and JSON serialization.
+If you want to expose a data structure `X` from ide to LSP, don't make it serializable.
+Instead, create a serializable counterpart in `rust-analyzer` crate and manually convert between the two.
+
+`GlobalState` is the state of the server.
+The `main_loop` defines the server event loop which accepts requests and sends responses.
+Requests that modify the state or might block the user's typing are handled on the main thread.
+All other requests are processed in the background.
+
+**Architecture Invariant:** the server is stateless, a-la HTTP.
+Sometimes state needs to be preserved between requests.
+For example, "what is the `edit` for the fifth completion item of the last completion edit?".
+For this, the second request should include enough info to re-create the context from scratch.
+This generally means including all the parameters of the original request.
+
+`reload` module contains the code that handles configuration and Cargo.toml changes.
+This is a tricky business.
+
+**Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken.
+Reloading process should not prevent IDE features from working.
+
+### `crates/toolchain`, `crates/project_model`, `crates/flycheck`
+
+These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature.
+
+They use `crates/paths` heavily instead of `std::path`.
+A single `rust-analyzer` process can serve many projects, so it is important that server's current directory does not leak.
+
+### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv`
+
+These crates implement macros as token tree -> token tree transforms.
+They are independent from the rest of the code.
+
+The `tt` crate defines `TokenTree`, a single token or a delimited sequence of token trees.
+The `mbe` crate contains tools for transforming between syntax trees and token trees.
+It also handles the actual parsing and expansion of declarative macros (a-la "Macros By Example", or mbe).
+
+For proc macros, a client-server model is used.
+We pass the `--proc-macro` argument to the `rust-analyzer` binary to start a separate process (`proc_macro_srv`).
+The client (`proc_macro_api`) provides an interface to talk to that server.
+
+Token trees are then passed from the client, and the server loads the corresponding dynamic library (which is built by `cargo`).
+Because the rustc API for getting results from proc macros is always unstable,
+we maintain our own copy (and paste) of that part of the code, which lets us build the whole thing on stable Rust.
+
+**Architecture Invariant:**
+Bad proc macros may panic or segfault accidentally, so we run them in a separate process and recover from fatal errors.
+They may also be non-deterministic, which conflicts with how `salsa` works, so special attention is required.
+
+### `crates/cfg`
+
+This crate is responsible for parsing, evaluation and general definition of `cfg` attributes.
+
+### `crates/vfs`, `crates/vfs-notify`
+
+These crates implement a virtual file system.
+They provide consistent snapshots of the underlying file system and insulate messy OS paths.
+
+**Architecture Invariant:** vfs doesn't assume a single unified file system.
+i.e., a single rust-analyzer process can act as a remote server for two different machines, where the same `/tmp/foo.rs` path points to different files.
+For this reason, all path APIs generally take some existing path as a "file system witness".
+
+### `crates/stdx`
+
+This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
+as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
+
+### `crates/profile`
+
+This crate contains utilities for CPU and memory profiling.
+
+
+## Cross-Cutting Concerns
+
+This section talks about the things which are everywhere and nowhere in particular.
+
+### Stability Guarantees
+
+One of the reasons rust-analyzer moves relatively fast is that we don't introduce new stability guarantees.
+Instead, as much as possible we leverage existing ones.
+
+Examples:
+
+* The `ide` API of rust-analyzer is explicitly unstable, but the LSP interface is stable, and here we just implement a stable API managed by someone else.
+* Rust language and Cargo are stable, and they are the primary inputs to rust-analyzer.
+* The `rowan` library is published to crates.io, but it is deliberately kept under `1.0` and always makes semver-incompatible upgrades.
+
+Another important example is that rust-analyzer isn't run on CI, so, unlike `rustc` and `clippy`, it is actually ok for us to change runtime behavior.
+
+At some point we might consider opening up APIs or allowing crates.io libraries to include rust-analyzer specific annotations, but that's going to be a big commitment on our side.
+
+Exceptions:
+
+* `rust-project.json` is a de-facto stable format for non-cargo build systems.
+ It is probably ok enough, but was definitely stabilized implicitly.
+ Lesson for the future: when designing API which could become a stability boundary, don't wait for the first users until you stabilize it.
+ By the time you have first users, it is already de-facto stable.
+ And the users will first use the thing, and *then* inform you that now you have users.
+ The sad thing is that stuff should be stable before someone uses it for the first time, or it should contain explicit opt-in.
+* We ship some LSP extensions, and we try to keep those somewhat stable.
+ Here, we need to work with a finite set of editor maintainers, so not providing rock-solid guarantees works.
+
+### Code generation
+
+Some components in this repository are generated through automatic processes.
+Generated code is updated automatically on `cargo test`.
+Generated code is generally committed to the git repository.
+
+In particular, we generate:
+
+* API for working with syntax trees (`syntax::ast`, the [`ungrammar`](https://github.com/rust-analyzer/ungrammar) crate).
+* Various sections of the manual:
+
+ * features
+ * assists
+ * config
+
+* Documentation tests for assists
+
+See the `sourcegen` crate for details.
+
+**Architecture Invariant:** we avoid bootstrapping.
+For codegen we need to parse Rust code.
+Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot.
+For that reason, we use syn and manual string parsing.
+
+### Cancellation
+
+Let's say that the IDE is in the process of computing syntax highlighting, when the user types `foo`.
+What should happen?
+`rust-analyzer`'s answer is that the highlighting process should be cancelled -- its results are now stale, and it also blocks modification of the inputs.
+
+The salsa database maintains a global revision counter.
+When applying a change, salsa bumps this counter and waits until all other threads using salsa finish.
+If a thread does salsa-based computation and notices that the counter is incremented, it panics with a special value (see `Canceled::throw`).
+That is, rust-analyzer requires unwinding.
+
+`ide` is the boundary where the panic is caught and transformed into a `Result<T, Cancelled>`.
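+
+The mechanism can be sketched in isolation like this (self-contained types only, not rust-analyzer's actual cancellation machinery): a special panic payload is thrown when the revision changes and caught at the boundary.
+
+```rust
+use std::panic::{self, AssertUnwindSafe};
+
+#[derive(Debug)]
+struct Cancelled;
+
+fn long_running_query(revision_bumped: bool) -> u32 {
+    if revision_bumped {
+        // Unwind with a marker payload (the role `Canceled::throw` plays in the text).
+        panic::panic_any(Cancelled);
+    }
+    42
+}
+
+fn catch_cancellation<T>(f: impl FnOnce() -> T) -> Result<T, Cancelled> {
+    match panic::catch_unwind(AssertUnwindSafe(f)) {
+        Ok(value) => Ok(value),
+        Err(payload) => match payload.downcast::<Cancelled>() {
+            Ok(_) => Err(Cancelled),
+            // A real panic (a genuine bug) is propagated, not swallowed.
+            Err(payload) => panic::resume_unwind(payload),
+        },
+    }
+}
+
+fn main() {
+    assert_eq!(catch_cancellation(|| long_running_query(false)).unwrap(), 42);
+    // Note: the default panic hook still prints a message to stderr here.
+    assert!(catch_cancellation(|| long_running_query(true)).is_err());
+}
+```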
+
+### Testing
+
+Rust Analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
+
+The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio.
+We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses.
+These tests are known as "heavy", because they interact with Cargo and read real files from disk.
+For this reason, we try to avoid writing too many tests on this boundary: in a statically typed language, it's hard to make an error in the protocol itself if messages are themselves typed.
+Heavy tests are only run when `RUN_SLOW_TESTS` env var is set.
+
+The middle, and most important, boundary is `ide`.
+Unlike `rust-analyzer`, which exposes the LSP API, `ide` exposes a Rust API and is intended for use by various tools.
+A typical test creates an `AnalysisHost`, calls some `Analysis` functions and compares the results against expectation.
+
+The innermost and most elaborate boundary is `hir`.
+It has a much richer vocabulary of types than `ide`, but the basic testing setup is the same: we create a database, run some queries, assert result.
+
+For comparisons, we use the `expect` crate for snapshot testing.
+
+To test various analysis corner cases and avoid forgetting about old tests, we use so-called marks.
+See the `marks` module in the `test_utils` crate for more.
+
+**Architecture Invariant:** rust-analyzer tests do not use libcore or libstd.
+All required library code must be a part of the tests.
+This ensures fast test execution.
+
+**Architecture Invariant:** tests are data driven and do not test the API.
+Tests which directly call various API functions are a liability, because they make refactoring the API significantly more complicated.
+So most of the tests look like this:
+
+```rust
+#[track_caller]
+fn check(input: &str, expect: expect_test::Expect) {
+ // The single place that actually exercises a particular API
+}
+
+#[test]
+fn foo() {
+ check("foo", expect![["bar"]]);
+}
+
+#[test]
+fn spam() {
+ check("spam", expect![["eggs"]]);
+}
+// ...and a hundred more tests that don't care about the specific API at all.
+```
+
+To specify input data, we use a single string literal in a special format, which can describe a set of rust files.
+See the `Fixture` type and its module for fixture examples and documentation.
+
+**Architecture Invariant:** all code invariants are tested by `#[test]` tests.
+There are no additional checks in CI; formatting and tidy tests are run with `cargo test`.
+
+**Architecture Invariant:** tests do not depend on any kind of external resources, they are perfectly reproducible.
+
+
+### Performance Testing
+
+TBA, take a look at the `metrics` xtask and `#[test] fn benchmark_xxx()` functions.
+
+### Error Handling
+
+**Architecture Invariant:** core parts of rust-analyzer (`ide`/`hir`) don't interact with the outside world and thus can't fail.
+Only parts touching LSP are allowed to do IO.
+
+Internals of rust-analyzer need to deal with broken code, but this is not an error condition.
+rust-analyzer is robust: various analyses compute `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+rust-analyzer is a complex long-running process.
+It will always have bugs and panics.
+But a panic in an isolated feature should not bring down the whole process.
+Each LSP-request is protected by a `catch_unwind`.
+We use `always` and `never` macros instead of `assert` to gracefully recover from impossible conditions.
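+
+The idea can be sketched with a self-contained macro like the one below (the real rust-analyzer macros live in its support code and differ in detail); the point is to stay running in release builds instead of bringing the whole process down:
+
+```rust
+// Sketch of an `always!`-style check: panics in debug builds (so bugs are loud
+// during development) but only logs and falls back gracefully in release builds.
+macro_rules! always {
+    ($cond:expr) => {{
+        let cond = $cond;
+        if !cond {
+            debug_assert!(false, "condition failed: {}", stringify!($cond));
+            eprintln!("condition failed: {}", stringify!($cond));
+        }
+        cond
+    }};
+}
+
+fn nth_or_zero(values: &[i32], idx: usize) -> i32 {
+    if always!(idx < values.len()) {
+        values[idx]
+    } else {
+        0 // graceful fallback instead of an out-of-bounds panic
+    }
+}
+```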
+
+### Observability
+
+rust-analyzer is a long-running process, so it is important to understand what's going on inside.
+We have several instruments for that.
+
+The event loop that runs rust-analyzer is very explicit.
+Rather than spawning futures or scheduling callbacks (open), the event loop accepts an `enum` of possible events (closed).
+It's easy to see all the things that trigger rust-analyzer processing, together with their performance.
+
+rust-analyzer includes a simple hierarchical profiler (`hprof`).
+It is enabled with `RA_PROFILE='*>50'` env var (log all (`*`) actions which take more than `50` ms) and produces output like:
+
+```
+85ms - handle_completion
+ 68ms - import_on_the_fly
+ 67ms - import_assets::search_for_relative_paths
+ 0ms - crate_def_map:wait (804 calls)
+ 0ms - find_path (16 calls)
+ 2ms - find_similar_imports (1 calls)
+ 0ms - generic_params_query (334 calls)
+ 59ms - trait_solve_query (186 calls)
+ 0ms - Semantics::analyze_impl (1 calls)
+ 1ms - render_resolution (8 calls)
+ 0ms - Semantics::analyze_impl (5 calls)
+```
+
+This is cheap enough to enable in production.
+
+
+Similarly, we support live object counting (`RA_COUNT=1`).
+It is not cheap enough to enable in prod, and this is a bug which should be fixed.
+
+### Configurability
+
+rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet.
+There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience.
+Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should.
+
+### Serialization
+
+In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`.
+This easiness is misleading -- serializable types impose significant backwards compatibility constraints.
+If a type is serializable, then it is a part of some IPC boundary.
+You often don't control the other side of this boundary, so changing serializable types is hard.
+
+For this reason, the types in `ide`, `base_db` and below are not serializable by design.
+If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide a custom, client-specific serialization format.
+This isolates backwards compatibility and migration concerns to a specific client.
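+
+A hedged sketch of that convention (the type and field names below are invented for illustration, and `serde`/`serde_json` are assumed as dependencies of the server-side crate):
+
+```rust
+use serde::Serialize;
+
+// Conceptually lives in `ide`: plain data, deliberately *not* serializable.
+pub struct HoverInfo {
+    pub offset: u32,
+    pub markup: String,
+}
+
+// Conceptually lives in the `rust-analyzer` crate: the serializable counterpart.
+#[derive(Serialize)]
+struct HoverResponse {
+    offset: u32,
+    contents: String,
+}
+
+impl From<HoverInfo> for HoverResponse {
+    fn from(info: HoverInfo) -> Self {
+        HoverResponse { offset: info.offset, contents: info.markup }
+    }
+}
+
+fn to_json(info: HoverInfo) -> serde_json::Result<String> {
+    // Only the boundary crate decides how (and whether) things are serialized.
+    serde_json::to_string(&HoverResponse::from(info))
+}
+```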
+
+For example, `rust-project.json` is its own format -- it doesn't include `CrateGraph` as is.
+Instead, it creates a `CrateGraph` by calling appropriate constructing functions.
diff --git a/src/tools/rust-analyzer/docs/dev/debugging.md b/src/tools/rust-analyzer/docs/dev/debugging.md
new file mode 100644
index 000000000..48caec1d8
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/debugging.md
@@ -0,0 +1,99 @@
+# Debugging VSCode plugin and the language server
+
+## Prerequisites
+
+- Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb).
+- Open the root folder in VSCode. Here you can access the preconfigured debug setups.
+
+ <img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
+
+- Install all TypeScript dependencies
+ ```bash
+ cd editors/code
+ npm ci
+ ```
+
+## Common knowledge
+
+* All debug configurations open a new `[Extension Development Host]` VSCode instance
+where **only** the `rust-analyzer` extension being debugged is enabled.
+* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
+
+
+## Debug TypeScript VSCode extension
+
+- `Run Installed Extension` - runs the extension with the globally installed `rust-analyzer` binary.
+- `Run Extension (Debug Build)` - runs extension with the locally built LSP server (`target/debug/rust-analyzer`).
+
+TypeScript debugging is configured to watch your source edits and recompile.
+To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</kbd> and run the following command in your `[Extension Development Host]`
+
+```
+> Developer: Reload Window
+```
+
+## Debug Rust LSP server
+
+- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
+
+ ```
+ echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
+ ```
+
+
+- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
+ ```toml
+ [profile.dev]
+ debug = 2
+ ```
+
+- Select `Run Extension (Debug Build)` to run your locally built `target/debug/rust-analyzer`.
+
+- In the original VSCode window once again select the `Attach To Server` debug configuration.
+
+- A list of running processes should appear. Select the `rust-analyzer` from this repo.
+
+- Navigate to `crates/rust-analyzer/src/main_loop.rs` and add a breakpoint to the `on_request` function.
+
+- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
+
+If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates\rust-analyzer\src\bin\main.rs`)
+```rust
+ let mut d = 4;
+ while d == 4 { // set a breakpoint here and change the value
+ d = 4;
+ }
+```
+
+However, for this to work, you will need to enable debug_assertions in your build:
+```bash
+RUSTFLAGS='--cfg debug_assertions' cargo build --release
+```
+
+## Demo
+
+- [Debugging TypeScript VScode extension](https://www.youtube.com/watch?v=T-hvpK6s4wM).
+- [Debugging Rust LSP server](https://www.youtube.com/watch?v=EaNb5rg4E0M).
+
+## Troubleshooting
+
+### Can't find the `rust-analyzer` process
+
+It could be a case of just jumping the gun.
+
+The `rust-analyzer` server is only started once the `onLanguage:rust` activation event fires.
+
+Make sure you open a rust file in the `[Extension Development Host]` and try again.
+
+### Can't connect to `rust-analyzer`
+
+Make sure you have run `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`.
+
+By default this should reset back to 1 every time you log in.
+
+### Breakpoints are never being hit
+
+Check your version of `lldb`. If it's version 6 or lower, use the `classic` adapter type.
+It's `lldb.adapterType` in the settings file.
+
+If you're running `lldb` version 7, change the lldb adapter type to `bundled` or `native`.
diff --git a/src/tools/rust-analyzer/docs/dev/guide.md b/src/tools/rust-analyzer/docs/dev/guide.md
new file mode 100644
index 000000000..47ae3f3e6
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/guide.md
@@ -0,0 +1,573 @@
+# Guide to rust-analyzer
+
+## About the guide
+
+This guide describes the current state of rust-analyzer as of 2019-01-20 (git
+tag [guide-2019-01]). Its purpose is to document various problems and
+architectural solutions related to the problem of building an IDE-first compiler
+for Rust. There is a video version of this guide as well:
+https://youtu.be/ANKBNiSWyfc.
+
+[guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
+
+## The big picture
+
+On the highest possible level, rust-analyzer is a stateful component. A client may
+apply changes to the analyzer (the new contents of the `foo.rs` file is "fn main() {}")
+and it may ask semantic questions about the current state (what is the
+definition of the identifier with offset 92 in file `bar.rs`?). Two important
+properties hold:
+
+* Analyzer does not do any I/O. It starts in an empty state and all input data is
+ provided via `apply_change` API.
+
+* Only queries about the current state are supported. One can, of course,
+ simulate undo and redo by keeping a log of changes and inverse changes respectively.
+
+## IDE API
+
+To see the bigger picture of how the IDE features work, let's take a look at the [`AnalysisHost`] and
+[`Analysis`] pair of types. `AnalysisHost` has three methods:
+
+* `default()` for creating an empty analysis instance
+* `apply_change(&mut self)` to make changes (this is how you get from an empty
+ state to something interesting)
+* `analysis(&self)` to get an instance of `Analysis`
+
+`Analysis` has a ton of methods for IDEs, like `goto_definition`, or
+`completions`. Both inputs and outputs of `Analysis`' methods are formulated in
+terms of files and offsets, and **not** in terms of Rust concepts like structs,
+traits, etc. The "typed" API with Rust specific types is slightly lower in the
+stack, we'll talk about it later.
+
+[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L265-L284
+[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L291-L478
+
+The reason for this separation of `Analysis` and `AnalysisHost` is that we want to apply
+changes "uniquely", but we might also want to fork an `Analysis` and send it to
+another thread for background processing. That is, there is only a single
+`AnalysisHost`, but there may be several (equivalent) `Analysis`.
+
+Note that all of the `Analysis` API methods return `Cancellable<T>`. This is required to
+be responsive in an IDE setting. Sometimes a long-running query is being computed
+and the user types something in the editor and asks for completion. In this
+case, we cancel the long-running computation (so it returns `Err(Cancelled)`),
+apply the change and execute request for completion. We never use stale data to
+answer requests. Under the hood, `AnalysisHost` "remembers" all outstanding
+`Analysis` instances. The `AnalysisHost::apply_change` method cancels all
+`Analysis`es, blocks until all of them are dropped, and then applies changes
+in-place. This may be familiar to Rustaceans who use read-write locks for interior
+mutability.
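+
+To make the shape of this API concrete, here is a minimal, self-contained sketch of the host/snapshot pattern (this is *not* the real rust-analyzer API, just an illustration of the idea):
+
+```rust
+use std::collections::HashMap;
+use std::sync::Arc;
+
+// Toy "host": owns the mutable state.
+struct AnalysisHost {
+    files: Arc<HashMap<u32, String>>, // FileId -> text
+}
+
+// Toy "snapshot": an immutable view that can be sent to another thread.
+struct Analysis {
+    files: Arc<HashMap<u32, String>>,
+}
+
+impl AnalysisHost {
+    fn new() -> Self {
+        AnalysisHost { files: Arc::new(HashMap::new()) }
+    }
+    // Stand-in for `apply_change`: existing snapshots keep seeing the old data.
+    fn apply_change(&mut self, file_id: u32, text: String) {
+        let mut files = (*self.files).clone();
+        files.insert(file_id, text);
+        self.files = Arc::new(files);
+    }
+    // Stand-in for `analysis`: hand out a snapshot of the current state.
+    fn analysis(&self) -> Analysis {
+        Analysis { files: Arc::clone(&self.files) }
+    }
+}
+
+impl Analysis {
+    // Stand-in for a query like `goto_definition`: computed purely from the snapshot.
+    fn file_len(&self, file_id: u32) -> Option<usize> {
+        self.files.get(&file_id).map(|text| text.len())
+    }
+}
+
+fn main() {
+    let mut host = AnalysisHost::new();
+    host.apply_change(1, "fn main() {}".to_string());
+    let snapshot = host.analysis();
+    assert_eq!(snapshot.file_len(1), Some(12));
+}
+```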
+
+Next, let's talk about what the inputs to the `Analysis` are, precisely.
+
+## Inputs
+
+Rust Analyzer never does any I/O itself; all inputs get passed explicitly via
+the `AnalysisHost::apply_change` method, which accepts a single argument, a
+`Change`. [`Change`] is a builder for a single change
+"transaction", so it suffices to study its methods to understand all of the
+input data.
+
+[`Change`]: https://github.com/rust-lang/rust-analyzer/blob/master/crates/base_db/src/change.rs#L14-L89
+
+The `(add|change|remove)_file` methods control the set of the input files, where
+each file has an integer id (`FileId`, picked by the client), text (`String`)
+and a filesystem path. Paths are tricky; they'll be explained below, in the source roots
+section, together with the `add_root` method. The `add_library` method allows us to add a
+group of files which are assumed to rarely change. It's mostly an optimization
+and does not change the fundamental picture.
+
+The `set_crate_graph` method allows us to control how the input files are partitioned
+into compilation units -- crates. It also controls (in theory, not implemented
+yet) `cfg` flags. `CrateGraph` is a directed acyclic graph of crates. Each crate
+has a root `FileId`, a set of active `cfg` flags and a set of dependencies. Each
+dependency is a pair of a crate and a name. It is possible to have two crates
+with the same root `FileId` but different `cfg`-flags/dependencies. This model
+is lower-level than Cargo's model of packages: each Cargo package consists of several
+targets, each of which is a separate crate (or several crates, if you try
+different feature combinations).
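+
+As an illustration, a heavily simplified model of one crate-graph entry might look like this (illustrative types only, not the actual `CrateGraph` definition):
+
+```rust
+#[derive(Clone, Copy, Debug, PartialEq)]
+struct FileId(u32);
+#[derive(Clone, Copy, Debug, PartialEq)]
+struct CrateId(u32);
+
+#[derive(Debug)]
+struct CrateData {
+    root_file: FileId,
+    cfg_flags: Vec<String>,
+    // each dependency is a (name, crate) pair
+    dependencies: Vec<(String, CrateId)>,
+}
+
+fn main() {
+    // The same root file can appear as two distinct crates with different cfg flags.
+    let plain = CrateData {
+        root_file: FileId(1),
+        cfg_flags: vec![],
+        dependencies: vec![("std".to_string(), CrateId(0))],
+    };
+    let with_feature = CrateData {
+        root_file: FileId(1),
+        cfg_flags: vec!["feature=\"extra\"".to_string()],
+        dependencies: vec![("std".to_string(), CrateId(0))],
+    };
+    assert_eq!(plain.root_file, with_feature.root_file);
+    assert_ne!(plain.cfg_flags, with_feature.cfg_flags);
+}
+```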
+
+Procedural macros should become inputs as well, but currently they are not
+supported. A procedural macro will be a black-box `Box<dyn Fn(TokenStream) -> TokenStream>`
+function, and will be inserted into the crate graph just like dependencies.
+
+Soon we'll talk about how we build an LSP server on top of `Analysis`, but first,
+let's deal with that paths issue.
+
+## Source roots (a.k.a. "Filesystems are horrible")
+
+This is a non-essential section, feel free to skip.
+
+The previous section said that the filesystem path is an attribute of a file,
+but this is not the whole truth. Making it an absolute `PathBuf` will be bad for
+several reasons. First, filesystems are full of (platform-dependent) edge cases:
+
+* It's hard (requires a syscall) to decide if two paths are equivalent.
+* Some filesystems are case-insensitive (e.g. macOS).
+* Paths are not necessarily UTF-8.
+* Symlinks can form cycles.
+
+Second, this might hurt the reproducibility and hermeticity of builds. In theory,
+moving a project from `/foo/bar/my-project` to `/spam/eggs/my-project` should
+not change a bit in the output. However, if the absolute path is a part of the
+input, it is at least in theory observable, and *could* affect the output.
+
+Yet another problem is that we really *really* want to avoid doing I/O, but with
+Rust the set of "input" files is not necessarily known up-front. In theory, you
+can have `#[path="/dev/random"] mod foo;`.
+
+To solve (or explicitly refuse to solve) these problems, rust-analyzer uses the
+concept of a "source root". Roughly speaking, source roots are the contents of a
+directory on a file system, like `/home/matklad/projects/rustraytracer/**.rs`.
+
+More precisely, all files (`FileId`s) are partitioned into disjoint
+`SourceRoot`s. Each file has a relative UTF-8 path within the `SourceRoot`.
+`SourceRoot` has an identity (integer ID). Crucially, the root path of the
+source root itself is unknown to the analyzer: A client is supposed to maintain a
+mapping between `SourceRoot` IDs (which are assigned by the client) and actual
+`PathBuf`s. `SourceRoot`s give a sane tree model of the file system to the
+analyzer.
+
+Note that `mod`, `#[path]` and `include!()` can only reference files from the
+same source root. It is of course possible to explicitly add extra files to
+the source root, even `/dev/random`.
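+
+A minimal sketch of this partitioning (illustrative types, not the real `SourceRoot`) could look as follows:
+
+```rust
+use std::collections::HashMap;
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+struct FileId(u32);
+
+// A source root knows files only by their *relative*, UTF-8 path.
+struct SourceRoot {
+    files: HashMap<String, FileId>,
+}
+
+fn main() {
+    let mut root = SourceRoot { files: HashMap::new() };
+    root.files.insert("src/main.rs".to_string(), FileId(1));
+    // The analyzer only ever sees "src/main.rs"; only the client knows that this
+    // root is actually mounted at, say, /home/matklad/projects/rustraytracer.
+    assert_eq!(root.files.get("src/main.rs"), Some(&FileId(1)));
+}
+```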
+
+## Language Server Protocol
+
+Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol. The
+hard part here is managing changes (which can come either from the file system
+or from the editor) and concurrency (we want to spawn background jobs for things
+like syntax highlighting). We use the event loop pattern to manage the zoo, and
+the loop is the [`main_loop_inner`] function. The [`main_loop`] does a one-time
+initialization and tear-down of the resources.
+
+[`main_loop`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L51-L110
+[`main_loop_inner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L156-L258
+
+
+Let's walk through a typical analyzer session!
+
+First, we need to figure out what to analyze. To do this, we run `cargo
+metadata` to learn about Cargo packages for the current workspace and its dependencies,
+and we run `rustc --print sysroot` and scan the "sysroot" (the directory containing the current Rust toolchain's files) to learn about crates like
+`std`. Currently we load this configuration once at the start of the server, but
+it should be possible to dynamically reconfigure it later without restart.
+
+[main_loop.rs#L62-L70](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L62-L70)
+
+The [`ProjectModel`] we get after this step is very Cargo and sysroot specific,
+it needs to be lowered to get the input in the form of `Change`. This
+happens in the [`ServerWorldState::new`] method. Specifically:
+
+* Create a `SourceRoot` for each Cargo package and sysroot.
+* Schedule a filesystem scan of the roots.
+* Create an analyzer's `Crate` for each Cargo **target** and sysroot crate.
+* Set up dependencies between the crates.
+
+[`ProjectModel`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/project_model.rs#L16-L20
+[`ServerWorldState::new`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L38-L160
+
+The results of the scan (which may take a while) will be processed in the body
+of the main loop, just like any other change. Here's where we handle:
+
+* [File system changes](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L194)
+* [Changes from the editor](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L377)
+
+After a single loop's turn, we group the changes into one `Change` and
+[apply] it. This always happens on the main thread and blocks the loop.
+
+[apply]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216
+
+To handle requests, like ["goto definition"], we create an instance of the
+`Analysis` and [`schedule`] the task (which consumes `Analysis`) on the
+threadpool. [The task] calls the corresponding `Analysis` method, while
+massaging the types into the LSP representation. Keep in mind that if we are
+executing "goto definition" on the threadpool and a new change comes in, the
+task will be canceled as soon as the main loop calls `apply_change` on the
+`AnalysisHost`.
+
+["goto definition"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216
+[`schedule`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L426-L455
+[The task]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop/handlers.rs#L205-L223
+
+This concludes the overview of the analyzer's programming *interface*. Next, let's
+dig into the implementation!
+
+## Salsa
+
+The most straightforward way to implement an "apply change, get analysis, repeat"
+API would be to maintain the input state and to compute all possible analysis
+information from scratch after every change. This works, but scales poorly with
+the size of the project. To make this fast, we need to take advantage of the
+fact that most of the changes are small, and that analysis results are unlikely
+to change significantly between invocations.
+
+To do this we use [salsa]: a framework for incremental on-demand computation.
+You can skip the rest of the section if you are familiar with `rustc`'s red-green
+algorithm (which is used for incremental compilation).
+
+[salsa]: https://github.com/salsa-rs/salsa
+
+It's better to refer to salsa's docs to learn about it. Here's a small excerpt:
+
+The key idea of salsa is that you define your program as a set of queries. Every
+query is used like a function `K -> V` that maps from some key of type `K` to a value
+of type `V`. Queries come in two basic varieties:
+
+* **Inputs**: the base inputs to your system. You can change these whenever you
+ like.
+
+* **Functions**: pure functions (no side effects) that transform your inputs
+ into other values. The results of queries are memoized to avoid recomputing
+ them a lot. When you make changes to the inputs, we'll figure out (fairly
+ intelligently) when we can re-use these memoized values and when we have to
+ recompute them.
+
+For further discussion, it's important to understand one bit of "fairly
+intelligently". Suppose we have two functions, `f1` and `f2`, and one input,
+`i`. We call `f1(X)` which in turn calls `f2(Y)` which inspects `i(Z)`. `i(Z)`
+returns some value `V1`, `f2` uses that and returns `R1`, `f1` uses that and
+returns `O`. Now, let's change `i` at `Z` to `V2` from `V1` and try to compute
+`f1(X)` again. Because `f1(X)` (transitively) depends on `i(Z)`, we can't just
+reuse its value as is. However, if `f2(Y)` is *still* equal to `R1` (despite
+`i`'s change), we, in fact, *can* reuse `O` as result of `f1(X)`. And that's how
+salsa works: it recomputes results in *reverse* order, starting from inputs and
+progressing towards outputs, stopping as soon as it sees an intermediate value
+that hasn't changed. If this sounds confusing to you, don't worry: it is
+confusing. This illustration by @killercup might help:
+
+<img alt="step 1" src="https://user-images.githubusercontent.com/1711539/51460907-c5484780-1d6d-11e9-9cd2-d6f62bd746e0.png" width="50%">
+
+<img alt="step 2" src="https://user-images.githubusercontent.com/1711539/51460915-c9746500-1d6d-11e9-9a77-27d33a0c51b5.png" width="50%">
+
+<img alt="step 3" src="https://user-images.githubusercontent.com/1711539/51460920-cda08280-1d6d-11e9-8d96-a782aa57a4d4.png" width="50%">
+
+<img alt="step 4" src="https://user-images.githubusercontent.com/1711539/51460927-d1340980-1d6d-11e9-851e-13c149d5c406.png" width="50%">
+
+## Salsa Input Queries
+
+All analyzer information is stored in a salsa database. `Analysis` and
+`AnalysisHost` types are newtype wrappers for [`RootDatabase`] -- a salsa
+database.
+
+[`RootDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/db.rs#L88-L134
+
+Salsa input queries are defined in [`FilesDatabase`] (which is a part of
+`RootDatabase`). They closely mirror the familiar `Change` structure:
+indeed, what `apply_change` does is set the values of the input queries.
+
+[`FilesDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/input.rs#L150-L174
+
+## From text to semantic model
+
+The bulk of the rust-analyzer is transforming input text into a semantic model of
+Rust code: a web of entities like modules, structs, functions and traits.
+
+An important fact to realize is that (unlike most other languages like C# or
+Java) there is not a one-to-one mapping between the source code and the semantic model. A
+single function definition in the source code might result in several semantic
+functions: for example, the same source file might get included as a module in
+several crates or a single crate might be present in the compilation DAG
+several times, with different sets of `cfg`s enabled. The IDE-specific task of
+mapping source code into a semantic model is inherently imprecise for
+this reason and gets handled by the [`source_binder`].
+
+[`source_binder`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/source_binder.rs
+
+The semantic interface is declared in the [`code_model_api`] module. Each entity is
+identified by an integer ID and has a bunch of methods which take a salsa database
+as an argument and return other entities (which are also IDs). Internally, these
+methods invoke various queries on the database to build the model on demand.
+Here's [the list of queries].
+
+[`code_model_api`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/code_model_api.rs
+[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/hir/src/db.rs#L20-L106
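+
+In sketch form, the pattern looks roughly like this (illustrative names, not the actual `hir` types):
+
+```rust
+#[derive(Clone, Copy, Debug, PartialEq)]
+struct ModuleId(u32);
+#[derive(Clone, Copy)]
+struct FunctionId(u32);
+
+// Stand-in for the salsa database: pretend query results live here.
+struct Database {
+    containing_module: Vec<ModuleId>, // indexed by FunctionId
+}
+
+#[derive(Clone, Copy)]
+struct Function {
+    id: FunctionId,
+}
+
+impl Function {
+    // Entity methods take the database and return other entities (also IDs).
+    fn module(self, db: &Database) -> ModuleId {
+        db.containing_module[self.id.0 as usize]
+    }
+}
+
+fn main() {
+    let db = Database { containing_module: vec![ModuleId(0), ModuleId(1)] };
+    let f = Function { id: FunctionId(1) };
+    assert_eq!(f.module(&db), ModuleId(1));
+}
+```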
+
+The first step of building the model is parsing the source code.
+
+## Syntax trees
+
+An important property of the Rust language is that each file can be parsed in
+isolation. Unlike, say, `C++`, an `include` can't change the meaning of the
+syntax. For this reason, rust-analyzer can build a syntax tree for each "source
+file", which could then be reused by several semantic models if this file
+happens to be a part of several crates.
+
+The representation of syntax trees that rust-analyzer uses is similar to that of `Roslyn`
+and Swift's new [libsyntax]. Swift's docs give an excellent overview of the
+approach, so I skip this part here and instead outline the main characteristics
+of the syntax trees:
+
+* Syntax trees are fully lossless. Converting **any** text to a syntax tree and
+ back is a total identity function. All whitespace and comments are explicitly
+ represented in the tree.
+
+* Syntax nodes have generic `(next|previous)_sibling`, `parent`,
+ `(first|last)_child` functions. You can get from any one node to any other
+ node in the file using only these functions.
+
+* Syntax nodes know their range (start offset and length) in the file.
+
+* Syntax nodes share the ownership of their syntax tree: if you keep a reference
+ to a single function, the whole enclosing file is alive.
+
+* Syntax trees are immutable and the cost of replacing the subtree is
+ proportional to the depth of the subtree. Read Swift's docs to learn how
+ immutable + parent pointers + cheap modification is possible.
+
+* Syntax trees are built on a best-effort basis. All accessor methods return
+  `Option`s. The tree for `fn foo` will contain a function declaration with
+  `None` for the parameter list and body.
+
+* Syntax trees do not know the file they are built from, they only know about
+ the text.
+
+The implementation is based on the generic [rowan] crate on top of which a
+[rust-specific] AST is generated.
+
+[libsyntax]: https://github.com/apple/swift/tree/5e2c815edfd758f9b1309ce07bfc01c4bc20ec23/lib/Syntax
+[rowan]: https://github.com/rust-analyzer/rowan/tree/100a36dc820eb393b74abe0d20ddf99077b61f88
+[rust-specific]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_syntax/src/ast/generated.rs
+
+The next step in constructing the semantic model is ...
+
+## Building a Module Tree
+
+The algorithm for building a tree of modules is to start with a crate root
+(remember, each `Crate` from a `CrateGraph` has a `FileId`), collect all `mod`
+declarations and recursively process child modules. This is handled by the
+[`module_tree_query`], with two slight variations.
+
+[`module_tree_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L116-L123
+
+First, rust-analyzer builds a module tree for all crates in a source root
+simultaneously. The main reason for this is historical (`module_tree` predates
+`CrateGraph`), but this approach also enables accounting for files which are not
+part of any crate. That is, if you create a file but do not include it as a
+submodule anywhere, you still get semantic completion, and you get a warning
+about a free-floating module (the actual warning is not implemented yet).
+
+The second difference is that `module_tree_query` does not *directly* depend on
+the "parse" query (which is confusingly called `source_file`). Why would calling
+the parse directly be bad? Suppose the user changes the file slightly, by adding
+an insignificant whitespace. Adding whitespace changes the parse tree (because
+it includes whitespace), and that means recomputing the whole module tree.
+
+We deal with this problem by introducing an intermediate [`submodules_query`].
+This query processes the syntax tree and extracts a set of declared submodule
+names. Now, changing the whitespace results in `submodules_query` being
+re-executed for a *single* module, but because the result of this query stays
+the same, we don't have to re-execute [`module_tree_query`]. In fact, we only
+need to re-execute it when we add/remove new files or when we change mod
+declarations.
+
+[`submodules_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L41
+
+We store the resulting modules in a `Vec`-based indexed arena. The indices in
+the arena become module IDs. And this brings us to the next topic:
+assigning IDs in the general case.
+
+## Location Interner pattern
+
+One way to assign IDs is how we've dealt with modules: Collect all items into a
+single array in some specific order and use the index in the array as an ID. The
+main drawback of this approach is that these IDs are not stable: Adding a new item can
+shift the IDs of all other items. This works for modules, because adding a module is
+a comparatively rare operation, but would be less convenient for, for example,
+functions.
+
+Another solution here is positional IDs: We can identify a function as "the
+function with name `foo` in a ModuleId(92) module". Such locations are stable:
+adding a new function to the module (unless it is also named `foo`) does not
+change the location. However, such "ID" types cease to be `Copy`able integers and in
+general can become pretty large if we account for nesting (for example: "third parameter of
+the `foo` function of the `bar` `impl` in the `baz` module").
+
+[`LocationInterner`] allows us to combine the benefits of positional and numeric
+IDs. It is a bidirectional append-only map between locations and consecutive
+integers which can "intern" a location and return an integer ID back. The salsa
+database we use includes a couple of [interners]. How to "garbage collect"
+unused locations is an open question.
+
+[`LocationInterner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/loc2id.rs#L65-L71
+[interners]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/db.rs#L22-L23
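+
+A minimal interner along these lines (a sketch, not the actual implementation) fits in a few lines:
+
+```rust
+use std::collections::HashMap;
+use std::hash::Hash;
+
+// Append-only, bidirectional map from locations to small integer IDs.
+struct Interner<Loc: Clone + Eq + Hash> {
+    loc_to_id: HashMap<Loc, u32>,
+    id_to_loc: Vec<Loc>,
+}
+
+impl<Loc: Clone + Eq + Hash> Interner<Loc> {
+    fn new() -> Self {
+        Interner { loc_to_id: HashMap::new(), id_to_loc: Vec::new() }
+    }
+    fn intern(&mut self, loc: Loc) -> u32 {
+        if let Some(&id) = self.loc_to_id.get(&loc) {
+            return id; // the same location always maps to the same ID
+        }
+        let id = self.id_to_loc.len() as u32;
+        self.id_to_loc.push(loc.clone());
+        self.loc_to_id.insert(loc, id);
+        id
+    }
+    fn lookup(&self, id: u32) -> &Loc {
+        &self.id_to_loc[id as usize]
+    }
+}
+
+fn main() {
+    // "the 2nd item of module 7" becomes a stable, Copy-able integer ID.
+    let mut interner = Interner::new();
+    let id = interner.intern((7u32, 2u32));
+    assert_eq!(interner.intern((7, 2)), id);
+    assert_eq!(*interner.lookup(id), (7, 2));
+}
+```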
+
+For example, we use `LocationInterner` to assign IDs to definitions of functions,
+structs, enums, etc. The location, [`DefLoc`] contains two bits of information:
+
+* the ID of the module which contains the definition,
+* the ID of the specific item in the module's source code.
+
+We "could" use a text offset for the location of a particular item, but that would play
+badly with salsa: offsets change after edits. So, as a rule of thumb, we avoid
+using offsets, text ranges or syntax trees as keys and values for queries. What
+we do instead is we store "index" of the item among all of the items of a file
+(so, a positional based ID, but localized to a single file).
+
+[`DefLoc`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L127-L139
+
+One thing we've glossed over for the time being is support for macros. We have
+only proof of concept handling of macros at the moment, but they are extremely
+interesting from an "assigning IDs" perspective.
+
+## Macros and recursive locations
+
+The tricky bit about macros is that they effectively create new source files.
+While we can use `FileId`s to refer to original files, we can't just assign them
+willy-nilly to the pseudo files of macro expansion. Instead, we use a special
+ID, [`HirFileId`] to refer to either a usual file or a macro-generated file:
+
+```rust
+enum HirFileId {
+ FileId(FileId),
+ Macro(MacroCallId),
+}
+```
+
+`MacroCallId` is an interned ID that specifies a particular macro invocation.
+Its `MacroCallLoc` contains (see the sketch after this list):
+
+* `ModuleId` of the containing module
+* `HirFileId` of the containing file or pseudo file
+* an index of this particular macro invocation in this file (positional id
+ again).
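+
+A sketch of these types, based only on the fields listed above (the real definitions differ; `HirFileId` is repeated from the snippet above so the sketch is self-contained), makes the recursion easier to see:
+
+```rust
+struct FileId(u32);
+struct ModuleId(u32);
+struct MacroCallId(u32); // interned from a MacroCallLoc
+
+enum HirFileId {
+    FileId(FileId),     // a real, user-written file
+    Macro(MacroCallId), // a macro-generated pseudo file
+}
+
+struct MacroCallLoc {
+    module: ModuleId,
+    file: HirFileId, // the file (real or generated) containing the invocation
+    index: u32,      // positional ID of the invocation within that file
+}
+
+fn main() {
+    // A macro invoked directly in a source file: the chain bottoms out immediately.
+    let loc = MacroCallLoc {
+        module: ModuleId(0),
+        file: HirFileId::FileId(FileId(1)),
+        index: 0,
+    };
+    let _ = loc;
+}
+```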
+
+Note how `HirFileId` is defined in terms of `MacroCallLoc` which is defined in
+terms of `HirFileId`! This does not recur infinitely though: any chain of
+`HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file
+actually written by the user.
+
+[`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L18-L125
+
+Now that we understand how to identify a definition, in a source or in a
+macro-generated file, we can discuss name resolution a bit.
+
+## Name resolution
+
+Name resolution faces the same problem as the module tree: if we look at the
+syntax tree directly, we'll have to recompute name resolution after every
+modification. The solution to the problem is the same: We [lower] the source code of
+each module into a position-independent representation which does not change if
+we modify bodies of the items. After that we [loop] resolving all imports until
+we've reached a fixed point.
+
+[lower]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L113-L117
+[loop]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres.rs#L186-L196
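+
+The fixed-point loop itself is conceptually simple; here is a toy version (illustrative only, nothing like the real resolver):
+
+```rust
+use std::collections::HashMap;
+
+fn main() {
+    // Imports declared in module `root`: (name introduced, path it comes from).
+    let imports = vec![
+        ("a".to_string(), "root::x".to_string()),
+        ("b".to_string(), "root::a".to_string()), // needs `a` to be resolved first
+    ];
+    // Already-known definitions.
+    let mut resolved: HashMap<String, String> = HashMap::new();
+    resolved.insert("root::x".to_string(), "X".to_string());
+
+    loop {
+        let mut progress = false;
+        for (name, path) in &imports {
+            let key = format!("root::{}", name);
+            if resolved.contains_key(&key) {
+                continue;
+            }
+            if let Some(target) = resolved.get(path).cloned() {
+                resolved.insert(key, target);
+                progress = true;
+            }
+        }
+        if !progress {
+            break; // fixed point: a whole pass resolved nothing new
+        }
+    }
+    assert_eq!(resolved.get("root::b").map(String::as_str), Some("X"));
+}
+```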
+
+And, given all our preparation with IDs and a position-independent representation,
+it is satisfying to [test] that typing inside a function body does not invalidate
+name resolution results.
+
+[test]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/tests.rs#L376
+
+An interesting fact about name resolution is that it "erases" all of the
+intermediate paths from the imports: in the end, we know which items are defined
+and which items are imported in each module, but, if the import was `use
+foo::bar::baz`, we deliberately forget what modules `foo` and `bar` resolve to.
+
+To serve "goto definition" requests on intermediate segments we need this info
+in the IDE, however. Luckily, we need it only for a tiny fraction of imports, so we just ask
+the module explicitly, "What does the path `foo::bar` resolve to?". This is a
+general pattern: we try to compute the minimal possible amount of information
+during analysis while allowing the IDE to ask for additional specific bits.
+
+Name resolution is also a good place to introduce another salsa pattern used
+throughout the analyzer:
+
+## Source Map pattern
+
+Due to an obscure edge case in completion, the IDE needs to know the syntax node of
+a use statement which imported the given completion candidate. We can't just
+store the syntax node as a part of name resolution: this will break
+incrementality, due to the fact that syntax changes after every file
+modification.
+
+We solve this problem during the lowering step of name resolution. The lowering
+query actually produces a *pair* of outputs: `LoweredModule` and [`SourceMap`].
+The `LoweredModule` contains [imports], but in a position-independent form.
+The `SourceMap` contains a mapping from position-independent imports to
+(position-dependent) syntax nodes.
+
+The result of this basic lowering query changes after every modification. But
+there's an intermediate [projection query] which returns only the first
+position-independent part of the lowering. The result of this query is stable.
+Naturally, name resolution [uses] this stable projection query.
+
+[imports]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59
+[`SourceMap`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59
+[projection query]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L97-L103
+[uses]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/query_definitions.rs#L49
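+
+The pattern can be sketched with a toy lowering function (illustrative only; the real queries are salsa queries over HIR, not string processing):
+
+```rust
+use std::sync::Arc;
+
+#[derive(PartialEq)]
+struct LoweredModule {
+    import_paths: Vec<String>, // position-independent
+}
+
+struct SourceMap {
+    import_offsets: Vec<usize>, // position-dependent
+}
+
+// The "real" lowering: returns both halves.
+fn lower_with_source_map(source: &str) -> (Arc<LoweredModule>, Arc<SourceMap>) {
+    let mut paths = Vec::new();
+    let mut offsets = Vec::new();
+    let mut offset = 0;
+    for line in source.lines() {
+        if let Some(rest) = line.trim().strip_prefix("use ") {
+            paths.push(rest.trim_end_matches(';').to_string());
+            offsets.push(offset);
+        }
+        offset += line.len() + 1; // account for the newline
+    }
+    (
+        Arc::new(LoweredModule { import_paths: paths }),
+        Arc::new(SourceMap { import_offsets: offsets }),
+    )
+}
+
+// The projection: name resolution depends only on this stable half.
+fn lower(source: &str) -> Arc<LoweredModule> {
+    lower_with_source_map(source).0
+}
+
+fn main() {
+    let before = "use foo::bar;\nfn f() {}\n";
+    let after = "use foo::bar;\n\nfn f() {}\n"; // only whitespace changed
+    let (_, map_after) = lower_with_source_map(after);
+    // The projection is unchanged, so name resolution would not be re-run...
+    assert!(lower(before) == lower(after));
+    // ...while the IDE can still ask for syntax positions when it needs them.
+    assert_eq!(map_after.import_offsets, vec![0]);
+}
+```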
+
+## Type inference
+
+First of all, implementation of type inference in rust-analyzer was spearheaded
+by [@flodiebold]. [#327] was an awesome Christmas present, thank you, Florian!
+
+Type inference runs at per-function granularity and uses the patterns we've
+discussed previously.
+
+First, we [lower the AST] of a function body into a position-independent
+representation. In this representation, each expression is assigned a
+[positional ID]. Alongside the lowered expression, [a source map] is produced,
+which maps between expression ids and original syntax. This lowering step also
+deals with "incomplete" source trees by replacing missing expressions by an
+explicit `Missing` expression.
+
+Given the lowered body of the function, we can now run [type inference] and
+construct a mapping from `ExprId`s to types.
+
+[@flodiebold]: https://github.com/flodiebold
+[#327]: https://github.com/rust-lang/rust-analyzer/pull/327
+[lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs
+[positional ID]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L13-L15
+[a source map]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L41-L44
+[type inference]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ty.rs#L1208-L1223
+
+## Tying it all together: completion
+
+To conclude the overview of the rust-analyzer, let's trace the request for
+(type-inference powered!) code completion!
+
+We start by [receiving a message] from the language client. We decode the
+message as a request for completion and [schedule it on the threadpool]. This is
+the place where we [catch] canceled errors if, immediately after completion, the
+client sends some modification.
+
+In [the handler], we deserialize LSP requests into rust-analyzer specific data
+types (by converting a file url into a numeric `FileId`), [ask analysis for
+completion] and serialize results into the LSP.
+
+The [completion implementation] is finally the place where we start doing the actual
+work. The first step is to collect the `CompletionContext` -- a struct which
+describes the cursor position in terms of Rust syntax and semantics. For
+example, `function_syntax: Option<&'a ast::FnDef>` stores a reference to
+the enclosing function *syntax*, while `function: Option<hir::Function>` is the
+`Def` for this function.
+
+To construct the context, we first do an ["IntelliJ Trick"]: we insert a dummy
+identifier at the cursor's position and parse this modified file, to get a
+reasonable-looking syntax tree. Then we do a bunch of "classification" routines
+to figure out the context. For example, we [find an ancestor `fn` node] and we get a
+[semantic model] for it (using the lossy `source_binder` infrastructure).
+
+The second step is to run a [series of independent completion routines]. Let's
+take a closer look at [`complete_dot`], which completes fields and methods in
+`foo.bar|`. First we extract a semantic function and a syntactic receiver
+expression out of the `Context`. Then we run type-inference for this single
+function and map our syntactic expression to `ExprId`. Using the ID, we figure
+out the type of the receiver expression. Then we add all fields & methods from
+the type to completion.
+
+[receiving a message]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L203
+[schedule it on the threadpool]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L428
+[catch]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442
+[the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps
+[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444
+[completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L46-L62
+[`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L14-L37
+["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L72-L75
+[find an ancestor `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L116-L120
+[semantic model]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L123
+[series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L52-L59
+[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/complete_dot.rs#L6-L22
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
new file mode 100644
index 000000000..5040643d3
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -0,0 +1,761 @@
+<!---
+lsp_ext.rs hash: 2a188defec26cc7c
+
+If you need to change the above hash to make the test pass, please check if you
+need to adjust this doc as well and ping this issue:
+
+ https://github.com/rust-lang/rust-analyzer/issues/4604
+
+--->
+
+# LSP Extensions
+
+This document describes LSP extensions used by rust-analyzer.
+It's a best-effort document; when in doubt, consult the source (and send a PR with a clarification ;-) ).
+We aim to upstream all non-Rust-specific extensions to the protocol, but this is not a top priority.
+All capabilities are enabled via the `experimental` field of `ClientCapabilities` or `ServerCapabilities`.
+Requests which we hope to upstream live under the `experimental/` namespace.
+Requests which are likely to always remain specific to `rust-analyzer` are under the `rust-analyzer/` namespace.
+
+If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).
+
+## UTF-8 offsets
+
+rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
+
+https://clangd.llvm.org/extensions.html#utf-8-offsets
+
+## Configuration in `initializationOptions`
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
+
+The `initializationOptions` field of the `InitializeParams` of the initialization request should contain the `"rust-analyzer"` section of the configuration.
+
+`rust-analyzer` normally sends a `"workspace/configuration"` request with `{ "items": ["rust-analyzer"] }` payload.
+However, the server can't do this during initialization.
+At the same time some essential configuration parameters are needed early on, before servicing requests.
+For this reason, we ask that `initializationOptions` contains the configuration, as if the server did make a `"workspace/configuration"` request.
+
+If a language client does not know about `rust-analyzer`'s configuration options, it can get sensible defaults by doing any of the following:
+ * Not sending `initializationOptions`
+ * Sending `"initializationOptions": null`
+ * Sending `"initializationOptions": {}`
+
+## Snippet `TextEdit`
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/724
+
+**Experimental Client Capability:** `{ "snippetTextEdit": boolean }`
+
+If this capability is set, `WorkspaceEdit`s returned from `codeAction` requests and `TextEdit`s returned from `textDocument/onTypeFormatting` requests might contain `SnippetTextEdit`s instead of usual `TextEdit`s:
+
+```typescript
+interface SnippetTextEdit extends TextEdit {
+ insertTextFormat?: InsertTextFormat;
+ annotationId?: ChangeAnnotationIdentifier;
+}
+```
+
+```typescript
+export interface TextDocumentEdit {
+ textDocument: OptionalVersionedTextDocumentIdentifier;
+ edits: (TextEdit | SnippetTextEdit)[];
+}
+```
+
+When applying such a code action or text edit, the editor should insert a snippet, with tab stops and placeholders.
+At the moment, rust-analyzer guarantees that only a single edit will have `InsertTextFormat.Snippet`.
+
+### Example
+
+"Add `derive`" code action transforms `struct S;` into `#[derive($0)] struct S;`
+
+### Unresolved Questions
+
+* Where exactly are `SnippetTextEdit`s allowed (only in code actions at the moment)?
+* Can snippets span multiple files (so far, no)?
+
+## `CodeAction` Groups
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/994
+
+**Experimental Client Capability:** `{ "codeActionGroup": boolean }`
+
+If this capability is set, `CodeAction`s returned from the server contain an additional field, `group`:
+
+```typescript
+interface CodeAction {
+ title: string;
+ group?: string;
+ ...
+}
+```
+
+All code actions with the same `group` should be grouped under a single (extendable) entry in the lightbulb menu.
+The set of actions `[ { title: "foo" }, { group: "frobnicate", title: "bar" }, { group: "frobnicate", title: "baz" }]` should be rendered as
+
+```
+💡
+ +-------------+
+ | foo |
+ +-------------+-----+
+ | frobnicate >| bar |
+ +-------------+-----+
+ | baz |
+ +-----+
+```
+
+Alternatively, selecting `frobnicate` could present a user with an additional menu to choose between `bar` and `baz`.
+
+### Example
+
+```rust
+fn main() {
+ let x: Entry/*cursor here*/ = todo!();
+}
+```
+
+Invoking the code action at this position will yield two code actions for importing `Entry` from either `collections::HashMap` or `collections::BTreeMap`, grouped under a single "import" group.
+
+### Unresolved Questions
+
+* Is a fixed two-level structure enough?
+* Should we devise a general way to encode custom interaction protocols for GUI refactorings?
+
+## Parent Module
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/1002
+
+**Experimental Server Capability:** `{ "parentModule": boolean }`
+
+This request is sent from client to server to handle "Goto Parent Module" editor action.
+
+**Method:** `experimental/parentModule`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `Location | Location[] | LocationLink[] | null`
+
+
+### Example
+
+```rust
+// src/main.rs
+mod foo;
+// src/foo.rs
+
+/* cursor here*/
+```
+
+`experimental/parentModule` returns a single `Link` to the `mod foo;` declaration.
+
+### Unresolved Question
+
+* An alternative would be to use a more general "gotoSuper" request, which would work for super methods, super classes and super modules.
+ This is the approach IntelliJ Rust is taking.
+ However, experience shows that super module (which generally has a feeling of navigation between files) should be separate.
+ If you want super module, but the cursor happens to be inside an overridden function, the behavior with single "gotoSuper" request is surprising.
+
+## Join Lines
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/992
+
+**Experimental Server Capability:** `{ "joinLines": boolean }`
+
+This request is sent from client to server to handle "Join Lines" editor action.
+
+**Method:** `experimental/joinLines`
+
+**Request:**
+
+```typescript
+interface JoinLinesParams {
+ textDocument: TextDocumentIdentifier,
+ /// Currently active selections/cursor offsets.
+ /// This is an array to support multiple cursors.
+ ranges: Range[],
+}
+```
+
+**Response:** `TextEdit[]`
+
+### Example
+
+```rust
+fn main() {
+ /*cursor here*/let x = {
+ 92
+ };
+}
+```
+
+`experimental/joinLines` yields (curly braces are automagically removed)
+
+```rust
+fn main() {
+ let x = 92;
+}
+```
+
+### Unresolved Question
+
+* What is the position of the cursor after `joinLines`?
+ Currently, this is left to editor's discretion, but it might be useful to specify on the server via snippets.
+ However, it then becomes unclear how it works with multi cursor.
+
+## On Enter
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/1001
+
+**Experimental Server Capability:** `{ "onEnter": boolean }`
+
+This request is sent from client to server to handle the <kbd>Enter</kbd> key press.
+
+**Method:** `experimental/onEnter`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:**
+
+```typescript
+SnippetTextEdit[]
+```
+
+### Example
+
+```rust
+fn main() {
+ // Some /*cursor here*/ docs
+ let x = 92;
+}
+```
+
+`experimental/onEnter` returns the following snippet
+
+```rust
+fn main() {
+ // Some
+ // $0 docs
+ let x = 92;
+}
+```
+
+The primary goal of `onEnter` is to handle automatic indentation when opening a new line.
+This is not yet implemented.
+The secondary goal is to handle fixing up syntax, like continuing doc strings and comments, and escaping `\n` in string literals.
+
+As proper cursor positioning is the raison d'être for `onEnter`, it uses `SnippetTextEdit`.
+
+### Unresolved Question
+
+* How to deal with synchronicity of the request?
+ One option is to require the client to block until the server returns the response.
+  Another option is to do an OT-style merging of edits from the client and the server.
+  A third option is to do a record-replay: the client applies a heuristic on enter immediately, then applies all of the user's keypresses.
+  When the server is ready with the response, the client rolls back all the changes and applies the recorded actions on top of the correct response.
+* How to deal with multiple carets?
+* Should we extend this to arbitrary typed events and not just `onEnter`?
+
+## Structural Search Replace (SSR)
+
+**Experimental Server Capability:** `{ "ssr": boolean }`
+
+This request is sent from client to server to handle structural search replace -- automated syntax tree based transformation of the source.
+
+**Method:** `experimental/ssr`
+
+**Request:**
+
+```typescript
+interface SsrParams {
+ /// Search query.
+ /// The specific syntax is specified outside of the protocol.
+ query: string,
+ /// If true, only check the syntax of the query and don't compute the actual edit.
+ parseOnly: boolean,
+ /// The current text document. This and `position` will be used to determine in what scope
+ /// paths in `query` should be resolved.
+ textDocument: TextDocumentIdentifier;
+ /// Position where SSR was invoked.
+ position: Position;
+ /// Current selections. Search/replace will be restricted to these if non-empty.
+ selections: Range[];
+}
+```
+
+**Response:**
+
+```typescript
+WorkspaceEdit
+```
+
+### Example
+
+SSR with query `foo($a, $b) ==>> ($a).foo($b)` will transform, e.g., `foo(y + 5, z)` into `(y + 5).foo(z)`.
+
+### Unresolved Question
+
+* Probably needs a search-without-replace mode.
+* Needs a way to limit the scope to certain files.
+
+## Matching Brace
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/999
+
+**Experimental Server Capability:** `{ "matchingBrace": boolean }`
+
+This request is sent from client to server to handle "Matching Brace" editor action.
+
+**Method:** `experimental/matchingBrace`
+
+**Request:**
+
+```typescript
+interface MatchingBraceParams {
+ textDocument: TextDocumentIdentifier,
+ /// Position for each cursor
+ positions: Position[],
+}
+```
+
+**Response:**
+
+```typescript
+Position[]
+```
+
+### Example
+
+```rust
+fn main() {
+ let x: Vec<()>/*cursor here*/ = vec![]
+}
+```
+
+`experimental/matchingBrace` yields the position of `<`.
+In many cases, matching braces can be handled by the editor.
+However, some cases (like disambiguating between generics and comparison operations) need a real parser.
+Moreover, it would be cool if editors didn't need to implement even basic language parsing.
+
+### Unresolved Question
+
+* Should we return a nested brace structure, to allow paredit-like actions of jump *out* of the current brace pair?
+ This is how `SelectionRange` request works.
+* Alternatively, should we perhaps flag certain `SelectionRange`s as being brace pairs?
+
+## Runnables
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/944
+
+**Experimental Server Capability:** `{ "runnables": { "kinds": string[] } }`
+
+This request is sent from client to server to get the list of things that can be run (tests, binaries, `cargo check -p`).
+
+**Method:** `experimental/runnables`
+
+**Request:**
+
+```typescript
+interface RunnablesParams {
+ textDocument: TextDocumentIdentifier;
+ /// If null, compute runnables for the whole file.
+ position?: Position;
+}
+```
+
+**Response:** `Runnable[]`
+
+```typescript
+interface Runnable {
+ label: string;
+ /// If this Runnable is associated with a specific function/module, etc, the location of this item
+ location?: LocationLink;
+    /// Running things is necessarily technology specific; `kind` needs to be advertised via server capabilities,
+    /// the type of `args` is specific to `kind`. The actual running is handled by the client.
+ kind: string;
+ args: any;
+}
+```
+
+rust-analyzer supports only one `kind`, `"cargo"`. The `args` for `"cargo"` look like this:
+
+```typescript
+{
+ workspaceRoot?: string;
+ cargoArgs: string[];
+ cargoExtraArgs: string[];
+ executableArgs: string[];
+ expectTest?: boolean;
+ overrideCargo?: string;
+}
+```
+
+## Open External Documentation
+
+This request is sent from client to server to get a URL to documentation for the symbol under the cursor, if available.
+
+**Method:** `experimental/externalDocs`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `string | null`
+
+
+## Analyzer Status
+
+**Method:** `rust-analyzer/analyzerStatus`
+
+**Request:**
+
+```typescript
+interface AnalyzerStatusParams {
+ /// If specified, show dependencies of the current file.
+ textDocument?: TextDocumentIdentifier;
+}
+```
+
+**Response:** `string`
+
+Returns internal status message, mostly for debugging purposes.
+
+## Reload Workspace
+
+**Method:** `rust-analyzer/reloadWorkspace`
+
+**Request:** `null`
+
+**Response:** `null`
+
+Reloads project information (that is, re-executes `cargo metadata`).
+
+## Server Status
+
+**Experimental Client Capability:** `{ "serverStatusNotification": boolean }`
+
+**Method:** `experimental/serverStatus`
+
+**Notification:**
+
+```typescript
+interface ServerStatusParams {
+ /// `ok` means that the server is completely functional.
+ ///
+ /// `warning` means that the server is partially functional.
+ /// It can answer correctly to most requests, but some results
+ /// might be wrong due to, for example, some missing dependencies.
+ ///
+ /// `error` means that the server is not functional. For example,
+ /// there's a fatal build configuration problem. The server might
+ /// still give correct answers to simple requests, but most results
+ /// will be incomplete or wrong.
+ health: "ok" | "warning" | "error",
+ /// Is there any pending background work which might change the status?
+ /// For example, are dependencies being downloaded?
+ quiescent: boolean,
+ /// Explanatory message to show on hover.
+ message?: string,
+}
+```
+
+This notification is sent from server to client.
+The client can use it to display *persistent* status to the user (in the modeline).
+It is similar to `showMessage`, but is intended for states rather than point-in-time events.
+
+Note that this functionality is intended primarily to inform the end user about the state of the server.
+In particular, it's valid for the client to completely ignore this extension.
+Clients are allowed to use the `health` status to decide whether it's worth sending a request to the server, but this is discouraged.
+
+## Syntax Tree
+
+**Method:** `rust-analyzer/syntaxTree`
+
+**Request:**
+
+```typescript
+interface SyntaxTreeParams {
+ textDocument: TextDocumentIdentifier,
+ range?: Range,
+}
+```
+
+**Response:** `string`
+
+Returns a textual representation of the parse tree for the file/selected region.
+Primarily for debugging, but very useful for all people working on rust-analyzer itself.
+
+## View Hir
+
+**Method:** `rust-analyzer/viewHir`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `string`
+
+Returns a textual representation of the HIR of the function containing the cursor.
+For debugging or when working on rust-analyzer itself.
+
+## View File Text
+
+**Method:** `rust-analyzer/viewFileText`
+
+**Request:** `TextDocumentIdentifier`
+
+**Response:** `string`
+
+Returns the text of a file as seen by the server.
+This is for debugging file sync problems.
+
+## View ItemTree
+
+**Method:** `rust-analyzer/viewItemTree`
+
+**Request:**
+
+```typescript
+interface ViewItemTreeParams {
+ textDocument: TextDocumentIdentifier,
+}
+```
+
+**Response:** `string`
+
+Returns a textual representation of the `ItemTree` of the currently open file, for debugging.
+
+## View Crate Graph
+
+**Method:** `rust-analyzer/viewCrateGraph`
+
+**Request:**
+
+```typescript
+interface ViewCrateGraphParams {
+ full: boolean,
+}
+```
+
+**Response:** `string`
+
+Renders rust-analyzer's crate graph as an SVG image.
+
+If `full` is `true`, the graph includes non-workspace crates (crates.io dependencies as well as sysroot crates).
+
+## Shuffle Crate Graph
+
+**Method:** `rust-analyzer/shuffleCrateGraph`
+
+**Request:** `null`
+
+Shuffles the crate IDs in the crate graph, for debugging purposes.
+
+## Expand Macro
+
+**Method:** `rust-analyzer/expandMacro`
+
+**Request:**
+
+```typescript
+interface ExpandMacroParams {
+ textDocument: TextDocumentIdentifier,
+ position: Position,
+}
+```
+
+**Response:**
+
+```typescript
+interface ExpandedMacro {
+ name: string,
+ expansion: string,
+}
+```
+
+Expands macro call at a given position.
+
+## Hover Actions
+
+**Experimental Client Capability:** `{ "hoverActions": boolean }`
+
+If this capability is set, the `Hover` response returned from the server might contain an additional field, `actions`:
+
+```typescript
+interface Hover {
+ ...
+ actions?: CommandLinkGroup[];
+}
+
+interface CommandLink extends Command {
+ /**
+ * A tooltip for the command, when represented in the UI.
+ */
+ tooltip?: string;
+}
+
+interface CommandLinkGroup {
+ title?: string;
+ commands: CommandLink[];
+}
+```
+
+On the client side, such actions are appended to the bottom of the hover as command links:
+```
+ +-----------------------------+
+ | Hover content |
+ | |
+ +-----------------------------+
+ | _Action1_ | _Action2_ | <- first group, no TITLE
+ +-----------------------------+
+ | TITLE _Action1_ | _Action2_ | <- second group
+ +-----------------------------+
+ ...
+```
+
+## Open Cargo.toml
+
+**Upstream Issue:** https://github.com/rust-lang/rust-analyzer/issues/6462
+
+**Experimental Server Capability:** `{ "openCargoToml": boolean }`
+
+This request is sent from client to server to open the current project's Cargo.toml.
+
+**Method:** `experimental/openCargoToml`
+
+**Request:** `OpenCargoTomlParams`
+
+**Response:** `Location | null`
+
+
+### Example
+
+```rust
+// Cargo.toml
+[package]
+// src/main.rs
+
+/* cursor here*/
+```
+
+`experimental/openCargoToml` returns a single `Link` to the start of the `[package]` keyword.
+
+## Related tests
+
+This request is sent from client to server to get the list of tests for the specified position.
+
+**Method:** `rust-analyzer/relatedTests`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:** `TestInfo[]`
+
+```typescript
+interface TestInfo {
+ runnable: Runnable;
+}
+```
+
+## Hover Range
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/377
+
+**Experimental Server Capability:** `{ "hoverRange": boolean }`
+
+This extension allows passing a `Range` as a `position` field of `HoverParams`.
+The primary use-case is to use the hover request to show the type of the expression currently selected.
+
+```typescript
+interface HoverParams extends WorkDoneProgressParams {
+ textDocument: TextDocumentIdentifier;
+ position: Range | Position;
+}
+```
+Whenever the client sends a `Range`, it is understood as the current selection and any hover included in the range will show the type of the expression if possible.
+
+### Example
+
+```rust
+fn main() {
+ let expression = $01 + 2 * 3$0;
+}
+```
+
+Triggering a hover inside the selection above will show a result of `i32`.
+
+## Move Item
+
+**Upstream Issue:** https://github.com/rust-lang/rust-analyzer/issues/6823
+
+This request is sent from client to server to move the item under the cursor or selection in some direction.
+
+**Method:** `experimental/moveItem`
+
+**Request:** `MoveItemParams`
+
+**Response:** `SnippetTextEdit[]`
+
+```typescript
+export interface MoveItemParams {
+ textDocument: TextDocumentIdentifier,
+ range: Range,
+ direction: Direction
+}
+
+export const enum Direction {
+ Up = "Up",
+ Down = "Down"
+}
+```
+
+## Workspace Symbols Filtering
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/941
+
+**Experimental Server Capability:** `{ "workspaceSymbolScopeKindFiltering": boolean }`
+
+Extends the existing `workspace/symbol` request with the ability to filter symbols by broad scope and kind.
+If this capability is set, the `workspace/symbol` parameters gain two new optional fields:
+
+
+```typescript
+interface WorkspaceSymbolParams {
+ /**
+ * Return only the symbols defined in the specified scope.
+ */
+ searchScope?: WorkspaceSymbolSearchScope;
+ /**
+ * Return only the symbols of specified kinds.
+ */
+ searchKind?: WorkspaceSymbolSearchKind;
+ ...
+}
+
+const enum WorkspaceSymbolSearchScope {
+ Workspace = "workspace",
+ WorkspaceAndDependencies = "workspaceAndDependencies"
+}
+
+const enum WorkspaceSymbolSearchKind {
+ OnlyTypes = "onlyTypes",
+ AllSymbols = "allSymbols"
+}
+```
+
+## Client Commands
+
+**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/642
+
+**Experimental Client Capability:** `{ "commands?": ClientCommandOptions }`
+
+Certain LSP types originating on the server, notably code lenses, embed commands.
+Commands can be serviced either by the server or by the client.
+However, the server doesn't know which commands are available on the client.
+
+This extension allows the client to communicate this information.
+
+
+```typescript
+export interface ClientCommandOptions {
+ /**
+ * The commands to be executed on the client
+ */
+ commands: string[];
+}
+```
diff --git a/src/tools/rust-analyzer/docs/dev/style.md b/src/tools/rust-analyzer/docs/dev/style.md
new file mode 100644
index 000000000..a80eebd63
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/style.md
@@ -0,0 +1,1172 @@
+Our approach to "clean code" is two-fold:
+
+* We generally don't block PRs on style changes.
+* At the same time, all code in rust-analyzer is constantly refactored.
+
+It is explicitly OK for a reviewer to flag only some nits in the PR, and then send a follow-up cleanup PR for things which are easier to explain by example, cc-ing the original author.
+Sending small cleanup PRs (like renaming a single local variable) is encouraged.
+
+When reviewing pull requests, prefer extending this document to leaving
+non-reusable comments on the pull request itself.
+
+# General
+
+## Scale of Changes
+
+Everyone knows that it's better to send small & focused pull requests.
+The problem is, sometimes you *have* to, e.g., rewrite the whole compiler, and that just doesn't fit into a set of isolated PRs.
+
+The main things to keep an eye on are the boundaries between various components.
+There are three kinds of changes:
+
+1. Internals of a single component are changed.
+ Specifically, you don't change any `pub` items.
+ A good example here would be an addition of a new assist.
+
+2. API of a component is expanded.
+ Specifically, you add a new `pub` function which wasn't there before.
+ A good example here would be expansion of assist API, for example, to implement lazy assists or assists groups.
+
+3. A new dependency between components is introduced.
+ Specifically, you add a `pub use` reexport from another crate or you add a new line to the `[dependencies]` section of `Cargo.toml`.
+ A good example here would be adding reference search capability to the assists crates.
+
+For the first group, the change is generally merged as long as:
+
+* it works for the happy case,
+* it has tests,
+* it doesn't panic for the unhappy case.
+
+For the second group, the change would be subjected to quite a bit of scrutiny and iteration.
+The new API needs to be right (or at least easy to change later).
+The actual implementation doesn't matter that much.
+It's very important to minimize the number of changed lines of code for changes of the second kind.
+Often, you start doing a change of the first kind, only to realize that you need to elevate to a change of the second kind.
+In this case, we'll probably ask you to split API changes into a separate PR.
+
+Changes of the third group should be pretty rare, so we don't specify any specific process for them.
+That said, adding an innocent-looking `pub use` is a very simple way to break encapsulation; keep an eye on it!
+
+Note: if you enjoyed this abstract hand-waving about boundaries, you might appreciate
+https://www.tedinski.com/2018/02/06/system-boundaries.html
+
+## Crates.io Dependencies
+
+We try to be very conservative with usage of crates.io dependencies.
+Don't use small "helper" crates (exception: `itertools` and `either` are allowed).
+If there's some general reusable bit of code you need, consider adding it to the `stdx` crate.
+A useful exercise is to read Cargo.lock and see if some *transitive* dependencies do not make sense for rust-analyzer.
+
+**Rationale:** keep compile times low, create ecosystem pressure for faster compiles, reduce the number of things which might break.
+
+## Commit Style
+
+We don't have specific rules around git history hygiene.
+Maintaining clean git history is strongly encouraged, but not enforced.
+Use rebase workflow, it's OK to rewrite history during PR review process.
+After you are happy with the state of the code, please use [interactive rebase](https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History) to squash fixup commits.
+
+Avoid @mentioning people in commit messages and pull request descriptions (they are added to the commit message by bors).
+Such messages create a lot of duplicate notification traffic during rebases.
+
+If possible, write Pull Request titles and descriptions from the user's perspective:
+
+```
+# GOOD
+Make goto definition work inside macros
+
+# BAD
+Use original span for FileId
+```
+
+This makes it easier to prepare a changelog.
+
+If the change adds a new user-visible functionality, consider recording a GIF with [peek](https://github.com/phw/peek) and pasting it into the PR description.
+
+To make writing the release notes easier, you can mark a pull request as a feature, fix, internal change, or minor.
+Minor changes are excluded from the release notes, while the other types are distributed in their corresponding sections.
+There are two ways to mark this:
+
+* use a `feat: `, `feature: `, `fix: `, `internal: ` or `minor: ` prefix in the PR title
+* write `changelog [feature|fix|internal|skip] [description]` in a comment or in the PR description; the description is optional, and will replace the title if included.
+
+These comments don't have to be added by the PR author.
+Editing a comment or the PR description or title is also fine, as long as it happens before the release.
+
+**Rationale:** clean history is potentially useful, but rarely used.
+But many users read changelogs.
+Including a description and GIF suitable for the changelog means less work for the maintainers on the release day.
+
+## Clippy
+
+We don't enforce Clippy.
+A number of default lints have high false positive rate.
+Selectively patching false-positives with `allow(clippy)` is considered worse than not using Clippy at all.
+There's a `cargo lint` command which runs a subset of low-FPR lints.
+Careful tweaking of `lint` is welcome.
+Of course, applying Clippy suggestions is welcome as long as they indeed improve the code.
+
+**Rationale:** see [rust-lang/clippy#5537](https://github.com/rust-lang/rust-clippy/issues/5537).
+
+# Code
+
+## Minimal Tests
+
+Most tests in rust-analyzer start with a snippet of Rust code.
+These snippets should be minimal -- if you copy-paste a snippet of real code into the tests, make sure to remove everything which could be removed.
+
+It also makes sense to format snippets more compactly (for example, by placing enum definitions like `enum E { Foo, Bar }` on a single line),
+as long as they are still readable.
+
+When using multiline fixtures, use unindented raw string literals:
+
+```rust
+ #[test]
+ fn inline_field_shorthand() {
+ check_assist(
+ inline_local_variable,
+ r#"
+struct S { foo: i32}
+fn main() {
+ let $0foo = 92;
+ S { foo }
+}
+"#,
+ r#"
+struct S { foo: i32}
+fn main() {
+ S { foo: 92 }
+}
+"#,
+ );
+ }
+```
+
+**Rationale:**
+
+There are many benefits to this:
+
+* less to read or to scroll past
+* easier to understand what exactly is tested
+* less stuff printed during printf-debugging
+* less time to run the test
+
+Formatting ensures that you can use your editor's "number of selected characters" feature to correlate offsets with the test's source code.
+
+## Marked Tests
+
+Use
+[`cov_mark::hit! / cov_mark::check!`](https://github.com/matklad/cov-mark)
+when testing specific conditions.
+Do not place several marks into a single test or condition.
+Do not reuse marks between several tests.
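+
+For illustration, here is a minimal sketch (the function and mark names are made up) of how a mark ties a code branch to its canonical test:
+
+```rust
+// The branch we care about records a hit...
+fn classify(s: &str) -> &'static str {
+    if s.is_empty() {
+        cov_mark::hit!(classify_empty_input);
+        return "empty";
+    }
+    "non-empty"
+}
+
+#[test]
+fn classifies_empty_input() {
+    // ...and the canonical test fails unless exactly that branch was taken.
+    cov_mark::check!(classify_empty_input);
+    assert_eq!(classify(""), "empty");
+}
+```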
+
+**Rationale:** marks provide an easy way to find the canonical test for each bit of code.
+This makes it much easier to understand.
+More than one mark per test / code branch doesn't add significantly to understanding.
+
+## `#[should_panic]`
+
+Do not use `#[should_panic]` tests.
+Instead, explicitly check for `None`, `Err`, etc.
+
+**Rationale:** `#[should_panic]` is a tool for library authors to make sure that the API does not fail silently when misused.
+`rust-analyzer` is not a library, we don't need to test for API misuse, and we have to handle any user input without panics.
+Panic messages in the logs from the `#[should_panic]` tests are confusing.
+
+## `#[ignore]`
+
+Do not `#[ignore]` tests.
+If the test currently does not work, assert the wrong behavior and add a fixme explaining why it is wrong.
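+
+A minimal sketch (names are made up) of pinning down the current, wrong behavior instead of ignoring the test:
+
+```rust
+fn shorten_path(path: &str) -> String {
+    // FIXME: should strip the leading `crate::`, but doesn't yet.
+    path.to_string()
+}
+
+#[test]
+fn shortens_crate_prefix() {
+    // FIXME: the expected value should be "foo::Bar" once shortening is implemented.
+    assert_eq!(shorten_path("crate::foo::Bar"), "crate::foo::Bar");
+}
+```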
+
+**Rationale:** noticing when the behavior is fixed, making sure that even the wrong behavior is acceptable (i.e., not a panic).
+
+## Function Preconditions
+
+Express function preconditions in types and force the caller to provide them (rather than checking in callee):
+
+```rust
+// GOOD
+fn frobnicate(walrus: Walrus) {
+ ...
+}
+
+// BAD
+fn frobnicate(walrus: Option<Walrus>) {
+ let walrus = match walrus {
+ Some(it) => it,
+ None => return,
+ };
+ ...
+}
+```
+
+**Rationale:** this makes control flow explicit at the call site.
+Call-site has more context, it often happens that the precondition falls out naturally or can be bubbled up higher in the stack.
+
+Avoid splitting precondition check and precondition use across functions:
+
+```rust
+// GOOD
+fn main() {
+ let s: &str = ...;
+ if let Some(contents) = string_literal_contents(s) {
+
+ }
+}
+
+fn string_literal_contents(s: &str) -> Option<&str> {
+ if s.starts_with('"') && s.ends_with('"') {
+ Some(&s[1..s.len() - 1])
+ } else {
+ None
+ }
+}
+
+// BAD
+fn main() {
+ let s: &str = ...;
+ if is_string_literal(s) {
+ let contents = &s[1..s.len() - 1];
+ }
+}
+
+fn is_string_literal(s: &str) -> bool {
+ s.starts_with('"') && s.ends_with('"')
+}
+```
+
+In the "Not as good" version, the precondition that `1` is a valid char boundary is checked in `is_string_literal` and used in `foo`.
+In the "Good" version, the precondition check and usage are checked in the same block, and then encoded in the types.
+
+**Rationale:** non-local code properties degrade under change.
+
+When checking a boolean precondition, prefer `if !invariant` to `if negated_invariant`:
+
+```rust
+// GOOD
+if !(idx < len) {
+ return None;
+}
+
+// BAD
+if idx >= len {
+ return None;
+}
+```
+
+**Rationale:** it's useful to see the invariant relied upon by the rest of the function clearly spelled out.
+
+## Control Flow
+
+As a special case of the previous rule, do not hide control flow inside functions, push it to the caller:
+
+```rust
+// GOOD
+if cond {
+ f()
+}
+
+// BAD
+fn f() {
+ if !cond {
+ return;
+ }
+ ...
+}
+```
+
+## Assertions
+
+Assert liberally.
+Prefer [`stdx::never!`](https://docs.rs/always-assert/0.1.2/always_assert/macro.never.html) to standard `assert!`.
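+
+A minimal sketch of the preferred pattern, assuming the in-tree `stdx` crate re-exports `never!` from the `always-assert` crate (the function and parameter names are illustrative):
+
+```rust
+fn set_cursor(positions: &mut Vec<usize>, idx: usize, pos: usize) {
+    // In debug builds this panics if the condition holds; in release builds it
+    // logs the violation (when logging is enabled) and evaluates to `true`,
+    // so we recover by returning early instead of indexing out of bounds.
+    if stdx::never!(idx >= positions.len()) {
+        return;
+    }
+    positions[idx] = pos;
+}
+```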
+
+**Rationale:** See [cross cutting concern: error handling](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#error-handling).
+
+## Getters & Setters
+
+If a field can have any value without breaking invariants, make the field public.
+Conversely, if there is an invariant, document it, enforce it in the "constructor" function, make the field private, and provide a getter.
+Never provide setters.
+
+Getters should return borrowed data:
+
+```rust
+struct Person {
+ // Invariant: never empty
+ first_name: String,
+ middle_name: Option<String>
+}
+
+// GOOD
+impl Person {
+ fn first_name(&self) -> &str { self.first_name.as_str() }
+ fn middle_name(&self) -> Option<&str> { self.middle_name.as_ref() }
+}
+
+// BAD
+impl Person {
+ fn first_name(&self) -> String { self.first_name.clone() }
+ fn middle_name(&self) -> &Option<String> { &self.middle_name }
+}
+```
+
+**Rationale:** we don't provide public API, it's cheaper to refactor than to pay getters rent.
+Non-local code properties degrade under change, privacy makes invariant local.
+Borrowed owned types (`&String`) disclose irrelevant details about internal representation.
+Irrelevant (neither right nor wrong) things obscure correctness.
+
+## Useless Types
+
+More generally, always prefer types on the left:
+
+```rust
+// GOOD BAD
+&[T] &Vec<T>
+&str &String
+Option<&T> &Option<T>
+&Path &PathBuf
+```
+
+**Rationale:** types on the left are strictly more general.
+Even when generality is not required, consistency is important.
+
+## Constructors
+
+Prefer `Default` to a zero-argument `new` function.
+
+```rust
+// GOOD
+#[derive(Default)]
+struct Foo {
+ bar: Option<Bar>
+}
+
+// BAD
+struct Foo {
+ bar: Option<Bar>
+}
+
+impl Foo {
+ fn new() -> Foo {
+ Foo { bar: None }
+ }
+}
+```
+
+Prefer `Default` even if it has to be implemented manually.
+
+**Rationale:** less typing in the common case, uniformity.
+
+Use `Vec::new` rather than `vec![]`.
+
+**Rationale:** uniformity, strength reduction.
+
+Avoid using "dummy" states to implement a `Default`.
+If a type doesn't have a sensible default or empty value, don't hide that fact.
+Let the caller explicitly decide what the right initial state is.
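+
+A sketch with made-up names: no dummy variant just to satisfy `Default`.
+
+```rust
+// GOOD: constructing the type forces an explicit choice of initial state.
+enum BuildMode { Debug, Release }
+
+// BAD: `Unknown` exists only so that `Default` can be implemented.
+enum BuildModeWithDummy { Unknown, Debug, Release }
+
+impl Default for BuildModeWithDummy {
+    fn default() -> Self { BuildModeWithDummy::Unknown }
+}
+```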
+
+## Functions Over Objects
+
+Avoid creating "doer" objects.
+That is, objects which are created only to execute a single action.
+
+```rust
+// GOOD
+do_thing(arg1, arg2);
+
+// BAD
+ThingDoer::new(arg1, arg2).do();
+```
+
+Note that this concerns only outward API.
+When implementing `do_thing`, it might be very useful to create a context object.
+
+```rust
+pub fn do_thing(arg1: Arg1, arg2: Arg2) -> Res {
+ let mut ctx = Ctx { arg1, arg2 };
+ ctx.run()
+}
+
+struct Ctx {
+ arg1: Arg1, arg2: Arg2
+}
+
+impl Ctx {
+ fn run(self) -> Res {
+ ...
+ }
+}
+```
+
+The difference is that `Ctx` is an impl detail here.
+
+Sometimes a middle ground is acceptable if this can save some busywork:
+
+```rust
+ThingDoer::do(arg1, arg2);
+
+pub struct ThingDoer {
+ arg1: Arg1, arg2: Arg2,
+}
+
+impl ThingDoer {
+ pub fn do(arg1: Arg1, arg2: Arg2) -> Res {
+ ThingDoer { arg1, arg2 }.run()
+ }
+ fn run(self) -> Res {
+ ...
+ }
+}
+```
+
+**Rationale:** not bothering the caller with irrelevant details, not mixing user API with implementor API.
+
+## Functions with many parameters
+
+Avoid creating functions with many optional or boolean parameters.
+Introduce a `Config` struct instead.
+
+```rust
+// GOOD
+pub struct AnnotationConfig {
+ pub binary_target: bool,
+ pub annotate_runnables: bool,
+ pub annotate_impls: bool,
+}
+
+pub fn annotations(
+ db: &RootDatabase,
+ file_id: FileId,
+ config: AnnotationConfig
+) -> Vec<Annotation> {
+ ...
+}
+
+// BAD
+pub fn annotations(
+ db: &RootDatabase,
+ file_id: FileId,
+ binary_target: bool,
+ annotate_runnables: bool,
+ annotate_impls: bool,
+) -> Vec<Annotation> {
+ ...
+}
+```
+
+**Rationale:** reducing churn.
+If the function has many parameters, they most likely change frequently.
+By packing them into a struct we protect all intermediary functions from changes.
+
+Do not implement `Default` for the `Config` struct, the caller has more context to determine better defaults.
+Do not store `Config` as a part of the `state`, pass it explicitly.
+This gives more flexibility for the caller.
+
+If there is variation not only in the input parameters, but in the return type as well, consider introducing a `Command` type.
+
+```rust
+// MAYBE GOOD
+pub struct Query {
+ pub name: String,
+ pub case_sensitive: bool,
+}
+
+impl Query {
+ pub fn all(self) -> Vec<Item> { ... }
+ pub fn first(self) -> Option<Item> { ... }
+}
+
+// MAYBE BAD
+fn query_all(name: String, case_sensitive: bool) -> Vec<Item> { ... }
+fn query_first(name: String, case_sensitive: bool) -> Option<Item> { ... }
+```
+
+## Prefer Separate Functions Over Parameters
+
+If a function has a `bool` or an `Option` parameter, and it is always called with `true`, `false`, `Some` and `None` literals, split the function in two.
+
+```rust
+// GOOD
+fn caller_a() {
+ foo()
+}
+
+fn caller_b() {
+ foo_with_bar(Bar::new())
+}
+
+fn foo() { ... }
+fn foo_with_bar(bar: Bar) { ... }
+
+// BAD
+fn caller_a() {
+ foo(None)
+}
+
+fn caller_b() {
+ foo(Some(Bar::new()))
+}
+
+fn foo(bar: Option<Bar>) { ... }
+```
+
+**Rationale:** more often than not, such functions display "`false sharing`" -- they have additional `if` branching inside for two different cases.
+Splitting the two different control flows into two functions simplifies each path, and removes cross-dependencies between the two paths.
+If there's common code between `foo` and `foo_with_bar`, extract *that* into a common helper.
+
+## Appropriate String Types
+
+When interfacing with OS APIs, use `OsString`, even if the original source of data is utf-8 encoded.
+**Rationale:** cleanly delineates the boundary when the data goes into the OS-land.
+
+Use `AbsPathBuf` and `AbsPath` over `std::Path`.
+**Rationale:** rust-analyzer is a long-lived process which handles several projects at the same time.
+It is important not to leak cwd by accident.
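+
+A sketch with illustrative function names (assuming the in-tree `paths` crate for `AbsPathBuf`):
+
+```rust
+use std::ffi::OsString;
+
+use paths::AbsPathBuf;
+
+// Data handed to the OS keeps its `OsString` form, even if it started out as UTF-8.
+fn spawn_formatter(program: OsString, args: Vec<OsString>) {
+    // ...
+}
+
+// Project roots are absolute by construction, so the cwd can't leak in by accident.
+fn watch_workspace(root: AbsPathBuf) {
+    // ...
+}
+```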
+
+# Premature Pessimization
+
+## Avoid Allocations
+
+Avoid writing code which is slower than it needs to be.
+Don't allocate a `Vec` where an iterator would do, don't allocate strings needlessly.
+
+```rust
+// GOOD
+use itertools::Itertools;
+
+let (first_word, second_word) = match text.split_ascii_whitespace().collect_tuple() {
+ Some(it) => it,
+ None => return,
+};
+
+// BAD
+let words = text.split_ascii_whitespace().collect::<Vec<_>>();
+if words.len() != 2 {
+ return
+}
+```
+
+**Rationale:** not allocating is almost always faster.
+
+## Push Allocations to the Call Site
+
+If allocation is inevitable, let the caller allocate the resource:
+
+```rust
+// GOOD
+fn frobnicate(s: String) {
+ ...
+}
+
+// BAD
+fn frobnicate(s: &str) {
+ let s = s.to_string();
+ ...
+}
+```
+
+**Rationale:** reveals the costs.
+It is also more efficient when the caller already owns the allocation.
+
+## Collection Types
+
+Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`.
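+
+A minimal sketch (the function is made up); the Fx types are drop-in replacements, constructed with `default()`:
+
+```rust
+use rustc_hash::FxHashMap;
+
+fn index_names(names: &[String]) -> FxHashMap<String, usize> {
+    // Note: `FxHashMap::default()` rather than `new()`, because of the custom hasher.
+    let mut map = FxHashMap::default();
+    for (idx, name) in names.iter().enumerate() {
+        map.insert(name.clone(), idx);
+    }
+    map
+}
+```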
+
+**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount.
+
+## Avoid Intermediate Collections
+
+When writing a recursive function to compute a set of things, use an accumulator parameter instead of returning a fresh collection.
+The accumulator goes first in the list of arguments.
+
+```rust
+// GOOD
+pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
+ let mut res = FxHashSet::default();
+ go(&mut res, node);
+ res
+}
+fn go(acc: &mut FxHashSet<Node>, node: Node) {
+ acc.insert(node);
+ for n in node.neighbors() {
+ go(acc, n);
+ }
+}
+
+// BAD
+pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
+ let mut res = FxHashSet::default();
+ res.insert(node);
+ for n in node.neighbors() {
+ res.extend(reachable_nodes(n));
+ }
+ res
+}
+```
+
+**Rationale:** re-use allocations, accumulator style is more concise for complex cases.
+
+## Avoid Monomorphization
+
+Avoid making a lot of code type parametric, *especially* on the boundaries between crates.
+
+```rust
+// GOOD
+fn frobnicate(f: impl FnMut()) {
+ frobnicate_impl(&mut f)
+}
+fn frobnicate_impl(f: &mut dyn FnMut()) {
+ // lots of code
+}
+
+// BAD
+fn frobnicate(f: impl FnMut()) {
+ // lots of code
+}
+```
+
+Avoid `AsRef` polymorphism, it pays back only for widely used libraries:
+
+```rust
+// GOOD
+fn frobnicate(f: &Path) {
+}
+
+// BAD
+fn frobnicate(f: impl AsRef<Path>) {
+}
+```
+
+**Rationale:** Rust uses monomorphization to compile generic code, meaning that for each instantiation of a generic function with concrete types, the function is compiled afresh, *per crate*.
+This allows for exceptionally good performance, but leads to increased compile times.
+Runtime performance obeys 80%/20% rule -- only a small fraction of code is hot.
+Compile time **does not** obey this rule -- all code has to be compiled.
+
+# Style
+
+## Order of Imports
+
+Separate import groups with blank lines.
+Use one `use` per crate.
+
+Module declarations come before the imports.
+Order them in "suggested reading order" for a person new to the code base.
+
+```rust
+mod x;
+mod y;
+
+// First std.
+use std::{ ... }
+
+// Second, external crates (both crates.io crates and other rust-analyzer crates).
+use crate_foo::{ ... }
+use crate_bar::{ ... }
+
+// Then current crate.
+use crate::{}
+
+// Finally, parent and child modules, but prefer `use crate::`.
+use super::{}
+
+// Re-exports are treated as item definitions rather than imports, so they go
+// after imports and modules. Use them sparingly.
+pub use crate::x::Z;
+```
+
+**Rationale:** consistency.
+Reading order is important for new contributors.
+Grouping by crate allows spotting unwanted dependencies easier.
+
+## Import Style
+
+Qualify items from `hir` and `ast`.
+
+```rust
+// GOOD
+use syntax::ast;
+
+fn frobnicate(func: hir::Function, strukt: ast::Struct) {}
+
+// BAD
+use hir::Function;
+use syntax::ast::Struct;
+
+fn frobnicate(func: Function, strukt: Struct) {}
+```
+
+**Rationale:** avoids name clashes, makes the layer clear at a glance.
+
+When implementing traits from `std::fmt` or `std::ops`, import the module:
+
+```rust
+// GOOD
+use std::fmt;
+
+impl fmt::Display for RenameError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { .. }
+}
+
+// BAD
+impl std::fmt::Display for RenameError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { .. }
+}
+
+// BAD
+use std::ops::Deref;
+
+impl Deref for Widget {
+ type Target = str;
+ fn deref(&self) -> &str { .. }
+}
+```
+
+**Rationale:** overall, less typing.
+Makes it clear that a trait is implemented, rather than used.
+
+Avoid local `use MyEnum::*` imports.
+**Rationale:** consistency.
+
+Prefer `use crate::foo::bar` to `use super::bar` or `use self::bar::baz`.
+**Rationale:** consistency, this is the style which works in all cases.
+
+By default, avoid re-exports.
+**Rationale:** for non-library code, re-exports introduce two ways to use something and allow for inconsistency.
+
+## Order of Items
+
+Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on.
+People read things from top to bottom, so place most important things first.
+
+Specifically, if all items except one are private, always put the non-private item on top.
+
+```rust
+// GOOD
+pub(crate) fn frobnicate() {
+ Helper::act()
+}
+
+#[derive(Default)]
+struct Helper { stuff: i32 }
+
+impl Helper {
+ fn act(&self) {
+
+ }
+}
+
+// BAD
+#[derive(Default)]
+struct Helper { stuff: i32 }
+
+pub(crate) fn frobnicate() {
+ Helper::act()
+}
+
+impl Helper {
+ fn act(&self) {
+
+ }
+}
+```
+
+If there's a mixture of private and public items, put public items first.
+
+Put `struct`s and `enum`s first, functions and impls last. Order type declarations in top-down manner.
+
+```rust
+// GOOD
+struct Parent {
+ children: Vec<Child>
+}
+
+struct Child;
+
+impl Parent {
+}
+
+impl Child {
+}
+
+// BAD
+struct Child;
+
+impl Child {
+}
+
+struct Parent {
+ children: Vec<Child>
+}
+
+impl Parent {
+}
+```
+
+**Rationale:** easier to get the sense of the API by visually scanning the file.
+If function bodies are folded in the editor, the source code should read as documentation for the public API.
+
+## Context Parameters
+
+Some parameters are threaded unchanged through many function calls.
+They determine the "context" of the operation.
+Pass such parameters first, not last.
+If there are several context parameters, consider packing them into a `struct Ctx` and passing it as `&self`.
+
+```rust
+// GOOD
+fn dfs(graph: &Graph, v: Vertex) -> usize {
+ let mut visited = FxHashSet::default();
+ return go(graph, &mut visited, v);
+
+ fn go(graph: &Graph, visited: &mut FxHashSet<Vertex>, v: usize) -> usize {
+ ...
+ }
+}
+
+// BAD
+fn dfs(v: Vertex, graph: &Graph) -> usize {
+ fn go(v: usize, graph: &Graph, visited: &mut FxHashSet<Vertex>) -> usize {
+ ...
+ }
+
+ let mut visited = FxHashSet::default();
+ go(v, graph, &mut visited)
+}
+```
+
+**Rationale:** consistency.
+Context-first works better when the non-context parameter is a lambda.
+
+## Variable Naming
+
+Use boring and long names for local variables ([yay code completion](https://github.com/rust-lang/rust-analyzer/pull/4162#discussion_r417130973)).
+The default name is a lowercased name of the type: `global_state: GlobalState`.
+Avoid ad-hoc acronyms and contractions, but use the ones that exist consistently (`db`, `ctx`, `acc`).
+Prefer American spelling (color, behavior).
+
+Default names:
+
+* `res` -- "result of the function" local variable
+* `it` -- I don't really care about the name
+* `n_foos` -- number of foos (prefer this to `foo_count`)
+* `foo_idx` -- index of `foo`
+
+Many names in rust-analyzer conflict with keywords.
+We use mangled names instead of `r#ident` syntax:
+
+```
+crate -> krate
+enum -> enum_
+fn -> func
+impl -> imp
+macro -> mac
+mod -> module
+struct -> strukt
+trait -> trait_
+type -> ty
+```
+
+**Rationale:** consistency.
+
+## Early Returns
+
+Do use early returns:
+
+```rust
+// GOOD
+fn foo() -> Option<Bar> {
+ if !condition() {
+ return None;
+ }
+
+ Some(...)
+}
+
+// BAD
+fn foo() -> Option<Bar> {
+ if condition() {
+ Some(...)
+ } else {
+ None
+ }
+}
+```
+
+**Rationale:** reduce cognitive stack usage.
+
+Use `return Err(err)` to throw an error:
+
+```rust
+// GOOD
+fn f() -> Result<(), ()> {
+ if condition {
+ return Err(());
+ }
+ Ok(())
+}
+
+// BAD
+fn f() -> Result<(), ()> {
+ if condition {
+ Err(())?;
+ }
+ Ok(())
+}
+```
+
+**Rationale:** `return` has type `!`, which allows the compiler to flag dead
+code (`Err(...)?` is of unconstrained generic type `T`).
+
+## Comparisons
+
+When doing multiple comparisons use `<`/`<=`, avoid `>`/`>=`.
+
+```rust
+// GOOD
+assert!(lo <= x && x <= hi);
+assert!(r1 < l2 || r2 < l1);
+assert!(x < y);
+assert!(0 < x);
+
+// BAD
+assert!(x >= lo && x <= hi);
+assert!(r1 < l2 || l1 > r2);
+assert!(y > x);
+assert!(x > 0);
+```
+
+**Rationale:** Less-than comparisons are more intuitive; they correspond spatially to the [real line](https://en.wikipedia.org/wiki/Real_line).
+
+## If-let
+
+Avoid `if let ... { } else { }` construct, use `match` instead.
+
+```rust
+// GOOD
+match ctx.expected_type.as_ref() {
+ Some(expected_type) => completion_ty == expected_type && !expected_type.is_unit(),
+ None => false,
+}
+
+// BAD
+if let Some(expected_type) = ctx.expected_type.as_ref() {
+ completion_ty == expected_type && !expected_type.is_unit()
+} else {
+ false
+}
+```
+
+**Rationale:** `match` is almost always more compact.
+The `else` branch can get a more precise pattern: `None` or `Err(_)` instead of `_`.
+
+## Match Ergonomics
+
+Don't use the `ref` keyword.
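+
+A small sketch (the function is made up) of the same match written with and without `ref`:
+
+```rust
+fn name_len(person: &Option<String>) -> usize {
+    // GOOD: match ergonomics bind `name` as `&String` automatically.
+    match person {
+        Some(name) => name.len(),
+        None => 0,
+    }
+    // BAD: `match *person { Some(ref name) => name.len(), ... }` needs the explicit `ref`.
+}
+```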
+
+**Rationale:** consistency & simplicity.
+`ref` was required before [match ergonomics](https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md).
+Today, it is redundant.
+Between `ref` and match ergonomics, the latter is more ergonomic in most cases, and is simpler (does not require a keyword).
+
+## Empty Match Arms
+
+Use `=> (),` when a match arm is intentionally empty:
+
+```rust
+// GOOD
+match result {
+ Ok(_) => (),
+ Err(err) => error!("{}", err),
+}
+
+// BAD
+match result {
+ Ok(_) => {}
+ Err(err) => error!("{}", err),
+}
+```
+
+**Rationale:** consistency.
+
+## Functional Combinators
+
+Use higher-order monadic combinators like `map` and `then` when they are a natural choice; don't bend the code to fit into some combinator.
+If writing a chain of combinators creates friction, replace them with control flow constructs: `for`, `if`, `match`.
+Mostly avoid `bool::then` and `Option::filter`.
+
+```rust
+// GOOD
+if !x.cond() {
+ return None;
+}
+Some(x)
+
+// BAD
+Some(x).filter(|it| it.cond())
+```
+
+This rule is more "soft" then others, and boils down mostly to taste.
+The guiding principle behind this rule is that code should be dense in computation, and sparse in the number of expressions per line.
+The second example contains *less* computation -- the `filter` function is an indirection for `if`, it doesn't do any useful work by itself.
+At the same time, it is more crowded -- it takes more time to visually scan it.
+
+**Rationale:** consistency, playing to language's strengths.
+Rust has first-class support for imperative control flow constructs like `for` and `if`, while functions are less first-class due to lack of universal function type, currying, and non-first-class effects (`?`, `.await`).
+
+## Turbofish
+
+Prefer type ascription over the turbofish.
+When ascribing types, avoid `_`:
+
+```rust
+// GOOD
+let mutable: Vec<T> = old.into_iter().map(|it| builder.make_mut(it)).collect();
+
+// BAD
+let mutable: Vec<_> = old.into_iter().map(|it| builder.make_mut(it)).collect();
+
+// BAD
+let mutable = old.into_iter().map(|it| builder.make_mut(it)).collect::<Vec<_>>();
+```
+
+**Rationale:** consistency, readability.
+If the compiler struggles to infer the type, a human will as well.
+Having the result type specified up-front helps with understanding what the chain of iterator methods is doing.
+
+## Helper Functions
+
+Avoid creating single-use helper functions:
+
+```rust
+// GOOD
+let buf = {
+ let mut buf = get_empty_buf(&mut arena);
+ buf.add_item(item);
+ buf
+};
+
+// BAD
+let buf = prepare_buf(&mut arena, item);
+
+...
+
+fn prepare_buf(arena: &mut Arena, item: Item) -> ItemBuf {
+ let mut res = get_empty_buf(&mut arena);
+ res.add_item(item);
+ res
+}
+```
+
+Exception: if you want to make use of `return` or `?`.
+
+**Rationale:** single-use functions change frequently, adding or removing parameters adds churn.
+A block serves just as well to delineate a bit of logic, but has access to all the context.
+Re-using originally single-purpose function often leads to bad coupling.
+
+## Local Helper Functions
+
+Put nested helper functions at the end of the enclosing function (this requires using an explicit `return` statement).
+Don't nest more than one level deep.
+
+```rust
+// GOOD
+fn dfs(graph: &Graph, v: Vertex) -> usize {
+ let mut visited = FxHashSet::default();
+ return go(graph, &mut visited, v);
+
+ fn go(graph: &Graph, visited: &mut FxHashSet<Vertex>, v: usize) -> usize {
+ ...
+ }
+}
+
+// BAD
+fn dfs(graph: &Graph, v: Vertex) -> usize {
+ fn go(graph: &Graph, visited: &mut FxHashSet<Vertex>, v: usize) -> usize {
+ ...
+ }
+
+ let mut visited = FxHashSet::default();
+ go(graph, &mut visited, v)
+}
+```
+
+**Rationale:** consistency, improved top-down readability.
+
+## Helper Variables
+
+Introduce helper variables freely, especially for multiline conditions:
+
+```rust
+// GOOD
+let rustfmt_not_installed =
+ captured_stderr.contains("not installed") || captured_stderr.contains("not available");
+
+match output.status.code() {
+ Some(1) if !rustfmt_not_installed => Ok(None),
+ _ => Err(format_err!("rustfmt failed:\n{}", captured_stderr)),
+};
+
+// BAD
+match output.status.code() {
+ Some(1)
+ if !captured_stderr.contains("not installed")
+ && !captured_stderr.contains("not available") => Ok(None),
+ _ => Err(format_err!("rustfmt failed:\n{}", captured_stderr)),
+};
+```
+
+**Rationale:** Like blocks, single-use variables are a cognitively cheap abstraction, as they have access to all the context.
+Extra variables help during debugging, they make it easy to print/view important intermediate results.
+Giving a name to a condition inside an `if` expression often improves clarity and leads to nicely formatted code.
+
+## Token names
+
+Use `T![foo]` instead of `SyntaxKind::FOO_KW`.
+
+```rust
+// GOOD
+match p.current() {
+ T![true] | T![false] => true,
+ _ => false,
+}
+
+// BAD
+
+match p.current() {
+ SyntaxKind::TRUE_KW | SyntaxKind::FALSE_KW => true,
+ _ => false,
+}
+```
+
+**Rationale:** The macro uses the familiar Rust syntax, avoiding ambiguities like "is this a brace or bracket?".
+
+## Documentation
+
+Style inline code comments as proper sentences.
+Start with a capital letter, end with a dot.
+
+```rust
+// GOOD
+
+// Only simple single segment paths are allowed.
+MergeBehavior::Last => {
+ tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
+}
+
+// BAD
+
+// only simple single segment paths are allowed
+MergeBehavior::Last => {
+ tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
+}
+```
+
+**Rationale:** writing a sentence (or maybe even a paragraph) rather than just "a comment" creates a more appropriate frame of mind.
+It tricks you into writing down more of the context you keep in your head while coding.
+
+For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines.
+If the line is too long, you want to split the sentence in two :-)
+
+**Rationale:** much easier to edit the text and read the diff, see [this link](https://asciidoctor.org/docs/asciidoc-recommended-practices/#one-sentence-per-line).
diff --git a/src/tools/rust-analyzer/docs/dev/syntax.md b/src/tools/rust-analyzer/docs/dev/syntax.md
new file mode 100644
index 000000000..30e137013
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/dev/syntax.md
@@ -0,0 +1,534 @@
+# Syntax in rust-analyzer
+
+## About the guide
+
+This guide describes the current state of syntax trees and parsing in rust-analyzer as of 2020-01-09 ([link to commit](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6)).
+
+## Source Code
+
+The things described are implemented in three places:
+
+* [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees.
+* [ra_syntax](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API.
+ Nothing in rust-analyzer except this crate knows about `rowan`.
+* [parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/parser) crate parses input tokens into an `ra_syntax` tree
+
+## Design Goals
+
+* Syntax trees are lossless, or full fidelity. All comments and whitespace get preserved.
+* Syntax trees are semantic-less. They describe *strictly* the structure of a sequence of characters, they don't have hygiene, name resolution or type information attached.
+* Syntax trees are simple value types. It is possible to create trees for a syntax without any external context.
+* Syntax trees have intuitive traversal API (parent, children, siblings, etc).
+* Parsing is lossless (even if the input is invalid, the tree produced by the parser represents it exactly).
+* Parsing is resilient (even if the input is invalid, parser tries to see as much syntax tree fragments in the input as it can).
+* Performance is important, it's OK to use `unsafe` if it means better memory/cpu usage.
+* Keep the parser and the syntax tree isolated from each other, such that they can vary independently.
+
+## Trees
+
+### Overview
+
+The syntax tree consists of three layers:
+
+* GreenNodes
+* SyntaxNodes (aka RedNode)
+* AST
+
+Of these, only GreenNodes store the actual data; the other two layers are (non-trivial) views into the green tree.
+Red-green terminology comes from Roslyn ([link](https://ericlippert.com/2012/06/08/red-green-trees/)) and gives the name to the `rowan` library. Green and syntax nodes are defined in rowan, ast is defined in rust-analyzer.
+
+Syntax trees are a semi-transient data structure.
+In general, frontend does not keep syntax trees for all files in memory.
+Instead, it *lowers* syntax trees to more compact and rigid representation, which is not full-fidelity, but which can be mapped back to a syntax tree if so desired.
+
+
+### GreenNode
+
+GreenNode is a purely-functional tree with arbitrary arity. Conceptually, it is equivalent to the following run-of-the-mill struct:
+
+```rust
+#[derive(PartialEq, Eq, Clone, Copy)]
+struct SyntaxKind(u16);
+
+#[derive(PartialEq, Eq, Clone)]
+struct Node {
+ kind: SyntaxKind,
+ text_len: usize,
+ children: Vec<Arc<Either<Node, Token>>>,
+}
+
+#[derive(PartialEq, Eq, Clone)]
+struct Token {
+ kind: SyntaxKind,
+ text: String,
+}
+```
+
+All the differences between the above sketch and the real implementation are strictly due to optimizations.
+
+Points of note:
+* The tree is untyped. Each node has a "type tag", `SyntaxKind`.
+* Interior and leaf nodes are distinguished on the type level.
+* Trivia and non-trivia tokens are not distinguished on the type level.
+* Each token carries its full text.
+* The original text can be recovered by concatenating the texts of all tokens in order.
+* Accessing a child of particular type (for example, parameter list of a function) generally involves linearly traversing the children, looking for a specific `kind`.
+* Modifying the tree is roughly `O(depth)`.
+ We don't make special efforts to guarantee that the depth is not linear, but, in practice, syntax trees are branchy and shallow.
+* If a mandatory (grammar-wise) node is missing from the input, it's just missing from the tree.
+* If an extra erroneous input is present, it is wrapped into a node with `ERROR` kind, and treated just like any other node.
+* Parser errors are not a part of the syntax tree.
+
+An input like `fn f() { 90 + 2 }` might be parsed as
+
+```
+FN@0..17
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..4
+ IDENT@3..4 "f"
+ PARAM_LIST@4..6
+ L_PAREN@4..5 "("
+ R_PAREN@5..6 ")"
+ WHITESPACE@6..7 " "
+ BLOCK_EXPR@7..17
+ L_CURLY@7..8 "{"
+ WHITESPACE@8..9 " "
+ BIN_EXPR@9..15
+ LITERAL@9..11
+ INT_NUMBER@9..11 "90"
+ WHITESPACE@11..12 " "
+ PLUS@12..13 "+"
+ WHITESPACE@13..14 " "
+ LITERAL@14..15
+ INT_NUMBER@14..15 "2"
+ WHITESPACE@15..16 " "
+ R_CURLY@16..17 "}"
+```
+
+#### Optimizations
+
+(significant amount of implementation work here was done by [CAD97](https://github.com/cad97)).
+
+To reduce the amount of allocations, the GreenNode is a [DST](https://doc.rust-lang.org/reference/dynamically-sized-types.html), which uses a single allocation for header and children. Thus, it is only usable behind a pointer.
+
+```
+*-----------+------+----------+------------+--------+--------+-----+--------*
+| ref_count | kind | text_len | n_children | child1 | child2 | ... | childn |
+*-----------+------+----------+------------+--------+--------+-----+--------*
+```
+
+To more compactly store the children, we box *both* interior nodes and tokens, and represent
+`Either<Arc<Node>, Arc<Token>>` as a single pointer with a tag in the last bit.
+
+To avoid allocating EVERY SINGLE TOKEN on the heap, syntax trees use interning.
+Because the tree is fully immutable, it's valid to structurally share subtrees.
+For example, in `1 + 1`, there will be a *single* token for `1` with ref count 2; the same goes for the ` ` whitespace token.
+Interior nodes are shared as well (for example in `(1 + 1) * (1 + 1)`).
+
+Note that the result of the interning is an `Arc<Node>`.
+That is, it's not an index into interning table, so you don't have to have the table around to do anything with the tree.
+Each tree is fully self-contained (although different trees might share parts).
+Currently, the interner is created per-file, but it will be easy to use a per-thread or per-some-context one.
+
+We use a `TextSize`, a newtyped `u32`, to store the length of the text.
+
+We currently use `SmolStr`, a string type with a small-string optimization, to store text.
+This was mostly relevant *before* we implemented tree interning, to avoid allocating common keywords and identifiers. We should switch to storing text data alongside the interned tokens.
+
+#### Alternative designs
+
+##### Dealing with trivia
+
+In the above model, whitespace is not treated specially.
+Another alternative (used by swift and roslyn) is to explicitly divide the set of tokens into trivia and non-trivia tokens, and represent non-trivia tokens as
+
+```rust
+struct Token {
+ kind: NonTriviaTokenKind,
+ text: String,
+ leading_trivia: Vec<TriviaToken>,
+ trailing_trivia: Vec<TriviaToken>,
+}
+```
+
+The tree then contains only non-trivia tokens.
+
+Another approach (from Dart) is to, in addition to a syntax tree, link all the tokens into a bidirectional link list.
+That way, the tree again contains only non-trivia tokens.
+
+Explicit trivia nodes, like in `rowan`, are used by IntelliJ.
+
+##### Accessing Children
+
+As noted before, accessing a specific child in the node requires a linear traversal of the children (though we can skip tokens, because the tag is encoded in the pointer itself).
+It is possible to recover O(1) access with another representation.
+We explicitly store optional and missing (required by the grammar, but not present) nodes.
+That is, we use `Option<Node>` for children.
+We also remove trivia tokens from the tree.
+This way, each child kind generally occupies a fixed position in a parent, and we can use index access to fetch it.
+The cost is that we now need to allocate space for all not-present optional nodes.
+So, `fn foo() {}` will have slots for visibility, unsafeness, attributes, abi and return type.
+
+IntelliJ uses linear traversal.
+Roslyn and Swift do `O(1)` access.
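+
+A sketch of what such a fixed-slot node could look like (this is *not* what `rowan` does; the types and field names are illustrative):
+
+```rust
+struct NodeId(u32);
+struct TokenId(u32);
+
+// Every child kind of `fn foo() {}` gets a fixed slot; absent children are `None`.
+struct FnNodeSlots {
+    attributes: Option<NodeId>,
+    visibility: Option<NodeId>,
+    unsafeness: Option<TokenId>,
+    abi: Option<NodeId>,
+    name: Option<NodeId>,
+    param_list: Option<NodeId>,
+    ret_type: Option<NodeId>,
+    body: Option<NodeId>,
+}
+```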
+
+##### Mutable Trees
+
+IntelliJ uses mutable trees.
+Overall, it creates a lot of additional complexity.
+However, the API for *editing* syntax trees is nice.
+
+For example the assist to move generic bounds to where clause has this code:
+
+```kotlin
+ for typeBound in typeBounds {
+ typeBound.typeParamBounds?.delete()
+}
+```
+
+Modeling this with immutable trees is possible, but annoying.
+
+### Syntax Nodes
+
+A purely functional green tree is not super-convenient to use.
+The biggest problem is accessing parents (there are no parent pointers!).
+But there are also "identify" issues.
+Let's say you want to write a code which builds a list of expressions in a file: `fn collect_expressions(file: GreenNode) -> HashSet<GreenNode>`.
+For the input like
+
+```rust
+fn main() {
+ let x = 90i8;
+ let x = x + 2;
+ let x = 90i64;
+ let x = x + 2;
+}
+```
+
+both copies of the `x + 2` expression are represented by equal (and, with interning in mind, actually the same) green nodes.
+Green trees just can't differentiate between the two.
+
+`SyntaxNode` adds parent pointers and identity semantics to green nodes.
+They can be called cursors or [zippers](https://en.wikipedia.org/wiki/Zipper_(data_structure)) (fun fact: zipper is a derivative (as in ′) of a data structure).
+
+Conceptually, a `SyntaxNode` looks like this:
+
+```rust
+type SyntaxNode = Arc<SyntaxData>;
+
+struct SyntaxData {
+ offset: usize,
+ parent: Option<SyntaxNode>,
+ green: Arc<GreenNode>,
+}
+
+impl SyntaxNode {
+ fn new_root(root: Arc<GreenNode>) -> SyntaxNode {
+ Arc::new(SyntaxData {
+ offset: 0,
+ parent: None,
+ green: root,
+ })
+ }
+ fn parent(&self) -> Option<SyntaxNode> {
+ self.parent.clone()
+ }
+ fn children(&self) -> impl Iterator<Item = SyntaxNode> {
+ let mut offset = self.offset;
+ self.green.children().map(|green_child| {
+ let child_offset = offset;
+ offset += green_child.text_len;
+ Arc::new(SyntaxData {
+ offset: child_offset,
+ parent: Some(Arc::clone(self)),
+ green: Arc::clone(green_child),
+ })
+ })
+ }
+}
+
+impl PartialEq for SyntaxNode {
+ fn eq(&self, other: &SyntaxNode) -> bool {
+ self.offset == other.offset
+ && Arc::ptr_eq(&self.green, &other.green)
+ }
+}
+```
+
+Points of note:
+
+* SyntaxNode remembers its parent node (and, transitively, the path to the root of the tree)
+* SyntaxNode knows its *absolute* text offset in the whole file
+* Equality is based on identity. Comparing nodes from different trees does not make sense.
+
+#### Optimization
+
+The reality is different though :-)
+Traversal of trees is a common operation, and it makes sense to optimize it.
+In particular, the above code allocates and does atomic operations during a traversal.
+
+To get rid of atomics, `rowan` uses non thread-safe `Rc`.
+This is OK because tree traversals mostly (always, in the case of rust-analyzer) run on a single thread.
+If you need to send a `SyntaxNode` to another thread, you can send a pair of the **root** `GreenNode` (which is thread-safe) and a `Range<usize>`.
+The other thread can restore the `SyntaxNode` by traversing from the root green node and looking for a node with specified range.
+You can also use a similar trick to store a `SyntaxNode`.
+That is, a data structure that holds a `(GreenNode, Range<usize>)` will be `Sync`.
+However, rust-analyzer goes even further.
+It treats trees as semi-transient and instead of storing a `GreenNode`, it generally stores just the id of the file from which the tree originated: `(FileId, Range<usize>)`.
+The `SyntaxNode` is then restored by reparsing the file and traversing it from the root.
+With this trick, rust-analyzer holds only a small number of trees in memory at the same time, which reduces memory usage.
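+
+A sketch with illustrative types: such a "stored" node is just a file id plus a text range, and is trivially `Sync`:
+
+```rust
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+struct FileId(u32);
+
+// Restoring the `SyntaxNode` means reparsing `file_id` and walking down
+// from the root until a node with exactly this range is found.
+struct StoredSyntaxNode {
+    file_id: FileId,
+    range: std::ops::Range<usize>,
+}
+```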
+
+Additionally, only the root `SyntaxNode` owns an `Arc` to the (root) `GreenNode`.
+All other `SyntaxNode`s point to corresponding `GreenNode`s with a raw pointer.
+They also point to the parent (and, consequently, to the root) with an owning `Rc`, so this is sound.
+In other words, one needs *one* arc bump when initiating a traversal.
+
+To get rid of allocations, `rowan` takes advantage of `SyntaxNode: !Sync` and uses a thread-local free list of `SyntaxNode`s.
+In a typical traversal, you only directly hold a few `SyntaxNode`s at a time (and their ancestors indirectly), so a free list proportional to the depth of the tree removes all allocations in a typical case.
+
+So, while traversal is not exactly incrementing a pointer, it's still pretty cheap: TLS + rc bump!
+
+Traversal also yields (cheap) owned nodes, which improves ergonomics quite a bit.
+
+#### Alternative Designs
+
+##### Memoized RedNodes
+
+C# and Swift follow the design where the red nodes are memoized, which would look roughly like this in Rust:
+
+```rust
+type SyntaxNode = Arc<SyntaxData>;
+
+struct SyntaxData {
+ offset: usize,
+ parent: Option<SyntaxNode>,
+ green: Arc<GreenNode>,
+ children: Vec<OnceCell<SyntaxNode>>,
+}
+```
+
+This allows using true pointer equality for comparison of identities of `SyntaxNodes`.
+rust-analyzer used to have this design as well, but we've since switched to cursors.
+The main problem with memoizing the red nodes is that it more than doubles the memory requirements for fully realized syntax trees.
+In contrast, cursors generally retain only a path to the root.
+C# combats increased memory usage by using weak references.
+
+### AST
+
+`GreenTree`s are untyped and homogeneous, because it makes accommodating error nodes, arbitrary whitespace and comments natural, and because it makes it possible to write generic tree traversals.
+However, when working with a specific node, like a function definition, one would want a strongly typed API.
+
+This is what is provided by the AST layer. AST nodes are transparent wrappers over untyped syntax nodes:
+
+```rust
+pub trait AstNode {
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxNode;
+}
+```
+
+Concrete nodes are generated (there are 117 of them), and look roughly like this:
+
+```rust
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnDef {
+ syntax: SyntaxNode,
+}
+
+impl AstNode for FnDef {
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+        match syntax.kind() {
+ FN => Some(FnDef { syntax }),
+ _ => None,
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ &self.syntax
+ }
+}
+
+impl FnDef {
+ pub fn param_list(&self) -> Option<ParamList> {
+ self.syntax.children().find_map(ParamList::cast)
+ }
+ pub fn ret_type(&self) -> Option<RetType> {
+ self.syntax.children().find_map(RetType::cast)
+ }
+ pub fn body(&self) -> Option<BlockExpr> {
+ self.syntax.children().find_map(BlockExpr::cast)
+ }
+ // ...
+}
+```
+
+Variants like expressions, patterns or items are modeled with `enum`s, which also implement `AstNode`:
+
+```rust
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ FnDef(FnDef),
+ TypeAliasDef(TypeAliasDef),
+ ConstDef(ConstDef),
+}
+
+impl AstNode for AssocItem {
+ ...
+}
+```
+
+Shared AST substructures are modeled via (object safe) traits:
+
+```rust
+trait HasVisibility: AstNode {
+ fn visibility(&self) -> Option<Visibility>;
+}
+
+impl HasVisibility for FnDef {
+ fn visibility(&self) -> Option<Visibility> {
+ self.syntax.children().find_map(Visibility::cast)
+ }
+}
+```
+
+Points of note:
+
+* Like `SyntaxNode`s, AST nodes are cheap-to-clone, pointer-sized owned values.
+* All "fields" are optional, to accommodate incomplete and/or erroneous source code.
+* It's always possible to go from an ast node to an untyped `SyntaxNode`.
+* It's possible to go in the opposite direction with a checked cast.
+* `enum`s allow modeling of arbitrary intersecting subsets of AST types.
+* Most of rust-analyzer works with the ast layer, with notable exceptions of:
+ * macro expansion, which needs access to raw tokens and works with `SyntaxNode`s
+ * some IDE-specific features like syntax highlighting are more conveniently implemented over a homogeneous `SyntaxNode` tree
+
+#### Alternative Designs
+
+##### Semantic Full AST
+
+In IntelliJ the AST layer (dubbed **P**rogram **S**tructure **I**nterface) can have semantics attached, and is usually backed by either syntax tree, indices, or metadata from compiled libraries.
+The backend for PSI can change dynamically.
+
+### Syntax Tree Recap
+
+At its core, the syntax tree is a purely functional n-ary tree, which stores text at the leaf nodes and node "kinds" at all nodes.
+A cursor layer is added on top, which gives owned, cheap to clone nodes with identity semantics, parent links and absolute offsets.
+An AST layer is added on top, which reifies each node `Kind` as a separate Rust type with the corresponding API.
+
+## Parsing
+
+The (green) tree is constructed by a DFS "traversal" of the desired tree structure:
+
+```rust
+pub struct GreenNodeBuilder { ... }
+
+impl GreenNodeBuilder {
+ pub fn new() -> GreenNodeBuilder { ... }
+
+ pub fn token(&mut self, kind: SyntaxKind, text: &str) { ... }
+
+ pub fn start_node(&mut self, kind: SyntaxKind) { ... }
+ pub fn finish_node(&mut self) { ... }
+
+ pub fn finish(self) -> GreenNode { ... }
+}
+```
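+
+For illustration, a sketch of how a parser might drive this builder to produce a tree for the small input `fn f() {}` (assuming `SyntaxKind` constants named like the kinds in the dump shown earlier):
+
+```rust
+fn build_fn_f(mut builder: GreenNodeBuilder) -> GreenNode {
+    builder.start_node(FN);
+    builder.token(FN_KW, "fn");
+    builder.token(WHITESPACE, " ");
+    builder.start_node(NAME);
+    builder.token(IDENT, "f");
+    builder.finish_node();
+    builder.start_node(PARAM_LIST);
+    builder.token(L_PAREN, "(");
+    builder.token(R_PAREN, ")");
+    builder.finish_node();
+    builder.token(WHITESPACE, " ");
+    builder.start_node(BLOCK_EXPR);
+    builder.token(L_CURLY, "{");
+    builder.token(R_CURLY, "}");
+    builder.finish_node();
+    builder.finish_node(); // closes FN
+    builder.finish()
+}
+```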
+
+The parser, ultimately, needs to invoke the `GreenNodeBuilder`.
+There are two principal sources of inputs for the parser:
+ * source text, which contains trivia tokens (whitespace and comments)
+ * token trees from macros, which lack trivia
+
+Additionally, input tokens do not correspond 1-to-1 with output tokens.
+For example, two consecutive `>` tokens might be glued, by the parser, into a single `>>`.
+
+For these reasons, the parser crate defines callback interfaces for both input tokens and output trees.
+The explicit glue layer then bridges various gaps.
+
+The parser interface looks like this:
+
+```rust
+pub struct Token {
+ pub kind: SyntaxKind,
+ pub is_joined_to_next: bool,
+}
+
+pub trait TokenSource {
+ fn current(&self) -> Token;
+ fn lookahead_nth(&self, n: usize) -> Token;
+ fn is_keyword(&self, kw: &str) -> bool;
+
+ fn bump(&mut self);
+}
+
+pub trait TreeSink {
+ fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
+
+ fn start_node(&mut self, kind: SyntaxKind);
+ fn finish_node(&mut self);
+
+ fn error(&mut self, error: ParseError);
+}
+
+pub fn parse(
+ token_source: &mut dyn TokenSource,
+ tree_sink: &mut dyn TreeSink,
+) { ... }
+```
+
+Points of note:
+
+* The parser and the syntax tree are independent, they live in different crates neither of which depends on the other.
+* The parser doesn't know anything about textual contents of the tokens, with an isolated hack for checking contextual keywords.
+* For gluing tokens, the `TreeSink::token` might advance further than one atomic token ahead.
+
+### Reporting Syntax Errors
+
+Syntax errors are not stored directly in the tree.
+The primary motivation for this is that the syntax tree is not necessarily produced by the parser; it may also be assembled manually from pieces (which happens all the time in refactorings).
+Instead, the parser reports errors to an error sink, which stores them in a `Vec`.
+If possible, errors are not reported during parsing and are postponed for a separate validation step.
+For example, the parser accepts visibility modifiers on trait methods, but then a separate tree traversal flags all such visibilities as erroneous.
+
+### Macros
+
+The primary difficulty with macros is that individual tokens have identities, which need to be preserved in the syntax tree for hygiene purposes.
+This is handled by the `TreeSink` layer.
+Specifically, `TreeSink` constructs the tree in lockstep with draining the original token stream.
+In the process, it records which tokens of the tree correspond to which tokens of the input, by using text ranges to identify syntax tokens.
+The end result is that parsing an expanded code yields a syntax tree and a mapping of text-ranges of the tree to original tokens.
+
+To deal with precedence in cases like `$expr * 1`, we use special invisible parentheses, which are explicitly handled by the parser.
+
+### Whitespace & Comments
+
+The parser does not see whitespace nodes.
+Instead, they are attached to the tree in the `TreeSink` layer.
+
+For example, in
+
+```rust
+// non doc comment
+fn foo() {}
+```
+
+the comment will be (heuristically) made a child of the function node.
+
+### Incremental Reparse
+
+Green trees are cheap to modify, so incremental reparse works by patching a previous tree, without maintaining any additional state.
+The reparse is based on heuristic: we try to contain a change to a single `{}` block, and reparse only this block.
+To do this, we maintain the invariant that, even for invalid code, curly braces are always paired correctly.
+
+In practice, incremental reparsing doesn't actually matter much for IDE use-cases, parsing from scratch seems to be fast enough.
+
+### Parsing Algorithm
+
+We use a boring hand-crafted recursive descent + Pratt parsing combination, with a special effort to continue parsing when an error is detected.
+
+### Parser Recap
+
+The parser itself defines traits for token sequence input and syntax tree output.
+It doesn't care where the tokens come from, or what the resulting syntax tree looks like.
diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc
new file mode 100644
index 000000000..b0f2f1614
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc
@@ -0,0 +1,620 @@
+[[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`)::
++
+--
+Placeholder expression to use for missing expressions in assists.
+--
+[[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
++
+--
+Warm up caches on project load.
+--
+[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `0`)::
++
+--
+How many worker threads to handle priming caches. The default `0` means to pick automatically.
+--
+[[rust-analyzer.cargo.autoreload]]rust-analyzer.cargo.autoreload (default: `true`)::
++
+--
+Automatically refresh project info via `cargo metadata` on
+`Cargo.toml` or `.cargo/config.toml` changes.
+--
+[[rust-analyzer.cargo.buildScripts.enable]]rust-analyzer.cargo.buildScripts.enable (default: `true`)::
++
+--
+Run build scripts (`build.rs`) for more precise code analysis.
+--
+[[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`)::
++
+--
+Override the command rust-analyzer uses to run build scripts and
+build procedural macros. The command is required to output json
+and should therefore include `--message-format=json` or a similar
+option.
+
+By default, a cargo invocation will be constructed for the configured
+targets and features, with the following base command line:
+
+```bash
+cargo check --quiet --workspace --message-format=json --all-targets
+```
+.
+--
+[[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`)::
++
+--
+Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+avoid checking unnecessary things.
+--
+[[rust-analyzer.cargo.features]]rust-analyzer.cargo.features (default: `[]`)::
++
+--
+List of features to activate.
+
+Set this to `"all"` to pass `--all-features` to cargo.
+--
+[[rust-analyzer.cargo.noDefaultFeatures]]rust-analyzer.cargo.noDefaultFeatures (default: `false`)::
++
+--
+Whether to pass `--no-default-features` to cargo.
+--
+[[rust-analyzer.cargo.noSysroot]]rust-analyzer.cargo.noSysroot (default: `false`)::
++
+--
+Internal config for debugging, disables loading of sysroot crates.
+--
+[[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
++
+--
+Compilation target override (target triple).
+--
+[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
++
+--
+Unsets `#[cfg(test)]` for the specified crates.
+--
+[[rust-analyzer.checkOnSave.allTargets]]rust-analyzer.checkOnSave.allTargets (default: `true`)::
++
+--
+Check all targets and tests (`--all-targets`).
+--
+[[rust-analyzer.checkOnSave.command]]rust-analyzer.checkOnSave.command (default: `"check"`)::
++
+--
+Cargo command to use for `cargo check`.
+--
+[[rust-analyzer.checkOnSave.enable]]rust-analyzer.checkOnSave.enable (default: `true`)::
++
+--
+Run specified `cargo check` command for diagnostics on save.
+--
+[[rust-analyzer.checkOnSave.extraArgs]]rust-analyzer.checkOnSave.extraArgs (default: `[]`)::
++
+--
+Extra arguments for `cargo check`.
+--
+[[rust-analyzer.checkOnSave.features]]rust-analyzer.checkOnSave.features (default: `null`)::
++
+--
+List of features to activate. Defaults to
+`#rust-analyzer.cargo.features#`.
+
+Set to `"all"` to pass `--all-features` to Cargo.
+--
+[[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`)::
++
+--
+Whether to pass `--no-default-features` to Cargo. Defaults to
+`#rust-analyzer.cargo.noDefaultFeatures#`.
+--
+[[rust-analyzer.checkOnSave.overrideCommand]]rust-analyzer.checkOnSave.overrideCommand (default: `null`)::
++
+--
+Override the command rust-analyzer uses instead of `cargo check` for
+diagnostics on save. The command is required to output json and
+should therefore include `--message-format=json` or a similar option.
+
+If you're changing this because you're using some tool wrapping
+Cargo, you might also want to change
+`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+
+An example command would be:
+
+```bash
+cargo check --workspace --message-format=json --all-targets
+```
+.
+--
+[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`)::
++
+--
+Check for a specific target. Defaults to
+`#rust-analyzer.cargo.target#`.
+--
+[[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`)::
++
+--
+Toggles the additional completions that automatically add imports when completed.
+Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+--
+[[rust-analyzer.completion.autoself.enable]]rust-analyzer.completion.autoself.enable (default: `true`)::
++
+--
+Toggles the additional completions that automatically show method calls and field accesses
+with `self` prefixed to them when inside a method.
+--
+[[rust-analyzer.completion.callable.snippets]]rust-analyzer.completion.callable.snippets (default: `"fill_arguments"`)::
++
+--
+Whether to add parentheses and argument snippets when completing functions.
+--
+[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
++
+--
+Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+--
+[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`)::
++
+--
+Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+--
+[[rust-analyzer.completion.snippets.custom]]rust-analyzer.completion.snippets.custom::
++
+--
+Default:
+----
+{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }
+----
+Custom completion snippets.
+
+--
+[[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
++
+--
+List of rust-analyzer diagnostics to disable.
+--
+[[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`)::
++
+--
+Whether to show native rust-analyzer diagnostics.
+--
+[[rust-analyzer.diagnostics.experimental.enable]]rust-analyzer.diagnostics.experimental.enable (default: `false`)::
++
+--
+Whether to show experimental rust-analyzer diagnostics that might
+have more false positives than usual.
+--
+[[rust-analyzer.diagnostics.remapPrefix]]rust-analyzer.diagnostics.remapPrefix (default: `{}`)::
++
+--
+Map of prefixes to be substituted when parsing diagnostic file paths.
+This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
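+
+For example, if the code was built with `--remap-path-prefix=/home/user/project=/project` (hypothetical paths), the reverse mapping would be:
+
+----
+{ "/project": "/home/user/project" }
+----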
+--
+[[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`)::
++
+--
+List of warnings that should be displayed with hint severity.
+
+The warnings will be indicated by faded text or three dots in code
+and will not show up in the `Problems Panel`.
+--
+[[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`)::
++
+--
+List of warnings that should be displayed with info severity.
+
+The warnings will be indicated by a blue squiggly underline in code
+and a blue icon in the `Problems Panel`.
+--
+[[rust-analyzer.files.excludeDirs]]rust-analyzer.files.excludeDirs (default: `[]`)::
++
+--
+These directories will be ignored by rust-analyzer. They are
+relative to the workspace root, and globs are not supported. You may
+also need to add the folders to Code's `files.watcherExclude`.
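+
+For example, to ignore a (hypothetical) `third_party` directory in both rust-analyzer and Code's file watcher, the settings might look like:
+
+----
+{
+    "rust-analyzer.files.excludeDirs": ["third_party"],
+    "files.watcherExclude": { "**/third_party/**": true }
+}
+----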
+--
+[[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`)::
++
+--
+Controls file watching implementation.
+--
+[[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`)::
++
+--
+Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+--
+[[rust-analyzer.highlightRelated.exitPoints.enable]]rust-analyzer.highlightRelated.exitPoints.enable (default: `true`)::
++
+--
+Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+--
+[[rust-analyzer.highlightRelated.references.enable]]rust-analyzer.highlightRelated.references.enable (default: `true`)::
++
+--
+Enables highlighting of related references while the cursor is on any identifier.
+--
+[[rust-analyzer.highlightRelated.yieldPoints.enable]]rust-analyzer.highlightRelated.yieldPoints.enable (default: `true`)::
++
+--
+Enables highlighting of all yield points while the cursor is on any `async` or `await` keywords.
+--
+[[rust-analyzer.hover.actions.debug.enable]]rust-analyzer.hover.actions.debug.enable (default: `true`)::
++
+--
+Whether to show `Debug` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.enable]]rust-analyzer.hover.actions.enable (default: `true`)::
++
+--
+Whether to show HoverActions in Rust files.
+--
+[[rust-analyzer.hover.actions.gotoTypeDef.enable]]rust-analyzer.hover.actions.gotoTypeDef.enable (default: `true`)::
++
+--
+Whether to show `Go to Type Definition` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.implementations.enable]]rust-analyzer.hover.actions.implementations.enable (default: `true`)::
++
+--
+Whether to show `Implementations` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.references.enable]]rust-analyzer.hover.actions.references.enable (default: `false`)::
++
+--
+Whether to show `References` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.run.enable]]rust-analyzer.hover.actions.run.enable (default: `true`)::
++
+--
+Whether to show `Run` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.documentation.enable]]rust-analyzer.hover.documentation.enable (default: `true`)::
++
+--
+Whether to show documentation on hover.
+--
+[[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`)::
++
+--
+Use markdown syntax for links in hover.
+--
+[[rust-analyzer.imports.granularity.enforce]]rust-analyzer.imports.granularity.enforce (default: `false`)::
++
+--
+Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+--
+[[rust-analyzer.imports.granularity.group]]rust-analyzer.imports.granularity.group (default: `"crate"`)::
++
+--
+How imports should be grouped into use statements.
+--
+[[rust-analyzer.imports.group.enable]]rust-analyzer.imports.group.enable (default: `true`)::
++
+--
+Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+--
+[[rust-analyzer.imports.merge.glob]]rust-analyzer.imports.merge.glob (default: `true`)::
++
+--
+Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+--
+[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
++
+--
+The path structure for newly inserted paths to use.
+--
+[[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`)::
++
+--
+Whether to show inlay type hints for binding modes.
+--
+[[rust-analyzer.inlayHints.chainingHints.enable]]rust-analyzer.inlayHints.chainingHints.enable (default: `true`)::
++
+--
+Whether to show inlay type hints for method chains.
+--
+[[rust-analyzer.inlayHints.closingBraceHints.enable]]rust-analyzer.inlayHints.closingBraceHints.enable (default: `true`)::
++
+--
+Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+--
+[[rust-analyzer.inlayHints.closingBraceHints.minLines]]rust-analyzer.inlayHints.closingBraceHints.minLines (default: `25`)::
++
+--
+Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+to always show them).
+--
+[[rust-analyzer.inlayHints.closureReturnTypeHints.enable]]rust-analyzer.inlayHints.closureReturnTypeHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for return types of closures.
+--
+[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for elided lifetimes in function signatures.
+--
+[[rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames]]rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames (default: `false`)::
++
+--
+Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+--
+[[rust-analyzer.inlayHints.maxLength]]rust-analyzer.inlayHints.maxLength (default: `25`)::
++
+--
+Maximum length for inlay hints. Set to null to have an unlimited length.
+--
+[[rust-analyzer.inlayHints.parameterHints.enable]]rust-analyzer.inlayHints.parameterHints.enable (default: `true`)::
++
+--
+Whether to show function parameter name inlay hints at the call
+site.
+--
+[[rust-analyzer.inlayHints.reborrowHints.enable]]rust-analyzer.inlayHints.reborrowHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for compiler-inserted reborrows.
+--
+[[rust-analyzer.inlayHints.renderColons]]rust-analyzer.inlayHints.renderColons (default: `true`)::
++
+--
+Whether to render leading colons for type hints, and trailing colons for parameter hints.
+--
+[[rust-analyzer.inlayHints.typeHints.enable]]rust-analyzer.inlayHints.typeHints.enable (default: `true`)::
++
+--
+Whether to show inlay type hints for variables.
+--
+[[rust-analyzer.inlayHints.typeHints.hideClosureInitialization]]rust-analyzer.inlayHints.typeHints.hideClosureInitialization (default: `false`)::
++
+--
+Whether to hide inlay type hints for `let` statements that initialize to a closure.
+Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+--
+[[rust-analyzer.inlayHints.typeHints.hideNamedConstructor]]rust-analyzer.inlayHints.typeHints.hideNamedConstructor (default: `false`)::
++
+--
+Whether to hide inlay type hints for constructors.
+--
+[[rust-analyzer.joinLines.joinAssignments]]rust-analyzer.joinLines.joinAssignments (default: `true`)::
++
+--
+Join lines merges consecutive declaration and initialization of an assignment.
+--
+[[rust-analyzer.joinLines.joinElseIf]]rust-analyzer.joinLines.joinElseIf (default: `true`)::
++
+--
+Join lines inserts `else` between consecutive `if`s.
+--
+[[rust-analyzer.joinLines.removeTrailingComma]]rust-analyzer.joinLines.removeTrailingComma (default: `true`)::
++
+--
+Join lines removes trailing commas.
+--
+[[rust-analyzer.joinLines.unwrapTrivialBlock]]rust-analyzer.joinLines.unwrapTrivialBlock (default: `true`)::
++
+--
+Join lines unwraps trivial blocks.
+--
+[[rust-analyzer.lens.debug.enable]]rust-analyzer.lens.debug.enable (default: `true`)::
++
+--
+Whether to show `Debug` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.enable]]rust-analyzer.lens.enable (default: `true`)::
++
+--
+Whether to show CodeLens in Rust files.
+--
+[[rust-analyzer.lens.forceCustomCommands]]rust-analyzer.lens.forceCustomCommands (default: `true`)::
++
+--
+Internal config: use custom client-side commands even when the
+client doesn't set the corresponding capability.
+--
+[[rust-analyzer.lens.implementations.enable]]rust-analyzer.lens.implementations.enable (default: `true`)::
++
+--
+Whether to show `Implementations` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.adt.enable]]rust-analyzer.lens.references.adt.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Struct, Enum, and Union.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.enumVariant.enable]]rust-analyzer.lens.references.enumVariant.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Enum Variants.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.method.enable]]rust-analyzer.lens.references.method.enable (default: `false`)::
++
+--
+Whether to show `Method References` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.trait.enable]]rust-analyzer.lens.references.trait.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Trait.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.run.enable]]rust-analyzer.lens.run.enable (default: `true`)::
++
+--
+Whether to show `Run` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.linkedProjects]]rust-analyzer.linkedProjects (default: `[]`)::
++
+--
+Disable project auto-discovery in favor of an explicitly specified
+set of projects.
+
+Elements must be paths pointing to `Cargo.toml`,
+`rust-project.json`, or JSON objects in `rust-project.json` format.
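+
+A sketch with hypothetical paths:
+
+----
+["crates/app/Cargo.toml", "tools/rust-project.json"]
+----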
+--
+[[rust-analyzer.lru.capacity]]rust-analyzer.lru.capacity (default: `null`)::
++
+--
+Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+--
+[[rust-analyzer.notifications.cargoTomlNotFound]]rust-analyzer.notifications.cargoTomlNotFound (default: `true`)::
++
+--
+Whether to show `can't find Cargo.toml` error message.
+--
+[[rust-analyzer.procMacro.attributes.enable]]rust-analyzer.procMacro.attributes.enable (default: `true`)::
++
+--
+Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+--
+[[rust-analyzer.procMacro.enable]]rust-analyzer.procMacro.enable (default: `true`)::
++
+--
+Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+--
+[[rust-analyzer.procMacro.ignored]]rust-analyzer.procMacro.ignored (default: `{}`)::
++
+--
+These proc-macros will be ignored when trying to expand them.
+
+This config takes a map of crate names with the exported proc-macro names to ignore as values.
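+
+For example, to ignore a proc-macro named `my_macro` exported by a (hypothetical) crate `my-macros`:
+
+----
+{ "my-macros": ["my_macro"] }
+----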
+--
+[[rust-analyzer.procMacro.server]]rust-analyzer.procMacro.server (default: `null`)::
++
+--
+Internal config, path to proc-macro server executable (typically,
+this is rust-analyzer itself, but we override this in tests).
+--
+[[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`)::
++
+--
+Command to be executed instead of 'cargo' for runnables.
+--
+[[rust-analyzer.runnables.extraArgs]]rust-analyzer.runnables.extraArgs (default: `[]`)::
++
+--
+Additional arguments to be passed to cargo for runnables such as
+tests or binaries. For example, it may be `--release`.
+--
+[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
++
+--
+Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+projects, or "discover" to try to automatically find it if the `rustc-dev` component
+is installed.
+
+Any project which uses rust-analyzer with the rustc private
+crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+
+This option does not take effect until rust-analyzer is restarted.
+--
+[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`)::
++
+--
+Additional arguments to `rustfmt`.
+--
+[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`)::
++
+--
+Advanced option, fully override the command rust-analyzer uses for
+formatting.
+--
+[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`)::
++
+--
+Enables the use of rustfmt's unstable range formatting command for the
+`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+available on a nightly build.
+--
+[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
++
+--
+Use semantic tokens for strings.
+
+In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+By disabling semantic tokens for strings, other grammars can be used to highlight
+their contents.
+--
+[[rust-analyzer.signatureInfo.detail]]rust-analyzer.signatureInfo.detail (default: `"full"`)::
++
+--
+Show full signature of the callable. Only shows parameters if disabled.
+--
+[[rust-analyzer.signatureInfo.documentation.enable]]rust-analyzer.signatureInfo.documentation.enable (default: `true`)::
++
+--
+Show documentation.
+--
+[[rust-analyzer.typing.autoClosingAngleBrackets.enable]]rust-analyzer.typing.autoClosingAngleBrackets.enable (default: `false`)::
++
+--
+Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+--
+[[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`)::
++
+--
+Workspace symbol search kind.
+--
+[[rust-analyzer.workspace.symbol.search.limit]]rust-analyzer.workspace.symbol.search.limit (default: `128`)::
++
+--
+Limits the number of items returned from a workspace symbol search (defaults to 128).
+Some clients like VS Code issue new searches on result filtering and don't require all results to be returned in the initial search.
+Other clients require all results upfront and might need a higher limit.
+--
+[[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`)::
++
+--
+Workspace symbol search scope.
+--
diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc
new file mode 100644
index 000000000..999a6437a
--- /dev/null
+++ b/src/tools/rust-analyzer/docs/user/manual.adoc
@@ -0,0 +1,863 @@
+= User Manual
+:toc: preamble
+:sectanchors:
+:page-layout: post
+:icons: font
+:source-highlighter: rouge
+:experimental:
+
+////
+IMPORTANT: the master copy of this document lives in the https://github.com/rust-lang/rust-analyzer repository
+////
+
+At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
+This manual focuses on a specific usage of the library -- running it as part of a server that implements the
+https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
+The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
+
+[TIP]
+====
+[.lead]
+To improve this document, send a pull request: +
+https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
+
+The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
+====
+
+If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
+
+== Installation
+
+In theory, one should be able to just install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> and have it automatically work with any editor.
+We are not there yet, so some editor specific setup is required.
+
+Additionally, rust-analyzer needs the sources of the standard library.
+If the source code is not present, rust-analyzer will attempt to install it automatically.
+
+To add the sources manually, run the following command:
+
+```bash
+$ rustup component add rust-src
+```
+
+=== Toolchain
+
+Only the latest stable standard library source is officially supported for use with rust-analyzer.
+If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
+You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
+
+If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
+For example, with VS Code or coc-rust-analyzer:
+
+[source,json]
+----
+{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
+----
+
+=== VS Code
+
+This is the best supported editor at the moment.
+The rust-analyzer plugin for VS Code is maintained
+https://github.com/rust-lang/rust-analyzer/tree/master/editors/code[in tree].
+
+You can install the latest release of the plugin from
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
+
+Note that the plugin may cause conflicts with the
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
+It is recommended to disable the Rust plugin when using the rust-analyzer extension.
+
+By default, the plugin will prompt you to download the matching version of the server as well:
+
+image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
+
+[NOTE]
+====
+To disable this notification put the following to `settings.json`
+
+[source,json]
+----
+{ "rust-analyzer.updates.askBeforeDownload": false }
+----
+====
+
+The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
+
+* Linux: `~/.vscode/extensions`
+* Linux (Remote, such as WSL): `~/.vscode-server/extensions`
+* macOS: `~/.vscode/extensions`
+* Windows: `%USERPROFILE%\.vscode\extensions`
+
+As an exception, on NixOS, the extension makes a copy of the server and stores it under `~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
+
+Note that we only support the two most recent versions of VS Code.
+
+==== Updates
+
+The extension will be updated automatically as new versions become available.
+It will ask your permission to download the matching language server version binary if needed.
+
+===== Nightly
+
+We ship nightly releases for VS Code.
+To help us out by testing the newest code, you can enable pre-release versions in the Code extension page.
+
+==== Manual installation
+
+Alternatively, download a VSIX corresponding to your platform from the
+https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+
+Install the extension with the `Extensions: Install from VSIX` command within VS Code, or from the command line via:
+[source]
+----
+$ code --install-extension /path/to/rust-analyzer.vsix
+----
+
+If you are running an unsupported platform, you can install `rust-analyzer-no-server.vsix` and compile or obtain a server binary.
+Copy the server anywhere, then add the path to your settings.json, for example:
+[source,json]
+----
+{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
+----
+
+==== Building From Source
+
+Both the server and the Code plugin can be installed from source:
+
+[source]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install
+----
+
+You'll need Cargo, nodejs (matching a supported version of VS Code) and npm for this.
+
+Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually.
+
+If you're not using Code, you can compile and install only the LSP server:
+
+[source]
+----
+$ cargo xtask install --server
+----
+
+=== rust-analyzer Language Server Binary
+
+Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
+You can download pre-built binaries from the https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`.
+
+On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work:
+
+[source,bash]
+----
+$ mkdir -p ~/.local/bin
+$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
+$ chmod +x ~/.local/bin/rust-analyzer
+----
+
+Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on an `x86-64` system.
+
+You don't have to use `~/.local/bin`, any other path like `~/.cargo/bin` or `/usr/local/bin` will work just as well.
+
+Alternatively, you can install it from source using the command below.
+You'll need the latest stable version of the Rust toolchain.
+
+[source,bash]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install --server
+----
+
+If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-lang/rust-analyzer/issues/1811[this issue].
+On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
+
+==== `rustup`
+
+`rust-analyzer` is available in `rustup`, but only in the nightly toolchain:
+
+[source,bash]
+----
+$ rustup +nightly component add rust-analyzer-preview
+----
+
+However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue].
+
+==== Arch Linux
+
+The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
+
+- https://www.archlinux.org/packages/community/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
+- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
+
+Install it with pacman, for example:
+
+[source,bash]
+----
+$ pacman -S rust-analyzer
+----
+
+==== Gentoo Linux
+
+`rust-analyzer` is available in the GURU repository:
+
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer`] builds from source
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer-bin?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer-bin`] installs an official binary release
+
+If not already enabled, GURU must be enabled (e.g. using `app-eselect/eselect-repository`) and synced before running `emerge`:
+
+[source,bash]
+----
+$ eselect repository enable guru && emaint sync -r guru
+$ emerge rust-analyzer-bin
+----
+
+==== macOS
+
+The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
+
+[source,bash]
+----
+$ brew install rust-analyzer
+----
+
+=== Emacs
+
+Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm].
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el].
+
+1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions].
+2. Set `lsp-rust-server` to `'rust-analyzer`.
+3. Run `lsp` in a Rust buffer.
+4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
+
+=== Vim/NeoVim
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+This is not needed if the extension can install/update the binary on its own; coc-rust-analyzer is one example.
+
+There are several LSP client implementations for vim or neovim:
+
+==== coc-rust-analyzer
+
+1. Install coc.nvim by following the instructions at
+ https://github.com/neoclide/coc.nvim[coc.nvim]
+ (Node.js required)
+2. Run `:CocInstall coc-rust-analyzer` to install
+ https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
+ this extension implements _most_ of the features supported in the VSCode extension:
+ * automatically install and upgrade stable/nightly releases
+ * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
+ * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
+ * inlay hints for variables and method chaining, _Neovim Only_
+
+Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
+
+==== LanguageClient-neovim
+
+1. Install LanguageClient-neovim by following the instructions
+ https://github.com/autozimu/LanguageClient-neovim[here]
+ * The GitHub project wiki has extra tips on configuration
+
+2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
++
+[source,vim]
+----
+let g:LanguageClient_serverCommands = {
+\ 'rust': ['rust-analyzer'],
+\ }
+----
+
+==== YouCompleteMe
+
+Install YouCompleteMe by following the instructions
+ https://github.com/ycm-core/YouCompleteMe#installation[here].
+
+rust-analyzer is the default in YCM and should work out of the box.
+
+==== ALE
+
+To use the LSP server in https://github.com/dense-analysis/ale[ale]:
+
+[source,vim]
+----
+let g:ale_linters = {'rust': ['analyzer']}
+----
+
+==== nvim-lsp
+
+NeoVim 0.5 has built-in language server support.
+For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
+Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
+
+You can also pass LSP settings to the server:
+
+[source,vim]
+----
+lua << EOF
+local nvim_lsp = require'lspconfig'
+
+local on_attach = function(client)
+ require'completion'.on_attach(client)
+end
+
+nvim_lsp.rust_analyzer.setup({
+ on_attach=on_attach,
+ settings = {
+ ["rust-analyzer"] = {
+ imports = {
+ granularity = {
+ group = "module",
+ },
+ prefix = "self",
+ },
+ cargo = {
+ buildScripts = {
+ enable = true,
+ },
+ },
+ procMacro = {
+ enable = true
+ },
+ }
+ }
+})
+EOF
+----
+
+See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
+
+Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
+
+==== vim-lsp
+
+vim-lsp is installed by following https://github.com/prabirshrestha/vim-lsp[the plugin instructions].
+It can be as simple as adding this line to your `.vimrc`:
+
+[source,vim]
+----
+Plug 'prabirshrestha/vim-lsp'
+----
+
+Next you need to register the `rust-analyzer` binary.
+If it is available in `$PATH`, you may want to add this to your `.vimrc`:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ })
+endif
+----
+
+There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<_configuration,Configuration>> section.
+Here is an example of how to enable the proc-macro support:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ 'initialization_options': {
+ \ 'cargo': {
+ \ 'buildScripts': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ 'procMacro': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ })
+endif
+----
+
+=== Sublime Text
+
+==== Sublime Text 4:
+* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
+
+NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
+
+==== Sublime Text 3:
+* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
+* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
+
+If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, features like tooltips on hovering over variables should become available.
+
+If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.
+
+=== GNOME Builder
+
+GNOME Builder 3.37.1 and newer has native `rust-analyzer` support.
+If the LSP binary is not available, GNOME Builder can install it when opening a Rust file.
+
+
+=== Eclipse IDE
+
+Support for Rust development in the Eclipse IDE is provided by link:https://github.com/eclipse/corrosion[Eclipse Corrosion].
+If available in PATH or in some standard location, `rust-analyzer` is detected and powers editing of Rust files without further configuration.
+If `rust-analyzer` is not detected, Corrosion will prompt you for configuration of your Rust toolchain and language server with a link to the __Window > Preferences > Rust__ preference page; from here a button allows you to download and configure `rust-analyzer`, but you can also reference another installation.
+You'll need to close and reopen all .rs and Cargo files, or to restart the IDE, for this change to take effect.
+
+=== Kate Text Editor
+
+Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
+It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
+
+Earlier versions allow you to use rust-analyzer through a simple settings change.
+In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
+Then in the configuration replace:
+[source,json]
+----
+ "rust": {
+ "command": ["rls"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rls",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+With
+[source,json]
+----
+ "rust": {
+ "command": ["rust-analyzer"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rust-analyzer",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+Then click Apply and restart the LSP server for your Rust project.
+
+=== juCi++
+
+https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file.
+
+=== Kakoune
+
+https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`].
+Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
+To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default).
+
+Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]).
+A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save.
+The following might help you get all of this.
+
+[source,txt]
+----
+eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak.
+hook global WinSetOption filetype=rust %{
+ # Enable LSP
+ lsp-enable-window
+
+ # Auto-formatting on save
+ hook window BufWritePre .* lsp-formatting-sync
+
+ # Configure inlay hints (only on save)
+ hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
+ hook -once -always window WinSetOption filetype=.* %{
+ remove-hooks window rust-inlay-hints
+ }
+}
+----
+
+=== Helix
+
+https://docs.helix-editor.com/[Helix] supports LSP by default.
+However, it won't install `rust-analyzer` automatically.
+You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+== Troubleshooting
+
+Start by looking at the rust-analyzer version.
+Try **Rust Analyzer: Show RA Version** in VS Code (using the **Command Palette**, typically activated by Ctrl+Shift+P) or `rust-analyzer --version` on the command line.
+If the date is more than a week ago, it's better to update the rust-analyzer version.
+
+The next thing to check would be panic messages in rust-analyzer's log.
+Log messages are printed to stderr; in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
+To see more logs, set the `RA_LOG=info` environment variable. This can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`; note that both approaches require the server to be restarted.
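+
+For example, in VS Code this could go into `settings.json` (a sketch; other clients configure it via their own settings mechanism):
+
+[source,json]
+----
+{ "rust-analyzer.server.extraEnv": { "RA_LOG": "info" } }
+----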
+
+To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
+`Output > Rust Analyzer Language Server Trace`.
+
+The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
+To debug that, first note the `rust-analyzer` section in the status bar.
+If it has an error icon and is red, that's the problem (hovering over it will show a somewhat helpful error message).
+**Rust Analyzer: Status** prints dependency information for the current file.
+Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
+
+If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.
+This command type checks the whole project in batch mode bypassing LSP machinery.
+
+When filing issues, it is useful (but not necessary) to try to minimize examples.
+An ideal bug reproduction looks like this:
+
+```bash
+$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
+$ rust-analyzer --version
+rust-analyzer dd12184e4 2021-05-08 dev
+$ rust-analyzer analysis-stats .
+💀 💀 💀
+```
+
+It is especially useful when the `repo` doesn't use external crates or the standard library.
+
+If you want to go as far as to modify the source code to debug the problem, be sure to take a look at the
+https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]!
+
+== Configuration
+
+**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs]
+
+The <<_installation,Installation>> section contains details on configuration for some of the editors.
+In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files.
+
+Some clients, such as <<vs-code,VS Code>> or the <<coc-rust-analyzer,coc-rust-analyzer plugin in Vim>>, provide `rust-analyzer`-specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`.
+
+For the latter category, it might help to know that the initial configuration is specified as a value of the `initializationOptions` field of the https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize[`InitializeParams` message, in the LSP protocol].
+The spec says that the field type is `any?`, but `rust-analyzer` is looking for a JSON object that is constructed using settings from the list below.
+The name of the setting, ignoring the `rust-analyzer.` prefix, is used as a path, and the value of the setting becomes the JSON property value.
+
+For example, a very common configuration is to enable proc-macro support; this can be achieved by sending this JSON:
+
+[source,json]
+----
+{
+    "cargo": {
+        "buildScripts": {
+            "enable": true
+        }
+    },
+    "procMacro": {
+        "enable": true
+    }
+}
+----
+
+Please consult your editor's documentation to learn more about how to configure https://microsoft.github.io/language-server-protocol/[LSP servers].
+
+To verify which configuration is actually used by `rust-analyzer`, set `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages.
+Logs should show both the JSON that `rust-analyzer` sees as well as the updated config.
+
+This is the list of config options `rust-analyzer` supports:
+
+include::./generated_config.adoc[]
+
+== Non-Cargo Based Projects
+
+rust-analyzer does not require Cargo.
+However, if you use some other build system, you'll have to describe the structure of your project for rust-analyzer in the `rust-project.json` format:
+
+[source,TypeScript]
+----
+interface JsonProject {
+ /// Path to the directory with *source code* of
+ /// sysroot crates.
+ ///
+ /// It should point to the directory where std,
+ /// core, and friends can be found:
+ ///
+ /// https://github.com/rust-lang/rust/tree/master/library.
+ ///
+ /// If provided, rust-analyzer automatically adds
+ /// dependencies on sysroot crates. Conversely,
+ /// if you omit this path, you can specify sysroot
+ /// dependencies yourself and, for example, have
+ /// several different "sysroots" in one graph of
+ /// crates.
+ sysroot_src?: string;
+ /// The set of crates comprising the current
+ /// project. Must include all transitive
+ /// dependencies as well as sysroot crate (libstd,
+ /// libcore and such).
+ crates: Crate[];
+}
+
+interface Crate {
+ /// Optional crate name used for display purposes,
+ /// without affecting semantics. See the `deps`
+ /// key for semantically-significant crate names.
+ display_name?: string;
+ /// Path to the root module of the crate.
+ root_module: string;
+ /// Edition of the crate.
+ edition: "2015" | "2018" | "2021";
+ /// Dependencies
+ deps: Dep[];
+ /// Should this crate be treated as a member of
+ /// current "workspace".
+ ///
+ /// By default, inferred from the `root_module`
+ /// (members are the crates which reside inside
+ /// the directory opened in the editor).
+ ///
+ /// Set this to `false` for things like standard
+ /// library and 3rd party crates to enable
+ /// performance optimizations (rust-analyzer
+ /// assumes that non-member crates don't change).
+ is_workspace_member?: boolean;
+ /// Optionally specify the (super)set of `.rs`
+ /// files comprising this crate.
+ ///
+ /// By default, rust-analyzer assumes that only
+ /// files under `root_module.parent` can belong
+ /// to a crate. `include_dirs` are included
+ /// recursively, unless a subdirectory is in
+ /// `exclude_dirs`.
+ ///
+ /// Different crates can share the same `source`.
+ ///
+ /// If two crates share an `.rs` file in common,
+ /// they *must* have the same `source`.
+ /// rust-analyzer assumes that files from one
+ /// source can't refer to files in another source.
+ source?: {
+ include_dirs: string[],
+ exclude_dirs: string[],
+ },
+ /// The set of cfgs activated for a given crate, like
+ /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
+ cfg: string[];
+ /// Target triple for this Crate.
+ ///
+ /// Used when running `rustc --print cfg`
+ /// to get target-specific cfgs.
+ target?: string;
+ /// Environment variables, used for
+ /// the `env!` macro
+ env: { [key: string]: string; },
+
+ /// Whether the crate is a proc-macro crate.
+ is_proc_macro: boolean;
+ /// For proc-macro crates, path to compiled
+ /// proc-macro (.so file).
+ proc_macro_dylib_path?: string;
+}
+
+interface Dep {
+ /// Index of a crate in the `crates` array.
+ crate: number,
+ /// Name as should appear in the (implicit)
+ /// `extern crate name` declaration.
+ name: string,
+}
+----
+
+This format is provisional and subject to change.
+Specifically, the `roots` setup will be different eventually.
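+
+For orientation, a minimal sketch of a `rust-project.json` for a single-crate project might look like this (the `sysroot_src` path is hypothetical and depends on where your toolchain sources live):
+
+[source,json]
+----
+{
+    "sysroot_src": "/path/to/rust/library",
+    "crates": [
+        {
+            "root_module": "src/lib.rs",
+            "edition": "2021",
+            "deps": [],
+            "cfg": [],
+            "env": {},
+            "is_proc_macro": false
+        }
+    ]
+}
+----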
+
+There are three ways to feed `rust-project.json` to rust-analyzer:
+
+* Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it.
+* Specify `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in the settings (and make sure that your LSP client sends settings as a part of initialize request).
+* Specify `"rust-analyzer.linkedProjects": [ { "roots": [...], "crates": [...] }]` inline.
+
+Relative paths are interpreted relative to `rust-project.json` file location or (for inline JSON) relative to `rootUri`.
+
+See https://github.com/rust-analyzer/rust-project.json-example for a small example.
+
+You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
+
+Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command.
+
+[source,json]
+----
+{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] }
+----
+
+The `checkOnSave.overrideCommand` option requires the specified command to output JSON error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check`, so whichever command you use must also output errors in this format. See the <<Configuration>> section for more information.
+
+== Security
+
+At the moment, rust-analyzer assumes that all code is trusted.
+Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrary code:
+
+* proc macros and build scripts are executed by default
+* `.cargo/config` can override `rustc` with an arbitrary executable
+* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
+* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
+* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
+
+== Privacy
+
+The LSP server performs no network access in itself, but runs `cargo metadata` which will update or download the crate registry and the source code of the project dependencies.
+If enabled (the default), build scripts and procedural macros can do anything.
+
+The Code extension does not access the network.
+
+Any other editor plugins are not under the control of the `rust-analyzer` developers. For any privacy concerns, you should check with their respective developers.
+
+For `rust-analyzer` developers, `cargo xtask release` uses the GitHub API to put together the release notes.
+
+== Features
+
+include::./generated_features.adoc[]
+
+== Assists (Code Actions)
+
+Assists, or code actions, are small local refactorings, available in a particular context.
+They are usually triggered by a shortcut or by clicking a light bulb icon in the editor.
+Cursor position or selection is signified by `┃` character.
+
+include::./generated_assists.adoc[]
+
+== Diagnostics
+
+While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
+Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
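+
+For example, a client can disable a single native diagnostic while keeping the rest enabled (a sketch in VS Code's `settings.json` form, with a hypothetical diagnostic name):
+
+[source,json]
+----
+{ "rust-analyzer.diagnostics.disabled": ["unresolved-proc-macro"] }
+----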
+
+include::./generated_diagnostic.adoc[]
+
+== Editor Features
+=== VS Code
+
+==== Color configurations
+
+It is possible to change the foreground/background color and font family/size of inlay hints.
+Just add this to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.inlayHints.fontFamily": "Courier New",
+ "editor.inlayHints.fontSize": 11,
+
+ "workbench.colorCustomizations": {
+ // Name of the theme you are currently using
+ "[Default Dark+]": {
+ "editorInlayHint.foreground": "#868686f0",
+ "editorInlayHint.background": "#3d3d3d48",
+
+ // Overrides for specific kinds of inlay hints
+ "editorInlayHint.typeForeground": "#fdb6fdf0",
+ "editorInlayHint.parameterForeground": "#fdb6fdf0",
+ }
+ }
+}
+----
+
+==== Semantic style customizations
+
+You can customize the look of different semantic elements in the source code.
+For example, mutable bindings are underlined by default and you can override this behavior by adding the following section to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "*.mutable": {
+ "fontStyle": "", // underline is the default
+ },
+ }
+ },
+}
+----
+
+Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600",
+ "function.unsafe": "#ff6600",
+ "method.unsafe": "#ff6600"
+ }
+ },
+}
+----
+
+In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600"
+ },
+ "[Ayu Light]": {
+ "rules": {
+ "operator.unsafe": "#572300"
+ }
+ }
+ },
+}
+----
+
+Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
+
+==== Special `when` clause context for keybindings
+You may use the `inRustProject` context to configure keybindings for Rust projects only.
+For example:
+
+[source,json]
+----
+{
+ "key": "ctrl+alt+d",
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+}
+----
+More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
+
+==== Setting runnable environment variables
+You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
+The simplest way for all runnables in a bunch:
+```jsonc
+"rust-analyzer.runnableEnv": {
+ "RUN_SLOW_TESTS": "1"
+}
+```
+
+Or you can specify the variables more granularly:
+```jsonc
+"rust-analyzer.runnableEnv": [
+ {
+ // "mask": null, // null mask means that this rule will be applied for all runnables
+ env: {
+ "APP_ID": "1",
+ "APP_DATA": "asdf"
+ }
+ },
+ {
+ "mask": "test_name",
+ "env": {
+ "APP_ID": "2", // overwrites only APP_ID
+ }
+ }
+]
+```
+
+You can use any valid regular expression as a mask.
+Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
+
+==== Compiler feedback from external commands
+
+Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
+
+To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences.
+
+For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
+
+```json
+{
+ "label": "Watch",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo watch",
+ "problemMatcher": "$rustc-watch",
+ "isBackground": true
+}
+```
diff --git a/src/tools/rust-analyzer/lib/README.md b/src/tools/rust-analyzer/lib/README.md
new file mode 100644
index 000000000..6b2eeac2c
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/README.md
@@ -0,0 +1,2 @@
+Crates in this directory are published to crates.io and obey semver.
+They *could* live in a separate repo, but we want to experiment with a monorepo setup.
diff --git a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
new file mode 100644
index 000000000..ec5ba8ba0
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "la-arena"
+version = "0.3.0"
+description = "Simple index-based arena without deletion."
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/la-arena"
+documentation = "https://docs.rs/la-arena"
+categories = ["data-structures", "memory-management", "rust-patterns"]
+edition = "2021"
+rust-version = "1.56"
diff --git a/src/tools/rust-analyzer/lib/la-arena/src/lib.rs b/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
new file mode 100644
index 000000000..dadee43b1
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/la-arena/src/lib.rs
@@ -0,0 +1,366 @@
+//! Yet another index-based arena.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(missing_docs)]
+
+use std::{
+ fmt,
+ hash::{Hash, Hasher},
+ iter::FromIterator,
+ marker::PhantomData,
+ ops::{Index, IndexMut, Range, RangeInclusive},
+};
+
+mod map;
+pub use map::ArenaMap;
+
+/// The raw index of a value in an arena.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct RawIdx(u32);
+
+impl From<RawIdx> for u32 {
+ fn from(raw: RawIdx) -> u32 {
+ raw.0
+ }
+}
+
+impl From<u32> for RawIdx {
+ fn from(idx: u32) -> RawIdx {
+ RawIdx(idx)
+ }
+}
+
+impl fmt::Debug for RawIdx {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl fmt::Display for RawIdx {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// The index of a value allocated in an arena that holds `T`s.
+pub struct Idx<T> {
+ raw: RawIdx,
+ _ty: PhantomData<fn() -> T>,
+}
+
+impl<T> Clone for Idx<T> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+impl<T> Copy for Idx<T> {}
+
+impl<T> PartialEq for Idx<T> {
+ fn eq(&self, other: &Idx<T>) -> bool {
+ self.raw == other.raw
+ }
+}
+impl<T> Eq for Idx<T> {}
+
+impl<T> Hash for Idx<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<T> fmt::Debug for Idx<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut type_name = std::any::type_name::<T>();
+ if let Some(idx) = type_name.rfind(':') {
+ type_name = &type_name[idx + 1..];
+ }
+ write!(f, "Idx::<{}>({})", type_name, self.raw)
+ }
+}
+
+impl<T> Idx<T> {
+ /// Creates a new index from a [`RawIdx`].
+ pub fn from_raw(raw: RawIdx) -> Self {
+ Idx { raw, _ty: PhantomData }
+ }
+
+ /// Converts this index into the underlying [`RawIdx`].
+ pub fn into_raw(self) -> RawIdx {
+ self.raw
+ }
+}
+
+/// A range of densely allocated arena values.
+pub struct IdxRange<T> {
+ range: Range<u32>,
+ _p: PhantomData<T>,
+}
+
+impl<T> IdxRange<T> {
+ /// Creates a new index range
+ /// inclusive of the start value and exclusive of the end value.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let a = arena.alloc("a");
+ /// let b = arena.alloc("b");
+ /// let c = arena.alloc("c");
+ /// let d = arena.alloc("d");
+ ///
+ /// let range = la_arena::IdxRange::new(b..d);
+ /// assert_eq!(&arena[range], &["b", "c"]);
+ /// ```
+ pub fn new(range: Range<Idx<T>>) -> Self {
+ Self { range: range.start.into_raw().into()..range.end.into_raw().into(), _p: PhantomData }
+ }
+
+ /// Creates a new index range
+ /// inclusive of the start value and end value.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let foo = arena.alloc("foo");
+ /// let bar = arena.alloc("bar");
+ /// let baz = arena.alloc("baz");
+ ///
+ /// let range = la_arena::IdxRange::new_inclusive(foo..=baz);
+ /// assert_eq!(&arena[range], &["foo", "bar", "baz"]);
+ ///
+ /// let range = la_arena::IdxRange::new_inclusive(foo..=foo);
+ /// assert_eq!(&arena[range], &["foo"]);
+ /// ```
+ pub fn new_inclusive(range: RangeInclusive<Idx<T>>) -> Self {
+ Self {
+ range: u32::from(range.start().into_raw())..u32::from(range.end().into_raw()) + 1,
+ _p: PhantomData,
+ }
+ }
+
+ /// Returns whether the index range is empty.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let one = arena.alloc(1);
+ /// let two = arena.alloc(2);
+ ///
+ /// assert!(la_arena::IdxRange::new(one..one).is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.range.is_empty()
+ }
+}
+
+impl<T> Iterator for IdxRange<T> {
+ type Item = Idx<T>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.range.next().map(|raw| Idx::from_raw(raw.into()))
+ }
+}
+
+impl<T> DoubleEndedIterator for IdxRange<T> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.range.next_back().map(|raw| Idx::from_raw(raw.into()))
+ }
+}
+
+impl<T> fmt::Debug for IdxRange<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple(&format!("IdxRange::<{}>", std::any::type_name::<T>()))
+ .field(&self.range)
+ .finish()
+ }
+}
+
+impl<T> Clone for IdxRange<T> {
+ fn clone(&self) -> Self {
+ Self { range: self.range.clone(), _p: PhantomData }
+ }
+}
+
+impl<T> PartialEq for IdxRange<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.range == other.range
+ }
+}
+
+impl<T> Eq for IdxRange<T> {}
+
+/// Yet another index-based arena.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct Arena<T> {
+ data: Vec<T>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for Arena<T> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt.debug_struct("Arena").field("len", &self.len()).field("data", &self.data).finish()
+ }
+}
+
+impl<T> Arena<T> {
+ /// Creates a new empty arena.
+ ///
+ /// ```
+ /// let arena: la_arena::Arena<i32> = la_arena::Arena::new();
+ /// assert!(arena.is_empty());
+ /// ```
+ pub const fn new() -> Arena<T> {
+ Arena { data: Vec::new() }
+ }
+
+ /// Empties the arena, removing all contained values.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ ///
+ /// arena.alloc(1);
+ /// arena.alloc(2);
+ /// arena.alloc(3);
+ /// assert_eq!(arena.len(), 3);
+ ///
+ /// arena.clear();
+ /// assert!(arena.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ self.data.clear();
+ }
+
+ /// Returns the length of the arena.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// assert_eq!(arena.len(), 0);
+ ///
+ /// arena.alloc("foo");
+ /// assert_eq!(arena.len(), 1);
+ ///
+ /// arena.alloc("bar");
+ /// assert_eq!(arena.len(), 2);
+ ///
+ /// arena.alloc("baz");
+ /// assert_eq!(arena.len(), 3);
+ /// ```
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ /// Returns whether the arena contains no elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// assert!(arena.is_empty());
+ ///
+ /// arena.alloc(0.5);
+ /// assert!(!arena.is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.data.is_empty()
+ }
+
+ /// Allocates a new value on the arena, returning the value’s index.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx = arena.alloc(50);
+ ///
+ /// assert_eq!(arena[idx], 50);
+ /// ```
+ pub fn alloc(&mut self, value: T) -> Idx<T> {
+ let idx = self.next_idx();
+ self.data.push(value);
+ idx
+ }
+
+ /// Returns an iterator over the arena’s elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx1 = arena.alloc(20);
+ /// let idx2 = arena.alloc(40);
+ /// let idx3 = arena.alloc(60);
+ ///
+ /// let mut iterator = arena.iter();
+ /// assert_eq!(iterator.next(), Some((idx1, &20)));
+ /// assert_eq!(iterator.next(), Some((idx2, &40)));
+ /// assert_eq!(iterator.next(), Some((idx3, &60)));
+ /// ```
+ pub fn iter(
+ &self,
+ ) -> impl Iterator<Item = (Idx<T>, &T)> + ExactSizeIterator + DoubleEndedIterator {
+ self.data.iter().enumerate().map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
+ }
+
+ /// Returns an iterator over the arena’s mutable elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx1 = arena.alloc(20);
+ ///
+ /// assert_eq!(arena[idx1], 20);
+ ///
+ /// let mut iterator = arena.iter_mut();
+ /// *iterator.next().unwrap().1 = 10;
+ /// drop(iterator);
+ ///
+ /// assert_eq!(arena[idx1], 10);
+ /// ```
+ pub fn iter_mut(
+ &mut self,
+ ) -> impl Iterator<Item = (Idx<T>, &mut T)> + ExactSizeIterator + DoubleEndedIterator {
+ self.data
+ .iter_mut()
+ .enumerate()
+ .map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
+ }
+
+ /// Reallocates the arena to make it take up as little space as possible.
+ pub fn shrink_to_fit(&mut self) {
+ self.data.shrink_to_fit();
+ }
+
+ /// Returns the index of the next value allocated on the arena.
+ ///
+ /// This method should remain private to make creating invalid `Idx`s harder.
+ fn next_idx(&self) -> Idx<T> {
+ Idx::from_raw(RawIdx(self.data.len() as u32))
+ }
+}
+
+impl<T> Default for Arena<T> {
+ fn default() -> Arena<T> {
+ Arena { data: Vec::new() }
+ }
+}
+
+impl<T> Index<Idx<T>> for Arena<T> {
+ type Output = T;
+ fn index(&self, idx: Idx<T>) -> &T {
+ let idx = idx.into_raw().0 as usize;
+ &self.data[idx]
+ }
+}
+
+impl<T> IndexMut<Idx<T>> for Arena<T> {
+ fn index_mut(&mut self, idx: Idx<T>) -> &mut T {
+ let idx = idx.into_raw().0 as usize;
+ &mut self.data[idx]
+ }
+}
+
+impl<T> Index<IdxRange<T>> for Arena<T> {
+ type Output = [T];
+ fn index(&self, range: IdxRange<T>) -> &[T] {
+ let start = range.range.start as usize;
+ let end = range.range.end as usize;
+ &self.data[start..end]
+ }
+}
+
+impl<T> FromIterator<T> for Arena<T> {
+ fn from_iter<I>(iter: I) -> Self
+ where
+ I: IntoIterator<Item = T>,
+ {
+ Arena { data: Vec::from_iter(iter) }
+ }
+}
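
As a reference for the `Iterator`, `DoubleEndedIterator`, and `Index<IdxRange<T>>` impls above, here is a small editorial sketch (not part of the imported sources) of walking a contiguous slice of an arena by index:

```
fn main() {
    let mut arena = la_arena::Arena::new();
    let a = arena.alloc("a");
    let _b = arena.alloc("b");
    let c = arena.alloc("c");
    let _d = arena.alloc("d");

    // Half-open range [a, c): covers the slots holding "a" and "b".
    let range = la_arena::IdxRange::new(a..c);

    // IdxRange is itself an iterator over indices ...
    let forwards: Vec<_> = range.clone().map(|idx| arena[idx]).collect();
    assert_eq!(forwards, ["a", "b"]);

    // ... and a double-ended one, so it can also be consumed from the back.
    let backwards: Vec<_> = range.rev().map(|idx| arena[idx]).collect();
    assert_eq!(backwards, ["b", "a"]);
}
```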
diff --git a/src/tools/rust-analyzer/lib/la-arena/src/map.rs b/src/tools/rust-analyzer/lib/la-arena/src/map.rs
new file mode 100644
index 000000000..d27f086d3
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/la-arena/src/map.rs
@@ -0,0 +1,75 @@
+use std::marker::PhantomData;
+
+use crate::Idx;
+
+/// A map from arena indexes to some other type.
+/// Space requirement is O(highest index).
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ArenaMap<IDX, V> {
+ v: Vec<Option<V>>,
+ _ty: PhantomData<IDX>,
+}
+
+impl<T, V> ArenaMap<Idx<T>, V> {
+ /// Inserts a value associated with a given arena index into the map.
+ pub fn insert(&mut self, idx: Idx<T>, t: V) {
+ let idx = Self::to_idx(idx);
+
+ self.v.resize_with((idx + 1).max(self.v.len()), || None);
+ self.v[idx] = Some(t);
+ }
+
+ /// Returns a reference to the value associated with the provided index
+ /// if it is present.
+ pub fn get(&self, idx: Idx<T>) -> Option<&V> {
+ self.v.get(Self::to_idx(idx)).and_then(|it| it.as_ref())
+ }
+
+ /// Returns a mutable reference to the value associated with the provided index
+ /// if it is present.
+ pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut V> {
+ self.v.get_mut(Self::to_idx(idx)).and_then(|it| it.as_mut())
+ }
+
+ /// Returns an iterator over the values in the map.
+ pub fn values(&self) -> impl Iterator<Item = &V> {
+ self.v.iter().filter_map(|o| o.as_ref())
+ }
+
+ /// Returns an iterator over mutable references to the values in the map.
+ pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> {
+ self.v.iter_mut().filter_map(|o| o.as_mut())
+ }
+
+ /// Returns an iterator over the arena indexes and values in the map.
+ pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> {
+ self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
+ }
+
+ fn to_idx(idx: Idx<T>) -> usize {
+ u32::from(idx.into_raw()) as usize
+ }
+
+ fn from_idx(idx: usize) -> Idx<T> {
+ Idx::from_raw((idx as u32).into())
+ }
+}
+
+impl<T, V> std::ops::Index<Idx<V>> for ArenaMap<Idx<V>, T> {
+ type Output = T;
+ fn index(&self, idx: Idx<V>) -> &T {
+ self.v[Self::to_idx(idx)].as_ref().unwrap()
+ }
+}
+
+impl<T, V> std::ops::IndexMut<Idx<V>> for ArenaMap<Idx<V>, T> {
+ fn index_mut(&mut self, idx: Idx<V>) -> &mut T {
+ self.v[Self::to_idx(idx)].as_mut().unwrap()
+ }
+}
+
+impl<T, V> Default for ArenaMap<Idx<V>, T> {
+ fn default() -> Self {
+ ArenaMap { v: Vec::new(), _ty: PhantomData }
+ }
+}
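
A minimal editorial sketch (not part of the imported sources) of pairing `ArenaMap` with an `Arena`, assuming `ArenaMap` is re-exported at the la-arena crate root as in the published crate:

```
fn main() {
    let mut arena: la_arena::Arena<&str> = la_arena::Arena::new();
    let foo = arena.alloc("foo");
    let bar = arena.alloc("bar");

    // Sparse side table keyed by arena indices; slots are grown lazily on insert.
    let mut lengths: la_arena::ArenaMap<la_arena::Idx<&str>, usize> =
        la_arena::ArenaMap::default();
    lengths.insert(foo, arena[foo].len());

    assert_eq!(lengths.get(foo), Some(&3));
    assert_eq!(lengths.get(bar), None);
    assert_eq!(lengths[foo], 3);
}
```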
diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
new file mode 100644
index 000000000..204d120d0
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "lsp-server"
+version = "0.6.0"
+description = "Generic LSP server scaffold."
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
+edition = "2021"
+
+[dependencies]
+log = "0.4.17"
+serde_json = "1.0.81"
+serde = { version = "1.0.137", features = ["derive"] }
+crossbeam-channel = "0.5.5"
+
+[dev-dependencies]
+lsp-types = "0.93.0"
diff --git a/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
new file mode 100644
index 000000000..ca7ad0b53
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
@@ -0,0 +1,121 @@
+//! A minimal example LSP server that can only respond to the `gotoDefinition` request. To use
+//! this example, execute it and then send an `initialize` request.
+//!
+//! ```no_run
+//! Content-Length: 85
+//!
+//! {"jsonrpc": "2.0", "method": "initialize", "id": 1, "params": {"capabilities": {}}}
+//! ```
+//!
+//! The server will reply with a response. Then send it an `initialized` notification, which will
+//! receive no response.
+//!
+//! ```no_run
+//! Content-Length: 59
+//!
+//! {"jsonrpc": "2.0", "method": "initialized", "params": {}}
+//! ```
+//!
+//! Once these two are sent, we enter the main loop of the server. The only request this
+//! example can handle is `gotoDefinition`:
+//!
+//! ```no_run
+//! Content-Length: 159
+//!
+//! {"jsonrpc": "2.0", "method": "textDocument/definition", "id": 2, "params": {"textDocument": {"uri": "file://temp"}, "position": {"line": 1, "character": 1}}}
+//! ```
+//!
+//! To finish up without errors, send a shutdown request:
+//!
+//! ```no_run
+//! Content-Length: 67
+//!
+//! {"jsonrpc": "2.0", "method": "shutdown", "id": 3, "params": null}
+//! ```
+//!
+//! The server will exit the main loop and finally we send an `exit` notification to stop
+//! the server.
+//!
+//! ```
+//! Content-Length: 54
+//!
+//! {"jsonrpc": "2.0", "method": "exit", "params": null}
+//! ```
+use std::error::Error;
+
+use lsp_types::OneOf;
+use lsp_types::{
+ request::GotoDefinition, GotoDefinitionResponse, InitializeParams, ServerCapabilities,
+};
+
+use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response};
+
+fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+ // Note that we must have our logging only write out to stderr.
+ eprintln!("starting generic LSP server");
+
+ // Create the transport. This uses the stdio (stdin and stdout) transport, but it could
+ // also be implemented to use sockets or HTTP.
+ let (connection, io_threads) = Connection::stdio();
+
+ // Run the server and wait for the two threads to end (typically triggered by an LSP Exit event).
+ let server_capabilities = serde_json::to_value(&ServerCapabilities {
+ definition_provider: Some(OneOf::Left(true)),
+ ..Default::default()
+ })
+ .unwrap();
+ let initialization_params = connection.initialize(server_capabilities)?;
+ main_loop(connection, initialization_params)?;
+ io_threads.join()?;
+
+ // Shut down gracefully.
+ eprintln!("shutting down server");
+ Ok(())
+}
+
+fn main_loop(
+ connection: Connection,
+ params: serde_json::Value,
+) -> Result<(), Box<dyn Error + Sync + Send>> {
+ let _params: InitializeParams = serde_json::from_value(params).unwrap();
+ eprintln!("starting example main loop");
+ for msg in &connection.receiver {
+ eprintln!("got msg: {:?}", msg);
+ match msg {
+ Message::Request(req) => {
+ if connection.handle_shutdown(&req)? {
+ return Ok(());
+ }
+ eprintln!("got request: {:?}", req);
+ match cast::<GotoDefinition>(req) {
+ Ok((id, params)) => {
+ eprintln!("got gotoDefinition request #{}: {:?}", id, params);
+ let result = Some(GotoDefinitionResponse::Array(Vec::new()));
+ let result = serde_json::to_value(&result).unwrap();
+ let resp = Response { id, result: Some(result), error: None };
+ connection.sender.send(Message::Response(resp))?;
+ continue;
+ }
+ Err(err @ ExtractError::JsonError { .. }) => panic!("{:?}", err),
+ Err(ExtractError::MethodMismatch(req)) => req,
+ };
+ // ...
+ }
+ Message::Response(resp) => {
+ eprintln!("got response: {:?}", resp);
+ }
+ Message::Notification(not) => {
+ eprintln!("got notification: {:?}", not);
+ }
+ }
+ }
+ Ok(())
+}
+
+fn cast<R>(req: Request) -> Result<(RequestId, R::Params), ExtractError<Request>>
+where
+ R: lsp_types::request::Request,
+ R::Params: serde::de::DeserializeOwned,
+{
+ req.extract(R::METHOD)
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/error.rs b/src/tools/rust-analyzer/lib/lsp-server/src/error.rs
new file mode 100644
index 000000000..4c934d9ec
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/error.rs
@@ -0,0 +1,50 @@
+use std::fmt;
+
+use crate::{Notification, Request};
+
+#[derive(Debug, Clone)]
+pub struct ProtocolError(pub(crate) String);
+
+impl std::error::Error for ProtocolError {}
+
+impl fmt::Display for ProtocolError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
+
+#[derive(Debug)]
+pub enum ExtractError<T> {
+ /// The extracted message was of a different method than expected.
+ MethodMismatch(T),
+ /// Failed to deserialize the message.
+ JsonError { method: String, error: serde_json::Error },
+}
+
+impl std::error::Error for ExtractError<Request> {}
+impl fmt::Display for ExtractError<Request> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExtractError::MethodMismatch(req) => {
+ write!(f, "Method mismatch for request '{}'", req.method)
+ }
+ ExtractError::JsonError { method, error } => {
+ write!(f, "Invalid request\nMethod: {method}\n error: {error}",)
+ }
+ }
+ }
+}
+
+impl std::error::Error for ExtractError<Notification> {}
+impl fmt::Display for ExtractError<Notification> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExtractError::MethodMismatch(req) => {
+ write!(f, "Method mismatch for notification '{}'", req.method)
+ }
+ ExtractError::JsonError { method, error } => {
+ write!(f, "Invalid notification\nMethod: {method}\n error: {error}")
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
new file mode 100644
index 000000000..d567077d4
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
@@ -0,0 +1,232 @@
+//! A language server scaffold, exposing a synchronous crossbeam-channel based API.
+//! This crate handles protocol handshaking and parsing messages, while you
+//! control the message dispatch loop yourself.
+//!
+//! Run with `RUST_LOG=lsp_server=debug` to see all the messages.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod msg;
+mod stdio;
+mod error;
+mod socket;
+mod req_queue;
+
+use std::{
+ io,
+ net::{TcpListener, TcpStream, ToSocketAddrs},
+};
+
+use crossbeam_channel::{Receiver, Sender};
+
+pub use crate::{
+ error::{ExtractError, ProtocolError},
+ msg::{ErrorCode, Message, Notification, Request, RequestId, Response, ResponseError},
+ req_queue::{Incoming, Outgoing, ReqQueue},
+ stdio::IoThreads,
+};
+
+/// Connection is just a pair of channels of LSP messages.
+pub struct Connection {
+ pub sender: Sender<Message>,
+ pub receiver: Receiver<Message>,
+}
+
+impl Connection {
+ /// Create connection over standard in/standard out.
+ ///
+ /// Use this to create a real language server.
+ pub fn stdio() -> (Connection, IoThreads) {
+ let (sender, receiver, io_threads) = stdio::stdio_transport();
+ (Connection { sender, receiver }, io_threads)
+ }
+
+ /// Open a connection over tcp.
+ /// This call blocks until a connection is established.
+ ///
+ /// Use this to create a real language server.
+ pub fn connect<A: ToSocketAddrs>(addr: A) -> io::Result<(Connection, IoThreads)> {
+ let stream = TcpStream::connect(addr)?;
+ let (sender, receiver, io_threads) = socket::socket_transport(stream);
+ Ok((Connection { sender, receiver }, io_threads))
+ }
+
+ /// Listen for a connection over tcp.
+ /// This call blocks until a connection is established.
+ ///
+ /// Use this to create a real language server.
+ pub fn listen<A: ToSocketAddrs>(addr: A) -> io::Result<(Connection, IoThreads)> {
+ let listener = TcpListener::bind(addr)?;
+ let (stream, _) = listener.accept()?;
+ let (sender, receiver, io_threads) = socket::socket_transport(stream);
+ Ok((Connection { sender, receiver }, io_threads))
+ }
+
+ /// Creates a pair of connected connections.
+ ///
+ /// Use this for testing.
+ pub fn memory() -> (Connection, Connection) {
+ let (s1, r1) = crossbeam_channel::unbounded();
+ let (s2, r2) = crossbeam_channel::unbounded();
+ (Connection { sender: s1, receiver: r2 }, Connection { sender: s2, receiver: r1 })
+ }
+
+ /// Starts the initialization process by waiting for an initialize
+ /// request from the client. Use this for more advanced customization than
+ /// `initialize` can provide.
+ ///
+ /// Returns the request id and serialized `InitializeParams` from the client.
+ ///
+ /// # Example
+ ///
+ /// ```no_run
+ /// use std::error::Error;
+ /// use lsp_types::{ClientCapabilities, InitializeParams, ServerCapabilities};
+ ///
+ /// use lsp_server::{Connection, Message, Request, RequestId, Response};
+ ///
+ /// fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+ /// // Create the transport. This uses the stdio (stdin and stdout) transport, but it could
+ /// // also be implemented to use sockets or HTTP.
+ /// let (connection, io_threads) = Connection::stdio();
+ ///
+ /// // Run the server
+ /// let (id, params) = connection.initialize_start()?;
+ ///
+ /// let init_params: InitializeParams = serde_json::from_value(params).unwrap();
+ /// let client_capabilities: ClientCapabilities = init_params.capabilities;
+ /// let server_capabilities = ServerCapabilities::default();
+ ///
+ /// let initialize_data = serde_json::json!({
+ /// "capabilities": server_capabilities,
+ /// "serverInfo": {
+ /// "name": "lsp-server-test",
+ /// "version": "0.1"
+ /// }
+ /// });
+ ///
+ /// connection.initialize_finish(id, initialize_data)?;
+ ///
+ /// // ... Run main loop ...
+ ///
+ /// Ok(())
+ /// }
+ /// ```
+ pub fn initialize_start(&self) -> Result<(RequestId, serde_json::Value), ProtocolError> {
+ loop {
+ match self.receiver.recv() {
+ Ok(Message::Request(req)) if req.is_initialize() => {
+ return Ok((req.id, req.params))
+ }
+ // Respond to non-initialize requests with ServerNotInitialized
+ Ok(Message::Request(req)) => {
+ let resp = Response::new_err(
+ req.id.clone(),
+ ErrorCode::ServerNotInitialized as i32,
+ format!("expected initialize request, got {:?}", req),
+ );
+ self.sender.send(resp.into()).unwrap();
+ }
+ Ok(msg) => {
+ return Err(ProtocolError(format!(
+ "expected initialize request, got {:?}",
+ msg
+ )))
+ }
+ Err(e) => {
+ return Err(ProtocolError(format!(
+ "expected initialize request, got error: {}",
+ e
+ )))
+ }
+ };
+ }
+ }
+
+ /// Finishes the initialization process by sending an `InitializeResult` to the client.
+ pub fn initialize_finish(
+ &self,
+ initialize_id: RequestId,
+ initialize_result: serde_json::Value,
+ ) -> Result<(), ProtocolError> {
+ let resp = Response::new_ok(initialize_id, initialize_result);
+ self.sender.send(resp.into()).unwrap();
+ match &self.receiver.recv() {
+ Ok(Message::Notification(n)) if n.is_initialized() => (),
+ Ok(msg) => {
+ return Err(ProtocolError(format!(
+ "expected Message::Notification, got: {:?}",
+ msg,
+ )))
+ }
+ Err(e) => {
+ return Err(ProtocolError(format!(
+ "expected initialized notification, got error: {}",
+ e,
+ )))
+ }
+ }
+ Ok(())
+ }
+
+ /// Initializes the connection. Sends the server capabilities
+ /// to the client and returns the client's serialized `InitializeParams`
+ /// on success. If more fine-grained initialization is required, use
+ /// `initialize_start`/`initialize_finish`.
+ ///
+ /// # Example
+ ///
+ /// ```no_run
+ /// use std::error::Error;
+ /// use lsp_types::ServerCapabilities;
+ ///
+ /// use lsp_server::{Connection, Message, Request, RequestId, Response};
+ ///
+ /// fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
+ /// // Create the transport. This uses the stdio (stdin and stdout) transport, but it could
+ /// // also be implemented to use sockets or HTTP.
+ /// let (connection, io_threads) = Connection::stdio();
+ ///
+ /// // Run the server
+ /// let server_capabilities = serde_json::to_value(&ServerCapabilities::default()).unwrap();
+ /// let initialization_params = connection.initialize(server_capabilities)?;
+ ///
+ /// // ... Run main loop ...
+ ///
+ /// Ok(())
+ /// }
+ /// ```
+ pub fn initialize(
+ &self,
+ server_capabilities: serde_json::Value,
+ ) -> Result<serde_json::Value, ProtocolError> {
+ let (id, params) = self.initialize_start()?;
+
+ let initialize_data = serde_json::json!({
+ "capabilities": server_capabilities,
+ });
+
+ self.initialize_finish(id, initialize_data)?;
+
+ Ok(params)
+ }
+
+ /// If `req` is `Shutdown`, respond to it and return `true`, otherwise return `false`
+ pub fn handle_shutdown(&self, req: &Request) -> Result<bool, ProtocolError> {
+ if !req.is_shutdown() {
+ return Ok(false);
+ }
+ let resp = Response::new_ok(req.id.clone(), ());
+ let _ = self.sender.send(resp.into());
+ match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) {
+ Ok(Message::Notification(n)) if n.is_exit() => (),
+ Ok(msg) => {
+ return Err(ProtocolError(format!("unexpected message during shutdown: {:?}", msg)))
+ }
+ Err(e) => {
+ return Err(ProtocolError(format!("unexpected error during shutdown: {}", e)))
+ }
+ }
+ Ok(true)
+ }
+}
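
To illustrate the handshake helpers above, here is an editorial sketch (not part of the imported sources) that drives `initialize` over the in-memory transport; the capability payloads are illustrative:

```
use lsp_server::{Connection, Message, Notification, Request};

fn main() {
    let (server, client) = Connection::memory();

    // Queue the client side of the handshake up front; the in-memory
    // channels are unbounded, so these sends do not block.
    let init = Request::new(1.into(), "initialize".into(), serde_json::json!({ "capabilities": {} }));
    client.sender.send(init.into()).unwrap();
    client.sender.send(Notification::new("initialized".into(), serde_json::json!({})).into()).unwrap();

    // Run the server half of the handshake and get the client's InitializeParams back.
    let params = server.initialize(serde_json::json!({})).unwrap();
    assert!(params.get("capabilities").is_some());

    // The client now sees the InitializeResult response.
    match client.receiver.recv().unwrap() {
        Message::Response(resp) => assert!(resp.result.is_some()),
        msg => panic!("unexpected message: {msg:?}"),
    }
}
```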
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
new file mode 100644
index 000000000..97e5bd35c
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
@@ -0,0 +1,343 @@
+use std::{
+ fmt,
+ io::{self, BufRead, Write},
+};
+
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+
+use crate::error::ExtractError;
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(untagged)]
+pub enum Message {
+ Request(Request),
+ Response(Response),
+ Notification(Notification),
+}
+
+impl From<Request> for Message {
+ fn from(request: Request) -> Message {
+ Message::Request(request)
+ }
+}
+
+impl From<Response> for Message {
+ fn from(response: Response) -> Message {
+ Message::Response(response)
+ }
+}
+
+impl From<Notification> for Message {
+ fn from(notification: Notification) -> Message {
+ Message::Notification(notification)
+ }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[serde(transparent)]
+pub struct RequestId(IdRepr);
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[serde(untagged)]
+enum IdRepr {
+ I32(i32),
+ String(String),
+}
+
+impl From<i32> for RequestId {
+ fn from(id: i32) -> RequestId {
+ RequestId(IdRepr::I32(id))
+ }
+}
+
+impl From<String> for RequestId {
+ fn from(id: String) -> RequestId {
+ RequestId(IdRepr::String(id))
+ }
+}
+
+impl fmt::Display for RequestId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 {
+ IdRepr::I32(it) => fmt::Display::fmt(it, f),
+ // Use debug here, to make it clear that `92` and `"92"` are
+ // different, and to reduce WTF factor if the server uses `" "` as an
+ // ID.
+ IdRepr::String(it) => fmt::Debug::fmt(it, f),
+ }
+ }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Request {
+ pub id: RequestId,
+ pub method: String,
+ #[serde(default = "serde_json::Value::default")]
+ #[serde(skip_serializing_if = "serde_json::Value::is_null")]
+ pub params: serde_json::Value,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Response {
+ // JSON RPC allows this to be null if it was impossible
+ // to decode the request's id. Ignore this special case
+ // and just die horribly.
+ pub id: RequestId,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub result: Option<serde_json::Value>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub error: Option<ResponseError>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct ResponseError {
+ pub code: i32,
+ pub message: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub data: Option<serde_json::Value>,
+}
+
+#[derive(Clone, Copy, Debug)]
+#[allow(unused)]
+pub enum ErrorCode {
+ // Defined by JSON RPC:
+ ParseError = -32700,
+ InvalidRequest = -32600,
+ MethodNotFound = -32601,
+ InvalidParams = -32602,
+ InternalError = -32603,
+ ServerErrorStart = -32099,
+ ServerErrorEnd = -32000,
+
+ /// Error code indicating that a server received a notification or
+ /// request before the server has received the `initialize` request.
+ ServerNotInitialized = -32002,
+ UnknownErrorCode = -32001,
+
+ // Defined by the protocol:
+ /// The client has canceled a request and the server has detected
+ /// the cancellation.
+ RequestCanceled = -32800,
+
+ /// The server detected that the content of a document got
+ /// modified outside normal conditions. A server should
+ /// NOT send this error code if it detects a content change
+ /// in its unprocessed messages. Even a result computed
+ /// on an older state might still be useful for the client.
+ ///
+ /// If a client decides that a result is not of any use anymore
+ /// the client should cancel the request.
+ ContentModified = -32801,
+
+ /// The server cancelled the request. This error code should
+ /// only be used for requests that explicitly support being
+ /// server cancellable.
+ ///
+ /// @since 3.17.0
+ ServerCancelled = -32802,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct Notification {
+ pub method: String,
+ #[serde(default = "serde_json::Value::default")]
+ #[serde(skip_serializing_if = "serde_json::Value::is_null")]
+ pub params: serde_json::Value,
+}
+
+impl Message {
+ pub fn read(r: &mut impl BufRead) -> io::Result<Option<Message>> {
+ Message::_read(r)
+ }
+ fn _read(r: &mut dyn BufRead) -> io::Result<Option<Message>> {
+ let text = match read_msg_text(r)? {
+ None => return Ok(None),
+ Some(text) => text,
+ };
+ let msg = serde_json::from_str(&text)?;
+ Ok(Some(msg))
+ }
+ pub fn write(self, w: &mut impl Write) -> io::Result<()> {
+ self._write(w)
+ }
+ fn _write(self, w: &mut dyn Write) -> io::Result<()> {
+ #[derive(Serialize)]
+ struct JsonRpc {
+ jsonrpc: &'static str,
+ #[serde(flatten)]
+ msg: Message,
+ }
+ let text = serde_json::to_string(&JsonRpc { jsonrpc: "2.0", msg: self })?;
+ write_msg_text(w, &text)
+ }
+}
+
+impl Response {
+ pub fn new_ok<R: Serialize>(id: RequestId, result: R) -> Response {
+ Response { id, result: Some(serde_json::to_value(result).unwrap()), error: None }
+ }
+ pub fn new_err(id: RequestId, code: i32, message: String) -> Response {
+ let error = ResponseError { code, message, data: None };
+ Response { id, result: None, error: Some(error) }
+ }
+}
+
+impl Request {
+ pub fn new<P: Serialize>(id: RequestId, method: String, params: P) -> Request {
+ Request { id, method, params: serde_json::to_value(params).unwrap() }
+ }
+ pub fn extract<P: DeserializeOwned>(
+ self,
+ method: &str,
+ ) -> Result<(RequestId, P), ExtractError<Request>> {
+ if self.method != method {
+ return Err(ExtractError::MethodMismatch(self));
+ }
+ match serde_json::from_value(self.params) {
+ Ok(params) => Ok((self.id, params)),
+ Err(error) => Err(ExtractError::JsonError { method: self.method, error }),
+ }
+ }
+
+ pub(crate) fn is_shutdown(&self) -> bool {
+ self.method == "shutdown"
+ }
+ pub(crate) fn is_initialize(&self) -> bool {
+ self.method == "initialize"
+ }
+}
+
+impl Notification {
+ pub fn new(method: String, params: impl Serialize) -> Notification {
+ Notification { method, params: serde_json::to_value(params).unwrap() }
+ }
+ pub fn extract<P: DeserializeOwned>(
+ self,
+ method: &str,
+ ) -> Result<P, ExtractError<Notification>> {
+ if self.method != method {
+ return Err(ExtractError::MethodMismatch(self));
+ }
+ match serde_json::from_value(self.params) {
+ Ok(params) => Ok(params),
+ Err(error) => Err(ExtractError::JsonError { method: self.method, error }),
+ }
+ }
+ pub(crate) fn is_exit(&self) -> bool {
+ self.method == "exit"
+ }
+ pub(crate) fn is_initialized(&self) -> bool {
+ self.method == "initialized"
+ }
+}
+
+fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> {
+ fn invalid_data(error: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
+ io::Error::new(io::ErrorKind::InvalidData, error)
+ }
+ macro_rules! invalid_data {
+ ($($tt:tt)*) => (invalid_data(format!($($tt)*)))
+ }
+
+ let mut size = None;
+ let mut buf = String::new();
+ loop {
+ buf.clear();
+ if inp.read_line(&mut buf)? == 0 {
+ return Ok(None);
+ }
+ if !buf.ends_with("\r\n") {
+ return Err(invalid_data!("malformed header: {:?}", buf));
+ }
+ let buf = &buf[..buf.len() - 2];
+ if buf.is_empty() {
+ break;
+ }
+ let mut parts = buf.splitn(2, ": ");
+ let header_name = parts.next().unwrap();
+ let header_value =
+ parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?;
+ if header_name == "Content-Length" {
+ size = Some(header_value.parse::<usize>().map_err(invalid_data)?);
+ }
+ }
+ let size: usize = size.ok_or_else(|| invalid_data!("no Content-Length"))?;
+ let mut buf = buf.into_bytes();
+ buf.resize(size, 0);
+ inp.read_exact(&mut buf)?;
+ let buf = String::from_utf8(buf).map_err(invalid_data)?;
+ log::debug!("< {}", buf);
+ Ok(Some(buf))
+}
+
+fn write_msg_text(out: &mut dyn Write, msg: &str) -> io::Result<()> {
+ log::debug!("> {}", msg);
+ write!(out, "Content-Length: {}\r\n\r\n", msg.len())?;
+ out.write_all(msg.as_bytes())?;
+ out.flush()?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::{Message, Notification, Request, RequestId};
+
+ #[test]
+ fn shutdown_with_explicit_null() {
+ let text = "{\"jsonrpc\": \"2.0\",\"id\": 3,\"method\": \"shutdown\", \"params\": null }";
+ let msg: Message = serde_json::from_str(text).unwrap();
+
+ assert!(
+ matches!(msg, Message::Request(req) if req.id == 3.into() && req.method == "shutdown")
+ );
+ }
+
+ #[test]
+ fn shutdown_with_no_params() {
+ let text = "{\"jsonrpc\": \"2.0\",\"id\": 3,\"method\": \"shutdown\"}";
+ let msg: Message = serde_json::from_str(text).unwrap();
+
+ assert!(
+ matches!(msg, Message::Request(req) if req.id == 3.into() && req.method == "shutdown")
+ );
+ }
+
+ #[test]
+ fn notification_with_explicit_null() {
+ let text = "{\"jsonrpc\": \"2.0\",\"method\": \"exit\", \"params\": null }";
+ let msg: Message = serde_json::from_str(text).unwrap();
+
+ assert!(matches!(msg, Message::Notification(not) if not.method == "exit"));
+ }
+
+ #[test]
+ fn notification_with_no_params() {
+ let text = "{\"jsonrpc\": \"2.0\",\"method\": \"exit\"}";
+ let msg: Message = serde_json::from_str(text).unwrap();
+
+ assert!(matches!(msg, Message::Notification(not) if not.method == "exit"));
+ }
+
+ #[test]
+ fn serialize_request_with_null_params() {
+ let msg = Message::Request(Request {
+ id: RequestId::from(3),
+ method: "shutdown".into(),
+ params: serde_json::Value::Null,
+ });
+ let serialized = serde_json::to_string(&msg).unwrap();
+
+ assert_eq!("{\"id\":3,\"method\":\"shutdown\"}", serialized);
+ }
+
+ #[test]
+ fn serialize_notification_with_null_params() {
+ let msg = Message::Notification(Notification {
+ method: "exit".into(),
+ params: serde_json::Value::Null,
+ });
+ let serialized = serde_json::to_string(&msg).unwrap();
+
+ assert_eq!("{\"method\":\"exit\"}", serialized);
+ }
+}
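
For reference, an editorial sketch (not part of the imported sources) of the `Content-Length`-framed wire format that `Message::write` and `Message::read` above implement:

```
use lsp_server::{Message, Request};

fn main() -> std::io::Result<()> {
    // Serialize a request into the LSP wire format: a Content-Length header,
    // a blank line, then the JSON-RPC body.
    let msg: Message = Request::new(1.into(), "shutdown".into(), serde_json::Value::Null).into();
    let mut wire = Vec::new();
    msg.write(&mut wire)?;
    assert!(wire.starts_with(b"Content-Length: "));

    // Parse it back; `&[u8]` implements `BufRead`, so a slice works as the reader.
    let mut reader: &[u8] = &wire;
    let parsed = Message::read(&mut reader)?.expect("one complete message");
    assert!(matches!(parsed, Message::Request(req) if req.method == "shutdown"));
    Ok(())
}
```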
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs b/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs
new file mode 100644
index 000000000..1f3d44715
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/req_queue.rs
@@ -0,0 +1,62 @@
+use std::collections::HashMap;
+
+use serde::Serialize;
+
+use crate::{ErrorCode, Request, RequestId, Response, ResponseError};
+
+/// Manages the set of pending requests, both incoming and outgoing.
+#[derive(Debug)]
+pub struct ReqQueue<I, O> {
+ pub incoming: Incoming<I>,
+ pub outgoing: Outgoing<O>,
+}
+
+impl<I, O> Default for ReqQueue<I, O> {
+ fn default() -> ReqQueue<I, O> {
+ ReqQueue {
+ incoming: Incoming { pending: HashMap::default() },
+ outgoing: Outgoing { next_id: 0, pending: HashMap::default() },
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct Incoming<I> {
+ pending: HashMap<RequestId, I>,
+}
+
+#[derive(Debug)]
+pub struct Outgoing<O> {
+ next_id: i32,
+ pending: HashMap<RequestId, O>,
+}
+
+impl<I> Incoming<I> {
+ pub fn register(&mut self, id: RequestId, data: I) {
+ self.pending.insert(id, data);
+ }
+ pub fn cancel(&mut self, id: RequestId) -> Option<Response> {
+ let _data = self.complete(id.clone())?;
+ let error = ResponseError {
+ code: ErrorCode::RequestCanceled as i32,
+ message: "canceled by client".to_string(),
+ data: None,
+ };
+ Some(Response { id, result: None, error: Some(error) })
+ }
+ pub fn complete(&mut self, id: RequestId) -> Option<I> {
+ self.pending.remove(&id)
+ }
+}
+
+impl<O> Outgoing<O> {
+ pub fn register<P: Serialize>(&mut self, method: String, params: P, data: O) -> Request {
+ let id = RequestId::from(self.next_id);
+ self.pending.insert(id.clone(), data);
+ self.next_id += 1;
+ Request::new(id, method, params)
+ }
+ pub fn complete(&mut self, id: RequestId) -> Option<O> {
+ self.pending.remove(&id)
+ }
+}
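
An editorial sketch (not part of the imported sources) of how a server loop might use `ReqQueue`; the method names and the string payloads attached to each request are illustrative:

```
use lsp_server::{ReqQueue, RequestId};

fn main() {
    // The type parameters are whatever per-request bookkeeping the server needs;
    // plain labels are enough for this sketch.
    let mut queue: ReqQueue<&'static str, &'static str> = ReqQueue::default();

    // Incoming: remember the request until it is answered or cancelled.
    let id = RequestId::from(1);
    queue.incoming.register(id.clone(), "textDocument/definition");
    if let Some(resp) = queue.incoming.cancel(id) {
        // A ready-made RequestCanceled error response to send back to the client.
        assert!(resp.error.is_some());
    }

    // Outgoing: allocate a fresh id and build the request to send to the client.
    let req = queue.outgoing.register(
        "window/workDoneProgress/create".to_string(),
        serde_json::json!({}),
        "progress token bookkeeping",
    );
    assert_eq!(queue.outgoing.complete(req.id.clone()), Some("progress token bookkeeping"));
}
```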
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs b/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
new file mode 100644
index 000000000..4a59c4c0f
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
@@ -0,0 +1,46 @@
+use std::{
+ io::{self, BufReader},
+ net::TcpStream,
+ thread,
+};
+
+use crossbeam_channel::{bounded, Receiver, Sender};
+
+use crate::{
+ stdio::{make_io_threads, IoThreads},
+ Message,
+};
+
+pub(crate) fn socket_transport(
+ stream: TcpStream,
+) -> (Sender<Message>, Receiver<Message>, IoThreads) {
+ let (reader_receiver, reader) = make_reader(stream.try_clone().unwrap());
+ let (writer_sender, writer) = make_write(stream.try_clone().unwrap());
+ let io_threads = make_io_threads(reader, writer);
+ (writer_sender, reader_receiver, io_threads)
+}
+
+fn make_reader(stream: TcpStream) -> (Receiver<Message>, thread::JoinHandle<io::Result<()>>) {
+ let (reader_sender, reader_receiver) = bounded::<Message>(0);
+ let reader = thread::spawn(move || {
+ let mut buf_read = BufReader::new(stream);
+ while let Some(msg) = Message::read(&mut buf_read).unwrap() {
+ let is_exit = matches!(&msg, Message::Notification(n) if n.is_exit());
+ reader_sender.send(msg).unwrap();
+ if is_exit {
+ break;
+ }
+ }
+ Ok(())
+ });
+ (reader_receiver, reader)
+}
+
+fn make_write(mut stream: TcpStream) -> (Sender<Message>, thread::JoinHandle<io::Result<()>>) {
+ let (writer_sender, writer_receiver) = bounded::<Message>(0);
+ let writer = thread::spawn(move || {
+ writer_receiver.into_iter().try_for_each(|it| it.write(&mut stream)).unwrap();
+ Ok(())
+ });
+ (writer_sender, writer)
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
new file mode 100644
index 000000000..cdee6432d
--- /dev/null
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
@@ -0,0 +1,71 @@
+use std::{
+ io::{self, stdin, stdout},
+ thread,
+};
+
+use crossbeam_channel::{bounded, Receiver, Sender};
+
+use crate::Message;
+
+/// Creates an LSP connection via stdio.
+pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThreads) {
+ let (writer_sender, writer_receiver) = bounded::<Message>(0);
+ let writer = thread::spawn(move || {
+ let stdout = stdout();
+ let mut stdout = stdout.lock();
+ writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))?;
+ Ok(())
+ });
+ let (reader_sender, reader_receiver) = bounded::<Message>(0);
+ let reader = thread::spawn(move || {
+ let stdin = stdin();
+ let mut stdin = stdin.lock();
+ while let Some(msg) = Message::read(&mut stdin)? {
+ let is_exit = match &msg {
+ Message::Notification(n) => n.is_exit(),
+ _ => false,
+ };
+
+ reader_sender.send(msg).unwrap();
+
+ if is_exit {
+ break;
+ }
+ }
+ Ok(())
+ });
+ let threads = IoThreads { reader, writer };
+ (writer_sender, reader_receiver, threads)
+}
+
+// Creates an IoThreads
+pub(crate) fn make_io_threads(
+ reader: thread::JoinHandle<io::Result<()>>,
+ writer: thread::JoinHandle<io::Result<()>>,
+) -> IoThreads {
+ IoThreads { reader, writer }
+}
+
+pub struct IoThreads {
+ reader: thread::JoinHandle<io::Result<()>>,
+ writer: thread::JoinHandle<io::Result<()>>,
+}
+
+impl IoThreads {
+ pub fn join(self) -> io::Result<()> {
+ match self.reader.join() {
+ Ok(r) => r?,
+ Err(err) => {
+ println!("reader panicked!");
+ std::panic::panic_any(err)
+ }
+ }
+ match self.writer.join() {
+ Ok(r) => r,
+ Err(err) => {
+ println!("writer panicked!");
+ std::panic::panic_any(err);
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/rustfmt.toml b/src/tools/rust-analyzer/rustfmt.toml
new file mode 100644
index 000000000..71007de81
--- /dev/null
+++ b/src/tools/rust-analyzer/rustfmt.toml
@@ -0,0 +1,2 @@
+reorder_modules = false
+use_small_heuristics = "Max"
diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml
new file mode 100644
index 000000000..fa0824ac5
--- /dev/null
+++ b/src/tools/rust-analyzer/triagebot.toml
@@ -0,0 +1 @@
+[assign]
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
new file mode 100644
index 000000000..95d44e9b9
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "xtask"
+version = "0.1.0"
+publish = false
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[dependencies]
+anyhow = "1.0.57"
+flate2 = "1.0.24"
+write-json = "0.1.2"
+xshell = "0.2.2"
+xflags = "0.2.4"
+# Avoid adding more dependencies to this crate
diff --git a/src/tools/rust-analyzer/xtask/src/dist.rs b/src/tools/rust-analyzer/xtask/src/dist.rs
new file mode 100644
index 000000000..686aec4ae
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/dist.rs
@@ -0,0 +1,170 @@
+use std::{
+ env,
+ fs::File,
+ io,
+ path::{Path, PathBuf},
+};
+
+use flate2::{write::GzEncoder, Compression};
+use xshell::{cmd, Shell};
+
+use crate::{date_iso, flags, project_root};
+
+const VERSION_STABLE: &str = "0.3";
+const VERSION_NIGHTLY: &str = "0.4";
+const VERSION_DEV: &str = "0.5"; // keep this one in sync with `package.json`
+
+impl flags::Dist {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+ let stable = sh.var("GITHUB_REF").unwrap_or_default().as_str() == "refs/heads/release";
+
+ let project_root = project_root();
+ let target = Target::get(&project_root);
+ let dist = project_root.join("dist");
+ sh.remove_path(&dist)?;
+ sh.create_dir(&dist)?;
+
+ if let Some(patch_version) = self.client_patch_version {
+ let version = if stable {
+ format!("{}.{}", VERSION_STABLE, patch_version)
+ } else {
+ // A hack to make VS Code prefer nightly over stable.
+ format!("{}.{}", VERSION_NIGHTLY, patch_version)
+ };
+ dist_server(sh, &format!("{version}-standalone"), &target)?;
+ let release_tag = if stable { date_iso(sh)? } else { "nightly".to_string() };
+ dist_client(sh, &version, &release_tag, &target)?;
+ } else {
+ dist_server(sh, "0.0.0-standalone", &target)?;
+ }
+ Ok(())
+ }
+}
+
+fn dist_client(
+ sh: &Shell,
+ version: &str,
+ release_tag: &str,
+ target: &Target,
+) -> anyhow::Result<()> {
+ let bundle_path = Path::new("editors").join("code").join("server");
+ sh.create_dir(&bundle_path)?;
+ sh.copy_file(&target.server_path, &bundle_path)?;
+ if let Some(symbols_path) = &target.symbols_path {
+ sh.copy_file(symbols_path, &bundle_path)?;
+ }
+
+ let _d = sh.push_dir("./editors/code");
+
+ let mut patch = Patch::new(sh, "./package.json")?;
+ patch
+ .replace(
+ &format!(r#""version": "{}.0-dev""#, VERSION_DEV),
+ &format!(r#""version": "{}""#, version),
+ )
+ .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag))
+ .replace(r#""$generated-start": {},"#, "")
+ .replace(",\n \"$generated-end\": {}", "")
+ .replace(r#""enabledApiProposals": [],"#, r#""#);
+ patch.commit(sh)?;
+
+ Ok(())
+}
+
+fn dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()> {
+ let _e = sh.push_env("CFG_RELEASE", release);
+ let _e = sh.push_env("CARGO_PROFILE_RELEASE_LTO", "thin");
+
+ // Uncomment to enable debug info for releases. Note that:
+ // * debug info is split on windows and macs, so it does nothing for those platforms,
+ // * on Linux, this blows up the binary size from 8MB to 43MB, which is unreasonable.
+ // let _e = sh.push_env("CARGO_PROFILE_RELEASE_DEBUG", "1");
+
+ if target.name.contains("-linux-") {
+ env::set_var("CC", "clang");
+ }
+
+ let target_name = &target.name;
+ cmd!(sh, "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target_name} --release").run()?;
+
+ let dst = Path::new("dist").join(&target.artifact_name);
+ gzip(&target.server_path, &dst.with_extension("gz"))?;
+
+ Ok(())
+}
+
+fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> {
+ let mut encoder = GzEncoder::new(File::create(dest_path)?, Compression::best());
+ let mut input = io::BufReader::new(File::open(src_path)?);
+ io::copy(&mut input, &mut encoder)?;
+ encoder.finish()?;
+ Ok(())
+}
+
+struct Target {
+ name: String,
+ server_path: PathBuf,
+ symbols_path: Option<PathBuf>,
+ artifact_name: String,
+}
+
+impl Target {
+ fn get(project_root: &Path) -> Self {
+ let name = match env::var("RA_TARGET") {
+ Ok(target) => target,
+ _ => {
+ if cfg!(target_os = "linux") {
+ "x86_64-unknown-linux-gnu".to_string()
+ } else if cfg!(target_os = "windows") {
+ "x86_64-pc-windows-msvc".to_string()
+ } else if cfg!(target_os = "macos") {
+ "x86_64-apple-darwin".to_string()
+ } else {
+ panic!("Unsupported OS, maybe try setting RA_TARGET")
+ }
+ }
+ };
+ let out_path = project_root.join("target").join(&name).join("release");
+ let (exe_suffix, symbols_path) = if name.contains("-windows-") {
+ (".exe".into(), Some(out_path.join("rust_analyzer.pdb")))
+ } else {
+ (String::new(), None)
+ };
+ let server_path = out_path.join(format!("rust-analyzer{}", exe_suffix));
+ let artifact_name = format!("rust-analyzer-{}{}", name, exe_suffix);
+ Self { name, server_path, symbols_path, artifact_name }
+ }
+}
+
+struct Patch {
+ path: PathBuf,
+ original_contents: String,
+ contents: String,
+}
+
+impl Patch {
+ fn new(sh: &Shell, path: impl Into<PathBuf>) -> anyhow::Result<Patch> {
+ let path = path.into();
+ let contents = sh.read_file(&path)?;
+ Ok(Patch { path, original_contents: contents.clone(), contents })
+ }
+
+ fn replace(&mut self, from: &str, to: &str) -> &mut Patch {
+ assert!(self.contents.contains(from));
+ self.contents = self.contents.replace(from, to);
+ self
+ }
+
+ fn commit(&self, sh: &Shell) -> anyhow::Result<()> {
+ sh.write_file(&self.path, &self.contents)?;
+ Ok(())
+ }
+}
+
+impl Drop for Patch {
+ fn drop(&mut self) {
+ // FIXME: find a way to bring this back
+ let _ = &self.original_contents;
+ // write_file(&self.path, &self.original_contents).unwrap();
+ }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
new file mode 100644
index 000000000..993c64cce
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -0,0 +1,148 @@
+#![allow(unreachable_pub)]
+
+use crate::install::{ClientOpt, Malloc, ServerOpt};
+
+xflags::xflags! {
+ src "./src/flags.rs"
+
+ /// Run custom build command.
+ cmd xtask {
+ default cmd help {
+ /// Print help information.
+ optional -h, --help
+ }
+
+ /// Install rust-analyzer server or editor plugin.
+ cmd install {
+ /// Install only VS Code plugin.
+ optional --client
+ /// One of 'code', 'code-exploration', 'code-insiders', 'codium', or 'code-oss'.
+ optional --code-bin name: String
+
+ /// Install only the language server.
+ optional --server
+ /// Use mimalloc allocator for server
+ optional --mimalloc
+ /// Use jemalloc allocator for server
+ optional --jemalloc
+ }
+
+ cmd fuzz-tests {}
+
+ cmd release {
+ optional --dry-run
+ }
+ cmd promote {
+ optional --dry-run
+ }
+ cmd dist {
+ optional --client-patch-version version: String
+ }
+ cmd metrics {
+ optional --dry-run
+ }
+ /// Builds a benchmark version of rust-analyzer and puts it into `./target`.
+ cmd bb
+ required suffix: String
+ {}
+ }
+}
+
+// generated start
+// The following code is generated by `xflags` macro.
+// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate.
+#[derive(Debug)]
+pub struct Xtask {
+ pub subcommand: XtaskCmd,
+}
+
+#[derive(Debug)]
+pub enum XtaskCmd {
+ Help(Help),
+ Install(Install),
+ FuzzTests(FuzzTests),
+ Release(Release),
+ Promote(Promote),
+ Dist(Dist),
+ Metrics(Metrics),
+ Bb(Bb),
+}
+
+#[derive(Debug)]
+pub struct Help {
+ pub help: bool,
+}
+
+#[derive(Debug)]
+pub struct Install {
+ pub client: bool,
+ pub code_bin: Option<String>,
+ pub server: bool,
+ pub mimalloc: bool,
+ pub jemalloc: bool,
+}
+
+#[derive(Debug)]
+pub struct FuzzTests;
+
+#[derive(Debug)]
+pub struct Release {
+ pub dry_run: bool,
+}
+
+#[derive(Debug)]
+pub struct Promote {
+ pub dry_run: bool,
+}
+
+#[derive(Debug)]
+pub struct Dist {
+ pub client_patch_version: Option<String>,
+}
+
+#[derive(Debug)]
+pub struct Metrics {
+ pub dry_run: bool,
+}
+
+#[derive(Debug)]
+pub struct Bb {
+ pub suffix: String,
+}
+
+impl Xtask {
+ pub const HELP: &'static str = Self::HELP_;
+
+ #[allow(dead_code)]
+ pub fn from_env() -> xflags::Result<Self> {
+ Self::from_env_()
+ }
+
+ #[allow(dead_code)]
+ pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> {
+ Self::from_vec_(args)
+ }
+}
+// generated end
+
+impl Install {
+ pub(crate) fn server(&self) -> Option<ServerOpt> {
+ if self.client && !self.server {
+ return None;
+ }
+ let malloc = if self.mimalloc {
+ Malloc::Mimalloc
+ } else if self.jemalloc {
+ Malloc::Jemalloc
+ } else {
+ Malloc::System
+ };
+ Some(ServerOpt { malloc })
+ }
+ pub(crate) fn client(&self) -> Option<ClientOpt> {
+ if !self.client && self.server {
+ return None;
+ }
+ Some(ClientOpt { code_bin: self.code_bin.clone() })
+ }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
new file mode 100644
index 000000000..ae978d551
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -0,0 +1,142 @@
+//! Installs rust-analyzer language server and/or editor plugin.
+
+use std::{env, path::PathBuf, str};
+
+use anyhow::{bail, format_err, Context, Result};
+use xshell::{cmd, Shell};
+
+use crate::flags;
+
+impl flags::Install {
+ pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+ if cfg!(target_os = "macos") {
+ fix_path_for_mac(sh).context("Fix path for mac")?;
+ }
+ if let Some(server) = self.server() {
+ install_server(sh, server).context("install server")?;
+ }
+ if let Some(client) = self.client() {
+ install_client(sh, client).context("install client")?;
+ }
+ Ok(())
+ }
+}
+
+#[derive(Clone)]
+pub(crate) struct ClientOpt {
+ pub(crate) code_bin: Option<String>,
+}
+
+const VS_CODES: &[&str] = &["code", "code-exploration", "code-insiders", "codium", "code-oss"];
+
+pub(crate) struct ServerOpt {
+ pub(crate) malloc: Malloc,
+}
+
+pub(crate) enum Malloc {
+ System,
+ Mimalloc,
+ Jemalloc,
+}
+
+fn fix_path_for_mac(sh: &Shell) -> Result<()> {
+ let mut vscode_path: Vec<PathBuf> = {
+ const COMMON_APP_PATH: &str =
+ r"/Applications/Visual Studio Code.app/Contents/Resources/app/bin";
+ const ROOT_DIR: &str = "";
+ let home_dir = sh.var("HOME").map_err(|err| {
+ format_err!("Failed getting HOME from environment with error: {}.", err)
+ })?;
+
+ [ROOT_DIR, &home_dir]
+ .into_iter()
+ .map(|dir| dir.to_string() + COMMON_APP_PATH)
+ .map(PathBuf::from)
+ .filter(|path| path.exists())
+ .collect()
+ };
+
+ if !vscode_path.is_empty() {
+ let vars = sh.var_os("PATH").context("Could not get PATH variable from env.")?;
+
+ let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
+ paths.append(&mut vscode_path);
+ let new_paths = env::join_paths(paths).context("build env PATH")?;
+ sh.set_var("PATH", &new_paths);
+ }
+
+ Ok(())
+}
+
+fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> {
+ let _dir = sh.push_dir("./editors/code");
+
+ // Package extension.
+ if cfg!(unix) {
+ cmd!(sh, "npm --version").run().context("`npm` is required to build the VS Code plugin")?;
+ cmd!(sh, "npm ci").run()?;
+
+ cmd!(sh, "npm run package --scripts-prepend-node-path").run()?;
+ } else {
+ cmd!(sh, "cmd.exe /c npm --version")
+ .run()
+ .context("`npm` is required to build the VS Code plugin")?;
+ cmd!(sh, "cmd.exe /c npm ci").run()?;
+
+ cmd!(sh, "cmd.exe /c npm run package").run()?;
+ };
+
+ // Find the appropriate VS Code binary.
+ let lifetime_extender;
+ let candidates: &[&str] = match client_opt.code_bin.as_deref() {
+ Some(it) => {
+ lifetime_extender = [it];
+ &lifetime_extender[..]
+ }
+ None => VS_CODES,
+ };
+ let code = candidates
+ .iter()
+ .copied()
+ .find(|&bin| {
+ if cfg!(unix) {
+ cmd!(sh, "{bin} --version").read().is_ok()
+ } else {
+ cmd!(sh, "cmd.exe /c {bin}.cmd --version").read().is_ok()
+ }
+ })
+ .ok_or_else(|| {
+ format_err!("Can't execute `{} --version`. Perhaps it is not in $PATH?", candidates[0])
+ })?;
+
+ // Install & verify.
+ let installed_extensions = if cfg!(unix) {
+ cmd!(sh, "{code} --install-extension rust-analyzer.vsix --force").run()?;
+ cmd!(sh, "{code} --list-extensions").read()?
+ } else {
+ cmd!(sh, "cmd.exe /c {code}.cmd --install-extension rust-analyzer.vsix --force").run()?;
+ cmd!(sh, "cmd.exe /c {code}.cmd --list-extensions").read()?
+ };
+
+ if !installed_extensions.contains("rust-analyzer") {
+ bail!(
+ "Could not install the Visual Studio Code extension. \
+ Please make sure you have at least NodeJS 12.x together with the latest version of VS Code installed and try again. \
+ Note that installing via xtask install does not work for VS Code Remote, instead you’ll need to install the .vsix manually."
+ );
+ }
+
+ Ok(())
+}
+
+fn install_server(sh: &Shell, opts: ServerOpt) -> Result<()> {
+ let features = match opts.malloc {
+ Malloc::System => &[][..],
+ Malloc::Mimalloc => &["--features", "mimalloc"],
+ Malloc::Jemalloc => &["--features", "jemalloc"],
+ };
+
+ let cmd = cmd!(sh, "cargo install --path crates/rust-analyzer --locked --force --features force-always-assert {features...}");
+ cmd.run()?;
+ Ok(())
+}
diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs
new file mode 100644
index 000000000..335ac324a
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/main.rs
@@ -0,0 +1,91 @@
+//! See <https://github.com/matklad/cargo-xtask/>.
+//!
+//! This binary defines various auxiliary build commands, which are not
+//! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask`
+//! for code generation and `cargo xtask install` for installation of
+//! rust-analyzer server and client.
+//!
+//! This binary is integrated into the `cargo` command line by using an alias in
+//! `.cargo/config`.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod flags;
+
+mod install;
+mod release;
+mod dist;
+mod metrics;
+
+use anyhow::bail;
+use std::{
+ env,
+ path::{Path, PathBuf},
+};
+use xshell::{cmd, Shell};
+
+fn main() -> anyhow::Result<()> {
+ let sh = &Shell::new()?;
+ sh.change_dir(project_root());
+
+ let flags = flags::Xtask::from_env()?;
+ match flags.subcommand {
+ flags::XtaskCmd::Help(_) => {
+ println!("{}", flags::Xtask::HELP);
+ Ok(())
+ }
+ flags::XtaskCmd::Install(cmd) => cmd.run(sh),
+ flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh),
+ flags::XtaskCmd::Release(cmd) => cmd.run(sh),
+ flags::XtaskCmd::Promote(cmd) => cmd.run(sh),
+ flags::XtaskCmd::Dist(cmd) => cmd.run(sh),
+ flags::XtaskCmd::Metrics(cmd) => cmd.run(sh),
+ flags::XtaskCmd::Bb(cmd) => {
+ {
+ let _d = sh.push_dir("./crates/rust-analyzer");
+ cmd!(sh, "cargo build --release --features jemalloc").run()?;
+ }
+ sh.copy_file(
+ "./target/release/rust-analyzer",
+ format!("./target/rust-analyzer-{}", cmd.suffix),
+ )?;
+ Ok(())
+ }
+ }
+}
+
+fn project_root() -> PathBuf {
+ Path::new(
+ &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
+ )
+ .ancestors()
+ .nth(1)
+ .unwrap()
+ .to_path_buf()
+}
+
+fn run_fuzzer(sh: &Shell) -> anyhow::Result<()> {
+ let _d = sh.push_dir("./crates/syntax");
+ let _e = sh.push_env("RUSTUP_TOOLCHAIN", "nightly");
+ if cmd!(sh, "cargo fuzz --help").read().is_err() {
+ cmd!(sh, "cargo install cargo-fuzz").run()?;
+ };
+
+ // Expecting nightly rustc
+ let out = cmd!(sh, "rustc --version").read()?;
+ if !out.contains("nightly") {
+ bail!("fuzz tests require nightly rustc")
+ }
+
+ cmd!(sh, "cargo fuzz run parser").run()?;
+ Ok(())
+}
+
+fn date_iso(sh: &Shell) -> anyhow::Result<String> {
+ let res = cmd!(sh, "date -u +%Y-%m-%d").read()?;
+ Ok(res)
+}
+
+fn is_release_tag(tag: &str) -> bool {
+ tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
+}
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
new file mode 100644
index 000000000..ebeb87346
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -0,0 +1,200 @@
+use std::{
+ collections::BTreeMap,
+ env, fs,
+ io::Write as _,
+ path::Path,
+ time::{Instant, SystemTime, UNIX_EPOCH},
+};
+
+use anyhow::{bail, format_err};
+use xshell::{cmd, Shell};
+
+use crate::flags;
+
+type Unit = String;
+
+impl flags::Metrics {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+ let mut metrics = Metrics::new(sh)?;
+ if !self.dry_run {
+ sh.remove_path("./target/release")?;
+ }
+ if !Path::new("./target/rustc-perf").exists() {
+ sh.create_dir("./target/rustc-perf")?;
+ cmd!(sh, "git clone https://github.com/rust-lang/rustc-perf.git ./target/rustc-perf")
+ .run()?;
+ }
+ {
+ let _d = sh.push_dir("./target/rustc-perf");
+ let revision = &metrics.perf_revision;
+ cmd!(sh, "git reset --hard {revision}").run()?;
+ }
+
+ let _env = sh.push_env("RA_METRICS", "1");
+
+ {
+ // https://github.com/rust-lang/rust-analyzer/issues/9997
+ let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender");
+ cmd!(sh, "cargo update -p url --precise 1.6.1").run()?;
+ }
+ metrics.measure_build(sh)?;
+ metrics.measure_analysis_stats_self(sh)?;
+ metrics.measure_analysis_stats(sh, "ripgrep")?;
+ metrics.measure_analysis_stats(sh, "webrender")?;
+ metrics.measure_analysis_stats(sh, "diesel/diesel")?;
+
+ if !self.dry_run {
+ let _d = sh.push_dir("target");
+ let metrics_token = env::var("METRICS_TOKEN").unwrap();
+ cmd!(
+ sh,
+ "git clone --depth 1 https://{metrics_token}@github.com/rust-analyzer/metrics.git"
+ )
+ .run()?;
+
+ {
+ let mut file =
+ fs::File::options().append(true).open("target/metrics/metrics.json")?;
+ writeln!(file, "{}", metrics.json())?;
+ }
+
+ let _d = sh.push_dir("metrics");
+ cmd!(sh, "git add .").run()?;
+ cmd!(sh, "git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈")
+ .run()?;
+ cmd!(sh, "git push origin master").run()?;
+ }
+ eprintln!("{metrics:#?}");
+ Ok(())
+ }
+}
+
+impl Metrics {
+ fn measure_build(&mut self, sh: &Shell) -> anyhow::Result<()> {
+ eprintln!("\nMeasuring build");
+ cmd!(sh, "cargo fetch").run()?;
+
+ let time = Instant::now();
+ cmd!(sh, "cargo build --release --package rust-analyzer --bin rust-analyzer").run()?;
+ let time = time.elapsed();
+ self.report("build", time.as_millis() as u64, "ms".into());
+ Ok(())
+ }
+ fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> {
+ self.measure_analysis_stats_path(sh, "self", ".")
+ }
+ fn measure_analysis_stats(&mut self, sh: &Shell, bench: &str) -> anyhow::Result<()> {
+ self.measure_analysis_stats_path(
+ sh,
+ bench,
+ &format!("./target/rustc-perf/collector/benchmarks/{}", bench),
+ )
+ }
+ fn measure_analysis_stats_path(
+ &mut self,
+ sh: &Shell,
+ name: &str,
+ path: &str,
+ ) -> anyhow::Result<()> {
+ eprintln!("\nMeasuring analysis-stats/{name}");
+ let output =
+ cmd!(sh, "./target/release/rust-analyzer -q analysis-stats --memory-usage {path}")
+ .read()?;
+ for (metric, value, unit) in parse_metrics(&output) {
+ self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into());
+ }
+ Ok(())
+ }
+}
+
+fn parse_metrics(output: &str) -> Vec<(&str, u64, &str)> {
+ output
+ .lines()
+ .filter_map(|it| {
+ let entry = it.split(':').collect::<Vec<_>>();
+ match entry.as_slice() {
+ ["METRIC", name, value, unit] => Some((*name, value.parse().unwrap(), *unit)),
+ _ => None,
+ }
+ })
+ .collect()
+}
+
+#[derive(Debug)]
+struct Metrics {
+ host: Host,
+ timestamp: SystemTime,
+ revision: String,
+ perf_revision: String,
+ metrics: BTreeMap<String, (u64, Unit)>,
+}
+
+#[derive(Debug)]
+struct Host {
+ os: String,
+ cpu: String,
+ mem: String,
+}
+
+impl Metrics {
+ fn new(sh: &Shell) -> anyhow::Result<Metrics> {
+ let host = Host::new(sh)?;
+ let timestamp = SystemTime::now();
+ let revision = cmd!(sh, "git rev-parse HEAD").read()?;
+ let perf_revision = "c52ee623e231e7690a93be88d943016968c1036b".into();
+ Ok(Metrics { host, timestamp, revision, perf_revision, metrics: BTreeMap::new() })
+ }
+
+ fn report(&mut self, name: &str, value: u64, unit: Unit) {
+ self.metrics.insert(name.into(), (value, unit));
+ }
+
+ fn json(&self) -> String {
+ let mut buf = String::new();
+ self.to_json(write_json::object(&mut buf));
+ buf
+ }
+
+ fn to_json(&self, mut obj: write_json::Object<'_>) {
+ self.host.to_json(obj.object("host"));
+ let timestamp = self.timestamp.duration_since(UNIX_EPOCH).unwrap();
+ obj.number("timestamp", timestamp.as_secs() as f64);
+ obj.string("revision", &self.revision);
+ obj.string("perf_revision", &self.perf_revision);
+ let mut metrics = obj.object("metrics");
+ for (k, (value, unit)) in &self.metrics {
+ metrics.array(k).number(*value as f64).string(unit);
+ }
+ }
+}
+
+impl Host {
+ fn new(sh: &Shell) -> anyhow::Result<Host> {
+ if cfg!(not(target_os = "linux")) {
+ bail!("can only collect metrics on Linux");
+ }
+
+ let os = read_field(sh, "/etc/os-release", "PRETTY_NAME=")?.trim_matches('"').to_string();
+
+ let cpu = read_field(sh, "/proc/cpuinfo", "model name")?
+ .trim_start_matches(':')
+ .trim()
+ .to_string();
+
+ let mem = read_field(sh, "/proc/meminfo", "MemTotal:")?;
+
+ return Ok(Host { os, cpu, mem });
+
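+ // Returns the remainder (trimmed) of the first line in `path` that starts with `field`.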
+ fn read_field(sh: &Shell, path: &str, field: &str) -> anyhow::Result<String> {
+ let text = sh.read_file(path)?;
+
+ text.lines()
+ .find_map(|it| it.strip_prefix(field))
+ .map(|it| it.trim().to_string())
+ .ok_or_else(|| format_err!("can't parse {}", path))
+ }
+ }
+ fn to_json(&self, mut obj: write_json::Object<'_>) {
+ obj.string("os", &self.os).string("cpu", &self.cpu).string("mem", &self.mem);
+ }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs
new file mode 100644
index 000000000..17ada5156
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/release.rs
@@ -0,0 +1,96 @@
+mod changelog;
+
+use xshell::{cmd, Shell};
+
+use crate::{date_iso, flags, is_release_tag, project_root};
+
+impl flags::Release {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+ if !self.dry_run {
+ cmd!(sh, "git switch release").run()?;
+ cmd!(sh, "git fetch upstream --tags --force").run()?;
+ cmd!(sh, "git reset --hard tags/nightly").run()?;
+ // The `release` branch sometimes has a couple of cherry-picked
+ // commits for patch releases. If that's the case, just overwrite
+ // it. As we are setting `release` branch to an up-to-date `nightly`
+ // tag, this shouldn't be problematic in general.
+ //
+ // Note that, as we tag releases, we don't worry about "losing"
+ // commits -- they'll be kept alive by the tag. More generally, we
+ // don't care about historic releases all that much, it's fine even
+ // to delete old tags.
+ cmd!(sh, "git push --force").run()?;
+ }
+
+ // Generates bits of manual.adoc.
+ cmd!(sh, "cargo test -p ide-assists -p ide-diagnostics -p rust-analyzer -- sourcegen_")
+ .run()?;
+
+ let website_root = project_root().join("../rust-analyzer.github.io");
+ {
+ let _dir = sh.push_dir(&website_root);
+ cmd!(sh, "git switch src").run()?;
+ cmd!(sh, "git pull").run()?;
+ }
+ let changelog_dir = website_root.join("./thisweek/_posts");
+
+ let today = date_iso(sh)?;
+ let commit = cmd!(sh, "git rev-parse HEAD").read()?;
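+ // The next changelog number is one more than the largest number found in the existing post file names.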
+ let changelog_n = sh
+ .read_dir(changelog_dir.as_path())?
+ .into_iter()
+ .filter_map(|p| p.file_stem().map(|s| s.to_string_lossy().to_string()))
+ .filter_map(|s| s.splitn(5, '-').last().map(|n| n.replace('-', ".")))
+ .filter_map(|s| s.parse::<f32>().ok())
+ .map(|n| 1 + n.floor() as usize)
+ .max()
+ .unwrap_or_default();
+
+ for adoc in [
+ "manual.adoc",
+ "generated_assists.adoc",
+ "generated_config.adoc",
+ "generated_diagnostic.adoc",
+ "generated_features.adoc",
+ ] {
+ let src = project_root().join("./docs/user/").join(adoc);
+ let dst = website_root.join(adoc);
+
+ let contents = sh.read_file(src)?;
+ sh.write_file(dst, contents)?;
+ }
+
+ let tags = cmd!(sh, "git tag --list").read()?;
+ let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap();
+
+ let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?;
+ let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
+ sh.write_file(&path, &contents)?;
+
+ Ok(())
+ }
+}
+
+impl flags::Promote {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+ let _dir = sh.push_dir("../rust-rust-analyzer");
+ cmd!(sh, "git switch master").run()?;
+ cmd!(sh, "git fetch upstream").run()?;
+ cmd!(sh, "git reset --hard upstream/master").run()?;
+
+ let date = date_iso(sh)?;
+ let branch = format!("rust-analyzer-{date}");
+ cmd!(sh, "git switch -c {branch}").run()?;
+ cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?;
+
+ if !self.dry_run {
+ cmd!(sh, "git push -u origin {branch}").run()?;
+ cmd!(
+ sh,
+ "xdg-open https://github.com/matklad/rust/pull/new/{branch}?body=r%3F%20%40ghost"
+ )
+ .run()?;
+ }
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
new file mode 100644
index 000000000..2647f7794
--- /dev/null
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -0,0 +1,171 @@
+use std::fmt::Write;
+use std::{env, iter};
+
+use anyhow::bail;
+use xshell::{cmd, Shell};
+
+pub(crate) fn get_changelog(
+ sh: &Shell,
+ changelog_n: usize,
+ commit: &str,
+ prev_tag: &str,
+ today: &str,
+) -> anyhow::Result<String> {
+ let token = match env::var("GITHUB_TOKEN") {
+ Ok(token) => token,
+ Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
+ };
+
+ let git_log = cmd!(sh, "git log {prev_tag}..HEAD --reverse").read()?;
+ let mut features = String::new();
+ let mut fixes = String::new();
+ let mut internal = String::new();
+ let mut others = String::new();
+ for line in git_log.lines() {
+ let line = line.trim_start();
+ if let Some(pr_num) = parse_pr_number(&line) {
+ let accept = "Accept: application/vnd.github.v3+json";
+ let authorization = format!("Authorization: token {}", token);
+ let pr_url = "https://api.github.com/repos/rust-lang/rust-analyzer/issues";
+
+ // we don't use an HTTPS client or JSON parser to keep the build times low
+ let pr = pr_num.to_string();
+ let pr_json =
+ cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}").read()?;
+ let pr_title = cmd!(sh, "jq .title").stdin(&pr_json).read()?;
+ let pr_title = unescape(&pr_title[1..pr_title.len() - 1]);
+ let pr_comment = cmd!(sh, "jq .body").stdin(pr_json).read()?;
+
+ let comments_json =
+ cmd!(sh, "curl -s -H {accept} -H {authorization} {pr_url}/{pr}/comments").read()?;
+ let pr_comments = cmd!(sh, "jq .[].body").stdin(comments_json).read()?;
+
+ let l = iter::once(pr_comment.as_str())
+ .chain(pr_comments.lines())
+ .rev()
+ .find_map(|it| {
+ let it = unescape(&it[1..it.len() - 1]);
+ it.lines().find_map(parse_changelog_line)
+ })
+ .into_iter()
+ .next()
+ .unwrap_or_else(|| parse_title_line(&pr_title));
+ let s = match l.kind {
+ PrKind::Feature => &mut features,
+ PrKind::Fix => &mut fixes,
+ PrKind::Internal => &mut internal,
+ PrKind::Other => &mut others,
+ PrKind::Skip => continue,
+ };
+ writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)).unwrap();
+ }
+ }
+
+ let contents = format!(
+ "\
+= Changelog #{}
+:sectanchors:
+:page-layout: post
+
+Commit: commit:{}[] +
+Release: release:{}[]
+
+== New Features
+
+{}
+
+== Fixes
+
+{}
+
+== Internal Improvements
+
+{}
+
+== Others
+
+{}
+",
+ changelog_n, commit, today, features, fixes, internal, others
+ );
+ Ok(contents)
+}
+
+#[derive(Clone, Copy)]
+enum PrKind {
+ Feature,
+ Fix,
+ Internal,
+ Other,
+ Skip,
+}
+
+struct PrInfo {
+ message: Option<String>,
+ kind: PrKind,
+}
+
+fn unescape(s: &str) -> String {
+ s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
+}
+
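+// Extracts the PR number from merge-commit subjects such as "Merge #1234" (bors) or
+// "Auto merge of #1234 ..." (homu).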
+fn parse_pr_number(s: &str) -> Option<u32> {
+ const BORS_PREFIX: &str = "Merge #";
+ const HOMU_PREFIX: &str = "Auto merge of #";
+ if s.starts_with(BORS_PREFIX) {
+ let s = &s[BORS_PREFIX.len()..];
+ s.parse().ok()
+ } else if s.starts_with(HOMU_PREFIX) {
+ let s = &s[HOMU_PREFIX.len()..];
+ if let Some(space) = s.find(' ') {
+ s[..space].parse().ok()
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+}
+
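+// Parses a `changelog <kind> <message>` line (kind: feature, fix, internal, or skip) from a PR
+// description or comment; any other kind is kept verbatim and filed under "Others".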
+fn parse_changelog_line(s: &str) -> Option<PrInfo> {
+ let parts = s.splitn(3, ' ').collect::<Vec<_>>();
+ if parts.len() < 2 || parts[0] != "changelog" {
+ return None;
+ }
+ let message = parts.get(2).map(|it| it.to_string());
+ let kind = match parts[1].trim_end_matches(':') {
+ "feature" => PrKind::Feature,
+ "fix" => PrKind::Fix,
+ "internal" => PrKind::Internal,
+ "skip" => PrKind::Skip,
+ _ => {
+ let kind = PrKind::Other;
+ let message = format!("{} {}", parts[1], message.unwrap_or_default());
+ return Some(PrInfo { kind, message: Some(message) });
+ }
+ };
+ let res = PrInfo { message, kind };
+ Some(res)
+}
+
+fn parse_title_line(s: &str) -> PrInfo {
+ let lower = s.to_ascii_lowercase();
+ const PREFIXES: [(&str, PrKind); 5] = [
+ ("feat: ", PrKind::Feature),
+ ("feature: ", PrKind::Feature),
+ ("fix: ", PrKind::Fix),
+ ("internal: ", PrKind::Internal),
+ ("minor: ", PrKind::Skip),
+ ];
+
+ for &(prefix, kind) in &PREFIXES {
+ if lower.starts_with(prefix) {
+ let message = match &kind {
+ PrKind::Skip => None,
+ _ => Some(s[prefix.len()..].to_string()),
+ };
+ return PrInfo { message, kind };
+ }
+ }
+ PrInfo { kind: PrKind::Other, message: Some(s.to_string()) }
+}
diff --git a/src/tools/rust-demangler/Cargo.toml b/src/tools/rust-demangler/Cargo.toml
new file mode 100644
index 000000000..2bb73b326
--- /dev/null
+++ b/src/tools/rust-demangler/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "rust-demangler"
+version = "0.0.1"
+edition = "2021"
+
+[dependencies]
+regex = "1.0"
+rustc-demangle = "0.1.17"
+
+[lib]
+name = "rust_demangler"
+doctest = false
+
+[[bin]]
+name = "rust-demangler"
+test = false
diff --git a/src/tools/rust-demangler/README.md b/src/tools/rust-demangler/README.md
new file mode 100644
index 000000000..4e8a689a1
--- /dev/null
+++ b/src/tools/rust-demangler/README.md
@@ -0,0 +1,36 @@
+# rust-demangler
+
+_Demangles rustc mangled names._
+
+`rust-demangler` supports the requirements of the [`llvm-cov show -Xdemangler`
+option](https://llvm.org/docs/CommandGuide/llvm-cov.html#cmdoption-llvm-cov-show-xdemangler),
+to perform Rust-specific symbol demangling:
+
+> _The demangler is expected to read a newline-separated list of symbols from
+> stdin and write a newline-separated list of the same length to stdout._
+
+For example, to use `rust-demangler` with `llvm-cov`:
+
+```shell
+$ TARGET="${PWD}/build/x86_64-unknown-linux-gnu"
+$ "${TARGET}"/llvm/bin/llvm-cov show \
+ --Xdemangler=path/to/rust-demangler \
+ --instr-profile=main.profdata ./main --show-line-counts-or-regions
+```
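+
+Symbols can also be demangled directly from standard input, one per line. For example (assuming
+the `rust-demangler` binary is on your `PATH`):
+
+```shell
+$ echo "_RNvC6_123foo3bar" | rust-demangler
+123foo[0]::bar
+```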
+
+`rust-demangler` is a Rust "extended tool", used in Rust compiler tests, and
+optionally included in Rust distributions that enable coverage profiling. Symbol
+demangling is implemented using the
+[rustc-demangle](https://crates.io/crates/rustc-demangle) crate.
+
+_(Note, for Rust developers, the third-party tool
+[`rustfilt`](https://crates.io/crates/rustfilt) also supports `llvm-cov` symbol
+demangling. `rustfilt` is a more generalized tool that searches any body of
+text, using pattern matching, to find and demangle Rust symbols.)_
+
+## License
+
+Rust-demangler is distributed under the terms of both the MIT license and the
+Apache License (Version 2.0).
+
+See [LICENSE-APACHE](/LICENSE-APACHE) and [LICENSE-MIT](/LICENSE-MIT) for details.
diff --git a/src/tools/rust-demangler/src/lib.rs b/src/tools/rust-demangler/src/lib.rs
new file mode 100644
index 000000000..1d972229d
--- /dev/null
+++ b/src/tools/rust-demangler/src/lib.rs
@@ -0,0 +1,21 @@
+use regex::Regex;
+use rustc_demangle::demangle;
+use std::str::Lines;
+
+const REPLACE_COLONS: &str = "::";
+
+pub fn create_disambiguator_re() -> Regex {
+ Regex::new(r"\[[a-f0-9]{5,16}\]::").unwrap()
+}
+
+pub fn demangle_lines(lines: Lines<'_>, strip_crate_disambiguators: Option<Regex>) -> Vec<String> {
+ let mut demangled_lines = Vec::new();
+ for mangled in lines {
+ let mut demangled = demangle(mangled).to_string();
+ if let Some(re) = &strip_crate_disambiguators {
+ demangled = re.replace_all(&demangled, REPLACE_COLONS).to_string();
+ }
+ demangled_lines.push(demangled);
+ }
+ demangled_lines
+}
diff --git a/src/tools/rust-demangler/src/main.rs b/src/tools/rust-demangler/src/main.rs
new file mode 100644
index 000000000..1b5ef5d24
--- /dev/null
+++ b/src/tools/rust-demangler/src/main.rs
@@ -0,0 +1,97 @@
+//! Demangles rustc mangled names.
+//!
+//! Note regarding crate disambiguators:
+//!
+//! Some demangled symbol paths can include "crate disambiguator" suffixes, represented as a large
+//! hexadecimal value enclosed in square brackets and appended to the name of the crate, as a suffix to the
+//! original crate name. For example, the `core` crate, here, includes a disambiguator:
+//!
+//! ```rust
+//! <generics::Firework<f64> as core[a7a74cee373f048]::ops::drop::Drop>::drop
+//! ```
+//!
+//! These disambiguators are known to vary depending on environmental circumstances. As a result,
+//! tests that compare results including demangled names can fail across development environments,
+//! particularly with cross-platform testing. Also, the resulting crate paths are not syntactically
+//! valid, and don't match the original source symbol paths, which can impact development tools.
+//!
+//! For these reasons, by default, `rust-demangler` uses a heuristic to remove crate disambiguators
+//! from their original demangled representation before printing them to standard output. If crate
+//! disambiguators are required, add the `-d` (or `--disambiguators`) flag, and the disambiguators
+//! will not be removed.
+//!
+//! Also note that the disambiguators are stripped by a Regex pattern that is tolerant of some
+//! variation in the number of hexadecimal digits. The disambiguators come from a hash value, which
+//! typically generates a 16-digit hex representation on a 64-bit architecture; however, leading
+//! zeros are not included, which can shorten the hex digit length, and a different hash algorithm,
+//! which might also depend on the architecture, might shorten the length even further. A
+//! minimum length of 5 digits is assumed, which should be more than sufficient to support hex
+//! representations that generate only 8 digits of precision with an extremely rare (but not
+//! impossible) result with up to 3 leading zeros.
+//!
+//! Using a minimum number of digits less than 5 risks the possibility of stripping demangled name
+//! components with a similar pattern. For example, some closures instantiated multiple times
+//! include their own disambiguators, demangled as non-hashed zero-based indexes in square brackets.
+//! These disambiguators seem to have more analytical value (for instance, in coverage analysis), so
+//! they are not removed.
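+//!
+//! For example, with the default stripping, the path shown above is printed as
+//! `<generics::Firework<f64> as core::ops::drop::Drop>::drop`, while zero-based indexes such as
+//! the `[0]` in `123foo[0]::bar` are kept.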
+
+use rust_demangler::*;
+use std::io::{self, Read, Write};
+
+fn main() -> io::Result<()> {
+ // FIXME(richkadel): Issue #77615 discussed updating the `rustc-demangle` library to provide
+ // an option to generate demangled names without including crate disambiguators. If that
+ // happens, update this tool to use that option (if the `-d` flag is not set) instead of
+ // stripping them via the Regex heuristic. Then update the doc comments and help.
+
+ // Strip hashed hexadecimal crate disambiguators. Leading zeros are not enforced, and can be
+ // different across different platform/architecture types, so while 16 hex digits are common,
+ // they can also be shorter.
+ //
+ // Also note that a demangled symbol path may include the `[<digits>]` pattern, with zero-based
+ // indexes (such as for closures, and possibly for types defined in anonymous scopes). Preferably
+ // these should not be stripped.
+ //
+ // The minimum length of 5 digits supports the possibility that some target architecture (maybe
+ // a 32-bit or smaller architecture) could generate a hash value with a maximum of 8 digits,
+ // and more than three leading zeros should be extremely unlikely. Conversely, it should be
+ // sufficient to assume the zero-based indexes for closures and anonymous scopes will never
+ // exceed the value 9999.
+ let mut strip_crate_disambiguators = Some(create_disambiguator_re());
+
+ let mut args = std::env::args();
+ let progname = args.next().unwrap();
+ for arg in args {
+ if arg == "--disambiguators" || arg == "-d" {
+ strip_crate_disambiguators = None;
+ } else {
+ eprintln!();
+ eprintln!("Usage: {} [-d|--disambiguators]", progname);
+ eprintln!();
+ eprintln!(
+ "This tool converts a list of Rust mangled symbols (one per line) into a\n\
+ corresponding list of demangled symbols."
+ );
+ eprintln!();
+ eprintln!(
+ "With -d (--disambiguators), Rust symbols mangled with the v0 symbol mangler may\n\
+ include crate disambiguators (a hexadecimal hash value, typically up to 16 digits\n\
+ long, enclosed in square brackets)."
+ );
+ eprintln!();
+ eprintln!(
+ "By default, crate disambiguators are removed, using a heuristics-based regular\n\
+ expression. (See the `rust-demangler` doc comments for more information.)"
+ );
+ eprintln!();
+ std::process::exit(1)
+ }
+ }
+
+ let mut buffer = String::new();
+ io::stdin().read_to_string(&mut buffer)?;
+ let mut demangled_lines = demangle_lines(buffer.lines(), strip_crate_disambiguators);
+ demangled_lines.push("".to_string()); // ensure a trailing newline
+ io::stdout().write_all(demangled_lines.join("\n").as_bytes())?;
+ Ok(())
+}
diff --git a/src/tools/rust-demangler/tests/lib.rs b/src/tools/rust-demangler/tests/lib.rs
new file mode 100644
index 000000000..85019df78
--- /dev/null
+++ b/src/tools/rust-demangler/tests/lib.rs
@@ -0,0 +1,84 @@
+use rust_demangler::*;
+
+const MANGLED_INPUT: &str = r"
+_RNvC6_123foo3bar
+_RNqCs4fqI2P2rA04_11utf8_identsu30____7hkackfecea1cbdathfdh9hlq6y
+_RNCNCNgCs6DXkGYLi8lr_2cc5spawn00B5_
+_RNCINkXs25_NgCsbmNqQUJIY6D_4core5sliceINyB9_4IterhENuNgNoBb_4iter8iterator8Iterator9rpositionNCNgNpB9_6memchr7memrchrs_0E0Bb_
+_RINbNbCskIICzLVDPPb_5alloc5alloc8box_freeDINbNiB4_5boxed5FnBoxuEp6OutputuEL_ECs1iopQbuBiw2_3std
+INtC8arrayvec8ArrayVechKj7b_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_8UnsignedKhb_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_6SignedKs98_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_6SignedKanb_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_4BoolKb0_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_4BoolKb1_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_4CharKc76_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_4CharKca_E
+_RMCs4fqI2P2rA04_13const_genericINtB0_4CharKc2202_E
+_RNvNvMCs4fqI2P2rA04_13const_genericINtB4_3FooKpE3foo3FOO
+_RC3foo.llvm.9D1C9369
+_RC3foo.llvm.9D1C9369@@16
+_RNvC9backtrace3foo.llvm.A5310EB9
+_RNvNtNtNtNtCs92dm3009vxr_4rand4rngs7adapter9reseeding4fork23FORK_HANDLER_REGISTERED.0.0
+";
+
+const DEMANGLED_OUTPUT: &str = r"
+123foo[0]::bar
+utf8_idents[317d481089b8c8fe]::საჭმელად_გემრიელი_სადილი
+cc[4d6468d6c9fd4bb3]::spawn::{closure#0}::{closure#0}
+<core[846817f741e54dfd]::slice::Iter<u8> as core[846817f741e54dfd]::iter::iterator::Iterator>::rposition::<core[846817f741e54dfd]::slice::memchr::memrchr::{closure#1}>::{closure#0}
+alloc[f15a878b47eb696b]::alloc::box_free::<dyn alloc[f15a878b47eb696b]::boxed::FnBox<(), Output = ()>>
+INtC8arrayvec8ArrayVechKj7b_E
+<const_generic[317d481089b8c8fe]::Unsigned<11u8>>
+<const_generic[317d481089b8c8fe]::Signed<152i16>>
+<const_generic[317d481089b8c8fe]::Signed<-11i8>>
+<const_generic[317d481089b8c8fe]::Bool<false>>
+<const_generic[317d481089b8c8fe]::Bool<true>>
+<const_generic[317d481089b8c8fe]::Char<'v'>>
+<const_generic[317d481089b8c8fe]::Char<'\n'>>
+<const_generic[317d481089b8c8fe]::Char<'∂'>>
+<const_generic[317d481089b8c8fe]::Foo<_>>::foo::FOO
+foo[0]
+foo[0]
+backtrace[0]::foo
+rand[693ea8e72247470f]::rngs::adapter::reseeding::fork::FORK_HANDLER_REGISTERED.0.0
+";
+
+const DEMANGLED_OUTPUT_NO_CRATE_DISAMBIGUATORS: &str = r"
+123foo[0]::bar
+utf8_idents::საჭმელად_გემრიელი_სადილი
+cc::spawn::{closure#0}::{closure#0}
+<core::slice::Iter<u8> as core::iter::iterator::Iterator>::rposition::<core::slice::memchr::memrchr::{closure#1}>::{closure#0}
+alloc::alloc::box_free::<dyn alloc::boxed::FnBox<(), Output = ()>>
+INtC8arrayvec8ArrayVechKj7b_E
+<const_generic::Unsigned<11u8>>
+<const_generic::Signed<152i16>>
+<const_generic::Signed<-11i8>>
+<const_generic::Bool<false>>
+<const_generic::Bool<true>>
+<const_generic::Char<'v'>>
+<const_generic::Char<'\n'>>
+<const_generic::Char<'∂'>>
+<const_generic::Foo<_>>::foo::FOO
+foo[0]
+foo[0]
+backtrace[0]::foo
+rand::rngs::adapter::reseeding::fork::FORK_HANDLER_REGISTERED.0.0
+";
+
+#[test]
+fn test_demangle_lines() {
+ let demangled_lines = demangle_lines(MANGLED_INPUT.lines(), None);
+ for (expected, actual) in DEMANGLED_OUTPUT.lines().zip(demangled_lines) {
+ assert_eq!(expected, actual);
+ }
+}
+
+#[test]
+fn test_demangle_lines_no_crate_disambiguators() {
+ let demangled_lines = demangle_lines(MANGLED_INPUT.lines(), Some(create_disambiguator_re()));
+ for (expected, actual) in DEMANGLED_OUTPUT_NO_CRATE_DISAMBIGUATORS.lines().zip(demangled_lines)
+ {
+ assert_eq!(expected, actual);
+ }
+}
diff --git a/src/tools/rust-installer/.github/workflows/ci.yml b/src/tools/rust-installer/.github/workflows/ci.yml
new file mode 100644
index 000000000..57a5cb76e
--- /dev/null
+++ b/src/tools/rust-installer/.github/workflows/ci.yml
@@ -0,0 +1,23 @@
+---
+
+name: CI
+on: [push, pull_request]
+
+jobs:
+ test:
+ name: Test
+ runs-on: ubuntu-latest
+ env:
+ LZMA_API_STATIC: 1
+ steps:
+ - name: Checkout the source code
+ uses: actions/checkout@v2
+
+ - name: Install Rust stable
+ run: rustup toolchain update stable && rustup default stable
+
+ - name: Build the tool
+ run: cargo build
+
+ - name: Execute the test suite
+ run: ./test.sh
diff --git a/src/tools/rust-installer/Cargo.toml b/src/tools/rust-installer/Cargo.toml
new file mode 100644
index 000000000..38b81a1ba
--- /dev/null
+++ b/src/tools/rust-installer/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+authors = ["The Rust Project Developers"]
+name = "installer"
+version = "0.0.0"
+edition = "2018"
+
+[[bin]]
+doc = false
+name = "rust-installer"
+path = "src/main.rs"
+
+[dependencies]
+anyhow = "1.0.19"
+flate2 = "1.0.1"
+rayon = "1.0"
+tar = "0.4.13"
+walkdir = "2"
+xz2 = "0.1.4"
+num_cpus = "1"
+remove_dir_all = "0.5"
+
+[dependencies.clap]
+features = ["derive"]
+version = "3.1"
+
+[target."cfg(windows)".dependencies]
+lazy_static = "1"
+winapi = { version = "0.3", features = ["errhandlingapi", "handleapi", "ioapiset", "winerror", "winioctl", "winnt"] }
diff --git a/src/tools/rust-installer/LICENSE-APACHE b/src/tools/rust-installer/LICENSE-APACHE
new file mode 100644
index 000000000..16fe87b06
--- /dev/null
+++ b/src/tools/rust-installer/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/tools/rust-installer/LICENSE-MIT b/src/tools/rust-installer/LICENSE-MIT
new file mode 100644
index 000000000..e69282e38
--- /dev/null
+++ b/src/tools/rust-installer/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2015 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/rust-installer/README.md b/src/tools/rust-installer/README.md
new file mode 100644
index 000000000..99d8e5ca4
--- /dev/null
+++ b/src/tools/rust-installer/README.md
@@ -0,0 +1,71 @@
+[![Build Status](https://travis-ci.org/rust-lang/rust-installer.svg?branch=master)](https://travis-ci.org/rust-lang/rust-installer)
+
+A generator for the install.sh script commonly used to install Rust in
+Unix environments. It is used by Rust and Cargo, and is intended to be
+used by a future combined installer of Rust + Cargo.
+
+# Usage
+
+```
+./gen-installer.sh --product-name=Rust \
+ --rel-manifest-dir=rustlib \
+ --success-message=Rust-is-ready-to-roll. \
+ --image-dir=./install-image \
+ --work-dir=./temp \
+ --output-dir=./dist \
+ --non-installed-overlay=./overlay \
+ --package-name=rustc-nightly-i686-apple-darwin \
+ --component-name=rustc \
+ --legacy-manifest-dirs=rustlib \
+ --bulk-dirs=share/doc
+```
+
+Or, to just generate the script:
+
+```
+./gen-install-script.sh --product-name=Rust \
+ --rel-manifest-dir=rustlib \
+ --success-message=Rust-is-ready-to-roll. \
+ --output-script=install.sh \
+ --legacy-manifest-dirs=rustlib
+```
+
+*Note: the dashes in `success-message` are converted to spaces. The
+script's argument handling is broken with spaces.*
+
+To combine installers:
+
+```
+./combine-installers.sh --product-name=Rust \
+ --rel-manifest-dir=rustlib \
+ --success-message=Rust-is-ready-to-roll. \
+ --work-dir=./temp \
+ --output-dir=./dist \
+ --non-installed-overlay=./overlay \
+ --package-name=rustc-nightly-i686-apple-darwin \
+ --legacy-manifest-dirs=rustlib \
+ --input-tarballs=./rustc.tar.gz,cargo.tar.gz
+```
+
+# Future work
+
+* Make install.sh not have to be customized, pull its data from a
+ config file.
+* Be more resilient to installation failures, particularly if the disk
+ is full.
+* Pre-install and post-uninstall scripts.
+* Allow components to depend on or contradict other components.
+* Sanity check that expected destination dirs (bin, lib, share) exist?
+* Add --docdir flag. Is there a standard name for this?
+* Remove empty directories on uninstall.
+* Detect mismatches in --prefix, --mandir, etc. in follow-on
+ installs/uninstalls.
+* Fix argument handling for spaces.
+* Add --bindir.
+
+# License
+
+This software is distributed under the terms of both the MIT license
+and the Apache License (Version 2.0), at your option.
+
+See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT) for details.
diff --git a/src/tools/rust-installer/combine-installers.sh b/src/tools/rust-installer/combine-installers.sh
new file mode 100755
index 000000000..4931c34dd
--- /dev/null
+++ b/src/tools/rust-installer/combine-installers.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ue
+
+# Prints the absolute path of a directory to stdout
+abs_path() {
+ local path="$1"
+ # Unset CDPATH because it causes havoc: it makes the destination unpredictable
+ # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
+ # for good measure.
+ (unset CDPATH && cd "$path" > /dev/null && pwd)
+}
+
+src_dir="$(abs_path $(dirname "$0"))"
+cargo run --manifest-path="$src_dir/Cargo.toml" -- combine "$@"
diff --git a/src/tools/rust-installer/gen-install-script.sh b/src/tools/rust-installer/gen-install-script.sh
new file mode 100755
index 000000000..b4559d147
--- /dev/null
+++ b/src/tools/rust-installer/gen-install-script.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ue
+
+# Prints the absolute path of a directory to stdout
+abs_path() {
+ local path="$1"
+ # Unset CDPATH because it causes havoc: it makes the destination unpredictable
+ # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
+ # for good measure.
+ (unset CDPATH && cd "$path" > /dev/null && pwd)
+}
+
+src_dir="$(abs_path $(dirname "$0"))"
+cargo run --manifest-path="$src_dir/Cargo.toml" -- script "$@"
diff --git a/src/tools/rust-installer/gen-installer.sh b/src/tools/rust-installer/gen-installer.sh
new file mode 100755
index 000000000..198cfe742
--- /dev/null
+++ b/src/tools/rust-installer/gen-installer.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ue
+
+# Prints the absolute path of a directory to stdout
+abs_path() {
+ local path="$1"
+ # Unset CDPATH because it causes havoc: it makes the destination unpredictable
+ # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
+ # for good measure.
+ (unset CDPATH && cd "$path" > /dev/null && pwd)
+}
+
+src_dir="$(abs_path $(dirname "$0"))"
+cargo run --manifest-path="$src_dir/Cargo.toml" -- generate "$@"
diff --git a/src/tools/rust-installer/install-template.sh b/src/tools/rust-installer/install-template.sh
new file mode 100644
index 000000000..e68be8911
--- /dev/null
+++ b/src/tools/rust-installer/install-template.sh
@@ -0,0 +1,987 @@
+#!/bin/bash
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+# No undefined variables
+set -u
+
+init_logging() {
+ local _abs_libdir="$1"
+ local _logfile="$_abs_libdir/$TEMPLATE_REL_MANIFEST_DIR/install.log"
+ rm -f "$_logfile"
+ need_ok "failed to remove old installation log"
+ touch "$_logfile"
+ need_ok "failed to create installation log"
+ LOGFILE="$_logfile"
+}
+
+log_line() {
+ local _line="$1"
+
+ if [ -n "${LOGFILE-}" -a -e "${LOGFILE-}" ]; then
+ echo "$_line" >> "$LOGFILE"
+ # Ignore errors, which may happen e.g. after the manifest dir is deleted
+ fi
+}
+
+msg() {
+ local _line="install: ${1-}"
+ echo "$_line"
+ log_line "$_line"
+}
+
+verbose_msg() {
+ if [ -n "${CFG_VERBOSE-}" ]; then
+ msg "${1-}"
+ else
+ log_line "install: ${1-}"
+ fi
+}
+
+step_msg() {
+ msg
+ msg "$1"
+ msg
+}
+
+verbose_step_msg() {
+ if [ -n "${CFG_VERBOSE-}" ]; then
+ msg
+ msg "$1"
+ msg
+ else
+ log_line ""
+ log_line "install: $1"
+ log_line ""
+ fi
+}
+
+warn() {
+ local _line="install: WARNING: $1"
+ echo "$_line" >&2
+ log_line "$_line"
+}
+
+err() {
+ local _line="install: error: $1"
+ echo "$_line" >&2
+ log_line "$_line"
+ exit 1
+}
+
+# A non-user error that is likely to result in a corrupted install
+critical_err() {
+ local _line="install: error: $1. see logs at '${LOGFILE-}'"
+ echo "$_line" >&2
+ log_line "$_line"
+ exit 1
+}
+
+need_ok() {
+ if [ $? -ne 0 ]
+ then
+ err "$1"
+ fi
+}
+
+critical_need_ok() {
+ if [ $? -ne 0 ]
+ then
+ critical_err "$1"
+ fi
+}
+
+want_ok() {
+ if [ $? -ne 0 ]; then
+ warn "$1"
+ fi
+}
+
+assert_nz() {
+ if [ -z "$1" ]; then err "assert_nz $2"; fi
+}
+
+need_cmd() {
+ if command -v $1 >/dev/null 2>&1
+ then verbose_msg "found $1"
+ else err "need $1"
+ fi
+}
+
+run() {
+ local _line="\$ $*"
+ "$@"
+ local _retval=$?
+ log_line "$_line"
+ return $_retval
+}
+
+write_to_file() {
+ local _msg="$1"
+ local _file="$2"
+ local _line="$ echo \"$_msg\" > \"$_file\""
+ echo "$_msg" > "$_file"
+ local _retval=$?
+ log_line "$_line"
+ return $_retval
+}
+
+append_to_file() {
+ local _msg="$1"
+ local _file="$2"
+ local _line="$ echo \"$_msg\" >> \"$_file\""
+ echo "$_msg" >> "$_file"
+ local _retval=$?
+ log_line "$_line"
+ return $_retval
+}
+
+make_dir_recursive() {
+ local _dir="$1"
+ local _line="$ umask 022 && mkdir -p \"$_dir\""
+ umask 022 && mkdir -p "$_dir"
+ local _retval=$?
+ log_line "$_line"
+ return $_retval
+}
+
+putvar() {
+ local t
+ local tlen
+ eval t=\$$1
+ eval tlen=\${#$1}
+}
+
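+# valopt <name> <default> <doc...>: declare a --<name>=<value> option; its value is stored in CFG_<NAME>.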
+valopt() {
+ VAL_OPTIONS="$VAL_OPTIONS $1"
+
+ local op=$1
+ local default=$2
+ shift
+ shift
+ local doc="$*"
+ if [ $HELP -eq 0 ]
+ then
+ local uop=$(echo $op | tr 'a-z-' 'A-Z_')
+ local v="CFG_${uop}"
+ eval $v="$default"
+ for arg in $CFG_ARGS
+ do
+ if echo "$arg" | grep -q -- "--$op="
+ then
+ local val=$(echo "$arg" | cut -f2 -d=)
+ eval $v=$val
+ fi
+ done
+ putvar $v
+ else
+ if [ -z "$default" ]
+ then
+ default="<none>"
+ fi
+ op="${op}=[${default}]"
+ printf " --%-30s %s\n" "$op" "$doc"
+ fi
+}
+
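+# opt <name> <0|1> <doc...>: declare a boolean --enable-<name>/--disable-<name> option; passing the
+# non-default flag sets CFG_ENABLE_<NAME> or CFG_DISABLE_<NAME> to 1.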
+opt() {
+ BOOL_OPTIONS="$BOOL_OPTIONS $1"
+
+ local op=$1
+ local default=$2
+ shift
+ shift
+ local doc="$*"
+ local flag=""
+
+ if [ $default -eq 0 ]
+ then
+ flag="enable"
+ else
+ flag="disable"
+ doc="don't $doc"
+ fi
+
+ if [ $HELP -eq 0 ]
+ then
+ for arg in $CFG_ARGS
+ do
+ if [ "$arg" = "--${flag}-${op}" ]
+ then
+ op=$(echo $op | tr 'a-z-' 'A-Z_')
+ flag=$(echo $flag | tr 'a-z' 'A-Z')
+ local v="CFG_${flag}_${op}"
+ eval $v=1
+ putvar $v
+ fi
+ done
+ else
+ if [ ! -z "${META-}" ]
+ then
+ op="$op=<$META>"
+ fi
+ printf " --%-30s %s\n" "$flag-$op" "$doc"
+ fi
+}
+
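+# flag <name> <doc...>: declare a bare --<name> switch; passing it sets CFG_<NAME> to 1.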
+flag() {
+ BOOL_OPTIONS="$BOOL_OPTIONS $1"
+
+ local op=$1
+ shift
+ local doc="$*"
+
+ if [ $HELP -eq 0 ]
+ then
+ for arg in $CFG_ARGS
+ do
+ if [ "$arg" = "--${op}" ]
+ then
+ op=$(echo $op | tr 'a-z-' 'A-Z_')
+ local v="CFG_${op}"
+ eval $v=1
+ putvar $v
+ fi
+ done
+ else
+ if [ ! -z "${META-}" ]
+ then
+ op="$op=<$META>"
+ fi
+ printf " --%-30s %s\n" "$op" "$doc"
+ fi
+}
+
+validate_opt () {
+ for arg in $CFG_ARGS
+ do
+ local is_arg_valid=0
+ for option in $BOOL_OPTIONS
+ do
+ if test --disable-$option = $arg
+ then
+ is_arg_valid=1
+ fi
+ if test --enable-$option = $arg
+ then
+ is_arg_valid=1
+ fi
+ if test --$option = $arg
+ then
+ is_arg_valid=1
+ fi
+ done
+ for option in $VAL_OPTIONS
+ do
+ if echo "$arg" | grep -q -- "--$option="
+ then
+ is_arg_valid=1
+ fi
+ done
+ if [ "$arg" = "--help" ]
+ then
+ echo
+ echo "No more help available for Configure options,"
+ echo "check the Wiki or join our IRC channel"
+ break
+ else
+ if test $is_arg_valid -eq 0
+ then
+ err "Option '$arg' is not recognized"
+ fi
+ fi
+ done
+}
+
+absolutify() {
+ local file_path="$1"
+ local file_path_dirname="$(dirname "$file_path")"
+ local file_path_basename="$(basename "$file_path")"
+ local file_abs_path="$(abs_path "$file_path_dirname")"
+ local file_path="$file_abs_path/$file_path_basename"
+ # This is the return value
+ RETVAL="$file_path"
+}
+
+# Prints the absolute path of a directory to stdout
+abs_path() {
+ local path="$1"
+ # Unset CDPATH because it causes havoc: it makes the destination unpredictable
+ # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
+ # for good measure.
+ (unset CDPATH && cd "$path" > /dev/null && pwd)
+}
+
+uninstall_legacy() {
+ local _abs_libdir="$1"
+
+ local _uninstalled_something=false
+
+ # Replace commas in legacy manifest list with spaces
+ _legacy_manifest_dirs=`echo "$TEMPLATE_LEGACY_MANIFEST_DIRS" | sed "s/,/ /g"`
+
+ # Uninstall from legacy manifests
+ local _md
+ for _md in $_legacy_manifest_dirs; do
+ # First, uninstall from the installation prefix.
+ # Errors are warnings - try to rm everything in the manifest even if some fail.
+ if [ -f "$_abs_libdir/$_md/manifest" ]
+ then
+
+ # iterate through installed manifest and remove files
+ local _p;
+ while read _p; do
+ # the installed manifest contains absolute paths
+ msg "removing legacy file $_p"
+ if [ -f "$_p" ]
+ then
+ run rm -f "$_p"
+ want_ok "failed to remove $_p"
+ else
+ warn "supposedly installed file $_p does not exist!"
+ fi
+ done < "$_abs_libdir/$_md/manifest"
+
+ # If we fail to remove $md below, then the
+ # installed manifest will still be full; the installed manifest
+ # needs to be empty before install.
+ msg "removing legacy manifest $_abs_libdir/$_md/manifest"
+ run rm -f "$_abs_libdir/$_md/manifest"
+ # For the above reason, this is a hard error
+ need_ok "failed to remove installed manifest"
+
+ # Remove $template_rel_manifest_dir directory
+ msg "removing legacy manifest dir $_abs_libdir/$_md"
+ run rm -R "$_abs_libdir/$_md"
+ want_ok "failed to remove $_md"
+
+ _uninstalled_something=true
+ fi
+ done
+
+ RETVAL="$_uninstalled_something"
+}
+
+uninstall_components() {
+ local _abs_libdir="$1"
+ local _dest_prefix="$2"
+ local _components="$3"
+
+ # We're going to start by uninstalling existing components.
+ local _uninstalled_something=false
+
+ # First, try removing any 'legacy' manifests from before
+ # rust-installer
+ uninstall_legacy "$_abs_libdir"
+ assert_nz "$RETVAL", "RETVAL"
+ if [ "$RETVAL" = true ]; then
+ _uninstalled_something=true;
+ fi
+
+ # Load the version of the installed installer
+ local _installed_version=
+ if [ -f "$abs_libdir/$TEMPLATE_REL_MANIFEST_DIR/rust-installer-version" ]; then
+ _installed_version=`cat "$_abs_libdir/$TEMPLATE_REL_MANIFEST_DIR/rust-installer-version"`
+
+ # Sanity check
+ if [ ! -n "$_installed_version" ]; then critical_err "rust installer version is empty"; fi
+ fi
+
+ # If there's something installed, then uninstall
+ if [ -n "$_installed_version" ]; then
+ # Check the version of the installed installer
+ case "$_installed_version" in
+
+ # If this is a previous version, then upgrade in place to the
+ # current version before uninstalling.
+ 2 )
+ # The only change between version 2 -> 3 is that components are placed
+ # in subdirectories of the installer tarball. There are no changes
+ # to the installed data format, so nothing to do.
+ ;;
+
+ # This is the current version. Nothing needs to be done except uninstall.
+ "$TEMPLATE_RUST_INSTALLER_VERSION")
+ ;;
+
+ # If this is an unknown (future) version then bail.
+ * )
+ echo "The copy of $TEMPLATE_PRODUCT_NAME at $_dest_prefix was installed using an"
+ echo "unknown version ($_installed_version) of rust-installer."
+ echo "Uninstall it first with the installer used for the original installation"
+ echo "before continuing."
+ exit 1
+ ;;
+ esac
+
+ local _md="$_abs_libdir/$TEMPLATE_REL_MANIFEST_DIR"
+ local _installed_components="$(cat "$_md/components")"
+
+ # Uninstall (our components only) before reinstalling
+ local _available_component
+ for _available_component in $_components; do
+ local _installed_component
+ for _installed_component in $_installed_components; do
+ if [ "$_available_component" = "$_installed_component" ]; then
+ msg "uninstalling component '$_available_component'"
+ local _component_manifest="$_md/manifest-$_installed_component"
+
+ # Sanity check: there should be a component manifest
+ if [ ! -f "$_component_manifest" ]; then
+ critical_err "installed component '$_installed_component' has no manifest"
+ fi
+
+ # Iterate through installed component manifest and remove files
+ local _directive
+ while read _directive; do
+
+ local _command=`echo $_directive | cut -f1 -d:`
+ local _file=`echo $_directive | cut -f2 -d:`
+
+ # Sanity checks
+ if [ ! -n "$_command" ]; then critical_err "malformed installation directive"; fi
+ if [ ! -n "$_file" ]; then critical_err "malformed installation directive"; fi
+
+ case "$_command" in
+ file)
+ verbose_msg "removing file $_file"
+ if [ -f "$_file" ]; then
+ run rm -f "$_file"
+ want_ok "failed to remove $_file"
+ else
+ warn "supposedly installed file $_file does not exist!"
+ fi
+ ;;
+
+ dir)
+ verbose_msg "removing directory $_file"
+ run rm -r "$_file"
+ want_ok "unable to remove directory $_file"
+ ;;
+
+ *)
+ critical_err "unknown installation directive"
+ ;;
+ esac
+
+ done < "$_component_manifest"
+
+ # Remove the installed component manifest
+ verbose_msg "removing component manifest $_component_manifest"
+ run rm "$_component_manifest"
+ # This is a hard error because the installation is unrecoverable
+ critical_need_ok "failed to remove installed manifest for component '$_installed_component'"
+
+ # Update the installed component list
+ local _modified_components="$(sed "/^$_installed_component\$/d" "$_md/components")"
+ write_to_file "$_modified_components" "$_md/components"
+ critical_need_ok "failed to update installed component list"
+ fi
+ done
+ done
+
+ # If there are no remaining components, delete the manifest directory,
+ # but only if we're doing an uninstall - if we're doing an install,
+ # then leave the manifest directory around to hang onto the logs,
+ # and any files not managed by the installer.
+ if [ -n "${CFG_UNINSTALL-}" ]; then
+ local _remaining_components="$(cat "$_md/components")"
+ if [ ! -n "$_remaining_components" ]; then
+ verbose_msg "removing manifest directory $_md"
+ run rm -r "$_md"
+ want_ok "failed to remove $_md"
+
+ maybe_unconfigure_ld
+ fi
+ fi
+
+ _uninstalled_something=true
+ fi
+
+ # There's no installed version. If we were asked to uninstall, then that's a problem.
+ if [ -n "${CFG_UNINSTALL-}" -a "$_uninstalled_something" = false ]
+ then
+ err "unable to find installation manifest at $CFG_LIBDIR/$TEMPLATE_REL_MANIFEST_DIR"
+ fi
+}
+
+install_components() {
+ local _src_dir="$1"
+ local _abs_libdir="$2"
+ local _dest_prefix="$3"
+ local _components="$4"
+
+ local _component
+ for _component in $_components; do
+
+ msg "installing component '$_component'"
+
+ # The file name of the manifest we're installing from
+ local _input_manifest="$_src_dir/$_component/manifest.in"
+
+ # Sanity check: do we have our input manifests?
+ if [ ! -f "$_input_manifest" ]; then
+ critical_err "manifest for $_component does not exist at $_input_manifest"
+ fi
+
+ # The installed manifest directory
+ local _md="$_abs_libdir/$TEMPLATE_REL_MANIFEST_DIR"
+
+ # The file name of the manifest we're going to create during install
+ local _installed_manifest="$_md/manifest-$_component"
+
+ # Create the installed manifest, which we will fill in with absolute file paths
+ touch "$_installed_manifest"
+ critical_need_ok "failed to create installed manifest"
+
+ # Add this component to the installed component list
+ append_to_file "$_component" "$_md/components"
+ critical_need_ok "failed to update components list for $_component"
+
+ # Now install, iterate through the new manifest and copy files
+ local _directive
+ while read _directive; do
+
+ local _command=`echo $_directive | cut -f1 -d:`
+ local _file=`echo $_directive | cut -f2 -d:`
+
+ # Sanity checks
+ if [ ! -n "$_command" ]; then critical_err "malformed installation directive"; fi
+ if [ ! -n "$_file" ]; then critical_err "malformed installation directive"; fi
+
+ # Decide the destination of the file
+ local _file_install_path="$_dest_prefix/$_file"
+
+ if echo "$_file" | grep "^etc/" > /dev/null
+ then
+ local _f="$(echo "$_file" | sed 's/^etc\///')"
+ _file_install_path="$CFG_SYSCONFDIR/$_f"
+ fi
+
+ if echo "$_file" | grep "^bin/" > /dev/null
+ then
+ local _f="$(echo "$_file" | sed 's/^bin\///')"
+ _file_install_path="$CFG_BINDIR/$_f"
+ fi
+
+ if echo "$_file" | grep "^lib/" > /dev/null
+ then
+ local _f="$(echo "$_file" | sed 's/^lib\///')"
+ _file_install_path="$CFG_LIBDIR/$_f"
+ fi
+
+ if echo "$_file" | grep "^share" > /dev/null
+ then
+ local _f="$(echo "$_file" | sed 's/^share\///')"
+ _file_install_path="$CFG_DATADIR/$_f"
+ fi
+
+ if echo "$_file" | grep "^share/man/" > /dev/null
+ then
+ local _f="$(echo "$_file" | sed 's/^share\/man\///')"
+ _file_install_path="$CFG_MANDIR/$_f"
+ fi
+
+ # HACK: Try to support overriding --docdir. Paths with the form
+ # "share/doc/$product/" can be redirected to a single --docdir
+ # path. If the following detects that --docdir has been specified
+ # then it will replace everything preceding the "$product" path
+ # component. The problem here is that the combined rust installer
+ # contains two "products": rust and cargo; so the contents of those
+ # directories will both be dumped into the same directory; and the
+ # contents of those directories are _not_ disjoint. Since this feature
+ # is almost entirely to support 'make install' anyway I don't expect
+ # this problem to be a big deal in practice.
+ if [ "$CFG_DOCDIR" != "<default>" ]
+ then
+ if echo "$_file" | grep "^share/doc/" > /dev/null
+ then
+ local _f="$(echo "$_file" | sed 's/^share\/doc\/[^/]*\///')"
+ _file_install_path="$CFG_DOCDIR/$_f"
+ fi
+ fi
+
+ # Make sure there's a directory for it
+ make_dir_recursive "$(dirname "$_file_install_path")"
+ critical_need_ok "directory creation failed"
+
+ # Make the path absolute so we can uninstall it later without
+ # starting from the installation cwd
+ absolutify "$_file_install_path"
+ _file_install_path="$RETVAL"
+ assert_nz "$_file_install_path" "file_install_path"
+
+ case "$_command" in
+ file )
+
+ verbose_msg "copying file $_file_install_path"
+
+ maybe_backup_path "$_file_install_path"
+
+ if echo "$_file" | grep "^bin/" > /dev/null || test -x "$_src_dir/$_component/$_file"
+ then
+ run cp "$_src_dir/$_component/$_file" "$_file_install_path"
+ run chmod 755 "$_file_install_path"
+ else
+ run cp "$_src_dir/$_component/$_file" "$_file_install_path"
+ run chmod 644 "$_file_install_path"
+ fi
+ critical_need_ok "file creation failed"
+
+ # Update the manifest
+ append_to_file "file:$_file_install_path" "$_installed_manifest"
+ critical_need_ok "failed to update manifest"
+
+ ;;
+
+ dir )
+
+ verbose_msg "copying directory $_file_install_path"
+
+ maybe_backup_path "$_file_install_path"
+
+ run cp -R "$_src_dir/$_component/$_file" "$_file_install_path"
+ critical_need_ok "failed to copy directory"
+
+ # Set permissions. 0755 for dirs, 644 for files
+ run chmod -R u+rwX,go+rX,go-w "$_file_install_path"
+ critical_need_ok "failed to set permissions on directory"
+
+ # Update the manifest
+ append_to_file "dir:$_file_install_path" "$_installed_manifest"
+ critical_need_ok "failed to update manifest"
+ ;;
+
+ *)
+ critical_err "unknown installation directive"
+ ;;
+ esac
+ done < "$_input_manifest"
+
+ done
+}
+
+maybe_configure_ld() {
+ local _abs_libdir="$1"
+
+ local _ostype="$(uname -s)"
+ assert_nz "$_ostype" "ostype"
+
+ if [ "$_ostype" = "Linux" -a ! -n "${CFG_DISABLE_LDCONFIG-}" ]; then
+
+ # Fedora-based systems do not configure the dynamic linker to look in
+ # /usr/local/lib, which is our default installation directory. To
+ # make things just work, try to put that directory in
+ # /etc/ld.so.conf.d/rust-installer-v1 so ldconfig picks it up.
+ # Issue #30.
+ #
+ # This will get rm'd when the last component is uninstalled in
+ # maybe_unconfigure_ld.
+ if [ "$_abs_libdir" = "/usr/local/lib" -a -d "/etc/ld.so.conf.d" ]; then
+ echo "$_abs_libdir" > "/etc/ld.so.conf.d/rust-installer-v1-$TEMPLATE_REL_MANIFEST_DIR.conf"
+ if [ $? -ne 0 ]; then
+ # This shouldn't happen if we've gotten this far
+ # installing to /usr/local
+ warn "failed to update /etc/ld.so.conf.d. this is unexpected"
+ fi
+ fi
+
+ verbose_msg "running ldconfig"
+ if [ -n "${CFG_VERBOSE-}" ]; then
+ ldconfig
+ else
+ ldconfig 2> /dev/null
+ fi
+ if [ $? -ne 0 ]
+ then
+ warn "failed to run ldconfig. this may happen when not installing as root. run with --verbose to see the error"
+ fi
+ fi
+}
+
+maybe_unconfigure_ld() {
+ local _ostype="$(uname -s)"
+ assert_nz "$_ostype" "ostype"
+
+ if [ "$_ostype" != "Linux" ]; then
+ return 0
+ fi
+
+ rm "/etc/ld.so.conf.d/rust-installer-v1-$TEMPLATE_REL_MANIFEST_DIR.conf" 2> /dev/null
+ # Above may fail since that file may not have been created on install
+}
+
+# Doing our own 'install'-like backup that is consistent across platforms
+maybe_backup_path() {
+ local _file_install_path="$1"
+
+ if [ -e "$_file_install_path" ]; then
+ msg "backing up existing file at $_file_install_path"
+ run mv -f "$_file_install_path" "$_file_install_path.old"
+ critical_need_ok "failed to back up $_file_install_path"
+ fi
+}
+
+install_uninstaller() {
+ local _src_dir="$1"
+ local _src_basename="$2"
+ local _abs_libdir="$3"
+
+ local _uninstaller="$_abs_libdir/$TEMPLATE_REL_MANIFEST_DIR/uninstall.sh"
+ msg "creating uninstall script at $_uninstaller"
+ run cp "$_src_dir/$_src_basename" "$_uninstaller"
+ critical_need_ok "unable to install uninstaller"
+}
+
+do_preflight_sanity_checks() {
+ local _src_dir="$1"
+ local _dest_prefix="$2"
+
+ # Sanity check: can we write to the destination?
+ verbose_msg "verifying destination is writable"
+ make_dir_recursive "$CFG_LIBDIR"
+ need_ok "can't write to destination. consider \`sudo\`."
+ touch "$CFG_LIBDIR/rust-install-probe" > /dev/null
+ if [ $? -ne 0 ]
+ then
+ err "can't write to destination. consider \`sudo\`."
+ fi
+ rm "$CFG_LIBDIR/rust-install-probe"
+ need_ok "failed to remove install probe"
+
+ # Sanity check: don't install to the directory containing the installer.
+ # That would surely cause chaos.
+ verbose_msg "verifying destination is not the same as source"
+ local _prefix_dir="$(abs_path "$_dest_prefix")"
+ if [ "$_src_dir" = "$_dest_prefix" -a "${CFG_UNINSTALL-}" != 1 ]; then
+ err "cannot install to same directory as installer"
+ fi
+}
+
+verbose_msg "looking for install programs"
+verbose_msg
+
+need_cmd mkdir
+need_cmd printf
+need_cmd cut
+need_cmd grep
+need_cmd uname
+need_cmd tr
+need_cmd sed
+need_cmd chmod
+need_cmd env
+need_cmd pwd
+
+CFG_ARGS="${@:-}"
+
+HELP=0
+if [ "${1-}" = "--help" ]
+then
+ HELP=1
+ shift
+ echo
+ echo "Usage: $0 [options]"
+ echo
+ echo "Options:"
+ echo
+else
+ verbose_step_msg "processing arguments"
+fi
+
+OPTIONS=""
+BOOL_OPTIONS=""
+VAL_OPTIONS=""
+
+flag uninstall "only uninstall from the installation prefix"
+valopt destdir "" "set installation root"
+valopt prefix "/usr/local" "set installation prefix"
+
+# Avoid prepending an extra / to the prefix path if there's no destdir
+# NB: CFG vars here are undefined when passing --help
+if [ -z "${CFG_DESTDIR-}" ]; then
+ CFG_DESTDIR_PREFIX="${CFG_PREFIX-}"
+else
+ CFG_DESTDIR_PREFIX="$CFG_DESTDIR/$CFG_PREFIX"
+fi
+
+# NB This isn't quite the same definition as in `configure`.
+# just using 'lib' instead of configure's CFG_LIBDIR_RELATIVE
+valopt without "" "comma-separated list of components to not install"
+valopt components "" "comma-separated list of components to install"
+flag list-components "list available components"
+valopt sysconfdir "$CFG_DESTDIR_PREFIX/etc" "install system configuration files"
+valopt bindir "$CFG_DESTDIR_PREFIX/bin" "install binaries"
+valopt libdir "$CFG_DESTDIR_PREFIX/lib" "install libraries"
+valopt datadir "$CFG_DESTDIR_PREFIX/share" "install data"
+# NB We repeat datadir default value because we don't set CFG_DATADIR in --help
+valopt mandir "${CFG_DATADIR-"$CFG_DESTDIR_PREFIX/share"}/man" "install man pages in PATH"
+# NB See the docdir handling in install_components for an explanation of this
+# weird <default> string
+valopt docdir "\<default\>" "install documentation in PATH"
+opt ldconfig 1 "run ldconfig after installation (Linux only)"
+opt verify 1 "obsolete"
+flag verbose "run with verbose output"
+
+if [ $HELP -eq 1 ]
+then
+ echo
+ exit 0
+fi
+
+verbose_step_msg "validating arguments"
+validate_opt
+
+# Template configuration.
+# These names surrounded by '%%' are replaced by sed when generating install.sh
+# FIXME: Might want to consider loading this from a file and not generating install.sh
+
+# Rust or Cargo
+TEMPLATE_PRODUCT_NAME=%%TEMPLATE_PRODUCT_NAME%%
+# rustlib or cargo
+TEMPLATE_REL_MANIFEST_DIR=%%TEMPLATE_REL_MANIFEST_DIR%%
+# 'Rust is ready to roll.' or 'Cargo is cool to cruise.'
+TEMPLATE_SUCCESS_MESSAGE=%%TEMPLATE_SUCCESS_MESSAGE%%
+# Locations to look for directories containing legacy, pre-versioned manifests
+TEMPLATE_LEGACY_MANIFEST_DIRS=%%TEMPLATE_LEGACY_MANIFEST_DIRS%%
+# The installer version
+TEMPLATE_RUST_INSTALLER_VERSION=%%TEMPLATE_RUST_INSTALLER_VERSION%%
+
+# OK, let's get installing ...
+
+# This is where we are installing from
+src_dir="$(abs_path $(dirname "$0"))"
+
+# The name of the script
+src_basename="$(basename "$0")"
+
+# If we've been run as 'uninstall.sh' (from the existing installation)
+# then we're doing a full uninstall, as opposed to the --uninstall flag
+# which just means 'uninstall my components'.
+if [ "$src_basename" = "uninstall.sh" ]; then
+ if [ "${*:-}" != "" ]; then
+ # Currently don't know what to do with arguments in this mode
+ err "uninstall.sh does not take any arguments"
+ fi
+ CFG_UNINSTALL=1
+ CFG_DESTDIR_PREFIX="$(abs_path "$src_dir/../../")"
+ CFG_LIBDIR="$(abs_path "$src_dir/../")"
+fi
+
+# This is where we are installing to
+dest_prefix="$CFG_DESTDIR_PREFIX"
+
+# Open the components file to get the list of components to install.
+# NB: During install this components file is read from the installer's
+# source dir, during a full uninstall it's read from the manifest dir,
+# and thus contains all installed components.
+components=`cat "$src_dir/components"`
+
+# Sanity check: do we have components?
+if [ ! -n "$components" ]; then
+ err "unable to find installation components"
+fi
+
+# If the user asked for a component list, do that and exit
+if [ -n "${CFG_LIST_COMPONENTS-}" ]; then
+ echo
+ echo "# Available components"
+ echo
+ for component in $components; do
+ echo "* $component"
+ done
+ echo
+ exit 0
+fi
+
+# If the user specified which components to install/uninstall,
+# then validate that they exist and select them for installation
+if [ -n "$CFG_COMPONENTS" ]; then
+ # Remove commas
+ user_components="$(echo "$CFG_COMPONENTS" | sed "s/,/ /g")"
+ for user_component in $user_components; do
+ found=false
+ for my_component in $components; do
+ if [ "$user_component" = "$my_component" ]; then
+ found=true
+ fi
+ done
+ if [ "$found" = false ]; then
+ err "unknown component: $user_component"
+ fi
+ done
+ components="$user_components"
+fi
+
+if [ -n "$CFG_WITHOUT" ]; then
+ without_components="$(echo "$CFG_WITHOUT" | sed "s/,/ /g")"
+ for without_component in $without_components; do
+ components="$(echo "$components" | sed "s/$without_component//" | sed "s/$without_component//")"
+ done
+fi
+
+if [ -z "$components" ]; then
+ if [ -z "${CFG_UNINSTALL-}" ]; then
+ err "no components selected for installation"
+ else
+ err "no components selected for uninstallation"
+ fi
+fi
+
+do_preflight_sanity_checks "$src_dir" "$dest_prefix"
+
+# Using an absolute path to libdir in a few places so that the status
+# messages are consistently using absolute paths.
+absolutify "$CFG_LIBDIR"
+abs_libdir="$RETVAL"
+assert_nz "$abs_libdir" "abs_libdir"
+
+# Create the manifest directory, where we will put our logs
+make_dir_recursive "$abs_libdir/$TEMPLATE_REL_MANIFEST_DIR"
+need_ok "failed to create $TEMPLATE_REL_MANIFEST_DIR"
+
+# Log messages and commands
+init_logging "$abs_libdir"
+
+# First do any uninstallation, including from legacy manifests. This
+# will also upgrade the metadata of existing installs.
+uninstall_components "$abs_libdir" "$dest_prefix" "$components"
+
+# If we're only uninstalling then exit
+if [ -n "${CFG_UNINSTALL-}" ]
+then
+ echo
+ echo " $TEMPLATE_PRODUCT_NAME is uninstalled."
+ echo
+ exit 0
+fi
+
+# Create the manifest directory again! uninstall_legacy
+# may have deleted it.
+make_dir_recursive "$abs_libdir/$TEMPLATE_REL_MANIFEST_DIR"
+need_ok "failed to create $TEMPLATE_REL_MANIFEST_DIR"
+
+# Drop the version number into the manifest dir
+write_to_file "$TEMPLATE_RUST_INSTALLER_VERSION" "$abs_libdir/$TEMPLATE_REL_MANIFEST_DIR/rust-installer-version"
+critical_need_ok "failed to write installer version"
+
+# Install the uninstaller
+install_uninstaller "$src_dir" "$src_basename" "$abs_libdir"
+
+# Install each component
+install_components "$src_dir" "$abs_libdir" "$dest_prefix" "$components"
+
+# Make dynamic libraries available to the linker
+maybe_configure_ld "$abs_libdir"
+
+echo
+echo " $TEMPLATE_SUCCESS_MESSAGE"
+echo
+
+
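For orientation, the generated install.sh can be driven with the options declared above; a hypothetical session (the component name and paths are illustrative, and the --name=value syntax is assumed from the flag/valopt helpers defined earlier in the script):

    # See which components the tarball provides
    ./install.sh --list-components

    # Install everything except one component into a user-writable prefix
    ./install.sh --prefix="$HOME/.local" --without=rust-docs --verbose

    # Later, remove only what this installer put there
    ./install.sh --uninstall --prefix="$HOME/.local"
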
diff --git a/src/tools/rust-installer/make-tarballs.sh b/src/tools/rust-installer/make-tarballs.sh
new file mode 100755
index 000000000..e9f88cc8b
--- /dev/null
+++ b/src/tools/rust-installer/make-tarballs.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ue
+
+# Prints the absolute path of a directory to stdout
+abs_path() {
+ local path="$1"
+ # Unset CDPATH because it causes havoc: it makes the destination unpredictable
+ # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
+ # for good measure.
+ (unset CDPATH && cd "$path" > /dev/null && pwd)
+}
+
+src_dir="$(abs_path $(dirname "$0"))"
+cargo run --manifest-path="$src_dir/Cargo.toml" -- tarball "$@"
diff --git a/src/tools/rust-installer/rust-installer-version b/src/tools/rust-installer/rust-installer-version
new file mode 100644
index 000000000..e440e5c84
--- /dev/null
+++ b/src/tools/rust-installer/rust-installer-version
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/src/tools/rust-installer/src/combiner.rs b/src/tools/rust-installer/src/combiner.rs
new file mode 100644
index 000000000..2ec09d67e
--- /dev/null
+++ b/src/tools/rust-installer/src/combiner.rs
@@ -0,0 +1,161 @@
+use super::Scripter;
+use super::Tarballer;
+use crate::{
+ compression::{CompressionFormat, CompressionFormats},
+ util::*,
+};
+use anyhow::{bail, Context, Result};
+use std::io::{Read, Write};
+use std::path::Path;
+use tar::Archive;
+
+actor! {
+ #[derive(Debug)]
+ pub struct Combiner {
+ /// The name of the product, for display.
+ #[clap(value_name = "NAME")]
+ product_name: String = "Product",
+
+ /// The name of the package tarball.
+ #[clap(value_name = "NAME")]
+ package_name: String = "package",
+
+ /// The directory under lib/ where the manifest lives.
+ #[clap(value_name = "DIR")]
+ rel_manifest_dir: String = "packagelib",
+
+ /// The string to print after successful installation.
+ #[clap(value_name = "MESSAGE")]
+ success_message: String = "Installed.",
+
+ /// Places to look for legacy manifests to uninstall.
+ #[clap(value_name = "DIRS")]
+ legacy_manifest_dirs: String = "",
+
+ /// Installers to combine.
+ #[clap(value_name = "FILE,FILE")]
+ input_tarballs: String = "",
+
+ /// Directory containing files that should not be installed.
+ #[clap(value_name = "DIR")]
+ non_installed_overlay: String = "",
+
+ /// The directory to do temporary work.
+ #[clap(value_name = "DIR")]
+ work_dir: String = "./workdir",
+
+ /// The location to put the final image and tarball.
+ #[clap(value_name = "DIR")]
+ output_dir: String = "./dist",
+
+ /// The formats used to compress the tarball
+ #[clap(value_name = "FORMAT", default_value_t)]
+ compression_formats: CompressionFormats,
+ }
+}
+
+impl Combiner {
+ /// Combines the installer tarballs.
+ pub fn run(self) -> Result<()> {
+ create_dir_all(&self.work_dir)?;
+
+ let package_dir = Path::new(&self.work_dir).join(&self.package_name);
+ if package_dir.exists() {
+ remove_dir_all(&package_dir)?;
+ }
+ create_dir_all(&package_dir)?;
+
+ // Merge each installer into the work directory of the new installer.
+ let components = create_new_file(package_dir.join("components"))?;
+ for input_tarball in self
+ .input_tarballs
+ .split(',')
+ .map(str::trim)
+ .filter(|s| !s.is_empty())
+ {
+ // Extract the input tarballs
+ let compression =
+ CompressionFormat::detect_from_path(input_tarball).ok_or_else(|| {
+ anyhow::anyhow!("couldn't figure out the format of {}", input_tarball)
+ })?;
+ Archive::new(compression.decode(input_tarball)?)
+ .unpack(&self.work_dir)
+ .with_context(|| {
+ format!(
+ "unable to extract '{}' into '{}'",
+ &input_tarball, self.work_dir
+ )
+ })?;
+
+ let pkg_name =
+ input_tarball.trim_end_matches(&format!(".tar.{}", compression.extension()));
+ let pkg_name = Path::new(pkg_name).file_name().unwrap();
+ let pkg_dir = Path::new(&self.work_dir).join(&pkg_name);
+
+ // Verify the version number.
+ let mut version = String::new();
+ open_file(pkg_dir.join("rust-installer-version"))
+ .and_then(|mut file| Ok(file.read_to_string(&mut version)?))
+ .with_context(|| format!("failed to read version in '{}'", input_tarball))?;
+ if version.trim().parse() != Ok(crate::RUST_INSTALLER_VERSION) {
+ bail!("incorrect installer version in {}", input_tarball);
+ }
+
+ // Copy components to the new combined installer.
+ let mut pkg_components = String::new();
+ open_file(pkg_dir.join("components"))
+ .and_then(|mut file| Ok(file.read_to_string(&mut pkg_components)?))
+ .with_context(|| format!("failed to read components in '{}'", input_tarball))?;
+ for component in pkg_components.split_whitespace() {
+ // All we need to do is copy the component directory. We could
+ // move it, but rustbuild wants to reuse the unpacked package
+ // dir for OS-specific installers on macOS and Windows.
+ let component_dir = package_dir.join(&component);
+ create_dir(&component_dir)?;
+ copy_recursive(&pkg_dir.join(&component), &component_dir)?;
+
+ // Merge the component name.
+ writeln!(&components, "{}", component).context("failed to write new components")?;
+ }
+ }
+ drop(components);
+
+ // Write the installer version.
+ let version = package_dir.join("rust-installer-version");
+ writeln!(
+ create_new_file(version)?,
+ "{}",
+ crate::RUST_INSTALLER_VERSION
+ )
+ .context("failed to write new installer version")?;
+
+ // Copy the overlay.
+ if !self.non_installed_overlay.is_empty() {
+ copy_recursive(self.non_installed_overlay.as_ref(), &package_dir)?;
+ }
+
+ // Generate the install script.
+ let output_script = package_dir.join("install.sh");
+ let mut scripter = Scripter::default();
+ scripter
+ .product_name(self.product_name)
+ .rel_manifest_dir(self.rel_manifest_dir)
+ .success_message(self.success_message)
+ .legacy_manifest_dirs(self.legacy_manifest_dirs)
+ .output_script(path_to_str(&output_script)?.into());
+ scripter.run()?;
+
+ // Make the tarballs.
+ create_dir_all(&self.output_dir)?;
+ let output = Path::new(&self.output_dir).join(&self.package_name);
+ let mut tarballer = Tarballer::default();
+ tarballer
+ .work_dir(self.work_dir)
+ .input(self.package_name)
+ .output(path_to_str(&output)?.into())
+ .compression_formats(self.compression_formats.clone());
+ tarballer.run()?;
+
+ Ok(())
+ }
+}
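Judging from how Scripter and Tarballer are driven at the end of run() above, the actor! macro generates Default plus chainable setters named after each field, so a programmatic sketch of combining two installers might look like the following (the crate name installer comes from src/main.rs later in this diff; the tarball paths are illustrative):

    use anyhow::Result;
    use installer::Combiner;

    fn combine_two_installers() -> Result<()> {
        let mut combiner = Combiner::default();
        combiner
            .product_name("Rust".into())
            .package_name("rust-combined".into())
            .rel_manifest_dir("rustlib".into())
            .input_tarballs("dist/rustc.tar.xz,dist/cargo.tar.xz".into())
            .work_dir("./workdir".into())
            .output_dir("./dist".into());
        // Unpacks each input, merges their components, and re-tars the result.
        combiner.run()
    }
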
diff --git a/src/tools/rust-installer/src/compression.rs b/src/tools/rust-installer/src/compression.rs
new file mode 100644
index 000000000..7e20a9497
--- /dev/null
+++ b/src/tools/rust-installer/src/compression.rs
@@ -0,0 +1,177 @@
+use anyhow::{Context, Error};
+use flate2::{read::GzDecoder, write::GzEncoder};
+use rayon::prelude::*;
+use std::{convert::TryFrom, fmt, io::Read, io::Write, path::Path, str::FromStr};
+use xz2::{read::XzDecoder, write::XzEncoder};
+
+#[derive(Debug, Copy, Clone)]
+pub enum CompressionFormat {
+ Gz,
+ Xz,
+}
+
+impl CompressionFormat {
+ pub(crate) fn detect_from_path(path: impl AsRef<Path>) -> Option<Self> {
+ match path.as_ref().extension().and_then(|e| e.to_str()) {
+ Some("gz") => Some(CompressionFormat::Gz),
+ Some("xz") => Some(CompressionFormat::Xz),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn extension(&self) -> &'static str {
+ match self {
+ CompressionFormat::Gz => "gz",
+ CompressionFormat::Xz => "xz",
+ }
+ }
+
+ pub(crate) fn encode(&self, path: impl AsRef<Path>) -> Result<Box<dyn Encoder>, Error> {
+ let mut os = path.as_ref().as_os_str().to_os_string();
+ os.push(format!(".{}", self.extension()));
+ let path = Path::new(&os);
+
+ if path.exists() {
+ crate::util::remove_file(path)?;
+ }
+ let file = crate::util::create_new_file(path)?;
+
+ Ok(match self {
+ CompressionFormat::Gz => Box::new(GzEncoder::new(file, flate2::Compression::best())),
+ CompressionFormat::Xz => {
+ // Note that preset 6 takes about 173MB of memory per thread, so we limit the number of
+ // threads to not blow out 32-bit hosts. (We could be more precise with
+ // `MtStreamBuilder::memusage()` if desired.)
+ let stream = xz2::stream::MtStreamBuilder::new()
+ .threads(Ord::min(num_cpus::get(), 8) as u32)
+ .preset(6)
+ .encoder()?;
+ Box::new(XzEncoder::new_stream(file, stream))
+ }
+ })
+ }
+
+ pub(crate) fn decode(&self, path: impl AsRef<Path>) -> Result<Box<dyn Read>, Error> {
+ let file = crate::util::open_file(path.as_ref())?;
+ Ok(match self {
+ CompressionFormat::Gz => Box::new(GzDecoder::new(file)),
+ CompressionFormat::Xz => Box::new(XzDecoder::new(file)),
+ })
+ }
+}
+
+/// This struct wraps Vec<CompressionFormat> in order to parse the value from the command line.
+#[derive(Debug, Clone)]
+pub struct CompressionFormats(Vec<CompressionFormat>);
+
+impl TryFrom<&'_ str> for CompressionFormats {
+ type Error = Error;
+
+ fn try_from(value: &str) -> Result<Self, Self::Error> {
+ let mut parsed = Vec::new();
+ for format in value.split(',') {
+ match format.trim() {
+ "gz" => parsed.push(CompressionFormat::Gz),
+ "xz" => parsed.push(CompressionFormat::Xz),
+ other => anyhow::bail!("unknown compression format: {}", other),
+ }
+ }
+ Ok(CompressionFormats(parsed))
+ }
+}
+
+impl FromStr for CompressionFormats {
+ type Err = Error;
+
+ fn from_str(value: &str) -> Result<Self, Self::Err> {
+ Self::try_from(value)
+ }
+}
+
+impl fmt::Display for CompressionFormats {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for (i, format) in self.iter().enumerate() {
+ if i != 0 {
+ write!(f, ",")?;
+ }
+ fmt::Display::fmt(match format {
+ CompressionFormat::Xz => "xz",
+ CompressionFormat::Gz => "gz",
+ }, f)?;
+ }
+ Ok(())
+ }
+}
+
+impl Default for CompressionFormats {
+ fn default() -> Self {
+ Self(vec![CompressionFormat::Gz, CompressionFormat::Xz])
+ }
+}
+
+impl CompressionFormats {
+ pub(crate) fn iter(&self) -> impl Iterator<Item = CompressionFormat> + '_ {
+ self.0.iter().map(|i| *i)
+ }
+}
+
+pub(crate) trait Encoder: Send + Write {
+ fn finish(self: Box<Self>) -> Result<(), Error>;
+}
+
+impl<W: Send + Write> Encoder for GzEncoder<W> {
+ fn finish(self: Box<Self>) -> Result<(), Error> {
+ GzEncoder::finish(*self).context("failed to finish .gz file")?;
+ Ok(())
+ }
+}
+
+impl<W: Send + Write> Encoder for XzEncoder<W> {
+ fn finish(self: Box<Self>) -> Result<(), Error> {
+ XzEncoder::finish(*self).context("failed to finish .xz file")?;
+ Ok(())
+ }
+}
+
+pub(crate) struct CombinedEncoder {
+ encoders: Vec<Box<dyn Encoder>>,
+}
+
+impl CombinedEncoder {
+ pub(crate) fn new(encoders: Vec<Box<dyn Encoder>>) -> Box<dyn Encoder> {
+ Box::new(Self { encoders })
+ }
+}
+
+impl Write for CombinedEncoder {
+ fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+ self.write_all(buf)?;
+ Ok(buf.len())
+ }
+
+ fn write_all(&mut self, buf: &[u8]) -> std::io::Result<()> {
+ self.encoders
+ .par_iter_mut()
+ .map(|w| w.write_all(buf))
+ .collect::<std::io::Result<Vec<()>>>()?;
+ Ok(())
+ }
+
+ fn flush(&mut self) -> std::io::Result<()> {
+ self.encoders
+ .par_iter_mut()
+ .map(|w| w.flush())
+ .collect::<std::io::Result<Vec<()>>>()?;
+ Ok(())
+ }
+}
+
+impl Encoder for CombinedEncoder {
+ fn finish(self: Box<Self>) -> Result<(), Error> {
+ self.encoders
+ .into_par_iter()
+ .map(|e| e.finish())
+ .collect::<Result<Vec<()>, Error>>()?;
+ Ok(())
+ }
+}
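A minimal sketch of how these pieces compose, assuming it lives inside this crate (iter() and CombinedEncoder are pub(crate)); the output path is illustrative:

    use crate::compression::{CombinedEncoder, CompressionFormats};
    use anyhow::Result;
    use std::io::Write;

    fn write_gz_and_xz(data: &[u8]) -> Result<()> {
        // Parsed via the FromStr impl above.
        let formats: CompressionFormats = "gz,xz".parse()?;
        // One encoder per format; encode() appends the matching extension.
        let encoders = formats
            .iter()
            .map(|f| f.encode("out.tar"))
            .collect::<Result<Vec<_>>>()?;
        // Every write is fanned out to all encoders in parallel.
        let mut combined = CombinedEncoder::new(encoders);
        combined.write_all(data)?;
        combined.finish()
    }
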
diff --git a/src/tools/rust-installer/src/generator.rs b/src/tools/rust-installer/src/generator.rs
new file mode 100644
index 000000000..6a4cb9b4b
--- /dev/null
+++ b/src/tools/rust-installer/src/generator.rs
@@ -0,0 +1,170 @@
+use super::Scripter;
+use super::Tarballer;
+use crate::compression::CompressionFormats;
+use crate::util::*;
+use anyhow::{bail, format_err, Context, Result};
+use std::io::Write;
+use std::path::Path;
+
+actor! {
+ #[derive(Debug)]
+ pub struct Generator {
+ /// The name of the product, for display
+ #[clap(value_name = "NAME")]
+ product_name: String = "Product",
+
+ /// The name of the component, distinct from other installed components
+ #[clap(value_name = "NAME")]
+ component_name: String = "component",
+
+ /// The name of the package, tarball
+ #[clap(value_name = "NAME")]
+ package_name: String = "package",
+
+ /// The directory under lib/ where the manifest lives
+ #[clap(value_name = "DIR")]
+ rel_manifest_dir: String = "packagelib",
+
+ /// The string to print after successful installation
+ #[clap(value_name = "MESSAGE")]
+ success_message: String = "Installed.",
+
+ /// Places to look for legacy manifests to uninstall
+ #[clap(value_name = "DIRS")]
+ legacy_manifest_dirs: String = "",
+
+ /// Directory containing files that should not be installed
+ #[clap(value_name = "DIR")]
+ non_installed_overlay: String = "",
+
+ /// Path prefixes of directories that should be installed/uninstalled in bulk
+ #[clap(value_name = "DIRS")]
+ bulk_dirs: String = "",
+
+ /// The directory containing the installation medium
+ #[clap(value_name = "DIR")]
+ image_dir: String = "./install_image",
+
+ /// The directory to do temporary work
+ #[clap(value_name = "DIR")]
+ work_dir: String = "./workdir",
+
+ /// The location to put the final image and tarball
+ #[clap(value_name = "DIR")]
+ output_dir: String = "./dist",
+
+ /// The formats used to compress the tarball
+ #[clap(value_name = "FORMAT", default_value_t)]
+ compression_formats: CompressionFormats,
+ }
+}
+
+impl Generator {
+ /// Generates the actual installer tarball
+ pub fn run(self) -> Result<()> {
+ create_dir_all(&self.work_dir)?;
+
+ let package_dir = Path::new(&self.work_dir).join(&self.package_name);
+ if package_dir.exists() {
+ remove_dir_all(&package_dir)?;
+ }
+
+ // Copy the image and write the manifest
+ let component_dir = package_dir.join(&self.component_name);
+ create_dir_all(&component_dir)?;
+ copy_and_manifest(self.image_dir.as_ref(), &component_dir, &self.bulk_dirs)?;
+
+ // Write the component name
+ let components = package_dir.join("components");
+ writeln!(create_new_file(components)?, "{}", self.component_name)
+ .context("failed to write the component file")?;
+
+ // Write the installer version (only used by combine-installers.sh)
+ let version = package_dir.join("rust-installer-version");
+ writeln!(
+ create_new_file(version)?,
+ "{}",
+ crate::RUST_INSTALLER_VERSION
+ )
+ .context("failed to write new installer version")?;
+
+ // Copy the overlay
+ if !self.non_installed_overlay.is_empty() {
+ copy_recursive(self.non_installed_overlay.as_ref(), &package_dir)?;
+ }
+
+ // Generate the install script
+ let output_script = package_dir.join("install.sh");
+ let mut scripter = Scripter::default();
+ scripter
+ .product_name(self.product_name)
+ .rel_manifest_dir(self.rel_manifest_dir)
+ .success_message(self.success_message)
+ .legacy_manifest_dirs(self.legacy_manifest_dirs)
+ .output_script(path_to_str(&output_script)?.into());
+ scripter.run()?;
+
+ // Make the tarballs
+ create_dir_all(&self.output_dir)?;
+ let output = Path::new(&self.output_dir).join(&self.package_name);
+ let mut tarballer = Tarballer::default();
+ tarballer
+ .work_dir(self.work_dir)
+ .input(self.package_name)
+ .output(path_to_str(&output)?.into())
+ .compression_formats(self.compression_formats.clone());
+ tarballer.run()?;
+
+ Ok(())
+ }
+}
+
+/// Copies the `src` directory recursively to `dst`, writing `manifest.in` too.
+fn copy_and_manifest(src: &Path, dst: &Path, bulk_dirs: &str) -> Result<()> {
+ let manifest = create_new_file(dst.join("manifest.in"))?;
+ let bulk_dirs: Vec<_> = bulk_dirs
+ .split(',')
+ .filter(|s| !s.is_empty())
+ .map(Path::new)
+ .collect();
+
+ copy_with_callback(src, dst, |path, file_type| {
+ // We need paths to be compatible with both Unix and Windows.
+ if path
+ .components()
+ .filter_map(|c| c.as_os_str().to_str())
+ .any(|s| s.contains('\\'))
+ {
+ bail!(
+ "rust-installer doesn't support '\\' in path components: {:?}",
+ path
+ );
+ }
+
+ // Normalize to Unix-style path separators.
+ let normalized_string;
+ let mut string = path.to_str().ok_or_else(|| {
+ format_err!(
+ "rust-installer doesn't support non-Unicode paths: {:?}",
+ path
+ )
+ })?;
+ if string.contains('\\') {
+ normalized_string = string.replace('\\', "/");
+ string = &normalized_string;
+ }
+
+ if file_type.is_dir() {
+ // Only manifest directories that are explicitly bulk.
+ if bulk_dirs.contains(&path) {
+ writeln!(&manifest, "dir:{}", string)?;
+ }
+ } else {
+ // Only manifest files that aren't under bulk directories.
+ if !bulk_dirs.iter().any(|d| path.starts_with(d)) {
+ writeln!(&manifest, "file:{}", string)?;
+ }
+ }
+ Ok(())
+ })
+}
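Tying this back to the install template earlier in the diff: copy_and_manifest writes one file: or dir: directive per installed path, and the install loop shown earlier dispatches on exactly those prefixes. A hypothetical manifest.in for a small component might read:

    file:bin/cargo
    file:etc/bash_completion.d/cargo
    dir:share/doc/cargo
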
diff --git a/src/tools/rust-installer/src/lib.rs b/src/tools/rust-installer/src/lib.rs
new file mode 100644
index 000000000..799092019
--- /dev/null
+++ b/src/tools/rust-installer/src/lib.rs
@@ -0,0 +1,17 @@
+#[macro_use]
+mod util;
+
+mod combiner;
+mod compression;
+mod generator;
+mod scripter;
+mod tarballer;
+
+pub use crate::combiner::Combiner;
+pub use crate::generator::Generator;
+pub use crate::scripter::Scripter;
+pub use crate::tarballer::Tarballer;
+
+/// The installer version, output only to be used by combine-installers.sh.
+/// (should match `SOURCE_DIRECTORY/rust_installer_version`)
+pub const RUST_INSTALLER_VERSION: u32 = 3;
diff --git a/src/tools/rust-installer/src/main.rs b/src/tools/rust-installer/src/main.rs
new file mode 100644
index 000000000..be8a0d683
--- /dev/null
+++ b/src/tools/rust-installer/src/main.rs
@@ -0,0 +1,27 @@
+use anyhow::{Context, Result};
+use clap::{self, Parser};
+
+#[derive(Parser)]
+struct CommandLine {
+ #[clap(subcommand)]
+ command: Subcommand,
+}
+
+#[derive(clap::Subcommand)]
+enum Subcommand {
+ Generate(installer::Generator),
+ Combine(installer::Combiner),
+ Script(installer::Scripter),
+ Tarball(installer::Tarballer),
+}
+
+fn main() -> Result<()> {
+ let command_line = CommandLine::parse();
+ match command_line.command {
+ Subcommand::Combine(combiner) => combiner.run().context("failed to combine installers")?,
+ Subcommand::Generate(generator) => generator.run().context("failed to generate installer")?,
+ Subcommand::Script(scripter) => scripter.run().context("failed to generate installation script")?,
+ Subcommand::Tarball(tarballer) => tarballer.run().context("failed to generate tarballs")?,
+ }
+ Ok(())
+}
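A hypothetical end-to-end invocation of the generate subcommand, in the same spirit as make-tarballs.sh above (the --flag-name spellings are assumed to be the kebab-case clap names of the fields; the directories are illustrative):

    cargo run --manifest-path=src/tools/rust-installer/Cargo.toml -- generate \
        --product-name=Rust \
        --component-name=cargo \
        --package-name=cargo-nightly \
        --image-dir=./image \
        --work-dir=./workdir \
        --output-dir=./dist
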
diff --git a/src/tools/rust-installer/src/remove_dir_all.rs b/src/tools/rust-installer/src/remove_dir_all.rs
new file mode 100644
index 000000000..110976528
--- /dev/null
+++ b/src/tools/rust-installer/src/remove_dir_all.rs
@@ -0,0 +1,860 @@
+#![allow(non_snake_case)]
+
+use std::io;
+use std::path::Path;
+
+#[cfg(not(windows))]
+pub fn remove_dir_all(path: &Path) -> io::Result<()> {
+ ::std::fs::remove_dir_all(path)
+}
+
+#[cfg(windows)]
+pub fn remove_dir_all(path: &Path) -> io::Result<()> {
+ win::remove_dir_all(path)
+}
+
+#[cfg(windows)]
+mod win {
+ use winapi::ctypes::{c_uint, c_ushort};
+ use winapi::shared::minwindef::{BOOL, DWORD, FALSE, FILETIME, LPVOID};
+ use winapi::shared::winerror::{
+ ERROR_CALL_NOT_IMPLEMENTED, ERROR_INSUFFICIENT_BUFFER, ERROR_NO_MORE_FILES,
+ };
+ use winapi::um::errhandlingapi::{GetLastError, SetLastError};
+ use winapi::um::fileapi::{
+ CreateFileW, FindFirstFileW, FindNextFileW, GetFileInformationByHandle,
+ };
+ use winapi::um::fileapi::{BY_HANDLE_FILE_INFORMATION, CREATE_ALWAYS, CREATE_NEW};
+ use winapi::um::fileapi::{FILE_BASIC_INFO, FILE_RENAME_INFO, TRUNCATE_EXISTING};
+ use winapi::um::fileapi::{OPEN_ALWAYS, OPEN_EXISTING};
+ use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE};
+ use winapi::um::ioapiset::DeviceIoControl;
+ use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress};
+ use winapi::um::minwinbase::{
+ FileBasicInfo, FileRenameInfo, FILE_INFO_BY_HANDLE_CLASS, WIN32_FIND_DATAW,
+ };
+ use winapi::um::winbase::SECURITY_SQOS_PRESENT;
+ use winapi::um::winbase::{
+ FILE_FLAG_BACKUP_SEMANTICS, FILE_FLAG_DELETE_ON_CLOSE, FILE_FLAG_OPEN_REPARSE_POINT,
+ };
+ use winapi::um::winioctl::FSCTL_GET_REPARSE_POINT;
+ use winapi::um::winnt::{DELETE, FILE_ATTRIBUTE_DIRECTORY, HANDLE, LPCWSTR};
+ use winapi::um::winnt::{FILE_ATTRIBUTE_READONLY, FILE_ATTRIBUTE_REPARSE_POINT};
+ use winapi::um::winnt::{FILE_GENERIC_WRITE, FILE_WRITE_DATA, GENERIC_READ, GENERIC_WRITE};
+ use winapi::um::winnt::{FILE_READ_ATTRIBUTES, FILE_WRITE_ATTRIBUTES};
+ use winapi::um::winnt::{FILE_SHARE_DELETE, FILE_SHARE_READ, FILE_SHARE_WRITE};
+ use winapi::um::winnt::{IO_REPARSE_TAG_MOUNT_POINT, IO_REPARSE_TAG_SYMLINK, LARGE_INTEGER};
+
+ use std::ffi::{OsStr, OsString};
+ use std::io;
+ use std::mem;
+ use std::os::windows::ffi::{OsStrExt, OsStringExt};
+ use std::path::{Path, PathBuf};
+ use std::ptr;
+ use std::sync::Arc;
+
+ pub fn remove_dir_all(path: &Path) -> io::Result<()> {
+ // On Windows it is not enough to just recursively remove the contents of a
+ // directory and then the directory itself. Deleting does not happen
+ // instantaneously, but is scheduled.
+ // To work around this, we move the file or directory to some `base_dir`
+ // right before deletion to avoid races.
+ //
+ // As `base_dir` we choose the parent dir of the directory we want to
+ // remove. We very probably have permission to create files here, as we
+ // already need write permission in this dir to delete the directory. And it
+ // should be on the same volume.
+ //
+ // To handle files with names like `CON` and `morse .. .`, and when a
+ // directory structure is so deep it needs long path names, the path is first
+ // converted to a `//?/`-path with `get_path()`.
+ //
+ // To make sure we don't leave a moved file lying around if the process
+ // crashes before we can delete the file, we do all operations on a file
+ // handle. By opening a file with `FILE_FLAG_DELETE_ON_CLOSE` Windows will
+ // always delete the file when the handle closes.
+ //
+ // All files are renamed to be in the `base_dir`, and have their name
+ // changed to "rm-<counter>". After every rename the counter is increased.
+ // Rename should not overwrite possibly existing files in the base dir. So
+ // if it fails with `AlreadyExists`, we just increase the counter and try
+ // again.
+ //
+ // For read-only files and directories we first have to remove the read-only
+ // attribute before we can move or delete them. This also removes the
+ // attribute from possible hardlinks to the file, so just before closing we
+ // restore the read-only attribute.
+ //
+ // If 'path' points to a directory symlink or junction we should not
+ // recursively remove the target of the link, but only the link itself.
+ //
+ // Moving and deleting is guaranteed to succeed if we are able to open the
+ // file with `DELETE` permission. If others have the file open we only have
+ // `DELETE` permission if they have specified `FILE_SHARE_DELETE`. We can
+ // also delete the file now, but it will not disappear until all others have
+ // closed the file. But no-one can open the file after we have flagged it
+ // for deletion.
+
+ // Open the path once to get the canonical path, file type and attributes.
+ let (path, metadata) = {
+ let mut opts = OpenOptions::new();
+ opts.access_mode(FILE_READ_ATTRIBUTES);
+ opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT);
+ let file = File::open(path, &opts)?;
+ (get_path(&file)?, file.file_attr()?)
+ };
+
+ let mut ctx = RmdirContext {
+ base_dir: match path.parent() {
+ Some(dir) => dir,
+ None => {
+ return Err(io::Error::new(
+ io::ErrorKind::PermissionDenied,
+ "can't delete root directory",
+ ))
+ }
+ },
+ readonly: metadata.perm().readonly(),
+ counter: 0,
+ };
+
+ let filetype = metadata.file_type();
+ if filetype.is_dir() {
+ remove_dir_all_recursive(path.as_ref(), &mut ctx)
+ } else if filetype.is_symlink_dir() {
+ remove_item(path.as_ref(), &mut ctx)
+ } else {
+ Err(io::Error::new(
+ io::ErrorKind::PermissionDenied,
+ "Not a directory",
+ ))
+ }
+ }
+
+ fn readdir(p: &Path) -> io::Result<ReadDir> {
+ let root = p.to_path_buf();
+ let star = p.join("*");
+ let path = to_u16s(&star)?;
+
+ unsafe {
+ let mut wfd = mem::zeroed();
+ let find_handle = FindFirstFileW(path.as_ptr(), &mut wfd);
+ if find_handle != INVALID_HANDLE_VALUE {
+ Ok(ReadDir {
+ handle: FindNextFileHandle(find_handle),
+ root: Arc::new(root),
+ first: Some(wfd),
+ })
+ } else {
+ Err(io::Error::last_os_error())
+ }
+ }
+ }
+
+ struct RmdirContext<'a> {
+ base_dir: &'a Path,
+ readonly: bool,
+ counter: u64,
+ }
+
+ fn remove_dir_all_recursive(path: &Path, ctx: &mut RmdirContext) -> io::Result<()> {
+ let dir_readonly = ctx.readonly;
+ for child in readdir(path)? {
+ let child = child?;
+ let child_type = child.file_type()?;
+ ctx.readonly = child.metadata()?.perm().readonly();
+ if child_type.is_dir() {
+ remove_dir_all_recursive(&child.path(), ctx)?;
+ } else {
+ remove_item(&child.path().as_ref(), ctx)?;
+ }
+ }
+ ctx.readonly = dir_readonly;
+ remove_item(path, ctx)
+ }
+
+ fn remove_item(path: &Path, ctx: &mut RmdirContext) -> io::Result<()> {
+ if !ctx.readonly {
+ let mut opts = OpenOptions::new();
+ opts.access_mode(DELETE);
+ opts.custom_flags(
+ FILE_FLAG_BACKUP_SEMANTICS | // delete directory
+ FILE_FLAG_OPEN_REPARSE_POINT | // delete symlink
+ FILE_FLAG_DELETE_ON_CLOSE,
+ );
+ let file = File::open(path, &opts)?;
+ move_item(&file, ctx)
+ } else {
+ // remove read-only permision
+ set_perm(&path, FilePermissions::new())?;
+ // move and delete file, similar to !readonly.
+ // only the access mode is different.
+ let mut opts = OpenOptions::new();
+ opts.access_mode(DELETE | FILE_WRITE_ATTRIBUTES);
+ opts.custom_flags(
+ FILE_FLAG_BACKUP_SEMANTICS
+ | FILE_FLAG_OPEN_REPARSE_POINT
+ | FILE_FLAG_DELETE_ON_CLOSE,
+ );
+ let file = File::open(path, &opts)?;
+ move_item(&file, ctx)?;
+ // restore read-only flag just in case there are other hard links
+ let mut perm = FilePermissions::new();
+ perm.set_readonly(true);
+ let _ = file.set_perm(perm); // ignore if this fails
+ Ok(())
+ }
+ }
+
+ macro_rules! compat_fn {
+ ($module:ident: $(
+ fn $symbol:ident($($argname:ident: $argtype:ty),*)
+ -> $rettype:ty {
+ $($body:expr);*
+ }
+ )*) => ($(
+ #[allow(unused_variables)]
+ unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
+ use std::sync::atomic::{AtomicUsize, Ordering};
+ use std::mem;
+ use std::ffi::CString;
+ type F = unsafe extern "system" fn($($argtype),*) -> $rettype;
+
+ lazy_static! { static ref PTR: AtomicUsize = AtomicUsize::new(0);}
+
+ fn lookup(module: &str, symbol: &str) -> Option<usize> {
+ let mut module: Vec<u16> = module.encode_utf16().collect();
+ module.push(0);
+ let symbol = CString::new(symbol).unwrap();
+ unsafe {
+ let handle = GetModuleHandleW(module.as_ptr());
+ match GetProcAddress(handle, symbol.as_ptr()) as usize {
+ 0 => None,
+ n => Some(n),
+ }
+ }
+ }
+
+ fn store_func(ptr: &AtomicUsize, module: &str, symbol: &str,
+ fallback: usize) -> usize {
+ let value = lookup(module, symbol).unwrap_or(fallback);
+ ptr.store(value, Ordering::SeqCst);
+ value
+ }
+
+ fn load() -> usize {
+ store_func(&PTR, stringify!($module), stringify!($symbol), fallback as usize)
+ }
+ unsafe extern "system" fn fallback($($argname: $argtype),*)
+ -> $rettype {
+ $($body);*
+ }
+
+ let addr = match PTR.load(Ordering::SeqCst) {
+ 0 => load(),
+ n => n,
+ };
+ mem::transmute::<usize, F>(addr)($($argname),*)
+ }
+ )*)
+ }
+
+ compat_fn! {
+ kernel32:
+ fn GetFinalPathNameByHandleW(_hFile: HANDLE,
+ _lpszFilePath: LPCWSTR,
+ _cchFilePath: DWORD,
+ _dwFlags: DWORD) -> DWORD {
+ SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0
+ }
+ fn SetFileInformationByHandle(_hFile: HANDLE,
+ _FileInformationClass: FILE_INFO_BY_HANDLE_CLASS,
+ _lpFileInformation: LPVOID,
+ _dwBufferSize: DWORD) -> BOOL {
+ SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0
+ }
+ }
+
+ fn cvt(i: i32) -> io::Result<i32> {
+ if i == 0 {
+ Err(io::Error::last_os_error())
+ } else {
+ Ok(i)
+ }
+ }
+
+ fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
+ fn inner(s: &OsStr) -> io::Result<Vec<u16>> {
+ let mut maybe_result: Vec<u16> = s.encode_wide().collect();
+ if maybe_result.iter().any(|&u| u == 0) {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidInput,
+ "strings passed to WinAPI cannot contain NULs",
+ ));
+ }
+ maybe_result.push(0);
+ Ok(maybe_result)
+ }
+ inner(s.as_ref())
+ }
+
+ fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] {
+ match v.iter().position(|c| *c == 0) {
+ // don't include the 0
+ Some(i) => &v[..i],
+ None => v,
+ }
+ }
+
+ fn fill_utf16_buf<F1, F2, T>(mut f1: F1, f2: F2) -> io::Result<T>
+ where
+ F1: FnMut(*mut u16, DWORD) -> DWORD,
+ F2: FnOnce(&[u16]) -> T,
+ {
+ // Start off with a stack buf but then spill over to the heap if we end up
+ // needing more space.
+ let mut stack_buf = [0u16; 512];
+ let mut heap_buf = Vec::new();
+ unsafe {
+ let mut n = stack_buf.len();
+ loop {
+ let buf = if n <= stack_buf.len() {
+ &mut stack_buf[..]
+ } else {
+ let extra = n - heap_buf.len();
+ heap_buf.reserve(extra);
+ heap_buf.set_len(n);
+ &mut heap_buf[..]
+ };
+
+ // This function is typically called on windows API functions which
+ // will return the correct length of the string, but these functions
+ // also return the `0` on error. In some cases, however, the
+ // returned "correct length" may actually be 0!
+ //
+ // To handle this case we call `SetLastError` to reset it to 0 and
+ // then check it again if we get the "0 error value". If the "last
+ // error" is still 0 then we interpret it as a 0 length buffer and
+ // not an actual error.
+ SetLastError(0);
+ let k = match f1(buf.as_mut_ptr(), n as DWORD) {
+ 0 if GetLastError() == 0 => 0,
+ 0 => return Err(io::Error::last_os_error()),
+ n => n,
+ } as usize;
+ if k == n && GetLastError() == ERROR_INSUFFICIENT_BUFFER {
+ n *= 2;
+ } else if k >= n {
+ n = k;
+ } else {
+ return Ok(f2(&buf[..k]));
+ }
+ }
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq, Debug, Default)]
+ struct FilePermissions {
+ readonly: bool,
+ }
+
+ impl FilePermissions {
+ fn new() -> FilePermissions {
+ Default::default()
+ }
+ fn readonly(&self) -> bool {
+ self.readonly
+ }
+ fn set_readonly(&mut self, readonly: bool) {
+ self.readonly = readonly
+ }
+ }
+
+ #[derive(Clone)]
+ struct OpenOptions {
+ // generic
+ read: bool,
+ write: bool,
+ append: bool,
+ truncate: bool,
+ create: bool,
+ create_new: bool,
+ // system-specific
+ custom_flags: u32,
+ access_mode: Option<DWORD>,
+ attributes: DWORD,
+ share_mode: DWORD,
+ security_qos_flags: DWORD,
+ security_attributes: usize, // FIXME: should be a reference
+ }
+
+ impl OpenOptions {
+ fn new() -> OpenOptions {
+ OpenOptions {
+ // generic
+ read: false,
+ write: false,
+ append: false,
+ truncate: false,
+ create: false,
+ create_new: false,
+ // system-specific
+ custom_flags: 0,
+ access_mode: None,
+ share_mode: FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
+ attributes: 0,
+ security_qos_flags: 0,
+ security_attributes: 0,
+ }
+ }
+ fn custom_flags(&mut self, flags: u32) {
+ self.custom_flags = flags;
+ }
+ fn access_mode(&mut self, access_mode: u32) {
+ self.access_mode = Some(access_mode);
+ }
+
+ fn get_access_mode(&self) -> io::Result<DWORD> {
+ const ERROR_INVALID_PARAMETER: i32 = 87;
+
+ match (self.read, self.write, self.append, self.access_mode) {
+ (_, _, _, Some(mode)) => Ok(mode),
+ (true, false, false, None) => Ok(GENERIC_READ),
+ (false, true, false, None) => Ok(GENERIC_WRITE),
+ (true, true, false, None) => Ok(GENERIC_READ | GENERIC_WRITE),
+ (false, _, true, None) => Ok(FILE_GENERIC_WRITE & !FILE_WRITE_DATA),
+ (true, _, true, None) => Ok(GENERIC_READ | (FILE_GENERIC_WRITE & !FILE_WRITE_DATA)),
+ (false, false, false, None) => {
+ Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER))
+ }
+ }
+ }
+
+ fn get_creation_mode(&self) -> io::Result<DWORD> {
+ const ERROR_INVALID_PARAMETER: i32 = 87;
+
+ match (self.write, self.append) {
+ (true, false) => {}
+ (false, false) => {
+ if self.truncate || self.create || self.create_new {
+ return Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
+ }
+ }
+ (_, true) => {
+ if self.truncate && !self.create_new {
+ return Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
+ }
+ }
+ }
+
+ Ok(match (self.create, self.truncate, self.create_new) {
+ (false, false, false) => OPEN_EXISTING,
+ (true, false, false) => OPEN_ALWAYS,
+ (false, true, false) => TRUNCATE_EXISTING,
+ (true, true, false) => CREATE_ALWAYS,
+ (_, _, true) => CREATE_NEW,
+ })
+ }
+
+ fn get_flags_and_attributes(&self) -> DWORD {
+ self.custom_flags
+ | self.attributes
+ | self.security_qos_flags
+ | if self.security_qos_flags != 0 {
+ SECURITY_SQOS_PRESENT
+ } else {
+ 0
+ }
+ | if self.create_new {
+ FILE_FLAG_OPEN_REPARSE_POINT
+ } else {
+ 0
+ }
+ }
+ }
+
+ struct File {
+ handle: Handle,
+ }
+
+ impl File {
+ fn open(path: &Path, opts: &OpenOptions) -> io::Result<File> {
+ let path = to_u16s(path)?;
+ let handle = unsafe {
+ CreateFileW(
+ path.as_ptr(),
+ opts.get_access_mode()?,
+ opts.share_mode,
+ opts.security_attributes as *mut _,
+ opts.get_creation_mode()?,
+ opts.get_flags_and_attributes(),
+ ptr::null_mut(),
+ )
+ };
+ if handle == INVALID_HANDLE_VALUE {
+ Err(io::Error::last_os_error())
+ } else {
+ Ok(File {
+ handle: Handle::new(handle),
+ })
+ }
+ }
+
+ fn file_attr(&self) -> io::Result<FileAttr> {
+ unsafe {
+ let mut info: BY_HANDLE_FILE_INFORMATION = mem::zeroed();
+ cvt(GetFileInformationByHandle(self.handle.raw(), &mut info))?;
+ let mut attr = FileAttr {
+ attributes: info.dwFileAttributes,
+ creation_time: info.ftCreationTime,
+ last_access_time: info.ftLastAccessTime,
+ last_write_time: info.ftLastWriteTime,
+ file_size: ((info.nFileSizeHigh as u64) << 32) | (info.nFileSizeLow as u64),
+ reparse_tag: 0,
+ };
+ if attr.is_reparse_point() {
+ let mut b = [0; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ if let Ok((_, buf)) = self.reparse_point(&mut b) {
+ attr.reparse_tag = buf.ReparseTag;
+ }
+ }
+ Ok(attr)
+ }
+ }
+
+ fn set_attributes(&self, attr: DWORD) -> io::Result<()> {
+ let zero: LARGE_INTEGER = unsafe { mem::zeroed() };
+
+ let mut info = FILE_BASIC_INFO {
+ CreationTime: zero, // do not change
+ LastAccessTime: zero, // do not change
+ LastWriteTime: zero, // do not change
+ ChangeTime: zero, // do not change
+ FileAttributes: attr,
+ };
+ let size = mem::size_of_val(&info);
+ cvt(unsafe {
+ SetFileInformationByHandle(
+ self.handle.raw(),
+ FileBasicInfo,
+ &mut info as *mut _ as *mut _,
+ size as DWORD,
+ )
+ })?;
+ Ok(())
+ }
+
+ fn rename(&self, new: &Path, replace: bool) -> io::Result<()> {
+ // &self must be opened with DELETE permission
+ use std::iter;
+ #[cfg(target_arch = "x86")]
+ const STRUCT_SIZE: usize = 12;
+ #[cfg(target_arch = "x86_64")]
+ const STRUCT_SIZE: usize = 20;
+
+ // FIXME: check for internal NULs in 'new'
+ let mut data: Vec<u16> = iter::repeat(0u16)
+ .take(STRUCT_SIZE / 2)
+ .chain(new.as_os_str().encode_wide())
+ .collect();
+ data.push(0);
+ let size = data.len() * 2;
+
+ unsafe {
+ // Thanks to alignment guarantees on Windows this works
+ // (8 for 32-bit and 16 for 64-bit)
+ let info = data.as_mut_ptr() as *mut FILE_RENAME_INFO;
+ // The type of ReplaceIfExists is BOOL, but it actually expects a
+ // BOOLEAN. This means true is -1, not TRUE (1).
+ (*info).ReplaceIfExists = if replace { -1 } else { FALSE };
+ (*info).RootDirectory = ptr::null_mut();
+ (*info).FileNameLength = (size - STRUCT_SIZE) as DWORD;
+ cvt(SetFileInformationByHandle(
+ self.handle().raw(),
+ FileRenameInfo,
+ data.as_mut_ptr() as *mut _ as *mut _,
+ size as DWORD,
+ ))?;
+ Ok(())
+ }
+ }
+ fn set_perm(&self, perm: FilePermissions) -> io::Result<()> {
+ let attr = self.file_attr()?.attributes;
+ if perm.readonly == (attr & FILE_ATTRIBUTE_READONLY != 0) {
+ Ok(())
+ } else if perm.readonly {
+ self.set_attributes(attr | FILE_ATTRIBUTE_READONLY)
+ } else {
+ self.set_attributes(attr & !FILE_ATTRIBUTE_READONLY)
+ }
+ }
+
+ fn handle(&self) -> &Handle {
+ &self.handle
+ }
+
+ fn reparse_point<'a>(
+ &self,
+ space: &'a mut [u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE],
+ ) -> io::Result<(DWORD, &'a REPARSE_DATA_BUFFER)> {
+ unsafe {
+ let mut bytes = 0;
+ cvt({
+ DeviceIoControl(
+ self.handle.raw(),
+ FSCTL_GET_REPARSE_POINT,
+ ptr::null_mut(),
+ 0,
+ space.as_mut_ptr() as *mut _,
+ space.len() as DWORD,
+ &mut bytes,
+ ptr::null_mut(),
+ )
+ })?;
+ Ok((bytes, &*(space.as_ptr() as *const REPARSE_DATA_BUFFER)))
+ }
+ }
+ }
+
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ enum FileType {
+ Dir,
+ File,
+ SymlinkFile,
+ SymlinkDir,
+ ReparsePoint,
+ MountPoint,
+ }
+
+ impl FileType {
+ fn new(attrs: DWORD, reparse_tag: DWORD) -> FileType {
+ match (
+ attrs & FILE_ATTRIBUTE_DIRECTORY != 0,
+ attrs & FILE_ATTRIBUTE_REPARSE_POINT != 0,
+ reparse_tag,
+ ) {
+ (false, false, _) => FileType::File,
+ (true, false, _) => FileType::Dir,
+ (false, true, IO_REPARSE_TAG_SYMLINK) => FileType::SymlinkFile,
+ (true, true, IO_REPARSE_TAG_SYMLINK) => FileType::SymlinkDir,
+ (true, true, IO_REPARSE_TAG_MOUNT_POINT) => FileType::MountPoint,
+ (_, true, _) => FileType::ReparsePoint,
+ // Note: if a _file_ has a reparse tag of the type IO_REPARSE_TAG_MOUNT_POINT it is
+ // invalid, as junctions always have to be dirs. We set the filetype to ReparsePoint
+ // to indicate it is something symlink-like, but not something you can follow.
+ }
+ }
+
+ fn is_dir(&self) -> bool {
+ *self == FileType::Dir
+ }
+ fn is_symlink_dir(&self) -> bool {
+ *self == FileType::SymlinkDir || *self == FileType::MountPoint
+ }
+ }
+
+ impl DirEntry {
+ fn new(root: &Arc<PathBuf>, wfd: &WIN32_FIND_DATAW) -> Option<DirEntry> {
+ let first_bytes = &wfd.cFileName[0..3];
+ if first_bytes.starts_with(&[46, 0]) || first_bytes.starts_with(&[46, 46, 0]) {
+ None
+ } else {
+ Some(DirEntry {
+ root: root.clone(),
+ data: *wfd,
+ })
+ }
+ }
+
+ fn path(&self) -> PathBuf {
+ self.root.join(&self.file_name())
+ }
+
+ fn file_name(&self) -> OsString {
+ let filename = truncate_utf16_at_nul(&self.data.cFileName);
+ OsString::from_wide(filename)
+ }
+
+ fn file_type(&self) -> io::Result<FileType> {
+ Ok(FileType::new(
+ self.data.dwFileAttributes,
+ /* reparse_tag = */ self.data.dwReserved0,
+ ))
+ }
+
+ fn metadata(&self) -> io::Result<FileAttr> {
+ Ok(FileAttr {
+ attributes: self.data.dwFileAttributes,
+ creation_time: self.data.ftCreationTime,
+ last_access_time: self.data.ftLastAccessTime,
+ last_write_time: self.data.ftLastWriteTime,
+ file_size: ((self.data.nFileSizeHigh as u64) << 32)
+ | (self.data.nFileSizeLow as u64),
+ reparse_tag: if self.data.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT != 0 {
+ // reserved unless this is a reparse point
+ self.data.dwReserved0
+ } else {
+ 0
+ },
+ })
+ }
+ }
+
+ struct DirEntry {
+ root: Arc<PathBuf>,
+ data: WIN32_FIND_DATAW,
+ }
+
+ struct ReadDir {
+ handle: FindNextFileHandle,
+ root: Arc<PathBuf>,
+ first: Option<WIN32_FIND_DATAW>,
+ }
+
+ impl Iterator for ReadDir {
+ type Item = io::Result<DirEntry>;
+ fn next(&mut self) -> Option<io::Result<DirEntry>> {
+ if let Some(first) = self.first.take() {
+ if let Some(e) = DirEntry::new(&self.root, &first) {
+ return Some(Ok(e));
+ }
+ }
+ unsafe {
+ let mut wfd = mem::zeroed();
+ loop {
+ if FindNextFileW(self.handle.0, &mut wfd) == 0 {
+ if GetLastError() == ERROR_NO_MORE_FILES {
+ return None;
+ } else {
+ return Some(Err(io::Error::last_os_error()));
+ }
+ }
+ if let Some(e) = DirEntry::new(&self.root, &wfd) {
+ return Some(Ok(e));
+ }
+ }
+ }
+ }
+ }
+
+ #[derive(Clone)]
+ struct FileAttr {
+ attributes: DWORD,
+ creation_time: FILETIME,
+ last_access_time: FILETIME,
+ last_write_time: FILETIME,
+ file_size: u64,
+ reparse_tag: DWORD,
+ }
+
+ impl FileAttr {
+ fn perm(&self) -> FilePermissions {
+ FilePermissions {
+ readonly: self.attributes & FILE_ATTRIBUTE_READONLY != 0,
+ }
+ }
+
+ fn file_type(&self) -> FileType {
+ FileType::new(self.attributes, self.reparse_tag)
+ }
+
+ fn is_reparse_point(&self) -> bool {
+ self.attributes & FILE_ATTRIBUTE_REPARSE_POINT != 0
+ }
+ }
+
+ #[repr(C)]
+ struct REPARSE_DATA_BUFFER {
+ ReparseTag: c_uint,
+ ReparseDataLength: c_ushort,
+ Reserved: c_ushort,
+ rest: (),
+ }
+
+ const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
+
+ /// An owned container for `HANDLE` object, closing them on Drop.
+ ///
+ /// All methods are inherited through a `Deref` impl to `RawHandle`
+ struct Handle(RawHandle);
+
+ use std::ops::Deref;
+
+ /// A wrapper type for `HANDLE` objects to give them proper Send/Sync inference
+ /// as well as Rust-y methods.
+ ///
+ /// This does **not** drop the handle when it goes out of scope, use `Handle`
+ /// instead for that.
+ #[derive(Copy, Clone)]
+ struct RawHandle(HANDLE);
+
+ unsafe impl Send for RawHandle {}
+ unsafe impl Sync for RawHandle {}
+
+ impl Handle {
+ fn new(handle: HANDLE) -> Handle {
+ Handle(RawHandle::new(handle))
+ }
+ }
+
+ impl Deref for Handle {
+ type Target = RawHandle;
+ fn deref(&self) -> &RawHandle {
+ &self.0
+ }
+ }
+
+ impl Drop for Handle {
+ fn drop(&mut self) {
+ unsafe {
+ let _ = CloseHandle(self.raw());
+ }
+ }
+ }
+
+ impl RawHandle {
+ fn new(handle: HANDLE) -> RawHandle {
+ RawHandle(handle)
+ }
+
+ fn raw(&self) -> HANDLE {
+ self.0
+ }
+ }
+
+ struct FindNextFileHandle(HANDLE);
+
+ fn get_path(f: &File) -> io::Result<PathBuf> {
+ fill_utf16_buf(
+ |buf, sz| unsafe {
+ GetFinalPathNameByHandleW(f.handle.raw(), buf, sz, VOLUME_NAME_DOS)
+ },
+ |buf| PathBuf::from(OsString::from_wide(buf)),
+ )
+ }
+
+ fn move_item(file: &File, ctx: &mut RmdirContext) -> io::Result<()> {
+ let mut tmpname = ctx.base_dir.join(format! {"rm-{}", ctx.counter});
+ ctx.counter += 1;
+ // Try to rename the file. If it already exists, just retry with another
+ // filename.
+ while let Err(err) = file.rename(tmpname.as_ref(), false) {
+ if err.kind() != io::ErrorKind::AlreadyExists {
+ return Err(err);
+ };
+ tmpname = ctx.base_dir.join(format!("rm-{}", ctx.counter));
+ ctx.counter += 1;
+ }
+ Ok(())
+ }
+
+ fn set_perm(path: &Path, perm: FilePermissions) -> io::Result<()> {
+ let mut opts = OpenOptions::new();
+ opts.access_mode(FILE_READ_ATTRIBUTES | FILE_WRITE_ATTRIBUTES);
+ opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS);
+ let file = File::open(path, &opts)?;
+ file.set_perm(perm)
+ }
+
+ const VOLUME_NAME_DOS: DWORD = 0x0;
+}
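Usage of the module's entry point is the same on every platform; a sketch, assuming it sits next to the cfg-switched functions at the top of this file (the path is illustrative):

    use std::path::Path;

    fn wipe_package_dir() -> std::io::Result<()> {
        // On Windows each entry is renamed to rm-<counter> in its parent and opened
        // with FILE_FLAG_DELETE_ON_CLOSE; on other platforms this is std's version.
        remove_dir_all(Path::new("./workdir/package"))
    }
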
diff --git a/src/tools/rust-installer/src/scripter.rs b/src/tools/rust-installer/src/scripter.rs
new file mode 100644
index 000000000..06affc029
--- /dev/null
+++ b/src/tools/rust-installer/src/scripter.rs
@@ -0,0 +1,68 @@
+use crate::util::*;
+use anyhow::{Context, Result};
+use std::io::Write;
+
+const TEMPLATE: &'static str = include_str!("../install-template.sh");
+
+actor! {
+ #[derive(Debug)]
+ pub struct Scripter {
+ /// The name of the product, for display
+ #[clap(value_name = "NAME")]
+ product_name: String = "Product",
+
+ /// The directory under lib/ where the manifest lives
+ #[clap(value_name = "DIR")]
+ rel_manifest_dir: String = "manifestlib",
+
+ /// The string to print after successful installation
+ #[clap(value_name = "MESSAGE")]
+ success_message: String = "Installed.",
+
+ /// Places to look for legacy manifests to uninstall
+ #[clap(value_name = "DIRS")]
+ legacy_manifest_dirs: String = "",
+
+ /// The name of the output script
+ #[clap(value_name = "FILE")]
+ output_script: String = "install.sh",
+ }
+}
+
+impl Scripter {
+ /// Generates the actual installer script
+ pub fn run(self) -> Result<()> {
+ // Replace dashes in the product name with spaces (our arg handling botches spaces)
+ // TODO: still needed? Kept for compatibility for now.
+ let product_name = self.product_name.replace('-', " ");
+
+ // Replace dashes in the success message with spaces (our arg handling botches spaces)
+ // TODO: still needed? Kept for compatibility for now.
+ let success_message = self.success_message.replace('-', " ");
+
+ let script = TEMPLATE
+ .replace("%%TEMPLATE_PRODUCT_NAME%%", &sh_quote(&product_name))
+ .replace("%%TEMPLATE_REL_MANIFEST_DIR%%", &self.rel_manifest_dir)
+ .replace("%%TEMPLATE_SUCCESS_MESSAGE%%", &sh_quote(&success_message))
+ .replace(
+ "%%TEMPLATE_LEGACY_MANIFEST_DIRS%%",
+ &sh_quote(&self.legacy_manifest_dirs),
+ )
+ .replace(
+ "%%TEMPLATE_RUST_INSTALLER_VERSION%%",
+ &sh_quote(&crate::RUST_INSTALLER_VERSION),
+ );
+
+ create_new_executable(&self.output_script)?
+ .write_all(script.as_ref())
+ .with_context(|| format!("failed to write output script '{}'", self.output_script))?;
+
+ Ok(())
+ }
+}
+
+fn sh_quote<T: ToString>(s: &T) -> String {
+ // We'll single-quote the whole thing, so first replace single-quotes with
+ // '"'"' (leave quoting, double-quote one `'`, re-enter single-quoting)
+ format!("'{}'", s.to_string().replace('\'', r#"'"'"'"#))
+}
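To make the quoting rule concrete, a small test that could sit in this module (the inputs are just illustrative):

    #[test]
    fn sh_quote_examples() {
        // Leave single-quoting, emit a double-quoted ', then re-enter single-quoting.
        assert_eq!(sh_quote(&"it's"), r#"'it'"'"'s'"#);
        assert_eq!(sh_quote(&42), "'42'");
    }
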
diff --git a/src/tools/rust-installer/src/tarballer.rs b/src/tools/rust-installer/src/tarballer.rs
new file mode 100644
index 000000000..76f5af3fa
--- /dev/null
+++ b/src/tools/rust-installer/src/tarballer.rs
@@ -0,0 +1,143 @@
+use anyhow::{bail, Context, Result};
+use std::fs::{read_link, symlink_metadata};
+use std::io::{empty, BufWriter, Write};
+use std::path::Path;
+use tar::{Builder, Header};
+use walkdir::WalkDir;
+
+use crate::{
+ compression::{CombinedEncoder, CompressionFormats},
+ util::*,
+};
+
+actor! {
+ #[derive(Debug)]
+ pub struct Tarballer {
+ /// The input folder to be compressed.
+ #[clap(value_name = "NAME")]
+ input: String = "package",
+
+ /// The prefix of the tarballs.
+ #[clap(value_name = "PATH")]
+ output: String = "./dist",
+
+ /// The folder in which the input is to be found.
+ #[clap(value_name = "DIR")]
+ work_dir: String = "./workdir",
+
+ /// The formats used to compress the tarball.
+ #[clap(value_name = "FORMAT", default_value_t)]
+ compression_formats: CompressionFormats,
+ }
+}
+
+impl Tarballer {
+ /// Generates the actual tarballs
+ pub fn run(self) -> Result<()> {
+ let tarball_name = self.output.clone() + ".tar";
+ let encoder = CombinedEncoder::new(
+ self.compression_formats
+ .iter()
+ .map(|f| f.encode(&tarball_name))
+ .collect::<Result<Vec<_>>>()?,
+ );
+
+ // Sort files by their suffix, to group files with the same name from
+ // different locations (likely identical) and files with the same
+ // extension (likely containing similar data).
+ let (dirs, mut files) = get_recursive_paths(&self.work_dir, &self.input)
+ .context("failed to collect file paths")?;
+ files.sort_by(|a, b| a.bytes().rev().cmp(b.bytes().rev()));
+
+ // Write the tar into both encoded files. We write all directories
+ // first, so files may be directly created. (See rust-lang/rustup.rs#1092.)
+ let buf = BufWriter::with_capacity(1024 * 1024, encoder);
+ let mut builder = Builder::new(buf);
+
+ let pool = rayon::ThreadPoolBuilder::new()
+ .num_threads(2)
+ .build()
+ .unwrap();
+ pool.install(move || {
+ for path in dirs {
+ let src = Path::new(&self.work_dir).join(&path);
+ builder
+ .append_dir(&path, &src)
+ .with_context(|| format!("failed to tar dir '{}'", src.display()))?;
+ }
+ for path in files {
+ let src = Path::new(&self.work_dir).join(&path);
+ append_path(&mut builder, &src, &path)
+ .with_context(|| format!("failed to tar file '{}'", src.display()))?;
+ }
+ builder
+ .into_inner()
+ .context("failed to finish writing .tar stream")?
+ .into_inner()
+ .ok()
+ .unwrap()
+ .finish()?;
+
+ Ok(())
+ })
+ }
+}
+
+fn append_path<W: Write>(builder: &mut Builder<W>, src: &Path, path: &String) -> Result<()> {
+ let stat = symlink_metadata(src)?;
+ let mut header = Header::new_gnu();
+ header.set_metadata(&stat);
+ if stat.file_type().is_symlink() {
+ let link = read_link(src)?;
+ header.set_link_name(&link)?;
+ builder.append_data(&mut header, path, &mut empty())?;
+ } else {
+ if cfg!(windows) {
+ // Windows doesn't really have a mode, so `tar` never marks files executable.
+ // Use an extension whitelist to update files that usually should be so.
+ const EXECUTABLES: [&'static str; 4] = ["exe", "dll", "py", "sh"];
+ if let Some(ext) = src.extension().and_then(|s| s.to_str()) {
+ if EXECUTABLES.contains(&ext) {
+ let mode = header.mode()?;
+ header.set_mode(mode | 0o111);
+ }
+ }
+ }
+ let file = open_file(src)?;
+ builder.append_data(&mut header, path, &file)?;
+ }
+ Ok(())
+}
+
+/// Returns all `(directories, files)` under the source path.
+fn get_recursive_paths<P, Q>(root: P, name: Q) -> Result<(Vec<String>, Vec<String>)>
+where
+ P: AsRef<Path>,
+ Q: AsRef<Path>,
+{
+ let root = root.as_ref();
+ let name = name.as_ref();
+
+ if !name.is_relative() && !name.starts_with(root) {
+ bail!(
+ "input '{}' is not in work dir '{}'",
+ name.display(),
+ root.display()
+ );
+ }
+
+ let mut dirs = vec![];
+ let mut files = vec![];
+ for entry in WalkDir::new(root.join(name)) {
+ let entry = entry?;
+ let path = entry.path().strip_prefix(root)?;
+ let path = path_to_str(&path)?;
+
+ if entry.file_type().is_dir() {
+ dirs.push(path.to_owned());
+ } else {
+ files.push(path.to_owned());
+ }
+ }
+ Ok((dirs, files))
+}
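A minimal sketch, not part of the patch, of the suffix ordering used in `Tarballer::run` above: comparing reversed byte iterators groups paths that share a file name or extension, so likely-similar contents sit next to each other in the tar stream:

    // Hypothetical demonstration of the reversed-bytes comparison; the paths are made up.
    let mut files = vec![
        "b/main.rs".to_string(),
        "a/lib.rs".to_string(),
        "b/lib.rs".to_string(),
    ];
    files.sort_by(|a, b| a.bytes().rev().cmp(b.bytes().rev()));
    // The two lib.rs entries are now adjacent.
    assert_eq!(files, ["a/lib.rs", "b/lib.rs", "b/main.rs"]);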
diff --git a/src/tools/rust-installer/src/util.rs b/src/tools/rust-installer/src/util.rs
new file mode 100644
index 000000000..674617c65
--- /dev/null
+++ b/src/tools/rust-installer/src/util.rs
@@ -0,0 +1,156 @@
+use anyhow::{format_err, Context, Result};
+use std::fs;
+use std::path::Path;
+use walkdir::WalkDir;
+
+// Needed to set the script mode to executable.
+#[cfg(unix)]
+use std::os::unix::fs::OpenOptionsExt;
+// FIXME: what about Windows? Are default ACLs executable?
+
+#[cfg(unix)]
+use std::os::unix::fs::symlink as symlink_file;
+#[cfg(windows)]
+use std::os::windows::fs::symlink_file;
+
+/// Converts a `&Path` to a UTF-8 `&str`.
+pub fn path_to_str(path: &Path) -> Result<&str> {
+ path.to_str()
+ .ok_or_else(|| format_err!("path is not valid UTF-8 '{}'", path.display()))
+}
+
+/// Wraps `fs::copy` with a nicer error message.
+pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
+ if fs::symlink_metadata(&from)?.file_type().is_symlink() {
+ let link = fs::read_link(&from)?;
+ symlink_file(link, &to)?;
+ Ok(0)
+ } else {
+ let amt = fs::copy(&from, &to).with_context(|| {
+ format!(
+ "failed to copy '{}' to '{}'",
+ from.as_ref().display(),
+ to.as_ref().display()
+ )
+ })?;
+ Ok(amt)
+ }
+}
+
+/// Wraps `fs::create_dir` with a nicer error message.
+pub fn create_dir<P: AsRef<Path>>(path: P) -> Result<()> {
+ fs::create_dir(&path)
+ .with_context(|| format!("failed to create dir '{}'", path.as_ref().display()))?;
+ Ok(())
+}
+
+/// Wraps `fs::create_dir_all` with a nicer error message.
+pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
+ fs::create_dir_all(&path)
+ .with_context(|| format!("failed to create dir '{}'", path.as_ref().display()))?;
+ Ok(())
+}
+
+/// Wraps `fs::OpenOptions::create_new().open()` as executable, with a nicer error message.
+pub fn create_new_executable<P: AsRef<Path>>(path: P) -> Result<fs::File> {
+ let mut options = fs::OpenOptions::new();
+ options.write(true).create_new(true);
+ #[cfg(unix)]
+ options.mode(0o755);
+ let file = options
+ .open(&path)
+ .with_context(|| format!("failed to create file '{}'", path.as_ref().display()))?;
+ Ok(file)
+}
+
+/// Wraps `fs::OpenOptions::create_new().open()`, with a nicer error message.
+pub fn create_new_file<P: AsRef<Path>>(path: P) -> Result<fs::File> {
+ let file = fs::OpenOptions::new()
+ .write(true)
+ .create_new(true)
+ .open(&path)
+ .with_context(|| format!("failed to create file '{}'", path.as_ref().display()))?;
+ Ok(file)
+}
+
+/// Wraps `fs::File::open()` with a nicer error message.
+pub fn open_file<P: AsRef<Path>>(path: P) -> Result<fs::File> {
+ let file = fs::File::open(&path)
+ .with_context(|| format!("failed to open file '{}'", path.as_ref().display()))?;
+ Ok(file)
+}
+
+/// Wraps `remove_dir_all` with a nicer error message.
+pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
+ remove_dir_all::remove_dir_all(path.as_ref())
+ .with_context(|| format!("failed to remove dir '{}'", path.as_ref().display()))?;
+ Ok(())
+}
+
+/// Wraps `fs::remove_file` with a nicer error message.
+pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {
+ fs::remove_file(path.as_ref())
+ .with_context(|| format!("failed to remove file '{}'", path.as_ref().display()))?;
+ Ok(())
+}
+
+/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
+/// when this function is called.
+pub fn copy_recursive(src: &Path, dst: &Path) -> Result<()> {
+ copy_with_callback(src, dst, |_, _| Ok(()))
+}
+
+/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
+/// when this function is called. Invokes a callback for each path visited.
+pub fn copy_with_callback<F>(src: &Path, dst: &Path, mut callback: F) -> Result<()>
+where
+ F: FnMut(&Path, fs::FileType) -> Result<()>,
+{
+ for entry in WalkDir::new(src).min_depth(1) {
+ let entry = entry?;
+ let file_type = entry.file_type();
+ let path = entry.path().strip_prefix(src)?;
+ let dst = dst.join(path);
+
+ if file_type.is_dir() {
+ create_dir(&dst)?;
+ } else {
+ copy(entry.path(), dst)?;
+ }
+ callback(&path, file_type)?;
+ }
+ Ok(())
+}
+
+macro_rules! actor_field_default {
+ () => { Default::default() };
+ (= $expr:expr) => { $expr.into() }
+}
+
+/// Creates an "actor" with default values, setters for all fields, and Clap parser support.
+macro_rules! actor {
+ ($( #[ $attr:meta ] )+ pub struct $name:ident {
+ $( $( #[ $field_attr:meta ] )+ $field:ident : $type:ty $(= $default:tt)*, )*
+ }) => {
+ $( #[ $attr ] )+
+ #[derive(clap::Args)]
+ pub struct $name {
+ $( $( #[ $field_attr ] )+ #[clap(long, $(default_value = $default)*)] $field : $type, )*
+ }
+
+ impl Default for $name {
+ fn default() -> $name {
+ $name {
+ $($field : actor_field_default!($(= $default)*), )*
+ }
+ }
+ }
+
+ impl $name {
+ $(pub fn $field(&mut self, value: $type) -> &mut Self {
+ self.$field = value;
+ self
+ })*
+ }
+ }
+}
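For orientation, a hypothetical usage sketch, not part of the patch: the `actor!` macro above gives each generated struct a `Default` impl plus chainable `&mut Self` setters, so the `Tarballer` from tarballer.rs can be configured roughly like this (the values shown are made up):

    // Hypothetical sketch; assumes Tarballer is in scope and the caller returns anyhow::Result.
    let mut tarballer = Tarballer::default();
    tarballer
        .work_dir("./workdir".to_string())
        .input("package".to_string())
        .output("./dist/rustc-nightly".to_string());
    tarballer.run()?; // writes one tarball per configured compression format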
diff --git a/src/tools/rust-installer/test.sh b/src/tools/rust-installer/test.sh
new file mode 100755
index 000000000..bf6de4cb1
--- /dev/null
+++ b/src/tools/rust-installer/test.sh
@@ -0,0 +1,1342 @@
+#!/bin/bash
+
+set -e -u
+
+if [ -x /bin/echo ]; then
+ ECHO='/bin/echo'
+else
+ ECHO='echo'
+fi
+
+# Prints the absolute path of a directory to stdout
+abs_path() {
+ local path="$1"
+    # Unset CDPATH because it causes havoc: it makes the destination unpredictable
+ # and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
+ # for good measure.
+ (unset CDPATH && cd "$path" > /dev/null && pwd)
+}
+
+S="$(abs_path $(dirname $0))"
+
+TEST_DIR="$S/test"
+TMP_DIR="$S/tmp"
+WORK_DIR="$TMP_DIR/workdir"
+OUT_DIR="$TMP_DIR/outdir"
+PREFIX_DIR="$TMP_DIR/prefix"
+
+case $(uname -s) in
+
+ MINGW* | MSYS*)
+ WINDOWS=1
+ ;;
+esac
+
+say() {
+ echo "test: $1"
+}
+
+pre() {
+ echo "test: $1"
+ rm -Rf "$WORK_DIR"
+ rm -Rf "$OUT_DIR"
+ rm -Rf "$PREFIX_DIR"
+ mkdir -p "$WORK_DIR"
+ mkdir -p "$OUT_DIR"
+ mkdir -p "$PREFIX_DIR"
+}
+
+need_ok() {
+ if [ $? -ne 0 ]
+ then
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ fi
+}
+
+fail() {
+ echo
+ echo "$1"
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+}
+
+try() {
+ set +e
+ _cmd="$@"
+ _output=`$@ 2>&1`
+ if [ $? -ne 0 ]; then
+ echo \$ "$_cmd"
+ # Using /bin/echo to avoid escaping
+ $ECHO "$_output"
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ else
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_CMD-}" ]; then
+ echo \$ "$_cmd"
+ fi
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_OUTPUT-}" ]; then
+ $ECHO "$_output"
+ fi
+ fi
+ set -e
+}
+
+expect_fail() {
+ set +e
+ _cmd="$@"
+ _output=`$@ 2>&1`
+ if [ $? -eq 0 ]; then
+ echo \$ "$_cmd"
+ # Using /bin/echo to avoid escaping
+ $ECHO "$_output"
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ else
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_CMD-}" ]; then
+ echo \$ "$_cmd"
+ fi
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_OUTPUT-}" ]; then
+ $ECHO "$_output"
+ fi
+ fi
+ set -e
+}
+
+expect_output_ok() {
+ set +e
+ local _expected="$1"
+ shift 1
+ _cmd="$@"
+ _output=`$@ 2>&1`
+ if [ $? -ne 0 ]; then
+ echo \$ "$_cmd"
+ # Using /bin/echo to avoid escaping
+ $ECHO "$_output"
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ elif ! echo "$_output" | grep -q "$_expected"; then
+ echo \$ "$_cmd"
+ $ECHO "$_output"
+ echo
+ echo "missing expected output '$_expected'"
+ echo
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ else
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_CMD-}" ]; then
+ echo \$ "$_cmd"
+ fi
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_OUTPUT-}" ]; then
+ $ECHO "$_output"
+ fi
+ fi
+ set -e
+}
+
+expect_output_fail() {
+ set +e
+ local _expected="$1"
+ shift 1
+ _cmd="$@"
+ _output=`$@ 2>&1`
+ if [ $? -eq 0 ]; then
+ echo \$ "$_cmd"
+ # Using /bin/echo to avoid escaping
+ $ECHO "$_output"
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ elif ! echo "$_output" | grep -q "$_expected"; then
+ echo \$ "$_cmd"
+ $ECHO "$_output"
+ echo
+ echo "missing expected output '$_expected'"
+ echo
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ else
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_CMD-}" ]; then
+ echo \$ "$_cmd"
+ fi
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_OUTPUT-}" ]; then
+ $ECHO "$_output"
+ fi
+ fi
+ set -e
+}
+
+expect_not_output_ok() {
+ set +e
+ local _expected="$1"
+ shift 1
+ _cmd="$@"
+ _output=`$@ 2>&1`
+ if [ $? -ne 0 ]; then
+ echo \$ "$_cmd"
+ # Using /bin/echo to avoid escaping
+ $ECHO "$_output"
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ elif echo "$_output" | grep -q "$_expected"; then
+ echo \$ "$_cmd"
+ $ECHO "$_output"
+ echo
+ echo "unexpected output '$_expected'"
+ echo
+ echo
+ echo "TEST FAILED!"
+ echo
+ exit 1
+ else
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_CMD-}" ]; then
+ echo \$ "$_cmd"
+ fi
+ if [ -n "${VERBOSE-}" -o -n "${VERBOSE_OUTPUT-}" ]; then
+ $ECHO "$_output"
+ fi
+ fi
+ set -e
+}
+
+runtest() {
+ local _testname="$1"
+ if [ -n "${TESTNAME-}" ]; then
+ if ! echo "$_testname" | grep -q "$TESTNAME"; then
+ return 0
+ fi
+ fi
+
+ pre "$_testname"
+ "$_testname"
+}
+
+# Installation tests
+
+basic_install() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+}
+runtest basic_install
+
+basic_uninstall() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/package/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/something-to-install"
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/program2"
+ try test ! -e "$PREFIX_DIR/bin/bad-bin"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest basic_uninstall
+
+not_installed_files() {
+ mkdir -p "$WORK_DIR/overlay"
+ touch "$WORK_DIR/overlay/not-installed"
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --non-installed-overlay="$WORK_DIR/overlay"
+ try test -e "$WORK_DIR/package/not-installed"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/not-installed"
+}
+runtest not_installed_files
+
+tarball_with_package_name() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc-nightly
+ try "$WORK_DIR/rustc-nightly/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$OUT_DIR/rustc-nightly.tar.gz"
+ try test -e "$OUT_DIR/rustc-nightly.tar.xz"
+}
+runtest tarball_with_package_name
+
+install_overwrite_backup() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try mkdir -p "$PREFIX_DIR/bin"
+ touch "$PREFIX_DIR/bin/program"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ # The existing program was backed up by 'install'
+ try test -e "$PREFIX_DIR/bin/program.old"
+}
+runtest install_overwrite_backup
+
+bulk_directory() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --bulk-dirs=dir-to-install
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR" --uninstall
+ try test ! -e "$PREFIX_DIR/dir-to-install"
+}
+runtest bulk_directory
+
+bulk_directory_overwrite() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --bulk-dirs=dir-to-install
+ try mkdir -p "$PREFIX_DIR/dir-to-install"
+ try touch "$PREFIX_DIR/dir-to-install/overwrite"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ # The file that used to exist in the directory no longer does
+ try test ! -e "$PREFIX_DIR/dir-to-install/overwrite"
+ # It was backed up
+ try test -e "$PREFIX_DIR/dir-to-install.old/overwrite"
+}
+runtest bulk_directory_overwrite
+
+bulk_directory_overwrite_existing_backup() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --bulk-dirs=dir-to-install
+ try mkdir -p "$PREFIX_DIR/dir-to-install"
+ try touch "$PREFIX_DIR/dir-to-install/overwrite"
+ # This time we've already got an existing backup of the overwritten directory.
+ # The install should still succeed.
+ try mkdir -p "$PREFIX_DIR/dir-to-install~"
+ try touch "$PREFIX_DIR/dir-to-install~/overwrite"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/dir-to-install/overwrite"
+ try test -e "$PREFIX_DIR/dir-to-install~/overwrite"
+}
+runtest bulk_directory_overwrite_existing_backup
+
+nested_bulk_directory() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --bulk-dirs=dir-to-install/qux
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/dir-to-install/qux/bar"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR" --uninstall
+ try test ! -e "$PREFIX_DIR/dir-to-install/qux"
+}
+runtest nested_bulk_directory
+
+only_bulk_directory_no_files() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image5" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --bulk-dirs=dir-to-install
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR" --uninstall
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+}
+runtest only_bulk_directory_no_files
+
+nested_not_installed_files() {
+ mkdir -p "$WORK_DIR/overlay"
+ touch "$WORK_DIR/overlay/not-installed"
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --non-installed-overlay="$WORK_DIR/overlay"
+ try test -e "$WORK_DIR/package/not-installed"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/not-installed"
+}
+runtest nested_not_installed_files
+
+multiple_components() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR/c1" \
+ --output-dir="$OUT_DIR/c1" \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR/c2" \
+ --output-dir="$OUT_DIR/c2" \
+ --component-name=cargo
+ try "$WORK_DIR/c1/package/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/c2/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try "$WORK_DIR/c1/package/install.sh" --prefix="$PREFIX_DIR" --uninstall
+ try test ! -e "$PREFIX_DIR/something-to-install"
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/program2"
+ try test ! -e "$PREFIX_DIR/bin/bad-bin"
+ try "$WORK_DIR/c2/package/install.sh" --prefix="$PREFIX_DIR" --uninstall
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest multiple_components
+
+uninstall_from_installed_script() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR/c1" \
+ --output-dir="$OUT_DIR/c1" \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR/c2" \
+ --output-dir="$OUT_DIR/c2" \
+ --component-name=cargo
+ try "$WORK_DIR/c1/package/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/c2/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ # All components should be uninstalled by this script
+ try sh "$PREFIX_DIR/lib/packagelib/uninstall.sh"
+ try test ! -e "$PREFIX_DIR/something-to-install"
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/program2"
+ try test ! -e "$PREFIX_DIR/bin/bad-bin"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest uninstall_from_installed_script
+
+uninstall_from_installed_script_with_args_fails() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR/c1" \
+ --output-dir="$OUT_DIR/c1" \
+ --component-name=rustc
+ try "$WORK_DIR/c1/package/install.sh" --prefix="$PREFIX_DIR"
+ expect_output_fail "uninstall.sh does not take any arguments" sh "$PREFIX_DIR/lib/packagelib/uninstall.sh" --prefix=foo
+}
+runtest uninstall_from_installed_script_with_args_fails
+
+# Combined installer tests
+
+combine_installers() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/something-to-install"
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/program2"
+ try test ! -e "$PREFIX_DIR/bin/bad-bin"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest combine_installers
+
+combine_three_installers() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/dir-to-install/qux/bar"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/something-to-install"
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/program2"
+ try test ! -e "$PREFIX_DIR/bin/bad-bin"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+ try test ! -e "$PREFIX_DIR/dir-to-install/qux/bar"
+}
+runtest combine_three_installers
+
+combine_installers_with_overlay() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ mkdir -p "$WORK_DIR/overlay"
+ touch "$WORK_DIR/overlay/README"
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz" \
+ --non-installed-overlay="$WORK_DIR/overlay"
+ try test -e "$WORK_DIR/rust/README"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/README"
+}
+runtest combine_installers_with_overlay
+
+combined_with_bulk_dirs() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc \
+ --bulk-dirs=dir-to-install
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/dir-to-install"
+}
+runtest combined_with_bulk_dirs
+
+combine_install_with_separate_uninstall() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc \
+ --rel-manifest-dir=rustlib
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo \
+ --rel-manifest-dir=rustlib
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz" \
+ --rel-manifest-dir=rustlib
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/something-to-install"
+ try test -e "$PREFIX_DIR/dir-to-install/foo"
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/program2"
+ try test -e "$PREFIX_DIR/bin/bad-bin"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try "$WORK_DIR/rustc/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/something-to-install"
+ try test ! -e "$PREFIX_DIR/dir-to-install/foo"
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/program2"
+ try test ! -e "$PREFIX_DIR/bin/bad-bin"
+ try "$WORK_DIR/cargo/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest combine_install_with_separate_uninstall
+
+select_components_to_install() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --components=rustc
+ try test -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --components=cargo
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --components=rust-docs
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --components=rustc,cargo
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --components=rustc,cargo,rust-docs
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest select_components_to_install
+
+select_components_to_uninstall() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --components=rustc
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --components=cargo
+ try test -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --components=rust-docs
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --components=rustc,cargo
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --components=rustc,cargo,rust-docs
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try test ! -e "$PREFIX_DIR/lib/packagelib"
+}
+runtest select_components_to_uninstall
+
+invalid_component() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ expect_output_fail "unknown component" "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --components=foo
+}
+runtest invalid_component
+
+without_components() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --without=rust-docs
+ try test -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --without=rust-docs,cargo
+ try test -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --without=rust-docs,rustc
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test ! -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR"
+}
+runtest without_components
+
+# --uninstall --without is a slightly odd combination:
+# --without causes the named components to remain installed
+uninstall_without_components() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --without=rust-docs
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --without=rust-docs,cargo
+ try test ! -e "$PREFIX_DIR/bin/program"
+ try test -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ try "$WORK_DIR/rust/install.sh --uninstall" --prefix="$PREFIX_DIR" --without=rust-docs,rustc
+ try test -e "$PREFIX_DIR/bin/program"
+ try test ! -e "$PREFIX_DIR/bin/cargo"
+ try test -e "$PREFIX_DIR/baz"
+}
+runtest uninstall_without_components
+
+without_any_components() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ expect_output_fail "no components selected for installation" \
+ "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --without=rust-docs,rustc,cargo
+}
+runtest without_any_components
+
+uninstall_without_any_components() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR"
+ expect_output_fail "no components selected for uninstallation" \
+ "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" \
+ --uninstall --without=rust-docs,rustc,cargo
+}
+runtest uninstall_without_any_components
+
+list_components() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ expect_output_ok "rustc" "$WORK_DIR/rust/install.sh" --list-components
+ expect_output_ok "cargo" "$WORK_DIR/rust/install.sh" --list-components
+ expect_output_ok "rust-docs" "$WORK_DIR/rust/install.sh" --list-components
+}
+runtest list_components
+
+combined_remains() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image4" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust-docs \
+ --component-name=rust-docs
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz"
+ for component in rustc cargo rust-docs; do
+ # rustbuild wants the original extracted package intact too
+ try test -d "$WORK_DIR/$component/$component"
+ try test -d "$WORK_DIR/rust/$component"
+ done
+}
+runtest combined_remains
+
+# Smoke tests
+
+cannot_write_error() {
+    # chmod doesn't work on Windows
+ if [ ! -n "${WINDOWS-}" ]; then
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ chmod u-w "$PREFIX_DIR"
+ expect_fail "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ chmod u+w "$PREFIX_DIR"
+ fi
+}
+runtest cannot_write_error
+
+cannot_install_to_installer() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=my-package
+ expect_output_fail "cannot install to same directory as installer" \
+ "$WORK_DIR/my-package/install.sh" --prefix="$WORK_DIR/my-package"
+}
+runtest cannot_install_to_installer
+
+upgrade_from_future_installer_error() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --rel-manifest-dir=rustlib
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ echo 100 > "$PREFIX_DIR/lib/rustlib/rust-installer-version"
+ expect_fail "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+}
+runtest upgrade_from_future_installer_error
+
+destdir() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --destdir="$PREFIX_DIR/" --prefix=prefix
+ try test -e "$PREFIX_DIR/prefix/bin/program"
+}
+runtest destdir
+
+destdir_no_trailing_slash() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --destdir="$PREFIX_DIR" --prefix=prefix
+ try test -e "$PREFIX_DIR/prefix/bin/program"
+}
+runtest destdir_no_trailing_slash
+
+disable_verify_noop() {
+    # Obsolete --disable-verify flag doesn't generate an error
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR" --disable-verify
+}
+runtest disable_verify_noop
+
+create_log() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/lib/packagelib/install.log"
+ local _log="$(cat "$PREFIX_DIR/lib/packagelib/install.log")"
+ if [ -z "$_log" ]; then
+ fail "log is empty"
+ fi
+}
+runtest create_log
+
+leave_log_after_failure() {
+    # chmod doesn't work on Windows
+ if [ ! -n "${WINDOWS-}" ]; then
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ mkdir -p "$PREFIX_DIR/lib/packagelib"
+ touch "$PREFIX_DIR/lib/packagelib/components"
+ chmod u-w "$PREFIX_DIR/lib/packagelib/components"
+ expect_fail "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ chmod u+w "$PREFIX_DIR/lib/packagelib/components"
+ try test -e "$PREFIX_DIR/lib/packagelib/install.log"
+ local _log="$(cat "$PREFIX_DIR/lib/packagelib/install.log")"
+ if [ -z "$_log" ]; then
+ fail "log is empty"
+ fi
+ # script should tell user where the logs are
+ if ! grep -q "see logs at" "$PREFIX_DIR/lib/packagelib/install.log"; then
+ fail "missing log message"
+ fi
+ fi
+}
+runtest leave_log_after_failure
+
+# https://github.com/rust-lang/rust-installer/issues/22
+help() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --help
+}
+runtest help
+
+# https://github.com/rust-lang/rust-installer/issues/31
+CDPATH_does_not_destroy_things() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ cd "$WORK_DIR" || exit 1
+ export CDPATH="../$(basename $WORK_DIR)/foo"
+ try sh "package/install.sh" --prefix="$PREFIX_DIR"
+ cd "$S" || exit 1
+ cd "$PREFIX_DIR" || exit 1
+ export CDPATH="../$(basename $PREFIX_DIR)"
+ try sh "lib/packagelib/uninstall.sh"
+ cd "$S" || exit 1
+ unset CDPATH
+}
+runtest CDPATH_does_not_destroy_things
+
+docdir_default() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image-docdir1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR"
+ try test -e "$PREFIX_DIR/share/doc/rust/README"
+ try test -e "$PREFIX_DIR/share/doc/rust/rustdocs.txt"
+}
+runtest docdir_default
+
+docdir() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image-docdir1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR"
+ try mkdir "$WORK_DIR/docdir"
+ try "$WORK_DIR/package/install.sh" --prefix="$PREFIX_DIR" --docdir="$WORK_DIR/docdir"
+ try test -e "$WORK_DIR/docdir/README"
+ try test -e "$WORK_DIR/docdir/rustdocs.txt"
+}
+runtest docdir
+
+docdir_combined() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image-docdir1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name="rustc" \
+ --component-name="rustc"
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image-docdir2" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name="cargo" \
+ --component-name="cargo"
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz"
+ try mkdir "$WORK_DIR/docdir"
+ try "$WORK_DIR/rust/install.sh" --prefix="$PREFIX_DIR" --docdir="$WORK_DIR/docdir"
+ try test -e "$WORK_DIR/docdir/README"
+ try test -e "$WORK_DIR/docdir/rustdocs.txt"
+ try test -e "$WORK_DIR/docdir/README"
+ try test -e "$WORK_DIR/docdir/cargodocs.txt"
+}
+runtest docdir_combined
+
+combine_installers_different_input_compression_formats() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc \
+ --compression-formats=xz
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo \
+ --compression-formats=gz
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.xz,$OUT_DIR/cargo.tar.gz"
+
+ try test -e "${OUT_DIR}/rust.tar.gz"
+ try test -e "${OUT_DIR}/rust.tar.xz"
+}
+runtest combine_installers_different_input_compression_formats
+
+generate_compression_formats_one() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name="rustc" \
+ --component-name="rustc" \
+ --compression-formats="xz"
+
+ try test ! -e "${OUT_DIR}/rustc.tar.gz"
+ try test -e "${OUT_DIR}/rustc.tar.xz"
+}
+runtest generate_compression_formats_one
+
+generate_compression_formats_multiple() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name="rustc" \
+ --component-name="rustc" \
+ --compression-formats="gz,xz"
+
+ try test -e "${OUT_DIR}/rustc.tar.gz"
+ try test -e "${OUT_DIR}/rustc.tar.xz"
+}
+runtest generate_compression_formats_multiple
+
+generate_compression_formats_error() {
+ expect_fail sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name="rustc" \
+ --component-name="rustc" \
+ --compression-formats="xz,foobar"
+}
+runtest generate_compression_formats_error
+
+combine_compression_formats_one() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz" \
+ --compression-formats=xz
+
+ try test ! -e "${OUT_DIR}/rust.tar.gz"
+ try test -e "${OUT_DIR}/rust.tar.xz"
+}
+runtest combine_compression_formats_one
+
+combine_compression_formats_multiple() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ try sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz" \
+ --compression-formats=xz,gz
+
+ try test -e "${OUT_DIR}/rust.tar.gz"
+ try test -e "${OUT_DIR}/rust.tar.xz"
+}
+runtest combine_compression_formats_multiple
+
+combine_compression_formats_error() {
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image1" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rustc \
+ --component-name=rustc
+ try sh "$S/gen-installer.sh" \
+ --image-dir="$TEST_DIR/image3" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=cargo \
+ --component-name=cargo
+ expect_fail sh "$S/combine-installers.sh" \
+ --work-dir="$WORK_DIR" \
+ --output-dir="$OUT_DIR" \
+ --package-name=rust \
+ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz" \
+ --compression-formats=xz,foobar
+}
+runtest combine_compression_formats_error
+
+tarball_compression_formats_one() {
+ try cp -r "${TEST_DIR}/image1" "${WORK_DIR}/image"
+ try sh "$S/make-tarballs.sh" \
+ --input="${WORK_DIR}/image" \
+ --work-dir="${WORK_DIR}" \
+ --output="${OUT_DIR}/rustc" \
+ --compression-formats="xz"
+
+ try test ! -e "${OUT_DIR}/rustc.tar.gz"
+ try test -e "${OUT_DIR}/rustc.tar.xz"
+}
+runtest tarball_compression_formats_one
+
+tarball_compression_formats_multiple() {
+ try cp -r "${TEST_DIR}/image1" "${WORK_DIR}/image"
+ try sh "$S/make-tarballs.sh" \
+ --input="${WORK_DIR}/image" \
+ --work-dir="${WORK_DIR}" \
+ --output="${OUT_DIR}/rustc" \
+ --compression-formats="xz,gz"
+
+ try test -e "${OUT_DIR}/rustc.tar.gz"
+ try test -e "${OUT_DIR}/rustc.tar.xz"
+}
+runtest tarball_compression_formats_multiple
+
+tarball_compression_formats_error() {
+ try cp -r "${TEST_DIR}/image1" "${WORK_DIR}/image"
+ expect_fail sh "$S/make-tarballs.sh" \
+ --input="${WORK_DIR}/image" \
+ --work-dir="${WORK_DIR}" \
+ --output="${OUT_DIR}/rustc" \
+ --compression-formats="xz,foobar"
+}
+runtest tarball_compression_formats_error
+
+echo
+echo "TOTAL SUCCESS!"
+echo
diff --git a/src/tools/rust-installer/test/image-docdir1/share/doc/rust/README b/src/tools/rust-installer/test/image-docdir1/share/doc/rust/README
new file mode 100644
index 000000000..871732e64
--- /dev/null
+++ b/src/tools/rust-installer/test/image-docdir1/share/doc/rust/README
@@ -0,0 +1 @@
+rust
diff --git a/src/tools/rust-installer/test/image-docdir1/share/doc/rust/rustdocs.txt b/src/tools/rust-installer/test/image-docdir1/share/doc/rust/rustdocs.txt
new file mode 100644
index 000000000..871732e64
--- /dev/null
+++ b/src/tools/rust-installer/test/image-docdir1/share/doc/rust/rustdocs.txt
@@ -0,0 +1 @@
+rust
diff --git a/src/tools/rust-installer/test/image-docdir2/share/doc/cargo/README b/src/tools/rust-installer/test/image-docdir2/share/doc/cargo/README
new file mode 100644
index 000000000..033a48caf
--- /dev/null
+++ b/src/tools/rust-installer/test/image-docdir2/share/doc/cargo/README
@@ -0,0 +1 @@
+cargo
diff --git a/src/tools/rust-installer/test/image-docdir2/share/doc/cargo/cargodocs.txt b/src/tools/rust-installer/test/image-docdir2/share/doc/cargo/cargodocs.txt
new file mode 100644
index 000000000..033a48caf
--- /dev/null
+++ b/src/tools/rust-installer/test/image-docdir2/share/doc/cargo/cargodocs.txt
@@ -0,0 +1 @@
+cargo
diff --git a/src/tools/rust-installer/test/image1/bin/bad-bin b/src/tools/rust-installer/test/image1/bin/bad-bin
new file mode 100644
index 000000000..b5b0e3234
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/bin/bad-bin
@@ -0,0 +1 @@
+#!/bin/bogus
\ No newline at end of file
diff --git a/src/tools/rust-installer/test/image1/bin/program b/src/tools/rust-installer/test/image1/bin/program
new file mode 100755
index 000000000..96b4b06ad
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/bin/program
@@ -0,0 +1 @@
+#!/bin/sh
\ No newline at end of file
diff --git a/src/tools/rust-installer/test/image1/bin/program2 b/src/tools/rust-installer/test/image1/bin/program2
new file mode 100755
index 000000000..96b4b06ad
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/bin/program2
@@ -0,0 +1 @@
+#!/bin/sh
\ No newline at end of file
diff --git a/src/tools/rust-installer/test/image1/dir-to-install/foo b/src/tools/rust-installer/test/image1/dir-to-install/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/dir-to-install/foo
diff --git a/src/tools/rust-installer/test/image1/dir-to-not-install/foo b/src/tools/rust-installer/test/image1/dir-to-not-install/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/dir-to-not-install/foo
diff --git a/src/tools/rust-installer/test/image1/something-to-install b/src/tools/rust-installer/test/image1/something-to-install
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/something-to-install
diff --git a/src/tools/rust-installer/test/image1/something-to-not-install b/src/tools/rust-installer/test/image1/something-to-not-install
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image1/something-to-not-install
diff --git a/src/tools/rust-installer/test/image2/bin/oldprogram b/src/tools/rust-installer/test/image2/bin/oldprogram
new file mode 100755
index 000000000..96b4b06ad
--- /dev/null
+++ b/src/tools/rust-installer/test/image2/bin/oldprogram
@@ -0,0 +1 @@
+#!/bin/sh
\ No newline at end of file
diff --git a/src/tools/rust-installer/test/image2/dir-to-install/bar b/src/tools/rust-installer/test/image2/dir-to-install/bar
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image2/dir-to-install/bar
diff --git a/src/tools/rust-installer/test/image2/something-to-install b/src/tools/rust-installer/test/image2/something-to-install
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image2/something-to-install
diff --git a/src/tools/rust-installer/test/image3/bin/cargo b/src/tools/rust-installer/test/image3/bin/cargo
new file mode 100755
index 000000000..96b4b06ad
--- /dev/null
+++ b/src/tools/rust-installer/test/image3/bin/cargo
@@ -0,0 +1 @@
+#!/bin/sh
\ No newline at end of file
diff --git a/src/tools/rust-installer/test/image4/baz b/src/tools/rust-installer/test/image4/baz
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image4/baz
diff --git a/src/tools/rust-installer/test/image4/dir-to-install/qux/bar b/src/tools/rust-installer/test/image4/dir-to-install/qux/bar
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image4/dir-to-install/qux/bar
diff --git a/src/tools/rust-installer/test/image5/dir-to-install/foo b/src/tools/rust-installer/test/image5/dir-to-install/foo
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rust-installer/test/image5/dir-to-install/foo
diff --git a/src/tools/rust-installer/triagebot.toml b/src/tools/rust-installer/triagebot.toml
new file mode 100644
index 000000000..fa0824ac5
--- /dev/null
+++ b/src/tools/rust-installer/triagebot.toml
@@ -0,0 +1 @@
+[assign]
diff --git a/src/tools/rustbook/Cargo.toml b/src/tools/rustbook/Cargo.toml
new file mode 100644
index 000000000..33c051804
--- /dev/null
+++ b/src/tools/rustbook/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "rustbook"
+version = "0.1.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+
+[dependencies]
+clap = "3.1.1"
+env_logger = "0.7.1"
+
+[dependencies.mdbook]
+version = "0.4.21"
+default-features = false
+features = ["search"]
diff --git a/src/tools/rustbook/src/main.rs b/src/tools/rustbook/src/main.rs
new file mode 100644
index 000000000..3c7dc0183
--- /dev/null
+++ b/src/tools/rustbook/src/main.rs
@@ -0,0 +1,102 @@
+use clap::crate_version;
+
+use std::env;
+use std::path::{Path, PathBuf};
+
+use clap::{arg, ArgMatches, Command};
+
+use mdbook::errors::Result as Result3;
+use mdbook::MDBook;
+
+fn main() {
+ let crate_version = format!("v{}", crate_version!());
+ env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("warn")).init();
+ let d_arg = arg!(-d --"dest-dir" <DEST_DIR>
+"The output directory for your book\n(Defaults to ./book when omitted)")
+ .required(false);
+ let dir_arg = arg!([dir]
+"A directory for your book\n(Defaults to Current Directory when omitted)");
+
+ let matches = Command::new("rustbook")
+ .about("Build a book with mdBook")
+ .author("Steve Klabnik <steve@steveklabnik.com>")
+ .version(&*crate_version)
+ .subcommand_required(true)
+ .arg_required_else_help(true)
+ .subcommand(
+ Command::new("build")
+ .about("Build the book from the markdown files")
+ .arg(d_arg)
+ .arg(&dir_arg),
+ )
+ .subcommand(
+ Command::new("test")
+ .about("Tests that a book's Rust code samples compile")
+ .arg(dir_arg),
+ )
+ .get_matches();
+
+    // Check which subcommand the user ran...
+ match matches.subcommand() {
+ Some(("build", sub_matches)) => {
+ if let Err(e) = build(sub_matches) {
+ handle_error(e);
+ }
+ }
+ Some(("test", sub_matches)) => {
+ if let Err(e) = test(sub_matches) {
+ handle_error(e);
+ }
+ }
+ _ => unreachable!(),
+ };
+}
+
+// Build command implementation
+pub fn build(args: &ArgMatches) -> Result3<()> {
+ let book_dir = get_book_dir(args);
+ let mut book = load_book(&book_dir)?;
+
+ // Set this to allow us to catch bugs in advance.
+ book.config.build.create_missing = false;
+
+ if let Some(dest_dir) = args.value_of("dest-dir") {
+ book.config.build.build_dir = PathBuf::from(dest_dir);
+ }
+
+ book.build()?;
+
+ Ok(())
+}
+
+fn test(args: &ArgMatches) -> Result3<()> {
+ let book_dir = get_book_dir(args);
+ let mut book = load_book(&book_dir)?;
+ book.test(vec![])
+}
+
+fn get_book_dir(args: &ArgMatches) -> PathBuf {
+ if let Some(dir) = args.value_of("dir") {
+ // Check if path is relative from current dir, or absolute...
+ let p = Path::new(dir);
+ if p.is_relative() { env::current_dir().unwrap().join(dir) } else { p.to_path_buf() }
+ } else {
+ env::current_dir().unwrap()
+ }
+}
+
+fn load_book(book_dir: &Path) -> Result3<MDBook> {
+ let mut book = MDBook::load(book_dir)?;
+ book.config.set("output.html.input-404", "").unwrap();
+ Ok(book)
+}
+
+fn handle_error(error: mdbook::errors::Error) -> ! {
+ eprintln!("Error: {}", error);
+
+ for cause in error.chain().skip(1) {
+ eprintln!("\tCaused By: {}", cause);
+ }
+
+ ::std::process::exit(101);
+}
diff --git a/src/tools/rustc-workspace-hack/Cargo.toml b/src/tools/rustc-workspace-hack/Cargo.toml
new file mode 100644
index 000000000..b1d8b8649
--- /dev/null
+++ b/src/tools/rustc-workspace-hack/Cargo.toml
@@ -0,0 +1,100 @@
+[package]
+name = "rustc-workspace-hack"
+version = "1.0.0"
+license = 'MIT OR Apache-2.0'
+description = """
+Hack for the compiler's own build system
+"""
+edition = "2021"
+
+[lib]
+path = "lib.rs"
+
+# For documentation about what this is and why in the world these dependencies
+# are appearing, see `README.md`.
+
+[target.'cfg(windows)'.dependencies.winapi]
+version = "0.3"
+features = [
+ "accctrl",
+ "aclapi",
+ "basetsd",
+ "cfg",
+ "consoleapi",
+ "errhandlingapi",
+ "evntrace",
+ "fibersapi",
+ "in6addr",
+ "inaddr",
+ "ioapiset",
+ "jobapi",
+ "jobapi2",
+ "knownfolders",
+ "libloaderapi",
+ "lmcons",
+ "memoryapi",
+ "minschannel",
+ "minwinbase",
+ "mstcpip",
+ "mswsock",
+ "namedpipeapi",
+ "ntdef",
+ "ntsecapi",
+ "ntstatus",
+ "objbase",
+ "processenv",
+ "processthreadsapi",
+ "profileapi",
+ "psapi",
+ "schannel",
+ "securitybaseapi",
+ "shellapi",
+ "shlobj",
+ "sspi",
+ "synchapi",
+ "sysinfoapi",
+ "threadpoollegacyapiset",
+ "timezoneapi",
+ "userenv",
+ "winbase",
+ "wincon",
+ "wincrypt",
+ "windef",
+ "winioctl",
+ "winnt",
+ "winreg",
+ "winsock2",
+ "winuser",
+ "ws2def",
+ "ws2ipdef",
+ "ws2tcpip",
+]
+
+[dependencies]
+bstr = { version = "0.2.17", features = ["default"] }
+byteorder = { version = "1", features = ['default', 'std'] }
+clap = { version = "3.1.1", features = ["derive", "clap_derive"]}
+curl-sys = { version = "0.4.13", features = ["http2", "libnghttp2-sys"], optional = true }
+crossbeam-utils = { version = "0.8.0", features = ["nightly"] }
+libc = { version = "0.2.79", features = ["align"] }
+# Ensure default features of libz-sys, which are disabled in some scenarios.
+libz-sys = { version = "1.1.2" }
+# The only user of memchr's deprecated `use_std` feature is `combine`, so this can be
+# removed if/when https://github.com/Marwes/combine/pull/348 is merged and released.
+memchr = { version = "2.5", features = ["std", "use_std"] }
+# Ensure default features of regex, which are disabled in some scenarios.
+regex = { version = "1.5.6" }
+proc-macro2 = { version = "1", features = ["default"] }
+quote = { version = "1", features = ["default"] }
+rand_core_0_5 = { package = "rand_core", version = "0.5.1", features = ["getrandom", "alloc", "std"] }
+serde = { version = "1.0.82", features = ['derive'] }
+serde_json = { version = "1.0.31", features = ["raw_value", "unbounded_depth"] }
+smallvec = { version = "1.8.1", features = ['union', 'may_dangle'] }
+syn = { version = "1", features = ['fold', 'full', 'extra-traits', 'visit', 'visit-mut'] }
+url = { version = "2.0", features = ['serde'] }
+
+[target.'cfg(not(windows))'.dependencies]
+openssl = { version = "0.10.35", optional = true }
+
+[features]
+all-static = ['openssl/vendored', 'curl-sys/static-curl', 'curl-sys/force-system-lib-on-osx']
diff --git a/src/tools/rustc-workspace-hack/README.md b/src/tools/rustc-workspace-hack/README.md
new file mode 100644
index 000000000..4a5286fae
--- /dev/null
+++ b/src/tools/rustc-workspace-hack/README.md
@@ -0,0 +1,25 @@
+# `rustc-workspace-hack`
+
+This crate is a bit of a hack to make workspaces in rustc work a bit better.
+The rationale for its existence is a bit subtle, but the general idea is that
+we want commands like `./x.py build src/tools/{rls,clippy,cargo}` to share as
+many dependencies as possible.
+
+Each of those, however, is a separate invocation of Cargo. Each time Cargo
+runs a build it re-resolves the dependency graph, and it may select
+different features for each build.
+
+For example, let's say there's a very deep dependency like `num-traits` in each
+of these builds. For Cargo, the `default` feature of `num-traits` is turned off.
+In RLS, however, the `default` feature is turned on. This means that building Cargo
+and then the RLS will actually build Cargo twice (as a transitive dependency
+changed). This is bad!
+
+The goal of this crate is to solve this problem and ensure that the resolved
+dependency graph for all of these tools is the same in the various subsets of
+each tool, notably enabling the same features of transitive dependencies.
+
+All tools vendored here depend on the `rustc-workspace-hack` crate on crates.io.
+On crates.io this is an empty crate that does nothing. We override it,
+however, in this workspace to point at this local crate, which means we can
+control the crates in the dependency graph for each of these tools.
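For illustration, here is a hedged sketch of the two halves of that mechanism. Neither manifest below is part of this diff, so treat the exact paths, versions, and section placement as assumptions rather than the literal upstream files:

```toml
# Sketch: in a vendored tool's Cargo.toml (e.g. Cargo or RLS), a plain
# dependency on the empty crates.io stub.
[dependencies]
rustc-workspace-hack = "1.0.0"

# Sketch: in the workspace-root Cargo.toml, the stub is patched to this local
# crate, so every tool resolves the same unified set of transitive features.
[patch.crates-io]
rustc-workspace-hack = { path = "src/tools/rustc-workspace-hack" }
```

With such a patch in place, enabling a feature in the hack's dependency list (as in the `Cargo.toml` added above) forces that feature on for every tool built from this workspace, keeping their resolved dependency graphs identical.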
diff --git a/src/tools/rustc-workspace-hack/lib.rs b/src/tools/rustc-workspace-hack/lib.rs
new file mode 100644
index 000000000..44425d9c1
--- /dev/null
+++ b/src/tools/rustc-workspace-hack/lib.rs
@@ -0,0 +1 @@
+// intentionally left blank
diff --git a/src/tools/rustdoc-gui/tester.js b/src/tools/rustdoc-gui/tester.js
new file mode 100644
index 000000000..a51218503
--- /dev/null
+++ b/src/tools/rustdoc-gui/tester.js
@@ -0,0 +1,278 @@
+// This package needs to be installed:
+//
+// ```
+// npm install browser-ui-test
+// ```
+
+const fs = require("fs");
+const path = require("path");
+const os = require('os');
+const {Options, runTest} = require('browser-ui-test');
+
+function showHelp() {
+ console.log("rustdoc-js options:");
+ console.log(" --doc-folder [PATH] : location of the generated doc folder");
+ console.log(" --file [PATH] : file to run (can be repeated)");
+ console.log(" --debug : show extra information about script run");
+ console.log(" --show-text : render font in pages");
+ console.log(" --no-headless : disable headless mode");
+ console.log(" --no-sandbox : disable sandbox mode");
+ console.log(" --help : show this message then quit");
+ console.log(" --tests-folder [PATH] : location of the .GOML tests folder");
+ console.log(" --jobs [NUMBER] : number of threads to run tests on");
+ console.log(" --executable-path [PATH] : path of the browser's executable to be used");
+}
+
+function isNumeric(s) {
+ return /^\d+$/.test(s);
+}
+
+function parseOptions(args) {
+ var opts = {
+ "doc_folder": "",
+ "tests_folder": "",
+ "files": [],
+ "debug": false,
+ "show_text": false,
+ "no_headless": false,
+ "jobs": -1,
+ "executable_path": null,
+ "no_sandbox": false,
+ };
+ var correspondances = {
+ "--doc-folder": "doc_folder",
+ "--tests-folder": "tests_folder",
+ "--debug": "debug",
+ "--show-text": "show_text",
+ "--no-headless": "no_headless",
+ "--executable-path": "executable_path",
+ "--no-sandbox": "no_sandbox",
+ };
+
+ for (var i = 0; i < args.length; ++i) {
+ if (args[i] === "--doc-folder"
+ || args[i] === "--tests-folder"
+ || args[i] === "--file"
+ || args[i] === "--jobs"
+ || args[i] === "--executable-path") {
+ i += 1;
+ if (i >= args.length) {
+ console.log("Missing argument after `" + args[i - 1] + "` option.");
+ return null;
+ }
+ if (args[i - 1] === "--jobs") {
+ if (!isNumeric(args[i])) {
+ console.log(
+ "`--jobs` option expects a positive number, found `" + args[i] + "`");
+ return null;
+ }
+ opts["jobs"] = parseInt(args[i]);
+ } else if (args[i - 1] !== "--file") {
+ opts[correspondances[args[i - 1]]] = args[i];
+ } else {
+ opts["files"].push(args[i]);
+ }
+ } else if (args[i] === "--help") {
+ showHelp();
+ process.exit(0);
+ } else if (args[i] === "--no-sandbox") {
+ console.log("`--no-sandbox` is being used. Be very careful!");
+ opts[correspondances[args[i]]] = true;
+ } else if (correspondances[args[i]]) {
+ opts[correspondances[args[i]]] = true;
+ } else {
+ console.log("Unknown option `" + args[i] + "`.");
+ console.log("Use `--help` to see the list of options");
+ return null;
+ }
+ }
+ if (opts["tests_folder"].length < 1) {
+ console.log("Missing `--tests-folder` option.");
+ } else if (opts["doc_folder"].length < 1) {
+ console.log("Missing `--doc-folder` option.");
+ } else {
+ return opts;
+ }
+ return null;
+}
+
+/// Print single char status information without \n
+function char_printer(n_tests) {
+ const max_per_line = 10;
+ let current = 0;
+ return {
+ successful: function() {
+ current += 1;
+ if (current % max_per_line === 0) {
+ process.stdout.write(`. (${current}/${n_tests})${os.EOL}`);
+ } else {
+ process.stdout.write(".");
+ }
+ },
+ erroneous: function() {
+ current += 1;
+ if (current % max_per_line === 0) {
+ process.stderr.write(`F (${current}/${n_tests})${os.EOL}`);
+ } else {
+ process.stderr.write("F");
+ }
+ },
+ finish: function() {
+ if (current % max_per_line === 0) {
+ // Don't output if we are already at a matching line end
+ console.log("");
+ } else {
+ const spaces = " ".repeat(max_per_line - (current % max_per_line));
+ process.stdout.write(`${spaces} (${current}/${n_tests})${os.EOL}${os.EOL}`);
+ }
+ },
+ };
+}
+
+/// Sort array by .file_name property
+function by_filename(a, b) {
+    return a.file_name.localeCompare(b.file_name);
+}
+
+async function main(argv) {
+ let opts = parseOptions(argv.slice(2));
+ if (opts === null) {
+ process.exit(1);
+ }
+
+ // Print successful tests too
+ let debug = false;
+    // Run tests sequentially
+ let headless = true;
+ const options = new Options();
+ try {
+        // This is more convenient than setting fields one by one.
+ let args = [
+ "--variable", "DOC_PATH", opts["doc_folder"], "--enable-fail-on-js-error",
+ ];
+ if (opts["debug"]) {
+ debug = true;
+ args.push("--debug");
+ }
+ if (opts["show_text"]) {
+ args.push("--show-text");
+ }
+ if (opts["no_sandbox"]) {
+ args.push("--no-sandbox");
+ }
+ if (opts["no_headless"]) {
+ args.push("--no-headless");
+ headless = false;
+ }
+ if (opts["executable_path"] !== null) {
+ args.push("--executable-path");
+ args.push(opts["executable_path"]);
+ }
+ options.parseArguments(args);
+ } catch (error) {
+ console.error(`invalid argument: ${error}`);
+ process.exit(1);
+ }
+
+ let failed = false;
+ let files;
+ if (opts["files"].length === 0) {
+ files = fs.readdirSync(opts["tests_folder"]);
+ } else {
+ files = opts["files"];
+ }
+ files = files.filter(file => path.extname(file) == ".goml");
+ if (files.length === 0) {
+ console.error("rustdoc-gui: No test selected");
+ process.exit(2);
+ }
+ files.sort();
+
+ if (!headless) {
+ opts["jobs"] = 1;
+ console.log("`--no-headless` option is active, disabling concurrency for running tests.");
+ }
+
+ console.log(`Running ${files.length} rustdoc-gui (${opts["jobs"]} concurrently) ...`);
+
+ if (opts["jobs"] < 1) {
+ process.setMaxListeners(files.length + 1);
+ } else if (headless) {
+ process.setMaxListeners(opts["jobs"] + 1);
+ }
+
+ const tests_queue = [];
+ let results = {
+ successful: [],
+ failed: [],
+ errored: [],
+ };
+ const status_bar = char_printer(files.length);
+ for (let i = 0; i < files.length; ++i) {
+ const file_name = files[i];
+ const testPath = path.join(opts["tests_folder"], file_name);
+ const callback = runTest(testPath, options)
+ .then(out => {
+ const [output, nb_failures] = out;
+ results[nb_failures === 0 ? "successful" : "failed"].push({
+ file_name: testPath,
+ output: output,
+ });
+ if (nb_failures > 0) {
+ status_bar.erroneous();
+ failed = true;
+ } else {
+ status_bar.successful();
+ }
+ })
+ .catch(err => {
+ results.errored.push({
+                    file_name: testPath,
+ output: err,
+ });
+ status_bar.erroneous();
+ failed = true;
+ })
+ .finally(() => {
+ // We now remove the promise from the tests_queue.
+ tests_queue.splice(tests_queue.indexOf(callback), 1);
+ });
+ tests_queue.push(callback);
+ if (opts["jobs"] > 0 && tests_queue.length >= opts["jobs"]) {
+ await Promise.race(tests_queue);
+ }
+ }
+ if (tests_queue.length > 0) {
+ await Promise.all(tests_queue);
+ }
+ status_bar.finish();
+
+ if (debug) {
+ results.successful.sort(by_filename);
+ results.successful.forEach(r => {
+ console.log(r.output);
+ });
+ }
+
+ if (results.failed.length > 0) {
+ console.log("");
+ results.failed.sort(by_filename);
+ results.failed.forEach(r => {
+ console.log(r.file_name, r.output);
+ });
+ }
+ if (results.errored.length > 0) {
+ console.log(os.EOL);
+ // print run errors on the bottom so developers see them better
+ results.errored.sort(by_filename);
+ results.errored.forEach(r => {
+ console.error(r.file_name, r.output);
+ });
+ }
+
+ if (failed) {
+ process.exit(1);
+ }
+}
+
+main(process.argv);
diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js
new file mode 100644
index 000000000..df3185758
--- /dev/null
+++ b/src/tools/rustdoc-js/tester.js
@@ -0,0 +1,418 @@
+const fs = require('fs');
+const path = require('path');
+
+function loadContent(content) {
+ var Module = module.constructor;
+ var m = new Module();
+ m._compile(content, "tmp.js");
+ m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1 ||
+ content.startsWith("// ignore-order\n");
+ m.exports.exact_check = content.indexOf("\n// exact-check\n") !== -1 ||
+ content.startsWith("// exact-check\n");
+ m.exports.should_fail = content.indexOf("\n// should-fail\n") !== -1 ||
+ content.startsWith("// should-fail\n");
+ return m.exports;
+}
+
+function readFile(filePath) {
+ return fs.readFileSync(filePath, 'utf8');
+}
+
+function contentToDiffLine(key, value) {
+ return `"${key}": "${value}",`;
+}
+
+// This function is only called when no matching result was found and therefore will only display
+// the diff between the two items.
+function betterLookingDiff(entry, data) {
+ let output = ' {\n';
+ let spaces = ' ';
+ for (let key in entry) {
+ if (!entry.hasOwnProperty(key)) {
+ continue;
+ }
+ if (!data || !data.hasOwnProperty(key)) {
+ output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
+ continue;
+ }
+ let value = data[key];
+ if (value !== entry[key]) {
+ output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
+ output += '+' + spaces + contentToDiffLine(key, value) + '\n';
+ } else {
+ output += spaces + contentToDiffLine(key, value) + '\n';
+ }
+ }
+ return output + ' }';
+}
+
+function lookForEntry(entry, data) {
+ for (var i = 0; i < data.length; ++i) {
+ var allGood = true;
+ for (var key in entry) {
+ if (!entry.hasOwnProperty(key)) {
+ continue;
+ }
+ var value = data[i][key];
+ // To make our life easier, if there is a "parent" type, we add it to the path.
+ if (key === 'path' && data[i]['parent'] !== undefined) {
+ if (value.length > 0) {
+ value += '::' + data[i]['parent']['name'];
+ } else {
+ value = data[i]['parent']['name'];
+ }
+ }
+ if (value !== entry[key]) {
+ allGood = false;
+ break;
+ }
+ }
+ if (allGood === true) {
+ return i;
+ }
+ }
+ return null;
+}
+
+// This function checks if `expected` has all the required fields needed for the checks.
+function checkNeededFields(fullPath, expected, error_text, queryName, position) {
+ let fieldsToCheck;
+ if (fullPath.length === 0) {
+ fieldsToCheck = [
+ "foundElems",
+ "original",
+ "returned",
+ "typeFilter",
+ "userQuery",
+ "error",
+ ];
+ } else if (fullPath.endsWith("elems") || fullPath.endsWith("generics")) {
+ fieldsToCheck = [
+ "name",
+ "fullPath",
+ "pathWithoutLast",
+ "pathLast",
+ "generics",
+ ];
+ } else {
+ fieldsToCheck = [];
+ }
+ for (var i = 0; i < fieldsToCheck.length; ++i) {
+ const field = fieldsToCheck[i];
+ if (!expected.hasOwnProperty(field)) {
+ let text = `${queryName}==> Mandatory key \`${field}\` is not present`;
+ if (fullPath.length > 0) {
+ text += ` in field \`${fullPath}\``;
+ if (position != null) {
+ text += ` (position ${position})`;
+ }
+ }
+ error_text.push(text);
+ }
+ }
+}
+
+function valueCheck(fullPath, expected, result, error_text, queryName) {
+ if (Array.isArray(expected)) {
+ for (var i = 0; i < expected.length; ++i) {
+ checkNeededFields(fullPath, expected[i], error_text, queryName, i);
+ if (i >= result.length) {
+ error_text.push(`${queryName}==> EXPECTED has extra value in array from field ` +
+ `\`${fullPath}\` (position ${i}): \`${JSON.stringify(expected[i])}\``);
+ } else {
+ valueCheck(fullPath + '[' + i + ']', expected[i], result[i], error_text, queryName);
+ }
+ }
+ for (; i < result.length; ++i) {
+ error_text.push(`${queryName}==> RESULT has extra value in array from field ` +
+ `\`${fullPath}\` (position ${i}): \`${JSON.stringify(result[i])}\` ` +
+ 'compared to EXPECTED');
+ }
+ } else if (expected !== null && typeof expected !== "undefined" &&
+ expected.constructor == Object) {
+ for (const key in expected) {
+ if (!expected.hasOwnProperty(key)) {
+ continue;
+ }
+ if (!result.hasOwnProperty(key)) {
+ error_text.push('==> Unknown key "' + key + '"');
+ break;
+ }
+ const obj_path = fullPath + (fullPath.length > 0 ? '.' : '') + key;
+ valueCheck(obj_path, expected[key], result[key], error_text, queryName);
+ }
+ } else {
+ expectedValue = JSON.stringify(expected);
+ resultValue = JSON.stringify(result);
+ if (expectedValue != resultValue) {
+ error_text.push(`${queryName}==> Different values for field \`${fullPath}\`:\n` +
+ `EXPECTED: \`${expectedValue}\`\nRESULT: \`${resultValue}\``);
+ }
+ }
+}
+
+function runParser(query, expected, parseQuery, queryName) {
+ var error_text = [];
+ checkNeededFields("", expected, error_text, queryName, null);
+ if (error_text.length === 0) {
+ valueCheck('', expected, parseQuery(query), error_text, queryName);
+ }
+ return error_text;
+}
+
+function runSearch(query, expected, doSearch, loadedFile, queryName) {
+ const ignore_order = loadedFile.ignore_order;
+ const exact_check = loadedFile.exact_check;
+
+ var results = doSearch(query, loadedFile.FILTER_CRATE);
+ var error_text = [];
+
+ for (var key in expected) {
+ if (!expected.hasOwnProperty(key)) {
+ continue;
+ }
+ if (!results.hasOwnProperty(key)) {
+ error_text.push('==> Unknown key "' + key + '"');
+ break;
+ }
+ var entry = expected[key];
+
+ if (exact_check == true && entry.length !== results[key].length) {
+ error_text.push(queryName + "==> Expected exactly " + entry.length +
+ " results but found " + results[key].length + " in '" + key + "'");
+ }
+
+ var prev_pos = -1;
+ for (var i = 0; i < entry.length; ++i) {
+ var entry_pos = lookForEntry(entry[i], results[key]);
+ if (entry_pos === null) {
+ error_text.push(queryName + "==> Result not found in '" + key + "': '" +
+ JSON.stringify(entry[i]) + "'");
+ // By default, we just compare the two first items.
+ let item_to_diff = 0;
+ if ((ignore_order === false || exact_check === true) && i < results[key].length) {
+ item_to_diff = i;
+ }
+ error_text.push("Diff of first error:\n" +
+ betterLookingDiff(entry[i], results[key][item_to_diff]));
+ } else if (exact_check === true && prev_pos + 1 !== entry_pos) {
+ error_text.push(queryName + "==> Exact check failed at position " + (prev_pos + 1) +
+ ": expected '" + JSON.stringify(entry[i]) + "' but found '" +
+ JSON.stringify(results[key][i]) + "'");
+ } else if (ignore_order === false && entry_pos < prev_pos) {
+ error_text.push(queryName + "==> '" + JSON.stringify(entry[i]) + "' was supposed " +
+ "to be before '" + JSON.stringify(results[key][entry_pos]) + "'");
+ } else {
+ prev_pos = entry_pos;
+ }
+ }
+ }
+ return error_text;
+}
+
+function checkResult(error_text, loadedFile, displaySuccess) {
+ if (error_text.length === 0 && loadedFile.should_fail === true) {
+ console.log("FAILED");
+ console.log("==> Test was supposed to fail but all items were found...");
+ } else if (error_text.length !== 0 && loadedFile.should_fail === false) {
+ console.log("FAILED");
+ console.log(error_text.join("\n"));
+ } else {
+ if (displaySuccess) {
+ console.log("OK");
+ }
+ return 0;
+ }
+ return 1;
+}
+
+function runCheck(loadedFile, key, callback) {
+ const expected = loadedFile[key];
+ const query = loadedFile.QUERY;
+
+ if (Array.isArray(query)) {
+ if (!Array.isArray(expected)) {
+ console.log("FAILED");
+ console.log(`==> If QUERY variable is an array, ${key} should be an array too`);
+ return 1;
+ } else if (query.length !== expected.length) {
+ console.log("FAILED");
+ console.log(`==> QUERY variable should have the same length as ${key}`);
+ return 1;
+ }
+ for (var i = 0; i < query.length; ++i) {
+ var error_text = callback(query[i], expected[i], "[ query `" + query[i] + "`]");
+ if (checkResult(error_text, loadedFile, false) !== 0) {
+ return 1;
+ }
+ }
+ console.log("OK");
+ } else {
+ var error_text = callback(query, expected, "");
+ if (checkResult(error_text, loadedFile, true) !== 0) {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+function runChecks(testFile, doSearch, parseQuery) {
+ var checkExpected = false;
+ var checkParsed = false;
+ var testFileContent = readFile(testFile) + 'exports.QUERY = QUERY;';
+
+ if (testFileContent.indexOf("FILTER_CRATE") !== -1) {
+ testFileContent += "exports.FILTER_CRATE = FILTER_CRATE;";
+ } else {
+ testFileContent += "exports.FILTER_CRATE = null;";
+ }
+
+ if (testFileContent.indexOf("\nconst EXPECTED") !== -1) {
+ testFileContent += 'exports.EXPECTED = EXPECTED;';
+ checkExpected = true;
+ }
+ if (testFileContent.indexOf("\nconst PARSED") !== -1) {
+ testFileContent += 'exports.PARSED = PARSED;';
+ checkParsed = true;
+ }
+ if (!checkParsed && !checkExpected) {
+ console.log("FAILED");
+ console.log("==> At least `PARSED` or `EXPECTED` is needed!");
+ return 1;
+ }
+
+ const loadedFile = loadContent(testFileContent);
+ var res = 0;
+
+ if (checkExpected) {
+ res += runCheck(loadedFile, "EXPECTED", (query, expected, text) => {
+ return runSearch(query, expected, doSearch, loadedFile, text);
+ });
+ }
+ if (checkParsed) {
+ res += runCheck(loadedFile, "PARSED", (query, expected, text) => {
+ return runParser(query, expected, parseQuery, text);
+ });
+ }
+ return res;
+}
+
+/**
+ * Load searchNNN.js and search-indexNNN.js.
+ *
+ * @param {string} doc_folder - Path to a folder generated by running rustdoc
+ * @param {string} resource_suffix - Version number between filename and .js, e.g. "1.59.0"
+ * @returns {Object} - Object containing two keys: `doSearch`, which runs a search
+ * with the loaded index and returns a table of results; and `parseQuery`, which is the
+ * `parseQuery` function exported from the search module.
+ */
+function loadSearchJS(doc_folder, resource_suffix) {
+ const searchJs = path.join(doc_folder, "search" + resource_suffix + ".js");
+ const searchIndexJs = path.join(doc_folder, "search-index" + resource_suffix + ".js");
+ const searchIndex = require(searchIndexJs);
+ const searchModule = require(searchJs);
+ const searchWords = searchModule.initSearch(searchIndex.searchIndex);
+
+ return {
+ doSearch: function (queryStr, filterCrate, currentCrate) {
+ return searchModule.execQuery(searchModule.parseQuery(queryStr), searchWords,
+ filterCrate, currentCrate);
+ },
+ parseQuery: searchModule.parseQuery,
+ }
+}
+
+function showHelp() {
+ console.log("rustdoc-js options:");
+ console.log(" --doc-folder [PATH] : location of the generated doc folder");
+ console.log(" --help : show this message then quit");
+ console.log(" --crate-name [STRING] : crate name to be used");
+ console.log(" --test-file [PATHs] : location of the JS test files (can be called " +
+ "multiple times)");
+ console.log(" --test-folder [PATH] : location of the JS tests folder");
+ console.log(" --resource-suffix [STRING] : suffix to refer to the correct files");
+}
+
+function parseOptions(args) {
+ var opts = {
+ "crate_name": "",
+ "resource_suffix": "",
+ "doc_folder": "",
+ "test_folder": "",
+ "test_file": [],
+ };
+ var correspondences = {
+ "--resource-suffix": "resource_suffix",
+ "--doc-folder": "doc_folder",
+ "--test-folder": "test_folder",
+ "--test-file": "test_file",
+ "--crate-name": "crate_name",
+ };
+
+ for (var i = 0; i < args.length; ++i) {
+ if (correspondences.hasOwnProperty(args[i])) {
+ i += 1;
+ if (i >= args.length) {
+ console.log("Missing argument after `" + args[i - 1] + "` option.");
+ return null;
+ }
+ if (args[i - 1] !== "--test-file") {
+ opts[correspondences[args[i - 1]]] = args[i];
+ } else {
+ opts[correspondences[args[i - 1]]].push(args[i]);
+ }
+ } else if (args[i] === "--help") {
+ showHelp();
+ process.exit(0);
+ } else {
+ console.log("Unknown option `" + args[i] + "`.");
+ console.log("Use `--help` to see the list of options");
+ return null;
+ }
+ }
+ if (opts["doc_folder"].length < 1) {
+ console.log("Missing `--doc-folder` option.");
+ } else if (opts["crate_name"].length < 1) {
+ console.log("Missing `--crate-name` option.");
+ } else if (opts["test_folder"].length < 1 && opts["test_file"].length < 1) {
+ console.log("At least one of `--test-folder` or `--test-file` option is required.");
+ } else {
+ return opts;
+ }
+ return null;
+}
+
+function main(argv) {
+ var opts = parseOptions(argv.slice(2));
+ if (opts === null) {
+ return 1;
+ }
+
+ let parseAndSearch = loadSearchJS(
+ opts["doc_folder"],
+ opts["resource_suffix"]);
+ var errors = 0;
+
+ let doSearch = function (queryStr, filterCrate) {
+ return parseAndSearch.doSearch(queryStr, filterCrate, opts["crate_name"]);
+ };
+
+ if (opts["test_file"].length !== 0) {
+ opts["test_file"].forEach(function (file) {
+ process.stdout.write(`Testing ${file} ... `);
+ errors += runChecks(file, doSearch, parseAndSearch.parseQuery);
+ });
+ } else if (opts["test_folder"].length !== 0) {
+ fs.readdirSync(opts["test_folder"]).forEach(function (file) {
+ if (!file.endsWith(".js")) {
+ return;
+ }
+ process.stdout.write(`Testing ${file} ... `);
+ errors += runChecks(path.join(opts["test_folder"], file), doSearch,
+ parseAndSearch.parseQuery);
+ });
+ }
+ return errors > 0 ? 1 : 0;
+}
+
+process.exit(main(process.argv));
diff --git a/src/tools/rustdoc-themes/Cargo.toml b/src/tools/rustdoc-themes/Cargo.toml
new file mode 100644
index 000000000..3d8c77d36
--- /dev/null
+++ b/src/tools/rustdoc-themes/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "rustdoc-themes"
+version = "0.1.0"
+edition = "2021"
+
+[[bin]]
+name = "rustdoc-themes"
+path = "main.rs"
diff --git a/src/tools/rustdoc-themes/main.rs b/src/tools/rustdoc-themes/main.rs
new file mode 100644
index 000000000..7cac985a9
--- /dev/null
+++ b/src/tools/rustdoc-themes/main.rs
@@ -0,0 +1,49 @@
+use std::env::args;
+use std::fs::read_dir;
+use std::path::Path;
+use std::process::{exit, Command};
+
+const FILES_TO_IGNORE: &[&str] = &["light.css"];
+
+fn get_folders<P: AsRef<Path>>(folder_path: P) -> Vec<String> {
+ let mut ret = Vec::with_capacity(10);
+
+ for entry in read_dir(folder_path.as_ref()).expect("read_dir failed") {
+ let entry = entry.expect("Couldn't unwrap entry");
+ let path = entry.path();
+
+ if !path.is_file() {
+ continue;
+ }
+ let filename = path.file_name().expect("file_name failed");
+ if FILES_TO_IGNORE.iter().any(|x| x == &filename) {
+ continue;
+ }
+ ret.push(format!("{}", path.display()));
+ }
+ ret
+}
+
+fn main() {
+ let argv: Vec<String> = args().collect();
+
+ if argv.len() < 3 {
+ eprintln!("Needs rustdoc binary path");
+ exit(1);
+ }
+ let rustdoc_bin = &argv[1];
+ let themes_folder = &argv[2];
+ let themes = get_folders(&themes_folder);
+ if themes.is_empty() {
+ eprintln!("No theme found in \"{}\"...", themes_folder);
+ exit(1);
+ }
+ let arg_name = "--check-theme".to_owned();
+ let status = Command::new(rustdoc_bin)
+ .args(&themes.iter().flat_map(|t| vec![&arg_name, t].into_iter()).collect::<Vec<_>>())
+ .status()
+ .expect("failed to execute child");
+ if !status.success() {
+ exit(1);
+ }
+}
diff --git a/src/tools/rustdoc/Cargo.toml b/src/tools/rustdoc/Cargo.toml
new file mode 100644
index 000000000..c4101f72c
--- /dev/null
+++ b/src/tools/rustdoc/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "rustdoc-tool"
+version = "0.0.0"
+edition = "2021"
+
+# Cargo adds a number of paths to the dylib search path on windows, which results in
+# the wrong rustdoc being executed. To avoid the conflicting rustdocs, we give the "tool"
+# rustdoc a different binary name.
+[[bin]]
+name = "rustdoc_tool_binary"
+path = "main.rs"
+
+[dependencies]
+rustdoc = { path = "../../librustdoc" }
+
+[features]
+jemalloc = ['rustdoc/jemalloc']
diff --git a/src/tools/rustdoc/main.rs b/src/tools/rustdoc/main.rs
new file mode 100644
index 000000000..5b499a1fa
--- /dev/null
+++ b/src/tools/rustdoc/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+ rustdoc::main()
+}
diff --git a/src/tools/rustfmt/.editorconfig b/src/tools/rustfmt/.editorconfig
new file mode 100644
index 000000000..5bb92df3e
--- /dev/null
+++ b/src/tools/rustfmt/.editorconfig
@@ -0,0 +1,26 @@
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 2
+indent_style = space
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.md]
+trim_trailing_whitespace = false
+
+[*.rs]
+indent_size = 4
+
+[tests/**/*.rs]
+charset = utf-8
+end_of_line = unset
+indent_size = unset
+indent_style = unset
+trim_trailing_whitespace = unset
+insert_final_newline = unset
+
+[appveyor.yml]
+end_of_line = unset
diff --git a/src/tools/rustfmt/.github/workflows/integration.yml b/src/tools/rustfmt/.github/workflows/integration.yml
new file mode 100644
index 000000000..4d8899b43
--- /dev/null
+++ b/src/tools/rustfmt/.github/workflows/integration.yml
@@ -0,0 +1,85 @@
+name: integration
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+
+jobs:
+ integration-tests:
+ runs-on: ubuntu-latest
+ name: ${{ matrix.integration }}
+ strategy:
+ # https://help.github.com/en/actions/getting-started-with-github-actions/about-github-actions#usage-limits
+ # There's a limit of 60 concurrent jobs across all repos in the rust-lang organization.
+      # In order to avoid using too much of that limit, we throttle the
+ # number of rustfmt jobs that will run concurrently.
+ max-parallel: 4
+ fail-fast: false
+ matrix:
+ integration: [
+ bitflags,
+ error-chain,
+ log,
+ mdbook,
+ packed_simd,
+ rust-semverver,
+ tempdir,
+ futures-rs,
+ rust-clippy,
+ failure,
+ ]
+ include:
+ # Allowed Failures
+ # Actions doesn't yet support explicitly marking matrix legs as allowed failures
+ # https://github.community/t5/GitHub-Actions/continue-on-error-allow-failure-UI-indication/td-p/37033
+ # https://github.community/t5/GitHub-Actions/Why-a-matrix-step-will-be-canceled-if-another-one-failed/td-p/30920
+ # Instead, leverage `continue-on-error`
+ # https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepscontinue-on-error
+ #
+ # Failing due to breaking changes in rustfmt 2.0 where empty
+ # match blocks have trailing commas removed
+ # https://github.com/rust-lang/rustfmt/pull/4226
+ - integration: chalk
+ allow-failure: true
+ - integration: crater
+ allow-failure: true
+ - integration: glob
+ allow-failure: true
+ - integration: stdsimd
+ allow-failure: true
+ # Using old rustfmt configuration option
+ - integration: rand
+ allow-failure: true
+ # Keep this as an allowed failure as it's fragile to breaking changes of rustc.
+ - integration: rust-clippy
+ allow-failure: true
+ # Using old rustfmt configuration option
+ - integration: packed_simd
+ allow-failure: true
+ # calebcartwright (2019-12-24)
+ # Keeping this as an allowed failure since it was flagged as such in the TravisCI config, even though
+ # it appears to have been passing for quite some time.
+ # Original comment was: temporal build failure due to breaking changes in the nightly compiler
+ - integration: rust-semverver
+ allow-failure: true
+ # Can be moved back to include section after https://github.com/rust-lang-nursery/failure/pull/298 is merged
+ - integration: failure
+ allow-failure: true
+
+ steps:
+ - name: checkout
+ uses: actions/checkout@v3
+
+ # Run build
+ - name: install rustup
+ run: |
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rustup-init.sh
+ sh rustup-init.sh -y --default-toolchain none
+
+ - name: run integration tests
+ env:
+ INTEGRATION: ${{ matrix.integration }}
+ TARGET: x86_64-unknown-linux-gnu
+ run: ./ci/integration.sh
+ continue-on-error: ${{ matrix.allow-failure == true }}
diff --git a/src/tools/rustfmt/.github/workflows/linux.yml b/src/tools/rustfmt/.github/workflows/linux.yml
new file mode 100644
index 000000000..bce9b0c8d
--- /dev/null
+++ b/src/tools/rustfmt/.github/workflows/linux.yml
@@ -0,0 +1,39 @@
+name: linux
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ name: (${{ matrix.target }}, ${{ matrix.cfg_release_channel }})
+ env:
+ CFG_RELEASE_CHANNEL: ${{ matrix.cfg_release_channel }}
+ strategy:
+ # https://help.github.com/en/actions/getting-started-with-github-actions/about-github-actions#usage-limits
+ # There's a limit of 60 concurrent jobs across all repos in the rust-lang organization.
+      # In order to avoid using too much of that limit, we throttle the
+ # number of rustfmt jobs that will run concurrently.
+ max-parallel: 1
+ fail-fast: false
+ matrix:
+ target: [
+ x86_64-unknown-linux-gnu,
+ ]
+ cfg_release_channel: [nightly, stable]
+
+ steps:
+ - name: checkout
+ uses: actions/checkout@v3
+
+ # Run build
+ - name: install rustup
+ run: |
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rustup-init.sh
+ sh rustup-init.sh -y --default-toolchain none
+ rustup target add ${{ matrix.target }}
+
+ - name: Build and Test
+ run: ./ci/build_and_test.sh
diff --git a/src/tools/rustfmt/.github/workflows/mac.yml b/src/tools/rustfmt/.github/workflows/mac.yml
new file mode 100644
index 000000000..89a980c42
--- /dev/null
+++ b/src/tools/rustfmt/.github/workflows/mac.yml
@@ -0,0 +1,36 @@
+name: mac
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+
+jobs:
+ test:
+ # https://help.github.com/en/actions/automating-your-workflow-with-github-actions/virtual-environments-for-github-hosted-runners#supported-runners-and-hardware-resources
+ # macOS Catalina 10.15
+ runs-on: macos-latest
+ name: (${{ matrix.target }}, ${{ matrix.cfg_release_channel }})
+ env:
+ CFG_RELEASE_CHANNEL: ${{ matrix.cfg_release_channel }}
+ strategy:
+ fail-fast: false
+ matrix:
+ target: [
+ x86_64-apple-darwin,
+ ]
+ cfg_release_channel: [nightly, stable]
+
+ steps:
+ - name: checkout
+ uses: actions/checkout@v3
+
+ # Run build
+ - name: install rustup
+ run: |
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rustup-init.sh
+ sh rustup-init.sh -y --default-toolchain none
+ rustup target add ${{ matrix.target }}
+
+ - name: Build and Test
+ run: ./ci/build_and_test.sh
diff --git a/src/tools/rustfmt/.github/workflows/rustdoc_check.yml b/src/tools/rustfmt/.github/workflows/rustdoc_check.yml
new file mode 100644
index 000000000..cd0c32189
--- /dev/null
+++ b/src/tools/rustfmt/.github/workflows/rustdoc_check.yml
@@ -0,0 +1,25 @@
+name: rustdoc check
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+
+jobs:
+ rustdoc_check:
+ runs-on: ubuntu-latest
+ name: rustdoc check
+ steps:
+ - name: checkout
+ uses: actions/checkout@v3
+
+ - name: install rustup
+ run: |
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rustup-init.sh
+ sh rustup-init.sh -y --default-toolchain none
+ rustup target add x86_64-unknown-linux-gnu
+
+ - name: document rustfmt
+ env:
+ RUSTDOCFLAGS: --document-private-items --enable-index-page --show-type-layout --generate-link-to-definition -Zunstable-options -Dwarnings
+ run: cargo doc -Zskip-rustdoc-fingerprint --no-deps -p rustfmt-nightly -p rustfmt-config_proc_macro
diff --git a/src/tools/rustfmt/.github/workflows/upload-assets.yml b/src/tools/rustfmt/.github/workflows/upload-assets.yml
new file mode 100644
index 000000000..25699234a
--- /dev/null
+++ b/src/tools/rustfmt/.github/workflows/upload-assets.yml
@@ -0,0 +1,81 @@
+name: upload
+
+on:
+ push:
+ release:
+ types: [created]
+ workflow_dispatch:
+
+jobs:
+ build-release:
+ name: build-release
+ strategy:
+ matrix:
+ build: [linux-x86_64, macos-x86_64, windows-x86_64-gnu, windows-x86_64-msvc]
+ include:
+ - build: linux-x86_64
+ os: ubuntu-latest
+ rust: nightly
+ target: x86_64-unknown-linux-gnu
+ - build: macos-x86_64
+ os: macos-latest
+ rust: nightly
+ target: x86_64-apple-darwin
+ - build: windows-x86_64-gnu
+ os: windows-latest
+ rust: nightly-x86_64-gnu
+ target: x86_64-pc-windows-gnu
+ - build: windows-x86_64-msvc
+ os: windows-latest
+ rust: nightly-x86_64-msvc
+ target: x86_64-pc-windows-msvc
+ runs-on: ${{ matrix.os }}
+ steps:
+ - uses: actions/checkout@v3
+
+ # Run build
+ - name: install rustup
+ run: |
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rustup-init.sh
+ sh rustup-init.sh -y --default-toolchain none
+ rustup target add ${{ matrix.target }}
+
+ - name: Add mingw64 to path for x86_64-gnu
+ run: echo "C:\msys64\mingw64\bin" >> $GITHUB_PATH
+ if: matrix.rust == 'nightly-x86_64-gnu'
+ shell: bash
+
+ - name: Build release binaries
+ uses: actions-rs/cargo@v1
+ with:
+ command: build
+ args: --release
+
+ - name: Build archive
+ shell: bash
+ run: |
+ staging="rustfmt_${{ matrix.build }}_${{ github.event.release.tag_name }}"
+ mkdir -p "$staging"
+
+ cp {README.md,Configurations.md,CHANGELOG.md,LICENSE-MIT,LICENSE-APACHE} "$staging/"
+
+ if [ "${{ matrix.os }}" = "windows-latest" ]; then
+ cp target/release/{rustfmt.exe,cargo-fmt.exe,rustfmt-format-diff.exe,git-rustfmt.exe} "$staging/"
+ 7z a "$staging.zip" "$staging"
+ echo "ASSET=$staging.zip" >> $GITHUB_ENV
+ else
+ cp target/release/{rustfmt,cargo-fmt,rustfmt-format-diff,git-rustfmt} "$staging/"
+ tar czf "$staging.tar.gz" "$staging"
+ echo "ASSET=$staging.tar.gz" >> $GITHUB_ENV
+ fi
+
+ - name: Upload Release Asset
+ if: github.event_name == 'release'
+ uses: actions/upload-release-asset@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ github.event.release.upload_url }}
+ asset_path: ${{ env.ASSET }}
+ asset_name: ${{ env.ASSET }}
+ asset_content_type: application/octet-stream
diff --git a/src/tools/rustfmt/.github/workflows/windows.yml b/src/tools/rustfmt/.github/workflows/windows.yml
new file mode 100644
index 000000000..ec37c714b
--- /dev/null
+++ b/src/tools/rustfmt/.github/workflows/windows.yml
@@ -0,0 +1,62 @@
+name: windows
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+
+jobs:
+ test:
+ runs-on: windows-latest
+ name: (${{ matrix.target }}, ${{ matrix.cfg_release_channel }})
+ env:
+ CFG_RELEASE_CHANNEL: ${{ matrix.cfg_release_channel }}
+ strategy:
+ # https://help.github.com/en/actions/getting-started-with-github-actions/about-github-actions#usage-limits
+ # There's a limit of 60 concurrent jobs across all repos in the rust-lang organization.
+      # In order to avoid using too much of that limit, we throttle the
+ # number of rustfmt jobs that will run concurrently.
+ max-parallel: 2
+ fail-fast: false
+ matrix:
+ target: [
+ i686-pc-windows-gnu,
+ i686-pc-windows-msvc,
+ x86_64-pc-windows-gnu,
+ x86_64-pc-windows-msvc,
+ ]
+ cfg_release_channel: [nightly, stable]
+
+ steps:
+ # The Windows runners have autocrlf enabled by default
+ # which causes failures for some of rustfmt's line-ending sensitive tests
+ - name: disable git eol translation
+ run: git config --global core.autocrlf false
+ - name: checkout
+ uses: actions/checkout@v3
+
+ # Run build
+ - name: Install Rustup using win.rustup.rs
+ run: |
+ # Disable the download progress bar which can cause perf issues
+ $ProgressPreference = "SilentlyContinue"
+ Invoke-WebRequest https://win.rustup.rs/ -OutFile rustup-init.exe
+ .\rustup-init.exe -y --default-host=x86_64-pc-windows-msvc --default-toolchain=none
+ del rustup-init.exe
+ rustup target add ${{ matrix.target }}
+ shell: powershell
+
+ - name: Add mingw32 to path for i686-gnu
+ run: |
+ echo "C:\msys64\mingw32\bin" >> $GITHUB_PATH
+ if: matrix.target == 'i686-pc-windows-gnu' && matrix.channel == 'nightly'
+ shell: bash
+
+ - name: Add mingw64 to path for x86_64-gnu
+ run: echo "C:\msys64\mingw64\bin" >> $GITHUB_PATH
+ if: matrix.target == 'x86_64-pc-windows-gnu' && matrix.channel == 'nightly'
+ shell: bash
+
+ - name: Build and Test
+ shell: cmd
+ run: ci\build_and_test.bat
diff --git a/src/tools/rustfmt/.travis.yml b/src/tools/rustfmt/.travis.yml
new file mode 100644
index 000000000..d699bd842
--- /dev/null
+++ b/src/tools/rustfmt/.travis.yml
@@ -0,0 +1,77 @@
+sudo: false
+language: rust
+rust: nightly
+os: linux
+cache:
+ directories:
+ - $HOME/.cargo
+
+addons:
+ apt:
+ packages:
+ - libcurl4-openssl-dev
+ - libelf-dev
+ - libdw-dev
+
+matrix:
+ include:
+ - env: DEPLOY=LINUX
+ - env: CFG_RELEASE_CHANNEL=beta
+ - os: osx
+ - env: INTEGRATION=bitflags
+ - env: INTEGRATION=chalk
+ - env: INTEGRATION=crater
+ - env: INTEGRATION=error-chain
+ - env: INTEGRATION=glob
+ - env: INTEGRATION=log
+ - env: INTEGRATION=mdbook
+ - env: INTEGRATION=packed_simd
+ - env: INTEGRATION=rust-semverver
+ - env: INTEGRATION=stdsimd TARGET=x86_64-unknown-linux-gnu
+ - env: INTEGRATION=tempdir
+ - env: INTEGRATION=futures-rs
+ allow_failures:
+ # Using old configuration option
+ - env: INTEGRATION=rand
+ # Doesn't build - keep this in allow_failures as it's fragile to breaking changes of rustc.
+ - env: INTEGRATION=rust-clippy
+ # Doesn't build - seems to be because of an option
+ - env: INTEGRATION=packed_simd
+    # Doesn't build - a temporary build failure due to breaking changes in the nightly compiler
+ - env: INTEGRATION=rust-semverver
+ # can be moved back to include section after https://github.com/rust-lang-nursery/failure/pull/298 is merged
+ - env: INTEGRATION=failure
+ # `cargo test` doesn't finish - disabling for now.
+ # - env: INTEGRATION=cargo
+
+script:
+ - |
+ if [ -z ${INTEGRATION} ]; then
+ export CFG_RELEASE_CHANNEL=nightly
+ export CFG_RELEASE=nightly
+ cargo build
+ cargo test
+ cargo test -- --ignored
+ else
+ ./ci/integration.sh
+ fi
+
+after_success:
+- if [ -z ${INTEGRATION} ]; then travis-cargo coveralls --no-sudo; fi
+
+before_deploy:
+ # TODO: cross build
+ - cargo build --release --target=x86_64-unknown-linux-gnu
+ - tar czf rustfmt-x86_64-unknown-linux-gnu.tar.gz Contributing.md Design.md README.md -C target/x86_64-unknown-linux-gnu/release/rustfmt rustfmt
+
+deploy:
+ provider: releases
+ api_key:
+ secure: "your own encrypted key"
+ file:
+ - rustfmt-x86_64-unknown-linux-gnu.tar.gz
+ on:
+ repo: nrc/rustfmt
+ tags: true
+ condition: "$DEPLOY = LINUX"
+ skip_cleanup: true
diff --git a/src/tools/rustfmt/CHANGELOG.md b/src/tools/rustfmt/CHANGELOG.md
new file mode 100644
index 000000000..0c1893bf8
--- /dev/null
+++ b/src/tools/rustfmt/CHANGELOG.md
@@ -0,0 +1,1335 @@
+# Changelog
+
+## [Unreleased]
+
+## [1.5.1] 2022-06-24
+
+**N.B.** A bug was introduced in v1.5.0/nightly-2022-06-15 which modified formatting. If you happened to run rustfmt over your code with one of those ~10 nightlies you may have seen formatting changes, and you may see additional changes after this fix since that bug has now been reverted.
+
+### Fixed
+
+- Correct an issue introduced in v1.5.0 where formatting changes were unintentionally introduced in a few cases with a large/long construct in a right hand side position (e.g. a large chain on the RHS of a local/assignment statement)
+- `cargo fmt --version` properly displays the version value again [#5395](https://github.com/rust-lang/rustfmt/issues/5395)
+
+### Changed
+
+- Properly sort imports containing raw identifiers [#3791](https://github.com/rust-lang/rustfmt/issues/3791) (note this change is version-gated and not applied by default)
+
+### Added
+
+- Add new configuration option, `doc_comment_code_block_width`, which allows for setting a shorter width limit to use for formatting code snippets in doc comments [#5384](https://github.com/rust-lang/rustfmt/issues/5384)
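As a hedged illustration (the values below are invented for the example, not rustfmt defaults), the new option would typically be combined with code-in-doc-comment formatting in a `rustfmt.toml`:

```toml
# Illustrative sketch only: format code snippets found inside doc comments...
format_code_in_doc_comments = true
# ...and wrap those snippets at a narrower width than the normal max_width.
doc_comment_code_block_width = 80
```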
+
+### Install/Download Options
+- **rustup (nightly)** - nightly-2022-06-24
+- **GitHub Release Binaries** - [Release v1.5.1](https://github.com/rust-lang/rustfmt/releases/tag/v1.5.1)
+- **Build from source** - [Tag v1.5.1](https://github.com/rust-lang/rustfmt/tree/v1.5.1), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.5.0] 2022-06-14
+
+### Changed
+
+- Simplify the rustfmt help text by eliding the full path to the rustfmt binary from the usage string when running `rustfmt --help` [#5214](https://github.com/rust-lang/rustfmt/issues/5214)
+
+### Fixed
+
+- Remove duplicate imports when `imports_granularity` is set to `Item` [#4725](https://github.com/rust-lang/rustfmt/issues/4725)
+- Properly handle stdin input containing an inner skip attribute [#5368](https://github.com/rust-lang/rustfmt/issues/5368)
+- Maintain attributes on imports when `imports_granularity` is set to `Item` [#5030](https://github.com/rust-lang/rustfmt/issues/5030)
+- Format empty trait definitions as a single line when both `empty_item_single_line` is enabled and `brace_style` is set to `AlwaysNextLine` [#5047](https://github.com/rust-lang/rustfmt/issues/5047)
+- Don't change granularity of imports containing comments with `imports_granularity` if doing so could lose or misplace those comments [#5311](https://github.com/rust-lang/rustfmt/pull/5311)
+- Prevent rustfmt from removing trailing comments at the end of files annotated with inner `#![rustfmt::skip]` attributes [#5033](https://github.com/rust-lang/rustfmt/issues/5033)
+- Fixed various `error[internal]: left behind trailing whitespace` issues:
+  - Remove trailing whitespace when formatting a where clause whose bounds have an empty right hand side [#5012](https://github.com/rust-lang/rustfmt/issues/5012) [#4850](https://github.com/rust-lang/rustfmt/issues/4850)
+ - Prevent rustfmt from adding an empty comment line when rewriting markdown lists at the start of doc comments. This issue was triggered when `wrap_comments=true` [#5088](https://github.com/rust-lang/rustfmt/issues/5088)
+- Prevent adding a block indented newline before a function parameter with a complex type that was formatted over multiple lines [#5125](https://github.com/rust-lang/rustfmt/issues/5125)
+- Fix various module resolution issues preventing rustfmt from finding modules that should be formatted:
+ - Handle external mods imported via external->inline load hierarchy [#5063](https://github.com/rust-lang/rustfmt/issues/5063)
+ - Resolve sub modules of integration tests [#5119](https://github.com/rust-lang/rustfmt/issues/5119)
+ - Module resolution will fallback to the current search directory if a relative directory search results in a `FileNotFound` error [#5198](https://github.com/rust-lang/rustfmt/issues/5198)
+- Give users a clearer error message when resolving a module whose file path is ambiguous (e.g. `x.rs` and `x/mod.rs`). Previously users were given a confusing `File not found` error message [#5167](https://github.com/rust-lang/rustfmt/issues/5167)
+- Fix various issues related to type aliases:
+ - Prevent rustfmt from adding `= impl` to associated types defined in macro bodies [#4823](https://github.com/rust-lang/rustfmt/issues/4823)
+ - Properly block indent type alias impl traits (TAITs) that wrap to the next line when `version=Two` is set. Before any trait bounds that wrapped to the next line would not be indented [#5027](https://github.com/rust-lang/rustfmt/issues/5027)
+ - Prevent rustfmt from adding an `impl Trait` definition into types [#5086](https://github.com/rust-lang/rustfmt/issues/5086)
+- Fix cases where `normalize_comments=true` would de-normalize some comments by changing inline comments into block comments [#4909](https://github.com/rust-lang/rustfmt/issues/4909)
+- Prevent rustfmt from wrapping the content of markdown [reference-style links](https://www.markdownguide.org/basic-syntax/#reference-style-links) in doc comments [#5095](https://github.com/rust-lang/rustfmt/issues/5095) [#4933](https://github.com/rust-lang/rustfmt/issues/4933)
+- Don't format files annotated with inner `#![rustfmt::skip]` attribute [PR #5094](https://github.com/rust-lang/rustfmt/pull/5094)
+- Prevent duplicate comma when struct pattern ends with `..` and `trailing_comma=Always`. For example, `let Foo { a, .. } = b;` would become `let Foo { a,, .. } = b;` [#5066](https://github.com/rust-lang/rustfmt/issues/5066)
+- Fix the order of `static` and `async` keywords when rewriting static async closures. The correct order is `static` and then `async` (e.g `static async || {}`) [#5149](https://github.com/rust-lang/rustfmt/issues/5149)
+- Retain the fully qualified path segment when rewriting struct literals in expression position. Now `<Struct as Trait>::Type` is not rewritten as `Trait::Type` [#5151](https://github.com/rust-lang/rustfmt/issues/5151)
+- Do not remove match arm braces from a match arm with a single `ast::ExprKind::Block` that has leading attributes. Removing the braces could lead to code that does not compile. Now rustfmt will leave the outer `{}` in place when formatting `=> {#[allow(unsafe_code)]unsafe {}}` [#4109](https://github.com/rust-lang/rustfmt/issues/4109)
+- Backport json emitter and stdin changes [PR #5054](https://github.com/rust-lang/rustfmt/pull/5054)
+ - Make `--check` work when running rustfmt with input from stdin [PR #3896](https://github.com/rust-lang/rustfmt/pull/3896)
+ - Fix `--check` with the `--files-with-diff` flag [PR #3910](https://github.com/rust-lang/rustfmt/pull/3910)
+ - Produce valid JSON when using the JSON emitter [PR #3953](https://github.com/rust-lang/rustfmt/pull/3953)
+ - Fix newlines in JSON output [PR #4262](https://github.com/rust-lang/rustfmt/pull/4262)
+ - Use `<stdin>` when emitting stdin as filename [PR #4298](https://github.com/rust-lang/rustfmt/pull/4298)
+- Always generate some output when formatting `@generated` files via stdin even when `format_generated_files=false`. Not producing output caused rust-analyzer to delete the file content [rust-lang/rust-analyzer](https://github.com/rust-lang/rust-analyzer/issues/11285) [#5172](https://github.com/rust-lang/rustfmt/issues/5172)
+- Properly block indent multi-line comments in empty struct definitions. Previously, only the first comment line would be block indented. All other comment lines would be aligned with the struct definition [#4854](https://github.com/rust-lang/rustfmt/issues/4854)
+- Prevent rustfmt from wrapping a comment at a byte position inside a non-ascii character when `wrap_comments=true`. This prevents rustfmt from panicking when breaking on the invalid position [#5023](https://github.com/rust-lang/rustfmt/issues/5023)
+- Prevent rustfmt from removing commented-out trailing separators (e.g. commas) when rewriting lists. For example, removing the comma from a comment like `// ...,` would lead to a scenario where the entire list could not be rewritten because the content of the comment changed [#5042](https://github.com/rust-lang/rustfmt/issues/5042)
+- Fix panic when `imports_granularity` was set to `Module`, `One`, or `Crate` and the import use declaration contained an alias (e.g. `use crate::a::b as b1`) [#5131](https://github.com/rust-lang/rustfmt/issues/5131)
+- Add a newline between generic parameters and their doc comments to prevent the generic parameters from being merged into their doc comments [#5122](https://github.com/rust-lang/rustfmt/issues/5122)
+- Fixed an indentation issue where string literals manually broken with line continuation characters (`\`) would be incorrectly indented in macro definitions when `format_strings=true` is set [#4036](https://github.com/rust-lang/rustfmt/issues/4036)
+- Properly wrap and format long markdown block quotes when `wrap_comments=true` [#5157](https://github.com/rust-lang/rustfmt/issues/5157)
+- Prevent rustfmt from wrapping markdown headers even when `wrap_comments=true`. Wrapping the markdown headers would prevent them from being properly rendered with rustdoc [#5238](https://github.com/rust-lang/rustfmt/issues/5238)
+- Prevent rustfmt from removing commas between struct fields when those fields were also separated by an empty line [#4791](https://github.com/rust-lang/rustfmt/issues/4791) [#4928](https://github.com/rust-lang/rustfmt/issues/4928)
+- Fix compiler error caused when formatting imports with `imports_granularity=Module` and a path containing `self`. Given the following import `use crate::lexer::{self, tokens::TokenData};`, rustfmt would transform the `self` import into `use crate::lexer::self;`. Now rustfmt produces `use crate::lexer::{self};` [#4681](https://github.com/rust-lang/rustfmt/issues/4681)
+- Prevent rustfmt from breaking long type links in doc comments on namespace qualifiers (`::`) when `wrap_comments=true`. Breaking these long type links over multiple lines prevented them from being properly rendered in rustdoc [#5260](https://github.com/rust-lang/rustfmt/issues/5260)
+- Correctly find the start of struct bodies after any generic `const` parameters. Naively searching for an opening `{` led to issues since generic `const` parameters are also defined with `{}` (e.g. `struct Example<const N: usize = { 1048576 }> {}`) [#5273](https://github.com/rust-lang/rustfmt/issues/5273)
+- Prevent rustfmt from merging derives when using inner or outer `rustfmt::skip::attributes` attributes. For example, `#[rustfmt::skip::attributes(derive)]` [#5270](https://github.com/rust-lang/rustfmt/issues/5270)
+- Retain trailing `;` when rewriting macro calls in extern blocks. For example, `extern "C" { x!(-); }` [#5281](https://github.com/rust-lang/rustfmt/issues/5281)
+- Add a newline when formatting struct fields preceded by both doc comments and inline comments to prevent the field from being merged into the inline comment. This was not an issue when a struct was preceded by just a doc comment or just an inline comment [#5215](https://github.com/rust-lang/rustfmt/issues/5215)
+
+### Added
+
+- Added `One` as a new [group_imports](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#group_imports) option to create a single group for all imports [PR #4966](https://github.com/rust-lang/rustfmt/pull/4966)
+- Add [short_array_element_width_threshold](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#short_array_element_width_threshold) config option to give users more control over when `Mixed` list formatting is used [PR #5228](https://github.com/rust-lang/rustfmt/pull/5228)
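A hedged `rustfmt.toml` sketch combining the two additions above (the values are illustrative, and `group_imports` is still an unstable option at this point):

```toml
# Merge all imports into a single group instead of preserving source grouping.
group_imports = "One"
# Arrays whose elements are at most this wide may keep Mixed (fill) formatting.
short_array_element_width_threshold = 10
```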
+
+### Removed
+
+- Removed unstable, nightly-only config option `report_todo` [#5101](https://github.com/rust-lang/rustfmt/issues/5101)
+- Removed unstable, nightly-only config option `report_fixme` [#5102](https://github.com/rust-lang/rustfmt/issues/5102)
+- Removed unstable, nightly-only config option `license_template_path` [#5103](https://github.com/rust-lang/rustfmt/issues/5103)
+
+### Misc
+
+- Improved performance when formatting large and deeply nested expression trees, often found in generated code, which have many expressions that exceed `max_width` [#5128](https://github.com/rust-lang/rustfmt/issues/5128), [#4867](https://github.com/rust-lang/rustfmt/issues/4867), [#4476](https://github.com/rust-lang/rustfmt/issues/4476), [#5139](https://github.com/rust-lang/rustfmt/pull/5139)
+
+### Install/Download Options
+- **rustup (nightly)** - nightly-2022-06-15
+- **GitHub Release Binaries** - [Release v1.5.0](https://github.com/rust-lang/rustfmt/releases/tag/v1.5.0)
+- **Build from source** - [Tag v1.5.0](https://github.com/rust-lang/rustfmt/tree/v1.5.0), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.38] 2021-10-20
+
+### Changed
+
+- Switched from the `rustc-ap-*` crates to `rustc_private` as the consumption model for rustc internals
+- `annotate-snippets` updated to v0.8 [PR #4762](https://github.com/rust-lang/rustfmt/pull/4762)
+- Greatly improved the performance of `cargo fmt` in large workspaces utilizing the `--all` flag by updating to a newer version of `cargo_metadata` that leverages updated `cargo` output from v1.51+ [PR #4997](https://github.com/rust-lang/rustfmt/pull/4997)
+- Improved formatting of long slice patterns [#4530](https://github.com/rust-lang/rustfmt/issues/4530)
+ - **Note you must have `version = Two` in your configuration to take advantage of the new formatting**
+- Stabilized `match_block_trailing_comma` configuration option [#3380](https://github.com/rust-lang/rustfmt/issues/3380) - [https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#match_block_trailing_comma](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#match_block_trailing_comma)
+- Stabilized `disable_all_formatting` configuration option [#5026](https://github.com/rust-lang/rustfmt/pull/5026) - [https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#disable_all_formatting](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#disable_all_formatting)
+- Various improvements to the configuration documentation website [https://rust-lang.github.io/rustfmt/?version=v1.4.38](https://rust-lang.github.io/rustfmt/?version=v1.4.38)
+- Addressed various clippy and rustc warnings
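As a hedged aside, the two options stabilized above can now be set in a plain `rustfmt.toml` without a nightly toolchain; the values below are only illustrative:

```toml
# Add a trailing comma after block-bodied match arms.
match_block_trailing_comma = true
# Escape hatch that turns rustfmt off entirely for a project (default: false).
disable_all_formatting = false
```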
+
+
+### Fixed
+
+- Resolved issue where spurious whitespace would be inserted when a block style comment was terminated within string literal processing [#4312](https://github.com/rust-lang/rustfmt/issues/4312)
+- Nested out-of-line mods are again parsed and formatted [#4874](https://github.com/rust-lang/rustfmt/issues/4874)
+- Accepts `2021` for edition value from rustfmt command line [PR #4847](https://github.com/rust-lang/rustfmt/pull/4847)
+- Unstable command line options are no longer displayed in `--help` text on stable [PR #4798](https://github.com/rust-lang/rustfmt/issues/4798)
+- Stopped panicking on patterns in match arms which start with non-ascii characters [#4868](https://github.com/rust-lang/rustfmt/issues/4868)
+- Stopped stripping defaults on const params [#4816](https://github.com/rust-lang/rustfmt/issues/4816)
+- Fixed issue with dropped content with GAT aliases with self bounds in impls [#4911](https://github.com/rust-lang/rustfmt/issues/4911)
+- Stopped removing generic args on associated type constraints [#4943](https://github.com/rust-lang/rustfmt/issues/4943)
+- Stopped dropping visibility on certain trait and impl items [#4960](https://github.com/rust-lang/rustfmt/issues/4960)
+- Fixed dropping of qualified paths in struct patterns [#4908](https://github.com/rust-lang/rustfmt/issues/4908) and [#5005](https://github.com/rust-lang/rustfmt/issues/5005)
+- Fixed bug in line width calculation that was causing specious formatting of certain patterns [#4031](https://github.com/rust-lang/rustfmt/issues/4031)
+ - **Note that this bug fix may cause observable formatting changes in cases where code had been formatted with prior versions of rustfmt that contained the bug**
+- Fixed bug where rustfmt would drop parameter attributes if they were too long in certain cases [#4579](https://github.com/rust-lang/rustfmt/issues/4579)
+- Resolved idempotency issue with extern body elements [#4963](https://github.com/rust-lang/rustfmt/issues/4963)
+- rustfmt will now handle doc-style comments on function parameters, since they could appear with certain macro usage patterns even though it's generally invalid syntax [#4936](https://github.com/rust-lang/rustfmt/issues/4936)
+- Fixed bug in `match_block_trailing_comma` where commas were not added to the blocks of bodies whose arm had a guard that did not fit on the same line as the pattern [#4998](https://github.com/rust-lang/rustfmt/pull/4998)
+- Fixed bug in cases where derive attributes started with a block style comment [#4984](https://github.com/rust-lang/rustfmt/issues/4984)
+- Fixed issue where the struct rest could be lost when `struct_field_align_threshold` was enabled [#4926](https://github.com/rust-lang/rustfmt/issues/4926)
+- Handles cases where certain control flow type expressions have comments between patterns/keywords and the pattern ident contains the keyword [#5009](https://github.com/rust-lang/rustfmt/issues/5009)
+- Handles tuple structs that have explicit visibilities and start with a block style comment [#5011](https://github.com/rust-lang/rustfmt/issues/5011)
+- Handles leading line-style comments in certain types of macro calls [#4615](https://github.com/rust-lang/rustfmt/issues/4615)
+
+
+### Added
+- Granular width heuristic options made available for user control [PR #4782](https://github.com/rust-lang/rustfmt/pull/4782). This includes the following:
+ - [`array_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#array_width)
+ - [`attr_fn_like_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#attr_fn_like_width)
+ - [`chain_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#chain_width)
+ - [`fn_call_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#fn_call_width)
+ - [`single_line_if_else_max_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#single_line_if_else_max_width)
+ - [`struct_lit_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#struct_lit_width)
+ - [`struct_variant_width`](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#struct_variant_width)
+
+Note this hit the rustup distributions prior to the v1.4.38 release as part of an out-of-cycle update, but is listed in this version because the feature was not in the other v1.4.37 releases. See also the `use_small_heuristics` section on the configuration site for more information
+[https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#use_small_heuristics](https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#use_small_heuristics)
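+
+A rough sketch of what these thresholds control (the function and widths below are assumptions, not taken from the PR):
+```rust
+fn combine(a: i64, b: i64, c: i64) -> i64 {
+    a + b + c
+}
+
+fn main() {
+    // A call that fits within `fn_call_width` stays on one line ...
+    let _short = combine(1, 2, 3);
+    // ... while one that exceeds it is broken across lines, roughly:
+    let _long = combine(
+        1_000_000_000_000,
+        2_000_000_000_000,
+        -1_000_000_000_000,
+    );
+}
+```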
+
+- New `One` variant added to `imports_granularity` configuration option which can be used to reformat all imports into a single use statement [#4669](https://github.com/rust-lang/rustfmt/issues/4669)
+- rustfmt will now skip files that are annotated with `@generated` at the top of the file [#3958](https://github.com/rust-lang/rustfmt/issues/3958)
+ if `format_generated_files` option is set to `false` (by default `@generated` files are formatted)
+- New configuration option `hex_literal_case` that allows user to control the casing utilized for hex literals [PR #4903](https://github.com/rust-lang/rustfmt/pull/4903)
+
+See the section on the configuration site for more information
+https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#hex_literal_case
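+
+A minimal sketch of the effect (the literal is illustrative; the supported values are listed on the page above):
+```rust
+// With an upper-case preference the literal would be written as:
+const MASK_UPPER: u32 = 0xFFFA_0000;
+// ... and with a lower-case preference as:
+const MASK_LOWER: u32 = 0xfffa_0000;
+```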
+
+- `cargo fmt` now directly supports the `--check` flag, which means it's now possible to run `cargo fmt --check` instead of the more verbose `cargo fmt -- --check` [#3888](https://github.com/rust-lang/rustfmt/issues/3888)
+
+### Install/Download Options
+- **rustup (nightly)** - *pending*
+- **GitHub Release Binaries** - [Release v1.4.38](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.38)
+- **Build from source** - [Tag v1.4.38](https://github.com/rust-lang/rustfmt/tree/v1.4.38), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.37] 2021-04-03
+
+### Changed
+
+- `rustc-ap-*` crates updated to v712.0.0
+
+### Fixed
+- Resolve idempotence issue related to indentation of macro defs that contain or-patterns with inner comments ([#4603](https://github.com/rust-lang/rustfmt/issues/4603))
+- Addressed various clippy and rustc warnings
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - *pending*
+- **GitHub Release Binaries** - [Release v1.4.37](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.37)
+- **Build from source** - [Tag v1.4.37](https://github.com/rust-lang/rustfmt/tree/v1.4.37), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.36] 2021-02-07
+
+### Changed
+
+- `rustc-ap-*` crates updated to v705.0.0
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - *pending*
+- **GitHub Release Binaries** - [Release v1.4.36](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.36)
+- **Build from source** - [Tag v1.4.36](https://github.com/rust-lang/rustfmt/tree/v1.4.36), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.35] 2021-02-03
+
+### Changed
+
+- `rustc-ap-*` crates updated to v702.0.0
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - n/a (superseded by [v1.4.36](#1436-2021-02-07))
+- **GitHub Release Binaries** - [Release v1.4.35](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.35)
+- **Build from source** - [Tag v1.4.35](https://github.com/rust-lang/rustfmt/tree/v1.4.35), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.34] 2021-01-28
+
+### Fixed
+- Don't insert trailing comma on (base-less) rest in struct literals within macros ([#4675](https://github.com/rust-lang/rustfmt/issues/4675))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2021-01-31`
+- **GitHub Release Binaries** - [Release v1.4.34](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.34)
+- **Build from source** - [Tag v1.4.34](https://github.com/rust-lang/rustfmt/tree/v1.4.34), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.33] 2021-01-27
+
+### Changed
+- The `merge_imports` configuration option has been deprecated in favor of the new `imports_granularity` option. Any existing usage of `merge_imports` will be automatically mapped to the corresponding value of `imports_granularity`, with a warning message printed to encourage users to update their config files.
+
+### Added
+- New `imports_granularity` option has been added which succeeds `merge_imports`. This new option supports several additional variants which allow users to merge imports at different levels (crate or module), and even flatten imports to have a single use statement per item. ([PR #4634](https://github.com/rust-lang/rustfmt/pull/4634), [PR #4639](https://github.com/rust-lang/rustfmt/pull/4639))
+
+See the section on the configuration site for more information
+https://rust-lang.github.io/rustfmt/?version=v1.4.33&search=#imports_granularity
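+
+A rough sketch of the idea (module names are illustrative; exact output depends on the variant chosen):
+```rust
+mod net {
+    pub mod tcp {
+        pub fn connect() {}
+    }
+    pub mod udp {
+        pub fn bind() {}
+    }
+}
+
+// Item-level imports such as these ...
+use crate::net::tcp::connect;
+use crate::net::udp::bind;
+// ... can be merged under a coarser granularity, roughly:
+// use crate::net::{tcp::connect, udp::bind};
+
+fn main() {
+    connect();
+    bind();
+}
+```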
+
+### Fixed
+- Fix erroneous removal of `const` keyword on const trait impl ([#4084](https://github.com/rust-lang/rustfmt/issues/4084))
+- Fix incorrect span usage with const generics in supertraits ([#4204](https://github.com/rust-lang/rustfmt/issues/4204))
+- Use correct span for const generic params ([#4263](https://github.com/rust-lang/rustfmt/issues/4263))
+- Correct span on const generics to include type bounds ([#4310](https://github.com/rust-lang/rustfmt/issues/4310))
+- Idempotence issue on blocks containing only empty statements ([#4627](https://github.com/rust-lang/rustfmt/issues/4627) and [#3868](https://github.com/rust-lang/rustfmt/issues/3868))
+- Fix issue with semicolon placement on required functions that have a trailing comment that ends in a line-style comment before the semicolon ([#4646](https://github.com/rust-lang/rustfmt/issues/4646))
+- Avoid shared interned cfg_if symbol since rustfmt can re-initialize the rustc_ast globals on multiple inputs ([#4656](https://github.com/rust-lang/rustfmt/issues/4656))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - n/a (superseded by [v1.4.34](#1434-2021-01-28))
+- **GitHub Release Binaries** - [Release v1.4.33](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.33)
+- **Build from source** - [Tag v1.4.33](https://github.com/rust-lang/rustfmt/tree/v1.4.33), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.32] 2021-01-16
+
+### Fixed
+- Indentation now correct on first bound in cases where the generic bounds are multiline formatted and the first bound itself is multiline formatted ([#4636](https://github.com/rust-lang/rustfmt/issues/4636))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2021-01-18`
+- **GitHub Release Binaries** - [Release v1.4.32](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.32)
+- **Build from source** - [Tag v1.4.32](https://github.com/rust-lang/rustfmt/tree/v1.4.32), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.31] 2021-01-09
+
+### Changed
+
+- `rustc-ap-*` crates updated to v697.0.0
+
+### Added
+- Support for 2021 Edition ([#4618](https://github.com/rust-lang/rustfmt/pull/4618))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2021-01-16`
+- **GitHub Release Binaries** - [Release v1.4.31](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.31)
+- **Build from source** - [Tag v1.4.31](https://github.com/rust-lang/rustfmt/tree/v1.4.31), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.30] 2020-12-20
+
+### Fixed
+- Last character in derive no longer erroneously stripped when `indent_style` is overridden to `Visual`. ([#4584](https://github.com/rust-lang/rustfmt/issues/4584))
+- Brace wrapping of closure bodies maintained in cases where the closure has an explicit return type and the body consists of a single expression statement. ([#4577](https://github.com/rust-lang/rustfmt/issues/4577))
+- No more panics on invalid code with `err` and `typeof` types ([#4357](https://github.com/rust-lang/rustfmt/issues/4357), [#4586](https://github.com/rust-lang/rustfmt/issues/4586))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2020-12-25`
+- **GitHub Release Binaries** - [Release v1.4.30](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.30)
+- **Build from source** - [Tag v1.4.30](https://github.com/rust-lang/rustfmt/tree/v1.4.30), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.29] 2020-12-04
+
+### Fixed
+- Negative polarity on non-trait impl now preserved. ([#4566](https://github.com/rust-lang/rustfmt/issues/4566))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2020-12-07`
+- **GitHub Release Binaries** - [Release v1.4.29](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.29)
+- **Build from source** - [Tag v1.4.29](https://github.com/rust-lang/rustfmt/tree/v1.4.29), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.28] 2020-11-29
+
+### Changed
+
+- `rustc-ap-*` crates updated to v691.0.0
+- In the event of an invalid inner attribute on a `cfg_if` condition, rustfmt will now attempt to continue and format the imported modules. Previously rustfmt would emit the parser error about an inner attribute being invalid in this position, but for rustfmt's purposes the invalid attribute neither prevents nor impacts module formatting.
+
+### Added
+
+- [`group_imports`][group-imports-config-docs] - a new configuration option that allows users to control the strategy used for grouping imports ([#4107](https://github.com/rust-lang/rustfmt/issues/4107))
+
+[group-imports-config-docs]: https://github.com/rust-lang/rustfmt/blob/v1.4.28/Configurations.md#group_imports
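+
+A hedged sketch of one grouping strategy (names are illustrative; the available variants are described at the link above): standard-library imports first, then external crates, then local items.
+```rust
+mod config {
+    pub struct Config;
+}
+
+// Grouped imports, roughly: std first, then (if present) external crates,
+// then `crate`/`self`/`super` imports.
+use std::collections::HashMap;
+use std::fmt::Debug;
+
+use crate::config::Config;
+
+fn main() {
+    let map: HashMap<String, Config> = HashMap::new();
+    let _len: &dyn Debug = &map.len();
+}
+```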
+
+### Fixed
+- Formatting of malformed derived attributes is no longer butchered. ([#3898](https://github.com/rust-lang/rustfmt/issues/3898), [#4029](https://github.com/rust-lang/rustfmt/issues/4029), [#4115](https://github.com/rust-lang/rustfmt/issues/4115), [#4545](https://github.com/rust-lang/rustfmt/issues/4545))
+- Correct indentation used in macro branches when `hard_tabs` is enabled. ([#4152](https://github.com/rust-lang/rustfmt/issues/4152))
+- Comments between the visibility modifier and item name are no longer dropped. ([#2781](https://github.com/rust-lang/rustfmt/issues/2781))
+- Comments preceding the assignment operator in type aliases are no longer dropped. ([#4244](https://github.com/rust-lang/rustfmt/issues/4244))
+- Comments between {`&` operator, lifetime, `mut` kw, type} are no longer dropped. ([#4245](https://github.com/rust-lang/rustfmt/issues/4245))
+- Comments between type bounds are no longer dropped. ([#4243](https://github.com/rust-lang/rustfmt/issues/4243))
+- Function headers are no longer dropped on foreign function items. ([#4288](https://github.com/rust-lang/rustfmt/issues/4288))
+- Foreign function blocks are no longer dropped. ([#4313](https://github.com/rust-lang/rustfmt/issues/4313))
+- `where_single_line` is no longer incorrectly applied to multiline function signatures that have no `where` clause. ([#4547](https://github.com/rust-lang/rustfmt/issues/4547))
+- `matches!` expressions with multiple patterns and a destructure pattern are now able to be formatted. ([#4512](https://github.com/rust-lang/rustfmt/issues/4512))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - n/a (superseded by [v1.4.29](#1429-2020-12-04))
+- **GitHub Release Binaries** - [Release v1.4.28](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.28)
+- **Build from source** - [Tag v1.4.28](https://github.com/rust-lang/rustfmt/tree/v1.4.28), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.27] 2020-11-16
+
+### Fixed
+
+- Leading comments in an extern block are no longer dropped (a bug that exists in v1.4.26). ([#4528](https://github.com/rust-lang/rustfmt/issues/4528))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2020-11-18`
+- **GitHub Release Binaries** - [Release v1.4.27](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.27)
+- **Build from source** - [Tag v1.4.27](https://github.com/rust-lang/rustfmt/tree/v1.4.27), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.26] 2020-11-14
+
+### Changed
+
+- Original comment indentation for trailing comments within an `if` is now taken into account when determining the indentation level to use for the trailing comment in formatted code. This does not modify any existing code formatted with rustfmt; it simply gives the programmer discretion to specify whether the comment is associated to the `else` block, or if the trailing comment is just a member of the `if` block. ([#1575](https://github.com/rust-lang/rustfmt/issues/1575), [#4120](https://github.com/rust-lang/rustfmt/issues/4120), [#4506](https://github.com/rust-lang/rustfmt/issues/4506))
+
+In this example the `// else comment` refers to the `else`:
+```rust
+// if comment
+if cond {
+ "if"
+// else comment
+} else {
+ "else"
+}
+```
+
+Whereas in this case the `// continue` comments are members of their respective blocks and do not refer to the `else` below.
+```rust
+if toks.eat_token(Token::Word("modify"))? && toks.eat_token(Token::Word("labels"))? {
+ if toks.eat_token(Token::Colon)? {
+ // ate the token
+ } else if toks.eat_token(Token::Word("to"))? {
+ // optionally eat the colon after to, e.g.:
+ // @rustbot modify labels to: -S-waiting-on-author, +S-waiting-on-review
+ toks.eat_token(Token::Colon)?;
+ } else {
+ // It's okay if there's no to or colon, we can just eat labels
+ // afterwards.
+ }
+ 1 + 2;
+ // continue
+} else if toks.eat_token(Token::Word("label"))? {
+ // continue
+} else {
+ return Ok(None);
+}
+```
+
+### Fixed
+- Formatting of empty blocks with attributes which only contained comments is no longer butchered. ([#4475](https://github.com/rust-lang/rustfmt/issues/4475), [#4467](https://github.com/rust-lang/rustfmt/issues/4467), [#4452](https://github.com/rust-lang/rustfmt/issues/4452#issuecomment-705886282), [#4522](https://github.com/rust-lang/rustfmt/issues/4522))
+- Indentation of trailing comments in non-empty extern blocks is now correct. ([#4120](https://github.com/rust-lang/rustfmt/issues/4120#issuecomment-696491872))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2020-11-16`
+- **GitHub Release Binaries** - [Release v1.4.26](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.26)
+- **Build from source** - [Tag v1.4.26](https://github.com/rust-lang/rustfmt/tree/v1.4.26), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.25] 2020-11-10
+
+### Changed
+
+- Semicolons are no longer automatically inserted on trailing expressions in macro definition arms ([#4507](https://github.com/rust-lang/rustfmt/pull/4507)). This gives the programmer control and discretion over whether there should be semicolons in these scenarios so that potential expansion issues can be avoided.
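+
+For illustration (the macro is made up): a trailing `;` inside the arm body would change what the macro expands to, so the trailing expression is now left exactly as written.
+```rust
+macro_rules! double {
+    ($e:expr) => {
+        $e * 2 // rustfmt no longer appends `;` here
+    };
+}
+
+fn main() {
+    // Usable in expression position precisely because no `;` was inserted.
+    let x = double!(21);
+    assert_eq!(x, 42);
+}
+```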
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2020-11-14`
+- **GitHub Release Binaries** - [Release v1.4.25](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.25)
+- **Build from source** - [Tag v1.4.25](https://github.com/rust-lang/rustfmt/tree/v1.4.25), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.24] 2020-11-05
+
+### Changed
+
+- Block wrapped match arm bodies containing a single macro call expression are no longer flattened ([#4496](https://github.com/rust-lang/rustfmt/pull/4496)). This allows programmer discretion so that the block wrapping can be preserved in cases where needed to prevent issues in expansion, such as with trailing semicolons, and aligns with updated [Style Guide guidance](https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/expressions.md#macro-call-expressions) for such scenarios.
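+
+A rough sketch of the behavior (names are illustrative): the braces around a single macro-call body are kept rather than flattened onto the arm.
+```rust
+fn main() {
+    let n = 3;
+    match n {
+        0 => {
+            println!("zero"); // block wrapping (and its `;`) is preserved
+        }
+        _ => println!("nonzero: {}", n),
+    }
+}
+```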
+
+### Fixed
+- Remove useless `deprecated` attribute on a trait impl block in the rustfmt lib, as these now trigger errors ([rust-lang/rust/#78626](https://github.com/rust-lang/rust/pull/78626))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - Starting in `2020-11-09`
+- **GitHub Release Binaries** - [Release v1.4.24](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.24)
+- **Build from source** - [Tag v1.4.24](https://github.com/rust-lang/rustfmt/tree/v1.4.24), see instructions for how to [install rustfmt from source][install-from-source]
+
+## [1.4.23] 2020-10-30
+
+### Changed
+
+- Update `rustc-ap-*` crates to v686.0.0
+
+### Added
+- Initial support for formatting new ConstBlock syntax ([#4478](https://github.com/rust-lang/rustfmt/pull/4478))
+
+### Fixed
+- Handling of unclosed delimiter-only parsing errors in input files ([#4466](https://github.com/rust-lang/rustfmt/issues/4466))
+- Misc. minor parser bugs ([#4418](https://github.com/rust-lang/rustfmt/issues/4418) and [#4431](https://github.com/rust-lang/rustfmt/issues/4431))
+- Panic on nested tuple access ([#4355](https://github.com/rust-lang/rustfmt/issues/4355))
+- Unable to disable license template path via cli override ([#4487](https://github.com/rust-lang/rustfmt/issues/4487))
+- Preserve comments in empty statements ([#4018](https://github.com/rust-lang/rustfmt/issues/4018))
+- Indentation on skipped code ([#4398](https://github.com/rust-lang/rustfmt/issues/4398))
+
+### Install/Download Options
+- **crates.io package** - *pending*
+- **rustup (nightly)** - n/a (superseded by [v1.4.24](#1424-2020-11-05))
+- **GitHub Release Binaries** - [Release v1.4.23](https://github.com/rust-lang/rustfmt/releases/tag/v1.4.23)
+- **Build from source** - [Tag v1.4.23](https://github.com/rust-lang/rustfmt/tree/v1.4.23), see instructions for how to [install rustfmt from source][install-from-source]
+
+
+
+## [1.4.22] 2020-10-04
+
+### Changed
+
+- Update `rustc-ap-*` crates to v679.0.0
+- Add config option to allow control of leading match arm pipes
+- Support `RUSTFMT` environment variable in `cargo fmt` to run specified `rustfmt` instance
+
+### Fixed
+
+- Fix preservation of type aliases within extern blocks
+
+
+## [1.4.9] 2019-10-07
+
+### Changed
+
+- Update `rustc-ap-*` crates to 606.0.0.
+
+### Fixed
+
+- Fix aligning comments of different group
+- Fix flattening imports with a single `self`.
+- Fix removing attributes on function parameters.
+- Fix removing `impl` keyword from opaque type.
+
+## [1.4.8] 2019-09-08
+
+### Changed
+
+- Update `rustc-ap-*` crates to 583.0.0.
+
+## [1.4.7] 2019-09-06
+
+### Added
+
+- Add `--config` command line option.
+
+### Changed
+
+- Update `rustc-ap-*` crates to 581.0.0.
+- rustfmt no longer warns about trailing whitespace inside macro calls.
+
+### Fixed
+
+- Fix `merge_imports` generating invalid code.
+- Fix removing discriminant values on enum variants.
+- Fix modules defined inside `cfg_if!` not being formatted.
+- Fix minor formatting issues.
+
+## [1.4.6] 2019-08-28
+
+### Added
+
+- Add `--message-format` command line option to `cargo-fmt`.
+- Add `-l,--files-with-diff` command line option to `rustfmt`.
+- Add `json` emit mode.
+
+### Fixed
+
+- Fix removing attributes on struct pattern's fields.
+- Fix non-idempotent formatting of match arm.
+- Fix `merge_imports` generating invalid code.
+- Fix imports with `#![macro_use]` getting reordered with `reorder_imports`.
+- Fix calculation of line numbers in checkstyle output.
+- Fix poor formatting of complex fn type.
+
+## [1.4.5] 2019-08-13
+
+### Fixed
+
+- Fix generating invalid code when formatting an impl block with const generics inside a where clause.
+- Fix adding a trailing space after a `dyn` keyword which is used as a macro argument by itself.
+
+## [1.4.4] 2019-08-06
+
+### Fixed
+
+- Fix `cargo fmt` incorrectly formatting crates that are not part of the workspace or the path dependencies.
+- Fix removing a trailing comma from a tuple pattern.
+
+## [1.4.3] 2019-08-02
+
+### Changed
+
+- Update `rustc-ap-*` crates to 546.0.0.
+
+### Fixed
+
+- Fix an underscore pattern getting removed.
+
+## [1.4.2] 2019-07-31
+
+### Changed
+
+- Explicitly require the version of `rustfmt-config_proc_macro` to be 0.1.2 or later.
+
+## [1.4.1] 2019-07-30
+
+### Changed
+
+- Update `rustc-ap-*` crates to 542.0.0.
+
+## [1.4.0] 2019-07-29
+
+### Added
+
+- Add new attribute `rustfmt::skip::attributes` to prevent rustfmt
+from formatting an attribute #3665
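+
+A short sketch of the intended usage (the target attribute here is `derive`, chosen purely for illustration; see #3665 for specifics):
+```rust
+// rustfmt leaves the listed attribute's own layout untouched on this item.
+#[rustfmt::skip::attributes(derive)]
+#[derive( Clone , Copy )]
+struct Point {
+    x: i32,
+    y: i32,
+}
+
+fn main() {
+    let p = Point { x: 1, y: 2 };
+    let _copy = p;
+}
+```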
+
+### Changed
+
+- Update `rustc-ap-*` crates to 541.0.0.
+- Remove multiple semicolons.
+
+## [1.3.3] 2019-07-15
+
+### Added
+
+- Add `--manifest-path` support to `cargo fmt` (#3683).
+
+### Fixed
+
+- Fix `cargo fmt -- --help` printing nothing (#3620).
+- Fix inserting an extra comma (#3677).
+- Fix incorrect handling of CRLF with `file-lines` (#3684).
+- Fix `print-config=minimal` option (#3687).
+
+## [1.3.2] 2019-07-06
+
+### Fixed
+
+- Fix rustfmt crashing when `await!` macro call is used in a method chain.
+- Fix rustfmt not recognizing a package whose name differs from its directory's name.
+
+## [1.3.1] 2019-06-30
+
+### Added
+
+- Implement the `Display` trait on the types of `Config`.
+
+### Changed
+
+- `ignore` configuration option now only supports paths separated by `/`. Windows-style paths are not supported.
+- Running `cargo fmt` in a sub-directory of a project is now supported.
+
+### Fixed
+
+- Fix bugs that may cause rustfmt to crash.
+
+## [1.3.0] 2019-06-09
+
+### Added
+
+- Format modules defined inside `cfg_if` macro calls #3600
+
+### Changed
+
+- Change option `format_doc_comment` to `format_code_in_doc_comment`.
+- `use_small_heuristics` changed to be an enum and stabilised. Configuration
+ options are now ready for 1.0.
+- Stabilise `fn_args_density` configuration option and rename it to `fn_args_layout` #3581
+- Update `rustc-ap-*` crates to 486.0.0
+- Ignore sub-modules when skip-children is used #3607
+- Removed bitrig support #3608
+
+### Fixed
+
+- `wrap_comments` should not imply `format_doc_comments` #3535
+- Incorrect handling of const generics #3555
+- Add the handling for `vec!` with paren inside macro #3576
+- Format trait aliases with where clauses #3586
+- Catch panics from the parser while rewriting macro calls #3589
+- Fix erasing inner attributes in struct #3593
+- Inline the attribute with its item even with the `macro_use` attribute or when `reorder_imports` is disabled #3598
+- Fix a bug that added unwanted code to an impl #3602
+
+## [1.2.2] 2019-04-24
+
+### Fixed
+
+- Fix processing of `ignore` paths #3522
+- Attempt to format attributes even when they are the only content present #3523
+
+## [1.2.1] 2019-04-18
+
+### Added
+
+- Add `--print-config current` CLI option b473e65
+- Create GitHub [page](https://rust-lang.github.io/rustfmt/) for Configuration.md #3485
+
+### Fixed
+
+- Keep comment appearing between parameter's name and its type #3491
+- Do not delete semicolon after macro call with square brackets #3500
+- Fix `--version` CLI option #3506
+- Fix duplication of attributes on a match arm's body #3510
+- Avoid overflowing item with attributes #3511
+
+## [1.2.0] 2019-03-27
+
+### Added
+
+- Add new attribute `rustfmt::skip::macros` to prevent rustfmt from formatting a macro #3454
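+
+A sketch of the intended usage (the macro is made up; see #3454 for specifics):
+```rust
+macro_rules! matrix {
+    [$($e:expr),* $(,)?] => {
+        vec![$($e),*]
+    };
+}
+
+// Calls to the listed macro keep their hand-written layout.
+#[rustfmt::skip::macros(matrix)]
+fn identity() -> Vec<i32> {
+    matrix![
+        1, 0, 0,
+        0, 1, 0,
+        0, 0, 1,
+    ]
+}
+
+fn main() {
+    assert_eq!(identity().len(), 9);
+}
+```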
+
+### Changed
+
+- Discard error report in silent_emitter #3466
+
+### Fixed
+
+- Fix bad performance on deeply nested binary expressions #3467
+- Use BTreeMap to guarantee consistent ordering b4d4b57
+
+## [1.1.1] 2019-03-21
+
+### Fixed
+
+- Avoid panic on macro inside deeply nested block c9479de
+- Fix line numbering in missed spans and handle file_lines in edge cases cdd08da
+- Fix formatting of async blocks 1fa06ec
+- Avoid duplication on the presence of spaces between macro name and `!` #3464
+
+## [1.1.0] 2019-03-17
+
+### Added
+
+- Add `inline_attribute_width` configuration option to write an item and its attribute on the same line if their combined width is below a threshold #3409
+- Support `const` generics f0c861b
+- Support path clarity module #3448
+
+### Changed
+
+- Align loop and while formatting 7d9a2ef
+- Support `EmitMode::ModifiedLines` with stdin input #3424
+- Update `rustc-ap-*` crates to 407.0.0
+- Remove trailing whitespaces in missing spans 2d5bc69
+
+### Fixed
+
+- Do not remove comment in the case of no arg 8e3ef3e
+- Fix `Ident of macro+ident gets duplicated` error 40ff078
+- Format the if expression at the end of the block in a single line 5f3dfe6
+
+## [1.0.3] 2019-02-14
+
+### Added
+
+- Point unstable options to tracking issues 412dcc7
+
+### Changed
+
+- Update `rustc-ap-*` crates to 373.0.0
+
+## [1.0.2] 2019-02-12
+
+### Added
+
+- Add a [section](https://github.com/rust-lang/rustfmt/blob/ae331be/Contributing.md#version-gate-formatting-changes) to the Contributing.md file about version-gating formatting changes 36e2cb0
+- Allow specifying package with `-p` CLI option a8d2591
+- Support `rustfmt::skip` on imports #3289
+- Support global `rustfmt.toml` to be written in user config directory #3280
+- Format visibility on trait alias 96a3df3
+
+### Changed
+
+- Do not modify original source code inside macro call #3260
+- Recognize strings inside comments in order to avoid indenting them baa62c6
+- Use Unicode-standard char width to wrap comments or strings a01990c
+- Change new line point in the case of no args #3294
+- Use the same formatting rule between functions and macros #3298
+- Update rustc-ap-rustc_target to 366.0.0, rustc-ap-syntax to 366.0.0, and rustc-ap-syntax_pos to 366.0.0
+
+### Fixed
+
+- rewrite_comment: fix block fallback when failing to rewrite an itemized block ab7f4e1
+- Catch possible tokenizer panics #3240
+- Fix macro indentation on Windows #3266
+- Fix shape when formatting return or break expr on statement position #3259
+- rewrite_comment: fix block fallback when failing to rewrite an itemized block
+- Keep leading double-colon to respect the 2018 edition of rust's paths a2bfc02
+- Fix glob and nested global imports 2125ad2
+- Do not force trailing comma when using mixed layout #3306
+- Prioritize `single_line_fn` and `empty_item_single_line` over `brace_style` #3308
+- Fix `internal error: left behind trailing whitespace` with long lines c2534f5
+- Fix attribute duplication #3325
+- Fix formatting of strings within a macro 813aa79
+- Handle a macro argument with a single keyword 9a7ea6a
+
+## [1.0.1] 2018-12-09
+
+### Added
+
+- Add a `version` option 378994b
+
+### Changed
+
+- End expressions like return/continue/break with a semicolon #3223
+- Update rustc-ap-rustc_target to 306.0.0, rustc-ap-syntax to 306.0.0, and rustc-ap-syntax_pos to 306.0.0
+
+### Fixed
+
+- Allow to run a rustfmt command from cargo-fmt even when there is no target a2da636
+- Fix `un-closed delimiter` errors when formatting break labels 40174e9
+
+## [1.0.0] 2018-11-19
+
+### Changed
+
+- Preserve possibly one whitespace for brace macros 1a3bc79
+- Prefer to break arguments over putting output type on the next line 1dd54e6
+
+## [0.99.9] 2018-11-15
+
+### Changed
+
+- Update rustc-ap-rustc_target to 297.0.0, rustc-ap-syntax to 297.0.0, and rustc-ap-syntax_pos to 297.0.0
+- Don't align comments on `extern crate`s dd7add7
+
+## [0.99.8] 2018-11-14
+
+### Added
+
+- Add `overflow_delimited_expr` config option to more aggressively allow overflow #3175
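+
+A rough before/after sketch of the overflow idea (function and values are made up):
+```rust
+fn send(label: &str, payload: &[u8]) -> usize {
+    label.len() + payload.len()
+}
+
+fn main() {
+    // With the option enabled, the trailing slice may "overflow" the call:
+    let a = send("frame", &[
+        0x01, 0x02, 0x03,
+    ]);
+    // ... instead of every argument being broken onto its own line:
+    let b = send(
+        "frame",
+        &[0x01, 0x02, 0x03],
+    );
+    assert_eq!(a, b);
+}
+```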
+
+### Fixed
+
+- Fix the logic for retaining a comment before the arrow in a match #3181
+- Do not wrap comments in doctest to avoid failing doctest runs #3183
+- Fix comment rewriting that was wrapping code into a line comment #3188
+- Fix formatting of unit-struct with `where`-clause #3200
+
+## [0.99.7] 2018-11-07
+
+### Changed
+
+- Force a newline after the `if` condition if there is a different indentation level #3109
+- Use correct width when formatting type on local statement #3126
+- Treat crates non-alphabetically when ordering 799005f
+- Fix formatting of code that is annotated with rustfmt::skip #3113
+- Stabilize `edition` configuration option 9c3ae2d
+- cargo-fmt: detect Rust edition in use #3129
+- Trim the indentation on macros which heuristically appear to use block-style indentation #3178
+
+### Fixed
+
+- Do not remove path disambiguator inside macro #3142
+- Improve handling of Windows newlines #3141
+- Fix alignment of a struct's fields (`struct_field_align_threshold` option) with the Visual `indent_style` #3165
+- Fix a bug in formatting markdown lists within comments #3172
+
+## [0.99.6] 2018-10-18
+
+### Added
+
+- Add `enum_discrim_align_threshold` option to vertically align enum discriminants cc22869
+- Add `println!`-like heuristic to the `fail` attribute #3067
+- Handle itemized items inside comments #3083
+- Add `format_doc_comments` configuration option to control the formatting of code snippets inside comments #3089
+
+### Changed
+
+- Makes brace behavior consistent with empty bodies for traits and impls 2727d41
+- Consider a multi-lined array as a block-like expression #3969
+- Improve formatting of strings #3073
+- Get rid of extra commas in Visual struct literal formatting #3077
+- Update rustc-ap-rustc_target to 274.0.0, rustc-ap-syntax to 274.0.0, and rustc-ap-syntax_pos to 274.0.0
+- Format macro calls with item-like arguments #3080
+- Avoid letting the conditions of control flow expressions go multi-line ef59b34
+- Simplify multi-lining binop expressions #3101
+
+### Fixed
+
+- Do not format a code block in documentation if it is annotated with ignore or text 2bcc3a9
+- Fix inconsistent overflow behavior in Visual style #3078
+- Fix corner cases of the string formatting implementation #3083
+- Do not add parens around lifetimes 0ac68c9
+- Catch parser panic in format_snippet 8c4e92a
+
+## [0.99.5] 2018-09-25
+
+### Added
+
+- Handle leading module separator for 2018 Edition #2952
+- Add configuration option `normalize_doc_attributes`: convert doc attributes to comments #3002
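+
+A small sketch of the conversion (the functions are illustrative):
+```rust
+// With the option enabled, an attribute-style doc comment such as:
+#[doc = " Returns the sum of `a` and `b`."]
+pub fn add_attr(a: i32, b: i32) -> i32 {
+    a + b
+}
+
+// ... would be rewritten to the usual comment form:
+/// Returns the sum of `a` and `b`.
+pub fn add_doc(a: i32, b: i32) -> i32 {
+    a + b
+}
+```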
+
+### Changed
+
+- Accept 2015 and 2018 instead of Edition2015 and Edition2018 for edition option eec7436
+- Support platforms without a timer 46e2a2e
+- Update rustc-ap-rustc_target to 263.0.0, rustc-ap-syntax to 263.0.0, and rustc-ap-syntax_pos to 263.0.0
+
+### Fixed
+
+- Format of attributes with commas #2971
+- Fix optional arg condensing #2972
+- Improve formatting of long function parameters #2981
+- Fix formatting of raw string literals #2983
+- Handle chain with try operators with spaces #2986
+- Use correct shape in Visual tuple rewriting #2987
+- Improve formatting of arguments with `visual_style = "Visual"` option #2988
+- Change `print_diff` to output the correct line number 992b179
+- Propagate errors about failing to rewrite a macro 6f318e3
+- Handle formatting of long function signature #3010
+- Fix indent computation of a macro with braces c3edf6d
+- Format generics on associated types #3035
+- Incorrect indentation of multiline block match expression #3042
+- Fix bug in import where two consecutive module separators were possible 98a0ef2
+- Prevent right-shifting of block comments with bare lines 5fdb6db
+
+## [0.99.4] 2018-08-27
+
+### Added
+
+- Handle formatting of underscore imports #2951
+- Handle formatting of try blocks #2965
+
+### Changed
+
+- Update rustc-ap-rustc_target to 237.0.0, rustc-ap-syntax to 237.0.0, and rustc-ap-syntax_pos to 237.0.0 ca19c9a
+- Consider `dev` channel as nightly for unstable features #2948
+
+### Fixed
+
+- Fix formatting of patterns with ellipsis #2942
+
+## [0.99.3] 2018-08-23
+
+### Added
+
+- Use path attribute when searching for modules #2901
+- Expose FileLines JSON representation to allow external libraries to use the file_lines option #2915
+
+### Changed
+
+- Replace '--config-help' with '--config=help' cb10e06
+- Improve formatting of slice patterns #2912
+
+### Fixed
+
+- Format chains with comment #2899
+- Fix indentation of formatted macro body #2920
+- Fix indentation of block comments f23e6aa
+
+## [0.99.2] 2018-08-07
+
+### Changed
+
+- Update rustc-ap-rustc_target to 218.0.0, rustc-ap-syntax to 218.0.0, and rustc-ap-syntax_pos to 218.0.0 5c9a2b6
+- Combine function-like attributes #2900
+
+### Fixed
+
+- Explicitly handle semicolon after the item in statement position d96e3ca
+- Fix parsing '#'-hiding of rustdoc 2eca09e
+
+## [0.99.1] 2018-08-04
+
+### Fixed
+
+- fix use statements ordering when a number is present 1928ae7
+
+## [0.99.0] 2018-08-03
+
+- 1.0 RC release
+
+### Changed
+
+- Clarification in README.md 30fe66b
+
+## [0.9.0] 2018-08-01
+
+### Added
+
+- Handle raw identifiers 3027c21
+- Format async closure 60ce411
+- Add max_width option for all heuristics c2ae39e
+- Add config option `format_macro_matchers` to format the metavariable matching patterns in macros 79c5ee8
+- Add config option `format_macro_bodies` to format the bodies of macros 79c5ee8
+- Format existential type fc307ff
+- Support raw identifiers in struct expressions f121b1a
+- Format Async block and async function 0b25f60
+
+### Changed
+
+- Update rustc-ap-rustc_target to 211.0.0, rustc-ap-syntax to 211.0.0, and rustc-ap-syntax_pos to 211.0.0
+- Put each nested import on its own line while putting non-nested imports on the same line as much as possible 42ab258
+- Respect `empty_item_single_line` config option when formatting empty impls. Put the `where` on its own line to improve readability #2771
+- Strip leading `|` in match arm patterns 1d4b988
+- Apply short function call heuristic to attributes 3abebf9
+- Indent a match guard if the pattern is multiline be4d37d
+- Change default newline style to `Native` 9d8f381
+- Improve formatting of series of binop expressions a4cdb68
+- Trigger an internal error if we skip formatting due to a lost comment b085113
+- Refactor chain formatting #2838
+
+### Fixed
+
+- Do not insert spaces around braces with empty body or multiple lines 2f65852
+- Allow using mixed layout with comments #2766
+- Handle break labels #2726
+- fix rewrite_string when a line feed is present 472a2ed
+- Fix an anomaly with comments and array literals b28a0cd
+- Check for comments after the `=>` in a match arm 6899471
+
+## [0.8.0,0.8.1,0.8.2] 2018-05-28
+
+### Added
+
+- Use scoped attributes for skip attribute https://github.com/rust-lang/rustfmt/pull/2703
+
+### Changed
+
+- Comment options `wrap_comments` and `normalize_comments` are reverted back to unstable 416bc4c
+- Stabilise `reorder_imports` and `reorder_modules` options 7b6d2b4
+- Remove `spaces_within_parens_and_brackets` option d726492
+- Stabilise shorthand options: `use_try_shorthand`, `use_field_init_shorthand`, and `force_explicit_abi` 8afe367
+- Stabilise `remove_nested_parens` and set default to true a70f716
+- Unstabilise `unstable_features` dd9c15a
+- Remove `remove_blank_lines_at_start_or_end_of_block` option 2ee8b0e
+- Update rustc-ap-syntax to 146.0.0 and rustc-ap-rustc_target to 146.0.0 2c275a2
+- Audit the public API #2639
+
+### Fixed
+
+- Handle code block in doc comment without rust prefix f1974e2
+
+## [0.7.0] 2018-05-14
+
+### Added
+
+- Add integration tests against crates in the rust-lang-nursery c79f39a
+
+### Changed
+
+- Update rustc-ap-syntax to 128.0.0 and rustc-ap-rustc_target to 128.0.0 195395f
+- Put each operand on its own line when each fits in a single line f8439ce
+- Improve CLI options 55ac062 1869888 798bffb 4d9de48 eca7796 8396da1 5d9f5aa
+
+### Fixed
+
+- Use correct line width for list attribute 61a401a
+- Avoid flip-flopping impl items when reordering them 37c216c
+- Formatting breaks short lines when max_width is less than 100 9b36156
+- Fix variant "Mixed" of imports_layout option 8c8676c
+- Improve handling of long lines f885039
+- Fix up lines exceeding max width 51c07f4
+- Fix handling of modules in non_modrs_mods style cf573e8
+- Do not duplicate attributes on use items e59ceaf
+- Do not insert an extra brace in macros with native newlines 4c9ef93
+
+## [0.6.1] 2018-05-01
+
+### Changed
+
+- Change the default value of imports_indent to IndentStyle::Block https://github.com/rust-lang/rustfmt/pull/2662
+
+### Fixed
+
+- Handle formatting of auto traits 5b5a72c
+- Use consistent formatting for empty enum and struct https://github.com/rust-lang/rustfmt/pull/2656
+
+## [0.6.0] 2018-04-20
+
+### Changed
+
+- Improve public API 8669004
+
+## [0.5.0] 2018-04-20
+
+### Added
+
+- Add `verbose-diff` CLI option 5194984
+
+### Changed
+
+- Update rustc-ap-syntax to 103.0.0 dd807e2
+- Refactor to make a sensible public API ca610d3
+
+### Fixed
+
+- Add spaces between consecutive `..` `..=` 61d29eb
+
+## [0.4.2] 2018-04-12
+
+### Added
+
+- Handle binary operators and lifetimes 0fd174d
+- Add reorder_impl_items config option 94f5a05
+- Add `--unstable-features` CLI option to list unstable options from the `--help` output 8208f8a
+- Add merge_imports config option 5dd203e
+
+### Changed
+
+- Format macro arguments with vertical layout ec71459
+- Reorder imports by default 164cf7d
+- Do not collapse block around expr with condition on match arm 5b9b7d5
+- Use vertical layout for complex attributes c77708f
+- Format array using heuristics for function calls 98c6f7b
+- Implement stable ordering for impl items with the following item priority: type, const, macro, then method fa80ddf
+- Group `extern crate` by default 3a138a2
+- Make `error_on_line_overflow` false by default f146711
+- Merge imports with the same prefix into a single nested import 1954513
+- Squash the various 'reorder imports' option into one 911395a
+
+### Fixed
+
+- Print version is missing the channel ca6fc67
+- Do not add the beginning vert to the match arm 1e1d9d4
+- Follow indent style config when formatting attributes efd295a
+- Do not insert newline when item is empty a8022f3
+- Do not indent or unindent inside string literal ec1907b
+
+## [0.4.1] 2018-03-16
+
+### Added
+
+- Add `ignore` configuration option.
+- Add `license_template_path` configuration option.
+- Format `lazy_static!`.
+
+### Fixed
+
+- Fix formatting bugs.
+- Fix setting `reorder_modules` removing inline modules.
+- Format attributes on block expressions.
+- Support `dyn trait` syntax.
+- Support multiple patterns in `if let` and `while let`.
+- Support a pattern with parentheses.
+
+## [0.4.0] 2018-03-02
+
+### Changed
+
+- Do not print verbose outputs when formatting with stdin.
+- Preserve trailing whitespaces in doc comments.
+- Scale the values of width heuristics by `max_width`.
+
+### Fixed
+
+- Do not reorder items with `#[macro_use]`.
+- Fix formatting bugs.
+- Support the beginning `|` on a match arm.
+
+## [0.3.8] 2018-02-04
+
+### Added
+
+- Format (or at least try to format) `macro_rules!`.
+
+## [0.3.7] 2018-02-01
+
+### Added
+
+- Add `use_field_init_shorthand` config option.
+- Add `reorder_modules` configuration option.
+
+## [0.3.6] 2018-01-18
+
+### Fixed
+
+- Fix panicking on formatting certain macros (#2371).
+
+## [0.3.5] 2018-01-15
+
+### Changed
+
+- Format code block in comments when `wrap_comments` is set to `true`.
+- Remove `same_line_attributes` configuration option.
+- Rename `git-fmt` to `git-rustfmt`.
+
+### Fixed
+
+- Rustup to `rustc 1.25.0-nightly (e6072a7b3 2018-01-13)`.
+- Fix formatting bugs.
+
+## [0.3.4] 2017-12-23
+
+### Added
+
+- Add `--version` flag to `cargo-fmt`, allow `cargo fmt --version`.
+
+### Fixed
+
+- Rustup to `rustc 1.24.0-nightly (5165ee9e2 2017-12-22)`.
+
+## [0.3.3] 2017-12-22
+
+### Added
+
+- Format trait aliases.
+
+### Changed
+
+- `cargo fmt` will format every workspace member.
+
+### Fixed
+
+- Rustup to `rustc 1.24.0-nightly (250b49205 2017-12-21)`
+- Fix formatting bugs.
+
+## [0.3.2] 2017-12-15
+
+### Changed
+
+- Warn when unknown configuration option is used.
+
+### Fixed
+
+- Rustup to `rustc 1.24.0-nightly (0077d128d 2017-12-14)`.
+
+## [0.3.1] 2017-12-11
+
+### Added
+
+- Add `error_on_unformatted` configuration option.
+- Add `--error-on-unformatted` command line option.
+
+### Changed
+
+- Do not report formatting errors on comments or strings by default.
+- Rename `error_on_line_overflow_comments` to `error_on_unformatted`.
+
+### Fixed
+
+- Fix formatting bugs.
+- Fix adding a trailing whitespace inside code block when `wrap_comments = true`.
+
+## [0.3.0] 2017-12-11
+
+### Added
+
+- Support nested imports.
+
+### Changed
+
+- Do not report errors on skipped items.
+- Do not format code block inside comments when `wrap_comments = true`.
+- Keep vertical spaces between items within range.
+- Format `format!` and its variants using compressed style.
+- Format `write!` and its variants using compressed style.
+- Format **simple** array using compressed style.
+
+### Fixed
+
+- Fix `rustfmt --package package_name` not working properly.
+- Fix formatting bugs.
+
+## [0.2.17] 2017-12-03
+
+### Added
+
+- Add `blank_lines_lower_bound` and `blank_lines_upper_bound` configuration options.
+
+### Changed
+
+- Combine configuration options related to width heuristic into `width_heuristic`.
+- If the match arm's body is `if` expression, force to use block.
+
+### Fixed
+
+- Fix `cargo fmt --all` being trapped in an infinite loop.
+- Fix many formatting bugs.
+
+### Removed
+
+- Remove legacy configuration options.
+
+## [0.2.16] 2017-11-21
+
+### Added
+
+- Remove empty lines at the beginning of the file.
+- Soft wrapping on doc comments.
+
+### Changed
+
+- Break before `|` when using multiple lines for match arm patterns.
+- Combine `control_style`, `where_style` and `*_indent` config options into `indent_style`.
+- Combine `item_brace_style` and `fn_brace_style` config options into `brace_style`.
+- Combine config options related to spacing around colons into `space_before_colon` and `space_after_colon`.
+
+### Fixed
+
+- Fix many bugs.
+
+## [0.2.15] 2017-11-08
+
+### Added
+
+- Add git-fmt tool
+- `where_single_line` configuration option.
+
+### Changed
+
+- Rename `chain_one_line_max` to `chain_width`.
+- Change the suffix of indent-related configuration options to `_indent`.
+
+## [0.2.14] 2017-11-06
+
+### Fixed
+
+- Rustup to the latest nightly.
+
+## [0.2.13] 2017-10-30
+
+### Fixed
+
+- Rustup to the latest nightly.
+
+## [0.2.12] 2017-10-29
+
+### Fixed
+
+- Fix a bug that `cargo fmt` hangs forever.
+
+## [0.2.11] 2017-10-29
+
+### Fixed
+
+- Fix a bug that `cargo fmt` crashes.
+
+## [0.2.10] 2017-10-28
+
+## [0.2.9] 2017-10-16
+
+## [0.2.8] 2017-09-28
+
+## [0.2.7] 2017-09-21
+
+### Added
+
+- `binop_separator` configuration option (#1964).
+
+### Changed
+
+- Use horizontal layout for function call with a single argument.
+
+### Fixed
+
+- Fix panicking when calling `cargo fmt --all` (#1963).
+- Refactorings & faster rustfmt.
+
+## [0.2.6] 2017-09-14
+
+### Fixed
+
+- Fix a performance issue with nested block (#1940).
+- Refactorings & faster rustfmt.
+
+## [0.2.5] 2017-08-31
+
+### Added
+
+- Format and preserve attributes on statements (#1933).
+
+### Fixed
+
+- Use getters to access `Span` fields (#1899).
+
+## [0.2.4] 2017-08-30
+
+### Added
+
+- Add support for `Yield` (#1928).
+
+## [0.2.3] 2017-08-30
+
+### Added
+
+- `multiline_closure_forces_block` configuration option (#1898).
+- `multiline_match_arm_forces_block` configuration option (#1898).
+- `merge_derives` configuration option (#1910).
+- `struct_remove_empty_braces` configuration option (#1930).
+- Various refactorings.
+
+### Changed
+
+- Put single-lined block comments on the same line with list-like structure's item (#1923).
+- Preserve blank line between doc comment and attribute (#1925).
+- Put the opening and the closing braces of enum and struct on the same line, even when `item_brace_style = "AlwaysNextLine"` (#1930).
+
+### Fixed
+
+- Format attributes on `ast::ForeignItem` and take max width into account (#1916).
+- Ignore empty lines when calculating the shortest indent width inside macro with braces (#1918).
+- Handle tabs properly inside macro with braces (#1918).
+- Fix a typo in `compute_budgets_for_args()` (#1924).
+- Recover comment between keyword (`impl` and `trait`) and `{` which used to get removed (#1925).
+
+
+[install-from-source]: https://github.com/rust-lang/rustfmt#installing-from-source
diff --git a/src/tools/rustfmt/CODE_OF_CONDUCT.md b/src/tools/rustfmt/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..d70b2b52a
--- /dev/null
+++ b/src/tools/rustfmt/CODE_OF_CONDUCT.md
@@ -0,0 +1,40 @@
+# The Rust Code of Conduct
+
+A version of this document [can be found online](https://www.rust-lang.org/conduct.html).
+
+## Conduct
+
+**Contact**: [rust-mods@rust-lang.org](mailto:rust-mods@rust-lang.org)
+
+* We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
+* On IRC, please avoid using overtly sexual nicknames or other nicknames that might detract from a friendly, safe and welcoming environment for all.
+* Please be kind and courteous. There's no need to be mean or rude.
+* Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
+* Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
+* We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term "harassment" as including the definition in the <a href="http://citizencodeofconduct.org/">Citizen Code of Conduct</a>; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
+* Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the [Rust moderation team][mod_team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
+* Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
+
+## Moderation
+
+
+These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the [Rust moderation team][mod_team].
+
+1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
+2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
+3. Moderators will first respond to such remarks with a warning.
+4. If the warning is unheeded, the user will be "kicked," i.e., kicked out of the communication channel to cool off.
+5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
+6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
+7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, **in private**. Complaints about bans in-channel are not allowed.
+8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
+
+In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
+
+And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
+
+The enforcement policies listed above apply to all official Rust venues; including official IRC channels (#rust, #rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
+
+*Adapted from the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
+
+[mod_team]: https://www.rust-lang.org/team.html#Moderation-team
diff --git a/src/tools/rustfmt/Cargo.lock b/src/tools/rustfmt/Cargo.lock
new file mode 100644
index 000000000..311df226d
--- /dev/null
+++ b/src/tools/rustfmt/Cargo.lock
@@ -0,0 +1,744 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "annotate-snippets"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36"
+dependencies = [
+ "unicode-width",
+ "yansi-term",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bstr"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "bytecount"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e"
+dependencies = [
+ "packed_simd_2",
+]
+
+[[package]]
+name = "camino"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f3132262930b0522068049f5870a856ab8affc80c70d08b6ecb785771a6fc23"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo-platform"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.14.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "3.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71c47df61d9e16dc010b55dba1952a57d8c215dbb533fd13cdd13369aac73b1c"
+dependencies = [
+ "atty",
+ "bitflags",
+ "clap_derive",
+ "indexmap",
+ "lazy_static",
+ "os_str_bytes",
+ "strsim",
+ "termcolor",
+ "textwrap",
+]
+
+[[package]]
+name = "clap_derive"
+version = "3.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3aab4734e083b809aaf5794e14e756d1c798d2c69c7f7de7a09a2f5214993c1"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
+dependencies = [
+ "cfg-if",
+ "lazy_static",
+]
+
+[[package]]
+name = "derive-new"
+version = "0.5.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3418329ca0ad70234b9735dc4ceed10af4df60eff9c8e7b06cb5e520d92c3535"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "diff"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
+
+[[package]]
+name = "dirs"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-next"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
+dependencies = [
+ "cfg-if",
+ "dirs-sys-next",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
+[[package]]
+name = "dirs-sys-next"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "env_logger"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
+dependencies = [
+ "atty",
+ "humantime",
+ "log",
+ "regex",
+ "termcolor",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "getopts"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "globset"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd"
+dependencies = [
+ "aho-corasick",
+ "bstr",
+ "fnv",
+ "log",
+ "regex",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "humantime"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+
+[[package]]
+name = "ignore"
+version = "0.4.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d"
+dependencies = [
+ "crossbeam-utils",
+ "globset",
+ "lazy_static",
+ "log",
+ "memchr",
+ "regex",
+ "same-file",
+ "thread_local",
+ "walkdir",
+ "winapi-util",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.122"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec647867e2bf0772e28c8bcde4f0d19a9216916e890543b5a03ed8ef27b8f259"
+
+[[package]]
+name = "libm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fc7aa29613bd6a620df431842069224d8bc9011086b1db4c0e0cd47fa03ec9a"
+
+[[package]]
+name = "log"
+version = "0.4.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "once_cell"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
+
+[[package]]
+name = "os_str_bytes"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "packed_simd_2"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "defdcfef86dcc44ad208f71d9ff4ce28df6537a4e0d6b0e8e845cb8ca10059a6"
+dependencies = [
+ "cfg-if",
+ "libm",
+]
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "632d02bff7f874a36f33ea8bb416cd484b90cc66c1194b1a1110d067a7013f58"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+dependencies = [
+ "getrandom",
+ "redox_syscall",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "rustc-workspace-hack"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc71d2faa173b74b232dedc235e3ee1696581bb132fc116fa3626d6151a1a8fb"
+
+[[package]]
+name = "rustfmt-config_proc_macro"
+version = "0.2.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "rustfmt-nightly"
+version = "1.5.1"
+dependencies = [
+ "annotate-snippets",
+ "anyhow",
+ "bytecount",
+ "cargo_metadata",
+ "clap",
+ "derive-new",
+ "diff",
+ "dirs",
+ "env_logger",
+ "getopts",
+ "ignore",
+ "itertools",
+ "lazy_static",
+ "log",
+ "regex",
+ "rustc-workspace-hack",
+ "rustfmt-config_proc_macro",
+ "serde",
+ "serde_json",
+ "term",
+ "thiserror",
+ "toml",
+ "unicode-segmentation",
+ "unicode-width",
+ "unicode_categories",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d65bd28f48be7196d222d95b9243287f48d27aca604e08497513019ff0502cc4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "strsim"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+
+[[package]]
+name = "syn"
+version = "1.0.91"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "term"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
+dependencies = [
+ "dirs-next",
+ "rustversion",
+ "winapi",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
+
+[[package]]
+name = "thiserror"
+version = "1.0.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "toml"
+version = "0.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "unicode_categories"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "yansi-term"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe5c30ade05e61656247b2e334a031dfd0cc466fadef865bdcdea8d537951bf1"
+dependencies = [
+ "winapi",
+]
diff --git a/src/tools/rustfmt/Cargo.toml b/src/tools/rustfmt/Cargo.toml
new file mode 100644
index 000000000..7a4e02d69
--- /dev/null
+++ b/src/tools/rustfmt/Cargo.toml
@@ -0,0 +1,71 @@
+[package]
+
+name = "rustfmt-nightly"
+version = "1.5.1"
+description = "Tool to find and fix Rust formatting issues"
+repository = "https://github.com/rust-lang/rustfmt"
+readme = "README.md"
+license = "Apache-2.0/MIT"
+build = "build.rs"
+categories = ["development-tools"]
+edition = "2021"
+
+[[bin]]
+name = "rustfmt"
+path = "src/bin/main.rs"
+
+[[bin]]
+name = "cargo-fmt"
+path = "src/cargo-fmt/main.rs"
+
+[[bin]]
+name = "rustfmt-format-diff"
+path = "src/format-diff/main.rs"
+
+[[bin]]
+name = "git-rustfmt"
+path = "src/git-rustfmt/main.rs"
+
+[features]
+default = ["cargo-fmt", "rustfmt-format-diff"]
+cargo-fmt = []
+rustfmt-format-diff = []
+generic-simd = ["bytecount/generic-simd"]
+
+[dependencies]
+annotate-snippets = { version = "0.9", features = ["color"] }
+anyhow = "1.0"
+bytecount = "0.6"
+cargo_metadata = "0.14"
+clap = { version = "3.1", features = ["derive"] }
+derive-new = "0.5"
+diff = "0.1"
+dirs = "4.0"
+env_logger = "0.9"
+getopts = "0.2"
+ignore = "0.4"
+itertools = "0.10"
+lazy_static = "1.4"
+log = "0.4"
+regex = "1.5"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+term = "0.7"
+thiserror = "1.0"
+toml = "0.5"
+unicode-segmentation = "1.9"
+unicode-width = "0.1"
+unicode_categories = "0.1"
+
+rustfmt-config_proc_macro = { version = "0.2", path = "config_proc_macro" }
+
+# A noop dependency that changes in the Rust repository; it's a bit of a hack.
+# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust`
+# for more information.
+rustc-workspace-hack = "1.0.0"
+
+# Rustc dependencies are loaded from the sysroot, Cargo doesn't know about them.
+
+[package.metadata.rust-analyzer]
+# This package uses #[feature(rustc_private)]
+rustc_private = true
diff --git a/src/tools/rustfmt/Configurations.md b/src/tools/rustfmt/Configurations.md
new file mode 100644
index 000000000..8b96b9d36
--- /dev/null
+++ b/src/tools/rustfmt/Configurations.md
@@ -0,0 +1,2865 @@
+# Configuring Rustfmt
+
+Rustfmt is designed to be very configurable. You can create a TOML file called `rustfmt.toml` or `.rustfmt.toml`, place it in the project directory or in any of its parent directories, and rustfmt will apply the options in that file. If none of these directories contain such a file, both your home directory and a directory called `rustfmt` in your [global config directory](https://docs.rs/dirs/1.0.4/dirs/fn.config_dir.html) (e.g. `.config/rustfmt/`) are checked as well.
+
+A possible content of `rustfmt.toml` or `.rustfmt.toml` might look like this:
+
+```toml
+indent_style = "Block"
+reorder_imports = false
+```
+
+Each configuration option is either stable or unstable.
+Stable options can always be used, while unstable options are only available on a nightly toolchain and must be opted into.
+To enable unstable options, set `unstable_features = true` in `rustfmt.toml` or pass `--unstable-features` to rustfmt.
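+
+For example, a nightly-only configuration that opts into an unstable option might look like this:
+
+```toml
+unstable_features = true
+wrap_comments = true
+```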
+
+# Configuration Options
+
+Below is a detailed visual guide to all the supported configuration options of rustfmt:
+
+## `array_width`
+
+Maximum width of an array literal before falling back to vertical formatting.
+
+- **Default value**: `60`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `array_width` will take precedence.
+
+See also [`max_width`](#max_width) and [`use_small_heuristics`](#use_small_heuristics)
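+
+As a rough sketch of the effect (the exact output also depends on the other width settings), an
+array literal wider than `array_width` falls back to vertical formatting:
+
+```rust
+fn main() {
+    // Fits within `array_width`, so it stays on one line.
+    let short = ["ipsum", "dolor", "sit", "amet"];
+
+    // Wider than `array_width`, so it is formatted vertically.
+    let long = [
+        "lorem ipsum dolor sit amet",
+        "consectetur adipiscing elit",
+        "sed do eiusmod tempor incididunt",
+    ];
+}
+```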
+
+## `attr_fn_like_width`
+
+Maximum width of the args of function-like attributes before falling back to vertical formatting.
+
+- **Default value**: `70`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `attr_fn_like_width` will take precedence.
+
+See also [`max_width`](#max_width) and [`use_small_heuristics`](#use_small_heuristics)
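+
+A rough sketch of the effect, using a hypothetical `#[some_attribute(...)]` as the function-like
+attribute (the exact output also depends on the other width settings):
+
+```rust
+// Fits within `attr_fn_like_width`, so the args stay on one line.
+#[some_attribute(lorem, ipsum, dolor)]
+fn short() {}
+
+// Wider than `attr_fn_like_width`, so the args fall back to vertical formatting.
+#[some_attribute(
+    lorem_ipsum_dolor_sit_amet,
+    consectetur_adipiscing_elit,
+    sed_do_eiusmod_tempor_incididunt
+)]
+fn long() {}
+```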
+
+## `binop_separator`
+
+Where to put a binary operator when a binary expression goes multiline.
+
+- **Default value**: `"Front"`
+- **Possible values**: `"Front"`, `"Back"`
+- **Stable**: No (tracking issue: [#3368](https://github.com/rust-lang/rustfmt/issues/3368))
+
+#### `"Front"` (default):
+
+```rust
+fn main() {
+ let or = foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo
+ || barbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar;
+
+ let sum = 123456789012345678901234567890
+ + 123456789012345678901234567890
+ + 123456789012345678901234567890;
+
+ let range = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ ..bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+}
+```
+
+#### `"Back"`:
+
+```rust
+fn main() {
+ let or = foofoofoofoofoofoofoofoofoofoofoofoofoofoofoofoo ||
+ barbarbarbarbarbarbarbarbarbarbarbarbarbarbarbar;
+
+ let sum = 123456789012345678901234567890 +
+ 123456789012345678901234567890 +
+ 123456789012345678901234567890;
+
+ let range = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa..
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+}
+```
+
+## `blank_lines_lower_bound`
+
+Minimum number of blank lines which must be put between items. If two items have fewer blank lines between
+them, additional blank lines are inserted.
+
+- **Default value**: `0`
+- **Possible values**: *unsigned integer*
+- **Stable**: No (tracking issue: [#3382](https://github.com/rust-lang/rustfmt/issues/3382))
+
+### Example
+Original Code (rustfmt will not change it with the default value of `0`):
+
+```rust
+#![rustfmt::skip]
+
+fn foo() {
+ println!("a");
+}
+fn bar() {
+ println!("b");
+ println!("c");
+}
+```
+
+#### `1`
+```rust
+fn foo() {
+
+ println!("a");
+}
+
+fn bar() {
+
+ println!("b");
+
+ println!("c");
+}
+```
+
+
+## `blank_lines_upper_bound`
+
+Maximum number of blank lines which can be put between items. If more than this number of consecutive empty
+lines are found, they are trimmed down to match this integer.
+
+- **Default value**: `1`
+- **Possible values**: any non-negative integer
+- **Stable**: No (tracking issue: [#3381](https://github.com/rust-lang/rustfmt/issues/3381))
+
+### Example
+Original Code:
+
+```rust
+#![rustfmt::skip]
+
+fn foo() {
+ println!("a");
+}
+
+
+
+fn bar() {
+ println!("b");
+
+
+ println!("c");
+}
+```
+
+#### `1` (default):
+```rust
+fn foo() {
+ println!("a");
+}
+
+fn bar() {
+ println!("b");
+
+ println!("c");
+}
+```
+
+#### `2`:
+```rust
+fn foo() {
+ println!("a");
+}
+
+
+fn bar() {
+ println!("b");
+
+
+ println!("c");
+}
+```
+
+See also: [`blank_lines_lower_bound`](#blank_lines_lower_bound)
+
+## `brace_style`
+
+Brace style for items
+
+- **Default value**: `"SameLineWhere"`
+- **Possible values**: `"AlwaysNextLine"`, `"PreferSameLine"`, `"SameLineWhere"`
+- **Stable**: No (tracking issue: [#3376](https://github.com/rust-lang/rustfmt/issues/3376))
+
+### Functions
+
+#### `"SameLineWhere"` (default):
+
+```rust
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T)
+where
+ T: Add + Sub + Mul + Div,
+{
+ // body
+}
+```
+
+#### `"AlwaysNextLine"`:
+
+```rust
+fn lorem()
+{
+ // body
+}
+
+fn lorem(ipsum: usize)
+{
+ // body
+}
+
+fn lorem<T>(ipsum: T)
+where
+ T: Add + Sub + Mul + Div,
+{
+ // body
+}
+```
+
+#### `"PreferSameLine"`:
+
+```rust
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T)
+where
+ T: Add + Sub + Mul + Div, {
+ // body
+}
+```
+
+### Structs and enums
+
+#### `"SameLineWhere"` (default):
+
+```rust
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T>
+where
+ T: Eq,
+{
+ sit: T,
+}
+```
+
+#### `"AlwaysNextLine"`:
+
+```rust
+struct Lorem
+{
+ ipsum: bool,
+}
+
+struct Dolor<T>
+where
+ T: Eq,
+{
+ sit: T,
+}
+```
+
+#### `"PreferSameLine"`:
+
+```rust
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T>
+where
+ T: Eq, {
+ sit: T,
+}
+```
+
+## `chain_width`
+
+Maximum width of a chain to fit on one line.
+
+- **Default value**: `60`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `chain_width` will take precedence.
+
+See also [`max_width`](#max_width) and [`use_small_heuristics`](#use_small_heuristics)
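+
+A rough sketch of the effect (the exact breaking point also depends on `max_width` and the current
+indentation); the method names here are only illustrative:
+
+```rust
+fn main() {
+    // Fits within `chain_width`, so the chain stays on one line.
+    let short = lorem.ipsum().dolor().sit();
+
+    // Wider than `chain_width`, so each element of the chain goes on its own line.
+    let long = lorem
+        .ipsum_dolor_sit_amet()
+        .consectetur_adipiscing_elit()
+        .sed_do_eiusmod_tempor_incididunt();
+}
+```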
+
+## `color`
+
+Whether to use colored output or not.
+
+- **Default value**: `"Auto"`
+- **Possible values**: "Auto", "Always", "Never"
+- **Stable**: No (tracking issue: [#3385](https://github.com/rust-lang/rustfmt/issues/3385))
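+
+For example, to always emit colored output, even when it is piped to another program:
+
+```toml
+color = "Always"
+```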
+
+## `combine_control_expr`
+
+Combine control expressions with function calls.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3369](https://github.com/rust-lang/rustfmt/issues/3369))
+
+#### `true` (default):
+
+```rust
+fn example() {
+ // If
+ foo!(if x {
+ foo();
+ } else {
+ bar();
+ });
+
+ // IfLet
+ foo!(if let Some(..) = x {
+ foo();
+ } else {
+ bar();
+ });
+
+ // While
+ foo!(while x {
+ foo();
+ bar();
+ });
+
+ // WhileLet
+ foo!(while let Some(..) = x {
+ foo();
+ bar();
+ });
+
+ // ForLoop
+ foo!(for x in y {
+ foo();
+ bar();
+ });
+
+ // Loop
+ foo!(loop {
+ foo();
+ bar();
+ });
+}
+```
+
+#### `false`:
+
+```rust
+fn example() {
+ // If
+ foo!(
+ if x {
+ foo();
+ } else {
+ bar();
+ }
+ );
+
+ // IfLet
+ foo!(
+ if let Some(..) = x {
+ foo();
+ } else {
+ bar();
+ }
+ );
+
+ // While
+ foo!(
+ while x {
+ foo();
+ bar();
+ }
+ );
+
+ // WhileLet
+ foo!(
+ while let Some(..) = x {
+ foo();
+ bar();
+ }
+ );
+
+ // ForLoop
+ foo!(
+ for x in y {
+ foo();
+ bar();
+ }
+ );
+
+ // Loop
+ foo!(
+ loop {
+ foo();
+ bar();
+ }
+ );
+}
+```
+
+## `comment_width`
+
+Maximum length of comments. No effect unless `wrap_comments = true`.
+
+- **Default value**: `80`
+- **Possible values**: any positive integer
+- **Stable**: No (tracking issue: [#3349](https://github.com/rust-lang/rustfmt/issues/3349))
+
+**Note:** A value of `0` results in [`wrap_comments`](#wrap_comments) being applied regardless of a line's width.
+
+#### `80` (default; comments shorter than `comment_width`):
+```rust
+// Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+```
+
+#### `60` (comments longer than `comment_width`):
+```rust
+// Lorem ipsum dolor sit amet,
+// consectetur adipiscing elit.
+```
+
+See also [`wrap_comments`](#wrap_comments).
+
+## `condense_wildcard_suffixes`
+
+Replace strings of `_` wildcards with a single `..` in tuple patterns
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3384](https://github.com/rust-lang/rustfmt/issues/3384))
+
+#### `false` (default):
+
+```rust
+fn main() {
+ let (lorem, ipsum, _, _) = (1, 2, 3, 4);
+ let (lorem, ipsum, ..) = (1, 2, 3, 4);
+}
+```
+
+#### `true`:
+
+```rust
+fn main() {
+ let (lorem, ipsum, ..) = (1, 2, 3, 4);
+}
+```
+
+## `control_brace_style`
+
+Brace style for control flow constructs
+
+- **Default value**: `"AlwaysSameLine"`
+- **Possible values**: `"AlwaysNextLine"`, `"AlwaysSameLine"`, `"ClosingNextLine"`
+- **Stable**: No (tracking issue: [#3377](https://github.com/rust-lang/rustfmt/issues/3377))
+
+#### `"AlwaysSameLine"` (default):
+
+```rust
+fn main() {
+ if lorem {
+ println!("ipsum!");
+ } else {
+ println!("dolor!");
+ }
+}
+```
+
+#### `"AlwaysNextLine"`:
+
+```rust
+fn main() {
+ if lorem
+ {
+ println!("ipsum!");
+ }
+ else
+ {
+ println!("dolor!");
+ }
+}
+```
+
+#### `"ClosingNextLine"`:
+
+```rust
+fn main() {
+ if lorem {
+ println!("ipsum!");
+ }
+ else {
+ println!("dolor!");
+ }
+}
+```
+
+## `disable_all_formatting`
+
+Don't reformat anything.
+
+Note that this option may be soft-deprecated in the future once the [ignore](#ignore) option is stabilized. Nightly toolchain users are encouraged to use [ignore](#ignore) instead when possible.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
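+
+For example, to turn formatting off for an entire project, set it in `rustfmt.toml`:
+
+```toml
+disable_all_formatting = true
+```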
+
+## `edition`
+
+Specifies which edition is used by the parser.
+
+- **Default value**: `"2015"`
+- **Possible values**: `"2015"`, `"2018"`, `"2021"`
+- **Stable**: Yes
+
+Rustfmt is able to pick up the edition used by reading the `Cargo.toml` file if executed
+through Cargo's formatting tool `cargo fmt`. Otherwise, the edition needs to be specified
+in your config file:
+
+```toml
+edition = "2018"
+```
+
+## `empty_item_single_line`
+
+Put empty-body functions and impls on a single line
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3356](https://github.com/rust-lang/rustfmt/issues/3356))
+
+#### `true` (default):
+
+```rust
+fn lorem() {}
+
+impl Lorem {}
+```
+
+#### `false`:
+
+```rust
+fn lorem() {
+}
+
+impl Lorem {
+}
+```
+
+See also [`brace_style`](#brace_style), [`control_brace_style`](#control_brace_style).
+
+
+## `enum_discrim_align_threshold`
+
+The maximum length of an enum variant with a discriminant that gets vertically aligned with the others.
+Variants without discriminants are ignored for the purpose of alignment.
+
+Note that this is not the amount of whitespace inserted, but rather the longest variant name that
+still participates in alignment; variants with longer names are left unaligned.
+
+- **Default value** : 0
+- **Possible values**: any positive integer
+- **Stable**: No (tracking issue: [#3372](https://github.com/rust-lang/rustfmt/issues/3372))
+
+#### `0` (default):
+
+```rust
+enum Bar {
+ A = 0,
+ Bb = 1,
+ RandomLongVariantGoesHere = 10,
+ Ccc = 71,
+}
+
+enum Bar {
+ VeryLongVariantNameHereA = 0,
+ VeryLongVariantNameHereBb = 1,
+ VeryLongVariantNameHereCcc = 2,
+}
+```
+
+#### `20`:
+
+```rust
+enum Foo {
+    A   = 0,
+    Bb  = 1,
+    RandomLongVariantGoesHere = 10,
+    Ccc = 2,
+}
+
+enum Bar {
+ VeryLongVariantNameHereA = 0,
+ VeryLongVariantNameHereBb = 1,
+ VeryLongVariantNameHereCcc = 2,
+}
+```
+
+
+## `error_on_line_overflow`
+
+Error if Rustfmt is unable to get all lines within `max_width`, except for comments and string
+literals. If this happens, then it is a bug in Rustfmt. You might be able to work around the bug by
+refactoring your code to avoid long/complex expressions, usually by extracting a local variable or
+using a shorter name.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3391](https://github.com/rust-lang/rustfmt/issues/3391))
+
+See also [`max_width`](#max_width).
+
+## `error_on_unformatted`
+
+Error if unable to get comments or string literals within `max_width`, or they are left with
+trailing whitespaces.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3392](https://github.com/rust-lang/rustfmt/issues/3392))
+
+## `fn_args_layout`
+
+Control the layout of arguments in a function
+
+- **Default value**: `"Tall"`
+- **Possible values**: `"Compressed"`, `"Tall"`, `"Vertical"`
+- **Stable**: Yes
+
+#### `"Tall"` (default):
+
+```rust
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: Consectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ );
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: Consectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ ) {
+ // body
+ }
+}
+```
+
+#### `"Compressed"`:
+
+```rust
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(
+ ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: Consectetur,
+ adipiscing: Adipiscing, elit: Elit,
+ );
+
+ fn lorem(
+ ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: Consectetur,
+ adipiscing: Adipiscing, elit: Elit,
+ ) {
+ // body
+ }
+}
+```
+
+#### `"Vertical"`:
+
+```rust
+trait Lorem {
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ );
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ ) {
+ // body
+ }
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: Consectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ );
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: Consectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ ) {
+ // body
+ }
+}
+```
+
+## `fn_call_width`
+
+Maximum width of the args of a function call before falling back to vertical formatting.
+
+- **Default value**: `60`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `fn_call_width` will take precedence.
+
+See also [`max_width`](#max_width) and [`use_small_heuristics`](#use_small_heuristics)
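+
+A rough sketch of the effect (the exact output also depends on the other width settings):
+
+```rust
+fn main() {
+    // Fits within `fn_call_width`, so the arguments stay on one line.
+    lorem("ipsum", "dolor", "sit", "amet");
+
+    // Wider than `fn_call_width`, so the arguments are formatted vertically.
+    lorem(
+        "ipsum",
+        "dolor",
+        "sit",
+        "amet",
+        "consectetur",
+        "adipiscing",
+        "elit",
+    );
+}
+```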
+
+## `fn_single_line`
+
+Put single-expression functions on a single line
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3358](https://github.com/rust-lang/rustfmt/issues/3358))
+
+#### `false` (default):
+
+```rust
+fn lorem() -> usize {
+ 42
+}
+
+fn lorem() -> usize {
+ let ipsum = 42;
+ ipsum
+}
+```
+
+#### `true`:
+
+```rust
+fn lorem() -> usize { 42 }
+
+fn lorem() -> usize {
+ let ipsum = 42;
+ ipsum
+}
+```
+
+See also [`control_brace_style`](#control_brace_style).
+
+
+## `force_explicit_abi`
+
+Always print the abi for extern items
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+**Note:** Non-"C" ABIs are always printed. If `false` then "C" is removed.
+
+#### `true` (default):
+
+```rust
+extern "C" {
+ pub static lorem: c_int;
+}
+```
+
+#### `false`:
+
+```rust
+extern {
+ pub static lorem: c_int;
+}
+```
+
+## `force_multiline_blocks`
+
+Force multiline closure and match arm bodies to be wrapped in a block
+
+- **Default value**: `false`
+- **Possible values**: `false`, `true`
+- **Stable**: No (tracking issue: [#3374](https://github.com/rust-lang/rustfmt/issues/3374))
+
+#### `false` (default):
+
+```rust
+fn main() {
+ result.and_then(|maybe_value| match maybe_value {
+ None => foo(),
+ Some(value) => bar(),
+ });
+
+ match lorem {
+ None => |ipsum| {
+ println!("Hello World");
+ },
+ Some(dolor) => foo(),
+ }
+}
+```
+
+#### `true`:
+
+```rust
+fn main() {
+ result.and_then(|maybe_value| {
+ match maybe_value {
+ None => foo(),
+ Some(value) => bar(),
+ }
+ });
+
+ match lorem {
+ None => {
+ |ipsum| {
+ println!("Hello World");
+ }
+ }
+ Some(dolor) => foo(),
+ }
+}
+```
+
+
+## `format_code_in_doc_comments`
+
+Format code snippets included in doc comments.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3348](https://github.com/rust-lang/rustfmt/issues/3348))
+
+#### `false` (default):
+
+```rust
+/// Adds one to the number given.
+///
+/// # Examples
+///
+/// ```rust
+/// let five=5;
+///
+/// assert_eq!(
+/// 6,
+/// add_one(5)
+/// );
+/// # fn add_one(x: i32) -> i32 {
+/// # x + 1
+/// # }
+/// ```
+fn add_one(x: i32) -> i32 {
+ x + 1
+}
+```
+
+#### `true`
+
+```rust
+/// Adds one to the number given.
+///
+/// # Examples
+///
+/// ```rust
+/// let five = 5;
+///
+/// assert_eq!(6, add_one(5));
+/// # fn add_one(x: i32) -> i32 {
+/// # x + 1
+/// # }
+/// ```
+fn add_one(x: i32) -> i32 {
+ x + 1
+}
+```
+
+## `doc_comment_code_block_width`
+
+Max width for code snippets included in doc comments. Only used if [`format_code_in_doc_comments`](#format_code_in_doc_comments) is true.
+
+- **Default value**: `100`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: No (tracking issue: [#5359](https://github.com/rust-lang/rustfmt/issues/5359))
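+
+For example, to format doc-comment snippets but keep them narrower than ordinary code:
+
+```toml
+format_code_in_doc_comments = true
+doc_comment_code_block_width = 80
+```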
+
+## `format_generated_files`
+
+Format generated files. A file is considered generated
+if any of the first five lines contain a `@generated` comment marker.
+By default, generated files are reformatted, i.e. the `@generated` marker is ignored.
+This option is currently ignored for stdin (`@generated` in stdin is ignored).
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#5080](https://github.com/rust-lang/rustfmt/issues/5080))
+
+## `format_macro_matchers`
+
+Format the metavariable matching patterns in macros.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3354](https://github.com/rust-lang/rustfmt/issues/3354))
+
+#### `false` (default):
+
+```rust
+macro_rules! foo {
+ ($a: ident : $b: ty) => {
+ $a(42): $b;
+ };
+ ($a: ident $b: ident $c: ident) => {
+ $a = $b + $c;
+ };
+}
+```
+
+#### `true`:
+
+```rust
+macro_rules! foo {
+ ($a:ident : $b:ty) => {
+ $a(42): $b;
+ };
+ ($a:ident $b:ident $c:ident) => {
+ $a = $b + $c;
+ };
+}
+```
+
+See also [`format_macro_bodies`](#format_macro_bodies).
+
+
+## `format_macro_bodies`
+
+Format the bodies of macros.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3355](https://github.com/rust-lang/rustfmt/issues/3355))
+
+#### `true` (default):
+
+```rust
+macro_rules! foo {
+ ($a: ident : $b: ty) => {
+ $a(42): $b;
+ };
+ ($a: ident $b: ident $c: ident) => {
+ $a = $b + $c;
+ };
+}
+```
+
+#### `false`:
+
+```rust
+macro_rules! foo {
+ ($a: ident : $b: ty) => { $a(42): $b; };
+ ($a: ident $b: ident $c: ident) => { $a=$b+$c; };
+}
+```
+
+See also [`format_macro_matchers`](#format_macro_matchers).
+
+
+## `format_strings`
+
+Format string literals where necessary
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3353](https://github.com/rust-lang/rustfmt/issues/3353))
+
+#### `false` (default):
+
+```rust
+fn main() {
+ let lorem = "ipsum dolor sit amet consectetur adipiscing elit lorem ipsum dolor sit amet consectetur adipiscing";
+}
+```
+
+#### `true`:
+
+```rust
+fn main() {
+ let lorem = "ipsum dolor sit amet consectetur adipiscing elit lorem ipsum dolor sit amet \
+ consectetur adipiscing";
+}
+```
+
+See also [`max_width`](#max_width).
+
+## `hard_tabs`
+
+Use tab characters for indentation, spaces for alignment
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `false` (default):
+
+```rust
+fn lorem() -> usize {
+ 42 // spaces before 42
+}
+```
+
+#### `true`:
+
+```rust
+fn lorem() -> usize {
+ 42 // tabs before 42
+}
+```
+
+See also: [`tab_spaces`](#tab_spaces).
+
+## `hex_literal_case`
+
+Control the case of the letters in hexadecimal literal values
+
+- **Default value**: `Preserve`
+- **Possible values**: `Preserve`, `Upper`, `Lower`
+- **Stable**: No (tracking issue: [#5081](https://github.com/rust-lang/rustfmt/issues/5081))
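+
+For illustration, given a literal written as `0xabcDEF`:
+
+```rust
+// `Preserve` (default): the literal is left as written.
+let color = 0xabcDEF;
+
+// `Upper`: the hex digits are uppercased (the `0x` prefix stays lowercase).
+let color = 0xABCDEF;
+
+// `Lower`: the hex digits are lowercased.
+let color = 0xabcdef;
+```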
+
+## `hide_parse_errors`
+
+Do not show parse errors if the parser failed to parse files.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3390](https://github.com/rust-lang/rustfmt/issues/3390))
+
+## `ignore`
+
+Skip formatting files and directories that match the specified pattern.
+The pattern format is the same as [.gitignore](https://git-scm.com/docs/gitignore#_pattern_format). Be sure to use Unix/forwardslash `/` style paths. This path style will work on all platforms. Windows style paths with backslashes `\` are not supported.
+
+- **Default value**: format every file
+- **Possible values**: See an example below
+- **Stable**: No (tracking issue: [#3395](https://github.com/rust-lang/rustfmt/issues/3395))
+
+### Example
+
+If you want to ignore specific files, add the following to your config file:
+
+```toml
+ignore = [
+ "src/types.rs",
+ "src/foo/bar.rs",
+]
+```
+
+If you want to ignore every file under `examples/`, add the following to your config file:
+
+```toml
+ignore = [
+ "examples",
+]
+```
+
+If you want to ignore every file under the directory where you put your rustfmt.toml:
+
+```toml
+ignore = ["/"]
+```
+
+## `imports_indent`
+
+Indent style of imports
+
+- **Default Value**: `"Block"`
+- **Possible values**: `"Block"`, `"Visual"`
+- **Stable**: No (tracking issue: [#3360](https://github.com/rust-lang/rustfmt/issues/3360))
+
+#### `"Block"` (default):
+
+```rust
+use foo::{
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
+};
+```
+
+#### `"Visual"`:
+
+```rust
+use foo::{xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+          zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz};
+```
+
+See also: [`imports_layout`](#imports_layout).
+
+## `imports_layout`
+
+Item layout inside an imports block
+
+- **Default value**: "Mixed"
+- **Possible values**: "Horizontal", "HorizontalVertical", "Mixed", "Vertical"
+- **Stable**: No (tracking issue: [#3361](https://github.com/rust-lang/rustfmt/issues/3361))
+
+#### `"Mixed"` (default):
+
+```rust
+use foo::{xxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzz};
+
+use foo::{
+ aaaaaaaaaaaaaaaaaa, bbbbbbbbbbbbbbbbbb, cccccccccccccccccc, dddddddddddddddddd,
+ eeeeeeeeeeeeeeeeee, ffffffffffffffffff,
+};
+```
+
+#### `"Horizontal"`:
+
+**Note**: This option forces all imports onto one line and may exceed `max_width`.
+
+```rust
+use foo::{xxx, yyy, zzz};
+
+use foo::{aaa, bbb, ccc, ddd, eee, fff};
+```
+
+#### `"HorizontalVertical"`:
+
+```rust
+use foo::{xxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzz};
+
+use foo::{
+ aaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbb,
+ cccccccccccccccccc,
+ dddddddddddddddddd,
+ eeeeeeeeeeeeeeeeee,
+ ffffffffffffffffff,
+};
+```
+
+#### `"Vertical"`:
+
+```rust
+use foo::{
+ xxx,
+ yyy,
+ zzz,
+};
+
+use foo::{
+ aaa,
+ bbb,
+ ccc,
+ ddd,
+ eee,
+ fff,
+};
+```
+
+## `indent_style`
+
+Indent on expressions or items.
+
+- **Default value**: `"Block"`
+- **Possible values**: `"Block"`, `"Visual"`
+- **Stable**: No (tracking issue: [#3346](https://github.com/rust-lang/rustfmt/issues/3346))
+
+### Array
+
+#### `"Block"` (default):
+
+```rust
+fn main() {
+ let lorem = vec![
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit",
+ ];
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn main() {
+    let lorem = vec!["ipsum",
+                     "dolor",
+                     "sit",
+                     "amet",
+                     "consectetur",
+                     "adipiscing",
+                     "elit"];
+}
+```
+
+### Control flow
+
+#### `"Block"` (default):
+
+```rust
+fn main() {
+ if lorem_ipsum
+ && dolor_sit
+ && amet_consectetur
+ && lorem_sit
+ && dolor_consectetur
+ && amet_ipsum
+ && lorem_consectetur
+ {
+ // ...
+ }
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn main() {
+ if lorem_ipsum
+ && dolor_sit
+ && amet_consectetur
+ && lorem_sit
+ && dolor_consectetur
+ && amet_ipsum
+ && lorem_consectetur
+ {
+ // ...
+ }
+}
+```
+
+See also: [`control_brace_style`](#control_brace_style).
+
+### Function arguments
+
+#### `"Block"` (default):
+
+```rust
+fn lorem() {}
+
+fn lorem(ipsum: usize) {}
+
+fn lorem(
+ ipsum: usize,
+ dolor: usize,
+ sit: usize,
+ amet: usize,
+ consectetur: usize,
+ adipiscing: usize,
+ elit: usize,
+) {
+ // body
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn lorem() {}
+
+fn lorem(ipsum: usize) {}
+
+fn lorem(ipsum: usize,
+         dolor: usize,
+         sit: usize,
+         amet: usize,
+         consectetur: usize,
+         adipiscing: usize,
+         elit: usize) {
+ // body
+}
+```
+
+### Function calls
+
+#### `"Block"` (default):
+
+```rust
+fn main() {
+ lorem(
+ "lorem",
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit",
+ );
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn main() {
+ lorem("lorem",
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit");
+}
+```
+
+### Generics
+
+#### `"Block"` (default):
+
+```rust
+fn lorem<
+ Ipsum: Eq = usize,
+ Dolor: Eq = usize,
+ Sit: Eq = usize,
+ Amet: Eq = usize,
+ Adipiscing: Eq = usize,
+ Consectetur: Eq = usize,
+ Elit: Eq = usize,
+>(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ adipiscing: Adipiscing,
+ consectetur: Consectetur,
+ elit: Elit,
+) -> T {
+ // body
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn lorem<Ipsum: Eq = usize,
+ Dolor: Eq = usize,
+ Sit: Eq = usize,
+ Amet: Eq = usize,
+ Adipiscing: Eq = usize,
+ Consectetur: Eq = usize,
+ Elit: Eq = usize>(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ adipiscing: Adipiscing,
+ consectetur: Consectetur,
+ elit: Elit)
+ -> T {
+ // body
+}
+```
+
+### Struct
+
+#### `"Block"` (default):
+
+```rust
+fn main() {
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn main() {
+    let lorem = Lorem { ipsum: dolor,
+                        sit: amet };
+}
+```
+
+See also: [`struct_lit_single_line`](#struct_lit_single_line), [`indent_style`](#indent_style).
+
+### Where predicates
+
+#### `"Block"` (default):
+
+```rust
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+where
+ Ipsum: Eq,
+ Dolor: Eq,
+ Sit: Eq,
+ Amet: Eq,
+{
+ // body
+}
+```
+
+#### `"Visual"`:
+
+```rust
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+    where Ipsum: Eq,
+          Dolor: Eq,
+          Sit: Eq,
+          Amet: Eq
+{
+ // body
+}
+```
+
+## `inline_attribute_width`
+
+Write an item and its attribute on the same line if their combined width is below a threshold
+
+- **Default value**: 0
+- **Possible values**: any positive integer
+- **Stable**: No (tracking issue: [#3343](https://github.com/rust-lang/rustfmt/issues/3343))
+
+### Example
+
+#### `0` (default):
+```rust
+#[cfg(feature = "alloc")]
+use core::slice;
+```
+
+#### `50`:
+```rust
+#[cfg(feature = "alloc")] use core::slice;
+```
+
+## `match_arm_blocks`
+
+Controls whether arm bodies are wrapped in cases where the first line of the body cannot fit on the same line as the `=>` operator.
+
+The Style Guide requires that bodies are block-wrapped by default if a line break is required after the `=>`, but this option can be used to disable that wrapping, so long as the body does not contain multiple statements or line comments.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3373](https://github.com/rust-lang/rustfmt/issues/3373))
+
+#### `true` (default):
+
+```rust
+fn main() {
+ match lorem {
+ ipsum => {
+ foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(x)
+ }
+ dolor => println!("{}", sit),
+ sit => foo(
+ "foooooooooooooooooooooooo",
+ "baaaaaaaaaaaaaaaaaaaaaaaarr",
+ "baaaaaaaaaaaaaaaaaaaazzzzzzzzzzzzz",
+ "qqqqqqqqquuuuuuuuuuuuuuuuuuuuuuuuuuxxx",
+ ),
+ }
+}
+```
+
+#### `false`:
+
+```rust
+fn main() {
+ match lorem {
+ lorem =>
+ foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(x),
+ ipsum => println!("{}", sit),
+ sit => foo(
+ "foooooooooooooooooooooooo",
+ "baaaaaaaaaaaaaaaaaaaaaaaarr",
+ "baaaaaaaaaaaaaaaaaaaazzzzzzzzzzzzz",
+ "qqqqqqqqquuuuuuuuuuuuuuuuuuuuuuuuuuxxx",
+ ),
+ }
+}
+```
+
+See also: [`match_block_trailing_comma`](#match_block_trailing_comma).
+
+## `match_arm_leading_pipes`
+
+Controls whether to include a leading pipe on match arms
+
+- **Default value**: `Never`
+- **Possible values**: `Always`, `Never`, `Preserve`
+- **Stable**: Yes
+
+#### `Never` (default):
+```rust
+// Leading pipes are removed from this:
+// fn foo() {
+// match foo {
+// | "foo" | "bar" => {}
+// | "baz"
+// | "something relatively long"
+// | "something really really really realllllllllllllly long" => println!("x"),
+// | "qux" => println!("y"),
+// _ => {}
+// }
+// }
+
+// Becomes
+fn foo() {
+ match foo {
+ "foo" | "bar" => {}
+ "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ "qux" => println!("y"),
+ _ => {}
+ }
+}
+```
+
+#### `Always`:
+```rust
+// Leading pipes are emitted on all arms of this:
+// fn foo() {
+// match foo {
+// "foo" | "bar" => {}
+// "baz"
+// | "something relatively long"
+// | "something really really really realllllllllllllly long" => println!("x"),
+// "qux" => println!("y"),
+// _ => {}
+// }
+// }
+
+// Becomes:
+fn foo() {
+ match foo {
+ | "foo" | "bar" => {}
+ | "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ | "qux" => println!("y"),
+ | _ => {}
+ }
+}
+```
+
+#### `Preserve`:
+```rust
+fn foo() {
+ match foo {
+ | "foo" | "bar" => {}
+ | "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ | "qux" => println!("y"),
+ _ => {}
+ }
+
+ match baz {
+ "qux" => {}
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
+```
+
+## `match_block_trailing_comma`
+
+Put a trailing comma after a block-based match arm (non-block arms are not affected)
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `false` (default):
+
+```rust
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ println!("ipsum");
+ }
+ Lorem::Dolor => println!("dolor"),
+ }
+}
+```
+
+#### `true`:
+
+```rust
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ println!("ipsum");
+ },
+ Lorem::Dolor => println!("dolor"),
+ }
+}
+```
+
+See also: [`trailing_comma`](#trailing_comma), [`match_arm_blocks`](#match_arm_blocks).
+
+## `max_width`
+
+Maximum width of each line
+
+- **Default value**: `100`
+- **Possible values**: any positive integer
+- **Stable**: Yes
+
+See also [`error_on_line_overflow`](#error_on_line_overflow).
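+
+For example, to allow longer lines project-wide, set it in `rustfmt.toml`:
+
+```toml
+max_width = 120
+```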
+
+## `merge_derives`
+
+Merge multiple derives into a single one.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `true` (default):
+
+```rust
+#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+pub enum Foo {}
+```
+
+#### `false`:
+
+```rust
+#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+pub enum Bar {}
+
+#[derive(Eq, PartialEq)]
+#[derive(Debug)]
+#[derive(Copy, Clone)]
+pub enum Foo {}
+```
+
+## `imports_granularity`
+
+How imports should be grouped into `use` statements. Imports will be merged or split to the configured level of granularity.
+
+- **Default value**: `Preserve`
+- **Possible values**: `Preserve`, `Crate`, `Module`, `Item`, `One`
+- **Stable**: No (tracking issue: [#4991](https://github.com/rust-lang/rustfmt/issues/4991))
+
+Note that rustfmt will not modify the granularity of imports containing comments if doing so could potentially lose or misplace said comments.
+
+#### `Preserve` (default):
+
+Do not change the granularity of any imports and preserve the original structure written by the developer.
+
+```rust
+use foo::b;
+use foo::b::{f, g};
+use foo::{a, c, d::e};
+use qux::{h, i};
+```
+
+#### `Crate`:
+
+Merge imports from the same crate into a single `use` statement. Conversely, imports from different crates are split into separate statements.
+
+```rust
+use foo::{
+ a, b,
+ b::{f, g},
+ c,
+ d::e,
+};
+use qux::{h, i};
+```
+
+#### `Module`:
+
+Merge imports from the same module into a single `use` statement. Conversely, imports from different modules are split into separate statements.
+
+```rust
+use foo::b::{f, g};
+use foo::d::e;
+use foo::{a, b, c};
+use qux::{h, i};
+```
+
+#### `Item`:
+
+Flatten imports so that each has its own `use` statement.
+
+```rust
+use foo::a;
+use foo::b;
+use foo::b::f;
+use foo::b::g;
+use foo::c;
+use foo::d::e;
+use qux::h;
+use qux::i;
+```
+
+#### `One`:
+
+Merge all imports into a single `use` statement as long as they have the same visibility.
+
+```rust
+pub use foo::{x, y};
+use {
+ bar::{
+ a,
+ b::{self, f, g},
+ c,
+ d::e,
+ },
+ qux::{h, i},
+};
+```
+
+## `merge_imports`
+
+This option is deprecated. Use `imports_granularity = "Crate"` instead.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+
+#### `false` (default):
+
+```rust
+use foo::{a, c, d};
+use foo::{b, g};
+use foo::{e, f};
+```
+
+#### `true`:
+
+```rust
+use foo::{a, b, c, d, e, f, g};
+```
+
+
+## `newline_style`
+
+Unix or Windows line endings
+
+- **Default value**: `"Auto"`
+- **Possible values**: `"Auto"`, `"Native"`, `"Unix"`, `"Windows"`
+- **Stable**: Yes
+
+#### `Auto` (default):
+
+The newline style is detected automatically on a per-file basis. Files
+with mixed line endings will be converted to the first detected line
+ending style.
+
+#### `Native`
+
+Line endings will be converted to `\r\n` on Windows and `\n` on all
+other platforms.
+
+#### `Unix`
+
+Line endings will be converted to `\n`.
+
+#### `Windows`
+
+Line endings will be converted to `\r\n`.
+
+## `normalize_comments`
+
+Convert `/* */` comments to `//` comments where possible
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3350](https://github.com/rust-lang/rustfmt/issues/3350))
+
+#### `false` (default):
+
+```rust
+// Lorem ipsum:
+fn dolor() -> usize {}
+
+/* sit amet: */
+fn adipiscing() -> usize {}
+```
+
+#### `true`:
+
+```rust
+// Lorem ipsum:
+fn dolor() -> usize {}
+
+// sit amet:
+fn adipiscing() -> usize {}
+```
+
+## `normalize_doc_attributes`
+
+Convert `#![doc]` and `#[doc]` attributes to `//!` and `///` doc comments.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3351](https://github.com/rust-lang/rustfmt/issues/3351))
+
+#### `false` (default):
+
+```rust
+#![doc = "Example documentation"]
+
+#[doc = "Example item documentation"]
+pub enum Bar {}
+
+/// Example item documentation
+pub enum Foo {}
+```
+
+#### `true`:
+
+```rust
+//! Example documentation
+
+/// Example item documentation
+pub enum Foo {}
+```
+
+## `overflow_delimited_expr`
+
+When structs, slices, arrays, and block/array-like macros are used as the last
+argument in an expression list, allow them to overflow (like blocks/closures)
+instead of being indented on a new line.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3370](https://github.com/rust-lang/rustfmt/issues/3370))
+
+#### `false` (default):
+
+```rust
+fn example() {
+ foo(ctx, |param| {
+ action();
+ foo(param)
+ });
+
+ foo(
+ ctx,
+ Bar {
+ x: value,
+ y: value2,
+ },
+ );
+
+ foo(
+ ctx,
+ &[
+ MAROON_TOMATOES,
+ PURPLE_POTATOES,
+ ORGANE_ORANGES,
+ GREEN_PEARS,
+ RED_APPLES,
+ ],
+ );
+
+ foo(
+ ctx,
+ vec![
+ MAROON_TOMATOES,
+ PURPLE_POTATOES,
+ ORGANE_ORANGES,
+ GREEN_PEARS,
+ RED_APPLES,
+ ],
+ );
+}
+```
+
+#### `true`:
+
+```rust
+fn example() {
+ foo(ctx, |param| {
+ action();
+ foo(param)
+ });
+
+ foo(ctx, Bar {
+ x: value,
+ y: value2,
+ });
+
+ foo(ctx, &[
+ MAROON_TOMATOES,
+ PURPLE_POTATOES,
+ ORGANE_ORANGES,
+ GREEN_PEARS,
+ RED_APPLES,
+ ]);
+
+ foo(ctx, vec![
+ MAROON_TOMATOES,
+ PURPLE_POTATOES,
+ ORGANE_ORANGES,
+ GREEN_PEARS,
+ RED_APPLES,
+ ]);
+}
+```
+
+## `remove_nested_parens`
+
+Remove nested parens.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+
+#### `true` (default):
+```rust
+fn main() {
+ (foo());
+}
+```
+
+#### `false`:
+```rust
+fn main() {
+ (foo());
+
+ ((((foo()))));
+}
+```
+
+
+## `reorder_impl_items`
+
+Reorder impl items. `type` and `const` are put first, then macros and methods.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3363](https://github.com/rust-lang/rustfmt/issues/3363))
+
+#### `false` (default)
+
+```rust
+struct Dummy;
+
+impl Iterator for Dummy {
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+
+ type Item = i32;
+}
+
+impl Iterator for Dummy {
+ type Item = i32;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+```
+
+#### `true`
+
+```rust
+struct Dummy;
+
+impl Iterator for Dummy {
+ type Item = i32;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+```
+
+## `reorder_imports`
+
+Reorder import and extern crate statements alphabetically in groups (a group is
+separated by a newline).
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `true` (default):
+
+```rust
+use dolor;
+use ipsum;
+use lorem;
+use sit;
+```
+
+#### `false`:
+
+```rust
+use lorem;
+use ipsum;
+use dolor;
+use sit;
+```
+
+## `group_imports`
+
+Controls the strategy for grouping sets of consecutive imports. Imports may contain newlines between them and still be grouped together as a single set, but other statements between imports will result in different grouping sets.
+
+- **Default value**: `Preserve`
+- **Possible values**: `Preserve`, `StdExternalCrate`, `One`
+- **Stable**: No (tracking issue: [#5083](https://github.com/rust-lang/rustfmt/issues/5083))
+
+Each set of imports (one or more `use` statements, optionally separated by newlines) will be formatted independently. Other statements such as `mod ...` or `extern crate ...` will cause imports to not be grouped together.
+
+#### `Preserve` (default):
+
+Preserve the source file's import groups.
+
+```rust
+use super::update::convert_publish_payload;
+use chrono::Utc;
+
+use alloc::alloc::Layout;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use crate::models::Event;
+use core::f32;
+```
+
+#### `StdExternalCrate`:
+
+Discard existing import groups, and create three groups for:
+1. `std`, `core` and `alloc`,
+2. external crates,
+3. `self`, `super` and `crate` imports.
+
+```rust
+use alloc::alloc::Layout;
+use core::f32;
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+use chrono::Utc;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+
+use super::schema::{Context, Payload};
+use super::update::convert_publish_payload;
+use crate::models::Event;
+```
+
+#### `One`:
+
+Discard existing import groups, and create a single group for everything.
+
+```rust
+use super::schema::{Context, Payload};
+use super::update::convert_publish_payload;
+use crate::models::Event;
+use alloc::alloc::Layout;
+use broker::database::PooledConnection;
+use chrono::Utc;
+use core::f32;
+use juniper::{FieldError, FieldResult};
+use std::sync::Arc;
+use uuid::Uuid;
+```
+
+## `reorder_modules`
+
+Reorder `mod` declarations alphabetically in groups.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `true` (default)
+
+```rust
+mod a;
+mod b;
+
+mod dolor;
+mod ipsum;
+mod lorem;
+mod sit;
+```
+
+#### `false`
+
+```rust
+mod b;
+mod a;
+
+mod lorem;
+mod ipsum;
+mod dolor;
+mod sit;
+```
+
+**Note** `mod` with `#[macro_use]` will not be reordered since that could change the semantics
+of the original source code.
+
+## `required_version`
+
+Require a specific version of rustfmt. If you want to make sure that the
+specific version of rustfmt is used in your CI, use this option.
+
+- **Default value**: `CARGO_PKG_VERSION`
+- **Possible values**: any published version (e.g. `"0.3.8"`)
+- **Stable**: No (tracking issue: [#3386](https://github.com/rust-lang/rustfmt/issues/3386))
+
+## `short_array_element_width_threshold`
+
+The width threshold for an array element to be considered "short".
+
+The layout of an array is dependent on the length of each of its elements.
+If the length of every element in an array is below this threshold (all elements are "short") then the array can be formatted in the mixed/compressed style, but if any one element has a length that exceeds this threshold then the array elements will have to be formatted vertically.
+
+- **Default value**: `10`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+#### `10` (default):
+```rust
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000,
+ 0xaaaaaaaaaaaaaaaa,
+ 0xbbbbbbbbbbbbbbbb,
+ 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+}
+```
+#### `20`:
+```rust
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000, 0xaaaaaaaaaaaaaaaa, 0xbbbbbbbbbbbbbbbb, 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+}
+```
+See also [`max_width`](#max_width).
+
+## `skip_children`
+
+Don't reformat out-of-line modules
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3389](https://github.com/rust-lang/rustfmt/issues/3389))
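+
+For example (hypothetical file names, purely to illustrate the behaviour), running rustfmt on a crate root that declares an out-of-line module leaves the child file untouched when this option is enabled:
+
+```rust
+// src/lib.rs -- rustfmt is invoked on this file and reformats it as usual.
+mod generated;
+
+// src/generated.rs -- with `skip_children = true`, rustfmt does not descend into
+// this out-of-line module, so whatever formatting it already has is preserved.
+pub fn machine_written(x: u32) -> u32 {
+    x + 1
+}
+```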
+
+## `single_line_if_else_max_width`
+
+Maximum line length for single line if-else expressions. A value of `0` (zero) results in if-else expressions always being broken into multiple lines. Note this occurs when `use_small_heuristics` is set to `Off`.
+
+- **Default value**: `50`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `single_line_if_else_max_width` will take precedence.
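+
+For illustration (in the style of the other examples in this document), an if-else whose single-line form fits within the threshold stays on one line, while a longer one is broken up:
+
+```rust
+fn main() {
+    // `if ipsum { dolor } else { sit }` is within the default threshold of 50.
+    let lorem = if ipsum { dolor } else { sit };
+
+    // This expression is wider than the threshold, so it is formatted vertically.
+    let consectetur = if adipiscing_elit_sed_do_eiusmod {
+        tempor_incididunt
+    } else {
+        ut_labore_et_dolore
+    };
+}
+```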
+
+See also [`max_width`](#max_width) and [`use_small_heuristics`](#use_small_heuristics)
+
+## `space_after_colon`
+
+Leave a space after the colon.
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3366](https://github.com/rust-lang/rustfmt/issues/3366))
+
+#### `true` (default):
+
+```rust
+fn lorem<T: Eq>(t: T) {
+ let lorem: Dolor = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+}
+```
+
+#### `false`:
+
+```rust
+fn lorem<T:Eq>(t:T) {
+ let lorem:Dolor = Lorem {
+ ipsum:dolor,
+ sit:amet,
+ };
+}
+```
+
+See also: [`space_before_colon`](#space_before_colon).
+
+## `space_before_colon`
+
+Leave a space before the colon.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3365](https://github.com/rust-lang/rustfmt/issues/3365))
+
+#### `false` (default):
+
+```rust
+fn lorem<T: Eq>(t: T) {
+ let lorem: Dolor = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+}
+```
+
+#### `true`:
+
+```rust
+fn lorem<T : Eq>(t : T) {
+ let lorem : Dolor = Lorem {
+ ipsum : dolor,
+ sit : amet,
+ };
+}
+```
+
+See also: [`space_after_colon`](#space_after_colon).
+
+## `spaces_around_ranges`
+
+Put spaces around the `..`, `..=`, and `...` range operators
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3367](https://github.com/rust-lang/rustfmt/issues/3367))
+
+#### `false` (default):
+
+```rust
+fn main() {
+ let lorem = 0..10;
+ let ipsum = 0..=10;
+
+ match lorem {
+ 1..5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1..=5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1...5 => foo(),
+ _ => bar,
+ }
+}
+```
+
+#### `true`:
+
+```rust
+fn main() {
+ let lorem = 0 .. 10;
+ let ipsum = 0 ..= 10;
+
+ match lorem {
+ 1 .. 5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1 ..= 5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1 ... 5 => foo(),
+ _ => bar,
+ }
+}
+```
+
+## `struct_field_align_threshold`
+
+The maximum diff of width between struct fields to be aligned with each other.
+
+- **Default value**: `0`
+- **Possible values**: any non-negative integer
+- **Stable**: No (tracking issue: [#3371](https://github.com/rust-lang/rustfmt/issues/3371))
+
+#### `0` (default):
+
+```rust
+struct Foo {
+ x: u32,
+ yy: u32,
+ zzz: u32,
+}
+```
+
+#### `20`:
+
+```rust
+struct Foo {
+    x:   u32,
+    yy:  u32,
+    zzz: u32,
+}
+```
+
+## `struct_lit_single_line`
+
+Put small struct literals on a single line
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3357](https://github.com/rust-lang/rustfmt/issues/3357))
+
+#### `true` (default):
+
+```rust
+fn main() {
+ let lorem = Lorem { foo: bar, baz: ofo };
+}
+```
+
+#### `false`:
+
+```rust
+fn main() {
+ let lorem = Lorem {
+ foo: bar,
+ baz: ofo,
+ };
+}
+```
+
+See also: [`indent_style`](#indent_style).
+
+## `struct_lit_width`
+
+Maximum width in the body of a struct literal before falling back to vertical formatting. A value of `0` (zero) results in struct literals always being broken into multiple lines. Note this occurs when `use_small_heuristics` is set to `Off`.
+
+- **Default value**: `18`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `struct_lit_width` will take precedence.
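+
+For illustration (in the style of the other examples in this document), a struct literal whose body fits within the threshold stays on one line, while a wider one falls back to vertical formatting:
+
+```rust
+fn main() {
+    // The body `ipsum: dolor` fits within the default threshold of 18.
+    let lorem = Lorem { ipsum: dolor };
+
+    // The body is wider than the threshold, so the literal is broken up.
+    let lorem = Lorem {
+        ipsum: dolor,
+        sit: amet,
+    };
+}
+```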
+
+See also [`max_width`](#max_width), [`use_small_heuristics`](#use_small_heuristics), and [`struct_lit_single_line`](#struct_lit_single_line)
+
+## `struct_variant_width`
+
+Maximum width in the body of a struct variant before falling back to vertical formatting. A value of `0` (zero) results in struct literals always being broken into multiple lines. Note this occurs when `use_small_heuristics` is set to `Off`.
+
+- **Default value**: `35`
+- **Possible values**: any positive integer that is less than or equal to the value specified for [`max_width`](#max_width)
+- **Stable**: Yes
+
+By default this option is set as a percentage of [`max_width`](#max_width) provided by [`use_small_heuristics`](#use_small_heuristics), but a value set directly for `struct_variant_width` will take precedence.
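+
+For illustration (in the style of the other examples in this document), a struct variant whose body fits within the threshold stays on one line, while a wider one falls back to vertical formatting:
+
+```rust
+enum Lorem {
+    // The body fits within the default threshold of 35.
+    Sit { amet: Consectetur, adipiscing: Elit },
+
+    // The body is wider than the threshold, so the variant is broken up.
+    Dolore {
+        magna: Aliqua,
+        ut_enim: AdMinimVeniam,
+        quis_nostrud: Exercitation,
+    },
+}
+```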
+
+See also [`max_width`](#max_width) and [`use_small_heuristics`](#use_small_heuristics)
+
+## `tab_spaces`
+
+Number of spaces per tab
+
+- **Default value**: `4`
+- **Possible values**: any positive integer
+- **Stable**: Yes
+
+#### `4` (default):
+
+```rust
+fn lorem() {
+    let ipsum = dolor();
+    let sit = vec![
+        "amet consectetur adipiscing elit amet",
+        "consectetur adipiscing elit amet consectetur.",
+    ];
+}
+```
+
+#### `2`:
+
+```rust
+fn lorem() {
+  let ipsum = dolor();
+  let sit = vec![
+    "amet consectetur adipiscing elit amet",
+    "consectetur adipiscing elit amet consectetur.",
+  ];
+}
+```
+
+See also: [`hard_tabs`](#hard_tabs).
+
+
+## `trailing_comma`
+
+How to handle trailing commas for lists
+
+- **Default value**: `"Vertical"`
+- **Possible values**: `"Always"`, `"Never"`, `"Vertical"`
+- **Stable**: No (tracking issue: [#3379](https://github.com/rust-lang/rustfmt/issues/3379))
+
+#### `"Vertical"` (default):
+
+```rust
+fn main() {
+ let Lorem { ipsum, dolor, sit } = amet;
+ let Lorem {
+ ipsum,
+ dolor,
+ sit,
+ amet,
+ consectetur,
+ adipiscing,
+ } = elit;
+}
+```
+
+#### `"Always"`:
+
+```rust
+fn main() {
+ let Lorem { ipsum, dolor, sit, } = amet;
+ let Lorem {
+ ipsum,
+ dolor,
+ sit,
+ amet,
+ consectetur,
+ adipiscing,
+ } = elit;
+}
+```
+
+#### `"Never"`:
+
+```rust
+fn main() {
+ let Lorem { ipsum, dolor, sit } = amet;
+ let Lorem {
+ ipsum,
+ dolor,
+ sit,
+ amet,
+ consectetur,
+ adipiscing
+ } = elit;
+}
+```
+
+See also: [`match_block_trailing_comma`](#match_block_trailing_comma).
+
+## `trailing_semicolon`
+
+Add trailing semicolon after `break`, `continue` and `return`
+
+- **Default value**: `true`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3378](https://github.com/rust-lang/rustfmt/issues/3378))
+
+#### `true` (default):
+```rust
+fn foo() -> usize {
+ return 0;
+}
+```
+
+#### `false`:
+```rust
+fn foo() -> usize {
+ return 0
+}
+```
+
+## `type_punctuation_density`
+
+Determines if `+` or `=` are wrapped in spaces in the punctuation of types
+
+- **Default value**: `"Wide"`
+- **Possible values**: `"Compressed"`, `"Wide"`
+- **Stable**: No (tracking issue: [#3364](https://github.com/rust-lang/rustfmt/issues/3364))
+
+#### `"Wide"` (default):
+
+```rust
+fn lorem<Ipsum: Dolor + Sit = Amet>() {
+ // body
+}
+```
+
+#### `"Compressed"`:
+
+```rust
+fn lorem<Ipsum: Dolor+Sit=Amet>() {
+ // body
+}
+```
+
+## `unstable_features`
+
+Enable unstable features on the unstable channel.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3387](https://github.com/rust-lang/rustfmt/issues/3387))
+
+## `use_field_init_shorthand`
+
+Use field initialization shorthand if possible.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `false` (default):
+
+```rust
+struct Foo {
+ x: u32,
+ y: u32,
+ z: u32,
+}
+
+fn main() {
+ let x = 1;
+ let y = 2;
+ let z = 3;
+ let a = Foo { x, y, z };
+ let b = Foo { x: x, y: y, z: z };
+}
+```
+
+#### `true`:
+
+```rust
+struct Foo {
+ x: u32,
+ y: u32,
+ z: u32,
+}
+
+fn main() {
+ let x = 1;
+ let y = 2;
+ let z = 3;
+ let a = Foo { x, y, z };
+}
+```
+
+## `use_small_heuristics`
+
+This option can be used to simplify the management and bulk updates of the granular width configuration settings ([`fn_call_width`](#fn_call_width), [`attr_fn_like_width`](#attr_fn_like_width), [`struct_lit_width`](#struct_lit_width), [`struct_variant_width`](#struct_variant_width), [`array_width`](#array_width), [`chain_width`](#chain_width), [`single_line_if_else_max_width`](#single_line_if_else_max_width)), that respectively control when formatted constructs are multi-lined/vertical based on width.
+
+Note that explicitly provided values for the width configuration settings take precedence and override the calculated values determined by `use_small_heuristics`.
+
+- **Default value**: `"Default"`
+- **Possible values**: `"Default"`, `"Off"`, `"Max"`
+- **Stable**: Yes
+
+#### `Default` (default):
+When `use_small_heuristics` is set to `Default`, the values for the granular width settings are calculated as a ratio of the value for `max_width`.
+
+The ratios are:
+* [`fn_call_width`](#fn_call_width) - `60%`
+* [`attr_fn_like_width`](#attr_fn_like_width) - `70%`
+* [`struct_lit_width`](#struct_lit_width) - `18%`
+* [`struct_variant_width`](#struct_variant_width) - `35%`
+* [`array_width`](#array_width) - `60%`
+* [`chain_width`](#chain_width) - `60%`
+* [`single_line_if_else_max_width`](#single_line_if_else_max_width) - `50%`
+
+For example when `max_width` is set to `100`, the width settings are:
+* `fn_call_width=60`
+* `attr_fn_like_width=70`
+* `struct_lit_width=18`
+* `struct_variant_width=35`
+* `array_width=60`
+* `chain_width=60`
+* `single_line_if_else_max_width=50`
+
+and when `max_width` is set to `200`:
+* `fn_call_width=120`
+* `attr_fn_like_width=140`
+* `struct_lit_width=36`
+* `struct_variant_width=70`
+* `array_width=120`
+* `chain_width=120`
+* `single_line_if_else_max_width=100`
+
+```rust
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit { amet: Consectetur, adipiscing: Elit },
+}
+
+fn main() {
+ lorem(
+ "lorem",
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ );
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+ let lorem = Lorem { ipsum: dolor };
+
+ let lorem = if ipsum { dolor } else { sit };
+}
+```
+
+#### `Off`:
+When `use_small_heuristics` is set to `Off`, the granular width settings are functionally disabled and ignored. See the documentation for the respective width config options for specifics.
+
+```rust
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit {
+ amet: Consectetur,
+ adipiscing: Elit,
+ },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+
+ let lorem = if ipsum {
+ dolor
+ } else {
+ sit
+ };
+}
+```
+
+#### `Max`:
+When `use_small_heuristics` is set to `Max`, then each granular width setting is set to the same value as `max_width`.
+
+So if `max_width` is set to `200`, then all the width settings are also set to `200`.
+* `fn_call_width=200`
+* `attr_fn_like_width=200`
+* `struct_lit_width=200`
+* `struct_variant_width=200`
+* `array_width=200`
+* `chain_width=200`
+* `single_line_if_else_max_width=200`
+
+```rust
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit { amet: Consectetur, adipiscing: Elit },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem { ipsum: dolor, sit: amet };
+
+ let lorem = if ipsum { dolor } else { sit };
+}
+```
+
+
+See also:
+* [`max_width`](#max_width)
+* [`fn_call_width`](#fn_call_width)
+* [`attr_fn_like_width`](#attr_fn_like_width)
+* [`struct_lit_width`](#struct_lit_width)
+* [`struct_variant_width`](#struct_variant_width)
+* [`array_width`](#array_width)
+* [`chain_width`](#chain_width)
+* [`single_line_if_else_max_width`](#single_line_if_else_max_width)
+
+## `use_try_shorthand`
+
+Replace uses of the `try!` macro with the `?` shorthand
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: Yes
+
+#### `false` (default):
+
+```rust
+fn main() {
+ let lorem = ipsum.map(|dolor| dolor.sit())?;
+
+ let lorem = try!(ipsum.map(|dolor| dolor.sit()));
+}
+```
+
+#### `true`:
+
+```rust
+fn main() {
+ let lorem = ipsum.map(|dolor| dolor.sit())?;
+}
+```
+
+## `version`
+
+Which version of the formatting rules to use. `Version::One` is backwards-compatible
+with Rustfmt 1.0. Other versions are only backwards compatible within a major
+version number.
+
+- **Default value**: `One`
+- **Possible values**: `One`, `Two`
+- **Stable**: No (tracking issue: [#3383](https://github.com/rust-lang/rustfmt/issues/3383))
+
+### Example
+
+```toml
+version = "Two"
+```
+
+## `where_single_line`
+
+Forces the `where` clause to be laid out on a single line.
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3359](https://github.com/rust-lang/rustfmt/issues/3359))
+
+#### `false` (default):
+
+```rust
+impl<T> Lorem for T
+where
+ Option<T>: Ipsum,
+{
+ // body
+}
+```
+
+#### `true`:
+
+```rust
+impl<T> Lorem for T
+where Option<T>: Ipsum
+{
+ // body
+}
+```
+
+See also [`brace_style`](#brace_style), [`control_brace_style`](#control_brace_style).
+
+
+## `wrap_comments`
+
+Break comments to fit on the line
+
+- **Default value**: `false`
+- **Possible values**: `true`, `false`
+- **Stable**: No (tracking issue: [#3347](https://github.com/rust-lang/rustfmt/issues/3347))
+
+#### `false` (default):
+
+```rust
+// Lorem ipsum dolor sit amet, consectetur adipiscing elit,
+// sed do eiusmod tempor incididunt ut labore et dolore
+// magna aliqua. Ut enim ad minim veniam, quis nostrud
+// exercitation ullamco laboris nisi ut aliquip ex ea
+// commodo consequat.
+
+// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
+```
+
+#### `true`:
+
+```rust
+// Lorem ipsum dolor sit amet, consectetur adipiscing elit,
+// sed do eiusmod tempor incididunt ut labore et dolore
+// magna aliqua. Ut enim ad minim veniam, quis nostrud
+// exercitation ullamco laboris nisi ut aliquip ex ea
+// commodo consequat.
+```
+
+# Internal Options
+
+## `emit_mode`
+
+Internal option
+
+## `make_backup`
+
+Internal option, use `--backup`
+
+## `print_misformatted_file_names`
+
+Internal option, use `-l` or `--files-with-diff`
diff --git a/src/tools/rustfmt/Contributing.md b/src/tools/rustfmt/Contributing.md
new file mode 100644
index 000000000..307399601
--- /dev/null
+++ b/src/tools/rustfmt/Contributing.md
@@ -0,0 +1,251 @@
+# Contributing
+
+There are many ways to contribute to Rustfmt. This document lays out what they
+are and has information on how to get started. If you have any questions about
+contributing or need help with anything, please ask in the WG-Rustfmt channel
+on [Discord](https://discordapp.com/invite/rust-lang). Feel free to also ask questions
+on issues, or file new issues specifically to get help.
+
+All contributors are expected to follow our [Code of
+Conduct](CODE_OF_CONDUCT.md).
+
+## Test and file issues
+
+It would be really useful to have people use rustfmt on their projects and file
+issues where it does something you don't expect.
+
+
+## Create test cases
+
+Having a strong test suite for a tool like this is essential. It is very easy
+to create regressions. Any tests you can add are very much appreciated.
+
+The tests can be run with `cargo test`. This does a number of things:
+* runs the unit tests for a number of internal functions;
+* makes sure that rustfmt run on every file in `./tests/source/` is equal to its
+ associated file in `./tests/target/`;
+* runs idempotence tests on the files in `./tests/target/`. These files should
+ not be changed by rustfmt;
+* checks that rustfmt's code is not changed by running on itself. This ensures
+ that the project bootstraps.
+
+Creating a test is as easy as creating a new file in `./tests/source/` and an
+equally named one in `./tests/target/`. If it is only required that rustfmt
+leaves a piece of code unformatted, it may suffice to only create a target file.
+
+Whenever there's a discrepancy between the expected and actual output when running tests, a
+colourised diff will be printed so that the offending line(s) can quickly be
+identified.
+
+Without explicit settings, the tests will be run using rustfmt's default
+configuration. It is possible to run a test using non-default settings in several
+ways. Firstly, you can include configuration parameters in comments at the top
+of the file. For example: to use 3 spaces per tab, start your test with
+`// rustfmt-tab_spaces: 3`. Just remember that the comment is part of the input,
+so include it in both the source and target files! It is also possible to
+explicitly specify the name of the expected output file in the target directory.
+Use `// rustfmt-target: filename.rs` for this. You can also specify a custom
+configuration by using the `rustfmt-config` directive. Rustfmt will then use
+that toml file located in `./tests/config/` for its configuration. Including
+`// rustfmt-config: small_tabs.toml` will run your test with the configuration
+file found at `./tests/config/small_tabs.toml`. The final option is used when the
+test source file contains no configuration parameter comments. In this case, the
+test harness looks for a configuration file with the same filename as the test
+file in the `./tests/config/` directory, so a test source file named `test-indent.rs`
+would need a configuration file named `test-indent.toml` in that directory. As an
+example, the `issue-1111.rs` test file is configured by the file
+`./tests/config/issue-1111.toml`.
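+
+For example, a test source file that uses inline configuration and a custom target name might begin like this (the file name and option values are made up, and only illustrate the directive syntax described above):
+
+```rust
+// rustfmt-tab_spaces: 3
+// rustfmt-target: three_space_indent.rs
+
+// The directive comments above are part of the test input, so they must appear
+// in both the source file and the expected target file.
+fn lorem() {
+   let ipsum = dolor();
+}
+```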
+
+## Debugging
+
+Some `rewrite_*` methods use the `debug!` macro for printing useful information.
+These messages can be printed by using the environment variable `RUSTFMT_LOG=rustfmt=DEBUG`.
+These traces can be helpful in understanding which part of the code was used
+and in getting a better grasp on the execution flow.
+
+## Hack!
+
+Here are some [good starting issues](https://github.com/rust-lang/rustfmt/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
+
+If you've found areas which need polish and don't have issues, please submit a
+PR; don't feel there needs to be an issue.
+
+
+### Guidelines
+
+Rustfmt bootstraps; that is, part of its test suite consists of running rustfmt on its own
+source code. So, basically, the only style guideline is that you must pass the
+tests. That ensures that the Rustfmt source code adheres to our own conventions.
+
+Talking of tests, if you add a new feature or fix a bug, please also add a test.
+It's really easy; see above for details. Please run `cargo test` before
+submitting a PR to ensure your patch passes all tests; it's pretty quick.
+
+Rustfmt is post-1.0 and within major version releases we strive for backwards
+compatibility (at least when using the default options). That means any code
+which changes Rustfmt's output must be guarded by either an option or a version
+check. The latter is implemented as an option called `version`. See the section on
+[configuration](#Configuration) below.
+
+Please try to avoid leaving `TODO`s in the code. There are a few around, but I
+wish there weren't. You can leave `FIXME`s, preferably with an issue number.
+
+
+### Version-gate formatting changes
+
+A change that introduces a different code-formatting should be gated on the
+`version` configuration. This is to ensure the formatting of the current major
+release is preserved, while allowing fixes to be implemented for the next
+release.
+
+This is done by conditionally guarding the change like so:
+
+```rust
+if config.version() == Version::One { // if the current major release is 1.x
+ // current formatting
+} else {
+ // new formatting
+}
+```
+
+This allows the user to apply the next formatting explicitly via the
+configuration, while being stable by default.
+
+When the next major release is done, the code block of the previous formatting
+can be deleted, e.g., the first block in the example above when going from `1.x`
+to `2.x`.
+
+| Note: Only formatting changes with default options need to be gated. |
+| --- |
+
+### A quick tour of Rustfmt
+
+Rustfmt is basically a pretty printer - that is, its mode of operation is to
+take an AST (abstract syntax tree) and print it in a nice way (including staying
+under the maximum permitted width for a line). In order to get that AST, we
+first have to parse the source text, we use the Rust compiler's parser to do
+that (see [src/lib.rs](src/lib.rs)). We shy away from doing anything too fancy, such as
+algebraic approaches to pretty printing, instead relying on an heuristic
+approach, 'manually' crafting a string for each AST node. This results in quite
+a lot of code, but it is relatively simple.
+
+The AST is a tree view of source code. It carries all the semantic information
+about the code, but not all of the syntax. In particular, we lose white space
+and comments (although doc comments are preserved). Rustfmt uses a view of the
+AST before macros are expanded, so there are still macro uses in the code. The
+arguments to macros are not an AST, but raw tokens - this makes them harder to
+format.
+
+There are different nodes for every kind of item and expression in Rust. For
+more details see the source code in the compiler -
+[ast.rs](https://github.com/rust-lang/rust/blob/master/compiler/rustc_ast/src/ast.rs) - and/or the
+[docs](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast/ast/index.html).
+
+Many nodes in the AST (but not all, annoyingly) have a `Span`. A `Span` is a
+range in the source code, it can easily be converted to a snippet of source
+text. When the AST does not contain enough information for us, we rely heavily
+on `Span`s. For example, we can look between spans to try and find comments, or
+parse a snippet to see how the user wrote their source code.
+
+The downside of using the AST is that we miss some information - primarily white
+space and comments. White space is sometimes significant, although mostly we
+want to ignore it and make our own. We strive to reproduce all comments, but
+this is sometimes difficult. The crufty corners of Rustfmt are where we hack
+around the absence of comments in the AST and try to recreate them as best we
+can.
+
+Our primary tool here is to look between spans for text we've missed. For
+example, in a function call `foo(a, b)`, we have spans for `a` and `b`, in this
+case, there is only a comma and a single space between the end of `a` and the
+start of `b`, so there is nothing much to do. But if we look at
+`foo(a /* a comment */, b)`, then between `a` and `b` we find the comment.
+
+At a higher level, Rustfmt has machinery so that we account for text between
+'top level' items. Then we can reproduce that text pretty much verbatim. We only
+count spans we actually reformat, so if we can't format a span it is not missed
+completely but is reproduced in the output without being formatted. This is
+mostly handled in [src/missed_spans.rs](src/missed_spans.rs). See also `FmtVisitor::last_pos` in
+[src/visitor.rs](src/visitor.rs).
+
+
+#### Some important elements
+
+At the highest level, Rustfmt uses a `Visitor` implementation called `FmtVisitor`
+to walk the AST. It lives in [src/visitor.rs](src/visitor.rs) and is really just used to walk
+items, rather than the bodies of functions. We also cover macros and attributes
+here. Most methods of the visitor call out to `Rewrite` implementations that
+then walk their own children.
+
+The `Rewrite` trait is defined in [src/rewrite.rs](src/rewrite.rs). It is implemented for many
+things that can be rewritten, mostly AST nodes. It has a single function,
+`rewrite`, which is called to rewrite `self` into an `Option<String>`. The
+arguments are `width` which is the horizontal space we write into and `offset`
+which is how much we are currently indented from the lhs of the page. We also
+take a context which contains information used for parsing, the current block
+indent, and a configuration (see below).
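+
+Schematically, the trait looks roughly like the following sketch (the exact signature in [src/rewrite.rs](src/rewrite.rs) differs, as the width budget and indent are bundled together with other state, but the idea is the same):
+
+```rust
+// Simplified sketch of the `Rewrite` trait described above, not the real definition.
+trait Rewrite {
+    /// Try to format `self` within `width` columns, starting `offset` columns
+    /// from the left-hand side of the page; return `None` if it does not fit.
+    fn rewrite(&self, context: &RewriteContext, width: usize, offset: usize) -> Option<String>;
+}
+```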
+
+##### Rewrite and Indent
+
+To understand the indents, consider
+
+```
+impl Foo {
+    fn foo(...) {
+        bar(argument_one,
+            baz());
+    }
+}
+```
+
+When formatting the `bar` call we will format the arguments in order, after the
+first one we know we are working on multiple lines (imagine it is longer than
+written). So, when we come to the second argument, the indent we pass to
+`rewrite` is 12, which puts us under the first argument. The current block
+indent (stored in the context) is 8. The former is used for visual indenting
+(when objects are vertically aligned with some marker), the latter is used for
+block indenting (when objects are tabbed in from the lhs). The width available
+for `baz()` will be the maximum width, minus the space used for indenting, minus
+the space used for the `);`. (Note that actual argument formatting does not
+quite work like this, but it's close enough).
+
+The `rewrite` function returns an `Option` - either we successfully rewrite and
+return the rewritten string for the caller to use, or we fail to rewrite and
+return `None`. This could be because Rustfmt encounters something it doesn't
+know how to reformat, but more often it is because Rustfmt can't fit the item
+into the required width. How to handle this is up to the caller. Often the
+caller just gives up, ultimately relying on the missed spans system to paste in
+the un-formatted source. A better solution (although not performed in many
+places) is for the caller to shuffle around some of its other items to make
+more width, then call the function again with more space.
+
+Since it is common for callers to bail out when a callee fails, we often use a
+`?` operator to make this pattern more succinct.
+
+One way we might find out that we don't have enough space is when computing how much
+space we have. Something like `available_space = budget - overhead`. Since
+widths are unsigned integers, this would cause underflow. Therefore we use
+checked subtraction: `available_space = budget.checked_sub(overhead)?`.
+`checked_sub` returns an `Option`, and if we would underflow `?` returns
+`None`, otherwise, we proceed with the computed space.
+
+##### Rewrite of list-like expressions
+
+Much of the syntax in Rust is lists: lists of arguments, lists of fields, lists of
+array elements, etc. We have some generic code to handle lists, including how to
+space them in horizontal and vertical space, indentation, comments between
+items, trailing separators, etc. However, since there are so many options, the
+code is a bit complex. Look in [src/lists.rs](src/lists.rs). `write_list` is the key function,
+and `ListFormatting` the key structure for configuration. You'll need to make a
+`ListItems` for input, this is usually done using `itemize_list`.
+
+##### Configuration
+
+Rustfmt strives to be highly configurable. Often the first part of a patch is
+creating a configuration option for the feature you are implementing. All
+handling of configuration options is done in [src/config/mod.rs](src/config/mod.rs). Look for the
+`create_config!` macro at the end of the file for all the options. The rest of
+the file defines a bunch of enums used for options, and the machinery to produce
+the config struct and parse a config file, etc. Checking an option is done by
+accessing the correct field on the config struct, e.g., `config.max_width()`. Most
+functions have a `Config`, or one can be accessed via a visitor or context of
+some kind.
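+
+As a hypothetical illustration of that last point, a helper might consult the configuration through those generated accessors:
+
+```rust
+// Hypothetical helper, not actual rustfmt code: options are read through the
+// accessor methods that `create_config!` generates on the `Config` struct.
+fn comment_needs_wrapping(config: &Config, line_len: usize) -> bool {
+    config.wrap_comments() && line_len > config.comment_width()
+}
+```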
diff --git a/src/tools/rustfmt/Design.md b/src/tools/rustfmt/Design.md
new file mode 100644
index 000000000..7a4dcf877
--- /dev/null
+++ b/src/tools/rustfmt/Design.md
@@ -0,0 +1,184 @@
+# Some thoughts on the design of rustfmt
+
+## Use cases
+
+A formatting tool can be used in different ways and the different use cases can
+affect the design of the tool. The use cases I'm particularly concerned with are:
+
+* running on a whole repo before check-in
+ - in particular, to replace the `make tidy` pass on the Rust distro
+* running on code from another project that you are adding to your own
+* using for mass changes in code style over a project
+
+Some valid use cases for a formatting tool which I am explicitly not trying to
+address (although it would be nice, if possible):
+
+* running 'as you type' in an IDE
+* running on arbitrary snippets of code
+* running on Rust-like code, specifically code which doesn't parse
+* use as a pretty printer inside the compiler
+* refactoring
+* formatting totally unformatted source code
+
+
+## Scope and vision
+
+I do not subscribe to the notion that a formatting tool should only change
+whitespace. I believe that we should be semantics preserving, but not necessarily
+syntax preserving, i.e., we can change the AST of a program.
+
+I.e., we might change glob imports to list or single imports, re-order imports,
+move bounds to where clauses, combine multiple impls into a single impl, etc.
+
+However, we will not change the names of variables or make any changes which
+*could* change the semantics. To be ever so slightly formal, we might imagine
+a compiler's high-level intermediate representation; we should strive to only
+make changes which do not change the HIR, even if they do change the AST.
+
+I would like to be able to output refactoring scripts for making deeper changes
+though. (E.g., renaming variables to satisfy our style guidelines).
+
+My long term goal is that all style lints can be moved from the compiler to
+rustfmt and, as well as warning, can either fix problems or emit refactoring
+scripts to do so.
+
+### Configurability
+
+I believe reformatting should be configurable to some extent. We should read in
+options from a configuration file and reformat accordingly. We should supply at
+least a config file which matches the Rust style guidelines.
+
+There should be multiple modes for running the tool. As well as simply replacing
+each file, we should be able to show the user a list of the changes we would
+make, or show a list of violations without corrections (the difference being
+that there are multiple ways to satisfy a given set of style guidelines, and we
+should distinguish violations from deviations from our own model).
+
+
+## Implementation philosophy
+
+Some details of the philosophy behind the implementation.
+
+
+### Operate on the AST
+
+A reformatting tool can be based on either the AST or a token stream (in Rust
+this is actually a stream of token trees, but it's not a fundamental difference).
+There are pros and cons to the two approaches. I have chosen to use the AST
+approach. The primary reasons are that it allows us to do more sophisticated
+manipulations, rather than just change whitespace, and it gives us more context
+when making those changes.
+
+The advantage of the tokens approach is that you can operate on non-parsable
+code. I don't care too much about that, it would be nice, but I think being able
+to perform sophisticated transformations is more important. In the future, I hope to
+(optionally) be able to use type information for informing reformatting too. One
+specific case of unparsable code is macros. Using tokens is certainly easier
+here, but I believe it is perfectly solvable with the AST approach. At the limit,
+we can operate on just tokens in the macro case.
+
+I believe that there is not in fact that much difference between the two
+approaches. Due to imperfect span information, under the AST approach, we
+sometimes are reduced to examining tokens or doing some re-lexing of our own. Under
+the tokens approach, you need to implement your own (much simpler) parser. I
+believe that as the tool gets more sophisticated, you end up doing more at the
+token-level, or having an increasingly sophisticated parser, until at the limit
+you have the same tool.
+
+However, I believe starting from the AST gets you more quickly to a usable and
+useful tool.
+
+
+### Heuristic rather than algorithmic
+
+Many formatting tools use a very general algorithmic or even algebraic tool for
+pretty printing. This results in very elegant code, but I believe does not give
+the best results. I prefer a more ad hoc approach where each expression/item is
+formatted using custom rules. We hopefully don't end up with too much code due
+to good old fashioned abstraction and code sharing. This will give a bigger code
+base, but hopefully a better result.
+
+It also means that there will be some cases we can't format and we have to give
+up. I think that is OK. Hopefully, they are rare enough that manually fixing them
+is not painful. Better to have a tool that gives great code in 99% of cases and
+fails in 1% than a tool which gives 50% great code and 50% ugly code, but never
+fails.
+
+
+### Incremental development
+
+I want rustfmt to be useful as soon as possible and to always be useful. I
+specifically don't want to have to wait for a feature (or worse, the whole tool)
+to be perfect before it is useful. The main ways this is achieved is to output
+the source code where we can't yet reformat, be able to turn off new features
+until they are ready, and the 'do no harm' principle (see next section).
+
+
+### First, do no harm
+
+Until rustfmt is perfect, there will always be a trade-off between doing more and
+doing existing things well. I want to err on the side of the latter.
+Specifically, rustfmt should never take OK code and make it look worse. If we
+can't make it better, we should leave it as is. That might mean being less
+aggressive than we like or using configurability.
+
+
+### Use the source code as guidance
+
+There are often multiple ways to format code and satisfy standards. Where this
+is the case, we should use the source code as a hint for reformatting.
+Furthermore, where the code has been formatted in a particular way that satisfies
+the coding standard, it should not be changed (this is sometimes not possible or
+not worthwhile due to uniformity being desirable, but it is a useful goal).
+
+
+### Architecture details
+
+We use the AST from [syntex_syntax], an export of rustc's libsyntax. We use
+syntex_syntax's visit module to walk the AST to find starting points for
+reformatting. Eventually, we should reformat everything and we shouldn't need
+the visit module. We keep track of the last formatted position in the code, and
+when we reformat the next piece of code we make sure to output the span for all
+the code in between (handled by missed_spans.rs).
+
+[syntex_syntax]: https://crates.io/crates/syntex_syntax
+
+We read in formatting configuration from a `rustfmt.toml` file if there is one.
+The options and their defaults are defined in `config.rs`. A `Config` object is
+passed throughout the formatting code, and each formatting routine looks there
+for its configuration.
+
+Our visitor keeps track of the desired current indent due to blocks
+(`block_indent`). Each `visit_*` method reformats code according to this indent,
+`config.comment_width()` and `config.max_width()`. Most reformatting that is done
+in the `visit_*` methods is a bit hacky and is meant to be temporary until it can
+be done properly.
+
+There are a bunch of methods called `rewrite_*`. They do the bulk of the
+reformatting. These take the AST node to be reformatted (this may not literally
+be an AST node from syntex_syntax: there might be multiple parameters
+describing a logical node), the current indent, and the current width budget.
+They return a `String` (or sometimes an `Option<String>`) which formats the
+code in the box given by the indent and width budget. If the method fails, it
+returns `None` and the calling method then has to fall back in some way to give
+the callee more space.
+
+So, in summary, to format a node, we calculate the width budget and then walk down
+the tree from the node. At a leaf, we generate an actual string and then unwind,
+combining these strings as we go back up the tree.
+
+For example, consider a method definition:
+
+```
+    fn foo(a: A, b: B) {
+        ...
+    }
+```
+
+We start at indent 4, the rewrite function for the whole function knows it must
+write `fn foo(` before the arguments and `) {` after them, assuming the max width
+is 100, it thus asks the rewrite argument list function to rewrite with an indent
+of 11 and in a width of 86. Assuming that is possible (obviously in this case),
+it returns a string for the arguments and it can make a string for the function
+header. If the arguments couldn't be fitted in that space, we might try to
+fallback to a hanging indent, so we try again with indent 8 and width 89.
diff --git a/src/tools/rustfmt/LICENSE-APACHE b/src/tools/rustfmt/LICENSE-APACHE
new file mode 100644
index 000000000..212ba1f31
--- /dev/null
+++ b/src/tools/rustfmt/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright 2016-2021 The Rust Project Developers
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/src/tools/rustfmt/LICENSE-MIT b/src/tools/rustfmt/LICENSE-MIT
new file mode 100644
index 000000000..1baa137f6
--- /dev/null
+++ b/src/tools/rustfmt/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2016-2021 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/rustfmt/Makefile.toml b/src/tools/rustfmt/Makefile.toml
new file mode 100644
index 000000000..597dd1205
--- /dev/null
+++ b/src/tools/rustfmt/Makefile.toml
@@ -0,0 +1,71 @@
+[env]
+CFG_RELEASE = { value = "${CARGO_MAKE_RUST_VERSION}", condition = { env_not_set = ["CFG_RELEASE"] } }
+CFG_RELEASE_CHANNEL = { value = "${CARGO_MAKE_RUST_CHANNEL}", condition = { env_not_set = ["CFG_RELEASE_CHANNEL"] } }
+
+[tasks.build-bin]
+command = "cargo"
+args = [
+ "build",
+ "--bin",
+ "rustfmt",
+ "--bin",
+ "cargo-fmt",
+]
+
+[tasks.build-bins]
+command = "cargo"
+args = [
+ "build",
+ "--bins",
+]
+
+[tasks.install]
+command = "cargo"
+args = [
+ "install",
+ "--path",
+ ".",
+ "--force",
+ "--locked", # Respect Cargo.lock
+]
+
+[tasks.release]
+command = "cargo"
+args = [
+ "build",
+ "--release",
+]
+
+[tasks.test]
+command = "cargo"
+args = [
+ "test",
+]
+
+[tasks.test-all]
+dependencies = ["build-bin"]
+run_task = { name = ["test", "test-ignored"] }
+
+[tasks.test-ignored]
+command = "cargo"
+args = [
+ "test",
+ "--",
+ "--ignored",
+]
+
+[tasks.b]
+alias = "build"
+
+[tasks.bb]
+alias = "build-bin"
+
+[tasks.bins]
+alias = "build-bins"
+
+[tasks.c]
+alias = "check"
+
+[tasks.t]
+alias = "test"
+
diff --git a/src/tools/rustfmt/Processes.md b/src/tools/rustfmt/Processes.md
new file mode 100644
index 000000000..9d86d52b1
--- /dev/null
+++ b/src/tools/rustfmt/Processes.md
@@ -0,0 +1,57 @@
+This document outlines processes regarding management of rustfmt.
+
+# Stabilising an Option
+
+In this section, we describe how to stabilise an option of rustfmt's configuration.
+
+## Conditions
+
+- The default value is correct.
+- The design and implementation of the option are sound and clean.
+- The option is well tested, both in unit tests and, optimally, in real usage.
+- There is no open bug about the option that prevents its use.
+
+## Steps
+
+Open a pull request that closes the tracking issue. The tracking issue is listed beside the option in `Configurations.md`.
+
+- Update the `Config` enum marking the option as stable.
+- Update the `Configurations.md` file marking the option as stable.
+- Update `CHANGELOG.md` marking the option as stable.
+
+## After the stabilisation
+
+The option should remain backward-compatible with previous parameters of the option. For instance, if the option is an enum `enum Foo { Alice, Bob }` and the variant `Foo::Bob` is removed/renamed, existing use of the `Foo::Bob` variant should map to the new logic. Breaking changes can be applied under the condition they are version-gated.
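+
+For instance (hypothetical code, purely to illustrate the idea), if `Foo::Bob` were renamed to `Foo::Robert`, parsing of the configuration value could keep accepting the old spelling:
+
+```rust
+enum Foo {
+    Alice,
+    Robert, // formerly `Bob`
+}
+
+// Existing configurations that still say "Bob" keep working and map onto the
+// new logic; dropping the old spelling outright would be a breaking change
+// and would therefore need to be version-gated.
+fn parse_foo(value: &str) -> Option<Foo> {
+    match value {
+        "Alice" => Some(Foo::Alice),
+        "Robert" | "Bob" => Some(Foo::Robert),
+        _ => None,
+    }
+}
+```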
+
+# Make a Release
+
+## 0. Update CHANGELOG.md
+
+## 1. Update Cargo.toml and Cargo.lock
+
+For example, 1.0.0 -> 1.0.1:
+
+```diff
+-version = "1.0.0"
++version = "1.0.1"
+```
+
+## 2. Push the commit to the master branch
+
+E.g., https://github.com/rust-lang/rustfmt/commit/5274b49caa1a7db6ac10c76bf1a3d5710ccef569
+
+## 3. Create a release tag
+
+```sh
+git tag -s v1.2.3 -m "Release 1.2.3"
+```
+
+## 4. Publish to crates.io
+
+`cargo publish`
+
+## 5. Create a PR to rust-lang/rust to update the rustfmt submodule
+
+Note that if you are updating `rustc-ap-*` crates, then you need to update **every** submodule in the rust-lang/rust repository that depends on those crates to use the same version.
+
+As of 2019/05, there are two such crates: `rls` and `racer` (`racer` depends on `rustc-ap-syntax` and `rls` depends on `racer`, and `rls` is one of the submodules of the rust-lang/rust repository).
diff --git a/src/tools/rustfmt/README.md b/src/tools/rustfmt/README.md
new file mode 100644
index 000000000..b3a968f0c
--- /dev/null
+++ b/src/tools/rustfmt/README.md
@@ -0,0 +1,252 @@
+# rustfmt [![Build Status](https://travis-ci.com/rust-lang/rustfmt.svg?branch=master)](https://travis-ci.com/rust-lang/rustfmt) [![Build Status](https://ci.appveyor.com/api/projects/status/github/rust-lang/rustfmt?svg=true)](https://ci.appveyor.com/project/rust-lang-libs/rustfmt) [![crates.io](https://img.shields.io/crates/v/rustfmt-nightly.svg)](https://crates.io/crates/rustfmt-nightly) [![Travis Configuration Status](https://img.shields.io/travis/davidalber/rustfmt-travis.svg?label=travis%20example)](https://travis-ci.org/davidalber/rustfmt-travis)
+
+A tool for formatting Rust code according to style guidelines.
+
+If you'd like to help out (and you should, it's a fun project!), see
+[Contributing.md](Contributing.md) and our [Code of
+Conduct](CODE_OF_CONDUCT.md).
+
+You can use rustfmt in Travis CI builds. We provide a minimal Travis CI
+configuration (see [here](#checking-style-on-a-ci-server)) and verify its status
+using another repository. The status of that repository's build is reported by
+the "travis example" badge above.
+
+## Quick start
+
+You can run `rustfmt` with Rust 1.24 and above.
+
+### On the Stable toolchain
+
+To install:
+
+```sh
+rustup component add rustfmt
+```
+
+To run on a cargo project in the current working directory:
+
+```sh
+cargo fmt
+```
+
+### On the Nightly toolchain
+
+For the latest and greatest `rustfmt`, nightly is required.
+
+To install:
+
+```sh
+rustup component add rustfmt --toolchain nightly
+```
+
+To run on a cargo project in the current working directory:
+
+```sh
+cargo +nightly fmt
+```
+
+## Limitations
+
+Rustfmt tries to work on as much Rust code as possible. Sometimes, the code
+doesn't even need to compile! In general, we are looking to limit areas of
+instability; in particular, post-1.0, the formatting of most code should not
+change as Rustfmt improves. However, there are some things that Rustfmt can't
+do or can't do well (and thus where formatting might change significantly,
+even post-1.0). We would like to reduce the list of limitations over time.
+
+The following list enumerates areas where Rustfmt does not work or where the
+stability guarantees do not apply (we don't make a distinction between the two
+because in the future Rustfmt might work on code where it currently does not):
+
+* A program where any part of the program does not parse (parsing is an early
+ stage of compilation and in Rust includes macro expansion).
+* Macro declarations and uses (current status: some macro declarations and uses
+ are formatted).
+* Comments, including any AST node with a comment 'inside' (Rustfmt does not
+  currently attempt to format comments; it does format code with comments
+  inside, but that formatting may change in the future).
+* Rust code in code blocks in comments.
+* Any fragment of a program (i.e., stability guarantees only apply to whole
+ programs, even where fragments of a program can be formatted today).
+* Code containing non-ASCII Unicode characters (we believe Rustfmt mostly works
+ here, but do not have the test coverage or experience to be 100% sure).
+* Bugs in Rustfmt (like any software, Rustfmt has bugs; we do not consider bug
+ fixes to break our stability guarantees).
+
+
+## Installation
+
+```sh
+rustup component add rustfmt
+```
+
+## Installing from source
+
+To install from source (nightly required), first check out the tag or branch you want to install, then run
+
+```sh
+cargo install --path .
+```
+
+This will install `rustfmt` in `~/.cargo/bin`. Make sure to add the `~/.cargo/bin` directory to
+your `PATH` variable.
+
+
+## Running
+
+You can run Rustfmt by just typing `rustfmt filename` if you used `cargo
+install`. This runs rustfmt on the given file; if the file includes out-of-line
+modules, those are reformatted too. So to run on a whole module or crate, you
+just need to run on the root file (usually mod.rs or lib.rs). Rustfmt can also
+read data from stdin. Alternatively, you can use `cargo fmt` to format all
+binary and library targets of your crate.
+
+You can run `rustfmt --help` for information about available arguments.
+The easiest way to run rustfmt against a project is with `cargo fmt`. `cargo fmt` works on both
+single-crate projects and [cargo workspaces](https://doc.rust-lang.org/book/ch14-03-cargo-workspaces.html).
+Please see `cargo fmt --help` for usage information.
+
+You can specify the path to your own `rustfmt` binary for cargo to use by setting the `RUSTFMT`
+environment variable. This was added in v1.4.22, so you must have this version or newer to leverage this feature (check with `cargo fmt --version`).
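+
+For example (the path below is only a placeholder for wherever your custom
+binary lives):
+
+```sh
+RUSTFMT=~/custom/bin/rustfmt cargo fmt
+```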
+
+### Running `rustfmt` directly
+
+To format individual files or arbitrary code from stdin, the `rustfmt` binary should be used. Some
+examples follow:
+
+- `rustfmt lib.rs main.rs` will format "lib.rs" and "main.rs" in place
+- `rustfmt` will read code from stdin and write the formatted result to stdout
+ - `echo "fn main() {}" | rustfmt` would emit "fn main() {}".
+
+For more information, including arguments and emit options, see `rustfmt --help`.
+
+### Verifying code is formatted
+
+When running with `--check`, Rustfmt will exit with `0` if Rustfmt would not
+make any formatting changes to the input, and `1` if Rustfmt would make changes.
+In other modes, Rustfmt will exit with `1` if there was some error during
+formatting (for example a parsing or internal error) and `0` if formatting
+completed without error (whether or not changes were made).
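+
+For example, a small wrapper script built on this exit code (a sketch; it
+assumes `cargo fmt` is available) might look like:
+
+```sh
+#!/bin/bash
+# --check makes rustfmt exit non-zero if it would change anything.
+if ! cargo fmt --all -- --check; then
+    echo "Code is not formatted; run 'cargo fmt --all'." >&2
+    exit 1
+fi
+```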
+
+
+
+## Running Rustfmt from your editor
+
+* [Vim](https://github.com/rust-lang/rust.vim#formatting-with-rustfmt)
+* [Emacs](https://github.com/rust-lang/rust-mode)
+* [Sublime Text 3](https://packagecontrol.io/packages/RustFmt)
+* [Atom](atom.md)
+* Visual Studio Code using [vscode-rust](https://github.com/editor-rs/vscode-rust), [vsc-rustfmt](https://github.com/Connorcpu/vsc-rustfmt) or [rls_vscode](https://github.com/jonathandturner/rls_vscode) through RLS.
+* [IntelliJ or CLion](intellij.md)
+
+
+## Checking style on a CI server
+
+To keep your code base consistently formatted, it can be helpful to fail the CI build
+when a pull request contains unformatted code. Using `--check` instructs
+rustfmt to exit with an error code if the input is not formatted correctly.
+It will also print any differences found. (Older versions of Rustfmt don't
+support `--check`; use `--write-mode diff` instead.)
+
+A minimal Travis setup could look like this (requires Rust 1.31.0 or greater):
+
+```yaml
+language: rust
+before_script:
+- rustup component add rustfmt
+script:
+- cargo build
+- cargo test
+- cargo fmt --all -- --check
+```
+
+See [this blog post](https://medium.com/@ag_dubs/enforcing-style-in-ci-for-rust-projects-18f6b09ec69d)
+for more info.
+
+## How to build and test
+
+`cargo build` to build.
+
+`cargo test` to run all tests.
+
+To run rustfmt after this, use `cargo run --bin rustfmt -- filename`. See the
+notes above on running rustfmt.
+
+
+## Configuring Rustfmt
+
+Rustfmt is designed to be very configurable. You can create a TOML file called
+`rustfmt.toml` or `.rustfmt.toml`, place it in the project root or any parent
+directory, and Rustfmt will apply the options in that file. See `rustfmt
+--help=config` for the options which are available, or, if you prefer to see
+visual style previews, the [GitHub page](https://rust-lang.github.io/rustfmt/).
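+
+As an illustration, a small `rustfmt.toml` could look like the following (the
+options shown are only examples; see `rustfmt --help=config` for what your
+toolchain actually supports):
+
+```toml
+max_width = 100
+reorder_imports = true
+```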
+
+By default, Rustfmt uses a style which conforms to the [Rust style guide][style
+guide] that has been formalized through the [style RFC
+process][fmt rfcs].
+
+Configuration options are either stable or unstable. Stable options can always
+be used, while unstable ones are only available on a nightly toolchain and must be opted into.
+See [GitHub page](https://rust-lang.github.io/rustfmt/) for details.
+
+### Rust's Editions
+
+Rustfmt is able to pick up the edition used by reading the `Cargo.toml` file if
+executed through Cargo's formatting tool `cargo fmt`. Otherwise, the edition
+needs to be specified in `rustfmt.toml`, e.g., with `edition = "2018"`.
+
+## Tips
+
+* For things you do not want rustfmt to mangle, use `#[rustfmt::skip]`
+* To prevent rustfmt from formatting a macro or an attribute,
+ use `#[rustfmt::skip::macros(target_macro_name)]` or
+ `#[rustfmt::skip::attributes(target_attribute_name)]`
+
+ Example:
+
+ ```rust
+ #![rustfmt::skip::attributes(custom_attribute)]
+
+ #[custom_attribute(formatting , here , should , be , Skipped)]
+ #[rustfmt::skip::macros(html)]
+ fn main() {
+ let macro_result1 = html! { <div>
+ Hello</div>
+      }.to_string();
+  }
+ ```
+* When you run rustfmt, place a file named `rustfmt.toml` or `.rustfmt.toml` in
+  the target file's directory or its parents to override the default settings of
+ rustfmt. You can generate a file containing the default configuration with
+ `rustfmt --print-config default rustfmt.toml` and customize as needed.
+* After successful compilation, a `rustfmt` executable can be found in the
+ target directory.
+* If you're having issues compiling Rustfmt (or compile errors when trying to
+ install), make sure you have the most recent version of Rust installed.
+
+* You can change the way rustfmt emits the changes with the `--emit` flag:
+
+ Example:
+
+ ```sh
+ cargo fmt -- --emit files
+ ```
+
+ Options:
+
+  | Flag | Description | Nightly Only |
+ |:---:|:---:|:---:|
+ | files | overwrites output to files | No |
+ | stdout | writes output to stdout | No |
+ | coverage | displays how much of the input file was processed | Yes |
+ | checkstyle | emits in a checkstyle format | Yes |
+ | json | emits diffs in a json format | Yes |
+
+## License
+
+Rustfmt is distributed under the terms of both the MIT license and the
+Apache License (Version 2.0).
+
+See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details.
+
+[rust]: https://github.com/rust-lang/rust
+[fmt rfcs]: https://github.com/rust-dev-tools/fmt-rfcs
+[style guide]: https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/guide.md
diff --git a/src/tools/rustfmt/atom.md b/src/tools/rustfmt/atom.md
new file mode 100644
index 000000000..f77ac1490
--- /dev/null
+++ b/src/tools/rustfmt/atom.md
@@ -0,0 +1,31 @@
+# Running Rustfmt from Atom
+
+## RLS
+
+Rustfmt is included with the Rust Language Server, itself provided by [ide-rust](https://atom.io/packages/ide-rust).
+
+`apm install ide-rust`
+
+Once installed, a file can be formatted with `ctrl-shift-c` or `cmd-shift-c`; formatting is also available from the context menu.
+
+## atom-beautify
+
+Another way is to install [Beautify](https://atom.io/packages/atom-beautify); you
+can do this by running `apm install atom-beautify`.
+
+There are two settings that need to be configured in the atom-beautify configuration.
+
+- Install rustfmt as per the [readme](README.md).
+- Open the atom beautifier settings
+
+  Go to Edit->Preferences, click Packages on the left side, and open the settings for atom-beautify.
+
+- Set rustfmt as the beautifier
+
+ Find the setting labeled *Language Config - Rust - Default Beautifier* and make sure it is set to rustfmt as shown below. You can also set the beautifier to auto format on save here.
+![image](https://cloud.githubusercontent.com/assets/6623285/11147685/c8ade16c-8a3d-11e5-9da5-bd3d998d97f9.png)
+
+- Set the path to your rustfmt location
+
+ Find the setting labeled *Rust - Rustfmt Path*. This setting is towards the bottom and you will need to scroll a bit. Set it to the path for your rustfmt executable.
+![image](https://cloud.githubusercontent.com/assets/6623285/11147718/f4d10224-8a3d-11e5-9f69-9e900cbe0278.png)
diff --git a/src/tools/rustfmt/bootstrap.sh b/src/tools/rustfmt/bootstrap.sh
new file mode 100755
index 000000000..05ac0ce2f
--- /dev/null
+++ b/src/tools/rustfmt/bootstrap.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+# Make sure you double check the diffs after running this script - with great
+# power comes great responsibility.
+# We deliberately avoid reformatting files with rustfmt comment directives.
+
+cargo build --release
+
+target/release/rustfmt src/lib.rs
+target/release/rustfmt src/bin/main.rs
+target/release/rustfmt src/cargo-fmt/main.rs
+
+for filename in tests/target/*.rs; do
+ if ! grep -q "rustfmt-" "$filename"; then
+    target/release/rustfmt "$filename"
+ fi
+done
diff --git a/src/tools/rustfmt/build.rs b/src/tools/rustfmt/build.rs
new file mode 100644
index 000000000..e7b1e1b85
--- /dev/null
+++ b/src/tools/rustfmt/build.rs
@@ -0,0 +1,55 @@
+use std::env;
+use std::fs::File;
+use std::io::Write;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+fn main() {
+ // Only check .git/HEAD dirty status if it exists - doing so when
+ // building dependent crates may lead to false positives and rebuilds
+ if Path::new(".git/HEAD").exists() {
+ println!("cargo:rerun-if-changed=.git/HEAD");
+ }
+
+ println!("cargo:rerun-if-env-changed=CFG_RELEASE_CHANNEL");
+
+ let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+
+ File::create(out_dir.join("commit-info.txt"))
+ .unwrap()
+ .write_all(commit_info().as_bytes())
+ .unwrap();
+}
+
+// Try to get hash and date of the last commit on a best effort basis. If anything goes wrong
+// (git not installed or if this is not a git repository) just return an empty string.
+fn commit_info() -> String {
+ match (channel(), commit_hash(), commit_date()) {
+ (channel, Some(hash), Some(date)) => format!("{} ({} {})", channel, hash.trim_end(), date),
+ _ => String::new(),
+ }
+}
+
+fn channel() -> String {
+ if let Ok(channel) = env::var("CFG_RELEASE_CHANNEL") {
+ channel
+ } else {
+ "nightly".to_owned()
+ }
+}
+
+fn commit_hash() -> Option<String> {
+ Command::new("git")
+ .args(&["rev-parse", "--short", "HEAD"])
+ .output()
+ .ok()
+ .and_then(|r| String::from_utf8(r.stdout).ok())
+}
+
+fn commit_date() -> Option<String> {
+ Command::new("git")
+ .args(&["log", "-1", "--date=short", "--pretty=format:%cd"])
+ .output()
+ .ok()
+ .and_then(|r| String::from_utf8(r.stdout).ok())
+}
diff --git a/src/tools/rustfmt/ci/build_and_test.bat b/src/tools/rustfmt/ci/build_and_test.bat
new file mode 100755
index 000000000..ef4101778
--- /dev/null
+++ b/src/tools/rustfmt/ci/build_and_test.bat
@@ -0,0 +1,14 @@
+set "RUSTFLAGS=-D warnings"
+
+:: Print version information
+rustc -Vv || exit /b 1
+cargo -V || exit /b 1
+
+:: Build and test main crate
+cargo build --locked || exit /b 1
+cargo test || exit /b 1
+
+:: Build and test other crates
+cd config_proc_macro || exit /b 1
+cargo build --locked || exit /b 1
+cargo test || exit /b 1
diff --git a/src/tools/rustfmt/ci/build_and_test.sh b/src/tools/rustfmt/ci/build_and_test.sh
new file mode 100755
index 000000000..8fa0f67b0
--- /dev/null
+++ b/src/tools/rustfmt/ci/build_and_test.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -euo pipefail
+
+export RUSTFLAGS="-D warnings"
+
+# Print version information
+rustc -Vv
+cargo -V
+
+# Build and test main crate
+cargo build --locked
+cargo test
+
+# Build and test other crates
+cd config_proc_macro
+cargo build --locked
+cargo test
diff --git a/src/tools/rustfmt/ci/integration.sh b/src/tools/rustfmt/ci/integration.sh
new file mode 100755
index 000000000..562d5d70c
--- /dev/null
+++ b/src/tools/rustfmt/ci/integration.sh
@@ -0,0 +1,107 @@
+#!/usr/bin/env bash
+
+set -ex
+
+: ${INTEGRATION?"The INTEGRATION environment variable must be set."}
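+# Example invocation (sketch): INTEGRATION=cargo ./ci/integration.sh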
+
+# FIXME: this means we can get a stale cargo-fmt from a previous run.
+#
+# `which rustfmt` fails if rustfmt is not found. Since we don't install
+# `rustfmt` via `rustup`, this is the case unless we manually install it. Once
+# that happens, `cargo install --force` will be called, which installs
+# `rustfmt`, `cargo-fmt`, etc to `~/.cargo/bin`. This directory is cached by
+# travis (see `.travis.yml`'s "cache" key), such that build-bots that arrive
+# here after the first installation will find `rustfmt` and won't need to build
+# it again.
+#
+#which cargo-fmt || cargo install --force
+CFG_RELEASE=nightly CFG_RELEASE_CHANNEL=nightly cargo install --path . --force --locked
+
+echo "Integration tests for: ${INTEGRATION}"
+cargo fmt -- --version
+
+# Checks that:
+#
+# * `cargo fmt --all` succeeds without any warnings or errors
+# * `cargo fmt --all -- --check` after formatting returns success
+# * `cargo test --all` still passes (formatting did not break the build)
+function check_fmt_with_all_tests {
+ check_fmt_base "--all"
+ return $?
+}
+
+# Checks that:
+#
+# * `cargo fmt --all` succeeds without any warnings or errors
+# * `cargo fmt --all -- --check` after formatting returns success
+# * `cargo test --lib` still passes (formatting did not break the build)
+function check_fmt_with_lib_tests {
+ check_fmt_base "--lib"
+ return $?
+}
+
+function check_fmt_base {
+ local test_args="$1"
+ local build=$(cargo test $test_args 2>&1)
+ if [[ "$build" =~ "build failed" ]] || [[ "$build" =~ "test result: FAILED." ]]; then
+ return 0
+ fi
+ touch rustfmt.toml
+ cargo fmt --all -v |& tee rustfmt_output
+ if [[ ${PIPESTATUS[0]} != 0 ]]; then
+ cat rustfmt_output
+ return 1
+ fi
+ cat rustfmt_output
+ ! cat rustfmt_output | grep -q "internal error"
+ if [[ $? != 0 ]]; then
+ return 1
+ fi
+ ! cat rustfmt_output | grep -q "warning"
+ if [[ $? != 0 ]]; then
+ return 1
+ fi
+ ! cat rustfmt_output | grep -q "Warning"
+ if [[ $? != 0 ]]; then
+ return 1
+ fi
+ cargo fmt --all -- --check |& tee rustfmt_check_output
+ if [[ ${PIPESTATUS[0]} != 0 ]]; then
+ cat rustfmt_check_output
+ return 1
+ fi
+  cargo test $test_args
+}
+
+function show_head {
+ local head=$(git rev-parse HEAD)
+ echo "Head commit of ${INTEGRATION}: $head"
+}
+
+case ${INTEGRATION} in
+ cargo)
+ git clone --depth=1 https://github.com/rust-lang/${INTEGRATION}.git
+ cd ${INTEGRATION}
+ show_head
+ export CFG_DISABLE_CROSS_TESTS=1
+ check_fmt_with_all_tests
+ cd -
+ ;;
+ crater)
+ git clone --depth=1 https://github.com/rust-lang-nursery/${INTEGRATION}.git
+ cd ${INTEGRATION}
+ show_head
+ check_fmt_with_lib_tests
+ cd -
+ ;;
+ *)
+ git clone --depth=1 https://github.com/rust-lang-nursery/${INTEGRATION}.git
+ cd ${INTEGRATION}
+ show_head
+ check_fmt_with_all_tests
+ cd -
+ ;;
+esac
diff --git a/src/tools/rustfmt/config_proc_macro/Cargo.lock b/src/tools/rustfmt/config_proc_macro/Cargo.lock
new file mode 100644
index 000000000..ecf561f28
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/Cargo.lock
@@ -0,0 +1,68 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e98a83a9f9b331f54b924e68a66acb1bb35cb01fb0a23645139967abefb697e8"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rustfmt-config_proc_macro"
+version = "0.2.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fec2851eb56d010dc9a21b89ca53ee75e6528bab60c11e89d38390904982da9f"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb4dc18c61206b08dc98216c98faa0232f4337e1e1b8574551d5bad29ea1b425"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
diff --git a/src/tools/rustfmt/config_proc_macro/Cargo.toml b/src/tools/rustfmt/config_proc_macro/Cargo.toml
new file mode 100644
index 000000000..a41b3a5e6
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "rustfmt-config_proc_macro"
+version = "0.2.0"
+edition = "2018"
+description = "A collection of procedural macros for rustfmt"
+license = "Apache-2.0/MIT"
+categories = ["development-tools::procedural-macro-helpers"]
+repository = "https://github.com/rust-lang/rustfmt"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2 = "1.0"
+quote = "1.0"
+syn = { version = "1.0", features = ["full", "visit"] }
+
+[dev-dependencies]
+serde = { version = "1.0", features = ["derive"] }
+
+[features]
+default = []
+debug-with-rustfmt = []
diff --git a/src/tools/rustfmt/config_proc_macro/src/attrs.rs b/src/tools/rustfmt/config_proc_macro/src/attrs.rs
new file mode 100644
index 000000000..0baba046f
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/src/attrs.rs
@@ -0,0 +1,57 @@
+//! This module provides utilities for handling attributes on variants
+//! of a `config_type` enum. Currently there are two types of attributes
+//! that can appear on the variants of a `config_type` enum: `doc_hint`
+//! and `value`. Both come in the form of a name-value pair whose value
+//! is a string literal.
+
+/// Returns the value of the first `doc_hint` attribute in the given slice or
+/// `None` if `doc_hint` attribute is not available.
+pub fn find_doc_hint(attrs: &[syn::Attribute]) -> Option<String> {
+ attrs.iter().filter_map(doc_hint).next()
+}
+
+/// Returns `true` if the given attribute is a `doc_hint` attribute.
+pub fn is_doc_hint(attr: &syn::Attribute) -> bool {
+ is_attr_name_value(attr, "doc_hint")
+}
+
+/// Returns a string literal value if the given attribute is `doc_hint`
+/// attribute or `None` otherwise.
+pub fn doc_hint(attr: &syn::Attribute) -> Option<String> {
+ get_name_value_str_lit(attr, "doc_hint")
+}
+
+/// Returns the value of the first `value` attribute in the given slice or
+/// `None` if `value` attribute is not available.
+pub fn find_config_value(attrs: &[syn::Attribute]) -> Option<String> {
+ attrs.iter().filter_map(config_value).next()
+}
+
+/// Returns a string literal value if the given attribute is `value`
+/// attribute or `None` otherwise.
+pub fn config_value(attr: &syn::Attribute) -> Option<String> {
+ get_name_value_str_lit(attr, "value")
+}
+
+/// Returns `true` if the given attribute is a `value` attribute.
+pub fn is_config_value(attr: &syn::Attribute) -> bool {
+ is_attr_name_value(attr, "value")
+}
+
+fn is_attr_name_value(attr: &syn::Attribute, name: &str) -> bool {
+ attr.parse_meta().ok().map_or(false, |meta| match meta {
+ syn::Meta::NameValue(syn::MetaNameValue { ref path, .. }) if path.is_ident(name) => true,
+ _ => false,
+ })
+}
+
+fn get_name_value_str_lit(attr: &syn::Attribute, name: &str) -> Option<String> {
+ attr.parse_meta().ok().and_then(|meta| match meta {
+ syn::Meta::NameValue(syn::MetaNameValue {
+ ref path,
+ lit: syn::Lit::Str(ref lit_str),
+ ..
+ }) if path.is_ident(name) => Some(lit_str.value()),
+ _ => None,
+ })
+}
diff --git a/src/tools/rustfmt/config_proc_macro/src/config_type.rs b/src/tools/rustfmt/config_proc_macro/src/config_type.rs
new file mode 100644
index 000000000..93a78b846
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/src/config_type.rs
@@ -0,0 +1,15 @@
+use proc_macro2::TokenStream;
+
+use crate::item_enum::define_config_type_on_enum;
+use crate::item_struct::define_config_type_on_struct;
+
+/// Defines `config_type` on enum or struct.
+// FIXME: Implement this on struct.
+pub fn define_config_type(input: &syn::Item) -> TokenStream {
+ match input {
+ syn::Item::Struct(st) => define_config_type_on_struct(st),
+ syn::Item::Enum(en) => define_config_type_on_enum(en),
+ _ => panic!("Expected enum or struct"),
+ }
+ .unwrap()
+}
diff --git a/src/tools/rustfmt/config_proc_macro/src/item_enum.rs b/src/tools/rustfmt/config_proc_macro/src/item_enum.rs
new file mode 100644
index 000000000..dcee77a85
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/src/item_enum.rs
@@ -0,0 +1,208 @@
+use proc_macro2::TokenStream;
+use quote::quote;
+
+use crate::attrs::*;
+use crate::utils::*;
+
+type Variants = syn::punctuated::Punctuated<syn::Variant, syn::Token![,]>;
+
+/// Defines and implements `config_type` enum.
+pub fn define_config_type_on_enum(em: &syn::ItemEnum) -> syn::Result<TokenStream> {
+ let syn::ItemEnum {
+ vis,
+ enum_token,
+ ident,
+ generics,
+ variants,
+ ..
+ } = em;
+
+ let mod_name_str = format!("__define_config_type_on_enum_{}", ident);
+ let mod_name = syn::Ident::new(&mod_name_str, ident.span());
+ let variants = fold_quote(variants.iter().map(process_variant), |meta| quote!(#meta,));
+
+ let impl_doc_hint = impl_doc_hint(&em.ident, &em.variants);
+ let impl_from_str = impl_from_str(&em.ident, &em.variants);
+ let impl_display = impl_display(&em.ident, &em.variants);
+ let impl_serde = impl_serde(&em.ident, &em.variants);
+ let impl_deserialize = impl_deserialize(&em.ident, &em.variants);
+
+ Ok(quote! {
+ #[allow(non_snake_case)]
+ mod #mod_name {
+ #[derive(Debug, Copy, Clone, Eq, PartialEq)]
+ pub #enum_token #ident #generics { #variants }
+ #impl_display
+ #impl_doc_hint
+ #impl_from_str
+ #impl_serde
+ #impl_deserialize
+ }
+ #vis use #mod_name::#ident;
+ })
+}
+
+/// Remove attributes specific to `config_proc_macro` from enum variant fields.
+fn process_variant(variant: &syn::Variant) -> TokenStream {
+ let metas = variant
+ .attrs
+ .iter()
+ .filter(|attr| !is_doc_hint(attr) && !is_config_value(attr));
+ let attrs = fold_quote(metas, |meta| quote!(#meta));
+ let syn::Variant { ident, fields, .. } = variant;
+ quote!(#attrs #ident #fields)
+}
+
+fn impl_doc_hint(ident: &syn::Ident, variants: &Variants) -> TokenStream {
+ let doc_hint = variants
+ .iter()
+ .map(doc_hint_of_variant)
+ .collect::<Vec<_>>()
+ .join("|");
+ let doc_hint = format!("[{}]", doc_hint);
+ quote! {
+ use crate::config::ConfigType;
+ impl ConfigType for #ident {
+ fn doc_hint() -> String {
+ #doc_hint.to_owned()
+ }
+ }
+ }
+}
+
+fn impl_display(ident: &syn::Ident, variants: &Variants) -> TokenStream {
+ let vs = variants
+ .iter()
+ .filter(|v| is_unit(v))
+ .map(|v| (config_value_of_variant(v), &v.ident));
+ let match_patterns = fold_quote(vs, |(s, v)| {
+ quote! {
+ #ident::#v => write!(f, "{}", #s),
+ }
+ });
+ quote! {
+ use std::fmt;
+ impl fmt::Display for #ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ #match_patterns
+ _ => unimplemented!(),
+ }
+ }
+ }
+ }
+}
+
+fn impl_from_str(ident: &syn::Ident, variants: &Variants) -> TokenStream {
+ let vs = variants
+ .iter()
+ .filter(|v| is_unit(v))
+ .map(|v| (config_value_of_variant(v), &v.ident));
+ let if_patterns = fold_quote(vs, |(s, v)| {
+ quote! {
+ if #s.eq_ignore_ascii_case(s) {
+ return Ok(#ident::#v);
+ }
+ }
+ });
+ let mut err_msg = String::from("Bad variant, expected one of:");
+ for v in variants.iter().filter(|v| is_unit(v)) {
+ err_msg.push_str(&format!(" `{}`", v.ident));
+ }
+
+ quote! {
+ impl ::std::str::FromStr for #ident {
+ type Err = &'static str;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ #if_patterns
+ return Err(#err_msg);
+ }
+ }
+ }
+}
+
+fn doc_hint_of_variant(variant: &syn::Variant) -> String {
+ find_doc_hint(&variant.attrs).unwrap_or(variant.ident.to_string())
+}
+
+fn config_value_of_variant(variant: &syn::Variant) -> String {
+ find_config_value(&variant.attrs).unwrap_or(variant.ident.to_string())
+}
+
+fn impl_serde(ident: &syn::Ident, variants: &Variants) -> TokenStream {
+ let arms = fold_quote(variants.iter(), |v| {
+ let v_ident = &v.ident;
+ let pattern = match v.fields {
+            syn::Fields::Named(..) => quote!(#ident::#v_ident{..}),
+ syn::Fields::Unnamed(..) => quote!(#ident::#v_ident(..)),
+ syn::Fields::Unit => quote!(#ident::#v_ident),
+ };
+ let option_value = config_value_of_variant(v);
+ quote! {
+ #pattern => serializer.serialize_str(&#option_value),
+ }
+ });
+
+ quote! {
+ impl ::serde::ser::Serialize for #ident {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: ::serde::ser::Serializer,
+ {
+ use serde::ser::Error;
+ match self {
+ #arms
+ _ => Err(S::Error::custom(format!("Cannot serialize {:?}", self))),
+ }
+ }
+ }
+ }
+}
+
+// Currently only unit variants are supported.
+fn impl_deserialize(ident: &syn::Ident, variants: &Variants) -> TokenStream {
+ let supported_vs = variants.iter().filter(|v| is_unit(v));
+ let if_patterns = fold_quote(supported_vs, |v| {
+ let config_value = config_value_of_variant(v);
+ let variant_ident = &v.ident;
+ quote! {
+ if #config_value.eq_ignore_ascii_case(s) {
+ return Ok(#ident::#variant_ident);
+ }
+ }
+ });
+
+ let supported_vs = variants.iter().filter(|v| is_unit(v));
+ let allowed = fold_quote(supported_vs.map(config_value_of_variant), |s| quote!(#s,));
+
+ quote! {
+ impl<'de> serde::de::Deserialize<'de> for #ident {
+ fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ use serde::de::{Error, Visitor};
+ use std::marker::PhantomData;
+ use std::fmt;
+ struct StringOnly<T>(PhantomData<T>);
+ impl<'de, T> Visitor<'de> for StringOnly<T>
+ where T: serde::Deserializer<'de> {
+ type Value = String;
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("string")
+ }
+ fn visit_str<E>(self, value: &str) -> Result<String, E> {
+ Ok(String::from(value))
+ }
+ }
+ let s = &d.deserialize_string(StringOnly::<D>(PhantomData))?;
+
+ #if_patterns
+
+ static ALLOWED: &'static[&str] = &[#allowed];
+ Err(D::Error::unknown_variant(&s, ALLOWED))
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/config_proc_macro/src/item_struct.rs b/src/tools/rustfmt/config_proc_macro/src/item_struct.rs
new file mode 100644
index 000000000..f03ff7e30
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/src/item_struct.rs
@@ -0,0 +1,5 @@
+use proc_macro2::TokenStream;
+
+pub fn define_config_type_on_struct(_st: &syn::ItemStruct) -> syn::Result<TokenStream> {
+ unimplemented!()
+}
diff --git a/src/tools/rustfmt/config_proc_macro/src/lib.rs b/src/tools/rustfmt/config_proc_macro/src/lib.rs
new file mode 100644
index 000000000..e772c53f4
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/src/lib.rs
@@ -0,0 +1,71 @@
+//! This crate provides a derive macro for `ConfigType`.
+
+#![recursion_limit = "256"]
+
+mod attrs;
+mod config_type;
+mod item_enum;
+mod item_struct;
+mod utils;
+
+use std::str::FromStr;
+
+use proc_macro::TokenStream;
+use syn::parse_macro_input;
+
+#[proc_macro_attribute]
+pub fn config_type(_args: TokenStream, input: TokenStream) -> TokenStream {
+ let input = parse_macro_input!(input as syn::Item);
+ let output = config_type::define_config_type(&input);
+
+ #[cfg(feature = "debug-with-rustfmt")]
+ {
+ utils::debug_with_rustfmt(&output);
+ }
+
+ TokenStream::from(output)
+}
+
+/// Used to conditionally output the TokenStream for tests that need to be run on nightly only.
+///
+/// ```rust
+/// # use rustfmt_config_proc_macro::nightly_only_test;
+///
+/// #[nightly_only_test]
+/// #[test]
+/// fn test_needs_nightly_rustfmt() {
+/// assert!(true);
+/// }
+/// ```
+#[proc_macro_attribute]
+pub fn nightly_only_test(_args: TokenStream, input: TokenStream) -> TokenStream {
+ // if CFG_RELEASE_CHANNEL is not set we default to nightly, hence why the default is true
+ if option_env!("CFG_RELEASE_CHANNEL").map_or(true, |c| c == "nightly" || c == "dev") {
+ input
+ } else {
+ // output an empty token stream if CFG_RELEASE_CHANNEL is not set to "nightly" or "dev"
+ TokenStream::from_str("").unwrap()
+ }
+}
+
+/// Used to conditionally output the TokenStream for tests that need to be run on stable only.
+///
+/// ```rust
+/// # use rustfmt_config_proc_macro::stable_only_test;
+///
+/// #[stable_only_test]
+/// #[test]
+/// fn test_needs_stable_rustfmt() {
+/// assert!(true);
+/// }
+/// ```
+#[proc_macro_attribute]
+pub fn stable_only_test(_args: TokenStream, input: TokenStream) -> TokenStream {
+ // if CFG_RELEASE_CHANNEL is not set we default to nightly, hence why the default is false
+ if option_env!("CFG_RELEASE_CHANNEL").map_or(false, |c| c == "stable") {
+ input
+ } else {
+ // output an empty token stream if CFG_RELEASE_CHANNEL is not set or is not 'stable'
+ TokenStream::from_str("").unwrap()
+ }
+}
diff --git a/src/tools/rustfmt/config_proc_macro/src/utils.rs b/src/tools/rustfmt/config_proc_macro/src/utils.rs
new file mode 100644
index 000000000..f5cba87b0
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/src/utils.rs
@@ -0,0 +1,52 @@
+use proc_macro2::TokenStream;
+use quote::{quote, ToTokens};
+
+pub fn fold_quote<F, I, T>(input: impl Iterator<Item = I>, f: F) -> TokenStream
+where
+ F: Fn(I) -> T,
+ T: ToTokens,
+{
+ input.fold(quote! {}, |acc, x| {
+ let y = f(x);
+ quote! { #acc #y }
+ })
+}
+
+pub fn is_unit(v: &syn::Variant) -> bool {
+ match v.fields {
+ syn::Fields::Unit => true,
+ _ => false,
+ }
+}
+
+#[cfg(feature = "debug-with-rustfmt")]
+/// Pretty-print the output of proc macro using rustfmt.
+pub fn debug_with_rustfmt(input: &TokenStream) {
+ use std::env;
+ use std::ffi::OsStr;
+ use std::io::Write;
+ use std::process::{Command, Stdio};
+
+ let rustfmt_var = env::var_os("RUSTFMT");
+ let rustfmt = match &rustfmt_var {
+ Some(rustfmt) => rustfmt,
+ None => OsStr::new("rustfmt"),
+ };
+ let mut child = Command::new(rustfmt)
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .spawn()
+ .expect("Failed to spawn rustfmt in stdio mode");
+ {
+ let stdin = child.stdin.as_mut().expect("Failed to get stdin");
+ stdin
+ .write_all(format!("{}", input).as_bytes())
+ .expect("Failed to write to stdin");
+ }
+ let rustfmt_output = child.wait_with_output().expect("rustfmt has failed");
+
+ eprintln!(
+ "{}",
+ String::from_utf8(rustfmt_output.stdout).expect("rustfmt returned non-UTF8 string")
+ );
+}
diff --git a/src/tools/rustfmt/config_proc_macro/tests/smoke.rs b/src/tools/rustfmt/config_proc_macro/tests/smoke.rs
new file mode 100644
index 000000000..940a8a0c2
--- /dev/null
+++ b/src/tools/rustfmt/config_proc_macro/tests/smoke.rs
@@ -0,0 +1,20 @@
+pub mod config {
+ pub trait ConfigType: Sized {
+ fn doc_hint() -> String;
+ }
+}
+
+#[allow(dead_code)]
+#[allow(unused_imports)]
+mod tests {
+ use rustfmt_config_proc_macro::config_type;
+
+ #[config_type]
+ enum Bar {
+ Foo,
+ Bar,
+ #[doc_hint = "foo_bar"]
+ FooBar,
+ FooFoo(i32),
+ }
+}
diff --git a/src/tools/rustfmt/intellij.md b/src/tools/rustfmt/intellij.md
new file mode 100644
index 000000000..6a711c017
--- /dev/null
+++ b/src/tools/rustfmt/intellij.md
@@ -0,0 +1,35 @@
+# Running Rustfmt from IntelliJ or CLion
+
+## Installation
+
+- Install [CLion](https://www.jetbrains.com/clion/), [IntelliJ Ultimate or CE](https://www.jetbrains.com/idea/) through the direct download link or using the [JetBrains Toolbox](https://www.jetbrains.com/toolbox/).
+ CLion and IntelliJ Ultimate [provide a built-in debugger interface](https://github.com/intellij-rust/intellij-rust#compatible-ides) but they are not free like IntelliJ CE.
+
+- Install the [Rust Plugin](https://intellij-rust.github.io/) by navigating to File → Settings → Plugins and searching for the plugin in the Marketplace
+ ![plugins](https://user-images.githubusercontent.com/6505554/83944518-6f1e5c00-a81d-11ea-9c35-e16948811ba8.png)
+
+- Press "Install" on the Rust plugin
+ ![install rust](https://user-images.githubusercontent.com/6505554/83944533-82c9c280-a81d-11ea-86b3-ee2e31bc7d12.png)
+
+- Restart CLion/IntelliJ
+
+## Configuration
+
+### Run Rustfmt on save
+
+- Open Rustfmt settings (File → Settings → Languages & Frameworks → Rust → Rustfmt) and enable "Run rustfmt on Save"
+ ![run_rustfmt_on_save](https://user-images.githubusercontent.com/6505554/83944610-3468f380-a81e-11ea-9c34-0cbd18dd4969.png)
+
+- IntelliJ uses autosave, so your files will always be formatted according to rustfmt. Alternatively, you can use Ctrl+S to reformat a file manually
+
+### Bind shortcut to "Reformat File with Rustfmt" action
+
+- Open the settings window (File → Settings) and search for "reformat"
+ ![keymap](https://user-images.githubusercontent.com/1133787/47240922-2ae10c80-d3ea-11e8-9d8f-c798d9749240.png)
+- Right-click on "Reformat File with Rustfmt" and assign a keyboard shortcut
+
+ ![shortcut_window](https://user-images.githubusercontent.com/1133787/47240981-5b28ab00-d3ea-11e8-882e-8b864164db74.png)
+- Press "OK"
+ ![shortcut_after](https://user-images.githubusercontent.com/1133787/47241000-6976c700-d3ea-11e8-9342-50ebc2f9f97b.png)
+
+- Done. You can now use rustfmt in an open *.rs file with your previously specified shortcut
diff --git a/src/tools/rustfmt/rust-toolchain b/src/tools/rustfmt/rust-toolchain
new file mode 100644
index 000000000..2640a9e0e
--- /dev/null
+++ b/src/tools/rustfmt/rust-toolchain
@@ -0,0 +1,3 @@
+[toolchain]
+channel = "nightly-2022-06-21"
+components = ["rustc-dev"]
diff --git a/src/tools/rustfmt/rustfmt.toml b/src/tools/rustfmt/rustfmt.toml
new file mode 100644
index 000000000..eccd5f9bd
--- /dev/null
+++ b/src/tools/rustfmt/rustfmt.toml
@@ -0,0 +1,3 @@
+error_on_line_overflow = true
+error_on_unformatted = true
+version = "Two"
diff --git a/src/tools/rustfmt/src/attr.rs b/src/tools/rustfmt/src/attr.rs
new file mode 100644
index 000000000..41ba9a847
--- /dev/null
+++ b/src/tools/rustfmt/src/attr.rs
@@ -0,0 +1,541 @@
+//! Format attributes and meta items.
+
+use rustc_ast::ast;
+use rustc_ast::HasAttrs;
+use rustc_span::{symbol::sym, Span, Symbol};
+
+use self::doc_comment::DocCommentFormatter;
+use crate::comment::{contains_comment, rewrite_doc_comment, CommentStyle};
+use crate::config::lists::*;
+use crate::config::IndentStyle;
+use crate::expr::rewrite_literal;
+use crate::lists::{definitive_tactic, itemize_list, write_list, ListFormatting, Separator};
+use crate::overflow;
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::types::{rewrite_path, PathContext};
+use crate::utils::{count_newlines, mk_sp};
+
+mod doc_comment;
+
+pub(crate) fn contains_name(attrs: &[ast::Attribute], name: Symbol) -> bool {
+ attrs.iter().any(|attr| attr.has_name(name))
+}
+
+pub(crate) fn first_attr_value_str_by_name(
+ attrs: &[ast::Attribute],
+ name: Symbol,
+) -> Option<Symbol> {
+ attrs
+ .iter()
+ .find(|attr| attr.has_name(name))
+ .and_then(|attr| attr.value_str())
+}
+
+/// Returns attributes on the given statement.
+pub(crate) fn get_attrs_from_stmt(stmt: &ast::Stmt) -> &[ast::Attribute] {
+ stmt.attrs()
+}
+
+pub(crate) fn get_span_without_attrs(stmt: &ast::Stmt) -> Span {
+ match stmt.kind {
+ ast::StmtKind::Local(ref local) => local.span,
+ ast::StmtKind::Item(ref item) => item.span,
+ ast::StmtKind::Expr(ref expr) | ast::StmtKind::Semi(ref expr) => expr.span,
+ ast::StmtKind::MacCall(ref mac_stmt) => mac_stmt.mac.span(),
+ ast::StmtKind::Empty => stmt.span,
+ }
+}
+
+/// Returns attributes that are within `outer_span`.
+pub(crate) fn filter_inline_attrs(
+ attrs: &[ast::Attribute],
+ outer_span: Span,
+) -> Vec<ast::Attribute> {
+ attrs
+ .iter()
+ .filter(|a| outer_span.lo() <= a.span.lo() && a.span.hi() <= outer_span.hi())
+ .cloned()
+ .collect()
+}
+
+fn is_derive(attr: &ast::Attribute) -> bool {
+ attr.has_name(sym::derive)
+}
+
+// The shape of the arguments to a function-like attribute.
+fn argument_shape(
+ left: usize,
+ right: usize,
+ combine: bool,
+ shape: Shape,
+ context: &RewriteContext<'_>,
+) -> Option<Shape> {
+ match context.config.indent_style() {
+ IndentStyle::Block => {
+ if combine {
+ shape.offset_left(left)
+ } else {
+ Some(
+ shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config),
+ )
+ }
+ }
+ IndentStyle::Visual => shape
+ .visual_indent(0)
+ .shrink_left(left)
+ .and_then(|s| s.sub_width(right)),
+ }
+}
+
+fn format_derive(
+ derives: &[ast::Attribute],
+ shape: Shape,
+ context: &RewriteContext<'_>,
+) -> Option<String> {
+ // Collect all items from all attributes
+ let all_items = derives
+ .iter()
+ .map(|attr| {
+ // Parse the derive items and extract the span for each item; if any
+ // attribute is not parseable, none of the attributes will be
+ // reformatted.
+ let item_spans = attr.meta_item_list().map(|meta_item_list| {
+ meta_item_list
+ .into_iter()
+ .map(|nested_meta_item| nested_meta_item.span())
+ })?;
+
+ let items = itemize_list(
+ context.snippet_provider,
+ item_spans,
+ ")",
+ ",",
+ |span| span.lo(),
+ |span| span.hi(),
+ |span| Some(context.snippet(*span).to_owned()),
+ // We update derive attribute spans to start after the opening '('
+ // This helps us focus parsing to just what's inside #[derive(...)]
+ context.snippet_provider.span_after(attr.span, "("),
+ attr.span.hi(),
+ false,
+ );
+
+ Some(items)
+ })
+ // Fail if any attribute failed.
+ .collect::<Option<Vec<_>>>()?
+ // Collect the results into a single, flat, Vec.
+ .into_iter()
+ .flatten()
+ .collect::<Vec<_>>();
+
+ // Collect formatting parameters.
+ let prefix = attr_prefix(&derives[0]);
+ let argument_shape = argument_shape(
+ "[derive()]".len() + prefix.len(),
+ ")]".len(),
+ false,
+ shape,
+ context,
+ )?;
+ let one_line_shape = shape
+ .offset_left("[derive()]".len() + prefix.len())?
+ .sub_width("()]".len())?;
+ let one_line_budget = one_line_shape.width;
+
+ let tactic = definitive_tactic(
+ &all_items,
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ argument_shape.width,
+ );
+ let trailing_separator = match context.config.indent_style() {
+ // We always add the trailing comma and remove it if it is not needed.
+ IndentStyle::Block => SeparatorTactic::Always,
+ IndentStyle::Visual => SeparatorTactic::Never,
+ };
+
+ // Format the collection of items.
+ let fmt = ListFormatting::new(argument_shape, context.config)
+ .tactic(tactic)
+ .trailing_separator(trailing_separator)
+ .ends_with_newline(false);
+ let item_str = write_list(&all_items, &fmt)?;
+
+ debug!("item_str: '{}'", item_str);
+
+ // Determine if the result will be nested, i.e. if we're using the block
+ // indent style and either the items are on multiple lines or we've exceeded
+ // our budget to fit on a single line.
+ let nested = context.config.indent_style() == IndentStyle::Block
+ && (item_str.contains('\n') || item_str.len() > one_line_budget);
+
+ // Format the final result.
+ let mut result = String::with_capacity(128);
+ result.push_str(prefix);
+ result.push_str("[derive(");
+ if nested {
+ let nested_indent = argument_shape.indent.to_string_with_newline(context.config);
+ result.push_str(&nested_indent);
+ result.push_str(&item_str);
+ result.push_str(&shape.indent.to_string_with_newline(context.config));
+ } else if let SeparatorTactic::Always = context.config.trailing_comma() {
+ // Retain the trailing comma.
+ result.push_str(&item_str);
+ } else if item_str.ends_with(',') {
+ // Remove the trailing comma.
+ result.push_str(&item_str[..item_str.len() - 1]);
+ } else {
+ result.push_str(&item_str);
+ }
+ result.push_str(")]");
+
+ Some(result)
+}
+
+/// Returns the first group of attributes that satisfies the given predicate.
+/// We consider two doc comments to be in different groups if they are separated by normal comments.
+fn take_while_with_pred<'a, P>(
+ context: &RewriteContext<'_>,
+ attrs: &'a [ast::Attribute],
+ pred: P,
+) -> &'a [ast::Attribute]
+where
+ P: Fn(&ast::Attribute) -> bool,
+{
+ let mut len = 0;
+ let mut iter = attrs.iter().peekable();
+
+ while let Some(attr) = iter.next() {
+ if pred(attr) {
+ len += 1;
+ } else {
+ break;
+ }
+ if let Some(next_attr) = iter.peek() {
+ // Extract comments between two attributes.
+ let span_between_attr = mk_sp(attr.span.hi(), next_attr.span.lo());
+ let snippet = context.snippet(span_between_attr);
+ if count_newlines(snippet) >= 2 || snippet.contains('/') {
+ break;
+ }
+ }
+ }
+
+ &attrs[..len]
+}
+
+/// Rewrite any doc comments which come before any other attributes.
+fn rewrite_initial_doc_comments(
+ context: &RewriteContext<'_>,
+ attrs: &[ast::Attribute],
+ shape: Shape,
+) -> Option<(usize, Option<String>)> {
+ if attrs.is_empty() {
+ return Some((0, None));
+ }
+ // Rewrite doc comments
+ let sugared_docs = take_while_with_pred(context, attrs, |a| a.is_doc_comment());
+ if !sugared_docs.is_empty() {
+ let snippet = sugared_docs
+ .iter()
+ .map(|a| context.snippet(a.span))
+ .collect::<Vec<_>>()
+ .join("\n");
+ return Some((
+ sugared_docs.len(),
+ Some(rewrite_doc_comment(
+ &snippet,
+ shape.comment(context.config),
+ context.config,
+ )?),
+ ));
+ }
+
+ Some((0, None))
+}
+
+impl Rewrite for ast::NestedMetaItem {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match self {
+ ast::NestedMetaItem::MetaItem(ref meta_item) => meta_item.rewrite(context, shape),
+ ast::NestedMetaItem::Literal(ref l) => rewrite_literal(context, l, shape),
+ }
+ }
+}
+
+fn has_newlines_before_after_comment(comment: &str) -> (&str, &str) {
+    // Look at the text before and after the comment and see if there are any empty lines.
+ let comment_begin = comment.find('/');
+ let len = comment_begin.unwrap_or_else(|| comment.len());
+ let mlb = count_newlines(&comment[..len]) > 1;
+ let mla = if comment_begin.is_none() {
+ mlb
+ } else {
+ comment
+ .chars()
+ .rev()
+ .take_while(|c| c.is_whitespace())
+ .filter(|&c| c == '\n')
+ .count()
+ > 1
+ };
+ (if mlb { "\n" } else { "" }, if mla { "\n" } else { "" })
+}
+
+impl Rewrite for ast::MetaItem {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ Some(match self.kind {
+ ast::MetaItemKind::Word => {
+ rewrite_path(context, PathContext::Type, None, &self.path, shape)?
+ }
+ ast::MetaItemKind::List(ref list) => {
+ let path = rewrite_path(context, PathContext::Type, None, &self.path, shape)?;
+ let has_trailing_comma = crate::expr::span_ends_with_comma(context, self.span);
+ overflow::rewrite_with_parens(
+ context,
+ &path,
+ list.iter(),
+ // 1 = "]"
+ shape.sub_width(1)?,
+ self.span,
+ context.config.attr_fn_like_width(),
+ Some(if has_trailing_comma {
+ SeparatorTactic::Always
+ } else {
+ SeparatorTactic::Never
+ }),
+ )?
+ }
+ ast::MetaItemKind::NameValue(ref literal) => {
+ let path = rewrite_path(context, PathContext::Type, None, &self.path, shape)?;
+ // 3 = ` = `
+ let lit_shape = shape.shrink_left(path.len() + 3)?;
+ // `rewrite_literal` returns `None` when `literal` exceeds max
+ // width. Since a literal is basically unformattable unless it
+ // is a string literal (and only if `format_strings` is set),
+ // we might be better off ignoring the fact that the attribute
+ // is longer than the max width and continue on formatting.
+ // See #2479 for example.
+ let value = rewrite_literal(context, literal, lit_shape)
+ .unwrap_or_else(|| context.snippet(literal.span).to_owned());
+ format!("{} = {}", path, value)
+ }
+ })
+ }
+}
+
+impl Rewrite for ast::Attribute {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let snippet = context.snippet(self.span);
+ if self.is_doc_comment() {
+ rewrite_doc_comment(snippet, shape.comment(context.config), context.config)
+ } else {
+ let should_skip = self
+ .ident()
+ .map(|s| context.skip_context.skip_attribute(s.name.as_str()))
+ .unwrap_or(false);
+ let prefix = attr_prefix(self);
+
+ if should_skip || contains_comment(snippet) {
+ return Some(snippet.to_owned());
+ }
+
+ if let Some(ref meta) = self.meta() {
+ // This attribute is possibly a doc attribute needing normalization to a doc comment
+ if context.config.normalize_doc_attributes() && meta.has_name(sym::doc) {
+ if let Some(ref literal) = meta.value_str() {
+ let comment_style = match self.style {
+ ast::AttrStyle::Inner => CommentStyle::Doc,
+ ast::AttrStyle::Outer => CommentStyle::TripleSlash,
+ };
+
+ let literal_str = literal.as_str();
+ let doc_comment_formatter =
+ DocCommentFormatter::new(literal_str, comment_style);
+ let doc_comment = format!("{}", doc_comment_formatter);
+ return rewrite_doc_comment(
+ &doc_comment,
+ shape.comment(context.config),
+ context.config,
+ );
+ }
+ }
+
+ // 1 = `[`
+ let shape = shape.offset_left(prefix.len() + 1)?;
+ Some(
+ meta.rewrite(context, shape)
+ .map_or_else(|| snippet.to_owned(), |rw| format!("{}[{}]", prefix, rw)),
+ )
+ } else {
+ Some(snippet.to_owned())
+ }
+ }
+ }
+}
+
+impl Rewrite for [ast::Attribute] {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ if self.is_empty() {
+ return Some(String::new());
+ }
+
+ // The current remaining attributes.
+ let mut attrs = self;
+ let mut result = String::new();
+
+ // Determine if the source text is annotated with `#[rustfmt::skip::attributes(derive)]`
+ // or `#![rustfmt::skip::attributes(derive)]`
+ let skip_derives = context.skip_context.skip_attribute("derive");
+
+ // This is not just a simple map because we need to handle doc comments
+ // (where we take as many doc comment attributes as possible) and possibly
+ // merging derives into a single attribute.
+ loop {
+ if attrs.is_empty() {
+ return Some(result);
+ }
+
+ // Handle doc comments.
+ let (doc_comment_len, doc_comment_str) =
+ rewrite_initial_doc_comments(context, attrs, shape)?;
+ if doc_comment_len > 0 {
+ let doc_comment_str = doc_comment_str.expect("doc comments, but no result");
+ result.push_str(&doc_comment_str);
+
+ let missing_span = attrs
+ .get(doc_comment_len)
+ .map(|next| mk_sp(attrs[doc_comment_len - 1].span.hi(), next.span.lo()));
+ if let Some(missing_span) = missing_span {
+ let snippet = context.snippet(missing_span);
+ let (mla, mlb) = has_newlines_before_after_comment(snippet);
+ let comment = crate::comment::recover_missing_comment_in_span(
+ missing_span,
+ shape.with_max_width(context.config),
+ context,
+ 0,
+ )?;
+ let comment = if comment.is_empty() {
+ format!("\n{}", mlb)
+ } else {
+ format!("{}{}\n{}", mla, comment, mlb)
+ };
+ result.push_str(&comment);
+ result.push_str(&shape.indent.to_string(context.config));
+ }
+
+ attrs = &attrs[doc_comment_len..];
+
+ continue;
+ }
+
+ // Handle derives if we will merge them.
+ if !skip_derives && context.config.merge_derives() && is_derive(&attrs[0]) {
+ let derives = take_while_with_pred(context, attrs, is_derive);
+ let derive_str = format_derive(derives, shape, context)?;
+ result.push_str(&derive_str);
+
+ let missing_span = attrs
+ .get(derives.len())
+ .map(|next| mk_sp(attrs[derives.len() - 1].span.hi(), next.span.lo()));
+ if let Some(missing_span) = missing_span {
+ let comment = crate::comment::recover_missing_comment_in_span(
+ missing_span,
+ shape.with_max_width(context.config),
+ context,
+ 0,
+ )?;
+ result.push_str(&comment);
+ if let Some(next) = attrs.get(derives.len()) {
+ if next.is_doc_comment() {
+ let snippet = context.snippet(missing_span);
+ let (_, mlb) = has_newlines_before_after_comment(snippet);
+ result.push_str(mlb);
+ }
+ }
+ result.push('\n');
+ result.push_str(&shape.indent.to_string(context.config));
+ }
+
+ attrs = &attrs[derives.len()..];
+
+ continue;
+ }
+
+ // If we get here, then we have a regular attribute, just handle one
+ // at a time.
+
+ let formatted_attr = attrs[0].rewrite(context, shape)?;
+ result.push_str(&formatted_attr);
+
+ let missing_span = attrs
+ .get(1)
+ .map(|next| mk_sp(attrs[0].span.hi(), next.span.lo()));
+ if let Some(missing_span) = missing_span {
+ let comment = crate::comment::recover_missing_comment_in_span(
+ missing_span,
+ shape.with_max_width(context.config),
+ context,
+ 0,
+ )?;
+ result.push_str(&comment);
+ if let Some(next) = attrs.get(1) {
+ if next.is_doc_comment() {
+ let snippet = context.snippet(missing_span);
+ let (_, mlb) = has_newlines_before_after_comment(snippet);
+ result.push_str(mlb);
+ }
+ }
+ result.push('\n');
+ result.push_str(&shape.indent.to_string(context.config));
+ }
+
+ attrs = &attrs[1..];
+ }
+ }
+}
+
+fn attr_prefix(attr: &ast::Attribute) -> &'static str {
+ match attr.style {
+ ast::AttrStyle::Inner => "#!",
+ ast::AttrStyle::Outer => "#",
+ }
+}
+
+pub(crate) trait MetaVisitor<'ast> {
+ fn visit_meta_item(&mut self, meta_item: &'ast ast::MetaItem) {
+ match meta_item.kind {
+ ast::MetaItemKind::Word => self.visit_meta_word(meta_item),
+ ast::MetaItemKind::List(ref list) => self.visit_meta_list(meta_item, list),
+ ast::MetaItemKind::NameValue(ref lit) => self.visit_meta_name_value(meta_item, lit),
+ }
+ }
+
+ fn visit_meta_list(
+ &mut self,
+ _meta_item: &'ast ast::MetaItem,
+ list: &'ast [ast::NestedMetaItem],
+ ) {
+ for nm in list {
+ self.visit_nested_meta_item(nm);
+ }
+ }
+
+ fn visit_meta_word(&mut self, _meta_item: &'ast ast::MetaItem) {}
+
+ fn visit_meta_name_value(&mut self, _meta_item: &'ast ast::MetaItem, _lit: &'ast ast::Lit) {}
+
+ fn visit_nested_meta_item(&mut self, nm: &'ast ast::NestedMetaItem) {
+ match nm {
+ ast::NestedMetaItem::MetaItem(ref meta_item) => self.visit_meta_item(meta_item),
+ ast::NestedMetaItem::Literal(ref lit) => self.visit_literal(lit),
+ }
+ }
+
+ fn visit_literal(&mut self, _lit: &'ast ast::Lit) {}
+}
diff --git a/src/tools/rustfmt/src/attr/doc_comment.rs b/src/tools/rustfmt/src/attr/doc_comment.rs
new file mode 100644
index 000000000..f653a12a8
--- /dev/null
+++ b/src/tools/rustfmt/src/attr/doc_comment.rs
@@ -0,0 +1,83 @@
+use crate::comment::CommentStyle;
+use std::fmt::{self, Display};
+
+/// Formats a string as a doc comment using the given [`CommentStyle`].
+#[derive(new)]
+pub(super) struct DocCommentFormatter<'a> {
+ literal: &'a str,
+ style: CommentStyle<'a>,
+}
+
+impl Display for DocCommentFormatter<'_> {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let opener = self.style.opener().trim_end();
+ let mut lines = self.literal.lines().peekable();
+
+ // Handle `#[doc = ""]`.
+ if lines.peek().is_none() {
+ return write!(formatter, "{}", opener);
+ }
+
+ while let Some(line) = lines.next() {
+ let is_last_line = lines.peek().is_none();
+ if is_last_line {
+ write!(formatter, "{}{}", opener, line)?;
+ } else {
+ writeln!(formatter, "{}{}", opener, line)?;
+ }
+ }
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn literal_controls_leading_spaces() {
+ test_doc_comment_is_formatted_correctly(
+ " Lorem ipsum",
+ "/// Lorem ipsum",
+ CommentStyle::TripleSlash,
+ );
+ }
+
+ #[test]
+ fn single_line_doc_comment_is_formatted_correctly() {
+ test_doc_comment_is_formatted_correctly(
+ "Lorem ipsum",
+ "///Lorem ipsum",
+ CommentStyle::TripleSlash,
+ );
+ }
+
+ #[test]
+ fn multi_line_doc_comment_is_formatted_correctly() {
+ test_doc_comment_is_formatted_correctly(
+ "Lorem ipsum\nDolor sit amet",
+ "///Lorem ipsum\n///Dolor sit amet",
+ CommentStyle::TripleSlash,
+ );
+ }
+
+ #[test]
+ fn whitespace_within_lines_is_preserved() {
+ test_doc_comment_is_formatted_correctly(
+ " Lorem ipsum \n Dolor sit amet ",
+ "/// Lorem ipsum \n/// Dolor sit amet ",
+ CommentStyle::TripleSlash,
+ );
+ }
+
+ fn test_doc_comment_is_formatted_correctly(
+ literal: &str,
+ expected_comment: &str,
+ style: CommentStyle<'_>,
+ ) {
+ assert_eq!(
+ expected_comment,
+ format!("{}", DocCommentFormatter::new(literal, style))
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/bin/main.rs b/src/tools/rustfmt/src/bin/main.rs
new file mode 100644
index 000000000..8e871e61f
--- /dev/null
+++ b/src/tools/rustfmt/src/bin/main.rs
@@ -0,0 +1,710 @@
+use anyhow::{format_err, Result};
+
+use io::Error as IoError;
+use thiserror::Error;
+
+use rustfmt_nightly as rustfmt;
+
+use std::collections::HashMap;
+use std::env;
+use std::fs::File;
+use std::io::{self, stdout, Read, Write};
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use getopts::{Matches, Options};
+
+use crate::rustfmt::{
+ load_config, CliOptions, Color, Config, Edition, EmitMode, FileLines, FileName,
+ FormatReportFormatterBuilder, Input, Session, Verbosity,
+};
+
+fn main() {
+ env_logger::Builder::from_env("RUSTFMT_LOG").init();
+ let opts = make_opts();
+
+ let exit_code = match execute(&opts) {
+ Ok(code) => code,
+ Err(e) => {
+ eprintln!("{:#}", e);
+ 1
+ }
+ };
+ // Make sure standard output is flushed before we exit.
+ std::io::stdout().flush().unwrap();
+
+ // Exit with given exit code.
+ //
+ // NOTE: this immediately terminates the process without doing any cleanup,
+ // so make sure to finish all necessary cleanup before this is called.
+ std::process::exit(exit_code);
+}
+
+/// Rustfmt operations.
+enum Operation {
+ /// Format files and their child modules.
+ Format {
+ files: Vec<PathBuf>,
+ minimal_config_path: Option<String>,
+ },
+ /// Print the help message.
+ Help(HelpOp),
+ /// Print version information
+ Version,
+ /// Output default config to a file, or stdout if None
+ ConfigOutputDefault { path: Option<String> },
+ /// Output current config (as if formatting to a file) to stdout
+ ConfigOutputCurrent { path: Option<String> },
+ /// No file specified, read from stdin
+ Stdin { input: String },
+}
+
+/// Rustfmt operations errors.
+#[derive(Error, Debug)]
+pub enum OperationError {
+ /// An unknown help topic was requested.
+ #[error("Unknown help topic: `{0}`.")]
+ UnknownHelpTopic(String),
+ /// An unknown print-config option was requested.
+ #[error("Unknown print-config option: `{0}`.")]
+ UnknownPrintConfigTopic(String),
+ /// Attempt to generate a minimal config from standard input.
+ #[error("The `--print-config=minimal` option doesn't work with standard input.")]
+ MinimalPathWithStdin,
+ /// An io error during reading or writing.
+ #[error("{0}")]
+ IoError(IoError),
+ /// Attempt to use --emit with a mode which is not currently
+ supported with standard input.
+ #[error("Emit mode {0} not supported with standard output.")]
+ StdinBadEmit(EmitMode),
+}
+
+impl From<IoError> for OperationError {
+ fn from(e: IoError) -> OperationError {
+ OperationError::IoError(e)
+ }
+}
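This `From<IoError>` impl is what lets `?` on I/O results inside functions returning `Result<_, OperationError>` convert errors automatically (as `determine_operation` later relies on when reading stdin). A minimal sketch of that conversion; the function name is hypothetical and assumes it sits in the same module as the imports above:

```rust
// Hypothetical illustration (not part of the upstream file): `?` maps any
// io::Error into OperationError::IoError via the From impl above.
fn read_stdin_input() -> Result<String, OperationError> {
    let mut buffer = String::new();
    std::io::stdin().read_to_string(&mut buffer)?; // io::Error -> OperationError
    Ok(buffer)
}
```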
+
+/// Arguments to `--help`
+enum HelpOp {
+ None,
+ Config,
+ FileLines,
+}
+
+fn make_opts() -> Options {
+ let mut opts = Options::new();
+
+ opts.optflag(
+ "",
+ "check",
+ "Run in 'check' mode. Exits with 0 if input is formatted correctly. Exits \
+ with 1 and prints a diff if formatting is required.",
+ );
+ let is_nightly = is_nightly();
+ let emit_opts = if is_nightly {
+ "[files|stdout|coverage|checkstyle|json]"
+ } else {
+ "[files|stdout]"
+ };
+ opts.optopt("", "emit", "What data to emit and how", emit_opts);
+ opts.optflag("", "backup", "Backup any modified files.");
+ opts.optopt(
+ "",
+ "config-path",
+ "Recursively searches the given path for the rustfmt.toml config file. If not \
+ found reverts to the input file path",
+ "[Path for the configuration file]",
+ );
+ opts.optopt("", "edition", "Rust edition to use", "[2015|2018|2021]");
+ opts.optopt(
+ "",
+ "color",
+ "Use colored output (if supported)",
+ "[always|never|auto]",
+ );
+ opts.optopt(
+ "",
+ "print-config",
+ "Dumps a default or minimal config to PATH. A minimal config is the \
+ subset of the current config file used for formatting the current program. \
+ `current` writes the current config to stdout, as if formatting the file at PATH.",
+ "[default|minimal|current] PATH",
+ );
+ opts.optflag(
+ "l",
+ "files-with-diff",
+ "Prints the names of mismatched files that were formatted. Prints the names of \
+ files that would be formatted when used with `--check` mode. ",
+ );
+ opts.optmulti(
+ "",
+ "config",
+ "Set options from command line. These settings take priority over .rustfmt.toml",
+ "[key1=val1,key2=val2...]",
+ );
+
+ if is_nightly {
+ opts.optflag(
+ "",
+ "unstable-features",
+ "Enables unstable features. Only available on nightly channel.",
+ );
+ opts.optopt(
+ "",
+ "file-lines",
+ "Format specified line ranges. Run with `--help=file-lines` for \
+ more detail (unstable).",
+ "JSON",
+ );
+ opts.optflag(
+ "",
+ "error-on-unformatted",
+ "Error if unable to get comments or string literals within max_width, \
+ or they are left with trailing whitespaces (unstable).",
+ );
+ opts.optflag(
+ "",
+ "skip-children",
+ "Don't reformat child modules (unstable).",
+ );
+ }
+
+ opts.optflag("v", "verbose", "Print verbose output");
+ opts.optflag("q", "quiet", "Print less output");
+ opts.optflag("V", "version", "Show version information");
+ let help_topics = if is_nightly {
+ "`config` or `file-lines`"
+ } else {
+ "`config`"
+ };
+ let mut help_topic_msg = "Show this message or help about a specific topic: ".to_owned();
+ help_topic_msg.push_str(help_topics);
+
+ opts.optflagopt("h", "help", &help_topic_msg, "=TOPIC");
+
+ opts
+}
+
+fn is_nightly() -> bool {
+ option_env!("CFG_RELEASE_CHANNEL").map_or(true, |c| c == "nightly" || c == "dev")
+}
+
+// Returned i32 is an exit code
+fn execute(opts: &Options) -> Result<i32> {
+ let matches = opts.parse(env::args().skip(1))?;
+ let options = GetOptsOptions::from_matches(&matches)?;
+
+ match determine_operation(&matches)? {
+ Operation::Help(HelpOp::None) => {
+ print_usage_to_stdout(opts, "");
+ Ok(0)
+ }
+ Operation::Help(HelpOp::Config) => {
+ Config::print_docs(&mut stdout(), options.unstable_features);
+ Ok(0)
+ }
+ Operation::Help(HelpOp::FileLines) => {
+ print_help_file_lines();
+ Ok(0)
+ }
+ Operation::Version => {
+ print_version();
+ Ok(0)
+ }
+ Operation::ConfigOutputDefault { path } => {
+ let toml = Config::default().all_options().to_toml()?;
+ if let Some(path) = path {
+ let mut file = File::create(path)?;
+ file.write_all(toml.as_bytes())?;
+ } else {
+ io::stdout().write_all(toml.as_bytes())?;
+ }
+ Ok(0)
+ }
+ Operation::ConfigOutputCurrent { path } => {
+ let path = match path {
+ Some(path) => path,
+ None => return Err(format_err!("PATH required for `--print-config current`")),
+ };
+
+ let file = PathBuf::from(path);
+ let file = file.canonicalize().unwrap_or(file);
+
+ let (config, _) = load_config(Some(file.parent().unwrap()), Some(options))?;
+ let toml = config.all_options().to_toml()?;
+ io::stdout().write_all(toml.as_bytes())?;
+
+ Ok(0)
+ }
+ Operation::Stdin { input } => format_string(input, options),
+ Operation::Format {
+ files,
+ minimal_config_path,
+ } => format(files, minimal_config_path, &options),
+ }
+}
+
+fn format_string(input: String, options: GetOptsOptions) -> Result<i32> {
+ // try to read config from local directory
+ let (mut config, _) = load_config(Some(Path::new(".")), Some(options.clone()))?;
+
+ if options.check {
+ config.set().emit_mode(EmitMode::Diff);
+ } else {
+ match options.emit_mode {
+ // Emit modes which work with standard input
+ // None means default, which is Stdout.
+ None | Some(EmitMode::Stdout) | Some(EmitMode::Checkstyle) | Some(EmitMode::Json) => {}
+ Some(emit_mode) => {
+ return Err(OperationError::StdinBadEmit(emit_mode).into());
+ }
+ }
+ config
+ .set()
+ .emit_mode(options.emit_mode.unwrap_or(EmitMode::Stdout));
+ }
+ config.set().verbose(Verbosity::Quiet);
+
+ // parse file_lines
+ config.set().file_lines(options.file_lines);
+ for f in config.file_lines().files() {
+ match *f {
+ FileName::Stdin => {}
+ _ => eprintln!("Warning: Extra file listed in file_lines option '{}'", f),
+ }
+ }
+
+ let out = &mut stdout();
+ let mut session = Session::new(config, Some(out));
+ format_and_emit_report(&mut session, Input::Text(input));
+
+ let exit_code = if session.has_operational_errors() || session.has_parsing_errors() {
+ 1
+ } else {
+ 0
+ };
+ Ok(exit_code)
+}
+
+fn format(
+ files: Vec<PathBuf>,
+ minimal_config_path: Option<String>,
+ options: &GetOptsOptions,
+) -> Result<i32> {
+ options.verify_file_lines(&files);
+ let (config, config_path) = load_config(None, Some(options.clone()))?;
+
+ if config.verbose() == Verbosity::Verbose {
+ if let Some(path) = config_path.as_ref() {
+ println!("Using rustfmt config file {}", path.display());
+ }
+ }
+
+ let out = &mut stdout();
+ let mut session = Session::new(config, Some(out));
+
+ for file in files {
+ if !file.exists() {
+ eprintln!("Error: file `{}` does not exist", file.to_str().unwrap());
+ session.add_operational_error();
+ } else if file.is_dir() {
+ eprintln!("Error: `{}` is a directory", file.to_str().unwrap());
+ session.add_operational_error();
+ } else {
+ // Check the file directory if the config-path could not be read or not provided
+ if config_path.is_none() {
+ let (local_config, config_path) =
+ load_config(Some(file.parent().unwrap()), Some(options.clone()))?;
+ if local_config.verbose() == Verbosity::Verbose {
+ if let Some(path) = config_path {
+ println!(
+ "Using rustfmt config file {} for {}",
+ path.display(),
+ file.display()
+ );
+ }
+ }
+
+ session.override_config(local_config, |sess| {
+ format_and_emit_report(sess, Input::File(file))
+ });
+ } else {
+ format_and_emit_report(&mut session, Input::File(file));
+ }
+ }
+ }
+
+ // If we were given a path via dump-minimal-config, output any options
+ // that were used during formatting as TOML.
+ if let Some(path) = minimal_config_path {
+ let mut file = File::create(path)?;
+ let toml = session.config.used_options().to_toml()?;
+ file.write_all(toml.as_bytes())?;
+ }
+
+ let exit_code = if session.has_operational_errors()
+ || session.has_parsing_errors()
+ || ((session.has_diff() || session.has_check_errors()) && options.check)
+ {
+ 1
+ } else {
+ 0
+ };
+ Ok(exit_code)
+}
+
+fn format_and_emit_report<T: Write>(session: &mut Session<'_, T>, input: Input) {
+ match session.format(input) {
+ Ok(report) => {
+ if report.has_warnings() {
+ eprintln!(
+ "{}",
+ FormatReportFormatterBuilder::new(&report)
+ .enable_colors(should_print_with_colors(session))
+ .build()
+ );
+ }
+ }
+ Err(msg) => {
+ eprintln!("Error writing files: {}", msg);
+ session.add_operational_error();
+ }
+ }
+}
+
+fn should_print_with_colors<T: Write>(session: &mut Session<'_, T>) -> bool {
+ match term::stderr() {
+ Some(ref t)
+ if session.config.color().use_colored_tty()
+ && t.supports_color()
+ && t.supports_attr(term::Attr::Bold) =>
+ {
+ true
+ }
+ _ => false,
+ }
+}
+
+fn print_usage_to_stdout(opts: &Options, reason: &str) {
+ let sep = if reason.is_empty() {
+ String::new()
+ } else {
+ format!("{}\n\n", reason)
+ };
+ let msg = format!(
+ "{}Format Rust code\n\nusage: rustfmt [options] <file>...",
+ sep
+ );
+ println!("{}", opts.usage(&msg));
+}
+
+fn print_help_file_lines() {
+ println!(
+ "If you want to restrict reformatting to specific sets of lines, you can
+use the `--file-lines` option. Its argument is a JSON array of objects
+with `file` and `range` properties, where `file` is a file name, and
+`range` is an array representing a range of lines like `[7,13]`. Ranges
+are 1-based and inclusive of both end points. Specifying an empty array
+will result in no files being formatted. For example,
+
+```
+rustfmt --file-lines '[
+ {{\"file\":\"src/lib.rs\",\"range\":[7,13]}},
+ {{\"file\":\"src/lib.rs\",\"range\":[21,29]}},
+ {{\"file\":\"src/foo.rs\",\"range\":[10,11]}},
+ {{\"file\":\"src/foo.rs\",\"range\":[15,15]}}]'
+```
+
+would format lines `7-13` and `21-29` of `src/lib.rs`, and lines `10-11`,
+and `15` of `src/foo.rs`. No other files would be formatted, even if they
+are included as out of line modules from `src/lib.rs`."
+ );
+}
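Because `--file-lines` takes the ranges as a single JSON argument, a caller invoking rustfmt programmatically has to serialize that array itself. A minimal sketch using only the standard library; the helper is hypothetical and assumes a nightly rustfmt on `PATH` (the flag is unstable, so `--unstable-features` is also passed):

```rust
use std::process::Command;

// Hypothetical helper (not in the upstream file): restrict formatting of
// `src/lib.rs` to lines 7-13. Ranges are 1-based and inclusive, matching the
// help text above.
fn format_lines_7_to_13() -> std::io::Result<std::process::ExitStatus> {
    let file_lines = r#"[{"file":"src/lib.rs","range":[7,13]}]"#;
    Command::new("rustfmt")
        .args(["--unstable-features", "--file-lines", file_lines, "src/lib.rs"])
        .status()
}
```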
+
+fn print_version() {
+ let version_info = format!(
+ "{}-{}",
+ option_env!("CARGO_PKG_VERSION").unwrap_or("unknown"),
+ include_str!(concat!(env!("OUT_DIR"), "/commit-info.txt"))
+ );
+
+ println!("rustfmt {}", version_info);
+}
+
+fn determine_operation(matches: &Matches) -> Result<Operation, OperationError> {
+ if matches.opt_present("h") {
+ let topic = matches.opt_str("h");
+ if topic == None {
+ return Ok(Operation::Help(HelpOp::None));
+ } else if topic == Some("config".to_owned()) {
+ return Ok(Operation::Help(HelpOp::Config));
+ } else if topic == Some("file-lines".to_owned()) && is_nightly() {
+ return Ok(Operation::Help(HelpOp::FileLines));
+ } else {
+ return Err(OperationError::UnknownHelpTopic(topic.unwrap()));
+ }
+ }
+ let mut free_matches = matches.free.iter();
+
+ let mut minimal_config_path = None;
+ if let Some(kind) = matches.opt_str("print-config") {
+ let path = free_matches.next().cloned();
+ match kind.as_str() {
+ "default" => return Ok(Operation::ConfigOutputDefault { path }),
+ "current" => return Ok(Operation::ConfigOutputCurrent { path }),
+ "minimal" => {
+ minimal_config_path = path;
+ if minimal_config_path.is_none() {
+ eprintln!("WARNING: PATH required for `--print-config minimal`.");
+ }
+ }
+ _ => {
+ return Err(OperationError::UnknownPrintConfigTopic(kind));
+ }
+ }
+ }
+
+ if matches.opt_present("version") {
+ return Ok(Operation::Version);
+ }
+
+ let files: Vec<_> = free_matches
+ .map(|s| {
+ let p = PathBuf::from(s);
+ // We will do a comparison later, so canonicalize here first
+ // to get the expected behavior.
+ p.canonicalize().unwrap_or(p)
+ })
+ .collect();
+
+ // if no file argument is supplied, read from stdin
+ if files.is_empty() {
+ if minimal_config_path.is_some() {
+ return Err(OperationError::MinimalPathWithStdin);
+ }
+ let mut buffer = String::new();
+ io::stdin().read_to_string(&mut buffer)?;
+
+ return Ok(Operation::Stdin { input: buffer });
+ }
+
+ Ok(Operation::Format {
+ files,
+ minimal_config_path,
+ })
+}
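A hypothetical test-style sketch (not part of the upstream file) of how `determine_operation` resolves a bare `--version` invocation, assuming it lives alongside the functions above:

```rust
#[cfg(test)]
mod operation_selection_sketch {
    use super::*;

    // `--version` is handled before any files are collected or stdin is read.
    #[test]
    fn version_flag_selects_version_operation() {
        let opts = make_opts();
        let matches = opts.parse(vec!["--version"]).unwrap();
        assert!(matches!(
            determine_operation(&matches),
            Ok(Operation::Version)
        ));
    }
}
```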
+
+const STABLE_EMIT_MODES: [EmitMode; 3] = [EmitMode::Files, EmitMode::Stdout, EmitMode::Diff];
+
+/// Parsed command line options.
+#[derive(Clone, Debug, Default)]
+struct GetOptsOptions {
+ skip_children: Option<bool>,
+ quiet: bool,
+ verbose: bool,
+ config_path: Option<PathBuf>,
+ inline_config: HashMap<String, String>,
+ emit_mode: Option<EmitMode>,
+ backup: bool,
+ check: bool,
+ edition: Option<Edition>,
+ color: Option<Color>,
+ file_lines: FileLines, // Default is all lines in all files.
+ unstable_features: bool,
+ error_on_unformatted: Option<bool>,
+ print_misformatted_file_names: bool,
+}
+
+impl GetOptsOptions {
+ pub fn from_matches(matches: &Matches) -> Result<GetOptsOptions> {
+ let mut options = GetOptsOptions::default();
+ options.verbose = matches.opt_present("verbose");
+ options.quiet = matches.opt_present("quiet");
+ if options.verbose && options.quiet {
+ return Err(format_err!("Can't use both `--verbose` and `--quiet`"));
+ }
+
+ let rust_nightly = is_nightly();
+
+ if rust_nightly {
+ options.unstable_features = matches.opt_present("unstable-features");
+
+ if options.unstable_features {
+ if matches.opt_present("skip-children") {
+ options.skip_children = Some(true);
+ }
+ if matches.opt_present("error-on-unformatted") {
+ options.error_on_unformatted = Some(true);
+ }
+ if let Some(ref file_lines) = matches.opt_str("file-lines") {
+ options.file_lines = file_lines.parse()?;
+ }
+ } else {
+ let mut unstable_options = vec![];
+ if matches.opt_present("skip-children") {
+ unstable_options.push("`--skip-children`");
+ }
+ if matches.opt_present("error-on-unformatted") {
+ unstable_options.push("`--error-on-unformatted`");
+ }
+ if matches.opt_present("file-lines") {
+ unstable_options.push("`--file-lines`");
+ }
+ if !unstable_options.is_empty() {
+ let s = if unstable_options.len() == 1 { "" } else { "s" };
+ return Err(format_err!(
+ "Unstable option{} ({}) used without `--unstable-features`",
+ s,
+ unstable_options.join(", "),
+ ));
+ }
+ }
+ }
+
+ options.config_path = matches.opt_str("config-path").map(PathBuf::from);
+
+ options.inline_config = matches
+ .opt_strs("config")
+ .iter()
+ .flat_map(|config| config.split(','))
+ .map(
+ |key_val| match key_val.char_indices().find(|(_, ch)| *ch == '=') {
+ Some((middle, _)) => {
+ let (key, val) = (&key_val[..middle], &key_val[middle + 1..]);
+ if !Config::is_valid_key_val(key, val) {
+ Err(format_err!("invalid key=val pair: `{}`", key_val))
+ } else {
+ Ok((key.to_string(), val.to_string()))
+ }
+ }
+
+ None => Err(format_err!(
+ "--config expects comma-separated list of key=val pairs, found `{}`",
+ key_val
+ )),
+ },
+ )
+ .collect::<Result<HashMap<_, _>, _>>()?;
+
+ options.check = matches.opt_present("check");
+ if let Some(ref emit_str) = matches.opt_str("emit") {
+ if options.check {
+ return Err(format_err!("Invalid to use `--emit` and `--check`"));
+ }
+
+ options.emit_mode = Some(emit_mode_from_emit_str(emit_str)?);
+ }
+
+ if let Some(ref edition_str) = matches.opt_str("edition") {
+ options.edition = Some(edition_from_edition_str(edition_str)?);
+ }
+
+ if matches.opt_present("backup") {
+ options.backup = true;
+ }
+
+ if matches.opt_present("files-with-diff") {
+ options.print_misformatted_file_names = true;
+ }
+
+ if !rust_nightly {
+ if let Some(ref emit_mode) = options.emit_mode {
+ if !STABLE_EMIT_MODES.contains(emit_mode) {
+ return Err(format_err!(
+ "Invalid value for `--emit` - using an unstable \
+ value without `--unstable-features`",
+ ));
+ }
+ }
+ }
+
+ if let Some(ref color) = matches.opt_str("color") {
+ match Color::from_str(color) {
+ Ok(color) => options.color = Some(color),
+ _ => return Err(format_err!("Invalid color: {}", color)),
+ }
+ }
+
+ Ok(options)
+ }
+
+ fn verify_file_lines(&self, files: &[PathBuf]) {
+ for f in self.file_lines.files() {
+ match *f {
+ FileName::Real(ref f) if files.contains(f) => {}
+ FileName::Real(_) => {
+ eprintln!("Warning: Extra file listed in file_lines option '{}'", f)
+ }
+ FileName::Stdin => eprintln!("Warning: Not a file '{}'", f),
+ }
+ }
+ }
+}
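The `--config` handling in `from_matches` splits a comma-separated list of `key=val` pairs and validates each one. A simplified standalone model of just the splitting step; the helper is hypothetical and omits the `Config::is_valid_key_val` check:

```rust
// Hypothetical simplification (not in the upstream file) of the
// `--config key1=val1,key2=val2` splitting done in GetOptsOptions::from_matches.
fn split_inline_config(raw: &str) -> Result<Vec<(String, String)>, String> {
    raw.split(',')
        .map(|pair| match pair.split_once('=') {
            Some((key, val)) => Ok((key.to_owned(), val.to_owned())),
            None => Err(format!("expected key=val, found `{}`", pair)),
        })
        .collect()
}

// split_inline_config("max_width=80,edition=2021") yields two (key, value) pairs;
// split_inline_config("max_width") yields an Err describing the malformed pair.
```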
+
+impl CliOptions for GetOptsOptions {
+ fn apply_to(self, config: &mut Config) {
+ if self.verbose {
+ config.set().verbose(Verbosity::Verbose);
+ } else if self.quiet {
+ config.set().verbose(Verbosity::Quiet);
+ } else {
+ config.set().verbose(Verbosity::Normal);
+ }
+ config.set().file_lines(self.file_lines);
+ config.set().unstable_features(self.unstable_features);
+ if let Some(skip_children) = self.skip_children {
+ config.set().skip_children(skip_children);
+ }
+ if let Some(error_on_unformatted) = self.error_on_unformatted {
+ config.set().error_on_unformatted(error_on_unformatted);
+ }
+ if let Some(edition) = self.edition {
+ config.set().edition(edition);
+ }
+ if self.check {
+ config.set().emit_mode(EmitMode::Diff);
+ } else if let Some(emit_mode) = self.emit_mode {
+ config.set().emit_mode(emit_mode);
+ }
+ if self.backup {
+ config.set().make_backup(true);
+ }
+ if let Some(color) = self.color {
+ config.set().color(color);
+ }
+ if self.print_misformatted_file_names {
+ config.set().print_misformatted_file_names(true);
+ }
+
+ for (key, val) in self.inline_config {
+ config.override_value(&key, &val);
+ }
+ }
+
+ fn config_path(&self) -> Option<&Path> {
+ self.config_path.as_deref()
+ }
+}
+
+fn edition_from_edition_str(edition_str: &str) -> Result<Edition> {
+ match edition_str {
+ "2015" => Ok(Edition::Edition2015),
+ "2018" => Ok(Edition::Edition2018),
+ "2021" => Ok(Edition::Edition2021),
+ "2024" => Ok(Edition::Edition2024),
+ _ => Err(format_err!("Invalid value for `--edition`")),
+ }
+}
+
+fn emit_mode_from_emit_str(emit_str: &str) -> Result<EmitMode> {
+ match emit_str {
+ "files" => Ok(EmitMode::Files),
+ "stdout" => Ok(EmitMode::Stdout),
+ "coverage" => Ok(EmitMode::Coverage),
+ "checkstyle" => Ok(EmitMode::Checkstyle),
+ "json" => Ok(EmitMode::Json),
+ _ => Err(format_err!("Invalid value for `--emit`")),
+ }
+}
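A hypothetical test-style sketch (not in the upstream file) exercising the two string-to-enum helpers above, assuming it sits in the same module:

```rust
#[cfg(test)]
mod flag_parsing_sketch {
    use super::*;

    #[test]
    fn known_values_parse_and_unknown_values_error() {
        assert!(matches!(emit_mode_from_emit_str("files"), Ok(EmitMode::Files)));
        assert!(matches!(
            edition_from_edition_str("2021"),
            Ok(Edition::Edition2021)
        ));
        assert!(emit_mode_from_emit_str("bogus").is_err());
        assert!(edition_from_edition_str("1999").is_err());
    }
}
```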
diff --git a/src/tools/rustfmt/src/cargo-fmt/main.rs b/src/tools/rustfmt/src/cargo-fmt/main.rs
new file mode 100644
index 000000000..9031d29b4
--- /dev/null
+++ b/src/tools/rustfmt/src/cargo-fmt/main.rs
@@ -0,0 +1,550 @@
+// Inspired by Paul Woolcock's cargo-fmt (https://github.com/pwoolcoc/cargo-fmt/).
+
+#![deny(warnings)]
+#![allow(clippy::match_like_matches_macro)]
+
+use std::cmp::Ordering;
+use std::collections::{BTreeMap, BTreeSet};
+use std::env;
+use std::ffi::OsStr;
+use std::fs;
+use std::hash::{Hash, Hasher};
+use std::io::{self, Write};
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::str;
+
+use clap::{AppSettings, CommandFactory, Parser};
+
+#[path = "test/mod.rs"]
+#[cfg(test)]
+mod cargo_fmt_tests;
+
+#[derive(Parser)]
+#[clap(
+ global_setting(AppSettings::NoAutoVersion),
+ bin_name = "cargo fmt",
+ about = "This utility formats all bin and lib files of \
+ the current crate using rustfmt."
+)]
+pub struct Opts {
+ /// No output printed to stdout
+ #[clap(short = 'q', long = "quiet")]
+ quiet: bool,
+
+ /// Use verbose output
+ #[clap(short = 'v', long = "verbose")]
+ verbose: bool,
+
+ /// Print rustfmt version and exit
+ #[clap(long = "version")]
+ version: bool,
+
+ /// Specify package to format
+ #[clap(
+ short = 'p',
+ long = "package",
+ value_name = "package",
+ multiple_values = true
+ )]
+ packages: Vec<String>,
+
+ /// Specify path to Cargo.toml
+ #[clap(long = "manifest-path", value_name = "manifest-path")]
+ manifest_path: Option<String>,
+
+ /// Specify message-format: short|json|human
+ #[clap(long = "message-format", value_name = "message-format")]
+ message_format: Option<String>,
+
+ /// Options passed to rustfmt
+ // 'raw = true' to make `--` explicit.
+ #[clap(name = "rustfmt_options", raw(true))]
+ rustfmt_options: Vec<String>,
+
+ /// Format all packages, and also their local path-based dependencies
+ #[clap(long = "all")]
+ format_all: bool,
+
+ /// Run rustfmt in check mode
+ #[clap(long = "check")]
+ check: bool,
+}
+
+fn main() {
+ let exit_status = execute();
+ std::io::stdout().flush().unwrap();
+ std::process::exit(exit_status);
+}
+
+const SUCCESS: i32 = 0;
+const FAILURE: i32 = 1;
+
+fn execute() -> i32 {
+ // Drop extra `fmt` argument provided by `cargo`.
+ let mut found_fmt = false;
+ let args = env::args().filter(|x| {
+ if found_fmt {
+ true
+ } else {
+ found_fmt = x == "fmt";
+ x != "fmt"
+ }
+ });
+
+ let opts = Opts::parse_from(args);
+
+ let verbosity = match (opts.verbose, opts.quiet) {
+ (false, false) => Verbosity::Normal,
+ (false, true) => Verbosity::Quiet,
+ (true, false) => Verbosity::Verbose,
+ (true, true) => {
+ print_usage_to_stderr("quiet mode and verbose mode are not compatible");
+ return FAILURE;
+ }
+ };
+
+ if opts.version {
+ return handle_command_status(get_rustfmt_info(&[String::from("--version")]));
+ }
+ if opts.rustfmt_options.iter().any(|s| {
+ ["--print-config", "-h", "--help", "-V", "--version"].contains(&s.as_str())
+ || s.starts_with("--help=")
+ || s.starts_with("--print-config=")
+ }) {
+ return handle_command_status(get_rustfmt_info(&opts.rustfmt_options));
+ }
+
+ let strategy = CargoFmtStrategy::from_opts(&opts);
+ let mut rustfmt_args = opts.rustfmt_options;
+ if opts.check {
+ let check_flag = "--check";
+ if !rustfmt_args.iter().any(|o| o == check_flag) {
+ rustfmt_args.push(check_flag.to_owned());
+ }
+ }
+ if let Some(message_format) = opts.message_format {
+ if let Err(msg) = convert_message_format_to_rustfmt_args(&message_format, &mut rustfmt_args)
+ {
+ print_usage_to_stderr(&msg);
+ return FAILURE;
+ }
+ }
+
+ if let Some(specified_manifest_path) = opts.manifest_path {
+ if !specified_manifest_path.ends_with("Cargo.toml") {
+ print_usage_to_stderr("the manifest-path must be a path to a Cargo.toml file");
+ return FAILURE;
+ }
+ let manifest_path = PathBuf::from(specified_manifest_path);
+ handle_command_status(format_crate(
+ verbosity,
+ &strategy,
+ rustfmt_args,
+ Some(&manifest_path),
+ ))
+ } else {
+ handle_command_status(format_crate(verbosity, &strategy, rustfmt_args, None))
+ }
+}
+
+fn rustfmt_command() -> Command {
+ let rustfmt_var = env::var_os("RUSTFMT");
+ let rustfmt = match &rustfmt_var {
+ Some(rustfmt) => rustfmt,
+ None => OsStr::new("rustfmt"),
+ };
+ Command::new(rustfmt)
+}
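`rustfmt_command` honors a `RUSTFMT` environment variable, so a wrapper can point cargo-fmt at a specific binary before anything is spawned. A minimal sketch; the path and function name are hypothetical:

```rust
// Hypothetical illustration (not in the upstream file): pin the rustfmt binary
// via the RUSTFMT environment variable, then ask it for its version.
fn print_pinned_rustfmt_version() -> std::io::Result<()> {
    std::env::set_var("RUSTFMT", "/opt/rust/bin/rustfmt");
    let status = rustfmt_command().arg("--version").status()?;
    println!("rustfmt exited with {}", status);
    Ok(())
}
```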
+
+fn convert_message_format_to_rustfmt_args(
+ message_format: &str,
+ rustfmt_args: &mut Vec<String>,
+) -> Result<(), String> {
+ let mut contains_emit_mode = false;
+ let mut contains_check = false;
+ let mut contains_list_files = false;
+ for arg in rustfmt_args.iter() {
+ if arg.starts_with("--emit") {
+ contains_emit_mode = true;
+ }
+ if arg == "--check" {
+ contains_check = true;
+ }
+ if arg == "-l" || arg == "--files-with-diff" {
+ contains_list_files = true;
+ }
+ }
+ match message_format {
+ "short" => {
+ if !contains_list_files {
+ rustfmt_args.push(String::from("-l"));
+ }
+ Ok(())
+ }
+ "json" => {
+ if contains_emit_mode {
+ return Err(String::from(
+ "cannot include --emit arg when --message-format is set to json",
+ ));
+ }
+ if contains_check {
+ return Err(String::from(
+ "cannot include --check arg when --message-format is set to json",
+ ));
+ }
+ rustfmt_args.push(String::from("--emit"));
+ rustfmt_args.push(String::from("json"));
+ Ok(())
+ }
+ "human" => Ok(()),
+ _ => {
+ return Err(format!(
+ "invalid --message-format value: {}. Allowed values are: short|json|human",
+ message_format
+ ));
+ }
+ }
+}
+
+fn print_usage_to_stderr(reason: &str) {
+ eprintln!("{}", reason);
+ let app = Opts::command();
+ app.after_help("")
+ .write_help(&mut io::stderr())
+ .expect("failed to write to stderr");
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Verbosity {
+ Verbose,
+ Normal,
+ Quiet,
+}
+
+fn handle_command_status(status: Result<i32, io::Error>) -> i32 {
+ match status {
+ Err(e) => {
+ print_usage_to_stderr(&e.to_string());
+ FAILURE
+ }
+ Ok(status) => status,
+ }
+}
+
+fn get_rustfmt_info(args: &[String]) -> Result<i32, io::Error> {
+ let mut command = rustfmt_command()
+ .stdout(std::process::Stdio::inherit())
+ .args(args)
+ .spawn()
+ .map_err(|e| match e.kind() {
+ io::ErrorKind::NotFound => io::Error::new(
+ io::ErrorKind::Other,
+ "Could not run rustfmt, please make sure it is in your PATH.",
+ ),
+ _ => e,
+ })?;
+ let result = command.wait()?;
+ if result.success() {
+ Ok(SUCCESS)
+ } else {
+ Ok(result.code().unwrap_or(SUCCESS))
+ }
+}
+
+fn format_crate(
+ verbosity: Verbosity,
+ strategy: &CargoFmtStrategy,
+ rustfmt_args: Vec<String>,
+ manifest_path: Option<&Path>,
+) -> Result<i32, io::Error> {
+ let targets = get_targets(strategy, manifest_path)?;
+
+ // Currently only bin and lib files get formatted.
+ run_rustfmt(&targets, &rustfmt_args, verbosity)
+}
+
+/// Target uses a `path` field for equality and hashing.
+#[derive(Debug)]
+pub struct Target {
+ /// A path to the main source file of the target.
+ path: PathBuf,
+ /// A kind of target (e.g., lib, bin, example, ...).
+ kind: String,
+ /// Rust edition for this target.
+ edition: String,
+}
+
+impl Target {
+ pub fn from_target(target: &cargo_metadata::Target) -> Self {
+ let path = PathBuf::from(&target.src_path);
+ let canonicalized = fs::canonicalize(&path).unwrap_or(path);
+
+ Target {
+ path: canonicalized,
+ kind: target.kind[0].clone(),
+ edition: target.edition.clone(),
+ }
+ }
+}
+
+impl PartialEq for Target {
+ fn eq(&self, other: &Target) -> bool {
+ self.path == other.path
+ }
+}
+
+impl PartialOrd for Target {
+ fn partial_cmp(&self, other: &Target) -> Option<Ordering> {
+ Some(self.path.cmp(&other.path))
+ }
+}
+
+impl Ord for Target {
+ fn cmp(&self, other: &Target) -> Ordering {
+ self.path.cmp(&other.path)
+ }
+}
+
+impl Eq for Target {}
+
+impl Hash for Target {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.path.hash(state);
+ }
+}
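Because equality and hashing look only at `path`, two targets that resolve to the same source file collapse into a single `BTreeSet` entry even if their kind or edition differ. A hypothetical test-style sketch (not in the upstream file), constructing `Target` directly the same way the existing tests do:

```rust
#[cfg(test)]
mod target_identity_sketch {
    use super::*;

    // Two targets pointing at the same source file are considered identical,
    // regardless of kind/edition, so they dedup when inserted into a set.
    #[test]
    fn equality_ignores_kind_and_edition() {
        let a = Target {
            path: PathBuf::from("src/lib.rs"),
            kind: "lib".into(),
            edition: "2018".into(),
        };
        let b = Target {
            path: PathBuf::from("src/lib.rs"),
            kind: "bin".into(),
            edition: "2021".into(),
        };
        assert_eq!(a, b);
    }
}
```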
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum CargoFmtStrategy {
+ /// Format every package and its dependencies.
+ All,
+ /// Format packages that are specified by the command line argument.
+ Some(Vec<String>),
+ /// Format the root packages only.
+ Root,
+}
+
+impl CargoFmtStrategy {
+ pub fn from_opts(opts: &Opts) -> CargoFmtStrategy {
+ match (opts.format_all, opts.packages.is_empty()) {
+ (false, true) => CargoFmtStrategy::Root,
+ (true, _) => CargoFmtStrategy::All,
+ (false, false) => CargoFmtStrategy::Some(opts.packages.clone()),
+ }
+ }
+}
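A hypothetical test-style sketch (not in the upstream file) pinning down how the flag combinations map to a `CargoFmtStrategy`, reusing the `Opts::parse_from` style of the existing tests:

```rust
#[cfg(test)]
mod strategy_selection_sketch {
    use super::*;

    #[test]
    fn all_flag_takes_precedence_over_package_list() {
        let o = Opts::parse_from(&["test", "--all", "-p", "p1"]);
        assert_eq!(CargoFmtStrategy::All, CargoFmtStrategy::from_opts(&o));
    }

    #[test]
    fn no_flags_formats_the_root_only() {
        let o = Opts::parse_from(&["test"]);
        assert_eq!(CargoFmtStrategy::Root, CargoFmtStrategy::from_opts(&o));
    }
}
```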
+
+/// Based on the specified `CargoFmtStrategy`, returns a set of main source files.
+fn get_targets(
+ strategy: &CargoFmtStrategy,
+ manifest_path: Option<&Path>,
+) -> Result<BTreeSet<Target>, io::Error> {
+ let mut targets = BTreeSet::new();
+
+ match *strategy {
+ CargoFmtStrategy::Root => get_targets_root_only(manifest_path, &mut targets)?,
+ CargoFmtStrategy::All => {
+ get_targets_recursive(manifest_path, &mut targets, &mut BTreeSet::new())?
+ }
+ CargoFmtStrategy::Some(ref hitlist) => {
+ get_targets_with_hitlist(manifest_path, hitlist, &mut targets)?
+ }
+ }
+
+ if targets.is_empty() {
+ Err(io::Error::new(
+ io::ErrorKind::Other,
+ "Failed to find targets".to_owned(),
+ ))
+ } else {
+ Ok(targets)
+ }
+}
+
+fn get_targets_root_only(
+ manifest_path: Option<&Path>,
+ targets: &mut BTreeSet<Target>,
+) -> Result<(), io::Error> {
+ let metadata = get_cargo_metadata(manifest_path)?;
+ let workspace_root_path = PathBuf::from(&metadata.workspace_root).canonicalize()?;
+ let (in_workspace_root, current_dir_manifest) = if let Some(target_manifest) = manifest_path {
+ (
+ workspace_root_path == target_manifest,
+ target_manifest.canonicalize()?,
+ )
+ } else {
+ let current_dir = env::current_dir()?.canonicalize()?;
+ (
+ workspace_root_path == current_dir,
+ current_dir.join("Cargo.toml"),
+ )
+ };
+
+ let package_targets = match metadata.packages.len() {
+ 1 => metadata.packages.into_iter().next().unwrap().targets,
+ _ => metadata
+ .packages
+ .into_iter()
+ .filter(|p| {
+ in_workspace_root
+ || PathBuf::from(&p.manifest_path)
+ .canonicalize()
+ .unwrap_or_default()
+ == current_dir_manifest
+ })
+ .flat_map(|p| p.targets)
+ .collect(),
+ };
+
+ for target in package_targets {
+ targets.insert(Target::from_target(&target));
+ }
+
+ Ok(())
+}
+
+fn get_targets_recursive(
+ manifest_path: Option<&Path>,
+ targets: &mut BTreeSet<Target>,
+ visited: &mut BTreeSet<String>,
+) -> Result<(), io::Error> {
+ let metadata = get_cargo_metadata(manifest_path)?;
+ for package in &metadata.packages {
+ add_targets(&package.targets, targets);
+
+ // Look for local dependencies using information available since cargo v1.51
+ // It's theoretically possible someone could use a newer version of rustfmt with
+ // a much older version of `cargo`, but we don't try to explicitly support that scenario.
+ // If someone reports an issue with path-based deps not being formatted, be sure to
+ // confirm their version of `cargo` (not `cargo-fmt`) is >= v1.51
+ // https://github.com/rust-lang/cargo/pull/8994
+ for dependency in &package.dependencies {
+ if dependency.path.is_none() || visited.contains(&dependency.name) {
+ continue;
+ }
+
+ let manifest_path = PathBuf::from(dependency.path.as_ref().unwrap()).join("Cargo.toml");
+ if manifest_path.exists()
+ && !metadata
+ .packages
+ .iter()
+ .any(|p| p.manifest_path.eq(&manifest_path))
+ {
+ visited.insert(dependency.name.to_owned());
+ get_targets_recursive(Some(&manifest_path), targets, visited)?;
+ }
+ }
+ }
+
+ Ok(())
+}
+
+fn get_targets_with_hitlist(
+ manifest_path: Option<&Path>,
+ hitlist: &[String],
+ targets: &mut BTreeSet<Target>,
+) -> Result<(), io::Error> {
+ let metadata = get_cargo_metadata(manifest_path)?;
+ let mut workspace_hitlist: BTreeSet<&String> = BTreeSet::from_iter(hitlist);
+
+ for package in metadata.packages {
+ if workspace_hitlist.remove(&package.name) {
+ for target in package.targets {
+ targets.insert(Target::from_target(&target));
+ }
+ }
+ }
+
+ if workspace_hitlist.is_empty() {
+ Ok(())
+ } else {
+ let package = workspace_hitlist.iter().next().unwrap();
+ Err(io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("package `{}` is not a member of the workspace", package),
+ ))
+ }
+}
+
+fn add_targets(target_paths: &[cargo_metadata::Target], targets: &mut BTreeSet<Target>) {
+ for target in target_paths {
+ targets.insert(Target::from_target(target));
+ }
+}
+
+fn run_rustfmt(
+ targets: &BTreeSet<Target>,
+ fmt_args: &[String],
+ verbosity: Verbosity,
+) -> Result<i32, io::Error> {
+ let by_edition = targets
+ .iter()
+ .inspect(|t| {
+ if verbosity == Verbosity::Verbose {
+ println!("[{} ({})] {:?}", t.kind, t.edition, t.path)
+ }
+ })
+ .fold(BTreeMap::new(), |mut h, t| {
+ h.entry(&t.edition).or_insert_with(Vec::new).push(&t.path);
+ h
+ });
+
+ let mut status = vec![];
+ for (edition, files) in by_edition {
+ let stdout = if verbosity == Verbosity::Quiet {
+ std::process::Stdio::null()
+ } else {
+ std::process::Stdio::inherit()
+ };
+
+ if verbosity == Verbosity::Verbose {
+ print!("rustfmt");
+ print!(" --edition {}", edition);
+ fmt_args.iter().for_each(|f| print!(" {}", f));
+ files.iter().for_each(|f| print!(" {}", f.display()));
+ println!();
+ }
+
+ let mut command = rustfmt_command()
+ .stdout(stdout)
+ .args(files)
+ .args(&["--edition", edition])
+ .args(fmt_args)
+ .spawn()
+ .map_err(|e| match e.kind() {
+ io::ErrorKind::NotFound => io::Error::new(
+ io::ErrorKind::Other,
+ "Could not run rustfmt, please make sure it is in your PATH.",
+ ),
+ _ => e,
+ })?;
+
+ status.push(command.wait()?);
+ }
+
+ Ok(status
+ .iter()
+ .filter_map(|s| if s.success() { None } else { s.code() })
+ .next()
+ .unwrap_or(SUCCESS))
+}
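The fold in `run_rustfmt` buckets targets by edition so that each edition gets exactly one rustfmt invocation. A simplified standalone model of that grouping step; the helper is hypothetical and uses plain string pairs instead of `Target`:

```rust
use std::collections::BTreeMap;

// Hypothetical simplification (not in the upstream file) of the grouping in
// run_rustfmt: bucket file paths by edition so each edition is formatted with
// a single rustfmt invocation.
fn group_by_edition<'a>(targets: &[(&'a str, &'a str)]) -> BTreeMap<&'a str, Vec<&'a str>> {
    targets
        .iter()
        .fold(BTreeMap::new(), |mut buckets, (edition, path)| {
            buckets.entry(*edition).or_insert_with(Vec::new).push(*path);
            buckets
        })
}

// group_by_edition(&[("2018", "a.rs"), ("2021", "b.rs"), ("2018", "c.rs")])
//   yields {"2018": ["a.rs", "c.rs"], "2021": ["b.rs"]}
```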
+
+fn get_cargo_metadata(manifest_path: Option<&Path>) -> Result<cargo_metadata::Metadata, io::Error> {
+ let mut cmd = cargo_metadata::MetadataCommand::new();
+ cmd.no_deps();
+ if let Some(manifest_path) = manifest_path {
+ cmd.manifest_path(manifest_path);
+ }
+ cmd.other_options(vec![String::from("--offline")]);
+
+ match cmd.exec() {
+ Ok(metadata) => Ok(metadata),
+ Err(_) => {
+ cmd.other_options(vec![]);
+ match cmd.exec() {
+ Ok(metadata) => Ok(metadata),
+ Err(error) => Err(io::Error::new(io::ErrorKind::Other, error.to_string())),
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/cargo-fmt/test/message_format.rs b/src/tools/rustfmt/src/cargo-fmt/test/message_format.rs
new file mode 100644
index 000000000..bf44924f1
--- /dev/null
+++ b/src/tools/rustfmt/src/cargo-fmt/test/message_format.rs
@@ -0,0 +1,80 @@
+use super::*;
+
+#[test]
+fn invalid_message_format() {
+ assert_eq!(
+ convert_message_format_to_rustfmt_args("awesome", &mut vec![]),
+ Err(String::from(
+ "invalid --message-format value: awesome. Allowed values are: short|json|human"
+ )),
+ );
+}
+
+#[test]
+fn json_message_format_and_check_arg() {
+ let mut args = vec![String::from("--check")];
+ assert_eq!(
+ convert_message_format_to_rustfmt_args("json", &mut args),
+ Err(String::from(
+ "cannot include --check arg when --message-format is set to json"
+ )),
+ );
+}
+
+#[test]
+fn json_message_format_and_emit_arg() {
+ let mut args = vec![String::from("--emit"), String::from("checkstyle")];
+ assert_eq!(
+ convert_message_format_to_rustfmt_args("json", &mut args),
+ Err(String::from(
+ "cannot include --emit arg when --message-format is set to json"
+ )),
+ );
+}
+
+#[test]
+fn json_message_format() {
+ let mut args = vec![String::from("--edition"), String::from("2018")];
+ assert!(convert_message_format_to_rustfmt_args("json", &mut args).is_ok());
+ assert_eq!(
+ args,
+ vec![
+ String::from("--edition"),
+ String::from("2018"),
+ String::from("--emit"),
+ String::from("json")
+ ]
+ );
+}
+
+#[test]
+fn human_message_format() {
+ let exp_args = vec![String::from("--emit"), String::from("json")];
+ let mut act_args = exp_args.clone();
+ assert!(convert_message_format_to_rustfmt_args("human", &mut act_args).is_ok());
+ assert_eq!(act_args, exp_args);
+}
+
+#[test]
+fn short_message_format() {
+ let mut args = vec![String::from("--check")];
+ assert!(convert_message_format_to_rustfmt_args("short", &mut args).is_ok());
+ assert_eq!(args, vec![String::from("--check"), String::from("-l")]);
+}
+
+#[test]
+fn short_message_format_included_short_list_files_flag() {
+ let mut args = vec![String::from("--check"), String::from("-l")];
+ assert!(convert_message_format_to_rustfmt_args("short", &mut args).is_ok());
+ assert_eq!(args, vec![String::from("--check"), String::from("-l")]);
+}
+
+#[test]
+fn short_message_format_included_long_list_files_flag() {
+ let mut args = vec![String::from("--check"), String::from("--files-with-diff")];
+ assert!(convert_message_format_to_rustfmt_args("short", &mut args).is_ok());
+ assert_eq!(
+ args,
+ vec![String::from("--check"), String::from("--files-with-diff")]
+ );
+}
diff --git a/src/tools/rustfmt/src/cargo-fmt/test/mod.rs b/src/tools/rustfmt/src/cargo-fmt/test/mod.rs
new file mode 100644
index 000000000..56e52fbab
--- /dev/null
+++ b/src/tools/rustfmt/src/cargo-fmt/test/mod.rs
@@ -0,0 +1,141 @@
+use super::*;
+
+mod message_format;
+mod targets;
+
+#[test]
+fn default_options() {
+ let empty: Vec<String> = vec![];
+ let o = Opts::parse_from(&empty);
+ assert_eq!(false, o.quiet);
+ assert_eq!(false, o.verbose);
+ assert_eq!(false, o.version);
+ assert_eq!(false, o.check);
+ assert_eq!(empty, o.packages);
+ assert_eq!(empty, o.rustfmt_options);
+ assert_eq!(false, o.format_all);
+ assert_eq!(None, o.manifest_path);
+ assert_eq!(None, o.message_format);
+}
+
+#[test]
+fn good_options() {
+ let o = Opts::parse_from(&[
+ "test",
+ "-q",
+ "-p",
+ "p1",
+ "-p",
+ "p2",
+ "--message-format",
+ "short",
+ "--check",
+ "--",
+ "--edition",
+ "2018",
+ ]);
+ assert_eq!(true, o.quiet);
+ assert_eq!(false, o.verbose);
+ assert_eq!(false, o.version);
+ assert_eq!(true, o.check);
+ assert_eq!(vec!["p1", "p2"], o.packages);
+ assert_eq!(vec!["--edition", "2018"], o.rustfmt_options);
+ assert_eq!(false, o.format_all);
+ assert_eq!(Some(String::from("short")), o.message_format);
+}
+
+#[test]
+fn unexpected_option() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "unexpected"])
+ .is_err()
+ );
+}
+
+#[test]
+fn unexpected_flag() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "--flag"])
+ .is_err()
+ );
+}
+
+#[test]
+fn mandatory_separator() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "--emit"])
+ .is_err()
+ );
+ assert!(
+ !Opts::command()
+ .try_get_matches_from(&["test", "--", "--emit"])
+ .is_err()
+ );
+}
+
+#[test]
+fn multiple_packages_one_by_one() {
+ let o = Opts::parse_from(&[
+ "test",
+ "-p",
+ "package1",
+ "--package",
+ "package2",
+ "-p",
+ "package3",
+ ]);
+ assert_eq!(3, o.packages.len());
+}
+
+#[test]
+fn multiple_packages_grouped() {
+ let o = Opts::parse_from(&[
+ "test",
+ "--package",
+ "package1",
+ "package2",
+ "-p",
+ "package3",
+ "package4",
+ ]);
+ assert_eq!(4, o.packages.len());
+}
+
+#[test]
+fn empty_packages_1() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "-p"])
+ .is_err()
+ );
+}
+
+#[test]
+fn empty_packages_2() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "-p", "--", "--check"])
+ .is_err()
+ );
+}
+
+#[test]
+fn empty_packages_3() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "-p", "--verbose"])
+ .is_err()
+ );
+}
+
+#[test]
+fn empty_packages_4() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "-p", "--check"])
+ .is_err()
+ );
+}
diff --git a/src/tools/rustfmt/src/cargo-fmt/test/targets.rs b/src/tools/rustfmt/src/cargo-fmt/test/targets.rs
new file mode 100644
index 000000000..b7e7fabdf
--- /dev/null
+++ b/src/tools/rustfmt/src/cargo-fmt/test/targets.rs
@@ -0,0 +1,134 @@
+use super::*;
+
+struct ExpTarget {
+ path: &'static str,
+ edition: &'static str,
+ kind: &'static str,
+}
+
+mod all_targets {
+ use super::*;
+
+ fn assert_correct_targets_loaded(
+ manifest_suffix: &str,
+ source_root: &str,
+ exp_targets: &[ExpTarget],
+ exp_num_targets: usize,
+ ) {
+ let root_path = Path::new("tests/cargo-fmt/source").join(source_root);
+ let get_path = |exp: &str| PathBuf::from(&root_path).join(exp).canonicalize().unwrap();
+ let manifest_path = Path::new(&root_path).join(manifest_suffix);
+ let targets = get_targets(&CargoFmtStrategy::All, Some(manifest_path.as_path()))
+ .expect("Targets should have been loaded");
+
+ assert_eq!(targets.len(), exp_num_targets);
+
+ for target in exp_targets {
+ assert!(targets.contains(&Target {
+ path: get_path(target.path),
+ edition: target.edition.to_owned(),
+ kind: target.kind.to_owned(),
+ }));
+ }
+ }
+
+ mod different_crate_and_dir_names {
+ use super::*;
+
+ fn assert_correct_targets_loaded(manifest_suffix: &str) {
+ let exp_targets = vec![
+ ExpTarget {
+ path: "dependency-dir-name/subdep-dir-name/src/lib.rs",
+ edition: "2018",
+ kind: "lib",
+ },
+ ExpTarget {
+ path: "dependency-dir-name/src/lib.rs",
+ edition: "2018",
+ kind: "lib",
+ },
+ ExpTarget {
+ path: "src/main.rs",
+ edition: "2018",
+ kind: "main",
+ },
+ ];
+ super::assert_correct_targets_loaded(
+ manifest_suffix,
+ "divergent-crate-dir-names",
+ &exp_targets,
+ 3,
+ );
+ }
+
+ #[test]
+ fn correct_targets_from_root() {
+ assert_correct_targets_loaded("Cargo.toml");
+ }
+
+ #[test]
+ fn correct_targets_from_sub_local_dep() {
+ assert_correct_targets_loaded("dependency-dir-name/Cargo.toml");
+ }
+ }
+
+ mod workspaces {
+ use super::*;
+
+ fn assert_correct_targets_loaded(manifest_suffix: &str) {
+ let exp_targets = vec![
+ ExpTarget {
+ path: "ws/a/src/main.rs",
+ edition: "2018",
+ kind: "bin",
+ },
+ ExpTarget {
+ path: "ws/b/src/main.rs",
+ edition: "2018",
+ kind: "bin",
+ },
+ ExpTarget {
+ path: "ws/c/src/lib.rs",
+ edition: "2018",
+ kind: "lib",
+ },
+ ExpTarget {
+ path: "ws/a/d/src/lib.rs",
+ edition: "2018",
+ kind: "lib",
+ },
+ ExpTarget {
+ path: "e/src/main.rs",
+ edition: "2018",
+ kind: "main",
+ },
+ ExpTarget {
+ path: "ws/a/d/f/src/lib.rs",
+ edition: "2018",
+ kind: "lib",
+ },
+ ];
+ super::assert_correct_targets_loaded(
+ manifest_suffix,
+ "workspaces/path-dep-above",
+ &exp_targets,
+ 6,
+ );
+ }
+
+ #[test]
+ fn includes_outside_workspace_deps() {
+ assert_correct_targets_loaded("ws/Cargo.toml");
+ }
+
+ #[test]
+ fn includes_workspace_from_dep_above() {
+ assert_correct_targets_loaded("e/Cargo.toml");
+ }
+
+ #[test]
+ fn includes_all_packages_from_workspace_subdir() {
+ assert_correct_targets_loaded("ws/a/d/f/Cargo.toml");
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/chains.rs b/src/tools/rustfmt/src/chains.rs
new file mode 100644
index 000000000..e26e24ec5
--- /dev/null
+++ b/src/tools/rustfmt/src/chains.rs
@@ -0,0 +1,888 @@
+//! Formatting of chained expressions, i.e., expressions that are chained by
+//! dots: struct and enum field access, method calls, and try shorthand (`?`).
+//!
+//! Instead of walking these subexpressions one-by-one, as is our usual strategy
+//! for expression formatting, we collect maximal sequences of these expressions
+//! and handle them simultaneously.
+//!
+//! Whenever possible, the entire chain is put on a single line. If that fails,
+//! we put each subexpression on a separate line, much like the (default)
+//! function argument strategy.
+//!
+//! Depends on config options: `chain_indent` is the indent to use for
+//! blocks in the parent/root/base of the chain (and the rest of the chain's
+//! alignment).
+//! E.g., for `let foo = { aaaa; bbb; ccc }.bar.baz();`, we would produce the
+//! following layouts for these values of `chain_indent`:
+//! Block:
+//!
+//! ```text
+//! let foo = {
+//! aaaa;
+//! bbb;
+//! ccc
+//! }.bar
+//! .baz();
+//! ```
+//!
+//! Visual:
+//!
+//! ```text
+//! let foo = {
+//! aaaa;
+//! bbb;
+//! ccc
+//! }
+//! .bar
+//! .baz();
+//! ```
+//!
+//! If the first item in the chain is a block expression, we align the dots with
+//! the braces.
+//! Block:
+//!
+//! ```text
+//! let a = foo.bar
+//! .baz()
+//! .qux
+//! ```
+//!
+//! Visual:
+//!
+//! ```text
+//! let a = foo.bar
+//! .baz()
+//! .qux
+//! ```
+
+use std::borrow::Cow;
+use std::cmp::min;
+
+use rustc_ast::{ast, ptr};
+use rustc_span::{symbol, BytePos, Span};
+
+use crate::comment::{rewrite_comment, CharClasses, FullCodeCharKind, RichChar};
+use crate::config::{IndentStyle, Version};
+use crate::expr::rewrite_call;
+use crate::lists::extract_pre_comment;
+use crate::macros::convert_try_mac;
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::utils::{
+ self, first_line_width, last_line_extendable, last_line_width, mk_sp, rewrite_ident,
+ trimmed_last_line_width, wrap_str,
+};
+
+pub(crate) fn rewrite_chain(
+ expr: &ast::Expr,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ let chain = Chain::from_ast(expr, context);
+ debug!("rewrite_chain {:?} {:?}", chain, shape);
+
+ // If this is just an expression with some `?`s, then format it trivially and
+ // return early.
+ if chain.children.is_empty() {
+ return chain.parent.rewrite(context, shape);
+ }
+
+ chain.rewrite(context, shape)
+}
+
+#[derive(Debug)]
+enum CommentPosition {
+ Back,
+ Top,
+}
+
+// An expression plus trailing `?`s to be formatted together.
+#[derive(Debug)]
+struct ChainItem {
+ kind: ChainItemKind,
+ tries: usize,
+ span: Span,
+}
+
+// FIXME: we can't use a reference here because to convert `try!` to `?` we
+// synthesise the AST node. However, I think we could use `Cow` and that
+// would remove a lot of cloning.
+#[derive(Debug)]
+enum ChainItemKind {
+ Parent(ast::Expr),
+ MethodCall(
+ ast::PathSegment,
+ Vec<ast::GenericArg>,
+ Vec<ptr::P<ast::Expr>>,
+ ),
+ StructField(symbol::Ident),
+ TupleField(symbol::Ident, bool),
+ Await,
+ Comment(String, CommentPosition),
+}
+
+impl ChainItemKind {
+ fn is_block_like(&self, context: &RewriteContext<'_>, reps: &str) -> bool {
+ match self {
+ ChainItemKind::Parent(ref expr) => utils::is_block_expr(context, expr, reps),
+ ChainItemKind::MethodCall(..)
+ | ChainItemKind::StructField(..)
+ | ChainItemKind::TupleField(..)
+ | ChainItemKind::Await
+ | ChainItemKind::Comment(..) => false,
+ }
+ }
+
+ fn is_tup_field_access(expr: &ast::Expr) -> bool {
+ match expr.kind {
+ ast::ExprKind::Field(_, ref field) => {
+ field.name.to_string().chars().all(|c| c.is_digit(10))
+ }
+ _ => false,
+ }
+ }
+
+ fn from_ast(context: &RewriteContext<'_>, expr: &ast::Expr) -> (ChainItemKind, Span) {
+ let (kind, span) = match expr.kind {
+ ast::ExprKind::MethodCall(ref segment, ref expressions, _) => {
+ let types = if let Some(ref generic_args) = segment.args {
+ if let ast::GenericArgs::AngleBracketed(ref data) = **generic_args {
+ data.args
+ .iter()
+ .filter_map(|x| match x {
+ ast::AngleBracketedArg::Arg(ref generic_arg) => {
+ Some(generic_arg.clone())
+ }
+ _ => None,
+ })
+ .collect::<Vec<_>>()
+ } else {
+ vec![]
+ }
+ } else {
+ vec![]
+ };
+ let span = mk_sp(expressions[0].span.hi(), expr.span.hi());
+ let kind = ChainItemKind::MethodCall(segment.clone(), types, expressions.clone());
+ (kind, span)
+ }
+ ast::ExprKind::Field(ref nested, field) => {
+ let kind = if Self::is_tup_field_access(expr) {
+ ChainItemKind::TupleField(field, Self::is_tup_field_access(nested))
+ } else {
+ ChainItemKind::StructField(field)
+ };
+ let span = mk_sp(nested.span.hi(), field.span.hi());
+ (kind, span)
+ }
+ ast::ExprKind::Await(ref nested) => {
+ let span = mk_sp(nested.span.hi(), expr.span.hi());
+ (ChainItemKind::Await, span)
+ }
+ _ => return (ChainItemKind::Parent(expr.clone()), expr.span),
+ };
+
+ // Remove comments from the span.
+ let lo = context.snippet_provider.span_before(span, ".");
+ (kind, mk_sp(lo, span.hi()))
+ }
+}
+
+impl Rewrite for ChainItem {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let shape = shape.sub_width(self.tries)?;
+ let rewrite = match self.kind {
+ ChainItemKind::Parent(ref expr) => expr.rewrite(context, shape)?,
+ ChainItemKind::MethodCall(ref segment, ref types, ref exprs) => {
+ Self::rewrite_method_call(segment.ident, types, exprs, self.span, context, shape)?
+ }
+ ChainItemKind::StructField(ident) => format!(".{}", rewrite_ident(context, ident)),
+ ChainItemKind::TupleField(ident, nested) => format!(
+ "{}.{}",
+ if nested && context.config.version() == Version::One {
+ " "
+ } else {
+ ""
+ },
+ rewrite_ident(context, ident)
+ ),
+ ChainItemKind::Await => ".await".to_owned(),
+ ChainItemKind::Comment(ref comment, _) => {
+ rewrite_comment(comment, false, shape, context.config)?
+ }
+ };
+ Some(format!("{}{}", rewrite, "?".repeat(self.tries)))
+ }
+}
+
+impl ChainItem {
+ fn new(context: &RewriteContext<'_>, expr: &ast::Expr, tries: usize) -> ChainItem {
+ let (kind, span) = ChainItemKind::from_ast(context, expr);
+ ChainItem { kind, tries, span }
+ }
+
+ fn comment(span: Span, comment: String, pos: CommentPosition) -> ChainItem {
+ ChainItem {
+ kind: ChainItemKind::Comment(comment, pos),
+ tries: 0,
+ span,
+ }
+ }
+
+ fn is_comment(&self) -> bool {
+ matches!(self.kind, ChainItemKind::Comment(..))
+ }
+
+ fn rewrite_method_call(
+ method_name: symbol::Ident,
+ types: &[ast::GenericArg],
+ args: &[ptr::P<ast::Expr>],
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ) -> Option<String> {
+ let type_str = if types.is_empty() {
+ String::new()
+ } else {
+ let type_list = types
+ .iter()
+ .map(|ty| ty.rewrite(context, shape))
+ .collect::<Option<Vec<_>>>()?;
+
+ format!("::<{}>", type_list.join(", "))
+ };
+ let callee_str = format!(".{}{}", rewrite_ident(context, method_name), type_str);
+ rewrite_call(context, &callee_str, &args[1..], span, shape)
+ }
+}
+
+#[derive(Debug)]
+struct Chain {
+ parent: ChainItem,
+ children: Vec<ChainItem>,
+}
+
+impl Chain {
+ fn from_ast(expr: &ast::Expr, context: &RewriteContext<'_>) -> Chain {
+ let subexpr_list = Self::make_subexpr_list(expr, context);
+
+ // Un-parse the expression tree into ChainItems
+ let mut rev_children = vec![];
+ let mut sub_tries = 0;
+ for subexpr in &subexpr_list {
+ match subexpr.kind {
+ ast::ExprKind::Try(_) => sub_tries += 1,
+ _ => {
+ rev_children.push(ChainItem::new(context, subexpr, sub_tries));
+ sub_tries = 0;
+ }
+ }
+ }
+
+ fn is_tries(s: &str) -> bool {
+ s.chars().all(|c| c == '?')
+ }
+
+ fn is_post_comment(s: &str) -> bool {
+ let comment_start_index = s.chars().position(|c| c == '/');
+ if comment_start_index.is_none() {
+ return false;
+ }
+
+ let newline_index = s.chars().position(|c| c == '\n');
+ if newline_index.is_none() {
+ return true;
+ }
+
+ comment_start_index.unwrap() < newline_index.unwrap()
+ }
+
+ fn handle_post_comment(
+ post_comment_span: Span,
+ post_comment_snippet: &str,
+ prev_span_end: &mut BytePos,
+ children: &mut Vec<ChainItem>,
+ ) {
+ let white_spaces: &[_] = &[' ', '\t'];
+ if post_comment_snippet
+ .trim_matches(white_spaces)
+ .starts_with('\n')
+ {
+ // No post comment.
+ return;
+ }
+ let trimmed_snippet = trim_tries(post_comment_snippet);
+ if is_post_comment(&trimmed_snippet) {
+ children.push(ChainItem::comment(
+ post_comment_span,
+ trimmed_snippet.trim().to_owned(),
+ CommentPosition::Back,
+ ));
+ *prev_span_end = post_comment_span.hi();
+ }
+ }
+
+ let parent = rev_children.pop().unwrap();
+ let mut children = vec![];
+ let mut prev_span_end = parent.span.hi();
+ let mut iter = rev_children.into_iter().rev().peekable();
+ if let Some(first_chain_item) = iter.peek() {
+ let comment_span = mk_sp(prev_span_end, first_chain_item.span.lo());
+ let comment_snippet = context.snippet(comment_span);
+ if !is_tries(comment_snippet.trim()) {
+ handle_post_comment(
+ comment_span,
+ comment_snippet,
+ &mut prev_span_end,
+ &mut children,
+ );
+ }
+ }
+ while let Some(chain_item) = iter.next() {
+ let comment_snippet = context.snippet(chain_item.span);
+ // FIXME: Figure out the way to get a correct span when converting `try!` to `?`.
+ let handle_comment =
+ !(context.config.use_try_shorthand() || is_tries(comment_snippet.trim()));
+
+ // Pre-comment
+ if handle_comment {
+ let pre_comment_span = mk_sp(prev_span_end, chain_item.span.lo());
+ let pre_comment_snippet = trim_tries(context.snippet(pre_comment_span));
+ let (pre_comment, _) = extract_pre_comment(&pre_comment_snippet);
+ match pre_comment {
+ Some(ref comment) if !comment.is_empty() => {
+ children.push(ChainItem::comment(
+ pre_comment_span,
+ comment.to_owned(),
+ CommentPosition::Top,
+ ));
+ }
+ _ => (),
+ }
+ }
+
+ prev_span_end = chain_item.span.hi();
+ children.push(chain_item);
+
+ // Post-comment
+ if !handle_comment || iter.peek().is_none() {
+ continue;
+ }
+
+ let next_lo = iter.peek().unwrap().span.lo();
+ let post_comment_span = mk_sp(prev_span_end, next_lo);
+ let post_comment_snippet = context.snippet(post_comment_span);
+ handle_post_comment(
+ post_comment_span,
+ post_comment_snippet,
+ &mut prev_span_end,
+ &mut children,
+ );
+ }
+
+ Chain { parent, children }
+ }
+
+ // Returns a Vec of the prefixes of the chain.
+ // E.g., for input `a.b.c` we return [`a.b.c`, `a.b`, `a`]
+ fn make_subexpr_list(expr: &ast::Expr, context: &RewriteContext<'_>) -> Vec<ast::Expr> {
+ let mut subexpr_list = vec![expr.clone()];
+
+ while let Some(subexpr) = Self::pop_expr_chain(subexpr_list.last().unwrap(), context) {
+ subexpr_list.push(subexpr.clone());
+ }
+
+ subexpr_list
+ }
+
+ // Returns the expression's subexpression, if it exists. When the subexpr
+ // is a try! macro, we'll convert it to shorthand when the option is set.
+ fn pop_expr_chain(expr: &ast::Expr, context: &RewriteContext<'_>) -> Option<ast::Expr> {
+ match expr.kind {
+ ast::ExprKind::MethodCall(_, ref expressions, _) => {
+ Some(Self::convert_try(&expressions[0], context))
+ }
+ ast::ExprKind::Field(ref subexpr, _)
+ | ast::ExprKind::Try(ref subexpr)
+ | ast::ExprKind::Await(ref subexpr) => Some(Self::convert_try(subexpr, context)),
+ _ => None,
+ }
+ }
+
+ fn convert_try(expr: &ast::Expr, context: &RewriteContext<'_>) -> ast::Expr {
+ match expr.kind {
+ ast::ExprKind::MacCall(ref mac) if context.config.use_try_shorthand() => {
+ if let Some(subexpr) = convert_try_mac(mac, context) {
+ subexpr
+ } else {
+ expr.clone()
+ }
+ }
+ _ => expr.clone(),
+ }
+ }
+}
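`make_subexpr_list` walks the expression from the outside in, so the resulting list is every prefix of the chain, longest first. A simplified standalone model over plain strings; the helper is hypothetical and ignores `try!` conversion and non-chain expressions:

```rust
// Hypothetical string-level model (not in the upstream file) of
// make_subexpr_list: repeatedly peel the trailing `.segment` off a dotted
// chain, collecting every prefix from longest to shortest.
fn chain_prefixes(chain: &str) -> Vec<String> {
    let mut prefixes = vec![chain.to_owned()];
    let mut rest = chain;
    while let Some(dot) = rest.rfind('.') {
        rest = &rest[..dot];
        prefixes.push(rest.to_owned());
    }
    prefixes
}

// chain_prefixes("a.b.c") == vec!["a.b.c", "a.b", "a"]
```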
+
+impl Rewrite for Chain {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ debug!("rewrite chain {:?} {:?}", self, shape);
+
+ let mut formatter = match context.config.indent_style() {
+ IndentStyle::Block => {
+ Box::new(ChainFormatterBlock::new(self)) as Box<dyn ChainFormatter>
+ }
+ IndentStyle::Visual => {
+ Box::new(ChainFormatterVisual::new(self)) as Box<dyn ChainFormatter>
+ }
+ };
+
+ formatter.format_root(&self.parent, context, shape)?;
+ if let Some(result) = formatter.pure_root() {
+ return wrap_str(result, context.config.max_width(), shape);
+ }
+
+ // Decide how to layout the rest of the chain.
+ let child_shape = formatter.child_shape(context, shape)?;
+
+ formatter.format_children(context, child_shape)?;
+ formatter.format_last_child(context, shape, child_shape)?;
+
+ let result = formatter.join_rewrites(context, child_shape)?;
+ wrap_str(result, context.config.max_width(), shape)
+ }
+}
+
+// There are a few types for formatting chains. This is because there is a lot
+// in common between formatting with block vs visual indent, but they are
+// different enough that branching on the indent all over the place gets ugly.
+// Anything that can format a chain is a ChainFormatter.
+trait ChainFormatter {
+ // Parent is the first item in the chain, e.g., `foo` in `foo.bar.baz()`.
+ // Root is the parent plus any other chain items placed on the first line to
+ // avoid an orphan. E.g.,
+ // ```text
+ // foo.bar
+ // .baz()
+ // ```
+ // If `bar` were not part of the root, then foo would be orphaned and 'float'.
+ fn format_root(
+ &mut self,
+ parent: &ChainItem,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ) -> Option<()>;
+ fn child_shape(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<Shape>;
+ fn format_children(&mut self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<()>;
+ fn format_last_child(
+ &mut self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ child_shape: Shape,
+ ) -> Option<()>;
+ fn join_rewrites(&self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<String>;
+ // Returns `Some` if the chain is only a root, None otherwise.
+ fn pure_root(&mut self) -> Option<String>;
+}
+
+// Data and behaviour that is shared by both chain formatters. The concrete
+// formatters can delegate much behaviour to `ChainFormatterShared`.
+struct ChainFormatterShared<'a> {
+ // The current working set of child items.
+ children: &'a [ChainItem],
+ // The current rewrites of items (includes trailing `?`s, but not any way to
+ // connect the rewrites together).
+ rewrites: Vec<String>,
+ // Whether the chain can fit on one line.
+ fits_single_line: bool,
+ // The number of children in the chain. This is not equal to `self.children.len()`
+ // because `self.children` will change size as we process the chain.
+ child_count: usize,
+}
+
+impl<'a> ChainFormatterShared<'a> {
+ fn new(chain: &'a Chain) -> ChainFormatterShared<'a> {
+ ChainFormatterShared {
+ children: &chain.children,
+ rewrites: Vec::with_capacity(chain.children.len() + 1),
+ fits_single_line: false,
+ child_count: chain.children.len(),
+ }
+ }
+
+ fn pure_root(&mut self) -> Option<String> {
+ if self.children.is_empty() {
+ assert_eq!(self.rewrites.len(), 1);
+ Some(self.rewrites.pop().unwrap())
+ } else {
+ None
+ }
+ }
+
+ // Rewrite the last child. The last child of a chain requires special treatment. We need to
+ // know whether 'overflowing' the last child makes for better formatting:
+ //
+ // A chain with overflowing the last child:
+ // ```text
+ // parent.child1.child2.last_child(
+ // a,
+ // b,
+ // c,
+ // )
+ // ```
+ //
+ // A chain without overflowing the last child (in vertical layout):
+ // ```text
+ // parent
+ // .child1
+ // .child2
+ // .last_child(a, b, c)
+ // ```
+ //
+ // In particular, overflowing is effective when the last child is a method with a multi-lined
+ // block-like argument (e.g., closure):
+ // ```text
+ // parent.child1.child2.last_child(|a, b, c| {
+ // let x = foo(a, b, c);
+ // let y = bar(a, b, c);
+ //
+ // // ...
+ //
+ // result
+ // })
+ // ```
+ fn format_last_child(
+ &mut self,
+ may_extend: bool,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ child_shape: Shape,
+ ) -> Option<()> {
+ let last = self.children.last()?;
+ let extendable = may_extend && last_line_extendable(&self.rewrites[0]);
+ let prev_last_line_width = last_line_width(&self.rewrites[0]);
+
+ // Total of all items excluding the last.
+ let almost_total = if extendable {
+ prev_last_line_width
+ } else {
+ self.rewrites
+ .iter()
+ .map(|rw| utils::unicode_str_width(rw))
+ .sum()
+ } + last.tries;
+ let one_line_budget = if self.child_count == 1 {
+ shape.width
+ } else {
+ min(shape.width, context.config.chain_width())
+ }
+ .saturating_sub(almost_total);
+
+ let all_in_one_line = !self.children.iter().any(ChainItem::is_comment)
+ && self.rewrites.iter().all(|s| !s.contains('\n'))
+ && one_line_budget > 0;
+ let last_shape = if all_in_one_line {
+ shape.sub_width(last.tries)?
+ } else if extendable {
+ child_shape.sub_width(last.tries)?
+ } else {
+ child_shape.sub_width(shape.rhs_overhead(context.config) + last.tries)?
+ };
+
+ let mut last_subexpr_str = None;
+ if all_in_one_line || extendable {
+ // First we try to 'overflow' the last child and see if it looks better than using
+ // vertical layout.
+ let one_line_shape = if context.use_block_indent() {
+ last_shape.offset_left(almost_total)
+ } else {
+ last_shape
+ .visual_indent(almost_total)
+ .sub_width(almost_total)
+ };
+
+ if let Some(one_line_shape) = one_line_shape {
+ if let Some(rw) = last.rewrite(context, one_line_shape) {
+ // We allow overflowing here only if both of the following conditions match:
+ // 1. The entire chain fits in a single line except the last child.
+ // 2. `last_child_str.lines().count() >= 5`.
+ let line_count = rw.lines().count();
+ let could_fit_single_line = first_line_width(&rw) <= one_line_budget;
+ if could_fit_single_line && line_count >= 5 {
+ last_subexpr_str = Some(rw);
+ self.fits_single_line = all_in_one_line;
+ } else {
+ // We could not know whether overflowing is better than using vertical
+ // layout, just by looking at the overflowed rewrite. Now we rewrite the
+ // last child on its own line, and compare two rewrites to choose which is
+ // better.
+ let last_shape = child_shape
+ .sub_width(shape.rhs_overhead(context.config) + last.tries)?;
+ match last.rewrite(context, last_shape) {
+ Some(ref new_rw) if !could_fit_single_line => {
+ last_subexpr_str = Some(new_rw.clone());
+ }
+ Some(ref new_rw) if new_rw.lines().count() >= line_count => {
+ last_subexpr_str = Some(rw);
+ self.fits_single_line = could_fit_single_line && all_in_one_line;
+ }
+ new_rw @ Some(..) => {
+ last_subexpr_str = new_rw;
+ }
+ _ => {
+ last_subexpr_str = Some(rw);
+ self.fits_single_line = could_fit_single_line && all_in_one_line;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ let last_shape = if context.use_block_indent() {
+ last_shape
+ } else {
+ child_shape.sub_width(shape.rhs_overhead(context.config) + last.tries)?
+ };
+
+ last_subexpr_str = last_subexpr_str.or_else(|| last.rewrite(context, last_shape));
+ self.rewrites.push(last_subexpr_str?);
+ Some(())
+ }
+
+ fn join_rewrites(&self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<String> {
+ let connector = if self.fits_single_line {
+ // Yay, we can put everything on one line.
+ Cow::from("")
+ } else {
+ // Use new lines.
+ if context.force_one_line_chain.get() {
+ return None;
+ }
+ child_shape.to_string_with_newline(context.config)
+ };
+
+ let mut rewrite_iter = self.rewrites.iter();
+ let mut result = rewrite_iter.next().unwrap().clone();
+ let children_iter = self.children.iter();
+ let iter = rewrite_iter.zip(children_iter);
+
+ for (rewrite, chain_item) in iter {
+ match chain_item.kind {
+ ChainItemKind::Comment(_, CommentPosition::Back) => result.push(' '),
+ ChainItemKind::Comment(_, CommentPosition::Top) => result.push_str(&connector),
+ _ => result.push_str(&connector),
+ }
+ result.push_str(rewrite);
+ }
+
+ Some(result)
+ }
+}
+
+// Formats a chain using block indent.
+struct ChainFormatterBlock<'a> {
+ shared: ChainFormatterShared<'a>,
+ root_ends_with_block: bool,
+}
+
+impl<'a> ChainFormatterBlock<'a> {
+ fn new(chain: &'a Chain) -> ChainFormatterBlock<'a> {
+ ChainFormatterBlock {
+ shared: ChainFormatterShared::new(chain),
+ root_ends_with_block: false,
+ }
+ }
+}
+
+impl<'a> ChainFormatter for ChainFormatterBlock<'a> {
+ fn format_root(
+ &mut self,
+ parent: &ChainItem,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ) -> Option<()> {
+ let mut root_rewrite: String = parent.rewrite(context, shape)?;
+
+ let mut root_ends_with_block = parent.kind.is_block_like(context, &root_rewrite);
+ let tab_width = context.config.tab_spaces().saturating_sub(shape.offset);
+
+ while root_rewrite.len() <= tab_width && !root_rewrite.contains('\n') {
+ let item = &self.shared.children[0];
+ if let ChainItemKind::Comment(..) = item.kind {
+ break;
+ }
+ let shape = shape.offset_left(root_rewrite.len())?;
+ match &item.rewrite(context, shape) {
+ Some(rewrite) => root_rewrite.push_str(rewrite),
+ None => break,
+ }
+
+ root_ends_with_block = last_line_extendable(&root_rewrite);
+
+ self.shared.children = &self.shared.children[1..];
+ if self.shared.children.is_empty() {
+ break;
+ }
+ }
+ self.shared.rewrites.push(root_rewrite);
+ self.root_ends_with_block = root_ends_with_block;
+ Some(())
+ }
+
+ fn child_shape(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<Shape> {
+ Some(
+ if self.root_ends_with_block {
+ shape.block_indent(0)
+ } else {
+ shape.block_indent(context.config.tab_spaces())
+ }
+ .with_max_width(context.config),
+ )
+ }
+
+ fn format_children(&mut self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<()> {
+ for item in &self.shared.children[..self.shared.children.len() - 1] {
+ let rewrite = item.rewrite(context, child_shape)?;
+ self.shared.rewrites.push(rewrite);
+ }
+ Some(())
+ }
+
+ fn format_last_child(
+ &mut self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ child_shape: Shape,
+ ) -> Option<()> {
+ self.shared
+ .format_last_child(true, context, shape, child_shape)
+ }
+
+ fn join_rewrites(&self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<String> {
+ self.shared.join_rewrites(context, child_shape)
+ }
+
+ fn pure_root(&mut self) -> Option<String> {
+ self.shared.pure_root()
+ }
+}
+
+// Formats a chain using visual indent.
+struct ChainFormatterVisual<'a> {
+ shared: ChainFormatterShared<'a>,
+ // The extra offset from the chain's shape to the position of the `.`
+ offset: usize,
+}
+
+impl<'a> ChainFormatterVisual<'a> {
+ fn new(chain: &'a Chain) -> ChainFormatterVisual<'a> {
+ ChainFormatterVisual {
+ shared: ChainFormatterShared::new(chain),
+ offset: 0,
+ }
+ }
+}
+
+impl<'a> ChainFormatter for ChainFormatterVisual<'a> {
+ fn format_root(
+ &mut self,
+ parent: &ChainItem,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ) -> Option<()> {
+ let parent_shape = shape.visual_indent(0);
+ let mut root_rewrite = parent.rewrite(context, parent_shape)?;
+ let multiline = root_rewrite.contains('\n');
+ self.offset = if multiline {
+ last_line_width(&root_rewrite).saturating_sub(shape.used_width())
+ } else {
+ trimmed_last_line_width(&root_rewrite)
+ };
+
+ if !multiline || parent.kind.is_block_like(context, &root_rewrite) {
+ let item = &self.shared.children[0];
+ if let ChainItemKind::Comment(..) = item.kind {
+ self.shared.rewrites.push(root_rewrite);
+ return Some(());
+ }
+ let child_shape = parent_shape
+ .visual_indent(self.offset)
+ .sub_width(self.offset)?;
+ let rewrite = item.rewrite(context, child_shape)?;
+ match wrap_str(rewrite, context.config.max_width(), shape) {
+ Some(rewrite) => root_rewrite.push_str(&rewrite),
+ None => {
+ // We couldn't fit in at the visual indent, try the last
+ // indent.
+ let rewrite = item.rewrite(context, parent_shape)?;
+ root_rewrite.push_str(&rewrite);
+ self.offset = 0;
+ }
+ }
+
+ self.shared.children = &self.shared.children[1..];
+ }
+
+ self.shared.rewrites.push(root_rewrite);
+ Some(())
+ }
+
+ fn child_shape(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<Shape> {
+ shape
+ .with_max_width(context.config)
+ .offset_left(self.offset)
+ .map(|s| s.visual_indent(0))
+ }
+
+ fn format_children(&mut self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<()> {
+ for item in &self.shared.children[..self.shared.children.len() - 1] {
+ let rewrite = item.rewrite(context, child_shape)?;
+ self.shared.rewrites.push(rewrite);
+ }
+ Some(())
+ }
+
+ fn format_last_child(
+ &mut self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ child_shape: Shape,
+ ) -> Option<()> {
+ self.shared
+ .format_last_child(false, context, shape, child_shape)
+ }
+
+ fn join_rewrites(&self, context: &RewriteContext<'_>, child_shape: Shape) -> Option<String> {
+ self.shared.join_rewrites(context, child_shape)
+ }
+
+ fn pure_root(&mut self) -> Option<String> {
+ self.shared.pure_root()
+ }
+}
+
+/// Removes try operators (`?`s) that appear in the given string. If removing
+/// them leaves an empty line, remove that line as well unless it is the first
+/// line (we need the first newline for detecting pre/post comment).
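+///
+/// Illustrative example: `trim_tries("foo()?\n    .bar()?")` yields
+/// `"foo()\n    .bar()"`.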
+fn trim_tries(s: &str) -> String {
+ let mut result = String::with_capacity(s.len());
+ let mut line_buffer = String::with_capacity(s.len());
+ for (kind, rich_char) in CharClasses::new(s.chars()) {
+ match rich_char.get_char() {
+ '\n' => {
+ if result.is_empty() || !line_buffer.trim().is_empty() {
+ result.push_str(&line_buffer);
+ result.push('\n')
+ }
+ line_buffer.clear();
+ }
+ '?' if kind == FullCodeCharKind::Normal => continue,
+ c => line_buffer.push(c),
+ }
+ }
+ if !line_buffer.trim().is_empty() {
+ result.push_str(&line_buffer);
+ }
+ result
+}
diff --git a/src/tools/rustfmt/src/closures.rs b/src/tools/rustfmt/src/closures.rs
new file mode 100644
index 000000000..88a6bebb6
--- /dev/null
+++ b/src/tools/rustfmt/src/closures.rs
@@ -0,0 +1,448 @@
+use rustc_ast::{ast, ptr};
+use rustc_span::Span;
+
+use crate::attr::get_attrs_from_stmt;
+use crate::config::lists::*;
+use crate::config::Version;
+use crate::expr::{block_contains_comment, is_simple_block, is_unsafe_block, rewrite_cond};
+use crate::items::{span_hi_for_param, span_lo_for_param};
+use crate::lists::{definitive_tactic, itemize_list, write_list, ListFormatting, Separator};
+use crate::overflow::OverflowableItem;
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::types::rewrite_lifetime_param;
+use crate::utils::{last_line_width, left_most_sub_expr, stmt_expr, NodeIdExt};
+
+// This module is pretty messy because of the rules around closures and blocks:
+// FIXME - the below is probably no longer true in full.
+// * if there is a return type, then there must be braces,
+// * given a closure with braces, whether that is parsed to give an inner block
+// or not depends on if there is a return type and if there are statements
+// in that block,
+// * if the first expression in the body ends with a block (i.e., is a
+// statement without needing a semi-colon), then adding or removing braces
+// can change whether it is treated as an expression or statement.
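+//
+// For illustration only (not an exhaustive statement of the rules): a closure
+// with a return type such as `|x| -> u32 { x + 1 }` must keep its braces,
+// while `|x| { x + 1 }` may be collapsed to `|x| x + 1` when the body is a
+// single simple expression.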
+
+pub(crate) fn rewrite_closure(
+ binder: &ast::ClosureBinder,
+ capture: ast::CaptureBy,
+ is_async: &ast::Async,
+ movability: ast::Movability,
+ fn_decl: &ast::FnDecl,
+ body: &ast::Expr,
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ debug!("rewrite_closure {:?}", body);
+
+ let (prefix, extra_offset) = rewrite_closure_fn_decl(
+ binder, capture, is_async, movability, fn_decl, body, span, context, shape,
+ )?;
+ // 1 = space between `|...|` and body.
+ let body_shape = shape.offset_left(extra_offset)?;
+
+ if let ast::ExprKind::Block(ref block, _) = body.kind {
+ // The body of the closure is an empty block.
+ if block.stmts.is_empty() && !block_contains_comment(context, block) {
+ return body
+ .rewrite(context, shape)
+ .map(|s| format!("{} {}", prefix, s));
+ }
+
+ let result = match fn_decl.output {
+ ast::FnRetTy::Default(_) if !context.inside_macro() => {
+ try_rewrite_without_block(body, &prefix, context, shape, body_shape)
+ }
+ _ => None,
+ };
+
+ result.or_else(|| {
+ // Either we require a block, or tried without and failed.
+ rewrite_closure_block(block, &prefix, context, body_shape)
+ })
+ } else {
+ rewrite_closure_expr(body, &prefix, context, body_shape).or_else(|| {
+ // The closure originally had a non-block expression, but we can't fit on
+ // one line, so we'll insert a block.
+ rewrite_closure_with_block(body, &prefix, context, body_shape)
+ })
+ }
+}
+
+fn try_rewrite_without_block(
+ expr: &ast::Expr,
+ prefix: &str,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ body_shape: Shape,
+) -> Option<String> {
+ let expr = get_inner_expr(expr, prefix, context);
+
+ if is_block_closure_forced(context, expr) {
+ rewrite_closure_with_block(expr, prefix, context, shape)
+ } else {
+ rewrite_closure_expr(expr, prefix, context, body_shape)
+ }
+}
+
+fn get_inner_expr<'a>(
+ expr: &'a ast::Expr,
+ prefix: &str,
+ context: &RewriteContext<'_>,
+) -> &'a ast::Expr {
+ if let ast::ExprKind::Block(ref block, _) = expr.kind {
+ if !needs_block(block, prefix, context) {
+ // block.stmts.len() == 1 except with `|| {{}}`;
+ // https://github.com/rust-lang/rustfmt/issues/3844
+ if let Some(expr) = block.stmts.first().and_then(stmt_expr) {
+ return get_inner_expr(expr, prefix, context);
+ }
+ }
+ }
+
+ expr
+}
+
+// Figure out if a block is necessary.
+fn needs_block(block: &ast::Block, prefix: &str, context: &RewriteContext<'_>) -> bool {
+ let has_attributes = block.stmts.first().map_or(false, |first_stmt| {
+ !get_attrs_from_stmt(first_stmt).is_empty()
+ });
+
+ is_unsafe_block(block)
+ || block.stmts.len() > 1
+ || has_attributes
+ || block_contains_comment(context, block)
+ || prefix.contains('\n')
+}
+
+fn veto_block(e: &ast::Expr) -> bool {
+ match e.kind {
+ ast::ExprKind::Call(..)
+ | ast::ExprKind::Binary(..)
+ | ast::ExprKind::Cast(..)
+ | ast::ExprKind::Type(..)
+ | ast::ExprKind::Assign(..)
+ | ast::ExprKind::AssignOp(..)
+ | ast::ExprKind::Field(..)
+ | ast::ExprKind::Index(..)
+ | ast::ExprKind::Range(..)
+ | ast::ExprKind::Try(..) => true,
+ _ => false,
+ }
+}
+
+// Rewrite a closure with a single expression, wrapping its body in a block.
+// || { #[attr] foo() } -> Block { #[attr] foo() }
+fn rewrite_closure_with_block(
+ body: &ast::Expr,
+ prefix: &str,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ let left_most = left_most_sub_expr(body);
+ let veto_block = veto_block(body) && !expr_requires_semi_to_be_stmt(left_most);
+ if veto_block {
+ return None;
+ }
+
+ let block = ast::Block {
+ stmts: vec![ast::Stmt {
+ id: ast::NodeId::root(),
+ kind: ast::StmtKind::Expr(ptr::P(body.clone())),
+ span: body.span,
+ }],
+ id: ast::NodeId::root(),
+ rules: ast::BlockCheckMode::Default,
+ tokens: None,
+ span: body
+ .attrs
+ .first()
+ .map(|attr| attr.span.to(body.span))
+ .unwrap_or(body.span),
+ could_be_bare_literal: false,
+ };
+ let block = crate::expr::rewrite_block_with_visitor(
+ context,
+ "",
+ &block,
+ Some(&body.attrs),
+ None,
+ shape,
+ false,
+ )?;
+ Some(format!("{} {}", prefix, block))
+}
+
+// Rewrite a closure with a single expression without wrapping its body in a block.
+fn rewrite_closure_expr(
+ expr: &ast::Expr,
+ prefix: &str,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ fn allow_multi_line(expr: &ast::Expr) -> bool {
+ match expr.kind {
+ ast::ExprKind::Match(..)
+ | ast::ExprKind::Async(..)
+ | ast::ExprKind::Block(..)
+ | ast::ExprKind::TryBlock(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::Struct(..) => true,
+
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr)
+ | ast::ExprKind::Cast(ref expr, _) => allow_multi_line(expr),
+
+ _ => false,
+ }
+ }
+
+ // When rewriting a closure's body without a block, we require it to fit on a single line
+ // unless it is a block-like expression or we are inside a macro call.
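+ // Illustrative example: `|x| x + 1` must fit on one line, while something like
+ // `|x| match x { .. }` may span multiple lines because `match` is block-like.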
+ let veto_multiline = (!allow_multi_line(expr) && !context.inside_macro())
+ || context.config.force_multiline_blocks();
+ expr.rewrite(context, shape)
+ .and_then(|rw| {
+ if veto_multiline && rw.contains('\n') {
+ None
+ } else {
+ Some(rw)
+ }
+ })
+ .map(|rw| format!("{} {}", prefix, rw))
+}
+
+// Rewrite a closure whose body is a block.
+fn rewrite_closure_block(
+ block: &ast::Block,
+ prefix: &str,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ Some(format!("{} {}", prefix, block.rewrite(context, shape)?))
+}
+
+// Return type is (prefix, extra_offset)
+fn rewrite_closure_fn_decl(
+ binder: &ast::ClosureBinder,
+ capture: ast::CaptureBy,
+ asyncness: &ast::Async,
+ movability: ast::Movability,
+ fn_decl: &ast::FnDecl,
+ body: &ast::Expr,
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<(String, usize)> {
+ let binder = match binder {
+ ast::ClosureBinder::For { generic_params, .. } if generic_params.is_empty() => {
+ "for<> ".to_owned()
+ }
+ ast::ClosureBinder::For { generic_params, .. } => {
+ let lifetime_str = rewrite_lifetime_param(context, shape, generic_params)?;
+ format!("for<{lifetime_str}> ")
+ }
+ ast::ClosureBinder::NotPresent => "".to_owned(),
+ };
+
+ let immovable = if movability == ast::Movability::Static {
+ "static "
+ } else {
+ ""
+ };
+ let is_async = if asyncness.is_async() { "async " } else { "" };
+ let mover = if capture == ast::CaptureBy::Value {
+ "move "
+ } else {
+ ""
+ };
+ // 4 = "|| {".len(), which is overconservative when the closure consists of
+ // a single expression.
+ let nested_shape = shape
+ .shrink_left(binder.len() + immovable.len() + is_async.len() + mover.len())?
+ .sub_width(4)?;
+
+ // 1 = |
+ let param_offset = nested_shape.indent + 1;
+ let param_shape = nested_shape.offset_left(1)?.visual_indent(0);
+ let ret_str = fn_decl.output.rewrite(context, param_shape)?;
+
+ let param_items = itemize_list(
+ context.snippet_provider,
+ fn_decl.inputs.iter(),
+ "|",
+ ",",
+ |param| span_lo_for_param(param),
+ |param| span_hi_for_param(context, param),
+ |param| param.rewrite(context, param_shape),
+ context.snippet_provider.span_after(span, "|"),
+ body.span.lo(),
+ false,
+ );
+ let item_vec = param_items.collect::<Vec<_>>();
+ // 1 = space between parameters and return type.
+ let horizontal_budget = nested_shape.width.saturating_sub(ret_str.len() + 1);
+ let tactic = definitive_tactic(
+ &item_vec,
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ horizontal_budget,
+ );
+ let param_shape = match tactic {
+ DefinitiveListTactic::Horizontal => param_shape.sub_width(ret_str.len() + 1)?,
+ _ => param_shape,
+ };
+
+ let fmt = ListFormatting::new(param_shape, context.config)
+ .tactic(tactic)
+ .preserve_newline(true);
+ let list_str = write_list(&item_vec, &fmt)?;
+ let mut prefix = format!("{}{}{}{}|{}|", binder, immovable, is_async, mover, list_str);
+
+ if !ret_str.is_empty() {
+ if prefix.contains('\n') {
+ prefix.push('\n');
+ prefix.push_str(&param_offset.to_string(context.config));
+ } else {
+ prefix.push(' ');
+ }
+ prefix.push_str(&ret_str);
+ }
+ // 1 = space between `|...|` and body.
+ let extra_offset = last_line_width(&prefix) + 1;
+
+ Some((prefix, extra_offset))
+}
+
+// Rewrite a closure placed at the end of the function call's arguments.
+// Returns `None` if the reformatted closure 'looks bad'.
+pub(crate) fn rewrite_last_closure(
+ context: &RewriteContext<'_>,
+ expr: &ast::Expr,
+ shape: Shape,
+) -> Option<String> {
+ if let ast::ExprKind::Closure(
+ ref binder,
+ capture,
+ ref is_async,
+ movability,
+ ref fn_decl,
+ ref body,
+ _,
+ ) = expr.kind
+ {
+ let body = match body.kind {
+ ast::ExprKind::Block(ref block, _)
+ if !is_unsafe_block(block)
+ && !context.inside_macro()
+ && is_simple_block(context, block, Some(&body.attrs)) =>
+ {
+ stmt_expr(&block.stmts[0]).unwrap_or(body)
+ }
+ _ => body,
+ };
+ let (prefix, extra_offset) = rewrite_closure_fn_decl(
+ binder, capture, is_async, movability, fn_decl, body, expr.span, context, shape,
+ )?;
+ // If the closure spans multiple lines before its body, do not overflow the closure.
+ if prefix.contains('\n') {
+ return None;
+ }
+
+ let body_shape = shape.offset_left(extra_offset)?;
+
+ // We force the use of a block for the closure body for certain kinds of expressions.
+ if is_block_closure_forced(context, body) {
+ return rewrite_closure_with_block(body, &prefix, context, body_shape).map(
+ |body_str| {
+ match fn_decl.output {
+ ast::FnRetTy::Default(..) if body_str.lines().count() <= 7 => {
+ // If the expression can fit in a single line, we need not force block
+ // closure. However, if the closure has a return type, then we must
+ // keep the blocks.
+ match rewrite_closure_expr(body, &prefix, context, shape) {
+ Some(single_line_body_str)
+ if !single_line_body_str.contains('\n') =>
+ {
+ single_line_body_str
+ }
+ _ => body_str,
+ }
+ }
+ _ => body_str,
+ }
+ },
+ );
+ }
+
+ // When overflowing a closure that consists of a single control-flow expression,
+ // force the use of a block if its condition spans multiple lines.
+ let is_multi_lined_cond = rewrite_cond(context, body, body_shape).map_or(false, |cond| {
+ cond.contains('\n') || cond.len() > body_shape.width
+ });
+ if is_multi_lined_cond {
+ return rewrite_closure_with_block(body, &prefix, context, body_shape);
+ }
+
+ // Seems fine, just format the closure in usual manner.
+ return expr.rewrite(context, shape);
+ }
+ None
+}
+
+/// Returns `true` if the given vector of arguments has more than one `ast::ExprKind::Closure`.
+pub(crate) fn args_have_many_closure(args: &[OverflowableItem<'_>]) -> bool {
+ args.iter()
+ .filter_map(OverflowableItem::to_expr)
+ .filter(|expr| matches!(expr.kind, ast::ExprKind::Closure(..)))
+ .count()
+ > 1
+}
+
+fn is_block_closure_forced(context: &RewriteContext<'_>, expr: &ast::Expr) -> bool {
+ // If we are inside a macro, we do not want to add or remove a block from the closure body.
+ if context.inside_macro() {
+ false
+ } else {
+ is_block_closure_forced_inner(expr, context.config.version())
+ }
+}
+
+fn is_block_closure_forced_inner(expr: &ast::Expr, version: Version) -> bool {
+ match expr.kind {
+ ast::ExprKind::If(..) | ast::ExprKind::While(..) | ast::ExprKind::ForLoop(..) => true,
+ ast::ExprKind::Loop(..) if version == Version::Two => true,
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr)
+ | ast::ExprKind::Cast(ref expr, _) => is_block_closure_forced_inner(expr, version),
+ _ => false,
+ }
+}
+
+/// Does this expression require a semicolon to be treated
+/// as a statement? The negation of this: 'can this expression
+/// be used as a statement without a semicolon' -- is used
+/// as an early-bail-out in the parser so that, for instance,
+/// if true {...} else {...}
+/// |x| 5
+/// isn't parsed as (if true {...} else {...} | x) | 5
+// From https://github.com/rust-lang/rust/blob/master/src/libsyntax/parse/classify.rs.
+fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool {
+ match e.kind {
+ ast::ExprKind::If(..)
+ | ast::ExprKind::Match(..)
+ | ast::ExprKind::Block(..)
+ | ast::ExprKind::While(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::ForLoop(..)
+ | ast::ExprKind::TryBlock(..) => false,
+ _ => true,
+ }
+}
diff --git a/src/tools/rustfmt/src/comment.rs b/src/tools/rustfmt/src/comment.rs
new file mode 100644
index 000000000..4d565afc1
--- /dev/null
+++ b/src/tools/rustfmt/src/comment.rs
@@ -0,0 +1,2007 @@
+// Formatting and tools for comments.
+
+use std::{self, borrow::Cow, iter};
+
+use itertools::{multipeek, MultiPeek};
+use lazy_static::lazy_static;
+use regex::Regex;
+use rustc_span::Span;
+
+use crate::config::Config;
+use crate::rewrite::RewriteContext;
+use crate::shape::{Indent, Shape};
+use crate::string::{rewrite_string, StringFormat};
+use crate::utils::{
+ count_newlines, first_line_width, last_line_width, trim_left_preserve_layout,
+ trimmed_last_line_width, unicode_str_width,
+};
+use crate::{ErrorKind, FormattingError};
+
+lazy_static! {
+ /// A regex matching reference doc links.
+ ///
+ /// ```markdown
+ /// /// An [example].
+ /// ///
+ /// /// [example]: this::is::a::link
+ /// ```
+ static ref REFERENCE_LINK_URL: Regex = Regex::new(r"^\[.+\]\s?:").unwrap();
+}
+
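+/// Returns `true` if the comment starts with `//` and its third character is
+/// neither alphanumeric nor whitespace, e.g. (illustrative) `//@ some comment`.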
+fn is_custom_comment(comment: &str) -> bool {
+ if !comment.starts_with("//") {
+ false
+ } else if let Some(c) = comment.chars().nth(2) {
+ !c.is_alphanumeric() && !c.is_whitespace()
+ } else {
+ false
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub(crate) enum CommentStyle<'a> {
+ DoubleSlash,
+ TripleSlash,
+ Doc,
+ SingleBullet,
+ DoubleBullet,
+ Exclamation,
+ Custom(&'a str),
+}
+
+fn custom_opener(s: &str) -> &str {
+ s.lines().next().map_or("", |first_line| {
+ first_line
+ .find(' ')
+ .map_or(first_line, |space_index| &first_line[0..=space_index])
+ })
+}
+
+impl<'a> CommentStyle<'a> {
+ /// Returns `true` if the commenting style covers a line only.
+ pub(crate) fn is_line_comment(&self) -> bool {
+ match *self {
+ CommentStyle::DoubleSlash
+ | CommentStyle::TripleSlash
+ | CommentStyle::Doc
+ | CommentStyle::Custom(_) => true,
+ _ => false,
+ }
+ }
+
+ /// Returns `true` if the commenting style can span over multiple lines.
+ pub(crate) fn is_block_comment(&self) -> bool {
+ match *self {
+ CommentStyle::SingleBullet | CommentStyle::DoubleBullet | CommentStyle::Exclamation => {
+ true
+ }
+ _ => false,
+ }
+ }
+
+ /// Returns `true` if the commenting style is for documentation.
+ pub(crate) fn is_doc_comment(&self) -> bool {
+ matches!(*self, CommentStyle::TripleSlash | CommentStyle::Doc)
+ }
+
+ pub(crate) fn opener(&self) -> &'a str {
+ match *self {
+ CommentStyle::DoubleSlash => "// ",
+ CommentStyle::TripleSlash => "/// ",
+ CommentStyle::Doc => "//! ",
+ CommentStyle::SingleBullet => "/* ",
+ CommentStyle::DoubleBullet => "/** ",
+ CommentStyle::Exclamation => "/*! ",
+ CommentStyle::Custom(opener) => opener,
+ }
+ }
+
+ pub(crate) fn closer(&self) -> &'a str {
+ match *self {
+ CommentStyle::DoubleSlash
+ | CommentStyle::TripleSlash
+ | CommentStyle::Custom(..)
+ | CommentStyle::Doc => "",
+ CommentStyle::SingleBullet | CommentStyle::DoubleBullet | CommentStyle::Exclamation => {
+ " */"
+ }
+ }
+ }
+
+ pub(crate) fn line_start(&self) -> &'a str {
+ match *self {
+ CommentStyle::DoubleSlash => "// ",
+ CommentStyle::TripleSlash => "/// ",
+ CommentStyle::Doc => "//! ",
+ CommentStyle::SingleBullet | CommentStyle::DoubleBullet | CommentStyle::Exclamation => {
+ " * "
+ }
+ CommentStyle::Custom(opener) => opener,
+ }
+ }
+
+ pub(crate) fn to_str_tuplet(&self) -> (&'a str, &'a str, &'a str) {
+ (self.opener(), self.closer(), self.line_start())
+ }
+}
+
+pub(crate) fn comment_style(orig: &str, normalize_comments: bool) -> CommentStyle<'_> {
+ if !normalize_comments {
+ if orig.starts_with("/**") && !orig.starts_with("/**/") {
+ CommentStyle::DoubleBullet
+ } else if orig.starts_with("/*!") {
+ CommentStyle::Exclamation
+ } else if orig.starts_with("/*") {
+ CommentStyle::SingleBullet
+ } else if orig.starts_with("///") && orig.chars().nth(3).map_or(true, |c| c != '/') {
+ CommentStyle::TripleSlash
+ } else if orig.starts_with("//!") {
+ CommentStyle::Doc
+ } else if is_custom_comment(orig) {
+ CommentStyle::Custom(custom_opener(orig))
+ } else {
+ CommentStyle::DoubleSlash
+ }
+ } else if (orig.starts_with("///") && orig.chars().nth(3).map_or(true, |c| c != '/'))
+ || (orig.starts_with("/**") && !orig.starts_with("/**/"))
+ {
+ CommentStyle::TripleSlash
+ } else if orig.starts_with("//!") || orig.starts_with("/*!") {
+ CommentStyle::Doc
+ } else if is_custom_comment(orig) {
+ CommentStyle::Custom(custom_opener(orig))
+ } else {
+ CommentStyle::DoubleSlash
+ }
+}
+
+/// Returns true if the last line of the passed string finishes with a block-comment.
+pub(crate) fn is_last_comment_block(s: &str) -> bool {
+ s.trim_end().ends_with("*/")
+}
+
+/// Combine `prev_str` and `next_str` into a single `String`. `span` may contain
+/// comments between two strings. If there are such comments, then that will be
+/// recovered. If `allow_extend` is true and there is no comment between the two
+/// strings, then they will be put on a single line as long as doing so does not
+/// exceed max width.
+pub(crate) fn combine_strs_with_missing_comments(
+ context: &RewriteContext<'_>,
+ prev_str: &str,
+ next_str: &str,
+ span: Span,
+ shape: Shape,
+ allow_extend: bool,
+) -> Option<String> {
+ trace!(
+ "combine_strs_with_missing_comments `{}` `{}` {:?} {:?}",
+ prev_str,
+ next_str,
+ span,
+ shape
+ );
+
+ let mut result =
+ String::with_capacity(prev_str.len() + next_str.len() + shape.indent.width() + 128);
+ result.push_str(prev_str);
+ let mut allow_one_line = !prev_str.contains('\n') && !next_str.contains('\n');
+ let first_sep =
+ if prev_str.is_empty() || next_str.is_empty() || trimmed_last_line_width(prev_str) == 0 {
+ ""
+ } else {
+ " "
+ };
+ let mut one_line_width =
+ last_line_width(prev_str) + first_line_width(next_str) + first_sep.len();
+
+ let config = context.config;
+ let indent = shape.indent;
+ let missing_comment = rewrite_missing_comment(span, shape, context)?;
+
+ if missing_comment.is_empty() {
+ if allow_extend && one_line_width <= shape.width {
+ result.push_str(first_sep);
+ } else if !prev_str.is_empty() {
+ result.push_str(&indent.to_string_with_newline(config))
+ }
+ result.push_str(next_str);
+ return Some(result);
+ }
+
+ // We have a missing comment between the first expression and the second expression.
+
+ // Peek at the original source code and find out whether there is a newline between the first
+ // expression and the second expression or the missing comment. We will preserve the original
+ // layout whenever possible.
+ let original_snippet = context.snippet(span);
+ let prefer_same_line = if let Some(pos) = original_snippet.find('/') {
+ !original_snippet[..pos].contains('\n')
+ } else {
+ !original_snippet.contains('\n')
+ };
+
+ one_line_width -= first_sep.len();
+ let first_sep = if prev_str.is_empty() || missing_comment.is_empty() {
+ Cow::from("")
+ } else {
+ let one_line_width = last_line_width(prev_str) + first_line_width(&missing_comment) + 1;
+ if prefer_same_line && one_line_width <= shape.width {
+ Cow::from(" ")
+ } else {
+ indent.to_string_with_newline(config)
+ }
+ };
+ result.push_str(&first_sep);
+ result.push_str(&missing_comment);
+
+ let second_sep = if missing_comment.is_empty() || next_str.is_empty() {
+ Cow::from("")
+ } else if missing_comment.starts_with("//") {
+ indent.to_string_with_newline(config)
+ } else {
+ one_line_width += missing_comment.len() + first_sep.len() + 1;
+ allow_one_line &= !missing_comment.starts_with("//") && !missing_comment.contains('\n');
+ if prefer_same_line && allow_one_line && one_line_width <= shape.width {
+ Cow::from(" ")
+ } else {
+ indent.to_string_with_newline(config)
+ }
+ };
+ result.push_str(&second_sep);
+ result.push_str(next_str);
+
+ Some(result)
+}
+
+pub(crate) fn rewrite_doc_comment(orig: &str, shape: Shape, config: &Config) -> Option<String> {
+ identify_comment(orig, false, shape, config, true)
+}
+
+pub(crate) fn rewrite_comment(
+ orig: &str,
+ block_style: bool,
+ shape: Shape,
+ config: &Config,
+) -> Option<String> {
+ identify_comment(orig, block_style, shape, config, false)
+}
+
+fn identify_comment(
+ orig: &str,
+ block_style: bool,
+ shape: Shape,
+ config: &Config,
+ is_doc_comment: bool,
+) -> Option<String> {
+ let style = comment_style(orig, false);
+
+ // Computes the byte length of a line, taking into account a newline if the line is part of a
+ // paragraph.
+ fn compute_len(orig: &str, line: &str) -> usize {
+ if orig.len() > line.len() {
+ if orig.as_bytes()[line.len()] == b'\r' {
+ line.len() + 2
+ } else {
+ line.len() + 1
+ }
+ } else {
+ line.len()
+ }
+ }
+
+ // Get the first group of line comments having the same commenting style.
+ //
+ // Returns a tuple with:
+ // - a boolean indicating if there is a blank line
+ // - a number indicating the size of the first group of comments
+ fn consume_same_line_comments(
+ style: CommentStyle<'_>,
+ orig: &str,
+ line_start: &str,
+ ) -> (bool, usize) {
+ let mut first_group_ending = 0;
+ let mut hbl = false;
+
+ for line in orig.lines() {
+ let trimmed_line = line.trim_start();
+ if trimmed_line.is_empty() {
+ hbl = true;
+ break;
+ } else if trimmed_line.starts_with(line_start)
+ || comment_style(trimmed_line, false) == style
+ {
+ first_group_ending += compute_len(&orig[first_group_ending..], line);
+ } else {
+ break;
+ }
+ }
+ (hbl, first_group_ending)
+ }
+
+ let (has_bare_lines, first_group_ending) = match style {
+ CommentStyle::DoubleSlash | CommentStyle::TripleSlash | CommentStyle::Doc => {
+ let line_start = style.line_start().trim_start();
+ consume_same_line_comments(style, orig, line_start)
+ }
+ CommentStyle::Custom(opener) => {
+ let trimmed_opener = opener.trim_end();
+ consume_same_line_comments(style, orig, trimmed_opener)
+ }
+ // for a block comment, search for the closing symbol
+ CommentStyle::DoubleBullet | CommentStyle::SingleBullet | CommentStyle::Exclamation => {
+ let closer = style.closer().trim_start();
+ let mut count = orig.matches(closer).count();
+ let mut closing_symbol_offset = 0;
+ let mut hbl = false;
+ let mut first = true;
+ for line in orig.lines() {
+ closing_symbol_offset += compute_len(&orig[closing_symbol_offset..], line);
+ let mut trimmed_line = line.trim_start();
+ if !trimmed_line.starts_with('*')
+ && !trimmed_line.starts_with("//")
+ && !trimmed_line.starts_with("/*")
+ {
+ hbl = true;
+ }
+
+ // Remove opener from consideration when searching for closer
+ if first {
+ let opener = style.opener().trim_end();
+ trimmed_line = &trimmed_line[opener.len()..];
+ first = false;
+ }
+ if trimmed_line.ends_with(closer) {
+ count -= 1;
+ if count == 0 {
+ break;
+ }
+ }
+ }
+ (hbl, closing_symbol_offset)
+ }
+ };
+
+ let (first_group, rest) = orig.split_at(first_group_ending);
+ let rewritten_first_group =
+ if !config.normalize_comments() && has_bare_lines && style.is_block_comment() {
+ trim_left_preserve_layout(first_group, shape.indent, config)?
+ } else if !config.normalize_comments()
+ && !config.wrap_comments()
+ && !config.format_code_in_doc_comments()
+ {
+ light_rewrite_comment(first_group, shape.indent, config, is_doc_comment)
+ } else {
+ rewrite_comment_inner(
+ first_group,
+ block_style,
+ style,
+ shape,
+ config,
+ is_doc_comment || style.is_doc_comment(),
+ )?
+ };
+ if rest.is_empty() {
+ Some(rewritten_first_group)
+ } else {
+ identify_comment(
+ rest.trim_start(),
+ block_style,
+ shape,
+ config,
+ is_doc_comment,
+ )
+ .map(|rest_str| {
+ format!(
+ "{}\n{}{}{}",
+ rewritten_first_group,
+ // insert back the blank line
+ if has_bare_lines && style.is_line_comment() {
+ "\n"
+ } else {
+ ""
+ },
+ shape.indent.to_string(config),
+ rest_str
+ )
+ })
+ }
+}
+
+/// Enum indicating whether the code block contains Rust, based on its attributes.
+enum CodeBlockAttribute {
+ Rust,
+ NotRust,
+}
+
+impl CodeBlockAttribute {
+ /// Parses a comma-separated attribute list. Returns `Rust` only if all
+ /// attributes are valid Rust attributes.
+ /// See <https://doc.rust-lang.org/rustdoc/print.html#attributes>
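+ ///
+ /// Illustrative examples: `new("rust,no_run")` yields `Rust`, while
+ /// `new("text")` and `new("ignore")` yield `NotRust`.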
+ fn new(attributes: &str) -> CodeBlockAttribute {
+ for attribute in attributes.split(',') {
+ match attribute.trim() {
+ "" | "rust" | "should_panic" | "no_run" | "edition2015" | "edition2018"
+ | "edition2021" => (),
+ "ignore" | "compile_fail" | "text" => return CodeBlockAttribute::NotRust,
+ _ => return CodeBlockAttribute::NotRust,
+ }
+ }
+ CodeBlockAttribute::Rust
+ }
+}
+
+/// Block that is formatted as an item.
+///
+/// An item starts with either a star `*`, a dash `-`, or a greater-than `>`.
+/// Different levels of indentation are handled by shrinking the shape accordingly.
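+///
+/// For example (illustrative), the comment line `* some item` starts an itemized
+/// block whose `opener` is `"* "`, and continuation lines belonging to that item
+/// are expected to start with at least two whitespace characters.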
+struct ItemizedBlock {
+ /// the lines that are identified as part of an itemized block
+ lines: Vec<String>,
+ /// the number of characters (typically whitespaces) up to the item sigil
+ indent: usize,
+ /// the string that marks the start of an item
+ opener: String,
+ /// sequence of characters (typically whitespaces) to prefix new lines that are part of the item
+ line_start: String,
+}
+
+impl ItemizedBlock {
+ /// Returns `true` if the line is formatted as an item
+ fn is_itemized_line(line: &str) -> bool {
+ let trimmed = line.trim_start();
+ trimmed.starts_with("* ") || trimmed.starts_with("- ") || trimmed.starts_with("> ")
+ }
+
+ /// Creates a new `ItemizedBlock` described by the given line.
+ /// `is_itemized_line` needs to be called first.
+ fn new(line: &str) -> ItemizedBlock {
+ let space_to_sigil = line.chars().take_while(|c| c.is_whitespace()).count();
+ // +2 = '* ', which will add the appropriate amount of whitespace to keep itemized
+ // content formatted correctly.
+ let mut indent = space_to_sigil + 2;
+ let mut line_start = " ".repeat(indent);
+
+ // Markdown blockquotes start with a "> "
+ if line.trim_start().starts_with(">") {
+ // remove the original +2 indent because there might be multiple nested block quotes
+ // and it's easier to reason about the final indent by just taking the length
+ // of the new line_start. We update the indent because it affects the max width
+ // of each formatted line.
+ line_start = itemized_block_quote_start(line, line_start, 2);
+ indent = line_start.len();
+ }
+ ItemizedBlock {
+ lines: vec![line[indent..].to_string()],
+ indent,
+ opener: line[..indent].to_string(),
+ line_start,
+ }
+ }
+
+ /// Returns a `StringFormat` used for formatting the content of an item.
+ fn create_string_format<'a>(&'a self, fmt: &'a StringFormat<'_>) -> StringFormat<'a> {
+ StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "",
+ line_end: "",
+ shape: Shape::legacy(fmt.shape.width.saturating_sub(self.indent), Indent::empty()),
+ trim_end: true,
+ config: fmt.config,
+ }
+ }
+
+ /// Returns `true` if the line is part of the current itemized block.
+ /// If it is, then it is added to the internal lines list.
+ fn add_line(&mut self, line: &str) -> bool {
+ if !ItemizedBlock::is_itemized_line(line)
+ && self.indent <= line.chars().take_while(|c| c.is_whitespace()).count()
+ {
+ self.lines.push(line.to_string());
+ return true;
+ }
+ false
+ }
+
+ /// Returns the block as a string, with each line trimmed at the start.
+ fn trimmed_block_as_string(&self) -> String {
+ self.lines
+ .iter()
+ .map(|line| format!("{} ", line.trim_start()))
+ .collect::<String>()
+ }
+
+ /// Returns the block as a string in its original form.
+ fn original_block_as_string(&self) -> String {
+ self.lines.join("\n")
+ }
+}
+
+/// Determine the line_start when formatting markdown block quotes.
+/// The original line_start likely contains indentation (whitespaces), which we'd like to
+/// replace with '> ' characters.
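+///
+/// For example (illustrative): for the line `> quoted item` and a two-character
+/// whitespace `line_start`, the returned `line_start` is `"> "`; each additional
+/// nesting level adds another `"> "`.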
+fn itemized_block_quote_start(line: &str, mut line_start: String, remove_indent: usize) -> String {
+ let quote_level = line
+ .chars()
+ .take_while(|c| !c.is_alphanumeric())
+ .fold(0, |acc, c| if c == '>' { acc + 1 } else { acc });
+
+ for _ in 0..remove_indent {
+ line_start.pop();
+ }
+
+ for _ in 0..quote_level {
+ line_start.push_str("> ")
+ }
+ line_start
+}
+
+struct CommentRewrite<'a> {
+ result: String,
+ code_block_buffer: String,
+ is_prev_line_multi_line: bool,
+ code_block_attr: Option<CodeBlockAttribute>,
+ item_block: Option<ItemizedBlock>,
+ comment_line_separator: String,
+ indent_str: String,
+ max_width: usize,
+ fmt_indent: Indent,
+ fmt: StringFormat<'a>,
+
+ opener: String,
+ closer: String,
+ line_start: String,
+ style: CommentStyle<'a>,
+}
+
+impl<'a> CommentRewrite<'a> {
+ fn new(
+ orig: &'a str,
+ block_style: bool,
+ shape: Shape,
+ config: &'a Config,
+ ) -> CommentRewrite<'a> {
+ let ((opener, closer, line_start), style) = if block_style {
+ (
+ CommentStyle::SingleBullet.to_str_tuplet(),
+ CommentStyle::SingleBullet,
+ )
+ } else {
+ let style = comment_style(orig, config.normalize_comments());
+ (style.to_str_tuplet(), style)
+ };
+
+ let max_width = shape
+ .width
+ .checked_sub(closer.len() + opener.len())
+ .unwrap_or(1);
+ let indent_str = shape.indent.to_string_with_newline(config).to_string();
+
+ let mut cr = CommentRewrite {
+ result: String::with_capacity(orig.len() * 2),
+ code_block_buffer: String::with_capacity(128),
+ is_prev_line_multi_line: false,
+ code_block_attr: None,
+ item_block: None,
+ comment_line_separator: format!("{}{}", indent_str, line_start),
+ max_width,
+ indent_str,
+ fmt_indent: shape.indent,
+
+ fmt: StringFormat {
+ opener: "",
+ closer: "",
+ line_start,
+ line_end: "",
+ shape: Shape::legacy(max_width, shape.indent),
+ trim_end: true,
+ config,
+ },
+
+ opener: opener.to_owned(),
+ closer: closer.to_owned(),
+ line_start: line_start.to_owned(),
+ style,
+ };
+ cr.result.push_str(opener);
+ cr
+ }
+
+ fn join_block(s: &str, sep: &str) -> String {
+ let mut result = String::with_capacity(s.len() + 128);
+ let mut iter = s.lines().peekable();
+ while let Some(line) = iter.next() {
+ result.push_str(line);
+ result.push_str(match iter.peek() {
+ Some(next_line) if next_line.is_empty() => sep.trim_end(),
+ Some(..) => sep,
+ None => "",
+ });
+ }
+ result
+ }
+
+ /// Checks if any characters were written to the result buffer after the start of the comment.
+ /// When calling [`CommentRewrite::new()`], the result buffer is initialized with the opening
+ /// characters of the comment.
+ fn buffer_contains_comment(&self) -> bool {
+ // if self.result.len() < self.opener.len() then an empty comment is in the buffer
+ // if self.result.len() > self.opener.len() then a non empty comment is in the buffer
+ self.result.len() != self.opener.len()
+ }
+
+ fn finish(mut self) -> String {
+ if !self.code_block_buffer.is_empty() {
+ // There is a code block that is not properly enclosed by backticks.
+ // We will leave it untouched.
+ self.result.push_str(&self.comment_line_separator);
+ self.result.push_str(&Self::join_block(
+ &trim_custom_comment_prefix(&self.code_block_buffer),
+ &self.comment_line_separator,
+ ));
+ }
+
+ if let Some(ref ib) = self.item_block {
+ // the last few lines are part of an itemized block
+ self.fmt.shape = Shape::legacy(self.max_width, self.fmt_indent);
+ let item_fmt = ib.create_string_format(&self.fmt);
+
+ // only push a comment_line_separator for ItemizedBlocks if the comment is not empty
+ if self.buffer_contains_comment() {
+ self.result.push_str(&self.comment_line_separator);
+ }
+
+ self.result.push_str(&ib.opener);
+ match rewrite_string(
+ &ib.trimmed_block_as_string(),
+ &item_fmt,
+ self.max_width.saturating_sub(ib.indent),
+ ) {
+ Some(s) => self.result.push_str(&Self::join_block(
+ &s,
+ &format!("{}{}", self.comment_line_separator, ib.line_start),
+ )),
+ None => self.result.push_str(&Self::join_block(
+ &ib.original_block_as_string(),
+ &self.comment_line_separator,
+ )),
+ };
+ }
+
+ self.result.push_str(&self.closer);
+ if self.result.ends_with(&self.opener) && self.opener.ends_with(' ') {
+ // Trailing space.
+ self.result.pop();
+ }
+
+ self.result
+ }
+
+ fn handle_line(
+ &mut self,
+ orig: &'a str,
+ i: usize,
+ line: &'a str,
+ has_leading_whitespace: bool,
+ is_doc_comment: bool,
+ ) -> bool {
+ let num_newlines = count_newlines(orig);
+ let is_last = i == num_newlines;
+ let needs_new_comment_line = if self.style.is_block_comment() {
+ num_newlines > 0 || self.buffer_contains_comment()
+ } else {
+ self.buffer_contains_comment()
+ };
+
+ if let Some(ref mut ib) = self.item_block {
+ if ib.add_line(line) {
+ return false;
+ }
+ self.is_prev_line_multi_line = false;
+ self.fmt.shape = Shape::legacy(self.max_width, self.fmt_indent);
+ let item_fmt = ib.create_string_format(&self.fmt);
+
+ // only push a comment_line_separator if we need to start a new comment line
+ if needs_new_comment_line {
+ self.result.push_str(&self.comment_line_separator);
+ }
+
+ self.result.push_str(&ib.opener);
+ match rewrite_string(
+ &ib.trimmed_block_as_string(),
+ &item_fmt,
+ self.max_width.saturating_sub(ib.indent),
+ ) {
+ Some(s) => self.result.push_str(&Self::join_block(
+ &s,
+ &format!("{}{}", self.comment_line_separator, ib.line_start),
+ )),
+ None => self.result.push_str(&Self::join_block(
+ &ib.original_block_as_string(),
+ &self.comment_line_separator,
+ )),
+ };
+ } else if self.code_block_attr.is_some() {
+ if line.starts_with("```") {
+ let code_block = match self.code_block_attr.as_ref().unwrap() {
+ CodeBlockAttribute::Rust
+ if self.fmt.config.format_code_in_doc_comments()
+ && !self.code_block_buffer.is_empty() =>
+ {
+ let mut config = self.fmt.config.clone();
+ config.set().wrap_comments(false);
+ let comment_max_width = config
+ .doc_comment_code_block_width()
+ .min(config.max_width());
+ config.set().max_width(comment_max_width);
+ if let Some(s) =
+ crate::format_code_block(&self.code_block_buffer, &config, false)
+ {
+ trim_custom_comment_prefix(&s.snippet)
+ } else {
+ trim_custom_comment_prefix(&self.code_block_buffer)
+ }
+ }
+ _ => trim_custom_comment_prefix(&self.code_block_buffer),
+ };
+ if !code_block.is_empty() {
+ self.result.push_str(&self.comment_line_separator);
+ self.result
+ .push_str(&Self::join_block(&code_block, &self.comment_line_separator));
+ }
+ self.code_block_buffer.clear();
+ self.result.push_str(&self.comment_line_separator);
+ self.result.push_str(line);
+ self.code_block_attr = None;
+ } else {
+ self.code_block_buffer
+ .push_str(&hide_sharp_behind_comment(line));
+ self.code_block_buffer.push('\n');
+ }
+ return false;
+ }
+
+ self.code_block_attr = None;
+ self.item_block = None;
+ if let Some(stripped) = line.strip_prefix("```") {
+ self.code_block_attr = Some(CodeBlockAttribute::new(stripped))
+ } else if self.fmt.config.wrap_comments() && ItemizedBlock::is_itemized_line(line) {
+ let ib = ItemizedBlock::new(line);
+ self.item_block = Some(ib);
+ return false;
+ }
+
+ if self.result == self.opener {
+ let force_leading_whitespace = &self.opener == "/* " && count_newlines(orig) == 0;
+ if !has_leading_whitespace && !force_leading_whitespace && self.result.ends_with(' ') {
+ self.result.pop();
+ }
+ if line.is_empty() {
+ return false;
+ }
+ } else if self.is_prev_line_multi_line && !line.is_empty() {
+ self.result.push(' ')
+ } else if is_last && line.is_empty() {
+ // trailing blank lines are unwanted
+ if !self.closer.is_empty() {
+ self.result.push_str(&self.indent_str);
+ }
+ return true;
+ } else {
+ self.result.push_str(&self.comment_line_separator);
+ if !has_leading_whitespace && self.result.ends_with(' ') {
+ self.result.pop();
+ }
+ }
+
+ let is_markdown_header_doc_comment = is_doc_comment && line.starts_with("#");
+
+ // We only want to wrap the comment if:
+ // 1) wrap_comments = true is configured
+ // 2) The comment is not the start of a markdown header doc comment
+ // 3) The comment width exceeds the shape's width
+ // 4) No URLs were found in the comment
+ let should_wrap_comment = self.fmt.config.wrap_comments()
+ && !is_markdown_header_doc_comment
+ && unicode_str_width(line) > self.fmt.shape.width
+ && !has_url(line);
+
+ if should_wrap_comment {
+ match rewrite_string(line, &self.fmt, self.max_width) {
+ Some(ref s) => {
+ self.is_prev_line_multi_line = s.contains('\n');
+ self.result.push_str(s);
+ }
+ None if self.is_prev_line_multi_line => {
+ // We failed to put the current `line` next to the previous `line`.
+ // Remove the trailing space, then start rewrite on the next line.
+ self.result.pop();
+ self.result.push_str(&self.comment_line_separator);
+ self.fmt.shape = Shape::legacy(self.max_width, self.fmt_indent);
+ match rewrite_string(line, &self.fmt, self.max_width) {
+ Some(ref s) => {
+ self.is_prev_line_multi_line = s.contains('\n');
+ self.result.push_str(s);
+ }
+ None => {
+ self.is_prev_line_multi_line = false;
+ self.result.push_str(line);
+ }
+ }
+ }
+ None => {
+ self.is_prev_line_multi_line = false;
+ self.result.push_str(line);
+ }
+ }
+
+ self.fmt.shape = if self.is_prev_line_multi_line {
+ // 1 = " "
+ let offset = 1 + last_line_width(&self.result) - self.line_start.len();
+ Shape {
+ width: self.max_width.saturating_sub(offset),
+ indent: self.fmt_indent,
+ offset: self.fmt.shape.offset + offset,
+ }
+ } else {
+ Shape::legacy(self.max_width, self.fmt_indent)
+ };
+ } else {
+ if line.is_empty() && self.result.ends_with(' ') && !is_last {
+ // Remove space if this is an empty comment or a doc comment.
+ self.result.pop();
+ }
+ self.result.push_str(line);
+ self.fmt.shape = Shape::legacy(self.max_width, self.fmt_indent);
+ self.is_prev_line_multi_line = false;
+ }
+
+ false
+ }
+}
+
+fn rewrite_comment_inner(
+ orig: &str,
+ block_style: bool,
+ style: CommentStyle<'_>,
+ shape: Shape,
+ config: &Config,
+ is_doc_comment: bool,
+) -> Option<String> {
+ let mut rewriter = CommentRewrite::new(orig, block_style, shape, config);
+
+ let line_breaks = count_newlines(orig.trim_end());
+ let lines = orig
+ .lines()
+ .enumerate()
+ .map(|(i, mut line)| {
+ line = trim_end_unless_two_whitespaces(line.trim_start(), is_doc_comment);
+ // Drop old closer.
+ if i == line_breaks && line.ends_with("*/") && !line.starts_with("//") {
+ line = line[..(line.len() - 2)].trim_end();
+ }
+
+ line
+ })
+ .map(|s| left_trim_comment_line(s, &style))
+ .map(|(line, has_leading_whitespace)| {
+ if orig.starts_with("/*") && line_breaks == 0 {
+ (
+ line.trim_start(),
+ has_leading_whitespace || config.normalize_comments(),
+ )
+ } else {
+ (line, has_leading_whitespace || config.normalize_comments())
+ }
+ });
+
+ for (i, (line, has_leading_whitespace)) in lines.enumerate() {
+ if rewriter.handle_line(orig, i, line, has_leading_whitespace, is_doc_comment) {
+ break;
+ }
+ }
+
+ Some(rewriter.finish())
+}
+
+const RUSTFMT_CUSTOM_COMMENT_PREFIX: &str = "//#### ";
+
+fn hide_sharp_behind_comment(s: &str) -> Cow<'_, str> {
+ let s_trimmed = s.trim();
+ if s_trimmed.starts_with("# ") || s_trimmed == "#" {
+ Cow::from(format!("{}{}", RUSTFMT_CUSTOM_COMMENT_PREFIX, s))
+ } else {
+ Cow::from(s)
+ }
+}
+
+fn trim_custom_comment_prefix(s: &str) -> String {
+ s.lines()
+ .map(|line| {
+ let left_trimmed = line.trim_start();
+ if left_trimmed.starts_with(RUSTFMT_CUSTOM_COMMENT_PREFIX) {
+ left_trimmed.trim_start_matches(RUSTFMT_CUSTOM_COMMENT_PREFIX)
+ } else {
+ line
+ }
+ })
+ .collect::<Vec<_>>()
+ .join("\n")
+}
+
+/// Returns `true` if the given string MAY include URLs or the like.
+fn has_url(s: &str) -> bool {
+ // This function may return false positives, but should get its job done in most cases.
+ s.contains("https://")
+ || s.contains("http://")
+ || s.contains("ftp://")
+ || s.contains("file://")
+ || REFERENCE_LINK_URL.is_match(s)
+}
+
+/// Given the span, rewrite the missing comment inside it if available.
+/// Note that the given span must only include comments (or leading/trailing whitespaces).
+pub(crate) fn rewrite_missing_comment(
+ span: Span,
+ shape: Shape,
+ context: &RewriteContext<'_>,
+) -> Option<String> {
+ let missing_snippet = context.snippet(span);
+ let trimmed_snippet = missing_snippet.trim();
+ // check the span starts with a comment
+ let pos = trimmed_snippet.find('/');
+ if !trimmed_snippet.is_empty() && pos.is_some() {
+ rewrite_comment(trimmed_snippet, false, shape, context.config)
+ } else {
+ Some(String::new())
+ }
+}
+
+/// Recover the missing comments in the specified span, if available.
+/// The layout of the comments will be preserved as long as it does not break the code
+/// and its total width does not exceed the max width.
+pub(crate) fn recover_missing_comment_in_span(
+ span: Span,
+ shape: Shape,
+ context: &RewriteContext<'_>,
+ used_width: usize,
+) -> Option<String> {
+ let missing_comment = rewrite_missing_comment(span, shape, context)?;
+ if missing_comment.is_empty() {
+ Some(String::new())
+ } else {
+ let missing_snippet = context.snippet(span);
+ let pos = missing_snippet.find('/')?;
+ // 1 = ` `
+ let total_width = missing_comment.len() + used_width + 1;
+ let force_new_line_before_comment =
+ missing_snippet[..pos].contains('\n') || total_width > context.config.max_width();
+ let sep = if force_new_line_before_comment {
+ shape.indent.to_string_with_newline(context.config)
+ } else {
+ Cow::from(" ")
+ };
+ Some(format!("{}{}", sep, missing_comment))
+ }
+}
+
+/// Trims trailing whitespace unless it consists of two or more spaces.
+fn trim_end_unless_two_whitespaces(s: &str, is_doc_comment: bool) -> &str {
+ if is_doc_comment && s.ends_with(" ") {
+ s
+ } else {
+ s.trim_end()
+ }
+}
+
+/// Trims whitespace and aligns to indent, but otherwise does not change comments.
+fn light_rewrite_comment(
+ orig: &str,
+ offset: Indent,
+ config: &Config,
+ is_doc_comment: bool,
+) -> String {
+ let lines: Vec<&str> = orig
+ .lines()
+ .map(|l| {
+ // This is basically just l.trim(), but in the case that a line starts
+ // with `*` we want to leave one space before it, so it aligns with the
+ // `*` in `/*`.
+ let first_non_whitespace = l.find(|c| !char::is_whitespace(c));
+ let left_trimmed = if let Some(fnw) = first_non_whitespace {
+ if l.as_bytes()[fnw] == b'*' && fnw > 0 {
+ &l[fnw - 1..]
+ } else {
+ &l[fnw..]
+ }
+ } else {
+ ""
+ };
+ // Preserve markdown's double-space line break syntax in doc comment.
+ trim_end_unless_two_whitespaces(left_trimmed, is_doc_comment)
+ })
+ .collect();
+ lines.join(&format!("\n{}", offset.to_string(config)))
+}
+
+/// Trims comment characters and possibly a single space from the left of a string.
+/// Does not trim all whitespace. If a single space is trimmed from the left of the string,
+/// this function returns true.
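+///
+/// Illustrative example: `left_trim_comment_line("/// some docs", &CommentStyle::TripleSlash)`
+/// returns `("some docs", true)`.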
+fn left_trim_comment_line<'a>(line: &'a str, style: &CommentStyle<'_>) -> (&'a str, bool) {
+ if line.starts_with("//! ")
+ || line.starts_with("/// ")
+ || line.starts_with("/*! ")
+ || line.starts_with("/** ")
+ {
+ (&line[4..], true)
+ } else if let CommentStyle::Custom(opener) = *style {
+ if let Some(stripped) = line.strip_prefix(opener) {
+ (stripped, true)
+ } else {
+ (&line[opener.trim_end().len()..], false)
+ }
+ } else if line.starts_with("/* ")
+ || line.starts_with("// ")
+ || line.starts_with("//!")
+ || line.starts_with("///")
+ || line.starts_with("** ")
+ || line.starts_with("/*!")
+ || (line.starts_with("/**") && !line.starts_with("/**/"))
+ {
+ (&line[3..], line.chars().nth(2).unwrap() == ' ')
+ } else if line.starts_with("/*")
+ || line.starts_with("* ")
+ || line.starts_with("//")
+ || line.starts_with("**")
+ {
+ (&line[2..], line.chars().nth(1).unwrap() == ' ')
+ } else if let Some(stripped) = line.strip_prefix('*') {
+ (stripped, false)
+ } else {
+ (line, line.starts_with(' '))
+ }
+}
+
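+/// Helpers for searching a string while skipping any commented-out regions.
+///
+/// Illustrative example: `"foo /* bar */ bar".find_uncommented("bar")` returns
+/// `Some(14)`, the byte index where the second, uncommented `bar` starts.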
+pub(crate) trait FindUncommented {
+ fn find_uncommented(&self, pat: &str) -> Option<usize>;
+ fn find_last_uncommented(&self, pat: &str) -> Option<usize>;
+}
+
+impl FindUncommented for str {
+ fn find_uncommented(&self, pat: &str) -> Option<usize> {
+ let mut needle_iter = pat.chars();
+ for (kind, (i, b)) in CharClasses::new(self.char_indices()) {
+ match needle_iter.next() {
+ None => {
+ return Some(i - pat.len());
+ }
+ Some(c) => match kind {
+ FullCodeCharKind::Normal | FullCodeCharKind::InString if b == c => {}
+ _ => {
+ needle_iter = pat.chars();
+ }
+ },
+ }
+ }
+
+ // Handle case where the pattern is a suffix of the search string
+ match needle_iter.next() {
+ Some(_) => None,
+ None => Some(self.len() - pat.len()),
+ }
+ }
+
+ fn find_last_uncommented(&self, pat: &str) -> Option<usize> {
+ if let Some(left) = self.find_uncommented(pat) {
+ let mut result = left;
+ // add 1 to use find_last_uncommented for &str after pat
+ while let Some(next) = self[(result + 1)..].find_last_uncommented(pat) {
+ result += next + 1;
+ }
+ Some(result)
+ } else {
+ None
+ }
+ }
+}
+
+// Returns the first byte position after the first comment. The given string
+// is expected to be prefixed by a comment, including delimiters.
+// Good: `/* /* inner */ outer */ code();`
+// Bad: `code(); // hello\n world!`
+pub(crate) fn find_comment_end(s: &str) -> Option<usize> {
+ let mut iter = CharClasses::new(s.char_indices());
+ for (kind, (i, _c)) in &mut iter {
+ if kind == FullCodeCharKind::Normal || kind == FullCodeCharKind::InString {
+ return Some(i);
+ }
+ }
+
+ // Handle case where the comment ends at the end of `s`.
+ if iter.status == CharClassesStatus::Normal {
+ Some(s.len())
+ } else {
+ None
+ }
+}
+
+/// Returns `true` if text contains any comment.
+pub(crate) fn contains_comment(text: &str) -> bool {
+ CharClasses::new(text.chars()).any(|(kind, _)| kind.is_comment())
+}
+
+pub(crate) struct CharClasses<T>
+where
+ T: Iterator,
+ T::Item: RichChar,
+{
+ base: MultiPeek<T>,
+ status: CharClassesStatus,
+}
+
+pub(crate) trait RichChar {
+ fn get_char(&self) -> char;
+}
+
+impl RichChar for char {
+ fn get_char(&self) -> char {
+ *self
+ }
+}
+
+impl RichChar for (usize, char) {
+ fn get_char(&self) -> char {
+ self.1
+ }
+}
+
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+enum CharClassesStatus {
+ Normal,
+ /// Character is within a string
+ LitString,
+ LitStringEscape,
+ /// Character is within a raw string
+ LitRawString(u32),
+ RawStringPrefix(u32),
+ RawStringSuffix(u32),
+ LitChar,
+ LitCharEscape,
+ /// Character inside a block comment, with the integer indicating the nesting depth of the
+ /// comment
+ BlockComment(u32),
+ /// Character inside a block-commented string, with the integer indicating the nesting depth
+ /// of the comment
+ StringInBlockComment(u32),
+ /// Status when the '/' has been consumed, but not yet the '*'; the depth is
+ /// the new depth (after the comment opening).
+ BlockCommentOpening(u32),
+ /// Status when the '*' has been consumed, but not yet the '/'; the depth is
+ /// the new depth (after the comment closing).
+ BlockCommentClosing(u32),
+ /// Character is within a line comment
+ LineComment,
+}
+
+/// Distinguishes between the functional part of code and comments.
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+pub(crate) enum CodeCharKind {
+ Normal,
+ Comment,
+}
+
+/// Distinguish between functional part of code and comments,
+/// describing opening and closing of comments for ease when chunking
+/// code from tagged characters
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+pub(crate) enum FullCodeCharKind {
+ Normal,
+ /// The first character of a comment; there is only one per comment (always '/')
+ StartComment,
+ /// Any character inside a comment including the second character of comment
+ /// marks ("//", "/*")
+ InComment,
+ /// Last character of a comment, '\n' for a line comment, '/' for a block comment.
+ EndComment,
+ /// Start of a multiline string inside a comment
+ StartStringCommented,
+ /// End of a multiline string inside a comment
+ EndStringCommented,
+ /// Inside a commented string
+ InStringCommented,
+ /// Start of a multiline string
+ StartString,
+ /// End of a multiline string
+ EndString,
+ /// Inside a string.
+ InString,
+}
+
+impl FullCodeCharKind {
+ pub(crate) fn is_comment(self) -> bool {
+ match self {
+ FullCodeCharKind::StartComment
+ | FullCodeCharKind::InComment
+ | FullCodeCharKind::EndComment
+ | FullCodeCharKind::StartStringCommented
+ | FullCodeCharKind::InStringCommented
+ | FullCodeCharKind::EndStringCommented => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if the character is inside a comment
+ pub(crate) fn inside_comment(self) -> bool {
+ match self {
+ FullCodeCharKind::InComment
+ | FullCodeCharKind::StartStringCommented
+ | FullCodeCharKind::InStringCommented
+ | FullCodeCharKind::EndStringCommented => true,
+ _ => false,
+ }
+ }
+
+ pub(crate) fn is_string(self) -> bool {
+ self == FullCodeCharKind::InString || self == FullCodeCharKind::StartString
+ }
+
+ /// Returns true if the character is within a commented string
+ pub(crate) fn is_commented_string(self) -> bool {
+ self == FullCodeCharKind::InStringCommented
+ || self == FullCodeCharKind::StartStringCommented
+ }
+
+ fn to_codecharkind(self) -> CodeCharKind {
+ if self.is_comment() {
+ CodeCharKind::Comment
+ } else {
+ CodeCharKind::Normal
+ }
+ }
+}
+
+impl<T> CharClasses<T>
+where
+ T: Iterator,
+ T::Item: RichChar,
+{
+ pub(crate) fn new(base: T) -> CharClasses<T> {
+ CharClasses {
+ base: multipeek(base),
+ status: CharClassesStatus::Normal,
+ }
+ }
+}
+
+fn is_raw_string_suffix<T>(iter: &mut MultiPeek<T>, count: u32) -> bool
+where
+ T: Iterator,
+ T::Item: RichChar,
+{
+ for _ in 0..count {
+ match iter.peek() {
+ Some(c) if c.get_char() == '#' => continue,
+ _ => return false,
+ }
+ }
+ true
+}
+
+impl<T> Iterator for CharClasses<T>
+where
+ T: Iterator,
+ T::Item: RichChar,
+{
+ type Item = (FullCodeCharKind, T::Item);
+
+ fn next(&mut self) -> Option<(FullCodeCharKind, T::Item)> {
+ let item = self.base.next()?;
+ let chr = item.get_char();
+ let mut char_kind = FullCodeCharKind::Normal;
+ self.status = match self.status {
+ CharClassesStatus::LitRawString(sharps) => {
+ char_kind = FullCodeCharKind::InString;
+ match chr {
+ '"' => {
+ if sharps == 0 {
+ char_kind = FullCodeCharKind::Normal;
+ CharClassesStatus::Normal
+ } else if is_raw_string_suffix(&mut self.base, sharps) {
+ CharClassesStatus::RawStringSuffix(sharps)
+ } else {
+ CharClassesStatus::LitRawString(sharps)
+ }
+ }
+ _ => CharClassesStatus::LitRawString(sharps),
+ }
+ }
+ CharClassesStatus::RawStringPrefix(sharps) => {
+ char_kind = FullCodeCharKind::InString;
+ match chr {
+ '#' => CharClassesStatus::RawStringPrefix(sharps + 1),
+ '"' => CharClassesStatus::LitRawString(sharps),
+ _ => CharClassesStatus::Normal, // Unreachable.
+ }
+ }
+ CharClassesStatus::RawStringSuffix(sharps) => {
+ match chr {
+ '#' => {
+ if sharps == 1 {
+ CharClassesStatus::Normal
+ } else {
+ char_kind = FullCodeCharKind::InString;
+ CharClassesStatus::RawStringSuffix(sharps - 1)
+ }
+ }
+ _ => CharClassesStatus::Normal, // Unreachable
+ }
+ }
+ CharClassesStatus::LitString => {
+ char_kind = FullCodeCharKind::InString;
+ match chr {
+ '"' => CharClassesStatus::Normal,
+ '\\' => CharClassesStatus::LitStringEscape,
+ _ => CharClassesStatus::LitString,
+ }
+ }
+ CharClassesStatus::LitStringEscape => {
+ char_kind = FullCodeCharKind::InString;
+ CharClassesStatus::LitString
+ }
+ CharClassesStatus::LitChar => match chr {
+ '\\' => CharClassesStatus::LitCharEscape,
+ '\'' => CharClassesStatus::Normal,
+ _ => CharClassesStatus::LitChar,
+ },
+ CharClassesStatus::LitCharEscape => CharClassesStatus::LitChar,
+ CharClassesStatus::Normal => match chr {
+ 'r' => match self.base.peek().map(RichChar::get_char) {
+ Some('#') | Some('"') => {
+ char_kind = FullCodeCharKind::InString;
+ CharClassesStatus::RawStringPrefix(0)
+ }
+ _ => CharClassesStatus::Normal,
+ },
+ '"' => {
+ char_kind = FullCodeCharKind::InString;
+ CharClassesStatus::LitString
+ }
+ '\'' => {
+ // HACK: Work around mut borrow.
+ match self.base.peek() {
+ Some(next) if next.get_char() == '\\' => {
+ self.status = CharClassesStatus::LitChar;
+ return Some((char_kind, item));
+ }
+ _ => (),
+ }
+
+ match self.base.peek() {
+ Some(next) if next.get_char() == '\'' => CharClassesStatus::LitChar,
+ _ => CharClassesStatus::Normal,
+ }
+ }
+ '/' => match self.base.peek() {
+ Some(next) if next.get_char() == '*' => {
+ self.status = CharClassesStatus::BlockCommentOpening(1);
+ return Some((FullCodeCharKind::StartComment, item));
+ }
+ Some(next) if next.get_char() == '/' => {
+ self.status = CharClassesStatus::LineComment;
+ return Some((FullCodeCharKind::StartComment, item));
+ }
+ _ => CharClassesStatus::Normal,
+ },
+ _ => CharClassesStatus::Normal,
+ },
+ CharClassesStatus::StringInBlockComment(deepness) => {
+ char_kind = FullCodeCharKind::InStringCommented;
+ if chr == '"' {
+ CharClassesStatus::BlockComment(deepness)
+ } else if chr == '*' && self.base.peek().map(RichChar::get_char) == Some('/') {
+ char_kind = FullCodeCharKind::InComment;
+ CharClassesStatus::BlockCommentClosing(deepness - 1)
+ } else {
+ CharClassesStatus::StringInBlockComment(deepness)
+ }
+ }
+ CharClassesStatus::BlockComment(deepness) => {
+ assert_ne!(deepness, 0);
+ char_kind = FullCodeCharKind::InComment;
+ match self.base.peek() {
+ Some(next) if next.get_char() == '/' && chr == '*' => {
+ CharClassesStatus::BlockCommentClosing(deepness - 1)
+ }
+ Some(next) if next.get_char() == '*' && chr == '/' => {
+ CharClassesStatus::BlockCommentOpening(deepness + 1)
+ }
+ _ if chr == '"' => CharClassesStatus::StringInBlockComment(deepness),
+ _ => self.status,
+ }
+ }
+ CharClassesStatus::BlockCommentOpening(deepness) => {
+ assert_eq!(chr, '*');
+ self.status = CharClassesStatus::BlockComment(deepness);
+ return Some((FullCodeCharKind::InComment, item));
+ }
+ CharClassesStatus::BlockCommentClosing(deepness) => {
+ assert_eq!(chr, '/');
+ if deepness == 0 {
+ self.status = CharClassesStatus::Normal;
+ return Some((FullCodeCharKind::EndComment, item));
+ } else {
+ self.status = CharClassesStatus::BlockComment(deepness);
+ return Some((FullCodeCharKind::InComment, item));
+ }
+ }
+ CharClassesStatus::LineComment => match chr {
+ '\n' => {
+ self.status = CharClassesStatus::Normal;
+ return Some((FullCodeCharKind::EndComment, item));
+ }
+ _ => {
+ self.status = CharClassesStatus::LineComment;
+ return Some((FullCodeCharKind::InComment, item));
+ }
+ },
+ };
+ Some((char_kind, item))
+ }
+}
+
+/// An iterator over the lines of a string, paired with the char kind at the
+/// end of the line.
+pub(crate) struct LineClasses<'a> {
+ base: iter::Peekable<CharClasses<std::str::Chars<'a>>>,
+ kind: FullCodeCharKind,
+}
+
+impl<'a> LineClasses<'a> {
+ pub(crate) fn new(s: &'a str) -> Self {
+ LineClasses {
+ base: CharClasses::new(s.chars()).peekable(),
+ kind: FullCodeCharKind::Normal,
+ }
+ }
+}
+
+impl<'a> Iterator for LineClasses<'a> {
+ type Item = (FullCodeCharKind, String);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.base.peek()?;
+
+ let mut line = String::new();
+
+ let start_kind = match self.base.peek() {
+ Some((kind, _)) => *kind,
+ None => unreachable!(),
+ };
+
+ for (kind, c) in self.base.by_ref() {
+ // needed to set the kind of the ending character on the last line
+ self.kind = kind;
+ if c == '\n' {
+ self.kind = match (start_kind, kind) {
+ (FullCodeCharKind::Normal, FullCodeCharKind::InString) => {
+ FullCodeCharKind::StartString
+ }
+ (FullCodeCharKind::InString, FullCodeCharKind::Normal) => {
+ FullCodeCharKind::EndString
+ }
+ (FullCodeCharKind::InComment, FullCodeCharKind::InStringCommented) => {
+ FullCodeCharKind::StartStringCommented
+ }
+ (FullCodeCharKind::InStringCommented, FullCodeCharKind::InComment) => {
+ FullCodeCharKind::EndStringCommented
+ }
+ _ => kind,
+ };
+ break;
+ }
+ line.push(c);
+ }
+
+ // Workaround for CRLF newline.
+ if line.ends_with('\r') {
+ line.pop();
+ }
+
+ Some((self.kind, line))
+ }
+}
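+
+// Usage sketch (illustrative): walk a snippet line by line while tracking the
+// char kind at each line's end.
+//
+//     for (kind, line) in LineClasses::new("let x = 1; // done\nlet y = 2;") {
+//         // the first line ends inside a line comment (EndComment),
+//         // the second ends in plain code (Normal)
+//         println!("{:?} {}", kind, line);
+//     }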
+
+/// Iterator over functional and commented parts of a string. Each part of the string is either
+/// functional code, exactly *one* block comment, or exactly *one* line comment. Whitespace between
+/// comments is functional code. Line comments contain their ending newlines.
+struct UngroupedCommentCodeSlices<'a> {
+ slice: &'a str,
+ iter: iter::Peekable<CharClasses<std::str::CharIndices<'a>>>,
+}
+
+impl<'a> UngroupedCommentCodeSlices<'a> {
+ fn new(code: &'a str) -> UngroupedCommentCodeSlices<'a> {
+ UngroupedCommentCodeSlices {
+ slice: code,
+ iter: CharClasses::new(code.char_indices()).peekable(),
+ }
+ }
+}
+
+impl<'a> Iterator for UngroupedCommentCodeSlices<'a> {
+ type Item = (CodeCharKind, usize, &'a str);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let (kind, (start_idx, _)) = self.iter.next()?;
+ match kind {
+ FullCodeCharKind::Normal | FullCodeCharKind::InString => {
+ // Consume all the Normal code
+ while let Some(&(char_kind, _)) = self.iter.peek() {
+ if char_kind.is_comment() {
+ break;
+ }
+ let _ = self.iter.next();
+ }
+ }
+ FullCodeCharKind::StartComment => {
+ // Consume the whole comment
+ loop {
+ match self.iter.next() {
+ Some((kind, ..)) if kind.inside_comment() => continue,
+ _ => break,
+ }
+ }
+ }
+ _ => panic!(),
+ }
+ let slice = match self.iter.peek() {
+ Some(&(_, (end_idx, _))) => &self.slice[start_idx..end_idx],
+ None => &self.slice[start_idx..],
+ };
+ Some((
+ if kind.is_comment() {
+ CodeCharKind::Comment
+ } else {
+ CodeCharKind::Normal
+ },
+ start_idx,
+ slice,
+ ))
+ }
+}
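+
+// Example (illustrative): for the input `a(); /* c */ b();` this yields
+// (Normal, 0, "a(); "), then (Comment, 5, "/* c */"), then (Normal, 12, " b();").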
+
+/// Iterator over an alternating sequence of functional and commented parts of
+/// a string. The first item is always a (possibly zero-length) subslice of
+/// functional text. Line-style comments contain their ending newlines.
+pub(crate) struct CommentCodeSlices<'a> {
+ slice: &'a str,
+ last_slice_kind: CodeCharKind,
+ last_slice_end: usize,
+}
+
+impl<'a> CommentCodeSlices<'a> {
+ pub(crate) fn new(slice: &'a str) -> CommentCodeSlices<'a> {
+ CommentCodeSlices {
+ slice,
+ last_slice_kind: CodeCharKind::Comment,
+ last_slice_end: 0,
+ }
+ }
+}
+
+impl<'a> Iterator for CommentCodeSlices<'a> {
+ type Item = (CodeCharKind, usize, &'a str);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.last_slice_end == self.slice.len() {
+ return None;
+ }
+
+ let mut sub_slice_end = self.last_slice_end;
+ let mut first_whitespace = None;
+ let subslice = &self.slice[self.last_slice_end..];
+ let mut iter = CharClasses::new(subslice.char_indices());
+
+ for (kind, (i, c)) in &mut iter {
+ let is_comment_connector = self.last_slice_kind == CodeCharKind::Normal
+ && &subslice[..2] == "//"
+ && [' ', '\t'].contains(&c);
+
+ if is_comment_connector && first_whitespace.is_none() {
+ first_whitespace = Some(i);
+ }
+
+ if kind.to_codecharkind() == self.last_slice_kind && !is_comment_connector {
+ let last_index = match first_whitespace {
+ Some(j) => j,
+ None => i,
+ };
+ sub_slice_end = self.last_slice_end + last_index;
+ break;
+ }
+
+ if !is_comment_connector {
+ first_whitespace = None;
+ }
+ }
+
+ if let (None, true) = (iter.next(), sub_slice_end == self.last_slice_end) {
+ // This was the last subslice.
+ sub_slice_end = match first_whitespace {
+ Some(i) => self.last_slice_end + i,
+ None => self.slice.len(),
+ };
+ }
+
+ let kind = match self.last_slice_kind {
+ CodeCharKind::Comment => CodeCharKind::Normal,
+ CodeCharKind::Normal => CodeCharKind::Comment,
+ };
+ let res = (
+ kind,
+ self.last_slice_end,
+ &self.slice[self.last_slice_end..sub_slice_end],
+ );
+ self.last_slice_end = sub_slice_end;
+ self.last_slice_kind = kind;
+
+ Some(res)
+ }
+}
+
+/// Checks whether `new` is missing any comment that was present in the original `span`; if a
+/// comment was dropped, returns the original snippet, otherwise returns `new`.
+pub(crate) fn recover_comment_removed(
+ new: String,
+ span: Span,
+ context: &RewriteContext<'_>,
+) -> Option<String> {
+ let snippet = context.snippet(span);
+ if snippet != new && changed_comment_content(snippet, &new) {
+ // We missed some comments. Warn and keep the original text.
+ if context.config.error_on_unformatted() {
+ context.report.append(
+ context.parse_sess.span_to_filename(span),
+ vec![FormattingError::from_span(
+ span,
+ context.parse_sess,
+ ErrorKind::LostComment,
+ )],
+ );
+ }
+ Some(snippet.to_owned())
+ } else {
+ Some(new)
+ }
+}
+
+pub(crate) fn filter_normal_code(code: &str) -> String {
+ let mut buffer = String::with_capacity(code.len());
+ LineClasses::new(code).for_each(|(kind, line)| match kind {
+ FullCodeCharKind::Normal
+ | FullCodeCharKind::StartString
+ | FullCodeCharKind::InString
+ | FullCodeCharKind::EndString => {
+ buffer.push_str(&line);
+ buffer.push('\n');
+ }
+ _ => (),
+ });
+ if !code.ends_with('\n') && buffer.ends_with('\n') {
+ buffer.pop();
+ }
+ buffer
+}
+
+/// Returns `true` if the two strings of code have the same payload of comments.
+/// The payload of comments is everything in the string except:
+/// - actual code (not comments),
+/// - comment start/end marks,
+/// - whitespace,
+/// - '*' at the beginning of lines in block comments.
+fn changed_comment_content(orig: &str, new: &str) -> bool {
+ // Cannot write this as a fn since we cannot return types containing closures.
+ let code_comment_content = |code| {
+ let slices = UngroupedCommentCodeSlices::new(code);
+ slices
+ .filter(|&(ref kind, _, _)| *kind == CodeCharKind::Comment)
+ .flat_map(|(_, _, s)| CommentReducer::new(s))
+ };
+ let res = code_comment_content(orig).ne(code_comment_content(new));
+ debug!(
+ "comment::changed_comment_content: {}\norig: '{}'\nnew: '{}'\nraw_old: {}\nraw_new: {}",
+ res,
+ orig,
+ new,
+ code_comment_content(orig).collect::<String>(),
+ code_comment_content(new).collect::<String>()
+ );
+ res
+}
+
+/// Iterator over the 'payload' characters of a comment.
+/// It skips whitespace, comment start/end marks, and '*' at the beginning of lines.
+/// The input must be a single comment, i.e. contain no more than one start mark (it cannot be,
+/// for example, several consecutive line comments).
+struct CommentReducer<'a> {
+ is_block: bool,
+ at_start_line: bool,
+ iter: std::str::Chars<'a>,
+}
+
+impl<'a> CommentReducer<'a> {
+ fn new(comment: &'a str) -> CommentReducer<'a> {
+ let is_block = comment.starts_with("/*");
+ let comment = remove_comment_header(comment);
+ CommentReducer {
+ is_block,
+ // There are no supplementary '*' on the first line.
+ at_start_line: false,
+ iter: comment.chars(),
+ }
+ }
+}
+
+impl<'a> Iterator for CommentReducer<'a> {
+ type Item = char;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ let mut c = self.iter.next()?;
+ if self.is_block && self.at_start_line {
+ while c.is_whitespace() {
+ c = self.iter.next()?;
+ }
+ // Ignore leading '*'.
+ if c == '*' {
+ c = self.iter.next()?;
+ }
+ } else if c == '\n' {
+ self.at_start_line = true;
+ }
+ if !c.is_whitespace() {
+ return Some(c);
+ }
+ }
+ }
+}
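+
+// Usage sketch (illustrative): the reducer strips delimiters, leading '*'s and
+// whitespace, so comments with the same payload compare equal regardless of style:
+//
+//     let block: String = CommentReducer::new("/* foo\n * bar */").collect();
+//     let line: String = CommentReducer::new("// foo bar").collect();
+//     assert_eq!(block, "foobar");
+//     assert_eq!(block, line);
+//
+// This is exactly the property `changed_comment_content` relies on.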
+
+fn remove_comment_header(comment: &str) -> &str {
+ if comment.starts_with("///") || comment.starts_with("//!") {
+ &comment[3..]
+ } else if let Some(stripped) = comment.strip_prefix("//") {
+ stripped
+ } else if (comment.starts_with("/**") && !comment.starts_with("/**/"))
+ || comment.starts_with("/*!")
+ {
+ &comment[3..comment.len() - 2]
+ } else {
+ assert!(
+ comment.starts_with("/*"),
+ "string '{}' is not a comment",
+ comment
+ );
+ &comment[2..comment.len() - 2]
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::shape::{Indent, Shape};
+
+ #[test]
+ fn char_classes() {
+ let mut iter = CharClasses::new("//\n\n".chars());
+
+ assert_eq!((FullCodeCharKind::StartComment, '/'), iter.next().unwrap());
+ assert_eq!((FullCodeCharKind::InComment, '/'), iter.next().unwrap());
+ assert_eq!((FullCodeCharKind::EndComment, '\n'), iter.next().unwrap());
+ assert_eq!((FullCodeCharKind::Normal, '\n'), iter.next().unwrap());
+ assert_eq!(None, iter.next());
+ }
+
+ #[test]
+ fn comment_code_slices() {
+ let input = "code(); /* test */ 1 + 1";
+ let mut iter = CommentCodeSlices::new(input);
+
+ assert_eq!((CodeCharKind::Normal, 0, "code(); "), iter.next().unwrap());
+ assert_eq!(
+ (CodeCharKind::Comment, 8, "/* test */"),
+ iter.next().unwrap()
+ );
+ assert_eq!((CodeCharKind::Normal, 18, " 1 + 1"), iter.next().unwrap());
+ assert_eq!(None, iter.next());
+ }
+
+ #[test]
+ fn comment_code_slices_two() {
+ let input = "// comment\n test();";
+ let mut iter = CommentCodeSlices::new(input);
+
+ assert_eq!((CodeCharKind::Normal, 0, ""), iter.next().unwrap());
+ assert_eq!(
+ (CodeCharKind::Comment, 0, "// comment\n"),
+ iter.next().unwrap()
+ );
+ assert_eq!(
+ (CodeCharKind::Normal, 11, " test();"),
+ iter.next().unwrap()
+ );
+ assert_eq!(None, iter.next());
+ }
+
+ #[test]
+ fn comment_code_slices_three() {
+ let input = "1 // comment\n // comment2\n\n";
+ let mut iter = CommentCodeSlices::new(input);
+
+ assert_eq!((CodeCharKind::Normal, 0, "1 "), iter.next().unwrap());
+ assert_eq!(
+ (CodeCharKind::Comment, 2, "// comment\n // comment2\n"),
+ iter.next().unwrap()
+ );
+ assert_eq!((CodeCharKind::Normal, 29, "\n"), iter.next().unwrap());
+ assert_eq!(None, iter.next());
+ }
+
+ #[test]
+ #[rustfmt::skip]
+ fn format_doc_comments() {
+ let mut wrap_normalize_config: crate::config::Config = Default::default();
+ wrap_normalize_config.set().wrap_comments(true);
+ wrap_normalize_config.set().normalize_comments(true);
+
+ let mut wrap_config: crate::config::Config = Default::default();
+ wrap_config.set().wrap_comments(true);
+
+ let comment = rewrite_comment(" //test",
+ true,
+ Shape::legacy(100, Indent::new(0, 100)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("/* test */", comment);
+
+ let comment = rewrite_comment("// comment on a",
+ false,
+ Shape::legacy(10, Indent::empty()),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("// comment\n// on a", comment);
+
+ let comment = rewrite_comment("// A multi line comment\n // between args.",
+ false,
+ Shape::legacy(60, Indent::new(0, 12)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("// A multi line comment\n // between args.", comment);
+
+ let input = "// comment";
+ let expected =
+ "/* comment */";
+ let comment = rewrite_comment(input,
+ true,
+ Shape::legacy(9, Indent::new(0, 69)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!(expected, comment);
+
+ let comment = rewrite_comment("/* trimmed */",
+ true,
+ Shape::legacy(100, Indent::new(0, 100)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("/* trimmed */", comment);
+
+ // Check that different comment styles are properly recognised.
+ let comment = rewrite_comment(r#"/// test1
+ /// test2
+ /*
+ * test3
+ */"#,
+ false,
+ Shape::legacy(100, Indent::new(0, 0)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("/// test1\n/// test2\n// test3", comment);
+
+ // Check that the blank line marks the end of a commented paragraph.
+ let comment = rewrite_comment(r#"// test1
+
+ // test2"#,
+ false,
+ Shape::legacy(100, Indent::new(0, 0)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("// test1\n\n// test2", comment);
+
+ // Check that the blank line marks the end of a custom-commented paragraph.
+ let comment = rewrite_comment(r#"//@ test1
+
+ //@ test2"#,
+ false,
+ Shape::legacy(100, Indent::new(0, 0)),
+ &wrap_normalize_config).unwrap();
+ assert_eq!("//@ test1\n\n//@ test2", comment);
+
+ // Check that bare lines are just indented but otherwise left unchanged.
+ let comment = rewrite_comment(r#"// test1
+ /*
+ a bare line!
+
+ another bare line!
+ */"#,
+ false,
+ Shape::legacy(100, Indent::new(0, 0)),
+ &wrap_config).unwrap();
+ assert_eq!("// test1\n/*\n a bare line!\n\n another bare line!\n*/", comment);
+ }
+
+ // This is probably intended as a non-test helper, but it is currently unused.
+ // Keep it around while it is useful for writing tests; otherwise it can be removed.
+ fn uncommented(text: &str) -> String {
+ CharClasses::new(text.chars())
+ .filter_map(|(s, c)| match s {
+ FullCodeCharKind::Normal | FullCodeCharKind::InString => Some(c),
+ _ => None,
+ })
+ .collect()
+ }
+
+ #[test]
+ fn test_uncommented() {
+ assert_eq!(&uncommented("abc/*...*/"), "abc");
+ assert_eq!(
+ &uncommented("// .... /* \n../* /* *** / */ */a/* // */c\n"),
+ "..ac\n"
+ );
+ assert_eq!(&uncommented("abc \" /* */\" qsdf"), "abc \" /* */\" qsdf");
+ }
+
+ #[test]
+ fn test_contains_comment() {
+ assert_eq!(contains_comment("abc"), false);
+ assert_eq!(contains_comment("abc // qsdf"), true);
+ assert_eq!(contains_comment("abc /* kqsdf"), true);
+ assert_eq!(contains_comment("abc \" /* */\" qsdf"), false);
+ }
+
+ #[test]
+ fn test_find_uncommented() {
+ fn check(haystack: &str, needle: &str, expected: Option<usize>) {
+ assert_eq!(expected, haystack.find_uncommented(needle));
+ }
+
+ check("/*/ */test", "test", Some(6));
+ check("//test\ntest", "test", Some(7));
+ check("/* comment only */", "whatever", None);
+ check(
+ "/* comment */ some text /* more commentary */ result",
+ "result",
+ Some(46),
+ );
+ check("sup // sup", "p", Some(2));
+ check("sup", "x", None);
+ check(r#"π? /**/ π is nice!"#, r#"π is nice"#, Some(9));
+ check("/*sup yo? \n sup*/ sup", "p", Some(20));
+ check("hel/*lohello*/lo", "hello", None);
+ check("acb", "ab", None);
+ check(",/*A*/ ", ",", Some(0));
+ check("abc", "abc", Some(0));
+ check("/* abc */", "abc", None);
+ check("/**/abc/* */", "abc", Some(4));
+ check("\"/* abc */\"", "abc", Some(4));
+ check("\"/* abc", "abc", Some(4));
+ }
+
+ #[test]
+ fn test_filter_normal_code() {
+ let s = r#"
+fn main() {
+ println!("hello, world");
+}
+"#;
+ assert_eq!(s, filter_normal_code(s));
+ let s_with_comment = r#"
+fn main() {
+ // hello, world
+ println!("hello, world");
+}
+"#;
+ assert_eq!(s, filter_normal_code(s_with_comment));
+ }
+}
diff --git a/src/tools/rustfmt/src/config/config_type.rs b/src/tools/rustfmt/src/config/config_type.rs
new file mode 100644
index 000000000..e37ed798c
--- /dev/null
+++ b/src/tools/rustfmt/src/config/config_type.rs
@@ -0,0 +1,426 @@
+use crate::config::file_lines::FileLines;
+use crate::config::options::{IgnoreList, WidthHeuristics};
+
+/// Trait for types that can be used in `Config`.
+pub(crate) trait ConfigType: Sized {
+ /// Returns hint text for use in `Config::print_docs()`. For enum types, this is a
+ /// pipe-separated list of variants; for other types it returns "<type>".
+ fn doc_hint() -> String;
+}
+
+impl ConfigType for bool {
+ fn doc_hint() -> String {
+ String::from("<boolean>")
+ }
+}
+
+impl ConfigType for usize {
+ fn doc_hint() -> String {
+ String::from("<unsigned integer>")
+ }
+}
+
+impl ConfigType for isize {
+ fn doc_hint() -> String {
+ String::from("<signed integer>")
+ }
+}
+
+impl ConfigType for String {
+ fn doc_hint() -> String {
+ String::from("<string>")
+ }
+}
+
+impl ConfigType for FileLines {
+ fn doc_hint() -> String {
+ String::from("<json>")
+ }
+}
+
+impl ConfigType for WidthHeuristics {
+ fn doc_hint() -> String {
+ String::new()
+ }
+}
+
+impl ConfigType for IgnoreList {
+ fn doc_hint() -> String {
+ String::from("[<string>,..]")
+ }
+}
+
+macro_rules! create_config {
+ ($($i:ident: $ty:ty, $def:expr, $stb:expr, $( $dstring:expr ),+ );+ $(;)*) => (
+ #[cfg(test)]
+ use std::collections::HashSet;
+ use std::io::Write;
+
+ use serde::{Deserialize, Serialize};
+
+ #[derive(Clone)]
+ #[allow(unreachable_pub)]
+ pub struct Config {
+ // For each config item we store a 4-tuple: a `Cell<bool>` recording whether
+ // the option has been accessed, a bool recording whether it was explicitly
+ // set (rather than taken from the default), the value itself, and a bool
+ // recording whether the option is stable.
+ $($i: (Cell<bool>, bool, $ty, bool)),+
+ }
+
+ // Just like the Config struct but with each property wrapped
+ // as Option<T>. This is used to parse a rustfmt.toml that doesn't
+ // specify all properties of `Config`.
+ // We first parse into `PartialConfig`, then create a default `Config`
+ // and overwrite the properties with corresponding values from `PartialConfig`.
+ #[derive(Deserialize, Serialize, Clone)]
+ #[allow(unreachable_pub)]
+ pub struct PartialConfig {
+ $(pub $i: Option<$ty>),+
+ }
+
+ // Macro hygiene won't allow us to make `set_$i()` methods on Config
+ // for each item, so this struct is used to give the API to set values:
+ // `config.set().option(false)`. It's pretty ugly. Consider replacing
+ // with `config.set_option(false)` if we ever get a stable/usable
+ // `concat_idents!()`.
+ #[allow(unreachable_pub)]
+ pub struct ConfigSetter<'a>(&'a mut Config);
+
+ impl<'a> ConfigSetter<'a> {
+ $(
+ #[allow(unreachable_pub)]
+ pub fn $i(&mut self, value: $ty) {
+ (self.0).$i.2 = value;
+ match stringify!($i) {
+ "max_width"
+ | "use_small_heuristics"
+ | "fn_call_width"
+ | "single_line_if_else_max_width"
+ | "attr_fn_like_width"
+ | "struct_lit_width"
+ | "struct_variant_width"
+ | "array_width"
+ | "chain_width" => self.0.set_heuristics(),
+ "merge_imports" => self.0.set_merge_imports(),
+ &_ => (),
+ }
+ }
+ )+
+ }
+
+ // Queries each option: returns `true` if the user set the option and `false`
+ // if its default was used.
+ #[allow(unreachable_pub)]
+ pub struct ConfigWasSet<'a>(&'a Config);
+
+ impl<'a> ConfigWasSet<'a> {
+ $(
+ #[allow(unreachable_pub)]
+ pub fn $i(&self) -> bool {
+ (self.0).$i.1
+ }
+ )+
+ }
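+
+ // Usage sketch (illustrative): values are read through the generated getters
+ // and mutated through `ConfigSetter`. Note that `was_set()` reflects whether
+ // an option came from a parsed or overridden config, not from this setter.
+ //
+ //     let mut config = Config::default();
+ //     config.set().max_width(120);
+ //     assert_eq!(config.max_width(), 120);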
+
+ impl Config {
+ $(
+ #[allow(unreachable_pub)]
+ pub fn $i(&self) -> $ty {
+ self.$i.0.set(true);
+ self.$i.2.clone()
+ }
+ )+
+
+ #[allow(unreachable_pub)]
+ pub fn set(&mut self) -> ConfigSetter<'_> {
+ ConfigSetter(self)
+ }
+
+ #[allow(unreachable_pub)]
+ pub fn was_set(&self) -> ConfigWasSet<'_> {
+ ConfigWasSet(self)
+ }
+
+ fn fill_from_parsed_config(mut self, parsed: PartialConfig, dir: &Path) -> Config {
+ $(
+ if let Some(val) = parsed.$i {
+ if self.$i.3 {
+ self.$i.1 = true;
+ self.$i.2 = val;
+ } else {
+ if crate::is_nightly_channel!() {
+ self.$i.1 = true;
+ self.$i.2 = val;
+ } else {
+ eprintln!("Warning: can't set `{} = {:?}`, unstable features are only \
+ available in nightly channel.", stringify!($i), val);
+ }
+ }
+ }
+ )+
+ self.set_heuristics();
+ self.set_ignore(dir);
+ self.set_merge_imports();
+ self
+ }
+
+ /// Returns a hash set initialized with every user-facing config option name.
+ #[cfg(test)]
+ pub(crate) fn hash_set() -> HashSet<String> {
+ let mut hash_set = HashSet::new();
+ $(
+ hash_set.insert(stringify!($i).to_owned());
+ )+
+ hash_set
+ }
+
+ pub(crate) fn is_valid_name(name: &str) -> bool {
+ match name {
+ $(
+ stringify!($i) => true,
+ )+
+ _ => false,
+ }
+ }
+
+ #[allow(unreachable_pub)]
+ pub fn is_valid_key_val(key: &str, val: &str) -> bool {
+ match key {
+ $(
+ stringify!($i) => val.parse::<$ty>().is_ok(),
+ )+
+ _ => false,
+ }
+ }
+
+ #[allow(unreachable_pub)]
+ pub fn used_options(&self) -> PartialConfig {
+ PartialConfig {
+ $(
+ $i: if self.$i.0.get() {
+ Some(self.$i.2.clone())
+ } else {
+ None
+ },
+ )+
+ }
+ }
+
+ #[allow(unreachable_pub)]
+ pub fn all_options(&self) -> PartialConfig {
+ PartialConfig {
+ $(
+ $i: Some(self.$i.2.clone()),
+ )+
+ }
+ }
+
+ #[allow(unreachable_pub)]
+ pub fn override_value(&mut self, key: &str, val: &str)
+ {
+ match key {
+ $(
+ stringify!($i) => {
+ self.$i.1 = true;
+ self.$i.2 = val.parse::<$ty>()
+ .expect(&format!("Failed to parse override for {} (\"{}\") as a {}",
+ stringify!($i),
+ val,
+ stringify!($ty)));
+ }
+ )+
+ _ => panic!("Unknown config key in override: {}", key)
+ }
+
+ match key {
+ "max_width"
+ | "use_small_heuristics"
+ | "fn_call_width"
+ | "single_line_if_else_max_width"
+ | "attr_fn_like_width"
+ | "struct_lit_width"
+ | "struct_variant_width"
+ | "array_width"
+ | "chain_width" => self.set_heuristics(),
+ "merge_imports" => self.set_merge_imports(),
+ &_ => (),
+ }
+ }
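+
+ // Usage sketch (illustrative): `override_value` is the entry point for
+ // ad-hoc `key=value` overrides (for example from command-line options or
+ // in tests):
+ //
+ //     config.override_value("max_width", "120");
+ //     assert_eq!(config.max_width(), 120);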
+
+ #[allow(unreachable_pub)]
+ pub fn is_hidden_option(name: &str) -> bool {
+ const HIDE_OPTIONS: [&str; 5] =
+ ["verbose", "verbose_diff", "file_lines", "width_heuristics", "merge_imports"];
+ HIDE_OPTIONS.contains(&name)
+ }
+
+ #[allow(unreachable_pub)]
+ pub fn print_docs(out: &mut dyn Write, include_unstable: bool) {
+ use std::cmp;
+ let max = 0;
+ $( let max = cmp::max(max, stringify!($i).len()+1); )+
+ let space_str = " ".repeat(max);
+ writeln!(out, "Configuration Options:").unwrap();
+ $(
+ if $stb || include_unstable {
+ let name_raw = stringify!($i);
+
+ if !Config::is_hidden_option(name_raw) {
+ let mut name_out = String::with_capacity(max);
+ for _ in name_raw.len()..max-1 {
+ name_out.push(' ')
+ }
+ name_out.push_str(name_raw);
+ name_out.push(' ');
+ let mut default_str = format!("{}", $def);
+ if default_str.is_empty() {
+ default_str = String::from("\"\"");
+ }
+ writeln!(out,
+ "{}{} Default: {}{}",
+ name_out,
+ <$ty>::doc_hint(),
+ default_str,
+ if !$stb { " (unstable)" } else { "" }).unwrap();
+ $(
+ writeln!(out, "{}{}", space_str, $dstring).unwrap();
+ )+
+ writeln!(out).unwrap();
+ }
+ }
+ )+
+ }
+
+ fn set_width_heuristics(&mut self, heuristics: WidthHeuristics) {
+ let max_width = self.max_width.2;
+ let get_width_value = |
+ was_set: bool,
+ override_value: usize,
+ heuristic_value: usize,
+ config_key: &str,
+ | -> usize {
+ if !was_set {
+ return heuristic_value;
+ }
+ if override_value > max_width {
+ eprintln!(
+ "`{0}` cannot have a value that exceeds `max_width`. \
+ `{0}` will be set to the same value as `max_width`",
+ config_key,
+ );
+ return max_width;
+ }
+ override_value
+ };
+
+ let fn_call_width = get_width_value(
+ self.was_set().fn_call_width(),
+ self.fn_call_width.2,
+ heuristics.fn_call_width,
+ "fn_call_width",
+ );
+ self.fn_call_width.2 = fn_call_width;
+
+ let attr_fn_like_width = get_width_value(
+ self.was_set().attr_fn_like_width(),
+ self.attr_fn_like_width.2,
+ heuristics.attr_fn_like_width,
+ "attr_fn_like_width",
+ );
+ self.attr_fn_like_width.2 = attr_fn_like_width;
+
+ let struct_lit_width = get_width_value(
+ self.was_set().struct_lit_width(),
+ self.struct_lit_width.2,
+ heuristics.struct_lit_width,
+ "struct_lit_width",
+ );
+ self.struct_lit_width.2 = struct_lit_width;
+
+ let struct_variant_width = get_width_value(
+ self.was_set().struct_variant_width(),
+ self.struct_variant_width.2,
+ heuristics.struct_variant_width,
+ "struct_variant_width",
+ );
+ self.struct_variant_width.2 = struct_variant_width;
+
+ let array_width = get_width_value(
+ self.was_set().array_width(),
+ self.array_width.2,
+ heuristics.array_width,
+ "array_width",
+ );
+ self.array_width.2 = array_width;
+
+ let chain_width = get_width_value(
+ self.was_set().chain_width(),
+ self.chain_width.2,
+ heuristics.chain_width,
+ "chain_width",
+ );
+ self.chain_width.2 = chain_width;
+
+ let single_line_if_else_max_width = get_width_value(
+ self.was_set().single_line_if_else_max_width(),
+ self.single_line_if_else_max_width.2,
+ heuristics.single_line_if_else_max_width,
+ "single_line_if_else_max_width",
+ );
+ self.single_line_if_else_max_width.2 = single_line_if_else_max_width;
+ }
+
+ fn set_heuristics(&mut self) {
+ let max_width = self.max_width.2;
+ match self.use_small_heuristics.2 {
+ Heuristics::Default =>
+ self.set_width_heuristics(WidthHeuristics::scaled(max_width)),
+ Heuristics::Max => self.set_width_heuristics(WidthHeuristics::set(max_width)),
+ Heuristics::Off => self.set_width_heuristics(WidthHeuristics::null()),
+ };
+ }
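+
+ // Illustration (hypothetical rustfmt.toml): the two options interact through
+ // `set_heuristics`, e.g.
+ //
+ //     max_width = 120
+ //     use_small_heuristics = "Max"
+ //
+ // routes through `WidthHeuristics::set` and lifts the width options
+ // (fn_call_width, chain_width, ...) up to `max_width`, while "Off" disables
+ // the 'small' special-casing altogether.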
+
+ fn set_ignore(&mut self, dir: &Path) {
+ self.ignore.2.add_prefix(dir);
+ }
+
+ fn set_merge_imports(&mut self) {
+ if self.was_set().merge_imports() {
+ eprintln!(
+ "Warning: the `merge_imports` option is deprecated. \
+ Use `imports_granularity=\"Crate\"` instead"
+ );
+ if !self.was_set().imports_granularity() {
+ self.imports_granularity.2 = if self.merge_imports() {
+ ImportGranularity::Crate
+ } else {
+ ImportGranularity::Preserve
+ };
+ }
+ }
+ }
+
+ #[allow(unreachable_pub)]
+ /// Returns `true` if the config key was explicitly set and is the default value.
+ pub fn is_default(&self, key: &str) -> bool {
+ $(
+ if let stringify!($i) = key {
+ return self.$i.1 && self.$i.2 == $def;
+ }
+ )+
+ false
+ }
+ }
+
+ // Template for the default configuration
+ impl Default for Config {
+ fn default() -> Config {
+ Config {
+ $(
+ $i: (Cell::new(false), false, $def, $stb),
+ )+
+ }
+ }
+ }
+ )
+}
diff --git a/src/tools/rustfmt/src/config/file_lines.rs b/src/tools/rustfmt/src/config/file_lines.rs
new file mode 100644
index 000000000..e4e51a3f3
--- /dev/null
+++ b/src/tools/rustfmt/src/config/file_lines.rs
@@ -0,0 +1,440 @@
+//! This module contains types and functions to support formatting specific line ranges.
+
+use itertools::Itertools;
+use std::collections::HashMap;
+use std::path::PathBuf;
+use std::{cmp, fmt, iter, str};
+
+use rustc_data_structures::sync::Lrc;
+use rustc_span::{self, SourceFile};
+use serde::{ser, Deserialize, Deserializer, Serialize, Serializer};
+use serde_json as json;
+use thiserror::Error;
+
+/// A range of lines in a file, inclusive of both ends.
+pub struct LineRange {
+ pub(crate) file: Lrc<SourceFile>,
+ pub(crate) lo: usize,
+ pub(crate) hi: usize,
+}
+
+/// Defines the name of an input - either a file or stdin.
+#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
+pub enum FileName {
+ Real(PathBuf),
+ Stdin,
+}
+
+impl From<rustc_span::FileName> for FileName {
+ fn from(name: rustc_span::FileName) -> FileName {
+ match name {
+ rustc_span::FileName::Real(rustc_span::RealFileName::LocalPath(p)) => FileName::Real(p),
+ rustc_span::FileName::Custom(ref f) if f == "stdin" => FileName::Stdin,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl fmt::Display for FileName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ FileName::Real(p) => write!(f, "{}", p.to_str().unwrap()),
+ FileName::Stdin => write!(f, "<stdin>"),
+ }
+ }
+}
+
+impl<'de> Deserialize<'de> for FileName {
+ fn deserialize<D>(deserializer: D) -> Result<FileName, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ if s == "stdin" {
+ Ok(FileName::Stdin)
+ } else {
+ Ok(FileName::Real(s.into()))
+ }
+ }
+}
+
+impl Serialize for FileName {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let s = match self {
+ FileName::Stdin => Ok("stdin"),
+ FileName::Real(path) => path
+ .to_str()
+ .ok_or_else(|| ser::Error::custom("path can't be serialized as UTF-8 string")),
+ };
+
+ s.and_then(|s| serializer.serialize_str(s))
+ }
+}
+
+impl LineRange {
+ pub(crate) fn file_name(&self) -> FileName {
+ self.file.name.clone().into()
+ }
+}
+
+/// A range that is inclusive of both ends.
+#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord, Deserialize)]
+pub struct Range {
+ lo: usize,
+ hi: usize,
+}
+
+impl<'a> From<&'a LineRange> for Range {
+ fn from(range: &'a LineRange) -> Range {
+ Range::new(range.lo, range.hi)
+ }
+}
+
+impl fmt::Display for Range {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}..{}", self.lo, self.hi)
+ }
+}
+
+impl Range {
+ pub fn new(lo: usize, hi: usize) -> Range {
+ Range { lo, hi }
+ }
+
+ fn is_empty(self) -> bool {
+ self.lo > self.hi
+ }
+
+ #[allow(dead_code)]
+ fn contains(self, other: Range) -> bool {
+ if other.is_empty() {
+ true
+ } else {
+ !self.is_empty() && self.lo <= other.lo && self.hi >= other.hi
+ }
+ }
+
+ fn intersects(self, other: Range) -> bool {
+ if self.is_empty() || other.is_empty() {
+ false
+ } else {
+ (self.lo <= other.hi && other.hi <= self.hi)
+ || (other.lo <= self.hi && self.hi <= other.hi)
+ }
+ }
+
+ fn adjacent_to(self, other: Range) -> bool {
+ if self.is_empty() || other.is_empty() {
+ false
+ } else {
+ self.hi + 1 == other.lo || other.hi + 1 == self.lo
+ }
+ }
+
+ /// Returns a new `Range` covering the lines of both `self` and `other` if they are adjacent or
+ /// intersect; returns `None` otherwise.
+ fn merge(self, other: Range) -> Option<Range> {
+ if self.adjacent_to(other) || self.intersects(other) {
+ Some(Range::new(
+ cmp::min(self.lo, other.lo),
+ cmp::max(self.hi, other.hi),
+ ))
+ } else {
+ None
+ }
+ }
+}
+
+/// A set of lines in files.
+///
+/// It is represented as a multimap keyed on file names, whose values are collections of
+/// non-overlapping ranges sorted by their start point. An inner `None` is interpreted to mean all
+/// lines in all files.
+#[derive(Clone, Debug, Default, PartialEq)]
+pub struct FileLines(Option<HashMap<FileName, Vec<Range>>>);
+
+impl fmt::Display for FileLines {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 {
+ None => write!(f, "None")?,
+ Some(map) => {
+ for (file_name, ranges) in map.iter() {
+ write!(f, "{}: ", file_name)?;
+ write!(f, "{}\n", ranges.iter().format(", "))?;
+ }
+ }
+ };
+ Ok(())
+ }
+}
+
+/// Normalizes the ranges so that the invariants for `FileLines` hold: ranges are non-overlapping,
+/// and ordered by their start point.
+fn normalize_ranges(ranges: &mut HashMap<FileName, Vec<Range>>) {
+ for ranges in ranges.values_mut() {
+ ranges.sort();
+ let mut result = vec![];
+ let mut iter = ranges.iter_mut().peekable();
+ while let Some(next) = iter.next() {
+ let mut next = *next;
+ while let Some(&&mut peek) = iter.peek() {
+ if let Some(merged) = next.merge(peek) {
+ iter.next().unwrap();
+ next = merged;
+ } else {
+ break;
+ }
+ }
+ result.push(next)
+ }
+ *ranges = result;
+ }
+}
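+
+// Sketch (illustrative): sorting then merging turns `[4-7, 1-3, 9-10]` into
+// `[1-7, 9-10]`, since `1-3` and `4-7` are adjacent:
+//
+//     let mut m = HashMap::new();
+//     m.insert(FileName::Stdin, vec![Range::new(4, 7), Range::new(1, 3), Range::new(9, 10)]);
+//     normalize_ranges(&mut m);
+//     // m[&FileName::Stdin] == vec![Range::new(1, 7), Range::new(9, 10)]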
+
+impl FileLines {
+ /// Creates a `FileLines` that contains all lines in all files.
+ pub(crate) fn all() -> FileLines {
+ FileLines(None)
+ }
+
+ /// Returns `true` if this `FileLines` contains all lines in all files.
+ pub(crate) fn is_all(&self) -> bool {
+ self.0.is_none()
+ }
+
+ pub fn from_ranges(mut ranges: HashMap<FileName, Vec<Range>>) -> FileLines {
+ normalize_ranges(&mut ranges);
+ FileLines(Some(ranges))
+ }
+
+ /// Returns an iterator over the files contained in `self`.
+ pub fn files(&self) -> Files<'_> {
+ Files(self.0.as_ref().map(HashMap::keys))
+ }
+
+ /// Returns JSON representation as accepted by the `--file-lines JSON` arg.
+ pub fn to_json_spans(&self) -> Vec<JsonSpan> {
+ match &self.0 {
+ None => vec![],
+ Some(file_ranges) => file_ranges
+ .iter()
+ .flat_map(|(file, ranges)| ranges.iter().map(move |r| (file, r)))
+ .map(|(file, range)| JsonSpan {
+ file: file.to_owned(),
+ range: (range.lo, range.hi),
+ })
+ .collect(),
+ }
+ }
+
+ /// Returns `true` if `self` includes all lines in all files. Otherwise runs `f` on every range
+ /// registered for the given file (if any) and returns `true` if `f` returns `true` for any of them.
+ fn file_range_matches<F>(&self, file_name: &FileName, f: F) -> bool
+ where
+ F: FnMut(&Range) -> bool,
+ {
+ let map = match self.0 {
+ // `None` means "all lines in all files".
+ None => return true,
+ Some(ref map) => map,
+ };
+
+ match canonicalize_path_string(file_name).and_then(|file| map.get(&file)) {
+ Some(ranges) => ranges.iter().any(f),
+ None => false,
+ }
+ }
+
+ /// Returns `true` if `range` is fully contained in `self`.
+ #[allow(dead_code)]
+ pub(crate) fn contains(&self, range: &LineRange) -> bool {
+ self.file_range_matches(&range.file_name(), |r| r.contains(Range::from(range)))
+ }
+
+ /// Returns `true` if any lines in `range` are in `self`.
+ pub(crate) fn intersects(&self, range: &LineRange) -> bool {
+ self.file_range_matches(&range.file_name(), |r| r.intersects(Range::from(range)))
+ }
+
+ /// Returns `true` if `line` from `file_name` is in `self`.
+ pub(crate) fn contains_line(&self, file_name: &FileName, line: usize) -> bool {
+ self.file_range_matches(file_name, |r| r.lo <= line && r.hi >= line)
+ }
+
+ /// Returns `true` if all the lines between `lo` and `hi` from `file_name` are in `self`.
+ pub(crate) fn contains_range(&self, file_name: &FileName, lo: usize, hi: usize) -> bool {
+ self.file_range_matches(file_name, |r| r.contains(Range::new(lo, hi)))
+ }
+}
+
+/// `FileLines` files iterator.
+pub struct Files<'a>(Option<::std::collections::hash_map::Keys<'a, FileName, Vec<Range>>>);
+
+impl<'a> iter::Iterator for Files<'a> {
+ type Item = &'a FileName;
+
+ fn next(&mut self) -> Option<&'a FileName> {
+ self.0.as_mut().and_then(Iterator::next)
+ }
+}
+
+fn canonicalize_path_string(file: &FileName) -> Option<FileName> {
+ match *file {
+ FileName::Real(ref path) => path.canonicalize().ok().map(FileName::Real),
+ _ => Some(file.clone()),
+ }
+}
+
+#[derive(Error, Debug)]
+pub enum FileLinesError {
+ #[error("{0}")]
+ Json(json::Error),
+ #[error("Can't canonicalize {0}")]
+ CannotCanonicalize(FileName),
+}
+
+// This impl is needed so that `Config::override_value` can be used, e.g. in tests.
+impl str::FromStr for FileLines {
+ type Err = FileLinesError;
+
+ fn from_str(s: &str) -> Result<FileLines, Self::Err> {
+ let v: Vec<JsonSpan> = json::from_str(s).map_err(FileLinesError::Json)?;
+ let mut m = HashMap::new();
+ for js in v {
+ let (s, r) = JsonSpan::into_tuple(js)?;
+ m.entry(s).or_insert_with(Vec::new).push(r);
+ }
+ Ok(FileLines::from_ranges(m))
+ }
+}
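+
+// Usage sketch (illustrative): this accepts the same JSON shape as the
+// `--file-lines` command-line option, e.g.
+//
+//     let fl: FileLines = r#"[{"file":"src/lib.rs","range":[7,13]}]"#
+//         .parse()
+//         .unwrap();
+//
+// restricts formatting to lines 7-13 (inclusive) of src/lib.rs. Note that the
+// path is canonicalized, so it must exist for parsing to succeed.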
+
+// For JSON decoding.
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Deserialize, Serialize)]
+pub struct JsonSpan {
+ file: FileName,
+ range: (usize, usize),
+}
+
+impl JsonSpan {
+ fn into_tuple(self) -> Result<(FileName, Range), FileLinesError> {
+ let (lo, hi) = self.range;
+ let canonical = canonicalize_path_string(&self.file)
+ .ok_or(FileLinesError::CannotCanonicalize(self.file))?;
+ Ok((canonical, Range::new(lo, hi)))
+ }
+}
+
+// This impl is only needed so that `FileLines` can be a field of `Config`. There is no toml
+// representation for `FileLines`, so deserializing one simply panics.
+impl<'de> ::serde::de::Deserialize<'de> for FileLines {
+ fn deserialize<D>(_: D) -> Result<Self, D::Error>
+ where
+ D: ::serde::de::Deserializer<'de>,
+ {
+ panic!(
+ "FileLines cannot be deserialized from a project rustfmt.toml file: please \
+ specify it via the `--file-lines` option instead"
+ );
+ }
+}
+
+// We also want to avoid attempting to serialize a FileLines to toml. The
+// `Config` struct should ensure this impl is never reached.
+impl ::serde::ser::Serialize for FileLines {
+ fn serialize<S>(&self, _: S) -> Result<S::Ok, S::Error>
+ where
+ S: ::serde::ser::Serializer,
+ {
+ unreachable!("FileLines cannot be serialized. This is a rustfmt bug.");
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::Range;
+
+ #[test]
+ fn test_range_intersects() {
+ assert!(Range::new(1, 2).intersects(Range::new(1, 1)));
+ assert!(Range::new(1, 2).intersects(Range::new(2, 2)));
+ assert!(!Range::new(1, 2).intersects(Range::new(0, 0)));
+ assert!(!Range::new(1, 2).intersects(Range::new(3, 10)));
+ assert!(!Range::new(1, 3).intersects(Range::new(5, 5)));
+ }
+
+ #[test]
+ fn test_range_adjacent_to() {
+ assert!(!Range::new(1, 2).adjacent_to(Range::new(1, 1)));
+ assert!(!Range::new(1, 2).adjacent_to(Range::new(2, 2)));
+ assert!(Range::new(1, 2).adjacent_to(Range::new(0, 0)));
+ assert!(Range::new(1, 2).adjacent_to(Range::new(3, 10)));
+ assert!(!Range::new(1, 3).adjacent_to(Range::new(5, 5)));
+ }
+
+ #[test]
+ fn test_range_contains() {
+ assert!(Range::new(1, 2).contains(Range::new(1, 1)));
+ assert!(Range::new(1, 2).contains(Range::new(2, 2)));
+ assert!(!Range::new(1, 2).contains(Range::new(0, 0)));
+ assert!(!Range::new(1, 2).contains(Range::new(3, 10)));
+ }
+
+ #[test]
+ fn test_range_merge() {
+ assert_eq!(None, Range::new(1, 3).merge(Range::new(5, 5)));
+ assert_eq!(None, Range::new(4, 7).merge(Range::new(0, 1)));
+ assert_eq!(
+ Some(Range::new(3, 7)),
+ Range::new(3, 5).merge(Range::new(4, 7))
+ );
+ assert_eq!(
+ Some(Range::new(3, 7)),
+ Range::new(3, 5).merge(Range::new(5, 7))
+ );
+ assert_eq!(
+ Some(Range::new(3, 7)),
+ Range::new(3, 5).merge(Range::new(6, 7))
+ );
+ assert_eq!(
+ Some(Range::new(3, 7)),
+ Range::new(3, 7).merge(Range::new(4, 5))
+ );
+ }
+
+ use super::json::{self, json};
+ use super::{FileLines, FileName};
+ use std::{collections::HashMap, path::PathBuf};
+
+ #[test]
+ fn file_lines_to_json() {
+ let ranges: HashMap<FileName, Vec<Range>> = [
+ (
+ FileName::Real(PathBuf::from("src/main.rs")),
+ vec![Range::new(1, 3), Range::new(5, 7)],
+ ),
+ (
+ FileName::Real(PathBuf::from("src/lib.rs")),
+ vec![Range::new(1, 7)],
+ ),
+ ]
+ .iter()
+ .cloned()
+ .collect();
+
+ let file_lines = FileLines::from_ranges(ranges);
+ let mut spans = file_lines.to_json_spans();
+ spans.sort();
+ let json = json::to_value(&spans).unwrap();
+ assert_eq!(
+ json,
+ json! {[
+ {"file": "src/lib.rs", "range": [1, 7]},
+ {"file": "src/main.rs", "range": [1, 3]},
+ {"file": "src/main.rs", "range": [5, 7]},
+ ]}
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/config/lists.rs b/src/tools/rustfmt/src/config/lists.rs
new file mode 100644
index 000000000..11cb17068
--- /dev/null
+++ b/src/tools/rustfmt/src/config/lists.rs
@@ -0,0 +1,92 @@
+//! Configuration options related to rewriting a list.
+
+use rustfmt_config_proc_macro::config_type;
+
+use crate::config::IndentStyle;
+
+/// The definitive formatting tactic for lists.
+#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+pub enum DefinitiveListTactic {
+ Vertical,
+ Horizontal,
+ Mixed,
+ /// Special case tactic for `format!()`, `write!()` style macros.
+ SpecialMacro(usize),
+}
+
+impl DefinitiveListTactic {
+ pub fn ends_with_newline(&self, indent_style: IndentStyle) -> bool {
+ match indent_style {
+ IndentStyle::Block => *self != DefinitiveListTactic::Horizontal,
+ IndentStyle::Visual => false,
+ }
+ }
+}
+
+/// Formatting tactic for lists. This will be cast down to a
+/// `DefinitiveListTactic` depending on the number and length of the items and
+/// their comments.
+#[config_type]
+pub enum ListTactic {
+ /// One item per row.
+ Vertical,
+ /// All items on one row.
+ Horizontal,
+ /// Try horizontal layout; if that fails, fall back to vertical.
+ HorizontalVertical,
+ /// HorizontalVertical with a soft limit of n characters.
+ LimitedHorizontalVertical(usize),
+ /// Pack as many items as possible per row over (possibly) many rows.
+ Mixed,
+}
+
+#[config_type]
+pub enum SeparatorTactic {
+ Always,
+ Never,
+ Vertical,
+}
+
+impl SeparatorTactic {
+ pub fn from_bool(b: bool) -> SeparatorTactic {
+ if b {
+ SeparatorTactic::Always
+ } else {
+ SeparatorTactic::Never
+ }
+ }
+}
+
+/// Where to put separator.
+#[config_type]
+pub enum SeparatorPlace {
+ Front,
+ Back,
+}
+
+impl SeparatorPlace {
+ pub fn is_front(self) -> bool {
+ self == SeparatorPlace::Front
+ }
+
+ pub fn is_back(self) -> bool {
+ self == SeparatorPlace::Back
+ }
+
+ pub fn from_tactic(
+ default: SeparatorPlace,
+ tactic: DefinitiveListTactic,
+ sep: &str,
+ ) -> SeparatorPlace {
+ match tactic {
+ DefinitiveListTactic::Vertical => default,
+ _ => {
+ if sep == "," {
+ SeparatorPlace::Back
+ } else {
+ default
+ }
+ }
+ }
+ }
+}
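+
+// Note (illustrative): `from_tactic` is why a "," separator always ends up at
+// the end of the line for non-vertical layouts, regardless of the configured
+// default (e.g. `binop_separator = "Front"`); other separators, such as binary
+// operators, keep the configured placement.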
diff --git a/src/tools/rustfmt/src/config/mod.rs b/src/tools/rustfmt/src/config/mod.rs
new file mode 100644
index 000000000..f49c18d3a
--- /dev/null
+++ b/src/tools/rustfmt/src/config/mod.rs
@@ -0,0 +1,924 @@
+use std::cell::Cell;
+use std::default::Default;
+use std::fs::File;
+use std::io::{Error, ErrorKind, Read};
+use std::path::{Path, PathBuf};
+use std::{env, fs};
+
+use thiserror::Error;
+
+use crate::config::config_type::ConfigType;
+#[allow(unreachable_pub)]
+pub use crate::config::file_lines::{FileLines, FileName, Range};
+#[allow(unreachable_pub)]
+pub use crate::config::lists::*;
+#[allow(unreachable_pub)]
+pub use crate::config::options::*;
+
+#[macro_use]
+pub(crate) mod config_type;
+#[macro_use]
+pub(crate) mod options;
+
+pub(crate) mod file_lines;
+pub(crate) mod lists;
+
+// This macro defines configuration options used in rustfmt. Each option
+// is defined as follows:
+//
+// `name: value type, default value, is stable, description;`
+create_config! {
+ // Fundamental stuff
+ max_width: usize, 100, true, "Maximum width of each line";
+ hard_tabs: bool, false, true, "Use tab characters for indentation, spaces for alignment";
+ tab_spaces: usize, 4, true, "Number of spaces per tab";
+ newline_style: NewlineStyle, NewlineStyle::Auto, true, "Unix or Windows line endings";
+ indent_style: IndentStyle, IndentStyle::Block, false, "How do we indent expressions or items";
+
+ // Width Heuristics
+ use_small_heuristics: Heuristics, Heuristics::Default, true, "Whether to use different \
+ formatting for items and expressions if they satisfy a heuristic notion of 'small'";
+ width_heuristics: WidthHeuristics, WidthHeuristics::scaled(100), false,
+ "'small' heuristic values";
+ fn_call_width: usize, 60, true, "Maximum width of the args of a function call before \
+ falling back to vertical formatting.";
+ attr_fn_like_width: usize, 70, true, "Maximum width of the args of a function-like \
+ attribute before falling back to vertical formatting.";
+ struct_lit_width: usize, 18, true, "Maximum width in the body of a struct lit before \
+ falling back to vertical formatting.";
+ struct_variant_width: usize, 35, true, "Maximum width in the body of a struct variant before \
+ falling back to vertical formatting.";
+ array_width: usize, 60, true, "Maximum width of an array literal before falling \
+ back to vertical formatting.";
+ chain_width: usize, 60, true, "Maximum length of a chain to fit on a single line.";
+ single_line_if_else_max_width: usize, 50, true, "Maximum line length for single line if-else \
+ expressions. A value of zero means always break if-else expressions.";
+
+ // Comments, macros, and strings
+ wrap_comments: bool, false, false, "Break comments to fit on the line";
+ format_code_in_doc_comments: bool, false, false, "Format the code snippet in doc comments.";
+ doc_comment_code_block_width: usize, 100, false, "Maximum width for code snippets in doc \
+ comments. No effect unless format_code_in_doc_comments = true";
+ comment_width: usize, 80, false,
+ "Maximum length of comments. No effect unless wrap_comments = true";
+ normalize_comments: bool, false, false, "Convert /* */ comments to // comments where possible";
+ normalize_doc_attributes: bool, false, false, "Normalize doc attributes as doc comments";
+ format_strings: bool, false, false, "Format string literals where necessary";
+ format_macro_matchers: bool, false, false,
+ "Format the metavariable matching patterns in macros";
+ format_macro_bodies: bool, true, false, "Format the bodies of macros";
+ hex_literal_case: HexLiteralCase, HexLiteralCase::Preserve, false,
+ "Format hexadecimal integer literals";
+
+ // Single line expressions and items
+ empty_item_single_line: bool, true, false,
+ "Put empty-body functions and impls on a single line";
+ struct_lit_single_line: bool, true, false,
+ "Put small struct literals on a single line";
+ fn_single_line: bool, false, false, "Put single-expression functions on a single line";
+ where_single_line: bool, false, false, "Force where-clauses to be on a single line";
+
+ // Imports
+ imports_indent: IndentStyle, IndentStyle::Block, false, "Indent of imports";
+ imports_layout: ListTactic, ListTactic::Mixed, false, "Item layout inside an import block";
+ imports_granularity: ImportGranularity, ImportGranularity::Preserve, false,
+ "Merge or split imports to the provided granularity";
+ group_imports: GroupImportsTactic, GroupImportsTactic::Preserve, false,
+ "Controls the strategy for how imports are grouped together";
+ merge_imports: bool, false, false, "(deprecated: use imports_granularity instead)";
+
+ // Ordering
+ reorder_imports: bool, true, true, "Reorder import and extern crate statements alphabetically";
+ reorder_modules: bool, true, true, "Reorder module statements alphabetically in group";
+ reorder_impl_items: bool, false, false, "Reorder impl items";
+
+ // Spaces around punctuation
+ type_punctuation_density: TypeDensity, TypeDensity::Wide, false,
+ "Determines if '+' or '=' are wrapped in spaces in the punctuation of types";
+ space_before_colon: bool, false, false, "Leave a space before the colon";
+ space_after_colon: bool, true, false, "Leave a space after the colon";
+ spaces_around_ranges: bool, false, false, "Put spaces around the .. and ..= range operators";
+ binop_separator: SeparatorPlace, SeparatorPlace::Front, false,
+ "Where to put a binary operator when a binary expression goes multiline";
+
+ // Misc.
+ remove_nested_parens: bool, true, true, "Remove nested parens";
+ combine_control_expr: bool, true, false, "Combine control expressions with function calls";
+ short_array_element_width_threshold: usize, 10, true,
+ "Width threshold for an array element to be considered short";
+ overflow_delimited_expr: bool, false, false,
+ "Allow trailing bracket/brace delimited expressions to overflow";
+ struct_field_align_threshold: usize, 0, false,
+ "Align struct fields if their diffs fit within threshold";
+ enum_discrim_align_threshold: usize, 0, false,
+ "Align enum variant discriminants, if their diffs fit within threshold";
+ match_arm_blocks: bool, true, false, "Wrap the body of arms in blocks when it does not fit on \
+ the same line with the pattern of arms";
+ match_arm_leading_pipes: MatchArmLeadingPipe, MatchArmLeadingPipe::Never, true,
+ "Determines whether leading pipes are emitted on match arms";
+ force_multiline_blocks: bool, false, false,
+ "Force multiline closure bodies and match arms to be wrapped in a block";
+ fn_args_layout: Density, Density::Tall, true,
+ "Control the layout of arguments in a function";
+ brace_style: BraceStyle, BraceStyle::SameLineWhere, false, "Brace style for items";
+ control_brace_style: ControlBraceStyle, ControlBraceStyle::AlwaysSameLine, false,
+ "Brace style for control flow constructs";
+ trailing_semicolon: bool, true, false,
+ "Add trailing semicolon after break, continue and return";
+ trailing_comma: SeparatorTactic, SeparatorTactic::Vertical, false,
+ "How to handle trailing commas for lists";
+ match_block_trailing_comma: bool, false, true,
+ "Put a trailing comma after a block based match arm (non-block arms are not affected)";
+ blank_lines_upper_bound: usize, 1, false,
+ "Maximum number of blank lines which can be put between items";
+ blank_lines_lower_bound: usize, 0, false,
+ "Minimum number of blank lines which must be put between items";
+ edition: Edition, Edition::Edition2015, true, "The edition of the parser (RFC 2052)";
+ version: Version, Version::One, false, "Version of formatting rules";
+ inline_attribute_width: usize, 0, false,
+ "Write an item and its attribute on the same line \
+ if their combined width is below a threshold";
+ format_generated_files: bool, true, false, "Format generated files";
+
+ // Options that can change the source code beyond whitespace/blocks (somewhat linty things)
+ merge_derives: bool, true, true, "Merge multiple `#[derive(...)]` into a single one";
+ use_try_shorthand: bool, false, true, "Replace uses of the try! macro by the ? shorthand";
+ use_field_init_shorthand: bool, false, true, "Use field initialization shorthand if possible";
+ force_explicit_abi: bool, true, true, "Always print the abi for extern items";
+ condense_wildcard_suffixes: bool, false, false, "Replace strings of _ wildcards by a single .. \
+ in tuple patterns";
+
+ // Control options (changes the operation of rustfmt, rather than the formatting)
+ color: Color, Color::Auto, false,
+ "What Color option to use when none is supplied: Always, Never, Auto";
+ required_version: String, env!("CARGO_PKG_VERSION").to_owned(), false,
+ "Require a specific version of rustfmt";
+ unstable_features: bool, false, false,
+ "Enables unstable features. Only available on nightly channel";
+ disable_all_formatting: bool, false, true, "Don't reformat anything";
+ skip_children: bool, false, false, "Don't reformat out of line modules";
+ hide_parse_errors: bool, false, false, "Hide errors from the parser";
+ error_on_line_overflow: bool, false, false, "Error if unable to get all lines within max_width";
+ error_on_unformatted: bool, false, false,
+ "Error if unable to get comments or string literals within max_width, \
+ or they are left with trailing whitespaces";
+ ignore: IgnoreList, IgnoreList::default(), false,
+ "Skip formatting the specified files and directories";
+
+ // Not user-facing
+ verbose: Verbosity, Verbosity::Normal, false, "How much information to emit to the user";
+ file_lines: FileLines, FileLines::all(), false,
+ "Lines to format; this is not supported in rustfmt.toml, and can only be specified \
+ via the --file-lines option";
+ emit_mode: EmitMode, EmitMode::Files, false,
+ "What emit Mode to use when none is supplied";
+ make_backup: bool, false, false, "Backup changed files";
+ print_misformatted_file_names: bool, false, true,
+ "Prints the names of mismatched files that were formatted. Prints the names of \
+ files that would be formatted when used with `--check` mode.";
+}
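+
+// Editorial note (illustrative, not part of the upstream sources): every option declared in the
+// `create_config!` invocation above becomes a `rustfmt.toml` key of the same name, as the
+// default-config dump test below demonstrates. A hedged sketch of a project pinning a few of
+// the stable options might look like:
+//
+//     max_width = 100
+//     match_arm_blocks = true
+//     merge_derives = true
+//     edition = "2021"
+//
+// Options declared with `false` above are unstable and are only honoured on the nightly
+// channel, as exercised by the `nightly_only_test` tests further down.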
+
+#[derive(Error, Debug)]
+#[error("Could not output config: {0}")]
+pub struct ToTomlError(toml::ser::Error);
+
+impl PartialConfig {
+ pub fn to_toml(&self) -> Result<String, ToTomlError> {
+ // Non-user-facing options can't be specified in TOML
+ let mut cloned = self.clone();
+ cloned.file_lines = None;
+ cloned.verbose = None;
+ cloned.width_heuristics = None;
+ cloned.print_misformatted_file_names = None;
+ cloned.merge_imports = None;
+
+ ::toml::to_string(&cloned).map_err(ToTomlError)
+ }
+}
+
+impl Config {
+ pub(crate) fn version_meets_requirement(&self) -> bool {
+ if self.was_set().required_version() {
+ let version = env!("CARGO_PKG_VERSION");
+ let required_version = self.required_version();
+ if version != required_version {
+ println!(
+ "Error: rustfmt version ({}) doesn't match the required version ({})",
+ version, required_version,
+ );
+ return false;
+ }
+ }
+
+ true
+ }
+
+ /// Constructs a `Config` from the toml file specified at `file_path`.
+ ///
+ /// This method only looks at the provided path, for a method that
+ /// searches parents for a `rustfmt.toml` see `from_resolved_toml_path`.
+ ///
+ /// Returns a `Config` if the config could be read and parsed from
+ /// the file, otherwise errors.
+ pub(super) fn from_toml_path(file_path: &Path) -> Result<Config, Error> {
+ let mut file = File::open(&file_path)?;
+ let mut toml = String::new();
+ file.read_to_string(&mut toml)?;
+ Config::from_toml(&toml, file_path.parent().unwrap())
+ .map_err(|err| Error::new(ErrorKind::InvalidData, err))
+ }
+
+ /// Resolves the config for input in `dir`.
+ ///
+ /// Searches for `rustfmt.toml` beginning with `dir`, and
+ /// recursively checking parents of `dir` if no config file is found.
+ /// If no config file exists in `dir` or in any parent, a
+ /// default `Config` will be returned (and the returned path will be `None`).
+ ///
+ /// Returns the `Config` to use, and the path of the project file if there was
+ /// one.
+ pub(super) fn from_resolved_toml_path(dir: &Path) -> Result<(Config, Option<PathBuf>), Error> {
+ /// Try to find a project file in the given directory and its parents.
+ /// Returns the path of the nearest project file if one exists,
+ /// or `None` if no project file was found.
+ fn resolve_project_file(dir: &Path) -> Result<Option<PathBuf>, Error> {
+ let mut current = if dir.is_relative() {
+ env::current_dir()?.join(dir)
+ } else {
+ dir.to_path_buf()
+ };
+
+ current = fs::canonicalize(current)?;
+
+ loop {
+ match get_toml_path(&current) {
+ Ok(Some(path)) => return Ok(Some(path)),
+ Err(e) => return Err(e),
+ _ => (),
+ }
+
+ // If the current directory has no parent, we're done searching.
+ if !current.pop() {
+ break;
+ }
+ }
+
+ // If nothing was found, check in the home directory.
+ if let Some(home_dir) = dirs::home_dir() {
+ if let Some(path) = get_toml_path(&home_dir)? {
+ return Ok(Some(path));
+ }
+ }
+
+ // If none was found there either, check in the user's configuration directory.
+ if let Some(mut config_dir) = dirs::config_dir() {
+ config_dir.push("rustfmt");
+ if let Some(path) = get_toml_path(&config_dir)? {
+ return Ok(Some(path));
+ }
+ }
+
+ Ok(None)
+ }
+
+ match resolve_project_file(dir)? {
+ None => Ok((Config::default(), None)),
+ Some(path) => Config::from_toml_path(&path).map(|config| (config, Some(path))),
+ }
+ }
+
+ pub(crate) fn from_toml(toml: &str, dir: &Path) -> Result<Config, String> {
+ let parsed: ::toml::Value = toml
+ .parse()
+ .map_err(|e| format!("Could not parse TOML: {}", e))?;
+ let mut err = String::new();
+ let table = parsed
+ .as_table()
+ .ok_or_else(|| String::from("Parsed config was not table"))?;
+ for key in table.keys() {
+ if !Config::is_valid_name(key) {
+ let msg = &format!("Warning: Unknown configuration option `{}`\n", key);
+ err.push_str(msg)
+ }
+ }
+ match parsed.try_into() {
+ Ok(parsed_config) => {
+ if !err.is_empty() {
+ eprint!("{}", err);
+ }
+ Ok(Config::default().fill_from_parsed_config(parsed_config, dir))
+ }
+ Err(e) => {
+ err.push_str("Error: Decoding config file failed:\n");
+ err.push_str(format!("{}\n", e).as_str());
+ err.push_str("Please check your config file.");
+ Err(err)
+ }
+ }
+ }
+}
+
+/// Loads a config by checking the client-supplied options and if appropriate, the
+/// file system (including searching the file system for overrides).
+pub fn load_config<O: CliOptions>(
+ file_path: Option<&Path>,
+ options: Option<O>,
+) -> Result<(Config, Option<PathBuf>), Error> {
+ let over_ride = match options {
+ Some(ref opts) => config_path(opts)?,
+ None => None,
+ };
+
+ let result = if let Some(over_ride) = over_ride {
+ Config::from_toml_path(over_ride.as_ref()).map(|p| (p, Some(over_ride.to_owned())))
+ } else if let Some(file_path) = file_path {
+ Config::from_resolved_toml_path(file_path)
+ } else {
+ Ok((Config::default(), None))
+ };
+
+ result.map(|(mut c, p)| {
+ if let Some(options) = options {
+ options.apply_to(&mut c);
+ }
+ (c, p)
+ })
+}
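+
+// Editorial sketch (illustrative, not part of upstream rustfmt): a caller can drive
+// `load_config` with a no-op `CliOptions` implementation. `NoCliOptions` is a hypothetical
+// name used only for illustration; the trait itself is defined in `config/options.rs`.
+//
+//     struct NoCliOptions;
+//     impl CliOptions for NoCliOptions {
+//         fn apply_to(self, _config: &mut Config) {}
+//         fn config_path(&self) -> Option<&Path> { None }
+//     }
+//
+//     // Resolve a config for the current directory; `path` is `Some(..)` if a
+//     // rustfmt.toml / .rustfmt.toml was found, `None` if the defaults were used.
+//     let (config, path) = load_config(Some(Path::new(".")), Some(NoCliOptions))?;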
+
+ // Check for the presence of known config file names (`rustfmt.toml`, `.rustfmt.toml`) in `dir`.
+ //
+ // Returns `Some(path)` if a config file exists, `None` if no file exists, and an `Err` for IO errors.
+fn get_toml_path(dir: &Path) -> Result<Option<PathBuf>, Error> {
+ const CONFIG_FILE_NAMES: [&str; 2] = [".rustfmt.toml", "rustfmt.toml"];
+ for config_file_name in &CONFIG_FILE_NAMES {
+ let config_file = dir.join(config_file_name);
+ match fs::metadata(&config_file) {
+ // Only return if it's a file to handle the unlikely situation of a directory named
+ // `rustfmt.toml`.
+ Ok(ref md) if md.is_file() => return Ok(Some(config_file)),
+ // Return the error if it's something other than `NotFound`; otherwise we didn't
+ // find the project file yet, and continue searching.
+ Err(e) => {
+ if e.kind() != ErrorKind::NotFound {
+ let ctx = format!("Failed to get metadata for config file {:?}", &config_file);
+ let err = anyhow::Error::new(e).context(ctx);
+ return Err(Error::new(ErrorKind::Other, err));
+ }
+ }
+ _ => {}
+ }
+ }
+ Ok(None)
+}
+
+fn config_path(options: &dyn CliOptions) -> Result<Option<PathBuf>, Error> {
+ let config_path_not_found = |path: &str| -> Result<Option<PathBuf>, Error> {
+ Err(Error::new(
+ ErrorKind::NotFound,
+ format!(
+ "Error: unable to find a config file for the given path: `{}`",
+ path
+ ),
+ ))
+ };
+
+ // Read the config_path and convert to parent dir if a file is provided.
+ // If a config file cannot be found from the given path, return error.
+ match options.config_path() {
+ Some(path) if !path.exists() => config_path_not_found(path.to_str().unwrap()),
+ Some(path) if path.is_dir() => {
+ let config_file_path = get_toml_path(path)?;
+ if config_file_path.is_some() {
+ Ok(config_file_path)
+ } else {
+ config_path_not_found(path.to_str().unwrap())
+ }
+ }
+ path => Ok(path.map(ToOwned::to_owned)),
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use std::str;
+
+ use rustfmt_config_proc_macro::{nightly_only_test, stable_only_test};
+
+ #[allow(dead_code)]
+ mod mock {
+ use super::super::*;
+
+ create_config! {
+ // Options that are used by the generated functions
+ max_width: usize, 100, true, "Maximum width of each line";
+ required_version: String, env!("CARGO_PKG_VERSION").to_owned(), false,
+ "Require a specific version of rustfmt.";
+ ignore: IgnoreList, IgnoreList::default(), false,
+ "Skip formatting the specified files and directories.";
+ verbose: Verbosity, Verbosity::Normal, false,
+ "How much to information to emit to the user";
+ file_lines: FileLines, FileLines::all(), false,
+ "Lines to format; this is not supported in rustfmt.toml, and can only be specified \
+ via the --file-lines option";
+
+ // merge_imports deprecation
+ imports_granularity: ImportGranularity, ImportGranularity::Preserve, false,
+ "Merge imports";
+ merge_imports: bool, false, false, "(deprecated: use imports_granularity instead)";
+
+ // Width Heuristics
+ use_small_heuristics: Heuristics, Heuristics::Default, true,
+ "Whether to use different formatting for items and \
+ expressions if they satisfy a heuristic notion of 'small'.";
+ width_heuristics: WidthHeuristics, WidthHeuristics::scaled(100), false,
+ "'small' heuristic values";
+
+ fn_call_width: usize, 60, true, "Maximum width of the args of a function call before \
+ falling back to vertical formatting.";
+ attr_fn_like_width: usize, 70, true, "Maximum width of the args of function-like \
+ attributes before falling back to vertical formatting.";
+ struct_lit_width: usize, 18, true, "Maximum width in the body of a struct lit before \
+ falling back to vertical formatting.";
+ struct_variant_width: usize, 35, true, "Maximum width in the body of a struct \
+ variant before falling back to vertical formatting.";
+ array_width: usize, 60, true, "Maximum width of an array literal before falling \
+ back to vertical formatting.";
+ chain_width: usize, 60, true, "Maximum length of a chain to fit on a single line.";
+ single_line_if_else_max_width: usize, 50, true, "Maximum line length for single \
+ line if-else expressions. A value of zero means always break if-else expressions.";
+
+ // Options that are used by the tests
+ stable_option: bool, false, true, "A stable option";
+ unstable_option: bool, false, false, "An unstable option";
+ }
+ }
+
+ #[test]
+ fn test_config_set() {
+ let mut config = Config::default();
+ config.set().verbose(Verbosity::Quiet);
+ assert_eq!(config.verbose(), Verbosity::Quiet);
+ config.set().verbose(Verbosity::Normal);
+ assert_eq!(config.verbose(), Verbosity::Normal);
+ }
+
+ #[test]
+ fn test_config_used_to_toml() {
+ let config = Config::default();
+
+ let merge_derives = config.merge_derives();
+ let skip_children = config.skip_children();
+
+ let used_options = config.used_options();
+ let toml = used_options.to_toml().unwrap();
+ assert_eq!(
+ toml,
+ format!(
+ "merge_derives = {}\nskip_children = {}\n",
+ merge_derives, skip_children,
+ )
+ );
+ }
+
+ #[test]
+ fn test_was_set() {
+ let config = Config::from_toml("hard_tabs = true", Path::new("")).unwrap();
+
+ assert_eq!(config.was_set().hard_tabs(), true);
+ assert_eq!(config.was_set().verbose(), false);
+ }
+
+ #[test]
+ fn test_print_docs_exclude_unstable() {
+ use self::mock::Config;
+
+ let mut output = Vec::new();
+ Config::print_docs(&mut output, false);
+
+ let s = str::from_utf8(&output).unwrap();
+
+ assert_eq!(s.contains("stable_option"), true);
+ assert_eq!(s.contains("unstable_option"), false);
+ assert_eq!(s.contains("(unstable)"), false);
+ }
+
+ #[test]
+ fn test_print_docs_include_unstable() {
+ use self::mock::Config;
+
+ let mut output = Vec::new();
+ Config::print_docs(&mut output, true);
+
+ let s = str::from_utf8(&output).unwrap();
+ assert_eq!(s.contains("stable_option"), true);
+ assert_eq!(s.contains("unstable_option"), true);
+ assert_eq!(s.contains("(unstable)"), true);
+ }
+
+ #[test]
+ fn test_dump_default_config() {
+ let default_config = format!(
+ r#"max_width = 100
+hard_tabs = false
+tab_spaces = 4
+newline_style = "Auto"
+indent_style = "Block"
+use_small_heuristics = "Default"
+fn_call_width = 60
+attr_fn_like_width = 70
+struct_lit_width = 18
+struct_variant_width = 35
+array_width = 60
+chain_width = 60
+single_line_if_else_max_width = 50
+wrap_comments = false
+format_code_in_doc_comments = false
+doc_comment_code_block_width = 100
+comment_width = 80
+normalize_comments = false
+normalize_doc_attributes = false
+format_strings = false
+format_macro_matchers = false
+format_macro_bodies = true
+hex_literal_case = "Preserve"
+empty_item_single_line = true
+struct_lit_single_line = true
+fn_single_line = false
+where_single_line = false
+imports_indent = "Block"
+imports_layout = "Mixed"
+imports_granularity = "Preserve"
+group_imports = "Preserve"
+reorder_imports = true
+reorder_modules = true
+reorder_impl_items = false
+type_punctuation_density = "Wide"
+space_before_colon = false
+space_after_colon = true
+spaces_around_ranges = false
+binop_separator = "Front"
+remove_nested_parens = true
+combine_control_expr = true
+short_array_element_width_threshold = 10
+overflow_delimited_expr = false
+struct_field_align_threshold = 0
+enum_discrim_align_threshold = 0
+match_arm_blocks = true
+match_arm_leading_pipes = "Never"
+force_multiline_blocks = false
+fn_args_layout = "Tall"
+brace_style = "SameLineWhere"
+control_brace_style = "AlwaysSameLine"
+trailing_semicolon = true
+trailing_comma = "Vertical"
+match_block_trailing_comma = false
+blank_lines_upper_bound = 1
+blank_lines_lower_bound = 0
+edition = "2015"
+version = "One"
+inline_attribute_width = 0
+format_generated_files = true
+merge_derives = true
+use_try_shorthand = false
+use_field_init_shorthand = false
+force_explicit_abi = true
+condense_wildcard_suffixes = false
+color = "Auto"
+required_version = "{}"
+unstable_features = false
+disable_all_formatting = false
+skip_children = false
+hide_parse_errors = false
+error_on_line_overflow = false
+error_on_unformatted = false
+ignore = []
+emit_mode = "Files"
+make_backup = false
+"#,
+ env!("CARGO_PKG_VERSION")
+ );
+ let toml = Config::default().all_options().to_toml().unwrap();
+ assert_eq!(&toml, &default_config);
+ }
+
+ #[stable_only_test]
+ #[test]
+ fn test_as_not_nightly_channel() {
+ let mut config = Config::default();
+ assert_eq!(config.was_set().unstable_features(), false);
+ config.set().unstable_features(true);
+ assert_eq!(config.was_set().unstable_features(), false);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn test_as_nightly_channel() {
+ let mut config = Config::default();
+ config.set().unstable_features(true);
+ // When we don't set the config from toml or command line options it
+ // doesn't get marked as set by the user.
+ assert_eq!(config.was_set().unstable_features(), false);
+ config.set().unstable_features(true);
+ assert_eq!(config.unstable_features(), true);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn test_unstable_from_toml() {
+ let config = Config::from_toml("unstable_features = true", Path::new("")).unwrap();
+ assert_eq!(config.was_set().unstable_features(), true);
+ assert_eq!(config.unstable_features(), true);
+ }
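+
+ // Editorial addition (illustrative only, not from upstream): unknown keys in rustfmt.toml
+ // are reported as warnings by `Config::from_toml` (see the key-validation loop above) but
+ // do not abort parsing, so the remaining, valid keys are still applied.
+ #[test]
+ fn test_unknown_key_is_warned_not_fatal() {
+ let toml = r#"
+ tab_spaces = 2
+ not_a_real_rustfmt_option = true
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.tab_spaces(), 2);
+ }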
+
+ #[cfg(test)]
+ mod deprecated_option_merge_imports {
+ use super::*;
+
+ #[nightly_only_test]
+ #[test]
+ fn test_old_option_set() {
+ let toml = r#"
+ unstable_features = true
+ merge_imports = true
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.imports_granularity(), ImportGranularity::Crate);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn test_both_set() {
+ let toml = r#"
+ unstable_features = true
+ merge_imports = true
+ imports_granularity = "Preserve"
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.imports_granularity(), ImportGranularity::Preserve);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn test_new_overridden() {
+ let toml = r#"
+ unstable_features = true
+ merge_imports = true
+ "#;
+ let mut config = Config::from_toml(toml, Path::new("")).unwrap();
+ config.override_value("imports_granularity", "Preserve");
+ assert_eq!(config.imports_granularity(), ImportGranularity::Preserve);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn test_old_overridden() {
+ let toml = r#"
+ unstable_features = true
+ imports_granularity = "Module"
+ "#;
+ let mut config = Config::from_toml(toml, Path::new("")).unwrap();
+ config.override_value("merge_imports", "true");
+ // no effect: the new option always takes precedence
+ assert_eq!(config.imports_granularity(), ImportGranularity::Module);
+ }
+ }
+
+ #[cfg(test)]
+ mod use_small_heuristics {
+ use super::*;
+
+ #[test]
+ fn test_default_sets_correct_widths() {
+ let toml = r#"
+ use_small_heuristics = "Default"
+ max_width = 200
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), 120);
+ assert_eq!(config.attr_fn_like_width(), 140);
+ assert_eq!(config.chain_width(), 120);
+ assert_eq!(config.fn_call_width(), 120);
+ assert_eq!(config.single_line_if_else_max_width(), 100);
+ assert_eq!(config.struct_lit_width(), 36);
+ assert_eq!(config.struct_variant_width(), 70);
+ }
+
+ #[test]
+ fn test_max_sets_correct_widths() {
+ let toml = r#"
+ use_small_heuristics = "Max"
+ max_width = 120
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), 120);
+ assert_eq!(config.attr_fn_like_width(), 120);
+ assert_eq!(config.chain_width(), 120);
+ assert_eq!(config.fn_call_width(), 120);
+ assert_eq!(config.single_line_if_else_max_width(), 120);
+ assert_eq!(config.struct_lit_width(), 120);
+ assert_eq!(config.struct_variant_width(), 120);
+ }
+
+ #[test]
+ fn test_off_sets_correct_widths() {
+ let toml = r#"
+ use_small_heuristics = "Off"
+ max_width = 100
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), usize::max_value());
+ assert_eq!(config.attr_fn_like_width(), usize::max_value());
+ assert_eq!(config.chain_width(), usize::max_value());
+ assert_eq!(config.fn_call_width(), usize::max_value());
+ assert_eq!(config.single_line_if_else_max_width(), 0);
+ assert_eq!(config.struct_lit_width(), 0);
+ assert_eq!(config.struct_variant_width(), 0);
+ }
+
+ #[test]
+ fn test_override_works_with_default() {
+ let toml = r#"
+ use_small_heuristics = "Default"
+ array_width = 20
+ attr_fn_like_width = 40
+ chain_width = 20
+ fn_call_width = 90
+ single_line_if_else_max_width = 40
+ struct_lit_width = 30
+ struct_variant_width = 34
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), 20);
+ assert_eq!(config.attr_fn_like_width(), 40);
+ assert_eq!(config.chain_width(), 20);
+ assert_eq!(config.fn_call_width(), 90);
+ assert_eq!(config.single_line_if_else_max_width(), 40);
+ assert_eq!(config.struct_lit_width(), 30);
+ assert_eq!(config.struct_variant_width(), 34);
+ }
+
+ #[test]
+ fn test_override_with_max() {
+ let toml = r#"
+ use_small_heuristics = "Max"
+ array_width = 20
+ attr_fn_like_width = 40
+ chain_width = 20
+ fn_call_width = 90
+ single_line_if_else_max_width = 40
+ struct_lit_width = 30
+ struct_variant_width = 34
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), 20);
+ assert_eq!(config.attr_fn_like_width(), 40);
+ assert_eq!(config.chain_width(), 20);
+ assert_eq!(config.fn_call_width(), 90);
+ assert_eq!(config.single_line_if_else_max_width(), 40);
+ assert_eq!(config.struct_lit_width(), 30);
+ assert_eq!(config.struct_variant_width(), 34);
+ }
+
+ #[test]
+ fn test_override_with_off() {
+ let toml = r#"
+ use_small_heuristics = "Off"
+ array_width = 20
+ attr_fn_like_width = 40
+ chain_width = 20
+ fn_call_width = 90
+ single_line_if_else_max_width = 40
+ struct_lit_width = 30
+ struct_variant_width = 34
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), 20);
+ assert_eq!(config.attr_fn_like_width(), 40);
+ assert_eq!(config.chain_width(), 20);
+ assert_eq!(config.fn_call_width(), 90);
+ assert_eq!(config.single_line_if_else_max_width(), 40);
+ assert_eq!(config.struct_lit_width(), 30);
+ assert_eq!(config.struct_variant_width(), 34);
+ }
+
+ #[test]
+ fn test_fn_call_width_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 90
+ fn_call_width = 95
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.fn_call_width(), 90);
+ }
+
+ #[test]
+ fn test_attr_fn_like_width_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 80
+ attr_fn_like_width = 90
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.attr_fn_like_width(), 80);
+ }
+
+ #[test]
+ fn test_struct_lit_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 78
+ struct_lit_width = 90
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.struct_lit_width(), 78);
+ }
+
+ #[test]
+ fn test_struct_variant_width_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 80
+ struct_variant_width = 90
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.struct_variant_width(), 80);
+ }
+
+ #[test]
+ fn test_array_width_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 60
+ array_width = 80
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.array_width(), 60);
+ }
+
+ #[test]
+ fn test_chain_width_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 80
+ chain_width = 90
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.chain_width(), 80);
+ }
+
+ #[test]
+ fn test_single_line_if_else_max_width_config_exceeds_max_width() {
+ let toml = r#"
+ max_width = 70
+ single_line_if_else_max_width = 90
+ "#;
+ let config = Config::from_toml(toml, Path::new("")).unwrap();
+ assert_eq!(config.single_line_if_else_max_width(), 70);
+ }
+
+ #[test]
+ fn test_override_fn_call_width_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("fn_call_width", "101");
+ assert_eq!(config.fn_call_width(), 100);
+ }
+
+ #[test]
+ fn test_override_attr_fn_like_width_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("attr_fn_like_width", "101");
+ assert_eq!(config.attr_fn_like_width(), 100);
+ }
+
+ #[test]
+ fn test_override_struct_lit_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("struct_lit_width", "101");
+ assert_eq!(config.struct_lit_width(), 100);
+ }
+
+ #[test]
+ fn test_override_struct_variant_width_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("struct_variant_width", "101");
+ assert_eq!(config.struct_variant_width(), 100);
+ }
+
+ #[test]
+ fn test_override_array_width_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("array_width", "101");
+ assert_eq!(config.array_width(), 100);
+ }
+
+ #[test]
+ fn test_override_chain_width_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("chain_width", "101");
+ assert_eq!(config.chain_width(), 100);
+ }
+
+ #[test]
+ fn test_override_single_line_if_else_max_width_exceeds_max_width() {
+ let mut config = Config::default();
+ config.override_value("single_line_if_else_max_width", "101");
+ assert_eq!(config.single_line_if_else_max_width(), 100);
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/config/options.rs b/src/tools/rustfmt/src/config/options.rs
new file mode 100644
index 000000000..257a17b27
--- /dev/null
+++ b/src/tools/rustfmt/src/config/options.rs
@@ -0,0 +1,464 @@
+use std::collections::{hash_set, HashSet};
+use std::fmt;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use itertools::Itertools;
+use rustfmt_config_proc_macro::config_type;
+use serde::de::{SeqAccess, Visitor};
+use serde::ser::SerializeSeq;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+use crate::config::lists::*;
+use crate::config::Config;
+
+#[config_type]
+pub enum NewlineStyle {
+ /// Auto-detect based on the raw source input.
+ Auto,
+ /// Force CRLF (`\r\n`).
+ Windows,
+ /// Force LF (`\n`).
+ Unix,
+ /// `\r\n` on Windows, `\n` on other platforms.
+ Native,
+}
+
+#[config_type]
+/// Where to put the opening brace of items (`fn`, `impl`, etc.).
+pub enum BraceStyle {
+ /// Put the opening brace on the next line.
+ AlwaysNextLine,
+ /// Put the opening brace on the same line, if possible.
+ PreferSameLine,
+ /// Prefer the same line except where there is a where-clause, in which
+ /// case force the brace to be put on the next line.
+ SameLineWhere,
+}
+
+#[config_type]
+/// Where to put the opening brace of conditional expressions (`if`, `match`, etc.).
+pub enum ControlBraceStyle {
+ /// K&R style, Rust community default
+ AlwaysSameLine,
+ /// Stroustrup style
+ ClosingNextLine,
+ /// Allman style
+ AlwaysNextLine,
+}
+
+#[config_type]
+/// How to indent.
+pub enum IndentStyle {
+ /// First line on the same line as the opening brace, all lines aligned with
+ /// the first line.
+ Visual,
+ /// First line is on a new line and all lines align with **block** indent.
+ Block,
+}
+
+#[config_type]
+/// How to place list-like items.
+/// FIXME: Issue-3581: this should be renamed to ItemsLayout when publishing 2.0
+pub enum Density {
+ /// Fit as much on one line as possible.
+ Compressed,
+ /// Items are placed horizontally if sufficient space, vertically otherwise.
+ Tall,
+ /// Place every item on a separate line.
+ Vertical,
+}
+
+#[config_type]
+/// Spacing around type combinators.
+pub enum TypeDensity {
+ /// No spaces around "=" and "+"
+ Compressed,
+ /// Spaces around " = " and " + "
+ Wide,
+}
+
+#[config_type]
+/// Heuristic settings that can be used to simplify
+/// the configuration of the granular width options
+/// like `struct_lit_width`, `array_width`, etc.
+pub enum Heuristics {
+ /// Turn off any heuristics
+ Off,
+ /// Turn on max heuristics
+ Max,
+ /// Use scaled values based on the value of `max_width`
+ Default,
+}
+
+impl Density {
+ pub fn to_list_tactic(self, len: usize) -> ListTactic {
+ match self {
+ Density::Compressed => ListTactic::Mixed,
+ Density::Tall => ListTactic::HorizontalVertical,
+ Density::Vertical if len == 1 => ListTactic::Horizontal,
+ Density::Vertical => ListTactic::Vertical,
+ }
+ }
+}
+
+#[config_type]
+/// Configuration for import groups, i.e. sets of imports separated by newlines.
+pub enum GroupImportsTactic {
+ /// Keep groups as they are.
+ Preserve,
+ /// Discard existing groups, and create new groups for
+ /// 1. `std` / `core` / `alloc` imports
+ /// 2. other imports
+ /// 3. `self` / `crate` / `super` imports
+ StdExternalCrate,
+ /// Discard existing groups, and create a single group for everything
+ One,
+}
+
+#[config_type]
+/// How to merge imports.
+pub enum ImportGranularity {
+ /// Do not merge imports.
+ Preserve,
+ /// Use one `use` statement per crate.
+ Crate,
+ /// Use one `use` statement per module.
+ Module,
+ /// Use one `use` statement per imported item.
+ Item,
+ /// Use one `use` statement including all items.
+ One,
+}
+
+/// Controls how rustfmt should handle case in hexadecimal literals.
+#[config_type]
+pub enum HexLiteralCase {
+ /// Leave the literal as-is
+ Preserve,
+ /// Ensure all literals use uppercase lettering
+ Upper,
+ /// Ensure all literals use lowercase lettering
+ Lower,
+}
+
+#[config_type]
+pub enum ReportTactic {
+ Always,
+ Unnumbered,
+ Never,
+}
+
+/// What Rustfmt should emit. Mostly corresponds to the `--emit` command line
+/// option.
+#[config_type]
+pub enum EmitMode {
+ /// Emits to files.
+ Files,
+ /// Writes the output to stdout.
+ Stdout,
+ /// Displays how much of the input file was processed
+ Coverage,
+ /// Unfancy stdout
+ Checkstyle,
+ /// Writes the resulting diffs in a JSON format. Returns an empty array
+ /// `[]` if there were no diffs.
+ Json,
+ /// Output the changed lines (for internal use only)
+ ModifiedLines,
+ /// Checks if a diff can be generated. If so, rustfmt outputs a diff and
+ /// quits with exit code 1.
+ /// This option is designed to be run in CI where a non-zero exit signifies
+ /// non-standard code formatting. Used for `--check`.
+ Diff,
+}
+
+/// Client-preference for coloured output.
+#[config_type]
+pub enum Color {
+ /// Always use color, whether it is a piped or terminal output
+ Always,
+ /// Never use color
+ Never,
+ /// Automatically use color, if supported by terminal
+ Auto,
+}
+
+#[config_type]
+/// rustfmt format style version.
+pub enum Version {
+ /// 1.x.y. When specified, rustfmt will format in the same style as 1.0.0.
+ One,
+ /// 2.x.y. When specified, rustfmt will format in the latest style.
+ Two,
+}
+
+impl Color {
+ /// Whether we should use a coloured terminal.
+ pub fn use_colored_tty(self) -> bool {
+ match self {
+ Color::Always | Color::Auto => true,
+ Color::Never => false,
+ }
+ }
+}
+
+/// How chatty should Rustfmt be?
+#[config_type]
+pub enum Verbosity {
+ /// Emit more.
+ Verbose,
+ /// Default.
+ Normal,
+ /// Emit as little as possible.
+ Quiet,
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
+pub struct WidthHeuristics {
+ // Maximum width of the args of a function call before falling back
+ // to vertical formatting.
+ pub(crate) fn_call_width: usize,
+ // Maximum width of the args of function-like attributes before falling
+ // back to vertical formatting.
+ pub(crate) attr_fn_like_width: usize,
+ // Maximum width in the body of a struct lit before falling back to
+ // vertical formatting.
+ pub(crate) struct_lit_width: usize,
+ // Maximum width in the body of a struct variant before falling back
+ // to vertical formatting.
+ pub(crate) struct_variant_width: usize,
+ // Maximum width of an array literal before falling back to vertical
+ // formatting.
+ pub(crate) array_width: usize,
+ // Maximum length of a chain to fit on a single line.
+ pub(crate) chain_width: usize,
+ // Maximum line length for single line if-else expressions. A value
+ // of zero means always break if-else expressions.
+ pub(crate) single_line_if_else_max_width: usize,
+}
+
+impl fmt::Display for WidthHeuristics {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
+
+impl WidthHeuristics {
+ // Using this WidthHeuristics means we ignore heuristics.
+ pub fn null() -> WidthHeuristics {
+ WidthHeuristics {
+ fn_call_width: usize::max_value(),
+ attr_fn_like_width: usize::max_value(),
+ struct_lit_width: 0,
+ struct_variant_width: 0,
+ array_width: usize::max_value(),
+ chain_width: usize::max_value(),
+ single_line_if_else_max_width: 0,
+ }
+ }
+
+ pub fn set(max_width: usize) -> WidthHeuristics {
+ WidthHeuristics {
+ fn_call_width: max_width,
+ attr_fn_like_width: max_width,
+ struct_lit_width: max_width,
+ struct_variant_width: max_width,
+ array_width: max_width,
+ chain_width: max_width,
+ single_line_if_else_max_width: max_width,
+ }
+ }
+
+ // scale the default WidthHeuristics according to max_width
+ pub fn scaled(max_width: usize) -> WidthHeuristics {
+ const DEFAULT_MAX_WIDTH: usize = 100;
+ let max_width_ratio = if max_width > DEFAULT_MAX_WIDTH {
+ let ratio = max_width as f32 / DEFAULT_MAX_WIDTH as f32;
+ // round to the closest 0.1
+ (ratio * 10.0).round() / 10.0
+ } else {
+ 1.0
+ };
+ WidthHeuristics {
+ fn_call_width: (60.0 * max_width_ratio).round() as usize,
+ attr_fn_like_width: (70.0 * max_width_ratio).round() as usize,
+ struct_lit_width: (18.0 * max_width_ratio).round() as usize,
+ struct_variant_width: (35.0 * max_width_ratio).round() as usize,
+ array_width: (60.0 * max_width_ratio).round() as usize,
+ chain_width: (60.0 * max_width_ratio).round() as usize,
+ single_line_if_else_max_width: (50.0 * max_width_ratio).round() as usize,
+ }
+ }
+}
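+
+// Editorial worked example (illustrative, not part of upstream rustfmt): with
+// `max_width = 200` the ratio above is 200 / 100 = 2.0, so `scaled(200)` yields
+// fn_call_width = 120, attr_fn_like_width = 140, struct_lit_width = 36,
+// struct_variant_width = 70, array_width = 120, chain_width = 120 and
+// single_line_if_else_max_width = 100 -- the same values asserted by
+// `test_default_sets_correct_widths` in `config/mod.rs`.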
+
+impl ::std::str::FromStr for WidthHeuristics {
+ type Err = &'static str;
+
+ fn from_str(_: &str) -> Result<Self, Self::Err> {
+ Err("WidthHeuristics is not parsable")
+ }
+}
+
+impl Default for EmitMode {
+ fn default() -> EmitMode {
+ EmitMode::Files
+ }
+}
+
+/// A set of directories, files and modules that rustfmt should ignore.
+#[derive(Default, Clone, Debug, PartialEq)]
+pub struct IgnoreList {
+ /// A set of paths specified in rustfmt.toml.
+ path_set: HashSet<PathBuf>,
+ /// A path to rustfmt.toml.
+ rustfmt_toml_path: PathBuf,
+}
+
+impl fmt::Display for IgnoreList {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "[{}]",
+ self.path_set
+ .iter()
+ .format_with(", ", |path, f| f(&format_args!(
+ "{}",
+ path.to_string_lossy()
+ )))
+ )
+ }
+}
+
+impl Serialize for IgnoreList {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut seq = serializer.serialize_seq(Some(self.path_set.len()))?;
+ for e in &self.path_set {
+ seq.serialize_element(e)?;
+ }
+ seq.end()
+ }
+}
+
+impl<'de> Deserialize<'de> for IgnoreList {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ struct HashSetVisitor;
+ impl<'v> Visitor<'v> for HashSetVisitor {
+ type Value = HashSet<PathBuf>;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a sequence of path")
+ }
+
+ fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+ where
+ A: SeqAccess<'v>,
+ {
+ let mut path_set = HashSet::new();
+ while let Some(elem) = seq.next_element()? {
+ path_set.insert(elem);
+ }
+ Ok(path_set)
+ }
+ }
+ Ok(IgnoreList {
+ path_set: deserializer.deserialize_seq(HashSetVisitor)?,
+ rustfmt_toml_path: PathBuf::new(),
+ })
+ }
+}
+
+impl<'a> IntoIterator for &'a IgnoreList {
+ type Item = &'a PathBuf;
+ type IntoIter = hash_set::Iter<'a, PathBuf>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.path_set.iter()
+ }
+}
+
+impl IgnoreList {
+ pub fn add_prefix(&mut self, dir: &Path) {
+ self.rustfmt_toml_path = dir.to_path_buf();
+ }
+
+ pub fn rustfmt_toml_path(&self) -> &Path {
+ &self.rustfmt_toml_path
+ }
+}
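+
+// Editorial sketch (illustrative, not from upstream docs): per the `Deserialize` impl above,
+// the `ignore` option is read from rustfmt.toml as a plain TOML array of paths, e.g.
+//
+//     ignore = ["src/generated.rs", "vendor"]
+//
+// `add_prefix` records the directory containing that rustfmt.toml so the listed paths can
+// later be resolved relative to it.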
+
+impl FromStr for IgnoreList {
+ type Err = &'static str;
+
+ fn from_str(_: &str) -> Result<Self, Self::Err> {
+ Err("IgnoreList is not parsable")
+ }
+}
+
+/// Maps client-supplied options to Rustfmt's internals, mostly overriding
+/// values in a config with values from the command line.
+pub trait CliOptions {
+ fn apply_to(self, config: &mut Config);
+ fn config_path(&self) -> Option<&Path>;
+}
+
+/// The edition of the syntax and semantics of code (RFC 2052).
+#[config_type]
+pub enum Edition {
+ #[value = "2015"]
+ #[doc_hint = "2015"]
+ /// Edition 2015.
+ Edition2015,
+ #[value = "2018"]
+ #[doc_hint = "2018"]
+ /// Edition 2018.
+ Edition2018,
+ #[value = "2021"]
+ #[doc_hint = "2021"]
+ /// Edition 2021.
+ Edition2021,
+ #[value = "2024"]
+ #[doc_hint = "2024"]
+ /// Edition 2024.
+ Edition2024,
+}
+
+impl Default for Edition {
+ fn default() -> Edition {
+ Edition::Edition2015
+ }
+}
+
+impl From<Edition> for rustc_span::edition::Edition {
+ fn from(edition: Edition) -> Self {
+ match edition {
+ Edition::Edition2015 => Self::Edition2015,
+ Edition::Edition2018 => Self::Edition2018,
+ Edition::Edition2021 => Self::Edition2021,
+ Edition::Edition2024 => Self::Edition2024,
+ }
+ }
+}
+
+impl PartialOrd for Edition {
+ fn partial_cmp(&self, other: &Edition) -> Option<std::cmp::Ordering> {
+ rustc_span::edition::Edition::partial_cmp(&(*self).into(), &(*other).into())
+ }
+}
+
+/// Controls how rustfmt should handle leading pipes on match arms.
+#[config_type]
+pub enum MatchArmLeadingPipe {
+ /// Place leading pipes on all match arms
+ Always,
+ /// Never emit leading pipes on match arms
+ Never,
+ /// Preserve any existing leading pipes
+ Preserve,
+}
diff --git a/src/tools/rustfmt/src/coverage.rs b/src/tools/rustfmt/src/coverage.rs
new file mode 100644
index 000000000..f5a049742
--- /dev/null
+++ b/src/tools/rustfmt/src/coverage.rs
@@ -0,0 +1,15 @@
+use crate::{Config, EmitMode};
+use std::borrow::Cow;
+
+pub(crate) fn transform_missing_snippet<'a>(config: &Config, string: &'a str) -> Cow<'a, str> {
+ match config.emit_mode() {
+ EmitMode::Coverage => Cow::from(replace_chars(string)),
+ _ => Cow::from(string),
+ }
+}
+
+fn replace_chars(s: &str) -> String {
+ s.chars()
+ .map(|ch| if ch.is_whitespace() { ch } else { 'X' })
+ .collect()
+}
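+
+// Editorial worked example (illustrative, not part of upstream rustfmt): in
+// `EmitMode::Coverage`, `transform_missing_snippet` masks every non-whitespace character
+// with 'X' while preserving whitespace, so a missing snippet such as
+// "fn foo() {}" is emitted as "XX XXXXX XX".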
diff --git a/src/tools/rustfmt/src/emitter.rs b/src/tools/rustfmt/src/emitter.rs
new file mode 100644
index 000000000..dc2c99a30
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter.rs
@@ -0,0 +1,52 @@
+pub(crate) use self::checkstyle::*;
+pub(crate) use self::diff::*;
+pub(crate) use self::files::*;
+pub(crate) use self::files_with_backup::*;
+pub(crate) use self::json::*;
+pub(crate) use self::modified_lines::*;
+pub(crate) use self::stdout::*;
+use crate::FileName;
+use std::io::{self, Write};
+use std::path::Path;
+
+mod checkstyle;
+mod diff;
+mod files;
+mod files_with_backup;
+mod json;
+mod modified_lines;
+mod stdout;
+
+pub(crate) struct FormattedFile<'a> {
+ pub(crate) filename: &'a FileName,
+ pub(crate) original_text: &'a str,
+ pub(crate) formatted_text: &'a str,
+}
+
+#[derive(Debug, Default, Clone)]
+pub(crate) struct EmitterResult {
+ pub(crate) has_diff: bool,
+}
+
+pub(crate) trait Emitter {
+ fn emit_formatted_file(
+ &mut self,
+ output: &mut dyn Write,
+ formatted_file: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error>;
+
+ fn emit_header(&self, _output: &mut dyn Write) -> Result<(), io::Error> {
+ Ok(())
+ }
+
+ fn emit_footer(&self, _output: &mut dyn Write) -> Result<(), io::Error> {
+ Ok(())
+ }
+}
+
+fn ensure_real_path(filename: &FileName) -> &Path {
+ match *filename {
+ FileName::Real(ref path) => path,
+ _ => panic!("cannot format `{}` and emit to files", filename),
+ }
+}
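+
+// Editorial sketch (illustrative, not part of upstream rustfmt): a minimal custom emitter only
+// has to implement `emit_formatted_file`; `emit_header` and `emit_footer` fall back to the
+// no-op defaults above. `NullEmitter` is a hypothetical name used purely for illustration.
+//
+//     struct NullEmitter;
+//
+//     impl Emitter for NullEmitter {
+//         fn emit_formatted_file(
+//             &mut self,
+//             _output: &mut dyn Write,
+//             _formatted_file: FormattedFile<'_>,
+//         ) -> Result<EmitterResult, io::Error> {
+//             Ok(EmitterResult::default())
+//         }
+//     }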
diff --git a/src/tools/rustfmt/src/emitter/checkstyle.rs b/src/tools/rustfmt/src/emitter/checkstyle.rs
new file mode 100644
index 000000000..545b25997
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/checkstyle.rs
@@ -0,0 +1,150 @@
+use self::xml::XmlEscaped;
+use super::*;
+use crate::rustfmt_diff::{make_diff, DiffLine, Mismatch};
+use std::io::{self, Write};
+
+mod xml;
+
+#[derive(Debug, Default)]
+pub(crate) struct CheckstyleEmitter;
+
+impl Emitter for CheckstyleEmitter {
+ fn emit_header(&self, output: &mut dyn Write) -> Result<(), io::Error> {
+ writeln!(output, r#"<?xml version="1.0" encoding="utf-8"?>"#)?;
+ write!(output, r#"<checkstyle version="4.3">"#)?;
+ Ok(())
+ }
+
+ fn emit_footer(&self, output: &mut dyn Write) -> Result<(), io::Error> {
+ writeln!(output, "</checkstyle>")
+ }
+
+ fn emit_formatted_file(
+ &mut self,
+ output: &mut dyn Write,
+ FormattedFile {
+ filename,
+ original_text,
+ formatted_text,
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ const CONTEXT_SIZE: usize = 0;
+ let diff = make_diff(original_text, formatted_text, CONTEXT_SIZE);
+ output_checkstyle_file(output, filename, diff)?;
+ Ok(EmitterResult::default())
+ }
+}
+
+pub(crate) fn output_checkstyle_file<T>(
+ mut writer: T,
+ filename: &FileName,
+ diff: Vec<Mismatch>,
+) -> Result<(), io::Error>
+where
+ T: Write,
+{
+ write!(writer, r#"<file name="{}">"#, filename)?;
+ for mismatch in diff {
+ let begin_line = mismatch.line_number;
+ let mut current_line;
+ let mut line_counter = 0;
+ for line in mismatch.lines {
+ // Do nothing with `DiffLine::Context` and `DiffLine::Resulting`.
+ if let DiffLine::Expected(message) = line {
+ current_line = begin_line + line_counter;
+ line_counter += 1;
+ write!(
+ writer,
+ r#"<error line="{}" severity="warning" message="Should be `{}`" />"#,
+ current_line,
+ XmlEscaped(&message)
+ )?;
+ }
+ }
+ }
+ write!(writer, "</file>")?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::path::PathBuf;
+
+ #[test]
+ fn emits_empty_record_on_file_with_no_mismatches() {
+ let file_name = "src/well_formatted.rs";
+ let mut writer = Vec::new();
+ let _ = output_checkstyle_file(
+ &mut writer,
+ &FileName::Real(PathBuf::from(file_name)),
+ vec![],
+ );
+ assert_eq!(
+ &writer[..],
+ format!(r#"<file name="{}"></file>"#, file_name).as_bytes()
+ );
+ }
+
+ // https://github.com/rust-lang/rustfmt/issues/1636
+ #[test]
+ fn emits_single_xml_tree_containing_all_files() {
+ let bin_file = "src/bin.rs";
+ let bin_original = vec!["fn main() {", "println!(\"Hello, world!\");", "}"];
+ let bin_formatted = vec!["fn main() {", " println!(\"Hello, world!\");", "}"];
+ let lib_file = "src/lib.rs";
+ let lib_original = vec!["fn greet() {", "println!(\"Greetings!\");", "}"];
+ let lib_formatted = vec!["fn greet() {", " println!(\"Greetings!\");", "}"];
+ let mut writer = Vec::new();
+ let mut emitter = CheckstyleEmitter::default();
+ let _ = emitter.emit_header(&mut writer);
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(bin_file)),
+ original_text: &bin_original.join("\n"),
+ formatted_text: &bin_formatted.join("\n"),
+ },
+ )
+ .unwrap();
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(lib_file)),
+ original_text: &lib_original.join("\n"),
+ formatted_text: &lib_formatted.join("\n"),
+ },
+ )
+ .unwrap();
+ let _ = emitter.emit_footer(&mut writer);
+ let exp_bin_xml = vec![
+ format!(r#"<file name="{}">"#, bin_file),
+ format!(
+ r#"<error line="2" severity="warning" message="Should be `{}`" />"#,
+ XmlEscaped(r#" println!("Hello, world!");"#),
+ ),
+ String::from("</file>"),
+ ];
+ let exp_lib_xml = vec![
+ format!(r#"<file name="{}">"#, lib_file),
+ format!(
+ r#"<error line="2" severity="warning" message="Should be `{}`" />"#,
+ XmlEscaped(r#" println!("Greetings!");"#),
+ ),
+ String::from("</file>"),
+ ];
+ assert_eq!(
+ String::from_utf8(writer).unwrap(),
+ vec![
+ r#"<?xml version="1.0" encoding="utf-8"?>"#,
+ "\n",
+ r#"<checkstyle version="4.3">"#,
+ &format!("{}{}", exp_bin_xml.join(""), exp_lib_xml.join("")),
+ "</checkstyle>\n",
+ ]
+ .join(""),
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/checkstyle/xml.rs b/src/tools/rustfmt/src/emitter/checkstyle/xml.rs
new file mode 100644
index 000000000..f251aabe8
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/checkstyle/xml.rs
@@ -0,0 +1,52 @@
+use std::fmt::{self, Display};
+
+/// Convert special characters into XML entities.
+/// This is needed for checkstyle output.
+pub(super) struct XmlEscaped<'a>(pub(super) &'a str);
+
+impl<'a> Display for XmlEscaped<'a> {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for char in self.0.chars() {
+ match char {
+ '<' => write!(formatter, "&lt;"),
+ '>' => write!(formatter, "&gt;"),
+ '"' => write!(formatter, "&quot;"),
+ '\'' => write!(formatter, "&apos;"),
+ '&' => write!(formatter, "&amp;"),
+ _ => write!(formatter, "{}", char),
+ }?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn special_characters_are_escaped() {
+ assert_eq!(
+ "&lt;&gt;&quot;&apos;&amp;",
+ format!("{}", XmlEscaped(r#"<>"'&"#)),
+ );
+ }
+
+ #[test]
+ fn special_characters_are_escaped_in_string_with_other_characters() {
+ assert_eq!(
+ "The quick brown &quot;🦊&quot; jumps &lt;over&gt; the lazy 🐶",
+ format!(
+ "{}",
+ XmlEscaped(r#"The quick brown "🦊" jumps <over> the lazy 🐶"#)
+ ),
+ );
+ }
+
+ #[test]
+ fn other_characters_are_not_escaped() {
+ let string = "The quick brown 🦊 jumps over the lazy 🐶";
+ assert_eq!(string, format!("{}", XmlEscaped(string)));
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/diff.rs b/src/tools/rustfmt/src/emitter/diff.rs
new file mode 100644
index 000000000..5e1f13446
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/diff.rs
@@ -0,0 +1,137 @@
+use super::*;
+use crate::config::Config;
+use crate::rustfmt_diff::{make_diff, print_diff};
+
+pub(crate) struct DiffEmitter {
+ config: Config,
+}
+
+impl DiffEmitter {
+ pub(crate) fn new(config: Config) -> Self {
+ Self { config }
+ }
+}
+
+impl Emitter for DiffEmitter {
+ fn emit_formatted_file(
+ &mut self,
+ output: &mut dyn Write,
+ FormattedFile {
+ filename,
+ original_text,
+ formatted_text,
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ const CONTEXT_SIZE: usize = 3;
+ let mismatch = make_diff(original_text, formatted_text, CONTEXT_SIZE);
+ let has_diff = !mismatch.is_empty();
+
+ if has_diff {
+ if self.config.print_misformatted_file_names() {
+ writeln!(output, "{}", filename)?;
+ } else {
+ print_diff(
+ mismatch,
+ |line_num| format!("Diff in {} at line {}:", filename, line_num),
+ &self.config,
+ );
+ }
+ } else if original_text != formatted_text {
+ // This occurs when the only difference between the original and formatted values
+ // is the newline style. This happens because The make_diff function compares the
+ // original and formatted values line by line, independent of line endings.
+ writeln!(output, "Incorrect newline style in {}", filename)?;
+ return Ok(EmitterResult { has_diff: true });
+ }
+
+ Ok(EmitterResult { has_diff })
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::config::Config;
+ use crate::FileName;
+ use std::path::PathBuf;
+
+ #[test]
+ fn does_not_print_when_no_files_reformatted() {
+ let mut writer = Vec::new();
+ let config = Config::default();
+ let mut emitter = DiffEmitter::new(config);
+ let result = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from("src/lib.rs")),
+ original_text: "fn empty() {}\n",
+ formatted_text: "fn empty() {}\n",
+ },
+ )
+ .unwrap();
+ assert_eq!(result.has_diff, false);
+ assert_eq!(writer.len(), 0);
+ }
+
+ #[test]
+ fn prints_file_names_when_config_is_enabled() {
+ let bin_file = "src/bin.rs";
+ let bin_original = "fn main() {\nprintln!(\"Hello, world!\");\n}";
+ let bin_formatted = "fn main() {\n println!(\"Hello, world!\");\n}";
+ let lib_file = "src/lib.rs";
+ let lib_original = "fn greet() {\nprintln!(\"Greetings!\");\n}";
+ let lib_formatted = "fn greet() {\n println!(\"Greetings!\");\n}";
+
+ let mut writer = Vec::new();
+ let mut config = Config::default();
+ config.set().print_misformatted_file_names(true);
+ let mut emitter = DiffEmitter::new(config);
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(bin_file)),
+ original_text: bin_original,
+ formatted_text: bin_formatted,
+ },
+ )
+ .unwrap();
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(lib_file)),
+ original_text: lib_original,
+ formatted_text: lib_formatted,
+ },
+ )
+ .unwrap();
+
+ assert_eq!(
+ String::from_utf8(writer).unwrap(),
+ format!("{}\n{}\n", bin_file, lib_file),
+ )
+ }
+
+ #[test]
+ fn prints_newline_message_with_only_newline_style_diff() {
+ let mut writer = Vec::new();
+ let config = Config::default();
+ let mut emitter = DiffEmitter::new(config);
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from("src/lib.rs")),
+ original_text: "fn empty() {}\n",
+ formatted_text: "fn empty() {}\r\n",
+ },
+ )
+ .unwrap();
+ assert_eq!(
+ String::from_utf8(writer).unwrap(),
+ String::from("Incorrect newline style in src/lib.rs\n")
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/files.rs b/src/tools/rustfmt/src/emitter/files.rs
new file mode 100644
index 000000000..6360b73ee
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/files.rs
@@ -0,0 +1,37 @@
+use super::*;
+use std::fs;
+
+#[derive(Debug, Default)]
+pub(crate) struct FilesEmitter {
+ print_misformatted_file_names: bool,
+}
+
+impl FilesEmitter {
+ pub(crate) fn new(print_misformatted_file_names: bool) -> Self {
+ Self {
+ print_misformatted_file_names,
+ }
+ }
+}
+
+impl Emitter for FilesEmitter {
+ fn emit_formatted_file(
+ &mut self,
+ output: &mut dyn Write,
+ FormattedFile {
+ filename,
+ original_text,
+ formatted_text,
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ // Write text directly over original file if there is a diff.
+ let filename = ensure_real_path(filename);
+ if original_text != formatted_text {
+ fs::write(filename, formatted_text)?;
+ if self.print_misformatted_file_names {
+ writeln!(output, "{}", filename.display())?;
+ }
+ }
+ Ok(EmitterResult::default())
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/files_with_backup.rs b/src/tools/rustfmt/src/emitter/files_with_backup.rs
new file mode 100644
index 000000000..4c15f6fa5
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/files_with_backup.rs
@@ -0,0 +1,31 @@
+use super::*;
+use std::fs;
+
+#[derive(Debug, Default)]
+pub(crate) struct FilesWithBackupEmitter;
+
+impl Emitter for FilesWithBackupEmitter {
+ fn emit_formatted_file(
+ &mut self,
+ _output: &mut dyn Write,
+ FormattedFile {
+ filename,
+ original_text,
+ formatted_text,
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ let filename = ensure_real_path(filename);
+ if original_text != formatted_text {
+ // Do a little dance to make writing safer - write to a temp file,
+ // rename the original to a .bk, then rename the temp file to the
+ // original.
+ let tmp_name = filename.with_extension("tmp");
+ let bk_name = filename.with_extension("bk");
+
+ fs::write(&tmp_name, formatted_text)?;
+ fs::rename(filename, bk_name)?;
+ fs::rename(tmp_name, filename)?;
+ }
+ Ok(EmitterResult::default())
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/json.rs b/src/tools/rustfmt/src/emitter/json.rs
new file mode 100644
index 000000000..c7f68d467
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/json.rs
@@ -0,0 +1,346 @@
+use super::*;
+use crate::rustfmt_diff::{make_diff, DiffLine, Mismatch};
+use serde::Serialize;
+use serde_json::to_string as to_json_string;
+use std::io::{self, Write};
+
+#[derive(Debug, Default)]
+pub(crate) struct JsonEmitter {
+ mismatched_files: Vec<MismatchedFile>,
+}
+
+#[derive(Debug, Default, PartialEq, Serialize)]
+struct MismatchedBlock {
+ original_begin_line: u32,
+ original_end_line: u32,
+ expected_begin_line: u32,
+ expected_end_line: u32,
+ original: String,
+ expected: String,
+}
+
+#[derive(Debug, Default, PartialEq, Serialize)]
+struct MismatchedFile {
+ name: String,
+ mismatches: Vec<MismatchedBlock>,
+}
+
+impl Emitter for JsonEmitter {
+ fn emit_footer(&self, output: &mut dyn Write) -> Result<(), io::Error> {
+ writeln!(output, "{}", &to_json_string(&self.mismatched_files)?)
+ }
+
+ fn emit_formatted_file(
+ &mut self,
+ _output: &mut dyn Write,
+ FormattedFile {
+ filename,
+ original_text,
+ formatted_text,
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ const CONTEXT_SIZE: usize = 0;
+ let diff = make_diff(original_text, formatted_text, CONTEXT_SIZE);
+ let has_diff = !diff.is_empty();
+
+ if has_diff {
+ self.add_misformatted_file(filename, diff)?;
+ }
+
+ Ok(EmitterResult { has_diff })
+ }
+}
+
+impl JsonEmitter {
+ fn add_misformatted_file(
+ &mut self,
+ filename: &FileName,
+ diff: Vec<Mismatch>,
+ ) -> Result<(), io::Error> {
+ let mut mismatches = vec![];
+ for mismatch in diff {
+ let original_begin_line = mismatch.line_number_orig;
+ let expected_begin_line = mismatch.line_number;
+ let mut original_end_line = original_begin_line;
+ let mut expected_end_line = expected_begin_line;
+ let mut original_line_counter = 0;
+ let mut expected_line_counter = 0;
+ let mut original = String::new();
+ let mut expected = String::new();
+
+ for line in mismatch.lines {
+ match line {
+ DiffLine::Expected(msg) => {
+ expected_end_line = expected_begin_line + expected_line_counter;
+ expected_line_counter += 1;
+ expected.push_str(&msg);
+ expected.push('\n');
+ }
+ DiffLine::Resulting(msg) => {
+ original_end_line = original_begin_line + original_line_counter;
+ original_line_counter += 1;
+ original.push_str(&msg);
+ original.push('\n');
+ }
+ DiffLine::Context(_) => continue,
+ }
+ }
+
+ mismatches.push(MismatchedBlock {
+ original_begin_line,
+ original_end_line,
+ expected_begin_line,
+ expected_end_line,
+ original,
+ expected,
+ });
+ }
+ self.mismatched_files.push(MismatchedFile {
+ name: format!("{}", filename),
+ mismatches,
+ });
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::FileName;
+ use std::path::PathBuf;
+
+ #[test]
+ fn expected_line_range_correct_when_single_line_split() {
+ let mut emitter = JsonEmitter {
+ mismatched_files: vec![],
+ };
+ let file = "foo/bar.rs";
+ let mismatched_file = MismatchedFile {
+ name: String::from(file),
+ mismatches: vec![MismatchedBlock {
+ original_begin_line: 79,
+ original_end_line: 79,
+ expected_begin_line: 79,
+ expected_end_line: 82,
+ original: String::from("fn Foo<T>() where T: Bar {\n"),
+ expected: String::from("fn Foo<T>()\nwhere\n T: Bar,\n{\n"),
+ }],
+ };
+ let mismatch = Mismatch {
+ line_number: 79,
+ line_number_orig: 79,
+ lines: vec![
+ DiffLine::Resulting(String::from("fn Foo<T>() where T: Bar {")),
+ DiffLine::Expected(String::from("fn Foo<T>()")),
+ DiffLine::Expected(String::from("where")),
+ DiffLine::Expected(String::from(" T: Bar,")),
+ DiffLine::Expected(String::from("{")),
+ ],
+ };
+
+ let _ = emitter
+ .add_misformatted_file(&FileName::Real(PathBuf::from(file)), vec![mismatch])
+ .unwrap();
+
+ assert_eq!(emitter.mismatched_files.len(), 1);
+ assert_eq!(emitter.mismatched_files[0], mismatched_file);
+ }
+
+ #[test]
+ fn context_lines_ignored() {
+ let mut emitter = JsonEmitter {
+ mismatched_files: vec![],
+ };
+ let file = "src/lib.rs";
+ let mismatched_file = MismatchedFile {
+ name: String::from(file),
+ mismatches: vec![MismatchedBlock {
+ original_begin_line: 5,
+ original_end_line: 5,
+ expected_begin_line: 5,
+ expected_end_line: 5,
+ original: String::from(
+ "fn foo(_x: &u64) -> Option<&(dyn::std::error::Error + 'static)> {\n",
+ ),
+ expected: String::from(
+ "fn foo(_x: &u64) -> Option<&(dyn ::std::error::Error + 'static)> {\n",
+ ),
+ }],
+ };
+ let mismatch = Mismatch {
+ line_number: 5,
+ line_number_orig: 5,
+ lines: vec![
+ DiffLine::Context(String::new()),
+ DiffLine::Resulting(String::from(
+ "fn foo(_x: &u64) -> Option<&(dyn::std::error::Error + 'static)> {",
+ )),
+ DiffLine::Context(String::new()),
+ DiffLine::Expected(String::from(
+ "fn foo(_x: &u64) -> Option<&(dyn ::std::error::Error + 'static)> {",
+ )),
+ DiffLine::Context(String::new()),
+ ],
+ };
+
+ let _ = emitter
+ .add_misformatted_file(&FileName::Real(PathBuf::from(file)), vec![mismatch])
+ .unwrap();
+
+ assert_eq!(emitter.mismatched_files.len(), 1);
+ assert_eq!(emitter.mismatched_files[0], mismatched_file);
+ }
+
+ #[test]
+ fn emits_empty_array_on_no_diffs() {
+ let mut writer = Vec::new();
+ let mut emitter = JsonEmitter::default();
+ let _ = emitter.emit_header(&mut writer);
+ let result = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from("src/lib.rs")),
+ original_text: "fn empty() {}\n",
+ formatted_text: "fn empty() {}\n",
+ },
+ )
+ .unwrap();
+ let _ = emitter.emit_footer(&mut writer);
+ assert_eq!(result.has_diff, false);
+ assert_eq!(&writer[..], "[]\n".as_bytes());
+ }
+
+ #[test]
+ fn emits_array_with_files_with_diffs() {
+ let file_name = "src/bin.rs";
+ let original = vec![
+ "fn main() {",
+ "println!(\"Hello, world!\");",
+ "}",
+ "",
+ "#[cfg(test)]",
+ "mod tests {",
+ "#[test]",
+ "fn it_works() {",
+ " assert_eq!(2 + 2, 4);",
+ "}",
+ "}",
+ ];
+ let formatted = vec![
+ "fn main() {",
+ " println!(\"Hello, world!\");",
+ "}",
+ "",
+ "#[cfg(test)]",
+ "mod tests {",
+ " #[test]",
+ " fn it_works() {",
+ " assert_eq!(2 + 2, 4);",
+ " }",
+ "}",
+ ];
+ let mut writer = Vec::new();
+ let mut emitter = JsonEmitter::default();
+ let _ = emitter.emit_header(&mut writer);
+ let result = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(file_name)),
+ original_text: &original.join("\n"),
+ formatted_text: &formatted.join("\n"),
+ },
+ )
+ .unwrap();
+ let _ = emitter.emit_footer(&mut writer);
+ let exp_json = to_json_string(&vec![MismatchedFile {
+ name: String::from(file_name),
+ mismatches: vec![
+ MismatchedBlock {
+ original_begin_line: 2,
+ original_end_line: 2,
+ expected_begin_line: 2,
+ expected_end_line: 2,
+ original: String::from("println!(\"Hello, world!\");\n"),
+ expected: String::from(" println!(\"Hello, world!\");\n"),
+ },
+ MismatchedBlock {
+ original_begin_line: 7,
+ original_end_line: 10,
+ expected_begin_line: 7,
+ expected_end_line: 10,
+ original: String::from(
+ "#[test]\nfn it_works() {\n assert_eq!(2 + 2, 4);\n}\n",
+ ),
+ expected: String::from(
+ " #[test]\n fn it_works() {\n assert_eq!(2 + 2, 4);\n }\n",
+ ),
+ },
+ ],
+ }])
+ .unwrap();
+ assert_eq!(result.has_diff, true);
+ assert_eq!(&writer[..], format!("{}\n", exp_json).as_bytes());
+ }
+
+ #[test]
+ fn emits_valid_json_with_multiple_files() {
+ let bin_file = "src/bin.rs";
+ let bin_original = vec!["fn main() {", "println!(\"Hello, world!\");", "}"];
+ let bin_formatted = vec!["fn main() {", " println!(\"Hello, world!\");", "}"];
+ let lib_file = "src/lib.rs";
+ let lib_original = vec!["fn greet() {", "println!(\"Greetings!\");", "}"];
+ let lib_formatted = vec!["fn greet() {", " println!(\"Greetings!\");", "}"];
+ let mut writer = Vec::new();
+ let mut emitter = JsonEmitter::default();
+ let _ = emitter.emit_header(&mut writer);
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(bin_file)),
+ original_text: &bin_original.join("\n"),
+ formatted_text: &bin_formatted.join("\n"),
+ },
+ )
+ .unwrap();
+ let _ = emitter
+ .emit_formatted_file(
+ &mut writer,
+ FormattedFile {
+ filename: &FileName::Real(PathBuf::from(lib_file)),
+ original_text: &lib_original.join("\n"),
+ formatted_text: &lib_formatted.join("\n"),
+ },
+ )
+ .unwrap();
+ let _ = emitter.emit_footer(&mut writer);
+ let exp_bin = MismatchedFile {
+ name: String::from(bin_file),
+ mismatches: vec![MismatchedBlock {
+ original_begin_line: 2,
+ original_end_line: 2,
+ expected_begin_line: 2,
+ expected_end_line: 2,
+ original: String::from("println!(\"Hello, world!\");\n"),
+ expected: String::from(" println!(\"Hello, world!\");\n"),
+ }],
+ };
+
+ let exp_lib = MismatchedFile {
+ name: String::from(lib_file),
+ mismatches: vec![MismatchedBlock {
+ original_begin_line: 2,
+ original_end_line: 2,
+ expected_begin_line: 2,
+ expected_end_line: 2,
+ original: String::from("println!(\"Greetings!\");\n"),
+ expected: String::from(" println!(\"Greetings!\");\n"),
+ }],
+ };
+
+ let exp_json = to_json_string(&vec![exp_bin, exp_lib]).unwrap();
+ assert_eq!(&writer[..], format!("{}\n", exp_json).as_bytes());
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/modified_lines.rs b/src/tools/rustfmt/src/emitter/modified_lines.rs
new file mode 100644
index 000000000..94ff570a8
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/modified_lines.rs
@@ -0,0 +1,24 @@
+use super::*;
+use crate::rustfmt_diff::{make_diff, ModifiedLines};
+use std::io::Write;
+
+#[derive(Debug, Default)]
+pub(crate) struct ModifiedLinesEmitter;
+
+impl Emitter for ModifiedLinesEmitter {
+ fn emit_formatted_file(
+ &mut self,
+ output: &mut dyn Write,
+ FormattedFile {
+ original_text,
+ formatted_text,
+ ..
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ const CONTEXT_SIZE: usize = 0;
+ let mismatch = make_diff(original_text, formatted_text, CONTEXT_SIZE);
+ let has_diff = !mismatch.is_empty();
+ write!(output, "{}", ModifiedLines::from(mismatch))?;
+ Ok(EmitterResult { has_diff })
+ }
+}
diff --git a/src/tools/rustfmt/src/emitter/stdout.rs b/src/tools/rustfmt/src/emitter/stdout.rs
new file mode 100644
index 000000000..9fddd515e
--- /dev/null
+++ b/src/tools/rustfmt/src/emitter/stdout.rs
@@ -0,0 +1,32 @@
+use super::*;
+use crate::config::Verbosity;
+use std::io::Write;
+
+#[derive(Debug)]
+pub(crate) struct StdoutEmitter {
+ verbosity: Verbosity,
+}
+
+impl StdoutEmitter {
+ pub(crate) fn new(verbosity: Verbosity) -> Self {
+ Self { verbosity }
+ }
+}
+
+impl Emitter for StdoutEmitter {
+ fn emit_formatted_file(
+ &mut self,
+ output: &mut dyn Write,
+ FormattedFile {
+ filename,
+ formatted_text,
+ ..
+ }: FormattedFile<'_>,
+ ) -> Result<EmitterResult, io::Error> {
+ if self.verbosity != Verbosity::Quiet {
+ writeln!(output, "{}:\n", filename)?;
+ }
+ write!(output, "{}", formatted_text)?;
+ Ok(EmitterResult::default())
+ }
+}
diff --git a/src/tools/rustfmt/src/expr.rs b/src/tools/rustfmt/src/expr.rs
new file mode 100644
index 000000000..a7b73ba78
--- /dev/null
+++ b/src/tools/rustfmt/src/expr.rs
@@ -0,0 +1,2159 @@
+use std::borrow::Cow;
+use std::cmp::min;
+
+use itertools::Itertools;
+use rustc_ast::token::{Delimiter, LitKind};
+use rustc_ast::{ast, ptr};
+use rustc_span::{BytePos, Span};
+
+use crate::chains::rewrite_chain;
+use crate::closures;
+use crate::comment::{
+ combine_strs_with_missing_comments, contains_comment, recover_comment_removed, rewrite_comment,
+ rewrite_missing_comment, CharClasses, FindUncommented,
+};
+use crate::config::lists::*;
+use crate::config::{Config, ControlBraceStyle, HexLiteralCase, IndentStyle, Version};
+use crate::lists::{
+ definitive_tactic, itemize_list, shape_for_tactic, struct_lit_formatting, struct_lit_shape,
+ struct_lit_tactic, write_list, ListFormatting, Separator,
+};
+use crate::macros::{rewrite_macro, MacroPosition};
+use crate::matches::rewrite_match;
+use crate::overflow::{self, IntoOverflowableItem, OverflowableItem};
+use crate::pairs::{rewrite_all_pairs, rewrite_pair, PairParts};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::{Indent, Shape};
+use crate::source_map::{LineRangeUtils, SpanUtils};
+use crate::spanned::Spanned;
+use crate::string::{rewrite_string, StringFormat};
+use crate::types::{rewrite_path, PathContext};
+use crate::utils::{
+ colon_spaces, contains_skip, count_newlines, first_line_ends_with, inner_attributes,
+ last_line_extendable, last_line_width, mk_sp, outer_attributes, semicolon_for_expr,
+ unicode_str_width, wrap_str,
+};
+use crate::vertical::rewrite_with_alignment;
+use crate::visitor::FmtVisitor;
+
+impl Rewrite for ast::Expr {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ format_expr(self, ExprType::SubExpression, context, shape)
+ }
+}
+
+#[derive(Copy, Clone, PartialEq)]
+pub(crate) enum ExprType {
+ Statement,
+ SubExpression,
+}
+
+pub(crate) fn format_expr(
+ expr: &ast::Expr,
+ expr_type: ExprType,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ skip_out_of_file_lines_range!(context, expr.span);
+
+ if contains_skip(&*expr.attrs) {
+ return Some(context.snippet(expr.span()).to_owned());
+ }
+ let shape = if expr_type == ExprType::Statement && semicolon_for_expr(context, expr) {
+ shape.sub_width(1)?
+ } else {
+ shape
+ };
+
+ let expr_rw = match expr.kind {
+ ast::ExprKind::Array(ref expr_vec) => rewrite_array(
+ "",
+ expr_vec.iter(),
+ expr.span,
+ context,
+ shape,
+ choose_separator_tactic(context, expr.span),
+ None,
+ ),
+ ast::ExprKind::Lit(ref l) => {
+ if let Some(expr_rw) = rewrite_literal(context, l, shape) {
+ Some(expr_rw)
+ } else {
+ if let LitKind::StrRaw(_) = l.token.kind {
+ Some(context.snippet(l.span).trim().into())
+ } else {
+ None
+ }
+ }
+ }
+ ast::ExprKind::Call(ref callee, ref args) => {
+ let inner_span = mk_sp(callee.span.hi(), expr.span.hi());
+ let callee_str = callee.rewrite(context, shape)?;
+ rewrite_call(context, &callee_str, args, inner_span, shape)
+ }
+ ast::ExprKind::Paren(ref subexpr) => rewrite_paren(context, subexpr, shape, expr.span),
+ ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
+ // FIXME: format comments between operands and operator
+ rewrite_all_pairs(expr, shape, context).or_else(|| {
+ rewrite_pair(
+ &**lhs,
+ &**rhs,
+ PairParts::infix(&format!(" {} ", context.snippet(op.span))),
+ context,
+ shape,
+ context.config.binop_separator(),
+ )
+ })
+ }
+ ast::ExprKind::Unary(op, ref subexpr) => rewrite_unary_op(context, op, subexpr, shape),
+ ast::ExprKind::Struct(ref struct_expr) => {
+ let ast::StructExpr {
+ qself,
+ fields,
+ path,
+ rest,
+ } = &**struct_expr;
+ rewrite_struct_lit(
+ context,
+ path,
+ qself.as_ref(),
+ fields,
+ rest,
+ &expr.attrs,
+ expr.span,
+ shape,
+ )
+ }
+ ast::ExprKind::Tup(ref items) => {
+ rewrite_tuple(context, items.iter(), expr.span, shape, items.len() == 1)
+ }
+ ast::ExprKind::Let(..) => None,
+ ast::ExprKind::If(..)
+ | ast::ExprKind::ForLoop(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::While(..) => to_control_flow(expr, expr_type)
+ .and_then(|control_flow| control_flow.rewrite(context, shape)),
+ ast::ExprKind::ConstBlock(ref anon_const) => {
+ Some(format!("const {}", anon_const.rewrite(context, shape)?))
+ }
+ ast::ExprKind::Block(ref block, opt_label) => {
+ match expr_type {
+ ExprType::Statement => {
+ if is_unsafe_block(block) {
+ rewrite_block(block, Some(&expr.attrs), opt_label, context, shape)
+ } else if let rw @ Some(_) =
+ rewrite_empty_block(context, block, Some(&expr.attrs), opt_label, "", shape)
+ {
+ // Rewrite block without trying to put it in a single line.
+ rw
+ } else {
+ let prefix = block_prefix(context, block, shape)?;
+
+ rewrite_block_with_visitor(
+ context,
+ &prefix,
+ block,
+ Some(&expr.attrs),
+ opt_label,
+ shape,
+ true,
+ )
+ }
+ }
+ ExprType::SubExpression => {
+ rewrite_block(block, Some(&expr.attrs), opt_label, context, shape)
+ }
+ }
+ }
+ ast::ExprKind::Match(ref cond, ref arms) => {
+ rewrite_match(context, cond, arms, shape, expr.span, &expr.attrs)
+ }
+ ast::ExprKind::Path(ref qself, ref path) => {
+ rewrite_path(context, PathContext::Expr, qself.as_ref(), path, shape)
+ }
+ ast::ExprKind::Assign(ref lhs, ref rhs, _) => {
+ rewrite_assignment(context, lhs, rhs, None, shape)
+ }
+ ast::ExprKind::AssignOp(ref op, ref lhs, ref rhs) => {
+ rewrite_assignment(context, lhs, rhs, Some(op), shape)
+ }
+ ast::ExprKind::Continue(ref opt_label) => {
+ let id_str = match *opt_label {
+ Some(label) => format!(" {}", label.ident),
+ None => String::new(),
+ };
+ Some(format!("continue{}", id_str))
+ }
+ ast::ExprKind::Break(ref opt_label, ref opt_expr) => {
+ let id_str = match *opt_label {
+ Some(label) => format!(" {}", label.ident),
+ None => String::new(),
+ };
+
+ if let Some(ref expr) = *opt_expr {
+ rewrite_unary_prefix(context, &format!("break{} ", id_str), &**expr, shape)
+ } else {
+ Some(format!("break{}", id_str))
+ }
+ }
+ ast::ExprKind::Yield(ref opt_expr) => {
+ if let Some(ref expr) = *opt_expr {
+ rewrite_unary_prefix(context, "yield ", &**expr, shape)
+ } else {
+ Some("yield".to_string())
+ }
+ }
+ ast::ExprKind::Closure(
+ ref binder,
+ capture,
+ ref is_async,
+ movability,
+ ref fn_decl,
+ ref body,
+ _,
+ ) => closures::rewrite_closure(
+ binder, capture, is_async, movability, fn_decl, body, expr.span, context, shape,
+ ),
+ ast::ExprKind::Try(..)
+ | ast::ExprKind::Field(..)
+ | ast::ExprKind::MethodCall(..)
+ | ast::ExprKind::Await(_) => rewrite_chain(expr, context, shape),
+ ast::ExprKind::MacCall(ref mac) => {
+ rewrite_macro(mac, None, context, shape, MacroPosition::Expression).or_else(|| {
+ wrap_str(
+ context.snippet(expr.span).to_owned(),
+ context.config.max_width(),
+ shape,
+ )
+ })
+ }
+ ast::ExprKind::Ret(None) => Some("return".to_owned()),
+ ast::ExprKind::Ret(Some(ref expr)) => {
+ rewrite_unary_prefix(context, "return ", &**expr, shape)
+ }
+ ast::ExprKind::Yeet(None) => Some("do yeet".to_owned()),
+ ast::ExprKind::Yeet(Some(ref expr)) => {
+ rewrite_unary_prefix(context, "do yeet ", &**expr, shape)
+ }
+ ast::ExprKind::Box(ref expr) => rewrite_unary_prefix(context, "box ", &**expr, shape),
+ ast::ExprKind::AddrOf(borrow_kind, mutability, ref expr) => {
+ rewrite_expr_addrof(context, borrow_kind, mutability, expr, shape)
+ }
+ ast::ExprKind::Cast(ref expr, ref ty) => rewrite_pair(
+ &**expr,
+ &**ty,
+ PairParts::infix(" as "),
+ context,
+ shape,
+ SeparatorPlace::Front,
+ ),
+ ast::ExprKind::Type(ref expr, ref ty) => rewrite_pair(
+ &**expr,
+ &**ty,
+ PairParts::infix(": "),
+ context,
+ shape,
+ SeparatorPlace::Back,
+ ),
+ ast::ExprKind::Index(ref expr, ref index) => {
+ rewrite_index(&**expr, &**index, context, shape)
+ }
+ ast::ExprKind::Repeat(ref expr, ref repeats) => rewrite_pair(
+ &**expr,
+ &*repeats.value,
+ PairParts::new("[", "; ", "]"),
+ context,
+ shape,
+ SeparatorPlace::Back,
+ ),
+ ast::ExprKind::Range(ref lhs, ref rhs, limits) => {
+ let delim = match limits {
+ ast::RangeLimits::HalfOpen => "..",
+ ast::RangeLimits::Closed => "..=",
+ };
+
+ fn needs_space_before_range(context: &RewriteContext<'_>, lhs: &ast::Expr) -> bool {
+ match lhs.kind {
+ ast::ExprKind::Lit(ref lit) => match lit.kind {
+ ast::LitKind::Float(_, ast::LitFloatType::Unsuffixed) => {
+ context.snippet(lit.span).ends_with('.')
+ }
+ _ => false,
+ },
+ ast::ExprKind::Unary(_, ref expr) => needs_space_before_range(context, expr),
+ _ => false,
+ }
+ }
+
+ fn needs_space_after_range(rhs: &ast::Expr) -> bool {
+ // Don't format `.. ..` into `....`, which is invalid.
+ //
+ // This check is unnecessary for `lhs`, because a range
+ // starting from another range needs parentheses as `(x ..) ..`
+ // (`x .. ..` is a range from `x` to `..`).
+ matches!(rhs.kind, ast::ExprKind::Range(None, _, _))
+ }
+
+ let default_sp_delim = |lhs: Option<&ast::Expr>, rhs: Option<&ast::Expr>| {
+ let space_if = |b: bool| if b { " " } else { "" };
+
+ format!(
+ "{}{}{}",
+ lhs.map_or("", |lhs| space_if(needs_space_before_range(context, lhs))),
+ delim,
+ rhs.map_or("", |rhs| space_if(needs_space_after_range(rhs))),
+ )
+ };
+
+ match (lhs.as_ref().map(|x| &**x), rhs.as_ref().map(|x| &**x)) {
+ (Some(lhs), Some(rhs)) => {
+ let sp_delim = if context.config.spaces_around_ranges() {
+ format!(" {} ", delim)
+ } else {
+ default_sp_delim(Some(lhs), Some(rhs))
+ };
+ rewrite_pair(
+ &*lhs,
+ &*rhs,
+ PairParts::infix(&sp_delim),
+ context,
+ shape,
+ context.config.binop_separator(),
+ )
+ }
+ (None, Some(rhs)) => {
+ let sp_delim = if context.config.spaces_around_ranges() {
+ format!("{} ", delim)
+ } else {
+ default_sp_delim(None, Some(rhs))
+ };
+ rewrite_unary_prefix(context, &sp_delim, &*rhs, shape)
+ }
+ (Some(lhs), None) => {
+ let sp_delim = if context.config.spaces_around_ranges() {
+ format!(" {}", delim)
+ } else {
+ default_sp_delim(Some(lhs), None)
+ };
+ rewrite_unary_suffix(context, &sp_delim, &*lhs, shape)
+ }
+ (None, None) => Some(delim.to_owned()),
+ }
+ }
+ // We do not format these expressions yet, but they should still
+ // satisfy our width restrictions.
+ // Style Guide RFC for InlineAsm variant pending
+ // https://github.com/rust-dev-tools/fmt-rfcs/issues/152
+ ast::ExprKind::InlineAsm(..) => Some(context.snippet(expr.span).to_owned()),
+ ast::ExprKind::TryBlock(ref block) => {
+ if let rw @ Some(_) =
+ rewrite_single_line_block(context, "try ", block, Some(&expr.attrs), None, shape)
+ {
+ rw
+ } else {
+ // 9 = `try `
+ let budget = shape.width.saturating_sub(9);
+ Some(format!(
+ "{}{}",
+ "try ",
+ rewrite_block(
+ block,
+ Some(&expr.attrs),
+ None,
+ context,
+ Shape::legacy(budget, shape.indent)
+ )?
+ ))
+ }
+ }
+ ast::ExprKind::Async(capture_by, _node_id, ref block) => {
+ let mover = if capture_by == ast::CaptureBy::Value {
+ "move "
+ } else {
+ ""
+ };
+ if let rw @ Some(_) = rewrite_single_line_block(
+ context,
+ format!("{}{}", "async ", mover).as_str(),
+ block,
+ Some(&expr.attrs),
+ None,
+ shape,
+ ) {
+ rw
+ } else {
+ // 6 = `async `
+ let budget = shape.width.saturating_sub(6);
+ Some(format!(
+ "{}{}{}",
+ "async ",
+ mover,
+ rewrite_block(
+ block,
+ Some(&expr.attrs),
+ None,
+ context,
+ Shape::legacy(budget, shape.indent)
+ )?
+ ))
+ }
+ }
+ ast::ExprKind::Underscore => Some("_".to_owned()),
+ ast::ExprKind::Err => None,
+ };
+
+ expr_rw
+ .and_then(|expr_str| recover_comment_removed(expr_str, expr.span, context))
+ .and_then(|expr_str| {
+ let attrs = outer_attributes(&expr.attrs);
+ let attrs_str = attrs.rewrite(context, shape)?;
+ let span = mk_sp(
+ attrs.last().map_or(expr.span.lo(), |attr| attr.span.hi()),
+ expr.span.lo(),
+ );
+ combine_strs_with_missing_comments(context, &attrs_str, &expr_str, span, shape, false)
+ })
+}
+
+pub(crate) fn rewrite_array<'a, T: 'a + IntoOverflowableItem<'a>>(
+ name: &'a str,
+ exprs: impl Iterator<Item = &'a T>,
+ span: Span,
+ context: &'a RewriteContext<'_>,
+ shape: Shape,
+ force_separator_tactic: Option<SeparatorTactic>,
+ delim_token: Option<Delimiter>,
+) -> Option<String> {
+ overflow::rewrite_with_square_brackets(
+ context,
+ name,
+ exprs,
+ shape,
+ span,
+ force_separator_tactic,
+ delim_token,
+ )
+}
+
+fn rewrite_empty_block(
+ context: &RewriteContext<'_>,
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+ label: Option<ast::Label>,
+ prefix: &str,
+ shape: Shape,
+) -> Option<String> {
+ if block_has_statements(block) {
+ return None;
+ }
+
+ let label_str = rewrite_label(label);
+ if attrs.map_or(false, |a| !inner_attributes(a).is_empty()) {
+ return None;
+ }
+
+ if !block_contains_comment(context, block) && shape.width >= 2 {
+ return Some(format!("{}{}{{}}", prefix, label_str));
+ }
+
+ // If a block contains only a single-line comment, then leave it on one line.
+ let user_str = context.snippet(block.span);
+ let user_str = user_str.trim();
+ if user_str.starts_with('{') && user_str.ends_with('}') {
+ let comment_str = user_str[1..user_str.len() - 1].trim();
+ if block.stmts.is_empty()
+ && !comment_str.contains('\n')
+ && !comment_str.starts_with("//")
+ && comment_str.len() + 4 <= shape.width
+ {
+ return Some(format!("{}{}{{ {} }}", prefix, label_str, comment_str));
+ }
+ }
+
+ None
+}
+
+fn block_prefix(context: &RewriteContext<'_>, block: &ast::Block, shape: Shape) -> Option<String> {
+ Some(match block.rules {
+ ast::BlockCheckMode::Unsafe(..) => {
+ let snippet = context.snippet(block.span);
+ let open_pos = snippet.find_uncommented("{")?;
+ // Extract comment between unsafe and block start.
+ let trimmed = &snippet[6..open_pos].trim();
+
+ if !trimmed.is_empty() {
+ // 9 = "unsafe {".len(), 7 = "unsafe ".len()
+ let budget = shape.width.checked_sub(9)?;
+ format!(
+ "unsafe {} ",
+ rewrite_comment(
+ trimmed,
+ true,
+ Shape::legacy(budget, shape.indent + 7),
+ context.config,
+ )?
+ )
+ } else {
+ "unsafe ".to_owned()
+ }
+ }
+ ast::BlockCheckMode::Default => String::new(),
+ })
+}
+
+fn rewrite_single_line_block(
+ context: &RewriteContext<'_>,
+ prefix: &str,
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+ label: Option<ast::Label>,
+ shape: Shape,
+) -> Option<String> {
+ if is_simple_block(context, block, attrs) {
+ let expr_shape = shape.offset_left(last_line_width(prefix))?;
+ let expr_str = block.stmts[0].rewrite(context, expr_shape)?;
+ let label_str = rewrite_label(label);
+ let result = format!("{}{}{{ {} }}", prefix, label_str, expr_str);
+ if result.len() <= shape.width && !result.contains('\n') {
+ return Some(result);
+ }
+ }
+ None
+}
+
+pub(crate) fn rewrite_block_with_visitor(
+ context: &RewriteContext<'_>,
+ prefix: &str,
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+ label: Option<ast::Label>,
+ shape: Shape,
+ has_braces: bool,
+) -> Option<String> {
+ if let rw @ Some(_) = rewrite_empty_block(context, block, attrs, label, prefix, shape) {
+ return rw;
+ }
+
+ let mut visitor = FmtVisitor::from_context(context);
+ visitor.block_indent = shape.indent;
+ visitor.is_if_else_block = context.is_if_else_block();
+ match (block.rules, label) {
+ (ast::BlockCheckMode::Unsafe(..), _) | (ast::BlockCheckMode::Default, Some(_)) => {
+ let snippet = context.snippet(block.span);
+ let open_pos = snippet.find_uncommented("{")?;
+ visitor.last_pos = block.span.lo() + BytePos(open_pos as u32)
+ }
+ (ast::BlockCheckMode::Default, None) => visitor.last_pos = block.span.lo(),
+ }
+
+ let inner_attrs = attrs.map(inner_attributes);
+ let label_str = rewrite_label(label);
+ visitor.visit_block(block, inner_attrs.as_deref(), has_braces);
+ let visitor_context = visitor.get_context();
+ context
+ .skipped_range
+ .borrow_mut()
+ .append(&mut visitor_context.skipped_range.borrow_mut());
+ Some(format!("{}{}{}", prefix, label_str, visitor.buffer))
+}
+
+impl Rewrite for ast::Block {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ rewrite_block(self, None, None, context, shape)
+ }
+}
+
+fn rewrite_block(
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+ label: Option<ast::Label>,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ let prefix = block_prefix(context, block, shape)?;
+
+ // shape.width is used only for the single line case: either the empty block `{}`,
+ // or an unsafe expression `unsafe { e }`.
+ if let rw @ Some(_) = rewrite_empty_block(context, block, attrs, label, &prefix, shape) {
+ return rw;
+ }
+
+ let result = rewrite_block_with_visitor(context, &prefix, block, attrs, label, shape, true);
+ if let Some(ref result_str) = result {
+ if result_str.lines().count() <= 3 {
+ if let rw @ Some(_) =
+ rewrite_single_line_block(context, &prefix, block, attrs, label, shape)
+ {
+ return rw;
+ }
+ }
+ }
+
+ result
+}
+
+// Rewrite condition if the given expression has one.
+pub(crate) fn rewrite_cond(
+ context: &RewriteContext<'_>,
+ expr: &ast::Expr,
+ shape: Shape,
+) -> Option<String> {
+ match expr.kind {
+ ast::ExprKind::Match(ref cond, _) => {
+ // `match `cond` {`
+ let cond_shape = match context.config.indent_style() {
+ IndentStyle::Visual => shape.shrink_left(6).and_then(|s| s.sub_width(2))?,
+ IndentStyle::Block => shape.offset_left(8)?,
+ };
+ cond.rewrite(context, cond_shape)
+ }
+ _ => to_control_flow(expr, ExprType::SubExpression).and_then(|control_flow| {
+ let alt_block_sep =
+ String::from("\n") + &shape.indent.block_only().to_string(context.config);
+ control_flow
+ .rewrite_cond(context, shape, &alt_block_sep)
+ .map(|rw| rw.0)
+ }),
+ }
+}
+
+// Abstraction over control flow expressions
+#[derive(Debug)]
+struct ControlFlow<'a> {
+ cond: Option<&'a ast::Expr>,
+ block: &'a ast::Block,
+ else_block: Option<&'a ast::Expr>,
+ label: Option<ast::Label>,
+ pat: Option<&'a ast::Pat>,
+ keyword: &'a str,
+ matcher: &'a str,
+ connector: &'a str,
+ allow_single_line: bool,
+ // HACK: `true` if this is an `if` expression in an `else if`.
+ nested_if: bool,
+ span: Span,
+}
+
+fn extract_pats_and_cond(expr: &ast::Expr) -> (Option<&ast::Pat>, &ast::Expr) {
+ match expr.kind {
+ ast::ExprKind::Let(ref pat, ref cond, _) => (Some(pat), cond),
+ _ => (None, expr),
+ }
+}
+
+// FIXME: Refactor this.
+fn to_control_flow(expr: &ast::Expr, expr_type: ExprType) -> Option<ControlFlow<'_>> {
+ match expr.kind {
+ ast::ExprKind::If(ref cond, ref if_block, ref else_block) => {
+ let (pat, cond) = extract_pats_and_cond(cond);
+ Some(ControlFlow::new_if(
+ cond,
+ pat,
+ if_block,
+ else_block.as_ref().map(|e| &**e),
+ expr_type == ExprType::SubExpression,
+ false,
+ expr.span,
+ ))
+ }
+ ast::ExprKind::ForLoop(ref pat, ref cond, ref block, label) => {
+ Some(ControlFlow::new_for(pat, cond, block, label, expr.span))
+ }
+ ast::ExprKind::Loop(ref block, label) => {
+ Some(ControlFlow::new_loop(block, label, expr.span))
+ }
+ ast::ExprKind::While(ref cond, ref block, label) => {
+ let (pat, cond) = extract_pats_and_cond(cond);
+ Some(ControlFlow::new_while(pat, cond, block, label, expr.span))
+ }
+ _ => None,
+ }
+}
+
+fn choose_matcher(pat: Option<&ast::Pat>) -> &'static str {
+ pat.map_or("", |_| "let")
+}
+
+impl<'a> ControlFlow<'a> {
+ fn new_if(
+ cond: &'a ast::Expr,
+ pat: Option<&'a ast::Pat>,
+ block: &'a ast::Block,
+ else_block: Option<&'a ast::Expr>,
+ allow_single_line: bool,
+ nested_if: bool,
+ span: Span,
+ ) -> ControlFlow<'a> {
+ let matcher = choose_matcher(pat);
+ ControlFlow {
+ cond: Some(cond),
+ block,
+ else_block,
+ label: None,
+ pat,
+ keyword: "if",
+ matcher,
+ connector: " =",
+ allow_single_line,
+ nested_if,
+ span,
+ }
+ }
+
+ fn new_loop(block: &'a ast::Block, label: Option<ast::Label>, span: Span) -> ControlFlow<'a> {
+ ControlFlow {
+ cond: None,
+ block,
+ else_block: None,
+ label,
+ pat: None,
+ keyword: "loop",
+ matcher: "",
+ connector: "",
+ allow_single_line: false,
+ nested_if: false,
+ span,
+ }
+ }
+
+ fn new_while(
+ pat: Option<&'a ast::Pat>,
+ cond: &'a ast::Expr,
+ block: &'a ast::Block,
+ label: Option<ast::Label>,
+ span: Span,
+ ) -> ControlFlow<'a> {
+ let matcher = choose_matcher(pat);
+ ControlFlow {
+ cond: Some(cond),
+ block,
+ else_block: None,
+ label,
+ pat,
+ keyword: "while",
+ matcher,
+ connector: " =",
+ allow_single_line: false,
+ nested_if: false,
+ span,
+ }
+ }
+
+ fn new_for(
+ pat: &'a ast::Pat,
+ cond: &'a ast::Expr,
+ block: &'a ast::Block,
+ label: Option<ast::Label>,
+ span: Span,
+ ) -> ControlFlow<'a> {
+ ControlFlow {
+ cond: Some(cond),
+ block,
+ else_block: None,
+ label,
+ pat: Some(pat),
+ keyword: "for",
+ matcher: "",
+ connector: " in",
+ allow_single_line: false,
+ nested_if: false,
+ span,
+ }
+ }
+
+ fn rewrite_single_line(
+ &self,
+ pat_expr_str: &str,
+ context: &RewriteContext<'_>,
+ width: usize,
+ ) -> Option<String> {
+ assert!(self.allow_single_line);
+ let else_block = self.else_block?;
+ let fixed_cost = self.keyword.len() + " { } else { }".len();
+
+ if let ast::ExprKind::Block(ref else_node, _) = else_block.kind {
+ if !is_simple_block(context, self.block, None)
+ || !is_simple_block(context, else_node, None)
+ || pat_expr_str.contains('\n')
+ {
+ return None;
+ }
+
+ let new_width = width.checked_sub(pat_expr_str.len() + fixed_cost)?;
+ let expr = &self.block.stmts[0];
+ let if_str = expr.rewrite(context, Shape::legacy(new_width, Indent::empty()))?;
+
+ let new_width = new_width.checked_sub(if_str.len())?;
+ let else_expr = &else_node.stmts[0];
+ let else_str = else_expr.rewrite(context, Shape::legacy(new_width, Indent::empty()))?;
+
+ if if_str.contains('\n') || else_str.contains('\n') {
+ return None;
+ }
+
+ let result = format!(
+ "{} {} {{ {} }} else {{ {} }}",
+ self.keyword, pat_expr_str, if_str, else_str
+ );
+
+ if result.len() <= width {
+ return Some(result);
+ }
+ }
+
+ None
+ }
+}
+
+/// Returns `true` if the last line of pat_str has leading whitespace and it is wider than the
+/// shape's indent.
+fn last_line_offsetted(start_column: usize, pat_str: &str) -> bool {
+ let mut leading_whitespaces = 0;
+ for c in pat_str.chars().rev() {
+ match c {
+ '\n' => break,
+ _ if c.is_whitespace() => leading_whitespaces += 1,
+ _ => leading_whitespaces = 0,
+ }
+ }
+ leading_whitespaces > start_column
+}
+
+impl<'a> ControlFlow<'a> {
+ fn rewrite_pat_expr(
+ &self,
+ context: &RewriteContext<'_>,
+ expr: &ast::Expr,
+ shape: Shape,
+ offset: usize,
+ ) -> Option<String> {
+ debug!("rewrite_pat_expr {:?} {:?} {:?}", shape, self.pat, expr);
+
+ let cond_shape = shape.offset_left(offset)?;
+ if let Some(pat) = self.pat {
+ let matcher = if self.matcher.is_empty() {
+ self.matcher.to_owned()
+ } else {
+ format!("{} ", self.matcher)
+ };
+ let pat_shape = cond_shape
+ .offset_left(matcher.len())?
+ .sub_width(self.connector.len())?;
+ let pat_string = pat.rewrite(context, pat_shape)?;
+ let comments_lo = context
+ .snippet_provider
+ .span_after(self.span.with_lo(pat.span.hi()), self.connector.trim());
+ let comments_span = mk_sp(comments_lo, expr.span.lo());
+ return rewrite_assign_rhs_with_comments(
+ context,
+ &format!("{}{}{}", matcher, pat_string, self.connector),
+ expr,
+ cond_shape,
+ &RhsAssignKind::Expr(&expr.kind, expr.span),
+ RhsTactics::Default,
+ comments_span,
+ true,
+ );
+ }
+
+ let expr_rw = expr.rewrite(context, cond_shape);
+ // The expression may (partially) fit on the current line.
+ // We do not allow splitting between `if` and condition.
+ if self.keyword == "if" || expr_rw.is_some() {
+ return expr_rw;
+ }
+
+ // The expression won't fit on the current line, jump to next.
+ let nested_shape = shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config);
+ let nested_indent_str = nested_shape.indent.to_string_with_newline(context.config);
+ expr.rewrite(context, nested_shape)
+ .map(|expr_rw| format!("{}{}", nested_indent_str, expr_rw))
+ }
+
+ fn rewrite_cond(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ alt_block_sep: &str,
+ ) -> Option<(String, usize)> {
+ // Do not take the rhs overhead from the upper expressions into account
+ // when rewriting pattern.
+ let new_width = context.budget(shape.used_width());
+ let fresh_shape = Shape {
+ width: new_width,
+ ..shape
+ };
+ let constr_shape = if self.nested_if {
+ // We are part of an if-elseif-else chain. Our constraints are tightened.
+ // 7 = "} else ".len()
+ fresh_shape.offset_left(7)?
+ } else {
+ fresh_shape
+ };
+
+ let label_string = rewrite_label(self.label);
+ // 1 = space after keyword.
+ let offset = self.keyword.len() + label_string.len() + 1;
+
+ let pat_expr_string = match self.cond {
+ Some(cond) => self.rewrite_pat_expr(context, cond, constr_shape, offset)?,
+ None => String::new(),
+ };
+
+ let brace_overhead =
+ if context.config.control_brace_style() != ControlBraceStyle::AlwaysNextLine {
+ // 2 = ` {`
+ 2
+ } else {
+ 0
+ };
+ let one_line_budget = context
+ .config
+ .max_width()
+ .saturating_sub(constr_shape.used_width() + offset + brace_overhead);
+ let force_newline_brace = (pat_expr_string.contains('\n')
+ || pat_expr_string.len() > one_line_budget)
+ && (!last_line_extendable(&pat_expr_string)
+ || last_line_offsetted(shape.used_width(), &pat_expr_string));
+
+ // Try to format if-else on single line.
+ if self.allow_single_line && context.config.single_line_if_else_max_width() > 0 {
+ let trial = self.rewrite_single_line(&pat_expr_string, context, shape.width);
+
+ if let Some(cond_str) = trial {
+ if cond_str.len() <= context.config.single_line_if_else_max_width() {
+ return Some((cond_str, 0));
+ }
+ }
+ }
+
+ let cond_span = if let Some(cond) = self.cond {
+ cond.span
+ } else {
+ mk_sp(self.block.span.lo(), self.block.span.lo())
+ };
+
+ // `for event in event`
+ // Do not include label in the span.
+ let lo = self
+ .label
+ .map_or(self.span.lo(), |label| label.ident.span.hi());
+ let between_kwd_cond = mk_sp(
+ context
+ .snippet_provider
+ .span_after(mk_sp(lo, self.span.hi()), self.keyword.trim()),
+ if self.pat.is_none() {
+ cond_span.lo()
+ } else if self.matcher.is_empty() {
+ self.pat.unwrap().span.lo()
+ } else {
+ context
+ .snippet_provider
+ .span_before(self.span, self.matcher.trim())
+ },
+ );
+
+ let between_kwd_cond_comment = extract_comment(between_kwd_cond, context, shape);
+
+ let after_cond_comment =
+ extract_comment(mk_sp(cond_span.hi(), self.block.span.lo()), context, shape);
+
+ let block_sep = if self.cond.is_none() && between_kwd_cond_comment.is_some() {
+ ""
+ } else if context.config.control_brace_style() == ControlBraceStyle::AlwaysNextLine
+ || force_newline_brace
+ {
+ alt_block_sep
+ } else {
+ " "
+ };
+
+ let used_width = if pat_expr_string.contains('\n') {
+ last_line_width(&pat_expr_string)
+ } else {
+ // 2 = spaces after keyword and condition.
+ label_string.len() + self.keyword.len() + pat_expr_string.len() + 2
+ };
+
+ Some((
+ format!(
+ "{}{}{}{}{}",
+ label_string,
+ self.keyword,
+ between_kwd_cond_comment.as_ref().map_or(
+ if pat_expr_string.is_empty() || pat_expr_string.starts_with('\n') {
+ ""
+ } else {
+ " "
+ },
+ |s| &**s,
+ ),
+ pat_expr_string,
+ after_cond_comment.as_ref().map_or(block_sep, |s| &**s)
+ ),
+ used_width,
+ ))
+ }
+}
+
+impl<'a> Rewrite for ControlFlow<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ debug!("ControlFlow::rewrite {:?} {:?}", self, shape);
+
+ let alt_block_sep = &shape.indent.to_string_with_newline(context.config);
+ let (cond_str, used_width) = self.rewrite_cond(context, shape, alt_block_sep)?;
+ // If `used_width` is 0, it indicates that the whole control flow is written on a single line.
+ if used_width == 0 {
+ return Some(cond_str);
+ }
+
+ let block_width = shape.width.saturating_sub(used_width);
+ // This is used only for the empty block case: `{}`. So, we use 1 if we know
+ // we should avoid the single line case.
+ let block_width = if self.else_block.is_some() || self.nested_if {
+ min(1, block_width)
+ } else {
+ block_width
+ };
+ let block_shape = Shape {
+ width: block_width,
+ ..shape
+ };
+ let block_str = {
+ let old_val = context.is_if_else_block.replace(self.else_block.is_some());
+ let result =
+ rewrite_block_with_visitor(context, "", self.block, None, None, block_shape, true);
+ context.is_if_else_block.replace(old_val);
+ result?
+ };
+
+ let mut result = format!("{}{}", cond_str, block_str);
+
+ if let Some(else_block) = self.else_block {
+ let shape = Shape::indented(shape.indent, context.config);
+ let mut last_in_chain = false;
+ let rewrite = match else_block.kind {
+ // If the else expression is another if-else expression, prevent it
+ // from being formatted on a single line.
+ // Note how we're passing the original shape, as the
+ // cost of "else" should not cascade.
+ ast::ExprKind::If(ref cond, ref if_block, ref next_else_block) => {
+ let (pats, cond) = extract_pats_and_cond(cond);
+ ControlFlow::new_if(
+ cond,
+ pats,
+ if_block,
+ next_else_block.as_ref().map(|e| &**e),
+ false,
+ true,
+ mk_sp(else_block.span.lo(), self.span.hi()),
+ )
+ .rewrite(context, shape)
+ }
+ _ => {
+ last_in_chain = true;
+ // When rewriting a block, the width is only used for single line
+ // blocks, passing 1 lets us avoid that.
+ let else_shape = Shape {
+ width: min(1, shape.width),
+ ..shape
+ };
+ format_expr(else_block, ExprType::Statement, context, else_shape)
+ }
+ };
+
+ let between_kwd_else_block = mk_sp(
+ self.block.span.hi(),
+ context
+ .snippet_provider
+ .span_before(mk_sp(self.block.span.hi(), else_block.span.lo()), "else"),
+ );
+ let between_kwd_else_block_comment =
+ extract_comment(between_kwd_else_block, context, shape);
+
+ let after_else = mk_sp(
+ context
+ .snippet_provider
+ .span_after(mk_sp(self.block.span.hi(), else_block.span.lo()), "else"),
+ else_block.span.lo(),
+ );
+ let after_else_comment = extract_comment(after_else, context, shape);
+
+ let between_sep = match context.config.control_brace_style() {
+ ControlBraceStyle::AlwaysNextLine | ControlBraceStyle::ClosingNextLine => {
+ &*alt_block_sep
+ }
+ ControlBraceStyle::AlwaysSameLine => " ",
+ };
+ let after_sep = match context.config.control_brace_style() {
+ ControlBraceStyle::AlwaysNextLine if last_in_chain => &*alt_block_sep,
+ _ => " ",
+ };
+
+ result.push_str(&format!(
+ "{}else{}",
+ between_kwd_else_block_comment
+ .as_ref()
+ .map_or(between_sep, |s| &**s),
+ after_else_comment.as_ref().map_or(after_sep, |s| &**s),
+ ));
+ result.push_str(&rewrite?);
+ }
+
+ Some(result)
+ }
+}
+
+fn rewrite_label(opt_label: Option<ast::Label>) -> Cow<'static, str> {
+ match opt_label {
+ Some(label) => Cow::from(format!("{}: ", label.ident)),
+ None => Cow::from(""),
+ }
+}
+
+fn extract_comment(span: Span, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match rewrite_missing_comment(span, shape, context) {
+ Some(ref comment) if !comment.is_empty() => Some(format!(
+ "{indent}{}{indent}",
+ comment,
+ indent = shape.indent.to_string_with_newline(context.config)
+ )),
+ _ => None,
+ }
+}
+
+pub(crate) fn block_contains_comment(context: &RewriteContext<'_>, block: &ast::Block) -> bool {
+ contains_comment(context.snippet(block.span))
+}
+
+// Checks that a block contains no statements, an expression and no comments or
+// attributes.
+// FIXME: incorrectly returns false when comment is contained completely within
+// the expression.
+pub(crate) fn is_simple_block(
+ context: &RewriteContext<'_>,
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+) -> bool {
+ block.stmts.len() == 1
+ && stmt_is_expr(&block.stmts[0])
+ && !block_contains_comment(context, block)
+ && attrs.map_or(true, |a| a.is_empty())
+}
+
+/// Checks whether a block contains at most one statement or expression, and no
+/// comments or attributes.
+pub(crate) fn is_simple_block_stmt(
+ context: &RewriteContext<'_>,
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+) -> bool {
+ block.stmts.len() <= 1
+ && !block_contains_comment(context, block)
+ && attrs.map_or(true, |a| a.is_empty())
+}
+
+fn block_has_statements(block: &ast::Block) -> bool {
+ block
+ .stmts
+ .iter()
+ .any(|stmt| !matches!(stmt.kind, ast::StmtKind::Empty))
+}
+
+/// Checks whether a block contains no statements, expressions, comments, or
+/// inner attributes.
+pub(crate) fn is_empty_block(
+ context: &RewriteContext<'_>,
+ block: &ast::Block,
+ attrs: Option<&[ast::Attribute]>,
+) -> bool {
+ !block_has_statements(block)
+ && !block_contains_comment(context, block)
+ && attrs.map_or(true, |a| inner_attributes(a).is_empty())
+}
+
+pub(crate) fn stmt_is_expr(stmt: &ast::Stmt) -> bool {
+ matches!(stmt.kind, ast::StmtKind::Expr(..))
+}
+
+pub(crate) fn is_unsafe_block(block: &ast::Block) -> bool {
+ matches!(block.rules, ast::BlockCheckMode::Unsafe(..))
+}
+
+pub(crate) fn rewrite_literal(
+ context: &RewriteContext<'_>,
+ l: &ast::Lit,
+ shape: Shape,
+) -> Option<String> {
+ match l.kind {
+ ast::LitKind::Str(_, ast::StrStyle::Cooked) => rewrite_string_lit(context, l.span, shape),
+ ast::LitKind::Int(..) => rewrite_int_lit(context, l, shape),
+ _ => wrap_str(
+ context.snippet(l.span).to_owned(),
+ context.config.max_width(),
+ shape,
+ ),
+ }
+}
+
+fn rewrite_string_lit(context: &RewriteContext<'_>, span: Span, shape: Shape) -> Option<String> {
+ let string_lit = context.snippet(span);
+
+ if !context.config.format_strings() {
+ if string_lit
+ .lines()
+ .dropping_back(1)
+ .all(|line| line.ends_with('\\'))
+ && context.config.version() == Version::Two
+ {
+ return Some(string_lit.to_owned());
+ } else {
+ return wrap_str(string_lit.to_owned(), context.config.max_width(), shape);
+ }
+ }
+
+ // Remove the quote characters.
+ let str_lit = &string_lit[1..string_lit.len() - 1];
+
+ rewrite_string(
+ str_lit,
+ &StringFormat::new(shape.visual_indent(0), context.config),
+ shape.width.saturating_sub(2),
+ )
+}
+
+fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -> Option<String> {
+ let span = lit.span;
+ let symbol = lit.token.symbol.as_str();
+
+ if let Some(symbol_stripped) = symbol.strip_prefix("0x") {
+ let hex_lit = match context.config.hex_literal_case() {
+ HexLiteralCase::Preserve => None,
+ HexLiteralCase::Upper => Some(symbol_stripped.to_ascii_uppercase()),
+ HexLiteralCase::Lower => Some(symbol_stripped.to_ascii_lowercase()),
+ };
+ if let Some(hex_lit) = hex_lit {
+ return wrap_str(
+ format!(
+ "0x{}{}",
+ hex_lit,
+ lit.token.suffix.map_or(String::new(), |s| s.to_string())
+ ),
+ context.config.max_width(),
+ shape,
+ );
+ }
+ }
+
+ wrap_str(
+ context.snippet(span).to_owned(),
+ context.config.max_width(),
+ shape,
+ )
+}
+
+fn choose_separator_tactic(context: &RewriteContext<'_>, span: Span) -> Option<SeparatorTactic> {
+ if context.inside_macro() {
+ if span_ends_with_comma(context, span) {
+ Some(SeparatorTactic::Always)
+ } else {
+ Some(SeparatorTactic::Never)
+ }
+ } else {
+ None
+ }
+}
+
+pub(crate) fn rewrite_call(
+ context: &RewriteContext<'_>,
+ callee: &str,
+ args: &[ptr::P<ast::Expr>],
+ span: Span,
+ shape: Shape,
+) -> Option<String> {
+ overflow::rewrite_with_parens(
+ context,
+ callee,
+ args.iter(),
+ shape,
+ span,
+ context.config.fn_call_width(),
+ choose_separator_tactic(context, span),
+ )
+}
+
+pub(crate) fn is_simple_expr(expr: &ast::Expr) -> bool {
+ match expr.kind {
+ ast::ExprKind::Lit(..) => true,
+ ast::ExprKind::Path(ref qself, ref path) => qself.is_none() && path.segments.len() <= 1,
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Cast(ref expr, _)
+ | ast::ExprKind::Field(ref expr, _)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr) => is_simple_expr(expr),
+ ast::ExprKind::Index(ref lhs, ref rhs) => is_simple_expr(lhs) && is_simple_expr(rhs),
+ ast::ExprKind::Repeat(ref lhs, ref rhs) => {
+ is_simple_expr(lhs) && is_simple_expr(&*rhs.value)
+ }
+ _ => false,
+ }
+}
+
+pub(crate) fn is_every_expr_simple(lists: &[OverflowableItem<'_>]) -> bool {
+ lists.iter().all(OverflowableItem::is_simple)
+}
+
+pub(crate) fn can_be_overflowed_expr(
+ context: &RewriteContext<'_>,
+ expr: &ast::Expr,
+ args_len: usize,
+) -> bool {
+ match expr.kind {
+ _ if !expr.attrs.is_empty() => false,
+ ast::ExprKind::Match(..) => {
+ (context.use_block_indent() && args_len == 1)
+ || (context.config.indent_style() == IndentStyle::Visual && args_len > 1)
+ || context.config.overflow_delimited_expr()
+ }
+ ast::ExprKind::If(..)
+ | ast::ExprKind::ForLoop(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::While(..) => {
+ context.config.combine_control_expr() && context.use_block_indent() && args_len == 1
+ }
+
+ // Handle always block-like expressions
+ ast::ExprKind::Async(..) | ast::ExprKind::Block(..) | ast::ExprKind::Closure(..) => true,
+
+ // Handle `[]` and `{}`-like expressions
+ ast::ExprKind::Array(..) | ast::ExprKind::Struct(..) => {
+ context.config.overflow_delimited_expr()
+ || (context.use_block_indent() && args_len == 1)
+ }
+ ast::ExprKind::MacCall(ref mac) => {
+ match (
+ rustc_ast::ast::MacDelimiter::from_token(mac.args.delim().unwrap()),
+ context.config.overflow_delimited_expr(),
+ ) {
+ (Some(ast::MacDelimiter::Bracket), true)
+ | (Some(ast::MacDelimiter::Brace), true) => true,
+ _ => context.use_block_indent() && args_len == 1,
+ }
+ }
+
+ // Handle parenthetical expressions
+ ast::ExprKind::Call(..) | ast::ExprKind::MethodCall(..) | ast::ExprKind::Tup(..) => {
+ context.use_block_indent() && args_len == 1
+ }
+
+ // Handle unary-like expressions
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr)
+ | ast::ExprKind::Cast(ref expr, _) => can_be_overflowed_expr(context, expr, args_len),
+ _ => false,
+ }
+}
+
+pub(crate) fn is_nested_call(expr: &ast::Expr) -> bool {
+ match expr.kind {
+ ast::ExprKind::Call(..) | ast::ExprKind::MacCall(..) => true,
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr)
+ | ast::ExprKind::Cast(ref expr, _) => is_nested_call(expr),
+ _ => false,
+ }
+}
+
+/// Returns `true` if a function call or a method call represented by the given span ends with a
+/// trailing comma. This function is used when rewriting macros, as adding or removing a trailing
+/// comma from a macro can potentially break the code.
+pub(crate) fn span_ends_with_comma(context: &RewriteContext<'_>, span: Span) -> bool {
+ let mut result: bool = Default::default();
+ let mut prev_char: char = Default::default();
+ let closing_delimiters = &[')', '}', ']'];
+
+ for (kind, c) in CharClasses::new(context.snippet(span).chars()) {
+ match c {
+ _ if kind.is_comment() || c.is_whitespace() => continue,
+ c if closing_delimiters.contains(&c) => {
+ result &= !closing_delimiters.contains(&prev_char);
+ }
+ ',' => result = true,
+ _ => result = false,
+ }
+ prev_char = c;
+ }
+
+ result
+}
+
+fn rewrite_paren(
+ context: &RewriteContext<'_>,
+ mut subexpr: &ast::Expr,
+ shape: Shape,
+ mut span: Span,
+) -> Option<String> {
+ debug!("rewrite_paren, shape: {:?}", shape);
+
+ // Extract comments within parens.
+ let mut pre_span;
+ let mut post_span;
+ let mut pre_comment;
+ let mut post_comment;
+ let remove_nested_parens = context.config.remove_nested_parens();
+ loop {
+ // 1 = "(" or ")"
+ pre_span = mk_sp(span.lo() + BytePos(1), subexpr.span.lo());
+ post_span = mk_sp(subexpr.span.hi(), span.hi() - BytePos(1));
+ pre_comment = rewrite_missing_comment(pre_span, shape, context)?;
+ post_comment = rewrite_missing_comment(post_span, shape, context)?;
+
+ // Remove nested parens if there are no comments.
+ if let ast::ExprKind::Paren(ref subsubexpr) = subexpr.kind {
+ if remove_nested_parens && pre_comment.is_empty() && post_comment.is_empty() {
+ span = subexpr.span;
+ subexpr = subsubexpr;
+ continue;
+ }
+ }
+
+ break;
+ }
+
+ // 1 = `(` and `)`
+ let sub_shape = shape.offset_left(1)?.sub_width(1)?;
+ let subexpr_str = subexpr.rewrite(context, sub_shape)?;
+ let fits_single_line = !pre_comment.contains("//") && !post_comment.contains("//");
+ if fits_single_line {
+ Some(format!("({}{}{})", pre_comment, subexpr_str, post_comment))
+ } else {
+ rewrite_paren_in_multi_line(context, subexpr, shape, pre_span, post_span)
+ }
+}
+
+fn rewrite_paren_in_multi_line(
+ context: &RewriteContext<'_>,
+ subexpr: &ast::Expr,
+ shape: Shape,
+ pre_span: Span,
+ post_span: Span,
+) -> Option<String> {
+ let nested_indent = shape.indent.block_indent(context.config);
+ let nested_shape = Shape::indented(nested_indent, context.config);
+ let pre_comment = rewrite_missing_comment(pre_span, nested_shape, context)?;
+ let post_comment = rewrite_missing_comment(post_span, nested_shape, context)?;
+ let subexpr_str = subexpr.rewrite(context, nested_shape)?;
+
+ let mut result = String::with_capacity(subexpr_str.len() * 2);
+ result.push('(');
+ if !pre_comment.is_empty() {
+ result.push_str(&nested_indent.to_string_with_newline(context.config));
+ result.push_str(&pre_comment);
+ }
+ result.push_str(&nested_indent.to_string_with_newline(context.config));
+ result.push_str(&subexpr_str);
+ if !post_comment.is_empty() {
+ result.push_str(&nested_indent.to_string_with_newline(context.config));
+ result.push_str(&post_comment);
+ }
+ result.push_str(&shape.indent.to_string_with_newline(context.config));
+ result.push(')');
+
+ Some(result)
+}
+
+fn rewrite_index(
+ expr: &ast::Expr,
+ index: &ast::Expr,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ let expr_str = expr.rewrite(context, shape)?;
+
+ let offset = last_line_width(&expr_str) + 1;
+ let rhs_overhead = shape.rhs_overhead(context.config);
+ let index_shape = if expr_str.contains('\n') {
+ Shape::legacy(context.config.max_width(), shape.indent)
+ .offset_left(offset)
+ .and_then(|shape| shape.sub_width(1 + rhs_overhead))
+ } else {
+ match context.config.indent_style() {
+ IndentStyle::Block => shape
+ .offset_left(offset)
+ .and_then(|shape| shape.sub_width(1)),
+ IndentStyle::Visual => shape.visual_indent(offset).sub_width(offset + 1),
+ }
+ };
+ let orig_index_rw = index_shape.and_then(|s| index.rewrite(context, s));
+
+ // Return if index fits in a single line.
+ match orig_index_rw {
+ Some(ref index_str) if !index_str.contains('\n') => {
+ return Some(format!("{}[{}]", expr_str, index_str));
+ }
+ _ => (),
+ }
+
+ // Try putting index on the next line and see if it fits in a single line.
+ let indent = shape.indent.block_indent(context.config);
+ let index_shape = Shape::indented(indent, context.config).offset_left(1)?;
+ let index_shape = index_shape.sub_width(1 + rhs_overhead)?;
+ let new_index_rw = index.rewrite(context, index_shape);
+ match (orig_index_rw, new_index_rw) {
+ (_, Some(ref new_index_str)) if !new_index_str.contains('\n') => Some(format!(
+ "{}{}[{}]",
+ expr_str,
+ indent.to_string_with_newline(context.config),
+ new_index_str,
+ )),
+ (None, Some(ref new_index_str)) => Some(format!(
+ "{}{}[{}]",
+ expr_str,
+ indent.to_string_with_newline(context.config),
+ new_index_str,
+ )),
+ (Some(ref index_str), _) => Some(format!("{}[{}]", expr_str, index_str)),
+ _ => None,
+ }
+}
+
+fn struct_lit_can_be_aligned(fields: &[ast::ExprField], has_base: bool) -> bool {
+ !has_base && fields.iter().all(|field| !field.is_shorthand)
+}
+
+fn rewrite_struct_lit<'a>(
+ context: &RewriteContext<'_>,
+ path: &ast::Path,
+ qself: Option<&ast::QSelf>,
+ fields: &'a [ast::ExprField],
+ struct_rest: &ast::StructRest,
+ attrs: &[ast::Attribute],
+ span: Span,
+ shape: Shape,
+) -> Option<String> {
+ debug!("rewrite_struct_lit: shape {:?}", shape);
+
+ enum StructLitField<'a> {
+ Regular(&'a ast::ExprField),
+ Base(&'a ast::Expr),
+ Rest(Span),
+ }
+
+ // 2 = " {".len()
+ let path_shape = shape.sub_width(2)?;
+ let path_str = rewrite_path(context, PathContext::Expr, qself, path, path_shape)?;
+
+ let has_base_or_rest = match struct_rest {
+ ast::StructRest::None if fields.is_empty() => return Some(format!("{} {{}}", path_str)),
+ ast::StructRest::Rest(_) if fields.is_empty() => {
+ return Some(format!("{} {{ .. }}", path_str));
+ }
+ ast::StructRest::Rest(_) | ast::StructRest::Base(_) => true,
+ _ => false,
+ };
+
+ // Foo { a: Foo } - indent is +3, width is -5.
+ let (h_shape, v_shape) = struct_lit_shape(shape, context, path_str.len() + 3, 2)?;
+
+ let one_line_width = h_shape.map_or(0, |shape| shape.width);
+ let body_lo = context.snippet_provider.span_after(span, "{");
+ let fields_str = if struct_lit_can_be_aligned(fields, has_base_or_rest)
+ && context.config.struct_field_align_threshold() > 0
+ {
+ rewrite_with_alignment(
+ fields,
+ context,
+ v_shape,
+ mk_sp(body_lo, span.hi()),
+ one_line_width,
+ )?
+ } else {
+ let field_iter = fields.iter().map(StructLitField::Regular).chain(
+ match struct_rest {
+ ast::StructRest::Base(expr) => Some(StructLitField::Base(&**expr)),
+ ast::StructRest::Rest(span) => Some(StructLitField::Rest(*span)),
+ ast::StructRest::None => None,
+ }
+ .into_iter(),
+ );
+
+ let span_lo = |item: &StructLitField<'_>| match *item {
+ StructLitField::Regular(field) => field.span().lo(),
+ StructLitField::Base(expr) => {
+ let last_field_hi = fields.last().map_or(span.lo(), |field| field.span.hi());
+ let snippet = context.snippet(mk_sp(last_field_hi, expr.span.lo()));
+ let pos = snippet.find_uncommented("..").unwrap();
+ last_field_hi + BytePos(pos as u32)
+ }
+ StructLitField::Rest(span) => span.lo(),
+ };
+ let span_hi = |item: &StructLitField<'_>| match *item {
+ StructLitField::Regular(field) => field.span().hi(),
+ StructLitField::Base(expr) => expr.span.hi(),
+ StructLitField::Rest(span) => span.hi(),
+ };
+ let rewrite = |item: &StructLitField<'_>| match *item {
+ StructLitField::Regular(field) => {
+ // The 1 taken from the v_budget is for the comma.
+ rewrite_field(context, field, v_shape.sub_width(1)?, 0)
+ }
+ StructLitField::Base(expr) => {
+ // 2 = ..
+ expr.rewrite(context, v_shape.offset_left(2)?)
+ .map(|s| format!("..{}", s))
+ }
+ StructLitField::Rest(_) => Some("..".to_owned()),
+ };
+
+ let items = itemize_list(
+ context.snippet_provider,
+ field_iter,
+ "}",
+ ",",
+ span_lo,
+ span_hi,
+ rewrite,
+ body_lo,
+ span.hi(),
+ false,
+ );
+ let item_vec = items.collect::<Vec<_>>();
+
+ let tactic = struct_lit_tactic(h_shape, context, &item_vec);
+ let nested_shape = shape_for_tactic(tactic, h_shape, v_shape);
+
+ let ends_with_comma = span_ends_with_comma(context, span);
+ let force_no_trailing_comma = context.inside_macro() && !ends_with_comma;
+
+ let fmt = struct_lit_formatting(
+ nested_shape,
+ tactic,
+ context,
+ force_no_trailing_comma || has_base_or_rest || !context.use_block_indent(),
+ );
+
+ write_list(&item_vec, &fmt)?
+ };
+
+ let fields_str =
+ wrap_struct_field(context, attrs, &fields_str, shape, v_shape, one_line_width)?;
+ Some(format!("{} {{{}}}", path_str, fields_str))
+
+ // FIXME if context.config.indent_style() == Visual, but we run out
+ // of space, we should fall back to BlockIndent.
+}
+
+pub(crate) fn wrap_struct_field(
+ context: &RewriteContext<'_>,
+ attrs: &[ast::Attribute],
+ fields_str: &str,
+ shape: Shape,
+ nested_shape: Shape,
+ one_line_width: usize,
+) -> Option<String> {
+ let should_vertical = context.config.indent_style() == IndentStyle::Block
+ && (fields_str.contains('\n')
+ || !context.config.struct_lit_single_line()
+ || fields_str.len() > one_line_width);
+
+ let inner_attrs = &inner_attributes(attrs);
+ if inner_attrs.is_empty() {
+ if should_vertical {
+ Some(format!(
+ "{}{}{}",
+ nested_shape.indent.to_string_with_newline(context.config),
+ fields_str,
+ shape.indent.to_string_with_newline(context.config)
+ ))
+ } else {
+ // One liner or visual indent.
+ Some(format!(" {} ", fields_str))
+ }
+ } else {
+ Some(format!(
+ "{}{}{}{}{}",
+ nested_shape.indent.to_string_with_newline(context.config),
+ inner_attrs.rewrite(context, shape)?,
+ nested_shape.indent.to_string_with_newline(context.config),
+ fields_str,
+ shape.indent.to_string_with_newline(context.config)
+ ))
+ }
+}
+
+pub(crate) fn struct_lit_field_separator(config: &Config) -> &str {
+ colon_spaces(config)
+}
+
+pub(crate) fn rewrite_field(
+ context: &RewriteContext<'_>,
+ field: &ast::ExprField,
+ shape: Shape,
+ prefix_max_width: usize,
+) -> Option<String> {
+ if contains_skip(&field.attrs) {
+ return Some(context.snippet(field.span()).to_owned());
+ }
+ let mut attrs_str = field.attrs.rewrite(context, shape)?;
+ if !attrs_str.is_empty() {
+ attrs_str.push_str(&shape.indent.to_string_with_newline(context.config));
+ };
+ let name = context.snippet(field.ident.span);
+ if field.is_shorthand {
+ Some(attrs_str + name)
+ } else {
+ let mut separator = String::from(struct_lit_field_separator(context.config));
+ for _ in 0..prefix_max_width.saturating_sub(name.len()) {
+ separator.push(' ');
+ }
+ let overhead = name.len() + separator.len();
+ let expr_shape = shape.offset_left(overhead)?;
+ let expr = field.expr.rewrite(context, expr_shape);
+
+ match expr {
+ Some(ref e) if e.as_str() == name && context.config.use_field_init_shorthand() => {
+ Some(attrs_str + name)
+ }
+ Some(e) => Some(format!("{}{}{}{}", attrs_str, name, separator, e)),
+ None => {
+ let expr_offset = shape.indent.block_indent(context.config);
+ let expr = field
+ .expr
+ .rewrite(context, Shape::indented(expr_offset, context.config));
+ expr.map(|s| {
+ format!(
+ "{}{}:\n{}{}",
+ attrs_str,
+ name,
+ expr_offset.to_string(context.config),
+ s
+ )
+ })
+ }
+ }
+ }
+}
+
+fn rewrite_tuple_in_visual_indent_style<'a, T: 'a + IntoOverflowableItem<'a>>(
+ context: &RewriteContext<'_>,
+ mut items: impl Iterator<Item = &'a T>,
+ span: Span,
+ shape: Shape,
+ is_singleton_tuple: bool,
+) -> Option<String> {
+ // In case of length 1, need a trailing comma
+ debug!("rewrite_tuple_in_visual_indent_style {:?}", shape);
+ if is_singleton_tuple {
+ // 3 = "(" + ",)"
+ let nested_shape = shape.sub_width(3)?.visual_indent(1);
+ return items
+ .next()
+ .unwrap()
+ .rewrite(context, nested_shape)
+ .map(|s| format!("({},)", s));
+ }
+
+ let list_lo = context.snippet_provider.span_after(span, "(");
+ let nested_shape = shape.sub_width(2)?.visual_indent(1);
+ let items = itemize_list(
+ context.snippet_provider,
+ items,
+ ")",
+ ",",
+ |item| item.span().lo(),
+ |item| item.span().hi(),
+ |item| item.rewrite(context, nested_shape),
+ list_lo,
+ span.hi() - BytePos(1),
+ false,
+ );
+ let item_vec: Vec<_> = items.collect();
+ let tactic = definitive_tactic(
+ &item_vec,
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ nested_shape.width,
+ );
+ let fmt = ListFormatting::new(nested_shape, context.config)
+ .tactic(tactic)
+ .ends_with_newline(false);
+ let list_str = write_list(&item_vec, &fmt)?;
+
+ Some(format!("({})", list_str))
+}
+
+pub(crate) fn rewrite_tuple<'a, T: 'a + IntoOverflowableItem<'a>>(
+ context: &'a RewriteContext<'_>,
+ items: impl Iterator<Item = &'a T>,
+ span: Span,
+ shape: Shape,
+ is_singleton_tuple: bool,
+) -> Option<String> {
+ debug!("rewrite_tuple {:?}", shape);
+ if context.use_block_indent() {
+ // We use the same rule as function calls for rewriting tuples.
+ let force_tactic = if context.inside_macro() {
+ if span_ends_with_comma(context, span) {
+ Some(SeparatorTactic::Always)
+ } else {
+ Some(SeparatorTactic::Never)
+ }
+ } else if is_singleton_tuple {
+ Some(SeparatorTactic::Always)
+ } else {
+ None
+ };
+ overflow::rewrite_with_parens(
+ context,
+ "",
+ items,
+ shape,
+ span,
+ context.config.fn_call_width(),
+ force_tactic,
+ )
+ } else {
+ rewrite_tuple_in_visual_indent_style(context, items, span, shape, is_singleton_tuple)
+ }
+}
+
+pub(crate) fn rewrite_unary_prefix<R: Rewrite>(
+ context: &RewriteContext<'_>,
+ prefix: &str,
+ rewrite: &R,
+ shape: Shape,
+) -> Option<String> {
+ rewrite
+ .rewrite(context, shape.offset_left(prefix.len())?)
+ .map(|r| format!("{}{}", prefix, r))
+}
+
+// FIXME: this is probably not correct for multi-line Rewrites. We should
+// subtract suffix.len() from the last line budget, not the first!
+pub(crate) fn rewrite_unary_suffix<R: Rewrite>(
+ context: &RewriteContext<'_>,
+ suffix: &str,
+ rewrite: &R,
+ shape: Shape,
+) -> Option<String> {
+ rewrite
+ .rewrite(context, shape.sub_width(suffix.len())?)
+ .map(|mut r| {
+ r.push_str(suffix);
+ r
+ })
+}
+
+fn rewrite_unary_op(
+ context: &RewriteContext<'_>,
+ op: ast::UnOp,
+ expr: &ast::Expr,
+ shape: Shape,
+) -> Option<String> {
+ // For some reason, an UnOp is not spanned like BinOp!
+ rewrite_unary_prefix(context, ast::UnOp::to_string(op), expr, shape)
+}
+
+pub(crate) enum RhsAssignKind<'ast> {
+ Expr(&'ast ast::ExprKind, Span),
+ Bounds,
+ Ty,
+}
+
+impl<'ast> RhsAssignKind<'ast> {
+ // TODO(calebcartwright)
+ // Preemptive addition for handling RHS with chains, not yet utilized.
+ // It may make more sense to construct the chain first and then check
+ // whether there are actually chain elements.
+ #[allow(dead_code)]
+ fn is_chain(&self) -> bool {
+ match self {
+ RhsAssignKind::Expr(kind, _) => {
+ matches!(
+ kind,
+ ast::ExprKind::Try(..)
+ | ast::ExprKind::Field(..)
+ | ast::ExprKind::MethodCall(..)
+ | ast::ExprKind::Await(_)
+ )
+ }
+ _ => false,
+ }
+ }
+}
+
+fn rewrite_assignment(
+ context: &RewriteContext<'_>,
+ lhs: &ast::Expr,
+ rhs: &ast::Expr,
+ op: Option<&ast::BinOp>,
+ shape: Shape,
+) -> Option<String> {
+ let operator_str = match op {
+ Some(op) => context.snippet(op.span),
+ None => "=",
+ };
+
+ // 1 = space between lhs and operator.
+ let lhs_shape = shape.sub_width(operator_str.len() + 1)?;
+ let lhs_str = format!("{} {}", lhs.rewrite(context, lhs_shape)?, operator_str);
+
+ rewrite_assign_rhs(
+ context,
+ lhs_str,
+ rhs,
+ &RhsAssignKind::Expr(&rhs.kind, rhs.span),
+ shape,
+ )
+}
+
+/// Controls where to put the rhs.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub(crate) enum RhsTactics {
+ /// Use heuristics.
+ Default,
+ /// Put the rhs on the next line if it uses multiple lines, without extra indentation.
+ ForceNextLineWithoutIndent,
+ /// Allow overflowing max width if neither `Default` nor `ForceNextLineWithoutIndent`
+ /// worked.
+ AllowOverflow,
+}
+
+// The left hand side must contain everything up to, and including, the
+// assignment operator.
+pub(crate) fn rewrite_assign_rhs<S: Into<String>, R: Rewrite>(
+ context: &RewriteContext<'_>,
+ lhs: S,
+ ex: &R,
+ rhs_kind: &RhsAssignKind<'_>,
+ shape: Shape,
+) -> Option<String> {
+ rewrite_assign_rhs_with(context, lhs, ex, shape, rhs_kind, RhsTactics::Default)
+}
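+// Illustrative call (not part of the original source): for `let x = compute();`
+// the caller passes the entire LHS, including the `=`, e.g.
+//
+//     rewrite_assign_rhs(context, "let x =", &init_expr,
+//                        &RhsAssignKind::Expr(&init_expr.kind, init_expr.span), shape)
+//
+// where `init_expr` stands in for the parsed RHS expression.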
+
+pub(crate) fn rewrite_assign_rhs_expr<R: Rewrite>(
+ context: &RewriteContext<'_>,
+ lhs: &str,
+ ex: &R,
+ shape: Shape,
+ rhs_kind: &RhsAssignKind<'_>,
+ rhs_tactics: RhsTactics,
+) -> Option<String> {
+ let last_line_width = last_line_width(lhs).saturating_sub(if lhs.contains('\n') {
+ shape.indent.width()
+ } else {
+ 0
+ });
+ // 1 = space between operator and rhs.
+ let orig_shape = shape.offset_left(last_line_width + 1).unwrap_or(Shape {
+ width: 0,
+ offset: shape.offset + last_line_width + 1,
+ ..shape
+ });
+ let has_rhs_comment = if let Some(offset) = lhs.find_last_uncommented("=") {
+ lhs.trim_end().len() > offset + 1
+ } else {
+ false
+ };
+
+ choose_rhs(
+ context,
+ ex,
+ orig_shape,
+ ex.rewrite(context, orig_shape),
+ rhs_kind,
+ rhs_tactics,
+ has_rhs_comment,
+ )
+}
+
+pub(crate) fn rewrite_assign_rhs_with<S: Into<String>, R: Rewrite>(
+ context: &RewriteContext<'_>,
+ lhs: S,
+ ex: &R,
+ shape: Shape,
+ rhs_kind: &RhsAssignKind<'_>,
+ rhs_tactics: RhsTactics,
+) -> Option<String> {
+ let lhs = lhs.into();
+ let rhs = rewrite_assign_rhs_expr(context, &lhs, ex, shape, rhs_kind, rhs_tactics)?;
+ Some(lhs + &rhs)
+}
+
+pub(crate) fn rewrite_assign_rhs_with_comments<S: Into<String>, R: Rewrite>(
+ context: &RewriteContext<'_>,
+ lhs: S,
+ ex: &R,
+ shape: Shape,
+ rhs_kind: &RhsAssignKind<'_>,
+ rhs_tactics: RhsTactics,
+ between_span: Span,
+ allow_extend: bool,
+) -> Option<String> {
+ let lhs = lhs.into();
+ let contains_comment = contains_comment(context.snippet(between_span));
+ let shape = if contains_comment {
+ shape.block_left(context.config.tab_spaces())?
+ } else {
+ shape
+ };
+ let rhs = rewrite_assign_rhs_expr(context, &lhs, ex, shape, rhs_kind, rhs_tactics)?;
+
+ if contains_comment {
+ let rhs = rhs.trim_start();
+ combine_strs_with_missing_comments(context, &lhs, rhs, between_span, shape, allow_extend)
+ } else {
+ Some(lhs + &rhs)
+ }
+}
+
+fn choose_rhs<R: Rewrite>(
+ context: &RewriteContext<'_>,
+ expr: &R,
+ shape: Shape,
+ orig_rhs: Option<String>,
+ _rhs_kind: &RhsAssignKind<'_>,
+ rhs_tactics: RhsTactics,
+ has_rhs_comment: bool,
+) -> Option<String> {
+ match orig_rhs {
+ Some(ref new_str) if new_str.is_empty() => Some(String::new()),
+ Some(ref new_str)
+ if !new_str.contains('\n') && unicode_str_width(new_str) <= shape.width =>
+ {
+ Some(format!(" {}", new_str))
+ }
+ _ => {
+ // Expression did not fit on the same line as the identifier.
+ // Try splitting the line and see if that works better.
+ let new_shape = shape_from_rhs_tactic(context, shape, rhs_tactics)?;
+ let new_rhs = expr.rewrite(context, new_shape);
+ let new_indent_str = &shape
+ .indent
+ .block_indent(context.config)
+ .to_string_with_newline(context.config);
+ let before_space_str = if has_rhs_comment { "" } else { " " };
+
+ match (orig_rhs, new_rhs) {
+ (Some(ref orig_rhs), Some(ref new_rhs))
+ if wrap_str(new_rhs.clone(), context.config.max_width(), new_shape)
+ .is_none() =>
+ {
+ Some(format!("{}{}", before_space_str, orig_rhs))
+ }
+ (Some(ref orig_rhs), Some(ref new_rhs))
+ if prefer_next_line(orig_rhs, new_rhs, rhs_tactics) =>
+ {
+ Some(format!("{}{}", new_indent_str, new_rhs))
+ }
+ (None, Some(ref new_rhs)) => Some(format!("{}{}", new_indent_str, new_rhs)),
+ (None, None) if rhs_tactics == RhsTactics::AllowOverflow => {
+ let shape = shape.infinite_width();
+ expr.rewrite(context, shape)
+ .map(|s| format!("{}{}", before_space_str, s))
+ }
+ (None, None) => None,
+ (Some(orig_rhs), _) => Some(format!("{}{}", before_space_str, orig_rhs)),
+ }
+ }
+ }
+}
+
+fn shape_from_rhs_tactic(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ rhs_tactic: RhsTactics,
+) -> Option<Shape> {
+ match rhs_tactic {
+ RhsTactics::ForceNextLineWithoutIndent => shape
+ .with_max_width(context.config)
+ .sub_width(shape.indent.width()),
+ RhsTactics::Default | RhsTactics::AllowOverflow => {
+ Shape::indented(shape.indent.block_indent(context.config), context.config)
+ .sub_width(shape.rhs_overhead(context.config))
+ }
+ }
+}
+
+/// Returns `true` if formatting `next_line_rhs` on a new line is better than the
+/// original line's formatting.
+///
+/// It is considered better if:
+/// 1. the tactic is ForceNextLineWithoutIndent
+/// 2. next_line_rhs doesn't have newlines
+/// 3. the original line has more newlines than next_line_rhs
+/// 4. the original formatting of the first line ends with `(`, `{`, or `[` and next_line_rhs
+/// doesn't
+pub(crate) fn prefer_next_line(
+ orig_rhs: &str,
+ next_line_rhs: &str,
+ rhs_tactics: RhsTactics,
+) -> bool {
+ rhs_tactics == RhsTactics::ForceNextLineWithoutIndent
+ || !next_line_rhs.contains('\n')
+ || count_newlines(orig_rhs) > count_newlines(next_line_rhs) + 1
+ || first_line_ends_with(orig_rhs, '(') && !first_line_ends_with(next_line_rhs, '(')
+ || first_line_ends_with(orig_rhs, '{') && !first_line_ends_with(next_line_rhs, '{')
+ || first_line_ends_with(orig_rhs, '[') && !first_line_ends_with(next_line_rhs, '[')
+}
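+// Illustration (not part of the original source): with
+//     orig_rhs      = "foo(\n    bar,\n    baz,\n)"
+//     next_line_rhs = "foo(bar, baz)"
+// criteria 2 and 3 both hold (the rewrite has no newlines and the original has
+// more), so the rhs is put on the next line.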
+
+fn rewrite_expr_addrof(
+ context: &RewriteContext<'_>,
+ borrow_kind: ast::BorrowKind,
+ mutability: ast::Mutability,
+ expr: &ast::Expr,
+ shape: Shape,
+) -> Option<String> {
+ let operator_str = match (mutability, borrow_kind) {
+ (ast::Mutability::Not, ast::BorrowKind::Ref) => "&",
+ (ast::Mutability::Not, ast::BorrowKind::Raw) => "&raw const ",
+ (ast::Mutability::Mut, ast::BorrowKind::Ref) => "&mut ",
+ (ast::Mutability::Mut, ast::BorrowKind::Raw) => "&raw mut ",
+ };
+ rewrite_unary_prefix(context, operator_str, expr, shape)
+}
+
+pub(crate) fn is_method_call(expr: &ast::Expr) -> bool {
+ match expr.kind {
+ ast::ExprKind::MethodCall(..) => true,
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Cast(ref expr, _)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr) => is_method_call(expr),
+ _ => false,
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::last_line_offsetted;
+
+ #[test]
+ fn test_last_line_offsetted() {
+ let lines = "one\n two";
+ assert_eq!(last_line_offsetted(2, lines), true);
+ assert_eq!(last_line_offsetted(4, lines), false);
+ assert_eq!(last_line_offsetted(6, lines), false);
+
+ let lines = "one two";
+ assert_eq!(last_line_offsetted(2, lines), false);
+ assert_eq!(last_line_offsetted(0, lines), false);
+
+ let lines = "\ntwo";
+ assert_eq!(last_line_offsetted(2, lines), false);
+ assert_eq!(last_line_offsetted(0, lines), false);
+
+ let lines = "one\n two three";
+ assert_eq!(last_line_offsetted(2, lines), true);
+ let lines = "one\n two three";
+ assert_eq!(last_line_offsetted(2, lines), false);
+ }
+}
diff --git a/src/tools/rustfmt/src/format-diff/main.rs b/src/tools/rustfmt/src/format-diff/main.rs
new file mode 100644
index 000000000..f6b739e1c
--- /dev/null
+++ b/src/tools/rustfmt/src/format-diff/main.rs
@@ -0,0 +1,281 @@
+// Inspired by Clang's clang-format-diff:
+//
+// https://github.com/llvm-mirror/clang/blob/master/tools/clang-format/clang-format-diff.py
+
+#![deny(warnings)]
+
+#[macro_use]
+extern crate log;
+
+use serde::{Deserialize, Serialize};
+use serde_json as json;
+use thiserror::Error;
+
+use std::collections::HashSet;
+use std::env;
+use std::ffi::OsStr;
+use std::io::{self, BufRead};
+use std::process;
+
+use regex::Regex;
+
+use clap::{CommandFactory, Parser};
+
+/// The default pattern of files to format.
+///
+/// We only want to format rust files by default.
+const DEFAULT_PATTERN: &str = r".*\.rs";
+
+#[derive(Error, Debug)]
+enum FormatDiffError {
+ #[error("{0}")]
+ IncorrectOptions(#[from] getopts::Fail),
+ #[error("{0}")]
+ IncorrectFilter(#[from] regex::Error),
+ #[error("{0}")]
+ IoError(#[from] io::Error),
+}
+
+#[derive(Parser, Debug)]
+#[clap(
+ name = "rustfmt-format-diff",
+ disable_version_flag = true,
+ next_line_help = true
+)]
+pub struct Opts {
+ /// Skip the smallest prefix containing NUMBER slashes
+ #[clap(
+ short = 'p',
+ long = "skip-prefix",
+ value_name = "NUMBER",
+ default_value = "0"
+ )]
+ skip_prefix: u32,
+
+ /// Custom pattern selecting file paths to reformat
+ #[clap(
+ short = 'f',
+ long = "filter",
+ value_name = "PATTERN",
+ default_value = DEFAULT_PATTERN
+ )]
+ filter: String,
+}
+
+fn main() {
+ env_logger::Builder::from_env("RUSTFMT_LOG").init();
+ let opts = Opts::parse();
+ if let Err(e) = run(opts) {
+ println!("{}", e);
+ Opts::command()
+ .print_help()
+ .expect("cannot write to stdout");
+ process::exit(1);
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
+struct Range {
+ file: String,
+ range: [u32; 2],
+}
+
+fn run(opts: Opts) -> Result<(), FormatDiffError> {
+ let (files, ranges) = scan_diff(io::stdin(), opts.skip_prefix, &opts.filter)?;
+ run_rustfmt(&files, &ranges)
+}
+
+fn run_rustfmt(files: &HashSet<String>, ranges: &[Range]) -> Result<(), FormatDiffError> {
+ if files.is_empty() || ranges.is_empty() {
+ debug!("No files to format found");
+ return Ok(());
+ }
+
+ let ranges_as_json = json::to_string(ranges).unwrap();
+
+ debug!("Files: {:?}", files);
+ debug!("Ranges: {:?}", ranges);
+
+ let rustfmt_var = env::var_os("RUSTFMT");
+ let rustfmt = match &rustfmt_var {
+ Some(rustfmt) => rustfmt,
+ None => OsStr::new("rustfmt"),
+ };
+ let exit_status = process::Command::new(rustfmt)
+ .args(files)
+ .arg("--file-lines")
+ .arg(ranges_as_json)
+ .status()?;
+
+ if !exit_status.success() {
+ return Err(FormatDiffError::IoError(io::Error::new(
+ io::ErrorKind::Other,
+ format!("rustfmt failed with {}", exit_status),
+ )));
+ }
+ Ok(())
+}
+
+/// Scans a diff from `from`, and returns the set of files found, and the ranges
+/// in those files.
+fn scan_diff<R>(
+ from: R,
+ skip_prefix: u32,
+ file_filter: &str,
+) -> Result<(HashSet<String>, Vec<Range>), FormatDiffError>
+where
+ R: io::Read,
+{
+ let diff_pattern = format!(r"^\+\+\+\s(?:.*?/){{{}}}(\S*)", skip_prefix);
+ let diff_pattern = Regex::new(&diff_pattern).unwrap();
+
+ let lines_pattern = Regex::new(r"^@@.*\+(\d+)(,(\d+))?").unwrap();
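+    // e.g. for a hunk header `@@ -148,7 +148,11 @@` this captures the new-file
+    // start line (148) and, optionally, the line count (11).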
+
+ let file_filter = Regex::new(&format!("^{}$", file_filter))?;
+
+ let mut current_file = None;
+
+ let mut files = HashSet::new();
+ let mut ranges = vec![];
+ for line in io::BufReader::new(from).lines() {
+ let line = line.unwrap();
+
+ if let Some(captures) = diff_pattern.captures(&line) {
+ current_file = Some(captures.get(1).unwrap().as_str().to_owned());
+ }
+
+ let file = match current_file {
+ Some(ref f) => &**f,
+ None => continue,
+ };
+
+ // FIXME(emilio): We could avoid this most of the time if needed, but
+ // it's not clear it's worth it.
+ if !file_filter.is_match(file) {
+ continue;
+ }
+
+ let lines_captures = match lines_pattern.captures(&line) {
+ Some(captures) => captures,
+ None => continue,
+ };
+
+ let start_line = lines_captures
+ .get(1)
+ .unwrap()
+ .as_str()
+ .parse::<u32>()
+ .unwrap();
+ let line_count = match lines_captures.get(3) {
+ Some(line_count) => line_count.as_str().parse::<u32>().unwrap(),
+ None => 1,
+ };
+
+ if line_count == 0 {
+ continue;
+ }
+
+ let end_line = start_line + line_count - 1;
+ files.insert(file.to_owned());
+ ranges.push(Range {
+ file: file.to_owned(),
+ range: [start_line, end_line],
+ });
+ }
+
+ Ok((files, ranges))
+}
+
+#[test]
+fn scan_simple_git_diff() {
+ const DIFF: &str = include_str!("test/bindgen.diff");
+ let (files, ranges) = scan_diff(DIFF.as_bytes(), 1, r".*\.rs").expect("scan_diff failed?");
+
+ assert!(
+ files.contains("src/ir/traversal.rs"),
+ "Should've matched the filter"
+ );
+
+ assert!(
+ !files.contains("tests/headers/anon_enum.hpp"),
+ "Shouldn't have matched the filter"
+ );
+
+ assert_eq!(
+ &ranges,
+ &[
+ Range {
+ file: "src/ir/item.rs".to_owned(),
+ range: [148, 158],
+ },
+ Range {
+ file: "src/ir/item.rs".to_owned(),
+ range: [160, 170],
+ },
+ Range {
+ file: "src/ir/traversal.rs".to_owned(),
+ range: [9, 16],
+ },
+ Range {
+ file: "src/ir/traversal.rs".to_owned(),
+ range: [35, 43],
+ },
+ ]
+ );
+}
+
+#[cfg(test)]
+mod cmd_line_tests {
+ use super::*;
+
+ #[test]
+ fn default_options() {
+ let empty: Vec<String> = vec![];
+ let o = Opts::parse_from(&empty);
+ assert_eq!(DEFAULT_PATTERN, o.filter);
+ assert_eq!(0, o.skip_prefix);
+ }
+
+ #[test]
+ fn good_options() {
+ let o = Opts::parse_from(&["test", "-p", "10", "-f", r".*\.hs"]);
+ assert_eq!(r".*\.hs", o.filter);
+ assert_eq!(10, o.skip_prefix);
+ }
+
+ #[test]
+ fn unexpected_option() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "unexpected"])
+ .is_err()
+ );
+ }
+
+ #[test]
+ fn unexpected_flag() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "--flag"])
+ .is_err()
+ );
+ }
+
+ #[test]
+ fn overridden_option() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "-p", "10", "-p", "20"])
+ .is_err()
+ );
+ }
+
+ #[test]
+ fn negative_filter() {
+ assert!(
+ Opts::command()
+ .try_get_matches_from(&["test", "-p", "-1"])
+ .is_err()
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/format-diff/test/bindgen.diff b/src/tools/rustfmt/src/format-diff/test/bindgen.diff
new file mode 100644
index 000000000..d2fd379f4
--- /dev/null
+++ b/src/tools/rustfmt/src/format-diff/test/bindgen.diff
@@ -0,0 +1,67 @@
+diff --git a/src/ir/item.rs b/src/ir/item.rs
+index 7f3afefb..90d15e96 100644
+--- a/src/ir/item.rs
++++ b/src/ir/item.rs
+@@ -148,7 +148,11 @@ impl<'a, 'b> Iterator for ItemAncestorsIter<'a, 'b>
+ impl AsTemplateParam for ItemId {
+ type Extra = ();
+
+- fn as_template_param(&self, ctx: &BindgenContext, _: &()) -> Option<ItemId> {
++ fn as_template_param(
++ &self,
++ ctx: &BindgenContext,
++ _: &(),
++ ) -> Option<ItemId> {
+ ctx.resolve_item(*self).as_template_param(ctx, &())
+ }
+ }
+@@ -156,7 +160,11 @@ impl AsTemplateParam for ItemId {
+ impl AsTemplateParam for Item {
+ type Extra = ();
+
+- fn as_template_param(&self, ctx: &BindgenContext, _: &()) -> Option<ItemId> {
++ fn as_template_param(
++ &self,
++ ctx: &BindgenContext,
++ _: &(),
++ ) -> Option<ItemId> {
+ self.kind.as_template_param(ctx, self)
+ }
+ }
+diff --git a/src/ir/traversal.rs b/src/ir/traversal.rs
+index 762a3e2d..b9c9dd4e 100644
+--- a/src/ir/traversal.rs
++++ b/src/ir/traversal.rs
+@@ -9,6 +9,8 @@ use std::collections::{BTreeMap, VecDeque};
+ ///
+ /// from --> to
+ ///
++/// Random content to generate a diff.
++///
+ /// The `from` is left implicit: it is the concrete `Trace` implementer which
+ /// yielded this outgoing edge.
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+@@ -33,7 +35,9 @@ impl Into<ItemId> for Edge {
+ }
+ }
+
+-/// The kind of edge reference. This is useful when we wish to only consider
++/// The kind of edge reference.
++///
++/// This is useful when we wish to only consider
+ /// certain kinds of edges for a particular traversal or analysis.
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+ pub enum EdgeKind {
+diff --git a/tests/headers/anon_enum.hpp b/tests/headers/anon_enum.hpp
+index 1961fe6c..34759df3 100644
+--- a/tests/headers/anon_enum.hpp
++++ b/tests/headers/anon_enum.hpp
+@@ -1,7 +1,7 @@
+ struct Test {
+ int foo;
+ float bar;
+- enum { T_NONE };
++ enum { T_NONE, T_SOME };
+ };
+
+ typedef enum {
diff --git a/src/tools/rustfmt/src/format_report_formatter.rs b/src/tools/rustfmt/src/format_report_formatter.rs
new file mode 100644
index 000000000..fd536d4df
--- /dev/null
+++ b/src/tools/rustfmt/src/format_report_formatter.rs
@@ -0,0 +1,150 @@
+use crate::formatting::FormattingError;
+use crate::{ErrorKind, FormatReport};
+use annotate_snippets::display_list::{DisplayList, FormatOptions};
+use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
+use std::fmt::{self, Display};
+
+/// A builder for [`FormatReportFormatter`].
+pub struct FormatReportFormatterBuilder<'a> {
+ report: &'a FormatReport,
+ enable_colors: bool,
+}
+
+impl<'a> FormatReportFormatterBuilder<'a> {
+ /// Creates a new [`FormatReportFormatterBuilder`].
+ pub fn new(report: &'a FormatReport) -> Self {
+ Self {
+ report,
+ enable_colors: false,
+ }
+ }
+
+ /// Enables colors and formatting in the output.
+ #[must_use]
+ pub fn enable_colors(self, enable_colors: bool) -> Self {
+ Self {
+ enable_colors,
+ ..self
+ }
+ }
+
+ /// Creates a new [`FormatReportFormatter`] from the settings in this builder.
+ pub fn build(self) -> FormatReportFormatter<'a> {
+ FormatReportFormatter {
+ report: self.report,
+ enable_colors: self.enable_colors,
+ }
+ }
+}
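+// Typical usage (illustrative sketch, not part of the original file): build a
+// formatter from an existing `FormatReport` and print it; `report` is assumed
+// to come from a formatting `Session`.
+//
+//     let formatter = FormatReportFormatterBuilder::new(&report)
+//         .enable_colors(true)
+//         .build();
+//     eprintln!("{}", formatter);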
+
+/// Formats the warnings/errors in a [`FormatReport`].
+///
+/// Can be created using a [`FormatReportFormatterBuilder`].
+pub struct FormatReportFormatter<'a> {
+ report: &'a FormatReport,
+ enable_colors: bool,
+}
+
+impl<'a> Display for FormatReportFormatter<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let errors_by_file = &self.report.internal.borrow().0;
+
+ let opt = FormatOptions {
+ color: self.enable_colors,
+ ..Default::default()
+ };
+
+ for (file, errors) in errors_by_file {
+ for error in errors {
+ let error_kind = error.kind.to_string();
+ let title = Some(Annotation {
+ id: if error.is_internal() {
+ Some("internal")
+ } else {
+ None
+ },
+ label: Some(&error_kind),
+ annotation_type: error_kind_to_snippet_annotation_type(&error.kind),
+ });
+
+ let message_suffix = error.msg_suffix();
+ let footer = if !message_suffix.is_empty() {
+ Some(Annotation {
+ id: None,
+ label: Some(message_suffix),
+ annotation_type: AnnotationType::Note,
+ })
+ } else {
+ None
+ };
+
+ let origin = format!("{}:{}", file, error.line);
+ let slice = Slice {
+ source: &error.line_buffer.clone(),
+ line_start: error.line,
+ origin: Some(origin.as_str()),
+ fold: false,
+ annotations: slice_annotation(error).into_iter().collect(),
+ };
+
+ let snippet = Snippet {
+ title,
+ footer: footer.into_iter().collect(),
+ slices: vec![slice],
+ opt,
+ };
+ writeln!(f, "{}\n", DisplayList::from(snippet))?;
+ }
+ }
+
+ if !errors_by_file.is_empty() {
+ let label = format!(
+ "rustfmt has failed to format. See previous {} errors.",
+ self.report.warning_count()
+ );
+ let snippet = Snippet {
+ title: Some(Annotation {
+ id: None,
+ label: Some(&label),
+ annotation_type: AnnotationType::Warning,
+ }),
+ footer: Vec::new(),
+ slices: Vec::new(),
+ opt,
+ };
+ writeln!(f, "{}", DisplayList::from(snippet))?;
+ }
+
+ Ok(())
+ }
+}
+
+fn slice_annotation(error: &FormattingError) -> Option<SourceAnnotation<'_>> {
+ let (range_start, range_length) = error.format_len();
+ let range_end = range_start + range_length;
+
+ if range_length > 0 {
+ Some(SourceAnnotation {
+ annotation_type: AnnotationType::Error,
+ range: (range_start, range_end),
+ label: "",
+ })
+ } else {
+ None
+ }
+}
+
+fn error_kind_to_snippet_annotation_type(error_kind: &ErrorKind) -> AnnotationType {
+ match error_kind {
+ ErrorKind::LineOverflow(..)
+ | ErrorKind::TrailingWhitespace
+ | ErrorKind::IoError(_)
+ | ErrorKind::ModuleResolutionError(_)
+ | ErrorKind::ParseError
+ | ErrorKind::LostComment
+ | ErrorKind::BadAttr
+ | ErrorKind::InvalidGlobPattern(_)
+ | ErrorKind::VersionMismatch => AnnotationType::Error,
+ ErrorKind::DeprecatedAttr => AnnotationType::Warning,
+ }
+}
diff --git a/src/tools/rustfmt/src/formatting.rs b/src/tools/rustfmt/src/formatting.rs
new file mode 100644
index 000000000..1dfd8a514
--- /dev/null
+++ b/src/tools/rustfmt/src/formatting.rs
@@ -0,0 +1,632 @@
+// High level formatting functions.
+
+use std::collections::HashMap;
+use std::io::{self, Write};
+use std::time::{Duration, Instant};
+
+use rustc_ast::ast;
+use rustc_span::Span;
+
+use self::newline_style::apply_newline_style;
+use crate::comment::{CharClasses, FullCodeCharKind};
+use crate::config::{Config, FileName, Verbosity};
+use crate::formatting::generated::is_generated_file;
+use crate::modules::Module;
+use crate::parse::parser::{DirectoryOwnership, Parser, ParserError};
+use crate::parse::session::ParseSess;
+use crate::utils::{contains_skip, count_newlines};
+use crate::visitor::FmtVisitor;
+use crate::{modules, source_file, ErrorKind, FormatReport, Input, Session};
+
+mod generated;
+mod newline_style;
+
+// A map of the files of a crate, with their new content
+pub(crate) type SourceFile = Vec<FileRecord>;
+pub(crate) type FileRecord = (FileName, String);
+
+impl<'b, T: Write + 'b> Session<'b, T> {
+ pub(crate) fn format_input_inner(
+ &mut self,
+ input: Input,
+ is_macro_def: bool,
+ ) -> Result<FormatReport, ErrorKind> {
+ if !self.config.version_meets_requirement() {
+ return Err(ErrorKind::VersionMismatch);
+ }
+
+ rustc_span::create_session_if_not_set_then(self.config.edition().into(), |_| {
+ if self.config.disable_all_formatting() {
+ // When the input is from stdin, echo back the input.
+ return match input {
+ Input::Text(ref buf) => echo_back_stdin(buf),
+ _ => Ok(FormatReport::new()),
+ };
+ }
+
+ let config = &self.config.clone();
+ let format_result = format_project(input, config, self, is_macro_def);
+
+ format_result.map(|report| {
+ self.errors.add(&report.internal.borrow().1);
+ report
+ })
+ })
+ }
+}
+
+/// Returns `true` if the given module should be skipped during formatting, `false` otherwise.
+fn should_skip_module<T: FormatHandler>(
+ config: &Config,
+ context: &FormatContext<'_, T>,
+ input_is_stdin: bool,
+ main_file: &FileName,
+ path: &FileName,
+ module: &Module<'_>,
+) -> bool {
+ if contains_skip(module.attrs()) {
+ return true;
+ }
+
+ if config.skip_children() && path != main_file {
+ return true;
+ }
+
+ if !input_is_stdin && context.ignore_file(path) {
+ return true;
+ }
+
+ // FIXME(calebcartwright) - we need to determine how we'll handle the
+ // `format_generated_files` option with stdin based input.
+ if !input_is_stdin && !config.format_generated_files() {
+ let source_file = context.parse_session.span_to_file_contents(module.span);
+ let src = source_file.src.as_ref().expect("SourceFile without src");
+
+ if is_generated_file(src) {
+ return true;
+ }
+ }
+
+ false
+}
+
+fn echo_back_stdin(input: &str) -> Result<FormatReport, ErrorKind> {
+ if let Err(e) = io::stdout().write_all(input.as_bytes()) {
+ return Err(From::from(e));
+ }
+ Ok(FormatReport::new())
+}
+
+// Format an entire crate (or subset of the module tree).
+fn format_project<T: FormatHandler>(
+ input: Input,
+ config: &Config,
+ handler: &mut T,
+ is_macro_def: bool,
+) -> Result<FormatReport, ErrorKind> {
+ let mut timer = Timer::start();
+
+ let main_file = input.file_name();
+ let input_is_stdin = main_file == FileName::Stdin;
+
+ let parse_session = ParseSess::new(config)?;
+ if config.skip_children() && parse_session.ignore_file(&main_file) {
+ return Ok(FormatReport::new());
+ }
+
+ // Parse the crate.
+ let mut report = FormatReport::new();
+ let directory_ownership = input.to_directory_ownership();
+ let krate = match Parser::parse_crate(input, &parse_session) {
+ Ok(krate) => krate,
+ // Surface parse error via Session (errors are merged there from report)
+ Err(e) => {
+ let forbid_verbose = input_is_stdin || e != ParserError::ParsePanicError;
+ should_emit_verbose(forbid_verbose, config, || {
+ eprintln!("The Rust parser panicked");
+ });
+ report.add_parsing_error();
+ return Ok(report);
+ }
+ };
+
+ let mut context = FormatContext::new(&krate, report, parse_session, config, handler);
+ let files = modules::ModResolver::new(
+ &context.parse_session,
+ directory_ownership.unwrap_or(DirectoryOwnership::UnownedViaBlock),
+ !input_is_stdin && !config.skip_children(),
+ )
+ .visit_crate(&krate)?
+ .into_iter()
+ .filter(|(path, module)| {
+ input_is_stdin
+ || !should_skip_module(config, &context, input_is_stdin, &main_file, path, module)
+ })
+ .collect::<Vec<_>>();
+
+ timer = timer.done_parsing();
+
+ // Suppress error output if we have to do any further parsing.
+ context.parse_session.set_silent_emitter();
+
+ for (path, module) in files {
+ if input_is_stdin && contains_skip(module.attrs()) {
+ return echo_back_stdin(
+ context
+ .parse_session
+ .snippet_provider(module.span)
+ .entire_snippet(),
+ );
+ }
+ should_emit_verbose(input_is_stdin, config, || println!("Formatting {}", path));
+ context.format_file(path, &module, is_macro_def)?;
+ }
+ timer = timer.done_formatting();
+
+ should_emit_verbose(input_is_stdin, config, || {
+ println!(
+ "Spent {0:.3} secs in the parsing phase, and {1:.3} secs in the formatting phase",
+ timer.get_parse_time(),
+ timer.get_format_time(),
+ )
+ });
+
+ Ok(context.report)
+}
+
+// Used for formatting files.
+#[derive(new)]
+struct FormatContext<'a, T: FormatHandler> {
+ krate: &'a ast::Crate,
+ report: FormatReport,
+ parse_session: ParseSess,
+ config: &'a Config,
+ handler: &'a mut T,
+}
+
+impl<'a, T: FormatHandler + 'a> FormatContext<'a, T> {
+ fn ignore_file(&self, path: &FileName) -> bool {
+ self.parse_session.ignore_file(path)
+ }
+
+ // Formats a single file/module.
+ fn format_file(
+ &mut self,
+ path: FileName,
+ module: &Module<'_>,
+ is_macro_def: bool,
+ ) -> Result<(), ErrorKind> {
+ let snippet_provider = self.parse_session.snippet_provider(module.span);
+ let mut visitor = FmtVisitor::from_parse_sess(
+ &self.parse_session,
+ self.config,
+ &snippet_provider,
+ self.report.clone(),
+ );
+ visitor.skip_context.update_with_attrs(&self.krate.attrs);
+ visitor.is_macro_def = is_macro_def;
+ visitor.last_pos = snippet_provider.start_pos();
+ visitor.skip_empty_lines(snippet_provider.end_pos());
+ visitor.format_separate_mod(module, snippet_provider.end_pos());
+
+ debug_assert_eq!(
+ visitor.line_number,
+ count_newlines(&visitor.buffer),
+ "failed in format_file visitor.buffer:\n {:?}",
+ &visitor.buffer
+ );
+
+ // For some reason, the source_map does not include terminating
+ // newlines so we must add one on for each file. This is sad.
+ source_file::append_newline(&mut visitor.buffer);
+
+ format_lines(
+ &mut visitor.buffer,
+ &path,
+ &visitor.skipped_range.borrow(),
+ self.config,
+ &self.report,
+ );
+
+ apply_newline_style(
+ self.config.newline_style(),
+ &mut visitor.buffer,
+ snippet_provider.entire_snippet(),
+ );
+
+ if visitor.macro_rewrite_failure {
+ self.report.add_macro_format_failure();
+ }
+ self.report
+ .add_non_formatted_ranges(visitor.skipped_range.borrow().clone());
+
+ self.handler.handle_formatted_file(
+ &self.parse_session,
+ path,
+ visitor.buffer.to_owned(),
+ &mut self.report,
+ )
+ }
+}
+
+// Handle the results of formatting.
+trait FormatHandler {
+ fn handle_formatted_file(
+ &mut self,
+ parse_session: &ParseSess,
+ path: FileName,
+ result: String,
+ report: &mut FormatReport,
+ ) -> Result<(), ErrorKind>;
+}
+
+impl<'b, T: Write + 'b> FormatHandler for Session<'b, T> {
+ // Called for each formatted file.
+ fn handle_formatted_file(
+ &mut self,
+ parse_session: &ParseSess,
+ path: FileName,
+ result: String,
+ report: &mut FormatReport,
+ ) -> Result<(), ErrorKind> {
+ if let Some(ref mut out) = self.out {
+ match source_file::write_file(
+ Some(parse_session),
+ &path,
+ &result,
+ out,
+ &mut *self.emitter,
+ self.config.newline_style(),
+ ) {
+ Ok(ref result) if result.has_diff => report.add_diff(),
+ Err(e) => {
+                    // Create a new error that includes the path, to help users see which files failed.
+ let err_msg = format!("{}: {}", path, e);
+ return Err(io::Error::new(e.kind(), err_msg).into());
+ }
+ _ => {}
+ }
+ }
+
+ self.source_file.push((path, result));
+ Ok(())
+ }
+}
+
+pub(crate) struct FormattingError {
+ pub(crate) line: usize,
+ pub(crate) kind: ErrorKind,
+ is_comment: bool,
+ is_string: bool,
+ pub(crate) line_buffer: String,
+}
+
+impl FormattingError {
+ pub(crate) fn from_span(
+ span: Span,
+ parse_sess: &ParseSess,
+ kind: ErrorKind,
+ ) -> FormattingError {
+ FormattingError {
+ line: parse_sess.line_of_byte_pos(span.lo()),
+ is_comment: kind.is_comment(),
+ kind,
+ is_string: false,
+ line_buffer: parse_sess.span_to_first_line_string(span),
+ }
+ }
+
+ pub(crate) fn is_internal(&self) -> bool {
+ match self.kind {
+ ErrorKind::LineOverflow(..)
+ | ErrorKind::TrailingWhitespace
+ | ErrorKind::IoError(_)
+ | ErrorKind::ParseError
+ | ErrorKind::LostComment => true,
+ _ => false,
+ }
+ }
+
+ pub(crate) fn msg_suffix(&self) -> &str {
+ if self.is_comment || self.is_string {
+ "set `error_on_unformatted = false` to suppress \
+ the warning against comments or string literals\n"
+ } else {
+ ""
+ }
+ }
+
+    // Returns the (start offset, length) of the problematic part of `line_buffer`.
+ pub(crate) fn format_len(&self) -> (usize, usize) {
+ match self.kind {
+ ErrorKind::LineOverflow(found, max) => (max, found - max),
+ ErrorKind::TrailingWhitespace
+ | ErrorKind::DeprecatedAttr
+ | ErrorKind::BadAttr
+ | ErrorKind::LostComment => {
+ let trailing_ws_start = self
+ .line_buffer
+ .rfind(|c: char| !c.is_whitespace())
+ .map(|pos| pos + 1)
+ .unwrap_or(0);
+ (
+ trailing_ws_start,
+ self.line_buffer.len() - trailing_ws_start,
+ )
+ }
+ _ => unreachable!(),
+ }
+ }
+}
+
+pub(crate) type FormatErrorMap = HashMap<FileName, Vec<FormattingError>>;
+
+#[derive(Default, Debug, PartialEq)]
+pub(crate) struct ReportedErrors {
+ // Encountered e.g., an IO error.
+ pub(crate) has_operational_errors: bool,
+
+ // Failed to reformat code because of parsing errors.
+ pub(crate) has_parsing_errors: bool,
+
+ // Code is valid, but it is impossible to format it properly.
+ pub(crate) has_formatting_errors: bool,
+
+    // Code contains a macro call that rustfmt was unable to format.
+ pub(crate) has_macro_format_failure: bool,
+
+    // Failed an opt-in check.
+ pub(crate) has_check_errors: bool,
+
+ /// Formatted code differs from existing code (--check only).
+ pub(crate) has_diff: bool,
+
+    /// Formatted code missed something, such as lost comments or extra trailing whitespace.
+ pub(crate) has_unformatted_code_errors: bool,
+}
+
+impl ReportedErrors {
+ /// Combine two summaries together.
+ pub(crate) fn add(&mut self, other: &ReportedErrors) {
+ self.has_operational_errors |= other.has_operational_errors;
+ self.has_parsing_errors |= other.has_parsing_errors;
+ self.has_formatting_errors |= other.has_formatting_errors;
+ self.has_macro_format_failure |= other.has_macro_format_failure;
+ self.has_check_errors |= other.has_check_errors;
+ self.has_diff |= other.has_diff;
+ self.has_unformatted_code_errors |= other.has_unformatted_code_errors;
+ }
+}
+
+#[derive(Clone, Copy, Debug)]
+enum Timer {
+ Disabled,
+ Initialized(Instant),
+ DoneParsing(Instant, Instant),
+ DoneFormatting(Instant, Instant, Instant),
+}
+
+impl Timer {
+ fn start() -> Timer {
+ if cfg!(target_arch = "wasm32") {
+ Timer::Disabled
+ } else {
+ Timer::Initialized(Instant::now())
+ }
+ }
+ fn done_parsing(self) -> Self {
+ match self {
+ Timer::Disabled => Timer::Disabled,
+ Timer::Initialized(init_time) => Timer::DoneParsing(init_time, Instant::now()),
+ _ => panic!("Timer can only transition to DoneParsing from Initialized state"),
+ }
+ }
+
+ fn done_formatting(self) -> Self {
+ match self {
+ Timer::Disabled => Timer::Disabled,
+ Timer::DoneParsing(init_time, parse_time) => {
+ Timer::DoneFormatting(init_time, parse_time, Instant::now())
+ }
+ _ => panic!("Timer can only transition to DoneFormatting from DoneParsing state"),
+ }
+ }
+
+ /// Returns the time it took to parse the source files in seconds.
+ fn get_parse_time(&self) -> f32 {
+ match *self {
+ Timer::Disabled => panic!("this platform cannot time execution"),
+ Timer::DoneParsing(init, parse_time) | Timer::DoneFormatting(init, parse_time, _) => {
+ // This should never underflow since `Instant::now()` guarantees monotonicity.
+ Self::duration_to_f32(parse_time.duration_since(init))
+ }
+ Timer::Initialized(..) => unreachable!(),
+ }
+ }
+
+ /// Returns the time it took to go from the parsed AST to the formatted output. Parsing time is
+ /// not included.
+ fn get_format_time(&self) -> f32 {
+ match *self {
+ Timer::Disabled => panic!("this platform cannot time execution"),
+ Timer::DoneFormatting(_init, parse_time, format_time) => {
+ Self::duration_to_f32(format_time.duration_since(parse_time))
+ }
+ Timer::DoneParsing(..) | Timer::Initialized(..) => unreachable!(),
+ }
+ }
+
+ fn duration_to_f32(d: Duration) -> f32 {
+ d.as_secs() as f32 + d.subsec_nanos() as f32 / 1_000_000_000f32
+ }
+}
+
+// Formatting done on a char-by-char or line-by-line basis.
+// FIXME(#20): other stuff for parity with make tidy.
+fn format_lines(
+ text: &mut String,
+ name: &FileName,
+ skipped_range: &[(usize, usize)],
+ config: &Config,
+ report: &FormatReport,
+) {
+ let mut formatter = FormatLines::new(name, skipped_range, config);
+ formatter.iterate(text);
+
+ if formatter.newline_count > 1 {
+ debug!("track truncate: {} {}", text.len(), formatter.newline_count);
+ let line = text.len() - formatter.newline_count + 1;
+ text.truncate(line);
+ }
+
+ report.append(name.clone(), formatter.errors);
+}
+
+struct FormatLines<'a> {
+ name: &'a FileName,
+ skipped_range: &'a [(usize, usize)],
+ last_was_space: bool,
+ line_len: usize,
+ cur_line: usize,
+ newline_count: usize,
+ errors: Vec<FormattingError>,
+ line_buffer: String,
+ current_line_contains_string_literal: bool,
+ format_line: bool,
+ config: &'a Config,
+}
+
+impl<'a> FormatLines<'a> {
+ fn new(
+ name: &'a FileName,
+ skipped_range: &'a [(usize, usize)],
+ config: &'a Config,
+ ) -> FormatLines<'a> {
+ FormatLines {
+ name,
+ skipped_range,
+ last_was_space: false,
+ line_len: 0,
+ cur_line: 1,
+ newline_count: 0,
+ errors: vec![],
+ line_buffer: String::with_capacity(config.max_width() * 2),
+ current_line_contains_string_literal: false,
+ format_line: config.file_lines().contains_line(name, 1),
+ config,
+ }
+ }
+
+    // Iterate over the chars in the text.
+ fn iterate(&mut self, text: &mut String) {
+ for (kind, c) in CharClasses::new(text.chars()) {
+ if c == '\r' {
+ continue;
+ }
+
+ if c == '\n' {
+ self.new_line(kind);
+ } else {
+ self.char(c, kind);
+ }
+ }
+ }
+
+ fn new_line(&mut self, kind: FullCodeCharKind) {
+ if self.format_line {
+ // Check for (and record) trailing whitespace.
+ if self.last_was_space {
+ if self.should_report_error(kind, &ErrorKind::TrailingWhitespace)
+ && !self.is_skipped_line()
+ {
+ self.push_err(
+ ErrorKind::TrailingWhitespace,
+ kind.is_comment(),
+ kind.is_string(),
+ );
+ }
+ self.line_len -= 1;
+ }
+
+ // Check for any line width errors we couldn't correct.
+ let error_kind = ErrorKind::LineOverflow(self.line_len, self.config.max_width());
+ if self.line_len > self.config.max_width()
+ && !self.is_skipped_line()
+ && self.should_report_error(kind, &error_kind)
+ {
+ let is_string = self.current_line_contains_string_literal;
+ self.push_err(error_kind, kind.is_comment(), is_string);
+ }
+ }
+
+ self.line_len = 0;
+ self.cur_line += 1;
+ self.format_line = self
+ .config
+ .file_lines()
+ .contains_line(self.name, self.cur_line);
+ self.newline_count += 1;
+ self.last_was_space = false;
+ self.line_buffer.clear();
+ self.current_line_contains_string_literal = false;
+ }
+
+ fn char(&mut self, c: char, kind: FullCodeCharKind) {
+ self.newline_count = 0;
+ self.line_len += if c == '\t' {
+ self.config.tab_spaces()
+ } else {
+ 1
+ };
+ self.last_was_space = c.is_whitespace();
+ self.line_buffer.push(c);
+ if kind.is_string() {
+ self.current_line_contains_string_literal = true;
+ }
+ }
+
+ fn push_err(&mut self, kind: ErrorKind, is_comment: bool, is_string: bool) {
+ self.errors.push(FormattingError {
+ line: self.cur_line,
+ kind,
+ is_comment,
+ is_string,
+ line_buffer: self.line_buffer.clone(),
+ });
+ }
+
+ fn should_report_error(&self, char_kind: FullCodeCharKind, error_kind: &ErrorKind) -> bool {
+ let allow_error_report = if char_kind.is_comment()
+ || self.current_line_contains_string_literal
+ || error_kind.is_comment()
+ {
+ self.config.error_on_unformatted()
+ } else {
+ true
+ };
+
+ match error_kind {
+ ErrorKind::LineOverflow(..) => {
+ self.config.error_on_line_overflow() && allow_error_report
+ }
+ ErrorKind::TrailingWhitespace | ErrorKind::LostComment => allow_error_report,
+ _ => true,
+ }
+ }
+
+ /// Returns `true` if the line with the given line number was skipped by `#[rustfmt::skip]`.
+ fn is_skipped_line(&self) -> bool {
+ self.skipped_range
+ .iter()
+ .any(|&(lo, hi)| lo <= self.cur_line && self.cur_line <= hi)
+ }
+}
+
+fn should_emit_verbose<F>(forbid_verbose_output: bool, config: &Config, f: F)
+where
+ F: Fn(),
+{
+ if config.verbose() == Verbosity::Verbose && !forbid_verbose_output {
+ f();
+ }
+}
diff --git a/src/tools/rustfmt/src/formatting/generated.rs b/src/tools/rustfmt/src/formatting/generated.rs
new file mode 100644
index 000000000..58f43f17e
--- /dev/null
+++ b/src/tools/rustfmt/src/formatting/generated.rs
@@ -0,0 +1,7 @@
+/// Returns `true` if the given source text is part of a generated file.
+pub(super) fn is_generated_file(original_snippet: &str) -> bool {
+ original_snippet
+ .lines()
+ .take(5) // looking for marker only in the beginning of the file
+ .any(|line| line.contains("@generated"))
+}
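+// Illustration (not part of the original file): the marker must appear within
+// the first five lines.
+//
+//     assert!(is_generated_file("// @generated by some-tool\nfn main() {}"));
+//     assert!(!is_generated_file("fn main() {}"));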
diff --git a/src/tools/rustfmt/src/formatting/newline_style.rs b/src/tools/rustfmt/src/formatting/newline_style.rs
new file mode 100644
index 000000000..97c4fc16d
--- /dev/null
+++ b/src/tools/rustfmt/src/formatting/newline_style.rs
@@ -0,0 +1,250 @@
+use crate::NewlineStyle;
+
+/// Apply this newline style to the formatted text. When the style is set
+/// to `Auto`, the `raw_input_text` is used to detect the existing line
+/// endings.
+///
+/// If the style is set to `Auto` and `raw_input_text` contains no
+/// newlines, the `Native` style will be used.
+pub(crate) fn apply_newline_style(
+ newline_style: NewlineStyle,
+ formatted_text: &mut String,
+ raw_input_text: &str,
+) {
+ *formatted_text = match effective_newline_style(newline_style, raw_input_text) {
+ EffectiveNewlineStyle::Windows => convert_to_windows_newlines(formatted_text),
+ EffectiveNewlineStyle::Unix => convert_to_unix_newlines(formatted_text),
+ }
+}
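+// Usage sketch (illustrative, mirroring the tests below): with `Auto` and CRLF
+// raw input, the formatted text is rewritten with `\r\n` line endings.
+//
+//     let mut out = String::from("One\nTwo");
+//     apply_newline_style(NewlineStyle::Auto, &mut out, "One\r\nTwo");
+//     assert_eq!(out, "One\r\nTwo");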
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum EffectiveNewlineStyle {
+ Windows,
+ Unix,
+}
+
+fn effective_newline_style(
+ newline_style: NewlineStyle,
+ raw_input_text: &str,
+) -> EffectiveNewlineStyle {
+ match newline_style {
+ NewlineStyle::Auto => auto_detect_newline_style(raw_input_text),
+ NewlineStyle::Native => native_newline_style(),
+ NewlineStyle::Windows => EffectiveNewlineStyle::Windows,
+ NewlineStyle::Unix => EffectiveNewlineStyle::Unix,
+ }
+}
+
+const LINE_FEED: char = '\n';
+const CARRIAGE_RETURN: char = '\r';
+const WINDOWS_NEWLINE: &str = "\r\n";
+const UNIX_NEWLINE: &str = "\n";
+
+fn auto_detect_newline_style(raw_input_text: &str) -> EffectiveNewlineStyle {
+ let first_line_feed_pos = raw_input_text.chars().position(|ch| ch == LINE_FEED);
+ match first_line_feed_pos {
+ Some(first_line_feed_pos) => {
+ let char_before_line_feed_pos = first_line_feed_pos.saturating_sub(1);
+ let char_before_line_feed = raw_input_text.chars().nth(char_before_line_feed_pos);
+ match char_before_line_feed {
+ Some(CARRIAGE_RETURN) => EffectiveNewlineStyle::Windows,
+ _ => EffectiveNewlineStyle::Unix,
+ }
+ }
+ None => native_newline_style(),
+ }
+}
+
+fn native_newline_style() -> EffectiveNewlineStyle {
+ if cfg!(windows) {
+ EffectiveNewlineStyle::Windows
+ } else {
+ EffectiveNewlineStyle::Unix
+ }
+}
+
+fn convert_to_windows_newlines(formatted_text: &String) -> String {
+ let mut transformed = String::with_capacity(2 * formatted_text.capacity());
+ let mut chars = formatted_text.chars().peekable();
+ while let Some(current_char) = chars.next() {
+ let next_char = chars.peek();
+ match current_char {
+ LINE_FEED => transformed.push_str(WINDOWS_NEWLINE),
+ CARRIAGE_RETURN if next_char == Some(&LINE_FEED) => {}
+ current_char => transformed.push(current_char),
+ }
+ }
+ transformed
+}
+
+fn convert_to_unix_newlines(formatted_text: &str) -> String {
+ formatted_text.replace(WINDOWS_NEWLINE, UNIX_NEWLINE)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn auto_detects_unix_newlines() {
+ assert_eq!(
+ EffectiveNewlineStyle::Unix,
+ auto_detect_newline_style("One\nTwo\nThree")
+ );
+ }
+
+ #[test]
+ fn auto_detects_windows_newlines() {
+ assert_eq!(
+ EffectiveNewlineStyle::Windows,
+ auto_detect_newline_style("One\r\nTwo\r\nThree")
+ );
+ }
+
+ #[test]
+ fn auto_detects_windows_newlines_with_multibyte_char_on_first_line() {
+ assert_eq!(
+ EffectiveNewlineStyle::Windows,
+ auto_detect_newline_style("A 🎢 of a first line\r\nTwo\r\nThree")
+ );
+ }
+
+ #[test]
+ fn falls_back_to_native_newlines_if_no_newlines_are_found() {
+ let expected_newline_style = if cfg!(windows) {
+ EffectiveNewlineStyle::Windows
+ } else {
+ EffectiveNewlineStyle::Unix
+ };
+ assert_eq!(
+ expected_newline_style,
+ auto_detect_newline_style("One Two Three")
+ );
+ }
+
+ #[test]
+ fn auto_detects_and_applies_unix_newlines() {
+ let formatted_text = "One\nTwo\nThree";
+ let raw_input_text = "One\nTwo\nThree";
+
+ let mut out = String::from(formatted_text);
+ apply_newline_style(NewlineStyle::Auto, &mut out, raw_input_text);
+ assert_eq!("One\nTwo\nThree", &out, "auto should detect 'lf'");
+ }
+
+ #[test]
+ fn auto_detects_and_applies_windows_newlines() {
+ let formatted_text = "One\nTwo\nThree";
+ let raw_input_text = "One\r\nTwo\r\nThree";
+
+ let mut out = String::from(formatted_text);
+ apply_newline_style(NewlineStyle::Auto, &mut out, raw_input_text);
+ assert_eq!("One\r\nTwo\r\nThree", &out, "auto should detect 'crlf'");
+ }
+
+ #[test]
+ fn auto_detects_and_applies_native_newlines() {
+ let formatted_text = "One\nTwo\nThree";
+ let raw_input_text = "One Two Three";
+
+ let mut out = String::from(formatted_text);
+ apply_newline_style(NewlineStyle::Auto, &mut out, raw_input_text);
+
+ if cfg!(windows) {
+ assert_eq!(
+ "One\r\nTwo\r\nThree", &out,
+ "auto-native-windows should detect 'crlf'"
+ );
+ } else {
+ assert_eq!(
+ "One\nTwo\nThree", &out,
+ "auto-native-unix should detect 'lf'"
+ );
+ }
+ }
+
+ #[test]
+ fn applies_unix_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\r\nTwo\nThree",
+ "One\nTwo\nThree",
+ NewlineStyle::Unix,
+ );
+ }
+
+ #[test]
+ fn applying_unix_newlines_changes_nothing_for_unix_newlines() {
+ let formatted_text = "One\nTwo\nThree";
+ test_newlines_are_applied_correctly(formatted_text, formatted_text, NewlineStyle::Unix);
+ }
+
+ #[test]
+ fn applies_unix_newlines_to_string_with_unix_and_windows_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\r\nTwo\r\nThree\nFour",
+ "One\nTwo\nThree\nFour",
+ NewlineStyle::Unix,
+ );
+ }
+
+ #[test]
+ fn applies_windows_newlines_to_string_with_unix_and_windows_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\nTwo\nThree\r\nFour",
+ "One\r\nTwo\r\nThree\r\nFour",
+ NewlineStyle::Windows,
+ );
+ }
+
+ #[test]
+ fn applying_windows_newlines_changes_nothing_for_windows_newlines() {
+ let formatted_text = "One\r\nTwo\r\nThree";
+ test_newlines_are_applied_correctly(formatted_text, formatted_text, NewlineStyle::Windows);
+ }
+
+ #[test]
+ fn keeps_carriage_returns_when_applying_windows_newlines_to_str_with_unix_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\nTwo\nThree\rDrei",
+ "One\r\nTwo\r\nThree\rDrei",
+ NewlineStyle::Windows,
+ );
+ }
+
+ #[test]
+ fn keeps_carriage_returns_when_applying_unix_newlines_to_str_with_unix_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\nTwo\nThree\rDrei",
+ "One\nTwo\nThree\rDrei",
+ NewlineStyle::Unix,
+ );
+ }
+
+ #[test]
+ fn keeps_carriage_returns_when_applying_windows_newlines_to_str_with_windows_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\r\nTwo\r\nThree\rDrei",
+ "One\r\nTwo\r\nThree\rDrei",
+ NewlineStyle::Windows,
+ );
+ }
+
+ #[test]
+ fn keeps_carriage_returns_when_applying_unix_newlines_to_str_with_windows_newlines() {
+ test_newlines_are_applied_correctly(
+ "One\r\nTwo\r\nThree\rDrei",
+ "One\nTwo\nThree\rDrei",
+ NewlineStyle::Unix,
+ );
+ }
+
+ fn test_newlines_are_applied_correctly(
+ input: &str,
+ expected: &str,
+ newline_style: NewlineStyle,
+ ) {
+ let mut out = String::from(input);
+ apply_newline_style(newline_style, &mut out, input);
+ assert_eq!(expected, &out);
+ }
+}
diff --git a/src/tools/rustfmt/src/git-rustfmt/main.rs b/src/tools/rustfmt/src/git-rustfmt/main.rs
new file mode 100644
index 000000000..579778edb
--- /dev/null
+++ b/src/tools/rustfmt/src/git-rustfmt/main.rs
@@ -0,0 +1,192 @@
+#[macro_use]
+extern crate log;
+
+use std::env;
+use std::io::stdout;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::str::FromStr;
+
+use getopts::{Matches, Options};
+use rustfmt_nightly as rustfmt;
+
+use crate::rustfmt::{load_config, CliOptions, FormatReportFormatterBuilder, Input, Session};
+
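+// Drops changed files whose path lies under a directory whose `mod.rs`/`lib.rs`
+// also changed, keeping the module roots themselves and anything under `src/bin/`.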
+fn prune_files(files: Vec<&str>) -> Vec<&str> {
+ let prefixes: Vec<_> = files
+ .iter()
+ .filter(|f| f.ends_with("mod.rs") || f.ends_with("lib.rs"))
+ .map(|f| &f[..f.len() - 6])
+ .collect();
+
+ let mut pruned_prefixes = vec![];
+ for p1 in prefixes {
+ if p1.starts_with("src/bin/") || pruned_prefixes.iter().all(|p2| !p1.starts_with(p2)) {
+ pruned_prefixes.push(p1);
+ }
+ }
+ debug!("prefixes: {:?}", pruned_prefixes);
+
+ files
+ .into_iter()
+ .filter(|f| {
+ if f.ends_with("mod.rs") || f.ends_with("lib.rs") || f.starts_with("src/bin/") {
+ return true;
+ }
+ pruned_prefixes.iter().all(|pp| !f.starts_with(pp))
+ })
+ .collect()
+}
+
+fn git_diff(commits: &str) -> String {
+ let mut cmd = Command::new("git");
+ cmd.arg("diff");
+ if commits != "0" {
+ cmd.arg(format!("HEAD~{}", commits));
+ }
+ let output = cmd.output().expect("Couldn't execute `git diff`");
+ String::from_utf8_lossy(&output.stdout).into_owned()
+}
+
+fn get_files(input: &str) -> Vec<&str> {
+ input
+ .lines()
+ .filter(|line| line.starts_with("+++ b/") && line.ends_with(".rs"))
+ .map(|line| &line[6..])
+ .collect()
+}
+
+fn fmt_files(files: &[&str]) -> i32 {
+ let (config, _) =
+ load_config::<NullOptions>(Some(Path::new(".")), None).expect("couldn't load config");
+
+ let mut exit_code = 0;
+ let mut out = stdout();
+ let mut session = Session::new(config, Some(&mut out));
+ for file in files {
+ let report = session.format(Input::File(PathBuf::from(file))).unwrap();
+ if report.has_warnings() {
+ eprintln!("{}", FormatReportFormatterBuilder::new(&report).build());
+ }
+ if !session.has_no_errors() {
+ exit_code = 1;
+ }
+ }
+ exit_code
+}
+
+struct NullOptions;
+
+impl CliOptions for NullOptions {
+ fn apply_to(self, _: &mut rustfmt::Config) {
+ unreachable!();
+ }
+ fn config_path(&self) -> Option<&Path> {
+ unreachable!();
+ }
+}
+
+fn uncommitted_files() -> Vec<String> {
+ let mut cmd = Command::new("git");
+ cmd.arg("ls-files");
+ cmd.arg("--others");
+ cmd.arg("--modified");
+ cmd.arg("--exclude-standard");
+ let output = cmd.output().expect("Couldn't execute Git");
+ let stdout = String::from_utf8_lossy(&output.stdout);
+ stdout
+ .lines()
+ .filter(|s| s.ends_with(".rs"))
+ .map(std::borrow::ToOwned::to_owned)
+ .collect()
+}
+
+fn check_uncommitted() {
+ let uncommitted = uncommitted_files();
+ debug!("uncommitted files: {:?}", uncommitted);
+ if !uncommitted.is_empty() {
+ println!("Found untracked changes:");
+ for f in &uncommitted {
+ println!(" {}", f);
+ }
+ println!("Commit your work, or run with `-u`.");
+ println!("Exiting.");
+ std::process::exit(1);
+ }
+}
+
+fn make_opts() -> Options {
+ let mut opts = Options::new();
+ opts.optflag("h", "help", "show this message");
+ opts.optflag("c", "check", "check only, don't format (unimplemented)");
+ opts.optflag("u", "uncommitted", "format uncommitted files");
+ opts
+}
+
+struct Config {
+ commits: String,
+ uncommitted: bool,
+}
+
+impl Config {
+ fn from_args(matches: &Matches, opts: &Options) -> Config {
+ // `--help` display help message and quit
+ if matches.opt_present("h") {
+ let message = format!(
+ "\nusage: {} <commits> [options]\n\n\
+ commits: number of commits to format, default: 1",
+ env::args_os().next().unwrap().to_string_lossy()
+ );
+ println!("{}", opts.usage(&message));
+ std::process::exit(0);
+ }
+
+ let mut config = Config {
+ commits: "1".to_owned(),
+ uncommitted: false,
+ };
+
+ if matches.opt_present("c") {
+ unimplemented!();
+ }
+
+ if matches.opt_present("u") {
+ config.uncommitted = true;
+ }
+
+ if matches.free.len() > 1 {
+ panic!("unknown arguments, use `-h` for usage");
+ }
+ if matches.free.len() == 1 {
+ let commits = matches.free[0].trim();
+ if u32::from_str(commits).is_err() {
+ panic!("Couldn't parse number of commits");
+ }
+ config.commits = commits.to_owned();
+ }
+
+ config
+ }
+}
+
+fn main() {
+ env_logger::Builder::from_env("RUSTFMT_LOG").init();
+
+ let opts = make_opts();
+ let matches = opts
+ .parse(env::args().skip(1))
+ .expect("Couldn't parse command line");
+ let config = Config::from_args(&matches, &opts);
+
+ if !config.uncommitted {
+ check_uncommitted();
+ }
+
+ let stdout = git_diff(&config.commits);
+ let files = get_files(&stdout);
+ debug!("files: {:?}", files);
+ let files = prune_files(files);
+ debug!("pruned files: {:?}", files);
+ let exit_code = fmt_files(&files);
+ std::process::exit(exit_code);
+}
diff --git a/src/tools/rustfmt/src/ignore_path.rs b/src/tools/rustfmt/src/ignore_path.rs
new file mode 100644
index 000000000..d95594949
--- /dev/null
+++ b/src/tools/rustfmt/src/ignore_path.rs
@@ -0,0 +1,52 @@
+use ignore::{self, gitignore};
+
+use crate::config::{FileName, IgnoreList};
+
+pub(crate) struct IgnorePathSet {
+ ignore_set: gitignore::Gitignore,
+}
+
+impl IgnorePathSet {
+ pub(crate) fn from_ignore_list(ignore_list: &IgnoreList) -> Result<Self, ignore::Error> {
+ let mut ignore_builder = gitignore::GitignoreBuilder::new(ignore_list.rustfmt_toml_path());
+
+ for ignore_path in ignore_list {
+ ignore_builder.add_line(None, ignore_path.to_str().unwrap())?;
+ }
+
+ Ok(IgnorePathSet {
+ ignore_set: ignore_builder.build()?,
+ })
+ }
+
+ pub(crate) fn is_match(&self, file_name: &FileName) -> bool {
+ match file_name {
+ FileName::Stdin => false,
+ FileName::Real(p) => self
+ .ignore_set
+ .matched_path_or_any_parents(p, false)
+ .is_ignore(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use rustfmt_config_proc_macro::nightly_only_test;
+
+ #[nightly_only_test]
+ #[test]
+ fn test_ignore_path_set() {
+ use crate::config::{Config, FileName};
+ use crate::ignore_path::IgnorePathSet;
+ use std::path::{Path, PathBuf};
+
+ let config =
+ Config::from_toml(r#"ignore = ["foo.rs", "bar_dir/*"]"#, Path::new("")).unwrap();
+ let ignore_path_set = IgnorePathSet::from_ignore_list(&config.ignore()).unwrap();
+
+ assert!(ignore_path_set.is_match(&FileName::Real(PathBuf::from("src/foo.rs"))));
+ assert!(ignore_path_set.is_match(&FileName::Real(PathBuf::from("bar_dir/baz.rs"))));
+ assert!(!ignore_path_set.is_match(&FileName::Real(PathBuf::from("src/bar.rs"))));
+ }
+}
diff --git a/src/tools/rustfmt/src/imports.rs b/src/tools/rustfmt/src/imports.rs
new file mode 100644
index 000000000..8d41c8815
--- /dev/null
+++ b/src/tools/rustfmt/src/imports.rs
@@ -0,0 +1,1506 @@
+use std::borrow::Cow;
+use std::cmp::Ordering;
+use std::fmt;
+
+use core::hash::{Hash, Hasher};
+
+use itertools::Itertools;
+
+use rustc_ast::ast::{self, UseTreeKind};
+use rustc_span::{
+ symbol::{self, sym},
+ BytePos, Span, DUMMY_SP,
+};
+
+use crate::comment::combine_strs_with_missing_comments;
+use crate::config::lists::*;
+use crate::config::ImportGranularity;
+use crate::config::{Edition, IndentStyle, Version};
+use crate::lists::{
+ definitive_tactic, itemize_list, write_list, ListFormatting, ListItem, Separator,
+};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::utils::{is_same_visibility, mk_sp, rewrite_ident};
+use crate::visitor::FmtVisitor;
+
+/// Returns a name imported by a `use` declaration.
+/// E.g., returns `Ordering` for `std::cmp::Ordering` and `self` for `std::cmp::self`.
+pub(crate) fn path_to_imported_ident(path: &ast::Path) -> symbol::Ident {
+ path.segments.last().unwrap().ident
+}
+
+impl<'a> FmtVisitor<'a> {
+ pub(crate) fn format_import(&mut self, item: &ast::Item, tree: &ast::UseTree) {
+ let span = item.span();
+ let shape = self.shape();
+ let rw = UseTree::from_ast(
+ &self.get_context(),
+ tree,
+ None,
+ Some(item.vis.clone()),
+ Some(item.span.lo()),
+ Some(item.attrs.clone()),
+ )
+ .rewrite_top_level(&self.get_context(), shape);
+ match rw {
+ Some(ref s) if s.is_empty() => {
+ // Format up to last newline
+ let prev_span = mk_sp(self.last_pos, source!(self, span).lo());
+ let trimmed_snippet = self.snippet(prev_span).trim_end();
+ let span_end = self.last_pos + BytePos(trimmed_snippet.len() as u32);
+ self.format_missing(span_end);
+ // We have an excessive newline from the removed import.
+ if self.buffer.ends_with('\n') {
+ self.buffer.pop();
+ self.line_number -= 1;
+ }
+ self.last_pos = source!(self, span).hi();
+ }
+ Some(ref s) => {
+ self.format_missing_with_indent(source!(self, span).lo());
+ self.push_str(s);
+ self.last_pos = source!(self, span).hi();
+ }
+ None => {
+ self.format_missing_with_indent(source!(self, span).lo());
+ self.format_missing(source!(self, span).hi());
+ }
+ }
+ }
+}
+
+// Ordering of imports
+
+// We order imports by translating to our own representation and then sorting.
+// The Rust AST data structures are really bad for this. Rustfmt applies a bunch
+// of normalisations to imports and since we want to sort based on the result
+// of these (and to maintain idempotence) we must apply the same normalisations
+// to the data structures for sorting.
+//
+// We sort `self` and `super` before other imports, then identifier imports,
+// then glob imports, then lists of imports. We do not take aliases into account
+// when ordering unless the imports are identical except for the alias (rare in
+// practice).
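+//
+// For illustration (not part of the original comment), that ordering would
+// arrange imports roughly like this:
+//
+//     use self::module::Thing;   // `self` first
+//     use super::other::Thing;   // then `super`
+//     use crate::foo::Bar;       // identifier imports
+//     use crate::foo::*;         // glob imports
+//     use crate::foo::{A, B};    // lists of imports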
+
+// FIXME(#2531): we should unify the comparison code here with the formatting
+// code elsewhere since we are essentially string-ifying twice. Furthermore, by
+// parsing to our own format on comparison, we repeat a lot of work when
+// sorting.
+
+// FIXME we do a lot of allocation to make our own representation.
+#[derive(Clone, Eq, Hash, PartialEq)]
+pub(crate) enum UseSegmentKind {
+ Ident(String, Option<String>),
+ Slf(Option<String>),
+ Super(Option<String>),
+ Crate(Option<String>),
+ Glob,
+ List(Vec<UseTree>),
+}
+
+#[derive(Clone, Eq, PartialEq)]
+pub(crate) struct UseSegment {
+ pub(crate) kind: UseSegmentKind,
+ pub(crate) version: Version,
+}
+
+#[derive(Clone)]
+pub(crate) struct UseTree {
+ pub(crate) path: Vec<UseSegment>,
+ pub(crate) span: Span,
+ // Comment information within nested use tree.
+ pub(crate) list_item: Option<ListItem>,
+ // Additional fields for top level use items.
+ // Should we have another struct for top-level use items rather than reusing this?
+ visibility: Option<ast::Visibility>,
+ attrs: Option<Vec<ast::Attribute>>,
+}
+
+impl PartialEq for UseTree {
+ fn eq(&self, other: &UseTree) -> bool {
+ self.path == other.path
+ }
+}
+impl Eq for UseTree {}
+
+impl Spanned for UseTree {
+ fn span(&self) -> Span {
+ let lo = if let Some(ref attrs) = self.attrs {
+ attrs.iter().next().map_or(self.span.lo(), |a| a.span.lo())
+ } else {
+ self.span.lo()
+ };
+ mk_sp(lo, self.span.hi())
+ }
+}
+
+impl UseSegment {
+ // Clone a version of self with any top-level alias removed.
+ fn remove_alias(&self) -> UseSegment {
+ let kind = match self.kind {
+ UseSegmentKind::Ident(ref s, _) => UseSegmentKind::Ident(s.clone(), None),
+ UseSegmentKind::Slf(_) => UseSegmentKind::Slf(None),
+ UseSegmentKind::Super(_) => UseSegmentKind::Super(None),
+ UseSegmentKind::Crate(_) => UseSegmentKind::Crate(None),
+ _ => return self.clone(),
+ };
+ UseSegment {
+ kind,
+ version: self.version,
+ }
+ }
+
+ // Check if self == other with their aliases removed.
+ fn equal_except_alias(&self, other: &Self) -> bool {
+ match (&self.kind, &other.kind) {
+ (UseSegmentKind::Ident(ref s1, _), UseSegmentKind::Ident(ref s2, _)) => s1 == s2,
+ (UseSegmentKind::Slf(_), UseSegmentKind::Slf(_))
+ | (UseSegmentKind::Super(_), UseSegmentKind::Super(_))
+ | (UseSegmentKind::Crate(_), UseSegmentKind::Crate(_))
+ | (UseSegmentKind::Glob, UseSegmentKind::Glob) => true,
+ (UseSegmentKind::List(ref list1), UseSegmentKind::List(ref list2)) => list1 == list2,
+ _ => false,
+ }
+ }
+
+ fn get_alias(&self) -> Option<&str> {
+ match &self.kind {
+ UseSegmentKind::Ident(_, a)
+ | UseSegmentKind::Slf(a)
+ | UseSegmentKind::Super(a)
+ | UseSegmentKind::Crate(a) => a.as_deref(),
+ _ => None,
+ }
+ }
+
+ fn from_path_segment(
+ context: &RewriteContext<'_>,
+ path_seg: &ast::PathSegment,
+ modsep: bool,
+ ) -> Option<UseSegment> {
+ let name = rewrite_ident(context, path_seg.ident);
+ if name.is_empty() || name == "{{root}}" {
+ return None;
+ }
+ let kind = match name {
+ "self" => UseSegmentKind::Slf(None),
+ "super" => UseSegmentKind::Super(None),
+ "crate" => UseSegmentKind::Crate(None),
+ _ => {
+ let mod_sep = if modsep { "::" } else { "" };
+ UseSegmentKind::Ident(format!("{}{}", mod_sep, name), None)
+ }
+ };
+
+ Some(UseSegment {
+ kind,
+ version: context.config.version(),
+ })
+ }
+
+ fn contains_comment(&self) -> bool {
+ if let UseSegmentKind::List(list) = &self.kind {
+ list.iter().any(|subtree| subtree.contains_comment())
+ } else {
+ false
+ }
+ }
+}
+
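+// Normalize a list of use trees according to the requested `ImportGranularity`:
+// `Item` flattens every tree into single-item imports, `Preserve` returns the
+// input unchanged, and `Crate`/`Module`/`One` merge trees that share the
+// corresponding prefix. For example (see `test_use_tree_merge_crate` below),
+// merging `a::b::{c, d}` and `a::b::{e, f}` by crate yields `a::b::{c, d, e, f}`.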
+pub(crate) fn normalize_use_trees_with_granularity(
+ use_trees: Vec<UseTree>,
+ import_granularity: ImportGranularity,
+) -> Vec<UseTree> {
+ let merge_by = match import_granularity {
+ ImportGranularity::Item => return flatten_use_trees(use_trees, ImportGranularity::Item),
+ ImportGranularity::Preserve => return use_trees,
+ ImportGranularity::Crate => SharedPrefix::Crate,
+ ImportGranularity::Module => SharedPrefix::Module,
+ ImportGranularity::One => SharedPrefix::One,
+ };
+
+ let mut result = Vec::with_capacity(use_trees.len());
+ for use_tree in use_trees {
+ if use_tree.contains_comment() || use_tree.attrs.is_some() {
+ result.push(use_tree);
+ continue;
+ }
+
+ for mut flattened in use_tree.flatten(import_granularity) {
+ if let Some(tree) = result
+ .iter_mut()
+ .find(|tree| tree.share_prefix(&flattened, merge_by))
+ {
+ tree.merge(&flattened, merge_by);
+ } else {
+ // If this is the first tree with this prefix, handle potential trailing ::self
+ if merge_by == SharedPrefix::Module {
+ flattened = flattened.nest_trailing_self();
+ }
+ result.push(flattened);
+ }
+ }
+ }
+ result
+}
+
+fn flatten_use_trees(
+ use_trees: Vec<UseTree>,
+ import_granularity: ImportGranularity,
+) -> Vec<UseTree> {
+ // Return a non-sorted, deduplicated list of the flattened use trees;
+ // order is by first occurrence of each use tree.
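+ // For example, `foo::{a::{b, c}, d::e}` flattens to `foo::a::b`, `foo::a::c`,
+ // and `foo::d::e` (see `test_flatten_use_trees` below).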
+ use_trees
+ .into_iter()
+ .flat_map(|tree| tree.flatten(import_granularity))
+ .map(UseTree::nest_trailing_self)
+ .unique()
+ .collect()
+}
+
+impl fmt::Debug for UseTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self, f)
+ }
+}
+
+impl fmt::Debug for UseSegment {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.kind, f)
+ }
+}
+
+impl fmt::Display for UseSegment {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.kind, f)
+ }
+}
+
+impl Hash for UseSegment {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.kind.hash(state);
+ }
+}
+
+impl fmt::Debug for UseSegmentKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self, f)
+ }
+}
+
+impl fmt::Display for UseSegmentKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ UseSegmentKind::Glob => write!(f, "*"),
+ UseSegmentKind::Ident(ref s, Some(ref alias)) => write!(f, "{} as {}", s, alias),
+ UseSegmentKind::Ident(ref s, None) => write!(f, "{}", s),
+ UseSegmentKind::Slf(..) => write!(f, "self"),
+ UseSegmentKind::Super(..) => write!(f, "super"),
+ UseSegmentKind::Crate(..) => write!(f, "crate"),
+ UseSegmentKind::List(ref list) => {
+ write!(f, "{{")?;
+ for (i, item) in list.iter().enumerate() {
+ if i != 0 {
+ write!(f, ", ")?;
+ }
+ write!(f, "{}", item)?;
+ }
+ write!(f, "}}")
+ }
+ }
+ }
+}
+impl fmt::Display for UseTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for (i, segment) in self.path.iter().enumerate() {
+ if i != 0 {
+ write!(f, "::")?;
+ }
+ write!(f, "{}", segment)?;
+ }
+ Ok(())
+ }
+}
+
+impl UseTree {
+ // Rewrite use tree with `use ` and a trailing `;`.
+ pub(crate) fn rewrite_top_level(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ) -> Option<String> {
+ let vis = self.visibility.as_ref().map_or(Cow::from(""), |vis| {
+ crate::utils::format_visibility(context, vis)
+ });
+ let use_str = self
+ .rewrite(context, shape.offset_left(vis.len())?)
+ .map(|s| {
+ if s.is_empty() {
+ s
+ } else {
+ format!("{}use {};", vis, s)
+ }
+ })?;
+ match self.attrs {
+ Some(ref attrs) if !attrs.is_empty() => {
+ let attr_str = attrs.rewrite(context, shape)?;
+ let lo = attrs.last().as_ref()?.span.hi();
+ let hi = self.span.lo();
+ let span = mk_sp(lo, hi);
+
+ let allow_extend = if attrs.len() == 1 {
+ let line_len = attr_str.len() + 1 + use_str.len();
+ !attrs.first().unwrap().is_doc_comment()
+ && context.config.inline_attribute_width() >= line_len
+ } else {
+ false
+ };
+
+ combine_strs_with_missing_comments(
+ context,
+ &attr_str,
+ &use_str,
+ span,
+ shape,
+ allow_extend,
+ )
+ }
+ _ => Some(use_str),
+ }
+ }
+
+ // FIXME: Use correct span?
+ // The given span is essentially incorrect, since we are reconstructing
+ // use-statements. This should not be a problem, though, since we have
+ // already tried to extract comments and observed that there are no comments
+ // around the given use item, and the span will not be used afterward.
+ fn from_path(path: Vec<UseSegment>, span: Span) -> UseTree {
+ UseTree {
+ path,
+ span,
+ list_item: None,
+ visibility: None,
+ attrs: None,
+ }
+ }
+
+ pub(crate) fn from_ast_with_normalization(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ ) -> Option<UseTree> {
+ match item.kind {
+ ast::ItemKind::Use(ref use_tree) => Some(
+ UseTree::from_ast(
+ context,
+ use_tree,
+ None,
+ Some(item.vis.clone()),
+ Some(item.span.lo()),
+ if item.attrs.is_empty() {
+ None
+ } else {
+ Some(item.attrs.clone())
+ },
+ )
+ .normalize(),
+ ),
+ _ => None,
+ }
+ }
+
+ fn from_ast(
+ context: &RewriteContext<'_>,
+ a: &ast::UseTree,
+ list_item: Option<ListItem>,
+ visibility: Option<ast::Visibility>,
+ opt_lo: Option<BytePos>,
+ attrs: Option<Vec<ast::Attribute>>,
+ ) -> UseTree {
+ let span = if let Some(lo) = opt_lo {
+ mk_sp(lo, a.span.hi())
+ } else {
+ a.span
+ };
+ let mut result = UseTree {
+ path: vec![],
+ span,
+ list_item,
+ visibility,
+ attrs,
+ };
+
+ let leading_modsep =
+ context.config.edition() >= Edition::Edition2018 && a.prefix.is_global();
+
+ let mut modsep = leading_modsep;
+
+ for p in &a.prefix.segments {
+ if let Some(use_segment) = UseSegment::from_path_segment(context, p, modsep) {
+ result.path.push(use_segment);
+ modsep = false;
+ }
+ }
+
+ let version = context.config.version();
+
+ match a.kind {
+ UseTreeKind::Glob => {
+ // In case of a global path where the glob starts at the root, e.g., "::*"
+ if a.prefix.segments.len() == 1 && leading_modsep {
+ let kind = UseSegmentKind::Ident("".to_owned(), None);
+ result.path.push(UseSegment { kind, version });
+ }
+ result.path.push(UseSegment {
+ kind: UseSegmentKind::Glob,
+ version,
+ });
+ }
+ UseTreeKind::Nested(ref list) => {
+ // Extract comments between nested use items.
+ // This needs to be done before sorting use items.
+ let items = itemize_list(
+ context.snippet_provider,
+ list.iter().map(|(tree, _)| tree),
+ "}",
+ ",",
+ |tree| tree.span.lo(),
+ |tree| tree.span.hi(),
+ |_| Some("".to_owned()), // We only need comments for now.
+ context.snippet_provider.span_after(a.span, "{"),
+ a.span.hi(),
+ false,
+ );
+
+ // In case of a global path where the nested list starts at the root,
+ // e.g., "::{foo, bar}"
+ if a.prefix.segments.len() == 1 && leading_modsep {
+ let kind = UseSegmentKind::Ident("".to_owned(), None);
+ result.path.push(UseSegment { kind, version });
+ }
+ let kind = UseSegmentKind::List(
+ list.iter()
+ .zip(items)
+ .map(|(t, list_item)| {
+ Self::from_ast(context, &t.0, Some(list_item), None, None, None)
+ })
+ .collect(),
+ );
+ result.path.push(UseSegment { kind, version });
+ }
+ UseTreeKind::Simple(ref rename, ..) => {
+ // If the path has leading double colons and is composed of only 2 segments, then we
+ // bypass the call to path_to_imported_ident which would get only the ident and
+ // lose the path root, e.g., `that` in `::that`.
+ // The span of `a.prefix` contains the leading colons.
+ let name = if a.prefix.segments.len() == 2 && leading_modsep {
+ context.snippet(a.prefix.span).to_owned()
+ } else {
+ rewrite_ident(context, path_to_imported_ident(&a.prefix)).to_owned()
+ };
+ let alias = rename.and_then(|ident| {
+ if ident.name == sym::underscore_imports {
+ // for impl-only-use
+ Some("_".to_owned())
+ } else if ident == path_to_imported_ident(&a.prefix) {
+ None
+ } else {
+ Some(rewrite_ident(context, ident).to_owned())
+ }
+ });
+ let kind = match name.as_ref() {
+ "self" => UseSegmentKind::Slf(alias),
+ "super" => UseSegmentKind::Super(alias),
+ "crate" => UseSegmentKind::Crate(alias),
+ _ => UseSegmentKind::Ident(name, alias),
+ };
+
+ let segment = UseSegment { kind, version };
+
+ // `name` is already in result.
+ result.path.pop();
+ result.path.push(segment);
+ }
+ }
+ result
+ }
+
+ // Do the adjustments that rustfmt does elsewhere to use paths.
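+ // For example, `a::{b}` becomes `a::b` and `a::self` becomes `a`, and nested
+ // lists are normalized recursively and sorted (see `test_use_tree_normalize`).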
+ pub(crate) fn normalize(mut self) -> UseTree {
+ let mut last = self.path.pop().expect("Empty use tree?");
+ // Hack around borrow checker.
+ let mut normalize_sole_list = false;
+ let mut aliased_self = false;
+
+ // Remove foo::{} or self without attributes.
+ match last.kind {
+ _ if self.attrs.is_some() => (),
+ UseSegmentKind::List(ref list) if list.is_empty() => {
+ self.path = vec![];
+ return self;
+ }
+ UseSegmentKind::Slf(None) if self.path.is_empty() && self.visibility.is_some() => {
+ self.path = vec![];
+ return self;
+ }
+ _ => (),
+ }
+
+ // Normalise foo::self -> foo.
+ if let UseSegmentKind::Slf(None) = last.kind {
+ if !self.path.is_empty() {
+ return self;
+ }
+ }
+
+ // Normalise foo::self as bar -> foo as bar.
+ if let UseSegmentKind::Slf(_) = last.kind {
+ if let Some(UseSegment {
+ kind: UseSegmentKind::Ident(_, None),
+ ..
+ }) = self.path.last()
+ {
+ aliased_self = true;
+ }
+ }
+
+ let mut done = false;
+ if aliased_self {
+ match self.path.last_mut() {
+ Some(UseSegment {
+ kind: UseSegmentKind::Ident(_, ref mut old_rename),
+ ..
+ }) => {
+ assert!(old_rename.is_none());
+ if let UseSegmentKind::Slf(Some(rename)) = last.clone().kind {
+ *old_rename = Some(rename);
+ done = true;
+ }
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ if done {
+ return self;
+ }
+
+ // Normalise foo::{bar} -> foo::bar
+ if let UseSegmentKind::List(ref list) = last.kind {
+ if list.len() == 1 && list[0].to_string() != "self" {
+ normalize_sole_list = true;
+ }
+ }
+
+ if normalize_sole_list {
+ match last.kind {
+ UseSegmentKind::List(list) => {
+ for seg in &list[0].path {
+ self.path.push(seg.clone());
+ }
+ return self.normalize();
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ // Recursively normalize elements of a list use (including sorting the list).
+ if let UseSegmentKind::List(list) = last.kind {
+ let mut list = list.into_iter().map(UseTree::normalize).collect::<Vec<_>>();
+ list.sort();
+ last = UseSegment {
+ kind: UseSegmentKind::List(list),
+ version: last.version,
+ };
+ }
+
+ self.path.push(last);
+ self
+ }
+
+ fn has_comment(&self) -> bool {
+ self.list_item.as_ref().map_or(false, ListItem::has_comment)
+ }
+
+ fn contains_comment(&self) -> bool {
+ self.has_comment() || self.path.iter().any(|path| path.contains_comment())
+ }
+
+ fn same_visibility(&self, other: &UseTree) -> bool {
+ match (&self.visibility, &other.visibility) {
+ (
+ Some(ast::Visibility {
+ kind: ast::VisibilityKind::Inherited,
+ ..
+ }),
+ None,
+ )
+ | (
+ None,
+ Some(ast::Visibility {
+ kind: ast::VisibilityKind::Inherited,
+ ..
+ }),
+ )
+ | (None, None) => true,
+ (Some(ref a), Some(ref b)) => is_same_visibility(a, b),
+ _ => false,
+ }
+ }
+
+ fn share_prefix(&self, other: &UseTree, shared_prefix: SharedPrefix) -> bool {
+ if self.path.is_empty()
+ || other.path.is_empty()
+ || self.attrs.is_some()
+ || self.contains_comment()
+ || !self.same_visibility(other)
+ {
+ false
+ } else {
+ match shared_prefix {
+ SharedPrefix::Crate => self.path[0] == other.path[0],
+ SharedPrefix::Module => {
+ self.path[..self.path.len() - 1] == other.path[..other.path.len() - 1]
+ }
+ SharedPrefix::One => true,
+ }
+ }
+ }
+
+ fn flatten(self, import_granularity: ImportGranularity) -> Vec<UseTree> {
+ if self.path.is_empty() || self.contains_comment() {
+ return vec![self];
+ }
+ match &self.path.clone().last().unwrap().kind {
+ UseSegmentKind::List(list) => {
+ if list.len() == 1 && list[0].path.len() == 1 {
+ if let UseSegmentKind::Slf(..) = list[0].path[0].kind {
+ return vec![self];
+ };
+ }
+ let prefix = &self.path[..self.path.len() - 1];
+ let mut result = vec![];
+ for nested_use_tree in list {
+ for flattened in &mut nested_use_tree.clone().flatten(import_granularity) {
+ let mut new_path = prefix.to_vec();
+ new_path.append(&mut flattened.path);
+ result.push(UseTree {
+ path: new_path,
+ span: self.span,
+ list_item: None,
+ visibility: self.visibility.clone(),
+ // only retain attributes for `ImportGranularity::Item`
+ attrs: match import_granularity {
+ ImportGranularity::Item => self.attrs.clone(),
+ _ => None,
+ },
+ });
+ }
+ }
+
+ result
+ }
+ _ => vec![self],
+ }
+ }
+
+ fn merge(&mut self, other: &UseTree, merge_by: SharedPrefix) {
+ let mut prefix = 0;
+ for (a, b) in self.path.iter().zip(other.path.iter()) {
+ // only discard the alias at the root of the tree
+ if (prefix == 0 && a.equal_except_alias(b)) || a == b {
+ prefix += 1;
+ } else {
+ break;
+ }
+ }
+ if let Some(new_path) = merge_rest(&self.path, &other.path, prefix, merge_by) {
+ self.path = new_path;
+ self.span = self.span.to(other.span);
+ }
+ }
+
+ /// If this tree ends in `::self`, rewrite it to `::{self}`.
+ fn nest_trailing_self(mut self) -> UseTree {
+ if let Some(UseSegment {
+ kind: UseSegmentKind::Slf(..),
+ ..
+ }) = self.path.last()
+ {
+ let self_segment = self.path.pop().unwrap();
+ let version = self_segment.version;
+ let kind = UseSegmentKind::List(vec![UseTree::from_path(vec![self_segment], DUMMY_SP)]);
+ self.path.push(UseSegment { kind, version });
+ }
+ self
+ }
+}
+
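+// Merge the paths `a` and `b`, which share their first `len` segments, into a
+// single path ending in a nested list. For example, merging `a::b::c` with
+// `a::b::d` after a shared prefix of length 2 yields `a::b::{c, d}`, and merging
+// `a` with `a::b` yields `a::{self, b}` (see `test_use_tree_merge_crate`).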
+fn merge_rest(
+ a: &[UseSegment],
+ b: &[UseSegment],
+ mut len: usize,
+ merge_by: SharedPrefix,
+) -> Option<Vec<UseSegment>> {
+ if a.len() == len && b.len() == len {
+ return None;
+ }
+ if a.len() != len && b.len() != len {
+ let version = a[len].version;
+ if let UseSegmentKind::List(ref list) = a[len].kind {
+ let mut list = list.clone();
+ merge_use_trees_inner(
+ &mut list,
+ UseTree::from_path(b[len..].to_vec(), DUMMY_SP),
+ merge_by,
+ );
+ let mut new_path = b[..len].to_vec();
+ let kind = UseSegmentKind::List(list);
+ new_path.push(UseSegment { kind, version });
+ return Some(new_path);
+ }
+ } else if len == 1 {
+ let (common, rest) = if a.len() == len {
+ (&a[0], &b[1..])
+ } else {
+ (&b[0], &a[1..])
+ };
+ let kind = UseSegmentKind::Slf(common.get_alias().map(ToString::to_string));
+ let version = a[0].version;
+ let mut list = vec![UseTree::from_path(
+ vec![UseSegment { kind, version }],
+ DUMMY_SP,
+ )];
+ match rest {
+ [
+ UseSegment {
+ kind: UseSegmentKind::List(rest_list),
+ ..
+ },
+ ] => list.extend(rest_list.clone()),
+ _ => list.push(UseTree::from_path(rest.to_vec(), DUMMY_SP)),
+ }
+ return Some(vec![
+ b[0].clone(),
+ UseSegment {
+ kind: UseSegmentKind::List(list),
+ version,
+ },
+ ]);
+ } else {
+ len -= 1;
+ }
+ let mut list = vec![
+ UseTree::from_path(a[len..].to_vec(), DUMMY_SP),
+ UseTree::from_path(b[len..].to_vec(), DUMMY_SP),
+ ];
+ list.sort();
+ let mut new_path = b[..len].to_vec();
+ let kind = UseSegmentKind::List(list);
+ let version = a[0].version;
+ new_path.push(UseSegment { kind, version });
+ Some(new_path)
+}
+
+fn merge_use_trees_inner(trees: &mut Vec<UseTree>, use_tree: UseTree, merge_by: SharedPrefix) {
+ struct SimilarTree<'a> {
+ similarity: usize,
+ path_len: usize,
+ tree: &'a mut UseTree,
+ }
+
+ let similar_trees = trees.iter_mut().filter_map(|tree| {
+ if tree.share_prefix(&use_tree, merge_by) {
+ // In the case of `SharedPrefix::One`, `similarity` is used for deciding with which
+ // tree `use_tree` should be merged.
+ // In other cases `similarity` won't be used, so set it to `0` as a dummy value.
+ let similarity = if merge_by == SharedPrefix::One {
+ tree.path
+ .iter()
+ .zip(&use_tree.path)
+ .take_while(|(a, b)| a.equal_except_alias(b))
+ .count()
+ } else {
+ 0
+ };
+
+ let path_len = tree.path.len();
+ Some(SimilarTree {
+ similarity,
+ tree,
+ path_len,
+ })
+ } else {
+ None
+ }
+ });
+
+ if use_tree.path.len() == 1 && merge_by == SharedPrefix::Crate {
+ if let Some(tree) = similar_trees.min_by_key(|tree| tree.path_len) {
+ if tree.path_len == 1 {
+ return;
+ }
+ }
+ } else if merge_by == SharedPrefix::One {
+ if let Some(sim_tree) = similar_trees.max_by_key(|tree| tree.similarity) {
+ if sim_tree.similarity > 0 {
+ sim_tree.tree.merge(&use_tree, merge_by);
+ return;
+ }
+ }
+ } else if let Some(sim_tree) = similar_trees.max_by_key(|tree| tree.path_len) {
+ if sim_tree.path_len > 1 {
+ sim_tree.tree.merge(&use_tree, merge_by);
+ return;
+ }
+ }
+ trees.push(use_tree);
+ trees.sort();
+}
+
+impl Hash for UseTree {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.path.hash(state);
+ }
+}
+
+impl PartialOrd for UseSegment {
+ fn partial_cmp(&self, other: &UseSegment) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+impl PartialOrd for UseTree {
+ fn partial_cmp(&self, other: &UseTree) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+impl Ord for UseSegment {
+ fn cmp(&self, other: &UseSegment) -> Ordering {
+ use self::UseSegmentKind::*;
+
+ fn is_upper_snake_case(s: &str) -> bool {
+ s.chars()
+ .all(|c| c.is_uppercase() || c == '_' || c.is_numeric())
+ }
+
+ match (&self.kind, &other.kind) {
+ (Slf(ref a), Slf(ref b))
+ | (Super(ref a), Super(ref b))
+ | (Crate(ref a), Crate(ref b)) => match (a, b) {
+ (Some(sa), Some(sb)) => {
+ if self.version == Version::Two {
+ sa.trim_start_matches("r#").cmp(sb.trim_start_matches("r#"))
+ } else {
+ a.cmp(b)
+ }
+ }
+ (_, _) => a.cmp(b),
+ },
+ (Glob, Glob) => Ordering::Equal,
+ (Ident(ref pia, ref aa), Ident(ref pib, ref ab)) => {
+ let (ia, ib) = if self.version == Version::Two {
+ (pia.trim_start_matches("r#"), pib.trim_start_matches("r#"))
+ } else {
+ (pia.as_str(), pib.as_str())
+ };
+ // snake_case < CamelCase < UPPER_SNAKE_CASE
+ if ia.starts_with(char::is_uppercase) && ib.starts_with(char::is_lowercase) {
+ return Ordering::Greater;
+ }
+ if ia.starts_with(char::is_lowercase) && ib.starts_with(char::is_uppercase) {
+ return Ordering::Less;
+ }
+ if is_upper_snake_case(ia) && !is_upper_snake_case(ib) {
+ return Ordering::Greater;
+ }
+ if !is_upper_snake_case(ia) && is_upper_snake_case(ib) {
+ return Ordering::Less;
+ }
+ let ident_ord = ia.cmp(ib);
+ if ident_ord != Ordering::Equal {
+ return ident_ord;
+ }
+ match (aa, ab) {
+ (None, Some(_)) => Ordering::Less,
+ (Some(_), None) => Ordering::Greater,
+ (Some(aas), Some(abs)) => {
+ if self.version == Version::Two {
+ aas.trim_start_matches("r#")
+ .cmp(abs.trim_start_matches("r#"))
+ } else {
+ aas.cmp(abs)
+ }
+ }
+ (None, None) => Ordering::Equal,
+ }
+ }
+ (List(ref a), List(ref b)) => {
+ for (a, b) in a.iter().zip(b.iter()) {
+ let ord = a.cmp(b);
+ if ord != Ordering::Equal {
+ return ord;
+ }
+ }
+
+ a.len().cmp(&b.len())
+ }
+ (Slf(_), _) => Ordering::Less,
+ (_, Slf(_)) => Ordering::Greater,
+ (Super(_), _) => Ordering::Less,
+ (_, Super(_)) => Ordering::Greater,
+ (Crate(_), _) => Ordering::Less,
+ (_, Crate(_)) => Ordering::Greater,
+ (Ident(..), _) => Ordering::Less,
+ (_, Ident(..)) => Ordering::Greater,
+ (Glob, _) => Ordering::Less,
+ (_, Glob) => Ordering::Greater,
+ }
+ }
+}
+impl Ord for UseTree {
+ fn cmp(&self, other: &UseTree) -> Ordering {
+ for (a, b) in self.path.iter().zip(other.path.iter()) {
+ let ord = a.cmp(b);
+ // The comparison without aliases is a hack to avoid situations like
+ // comparing `a::b` to `a as c` - where the latter should be ordered
+ // first since it is shorter.
+ if ord != Ordering::Equal && a.remove_alias().cmp(&b.remove_alias()) != Ordering::Equal
+ {
+ return ord;
+ }
+ }
+
+ self.path.len().cmp(&other.path.len())
+ }
+}
+
+fn rewrite_nested_use_tree(
+ context: &RewriteContext<'_>,
+ use_tree_list: &[UseTree],
+ shape: Shape,
+) -> Option<String> {
+ let mut list_items = Vec::with_capacity(use_tree_list.len());
+ let nested_shape = match context.config.imports_indent() {
+ IndentStyle::Block => shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config)
+ .sub_width(1)?,
+ IndentStyle::Visual => shape.visual_indent(0),
+ };
+ for use_tree in use_tree_list {
+ if let Some(mut list_item) = use_tree.list_item.clone() {
+ list_item.item = use_tree.rewrite(context, nested_shape);
+ list_items.push(list_item);
+ } else {
+ list_items.push(ListItem::from_str(use_tree.rewrite(context, nested_shape)?));
+ }
+ }
+ let has_nested_list = use_tree_list.iter().any(|use_segment| {
+ use_segment.path.last().map_or(false, |last_segment| {
+ matches!(last_segment.kind, UseSegmentKind::List(..))
+ })
+ });
+
+ let remaining_width = if has_nested_list {
+ 0
+ } else {
+ shape.width.saturating_sub(2)
+ };
+
+ let tactic = definitive_tactic(
+ &list_items,
+ context.config.imports_layout(),
+ Separator::Comma,
+ remaining_width,
+ );
+
+ let ends_with_newline = context.config.imports_indent() == IndentStyle::Block
+ && tactic != DefinitiveListTactic::Horizontal;
+ let trailing_separator = if ends_with_newline {
+ context.config.trailing_comma()
+ } else {
+ SeparatorTactic::Never
+ };
+ let fmt = ListFormatting::new(nested_shape, context.config)
+ .tactic(tactic)
+ .trailing_separator(trailing_separator)
+ .ends_with_newline(ends_with_newline)
+ .preserve_newline(true)
+ .nested(has_nested_list);
+
+ let list_str = write_list(&list_items, &fmt)?;
+
+ let result = if (list_str.contains('\n') || list_str.len() > remaining_width)
+ && context.config.imports_indent() == IndentStyle::Block
+ {
+ format!(
+ "{{\n{}{}\n{}}}",
+ nested_shape.indent.to_string(context.config),
+ list_str,
+ shape.indent.to_string(context.config)
+ )
+ } else {
+ format!("{{{}}}", list_str)
+ };
+
+ Some(result)
+}
+
+impl Rewrite for UseSegment {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ Some(match self.kind {
+ UseSegmentKind::Ident(ref ident, Some(ref rename)) => {
+ format!("{} as {}", ident, rename)
+ }
+ UseSegmentKind::Ident(ref ident, None) => ident.clone(),
+ UseSegmentKind::Slf(Some(ref rename)) => format!("self as {}", rename),
+ UseSegmentKind::Slf(None) => "self".to_owned(),
+ UseSegmentKind::Super(Some(ref rename)) => format!("super as {}", rename),
+ UseSegmentKind::Super(None) => "super".to_owned(),
+ UseSegmentKind::Crate(Some(ref rename)) => format!("crate as {}", rename),
+ UseSegmentKind::Crate(None) => "crate".to_owned(),
+ UseSegmentKind::Glob => "*".to_owned(),
+ UseSegmentKind::List(ref use_tree_list) => rewrite_nested_use_tree(
+ context,
+ use_tree_list,
+ // 1 = "{" and "}"
+ shape.offset_left(1)?.sub_width(1)?,
+ )?,
+ })
+ }
+}
+
+impl Rewrite for UseTree {
+ // This does NOT format attributes and visibility or add a trailing `;`.
+ fn rewrite(&self, context: &RewriteContext<'_>, mut shape: Shape) -> Option<String> {
+ let mut result = String::with_capacity(256);
+ let mut iter = self.path.iter().peekable();
+ while let Some(segment) = iter.next() {
+ let segment_str = segment.rewrite(context, shape)?;
+ result.push_str(&segment_str);
+ if iter.peek().is_some() {
+ result.push_str("::");
+ // 2 = "::"
+ shape = shape.offset_left(2 + segment_str.len())?;
+ }
+ }
+ Some(result)
+ }
+}
+
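+// How much of a path two use trees must share before they can be merged:
+// the first segment (`Crate`), everything but the last segment (`Module`),
+// or nothing at all (`One`, which can merge all imports into a single use item).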
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum SharedPrefix {
+ Crate,
+ Module,
+ One,
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use rustc_span::DUMMY_SP;
+
+ // Parse the path part of an import. This parser is not robust and is only
+ // suitable for use in a test harness.
+ fn parse_use_tree(s: &str) -> UseTree {
+ use std::iter::Peekable;
+ use std::mem::swap;
+ use std::str::Chars;
+
+ struct Parser<'a> {
+ input: Peekable<Chars<'a>>,
+ version: Version,
+ }
+
+ impl<'a> Parser<'a> {
+ fn bump(&mut self) {
+ self.input.next().unwrap();
+ }
+
+ fn eat(&mut self, c: char) {
+ assert_eq!(self.input.next().unwrap(), c);
+ }
+
+ fn push_segment(
+ &self,
+ result: &mut Vec<UseSegment>,
+ buf: &mut String,
+ alias_buf: &mut Option<String>,
+ ) {
+ let version = self.version;
+ if !buf.is_empty() {
+ let mut alias = None;
+ swap(alias_buf, &mut alias);
+
+ match buf.as_ref() {
+ "self" => {
+ let kind = UseSegmentKind::Slf(alias);
+ result.push(UseSegment { kind, version });
+ *buf = String::new();
+ *alias_buf = None;
+ }
+ "super" => {
+ let kind = UseSegmentKind::Super(alias);
+ result.push(UseSegment { kind, version });
+ *buf = String::new();
+ *alias_buf = None;
+ }
+ "crate" => {
+ let kind = UseSegmentKind::Crate(alias);
+ result.push(UseSegment { kind, version });
+ *buf = String::new();
+ *alias_buf = None;
+ }
+ _ => {
+ let mut name = String::new();
+ swap(buf, &mut name);
+ let kind = UseSegmentKind::Ident(name, alias);
+ result.push(UseSegment { kind, version });
+ }
+ }
+ }
+ }
+
+ fn parse_in_list(&mut self) -> UseTree {
+ let mut result = vec![];
+ let mut buf = String::new();
+ let mut alias_buf = None;
+ while let Some(&c) = self.input.peek() {
+ match c {
+ '{' => {
+ assert!(buf.is_empty());
+ self.bump();
+ let kind = UseSegmentKind::List(self.parse_list());
+ result.push(UseSegment {
+ kind,
+ version: self.version,
+ });
+ self.eat('}');
+ }
+ '*' => {
+ assert!(buf.is_empty());
+ self.bump();
+ let kind = UseSegmentKind::Glob;
+ result.push(UseSegment {
+ kind,
+ version: self.version,
+ });
+ }
+ ':' => {
+ self.bump();
+ self.eat(':');
+ self.push_segment(&mut result, &mut buf, &mut alias_buf);
+ }
+ '}' | ',' => {
+ self.push_segment(&mut result, &mut buf, &mut alias_buf);
+ return UseTree {
+ path: result,
+ span: DUMMY_SP,
+ list_item: None,
+ visibility: None,
+ attrs: None,
+ };
+ }
+ ' ' => {
+ self.bump();
+ self.eat('a');
+ self.eat('s');
+ self.eat(' ');
+ alias_buf = Some(String::new());
+ }
+ c => {
+ self.bump();
+ if let Some(ref mut buf) = alias_buf {
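+ // The inner `buf` shadows the path buffer, so this pushes to the alias buffer.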
+ buf.push(c);
+ } else {
+ buf.push(c);
+ }
+ }
+ }
+ }
+ self.push_segment(&mut result, &mut buf, &mut alias_buf);
+ UseTree {
+ path: result,
+ span: DUMMY_SP,
+ list_item: None,
+ visibility: None,
+ attrs: None,
+ }
+ }
+
+ fn parse_list(&mut self) -> Vec<UseTree> {
+ let mut result = vec![];
+ loop {
+ match self.input.peek().unwrap() {
+ ',' | ' ' => self.bump(),
+ '}' => {
+ return result;
+ }
+ _ => result.push(self.parse_in_list()),
+ }
+ }
+ }
+ }
+
+ let mut parser = Parser {
+ input: s.chars().peekable(),
+ version: Version::One,
+ };
+ parser.parse_in_list()
+ }
+
+ macro_rules! parse_use_trees {
+ ($($s:expr),* $(,)*) => {
+ vec![
+ $(parse_use_tree($s),)*
+ ]
+ }
+ }
+
+ macro_rules! test_merge {
+ ($by:ident, [$($input:expr),* $(,)*], [$($output:expr),* $(,)*]) => {
+ assert_eq!(
+ normalize_use_trees_with_granularity(
+ parse_use_trees!($($input,)*),
+ ImportGranularity::$by,
+ ),
+ parse_use_trees!($($output,)*),
+ );
+ }
+ }
+
+ #[test]
+ fn test_use_tree_merge_crate() {
+ test_merge!(
+ Crate,
+ ["a::b::{c, d}", "a::b::{e, f}"],
+ ["a::b::{c, d, e, f}"]
+ );
+ test_merge!(Crate, ["a::b::c", "a::b"], ["a::{b, b::c}"]);
+ test_merge!(Crate, ["a::b", "a::b"], ["a::b"]);
+ test_merge!(Crate, ["a", "a::b", "a::b::c"], ["a::{self, b, b::c}"]);
+ test_merge!(
+ Crate,
+ ["a", "a::b", "a::b::c", "a::b::c::d"],
+ ["a::{self, b, b::{c, c::d}}"]
+ );
+ test_merge!(
+ Crate,
+ ["a", "a::b", "a::b::c", "a::b"],
+ ["a::{self, b, b::c}"]
+ );
+ test_merge!(
+ Crate,
+ ["a::{b::{self, c}, d::e}", "a::d::f"],
+ ["a::{b::{self, c}, d::{e, f}}"]
+ );
+ test_merge!(
+ Crate,
+ ["a::d::f", "a::{b::{self, c}, d::e}"],
+ ["a::{b::{self, c}, d::{e, f}}"]
+ );
+ test_merge!(
+ Crate,
+ ["a::{c, d, b}", "a::{d, e, b, a, f}", "a::{f, g, c}"],
+ ["a::{a, b, c, d, e, f, g}"]
+ );
+ test_merge!(
+ Crate,
+ ["a::{self}", "b::{self as foo}"],
+ ["a::{self}", "b::{self as foo}"]
+ );
+ }
+
+ #[test]
+ fn test_use_tree_merge_module() {
+ test_merge!(
+ Module,
+ ["foo::b", "foo::{a, c, d::e}"],
+ ["foo::{a, b, c}", "foo::d::e"]
+ );
+
+ test_merge!(
+ Module,
+ ["foo::{a::b, a::c, d::e, d::f}"],
+ ["foo::a::{b, c}", "foo::d::{e, f}"]
+ );
+ }
+
+ #[test]
+ fn test_use_tree_merge_one() {
+ test_merge!(One, ["a", "b"], ["{a, b}"]);
+
+ test_merge!(One, ["a::{aa, ab}", "b", "a"], ["{a::{self, aa, ab}, b}"]);
+
+ test_merge!(One, ["a as x", "b as y"], ["{a as x, b as y}"]);
+
+ test_merge!(
+ One,
+ ["a::{aa as xa, ab}", "b", "a"],
+ ["{a::{self, aa as xa, ab}, b}"]
+ );
+
+ test_merge!(
+ One,
+ ["a", "a::{aa, ab::{aba, abb}}"],
+ ["a::{self, aa, ab::{aba, abb}}"]
+ );
+
+ test_merge!(One, ["a", "b::{ba, *}"], ["{a, b::{ba, *}}"]);
+
+ test_merge!(One, ["a", "b", "a::aa"], ["{a::{self, aa}, b}"]);
+
+ test_merge!(
+ One,
+ ["a::aa::aaa", "a::ac::aca", "a::aa::*"],
+ ["a::{aa::{aaa, *}, ac::aca}"]
+ );
+
+ test_merge!(
+ One,
+ ["a", "b::{ba, bb}", "a::{aa::*, ab::aba}"],
+ ["{a::{self, aa::*, ab::aba}, b::{ba, bb}}"]
+ );
+
+ test_merge!(
+ One,
+ ["b", "a::ac::{aca, acb}", "a::{aa::*, ab}"],
+ ["{a::{aa::*, ab, ac::{aca, acb}}, b}"]
+ );
+ }
+
+ #[test]
+ fn test_flatten_use_trees() {
+ assert_eq!(
+ flatten_use_trees(
+ parse_use_trees!["foo::{a::{b, c}, d::e}"],
+ ImportGranularity::Item
+ ),
+ parse_use_trees!["foo::a::b", "foo::a::c", "foo::d::e"]
+ );
+
+ assert_eq!(
+ flatten_use_trees(
+ parse_use_trees!["foo::{self, a, b::{c, d}, e::*}"],
+ ImportGranularity::Item
+ ),
+ parse_use_trees![
+ "foo::{self}",
+ "foo::a",
+ "foo::b::c",
+ "foo::b::d",
+ "foo::e::*"
+ ]
+ );
+ }
+
+ #[test]
+ fn test_use_tree_flatten() {
+ assert_eq!(
+ parse_use_tree("a::b::{c, d, e, f}").flatten(ImportGranularity::Item),
+ parse_use_trees!("a::b::c", "a::b::d", "a::b::e", "a::b::f",)
+ );
+
+ assert_eq!(
+ parse_use_tree("a::b::{c::{d, e, f}, g, h::{i, j, k}}")
+ .flatten(ImportGranularity::Item),
+ parse_use_trees![
+ "a::b::c::d",
+ "a::b::c::e",
+ "a::b::c::f",
+ "a::b::g",
+ "a::b::h::i",
+ "a::b::h::j",
+ "a::b::h::k",
+ ]
+ );
+ }
+
+ #[test]
+ fn test_use_tree_normalize() {
+ assert_eq!(parse_use_tree("a::self").normalize(), parse_use_tree("a"));
+ assert_eq!(
+ parse_use_tree("a::self as foo").normalize(),
+ parse_use_tree("a as foo")
+ );
+ assert_eq!(
+ parse_use_tree("a::{self}").normalize(),
+ parse_use_tree("a::{self}")
+ );
+ assert_eq!(parse_use_tree("a::{b}").normalize(), parse_use_tree("a::b"));
+ assert_eq!(
+ parse_use_tree("a::{b, c::self}").normalize(),
+ parse_use_tree("a::{b, c}")
+ );
+ assert_eq!(
+ parse_use_tree("a::{b as bar, c::self}").normalize(),
+ parse_use_tree("a::{b as bar, c}")
+ );
+ }
+
+ #[test]
+ fn test_use_tree_ord() {
+ assert!(parse_use_tree("a").normalize() < parse_use_tree("aa").normalize());
+ assert!(parse_use_tree("a").normalize() < parse_use_tree("a::a").normalize());
+ assert!(parse_use_tree("a").normalize() < parse_use_tree("*").normalize());
+ assert!(parse_use_tree("a").normalize() < parse_use_tree("{a, b}").normalize());
+ assert!(parse_use_tree("*").normalize() < parse_use_tree("{a, b}").normalize());
+
+ assert!(
+ parse_use_tree("aaaaaaaaaaaaaaa::{bb, cc, dddddddd}").normalize()
+ < parse_use_tree("aaaaaaaaaaaaaaa::{bb, cc, ddddddddd}").normalize()
+ );
+ assert!(
+ parse_use_tree("serde::de::{Deserialize}").normalize()
+ < parse_use_tree("serde_json").normalize()
+ );
+ assert!(parse_use_tree("a::b::c").normalize() < parse_use_tree("a::b::*").normalize());
+ assert!(
+ parse_use_tree("foo::{Bar, Baz}").normalize()
+ < parse_use_tree("{Bar, Baz}").normalize()
+ );
+
+ assert!(
+ parse_use_tree("foo::{qux as bar}").normalize()
+ < parse_use_tree("foo::{self as bar}").normalize()
+ );
+ assert!(
+ parse_use_tree("foo::{qux as bar}").normalize()
+ < parse_use_tree("foo::{baz, qux as bar}").normalize()
+ );
+ assert!(
+ parse_use_tree("foo::{self as bar, baz}").normalize()
+ < parse_use_tree("foo::{baz, qux as bar}").normalize()
+ );
+
+ assert!(parse_use_tree("foo").normalize() < parse_use_tree("Foo").normalize());
+ assert!(parse_use_tree("foo").normalize() < parse_use_tree("foo::Bar").normalize());
+
+ assert!(
+ parse_use_tree("std::cmp::{d, c, b, a}").normalize()
+ < parse_use_tree("std::cmp::{b, e, g, f}").normalize()
+ );
+ }
+
+ #[test]
+ fn test_use_tree_nest_trailing_self() {
+ assert_eq!(
+ parse_use_tree("a::b::self").nest_trailing_self(),
+ parse_use_tree("a::b::{self}")
+ );
+ assert_eq!(
+ parse_use_tree("a::b::c").nest_trailing_self(),
+ parse_use_tree("a::b::c")
+ );
+ assert_eq!(
+ parse_use_tree("a::b::{c, d}").nest_trailing_self(),
+ parse_use_tree("a::b::{c, d}")
+ );
+ assert_eq!(
+ parse_use_tree("a::b::{self, c}").nest_trailing_self(),
+ parse_use_tree("a::b::{self, c}")
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/items.rs b/src/tools/rustfmt/src/items.rs
new file mode 100644
index 000000000..8f35068e3
--- /dev/null
+++ b/src/tools/rustfmt/src/items.rs
@@ -0,0 +1,3335 @@
+// Formatting top-level items - functions, structs, enums, traits, impls.
+
+use std::borrow::Cow;
+use std::cmp::{max, min, Ordering};
+
+use regex::Regex;
+use rustc_ast::visit;
+use rustc_ast::{ast, ptr};
+use rustc_span::{symbol, BytePos, Span, DUMMY_SP};
+
+use crate::attr::filter_inline_attrs;
+use crate::comment::{
+ combine_strs_with_missing_comments, contains_comment, is_last_comment_block,
+ recover_comment_removed, recover_missing_comment_in_span, rewrite_missing_comment,
+ FindUncommented,
+};
+use crate::config::lists::*;
+use crate::config::{BraceStyle, Config, IndentStyle, Version};
+use crate::expr::{
+ is_empty_block, is_simple_block_stmt, rewrite_assign_rhs, rewrite_assign_rhs_with,
+ rewrite_assign_rhs_with_comments, RhsAssignKind, RhsTactics,
+};
+use crate::lists::{definitive_tactic, itemize_list, write_list, ListFormatting, Separator};
+use crate::macros::{rewrite_macro, MacroPosition};
+use crate::overflow;
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::{Indent, Shape};
+use crate::source_map::{LineRangeUtils, SpanUtils};
+use crate::spanned::Spanned;
+use crate::stmt::Stmt;
+use crate::types::opaque_ty;
+use crate::utils::*;
+use crate::vertical::rewrite_with_alignment;
+use crate::visitor::FmtVisitor;
+
+const DEFAULT_VISIBILITY: ast::Visibility = ast::Visibility {
+ kind: ast::VisibilityKind::Inherited,
+ span: DUMMY_SP,
+ tokens: None,
+};
+
+fn type_annotation_separator(config: &Config) -> &str {
+ colon_spaces(config)
+}
+
+// Statements of the form
+// let pat: ty = init;
+impl Rewrite for ast::Local {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ debug!(
+ "Local::rewrite {:?} {} {:?}",
+ self, shape.width, shape.indent
+ );
+
+ skip_out_of_file_lines_range!(context, self.span);
+
+ if contains_skip(&self.attrs) || matches!(self.kind, ast::LocalKind::InitElse(..)) {
+ return None;
+ }
+
+ let attrs_str = self.attrs.rewrite(context, shape)?;
+ let mut result = if attrs_str.is_empty() {
+ "let ".to_owned()
+ } else {
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ "let ",
+ mk_sp(
+ self.attrs.last().map(|a| a.span.hi()).unwrap(),
+ self.span.lo(),
+ ),
+ shape,
+ false,
+ )?
+ };
+
+ // 4 = "let ".len()
+ let pat_shape = shape.offset_left(4)?;
+ // 1 = ;
+ let pat_shape = pat_shape.sub_width(1)?;
+ let pat_str = self.pat.rewrite(context, pat_shape)?;
+ result.push_str(&pat_str);
+
+ // String that is placed between the assignment pattern and the expression.
+ let infix = {
+ let mut infix = String::with_capacity(32);
+
+ if let Some(ref ty) = self.ty {
+ let separator = type_annotation_separator(context.config);
+ let ty_shape = if pat_str.contains('\n') {
+ shape.with_max_width(context.config)
+ } else {
+ shape
+ }
+ .offset_left(last_line_width(&result) + separator.len())?
+ // 2 = ` =`
+ .sub_width(2)?;
+
+ let rewrite = ty.rewrite(context, ty_shape)?;
+
+ infix.push_str(separator);
+ infix.push_str(&rewrite);
+ }
+
+ if self.kind.init().is_some() {
+ infix.push_str(" =");
+ }
+
+ infix
+ };
+
+ result.push_str(&infix);
+
+ if let Some((init, _els)) = self.kind.init_else_opt() {
+ // 1 = trailing semicolon;
+ let nested_shape = shape.sub_width(1)?;
+
+ result = rewrite_assign_rhs(
+ context,
+ result,
+ init,
+ &RhsAssignKind::Expr(&init.kind, init.span),
+ nested_shape,
+ )?;
+ // TODO: handle the `else` branch; `let ... else` locals currently return `None` above.
+ }
+
+ result.push(';');
+ Some(result)
+ }
+}
+
+// FIXME convert to using rewrite style rather than visitor
+// FIXME format modules in this style
+#[allow(dead_code)]
+#[derive(Debug)]
+struct Item<'a> {
+ unsafety: ast::Unsafe,
+ abi: Cow<'static, str>,
+ vis: Option<&'a ast::Visibility>,
+ body: Vec<BodyElement<'a>>,
+ span: Span,
+}
+
+impl<'a> Item<'a> {
+ fn from_foreign_mod(fm: &'a ast::ForeignMod, span: Span, config: &Config) -> Item<'a> {
+ Item {
+ unsafety: fm.unsafety,
+ abi: format_extern(
+ ast::Extern::from_abi(fm.abi, DUMMY_SP),
+ config.force_explicit_abi(),
+ true,
+ ),
+ vis: None,
+ body: fm
+ .items
+ .iter()
+ .map(|i| BodyElement::ForeignItem(i))
+ .collect(),
+ span,
+ }
+ }
+}
+
+#[derive(Debug)]
+enum BodyElement<'a> {
+ // Stmt(&'a ast::Stmt),
+ // Field(&'a ast::ExprField),
+ // Variant(&'a ast::Variant),
+ // Item(&'a ast::Item),
+ ForeignItem(&'a ast::ForeignItem),
+}
+
+/// Represents a fn's signature.
+pub(crate) struct FnSig<'a> {
+ decl: &'a ast::FnDecl,
+ generics: &'a ast::Generics,
+ ext: ast::Extern,
+ is_async: Cow<'a, ast::Async>,
+ constness: ast::Const,
+ defaultness: ast::Defaultness,
+ unsafety: ast::Unsafe,
+ visibility: &'a ast::Visibility,
+}
+
+impl<'a> FnSig<'a> {
+ pub(crate) fn from_method_sig(
+ method_sig: &'a ast::FnSig,
+ generics: &'a ast::Generics,
+ visibility: &'a ast::Visibility,
+ ) -> FnSig<'a> {
+ FnSig {
+ unsafety: method_sig.header.unsafety,
+ is_async: Cow::Borrowed(&method_sig.header.asyncness),
+ constness: method_sig.header.constness,
+ defaultness: ast::Defaultness::Final,
+ ext: method_sig.header.ext,
+ decl: &*method_sig.decl,
+ generics,
+ visibility,
+ }
+ }
+
+ pub(crate) fn from_fn_kind(
+ fn_kind: &'a visit::FnKind<'_>,
+ decl: &'a ast::FnDecl,
+ defaultness: ast::Defaultness,
+ ) -> FnSig<'a> {
+ match *fn_kind {
+ visit::FnKind::Fn(fn_ctxt, _, fn_sig, vis, generics, _) => match fn_ctxt {
+ visit::FnCtxt::Assoc(..) => {
+ let mut fn_sig = FnSig::from_method_sig(fn_sig, generics, vis);
+ fn_sig.defaultness = defaultness;
+ fn_sig
+ }
+ _ => FnSig {
+ decl,
+ generics,
+ ext: fn_sig.header.ext,
+ constness: fn_sig.header.constness,
+ is_async: Cow::Borrowed(&fn_sig.header.asyncness),
+ defaultness,
+ unsafety: fn_sig.header.unsafety,
+ visibility: vis,
+ },
+ },
+ _ => unreachable!(),
+ }
+ }
+
+ fn to_str(&self, context: &RewriteContext<'_>) -> String {
+ let mut result = String::with_capacity(128);
+ // Visibility, defaultness, constness, asyncness, unsafety, abi.
+ result.push_str(&*format_visibility(context, self.visibility));
+ result.push_str(format_defaultness(self.defaultness));
+ result.push_str(format_constness(self.constness));
+ result.push_str(format_async(&self.is_async));
+ result.push_str(format_unsafety(self.unsafety));
+ result.push_str(&format_extern(
+ self.ext,
+ context.config.force_explicit_abi(),
+ false,
+ ));
+ result
+ }
+}
+
+impl<'a> FmtVisitor<'a> {
+ fn format_item(&mut self, item: &Item<'_>) {
+ self.buffer.push_str(format_unsafety(item.unsafety));
+ self.buffer.push_str(&item.abi);
+
+ let snippet = self.snippet(item.span);
+ let brace_pos = snippet.find_uncommented("{").unwrap();
+
+ self.push_str("{");
+ if !item.body.is_empty() || contains_comment(&snippet[brace_pos..]) {
+ // FIXME: this skips comments between the extern keyword and the opening
+ // brace.
+ self.last_pos = item.span.lo() + BytePos(brace_pos as u32 + 1);
+ self.block_indent = self.block_indent.block_indent(self.config);
+
+ if !item.body.is_empty() {
+ for item in &item.body {
+ self.format_body_element(item);
+ }
+ }
+
+ self.format_missing_no_indent(item.span.hi() - BytePos(1));
+ self.block_indent = self.block_indent.block_unindent(self.config);
+ let indent_str = self.block_indent.to_string(self.config);
+ self.push_str(&indent_str);
+ }
+
+ self.push_str("}");
+ self.last_pos = item.span.hi();
+ }
+
+ fn format_body_element(&mut self, element: &BodyElement<'_>) {
+ match *element {
+ BodyElement::ForeignItem(item) => self.format_foreign_item(item),
+ }
+ }
+
+ pub(crate) fn format_foreign_mod(&mut self, fm: &ast::ForeignMod, span: Span) {
+ let item = Item::from_foreign_mod(fm, span, self.config);
+ self.format_item(&item);
+ }
+
+ fn format_foreign_item(&mut self, item: &ast::ForeignItem) {
+ let rewrite = item.rewrite(&self.get_context(), self.shape());
+ let hi = item.span.hi();
+ let span = if item.attrs.is_empty() {
+ item.span
+ } else {
+ mk_sp(item.attrs[0].span.lo(), hi)
+ };
+ self.push_rewrite(span, rewrite);
+ self.last_pos = hi;
+ }
+
+ pub(crate) fn rewrite_fn_before_block(
+ &mut self,
+ indent: Indent,
+ ident: symbol::Ident,
+ fn_sig: &FnSig<'_>,
+ span: Span,
+ ) -> Option<(String, FnBraceStyle)> {
+ let context = self.get_context();
+
+ let mut fn_brace_style = newline_for_brace(self.config, &fn_sig.generics.where_clause);
+ let (result, _, force_newline_brace) =
+ rewrite_fn_base(&context, indent, ident, fn_sig, span, fn_brace_style)?;
+
+ // 2 = ` {`
+ if self.config.brace_style() == BraceStyle::AlwaysNextLine
+ || force_newline_brace
+ || last_line_width(&result) + 2 > self.shape().width
+ {
+ fn_brace_style = FnBraceStyle::NextLine
+ }
+
+ Some((result, fn_brace_style))
+ }
+
+ pub(crate) fn rewrite_required_fn(
+ &mut self,
+ indent: Indent,
+ ident: symbol::Ident,
+ sig: &ast::FnSig,
+ vis: &ast::Visibility,
+ generics: &ast::Generics,
+ span: Span,
+ ) -> Option<String> {
+ // Drop the semicolon or it will be interpreted as a comment.
+ let span = mk_sp(span.lo(), span.hi() - BytePos(1));
+ let context = self.get_context();
+
+ let (mut result, ends_with_comment, _) = rewrite_fn_base(
+ &context,
+ indent,
+ ident,
+ &FnSig::from_method_sig(sig, generics, vis),
+ span,
+ FnBraceStyle::None,
+ )?;
+
+ // If `result` ends with a comment, then remember to add a newline
+ if ends_with_comment {
+ result.push_str(&indent.to_string_with_newline(context.config));
+ }
+
+ // Re-attach semicolon
+ result.push(';');
+
+ Some(result)
+ }
+
+ pub(crate) fn single_line_fn(
+ &self,
+ fn_str: &str,
+ block: &ast::Block,
+ inner_attrs: Option<&[ast::Attribute]>,
+ ) -> Option<String> {
+ if fn_str.contains('\n') || inner_attrs.map_or(false, |a| !a.is_empty()) {
+ return None;
+ }
+
+ let context = self.get_context();
+
+ if self.config.empty_item_single_line()
+ && is_empty_block(&context, block, None)
+ && self.block_indent.width() + fn_str.len() + 3 <= self.config.max_width()
+ && !last_line_contains_single_line_comment(fn_str)
+ {
+ return Some(format!("{} {{}}", fn_str));
+ }
+
+ if !self.config.fn_single_line() || !is_simple_block_stmt(&context, block, None) {
+ return None;
+ }
+
+ let res = Stmt::from_ast_node(block.stmts.first()?, true)
+ .rewrite(&self.get_context(), self.shape())?;
+
+ let width = self.block_indent.width() + fn_str.len() + res.len() + 5;
+ if !res.contains('\n') && width <= self.config.max_width() {
+ Some(format!("{} {{ {} }}", fn_str, res))
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn visit_static(&mut self, static_parts: &StaticParts<'_>) {
+ let rewrite = rewrite_static(&self.get_context(), static_parts, self.block_indent);
+ self.push_rewrite(static_parts.span, rewrite);
+ }
+
+ pub(crate) fn visit_struct(&mut self, struct_parts: &StructParts<'_>) {
+ let is_tuple = match struct_parts.def {
+ ast::VariantData::Tuple(..) => true,
+ _ => false,
+ };
+ let rewrite = format_struct(&self.get_context(), struct_parts, self.block_indent, None)
+ .map(|s| if is_tuple { s + ";" } else { s });
+ self.push_rewrite(struct_parts.span, rewrite);
+ }
+
+ pub(crate) fn visit_enum(
+ &mut self,
+ ident: symbol::Ident,
+ vis: &ast::Visibility,
+ enum_def: &ast::EnumDef,
+ generics: &ast::Generics,
+ span: Span,
+ ) {
+ let enum_header =
+ format_header(&self.get_context(), "enum ", ident, vis, self.block_indent);
+ self.push_str(&enum_header);
+
+ let enum_snippet = self.snippet(span);
+ let brace_pos = enum_snippet.find_uncommented("{").unwrap();
+ let body_start = span.lo() + BytePos(brace_pos as u32 + 1);
+ let generics_str = format_generics(
+ &self.get_context(),
+ generics,
+ self.config.brace_style(),
+ if enum_def.variants.is_empty() {
+ BracePos::ForceSameLine
+ } else {
+ BracePos::Auto
+ },
+ self.block_indent,
+ // make a span that starts right after `enum Foo`
+ mk_sp(ident.span.hi(), body_start),
+ last_line_width(&enum_header),
+ )
+ .unwrap();
+ self.push_str(&generics_str);
+
+ self.last_pos = body_start;
+
+ match self.format_variant_list(enum_def, body_start, span.hi()) {
+ Some(ref s) if enum_def.variants.is_empty() => self.push_str(s),
+ rw => {
+ self.push_rewrite(mk_sp(body_start, span.hi()), rw);
+ self.block_indent = self.block_indent.block_unindent(self.config);
+ }
+ }
+ }
+
+ // Format the body of an enum definition
+ fn format_variant_list(
+ &mut self,
+ enum_def: &ast::EnumDef,
+ body_lo: BytePos,
+ body_hi: BytePos,
+ ) -> Option<String> {
+ if enum_def.variants.is_empty() {
+ let mut buffer = String::with_capacity(128);
+ // 1 = "}"
+ let span = mk_sp(body_lo, body_hi - BytePos(1));
+ format_empty_struct_or_tuple(
+ &self.get_context(),
+ span,
+ self.block_indent,
+ &mut buffer,
+ "",
+ "}",
+ );
+ return Some(buffer);
+ }
+ let mut result = String::with_capacity(1024);
+ let original_offset = self.block_indent;
+ self.block_indent = self.block_indent.block_indent(self.config);
+
+ // If enum variants have discriminants, try to vertically align those,
+ // provided the discrims are not shifted too much to the right
+ let align_threshold: usize = self.config.enum_discrim_align_threshold();
+ let discr_ident_lens: Vec<usize> = enum_def
+ .variants
+ .iter()
+ .filter(|var| var.disr_expr.is_some())
+ .map(|var| rewrite_ident(&self.get_context(), var.ident).len())
+ .collect();
+ // Cut the list at the longest discriminant identifier that is shorter than the threshold.
+ // All discriminant idents under the threshold get padded; those above are left as-is.
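+ // For example, with variants `A = 1` and `Bcd = 2` both under the threshold,
+ // the idents are padded to a common width, producing `A   = 1` and `Bcd = 2`.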
+ let pad_discrim_ident_to = *discr_ident_lens
+ .iter()
+ .filter(|&l| *l <= align_threshold)
+ .max()
+ .unwrap_or(&0);
+
+ let itemize_list_with = |one_line_width: usize| {
+ itemize_list(
+ self.snippet_provider,
+ enum_def.variants.iter(),
+ "}",
+ ",",
+ |f| {
+ if !f.attrs.is_empty() {
+ f.attrs[0].span.lo()
+ } else {
+ f.span.lo()
+ }
+ },
+ |f| f.span.hi(),
+ |f| self.format_variant(f, one_line_width, pad_discrim_ident_to),
+ body_lo,
+ body_hi,
+ false,
+ )
+ .collect()
+ };
+ let mut items: Vec<_> = itemize_list_with(self.config.struct_variant_width());
+
+ // If one of the variants uses multiple lines, use multi-line formatting for all variants.
+ let has_multiline_variant = items.iter().any(|item| item.inner_as_ref().contains('\n'));
+ let has_single_line_variant = items.iter().any(|item| !item.inner_as_ref().contains('\n'));
+ if has_multiline_variant && has_single_line_variant {
+ items = itemize_list_with(0);
+ }
+
+ let shape = self.shape().sub_width(2)?;
+ let fmt = ListFormatting::new(shape, self.config)
+ .trailing_separator(self.config.trailing_comma())
+ .preserve_newline(true);
+
+ let list = write_list(&items, &fmt)?;
+ result.push_str(&list);
+ result.push_str(&original_offset.to_string_with_newline(self.config));
+ result.push('}');
+ Some(result)
+ }
+
+ // Variant of an enum.
+ fn format_variant(
+ &self,
+ field: &ast::Variant,
+ one_line_width: usize,
+ pad_discrim_ident_to: usize,
+ ) -> Option<String> {
+ if contains_skip(&field.attrs) {
+ let lo = field.attrs[0].span.lo();
+ let span = mk_sp(lo, field.span.hi());
+ return Some(self.snippet(span).to_owned());
+ }
+
+ let context = self.get_context();
+ // 1 = ','
+ let shape = self.shape().sub_width(1)?;
+ let attrs_str = field.attrs.rewrite(&context, shape)?;
+ let lo = field
+ .attrs
+ .last()
+ .map_or(field.span.lo(), |attr| attr.span.hi());
+ let span = mk_sp(lo, field.span.lo());
+
+ let variant_body = match field.data {
+ ast::VariantData::Tuple(..) | ast::VariantData::Struct(..) => format_struct(
+ &context,
+ &StructParts::from_variant(field),
+ self.block_indent,
+ Some(one_line_width),
+ )?,
+ ast::VariantData::Unit(..) => rewrite_ident(&context, field.ident).to_owned(),
+ };
+
+ let variant_body = if let Some(ref expr) = field.disr_expr {
+ let lhs = format!("{:1$} =", variant_body, pad_discrim_ident_to);
+ let ex = &*expr.value;
+ rewrite_assign_rhs_with(
+ &context,
+ lhs,
+ ex,
+ shape,
+ &RhsAssignKind::Expr(&ex.kind, ex.span),
+ RhsTactics::AllowOverflow,
+ )?
+ } else {
+ variant_body
+ };
+
+ combine_strs_with_missing_comments(&context, &attrs_str, &variant_body, span, shape, false)
+ }
+
+ fn visit_impl_items(&mut self, items: &[ptr::P<ast::AssocItem>]) {
+ if self.get_context().config.reorder_impl_items() {
+ type TyOpt = Option<ptr::P<ast::Ty>>;
+ use crate::ast::AssocItemKind::*;
+ let is_type = |ty: &TyOpt| opaque_ty(ty).is_none();
+ let is_opaque = |ty: &TyOpt| opaque_ty(ty).is_some();
+ let both_type = |l: &TyOpt, r: &TyOpt| is_type(l) && is_type(r);
+ let both_opaque = |l: &TyOpt, r: &TyOpt| is_opaque(l) && is_opaque(r);
+ let need_empty_line = |a: &ast::AssocItemKind, b: &ast::AssocItemKind| match (a, b) {
+ (TyAlias(lty), TyAlias(rty))
+ if both_type(&lty.ty, &rty.ty) || both_opaque(&lty.ty, &rty.ty) =>
+ {
+ false
+ }
+ (Const(..), Const(..)) => false,
+ _ => true,
+ };
+
+ // Create a visitor for each item, then reorder them.
+ let mut buffer = vec![];
+ for item in items {
+ self.visit_impl_item(item);
+ buffer.push((self.buffer.clone(), item.clone()));
+ self.buffer.clear();
+ }
+
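+ // Group order after sorting: non-opaque type aliases, opaque type aliases,
+ // consts, macro calls, then fns (fns keep their original source order).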
+ buffer.sort_by(|(_, a), (_, b)| match (&a.kind, &b.kind) {
+ (TyAlias(lty), TyAlias(rty))
+ if both_type(&lty.ty, &rty.ty) || both_opaque(&lty.ty, &rty.ty) =>
+ {
+ a.ident.as_str().cmp(b.ident.as_str())
+ }
+ (Const(..), Const(..)) | (MacCall(..), MacCall(..)) => {
+ a.ident.as_str().cmp(b.ident.as_str())
+ }
+ (Fn(..), Fn(..)) => a.span.lo().cmp(&b.span.lo()),
+ (TyAlias(ty), _) if is_type(&ty.ty) => Ordering::Less,
+ (_, TyAlias(ty)) if is_type(&ty.ty) => Ordering::Greater,
+ (TyAlias(..), _) => Ordering::Less,
+ (_, TyAlias(..)) => Ordering::Greater,
+ (Const(..), _) => Ordering::Less,
+ (_, Const(..)) => Ordering::Greater,
+ (MacCall(..), _) => Ordering::Less,
+ (_, MacCall(..)) => Ordering::Greater,
+ });
+ let mut prev_kind = None;
+ for (buf, item) in buffer {
+ // Make sure that there is at least one empty line between
+ // different impl items.
+ if prev_kind
+ .as_ref()
+ .map_or(false, |prev_kind| need_empty_line(prev_kind, &item.kind))
+ {
+ self.push_str("\n");
+ }
+ let indent_str = self.block_indent.to_string_with_newline(self.config);
+ self.push_str(&indent_str);
+ self.push_str(buf.trim());
+ prev_kind = Some(item.kind.clone());
+ }
+ } else {
+ for item in items {
+ self.visit_impl_item(item);
+ }
+ }
+ }
+}
+
+pub(crate) fn format_impl(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ iimpl: &ast::Impl,
+ offset: Indent,
+) -> Option<String> {
+ let ast::Impl {
+ generics,
+ self_ty,
+ items,
+ ..
+ } = iimpl;
+ let mut result = String::with_capacity(128);
+ let ref_and_type = format_impl_ref_and_type(context, item, iimpl, offset)?;
+ let sep = offset.to_string_with_newline(context.config);
+ result.push_str(&ref_and_type);
+
+ let where_budget = if result.contains('\n') {
+ context.config.max_width()
+ } else {
+ context.budget(last_line_width(&result))
+ };
+
+ let mut option = WhereClauseOption::snuggled(&ref_and_type);
+ let snippet = context.snippet(item.span);
+ let open_pos = snippet.find_uncommented("{")? + 1;
+ if !contains_comment(&snippet[open_pos..])
+ && items.is_empty()
+ && generics.where_clause.predicates.len() == 1
+ && !result.contains('\n')
+ {
+ option.suppress_comma();
+ option.snuggle();
+ option.allow_single_line();
+ }
+
+ let missing_span = mk_sp(self_ty.span.hi(), item.span.hi());
+ let where_span_end = context.snippet_provider.opt_span_before(missing_span, "{");
+ let where_clause_str = rewrite_where_clause(
+ context,
+ &generics.where_clause.predicates,
+ generics.where_clause.span,
+ context.config.brace_style(),
+ Shape::legacy(where_budget, offset.block_only()),
+ false,
+ "{",
+ where_span_end,
+ self_ty.span.hi(),
+ option,
+ )?;
+
+ // If there is no where-clause, we may have missing comments between the trait name and
+ // the opening brace.
+ if generics.where_clause.predicates.is_empty() {
+ if let Some(hi) = where_span_end {
+ match recover_missing_comment_in_span(
+ mk_sp(self_ty.span.hi(), hi),
+ Shape::indented(offset, context.config),
+ context,
+ last_line_width(&result),
+ ) {
+ Some(ref missing_comment) if !missing_comment.is_empty() => {
+ result.push_str(missing_comment);
+ }
+ _ => (),
+ }
+ }
+ }
+
+ if is_impl_single_line(context, items.as_slice(), &result, &where_clause_str, item)? {
+ result.push_str(&where_clause_str);
+ if where_clause_str.contains('\n') || last_line_contains_single_line_comment(&result) {
+ // If the where-clause contains extra comments AND there is only one
+ // where-clause predicate, recover the comma that was suppressed by
+ // single-line where-clause formatting.
+ if generics.where_clause.predicates.len() == 1 {
+ result.push(',');
+ }
+ result.push_str(&format!("{}{{{}}}", sep, sep));
+ } else {
+ result.push_str(" {}");
+ }
+ return Some(result);
+ }
+
+ result.push_str(&where_clause_str);
+
+ let need_newline = last_line_contains_single_line_comment(&result) || result.contains('\n');
+ match context.config.brace_style() {
+ _ if need_newline => result.push_str(&sep),
+ BraceStyle::AlwaysNextLine => result.push_str(&sep),
+ BraceStyle::PreferSameLine => result.push(' '),
+ BraceStyle::SameLineWhere => {
+ if !where_clause_str.is_empty() {
+ result.push_str(&sep);
+ } else {
+ result.push(' ');
+ }
+ }
+ }
+
+ result.push('{');
+ // This is an impl body snippet (impl SampleImpl { /* here */ })
+ let lo = max(self_ty.span.hi(), generics.where_clause.span.hi());
+ let snippet = context.snippet(mk_sp(lo, item.span.hi()));
+ let open_pos = snippet.find_uncommented("{")? + 1;
+
+ if !items.is_empty() || contains_comment(&snippet[open_pos..]) {
+ let mut visitor = FmtVisitor::from_context(context);
+ let item_indent = offset.block_only().block_indent(context.config);
+ visitor.block_indent = item_indent;
+ visitor.last_pos = lo + BytePos(open_pos as u32);
+
+ visitor.visit_attrs(&item.attrs, ast::AttrStyle::Inner);
+ visitor.visit_impl_items(items);
+
+ visitor.format_missing(item.span.hi() - BytePos(1));
+
+ let inner_indent_str = visitor.block_indent.to_string_with_newline(context.config);
+ let outer_indent_str = offset.block_only().to_string_with_newline(context.config);
+
+ result.push_str(&inner_indent_str);
+ result.push_str(visitor.buffer.trim());
+ result.push_str(&outer_indent_str);
+ } else if need_newline || !context.config.empty_item_single_line() {
+ result.push_str(&sep);
+ }
+
+ result.push('}');
+
+ Some(result)
+}
+
+fn is_impl_single_line(
+ context: &RewriteContext<'_>,
+ items: &[ptr::P<ast::AssocItem>],
+ result: &str,
+ where_clause_str: &str,
+ item: &ast::Item,
+) -> Option<bool> {
+ let snippet = context.snippet(item.span);
+ let open_pos = snippet.find_uncommented("{")? + 1;
+
+ Some(
+ context.config.empty_item_single_line()
+ && items.is_empty()
+ && !result.contains('\n')
+ && result.len() + where_clause_str.len() <= context.config.max_width()
+ && !contains_comment(&snippet[open_pos..]),
+ )
+}
+
+fn format_impl_ref_and_type(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ iimpl: &ast::Impl,
+ offset: Indent,
+) -> Option<String> {
+ let ast::Impl {
+ unsafety,
+ polarity,
+ defaultness,
+ constness,
+ ref generics,
+ of_trait: ref trait_ref,
+ ref self_ty,
+ ..
+ } = *iimpl;
+ let mut result = String::with_capacity(128);
+
+ result.push_str(&format_visibility(context, &item.vis));
+ result.push_str(format_defaultness(defaultness));
+ result.push_str(format_unsafety(unsafety));
+
+ let shape = if context.config.version() == Version::Two {
+ Shape::indented(offset + last_line_width(&result), context.config)
+ } else {
+ generics_shape_from_config(
+ context.config,
+ Shape::indented(offset + last_line_width(&result), context.config),
+ 0,
+ )?
+ };
+ let generics_str = rewrite_generics(context, "impl", generics, shape)?;
+ result.push_str(&generics_str);
+ result.push_str(format_constness_right(constness));
+
+ let polarity_str = match polarity {
+ ast::ImplPolarity::Negative(_) => "!",
+ ast::ImplPolarity::Positive => "",
+ };
+
+ let polarity_overhead;
+ let trait_ref_overhead;
+ if let Some(ref trait_ref) = *trait_ref {
+ let result_len = last_line_width(&result);
+ result.push_str(&rewrite_trait_ref(
+ context,
+ trait_ref,
+ offset,
+ polarity_str,
+ result_len,
+ )?);
+ polarity_overhead = 0; // already written
+ trait_ref_overhead = " for".len();
+ } else {
+ polarity_overhead = polarity_str.len();
+ trait_ref_overhead = 0;
+ }
+
+ // Try to put the self type in a single line.
+ let curly_brace_overhead = if generics.where_clause.predicates.is_empty() {
+ // If there is no where-clause, adapt the budget for type formatting to take the
+ // space and curly brace into account.
+ match context.config.brace_style() {
+ BraceStyle::AlwaysNextLine => 0,
+ _ => 2,
+ }
+ } else {
+ 0
+ };
+ let used_space =
+ last_line_width(&result) + polarity_overhead + trait_ref_overhead + curly_brace_overhead;
+ // 1 = space before the type.
+ let budget = context.budget(used_space + 1);
+ if let Some(self_ty_str) = self_ty.rewrite(context, Shape::legacy(budget, offset)) {
+ if !self_ty_str.contains('\n') {
+ if trait_ref.is_some() {
+ result.push_str(" for ");
+ } else {
+ result.push(' ');
+ result.push_str(polarity_str);
+ }
+ result.push_str(&self_ty_str);
+ return Some(result);
+ }
+ }
+
+ // Couldn't fit the self type on a single line, put it on a new line.
+ result.push('\n');
+ // Add indentation of one additional tab.
+ let new_line_offset = offset.block_indent(context.config);
+ result.push_str(&new_line_offset.to_string(context.config));
+ if trait_ref.is_some() {
+ result.push_str("for ");
+ } else {
+ result.push_str(polarity_str);
+ }
+ let budget = context.budget(last_line_width(&result) + polarity_overhead);
+ let type_offset = match context.config.indent_style() {
+ IndentStyle::Visual => new_line_offset + trait_ref_overhead,
+ IndentStyle::Block => new_line_offset,
+ };
+ result.push_str(&*self_ty.rewrite(context, Shape::legacy(budget, type_offset))?);
+ Some(result)
+}
+
+fn rewrite_trait_ref(
+ context: &RewriteContext<'_>,
+ trait_ref: &ast::TraitRef,
+ offset: Indent,
+ polarity_str: &str,
+ result_len: usize,
+) -> Option<String> {
+ // 1 = space between generics and trait_ref
+ let used_space = 1 + polarity_str.len() + result_len;
+ let shape = Shape::indented(offset + used_space, context.config);
+ if let Some(trait_ref_str) = trait_ref.rewrite(context, shape) {
+ if !trait_ref_str.contains('\n') {
+ return Some(format!(" {}{}", polarity_str, trait_ref_str));
+ }
+ }
+ // We could not make enough space for trait_ref, so put it on a new line.
+ let offset = offset.block_indent(context.config);
+ let shape = Shape::indented(offset, context.config);
+ let trait_ref_str = trait_ref.rewrite(context, shape)?;
+ Some(format!(
+ "{}{}{}",
+ offset.to_string_with_newline(context.config),
+ polarity_str,
+ trait_ref_str
+ ))
+}
+
+pub(crate) struct StructParts<'a> {
+ prefix: &'a str,
+ ident: symbol::Ident,
+ vis: &'a ast::Visibility,
+ def: &'a ast::VariantData,
+ generics: Option<&'a ast::Generics>,
+ span: Span,
+}
+
+impl<'a> StructParts<'a> {
+ fn format_header(&self, context: &RewriteContext<'_>, offset: Indent) -> String {
+ format_header(context, self.prefix, self.ident, self.vis, offset)
+ }
+
+ fn from_variant(variant: &'a ast::Variant) -> Self {
+ StructParts {
+ prefix: "",
+ ident: variant.ident,
+ vis: &DEFAULT_VISIBILITY,
+ def: &variant.data,
+ generics: None,
+ span: variant.span,
+ }
+ }
+
+ pub(crate) fn from_item(item: &'a ast::Item) -> Self {
+ let (prefix, def, generics) = match item.kind {
+ ast::ItemKind::Struct(ref def, ref generics) => ("struct ", def, generics),
+ ast::ItemKind::Union(ref def, ref generics) => ("union ", def, generics),
+ _ => unreachable!(),
+ };
+ StructParts {
+ prefix,
+ ident: item.ident,
+ vis: &item.vis,
+ def,
+ generics: Some(generics),
+ span: item.span,
+ }
+ }
+}
+
+fn format_struct(
+ context: &RewriteContext<'_>,
+ struct_parts: &StructParts<'_>,
+ offset: Indent,
+ one_line_width: Option<usize>,
+) -> Option<String> {
+ match *struct_parts.def {
+ ast::VariantData::Unit(..) => format_unit_struct(context, struct_parts, offset),
+ ast::VariantData::Tuple(ref fields, _) => {
+ format_tuple_struct(context, struct_parts, fields, offset)
+ }
+ ast::VariantData::Struct(ref fields, _) => {
+ format_struct_struct(context, struct_parts, fields, offset, one_line_width)
+ }
+ }
+}
+
+pub(crate) fn format_trait(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ offset: Indent,
+) -> Option<String> {
+ if let ast::ItemKind::Trait(trait_kind) = &item.kind {
+ let ast::Trait {
+ is_auto,
+ unsafety,
+ ref generics,
+ ref bounds,
+ ref items,
+ } = **trait_kind;
+ let mut result = String::with_capacity(128);
+ let header = format!(
+ "{}{}{}trait ",
+ format_visibility(context, &item.vis),
+ format_unsafety(unsafety),
+ format_auto(is_auto),
+ );
+ result.push_str(&header);
+
+ let body_lo = context.snippet_provider.span_after(item.span, "{");
+
+ let shape = Shape::indented(offset, context.config).offset_left(result.len())?;
+ let generics_str =
+ rewrite_generics(context, rewrite_ident(context, item.ident), generics, shape)?;
+ result.push_str(&generics_str);
+
+ // FIXME(#2055): rustfmt fails to format when there are comments between trait bounds.
+ if !bounds.is_empty() {
+ let ident_hi = context
+ .snippet_provider
+ .span_after(item.span, item.ident.as_str());
+ let bound_hi = bounds.last().unwrap().span().hi();
+ let snippet = context.snippet(mk_sp(ident_hi, bound_hi));
+ if contains_comment(snippet) {
+ return None;
+ }
+
+ result = rewrite_assign_rhs_with(
+ context,
+ result + ":",
+ bounds,
+ shape,
+ &RhsAssignKind::Bounds,
+ RhsTactics::ForceNextLineWithoutIndent,
+ )?;
+ }
+
+ // Rewrite where-clause.
+ if !generics.where_clause.predicates.is_empty() {
+ let where_on_new_line = context.config.indent_style() != IndentStyle::Block;
+
+ let where_budget = context.budget(last_line_width(&result));
+ let pos_before_where = if bounds.is_empty() {
+ generics.where_clause.span.lo()
+ } else {
+ bounds[bounds.len() - 1].span().hi()
+ };
+ let option = WhereClauseOption::snuggled(&generics_str);
+ let where_clause_str = rewrite_where_clause(
+ context,
+ &generics.where_clause.predicates,
+ generics.where_clause.span,
+ context.config.brace_style(),
+ Shape::legacy(where_budget, offset.block_only()),
+ where_on_new_line,
+ "{",
+ None,
+ pos_before_where,
+ option,
+ )?;
+ // If the where-clause cannot fit on the same line,
+ // put the where-clause on a new line.
+ if !where_clause_str.contains('\n')
+ && last_line_width(&result) + where_clause_str.len() + offset.width()
+ > context.config.comment_width()
+ {
+ let width = offset.block_indent + context.config.tab_spaces() - 1;
+ let where_indent = Indent::new(0, width);
+ result.push_str(&where_indent.to_string_with_newline(context.config));
+ }
+ result.push_str(&where_clause_str);
+ } else {
+ let item_snippet = context.snippet(item.span);
+ if let Some(lo) = item_snippet.find('/') {
+ // 1 = `{`
+ let comment_hi = body_lo - BytePos(1);
+ let comment_lo = item.span.lo() + BytePos(lo as u32);
+ if comment_lo < comment_hi {
+ match recover_missing_comment_in_span(
+ mk_sp(comment_lo, comment_hi),
+ Shape::indented(offset, context.config),
+ context,
+ last_line_width(&result),
+ ) {
+ Some(ref missing_comment) if !missing_comment.is_empty() => {
+ result.push_str(missing_comment);
+ }
+ _ => (),
+ }
+ }
+ }
+ }
+
+ let block_span = mk_sp(generics.where_clause.span.hi(), item.span.hi());
+ let snippet = context.snippet(block_span);
+ let open_pos = snippet.find_uncommented("{")? + 1;
+
+ match context.config.brace_style() {
+ _ if last_line_contains_single_line_comment(&result)
+ || last_line_width(&result) + 2 > context.budget(offset.width()) =>
+ {
+ result.push_str(&offset.to_string_with_newline(context.config));
+ }
+ _ if context.config.empty_item_single_line()
+ && items.is_empty()
+ && !result.contains('\n')
+ && !contains_comment(&snippet[open_pos..]) =>
+ {
+ result.push_str(" {}");
+ return Some(result);
+ }
+ BraceStyle::AlwaysNextLine => {
+ result.push_str(&offset.to_string_with_newline(context.config));
+ }
+ BraceStyle::PreferSameLine => result.push(' '),
+ BraceStyle::SameLineWhere => {
+ if result.contains('\n')
+ || (!generics.where_clause.predicates.is_empty() && !items.is_empty())
+ {
+ result.push_str(&offset.to_string_with_newline(context.config));
+ } else {
+ result.push(' ');
+ }
+ }
+ }
+ result.push('{');
+
+ let outer_indent_str = offset.block_only().to_string_with_newline(context.config);
+
+ if !items.is_empty() || contains_comment(&snippet[open_pos..]) {
+ let mut visitor = FmtVisitor::from_context(context);
+ visitor.block_indent = offset.block_only().block_indent(context.config);
+ visitor.last_pos = block_span.lo() + BytePos(open_pos as u32);
+
+ for item in items {
+ visitor.visit_trait_item(item);
+ }
+
+ visitor.format_missing(item.span.hi() - BytePos(1));
+
+ let inner_indent_str = visitor.block_indent.to_string_with_newline(context.config);
+
+ result.push_str(&inner_indent_str);
+ result.push_str(visitor.buffer.trim());
+ result.push_str(&outer_indent_str);
+ } else if result.contains('\n') {
+ result.push_str(&outer_indent_str);
+ }
+
+ result.push('}');
+ Some(result)
+ } else {
+ unreachable!();
+ }
+}
+
+pub(crate) struct TraitAliasBounds<'a> {
+ generic_bounds: &'a ast::GenericBounds,
+ generics: &'a ast::Generics,
+}
+
+impl<'a> Rewrite for TraitAliasBounds<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let generic_bounds_str = self.generic_bounds.rewrite(context, shape)?;
+
+ let mut option = WhereClauseOption::new(true, WhereClauseSpace::None);
+ option.allow_single_line();
+
+ let where_str = rewrite_where_clause(
+ context,
+ &self.generics.where_clause.predicates,
+ self.generics.where_clause.span,
+ context.config.brace_style(),
+ shape,
+ false,
+ ";",
+ None,
+ self.generics.where_clause.span.lo(),
+ option,
+ )?;
+
+ let fits_single_line = !generic_bounds_str.contains('\n')
+ && !where_str.contains('\n')
+ && generic_bounds_str.len() + where_str.len() < shape.width;
+ let space = if generic_bounds_str.is_empty() || where_str.is_empty() {
+ Cow::from("")
+ } else if fits_single_line {
+ Cow::from(" ")
+ } else {
+ shape.indent.to_string_with_newline(context.config)
+ };
+
+ Some(format!("{}{}{}", generic_bounds_str, space, where_str))
+ }
+}
+
+pub(crate) fn format_trait_alias(
+ context: &RewriteContext<'_>,
+ ident: symbol::Ident,
+ vis: &ast::Visibility,
+ generics: &ast::Generics,
+ generic_bounds: &ast::GenericBounds,
+ shape: Shape,
+) -> Option<String> {
+ let alias = rewrite_ident(context, ident);
+ // 6 = "trait ", 2 = " ="
+ let g_shape = shape.offset_left(6)?.sub_width(2)?;
+ let generics_str = rewrite_generics(context, alias, generics, g_shape)?;
+ let vis_str = format_visibility(context, vis);
+ let lhs = format!("{}trait {} =", vis_str, generics_str);
+ // 1 = ";"
+ let trait_alias_bounds = TraitAliasBounds {
+ generic_bounds,
+ generics,
+ };
+ rewrite_assign_rhs(
+ context,
+ lhs,
+ &trait_alias_bounds,
+ &RhsAssignKind::Bounds,
+ shape.sub_width(1)?,
+ )
+ .map(|s| s + ";")
+}
+
+fn format_unit_struct(
+ context: &RewriteContext<'_>,
+ p: &StructParts<'_>,
+ offset: Indent,
+) -> Option<String> {
+ let header_str = format_header(context, p.prefix, p.ident, p.vis, offset);
+ let generics_str = if let Some(generics) = p.generics {
+ let hi = context.snippet_provider.span_before(p.span, ";");
+ format_generics(
+ context,
+ generics,
+ context.config.brace_style(),
+ BracePos::None,
+ offset,
+ // make a span that starts right after `struct Foo`
+ mk_sp(p.ident.span.hi(), hi),
+ last_line_width(&header_str),
+ )?
+ } else {
+ String::new()
+ };
+ Some(format!("{}{};", header_str, generics_str))
+}
+
+pub(crate) fn format_struct_struct(
+ context: &RewriteContext<'_>,
+ struct_parts: &StructParts<'_>,
+ fields: &[ast::FieldDef],
+ offset: Indent,
+ one_line_width: Option<usize>,
+) -> Option<String> {
+ let mut result = String::with_capacity(1024);
+ let span = struct_parts.span;
+
+ let header_str = struct_parts.format_header(context, offset);
+ result.push_str(&header_str);
+
+ let header_hi = struct_parts.ident.span.hi();
+ let body_lo = if let Some(generics) = struct_parts.generics {
+ // Adjust the span to start at the end of the generic arguments before searching for the '{'
+ let span = span.with_lo(generics.span.hi());
+ context.snippet_provider.span_after(span, "{")
+ } else {
+ context.snippet_provider.span_after(span, "{")
+ };
+
+ let generics_str = match struct_parts.generics {
+ Some(g) => format_generics(
+ context,
+ g,
+ context.config.brace_style(),
+ if fields.is_empty() {
+ BracePos::ForceSameLine
+ } else {
+ BracePos::Auto
+ },
+ offset,
+ // make a span that starts right after `struct Foo`
+ mk_sp(header_hi, body_lo),
+ last_line_width(&result),
+ )?,
+ None => {
+ // 3 = ` {}`, 2 = ` {`.
+ let overhead = if fields.is_empty() { 3 } else { 2 };
+ if (context.config.brace_style() == BraceStyle::AlwaysNextLine && !fields.is_empty())
+ || context.config.max_width() < overhead + result.len()
+ {
+ format!("\n{}{{", offset.block_only().to_string(context.config))
+ } else {
+ " {".to_owned()
+ }
+ }
+ };
+ // 1 = `}`
+ let overhead = if fields.is_empty() { 1 } else { 0 };
+ let total_width = result.len() + generics_str.len() + overhead;
+ if !generics_str.is_empty()
+ && !generics_str.contains('\n')
+ && total_width > context.config.max_width()
+ {
+ result.push('\n');
+ result.push_str(&offset.to_string(context.config));
+ result.push_str(generics_str.trim_start());
+ } else {
+ result.push_str(&generics_str);
+ }
+
+ if fields.is_empty() {
+ let inner_span = mk_sp(body_lo, span.hi() - BytePos(1));
+ format_empty_struct_or_tuple(context, inner_span, offset, &mut result, "", "}");
+ return Some(result);
+ }
+
+ // 3 = ` ` and ` }`
+ let one_line_budget = context.budget(result.len() + 3 + offset.width());
+ let one_line_budget =
+ one_line_width.map_or(0, |one_line_width| min(one_line_width, one_line_budget));
+
+ let items_str = rewrite_with_alignment(
+ fields,
+ context,
+ Shape::indented(offset.block_indent(context.config), context.config).sub_width(1)?,
+ mk_sp(body_lo, span.hi()),
+ one_line_budget,
+ )?;
+
+ if !items_str.contains('\n')
+ && !result.contains('\n')
+ && items_str.len() <= one_line_budget
+ && !last_line_contains_single_line_comment(&items_str)
+ {
+ Some(format!("{} {} }}", result, items_str))
+ } else {
+ Some(format!(
+ "{}\n{}{}\n{}}}",
+ result,
+ offset
+ .block_indent(context.config)
+ .to_string(context.config),
+ items_str,
+ offset.to_string(context.config)
+ ))
+ }
+}
+
+fn get_bytepos_after_visibility(vis: &ast::Visibility, default_span: Span) -> BytePos {
+ match vis.kind {
+ ast::VisibilityKind::Restricted { .. } => vis.span.hi(),
+ _ => default_span.lo(),
+ }
+}
+
+// Format a tuple struct or a struct that has no fields. We need to make sure that the
+// comments inside the delimiters are preserved.
+fn format_empty_struct_or_tuple(
+ context: &RewriteContext<'_>,
+ span: Span,
+ offset: Indent,
+ result: &mut String,
+ opener: &str,
+ closer: &str,
+) {
+ // 3 = " {}" or "();"
+ let used_width = last_line_used_width(result, offset.width()) + 3;
+ if used_width > context.config.max_width() {
+ result.push_str(&offset.to_string_with_newline(context.config))
+ }
+ result.push_str(opener);
+
+ // indented shape for proper indenting of multi-line comments
+ let shape = Shape::indented(offset.block_indent(context.config), context.config);
+ match rewrite_missing_comment(span, shape, context) {
+ Some(ref s) if s.is_empty() => (),
+ Some(ref s) => {
+ let is_multi_line = !is_single_line(s);
+ if is_multi_line || first_line_contains_single_line_comment(s) {
+ let nested_indent_str = offset
+ .block_indent(context.config)
+ .to_string_with_newline(context.config);
+ result.push_str(&nested_indent_str);
+ }
+ result.push_str(s);
+ if is_multi_line || last_line_contains_single_line_comment(s) {
+ result.push_str(&offset.to_string_with_newline(context.config));
+ }
+ }
+ None => result.push_str(context.snippet(span)),
+ }
+ result.push_str(closer);
+}
+
+fn format_tuple_struct(
+ context: &RewriteContext<'_>,
+ struct_parts: &StructParts<'_>,
+ fields: &[ast::FieldDef],
+ offset: Indent,
+) -> Option<String> {
+ let mut result = String::with_capacity(1024);
+ let span = struct_parts.span;
+
+ let header_str = struct_parts.format_header(context, offset);
+ result.push_str(&header_str);
+
+ let body_lo = if fields.is_empty() {
+ let lo = get_bytepos_after_visibility(struct_parts.vis, span);
+ context
+ .snippet_provider
+ .span_after(mk_sp(lo, span.hi()), "(")
+ } else {
+ fields[0].span.lo()
+ };
+ let body_hi = if fields.is_empty() {
+ context
+ .snippet_provider
+ .span_after(mk_sp(body_lo, span.hi()), ")")
+ } else {
+ // This is a dirty hack to work around a missing `)` from the span of the last field.
+ let last_arg_span = fields[fields.len() - 1].span;
+ context
+ .snippet_provider
+ .opt_span_after(mk_sp(last_arg_span.hi(), span.hi()), ")")
+ .unwrap_or_else(|| last_arg_span.hi())
+ };
+
+ let where_clause_str = match struct_parts.generics {
+ Some(generics) => {
+ let budget = context.budget(last_line_width(&header_str));
+ let shape = Shape::legacy(budget, offset);
+ let generics_str = rewrite_generics(context, "", generics, shape)?;
+ result.push_str(&generics_str);
+
+ let where_budget = context.budget(last_line_width(&result));
+ let option = WhereClauseOption::new(true, WhereClauseSpace::Newline);
+ rewrite_where_clause(
+ context,
+ &generics.where_clause.predicates,
+ generics.where_clause.span,
+ context.config.brace_style(),
+ Shape::legacy(where_budget, offset.block_only()),
+ false,
+ ";",
+ None,
+ body_hi,
+ option,
+ )?
+ }
+ None => "".to_owned(),
+ };
+
+ if fields.is_empty() {
+ let body_hi = context
+ .snippet_provider
+ .span_before(mk_sp(body_lo, span.hi()), ")");
+ let inner_span = mk_sp(body_lo, body_hi);
+ format_empty_struct_or_tuple(context, inner_span, offset, &mut result, "(", ")");
+ } else {
+ let shape = Shape::indented(offset, context.config).sub_width(1)?;
+ let lo = if let Some(generics) = struct_parts.generics {
+ generics.span.hi()
+ } else {
+ struct_parts.ident.span.hi()
+ };
+ result = overflow::rewrite_with_parens(
+ context,
+ &result,
+ fields.iter(),
+ shape,
+ mk_sp(lo, span.hi()),
+ context.config.fn_call_width(),
+ None,
+ )?;
+ }
+
+ if !where_clause_str.is_empty()
+ && !where_clause_str.contains('\n')
+ && (result.contains('\n')
+ || offset.block_indent + result.len() + where_clause_str.len() + 1
+ > context.config.max_width())
+ {
+ // We need to put the where-clause on a new line, but we didn't
+ // know that earlier, so the where-clause will not be indented properly.
+ result.push('\n');
+ result.push_str(
+ &(offset.block_only() + (context.config.tab_spaces() - 1)).to_string(context.config),
+ );
+ }
+ result.push_str(&where_clause_str);
+
+ Some(result)
+}
+
+pub(crate) enum ItemVisitorKind<'a> {
+ Item(&'a ast::Item),
+ AssocTraitItem(&'a ast::AssocItem),
+ AssocImplItem(&'a ast::AssocItem),
+ ForeignItem(&'a ast::ForeignItem),
+}
+
+struct TyAliasRewriteInfo<'c, 'g>(
+ &'c RewriteContext<'c>,
+ Indent,
+ &'g ast::Generics,
+ (ast::TyAliasWhereClause, ast::TyAliasWhereClause),
+ usize,
+ symbol::Ident,
+ Span,
+);
+
+pub(crate) fn rewrite_type_alias<'a, 'b>(
+ ty_alias_kind: &ast::TyAlias,
+ context: &RewriteContext<'a>,
+ indent: Indent,
+ visitor_kind: &ItemVisitorKind<'b>,
+ span: Span,
+) -> Option<String> {
+ use ItemVisitorKind::*;
+
+ let ast::TyAlias {
+ defaultness,
+ ref generics,
+ ref bounds,
+ ref ty,
+ where_clauses,
+ where_predicates_split,
+ } = *ty_alias_kind;
+ let ty_opt = ty.as_ref();
+ let (ident, vis) = match visitor_kind {
+ Item(i) => (i.ident, &i.vis),
+ AssocTraitItem(i) | AssocImplItem(i) => (i.ident, &i.vis),
+ ForeignItem(i) => (i.ident, &i.vis),
+ };
+ let rw_info = &TyAliasRewriteInfo(
+ context,
+ indent,
+ generics,
+ where_clauses,
+ where_predicates_split,
+ ident,
+ span,
+ );
+ let op_ty = opaque_ty(ty);
+ // Type Aliases are formatted slightly differently depending on the context
+ // in which they appear, whether they are opaque, and whether they are associated.
+ // https://rustc-dev-guide.rust-lang.org/opaque-types-type-alias-impl-trait.html
+ // https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/items.md#type-aliases
+ match (visitor_kind, &op_ty) {
+ (Item(_) | AssocTraitItem(_) | ForeignItem(_), Some(op_bounds)) => {
+ let op = OpaqueType { bounds: op_bounds };
+ rewrite_ty(rw_info, Some(bounds), Some(&op), vis)
+ }
+ (Item(_) | AssocTraitItem(_) | ForeignItem(_), None) => {
+ rewrite_ty(rw_info, Some(bounds), ty_opt, vis)
+ }
+ (AssocImplItem(_), _) => {
+ let result = if let Some(op_bounds) = op_ty {
+ let op = OpaqueType { bounds: op_bounds };
+ rewrite_ty(rw_info, Some(bounds), Some(&op), &DEFAULT_VISIBILITY)
+ } else {
+ rewrite_ty(rw_info, Some(bounds), ty_opt, vis)
+ }?;
+ match defaultness {
+ ast::Defaultness::Default(..) => Some(format!("default {}", result)),
+ _ => Some(result),
+ }
+ }
+ }
+}
+
+fn rewrite_ty<R: Rewrite>(
+ rw_info: &TyAliasRewriteInfo<'_, '_>,
+ generic_bounds_opt: Option<&ast::GenericBounds>,
+ rhs: Option<&R>,
+ vis: &ast::Visibility,
+) -> Option<String> {
+ let mut result = String::with_capacity(128);
+ let TyAliasRewriteInfo(
+ context,
+ indent,
+ generics,
+ where_clauses,
+ where_predicates_split,
+ ident,
+ span,
+ ) = *rw_info;
+ let (before_where_predicates, after_where_predicates) = generics
+ .where_clause
+ .predicates
+ .split_at(where_predicates_split);
+ if !after_where_predicates.is_empty() {
+ return None;
+ }
+ result.push_str(&format!("{}type ", format_visibility(context, vis)));
+ let ident_str = rewrite_ident(context, ident);
+
+ if generics.params.is_empty() {
+ result.push_str(ident_str)
+ } else {
+ // 2 = `= `
+ let g_shape = Shape::indented(indent, context.config)
+ .offset_left(result.len())?
+ .sub_width(2)?;
+ let generics_str = rewrite_generics(context, ident_str, generics, g_shape)?;
+ result.push_str(&generics_str);
+ }
+
+ if let Some(bounds) = generic_bounds_opt {
+ if !bounds.is_empty() {
+ // 2 = `: `
+ let shape = Shape::indented(indent, context.config).offset_left(result.len() + 2)?;
+ let type_bounds = bounds.rewrite(context, shape).map(|s| format!(": {}", s))?;
+ result.push_str(&type_bounds);
+ }
+ }
+
+ let where_budget = context.budget(last_line_width(&result));
+ let mut option = WhereClauseOption::snuggled(&result);
+ if rhs.is_none() {
+ option.suppress_comma();
+ }
+ let where_clause_str = rewrite_where_clause(
+ context,
+ before_where_predicates,
+ where_clauses.0.1,
+ context.config.brace_style(),
+ Shape::legacy(where_budget, indent),
+ false,
+ "=",
+ None,
+ generics.span.hi(),
+ option,
+ )?;
+ result.push_str(&where_clause_str);
+
+ if let Some(ty) = rhs {
+ // If there's a where clause, add a newline before the assignment. Otherwise just add a
+ // space.
+ let has_where = !before_where_predicates.is_empty();
+ if has_where {
+ result.push_str(&indent.to_string_with_newline(context.config));
+ } else {
+ result.push(' ');
+ }
+
+ let comment_span = context
+ .snippet_provider
+ .opt_span_before(span, "=")
+ .map(|op_lo| mk_sp(where_clauses.0.1.hi(), op_lo));
+
+ let lhs = match comment_span {
+ Some(comment_span)
+ if contains_comment(context.snippet_provider.span_to_snippet(comment_span)?) =>
+ {
+ let comment_shape = if has_where {
+ Shape::indented(indent, context.config)
+ } else {
+ Shape::indented(indent, context.config)
+ .block_left(context.config.tab_spaces())?
+ };
+
+ combine_strs_with_missing_comments(
+ context,
+ result.trim_end(),
+ "=",
+ comment_span,
+ comment_shape,
+ true,
+ )?
+ }
+ _ => format!("{}=", result),
+ };
+
+ // 1 = `;`
+ let shape = Shape::indented(indent, context.config).sub_width(1)?;
+ rewrite_assign_rhs(context, lhs, &*ty, &RhsAssignKind::Ty, shape).map(|s| s + ";")
+ } else {
+ Some(format!("{};", result))
+ }
+}
+
+fn type_annotation_spacing(config: &Config) -> (&str, &str) {
+ (
+ if config.space_before_colon() { " " } else { "" },
+ if config.space_after_colon() { " " } else { "" },
+ )
+}
+
+pub(crate) fn rewrite_struct_field_prefix(
+ context: &RewriteContext<'_>,
+ field: &ast::FieldDef,
+) -> Option<String> {
+ let vis = format_visibility(context, &field.vis);
+ let type_annotation_spacing = type_annotation_spacing(context.config);
+ Some(match field.ident {
+ Some(name) => format!(
+ "{}{}{}:",
+ vis,
+ rewrite_ident(context, name),
+ type_annotation_spacing.0
+ ),
+ None => vis.to_string(),
+ })
+}
+
+impl Rewrite for ast::FieldDef {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ rewrite_struct_field(context, self, shape, 0)
+ }
+}
+
+pub(crate) fn rewrite_struct_field(
+ context: &RewriteContext<'_>,
+ field: &ast::FieldDef,
+ shape: Shape,
+ lhs_max_width: usize,
+) -> Option<String> {
+ if contains_skip(&field.attrs) {
+ return Some(context.snippet(field.span()).to_owned());
+ }
+
+ let type_annotation_spacing = type_annotation_spacing(context.config);
+ let prefix = rewrite_struct_field_prefix(context, field)?;
+
+ let attrs_str = field.attrs.rewrite(context, shape)?;
+ let attrs_extendable = field.ident.is_none() && is_attributes_extendable(&attrs_str);
+ let missing_span = if field.attrs.is_empty() {
+ mk_sp(field.span.lo(), field.span.lo())
+ } else {
+ mk_sp(field.attrs.last().unwrap().span.hi(), field.span.lo())
+ };
+ let mut spacing = String::from(if field.ident.is_some() {
+ type_annotation_spacing.1
+ } else {
+ ""
+ });
+ // Try to put everything on a single line.
+ let attr_prefix = combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ &prefix,
+ missing_span,
+ shape,
+ attrs_extendable,
+ )?;
+ let overhead = trimmed_last_line_width(&attr_prefix);
+ let lhs_offset = lhs_max_width.saturating_sub(overhead);
+ for _ in 0..lhs_offset {
+ spacing.push(' ');
+ }
+ // In this extreme case we will be missing a space between an attribute and a field.
+ if prefix.is_empty() && !attrs_str.is_empty() && attrs_extendable && spacing.is_empty() {
+ spacing.push(' ');
+ }
+ let orig_ty = shape
+ .offset_left(overhead + spacing.len())
+ .and_then(|ty_shape| field.ty.rewrite(context, ty_shape));
+ if let Some(ref ty) = orig_ty {
+ if !ty.contains('\n') && !contains_comment(context.snippet(missing_span)) {
+ return Some(attr_prefix + &spacing + ty);
+ }
+ }
+
+ let is_prefix_empty = prefix.is_empty();
+ // We must use multiline. We are going to put attributes and a field on different lines.
+ let field_str = rewrite_assign_rhs(context, prefix, &*field.ty, &RhsAssignKind::Ty, shape)?;
+ // Remove a leading white-space from `rewrite_assign_rhs()` when rewriting a tuple struct.
+ let field_str = if is_prefix_empty {
+ field_str.trim_start()
+ } else {
+ &field_str
+ };
+ combine_strs_with_missing_comments(context, &attrs_str, field_str, missing_span, shape, false)
+}
+
+pub(crate) struct StaticParts<'a> {
+ prefix: &'a str,
+ vis: &'a ast::Visibility,
+ ident: symbol::Ident,
+ ty: &'a ast::Ty,
+ mutability: ast::Mutability,
+ expr_opt: Option<&'a ptr::P<ast::Expr>>,
+ defaultness: Option<ast::Defaultness>,
+ span: Span,
+}
+
+impl<'a> StaticParts<'a> {
+ pub(crate) fn from_item(item: &'a ast::Item) -> Self {
+ let (defaultness, prefix, ty, mutability, expr) = match item.kind {
+ ast::ItemKind::Static(ref ty, mutability, ref expr) => {
+ (None, "static", ty, mutability, expr)
+ }
+ ast::ItemKind::Const(defaultness, ref ty, ref expr) => {
+ (Some(defaultness), "const", ty, ast::Mutability::Not, expr)
+ }
+ _ => unreachable!(),
+ };
+ StaticParts {
+ prefix,
+ vis: &item.vis,
+ ident: item.ident,
+ ty,
+ mutability,
+ expr_opt: expr.as_ref(),
+ defaultness,
+ span: item.span,
+ }
+ }
+
+ pub(crate) fn from_trait_item(ti: &'a ast::AssocItem) -> Self {
+ let (defaultness, ty, expr_opt) = match ti.kind {
+ ast::AssocItemKind::Const(defaultness, ref ty, ref expr_opt) => {
+ (defaultness, ty, expr_opt)
+ }
+ _ => unreachable!(),
+ };
+ StaticParts {
+ prefix: "const",
+ vis: &ti.vis,
+ ident: ti.ident,
+ ty,
+ mutability: ast::Mutability::Not,
+ expr_opt: expr_opt.as_ref(),
+ defaultness: Some(defaultness),
+ span: ti.span,
+ }
+ }
+
+ pub(crate) fn from_impl_item(ii: &'a ast::AssocItem) -> Self {
+ let (defaultness, ty, expr) = match ii.kind {
+ ast::AssocItemKind::Const(defaultness, ref ty, ref expr) => (defaultness, ty, expr),
+ _ => unreachable!(),
+ };
+ StaticParts {
+ prefix: "const",
+ vis: &ii.vis,
+ ident: ii.ident,
+ ty,
+ mutability: ast::Mutability::Not,
+ expr_opt: expr.as_ref(),
+ defaultness: Some(defaultness),
+ span: ii.span,
+ }
+ }
+}
+
+fn rewrite_static(
+ context: &RewriteContext<'_>,
+ static_parts: &StaticParts<'_>,
+ offset: Indent,
+) -> Option<String> {
+ let colon = colon_spaces(context.config);
+ let mut prefix = format!(
+ "{}{}{} {}{}{}",
+ format_visibility(context, static_parts.vis),
+ static_parts.defaultness.map_or("", format_defaultness),
+ static_parts.prefix,
+ format_mutability(static_parts.mutability),
+ rewrite_ident(context, static_parts.ident),
+ colon,
+ );
+ // 2 = " =".len()
+ let ty_shape =
+ Shape::indented(offset.block_only(), context.config).offset_left(prefix.len() + 2)?;
+ let ty_str = match static_parts.ty.rewrite(context, ty_shape) {
+ Some(ty_str) => ty_str,
+ None => {
+ if prefix.ends_with(' ') {
+ prefix.pop();
+ }
+ let nested_indent = offset.block_indent(context.config);
+ let nested_shape = Shape::indented(nested_indent, context.config);
+ let ty_str = static_parts.ty.rewrite(context, nested_shape)?;
+ format!(
+ "{}{}",
+ nested_indent.to_string_with_newline(context.config),
+ ty_str
+ )
+ }
+ };
+
+ if let Some(expr) = static_parts.expr_opt {
+ let comments_lo = context.snippet_provider.span_after(static_parts.span, "=");
+ let expr_lo = expr.span.lo();
+ let comments_span = mk_sp(comments_lo, expr_lo);
+
+ let lhs = format!("{}{} =", prefix, ty_str);
+
+ // 1 = ;
+ let remaining_width = context.budget(offset.block_indent + 1);
+ rewrite_assign_rhs_with_comments(
+ context,
+ &lhs,
+ &**expr,
+ Shape::legacy(remaining_width, offset.block_only()),
+ &RhsAssignKind::Expr(&expr.kind, expr.span),
+ RhsTactics::Default,
+ comments_span,
+ true,
+ )
+ .and_then(|res| recover_comment_removed(res, static_parts.span, context))
+ .map(|s| if s.ends_with(';') { s } else { s + ";" })
+ } else {
+ Some(format!("{}{};", prefix, ty_str))
+ }
+}
+
+// FIXME(calebcartwright) - This is a hack around a bug in the handling of TyKind::ImplTrait.
+// This should be removed once that bug is resolved, with the type alias formatting using the
+// defined Ty for the RHS directly.
+// https://github.com/rust-lang/rustfmt/issues/4373
+// https://github.com/rust-lang/rustfmt/issues/5027
+struct OpaqueType<'a> {
+ bounds: &'a ast::GenericBounds,
+}
+
+impl<'a> Rewrite for OpaqueType<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let shape = shape.offset_left(5)?; // `impl `
+ self.bounds
+ .rewrite(context, shape)
+ .map(|s| format!("impl {}", s))
+ }
+}
+
+impl Rewrite for ast::FnRetTy {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match *self {
+ ast::FnRetTy::Default(_) => Some(String::new()),
+ ast::FnRetTy::Ty(ref ty) => {
+ if context.config.version() == Version::One
+ || context.config.indent_style() == IndentStyle::Visual
+ {
+ let inner_width = shape.width.checked_sub(3)?;
+ return ty
+ .rewrite(context, Shape::legacy(inner_width, shape.indent + 3))
+ .map(|r| format!("-> {}", r));
+ }
+
+ ty.rewrite(context, shape.offset_left(3)?)
+ .map(|s| format!("-> {}", s))
+ }
+ }
+ }
+}
+
+fn is_empty_infer(ty: &ast::Ty, pat_span: Span) -> bool {
+ match ty.kind {
+ ast::TyKind::Infer => ty.span.hi() == pat_span.hi(),
+ _ => false,
+ }
+}
+
+/// Recover any missing comments between the param and the type.
+///
+/// # Returns
+///
+/// A tuple whose first element is the comment before the colon and whose second element is
+/// the comment after the colon.
+fn get_missing_param_comments(
+ context: &RewriteContext<'_>,
+ pat_span: Span,
+ ty_span: Span,
+ shape: Shape,
+) -> (String, String) {
+ let missing_comment_span = mk_sp(pat_span.hi(), ty_span.lo());
+
+ let span_before_colon = {
+ let missing_comment_span_hi = context
+ .snippet_provider
+ .span_before(missing_comment_span, ":");
+ mk_sp(pat_span.hi(), missing_comment_span_hi)
+ };
+ let span_after_colon = {
+ let missing_comment_span_lo = context
+ .snippet_provider
+ .span_after(missing_comment_span, ":");
+ mk_sp(missing_comment_span_lo, ty_span.lo())
+ };
+
+ let comment_before_colon = rewrite_missing_comment(span_before_colon, shape, context)
+ .filter(|comment| !comment.is_empty())
+ .map_or(String::new(), |comment| format!(" {}", comment));
+ let comment_after_colon = rewrite_missing_comment(span_after_colon, shape, context)
+ .filter(|comment| !comment.is_empty())
+ .map_or(String::new(), |comment| format!("{} ", comment));
+ (comment_before_colon, comment_after_colon)
+}
+
+impl Rewrite for ast::Param {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let param_attrs_result = self
+ .attrs
+ .rewrite(context, Shape::legacy(shape.width, shape.indent))?;
+ // N.B. Doc comments aren't typically valid syntax, but could appear
+ // in the presence of certain macros - https://github.com/rust-lang/rustfmt/issues/4936
+ let (span, has_multiple_attr_lines, has_doc_comments) = if !self.attrs.is_empty() {
+ let num_attrs = self.attrs.len();
+ (
+ mk_sp(self.attrs[num_attrs - 1].span.hi(), self.pat.span.lo()),
+ param_attrs_result.contains('\n'),
+ self.attrs.iter().any(|a| a.is_doc_comment()),
+ )
+ } else {
+ (mk_sp(self.span.lo(), self.span.lo()), false, false)
+ };
+
+ if let Some(ref explicit_self) = self.to_self() {
+ rewrite_explicit_self(
+ context,
+ explicit_self,
+ &param_attrs_result,
+ span,
+ shape,
+ has_multiple_attr_lines,
+ )
+ } else if is_named_param(self) {
+ let param_name = &self
+ .pat
+ .rewrite(context, Shape::legacy(shape.width, shape.indent))?;
+ let mut result = combine_strs_with_missing_comments(
+ context,
+ &param_attrs_result,
+ param_name,
+ span,
+ shape,
+ !has_multiple_attr_lines && !has_doc_comments,
+ )?;
+
+ if !is_empty_infer(&*self.ty, self.pat.span) {
+ let (before_comment, after_comment) =
+ get_missing_param_comments(context, self.pat.span, self.ty.span, shape);
+ result.push_str(&before_comment);
+ result.push_str(colon_spaces(context.config));
+ result.push_str(&after_comment);
+ let overhead = last_line_width(&result);
+ let max_width = shape.width.checked_sub(overhead)?;
+ if let Some(ty_str) = self
+ .ty
+ .rewrite(context, Shape::legacy(max_width, shape.indent))
+ {
+ result.push_str(&ty_str);
+ } else {
+ let prev_str = if param_attrs_result.is_empty() {
+ param_attrs_result
+ } else {
+ param_attrs_result + &shape.to_string_with_newline(context.config)
+ };
+
+ result = combine_strs_with_missing_comments(
+ context,
+ &prev_str,
+ param_name,
+ span,
+ shape,
+ !has_multiple_attr_lines,
+ )?;
+ result.push_str(&before_comment);
+ result.push_str(colon_spaces(context.config));
+ result.push_str(&after_comment);
+ let overhead = last_line_width(&result);
+ let max_width = shape.width.checked_sub(overhead)?;
+ let ty_str = self
+ .ty
+ .rewrite(context, Shape::legacy(max_width, shape.indent))?;
+ result.push_str(&ty_str);
+ }
+ }
+
+ Some(result)
+ } else {
+ self.ty.rewrite(context, shape)
+ }
+ }
+}
+
+fn rewrite_explicit_self(
+ context: &RewriteContext<'_>,
+ explicit_self: &ast::ExplicitSelf,
+ param_attrs: &str,
+ span: Span,
+ shape: Shape,
+ has_multiple_attr_lines: bool,
+) -> Option<String> {
+ match explicit_self.node {
+ ast::SelfKind::Region(lt, m) => {
+ let mut_str = format_mutability(m);
+ match lt {
+ Some(ref l) => {
+ let lifetime_str = l.rewrite(
+ context,
+ Shape::legacy(context.config.max_width(), Indent::empty()),
+ )?;
+ Some(combine_strs_with_missing_comments(
+ context,
+ param_attrs,
+ &format!("&{} {}self", lifetime_str, mut_str),
+ span,
+ shape,
+ !has_multiple_attr_lines,
+ )?)
+ }
+ None => Some(combine_strs_with_missing_comments(
+ context,
+ param_attrs,
+ &format!("&{}self", mut_str),
+ span,
+ shape,
+ !has_multiple_attr_lines,
+ )?),
+ }
+ }
+ ast::SelfKind::Explicit(ref ty, mutability) => {
+ let type_str = ty.rewrite(
+ context,
+ Shape::legacy(context.config.max_width(), Indent::empty()),
+ )?;
+
+ Some(combine_strs_with_missing_comments(
+ context,
+ param_attrs,
+ &format!("{}self: {}", format_mutability(mutability), type_str),
+ span,
+ shape,
+ !has_multiple_attr_lines,
+ )?)
+ }
+ ast::SelfKind::Value(mutability) => Some(combine_strs_with_missing_comments(
+ context,
+ param_attrs,
+ &format!("{}self", format_mutability(mutability)),
+ span,
+ shape,
+ !has_multiple_attr_lines,
+ )?),
+ }
+}
+
+pub(crate) fn span_lo_for_param(param: &ast::Param) -> BytePos {
+ if param.attrs.is_empty() {
+ if is_named_param(param) {
+ param.pat.span.lo()
+ } else {
+ param.ty.span.lo()
+ }
+ } else {
+ param.attrs[0].span.lo()
+ }
+}
+
+pub(crate) fn span_hi_for_param(context: &RewriteContext<'_>, param: &ast::Param) -> BytePos {
+ match param.ty.kind {
+ ast::TyKind::Infer if context.snippet(param.ty.span) == "_" => param.ty.span.hi(),
+ ast::TyKind::Infer if is_named_param(param) => param.pat.span.hi(),
+ _ => param.ty.span.hi(),
+ }
+}
+
+pub(crate) fn is_named_param(param: &ast::Param) -> bool {
+ if let ast::PatKind::Ident(_, ident, _) = param.pat.kind {
+ ident.name != symbol::kw::Empty
+ } else {
+ true
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum FnBraceStyle {
+ SameLine,
+ NextLine,
+ None,
+}
+
+// Return type is (result, force_new_line_for_brace)
+fn rewrite_fn_base(
+ context: &RewriteContext<'_>,
+ indent: Indent,
+ ident: symbol::Ident,
+ fn_sig: &FnSig<'_>,
+ span: Span,
+ fn_brace_style: FnBraceStyle,
+) -> Option<(String, bool, bool)> {
+ let mut force_new_line_for_brace = false;
+
+ let where_clause = &fn_sig.generics.where_clause;
+
+ let mut result = String::with_capacity(1024);
+ result.push_str(&fn_sig.to_str(context));
+
+ // fn foo
+ result.push_str("fn ");
+
+ // Generics.
+ let overhead = if let FnBraceStyle::SameLine = fn_brace_style {
+ // 4 = `() {`
+ 4
+ } else {
+ // 2 = `()`
+ 2
+ };
+ let used_width = last_line_used_width(&result, indent.width());
+ let one_line_budget = context.budget(used_width + overhead);
+ let shape = Shape {
+ width: one_line_budget,
+ indent,
+ offset: used_width,
+ };
+ let fd = fn_sig.decl;
+ let generics_str = rewrite_generics(
+ context,
+ rewrite_ident(context, ident),
+ &fn_sig.generics,
+ shape,
+ )?;
+ result.push_str(&generics_str);
+
+ let snuggle_angle_bracket = generics_str
+ .lines()
+ .last()
+ .map_or(false, |l| l.trim_start().len() == 1);
+
+ // Note that the width and indent don't really matter; we'll re-layout the
+ // return type later anyway.
+ let ret_str = fd
+ .output
+ .rewrite(context, Shape::indented(indent, context.config))?;
+
+ let multi_line_ret_str = ret_str.contains('\n');
+ let ret_str_len = if multi_line_ret_str { 0 } else { ret_str.len() };
+
+ // Params.
+ let (one_line_budget, multi_line_budget, mut param_indent) = compute_budgets_for_params(
+ context,
+ &result,
+ indent,
+ ret_str_len,
+ fn_brace_style,
+ multi_line_ret_str,
+ )?;
+
+ debug!(
+ "rewrite_fn_base: one_line_budget: {}, multi_line_budget: {}, param_indent: {:?}",
+ one_line_budget, multi_line_budget, param_indent
+ );
+
+ result.push('(');
+ // Check if vertical layout was forced.
+ if one_line_budget == 0
+ && !snuggle_angle_bracket
+ && context.config.indent_style() == IndentStyle::Visual
+ {
+ result.push_str(&param_indent.to_string_with_newline(context.config));
+ }
+
+ let params_end = if fd.inputs.is_empty() {
+ context
+ .snippet_provider
+ .span_after(mk_sp(fn_sig.generics.span.hi(), span.hi()), ")")
+ } else {
+ let last_span = mk_sp(fd.inputs[fd.inputs.len() - 1].span().hi(), span.hi());
+ context.snippet_provider.span_after(last_span, ")")
+ };
+ let params_span = mk_sp(
+ context
+ .snippet_provider
+ .span_after(mk_sp(fn_sig.generics.span.hi(), span.hi()), "("),
+ params_end,
+ );
+ let param_str = rewrite_params(
+ context,
+ &fd.inputs,
+ one_line_budget,
+ multi_line_budget,
+ indent,
+ param_indent,
+ params_span,
+ fd.c_variadic(),
+ )?;
+
+ let put_params_in_block = match context.config.indent_style() {
+ IndentStyle::Block => param_str.contains('\n') || param_str.len() > one_line_budget,
+ _ => false,
+ } && !fd.inputs.is_empty();
+
+ let mut params_last_line_contains_comment = false;
+ let mut no_params_and_over_max_width = false;
+
+ if put_params_in_block {
+ param_indent = indent.block_indent(context.config);
+ result.push_str(&param_indent.to_string_with_newline(context.config));
+ result.push_str(&param_str);
+ result.push_str(&indent.to_string_with_newline(context.config));
+ result.push(')');
+ } else {
+ result.push_str(&param_str);
+ let used_width = last_line_used_width(&result, indent.width()) + first_line_width(&ret_str);
+ // Put the closing paren on the next line if it overflows the max width.
+ // 1 = `)`
+ let closing_paren_overflow_max_width =
+ fd.inputs.is_empty() && used_width + 1 > context.config.max_width();
+ // If the last line of params contains a comment, we cannot put the closing paren
+ // on the same line.
+ params_last_line_contains_comment = param_str
+ .lines()
+ .last()
+ .map_or(false, |last_line| last_line.contains("//"));
+
+ if context.config.version() == Version::Two {
+ if closing_paren_overflow_max_width {
+ result.push(')');
+ result.push_str(&indent.to_string_with_newline(context.config));
+ no_params_and_over_max_width = true;
+ } else if params_last_line_contains_comment {
+ result.push_str(&indent.to_string_with_newline(context.config));
+ result.push(')');
+ no_params_and_over_max_width = true;
+ } else {
+ result.push(')');
+ }
+ } else {
+ if closing_paren_overflow_max_width || params_last_line_contains_comment {
+ result.push_str(&indent.to_string_with_newline(context.config));
+ }
+ result.push(')');
+ }
+ }
+
+ // Return type.
+ if let ast::FnRetTy::Ty(..) = fd.output {
+ let ret_should_indent = match context.config.indent_style() {
+ // If our params are block layout then we surely must have space.
+ IndentStyle::Block if put_params_in_block || fd.inputs.is_empty() => false,
+ _ if params_last_line_contains_comment => false,
+ _ if result.contains('\n') || multi_line_ret_str => true,
+ _ => {
+ // If the return type would push over the max width, then put the return type on
+ // a new line. The +1 in the signature length accounts for the space between the
+ // closing parenthesis of the params and the arrow '->'.
+ let mut sig_length = result.len() + indent.width() + ret_str_len + 1;
+
+ // If there is no where-clause, take into account the space after the return type
+ // and the brace.
+ if where_clause.predicates.is_empty() {
+ sig_length += 2;
+ }
+
+ sig_length > context.config.max_width()
+ }
+ };
+ let ret_shape = if ret_should_indent {
+ if context.config.version() == Version::One
+ || context.config.indent_style() == IndentStyle::Visual
+ {
+ let indent = if param_str.is_empty() {
+ // Aligning with non-existent params looks silly.
+ force_new_line_for_brace = true;
+ indent + 4
+ } else {
+ // FIXME: we might want to check that using the param indent
+ // doesn't blow our budget, and if it does, then fall back to
+ // the where-clause indent.
+ param_indent
+ };
+
+ result.push_str(&indent.to_string_with_newline(context.config));
+ Shape::indented(indent, context.config)
+ } else {
+ let mut ret_shape = Shape::indented(indent, context.config);
+ if param_str.is_empty() {
+ // Aligning with non-existent params looks silly.
+ force_new_line_for_brace = true;
+ ret_shape = if context.use_block_indent() {
+ ret_shape.offset_left(4).unwrap_or(ret_shape)
+ } else {
+ ret_shape.indent = ret_shape.indent + 4;
+ ret_shape
+ };
+ }
+
+ result.push_str(&ret_shape.indent.to_string_with_newline(context.config));
+ ret_shape
+ }
+ } else {
+ if context.config.version() == Version::Two {
+ if !param_str.is_empty() || !no_params_and_over_max_width {
+ result.push(' ');
+ }
+ } else {
+ result.push(' ');
+ }
+
+ let ret_shape = Shape::indented(indent, context.config);
+ ret_shape
+ .offset_left(last_line_width(&result))
+ .unwrap_or(ret_shape)
+ };
+
+ if multi_line_ret_str || ret_should_indent {
+ // Now that we know the proper indent and width, we need to
+ // re-layout the return type.
+ let ret_str = fd.output.rewrite(context, ret_shape)?;
+ result.push_str(&ret_str);
+ } else {
+ result.push_str(&ret_str);
+ }
+
+ // Comment between return type and the end of the decl.
+ let snippet_lo = fd.output.span().hi();
+ if where_clause.predicates.is_empty() {
+ let snippet_hi = span.hi();
+ let snippet = context.snippet(mk_sp(snippet_lo, snippet_hi));
+ // Try to preserve the layout of the original snippet.
+ let original_starts_with_newline = snippet
+ .find(|c| c != ' ')
+ .map_or(false, |i| starts_with_newline(&snippet[i..]));
+ let original_ends_with_newline = snippet
+ .rfind(|c| c != ' ')
+ .map_or(false, |i| snippet[i..].ends_with('\n'));
+ let snippet = snippet.trim();
+ if !snippet.is_empty() {
+ result.push(if original_starts_with_newline {
+ '\n'
+ } else {
+ ' '
+ });
+ result.push_str(snippet);
+ if original_ends_with_newline {
+ force_new_line_for_brace = true;
+ }
+ }
+ }
+ }
+
+ let pos_before_where = match fd.output {
+ ast::FnRetTy::Default(..) => params_span.hi(),
+ ast::FnRetTy::Ty(ref ty) => ty.span.hi(),
+ };
+
+ let is_params_multi_lined = param_str.contains('\n');
+
+ let space = if put_params_in_block && ret_str.is_empty() {
+ WhereClauseSpace::Space
+ } else {
+ WhereClauseSpace::Newline
+ };
+ let mut option = WhereClauseOption::new(fn_brace_style == FnBraceStyle::None, space);
+ if is_params_multi_lined {
+ option.veto_single_line();
+ }
+ let where_clause_str = rewrite_where_clause(
+ context,
+ &where_clause.predicates,
+ where_clause.span,
+ context.config.brace_style(),
+ Shape::indented(indent, context.config),
+ true,
+ "{",
+ Some(span.hi()),
+ pos_before_where,
+ option,
+ )?;
+ // If there are neither where-clause nor return type, we may be missing comments between
+ // params and `{`.
+ if where_clause_str.is_empty() {
+ if let ast::FnRetTy::Default(ret_span) = fd.output {
+ match recover_missing_comment_in_span(
+ mk_sp(params_span.hi(), ret_span.hi()),
+ shape,
+ context,
+ last_line_width(&result),
+ ) {
+ Some(ref missing_comment) if !missing_comment.is_empty() => {
+ result.push_str(missing_comment);
+ force_new_line_for_brace = true;
+ }
+ _ => (),
+ }
+ }
+ }
+
+ result.push_str(&where_clause_str);
+
+ let ends_with_comment = last_line_contains_single_line_comment(&result);
+ force_new_line_for_brace |= ends_with_comment;
+ force_new_line_for_brace |=
+ is_params_multi_lined && context.config.where_single_line() && !where_clause_str.is_empty();
+ Some((result, ends_with_comment, force_new_line_for_brace))
+}
+
+/// Kind of spaces to put before `where`.
+#[derive(Copy, Clone)]
+enum WhereClauseSpace {
+ /// A single space.
+ Space,
+ /// A new line.
+ Newline,
+ /// Nothing.
+ None,
+}
+
+#[derive(Copy, Clone)]
+struct WhereClauseOption {
+ suppress_comma: bool, // Force no trailing comma
+ snuggle: WhereClauseSpace,
+ allow_single_line: bool, // Try single line where-clause instead of vertical layout
+ veto_single_line: bool, // Disallow a single-line where-clause.
+}
+
+impl WhereClauseOption {
+ fn new(suppress_comma: bool, snuggle: WhereClauseSpace) -> WhereClauseOption {
+ WhereClauseOption {
+ suppress_comma,
+ snuggle,
+ allow_single_line: false,
+ veto_single_line: false,
+ }
+ }
+
+ fn snuggled(current: &str) -> WhereClauseOption {
+ WhereClauseOption {
+ suppress_comma: false,
+ snuggle: if last_line_width(current) == 1 {
+ WhereClauseSpace::Space
+ } else {
+ WhereClauseSpace::Newline
+ },
+ allow_single_line: false,
+ veto_single_line: false,
+ }
+ }
+
+ fn suppress_comma(&mut self) {
+ self.suppress_comma = true
+ }
+
+ fn allow_single_line(&mut self) {
+ self.allow_single_line = true
+ }
+
+ fn snuggle(&mut self) {
+ self.snuggle = WhereClauseSpace::Space
+ }
+
+ fn veto_single_line(&mut self) {
+ self.veto_single_line = true;
+ }
+}
+
+fn rewrite_params(
+ context: &RewriteContext<'_>,
+ params: &[ast::Param],
+ one_line_budget: usize,
+ multi_line_budget: usize,
+ indent: Indent,
+ param_indent: Indent,
+ span: Span,
+ variadic: bool,
+) -> Option<String> {
+ if params.is_empty() {
+ let comment = context
+ .snippet(mk_sp(
+ span.lo(),
+ // to remove ')'
+ span.hi() - BytePos(1),
+ ))
+ .trim();
+ return Some(comment.to_owned());
+ }
+ let param_items: Vec<_> = itemize_list(
+ context.snippet_provider,
+ params.iter(),
+ ")",
+ ",",
+ |param| span_lo_for_param(param),
+ |param| param.ty.span.hi(),
+ |param| {
+ param
+ .rewrite(context, Shape::legacy(multi_line_budget, param_indent))
+ .or_else(|| Some(context.snippet(param.span()).to_owned()))
+ },
+ span.lo(),
+ span.hi(),
+ false,
+ )
+ .collect();
+
+ let tactic = definitive_tactic(
+ &param_items,
+ context
+ .config
+ .fn_args_layout()
+ .to_list_tactic(param_items.len()),
+ Separator::Comma,
+ one_line_budget,
+ );
+ let budget = match tactic {
+ DefinitiveListTactic::Horizontal => one_line_budget,
+ _ => multi_line_budget,
+ };
+ let indent = match context.config.indent_style() {
+ IndentStyle::Block => indent.block_indent(context.config),
+ IndentStyle::Visual => param_indent,
+ };
+ let trailing_separator = if variadic {
+ SeparatorTactic::Never
+ } else {
+ match context.config.indent_style() {
+ IndentStyle::Block => context.config.trailing_comma(),
+ IndentStyle::Visual => SeparatorTactic::Never,
+ }
+ };
+ let fmt = ListFormatting::new(Shape::legacy(budget, indent), context.config)
+ .tactic(tactic)
+ .trailing_separator(trailing_separator)
+ .ends_with_newline(tactic.ends_with_newline(context.config.indent_style()))
+ .preserve_newline(true);
+ write_list(&param_items, &fmt)
+}
+
+fn compute_budgets_for_params(
+ context: &RewriteContext<'_>,
+ result: &str,
+ indent: Indent,
+ ret_str_len: usize,
+ fn_brace_style: FnBraceStyle,
+ force_vertical_layout: bool,
+) -> Option<(usize, usize, Indent)> {
+ debug!(
+ "compute_budgets_for_params {} {:?}, {}, {:?}",
+ result.len(),
+ indent,
+ ret_str_len,
+ fn_brace_style,
+ );
+ // Try keeping everything on the same line.
+ if !result.contains('\n') && !force_vertical_layout {
+ // 2 = `()`, 3 = `() `, space is before ret_string.
+ let overhead = if ret_str_len == 0 { 2 } else { 3 };
+ let mut used_space = indent.width() + result.len() + ret_str_len + overhead;
+ match fn_brace_style {
+ FnBraceStyle::None => used_space += 1, // 1 = `;`
+ FnBraceStyle::SameLine => used_space += 2, // 2 = `{}`
+ FnBraceStyle::NextLine => (),
+ }
+ let one_line_budget = context.budget(used_space);
+
+ if one_line_budget > 0 {
+ // 4 = "() {".len()
+ let (indent, multi_line_budget) = match context.config.indent_style() {
+ IndentStyle::Block => {
+ let indent = indent.block_indent(context.config);
+ (indent, context.budget(indent.width() + 1))
+ }
+ IndentStyle::Visual => {
+ let indent = indent + result.len() + 1;
+ let multi_line_overhead = match fn_brace_style {
+ FnBraceStyle::SameLine => 4,
+ _ => 2,
+ } + indent.width();
+ (indent, context.budget(multi_line_overhead))
+ }
+ };
+
+ return Some((one_line_budget, multi_line_budget, indent));
+ }
+ }
+
+ // Didn't work; we must force a vertical layout and put the params on a new line.
+ let new_indent = indent.block_indent(context.config);
+ let used_space = match context.config.indent_style() {
+ // 1 = `,`
+ IndentStyle::Block => new_indent.width() + 1,
+ // Account for `)` and possibly ` {`.
+ IndentStyle::Visual => new_indent.width() + if ret_str_len == 0 { 1 } else { 3 },
+ };
+ Some((0, context.budget(used_space), new_indent))
+}
+
+fn newline_for_brace(config: &Config, where_clause: &ast::WhereClause) -> FnBraceStyle {
+ let predicate_count = where_clause.predicates.len();
+
+ if config.where_single_line() && predicate_count == 1 {
+ return FnBraceStyle::SameLine;
+ }
+ let brace_style = config.brace_style();
+
+ let use_next_line = brace_style == BraceStyle::AlwaysNextLine
+ || (brace_style == BraceStyle::SameLineWhere && predicate_count > 0);
+ if use_next_line {
+ FnBraceStyle::NextLine
+ } else {
+ FnBraceStyle::SameLine
+ }
+}
+
+fn rewrite_generics(
+ context: &RewriteContext<'_>,
+ ident: &str,
+ generics: &ast::Generics,
+ shape: Shape,
+) -> Option<String> {
+ // FIXME: convert bounds to where-clauses where they get too big or if
+ // there is a where-clause at all.
+
+ if generics.params.is_empty() {
+ return Some(ident.to_owned());
+ }
+
+ let params = generics.params.iter();
+ overflow::rewrite_with_angle_brackets(context, ident, params, shape, generics.span)
+}
+
+fn generics_shape_from_config(config: &Config, shape: Shape, offset: usize) -> Option<Shape> {
+ match config.indent_style() {
+ IndentStyle::Visual => shape.visual_indent(1 + offset).sub_width(offset + 2),
+ IndentStyle::Block => {
+ // 1 = ","
+ shape
+ .block()
+ .block_indent(config.tab_spaces())
+ .with_max_width(config)
+ .sub_width(1)
+ }
+ }
+}
+
+fn rewrite_where_clause_rfc_style(
+ context: &RewriteContext<'_>,
+ predicates: &[ast::WherePredicate],
+ where_span: Span,
+ shape: Shape,
+ terminator: &str,
+ span_end: Option<BytePos>,
+ span_end_before_where: BytePos,
+ where_clause_option: WhereClauseOption,
+) -> Option<String> {
+ let (where_keyword, allow_single_line) = rewrite_where_keyword(
+ context,
+ predicates,
+ where_span,
+ shape,
+ span_end_before_where,
+ where_clause_option,
+ )?;
+
+ // 1 = `,`
+ let clause_shape = shape
+ .block()
+ .with_max_width(context.config)
+ .block_left(context.config.tab_spaces())?
+ .sub_width(1)?;
+ let force_single_line = context.config.where_single_line()
+ && predicates.len() == 1
+ && !where_clause_option.veto_single_line;
+
+ let preds_str = rewrite_bounds_on_where_clause(
+ context,
+ predicates,
+ clause_shape,
+ terminator,
+ span_end,
+ where_clause_option,
+ force_single_line,
+ )?;
+
+ // 6 = `where `
+ let clause_sep =
+ if allow_single_line && !preds_str.contains('\n') && 6 + preds_str.len() <= shape.width
+ || force_single_line
+ {
+ Cow::from(" ")
+ } else {
+ clause_shape.indent.to_string_with_newline(context.config)
+ };
+
+ Some(format!("{}{}{}", where_keyword, clause_sep, preds_str))
+}
+
+/// Rewrite the `where` keyword and any comments around it.
+fn rewrite_where_keyword(
+ context: &RewriteContext<'_>,
+ predicates: &[ast::WherePredicate],
+ where_span: Span,
+ shape: Shape,
+ span_end_before_where: BytePos,
+ where_clause_option: WhereClauseOption,
+) -> Option<(String, bool)> {
+ let block_shape = shape.block().with_max_width(context.config);
+ // 1 = `,`
+ let clause_shape = block_shape
+ .block_left(context.config.tab_spaces())?
+ .sub_width(1)?;
+
+ let comment_separator = |comment: &str, shape: Shape| {
+ if comment.is_empty() {
+ Cow::from("")
+ } else {
+ shape.indent.to_string_with_newline(context.config)
+ }
+ };
+
+ let (span_before, span_after) =
+ missing_span_before_after_where(span_end_before_where, predicates, where_span);
+ let (comment_before, comment_after) =
+ rewrite_comments_before_after_where(context, span_before, span_after, shape)?;
+
+ let starting_newline = match where_clause_option.snuggle {
+ WhereClauseSpace::Space if comment_before.is_empty() => Cow::from(" "),
+ WhereClauseSpace::None => Cow::from(""),
+ _ => block_shape.indent.to_string_with_newline(context.config),
+ };
+
+ let newline_before_where = comment_separator(&comment_before, shape);
+ let newline_after_where = comment_separator(&comment_after, clause_shape);
+ let result = format!(
+ "{}{}{}where{}{}",
+ starting_newline, comment_before, newline_before_where, newline_after_where, comment_after
+ );
+ let allow_single_line = where_clause_option.allow_single_line
+ && comment_before.is_empty()
+ && comment_after.is_empty();
+
+ Some((result, allow_single_line))
+}
+
+/// Rewrite bounds on a where clause.
+fn rewrite_bounds_on_where_clause(
+ context: &RewriteContext<'_>,
+ predicates: &[ast::WherePredicate],
+ shape: Shape,
+ terminator: &str,
+ span_end: Option<BytePos>,
+ where_clause_option: WhereClauseOption,
+ force_single_line: bool,
+) -> Option<String> {
+ let span_start = predicates[0].span().lo();
+ // If we don't have the start of the next span, then use the end of the
+ // predicates, but that means we miss comments.
+ let len = predicates.len();
+ let end_of_preds = predicates[len - 1].span().hi();
+ let span_end = span_end.unwrap_or(end_of_preds);
+ let items = itemize_list(
+ context.snippet_provider,
+ predicates.iter(),
+ terminator,
+ ",",
+ |pred| pred.span().lo(),
+ |pred| pred.span().hi(),
+ |pred| pred.rewrite(context, shape),
+ span_start,
+ span_end,
+ false,
+ );
+ let comma_tactic = if where_clause_option.suppress_comma || force_single_line {
+ SeparatorTactic::Never
+ } else {
+ context.config.trailing_comma()
+ };
+
+ // The list layout should be horizontal if and only if the `force_single_line` option is
+ // enabled, i.e. `where_single_line` is set and the where-clause has exactly one predicate;
+ // otherwise the predicates are laid out vertically.
+ let shape_tactic = if force_single_line {
+ DefinitiveListTactic::Horizontal
+ } else {
+ DefinitiveListTactic::Vertical
+ };
+
+ let fmt = ListFormatting::new(shape, context.config)
+ .tactic(shape_tactic)
+ .trailing_separator(comma_tactic)
+ .preserve_newline(true);
+ write_list(&items.collect::<Vec<_>>(), &fmt)
+}
+
+fn rewrite_where_clause(
+ context: &RewriteContext<'_>,
+ predicates: &[ast::WherePredicate],
+ where_span: Span,
+ brace_style: BraceStyle,
+ shape: Shape,
+ on_new_line: bool,
+ terminator: &str,
+ span_end: Option<BytePos>,
+ span_end_before_where: BytePos,
+ where_clause_option: WhereClauseOption,
+) -> Option<String> {
+ if predicates.is_empty() {
+ return Some(String::new());
+ }
+
+ if context.config.indent_style() == IndentStyle::Block {
+ return rewrite_where_clause_rfc_style(
+ context,
+ predicates,
+ where_span,
+ shape,
+ terminator,
+ span_end,
+ span_end_before_where,
+ where_clause_option,
+ );
+ }
+
+ let extra_indent = Indent::new(context.config.tab_spaces(), 0);
+
+ let offset = match context.config.indent_style() {
+ IndentStyle::Block => shape.indent + extra_indent.block_indent(context.config),
+ // 6 = "where ".len()
+ IndentStyle::Visual => shape.indent + extra_indent + 6,
+ };
+ // FIXME: if indent_style != Visual, then the budgets below might
+ // be out by a char or two.
+
+ let budget = context.config.max_width() - offset.width();
+ let span_start = predicates[0].span().lo();
+ // If we don't have the start of the next span, then use the end of the
+ // predicates, but that means we miss comments.
+ let len = predicates.len();
+ let end_of_preds = predicates[len - 1].span().hi();
+ let span_end = span_end.unwrap_or(end_of_preds);
+ let items = itemize_list(
+ context.snippet_provider,
+ predicates.iter(),
+ terminator,
+ ",",
+ |pred| pred.span().lo(),
+ |pred| pred.span().hi(),
+ |pred| pred.rewrite(context, Shape::legacy(budget, offset)),
+ span_start,
+ span_end,
+ false,
+ );
+ let item_vec = items.collect::<Vec<_>>();
+ // FIXME: we don't need to collect here
+ let tactic = definitive_tactic(&item_vec, ListTactic::Vertical, Separator::Comma, budget);
+
+ let mut comma_tactic = context.config.trailing_comma();
+ // Kind of a hack because we don't usually have trailing commas in where-clauses.
+ if comma_tactic == SeparatorTactic::Vertical || where_clause_option.suppress_comma {
+ comma_tactic = SeparatorTactic::Never;
+ }
+
+ let fmt = ListFormatting::new(Shape::legacy(budget, offset), context.config)
+ .tactic(tactic)
+ .trailing_separator(comma_tactic)
+ .ends_with_newline(tactic.ends_with_newline(context.config.indent_style()))
+ .preserve_newline(true);
+ let preds_str = write_list(&item_vec, &fmt)?;
+
+ let end_length = if terminator == "{" {
+ // If the brace is on the next line, we don't need to count it; otherwise it needs
+ // two characters: " {".
+ match brace_style {
+ BraceStyle::AlwaysNextLine | BraceStyle::SameLineWhere => 0,
+ BraceStyle::PreferSameLine => 2,
+ }
+ } else if terminator == "=" {
+ 2
+ } else {
+ terminator.len()
+ };
+ if on_new_line
+ || preds_str.contains('\n')
+ || shape.indent.width() + " where ".len() + preds_str.len() + end_length > shape.width
+ {
+ Some(format!(
+ "\n{}where {}",
+ (shape.indent + extra_indent).to_string(context.config),
+ preds_str
+ ))
+ } else {
+ Some(format!(" where {}", preds_str))
+ }
+}
+
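+/// Computes the spans of source text just before the `where` keyword and between the
+/// keyword and the first predicate; these spans may contain comments to preserve.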
+fn missing_span_before_after_where(
+ before_item_span_end: BytePos,
+ predicates: &[ast::WherePredicate],
+ where_span: Span,
+) -> (Span, Span) {
+ let missing_span_before = mk_sp(before_item_span_end, where_span.lo());
+ // 5 = `where`
+ let pos_after_where = where_span.lo() + BytePos(5);
+ let missing_span_after = mk_sp(pos_after_where, predicates[0].span().lo());
+ (missing_span_before, missing_span_after)
+}
+
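+/// Rewrites the comments found just before and just after the `where` keyword.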
+fn rewrite_comments_before_after_where(
+ context: &RewriteContext<'_>,
+ span_before_where: Span,
+ span_after_where: Span,
+ shape: Shape,
+) -> Option<(String, String)> {
+ let before_comment = rewrite_missing_comment(span_before_where, shape, context)?;
+ let after_comment = rewrite_missing_comment(
+ span_after_where,
+ shape.block_indent(context.config.tab_spaces()),
+ context,
+ )?;
+ Some((before_comment, after_comment))
+}
+
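+/// Formats the beginning of an item header, e.g. `pub struct Foo`, preserving any comment
+/// between the visibility and the item keyword.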
+fn format_header(
+ context: &RewriteContext<'_>,
+ item_name: &str,
+ ident: symbol::Ident,
+ vis: &ast::Visibility,
+ offset: Indent,
+) -> String {
+ let mut result = String::with_capacity(128);
+ let shape = Shape::indented(offset, context.config);
+
+ result.push_str(format_visibility(context, vis).trim());
+
+ // Check for a missing comment between the visibility and the item name.
+ let after_vis = vis.span.hi();
+ if let Some(before_item_name) = context
+ .snippet_provider
+ .opt_span_before(mk_sp(vis.span.lo(), ident.span.hi()), item_name.trim())
+ {
+ let missing_span = mk_sp(after_vis, before_item_name);
+ if let Some(result_with_comment) = combine_strs_with_missing_comments(
+ context,
+ &result,
+ item_name,
+ missing_span,
+ shape,
+ /* allow_extend */ true,
+ ) {
+ result = result_with_comment;
+ }
+ }
+
+ result.push_str(rewrite_ident(context, ident));
+
+ result
+}
+
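+/// How the opening brace should be handled when formatting generics: omitted entirely,
+/// placed heuristically, or forced onto the same line.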
+#[derive(PartialEq, Eq, Clone, Copy)]
+enum BracePos {
+ None,
+ Auto,
+ ForceSameLine,
+}
+
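+/// Formats the generic parameters of an item, followed by its where-clause (if any), any
+/// missing comments, and the opening brace when one is requested via `brace_pos`.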
+fn format_generics(
+ context: &RewriteContext<'_>,
+ generics: &ast::Generics,
+ brace_style: BraceStyle,
+ brace_pos: BracePos,
+ offset: Indent,
+ span: Span,
+ used_width: usize,
+) -> Option<String> {
+ let shape = Shape::legacy(context.budget(used_width + offset.width()), offset);
+ let mut result = rewrite_generics(context, "", generics, shape)?;
+
+ // If the generics are not parameterized then generics.span.hi() == 0,
+ // so we use span.lo(), which is the position after `struct Foo`.
+ let span_end_before_where = if !generics.params.is_empty() {
+ generics.span.hi()
+ } else {
+ span.lo()
+ };
+ let (same_line_brace, missed_comments) = if !generics.where_clause.predicates.is_empty() {
+ let budget = context.budget(last_line_used_width(&result, offset.width()));
+ let mut option = WhereClauseOption::snuggled(&result);
+ if brace_pos == BracePos::None {
+ option.suppress_comma = true;
+ }
+ let where_clause_str = rewrite_where_clause(
+ context,
+ &generics.where_clause.predicates,
+ generics.where_clause.span,
+ brace_style,
+ Shape::legacy(budget, offset.block_only()),
+ true,
+ "{",
+ Some(span.hi()),
+ span_end_before_where,
+ option,
+ )?;
+ result.push_str(&where_clause_str);
+ (
+ brace_pos == BracePos::ForceSameLine || brace_style == BraceStyle::PreferSameLine,
+ // Missed comments are taken care of in `rewrite_where_clause`.
+ None,
+ )
+ } else {
+ (
+ brace_pos == BracePos::ForceSameLine
+ || (result.contains('\n') && brace_style == BraceStyle::PreferSameLine
+ || brace_style != BraceStyle::AlwaysNextLine)
+ || trimmed_last_line_width(&result) == 1,
+ rewrite_missing_comment(
+ mk_sp(
+ span_end_before_where,
+ if brace_pos == BracePos::None {
+ span.hi()
+ } else {
+ context.snippet_provider.span_before(span, "{")
+ },
+ ),
+ shape,
+ context,
+ ),
+ )
+ };
+ // add missing comments
+ let missed_line_comments = missed_comments
+ .filter(|missed_comments| !missed_comments.is_empty())
+ .map_or(false, |missed_comments| {
+ let is_block = is_last_comment_block(&missed_comments);
+ let sep = if is_block { " " } else { "\n" };
+ result.push_str(sep);
+ result.push_str(&missed_comments);
+ !is_block
+ });
+ if brace_pos == BracePos::None {
+ return Some(result);
+ }
+ let total_used_width = last_line_used_width(&result, used_width);
+ let remaining_budget = context.budget(total_used_width);
+ // If the same-line brace is forced, it indicates that we are rewriting an item with an
+ // empty body, and hence we take the closer into account as well for the one-line budget.
+ // We assume that the closer has the same length as the opener.
+ let overhead = if brace_pos == BracePos::ForceSameLine {
+ // 3 = ` {}`
+ 3
+ } else {
+ // 2 = ` {`
+ 2
+ };
+ let forbid_same_line_brace = missed_line_comments || overhead > remaining_budget;
+ if !forbid_same_line_brace && same_line_brace {
+ result.push(' ');
+ } else {
+ result.push('\n');
+ result.push_str(&offset.block_only().to_string(context.config));
+ }
+ result.push('{');
+
+ Some(result)
+}
+
+impl Rewrite for ast::ForeignItem {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let attrs_str = self.attrs.rewrite(context, shape)?;
+ // Drop the semicolon or it will be interpreted as a comment.
+ // FIXME: this may be a faulty span from libsyntax.
+ let span = mk_sp(self.span.lo(), self.span.hi() - BytePos(1));
+
+ let item_str = match self.kind {
+ ast::ForeignItemKind::Fn(ref fn_kind) => {
+ let ast::Fn {
+ defaultness,
+ ref sig,
+ ref generics,
+ ref body,
+ } = **fn_kind;
+ if let Some(ref body) = body {
+ let mut visitor = FmtVisitor::from_context(context);
+ visitor.block_indent = shape.indent;
+ visitor.last_pos = self.span.lo();
+ let inner_attrs = inner_attributes(&self.attrs);
+ let fn_ctxt = visit::FnCtxt::Foreign;
+ visitor.visit_fn(
+ visit::FnKind::Fn(
+ fn_ctxt,
+ self.ident,
+ sig,
+ &self.vis,
+ generics,
+ Some(body),
+ ),
+ &sig.decl,
+ self.span,
+ defaultness,
+ Some(&inner_attrs),
+ );
+ Some(visitor.buffer.to_owned())
+ } else {
+ rewrite_fn_base(
+ context,
+ shape.indent,
+ self.ident,
+ &FnSig::from_method_sig(sig, generics, &self.vis),
+ span,
+ FnBraceStyle::None,
+ )
+ .map(|(s, _, _)| format!("{};", s))
+ }
+ }
+ ast::ForeignItemKind::Static(ref ty, mutability, _) => {
+ // FIXME(#21): we're dropping potential comments in between the
+ // function keyword here.
+ let vis = format_visibility(context, &self.vis);
+ let mut_str = format_mutability(mutability);
+ let prefix = format!(
+ "{}static {}{}:",
+ vis,
+ mut_str,
+ rewrite_ident(context, self.ident)
+ );
+ // 1 = `;`
+ rewrite_assign_rhs(
+ context,
+ prefix,
+ &**ty,
+ &RhsAssignKind::Ty,
+ shape.sub_width(1)?,
+ )
+ .map(|s| s + ";")
+ }
+ ast::ForeignItemKind::TyAlias(ref ty_alias) => {
+ let (kind, span) = (&ItemVisitorKind::ForeignItem(self), self.span);
+ rewrite_type_alias(ty_alias, context, shape.indent, kind, span)
+ }
+ ast::ForeignItemKind::MacCall(ref mac) => {
+ rewrite_macro(mac, None, context, shape, MacroPosition::Item)
+ }
+ }?;
+
+ let missing_span = if self.attrs.is_empty() {
+ mk_sp(self.span.lo(), self.span.lo())
+ } else {
+ mk_sp(self.attrs[self.attrs.len() - 1].span.hi(), self.span.lo())
+ };
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ &item_str,
+ missing_span,
+ shape,
+ false,
+ )
+ }
+}
+
+/// Rewrite the attributes of an item.
+fn rewrite_attrs(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ item_str: &str,
+ shape: Shape,
+) -> Option<String> {
+ let attrs = filter_inline_attrs(&item.attrs, item.span());
+ let attrs_str = attrs.rewrite(context, shape)?;
+
+ let missed_span = if attrs.is_empty() {
+ mk_sp(item.span.lo(), item.span.lo())
+ } else {
+ mk_sp(attrs[attrs.len() - 1].span.hi(), item.span.lo())
+ };
+
+ let allow_extend = if attrs.len() == 1 {
+ let line_len = attrs_str.len() + 1 + item_str.len();
+ !attrs.first().unwrap().is_doc_comment()
+ && context.config.inline_attribute_width() >= line_len
+ } else {
+ false
+ };
+
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ item_str,
+ missed_span,
+ shape,
+ allow_extend,
+ )
+}
+
+/// Rewrite an inline mod.
+/// The given shape is used to format the mod's attributes.
+pub(crate) fn rewrite_mod(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ attrs_shape: Shape,
+) -> Option<String> {
+ let mut result = String::with_capacity(32);
+ result.push_str(&*format_visibility(context, &item.vis));
+ result.push_str("mod ");
+ result.push_str(rewrite_ident(context, item.ident));
+ result.push(';');
+ rewrite_attrs(context, item, &result, attrs_shape)
+}
+
+/// Rewrite `extern crate foo;`.
+/// The given shape is used to format the extern crate's attributes.
+pub(crate) fn rewrite_extern_crate(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ attrs_shape: Shape,
+) -> Option<String> {
+ assert!(is_extern_crate(item));
+ let new_str = context.snippet(item.span);
+ let item_str = if contains_comment(new_str) {
+ new_str.to_owned()
+ } else {
+ let no_whitespace = &new_str.split_whitespace().collect::<Vec<&str>>().join(" ");
+ String::from(&*Regex::new(r"\s;").unwrap().replace(no_whitespace, ";"))
+ };
+ rewrite_attrs(context, item, &item_str, attrs_shape)
+}
+
+/// Returns `true` for `mod foo;` and `false` for `mod foo { .. }`.
+pub(crate) fn is_mod_decl(item: &ast::Item) -> bool {
+ !matches!(
+ item.kind,
+ ast::ItemKind::Mod(_, ast::ModKind::Loaded(_, ast::Inline::Yes, _))
+ )
+}
+
+pub(crate) fn is_use_item(item: &ast::Item) -> bool {
+ matches!(item.kind, ast::ItemKind::Use(_))
+}
+
+pub(crate) fn is_extern_crate(item: &ast::Item) -> bool {
+ matches!(item.kind, ast::ItemKind::ExternCrate(..))
+}
diff --git a/src/tools/rustfmt/src/lib.rs b/src/tools/rustfmt/src/lib.rs
new file mode 100644
index 000000000..1d1ef525f
--- /dev/null
+++ b/src/tools/rustfmt/src/lib.rs
@@ -0,0 +1,658 @@
+#![feature(rustc_private)]
+#![deny(rust_2018_idioms)]
+#![warn(unreachable_pub)]
+#![recursion_limit = "256"]
+#![allow(clippy::match_like_matches_macro)]
+#![allow(unreachable_pub)]
+
+#[macro_use]
+extern crate derive_new;
+#[cfg(test)]
+#[macro_use]
+extern crate lazy_static;
+#[macro_use]
+extern crate log;
+
+// N.B. these crates are loaded from the sysroot, so they need extern crate.
+extern crate rustc_ast;
+extern crate rustc_ast_pretty;
+extern crate rustc_builtin_macros;
+extern crate rustc_data_structures;
+extern crate rustc_errors;
+extern crate rustc_expand;
+extern crate rustc_parse;
+extern crate rustc_session;
+extern crate rustc_span;
+
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::fmt;
+use std::io::{self, Write};
+use std::mem;
+use std::panic;
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use rustc_ast::ast;
+use rustc_span::symbol;
+use thiserror::Error;
+
+use crate::comment::LineClasses;
+use crate::emitter::Emitter;
+use crate::formatting::{FormatErrorMap, FormattingError, ReportedErrors, SourceFile};
+use crate::modules::ModuleResolutionError;
+use crate::parse::parser::DirectoryOwnership;
+use crate::shape::Indent;
+use crate::utils::indent_next_line;
+
+pub use crate::config::{
+ load_config, CliOptions, Color, Config, Edition, EmitMode, FileLines, FileName, NewlineStyle,
+ Range, Verbosity,
+};
+
+pub use crate::format_report_formatter::{FormatReportFormatter, FormatReportFormatterBuilder};
+
+pub use crate::rustfmt_diff::{ModifiedChunk, ModifiedLines};
+
+#[macro_use]
+mod utils;
+
+mod attr;
+mod chains;
+mod closures;
+mod comment;
+pub(crate) mod config;
+mod coverage;
+mod emitter;
+mod expr;
+mod format_report_formatter;
+pub(crate) mod formatting;
+mod ignore_path;
+mod imports;
+mod items;
+mod lists;
+mod macros;
+mod matches;
+mod missed_spans;
+pub(crate) mod modules;
+mod overflow;
+mod pairs;
+mod parse;
+mod patterns;
+mod release_channel;
+mod reorder;
+mod rewrite;
+pub(crate) mod rustfmt_diff;
+mod shape;
+mod skip;
+pub(crate) mod source_file;
+pub(crate) mod source_map;
+mod spanned;
+mod stmt;
+mod string;
+#[cfg(test)]
+mod test;
+mod types;
+mod vertical;
+pub(crate) mod visitor;
+
+/// The various errors that can occur during formatting. Note that not all of
+/// these can currently be propagated to clients.
+#[derive(Error, Debug)]
+pub enum ErrorKind {
+ /// Line has exceeded character limit (found, maximum).
+ #[error(
+ "line formatted, but exceeded maximum width \
+ (maximum: {1} (see `max_width` option), found: {0})"
+ )]
+ LineOverflow(usize, usize),
+ /// Line ends in whitespace.
+ #[error("left behind trailing whitespace")]
+ TrailingWhitespace,
+ /// Used deprecated skip attribute.
+ #[error("`rustfmt_skip` is deprecated; use `rustfmt::skip`")]
+ DeprecatedAttr,
+ /// Used a rustfmt:: attribute other than skip or skip::macros.
+ #[error("invalid attribute")]
+ BadAttr,
+ /// An io error during reading or writing.
+ #[error("io error: {0}")]
+ IoError(io::Error),
+ /// Error during module resolution.
+ #[error("{0}")]
+ ModuleResolutionError(#[from] ModuleResolutionError),
+ /// Parse error occurred when parsing the input.
+ #[error("parse error")]
+ ParseError,
+ /// The user mandated a version and the current version of Rustfmt does not
+ /// satisfy that requirement.
+ #[error("version mismatch")]
+ VersionMismatch,
+ /// If we had formatted the given node, then we would have lost a comment.
+ #[error("not formatted because a comment would be lost")]
+ LostComment,
+ /// Invalid glob pattern in `ignore` configuration option.
+ #[error("Invalid glob pattern found in ignore list: {0}")]
+ InvalidGlobPattern(ignore::Error),
+}
+
+impl ErrorKind {
+ fn is_comment(&self) -> bool {
+ matches!(self, ErrorKind::LostComment)
+ }
+}
+
+impl From<io::Error> for ErrorKind {
+ fn from(e: io::Error) -> ErrorKind {
+ ErrorKind::IoError(e)
+ }
+}
+
+/// Result of formatting a snippet of code along with ranges of lines that didn't get formatted,
+/// i.e., that got returned as they were originally.
+#[derive(Debug)]
+struct FormattedSnippet {
+ snippet: String,
+ non_formatted_ranges: Vec<(usize, usize)>,
+}
+
+impl FormattedSnippet {
+ /// In case the snippet needed to be wrapped in a function, this shifts down the ranges of
+ /// non-formatted code.
+ fn unwrap_code_block(&mut self) {
+ self.non_formatted_ranges
+ .iter_mut()
+ .for_each(|(low, high)| {
+ *low -= 1;
+ *high -= 1;
+ });
+ }
+
+ /// Returns `true` if line `n` did not get formatted.
+ fn is_line_non_formatted(&self, n: usize) -> bool {
+ self.non_formatted_ranges
+ .iter()
+ .any(|(low, high)| *low <= n && n <= *high)
+ }
+}
+
+/// Reports on any issues that occurred during a run of Rustfmt.
+///
+/// Can be reported to the user using the `Display` impl on [`FormatReportFormatter`].
+#[derive(Clone)]
+pub struct FormatReport {
+ // Maps stringified file paths to their associated formatting errors.
+ internal: Rc<RefCell<(FormatErrorMap, ReportedErrors)>>,
+ non_formatted_ranges: Vec<(usize, usize)>,
+}
+
+impl FormatReport {
+ fn new() -> FormatReport {
+ FormatReport {
+ internal: Rc::new(RefCell::new((HashMap::new(), ReportedErrors::default()))),
+ non_formatted_ranges: Vec::new(),
+ }
+ }
+
+ fn add_non_formatted_ranges(&mut self, mut ranges: Vec<(usize, usize)>) {
+ self.non_formatted_ranges.append(&mut ranges);
+ }
+
+ fn append(&self, f: FileName, mut v: Vec<FormattingError>) {
+ self.track_errors(&v);
+ self.internal
+ .borrow_mut()
+ .0
+ .entry(f)
+ .and_modify(|fe| fe.append(&mut v))
+ .or_insert(v);
+ }
+
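+ // Updates the aggregated error flags according to the kinds of the newly reported errors.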
+ fn track_errors(&self, new_errors: &[FormattingError]) {
+ let errs = &mut self.internal.borrow_mut().1;
+ if !new_errors.is_empty() {
+ errs.has_formatting_errors = true;
+ }
+ if errs.has_operational_errors && errs.has_check_errors && errs.has_unformatted_code_errors
+ {
+ return;
+ }
+ for err in new_errors {
+ match err.kind {
+ ErrorKind::LineOverflow(..) => {
+ errs.has_operational_errors = true;
+ }
+ ErrorKind::TrailingWhitespace => {
+ errs.has_operational_errors = true;
+ errs.has_unformatted_code_errors = true;
+ }
+ ErrorKind::LostComment => {
+ errs.has_unformatted_code_errors = true;
+ }
+ ErrorKind::DeprecatedAttr | ErrorKind::BadAttr | ErrorKind::VersionMismatch => {
+ errs.has_check_errors = true;
+ }
+ _ => {}
+ }
+ }
+ }
+
+ fn add_diff(&mut self) {
+ self.internal.borrow_mut().1.has_diff = true;
+ }
+
+ fn add_macro_format_failure(&mut self) {
+ self.internal.borrow_mut().1.has_macro_format_failure = true;
+ }
+
+ fn add_parsing_error(&mut self) {
+ self.internal.borrow_mut().1.has_parsing_errors = true;
+ }
+
+ fn warning_count(&self) -> usize {
+ self.internal
+ .borrow()
+ .0
+ .iter()
+ .map(|(_, errors)| errors.len())
+ .sum()
+ }
+
+ /// Whether any warnings or errors are present in the report.
+ pub fn has_warnings(&self) -> bool {
+ self.internal.borrow().1.has_formatting_errors
+ }
+
+ /// Print the report to a terminal using colours and potentially other
+ /// fancy output.
+ #[deprecated(note = "Use FormatReportFormatter with colors enabled instead")]
+ pub fn fancy_print(
+ &self,
+ mut t: Box<dyn term::Terminal<Output = io::Stderr>>,
+ ) -> Result<(), term::Error> {
+ writeln!(
+ t,
+ "{}",
+ FormatReportFormatterBuilder::new(self)
+ .enable_colors(true)
+ .build()
+ )?;
+ Ok(())
+ }
+}
+
+/// Deprecated - Use FormatReportFormatter instead
+// https://github.com/rust-lang/rust/issues/78625
+// https://github.com/rust-lang/rust/issues/39935
+impl fmt::Display for FormatReport {
+ // Prints all the formatting errors.
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ write!(fmt, "{}", FormatReportFormatterBuilder::new(self).build())?;
+ Ok(())
+ }
+}
+
+/// Format the given snippet. The snippet is expected to be *complete* code.
+/// When we cannot parse the given snippet, this function returns `None`.
+fn format_snippet(snippet: &str, config: &Config, is_macro_def: bool) -> Option<FormattedSnippet> {
+ let mut config = config.clone();
+ panic::catch_unwind(|| {
+ let mut out: Vec<u8> = Vec::with_capacity(snippet.len() * 2);
+ config.set().emit_mode(config::EmitMode::Stdout);
+ config.set().verbose(Verbosity::Quiet);
+ config.set().hide_parse_errors(true);
+ if is_macro_def {
+ config.set().error_on_unformatted(true);
+ }
+
+ let (formatting_error, result) = {
+ let input = Input::Text(snippet.into());
+ let mut session = Session::new(config, Some(&mut out));
+ let result = session.format_input_inner(input, is_macro_def);
+ (
+ session.errors.has_macro_format_failure
+ || session.out.as_ref().unwrap().is_empty() && !snippet.is_empty()
+ || result.is_err()
+ || (is_macro_def && session.has_unformatted_code_errors()),
+ result,
+ )
+ };
+ if formatting_error {
+ None
+ } else {
+ String::from_utf8(out).ok().map(|snippet| FormattedSnippet {
+ snippet,
+ non_formatted_ranges: result.unwrap().non_formatted_ranges,
+ })
+ }
+ })
+ // Discard panics encountered while formatting the snippet.
+ // The `?` operator is needed to remove the extra `Option`.
+ .ok()?
+}
+
+/// Format the given code block. Mainly targeted at code blocks in comments.
+/// The code block may be incomplete (i.e., the parser may be unable to parse it).
+/// To avoid a panic in the parser, we wrap the code block in a dummy function.
+/// The returned code block does **not** end with a newline.
+fn format_code_block(
+ code_snippet: &str,
+ config: &Config,
+ is_macro_def: bool,
+) -> Option<FormattedSnippet> {
+ const FN_MAIN_PREFIX: &str = "fn main() {\n";
+
+ fn enclose_in_main_block(s: &str, config: &Config) -> String {
+ let indent = Indent::from_width(config, config.tab_spaces());
+ let mut result = String::with_capacity(s.len() * 2);
+ result.push_str(FN_MAIN_PREFIX);
+ let mut need_indent = true;
+ for (kind, line) in LineClasses::new(s) {
+ if need_indent {
+ result.push_str(&indent.to_string(config));
+ }
+ result.push_str(&line);
+ result.push('\n');
+ need_indent = indent_next_line(kind, &line, config);
+ }
+ result.push('}');
+ result
+ }
+
+ // Wrap the given code block with `fn main()` if it does not have one.
+ let snippet = enclose_in_main_block(code_snippet, config);
+ let mut result = String::with_capacity(snippet.len());
+ let mut is_first = true;
+
+ // While formatting the code, ignore the config's newline style setting and always use "\n"
+ // instead of "\r\n" for the newline characters. This is OK because the output here is not
+ // emitted directly by the rustfmt command, but used as input to the comment formatter.
+ // An output-file-wide "\n" ==> "\r\n" conversion is applied later if necessary.
+ let mut config_with_unix_newline = config.clone();
+ config_with_unix_newline
+ .set()
+ .newline_style(NewlineStyle::Unix);
+ let mut formatted = format_snippet(&snippet, &config_with_unix_newline, is_macro_def)?;
+ // Remove wrapping main block
+ formatted.unwrap_code_block();
+
+ // Trim "fn main() {" on the first line and "}" on the last line,
+ // then unindent the whole code block.
+ let block_len = formatted
+ .snippet
+ .rfind('}')
+ .unwrap_or_else(|| formatted.snippet.len());
+ let mut is_indented = true;
+ let indent_str = Indent::from_width(config, config.tab_spaces()).to_string(config);
+ for (kind, ref line) in LineClasses::new(&formatted.snippet[FN_MAIN_PREFIX.len()..block_len]) {
+ if !is_first {
+ result.push('\n');
+ } else {
+ is_first = false;
+ }
+ let trimmed_line = if !is_indented {
+ line
+ } else if line.len() > config.max_width() {
+ // If there are lines that are larger than the max width, we cannot tell
+ // whether we have succeeded but have some comments or strings that
+ // are too long, or whether we have failed to format the code block. We will be
+ // conservative and just return `None` in this case.
+ return None;
+ } else if line.len() > indent_str.len() {
+ // Make sure that the line has leading whitespaces.
+ if line.starts_with(indent_str.as_ref()) {
+ let offset = if config.hard_tabs() {
+ 1
+ } else {
+ config.tab_spaces()
+ };
+ &line[offset..]
+ } else {
+ line
+ }
+ } else {
+ line
+ };
+ result.push_str(trimmed_line);
+ is_indented = indent_next_line(kind, line, config);
+ }
+ Some(FormattedSnippet {
+ snippet: result,
+ non_formatted_ranges: formatted.non_formatted_ranges,
+ })
+}
+
+/// A session is a run of rustfmt across a single or multiple inputs.
+pub struct Session<'b, T: Write> {
+ pub config: Config,
+ pub out: Option<&'b mut T>,
+ pub(crate) errors: ReportedErrors,
+ source_file: SourceFile,
+ emitter: Box<dyn Emitter + 'b>,
+}
+
+impl<'b, T: Write + 'b> Session<'b, T> {
+ pub fn new(config: Config, mut out: Option<&'b mut T>) -> Session<'b, T> {
+ let emitter = create_emitter(&config);
+
+ if let Some(ref mut out) = out {
+ let _ = emitter.emit_header(out);
+ }
+
+ Session {
+ config,
+ out,
+ emitter,
+ errors: ReportedErrors::default(),
+ source_file: SourceFile::new(),
+ }
+ }
+
+ /// The main entry point for Rustfmt. Formats the given input according to the
+ /// given config. `out` is only necessary if required by the configuration.
+ pub fn format(&mut self, input: Input) -> Result<FormatReport, ErrorKind> {
+ self.format_input_inner(input, false)
+ }
+
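+ /// Runs `f` with `config` temporarily swapped in as the session's configuration, restoring
+ /// the original configuration afterwards.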
+ pub fn override_config<F, U>(&mut self, mut config: Config, f: F) -> U
+ where
+ F: FnOnce(&mut Session<'b, T>) -> U,
+ {
+ mem::swap(&mut config, &mut self.config);
+ let result = f(self);
+ mem::swap(&mut config, &mut self.config);
+ result
+ }
+
+ pub fn add_operational_error(&mut self) {
+ self.errors.has_operational_errors = true;
+ }
+
+ pub fn has_operational_errors(&self) -> bool {
+ self.errors.has_operational_errors
+ }
+
+ pub fn has_parsing_errors(&self) -> bool {
+ self.errors.has_parsing_errors
+ }
+
+ pub fn has_formatting_errors(&self) -> bool {
+ self.errors.has_formatting_errors
+ }
+
+ pub fn has_check_errors(&self) -> bool {
+ self.errors.has_check_errors
+ }
+
+ pub fn has_diff(&self) -> bool {
+ self.errors.has_diff
+ }
+
+ pub fn has_unformatted_code_errors(&self) -> bool {
+ self.errors.has_unformatted_code_errors
+ }
+
+ pub fn has_no_errors(&self) -> bool {
+ !(self.has_operational_errors()
+ || self.has_parsing_errors()
+ || self.has_formatting_errors()
+ || self.has_check_errors()
+ || self.has_diff()
+ || self.has_unformatted_code_errors()
+ || self.errors.has_macro_format_failure)
+ }
+}
+
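+/// Creates the emitter matching the configured emit mode (files, stdout, JSON, checkstyle,
+/// modified lines, or diff).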
+pub(crate) fn create_emitter<'a>(config: &Config) -> Box<dyn Emitter + 'a> {
+ match config.emit_mode() {
+ EmitMode::Files if config.make_backup() => {
+ Box::new(emitter::FilesWithBackupEmitter::default())
+ }
+ EmitMode::Files => Box::new(emitter::FilesEmitter::new(
+ config.print_misformatted_file_names(),
+ )),
+ EmitMode::Stdout | EmitMode::Coverage => {
+ Box::new(emitter::StdoutEmitter::new(config.verbose()))
+ }
+ EmitMode::Json => Box::new(emitter::JsonEmitter::default()),
+ EmitMode::ModifiedLines => Box::new(emitter::ModifiedLinesEmitter::default()),
+ EmitMode::Checkstyle => Box::new(emitter::CheckstyleEmitter::default()),
+ EmitMode::Diff => Box::new(emitter::DiffEmitter::new(config.clone())),
+ }
+}
+
+impl<'b, T: Write + 'b> Drop for Session<'b, T> {
+ fn drop(&mut self) {
+ if let Some(ref mut out) = self.out {
+ let _ = self.emitter.emit_footer(out);
+ }
+ }
+}
+
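+/// The input to a formatting session: either a file on disk or an in-memory string.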
+#[derive(Debug)]
+pub enum Input {
+ File(PathBuf),
+ Text(String),
+}
+
+impl Input {
+ fn file_name(&self) -> FileName {
+ match *self {
+ Input::File(ref file) => FileName::Real(file.clone()),
+ Input::Text(..) => FileName::Stdin,
+ }
+ }
+
+ fn to_directory_ownership(&self) -> Option<DirectoryOwnership> {
+ match self {
+ Input::File(ref file) => {
+ // If there exists a directory with the same name as an input,
+ // then the input should be parsed as a submodule.
+ let file_stem = file.file_stem()?;
+ if file.parent()?.to_path_buf().join(file_stem).is_dir() {
+ Some(DirectoryOwnership::Owned {
+ relative: file_stem.to_str().map(symbol::Ident::from_str),
+ })
+ } else {
+ None
+ }
+ }
+ _ => None,
+ }
+ }
+}
+
+#[cfg(test)]
+mod unit_tests {
+ use super::*;
+
+ #[test]
+ fn test_no_panic_on_format_snippet_and_format_code_block() {
+ // `format_snippet()` and `format_code_block()` should not panic
+ // even when we cannot parse the given snippet.
+ let snippet = "let";
+ assert!(format_snippet(snippet, &Config::default(), false).is_none());
+ assert!(format_code_block(snippet, &Config::default(), false).is_none());
+ }
+
+ fn test_format_inner<F>(formatter: F, input: &str, expected: &str) -> bool
+ where
+ F: Fn(&str, &Config, bool) -> Option<FormattedSnippet>,
+ {
+ let output = formatter(input, &Config::default(), false);
+ output.is_some() && output.unwrap().snippet == expected
+ }
+
+ #[test]
+ fn test_format_snippet() {
+ let snippet = "fn main() { println!(\"hello, world\"); }";
+ #[cfg(not(windows))]
+ let expected = "fn main() {\n \
+ println!(\"hello, world\");\n\
+ }\n";
+ #[cfg(windows)]
+ let expected = "fn main() {\r\n \
+ println!(\"hello, world\");\r\n\
+ }\r\n";
+ assert!(test_format_inner(format_snippet, snippet, expected));
+ }
+
+ #[test]
+ fn test_format_code_block_fail() {
+ #[rustfmt::skip]
+ let code_block = "this_line_is_100_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(x, y, z);";
+ assert!(format_code_block(code_block, &Config::default(), false).is_none());
+ }
+
+ #[test]
+ fn test_format_code_block() {
+ // simple code block
+ let code_block = "let x=3;";
+ let expected = "let x = 3;";
+ assert!(test_format_inner(format_code_block, code_block, expected));
+
+ // more complex code block, taken from chains.rs.
+ let code_block =
+"let (nested_shape, extend) = if !parent_rewrite_contains_newline && is_continuable(&parent) {
+(
+chain_indent(context, shape.add_offset(parent_rewrite.len())),
+context.config.indent_style() == IndentStyle::Visual || is_small_parent,
+)
+} else if is_block_expr(context, &parent, &parent_rewrite) {
+match context.config.indent_style() {
+// Try to put the first child on the same line with parent's last line
+IndentStyle::Block => (parent_shape.block_indent(context.config.tab_spaces()), true),
+// The parent is a block, so align the rest of the chain with the closing
+// brace.
+IndentStyle::Visual => (parent_shape, false),
+}
+} else {
+(
+chain_indent(context, shape.add_offset(parent_rewrite.len())),
+false,
+)
+};
+";
+ let expected =
+"let (nested_shape, extend) = if !parent_rewrite_contains_newline && is_continuable(&parent) {
+ (
+ chain_indent(context, shape.add_offset(parent_rewrite.len())),
+ context.config.indent_style() == IndentStyle::Visual || is_small_parent,
+ )
+} else if is_block_expr(context, &parent, &parent_rewrite) {
+ match context.config.indent_style() {
+ // Try to put the first child on the same line with parent's last line
+ IndentStyle::Block => (parent_shape.block_indent(context.config.tab_spaces()), true),
+ // The parent is a block, so align the rest of the chain with the closing
+ // brace.
+ IndentStyle::Visual => (parent_shape, false),
+ }
+} else {
+ (
+ chain_indent(context, shape.add_offset(parent_rewrite.len())),
+ false,
+ )
+};";
+ assert!(test_format_inner(format_code_block, code_block, expected));
+ }
+}
diff --git a/src/tools/rustfmt/src/lists.rs b/src/tools/rustfmt/src/lists.rs
new file mode 100644
index 000000000..e87850507
--- /dev/null
+++ b/src/tools/rustfmt/src/lists.rs
@@ -0,0 +1,943 @@
+//! Format list-like expressions and items.
+
+use std::cmp;
+use std::iter::Peekable;
+
+use rustc_span::BytePos;
+
+use crate::comment::{find_comment_end, rewrite_comment, FindUncommented};
+use crate::config::lists::*;
+use crate::config::{Config, IndentStyle};
+use crate::rewrite::RewriteContext;
+use crate::shape::{Indent, Shape};
+use crate::utils::{
+ count_newlines, first_line_width, last_line_width, mk_sp, starts_with_newline,
+ unicode_str_width,
+};
+use crate::visitor::SnippetProvider;
+
+pub(crate) struct ListFormatting<'a> {
+ tactic: DefinitiveListTactic,
+ separator: &'a str,
+ trailing_separator: SeparatorTactic,
+ separator_place: SeparatorPlace,
+ shape: Shape,
+ // Non-expressions, e.g., items, will have a new line at the end of the list.
+ // Important for comment styles.
+ ends_with_newline: bool,
+ // Remove newlines between list elements for expressions.
+ preserve_newline: bool,
+ // Nested import lists get some special handling for the "Mixed" list type
+ nested: bool,
+ // Whether comments should be visually aligned.
+ align_comments: bool,
+ config: &'a Config,
+}
+
+impl<'a> ListFormatting<'a> {
+ pub(crate) fn new(shape: Shape, config: &'a Config) -> Self {
+ ListFormatting {
+ tactic: DefinitiveListTactic::Vertical,
+ separator: ",",
+ trailing_separator: SeparatorTactic::Never,
+ separator_place: SeparatorPlace::Back,
+ shape,
+ ends_with_newline: true,
+ preserve_newline: false,
+ nested: false,
+ align_comments: true,
+ config,
+ }
+ }
+
+ pub(crate) fn tactic(mut self, tactic: DefinitiveListTactic) -> Self {
+ self.tactic = tactic;
+ self
+ }
+
+ pub(crate) fn separator(mut self, separator: &'a str) -> Self {
+ self.separator = separator;
+ self
+ }
+
+ pub(crate) fn trailing_separator(mut self, trailing_separator: SeparatorTactic) -> Self {
+ self.trailing_separator = trailing_separator;
+ self
+ }
+
+ pub(crate) fn separator_place(mut self, separator_place: SeparatorPlace) -> Self {
+ self.separator_place = separator_place;
+ self
+ }
+
+ pub(crate) fn ends_with_newline(mut self, ends_with_newline: bool) -> Self {
+ self.ends_with_newline = ends_with_newline;
+ self
+ }
+
+ pub(crate) fn preserve_newline(mut self, preserve_newline: bool) -> Self {
+ self.preserve_newline = preserve_newline;
+ self
+ }
+
+ pub(crate) fn nested(mut self, nested: bool) -> Self {
+ self.nested = nested;
+ self
+ }
+
+ pub(crate) fn align_comments(mut self, align_comments: bool) -> Self {
+ self.align_comments = align_comments;
+ self
+ }
+
+ pub(crate) fn needs_trailing_separator(&self) -> bool {
+ match self.trailing_separator {
+ // We always put separator in front.
+ SeparatorTactic::Always => true,
+ SeparatorTactic::Vertical => self.tactic == DefinitiveListTactic::Vertical,
+ SeparatorTactic::Never => {
+ self.tactic == DefinitiveListTactic::Vertical && self.separator_place.is_front()
+ }
+ }
+ }
+}
+
+impl AsRef<ListItem> for ListItem {
+ fn as_ref(&self) -> &ListItem {
+ self
+ }
+}
+
+#[derive(PartialEq, Eq, Debug, Copy, Clone)]
+pub(crate) enum ListItemCommentStyle {
+ // Try to keep the comment on the same line with the item.
+ SameLine,
+ // Put the comment on the previous or the next line of the item.
+ DifferentLine,
+ // No comment available.
+ None,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct ListItem {
+ // None for comments means that they are not present.
+ pub(crate) pre_comment: Option<String>,
+ pub(crate) pre_comment_style: ListItemCommentStyle,
+ // Item should include attributes and doc comments. None indicates a failed
+ // rewrite.
+ pub(crate) item: Option<String>,
+ pub(crate) post_comment: Option<String>,
+ // Whether there is extra whitespace before this item.
+ pub(crate) new_lines: bool,
+}
+
+impl ListItem {
+ pub(crate) fn empty() -> ListItem {
+ ListItem {
+ pre_comment: None,
+ pre_comment_style: ListItemCommentStyle::None,
+ item: None,
+ post_comment: None,
+ new_lines: false,
+ }
+ }
+
+ pub(crate) fn inner_as_ref(&self) -> &str {
+ self.item.as_ref().map_or("", |s| s)
+ }
+
+ pub(crate) fn is_different_group(&self) -> bool {
+ self.inner_as_ref().contains('\n')
+ || self.pre_comment.is_some()
+ || self
+ .post_comment
+ .as_ref()
+ .map_or(false, |s| s.contains('\n'))
+ }
+
+ pub(crate) fn is_multiline(&self) -> bool {
+ self.inner_as_ref().contains('\n')
+ || self
+ .pre_comment
+ .as_ref()
+ .map_or(false, |s| s.contains('\n'))
+ || self
+ .post_comment
+ .as_ref()
+ .map_or(false, |s| s.contains('\n'))
+ }
+
+ pub(crate) fn has_single_line_comment(&self) -> bool {
+ self.pre_comment
+ .as_ref()
+ .map_or(false, |comment| comment.trim_start().starts_with("//"))
+ || self
+ .post_comment
+ .as_ref()
+ .map_or(false, |comment| comment.trim_start().starts_with("//"))
+ }
+
+ pub(crate) fn has_comment(&self) -> bool {
+ self.pre_comment.is_some() || self.post_comment.is_some()
+ }
+
+ pub(crate) fn from_str<S: Into<String>>(s: S) -> ListItem {
+ ListItem {
+ pre_comment: None,
+ pre_comment_style: ListItemCommentStyle::None,
+ item: Some(s.into()),
+ post_comment: None,
+ new_lines: false,
+ }
+ }
+
+ // Returns `true` if the item causes something to be written.
+ fn is_substantial(&self) -> bool {
+ fn empty(s: &Option<String>) -> bool {
+ !matches!(*s, Some(ref s) if !s.is_empty())
+ }
+
+ !(empty(&self.pre_comment) && empty(&self.item) && empty(&self.post_comment))
+ }
+}
+
+/// The type of separator for lists.
+#[derive(Copy, Clone, Eq, PartialEq, Debug)]
+pub(crate) enum Separator {
+ Comma,
+ VerticalBar,
+}
+
+impl Separator {
+ pub(crate) fn len(self) -> usize {
+ match self {
+ // 2 = `, `
+ Separator::Comma => 2,
+ // 3 = ` | `
+ Separator::VerticalBar => 3,
+ }
+ }
+}
+
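+// Decides between horizontal, vertical, and mixed layout based on the requested tactic, the
+// items' total width (including separators), and whether any item carries a line comment.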
+pub(crate) fn definitive_tactic<I, T>(
+ items: I,
+ tactic: ListTactic,
+ sep: Separator,
+ width: usize,
+) -> DefinitiveListTactic
+where
+ I: IntoIterator<Item = T> + Clone,
+ T: AsRef<ListItem>,
+{
+ let pre_line_comments = items
+ .clone()
+ .into_iter()
+ .any(|item| item.as_ref().has_single_line_comment());
+
+ let limit = match tactic {
+ _ if pre_line_comments => return DefinitiveListTactic::Vertical,
+ ListTactic::Horizontal => return DefinitiveListTactic::Horizontal,
+ ListTactic::Vertical => return DefinitiveListTactic::Vertical,
+ ListTactic::LimitedHorizontalVertical(limit) => ::std::cmp::min(width, limit),
+ ListTactic::Mixed | ListTactic::HorizontalVertical => width,
+ };
+
+ let (sep_count, total_width) = calculate_width(items.clone());
+ let total_sep_len = sep.len() * sep_count.saturating_sub(1);
+ let real_total = total_width + total_sep_len;
+
+ if real_total <= limit && !items.into_iter().any(|item| item.as_ref().is_multiline()) {
+ DefinitiveListTactic::Horizontal
+ } else {
+ match tactic {
+ ListTactic::Mixed => DefinitiveListTactic::Mixed,
+ _ => DefinitiveListTactic::Vertical,
+ }
+ }
+}
+
+// Format a list of commented items into a string.
+pub(crate) fn write_list<I, T>(items: I, formatting: &ListFormatting<'_>) -> Option<String>
+where
+ I: IntoIterator<Item = T> + Clone,
+ T: AsRef<ListItem>,
+{
+ let tactic = formatting.tactic;
+ let sep_len = formatting.separator.len();
+
+ // Now that we know how we will layout, we can decide for sure if there
+ // will be a trailing separator.
+ let mut trailing_separator = formatting.needs_trailing_separator();
+ let mut result = String::with_capacity(128);
+ let cloned_items = items.clone();
+ let mut iter = items.into_iter().enumerate().peekable();
+ let mut item_max_width: Option<usize> = None;
+ let sep_place =
+ SeparatorPlace::from_tactic(formatting.separator_place, tactic, formatting.separator);
+ let mut prev_item_had_post_comment = false;
+ let mut prev_item_is_nested_import = false;
+
+ let mut line_len = 0;
+ let indent_str = &formatting.shape.indent.to_string(formatting.config);
+ while let Some((i, item)) = iter.next() {
+ let item = item.as_ref();
+ let inner_item = item.item.as_ref()?;
+ let first = i == 0;
+ let last = iter.peek().is_none();
+ let mut separate = match sep_place {
+ SeparatorPlace::Front => !first,
+ SeparatorPlace::Back => !last || trailing_separator,
+ };
+ let item_sep_len = if separate { sep_len } else { 0 };
+
+ // Item string may be multi-line. Its length (used for block comment alignment)
+ // should be only the length of the last line.
+ let item_last_line = if item.is_multiline() {
+ inner_item.lines().last().unwrap_or("")
+ } else {
+ inner_item.as_ref()
+ };
+ let mut item_last_line_width = item_last_line.len() + item_sep_len;
+ if item_last_line.starts_with(&**indent_str) {
+ item_last_line_width -= indent_str.len();
+ }
+
+ if !item.is_substantial() {
+ continue;
+ }
+
+ match tactic {
+ DefinitiveListTactic::Horizontal if !first => {
+ result.push(' ');
+ }
+ DefinitiveListTactic::SpecialMacro(num_args_before) => {
+ if i == 0 {
+ // Nothing
+ } else if i < num_args_before {
+ result.push(' ');
+ } else if i <= num_args_before + 1 {
+ result.push('\n');
+ result.push_str(indent_str);
+ } else {
+ result.push(' ');
+ }
+ }
+ DefinitiveListTactic::Vertical
+ if !first && !inner_item.is_empty() && !result.is_empty() =>
+ {
+ result.push('\n');
+ result.push_str(indent_str);
+ }
+ DefinitiveListTactic::Mixed => {
+ let total_width = total_item_width(item) + item_sep_len;
+
+ // 1 is space between separator and item.
+ if (line_len > 0 && line_len + 1 + total_width > formatting.shape.width)
+ || prev_item_had_post_comment
+ || (formatting.nested
+ && (prev_item_is_nested_import || (!first && inner_item.contains("::"))))
+ {
+ result.push('\n');
+ result.push_str(indent_str);
+ line_len = 0;
+ if formatting.ends_with_newline {
+ trailing_separator = true;
+ }
+ } else if line_len > 0 {
+ result.push(' ');
+ line_len += 1;
+ }
+
+ if last && formatting.ends_with_newline {
+ separate = formatting.trailing_separator != SeparatorTactic::Never;
+ }
+
+ line_len += total_width;
+ }
+ _ => {}
+ }
+
+ // Pre-comments
+ if let Some(ref comment) = item.pre_comment {
+ // Block style in non-vertical mode.
+ let block_mode = tactic == DefinitiveListTactic::Horizontal;
+ // Width restriction is only relevant in vertical mode.
+ let comment =
+ rewrite_comment(comment, block_mode, formatting.shape, formatting.config)?;
+ result.push_str(&comment);
+
+ if !inner_item.is_empty() {
+ use DefinitiveListTactic::*;
+ if matches!(tactic, Vertical | Mixed | SpecialMacro(_)) {
+ // We cannot keep pre-comments on the same line if the comment is normalized.
+ let keep_comment = if formatting.config.normalize_comments()
+ || item.pre_comment_style == ListItemCommentStyle::DifferentLine
+ {
+ false
+ } else {
+ // We will try to keep the comment on the same line with the item here.
+ // 1 = ` `
+ let total_width = total_item_width(item) + item_sep_len + 1;
+ total_width <= formatting.shape.width
+ };
+ if keep_comment {
+ result.push(' ');
+ } else {
+ result.push('\n');
+ result.push_str(indent_str);
+ // This is the width of the item (without comments).
+ line_len = item.item.as_ref().map_or(0, |s| unicode_str_width(s));
+ }
+ } else {
+ result.push(' ')
+ }
+ }
+ item_max_width = None;
+ }
+
+ if separate && sep_place.is_front() && !first {
+ result.push_str(formatting.separator.trim());
+ result.push(' ');
+ }
+ result.push_str(inner_item);
+
+ // Post-comments
+ if tactic == DefinitiveListTactic::Horizontal && item.post_comment.is_some() {
+ let comment = item.post_comment.as_ref().unwrap();
+ let formatted_comment = rewrite_comment(
+ comment,
+ true,
+ Shape::legacy(formatting.shape.width, Indent::empty()),
+ formatting.config,
+ )?;
+
+ result.push(' ');
+ result.push_str(&formatted_comment);
+ }
+
+ if separate && sep_place.is_back() {
+ result.push_str(formatting.separator);
+ }
+
+ if tactic != DefinitiveListTactic::Horizontal && item.post_comment.is_some() {
+ let comment = item.post_comment.as_ref().unwrap();
+ let overhead = last_line_width(&result) + first_line_width(comment.trim());
+
+ let rewrite_post_comment = |item_max_width: &mut Option<usize>| {
+ if item_max_width.is_none() && !last && !inner_item.contains('\n') {
+ *item_max_width = Some(max_width_of_item_with_post_comment(
+ &cloned_items,
+ i,
+ overhead,
+ formatting.config.max_width(),
+ ));
+ }
+ let overhead = if starts_with_newline(comment) {
+ 0
+ } else if let Some(max_width) = *item_max_width {
+ max_width + 2
+ } else {
+ // 1 = space between item and comment.
+ item_last_line_width + 1
+ };
+ let width = formatting.shape.width.checked_sub(overhead).unwrap_or(1);
+ let offset = formatting.shape.indent + overhead;
+ let comment_shape = Shape::legacy(width, offset);
+
+ let block_style = if !formatting.ends_with_newline && last {
+ true
+ } else if starts_with_newline(comment) {
+ false
+ } else {
+ comment.trim().contains('\n') || comment.trim().len() > width
+ };
+
+ rewrite_comment(
+ comment.trim_start(),
+ block_style,
+ comment_shape,
+ formatting.config,
+ )
+ };
+
+ let mut formatted_comment = rewrite_post_comment(&mut item_max_width)?;
+
+ if !starts_with_newline(comment) {
+ if formatting.align_comments {
+ let mut comment_alignment =
+ post_comment_alignment(item_max_width, inner_item.len());
+ if first_line_width(&formatted_comment)
+ + last_line_width(&result)
+ + comment_alignment
+ + 1
+ > formatting.config.max_width()
+ {
+ item_max_width = None;
+ formatted_comment = rewrite_post_comment(&mut item_max_width)?;
+ comment_alignment =
+ post_comment_alignment(item_max_width, inner_item.len());
+ }
+ for _ in 0..=comment_alignment {
+ result.push(' ');
+ }
+ }
+ // An additional space for the missing trailing separator (or
+ // if we skipped alignment above).
+ if !formatting.align_comments
+ || (last
+ && item_max_width.is_some()
+ && !separate
+ && !formatting.separator.is_empty())
+ {
+ result.push(' ');
+ }
+ } else {
+ result.push('\n');
+ result.push_str(indent_str);
+ }
+ if formatted_comment.contains('\n') {
+ item_max_width = None;
+ }
+ result.push_str(&formatted_comment);
+ } else {
+ item_max_width = None;
+ }
+
+ if formatting.preserve_newline
+ && !last
+ && tactic == DefinitiveListTactic::Vertical
+ && item.new_lines
+ {
+ item_max_width = None;
+ result.push('\n');
+ }
+
+ prev_item_had_post_comment = item.post_comment.is_some();
+ prev_item_is_nested_import = inner_item.contains("::");
+ }
+
+ Some(result)
+}
+
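+// Returns the width of the widest item in the run of consecutive single-line items starting
+// at index `i` whose post-comments can be aligned together within `max_budget`.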
+fn max_width_of_item_with_post_comment<I, T>(
+ items: &I,
+ i: usize,
+ overhead: usize,
+ max_budget: usize,
+) -> usize
+where
+ I: IntoIterator<Item = T> + Clone,
+ T: AsRef<ListItem>,
+{
+ let mut max_width = 0;
+ let mut first = true;
+ for item in items.clone().into_iter().skip(i) {
+ let item = item.as_ref();
+ let inner_item_width = item.inner_as_ref().len();
+ if !first
+ && (item.is_different_group()
+ || item.post_comment.is_none()
+ || inner_item_width + overhead > max_budget)
+ {
+ return max_width;
+ }
+ if max_width < inner_item_width {
+ max_width = inner_item_width;
+ }
+ if item.new_lines {
+ return max_width;
+ }
+ first = false;
+ }
+ max_width
+}
+
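+// Number of spaces needed to align a post-comment with the widest item in its group.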
+fn post_comment_alignment(item_max_width: Option<usize>, inner_item_len: usize) -> usize {
+ item_max_width.unwrap_or(0).saturating_sub(inner_item_len)
+}
+
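+// An iterator that lazily pairs each underlying item with its surrounding pre- and
+// post-comments, yielding `ListItem`s.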
+pub(crate) struct ListItems<'a, I, F1, F2, F3>
+where
+ I: Iterator,
+{
+ snippet_provider: &'a SnippetProvider,
+ inner: Peekable<I>,
+ get_lo: F1,
+ get_hi: F2,
+ get_item_string: F3,
+ prev_span_end: BytePos,
+ next_span_start: BytePos,
+ terminator: &'a str,
+ separator: &'a str,
+ leave_last: bool,
+}
+
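+// Extracts the comment (if any) preceding an item and classifies whether it may stay on the
+// same line as the item.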
+pub(crate) fn extract_pre_comment(pre_snippet: &str) -> (Option<String>, ListItemCommentStyle) {
+ let trimmed_pre_snippet = pre_snippet.trim();
+ // Both start and end are checked to support keeping a block comment inline with
+ // the item, even if there are preceding line comments, while still supporting
+ // a snippet that starts with a block comment but also contains one or more
+ // trailing single line comments.
+ // https://github.com/rust-lang/rustfmt/issues/3025
+ // https://github.com/rust-lang/rustfmt/pull/3048
+ // https://github.com/rust-lang/rustfmt/issues/3839
+ let starts_with_block_comment = trimmed_pre_snippet.starts_with("/*");
+ let ends_with_block_comment = trimmed_pre_snippet.ends_with("*/");
+ let starts_with_single_line_comment = trimmed_pre_snippet.starts_with("//");
+ if ends_with_block_comment {
+ let comment_end = pre_snippet.rfind(|c| c == '/').unwrap();
+ if pre_snippet[comment_end..].contains('\n') {
+ (
+ Some(trimmed_pre_snippet.to_owned()),
+ ListItemCommentStyle::DifferentLine,
+ )
+ } else {
+ (
+ Some(trimmed_pre_snippet.to_owned()),
+ ListItemCommentStyle::SameLine,
+ )
+ }
+ } else if starts_with_single_line_comment || starts_with_block_comment {
+ (
+ Some(trimmed_pre_snippet.to_owned()),
+ ListItemCommentStyle::DifferentLine,
+ )
+ } else {
+ (None, ListItemCommentStyle::None)
+ }
+}
+
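+// Extracts the comment (if any) following an item, stripping separators and surrounding
+// whitespace.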
+pub(crate) fn extract_post_comment(
+ post_snippet: &str,
+ comment_end: usize,
+ separator: &str,
+ is_last: bool,
+) -> Option<String> {
+ let white_space: &[_] = &[' ', '\t'];
+
+ // Cleanup post-comment: strip separators and whitespace.
+ let post_snippet = post_snippet[..comment_end].trim();
+
+ let last_inline_comment_ends_with_separator = if is_last {
+ if let Some(line) = post_snippet.lines().last() {
+ line.ends_with(separator) && line.trim().starts_with("//")
+ } else {
+ false
+ }
+ } else {
+ false
+ };
+
+ let post_snippet_trimmed = if post_snippet.starts_with(|c| c == ',' || c == ':') {
+ post_snippet[1..].trim_matches(white_space)
+ } else if let Some(stripped) = post_snippet.strip_prefix(separator) {
+ stripped.trim_matches(white_space)
+ } else if last_inline_comment_ends_with_separator {
+ // Since we're on the last item, it's fine to keep any trailing separators in comments.
+ post_snippet.trim_matches(white_space)
+ }
+ // Not a comment, or it spans more than one line.
+ else if post_snippet.ends_with(',')
+ && (!post_snippet.trim().starts_with("//") || post_snippet.trim().contains('\n'))
+ {
+ post_snippet[..(post_snippet.len() - 1)].trim_matches(white_space)
+ } else {
+ post_snippet
+ };
+ // FIXME(#3441): post_snippet includes 'const' now;
+ // it should not be included here.
+ let removed_newline_snippet = post_snippet_trimmed.trim();
+ if !post_snippet_trimmed.is_empty()
+ && (removed_newline_snippet.starts_with("//") || removed_newline_snippet.starts_with("/*"))
+ {
+ Some(post_snippet_trimmed.to_owned())
+ } else {
+ None
+ }
+}
+
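+// Returns how many bytes of `post_snippet` belong to the current item (i.e. its
+// post-comment); the remainder belongs to the next item.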
+pub(crate) fn get_comment_end(
+ post_snippet: &str,
+ separator: &str,
+ terminator: &str,
+ is_last: bool,
+) -> usize {
+ if is_last {
+ return post_snippet
+ .find_uncommented(terminator)
+ .unwrap_or_else(|| post_snippet.len());
+ }
+
+ let mut block_open_index = post_snippet.find("/*");
+ // check if it really is a block comment (and not `//*` or a nested comment)
+ if let Some(i) = block_open_index {
+ match post_snippet.find('/') {
+ Some(j) if j < i => block_open_index = None,
+ _ if post_snippet[..i].ends_with('/') => block_open_index = None,
+ _ => (),
+ }
+ }
+ let newline_index = post_snippet.find('\n');
+ if let Some(separator_index) = post_snippet.find_uncommented(separator) {
+ match (block_open_index, newline_index) {
+ // Separator before comment, with the next item on same line.
+ // Comment belongs to next item.
+ (Some(i), None) if i > separator_index => separator_index + 1,
+ // Block-style post-comment before the separator.
+ (Some(i), None) => cmp::max(
+ find_comment_end(&post_snippet[i..]).unwrap() + i,
+ separator_index + 1,
+ ),
+ // Block-style post-comment. Either before or after the separator.
+ (Some(i), Some(j)) if i < j => cmp::max(
+ find_comment_end(&post_snippet[i..]).unwrap() + i,
+ separator_index + 1,
+ ),
+ // Potential *single* line comment.
+ (_, Some(j)) if j > separator_index => j + 1,
+ _ => post_snippet.len(),
+ }
+ } else if let Some(newline_index) = newline_index {
+ // Match arms may not have a trailing comma. In any case, for match arms,
+ // we will assume that the post-comment belongs to the next arm if they
+ // do not end with a trailing comma.
+ newline_index + 1
+ } else {
+ 0
+ }
+}
+
+// Account for extra whitespace between items. This is fiddly
+// because of the way we divide pre- and post-comments.
+pub(crate) fn has_extra_newline(post_snippet: &str, comment_end: usize) -> bool {
+ if post_snippet.is_empty() || comment_end == 0 {
+ return false;
+ }
+
+ let len_last = post_snippet[..comment_end]
+ .chars()
+ .last()
+ .unwrap()
+ .len_utf8();
+ // Everything from the separator to the next item.
+ let test_snippet = &post_snippet[comment_end - len_last..];
+ let first_newline = test_snippet
+ .find('\n')
+ .unwrap_or_else(|| test_snippet.len());
+ // From the end of the first line of comments.
+ let test_snippet = &test_snippet[first_newline..];
+ let first = test_snippet
+ .find(|c: char| !c.is_whitespace())
+ .unwrap_or_else(|| test_snippet.len());
+ // From the end of the first line of comments to the next non-whitespace char.
+ let test_snippet = &test_snippet[..first];
+
+ // There were multiple line breaks which got trimmed to nothing.
+ count_newlines(test_snippet) > 1
+}
+
+impl<'a, T, I, F1, F2, F3> Iterator for ListItems<'a, I, F1, F2, F3>
+where
+ I: Iterator<Item = T>,
+ F1: Fn(&T) -> BytePos,
+ F2: Fn(&T) -> BytePos,
+ F3: Fn(&T) -> Option<String>,
+{
+ type Item = ListItem;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next().map(|item| {
+ // Pre-comment
+ let pre_snippet = self
+ .snippet_provider
+ .span_to_snippet(mk_sp(self.prev_span_end, (self.get_lo)(&item)))
+ .unwrap_or("");
+ let (pre_comment, pre_comment_style) = extract_pre_comment(pre_snippet);
+
+ // Post-comment
+ let next_start = match self.inner.peek() {
+ Some(next_item) => (self.get_lo)(next_item),
+ None => self.next_span_start,
+ };
+ let post_snippet = self
+ .snippet_provider
+ .span_to_snippet(mk_sp((self.get_hi)(&item), next_start))
+ .unwrap_or("");
+ let is_last = self.inner.peek().is_none();
+ let comment_end =
+ get_comment_end(post_snippet, self.separator, self.terminator, is_last);
+ let new_lines = has_extra_newline(post_snippet, comment_end);
+ let post_comment =
+ extract_post_comment(post_snippet, comment_end, self.separator, is_last);
+
+ self.prev_span_end = (self.get_hi)(&item) + BytePos(comment_end as u32);
+
+ ListItem {
+ pre_comment,
+ pre_comment_style,
+ item: if self.inner.peek().is_none() && self.leave_last {
+ None
+ } else {
+ (self.get_item_string)(&item)
+ },
+ post_comment,
+ new_lines,
+ }
+ })
+ }
+}
+
+#[allow(clippy::too_many_arguments)]
+// Creates an iterator over a list's items with associated comments.
+pub(crate) fn itemize_list<'a, T, I, F1, F2, F3>(
+ snippet_provider: &'a SnippetProvider,
+ inner: I,
+ terminator: &'a str,
+ separator: &'a str,
+ get_lo: F1,
+ get_hi: F2,
+ get_item_string: F3,
+ prev_span_end: BytePos,
+ next_span_start: BytePos,
+ leave_last: bool,
+) -> ListItems<'a, I, F1, F2, F3>
+where
+ I: Iterator<Item = T>,
+ F1: Fn(&T) -> BytePos,
+ F2: Fn(&T) -> BytePos,
+ F3: Fn(&T) -> Option<String>,
+{
+ ListItems {
+ snippet_provider,
+ inner: inner.peekable(),
+ get_lo,
+ get_hi,
+ get_item_string,
+ prev_span_end,
+ next_span_start,
+ terminator,
+ separator,
+ leave_last,
+ }
+}
+
+/// Returns the count and total width of the list items.
+fn calculate_width<I, T>(items: I) -> (usize, usize)
+where
+ I: IntoIterator<Item = T>,
+ T: AsRef<ListItem>,
+{
+ items
+ .into_iter()
+ .map(|item| total_item_width(item.as_ref()))
+ .fold((0, 0), |acc, l| (acc.0 + 1, acc.1 + l))
+}
+
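+/// Returns the width of an item including its pre- and post-comments (counting the inline
+/// comment delimiters).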
+pub(crate) fn total_item_width(item: &ListItem) -> usize {
+ comment_len(item.pre_comment.as_ref().map(|x| &(*x)[..]))
+ + comment_len(item.post_comment.as_ref().map(|x| &(*x)[..]))
+ + item.item.as_ref().map_or(0, |s| unicode_str_width(s))
+}
+
+fn comment_len(comment: Option<&str>) -> usize {
+ match comment {
+ Some(s) => {
+ let text_len = s.trim().len();
+ if text_len > 0 {
+ // We'll put " /*" before and " */" after inline comments.
+ text_len + 6
+ } else {
+ text_len
+ }
+ }
+ None => 0,
+ }
+}
+
+// Compute horizontal and vertical shapes for a struct-lit-like thing.
+pub(crate) fn struct_lit_shape(
+ shape: Shape,
+ context: &RewriteContext<'_>,
+ prefix_width: usize,
+ suffix_width: usize,
+) -> Option<(Option<Shape>, Shape)> {
+ let v_shape = match context.config.indent_style() {
+ IndentStyle::Visual => shape
+ .visual_indent(0)
+ .shrink_left(prefix_width)?
+ .sub_width(suffix_width)?,
+ IndentStyle::Block => {
+ let shape = shape.block_indent(context.config.tab_spaces());
+ Shape {
+ width: context.budget(shape.indent.width()),
+ ..shape
+ }
+ }
+ };
+ let shape_width = shape.width.checked_sub(prefix_width + suffix_width);
+ if let Some(w) = shape_width {
+ let shape_width = cmp::min(w, context.config.struct_lit_width());
+ Some((Some(Shape::legacy(shape_width, shape.indent)), v_shape))
+ } else {
+ Some((None, v_shape))
+ }
+}
+
+// Compute the tactic for the internals of a struct-lit-like thing.
+pub(crate) fn struct_lit_tactic(
+ h_shape: Option<Shape>,
+ context: &RewriteContext<'_>,
+ items: &[ListItem],
+) -> DefinitiveListTactic {
+ if let Some(h_shape) = h_shape {
+ let prelim_tactic = match (context.config.indent_style(), items.len()) {
+ (IndentStyle::Visual, 1) => ListTactic::HorizontalVertical,
+ _ if context.config.struct_lit_single_line() => ListTactic::HorizontalVertical,
+ _ => ListTactic::Vertical,
+ };
+ definitive_tactic(items, prelim_tactic, Separator::Comma, h_shape.width)
+ } else {
+ DefinitiveListTactic::Vertical
+ }
+}
+
+// Given a tactic and possible shapes for horizontal and vertical layout,
+// come up with the actual shape to use.
+pub(crate) fn shape_for_tactic(
+ tactic: DefinitiveListTactic,
+ h_shape: Option<Shape>,
+ v_shape: Shape,
+) -> Shape {
+ match tactic {
+ DefinitiveListTactic::Horizontal => h_shape.unwrap(),
+ _ => v_shape,
+ }
+}
+
+// Create a ListFormatting object for formatting the internals of a
+// struct-lit-like thing, that is a series of fields.
+pub(crate) fn struct_lit_formatting<'a>(
+ shape: Shape,
+ tactic: DefinitiveListTactic,
+ context: &'a RewriteContext<'_>,
+ force_no_trailing_comma: bool,
+) -> ListFormatting<'a> {
+ let ends_with_newline = context.config.indent_style() != IndentStyle::Visual
+ && tactic == DefinitiveListTactic::Vertical;
+ ListFormatting {
+ tactic,
+ separator: ",",
+ trailing_separator: if force_no_trailing_comma {
+ SeparatorTactic::Never
+ } else {
+ context.config.trailing_comma()
+ },
+ separator_place: SeparatorPlace::Back,
+ shape,
+ ends_with_newline,
+ preserve_newline: true,
+ nested: false,
+ align_comments: true,
+ config: context.config,
+ }
+}
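
As a point of reference, the following is a minimal standalone sketch of the width accounting performed by `comment_len` and `total_item_width` above. It uses simplified stand-in types (plain `String` fields and ASCII byte lengths instead of rustfmt's own `ListItem` and `unicode_str_width`), so it is illustrative only and not part of the upstream file.

    struct SimpleItem {
        pre_comment: Option<String>,
        item: Option<String>,
        post_comment: Option<String>,
    }

    fn simple_comment_len(comment: Option<&str>) -> usize {
        match comment {
            // " /*" before and " */" after an inline comment add 6 characters.
            Some(s) if !s.trim().is_empty() => s.trim().len() + 6,
            _ => 0,
        }
    }

    fn simple_total_item_width(item: &SimpleItem) -> usize {
        simple_comment_len(item.pre_comment.as_deref())
            + simple_comment_len(item.post_comment.as_deref())
            + item.item.as_ref().map_or(0, |s| s.len())
    }

    fn main() {
        let item = SimpleItem {
            pre_comment: None,
            item: Some("x: 1".to_owned()),
            post_comment: Some("a field".to_owned()),
        };
        // 4 (item text) + 7 (comment text) + 6 (comment delimiters) = 17
        assert_eq!(simple_total_item_width(&item), 17);
    }
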
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
new file mode 100644
index 000000000..3a641fab5
--- /dev/null
+++ b/src/tools/rustfmt/src/macros.rs
@@ -0,0 +1,1412 @@
+// Format list-like macro invocations. These are invocations whose token trees
+// can be interpreted as expressions and separated by commas.
+// Note that these token trees do not actually have to be interpreted as
+// expressions by the compiler. An example of an invocation we would reformat is
+// foo!( x, y, z ). The token x may represent an identifier in the code, but we
+// format it as if it were an expression.
+// Macro uses which are not-list like, such as bar!(key => val), will not be
+// reformatted.
+// List-like invocations with parentheses will be formatted as function calls,
+// and those with brackets will be formatted as array literals.
+
+use std::collections::HashMap;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+
+use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
+use rustc_ast::tokenstream::{Cursor, TokenStream, TokenTree};
+use rustc_ast::{ast, ptr};
+use rustc_ast_pretty::pprust;
+use rustc_span::{
+ symbol::{self, kw},
+ BytePos, Span, Symbol, DUMMY_SP,
+};
+
+use crate::comment::{
+ contains_comment, CharClasses, FindUncommented, FullCodeCharKind, LineClasses,
+};
+use crate::config::lists::*;
+use crate::expr::{rewrite_array, rewrite_assign_rhs, RhsAssignKind};
+use crate::lists::{itemize_list, write_list, ListFormatting};
+use crate::overflow;
+use crate::parse::macros::lazy_static::parse_lazy_static;
+use crate::parse::macros::{parse_expr, parse_macro_args, ParsedMacroArgs};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::{Indent, Shape};
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::utils::{
+ format_visibility, indent_next_line, is_empty_line, mk_sp, remove_trailing_white_spaces,
+ rewrite_ident, trim_left_preserve_layout, wrap_str, NodeIdExt,
+};
+use crate::visitor::FmtVisitor;
+
+const FORCED_BRACKET_MACROS: &[&str] = &["vec!"];
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(crate) enum MacroPosition {
+ Item,
+ Statement,
+ Expression,
+ Pat,
+}
+
+#[derive(Debug)]
+pub(crate) enum MacroArg {
+ Expr(ptr::P<ast::Expr>),
+ Ty(ptr::P<ast::Ty>),
+ Pat(ptr::P<ast::Pat>),
+ Item(ptr::P<ast::Item>),
+ Keyword(symbol::Ident, Span),
+}
+
+impl MacroArg {
+ pub(crate) fn is_item(&self) -> bool {
+ match self {
+ MacroArg::Item(..) => true,
+ _ => false,
+ }
+ }
+}
+
+impl Rewrite for ast::Item {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let mut visitor = crate::visitor::FmtVisitor::from_context(context);
+ visitor.block_indent = shape.indent;
+ visitor.last_pos = self.span().lo();
+ visitor.visit_item(self);
+ Some(visitor.buffer.to_owned())
+ }
+}
+
+impl Rewrite for MacroArg {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match *self {
+ MacroArg::Expr(ref expr) => expr.rewrite(context, shape),
+ MacroArg::Ty(ref ty) => ty.rewrite(context, shape),
+ MacroArg::Pat(ref pat) => pat.rewrite(context, shape),
+ MacroArg::Item(ref item) => item.rewrite(context, shape),
+ MacroArg::Keyword(ident, _) => Some(ident.name.to_string()),
+ }
+ }
+}
+
+/// Rewrite macro name without using pretty-printer if possible.
+fn rewrite_macro_name(
+ context: &RewriteContext<'_>,
+ path: &ast::Path,
+ extra_ident: Option<symbol::Ident>,
+) -> String {
+ let name = if path.segments.len() == 1 {
+ // Avoid using pretty-printer in the common case.
+ format!("{}!", rewrite_ident(context, path.segments[0].ident))
+ } else {
+ format!("{}!", pprust::path_to_string(path))
+ };
+ match extra_ident {
+ Some(ident) if ident.name != kw::Empty => format!("{} {}", name, ident),
+ _ => name,
+ }
+}
+
+// Used as a fallback when formatting the macro call fails.
+fn return_macro_parse_failure_fallback(
+ context: &RewriteContext<'_>,
+ indent: Indent,
+ position: MacroPosition,
+ span: Span,
+) -> Option<String> {
+ // Mark this as a failure however we format it
+ context.macro_rewrite_failure.replace(true);
+
+ // Heuristically determine whether the last line of the macro uses "Block" style
+ // rather than using "Visual" style, or another indentation style.
+ let is_like_block_indent_style = context
+ .snippet(span)
+ .lines()
+ .last()
+ .map(|closing_line| {
+ closing_line
+ .trim()
+ .chars()
+ .all(|ch| matches!(ch, '}' | ')' | ']'))
+ })
+ .unwrap_or(false);
+ if is_like_block_indent_style {
+ return trim_left_preserve_layout(context.snippet(span), indent, context.config);
+ }
+
+ context.skipped_range.borrow_mut().push((
+ context.parse_sess.line_of_byte_pos(span.lo()),
+ context.parse_sess.line_of_byte_pos(span.hi()),
+ ));
+
+ // Return the snippet unmodified if the macro is not block-like
+ let mut snippet = context.snippet(span).to_owned();
+ if position == MacroPosition::Item {
+ snippet.push(';');
+ }
+ Some(snippet)
+}
+
+pub(crate) fn rewrite_macro(
+ mac: &ast::MacCall,
+ extra_ident: Option<symbol::Ident>,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ position: MacroPosition,
+) -> Option<String> {
+ let should_skip = context
+ .skip_context
+ .skip_macro(context.snippet(mac.path.span));
+ if should_skip {
+ None
+ } else {
+ let guard = context.enter_macro();
+ let result = catch_unwind(AssertUnwindSafe(|| {
+ rewrite_macro_inner(
+ mac,
+ extra_ident,
+ context,
+ shape,
+ position,
+ guard.is_nested(),
+ )
+ }));
+ match result {
+ Err(..) | Ok(None) => {
+ context.macro_rewrite_failure.replace(true);
+ None
+ }
+ Ok(rw) => rw,
+ }
+ }
+}
+
+fn rewrite_macro_inner(
+ mac: &ast::MacCall,
+ extra_ident: Option<symbol::Ident>,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ position: MacroPosition,
+ is_nested_macro: bool,
+) -> Option<String> {
+ if context.config.use_try_shorthand() {
+ if let Some(expr) = convert_try_mac(mac, context) {
+ context.leave_macro();
+ return expr.rewrite(context, shape);
+ }
+ }
+
+ let original_style = macro_style(mac, context);
+
+ let macro_name = rewrite_macro_name(context, &mac.path, extra_ident);
+ let is_forced_bracket = FORCED_BRACKET_MACROS.contains(&&macro_name[..]);
+
+ let style = if is_forced_bracket && !is_nested_macro {
+ Delimiter::Bracket
+ } else {
+ original_style
+ };
+
+ let ts = mac.args.inner_tokens();
+ let has_comment = contains_comment(context.snippet(mac.span()));
+ if ts.is_empty() && !has_comment {
+ return match style {
+ Delimiter::Parenthesis if position == MacroPosition::Item => {
+ Some(format!("{}();", macro_name))
+ }
+ Delimiter::Bracket if position == MacroPosition::Item => {
+ Some(format!("{}[];", macro_name))
+ }
+ Delimiter::Parenthesis => Some(format!("{}()", macro_name)),
+ Delimiter::Bracket => Some(format!("{}[]", macro_name)),
+ Delimiter::Brace => Some(format!("{} {{}}", macro_name)),
+ _ => unreachable!(),
+ };
+ }
+ // Format well-known macros which cannot be parsed as a valid AST.
+ if macro_name == "lazy_static!" && !has_comment {
+ if let success @ Some(..) = format_lazy_static(context, shape, ts.clone()) {
+ return success;
+ }
+ }
+
+ let ParsedMacroArgs {
+ args: arg_vec,
+ vec_with_semi,
+ trailing_comma,
+ } = match parse_macro_args(context, ts, style, is_forced_bracket) {
+ Some(args) => args,
+ None => {
+ return return_macro_parse_failure_fallback(
+ context,
+ shape.indent,
+ position,
+ mac.span(),
+ );
+ }
+ };
+
+ if !arg_vec.is_empty() && arg_vec.iter().all(MacroArg::is_item) {
+ return rewrite_macro_with_items(
+ context,
+ &arg_vec,
+ &macro_name,
+ shape,
+ style,
+ position,
+ mac.span(),
+ );
+ }
+
+ match style {
+ Delimiter::Parenthesis => {
+ // Handle special case: `vec!(expr; expr)`
+ if vec_with_semi {
+ handle_vec_semi(context, shape, arg_vec, macro_name, style)
+ } else {
+ // Format the macro invocation as a function call, preserving the
+ // trailing comma because not all macros support trailing commas.
+ overflow::rewrite_with_parens(
+ context,
+ &macro_name,
+ arg_vec.iter(),
+ shape,
+ mac.span(),
+ context.config.fn_call_width(),
+ if trailing_comma {
+ Some(SeparatorTactic::Always)
+ } else {
+ Some(SeparatorTactic::Never)
+ },
+ )
+ .map(|rw| match position {
+ MacroPosition::Item => format!("{};", rw),
+ _ => rw,
+ })
+ }
+ }
+ Delimiter::Bracket => {
+ // Handle special case: `vec![expr; expr]`
+ if vec_with_semi {
+ handle_vec_semi(context, shape, arg_vec, macro_name, style)
+ } else {
+ // If we are rewriting the `vec!` macro or another special macro,
+ // then we can rewrite this as a normal array literal.
+ // Otherwise, we must preserve whether the original had a trailing comma.
+ let macro_name = &macro_name.as_str();
+ let mut force_trailing_comma = if trailing_comma {
+ Some(SeparatorTactic::Always)
+ } else {
+ Some(SeparatorTactic::Never)
+ };
+ if FORCED_BRACKET_MACROS.contains(macro_name) && !is_nested_macro {
+ context.leave_macro();
+ if context.use_block_indent() {
+ force_trailing_comma = Some(SeparatorTactic::Vertical);
+ };
+ }
+ let rewrite = rewrite_array(
+ macro_name,
+ arg_vec.iter(),
+ mac.span(),
+ context,
+ shape,
+ force_trailing_comma,
+ Some(original_style),
+ )?;
+ let comma = match position {
+ MacroPosition::Item => ";",
+ _ => "",
+ };
+
+ Some(format!("{}{}", rewrite, comma))
+ }
+ }
+ Delimiter::Brace => {
+ // For macro invocations with braces, always put a space between
+ // the `macro_name!` and `{ /* macro_body */ }` but skip modifying
+ // anything in between the braces (for now).
+ let snippet = context.snippet(mac.span()).trim_start_matches(|c| c != '{');
+ match trim_left_preserve_layout(snippet, shape.indent, context.config) {
+ Some(macro_body) => Some(format!("{} {}", macro_name, macro_body)),
+ None => Some(format!("{} {}", macro_name, snippet)),
+ }
+ }
+ _ => unreachable!(),
+ }
+}
+
+fn handle_vec_semi(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ arg_vec: Vec<MacroArg>,
+ macro_name: String,
+ delim_token: Delimiter,
+) -> Option<String> {
+ let (left, right) = match delim_token {
+ Delimiter::Parenthesis => ("(", ")"),
+ Delimiter::Bracket => ("[", "]"),
+ _ => unreachable!(),
+ };
+
+ let mac_shape = shape.offset_left(macro_name.len())?;
+ // 8 = `vec![]` + `; ` or `vec!()` + `; `
+ let total_overhead = 8;
+ let nested_shape = mac_shape.block_indent(context.config.tab_spaces());
+ let lhs = arg_vec[0].rewrite(context, nested_shape)?;
+ let rhs = arg_vec[1].rewrite(context, nested_shape)?;
+ if !lhs.contains('\n')
+ && !rhs.contains('\n')
+ && lhs.len() + rhs.len() + total_overhead <= shape.width
+ {
+ // macro_name(lhs; rhs) or macro_name[lhs; rhs]
+ Some(format!("{}{}{}; {}{}", macro_name, left, lhs, rhs, right))
+ } else {
+ // macro_name(\nlhs;\nrhs\n) or macro_name[\nlhs;\nrhs\n]
+ Some(format!(
+ "{}{}{}{};{}{}{}{}",
+ macro_name,
+ left,
+ nested_shape.indent.to_string_with_newline(context.config),
+ lhs,
+ nested_shape.indent.to_string_with_newline(context.config),
+ rhs,
+ shape.indent.to_string_with_newline(context.config),
+ right
+ ))
+ }
+}
+
+pub(crate) fn rewrite_macro_def(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ indent: Indent,
+ def: &ast::MacroDef,
+ ident: symbol::Ident,
+ vis: &ast::Visibility,
+ span: Span,
+) -> Option<String> {
+ let snippet = Some(remove_trailing_white_spaces(context.snippet(span)));
+ if snippet.as_ref().map_or(true, |s| s.ends_with(';')) {
+ return snippet;
+ }
+
+ let ts = def.body.inner_tokens();
+ let mut parser = MacroParser::new(ts.into_trees());
+ let parsed_def = match parser.parse() {
+ Some(def) => def,
+ None => return snippet,
+ };
+
+ let mut result = if def.macro_rules {
+ String::from("macro_rules!")
+ } else {
+ format!("{}macro", format_visibility(context, vis))
+ };
+
+ result += " ";
+ result += rewrite_ident(context, ident);
+
+ let multi_branch_style = def.macro_rules || parsed_def.branches.len() != 1;
+
+ let arm_shape = if multi_branch_style {
+ shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config)
+ } else {
+ shape
+ };
+
+ let branch_items = itemize_list(
+ context.snippet_provider,
+ parsed_def.branches.iter(),
+ "}",
+ ";",
+ |branch| branch.span.lo(),
+ |branch| branch.span.hi(),
+ |branch| match branch.rewrite(context, arm_shape, multi_branch_style) {
+ Some(v) => Some(v),
+ // if the rewrite returned None because a macro could not be rewritten, then return the
+ // original body
+ None if context.macro_rewrite_failure.get() => {
+ Some(context.snippet(branch.body).trim().to_string())
+ }
+ None => None,
+ },
+ context.snippet_provider.span_after(span, "{"),
+ span.hi(),
+ false,
+ )
+ .collect::<Vec<_>>();
+
+ let fmt = ListFormatting::new(arm_shape, context.config)
+ .separator(if def.macro_rules { ";" } else { "" })
+ .trailing_separator(SeparatorTactic::Always)
+ .preserve_newline(true);
+
+ if multi_branch_style {
+ result += " {";
+ result += &arm_shape.indent.to_string_with_newline(context.config);
+ }
+
+ match write_list(&branch_items, &fmt) {
+ Some(ref s) => result += s,
+ None => return snippet,
+ }
+
+ if multi_branch_style {
+ result += &indent.to_string_with_newline(context.config);
+ result += "}";
+ }
+
+ Some(result)
+}
+
+fn register_metavariable(
+ map: &mut HashMap<String, String>,
+ result: &mut String,
+ name: &str,
+ dollar_count: usize,
+) {
+ let mut new_name = "$".repeat(dollar_count - 1);
+ let mut old_name = "$".repeat(dollar_count);
+
+ new_name.push('z');
+ new_name.push_str(name);
+ old_name.push_str(name);
+
+ result.push_str(&new_name);
+ map.insert(old_name, new_name);
+}
+
+// Replaces `$foo` with `zfoo`. We must check for name overlap to ensure we
+// aren't causing problems.
+// This should also work for escaped `$` variables, where we leave earlier `$`s.
+fn replace_names(input: &str) -> Option<(String, HashMap<String, String>)> {
+ // Each substitution will require five or six extra bytes.
+ let mut result = String::with_capacity(input.len() + 64);
+ let mut substs = HashMap::new();
+ let mut dollar_count = 0;
+ let mut cur_name = String::new();
+
+ for (kind, c) in CharClasses::new(input.chars()) {
+ if kind != FullCodeCharKind::Normal {
+ result.push(c);
+ } else if c == '$' {
+ dollar_count += 1;
+ } else if dollar_count == 0 {
+ result.push(c);
+ } else if !c.is_alphanumeric() && !cur_name.is_empty() {
+ // Terminates a name following one or more dollars.
+ register_metavariable(&mut substs, &mut result, &cur_name, dollar_count);
+
+ result.push(c);
+ dollar_count = 0;
+ cur_name.clear();
+ } else if c == '(' && cur_name.is_empty() {
+ // FIXME: Support macro def with repeat.
+ return None;
+ } else if c.is_alphanumeric() || c == '_' {
+ cur_name.push(c);
+ }
+ }
+
+ if !cur_name.is_empty() {
+ register_metavariable(&mut substs, &mut result, &cur_name, dollar_count);
+ }
+
+ debug!("replace_names `{}` {:?}", result, substs);
+
+ Some((result, substs))
+}
+
+#[derive(Debug, Clone)]
+enum MacroArgKind {
+ /// e.g., `$x: expr`.
+ MetaVariable(Symbol, String),
+ /// e.g., `$($foo: expr),*`
+ Repeat(
+ /// `()`, `[]` or `{}`.
+ Delimiter,
+ /// Inner arguments inside delimiters.
+ Vec<ParsedMacroArg>,
+ /// Something between the closing delimiter and the repeat token, if any.
+ Option<Box<ParsedMacroArg>>,
+ /// The repeat token. This could be one of `*`, `+` or `?`.
+ Token,
+ ),
+ /// e.g., `[derive(Debug)]`
+ Delimited(Delimiter, Vec<ParsedMacroArg>),
+ /// A possible separator. e.g., `,` or `;`.
+ Separator(String, String),
+ /// Other random stuff that does not fit to other kinds.
+ /// e.g., `== foo` in `($x: expr == foo)`.
+ Other(String, String),
+}
+
+fn delim_token_to_str(
+ context: &RewriteContext<'_>,
+ delim_token: Delimiter,
+ shape: Shape,
+ use_multiple_lines: bool,
+ inner_is_empty: bool,
+) -> (String, String) {
+ let (lhs, rhs) = match delim_token {
+ Delimiter::Parenthesis => ("(", ")"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::Brace => {
+ if inner_is_empty || use_multiple_lines {
+ ("{", "}")
+ } else {
+ ("{ ", " }")
+ }
+ }
+ Delimiter::Invisible => unreachable!(),
+ };
+ if use_multiple_lines {
+ let indent_str = shape.indent.to_string_with_newline(context.config);
+ let nested_indent_str = shape
+ .indent
+ .block_indent(context.config)
+ .to_string_with_newline(context.config);
+ (
+ format!("{}{}", lhs, nested_indent_str),
+ format!("{}{}", indent_str, rhs),
+ )
+ } else {
+ (lhs.to_owned(), rhs.to_owned())
+ }
+}
+
+impl MacroArgKind {
+ fn starts_with_brace(&self) -> bool {
+ matches!(
+ *self,
+ MacroArgKind::Repeat(Delimiter::Brace, _, _, _)
+ | MacroArgKind::Delimited(Delimiter::Brace, _)
+ )
+ }
+
+ fn starts_with_dollar(&self) -> bool {
+ matches!(
+ *self,
+ MacroArgKind::Repeat(..) | MacroArgKind::MetaVariable(..)
+ )
+ }
+
+ fn ends_with_space(&self) -> bool {
+ matches!(*self, MacroArgKind::Separator(..))
+ }
+
+ fn has_meta_var(&self) -> bool {
+ match *self {
+ MacroArgKind::MetaVariable(..) => true,
+ MacroArgKind::Repeat(_, ref args, _, _) => args.iter().any(|a| a.kind.has_meta_var()),
+ _ => false,
+ }
+ }
+
+ fn rewrite(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ use_multiple_lines: bool,
+ ) -> Option<String> {
+ let rewrite_delimited_inner = |delim_tok, args| -> Option<(String, String, String)> {
+ let inner = wrap_macro_args(context, args, shape)?;
+ let (lhs, rhs) = delim_token_to_str(context, delim_tok, shape, false, inner.is_empty());
+ if lhs.len() + inner.len() + rhs.len() <= shape.width {
+ return Some((lhs, inner, rhs));
+ }
+
+ let (lhs, rhs) = delim_token_to_str(context, delim_tok, shape, true, false);
+ let nested_shape = shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config);
+ let inner = wrap_macro_args(context, args, nested_shape)?;
+ Some((lhs, inner, rhs))
+ };
+
+ match *self {
+ MacroArgKind::MetaVariable(ty, ref name) => Some(format!("${}:{}", name, ty)),
+ MacroArgKind::Repeat(delim_tok, ref args, ref another, ref tok) => {
+ let (lhs, inner, rhs) = rewrite_delimited_inner(delim_tok, args)?;
+ let another = another
+ .as_ref()
+ .and_then(|a| a.rewrite(context, shape, use_multiple_lines))
+ .unwrap_or_else(|| "".to_owned());
+ let repeat_tok = pprust::token_to_string(tok);
+
+ Some(format!("${}{}{}{}{}", lhs, inner, rhs, another, repeat_tok))
+ }
+ MacroArgKind::Delimited(delim_tok, ref args) => {
+ rewrite_delimited_inner(delim_tok, args)
+ .map(|(lhs, inner, rhs)| format!("{}{}{}", lhs, inner, rhs))
+ }
+ MacroArgKind::Separator(ref sep, ref prefix) => Some(format!("{}{} ", prefix, sep)),
+ MacroArgKind::Other(ref inner, ref prefix) => Some(format!("{}{}", prefix, inner)),
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+struct ParsedMacroArg {
+ kind: MacroArgKind,
+}
+
+impl ParsedMacroArg {
+ fn rewrite(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ use_multiple_lines: bool,
+ ) -> Option<String> {
+ self.kind.rewrite(context, shape, use_multiple_lines)
+ }
+}
+
+/// Parses macro arguments on macro def.
+struct MacroArgParser {
+ /// Either a name of the next metavariable, a separator, or junk.
+ buf: String,
+ /// The first token of the current buffer.
+ start_tok: Token,
+ /// `true` if we are parsing a metavariable or a repeat.
+ is_meta_var: bool,
+ /// The last token parsed.
+ last_tok: Token,
+ /// Holds the parsed arguments.
+ result: Vec<ParsedMacroArg>,
+}
+
+fn last_tok(tt: &TokenTree) -> Token {
+ match *tt {
+ TokenTree::Token(ref t, _) => t.clone(),
+ TokenTree::Delimited(delim_span, delim, _) => Token {
+ kind: TokenKind::CloseDelim(delim),
+ span: delim_span.close,
+ },
+ }
+}
+
+impl MacroArgParser {
+ fn new() -> MacroArgParser {
+ MacroArgParser {
+ buf: String::new(),
+ is_meta_var: false,
+ last_tok: Token {
+ kind: TokenKind::Eof,
+ span: DUMMY_SP,
+ },
+ start_tok: Token {
+ kind: TokenKind::Eof,
+ span: DUMMY_SP,
+ },
+ result: vec![],
+ }
+ }
+
+ fn set_last_tok(&mut self, tok: &TokenTree) {
+ self.last_tok = last_tok(tok);
+ }
+
+ fn add_separator(&mut self) {
+ let prefix = if self.need_space_prefix() {
+ " ".to_owned()
+ } else {
+ "".to_owned()
+ };
+ self.result.push(ParsedMacroArg {
+ kind: MacroArgKind::Separator(self.buf.clone(), prefix),
+ });
+ self.buf.clear();
+ }
+
+ fn add_other(&mut self) {
+ let prefix = if self.need_space_prefix() {
+ " ".to_owned()
+ } else {
+ "".to_owned()
+ };
+ self.result.push(ParsedMacroArg {
+ kind: MacroArgKind::Other(self.buf.clone(), prefix),
+ });
+ self.buf.clear();
+ }
+
+ fn add_meta_variable(&mut self, iter: &mut Cursor) -> Option<()> {
+ match iter.next() {
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(name, _),
+ ..
+ },
+ _,
+ )) => {
+ self.result.push(ParsedMacroArg {
+ kind: MacroArgKind::MetaVariable(name, self.buf.clone()),
+ });
+
+ self.buf.clear();
+ self.is_meta_var = false;
+ Some(())
+ }
+ _ => None,
+ }
+ }
+
+ fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: Delimiter) {
+ self.result.push(ParsedMacroArg {
+ kind: MacroArgKind::Delimited(delim, inner),
+ });
+ }
+
+ // $($foo: expr),?
+ fn add_repeat(
+ &mut self,
+ inner: Vec<ParsedMacroArg>,
+ delim: Delimiter,
+ iter: &mut Cursor,
+ ) -> Option<()> {
+ let mut buffer = String::new();
+ let mut first = true;
+
+ // Parse '*', '+' or '?'.
+ for tok in iter {
+ self.set_last_tok(&tok);
+ if first {
+ first = false;
+ }
+
+ match tok {
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::BinOp(BinOpToken::Plus),
+ ..
+ },
+ _,
+ )
+ | TokenTree::Token(
+ Token {
+ kind: TokenKind::Question,
+ ..
+ },
+ _,
+ )
+ | TokenTree::Token(
+ Token {
+ kind: TokenKind::BinOp(BinOpToken::Star),
+ ..
+ },
+ _,
+ ) => {
+ break;
+ }
+ TokenTree::Token(ref t, _) => {
+ buffer.push_str(&pprust::token_to_string(t));
+ }
+ _ => return None,
+ }
+ }
+
+ // There could be some random stuff between ')' and '*', '+' or '?'.
+ let another = if buffer.trim().is_empty() {
+ None
+ } else {
+ Some(Box::new(ParsedMacroArg {
+ kind: MacroArgKind::Other(buffer, "".to_owned()),
+ }))
+ };
+
+ self.result.push(ParsedMacroArg {
+ kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok.clone()),
+ });
+ Some(())
+ }
+
+ fn update_buffer(&mut self, t: &Token) {
+ if self.buf.is_empty() {
+ self.start_tok = t.clone();
+ } else {
+ let needs_space = match next_space(&self.last_tok.kind) {
+ SpaceState::Ident => ident_like(t),
+ SpaceState::Punctuation => !ident_like(t),
+ SpaceState::Always => true,
+ SpaceState::Never => false,
+ };
+ if force_space_before(&t.kind) || needs_space {
+ self.buf.push(' ');
+ }
+ }
+
+ self.buf.push_str(&pprust::token_to_string(t));
+ }
+
+ fn need_space_prefix(&self) -> bool {
+ if self.result.is_empty() {
+ return false;
+ }
+
+ let last_arg = self.result.last().unwrap();
+ if let MacroArgKind::MetaVariable(..) = last_arg.kind {
+ if ident_like(&self.start_tok) {
+ return true;
+ }
+ if self.start_tok.kind == TokenKind::Colon {
+ return true;
+ }
+ }
+
+ if force_space_before(&self.start_tok.kind) {
+ return true;
+ }
+
+ false
+ }
+
+ /// Returns a collection of parsed macro def's arguments.
+ fn parse(mut self, tokens: TokenStream) -> Option<Vec<ParsedMacroArg>> {
+ let mut iter = tokens.into_trees();
+
+ while let Some(tok) = iter.next() {
+ match tok {
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::Dollar,
+ span,
+ },
+ _,
+ ) => {
+ // We always want to add a separator before meta variables.
+ if !self.buf.is_empty() {
+ self.add_separator();
+ }
+
+ // Start keeping the name of this metavariable in the buffer.
+ self.is_meta_var = true;
+ self.start_tok = Token {
+ kind: TokenKind::Dollar,
+ span,
+ };
+ }
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::Colon,
+ ..
+ },
+ _,
+ ) if self.is_meta_var => {
+ self.add_meta_variable(&mut iter)?;
+ }
+ TokenTree::Token(ref t, _) => self.update_buffer(t),
+ TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
+ if !self.buf.is_empty() {
+ if next_space(&self.last_tok.kind) == SpaceState::Always {
+ self.add_separator();
+ } else {
+ self.add_other();
+ }
+ }
+
+ // Parse the stuff inside delimiters.
+ let parser = MacroArgParser::new();
+ let delimited_arg = parser.parse(tts.clone())?;
+
+ if self.is_meta_var {
+ self.add_repeat(delimited_arg, delimited, &mut iter)?;
+ self.is_meta_var = false;
+ } else {
+ self.add_delimited(delimited_arg, delimited);
+ }
+ }
+ }
+
+ self.set_last_tok(&tok);
+ }
+
+ // We are left with some stuff in the buffer. Since there is nothing
+ // left to separate, add this as `Other`.
+ if !self.buf.is_empty() {
+ self.add_other();
+ }
+
+ Some(self.result)
+ }
+}
+
+fn wrap_macro_args(
+ context: &RewriteContext<'_>,
+ args: &[ParsedMacroArg],
+ shape: Shape,
+) -> Option<String> {
+ wrap_macro_args_inner(context, args, shape, false)
+ .or_else(|| wrap_macro_args_inner(context, args, shape, true))
+}
+
+fn wrap_macro_args_inner(
+ context: &RewriteContext<'_>,
+ args: &[ParsedMacroArg],
+ shape: Shape,
+ use_multiple_lines: bool,
+) -> Option<String> {
+ let mut result = String::with_capacity(128);
+ let mut iter = args.iter().peekable();
+ let indent_str = shape.indent.to_string_with_newline(context.config);
+
+ while let Some(arg) = iter.next() {
+ result.push_str(&arg.rewrite(context, shape, use_multiple_lines)?);
+
+ if use_multiple_lines
+ && (arg.kind.ends_with_space() || iter.peek().map_or(false, |a| a.kind.has_meta_var()))
+ {
+ if arg.kind.ends_with_space() {
+ result.pop();
+ }
+ result.push_str(&indent_str);
+ } else if let Some(next_arg) = iter.peek() {
+ let space_before_dollar =
+ !arg.kind.ends_with_space() && next_arg.kind.starts_with_dollar();
+ let space_before_brace = next_arg.kind.starts_with_brace();
+ if space_before_dollar || space_before_brace {
+ result.push(' ');
+ }
+ }
+ }
+
+ if !use_multiple_lines && result.len() >= shape.width {
+ None
+ } else {
+ Some(result)
+ }
+}
+
+// This is a bit sketchy. The token rules probably need tweaking, but it works
+// for some common cases. I hope the basic logic is sufficient. Note that the
+// meaning of some tokens is a bit different here from usual Rust, e.g., `*`
+// and `(`/`)` have special meaning.
+//
+// We always try to format on one line.
+// FIXME: Use multiple lines when not everything fits on one line.
+fn format_macro_args(
+ context: &RewriteContext<'_>,
+ token_stream: TokenStream,
+ shape: Shape,
+) -> Option<String> {
+ if !context.config.format_macro_matchers() {
+ let span = span_for_token_stream(&token_stream);
+ return Some(match span {
+ Some(span) => context.snippet(span).to_owned(),
+ None => String::new(),
+ });
+ }
+ let parsed_args = MacroArgParser::new().parse(token_stream)?;
+ wrap_macro_args(context, &parsed_args, shape)
+}
+
+fn span_for_token_stream(token_stream: &TokenStream) -> Option<Span> {
+ token_stream.trees().next().map(|tt| tt.span())
+}
+
+// We should insert a space if the next token is a:
+#[derive(Copy, Clone, PartialEq)]
+enum SpaceState {
+ Never,
+ Punctuation,
+ Ident, // Or ident/literal-like thing.
+ Always,
+}
+
+fn force_space_before(tok: &TokenKind) -> bool {
+ debug!("tok: force_space_before {:?}", tok);
+
+ match tok {
+ TokenKind::Eq
+ | TokenKind::Lt
+ | TokenKind::Le
+ | TokenKind::EqEq
+ | TokenKind::Ne
+ | TokenKind::Ge
+ | TokenKind::Gt
+ | TokenKind::AndAnd
+ | TokenKind::OrOr
+ | TokenKind::Not
+ | TokenKind::Tilde
+ | TokenKind::BinOpEq(_)
+ | TokenKind::At
+ | TokenKind::RArrow
+ | TokenKind::LArrow
+ | TokenKind::FatArrow
+ | TokenKind::BinOp(_)
+ | TokenKind::Pound
+ | TokenKind::Dollar => true,
+ _ => false,
+ }
+}
+
+fn ident_like(tok: &Token) -> bool {
+ matches!(
+ tok.kind,
+ TokenKind::Ident(..) | TokenKind::Literal(..) | TokenKind::Lifetime(_)
+ )
+}
+
+fn next_space(tok: &TokenKind) -> SpaceState {
+ debug!("next_space: {:?}", tok);
+
+ match tok {
+ TokenKind::Not
+ | TokenKind::BinOp(BinOpToken::And)
+ | TokenKind::Tilde
+ | TokenKind::At
+ | TokenKind::Comma
+ | TokenKind::Dot
+ | TokenKind::DotDot
+ | TokenKind::DotDotDot
+ | TokenKind::DotDotEq
+ | TokenKind::Question => SpaceState::Punctuation,
+
+ TokenKind::ModSep
+ | TokenKind::Pound
+ | TokenKind::Dollar
+ | TokenKind::OpenDelim(_)
+ | TokenKind::CloseDelim(_) => SpaceState::Never,
+
+ TokenKind::Literal(..) | TokenKind::Ident(..) | TokenKind::Lifetime(_) => SpaceState::Ident,
+
+ _ => SpaceState::Always,
+ }
+}
+
+/// Tries to convert a macro use into a shorthand try expression. Returns `None`
+/// when the macro is not an instance of `try!` (or parsing the inner expression
+/// failed).
+pub(crate) fn convert_try_mac(
+ mac: &ast::MacCall,
+ context: &RewriteContext<'_>,
+) -> Option<ast::Expr> {
+ let path = &pprust::path_to_string(&mac.path);
+ if path == "try" || path == "r#try" {
+ let ts = mac.args.inner_tokens();
+
+ Some(ast::Expr {
+ id: ast::NodeId::root(), // dummy value
+ kind: ast::ExprKind::Try(parse_expr(context, ts)?),
+ span: mac.span(), // incorrect span, but shouldn't matter too much
+ attrs: ast::AttrVec::new(),
+ tokens: None,
+ })
+ } else {
+ None
+ }
+}
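// Editorial note (not part of the upstream diff): with the `use_try_shorthand`
// option enabled, `rewrite_macro_inner` calls `convert_try_mac` above so that,
// for example (hypothetical snippet),
//
//     let cfg = try!(read_config());
//
// is reformatted as
//
//     let cfg = read_config()?;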
+
+pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> Delimiter {
+ let snippet = context.snippet(mac.span());
+ let paren_pos = snippet.find_uncommented("(").unwrap_or(usize::max_value());
+ let bracket_pos = snippet.find_uncommented("[").unwrap_or(usize::max_value());
+ let brace_pos = snippet.find_uncommented("{").unwrap_or(usize::max_value());
+
+ if paren_pos < bracket_pos && paren_pos < brace_pos {
+ Delimiter::Parenthesis
+ } else if bracket_pos < brace_pos {
+ Delimiter::Bracket
+ } else {
+ Delimiter::Brace
+ }
+}
+
+// A very simple parser that just parses a macros 2.0 definition into its branches.
+// Currently we do not attempt to parse any further than that.
+#[derive(new)]
+struct MacroParser {
+ toks: Cursor,
+}
+
+impl MacroParser {
+ // (`(` ... `)` `=>` `{` ... `}`)*
+ fn parse(&mut self) -> Option<Macro> {
+ let mut branches = vec![];
+ while self.toks.look_ahead(1).is_some() {
+ branches.push(self.parse_branch()?);
+ }
+
+ Some(Macro { branches })
+ }
+
+ // `(` ... `)` `=>` `{` ... `}`
+ fn parse_branch(&mut self) -> Option<MacroBranch> {
+ let tok = self.toks.next()?;
+ let (lo, args_paren_kind) = match tok {
+ TokenTree::Token(..) => return None,
+ TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d),
+ };
+ let args = TokenStream::new(vec![tok]);
+ match self.toks.next()? {
+ TokenTree::Token(
+ Token {
+ kind: TokenKind::FatArrow,
+ ..
+ },
+ _,
+ ) => {}
+ _ => return None,
+ }
+ let (mut hi, body, whole_body) = match self.toks.next()? {
+ TokenTree::Token(..) => return None,
+ TokenTree::Delimited(delimited_span, ..) => {
+ let data = delimited_span.entire().data();
+ (
+ data.hi,
+ Span::new(
+ data.lo + BytePos(1),
+ data.hi - BytePos(1),
+ data.ctxt,
+ data.parent,
+ ),
+ delimited_span.entire(),
+ )
+ }
+ };
+ if let Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Semi,
+ span,
+ },
+ _,
+ )) = self.toks.look_ahead(0)
+ {
+ hi = span.hi();
+ self.toks.next();
+ }
+ Some(MacroBranch {
+ span: mk_sp(lo, hi),
+ args_paren_kind,
+ args,
+ body,
+ whole_body,
+ })
+ }
+}
+
+// A parsed macros 2.0 macro definition.
+struct Macro {
+ branches: Vec<MacroBranch>,
+}
+
+// FIXME: it would be more efficient to use references to the token streams
+// rather than clone them, if we can make the borrowing work out.
+struct MacroBranch {
+ span: Span,
+ args_paren_kind: Delimiter,
+ args: TokenStream,
+ body: Span,
+ whole_body: Span,
+}
+
+impl MacroBranch {
+ fn rewrite(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ multi_branch_style: bool,
+ ) -> Option<String> {
+ // Only attempt to format function-like macros.
+ if self.args_paren_kind != Delimiter::Parenthesis {
+ // FIXME(#1539): implement for non-sugared macros.
+ return None;
+ }
+
+ // 5 = " => {"
+ let mut result = format_macro_args(context, self.args.clone(), shape.sub_width(5)?)?;
+
+ if multi_branch_style {
+ result += " =>";
+ }
+
+ if !context.config.format_macro_bodies() {
+ result += " ";
+ result += context.snippet(self.whole_body);
+ return Some(result);
+ }
+
+ // The macro body is the most interesting part. It might end up as various
+ // AST nodes, but also has special variables (e.g., `$foo`) which can't be
+ // parsed as regular Rust code (and note that these can be escaped using
+ // `$$`). We'll try and format like an AST node, but we'll substitute
+ // variables for new names with the same length first.
+
+ let old_body = context.snippet(self.body).trim();
+ let (body_str, substs) = replace_names(old_body)?;
+ let has_block_body = old_body.starts_with('{');
+
+ let mut config = context.config.clone();
+ config.set().hide_parse_errors(true);
+
+ result += " {";
+
+ let body_indent = if has_block_body {
+ shape.indent
+ } else {
+ shape.indent.block_indent(&config)
+ };
+ let new_width = config.max_width() - body_indent.width();
+ config.set().max_width(new_width);
+
+ // First try to format as items, then as statements.
+ let new_body_snippet = match crate::format_snippet(&body_str, &config, true) {
+ Some(new_body) => new_body,
+ None => {
+ let new_width = new_width + config.tab_spaces();
+ config.set().max_width(new_width);
+ match crate::format_code_block(&body_str, &config, true) {
+ Some(new_body) => new_body,
+ None => return None,
+ }
+ }
+ };
+ let new_body = wrap_str(
+ new_body_snippet.snippet.to_string(),
+ config.max_width(),
+ shape,
+ )?;
+
+ // Indent the body since it is in a block.
+ let indent_str = body_indent.to_string(&config);
+ let mut new_body = LineClasses::new(new_body.trim_end())
+ .enumerate()
+ .fold(
+ (String::new(), true),
+ |(mut s, need_indent), (i, (kind, ref l))| {
+ if !is_empty_line(l)
+ && need_indent
+ && !new_body_snippet.is_line_non_formatted(i + 1)
+ {
+ s += &indent_str;
+ }
+ (s + l + "\n", indent_next_line(kind, l, &config))
+ },
+ )
+ .0;
+
+ // Undo our replacement of macro variables.
+ // FIXME: this could be *much* more efficient.
+ for (old, new) in &substs {
+ if old_body.contains(new) {
+ debug!("rewrite_macro_def: bailing matching variable: `{}`", new);
+ return None;
+ }
+ new_body = new_body.replace(new, old);
+ }
+
+ if has_block_body {
+ result += new_body.trim();
+ } else if !new_body.is_empty() {
+ result += "\n";
+ result += &new_body;
+ result += &shape.indent.to_string(&config);
+ }
+
+ result += "}";
+
+ Some(result)
+ }
+}
+
+/// Format `lazy_static!` from <https://crates.io/crates/lazy_static>.
+///
+/// # Expected syntax
+///
+/// ```text
+/// lazy_static! {
+/// [pub] static ref NAME_1: TYPE_1 = EXPR_1;
+/// [pub] static ref NAME_2: TYPE_2 = EXPR_2;
+/// ...
+/// [pub] static ref NAME_N: TYPE_N = EXPR_N;
+/// }
+/// ```
+fn format_lazy_static(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ts: TokenStream,
+) -> Option<String> {
+ let mut result = String::with_capacity(1024);
+ let nested_shape = shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config);
+
+ result.push_str("lazy_static! {");
+ result.push_str(&nested_shape.indent.to_string_with_newline(context.config));
+
+ let parsed_elems = parse_lazy_static(context, ts)?;
+ let last = parsed_elems.len() - 1;
+ for (i, (vis, id, ty, expr)) in parsed_elems.iter().enumerate() {
+ // Rewrite as a static item.
+ let vis = crate::utils::format_visibility(context, vis);
+ let mut stmt = String::with_capacity(128);
+ stmt.push_str(&format!(
+ "{}static ref {}: {} =",
+ vis,
+ id,
+ ty.rewrite(context, nested_shape)?
+ ));
+ result.push_str(&rewrite_assign_rhs(
+ context,
+ stmt,
+ &*expr,
+ &RhsAssignKind::Expr(&expr.kind, expr.span),
+ nested_shape.sub_width(1)?,
+ )?);
+ result.push(';');
+ if i != last {
+ result.push_str(&nested_shape.indent.to_string_with_newline(context.config));
+ }
+ }
+
+ result.push_str(&shape.indent.to_string_with_newline(context.config));
+ result.push('}');
+
+ Some(result)
+}
+
+fn rewrite_macro_with_items(
+ context: &RewriteContext<'_>,
+ items: &[MacroArg],
+ macro_name: &str,
+ shape: Shape,
+ style: Delimiter,
+ position: MacroPosition,
+ span: Span,
+) -> Option<String> {
+ let (opener, closer) = match style {
+ Delimiter::Parenthesis => ("(", ")"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::Brace => (" {", "}"),
+ _ => return None,
+ };
+ let trailing_semicolon = match style {
+ Delimiter::Parenthesis | Delimiter::Bracket if position == MacroPosition::Item => ";",
+ _ => "",
+ };
+
+ let mut visitor = FmtVisitor::from_context(context);
+ visitor.block_indent = shape.indent.block_indent(context.config);
+ visitor.last_pos = context.snippet_provider.span_after(span, opener.trim());
+ for item in items {
+ let item = match item {
+ MacroArg::Item(item) => item,
+ _ => return None,
+ };
+ visitor.visit_item(item);
+ }
+
+ let mut result = String::with_capacity(256);
+ result.push_str(macro_name);
+ result.push_str(opener);
+ result.push_str(&visitor.block_indent.to_string_with_newline(context.config));
+ result.push_str(visitor.buffer.trim());
+ result.push_str(&shape.indent.to_string_with_newline(context.config));
+ result.push_str(closer);
+ result.push_str(trailing_semicolon);
+ Some(result)
+}
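
To make the metavariable renaming above concrete, here is a simplified, self-contained sketch of what `replace_names` and `register_metavariable` do. It deliberately ignores the comment- and string-aware scanning provided by `CharClasses` and the repeat handling, so it is illustrative only and not the upstream implementation.

    use std::collections::HashMap;

    fn flush_metavar(
        dollars: &mut usize,
        name: &mut String,
        result: &mut String,
        map: &mut HashMap<String, String>,
    ) {
        if *dollars > 0 && !name.is_empty() {
            // `$foo` becomes `zfoo` and `$$foo` becomes `$zfoo`, keeping the length
            // unchanged so the substituted body formats like the original.
            let new = format!("{}z{}", "$".repeat(*dollars - 1), name);
            let old = format!("{}{}", "$".repeat(*dollars), name);
            result.push_str(&new);
            map.insert(old, new);
        }
        *dollars = 0;
        name.clear();
    }

    fn simple_replace_names(input: &str) -> (String, HashMap<String, String>) {
        let (mut result, mut map) = (String::new(), HashMap::new());
        let (mut dollars, mut name) = (0usize, String::new());
        for c in input.chars() {
            if c == '$' && name.is_empty() {
                dollars += 1;
            } else if dollars > 0 && (c.is_alphanumeric() || c == '_') {
                name.push(c);
            } else {
                flush_metavar(&mut dollars, &mut name, &mut result, &mut map);
                result.push(c);
            }
        }
        flush_metavar(&mut dollars, &mut name, &mut result, &mut map);
        (result, map)
    }

    fn main() {
        let (body, map) = simple_replace_names("$x + $y");
        assert_eq!(body, "zx + zy");
        // The map is later used to undo the substitution after formatting.
        assert_eq!(map.get("$x"), Some(&"zx".to_owned()));
    }
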
diff --git a/src/tools/rustfmt/src/matches.rs b/src/tools/rustfmt/src/matches.rs
new file mode 100644
index 000000000..85d9c5d2b
--- /dev/null
+++ b/src/tools/rustfmt/src/matches.rs
@@ -0,0 +1,602 @@
+//! Format match expression.
+
+use std::iter::repeat;
+
+use rustc_ast::{ast, ptr};
+use rustc_span::{BytePos, Span};
+
+use crate::comment::{combine_strs_with_missing_comments, rewrite_comment};
+use crate::config::lists::*;
+use crate::config::{Config, ControlBraceStyle, IndentStyle, MatchArmLeadingPipe, Version};
+use crate::expr::{
+ format_expr, is_empty_block, is_simple_block, is_unsafe_block, prefer_next_line, rewrite_cond,
+ ExprType, RhsTactics,
+};
+use crate::lists::{itemize_list, write_list, ListFormatting};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::utils::{
+ contains_skip, extra_offset, first_line_width, inner_attributes, last_line_extendable, mk_sp,
+ semicolon_for_expr, trimmed_last_line_width, unicode_str_width,
+};
+
+/// A simple wrapper type around `ast::Arm`. Used inside `write_list()`.
+struct ArmWrapper<'a> {
+ arm: &'a ast::Arm,
+ /// `true` if the arm is the last one in the match expression. Used to decide whether we
+ /// should add a trailing comma to the match arm when `config.trailing_comma() == Never`.
+ is_last: bool,
+ /// Holds a byte position of `|` at the beginning of the arm pattern, if available.
+ beginning_vert: Option<BytePos>,
+}
+
+impl<'a> ArmWrapper<'a> {
+ fn new(arm: &'a ast::Arm, is_last: bool, beginning_vert: Option<BytePos>) -> ArmWrapper<'a> {
+ ArmWrapper {
+ arm,
+ is_last,
+ beginning_vert,
+ }
+ }
+}
+
+impl<'a> Spanned for ArmWrapper<'a> {
+ fn span(&self) -> Span {
+ if let Some(lo) = self.beginning_vert {
+ let lo = std::cmp::min(lo, self.arm.span().lo());
+ mk_sp(lo, self.arm.span().hi())
+ } else {
+ self.arm.span()
+ }
+ }
+}
+
+impl<'a> Rewrite for ArmWrapper<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ rewrite_match_arm(
+ context,
+ self.arm,
+ shape,
+ self.is_last,
+ self.beginning_vert.is_some(),
+ )
+ }
+}
+
+pub(crate) fn rewrite_match(
+ context: &RewriteContext<'_>,
+ cond: &ast::Expr,
+ arms: &[ast::Arm],
+ shape: Shape,
+ span: Span,
+ attrs: &[ast::Attribute],
+) -> Option<String> {
+ // Do not take the rhs overhead from the upper expressions into account
+ // when rewriting match condition.
+ let cond_shape = Shape {
+ width: context.budget(shape.used_width()),
+ ..shape
+ };
+ // 6 = `match `
+ let cond_shape = match context.config.indent_style() {
+ IndentStyle::Visual => cond_shape.shrink_left(6)?,
+ IndentStyle::Block => cond_shape.offset_left(6)?,
+ };
+ let cond_str = cond.rewrite(context, cond_shape)?;
+ let alt_block_sep = &shape.indent.to_string_with_newline(context.config);
+ let block_sep = match context.config.control_brace_style() {
+ ControlBraceStyle::AlwaysNextLine => alt_block_sep,
+ _ if last_line_extendable(&cond_str) => " ",
+ // 2 = ` {`
+ _ if cond_str.contains('\n') || cond_str.len() + 2 > cond_shape.width => alt_block_sep,
+ _ => " ",
+ };
+
+ let nested_indent_str = shape
+ .indent
+ .block_indent(context.config)
+ .to_string(context.config);
+ // Inner attributes.
+ let inner_attrs = &inner_attributes(attrs);
+ let inner_attrs_str = if inner_attrs.is_empty() {
+ String::new()
+ } else {
+ inner_attrs
+ .rewrite(context, shape)
+ .map(|s| format!("{}{}\n", nested_indent_str, s))?
+ };
+
+ let open_brace_pos = if inner_attrs.is_empty() {
+ let hi = if arms.is_empty() {
+ span.hi()
+ } else {
+ arms[0].span().lo()
+ };
+ context
+ .snippet_provider
+ .span_after(mk_sp(cond.span.hi(), hi), "{")
+ } else {
+ inner_attrs[inner_attrs.len() - 1].span.hi()
+ };
+
+ if arms.is_empty() {
+ let snippet = context.snippet(mk_sp(open_brace_pos, span.hi() - BytePos(1)));
+ if snippet.trim().is_empty() {
+ Some(format!("match {} {{}}", cond_str))
+ } else {
+ // Empty match with comments or inner attributes? We are not going to bother, sorry ;)
+ Some(context.snippet(span).to_owned())
+ }
+ } else {
+ let span_after_cond = mk_sp(cond.span.hi(), span.hi());
+ Some(format!(
+ "match {}{}{{\n{}{}{}\n{}}}",
+ cond_str,
+ block_sep,
+ inner_attrs_str,
+ nested_indent_str,
+ rewrite_match_arms(context, arms, shape, span_after_cond, open_brace_pos)?,
+ shape.indent.to_string(context.config),
+ ))
+ }
+}
+
+fn arm_comma(config: &Config, body: &ast::Expr, is_last: bool) -> &'static str {
+ if is_last && config.trailing_comma() == SeparatorTactic::Never {
+ ""
+ } else if config.match_block_trailing_comma() {
+ ","
+ } else if let ast::ExprKind::Block(ref block, _) = body.kind {
+ if let ast::BlockCheckMode::Default = block.rules {
+ ""
+ } else {
+ ","
+ }
+ } else {
+ ","
+ }
+}
+
+/// Collect a byte position of the beginning `|` for each arm, if available.
+fn collect_beginning_verts(
+ context: &RewriteContext<'_>,
+ arms: &[ast::Arm],
+) -> Vec<Option<BytePos>> {
+ arms.iter()
+ .map(|a| {
+ context
+ .snippet(a.pat.span)
+ .starts_with('|')
+ .then(|| a.pat.span().lo())
+ })
+ .collect()
+}
+
+fn rewrite_match_arms(
+ context: &RewriteContext<'_>,
+ arms: &[ast::Arm],
+ shape: Shape,
+ span: Span,
+ open_brace_pos: BytePos,
+) -> Option<String> {
+ let arm_shape = shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config);
+
+ let arm_len = arms.len();
+ let is_last_iter = repeat(false)
+ .take(arm_len.saturating_sub(1))
+ .chain(repeat(true));
+ let beginning_verts = collect_beginning_verts(context, arms);
+ let items = itemize_list(
+ context.snippet_provider,
+ arms.iter()
+ .zip(is_last_iter)
+ .zip(beginning_verts.into_iter())
+ .map(|((arm, is_last), beginning_vert)| ArmWrapper::new(arm, is_last, beginning_vert)),
+ "}",
+ "|",
+ |arm| arm.span().lo(),
+ |arm| arm.span().hi(),
+ |arm| arm.rewrite(context, arm_shape),
+ open_brace_pos,
+ span.hi(),
+ false,
+ );
+ let arms_vec: Vec<_> = items.collect();
+ // We will add/remove commas inside `arm.rewrite()`, and hence no separator here.
+ let fmt = ListFormatting::new(arm_shape, context.config)
+ .separator("")
+ .preserve_newline(true);
+
+ write_list(&arms_vec, &fmt)
+}
+
+fn rewrite_match_arm(
+ context: &RewriteContext<'_>,
+ arm: &ast::Arm,
+ shape: Shape,
+ is_last: bool,
+ has_leading_pipe: bool,
+) -> Option<String> {
+ let (missing_span, attrs_str) = if !arm.attrs.is_empty() {
+ if contains_skip(&arm.attrs) {
+ let (_, body) = flatten_arm_body(context, &arm.body, None);
+ // `arm.span()` does not include trailing comma, add it manually.
+ return Some(format!(
+ "{}{}",
+ context.snippet(arm.span()),
+ arm_comma(context.config, body, is_last),
+ ));
+ }
+ let missing_span = mk_sp(arm.attrs[arm.attrs.len() - 1].span.hi(), arm.pat.span.lo());
+ (missing_span, arm.attrs.rewrite(context, shape)?)
+ } else {
+ (mk_sp(arm.span().lo(), arm.span().lo()), String::new())
+ };
+
+ // Leading pipe offset
+ // 2 = `| `
+ let (pipe_offset, pipe_str) = match context.config.match_arm_leading_pipes() {
+ MatchArmLeadingPipe::Never => (0, ""),
+ MatchArmLeadingPipe::Preserve if !has_leading_pipe => (0, ""),
+ MatchArmLeadingPipe::Preserve | MatchArmLeadingPipe::Always => (2, "| "),
+ };
+
+ // Patterns
+ // 5 = ` => {`
+ let pat_shape = shape.sub_width(5)?.offset_left(pipe_offset)?;
+ let pats_str = arm.pat.rewrite(context, pat_shape)?;
+
+ // Guard
+ let block_like_pat = trimmed_last_line_width(&pats_str) <= context.config.tab_spaces();
+ let new_line_guard = pats_str.contains('\n') && !block_like_pat;
+ let guard_str = rewrite_guard(
+ context,
+ &arm.guard,
+ shape,
+ trimmed_last_line_width(&pats_str),
+ new_line_guard,
+ )?;
+
+ let lhs_str = combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ &format!("{}{}{}", pipe_str, pats_str, guard_str),
+ missing_span,
+ shape,
+ false,
+ )?;
+
+ let arrow_span = mk_sp(arm.pat.span.hi(), arm.body.span().lo());
+ rewrite_match_body(
+ context,
+ &arm.body,
+ &lhs_str,
+ shape,
+ guard_str.contains('\n'),
+ arrow_span,
+ is_last,
+ )
+}
+
+fn stmt_is_expr_mac(stmt: &ast::Stmt) -> bool {
+ if let ast::StmtKind::Expr(expr) = &stmt.kind {
+ if let ast::ExprKind::MacCall(_) = &expr.kind {
+ return true;
+ }
+ }
+ false
+}
+
+fn block_can_be_flattened<'a>(
+ context: &RewriteContext<'_>,
+ expr: &'a ast::Expr,
+) -> Option<&'a ast::Block> {
+ match expr.kind {
+ ast::ExprKind::Block(ref block, _)
+ if !is_unsafe_block(block)
+ && !context.inside_macro()
+ && is_simple_block(context, block, Some(&expr.attrs))
+ && !stmt_is_expr_mac(&block.stmts[0]) =>
+ {
+ Some(&*block)
+ }
+ _ => None,
+ }
+}
+
+// (extend, body)
+// @extend: true if the arm body can be put next to `=>`
+// @body: flattened body, if the body is a block with a single expression
+fn flatten_arm_body<'a>(
+ context: &'a RewriteContext<'_>,
+ body: &'a ast::Expr,
+ opt_shape: Option<Shape>,
+) -> (bool, &'a ast::Expr) {
+ let can_extend =
+ |expr| !context.config.force_multiline_blocks() && can_flatten_block_around_this(expr);
+
+ if let Some(block) = block_can_be_flattened(context, body) {
+ if let ast::StmtKind::Expr(ref expr) = block.stmts[0].kind {
+ if let ast::ExprKind::Block(..) = expr.kind {
+ if expr.attrs.is_empty() {
+ flatten_arm_body(context, expr, None)
+ } else {
+ (true, body)
+ }
+ } else {
+ let cond_becomes_multi_line = opt_shape
+ .and_then(|shape| rewrite_cond(context, expr, shape))
+ .map_or(false, |cond| cond.contains('\n'));
+ if cond_becomes_multi_line {
+ (false, &*body)
+ } else {
+ (can_extend(expr), &*expr)
+ }
+ }
+ } else {
+ (false, &*body)
+ }
+ } else {
+ (can_extend(body), &*body)
+ }
+}
+
+fn rewrite_match_body(
+ context: &RewriteContext<'_>,
+ body: &ptr::P<ast::Expr>,
+ pats_str: &str,
+ shape: Shape,
+ has_guard: bool,
+ arrow_span: Span,
+ is_last: bool,
+) -> Option<String> {
+ let (extend, body) = flatten_arm_body(
+ context,
+ body,
+ shape.offset_left(extra_offset(pats_str, shape) + 4),
+ );
+ let (is_block, is_empty_block) = if let ast::ExprKind::Block(ref block, _) = body.kind {
+ (true, is_empty_block(context, block, Some(&body.attrs)))
+ } else {
+ (false, false)
+ };
+
+ let comma = arm_comma(context.config, body, is_last);
+ let alt_block_sep = &shape.indent.to_string_with_newline(context.config);
+
+ let combine_orig_body = |body_str: &str| {
+ let block_sep = match context.config.control_brace_style() {
+ ControlBraceStyle::AlwaysNextLine if is_block => alt_block_sep,
+ _ => " ",
+ };
+
+ Some(format!("{} =>{}{}{}", pats_str, block_sep, body_str, comma))
+ };
+
+ let next_line_indent = if !is_block || is_empty_block {
+ shape.indent.block_indent(context.config)
+ } else {
+ shape.indent
+ };
+
+ let forbid_same_line =
+ (has_guard && pats_str.contains('\n') && !is_empty_block) || !body.attrs.is_empty();
+
+ // Look for comments between `=>` and the start of the body.
+ let arrow_comment = {
+ let arrow_snippet = context.snippet(arrow_span).trim();
+ // search for the arrow starting from the end of the snippet since there may be a match
+ // expression within the guard
+ let arrow_index = arrow_snippet.rfind("=>").unwrap();
+ // 2 = `=>`
+ let comment_str = arrow_snippet[arrow_index + 2..].trim();
+ if comment_str.is_empty() {
+ String::new()
+ } else {
+ rewrite_comment(comment_str, false, shape, context.config)?
+ }
+ };
+
+ let combine_next_line_body = |body_str: &str| {
+ let nested_indent_str = next_line_indent.to_string_with_newline(context.config);
+
+ if is_block {
+ let mut result = pats_str.to_owned();
+ result.push_str(" =>");
+ if !arrow_comment.is_empty() {
+ result.push_str(&nested_indent_str);
+ result.push_str(&arrow_comment);
+ }
+ result.push_str(&nested_indent_str);
+ result.push_str(body_str);
+ result.push_str(comma);
+ return Some(result);
+ }
+
+ let indent_str = shape.indent.to_string_with_newline(context.config);
+ let (body_prefix, body_suffix) =
+ if context.config.match_arm_blocks() && !context.inside_macro() {
+ let comma = if context.config.match_block_trailing_comma() {
+ ","
+ } else {
+ ""
+ };
+ let semicolon = if context.config.version() == Version::One {
+ ""
+ } else {
+ if semicolon_for_expr(context, body) {
+ ";"
+ } else {
+ ""
+ }
+ };
+ ("{", format!("{}{}}}{}", semicolon, indent_str, comma))
+ } else {
+ ("", String::from(","))
+ };
+
+ let block_sep = match context.config.control_brace_style() {
+ ControlBraceStyle::AlwaysNextLine => format!("{}{}", alt_block_sep, body_prefix),
+ _ if body_prefix.is_empty() => "".to_owned(),
+ _ if forbid_same_line || !arrow_comment.is_empty() => {
+ format!("{}{}", alt_block_sep, body_prefix)
+ }
+ _ => format!(" {}", body_prefix),
+ } + &nested_indent_str;
+
+ let mut result = pats_str.to_owned();
+ result.push_str(" =>");
+ if !arrow_comment.is_empty() {
+ result.push_str(&indent_str);
+ result.push_str(&arrow_comment);
+ }
+ result.push_str(&block_sep);
+ result.push_str(body_str);
+ result.push_str(&body_suffix);
+ Some(result)
+ };
+
+ // Let's try and get the arm body on the same line as the condition.
+ // 4 = ` => `.len()
+ let orig_body_shape = shape
+ .offset_left(extra_offset(pats_str, shape) + 4)
+ .and_then(|shape| shape.sub_width(comma.len()));
+ let orig_body = if forbid_same_line || !arrow_comment.is_empty() {
+ None
+ } else if let Some(body_shape) = orig_body_shape {
+ let rewrite = nop_block_collapse(
+ format_expr(body, ExprType::Statement, context, body_shape),
+ body_shape.width,
+ );
+
+ match rewrite {
+ Some(ref body_str)
+ if is_block
+ || (!body_str.contains('\n')
+ && unicode_str_width(body_str) <= body_shape.width) =>
+ {
+ return combine_orig_body(body_str);
+ }
+ _ => rewrite,
+ }
+ } else {
+ None
+ };
+ let orig_budget = orig_body_shape.map_or(0, |shape| shape.width);
+
+ // Try putting body on the next line and see if it looks better.
+ let next_line_body_shape = Shape::indented(next_line_indent, context.config);
+ let next_line_body = nop_block_collapse(
+ format_expr(body, ExprType::Statement, context, next_line_body_shape),
+ next_line_body_shape.width,
+ );
+ match (orig_body, next_line_body) {
+ (Some(ref orig_str), Some(ref next_line_str))
+ if prefer_next_line(orig_str, next_line_str, RhsTactics::Default) =>
+ {
+ combine_next_line_body(next_line_str)
+ }
+ (Some(ref orig_str), _) if extend && first_line_width(orig_str) <= orig_budget => {
+ combine_orig_body(orig_str)
+ }
+ (Some(ref orig_str), Some(ref next_line_str)) if orig_str.contains('\n') => {
+ combine_next_line_body(next_line_str)
+ }
+ (None, Some(ref next_line_str)) => combine_next_line_body(next_line_str),
+ (None, None) => None,
+ (Some(ref orig_str), _) => combine_orig_body(orig_str),
+ }
+}
+
+// The `if ...` guard on a match arm.
+fn rewrite_guard(
+ context: &RewriteContext<'_>,
+ guard: &Option<ptr::P<ast::Expr>>,
+ shape: Shape,
+ // The amount of space used up on this line for the pattern in
+ // the arm (excludes offset).
+ pattern_width: usize,
+ multiline_pattern: bool,
+) -> Option<String> {
+ if let Some(ref guard) = *guard {
+ // First try to fit the guard string on the same line as the pattern.
+ // 4 = ` if `, 5 = ` => {`
+ let cond_shape = shape
+ .offset_left(pattern_width + 4)
+ .and_then(|s| s.sub_width(5));
+ if !multiline_pattern {
+ if let Some(cond_shape) = cond_shape {
+ if let Some(cond_str) = guard.rewrite(context, cond_shape) {
+ if !cond_str.contains('\n') || pattern_width <= context.config.tab_spaces() {
+ return Some(format!(" if {}", cond_str));
+ }
+ }
+ }
+ }
+
+ // Not enough space to put the guard after the pattern, try a newline.
+ // 3 = `if `, 5 = ` => {`
+ let cond_shape = Shape::indented(shape.indent.block_indent(context.config), context.config)
+ .offset_left(3)
+ .and_then(|s| s.sub_width(5));
+ if let Some(cond_shape) = cond_shape {
+ if let Some(cond_str) = guard.rewrite(context, cond_shape) {
+ return Some(format!(
+ "{}if {}",
+ cond_shape.indent.to_string_with_newline(context.config),
+ cond_str
+ ));
+ }
+ }
+
+ None
+ } else {
+ Some(String::new())
+ }
+}
+
+fn nop_block_collapse(block_str: Option<String>, budget: usize) -> Option<String> {
+ debug!("nop_block_collapse {:?} {}", block_str, budget);
+ block_str.map(|block_str| {
+ if block_str.starts_with('{')
+ && budget >= 2
+ && (block_str[1..].find(|c: char| !c.is_whitespace()).unwrap() == block_str.len() - 2)
+ {
+ String::from("{}")
+ } else {
+ block_str
+ }
+ })
+}
+
+fn can_flatten_block_around_this(body: &ast::Expr) -> bool {
+ match body.kind {
+ // We do not allow `if` to stay on the same line, since we could easily mistake
+ // `pat => if cond { ... }` for `pat if cond => { ... }`.
+ ast::ExprKind::If(..) => false,
+ // We do not allow collapsing a block around an expression with a condition,
+ // to avoid cluttering the match arm.
+ ast::ExprKind::ForLoop(..) | ast::ExprKind::While(..) => false,
+ ast::ExprKind::Loop(..)
+ | ast::ExprKind::Match(..)
+ | ast::ExprKind::Block(..)
+ | ast::ExprKind::Closure(..)
+ | ast::ExprKind::Array(..)
+ | ast::ExprKind::Call(..)
+ | ast::ExprKind::MethodCall(..)
+ | ast::ExprKind::MacCall(..)
+ | ast::ExprKind::Struct(..)
+ | ast::ExprKind::Tup(..) => true,
+ ast::ExprKind::AddrOf(_, _, ref expr)
+ | ast::ExprKind::Box(ref expr)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Unary(_, ref expr)
+ | ast::ExprKind::Index(ref expr, _)
+ | ast::ExprKind::Cast(ref expr, _) => can_flatten_block_around_this(expr),
+ _ => false,
+ }
+}
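
To make the arm-flattening logic above concrete, here is a hand-written before/after pair. It is illustrative only (the actual output also depends on options such as `match_arm_blocks` and `force_multiline_blocks`) and was not produced by running rustfmt on this code.

    // A simple block whose only statement is an expression can be flattened so the
    // body sits on the same line as `=>`.
    fn before(x: Option<i32>) -> i32 {
        match x {
            Some(n) => {
                n + 1
            }
            None => 0,
        }
    }

    // After flattening. Note that an `if` body would not be placed on the same line,
    // to avoid `pat => if cond { .. }` being misread as `pat if cond => { .. }`.
    fn after(x: Option<i32>) -> i32 {
        match x {
            Some(n) => n + 1,
            None => 0,
        }
    }

    fn main() {
        assert_eq!(before(Some(1)), after(Some(1)));
        assert_eq!(before(None), after(None));
    }
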
diff --git a/src/tools/rustfmt/src/missed_spans.rs b/src/tools/rustfmt/src/missed_spans.rs
new file mode 100644
index 000000000..28edcb784
--- /dev/null
+++ b/src/tools/rustfmt/src/missed_spans.rs
@@ -0,0 +1,363 @@
+use rustc_span::{BytePos, Pos, Span};
+
+use crate::comment::{is_last_comment_block, rewrite_comment, CodeCharKind, CommentCodeSlices};
+use crate::config::file_lines::FileLines;
+use crate::config::FileName;
+use crate::config::Version;
+use crate::coverage::transform_missing_snippet;
+use crate::shape::{Indent, Shape};
+use crate::source_map::LineRangeUtils;
+use crate::utils::{count_lf_crlf, count_newlines, last_line_width, mk_sp};
+use crate::visitor::FmtVisitor;
+
+struct SnippetStatus {
+ /// An offset to the current line from the beginning of the original snippet.
+ line_start: usize,
+ /// The length of trailing whitespace on the current line.
+ last_wspace: Option<usize>,
+ /// The current line number.
+ cur_line: usize,
+}
+
+impl SnippetStatus {
+ fn new(cur_line: usize) -> Self {
+ SnippetStatus {
+ line_start: 0,
+ last_wspace: None,
+ cur_line,
+ }
+ }
+}
+
+impl<'a> FmtVisitor<'a> {
+ fn output_at_start(&self) -> bool {
+ self.buffer.is_empty()
+ }
+
+ pub(crate) fn format_missing(&mut self, end: BytePos) {
+ // HACK(topecongiro): we use `format_missing()` to extract a missing comment between
+ // a macro (or similar) and a trailing semicolon. Here we just try to avoid calling
+ // `format_missing_inner` in the common case where there is no such comment.
+ // This is a hack, ideally we should fix a possible bug in `format_missing_inner`
+ // or refactor `visit_mac` and `rewrite_macro`, but this should suffice to fix the
+ // issue (#2727).
+ let missing_snippet = self.snippet(mk_sp(self.last_pos, end));
+ if missing_snippet.trim() == ";" {
+ self.push_str(";");
+ self.last_pos = end;
+ return;
+ }
+ self.format_missing_inner(end, |this, last_snippet, _| this.push_str(last_snippet))
+ }
+
+ pub(crate) fn format_missing_with_indent(&mut self, end: BytePos) {
+ self.format_missing_indent(end, true)
+ }
+
+ pub(crate) fn format_missing_no_indent(&mut self, end: BytePos) {
+ self.format_missing_indent(end, false)
+ }
+
+ fn format_missing_indent(&mut self, end: BytePos, should_indent: bool) {
+ let config = self.config;
+ self.format_missing_inner(end, |this, last_snippet, snippet| {
+ this.push_str(last_snippet.trim_end());
+ if last_snippet == snippet && !this.output_at_start() {
+ // No new lines in the snippet.
+ this.push_str("\n");
+ }
+ if should_indent {
+ let indent = this.block_indent.to_string(config);
+ this.push_str(&indent);
+ }
+ })
+ }
+
+ fn format_missing_inner<F: Fn(&mut FmtVisitor<'_>, &str, &str)>(
+ &mut self,
+ end: BytePos,
+ process_last_snippet: F,
+ ) {
+ let start = self.last_pos;
+
+ if start == end {
+ // Do nothing if this is the beginning of the file.
+ if !self.output_at_start() {
+ process_last_snippet(self, "", "");
+ }
+ return;
+ }
+
+ assert!(
+ start < end,
+ "Request to format inverted span: {}",
+ self.parse_sess.span_to_debug_info(mk_sp(start, end)),
+ );
+
+ self.last_pos = end;
+ let span = mk_sp(start, end);
+ let snippet = self.snippet(span);
+
+        // Do nothing for whitespace at the beginning of the file
+ if start == BytePos(0) && end.0 as usize == snippet.len() && snippet.trim().is_empty() {
+ return;
+ }
+
+ if snippet.trim().is_empty() && !out_of_file_lines_range!(self, span) {
+ // Keep vertical spaces within range.
+ self.push_vertical_spaces(count_newlines(snippet));
+ process_last_snippet(self, "", snippet);
+ } else {
+ self.write_snippet(span, &process_last_snippet);
+ }
+ }
+
+ fn push_vertical_spaces(&mut self, mut newline_count: usize) {
+ let offset = self.buffer.chars().rev().take_while(|c| *c == '\n').count();
+ let newline_upper_bound = self.config.blank_lines_upper_bound() + 1;
+ let newline_lower_bound = self.config.blank_lines_lower_bound() + 1;
+
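+        // Clamp the count so that, together with the trailing newlines already in the
+        // buffer, the emitted blank lines stay within the configured lower and upper bounds.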
+ if newline_count + offset > newline_upper_bound {
+ if offset >= newline_upper_bound {
+ newline_count = 0;
+ } else {
+ newline_count = newline_upper_bound - offset;
+ }
+ } else if newline_count + offset < newline_lower_bound {
+ if offset >= newline_lower_bound {
+ newline_count = 0;
+ } else {
+ newline_count = newline_lower_bound - offset;
+ }
+ }
+
+ let blank_lines = "\n".repeat(newline_count);
+ self.push_str(&blank_lines);
+ }
+
+ fn write_snippet<F>(&mut self, span: Span, process_last_snippet: F)
+ where
+ F: Fn(&mut FmtVisitor<'_>, &str, &str),
+ {
+ // Get a snippet from the file start to the span's hi without allocating.
+ // We need it to determine what precedes the current comment. If the comment
+ // follows code on the same line, we won't touch it.
+ let big_span_lo = self.snippet_provider.start_pos();
+ let big_snippet = self.snippet_provider.entire_snippet();
+ let big_diff = (span.lo() - big_span_lo).to_usize();
+
+ let snippet = self.snippet(span);
+
+ debug!("write_snippet `{}`", snippet);
+
+ self.write_snippet_inner(big_snippet, snippet, big_diff, span, process_last_snippet);
+ }
+
+ fn write_snippet_inner<F>(
+ &mut self,
+ big_snippet: &str,
+ old_snippet: &str,
+ big_diff: usize,
+ span: Span,
+ process_last_snippet: F,
+ ) where
+ F: Fn(&mut FmtVisitor<'_>, &str, &str),
+ {
+ // Trim whitespace from the right hand side of each line.
+ // Annoyingly, the library functions for splitting by lines etc. are not
+ // quite right, so we must do it ourselves.
+ let line = self.parse_sess.line_of_byte_pos(span.lo());
+ let file_name = &self.parse_sess.span_to_filename(span);
+ let mut status = SnippetStatus::new(line);
+
+ let snippet = &*transform_missing_snippet(self.config, old_snippet);
+
+ let slice_within_file_lines_range =
+ |file_lines: FileLines, cur_line, s| -> (usize, usize, bool) {
+ let (lf_count, crlf_count) = count_lf_crlf(s);
+ let newline_count = lf_count + crlf_count;
+ let within_file_lines_range = file_lines.contains_range(
+ file_name,
+ cur_line,
+ // if a newline character is at the end of the slice, then the number of
+ // newlines needs to be decreased by 1 so that the range checked against
+ // the file_lines is the visual range one would expect.
+ cur_line + newline_count - if s.ends_with('\n') { 1 } else { 0 },
+ );
+ (lf_count, crlf_count, within_file_lines_range)
+ };
+ for (kind, offset, subslice) in CommentCodeSlices::new(snippet) {
+ debug!("{:?}: {:?}", kind, subslice);
+
+ let (lf_count, crlf_count, within_file_lines_range) =
+ slice_within_file_lines_range(self.config.file_lines(), status.cur_line, subslice);
+ let newline_count = lf_count + crlf_count;
+ if CodeCharKind::Comment == kind && within_file_lines_range {
+ // 1: comment.
+ self.process_comment(
+ &mut status,
+ snippet,
+ &big_snippet[..(offset + big_diff)],
+ offset,
+ subslice,
+ );
+ } else if subslice.trim().is_empty() && newline_count > 0 && within_file_lines_range {
+ // 2: blank lines.
+ self.push_vertical_spaces(newline_count);
+ status.cur_line += newline_count;
+ status.line_start = offset + lf_count + crlf_count * 2;
+ } else {
+ // 3: code which we failed to format or which is not within file-lines range.
+ self.process_missing_code(&mut status, snippet, subslice, offset, file_name);
+ }
+ }
+
+ let last_snippet = &snippet[status.line_start..];
+ let (_, _, within_file_lines_range) =
+ slice_within_file_lines_range(self.config.file_lines(), status.cur_line, last_snippet);
+ if within_file_lines_range {
+ process_last_snippet(self, last_snippet, snippet);
+ } else {
+ // just append what's left
+ self.push_str(last_snippet);
+ }
+ }
+
+ fn process_comment(
+ &mut self,
+ status: &mut SnippetStatus,
+ snippet: &str,
+ big_snippet: &str,
+ offset: usize,
+ subslice: &str,
+ ) {
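+        // Find the last non-blank character in the source preceding this comment; if it
+        // is `{` or a newline (or there is none), the comment starts its own line and
+        // gets block indentation.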
+ let last_char = big_snippet
+ .chars()
+ .rev()
+ .find(|rev_c| ![' ', '\t'].contains(rev_c));
+
+ let fix_indent = last_char.map_or(true, |rev_c| ['{', '\n'].contains(&rev_c));
+ let mut on_same_line = false;
+
+ let comment_indent = if fix_indent {
+ if let Some('{') = last_char {
+ self.push_str("\n");
+ }
+ let indent_str = self.block_indent.to_string(self.config);
+ self.push_str(&indent_str);
+ self.block_indent
+ } else if self.config.version() == Version::Two && !snippet.starts_with('\n') {
+ // The comment appears on the same line as the previous formatted code.
+            // Assuming that the comment is logically associated with that code, we want to
+            // keep it at the same level and avoid mixing it with any other comments.
+ on_same_line = true;
+ self.push_str(" ");
+ self.block_indent
+ } else {
+ self.push_str(" ");
+ Indent::from_width(self.config, last_line_width(&self.buffer))
+ };
+
+ let comment_width = ::std::cmp::min(
+ self.config.comment_width(),
+ self.config.max_width() - self.block_indent.width(),
+ );
+ let comment_shape = Shape::legacy(comment_width, comment_indent);
+
+ if on_same_line {
+ match subslice.find('\n') {
+ None => {
+ self.push_str(subslice);
+ }
+ Some(offset) if offset + 1 == subslice.len() => {
+ self.push_str(&subslice[..offset]);
+ }
+ Some(offset) => {
+                    // Keep the first line as is: if it were too long and wrapped, it might get
+                    // mixed with the other lines.
+ let first_line = &subslice[..offset];
+ self.push_str(first_line);
+ self.push_str(&comment_indent.to_string_with_newline(self.config));
+
+ let other_lines = &subslice[offset + 1..];
+ let comment_str =
+ rewrite_comment(other_lines, false, comment_shape, self.config)
+ .unwrap_or_else(|| String::from(other_lines));
+ self.push_str(&comment_str);
+ }
+ }
+ } else {
+ let comment_str = rewrite_comment(subslice, false, comment_shape, self.config)
+ .unwrap_or_else(|| String::from(subslice));
+ self.push_str(&comment_str);
+ }
+
+ status.last_wspace = None;
+ status.line_start = offset + subslice.len();
+
+ // Add a newline:
+ // - if there isn't one already
+ // - otherwise, only if the last line is a line comment
+ if status.line_start <= snippet.len() {
+ match snippet[status.line_start..]
+ .chars()
+ // skip trailing whitespaces
+ .find(|c| !(*c == ' ' || *c == '\t'))
+ {
+ Some('\n') | Some('\r') => {
+ if !is_last_comment_block(subslice) {
+ self.push_str("\n");
+ }
+ }
+ _ => self.push_str("\n"),
+ }
+ }
+
+ status.cur_line += count_newlines(subslice);
+ }
+
+ fn process_missing_code(
+ &mut self,
+ status: &mut SnippetStatus,
+ snippet: &str,
+ subslice: &str,
+ offset: usize,
+ file_name: &FileName,
+ ) {
+ for (mut i, c) in subslice.char_indices() {
+ i += offset;
+
+ if c == '\n' {
+ let skip_this_line = !self
+ .config
+ .file_lines()
+ .contains_line(file_name, status.cur_line);
+ if skip_this_line {
+ status.last_wspace = None;
+ }
+
+ if let Some(lw) = status.last_wspace {
+ self.push_str(&snippet[status.line_start..lw]);
+ self.push_str("\n");
+ status.last_wspace = None;
+ } else {
+ self.push_str(&snippet[status.line_start..=i]);
+ }
+
+ status.cur_line += 1;
+ status.line_start = i + 1;
+ } else if c.is_whitespace() && status.last_wspace.is_none() {
+ status.last_wspace = Some(i);
+ } else {
+ status.last_wspace = None;
+ }
+ }
+
+ let remaining = snippet[status.line_start..subslice.len() + offset].trim();
+ if !remaining.is_empty() {
+ self.push_str(&self.block_indent.to_string(self.config));
+ self.push_str(remaining);
+ status.line_start = subslice.len() + offset;
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/modules.rs b/src/tools/rustfmt/src/modules.rs
new file mode 100644
index 000000000..81da72432
--- /dev/null
+++ b/src/tools/rustfmt/src/modules.rs
@@ -0,0 +1,577 @@
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+
+use rustc_ast::ast;
+use rustc_ast::visit::Visitor;
+use rustc_span::symbol::{self, sym, Symbol};
+use rustc_span::Span;
+use thiserror::Error;
+
+use crate::attr::MetaVisitor;
+use crate::config::FileName;
+use crate::items::is_mod_decl;
+use crate::parse::parser::{
+ Directory, DirectoryOwnership, ModError, ModulePathSuccess, Parser, ParserError,
+};
+use crate::parse::session::ParseSess;
+use crate::utils::{contains_skip, mk_sp};
+
+mod visitor;
+
+type FileModMap<'ast> = BTreeMap<FileName, Module<'ast>>;
+
+/// Represents a module with its inner attributes.
+#[derive(Debug, Clone)]
+pub(crate) struct Module<'a> {
+ ast_mod_kind: Option<Cow<'a, ast::ModKind>>,
+ pub(crate) items: Cow<'a, Vec<rustc_ast::ptr::P<ast::Item>>>,
+ inner_attr: Vec<ast::Attribute>,
+ pub(crate) span: Span,
+}
+
+impl<'a> Module<'a> {
+ pub(crate) fn new(
+ mod_span: Span,
+ ast_mod_kind: Option<Cow<'a, ast::ModKind>>,
+ mod_items: Cow<'a, Vec<rustc_ast::ptr::P<ast::Item>>>,
+ mod_attrs: Cow<'a, Vec<ast::Attribute>>,
+ ) -> Self {
+ let inner_attr = mod_attrs
+ .iter()
+ .filter(|attr| attr.style == ast::AttrStyle::Inner)
+ .cloned()
+ .collect();
+ Module {
+ items: mod_items,
+ inner_attr,
+ span: mod_span,
+ ast_mod_kind,
+ }
+ }
+
+ pub(crate) fn attrs(&self) -> &[ast::Attribute] {
+ &self.inner_attr
+ }
+}
+
+/// Maps each module to the corresponding file.
+pub(crate) struct ModResolver<'ast, 'sess> {
+ parse_sess: &'sess ParseSess,
+ directory: Directory,
+ file_map: FileModMap<'ast>,
+ recursive: bool,
+}
+
+/// Represents errors while trying to resolve modules.
+#[derive(Debug, Error)]
+#[error("failed to resolve mod `{module}`: {kind}")]
+pub struct ModuleResolutionError {
+ pub(crate) module: String,
+ pub(crate) kind: ModuleResolutionErrorKind,
+}
+
+/// Defines variants similar to those of [rustc_expand::module::ModError]
+#[derive(Debug, Error)]
+pub(crate) enum ModuleResolutionErrorKind {
+    /// The file was found but cannot be parsed.
+ #[error("cannot parse {file}")]
+ ParseError { file: PathBuf },
+ /// File cannot be found.
+ #[error("{file} does not exist")]
+ NotFound { file: PathBuf },
+    /// Files `a.rs` and `a/mod.rs` both exist for the same module.
+ #[error("file for module found at both {default_path:?} and {secondary_path:?}")]
+ MultipleCandidates {
+ default_path: PathBuf,
+ secondary_path: PathBuf,
+ },
+}
+
+#[derive(Clone)]
+enum SubModKind<'a, 'ast> {
+ /// `mod foo;`
+ External(PathBuf, DirectoryOwnership, Module<'ast>),
+ /// `mod foo;` with multiple sources.
+ MultiExternal(Vec<(PathBuf, DirectoryOwnership, Module<'ast>)>),
+ /// `mod foo {}`
+ Internal(&'a ast::Item),
+}
+
+impl<'ast, 'sess, 'c> ModResolver<'ast, 'sess> {
+ /// Creates a new `ModResolver`.
+ pub(crate) fn new(
+ parse_sess: &'sess ParseSess,
+ directory_ownership: DirectoryOwnership,
+ recursive: bool,
+ ) -> Self {
+ ModResolver {
+ directory: Directory {
+ path: PathBuf::new(),
+ ownership: directory_ownership,
+ },
+ file_map: BTreeMap::new(),
+ parse_sess,
+ recursive,
+ }
+ }
+
+ /// Creates a map that maps a file name to the module in AST.
+ pub(crate) fn visit_crate(
+ mut self,
+ krate: &'ast ast::Crate,
+ ) -> Result<FileModMap<'ast>, ModuleResolutionError> {
+ let root_filename = self.parse_sess.span_to_filename(krate.spans.inner_span);
+ self.directory.path = match root_filename {
+ FileName::Real(ref p) => p.parent().unwrap_or(Path::new("")).to_path_buf(),
+ _ => PathBuf::new(),
+ };
+
+ // Skip visiting sub modules when the input is from stdin.
+ if self.recursive {
+ self.visit_mod_from_ast(&krate.items)?;
+ }
+
+ let snippet_provider = self.parse_sess.snippet_provider(krate.spans.inner_span);
+
+ self.file_map.insert(
+ root_filename,
+ Module::new(
+ mk_sp(snippet_provider.start_pos(), snippet_provider.end_pos()),
+ None,
+ Cow::Borrowed(&krate.items),
+ Cow::Borrowed(&krate.attrs),
+ ),
+ );
+ Ok(self.file_map)
+ }
+
+ /// Visit `cfg_if` macro and look for module declarations.
+ fn visit_cfg_if(&mut self, item: Cow<'ast, ast::Item>) -> Result<(), ModuleResolutionError> {
+ let mut visitor = visitor::CfgIfVisitor::new(self.parse_sess);
+ visitor.visit_item(&item);
+ for module_item in visitor.mods() {
+ if let ast::ItemKind::Mod(_, ref sub_mod_kind) = module_item.item.kind {
+ self.visit_sub_mod(
+ &module_item.item,
+ Module::new(
+ module_item.item.span,
+ Some(Cow::Owned(sub_mod_kind.clone())),
+ Cow::Owned(vec![]),
+ Cow::Owned(vec![]),
+ ),
+ )?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Visit modules defined inside macro calls.
+ fn visit_mod_outside_ast(
+ &mut self,
+ items: Vec<rustc_ast::ptr::P<ast::Item>>,
+ ) -> Result<(), ModuleResolutionError> {
+ for item in items {
+ if is_cfg_if(&item) {
+ self.visit_cfg_if(Cow::Owned(item.into_inner()))?;
+ continue;
+ }
+
+ if let ast::ItemKind::Mod(_, ref sub_mod_kind) = item.kind {
+ let span = item.span;
+ self.visit_sub_mod(
+ &item,
+ Module::new(
+ span,
+ Some(Cow::Owned(sub_mod_kind.clone())),
+ Cow::Owned(vec![]),
+ Cow::Owned(vec![]),
+ ),
+ )?;
+ }
+ }
+ Ok(())
+ }
+
+ /// Visit modules from AST.
+ fn visit_mod_from_ast(
+ &mut self,
+ items: &'ast [rustc_ast::ptr::P<ast::Item>],
+ ) -> Result<(), ModuleResolutionError> {
+ for item in items {
+ if is_cfg_if(item) {
+ self.visit_cfg_if(Cow::Borrowed(item))?;
+ }
+
+ if let ast::ItemKind::Mod(_, ref sub_mod_kind) = item.kind {
+ let span = item.span;
+ self.visit_sub_mod(
+ item,
+ Module::new(
+ span,
+ Some(Cow::Borrowed(sub_mod_kind)),
+ Cow::Owned(vec![]),
+ Cow::Borrowed(&item.attrs),
+ ),
+ )?;
+ }
+ }
+ Ok(())
+ }
+
+ fn visit_sub_mod(
+ &mut self,
+ item: &'c ast::Item,
+ sub_mod: Module<'ast>,
+ ) -> Result<(), ModuleResolutionError> {
+ let old_directory = self.directory.clone();
+ let sub_mod_kind = self.peek_sub_mod(item, &sub_mod)?;
+ if let Some(sub_mod_kind) = sub_mod_kind {
+ self.insert_sub_mod(sub_mod_kind.clone())?;
+ self.visit_sub_mod_inner(sub_mod, sub_mod_kind)?;
+ }
+ self.directory = old_directory;
+ Ok(())
+ }
+
+    /// Inspects the given sub-module, which we are about to visit, and returns its kind.
+ fn peek_sub_mod(
+ &self,
+ item: &'c ast::Item,
+ sub_mod: &Module<'ast>,
+ ) -> Result<Option<SubModKind<'c, 'ast>>, ModuleResolutionError> {
+ if contains_skip(&item.attrs) {
+ return Ok(None);
+ }
+
+ if is_mod_decl(item) {
+ // mod foo;
+ // Look for an extern file.
+ self.find_external_module(item.ident, &item.attrs, sub_mod)
+ } else {
+ // An internal module (`mod foo { /* ... */ }`);
+ Ok(Some(SubModKind::Internal(item)))
+ }
+ }
+
+ fn insert_sub_mod(
+ &mut self,
+ sub_mod_kind: SubModKind<'c, 'ast>,
+ ) -> Result<(), ModuleResolutionError> {
+ match sub_mod_kind {
+ SubModKind::External(mod_path, _, sub_mod) => {
+ self.file_map
+ .entry(FileName::Real(mod_path))
+ .or_insert(sub_mod);
+ }
+ SubModKind::MultiExternal(mods) => {
+ for (mod_path, _, sub_mod) in mods {
+ self.file_map
+ .entry(FileName::Real(mod_path))
+ .or_insert(sub_mod);
+ }
+ }
+ _ => (),
+ }
+ Ok(())
+ }
+
+ fn visit_sub_mod_inner(
+ &mut self,
+ sub_mod: Module<'ast>,
+ sub_mod_kind: SubModKind<'c, 'ast>,
+ ) -> Result<(), ModuleResolutionError> {
+ match sub_mod_kind {
+ SubModKind::External(mod_path, directory_ownership, sub_mod) => {
+ let directory = Directory {
+ path: mod_path.parent().unwrap().to_path_buf(),
+ ownership: directory_ownership,
+ };
+ self.visit_sub_mod_after_directory_update(sub_mod, Some(directory))
+ }
+ SubModKind::Internal(item) => {
+ self.push_inline_mod_directory(item.ident, &item.attrs);
+ self.visit_sub_mod_after_directory_update(sub_mod, None)
+ }
+ SubModKind::MultiExternal(mods) => {
+ for (mod_path, directory_ownership, sub_mod) in mods {
+ let directory = Directory {
+ path: mod_path.parent().unwrap().to_path_buf(),
+ ownership: directory_ownership,
+ };
+ self.visit_sub_mod_after_directory_update(sub_mod, Some(directory))?;
+ }
+ Ok(())
+ }
+ }
+ }
+
+ fn visit_sub_mod_after_directory_update(
+ &mut self,
+ sub_mod: Module<'ast>,
+ directory: Option<Directory>,
+ ) -> Result<(), ModuleResolutionError> {
+ if let Some(directory) = directory {
+ self.directory = directory;
+ }
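+        // Items borrowed from the original AST are visited by reference, while items that
+        // were parsed outside the AST (e.g. from an external file) are visited by value.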
+ match (sub_mod.ast_mod_kind, sub_mod.items) {
+ (Some(Cow::Borrowed(ast::ModKind::Loaded(items, _, _))), _) => {
+ self.visit_mod_from_ast(items)
+ }
+ (Some(Cow::Owned(ast::ModKind::Loaded(items, _, _))), _) | (_, Cow::Owned(items)) => {
+ self.visit_mod_outside_ast(items)
+ }
+ (_, _) => Ok(()),
+ }
+ }
+
+ /// Find a file path in the filesystem which corresponds to the given module.
+ fn find_external_module(
+ &self,
+ mod_name: symbol::Ident,
+ attrs: &[ast::Attribute],
+ sub_mod: &Module<'ast>,
+ ) -> Result<Option<SubModKind<'c, 'ast>>, ModuleResolutionError> {
+ let relative = match self.directory.ownership {
+ DirectoryOwnership::Owned { relative } => relative,
+ DirectoryOwnership::UnownedViaBlock => None,
+ };
+ if let Some(path) = Parser::submod_path_from_attr(attrs, &self.directory.path) {
+ if self.parse_sess.is_file_parsed(&path) {
+ return Ok(None);
+ }
+ return match Parser::parse_file_as_module(self.parse_sess, &path, sub_mod.span) {
+ Ok((ref attrs, _, _)) if contains_skip(attrs) => Ok(None),
+ Ok((attrs, items, span)) => Ok(Some(SubModKind::External(
+ path,
+ DirectoryOwnership::Owned { relative: None },
+ Module::new(
+ span,
+ Some(Cow::Owned(ast::ModKind::Unloaded)),
+ Cow::Owned(items),
+ Cow::Owned(attrs),
+ ),
+ ))),
+ Err(ParserError::ParseError) => Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::ParseError { file: path },
+ }),
+ Err(..) => Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::NotFound { file: path },
+ }),
+ };
+ }
+
+ // Look for nested path, like `#[cfg_attr(feature = "foo", path = "bar.rs")]`.
+ let mut mods_outside_ast = self.find_mods_outside_of_ast(attrs, sub_mod);
+
+ match self
+ .parse_sess
+ .default_submod_path(mod_name, relative, &self.directory.path)
+ {
+ Ok(ModulePathSuccess {
+ file_path,
+ dir_ownership,
+ ..
+ }) => {
+ let outside_mods_empty = mods_outside_ast.is_empty();
+ let should_insert = !mods_outside_ast
+ .iter()
+ .any(|(outside_path, _, _)| outside_path == &file_path);
+ if self.parse_sess.is_file_parsed(&file_path) {
+ if outside_mods_empty {
+ return Ok(None);
+ } else {
+ if should_insert {
+ mods_outside_ast.push((file_path, dir_ownership, sub_mod.clone()));
+ }
+ return Ok(Some(SubModKind::MultiExternal(mods_outside_ast)));
+ }
+ }
+ match Parser::parse_file_as_module(self.parse_sess, &file_path, sub_mod.span) {
+ Ok((ref attrs, _, _)) if contains_skip(attrs) => Ok(None),
+ Ok((attrs, items, span)) if outside_mods_empty => {
+ Ok(Some(SubModKind::External(
+ file_path,
+ dir_ownership,
+ Module::new(
+ span,
+ Some(Cow::Owned(ast::ModKind::Unloaded)),
+ Cow::Owned(items),
+ Cow::Owned(attrs),
+ ),
+ )))
+ }
+ Ok((attrs, items, span)) => {
+ mods_outside_ast.push((
+ file_path.clone(),
+ dir_ownership,
+ Module::new(
+ span,
+ Some(Cow::Owned(ast::ModKind::Unloaded)),
+ Cow::Owned(items),
+ Cow::Owned(attrs),
+ ),
+ ));
+ if should_insert {
+ mods_outside_ast.push((file_path, dir_ownership, sub_mod.clone()));
+ }
+ Ok(Some(SubModKind::MultiExternal(mods_outside_ast)))
+ }
+ Err(ParserError::ParseError) => Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::ParseError { file: file_path },
+ }),
+ Err(..) if outside_mods_empty => Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::NotFound { file: file_path },
+ }),
+ Err(..) => {
+ if should_insert {
+ mods_outside_ast.push((file_path, dir_ownership, sub_mod.clone()));
+ }
+ Ok(Some(SubModKind::MultiExternal(mods_outside_ast)))
+ }
+ }
+ }
+ Err(mod_err) if !mods_outside_ast.is_empty() => {
+ if let ModError::ParserError(e) = mod_err {
+ e.cancel();
+ }
+ Ok(Some(SubModKind::MultiExternal(mods_outside_ast)))
+ }
+ Err(e) => match e {
+ ModError::FileNotFound(_, default_path, _secondary_path) => {
+ Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::NotFound { file: default_path },
+ })
+ }
+ ModError::MultipleCandidates(_, default_path, secondary_path) => {
+ Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::MultipleCandidates {
+ default_path,
+ secondary_path,
+ },
+ })
+ }
+ ModError::ParserError(_)
+ | ModError::CircularInclusion(_)
+ | ModError::ModInBlock(_) => Err(ModuleResolutionError {
+ module: mod_name.to_string(),
+ kind: ModuleResolutionErrorKind::ParseError {
+ file: self.directory.path.clone(),
+ },
+ }),
+ },
+ }
+ }
+
+ fn push_inline_mod_directory(&mut self, id: symbol::Ident, attrs: &[ast::Attribute]) {
+ if let Some(path) = find_path_value(attrs) {
+ self.directory.path.push(path.as_str());
+ self.directory.ownership = DirectoryOwnership::Owned { relative: None };
+ } else {
+ let id = id.as_str();
+ // We have to push on the current module name in the case of relative
+ // paths in order to ensure that any additional module paths from inline
+ // `mod x { ... }` come after the relative extension.
+ //
+ // For example, a `mod z { ... }` inside `x/y.rs` should set the current
+ // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
+ if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
+ if let Some(ident) = relative.take() {
+ // remove the relative offset
+ self.directory.path.push(ident.as_str());
+
+ // In the case where there is an x.rs and an ./x directory we want
+ // to prevent adding x twice. For example, ./x/x
+ if self.directory.path.exists() && !self.directory.path.join(id).exists() {
+ return;
+ }
+ }
+ }
+ self.directory.path.push(id);
+ }
+ }
+
+ fn find_mods_outside_of_ast(
+ &self,
+ attrs: &[ast::Attribute],
+ sub_mod: &Module<'ast>,
+ ) -> Vec<(PathBuf, DirectoryOwnership, Module<'ast>)> {
+ // Filter nested path, like `#[cfg_attr(feature = "foo", path = "bar.rs")]`.
+ let mut path_visitor = visitor::PathVisitor::default();
+ for attr in attrs.iter() {
+ if let Some(meta) = attr.meta() {
+ path_visitor.visit_meta_item(&meta)
+ }
+ }
+ let mut result = vec![];
+ for path in path_visitor.paths() {
+ let mut actual_path = self.directory.path.clone();
+ actual_path.push(&path);
+ if !actual_path.exists() {
+ continue;
+ }
+ if self.parse_sess.is_file_parsed(&actual_path) {
+ // If the specified file is already parsed, then we just use that.
+ result.push((
+ actual_path,
+ DirectoryOwnership::Owned { relative: None },
+ sub_mod.clone(),
+ ));
+ continue;
+ }
+ let (attrs, items, span) =
+ match Parser::parse_file_as_module(self.parse_sess, &actual_path, sub_mod.span) {
+ Ok((ref attrs, _, _)) if contains_skip(attrs) => continue,
+ Ok(m) => m,
+ Err(..) => continue,
+ };
+
+ result.push((
+ actual_path,
+ DirectoryOwnership::Owned { relative: None },
+ Module::new(
+ span,
+ Some(Cow::Owned(ast::ModKind::Unloaded)),
+ Cow::Owned(items),
+ Cow::Owned(attrs),
+ ),
+ ))
+ }
+ result
+ }
+}
+
+fn path_value(attr: &ast::Attribute) -> Option<Symbol> {
+ if attr.has_name(sym::path) {
+ attr.value_str()
+ } else {
+ None
+ }
+}
+
+// N.B., even when there are multiple `#[path = ...]` attributes, we just need to
+// examine the first one, since rustc ignores the second and the subsequent ones
+// as unused attributes.
+fn find_path_value(attrs: &[ast::Attribute]) -> Option<Symbol> {
+ attrs.iter().flat_map(path_value).next()
+}
+
+fn is_cfg_if(item: &ast::Item) -> bool {
+ match item.kind {
+ ast::ItemKind::MacCall(ref mac) => {
+ if let Some(first_segment) = mac.path.segments.first() {
+ if first_segment.ident.name == Symbol::intern("cfg_if") {
+ return true;
+ }
+ }
+ false
+ }
+ _ => false,
+ }
+}
diff --git a/src/tools/rustfmt/src/modules/visitor.rs b/src/tools/rustfmt/src/modules/visitor.rs
new file mode 100644
index 000000000..ea67977c1
--- /dev/null
+++ b/src/tools/rustfmt/src/modules/visitor.rs
@@ -0,0 +1,108 @@
+use rustc_ast::ast;
+use rustc_ast::visit::Visitor;
+use rustc_span::Symbol;
+
+use crate::attr::MetaVisitor;
+use crate::parse::macros::cfg_if::parse_cfg_if;
+use crate::parse::session::ParseSess;
+
+pub(crate) struct ModItem {
+ pub(crate) item: ast::Item,
+}
+
+/// Traverses a `cfg_if!` macro and fetches the modules declared inside it.
+pub(crate) struct CfgIfVisitor<'a> {
+ parse_sess: &'a ParseSess,
+ mods: Vec<ModItem>,
+}
+
+impl<'a> CfgIfVisitor<'a> {
+ pub(crate) fn new(parse_sess: &'a ParseSess) -> CfgIfVisitor<'a> {
+ CfgIfVisitor {
+ mods: vec![],
+ parse_sess,
+ }
+ }
+
+ pub(crate) fn mods(self) -> Vec<ModItem> {
+ self.mods
+ }
+}
+
+impl<'a, 'ast: 'a> Visitor<'ast> for CfgIfVisitor<'a> {
+ fn visit_mac_call(&mut self, mac: &'ast ast::MacCall) {
+ match self.visit_mac_inner(mac) {
+ Ok(()) => (),
+ Err(e) => debug!("{}", e),
+ }
+ }
+}
+
+impl<'a, 'ast: 'a> CfgIfVisitor<'a> {
+ fn visit_mac_inner(&mut self, mac: &'ast ast::MacCall) -> Result<(), &'static str> {
+ // Support both:
+ // ```
+ // extern crate cfg_if;
+ // cfg_if::cfg_if! {..}
+ // ```
+ // And:
+ // ```
+ // #[macro_use]
+ // extern crate cfg_if;
+ // cfg_if! {..}
+ // ```
+ match mac.path.segments.first() {
+ Some(first_segment) => {
+ if first_segment.ident.name != Symbol::intern("cfg_if") {
+ return Err("Expected cfg_if");
+ }
+ }
+ None => {
+ return Err("Expected cfg_if");
+ }
+ };
+
+ let items = parse_cfg_if(self.parse_sess, mac)?;
+ self.mods
+ .append(&mut items.into_iter().map(|item| ModItem { item }).collect());
+
+ Ok(())
+ }
+}
+
+/// Extracts `path = "foo.rs"` from attributes.
+#[derive(Default)]
+pub(crate) struct PathVisitor {
+    /// A list of paths defined in attributes.
+ paths: Vec<String>,
+}
+
+impl PathVisitor {
+ pub(crate) fn paths(self) -> Vec<String> {
+ self.paths
+ }
+}
+
+impl<'ast> MetaVisitor<'ast> for PathVisitor {
+ fn visit_meta_name_value(&mut self, meta_item: &'ast ast::MetaItem, lit: &'ast ast::Lit) {
+ if meta_item.has_name(Symbol::intern("path")) && lit.kind.is_str() {
+ self.paths.push(lit_to_str(lit));
+ }
+ }
+}
+
+#[cfg(not(windows))]
+fn lit_to_str(lit: &ast::Lit) -> String {
+ match lit.kind {
+ ast::LitKind::Str(symbol, ..) => symbol.to_string(),
+ _ => unreachable!(),
+ }
+}
+
+#[cfg(windows)]
+fn lit_to_str(lit: &ast::Lit) -> String {
+ match lit.kind {
+ ast::LitKind::Str(symbol, ..) => symbol.as_str().replace("/", "\\"),
+ _ => unreachable!(),
+ }
+}
diff --git a/src/tools/rustfmt/src/overflow.rs b/src/tools/rustfmt/src/overflow.rs
new file mode 100644
index 000000000..6bf8cd0c7
--- /dev/null
+++ b/src/tools/rustfmt/src/overflow.rs
@@ -0,0 +1,785 @@
+//! Rewrite a list of items with overflow.
+
+use std::cmp::min;
+
+use itertools::Itertools;
+use rustc_ast::token::Delimiter;
+use rustc_ast::{ast, ptr};
+use rustc_span::Span;
+
+use crate::closures;
+use crate::config::lists::*;
+use crate::config::Version;
+use crate::expr::{
+ can_be_overflowed_expr, is_every_expr_simple, is_method_call, is_nested_call, is_simple_expr,
+ rewrite_cond,
+};
+use crate::lists::{
+ definitive_tactic, itemize_list, write_list, ListFormatting, ListItem, Separator,
+};
+use crate::macros::MacroArg;
+use crate::patterns::{can_be_overflowed_pat, TuplePatField};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::types::{can_be_overflowed_type, SegmentParam};
+use crate::utils::{count_newlines, extra_offset, first_line_width, last_line_width, mk_sp};
+
+/// A list of `format!`-like macros that take a long format string and a list of arguments to
+/// format.
+///
+/// Organized as a list of `(&str, usize)` tuples, giving the name of the macro and the number of
+/// arguments before the format string (none for `format!("format", ...)`, one for `assert!(result,
+/// "format", ...)`, two for `assert_eq!(left, right, "format", ...)`).
+const SPECIAL_CASE_MACROS: &[(&str, usize)] = &[
+ // format! like macros
+ // From the Rust Standard Library.
+ ("eprint!", 0),
+ ("eprintln!", 0),
+ ("format!", 0),
+ ("format_args!", 0),
+ ("print!", 0),
+ ("println!", 0),
+ ("panic!", 0),
+ ("unreachable!", 0),
+ // From the `log` crate.
+ ("debug!", 0),
+ ("error!", 0),
+ ("info!", 0),
+ ("warn!", 0),
+ // write! like macros
+ ("assert!", 1),
+ ("debug_assert!", 1),
+ ("write!", 1),
+ ("writeln!", 1),
+ // assert_eq! like macros
+ ("assert_eq!", 2),
+ ("assert_ne!", 2),
+ ("debug_assert_eq!", 2),
+ ("debug_assert_ne!", 2),
+];
+
+const SPECIAL_CASE_ATTR: &[(&str, usize)] = &[
+ // From the `failure` crate.
+ ("fail", 0),
+];
+
+#[derive(Debug)]
+pub(crate) enum OverflowableItem<'a> {
+ Expr(&'a ast::Expr),
+ GenericParam(&'a ast::GenericParam),
+ MacroArg(&'a MacroArg),
+ NestedMetaItem(&'a ast::NestedMetaItem),
+ SegmentParam(&'a SegmentParam<'a>),
+ FieldDef(&'a ast::FieldDef),
+ TuplePatField(&'a TuplePatField<'a>),
+ Ty(&'a ast::Ty),
+ Pat(&'a ast::Pat),
+}
+
+impl<'a> Rewrite for OverflowableItem<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ self.map(|item| item.rewrite(context, shape))
+ }
+}
+
+impl<'a> Spanned for OverflowableItem<'a> {
+ fn span(&self) -> Span {
+ self.map(|item| item.span())
+ }
+}
+
+impl<'a> OverflowableItem<'a> {
+ fn has_attrs(&self) -> bool {
+ match self {
+ OverflowableItem::Expr(ast::Expr { attrs, .. })
+ | OverflowableItem::GenericParam(ast::GenericParam { attrs, .. }) => !attrs.is_empty(),
+ OverflowableItem::FieldDef(ast::FieldDef { attrs, .. }) => !attrs.is_empty(),
+ OverflowableItem::MacroArg(MacroArg::Expr(expr)) => !expr.attrs.is_empty(),
+ OverflowableItem::MacroArg(MacroArg::Item(item)) => !item.attrs.is_empty(),
+ _ => false,
+ }
+ }
+
+ pub(crate) fn map<F, T>(&self, f: F) -> T
+ where
+ F: Fn(&dyn IntoOverflowableItem<'a>) -> T,
+ {
+ match self {
+ OverflowableItem::Expr(expr) => f(*expr),
+ OverflowableItem::GenericParam(gp) => f(*gp),
+ OverflowableItem::MacroArg(macro_arg) => f(*macro_arg),
+ OverflowableItem::NestedMetaItem(nmi) => f(*nmi),
+ OverflowableItem::SegmentParam(sp) => f(*sp),
+ OverflowableItem::FieldDef(sf) => f(*sf),
+ OverflowableItem::TuplePatField(pat) => f(*pat),
+ OverflowableItem::Ty(ty) => f(*ty),
+ OverflowableItem::Pat(pat) => f(*pat),
+ }
+ }
+
+ pub(crate) fn is_simple(&self) -> bool {
+ match self {
+ OverflowableItem::Expr(expr) => is_simple_expr(expr),
+ OverflowableItem::MacroArg(MacroArg::Keyword(..)) => true,
+ OverflowableItem::MacroArg(MacroArg::Expr(expr)) => is_simple_expr(expr),
+ OverflowableItem::NestedMetaItem(nested_meta_item) => match nested_meta_item {
+ ast::NestedMetaItem::Literal(..) => true,
+ ast::NestedMetaItem::MetaItem(ref meta_item) => {
+ matches!(meta_item.kind, ast::MetaItemKind::Word)
+ }
+ },
+ _ => false,
+ }
+ }
+
+ pub(crate) fn is_expr(&self) -> bool {
+ matches!(
+ self,
+ OverflowableItem::Expr(..) | OverflowableItem::MacroArg(MacroArg::Expr(..))
+ )
+ }
+
+ pub(crate) fn is_nested_call(&self) -> bool {
+ match self {
+ OverflowableItem::Expr(expr) => is_nested_call(expr),
+ OverflowableItem::MacroArg(MacroArg::Expr(expr)) => is_nested_call(expr),
+ _ => false,
+ }
+ }
+
+ pub(crate) fn to_expr(&self) -> Option<&'a ast::Expr> {
+ match self {
+ OverflowableItem::Expr(expr) => Some(expr),
+ OverflowableItem::MacroArg(MacroArg::Expr(ref expr)) => Some(expr),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn can_be_overflowed(&self, context: &RewriteContext<'_>, len: usize) -> bool {
+ match self {
+ OverflowableItem::Expr(expr) => can_be_overflowed_expr(context, expr, len),
+ OverflowableItem::MacroArg(macro_arg) => match macro_arg {
+ MacroArg::Expr(ref expr) => can_be_overflowed_expr(context, expr, len),
+ MacroArg::Ty(ref ty) => can_be_overflowed_type(context, ty, len),
+ MacroArg::Pat(..) => false,
+ MacroArg::Item(..) => len == 1,
+ MacroArg::Keyword(..) => false,
+ },
+ OverflowableItem::NestedMetaItem(nested_meta_item) if len == 1 => {
+ match nested_meta_item {
+ ast::NestedMetaItem::Literal(..) => false,
+ ast::NestedMetaItem::MetaItem(..) => true,
+ }
+ }
+ OverflowableItem::SegmentParam(SegmentParam::Type(ty)) => {
+ can_be_overflowed_type(context, ty, len)
+ }
+ OverflowableItem::TuplePatField(pat) => can_be_overflowed_pat(context, pat, len),
+ OverflowableItem::Ty(ty) => can_be_overflowed_type(context, ty, len),
+ _ => false,
+ }
+ }
+
+ fn special_cases(&self) -> &'static [(&'static str, usize)] {
+ match self {
+ OverflowableItem::MacroArg(..) => SPECIAL_CASE_MACROS,
+ OverflowableItem::NestedMetaItem(..) => SPECIAL_CASE_ATTR,
+ _ => &[],
+ }
+ }
+}
+
+pub(crate) trait IntoOverflowableItem<'a>: Rewrite + Spanned {
+ fn into_overflowable_item(&'a self) -> OverflowableItem<'a>;
+}
+
+impl<'a, T: 'a + IntoOverflowableItem<'a>> IntoOverflowableItem<'a> for ptr::P<T> {
+ fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
+ (**self).into_overflowable_item()
+ }
+}
+
+macro_rules! impl_into_overflowable_item_for_ast_node {
+ ($($ast_node:ident),*) => {
+ $(
+ impl<'a> IntoOverflowableItem<'a> for ast::$ast_node {
+ fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
+ OverflowableItem::$ast_node(self)
+ }
+ }
+ )*
+ }
+}
+
+macro_rules! impl_into_overflowable_item_for_rustfmt_types {
+ ([$($ty:ident),*], [$($ty_with_lifetime:ident),*]) => {
+ $(
+ impl<'a> IntoOverflowableItem<'a> for $ty {
+ fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
+ OverflowableItem::$ty(self)
+ }
+ }
+ )*
+ $(
+ impl<'a> IntoOverflowableItem<'a> for $ty_with_lifetime<'a> {
+ fn into_overflowable_item(&'a self) -> OverflowableItem<'a> {
+ OverflowableItem::$ty_with_lifetime(self)
+ }
+ }
+ )*
+ }
+}
+
+impl_into_overflowable_item_for_ast_node!(Expr, GenericParam, NestedMetaItem, FieldDef, Ty, Pat);
+impl_into_overflowable_item_for_rustfmt_types!([MacroArg], [SegmentParam, TuplePatField]);
+
+pub(crate) fn into_overflowable_list<'a, T>(
+ iter: impl Iterator<Item = &'a T>,
+) -> impl Iterator<Item = OverflowableItem<'a>>
+where
+ T: 'a + IntoOverflowableItem<'a>,
+{
+ iter.map(|x| IntoOverflowableItem::into_overflowable_item(x))
+}
+
+pub(crate) fn rewrite_with_parens<'a, T: 'a + IntoOverflowableItem<'a>>(
+ context: &'a RewriteContext<'_>,
+ ident: &'a str,
+ items: impl Iterator<Item = &'a T>,
+ shape: Shape,
+ span: Span,
+ item_max_width: usize,
+ force_separator_tactic: Option<SeparatorTactic>,
+) -> Option<String> {
+ Context::new(
+ context,
+ items,
+ ident,
+ shape,
+ span,
+ "(",
+ ")",
+ item_max_width,
+ force_separator_tactic,
+ None,
+ )
+ .rewrite(shape)
+}
+
+pub(crate) fn rewrite_with_angle_brackets<'a, T: 'a + IntoOverflowableItem<'a>>(
+ context: &'a RewriteContext<'_>,
+ ident: &'a str,
+ items: impl Iterator<Item = &'a T>,
+ shape: Shape,
+ span: Span,
+) -> Option<String> {
+ Context::new(
+ context,
+ items,
+ ident,
+ shape,
+ span,
+ "<",
+ ">",
+ context.config.max_width(),
+ None,
+ None,
+ )
+ .rewrite(shape)
+}
+
+pub(crate) fn rewrite_with_square_brackets<'a, T: 'a + IntoOverflowableItem<'a>>(
+ context: &'a RewriteContext<'_>,
+ name: &'a str,
+ items: impl Iterator<Item = &'a T>,
+ shape: Shape,
+ span: Span,
+ force_separator_tactic: Option<SeparatorTactic>,
+ delim_token: Option<Delimiter>,
+) -> Option<String> {
+ let (lhs, rhs) = match delim_token {
+ Some(Delimiter::Parenthesis) => ("(", ")"),
+ Some(Delimiter::Brace) => ("{", "}"),
+ _ => ("[", "]"),
+ };
+ Context::new(
+ context,
+ items,
+ name,
+ shape,
+ span,
+ lhs,
+ rhs,
+ context.config.array_width(),
+ force_separator_tactic,
+ Some(("[", "]")),
+ )
+ .rewrite(shape)
+}
+
+struct Context<'a> {
+ context: &'a RewriteContext<'a>,
+ items: Vec<OverflowableItem<'a>>,
+ ident: &'a str,
+ prefix: &'static str,
+ suffix: &'static str,
+ one_line_shape: Shape,
+ nested_shape: Shape,
+ span: Span,
+ item_max_width: usize,
+ one_line_width: usize,
+ force_separator_tactic: Option<SeparatorTactic>,
+ custom_delims: Option<(&'a str, &'a str)>,
+}
+
+impl<'a> Context<'a> {
+ fn new<T: 'a + IntoOverflowableItem<'a>>(
+ context: &'a RewriteContext<'_>,
+ items: impl Iterator<Item = &'a T>,
+ ident: &'a str,
+ shape: Shape,
+ span: Span,
+ prefix: &'static str,
+ suffix: &'static str,
+ item_max_width: usize,
+ force_separator_tactic: Option<SeparatorTactic>,
+ custom_delims: Option<(&'a str, &'a str)>,
+ ) -> Context<'a> {
+ let used_width = extra_offset(ident, shape);
+        // 2 = `()`
+ let one_line_width = shape.width.saturating_sub(used_width + 2);
+
+ // 1 = "(" or ")"
+ let one_line_shape = shape
+ .offset_left(last_line_width(ident) + 1)
+ .and_then(|shape| shape.sub_width(1))
+ .unwrap_or(Shape { width: 0, ..shape });
+ let nested_shape = shape_from_indent_style(context, shape, used_width + 2, used_width + 1);
+ Context {
+ context,
+ items: into_overflowable_list(items).collect(),
+ ident,
+ one_line_shape,
+ nested_shape,
+ span,
+ prefix,
+ suffix,
+ item_max_width,
+ one_line_width,
+ force_separator_tactic,
+ custom_delims,
+ }
+ }
+
+ fn last_item(&self) -> Option<&OverflowableItem<'_>> {
+ self.items.last()
+ }
+
+ fn items_span(&self) -> Span {
+ let span_lo = self
+ .context
+ .snippet_provider
+ .span_after(self.span, self.prefix);
+ mk_sp(span_lo, self.span.hi())
+ }
+
+ fn rewrite_last_item_with_overflow(
+ &self,
+ last_list_item: &mut ListItem,
+ shape: Shape,
+ ) -> Option<String> {
+ let last_item = self.last_item()?;
+ let rewrite = match last_item {
+ OverflowableItem::Expr(expr) => {
+ match expr.kind {
+                    // When overflowing a closure that consists of a single control-flow
+                    // expression, force the use of a block if its condition spans multiple lines.
+ ast::ExprKind::Closure(..) => {
+ // If the argument consists of multiple closures, we do not overflow
+ // the last closure.
+ if closures::args_have_many_closure(&self.items) {
+ None
+ } else {
+ closures::rewrite_last_closure(self.context, expr, shape)
+ }
+ }
+
+                    // When overflowing an expression that consists of a control-flow
+                    // expression, avoid letting its condition span multiple lines.
+ ast::ExprKind::If(..)
+ | ast::ExprKind::ForLoop(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::While(..)
+ | ast::ExprKind::Match(..) => {
+ let multi_line = rewrite_cond(self.context, expr, shape)
+ .map_or(false, |cond| cond.contains('\n'));
+
+ if multi_line {
+ None
+ } else {
+ expr.rewrite(self.context, shape)
+ }
+ }
+
+ _ => expr.rewrite(self.context, shape),
+ }
+ }
+ item => item.rewrite(self.context, shape),
+ };
+
+ if let Some(rewrite) = rewrite {
+ // splitn(2, *).next().unwrap() is always safe.
+ let rewrite_first_line = Some(rewrite.splitn(2, '\n').next().unwrap().to_owned());
+ last_list_item.item = rewrite_first_line;
+ Some(rewrite)
+ } else {
+ None
+ }
+ }
+
+ fn default_tactic(&self, list_items: &[ListItem]) -> DefinitiveListTactic {
+ definitive_tactic(
+ list_items,
+ ListTactic::LimitedHorizontalVertical(self.item_max_width),
+ Separator::Comma,
+ self.one_line_width,
+ )
+ }
+
+ fn try_overflow_last_item(&self, list_items: &mut Vec<ListItem>) -> DefinitiveListTactic {
+ // 1 = "("
+ let combine_arg_with_callee = self.items.len() == 1
+ && self.items[0].is_expr()
+ && !self.items[0].has_attrs()
+ && self.ident.len() < self.context.config.tab_spaces();
+ let overflow_last = combine_arg_with_callee || can_be_overflowed(self.context, &self.items);
+
+        // Replace the last item with its first line to see if it fits alongside
+        // the preceding arguments.
+ let placeholder = if overflow_last {
+ let old_value = self.context.force_one_line_chain.get();
+ match self.last_item() {
+ Some(OverflowableItem::Expr(expr))
+ if !combine_arg_with_callee && is_method_call(expr) =>
+ {
+ self.context.force_one_line_chain.replace(true);
+ }
+ Some(OverflowableItem::MacroArg(MacroArg::Expr(expr)))
+ if !combine_arg_with_callee
+ && is_method_call(expr)
+ && self.context.config.version() == Version::Two =>
+ {
+ self.context.force_one_line_chain.replace(true);
+ }
+ _ => (),
+ }
+ let result = last_item_shape(
+ &self.items,
+ list_items,
+ self.one_line_shape,
+ self.item_max_width,
+ )
+ .and_then(|arg_shape| {
+ self.rewrite_last_item_with_overflow(
+ &mut list_items[self.items.len() - 1],
+ arg_shape,
+ )
+ });
+ self.context.force_one_line_chain.replace(old_value);
+ result
+ } else {
+ None
+ };
+
+ let mut tactic = definitive_tactic(
+ &*list_items,
+ ListTactic::LimitedHorizontalVertical(self.item_max_width),
+ Separator::Comma,
+ self.one_line_width,
+ );
+
+ // Replace the stub with the full overflowing last argument if the rewrite
+ // succeeded and its first line fits with the other arguments.
+ match (overflow_last, tactic, placeholder) {
+ (true, DefinitiveListTactic::Horizontal, Some(ref overflowed))
+ if self.items.len() == 1 =>
+ {
+                // When we are rewriting a nested function call, we restrict the
+                // budget for the inner function to avoid it being deeply nested.
+                // However, when the inner function has a prefix or a suffix
+                // (e.g., `foo() as u32`), this budget reduction may produce poorly
+                // formatted code, where a prefix or a suffix is left on its own
+                // line. Here we explicitly check for those cases.
+ if count_newlines(overflowed) == 1 {
+ let rw = self
+ .items
+ .last()
+ .and_then(|last_item| last_item.rewrite(self.context, self.nested_shape));
+ let no_newline = rw.as_ref().map_or(false, |s| !s.contains('\n'));
+ if no_newline {
+ list_items[self.items.len() - 1].item = rw;
+ } else {
+ list_items[self.items.len() - 1].item = Some(overflowed.to_owned());
+ }
+ } else {
+ list_items[self.items.len() - 1].item = Some(overflowed.to_owned());
+ }
+ }
+ (true, DefinitiveListTactic::Horizontal, placeholder @ Some(..)) => {
+ list_items[self.items.len() - 1].item = placeholder;
+ }
+ _ if !self.items.is_empty() => {
+ list_items[self.items.len() - 1].item = self
+ .items
+ .last()
+ .and_then(|last_item| last_item.rewrite(self.context, self.nested_shape));
+
+ // Use horizontal layout for a function with a single argument as long as
+ // everything fits in a single line.
+ // `self.one_line_width == 0` means vertical layout is forced.
+ if self.items.len() == 1
+ && self.one_line_width != 0
+ && !list_items[0].has_comment()
+ && !list_items[0].inner_as_ref().contains('\n')
+ && crate::lists::total_item_width(&list_items[0]) <= self.one_line_width
+ {
+ tactic = DefinitiveListTactic::Horizontal;
+ } else {
+ tactic = self.default_tactic(list_items);
+
+ if tactic == DefinitiveListTactic::Vertical {
+ if let Some((all_simple, num_args_before)) =
+ maybe_get_args_offset(self.ident, &self.items)
+ {
+ let one_line = all_simple
+ && definitive_tactic(
+ &list_items[..num_args_before],
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ self.nested_shape.width,
+ ) == DefinitiveListTactic::Horizontal
+ && definitive_tactic(
+ &list_items[num_args_before + 1..],
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ self.nested_shape.width,
+ ) == DefinitiveListTactic::Horizontal;
+
+ if one_line {
+ tactic = DefinitiveListTactic::SpecialMacro(num_args_before);
+ };
+ } else if is_every_expr_simple(&self.items)
+ && no_long_items(
+ list_items,
+ self.context.config.short_array_element_width_threshold(),
+ )
+ {
+ tactic = DefinitiveListTactic::Mixed;
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+
+ tactic
+ }
+
+ fn rewrite_items(&self) -> Option<(bool, String)> {
+ let span = self.items_span();
+ let items = itemize_list(
+ self.context.snippet_provider,
+ self.items.iter(),
+ self.suffix,
+ ",",
+ |item| item.span().lo(),
+ |item| item.span().hi(),
+ |item| item.rewrite(self.context, self.nested_shape),
+ span.lo(),
+ span.hi(),
+ true,
+ );
+ let mut list_items: Vec<_> = items.collect();
+
+ // Try letting the last argument overflow to the next line with block
+ // indentation. If its first line fits on one line with the other arguments,
+ // we format the function arguments horizontally.
+ let tactic = self.try_overflow_last_item(&mut list_items);
+ let trailing_separator = if let Some(tactic) = self.force_separator_tactic {
+ tactic
+ } else if !self.context.use_block_indent() {
+ SeparatorTactic::Never
+ } else {
+ self.context.config.trailing_comma()
+ };
+ let ends_with_newline = match tactic {
+ DefinitiveListTactic::Vertical | DefinitiveListTactic::Mixed => {
+ self.context.use_block_indent()
+ }
+ _ => false,
+ };
+
+ let fmt = ListFormatting::new(self.nested_shape, self.context.config)
+ .tactic(tactic)
+ .trailing_separator(trailing_separator)
+ .ends_with_newline(ends_with_newline);
+
+ write_list(&list_items, &fmt)
+ .map(|items_str| (tactic == DefinitiveListTactic::Horizontal, items_str))
+ }
+
+ fn wrap_items(&self, items_str: &str, shape: Shape, is_extendable: bool) -> String {
+ let shape = Shape {
+ width: shape.width.saturating_sub(last_line_width(self.ident)),
+ ..shape
+ };
+
+ let (prefix, suffix) = match self.custom_delims {
+ Some((lhs, rhs)) => (lhs, rhs),
+ _ => (self.prefix, self.suffix),
+ };
+
+ let extend_width = if items_str.is_empty() {
+ 2
+ } else {
+ first_line_width(items_str) + 1
+ };
+ let nested_indent_str = self
+ .nested_shape
+ .indent
+ .to_string_with_newline(self.context.config);
+ let indent_str = shape
+ .block()
+ .indent
+ .to_string_with_newline(self.context.config);
+ let mut result = String::with_capacity(
+ self.ident.len() + items_str.len() + 2 + indent_str.len() + nested_indent_str.len(),
+ );
+ result.push_str(self.ident);
+ result.push_str(prefix);
+ let force_single_line = if self.context.config.version() == Version::Two {
+ !self.context.use_block_indent() || (is_extendable && extend_width <= shape.width)
+ } else {
+ // 2 = `()`
+ let fits_one_line = items_str.len() + 2 <= shape.width;
+ !self.context.use_block_indent()
+ || (self.context.inside_macro() && !items_str.contains('\n') && fits_one_line)
+ || (is_extendable && extend_width <= shape.width)
+ };
+ if force_single_line {
+ result.push_str(items_str);
+ } else {
+ if !items_str.is_empty() {
+ result.push_str(&nested_indent_str);
+ result.push_str(items_str);
+ }
+ result.push_str(&indent_str);
+ }
+ result.push_str(suffix);
+ result
+ }
+
+ fn rewrite(&self, shape: Shape) -> Option<String> {
+ let (extendable, items_str) = self.rewrite_items()?;
+
+ // If we are using visual indent style and failed to format, retry with block indent.
+ if !self.context.use_block_indent()
+ && need_block_indent(&items_str, self.nested_shape)
+ && !extendable
+ {
+ self.context.use_block.replace(true);
+ let result = self.rewrite(shape);
+ self.context.use_block.replace(false);
+ return result;
+ }
+
+ Some(self.wrap_items(&items_str, shape, extendable))
+ }
+}
+
+fn need_block_indent(s: &str, shape: Shape) -> bool {
+ s.lines().skip(1).any(|s| {
+ s.find(|c| !char::is_whitespace(c))
+ .map_or(false, |w| w + 1 < shape.indent.width())
+ })
+}
+
+fn can_be_overflowed(context: &RewriteContext<'_>, items: &[OverflowableItem<'_>]) -> bool {
+ items
+ .last()
+ .map_or(false, |x| x.can_be_overflowed(context, items.len()))
+}
+
+/// Returns a shape for the last argument which is going to be overflowed.
+fn last_item_shape(
+ lists: &[OverflowableItem<'_>],
+ items: &[ListItem],
+ shape: Shape,
+ args_max_width: usize,
+) -> Option<Shape> {
+ if items.len() == 1 && !lists.get(0)?.is_nested_call() {
+ return Some(shape);
+ }
+ let offset = items
+ .iter()
+ .dropping_back(1)
+ .map(|i| {
+ // 2 = ", "
+ 2 + i.inner_as_ref().len()
+ })
+ .sum();
+ Shape {
+ width: min(args_max_width, shape.width),
+ ..shape
+ }
+ .offset_left(offset)
+}
+
+fn shape_from_indent_style(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ overhead: usize,
+ offset: usize,
+) -> Shape {
+ let (shape, overhead) = if context.use_block_indent() {
+ let shape = shape
+ .block()
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config);
+ (shape, 1) // 1 = ","
+ } else {
+ (shape.visual_indent(offset), overhead)
+ };
+ Shape {
+ width: shape.width.saturating_sub(overhead),
+ ..shape
+ }
+}
+
+fn no_long_items(list: &[ListItem], short_array_element_width_threshold: usize) -> bool {
+ list.iter()
+ .all(|item| item.inner_as_ref().len() <= short_array_element_width_threshold)
+}
+
+/// In case special-case style is required, returns an offset from which we start horizontal layout.
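+///
+/// For example, for `assert_eq!(left, right, "{}", arg)` the returned offset is 2,
+/// since two arguments precede the format string.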
+pub(crate) fn maybe_get_args_offset(
+ callee_str: &str,
+ args: &[OverflowableItem<'_>],
+) -> Option<(bool, usize)> {
+ if let Some(&(_, num_args_before)) = args
+ .get(0)?
+ .special_cases()
+ .iter()
+ .find(|&&(s, _)| s == callee_str)
+ {
+ let all_simple = args.len() > num_args_before
+ && is_every_expr_simple(&args[0..num_args_before])
+ && is_every_expr_simple(&args[num_args_before + 1..]);
+
+ Some((all_simple, num_args_before))
+ } else {
+ None
+ }
+}
diff --git a/src/tools/rustfmt/src/pairs.rs b/src/tools/rustfmt/src/pairs.rs
new file mode 100644
index 000000000..d1c75126e
--- /dev/null
+++ b/src/tools/rustfmt/src/pairs.rs
@@ -0,0 +1,318 @@
+use rustc_ast::ast;
+
+use crate::config::lists::*;
+use crate::config::IndentStyle;
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::utils::{
+ first_line_width, is_single_line, last_line_width, trimmed_last_line_width, wrap_str,
+};
+
+/// Sigils that decorate a binop pair.
+#[derive(new, Clone, Copy)]
+pub(crate) struct PairParts<'a> {
+ prefix: &'a str,
+ infix: &'a str,
+ suffix: &'a str,
+}
+
+impl<'a> PairParts<'a> {
+ pub(crate) fn infix(infix: &'a str) -> PairParts<'a> {
+ PairParts {
+ prefix: "",
+ infix,
+ suffix: "",
+ }
+ }
+}
+
+// Flattens a tree of pairs into a list and tries to rewrite them all at once.
+// FIXME would be nice to reuse the lists API for this, but because each separator
+// can be different, we can't.
+pub(crate) fn rewrite_all_pairs(
+ expr: &ast::Expr,
+ shape: Shape,
+ context: &RewriteContext<'_>,
+) -> Option<String> {
+ expr.flatten(context, shape).and_then(|list| {
+ // First we try formatting on one line.
+ rewrite_pairs_one_line(&list, shape, context)
+ .or_else(|| rewrite_pairs_multiline(&list, shape, context))
+ })
+}
+
+// This may return a multi-line result since we allow the last expression to go
+// multiline in a 'single line' formatting.
+fn rewrite_pairs_one_line<T: Rewrite>(
+ list: &PairList<'_, '_, T>,
+ shape: Shape,
+ context: &RewriteContext<'_>,
+) -> Option<String> {
+ assert!(list.list.len() >= 2, "Not a pair?");
+
+ let mut result = String::new();
+ let base_shape = shape.block();
+
+ for ((_, rewrite), s) in list.list.iter().zip(list.separators.iter()) {
+ if let Some(rewrite) = rewrite {
+ if !is_single_line(rewrite) || result.len() > shape.width {
+ return None;
+ }
+
+ result.push_str(rewrite);
+ result.push(' ');
+ result.push_str(s);
+ result.push(' ');
+ } else {
+ return None;
+ }
+ }
+
+ let prefix_len = result.len();
+ let last = list.list.last()?.0;
+ let cur_shape = base_shape.offset_left(last_line_width(&result))?;
+ let last_rewrite = last.rewrite(context, cur_shape)?;
+ result.push_str(&last_rewrite);
+
+ if first_line_width(&result) > shape.width {
+ return None;
+ }
+
+ // Check the last expression in the list. We sometimes let this expression
+ // go over multiple lines, but we check for some ugly conditions.
+ if !(is_single_line(&result) || last_rewrite.starts_with('{'))
+ && (last_rewrite.starts_with('(') || prefix_len > context.config.tab_spaces())
+ {
+ return None;
+ }
+
+ wrap_str(result, context.config.max_width(), shape)
+}
+
+fn rewrite_pairs_multiline<T: Rewrite>(
+ list: &PairList<'_, '_, T>,
+ shape: Shape,
+ context: &RewriteContext<'_>,
+) -> Option<String> {
+ let rhs_offset = shape.rhs_overhead(context.config);
+ let nested_shape = (match context.config.indent_style() {
+ IndentStyle::Visual => shape.visual_indent(0),
+ IndentStyle::Block => shape.block_indent(context.config.tab_spaces()),
+ })
+ .with_max_width(context.config)
+ .sub_width(rhs_offset)?;
+
+ let indent_str = nested_shape.indent.to_string_with_newline(context.config);
+ let mut result = String::new();
+
+ result.push_str(list.list[0].1.as_ref()?);
+
+ for ((e, default_rw), s) in list.list[1..].iter().zip(list.separators.iter()) {
+ // The following test checks if we should keep two subexprs on the same
+ // line. We do this if not doing so would create an orphan and there is
+ // enough space to do so.
+ let offset = if result.contains('\n') {
+ 0
+ } else {
+ shape.used_width()
+ };
+ if last_line_width(&result) + offset <= nested_shape.used_width() {
+ // We must snuggle the next line onto the previous line to avoid an orphan.
+ if let Some(line_shape) =
+ shape.offset_left(s.len() + 2 + trimmed_last_line_width(&result))
+ {
+ if let Some(rewrite) = e.rewrite(context, line_shape) {
+ result.push(' ');
+ result.push_str(s);
+ result.push(' ');
+ result.push_str(&rewrite);
+ continue;
+ }
+ }
+ }
+
+ match context.config.binop_separator() {
+ SeparatorPlace::Back => {
+ result.push(' ');
+ result.push_str(s);
+ result.push_str(&indent_str);
+ }
+ SeparatorPlace::Front => {
+ result.push_str(&indent_str);
+ result.push_str(s);
+ result.push(' ');
+ }
+ }
+
+ result.push_str(default_rw.as_ref()?);
+ }
+ Some(result)
+}
+
+// Rewrites a single pair.
+pub(crate) fn rewrite_pair<LHS, RHS>(
+ lhs: &LHS,
+ rhs: &RHS,
+ pp: PairParts<'_>,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ separator_place: SeparatorPlace,
+) -> Option<String>
+where
+ LHS: Rewrite,
+ RHS: Rewrite,
+{
+ let tab_spaces = context.config.tab_spaces();
+ let lhs_overhead = match separator_place {
+ SeparatorPlace::Back => shape.used_width() + pp.prefix.len() + pp.infix.trim_end().len(),
+ SeparatorPlace::Front => shape.used_width(),
+ };
+ let lhs_shape = Shape {
+ width: context.budget(lhs_overhead),
+ ..shape
+ };
+ let lhs_result = lhs
+ .rewrite(context, lhs_shape)
+ .map(|lhs_str| format!("{}{}", pp.prefix, lhs_str))?;
+
+ // Try to put both lhs and rhs on the same line.
+ let rhs_orig_result = shape
+ .offset_left(last_line_width(&lhs_result) + pp.infix.len())
+ .and_then(|s| s.sub_width(pp.suffix.len()))
+ .and_then(|rhs_shape| rhs.rewrite(context, rhs_shape));
+ if let Some(ref rhs_result) = rhs_orig_result {
+        // If the length of the lhs is equal to or shorter than the tab width, or
+        // the rhs looks like a block expression, we put the rhs on the same
+        // line as the lhs even if the rhs spans multiple lines.
+ let allow_same_line = lhs_result.len() <= tab_spaces
+ || rhs_result
+ .lines()
+ .next()
+ .map(|first_line| first_line.ends_with('{'))
+ .unwrap_or(false);
+ if !rhs_result.contains('\n') || allow_same_line {
+ let one_line_width = last_line_width(&lhs_result)
+ + pp.infix.len()
+ + first_line_width(rhs_result)
+ + pp.suffix.len();
+ if one_line_width <= shape.width {
+ return Some(format!(
+ "{}{}{}{}",
+ lhs_result, pp.infix, rhs_result, pp.suffix
+ ));
+ }
+ }
+ }
+
+ // We have to use multiple lines.
+ // Re-evaluate the rhs because we have more space now:
+ let mut rhs_shape = match context.config.indent_style() {
+ IndentStyle::Visual => shape
+ .sub_width(pp.suffix.len() + pp.prefix.len())?
+ .visual_indent(pp.prefix.len()),
+ IndentStyle::Block => {
+ // Try to calculate the initial constraint on the right hand side.
+ let rhs_overhead = shape.rhs_overhead(context.config);
+ Shape::indented(shape.indent.block_indent(context.config), context.config)
+ .sub_width(rhs_overhead)?
+ }
+ };
+ let infix = match separator_place {
+ SeparatorPlace::Back => pp.infix.trim_end(),
+ SeparatorPlace::Front => pp.infix.trim_start(),
+ };
+ if separator_place == SeparatorPlace::Front {
+ rhs_shape = rhs_shape.offset_left(infix.len())?;
+ }
+ let rhs_result = rhs.rewrite(context, rhs_shape)?;
+ let indent_str = rhs_shape.indent.to_string_with_newline(context.config);
+ let infix_with_sep = match separator_place {
+ SeparatorPlace::Back => format!("{}{}", infix, indent_str),
+ SeparatorPlace::Front => format!("{}{}", indent_str, infix),
+ };
+ Some(format!(
+ "{}{}{}{}",
+ lhs_result, infix_with_sep, rhs_result, pp.suffix
+ ))
+}
+
+// A pair which forms a tree and can be flattened (e.g., binops).
+trait FlattenPair: Rewrite + Sized {
+ fn flatten(&self, _: &RewriteContext<'_>, _: Shape) -> Option<PairList<'_, '_, Self>> {
+ None
+ }
+}
+
+struct PairList<'a, 'b, T: Rewrite> {
+ list: Vec<(&'b T, Option<String>)>,
+ separators: Vec<&'a str>,
+}
+
+impl FlattenPair for ast::Expr {
+ fn flatten(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ ) -> Option<PairList<'_, '_, ast::Expr>> {
+ let top_op = match self.kind {
+ ast::ExprKind::Binary(op, _, _) => op.node,
+ _ => return None,
+ };
+
+ let default_rewrite = |node: &ast::Expr, sep: usize, is_first: bool| {
+ if is_first {
+ return node.rewrite(context, shape);
+ }
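+ // Reserve room for the separator plus the space adjoining it.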
+ let nested_overhead = sep + 1;
+ let rhs_offset = shape.rhs_overhead(context.config);
+ let nested_shape = (match context.config.indent_style() {
+ IndentStyle::Visual => shape.visual_indent(0),
+ IndentStyle::Block => shape.block_indent(context.config.tab_spaces()),
+ })
+ .with_max_width(context.config)
+ .sub_width(rhs_offset)?;
+ let default_shape = match context.config.binop_separator() {
+ SeparatorPlace::Back => nested_shape.sub_width(nested_overhead)?,
+ SeparatorPlace::Front => nested_shape.offset_left(nested_overhead)?,
+ };
+ node.rewrite(context, default_shape)
+ };
+
+ // Turn a tree of binop expressions into a list using a depth-first,
+ // in-order traversal.
+ let mut stack = vec![];
+ let mut list = vec![];
+ let mut separators = vec![];
+ let mut node = self;
+ loop {
+ match node.kind {
+ ast::ExprKind::Binary(op, ref lhs, _) if op.node == top_op => {
+ stack.push(node);
+ node = lhs;
+ }
+ _ => {
+ let op_len = separators.last().map_or(0, |s: &&str| s.len());
+ let rw = default_rewrite(node, op_len, list.is_empty());
+ list.push((node, rw));
+ if let Some(pop) = stack.pop() {
+ match pop.kind {
+ ast::ExprKind::Binary(op, _, ref rhs) => {
+ separators.push(op.node.to_string());
+ node = rhs;
+ }
+ _ => unreachable!(),
+ }
+ } else {
+ break;
+ }
+ }
+ }
+ }
+
+ assert_eq!(list.len() - 1, separators.len());
+ Some(PairList { list, separators })
+ }
+}
+
+impl FlattenPair for ast::Ty {}
+impl FlattenPair for ast::Pat {}
diff --git a/src/tools/rustfmt/src/parse/macros/asm.rs b/src/tools/rustfmt/src/parse/macros/asm.rs
new file mode 100644
index 000000000..cc9fb5072
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/macros/asm.rs
@@ -0,0 +1,11 @@
+use rustc_ast::ast;
+use rustc_builtin_macros::asm::{parse_asm_args, AsmArgs};
+
+use crate::rewrite::RewriteContext;
+
+#[allow(dead_code)]
+pub(crate) fn parse_asm(context: &RewriteContext<'_>, mac: &ast::MacCall) -> Option<AsmArgs> {
+ let ts = mac.args.inner_tokens();
+ let mut parser = super::build_parser(context, ts);
+ parse_asm_args(&mut parser, context.parse_sess.inner(), mac.span(), false).ok()
+}
diff --git a/src/tools/rustfmt/src/parse/macros/cfg_if.rs b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
new file mode 100644
index 000000000..09b3e32df
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
@@ -0,0 +1,89 @@
+use std::panic::{catch_unwind, AssertUnwindSafe};
+
+use rustc_ast::ast;
+use rustc_ast::token::{Delimiter, TokenKind};
+use rustc_parse::parser::ForceCollect;
+use rustc_span::symbol::kw;
+
+use crate::parse::macros::build_stream_parser;
+use crate::parse::session::ParseSess;
+
+pub(crate) fn parse_cfg_if<'a>(
+ sess: &'a ParseSess,
+ mac: &'a ast::MacCall,
+) -> Result<Vec<ast::Item>, &'static str> {
+ match catch_unwind(AssertUnwindSafe(|| parse_cfg_if_inner(sess, mac))) {
+ Ok(Ok(items)) => Ok(items),
+ Ok(err @ Err(_)) => err,
+ Err(..) => Err("failed to parse cfg_if!"),
+ }
+}
+
+fn parse_cfg_if_inner<'a>(
+ sess: &'a ParseSess,
+ mac: &'a ast::MacCall,
+) -> Result<Vec<ast::Item>, &'static str> {
+ let ts = mac.args.inner_tokens();
+ let mut parser = build_stream_parser(sess.inner(), ts);
+
+ let mut items = vec![];
+ let mut process_if_cfg = true;
+
+ while parser.token.kind != TokenKind::Eof {
+ if process_if_cfg {
+ if !parser.eat_keyword(kw::If) {
+ return Err("Expected `if`");
+ }
+ // Inner attributes are not actually syntactically permitted here, but we don't
+ // care about inner vs outer attributes in this position. Our purpose with this
+ // special case parsing of cfg_if macros is to ensure we can correctly resolve
+ // imported modules that may have a custom `path` defined.
+ //
+ // As such, we just need to advance the parser past the attribute and up to
+ // the opening brace.
+ // See also https://github.com/rust-lang/rust/pull/79433
+ parser
+ .parse_attribute(rustc_parse::parser::attr::InnerAttrPolicy::Permitted)
+ .map_err(|_| "Failed to parse attributes")?;
+ }
+
+ if !parser.eat(&TokenKind::OpenDelim(Delimiter::Brace)) {
+ return Err("Expected an opening brace");
+ }
+
+ while parser.token != TokenKind::CloseDelim(Delimiter::Brace)
+ && parser.token.kind != TokenKind::Eof
+ {
+ let item = match parser.parse_item(ForceCollect::No) {
+ Ok(Some(item_ptr)) => item_ptr.into_inner(),
+ Ok(None) => continue,
+ Err(err) => {
+ err.cancel();
+ parser.sess.span_diagnostic.reset_err_count();
+ return Err(
+ "Expected item inside cfg_if block, but failed to parse it as an item",
+ );
+ }
+ };
+ if let ast::ItemKind::Mod(..) = item.kind {
+ items.push(item);
+ }
+ }
+
+ if !parser.eat(&TokenKind::CloseDelim(Delimiter::Brace)) {
+ return Err("Expected a closing brace");
+ }
+
+ if parser.eat(&TokenKind::Eof) {
+ break;
+ }
+
+ if !parser.eat_keyword(kw::Else) {
+ return Err("Expected `else`");
+ }
+
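+ // A bare trailing `else { ... }` block has no `if <cfg>` clause to parse.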
+ process_if_cfg = parser.token.is_keyword(kw::If);
+ }
+
+ Ok(items)
+}
diff --git a/src/tools/rustfmt/src/parse/macros/lazy_static.rs b/src/tools/rustfmt/src/parse/macros/lazy_static.rs
new file mode 100644
index 000000000..a8c2feec4
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/macros/lazy_static.rs
@@ -0,0 +1,50 @@
+use rustc_ast::ast;
+use rustc_ast::ptr::P;
+use rustc_ast::token::TokenKind;
+use rustc_ast::tokenstream::TokenStream;
+use rustc_span::symbol::{self, kw};
+
+use crate::rewrite::RewriteContext;
+
+pub(crate) fn parse_lazy_static(
+ context: &RewriteContext<'_>,
+ ts: TokenStream,
+) -> Option<Vec<(ast::Visibility, symbol::Ident, P<ast::Ty>, P<ast::Expr>)>> {
+ let mut result = vec![];
+ let mut parser = super::build_parser(context, ts);
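+ // Runs the given parser method; if it fails or any error has been reported,
+ // resets the error count and bails out of `parse_lazy_static` with `None`.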
+ macro_rules! parse_or {
+ ($method:ident $(,)* $($arg:expr),* $(,)*) => {
+ match parser.$method($($arg,)*) {
+ Ok(val) => {
+ if parser.sess.span_diagnostic.has_errors().is_some() {
+ parser.sess.span_diagnostic.reset_err_count();
+ return None;
+ } else {
+ val
+ }
+ }
+ Err(err) => {
+ err.cancel();
+ parser.sess.span_diagnostic.reset_err_count();
+ return None;
+ }
+ }
+ }
+ }
+
+ while parser.token.kind != TokenKind::Eof {
+ // Parse a `lazy_static!` item.
+ let vis = parse_or!(parse_visibility, rustc_parse::parser::FollowedByType::No);
+ parser.eat_keyword(kw::Static);
+ parser.eat_keyword(kw::Ref);
+ let id = parse_or!(parse_ident);
+ parser.eat(&TokenKind::Colon);
+ let ty = parse_or!(parse_ty);
+ parser.eat(&TokenKind::Eq);
+ let expr = parse_or!(parse_expr);
+ parser.eat(&TokenKind::Semi);
+ result.push((vis, id, ty, expr));
+ }
+
+ Some(result)
+}
diff --git a/src/tools/rustfmt/src/parse/macros/mod.rs b/src/tools/rustfmt/src/parse/macros/mod.rs
new file mode 100644
index 000000000..67f398592
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/macros/mod.rs
@@ -0,0 +1,229 @@
+use rustc_ast::token::{Delimiter, TokenKind};
+use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::{ast, ptr};
+use rustc_parse::parser::{ForceCollect, Parser};
+use rustc_parse::{stream_to_parser, MACRO_ARGUMENTS};
+use rustc_session::parse::ParseSess;
+use rustc_span::symbol::{self, kw};
+use rustc_span::Symbol;
+
+use crate::macros::MacroArg;
+use crate::rewrite::RewriteContext;
+
+pub(crate) mod asm;
+pub(crate) mod cfg_if;
+pub(crate) mod lazy_static;
+
+fn build_stream_parser<'a>(sess: &'a ParseSess, tokens: TokenStream) -> Parser<'a> {
+ stream_to_parser(sess, tokens, MACRO_ARGUMENTS)
+}
+
+fn build_parser<'a>(context: &RewriteContext<'a>, tokens: TokenStream) -> Parser<'a> {
+ build_stream_parser(context.parse_sess.inner(), tokens)
+}
+
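+// Tries to parse a single macro argument as an expression, then a type, then a
+// pattern, and finally an item. Each attempt runs on a cloned parser so that a
+// failed attempt does not consume tokens from the real one.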
+fn parse_macro_arg<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
+ macro_rules! parse_macro_arg {
+ ($macro_arg:ident, $parser:expr, $f:expr) => {
+ let mut cloned_parser = (*parser).clone();
+ match $parser(&mut cloned_parser) {
+ Ok(x) => {
+ if parser.sess.span_diagnostic.has_errors().is_some() {
+ parser.sess.span_diagnostic.reset_err_count();
+ } else {
+ // Parsing succeeded.
+ *parser = cloned_parser;
+ return Some(MacroArg::$macro_arg($f(x)?));
+ }
+ }
+ Err(e) => {
+ e.cancel();
+ parser.sess.span_diagnostic.reset_err_count();
+ }
+ }
+ };
+ }
+
+ parse_macro_arg!(
+ Expr,
+ |parser: &mut rustc_parse::parser::Parser<'b>| parser.parse_expr(),
+ |x: ptr::P<ast::Expr>| Some(x)
+ );
+ parse_macro_arg!(
+ Ty,
+ |parser: &mut rustc_parse::parser::Parser<'b>| parser.parse_ty(),
+ |x: ptr::P<ast::Ty>| Some(x)
+ );
+ parse_macro_arg!(
+ Pat,
+ |parser: &mut rustc_parse::parser::Parser<'b>| parser.parse_pat_no_top_alt(None),
+ |x: ptr::P<ast::Pat>| Some(x)
+ );
+ // `parse_item` returns `Option<ptr::P<ast::Item>>`.
+ parse_macro_arg!(
+ Item,
+ |parser: &mut rustc_parse::parser::Parser<'b>| parser.parse_item(ForceCollect::No),
+ |x: Option<ptr::P<ast::Item>>| x
+ );
+
+ None
+}
+
+pub(crate) struct ParsedMacroArgs {
+ pub(crate) vec_with_semi: bool,
+ pub(crate) trailing_comma: bool,
+ pub(crate) args: Vec<MacroArg>,
+}
+
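+// Checks whether the next token is a bare Rust keyword used as a macro argument
+// (followed by `,` or the end of the input); if so, consumes it and returns it as
+// `MacroArg::Keyword`.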
+fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
+ for &keyword in RUST_KW.iter() {
+ if parser.token.is_keyword(keyword)
+ && parser.look_ahead(1, |t| {
+ t.kind == TokenKind::Eof || t.kind == TokenKind::Comma
+ })
+ {
+ parser.bump();
+ return Some(MacroArg::Keyword(
+ symbol::Ident::with_dummy_span(keyword),
+ parser.prev_token.span,
+ ));
+ }
+ }
+ None
+}
+
+pub(crate) fn parse_macro_args(
+ context: &RewriteContext<'_>,
+ tokens: TokenStream,
+ style: Delimiter,
+ forced_bracket: bool,
+) -> Option<ParsedMacroArgs> {
+ let mut parser = build_parser(context, tokens);
+ let mut args = Vec::new();
+ let mut vec_with_semi = false;
+ let mut trailing_comma = false;
+
+ if Delimiter::Brace != style {
+ loop {
+ if let Some(arg) = check_keyword(&mut parser) {
+ args.push(arg);
+ } else if let Some(arg) = parse_macro_arg(&mut parser) {
+ args.push(arg);
+ } else {
+ return None;
+ }
+
+ match parser.token.kind {
+ TokenKind::Eof => break,
+ TokenKind::Comma => (),
+ TokenKind::Semi => {
+ // Try to parse `vec![expr; expr]`
+ if forced_bracket {
+ parser.bump();
+ if parser.token.kind != TokenKind::Eof {
+ match parse_macro_arg(&mut parser) {
+ Some(arg) => {
+ args.push(arg);
+ parser.bump();
+ if parser.token.kind == TokenKind::Eof && args.len() == 2 {
+ vec_with_semi = true;
+ break;
+ }
+ }
+ None => {
+ return None;
+ }
+ }
+ }
+ }
+ return None;
+ }
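+ // After an item argument there need not be a comma, so keep parsing
+ // without consuming the current token.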
+ _ if args.last().map_or(false, MacroArg::is_item) => continue,
+ _ => return None,
+ }
+
+ parser.bump();
+
+ if parser.token.kind == TokenKind::Eof {
+ trailing_comma = true;
+ break;
+ }
+ }
+ }
+
+ Some(ParsedMacroArgs {
+ vec_with_semi,
+ trailing_comma,
+ args,
+ })
+}
+
+pub(crate) fn parse_expr(
+ context: &RewriteContext<'_>,
+ tokens: TokenStream,
+) -> Option<ptr::P<ast::Expr>> {
+ let mut parser = build_parser(context, tokens);
+ parser.parse_expr().ok()
+}
+
+const RUST_KW: [Symbol; 59] = [
+ kw::PathRoot,
+ kw::DollarCrate,
+ kw::Underscore,
+ kw::As,
+ kw::Box,
+ kw::Break,
+ kw::Const,
+ kw::Continue,
+ kw::Crate,
+ kw::Else,
+ kw::Enum,
+ kw::Extern,
+ kw::False,
+ kw::Fn,
+ kw::For,
+ kw::If,
+ kw::Impl,
+ kw::In,
+ kw::Let,
+ kw::Loop,
+ kw::Match,
+ kw::Mod,
+ kw::Move,
+ kw::Mut,
+ kw::Pub,
+ kw::Ref,
+ kw::Return,
+ kw::SelfLower,
+ kw::SelfUpper,
+ kw::Static,
+ kw::Struct,
+ kw::Super,
+ kw::Trait,
+ kw::True,
+ kw::Type,
+ kw::Unsafe,
+ kw::Use,
+ kw::Where,
+ kw::While,
+ kw::Abstract,
+ kw::Become,
+ kw::Do,
+ kw::Final,
+ kw::Macro,
+ kw::Override,
+ kw::Priv,
+ kw::Typeof,
+ kw::Unsized,
+ kw::Virtual,
+ kw::Yield,
+ kw::Dyn,
+ kw::Async,
+ kw::Try,
+ kw::UnderscoreLifetime,
+ kw::StaticLifetime,
+ kw::Auto,
+ kw::Catch,
+ kw::Default,
+ kw::Union,
+];
diff --git a/src/tools/rustfmt/src/parse/mod.rs b/src/tools/rustfmt/src/parse/mod.rs
new file mode 100644
index 000000000..5e88826ea
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/mod.rs
@@ -0,0 +1,3 @@
+pub(crate) mod macros;
+pub(crate) mod parser;
+pub(crate) mod session;
diff --git a/src/tools/rustfmt/src/parse/parser.rs b/src/tools/rustfmt/src/parse/parser.rs
new file mode 100644
index 000000000..268c72649
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/parser.rs
@@ -0,0 +1,175 @@
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::path::{Path, PathBuf};
+
+use rustc_ast::token::TokenKind;
+use rustc_ast::{ast, ptr};
+use rustc_errors::Diagnostic;
+use rustc_parse::{new_parser_from_file, parser::Parser as RawParser};
+use rustc_span::{sym, Span};
+
+use crate::attr::first_attr_value_str_by_name;
+use crate::parse::session::ParseSess;
+use crate::Input;
+
+pub(crate) type DirectoryOwnership = rustc_expand::module::DirOwnership;
+pub(crate) type ModulePathSuccess = rustc_expand::module::ModulePathSuccess;
+pub(crate) type ModError<'a> = rustc_expand::module::ModError<'a>;
+
+#[derive(Clone)]
+pub(crate) struct Directory {
+ pub(crate) path: PathBuf,
+ pub(crate) ownership: DirectoryOwnership,
+}
+
+/// A parser for Rust source code.
+pub(crate) struct Parser<'a> {
+ parser: RawParser<'a>,
+}
+
+/// A builder for the `Parser`.
+#[derive(Default)]
+pub(crate) struct ParserBuilder<'a> {
+ sess: Option<&'a ParseSess>,
+ input: Option<Input>,
+}
+
+impl<'a> ParserBuilder<'a> {
+ pub(crate) fn input(mut self, input: Input) -> ParserBuilder<'a> {
+ self.input = Some(input);
+ self
+ }
+
+ pub(crate) fn sess(mut self, sess: &'a ParseSess) -> ParserBuilder<'a> {
+ self.sess = Some(sess);
+ self
+ }
+
+ pub(crate) fn build(self) -> Result<Parser<'a>, ParserError> {
+ let sess = self.sess.ok_or(ParserError::NoParseSess)?;
+ let input = self.input.ok_or(ParserError::NoInput)?;
+
+ let parser = match Self::parser(sess.inner(), input) {
+ Ok(p) => p,
+ Err(db) => {
+ if let Some(diagnostics) = db {
+ sess.emit_diagnostics(diagnostics);
+ return Err(ParserError::ParserCreationError);
+ }
+ return Err(ParserError::ParsePanicError);
+ }
+ };
+
+ Ok(Parser { parser })
+ }
+
+ fn parser(
+ sess: &'a rustc_session::parse::ParseSess,
+ input: Input,
+ ) -> Result<rustc_parse::parser::Parser<'a>, Option<Vec<Diagnostic>>> {
+ match input {
+ Input::File(ref file) => catch_unwind(AssertUnwindSafe(move || {
+ new_parser_from_file(sess, file, None)
+ }))
+ .map_err(|_| None),
+ Input::Text(text) => rustc_parse::maybe_new_parser_from_source_str(
+ sess,
+ rustc_span::FileName::Custom("stdin".to_owned()),
+ text,
+ )
+ .map_err(Some),
+ }
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) enum ParserError {
+ NoParseSess,
+ NoInput,
+ ParserCreationError,
+ ParseError,
+ ParsePanicError,
+}
+
+impl<'a> Parser<'a> {
+ pub(crate) fn submod_path_from_attr(attrs: &[ast::Attribute], path: &Path) -> Option<PathBuf> {
+ let path_sym = first_attr_value_str_by_name(attrs, sym::path)?;
+ let path_str = path_sym.as_str();
+
+ // On windows, the base path might have the form
+ // `\\?\foo\bar` in which case it does not tolerate
+ // mixed `/` and `\` separators, so canonicalize
+ // `/` to `\`.
+ #[cfg(windows)]
+ let path_str = path_str.replace("/", "\\");
+
+ Some(path.join(path_str))
+ }
+
+ pub(crate) fn parse_file_as_module(
+ sess: &'a ParseSess,
+ path: &Path,
+ span: Span,
+ ) -> Result<(Vec<ast::Attribute>, Vec<ptr::P<ast::Item>>, Span), ParserError> {
+ let result = catch_unwind(AssertUnwindSafe(|| {
+ let mut parser = new_parser_from_file(sess.inner(), path, Some(span));
+ match parser.parse_mod(&TokenKind::Eof) {
+ Ok((a, i, spans)) => Some((a, i, spans.inner_span)),
+ Err(mut e) => {
+ e.emit();
+ if sess.can_reset_errors() {
+ sess.reset_errors();
+ }
+ None
+ }
+ }
+ }));
+ match result {
+ Ok(Some(m)) if !sess.has_errors() => Ok(m),
+ Ok(Some(m)) if sess.can_reset_errors() => {
+ sess.reset_errors();
+ Ok(m)
+ }
+ Ok(_) => Err(ParserError::ParseError),
+ Err(..) if path.exists() => Err(ParserError::ParseError),
+ Err(_) => Err(ParserError::ParsePanicError),
+ }
+ }
+
+ pub(crate) fn parse_crate(
+ input: Input,
+ sess: &'a ParseSess,
+ ) -> Result<ast::Crate, ParserError> {
+ let krate = Parser::parse_crate_inner(input, sess)?;
+ if !sess.has_errors() {
+ return Ok(krate);
+ }
+
+ if sess.can_reset_errors() {
+ sess.reset_errors();
+ return Ok(krate);
+ }
+
+ Err(ParserError::ParseError)
+ }
+
+ fn parse_crate_inner(input: Input, sess: &'a ParseSess) -> Result<ast::Crate, ParserError> {
+ ParserBuilder::default()
+ .input(input)
+ .sess(sess)
+ .build()?
+ .parse_crate_mod()
+ }
+
+ fn parse_crate_mod(&mut self) -> Result<ast::Crate, ParserError> {
+ let mut parser = AssertUnwindSafe(&mut self.parser);
+
+ match catch_unwind(move || parser.parse_crate_mod()) {
+ Ok(Ok(k)) => Ok(k),
+ Ok(Err(mut db)) => {
+ db.emit();
+ Err(ParserError::ParseError)
+ }
+ Err(_) => Err(ParserError::ParsePanicError),
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/parse/session.rs b/src/tools/rustfmt/src/parse/session.rs
new file mode 100644
index 000000000..23db54219
--- /dev/null
+++ b/src/tools/rustfmt/src/parse/session.rs
@@ -0,0 +1,507 @@
+use std::path::Path;
+use std::sync::atomic::{AtomicBool, Ordering};
+
+use rustc_data_structures::sync::{Lrc, Send};
+use rustc_errors::emitter::{Emitter, EmitterWriter};
+use rustc_errors::{ColorConfig, Diagnostic, Handler, Level as DiagnosticLevel};
+use rustc_session::parse::ParseSess as RawParseSess;
+use rustc_span::{
+ source_map::{FilePathMapping, SourceMap},
+ symbol, BytePos, Span,
+};
+
+use crate::config::file_lines::LineRange;
+use crate::ignore_path::IgnorePathSet;
+use crate::parse::parser::{ModError, ModulePathSuccess};
+use crate::source_map::LineRangeUtils;
+use crate::utils::starts_with_newline;
+use crate::visitor::SnippetProvider;
+use crate::{Config, ErrorKind, FileName};
+
+/// ParseSess holds structs necessary for constructing a parser.
+pub(crate) struct ParseSess {
+ parse_sess: RawParseSess,
+ ignore_path_set: Lrc<IgnorePathSet>,
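+ // Tracks whether every parser error reported so far came from an ignored
+ // file, in which case the error count can safely be reset.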
+ can_reset_errors: Lrc<AtomicBool>,
+}
+
+/// Emitter which discards every error.
+struct SilentEmitter;
+
+impl Emitter for SilentEmitter {
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+ None
+ }
+ fn emit_diagnostic(&mut self, _db: &Diagnostic) {}
+ fn fluent_bundle(&self) -> Option<&Lrc<rustc_errors::FluentBundle>> {
+ None
+ }
+ fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
+ panic!("silent emitter attempted to translate a diagnostic");
+ }
+}
+
+fn silent_emitter() -> Box<dyn Emitter + Send> {
+ Box::new(SilentEmitter {})
+}
+
+/// Emits errors for every file except the ones specified in the `ignore_path_set`.
+struct SilentOnIgnoredFilesEmitter {
+ ignore_path_set: Lrc<IgnorePathSet>,
+ source_map: Lrc<SourceMap>,
+ emitter: Box<dyn Emitter + Send>,
+ has_non_ignorable_parser_errors: bool,
+ can_reset: Lrc<AtomicBool>,
+}
+
+impl SilentOnIgnoredFilesEmitter {
+ fn handle_non_ignoreable_error(&mut self, db: &Diagnostic) {
+ self.has_non_ignorable_parser_errors = true;
+ self.can_reset.store(false, Ordering::Release);
+ self.emitter.emit_diagnostic(db);
+ }
+}
+
+impl Emitter for SilentOnIgnoredFilesEmitter {
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+ None
+ }
+ fn emit_diagnostic(&mut self, db: &Diagnostic) {
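+ // Fatal errors are always emitted, even for ignored files.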
+ if db.level() == DiagnosticLevel::Fatal {
+ return self.handle_non_ignoreable_error(db);
+ }
+ if let Some(primary_span) = &db.span.primary_span() {
+ let file_name = self.source_map.span_to_filename(*primary_span);
+ if let rustc_span::FileName::Real(rustc_span::RealFileName::LocalPath(ref path)) =
+ file_name
+ {
+ if self
+ .ignore_path_set
+ .is_match(&FileName::Real(path.to_path_buf()))
+ {
+ if !self.has_non_ignorable_parser_errors {
+ self.can_reset.store(true, Ordering::Release);
+ }
+ return;
+ }
+ };
+ }
+ self.handle_non_ignoreable_error(db);
+ }
+
+ fn fluent_bundle(&self) -> Option<&Lrc<rustc_errors::FluentBundle>> {
+ self.emitter.fluent_bundle()
+ }
+
+ fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
+ self.emitter.fallback_fluent_bundle()
+ }
+}
+
+fn default_handler(
+ source_map: Lrc<SourceMap>,
+ ignore_path_set: Lrc<IgnorePathSet>,
+ can_reset: Lrc<AtomicBool>,
+ hide_parse_errors: bool,
+) -> Handler {
+ let supports_color = term::stderr().map_or(false, |term| term.supports_color());
+ let color_cfg = if supports_color {
+ ColorConfig::Auto
+ } else {
+ ColorConfig::Never
+ };
+
+ let emitter = if hide_parse_errors {
+ silent_emitter()
+ } else {
+ let fallback_bundle =
+ rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
+ Box::new(EmitterWriter::stderr(
+ color_cfg,
+ Some(source_map.clone()),
+ None,
+ fallback_bundle,
+ false,
+ false,
+ None,
+ false,
+ ))
+ };
+ Handler::with_emitter(
+ true,
+ None,
+ Box::new(SilentOnIgnoredFilesEmitter {
+ has_non_ignorable_parser_errors: false,
+ source_map,
+ emitter,
+ ignore_path_set,
+ can_reset,
+ }),
+ )
+}
+
+impl ParseSess {
+ pub(crate) fn new(config: &Config) -> Result<ParseSess, ErrorKind> {
+ let ignore_path_set = match IgnorePathSet::from_ignore_list(&config.ignore()) {
+ Ok(ignore_path_set) => Lrc::new(ignore_path_set),
+ Err(e) => return Err(ErrorKind::InvalidGlobPattern(e)),
+ };
+ let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let can_reset_errors = Lrc::new(AtomicBool::new(false));
+
+ let handler = default_handler(
+ Lrc::clone(&source_map),
+ Lrc::clone(&ignore_path_set),
+ Lrc::clone(&can_reset_errors),
+ config.hide_parse_errors(),
+ );
+ let parse_sess = RawParseSess::with_span_handler(handler, source_map);
+
+ Ok(ParseSess {
+ parse_sess,
+ ignore_path_set,
+ can_reset_errors,
+ })
+ }
+
+ /// Determine the submodule path for the given module identifier.
+ ///
+ /// * `id` - The name of the module
+ /// * `relative` - If Some(symbol), the symbol name is a directory relative to the dir_path.
+ /// If relative is Some, resolve the submodule at {dir_path}/{symbol}/{id}.rs
+ /// or {dir_path}/{symbol}/{id}/mod.rs. If None, resolve the module at {dir_path}/{id}.rs.
+ /// * `dir_path` - Module resolution will occur relative to this directory.
+ pub(crate) fn default_submod_path(
+ &self,
+ id: symbol::Ident,
+ relative: Option<symbol::Ident>,
+ dir_path: &Path,
+ ) -> Result<ModulePathSuccess, ModError<'_>> {
+ rustc_expand::module::default_submod_path(&self.parse_sess, id, relative, dir_path).or_else(
+ |e| {
+ // If resolving a module relative to {dir_path}/{symbol} fails because a file
+ // could not be found, then try to resolve the module relative to {dir_path}.
+ // If we still can't find the module after searching for it in {dir_path},
+ // surface the original error.
+ if matches!(e, ModError::FileNotFound(..)) && relative.is_some() {
+ rustc_expand::module::default_submod_path(&self.parse_sess, id, None, dir_path)
+ .map_err(|_| e)
+ } else {
+ Err(e)
+ }
+ },
+ )
+ }
+
+ pub(crate) fn is_file_parsed(&self, path: &Path) -> bool {
+ self.parse_sess
+ .source_map()
+ .get_source_file(&rustc_span::FileName::Real(
+ rustc_span::RealFileName::LocalPath(path.to_path_buf()),
+ ))
+ .is_some()
+ }
+
+ pub(crate) fn ignore_file(&self, path: &FileName) -> bool {
+ self.ignore_path_set.as_ref().is_match(path)
+ }
+
+ pub(crate) fn set_silent_emitter(&mut self) {
+ self.parse_sess.span_diagnostic = Handler::with_emitter(true, None, silent_emitter());
+ }
+
+ pub(crate) fn span_to_filename(&self, span: Span) -> FileName {
+ self.parse_sess.source_map().span_to_filename(span).into()
+ }
+
+ pub(crate) fn span_to_file_contents(&self, span: Span) -> Lrc<rustc_span::SourceFile> {
+ self.parse_sess
+ .source_map()
+ .lookup_source_file(span.data().lo)
+ }
+
+ pub(crate) fn span_to_first_line_string(&self, span: Span) -> String {
+ let file_lines = self.parse_sess.source_map().span_to_lines(span).ok();
+
+ match file_lines {
+ Some(fl) => fl
+ .file
+ .get_line(fl.lines[0].line_index)
+ .map_or_else(String::new, |s| s.to_string()),
+ None => String::new(),
+ }
+ }
+
+ pub(crate) fn line_of_byte_pos(&self, pos: BytePos) -> usize {
+ self.parse_sess.source_map().lookup_char_pos(pos).line
+ }
+
+ // TODO(calebcartwright): Preemptive, currently unused addition
+ // that will be used to support formatting scenarios that take original
+ // positions into account
+ /// Determines whether two byte positions are in the same source line.
+ #[allow(dead_code)]
+ pub(crate) fn byte_pos_same_line(&self, a: BytePos, b: BytePos) -> bool {
+ self.line_of_byte_pos(a) == self.line_of_byte_pos(b)
+ }
+
+ pub(crate) fn span_to_debug_info(&self, span: Span) -> String {
+ self.parse_sess.source_map().span_to_diagnostic_string(span)
+ }
+
+ pub(crate) fn inner(&self) -> &RawParseSess {
+ &self.parse_sess
+ }
+
+ pub(crate) fn snippet_provider(&self, span: Span) -> SnippetProvider {
+ let source_file = self.parse_sess.source_map().lookup_char_pos(span.lo()).file;
+ SnippetProvider::new(
+ source_file.start_pos,
+ source_file.end_pos,
+ Lrc::clone(source_file.src.as_ref().unwrap()),
+ )
+ }
+
+ pub(crate) fn get_original_snippet(&self, file_name: &FileName) -> Option<Lrc<String>> {
+ self.parse_sess
+ .source_map()
+ .get_source_file(&file_name.into())
+ .and_then(|source_file| source_file.src.clone())
+ }
+}
+
+// Methods that should be restricted within the parse module.
+impl ParseSess {
+ pub(super) fn emit_diagnostics(&self, diagnostics: Vec<Diagnostic>) {
+ for mut diagnostic in diagnostics {
+ self.parse_sess
+ .span_diagnostic
+ .emit_diagnostic(&mut diagnostic);
+ }
+ }
+
+ pub(super) fn can_reset_errors(&self) -> bool {
+ self.can_reset_errors.load(Ordering::Acquire)
+ }
+
+ pub(super) fn has_errors(&self) -> bool {
+ self.parse_sess.span_diagnostic.has_errors().is_some()
+ }
+
+ pub(super) fn reset_errors(&self) {
+ self.parse_sess.span_diagnostic.reset_err_count();
+ }
+}
+
+impl LineRangeUtils for ParseSess {
+ fn lookup_line_range(&self, span: Span) -> LineRange {
+ let snippet = self
+ .parse_sess
+ .source_map()
+ .span_to_snippet(span)
+ .unwrap_or_default();
+ let lo = self.parse_sess.source_map().lookup_line(span.lo()).unwrap();
+ let hi = self.parse_sess.source_map().lookup_line(span.hi()).unwrap();
+
+ debug_assert_eq!(
+ lo.sf.name, hi.sf.name,
+ "span crossed file boundary: lo: {:?}, hi: {:?}",
+ lo, hi
+ );
+
+ // in case the span starts with a newline, the line range is off by 1 without the
+ // adjustment below
+ let offset = 1 + if starts_with_newline(&snippet) { 1 } else { 0 };
+ // Line numbers start at 1
+ LineRange {
+ file: lo.sf.clone(),
+ lo: lo.line + offset,
+ hi: hi.line + offset,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use rustfmt_config_proc_macro::nightly_only_test;
+
+ mod emitter {
+ use super::*;
+ use crate::config::IgnoreList;
+ use crate::utils::mk_sp;
+ use rustc_errors::MultiSpan;
+ use rustc_span::{FileName as SourceMapFileName, RealFileName};
+ use std::path::PathBuf;
+ use std::sync::atomic::AtomicU32;
+
+ struct TestEmitter {
+ num_emitted_errors: Lrc<AtomicU32>,
+ }
+
+ impl Emitter for TestEmitter {
+ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
+ None
+ }
+ fn emit_diagnostic(&mut self, _db: &Diagnostic) {
+ self.num_emitted_errors.fetch_add(1, Ordering::Release);
+ }
+ fn fluent_bundle(&self) -> Option<&Lrc<rustc_errors::FluentBundle>> {
+ None
+ }
+ fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
+ panic!("test emitter attempted to translate a diagnostic");
+ }
+ }
+
+ fn build_diagnostic(level: DiagnosticLevel, span: Option<MultiSpan>) -> Diagnostic {
+ let mut diag = Diagnostic::new(level, "");
+ diag.message.clear();
+ if let Some(span) = span {
+ diag.span = span;
+ }
+ diag
+ }
+
+ fn build_emitter(
+ num_emitted_errors: Lrc<AtomicU32>,
+ can_reset: Lrc<AtomicBool>,
+ source_map: Option<Lrc<SourceMap>>,
+ ignore_list: Option<IgnoreList>,
+ ) -> SilentOnIgnoredFilesEmitter {
+ let emitter_writer = TestEmitter { num_emitted_errors };
+ let source_map =
+ source_map.unwrap_or_else(|| Lrc::new(SourceMap::new(FilePathMapping::empty())));
+ let ignore_path_set = Lrc::new(
+ IgnorePathSet::from_ignore_list(&ignore_list.unwrap_or_default()).unwrap(),
+ );
+ SilentOnIgnoredFilesEmitter {
+ has_non_ignorable_parser_errors: false,
+ source_map,
+ emitter: Box::new(emitter_writer),
+ ignore_path_set,
+ can_reset,
+ }
+ }
+
+ fn get_ignore_list(config: &str) -> IgnoreList {
+ Config::from_toml(config, Path::new("")).unwrap().ignore()
+ }
+
+ #[test]
+ fn handles_fatal_parse_error_in_ignored_file() {
+ let num_emitted_errors = Lrc::new(AtomicU32::new(0));
+ let can_reset_errors = Lrc::new(AtomicBool::new(false));
+ let ignore_list = get_ignore_list(r#"ignore = ["foo.rs"]"#);
+ let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let source =
+ String::from(r#"extern "system" fn jni_symbol!( funcName ) ( ... ) -> {} "#);
+ source_map.new_source_file(
+ SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
+ source,
+ );
+ let mut emitter = build_emitter(
+ Lrc::clone(&num_emitted_errors),
+ Lrc::clone(&can_reset_errors),
+ Some(Lrc::clone(&source_map)),
+ Some(ignore_list),
+ );
+ let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
+ let fatal_diagnostic = build_diagnostic(DiagnosticLevel::Fatal, Some(span));
+ emitter.emit_diagnostic(&fatal_diagnostic);
+ assert_eq!(num_emitted_errors.load(Ordering::Acquire), 1);
+ assert_eq!(can_reset_errors.load(Ordering::Acquire), false);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn handles_recoverable_parse_error_in_ignored_file() {
+ let num_emitted_errors = Lrc::new(AtomicU32::new(0));
+ let can_reset_errors = Lrc::new(AtomicBool::new(false));
+ let ignore_list = get_ignore_list(r#"ignore = ["foo.rs"]"#);
+ let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let source = String::from(r#"pub fn bar() { 1x; }"#);
+ source_map.new_source_file(
+ SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
+ source,
+ );
+ let mut emitter = build_emitter(
+ Lrc::clone(&num_emitted_errors),
+ Lrc::clone(&can_reset_errors),
+ Some(Lrc::clone(&source_map)),
+ Some(ignore_list),
+ );
+ let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
+ let non_fatal_diagnostic = build_diagnostic(DiagnosticLevel::Warning(None), Some(span));
+ emitter.emit_diagnostic(&non_fatal_diagnostic);
+ assert_eq!(num_emitted_errors.load(Ordering::Acquire), 0);
+ assert_eq!(can_reset_errors.load(Ordering::Acquire), true);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn handles_recoverable_parse_error_in_non_ignored_file() {
+ let num_emitted_errors = Lrc::new(AtomicU32::new(0));
+ let can_reset_errors = Lrc::new(AtomicBool::new(false));
+ let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let source = String::from(r#"pub fn bar() { 1x; }"#);
+ source_map.new_source_file(
+ SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
+ source,
+ );
+ let mut emitter = build_emitter(
+ Lrc::clone(&num_emitted_errors),
+ Lrc::clone(&can_reset_errors),
+ Some(Lrc::clone(&source_map)),
+ None,
+ );
+ let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
+ let non_fatal_diagnostic = build_diagnostic(DiagnosticLevel::Warning(None), Some(span));
+ emitter.emit_diagnostic(&non_fatal_diagnostic);
+ assert_eq!(num_emitted_errors.load(Ordering::Acquire), 1);
+ assert_eq!(can_reset_errors.load(Ordering::Acquire), false);
+ }
+
+ #[nightly_only_test]
+ #[test]
+ fn handles_mix_of_recoverable_parse_error() {
+ let num_emitted_errors = Lrc::new(AtomicU32::new(0));
+ let can_reset_errors = Lrc::new(AtomicBool::new(false));
+ let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let ignore_list = get_ignore_list(r#"ignore = ["foo.rs"]"#);
+ let bar_source = String::from(r#"pub fn bar() { 1x; }"#);
+ let foo_source = String::from(r#"pub fn foo() { 1x; }"#);
+ let fatal_source =
+ String::from(r#"extern "system" fn jni_symbol!( funcName ) ( ... ) -> {} "#);
+ source_map.new_source_file(
+ SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("bar.rs"))),
+ bar_source,
+ );
+ source_map.new_source_file(
+ SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
+ foo_source,
+ );
+ source_map.new_source_file(
+ SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("fatal.rs"))),
+ fatal_source,
+ );
+ let mut emitter = build_emitter(
+ Lrc::clone(&num_emitted_errors),
+ Lrc::clone(&can_reset_errors),
+ Some(Lrc::clone(&source_map)),
+ Some(ignore_list),
+ );
+ let bar_span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
+ let foo_span = MultiSpan::from_span(mk_sp(BytePos(21), BytePos(22)));
+ let bar_diagnostic = build_diagnostic(DiagnosticLevel::Warning(None), Some(bar_span));
+ let foo_diagnostic = build_diagnostic(DiagnosticLevel::Warning(None), Some(foo_span));
+ let fatal_diagnostic = build_diagnostic(DiagnosticLevel::Fatal, None);
+ emitter.emit_diagnostic(&bar_diagnostic);
+ emitter.emit_diagnostic(&foo_diagnostic);
+ emitter.emit_diagnostic(&fatal_diagnostic);
+ assert_eq!(num_emitted_errors.load(Ordering::Acquire), 2);
+ assert_eq!(can_reset_errors.load(Ordering::Acquire), false);
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/patterns.rs b/src/tools/rustfmt/src/patterns.rs
new file mode 100644
index 000000000..9b74b35f3
--- /dev/null
+++ b/src/tools/rustfmt/src/patterns.rs
@@ -0,0 +1,535 @@
+use rustc_ast::ast::{self, BindingMode, Pat, PatField, PatKind, RangeEnd, RangeSyntax};
+use rustc_ast::ptr;
+use rustc_span::{BytePos, Span};
+
+use crate::comment::{combine_strs_with_missing_comments, FindUncommented};
+use crate::config::lists::*;
+use crate::config::Version;
+use crate::expr::{can_be_overflowed_expr, rewrite_unary_prefix, wrap_struct_field};
+use crate::lists::{
+ definitive_tactic, itemize_list, shape_for_tactic, struct_lit_formatting, struct_lit_shape,
+ struct_lit_tactic, write_list, ListFormatting, ListItem, Separator,
+};
+use crate::macros::{rewrite_macro, MacroPosition};
+use crate::overflow;
+use crate::pairs::{rewrite_pair, PairParts};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::types::{rewrite_path, PathContext};
+use crate::utils::{format_mutability, mk_sp, mk_sp_lo_plus_one, rewrite_ident};
+
+/// Returns `true` if the given pattern is "short".
+/// A short pattern is defined by the following grammar:
+///
+/// `[small, ntp]`:
+/// - single token
+/// - `&[single-line, ntp]`
+///
+/// `[small]`:
+/// - `[small, ntp]`
+/// - unary tuple constructor `([small, ntp])`
+/// - `&[small]`
+pub(crate) fn is_short_pattern(pat: &ast::Pat, pat_str: &str) -> bool {
+ // We also require that the pattern's literal width be reasonably 'small'.
+ pat_str.len() <= 20 && !pat_str.contains('\n') && is_short_pattern_inner(pat)
+}
+
+fn is_short_pattern_inner(pat: &ast::Pat) -> bool {
+ match pat.kind {
+ ast::PatKind::Rest | ast::PatKind::Wild | ast::PatKind::Lit(_) => true,
+ ast::PatKind::Ident(_, _, ref pat) => pat.is_none(),
+ ast::PatKind::Struct(..)
+ | ast::PatKind::MacCall(..)
+ | ast::PatKind::Slice(..)
+ | ast::PatKind::Path(..)
+ | ast::PatKind::Range(..) => false,
+ ast::PatKind::Tuple(ref subpats) => subpats.len() <= 1,
+ ast::PatKind::TupleStruct(_, ref path, ref subpats) => {
+ path.segments.len() <= 1 && subpats.len() <= 1
+ }
+ ast::PatKind::Box(ref p) | ast::PatKind::Ref(ref p, _) | ast::PatKind::Paren(ref p) => {
+ is_short_pattern_inner(&*p)
+ }
+ PatKind::Or(ref pats) => pats.iter().all(|p| is_short_pattern_inner(p)),
+ }
+}
+
+struct RangeOperand<'a>(&'a Option<ptr::P<ast::Expr>>);
+
+impl<'a> Rewrite for RangeOperand<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match &self.0 {
+ None => Some("".to_owned()),
+ Some(ref exp) => exp.rewrite(context, shape),
+ }
+ }
+}
+
+impl Rewrite for Pat {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match self.kind {
+ PatKind::Or(ref pats) => {
+ let pat_strs = pats
+ .iter()
+ .map(|p| p.rewrite(context, shape))
+ .collect::<Option<Vec<_>>>()?;
+
+ let use_mixed_layout = pats
+ .iter()
+ .zip(pat_strs.iter())
+ .all(|(pat, pat_str)| is_short_pattern(pat, pat_str));
+ let items: Vec<_> = pat_strs.into_iter().map(ListItem::from_str).collect();
+ let tactic = if use_mixed_layout {
+ DefinitiveListTactic::Mixed
+ } else {
+ definitive_tactic(
+ &items,
+ ListTactic::HorizontalVertical,
+ Separator::VerticalBar,
+ shape.width,
+ )
+ };
+ let fmt = ListFormatting::new(shape, context.config)
+ .tactic(tactic)
+ .separator(" |")
+ .separator_place(context.config.binop_separator())
+ .ends_with_newline(false);
+ write_list(&items, &fmt)
+ }
+ PatKind::Box(ref pat) => rewrite_unary_prefix(context, "box ", &**pat, shape),
+ PatKind::Ident(binding_mode, ident, ref sub_pat) => {
+ let (prefix, mutability) = match binding_mode {
+ BindingMode::ByRef(mutability) => ("ref", mutability),
+ BindingMode::ByValue(mutability) => ("", mutability),
+ };
+ let mut_infix = format_mutability(mutability).trim();
+ let id_str = rewrite_ident(context, ident);
+ let sub_pat = match *sub_pat {
+ Some(ref p) => {
+ // 2 - `@ `.
+ let width = shape
+ .width
+ .checked_sub(prefix.len() + mut_infix.len() + id_str.len() + 2)?;
+ let lo = context.snippet_provider.span_after(self.span, "@");
+ combine_strs_with_missing_comments(
+ context,
+ "@",
+ &p.rewrite(context, Shape::legacy(width, shape.indent))?,
+ mk_sp(lo, p.span.lo()),
+ shape,
+ true,
+ )?
+ }
+ None => "".to_owned(),
+ };
+
+ // combine prefix and mut
+ let (first_lo, first) = if !prefix.is_empty() && !mut_infix.is_empty() {
+ let hi = context.snippet_provider.span_before(self.span, "mut");
+ let lo = context.snippet_provider.span_after(self.span, "ref");
+ (
+ context.snippet_provider.span_after(self.span, "mut"),
+ combine_strs_with_missing_comments(
+ context,
+ prefix,
+ mut_infix,
+ mk_sp(lo, hi),
+ shape,
+ true,
+ )?,
+ )
+ } else if !prefix.is_empty() {
+ (
+ context.snippet_provider.span_after(self.span, "ref"),
+ prefix.to_owned(),
+ )
+ } else if !mut_infix.is_empty() {
+ (
+ context.snippet_provider.span_after(self.span, "mut"),
+ mut_infix.to_owned(),
+ )
+ } else {
+ (self.span.lo(), "".to_owned())
+ };
+
+ let next = if !sub_pat.is_empty() {
+ let hi = context.snippet_provider.span_before(self.span, "@");
+ combine_strs_with_missing_comments(
+ context,
+ id_str,
+ &sub_pat,
+ mk_sp(ident.span.hi(), hi),
+ shape,
+ true,
+ )?
+ } else {
+ id_str.to_owned()
+ };
+
+ combine_strs_with_missing_comments(
+ context,
+ &first,
+ &next,
+ mk_sp(first_lo, ident.span.lo()),
+ shape,
+ true,
+ )
+ }
+ PatKind::Wild => {
+ if 1 <= shape.width {
+ Some("_".to_owned())
+ } else {
+ None
+ }
+ }
+ PatKind::Rest => {
+ if 1 <= shape.width {
+ Some("..".to_owned())
+ } else {
+ None
+ }
+ }
+ PatKind::Range(ref lhs, ref rhs, ref end_kind) => {
+ let infix = match end_kind.node {
+ RangeEnd::Included(RangeSyntax::DotDotDot) => "...",
+ RangeEnd::Included(RangeSyntax::DotDotEq) => "..=",
+ RangeEnd::Excluded => "..",
+ };
+ let infix = if context.config.spaces_around_ranges() {
+ let lhs_spacing = match lhs {
+ None => "",
+ Some(_) => " ",
+ };
+ let rhs_spacing = match rhs {
+ None => "",
+ Some(_) => " ",
+ };
+ format!("{}{}{}", lhs_spacing, infix, rhs_spacing)
+ } else {
+ infix.to_owned()
+ };
+ rewrite_pair(
+ &RangeOperand(lhs),
+ &RangeOperand(rhs),
+ PairParts::infix(&infix),
+ context,
+ shape,
+ SeparatorPlace::Front,
+ )
+ }
+ PatKind::Ref(ref pat, mutability) => {
+ let prefix = format!("&{}", format_mutability(mutability));
+ rewrite_unary_prefix(context, &prefix, &**pat, shape)
+ }
+ PatKind::Tuple(ref items) => rewrite_tuple_pat(items, None, self.span, context, shape),
+ PatKind::Path(ref q_self, ref path) => {
+ rewrite_path(context, PathContext::Expr, q_self.as_ref(), path, shape)
+ }
+ PatKind::TupleStruct(ref q_self, ref path, ref pat_vec) => {
+ let path_str =
+ rewrite_path(context, PathContext::Expr, q_self.as_ref(), path, shape)?;
+ rewrite_tuple_pat(pat_vec, Some(path_str), self.span, context, shape)
+ }
+ PatKind::Lit(ref expr) => expr.rewrite(context, shape),
+ PatKind::Slice(ref slice_pat) if context.config.version() == Version::One => {
+ let rw: Vec<String> = slice_pat
+ .iter()
+ .map(|p| {
+ if let Some(rw) = p.rewrite(context, shape) {
+ rw
+ } else {
+ context.snippet(p.span).to_string()
+ }
+ })
+ .collect();
+ Some(format!("[{}]", rw.join(", ")))
+ }
+ PatKind::Slice(ref slice_pat) => overflow::rewrite_with_square_brackets(
+ context,
+ "",
+ slice_pat.iter(),
+ shape,
+ self.span,
+ None,
+ None,
+ ),
+ PatKind::Struct(ref qself, ref path, ref fields, ellipsis) => {
+ rewrite_struct_pat(qself, path, fields, ellipsis, self.span, context, shape)
+ }
+ PatKind::MacCall(ref mac) => {
+ rewrite_macro(mac, None, context, shape, MacroPosition::Pat)
+ }
+ PatKind::Paren(ref pat) => pat
+ .rewrite(context, shape.offset_left(1)?.sub_width(1)?)
+ .map(|inner_pat| format!("({})", inner_pat)),
+ }
+ }
+}
+
+fn rewrite_struct_pat(
+ qself: &Option<ast::QSelf>,
+ path: &ast::Path,
+ fields: &[ast::PatField],
+ ellipsis: bool,
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ // 2 = ` {`
+ let path_shape = shape.sub_width(2)?;
+ let path_str = rewrite_path(context, PathContext::Expr, qself.as_ref(), path, path_shape)?;
+
+ if fields.is_empty() && !ellipsis {
+ return Some(format!("{} {{}}", path_str));
+ }
+
+ let (ellipsis_str, terminator) = if ellipsis { (", ..", "..") } else { ("", "}") };
+
+ // 3 = ` { `, 2 = ` }`.
+ let (h_shape, v_shape) =
+ struct_lit_shape(shape, context, path_str.len() + 3, ellipsis_str.len() + 2)?;
+
+ let items = itemize_list(
+ context.snippet_provider,
+ fields.iter(),
+ terminator,
+ ",",
+ |f| {
+ if f.attrs.is_empty() {
+ f.span.lo()
+ } else {
+ f.attrs.first().unwrap().span.lo()
+ }
+ },
+ |f| f.span.hi(),
+ |f| f.rewrite(context, v_shape),
+ context.snippet_provider.span_after(span, "{"),
+ span.hi(),
+ false,
+ );
+ let item_vec = items.collect::<Vec<_>>();
+
+ let tactic = struct_lit_tactic(h_shape, context, &item_vec);
+ let nested_shape = shape_for_tactic(tactic, h_shape, v_shape);
+ let fmt = struct_lit_formatting(nested_shape, tactic, context, false);
+
+ let mut fields_str = write_list(&item_vec, &fmt)?;
+ let one_line_width = h_shape.map_or(0, |shape| shape.width);
+
+ let has_trailing_comma = fmt.needs_trailing_separator();
+
+ if ellipsis {
+ if fields_str.contains('\n') || fields_str.len() > one_line_width {
+ // Add a missing trailing comma.
+ if !has_trailing_comma {
+ fields_str.push(',');
+ }
+ fields_str.push('\n');
+ fields_str.push_str(&nested_shape.indent.to_string(context.config));
+ } else {
+ if !fields_str.is_empty() {
+ // there are preceding struct fields being matched on
+ if has_trailing_comma {
+ fields_str.push(' ');
+ } else {
+ fields_str.push_str(", ");
+ }
+ }
+ }
+ fields_str.push_str("..");
+ }
+
+ // ast::Pat doesn't have attrs so use &[]
+ let fields_str = wrap_struct_field(context, &[], &fields_str, shape, v_shape, one_line_width)?;
+ Some(format!("{} {{{}}}", path_str, fields_str))
+}
+
+impl Rewrite for PatField {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let hi_pos = if let Some(last) = self.attrs.last() {
+ last.span.hi()
+ } else {
+ self.pat.span.lo()
+ };
+
+ let attrs_str = if self.attrs.is_empty() {
+ String::from("")
+ } else {
+ self.attrs.rewrite(context, shape)?
+ };
+
+ let pat_str = self.pat.rewrite(context, shape)?;
+ if self.is_shorthand {
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ &pat_str,
+ mk_sp(hi_pos, self.pat.span.lo()),
+ shape,
+ false,
+ )
+ } else {
+ let nested_shape = shape.block_indent(context.config.tab_spaces());
+ let id_str = rewrite_ident(context, self.ident);
+ let one_line_width = id_str.len() + 2 + pat_str.len();
+ let pat_and_id_str = if one_line_width <= shape.width {
+ format!("{}: {}", id_str, pat_str)
+ } else {
+ format!(
+ "{}:\n{}{}",
+ id_str,
+ nested_shape.indent.to_string(context.config),
+ self.pat.rewrite(context, nested_shape)?
+ )
+ };
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ &pat_and_id_str,
+ mk_sp(hi_pos, self.pat.span.lo()),
+ nested_shape,
+ false,
+ )
+ }
+ }
+}
+
+#[derive(Debug)]
+pub(crate) enum TuplePatField<'a> {
+ Pat(&'a ptr::P<ast::Pat>),
+ Dotdot(Span),
+}
+
+impl<'a> Rewrite for TuplePatField<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match *self {
+ TuplePatField::Pat(p) => p.rewrite(context, shape),
+ TuplePatField::Dotdot(_) => Some("..".to_string()),
+ }
+ }
+}
+
+impl<'a> Spanned for TuplePatField<'a> {
+ fn span(&self) -> Span {
+ match *self {
+ TuplePatField::Pat(p) => p.span(),
+ TuplePatField::Dotdot(span) => span,
+ }
+ }
+}
+
+impl<'a> TuplePatField<'a> {
+ fn is_dotdot(&self) -> bool {
+ match self {
+ TuplePatField::Pat(pat) => matches!(pat.kind, ast::PatKind::Rest),
+ TuplePatField::Dotdot(_) => true,
+ }
+ }
+}
+
+pub(crate) fn can_be_overflowed_pat(
+ context: &RewriteContext<'_>,
+ pat: &TuplePatField<'_>,
+ len: usize,
+) -> bool {
+ match *pat {
+ TuplePatField::Pat(pat) => match pat.kind {
+ ast::PatKind::Path(..)
+ | ast::PatKind::Tuple(..)
+ | ast::PatKind::Struct(..)
+ | ast::PatKind::TupleStruct(..) => context.use_block_indent() && len == 1,
+ ast::PatKind::Ref(ref p, _) | ast::PatKind::Box(ref p) => {
+ can_be_overflowed_pat(context, &TuplePatField::Pat(p), len)
+ }
+ ast::PatKind::Lit(ref expr) => can_be_overflowed_expr(context, expr, len),
+ _ => false,
+ },
+ TuplePatField::Dotdot(..) => false,
+ }
+}
+
+fn rewrite_tuple_pat(
+ pats: &[ptr::P<ast::Pat>],
+ path_str: Option<String>,
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ if pats.is_empty() {
+ return Some(format!("{}()", path_str.unwrap_or_default()));
+ }
+ let mut pat_vec: Vec<_> = pats.iter().map(TuplePatField::Pat).collect();
+
+ let wildcard_suffix_len = count_wildcard_suffix_len(context, &pat_vec, span, shape);
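+ // With `condense_wildcard_suffixes` enabled, a trailing run of two or more `_`
+ // patterns is collapsed into a single `..`.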
+ let (pat_vec, span) = if context.config.condense_wildcard_suffixes() && wildcard_suffix_len >= 2
+ {
+ let new_item_count = 1 + pat_vec.len() - wildcard_suffix_len;
+ let sp = pat_vec[new_item_count - 1].span();
+ let snippet = context.snippet(sp);
+ let lo = sp.lo() + BytePos(snippet.find_uncommented("_").unwrap() as u32);
+ pat_vec[new_item_count - 1] = TuplePatField::Dotdot(mk_sp_lo_plus_one(lo));
+ (
+ &pat_vec[..new_item_count],
+ mk_sp(span.lo(), lo + BytePos(1)),
+ )
+ } else {
+ (&pat_vec[..], span)
+ };
+
+ let is_last_pat_dotdot = pat_vec.last().map_or(false, |p| p.is_dotdot());
+ let add_comma = path_str.is_none() && pat_vec.len() == 1 && !is_last_pat_dotdot;
+ let path_str = path_str.unwrap_or_default();
+
+ overflow::rewrite_with_parens(
+ context,
+ &path_str,
+ pat_vec.iter(),
+ shape,
+ span,
+ context.config.max_width(),
+ if add_comma {
+ Some(SeparatorTactic::Always)
+ } else {
+ None
+ },
+ )
+}
+
+fn count_wildcard_suffix_len(
+ context: &RewriteContext<'_>,
+ patterns: &[TuplePatField<'_>],
+ span: Span,
+ shape: Shape,
+) -> usize {
+ let mut suffix_len = 0;
+
+ let items: Vec<_> = itemize_list(
+ context.snippet_provider,
+ patterns.iter(),
+ ")",
+ ",",
+ |item| item.span().lo(),
+ |item| item.span().hi(),
+ |item| item.rewrite(context, shape),
+ context.snippet_provider.span_after(span, "("),
+ span.hi() - BytePos(1),
+ false,
+ )
+ .collect();
+
+ for item in items
+ .iter()
+ .rev()
+ .take_while(|i| matches!(i.item, Some(ref internal_string) if internal_string == "_"))
+ {
+ suffix_len += 1;
+
+ if item.has_comment() {
+ break;
+ }
+ }
+
+ suffix_len
+}
diff --git a/src/tools/rustfmt/src/release_channel.rs b/src/tools/rustfmt/src/release_channel.rs
new file mode 100644
index 000000000..948247b3c
--- /dev/null
+++ b/src/tools/rustfmt/src/release_channel.rs
@@ -0,0 +1,16 @@
+/// Checks if we're in a nightly build.
+///
+/// The environment variable `CFG_RELEASE_CHANNEL` is set during the rustc bootstrap
+/// to "stable", "beta", or "nightly" depending on what toolchain is being built.
+/// If we are being built as part of the stable or beta toolchains, we want
+/// to disable unstable configuration options.
+///
+/// If we're being built by cargo (e.g., `cargo +nightly install rustfmt-nightly`),
+/// `CFG_RELEASE_CHANNEL` is not set. As we only support being built against the
+/// nightly compiler when installed from crates.io, default to nightly mode.
+#[macro_export]
+macro_rules! is_nightly_channel {
+ () => {
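+ // The `dev` channel is used for local builds of rustc; treat it like nightly.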
+ option_env!("CFG_RELEASE_CHANNEL").map_or(true, |c| c == "nightly" || c == "dev")
+ };
+}
diff --git a/src/tools/rustfmt/src/reorder.rs b/src/tools/rustfmt/src/reorder.rs
new file mode 100644
index 000000000..9e4a668aa
--- /dev/null
+++ b/src/tools/rustfmt/src/reorder.rs
@@ -0,0 +1,330 @@
+//! Reorder items.
+//!
+//! `mod`, `extern crate` and `use` declarations are reordered in alphabetical
+//! order. Trait items are reordered in pre-determined order (associated types
+//! and constants come before methods).
+
+// FIXME(#2455): Reorder trait items.
+
+use std::cmp::{Ord, Ordering};
+
+use rustc_ast::ast;
+use rustc_span::{symbol::sym, Span};
+
+use crate::config::{Config, GroupImportsTactic};
+use crate::imports::{normalize_use_trees_with_granularity, UseSegmentKind, UseTree};
+use crate::items::{is_mod_decl, rewrite_extern_crate, rewrite_mod};
+use crate::lists::{itemize_list, write_list, ListFormatting, ListItem};
+use crate::rewrite::RewriteContext;
+use crate::shape::Shape;
+use crate::source_map::LineRangeUtils;
+use crate::spanned::Spanned;
+use crate::utils::{contains_skip, mk_sp};
+use crate::visitor::FmtVisitor;
+
+/// Choose the ordering between the given two items.
+fn compare_items(a: &ast::Item, b: &ast::Item) -> Ordering {
+ match (&a.kind, &b.kind) {
+ (&ast::ItemKind::Mod(..), &ast::ItemKind::Mod(..)) => {
+ a.ident.as_str().cmp(b.ident.as_str())
+ }
+ (&ast::ItemKind::ExternCrate(ref a_name), &ast::ItemKind::ExternCrate(ref b_name)) => {
+ // `extern crate foo as bar;`
+ // ^^^ Comparing this.
+ let a_orig_name = a_name.unwrap_or(a.ident.name);
+ let b_orig_name = b_name.unwrap_or(b.ident.name);
+ let result = a_orig_name.as_str().cmp(b_orig_name.as_str());
+ if result != Ordering::Equal {
+ return result;
+ }
+
+ // `extern crate foo as bar;`
+ // ^^^ Comparing this.
+ match (a_name, b_name) {
+ (Some(..), None) => Ordering::Greater,
+ (None, Some(..)) => Ordering::Less,
+ (None, None) => Ordering::Equal,
+ (Some(..), Some(..)) => a.ident.as_str().cmp(b.ident.as_str()),
+ }
+ }
+ _ => unreachable!(),
+ }
+}
+
+fn wrap_reorderable_items(
+ context: &RewriteContext<'_>,
+ list_items: &[ListItem],
+ shape: Shape,
+) -> Option<String> {
+ let fmt = ListFormatting::new(shape, context.config)
+ .separator("")
+ .align_comments(false);
+ write_list(list_items, &fmt)
+}
+
+fn rewrite_reorderable_item(
+ context: &RewriteContext<'_>,
+ item: &ast::Item,
+ shape: Shape,
+) -> Option<String> {
+ match item.kind {
+ ast::ItemKind::ExternCrate(..) => rewrite_extern_crate(context, item, shape),
+ ast::ItemKind::Mod(..) => rewrite_mod(context, item, shape),
+ _ => None,
+ }
+}
+
+/// Rewrite a list of items with reordering and/or regrouping. Every item
+/// in `items` must have the same `ast::ItemKind`. Whether reordering, regrouping,
+/// or both are done is determined from the `context`.
+fn rewrite_reorderable_or_regroupable_items(
+ context: &RewriteContext<'_>,
+ reorderable_items: &[&ast::Item],
+ shape: Shape,
+ span: Span,
+) -> Option<String> {
+ match reorderable_items[0].kind {
+ // FIXME: Remove duplicated code.
+ ast::ItemKind::Use(..) => {
+ let mut normalized_items: Vec<_> = reorderable_items
+ .iter()
+ .filter_map(|item| UseTree::from_ast_with_normalization(context, item))
+ .collect();
+ let cloned = normalized_items.clone();
+ // Add comments before merging.
+ let list_items = itemize_list(
+ context.snippet_provider,
+ cloned.iter(),
+ "",
+ ";",
+ |item| item.span().lo(),
+ |item| item.span().hi(),
+ |_item| Some("".to_owned()),
+ span.lo(),
+ span.hi(),
+ false,
+ );
+ for (item, list_item) in normalized_items.iter_mut().zip(list_items) {
+ item.list_item = Some(list_item.clone());
+ }
+ normalized_items = normalize_use_trees_with_granularity(
+ normalized_items,
+ context.config.imports_granularity(),
+ );
+
+ let mut regrouped_items = match context.config.group_imports() {
+ GroupImportsTactic::Preserve | GroupImportsTactic::One => {
+ vec![normalized_items]
+ }
+ GroupImportsTactic::StdExternalCrate => group_imports(normalized_items),
+ };
+
+ if context.config.reorder_imports() {
+ regrouped_items.iter_mut().for_each(|items| items.sort())
+ }
+
+ // 4 = "use ", 1 = ";"
+ let nested_shape = shape.offset_left(4)?.sub_width(1)?;
+ let item_vec: Vec<_> = regrouped_items
+ .into_iter()
+ .filter(|use_group| !use_group.is_empty())
+ .map(|use_group| {
+ let item_vec: Vec<_> = use_group
+ .into_iter()
+ .map(|use_tree| ListItem {
+ item: use_tree.rewrite_top_level(context, nested_shape),
+ ..use_tree.list_item.unwrap_or_else(ListItem::empty)
+ })
+ .collect();
+ wrap_reorderable_items(context, &item_vec, nested_shape)
+ })
+ .collect::<Option<Vec<_>>>()?;
+
+ let join_string = format!("\n\n{}", shape.indent.to_string(context.config));
+ Some(item_vec.join(&join_string))
+ }
+ _ => {
+ let list_items = itemize_list(
+ context.snippet_provider,
+ reorderable_items.iter(),
+ "",
+ ";",
+ |item| item.span().lo(),
+ |item| item.span().hi(),
+ |item| rewrite_reorderable_item(context, item, shape),
+ span.lo(),
+ span.hi(),
+ false,
+ );
+
+ let mut item_pair_vec: Vec<_> = list_items.zip(reorderable_items.iter()).collect();
+ item_pair_vec.sort_by(|a, b| compare_items(a.1, b.1));
+ let item_vec: Vec<_> = item_pair_vec.into_iter().map(|pair| pair.0).collect();
+
+ wrap_reorderable_items(context, &item_vec, shape)
+ }
+ }
+}
+
+fn contains_macro_use_attr(item: &ast::Item) -> bool {
+ crate::attr::contains_name(&item.attrs, sym::macro_use)
+}
+
+/// Divides imports into three groups, corresponding to standard, external
+/// and local imports. Sorts each subgroup.
+fn group_imports(uts: Vec<UseTree>) -> Vec<Vec<UseTree>> {
+ let mut std_imports = Vec::new();
+ let mut external_imports = Vec::new();
+ let mut local_imports = Vec::new();
+
+ for ut in uts.into_iter() {
+ if ut.path.is_empty() {
+ external_imports.push(ut);
+ continue;
+ }
+ match &ut.path[0].kind {
+ UseSegmentKind::Ident(id, _) => match id.as_ref() {
+ "std" | "alloc" | "core" => std_imports.push(ut),
+ _ => external_imports.push(ut),
+ },
+ UseSegmentKind::Slf(_) | UseSegmentKind::Super(_) | UseSegmentKind::Crate(_) => {
+ local_imports.push(ut)
+ }
+ // These are probably illegal here
+ UseSegmentKind::Glob | UseSegmentKind::List(_) => external_imports.push(ut),
+ }
+ }
+
+ vec![std_imports, external_imports, local_imports]
+}
+
+/// A simplified version of `ast::ItemKind`.
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+enum ReorderableItemKind {
+ ExternCrate,
+ Mod,
+ Use,
+ /// An item that cannot be reordered. Either has an unreorderable item kind
+ /// or a `macro_use` attribute.
+ Other,
+}
+
+impl ReorderableItemKind {
+ fn from(item: &ast::Item) -> Self {
+ match item.kind {
+ _ if contains_macro_use_attr(item) | contains_skip(&item.attrs) => {
+ ReorderableItemKind::Other
+ }
+ ast::ItemKind::ExternCrate(..) => ReorderableItemKind::ExternCrate,
+ ast::ItemKind::Mod(..) if is_mod_decl(item) => ReorderableItemKind::Mod,
+ ast::ItemKind::Use(..) => ReorderableItemKind::Use,
+ _ => ReorderableItemKind::Other,
+ }
+ }
+
+ fn is_same_item_kind(self, item: &ast::Item) -> bool {
+ ReorderableItemKind::from(item) == self
+ }
+
+ fn is_reorderable(self, config: &Config) -> bool {
+ match self {
+ ReorderableItemKind::ExternCrate => config.reorder_imports(),
+ ReorderableItemKind::Mod => config.reorder_modules(),
+ ReorderableItemKind::Use => config.reorder_imports(),
+ ReorderableItemKind::Other => false,
+ }
+ }
+
+ fn is_regroupable(self, config: &Config) -> bool {
+ match self {
+ ReorderableItemKind::ExternCrate
+ | ReorderableItemKind::Mod
+ | ReorderableItemKind::Other => false,
+ ReorderableItemKind::Use => config.group_imports() != GroupImportsTactic::Preserve,
+ }
+ }
+
+ fn in_group(self, config: &Config) -> bool {
+ match self {
+ ReorderableItemKind::ExternCrate | ReorderableItemKind::Mod => true,
+ ReorderableItemKind::Use => config.group_imports() == GroupImportsTactic::Preserve,
+ ReorderableItemKind::Other => false,
+ }
+ }
+}
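// A hedged sketch of how the flags above interact, assuming a configuration
// with `reorder_imports = true`, `reorder_modules = false`, and
// `group_imports = "StdExternalCrate"`:
//
//     kind          is_reorderable  is_regroupable  in_group
//     ExternCrate   true            false           true
//     Mod           false           false           true
//     Use           true            true            false
//     Other         false           false           false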
+
+impl<'b, 'a: 'b> FmtVisitor<'a> {
+    /// Formats items of the same item kind and reorders them, regroups them, or
+    /// both. If `in_group` is `true`, items separated by an empty line are not
+    /// reordered together.
+ fn walk_reorderable_or_regroupable_items(
+ &mut self,
+ items: &[&ast::Item],
+ item_kind: ReorderableItemKind,
+ in_group: bool,
+ ) -> usize {
+ let mut last = self.parse_sess.lookup_line_range(items[0].span());
+ let item_length = items
+ .iter()
+ .take_while(|ppi| {
+ item_kind.is_same_item_kind(&***ppi)
+ && (!in_group || {
+ let current = self.parse_sess.lookup_line_range(ppi.span());
+ let in_same_group = current.lo < last.hi + 2;
+ last = current;
+ in_same_group
+ })
+ })
+ .count();
+ let items = &items[..item_length];
+
+ let at_least_one_in_file_lines = items
+ .iter()
+ .any(|item| !out_of_file_lines_range!(self, item.span));
+
+ if at_least_one_in_file_lines && !items.is_empty() {
+ let lo = items.first().unwrap().span().lo();
+ let hi = items.last().unwrap().span().hi();
+ let span = mk_sp(lo, hi);
+ let rw = rewrite_reorderable_or_regroupable_items(
+ &self.get_context(),
+ items,
+ self.shape(),
+ span,
+ );
+ self.push_rewrite(span, rw);
+ } else {
+ for item in items {
+ self.push_rewrite(item.span, None);
+ }
+ }
+
+ item_length
+ }
+
+    /// Visits and formats the given items. Items are reordered if they are
+    /// consecutive and reorderable.
+ pub(crate) fn visit_items_with_reordering(&mut self, mut items: &[&ast::Item]) {
+ while !items.is_empty() {
+ // If the next item is a `use`, `extern crate` or `mod`, then extract it and any
+ // subsequent items that have the same item kind to be reordered within
+ // `walk_reorderable_items`. Otherwise, just format the next item for output.
+ let item_kind = ReorderableItemKind::from(items[0]);
+ if item_kind.is_reorderable(self.config) || item_kind.is_regroupable(self.config) {
+ let visited_items_num = self.walk_reorderable_or_regroupable_items(
+ items,
+ item_kind,
+ item_kind.in_group(self.config),
+ );
+ let (_, rest) = items.split_at(visited_items_num);
+ items = rest;
+ } else {
+ // Reaching here means items were not reordered. There must be at least
+ // one item left in `items`, so calling `unwrap()` here is safe.
+ let (item, rest) = items.split_first().unwrap();
+ self.visit_item(item);
+ items = rest;
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/rewrite.rs b/src/tools/rustfmt/src/rewrite.rs
new file mode 100644
index 000000000..4a3bd129d
--- /dev/null
+++ b/src/tools/rustfmt/src/rewrite.rs
@@ -0,0 +1,98 @@
+// A generic trait to abstract the rewriting of an element (of the AST).
+
+use std::cell::{Cell, RefCell};
+use std::rc::Rc;
+
+use rustc_ast::ptr;
+use rustc_span::Span;
+
+use crate::config::{Config, IndentStyle};
+use crate::parse::session::ParseSess;
+use crate::shape::Shape;
+use crate::skip::SkipContext;
+use crate::visitor::SnippetProvider;
+use crate::FormatReport;
+
+pub(crate) trait Rewrite {
+ /// Rewrite self into shape.
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String>;
+}
+
+impl<T: Rewrite> Rewrite for ptr::P<T> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ (**self).rewrite(context, shape)
+ }
+}
+
+#[derive(Clone)]
+pub(crate) struct RewriteContext<'a> {
+ pub(crate) parse_sess: &'a ParseSess,
+ pub(crate) config: &'a Config,
+ pub(crate) inside_macro: Rc<Cell<bool>>,
+ // Force block indent style even if we are using visual indent style.
+ pub(crate) use_block: Cell<bool>,
+ // When `is_if_else_block` is true, unindent the comment on top
+ // of the `else` or `else if`.
+ pub(crate) is_if_else_block: Cell<bool>,
+    // When rewriting a chain, veto multi-line formatting for all but the last element.
+ pub(crate) force_one_line_chain: Cell<bool>,
+ pub(crate) snippet_provider: &'a SnippetProvider,
+ // Used for `format_snippet`
+ pub(crate) macro_rewrite_failure: Cell<bool>,
+ pub(crate) is_macro_def: bool,
+ pub(crate) report: FormatReport,
+ pub(crate) skip_context: SkipContext,
+ pub(crate) skipped_range: Rc<RefCell<Vec<(usize, usize)>>>,
+}
+
+pub(crate) struct InsideMacroGuard {
+ is_nested_macro_context: bool,
+ inside_macro_ref: Rc<Cell<bool>>,
+}
+
+impl InsideMacroGuard {
+ pub(crate) fn is_nested(&self) -> bool {
+ self.is_nested_macro_context
+ }
+}
+
+impl Drop for InsideMacroGuard {
+ fn drop(&mut self) {
+ self.inside_macro_ref.replace(self.is_nested_macro_context);
+ }
+}
+
+impl<'a> RewriteContext<'a> {
+ pub(crate) fn snippet(&self, span: Span) -> &str {
+ self.snippet_provider.span_to_snippet(span).unwrap()
+ }
+
+ /// Returns `true` if we should use block indent style for rewriting function call.
+ pub(crate) fn use_block_indent(&self) -> bool {
+ self.config.indent_style() == IndentStyle::Block || self.use_block.get()
+ }
+
+ pub(crate) fn budget(&self, used_width: usize) -> usize {
+ self.config.max_width().saturating_sub(used_width)
+ }
+
+ pub(crate) fn inside_macro(&self) -> bool {
+ self.inside_macro.get()
+ }
+
+ pub(crate) fn enter_macro(&self) -> InsideMacroGuard {
+ let is_nested_macro_context = self.inside_macro.replace(true);
+ InsideMacroGuard {
+ is_nested_macro_context,
+ inside_macro_ref: self.inside_macro.clone(),
+ }
+ }
+
+ pub(crate) fn leave_macro(&self) {
+ self.inside_macro.replace(false);
+ }
+
+ pub(crate) fn is_if_else_block(&self) -> bool {
+ self.is_if_else_block.get()
+ }
+}
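// A hedged usage sketch for the macro guard above (the function name is made
// up): `enter_macro` flips `inside_macro` to `true` and remembers the previous
// value, and dropping the returned guard restores it, so the flag unwinds
// correctly even on early returns from nested macro rewrites.
fn rewrite_some_macro_sketch(context: &RewriteContext<'_>) -> Option<String> {
    let guard = context.enter_macro();
    if guard.is_nested() {
        // Already inside another macro invocation; formatting may need to be
        // more conservative here.
    }
    // ... rewrite the macro body; `inside_macro` is reset when `guard` drops.
    None
}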
diff --git a/src/tools/rustfmt/src/rustfmt_diff.rs b/src/tools/rustfmt/src/rustfmt_diff.rs
new file mode 100644
index 000000000..1724a0f87
--- /dev/null
+++ b/src/tools/rustfmt/src/rustfmt_diff.rs
@@ -0,0 +1,400 @@
+use std::collections::VecDeque;
+use std::fmt;
+use std::io;
+use std::io::Write;
+
+use crate::config::{Color, Config, Verbosity};
+
+#[derive(Debug, PartialEq)]
+pub(crate) enum DiffLine {
+ Context(String),
+ Expected(String),
+ Resulting(String),
+}
+
+#[derive(Debug, PartialEq)]
+pub(crate) struct Mismatch {
+ /// The line number in the formatted version.
+ pub(crate) line_number: u32,
+ /// The line number in the original version.
+ pub(crate) line_number_orig: u32,
+ /// The set of lines (context and old/new) in the mismatch.
+ pub(crate) lines: Vec<DiffLine>,
+}
+
+impl Mismatch {
+ fn new(line_number: u32, line_number_orig: u32) -> Mismatch {
+ Mismatch {
+ line_number,
+ line_number_orig,
+ lines: Vec::new(),
+ }
+ }
+}
+
+/// A single span of changed lines, with 0 or more removed lines
+/// and a vector of 0 or more inserted lines.
+#[derive(Debug, PartialEq, Eq)]
+pub struct ModifiedChunk {
+    /// The first line to be removed from the original text
+ pub line_number_orig: u32,
+ /// The number of lines which have been replaced
+ pub lines_removed: u32,
+ /// The new lines
+ pub lines: Vec<String>,
+}
+
+/// Set of changed sections of a file.
+#[derive(Debug, PartialEq, Eq)]
+pub struct ModifiedLines {
+ /// The set of changed chunks.
+ pub chunks: Vec<ModifiedChunk>,
+}
+
+impl From<Vec<Mismatch>> for ModifiedLines {
+ fn from(mismatches: Vec<Mismatch>) -> ModifiedLines {
+ let chunks = mismatches.into_iter().map(|mismatch| {
+ let lines = mismatch.lines.iter();
+ let num_removed = lines
+ .filter(|line| matches!(line, DiffLine::Resulting(_)))
+ .count();
+
+ let new_lines = mismatch.lines.into_iter().filter_map(|line| match line {
+ DiffLine::Context(_) | DiffLine::Resulting(_) => None,
+ DiffLine::Expected(str) => Some(str),
+ });
+
+ ModifiedChunk {
+ line_number_orig: mismatch.line_number_orig,
+ lines_removed: num_removed as u32,
+ lines: new_lines.collect(),
+ }
+ });
+
+ ModifiedLines {
+ chunks: chunks.collect(),
+ }
+ }
+}
+
+// Serializes `ModifiedLines` into a form that includes just enough
+// information to modify the original file.
+// Each section starts with a line containing three space-separated integers:
+//     lineno num_removed num_added
+// followed by `num_added` lines of added text. The line numbers are
+// relative to the original file.
+impl fmt::Display for ModifiedLines {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for chunk in &self.chunks {
+ writeln!(
+ f,
+ "{} {} {}",
+ chunk.line_number_orig,
+ chunk.lines_removed,
+ chunk.lines.len()
+ )?;
+
+ for line in &chunk.lines {
+ writeln!(f, "{}", line)?;
+ }
+ }
+
+ Ok(())
+ }
+}
+
+// Converts `Display`ed `ModifiedLines` back into the structured data.
+impl std::str::FromStr for ModifiedLines {
+ type Err = ();
+
+ fn from_str(s: &str) -> Result<ModifiedLines, ()> {
+ let mut chunks = vec![];
+
+ let mut lines = s.lines();
+ while let Some(header) = lines.next() {
+ let mut header = header.split_whitespace();
+ let (orig, rem, new_lines) = match (header.next(), header.next(), header.next()) {
+ (Some(orig), Some(removed), Some(added)) => (orig, removed, added),
+ _ => return Err(()),
+ };
+ let (orig, rem, new_lines): (u32, u32, usize) =
+ match (orig.parse(), rem.parse(), new_lines.parse()) {
+ (Ok(a), Ok(b), Ok(c)) => (a, b, c),
+ _ => return Err(()),
+ };
+ let lines = lines.by_ref().take(new_lines);
+ let lines: Vec<_> = lines.map(ToOwned::to_owned).collect();
+ if lines.len() != new_lines {
+ return Err(());
+ }
+
+ chunks.push(ModifiedChunk {
+ line_number_orig: orig,
+ lines_removed: rem,
+ lines,
+ });
+ }
+
+ Ok(ModifiedLines { chunks })
+ }
+}
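// A worked example of the serialized form described above: the header line
// `3 2 1` means "starting at original line 3, two lines were removed and the
// one following line was inserted".
//
//     let serialized = "3 2 1\nlet x = 1;\n";
//     let parsed: ModifiedLines = serialized.parse().unwrap();
//     assert_eq!(parsed.chunks[0].line_number_orig, 3);
//     assert_eq!(parsed.chunks[0].lines_removed, 2);
//     assert_eq!(parsed.chunks[0].lines, vec!["let x = 1;".to_owned()]);
//
// `parsed.to_string()` round-trips back to the same text via the `Display` impl.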
+
+// This struct handles writing output to stdout and abstracts away the logic
+// of printing in color, if it's possible in the executing environment.
+pub(crate) struct OutputWriter {
+ terminal: Option<Box<dyn term::Terminal<Output = io::Stdout>>>,
+}
+
+impl OutputWriter {
+ // Create a new OutputWriter instance based on the caller's preference
+ // for colorized output and the capabilities of the terminal.
+ pub(crate) fn new(color: Color) -> Self {
+ if let Some(t) = term::stdout() {
+ if color.use_colored_tty() && t.supports_color() {
+ return OutputWriter { terminal: Some(t) };
+ }
+ }
+ OutputWriter { terminal: None }
+ }
+
+ // Write output in the optionally specified color. The output is written
+ // in the specified color if this OutputWriter instance contains a
+ // Terminal in its `terminal` field.
+ pub(crate) fn writeln(&mut self, msg: &str, color: Option<term::color::Color>) {
+ match &mut self.terminal {
+ Some(ref mut t) => {
+ if let Some(color) = color {
+ t.fg(color).unwrap();
+ }
+ writeln!(t, "{}", msg).unwrap();
+ if color.is_some() {
+ t.reset().unwrap();
+ }
+ }
+ None => println!("{}", msg),
+ }
+ }
+}
+
+// Produces a diff between the expected output and actual output of rustfmt.
+pub(crate) fn make_diff(expected: &str, actual: &str, context_size: usize) -> Vec<Mismatch> {
+ let mut line_number = 1;
+ let mut line_number_orig = 1;
+ let mut context_queue: VecDeque<&str> = VecDeque::with_capacity(context_size);
+ let mut lines_since_mismatch = context_size + 1;
+ let mut results = Vec::new();
+ let mut mismatch = Mismatch::new(0, 0);
+
+ for result in diff::lines(expected, actual) {
+ match result {
+ diff::Result::Left(str) => {
+ if lines_since_mismatch >= context_size && lines_since_mismatch > 0 {
+ results.push(mismatch);
+ mismatch = Mismatch::new(
+ line_number - context_queue.len() as u32,
+ line_number_orig - context_queue.len() as u32,
+ );
+ }
+
+ while let Some(line) = context_queue.pop_front() {
+ mismatch.lines.push(DiffLine::Context(line.to_owned()));
+ }
+
+ mismatch.lines.push(DiffLine::Resulting(str.to_owned()));
+ line_number_orig += 1;
+ lines_since_mismatch = 0;
+ }
+ diff::Result::Right(str) => {
+ if lines_since_mismatch >= context_size && lines_since_mismatch > 0 {
+ results.push(mismatch);
+ mismatch = Mismatch::new(
+ line_number - context_queue.len() as u32,
+ line_number_orig - context_queue.len() as u32,
+ );
+ }
+
+ while let Some(line) = context_queue.pop_front() {
+ mismatch.lines.push(DiffLine::Context(line.to_owned()));
+ }
+
+ mismatch.lines.push(DiffLine::Expected(str.to_owned()));
+ line_number += 1;
+ lines_since_mismatch = 0;
+ }
+ diff::Result::Both(str, _) => {
+ if context_queue.len() >= context_size {
+ let _ = context_queue.pop_front();
+ }
+
+ if lines_since_mismatch < context_size {
+ mismatch.lines.push(DiffLine::Context(str.to_owned()));
+ } else if context_size > 0 {
+ context_queue.push_back(str);
+ }
+
+ line_number += 1;
+ line_number_orig += 1;
+ lines_since_mismatch += 1;
+ }
+ }
+ }
+
+ results.push(mismatch);
+ results.remove(0);
+
+ results
+}
+
+pub(crate) fn print_diff<F>(diff: Vec<Mismatch>, get_section_title: F, config: &Config)
+where
+ F: Fn(u32) -> String,
+{
+ let color = config.color();
+ let line_terminator = if config.verbose() == Verbosity::Verbose {
+ "⏎"
+ } else {
+ ""
+ };
+
+ let mut writer = OutputWriter::new(color);
+
+ for mismatch in diff {
+ let title = get_section_title(mismatch.line_number_orig);
+ writer.writeln(&title, None);
+
+ for line in mismatch.lines {
+ match line {
+ DiffLine::Context(ref str) => {
+ writer.writeln(&format!(" {}{}", str, line_terminator), None)
+ }
+ DiffLine::Expected(ref str) => writer.writeln(
+ &format!("+{}{}", str, line_terminator),
+ Some(term::color::GREEN),
+ ),
+ DiffLine::Resulting(ref str) => writer.writeln(
+ &format!("-{}{}", str, line_terminator),
+ Some(term::color::RED),
+ ),
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::DiffLine::*;
+ use super::{make_diff, Mismatch};
+ use super::{ModifiedChunk, ModifiedLines};
+
+ #[test]
+ fn diff_simple() {
+ let src = "one\ntwo\nthree\nfour\nfive\n";
+ let dest = "one\ntwo\ntrois\nfour\nfive\n";
+ let diff = make_diff(src, dest, 1);
+ assert_eq!(
+ diff,
+ vec![Mismatch {
+ line_number: 2,
+ line_number_orig: 2,
+ lines: vec![
+ Context("two".to_owned()),
+ Resulting("three".to_owned()),
+ Expected("trois".to_owned()),
+ Context("four".to_owned()),
+ ],
+ }]
+ );
+ }
+
+ #[test]
+ fn diff_simple2() {
+ let src = "one\ntwo\nthree\nfour\nfive\nsix\nseven\n";
+ let dest = "one\ntwo\ntrois\nfour\ncinq\nsix\nseven\n";
+ let diff = make_diff(src, dest, 1);
+ assert_eq!(
+ diff,
+ vec![
+ Mismatch {
+ line_number: 2,
+ line_number_orig: 2,
+ lines: vec![
+ Context("two".to_owned()),
+ Resulting("three".to_owned()),
+ Expected("trois".to_owned()),
+ Context("four".to_owned()),
+ ],
+ },
+ Mismatch {
+ line_number: 5,
+ line_number_orig: 5,
+ lines: vec![
+ Resulting("five".to_owned()),
+ Expected("cinq".to_owned()),
+ Context("six".to_owned()),
+ ],
+ },
+ ]
+ );
+ }
+
+ #[test]
+ fn diff_zerocontext() {
+ let src = "one\ntwo\nthree\nfour\nfive\n";
+ let dest = "one\ntwo\ntrois\nfour\nfive\n";
+ let diff = make_diff(src, dest, 0);
+ assert_eq!(
+ diff,
+ vec![Mismatch {
+ line_number: 3,
+ line_number_orig: 3,
+ lines: vec![Resulting("three".to_owned()), Expected("trois".to_owned())],
+ }]
+ );
+ }
+
+ #[test]
+ fn diff_trailing_newline() {
+ let src = "one\ntwo\nthree\nfour\nfive";
+ let dest = "one\ntwo\nthree\nfour\nfive\n";
+ let diff = make_diff(src, dest, 1);
+ assert_eq!(
+ diff,
+ vec![Mismatch {
+ line_number: 5,
+ line_number_orig: 5,
+ lines: vec![Context("five".to_owned()), Expected("".to_owned())],
+ }]
+ );
+ }
+
+ #[test]
+ fn modified_lines_from_str() {
+ use std::str::FromStr;
+
+ let src = "1 6 2\nfn some() {}\nfn main() {}\n25 3 1\n struct Test {}";
+ let lines = ModifiedLines::from_str(src).unwrap();
+ assert_eq!(
+ lines,
+ ModifiedLines {
+ chunks: vec![
+ ModifiedChunk {
+ line_number_orig: 1,
+ lines_removed: 6,
+ lines: vec!["fn some() {}".to_owned(), "fn main() {}".to_owned(),]
+ },
+ ModifiedChunk {
+ line_number_orig: 25,
+ lines_removed: 3,
+ lines: vec![" struct Test {}".to_owned()]
+ }
+ ]
+ }
+ );
+
+ let src = "1 5 3";
+ assert_eq!(ModifiedLines::from_str(src), Err(()));
+
+ let src = "1 5 3\na\nb";
+ assert_eq!(ModifiedLines::from_str(src), Err(()));
+ }
+}
diff --git a/src/tools/rustfmt/src/shape.rs b/src/tools/rustfmt/src/shape.rs
new file mode 100644
index 000000000..4376fd12b
--- /dev/null
+++ b/src/tools/rustfmt/src/shape.rs
@@ -0,0 +1,373 @@
+use std::borrow::Cow;
+use std::cmp::min;
+use std::ops::{Add, Sub};
+
+use crate::Config;
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct Indent {
+ // Width of the block indent, in characters. Must be a multiple of
+ // Config::tab_spaces.
+ pub(crate) block_indent: usize,
+ // Alignment in characters.
+ pub(crate) alignment: usize,
+}
+
+// INDENT_BUFFER.len() = 81
+const INDENT_BUFFER_LEN: usize = 80;
+const INDENT_BUFFER: &str =
+ "\n ";
+
+impl Indent {
+ pub(crate) fn new(block_indent: usize, alignment: usize) -> Indent {
+ Indent {
+ block_indent,
+ alignment,
+ }
+ }
+
+ pub(crate) fn from_width(config: &Config, width: usize) -> Indent {
+ if config.hard_tabs() {
+ let tab_num = width / config.tab_spaces();
+ let alignment = width % config.tab_spaces();
+ Indent::new(config.tab_spaces() * tab_num, alignment)
+ } else {
+ Indent::new(width, 0)
+ }
+ }
+
+ pub(crate) fn empty() -> Indent {
+ Indent::new(0, 0)
+ }
+
+ pub(crate) fn block_only(&self) -> Indent {
+ Indent {
+ block_indent: self.block_indent,
+ alignment: 0,
+ }
+ }
+
+ pub(crate) fn block_indent(mut self, config: &Config) -> Indent {
+ self.block_indent += config.tab_spaces();
+ self
+ }
+
+ pub(crate) fn block_unindent(mut self, config: &Config) -> Indent {
+ if self.block_indent < config.tab_spaces() {
+ Indent::new(self.block_indent, 0)
+ } else {
+ self.block_indent -= config.tab_spaces();
+ self
+ }
+ }
+
+ pub(crate) fn width(&self) -> usize {
+ self.block_indent + self.alignment
+ }
+
+ pub(crate) fn to_string(&self, config: &Config) -> Cow<'static, str> {
+ self.to_string_inner(config, 1)
+ }
+
+ pub(crate) fn to_string_with_newline(&self, config: &Config) -> Cow<'static, str> {
+ self.to_string_inner(config, 0)
+ }
+
+ fn to_string_inner(&self, config: &Config, offset: usize) -> Cow<'static, str> {
+ let (num_tabs, num_spaces) = if config.hard_tabs() {
+ (self.block_indent / config.tab_spaces(), self.alignment)
+ } else {
+ (0, self.width())
+ };
+ let num_chars = num_tabs + num_spaces;
+ if num_tabs == 0 && num_chars + offset <= INDENT_BUFFER_LEN {
+ Cow::from(&INDENT_BUFFER[offset..=num_chars])
+ } else {
+ let mut indent = String::with_capacity(num_chars + if offset == 0 { 1 } else { 0 });
+ if offset == 0 {
+ indent.push('\n');
+ }
+ for _ in 0..num_tabs {
+ indent.push('\t')
+ }
+ for _ in 0..num_spaces {
+ indent.push(' ')
+ }
+ Cow::from(indent)
+ }
+ }
+}
+
+impl Add for Indent {
+ type Output = Indent;
+
+ fn add(self, rhs: Indent) -> Indent {
+ Indent {
+ block_indent: self.block_indent + rhs.block_indent,
+ alignment: self.alignment + rhs.alignment,
+ }
+ }
+}
+
+impl Sub for Indent {
+ type Output = Indent;
+
+ fn sub(self, rhs: Indent) -> Indent {
+ Indent::new(
+ self.block_indent - rhs.block_indent,
+ self.alignment - rhs.alignment,
+ )
+ }
+}
+
+impl Add<usize> for Indent {
+ type Output = Indent;
+
+ fn add(self, rhs: usize) -> Indent {
+ Indent::new(self.block_indent, self.alignment + rhs)
+ }
+}
+
+impl Sub<usize> for Indent {
+ type Output = Indent;
+
+ fn sub(self, rhs: usize) -> Indent {
+ Indent::new(self.block_indent, self.alignment - rhs)
+ }
+}
+
+// 8096 is close enough to infinite for rustfmt.
+const INFINITE_SHAPE_WIDTH: usize = 8096;
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct Shape {
+ pub(crate) width: usize,
+ // The current indentation of code.
+ pub(crate) indent: Indent,
+ // Indentation + any already emitted text on the first line of the current
+ // statement.
+ pub(crate) offset: usize,
+}
+
+impl Shape {
+ /// `indent` is the indentation of the first line. The next lines
+ /// should begin with at least `indent` spaces (except backwards
+ /// indentation). The first line should not begin with indentation.
+ /// `width` is the maximum number of characters on the last line
+ /// (excluding `indent`). The width of other lines is not limited by
+ /// `width`.
+ /// Note that in reality, we sometimes use width for lines other than the
+ /// last (i.e., we are conservative).
+ // .......*-------*
+ // | |
+ // | *-*
+ // *-----|
+ // |<------------>| max width
+ // |<---->| indent
+ // |<--->| width
+ pub(crate) fn legacy(width: usize, indent: Indent) -> Shape {
+ Shape {
+ width,
+ indent,
+ offset: indent.alignment,
+ }
+ }
+
+ pub(crate) fn indented(indent: Indent, config: &Config) -> Shape {
+ Shape {
+ width: config.max_width().saturating_sub(indent.width()),
+ indent,
+ offset: indent.alignment,
+ }
+ }
+
+ pub(crate) fn with_max_width(&self, config: &Config) -> Shape {
+ Shape {
+ width: config.max_width().saturating_sub(self.indent.width()),
+ ..*self
+ }
+ }
+
+ pub(crate) fn visual_indent(&self, extra_width: usize) -> Shape {
+ let alignment = self.offset + extra_width;
+ Shape {
+ width: self.width,
+ indent: Indent::new(self.indent.block_indent, alignment),
+ offset: alignment,
+ }
+ }
+
+ pub(crate) fn block_indent(&self, extra_width: usize) -> Shape {
+ if self.indent.alignment == 0 {
+ Shape {
+ width: self.width,
+ indent: Indent::new(self.indent.block_indent + extra_width, 0),
+ offset: 0,
+ }
+ } else {
+ Shape {
+ width: self.width,
+ indent: self.indent + extra_width,
+ offset: self.indent.alignment + extra_width,
+ }
+ }
+ }
+
+ pub(crate) fn block_left(&self, width: usize) -> Option<Shape> {
+ self.block_indent(width).sub_width(width)
+ }
+
+ pub(crate) fn add_offset(&self, extra_width: usize) -> Shape {
+ Shape {
+ offset: self.offset + extra_width,
+ ..*self
+ }
+ }
+
+ pub(crate) fn block(&self) -> Shape {
+ Shape {
+ indent: self.indent.block_only(),
+ ..*self
+ }
+ }
+
+ pub(crate) fn saturating_sub_width(&self, width: usize) -> Shape {
+ self.sub_width(width).unwrap_or(Shape { width: 0, ..*self })
+ }
+
+ pub(crate) fn sub_width(&self, width: usize) -> Option<Shape> {
+ Some(Shape {
+ width: self.width.checked_sub(width)?,
+ ..*self
+ })
+ }
+
+ pub(crate) fn shrink_left(&self, width: usize) -> Option<Shape> {
+ Some(Shape {
+ width: self.width.checked_sub(width)?,
+ indent: self.indent + width,
+ offset: self.offset + width,
+ })
+ }
+
+ pub(crate) fn offset_left(&self, width: usize) -> Option<Shape> {
+ self.add_offset(width).sub_width(width)
+ }
+
+ pub(crate) fn used_width(&self) -> usize {
+ self.indent.block_indent + self.offset
+ }
+
+ pub(crate) fn rhs_overhead(&self, config: &Config) -> usize {
+ config
+ .max_width()
+ .saturating_sub(self.used_width() + self.width)
+ }
+
+ pub(crate) fn comment(&self, config: &Config) -> Shape {
+ let width = min(
+ self.width,
+ config.comment_width().saturating_sub(self.indent.width()),
+ );
+ Shape { width, ..*self }
+ }
+
+ pub(crate) fn to_string_with_newline(&self, config: &Config) -> Cow<'static, str> {
+ let mut offset_indent = self.indent;
+ offset_indent.alignment = self.offset;
+ offset_indent.to_string_inner(config, 0)
+ }
+
+ /// Creates a `Shape` with a virtually infinite width.
+ pub(crate) fn infinite_width(&self) -> Shape {
+ Shape {
+ width: INFINITE_SHAPE_WIDTH,
+ ..*self
+ }
+ }
+}
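// A hedged example of the shape arithmetic above, mirroring how imports are
// rewritten earlier in this patch (the caller reserves 4 columns for `"use "`
// and 1 for the trailing `";"`); the concrete widths assume the default
// `max_width` of 100:
//
//     let config = Config::default();
//     let shape = Shape::indented(Indent::new(4, 0), &config); // width = 96
//     let nested = shape.offset_left(4).and_then(|s| s.sub_width(1));
//     // `offset_left(4)` moves the offset right and shrinks the width to 92,
//     // then `sub_width(1)` leaves 91 columns for the use-tree itself.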
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn indent_add_sub() {
+ let indent = Indent::new(4, 8) + Indent::new(8, 12);
+ assert_eq!(12, indent.block_indent);
+ assert_eq!(20, indent.alignment);
+
+ let indent = indent - Indent::new(4, 4);
+ assert_eq!(8, indent.block_indent);
+ assert_eq!(16, indent.alignment);
+ }
+
+ #[test]
+ fn indent_add_sub_alignment() {
+ let indent = Indent::new(4, 8) + 4;
+ assert_eq!(4, indent.block_indent);
+ assert_eq!(12, indent.alignment);
+
+ let indent = indent - 4;
+ assert_eq!(4, indent.block_indent);
+ assert_eq!(8, indent.alignment);
+ }
+
+ #[test]
+ fn indent_to_string_spaces() {
+ let config = Config::default();
+ let indent = Indent::new(4, 8);
+
+ // 12 spaces
+ assert_eq!(" ", indent.to_string(&config));
+ }
+
+ #[test]
+ fn indent_to_string_hard_tabs() {
+ let mut config = Config::default();
+ config.set().hard_tabs(true);
+ let indent = Indent::new(8, 4);
+
+ // 2 tabs + 4 spaces
+ assert_eq!("\t\t ", indent.to_string(&config));
+ }
+
+ #[test]
+ fn shape_visual_indent() {
+ let config = Config::default();
+ let indent = Indent::new(4, 8);
+ let shape = Shape::legacy(config.max_width(), indent);
+ let shape = shape.visual_indent(20);
+
+ assert_eq!(config.max_width(), shape.width);
+ assert_eq!(4, shape.indent.block_indent);
+ assert_eq!(28, shape.indent.alignment);
+ assert_eq!(28, shape.offset);
+ }
+
+ #[test]
+ fn shape_block_indent_without_alignment() {
+ let config = Config::default();
+ let indent = Indent::new(4, 0);
+ let shape = Shape::legacy(config.max_width(), indent);
+ let shape = shape.block_indent(20);
+
+ assert_eq!(config.max_width(), shape.width);
+ assert_eq!(24, shape.indent.block_indent);
+ assert_eq!(0, shape.indent.alignment);
+ assert_eq!(0, shape.offset);
+ }
+
+ #[test]
+ fn shape_block_indent_with_alignment() {
+ let config = Config::default();
+ let indent = Indent::new(4, 8);
+ let shape = Shape::legacy(config.max_width(), indent);
+ let shape = shape.block_indent(20);
+
+ assert_eq!(config.max_width(), shape.width);
+ assert_eq!(4, shape.indent.block_indent);
+ assert_eq!(28, shape.indent.alignment);
+ assert_eq!(28, shape.offset);
+ }
+}
diff --git a/src/tools/rustfmt/src/skip.rs b/src/tools/rustfmt/src/skip.rs
new file mode 100644
index 000000000..0fdc097ef
--- /dev/null
+++ b/src/tools/rustfmt/src/skip.rs
@@ -0,0 +1,76 @@
+//! Module that contains skip-related logic.
+
+use rustc_ast::ast;
+use rustc_ast_pretty::pprust;
+
+/// Keeps track of skip names. It can be updated from an attribute slice or
+/// from another context. Query this context to decide whether a block should
+/// be skipped.
+#[derive(Default, Clone)]
+pub(crate) struct SkipContext {
+ macros: Vec<String>,
+ attributes: Vec<String>,
+}
+
+impl SkipContext {
+ pub(crate) fn update_with_attrs(&mut self, attrs: &[ast::Attribute]) {
+ self.macros.append(&mut get_skip_names("macros", attrs));
+ self.attributes
+ .append(&mut get_skip_names("attributes", attrs));
+ }
+
+ pub(crate) fn update(&mut self, mut other: SkipContext) {
+ self.macros.append(&mut other.macros);
+ self.attributes.append(&mut other.attributes);
+ }
+
+ pub(crate) fn skip_macro(&self, name: &str) -> bool {
+ self.macros.iter().any(|n| n == name)
+ }
+
+ pub(crate) fn skip_attribute(&self, name: &str) -> bool {
+ self.attributes.iter().any(|n| n == name)
+ }
+}
+
+static RUSTFMT: &str = "rustfmt";
+static SKIP: &str = "skip";
+
+/// Returns `true` if the given path segments form `rustfmt`'s skip attribute.
+pub(crate) fn is_skip_attr(segments: &[ast::PathSegment]) -> bool {
+ if segments.len() < 2 || segments[0].ident.to_string() != RUSTFMT {
+ return false;
+ }
+ match segments.len() {
+ 2 => segments[1].ident.to_string() == SKIP,
+ 3 => {
+ segments[1].ident.to_string() == SKIP
+ && ["macros", "attributes"]
+ .iter()
+ .any(|&n| n == pprust::path_segment_to_string(&segments[2]))
+ }
+ _ => false,
+ }
+}
+
+fn get_skip_names(kind: &str, attrs: &[ast::Attribute]) -> Vec<String> {
+ let mut skip_names = vec![];
+ let path = format!("{}::{}::{}", RUSTFMT, SKIP, kind);
+ for attr in attrs {
+        // `rustc_ast::ast::Path` implements `PartialEq`, but that implementation
+        // is designed for paths with `segments.len() == 1`.
+ if let ast::AttrKind::Normal(attr_item, _) = &attr.kind {
+ if pprust::path_to_string(&attr_item.path) != path {
+ continue;
+ }
+ }
+
+ if let Some(list) = attr.meta_item_list() {
+ for nested_meta_item in list {
+ if let Some(name) = nested_meta_item.ident() {
+ skip_names.push(name.to_string());
+ }
+ }
+ }
+ }
+ skip_names
+}
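// A hedged example of the attribute shapes recognized above (the listed names
// are invented): `is_skip_attr` accepts both the bare form and the scoped
// form, and `get_skip_names` collects the identifiers from the list.
//
//     #[rustfmt::skip]                      // segments: rustfmt::skip
//     #[rustfmt::skip::macros(html, sql)]   // adds "html" and "sql" to `macros`
//     #[rustfmt::skip::attributes(custom)]  // adds "custom" to `attributes`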
diff --git a/src/tools/rustfmt/src/source_file.rs b/src/tools/rustfmt/src/source_file.rs
new file mode 100644
index 000000000..56d4ab400
--- /dev/null
+++ b/src/tools/rustfmt/src/source_file.rs
@@ -0,0 +1,105 @@
+use std::fs;
+use std::io::{self, Write};
+use std::path::Path;
+
+use crate::config::FileName;
+use crate::emitter::{self, Emitter};
+use crate::parse::session::ParseSess;
+use crate::NewlineStyle;
+
+#[cfg(test)]
+use crate::config::Config;
+#[cfg(test)]
+use crate::create_emitter;
+#[cfg(test)]
+use crate::formatting::FileRecord;
+
+use rustc_data_structures::sync::Lrc;
+
+// Append a newline to the end of each file.
+pub(crate) fn append_newline(s: &mut String) {
+ s.push('\n');
+}
+
+#[cfg(test)]
+pub(crate) fn write_all_files<T>(
+ source_file: &[FileRecord],
+ out: &mut T,
+ config: &Config,
+) -> Result<(), io::Error>
+where
+ T: Write,
+{
+ let mut emitter = create_emitter(config);
+
+ emitter.emit_header(out)?;
+ for &(ref filename, ref text) in source_file {
+ write_file(
+ None,
+ filename,
+ text,
+ out,
+ &mut *emitter,
+ config.newline_style(),
+ )?;
+ }
+ emitter.emit_footer(out)?;
+
+ Ok(())
+}
+
+pub(crate) fn write_file<T>(
+ parse_sess: Option<&ParseSess>,
+ filename: &FileName,
+ formatted_text: &str,
+ out: &mut T,
+ emitter: &mut dyn Emitter,
+ newline_style: NewlineStyle,
+) -> Result<emitter::EmitterResult, io::Error>
+where
+ T: Write,
+{
+ fn ensure_real_path(filename: &FileName) -> &Path {
+ match *filename {
+ FileName::Real(ref path) => path,
+ _ => panic!("cannot format `{}` and emit to files", filename),
+ }
+ }
+
+ impl From<&FileName> for rustc_span::FileName {
+ fn from(filename: &FileName) -> rustc_span::FileName {
+ match filename {
+ FileName::Real(path) => {
+ rustc_span::FileName::Real(rustc_span::RealFileName::LocalPath(path.to_owned()))
+ }
+ FileName::Stdin => rustc_span::FileName::Custom("stdin".to_owned()),
+ }
+ }
+ }
+
+    // `SourceFile`s in the `SourceMap` always have Unix-style line endings.
+    // See: https://github.com/rust-lang/rustfmt/issues/3850
+    // So if the user has explicitly overridden the rustfmt `newline_style`
+    // config and `filename` is `FileName::Real`, we must read the file system
+    // to get the original file contents in order to detect newline_style conflicts.
+    // Otherwise, when a parse session is available (cfg(not(test))) and
+    // `newline_style` has been left at its default value, try getting the source
+    // from the parse session's source map instead of hitting the file system.
+    // This also supports getting the original text for `FileName::Stdin`.
+ let original_text = if newline_style != NewlineStyle::Auto && *filename != FileName::Stdin {
+ Lrc::new(fs::read_to_string(ensure_real_path(filename))?)
+ } else {
+ match parse_sess.and_then(|sess| sess.get_original_snippet(filename)) {
+ Some(ori) => ori,
+ None => Lrc::new(fs::read_to_string(ensure_real_path(filename))?),
+ }
+ };
+
+ let formatted_file = emitter::FormattedFile {
+ filename,
+ original_text: original_text.as_str(),
+ formatted_text,
+ };
+
+ emitter.emit_formatted_file(out, formatted_file)
+}
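// A small illustration of the rule above (the scenario is hypothetical): with
// `newline_style = "Windows"` and a real file on disk, the original bytes are
// re-read from the file system so that CRLF endings survive the comparison;
// with the default `newline_style = "Auto"`, or when formatting stdin, the
// original text is taken from the parse session's source map instead.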
diff --git a/src/tools/rustfmt/src/source_map.rs b/src/tools/rustfmt/src/source_map.rs
new file mode 100644
index 000000000..76e0d24cf
--- /dev/null
+++ b/src/tools/rustfmt/src/source_map.rs
@@ -0,0 +1,82 @@
+//! This module contains utilities that work with the `SourceMap` from `libsyntax`/`syntex_syntax`.
+//! This includes extension traits and methods for looking up spans and line ranges for AST nodes.
+
+use rustc_span::{BytePos, Span};
+
+use crate::comment::FindUncommented;
+use crate::config::file_lines::LineRange;
+use crate::visitor::SnippetProvider;
+
+pub(crate) trait SpanUtils {
+ fn span_after(&self, original: Span, needle: &str) -> BytePos;
+ fn span_after_last(&self, original: Span, needle: &str) -> BytePos;
+ fn span_before(&self, original: Span, needle: &str) -> BytePos;
+ fn span_before_last(&self, original: Span, needle: &str) -> BytePos;
+ fn opt_span_after(&self, original: Span, needle: &str) -> Option<BytePos>;
+ fn opt_span_before(&self, original: Span, needle: &str) -> Option<BytePos>;
+}
+
+pub(crate) trait LineRangeUtils {
+ /// Returns the `LineRange` that corresponds to `span` in `self`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `span` crosses a file boundary, which shouldn't happen.
+ fn lookup_line_range(&self, span: Span) -> LineRange;
+}
+
+impl SpanUtils for SnippetProvider {
+ fn span_after(&self, original: Span, needle: &str) -> BytePos {
+ self.opt_span_after(original, needle).unwrap_or_else(|| {
+ panic!(
+ "bad span: `{}`: `{}`",
+ needle,
+ self.span_to_snippet(original).unwrap()
+ )
+ })
+ }
+
+ fn span_after_last(&self, original: Span, needle: &str) -> BytePos {
+ let snippet = self.span_to_snippet(original).unwrap();
+ let mut offset = 0;
+
+ while let Some(additional_offset) = snippet[offset..].find_uncommented(needle) {
+ offset += additional_offset + needle.len();
+ }
+
+ original.lo() + BytePos(offset as u32)
+ }
+
+ fn span_before(&self, original: Span, needle: &str) -> BytePos {
+ self.opt_span_before(original, needle).unwrap_or_else(|| {
+ panic!(
+ "bad span: `{}`: `{}`",
+ needle,
+ self.span_to_snippet(original).unwrap()
+ )
+ })
+ }
+
+ fn span_before_last(&self, original: Span, needle: &str) -> BytePos {
+ let snippet = self.span_to_snippet(original).unwrap();
+ let mut offset = 0;
+
+ while let Some(additional_offset) = snippet[offset..].find_uncommented(needle) {
+ offset += additional_offset + needle.len();
+ }
+
+ original.lo() + BytePos(offset as u32 - 1)
+ }
+
+ fn opt_span_after(&self, original: Span, needle: &str) -> Option<BytePos> {
+ self.opt_span_before(original, needle)
+ .map(|bytepos| bytepos + BytePos(needle.len() as u32))
+ }
+
+ fn opt_span_before(&self, original: Span, needle: &str) -> Option<BytePos> {
+ let snippet = self.span_to_snippet(original)?;
+ let offset = snippet.find_uncommented(needle)?;
+
+ Some(original.lo() + BytePos(offset as u32))
+ }
+}
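// A hedged example of the lookups above (the snippet is invented, and it is
// assumed that `find_uncommented` skips text inside comments, as its name
// suggests): for a span covering `pub /* fn */ fn foo()`,
// `span_after(span, "fn")` returns the position just past the real `fn`
// keyword rather than the one inside the comment, while
// `span_before(span, "(")` returns the position of the `(` itself.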
diff --git a/src/tools/rustfmt/src/spanned.rs b/src/tools/rustfmt/src/spanned.rs
new file mode 100644
index 000000000..2136cfeae
--- /dev/null
+++ b/src/tools/rustfmt/src/spanned.rs
@@ -0,0 +1,199 @@
+use std::cmp::max;
+
+use rustc_ast::{ast, ptr};
+use rustc_span::{source_map, Span};
+
+use crate::macros::MacroArg;
+use crate::utils::{mk_sp, outer_attributes};
+
+/// Spanned returns a span including attributes, if available.
+pub(crate) trait Spanned {
+ fn span(&self) -> Span;
+}
+
+impl<T: Spanned> Spanned for ptr::P<T> {
+ fn span(&self) -> Span {
+ (**self).span()
+ }
+}
+
+impl<T> Spanned for source_map::Spanned<T> {
+ fn span(&self) -> Span {
+ self.span
+ }
+}
+
+macro_rules! span_with_attrs_lo_hi {
+ ($this:ident, $lo:expr, $hi:expr) => {{
+ let attrs = outer_attributes(&$this.attrs);
+ if attrs.is_empty() {
+ mk_sp($lo, $hi)
+ } else {
+ mk_sp(attrs[0].span.lo(), $hi)
+ }
+ }};
+}
+
+macro_rules! span_with_attrs {
+ ($this:ident) => {
+ span_with_attrs_lo_hi!($this, $this.span.lo(), $this.span.hi())
+ };
+}
+
+macro_rules! implement_spanned {
+ ($this:ty) => {
+ impl Spanned for $this {
+ fn span(&self) -> Span {
+ span_with_attrs!(self)
+ }
+ }
+ };
+}
+
+// Implement `Spanned` for structs with `attrs` field.
+implement_spanned!(ast::AssocItem);
+implement_spanned!(ast::Expr);
+implement_spanned!(ast::ExprField);
+implement_spanned!(ast::ForeignItem);
+implement_spanned!(ast::Item);
+implement_spanned!(ast::Local);
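// A hedged illustration of the attribute handling above: for an item such as
//
//     #[derive(Debug)]
//     struct Foo;
//
// `item.span()` starts at the `#` of the outer attribute rather than at
// `struct`, because `span_with_attrs!` widens the span to the first outer
// attribute whenever any are present.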
+
+impl Spanned for ast::Stmt {
+ fn span(&self) -> Span {
+ match self.kind {
+ ast::StmtKind::Local(ref local) => mk_sp(local.span().lo(), self.span.hi()),
+ ast::StmtKind::Item(ref item) => mk_sp(item.span().lo(), self.span.hi()),
+ ast::StmtKind::Expr(ref expr) | ast::StmtKind::Semi(ref expr) => {
+ mk_sp(expr.span().lo(), self.span.hi())
+ }
+ ast::StmtKind::MacCall(ref mac_stmt) => {
+ if mac_stmt.attrs.is_empty() {
+ self.span
+ } else {
+ mk_sp(mac_stmt.attrs[0].span.lo(), self.span.hi())
+ }
+ }
+ ast::StmtKind::Empty => self.span,
+ }
+ }
+}
+
+impl Spanned for ast::Pat {
+ fn span(&self) -> Span {
+ self.span
+ }
+}
+
+impl Spanned for ast::Ty {
+ fn span(&self) -> Span {
+ self.span
+ }
+}
+
+impl Spanned for ast::Arm {
+ fn span(&self) -> Span {
+ let lo = if self.attrs.is_empty() {
+ self.pat.span.lo()
+ } else {
+ self.attrs[0].span.lo()
+ };
+ span_with_attrs_lo_hi!(self, lo, self.body.span.hi())
+ }
+}
+
+impl Spanned for ast::Param {
+ fn span(&self) -> Span {
+ if crate::items::is_named_param(self) {
+ mk_sp(crate::items::span_lo_for_param(self), self.ty.span.hi())
+ } else {
+ self.ty.span
+ }
+ }
+}
+
+impl Spanned for ast::GenericParam {
+ fn span(&self) -> Span {
+ let lo = match self.kind {
+ _ if !self.attrs.is_empty() => self.attrs[0].span.lo(),
+ ast::GenericParamKind::Const { kw_span, .. } => kw_span.lo(),
+ _ => self.ident.span.lo(),
+ };
+ let hi = if self.bounds.is_empty() {
+ self.ident.span.hi()
+ } else {
+ self.bounds.last().unwrap().span().hi()
+ };
+ let ty_hi = if let ast::GenericParamKind::Type {
+ default: Some(ref ty),
+ }
+ | ast::GenericParamKind::Const { ref ty, .. } = self.kind
+ {
+ ty.span().hi()
+ } else {
+ hi
+ };
+ mk_sp(lo, max(hi, ty_hi))
+ }
+}
+
+impl Spanned for ast::FieldDef {
+ fn span(&self) -> Span {
+ span_with_attrs_lo_hi!(self, self.span.lo(), self.ty.span.hi())
+ }
+}
+
+impl Spanned for ast::WherePredicate {
+ fn span(&self) -> Span {
+ match *self {
+ ast::WherePredicate::BoundPredicate(ref p) => p.span,
+ ast::WherePredicate::RegionPredicate(ref p) => p.span,
+ ast::WherePredicate::EqPredicate(ref p) => p.span,
+ }
+ }
+}
+
+impl Spanned for ast::FnRetTy {
+ fn span(&self) -> Span {
+ match *self {
+ ast::FnRetTy::Default(span) => span,
+ ast::FnRetTy::Ty(ref ty) => ty.span,
+ }
+ }
+}
+
+impl Spanned for ast::GenericArg {
+ fn span(&self) -> Span {
+ match *self {
+ ast::GenericArg::Lifetime(ref lt) => lt.ident.span,
+ ast::GenericArg::Type(ref ty) => ty.span(),
+ ast::GenericArg::Const(ref _const) => _const.value.span(),
+ }
+ }
+}
+
+impl Spanned for ast::GenericBound {
+ fn span(&self) -> Span {
+ match *self {
+ ast::GenericBound::Trait(ref ptr, _) => ptr.span,
+ ast::GenericBound::Outlives(ref l) => l.ident.span,
+ }
+ }
+}
+
+impl Spanned for MacroArg {
+ fn span(&self) -> Span {
+ match *self {
+ MacroArg::Expr(ref expr) => expr.span(),
+ MacroArg::Ty(ref ty) => ty.span(),
+ MacroArg::Pat(ref pat) => pat.span(),
+ MacroArg::Item(ref item) => item.span(),
+ MacroArg::Keyword(_, span) => span,
+ }
+ }
+}
+
+impl Spanned for ast::NestedMetaItem {
+ fn span(&self) -> Span {
+ self.span()
+ }
+}
diff --git a/src/tools/rustfmt/src/stmt.rs b/src/tools/rustfmt/src/stmt.rs
new file mode 100644
index 000000000..0b3854425
--- /dev/null
+++ b/src/tools/rustfmt/src/stmt.rs
@@ -0,0 +1,116 @@
+use rustc_ast::ast;
+use rustc_span::Span;
+
+use crate::comment::recover_comment_removed;
+use crate::config::Version;
+use crate::expr::{format_expr, ExprType};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::LineRangeUtils;
+use crate::spanned::Spanned;
+use crate::utils::semicolon_for_stmt;
+
+pub(crate) struct Stmt<'a> {
+ inner: &'a ast::Stmt,
+ is_last: bool,
+}
+
+impl<'a> Spanned for Stmt<'a> {
+ fn span(&self) -> Span {
+ self.inner.span()
+ }
+}
+
+impl<'a> Stmt<'a> {
+ pub(crate) fn as_ast_node(&self) -> &ast::Stmt {
+ self.inner
+ }
+
+ pub(crate) fn to_item(&self) -> Option<&ast::Item> {
+ match self.inner.kind {
+ ast::StmtKind::Item(ref item) => Some(&**item),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn from_ast_node(inner: &'a ast::Stmt, is_last: bool) -> Self {
+ Stmt { inner, is_last }
+ }
+
+ pub(crate) fn from_ast_nodes<I>(iter: I) -> Vec<Self>
+ where
+ I: Iterator<Item = &'a ast::Stmt>,
+ {
+ let mut result = vec![];
+ let mut iter = iter.peekable();
+ while iter.peek().is_some() {
+ result.push(Stmt {
+ inner: iter.next().unwrap(),
+ is_last: iter.peek().is_none(),
+ })
+ }
+ result
+ }
+
+ pub(crate) fn is_empty(&self) -> bool {
+ matches!(self.inner.kind, ast::StmtKind::Empty)
+ }
+
+ fn is_last_expr(&self) -> bool {
+ if !self.is_last {
+ return false;
+ }
+
+ match self.as_ast_node().kind {
+ ast::StmtKind::Expr(ref expr) => match expr.kind {
+ ast::ExprKind::Ret(..) | ast::ExprKind::Continue(..) | ast::ExprKind::Break(..) => {
+ false
+ }
+ _ => true,
+ },
+ _ => false,
+ }
+ }
+}
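// A hedged example of `is_last_expr` above: in
//
//     fn f() -> i32 {
//         let x = 1;
//         x + 1
//     }
//
// the statement `x + 1` is the last statement of the block and is an
// expression other than `return`/`break`/`continue`, so with `version = Two`
// it is rewritten as a sub-expression rather than as a statement.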
+
+impl<'a> Rewrite for Stmt<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let expr_type = if context.config.version() == Version::Two && self.is_last_expr() {
+ ExprType::SubExpression
+ } else {
+ ExprType::Statement
+ };
+ format_stmt(context, shape, self.as_ast_node(), expr_type)
+ }
+}
+
+impl Rewrite for ast::Stmt {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ format_stmt(context, shape, self, ExprType::Statement)
+ }
+}
+
+fn format_stmt(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ stmt: &ast::Stmt,
+ expr_type: ExprType,
+) -> Option<String> {
+ skip_out_of_file_lines_range!(context, stmt.span());
+
+ let result = match stmt.kind {
+ ast::StmtKind::Local(ref local) => local.rewrite(context, shape),
+ ast::StmtKind::Expr(ref ex) | ast::StmtKind::Semi(ref ex) => {
+ let suffix = if semicolon_for_stmt(context, stmt) {
+ ";"
+ } else {
+ ""
+ };
+
+ let shape = shape.sub_width(suffix.len())?;
+ format_expr(ex, expr_type, context, shape).map(|s| s + suffix)
+ }
+ ast::StmtKind::MacCall(..) | ast::StmtKind::Item(..) | ast::StmtKind::Empty => None,
+ };
+ result.and_then(|res| recover_comment_removed(res, stmt.span(), context))
+}
diff --git a/src/tools/rustfmt/src/string.rs b/src/tools/rustfmt/src/string.rs
new file mode 100644
index 000000000..78b72a50c
--- /dev/null
+++ b/src/tools/rustfmt/src/string.rs
@@ -0,0 +1,725 @@
+// Format string literals.
+
+use regex::Regex;
+use unicode_categories::UnicodeCategories;
+use unicode_segmentation::UnicodeSegmentation;
+
+use crate::config::Config;
+use crate::shape::Shape;
+use crate::utils::{unicode_str_width, wrap_str};
+
+const MIN_STRING: usize = 10;
+
+/// Describes the layout of a piece of text.
+pub(crate) struct StringFormat<'a> {
+ /// The opening sequence of characters for the piece of text
+ pub(crate) opener: &'a str,
+ /// The closing sequence of characters for the piece of text
+ pub(crate) closer: &'a str,
+ /// The opening sequence of characters for a line
+ pub(crate) line_start: &'a str,
+ /// The closing sequence of characters for a line
+ pub(crate) line_end: &'a str,
+ /// The allocated box to fit the text into
+ pub(crate) shape: Shape,
+    /// Whether trailing whitespace should be trimmed
+ pub(crate) trim_end: bool,
+ pub(crate) config: &'a Config,
+}
+
+impl<'a> StringFormat<'a> {
+ pub(crate) fn new(shape: Shape, config: &'a Config) -> StringFormat<'a> {
+ StringFormat {
+ opener: "\"",
+ closer: "\"",
+ line_start: " ",
+ line_end: "\\",
+ shape,
+ trim_end: false,
+ config,
+ }
+ }
+
+    /// Returns the maximum number of graphemes that can fit on a line while taking
+    /// the indentation into account.
+ ///
+ /// If we cannot put at least a single character per line, the rewrite won't succeed.
+ fn max_width_with_indent(&self) -> Option<usize> {
+ Some(
+ self.shape
+ .width
+ .checked_sub(self.opener.len() + self.line_end.len() + 1)?
+ + 1,
+ )
+ }
+
+    /// Like `max_width_with_indent`, but the indentation is not subtracted.
+    /// This allows fitting more graphemes from the string on a line when the
+    /// previous snippet ended with `SnippetState::EndWithLineFeed`.
+ fn max_width_without_indent(&self) -> Option<usize> {
+ self.config.max_width().checked_sub(self.line_end.len())
+ }
+}
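// A worked example of the width computations above: with the default
// `StringFormat::new` (opener `"` and line ending `\`) and `shape.width == 20`,
// `max_width_with_indent` is 20 - (1 + 1 + 1) + 1 = 18 graphemes per line,
// while `max_width_without_indent` ignores the indent and allows
// `max_width - 1` graphemes on a continuation line that follows a hard line
// feed in the original string.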
+
+pub(crate) fn rewrite_string<'a>(
+ orig: &str,
+ fmt: &StringFormat<'a>,
+ newline_max_chars: usize,
+) -> Option<String> {
+ let max_width_with_indent = fmt.max_width_with_indent()?;
+ let max_width_without_indent = fmt.max_width_without_indent()?;
+ let indent_with_newline = fmt.shape.indent.to_string_with_newline(fmt.config);
+ let indent_without_newline = fmt.shape.indent.to_string(fmt.config);
+
+ // Strip line breaks.
+ // With this regex applied, all remaining whitespaces are significant
+ let strip_line_breaks_re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][[:space:]]*").unwrap();
+ let stripped_str = strip_line_breaks_re.replace_all(orig, "$1");
+
+ let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>();
+
+ // `cur_start` is the position in `orig` of the start of the current line.
+ let mut cur_start = 0;
+ let mut result = String::with_capacity(
+ stripped_str
+ .len()
+ .checked_next_power_of_two()
+ .unwrap_or(usize::max_value()),
+ );
+ result.push_str(fmt.opener);
+
+ // Snip a line at a time from `stripped_str` until it is used up. Push the snippet
+ // onto result.
+ let mut cur_max_width = max_width_with_indent;
+ let is_bareline_ok = fmt.line_start.is_empty() || is_whitespace(fmt.line_start);
+ loop {
+ // All the input starting at cur_start fits on the current line
+ if graphemes_width(&graphemes[cur_start..]) <= cur_max_width {
+ for (i, grapheme) in graphemes[cur_start..].iter().enumerate() {
+ if is_new_line(grapheme) {
+ // take care of blank lines
+ result = trim_end_but_line_feed(fmt.trim_end, result);
+ result.push('\n');
+ if !is_bareline_ok && cur_start + i + 1 < graphemes.len() {
+ result.push_str(&indent_without_newline);
+ result.push_str(fmt.line_start);
+ }
+ } else {
+ result.push_str(grapheme);
+ }
+ }
+ result = trim_end_but_line_feed(fmt.trim_end, result);
+ break;
+ }
+
+ // The input starting at cur_start needs to be broken
+ match break_string(
+ cur_max_width,
+ fmt.trim_end,
+ fmt.line_end,
+ &graphemes[cur_start..],
+ ) {
+ SnippetState::LineEnd(line, len) => {
+ result.push_str(&line);
+ result.push_str(fmt.line_end);
+ result.push_str(&indent_with_newline);
+ result.push_str(fmt.line_start);
+ cur_max_width = newline_max_chars;
+ cur_start += len;
+ }
+ SnippetState::EndWithLineFeed(line, len) => {
+ if line == "\n" && fmt.trim_end {
+ result = result.trim_end().to_string();
+ }
+ result.push_str(&line);
+ if is_bareline_ok {
+ // the next line can benefit from the full width
+ cur_max_width = max_width_without_indent;
+ } else {
+ result.push_str(&indent_without_newline);
+ result.push_str(fmt.line_start);
+ cur_max_width = max_width_with_indent;
+ }
+ cur_start += len;
+ }
+ SnippetState::EndOfInput(line) => {
+ result.push_str(&line);
+ break;
+ }
+ }
+ }
+
+ result.push_str(fmt.closer);
+ wrap_str(result, fmt.config.max_width(), fmt.shape)
+}
+
+/// Returns the index of the end of the URL if the split at `index` of the given string includes
+/// a URL or similar. Otherwise, returns `None`.
+fn detect_url(s: &[&str], index: usize) -> Option<usize> {
+ let start = match s[..=index].iter().rposition(|g| is_whitespace(g)) {
+ Some(pos) => pos + 1,
+ None => 0,
+ };
+ // 8 = minimum length for a string to contain a URL
+ if s.len() < start + 8 {
+ return None;
+ }
+ let split = s[start..].concat();
+ if split.contains("https://")
+ || split.contains("http://")
+ || split.contains("ftp://")
+ || split.contains("file://")
+ {
+ match s[index..].iter().position(|g| is_whitespace(g)) {
+ Some(pos) => Some(index + pos - 1),
+ None => Some(s.len() - 1),
+ }
+ } else {
+ None
+ }
+}
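// A hedged example for the URL check above (the text is invented): for the
// graphemes of "see https://example.com/x for details", asking about an index
// that falls inside the URL returns the index of the URL's last grapheme, so
// the caller can avoid splitting the link across lines.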
+
+/// Trims whitespaces to the right except for the line feed character.
+fn trim_end_but_line_feed(trim_end: bool, result: String) -> String {
+ let whitespace_except_line_feed = |c: char| c.is_whitespace() && c != '\n';
+ if trim_end && result.ends_with(whitespace_except_line_feed) {
+ result
+ .trim_end_matches(whitespace_except_line_feed)
+ .to_string()
+ } else {
+ result
+ }
+}
+
+/// Result of breaking a string so that it fits on a line, and the state it ended in.
+/// The state indicates what to do with the snippet and how to continue the breaking process.
+#[derive(Debug, PartialEq)]
+enum SnippetState {
+ /// The input could not be broken and so rewriting the string is finished.
+ EndOfInput(String),
+ /// The input could be broken and the returned snippet should be ended with a
+ /// `[StringFormat::line_end]`. The next snippet needs to be indented.
+ ///
+ /// The returned string is the line to print out and the number is the length that got read in
+ /// the text being rewritten. That length may be greater than the returned string if trailing
+ /// whitespaces got trimmed.
+ LineEnd(String, usize),
+ /// The input could be broken but a newline is present that cannot be trimmed. The next snippet
+ /// to be rewritten *could* use more width than what is specified by the given shape. For
+ /// example with a multiline string, the next snippet does not need to be indented, allowing
+ /// more characters to be fit within a line.
+ ///
+ /// The returned string is the line to print out and the number is the length that got read in
+ /// the text being rewritten.
+ EndWithLineFeed(String, usize),
+}
+
+fn not_whitespace_except_line_feed(g: &str) -> bool {
+ is_new_line(g) || !is_whitespace(g)
+}
+
+/// Breaks the input string at a boundary character around the offset `max_width`. A boundary
+/// character is either a punctuation character or whitespace.
+/// FIXME(issue#3281): We must follow the UAX#14 algorithm instead of this.
+fn break_string(max_width: usize, trim_end: bool, line_end: &str, input: &[&str]) -> SnippetState {
+ let break_at = |index /* grapheme at index is included */| {
+ // Take in any whitespaces to the left/right of `input[index]` while
+ // preserving line feeds
+ let index_minus_ws = input[0..=index]
+ .iter()
+ .rposition(|grapheme| not_whitespace_except_line_feed(grapheme))
+ .unwrap_or(index);
+ // Take into account newlines occurring in input[0..=index], i.e., the possible next new
+ // line. If there is one, then text after it could be rewritten in a way that the available
+ // space is fully used.
+ for (i, grapheme) in input[0..=index].iter().enumerate() {
+ if is_new_line(grapheme) {
+ if i <= index_minus_ws {
+ let mut line = &input[0..i].concat()[..];
+ if trim_end {
+ line = line.trim_end();
+ }
+ return SnippetState::EndWithLineFeed(format!("{}\n", line), i + 1);
+ }
+ break;
+ }
+ }
+
+ let mut index_plus_ws = index;
+ for (i, grapheme) in input[index + 1..].iter().enumerate() {
+ if !trim_end && is_new_line(grapheme) {
+ return SnippetState::EndWithLineFeed(
+ input[0..=index + 1 + i].concat(),
+ index + 2 + i,
+ );
+ } else if not_whitespace_except_line_feed(grapheme) {
+ index_plus_ws = index + i;
+ break;
+ }
+ }
+
+ if trim_end {
+ SnippetState::LineEnd(input[0..=index_minus_ws].concat(), index_plus_ws + 1)
+ } else {
+ SnippetState::LineEnd(input[0..=index_plus_ws].concat(), index_plus_ws + 1)
+ }
+ };
+
+    // Find the first index at which the unicode width of `input[0..=x]` becomes > max_width
+ let max_width_index_in_input = {
+ let mut cur_width = 0;
+ let mut cur_index = 0;
+ for (i, grapheme) in input.iter().enumerate() {
+ cur_width += unicode_str_width(grapheme);
+ cur_index = i;
+ if cur_width > max_width {
+ break;
+ }
+ }
+ cur_index
+ };
+ if max_width_index_in_input == 0 {
+ return SnippetState::EndOfInput(input.concat());
+ }
+
+ // Find the position in input for breaking the string
+ if line_end.is_empty()
+ && trim_end
+ && !is_whitespace(input[max_width_index_in_input - 1])
+ && is_whitespace(input[max_width_index_in_input])
+ {
+ // At a breaking point already
+ // The line won't invalidate the rewriting because:
+ // - no extra space needed for the line_end character
+ // - extra whitespaces to the right can be trimmed
+ return break_at(max_width_index_in_input - 1);
+ }
+ if let Some(url_index_end) = detect_url(input, max_width_index_in_input) {
+ let index_plus_ws = url_index_end
+ + input[url_index_end..]
+ .iter()
+ .skip(1)
+ .position(|grapheme| not_whitespace_except_line_feed(grapheme))
+ .unwrap_or(0);
+ return if trim_end {
+ SnippetState::LineEnd(input[..=url_index_end].concat(), index_plus_ws + 1)
+ } else {
+ SnippetState::LineEnd(input[..=index_plus_ws].concat(), index_plus_ws + 1)
+ };
+ }
+
+ match input[0..max_width_index_in_input]
+ .iter()
+ .rposition(|grapheme| is_whitespace(grapheme))
+ {
+ // Found a whitespace and what is on its left side is big enough.
+ Some(index) if index >= MIN_STRING => break_at(index),
+ // No whitespace found, try looking for a punctuation instead
+ _ => match (0..max_width_index_in_input)
+ .rev()
+ .skip_while(|pos| !is_valid_linebreak(input, *pos))
+ .next()
+ {
+ // Found a punctuation and what is on its left side is big enough.
+ Some(index) if index >= MIN_STRING => break_at(index),
+ // Either no boundary character was found to the left of `input[max_chars]`, or the line
+ // got too small. We try searching for a boundary character to the right.
+ _ => match (max_width_index_in_input..input.len())
+ .skip_while(|pos| !is_valid_linebreak(input, *pos))
+ .next()
+ {
+ // A boundary was found after the line limit
+ Some(index) => break_at(index),
+ // No boundary to the right, the input cannot be broken
+ None => SnippetState::EndOfInput(input.concat()),
+ },
+ },
+ }
+}
+
+fn is_valid_linebreak(input: &[&str], pos: usize) -> bool {
+ let is_whitespace = is_whitespace(input[pos]);
+ if is_whitespace {
+ return true;
+ }
+ let is_punctuation = is_punctuation(input[pos]);
+ if is_punctuation && !is_part_of_type(input, pos) {
+ return true;
+ }
+ false
+}
+
+fn is_part_of_type(input: &[&str], pos: usize) -> bool {
+ input.get(pos..=pos + 1) == Some(&[":", ":"])
+ || input.get(pos.saturating_sub(1)..=pos) == Some(&[":", ":"])
+}
+
+fn is_new_line(grapheme: &str) -> bool {
+ let bytes = grapheme.as_bytes();
+ bytes.starts_with(b"\n") || bytes.starts_with(b"\r\n")
+}
+
+fn is_whitespace(grapheme: &str) -> bool {
+ grapheme.chars().all(char::is_whitespace)
+}
+
+fn is_punctuation(grapheme: &str) -> bool {
+ grapheme
+ .chars()
+ .all(UnicodeCategories::is_punctuation_other)
+}
+
+fn graphemes_width(graphemes: &[&str]) -> usize {
+ graphemes.iter().map(|s| unicode_str_width(s)).sum()
+}
+
+#[cfg(test)]
+mod test {
+ use super::{break_string, detect_url, rewrite_string, SnippetState, StringFormat};
+ use crate::config::Config;
+ use crate::shape::{Indent, Shape};
+ use unicode_segmentation::UnicodeSegmentation;
+
+ #[test]
+ fn issue343() {
+ let config = Default::default();
+ let fmt = StringFormat::new(Shape::legacy(2, Indent::empty()), &config);
+ rewrite_string("eq_", &fmt, 2);
+ }
+
+ #[test]
+ fn line_break_at_valid_points_test() {
+ let string = "[TheName](Dont::break::my::type::That::would::be::very::nice) break here";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(20, false, "", &graphemes[..]),
+ SnippetState::LineEnd(
+ "[TheName](Dont::break::my::type::That::would::be::very::nice) ".to_string(),
+ 62
+ )
+ );
+ }
+
+ #[test]
+ fn should_break_on_whitespace() {
+ let string = "Placerat felis. Mauris porta ante sagittis purus.";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(20, false, "", &graphemes[..]),
+ SnippetState::LineEnd("Placerat felis. ".to_string(), 16)
+ );
+ assert_eq!(
+ break_string(20, true, "", &graphemes[..]),
+ SnippetState::LineEnd("Placerat felis.".to_string(), 16)
+ );
+ }
+
+ #[test]
+ fn should_break_on_punctuation() {
+ let string = "Placerat_felis._Mauris_porta_ante_sagittis_purus.";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(20, false, "", &graphemes[..]),
+ SnippetState::LineEnd("Placerat_felis.".to_string(), 15)
+ );
+ }
+
+ #[test]
+ fn should_break_forward() {
+ let string = "Venenatis_tellus_vel_tellus. Aliquam aliquam dolor at justo.";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(20, false, "", &graphemes[..]),
+ SnippetState::LineEnd("Venenatis_tellus_vel_tellus. ".to_string(), 29)
+ );
+ assert_eq!(
+ break_string(20, true, "", &graphemes[..]),
+ SnippetState::LineEnd("Venenatis_tellus_vel_tellus.".to_string(), 29)
+ );
+ }
+
+ #[test]
+ fn nothing_to_break() {
+ let string = "Venenatis_tellus_vel_tellus";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(20, false, "", &graphemes[..]),
+ SnippetState::EndOfInput("Venenatis_tellus_vel_tellus".to_string())
+ );
+ }
+
+ #[test]
+ fn significant_whitespaces() {
+ let string = "Neque in sem. \n Pellentesque tellus augue.";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(15, false, "", &graphemes[..]),
+ SnippetState::EndWithLineFeed("Neque in sem. \n".to_string(), 20)
+ );
+ assert_eq!(
+ break_string(25, false, "", &graphemes[..]),
+ SnippetState::EndWithLineFeed("Neque in sem. \n".to_string(), 20)
+ );
+
+ assert_eq!(
+ break_string(15, true, "", &graphemes[..]),
+ SnippetState::LineEnd("Neque in sem.".to_string(), 19)
+ );
+ assert_eq!(
+ break_string(25, true, "", &graphemes[..]),
+ SnippetState::EndWithLineFeed("Neque in sem.\n".to_string(), 20)
+ );
+ }
+
+ #[test]
+ fn big_whitespace() {
+ let string = "Neque in sem. Pellentesque tellus augue.";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(20, false, "", &graphemes[..]),
+ SnippetState::LineEnd("Neque in sem. ".to_string(), 25)
+ );
+ assert_eq!(
+ break_string(20, true, "", &graphemes[..]),
+ SnippetState::LineEnd("Neque in sem.".to_string(), 25)
+ );
+ }
+
+ #[test]
+ fn newline_in_candidate_line() {
+ let string = "Nulla\nconsequat erat at massa. Vivamus id mi.";
+
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(
+ break_string(25, false, "", &graphemes[..]),
+ SnippetState::EndWithLineFeed("Nulla\n".to_string(), 6)
+ );
+ assert_eq!(
+ break_string(25, true, "", &graphemes[..]),
+ SnippetState::EndWithLineFeed("Nulla\n".to_string(), 6)
+ );
+
+ let mut config: Config = Default::default();
+ config.set().max_width(27);
+ let fmt = StringFormat::new(Shape::legacy(25, Indent::empty()), &config);
+ let rewritten_string = rewrite_string(string, &fmt, 27);
+ assert_eq!(
+ rewritten_string,
+ Some("\"Nulla\nconsequat erat at massa. \\\n Vivamus id mi.\"".to_string())
+ );
+ }
+
+ #[test]
+ fn last_line_fit_with_trailing_whitespaces() {
+ let string = "Vivamus id mi. ";
+ let config: Config = Default::default();
+ let mut fmt = StringFormat::new(Shape::legacy(25, Indent::empty()), &config);
+
+ fmt.trim_end = true;
+ let rewritten_string = rewrite_string(string, &fmt, 25);
+ assert_eq!(rewritten_string, Some("\"Vivamus id mi.\"".to_string()));
+
+ fmt.trim_end = false; // default value of trim_end
+ let rewritten_string = rewrite_string(string, &fmt, 25);
+ assert_eq!(rewritten_string, Some("\"Vivamus id mi. \"".to_string()));
+ }
+
+ #[test]
+ fn last_line_fit_with_newline() {
+ let string = "Vivamus id mi.\nVivamus id mi.";
+ let config: Config = Default::default();
+ let fmt = StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "// ",
+ line_end: "",
+ shape: Shape::legacy(100, Indent::from_width(&config, 4)),
+ trim_end: true,
+ config: &config,
+ };
+
+ let rewritten_string = rewrite_string(string, &fmt, 100);
+ assert_eq!(
+ rewritten_string,
+ Some("Vivamus id mi.\n // Vivamus id mi.".to_string())
+ );
+ }
+
+ #[test]
+ fn overflow_in_non_string_content() {
+ let comment = "Aenean metus.\nVestibulum ac lacus. Vivamus porttitor";
+ let config: Config = Default::default();
+ let fmt = StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "// ",
+ line_end: "",
+ shape: Shape::legacy(30, Indent::from_width(&config, 8)),
+ trim_end: true,
+ config: &config,
+ };
+
+ assert_eq!(
+ rewrite_string(comment, &fmt, 30),
+ Some(
+ "Aenean metus.\n // Vestibulum ac lacus. Vivamus\n // porttitor"
+ .to_string()
+ )
+ );
+ }
+
+ #[test]
+ fn overflow_in_non_string_content_with_line_end() {
+ let comment = "Aenean metus.\nVestibulum ac lacus. Vivamus porttitor";
+ let config: Config = Default::default();
+ let fmt = StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "// ",
+ line_end: "@",
+ shape: Shape::legacy(30, Indent::from_width(&config, 8)),
+ trim_end: true,
+ config: &config,
+ };
+
+ assert_eq!(
+ rewrite_string(comment, &fmt, 30),
+ Some(
+ "Aenean metus.\n // Vestibulum ac lacus. Vivamus@\n // porttitor"
+ .to_string()
+ )
+ );
+ }
+
+ #[test]
+ fn blank_line_with_non_empty_line_start() {
+ let config: Config = Default::default();
+ let mut fmt = StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "// ",
+ line_end: "",
+ shape: Shape::legacy(30, Indent::from_width(&config, 4)),
+ trim_end: true,
+ config: &config,
+ };
+
+ let comment = "Aenean metus. Vestibulum\n\nac lacus. Vivamus porttitor";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 30),
+ Some(
+ "Aenean metus. Vestibulum\n //\n // ac lacus. Vivamus porttitor".to_string()
+ )
+ );
+
+ fmt.shape = Shape::legacy(15, Indent::from_width(&config, 4));
+ let comment = "Aenean\n\nmetus. Vestibulum ac lacus. Vivamus porttitor";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 15),
+ Some(
+ r#"Aenean
+ //
+ // metus. Vestibulum
+ // ac lacus. Vivamus
+ // porttitor"#
+ .to_string()
+ )
+ );
+ }
+
+ #[test]
+ fn retain_blank_lines() {
+ let config: Config = Default::default();
+ let fmt = StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "// ",
+ line_end: "",
+ shape: Shape::legacy(20, Indent::from_width(&config, 4)),
+ trim_end: true,
+ config: &config,
+ };
+
+ let comment = "Aenean\n\nmetus. Vestibulum ac lacus.\n\n";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 20),
+ Some(
+ "Aenean\n //\n // metus. Vestibulum ac\n // lacus.\n //\n".to_string()
+ )
+ );
+
+ let comment = "Aenean\n\nmetus. Vestibulum ac lacus.\n";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 20),
+ Some("Aenean\n //\n // metus. Vestibulum ac\n // lacus.\n".to_string())
+ );
+
+ let comment = "Aenean\n \nmetus. Vestibulum ac lacus.";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 20),
+ Some("Aenean\n //\n // metus. Vestibulum ac\n // lacus.".to_string())
+ );
+ }
+
+ #[test]
+ fn boundary_on_edge() {
+ let config: Config = Default::default();
+ let mut fmt = StringFormat {
+ opener: "",
+ closer: "",
+ line_start: "// ",
+ line_end: "",
+ shape: Shape::legacy(13, Indent::from_width(&config, 4)),
+ trim_end: true,
+ config: &config,
+ };
+
+ let comment = "Aenean metus. Vestibulum ac lacus.";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 13),
+ Some("Aenean metus.\n // Vestibulum ac\n // lacus.".to_string())
+ );
+
+ fmt.trim_end = false;
+ let comment = "Vestibulum ac lacus.";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 13),
+ Some("Vestibulum \n // ac lacus.".to_string())
+ );
+
+ fmt.trim_end = true;
+ fmt.line_end = "\\";
+ let comment = "Vestibulum ac lacus.";
+ assert_eq!(
+ rewrite_string(comment, &fmt, 13),
+ Some("Vestibulum\\\n // ac lacus.".to_string())
+ );
+ }
+
+ #[test]
+ fn detect_urls() {
+ let string = "aaa http://example.org something";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(detect_url(&graphemes, 8), Some(21));
+
+ let string = "https://example.org something";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(detect_url(&graphemes, 0), Some(18));
+
+ let string = "aaa ftp://example.org something";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(detect_url(&graphemes, 8), Some(20));
+
+ let string = "aaa file://example.org something";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(detect_url(&graphemes, 8), Some(21));
+
+ let string = "aaa http not an url";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(detect_url(&graphemes, 6), None);
+
+ let string = "aaa file://example.org";
+ let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>();
+ assert_eq!(detect_url(&graphemes, 8), Some(21));
+ }
+}
diff --git a/src/tools/rustfmt/src/syntux.rs b/src/tools/rustfmt/src/syntux.rs
new file mode 100644
index 000000000..845576bd8
--- /dev/null
+++ b/src/tools/rustfmt/src/syntux.rs
@@ -0,0 +1,4 @@
+//! This module defines a thin abstraction layer on top of rustc's parser and syntax libraries.
+
+pub(crate) mod parser;
+pub(crate) mod session;
diff --git a/src/tools/rustfmt/src/test/configuration_snippet.rs b/src/tools/rustfmt/src/test/configuration_snippet.rs
new file mode 100644
index 000000000..c8fda7c85
--- /dev/null
+++ b/src/tools/rustfmt/src/test/configuration_snippet.rs
@@ -0,0 +1,322 @@
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::io::{BufRead, BufReader, Write};
+use std::iter::Enumerate;
+use std::path::{Path, PathBuf};
+
+use super::{print_mismatches, write_message, DIFF_CONTEXT_SIZE};
+use crate::config::{Config, EmitMode, Verbosity};
+use crate::rustfmt_diff::{make_diff, Mismatch};
+use crate::{Input, Session};
+
+const CONFIGURATIONS_FILE_NAME: &str = "Configurations.md";
+
+// This enum is used to represent one of three text features in Configurations.md: a block of code
+// with its starting line number, the name of a rustfmt configuration option, or the value of a
+// rustfmt configuration option.
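+//
+// An illustrative sketch (not a verbatim excerpt) of the Configurations.md layout
+// these variants are parsed from:
+//
+//     ## `indent_style`            <- ConfigName("indent_style")
+//     #### `"Block"` (default)     <- ConfigValue("Block")
+//     ```rust
+//     fn main() {}
+//     ```
+//
+// The fenced block above would be returned as CodeBlock(("fn main() {}\n", <start line>)).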
+enum ConfigurationSection {
+ CodeBlock((String, u32)), // (String: block of code, u32: line number of code block start)
+ ConfigName(String),
+ ConfigValue(String),
+}
+
+impl ConfigurationSection {
+ fn get_section<I: Iterator<Item = String>>(
+ file: &mut Enumerate<I>,
+ ) -> Option<ConfigurationSection> {
+ lazy_static! {
+ static ref CONFIG_NAME_REGEX: regex::Regex =
+ regex::Regex::new(r"^## `([^`]+)`").expect("failed creating configuration pattern");
+ static ref CONFIG_VALUE_REGEX: regex::Regex =
+ regex::Regex::new(r#"^#### `"?([^`"]+)"?`"#)
+ .expect("failed creating configuration value pattern");
+ }
+
+ loop {
+ match file.next() {
+ Some((i, line)) => {
+ if line.starts_with("```rust") {
+ // Get the lines of the code block.
+ let lines: Vec<String> = file
+ .map(|(_i, l)| l)
+ .take_while(|l| !l.starts_with("```"))
+ .collect();
+ let block = format!("{}\n", lines.join("\n"));
+
+ // +1 to translate to one-based indexing
+ // +1 to get to first line of code (line after "```")
+ let start_line = (i + 2) as u32;
+
+ return Some(ConfigurationSection::CodeBlock((block, start_line)));
+ } else if let Some(c) = CONFIG_NAME_REGEX.captures(&line) {
+ return Some(ConfigurationSection::ConfigName(String::from(&c[1])));
+ } else if let Some(c) = CONFIG_VALUE_REGEX.captures(&line) {
+ return Some(ConfigurationSection::ConfigValue(String::from(&c[1])));
+ }
+ }
+ None => return None, // reached the end of the file
+ }
+ }
+ }
+}
+
+// This struct stores the information about code blocks in the configurations
+// file, formats the code blocks, and prints formatting errors.
+struct ConfigCodeBlock {
+ config_name: Option<String>,
+ config_value: Option<String>,
+ code_block: Option<String>,
+ code_block_start: Option<u32>,
+}
+
+impl ConfigCodeBlock {
+ fn new() -> ConfigCodeBlock {
+ ConfigCodeBlock {
+ config_name: None,
+ config_value: None,
+ code_block: None,
+ code_block_start: None,
+ }
+ }
+
+ fn set_config_name(&mut self, name: Option<String>) {
+ self.config_name = name;
+ self.config_value = None;
+ }
+
+ fn set_config_value(&mut self, value: Option<String>) {
+ self.config_value = value;
+ }
+
+ fn set_code_block(&mut self, code_block: String, code_block_start: u32) {
+ self.code_block = Some(code_block);
+ self.code_block_start = Some(code_block_start);
+ }
+
+ fn get_block_config(&self) -> Config {
+ let mut config = Config::default();
+ config.set().verbose(Verbosity::Quiet);
+ if self.config_name.is_some() && self.config_value.is_some() {
+ config.override_value(
+ self.config_name.as_ref().unwrap(),
+ self.config_value.as_ref().unwrap(),
+ );
+ }
+ config
+ }
+
+ fn code_block_valid(&self) -> bool {
+ // We always expect a code block to be present at this point.
+ assert!(self.code_block.is_some() && self.code_block_start.is_some());
+
+ // See if code block begins with #![rustfmt::skip].
+ let fmt_skip = self.fmt_skip();
+
+ if self.config_name.is_none() && !fmt_skip {
+ write_message(&format!(
+ "No configuration name for {}:{}",
+ CONFIGURATIONS_FILE_NAME,
+ self.code_block_start.unwrap()
+ ));
+ return false;
+ }
+ if self.config_value.is_none() && !fmt_skip {
+ write_message(&format!(
+ "No configuration value for {}:{}",
+ CONFIGURATIONS_FILE_NAME,
+ self.code_block_start.unwrap()
+ ));
+ return false;
+ }
+ true
+ }
+
+ /// True if the code block starts with #![rustfmt::skip]
+ fn fmt_skip(&self) -> bool {
+ self.code_block
+ .as_ref()
+ .unwrap()
+ .lines()
+ .nth(0)
+ .unwrap_or("")
+ == "#![rustfmt::skip]"
+ }
+
+ fn has_parsing_errors<T: Write>(&self, session: &Session<'_, T>) -> bool {
+ if session.has_parsing_errors() {
+ write_message(&format!(
+ "\u{261d}\u{1f3fd} Cannot format {}:{}",
+ CONFIGURATIONS_FILE_NAME,
+ self.code_block_start.unwrap()
+ ));
+ return true;
+ }
+
+ false
+ }
+
+ fn print_diff(&self, compare: Vec<Mismatch>) {
+ let mut mismatches = HashMap::new();
+ mismatches.insert(PathBuf::from(CONFIGURATIONS_FILE_NAME), compare);
+ print_mismatches(mismatches, |line_num| {
+ format!(
+ "\nMismatch at {}:{}:",
+ CONFIGURATIONS_FILE_NAME,
+ line_num + self.code_block_start.unwrap() - 1
+ )
+ });
+ }
+
+ fn formatted_has_diff(&self, text: &str) -> bool {
+ let compare = make_diff(self.code_block.as_ref().unwrap(), text, DIFF_CONTEXT_SIZE);
+ if !compare.is_empty() {
+ self.print_diff(compare);
+ return true;
+ }
+
+ false
+ }
+
+ // Return a bool indicating if formatting this code block is an idempotent
+ // operation. This function also triggers printing any formatting failure
+ // messages.
+ fn formatted_is_idempotent(&self) -> bool {
+ // Verify that we have all of the expected information.
+ if !self.code_block_valid() {
+ return false;
+ }
+
+ let input = Input::Text(self.code_block.as_ref().unwrap().to_owned());
+ let mut config = self.get_block_config();
+ config.set().emit_mode(EmitMode::Stdout);
+ let mut buf: Vec<u8> = vec![];
+
+ {
+ let mut session = Session::new(config, Some(&mut buf));
+ session.format(input).unwrap();
+ if self.has_parsing_errors(&session) {
+ return false;
+ }
+ }
+
+ !self.formatted_has_diff(&String::from_utf8(buf).unwrap())
+ }
+
+ // Extract a code block from the iterator. Behavior:
+ // - Rust code blocks are identified by lines beginning with "```rust".
+ // - One explicit configuration setting is supported per code block.
+ // - Rust code blocks with no configuration setting are illegal and cause an
+ // assertion failure, unless the snippet begins with #![rustfmt::skip].
+ // - Configuration names in Configurations.md must be in the form of
+ // "## `NAME`".
+ // - Configuration values in Configurations.md must be in the form of
+ // "#### `VALUE`".
+ fn extract<I: Iterator<Item = String>>(
+ file: &mut Enumerate<I>,
+ prev: Option<&ConfigCodeBlock>,
+ hash_set: &mut HashSet<String>,
+ ) -> Option<ConfigCodeBlock> {
+ let mut code_block = ConfigCodeBlock::new();
+ code_block.config_name = prev.and_then(|cb| cb.config_name.clone());
+
+ loop {
+ match ConfigurationSection::get_section(file) {
+ Some(ConfigurationSection::CodeBlock((block, start_line))) => {
+ code_block.set_code_block(block, start_line);
+ break;
+ }
+ Some(ConfigurationSection::ConfigName(name)) => {
+ assert!(
+ Config::is_valid_name(&name),
+ "an unknown configuration option was found: {}",
+ name
+ );
+ assert!(
+ hash_set.remove(&name),
+ "multiple configuration guides found for option {}",
+ name
+ );
+ code_block.set_config_name(Some(name));
+ }
+ Some(ConfigurationSection::ConfigValue(value)) => {
+ code_block.set_config_value(Some(value));
+ }
+ None => return None, // end of file was reached
+ }
+ }
+
+ Some(code_block)
+ }
+}
+
+#[test]
+fn configuration_snippet_tests() {
+ super::init_log();
+ let blocks = get_code_blocks();
+ let failures = blocks
+ .iter()
+ .filter(|block| !block.fmt_skip())
+ .map(ConfigCodeBlock::formatted_is_idempotent)
+ .fold(0, |acc, r| acc + (!r as u32));
+
+ // Display results.
+ println!("Ran {} configurations tests.", blocks.len());
+ assert_eq!(failures, 0, "{} configurations tests failed", failures);
+}
+
+// Read Configurations.md and build a `Vec` of `ConfigCodeBlock` structs with one
+// entry for each Rust code block found.
+fn get_code_blocks() -> Vec<ConfigCodeBlock> {
+ let mut file_iter = BufReader::new(
+ fs::File::open(Path::new(CONFIGURATIONS_FILE_NAME))
+ .unwrap_or_else(|_| panic!("couldn't read file {}", CONFIGURATIONS_FILE_NAME)),
+ )
+ .lines()
+ .map(Result::unwrap)
+ .enumerate();
+ let mut code_blocks: Vec<ConfigCodeBlock> = Vec::new();
+ let mut hash_set = Config::hash_set();
+
+ while let Some(cb) = ConfigCodeBlock::extract(&mut file_iter, code_blocks.last(), &mut hash_set)
+ {
+ code_blocks.push(cb);
+ }
+
+ for name in hash_set {
+ if !Config::is_hidden_option(&name) {
+ panic!("{} does not have a configuration guide", name);
+ }
+ }
+
+ code_blocks
+}
+
+#[test]
+fn check_unstable_option_tracking_issue_numbers() {
+ // Ensure that tracking issue links point to the correct issue number
+ let tracking_issue =
+ regex::Regex::new(r"\(tracking issue: \[#(?P<number>\d+)\]\((?P<link>\S+)\)\)")
+ .expect("failed creating configuration pattern");
+
+ let lines = BufReader::new(
+ fs::File::open(Path::new(CONFIGURATIONS_FILE_NAME))
+ .unwrap_or_else(|_| panic!("couldn't read file {}", CONFIGURATIONS_FILE_NAME)),
+ )
+ .lines()
+ .map(Result::unwrap)
+ .enumerate();
+
+ for (idx, line) in lines {
+ if let Some(capture) = tracking_issue.captures(&line) {
+ let number = capture.name("number").unwrap().as_str();
+ let link = capture.name("link").unwrap().as_str();
+ assert!(
+ link.ends_with(number),
+ "{} on line {} does not point to issue #{}",
+ link,
+ idx + 1,
+ number,
+ );
+ }
+ }
+}
diff --git a/src/tools/rustfmt/src/test/mod.rs b/src/tools/rustfmt/src/test/mod.rs
new file mode 100644
index 000000000..6b5bc2b30
--- /dev/null
+++ b/src/tools/rustfmt/src/test/mod.rs
@@ -0,0 +1,1053 @@
+use std::collections::HashMap;
+use std::env;
+use std::fs;
+use std::io::{self, BufRead, BufReader, Read, Write};
+use std::iter::Peekable;
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::str::Chars;
+use std::thread;
+
+use crate::config::{Color, Config, EmitMode, FileName, NewlineStyle};
+use crate::formatting::{ReportedErrors, SourceFile};
+use crate::rustfmt_diff::{make_diff, print_diff, DiffLine, Mismatch, ModifiedChunk, OutputWriter};
+use crate::source_file;
+use crate::{is_nightly_channel, FormatReport, FormatReportFormatterBuilder, Input, Session};
+
+use rustfmt_config_proc_macro::nightly_only_test;
+
+mod configuration_snippet;
+mod mod_resolver;
+mod parser;
+
+const DIFF_CONTEXT_SIZE: usize = 3;
+
+// A list of files on which we want to skip testing.
+const FILE_SKIP_LIST: &[&str] = &[
+ // We want to make sure that `skip_children` works correctly,
+ // so we do not test this file directly.
+ "configs/skip_children/foo/mod.rs",
+ "issue-3434/no_entry.rs",
+ "issue-3665/sub_mod.rs",
+ // Testing for issue-3779
+ "issue-3779/ice.rs",
+ // These files and directories are part of modules defined inside `cfg_if!`.
+ "cfg_if/mod.rs",
+ "cfg_if/detect",
+ "issue-3253/foo.rs",
+ "issue-3253/bar.rs",
+ "issue-3253/paths",
+ // These files and directories are part of modules defined inside `cfg_attr(..)`.
+ "cfg_mod/dir",
+ "cfg_mod/bar.rs",
+ "cfg_mod/foo.rs",
+ "cfg_mod/wasm32.rs",
+ "skip/foo.rs",
+];
+
+fn init_log() {
+ let _ = env_logger::builder().is_test(true).try_init();
+}
+
+struct TestSetting {
+ /// The size of the stack for the thread that runs tests.
+ stack_size: usize,
+}
+
+impl Default for TestSetting {
+ fn default() -> Self {
+ TestSetting {
+ stack_size: 8_388_608, // 8MB
+ }
+ }
+}
+
+fn run_test_with<F>(test_setting: &TestSetting, f: F)
+where
+ F: FnOnce(),
+ F: Send + 'static,
+{
+ thread::Builder::new()
+ .stack_size(test_setting.stack_size)
+ .spawn(f)
+ .expect("Failed to create a test thread")
+ .join()
+ .expect("Failed to join a test thread")
+}
+
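+// Returns true when `subpath` occurs as a contiguous run of path components
+// inside `path`, e.g. (illustrative) is_subpath(Path::new("tests/cfg_if/detect/mod.rs"),
+// &"cfg_if/detect") == true.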
+fn is_subpath<P>(path: &Path, subpath: &P) -> bool
+where
+ P: AsRef<Path>,
+{
+ (0..path.components().count())
+ .map(|i| {
+ path.components()
+ .skip(i)
+ .take(subpath.as_ref().components().count())
+ })
+ .any(|c| c.zip(subpath.as_ref().components()).all(|(a, b)| a == b))
+}
+
+fn is_file_skip(path: &Path) -> bool {
+ FILE_SKIP_LIST
+ .iter()
+ .any(|file_path| is_subpath(path, file_path))
+}
+
+// Returns a `Vec` containing `PathBuf`s of files with an `rs` extension in the
+// given path. The `recursive` argument controls whether files from subdirectories
+// are also returned.
+fn get_test_files(path: &Path, recursive: bool) -> Vec<PathBuf> {
+ let mut files = vec![];
+ if path.is_dir() {
+ for entry in fs::read_dir(path).expect(&format!(
+ "couldn't read directory {}",
+ path.to_str().unwrap()
+ )) {
+ let entry = entry.expect("couldn't get `DirEntry`");
+ let path = entry.path();
+ if path.is_dir() && recursive {
+ files.append(&mut get_test_files(&path, recursive));
+ } else if path.extension().map_or(false, |f| f == "rs") && !is_file_skip(&path) {
+ files.push(path);
+ }
+ }
+ }
+ files
+}
+
+fn verify_config_used(path: &Path, config_name: &str) {
+ for entry in fs::read_dir(path).expect(&format!(
+ "couldn't read {} directory",
+ path.to_str().unwrap()
+ )) {
+ let entry = entry.expect("couldn't get directory entry");
+ let path = entry.path();
+ if path.extension().map_or(false, |f| f == "rs") {
+ // check if "// rustfmt-<config_name>:" appears in the file.
+ let filebuf = BufReader::new(
+ fs::File::open(&path)
+ .unwrap_or_else(|_| panic!("couldn't read file {}", path.display())),
+ );
+ assert!(
+ filebuf
+ .lines()
+ .map(Result::unwrap)
+ .take_while(|l| l.starts_with("//"))
+ .any(|l| l.starts_with(&format!("// rustfmt-{}", config_name))),
+ "config option file {} does not contain expected config name",
+ path.display()
+ );
+ }
+ }
+}
+
+#[test]
+fn verify_config_test_names() {
+ init_log();
+ for path in &[
+ Path::new("tests/source/configs"),
+ Path::new("tests/target/configs"),
+ ] {
+ for entry in fs::read_dir(path).expect("couldn't read configs directory") {
+ let entry = entry.expect("couldn't get directory entry");
+ let path = entry.path();
+ if path.is_dir() {
+ let config_name = path.file_name().unwrap().to_str().unwrap();
+
+ // Make sure that the config name is used in the files in the directory.
+ verify_config_used(&path, config_name);
+ }
+ }
+ }
+}
+
+// This writes to the terminal using the same approach (via `term::stdout` or
+// `println!`) that is used by `rustfmt::rustfmt_diff::print_diff`. Writing
+// using only one or the other will cause the output order to differ when
+// `print_diff` selects the approach not used.
+fn write_message(msg: &str) {
+ let mut writer = OutputWriter::new(Color::Auto);
+ writer.writeln(msg, None);
+}
+
+// Integration tests. The files in `tests/source` are formatted and compared
+// to their equivalent in `tests/target`. The target file and config can be
+// overridden by annotations in the source file. The input and output must match
+// exactly.
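+//
+// For instance (illustrative values, not from a real test file), a source file may
+// start with significant comments such as `// rustfmt-config: foo.toml` or
+// `// rustfmt-target: bar.rs`; these are picked up by `read_significant_comments`
+// further down in this module.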
+#[test]
+fn system_tests() {
+ init_log();
+ run_test_with(&TestSetting::default(), || {
+ // Get all files in the tests/source directory.
+ let files = get_test_files(Path::new("tests/source"), true);
+ let (_reports, count, fails) = check_files(files, &None);
+
+ // Display results.
+ println!("Ran {} system tests.", count);
+ assert_eq!(fails, 0, "{} system tests failed", fails);
+ assert!(
+ count >= 300,
+ "Expected a minimum of {} system tests to be executed",
+ 300
+ )
+ });
+}
+
+// Do the same for the tests/coverage/source directory.
+// The only difference is the coverage mode.
+#[test]
+fn coverage_tests() {
+ init_log();
+ let files = get_test_files(Path::new("tests/coverage/source"), true);
+ let (_reports, count, fails) = check_files(files, &None);
+
+ println!("Ran {} tests in coverage mode.", count);
+ assert_eq!(fails, 0, "{} tests failed", fails);
+}
+
+#[test]
+fn checkstyle_test() {
+ init_log();
+ let filename = "tests/writemode/source/fn-single-line.rs";
+ let expected_filename = "tests/writemode/target/checkstyle.xml";
+ assert_output(Path::new(filename), Path::new(expected_filename));
+}
+
+#[test]
+fn json_test() {
+ init_log();
+ let filename = "tests/writemode/source/json.rs";
+ let expected_filename = "tests/writemode/target/output.json";
+ assert_output(Path::new(filename), Path::new(expected_filename));
+}
+
+#[test]
+fn modified_test() {
+ init_log();
+ use std::io::BufRead;
+
+ // Test "modified" output
+ let filename = "tests/writemode/source/modified.rs";
+ let mut data = Vec::new();
+ let mut config = Config::default();
+ config
+ .set()
+ .emit_mode(crate::config::EmitMode::ModifiedLines);
+
+ {
+ let mut session = Session::new(config, Some(&mut data));
+ session.format(Input::File(filename.into())).unwrap();
+ }
+
+ let mut lines = data.lines();
+ let mut chunks = Vec::new();
+ while let Some(Ok(header)) = lines.next() {
+ // Parse the header line
+ let values: Vec<_> = header
+ .split(' ')
+ .map(|s| s.parse::<u32>().unwrap())
+ .collect();
+ assert_eq!(values.len(), 3);
+ let line_number_orig = values[0];
+ let lines_removed = values[1];
+ let num_added = values[2];
+ let mut added_lines = Vec::new();
+ for _ in 0..num_added {
+ added_lines.push(lines.next().unwrap().unwrap());
+ }
+ chunks.push(ModifiedChunk {
+ line_number_orig,
+ lines_removed,
+ lines: added_lines,
+ });
+ }
+
+ assert_eq!(
+ chunks,
+ vec![
+ ModifiedChunk {
+ line_number_orig: 4,
+ lines_removed: 4,
+ lines: vec!["fn blah() {}".into()],
+ },
+ ModifiedChunk {
+ line_number_orig: 9,
+ lines_removed: 6,
+ lines: vec!["#[cfg(a, b)]".into(), "fn main() {}".into()],
+ },
+ ],
+ );
+}
+
+// Helper function for comparing the results of rustfmt
+// to a known output file generated by one of the write modes.
+fn assert_output(source: &Path, expected_filename: &Path) {
+ let config = read_config(source);
+ let (_, source_file, _) = format_file(source, config.clone());
+
+ // Populate output by writing to a vec.
+ let mut out = vec![];
+ let _ = source_file::write_all_files(&source_file, &mut out, &config);
+ let output = String::from_utf8(out).unwrap();
+
+ let mut expected_file = fs::File::open(&expected_filename).expect("couldn't open target");
+ let mut expected_text = String::new();
+ expected_file
+ .read_to_string(&mut expected_text)
+ .expect("Failed reading target");
+
+ let compare = make_diff(&expected_text, &output, DIFF_CONTEXT_SIZE);
+ if !compare.is_empty() {
+ let mut failures = HashMap::new();
+ failures.insert(source.to_owned(), compare);
+ print_mismatches_default_message(failures);
+ panic!("Text does not match expected output");
+ }
+}
+
+// Helper function for comparing the results of rustfmt
+// to a known output generated by one of the write modes.
+fn assert_stdin_output(
+ source: &Path,
+ expected_filename: &Path,
+ emit_mode: EmitMode,
+ has_diff: bool,
+) {
+ let mut config = Config::default();
+ config.set().newline_style(NewlineStyle::Unix);
+ config.set().emit_mode(emit_mode);
+
+ let mut source_file = fs::File::open(&source).expect("couldn't open source");
+ let mut source_text = String::new();
+ source_file
+ .read_to_string(&mut source_text)
+ .expect("Failed reading target");
+ let input = Input::Text(source_text);
+
+ // Populate output by writing to a vec.
+ let mut buf: Vec<u8> = vec![];
+ {
+ let mut session = Session::new(config, Some(&mut buf));
+ session.format(input).unwrap();
+ let errors = ReportedErrors {
+ has_diff: has_diff,
+ ..Default::default()
+ };
+ assert_eq!(session.errors, errors);
+ }
+
+ let mut expected_file = fs::File::open(&expected_filename).expect("couldn't open target");
+ let mut expected_text = String::new();
+ expected_file
+ .read_to_string(&mut expected_text)
+ .expect("Failed reading target");
+
+ let output = String::from_utf8(buf).unwrap();
+ let compare = make_diff(&expected_text, &output, DIFF_CONTEXT_SIZE);
+ if !compare.is_empty() {
+ let mut failures = HashMap::new();
+ failures.insert(source.to_owned(), compare);
+ print_mismatches_default_message(failures);
+ panic!("Text does not match expected output");
+ }
+}
+// Idempotence tests. Files in tests/target are checked to be unaltered by
+// rustfmt.
+#[nightly_only_test]
+#[test]
+fn idempotence_tests() {
+ init_log();
+ run_test_with(&TestSetting::default(), || {
+ // Get all files in the tests/target directory.
+ let files = get_test_files(Path::new("tests/target"), true);
+ let (_reports, count, fails) = check_files(files, &None);
+
+ // Display results.
+ println!("Ran {} idempotent tests.", count);
+ assert_eq!(fails, 0, "{} idempotent tests failed", fails);
+ assert!(
+ count >= 400,
+ "Expected a minimum of {} idempotent tests to be executed",
+ 400
+ )
+ });
+}
+
+// Run rustfmt on itself. This operation must be idempotent. We also check that
+// no warnings are emitted.
+// Issue-3443: these tests require nightly
+#[nightly_only_test]
+#[test]
+fn self_tests() {
+ init_log();
+ let mut files = get_test_files(Path::new("tests"), false);
+ let bin_directories = vec!["cargo-fmt", "git-rustfmt", "bin", "format-diff"];
+ for dir in bin_directories {
+ let mut path = PathBuf::from("src");
+ path.push(dir);
+ path.push("main.rs");
+ files.push(path);
+ }
+ files.push(PathBuf::from("src/lib.rs"));
+
+ let (reports, count, fails) = check_files(files, &Some(PathBuf::from("rustfmt.toml")));
+ let mut warnings = 0;
+
+ // Display results.
+ println!("Ran {} self tests.", count);
+ assert_eq!(fails, 0, "{} self tests failed", fails);
+
+ for format_report in reports {
+ println!(
+ "{}",
+ FormatReportFormatterBuilder::new(&format_report).build()
+ );
+ warnings += format_report.warning_count();
+ }
+
+ assert_eq!(
+ warnings, 0,
+ "Rustfmt's code generated {} warnings",
+ warnings
+ );
+}
+
+#[test]
+fn format_files_find_new_files_via_cfg_if() {
+ init_log();
+ run_test_with(&TestSetting::default(), || {
+ // To reproduce issue-4656, these files must be parsed
+ // as part of the same session (hence this separate test runner).
+ let files = vec![
+ Path::new("tests/source/issue-4656/lib2.rs"),
+ Path::new("tests/source/issue-4656/lib.rs"),
+ ];
+
+ let config = Config::default();
+ let mut session = Session::<io::Stdout>::new(config, None);
+
+ let mut write_result = HashMap::new();
+ for file in files {
+ assert!(file.exists());
+ let result = session.format(Input::File(file.into())).unwrap();
+ assert!(!session.has_formatting_errors());
+ assert!(!result.has_warnings());
+ let mut source_file = SourceFile::new();
+ mem::swap(&mut session.source_file, &mut source_file);
+
+ for (filename, text) in source_file {
+ if let FileName::Real(ref filename) = filename {
+ write_result.insert(filename.to_owned(), text);
+ }
+ }
+ }
+ assert_eq!(
+ 3,
+ write_result.len(),
+ "Should have uncovered an extra file (format_me_please.rs) via lib.rs"
+ );
+ assert!(handle_result(write_result, None).is_ok());
+ });
+}
+
+#[test]
+fn stdin_formatting_smoke_test() {
+ init_log();
+ let input = Input::Text("fn main () {}".to_owned());
+ let mut config = Config::default();
+ config.set().emit_mode(EmitMode::Stdout);
+ let mut buf: Vec<u8> = vec![];
+ {
+ let mut session = Session::new(config, Some(&mut buf));
+ session.format(input).unwrap();
+ assert!(session.has_no_errors());
+ }
+
+ #[cfg(not(windows))]
+ assert_eq!(buf, "<stdin>:\n\nfn main() {}\n".as_bytes());
+ #[cfg(windows)]
+ assert_eq!(buf, "<stdin>:\n\nfn main() {}\r\n".as_bytes());
+}
+
+#[test]
+fn stdin_parser_panic_caught() {
+ init_log();
+ // See issue #3239.
+ for text in ["{", "}"].iter().cloned().map(String::from) {
+ let mut buf = vec![];
+ let mut session = Session::new(Default::default(), Some(&mut buf));
+ let _ = session.format(Input::Text(text));
+
+ assert!(session.has_parsing_errors());
+ }
+}
+
+/// Ensures that `EmitMode::ModifiedLines` works with input from `stdin`. Useful
+/// when embedding Rustfmt (e.g. inside RLS).
+#[test]
+fn stdin_works_with_modified_lines() {
+ init_log();
+ let input = "\nfn\n some( )\n{\n}\nfn main () {}\n";
+ let output = "1 6 2\nfn some() {}\nfn main() {}\n";
+
+ let input = Input::Text(input.to_owned());
+ let mut config = Config::default();
+ config.set().newline_style(NewlineStyle::Unix);
+ config.set().emit_mode(EmitMode::ModifiedLines);
+ let mut buf: Vec<u8> = vec![];
+ {
+ let mut session = Session::new(config, Some(&mut buf));
+ session.format(input).unwrap();
+ let errors = ReportedErrors {
+ has_diff: true,
+ ..Default::default()
+ };
+ assert_eq!(session.errors, errors);
+ }
+ assert_eq!(buf, output.as_bytes());
+}
+
+/// Ensures that `EmitMode::Json` works with input from `stdin`.
+#[test]
+fn stdin_works_with_json() {
+ init_log();
+ assert_stdin_output(
+ Path::new("tests/writemode/source/stdin.rs"),
+ Path::new("tests/writemode/target/stdin.json"),
+ EmitMode::Json,
+ true,
+ );
+}
+
+/// Ensures that `EmitMode::Checkstyle` works with input from `stdin`.
+#[test]
+fn stdin_works_with_checkstyle() {
+ init_log();
+ assert_stdin_output(
+ Path::new("tests/writemode/source/stdin.rs"),
+ Path::new("tests/writemode/target/stdin.xml"),
+ EmitMode::Checkstyle,
+ false,
+ );
+}
+
+#[test]
+fn stdin_disable_all_formatting_test() {
+ init_log();
+ let input = String::from("fn main() { println!(\"This should not be formatted.\"); }");
+ let mut child = Command::new(rustfmt().to_str().unwrap())
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .arg("--config-path=./tests/config/disable_all_formatting.toml")
+ .spawn()
+ .expect("failed to execute child");
+
+ {
+ let stdin = child.stdin.as_mut().expect("failed to get stdin");
+ stdin
+ .write_all(input.as_bytes())
+ .expect("failed to write stdin");
+ }
+
+ let output = child.wait_with_output().expect("failed to wait on child");
+ assert!(output.status.success());
+ assert!(output.stderr.is_empty());
+ assert_eq!(input, String::from_utf8(output.stdout).unwrap());
+}
+
+#[test]
+fn stdin_generated_files_issue_5172() {
+ init_log();
+ let input = Input::Text("//@generated\nfn main() {}".to_owned());
+ let mut config = Config::default();
+ config.set().emit_mode(EmitMode::Stdout);
+ config.set().format_generated_files(false);
+ config.set().newline_style(NewlineStyle::Unix);
+ let mut buf: Vec<u8> = vec![];
+ {
+ let mut session = Session::new(config, Some(&mut buf));
+ session.format(input).unwrap();
+ assert!(session.has_no_errors());
+ }
+ // N.B. this should be changed once `format_generated_files` is supported with stdin
+ assert_eq!(
+ String::from_utf8(buf).unwrap(),
+ "<stdin>:\n\n//@generated\nfn main() {}\n",
+ );
+}
+
+#[test]
+fn stdin_handles_mod_inner_ignore_attr() {
+ // see https://github.com/rust-lang/rustfmt/issues/5368
+ init_log();
+ let input = String::from("#![rustfmt::skip]\n\nfn main() { }");
+ let mut child = Command::new(rustfmt().to_str().unwrap())
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .spawn()
+ .expect("failed to execute child");
+
+ {
+ let stdin = child.stdin.as_mut().expect("failed to get stdin");
+ stdin
+ .write_all(input.as_bytes())
+ .expect("failed to write stdin");
+ }
+
+ let output = child.wait_with_output().expect("failed to wait on child");
+ assert!(output.status.success());
+ assert!(output.stderr.is_empty());
+ assert_eq!(input, String::from_utf8(output.stdout).unwrap());
+}
+
+#[test]
+fn format_lines_errors_are_reported() {
+ init_log();
+ let long_identifier = String::from_utf8(vec![b'a'; 239]).unwrap();
+ let input = Input::Text(format!("fn {}() {{}}", long_identifier));
+ let mut config = Config::default();
+ config.set().error_on_line_overflow(true);
+ let mut session = Session::<io::Stdout>::new(config, None);
+ session.format(input).unwrap();
+ assert!(session.has_formatting_errors());
+}
+
+#[test]
+fn format_lines_errors_are_reported_with_tabs() {
+ init_log();
+ let long_identifier = String::from_utf8(vec![b'a'; 97]).unwrap();
+ let input = Input::Text(format!("fn a() {{\n\t{}\n}}", long_identifier));
+ let mut config = Config::default();
+ config.set().error_on_line_overflow(true);
+ config.set().hard_tabs(true);
+ let mut session = Session::<io::Stdout>::new(config, None);
+ session.format(input).unwrap();
+ assert!(session.has_formatting_errors());
+}
+
+// For each file, run rustfmt and collect the output.
+// Returns the format reports, the number of files checked, and the number of failures.
+fn check_files(files: Vec<PathBuf>, opt_config: &Option<PathBuf>) -> (Vec<FormatReport>, u32, u32) {
+ let mut count = 0;
+ let mut fails = 0;
+ let mut reports = vec![];
+
+ for file_name in files {
+ let sig_comments = read_significant_comments(&file_name);
+ if sig_comments.contains_key("unstable") && !is_nightly_channel!() {
+ debug!(
+ "Skipping '{}' because it requires unstable \
+ features which are only available on nightly...",
+ file_name.display()
+ );
+ continue;
+ }
+
+ debug!("Testing '{}'...", file_name.display());
+
+ match idempotent_check(&file_name, opt_config) {
+ Ok(ref report) if report.has_warnings() => {
+ print!("{}", FormatReportFormatterBuilder::new(report).build());
+ fails += 1;
+ }
+ Ok(report) => reports.push(report),
+ Err(err) => {
+ if let IdempotentCheckError::Mismatch(msg) = err {
+ print_mismatches_default_message(msg);
+ }
+ fails += 1;
+ }
+ }
+
+ count += 1;
+ }
+
+ (reports, count, fails)
+}
+
+fn print_mismatches_default_message(result: HashMap<PathBuf, Vec<Mismatch>>) {
+ for (file_name, diff) in result {
+ let mismatch_msg_formatter =
+ |line_num| format!("\nMismatch at {}:{}:", file_name.display(), line_num);
+ print_diff(diff, &mismatch_msg_formatter, &Default::default());
+ }
+
+ if let Some(mut t) = term::stdout() {
+ t.reset().unwrap_or(());
+ }
+}
+
+fn print_mismatches<T: Fn(u32) -> String>(
+ result: HashMap<PathBuf, Vec<Mismatch>>,
+ mismatch_msg_formatter: T,
+) {
+ for (_file_name, diff) in result {
+ print_diff(diff, &mismatch_msg_formatter, &Default::default());
+ }
+
+ if let Some(mut t) = term::stdout() {
+ t.reset().unwrap_or(());
+ }
+}
+
+fn read_config(filename: &Path) -> Config {
+ let sig_comments = read_significant_comments(filename);
+ // Look for a config file. If there is a 'config' property in the significant comments, use
+ // that. Otherwise, if there are no significant comments at all, look for a config file with
+ // the same name as the test file.
+ let mut config = if !sig_comments.is_empty() {
+ get_config(sig_comments.get("config").map(Path::new))
+ } else {
+ get_config(filename.with_extension("toml").file_name().map(Path::new))
+ };
+
+ for (key, val) in &sig_comments {
+ if key != "target" && key != "config" && key != "unstable" {
+ config.override_value(key, val);
+ if config.is_default(key) {
+ warn!("Default value {} used explicitly for {}", val, key);
+ }
+ }
+ }
+
+ config
+}
+
+fn format_file<P: Into<PathBuf>>(filepath: P, config: Config) -> (bool, SourceFile, FormatReport) {
+ let filepath = filepath.into();
+ let input = Input::File(filepath);
+ let mut session = Session::<io::Stdout>::new(config, None);
+ let result = session.format(input).unwrap();
+ let parsing_errors = session.has_parsing_errors();
+ let mut source_file = SourceFile::new();
+ mem::swap(&mut session.source_file, &mut source_file);
+ (parsing_errors, source_file, result)
+}
+
+enum IdempotentCheckError {
+ Mismatch(HashMap<PathBuf, Vec<Mismatch>>),
+ Parse,
+}
+
+fn idempotent_check(
+ filename: &PathBuf,
+ opt_config: &Option<PathBuf>,
+) -> Result<FormatReport, IdempotentCheckError> {
+ let sig_comments = read_significant_comments(filename);
+ let config = if let Some(ref config_file_path) = opt_config {
+ Config::from_toml_path(config_file_path).expect("`rustfmt.toml` not found")
+ } else {
+ read_config(filename)
+ };
+ let (parsing_errors, source_file, format_report) = format_file(filename, config);
+ if parsing_errors {
+ return Err(IdempotentCheckError::Parse);
+ }
+
+ let mut write_result = HashMap::new();
+ for (filename, text) in source_file {
+ if let FileName::Real(ref filename) = filename {
+ write_result.insert(filename.to_owned(), text);
+ }
+ }
+
+ let target = sig_comments.get("target").map(|x| &(*x)[..]);
+
+ handle_result(write_result, target).map(|_| format_report)
+}
+
+// Reads test config file using the supplied (optional) file name. If there's no file name or the
+// file doesn't exist, just return the default config. Otherwise, the file must be read
+// successfully.
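+//
+// For example, a (hypothetical) `Some(Path::new("foo.toml"))` resolves to
+// "tests/config/foo.toml"; if that path does not exist, the default config is
+// returned instead.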
+fn get_config(config_file: Option<&Path>) -> Config {
+ let config_file_name = match config_file {
+ None => return Default::default(),
+ Some(file_name) => {
+ let mut full_path = PathBuf::from("tests/config/");
+ full_path.push(file_name);
+ if !full_path.exists() {
+ return Default::default();
+ };
+ full_path
+ }
+ };
+
+ let mut def_config_file = fs::File::open(config_file_name).expect("couldn't open config");
+ let mut def_config = String::new();
+ def_config_file
+ .read_to_string(&mut def_config)
+ .expect("Couldn't read config");
+
+ Config::from_toml(&def_config, Path::new("tests/config/")).expect("invalid TOML")
+}
+
+// Reads significant comments of the form: `// rustfmt-key: value` into a hash map.
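+//
+// For example (values are illustrative), a test file containing
+//     // rustfmt-indent_style: Visual
+//     // rustfmt-max_width: 120
+// yields {"indent_style": "Visual", "max_width": "120"}.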
+fn read_significant_comments(file_name: &Path) -> HashMap<String, String> {
+ let file = fs::File::open(file_name)
+ .unwrap_or_else(|_| panic!("couldn't read file {}", file_name.display()));
+ let reader = BufReader::new(file);
+ let pattern = r"^\s*//\s*rustfmt-([^:]+):\s*(\S+)";
+ let regex = regex::Regex::new(pattern).expect("failed creating pattern 1");
+
+ // Matches lines containing significant comments or whitespace.
+ let line_regex = regex::Regex::new(r"(^\s*$)|(^\s*//\s*rustfmt-[^:]+:\s*\S+)")
+ .expect("failed creating pattern 2");
+
+ reader
+ .lines()
+ .map(|line| line.expect("failed getting line"))
+ .filter(|line| line_regex.is_match(line))
+ .filter_map(|line| {
+ regex.captures_iter(&line).next().map(|capture| {
+ (
+ capture
+ .get(1)
+ .expect("couldn't unwrap capture")
+ .as_str()
+ .to_owned(),
+ capture
+ .get(2)
+ .expect("couldn't unwrap capture")
+ .as_str()
+ .to_owned(),
+ )
+ })
+ })
+ .collect()
+}
+
+// Compares output to input.
+// TODO: needs a better name, more explanation.
+fn handle_result(
+ result: HashMap<PathBuf, String>,
+ target: Option<&str>,
+) -> Result<(), IdempotentCheckError> {
+ let mut failures = HashMap::new();
+
+ for (file_name, fmt_text) in result {
+ // If file is in tests/source, compare to file with same name in tests/target.
+ let target = get_target(&file_name, target);
+ let open_error = format!("couldn't open target {:?}", target);
+ let mut f = fs::File::open(&target).expect(&open_error);
+
+ let mut text = String::new();
+ let read_error = format!("failed reading target {:?}", target);
+ f.read_to_string(&mut text).expect(&read_error);
+
+ // Ignore LF and CRLF difference for Windows.
+ if !string_eq_ignore_newline_repr(&fmt_text, &text) {
+ let diff = make_diff(&text, &fmt_text, DIFF_CONTEXT_SIZE);
+ assert!(
+ !diff.is_empty(),
+ "Empty diff? Maybe due to a missing a newline at the end of a file?"
+ );
+ failures.insert(file_name, diff);
+ }
+ }
+
+ if failures.is_empty() {
+ Ok(())
+ } else {
+ Err(IdempotentCheckError::Mismatch(failures))
+ }
+}
+
+// Maps source file paths to their target paths.
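+//
+// E.g. (illustrative path) "tests/source/configs/foo.rs" maps to
+// "tests/target/configs/foo.rs": the first "source" component is replaced by "target".
+// Paths without a "source" component (idempotence and self checks) are returned unchanged.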
+fn get_target(file_name: &Path, target: Option<&str>) -> PathBuf {
+ if let Some(n) = file_name
+ .components()
+ .position(|c| c.as_os_str() == "source")
+ {
+ let mut target_file_name = PathBuf::new();
+ for (i, c) in file_name.components().enumerate() {
+ if i == n {
+ target_file_name.push("target");
+ } else {
+ target_file_name.push(c.as_os_str());
+ }
+ }
+ if let Some(replace_name) = target {
+ target_file_name.with_file_name(replace_name)
+ } else {
+ target_file_name
+ }
+ } else {
+ // This is either an idempotence check or a self check.
+ file_name.to_owned()
+ }
+}
+
+#[test]
+fn rustfmt_diff_make_diff_tests() {
+ init_log();
+ let diff = make_diff("a\nb\nc\nd", "a\ne\nc\nd", 3);
+ assert_eq!(
+ diff,
+ vec![Mismatch {
+ line_number: 1,
+ line_number_orig: 1,
+ lines: vec![
+ DiffLine::Context("a".into()),
+ DiffLine::Resulting("b".into()),
+ DiffLine::Expected("e".into()),
+ DiffLine::Context("c".into()),
+ DiffLine::Context("d".into()),
+ ],
+ }]
+ );
+}
+
+#[test]
+fn rustfmt_diff_no_diff_test() {
+ init_log();
+ let diff = make_diff("a\nb\nc\nd", "a\nb\nc\nd", 3);
+ assert_eq!(diff, vec![]);
+}
+
+// Compare strings without distinguishing between CRLF and LF
+fn string_eq_ignore_newline_repr(left: &str, right: &str) -> bool {
+ let left = CharsIgnoreNewlineRepr(left.chars().peekable());
+ let right = CharsIgnoreNewlineRepr(right.chars().peekable());
+ left.eq(right)
+}
+
+struct CharsIgnoreNewlineRepr<'a>(Peekable<Chars<'a>>);
+
+impl<'a> Iterator for CharsIgnoreNewlineRepr<'a> {
+ type Item = char;
+
+ fn next(&mut self) -> Option<char> {
+ self.0.next().map(|c| {
+ if c == '\r' {
+ if *self.0.peek().unwrap_or(&'\0') == '\n' {
+ self.0.next();
+ '\n'
+ } else {
+ '\r'
+ }
+ } else {
+ c
+ }
+ })
+ }
+}
+
+#[test]
+fn string_eq_ignore_newline_repr_test() {
+ init_log();
+ assert!(string_eq_ignore_newline_repr("", ""));
+ assert!(!string_eq_ignore_newline_repr("", "abc"));
+ assert!(!string_eq_ignore_newline_repr("abc", ""));
+ assert!(string_eq_ignore_newline_repr("a\nb\nc\rd", "a\nb\r\nc\rd"));
+ assert!(string_eq_ignore_newline_repr("a\r\n\r\n\r\nb", "a\n\n\nb"));
+ assert!(!string_eq_ignore_newline_repr("a\r\nbcd", "a\nbcdefghijk"));
+}
+
+struct TempFile {
+ path: PathBuf,
+}
+
+fn make_temp_file(file_name: &'static str) -> TempFile {
+ use std::env::var;
+ use std::fs::File;
+
+ // Used in the Rust build system.
+ let target_dir = var("RUSTFMT_TEST_DIR").unwrap_or_else(|_| ".".to_owned());
+ let path = Path::new(&target_dir).join(file_name);
+
+ let mut file = File::create(&path).expect("couldn't create temp file");
+ let content = "fn main() {}\n";
+ file.write_all(content.as_bytes())
+ .expect("couldn't write temp file");
+ TempFile { path }
+}
+
+impl Drop for TempFile {
+ fn drop(&mut self) {
+ use std::fs::remove_file;
+ remove_file(&self.path).expect("couldn't delete temp file");
+ }
+}
+
+fn rustfmt() -> PathBuf {
+ let mut me = env::current_exe().expect("failed to get current executable");
+ // Chop off the test name.
+ me.pop();
+ // Chop off `deps`.
+ me.pop();
+
+ me.push("rustfmt");
+ assert!(
+ me.is_file() || me.with_extension("exe").is_file(),
+ "{}",
+ if cfg!(release) {
+ "no rustfmt bin, try running `cargo build --release` before testing"
+ } else {
+ "no rustfmt bin, try running `cargo build` before testing"
+ }
+ );
+ me
+}
+
+#[test]
+fn verify_check_works() {
+ init_log();
+ let temp_file = make_temp_file("temp_check.rs");
+
+ Command::new(rustfmt().to_str().unwrap())
+ .arg("--check")
+ .arg(temp_file.path.to_str().unwrap())
+ .status()
+ .expect("run with check option failed");
+}
+
+#[test]
+fn verify_check_works_with_stdin() {
+ init_log();
+
+ let mut child = Command::new(rustfmt().to_str().unwrap())
+ .arg("--check")
+ .stdin(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .expect("run with check option failed");
+
+ {
+ let stdin = child.stdin.as_mut().expect("Failed to open stdin");
+ stdin
+ .write_all("fn main() {}\n".as_bytes())
+ .expect("Failed to write to rustfmt --check");
+ }
+ let output = child
+ .wait_with_output()
+ .expect("Failed to wait on rustfmt child");
+ assert!(output.status.success());
+}
+
+#[test]
+fn verify_check_l_works_with_stdin() {
+ init_log();
+
+ let mut child = Command::new(rustfmt().to_str().unwrap())
+ .arg("--check")
+ .arg("-l")
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .expect("run with check option failed");
+
+ {
+ let stdin = child.stdin.as_mut().expect("Failed to open stdin");
+ stdin
+ .write_all("fn main()\n{}\n".as_bytes())
+ .expect("Failed to write to rustfmt --check");
+ }
+ let output = child
+ .wait_with_output()
+ .expect("Failed to wait on rustfmt child");
+ assert!(output.status.success());
+ assert_eq!(std::str::from_utf8(&output.stdout).unwrap(), "<stdin>\n");
+}
diff --git a/src/tools/rustfmt/src/test/mod_resolver.rs b/src/tools/rustfmt/src/test/mod_resolver.rs
new file mode 100644
index 000000000..aacb2acc6
--- /dev/null
+++ b/src/tools/rustfmt/src/test/mod_resolver.rs
@@ -0,0 +1,82 @@
+use std::io;
+use std::path::PathBuf;
+
+use super::read_config;
+
+use crate::{FileName, Input, Session};
+
+fn verify_mod_resolution(input_file_name: &str, exp_misformatted_files: &[&str]) {
+ let input_file = PathBuf::from(input_file_name);
+ let config = read_config(&input_file);
+ let mut session = Session::<io::Stdout>::new(config, None);
+ let report = session
+ .format(Input::File(input_file_name.into()))
+ .expect("Should not have had any execution errors");
+ let errors_by_file = &report.internal.borrow().0;
+ for exp_file in exp_misformatted_files {
+ assert!(errors_by_file.contains_key(&FileName::Real(PathBuf::from(exp_file))));
+ }
+}
+
+#[test]
+fn nested_out_of_line_mods_loaded() {
+ // See also https://github.com/rust-lang/rustfmt/issues/4874
+ verify_mod_resolution(
+ "tests/mod-resolver/issue-4874/main.rs",
+ &[
+ "tests/mod-resolver/issue-4874/bar/baz.rs",
+ "tests/mod-resolver/issue-4874/foo/qux.rs",
+ ],
+ );
+}
+
+#[test]
+fn out_of_line_nested_inline_within_out_of_line() {
+ // See also https://github.com/rust-lang/rustfmt/issues/5063
+ verify_mod_resolution(
+ "tests/mod-resolver/issue-5063/main.rs",
+ &[
+ "tests/mod-resolver/issue-5063/foo/bar/baz.rs",
+ "tests/mod-resolver/issue-5063/foo.rs",
+ ],
+ );
+}
+
+#[test]
+fn skip_out_of_line_nested_inline_within_out_of_line() {
+ // See also https://github.com/rust-lang/rustfmt/issues/5065
+ verify_mod_resolution(
+ "tests/mod-resolver/skip-files-issue-5065/main.rs",
+ &["tests/mod-resolver/skip-files-issue-5065/one.rs"],
+ );
+}
+
+#[test]
+fn fmt_out_of_line_test_modules() {
+ // See also https://github.com/rust-lang/rustfmt/issues/5119
+ verify_mod_resolution(
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs",
+ &[
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub1.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub2.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/sub4.rs",
+ ],
+ )
+}
+
+#[test]
+fn fallback_and_try_to_resolve_external_submod_relative_to_current_dir_path() {
+ // See also https://github.com/rust-lang/rustfmt/issues/5198
+ verify_mod_resolution(
+ "tests/mod-resolver/issue-5198/lib.rs",
+ &[
+ "tests/mod-resolver/issue-5198/a.rs",
+ "tests/mod-resolver/issue-5198/lib/b.rs",
+ "tests/mod-resolver/issue-5198/lib/c/mod.rs",
+ "tests/mod-resolver/issue-5198/lib/c/e.rs",
+ "tests/mod-resolver/issue-5198/lib/c/d/f.rs",
+ "tests/mod-resolver/issue-5198/lib/c/d/g/mod.rs",
+ ],
+ )
+}
diff --git a/src/tools/rustfmt/src/test/parser.rs b/src/tools/rustfmt/src/test/parser.rs
new file mode 100644
index 000000000..ae4a4f94d
--- /dev/null
+++ b/src/tools/rustfmt/src/test/parser.rs
@@ -0,0 +1,57 @@
+use std::io;
+use std::path::PathBuf;
+
+use super::read_config;
+
+use crate::modules::{ModuleResolutionError, ModuleResolutionErrorKind};
+use crate::{ErrorKind, Input, Session};
+
+#[test]
+fn parser_errors_in_submods_are_surfaced() {
+ // See also https://github.com/rust-lang/rustfmt/issues/4126
+ let filename = "tests/parser/issue-4126/lib.rs";
+ let input_file = PathBuf::from(filename);
+ let exp_mod_name = "invalid";
+ let config = read_config(&input_file);
+ let mut session = Session::<io::Stdout>::new(config, None);
+ if let Err(ErrorKind::ModuleResolutionError(ModuleResolutionError { module, kind })) =
+ session.format(Input::File(filename.into()))
+ {
+ assert_eq!(&module, exp_mod_name);
+ if let ModuleResolutionErrorKind::ParseError {
+ file: unparseable_file,
+ } = kind
+ {
+ assert_eq!(
+ unparseable_file,
+ PathBuf::from("tests/parser/issue-4126/invalid.rs"),
+ );
+ } else {
+ panic!("Expected parser error");
+ }
+ } else {
+ panic!("Expected ModuleResolution operation error");
+ }
+}
+
+fn assert_parser_error(filename: &str) {
+ let file = PathBuf::from(filename);
+ let config = read_config(&file);
+ let mut session = Session::<io::Stdout>::new(config, None);
+ let _ = session.format(Input::File(filename.into())).unwrap();
+ assert!(session.has_parsing_errors());
+}
+
+#[test]
+fn parser_creation_errors_on_entry_new_parser_from_file_panic() {
+ // See also https://github.com/rust-lang/rustfmt/issues/4418
+ let filename = "tests/parser/issue_4418.rs";
+ assert_parser_error(filename);
+}
+
+#[test]
+fn crate_parsing_errors_on_unclosed_delims() {
+ // See also https://github.com/rust-lang/rustfmt/issues/4466
+ let filename = "tests/parser/unclosed-delims/issue_4466.rs";
+ assert_parser_error(filename);
+}
diff --git a/src/tools/rustfmt/src/types.rs b/src/tools/rustfmt/src/types.rs
new file mode 100644
index 000000000..2627886db
--- /dev/null
+++ b/src/tools/rustfmt/src/types.rs
@@ -0,0 +1,1086 @@
+use std::iter::ExactSizeIterator;
+use std::ops::Deref;
+
+use rustc_ast::ast::{self, FnRetTy, Mutability, Term};
+use rustc_ast::ptr;
+use rustc_span::{symbol::kw, BytePos, Pos, Span};
+
+use crate::comment::{combine_strs_with_missing_comments, contains_comment};
+use crate::config::lists::*;
+use crate::config::{IndentStyle, TypeDensity, Version};
+use crate::expr::{
+ format_expr, rewrite_assign_rhs, rewrite_call, rewrite_tuple, rewrite_unary_prefix, ExprType,
+ RhsAssignKind,
+};
+use crate::lists::{
+ definitive_tactic, itemize_list, write_list, ListFormatting, ListItem, Separator,
+};
+use crate::macros::{rewrite_macro, MacroPosition};
+use crate::overflow;
+use crate::pairs::{rewrite_pair, PairParts};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::Shape;
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::utils::{
+ colon_spaces, extra_offset, first_line_width, format_extern, format_mutability,
+ last_line_extendable, last_line_width, mk_sp, rewrite_ident,
+};
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub(crate) enum PathContext {
+ Expr,
+ Type,
+ Import,
+}
+
+// Does not wrap on simple segments.
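+//
+// Also handles qualified paths, e.g. (illustrative) `<Vec<u8> as IntoIterator>::Item`:
+// the `qself` type is `Vec<u8>`, and the single segment before `>::` (`IntoIterator`)
+// is rendered via the `skip_count` branch below.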
+pub(crate) fn rewrite_path(
+ context: &RewriteContext<'_>,
+ path_context: PathContext,
+ qself: Option<&ast::QSelf>,
+ path: &ast::Path,
+ shape: Shape,
+) -> Option<String> {
+ let skip_count = qself.map_or(0, |x| x.position);
+
+ let mut result = if path.is_global() && qself.is_none() && path_context != PathContext::Import {
+ "::".to_owned()
+ } else {
+ String::new()
+ };
+
+ let mut span_lo = path.span.lo();
+
+ if let Some(qself) = qself {
+ result.push('<');
+
+ let fmt_ty = qself.ty.rewrite(context, shape)?;
+ result.push_str(&fmt_ty);
+
+ if skip_count > 0 {
+ result.push_str(" as ");
+ if path.is_global() && path_context != PathContext::Import {
+ result.push_str("::");
+ }
+
+ // 3 = ">::".len()
+ let shape = shape.sub_width(3)?;
+
+ result = rewrite_path_segments(
+ PathContext::Type,
+ result,
+ path.segments.iter().take(skip_count),
+ span_lo,
+ path.span.hi(),
+ context,
+ shape,
+ )?;
+ }
+
+ result.push_str(">::");
+ span_lo = qself.ty.span.hi() + BytePos(1);
+ }
+
+ rewrite_path_segments(
+ path_context,
+ result,
+ path.segments.iter().skip(skip_count),
+ span_lo,
+ path.span.hi(),
+ context,
+ shape,
+ )
+}
+
+fn rewrite_path_segments<'a, I>(
+ path_context: PathContext,
+ mut buffer: String,
+ iter: I,
+ mut span_lo: BytePos,
+ span_hi: BytePos,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String>
+where
+ I: Iterator<Item = &'a ast::PathSegment>,
+{
+ let mut first = true;
+ let shape = shape.visual_indent(0);
+
+ for segment in iter {
+ // Indicates a global path, shouldn't be rendered.
+ if segment.ident.name == kw::PathRoot {
+ continue;
+ }
+ if first {
+ first = false;
+ } else {
+ buffer.push_str("::");
+ }
+
+ let extra_offset = extra_offset(&buffer, shape);
+ let new_shape = shape.shrink_left(extra_offset)?;
+ let segment_string = rewrite_segment(
+ path_context,
+ segment,
+ &mut span_lo,
+ span_hi,
+ context,
+ new_shape,
+ )?;
+
+ buffer.push_str(&segment_string);
+ }
+
+ Some(buffer)
+}
+
+#[derive(Debug)]
+pub(crate) enum SegmentParam<'a> {
+ Const(&'a ast::AnonConst),
+ LifeTime(&'a ast::Lifetime),
+ Type(&'a ast::Ty),
+ Binding(&'a ast::AssocConstraint),
+}
+
+impl<'a> SegmentParam<'a> {
+ fn from_generic_arg(arg: &ast::GenericArg) -> SegmentParam<'_> {
+ match arg {
+ ast::GenericArg::Lifetime(ref lt) => SegmentParam::LifeTime(lt),
+ ast::GenericArg::Type(ref ty) => SegmentParam::Type(ty),
+ ast::GenericArg::Const(const_) => SegmentParam::Const(const_),
+ }
+ }
+}
+
+impl<'a> Spanned for SegmentParam<'a> {
+ fn span(&self) -> Span {
+ match *self {
+ SegmentParam::Const(const_) => const_.value.span,
+ SegmentParam::LifeTime(lt) => lt.ident.span,
+ SegmentParam::Type(ty) => ty.span,
+ SegmentParam::Binding(binding) => binding.span,
+ }
+ }
+}
+
+impl<'a> Rewrite for SegmentParam<'a> {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match *self {
+ SegmentParam::Const(const_) => const_.rewrite(context, shape),
+ SegmentParam::LifeTime(lt) => lt.rewrite(context, shape),
+ SegmentParam::Type(ty) => ty.rewrite(context, shape),
+ SegmentParam::Binding(atc) => atc.rewrite(context, shape),
+ }
+ }
+}
+
+impl Rewrite for ast::AssocConstraint {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ use ast::AssocConstraintKind::{Bound, Equality};
+
+ let mut result = String::with_capacity(128);
+ result.push_str(rewrite_ident(context, self.ident));
+
+ if let Some(ref gen_args) = self.gen_args {
+ let budget = shape.width.checked_sub(result.len())?;
+ let shape = Shape::legacy(budget, shape.indent + result.len());
+ let gen_str = rewrite_generic_args(gen_args, context, shape, gen_args.span())?;
+ result.push_str(&gen_str);
+ }
+
+ let infix = match (&self.kind, context.config.type_punctuation_density()) {
+ (Bound { .. }, _) => ": ",
+ (Equality { .. }, TypeDensity::Wide) => " = ",
+ (Equality { .. }, TypeDensity::Compressed) => "=",
+ };
+ result.push_str(infix);
+
+ let budget = shape.width.checked_sub(result.len())?;
+ let shape = Shape::legacy(budget, shape.indent + result.len());
+ let rewrite = self.kind.rewrite(context, shape)?;
+ result.push_str(&rewrite);
+
+ Some(result)
+ }
+}
+
+impl Rewrite for ast::AssocConstraintKind {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match self {
+ ast::AssocConstraintKind::Equality { term } => match term {
+ Term::Ty(ty) => ty.rewrite(context, shape),
+ Term::Const(c) => c.rewrite(context, shape),
+ },
+ ast::AssocConstraintKind::Bound { bounds } => bounds.rewrite(context, shape),
+ }
+ }
+}
+
+// Formats a path segment. There are some hacks involved to correctly determine
+// the segment's associated span since it's not part of the AST.
+//
+// The span_lo is assumed to be greater than the end of any previous segment's
+// parameters and less than or equal to the start of the current segment.
+//
+// span_hi is assumed to be equal to the end of the entire path.
+//
+// When the segment contains a positive number of parameters, we update span_lo
+// so that the invariants described above hold for the next segment.
+fn rewrite_segment(
+ path_context: PathContext,
+ segment: &ast::PathSegment,
+ span_lo: &mut BytePos,
+ span_hi: BytePos,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ let mut result = String::with_capacity(128);
+ result.push_str(rewrite_ident(context, segment.ident));
+
+ let ident_len = result.len();
+ let shape = if context.use_block_indent() {
+ shape.offset_left(ident_len)?
+ } else {
+ shape.shrink_left(ident_len)?
+ };
+
+ if let Some(ref args) = segment.args {
+ let generics_str = rewrite_generic_args(args, context, shape, mk_sp(*span_lo, span_hi))?;
+ match **args {
+ ast::GenericArgs::AngleBracketed(ref data) if !data.args.is_empty() => {
+ // HACK: squeeze out the span between the identifier and the parameters.
+ // The hack is required so that we don't remove the separator inside macro calls.
+ // This does not work in the presence of comments; we hope that people are
+ // sensible about where they put their comments.
+ let separator_snippet = context
+ .snippet(mk_sp(segment.ident.span.hi(), data.span.lo()))
+ .trim();
+ let force_separator = context.inside_macro() && separator_snippet.starts_with("::");
+ let separator = if path_context == PathContext::Expr || force_separator {
+ "::"
+ } else {
+ ""
+ };
+ result.push_str(separator);
+
+ // Update position of last bracket.
+ *span_lo = context
+ .snippet_provider
+ .span_after(mk_sp(*span_lo, span_hi), "<");
+ }
+ _ => (),
+ }
+ result.push_str(&generics_str)
+ }
+
+ Some(result)
+}
+
+fn format_function_type<'a, I>(
+ inputs: I,
+ output: &FnRetTy,
+ variadic: bool,
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String>
+where
+ I: ExactSizeIterator,
+ <I as Iterator>::Item: Deref,
+ <I::Item as Deref>::Target: Rewrite + Spanned + 'a,
+{
+ debug!("format_function_type {:#?}", shape);
+
+ let ty_shape = match context.config.indent_style() {
+ // 4 = " -> "
+ IndentStyle::Block => shape.offset_left(4)?,
+ IndentStyle::Visual => shape.block_left(4)?,
+ };
+ let output = match *output {
+ FnRetTy::Ty(ref ty) => {
+ let type_str = ty.rewrite(context, ty_shape)?;
+ format!(" -> {}", type_str)
+ }
+ FnRetTy::Default(..) => String::new(),
+ };
+
+ let list_shape = if context.use_block_indent() {
+ Shape::indented(
+ shape.block().indent.block_indent(context.config),
+ context.config,
+ )
+ } else {
+ // 2 for ()
+ let budget = shape.width.checked_sub(2)?;
+ // 1 for (
+ let offset = shape.indent + 1;
+ Shape::legacy(budget, offset)
+ };
+
+ let is_inputs_empty = inputs.len() == 0;
+ let list_lo = context.snippet_provider.span_after(span, "(");
+ let (list_str, tactic) = if is_inputs_empty {
+ let tactic = get_tactics(&[], &output, shape);
+ let list_hi = context.snippet_provider.span_before(span, ")");
+ let comment = context
+ .snippet_provider
+ .span_to_snippet(mk_sp(list_lo, list_hi))?
+ .trim();
+ let comment = if comment.starts_with("//") {
+ format!(
+ "{}{}{}",
+ &list_shape.indent.to_string_with_newline(context.config),
+ comment,
+ &shape.block().indent.to_string_with_newline(context.config)
+ )
+ } else {
+ comment.to_string()
+ };
+ (comment, tactic)
+ } else {
+ let items = itemize_list(
+ context.snippet_provider,
+ inputs,
+ ")",
+ ",",
+ |arg| arg.span().lo(),
+ |arg| arg.span().hi(),
+ |arg| arg.rewrite(context, list_shape),
+ list_lo,
+ span.hi(),
+ false,
+ );
+
+ let item_vec: Vec<_> = items.collect();
+ let tactic = get_tactics(&item_vec, &output, shape);
+ let trailing_separator = if !context.use_block_indent() || variadic {
+ SeparatorTactic::Never
+ } else {
+ context.config.trailing_comma()
+ };
+
+ let fmt = ListFormatting::new(list_shape, context.config)
+ .tactic(tactic)
+ .trailing_separator(trailing_separator)
+ .ends_with_newline(tactic.ends_with_newline(context.config.indent_style()))
+ .preserve_newline(true);
+ (write_list(&item_vec, &fmt)?, tactic)
+ };
+
+ let args = if tactic == DefinitiveListTactic::Horizontal
+ || !context.use_block_indent()
+ || is_inputs_empty
+ {
+ format!("({})", list_str)
+ } else {
+ format!(
+ "({}{}{})",
+ list_shape.indent.to_string_with_newline(context.config),
+ list_str,
+ shape.block().indent.to_string_with_newline(context.config),
+ )
+ };
+ if output.is_empty() || last_line_width(&args) + first_line_width(&output) <= shape.width {
+ Some(format!("{}{}", args, output))
+ } else {
+ Some(format!(
+ "{}\n{}{}",
+ args,
+ list_shape.indent.to_string(context.config),
+ output.trim_start()
+ ))
+ }
+}
+
+fn type_bound_colon(context: &RewriteContext<'_>) -> &'static str {
+ colon_spaces(context.config)
+}
+
+// If the return type is multi-lined, then force to use multiple lines for
+// arguments as well.
+fn get_tactics(item_vec: &[ListItem], output: &str, shape: Shape) -> DefinitiveListTactic {
+ if output.contains('\n') {
+ DefinitiveListTactic::Vertical
+ } else {
+ definitive_tactic(
+ item_vec,
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ // 2 is for the case of ',\n'
+ shape.width.saturating_sub(2 + output.len()),
+ )
+ }
+}
+
+impl Rewrite for ast::WherePredicate {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ // FIXME: dead spans?
+ let result = match *self {
+ ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
+ ref bound_generic_params,
+ ref bounded_ty,
+ ref bounds,
+ ..
+ }) => {
+ let type_str = bounded_ty.rewrite(context, shape)?;
+ let colon = type_bound_colon(context).trim_end();
+ let lhs = if let Some(lifetime_str) =
+ rewrite_lifetime_param(context, shape, bound_generic_params)
+ {
+ format!("for<{}> {}{}", lifetime_str, type_str, colon)
+ } else {
+ format!("{}{}", type_str, colon)
+ };
+
+ rewrite_assign_rhs(context, lhs, bounds, &RhsAssignKind::Bounds, shape)?
+ }
+ ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
+ ref lifetime,
+ ref bounds,
+ ..
+ }) => rewrite_bounded_lifetime(lifetime, bounds, context, shape)?,
+ ast::WherePredicate::EqPredicate(ast::WhereEqPredicate {
+ ref lhs_ty,
+ ref rhs_ty,
+ ..
+ }) => {
+ let lhs_ty_str = lhs_ty.rewrite(context, shape).map(|lhs| lhs + " =")?;
+ rewrite_assign_rhs(context, lhs_ty_str, &**rhs_ty, &RhsAssignKind::Ty, shape)?
+ }
+ };
+
+ Some(result)
+ }
+}
+
+impl Rewrite for ast::GenericArg {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match *self {
+ ast::GenericArg::Lifetime(ref lt) => lt.rewrite(context, shape),
+ ast::GenericArg::Type(ref ty) => ty.rewrite(context, shape),
+ ast::GenericArg::Const(ref const_) => const_.rewrite(context, shape),
+ }
+ }
+}
+
+fn rewrite_generic_args(
+ gen_args: &ast::GenericArgs,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ span: Span,
+) -> Option<String> {
+ match gen_args {
+ ast::GenericArgs::AngleBracketed(ref data) if !data.args.is_empty() => {
+ let args = data
+ .args
+ .iter()
+ .map(|x| match x {
+ ast::AngleBracketedArg::Arg(generic_arg) => {
+ SegmentParam::from_generic_arg(generic_arg)
+ }
+ ast::AngleBracketedArg::Constraint(constraint) => {
+ SegmentParam::Binding(constraint)
+ }
+ })
+ .collect::<Vec<_>>();
+
+ overflow::rewrite_with_angle_brackets(context, "", args.iter(), shape, span)
+ }
+ ast::GenericArgs::Parenthesized(ref data) => format_function_type(
+ data.inputs.iter().map(|x| &**x),
+ &data.output,
+ false,
+ data.span,
+ context,
+ shape,
+ ),
+ _ => Some("".to_owned()),
+ }
+}
+
+fn rewrite_bounded_lifetime(
+ lt: &ast::Lifetime,
+ bounds: &[ast::GenericBound],
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ let result = lt.rewrite(context, shape)?;
+
+ if bounds.is_empty() {
+ Some(result)
+ } else {
+ let colon = type_bound_colon(context);
+ let overhead = last_line_width(&result) + colon.len();
+ let result = format!(
+ "{}{}{}",
+ result,
+ colon,
+ join_bounds(context, shape.sub_width(overhead)?, bounds, true)?
+ );
+ Some(result)
+ }
+}
+
+impl Rewrite for ast::AnonConst {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ format_expr(&self.value, ExprType::SubExpression, context, shape)
+ }
+}
+
+impl Rewrite for ast::Lifetime {
+ fn rewrite(&self, context: &RewriteContext<'_>, _: Shape) -> Option<String> {
+ Some(rewrite_ident(context, self.ident).to_owned())
+ }
+}
+
+impl Rewrite for ast::GenericBound {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match *self {
+ ast::GenericBound::Trait(ref poly_trait_ref, trait_bound_modifier) => {
+ let snippet = context.snippet(self.span());
+ let has_paren = snippet.starts_with('(') && snippet.ends_with(')');
+ let rewrite = match trait_bound_modifier {
+ ast::TraitBoundModifier::None => poly_trait_ref.rewrite(context, shape),
+ ast::TraitBoundModifier::Maybe => poly_trait_ref
+ .rewrite(context, shape.offset_left(1)?)
+ .map(|s| format!("?{}", s)),
+ ast::TraitBoundModifier::MaybeConst => poly_trait_ref
+ .rewrite(context, shape.offset_left(7)?)
+ .map(|s| format!("~const {}", s)),
+ ast::TraitBoundModifier::MaybeConstMaybe => poly_trait_ref
+ .rewrite(context, shape.offset_left(8)?)
+ .map(|s| format!("~const ?{}", s)),
+ };
+ rewrite.map(|s| if has_paren { format!("({})", s) } else { s })
+ }
+ ast::GenericBound::Outlives(ref lifetime) => lifetime.rewrite(context, shape),
+ }
+ }
+}
+
+impl Rewrite for ast::GenericBounds {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ if self.is_empty() {
+ return Some(String::new());
+ }
+
+ join_bounds(context, shape, self, true)
+ }
+}
+
+impl Rewrite for ast::GenericParam {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let mut result = String::with_capacity(128);
+ // FIXME: If there is more than one attribute, this will force multiline.
+ match self.attrs.rewrite(context, shape) {
+ Some(ref rw) if !rw.is_empty() => {
+ result.push_str(rw);
+ // When rewriting generic params, an extra newline should be added
+ // if the attributes end with a doc comment.
+ if let Some(true) = self.attrs.last().map(|a| a.is_doc_comment()) {
+ result.push_str(&shape.indent.to_string_with_newline(context.config));
+ } else {
+ result.push(' ');
+ }
+ }
+ _ => (),
+ }
+
+ if let ast::GenericParamKind::Const {
+ ref ty,
+ kw_span: _,
+ default,
+ } = &self.kind
+ {
+ result.push_str("const ");
+ result.push_str(rewrite_ident(context, self.ident));
+ result.push_str(": ");
+ result.push_str(&ty.rewrite(context, shape)?);
+ if let Some(default) = default {
+ let eq_str = match context.config.type_punctuation_density() {
+ TypeDensity::Compressed => "=",
+ TypeDensity::Wide => " = ",
+ };
+ result.push_str(eq_str);
+ let budget = shape.width.checked_sub(result.len())?;
+ let rewrite = default.rewrite(context, Shape::legacy(budget, shape.indent))?;
+ result.push_str(&rewrite);
+ }
+ } else {
+ result.push_str(rewrite_ident(context, self.ident));
+ }
+
+ if !self.bounds.is_empty() {
+ result.push_str(type_bound_colon(context));
+ result.push_str(&self.bounds.rewrite(context, shape)?)
+ }
+ if let ast::GenericParamKind::Type {
+ default: Some(ref def),
+ } = self.kind
+ {
+ let eq_str = match context.config.type_punctuation_density() {
+ TypeDensity::Compressed => "=",
+ TypeDensity::Wide => " = ",
+ };
+ result.push_str(eq_str);
+ let budget = shape.width.checked_sub(result.len())?;
+ let rewrite =
+ def.rewrite(context, Shape::legacy(budget, shape.indent + result.len()))?;
+ result.push_str(&rewrite);
+ }
+
+ Some(result)
+ }
+}
+
+impl Rewrite for ast::PolyTraitRef {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ if let Some(lifetime_str) =
+ rewrite_lifetime_param(context, shape, &self.bound_generic_params)
+ {
+ // 6 is "for<> ".len()
+ let extra_offset = lifetime_str.len() + 6;
+ let path_str = self
+ .trait_ref
+ .rewrite(context, shape.offset_left(extra_offset)?)?;
+
+ Some(format!("for<{}> {}", lifetime_str, path_str))
+ } else {
+ self.trait_ref.rewrite(context, shape)
+ }
+ }
+}
+
+impl Rewrite for ast::TraitRef {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ rewrite_path(context, PathContext::Type, None, &self.path, shape)
+ }
+}
+
+impl Rewrite for ast::Ty {
+ fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ match self.kind {
+ ast::TyKind::TraitObject(ref bounds, tobj_syntax) => {
+ // We have to consider whether the 'dyn' keyword is used or not.
+ let is_dyn = tobj_syntax == ast::TraitObjectSyntax::Dyn;
+ // 4 is length of 'dyn '
+ let shape = if is_dyn { shape.offset_left(4)? } else { shape };
+ let mut res = bounds.rewrite(context, shape)?;
+ // We may have falsely removed a trailing `+` inside a macro call.
+ if context.inside_macro() && bounds.len() == 1 {
+ if context.snippet(self.span).ends_with('+') && !res.ends_with('+') {
+ res.push('+');
+ }
+ }
+ if is_dyn {
+ Some(format!("dyn {}", res))
+ } else {
+ Some(res)
+ }
+ }
+ ast::TyKind::Ptr(ref mt) => {
+ let prefix = match mt.mutbl {
+ Mutability::Mut => "*mut ",
+ Mutability::Not => "*const ",
+ };
+
+ rewrite_unary_prefix(context, prefix, &*mt.ty, shape)
+ }
+ ast::TyKind::Rptr(ref lifetime, ref mt) => {
+ let mut_str = format_mutability(mt.mutbl);
+ let mut_len = mut_str.len();
+ let mut result = String::with_capacity(128);
+ result.push('&');
+ let ref_hi = context.snippet_provider.span_after(self.span(), "&");
+ let mut cmnt_lo = ref_hi;
+
+ if let Some(ref lifetime) = *lifetime {
+ let lt_budget = shape.width.checked_sub(2 + mut_len)?;
+ let lt_str = lifetime.rewrite(
+ context,
+ Shape::legacy(lt_budget, shape.indent + 2 + mut_len),
+ )?;
+ let before_lt_span = mk_sp(cmnt_lo, lifetime.ident.span.lo());
+ if contains_comment(context.snippet(before_lt_span)) {
+ result = combine_strs_with_missing_comments(
+ context,
+ &result,
+ &lt_str,
+ before_lt_span,
+ shape,
+ true,
+ )?;
+ } else {
+ result.push_str(&lt_str);
+ }
+ result.push(' ');
+ cmnt_lo = lifetime.ident.span.hi();
+ }
+
+ if ast::Mutability::Mut == mt.mutbl {
+ let mut_hi = context.snippet_provider.span_after(self.span(), "mut");
+ let before_mut_span = mk_sp(cmnt_lo, mut_hi - BytePos::from_usize(3));
+ if contains_comment(context.snippet(before_mut_span)) {
+ result = combine_strs_with_missing_comments(
+ context,
+ result.trim_end(),
+ mut_str,
+ before_mut_span,
+ shape,
+ true,
+ )?;
+ } else {
+ result.push_str(mut_str);
+ }
+ cmnt_lo = mut_hi;
+ }
+
+ let before_ty_span = mk_sp(cmnt_lo, mt.ty.span.lo());
+ if contains_comment(context.snippet(before_ty_span)) {
+ result = combine_strs_with_missing_comments(
+ context,
+ result.trim_end(),
+ &mt.ty.rewrite(context, shape)?,
+ before_ty_span,
+ shape,
+ true,
+ )?;
+ } else {
+ let used_width = last_line_width(&result);
+ let budget = shape.width.checked_sub(used_width)?;
+ let ty_str = mt
+ .ty
+ .rewrite(context, Shape::legacy(budget, shape.indent + used_width))?;
+ result.push_str(&ty_str);
+ }
+
+ Some(result)
+ }
+ // FIXME: we drop any comments here, even though it's a silly place to put
+ // comments.
+ ast::TyKind::Paren(ref ty) => {
+ if context.config.version() == Version::One
+ || context.config.indent_style() == IndentStyle::Visual
+ {
+ let budget = shape.width.checked_sub(2)?;
+ return ty
+ .rewrite(context, Shape::legacy(budget, shape.indent + 1))
+ .map(|ty_str| format!("({})", ty_str));
+ }
+
+ // 2 = ()
+ if let Some(sh) = shape.sub_width(2) {
+ if let Some(ref s) = ty.rewrite(context, sh) {
+ if !s.contains('\n') {
+ return Some(format!("({})", s));
+ }
+ }
+ }
+
+ let indent_str = shape.indent.to_string_with_newline(context.config);
+ let shape = shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config);
+ let rw = ty.rewrite(context, shape)?;
+ Some(format!(
+ "({}{}{})",
+ shape.to_string_with_newline(context.config),
+ rw,
+ indent_str
+ ))
+ }
+ ast::TyKind::Slice(ref ty) => {
+ let budget = shape.width.checked_sub(4)?;
+ ty.rewrite(context, Shape::legacy(budget, shape.indent + 1))
+ .map(|ty_str| format!("[{}]", ty_str))
+ }
+ ast::TyKind::Tup(ref items) => {
+ rewrite_tuple(context, items.iter(), self.span, shape, items.len() == 1)
+ }
+ ast::TyKind::Path(ref q_self, ref path) => {
+ rewrite_path(context, PathContext::Type, q_self.as_ref(), path, shape)
+ }
+ ast::TyKind::Array(ref ty, ref repeats) => rewrite_pair(
+ &**ty,
+ &*repeats.value,
+ PairParts::new("[", "; ", "]"),
+ context,
+ shape,
+ SeparatorPlace::Back,
+ ),
+ ast::TyKind::Infer => {
+ if shape.width >= 1 {
+ Some("_".to_owned())
+ } else {
+ None
+ }
+ }
+ ast::TyKind::BareFn(ref bare_fn) => rewrite_bare_fn(bare_fn, self.span, context, shape),
+ ast::TyKind::Never => Some(String::from("!")),
+ ast::TyKind::MacCall(ref mac) => {
+ rewrite_macro(mac, None, context, shape, MacroPosition::Expression)
+ }
+ ast::TyKind::ImplicitSelf => Some(String::from("")),
+ ast::TyKind::ImplTrait(_, ref it) => {
+ // Empty trait is not a parser error.
+ if it.is_empty() {
+ return Some("impl".to_owned());
+ }
+ let rw = if context.config.version() == Version::One {
+ it.rewrite(context, shape)
+ } else {
+ join_bounds(context, shape, it, false)
+ };
+ rw.map(|it_str| {
+ let space = if it_str.is_empty() { "" } else { " " };
+ format!("impl{}{}", space, it_str)
+ })
+ }
+ ast::TyKind::CVarArgs => Some("...".to_owned()),
+ ast::TyKind::Err => Some(context.snippet(self.span).to_owned()),
+ ast::TyKind::Typeof(ref anon_const) => rewrite_call(
+ context,
+ "typeof",
+ &[anon_const.value.clone()],
+ self.span,
+ shape,
+ ),
+ }
+ }
+}
+
+fn rewrite_bare_fn(
+ bare_fn: &ast::BareFnTy,
+ span: Span,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+) -> Option<String> {
+ debug!("rewrite_bare_fn {:#?}", shape);
+
+ let mut result = String::with_capacity(128);
+
+ if let Some(ref lifetime_str) = rewrite_lifetime_param(context, shape, &bare_fn.generic_params)
+ {
+ result.push_str("for<");
+ // 6 = "for<> ".len(), 4 = "for<".
+ // This doesn't work out so nicely for the multiline situation with lots of
+ // rightward drift. If that is a problem, we could use the list stuff.
+ result.push_str(lifetime_str);
+ result.push_str("> ");
+ }
+
+ result.push_str(crate::utils::format_unsafety(bare_fn.unsafety));
+
+ result.push_str(&format_extern(
+ bare_fn.ext,
+ context.config.force_explicit_abi(),
+ false,
+ ));
+
+ result.push_str("fn");
+
+ let func_ty_shape = if context.use_block_indent() {
+ shape.offset_left(result.len())?
+ } else {
+ shape.visual_indent(result.len()).sub_width(result.len())?
+ };
+
+ let rewrite = format_function_type(
+ bare_fn.decl.inputs.iter(),
+ &bare_fn.decl.output,
+ bare_fn.decl.c_variadic(),
+ span,
+ context,
+ func_ty_shape,
+ )?;
+
+ result.push_str(&rewrite);
+
+ Some(result)
+}
+
+fn is_generic_bounds_in_order(generic_bounds: &[ast::GenericBound]) -> bool {
+ let is_trait = |b: &ast::GenericBound| match b {
+ ast::GenericBound::Outlives(..) => false,
+ ast::GenericBound::Trait(..) => true,
+ };
+ let is_lifetime = |b: &ast::GenericBound| !is_trait(b);
+ let last_trait_index = generic_bounds.iter().rposition(is_trait);
+ let first_lifetime_index = generic_bounds.iter().position(is_lifetime);
+ match (last_trait_index, first_lifetime_index) {
+ (Some(last_trait_index), Some(first_lifetime_index)) => {
+ last_trait_index < first_lifetime_index
+ }
+ _ => true,
+ }
+}
+
+fn join_bounds(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ items: &[ast::GenericBound],
+ need_indent: bool,
+) -> Option<String> {
+ join_bounds_inner(context, shape, items, need_indent, false)
+}
+
+fn join_bounds_inner(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ items: &[ast::GenericBound],
+ need_indent: bool,
+ force_newline: bool,
+) -> Option<String> {
+ debug_assert!(!items.is_empty());
+
+ let generic_bounds_in_order = is_generic_bounds_in_order(items);
+ let is_bound_extendable = |s: &str, b: &ast::GenericBound| match b {
+ ast::GenericBound::Outlives(..) => true,
+ ast::GenericBound::Trait(..) => last_line_extendable(s),
+ };
+
+ let result = items.iter().enumerate().try_fold(
+ (String::new(), None, false),
+ |(strs, prev_trailing_span, prev_extendable), (i, item)| {
+ let trailing_span = if i < items.len() - 1 {
+ let hi = context
+ .snippet_provider
+ .span_before(mk_sp(items[i + 1].span().lo(), item.span().hi()), "+");
+
+ Some(mk_sp(item.span().hi(), hi))
+ } else {
+ None
+ };
+ let (leading_span, has_leading_comment) = if i > 0 {
+ let lo = context
+ .snippet_provider
+ .span_after(mk_sp(items[i - 1].span().hi(), item.span().lo()), "+");
+
+ let span = mk_sp(lo, item.span().lo());
+
+ let has_comments = contains_comment(context.snippet(span));
+
+ (Some(mk_sp(lo, item.span().lo())), has_comments)
+ } else {
+ (None, false)
+ };
+ let prev_has_trailing_comment = match prev_trailing_span {
+ Some(ts) => contains_comment(context.snippet(ts)),
+ _ => false,
+ };
+
+ let shape = if need_indent && force_newline {
+ shape
+ .block_indent(context.config.tab_spaces())
+ .with_max_width(context.config)
+ } else {
+ shape
+ };
+ let whitespace = if force_newline && (!prev_extendable || !generic_bounds_in_order) {
+ shape
+ .indent
+ .to_string_with_newline(context.config)
+ .to_string()
+ } else {
+ String::from(" ")
+ };
+
+ let joiner = match context.config.type_punctuation_density() {
+ TypeDensity::Compressed => String::from("+"),
+ TypeDensity::Wide => whitespace + "+ ",
+ };
+ let joiner = if has_leading_comment {
+ joiner.trim_end()
+ } else {
+ &joiner
+ };
+ let joiner = if prev_has_trailing_comment {
+ joiner.trim_start()
+ } else {
+ joiner
+ };
+
+ let (extendable, trailing_str) = if i == 0 {
+ let bound_str = item.rewrite(context, shape)?;
+ (is_bound_extendable(&bound_str, item), bound_str)
+ } else {
+ let bound_str = &item.rewrite(context, shape)?;
+ match leading_span {
+ Some(ls) if has_leading_comment => (
+ is_bound_extendable(bound_str, item),
+ combine_strs_with_missing_comments(
+ context, joiner, bound_str, ls, shape, true,
+ )?,
+ ),
+ _ => (
+ is_bound_extendable(bound_str, item),
+ String::from(joiner) + bound_str,
+ ),
+ }
+ };
+ match prev_trailing_span {
+ Some(ts) if prev_has_trailing_comment => combine_strs_with_missing_comments(
+ context,
+ &strs,
+ &trailing_str,
+ ts,
+ shape,
+ true,
+ )
+ .map(|v| (v, trailing_span, extendable)),
+ _ => Some((strs + &trailing_str, trailing_span, extendable)),
+ }
+ },
+ )?;
+
+ if !force_newline
+ && items.len() > 1
+ && (result.0.contains('\n') || result.0.len() > shape.width)
+ {
+ join_bounds_inner(context, shape, items, need_indent, true)
+ } else {
+ Some(result.0)
+ }
+}
+
+pub(crate) fn opaque_ty(ty: &Option<ptr::P<ast::Ty>>) -> Option<&ast::GenericBounds> {
+ ty.as_ref().and_then(|t| match &t.kind {
+ ast::TyKind::ImplTrait(_, bounds) => Some(bounds),
+ _ => None,
+ })
+}
+
+pub(crate) fn can_be_overflowed_type(
+ context: &RewriteContext<'_>,
+ ty: &ast::Ty,
+ len: usize,
+) -> bool {
+ match ty.kind {
+ ast::TyKind::Tup(..) => context.use_block_indent() && len == 1,
+ ast::TyKind::Rptr(_, ref mutty) | ast::TyKind::Ptr(ref mutty) => {
+ can_be_overflowed_type(context, &*mutty.ty, len)
+ }
+ _ => false,
+ }
+}
+
+/// Returns `None` if there is no `LifetimeDef` in the given generic parameters.
+pub(crate) fn rewrite_lifetime_param(
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ generic_params: &[ast::GenericParam],
+) -> Option<String> {
+ let result = generic_params
+ .iter()
+ .filter(|p| matches!(p.kind, ast::GenericParamKind::Lifetime))
+ .map(|lt| lt.rewrite(context, shape))
+ .collect::<Option<Vec<_>>>()?
+ .join(", ");
+ if result.is_empty() {
+ None
+ } else {
+ Some(result)
+ }
+}
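As a rough illustration of what the path code above produces (hypothetical snippets, not part of this commit): a qualified self keeps the `<Ty as Trait>::` prefix assembled in rewrite_path, and a global path keeps its leading `::` except in import position, where rewrite_path itself emits no leading `::`.

    // PathContext::Type, QSelf with position 1
    type Item = <Vec<u8> as IntoIterator>::Item;
    // PathContext::Expr, global path: the leading `::` is kept
    ::std::mem::swap(&mut a, &mut b);
    // PathContext::Import, global path: rewrite_path returns "std::mem" without the `::`
    use std::mem;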
diff --git a/src/tools/rustfmt/src/utils.rs b/src/tools/rustfmt/src/utils.rs
new file mode 100644
index 000000000..cd8528556
--- /dev/null
+++ b/src/tools/rustfmt/src/utils.rs
@@ -0,0 +1,708 @@
+use std::borrow::Cow;
+
+use rustc_ast::ast::{
+ self, Attribute, MetaItem, MetaItemKind, NestedMetaItem, NodeId, Path, Visibility,
+ VisibilityKind,
+};
+use rustc_ast::ptr;
+use rustc_ast_pretty::pprust;
+use rustc_span::{sym, symbol, BytePos, LocalExpnId, Span, Symbol, SyntaxContext};
+use unicode_width::UnicodeWidthStr;
+
+use crate::comment::{filter_normal_code, CharClasses, FullCodeCharKind, LineClasses};
+use crate::config::{Config, Version};
+use crate::rewrite::RewriteContext;
+use crate::shape::{Indent, Shape};
+
+#[inline]
+pub(crate) fn depr_skip_annotation() -> Symbol {
+ Symbol::intern("rustfmt_skip")
+}
+
+#[inline]
+pub(crate) fn skip_annotation() -> Symbol {
+ Symbol::intern("rustfmt::skip")
+}
+
+pub(crate) fn rewrite_ident<'a>(context: &'a RewriteContext<'_>, ident: symbol::Ident) -> &'a str {
+ context.snippet(ident.span)
+}
+
+// Computes the length of a string's last line, minus offset.
+pub(crate) fn extra_offset(text: &str, shape: Shape) -> usize {
+ match text.rfind('\n') {
+ // 1 for newline character
+ Some(idx) => text.len().saturating_sub(idx + 1 + shape.used_width()),
+ None => text.len(),
+ }
+}
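In other words, extra_offset reports how far the last line of `text` extends past the columns already used by `shape`; a minimal sketch, assuming a hypothetical `shape` value:

    // extra_offset("foo", shape)          == 3                       (single line: the whole string counts)
    // extra_offset("foo\n    bar", shape) == 7 - shape.used_width()  (last-line length minus used columns, saturating at 0)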
+
+pub(crate) fn is_same_visibility(a: &Visibility, b: &Visibility) -> bool {
+ match (&a.kind, &b.kind) {
+ (
+ VisibilityKind::Restricted { path: p, .. },
+ VisibilityKind::Restricted { path: q, .. },
+ ) => pprust::path_to_string(p) == pprust::path_to_string(q),
+ (VisibilityKind::Public, VisibilityKind::Public)
+ | (VisibilityKind::Inherited, VisibilityKind::Inherited) => true,
+ _ => false,
+ }
+}
+
+// Uses Cow to avoid allocating in the common cases.
+pub(crate) fn format_visibility(
+ context: &RewriteContext<'_>,
+ vis: &Visibility,
+) -> Cow<'static, str> {
+ match vis.kind {
+ VisibilityKind::Public => Cow::from("pub "),
+ VisibilityKind::Inherited => Cow::from(""),
+ VisibilityKind::Restricted { ref path, .. } => {
+ let Path { ref segments, .. } = **path;
+ let mut segments_iter = segments.iter().map(|seg| rewrite_ident(context, seg.ident));
+ if path.is_global() {
+ segments_iter
+ .next()
+ .expect("Non-global path in pub(restricted)?");
+ }
+ let is_keyword = |s: &str| s == "crate" || s == "self" || s == "super";
+ let path = segments_iter.collect::<Vec<_>>().join("::");
+ let in_str = if is_keyword(&path) { "" } else { "in " };
+
+ Cow::from(format!("pub({}{}) ", in_str, path))
+ }
+ }
+}
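Roughly, the mapping is as follows (source visibility on the left, returned prefix on the right; note the trailing space):

    // pub              -> "pub "
    // (inherited)      -> ""
    // pub(crate)       -> "pub(crate) "
    // pub(self)        -> "pub(self) "
    // pub(in crate::a) -> "pub(in crate::a) "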
+
+#[inline]
+pub(crate) fn format_async(is_async: &ast::Async) -> &'static str {
+ match is_async {
+ ast::Async::Yes { .. } => "async ",
+ ast::Async::No => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_constness(constness: ast::Const) -> &'static str {
+ match constness {
+ ast::Const::Yes(..) => "const ",
+ ast::Const::No => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_constness_right(constness: ast::Const) -> &'static str {
+ match constness {
+ ast::Const::Yes(..) => " const",
+ ast::Const::No => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_defaultness(defaultness: ast::Defaultness) -> &'static str {
+ match defaultness {
+ ast::Defaultness::Default(..) => "default ",
+ ast::Defaultness::Final => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_unsafety(unsafety: ast::Unsafe) -> &'static str {
+ match unsafety {
+ ast::Unsafe::Yes(..) => "unsafe ",
+ ast::Unsafe::No => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_auto(is_auto: ast::IsAuto) -> &'static str {
+ match is_auto {
+ ast::IsAuto::Yes => "auto ",
+ ast::IsAuto::No => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_mutability(mutability: ast::Mutability) -> &'static str {
+ match mutability {
+ ast::Mutability::Mut => "mut ",
+ ast::Mutability::Not => "",
+ }
+}
+
+#[inline]
+pub(crate) fn format_extern(
+ ext: ast::Extern,
+ explicit_abi: bool,
+ is_mod: bool,
+) -> Cow<'static, str> {
+ let abi = match ext {
+ ast::Extern::None => "Rust".to_owned(),
+ ast::Extern::Implicit(_) => "C".to_owned(),
+ ast::Extern::Explicit(abi, _) => abi.symbol_unescaped.to_string(),
+ };
+
+ if abi == "Rust" && !is_mod {
+ Cow::from("")
+ } else if abi == "C" && !explicit_abi {
+ Cow::from("extern ")
+ } else {
+ Cow::from(format!(r#"extern "{}" "#, abi))
+ }
+}
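A sketch of the resulting prefixes, where `explicit_abi` comes from the force_explicit_abi option (trailing spaces included):

    // no extern              -> ""
    // extern fn (implicit C) -> "extern "         or `extern "C" ` with explicit_abi
    // extern "C" fn          -> `extern "C" `     or "extern " without explicit_abi
    // extern "system" fn     -> `extern "system" `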
+
+#[inline]
+// Transform `Vec<rustc_ast::ptr::P<T>>` into `Vec<&T>`
+pub(crate) fn ptr_vec_to_ref_vec<T>(vec: &[ptr::P<T>]) -> Vec<&T> {
+ vec.iter().map(|x| &**x).collect::<Vec<_>>()
+}
+
+#[inline]
+pub(crate) fn filter_attributes(
+ attrs: &[ast::Attribute],
+ style: ast::AttrStyle,
+) -> Vec<ast::Attribute> {
+ attrs
+ .iter()
+ .filter(|a| a.style == style)
+ .cloned()
+ .collect::<Vec<_>>()
+}
+
+#[inline]
+pub(crate) fn inner_attributes(attrs: &[ast::Attribute]) -> Vec<ast::Attribute> {
+ filter_attributes(attrs, ast::AttrStyle::Inner)
+}
+
+#[inline]
+pub(crate) fn outer_attributes(attrs: &[ast::Attribute]) -> Vec<ast::Attribute> {
+ filter_attributes(attrs, ast::AttrStyle::Outer)
+}
+
+#[inline]
+pub(crate) fn is_single_line(s: &str) -> bool {
+ !s.chars().any(|c| c == '\n')
+}
+
+#[inline]
+pub(crate) fn first_line_contains_single_line_comment(s: &str) -> bool {
+ s.lines().next().map_or(false, |l| l.contains("//"))
+}
+
+#[inline]
+pub(crate) fn last_line_contains_single_line_comment(s: &str) -> bool {
+ s.lines().last().map_or(false, |l| l.contains("//"))
+}
+
+#[inline]
+pub(crate) fn is_attributes_extendable(attrs_str: &str) -> bool {
+ !attrs_str.contains('\n') && !last_line_contains_single_line_comment(attrs_str)
+}
+
+/// The width of the first line in s.
+#[inline]
+pub(crate) fn first_line_width(s: &str) -> usize {
+ unicode_str_width(s.splitn(2, '\n').next().unwrap_or(""))
+}
+
+/// The width of the last line in s.
+#[inline]
+pub(crate) fn last_line_width(s: &str) -> usize {
+ unicode_str_width(s.rsplitn(2, '\n').next().unwrap_or(""))
+}
+
+/// The total used width of the last line.
+#[inline]
+pub(crate) fn last_line_used_width(s: &str, offset: usize) -> usize {
+ if s.contains('\n') {
+ last_line_width(s)
+ } else {
+ offset + unicode_str_width(s)
+ }
+}
+
+#[inline]
+pub(crate) fn trimmed_last_line_width(s: &str) -> usize {
+ unicode_str_width(match s.rfind('\n') {
+ Some(n) => s[(n + 1)..].trim(),
+ None => s.trim(),
+ })
+}
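A few assertions sketching how these width helpers differ when called from inside the crate (ASCII input, so unicode_str_width equals the byte length):

    assert_eq!(first_line_width("foo\nbar baz"), 3);
    assert_eq!(last_line_width("foo\nbar baz"), 7);
    assert_eq!(last_line_used_width("foo", 10), 13); // no newline: offset + width
    assert_eq!(trimmed_last_line_width("foo\n   bar  "), 3);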
+
+#[inline]
+pub(crate) fn last_line_extendable(s: &str) -> bool {
+ if s.ends_with("\"#") {
+ return true;
+ }
+ for c in s.chars().rev() {
+ match c {
+ '(' | ')' | ']' | '}' | '?' | '>' => continue,
+ '\n' => break,
+ _ if c.is_whitespace() => continue,
+ _ => return false,
+ }
+ }
+ true
+}
+
+#[inline]
+fn is_skip(meta_item: &MetaItem) -> bool {
+ match meta_item.kind {
+ MetaItemKind::Word => {
+ let path_str = pprust::path_to_string(&meta_item.path);
+ path_str == skip_annotation().as_str() || path_str == depr_skip_annotation().as_str()
+ }
+ MetaItemKind::List(ref l) => {
+ meta_item.has_name(sym::cfg_attr) && l.len() == 2 && is_skip_nested(&l[1])
+ }
+ _ => false,
+ }
+}
+
+#[inline]
+fn is_skip_nested(meta_item: &NestedMetaItem) -> bool {
+ match meta_item {
+ NestedMetaItem::MetaItem(ref mi) => is_skip(mi),
+ NestedMetaItem::Literal(_) => false,
+ }
+}
+
+#[inline]
+pub(crate) fn contains_skip(attrs: &[Attribute]) -> bool {
+ attrs
+ .iter()
+ .any(|a| a.meta().map_or(false, |a| is_skip(&a)))
+}
+
+#[inline]
+pub(crate) fn semicolon_for_expr(context: &RewriteContext<'_>, expr: &ast::Expr) -> bool {
+ // Never try to insert semicolons on expressions when we're inside
+ // a macro definition - this can prevent the macro from compiling
+ // when used in expression position
+ if context.is_macro_def {
+ return false;
+ }
+
+ match expr.kind {
+ ast::ExprKind::Ret(..) | ast::ExprKind::Continue(..) | ast::ExprKind::Break(..) => {
+ context.config.trailing_semicolon()
+ }
+ _ => false,
+ }
+}
+
+#[inline]
+pub(crate) fn semicolon_for_stmt(context: &RewriteContext<'_>, stmt: &ast::Stmt) -> bool {
+ match stmt.kind {
+ ast::StmtKind::Semi(ref expr) => match expr.kind {
+ ast::ExprKind::While(..) | ast::ExprKind::Loop(..) | ast::ExprKind::ForLoop(..) => {
+ false
+ }
+ ast::ExprKind::Break(..) | ast::ExprKind::Continue(..) | ast::ExprKind::Ret(..) => {
+ context.config.trailing_semicolon()
+ }
+ _ => true,
+ },
+ ast::StmtKind::Expr(..) => false,
+ _ => true,
+ }
+}
+
+#[inline]
+pub(crate) fn stmt_expr(stmt: &ast::Stmt) -> Option<&ast::Expr> {
+ match stmt.kind {
+ ast::StmtKind::Expr(ref expr) => Some(expr),
+ _ => None,
+ }
+}
+
+/// Returns the number of LF and CRLF respectively.
+pub(crate) fn count_lf_crlf(input: &str) -> (usize, usize) {
+ let mut lf = 0;
+ let mut crlf = 0;
+ let mut is_crlf = false;
+ for c in input.as_bytes() {
+ match c {
+ b'\r' => is_crlf = true,
+ b'\n' if is_crlf => crlf += 1,
+ b'\n' => lf += 1,
+ _ => is_crlf = false,
+ }
+ }
+ (lf, crlf)
+}
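For example, as an in-crate assertion:

    // two bare LFs ("a\n" and "c\n") and one CRLF ("b\r\n")
    assert_eq!(count_lf_crlf("a\nb\r\nc\n"), (2, 1));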
+
+pub(crate) fn count_newlines(input: &str) -> usize {
+ // Using bytes to omit UTF-8 decoding
+ bytecount::count(input.as_bytes(), b'\n')
+}
+
+// For format_missing and last_pos, need to use the source callsite (if applicable).
+// Required as generated code spans aren't guaranteed to follow on from the last span.
+macro_rules! source {
+ ($this:ident, $sp:expr) => {
+ $sp.source_callsite()
+ };
+}
+
+pub(crate) fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
+ Span::new(lo, hi, SyntaxContext::root(), None)
+}
+
+pub(crate) fn mk_sp_lo_plus_one(lo: BytePos) -> Span {
+ Span::new(lo, lo + BytePos(1), SyntaxContext::root(), None)
+}
+
+// Returns `true` if the given span does not intersect with file lines.
+macro_rules! out_of_file_lines_range {
+ ($self:ident, $span:expr) => {
+ !$self.config.file_lines().is_all()
+ && !$self
+ .config
+ .file_lines()
+ .intersects(&$self.parse_sess.lookup_line_range($span))
+ };
+}
+
+macro_rules! skip_out_of_file_lines_range {
+ ($self:ident, $span:expr) => {
+ if out_of_file_lines_range!($self, $span) {
+ return None;
+ }
+ };
+}
+
+macro_rules! skip_out_of_file_lines_range_visitor {
+ ($self:ident, $span:expr) => {
+ if out_of_file_lines_range!($self, $span) {
+ $self.push_rewrite($span, None);
+ return;
+ }
+ };
+}
+
+// Wraps String in an Option. Returns Some when the string adheres to the
+// Rewrite constraints defined for the Rewrite trait and None otherwise.
+pub(crate) fn wrap_str(s: String, max_width: usize, shape: Shape) -> Option<String> {
+ if is_valid_str(&filter_normal_code(&s), max_width, shape) {
+ Some(s)
+ } else {
+ None
+ }
+}
+
+fn is_valid_str(snippet: &str, max_width: usize, shape: Shape) -> bool {
+ if !snippet.is_empty() {
+ // The first line must fit within `shape.width`.
+ if first_line_width(snippet) > shape.width {
+ return false;
+ }
+ // If the snippet does not include a newline, we are done.
+ if is_single_line(snippet) {
+ return true;
+ }
+ // The other lines must fit within the maximum width.
+ if snippet
+ .lines()
+ .skip(1)
+ .any(|line| unicode_str_width(line) > max_width)
+ {
+ return false;
+ }
+ // A special check for the last line, since the caller may
+ // place trailing characters on this line.
+ if last_line_width(snippet) > shape.used_width() + shape.width {
+ return false;
+ }
+ }
+ true
+}
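Putting the three checks together, a rough sketch with a hypothetical shape of width 10 and used_width 4, and max_width 20:

    // wrap_str("short".to_owned(), 20, shape)                      == Some("short")
    // wrap_str("this line is far too long".to_owned(), 20, shape)  == None  (first line wider than shape.width)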
+
+#[inline]
+pub(crate) fn colon_spaces(config: &Config) -> &'static str {
+ let before = config.space_before_colon();
+ let after = config.space_after_colon();
+ match (before, after) {
+ (true, true) => " : ",
+ (true, false) => " :",
+ (false, true) => ": ",
+ (false, false) => ":",
+ }
+}
+
+#[inline]
+pub(crate) fn left_most_sub_expr(e: &ast::Expr) -> &ast::Expr {
+ match e.kind {
+ ast::ExprKind::Call(ref e, _)
+ | ast::ExprKind::Binary(_, ref e, _)
+ | ast::ExprKind::Cast(ref e, _)
+ | ast::ExprKind::Type(ref e, _)
+ | ast::ExprKind::Assign(ref e, _, _)
+ | ast::ExprKind::AssignOp(_, ref e, _)
+ | ast::ExprKind::Field(ref e, _)
+ | ast::ExprKind::Index(ref e, _)
+ | ast::ExprKind::Range(Some(ref e), _, _)
+ | ast::ExprKind::Try(ref e) => left_most_sub_expr(e),
+ _ => e,
+ }
+}
+
+#[inline]
+pub(crate) fn starts_with_newline(s: &str) -> bool {
+ s.starts_with('\n') || s.starts_with("\r\n")
+}
+
+#[inline]
+pub(crate) fn first_line_ends_with(s: &str, c: char) -> bool {
+ s.lines().next().map_or(false, |l| l.ends_with(c))
+}
+
+// States whether an expression's last line exclusively consists of closing
+// parens, braces, and brackets in its idiomatic formatting.
+pub(crate) fn is_block_expr(context: &RewriteContext<'_>, expr: &ast::Expr, repr: &str) -> bool {
+ match expr.kind {
+ ast::ExprKind::MacCall(..)
+ | ast::ExprKind::Call(..)
+ | ast::ExprKind::MethodCall(..)
+ | ast::ExprKind::Array(..)
+ | ast::ExprKind::Struct(..)
+ | ast::ExprKind::While(..)
+ | ast::ExprKind::If(..)
+ | ast::ExprKind::Block(..)
+ | ast::ExprKind::ConstBlock(..)
+ | ast::ExprKind::Async(..)
+ | ast::ExprKind::Loop(..)
+ | ast::ExprKind::ForLoop(..)
+ | ast::ExprKind::TryBlock(..)
+ | ast::ExprKind::Match(..) => repr.contains('\n'),
+ ast::ExprKind::Paren(ref expr)
+ | ast::ExprKind::Binary(_, _, ref expr)
+ | ast::ExprKind::Index(_, ref expr)
+ | ast::ExprKind::Unary(_, ref expr)
+ | ast::ExprKind::Closure(_, _, _, _, _, ref expr, _)
+ | ast::ExprKind::Try(ref expr)
+ | ast::ExprKind::Yield(Some(ref expr)) => is_block_expr(context, expr, repr),
+ // This can only be a string lit
+ ast::ExprKind::Lit(_) => {
+ repr.contains('\n') && trimmed_last_line_width(repr) <= context.config.tab_spaces()
+ }
+ ast::ExprKind::AddrOf(..)
+ | ast::ExprKind::Assign(..)
+ | ast::ExprKind::AssignOp(..)
+ | ast::ExprKind::Await(..)
+ | ast::ExprKind::Box(..)
+ | ast::ExprKind::Break(..)
+ | ast::ExprKind::Cast(..)
+ | ast::ExprKind::Continue(..)
+ | ast::ExprKind::Err
+ | ast::ExprKind::Field(..)
+ | ast::ExprKind::InlineAsm(..)
+ | ast::ExprKind::Let(..)
+ | ast::ExprKind::Path(..)
+ | ast::ExprKind::Range(..)
+ | ast::ExprKind::Repeat(..)
+ | ast::ExprKind::Ret(..)
+ | ast::ExprKind::Yeet(..)
+ | ast::ExprKind::Tup(..)
+ | ast::ExprKind::Type(..)
+ | ast::ExprKind::Yield(None)
+ | ast::ExprKind::Underscore => false,
+ }
+}
+
+/// Removes trailing spaces from the specified snippet. We do not remove spaces
+/// inside strings or comments.
+pub(crate) fn remove_trailing_white_spaces(text: &str) -> String {
+ let mut buffer = String::with_capacity(text.len());
+ let mut space_buffer = String::with_capacity(128);
+ for (char_kind, c) in CharClasses::new(text.chars()) {
+ match c {
+ '\n' => {
+ if char_kind == FullCodeCharKind::InString {
+ buffer.push_str(&space_buffer);
+ }
+ space_buffer.clear();
+ buffer.push('\n');
+ }
+ _ if c.is_whitespace() => {
+ space_buffer.push(c);
+ }
+ _ => {
+ if !space_buffer.is_empty() {
+ buffer.push_str(&space_buffer);
+ space_buffer.clear();
+ }
+ buffer.push(c);
+ }
+ }
+ }
+ buffer
+}
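For example, trailing blanks outside of string literals are dropped while interior spacing is kept; as an in-crate assertion:

    assert_eq!(
        remove_trailing_white_spaces("let x = 1;   \nlet y = 2;  "),
        "let x = 1;\nlet y = 2;"
    );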
+
+/// Indent each line according to the specified `indent`.
+/// e.g.
+///
+/// ```rust,compile_fail
+/// foo!{
+/// x,
+/// y,
+/// foo(
+/// a,
+/// b,
+/// c,
+/// ),
+/// }
+/// ```
+///
+/// will become
+///
+/// ```rust,compile_fail
+/// foo!{
+/// x,
+/// y,
+/// foo(
+/// a,
+/// b,
+/// c,
+/// ),
+/// }
+/// ```
+pub(crate) fn trim_left_preserve_layout(
+ orig: &str,
+ indent: Indent,
+ config: &Config,
+) -> Option<String> {
+ let mut lines = LineClasses::new(orig);
+ let first_line = lines.next().map(|(_, s)| s.trim_end().to_owned())?;
+ let mut trimmed_lines = Vec::with_capacity(16);
+
+ let mut veto_trim = false;
+ let min_prefix_space_width = lines
+ .filter_map(|(kind, line)| {
+ let mut trimmed = true;
+ let prefix_space_width = if is_empty_line(&line) {
+ None
+ } else {
+ Some(get_prefix_space_width(config, &line))
+ };
+
+ // just InString{Commented} in order to allow the start of a string to be indented
+ let new_veto_trim_value = (kind == FullCodeCharKind::InString
+ || (config.version() == Version::Two
+ && kind == FullCodeCharKind::InStringCommented))
+ && !line.ends_with('\\');
+ let line = if veto_trim || new_veto_trim_value {
+ veto_trim = new_veto_trim_value;
+ trimmed = false;
+ line
+ } else {
+ line.trim().to_owned()
+ };
+ trimmed_lines.push((trimmed, line, prefix_space_width));
+
+ // Because there is a veto against trimming and indenting lines within a string,
+ // such lines should not be taken into account when computing the minimum.
+ match kind {
+ FullCodeCharKind::InStringCommented | FullCodeCharKind::EndStringCommented
+ if config.version() == Version::Two =>
+ {
+ None
+ }
+ FullCodeCharKind::InString | FullCodeCharKind::EndString => None,
+ _ => prefix_space_width,
+ }
+ })
+ .min()?;
+
+ Some(
+ first_line
+ + "\n"
+ + &trimmed_lines
+ .iter()
+ .map(
+ |&(trimmed, ref line, prefix_space_width)| match prefix_space_width {
+ _ if !trimmed => line.to_owned(),
+ Some(original_indent_width) => {
+ let new_indent_width = indent.width()
+ + original_indent_width.saturating_sub(min_prefix_space_width);
+ let new_indent = Indent::from_width(config, new_indent_width);
+ format!("{}{}", new_indent.to_string(config), line)
+ }
+ None => String::new(),
+ },
+ )
+ .collect::<Vec<_>>()
+ .join("\n"),
+ )
+}
+
+/// Based on the given line, determine if the next line can be indented or not.
+/// This allows preserving the indentation of multi-line literals when
+/// re-inserting a code block that has been formatted separately from the rest
+/// of the code, such as code in macro defs or code blocks in doc comments.
+pub(crate) fn indent_next_line(kind: FullCodeCharKind, line: &str, config: &Config) -> bool {
+ if kind.is_string() {
+ // If the string ends with '\', the string has been wrapped over
+ // multiple lines. If `format_strings = true`, then the indentation of
+ // strings wrapped over multiple lines will have been adjusted while
+ // formatting the code block, therefore the string's indentation needs
+ // to be adjusted for the code surrounding the code block.
+ config.format_strings() && line.ends_with('\\')
+ } else if config.version() == Version::Two {
+ !kind.is_commented_string()
+ } else {
+ true
+ }
+}
+
+pub(crate) fn is_empty_line(s: &str) -> bool {
+ s.is_empty() || s.chars().all(char::is_whitespace)
+}
+
+fn get_prefix_space_width(config: &Config, s: &str) -> usize {
+ let mut width = 0;
+ for c in s.chars() {
+ match c {
+ ' ' => width += 1,
+ '\t' => width += config.tab_spaces(),
+ _ => return width,
+ }
+ }
+ width
+}
+
+pub(crate) trait NodeIdExt {
+ fn root() -> Self;
+}
+
+impl NodeIdExt for NodeId {
+ fn root() -> NodeId {
+ NodeId::placeholder_from_expn_id(LocalExpnId::ROOT)
+ }
+}
+
+pub(crate) fn unicode_str_width(s: &str) -> usize {
+ s.width()
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_remove_trailing_white_spaces() {
+ let s = " r#\"\n test\n \"#";
+ assert_eq!(remove_trailing_white_spaces(s), s);
+ }
+
+ #[test]
+ fn test_trim_left_preserve_layout() {
+ let s = "aaa\n\tbbb\n ccc";
+ let config = Config::default();
+ let indent = Indent::new(4, 0);
+ assert_eq!(
+ trim_left_preserve_layout(s, indent, &config),
+ Some("aaa\n bbb\n ccc".to_string())
+ );
+ }
+}
diff --git a/src/tools/rustfmt/src/vertical.rs b/src/tools/rustfmt/src/vertical.rs
new file mode 100644
index 000000000..a06bc995a
--- /dev/null
+++ b/src/tools/rustfmt/src/vertical.rs
@@ -0,0 +1,302 @@
+// Format with vertical alignment.
+
+use std::cmp;
+
+use itertools::Itertools;
+use rustc_ast::ast;
+use rustc_span::{BytePos, Span};
+
+use crate::comment::combine_strs_with_missing_comments;
+use crate::config::lists::*;
+use crate::expr::rewrite_field;
+use crate::items::{rewrite_struct_field, rewrite_struct_field_prefix};
+use crate::lists::{
+ definitive_tactic, itemize_list, write_list, ListFormatting, ListItem, Separator,
+};
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::{Indent, Shape};
+use crate::source_map::SpanUtils;
+use crate::spanned::Spanned;
+use crate::utils::{
+ contains_skip, is_attributes_extendable, mk_sp, rewrite_ident, trimmed_last_line_width,
+};
+
+pub(crate) trait AlignedItem {
+ fn skip(&self) -> bool;
+ fn get_span(&self) -> Span;
+ fn rewrite_prefix(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String>;
+ fn rewrite_aligned_item(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ prefix_max_width: usize,
+ ) -> Option<String>;
+}
+
+impl AlignedItem for ast::FieldDef {
+ fn skip(&self) -> bool {
+ contains_skip(&self.attrs)
+ }
+
+ fn get_span(&self) -> Span {
+ self.span()
+ }
+
+ fn rewrite_prefix(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let attrs_str = self.attrs.rewrite(context, shape)?;
+ let missing_span = if self.attrs.is_empty() {
+ mk_sp(self.span.lo(), self.span.lo())
+ } else {
+ mk_sp(self.attrs.last().unwrap().span.hi(), self.span.lo())
+ };
+ let attrs_extendable = self.ident.is_none() && is_attributes_extendable(&attrs_str);
+ rewrite_struct_field_prefix(context, self).and_then(|field_str| {
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ &field_str,
+ missing_span,
+ shape,
+ attrs_extendable,
+ )
+ })
+ }
+
+ fn rewrite_aligned_item(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ prefix_max_width: usize,
+ ) -> Option<String> {
+ rewrite_struct_field(context, self, shape, prefix_max_width)
+ }
+}
+
+impl AlignedItem for ast::ExprField {
+ fn skip(&self) -> bool {
+ contains_skip(&self.attrs)
+ }
+
+ fn get_span(&self) -> Span {
+ self.span()
+ }
+
+ fn rewrite_prefix(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
+ let attrs_str = self.attrs.rewrite(context, shape)?;
+ let name = rewrite_ident(context, self.ident);
+ let missing_span = if self.attrs.is_empty() {
+ mk_sp(self.span.lo(), self.span.lo())
+ } else {
+ mk_sp(self.attrs.last().unwrap().span.hi(), self.span.lo())
+ };
+ combine_strs_with_missing_comments(
+ context,
+ &attrs_str,
+ name,
+ missing_span,
+ shape,
+ is_attributes_extendable(&attrs_str),
+ )
+ }
+
+ fn rewrite_aligned_item(
+ &self,
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ prefix_max_width: usize,
+ ) -> Option<String> {
+ rewrite_field(context, self, shape, prefix_max_width)
+ }
+}
+
+pub(crate) fn rewrite_with_alignment<T: AlignedItem>(
+ fields: &[T],
+ context: &RewriteContext<'_>,
+ shape: Shape,
+ span: Span,
+ one_line_width: usize,
+) -> Option<String> {
+ let (spaces, group_index) = if context.config.struct_field_align_threshold() > 0 {
+ group_aligned_items(context, fields)
+ } else {
+ ("", fields.len() - 1)
+ };
+ let init = &fields[0..=group_index];
+ let rest = &fields[group_index + 1..];
+ let init_last_pos = if rest.is_empty() {
+ span.hi()
+ } else {
+ // Decide whether the missing comments should stick to init or rest.
+ let init_hi = init[init.len() - 1].get_span().hi();
+ let rest_lo = rest[0].get_span().lo();
+ let missing_span = mk_sp(init_hi, rest_lo);
+ let missing_span = mk_sp(
+ context.snippet_provider.span_after(missing_span, ","),
+ missing_span.hi(),
+ );
+
+ let snippet = context.snippet(missing_span);
+ if snippet.trim_start().starts_with("//") {
+ let offset = snippet.lines().next().map_or(0, str::len);
+ // 2 = "," + "\n"
+ init_hi + BytePos(offset as u32 + 2)
+ } else if snippet.trim_start().starts_with("/*") {
+ let comment_lines = snippet
+ .lines()
+ .position(|line| line.trim_end().ends_with("*/"))
+ .unwrap_or(0);
+
+ let offset = snippet
+ .lines()
+ .take(comment_lines + 1)
+ .collect::<Vec<_>>()
+ .join("\n")
+ .len();
+
+ init_hi + BytePos(offset as u32 + 2)
+ } else {
+ missing_span.lo()
+ }
+ };
+ let init_span = mk_sp(span.lo(), init_last_pos);
+ let one_line_width = if rest.is_empty() { one_line_width } else { 0 };
+
+ // if another group follows, we must force a separator
+ let force_separator = !rest.is_empty();
+
+ let result = rewrite_aligned_items_inner(
+ context,
+ init,
+ init_span,
+ shape.indent,
+ one_line_width,
+ force_separator,
+ )?;
+ if rest.is_empty() {
+ Some(result + spaces)
+ } else {
+ let rest_span = mk_sp(init_last_pos, span.hi());
+ let rest_str = rewrite_with_alignment(rest, context, shape, rest_span, one_line_width)?;
+ Some(format!(
+ "{}{}\n{}{}",
+ result,
+ spaces,
+ &shape.indent.to_string(context.config),
+ &rest_str
+ ))
+ }
+}
+
+fn struct_field_prefix_max_min_width<T: AlignedItem>(
+ context: &RewriteContext<'_>,
+ fields: &[T],
+ shape: Shape,
+) -> (usize, usize) {
+ fields
+ .iter()
+ .map(|field| {
+ field
+ .rewrite_prefix(context, shape)
+ .map(|field_str| trimmed_last_line_width(&field_str))
+ })
+ .fold_options((0, ::std::usize::MAX), |(max_len, min_len), len| {
+ (cmp::max(max_len, len), cmp::min(min_len, len))
+ })
+ .unwrap_or((0, 0))
+}
+
+fn rewrite_aligned_items_inner<T: AlignedItem>(
+ context: &RewriteContext<'_>,
+ fields: &[T],
+ span: Span,
+ offset: Indent,
+ one_line_width: usize,
+ force_trailing_separator: bool,
+) -> Option<String> {
+ // 1 = ","
+ let item_shape = Shape::indented(offset, context.config).sub_width(1)?;
+ let (mut field_prefix_max_width, field_prefix_min_width) =
+ struct_field_prefix_max_min_width(context, fields, item_shape);
+ let max_diff = field_prefix_max_width.saturating_sub(field_prefix_min_width);
+ if max_diff > context.config.struct_field_align_threshold() {
+ field_prefix_max_width = 0;
+ }
+
+ let mut items = itemize_list(
+ context.snippet_provider,
+ fields.iter(),
+ "}",
+ ",",
+ |field| field.get_span().lo(),
+ |field| field.get_span().hi(),
+ |field| field.rewrite_aligned_item(context, item_shape, field_prefix_max_width),
+ span.lo(),
+ span.hi(),
+ false,
+ )
+ .collect::<Vec<_>>();
+
+ let tactic = definitive_tactic(
+ &items,
+ ListTactic::HorizontalVertical,
+ Separator::Comma,
+ one_line_width,
+ );
+
+ if tactic == DefinitiveListTactic::Horizontal {
+ // Since the items fit on a line, there is no need to align them.
+ let do_rewrite =
+ |field: &T| -> Option<String> { field.rewrite_aligned_item(context, item_shape, 0) };
+ fields
+ .iter()
+ .zip(items.iter_mut())
+ .for_each(|(field, list_item): (&T, &mut ListItem)| {
+ if list_item.item.is_some() {
+ list_item.item = do_rewrite(field);
+ }
+ });
+ }
+
+ let separator_tactic = if force_trailing_separator {
+ SeparatorTactic::Always
+ } else {
+ context.config.trailing_comma()
+ };
+
+ let fmt = ListFormatting::new(item_shape, context.config)
+ .tactic(tactic)
+ .trailing_separator(separator_tactic)
+ .preserve_newline(true);
+ write_list(&items, &fmt)
+}
+
+/// Returns the group separator to use when rewriting the fields and the index
+/// in `fields` up to which a field belongs to the current group.
+/// Groups are defined by blank lines.
+fn group_aligned_items<T: AlignedItem>(
+ context: &RewriteContext<'_>,
+ fields: &[T],
+) -> (&'static str, usize) {
+ let mut index = 0;
+ for i in 0..fields.len() - 1 {
+ if fields[i].skip() {
+ return ("", index);
+ }
+ let span = mk_sp(fields[i].get_span().hi(), fields[i + 1].get_span().lo());
+ let snippet = context
+ .snippet(span)
+ .lines()
+ .skip(1)
+ .collect::<Vec<_>>()
+ .join("\n");
+ let has_blank_line = snippet
+ .lines()
+ .dropping_back(1)
+ .any(|l| l.trim().is_empty());
+ if has_blank_line {
+ return ("\n", index);
+ }
+ index += 1;
+ }
+ ("", index)
+}
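The net effect, assuming a hypothetical config with struct_field_align_threshold set high enough, is roughly the following output, with a blank line starting a new alignment group:

    struct Foo {
        x:                u32,
        much_longer_name: String,

        // the blank line above starts a new alignment group
        y: u8,
    }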
diff --git a/src/tools/rustfmt/src/visitor.rs b/src/tools/rustfmt/src/visitor.rs
new file mode 100644
index 000000000..9a0e0752c
--- /dev/null
+++ b/src/tools/rustfmt/src/visitor.rs
@@ -0,0 +1,1010 @@
+use std::cell::{Cell, RefCell};
+use std::rc::Rc;
+
+use rustc_ast::{ast, token::Delimiter, visit};
+use rustc_data_structures::sync::Lrc;
+use rustc_span::{symbol, BytePos, Pos, Span};
+
+use crate::attr::*;
+use crate::comment::{contains_comment, rewrite_comment, CodeCharKind, CommentCodeSlices};
+use crate::config::Version;
+use crate::config::{BraceStyle, Config};
+use crate::coverage::transform_missing_snippet;
+use crate::items::{
+ format_impl, format_trait, format_trait_alias, is_mod_decl, is_use_item, rewrite_extern_crate,
+ rewrite_type_alias, FnBraceStyle, FnSig, ItemVisitorKind, StaticParts, StructParts,
+};
+use crate::macros::{macro_style, rewrite_macro, rewrite_macro_def, MacroPosition};
+use crate::modules::Module;
+use crate::parse::session::ParseSess;
+use crate::rewrite::{Rewrite, RewriteContext};
+use crate::shape::{Indent, Shape};
+use crate::skip::{is_skip_attr, SkipContext};
+use crate::source_map::{LineRangeUtils, SpanUtils};
+use crate::spanned::Spanned;
+use crate::stmt::Stmt;
+use crate::utils::{
+ self, contains_skip, count_newlines, depr_skip_annotation, format_unsafety, inner_attributes,
+ last_line_width, mk_sp, ptr_vec_to_ref_vec, rewrite_ident, starts_with_newline, stmt_expr,
+};
+use crate::{ErrorKind, FormatReport, FormattingError};
+
+/// Provides string slices corresponding to spans within the file being formatted.
+pub(crate) struct SnippetProvider {
+ /// A pointer to the content of the file we are formatting.
+ big_snippet: Lrc<String>,
+ /// A position of the start of `big_snippet`, used as an offset.
+ start_pos: usize,
+ /// The end position of the file that this snippet lives in.
+ end_pos: usize,
+}
+
+impl SnippetProvider {
+ pub(crate) fn span_to_snippet(&self, span: Span) -> Option<&str> {
+ let start_index = span.lo().to_usize().checked_sub(self.start_pos)?;
+ let end_index = span.hi().to_usize().checked_sub(self.start_pos)?;
+ Some(&self.big_snippet[start_index..end_index])
+ }
+
+ pub(crate) fn new(start_pos: BytePos, end_pos: BytePos, big_snippet: Lrc<String>) -> Self {
+ let start_pos = start_pos.to_usize();
+ let end_pos = end_pos.to_usize();
+ SnippetProvider {
+ big_snippet,
+ start_pos,
+ end_pos,
+ }
+ }
+
+ pub(crate) fn entire_snippet(&self) -> &str {
+ self.big_snippet.as_str()
+ }
+
+ pub(crate) fn start_pos(&self) -> BytePos {
+ BytePos::from_usize(self.start_pos)
+ }
+
+ pub(crate) fn end_pos(&self) -> BytePos {
+ BytePos::from_usize(self.end_pos)
+ }
+}
+
+pub(crate) struct FmtVisitor<'a> {
+ parent_context: Option<&'a RewriteContext<'a>>,
+ pub(crate) parse_sess: &'a ParseSess,
+ pub(crate) buffer: String,
+ pub(crate) last_pos: BytePos,
+ // FIXME: use an RAII util or closure for indenting
+ pub(crate) block_indent: Indent,
+ pub(crate) config: &'a Config,
+ pub(crate) is_if_else_block: bool,
+ pub(crate) snippet_provider: &'a SnippetProvider,
+ pub(crate) line_number: usize,
+ /// List of 1-based line ranges which were annotated with skip
+ /// Both bounds are inclusive.
+ pub(crate) skipped_range: Rc<RefCell<Vec<(usize, usize)>>>,
+ pub(crate) macro_rewrite_failure: bool,
+ pub(crate) report: FormatReport,
+ pub(crate) skip_context: SkipContext,
+ pub(crate) is_macro_def: bool,
+}
+
+impl<'a> Drop for FmtVisitor<'a> {
+ fn drop(&mut self) {
+ if let Some(ctx) = self.parent_context {
+ if self.macro_rewrite_failure {
+ ctx.macro_rewrite_failure.replace(true);
+ }
+ }
+ }
+}
+
+impl<'b, 'a: 'b> FmtVisitor<'a> {
+ fn set_parent_context(&mut self, context: &'a RewriteContext<'_>) {
+ self.parent_context = Some(context);
+ }
+
+ pub(crate) fn shape(&self) -> Shape {
+ Shape::indented(self.block_indent, self.config)
+ }
+
+ fn next_span(&self, hi: BytePos) -> Span {
+ mk_sp(self.last_pos, hi)
+ }
+
+ fn visit_stmt(&mut self, stmt: &Stmt<'_>, include_empty_semi: bool) {
+ debug!(
+ "visit_stmt: {}",
+ self.parse_sess.span_to_debug_info(stmt.span())
+ );
+
+ if stmt.is_empty() {
+ // If the statement is empty, just skip over it. Before that, make sure any comment
+ // snippet preceding the semicolon is picked up.
+ let snippet = self.snippet(mk_sp(self.last_pos, stmt.span().lo()));
+ let original_starts_with_newline = snippet
+ .find(|c| c != ' ')
+ .map_or(false, |i| starts_with_newline(&snippet[i..]));
+ let snippet = snippet.trim();
+ if !snippet.is_empty() {
+ // FIXME(calebcartwright 2021-01-03) - This exists strictly to maintain legacy
+ // formatting where rustfmt would preserve redundant semicolons on Items in a
+ // statement position.
+ // See comment within `walk_stmts` for more info
+ if include_empty_semi {
+ self.format_missing(stmt.span().hi());
+ } else {
+ if original_starts_with_newline {
+ self.push_str("\n");
+ }
+
+ self.push_str(&self.block_indent.to_string(self.config));
+ self.push_str(snippet);
+ }
+ } else if include_empty_semi {
+ self.push_str(";");
+ }
+ self.last_pos = stmt.span().hi();
+ return;
+ }
+
+ match stmt.as_ast_node().kind {
+ ast::StmtKind::Item(ref item) => {
+ self.visit_item(item);
+ self.last_pos = stmt.span().hi();
+ }
+ ast::StmtKind::Local(..) | ast::StmtKind::Expr(..) | ast::StmtKind::Semi(..) => {
+ let attrs = get_attrs_from_stmt(stmt.as_ast_node());
+ if contains_skip(attrs) {
+ self.push_skipped_with_span(
+ attrs,
+ stmt.span(),
+ get_span_without_attrs(stmt.as_ast_node()),
+ );
+ } else {
+ let shape = self.shape();
+ let rewrite = self.with_context(|ctx| stmt.rewrite(ctx, shape));
+ self.push_rewrite(stmt.span(), rewrite)
+ }
+ }
+ ast::StmtKind::MacCall(ref mac_stmt) => {
+ if self.visit_attrs(&mac_stmt.attrs, ast::AttrStyle::Outer) {
+ self.push_skipped_with_span(
+ &mac_stmt.attrs,
+ stmt.span(),
+ get_span_without_attrs(stmt.as_ast_node()),
+ );
+ } else {
+ self.visit_mac(&mac_stmt.mac, None, MacroPosition::Statement);
+ }
+ self.format_missing(stmt.span().hi());
+ }
+ ast::StmtKind::Empty => (),
+ }
+ }
+
+ /// Remove spaces between the opening brace and the first statement or the inner attribute
+ /// of the block.
+ fn trim_spaces_after_opening_brace(
+ &mut self,
+ b: &ast::Block,
+ inner_attrs: Option<&[ast::Attribute]>,
+ ) {
+ if let Some(first_stmt) = b.stmts.first() {
+ let hi = inner_attrs
+ .and_then(|attrs| inner_attributes(attrs).first().map(|attr| attr.span.lo()))
+ .unwrap_or_else(|| first_stmt.span().lo());
+ let missing_span = self.next_span(hi);
+ let snippet = self.snippet(missing_span);
+ let len = CommentCodeSlices::new(snippet)
+ .next()
+ .and_then(|(kind, _, s)| {
+ if kind == CodeCharKind::Normal {
+ s.rfind('\n')
+ } else {
+ None
+ }
+ });
+ if let Some(len) = len {
+ self.last_pos = self.last_pos + BytePos::from_usize(len);
+ }
+ }
+ }
+
+ pub(crate) fn visit_block(
+ &mut self,
+ b: &ast::Block,
+ inner_attrs: Option<&[ast::Attribute]>,
+ has_braces: bool,
+ ) {
+ debug!(
+ "visit_block: {}",
+ self.parse_sess.span_to_debug_info(b.span),
+ );
+
+ // Check if this block has braces.
+ let brace_compensation = BytePos(if has_braces { 1 } else { 0 });
+
+ self.last_pos = self.last_pos + brace_compensation;
+ self.block_indent = self.block_indent.block_indent(self.config);
+ self.push_str("{");
+ self.trim_spaces_after_opening_brace(b, inner_attrs);
+
+ // Format inner attributes if available.
+ if let Some(attrs) = inner_attrs {
+ self.visit_attrs(attrs, ast::AttrStyle::Inner);
+ }
+
+ self.walk_block_stmts(b);
+
+ if !b.stmts.is_empty() {
+ if let Some(expr) = stmt_expr(&b.stmts[b.stmts.len() - 1]) {
+ if utils::semicolon_for_expr(&self.get_context(), expr) {
+ self.push_str(";");
+ }
+ }
+ }
+
+ let rest_span = self.next_span(b.span.hi());
+ if out_of_file_lines_range!(self, rest_span) {
+ self.push_str(self.snippet(rest_span));
+ self.block_indent = self.block_indent.block_unindent(self.config);
+ } else {
+ // Ignore the closing brace.
+ let missing_span = self.next_span(b.span.hi() - brace_compensation);
+ self.close_block(missing_span, self.unindent_comment_on_closing_brace(b));
+ }
+ self.last_pos = source!(self, b.span).hi();
+ }
+
+ fn close_block(&mut self, span: Span, unindent_comment: bool) {
+ let config = self.config;
+
+ let mut last_hi = span.lo();
+ let mut unindented = false;
+ let mut prev_ends_with_newline = false;
+ let mut extra_newline = false;
+
+ let skip_normal = |s: &str| {
+ let trimmed = s.trim();
+ trimmed.is_empty() || trimmed.chars().all(|c| c == ';')
+ };
+
+ let comment_snippet = self.snippet(span);
+
+ let align_to_right = if unindent_comment && contains_comment(comment_snippet) {
+ let first_lines = comment_snippet.splitn(2, '/').next().unwrap_or("");
+ last_line_width(first_lines) > last_line_width(comment_snippet)
+ } else {
+ false
+ };
+
+ for (kind, offset, sub_slice) in CommentCodeSlices::new(comment_snippet) {
+ let sub_slice = transform_missing_snippet(config, sub_slice);
+
+ debug!("close_block: {:?} {:?} {:?}", kind, offset, sub_slice);
+
+ match kind {
+ CodeCharKind::Comment => {
+ if !unindented && unindent_comment && !align_to_right {
+ unindented = true;
+ self.block_indent = self.block_indent.block_unindent(config);
+ }
+ let span_in_between = mk_sp(last_hi, span.lo() + BytePos::from_usize(offset));
+ let snippet_in_between = self.snippet(span_in_between);
+ let mut comment_on_same_line = !snippet_in_between.contains('\n');
+
+ let mut comment_shape =
+ Shape::indented(self.block_indent, config).comment(config);
+ if self.config.version() == Version::Two && comment_on_same_line {
+ self.push_str(" ");
+ // put the first line of the comment on the same line as the
+ // block's last line
+ match sub_slice.find('\n') {
+ None => {
+ self.push_str(&sub_slice);
+ }
+ Some(offset) if offset + 1 == sub_slice.len() => {
+ self.push_str(&sub_slice[..offset]);
+ }
+ Some(offset) => {
+ let first_line = &sub_slice[..offset];
+ self.push_str(first_line);
+ self.push_str(&self.block_indent.to_string_with_newline(config));
+
+ // put the other lines below it, shaping it as needed
+ let other_lines = &sub_slice[offset + 1..];
+ let comment_str =
+ rewrite_comment(other_lines, false, comment_shape, config);
+ match comment_str {
+ Some(ref s) => self.push_str(s),
+ None => self.push_str(other_lines),
+ }
+ }
+ }
+ } else {
+ if comment_on_same_line {
+ // 1 = a space before `//`
+ let offset_len = 1 + last_line_width(&self.buffer)
+ .saturating_sub(self.block_indent.width());
+ match comment_shape
+ .visual_indent(offset_len)
+ .sub_width(offset_len)
+ {
+ Some(shp) => comment_shape = shp,
+ None => comment_on_same_line = false,
+ }
+ };
+
+ if comment_on_same_line {
+ self.push_str(" ");
+ } else {
+ if count_newlines(snippet_in_between) >= 2 || extra_newline {
+ self.push_str("\n");
+ }
+ self.push_str(&self.block_indent.to_string_with_newline(config));
+ }
+
+ let comment_str = rewrite_comment(&sub_slice, false, comment_shape, config);
+ match comment_str {
+ Some(ref s) => self.push_str(s),
+ None => self.push_str(&sub_slice),
+ }
+ }
+ }
+ CodeCharKind::Normal if skip_normal(&sub_slice) => {
+ extra_newline = prev_ends_with_newline && sub_slice.contains('\n');
+ continue;
+ }
+ CodeCharKind::Normal => {
+ self.push_str(&self.block_indent.to_string_with_newline(config));
+ self.push_str(sub_slice.trim());
+ }
+ }
+ prev_ends_with_newline = sub_slice.ends_with('\n');
+ extra_newline = false;
+ last_hi = span.lo() + BytePos::from_usize(offset + sub_slice.len());
+ }
+ if unindented {
+ self.block_indent = self.block_indent.block_indent(self.config);
+ }
+ self.block_indent = self.block_indent.block_unindent(self.config);
+ self.push_str(&self.block_indent.to_string_with_newline(config));
+ self.push_str("}");
+ }
+
+ fn unindent_comment_on_closing_brace(&self, b: &ast::Block) -> bool {
+ self.is_if_else_block && !b.stmts.is_empty()
+ }
+
+ // Note that this only gets called for function definitions. Required methods
+ // on traits do not get handled here.
+ pub(crate) fn visit_fn(
+ &mut self,
+ fk: visit::FnKind<'_>,
+ fd: &ast::FnDecl,
+ s: Span,
+ defaultness: ast::Defaultness,
+ inner_attrs: Option<&[ast::Attribute]>,
+ ) {
+ let indent = self.block_indent;
+ let block;
+ let rewrite = match fk {
+ visit::FnKind::Fn(_, ident, _, _, _, Some(ref b)) => {
+ block = b;
+ self.rewrite_fn_before_block(
+ indent,
+ ident,
+ &FnSig::from_fn_kind(&fk, fd, defaultness),
+ mk_sp(s.lo(), b.span.lo()),
+ )
+ }
+ _ => unreachable!(),
+ };
+
+ if let Some((fn_str, fn_brace_style)) = rewrite {
+ self.format_missing_with_indent(source!(self, s).lo());
+
+ if let Some(rw) = self.single_line_fn(&fn_str, block, inner_attrs) {
+ self.push_str(&rw);
+ self.last_pos = s.hi();
+ return;
+ }
+
+ self.push_str(&fn_str);
+ match fn_brace_style {
+ FnBraceStyle::SameLine => self.push_str(" "),
+ FnBraceStyle::NextLine => {
+ self.push_str(&self.block_indent.to_string_with_newline(self.config))
+ }
+ _ => unreachable!(),
+ }
+ self.last_pos = source!(self, block.span).lo();
+ } else {
+ self.format_missing(source!(self, block.span).lo());
+ }
+
+ self.visit_block(block, inner_attrs, true)
+ }
+
+ pub(crate) fn visit_item(&mut self, item: &ast::Item) {
+ skip_out_of_file_lines_range_visitor!(self, item.span);
+
+ // This is where we bail out if there is a skip attribute. This is only
+ // complex in the module case. It is complex because the module could be
+ // in a separate file and there might be attributes in both files, but
+ // the AST lumps them all together.
+ let filtered_attrs;
+ let mut attrs = &item.attrs;
+ let skip_context_saved = self.skip_context.clone();
+ self.skip_context.update_with_attrs(attrs);
+
+ let should_visit_node_again = match item.kind {
+ // For use/extern crate items, skip rewriting attributes but check for a skip attribute.
+ ast::ItemKind::Use(..) | ast::ItemKind::ExternCrate(_) => {
+ if contains_skip(attrs) {
+ self.push_skipped_with_span(attrs.as_slice(), item.span(), item.span());
+ false
+ } else {
+ true
+ }
+ }
+ // Module is inline, in this case we treat it like any other item.
+ _ if !is_mod_decl(item) => {
+ if self.visit_attrs(&item.attrs, ast::AttrStyle::Outer) {
+ self.push_skipped_with_span(item.attrs.as_slice(), item.span(), item.span());
+ false
+ } else {
+ true
+ }
+ }
+ // Module is not inline, but should be skipped.
+ ast::ItemKind::Mod(..) if contains_skip(&item.attrs) => false,
+ // Module is not inline and should not be skipped. We want
+ // to process only the attributes in the current file.
+ ast::ItemKind::Mod(..) => {
+ filtered_attrs = filter_inline_attrs(&item.attrs, item.span());
+ // Assert because if we should skip, it would have been caught by
+ // the case above.
+ assert!(!self.visit_attrs(&filtered_attrs, ast::AttrStyle::Outer));
+ attrs = &filtered_attrs;
+ true
+ }
+ _ => {
+ if self.visit_attrs(&item.attrs, ast::AttrStyle::Outer) {
+ self.push_skipped_with_span(item.attrs.as_slice(), item.span(), item.span());
+ false
+ } else {
+ true
+ }
+ }
+ };
+
+ // TODO(calebcartwright): consider enabling box_patterns feature gate
+ if should_visit_node_again {
+ match item.kind {
+ ast::ItemKind::Use(ref tree) => self.format_import(item, tree),
+ ast::ItemKind::Impl(ref iimpl) => {
+ let block_indent = self.block_indent;
+ let rw = self.with_context(|ctx| format_impl(ctx, item, iimpl, block_indent));
+ self.push_rewrite(item.span, rw);
+ }
+ ast::ItemKind::Trait(..) => {
+ let block_indent = self.block_indent;
+ let rw = self.with_context(|ctx| format_trait(ctx, item, block_indent));
+ self.push_rewrite(item.span, rw);
+ }
+ ast::ItemKind::TraitAlias(ref generics, ref generic_bounds) => {
+ let shape = Shape::indented(self.block_indent, self.config);
+ let rw = format_trait_alias(
+ &self.get_context(),
+ item.ident,
+ &item.vis,
+ generics,
+ generic_bounds,
+ shape,
+ );
+ self.push_rewrite(item.span, rw);
+ }
+ ast::ItemKind::ExternCrate(_) => {
+ let rw = rewrite_extern_crate(&self.get_context(), item, self.shape());
+ let span = if attrs.is_empty() {
+ item.span
+ } else {
+ mk_sp(attrs[0].span.lo(), item.span.hi())
+ };
+ self.push_rewrite(span, rw);
+ }
+ ast::ItemKind::Struct(..) | ast::ItemKind::Union(..) => {
+ self.visit_struct(&StructParts::from_item(item));
+ }
+ ast::ItemKind::Enum(ref def, ref generics) => {
+ self.format_missing_with_indent(source!(self, item.span).lo());
+ self.visit_enum(item.ident, &item.vis, def, generics, item.span);
+ self.last_pos = source!(self, item.span).hi();
+ }
+ ast::ItemKind::Mod(unsafety, ref mod_kind) => {
+ self.format_missing_with_indent(source!(self, item.span).lo());
+ self.format_mod(mod_kind, unsafety, &item.vis, item.span, item.ident, attrs);
+ }
+ ast::ItemKind::MacCall(ref mac) => {
+ self.visit_mac(mac, Some(item.ident), MacroPosition::Item);
+ }
+ ast::ItemKind::ForeignMod(ref foreign_mod) => {
+ self.format_missing_with_indent(source!(self, item.span).lo());
+ self.format_foreign_mod(foreign_mod, item.span);
+ }
+ ast::ItemKind::Static(..) | ast::ItemKind::Const(..) => {
+ self.visit_static(&StaticParts::from_item(item));
+ }
+ ast::ItemKind::Fn(ref fn_kind) => {
+ let ast::Fn {
+ defaultness,
+ ref sig,
+ ref generics,
+ ref body,
+ } = **fn_kind;
+ if let Some(ref body) = body {
+ let inner_attrs = inner_attributes(&item.attrs);
+ let fn_ctxt = match sig.header.ext {
+ ast::Extern::None => visit::FnCtxt::Free,
+ _ => visit::FnCtxt::Foreign,
+ };
+ self.visit_fn(
+ visit::FnKind::Fn(
+ fn_ctxt,
+ item.ident,
+ sig,
+ &item.vis,
+ generics,
+ Some(body),
+ ),
+ &sig.decl,
+ item.span,
+ defaultness,
+ Some(&inner_attrs),
+ )
+ } else {
+ let indent = self.block_indent;
+ let rewrite = self.rewrite_required_fn(
+ indent, item.ident, sig, &item.vis, generics, item.span,
+ );
+ self.push_rewrite(item.span, rewrite);
+ }
+ }
+ ast::ItemKind::TyAlias(ref ty_alias) => {
+ use ItemVisitorKind::Item;
+ self.visit_ty_alias_kind(ty_alias, &Item(item), item.span);
+ }
+ ast::ItemKind::GlobalAsm(..) => {
+ let snippet = Some(self.snippet(item.span).to_owned());
+ self.push_rewrite(item.span, snippet);
+ }
+ ast::ItemKind::MacroDef(ref def) => {
+ let rewrite = rewrite_macro_def(
+ &self.get_context(),
+ self.shape(),
+ self.block_indent,
+ def,
+ item.ident,
+ &item.vis,
+ item.span,
+ );
+ self.push_rewrite(item.span, rewrite);
+ }
+ };
+ }
+ self.skip_context = skip_context_saved;
+ }
+
+ fn visit_ty_alias_kind(
+ &mut self,
+ ty_kind: &ast::TyAlias,
+ visitor_kind: &ItemVisitorKind<'_>,
+ span: Span,
+ ) {
+ let rewrite = rewrite_type_alias(
+ ty_kind,
+ &self.get_context(),
+ self.block_indent,
+ visitor_kind,
+ span,
+ );
+ self.push_rewrite(span, rewrite);
+ }
+
+ fn visit_assoc_item(&mut self, visitor_kind: &ItemVisitorKind<'_>) {
+ use ItemVisitorKind::*;
+ // TODO(calebcartwright): Not sure the skip spans are correct
+ let (ai, skip_span, assoc_ctxt) = match visitor_kind {
+ AssocTraitItem(ai) => (*ai, ai.span(), visit::AssocCtxt::Trait),
+ AssocImplItem(ai) => (*ai, ai.span, visit::AssocCtxt::Impl),
+ _ => unreachable!(),
+ };
+ skip_out_of_file_lines_range_visitor!(self, ai.span);
+
+ if self.visit_attrs(&ai.attrs, ast::AttrStyle::Outer) {
+ self.push_skipped_with_span(ai.attrs.as_slice(), skip_span, skip_span);
+ return;
+ }
+
+ // TODO(calebcartwright): consider enabling box_patterns feature gate
+ match (&ai.kind, visitor_kind) {
+ (ast::AssocItemKind::Const(..), AssocTraitItem(_)) => {
+ self.visit_static(&StaticParts::from_trait_item(ai))
+ }
+ (ast::AssocItemKind::Const(..), AssocImplItem(_)) => {
+ self.visit_static(&StaticParts::from_impl_item(ai))
+ }
+ (ast::AssocItemKind::Fn(ref fn_kind), _) => {
+ let ast::Fn {
+ defaultness,
+ ref sig,
+ ref generics,
+ ref body,
+ } = **fn_kind;
+ if let Some(ref body) = body {
+ let inner_attrs = inner_attributes(&ai.attrs);
+ let fn_ctxt = visit::FnCtxt::Assoc(assoc_ctxt);
+ self.visit_fn(
+ visit::FnKind::Fn(fn_ctxt, ai.ident, sig, &ai.vis, generics, Some(body)),
+ &sig.decl,
+ ai.span,
+ defaultness,
+ Some(&inner_attrs),
+ );
+ } else {
+ let indent = self.block_indent;
+ let rewrite =
+ self.rewrite_required_fn(indent, ai.ident, sig, &ai.vis, generics, ai.span);
+ self.push_rewrite(ai.span, rewrite);
+ }
+ }
+ (ast::AssocItemKind::TyAlias(ref ty_alias), _) => {
+ self.visit_ty_alias_kind(ty_alias, visitor_kind, ai.span);
+ }
+ (ast::AssocItemKind::MacCall(ref mac), _) => {
+ self.visit_mac(mac, Some(ai.ident), MacroPosition::Item);
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ pub(crate) fn visit_trait_item(&mut self, ti: &ast::AssocItem) {
+ self.visit_assoc_item(&ItemVisitorKind::AssocTraitItem(ti));
+ }
+
+ pub(crate) fn visit_impl_item(&mut self, ii: &ast::AssocItem) {
+ self.visit_assoc_item(&ItemVisitorKind::AssocImplItem(ii));
+ }
+
+ fn visit_mac(&mut self, mac: &ast::MacCall, ident: Option<symbol::Ident>, pos: MacroPosition) {
+ skip_out_of_file_lines_range_visitor!(self, mac.span());
+
+ // 1 = ;
+ let shape = self.shape().saturating_sub_width(1);
+ let rewrite = self.with_context(|ctx| rewrite_macro(mac, ident, ctx, shape, pos));
+ // As of v638 of the rustc-ap-* crates, the associated span no longer includes
+ // the trailing semicolon. This determines the correct span to ensure scenarios
+ // with whitespace between the delimiters and trailing semi (i.e. `foo!(abc) ;`)
+ // are formatted correctly.
+ let (span, rewrite) = match macro_style(mac, &self.get_context()) {
+ Delimiter::Bracket | Delimiter::Parenthesis if MacroPosition::Item == pos => {
+ let search_span = mk_sp(mac.span().hi(), self.snippet_provider.end_pos());
+ let hi = self.snippet_provider.span_before(search_span, ";");
+ let target_span = mk_sp(mac.span().lo(), hi + BytePos(1));
+ let rewrite = rewrite.map(|rw| {
+ if !rw.ends_with(';') {
+ format!("{};", rw)
+ } else {
+ rw
+ }
+ });
+ (target_span, rewrite)
+ }
+ _ => (mac.span(), rewrite),
+ };
+
+ self.push_rewrite(span, rewrite);
+ }
+
+ pub(crate) fn push_str(&mut self, s: &str) {
+ self.line_number += count_newlines(s);
+ self.buffer.push_str(s);
+ }
+
+ #[allow(clippy::needless_pass_by_value)]
+ fn push_rewrite_inner(&mut self, span: Span, rewrite: Option<String>) {
+ if let Some(ref s) = rewrite {
+ self.push_str(s);
+ } else {
+ let snippet = self.snippet(span);
+ self.push_str(snippet.trim());
+ }
+ self.last_pos = source!(self, span).hi();
+ }
+
+ pub(crate) fn push_rewrite(&mut self, span: Span, rewrite: Option<String>) {
+ self.format_missing_with_indent(source!(self, span).lo());
+ self.push_rewrite_inner(span, rewrite);
+ }
+
+ pub(crate) fn push_skipped_with_span(
+ &mut self,
+ attrs: &[ast::Attribute],
+ item_span: Span,
+ main_span: Span,
+ ) {
+ self.format_missing_with_indent(source!(self, item_span).lo());
+ // do not take into account the lines with attributes as part of the skipped range
+ let attrs_end = attrs
+ .iter()
+ .map(|attr| self.parse_sess.line_of_byte_pos(attr.span.hi()))
+ .max()
+ .unwrap_or(1);
+ let first_line = self.parse_sess.line_of_byte_pos(main_span.lo());
+ // Statement can start after some newlines and/or spaces
+ // or it can be on the same line as the last attribute.
+ // So here we need to take a minimum between the two.
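+ // For example, with the last skip attribute ending on line 10 and the item
+ // itself starting on line 12, the skipped range starts at line 11.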
+ let lo = std::cmp::min(attrs_end + 1, first_line);
+ self.push_rewrite_inner(item_span, None);
+ let hi = self.line_number + 1;
+ self.skipped_range.borrow_mut().push((lo, hi));
+ }
+
+ pub(crate) fn from_context(ctx: &'a RewriteContext<'_>) -> FmtVisitor<'a> {
+ let mut visitor = FmtVisitor::from_parse_sess(
+ ctx.parse_sess,
+ ctx.config,
+ ctx.snippet_provider,
+ ctx.report.clone(),
+ );
+ visitor.skip_context.update(ctx.skip_context.clone());
+ visitor.set_parent_context(ctx);
+ visitor
+ }
+
+ pub(crate) fn from_parse_sess(
+ parse_session: &'a ParseSess,
+ config: &'a Config,
+ snippet_provider: &'a SnippetProvider,
+ report: FormatReport,
+ ) -> FmtVisitor<'a> {
+ FmtVisitor {
+ parent_context: None,
+ parse_sess: parse_session,
+ buffer: String::with_capacity(snippet_provider.big_snippet.len() * 2),
+ last_pos: BytePos(0),
+ block_indent: Indent::empty(),
+ config,
+ is_if_else_block: false,
+ snippet_provider,
+ line_number: 0,
+ skipped_range: Rc::new(RefCell::new(vec![])),
+ is_macro_def: false,
+ macro_rewrite_failure: false,
+ report,
+ skip_context: Default::default(),
+ }
+ }
+
+ pub(crate) fn opt_snippet(&'b self, span: Span) -> Option<&'a str> {
+ self.snippet_provider.span_to_snippet(span)
+ }
+
+ pub(crate) fn snippet(&'b self, span: Span) -> &'a str {
+ self.opt_snippet(span).unwrap()
+ }
+
+ // Returns true if we should skip the following item.
+ pub(crate) fn visit_attrs(&mut self, attrs: &[ast::Attribute], style: ast::AttrStyle) -> bool {
+ for attr in attrs {
+ if attr.has_name(depr_skip_annotation()) {
+ let file_name = self.parse_sess.span_to_filename(attr.span);
+ self.report.append(
+ file_name,
+ vec![FormattingError::from_span(
+ attr.span,
+ self.parse_sess,
+ ErrorKind::DeprecatedAttr,
+ )],
+ );
+ } else {
+ match &attr.kind {
+ ast::AttrKind::Normal(ref attribute_item, _)
+ if self.is_unknown_rustfmt_attr(&attribute_item.path.segments) =>
+ {
+ let file_name = self.parse_sess.span_to_filename(attr.span);
+ self.report.append(
+ file_name,
+ vec![FormattingError::from_span(
+ attr.span,
+ self.parse_sess,
+ ErrorKind::BadAttr,
+ )],
+ );
+ }
+ _ => (),
+ }
+ }
+ }
+ if contains_skip(attrs) {
+ return true;
+ }
+
+ let attrs: Vec<_> = attrs.iter().filter(|a| a.style == style).cloned().collect();
+ if attrs.is_empty() {
+ return false;
+ }
+
+ let rewrite = attrs.rewrite(&self.get_context(), self.shape());
+ let span = mk_sp(attrs[0].span.lo(), attrs[attrs.len() - 1].span.hi());
+ self.push_rewrite(span, rewrite);
+
+ false
+ }
+
+ fn is_unknown_rustfmt_attr(&self, segments: &[ast::PathSegment]) -> bool {
+ if segments[0].ident.to_string() != "rustfmt" {
+ return false;
+ }
+ !is_skip_attr(segments)
+ }
+
+ fn walk_mod_items(&mut self, items: &[rustc_ast::ptr::P<ast::Item>]) {
+ self.visit_items_with_reordering(&ptr_vec_to_ref_vec(items));
+ }
+
+ fn walk_stmts(&mut self, stmts: &[Stmt<'_>], include_current_empty_semi: bool) {
+ if stmts.is_empty() {
+ return;
+ }
+
+ // Extract leading `use ...;`.
+ let items: Vec<_> = stmts
+ .iter()
+ .take_while(|stmt| stmt.to_item().map_or(false, is_use_item))
+ .filter_map(|stmt| stmt.to_item())
+ .collect();
+
+ if items.is_empty() {
+ self.visit_stmt(&stmts[0], include_current_empty_semi);
+
+ // FIXME(calebcartwright 2021-01-03) - This exists strictly to maintain legacy
+ // formatting where rustfmt would preserve redundant semicolons on Items in a
+ // statement position.
+ //
+ // Starting in rustc-ap-* v692 (~2020-12-01) the rustc parser now parses this as
+ // two separate statements (Item and Empty kinds), whereas before it was parsed as
+ // a single statement with the statement's span including the redundant semicolon.
+ //
+ // rustfmt typically tosses unnecessary/redundant semicolons, and eventually we
+ // should toss these as well, but doing so at this time would
+ // break the Stability Guarantee
+ // N.B. This could be updated to utilize the version gates.
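+ // For example, `struct Foo;;` in a block is now parsed as an Item statement
+ // followed by an Empty statement, and the redundant semicolon is still
+ // preserved in the output.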
+ let include_next_empty = if stmts.len() > 1 {
+ matches!(
+ (&stmts[0].as_ast_node().kind, &stmts[1].as_ast_node().kind),
+ (ast::StmtKind::Item(_), ast::StmtKind::Empty)
+ )
+ } else {
+ false
+ };
+
+ self.walk_stmts(&stmts[1..], include_next_empty);
+ } else {
+ self.visit_items_with_reordering(&items);
+ self.walk_stmts(&stmts[items.len()..], false);
+ }
+ }
+
+ fn walk_block_stmts(&mut self, b: &ast::Block) {
+ self.walk_stmts(&Stmt::from_ast_nodes(b.stmts.iter()), false)
+ }
+
+ fn format_mod(
+ &mut self,
+ mod_kind: &ast::ModKind,
+ unsafety: ast::Unsafe,
+ vis: &ast::Visibility,
+ s: Span,
+ ident: symbol::Ident,
+ attrs: &[ast::Attribute],
+ ) {
+ let vis_str = utils::format_visibility(&self.get_context(), vis);
+ self.push_str(&*vis_str);
+ self.push_str(format_unsafety(unsafety));
+ self.push_str("mod ");
+ // Calling `to_owned()` to work around borrow checker.
+ let ident_str = rewrite_ident(&self.get_context(), ident).to_owned();
+ self.push_str(&ident_str);
+
+ if let ast::ModKind::Loaded(ref items, ast::Inline::Yes, ref spans) = mod_kind {
+ let ast::ModSpans {
+ inner_span,
+ inject_use_span: _,
+ } = *spans;
+ match self.config.brace_style() {
+ BraceStyle::AlwaysNextLine => {
+ let indent_str = self.block_indent.to_string_with_newline(self.config);
+ self.push_str(&indent_str);
+ self.push_str("{");
+ }
+ _ => self.push_str(" {"),
+ }
+ // Hackery to account for the closing }.
+ let mod_lo = self.snippet_provider.span_after(source!(self, s), "{");
+ let body_snippet =
+ self.snippet(mk_sp(mod_lo, source!(self, inner_span).hi() - BytePos(1)));
+ let body_snippet = body_snippet.trim();
+ if body_snippet.is_empty() {
+ self.push_str("}");
+ } else {
+ self.last_pos = mod_lo;
+ self.block_indent = self.block_indent.block_indent(self.config);
+ self.visit_attrs(attrs, ast::AttrStyle::Inner);
+ self.walk_mod_items(items);
+ let missing_span = self.next_span(inner_span.hi() - BytePos(1));
+ self.close_block(missing_span, false);
+ }
+ self.last_pos = source!(self, inner_span).hi();
+ } else {
+ self.push_str(";");
+ self.last_pos = source!(self, s).hi();
+ }
+ }
+
+ pub(crate) fn format_separate_mod(&mut self, m: &Module<'_>, end_pos: BytePos) {
+ self.block_indent = Indent::empty();
+ let skipped = self.visit_attrs(m.attrs(), ast::AttrStyle::Inner);
+ assert!(
+ !skipped,
+ "Skipping module must be handled before reaching this line."
+ );
+ self.walk_mod_items(&m.items);
+ self.format_missing_with_indent(end_pos);
+ }
+
+ pub(crate) fn skip_empty_lines(&mut self, end_pos: BytePos) {
+ while let Some(pos) = self
+ .snippet_provider
+ .opt_span_after(self.next_span(end_pos), "\n")
+ {
+ if let Some(snippet) = self.opt_snippet(self.next_span(pos)) {
+ if snippet.trim().is_empty() {
+ self.last_pos = pos;
+ } else {
+ return;
+ }
+ }
+ }
+ }
+
+ pub(crate) fn with_context<F>(&mut self, f: F) -> Option<String>
+ where
+ F: Fn(&RewriteContext<'_>) -> Option<String>,
+ {
+ let context = self.get_context();
+ let result = f(&context);
+
+ self.macro_rewrite_failure |= context.macro_rewrite_failure.get();
+ result
+ }
+
+ pub(crate) fn get_context(&self) -> RewriteContext<'_> {
+ RewriteContext {
+ parse_sess: self.parse_sess,
+ config: self.config,
+ inside_macro: Rc::new(Cell::new(false)),
+ use_block: Cell::new(false),
+ is_if_else_block: Cell::new(false),
+ force_one_line_chain: Cell::new(false),
+ snippet_provider: self.snippet_provider,
+ macro_rewrite_failure: Cell::new(false),
+ is_macro_def: self.is_macro_def,
+ report: self.report.clone(),
+ skip_context: self.skip_context.clone(),
+ skipped_range: self.skipped_range.clone(),
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/cargo-fmt/main.rs b/src/tools/rustfmt/tests/cargo-fmt/main.rs
new file mode 100644
index 000000000..348876cd2
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/main.rs
@@ -0,0 +1,98 @@
+// Integration tests for cargo-fmt.
+
+use std::env;
+use std::path::Path;
+use std::process::Command;
+
+/// Run the cargo-fmt executable and return its output.
+fn cargo_fmt(args: &[&str]) -> (String, String) {
+ let mut bin_dir = env::current_exe().unwrap();
+ bin_dir.pop(); // chop off test exe name
+ if bin_dir.ends_with("deps") {
+ bin_dir.pop();
+ }
+ let cmd = bin_dir.join(format!("cargo-fmt{}", env::consts::EXE_SUFFIX));
+
+ // Ensure cargo-fmt runs the rustfmt binary from the local target dir.
+ let path = env::var_os("PATH").unwrap_or_default();
+ let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+ paths.insert(0, bin_dir);
+ let new_path = env::join_paths(paths).unwrap();
+
+ match Command::new(&cmd).args(args).env("PATH", new_path).output() {
+ Ok(output) => (
+ String::from_utf8(output.stdout).expect("utf-8"),
+ String::from_utf8(output.stderr).expect("utf-8"),
+ ),
+ Err(e) => panic!("failed to run `{:?} {:?}`: {}", cmd, args, e),
+ }
+}
+
+macro_rules! assert_that {
+ ($args:expr, $check:ident $check_args:tt) => {
+ let (stdout, stderr) = cargo_fmt($args);
+ if !stdout.$check$check_args {
+ panic!(
+ "Output not expected for cargo-fmt {:?}\n\
+ expected: {}{}\n\
+ actual stdout:\n{}\n\
+ actual stderr:\n{}",
+ $args,
+ stringify!($check),
+ stringify!($check_args),
+ stdout,
+ stderr
+ );
+ }
+ };
+}
+
+#[ignore]
+#[test]
+fn version() {
+ assert_that!(&["--version"], starts_with("rustfmt "));
+ assert_that!(&["--version"], starts_with("rustfmt "));
+ assert_that!(&["--", "-V"], starts_with("rustfmt "));
+ assert_that!(&["--", "--version"], starts_with("rustfmt "));
+}
+
+#[ignore]
+#[test]
+fn print_config() {
+ assert_that!(
+ &["--", "--print-config", "current", "."],
+ contains("max_width = ")
+ );
+}
+
+#[ignore]
+#[test]
+fn rustfmt_help() {
+ assert_that!(&["--", "--help"], contains("Format Rust code"));
+ assert_that!(&["--", "-h"], contains("Format Rust code"));
+ assert_that!(&["--", "--help=config"], contains("Configuration Options:"));
+}
+
+#[ignore]
+#[test]
+fn cargo_fmt_out_of_line_test_modules() {
+ // See also https://github.com/rust-lang/rustfmt/issues/5119
+ let expected_modified_files = [
+ "tests/mod-resolver/test-submodule-issue-5119/src/lib.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub1.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub2.rs",
+ "tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/sub4.rs",
+ ];
+ let args = [
+ "-v",
+ "--check",
+ "--manifest-path",
+ "tests/mod-resolver/test-submodule-issue-5119/Cargo.toml",
+ ];
+ let (stdout, _) = cargo_fmt(&args);
+ for file in expected_modified_files {
+ let path = Path::new(file).canonicalize().unwrap();
+ assert!(stdout.contains(&format!("Diff in {}", path.display())))
+ }
+}
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/Cargo.toml
new file mode 100644
index 000000000..315364a64
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "cargo-fmt-test"
+version = "0.1.0"
+authors = ["calebcartwright"]
+edition = "2018"
+
+[dependencies]
+indexmap = "1.0.2"
+
+[workspace]
+members = [
+ "dependency-dir-name",
+] \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/Cargo.toml
new file mode 100644
index 000000000..4493882bf
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "dependency-crate-name"
+version = "0.1.0"
+authors = ["calebcartwright"]
+edition = "2018"
+
+[dependencies]
+subdep-crate-name = { path = "subdep-dir-name" }
+indexmap = "1.0.2"
+rusty-hook = "0.8.4" \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/src/lib.rs b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/src/lib.rs
new file mode 100644
index 000000000..e93b18d72
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/src/lib.rs
@@ -0,0 +1,7 @@
+#[cfg(test)]
+mod tests {
+#[test]
+fn it_works() {
+ assert_eq!(2 + 2, 4);
+}
+}
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/Cargo.toml
new file mode 100644
index 000000000..7dad09f40
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "subdep-crate-name"
+version = "0.1.0"
+authors = ["calebcartwright"]
+edition = "2018"
+
+[dependencies]
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/src/lib.rs b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/src/lib.rs
new file mode 100644
index 000000000..1c08c1c4f
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/dependency-dir-name/subdep-dir-name/src/lib.rs
@@ -0,0 +1,7 @@
+#[cfg(test)]
+mod tests {
+#[test]
+fn sub_test_that_works() {
+ assert_eq!(3 + 3, 6);
+}
+ }
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/src/main.rs b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/src/main.rs
new file mode 100644
index 000000000..f5c339a8d
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/divergent-crate-dir-names/src/main.rs
@@ -0,0 +1,3 @@
+fn main() {
+println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/Cargo.toml
new file mode 100644
index 000000000..eaf1d76f9
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "e"
+version = "0.1.0"
+edition = "2018"
+[dependencies]
+c = { path = "../ws/c" }
+
+[workspace]
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/src/main.rs b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/src/main.rs
new file mode 100644
index 000000000..1c26a3895
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/e/src/main.rs
@@ -0,0 +1 @@
+struct E{ }
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/Cargo.toml
new file mode 100644
index 000000000..202739b61
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/Cargo.toml
@@ -0,0 +1,5 @@
+[workspace]
+members = [
+ "a",
+ "b"
+] \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/Cargo.toml
new file mode 100644
index 000000000..712a11344
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "a"
+version = "0.1.0"
+edition = "2018"
+[dependencies]
+d = { path = "./d" }
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/Cargo.toml
new file mode 100644
index 000000000..fb0f06fe5
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "d"
+version = "0.1.0"
+edition = "2018"
+[dependencies]
+e = { path = "../../../e" }
+f = { path = "f" }
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/Cargo.toml
new file mode 100644
index 000000000..5c4fa5617
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/Cargo.toml
@@ -0,0 +1,4 @@
+[package]
+name = "f"
+version = "0.1.0"
+edition = "2018"
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/src/lib.rs b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/src/lib.rs
new file mode 100644
index 000000000..c655c4d5e
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/f/src/lib.rs
@@ -0,0 +1 @@
+struct F{ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/src/lib.rs b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/src/lib.rs
new file mode 100644
index 000000000..04e6e4cb9
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/d/src/lib.rs
@@ -0,0 +1 @@
+struct D{ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/src/main.rs b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/src/main.rs
new file mode 100644
index 000000000..04e6e4cb9
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/a/src/main.rs
@@ -0,0 +1 @@
+struct D{ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/Cargo.toml
new file mode 100644
index 000000000..47a24ff4f
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "b"
+version = "0.1.0"
+edition = "2018"
+[dependencies]
+c = { path = "../c" }
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/src/main.rs b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/src/main.rs
new file mode 100644
index 000000000..4833bbc69
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/b/src/main.rs
@@ -0,0 +1 @@
+struct B{ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/Cargo.toml b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/Cargo.toml
new file mode 100644
index 000000000..49fa6c395
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/Cargo.toml
@@ -0,0 +1,4 @@
+[package]
+name = "c"
+version = "0.1.0"
+edition = "2018"
diff --git a/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/src/lib.rs b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/src/lib.rs
new file mode 100644
index 000000000..1245ac91d
--- /dev/null
+++ b/src/tools/rustfmt/tests/cargo-fmt/source/workspaces/path-dep-above/ws/c/src/lib.rs
@@ -0,0 +1 @@
+struct C{ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/config/disable_all_formatting.toml b/src/tools/rustfmt/tests/config/disable_all_formatting.toml
new file mode 100644
index 000000000..c7ad93baf
--- /dev/null
+++ b/src/tools/rustfmt/tests/config/disable_all_formatting.toml
@@ -0,0 +1 @@
+disable_all_formatting = true
diff --git a/src/tools/rustfmt/tests/config/issue-1111.toml b/src/tools/rustfmt/tests/config/issue-1111.toml
new file mode 100644
index 000000000..44148a2d3
--- /dev/null
+++ b/src/tools/rustfmt/tests/config/issue-1111.toml
@@ -0,0 +1 @@
+reorder_imports = true
diff --git a/src/tools/rustfmt/tests/config/issue-2641.toml b/src/tools/rustfmt/tests/config/issue-2641.toml
new file mode 100644
index 000000000..11c9dca8a
--- /dev/null
+++ b/src/tools/rustfmt/tests/config/issue-2641.toml
@@ -0,0 +1 @@
+newline_style = "Windows"
diff --git a/src/tools/rustfmt/tests/config/issue-3779.toml b/src/tools/rustfmt/tests/config/issue-3779.toml
new file mode 100644
index 000000000..6ca52aee3
--- /dev/null
+++ b/src/tools/rustfmt/tests/config/issue-3779.toml
@@ -0,0 +1,3 @@
+ignore = [
+ "tests/**/issue-3779/ice.rs"
+]
diff --git a/src/tools/rustfmt/tests/config/skip_children.toml b/src/tools/rustfmt/tests/config/skip_children.toml
new file mode 100644
index 000000000..f52930d50
--- /dev/null
+++ b/src/tools/rustfmt/tests/config/skip_children.toml
@@ -0,0 +1 @@
+skip_children = true
diff --git a/src/tools/rustfmt/tests/config/small_tabs.toml b/src/tools/rustfmt/tests/config/small_tabs.toml
new file mode 100644
index 000000000..c3cfd3431
--- /dev/null
+++ b/src/tools/rustfmt/tests/config/small_tabs.toml
@@ -0,0 +1,10 @@
+max_width = 100
+comment_width = 80
+tab_spaces = 2
+newline_style = "Unix"
+brace_style = "SameLineWhere"
+fn_args_layout = "Tall"
+trailing_comma = "Vertical"
+indent_style = "Block"
+reorder_imports = false
+format_strings = true
diff --git a/src/tools/rustfmt/tests/coverage/source/comments.rs b/src/tools/rustfmt/tests/coverage/source/comments.rs
new file mode 100644
index 000000000..10940039e
--- /dev/null
+++ b/src/tools/rustfmt/tests/coverage/source/comments.rs
@@ -0,0 +1,7 @@
+// rustfmt-emit_mode: coverage
+/// Here's a doc comment!
+fn main() {
+ // foo is bar
+ let foo = "bar";
+ // loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment!!!!!
+}
diff --git a/src/tools/rustfmt/tests/coverage/target/comments.rs b/src/tools/rustfmt/tests/coverage/target/comments.rs
new file mode 100644
index 000000000..95e7b4705
--- /dev/null
+++ b/src/tools/rustfmt/tests/coverage/target/comments.rs
@@ -0,0 +1,7 @@
+XX XXXXXXXXXXXXXXXXXX XXXXXXXX
+/// Here's a doc comment!
+fn main() {
+ XX XXX XX XXX
+ let foo = "bar";
+ XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX
+}
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-4874/bar/baz.rs b/src/tools/rustfmt/tests/mod-resolver/issue-4874/bar/baz.rs
new file mode 100644
index 000000000..d31b675ea
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-4874/bar/baz.rs
@@ -0,0 +1,5 @@
+fn
+ fail_fmt_check
+ (
+
+ ) {} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-4874/foo.rs b/src/tools/rustfmt/tests/mod-resolver/issue-4874/foo.rs
new file mode 100644
index 000000000..246d84786
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-4874/foo.rs
@@ -0,0 +1 @@
+mod qux; \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-4874/foo/qux.rs b/src/tools/rustfmt/tests/mod-resolver/issue-4874/foo/qux.rs
new file mode 100644
index 000000000..d8bb610a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-4874/foo/qux.rs
@@ -0,0 +1,5 @@
+ fn
+ badly_formatted
+ (
+
+ ) {} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-4874/main.rs b/src/tools/rustfmt/tests/mod-resolver/issue-4874/main.rs
new file mode 100644
index 000000000..3609415b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-4874/main.rs
@@ -0,0 +1,8 @@
+fn main() {
+ println!("Hello, world!");
+}
+
+mod foo;
+mod bar {
+ mod baz;
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5063/foo.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5063/foo.rs
new file mode 100644
index 000000000..d56974773
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5063/foo.rs
@@ -0,0 +1,2 @@
+mod bar {
+ mod baz;} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5063/foo/bar/baz.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5063/foo/bar/baz.rs
new file mode 100644
index 000000000..3519b0ee5
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5063/foo/bar/baz.rs
@@ -0,0 +1 @@
+fn baz() { } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5063/main.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5063/main.rs
new file mode 100644
index 000000000..41c81c7bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5063/main.rs
@@ -0,0 +1,5 @@
+fn main() {
+ println!("Hello, world!");
+}
+
+mod foo; \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a.rs
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a/mod.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a/mod.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/a/mod.rs
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/lib.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/lib.rs
new file mode 100644
index 000000000..f21af614d
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5167/src/lib.rs
@@ -0,0 +1 @@
+mod a;
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/a.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/a.rs
new file mode 100644
index 000000000..cd686f561
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/a.rs
@@ -0,0 +1 @@
+fn main( ) { println!("Hello World!") }
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib.rs
new file mode 100644
index 000000000..696832913
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib.rs
@@ -0,0 +1,3 @@
+mod a;
+mod b;
+mod c;
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/b.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/b.rs
new file mode 100644
index 000000000..cd686f561
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/b.rs
@@ -0,0 +1 @@
+fn main( ) { println!("Hello World!") }
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d.rs
new file mode 100644
index 000000000..d1604aa23
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d.rs
@@ -0,0 +1,3 @@
+mod e;
+mod f;
+mod g;
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/explanation.txt b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/explanation.txt
new file mode 100644
index 000000000..92c9e3021
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/explanation.txt
@@ -0,0 +1,16 @@
+This file is contained in the './lib/c/d/' directory.
+
+The directory name './lib/c/d/' conflicts with the './lib/c/d.rs' file name.
+
+'./lib/c/d.rs' defines 3 external modules:
+
+ * mod e;
+ * mod f;
+ * mod g;
+
+Module resolution will fail if we look for './lib/c/d/e.rs' or './lib/c/d/e/mod.rs',
+so we should fall back to looking for './lib/c/e.rs', which correctly finds the module that
+rustfmt should format.
+
+'./lib/c/d/f.rs' and './lib/c/d/g/mod.rs' exist at the default submodule paths so we should be able
+to resolve these modules with no problems. \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/f.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/f.rs
new file mode 100644
index 000000000..cd686f561
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/f.rs
@@ -0,0 +1 @@
+fn main( ) { println!("Hello World!") }
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/g/mod.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/g/mod.rs
new file mode 100644
index 000000000..cd686f561
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/d/g/mod.rs
@@ -0,0 +1 @@
+fn main( ) { println!("Hello World!") }
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/e.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/e.rs
new file mode 100644
index 000000000..cd686f561
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/e.rs
@@ -0,0 +1 @@
+fn main( ) { println!("Hello World!") }
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/mod.rs b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/mod.rs
new file mode 100644
index 000000000..819046196
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/c/mod.rs
@@ -0,0 +1,3 @@
+mod d;
+
+fn main( ) { println!("Hello World!") }
diff --git a/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/explanation.txt b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/explanation.txt
new file mode 100644
index 000000000..d436a8076
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/issue-5198/lib/explanation.txt
@@ -0,0 +1,16 @@
+This file is contained in the './lib' directory.
+
+The directory name './lib' conflicts with the './lib.rs' file name.
+
+'lib.rs' defines 3 external modules:
+
+ * mod a;
+ * mod b;
+ * mod c;
+
+Module resolution will fail if we look for './lib/a.rs' or './lib/a/mod.rs',
+so we should fall back to looking for './a.rs', which correctly finds the module that
+rustfmt should format.
+
+'./lib/b.rs' and './lib/c/mod.rs' exist at the default submodule paths so we should be able
+to resolve these modules with no problems.
diff --git a/src/tools/rustfmt/tests/mod-resolver/module-not-found/bad_path_attribute/lib.rs b/src/tools/rustfmt/tests/mod-resolver/module-not-found/bad_path_attribute/lib.rs
new file mode 100644
index 000000000..2a63c961b
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/module-not-found/bad_path_attribute/lib.rs
@@ -0,0 +1,3 @@
+// module resolution fails because the path does not exist.
+#[path = "path/to/does_not_exist.rs"]
+mod a;
diff --git a/src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/a.rs b/src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/a.rs
new file mode 100644
index 000000000..4a1eac896
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/a.rs
@@ -0,0 +1,2 @@
+// module resolution fails because `./a/b.rs` does not exist
+mod b;
diff --git a/src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/lib.rs b/src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/lib.rs
new file mode 100644
index 000000000..f21af614d
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/module-not-found/relative_module/lib.rs
@@ -0,0 +1 @@
+mod a;
diff --git a/src/tools/rustfmt/tests/mod-resolver/module-not-found/sibling_module/lib.rs b/src/tools/rustfmt/tests/mod-resolver/module-not-found/sibling_module/lib.rs
new file mode 100644
index 000000000..d9d9e1e3c
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/module-not-found/sibling_module/lib.rs
@@ -0,0 +1,2 @@
+// module resolution fails because `./a.rs` does not exist
+mod a;
diff --git a/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo.rs b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo.rs
new file mode 100644
index 000000000..74889acf0
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo.rs
@@ -0,0 +1,5 @@
+#![rustfmt::skip]
+
+mod bar {
+
+ mod baz;} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo/bar/baz.rs b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo/bar/baz.rs
new file mode 100644
index 000000000..3519b0ee5
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/foo/bar/baz.rs
@@ -0,0 +1 @@
+fn baz() { } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/main.rs b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/main.rs
new file mode 100644
index 000000000..3122e4f22
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/main.rs
@@ -0,0 +1,9 @@
+#![rustfmt::skip]
+
+mod foo;
+mod one;
+
+fn main() {println!("Hello, world!");
+}
+
+// trailing comment
diff --git a/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/one.rs b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/one.rs
new file mode 100644
index 000000000..e7eb2c2d6
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/skip-files-issue-5065/one.rs
@@ -0,0 +1 @@
+struct One { value: String } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/Cargo.toml b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/Cargo.toml
new file mode 100644
index 000000000..0993f1279
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "rustfmt-test-submodule-issue"
+version = "0.1.0"
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/src/lib.rs b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/src/lib.rs
new file mode 100644
index 000000000..3f7ddba8a
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/src/lib.rs
@@ -0,0 +1,7 @@
+pub fn foo() -> i32 {
+3
+}
+
+pub fn bar() -> i32 {
+4
+}
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs
new file mode 100644
index 000000000..da4e86169
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1.rs
@@ -0,0 +1,8 @@
+mod test1 {
+#[cfg(unix)]
+mod sub1;
+#[cfg(not(unix))]
+mod sub2;
+
+mod sub3;
+}
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub1.rs b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub1.rs
new file mode 100644
index 000000000..b760ba23c
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub1.rs
@@ -0,0 +1,6 @@
+use rustfmt_test_submodule_issue::foo;
+
+#[test]
+fn test_foo() {
+assert_eq!(3, foo());
+}
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub2.rs b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub2.rs
new file mode 100644
index 000000000..4fd8286ea
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub2.rs
@@ -0,0 +1,6 @@
+use rustfmt_test_submodule_issue::bar;
+
+#[test]
+fn test_bar() {
+assert_eq!(4, bar());
+}
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/mod.rs b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/mod.rs
new file mode 100644
index 000000000..e029785bc
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/mod.rs
@@ -0,0 +1 @@
+mod sub4;
diff --git a/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/sub4.rs b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/sub4.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/mod-resolver/test-submodule-issue-5119/tests/test1/sub3/sub4.rs
diff --git a/src/tools/rustfmt/tests/parser/issue-4126/invalid.rs b/src/tools/rustfmt/tests/parser/issue-4126/invalid.rs
new file mode 100644
index 000000000..7709c8484
--- /dev/null
+++ b/src/tools/rustfmt/tests/parser/issue-4126/invalid.rs
@@ -0,0 +1,6 @@
+fn foo() {
+ if bar && if !baz {
+ next_is_none = Some(true);
+ }
+ println!("foo");
+}
diff --git a/src/tools/rustfmt/tests/parser/issue-4126/lib.rs b/src/tools/rustfmt/tests/parser/issue-4126/lib.rs
new file mode 100644
index 000000000..aac63e355
--- /dev/null
+++ b/src/tools/rustfmt/tests/parser/issue-4126/lib.rs
@@ -0,0 +1 @@
+mod invalid;
diff --git a/src/tools/rustfmt/tests/parser/issue_4418.rs b/src/tools/rustfmt/tests/parser/issue_4418.rs
new file mode 100644
index 000000000..ff30235f0
--- /dev/null
+++ b/src/tools/rustfmt/tests/parser/issue_4418.rs
@@ -0,0 +1 @@
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs b/src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs
new file mode 100644
index 000000000..2c2c81c91
--- /dev/null
+++ b/src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs
@@ -0,0 +1,11 @@
+fn main() {
+ if true {
+ println!("answer: {}", a_func();
+ } else {
+ println!("don't think so.");
+ }
+}
+
+fn a_func() -> i32 {
+ 42
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/rustfmt/main.rs b/src/tools/rustfmt/tests/rustfmt/main.rs
new file mode 100644
index 000000000..4c6d52726
--- /dev/null
+++ b/src/tools/rustfmt/tests/rustfmt/main.rs
@@ -0,0 +1,159 @@
+//! Integration tests for rustfmt.
+
+use std::env;
+use std::fs::remove_file;
+use std::path::Path;
+use std::process::Command;
+
+/// Run the rustfmt executable and return its output.
+fn rustfmt(args: &[&str]) -> (String, String) {
+ let mut bin_dir = env::current_exe().unwrap();
+ bin_dir.pop(); // chop off test exe name
+ if bin_dir.ends_with("deps") {
+ bin_dir.pop();
+ }
+ let cmd = bin_dir.join(format!("rustfmt{}", env::consts::EXE_SUFFIX));
+
+ // Ensure the rustfmt binary runs from the local target dir.
+ let path = env::var_os("PATH").unwrap_or_default();
+ let mut paths = env::split_paths(&path).collect::<Vec<_>>();
+ paths.insert(0, bin_dir);
+ let new_path = env::join_paths(paths).unwrap();
+
+ match Command::new(&cmd).args(args).env("PATH", new_path).output() {
+ Ok(output) => (
+ String::from_utf8(output.stdout).expect("utf-8"),
+ String::from_utf8(output.stderr).expect("utf-8"),
+ ),
+ Err(e) => panic!("failed to run `{:?} {:?}`: {}", cmd, args, e),
+ }
+}
+
+macro_rules! assert_that {
+ ($args:expr, $($check:ident $check_args:tt)&&+) => {
+ let (stdout, stderr) = rustfmt($args);
+ if $(!stdout.$check$check_args && !stderr.$check$check_args)||* {
+ panic!(
+ "Output not expected for rustfmt {:?}\n\
+ expected: {}\n\
+ actual stdout:\n{}\n\
+ actual stderr:\n{}",
+ $args,
+ stringify!($( $check$check_args )&&*),
+ stdout,
+ stderr
+ );
+ }
+ };
+}
+
+#[ignore]
+#[test]
+fn print_config() {
+ assert_that!(
+ &["--print-config", "unknown"],
+ starts_with("Unknown print-config option")
+ );
+ assert_that!(&["--print-config", "default"], contains("max_width = 100"));
+ assert_that!(&["--print-config", "minimal"], contains("PATH required"));
+ assert_that!(
+ &["--print-config", "minimal", "minimal-config"],
+ contains("doesn't work with standard input.")
+ );
+
+ let (stdout, stderr) = rustfmt(&[
+ "--print-config",
+ "minimal",
+ "minimal-config",
+ "src/shape.rs",
+ ]);
+ assert!(
+ Path::new("minimal-config").exists(),
+ "stdout:\n{}\nstderr:\n{}",
+ stdout,
+ stderr
+ );
+ remove_file("minimal-config").unwrap();
+}
+
+#[ignore]
+#[test]
+fn inline_config() {
+ // single invocation
+ assert_that!(
+ &[
+ "--print-config",
+ "current",
+ ".",
+ "--config=color=Never,edition=2018"
+ ],
+ contains("color = \"Never\"") && contains("edition = \"2018\"")
+ );
+
+ // multiple overriding invocations
+ assert_that!(
+ &[
+ "--print-config",
+ "current",
+ ".",
+ "--config",
+ "color=never,edition=2018",
+ "--config",
+ "color=always,format_strings=true"
+ ],
+ contains("color = \"Always\"")
+ && contains("edition = \"2018\"")
+ && contains("format_strings = true")
+ );
+}
+
+#[test]
+fn rustfmt_usage_text() {
+ let args = ["--help"];
+ let (stdout, _) = rustfmt(&args);
+ assert!(stdout.contains("Format Rust code\n\nusage: rustfmt [options] <file>..."));
+}
+
+#[test]
+fn mod_resolution_error_multiple_candidate_files() {
+ // See also https://github.com/rust-lang/rustfmt/issues/5167
+ let default_path = Path::new("tests/mod-resolver/issue-5167/src/a.rs");
+ let secondary_path = Path::new("tests/mod-resolver/issue-5167/src/a/mod.rs");
+ let error_message = format!(
+ "file for module found at both {:?} and {:?}",
+ default_path.canonicalize().unwrap(),
+ secondary_path.canonicalize().unwrap(),
+ );
+
+ let args = ["tests/mod-resolver/issue-5167/src/lib.rs"];
+ let (_stdout, stderr) = rustfmt(&args);
+ assert!(stderr.contains(&error_message))
+}
+
+#[test]
+fn mod_resolution_error_sibling_module_not_found() {
+ let args = ["tests/mod-resolver/module-not-found/sibling_module/lib.rs"];
+ let (_stdout, stderr) = rustfmt(&args);
+ // Module resolution fails because we're unable to find `a.rs` in the same directory as lib.rs
+ assert!(stderr.contains("a.rs does not exist"))
+}
+
+#[test]
+fn mod_resolution_error_relative_module_not_found() {
+ let args = ["tests/mod-resolver/module-not-found/relative_module/lib.rs"];
+ let (_stdout, stderr) = rustfmt(&args);
+ // The file `./a.rs` and directory `./a` both exist.
+ // Module resolution fails because we're unable to find `./a/b.rs`
+ #[cfg(not(windows))]
+ assert!(stderr.contains("a/b.rs does not exist"));
+ #[cfg(windows)]
+ assert!(stderr.contains("a\\b.rs does not exist"));
+}
+
+#[test]
+fn mod_resolution_error_path_attribute_does_not_exist() {
+ let args = ["tests/mod-resolver/module-not-found/bad_path_attribute/lib.rs"];
+ let (_stdout, stderr) = rustfmt(&args);
+ // The path attribute points to a file that does not exist
+ assert!(stderr.contains("does_not_exist.rs does not exist"));
+}
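(Aside, not part of the patch: the `rustfmt()` helper at the top of this test file locates the binary by walking up from the test executable's own path. A self-contained sketch of just that trick, using a made-up `sometool` name, is shown below.)

```rust
// Standalone sketch (not from the patch) of the binary-location trick used
// by the rustfmt() helper above: start at the test executable's path, pop
// its file name (plus a `deps` component if present), then join the tool
// name. `sometool` is a made-up example name.
use std::env;
use std::path::PathBuf;

fn sibling_binary(name: &str) -> PathBuf {
    let mut dir = env::current_exe().expect("path of the current executable");
    dir.pop(); // drop the executable's file name
    if dir.ends_with("deps") {
        dir.pop(); // cargo places test binaries in target/<profile>/deps
    }
    dir.join(format!("{}{}", name, env::consts::EXE_SUFFIX))
}

fn main() {
    println!("{}", sibling_binary("sometool").display());
}
```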
diff --git a/src/tools/rustfmt/tests/source/5131_crate.rs b/src/tools/rustfmt/tests/source/5131_crate.rs
new file mode 100644
index 000000000..96a316590
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/5131_crate.rs
@@ -0,0 +1,14 @@
+// rustfmt-imports_granularity: Crate
+
+use foo::a;
+use foo::a;
+use foo::b;
+use foo::b as b2;
+use foo::b::f;
+use foo::b::g;
+use foo::b::g as g2;
+use foo::c;
+use foo::d::e;
+use qux::h;
+use qux::h as h2;
+use qux::i;
diff --git a/src/tools/rustfmt/tests/source/5131_module.rs b/src/tools/rustfmt/tests/source/5131_module.rs
new file mode 100644
index 000000000..3e9139177
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/5131_module.rs
@@ -0,0 +1,33 @@
+// rustfmt-imports_granularity: Module
+
+#![allow(dead_code)]
+
+mod a {
+ pub mod b {
+ pub struct Data {
+ pub a: i32,
+ }
+ }
+
+ use crate::a::b::Data;
+ use crate::a::b::Data as Data2;
+
+ pub fn data(a: i32) -> Data {
+ Data { a }
+ }
+
+ pub fn data2(a: i32) -> Data2 {
+ Data2 { a }
+ }
+
+ #[cfg(test)]
+ mod tests {
+ use super::*;
+
+ #[test]
+ pub fn test() {
+ data(1);
+ data2(1);
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/5131_one.rs b/src/tools/rustfmt/tests/source/5131_one.rs
new file mode 100644
index 000000000..61ddf1341
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/5131_one.rs
@@ -0,0 +1,15 @@
+// rustfmt-imports_granularity: One
+
+pub use foo::x;
+pub use foo::x as x2;
+pub use foo::y;
+use bar::a;
+use bar::b;
+use bar::b::f;
+use bar::b::f as f2;
+use bar::b::g;
+use bar::c;
+use bar::d::e;
+use bar::d::e as e2;
+use qux::h;
+use qux::i;
diff --git a/src/tools/rustfmt/tests/source/alignment_2633/block_style.rs b/src/tools/rustfmt/tests/source/alignment_2633/block_style.rs
new file mode 100644
index 000000000..77fb2919e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/alignment_2633/block_style.rs
@@ -0,0 +1,8 @@
+// rustfmt-struct_field_align_threshold: 50
+
+fn func() {
+ Ok(ServerInformation { name: unwrap_message_string(items.get(0)),
+ vendor: unwrap_message_string(items.get(1)),
+ version: unwrap_message_string(items.get(2)),
+ spec_version: unwrap_message_string(items.get(3)), });
+}
diff --git a/src/tools/rustfmt/tests/source/alignment_2633/visual_style.rs b/src/tools/rustfmt/tests/source/alignment_2633/visual_style.rs
new file mode 100644
index 000000000..f34cc621e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/alignment_2633/visual_style.rs
@@ -0,0 +1,9 @@
+// rustfmt-struct_field_align_threshold: 50
+// rustfmt-indent_style: Visual
+
+fn func() {
+ Ok(ServerInformation { name: unwrap_message_string(items.get(0)),
+ vendor: unwrap_message_string(items.get(1)),
+ version: unwrap_message_string(items.get(2)),
+ spec_version: unwrap_message_string(items.get(3)), });
+}
diff --git a/src/tools/rustfmt/tests/source/array_comment.rs b/src/tools/rustfmt/tests/source/array_comment.rs
new file mode 100644
index 000000000..87372b279
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/array_comment.rs
@@ -0,0 +1,19 @@
+// Issue 2842
+// The comment should not make the last line shorter
+
+static XXX: [i8; 64] = [
+ 1, // Comment
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+];
+
+static XXX: [i8; 64] = [
+ 1,
+ // Comment
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+];
+
+static XXX: [i8; 64] = [
+ 1,
+ // Comment
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+];
diff --git a/src/tools/rustfmt/tests/source/assignment.rs b/src/tools/rustfmt/tests/source/assignment.rs
new file mode 100644
index 000000000..71de32556
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/assignment.rs
@@ -0,0 +1,34 @@
+// Test assignment
+
+fn main() {
+ let some_var : Type ;
+
+ let mut mutable;
+
+ let variable = AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA::BBBBBBBBBBBBBBBBBBBBBB::CCCCCCCCCCCCCCCCCCCCCC::EEEEEE;
+
+ variable = LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG;
+
+ let single_line_fit =
+ DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD;
+
+ single_line_fit = 5;single_lit_fit >>= 10;
+
+
+ // #2791
+ let x = 2;;;;
+}
+
+fn break_meee() {
+ {
+ (block_start, block_size, margin_block_start, margin_block_end) = match (block_start,
+ block_end,
+ block_size) {
+ x => 1,
+ _ => 2,
+ };
+ }
+}
+
+// #2018
+pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &'static str = "Unsized tuple coercion is not stable enough for use and is subject to change";
diff --git a/src/tools/rustfmt/tests/source/associated-types-bounds-wrapping.rs b/src/tools/rustfmt/tests/source/associated-types-bounds-wrapping.rs
new file mode 100644
index 000000000..464f428c7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/associated-types-bounds-wrapping.rs
@@ -0,0 +1,5 @@
+// Test proper wrapping of long associated type bounds
+
+pub trait HttpService {
+ type WsService: 'static + Service<Request = WsCommand, Response = WsResponse, Error = ServerError>;
+}
diff --git a/src/tools/rustfmt/tests/source/associated_type_bounds.rs b/src/tools/rustfmt/tests/source/associated_type_bounds.rs
new file mode 100644
index 000000000..8572778a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/associated_type_bounds.rs
@@ -0,0 +1,13 @@
+// See #3657 - https://github.com/rust-lang/rustfmt/issues/3657
+
+#![feature(associated_type_bounds)]
+
+fn f<I: Iterator<Item: Clone>>() {}
+
+fn g<I: Iterator<Item : Clone>>() {}
+
+fn h<I: Iterator<Item : Clone>>() {}
+
+fn i<I: Iterator<Item:Clone>>() {}
+
+fn j<I: Iterator<Item : Clone+'a>>() {}
diff --git a/src/tools/rustfmt/tests/source/async_block.rs b/src/tools/rustfmt/tests/source/async_block.rs
new file mode 100644
index 000000000..18cb4fb5f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/async_block.rs
@@ -0,0 +1,51 @@
+// rustfmt-edition: 2018
+
+fn main() {
+ let x = async {
+ Ok(())
+ };
+}
+
+fn baz() {
+ // test
+ let x = async {
+ // async blocks are great
+ Ok(())
+ };
+
+ let y = async {
+ Ok(())
+ }; // comment
+
+ spawn(
+ a,
+ async move {
+ action();
+ Ok(())
+ },
+ );
+
+ spawn(
+ a,
+ async move || {
+ action();
+ Ok(())
+ },
+ );
+
+ spawn(
+ a,
+ static async || {
+ action();
+ Ok(())
+ },
+ );
+
+ spawn(
+ a,
+ static async move || {
+ action();
+ Ok(())
+ },
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/async_fn.rs b/src/tools/rustfmt/tests/source/async_fn.rs
new file mode 100644
index 000000000..c63cf5b0f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/async_fn.rs
@@ -0,0 +1,28 @@
+// rustfmt-edition: 2018
+
+async fn bar() -> Result<(), ()> {
+ Ok(())
+}
+
+pub async fn baz() -> Result<(), ()> {
+ Ok(())
+}
+
+async unsafe fn foo() {
+ async move {
+ Ok(())
+ }
+}
+
+async unsafe fn rust() {
+ async move { // comment
+ Ok(())
+ }
+}
+
+async fn await_try() {
+ something
+ .await
+ ?
+ ;
+}
diff --git a/src/tools/rustfmt/tests/source/attrib.rs b/src/tools/rustfmt/tests/source/attrib.rs
new file mode 100644
index 000000000..d45fba552
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/attrib.rs
@@ -0,0 +1,234 @@
+// rustfmt-wrap_comments: true
+// Test attributes and doc comments are preserved.
+#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/", test(attr(deny(warnings))))]
+
+//! Doc comment
+
+#![attribute]
+
+//! Crate doc comment
+
+// Comment
+
+// Comment on attribute
+#![the(attribute)]
+
+// Another comment
+
+/// Blah blah blah.
+/// Blah blah blah.
+/// Blah blah blah.
+/// Blah blah blah.
+
+/// Blah blah blah.
+impl Bar {
+ /// Blah blah blooo.
+ /// Blah blah blooo.
+ /// Blah blah blooo.
+ /// Blah blah blooo.
+ #[an_attribute]
+ #[doc = "an attribute that shouldn't be normalized to a doc comment"]
+ fn foo(&mut self) -> isize {
+ }
+
+ /// Blah blah bing.
+ /// Blah blah bing.
+ /// Blah blah bing.
+
+
+ /// Blah blah bing.
+ /// Blah blah bing.
+ /// Blah blah bing.
+ pub fn f2(self) {
+ (foo, bar)
+ }
+
+ #[another_attribute]
+ fn f3(self) -> Dog {
+ }
+
+ /// Blah blah bing.
+
+ #[attrib1]
+ /// Blah blah bing.
+ #[attrib2]
+ // Another comment that needs rewrite because it's tooooooooooooooooooooooooooooooo loooooooooooong.
+ /// Blah blah bing.
+ fn f4(self) -> Cat {
+ }
+
+ // We want spaces around `=`
+ #[cfg(feature="nightly")]
+ fn f5(self) -> Monkey {}
+}
+
+// #984
+struct Foo {
+ # [ derive ( Clone , PartialEq , Debug , Deserialize , Serialize ) ]
+ foo: usize,
+}
+
+// #1668
+
+/// Default path (*nix)
+#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))]
+fn foo() {
+ #[cfg(target_os = "freertos")]
+ match port_id {
+ 'a' | 'A' => GpioPort { port_address: GPIO_A },
+ 'b' | 'B' => GpioPort { port_address: GPIO_B },
+ _ => panic!(),
+ }
+
+ #[cfg_attr(not(target_os = "freertos"), allow(unused_variables))]
+ let x = 3;
+}
+
+// #1777
+#[test]
+#[should_panic(expected = "(")]
+#[should_panic(expected = /* ( */ "(")]
+#[should_panic(/* ((((( */expected /* ((((( */= /* ((((( */ "("/* ((((( */)]
+#[should_panic(
+ /* (((((((( *//*
+ (((((((((()(((((((( */
+ expected = "("
+ // ((((((((
+)]
+fn foo() {}
+
+// #1799
+fn issue_1799() {
+ #[allow(unreachable_code)] // https://github.com/rust-lang/rust/issues/43336
+ Some( Err(error) ) ;
+
+ #[allow(unreachable_code)]
+ // https://github.com/rust-lang/rust/issues/43336
+ Some( Err(error) ) ;
+}
+
+// Formatting inner attributes
+fn inner_attributes() {
+ #![ this_is_an_inner_attribute ( foo ) ]
+
+ foo();
+}
+
+impl InnerAttributes() {
+ #![ this_is_an_inner_attribute ( foo ) ]
+
+ fn foo() {}
+}
+
+mod InnerAttributes {
+ #![ this_is_an_inner_attribute ( foo ) ]
+}
+
+fn attributes_on_statements() {
+ // Local
+ # [ attr ( on ( local ) ) ]
+ let x = 3;
+
+ // Item
+ # [ attr ( on ( item ) ) ]
+ use foo;
+
+ // Expr
+ # [ attr ( on ( expr ) ) ]
+ {}
+
+ // Semi
+ # [ attr ( on ( semi ) ) ]
+ foo();
+
+ // Mac
+ # [ attr ( on ( mac ) ) ]
+ foo!();
+}
+
+// Large derives
+#[derive(Add, Sub, Mul, Div, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash, Serialize, Mul)]
+
+
+/// Foo bar baz
+
+
+#[derive(Add, Sub, Mul, Div, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash, Serialize, Deserialize)]
+pub struct HP(pub u8);
+
+// Long `#[doc = "..."]`
+struct A { #[doc = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"] b: i32 }
+
+// #2647
+#[cfg(feature = "this_line_is_101_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")]
+pub fn foo() {}
+
+// path attrs
+#[clippy::bar]
+#[clippy::bar(a, b, c)]
+pub fn foo() {}
+
+mod issue_2620 {
+ #[derive(Debug, StructOpt)]
+#[structopt(about = "Display information about the character on FF Logs")]
+pub struct Params {
+ #[structopt(help = "The server the character is on")]
+ server: String,
+ #[structopt(help = "The character's first name")]
+ first_name: String,
+ #[structopt(help = "The character's last name")]
+ last_name: String,
+ #[structopt(
+ short = "j",
+ long = "job",
+ help = "The job to look at",
+ parse(try_from_str)
+ )]
+ job: Option<Job>
+}
+}
+
+// #2969
+#[cfg(not(all(feature="std",
+ any(target_os = "linux", target_os = "android",
+ target_os = "netbsd",
+ target_os = "dragonfly",
+ target_os = "haiku",
+ target_os = "emscripten",
+ target_os = "solaris",
+ target_os = "cloudabi",
+ target_os = "macos", target_os = "ios",
+ target_os = "freebsd",
+ target_os = "openbsd",
+ target_os = "redox",
+ target_os = "fuchsia",
+ windows,
+ all(target_arch = "wasm32", feature = "stdweb"),
+ all(target_arch = "wasm32", feature = "wasm-bindgen"),
+ ))))]
+type Os = NoSource;
+
+// #3313
+fn stmt_expr_attributes() {
+ let foo ;
+ #[must_use]
+ foo = false ;
+}
+
+// #3509
+fn issue3509() {
+ match MyEnum {
+ MyEnum::Option1 if cfg!(target_os = "windows") =>
+ #[cfg(target_os = "windows")]{
+ 1
+ }
+ }
+ match MyEnum {
+ MyEnum::Option1 if cfg!(target_os = "windows") =>
+ #[cfg(target_os = "windows")]
+ 1,
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/big-impl-block.rs b/src/tools/rustfmt/tests/source/big-impl-block.rs
new file mode 100644
index 000000000..f71e6515c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/big-impl-block.rs
@@ -0,0 +1,123 @@
+// #1357
+impl<
+ 'a,
+ Select,
+ From,
+ Distinct,
+ Where,
+ Order,
+ Limit,
+ Offset,
+ Groupby,
+ DB,
+> InternalBoxedDsl<'a, DB>
+ for SelectStatement<
+ Select,
+ From,
+ Distinct,
+ Where,
+ Order,
+ Limit,
+ Offset,
+ GroupBy,
+ > where
+ DB: Backend,
+ Select: QueryFragment<DB> + SelectableExpression<From> + 'a,
+ Distinct: QueryFragment<DB> + 'a,
+ Where: Into<Option<Box<QueryFragment<DB> + 'a>>>,
+ Order: QueryFragment<DB> + 'a,
+ Limit: QueryFragment<DB> + 'a,
+ Offset: QueryFragment<DB> + 'a,
+{
+ type Output = BoxedSelectStatement<'a, Select::SqlTypeForSelect, From, DB>;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ BoxedSelectStatement::new(
+ Box::new(self.select),
+ self.from,
+ Box::new(self.distinct),
+ self.where_clause.into(),
+ Box::new(self.order),
+ Box::new(self.limit),
+ Box::new(self.offset),
+ )
+ }
+}
+
+// #1369
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo for Bar {
+ fn foo() {}
+}
+impl Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar {
+ fn foo() {}
+}
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar {
+ fn foo() {}
+}
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> {
+ fn foo() {}
+}
+impl Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> {
+ fn foo() {}
+}
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> {
+ fn foo() {}
+}
+
+// #1689
+impl<M, S, F, X> SubSelectDirect<M, S, F, X>
+ where
+ M: select::Selector,
+ S: event::Stream,
+ F: for<'t> FnMut(transform::Api<
+ 't,
+ Stream<ContentStream<S>>,
+ >)
+ -> transform::Api<'t, X>,
+ X: event::Stream,
+{
+}
diff --git a/src/tools/rustfmt/tests/source/big-impl-visual.rs b/src/tools/rustfmt/tests/source/big-impl-visual.rs
new file mode 100644
index 000000000..7d906ac37
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/big-impl-visual.rs
@@ -0,0 +1,106 @@
+// rustfmt-indent_style: Visual
+
+// #1357
+impl<
+ 'a,
+ Select,
+ From,
+ Distinct,
+ Where,
+ Order,
+ Limit,
+ Offset,
+ Groupby,
+ DB,
+> InternalBoxedDsl<'a, DB>
+ for SelectStatement<
+ Select,
+ From,
+ Distinct,
+ Where,
+ Order,
+ Limit,
+ Offset,
+ GroupBy,
+ > where
+ DB: Backend,
+ Select: QueryFragment<DB> + SelectableExpression<From> + 'a,
+ Distinct: QueryFragment<DB> + 'a,
+ Where: Into<Option<Box<QueryFragment<DB> + 'a>>>,
+ Order: QueryFragment<DB> + 'a,
+ Limit: QueryFragment<DB> + 'a,
+ Offset: QueryFragment<DB> + 'a,
+{
+ type Output = BoxedSelectStatement<'a, Select::SqlTypeForSelect, From, DB>;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ BoxedSelectStatement::new(
+ Box::new(self.select),
+ self.from,
+ Box::new(self.distinct),
+ self.where_clause.into(),
+ Box::new(self.order),
+ Box::new(self.limit),
+ Box::new(self.offset),
+ )
+ }
+}
+
+// #1369
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo for Bar {
+ fn foo() {}
+}
+impl Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar {
+ fn foo() {}
+}
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar {
+ fn foo() {}
+}
+impl<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> Foo for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> {
+ fn foo() {}
+}
+impl Foo<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+> {
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName> Foo<ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName>
+ for Bar<ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName> {
+ fn foo() {}
+}
diff --git a/src/tools/rustfmt/tests/source/binary-expr.rs b/src/tools/rustfmt/tests/source/binary-expr.rs
new file mode 100644
index 000000000..f7502931d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binary-expr.rs
@@ -0,0 +1,10 @@
+// Binary expressions
+
+fn foo() {
+ // 100
+ let x = aaaaaaaaaa || bbbbbbbbbb || cccccccccc || dddddddddd && eeeeeeeeee || ffffffffff || ggg;
+ // 101
+ let x = aaaaaaaaaa || bbbbbbbbbb || cccccccccc || dddddddddd && eeeeeeeeee || ffffffffff || gggg;
+ // 104
+ let x = aaaaaaaaaa || bbbbbbbbbb || cccccccccc || dddddddddd && eeeeeeeeee || ffffffffff || gggggggg;
+}
diff --git a/src/tools/rustfmt/tests/source/binop-separator-back/bitwise.rs b/src/tools/rustfmt/tests/source/binop-separator-back/bitwise.rs
new file mode 100644
index 000000000..3804bf321
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binop-separator-back/bitwise.rs
@@ -0,0 +1,14 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ ^ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ & abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ | abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ << abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ >> abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+}
diff --git a/src/tools/rustfmt/tests/source/binop-separator-back/comp.rs b/src/tools/rustfmt/tests/source/binop-separator-back/comp.rs
new file mode 100644
index 000000000..50a271274
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binop-separator-back/comp.rs
@@ -0,0 +1,23 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ < abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ <= abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ > abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ >= abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ == abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ {
+ //
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/binop-separator-back/logic.rs b/src/tools/rustfmt/tests/source/binop-separator-back/logic.rs
new file mode 100644
index 000000000..8c297e5a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binop-separator-back/logic.rs
@@ -0,0 +1,7 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ && abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ || abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ {
+ //
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/binop-separator-back/math.rs b/src/tools/rustfmt/tests/source/binop-separator-back/math.rs
new file mode 100644
index 000000000..3af4aad16
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binop-separator-back/math.rs
@@ -0,0 +1,7 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ * abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ - abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ / abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ * abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ * abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ / abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ / abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+}
diff --git a/src/tools/rustfmt/tests/source/binop-separator-back/patterns.rs b/src/tools/rustfmt/tests/source/binop-separator-back/patterns.rs
new file mode 100644
index 000000000..a8c3b5cdd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binop-separator-back/patterns.rs
@@ -0,0 +1,9 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ match val {
+ ThisIsA::ReallyLongPatternNameToHelpOverflowTheNextValueOntoTheNextLine | ThisIsA::SecondValueSeparatedByAPipe | ThisIsA::ThirdValueSeparatedByAPipe => {
+ //
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/binop-separator-back/range.rs b/src/tools/rustfmt/tests/source/binop-separator-back/range.rs
new file mode 100644
index 000000000..bdd3de992
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/binop-separator-back/range.rs
@@ -0,0 +1,7 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ..abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ..=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+}
diff --git a/src/tools/rustfmt/tests/source/break-and-continue.rs b/src/tools/rustfmt/tests/source/break-and-continue.rs
new file mode 100644
index 000000000..c01d8a078
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/break-and-continue.rs
@@ -0,0 +1,23 @@
+// break and continue formatting
+
+#![feature(loop_break_value)]
+
+fn main() {
+ 'a: loop {
+ break 'a;
+ }
+
+ let mut done = false;
+ 'b: while !done {
+ done = true;
+ continue 'b;
+ }
+
+ let x = loop {
+ break 5;
+ };
+
+ let x = 'c: loop {
+ break 'c 5;
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/catch.rs b/src/tools/rustfmt/tests/source/catch.rs
new file mode 100644
index 000000000..541db1dc9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/catch.rs
@@ -0,0 +1,28 @@
+// rustfmt-edition: 2018
+#![feature(try_blocks)]
+
+fn main() {
+ let x = try {
+ foo()?
+ };
+
+ let x = try /* Invisible comment */ { foo()? };
+
+ let x = try {
+ unsafe { foo()? }
+ };
+
+ let y = match (try {
+ foo()?
+ }) {
+ _ => (),
+ };
+
+ try {
+ foo()?;
+ };
+
+ try {
+ // Regular try block
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/aarch64.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/aarch64.rs
new file mode 100644
index 000000000..ebae2bd28
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/aarch64.rs
@@ -0,0 +1,106 @@
+//! Aarch64 run-time features.
+
+/// Checks if `aarch64` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_aarch64_feature_detected {
+ ("neon") => {
+ // FIXME: this should be removed once we rename Aarch64 neon to asimd
+ cfg!(target_feature = "neon") ||
+ $crate::detect::check_for($crate::detect::Feature::asimd)
+ };
+ ("asimd") => {
+ cfg!(target_feature = "neon") ||
+ $crate::detect::check_for($crate::detect::Feature::asimd)
+ };
+ ("pmull") => {
+ cfg!(target_feature = "pmull") ||
+ $crate::detect::check_for($crate::detect::Feature::pmull)
+ };
+ ("fp") => {
+ cfg!(target_feature = "fp") ||
+ $crate::detect::check_for($crate::detect::Feature::fp)
+ };
+ ("fp16") => {
+ cfg!(target_feature = "fp16") ||
+ $crate::detect::check_for($crate::detect::Feature::fp16)
+ };
+ ("sve") => {
+ cfg!(target_feature = "sve") ||
+ $crate::detect::check_for($crate::detect::Feature::sve)
+ };
+ ("crc") => {
+ cfg!(target_feature = "crc") ||
+ $crate::detect::check_for($crate::detect::Feature::crc)
+ };
+ ("crypto") => {
+ cfg!(target_feature = "crypto") ||
+ $crate::detect::check_for($crate::detect::Feature::crypto)
+ };
+ ("lse") => {
+ cfg!(target_feature = "lse") ||
+ $crate::detect::check_for($crate::detect::Feature::lse)
+ };
+ ("rdm") => {
+ cfg!(target_feature = "rdm") ||
+ $crate::detect::check_for($crate::detect::Feature::rdm)
+ };
+ ("rcpc") => {
+ cfg!(target_feature = "rcpc") ||
+ $crate::detect::check_for($crate::detect::Feature::rcpc)
+ };
+ ("dotprod") => {
+ cfg!(target_feature = "dotprod") ||
+ $crate::detect::check_for($crate::detect::Feature::dotprod)
+ };
+ ("ras") => {
+ compile_error!("\"ras\" feature cannot be detected at run-time")
+ };
+ ("v8.1a") => {
+ compile_error!("\"v8.1a\" feature cannot be detected at run-time")
+ };
+ ("v8.2a") => {
+ compile_error!("\"v8.2a\" feature cannot be detected at run-time")
+ };
+ ("v8.3a") => {
+ compile_error!("\"v8.3a\" feature cannot be detected at run-time")
+ };
+ ($t:tt,) => {
+ is_aarch64_feature_detected!($t);
+ };
+ ($t:tt) => { compile_error!(concat!("unknown aarch64 target feature: ", $t)) };
+}
+
+/// ARM Aarch64 CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// ARM Advanced SIMD (ASIMD)
+ asimd,
+ /// Polynomial Multiply
+ pmull,
+ /// Floating point support
+ fp,
+ /// Half-float support.
+ fp16,
+ /// Scalable Vector Extension (SVE)
+ sve,
+ /// CRC32 (Cyclic Redundancy Check)
+ crc,
+ /// Crypto: AES + PMULL + SHA1 + SHA2
+ crypto,
+ /// Atomics (Large System Extension)
+ lse,
+ /// Rounding Double Multiply (ASIMDRDM)
+ rdm,
+ /// Release consistent Processor consistent (RcPc)
+ rcpc,
+ /// Vector Dot-Product (ASIMDDP)
+ dotprod,
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/arm.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/arm.rs
new file mode 100644
index 000000000..b2626bf29
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/arm.rs
@@ -0,0 +1,39 @@
+//! Run-time feature detection on ARM Aarch32.
+
+/// Checks if `arm` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_arm_feature_detected {
+ ("neon") => {
+ cfg!(target_feature = "neon") ||
+ $crate::detect::check_for($crate::detect::Feature::neon)
+ };
+ ("pmull") => {
+ cfg!(target_feature = "pmull") ||
+ $crate::detect::check_for($crate::detect::Feature::pmull)
+ };
+ ("v7") => { compile_error!("\"v7\" feature cannot be detected at run-time") };
+ ("vfp2") => { compile_error!("\"vfp2\" feature cannot be detected at run-time") };
+ ("vfp3") => { compile_error!("\"vfp3\" feature cannot be detected at run-time") };
+ ("vfp4") => { compile_error!("\"vfp4\" feature cannot be detected at run-time") };
+ ($t:tt,) => {
+ is_arm_feature_detected!($t);
+ };
+ ($t:tt) => { compile_error!(concat!("unknown arm target feature: ", $t)) };
+}
+
+/// ARM CPU Feature enum. Each variant denotes a position in a bitset for a
+/// particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// ARM Advanced SIMD (NEON) - Aarch32
+ neon,
+ /// Polynomial Multiply
+ pmull,
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips.rs
new file mode 100644
index 000000000..f4381b811
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips.rs
@@ -0,0 +1,29 @@
+//! Run-time feature detection on MIPS.
+
+/// Checks if `mips` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_mips_feature_detected {
+ ("msa") => {
+ cfg!(target_feature = "msa") ||
+ $crate::detect::check_for($crate::detect::Feature::msa)
+ };
+ ($t:tt,) => {
+ is_mips_feature_detected!($t);
+ };
+ ($t:tt) => { compile_error!(concat!("unknown mips target feature: ", $t)) };
+}
+
+/// MIPS CPU Feature enum. Each variant denotes a position in a bitset for a
+/// particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// MIPS SIMD Architecture (MSA)
+ msa,
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips64.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips64.rs
new file mode 100644
index 000000000..2663bc68b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/mips64.rs
@@ -0,0 +1,29 @@
+//! Run-time feature detection on MIPS64.
+
+/// Checks if `mips64` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_mips64_feature_detected {
+ ("msa") => {
+ cfg!(target_feature = "msa") ||
+ $crate::detect::check_for($crate::detect::Feature::msa)
+ };
+ ($t:tt,) => {
+ is_mips64_feature_detected!($t);
+ };
+ ($t:tt) => { compile_error!(concat!("unknown mips64 target feature: ", $t)) };
+}
+
+/// MIPS64 CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// MIPS SIMD Architecture (MSA)
+ msa,
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc.rs
new file mode 100644
index 000000000..a342dc1aa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc.rs
@@ -0,0 +1,42 @@
+//! Run-time feature detection on PowerPC.
+
+/// Checks if `powerpc` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_powerpc_feature_detected {
+ ("altivec") => {
+ cfg!(target_feature = "altivec") ||
+ $crate::detect::check_for($crate::detect::Feature::altivec)
+ };
+ ("vsx") => {
+ cfg!(target_feature = "vsx") ||
+ $crate::detect::check_for($crate::detect::Feature::vsx)
+ };
+ ("power8") => {
+ cfg!(target_feature = "power8") ||
+ $crate::detect::check_for($crate::detect::Feature::power8)
+ };
+ ($t:tt,) => {
+ is_powerpc_feature_detected!($t);
+ };
+ ($t:tt) => { compile_error!(concat!("unknown powerpc target feature: ", $t)) };
+}
+
+
+/// PowerPC CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// Altivec
+ altivec,
+ /// VSX
+ vsx,
+ /// Power8
+ power8,
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc64.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc64.rs
new file mode 100644
index 000000000..2e82c5692
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/powerpc64.rs
@@ -0,0 +1,42 @@
+//! Run-time feature detection on PowerPC64.
+
+/// Checks if `powerpc64` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_powerpc64_feature_detected {
+ ("altivec") => {
+ cfg!(target_feature = "altivec") ||
+ $crate::detect::check_for($crate::detect::Feature::altivec)
+ };
+ ("vsx") => {
+ cfg!(target_feature = "vsx") ||
+ $crate::detect::check_for($crate::detect::Feature::vsx)
+ };
+ ("power8") => {
+ cfg!(target_feature = "power8") ||
+ $crate::detect::check_for($crate::detect::Feature::power8)
+ };
+ ($t:tt,) => {
+ is_powerpc64_feature_detected!($t);
+ };
+ ($t:tt) => { compile_error!(concat!("unknown powerpc64 target feature: ", $t)) };
+}
+
+
+/// PowerPC64 CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// Altivec
+ altivec,
+ /// VSX
+ vsx,
+ /// Power8
+ power8,
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/arch/x86.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/x86.rs
new file mode 100644
index 000000000..d26f4ee89
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/arch/x86.rs
@@ -0,0 +1,348 @@
+//! This module implements minimal run-time feature detection for x86.
+//!
+//! The features are detected using the `detect_features` function below.
+//! This function uses the CPUID instruction to read the feature flags from the
+//! CPU and encodes them in a `usize` where each bit position represents
+//! whether a feature is available (bit is set) or unavailable (bit is cleared).
+//!
+//! The enum `Feature` is used to map bit positions to feature names, and the
+//! `__crate::detect::check_for!` macro is used to map string literals (e.g.,
+//! "avx") to these bit positions (e.g., `Feature::avx`).
+//!
+//! The run-time feature detection is performed by the
+//! `__crate::detect::check_for(Feature) -> bool` function. On its first call,
+//! this function queries the CPU for the available features and stores them
+//! in a global `AtomicUsize` variable. The query is performed by just checking
+//! whether the feature bit in this global variable is set or cleared.
+
+/// A macro to test at *runtime* whether a CPU feature is available on
+/// x86/x86-64 platforms.
+///
+/// This macro is provided in the standard library and will detect at runtime
+/// whether the specified CPU feature is detected. This does **not** resolve at
+/// compile time unless the specified feature is already enabled for the entire
+/// crate. Runtime detection currently relies mostly on the `cpuid` instruction.
+///
+/// This macro only takes one argument which is a string literal of the feature
+/// being tested for. The feature names supported are the lowercase versions of
+/// the ones defined by Intel in [their documentation][docs].
+///
+/// ## Supported arguments
+///
+/// This macro supports the same names that `#[target_feature]` supports. Unlike
+/// `#[target_feature]`, however, this macro does not support names separated
+/// with a comma. Instead testing for multiple features must be done through
+/// separate macro invocations for now.
+///
+/// Supported arguments are:
+///
+/// * `"aes"`
+/// * `"pclmulqdq"`
+/// * `"rdrand"`
+/// * `"rdseed"`
+/// * `"tsc"`
+/// * `"mmx"`
+/// * `"sse"`
+/// * `"sse2"`
+/// * `"sse3"`
+/// * `"ssse3"`
+/// * `"sse4.1"`
+/// * `"sse4.2"`
+/// * `"sse4a"`
+/// * `"sha"`
+/// * `"avx"`
+/// * `"avx2"`
+/// * `"avx512f"`
+/// * `"avx512cd"`
+/// * `"avx512er"`
+/// * `"avx512pf"`
+/// * `"avx512bw"`
+/// * `"avx512dq"`
+/// * `"avx512vl"`
+/// * `"avx512ifma"`
+/// * `"avx512vbmi"`
+/// * `"avx512vpopcntdq"`
+/// * `"f16c"`
+/// * `"fma"`
+/// * `"bmi1"`
+/// * `"bmi2"`
+/// * `"abm"`
+/// * `"lzcnt"`
+/// * `"tbm"`
+/// * `"popcnt"`
+/// * `"fxsr"`
+/// * `"xsave"`
+/// * `"xsaveopt"`
+/// * `"xsaves"`
+/// * `"xsavec"`
+/// * `"adx"`
+/// * `"rtm"`
+///
+/// [docs]: https://software.intel.com/sites/landingpage/IntrinsicsGuide
+#[macro_export]
+#[stable(feature = "simd_x86", since = "1.27.0")]
+#[allow_internal_unstable(stdsimd_internal,stdsimd)]
+macro_rules! is_x86_feature_detected {
+ ("aes") => {
+ cfg!(target_feature = "aes") || $crate::detect::check_for(
+ $crate::detect::Feature::aes) };
+ ("pclmulqdq") => {
+ cfg!(target_feature = "pclmulqdq") || $crate::detect::check_for(
+ $crate::detect::Feature::pclmulqdq) };
+ ("rdrand") => {
+ cfg!(target_feature = "rdrand") || $crate::detect::check_for(
+ $crate::detect::Feature::rdrand) };
+ ("rdseed") => {
+ cfg!(target_feature = "rdseed") || $crate::detect::check_for(
+ $crate::detect::Feature::rdseed) };
+ ("tsc") => {
+ cfg!(target_feature = "tsc") || $crate::detect::check_for(
+ $crate::detect::Feature::tsc) };
+ ("mmx") => {
+ cfg!(target_feature = "mmx") || $crate::detect::check_for(
+ $crate::detect::Feature::mmx) };
+ ("sse") => {
+ cfg!(target_feature = "sse") || $crate::detect::check_for(
+ $crate::detect::Feature::sse) };
+ ("sse2") => {
+ cfg!(target_feature = "sse2") || $crate::detect::check_for(
+ $crate::detect::Feature::sse2)
+ };
+ ("sse3") => {
+ cfg!(target_feature = "sse3") || $crate::detect::check_for(
+ $crate::detect::Feature::sse3)
+ };
+ ("ssse3") => {
+ cfg!(target_feature = "ssse3") || $crate::detect::check_for(
+ $crate::detect::Feature::ssse3)
+ };
+ ("sse4.1") => {
+ cfg!(target_feature = "sse4.1") || $crate::detect::check_for(
+ $crate::detect::Feature::sse4_1)
+ };
+ ("sse4.2") => {
+ cfg!(target_feature = "sse4.2") || $crate::detect::check_for(
+ $crate::detect::Feature::sse4_2)
+ };
+ ("sse4a") => {
+ cfg!(target_feature = "sse4a") || $crate::detect::check_for(
+ $crate::detect::Feature::sse4a)
+ };
+ ("sha") => {
+ cfg!(target_feature = "sha") || $crate::detect::check_for(
+ $crate::detect::Feature::sha)
+ };
+ ("avx") => {
+ cfg!(target_feature = "avx") || $crate::detect::check_for(
+ $crate::detect::Feature::avx)
+ };
+ ("avx2") => {
+ cfg!(target_feature = "avx2") || $crate::detect::check_for(
+ $crate::detect::Feature::avx2)
+ };
+ ("avx512f") => {
+ cfg!(target_feature = "avx512f") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512f)
+ };
+ ("avx512cd") => {
+ cfg!(target_feature = "avx512cd") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512cd)
+ };
+ ("avx512er") => {
+ cfg!(target_feature = "avx512er") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512er)
+ };
+ ("avx512pf") => {
+ cfg!(target_feature = "avx512pf") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512pf)
+ };
+ ("avx512bw") => {
+ cfg!(target_feature = "avx512bw") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512bw)
+ };
+ ("avx512dq") => {
+ cfg!(target_feature = "avx512dq") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512dq)
+ };
+ ("avx512vl") => {
+        cfg!(target_feature = "avx512vl") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512vl)
+ };
+ ("avx512ifma") => {
+ cfg!(target_feature = "avx512ifma") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512_ifma)
+ };
+ ("avx512vbmi") => {
+ cfg!(target_feature = "avx512vbmi") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512_vbmi)
+ };
+ ("avx512vpopcntdq") => {
+ cfg!(target_feature = "avx512vpopcntdq") || $crate::detect::check_for(
+ $crate::detect::Feature::avx512_vpopcntdq)
+ };
+ ("f16c") => {
+ cfg!(target_feature = "f16c") || $crate::detect::check_for(
+ $crate::detect::Feature::f16c)
+ };
+ ("fma") => {
+ cfg!(target_feature = "fma") || $crate::detect::check_for(
+ $crate::detect::Feature::fma)
+ };
+ ("bmi1") => {
+ cfg!(target_feature = "bmi1") || $crate::detect::check_for(
+ $crate::detect::Feature::bmi)
+ };
+ ("bmi2") => {
+ cfg!(target_feature = "bmi2") || $crate::detect::check_for(
+ $crate::detect::Feature::bmi2)
+ };
+ ("abm") => {
+ cfg!(target_feature = "abm") || $crate::detect::check_for(
+ $crate::detect::Feature::abm)
+ };
+ ("lzcnt") => {
+ cfg!(target_feature = "lzcnt") || $crate::detect::check_for(
+ $crate::detect::Feature::abm)
+ };
+ ("tbm") => {
+ cfg!(target_feature = "tbm") || $crate::detect::check_for(
+ $crate::detect::Feature::tbm)
+ };
+ ("popcnt") => {
+ cfg!(target_feature = "popcnt") || $crate::detect::check_for(
+ $crate::detect::Feature::popcnt)
+ };
+ ("fxsr") => {
+ cfg!(target_feature = "fxsr") || $crate::detect::check_for(
+ $crate::detect::Feature::fxsr)
+ };
+ ("xsave") => {
+ cfg!(target_feature = "xsave") || $crate::detect::check_for(
+ $crate::detect::Feature::xsave)
+ };
+ ("xsaveopt") => {
+ cfg!(target_feature = "xsaveopt") || $crate::detect::check_for(
+ $crate::detect::Feature::xsaveopt)
+ };
+ ("xsaves") => {
+ cfg!(target_feature = "xsaves") || $crate::detect::check_for(
+ $crate::detect::Feature::xsaves)
+ };
+ ("xsavec") => {
+ cfg!(target_feature = "xsavec") || $crate::detect::check_for(
+ $crate::detect::Feature::xsavec)
+ };
+ ("cmpxchg16b") => {
+ cfg!(target_feature = "cmpxchg16b") || $crate::detect::check_for(
+ $crate::detect::Feature::cmpxchg16b)
+ };
+ ("adx") => {
+ cfg!(target_feature = "adx") || $crate::detect::check_for(
+ $crate::detect::Feature::adx)
+ };
+ ("rtm") => {
+ cfg!(target_feature = "rtm") || $crate::detect::check_for(
+ $crate::detect::Feature::rtm)
+ };
+ ($t:tt,) => {
+ is_x86_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown target feature: ", $t))
+ };
+}
+
+/// X86 CPU Feature enum. Each variant denotes a position in a bitset for a
+/// particular feature.
+///
+/// This is an unstable implementation detail subject to change.
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[doc(hidden)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// AES (Advanced Encryption Standard New Instructions AES-NI)
+ aes,
+ /// CLMUL (Carry-less Multiplication)
+ pclmulqdq,
+ /// RDRAND
+ rdrand,
+ /// RDSEED
+ rdseed,
+ /// TSC (Time Stamp Counter)
+ tsc,
+ /// MMX
+ mmx,
+ /// SSE (Streaming SIMD Extensions)
+ sse,
+ /// SSE2 (Streaming SIMD Extensions 2)
+ sse2,
+ /// SSE3 (Streaming SIMD Extensions 3)
+ sse3,
+ /// SSSE3 (Supplemental Streaming SIMD Extensions 3)
+ ssse3,
+ /// SSE4.1 (Streaming SIMD Extensions 4.1)
+ sse4_1,
+ /// SSE4.2 (Streaming SIMD Extensions 4.2)
+ sse4_2,
+ /// SSE4a (Streaming SIMD Extensions 4a)
+ sse4a,
+ /// SHA
+ sha,
+ /// AVX (Advanced Vector Extensions)
+ avx,
+ /// AVX2 (Advanced Vector Extensions 2)
+ avx2,
+ /// AVX-512 F (Foundation)
+ avx512f,
+ /// AVX-512 CD (Conflict Detection Instructions)
+ avx512cd,
+ /// AVX-512 ER (Exponential and Reciprocal Instructions)
+ avx512er,
+ /// AVX-512 PF (Prefetch Instructions)
+ avx512pf,
+ /// AVX-512 BW (Byte and Word Instructions)
+ avx512bw,
+ /// AVX-512 DQ (Doubleword and Quadword)
+ avx512dq,
+ /// AVX-512 VL (Vector Length Extensions)
+ avx512vl,
+ /// AVX-512 IFMA (Integer Fused Multiply Add)
+ avx512_ifma,
+ /// AVX-512 VBMI (Vector Byte Manipulation Instructions)
+ avx512_vbmi,
+ /// AVX-512 VPOPCNTDQ (Vector Population Count Doubleword and
+ /// Quadword)
+ avx512_vpopcntdq,
+ /// F16C (Conversions between IEEE-754 `binary16` and `binary32` formats)
+ f16c,
+ /// FMA (Fused Multiply Add)
+ fma,
+ /// BMI1 (Bit Manipulation Instructions 1)
+ bmi,
+    /// BMI2 (Bit Manipulation Instructions 2)
+ bmi2,
+ /// ABM (Advanced Bit Manipulation) on AMD / LZCNT (Leading Zero
+ /// Count) on Intel
+ abm,
+ /// TBM (Trailing Bit Manipulation)
+ tbm,
+ /// POPCNT (Population Count)
+ popcnt,
+    /// FXSR (Floating-point context fast save and restore)
+ fxsr,
+ /// XSAVE (Save Processor Extended States)
+ xsave,
+ /// XSAVEOPT (Save Processor Extended States Optimized)
+ xsaveopt,
+ /// XSAVES (Save Processor Extended States Supervisor)
+ xsaves,
+ /// XSAVEC (Save Processor Extended States Compacted)
+ xsavec,
+ /// CMPXCH16B, a 16-byte compare-and-swap instruction
+ cmpxchg16b,
+ /// ADX, Intel ADX (Multi-Precision Add-Carry Instruction Extensions)
+ adx,
+ /// RTM, Intel (Restricted Transactional Memory)
+ rtm,
+}
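(Aside, not part of the patch: this fixture mirrors the standard library's `std::is_x86_feature_detected!` macro. A minimal, hedged usage sketch follows; the `dot` function is illustrative and the AVX2 branch is left as a stub.)

```rust
// Illustrative sketch only (not from the patch). It uses the stable
// std::is_x86_feature_detected! macro, which the fixture above mirrors,
// to pick a code path at run time.
fn dot(a: &[f32], b: &[f32]) -> f32 {
    #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
    {
        if is_x86_feature_detected!("avx2") {
            // A real program would dispatch to an AVX2-specialized kernel
            // here; this sketch just falls through to the scalar loop.
        }
    }
    a.iter().zip(b).map(|(x, y)| x * y).sum()
}

fn main() {
    assert_eq!(dot(&[1.0, 2.0], &[3.0, 4.0]), 11.0);
}
```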
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/bit.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/bit.rs
new file mode 100644
index 000000000..578f0b16b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/bit.rs
@@ -0,0 +1,9 @@
+//! Bit manipulation utilities.
+
+/// Tests the `bit` of `x`.
+#[allow(dead_code)]
+#[inline]
+pub(crate) fn test(x: usize, bit: u32) -> bool {
+ debug_assert!(bit < 32, "bit index out-of-bounds");
+ x & (1 << bit) != 0
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/cache.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/cache.rs
new file mode 100644
index 000000000..92bc4b58d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/cache.rs
@@ -0,0 +1,164 @@
+//! Caches run-time feature detection so that it only needs to be computed
+//! once.
+
+#![allow(dead_code)] // not used on all platforms
+
+use crate::sync::atomic::Ordering;
+
+#[cfg(target_pointer_width = "64")]
+use crate::sync::atomic::AtomicU64;
+
+#[cfg(target_pointer_width = "32")]
+use crate::sync::atomic::AtomicU32;
+
+/// Sets the `bit` of `x`.
+#[inline]
+const fn set_bit(x: u64, bit: u32) -> u64 {
+ x | 1 << bit
+}
+
+/// Tests the `bit` of `x`.
+#[inline]
+const fn test_bit(x: u64, bit: u32) -> bool {
+ x & (1 << bit) != 0
+}
+
+/// Maximum number of features that can be cached.
+const CACHE_CAPACITY: u32 = 63;
+
+/// This type is used to initialize the cache
+#[derive(Copy, Clone)]
+pub(crate) struct Initializer(u64);
+
+#[allow(clippy::use_self)]
+impl Default for Initializer {
+ fn default() -> Self {
+ Initializer(0)
+ }
+}
+
+impl Initializer {
+ /// Tests the `bit` of the cache.
+ #[allow(dead_code)]
+ #[inline]
+ pub(crate) fn test(self, bit: u32) -> bool {
+ // FIXME: this way of making sure that the cache is large enough is
+ // brittle.
+ debug_assert!(
+ bit < CACHE_CAPACITY,
+ "too many features, time to increase the cache size!"
+ );
+ test_bit(self.0, bit)
+ }
+
+ /// Sets the `bit` of the cache.
+ #[inline]
+ pub(crate) fn set(&mut self, bit: u32) {
+ // FIXME: this way of making sure that the cache is large enough is
+ // brittle.
+ debug_assert!(
+ bit < CACHE_CAPACITY,
+ "too many features, time to increase the cache size!"
+ );
+ let v = self.0;
+ self.0 = set_bit(v, bit);
+ }
+}
+
+/// This global variable is a cache of the features supported by the CPU.
+static CACHE: Cache = Cache::uninitialized();
+
+/// Feature cache with capacity for `CACHE_CAPACITY` features.
+///
+/// Note: the last feature bit is used to represent an
+/// uninitialized cache.
+#[cfg(target_pointer_width = "64")]
+struct Cache(AtomicU64);
+
+#[cfg(target_pointer_width = "64")]
+#[allow(clippy::use_self)]
+impl Cache {
+ /// Creates an uninitialized cache.
+ #[allow(clippy::declare_interior_mutable_const)]
+ const fn uninitialized() -> Self {
+ Cache(AtomicU64::new(u64::max_value()))
+ }
+ /// Is the cache uninitialized?
+ #[inline]
+ pub(crate) fn is_uninitialized(&self) -> bool {
+ self.0.load(Ordering::Relaxed) == u64::max_value()
+ }
+
+ /// Is the `bit` in the cache set?
+ #[inline]
+ pub(crate) fn test(&self, bit: u32) -> bool {
+ test_bit(CACHE.0.load(Ordering::Relaxed), bit)
+ }
+
+ /// Initializes the cache.
+ #[inline]
+ pub(crate) fn initialize(&self, value: Initializer) {
+ self.0.store(value.0, Ordering::Relaxed);
+ }
+}
+
+/// Feature cache with capacity for `CACHE_CAPACITY` features.
+///
+/// Note: the last feature bit is used to represent an
+/// uninitialized cache.
+#[cfg(target_pointer_width = "32")]
+struct Cache(AtomicU32, AtomicU32);
+
+#[cfg(target_pointer_width = "32")]
+impl Cache {
+ /// Creates an uninitialized cache.
+ const fn uninitialized() -> Self {
+ Cache(
+ AtomicU32::new(u32::max_value()),
+ AtomicU32::new(u32::max_value()),
+ )
+ }
+ /// Is the cache uninitialized?
+ #[inline]
+ pub(crate) fn is_uninitialized(&self) -> bool {
+ self.1.load(Ordering::Relaxed) == u32::max_value()
+ }
+
+ /// Is the `bit` in the cache set?
+ #[inline]
+ pub(crate) fn test(&self, bit: u32) -> bool {
+ if bit < 32 {
+ test_bit(CACHE.0.load(Ordering::Relaxed) as u64, bit)
+ } else {
+ test_bit(CACHE.1.load(Ordering::Relaxed) as u64, bit - 32)
+ }
+ }
+
+ /// Initializes the cache.
+ #[inline]
+ pub(crate) fn initialize(&self, value: Initializer) {
+ let lo: u32 = value.0 as u32;
+ let hi: u32 = (value.0 >> 32) as u32;
+ self.0.store(lo, Ordering::Relaxed);
+ self.1.store(hi, Ordering::Relaxed);
+ }
+}
+
+/// Tests the `bit` of the storage. If the storage has not been initialized,
+/// initializes it with the result of `f()`.
+///
+/// On its first invocation, it detects the CPU features and caches them in the
+/// `CACHE` global variable as an `AtomicU64`.
+///
+/// It uses the `Feature` variant to index into this variable as a bitset. If
+/// the bit is set, the feature is enabled, and otherwise it is disabled.
+#[inline]
+pub(crate) fn test<F>(bit: u32, f: F) -> bool
+where
+ F: FnOnce() -> Initializer,
+{
+ if CACHE.is_uninitialized() {
+ CACHE.initialize(f());
+ }
+ CACHE.test(bit)
+}
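(Aside, not part of the patch: the cache above packs one bit per feature into a single atomic word that is filled in on first use, with an all-ones value reserved to mean "uninitialized". A standalone sketch of that idea, with a fake `detect()` standing in for the real CPU query, might look like the following.)

```rust
// Standalone sketch (not from the patch) of the bit-cache scheme in
// cache.rs above. detect() is a stand-in for the real CPU query; the
// all-ones value marks the cache as not yet initialized.
use std::sync::atomic::{AtomicU64, Ordering};

static CACHE: AtomicU64 = AtomicU64::new(u64::MAX);

const fn set_bit(x: u64, bit: u32) -> u64 {
    x | 1 << bit
}

const fn test_bit(x: u64, bit: u32) -> bool {
    x & (1 << bit) != 0
}

fn detect() -> u64 {
    // Pretend only feature bit 3 is supported on this machine.
    set_bit(0, 3)
}

fn check_for(bit: u32) -> bool {
    if CACHE.load(Ordering::Relaxed) == u64::MAX {
        CACHE.store(detect(), Ordering::Relaxed);
    }
    test_bit(CACHE.load(Ordering::Relaxed), bit)
}

fn main() {
    assert!(check_for(3));
    assert!(!check_for(7));
}
```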
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/error_macros.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/error_macros.rs
new file mode 100644
index 000000000..6769757ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/error_macros.rs
@@ -0,0 +1,150 @@
+//! The `is_{target_arch}_feature_detected!` macros are only available on their
+//! respective architectures. These macros provide better error messages when the
+//! user attempts to call them on a different architecture.
+
+/// Prevents compilation if `is_x86_feature_detected` is used somewhere
+/// else than `x86` and `x86_64` targets.
+#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_x86_feature_detected {
+ ($t: tt) => {
+ compile_error!(
+ r#"
+ is_x86_feature_detected can only be used on x86 and x86_64 targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
+ if is_x86_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_arm_feature_detected` is used somewhere else
+/// than `ARM` targets.
+#[cfg(not(target_arch = "arm"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_arm_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+ is_arm_feature_detected can only be used on ARM targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "arm")] {
+ if is_arm_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_aarch64_feature_detected` is used somewhere else
+/// than `aarch64` targets.
+#[cfg(not(target_arch = "aarch64"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_aarch64_feature_detected {
+ ($t: tt) => {
+ compile_error!(
+ r#"
+ is_aarch64_feature_detected can only be used on AArch64 targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "aarch64")] {
+ if is_aarch64_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_powerpc_feature_detected` is used somewhere else
+/// than `PowerPC` targets.
+#[cfg(not(target_arch = "powerpc"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_powerpc_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+is_powerpc_feature_detected can only be used on PowerPC targets.
+You can prevent it from being used in other architectures by
+guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "powerpc")] {
+ if is_powerpc_feature_detected(...) { ... }
+ }
+"#
+ )
+ };
+}
+
+/// Prevents compilation if `is_powerpc64_feature_detected` is used somewhere
+/// else than `PowerPC64` targets.
+#[cfg(not(target_arch = "powerpc64"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_powerpc64_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+is_powerpc64_feature_detected can only be used on PowerPC64 targets.
+You can prevent it from being used in other architectures by
+guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "powerpc64")] {
+ if is_powerpc64_feature_detected(...) { ... }
+ }
+"#
+ )
+ };
+}
+
+/// Prevents compilation if `is_mips_feature_detected` is used somewhere else
+/// than `MIPS` targets.
+#[cfg(not(target_arch = "mips"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_mips_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+ is_mips_feature_detected can only be used on MIPS targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "mips")] {
+ if is_mips_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_mips64_feature_detected` is used somewhere else
+/// than `MIPS64` targets.
+#[cfg(not(target_arch = "mips64"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_mips64_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+ is_mips64_feature_detected can only be used on MIPS64 targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "mips64")] {
+ if is_mips64_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/mod.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/mod.rs
new file mode 100644
index 000000000..f446e88ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/mod.rs
@@ -0,0 +1,85 @@
+//! This module implements run-time feature detection.
+//!
+//! The `is_{arch}_feature_detected!("feature-name")` macros take the name of a
+//! feature as a string-literal, and return a boolean indicating whether the
+//! feature is enabled at run-time or not.
+//!
+//! These macros do two things:
+//! * map the string-literal into an integer stored as a `Feature` enum,
+//! * call a `os::check_for(x: Feature)` function that returns `true` if the
+//! feature is enabled.
+//!
+//! The `Feature` enums are also implemented in the `arch/{target_arch}.rs`
+//! modules.
+//!
+//! The `check_for` functions are, in general, Operating System dependent. Most
+//! architectures do not allow user-space programs to query the feature bits
+//! due to security concerns (x86 is the big exception). These functions are
+//! implemented in the `os/{target_os}.rs` modules.
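+//!
+//! As a rough sketch (not the literal macro expansion), a call such as
+//! `is_x86_feature_detected!("avx2")` maps the string to `Feature::avx2` and
+//! then asks the OS layer through `check_for(Feature::avx2)`.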
+
+#[macro_use]
+mod error_macros;
+
+cfg_if! {
+ if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
+ #[path = "arch/x86.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "arm")] {
+ #[path = "arch/arm.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "aarch64")] {
+ #[path = "arch/aarch64.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "powerpc")] {
+ #[path = "arch/powerpc.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "powerpc64")] {
+ #[path = "arch/powerpc64.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "mips")] {
+ #[path = "arch/mips.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "mips64")] {
+ #[path = "arch/mips64.rs"]
+ #[macro_use]
+ mod arch;
+ } else {
+ // Unimplemented architecture:
+ mod arch {
+ pub enum Feature {
+ Null
+ }
+ }
+ }
+}
+pub use self::arch::Feature;
+
+mod bit;
+mod cache;
+
+cfg_if! {
+ if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
+ // On x86/x86_64 no OS specific functionality is required.
+ #[path = "os/x86.rs"]
+ mod os;
+ } else if #[cfg(all(target_os = "linux", feature = "use_std"))] {
+ #[path = "os/linux/mod.rs"]
+ mod os;
+ } else if #[cfg(target_os = "freebsd")] {
+ #[cfg(target_arch = "aarch64")]
+ #[path = "os/aarch64.rs"]
+ mod aarch64;
+ #[path = "os/freebsd/mod.rs"]
+ mod os;
+ } else {
+ #[path = "os/other.rs"]
+ mod os;
+ }
+}
+pub use self::os::check_for;
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/aarch64.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/aarch64.rs
new file mode 100644
index 000000000..dfb8c8770
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/aarch64.rs
@@ -0,0 +1,79 @@
+//! Run-time feature detection for AArch64 on any OS that emulates the mrs instruction.
+//!
+//! On FreeBSD >= 12.0, Linux >= 4.11 and other operating systems, it is possible to use
+//! privileged system registers from userspace to check CPU feature support.
+//!
+//! AArch64 system registers ID_AA64ISAR0_EL1, ID_AA64PFR0_EL1, ID_AA64ISAR1_EL1
+//! have bits dedicated to features like AdvSIMD, CRC32, AES, atomics (LSE), etc.
+//! Each part of the register indicates the level of support for a certain feature, e.g.
+//! when ID_AA64ISAR0_EL1\[7:4\] is >= 1, AES is supported; when it's >= 2, PMULL is supported.
+//!
+//! For proper support of [SoCs where different cores have different capabilities](https://medium.com/@jadr2ddude/a-big-little-problem-a-tale-of-big-little-gone-wrong-e7778ce744bb),
+//! the OS has to always report only the features supported by all cores, like [FreeBSD does](https://reviews.freebsd.org/D17137#393947).
+//!
+//! References:
+//!
+//! - [Zircon implementation](https://fuchsia.googlesource.com/zircon/+/master/kernel/arch/arm64/feature.cpp)
+//! - [Linux documentation](https://www.kernel.org/doc/Documentation/arm64/cpu-feature-registers.txt)
+
+use crate::detect::{Feature, cache};
+
+/// Try to read the features from the system registers.
+///
+/// This will cause SIGILL if the current OS is not trapping the mrs instruction.
+pub(crate) fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+
+ {
+ let mut enable_feature = |f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // ID_AA64ISAR0_EL1 - Instruction Set Attribute Register 0
+ let aa64isar0: u64;
+ unsafe { asm!("mrs $0, ID_AA64ISAR0_EL1" : "=r"(aa64isar0)); }
+
+ let aes = bits_shift(aa64isar0, 7, 4) >= 1;
+ let pmull = bits_shift(aa64isar0, 7, 4) >= 2;
+ let sha1 = bits_shift(aa64isar0, 11, 8) >= 1;
+ let sha2 = bits_shift(aa64isar0, 15, 12) >= 1;
+ enable_feature(Feature::pmull, pmull);
+ // Crypto is specified as AES + PMULL + SHA1 + SHA2 per LLVM/hosts.cpp
+ enable_feature(Feature::crypto, aes && pmull && sha1 && sha2);
+ enable_feature(Feature::lse, bits_shift(aa64isar0, 23, 20) >= 1);
+ enable_feature(Feature::crc, bits_shift(aa64isar0, 19, 16) >= 1);
+
+ // ID_AA64PFR0_EL1 - Processor Feature Register 0
+ let aa64pfr0: u64;
+ unsafe { asm!("mrs $0, ID_AA64PFR0_EL1" : "=r"(aa64pfr0)); }
+
+ let fp = bits_shift(aa64pfr0, 19, 16) < 0xF;
+ let fphp = bits_shift(aa64pfr0, 19, 16) >= 1;
+ let asimd = bits_shift(aa64pfr0, 23, 20) < 0xF;
+ let asimdhp = bits_shift(aa64pfr0, 23, 20) >= 1;
+ enable_feature(Feature::fp, fp);
+ enable_feature(Feature::fp16, fphp);
+ // SIMD support requires float support - if half-floats are
+ // supported, it also requires half-float support:
+ enable_feature(Feature::asimd, fp && asimd && (!fphp | asimdhp));
+ // SIMD extensions require SIMD support:
+ enable_feature(Feature::rdm, asimd && bits_shift(aa64isar0, 31, 28) >= 1);
+ enable_feature(Feature::dotprod, asimd && bits_shift(aa64isar0, 47, 44) >= 1);
+ enable_feature(Feature::sve, asimd && bits_shift(aa64pfr0, 35, 32) >= 1);
+
+ // ID_AA64ISAR1_EL1 - Instruction Set Attribute Register 1
+ let aa64isar1: u64;
+ unsafe { asm!("mrs $0, ID_AA64ISAR1_EL1" : "=r"(aa64isar1)); }
+
+ enable_feature(Feature::rcpc, bits_shift(aa64isar1, 23, 20) >= 1);
+ }
+
+ value
+}
+
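+/// Extracts the field of `x` spanning bits `low..=high` (inclusive): the value
+/// is shifted down by `low` and masked to `high - low + 1` bits, so e.g.
+/// `bits_shift(reg, 7, 4)` yields the 4-bit field in bits 7:4 of `reg`.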
+#[inline]
+fn bits_shift(x: u64, high: usize, low: usize) -> u64 {
+ (x >> low) & ((1 << (high - low + 1)) - 1)
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/aarch64.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/aarch64.rs
new file mode 100644
index 000000000..910d2f33b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/aarch64.rs
@@ -0,0 +1,28 @@
+//! Run-time feature detection for AArch64 on FreeBSD.
+
+use crate::detect::{Feature, cache};
+use super::super::aarch64::detect_features;
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn dump() {
+ println!("asimd: {:?}", is_aarch64_feature_detected!("asimd"));
+ println!("pmull: {:?}", is_aarch64_feature_detected!("pmull"));
+ println!("fp: {:?}", is_aarch64_feature_detected!("fp"));
+ println!("fp16: {:?}", is_aarch64_feature_detected!("fp16"));
+ println!("sve: {:?}", is_aarch64_feature_detected!("sve"));
+ println!("crc: {:?}", is_aarch64_feature_detected!("crc"));
+ println!("crypto: {:?}", is_aarch64_feature_detected!("crypto"));
+ println!("lse: {:?}", is_aarch64_feature_detected!("lse"));
+ println!("rdm: {:?}", is_aarch64_feature_detected!("rdm"));
+ println!("rcpc: {:?}", is_aarch64_feature_detected!("rcpc"));
+ println!("dotprod: {:?}", is_aarch64_feature_detected!("dotprod"));
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/arm.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/arm.rs
new file mode 100644
index 000000000..e13847dcb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/arm.rs
@@ -0,0 +1,27 @@
+//! Run-time feature detection for ARM on FreeBSD
+
+use crate::detect::{Feature, cache};
+use super::{auxvec};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
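+ // The masks below test the same HWCAP bits as the Linux ARM code in this
+ // crate: hwcap bit 12 (0x1000) for NEON and hwcap2 bit 1 (0x2) for PMULL.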
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::neon, auxv.hwcap & 0x00001000 != 0);
+ enable_feature(&mut value, Feature::pmull, auxv.hwcap2 & 0x00000002 != 0);
+ return value;
+ }
+ value
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/auxvec.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/auxvec.rs
new file mode 100644
index 000000000..a2bac7676
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/auxvec.rs
@@ -0,0 +1,86 @@
+//! Parses ELF auxiliary vectors.
+#![cfg_attr(any(target_arch = "arm", target_arch = "powerpc64"), allow(dead_code))]
+
+/// Key to access the CPU Hardware capabilities bitfield.
+pub(crate) const AT_HWCAP: usize = 25;
+/// Key to access the CPU Hardware capabilities 2 bitfield.
+pub(crate) const AT_HWCAP2: usize = 26;
+
+/// Cache HWCAP bitfields of the ELF Auxiliary Vector.
+///
+/// If an entry cannot be read all the bits in the bitfield are set to zero.
+/// This should be interpreted as all the features being disabled.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct AuxVec {
+ pub hwcap: usize,
+ pub hwcap2: usize,
+}
+
+/// ELF Auxiliary Vector
+///
+/// The auxiliary vector is a memory region in a running ELF program's stack
+/// composed of (key: usize, value: usize) pairs.
+///
+/// The keys used in the aux vector are platform dependent. For FreeBSD, they are
+/// defined in [sys/elf_common.h][elf_common_h]. The hardware capabilities of a given
+/// CPU can be queried with the `AT_HWCAP` and `AT_HWCAP2` keys.
+///
+/// Note that run-time feature detection is not invoked for features that can
+/// be detected at compile-time.
+///
+/// [elf_common_h]: https://svnweb.freebsd.org/base/release/12.0.0/sys/sys/elf_common.h?revision=341707
+pub(crate) fn auxv() -> Result<AuxVec, ()> {
+ if let Ok(hwcap) = archauxv(AT_HWCAP) {
+ if let Ok(hwcap2) = archauxv(AT_HWCAP2) {
+ if hwcap != 0 && hwcap2 != 0 {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ }
+ Err(())
+}
+
+/// Tries to read the `key` from the auxiliary vector.
+fn archauxv(key: usize) -> Result<usize, ()> {
+ use crate::mem;
+
+ #[derive (Copy, Clone)]
+ #[repr(C)]
+ pub struct Elf_Auxinfo {
+ pub a_type: usize,
+ pub a_un: unnamed,
+ }
+ #[derive (Copy, Clone)]
+ #[repr(C)]
+ pub union unnamed {
+ pub a_val: libc::c_long,
+ pub a_ptr: *mut libc::c_void,
+ pub a_fcn: Option<unsafe extern "C" fn() -> ()>,
+ }
+
+ let mut auxv: [Elf_Auxinfo; 27] =
+ [Elf_Auxinfo{a_type: 0, a_un: unnamed{a_val: 0,},}; 27];
+
+ let mut len: libc::c_uint = mem::size_of_val(&auxv) as libc::c_uint;
+
+ unsafe {
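+ // Ask the kernel for this process's ELF auxiliary vector via
+ // sysctl(CTL_KERN, KERN_PROC, KERN_PROC_AUXV, getpid()), then scan the
+ // returned (a_type, value) entries for `key`.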
+ let mut mib = [libc::CTL_KERN, libc::KERN_PROC, libc::KERN_PROC_AUXV, libc::getpid()];
+
+ let ret = libc::sysctl(mib.as_mut_ptr(),
+ mib.len() as u32,
+ &mut auxv as *mut _ as *mut _,
+ &mut len as *mut _ as *mut _,
+ 0 as *mut libc::c_void,
+ 0,
+ );
+
+ if ret != -1 {
+ for i in 0..auxv.len() {
+ if auxv[i].a_type == key {
+ return Ok(auxv[i].a_un.a_val as usize);
+ }
+ }
+ }
+ }
+ return Ok(0);
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/mod.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/mod.rs
new file mode 100644
index 000000000..1a5338a35
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/mod.rs
@@ -0,0 +1,22 @@
+//! Run-time feature detection on FreeBSD
+
+mod auxvec;
+
+cfg_if! {
+ if #[cfg(target_arch = "aarch64")] {
+ mod aarch64;
+ pub use self::aarch64::check_for;
+ } else if #[cfg(target_arch = "arm")] {
+ mod arm;
+ pub use self::arm::check_for;
+ } else if #[cfg(target_arch = "powerpc64")] {
+ mod powerpc;
+ pub use self::powerpc::check_for;
+ } else {
+ use crate::arch::detect::Feature;
+ /// Performs run-time feature detection.
+ pub fn check_for(_x: Feature) -> bool {
+ false
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/powerpc.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/powerpc.rs
new file mode 100644
index 000000000..c7f761d4d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/freebsd/powerpc.rs
@@ -0,0 +1,27 @@
+//! Run-time feature detection for PowerPC on FreeBSD.
+
+use crate::detect::{Feature, cache};
+use super::{auxvec};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
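+ // The masks below match the values used by the Linux PowerPC detection in
+ // this crate: 0x10000000 (altivec) and 0x00000080 (vsx) in hwcap, and
+ // 0x80000000 (power8) in hwcap2.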
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::altivec, auxv.hwcap & 0x10000000 != 0);
+ enable_feature(&mut value, Feature::vsx, auxv.hwcap & 0x00000080 != 0);
+ enable_feature(&mut value, Feature::power8, auxv.hwcap2 & 0x80000000 != 0);
+ return value;
+ }
+ value
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/aarch64.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/aarch64.rs
new file mode 100644
index 000000000..f7dc0f022
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/aarch64.rs
@@ -0,0 +1,157 @@
+//! Run-time feature detection for AArch64 on Linux.
+
+use crate::detect::{Feature, cache, bit};
+use super::{auxvec, cpuinfo};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from /proc/cpuinfo.
+fn detect_features() -> cache::Initializer {
+ if let Ok(auxv) = auxvec::auxv() {
+ let hwcap: AtHwcap = auxv.into();
+ return hwcap.cache();
+ }
+ if let Ok(c) = cpuinfo::CpuInfo::new() {
+ let hwcap: AtHwcap = c.into();
+ return hwcap.cache();
+ }
+ cache::Initializer::default()
+}
+
+/// These values are part of the platform-specific [asm/hwcap.h][hwcap].
+///
+/// [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
+struct AtHwcap {
+ fp: bool, // 0
+ asimd: bool, // 1
+ // evtstrm: bool, // 2
+ aes: bool, // 3
+ pmull: bool, // 4
+ sha1: bool, // 5
+ sha2: bool, // 6
+ crc32: bool, // 7
+ atomics: bool, // 8
+ fphp: bool, // 9
+ asimdhp: bool, // 10
+ // cpuid: bool, // 11
+ asimdrdm: bool, // 12
+ // jscvt: bool, // 13
+ // fcma: bool, // 14
+ lrcpc: bool, // 15
+ // dcpop: bool, // 16
+ // sha3: bool, // 17
+ // sm3: bool, // 18
+ // sm4: bool, // 19
+ asimddp: bool, // 20
+ // sha512: bool, // 21
+ sve: bool, // 22
+}
+
+impl From<auxvec::AuxVec> for AtHwcap {
+ /// Reads AtHwcap from the auxiliary vector.
+ fn from(auxv: auxvec::AuxVec) -> Self {
+ AtHwcap {
+ fp: bit::test(auxv.hwcap, 0),
+ asimd: bit::test(auxv.hwcap, 1),
+ // evtstrm: bit::test(auxv.hwcap, 2),
+ aes: bit::test(auxv.hwcap, 3),
+ pmull: bit::test(auxv.hwcap, 4),
+ sha1: bit::test(auxv.hwcap, 5),
+ sha2: bit::test(auxv.hwcap, 6),
+ crc32: bit::test(auxv.hwcap, 7),
+ atomics: bit::test(auxv.hwcap, 8),
+ fphp: bit::test(auxv.hwcap, 9),
+ asimdhp: bit::test(auxv.hwcap, 10),
+ // cpuid: bit::test(auxv.hwcap, 11),
+ asimdrdm: bit::test(auxv.hwcap, 12),
+ // jscvt: bit::test(auxv.hwcap, 13),
+ // fcma: bit::test(auxv.hwcap, 14),
+ lrcpc: bit::test(auxv.hwcap, 15),
+ // dcpop: bit::test(auxv.hwcap, 16),
+ // sha3: bit::test(auxv.hwcap, 17),
+ // sm3: bit::test(auxv.hwcap, 18),
+ // sm4: bit::test(auxv.hwcap, 19),
+ asimddp: bit::test(auxv.hwcap, 20),
+ // sha512: bit::test(auxv.hwcap, 21),
+ sve: bit::test(auxv.hwcap, 22),
+ }
+ }
+}
+
+impl From<cpuinfo::CpuInfo> for AtHwcap {
+ /// Reads AtHwcap from /proc/cpuinfo.
+ fn from(c: cpuinfo::CpuInfo) -> Self {
+ let f = &c.field("Features");
+ AtHwcap {
+ // 64-bit names. FIXME: In 32-bit compatibility mode /proc/cpuinfo will
+ // map some of the 64-bit names to some 32-bit feature names. This does not
+ // cover that yet.
+ fp: f.has("fp"),
+ asimd: f.has("asimd"),
+ // evtstrm: f.has("evtstrm"),
+ aes: f.has("aes"),
+ pmull: f.has("pmull"),
+ sha1: f.has("sha1"),
+ sha2: f.has("sha2"),
+ crc32: f.has("crc32"),
+ atomics: f.has("atomics"),
+ fphp: f.has("fphp"),
+ asimdhp: f.has("asimdhp"),
+ // cpuid: f.has("cpuid"),
+ asimdrdm: f.has("asimdrdm"),
+ // jscvt: f.has("jscvt"),
+ // fcma: f.has("fcma"),
+ lrcpc: f.has("lrcpc"),
+ // dcpop: f.has("dcpop"),
+ // sha3: f.has("sha3"),
+ // sm3: f.has("sm3"),
+ // sm4: f.has("sm4"),
+ asimddp: f.has("asimddp"),
+ // sha512: f.has("sha512"),
+ sve: f.has("sve"),
+ }
+ }
+}
+
+impl AtHwcap {
+ /// Initializes the cache from the feature bits.
+ ///
+ /// The features are enabled approximately like in LLVM host feature detection:
+ /// https://github.com/llvm-mirror/llvm/blob/master/lib/Support/Host.cpp#L1273
+ fn cache(self) -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ {
+ let mut enable_feature = |f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ enable_feature(Feature::fp, self.fp);
+ // Half-float support requires float support
+ enable_feature(Feature::fp16, self.fp && self.fphp);
+ enable_feature(Feature::pmull, self.pmull);
+ enable_feature(Feature::crc, self.crc32);
+ enable_feature(Feature::lse, self.atomics);
+ enable_feature(Feature::rcpc, self.lrcpc);
+
+ // SIMD support requires float support - if half-floats are
+ // supported, it also requires half-float support:
+ let asimd = self.fp && self.asimd && (!self.fphp | self.asimdhp);
+ enable_feature(Feature::asimd, asimd);
+ // SIMD extensions require SIMD support:
+ enable_feature(Feature::rdm, self.asimdrdm && asimd);
+ enable_feature(Feature::dotprod, self.asimddp && asimd);
+ enable_feature(Feature::sve, self.sve && asimd);
+
+ // Crypto is specified as AES + PMULL + SHA1 + SHA2 per LLVM/hosts.cpp
+ enable_feature(Feature::crypto, self.aes && self.pmull && self.sha1 && self.sha2);
+ }
+ value
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/arm.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/arm.rs
new file mode 100644
index 000000000..0d58a847c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/arm.rs
@@ -0,0 +1,49 @@
+//! Run-time feature detection for ARM on Linux.
+
+use crate::detect::{Feature, cache, bit};
+use super::{auxvec, cpuinfo};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from /proc/cpuinfo.
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // The values are part of the platform-specific [asm/hwcap.h][hwcap]
+ //
+ // [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::neon, bit::test(auxv.hwcap, 12));
+ enable_feature(&mut value, Feature::pmull, bit::test(auxv.hwcap2, 1));
+ return value;
+ }
+
+ if let Ok(c) = cpuinfo::CpuInfo::new() {
+ enable_feature(&mut value, Feature::neon, c.field("Features").has("neon") &&
+ !has_broken_neon(&c));
+ enable_feature(&mut value, Feature::pmull, c.field("Features").has("pmull"));
+ return value;
+ }
+ value
+}
+
+/// Is the CPU known to have a broken NEON unit?
+///
+/// See https://crbug.com/341598.
+fn has_broken_neon(cpuinfo: &cpuinfo::CpuInfo) -> bool {
+ cpuinfo.field("CPU implementer") == "0x51"
+ && cpuinfo.field("CPU architecture") == "7"
+ && cpuinfo.field("CPU variant") == "0x1"
+ && cpuinfo.field("CPU part") == "0x04d"
+ && cpuinfo.field("CPU revision") == "0"
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/auxvec.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/auxvec.rs
new file mode 100644
index 000000000..07b6432ea
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/auxvec.rs
@@ -0,0 +1,307 @@
+//! Parses ELF auxiliary vectors.
+#![cfg_attr(not(target_arch = "aarch64"), allow(dead_code))]
+
+#[cfg(feature = "std_detect_file_io")]
+use crate::{fs::File, io::Read};
+
+/// Key to access the CPU Hardware capabilities bitfield.
+pub(crate) const AT_HWCAP: usize = 16;
+/// Key to access the CPU Hardware capabilities 2 bitfield.
+#[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+pub(crate) const AT_HWCAP2: usize = 26;
+
+/// Cache HWCAP bitfields of the ELF Auxiliary Vector.
+///
+/// If an entry cannot be read all the bits in the bitfield are set to zero.
+/// This should be interpreted as all the features being disabled.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct AuxVec {
+ pub hwcap: usize,
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ pub hwcap2: usize,
+}
+
+/// ELF Auxiliary Vector
+///
+/// The auxiliary vector is a memory region in a running ELF program's stack
+/// composed of (key: usize, value: usize) pairs.
+///
+/// The keys used in the aux vector are platform dependent. For Linux, they are
+/// defined in [linux/auxvec.h][auxvec_h]. The hardware capabilities of a given
+/// CPU can be queried with the `AT_HWCAP` and `AT_HWCAP2` keys.
+///
+/// There is no perfect way of reading the auxiliary vector.
+///
+/// - If the `std_detect_dlsym_getauxval` cargo feature is enabled, this will use
+/// `getauxval` if it is linked to the binary, and otherwise proceed to a fallback implementation.
+/// When `std_detect_dlsym_getauxval` is disabled, this will assume that `getauxval` is
+/// linked to the binary; if that is not the case, the behavior is undefined.
+/// - Otherwise, if the `std_detect_file_io` cargo feature is enabled, it will
+/// try to read `/proc/self/auxv`.
+/// - If that fails, this function returns an error.
+///
+/// Note that run-time feature detection is not invoked for features that can
+/// be detected at compile-time. Also note that if this function returns an
+/// error, cpuinfo still can (and will) be used to try to perform run-time
+/// feature detection on some platforms.
+///
+/// For more information about when `getauxval` is available check the great
+/// [`auxv` crate documentation][auxv_docs].
+///
+/// [auxvec_h]: https://github.com/torvalds/linux/blob/master/include/uapi/linux/auxvec.h
+/// [auxv_docs]: https://docs.rs/auxv/0.3.3/auxv/
+pub(crate) fn auxv() -> Result<AuxVec, ()> {
+ #[cfg(feature = "std_detect_dlsym_getauxval")] {
+ // Try to call a dynamically-linked getauxval function.
+ if let Ok(hwcap) = getauxval(AT_HWCAP) {
+ // Targets with only AT_HWCAP:
+ #[cfg(any(target_arch = "aarch64", target_arch = "mips",
+ target_arch = "mips64"))]
+ {
+ if hwcap != 0 {
+ return Ok(AuxVec { hwcap });
+ }
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ if let Ok(hwcap2) = getauxval(AT_HWCAP2) {
+ if hwcap != 0 && hwcap2 != 0 {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ }
+ drop(hwcap);
+ }
+ #[cfg(feature = "std_detect_file_io")] {
+ // If calling getauxval fails, try to read the auxiliary vector from
+ // its file:
+ auxv_from_file("/proc/self/auxv")
+ }
+ #[cfg(not(feature = "std_detect_file_io"))] {
+ Err(())
+ }
+ }
+
+ #[cfg(not(feature = "std_detect_dlsym_getauxval"))] {
+ let hwcap = unsafe { ffi_getauxval(AT_HWCAP) };
+
+ // Targets with only AT_HWCAP:
+ #[cfg(any(target_arch = "aarch64", target_arch = "mips",
+ target_arch = "mips64"))]
+ {
+ if hwcap != 0 {
+ return Ok(AuxVec { hwcap });
+ }
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ let hwcap2 = unsafe { ffi_getauxval(AT_HWCAP2) };
+ if hwcap != 0 && hwcap2 != 0 {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ }
+}
+
+/// Tries to read the `key` from the auxiliary vector by calling the
+/// dynamically-linked `getauxval` function. If the function is not linked,
+/// this function return `Err`.
+#[cfg(feature = "std_detect_dlsym_getauxval")]
+fn getauxval(key: usize) -> Result<usize, ()> {
+ use libc;
+ pub type F = unsafe extern "C" fn(usize) -> usize;
+ unsafe {
+ let ptr = libc::dlsym(
+ libc::RTLD_DEFAULT,
+ "getauxval\0".as_ptr() as *const _,
+ );
+ if ptr.is_null() {
+ return Err(());
+ }
+
+ let ffi_getauxval: F = mem::transmute(ptr);
+ Ok(ffi_getauxval(key))
+ }
+}
+
+/// Tries to read the auxiliary vector from the `file`. If this fails, this
+/// function returns `Err`.
+#[cfg(feature = "std_detect_file_io")]
+fn auxv_from_file(file: &str) -> Result<AuxVec, ()> {
+ let mut file = File::open(file).map_err(|_| ())?;
+
+ // See <https://github.com/torvalds/linux/blob/v3.19/include/uapi/linux/auxvec.h>.
+ //
+ // The auxiliary vector contains at most 32 (key,value) fields: from
+ // `AT_EXECFN = 31` to `AT_NULL = 0`. That is, a buffer of
+ // 2*32 `usize` elements is enough to read the whole vector.
+ let mut buf = [0_usize; 64];
+ {
+ let raw: &mut [u8; 64 * mem::size_of::<usize>()] =
+ unsafe { mem::transmute(&mut buf) };
+ file.read(raw).map_err(|_| ())?;
+ }
+ auxv_from_buf(&buf)
+}
+
+/// Tries to interpret the `buffer` as an auxiliary vector. If that fails, this
+/// function returns `Err`.
+#[cfg(feature = "std_detect_file_io")]
+fn auxv_from_buf(buf: &[usize; 64]) -> Result<AuxVec, ()> {
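+ // The buffer holds consecutive (key, value) word pairs, so walk it two
+ // `usize`s at a time.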
+ // Targets with only AT_HWCAP:
+ #[cfg(any(target_arch = "aarch64", target_arch = "mips",
+ target_arch = "mips64"))]
+ {
+ for el in buf.chunks(2) {
+ match el[0] {
+ AT_HWCAP => return Ok(AuxVec { hwcap: el[1] }),
+ _ => (),
+ }
+ }
+ }
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ let mut hwcap = None;
+ let mut hwcap2 = None;
+ for el in buf.chunks(2) {
+ match el[0] {
+ AT_HWCAP => hwcap = Some(el[1]),
+ AT_HWCAP2 => hwcap2 = Some(el[1]),
+ _ => (),
+ }
+ }
+
+ if let (Some(hwcap), Some(hwcap2)) = (hwcap, hwcap2) {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ drop(buf);
+ Err(())
+}
+
+#[cfg(test)]
+mod tests {
+ extern crate auxv as auxv_crate;
+ use super::*;
+
+ // Reads the Auxiliary Vector key from /proc/self/auxv
+ // using the auxv crate.
+ #[cfg(feature = "std_detect_file_io")]
+ fn auxv_crate_getprocfs(key: usize) -> Option<usize> {
+ use self::auxv_crate::AuxvType;
+ use self::auxv_crate::procfs::search_procfs_auxv;
+ let k = key as AuxvType;
+ match search_procfs_auxv(&[k]) {
+ Ok(v) => Some(v[&k] as usize),
+ Err(_) => None,
+ }
+ }
+
+ // Reads the Auxiliary Vector key from getauxval()
+ // using the auxv crate.
+ #[cfg(not(any(target_arch = "mips", target_arch = "mips64")))]
+ fn auxv_crate_getauxval(key: usize) -> Option<usize> {
+ use self::auxv_crate::AuxvType;
+ use self::auxv_crate::getauxval::Getauxval;
+ let q = auxv_crate::getauxval::NativeGetauxval {};
+ match q.getauxval(key as AuxvType) {
+ Ok(v) => Some(v as usize),
+ Err(_) => None,
+ }
+ }
+
+ // FIXME: on mips/mips64 getauxval returns 0, and /proc/self/auxv
+ // does not always contain the AT_HWCAP key under qemu.
+ #[cfg(not(any(target_arch = "mips", target_arch = "mips64", target_arch = "powerpc")))]
+ #[test]
+ fn auxv_crate() {
+ let v = auxv();
+ if let Some(hwcap) = auxv_crate_getauxval(AT_HWCAP) {
+ let rt_hwcap = v.expect("failed to find hwcap key").hwcap;
+ assert_eq!(rt_hwcap, hwcap);
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ if let Some(hwcap2) = auxv_crate_getauxval(AT_HWCAP2) {
+ let rt_hwcap2 = v.expect("failed to find hwcap2 key").hwcap2;
+ assert_eq!(rt_hwcap2, hwcap2);
+ }
+ }
+ }
+
+ #[test]
+ fn auxv_dump() {
+ if let Ok(auxvec) = auxv() {
+ println!("{:?}", auxvec);
+ } else {
+ println!("both getauxval() and reading /proc/self/auxv failed!");
+ }
+ }
+
+ #[cfg(feature = "std_detect_file_io")]
+ cfg_if! {
+ if #[cfg(target_arch = "arm")] {
+ #[test]
+ fn linux_rpi3() {
+ let file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/detect/test_data/linux-rpi3.auxv");
+ println!("file: {}", file);
+ let v = auxv_from_file(file).unwrap();
+ assert_eq!(v.hwcap, 4174038);
+ assert_eq!(v.hwcap2, 16);
+ }
+
+ #[test]
+ #[should_panic]
+ fn linux_macos_vb() {
+ let file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/detect/test_data/macos-virtualbox-linux-x86-4850HQ.auxv");
+ println!("file: {}", file);
+ let v = auxv_from_file(file).unwrap();
+ // this file is incomplete (contains hwcap but not hwcap2), we
+ // want to fall back to /proc/cpuinfo in this case, so
+ // reading should fail. assert_eq!(v.hwcap, 126614527);
+ // assert_eq!(v.hwcap2, 0);
+ }
+ } else if #[cfg(target_arch = "aarch64")] {
+ #[test]
+ fn linux_x64() {
+ let file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/detect/test_data/linux-x64-i7-6850k.auxv");
+ println!("file: {}", file);
+ let v = auxv_from_file(file).unwrap();
+ assert_eq!(v.hwcap, 3219913727);
+ }
+ }
+ }
+
+ #[test]
+ #[cfg(feature = "std_detect_file_io")]
+ fn auxv_dump_procfs() {
+ if let Ok(auxvec) = auxv_from_file("/proc/self/auxv") {
+ println!("{:?}", auxvec);
+ } else {
+ println!("reading /proc/self/auxv failed!");
+ }
+ }
+
+ #[test]
+ fn auxv_crate_procfs() {
+ let v = auxv();
+ if let Some(hwcap) = auxv_crate_getprocfs(AT_HWCAP) {
+ assert_eq!(v.unwrap().hwcap, hwcap);
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ if let Some(hwcap2) = auxv_crate_getprocfs(AT_HWCAP2) {
+ assert_eq!(v.unwrap().hwcap2, hwcap2);
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/cpuinfo.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/cpuinfo.rs
new file mode 100644
index 000000000..b31685785
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/cpuinfo.rs
@@ -0,0 +1,301 @@
+//! Parses /proc/cpuinfo
+#![cfg_attr(not(target_arch = "arm"), allow(dead_code))]
+
+extern crate std;
+use self::std::{prelude::v1::*, fs::File, io, io::Read};
+
+/// cpuinfo
+pub(crate) struct CpuInfo {
+ raw: String,
+}
+
+impl CpuInfo {
+ /// Reads /proc/cpuinfo into CpuInfo.
+ pub(crate) fn new() -> Result<Self, io::Error> {
+ let mut file = File::open("/proc/cpuinfo")?;
+ let mut cpui = Self { raw: String::new() };
+ file.read_to_string(&mut cpui.raw)?;
+ Ok(cpui)
+ }
+ /// Returns the value of the cpuinfo `field`.
+ pub(crate) fn field(&self, field: &str) -> CpuInfoField {
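+ // cpuinfo lines look like `Features : fp asimd ...`; return whatever
+ // follows the first `": "` separator of the matching field.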
+ for l in self.raw.lines() {
+ if l.trim().starts_with(field) {
+ return CpuInfoField::new(l.split(": ").nth(1));
+ }
+ }
+ CpuInfoField(None)
+ }
+
+ /// Returns the `raw` contents of `/proc/cpuinfo`
+ #[cfg(test)]
+ fn raw(&self) -> &String {
+ &self.raw
+ }
+
+ #[cfg(test)]
+ fn from_str(other: &str) -> Result<Self, ::std::io::Error> {
+ Ok(Self {
+ raw: String::from(other),
+ })
+ }
+}
+
+/// Field of cpuinfo
+#[derive(Debug)]
+pub(crate) struct CpuInfoField<'a>(Option<&'a str>);
+
+impl<'a> PartialEq<&'a str> for CpuInfoField<'a> {
+ fn eq(&self, other: &&'a str) -> bool {
+ match self.0 {
+ None => other.is_empty(),
+ Some(f) => f == other.trim(),
+ }
+ }
+}
+
+impl<'a> CpuInfoField<'a> {
+ pub(crate) fn new<'b>(v: Option<&'b str>) -> CpuInfoField<'b> {
+ match v {
+ None => CpuInfoField::<'b>(None),
+ Some(f) => CpuInfoField::<'b>(Some(f.trim())),
+ }
+ }
+ /// Does the field exist?
+ #[cfg(test)]
+ pub(crate) fn exists(&self) -> bool {
+ self.0.is_some()
+ }
+ /// Does the field contain `other`?
+ pub(crate) fn has(&self, other: &str) -> bool {
+ match self.0 {
+ None => other.is_empty(),
+ Some(f) => {
+ let other = other.trim();
+ for v in f.split(' ') {
+ if v == other {
+ return true;
+ }
+ }
+ false
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn raw_dump() {
+ let cpuinfo = CpuInfo::new().unwrap();
+ if cpuinfo.field("vendor_id") == "GenuineIntel" {
+ assert!(cpuinfo.field("flags").exists());
+ assert!(!cpuinfo.field("vendor33_id").exists());
+ assert!(cpuinfo.field("flags").has("sse"));
+ assert!(!cpuinfo.field("flags").has("avx314"));
+ }
+ println!("{}", cpuinfo.raw());
+ }
+
+ const CORE_DUO_T6500: &str = r"processor : 0
+vendor_id : GenuineIntel
+cpu family : 6
+model : 23
+model name : Intel(R) Core(TM)2 Duo CPU T6500 @ 2.10GHz
+stepping : 10
+microcode : 0xa0b
+cpu MHz : 1600.000
+cache size : 2048 KB
+physical id : 0
+siblings : 2
+core id : 0
+cpu cores : 2
+apicid : 0
+initial apicid : 0
+fdiv_bug : no
+hlt_bug : no
+f00f_bug : no
+coma_bug : no
+fpu : yes
+fpu_exception : yes
+cpuid level : 13
+wp : yes
+flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe nx lm constant_tsc arch_perfmon pebs bts aperfmperf pni dtes64 monitor ds_cpl est tm2 ssse3 cx16 xtpr pdcm sse4_1 xsave lahf_lm dtherm
+bogomips : 4190.43
+clflush size : 64
+cache_alignment : 64
+address sizes : 36 bits physical, 48 bits virtual
+power management:
+";
+
+ #[test]
+ fn core_duo_t6500() {
+ let cpuinfo = CpuInfo::from_str(CORE_DUO_T6500).unwrap();
+ assert_eq!(cpuinfo.field("vendor_id"), "GenuineIntel");
+ assert_eq!(cpuinfo.field("cpu family"), "6");
+ assert_eq!(cpuinfo.field("model"), "23");
+ assert_eq!(
+ cpuinfo.field("model name"),
+ "Intel(R) Core(TM)2 Duo CPU T6500 @ 2.10GHz"
+ );
+ assert_eq!(
+ cpuinfo.field("flags"),
+ "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe nx lm constant_tsc arch_perfmon pebs bts aperfmperf pni dtes64 monitor ds_cpl est tm2 ssse3 cx16 xtpr pdcm sse4_1 xsave lahf_lm dtherm"
+ );
+ assert!(cpuinfo.field("flags").has("fpu"));
+ assert!(cpuinfo.field("flags").has("dtherm"));
+ assert!(cpuinfo.field("flags").has("sse2"));
+ assert!(!cpuinfo.field("flags").has("avx"));
+ }
+
+ const ARM_CORTEX_A53: &str =
+ r"Processor : AArch64 Processor rev 3 (aarch64)
+ processor : 0
+ processor : 1
+ processor : 2
+ processor : 3
+ processor : 4
+ processor : 5
+ processor : 6
+ processor : 7
+ Features : fp asimd evtstrm aes pmull sha1 sha2 crc32
+ CPU implementer : 0x41
+ CPU architecture: AArch64
+ CPU variant : 0x0
+ CPU part : 0xd03
+ CPU revision : 3
+
+ Hardware : HiKey Development Board
+ ";
+
+ #[test]
+ fn arm_cortex_a53() {
+ let cpuinfo = CpuInfo::from_str(ARM_CORTEX_A53).unwrap();
+ assert_eq!(
+ cpuinfo.field("Processor"),
+ "AArch64 Processor rev 3 (aarch64)"
+ );
+ assert_eq!(
+ cpuinfo.field("Features"),
+ "fp asimd evtstrm aes pmull sha1 sha2 crc32"
+ );
+ assert!(cpuinfo.field("Features").has("pmull"));
+ assert!(!cpuinfo.field("Features").has("neon"));
+ assert!(cpuinfo.field("Features").has("asimd"));
+ }
+
+ const ARM_CORTEX_A57: &str = r"Processor : Cortex A57 Processor rev 1 (aarch64)
+processor : 0
+processor : 1
+processor : 2
+processor : 3
+Features : fp asimd aes pmull sha1 sha2 crc32 wp half thumb fastmult vfp edsp neon vfpv3 tlsi vfpv4 idiva idivt
+CPU implementer : 0x41
+CPU architecture: 8
+CPU variant : 0x1
+CPU part : 0xd07
+CPU revision : 1";
+
+ #[test]
+ fn arm_cortex_a57() {
+ let cpuinfo = CpuInfo::from_str(ARM_CORTEX_A57).unwrap();
+ assert_eq!(
+ cpuinfo.field("Processor"),
+ "Cortex A57 Processor rev 1 (aarch64)"
+ );
+ assert_eq!(
+ cpuinfo.field("Features"),
+ "fp asimd aes pmull sha1 sha2 crc32 wp half thumb fastmult vfp edsp neon vfpv3 tlsi vfpv4 idiva idivt"
+ );
+ assert!(cpuinfo.field("Features").has("pmull"));
+ assert!(cpuinfo.field("Features").has("neon"));
+ assert!(cpuinfo.field("Features").has("asimd"));
+ }
+
+ const POWER8E_POWERKVM: &str = r"processor : 0
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+processor : 1
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+processor : 2
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+processor : 3
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+timebase : 512000000
+platform : pSeries
+model : IBM pSeries (emulated by qemu)
+machine : CHRP IBM pSeries (emulated by qemu)";
+
+ #[test]
+ fn power8_powerkvm() {
+ let cpuinfo = CpuInfo::from_str(POWER8E_POWERKVM).unwrap();
+ assert_eq!(cpuinfo.field("cpu"), "POWER8E (raw), altivec supported");
+
+ assert!(cpuinfo.field("cpu").has("altivec"));
+ }
+
+ const POWER5P: &str = r"processor : 0
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 1
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 2
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 3
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 4
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 5
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 6
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 7
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+timebase : 237331000
+platform : pSeries
+machine : CHRP IBM,9133-55A";
+
+ #[test]
+ fn power5p() {
+ let cpuinfo = CpuInfo::from_str(POWER5P).unwrap();
+ assert_eq!(cpuinfo.field("cpu"), "POWER5+ (gs)");
+
+ assert!(!cpuinfo.field("cpu").has("altivec"));
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mips.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mips.rs
new file mode 100644
index 000000000..c0a5fb2e5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mips.rs
@@ -0,0 +1,31 @@
+//! Run-time feature detection for MIPS on Linux.
+
+use crate::detect::{Feature, cache, bit};
+use super::auxvec;
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from `/proc/cpuinfo`.
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // The values are part of the platform-specific [asm/hwcap.h][hwcap]
+ //
+ // [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::msa, bit::test(auxv.hwcap, 1));
+ return value;
+ }
+ // TODO: fall back via `cpuinfo`.
+ value
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mod.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mod.rs
new file mode 100644
index 000000000..e02d5e6dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/mod.rs
@@ -0,0 +1,28 @@
+//! Run-time feature detection on Linux
+
+mod auxvec;
+
+#[cfg(feature = "std_detect_file_io")]
+mod cpuinfo;
+
+cfg_if! {
+ if #[cfg(target_arch = "aarch64")] {
+ mod aarch64;
+ pub use self::aarch64::check_for;
+ } else if #[cfg(target_arch = "arm")] {
+ mod arm;
+ pub use self::arm::check_for;
+ } else if #[cfg(any(target_arch = "mips", target_arch = "mips64"))] {
+ mod mips;
+ pub use self::mips::check_for;
+ } else if #[cfg(any(target_arch = "powerpc", target_arch = "powerpc64"))] {
+ mod powerpc;
+ pub use self::powerpc::check_for;
+ } else {
+ use crate::detect::Feature;
+ /// Performs run-time feature detection.
+ pub fn check_for(_x: Feature) -> bool {
+ false
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/powerpc.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/powerpc.rs
new file mode 100644
index 000000000..1c08a5844
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/linux/powerpc.rs
@@ -0,0 +1,41 @@
+//! Run-time feature detection for PowerPC on Linux.
+
+use crate::detect::{Feature, cache};
+use super::{auxvec, cpuinfo};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from /proc/cpuinfo.
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // The values are part of the platform-specific [asm/cputable.h][cputable]
+ //
+ // [cputable]: https://github.com/torvalds/linux/blob/master/arch/powerpc/include/uapi/asm/cputable.h
+ if let Ok(auxv) = auxvec::auxv() {
+ // Note: the PowerPC values are bit masks to test against directly (instead of
+ // the index of the bit to test, as on ARM and AArch64)
+ enable_feature(&mut value, Feature::altivec, auxv.hwcap & 0x10000000 != 0);
+ enable_feature(&mut value, Feature::vsx, auxv.hwcap & 0x00000080 != 0);
+ enable_feature(&mut value, Feature::power8, auxv.hwcap2 & 0x80000000 != 0);
+ return value;
+ }
+
+ // PowerPC's /proc/cpuinfo lacks a proper Feature field,
+ // but `altivec` support is indicated in the `cpu` field.
+ if let Ok(c) = cpuinfo::CpuInfo::new() {
+ enable_feature(&mut value, Feature::altivec, c.field("cpu").has("altivec"));
+ return value;
+ }
+ value
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/other.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/other.rs
new file mode 100644
index 000000000..23e399ea7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/other.rs
@@ -0,0 +1,9 @@
+//! Other operating systems
+
+use crate::detect::Feature;
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(_x: Feature) -> bool {
+ false
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/detect/os/x86.rs b/src/tools/rustfmt/tests/source/cfg_if/detect/os/x86.rs
new file mode 100644
index 000000000..9257b8a4b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/detect/os/x86.rs
@@ -0,0 +1,375 @@
+//! x86 run-time feature detection is OS independent.
+
+#[cfg(target_arch = "x86")]
+use crate::arch::x86::*;
+#[cfg(target_arch = "x86_64")]
+use crate::arch::x86_64::*;
+
+use crate::mem;
+
+use crate::detect::{Feature, cache, bit};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Run-time feature detection on x86 works by using the CPUID instruction.
+///
+/// The [CPUID Wikipedia page][wiki_cpuid] contains
+/// all the information about which flags to set to query which values, and in
+/// which registers these are reported.
+///
+/// The definitive references are:
+/// - [Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2:
+/// Instruction Set Reference, A-Z][intel64_ref].
+/// - [AMD64 Architecture Programmer's Manual, Volume 3: General-Purpose and
+/// System Instructions][amd64_ref].
+///
+/// [wiki_cpuid]: https://en.wikipedia.org/wiki/CPUID
+/// [intel64_ref]: http://www.intel.de/content/dam/www/public/us/en/documents/manuals/64-ia-32-architectures-software-developer-instruction-set-reference-manual-325383.pdf
+/// [amd64_ref]: http://support.amd.com/TechDocs/24594.pdf
+#[allow(clippy::similar_names)]
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+
+ // If the x86 CPU does not support the CPUID instruction then it is too
+ // old to support any of the currently-detectable features.
+ if !has_cpuid() {
+ return value;
+ }
+
+ // Calling `__cpuid`/`__cpuid_count` from here on is safe because the CPU
+ // has `cpuid` support.
+
+ // 0. EAX = 0: Basic Information:
+ // - EAX returns the "Highest Function Parameter", that is, the maximum
+ // leaf value for subsequent calls of `cpuid` in range [0, 0x8000_0000].
+ // - The vendor ID is stored as 12 ASCII bytes (`u8`), returned in EBX, EDX,
+ // and ECX (in that order):
+ let (max_basic_leaf, vendor_id) = unsafe {
+ let CpuidResult {
+ eax: max_basic_leaf,
+ ebx,
+ ecx,
+ edx,
+ } = __cpuid(0);
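+ // Reassemble the 12 ASCII bytes of the vendor string from EBX, EDX and ECX
+ // (in that order), e.g. b"GenuineIntel" or b"AuthenticAMD".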
+ let vendor_id: [[u8; 4]; 3] = [
+ mem::transmute(ebx),
+ mem::transmute(edx),
+ mem::transmute(ecx),
+ ];
+ let vendor_id: [u8; 12] = mem::transmute(vendor_id);
+ (max_basic_leaf, vendor_id)
+ };
+
+ if max_basic_leaf < 1 {
+ // Earlier Intel 486, CPUID not implemented
+ return value;
+ }
+
+ // EAX = 1, ECX = 0: Queries "Processor Info and Feature Bits";
+ // Contains information about most x86 features.
+ let CpuidResult {
+ ecx: proc_info_ecx,
+ edx: proc_info_edx,
+ ..
+ } = unsafe { __cpuid(0x0000_0001_u32) };
+
+ // EAX = 7, ECX = 0: Queries "Extended Features";
+ // Contains information about bmi, bmi2, and avx2 support.
+ let (extended_features_ebx, extended_features_ecx) = if max_basic_leaf >= 7
+ {
+ let CpuidResult { ebx, ecx, .. } = unsafe { __cpuid(0x0000_0007_u32) };
+ (ebx, ecx)
+ } else {
+ (0, 0) // CPUID does not support "Extended Features"
+ };
+
+ // EAX = 0x8000_0000, ECX = 0: Get Highest Extended Function Supported
+ // - EAX returns the max leaf value for extended information, that is,
+ // `cpuid` calls in range [0x8000_0000; u32::MAX]:
+ let CpuidResult {
+ eax: extended_max_basic_leaf,
+ ..
+ } = unsafe { __cpuid(0x8000_0000_u32) };
+
+ // EAX = 0x8000_0001, ECX=0: Queries "Extended Processor Info and Feature
+ // Bits"
+ let extended_proc_info_ecx = if extended_max_basic_leaf >= 1 {
+ let CpuidResult { ecx, .. } = unsafe { __cpuid(0x8000_0001_u32) };
+ ecx
+ } else {
+ 0
+ };
+
+ {
+ // borrows value till the end of this scope:
+ let mut enable = |r, rb, f| {
+ if bit::test(r as usize, rb) {
+ value.set(f as u32);
+ }
+ };
+
+ enable(proc_info_ecx, 0, Feature::sse3);
+ enable(proc_info_ecx, 1, Feature::pclmulqdq);
+ enable(proc_info_ecx, 9, Feature::ssse3);
+ enable(proc_info_ecx, 13, Feature::cmpxchg16b);
+ enable(proc_info_ecx, 19, Feature::sse4_1);
+ enable(proc_info_ecx, 20, Feature::sse4_2);
+ enable(proc_info_ecx, 23, Feature::popcnt);
+ enable(proc_info_ecx, 25, Feature::aes);
+ enable(proc_info_ecx, 29, Feature::f16c);
+ enable(proc_info_ecx, 30, Feature::rdrand);
+ enable(extended_features_ebx, 18, Feature::rdseed);
+ enable(extended_features_ebx, 19, Feature::adx);
+ enable(extended_features_ebx, 11, Feature::rtm);
+ enable(proc_info_edx, 4, Feature::tsc);
+ enable(proc_info_edx, 23, Feature::mmx);
+ enable(proc_info_edx, 24, Feature::fxsr);
+ enable(proc_info_edx, 25, Feature::sse);
+ enable(proc_info_edx, 26, Feature::sse2);
+ enable(extended_features_ebx, 29, Feature::sha);
+
+ enable(extended_features_ebx, 3, Feature::bmi);
+ enable(extended_features_ebx, 8, Feature::bmi2);
+
+ // `XSAVE` and `AVX` support:
+ let cpu_xsave = bit::test(proc_info_ecx as usize, 26);
+ if cpu_xsave {
+ // 0. Here the CPU supports `XSAVE`.
+
+ // 1. Detect `OSXSAVE`, that is, whether the OS is AVX enabled and
+ // supports saving the state of the AVX/AVX2 vector registers on
+ // context-switches, see:
+ //
+ // - [intel: is avx enabled?][is_avx_enabled],
+ // - [mozilla: sse.cpp][mozilla_sse_cpp].
+ //
+ // [is_avx_enabled]: https://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled
+ // [mozilla_sse_cpp]: https://hg.mozilla.org/mozilla-central/file/64bab5cbb9b6/mozglue/build/SSE.cpp#l190
+ let cpu_osxsave = bit::test(proc_info_ecx as usize, 27);
+
+ if cpu_osxsave {
+ // 2. The OS must have signaled the CPU that it supports saving and
+ // restoring the:
+ //
+ // * SSE -> `XCR0.SSE[1]`
+ // * AVX -> `XCR0.AVX[2]`
+ // * AVX-512 -> `XCR0.AVX-512[7:5]`.
+ //
+ // by setting the corresponding bits of `XCR0` to `1`.
+ //
+ // This is safe because the CPU supports `xsave`
+ // and the OS has set `osxsave`.
+ let xcr0 = unsafe { _xgetbv(0) };
+ // Test `XCR0.SSE[1]` and `XCR0.AVX[2]` with the mask `0b110 == 6`:
+ let os_avx_support = xcr0 & 6 == 6;
+ // Test `XCR0.AVX-512[7:5]` with the mask `0b1110_0000 == 224`:
+ let os_avx512_support = xcr0 & 224 == 224;
+
+ // Only if the OS and the CPU support saving/restoring the AVX
+ // registers we enable `xsave` support:
+ if os_avx_support {
+ // See "13.3 ENABLING THE XSAVE FEATURE SET AND XSAVE-ENABLED
+ // FEATURES" in the "Intel® 64 and IA-32 Architectures Software
+ // Developer’s Manual, Volume 1: Basic Architecture":
+ //
+ // "Software enables the XSAVE feature set by setting
+ // CR4.OSXSAVE[bit 18] to 1 (e.g., with the MOV to CR4
+ // instruction). If this bit is 0, execution of any of XGETBV,
+ // XRSTOR, XRSTORS, XSAVE, XSAVEC, XSAVEOPT, XSAVES, and XSETBV
+ // causes an invalid-opcode exception (#UD)"
+ //
+ enable(proc_info_ecx, 26, Feature::xsave);
+
+ // For `xsaveopt`, `xsavec`, and `xsaves` we need to query:
+ // Processor Extended State Enumeration Sub-leaf (EAX = 0DH,
+ // ECX = 1):
+ if max_basic_leaf >= 0xd {
+ let CpuidResult {
+ eax: proc_extended_state1_eax,
+ ..
+ } = unsafe { __cpuid_count(0xd_u32, 1) };
+ enable(proc_extended_state1_eax, 0, Feature::xsaveopt);
+ enable(proc_extended_state1_eax, 1, Feature::xsavec);
+ enable(proc_extended_state1_eax, 3, Feature::xsaves);
+ }
+
+ // FMA (uses 256-bit wide registers):
+ enable(proc_info_ecx, 12, Feature::fma);
+
+ // And AVX/AVX2:
+ enable(proc_info_ecx, 28, Feature::avx);
+ enable(extended_features_ebx, 5, Feature::avx2);
+
+ // For AVX-512 the OS also needs to support saving/restoring
+ // the extended state, only then we enable AVX-512 support:
+ if os_avx512_support {
+ enable(extended_features_ebx, 16, Feature::avx512f);
+ enable(extended_features_ebx, 17, Feature::avx512dq);
+ enable(extended_features_ebx, 21, Feature::avx512_ifma);
+ enable(extended_features_ebx, 26, Feature::avx512pf);
+ enable(extended_features_ebx, 27, Feature::avx512er);
+ enable(extended_features_ebx, 28, Feature::avx512cd);
+ enable(extended_features_ebx, 30, Feature::avx512bw);
+ enable(extended_features_ebx, 31, Feature::avx512vl);
+ enable(extended_features_ecx, 1, Feature::avx512_vbmi);
+ enable(
+ extended_features_ecx,
+ 14,
+ Feature::avx512_vpopcntdq,
+ );
+ }
+ }
+ }
+ }
+
+ // This detects ABM on AMD CPUs and LZCNT on Intel CPUs.
+ // On Intel CPUs with popcnt, lzcnt implements the
+ // "missing part" of ABM, so we map both to the same
+ // internal feature.
+ //
+ // The `is_x86_feature_detected!("lzcnt")` macro then
+ // internally maps to Feature::abm.
+ enable(extended_proc_info_ecx, 5, Feature::abm);
+ // Hygon Dhyana originates from AMD technology and shares most of the architecture with
+ // AMD's family 17h, but has a different CPU Vendor ID ("HygonGenuine") and family series
+ // number (Family 18h).
+ //
+ // For CPUID feature bits, Hygon Dhyana(family 18h) share the same definition with AMD
+ // family 17h.
+ //
+ // Related AMD CPUID specification is https://www.amd.com/system/files/TechDocs/25481.pdf.
+ // Related Hygon kernel patch can be found on
+ // http://lkml.kernel.org/r/5ce86123a7b9dad925ac583d88d2f921040e859b.1538583282.git.puwen@hygon.cn
+ if vendor_id == *b"AuthenticAMD" || vendor_id == *b"HygonGenuine" {
+ // These features are available on AMD arch CPUs:
+ enable(extended_proc_info_ecx, 6, Feature::sse4a);
+ enable(extended_proc_info_ecx, 21, Feature::tbm);
+ }
+ }
+
+ value
+}
+
+#[cfg(test)]
+mod tests {
+ extern crate cupid;
+
+ #[test]
+ fn dump() {
+ println!("aes: {:?}", is_x86_feature_detected!("aes"));
+ println!("pclmulqdq: {:?}", is_x86_feature_detected!("pclmulqdq"));
+ println!("rdrand: {:?}", is_x86_feature_detected!("rdrand"));
+ println!("rdseed: {:?}", is_x86_feature_detected!("rdseed"));
+ println!("tsc: {:?}", is_x86_feature_detected!("tsc"));
+ println!("sse: {:?}", is_x86_feature_detected!("sse"));
+ println!("sse2: {:?}", is_x86_feature_detected!("sse2"));
+ println!("sse3: {:?}", is_x86_feature_detected!("sse3"));
+ println!("ssse3: {:?}", is_x86_feature_detected!("ssse3"));
+ println!("sse4.1: {:?}", is_x86_feature_detected!("sse4.1"));
+ println!("sse4.2: {:?}", is_x86_feature_detected!("sse4.2"));
+ println!("sse4a: {:?}", is_x86_feature_detected!("sse4a"));
+ println!("sha: {:?}", is_x86_feature_detected!("sha"));
+ println!("avx: {:?}", is_x86_feature_detected!("avx"));
+ println!("avx2: {:?}", is_x86_feature_detected!("avx2"));
+ println!("avx512f {:?}", is_x86_feature_detected!("avx512f"));
+ println!("avx512cd {:?}", is_x86_feature_detected!("avx512cd"));
+ println!("avx512er {:?}", is_x86_feature_detected!("avx512er"));
+ println!("avx512pf {:?}", is_x86_feature_detected!("avx512pf"));
+ println!("avx512bw {:?}", is_x86_feature_detected!("avx512bw"));
+ println!("avx512dq {:?}", is_x86_feature_detected!("avx512dq"));
+ println!("avx512vl {:?}", is_x86_feature_detected!("avx512vl"));
+ println!("avx512_ifma {:?}", is_x86_feature_detected!("avx512ifma"));
+ println!("avx512_vbmi {:?}", is_x86_feature_detected!("avx512vbmi"));
+ println!(
+ "avx512_vpopcntdq {:?}",
+ is_x86_feature_detected!("avx512vpopcntdq")
+ );
+ println!("fma: {:?}", is_x86_feature_detected!("fma"));
+ println!("abm: {:?}", is_x86_feature_detected!("abm"));
+ println!("bmi: {:?}", is_x86_feature_detected!("bmi1"));
+ println!("bmi2: {:?}", is_x86_feature_detected!("bmi2"));
+ println!("tbm: {:?}", is_x86_feature_detected!("tbm"));
+ println!("popcnt: {:?}", is_x86_feature_detected!("popcnt"));
+ println!("lzcnt: {:?}", is_x86_feature_detected!("lzcnt"));
+ println!("fxsr: {:?}", is_x86_feature_detected!("fxsr"));
+ println!("xsave: {:?}", is_x86_feature_detected!("xsave"));
+ println!("xsaveopt: {:?}", is_x86_feature_detected!("xsaveopt"));
+ println!("xsaves: {:?}", is_x86_feature_detected!("xsaves"));
+ println!("xsavec: {:?}", is_x86_feature_detected!("xsavec"));
+ println!("cmpxchg16b: {:?}", is_x86_feature_detected!("cmpxchg16b"));
+ println!("adx: {:?}", is_x86_feature_detected!("adx"));
+ println!("rtm: {:?}", is_x86_feature_detected!("rtm"));
+ }
+
+ #[test]
+ fn compare_with_cupid() {
+ let information = cupid::master().unwrap();
+ assert_eq!(is_x86_feature_detected!("aes"), information.aesni());
+ assert_eq!(is_x86_feature_detected!("pclmulqdq"), information.pclmulqdq());
+ assert_eq!(is_x86_feature_detected!("rdrand"), information.rdrand());
+ assert_eq!(is_x86_feature_detected!("rdseed"), information.rdseed());
+ assert_eq!(is_x86_feature_detected!("tsc"), information.tsc());
+ assert_eq!(is_x86_feature_detected!("sse"), information.sse());
+ assert_eq!(is_x86_feature_detected!("sse2"), information.sse2());
+ assert_eq!(is_x86_feature_detected!("sse3"), information.sse3());
+ assert_eq!(is_x86_feature_detected!("ssse3"), information.ssse3());
+ assert_eq!(is_x86_feature_detected!("sse4.1"), information.sse4_1());
+ assert_eq!(is_x86_feature_detected!("sse4.2"), information.sse4_2());
+ assert_eq!(is_x86_feature_detected!("sse4a"), information.sse4a());
+ assert_eq!(is_x86_feature_detected!("sha"), information.sha());
+ assert_eq!(is_x86_feature_detected!("avx"), information.avx());
+ assert_eq!(is_x86_feature_detected!("avx2"), information.avx2());
+ assert_eq!(is_x86_feature_detected!("avx512f"), information.avx512f());
+ assert_eq!(is_x86_feature_detected!("avx512cd"), information.avx512cd());
+ assert_eq!(is_x86_feature_detected!("avx512er"), information.avx512er());
+ assert_eq!(is_x86_feature_detected!("avx512pf"), information.avx512pf());
+ assert_eq!(is_x86_feature_detected!("avx512bw"), information.avx512bw());
+ assert_eq!(is_x86_feature_detected!("avx512dq"), information.avx512dq());
+ assert_eq!(is_x86_feature_detected!("avx512vl"), information.avx512vl());
+ assert_eq!(
+ is_x86_feature_detected!("avx512ifma"),
+ information.avx512_ifma()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("avx512vbmi"),
+ information.avx512_vbmi()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("avx512vpopcntdq"),
+ information.avx512_vpopcntdq()
+ );
+ assert_eq!(is_x86_feature_detected!("fma"), information.fma());
+ assert_eq!(is_x86_feature_detected!("bmi1"), information.bmi1());
+ assert_eq!(is_x86_feature_detected!("bmi2"), information.bmi2());
+ assert_eq!(is_x86_feature_detected!("popcnt"), information.popcnt());
+ assert_eq!(is_x86_feature_detected!("abm"), information.lzcnt());
+ assert_eq!(is_x86_feature_detected!("tbm"), information.tbm());
+ assert_eq!(is_x86_feature_detected!("lzcnt"), information.lzcnt());
+ assert_eq!(is_x86_feature_detected!("xsave"), information.xsave());
+ assert_eq!(is_x86_feature_detected!("xsaveopt"), information.xsaveopt());
+ assert_eq!(
+ is_x86_feature_detected!("xsavec"),
+ information.xsavec_and_xrstor()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("xsaves"),
+ information.xsaves_xrstors_and_ia32_xss()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("cmpxchg16b"),
+ information.cmpxchg16b(),
+ );
+ assert_eq!(
+ is_x86_feature_detected!("adx"),
+ information.adx(),
+ );
+ assert_eq!(
+ is_x86_feature_detected!("rtm"),
+ information.rtm(),
+ );
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_if/lib.rs b/src/tools/rustfmt/tests/source/cfg_if/lib.rs
new file mode 100644
index 000000000..8b3bb304f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/lib.rs
@@ -0,0 +1,49 @@
+//! Run-time feature detection for the Rust standard library.
+//!
+//! To detect whether a feature is enabled in the system running the binary
+//! use one of the appropriate macros for the target:
+//!
+//! * `x86` and `x86_64`: [`is_x86_feature_detected`]
+//! * `arm`: [`is_arm_feature_detected`]
+//! * `aarch64`: [`is_aarch64_feature_detected`]
+//! * `mips`: [`is_mips_feature_detected`]
+//! * `mips64`: [`is_mips64_feature_detected`]
+//! * `powerpc`: [`is_powerpc_feature_detected`]
+//! * `powerpc64`: [`is_powerpc64_feature_detected`]
+
+#![unstable(feature = "stdsimd", issue = "27731")]
+#![feature(const_fn, staged_api, stdsimd, doc_cfg, allow_internal_unstable)]
+#![allow(clippy::shadow_reuse)]
+#![deny(clippy::missing_inline_in_public_items)]
+#![cfg_attr(target_os = "linux", feature(linkage))]
+#![cfg_attr(all(target_os = "freebsd", target_arch = "aarch64"), feature(asm))]
+#![cfg_attr(stdsimd_strict, deny(warnings))]
+#![cfg_attr(test, allow(unused_imports))]
+#![no_std]
+
+#[macro_use]
+extern crate cfg_if;
+
+cfg_if! {
+ if #[cfg(feature = "std_detect_file_io")] {
+ #[cfg_attr(test, macro_use(println))]
+ extern crate std;
+
+ #[allow(unused_imports)]
+ use std::{arch, fs, io, mem, sync};
+ } else {
+ #[cfg(test)]
+ #[macro_use(println)]
+ extern crate std;
+
+ #[allow(unused_imports)]
+ use core::{arch, mem, sync};
+ }
+}
+
+#[cfg(feature = "std_detect_dlsym_getauxval")]
+extern crate libc;
+
+#[doc(hidden)]
+#[unstable(feature = "stdsimd", issue = "27731")]
+pub mod detect;
diff --git a/src/tools/rustfmt/tests/source/cfg_if/mod.rs b/src/tools/rustfmt/tests/source/cfg_if/mod.rs
new file mode 100644
index 000000000..b630e7ff3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_if/mod.rs
@@ -0,0 +1,5 @@
+//! `std_detect`
+
+#[doc(hidden)] // unstable implementation detail
+#[unstable(feature = "stdsimd", issue = "27731")]
+pub mod detect;
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/bar.rs b/src/tools/rustfmt/tests/source/cfg_mod/bar.rs
new file mode 100644
index 000000000..5b6b5f438
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/bar.rs
@@ -0,0 +1,3 @@
+fn bar( ) -> &str {
+"bar"
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir2/wasm32.rs b/src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir2/wasm32.rs
new file mode 100644
index 000000000..0f8c0a3a7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir2/wasm32.rs
@@ -0,0 +1,6 @@
+fn
+ wasm32
+ () -> &str
+{
+ "wasm32"
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir3/wasm32.rs b/src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir3/wasm32.rs
new file mode 100644
index 000000000..0f8c0a3a7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/dir/dir1/dir3/wasm32.rs
@@ -0,0 +1,6 @@
+fn
+ wasm32
+ () -> &str
+{
+ "wasm32"
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/foo.rs b/src/tools/rustfmt/tests/source/cfg_mod/foo.rs
new file mode 100644
index 000000000..de4ce55ef
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/foo.rs
@@ -0,0 +1,4 @@
+fn foo( )
+ -> &str {
+ "foo"
+}
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/mod.rs b/src/tools/rustfmt/tests/source/cfg_mod/mod.rs
new file mode 100644
index 000000000..45ba86f11
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/mod.rs
@@ -0,0 +1,10 @@
+#[cfg_attr(feature = "foo", path = "foo.rs")]
+#[cfg_attr(not(feature = "foo"), path = "bar.rs")]
+mod sub_mod;
+
+#[cfg_attr(target_arch = "wasm32", path = "dir/dir1/dir2/wasm32.rs")]
+#[cfg_attr(not(target_arch = "wasm32"), path = "dir/dir1/dir3/wasm32.rs")]
+mod wasm32;
+
+#[some_attr(path = "somewhere.rs")]
+mod other;
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/other.rs b/src/tools/rustfmt/tests/source/cfg_mod/other.rs
new file mode 100644
index 000000000..0b5c04d21
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/other.rs
@@ -0,0 +1 @@
+fn other() -> &str { "other"}
diff --git a/src/tools/rustfmt/tests/source/cfg_mod/wasm32.rs b/src/tools/rustfmt/tests/source/cfg_mod/wasm32.rs
new file mode 100644
index 000000000..3741e53fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/cfg_mod/wasm32.rs
@@ -0,0 +1,4 @@
+fn
+ wasm32() -> &str {
+ "wasm32"
+ }
diff --git a/src/tools/rustfmt/tests/source/chains-visual.rs b/src/tools/rustfmt/tests/source/chains-visual.rs
new file mode 100644
index 000000000..20a96311e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/chains-visual.rs
@@ -0,0 +1,158 @@
+// rustfmt-indent_style: Visual
+// Test chain formatting.
+
+fn main() {
+ // Don't put chains on a single line if it wasn't so in source.
+ let a = b .c
+ .d.1
+ .foo(|x| x + 1);
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc
+ .ddddddddddddddddddddddddddd();
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc.ddddddddddddddddddddddddddd.eeeeeeee();
+
+ // Test case where first chain element isn't a path, but is shorter than
+ // the size of a tab.
+ x()
+ .y(|| match cond() { true => (), false => () });
+
+ loong_func()
+ .quux(move || if true {
+ 1
+ } else {
+ 2
+ });
+
+ some_fuuuuuuuuunction()
+ .method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ some_fuuuuuuuuunction().method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ }).method_call_b(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ fffffffffffffffffffffffffffffffffff(a,
+ {
+ SCRIPT_TASK_ROOT
+ .with(|root| {
+ *root.borrow_mut() = Some(&script_task);
+ });
+ });
+
+ let suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuum = xxxxxxx
+ .map(|x| x + 5)
+ .map(|x| x / 2)
+ .fold(0, |acc, x| acc + x);
+
+ aaaaaaaaaaaaaaaa.map(|x| {
+ x += 1;
+ x
+ }).filter(some_mod::some_filter)
+}
+
+fn floaters() {
+ let z = Foo {
+ field1: val1,
+ field2: val2,
+ };
+
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }.method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // params are 1-indexed
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+
+ if cond { some(); } else { none(); }
+ .bar()
+ .baz();
+
+ Foo { x: val } .baz(|| { force(); multiline(); }) .quux();
+
+ Foo { y: i_am_multi_line, z: ok }
+ .baz(|| {
+ force(); multiline();
+ })
+ .quux();
+
+ a + match x { true => "yay!", false => "boo!" }.bar()
+}
+
+fn is_replaced_content() -> bool {
+ constellat.send(ConstellationMsg::ViewportConstrained(
+ self.id, constraints)).unwrap();
+}
+
+fn issue587() {
+ a.b::<()>(c);
+
+ std::mem::transmute(dl.symbol::<()>("init").unwrap())
+}
+
+fn issue_1389() {
+ let names = String::from_utf8(names)?.split('|').map(str::to_owned).collect();
+}
+
+fn issue1217() -> Result<Mnemonic, Error> {
+let random_chars: String = OsRng::new()?
+ .gen_ascii_chars()
+ .take(self.bit_length)
+ .collect();
+
+ Ok(Mnemonic::new(&random_chars))
+}
+
+fn issue1236(options: Vec<String>) -> Result<Option<String>> {
+let process = Command::new("dmenu").stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .spawn()
+ .chain_err(|| "failed to spawn dmenu")?;
+}
+
+fn issue1434() {
+ for _ in 0..100 {
+ let prototype_id = PrototypeIdData::from_reader::<_, B>(&mut self.file_cursor).chain_err(|| {
+ format!("could not read prototype ID at offset {:#010x}",
+ current_offset)
+ })?;
+ }
+}
+
+fn issue2264() {
+ {
+ something.function()
+ .map(|| {
+ if let a_very_very_very_very_very_very_very_very_long_variable =
+ compute_this_variable()
+ {
+ println!("Hello");
+ }
+ })
+ .collect();
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/chains.rs b/src/tools/rustfmt/tests/source/chains.rs
new file mode 100644
index 000000000..c77f5bac4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/chains.rs
@@ -0,0 +1,266 @@
+// rustfmt-use_small_heuristics: Off
+// Test chain formatting.
+
+fn main() {
+ let a = b .c
+ .d.1
+ .foo(|x| x + 1);
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc
+ .ddddddddddddddddddddddddddd();
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc.ddddddddddddddddddddddddddd.eeeeeeee();
+
+ let f = fooooooooooooooooooooooooooooooooooooooooooooooooooo.baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaar;
+
+ // Test case where first chain element isn't a path, but is shorter than
+ // the size of a tab.
+ x()
+ .y(|| match cond() { true => (), false => () });
+
+ loong_func()
+ .quux(move || if true {
+ 1
+ } else {
+ 2
+ });
+
+ some_fuuuuuuuuunction()
+ .method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ some_fuuuuuuuuunction().method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ }).method_call_b(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ fffffffffffffffffffffffffffffffffff(a,
+ {
+ SCRIPT_TASK_ROOT
+ .with(|root| {
+ *root.borrow_mut() = Some(&script_task);
+ });
+ });
+
+ let suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuum = xxxxxxx
+ .map(|x| x + 5)
+ .map(|x| x / 2)
+ .fold(0, |acc, x| acc + x);
+
+ body.fold(Body::new(), |mut body, chunk| {
+ body.extend(chunk);
+ Ok(body)
+ }).and_then(move |body| {
+ let req = Request::from_parts(parts, body);
+ f(req).map_err(|_| io::Error::new(io::ErrorKind::Other, ""))
+ });
+
+ aaaaaaaaaaaaaaaa.map(|x| {
+ x += 1;
+ x
+ }).filter(some_mod::some_filter)
+}
+
+fn floaters() {
+ let z = Foo {
+ field1: val1,
+ field2: val2,
+ };
+
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }.method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // params are 1-indexed
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+
+ if cond { some(); } else { none(); }
+ .bar()
+ .baz();
+
+ Foo { x: val } .baz(|| { force(); multiline(); }) .quux();
+
+ Foo { y: i_am_multi_line, z: ok }
+ .baz(|| {
+ force(); multiline();
+ })
+ .quux();
+
+ a + match x { true => "yay!", false => "boo!" }.bar()
+}
+
+fn is_replaced_content() -> bool {
+ constellat.send(ConstellationMsg::ViewportConstrained(
+ self.id, constraints)).unwrap();
+}
+
+fn issue587() {
+ a.b::<()>(c);
+
+ std::mem::transmute(dl.symbol::<()>("init").unwrap())
+}
+
+fn try_shorthand() {
+ let x = expr?;
+ let y = expr.kaas()?.test();
+ let loooooooooooooooooooooooooooooooooooooooooong = does_this?.look?.good?.should_we_break?.after_the_first_question_mark?;
+ let yyyy = expr?.another?.another?.another?.another?.another?.another?.another?.another?.test();
+ let zzzz = expr?.another?.another?.another?.another?;
+ let aaa = x ???????????? ?????????????? ???? ????? ?????????????? ????????? ?????????????? ??;
+
+ let y = a.very .loooooooooooooooooooooooooooooooooooooong() .chain()
+ .inside() .weeeeeeeeeeeeeee()? .test() .0
+ .x;
+
+ parameterized(f,
+ substs,
+ def_id,
+ Ns::Value,
+ &[],
+ |tcx| tcx.lookup_item_type(def_id).generics)?;
+ fooooooooooooooooooooooooooo()?.bar()?.baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaz()?;
+}
+
+fn issue_1004() {
+ match *self {
+ ty::ImplOrTraitItem::MethodTraitItem(ref i) => write!(f, "{:?}", i),
+ ty::ImplOrTraitItem::ConstTraitItem(ref i) => write!(f, "{:?}", i),
+ ty::ImplOrTraitItem::TypeTraitItem(ref i) => write!(f, "{:?}", i),
+ }
+ ?;
+
+ ty::tls::with(|tcx| {
+ let tap = ty::Binder(TraitAndProjections(principal, projections));
+ in_binder(f, tcx, &ty::Binder(""), Some(tap))
+ })
+ ?;
+}
+
+fn issue1392() {
+ test_method(r#"
+ if foo {
+ a();
+ }
+ else {
+ b();
+ }
+ "#.trim());
+}
+
+// #2067
+impl Settings {
+ fn save(&self) -> Result<()> {
+ let mut file = File::create(&settings_path).chain_err(|| ErrorKind::WriteError(settings_path.clone()))?;
+ }
+}
+
+fn issue2126() {
+ {
+ {
+ {
+ {
+ {
+ let x = self.span_from(sub_span.expect("No span found for struct arant variant"));
+ self.sspanpan_from_span(sub_span.expect("No span found for struct variant"));
+ let x = self.spanpan_from_span(sub_span.expect("No span found for struct variant"))?;
+ }
+ }
+ }
+ }
+ }
+}
+
+// #2200
+impl Foo {
+ pub fn from_ast(diagnostic: &::errors::Handler,
+ attrs: &[ast::Attribute]) -> Attributes {
+ let other_attrs = attrs.iter().filter_map(|attr| {
+ attr.with_desugared_doc(|attr| {
+ if attr.check_name("doc") {
+ if let Some(mi) = attr.meta() {
+ if let Some(value) = mi.value_str() {
+ doc_strings.push(DocFragment::Include(line,
+ attr.span,
+ filename,
+ contents));
+ }
+ }
+ }
+ })
+ }).collect();
+ }
+}
+
+// #2415
+// Avoid orphan in chain
+fn issue2415() {
+ let base_url = (|| {
+ // stuff
+
+ Ok((|| {
+ // stuff
+ Some(value.to_string())
+ })()
+ .ok_or("")?)
+ })()
+ .unwrap_or_else(|_: Box<::std::error::Error>| String::from(""));
+}
+
+impl issue_2786 {
+ fn thing(&self) {
+ foo(|a| {
+ println!("a");
+ println!("b");
+ }).bar(|c| {
+ println!("a");
+ println!("b");
+ })
+ .baz(|c| {
+ println!("a");
+ println!("b");
+ })
+ }
+}
+
+fn issue_2773() {
+ let bar = Some(0);
+ bar.or_else(|| {
+ // do stuff
+ None
+ }).or_else(|| {
+ // do other stuff
+ None
+ })
+ .and_then(|val| {
+ // do this stuff
+ None
+ });
+}
+
+fn issue_3034() {
+ disallowed_headers.iter().any(|header| *header == name) ||
+ disallowed_header_prefixes.iter().any(|prefix| name.starts_with(prefix))
+}
diff --git a/src/tools/rustfmt/tests/source/chains_with_comment.rs b/src/tools/rustfmt/tests/source/chains_with_comment.rs
new file mode 100644
index 000000000..91160711b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/chains_with_comment.rs
@@ -0,0 +1,121 @@
+// Chains with comment.
+
+fn main() {
+ let x = y // comment
+ .z;
+
+ foo // foo
+ // comment after parent
+ .x
+ .y
+ // comment 1
+ .bar() // comment after bar()
+ // comment 2
+ .foobar
+ // comment after
+ // comment 3
+ .baz(x, y, z);
+
+ self.rev_dep_graph
+ .iter()
+ // Remove nodes that are not dirty
+ .filter(|&(unit, _)| dirties.contains(&unit))
+ // Retain only dirty dependencies of the ones that are dirty
+ .map(|(k, deps)| {
+ (
+ k.clone(),
+ deps.iter()
+ .cloned()
+ .filter(|d| dirties.contains(&d))
+ .collect(),
+ )
+ });
+
+ let y = expr /* comment */.kaas()?
+// comment
+ .test();
+ let loooooooooooooooooooooooooooooooooooooooooong = does_this?.look?.good?.should_we_break?.after_the_first_question_mark?;
+ let zzzz = expr? // comment after parent
+// comment 0
+.another??? // comment 1
+.another???? // comment 2
+.another? // comment 3
+.another?;
+
+ let y = a.very .loooooooooooooooooooooooooooooooooooooong() /* comment */ .chain()
+ .inside() /* comment */ .weeeeeeeeeeeeeee()? .test() .0
+ .x;
+
+ parameterized(f,
+ substs,
+ def_id,
+ Ns::Value,
+ &[],
+ |tcx| tcx.lookup_item_type(def_id).generics)?;
+ fooooooooooooooooooooooooooo()?.bar()?.baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaz()?;
+
+ // #2559
+ App::new("cargo-cache")
+.version(crate_version!())
+.bin_name("cargo")
+.about("Manage cargo cache")
+.author("matthiaskrgr")
+.subcommand(
+SubCommand::with_name("cache")
+.version(crate_version!())
+.bin_name("cargo-cache")
+.about("Manage cargo cache")
+.author("matthiaskrgr")
+.arg(&list_dirs)
+.arg(&remove_dir)
+.arg(&gc_repos)
+.arg(&info)
+.arg(&keep_duplicate_crates) .arg(&dry_run)
+.arg(&auto_clean)
+.arg(&auto_clean_expensive),
+ ) // subcommand
+ .arg(&list_dirs);
+}
+
+// #2177
+impl Foo {
+ fn dirty_rev_dep_graph(
+ &self,
+ dirties: &HashSet<UnitKey>,
+ ) -> HashMap<UnitKey, HashSet<UnitKey>> {
+ let dirties = self.transitive_dirty_units(dirties);
+ trace!("transitive_dirty_units: {:?}", dirties);
+
+ self.rev_dep_graph.iter()
+ // Remove nodes that are not dirty
+ .filter(|&(unit, _)| dirties.contains(&unit))
+ // Retain only dirty dependencies of the ones that are dirty
+ .map(|(k, deps)| (k.clone(), deps.iter().cloned().filter(|d| dirties.contains(&d)).collect()))
+ }
+}
+
+// #2907
+fn foo() {
+ let x = foo
+ .bar?? ? // comment
+ .baz;
+ let x = foo
+ .bar? ??
+ // comment
+ .baz;
+ let x = foo
+ .bar? ? ? // comment
+ // comment
+ .baz;
+ let x = foo
+ .bar? ?? // comment
+ // comment
+ ? ??
+ // comment
+ ? ??
+ // comment
+ ???
+ // comment
+ ? ? ?
+ .baz;
+}
diff --git a/src/tools/rustfmt/tests/source/closure-block-inside-macro.rs b/src/tools/rustfmt/tests/source/closure-block-inside-macro.rs
new file mode 100644
index 000000000..b3ddfb512
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/closure-block-inside-macro.rs
@@ -0,0 +1,9 @@
+// #1547
+fuzz_target!(|data: &[u8]| if let Some(first) = data.first() {
+ let index = *first as usize;
+ if index >= ENCODINGS.len() {
+ return;
+ }
+ let encoding = ENCODINGS[index];
+ dispatch_test(encoding, &data[1..]);
+});
diff --git a/src/tools/rustfmt/tests/source/closure.rs b/src/tools/rustfmt/tests/source/closure.rs
new file mode 100644
index 000000000..b2d28b305
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/closure.rs
@@ -0,0 +1,223 @@
+// rustfmt-normalize_comments: true
+// Closures
+
+fn main() {
+ let square = ( |i: i32 | i * i );
+
+ let commented = |/* first */ a /*argument*/, /* second*/ b: WithType /* argument*/, /* ignored */ _ |
+ (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb);
+
+ let block_body = move |xxxxxxxxxxxxxxxxxxxxxxxxxxxxx, ref yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy| {
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+ };
+
+ let loooooooooooooong_name = |field| {
+ // format comments.
+ if field.node.attrs.len() > 0 { field.node.attrs[0].span.lo()
+ } else {
+ field.span.lo()
+ }};
+
+ let unblock_me = |trivial| {
+ closure()
+ };
+
+ let empty = |arg| {};
+
+ let simple = |arg| { /* comment formatting */ foo(arg) };
+
+ let test = | | { do_something(); do_something_else(); };
+
+ let arg_test = |big_argument_name, test123| looooooooooooooooooong_function_naaaaaaaaaaaaaaaaame();
+
+ let arg_test = |big_argument_name, test123| {looooooooooooooooooong_function_naaaaaaaaaaaaaaaaame()};
+
+ let simple_closure = move || -> () {};
+
+ let closure = |input: Ty| -> Option<String> {
+ foo()
+ };
+
+ let closure_with_return_type = |aaaaaaaaaaaaaaaaaaaaaaarg1, aaaaaaaaaaaaaaaaaaaaaaarg2| -> Strong { "sup".to_owned() };
+
+ |arg1, arg2, _, _, arg3, arg4| { let temp = arg4 + arg3;
+ arg2 * arg1 - temp };
+
+ let block_body_with_comment = args.iter()
+ .map(|a| {
+ // Emitting only dep-info is possible only for final crate type, as
+ // as others may emit required metadata for dependent crate types
+ if a.starts_with("--emit") && is_final_crate_type && !self.workspace_mode {
+ "--emit=dep-info"
+ } else { a }
+ });
+
+ for<> || -> () {};
+ for< >|| -> () {};
+ for<
+> || -> () {};
+
+for< 'a
+ ,'b,
+'c > |_: &'a (), _: &'b (), _: &'c ()| -> () {};
+
+}
+
+fn issue311() {
+ let func = |x| println!("{}", x);
+
+ (func)(0.0);
+}
+
+fn issue863() {
+ let closure = |x| match x {
+ 0 => true,
+ _ => false,
+ } == true;
+}
+
+fn issue934() {
+ let hash: &Fn(&&Block) -> u64 = &|block| -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_block(block);
+ h.finish()
+ };
+
+ let hash: &Fn(&&Block) -> u64 = &|block| -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_block(block);
+ h.finish();
+ };
+}
+
+impl<'a, 'tcx: 'a> SpanlessEq<'a, 'tcx> {
+ pub fn eq_expr(&self, left: &Expr, right: &Expr) -> bool {
+ match (&left.node, &right.node) {
+ (&ExprBinary(l_op, ref ll, ref lr), &ExprBinary(r_op, ref rl, ref rr)) => {
+ l_op.node == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr) ||
+ swap_binop(l_op.node, ll, lr).map_or(false, |(l_op, ll, lr)| l_op == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr))
+ }
+ }
+ }
+}
+
+fn foo() {
+ lifetimes_iter___map(|lasdfasfd| {
+ let hi = if l.bounds.is_empty() {
+ l.lifetime.span.hi()
+ };
+ });
+}
+
+fn issue1405() {
+ open_raw_fd(fd, b'r')
+ .and_then(|file| Capture::new_raw(None, |_, err| unsafe {
+ raw::pcap_fopen_offline(file, err)
+ }));
+}
+
+fn issue1466() {
+ let vertex_buffer = frame.scope(|ctx| {
+ let buffer =
+ ctx.create_host_visible_buffer::<VertexBuffer<Vertex>>(&vertices);
+ ctx.create_device_local_buffer(buffer)
+ });
+}
+
+fn issue470() {
+ {{{
+ let explicit_arg_decls =
+ explicit_arguments.into_iter()
+ .enumerate()
+ .map(|(index, (ty, pattern))| {
+ let lvalue = Lvalue::Arg(index as u32);
+ block = this.pattern(block,
+ argument_extent,
+ hair::PatternRef::Hair(pattern),
+ &lvalue);
+ ArgDecl { ty: ty }
+ });
+ }}}
+}
+
+// #1509
+impl Foo {
+ pub fn bar(&self) {
+ Some(SomeType {
+ push_closure_out_to_100_chars: iter(otherwise_it_works_ok.into_iter().map(|f| {
+ Ok(f)
+ })),
+ })
+ }
+}
+
+fn issue1329() {
+ aaaaaaaaaaaaaaaa.map(|x| {
+ x += 1;
+ x
+ })
+ .filter
+}
+
+fn issue325() {
+ let f = || unsafe { xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx };
+}
+
+fn issue1697() {
+ Test.func_a(A_VERY_LONG_CONST_VARIABLE_NAME, move |arg1, arg2, arg3, arg4| arg1 + arg2 + arg3 + arg4)
+}
+
+fn issue1694() {
+ foooooo(|_referencefffffffff: _, _target_reference: _, _oid: _, _target_oid: _| format!("refs/pull/{}/merge", pr_id))
+}
+
+fn issue1713() {
+ rayon::join(
+ || recurse(left, is_less, pred, limit),
+ || recurse(right, is_less, Some(pivot), limit),
+ );
+
+ rayon::join(
+ 1,
+ || recurse(left, is_less, pred, limit),
+ 2,
+ || recurse(right, is_less, Some(pivot), limit),
+ );
+}
+
+fn issue2063() {
+ |ctx: Ctx<(String, String)>| -> io::Result<Response> {
+ Ok(Response::new().with_body(ctx.params.0))
+ }
+}
+
+fn issue1524() {
+ let f = |x| {{{{x}}}};
+ let f = |x| {{{x}}};
+ let f = |x| {{x}};
+ let f = |x| {x};
+ let f = |x| x;
+}
+
+fn issue2171() {
+ foo(|| unsafe {
+ if PERIPHERALS {
+ loop {}
+ } else {
+ PERIPHERALS = true;
+ }
+ })
+}
+
+fn issue2207() {
+ a.map(|_| unsafe {
+ a_very_very_very_very_very_very_very_long_function_name_or_anything_else()
+ }.to_string())
+}
+
+fn issue2262() {
+ result.init(&mut result.slave.borrow_mut(), &mut (result.strategy)()).map_err(|factory| Error {
+ factory,
+ slave: None,
+ })?;
+}
diff --git a/src/tools/rustfmt/tests/source/comment.rs b/src/tools/rustfmt/tests/source/comment.rs
new file mode 100644
index 000000000..b6ce5267f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment.rs
@@ -0,0 +1,90 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+//! Doc comment
+fn test() {
+ /*!
+ * Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam */
+
+// comment
+ // comment2
+
+ code(); /* leave this comment alone!
+ * ok? */
+
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a
+ * diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+ * viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ * Praesent et diam eget libero egestas mattis sit amet vitae augue. Nam
+ * tincidunt congue enim, ut porta lorem lacinia consectetur. Donec ut
+ * libero sed arcu vehicula ultricies a non tortor. Lorem ipsum dolor sit
+ * amet, consectetur adipiscing elit. Aenean ut gravida lorem. Ut turpis
+ * felis, pulvinar a semper sed, adipiscing id dolor. */
+
+ // Very looooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment that should be split
+
+ // println!("{:?}", rewrite_comment(subslice,
+ // false,
+ // comment_width,
+ // self.block_indent,
+ // self.config)
+ // .unwrap());
+
+ funk(); //dontchangeme
+ // or me
+
+ // #1388
+ const EXCEPTION_PATHS: &'static [&'static str] =
+ &[// std crates
+ "src/libstd/sys/", // Platform-specific code for std lives here.
+ "src/bootstrap"];
+}
+
+ /// test123
+fn doc_comment() {
+}
+
+fn chains() {
+ foo.bar(|| {
+ let x = 10;
+ /* comment */ x })
+}
+
+fn issue_1086() {
+ /**/
+}
+
+/*
+ * random comment */
+
+fn main() {/* Test */}
+
+// #1643
+fn some_fn() /* some comment */
+{
+}
+
+fn some_fn1()
+// some comment
+{
+}
+
+fn some_fn2() // some comment
+{
+}
+
+fn some_fn3() /* some comment some comment some comment some comment some comment some comment so */
+{
+}
+
+fn some_fn4()
+/* some comment some comment some comment some comment some comment some comment some comment */
+{
+}
+
+// #1603
+pub enum Foo {
+ A, // `/** **/`
+ B, // `/*!`
+ C,
+}
diff --git a/src/tools/rustfmt/tests/source/comment2.rs b/src/tools/rustfmt/tests/source/comment2.rs
new file mode 100644
index 000000000..d68bb5483
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment2.rs
@@ -0,0 +1,4 @@
+// rustfmt-wrap_comments: true
+
+/// This is a long line that angers rustfmt. Rustfmt shall deal with it swiftly and justly.
+pub mod foo {}
diff --git a/src/tools/rustfmt/tests/source/comment3.rs b/src/tools/rustfmt/tests/source/comment3.rs
new file mode 100644
index 000000000..f19a85863
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment3.rs
@@ -0,0 +1,5 @@
+// rustfmt-wrap_comments: true
+
+//! This is a long line that angers rustfmt. Rustfmt shall deal with it swiftly and justly.
+
+pub mod foo {}
diff --git a/src/tools/rustfmt/tests/source/comment4.rs b/src/tools/rustfmt/tests/source/comment4.rs
new file mode 100644
index 000000000..f53a8a4a1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment4.rs
@@ -0,0 +1,52 @@
+#![allow(dead_code)] // bar
+
+//! Doc comment
+fn test() {
+// comment
+ // comment2
+
+ code(); /* leave this comment alone!
+ * ok? */
+
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a
+ * diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+ * viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ * Praesent et diam eget libero egestas mattis sit amet vitae augue. Nam
+ * tincidunt congue enim, ut porta lorem lacinia consectetur. Donec ut
+ * libero sed arcu vehicula ultricies a non tortor. Lorem ipsum dolor sit
+ * amet, consectetur adipiscing elit. Aenean ut gravida lorem. Ut turpis
+ * felis, pulvinar a semper sed, adipiscing id dolor. */
+
+ // Very loooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment that should be split
+
+ // println!("{:?}", rewrite_comment(subslice,
+ // false,
+ // comment_width,
+ // self.block_indent,
+ // self.config)
+ // .unwrap());
+
+ funk(); //dontchangeme
+ // or me
+}
+
+ /// test123
+fn doc_comment() {
+}
+
+/*
+Regression test for issue #956
+
+(some very important text)
+*/
+
+/*
+fn debug_function() {
+ println!("hello");
+}
+// */
+
+#[link_section=".vectors"]
+#[no_mangle] // Test this attribute is preserved.
+#[cfg_attr(rustfmt, rustfmt::skip)]
+pub static ISSUE_1284: [i32; 16] = [];
diff --git a/src/tools/rustfmt/tests/source/comment5.rs b/src/tools/rustfmt/tests/source/comment5.rs
new file mode 100644
index 000000000..2835d8b25
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment5.rs
@@ -0,0 +1,14 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+//@ special comment
+//@ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec adiam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+//@
+//@foo
+fn test() {}
+
+//@@@ another special comment
+//@@@ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec adiam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+//@@@
+//@@@foo
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/source/comment6.rs b/src/tools/rustfmt/tests/source/comment6.rs
new file mode 100644
index 000000000..e5d72113c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment6.rs
@@ -0,0 +1,10 @@
+// rustfmt-wrap_comments: true
+
+// Pendant la nuit du 9 mars 1860, les nuages, se confondant avec la mer, limitaient à quelques brasses la portée de la vue.
+// Sur cette mer démontée, dont les lames déferlaient en projetant des lueurs livides, un léger bâtiment fuyait presque à sec de toile.
+
+pub mod foo {}
+
+// ゆく河の流れは絶えずして、しかももとの水にあらず。淀みに浮かぶうたかたは、かつ消えかつ結びて、久しくとどまりたるためしなし。世の中にある人とすみかと、またかくのごとし。
+
+pub mod bar {}
diff --git a/src/tools/rustfmt/tests/source/comment_crlf_newline.rs b/src/tools/rustfmt/tests/source/comment_crlf_newline.rs
new file mode 100644
index 000000000..7a65f762f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comment_crlf_newline.rs
@@ -0,0 +1,4 @@
+// rustfmt-normalize_comments: true
+/* Block comments followed by CRLF newlines should not add an extra newline at the end */
+
+/* Something else */
diff --git a/src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-not-normalized.rs b/src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-not-normalized.rs
new file mode 100644
index 000000000..b96c02802
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-not-normalized.rs
@@ -0,0 +1,129 @@
+// rustfmt-wrap_comments: true
+
+// https://github.com/rust-lang/rustfmt/issues/4909
+pub enum E {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+// Expand as needed, numbers should be ascending according to the stage
+// through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+
+}
+
+pub struct S {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+// Expand as needed, numbers should be ascending according to the stage
+// through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+}
+
+fn foo(
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo2(// Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo3(
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+
+) -> usize {
+ 5
+}
+
+fn main() {
+ let v = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v2: Vec<i32> = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v3 = vec![
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ ];
+
+ // https://github.com/rust-lang/rustfmt/issues/4430
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ }
+
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-true.rs b/src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-true.rs
new file mode 100644
index 000000000..360b83852
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/comments-in-lists/wrap-comments-true.rs
@@ -0,0 +1,130 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+// https://github.com/rust-lang/rustfmt/issues/4909
+pub enum E {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+// Expand as needed, numbers should be ascending according to the stage
+// through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+
+}
+
+pub struct S {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+// Expand as needed, numbers should be ascending according to the stage
+// through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+}
+
+fn foo(
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo2(// Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo3(
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+
+) -> usize {
+ 5
+}
+
+fn main() {
+ let v = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v2: Vec<i32> = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v3 = vec![
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ ];
+
+ // https://github.com/rust-lang/rustfmt/issues/4430
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ }
+
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion pipeline, or according to the descriptions
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/blank_lines_lower_bound/1.rs b/src/tools/rustfmt/tests/source/configs/blank_lines_lower_bound/1.rs
new file mode 100644
index 000000000..c6058a55b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/blank_lines_lower_bound/1.rs
@@ -0,0 +1,13 @@
+// rustfmt-blank_lines_lower_bound: 1
+
+fn foo() {}
+fn bar() {}
+// comment
+fn foobar() {}
+
+fn foo1() {}
+fn bar1() {}
+
+// comment
+
+fn foobar1() {}
diff --git a/src/tools/rustfmt/tests/source/configs/brace_style/fn_always_next_line.rs b/src/tools/rustfmt/tests/source/configs/brace_style/fn_always_next_line.rs
new file mode 100644
index 000000000..d3bd9ac09
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/brace_style/fn_always_next_line.rs
@@ -0,0 +1,14 @@
+// rustfmt-brace_style: AlwaysNextLine
+// Function brace style
+
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T) where T: Add + Sub + Mul + Div {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/brace_style/fn_prefer_same_line.rs b/src/tools/rustfmt/tests/source/configs/brace_style/fn_prefer_same_line.rs
new file mode 100644
index 000000000..78a449524
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/brace_style/fn_prefer_same_line.rs
@@ -0,0 +1,14 @@
+// rustfmt-brace_style: PreferSameLine
+// Function brace style
+
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T) where T: Add + Sub + Mul + Div {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/brace_style/fn_same_line_where.rs b/src/tools/rustfmt/tests/source/configs/brace_style/fn_same_line_where.rs
new file mode 100644
index 000000000..3b78932e1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/brace_style/fn_same_line_where.rs
@@ -0,0 +1,14 @@
+// rustfmt-brace_style: SameLineWhere
+// Function brace style
+
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T) where T: Add + Sub + Mul + Div {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/brace_style/item_always_next_line.rs b/src/tools/rustfmt/tests/source/configs/brace_style/item_always_next_line.rs
new file mode 100644
index 000000000..0cc19b34d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/brace_style/item_always_next_line.rs
@@ -0,0 +1,20 @@
+// rustfmt-brace_style: AlwaysNextLine
+// Item brace style
+
+enum Foo {}
+
+struct Bar {}
+
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T> where T: Eq {
+ sit: T,
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn it_works() {}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/brace_style/item_prefer_same_line.rs b/src/tools/rustfmt/tests/source/configs/brace_style/item_prefer_same_line.rs
new file mode 100644
index 000000000..4412bc869
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/brace_style/item_prefer_same_line.rs
@@ -0,0 +1,16 @@
+// rustfmt-brace_style: PreferSameLine
+// Item brace style
+
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T> where T: Eq {
+ sit: T,
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn it_works() {}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/brace_style/item_same_line_where.rs b/src/tools/rustfmt/tests/source/configs/brace_style/item_same_line_where.rs
new file mode 100644
index 000000000..b8e69147d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/brace_style/item_same_line_where.rs
@@ -0,0 +1,16 @@
+// rustfmt-brace_style: SameLineWhere
+// Item brace style
+
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T> where T: Eq {
+ sit: T,
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn it_works() {}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/chain_width/always.rs b/src/tools/rustfmt/tests/source/configs/chain_width/always.rs
new file mode 100644
index 000000000..2d16d66ae
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/chain_width/always.rs
@@ -0,0 +1,23 @@
+// rustfmt-chain_width: 1
+// setting an unachievable chain_width to always get chains
+// on separate lines
+
+struct Fluent {}
+
+impl Fluent {
+ fn blorp(&self) -> &Self {
+ self
+ }
+}
+
+fn main() {
+ let test = Fluent {};
+
+ // should be left alone
+ test.blorp();
+
+ // should be wrapped
+ test.blorp().blorp();
+ test.blorp().blorp().blorp();
+ test.blorp().blorp().blorp().blorp();
+}
diff --git a/src/tools/rustfmt/tests/source/configs/chain_width/small.rs b/src/tools/rustfmt/tests/source/configs/chain_width/small.rs
new file mode 100644
index 000000000..26f935453
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/chain_width/small.rs
@@ -0,0 +1,23 @@
+// rustfmt-chain_width: 40
+
+struct Fluent {}
+
+impl Fluent {
+ fn blorp(&self) -> &Self {
+ self
+ }
+}
+
+fn main() {
+ let test = Fluent {};
+
+ // should not be wrapped
+ test.blorp();
+ test.blorp().blorp();
+ test.blorp().blorp().blorp();
+ test.blorp().blorp().blorp().blorp();
+
+ // should be wrapped
+ test.blorp().blorp().blorp().blorp().blorp();
+ test.blorp().blorp().blorp().blorp().blorp().blorp();
+}
diff --git a/src/tools/rustfmt/tests/source/configs/chain_width/tiny.rs b/src/tools/rustfmt/tests/source/configs/chain_width/tiny.rs
new file mode 100644
index 000000000..fffc81dd5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/chain_width/tiny.rs
@@ -0,0 +1,21 @@
+// rustfmt-chain_width: 20
+
+struct Fluent {}
+
+impl Fluent {
+ fn blorp(&self) -> &Self {
+ self
+ }
+}
+
+fn main() {
+ let test = Fluent {};
+
+ // should not be wrapped
+ test.blorp();
+ test.blorp().blorp();
+
+ // should be wrapped
+ test.blorp().blorp().blorp();
+ test.blorp().blorp().blorp().blorp();
+}
diff --git a/src/tools/rustfmt/tests/source/configs/comment_width/above.rs b/src/tools/rustfmt/tests/source/configs/comment_width/above.rs
new file mode 100644
index 000000000..36187ce0a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/comment_width/above.rs
@@ -0,0 +1,7 @@
+// rustfmt-comment_width: 40
+// rustfmt-wrap_comments: true
+// Comment width
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+}
diff --git a/src/tools/rustfmt/tests/source/configs/comment_width/below.rs b/src/tools/rustfmt/tests/source/configs/comment_width/below.rs
new file mode 100644
index 000000000..abbc5930c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/comment_width/below.rs
@@ -0,0 +1,7 @@
+// rustfmt-comment_width: 80
+// rustfmt-wrap_comments: true
+// Comment width
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+}
diff --git a/src/tools/rustfmt/tests/source/configs/comment_width/ignore.rs b/src/tools/rustfmt/tests/source/configs/comment_width/ignore.rs
new file mode 100644
index 000000000..c86e71c28
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/comment_width/ignore.rs
@@ -0,0 +1,7 @@
+// rustfmt-comment_width: 40
+// rustfmt-wrap_comments: false
+// Comment width
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+}
diff --git a/src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/false.rs b/src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/false.rs
new file mode 100644
index 000000000..3b967f35a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-condense_wildcard_suffixes: false
+// Condense wildcard suffixes
+
+fn main() {
+ let (lorem, ipsum, _, _) = (1, 2, 3, 4);
+}
diff --git a/src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/true.rs b/src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/true.rs
new file mode 100644
index 000000000..3798a6b99
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/condense_wildcard_suffixes/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-condense_wildcard_suffixes: true
+// Condense wildcard suffixes
+
+fn main() {
+ let (lorem, ipsum, _, _) = (1, 2, 3, 4);
+}
diff --git a/src/tools/rustfmt/tests/source/configs/control_brace_style/always_next_line.rs b/src/tools/rustfmt/tests/source/configs/control_brace_style/always_next_line.rs
new file mode 100644
index 000000000..c4ddad9ce
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/control_brace_style/always_next_line.rs
@@ -0,0 +1,10 @@
+// rustfmt-control_brace_style: AlwaysNextLine
+// Control brace style
+
+fn main() {
+ if lorem { println!("ipsum!"); } else { println!("dolor!"); }
+ match magi {
+ Homura => "Akemi",
+ Madoka => "Kaname",
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/control_brace_style/always_same_line.rs b/src/tools/rustfmt/tests/source/configs/control_brace_style/always_same_line.rs
new file mode 100644
index 000000000..a9c699d27
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/control_brace_style/always_same_line.rs
@@ -0,0 +1,10 @@
+// rustfmt-control_brace_style: AlwaysSameLine
+// Control brace style
+
+fn main() {
+ if lorem { println!("ipsum!"); } else { println!("dolor!"); }
+ match magi {
+ Homura => "Akemi",
+ Madoka => "Kaname",
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/control_brace_style/closing_next_line.rs b/src/tools/rustfmt/tests/source/configs/control_brace_style/closing_next_line.rs
new file mode 100644
index 000000000..1a74a28f2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/control_brace_style/closing_next_line.rs
@@ -0,0 +1,10 @@
+// rustfmt-control_brace_style: ClosingNextLine
+// Control brace style
+
+fn main() {
+ if lorem { println!("ipsum!"); } else { println!("dolor!"); }
+ match magi {
+ Homura => "Akemi",
+ Madoka => "Kaname",
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/disable_all_formatting/false.rs b/src/tools/rustfmt/tests/source/configs/disable_all_formatting/false.rs
new file mode 100644
index 000000000..834ca7a3c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/disable_all_formatting/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-disable_all_formatting: false
+// Disable all formatting
+
+fn main() {
+ if lorem{println!("ipsum!");}else{println!("dolor!");}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs b/src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs
new file mode 100644
index 000000000..56955bf38
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-disable_all_formatting: true
+// Disable all formatting
+
+fn main() {
+ iflorem{println!("ipsum!");}else{println!("dolor!");}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100.rs b/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100.rs
new file mode 100644
index 000000000..515780761
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100.rs
@@ -0,0 +1,16 @@
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-doc_comment_code_block_width: 100
+
+/// ```rust
+/// impl Test {
+/// pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+/// Self::from_bytes_manual_slice(v, 0, v.len() )
+/// }
+/// }
+/// ```
+
+impl Test {
+ pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+ Self::from_bytes_manual_slice(v, 0, v.len() )
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100_greater_max_width.rs b/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100_greater_max_width.rs
new file mode 100644
index 000000000..96505c697
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/100_greater_max_width.rs
@@ -0,0 +1,17 @@
+// rustfmt-max_width: 50
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-doc_comment_code_block_width: 100
+
+/// ```rust
+/// impl Test {
+/// pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+/// Self::from_bytes_manual_slice(v, 0, v.len() )
+/// }
+/// }
+/// ```
+
+impl Test {
+ pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+ Self::from_bytes_manual_slice(v, 0, v.len() )
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/50.rs b/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/50.rs
new file mode 100644
index 000000000..2c6307951
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/doc_comment_code_block_width/50.rs
@@ -0,0 +1,16 @@
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-doc_comment_code_block_width: 50
+
+/// ```rust
+/// impl Test {
+/// pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+/// Self::from_bytes_manual_slice(v, 0, v.len() )
+/// }
+/// }
+/// ```
+
+impl Test {
+ pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+ Self::from_bytes_manual_slice(v, 0, v.len() )
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/empty_item_single_line/false.rs b/src/tools/rustfmt/tests/source/configs/empty_item_single_line/false.rs
new file mode 100644
index 000000000..9bfb2b964
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/empty_item_single_line/false.rs
@@ -0,0 +1,16 @@
+// rustfmt-empty_item_single_line: false
+// Empty impl on single line
+
+impl Lorem {
+
+}
+
+impl Ipsum {
+
+}
+
+fn lorem() {
+}
+
+fn lorem() {
+}
diff --git a/src/tools/rustfmt/tests/source/configs/empty_item_single_line/true.rs b/src/tools/rustfmt/tests/source/configs/empty_item_single_line/true.rs
new file mode 100644
index 000000000..8af8b88ff
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/empty_item_single_line/true.rs
@@ -0,0 +1,16 @@
+// rustfmt-empty_item_single_line: true
+// Empty impl on single line
+
+impl Lorem {
+
+}
+
+impl Ipsum {
+
+}
+
+fn lorem() {
+}
+
+fn lorem() {
+}
diff --git a/src/tools/rustfmt/tests/source/configs/enum_discrim_align_threshold/40.rs b/src/tools/rustfmt/tests/source/configs/enum_discrim_align_threshold/40.rs
new file mode 100644
index 000000000..796e47c38
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/enum_discrim_align_threshold/40.rs
@@ -0,0 +1,34 @@
+// rustfmt-enum_discrim_align_threshold: 40
+
+enum Standard {
+ A = 1,
+ Bcdef = 2,
+}
+
+enum NoDiscrims {
+ ThisIsAFairlyLongEnumVariantWithoutDiscrimLongerThan40,
+ A = 1,
+ ThisIsAnotherFairlyLongEnumVariantWithoutDiscrimLongerThan40,
+ Bcdef = 2,
+}
+
+enum TooLong {
+ ThisOneHasDiscrimAaaaaaaaaaaaaaaaaaaaaaChar40 = 10,
+ A = 1,
+ Bcdef = 2,
+}
+
+enum Borderline {
+ ThisOneHasDiscrimAaaaaaaaaaaaaaaaaaaaaa = 10,
+ A = 1,
+ Bcdef = 2,
+}
+
+// Live specimen from #1686
+enum LongWithSmallDiff {
+ SceneColorimetryEstimates = 0x73636F65,
+ SceneAppearanceEstimates = 0x73617065,
+ FocalPlaneColorimetryEstimates = 0x66706365,
+ ReflectionHardcopyOriginalColorimetry = 0x72686F63,
+ ReflectionPrintOutputColorimetry = 0x72706F63,
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/configs/error_on_line_overflow/false.rs b/src/tools/rustfmt/tests/source/configs/error_on_line_overflow/false.rs
new file mode 100644
index 000000000..fa70ae783
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/error_on_line_overflow/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-error_on_line_overflow: false
+// Error on line overflow
+
+fn main() {
+ let lorem_ipsum_dolor_sit_amet_consectetur_adipiscing_elit_lorem_ipsum_dolor_sit_amet_consectetur_adipiscing_elit;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/fn_args_layout/compressed.rs b/src/tools/rustfmt/tests/source/configs/fn_args_layout/compressed.rs
new file mode 100644
index 000000000..66a371c25
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/fn_args_layout/compressed.rs
@@ -0,0 +1,16 @@
+// rustfmt-fn_args_layout: Compressed
+// Function arguments density
+
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur, adipiscing: Adipiscing, elit: Elit);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur, adipiscing: Adipiscing, elit: Elit) {
+ // body
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/fn_args_layout/tall.rs b/src/tools/rustfmt/tests/source/configs/fn_args_layout/tall.rs
new file mode 100644
index 000000000..f11e86fd3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/fn_args_layout/tall.rs
@@ -0,0 +1,16 @@
+// rustfmt-fn_args_layout: Tall
+// Function arguments density
+
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur, adipiscing: Adipiscing, elit: Elit);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur, adipiscing: Adipiscing, elit: Elit) {
+ // body
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/fn_args_layout/vertical.rs b/src/tools/rustfmt/tests/source/configs/fn_args_layout/vertical.rs
new file mode 100644
index 000000000..a23cc0252
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/fn_args_layout/vertical.rs
@@ -0,0 +1,16 @@
+// rustfmt-fn_args_layout: Vertical
+// Function arguments density
+
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur, adipiscing: Adipiscing, elit: Elit);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur, adipiscing: Adipiscing, elit: Elit) {
+ // body
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/fn_single_line/false.rs b/src/tools/rustfmt/tests/source/configs/fn_single_line/false.rs
new file mode 100644
index 000000000..3d092f0c0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/fn_single_line/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-fn_single_line: false
+// Single-expression function on single line
+
+fn lorem() -> usize {
+ 42
+}
+
+fn lorem() -> usize {
+ let ipsum = 42;
+ ipsum
+}
diff --git a/src/tools/rustfmt/tests/source/configs/fn_single_line/true.rs b/src/tools/rustfmt/tests/source/configs/fn_single_line/true.rs
new file mode 100644
index 000000000..3cb0fdedf
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/fn_single_line/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-fn_single_line: true
+// Single-expression function on single line
+
+fn lorem() -> usize {
+ 42
+}
+
+fn lorem() -> usize {
+ let ipsum = 42;
+ ipsum
+}
diff --git a/src/tools/rustfmt/tests/source/configs/force_explicit_abi/false.rs b/src/tools/rustfmt/tests/source/configs/force_explicit_abi/false.rs
new file mode 100644
index 000000000..3c48f8e0c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/force_explicit_abi/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-force_explicit_abi: false
+// Force explicit abi
+
+extern {
+ pub static lorem: c_int;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/force_explicit_abi/true.rs b/src/tools/rustfmt/tests/source/configs/force_explicit_abi/true.rs
new file mode 100644
index 000000000..e5ff6cf7d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/force_explicit_abi/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-force_explicit_abi: true
+// Force explicit abi
+
+extern {
+ pub static lorem: c_int;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/force_multiline_block/false.rs b/src/tools/rustfmt/tests/source/configs/force_multiline_block/false.rs
new file mode 100644
index 000000000..b97e348e5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/force_multiline_block/false.rs
@@ -0,0 +1,22 @@
+// rustfmt-force_multiline_blocks: false
+// Option forces multiline match arm and closure bodies to be wrapped in a block
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ if ipsum {
+ println!("dolor");
+ }
+ }
+ Lorem::Dolor => println!("amet"),
+ }
+}
+
+fn main() {
+ result.and_then(|maybe_value| {
+ match maybe_value {
+ None => Err("oops"),
+ Some(value) => Ok(1),
+ }
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/configs/force_multiline_block/true.rs b/src/tools/rustfmt/tests/source/configs/force_multiline_block/true.rs
new file mode 100644
index 000000000..db9d3de46
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/force_multiline_block/true.rs
@@ -0,0 +1,18 @@
+// rustfmt-force_multiline_blocks: true
+// Option forces multiline match arm and closure bodies to be wrapped in a block
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => if ipsum {
+ println!("dolor");
+ },
+ Lorem::Dolor => println!("amet"),
+ }
+}
+
+fn main() {
+ result.and_then(|maybe_value| match maybe_value {
+ None => Err("oops"),
+ Some(value) => Ok(1),
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/configs/format_generated_files/false.rs b/src/tools/rustfmt/tests/source/configs/format_generated_files/false.rs
new file mode 100644
index 000000000..dec1e00d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_generated_files/false.rs
@@ -0,0 +1,8 @@
+// @generated
+// rustfmt-format_generated_files: false
+
+fn main()
+{
+ println!("hello, world")
+ ;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/format_generated_files/true.rs b/src/tools/rustfmt/tests/source/configs/format_generated_files/true.rs
new file mode 100644
index 000000000..a25ddc25a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_generated_files/true.rs
@@ -0,0 +1,8 @@
+// @generated
+// rustfmt-format_generated_files: true
+
+fn main()
+{
+ println!("hello, world")
+ ;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/format_macro_bodies/false.rs b/src/tools/rustfmt/tests/source/configs/format_macro_bodies/false.rs
new file mode 100644
index 000000000..d618a1ac3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_macro_bodies/false.rs
@@ -0,0 +1,7 @@
+// rustfmt-format_macro_bodies: false
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => { $a(42): $b; };
+ ($a: ident $b: ident $c: ident) => { $a=$b+$c; };
+}
+
diff --git a/src/tools/rustfmt/tests/source/configs/format_macro_bodies/true.rs b/src/tools/rustfmt/tests/source/configs/format_macro_bodies/true.rs
new file mode 100644
index 000000000..b254b82d7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_macro_bodies/true.rs
@@ -0,0 +1,7 @@
+// rustfmt-format_macro_bodies: true
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => { $a(42): $b; };
+ ($a: ident $b: ident $c: ident) => { $a=$b+$c; };
+}
+
diff --git a/src/tools/rustfmt/tests/source/configs/format_macro_matchers/false.rs b/src/tools/rustfmt/tests/source/configs/format_macro_matchers/false.rs
new file mode 100644
index 000000000..a721bb55c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_macro_matchers/false.rs
@@ -0,0 +1,7 @@
+// rustfmt-format_macro_matchers: false
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => { $a(42): $b; };
+ ($a: ident $b: ident $c: ident) => { $a=$b+$c; };
+}
+
diff --git a/src/tools/rustfmt/tests/source/configs/format_macro_matchers/true.rs b/src/tools/rustfmt/tests/source/configs/format_macro_matchers/true.rs
new file mode 100644
index 000000000..fa0442e22
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_macro_matchers/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-format_macro_matchers: true
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => { $a(42): $b; };
+ ($a: ident $b: ident $c: ident) => { $a=$b+$c; };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/format_strings/false.rs b/src/tools/rustfmt/tests/source/configs/format_strings/false.rs
new file mode 100644
index 000000000..ecca0d7d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_strings/false.rs
@@ -0,0 +1,8 @@
+// rustfmt-format_strings: false
+// rustfmt-max_width: 50
+// rustfmt-error_on_line_overflow: false
+// Force format strings
+
+fn main() {
+ let lorem = "ipsum dolor sit amet consectetur adipiscing elit lorem ipsum dolor sit";
+}
diff --git a/src/tools/rustfmt/tests/source/configs/format_strings/true.rs b/src/tools/rustfmt/tests/source/configs/format_strings/true.rs
new file mode 100644
index 000000000..337314478
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/format_strings/true.rs
@@ -0,0 +1,7 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+// Force format strings
+
+fn main() {
+ let lorem = "ipsum dolor sit amet consectetur adipiscing elit lorem ipsum dolor sit";
+}
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/One-merge_imports.rs b/src/tools/rustfmt/tests/source/configs/group_imports/One-merge_imports.rs
new file mode 100644
index 000000000..157d38579
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/One-merge_imports.rs
@@ -0,0 +1,17 @@
+// rustfmt-group_imports: One
+// rustfmt-imports_granularity: Crate
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+use std::sync::Arc;
+use alloc::vec::Vec;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/One-nested.rs b/src/tools/rustfmt/tests/source/configs/group_imports/One-nested.rs
new file mode 100644
index 000000000..109bd07e1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/One-nested.rs
@@ -0,0 +1,7 @@
+// rustfmt-group_imports: One
+mod test {
+ use crate::foo::bar;
+
+ use std::path;
+ use crate::foo::bar2;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/One-no_reorder.rs b/src/tools/rustfmt/tests/source/configs/group_imports/One-no_reorder.rs
new file mode 100644
index 000000000..f82f62c7f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/One-no_reorder.rs
@@ -0,0 +1,16 @@
+// rustfmt-group_imports: One
+// rustfmt-reorder_imports: false
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/One.rs b/src/tools/rustfmt/tests/source/configs/group_imports/One.rs
new file mode 100644
index 000000000..5ab7a9508
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/One.rs
@@ -0,0 +1,15 @@
+// rustfmt-group_imports: One
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-merge_imports.rs b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-merge_imports.rs
new file mode 100644
index 000000000..ea7f6280a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-merge_imports.rs
@@ -0,0 +1,17 @@
+// rustfmt-group_imports: StdExternalCrate
+// rustfmt-imports_granularity: Crate
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+use std::sync::Arc;
+use alloc::vec::Vec;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-nested.rs b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-nested.rs
new file mode 100644
index 000000000..08f4e07b7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-nested.rs
@@ -0,0 +1,6 @@
+// rustfmt-group_imports: StdExternalCrate
+mod test {
+ use crate::foo::bar;
+ use std::path;
+ use crate::foo::bar2;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-no_reorder.rs b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-no_reorder.rs
new file mode 100644
index 000000000..08c9a72ae
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-no_reorder.rs
@@ -0,0 +1,17 @@
+// rustfmt-group_imports: StdExternalCrate
+// rustfmt-reorder_imports: false
+
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-non_consecutive.rs b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-non_consecutive.rs
new file mode 100644
index 000000000..f239a0efa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate-non_consecutive.rs
@@ -0,0 +1,27 @@
+// rustfmt-group_imports: StdExternalCrate
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+
+
+
+
+use juniper::{FieldError, FieldResult};
+
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+extern crate uuid;
+
+
+
+
+
+use std::sync::Arc;
+
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate.rs b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate.rs
new file mode 100644
index 000000000..d49c8941e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/group_imports/StdExternalCrate.rs
@@ -0,0 +1,15 @@
+// rustfmt-group_imports: StdExternalCrate
+use chrono::Utc;
+use super::update::convert_publish_payload;
+
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/source/configs/hard_tabs/false.rs b/src/tools/rustfmt/tests/source/configs/hard_tabs/false.rs
new file mode 100644
index 000000000..bf92162b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/hard_tabs/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-hard_tabs: false
+// Hard tabs
+
+fn lorem() -> usize {
+42 // spaces before 42
+}
diff --git a/src/tools/rustfmt/tests/source/configs/hard_tabs/true.rs b/src/tools/rustfmt/tests/source/configs/hard_tabs/true.rs
new file mode 100644
index 000000000..738922a4d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/hard_tabs/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-hard_tabs: true
+// Hard tabs
+
+fn lorem() -> usize {
+42 // spaces before 42
+}
diff --git a/src/tools/rustfmt/tests/source/configs/imports_layout/merge_mixed.rs b/src/tools/rustfmt/tests/source/configs/imports_layout/merge_mixed.rs
new file mode 100644
index 000000000..477c4aa16
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/imports_layout/merge_mixed.rs
@@ -0,0 +1,6 @@
+// rustfmt-imports_indent: Block
+// rustfmt-imports_granularity: Crate
+// rustfmt-imports_layout: Mixed
+
+use std::{fmt, io, str};
+use std::str::FromStr;
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_args.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_args.rs
new file mode 100644
index 000000000..4d2d280a1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_args.rs
@@ -0,0 +1,26 @@
+// rustfmt-indent_style: Block
+// Function arguments layout
+
+fn lorem() {}
+
+fn lorem(ipsum: usize) {}
+
+fn lorem(ipsum: usize, dolor: usize, sit: usize, amet: usize, consectetur: usize, adipiscing: usize, elit: usize) {
+ // body
+}
+
+// #1441
+extern "system" {
+ pub fn GetConsoleHistoryInfo(console_history_info: *mut ConsoleHistoryInfo) -> Boooooooooooooool;
+}
+
+// rustfmt should not add trailing comma for variadic function. See #1623.
+extern "C" {
+ pub fn variadic_fn(first_parameter: FirstParameterType,
+ second_parameter: SecondParameterType,
+ ...);
+}
+
+// #1652
+fn deconstruct(foo: Bar) -> (SocketAddr, Header, Method, RequestUri, HttpVersion, AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) {
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_array.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_array.rs
new file mode 100644
index 000000000..8404f65f4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_array.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Block
+// Array layout
+
+fn main() {
+ let lorem = vec!["ipsum","dolor","sit","amet","consectetur","adipiscing","elit"];
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs
new file mode 100644
index 000000000..c82b6b8e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs
@@ -0,0 +1,133 @@
+// rustfmt-indent_style: Block
+// Function call style
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit");
+ // #1501
+ let hyper = Arc::new(Client::with_connector(HttpsConnector::new(TlsClient::new())));
+
+ // chain
+ let x = yooooooooooooo.fooooooooooooooo.baaaaaaaaaaaaar(hello, world);
+
+ // #1380
+ {
+ {
+ let creds = self.client
+ .client_credentials(&self.config.auth.oauth2.id, &self.config.auth.oauth2.secret)?;
+ }
+ }
+
+ // nesting macro and function call
+ try!(foo(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx));
+ try!(foo(try!(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)));
+}
+
+// #1521
+impl Foo {
+ fn map_pixel_to_coords(&self, point: &Vector2i, view: &View) -> Vector2f {
+ unsafe {
+ Vector2f::from_raw(ffi::sfRenderTexture_mapPixelToCoords(self.render_texture, point.raw(), view.raw()))
+ }
+ }
+}
+
+fn issue1420() {
+ given(
+ r#"
+ # Getting started
+ ...
+ "#,
+ )
+ .running(waltz)
+}
+
+// #1563
+fn query(conn: &Connection) -> Result<()> {
+ conn.query_row(
+ r#"
+ SELECT title, date
+ FROM posts,
+ WHERE DATE(date) = $1
+ "#,
+ &[],
+ |row| {
+ Post {
+ title: row.get(0),
+ date: row.get(1),
+ }
+ },
+ )?;
+
+ Ok(())
+}
+
+// #1449
+fn future_rayon_wait_1_thread() {
+ // run with only 1 worker thread; this would deadlock if we couldn't make progress
+ let mut result = None;
+ ThreadPool::new(Configuration::new().num_threads(1))
+ .unwrap()
+ .install(
+ || {
+ scope(
+ |s| {
+ use std::sync::mpsc::channel;
+ let (tx, rx) = channel();
+ let a = s.spawn_future(lazy(move || Ok::<usize, ()>(rx.recv().unwrap())));
+ // ^^^^ FIXME: why is this needed?
+ let b = s.spawn_future(a.map(|v| v + 1));
+ let c = s.spawn_future(b.map(|v| v + 1));
+ s.spawn(move |_| tx.send(20).unwrap());
+ result = Some(c.rayon_wait().unwrap());
+ },
+ );
+ },
+ );
+ assert_eq!(result, Some(22));
+}
+
+// #1494
+impl Cursor {
+ fn foo() {
+ self.cur_type()
+ .num_template_args()
+ .or_else(|| {
+ let n: c_int = unsafe { clang_Cursor_getNumTemplateArguments(self.x) };
+
+ if n >= 0 {
+ Some(n as u32)
+ } else {
+ debug_assert_eq!(n, -1);
+ None
+ }
+ })
+ .or_else(|| {
+ let canonical = self.canonical();
+ if canonical != *self {
+ canonical.num_template_args()
+ } else {
+ None
+ }
+ });
+ }
+}
+
+fn issue1581() {
+ bootstrap.checks.register(
+ "PERSISTED_LOCATIONS",
+ move || if locations2.0.inner_mut.lock().poisoned {
+ Check::new(
+ State::Error,
+ "Persisted location storage is poisoned due to a write failure",
+ )
+ } else {
+ Check::new(State::Healthy, "Persisted location storage is healthy")
+ },
+ );
+}
+
+fn issue1651() {
+ {
+ let type_list: Vec<_> = try_opt!(types.iter().map(|ty| ty.rewrite(context, shape)).collect());
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_chain.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_chain.rs
new file mode 100644
index 000000000..41d914691
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_chain.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Block
+// Chain indent
+
+fn main() {
+ let lorem = ipsum.dolor().sit().amet().consectetur().adipiscing().elite();
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_generic.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_generic.rs
new file mode 100644
index 000000000..2cf17be56
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_generic.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Block
+// Generics indent
+
+fn lorem<Ipsum: Eq = usize, Dolor: Eq = usize, Sit: Eq = usize, Amet: Eq = usize, Adipiscing: Eq = usize, Consectetur: Eq = usize, Elit: Eq = usize>(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, adipiscing: Adipiscing, consectetur: Consectetur, elit: Elit) -> T {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_struct_lit.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_struct_lit.rs
new file mode 100644
index 000000000..47a6994f4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_struct_lit.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Block
+// Struct literal-style
+
+fn main() {
+ let lorem = Lorem { ipsum: dolor, sit: amet };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/one.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/one.rs
new file mode 100644
index 000000000..6d48ea742
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/one.rs
@@ -0,0 +1,9 @@
+// rustfmt-version: One
+// rustfmt-error_on_line_overflow: false
+// rustfmt-indent_style: Block
+
+// rustfmt should not add trailing comma when rewriting macro. See #1528.
+fn a() {
+ panic!("this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:");
+ foo(a, oooptoptoptoptptooptoptoptoptptooptoptoptoptptoptoptoptoptpt());
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/two.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/two.rs
new file mode 100644
index 000000000..7a62d722c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_trailing_comma_call/two.rs
@@ -0,0 +1,9 @@
+// rustfmt-version: Two
+// rustfmt-error_on_line_overflow: false
+// rustfmt-indent_style: Block
+
+// rustfmt should not add trailing comma when rewriting macro. See #1528.
+fn a() {
+ panic!("this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:");
+ foo(a, oooptoptoptoptptooptoptoptoptptooptoptoptoptptoptoptoptoptpt());
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/block_where_pred.rs b/src/tools/rustfmt/tests/source/configs/indent_style/block_where_pred.rs
new file mode 100644
index 000000000..450491f02
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/block_where_pred.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Block
+// Where predicate indent
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq, Dolor: Eq, Sit: Eq, Amet: Eq {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/default.rs b/src/tools/rustfmt/tests/source/configs/indent_style/default.rs
new file mode 100644
index 000000000..f08f5c644
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/default.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Where style
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq, Dolor: Eq, Sit: Eq, Amet: Eq {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/rfc_where.rs b/src/tools/rustfmt/tests/source/configs/indent_style/rfc_where.rs
new file mode 100644
index 000000000..012840be2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/rfc_where.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Block
+// Where style
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq, Dolor: Eq, Sit: Eq, Amet: Eq {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_args.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_args.rs
new file mode 100644
index 000000000..5aa28a62b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_args.rs
@@ -0,0 +1,32 @@
+// rustfmt-indent_style: Visual
+// Function arguments layout
+
+fn lorem() {}
+
+fn lorem(ipsum: usize) {}
+
+fn lorem(ipsum: usize, dolor: usize, sit: usize, amet: usize, consectetur: usize, adipiscing: usize, elit: usize) {
+ // body
+}
+
+// #1922
+extern "C" {
+ pub fn LAPACKE_csytrs_rook_work(matrix_layout: c_int,
+ uplo: c_char,
+ n: lapack_int,
+ nrhs: lapack_int,
+ a: *const lapack_complex_float,
+ lda: lapack_int, ipiv: *const lapack_int,
+ b: *mut lapack_complex_float,
+ ldb: lapack_int
+ )-> lapack_int;
+
+ pub fn LAPACKE_csytrs_rook_work(matrix_layout: c_int,
+ uplo: c_char,
+ n: lapack_int,
+ nrhs: lapack_int,
+ lda: lapack_int, ipiv: *const lapack_int,
+ b: *mut lapack_complex_float,
+ ldb: lapack_int
+ ) -> lapack_int;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_array.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_array.rs
new file mode 100644
index 000000000..05bbf00b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_array.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Array layout
+
+fn main() {
+ let lorem = vec!["ipsum","dolor","sit","amet","consectetur","adipiscing","elit"];
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_call.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_call.rs
new file mode 100644
index 000000000..9a679d6bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_call.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Function call style
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit");
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_chain.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_chain.rs
new file mode 100644
index 000000000..b74948753
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_chain.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Chain indent
+
+fn main() {
+ let lorem = ipsum.dolor().sit().amet().consectetur().adipiscing().elite();
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_generics.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_generics.rs
new file mode 100644
index 000000000..1f910d32d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_generics.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Generics indent
+
+fn lorem<Ipsum: Eq = usize, Dolor: Eq = usize, Sit: Eq = usize, Amet: Eq = usize, Adipiscing: Eq = usize, Consectetur: Eq = usize, Elit: Eq = usize>(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, adipiscing: Adipiscing, consectetur: Consectetur, elit: Elit) -> T {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_struct_lit.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_struct_lit.rs
new file mode 100644
index 000000000..45538e704
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_struct_lit.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Struct literal-style
+
+fn main() {
+ let lorem = Lorem { ipsum: dolor, sit: amet };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_trailing_comma.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_trailing_comma.rs
new file mode 100644
index 000000000..9738d397d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_trailing_comma.rs
@@ -0,0 +1,7 @@
+// rustfmt-error_on_line_overflow: false
+// rustfmt-indent_style: Visual
+
+// rustfmt should not add trailing comma when rewriting macro. See #1528.
+fn a() {
+ panic!("this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:");
+}
diff --git a/src/tools/rustfmt/tests/source/configs/indent_style/visual_where_pred.rs b/src/tools/rustfmt/tests/source/configs/indent_style/visual_where_pred.rs
new file mode 100644
index 000000000..055806b68
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/indent_style/visual_where_pred.rs
@@ -0,0 +1,6 @@
+// rustfmt-indent_style: Visual
+// Where predicate indent
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq, Dolor: Eq, Sit: Eq, Amet: Eq {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/source/configs/match_arm_blocks/false.rs b/src/tools/rustfmt/tests/source/configs/match_arm_blocks/false.rs
new file mode 100644
index 000000000..53e37e13c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_arm_blocks/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-match_arm_blocks: false
+// Wrap match-arms
+
+fn main() {
+ match lorem {
+ true => foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(x),
+ false => {
+ println!("{}", sit)
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/match_arm_blocks/true.rs b/src/tools/rustfmt/tests/source/configs/match_arm_blocks/true.rs
new file mode 100644
index 000000000..a452b13cd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_arm_blocks/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-match_arm_blocks: true
+// Wrap match-arms
+
+fn main() {
+ match lorem {
+ true => foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(x),
+ false => {
+ println!("{}", sit)
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/always.rs b/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/always.rs
new file mode 100644
index 000000000..162d812d8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/always.rs
@@ -0,0 +1,27 @@
+// rustfmt-match_arm_leading_pipes: Always
+
+fn foo() {
+ match foo {
+ "foo" | "bar" => {}
+ "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ "qux" => println!("y"),
+ _ => {}
+ }
+}
+
+fn issue_3973() {
+ match foo {
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
+
+fn bar() {
+ match baz {
+ "qux" => {}
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/never.rs b/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/never.rs
new file mode 100644
index 000000000..8a68fe214
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/never.rs
@@ -0,0 +1,28 @@
+// rustfmt-match_arm_leading_pipes: Never
+
+fn foo() {
+ match foo {
+ | "foo" | "bar" => {}
+ | "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ | "qux" => println!("y"),
+ _ => {}
+ }
+}
+
+fn issue_3973() {
+ match foo {
+ | "foo"
+ | "bar" => {}
+ _ => {}
+ }
+}
+
+fn bar() {
+ match baz {
+ "qux" => {}
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/preserve.rs b/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/preserve.rs
new file mode 100644
index 000000000..5486877bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_arm_leading_pipes/preserve.rs
@@ -0,0 +1,36 @@
+// rustfmt-match_arm_leading_pipes: Preserve
+
+fn foo() {
+ match foo {
+ | "foo" | "bar" => {}
+ | "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ | "qux" => println!("y"),
+ _ => {}
+ }
+}
+
+fn issue_3973() {
+ match foo {
+ | "foo"
+ | "bar" => {}
+ _ => {}
+ }
+}
+
+fn bar() {
+ match baz {
+ "qux" => { }
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
+
+fn f(x: NonAscii) -> bool {
+ match x {
+ // foo
+ | Éfgh => true,
+ _ => false,
+ }
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/false.rs b/src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/false.rs
new file mode 100644
index 000000000..70e02955f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-match_block_trailing_comma: false
+// Match block trailing comma
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ println!("ipsum");
+ }
+ Lorem::Dolor => println!("dolor"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/true.rs b/src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/true.rs
new file mode 100644
index 000000000..b9af3d472
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/match_block_trailing_comma/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-match_block_trailing_comma: true
+// Match block trailing comma
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ println!("ipsum");
+ }
+ Lorem::Dolor => println!("dolor"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/merge_derives/true.rs b/src/tools/rustfmt/tests/source/configs/merge_derives/true.rs
new file mode 100644
index 000000000..18b8443f0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/merge_derives/true.rs
@@ -0,0 +1,46 @@
+// rustfmt-merge_derives: true
+// Merge multiple derives to a single one.
+
+#[bar]
+#[derive(Eq, PartialEq)]
+#[foo]
+#[derive(Debug)]
+#[foobar]
+#[derive(Copy, Clone)]
+pub enum Foo {}
+
+#[derive(Eq, PartialEq)]
+#[derive(Debug)]
+#[foobar]
+#[derive(Copy, Clone)]
+pub enum Bar {}
+
+#[derive(Eq, PartialEq)]
+#[derive(Debug)]
+#[derive(Copy, Clone)]
+pub enum FooBar {}
+
+mod foo {
+#[bar]
+#[derive(Eq, PartialEq)]
+#[foo]
+#[derive(Debug)]
+#[foobar]
+#[derive(Copy, Clone)]
+pub enum Foo {}
+}
+
+mod bar {
+#[derive(Eq, PartialEq)]
+#[derive(Debug)]
+#[foobar]
+#[derive(Copy, Clone)]
+pub enum Bar {}
+}
+
+mod foobar {
+#[derive(Eq, PartialEq)]
+#[derive(Debug)]
+#[derive(Copy, Clone)]
+pub enum FooBar {}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/normalize_comments/false.rs b/src/tools/rustfmt/tests/source/configs/normalize_comments/false.rs
new file mode 100644
index 000000000..488962ed9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/normalize_comments/false.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_comments: false
+// Normalize comments
+
+// Lorem ipsum:
+fn dolor() -> usize {}
+
+/* sit amet: */
+fn adipiscing() -> usize {}
+
+// #652
+////////////////////////////////////////////////////////////////////////////////
+// Basic slice extension methods
+////////////////////////////////////////////////////////////////////////////////
diff --git a/src/tools/rustfmt/tests/source/configs/normalize_comments/true.rs b/src/tools/rustfmt/tests/source/configs/normalize_comments/true.rs
new file mode 100644
index 000000000..c74a9808e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/normalize_comments/true.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_comments: true
+// Normalize comments
+
+// Lorem ipsum:
+fn dolor() -> usize {}
+
+/* sit amet: */
+fn adipiscing() -> usize {}
+
+// #652
+////////////////////////////////////////////////////////////////////////////////
+// Basic slice extension methods
+////////////////////////////////////////////////////////////////////////////////
diff --git a/src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/false.rs b/src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/false.rs
new file mode 100644
index 000000000..f8eb64273
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/false.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_doc_attributes: false
+// Normalize doc attributes
+
+#![doc = " Example documentation"]
+
+#[doc = " Example item documentation"]
+pub enum Foo {}
+
+#[doc = " Lots of space"]
+pub enum Bar {}
+
+#[doc = "no leading space"]
+pub mod FooBar {}
diff --git a/src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/true.rs b/src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/true.rs
new file mode 100644
index 000000000..894c00a4d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/normalize_doc_attributes/true.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_doc_attributes: true
+// Normalize doc attributes
+
+#![doc = " Example documentation"]
+
+#[doc = " Example item documentation"]
+pub enum Foo {}
+
+#[doc = " Lots of space"]
+pub enum Bar {}
+
+#[doc = "no leading space"]
+pub mod FooBar {}
diff --git a/src/tools/rustfmt/tests/source/configs/remove_nested_parens/remove_nested_parens.rs b/src/tools/rustfmt/tests/source/configs/remove_nested_parens/remove_nested_parens.rs
new file mode 100644
index 000000000..87aed09c1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/remove_nested_parens/remove_nested_parens.rs
@@ -0,0 +1,5 @@
+// rustfmt-remove_nested_parens: true
+
+fn main() {
+ ((((((foo()))))));
+}
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_impl_items/false.rs b/src/tools/rustfmt/tests/source/configs/reorder_impl_items/false.rs
new file mode 100644
index 000000000..beb99f0fb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_impl_items/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-reorder_impl_items: false
+
+struct Dummy;
+
+impl Iterator for Dummy {
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+
+ type Item = i32;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_impl_items/true.rs b/src/tools/rustfmt/tests/source/configs/reorder_impl_items/true.rs
new file mode 100644
index 000000000..612b1c84a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_impl_items/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-reorder_impl_items: true
+
+struct Dummy;
+
+impl Iterator for Dummy {
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+
+ type Item = i32;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_imports/false.rs b/src/tools/rustfmt/tests/source/configs/reorder_imports/false.rs
new file mode 100644
index 000000000..4b85684dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_imports/false.rs
@@ -0,0 +1,7 @@
+// rustfmt-reorder_imports: false
+// Reorder imports
+
+use lorem;
+use ipsum;
+use dolor;
+use sit;
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_imports/true.rs b/src/tools/rustfmt/tests/source/configs/reorder_imports/true.rs
new file mode 100644
index 000000000..2a40f6d06
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_imports/true.rs
@@ -0,0 +1,19 @@
+// rustfmt-reorder_imports: true
+// Reorder imports
+
+use lorem;
+use ipsum;
+use dolor;
+use sit;
+
+fn foo() {
+ use C;
+ use B;
+ use A;
+
+ bar();
+
+ use F;
+ use E;
+ use D;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_modules/dolor/mod.rs b/src/tools/rustfmt/tests/source/configs/reorder_modules/dolor/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_modules/dolor/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_modules/false.rs b/src/tools/rustfmt/tests/source/configs/reorder_modules/false.rs
new file mode 100644
index 000000000..56b1aa03e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_modules/false.rs
@@ -0,0 +1,7 @@
+// rustfmt-reorder_modules: false
+// Reorder modules
+
+mod lorem;
+mod ipsum;
+mod dolor;
+mod sit;
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_modules/ipsum/mod.rs b/src/tools/rustfmt/tests/source/configs/reorder_modules/ipsum/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_modules/ipsum/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_modules/lorem/mod.rs b/src/tools/rustfmt/tests/source/configs/reorder_modules/lorem/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_modules/lorem/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_modules/sit/mod.rs b/src/tools/rustfmt/tests/source/configs/reorder_modules/sit/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_modules/sit/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/source/configs/reorder_modules/true.rs b/src/tools/rustfmt/tests/source/configs/reorder_modules/true.rs
new file mode 100644
index 000000000..79b0ab1e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/reorder_modules/true.rs
@@ -0,0 +1,7 @@
+// rustfmt-reorder_modules: true
+// Reorder modules
+
+mod lorem;
+mod ipsum;
+mod dolor;
+mod sit;
diff --git a/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/10.rs b/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/10.rs
new file mode 100644
index 000000000..7d0d70919
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/10.rs
@@ -0,0 +1,11 @@
+// rustfmt-short_array_element_width_threshold: 10
+
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000,
+ 0xaaaaaaaaaaaaaaaa,
+ 0xbbbbbbbbbbbbbbbb,
+ 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/20.rs b/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/20.rs
new file mode 100644
index 000000000..8a93a51d6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/20.rs
@@ -0,0 +1,11 @@
+// rustfmt-short_array_element_width_threshold: 20
+
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000,
+ 0xaaaaaaaaaaaaaaaa,
+ 0xbbbbbbbbbbbbbbbb,
+ 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/greater_than_max_width.rs b/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/greater_than_max_width.rs
new file mode 100644
index 000000000..710b6fe7c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/short_array_element_width_threshold/greater_than_max_width.rs
@@ -0,0 +1,12 @@
+// rustfmt-max_width: 20
+// rustfmt-short_array_element_width_threshold: 30
+
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000,
+ 0xaaaaaaaaaaaaaaaa,
+ 0xbbbbbbbbbbbbbbbb,
+ 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+}
diff --git a/src/tools/rustfmt/tests/source/configs/skip_children/foo/mod.rs b/src/tools/rustfmt/tests/source/configs/skip_children/foo/mod.rs
new file mode 100644
index 000000000..d7ff6cdb8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/skip_children/foo/mod.rs
@@ -0,0 +1,3 @@
+fn skip_formatting_this() {
+ println ! ( "Skip this" ) ;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/skip_children/true.rs b/src/tools/rustfmt/tests/source/configs/skip_children/true.rs
new file mode 100644
index 000000000..e51889dd4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/skip_children/true.rs
@@ -0,0 +1,4 @@
+// rustfmt-skip_children: true
+
+mod foo ;
+mod void;
diff --git a/src/tools/rustfmt/tests/source/configs/space_before_colon/true.rs b/src/tools/rustfmt/tests/source/configs/space_before_colon/true.rs
new file mode 100644
index 000000000..0a5976025
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/space_before_colon/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-space_before_colon: true
+// Space before colon
+
+fn lorem<T : Eq>(t : T) {
+ let ipsum: Dolor = sit;
+}
+
+const LOREM : Lorem = Lorem {
+ ipsum : dolor,
+ sit : amet,
+};
diff --git a/src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs b/src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs
new file mode 100644
index 000000000..1878c68a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs
@@ -0,0 +1,34 @@
+// rustfmt-spaces_around_ranges: false
+// Spaces around ranges
+
+fn main() {
+ let lorem = 0 .. 10;
+ let ipsum = 0 ..= 10;
+
+ match lorem {
+ 1 .. 5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1 ..= 5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1 ... 5 => foo(),
+ _ => bar,
+ }
+}
+
+fn half_open() {
+ match [5 .. 4, 99 .. 105, 43 .. 44] {
+ [_, 99 .., _] => {}
+ [_, .. 105, _] => {}
+ _ => {}
+ };
+
+ if let ..= 5 = 0 {}
+ if let .. 5 = 0 {}
+ if let 5 .. = 0 {}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs b/src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs
new file mode 100644
index 000000000..0eadfb285
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs
@@ -0,0 +1,34 @@
+// rustfmt-spaces_around_ranges: true
+// Spaces around ranges
+
+fn main() {
+ let lorem = 0..10;
+ let ipsum = 0..=10;
+
+ match lorem {
+ 1..5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1..=5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1...5 => foo(),
+ _ => bar,
+ }
+}
+
+fn half_open() {
+ match [5..4, 99..105, 43..44] {
+ [_, 99.., _] => {}
+ [_, ..105, _] => {}
+ _ => {}
+ };
+
+ if let ..=5 = 0 {}
+ if let ..5 = 0 {}
+ if let 5.. = 0 {}
+}
diff --git a/src/tools/rustfmt/tests/source/configs/struct_field_align_threshold/20.rs b/src/tools/rustfmt/tests/source/configs/struct_field_align_threshold/20.rs
new file mode 100644
index 000000000..81253c460
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/struct_field_align_threshold/20.rs
@@ -0,0 +1,383 @@
+// rustfmt-struct_field_align_threshold: 20
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-error_on_line_overflow: false
+
+struct Foo {
+ x: u32,
+ yy: u32, // comment
+ zzz: u32,
+}
+
+pub struct Bar {
+ x: u32,
+ yy: u32,
+ zzz: u32,
+
+ xxxxxxx: u32,
+}
+
+fn main() {
+ let foo = Foo {
+ x: 0,
+ yy: 1,
+ zzz: 2,
+ };
+
+ let bar = Bar {
+ x: 0,
+ yy: 1,
+ zzz: 2,
+
+ xxxxxxx: 3,
+ };
+}
+
+ /// A Doc comment
+#[AnAttribute]
+pub struct Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ #[AnAttribute]
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+ pub i: TypeForPublicField
+}
+
+// #1029
+pub struct Foo {
+ #[doc(hidden)]
+ // This will NOT get deleted!
+ bar: String, // hi
+}
+
+// #1029
+struct X {
+ // `x` is an important number.
+ #[allow(unused)] // TODO: use
+ x: u32,
+}
+
+// #410
+#[allow(missing_docs)]
+pub struct Writebatch<K: Key> {
+ #[allow(dead_code)] //only used for holding the internal pointer
+ writebatch: RawWritebatch,
+ marker: PhantomData<K>,
+}
+
+struct Bar;
+
+struct NewType(Type, OtherType);
+
+struct
+NewInt <T: Copy>(pub i32, SomeType /* inline comment */, T /* sup */
+
+
+ );
+
+struct Qux<'a,
+ N: Clone + 'a,
+ E: Clone + 'a,
+ G: Labeller<'a, N, E> + GraphWalk<'a, N, E>,
+ W: Write + Copy>
+(
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, // Comment
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+ #[AnAttr]
+ // Comment
+ /// Testdoc
+ G,
+ pub W,
+);
+
+struct Tuple(/*Comment 1*/ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ /* Comment 2 */ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,);
+
+// With a where-clause and generics.
+pub struct Foo<'a, Y: Baz>
+ where X: Whatever
+{
+ f: SomeType, // Comment beside a field
+}
+
+struct Baz {
+
+ a: A, // Comment A
+ b: B, // Comment B
+ c: C, // Comment C
+
+}
+
+struct Baz {
+ a: A, // Comment A
+
+ b: B, // Comment B
+
+
+
+
+ c: C, // Comment C
+}
+
+struct Baz {
+
+ a: A,
+
+ b: B,
+ c: C,
+
+
+
+
+ d: D
+
+}
+
+struct Baz
+{
+ // Comment A
+ a: A,
+
+ // Comment B
+b: B,
+ // Comment C
+ c: C,}
+
+// Will this be a one-liner?
+struct Tuple(
+ A, //Comment
+ B
+);
+
+pub struct State<F: FnMut() -> time::Timespec> { now: F }
+
+pub struct State<F: FnMut() -> ()> { now: F }
+
+pub struct State<F: FnMut()> { now: F }
+
+struct Palette { /// A map of indices in the palette to a count of pixels in approximately that color
+ foo: i32}
+
+// Splitting a single line comment into a block previously had a misalignment
+// when the field had attributes
+struct FieldsWithAttributes {
+ // Pre Comment
+ #[rustfmt::skip] pub host:String, // Post comment BBBBBBBBBBBBBB BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBBB BBBBBBBBBBB
+ //Another pre comment
+ #[attr1]
+ #[attr2] pub id: usize // CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCC CCCCCCCCCCCC
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep: node::Handle<IdRef<'id, Node<K, V>>,
+ Type,
+ NodeType>,
+}
+
+struct Foo<T>(T);
+struct Foo<T>(T) where T: Copy, T: Eq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUU);
+struct Foo<T>(TTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTT) where T: PartialEq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTTTT) where T: PartialEq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUU) where T: PartialEq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, // Foo
+ UUUUUUUUUUUUUUUUUUUUUUUU /* Bar */,
+ // Baz
+ TTTTTTTTTTTTTTTTTTT,
+ // Qux (FIXME #572 - doc comment)
+ UUUUUUUUUUUUUUUUUUU);
+
+mod m {
+ struct X<T> where T: Sized {
+ a: T,
+ }
+}
+
+struct Foo<T>(TTTTTTTTTTTTTTTTTTT,
+ /// Qux
+ UUUUUUUUUUUUUUUUUUU);
+
+struct Issue677 {
+ pub ptr: *const libc::c_void,
+ pub trace: fn( obj:
+ *const libc::c_void, tracer : *mut JSTracer ),
+}
+
+struct Foo {}
+struct Foo {
+ }
+struct Foo {
+ // comment
+ }
+struct Foo {
+ // trailing space ->
+
+
+ }
+struct Foo { /* comment */ }
+struct Foo( /* comment */ );
+
+struct LongStruct {
+ a: A,
+ the_quick_brown_fox_jumps_over_the_lazy_dog:AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep: node::Handle<IdRef<'id, Node<Key, Value>>,
+ Type,
+ NodeType>,
+}
+
+struct Foo<C=()>(String);
+
+// #1364
+fn foo() {
+ convex_shape.set_point(0, &Vector2f { x: 400.0, y: 100.0 });
+ convex_shape.set_point(1, &Vector2f { x: 500.0, y: 70.0 });
+ convex_shape.set_point(2, &Vector2f { x: 450.0, y: 100.0 });
+ convex_shape.set_point(3, &Vector2f { x: 580.0, y: 150.0 });
+}
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo {a: x };
+
+ Foo { a: foo() /* comment*/, /* comment*/ b: bar(), ..something };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a:Bar,
+ b:f() };
+
+ Quux { x: if cond { bar(); }, y: baz() };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item
+ };
+
+ Some(Data::MethodCallData(MethodCallData {
+ span: sub_span.unwrap(),
+ scope: self.enclosing_scope(id),
+ ref_id: def_id,
+ decl_id: Some(decl_id),
+ }));
+
+ Diagram { /* o This graph demonstrates how
+ * / \ significant whitespace is
+ * o o preserved.
+ * /|\ \
+ * o o o o */
+ graph: G, }
+}
+
+fn matcher() {
+ TagTerminatedByteMatcher {
+ matcher: ByteMatcher {
+ pattern: b"<HTML",
+ mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
+ },
+ };
+}
+
+fn issue177() {
+ struct Foo<T> { memb: T }
+ let foo = Foo::<i64> { memb: 10 };
+}
+
+fn issue201() {
+ let s = S{a:0, .. b};
+}
+
+fn issue201_2() {
+ let s = S{a: S2{ .. c}, .. b};
+}
+
+fn issue278() {
+ let s = S {
+ a: 0,
+ //
+ b: 0,
+ };
+ let s1 = S {
+ a: 0,
+ // foo
+ //
+ // bar
+ b: 0,
+ };
+}
+
+fn struct_exprs() {
+ Foo
+ { a : 1, b:f( 2)};
+ Foo{a:1,b:f(2),..g(3)};
+ LoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooongStruct { ..base };
+ IntrinsicISizesContribution { content_intrinsic_sizes: IntrinsicISizes { minimum_inline_size: 0, }, };
+}
+
+fn issue123() {
+ Foo { a: b, c: d, e: f };
+
+ Foo { a: bb, c: dd, e: ff };
+
+ Foo { a: ddddddddddddddddddddd, b: cccccccccccccccccccccccccccccccccccccc };
+}
+
+fn issue491() {
+ Foo {
+ guard: None,
+ arm: 0, // Comment
+ };
+
+ Foo {
+ arm: 0, // Comment
+ };
+
+ Foo { a: aaaaaaaaaa, b: bbbbbbbb, c: cccccccccc, d: dddddddddd, /* a comment */
+ e: eeeeeeeee };
+}
+
+fn issue698() {
+ Record {
+ ffffffffffffffffffffffffffields: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ };
+ Record {
+ ffffffffffffffffffffffffffields: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ }
+}
+
+fn issue835() {
+ MyStruct {};
+ MyStruct { /* a comment */ };
+ MyStruct {
+ // Another comment
+ };
+ MyStruct {}
+}
+
+fn field_init_shorthand() {
+ MyStruct { x, y, z };
+ MyStruct { x, y, z, .. base };
+ Foo { aaaaaaaaaa, bbbbbbbb, cccccccccc, dddddddddd, /* a comment */
+ eeeeeeeee };
+ Record { ffffffffffffffffffffffffffieldsaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/struct_lit_single_line/false.rs b/src/tools/rustfmt/tests/source/configs/struct_lit_single_line/false.rs
new file mode 100644
index 000000000..17cad8dde
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/struct_lit_single_line/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-struct_lit_single_line: false
+// Struct literal multiline-style
+
+fn main() {
+ let lorem = Lorem { ipsum: dolor, sit: amet };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/tab_spaces/2.rs b/src/tools/rustfmt/tests/source/configs/tab_spaces/2.rs
new file mode 100644
index 000000000..5c2667bc2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/tab_spaces/2.rs
@@ -0,0 +1,11 @@
+// rustfmt-tab_spaces: 2
+// rustfmt-max_width: 30
+// rustfmt-indent_style: Block
+// Tab spaces
+
+fn lorem() {
+let ipsum = dolor();
+let sit = vec![
+"amet", "consectetur", "adipiscing", "elit."
+];
+}
diff --git a/src/tools/rustfmt/tests/source/configs/tab_spaces/4.rs b/src/tools/rustfmt/tests/source/configs/tab_spaces/4.rs
new file mode 100644
index 000000000..da61bbd42
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/tab_spaces/4.rs
@@ -0,0 +1,11 @@
+// rustfmt-tab_spaces: 4
+// rustfmt-max_width: 30
+// rustfmt-indent_style: Block
+// Tab spaces
+
+fn lorem() {
+let ipsum = dolor();
+let sit = vec![
+"amet", "consectetur", "adipiscing", "elit."
+];
+}
diff --git a/src/tools/rustfmt/tests/source/configs/trailing_comma/always.rs b/src/tools/rustfmt/tests/source/configs/trailing_comma/always.rs
new file mode 100644
index 000000000..57e874cd8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/trailing_comma/always.rs
@@ -0,0 +1,7 @@
+// rustfmt-trailing_comma: Always
+// Trailing comma
+
+fn main() {
+ let Lorem { ipsum, dolor, sit, } = amet;
+ let Lorem { ipsum, dolor, sit, amet, consectetur, adipiscing } = elit;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/trailing_comma/never.rs b/src/tools/rustfmt/tests/source/configs/trailing_comma/never.rs
new file mode 100644
index 000000000..4da3b996f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/trailing_comma/never.rs
@@ -0,0 +1,23 @@
+// rustfmt-trailing_comma: Never
+// Trailing comma
+
+fn main() {
+ let Lorem { ipsum, dolor, sit, } = amet;
+ let Lorem { ipsum, dolor, sit, amet, consectetur, adipiscing } = elit;
+
+ // #1544
+ if let VrMsg::ClientReply {request_num: reply_req_num, value, ..} = msg {
+ let _ = safe_assert_eq!(reply_req_num, request_num, op);
+ return Ok((request_num, op, value));
+ }
+
+ // #1710
+ pub struct FileInput {
+ input: StringInput,
+ file_name: OsString,
+ }
+ match len {
+ Some(len) => Ok(new(self.input, self.pos + len)),
+ None => Err(self),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/configs/trailing_comma/vertical.rs b/src/tools/rustfmt/tests/source/configs/trailing_comma/vertical.rs
new file mode 100644
index 000000000..c903e8221
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/trailing_comma/vertical.rs
@@ -0,0 +1,7 @@
+// rustfmt-trailing_comma: Vertical
+// Trailing comma
+
+fn main() {
+ let Lorem { ipsum, dolor, sit, } = amet;
+ let Lorem { ipsum, dolor, sit, amet, consectetur, adipiscing } = elit;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/type_punctuation_density/compressed.rs b/src/tools/rustfmt/tests/source/configs/type_punctuation_density/compressed.rs
new file mode 100644
index 000000000..223b9a2f0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/type_punctuation_density/compressed.rs
@@ -0,0 +1,37 @@
+// rustfmt-type_punctuation_density: Compressed
+// Type punctuation density
+
+fn lorem<Ipsum:Dolor+Sit=Amet>() {
+ // body
+}
+
+struct Foo<T: Eq + Clone, U>
+where U: Eq + Clone {
+ // body
+}
+
+trait Foo<'a, T = usize>
+where T: 'a + Eq + Clone
+{
+ type Bar: Eq + Clone;
+}
+
+trait Foo: Eq + Clone {
+ // body
+}
+
+impl<T> Foo<'a> for Bar
+where for<'a> T: 'a + Eq + Clone
+{
+ // body
+}
+
+fn foo<'a, 'b, 'c>()
+where 'a: 'b + 'c
+{
+ // body
+}
+
+fn Foo<T = Foo, Output = Expr<'tcx> + Foo>() {
+ let i = 6;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/type_punctuation_density/wide.rs b/src/tools/rustfmt/tests/source/configs/type_punctuation_density/wide.rs
new file mode 100644
index 000000000..fe0c08167
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/type_punctuation_density/wide.rs
@@ -0,0 +1,37 @@
+// rustfmt-type_punctuation_density: Wide
+// Type punctuation density
+
+fn lorem<Ipsum:Dolor+Sit=Amet>() {
+ // body
+}
+
+struct Foo<T: Eq + Clone, U>
+where U: Eq + Clone {
+ // body
+}
+
+trait Foo<'a, T = usize>
+where T: 'a + Eq + Clone
+{
+ type Bar: Eq + Clone;
+}
+
+trait Foo: Eq + Clone {
+ // body
+}
+
+impl<T> Foo<'a> for Bar
+where for<'a> T: 'a + Eq + Clone
+{
+ // body
+}
+
+fn foo<'a, 'b, 'c>()
+where 'a: 'b + 'c
+{
+ // body
+}
+
+fn Foo<T = Foo, Output = Expr<'tcx> + Foo>() {
+ let i = 6;
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/false.rs b/src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/false.rs
new file mode 100644
index 000000000..4c2eb1de1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/false.rs
@@ -0,0 +1,19 @@
+// rustfmt-use_field_init_shorthand: false
+// Use field initialization shorthand if possible.
+
+fn main() {
+ let a = Foo {
+ x: x,
+ y: y,
+ z: z,
+ };
+
+ let b = Bar {
+ x: x,
+ y: y,
+ #[attr]
+ z: z,
+ #[rustfmt::skip]
+ skipped: skipped,
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/true.rs b/src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/true.rs
new file mode 100644
index 000000000..dcde28d74
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_field_init_shorthand/true.rs
@@ -0,0 +1,19 @@
+// rustfmt-use_field_init_shorthand: true
+// Use field initialization shorthand if possible.
+
+fn main() {
+ let a = Foo {
+ x: x,
+ y: y,
+ z: z,
+ };
+
+ let b = Bar {
+ x: x,
+ y: y,
+ #[attr]
+ z: z,
+ #[rustfmt::skip]
+ skipped: skipped,
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_small_heuristics/default.rs b/src/tools/rustfmt/tests/source/configs/use_small_heuristics/default.rs
new file mode 100644
index 000000000..68bc40271
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_small_heuristics/default.rs
@@ -0,0 +1,25 @@
+// rustfmt-use_small_heuristics: Default
+
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit {
+ amet: Consectetur,
+ adipiscing: Elit,
+ },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+
+ let lorem = if ipsum {
+ dolor
+ } else {
+ sit
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_small_heuristics/max.rs b/src/tools/rustfmt/tests/source/configs/use_small_heuristics/max.rs
new file mode 100644
index 000000000..8d30932e2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_small_heuristics/max.rs
@@ -0,0 +1,25 @@
+// rustfmt-use_small_heuristics: Max
+
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit {
+ amet: Consectetur,
+ adipiscing: Elit,
+ },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+
+ let lorem = if ipsum {
+ dolor
+ } else {
+ sit
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_small_heuristics/off.rs b/src/tools/rustfmt/tests/source/configs/use_small_heuristics/off.rs
new file mode 100644
index 000000000..f76392d24
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_small_heuristics/off.rs
@@ -0,0 +1,25 @@
+// rustfmt-use_small_heuristics: Off
+
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit {
+ amet: Consectetur,
+ adipiscing: Elit,
+ },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+
+ let lorem = if ipsum {
+ dolor
+ } else {
+ sit
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs b/src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs
new file mode 100644
index 000000000..de7f8b4a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-use_try_shorthand: false
+// Use try! shorthand
+
+fn main() {
+ let lorem = try!(ipsum.map(|dolor| dolor.sit()));
+}
diff --git a/src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs b/src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs
new file mode 100644
index 000000000..9015ec41e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-use_try_shorthand: true
+// Use try! shorthand
+
+fn main() {
+ let lorem = try!(ipsum.map(|dolor| dolor.sit()));
+}
diff --git a/src/tools/rustfmt/tests/source/configs/where_single_line/true.rs b/src/tools/rustfmt/tests/source/configs/where_single_line/true.rs
new file mode 100644
index 000000000..9de98283b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/where_single_line/true.rs
@@ -0,0 +1,26 @@
+// rustfmt-where_single_line: true
+// Where style
+
+
+fn lorem_two_items<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq, Lorem: Eq {
+ // body
+}
+
+fn lorem_multi_line<Ipsum, Dolor, Sit, Amet>(
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) -> T
+where
+ Ipsum: Eq,
+{
+ // body
+}
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T where Ipsum: Eq {
+ // body
+}
+
+unsafe impl Sync for Foo where (): Send {}
diff --git a/src/tools/rustfmt/tests/source/configs/wrap_comments/false.rs b/src/tools/rustfmt/tests/source/configs/wrap_comments/false.rs
new file mode 100644
index 000000000..48ecd88ac
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/wrap_comments/false.rs
@@ -0,0 +1,8 @@
+// rustfmt-wrap_comments: false
+// rustfmt-max_width: 50
+// rustfmt-error_on_line_overflow: false
+// Wrap comments
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
+}
diff --git a/src/tools/rustfmt/tests/source/configs/wrap_comments/true.rs b/src/tools/rustfmt/tests/source/configs/wrap_comments/true.rs
new file mode 100644
index 000000000..39a79a4ca
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/configs/wrap_comments/true.rs
@@ -0,0 +1,15 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 50
+// Wrap comments
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
+}
+
+fn code_block() {
+ // ```rust
+ // let x = 3;
+ //
+ // println!("x = {}", x);
+ // ```
+}
diff --git a/src/tools/rustfmt/tests/source/const_generics.rs b/src/tools/rustfmt/tests/source/const_generics.rs
new file mode 100644
index 000000000..01b764dbe
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/const_generics.rs
@@ -0,0 +1,44 @@
+struct Message {
+ field2: Vec<
+ "MessageEntity"
+ >,
+ field3: Vec<
+ 1
+ >,
+ field4: Vec<
+ 2 , 3
+ >,
+
+}
+
+struct RectangularArray<T, const WIDTH: usize, const HEIGHT: usize> {
+ array: [[T; WIDTH]; HEIGHT],
+}
+
+fn main() {
+ const X: usize = 7;
+ let x: RectangularArray<i32, 2, 4>;
+ let y: RectangularArray<i32, X, {2
+ * 2} >;
+}
+
+fn foo<const X: usize>() {
+ const Y: usize = X * 2;
+ static Z: (usize, usize) = (X, X);
+
+ struct Foo([i32; X]);
+}
+
+type Foo<const N: usize> = [i32; N + 1];
+
+pub trait Foo: Bar<{Baz::COUNT}> {
+ const ASD: usize;
+}
+
+// #4263
+fn const_generics_on_params<
+ // AAAA
+ const BBBB: usize,
+ /* CCCC */
+ const DDDD: usize,
+ >() {}
diff --git a/src/tools/rustfmt/tests/source/control-brace-style-always-next-line.rs b/src/tools/rustfmt/tests/source/control-brace-style-always-next-line.rs
new file mode 100644
index 000000000..9079fb46c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/control-brace-style-always-next-line.rs
@@ -0,0 +1,44 @@
+// rustfmt-control_brace_style: AlwaysNextLine
+
+fn main() {
+ loop {
+ ();
+ ();
+ }
+
+
+ 'label: loop // loop comment
+ {
+ ();
+ }
+
+
+ cond = true;
+ while cond {
+ ();
+ }
+
+
+ 'while_label: while cond { // while comment
+ ();
+ }
+
+
+ for obj in iter {
+ for sub_obj in obj
+ {
+ 'nested_while_label: while cond {
+ ();
+ }
+ }
+ }
+
+ match some_var { // match comment
+ pattern0 => val0,
+ pattern1 => val1,
+ pattern2 | pattern3 => {
+ do_stuff();
+ val2
+ },
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/control-brace-style-always-same-line.rs b/src/tools/rustfmt/tests/source/control-brace-style-always-same-line.rs
new file mode 100644
index 000000000..45111aaab
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/control-brace-style-always-same-line.rs
@@ -0,0 +1,42 @@
+fn main() {
+ loop {
+ ();
+ ();
+ }
+
+
+ 'label: loop // loop comment
+ {
+ ();
+ }
+
+
+ cond = true;
+ while cond {
+ ();
+ }
+
+
+ 'while_label: while cond { // while comment
+ ();
+ }
+
+
+ for obj in iter {
+ for sub_obj in obj
+ {
+ 'nested_while_label: while cond {
+ ();
+ }
+ }
+ }
+
+ match some_var { // match comment
+ pattern0 => val0,
+ pattern1 => val1,
+ pattern2 | pattern3 => {
+ do_stuff();
+ val2
+ },
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/doc-attrib.rs b/src/tools/rustfmt/tests/source/doc-attrib.rs
new file mode 100644
index 000000000..dde88c6e9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/doc-attrib.rs
@@ -0,0 +1,118 @@
+// rustfmt-wrap_comments: true
+// rustfmt-normalize_doc_attributes: true
+
+// Only doc = "" attributes should be normalized
+#![doc = " Example doc attribute comment"]
+#![doc = " Example doc attribute comment with 10 leading spaces"]
+#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/", test(attr(deny(warnings))))]
+
+
+// Long `#[doc = "..."]`
+struct A { #[doc = " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"] b: i32 }
+
+
+#[doc = " The `nodes` and `edges` method each return instantiations of `Cow<[T]>` to leave implementers the freedom to create entirely new vectors or to pass back slices into internally owned vectors."]
+struct B { b: i32 }
+
+
+#[doc = " Level 1 comment"]
+mod tests {
+ #[doc = " Level 2 comment"]
+ impl A {
+ #[doc = " Level 3 comment"]
+ fn f() {
+ #[doc = " Level 4 comment"]
+ fn g() {
+ }
+ }
+ }
+}
+
+struct C {
+ #[doc = " item doc attrib comment"]
+ // regular item comment
+ b: i32,
+
+ // regular item comment
+ #[doc = " item doc attrib comment"]
+ c: i32,
+}
+
+// non-regression test for regular attributes, from #2647
+#[cfg(feature = "this_line_is_101_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")]
+pub fn foo() {}
+
+// path attrs
+#[clippy::bar]
+#[clippy::bar(a, b, c)]
+pub fn foo() {}
+
+mod issue_2620 {
+ #[derive(Debug, StructOpt)]
+#[structopt(about = "Display information about the character on FF Logs")]
+pub struct Params {
+ #[structopt(help = "The server the character is on")]
+ server: String,
+ #[structopt(help = "The character's first name")]
+ first_name: String,
+ #[structopt(help = "The character's last name")]
+ last_name: String,
+ #[structopt(
+ short = "j",
+ long = "job",
+ help = "The job to look at",
+ parse(try_from_str)
+ )]
+ job: Option<Job>
+}
+}
+
+// non-regression test for regular attributes, from #2969
+#[cfg(not(all(feature="std",
+ any(target_os = "linux", target_os = "android",
+ target_os = "netbsd",
+ target_os = "dragonfly",
+ target_os = "haiku",
+ target_os = "emscripten",
+ target_os = "solaris",
+ target_os = "cloudabi",
+ target_os = "macos", target_os = "ios",
+ target_os = "freebsd",
+ target_os = "openbsd",
+ target_os = "redox",
+ target_os = "fuchsia",
+ windows,
+ all(target_arch = "wasm32", feature = "stdweb"),
+ all(target_arch = "wasm32", feature = "wasm-bindgen"),
+ ))))]
+type Os = NoSource;
+
+// use cases from bindgen needing precise control over leading spaces
+#[doc = " <div rustbindgen accessor></div>"]
+#[repr(C)]
+#[derive(Debug, Default, Copy, Clone)]
+pub struct ContradictAccessors {
+ #[doc = "<foo>no leading spaces here</foo>"]
+ pub mBothAccessors: ::std::os::raw::c_int,
+ #[doc = " <div rustbindgen accessor=\"false\"></div>"]
+ pub mNoAccessors: ::std::os::raw::c_int,
+ #[doc = " <div rustbindgen accessor=\"unsafe\"></div>"]
+ pub mUnsafeAccessors: ::std::os::raw::c_int,
+ #[doc = " <div rustbindgen accessor=\"immutable\"></div>"]
+ pub mImmutableAccessor: ::std::os::raw::c_int,
+}
+
+#[doc = " \\brief MPI structure"]
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct mbedtls_mpi {
+ #[doc = "< integer sign"]
+ pub s: ::std::os::raw::c_int,
+ #[doc = "< total # of limbs"]
+ pub n: ::std::os::raw::c_ulong,
+ #[doc = "< pointer to limbs"]
+ pub p: *mut mbedtls_mpi_uint,
+}
diff --git a/src/tools/rustfmt/tests/source/doc-comment-with-example.rs b/src/tools/rustfmt/tests/source/doc-comment-with-example.rs
new file mode 100644
index 000000000..e74ceefd1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/doc-comment-with-example.rs
@@ -0,0 +1,12 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// Foo
+///
+/// # Example
+/// ```
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+/// # #![cfg_attr(not(dox), no_std)]
+/// fn foo() { }
+/// ```
+///
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/source/doc.rs b/src/tools/rustfmt/tests/source/doc.rs
new file mode 100644
index 000000000..3b25918b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/doc.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+// Part of multiple.rs
+
+// sadfsdfa
+//sdffsdfasdf
diff --git a/src/tools/rustfmt/tests/source/dyn_trait.rs b/src/tools/rustfmt/tests/source/dyn_trait.rs
new file mode 100644
index 000000000..012643be9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/dyn_trait.rs
@@ -0,0 +1,20 @@
+#![feature(dyn_trait)]
+
+fn main() {
+ // #2506
+ // checks rustfmt doesn't remove dyn
+ trait MyTrait {
+ fn method(&self) -> u64;
+ }
+ fn f1(a: Box<dyn MyTrait>) {}
+
+ // checks if line wrap works correctly
+ trait Very_______________________Long__________________Name_______________________________Trait {
+ fn method(&self) -> u64;
+ }
+
+ fn f2(a: Box<dyn Very_______________________Long__________________Name____________________Trait+ 'static,>) {}
+
+ // #2582
+ let _: &dyn (::std::any::Any) = &msg;
+}
diff --git a/src/tools/rustfmt/tests/source/else-if-brace-style-always-next-line.rs b/src/tools/rustfmt/tests/source/else-if-brace-style-always-next-line.rs
new file mode 100644
index 000000000..7b4870fc6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/else-if-brace-style-always-next-line.rs
@@ -0,0 +1,54 @@
+// rustfmt-control_brace_style: AlwaysNextLine
+
+fn main() {
+ if false
+ {
+ ();
+ ();
+ }
+
+ if false // lone if comment
+ {
+ ();
+ ();
+ }
+
+
+ let a =
+ if 0 > 1 {
+ unreachable!()
+ }
+ else
+ {
+ 0x0
+ };
+
+
+ if true
+ {
+ ();
+ } else if false {
+ ();
+ ();
+ }
+ else {
+ ();
+ ();
+ ();
+ }
+
+ if true // else-if-chain if comment
+ {
+ ();
+ }
+ else if false // else-if-chain else-if comment
+ {
+ ();
+ ();
+ } else // else-if-chain else comment
+ {
+ ();
+ ();
+ ();
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/else-if-brace-style-always-same-line.rs b/src/tools/rustfmt/tests/source/else-if-brace-style-always-same-line.rs
new file mode 100644
index 000000000..37c9417ea
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/else-if-brace-style-always-same-line.rs
@@ -0,0 +1,52 @@
+fn main() {
+ if false
+ {
+ ();
+ ();
+ }
+
+ if false // lone if comment
+ {
+ ();
+ ();
+ }
+
+
+ let a =
+ if 0 > 1 {
+ unreachable!()
+ }
+ else
+ {
+ 0x0
+ };
+
+
+ if true
+ {
+ ();
+ } else if false {
+ ();
+ ();
+ }
+ else {
+ ();
+ ();
+ ();
+ }
+
+ if true // else-if-chain if comment
+ {
+ ();
+ }
+ else if false // else-if-chain else-if comment
+ {
+ ();
+ ();
+ } else // else-if-chain else comment
+ {
+ ();
+ ();
+ ();
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/else-if-brace-style-closing-next-line.rs b/src/tools/rustfmt/tests/source/else-if-brace-style-closing-next-line.rs
new file mode 100644
index 000000000..3b885b3fa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/else-if-brace-style-closing-next-line.rs
@@ -0,0 +1,54 @@
+// rustfmt-control_brace_style: ClosingNextLine
+
+fn main() {
+ if false
+ {
+ ();
+ ();
+ }
+
+ if false // lone if comment
+ {
+ ();
+ ();
+ }
+
+
+ let a =
+ if 0 > 1 {
+ unreachable!()
+ }
+ else
+ {
+ 0x0
+ };
+
+
+ if true
+ {
+ ();
+ } else if false {
+ ();
+ ();
+ }
+ else {
+ ();
+ ();
+ ();
+ }
+
+ if true // else-if-chain if comment
+ {
+ ();
+ }
+ else if false // else-if-chain else-if comment
+ {
+ ();
+ ();
+ } else // else-if-chain else comment
+ {
+ ();
+ ();
+ ();
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/empty-item-single-line-false.rs b/src/tools/rustfmt/tests/source/empty-item-single-line-false.rs
new file mode 100644
index 000000000..20c5bc83b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/empty-item-single-line-false.rs
@@ -0,0 +1,46 @@
+// rustfmt-brace_style: AlwaysNextLine
+// rustfmt-empty_item_single_line: false
+
+fn function()
+{
+
+}
+
+struct Struct
+{
+
+}
+
+enum Enum
+{
+
+}
+
+trait Trait
+{
+
+}
+
+impl<T> Trait for T
+{
+
+}
+
+trait Trait2<T>
+where
+ T: Copy + Display + Write + Read + FromStr, {}
+
+trait Trait3<T>
+where
+ T: Something
+ + SomethingElse
+ + Sync
+ + Send
+ + Display
+ + Debug
+ + Copy
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read, {}
diff --git a/src/tools/rustfmt/tests/source/empty_file.rs b/src/tools/rustfmt/tests/source/empty_file.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/empty_file.rs
diff --git a/src/tools/rustfmt/tests/source/enum.rs b/src/tools/rustfmt/tests/source/enum.rs
new file mode 100644
index 000000000..0ed9651ab
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/enum.rs
@@ -0,0 +1,212 @@
+// rustfmt-wrap_comments: true
+// Enums test
+
+#[atrr]
+pub enum Test {
+ A, B(u32,
+ A /* comment */,
+ SomeType),
+ /// Doc comment
+ C,
+}
+
+pub enum Foo<'a, Y: Baz> where X: Whatever
+{ A, }
+
+enum EmtpyWithComment {
+ // Some comment
+}
+
+// C-style enum
+enum Bar {
+ A = 1,
+ #[someAttr(test)]
+ B = 2, // comment
+ C,
+}
+
+enum LongVariants {
+First(LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG, // comment
+VARIANT),
+ // This is the second variant
+ Second
+}
+
+enum StructLikeVariants {
+ Normal(u32, String, ),
+ StructLike { x: i32, // Test comment
+ // Pre-comment
+ #[Attr50] y: SomeType, // Aanother Comment
+ }, SL { a: A }
+}
+
+enum X {
+ CreateWebGLPaintTask(Size2D<i32>, GLContextAttributes, IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>), // This is a post comment
+}
+
+pub enum EnumWithAttributes {
+ //This is a pre comment AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ TupleVar(usize, usize, usize), // AAAA AAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ // Pre Comment
+ #[rustfmt::skip]
+ SkippedItem(String,String,), // Post-comment
+ #[another_attr]
+ #[attr2]
+ ItemStruct {x: usize, y: usize}, // Comment AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ // And another
+ ForcedPreflight // AAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+}
+
+pub enum SingleTuple {
+ // Pre Comment AAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ Match(usize, usize, String) // Post-comment AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+}
+
+pub enum SingleStruct {
+ Match {name: String, loc: usize} // Post-comment
+}
+
+pub enum GenericEnum<I, T>
+where I: Iterator<Item = T> {
+ // Pre Comment
+ Left {list: I, root: T}, // Post-comment
+ Right {list: I, root: T} // Post Comment
+}
+
+
+enum EmtpyWithComment {
+ // Some comment
+}
+
+enum TestFormatFails {
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+}
+
+fn nested_enum_test() {
+ if true {
+ enum TestEnum {
+ One(usize, usize, usize, usize, usize, usize, usize, usize, usize, usize, usize, usize, usize, usize, usize, usize,), // AAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAA
+ Two // AAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAA
+ }
+ enum TestNestedFormatFail {
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ }
+ }
+}
+
+ pub struct EmtpyWithComment {
+ // FIXME: Implement this struct
+}
+
+// #1115
+pub enum Bencoding<'i> {
+ Str(&'i [u8]),
+ Int(i64),
+ List(Vec<Bencoding<'i>>),
+ /// A bencoded dict value. The first element the slice of bytes in the source that the dict is
+ /// composed of. The second is the dict, decoded into an ordered map.
+ // TODO make Dict "structlike" AKA name the two values.
+ Dict(&'i [u8], BTreeMap<&'i [u8], Bencoding<'i>>),
+}
+
+// #1261
+pub enum CoreResourceMsg {
+ SetCookieForUrl(
+ ServoUrl,
+ #[serde(deserialize_with = "::hyper_serde::deserialize",
+ serialize_with = "::hyper_serde::serialize")]
+ Cookie,
+ CookieSource
+ ),
+}
+
+enum Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong {}
+enum Looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong {}
+enum Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong {}
+enum Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong { Foo }
+
+// #1046
+pub enum Entry<'a, K: 'a, V: 'a> {
+ Vacant(
+ #[ stable( feature = "rust1", since = "1.0.0" ) ] VacantEntry<'a, K, V>,
+ ),
+ Occupied(
+ #[ stable( feature = "rust1", since = "1.0.0" ) ]
+ OccupiedEntry<'a, K, V>,
+ ),
+}
+
+// #2081
+pub enum ForegroundColor {
+ CYAN = (winapi::FOREGROUND_INTENSITY | winapi::FOREGROUND_GREEN | winapi::FOREGROUND_BLUE) as u16,
+}
+
+// #2098
+pub enum E<'a> {
+ V ( < std::slice::Iter<'a, Xxxxxxxxxxxxxx> as Iterator> :: Item ) ,
+}
+
+// #1809
+enum State {
+ TryRecv {
+ pos: usize,
+ lap: u8,
+ closed_count: usize,
+ },
+ Subscribe { pos: usize },
+ IsReady { pos: usize, ready: bool },
+ Unsubscribe {
+ pos: usize,
+ lap: u8,
+ id_woken: usize,
+ },
+ FinalTryRecv { pos: usize, id_woken: usize },
+ TimedOut,
+ Disconnected,
+}
+
+// #2190
+#[derive(Debug, Fail)]
+enum AnError {
+ #[fail(display = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")]
+ UnexpectedSingleToken { token: syn::Token },
+}
+
+// #2193
+enum WidthOf101 {
+ #[fail(display = ".....................................................")] Io(::std::io::Error),
+ #[fail(display = ".....................................................")] Ioo(::std::io::Error),
+ Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(::std::io::Error),
+ Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(::std::io::Error),
+}
+
+// #2389
+pub enum QlError {
+ #[fail(display = "Parsing error: {}", 0)] LexError(parser::lexer::LexError),
+ #[fail(display = "Parsing error: {:?}", 0)] ParseError(parser::ParseError),
+ #[fail(display = "Validation error: {:?}", 0)] ValidationError(Vec<validation::Error>),
+ #[fail(display = "Execution error: {}", 0)] ExecutionError(String),
+ // (from, to)
+ #[fail(display = "Translation error: from {} to {}", 0, 1)] TranslationError(String, String),
+ // (kind, input, expected)
+ #[fail(display = "aaaaaaaaaaaaCould not find {}: Found: {}, expected: {:?}", 0, 1, 2)] ResolveError(&'static str, String, Option<String>),
+}
+
+// #2594
+enum Foo {}
+enum Bar { }
+
+// #3562
+enum PublishedFileVisibility {
+ Public = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityPublic,
+ FriendsOnly = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityFriendsOnly,
+ Private = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityPrivate,
+}
+
+// #3771
+//#![feature(arbitrary_enum_discriminant)]
+#[repr(u32)]
+pub enum E {
+ A { a: u32 } = 0x100,
+ B { field1: u32, field2: u8, field3: m::M } = 0x300 // comment
+}
diff --git a/src/tools/rustfmt/tests/source/existential_type.rs b/src/tools/rustfmt/tests/source/existential_type.rs
new file mode 100644
index 000000000..33bb9a951
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/existential_type.rs
@@ -0,0 +1,23 @@
+// Opaque type.
+
+ #![feature(type_alias_impl_trait)]
+
+pub type Adder<F, T>
+where
+ T: Clone,
+ F: Copy
+ = impl Fn(T) -> T;
+
+pub type Adderrr<T> = impl Fn( T ) -> T;
+
+impl Foo for Bar {
+type E = impl Trait;
+}
+
+pub type Adder_without_impl<F, T>
+where
+ T: Clone,
+ F: Copy
+ = Fn(T) -> T;
+
+pub type Adderrr_without_impl<T> = Fn( T ) -> T;
diff --git a/src/tools/rustfmt/tests/source/expr-block.rs b/src/tools/rustfmt/tests/source/expr-block.rs
new file mode 100644
index 000000000..a3e6100b7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/expr-block.rs
@@ -0,0 +1,300 @@
+// Test expressions with block formatting.
+
+fn arrays() {
+ [ ];
+ let empty = [];
+
+ let foo = [a_long_name, a_very_lng_name, a_long_name];
+
+ let foo = [a_long_name, a_very_lng_name, a_long_name, a_very_lng_name, a_long_name, a_very_lng_name, a_long_name, a_very_lng_name];
+
+ vec![a_long_name, a_very_lng_name, a_long_name, a_very_lng_name, a_long_name, a_very_lng_name, a_very_lng_name];
+
+ [a_long_name, a_very_lng_name, a_long_name, a_very_lng_name, a_long_name, a_very_lng_name, a_very_lng_name]
+}
+
+fn arrays() {
+ let x = [0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 0,
+ 7,
+ 8,
+ 9,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 0];
+
+ let y = [/* comment */ 1, 2 /* post comment */, 3];
+
+ let xy = [ strukt { test123: value_one_two_three_four, turbo: coolio(), } , /* comment */ 1 ];
+
+ let a =WeightedChoice::new(&mut [Weighted {
+ weight: x,
+ item: 0,
+ },
+ Weighted {
+ weight: 1,
+ item: 1,
+ },
+ Weighted {
+ weight: x,
+ item: 2,
+ },
+ Weighted {
+ weight: 1,
+ item: 3,
+ }]);
+
+ let z = [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzz, q];
+
+ [ 1 + 3, 4 , 5, 6, 7, 7, fncall::<Vec<_>>(3-1)]
+}
+
+fn function_calls() {
+ let items = itemize_list(context.source_map,
+ args.iter(),
+ ")",
+ |item| item.span.lo(),
+ |item| item.span.hi(),
+ |item| {
+ item.rewrite(context,
+ Shape {
+ width: remaining_width,
+ ..nested_shape
+ })
+ },
+ span.lo(),
+ span.hi());
+
+ itemize_list(context.source_map,
+ args.iter(),
+ ")",
+ |item| item.span.lo(),
+ |item| item.span.hi(),
+ |item| {
+ item.rewrite(context,
+ Shape {
+ width: remaining_width,
+ ..nested_shape
+ })
+ },
+ span.lo(),
+ span.hi())
+}
+
+fn macros() {
+ baz!(do_not, add, trailing, commas, inside, of, function, like, macros, even, if_they, are, long);
+
+ baz!(one_item_macro_which_is_also_loooooooooooooooooooooooooooooooooooooooooooooooong);
+
+ let _ = match option {
+ None => baz!(function, like, macro_as, expression, which, is, loooooooooooooooong),
+ Some(p) => baz!(one_item_macro_as_expression_which_is_also_loooooooooooooooong),
+ };
+}
+
+fn issue_1450() {
+ if selfstate
+ .compare_exchandsfasdsdfgsdgsdfgsdfgsdfgsdfgsdfgfsfdsage_weak(
+ STATE_PARKED,
+ STATE_UNPARKED,
+ Release,
+ Relaxed,
+ Release,
+ Relaxed,
+ )
+ .is_ok() {
+ return;
+ }
+}
+
+fn foo() {
+ if real_total <= limit && !pre_line_comments &&
+ !items.into_iter().any(|item| item.as_ref().is_multiline()) {
+ DefinitiveListTactic::Horizontal
+ }
+}
+
+fn combine_block() {
+ foo(
+ Bar {
+ x: value,
+ y: value2,
+ },
+ );
+
+ foo((Bar {
+ x: value,
+ y: value2,
+ },));
+
+ foo((1, 2, 3, Bar {
+ x: value,
+ y: value2,
+ }));
+
+ foo((1, 2, 3, |x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ }));
+
+ let opt = Some(
+ Struct(
+ long_argument_one,
+ long_argument_two,
+ long_argggggggg,
+ ),
+ );
+
+ do_thing(
+ |param| {
+ action();
+ foo(param)
+ },
+ );
+
+ do_thing(
+ x,
+ |param| {
+ action();
+ foo(param)
+ },
+ );
+
+ do_thing(
+ x,
+ (
+ 1,
+ 2,
+ 3,
+ |param| {
+ action();
+ foo(param)
+ },
+ ),
+ );
+
+ Ok(
+ some_function(
+ lllllllllong_argument_one,
+ lllllllllong_argument_two,
+ lllllllllllllllllllllllllllllong_argument_three,
+ ),
+ );
+
+ foo(
+ thing,
+ bar(
+ param2,
+ pparam1param1param1param1param1param1param1param1param1param1aram1,
+ param3,
+ ),
+ );
+
+ foo.map_or(
+ || {
+ Ok(
+ SomeStruct {
+ f1: 0,
+ f2: 0,
+ f3: 0,
+ },
+ )
+ },
+ );
+
+ match opt {
+ Some(x) => somefunc(anotherfunc(
+ long_argument_one,
+ long_argument_two,
+ long_argument_three,
+ )),
+ Some(x) => |x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ },
+ Some(x) => (1, 2, |x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ }),
+ Some(x) => SomeStruct {
+ f1: long_argument_one,
+ f2: long_argument_two,
+ f3: long_argument_three,
+ },
+ None => Ok(SomeStruct {
+ f1: long_argument_one,
+ f2: long_argument_two,
+ f3: long_argument_three,
+ }),
+ };
+
+ match x {
+ y => func(
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ ),
+ _ => func(
+ x,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzz,
+ ),
+ }
+}
+
+fn issue_1862() {
+ foo(
+ /* bar = */ None ,
+ something_something,
+ /* baz = */ None ,
+ /* This comment waaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaay too long to be kept on the same line */ None ,
+ /* com */ this_last_arg_is_tooooooooooooooooooooooooooooooooo_long_to_be_kept_with_the_pre_comment ,
+ )
+}
+
+fn issue_3025() {
+ foo(
+ // This describes the argument below.
+ /* bar = */ None ,
+ // This describes the argument below.
+ something_something,
+ // This describes the argument below. */
+ None ,
+ // This describes the argument below.
+ /* This comment waaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaay too long to be kept on the same line */ None ,
+ // This describes the argument below.
+ /* com */ this_last_arg_is_tooooooooooooooooooooooooooooooooo_long_to_be_kept_with_the_pre_comment ,
+ )
+}
+
+fn issue_1878() {
+ let channel: &str = seq.next_element()?.ok_or_else(|| de::Error::invalid_length(2, &self))?;
+}
diff --git a/src/tools/rustfmt/tests/source/expr-overflow-delimited.rs b/src/tools/rustfmt/tests/source/expr-overflow-delimited.rs
new file mode 100644
index 000000000..cd80ca6fc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/expr-overflow-delimited.rs
@@ -0,0 +1,155 @@
+// rustfmt-overflow_delimited_expr: true
+
+fn combine_blocklike() {
+ do_thing(
+ |param| {
+ action();
+ foo(param)
+ },
+ );
+
+ do_thing(
+ x,
+ |param| {
+ action();
+ foo(param)
+ },
+ );
+
+ do_thing(
+ x,
+
+ // I'll be discussing the `action` with your para(m)legal counsel
+ |param| {
+ action();
+ foo(param)
+ },
+ );
+
+ do_thing(
+ Bar {
+ x: value,
+ y: value2,
+ },
+ );
+
+ do_thing(
+ x,
+ Bar {
+ x: value,
+ y: value2,
+ },
+ );
+
+ do_thing(
+ x,
+
+ // Let me tell you about that one time at the `Bar`
+ Bar {
+ x: value,
+ y: value2,
+ },
+ );
+
+ do_thing(
+ &[
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ x,
+ &[
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ x,
+
+ // Just admit it; my list is longer than can be folded on to one line
+ &[
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ vec![
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ x,
+ vec![
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ x,
+
+ // Just admit it; my list is longer than can be folded on to one line
+ vec![
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ x,
+ (
+ 1,
+ 2,
+ 3,
+ |param| {
+ action();
+ foo(param)
+ },
+ ),
+ );
+}
+
+fn combine_struct_sample() {
+ let identity = verify(
+ &ctx,
+ VerifyLogin {
+ type_: LoginType::Username,
+ username: args.username.clone(),
+ password: Some(args.password.clone()),
+ domain: None,
+ },
+ )?;
+}
+
+fn combine_macro_sample() {
+ rocket::ignite()
+ .mount(
+ "/",
+ routes![
+ http::auth::login,
+ http::auth::logout,
+ http::cors::options,
+ http::action::dance,
+ http::action::sleep,
+ ],
+ )
+ .launch();
+}
diff --git a/src/tools/rustfmt/tests/source/expr.rs b/src/tools/rustfmt/tests/source/expr.rs
new file mode 100644
index 000000000..21f8a4a43
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/expr.rs
@@ -0,0 +1,579 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// Test expressions
+
+fn foo() -> bool {
+ let boxed: Box<i32> = box 5;
+ let referenced = &5 ;
+
+ let very_long_variable_name = ( a + first + simple + test );
+ let very_long_variable_name = (a + first + simple + test + AAAAAAAAAAAAA + BBBBBBBBBBBBBBBBB + b + c);
+
+ let is_internalxxxx = self.source_map.span_to_filename(s) == self.source_map.span_to_filename(m.inner);
+
+ let some_val = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa * bbbb / (bbbbbb -
+ function_call(x, *very_long_pointer, y))
+ + 1000 ;
+
+some_ridiculously_loooooooooooooooooooooong_function(10000 * 30000000000 + 40000 / 1002200000000
+ - 50000 * sqrt(-1),
+ trivial_value);
+ (((((((((aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + a +
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaa))))))))) ;
+
+ { for _ in 0..10 {} }
+
+ {{{{}}}}
+
+ if 1 + 2 > 0 { let result = 5; result } else { 4};
+
+ if let Some(x) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa {
+ // Nothing
+ }
+
+ if let Some(x) = (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {}
+
+ if let (some_very_large,
+ tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple) = 1
+ + 2 + 3 {
+ }
+
+ if let (some_very_large,
+ tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple) = 1111 + 2222 {}
+
+ if let (some_very_large, tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple) = 1
+ + 2 + 3 {
+ }
+
+ if let ast::ItemKind::Trait(_, unsafety, ref generics, ref type_param_bounds, ref trait_items) = item.node
+ {
+ // nothing
+ }
+
+ let test = if true { 5 } else { 3 };
+
+ if cond() {
+ something();
+ } else if different_cond() {
+ something_else();
+ } else {
+ // Check subformatting
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ }
+
+ // #2884
+ let _ = [0; {struct Foo; impl Foo {const fn get(&self) -> usize {5}}; Foo.get()}];
+}
+
+fn bar() {
+ let range = ( 111111111 + 333333333333333333 + 1111 + 400000000000000000) .. (2222 + 2333333333333333);
+
+ let another_range = 5..some_func( a , b /* comment */);
+
+ for _ in 1 ..{ call_forever(); }
+
+ syntactically_correct(loop { sup( '?'); }, if cond { 0 } else { 1 });
+
+ let third = ..10;
+ let infi_range = .. ;
+ let foo = 1..;
+ let bar = 5 ;
+ let nonsense = (10 .. 0)..(0..10);
+
+ loop{if true {break}}
+
+ let x = (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa &&
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ a);
+}
+
+fn baz() {
+ unsafe /* {}{}{}{{{{}} */ {
+ let foo = 1u32;
+ }
+
+ unsafe /* very looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment */ {}
+
+ unsafe // So this is a very long comment.
+ // Multi-line, too.
+ // Will it still format correctly?
+ {
+ }
+
+ unsafe {
+ // Regular unsafe block
+ }
+
+ unsafe {
+ foo()
+ }
+
+ unsafe {
+ foo();
+ }
+
+ // #2289
+ let identifier_0 = unsafe { this_is_58_chars_long_and_line_is_93_chars_long_xxxxxxxxxx };
+ let identifier_1 = unsafe { this_is_59_chars_long_and_line_is_94_chars_long_xxxxxxxxxxx };
+ let identifier_2 = unsafe { this_is_65_chars_long_and_line_is_100_chars_long_xxxxxxxxxxxxxxxx };
+ let identifier_3 = unsafe { this_is_66_chars_long_and_line_is_101_chars_long_xxxxxxxxxxxxxxxxx };
+}
+
+// Test some empty blocks.
+fn qux() {
+ {}
+ // FIXME this one could be done better.
+ { /* a block with a comment */ }
+ {
+
+ }
+ {
+ // A block with a comment.
+ }
+}
+
+fn issue227() {
+ {
+ let handler = box DocumentProgressHandler::new(addr, DocumentProgressTask::DOMContentLoaded);
+ }
+}
+
+fn issue184(source: &str) {
+ for c in source.chars() {
+ if index < 'a' {
+ continue;
+ }
+ }
+}
+
+fn arrays() {
+ let x = [0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 0,
+ 7,
+ 8,
+ 9,
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 0];
+
+ let y = [/* comment */ 1, 2 /* post comment */, 3];
+
+ let xy = [ strukt { test123: value_one_two_three_four, turbo: coolio(), } , /* comment */ 1 ];
+
+ let a =WeightedChoice::new(&mut [Weighted {
+ weightweight: x,
+ item: 0,
+ },
+ Weighted {
+ weightweight: 1,
+ item: 1,
+ },
+ Weighted {
+ weightweight: x,
+ item: 2,
+ },
+ Weighted {
+ weightweight: 1,
+ item: 3,
+ }]);
+
+ let z = [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzz, q];
+
+ [ 1 + 3, 4 , 5, 6, 7, 7, fncall::<Vec<_>>(3-1)]
+}
+
+fn returns() {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa && return;
+
+ return aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa;
+}
+
+fn addrof() {
+ & mut(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa+bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb);
+ & (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa+bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb);
+
+ // raw reference operator
+ & raw const a;
+ & raw mut b;
+}
+
+fn casts() {
+ fn unpack(packed: u32) -> [u16; 2] {
+ [
+ (packed >> 16) as u16,
+ (packed >> 0) as u16,
+ ]
+ }
+
+ let some_trait_xxx = xxxxxxxxxxx + xxxxxxxxxxxxx
+ as SomeTraitXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX;
+ let slightly_longer_trait = yyyyyyyyy + yyyyyyyyyyy as SomeTraitYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY;
+}
+
+fn indices() {
+ let x = (aaaaaaaaaaaaaaaaaaaaaaaaaaaa+bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb+cccccccccccccccc) [ x + y + z ];
+ let y = (aaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccc)[ xxxxx + yyyyy + zzzzz ];
+ let z = xxxxxxxxxx.x().y().zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz()[aaaaa];
+ let z = xxxxxxxxxx.x().y().zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz()[aaaaa];
+}
+
+fn repeats() {
+ let x = [aaaaaaaaaaaaaaaaaaaaaaaaaaaa+bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb+cccccccccccccccc; x + y + z ];
+ let y = [aaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccc; xxxxx + yyyyy + zzzzz ];
+}
+
+fn blocks() {
+ if 1 + 1 == 2 {
+ println!("yay arithmetix!");
+ };
+}
+
+fn issue767() {
+ if false {
+ if false {
+ } else {
+ // A let binding here seems necessary to trigger it.
+ let _ = ();
+ }
+ } else if let false = false {
+ }
+}
+
+fn ranges() {
+ let x = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa .. bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+ let y = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa ..= bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+ let z = ..= x ;
+
+ // #1766
+ let x = [0. ..10.0];
+ let x = [0. ..=10.0];
+
+ a ..= b
+
+ // the expr below won't compile because inclusive ranges need a defined end
+ // let a = 0 ..= ;
+}
+
+fn if_else() {
+ let exact = diff /
+ (if size == 0 {
+ 1
+} else {
+ size
+});
+
+ let cx = tp1.x +
+ any * radius *
+ if anticlockwise {
+ 1.0
+ } else {
+ -1.0
+ };
+}
+
+fn complex_if_else() {
+ if let Some(x) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ } else if let Some(x) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ ha();
+ } else if xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + xxxxxxxx {
+ yo();
+ } else if let Some(x) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ ha();
+ } else if xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + xxxxxxxxx {
+ yo();
+ }
+}
+
+fn issue1106() {
+ {
+ if let hir::ItemEnum(ref enum_def, ref generics) = self.ast_map.expect_item(enum_node_id).node {
+ }
+ }
+
+ for entry in
+ WalkDir::new(path)
+ .into_iter()
+ .filter_entry(|entry| exclusions.filter_entry(entry)) {
+ }
+}
+
+fn issue1570() {
+ a_very_long_function_name({some_func(1, {1})})
+}
+
+fn issue1714() {
+ v = &mut {v}[mid..];
+ let (left, right) = {v}.split_at_mut(mid);
+}
+
+// Multi-lined index should be put on the next line if it fits in one line.
+fn issue1749() {
+ {
+ {
+ {
+ if self.shape[(r as f32 + self.x_offset) as usize][(c as f32 + self.y_offset) as usize] != 0 {
+ // hello
+ }
+ }
+ }
+ }
+}
+
+// #1172
+fn newlines_between_list_like_expr() {
+ foo(
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
+ );
+
+ vec![
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
+ ];
+
+ match x {
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx |
+
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy |
+
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz => foo(a, b, c),
+ _ => bar(),
+ };
+}
+
+fn issue2178() {
+ Ok(result.iter().map(|item| ls_util::rls_to_location(item)).collect())
+}
+
+// #2493
+impl Foo {
+fn bar(&self) {
+ {
+ let x = match () {
+ () => {
+ let i;
+ i == self.install_config.storage.experimental_compressed_block_size as usize
+ }
+ };
+ }
+}
+}
+
+fn dots() {
+ .. .. ..; // (.. (.. (..)))
+ ..= ..= ..;
+ (..) .. ..; // ((..) .. (..))
+}
+
+// #2676
+// A function call with a large single argument.
+fn foo() {
+ let my_var =
+ Mutex::new(RpcClientType::connect(server_iddd).chain_err(|| "Unable to create RPC client")?);
+}
+
+// #2704
+// Method call with prefix and suffix.
+fn issue2704() {
+ // We should not combine the callee with a multi-lined method call.
+ let requires = requires.set(&requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total());
+ let requires = requires.set(box requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total());
+ let requires = requires.set(requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total() as u32);
+ let requires = requires.set(requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total()?);
+ let requires = requires.set(!requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total());
+ // We should combine a small callee with an argument.
+ bar(vec![22]
+ .into_iter()
+ .map(|x| x * 2)
+ .filter(|_| true)
+ .collect());
+ // But we should not combine a long callee with an argument.
+ barrrr(vec![22]
+ .into_iter()
+ .map(|x| x * 2)
+ .filter(|_| true)
+ .collect());
+}
+
+// #2782
+fn issue2782() {
+ {let f={let f={{match f{F(f,_)=>{{loop{let f={match f{F(f,_)=>{{match f{F(f,_)=>{{loop{let f={let f={match f{'-'=>F(f,()),}};};}}}}}}}};}}}}}};};}
+}
+
+fn issue_2802() {
+ function_to_fill_this_line(some_arg, some_arg, some_arg)
+ * a_very_specific_length(specific_length_arg) * very_specific_length(Foo {
+ a: some_much_much_longer_value,
+ }) * some_value
+}
+
+fn issue_3003() {
+ let mut path: PathBuf = [
+ env!("CARGO_MANIFEST_DIR"),
+ "tests",
+ "support",
+ "dejavu-fonts-ttf-2.37",
+ "ttf",
+ ]
+ .iter()
+ .collect();
+}
+
+fn issue3226() {
+ {
+ {
+ {
+ return Err(ErrorKind::ManagementInterfaceError("Server exited unexpectedly").into())
+ }
+ }
+ }
+ {
+ {
+ {
+ break Err(ErrorKind::ManagementInterfaceError("Server exited unexpectedlyy").into())
+ }
+ }
+ }
+}
+
+// #3457
+fn issue3457() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ println!("Test");
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+// #3498
+static REPRO: &[usize] = &[#[cfg(feature = "zero")]
+ 0];
+
+fn overflow_with_attr() {
+ foo(#[cfg(feature = "zero")]
+ 0);
+ foobar(#[cfg(feature = "zero")]
+ 0);
+ foobar(x, y, #[cfg(feature = "zero")]
+ {});
+}
+
+
+// https://github.com/rust-lang/rustfmt/issues/3765
+fn foo() {
+ async {
+ // Do
+ // some
+ // work
+ }
+ .await;
+
+ async {
+ // Do
+ // some
+ // work
+ }
+ .await;
+}
+
+fn underscore() {
+ _= 1;
+ _;
+ [ _,a,_ ] = [1, 2, 3];
+ (a, _) = (8, 9);
+ TupleStruct( _, a) = TupleStruct(2, 2);
+
+ let _ : usize = foo(_, _);
+}
diff --git a/src/tools/rustfmt/tests/source/extern.rs b/src/tools/rustfmt/tests/source/extern.rs
new file mode 100644
index 000000000..f51ba6e98
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/extern.rs
@@ -0,0 +1,92 @@
+// rustfmt-normalize_comments: true
+
+ extern crate foo ;
+ extern crate foo as bar ;
+
+extern crate futures;
+extern crate dotenv;
+extern crate chrono;
+
+extern crate foo;
+extern crate bar;
+
+// #2315
+extern crate proc_macro2;
+extern crate proc_macro;
+
+// #3128
+extern crate serde; // 1.0.78
+extern crate serde_derive; // 1.0.78
+extern crate serde_json; // 1.0.27
+
+ extern "C" {
+ fn c_func(x: *mut *mut libc::c_void);
+
+ fn c_func(x: XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX, y: YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY);
+
+ #[test123]
+ fn foo() -> uint64_t;
+
+pub fn bar() ;
+ }
+
+extern {
+ fn DMR_GetDevice(pHDev: *mut HDEV, searchMode: DeviceSearchMode, pSearchString: *const c_char, devNr: c_uint, wildcard: c_char) -> TDMR_ERROR;
+
+ fn quux() -> (); // Post comment
+
+ pub type
+ Foo;
+
+ type Bar;
+}
+
+extern "Rust" { static ext: u32;
+ // Some comment.
+ pub static mut var : SomeType ; }
+
+extern "C" {
+ fn syscall(number: libc::c_long /* comment 1 */, /* comm 2 */ ... /* sup? */) -> libc::c_long;
+
+ fn foo (x: *const c_char , ... ) ->
+libc::c_long;
+ }
+
+ extern {
+ pub fn freopen(filename: *const c_char, mode: *const c_char
+ , mode2: *const c_char
+ , mode3: *const c_char,
+ file: *mut FILE)
+ -> *mut FILE;
+
+
+ const fn foo(
+
+ ) ->
+ *mut Bar;
+ unsafe fn foo(
+
+ ) -> *
+ mut
+ Bar;
+
+ pub(super) const fn foo() -> *mut Bar;
+ pub(crate) unsafe fn foo() -> *mut Bar;
+ }
+
+extern {
+
+}
+
+macro_rules! x {
+ ($tt:tt) => {};
+}
+
+extern "macros" {
+ x!(ident);
+ x!(#);
+ x![ident];
+ x![#];
+ x! {ident}
+ x! {#}
+}
diff --git a/src/tools/rustfmt/tests/source/extern_not_explicit.rs b/src/tools/rustfmt/tests/source/extern_not_explicit.rs
new file mode 100644
index 000000000..9d6c4c2a1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/extern_not_explicit.rs
@@ -0,0 +1,14 @@
+// rustfmt-force_explicit_abi: false
+
+ extern "C" {
+ fn some_fn() -> ();
+ }
+
+ extern "C" fn sup() {
+
+ }
+
+type funky_func = extern "C" fn (unsafe extern "rust-call" fn(*const JSJitInfo, *mut JSContext,
+ HandleObject, *mut libc::c_void, u32,
+ *mut JSVal)
+ -> u8);
diff --git a/src/tools/rustfmt/tests/source/file-lines-1.rs b/src/tools/rustfmt/tests/source/file-lines-1.rs
new file mode 100644
index 000000000..0164e30a8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-1.rs
@@ -0,0 +1,29 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-1.rs","range":[4,8]}]
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-2.rs b/src/tools/rustfmt/tests/source/file-lines-2.rs
new file mode 100644
index 000000000..6f44ec6e6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-2.rs
@@ -0,0 +1,29 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-2.rs","range":[10,15]}]
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-3.rs b/src/tools/rustfmt/tests/source/file-lines-3.rs
new file mode 100644
index 000000000..4b825b9f5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-3.rs
@@ -0,0 +1,29 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-3.rs","range":[4,8]},{"file":"tests/source/file-lines-3.rs","range":[10,15]}]
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-4.rs b/src/tools/rustfmt/tests/source/file-lines-4.rs
new file mode 100644
index 000000000..83928bf6f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-4.rs
@@ -0,0 +1,30 @@
+// rustfmt-file_lines: []
+// (Test that nothing is formatted if an empty array is specified.)
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+ // aaaaaaaaaaaaa
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-5.rs b/src/tools/rustfmt/tests/source/file-lines-5.rs
new file mode 100644
index 000000000..8ec2c67bc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-5.rs
@@ -0,0 +1,17 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-5.rs","range":[3,5]}]
+
+struct A {
+t: i64,
+}
+
+mod foo {
+ fn bar() {
+ // test
+ let i = 12;
+ // test
+ }
+ // aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ fn baz() {
+ let j = 15;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-6.rs b/src/tools/rustfmt/tests/source/file-lines-6.rs
new file mode 100644
index 000000000..2eacc8a0e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-6.rs
@@ -0,0 +1,18 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-6.rs","range":[9,10]}]
+
+struct A {
+ t: i64,
+}
+
+mod foo {
+ fn bar() {
+ // test
+ let i = 12;
+ // test
+ }
+
+ fn baz() {
+///
+ let j = 15;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-7.rs b/src/tools/rustfmt/tests/source/file-lines-7.rs
new file mode 100644
index 000000000..b227ac35d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-7.rs
@@ -0,0 +1,24 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-7.rs","range":[8,15]}]
+
+struct A {
+ t: i64,
+}
+
+mod foo {
+ fn bar() {
+
+
+
+ // test
+ let i = 12;
+ // test
+ }
+
+ fn baz() {
+
+
+
+ ///
+ let j = 15;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/file-lines-item.rs b/src/tools/rustfmt/tests/source/file-lines-item.rs
new file mode 100644
index 000000000..fe52a7fa1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/file-lines-item.rs
@@ -0,0 +1,21 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-item.rs","range":[6,8]}]
+
+use foo::{c, b, a};
+use bar;
+
+fn foo() {
+ bar ( ) ;
+}
+
+impl Drop for Context {
+ fn drop(&mut self) {
+ }
+}
+
+impl Bar for Baz {
+ fn foo() {
+ bar(
+ baz, // Who knows?
+ )
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom-2.rs b/src/tools/rustfmt/tests/source/fn-custom-2.rs
new file mode 100644
index 000000000..a3697c36d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom-2.rs
@@ -0,0 +1,35 @@
+// Test different indents.
+
+fn foo(a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee) {
+ foo();
+}
+
+fn bar<'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb, TTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW>(a: Aaaaaaaaaaaaaaa) {
+ bar();
+}
+
+fn baz() where X: TTTTTTTT {
+ baz();
+}
+
+fn qux() where X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT {
+ baz();
+}
+
+impl Foo {
+ fn foo(self, a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee) {
+ foo();
+ }
+
+ fn bar<'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb, TTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW>(a: Aaaaaaaaaaaaaaa) {
+ bar();
+ }
+
+ fn baz() where X: TTTTTTTT {
+ baz();
+ }
+}
+
+struct Foo<TTTTTTTTTTTTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUU, VVVVVVVVVVVVVVVVVVVVVVVVVVV, WWWWWWWWWWWWWWWWWWWWWWWW> {
+ foo: Foo,
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom-3.rs b/src/tools/rustfmt/tests/source/fn-custom-3.rs
new file mode 100644
index 000000000..a5e0f9af2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom-3.rs
@@ -0,0 +1,31 @@
+// Test different indents.
+
+fn foo(a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee) {
+ foo();
+}
+
+fn bar<'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb, TTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW>(a: Aaaaaaaaaaaaaaa) {
+ bar();
+}
+
+fn qux() where X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT {
+ baz();
+}
+
+fn qux() where X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT {
+ baz();
+}
+
+impl Foo {
+ fn foo(self, a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee) {
+ foo();
+ }
+
+ fn bar<'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb, TTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW>(a: Aaaaaaaaaaaaaaa) {
+ bar();
+ }
+}
+
+struct Foo<TTTTTTTTTTTTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUU, VVVVVVVVVVVVVVVVVVVVVVVVVVV, WWWWWWWWWWWWWWWWWWWWWWWW> {
+ foo: Foo,
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom-4.rs b/src/tools/rustfmt/tests/source/fn-custom-4.rs
new file mode 100644
index 000000000..6e18b6f9f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom-4.rs
@@ -0,0 +1,13 @@
+// Test different indents.
+
+fn qux() where X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT {
+ baz();
+}
+
+fn qux() where X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT {
+ baz();
+}
+
+fn qux(a: Aaaaaaaaaaaaaaaaa) where X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT, X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT {
+ baz();
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom-6.rs b/src/tools/rustfmt/tests/source/fn-custom-6.rs
new file mode 100644
index 000000000..807084575
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom-6.rs
@@ -0,0 +1,40 @@
+// rustfmt-brace_style: PreferSameLine
+// Test different indents.
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) where T: UUUUUUUUUUU {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) where T: UUUUUUUUUUU {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String where T: UUUUUUUUUUU {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String where T: UUUUUUUUUUU {
+ bar();
+}
+
+trait Test {
+ fn foo(a: u8) {}
+
+ fn bar(a: u8) -> String {}
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom-7.rs b/src/tools/rustfmt/tests/source/fn-custom-7.rs
new file mode 100644
index 000000000..d5330196b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom-7.rs
@@ -0,0 +1,24 @@
+// rustfmt-normalize_comments: true
+// rustfmt-fn_args_layout: Vertical
+// rustfmt-brace_style: AlwaysNextLine
+
+// Case with only one variable.
+fn foo(a: u8) -> u8 {
+ bar()
+}
+
+// Case with 2 variables and some pre-comments.
+fn foo(a: u8 /* Comment 1 */, b: u8 /* Comment 2 */) -> u8 {
+ bar()
+}
+
+// Case with 2 variables and some post-comments.
+fn foo(/* Comment 1 */ a: u8, /* Comment 2 */ b: u8) -> u8 {
+ bar()
+}
+
+trait Test {
+ fn foo(a: u8) {}
+
+ fn bar(a: u8) -> String {}
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom-8.rs b/src/tools/rustfmt/tests/source/fn-custom-8.rs
new file mode 100644
index 000000000..0dd64868b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom-8.rs
@@ -0,0 +1,48 @@
+// rustfmt-brace_style: PreferSameLine
+// Test different indents.
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) where T: UUUUUUUUUUU {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) where T: UUUUUUUUUUU {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String where T: UUUUUUUUUUU {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String where T: UUUUUUUUUUU {
+ bar();
+}
+
+trait Test {
+ fn foo(
+ a: u8) {
+
+ }
+
+ fn bar(a: u8)
+ -> String {
+
+ }
+
+ fn bar(a: u8) -> String where Foo: foooo, Bar: barrr {}
+}
diff --git a/src/tools/rustfmt/tests/source/fn-custom.rs b/src/tools/rustfmt/tests/source/fn-custom.rs
new file mode 100644
index 000000000..77ced4c5e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-custom.rs
@@ -0,0 +1,13 @@
+// rustfmt-fn_args_layout: Compressed
+// Test some of the ways function signatures can be customised.
+
+// Test compressed layout of args.
+fn foo(a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee) {
+ foo();
+}
+
+impl Foo {
+ fn foo(self, a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee) {
+ foo();
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/fn-param-attributes.rs b/src/tools/rustfmt/tests/source/fn-param-attributes.rs
new file mode 100644
index 000000000..3407a3b2e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-param-attributes.rs
@@ -0,0 +1,57 @@
+// https://github.com/rust-lang/rustfmt/issues/3623
+
+fn foo(#[cfg(something)] x: i32, y: i32) -> i32 {
+ x + y
+}
+
+fn foo_b(#[cfg(something)]x: i32, y: i32) -> i32 {
+ x + y
+}
+
+fn add(#[cfg(something)]#[deny(C)] x: i32, y: i32) -> i32 {
+ x + y
+}
+
+struct NamedSelfRefStruct {}
+impl NamedSelfRefStruct {
+ fn foo(
+#[cfg(something)] self: &Self,
+ ) {}
+}
+
+struct MutStruct {}
+impl MutStruct {
+ fn foo(
+ #[cfg(foo)]&mut self,#[deny(C)] b: i32,
+ ) {}
+}
+
+fn main() {
+ let c = |
+ #[allow(C)]a: u32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))]#[deny(C)] c: i32,
+ | {};
+ let _ = c(1, 2);
+}
+
+pub fn bar(
+ /// bar
+#[test] a: u32,
+ /// Bar
+ #[must_use]
+/// Baz
+ #[no_mangle] b: i32,
+) {}
+
+
+fn abc(
+ #[foo]
+ #[bar] param: u32,
+) {
+ // ...
+}
+
+fn really_really_really_loooooooooooooooooooong(#[cfg(some_even_longer_config_feature_that_keeps_going_and_going_and_going_forever_and_ever_and_ever_on_and_on)] b: i32) {
+ // ...
+}
diff --git a/src/tools/rustfmt/tests/source/fn-simple.rs b/src/tools/rustfmt/tests/source/fn-simple.rs
new file mode 100644
index 000000000..12a50c013
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-simple.rs
@@ -0,0 +1,74 @@
+// rustfmt-normalize_comments: true
+
+fn simple(/*pre-comment on a function!?*/ i: i32/*yes, it's possible! */
+ ,response: NoWay /* hose */) {
+fn op(x: Typ, key : &[u8], upd : Box<Fn(Option<&memcache::Item>) -> (memcache::Status, Result<memcache::Item, Option<String>>)>) -> MapResult {}
+
+ "cool"}
+
+
+fn weird_comment(/* /*/ double level */ comment */ x: Hello /*/*/* triple, even */*/*/,
+// Does this work?
+y: World
+) {
+ simple(/* does this preserve comments now? */ 42, NoWay)
+}
+
+fn generic<T>(arg: T) -> &SomeType
+ where T: Fn(// First arg
+ A,
+ // Second argument
+ B, C, D, /* pre comment */ E /* last comment */) -> &SomeType {
+ arg(a, b, c, d, e)
+}
+
+fn foo() -> ! {}
+
+pub fn http_fetch_async(listener:Box< AsyncCORSResponseListener+Send >, script_chan: Box<ScriptChan+Send>) {
+}
+
+fn some_func<T:Box<Trait+Bound>>(val:T){}
+
+fn zzzzzzzzzzzzzzzzzzzz<Type, NodeType>
+ (selff: Type, mut handle: node::Handle<IdRef<'id, Node<K, V>>, Type, NodeType>)
+ -> SearchStack<'a, K, V, Type, NodeType>{
+}
+
+unsafe fn generic_call(cx: *mut JSContext, argc: libc::c_uint, vp: *mut JSVal,
+ is_lenient: bool,
+ call: unsafe extern fn(*const JSJitInfo, *mut JSContext,
+ HandleObject, *mut libc::c_void, u32,
+ *mut JSVal)
+ -> u8) {
+ let f: fn ( _ , _ ) -> _ = panic!() ;
+}
+
+pub fn start_export_thread<C: CryptoSchemee + 'static>(database: &Database, crypto_scheme: &C, block_size: usize, source_path: &Path) -> BonzoResult<mpsc::Consumer<'static, FileInstruction>> {}
+
+pub fn waltz(cwd: &Path) -> CliAssert {
+ {
+ {
+ formatted_comment = rewrite_comment(comment, block_style, width, offset, formatting_fig);
+ }
+ }
+}
+
+// #2003
+mod foo {
+ fn __bindgen_test_layout_i_open0_c_open1_char_a_open2_char_close2_close1_close0_instantiation() {
+ foo();
+ }
+}
+
+// #2082
+pub(crate) fn init() {}
+
+pub(crate) fn init() {}
+
+// #2630
+fn make_map<T, F: (Fn(&T) -> String)>(records: &Vec<T>, key_fn: F) -> HashMap<String, usize> {}
+
+// #2956
+fn bar(beans: Asdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdf, spam: bool, eggs: bool) -> bool{
+ unimplemented!();
+}
diff --git a/src/tools/rustfmt/tests/source/fn-single-line/version_one.rs b/src/tools/rustfmt/tests/source/fn-single-line/version_one.rs
new file mode 100644
index 000000000..469ab6215
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-single-line/version_one.rs
@@ -0,0 +1,80 @@
+// rustfmt-fn_single_line: true
+// rustfmt-version: One
+// Test single-line functions.
+
+fn foo_expr() {
+ 1
+}
+
+fn foo_stmt() {
+ foo();
+}
+
+fn foo_decl_local() {
+ let z = 5;
+ }
+
+fn foo_decl_item(x: &mut i32) {
+ x = 3;
+}
+
+ fn empty() {
+
+}
+
+fn foo_return() -> String {
+ "yay"
+}
+
+fn foo_where() -> T where T: Sync {
+ let x = 2;
+}
+
+fn fooblock() {
+ {
+ "inner-block"
+ }
+}
+
+fn fooblock2(x: i32) {
+ let z = match x {
+ _ => 2,
+ };
+}
+
+fn comment() {
+ // this is a test comment
+ 1
+}
+
+fn comment2() {
+ // multi-line comment
+ let z = 2;
+ 1
+}
+
+fn only_comment() {
+ // Keep this here
+}
+
+fn aaaaaaaaaaaaaaaaa_looooooooooooooooooooooong_name() {
+ let z = "aaaaaaawwwwwwwwwwwwwwwwwwwwwwwwwwww";
+}
+
+fn lots_of_space () {
+ 1
+}
+
+fn mac() -> Vec<i32> { vec![] }
+
+trait CoolTypes {
+ fn dummy(&self) {
+ }
+}
+
+trait CoolerTypes { fn dummy(&self) {
+}
+}
+
+fn Foo<T>() where T: Bar {
+}
diff --git a/src/tools/rustfmt/tests/source/fn-single-line/version_two.rs b/src/tools/rustfmt/tests/source/fn-single-line/version_two.rs
new file mode 100644
index 000000000..bf381ff10
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn-single-line/version_two.rs
@@ -0,0 +1,80 @@
+// rustfmt-fn_single_line: true
+// rustfmt-version: Two
+// Test single-line functions.
+
+fn foo_expr() {
+ 1
+}
+
+fn foo_stmt() {
+ foo();
+}
+
+fn foo_decl_local() {
+ let z = 5;
+ }
+
+fn foo_decl_item(x: &mut i32) {
+ x = 3;
+}
+
+ fn empty() {
+
+}
+
+fn foo_return() -> String {
+ "yay"
+}
+
+fn foo_where() -> T where T: Sync {
+ let x = 2;
+}
+
+fn fooblock() {
+ {
+ "inner-block"
+ }
+}
+
+fn fooblock2(x: i32) {
+ let z = match x {
+ _ => 2,
+ };
+}
+
+fn comment() {
+ // this is a test comment
+ 1
+}
+
+fn comment2() {
+ // multi-line comment
+ let z = 2;
+ 1
+}
+
+fn only_comment() {
+ // Keep this here
+}
+
+fn aaaaaaaaaaaaaaaaa_looooooooooooooooooooooong_name() {
+ let z = "aaaaaaawwwwwwwwwwwwwwwwwwwwwwwwwwww";
+}
+
+fn lots_of_space () {
+ 1
+}
+
+fn mac() -> Vec<i32> { vec![] }
+
+trait CoolTypes {
+ fn dummy(&self) {
+ }
+}
+
+trait CoolerTypes { fn dummy(&self) {
+}
+}
+
+fn Foo<T>() where T: Bar {
+}
diff --git a/src/tools/rustfmt/tests/source/fn_args_indent-block.rs b/src/tools/rustfmt/tests/source/fn_args_indent-block.rs
new file mode 100644
index 000000000..955f390cc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn_args_indent-block.rs
@@ -0,0 +1,77 @@
+// rustfmt-normalize_comments: true
+
+fn foo() {
+ foo();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String {
+ foo();
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String {
+ bar();
+}
+
+fn foo(a: u8 /* Comment 1 */, b: u8 /* Comment 2 */) -> u8 {
+ bar()
+}
+
+fn foo(a: u8 /* Comment 1 */, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee /* Comment 2 */) -> u8 {
+ bar()
+}
+
+fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String where X: Fooooo, Y: Baaar {
+ bar();
+}
+
+fn foo() -> T {
+ foo();
+}
+
+fn foo() -> T where X: Foooo, Y: Baaar {
+ foo();
+}
+
+fn foo() where X: Foooo {
+}
+
+fn foo() where X: Foooo, Y: Baaar {
+}
+
+fn foo() -> (Loooooooooooooooooooooong, Reeeeeeeeeeeeeeeeeeeeeeeeturn, iiiiiiiiis, Looooooooooooooooong) {
+ foo();
+}
+
+fn foo<g: G>() {
+ foo();
+}
+
+fn foo<L: Loooooooooooooooooooooong, G: Geeeeeeeeeeeneric, I: iiiiiiiiis, L: Looooooooooooooooong>() {
+ foo();
+}
+
+fn foo<L: Loooooooooooooooooooong, G: Geeeeeeeeeeneric, I: iiiiiiiiis, L: Loooooooooooooooong>() {
+ foo();
+}
+
+trait Test {
+ fn foo(a: u8) {}
+
+ fn bar(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd, e: Eeeeeeeeeeeeeee) -> String {}
+}
+
+fn foo<L: Loooooooooooooooooooong, G: Geeeeeeeeeeneric, I: iiiiiiiiis, L: Loooooooooooooooong>(a: Aaaaaaaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbbb, c: Cccccccccccccccccc, d: Dddddddddddddddd) {
+ foo();
+}
+
+fn foo() -> (Looooooooooooooooooooooooooong, Reeeeeeeeeeeeeeeeeeeeeeeeeeeeeturn, iiiiiiiiiiiiiis, Loooooooooooooooooooooong) {
+ foo();
+}
diff --git a/src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs b/src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs
new file mode 100644
index 000000000..759bc83d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs
@@ -0,0 +1,33 @@
+// rustfmt-fn_args_layout: Vertical
+
+// Empty list should stay on one line.
+fn do_bar(
+
+) -> u8 {
+ bar()
+}
+
+// A single argument should stay on the same line.
+fn do_bar(
+ a: u8) -> u8 {
+ bar()
+}
+
+// Multiple arguments should each get their own line.
+fn do_bar(a: u8, mut b: u8, c: &u8, d: &mut u8, closure: &Fn(i32) -> i32) -> i32 {
+ // This feature should not affect closures.
+ let bar = |x: i32, y: i32| -> i32 { x + y };
+ bar(a, b)
+}
+
+// If the first argument doesn't fit on the same line with the function name,
+// the whole list should probably be pushed to the next line with hanging
+// indent. That's not what happens though, so check current behaviour instead.
+// In any case, it should maintain single argument per line.
+fn do_this_that_and_the_other_thing(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: u8,
+ b: u8, c: u8, d: u8) {
+ this();
+ that();
+ the_other_thing();
+}
diff --git a/src/tools/rustfmt/tests/source/hard-tabs.rs b/src/tools/rustfmt/tests/source/hard-tabs.rs
new file mode 100644
index 000000000..e4a0f4170
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/hard-tabs.rs
@@ -0,0 +1,84 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-hard_tabs: true
+
+fn main() {
+let x = Bar;
+
+let y = Foo {a: x };
+
+Foo { a: foo() /* comment*/, /* comment*/ b: bar(), ..something };
+
+fn foo(a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32) {}
+
+let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
+
+if let (some_very_large, tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple) = 1
++ 2 + 3 {
+}
+
+ if cond() {
+ something();
+ } else if different_cond() {
+ something_else();
+ } else {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ }
+
+unsafe /* very looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment */ {}
+
+unsafe // So this is a very long comment.
+ // Multi-line, too.
+ // Will it still format correctly?
+{
+}
+
+let chain = funktion_kall().go_to_next_line_with_tab().go_to_next_line_with_tab().go_to_next_line_with_tab();
+
+let z = [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzz, q];
+
+fn generic<T>(arg: T) -> &SomeType
+ where T: Fn(// First arg
+ A,
+ // Second argument
+ B, C, D, /* pre comment */ E /* last comment */) -> &SomeType {
+ arg(a, b, c, d, e)
+}
+
+ loong_func().quux(move || {
+ if true {
+ 1
+ } else {
+ 2
+ }
+ });
+
+ fffffffffffffffffffffffffffffffffff(a,
+ {
+ SCRIPT_TASK_ROOT
+ .with(|root| {
+ *root.borrow_mut() = Some(&script_task);
+ });
+ });
+ a.b
+ .c
+ .d();
+
+ x().y(|| {
+ match cond() {
+ true => (),
+ false => (),
+ }
+ });
+}
+
+// #2296
+impl Foo {
+ // a comment
+ // on multiple lines
+ fn foo() {
+ // another comment
+ // on multiple lines
+ let x = true;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/hello.rs b/src/tools/rustfmt/tests/source/hello.rs
new file mode 100644
index 000000000..f892e6deb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/hello.rs
@@ -0,0 +1,6 @@
+// rustfmt-config: small_tabs.toml
+// rustfmt-target: hello.rs
+
+// Smoke test - hello world.
+
+fn main() { println!("Hello world!"); }
diff --git a/src/tools/rustfmt/tests/source/hello2.rs b/src/tools/rustfmt/tests/source/hello2.rs
new file mode 100644
index 000000000..48af7de38
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/hello2.rs
@@ -0,0 +1,8 @@
+// rustfmt-config: small_tabs.toml
+// rustfmt-target: hello.rs
+
+// Smoke test - hello world.
+
+fn main( ) {
+println!("Hello world!");
+}
diff --git a/src/tools/rustfmt/tests/source/hex_literal_lower.rs b/src/tools/rustfmt/tests/source/hex_literal_lower.rs
new file mode 100644
index 000000000..ce307b3aa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/hex_literal_lower.rs
@@ -0,0 +1,5 @@
+// rustfmt-hex_literal_case: Lower
+fn main() {
+ let h1 = 0xCAFE_5EA7;
+ let h2 = 0xCAFE_F00Du32;
+}
diff --git a/src/tools/rustfmt/tests/source/hex_literal_upper.rs b/src/tools/rustfmt/tests/source/hex_literal_upper.rs
new file mode 100644
index 000000000..b1092ad71
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/hex_literal_upper.rs
@@ -0,0 +1,5 @@
+// rustfmt-hex_literal_case: Upper
+fn main() {
+ let h1 = 0xCaFE_5ea7;
+ let h2 = 0xCAFE_F00Du32;
+}
diff --git a/src/tools/rustfmt/tests/source/if_while_or_patterns.rs b/src/tools/rustfmt/tests/source/if_while_or_patterns.rs
new file mode 100644
index 000000000..f01df7e91
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/if_while_or_patterns.rs
@@ -0,0 +1,27 @@
+#![feature(if_while_or_patterns)]
+
+fn main() {
+ if let 0 | 1 = 0 {
+ println!("hello, world");
+ };
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbbbbbb | cccccccccccccccc | d_100 = 0 {
+ println!("hello, world");
+ }
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbb | ccccccccccccccccccccc | d_101 = 0 {
+ println!("hello, world");
+ }
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbb | ccccccccccccccccccccc | d_103 = 0 {
+ println!("hello, world");
+ }
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbb | ccccccccccccccccccccc | d_105 = 0 {
+ println!("hello, world");
+ }
+
+ while let xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx = foo_bar(bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, cccccccccccccccccccccccccccccccccccccccc) {
+ println!("hello, world");
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/immovable_generators.rs b/src/tools/rustfmt/tests/source/immovable_generators.rs
new file mode 100644
index 000000000..c57a1e144
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/immovable_generators.rs
@@ -0,0 +1,7 @@
+#![feature(generators)]
+
+unsafe fn foo() {
+ let mut ga = static || {
+ yield 1;
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/impls.rs b/src/tools/rustfmt/tests/source/impls.rs
new file mode 100644
index 000000000..dcd1f0cd5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/impls.rs
@@ -0,0 +1,178 @@
+// rustfmt-normalize_comments: true
+impl Foo for Bar { fn foo() { "hi" } }
+
+pub impl Foo for Bar {
+ // Associated Constants
+ const Baz: i32 = 16;
+ // Associated Types
+ type FooBar = usize;
+ // Comment 1
+ fn foo() { "hi" }
+ // Comment 2
+ fn foo() { "hi" }
+ // Comment 3
+}
+
+#[inherent]
+impl Visible for Bar {
+ pub const C: i32;
+ pub type T;
+ pub fn f();
+ pub fn g() {}
+}
+
+pub unsafe impl<'a, 'b, X, Y: Foo<Bar>> !Foo<'a, X> for Bar<'b, Y> where X: Foo<'a, Z> {
+ fn foo() { "hi" }
+}
+
+impl<'a, 'b, X, Y: Foo<Bar>> Foo<'a, X> for Bar<'b, Y> where X: Fooooooooooooooooooooooooooooo<'a, Z>
+{
+ fn foo() { "hi" }
+}
+
+impl<'a, 'b, X, Y: Foo<Bar>> Foo<'a, X> for Bar<'b, Y> where X: Foooooooooooooooooooooooooooo<'a, Z>
+{
+ fn foo() { "hi" }
+}
+
+impl<T> Foo for Bar<T> where T: Baz
+{
+}
+
+impl<T> Foo for Bar<T> where T: Baz { /* Comment */ }
+
+impl Foo {
+ fn foo() {}
+}
+
+impl Boo {
+
+ // BOO
+ fn boo() {}
+ // FOO
+
+
+
+}
+
+mod a {
+ impl Foo {
+ // Hello!
+ fn foo() {}
+ }
+}
+
+
+mod b {
+ mod a {
+ impl Foo {
+ fn foo() {}
+ }
+ }
+}
+
+impl Foo { add_fun!(); }
+
+impl Blah {
+ fn boop() {}
+ add_fun!();
+}
+
+impl X { fn do_parse( mut self : X ) {} }
+
+impl Y5000 {
+ fn bar(self: X< 'a , 'b >, y: Y) {}
+
+ fn bad(&self, ( x, y): CoorT) {}
+
+ fn turbo_bad(self: X< 'a , 'b > , ( x, y): CoorT) {
+
+ }
+}
+
+pub impl<T> Foo for Bar<T> where T: Foo
+{
+ fn foo() { "hi" }
+}
+
+pub impl<T, Z> Foo for Bar<T, Z> where T: Foo, Z: Baz {}
+
+mod m {
+ impl<T> PartialEq for S<T> where T: PartialEq {
+ fn eq(&self, other: &Self) {
+ true
+ }
+ }
+
+ impl<T> PartialEq for S<T> where T: PartialEq { }
+ }
+
+impl<BorrowType, K, V, NodeType, HandleType> Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
+}
+
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
+}
+
+mod x {
+ impl<A, B, C, D> Foo
+ where A: 'static,
+ B: 'static,
+ C: 'static,
+ D: 'static { }
+}
+
+impl<ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNodeFoo> Issue1249<ConcreteThreadSafeLayoutNode> {
+ // Creates a new flow constructor.
+ fn foo() {}
+}
+
+// #1600
+impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
+ fn drop() {}
+}
+
+// #1168
+pub trait Number: Copy + Eq + Not<Output = Self> + Shl<u8, Output = Self> +
+ Shr<u8, Output = Self> +
+ BitAnd<Self, Output=Self> + BitOr<Self, Output=Self> + BitAndAssign + BitOrAssign
+
+
+
+{
+ // test
+ fn zero() -> Self;
+}
+
+// #1642
+pub trait SomeTrait : Clone + Eq + PartialEq + Ord + PartialOrd + Default + Hash + Debug + Display + Write + Read + FromStr {
+ // comment
+}
+
+// #1995
+impl Foo {
+ fn f(
+ S {
+ aaaaaaaaaa: aaaaaaaaaa,
+ bbbbbbbbbb: bbbbbbbbbb,
+ cccccccccc: cccccccccc,
+ }: S
+ ) -> u32{
+ 1
+ }
+}
+
+// #2491
+impl<'a, 'b, 'c> SomeThing<Something> for (&'a mut SomethingLong, &'b mut SomethingLong, &'c mut SomethingLong) {
+ fn foo() {}
+}
+
+// #2746
+impl<'seq1, 'seq2, 'body, 'scope, Channel> Adc12< Dual, MasterRunningDma<'seq1, 'body, 'scope, Channel>, SlaveRunningDma<'seq2, 'body, 'scope>, > where Channel: DmaChannel, {}
+
+// #4084
+impl const std::default::Default for Struct {
+ #[inline]
+ fn default() -> Self {
+ Self { f: 12.5 }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/imports/imports-impl-only-use.rs b/src/tools/rustfmt/tests/source/imports/imports-impl-only-use.rs
new file mode 100644
index 000000000..d290d8d91
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports-impl-only-use.rs
@@ -0,0 +1,4 @@
+#![feature(underscore_imports)]
+
+use attr;
+use std::iter::Iterator as _;
diff --git a/src/tools/rustfmt/tests/source/imports/imports-reorder-lines-and-items.rs b/src/tools/rustfmt/tests/source/imports/imports-reorder-lines-and-items.rs
new file mode 100644
index 000000000..b6380f31c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports-reorder-lines-and-items.rs
@@ -0,0 +1,7 @@
+/// This comment should stay with `use std::str;`
+use std::str;
+use std::cmp::{d, c, b, a};
+use std::ddd::aaa;
+use std::ddd::{d as p, c as g, b, a};
+// This comment should stay with `use std::ddd:bbb;`
+use std::ddd::bbb;
diff --git a/src/tools/rustfmt/tests/source/imports/imports-reorder-lines.rs b/src/tools/rustfmt/tests/source/imports/imports-reorder-lines.rs
new file mode 100644
index 000000000..2b018544e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports-reorder-lines.rs
@@ -0,0 +1,32 @@
+use std::str;
+use std::cmp::{d, c, b, a};
+use std::cmp::{b, e, g, f};
+use std::ddd::aaa;
+// This comment should stay with `use std::ddd;`
+use std::ddd;
+use std::ddd::bbb;
+
+mod test {
+}
+
+use aaa::bbb;
+use aaa;
+use aaa::*;
+
+mod test {}
+// If item names are equal, order by rename
+
+use test::{a as bb, b};
+use test::{a as aa, c};
+
+mod test {}
+// If item names are equal, order by rename - no rename comes before a rename
+
+use test::{a as bb, b};
+use test::{a, c};
+
+mod test {}
+// `self` always comes first
+
+use test::{a as aa, c};
+use test::{self as bb, b};
diff --git a/src/tools/rustfmt/tests/source/imports/imports-reorder.rs b/src/tools/rustfmt/tests/source/imports/imports-reorder.rs
new file mode 100644
index 000000000..cbe9d6ca7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports-reorder.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+
+use path::{C,/*A*/ A, B /* B */, self /* self */};
+
+use {ab, ac, aa, Z, b};
diff --git a/src/tools/rustfmt/tests/source/imports/imports.rs b/src/tools/rustfmt/tests/source/imports/imports.rs
new file mode 100644
index 000000000..4dfc6ed94
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports.rs
@@ -0,0 +1,107 @@
+// rustfmt-normalize_comments: true
+
+// Imports.
+
+// Long import.
+use rustc_ast::ast::{ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic, ItemDefaultImpl};
+use exceedingly::looooooooooooooooooooooooooooooooooooooooooooooooooooooooooong::import::path::{ItemA, ItemB};
+use exceedingly::loooooooooooooooooooooooooooooooooooooooooooooooooooooooong::import::path::{ItemA, ItemB};
+
+use list::{
+ // Some item
+ SomeItem /* Comment */, /* Another item */ AnotherItem /* Another Comment */, // Last Item
+ LastItem
+};
+
+use test::{ Other /* C */ , /* A */ self /* B */ };
+
+use rustc_ast::{self};
+use {/* Pre-comment! */
+ Foo, Bar /* comment */};
+use Foo::{Bar, Baz};
+pub use rustc_ast::ast::{Expr_, Expr, ExprAssign, ExprCall, ExprMethodCall, ExprPath};
+
+use rustc_ast::some::{};
+
+use self;
+use std::io::{self};
+use std::io::self;
+
+mod Foo {
+ pub use rustc_ast::ast::{
+ ItemForeignMod,
+ ItemImpl,
+ ItemMac,
+ ItemMod,
+ ItemStatic,
+ ItemDefaultImpl
+ };
+
+ mod Foo2 {
+ pub use rustc_ast::ast::{ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic, self, ItemDefaultImpl};
+ }
+}
+
+fn test() {
+use Baz::*;
+ use Qux;
+}
+
+// Simple imports
+use foo::bar::baz as baz ;
+use bar::quux as kaas;
+use foo;
+
+// With aliases.
+use foo::{self as bar, baz};
+use foo::{self as bar};
+use foo::{qux as bar};
+use foo::{baz, qux as bar};
+
+// With absolute paths
+use ::foo;
+use ::foo::{Bar};
+use ::foo::{Bar, Baz};
+use ::{Foo};
+use ::{Bar, Baz};
+
+// Root globs
+use *;
+use ::*;
+
+// spaces used to cause glob imports to disappear (#1356)
+use super:: * ;
+use foo::issue_1356:: * ;
+
+// We shouldn't remove imports which have attributes attached (#1858)
+#[cfg(unix)]
+use self::unix::{};
+
+// nested imports
+use foo::{a, bar::{baz, qux, xxxxxxxxxxx, yyyyyyyyyyyyy, zzzzzzzzzzzzzzzz, foo::{a, b, cxxxxxxxxxxxxx, yyyyyyyyyyyyyy, zzzzzzzzzzzzzzzz}}, b, boo, c,};
+
+use fooo::{baar::{foobar::{xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz}}, z, bar, bar::*, x, y};
+
+use exonum::{api::{Api, ApiError}, blockchain::{self, BlockProof, Blockchain, Transaction, TransactionSet}, crypto::{Hash, PublicKey}, helpers::Height, node::TransactionSend, storage::{ListProof, MapProof}};
+
+// nested imports with a single sub-tree.
+use a::{b::{c::*}};
+use a::{b::{c::{}}};
+use a::{b::{c::d}};
+use a::{b::{c::{xxx, yyy, zzz}}};
+
+// #2645
+/// This line is not affected.
+// This line is deleted.
+use c;
+
+// #2670
+#[macro_use]
+use imports_with_attr;
+
+// #2888
+use std::f64::consts::{SQRT_2, E, PI};
+
+// #3273
+#[rustfmt::skip]
+use std::fmt::{self, {Display, Formatter}};
diff --git a/src/tools/rustfmt/tests/source/imports/imports_block_indent.rs b/src/tools/rustfmt/tests/source/imports/imports_block_indent.rs
new file mode 100644
index 000000000..016deefe5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_block_indent.rs
@@ -0,0 +1,2 @@
+// #2569
+use apns2::request::notification::{Notificatio, NotificationBuilder, Priority, SilentNotificationBuilder};
diff --git a/src/tools/rustfmt/tests/source/imports/imports_granularity_crate.rs b/src/tools/rustfmt/tests/source/imports/imports_granularity_crate.rs
new file mode 100644
index 000000000..f6f7761e8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_granularity_crate.rs
@@ -0,0 +1,65 @@
+// rustfmt-imports_granularity: Crate
+
+use a::{c,d,b};
+use a::{d, e, b, a, f};
+use a::{f, g, c};
+
+#[doc(hidden)]
+use a::b;
+use a::c;
+use a::d;
+
+use a::{c, d, e};
+#[doc(hidden)]
+use a::b;
+use a::d;
+
+pub use foo::bar;
+use foo::{a, b, c};
+pub use foo::foobar;
+
+use a::{b::{c::*}};
+use a::{b::{c::{}}};
+use a::{b::{c::d}};
+use a::{b::{c::{xxx, yyy, zzz}}};
+
+// https://github.com/rust-lang/rustfmt/issues/3808
+use d::{self};
+use e::{self as foo};
+use f::{self, b};
+use g::a;
+use g::{self, b};
+use h::{a};
+use i::a::{self};
+use j::{a::{self}};
+
+use {k::{a, b}, l::{a, b}};
+use {k::{c, d}, l::{c, d}};
+
+use b::{f::g, h::{i, j} /* After b::h group */};
+use b::e;
+use b::{/* Before b::l group */ l::{self, m, n::o, p::*}, q};
+use b::d;
+use b::r; // After b::r
+use b::q::{self /* After b::q::self */};
+use b::u::{
+ a,
+ b,
+};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::t::{/* Before b::t::self */ self};
+use b::c;
diff --git a/src/tools/rustfmt/tests/source/imports/imports_granularity_default-with-dups.rs b/src/tools/rustfmt/tests/source/imports/imports_granularity_default-with-dups.rs
new file mode 100644
index 000000000..cbb21a9f1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_granularity_default-with-dups.rs
@@ -0,0 +1,6 @@
+use crate::lexer;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::{tokens::TokenData};
+use crate::lexer::self;
+use crate::lexer::{self};
+use crate::lexer::{self, tokens::TokenData};
diff --git a/src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs b/src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs
new file mode 100644
index 000000000..e23705a88
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs
@@ -0,0 +1,13 @@
+// rustfmt-imports_granularity: Item
+// rustfmt-reorder_imports: false
+// rustfmt-group_imports: StdExternalCrate
+
+use crate::lexer;
+use crate::lexer;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::{tokens::TokenData};
+use crate::lexer::self;
+use crate::lexer;
+use crate::lexer;
+use crate::lexer::{self};
+use crate::lexer::{self, tokens::TokenData};
diff --git a/src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups.rs b/src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups.rs
new file mode 100644
index 000000000..3e9589c29
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_granularity_item-with-dups.rs
@@ -0,0 +1,11 @@
+// rustfmt-imports_granularity: Item
+
+use crate::lexer;
+use crate::lexer;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::{tokens::TokenData};
+use crate::lexer::self;
+use crate::lexer;
+use crate::lexer;
+use crate::lexer::{self};
+use crate::lexer::{self, tokens::TokenData};
diff --git a/src/tools/rustfmt/tests/source/imports/imports_granularity_item.rs b/src/tools/rustfmt/tests/source/imports/imports_granularity_item.rs
new file mode 100644
index 000000000..b82c0d33c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_granularity_item.rs
@@ -0,0 +1,34 @@
+// rustfmt-imports_granularity: Item
+
+use a::{b, c, d};
+use a::{f::g, h::{i, j}};
+use a::{l::{self, m, n::o, p::*}};
+use a::q::{self};
+
+use b::{f::g, h::{i, j} /* After b::h group */};
+use b::e;
+use b::{/* Before b::l group */ l::{self, m, n::o, p::*}, q};
+use b::d;
+use b::r; // After b::r
+use b::q::{self /* After b::q::self */};
+use b::u::{
+ a,
+ b,
+};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::t::{/* Before b::t::self */ self};
+use b::c;
diff --git a/src/tools/rustfmt/tests/source/imports/imports_granularity_module.rs b/src/tools/rustfmt/tests/source/imports/imports_granularity_module.rs
new file mode 100644
index 000000000..c7f68cea6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports/imports_granularity_module.rs
@@ -0,0 +1,47 @@
+// rustfmt-imports_granularity: Module
+
+use a::{b::c, d::e};
+use a::{f, g::{h, i}};
+use a::{j::{self, k::{self, l}, m}, n::{o::p, q}};
+pub use a::{r::s, t};
+use b::{c::d, self};
+
+#[cfg(test)]
+use foo::{a::b, c::d};
+use foo::e;
+
+use bar::{
+ // comment
+ a::b,
+ // more comment
+ c::d,
+ e::f,
+};
+
+use b::{f::g, h::{i, j} /* After b::h group */};
+use b::e;
+use b::{/* Before b::l group */ l::{self, m, n::o, p::*}, q};
+use b::d;
+use b::r; // After b::r
+use b::q::{self /* After b::q::self */};
+use b::u::{
+ a,
+ b,
+};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::t::{/* Before b::t::self */ self};
+use b::c;
diff --git a/src/tools/rustfmt/tests/source/imports_granularity_one.rs b/src/tools/rustfmt/tests/source/imports_granularity_one.rs
new file mode 100644
index 000000000..4d5a47956
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports_granularity_one.rs
@@ -0,0 +1,88 @@
+// rustfmt-imports_granularity: One
+
+use b;
+use a::ac::{aca, acb};
+use a::{aa::*, ab};
+
+use a as x;
+use b::ba;
+use a::{aa, ab};
+
+use a::aa::aaa;
+use a::ab::aba as x;
+use a::aa::*;
+
+use a::aa;
+use a::ad::ada;
+#[cfg(test)]
+use a::{ab, ac::aca};
+use b;
+#[cfg(test)]
+use b::{
+ ba, bb,
+ bc::bca::{bcaa, bcab},
+};
+
+pub use a::aa;
+pub use a::ae;
+use a::{ab, ac, ad};
+use b::ba;
+pub use b::{bb, bc::bca};
+
+use a::aa::aaa;
+use a::ac::{aca, acb};
+use a::{aa::*, ab};
+use b::{
+ ba,
+ bb::{self, bba},
+};
+
+use crate::a;
+use crate::b::ba;
+use c::ca;
+
+use super::a;
+use c::ca;
+use super::b::ba;
+
+use crate::a;
+use super::b;
+use c::{self, ca};
+
+use a::{
+ // some comment
+ aa::{aaa, aab},
+ ab,
+ // another comment
+ ac::aca,
+};
+use b as x;
+use a::ad::ada;
+
+use b::{f::g, h::{i, j} /* After b::h group */};
+use b::e;
+use b::{/* Before b::l group */ l::{self, m, n::o, p::*}, q};
+use b::d;
+use b::r; // After b::r
+use b::q::{self /* After b::q::self */};
+use b::u::{
+ a,
+ b,
+};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::t::{/* Before b::t::self */ self};
+use b::c;
diff --git a/src/tools/rustfmt/tests/source/imports_raw_identifiers/version_One.rs b/src/tools/rustfmt/tests/source/imports_raw_identifiers/version_One.rs
new file mode 100644
index 000000000..bc4b5b135
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports_raw_identifiers/version_One.rs
@@ -0,0 +1,5 @@
+// rustfmt-version:One
+
+use websocket::client::ClientBuilder;
+use websocket::r#async::futures::Stream;
+use websocket::result::WebSocketError;
diff --git a/src/tools/rustfmt/tests/source/imports_raw_identifiers/version_Two.rs b/src/tools/rustfmt/tests/source/imports_raw_identifiers/version_Two.rs
new file mode 100644
index 000000000..88e7fbd01
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/imports_raw_identifiers/version_Two.rs
@@ -0,0 +1,5 @@
+// rustfmt-version:Two
+
+use websocket::client::ClientBuilder;
+use websocket::r#async::futures::Stream;
+use websocket::result::WebSocketError;
diff --git a/src/tools/rustfmt/tests/source/invalid-rust-code-in-doc-comment.rs b/src/tools/rustfmt/tests/source/invalid-rust-code-in-doc-comment.rs
new file mode 100644
index 000000000..835b0261b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/invalid-rust-code-in-doc-comment.rs
@@ -0,0 +1,20 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// ```rust
+/// if (true) { … }
+/// ```
+fn a() {
+}
+
+/// ```rust
+/// if foo() {
+/// …
+/// }
+/// ```
+fn a() {
+}
+
+/// ```rust
+/// k1 == k2 ⇒ hash(k1) == hash(k2)
+/// ```
+pub struct a ;
diff --git a/src/tools/rustfmt/tests/source/issue-1021.rs b/src/tools/rustfmt/tests/source/issue-1021.rs
new file mode 100644
index 000000000..380e24cc0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1021.rs
@@ -0,0 +1,22 @@
+// rustfmt-normalize_comments: true
+fn main() {
+ match x {
+ S(true , .., true ) => (),
+ S(true , .. ) => (),
+ S(.., true ) => (),
+ S( .. ) => (),
+ S(_) => (),
+ S(/* .. */ .. ) => (),
+ S(/* .. */ .., true ) => (),
+ }
+
+ match y {
+ (true , .., true ) => (),
+ (true , .. ) => (),
+ (.., true ) => (),
+ ( .. ) => (),
+ (_,) => (),
+ (/* .. */ .. ) => (),
+ (/* .. */ .., true ) => (),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1049.rs b/src/tools/rustfmt/tests/source/issue-1049.rs
new file mode 100644
index 000000000..bcfba41e7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1049.rs
@@ -0,0 +1,18 @@
+// Test overlong function signature
+pub unsafe fn reborrow_mut(&mut X: Abcde) -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
+}
+
+pub fn merge(mut X: Abcdef) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+}
+
+impl Handle {
+ pub fn merge(a: Abcd) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ }
+}
+
+// Long function without return type that should not be reformatted.
+fn veeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
+
+fn veeeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
+
+fn veeeeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
diff --git a/src/tools/rustfmt/tests/source/issue-1111.rs b/src/tools/rustfmt/tests/source/issue-1111.rs
new file mode 100644
index 000000000..2e1a89ad7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1111.rs
@@ -0,0 +1 @@
+use bar;
diff --git a/src/tools/rustfmt/tests/source/issue-1120.rs b/src/tools/rustfmt/tests/source/issue-1120.rs
new file mode 100644
index 000000000..e85c9af99
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1120.rs
@@ -0,0 +1,9 @@
+// rustfmt-reorder_imports: true
+
+// Ensure that a use at the start of an inline module is correctly formatted.
+mod foo {use bar;}
+
+// Ensure that an indented `use` gets the correct indentation.
+mod foo {
+ use bar;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1124.rs b/src/tools/rustfmt/tests/source/issue-1124.rs
new file mode 100644
index 000000000..35c2197fa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1124.rs
@@ -0,0 +1,15 @@
+// rustfmt-reorder_imports: true
+
+use d; use c; use b; use a;
+// The previous line has a space after the `use a;`
+
+mod a { use d; use c; use b; use a; }
+
+use z;
+
+use y;
+
+
+
+use x;
+use a;
diff --git a/src/tools/rustfmt/tests/source/issue-1127.rs b/src/tools/rustfmt/tests/source/issue-1127.rs
new file mode 100644
index 000000000..b49db4e3f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1127.rs
@@ -0,0 +1,23 @@
+// rustfmt-max_width: 120
+// rustfmt-match_arm_blocks: false
+// rustfmt-match_block_trailing_comma: true
+
+fn a_very_very_very_very_very_very_very_very_very_very_very_long_function_name() -> i32 {
+ 42
+}
+
+enum TestEnum {
+ AVeryVeryLongEnumName,
+ AnotherVeryLongEnumName,
+ TheLastVeryLongEnumName,
+}
+
+fn main() {
+ let var = TestEnum::AVeryVeryLongEnumName;
+ let num = match var {
+ TestEnum::AVeryVeryLongEnumName => a_very_very_very_very_very_very_very_very_very_very_very_long_function_name(),
+ TestEnum::AnotherVeryLongEnumName => a_very_very_very_very_very_very_very_very_very_very_very_long_function_name(),
+ TestEnum::TheLastVeryLongEnumName => a_very_very_very_very_very_very_very_very_very_very_very_long_function_name(),
+ };
+}
+
diff --git a/src/tools/rustfmt/tests/source/issue-1158.rs b/src/tools/rustfmt/tests/source/issue-1158.rs
new file mode 100644
index 000000000..6742e1745
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1158.rs
@@ -0,0 +1,3 @@
+trait T {
+ itemmacro!(this, is.now() .formatted(yay));
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1177.rs b/src/tools/rustfmt/tests/source/issue-1177.rs
new file mode 100644
index 000000000..3ac423c5a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1177.rs
@@ -0,0 +1,7 @@
+// rustfmt-normalize_comments: true
+fn main() {
+ // Line Comment
+ /* Block Comment */
+
+ let d = 5;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1192.rs b/src/tools/rustfmt/tests/source/issue-1192.rs
new file mode 100644
index 000000000..4e39fbf9a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1192.rs
@@ -0,0 +1,3 @@
+fn main() {
+ assert!(true) ;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1210/a.rs b/src/tools/rustfmt/tests/source/issue-1210/a.rs
new file mode 100644
index 000000000..6bb9964b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1210/a.rs
@@ -0,0 +1,12 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+impl Foo {
+ fn cxx(&self, target: &str) -> &Path {
+ match self.cxx.get(target) {
+ Some(p) => p.path(),
+ None => panic!("\n\ntarget `{}` is not configured as a host,
+ only as a target\n\n", target),
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1210/b.rs b/src/tools/rustfmt/tests/source/issue-1210/b.rs
new file mode 100644
index 000000000..8c71ef98b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1210/b.rs
@@ -0,0 +1,12 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+impl Foo {
+ fn cxx(&self, target: &str) -> &Path {
+ match self.cxx.get(target) {
+ Some(p) => p.path(),
+ None => panic!("\ntarget `{}`: is not, configured as a host,
+ only as a target\n\n", target),
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1210/c.rs b/src/tools/rustfmt/tests/source/issue-1210/c.rs
new file mode 100644
index 000000000..c080cef95
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1210/c.rs
@@ -0,0 +1,5 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+const foo: String = "trailing_spaces!!
+ keep them! Amet neque. Praesent rhoncus eros non velit.";
diff --git a/src/tools/rustfmt/tests/source/issue-1210/d.rs b/src/tools/rustfmt/tests/source/issue-1210/d.rs
new file mode 100644
index 000000000..783736bc3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1210/d.rs
@@ -0,0 +1,4 @@
+// rustfmt-wrap_comments: true
+
+// trailing_spaces_in_comment!!
+// remove those from above
diff --git a/src/tools/rustfmt/tests/source/issue-1210/e.rs b/src/tools/rustfmt/tests/source/issue-1210/e.rs
new file mode 100644
index 000000000..9abada1d6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1210/e.rs
@@ -0,0 +1,8 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+// explicit line breaks should be kept in order to preserve the layout
+
+const foo: String = "Suspendisse vel augue at felis tincidunt sollicitudin. Fusce arcu.
+ Duis et odio et leo
+ sollicitudin consequat. Aliquam lobortis. Phasellus condimentum.";
diff --git a/src/tools/rustfmt/tests/source/issue-1211.rs b/src/tools/rustfmt/tests/source/issue-1211.rs
new file mode 100644
index 000000000..5818736bf
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1211.rs
@@ -0,0 +1,15 @@
+fn main() {
+ for iface in &ifaces {
+ match iface.addr {
+ get_if_addrs::IfAddr::V4(ref addr) => {
+ match addr.broadcast {
+ Some(ip) => {
+ sock.send_to(&buf, (ip, 8765)).expect("foobar");
+ }
+ _ => ()
+ }
+ }
+ _ => ()
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1216.rs b/src/tools/rustfmt/tests/source/issue-1216.rs
new file mode 100644
index 000000000..d727c158a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1216.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+enum E {
+ A, //* I am not a block comment (caused panic)
+ B,
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1239.rs b/src/tools/rustfmt/tests/source/issue-1239.rs
new file mode 100644
index 000000000..913058257
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1239.rs
@@ -0,0 +1,9 @@
+fn foo() {
+ let with_alignment = if condition__uses_alignment_for_first_if__0 ||
+ condition__uses_alignment_for_first_if__1 ||
+ condition__uses_alignment_for_first_if__2 {
+ } else if condition__no_alignment_for_later_else__0 ||
+ condition__no_alignment_for_later_else__1 ||
+ condition__no_alignment_for_later_else__2 {
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1278.rs b/src/tools/rustfmt/tests/source/issue-1278.rs
new file mode 100644
index 000000000..e25376561
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1278.rs
@@ -0,0 +1,9 @@
+// rustfmt-indent_style = "block"
+
+#![feature(pub_restricted)]
+
+mod inner_mode {
+ pub(super) fn func_name(abc: i32) -> i32 {
+ abc
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1350.rs b/src/tools/rustfmt/tests/source/issue-1350.rs
new file mode 100644
index 000000000..1baa1985a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1350.rs
@@ -0,0 +1,16 @@
+// rustfmt-max_width: 120
+// rustfmt-comment_width: 110
+
+impl Struct {
+ fn fun() {
+ let result = match <R::RequestResult as serde::Deserialize>::deserialize(&json) {
+ Ok(v) => v,
+ Err(e) => {
+ match <R::ErrorResult as serde::Deserialize>::deserialize(&json) {
+ Ok(v) => return Err(Error::with_json(v)),
+ Err(e2) => return Err(Error::with_json(e)),
+ }
+ }
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1366.rs b/src/tools/rustfmt/tests/source/issue-1366.rs
new file mode 100644
index 000000000..9d2964fc7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1366.rs
@@ -0,0 +1,12 @@
+fn main() {
+ fn f() -> Option<i32> {
+ Some("fffffffsssssssssddddssssfffffddddff").map(|s| s).map(|s| s.to_string()).map(|res| {
+ match Some(res) {
+ Some(ref s) if s == "" => 41,
+ Some(_) => 42,
+ _ => 43,
+ }
+ })
+ }
+ println!("{:?}", f())
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1468.rs b/src/tools/rustfmt/tests/source/issue-1468.rs
new file mode 100644
index 000000000..4d0d4f0eb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1468.rs
@@ -0,0 +1,27 @@
+fn issue1468() {
+euc_jp_decoder_functions!({
+let trail_minus_offset = byte.wrapping_sub(0xA1);
+// Fast-track Hiragana (60% according to Lunde)
+// and Katakana (10% according to Lunde).
+if jis0208_lead_minus_offset == 0x03 &&
+trail_minus_offset < 0x53 {
+// Hiragana
+handle.write_upper_bmp(0x3041 + trail_minus_offset as u16)
+} else if jis0208_lead_minus_offset == 0x04 &&
+trail_minus_offset < 0x56 {
+// Katakana
+handle.write_upper_bmp(0x30A1 + trail_minus_offset as u16)
+} else if trail_minus_offset > (0xFE - 0xA1) {
+if byte < 0x80 {
+return (DecoderResult::Malformed(1, 0),
+unread_handle_trail.unread(),
+handle.written());
+}
+return (DecoderResult::Malformed(2, 0),
+unread_handle_trail.consumed(),
+handle.written());
+} else {
+unreachable!();
+}
+});
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1693.rs b/src/tools/rustfmt/tests/source/issue-1693.rs
new file mode 100644
index 000000000..0622ce502
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1693.rs
@@ -0,0 +1,3 @@
+fn issue1693() {
+ let pixel_data = vec![(f16::from_f32(0.82), f16::from_f32(1.78), f16::from_f32(0.21)); 256 * 256];
+}
diff --git a/src/tools/rustfmt/tests/source/issue-1800.rs b/src/tools/rustfmt/tests/source/issue-1800.rs
new file mode 100644
index 000000000..eae226532
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1800.rs
@@ -0,0 +1,3 @@
+#![doc(html_root_url = "http://example.com")]
+#[cfg(feature = "foo")]
+fn a() {}
diff --git a/src/tools/rustfmt/tests/source/issue-1914.rs b/src/tools/rustfmt/tests/source/issue-1914.rs
new file mode 100644
index 000000000..447296c4b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-1914.rs
@@ -0,0 +1,6 @@
+// rustfmt-max_width: 80
+
+extern "C" {
+#[link_name = "_ZN7MyClass26example_check_no_collisionE"]
+ pub static mut MyClass_example_check_no_collision : * const :: std :: os :: raw :: c_int ;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2025.rs b/src/tools/rustfmt/tests/source/issue-2025.rs
new file mode 100644
index 000000000..c6f61b4e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2025.rs
@@ -0,0 +1,8 @@
+
+
+
+
+// See if rustfmt removes empty lines on top of the file.
+pub fn foo() {
+ println!("hello, world");
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2111.rs b/src/tools/rustfmt/tests/source/issue-2111.rs
new file mode 100644
index 000000000..ccd113696
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2111.rs
@@ -0,0 +1,26 @@
+// An import with single line comments.
+use super::{
+ SCHEMA_VERSIONS,
+ LodaModel,
+ ModelProperties,
+ StringMap,
+ ModelSelector,
+ RequestDescription,
+ MethodDescription,
+ ModelBehaviour,
+ ModelRequestGraph,
+ DelayChoice,
+ Holding,
+ Destinations,
+ ModelEdges,
+ Switch,
+ // ModelMetaData,
+ // Generated,
+ // SecondsString,
+ // DateString,
+ // ModelConfiguration,
+ // ModelRequests,
+ // RestResponse,
+ // RestResponseCode,
+ // UniformHolding
+};
diff --git a/src/tools/rustfmt/tests/source/issue-2164.rs b/src/tools/rustfmt/tests/source/issue-2164.rs
new file mode 100644
index 000000000..6c288e1bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2164.rs
@@ -0,0 +1,4 @@
+// A stress test against code generated by bindgen.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct emacs_env_25 { pub size : isize , pub private_members : * mut emacs_env_private , pub make_global_ref : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , any_reference : emacs_value ) -> emacs_value > , pub free_global_ref : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , global_reference : emacs_value ) > , pub non_local_exit_check : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env ) -> emacs_funcall_exit > , pub non_local_exit_clear : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env ) > , pub non_local_exit_get : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , non_local_exit_symbol_out : * mut emacs_value , non_local_exit_data_out : * mut emacs_value ) -> emacs_funcall_exit > , pub non_local_exit_signal : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , non_local_exit_symbol : emacs_value , non_local_exit_data : emacs_value ) > , pub non_local_exit_throw : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , tag : emacs_value , value : emacs_value ) > , pub make_function : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , min_arity : isize , max_arity : isize , function : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , nargs : isize , args : * mut emacs_value , arg1 : * mut ::libc :: c_void ) -> emacs_value > , documentation : * const ::libc :: c_char , data : * mut ::libc :: c_void ) -> emacs_value > , pub funcall : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , function : emacs_value , nargs : isize , args : * mut emacs_value ) -> emacs_value > , pub intern : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , symbol_name : * const ::libc :: c_char ) -> emacs_value > , pub type_of : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : emacs_value ) -> emacs_value > , pub is_not_nil : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : emacs_value ) -> bool > , pub eq : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , a : emacs_value , b : emacs_value ) -> bool > , pub extract_integer : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : emacs_value ) -> intmax_t > , pub make_integer : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : intmax_t ) -> emacs_value > , pub extract_float : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : emacs_value ) -> f64 > , pub make_float : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : f64 ) -> emacs_value > , pub copy_string_contents : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , value : emacs_value , buffer : * mut ::libc :: c_char , size_inout : * mut isize ) -> bool > , pub make_string : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , contents : * const ::libc :: c_char , length : isize ) -> emacs_value > , pub make_user_ptr : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , fin : :: std :: option :: Option < unsafe extern "C" fn ( arg1 : * mut ::libc :: c_void ) > , ptr : * mut ::libc :: c_void ) -> emacs_value > , pub get_user_ptr : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , uptr : emacs_value ) -> * mut ::libc :: c_void > , pub set_user_ptr : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , uptr : emacs_value , ptr : * mut ::libc :: c_void ) > , pub get_user_finalizer : :: std :: option :: Option < unsafe extern "C" fn ( arg1 : * mut ::libc :: c_void , env : * mut emacs_env , uptr : emacs_value ) -> :: std :: option :: Option < unsafe extern "C" fn ( arg1 : * mut ::libc :: c_void , env : * mut emacs_env , uptr : emacs_value ) > > , pub set_user_finalizer : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , uptr : emacs_value , fin : :: std :: option :: Option < unsafe extern "C" fn ( arg1 : * mut ::libc :: c_void ) > ) > , pub vec_get : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , vec : emacs_value , i : isize ) -> emacs_value > , pub vec_set : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , vec : emacs_value , i : isize , val : emacs_value ) > , pub vec_size : :: std :: option :: Option < unsafe extern "C" fn ( env : * mut emacs_env , vec : emacs_value ) -> isize > , }
diff --git a/src/tools/rustfmt/tests/source/issue-2179/one.rs b/src/tools/rustfmt/tests/source/issue-2179/one.rs
new file mode 100644
index 000000000..d23947931
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2179/one.rs
@@ -0,0 +1,36 @@
+// rustfmt-version: One
+// rustfmt-error_on_line_overflow: false
+
+fn issue_2179() {
+ let (opts, rustflags, clear_env_rust_log) =
+ {
+ // We mustn't lock configuration for the whole build process
+ let rls_config = rls_config.lock().unwrap();
+
+ let opts = CargoOptions::new(&rls_config);
+ trace!("Cargo compilation options:\n{:?}", opts);
+ let rustflags = prepare_cargo_rustflags(&rls_config);
+
+ // Warn about invalid specified bin target or package depending on current mode
+ // TODO: Return client notifications along with diagnostics to inform the user
+ if !rls_config.workspace_mode {
+ let cur_pkg_targets = ws.current().unwrap().targets();
+
+ if let &Some(ref build_bin) = rls_config.build_bin.as_ref() {
+ let mut bins = cur_pkg_targets.iter().filter(|x| x.is_bin());
+ if let None = bins.find(|x| x.name() == build_bin) {
+ warn!("cargo - couldn't find binary `{}` specified in `build_bin` configuration", build_bin);
+ }
+ }
+ } else {
+ for package in &opts.package {
+ if let None = ws.members().find(|x| x.name() == package) {
+ warn!("cargo - couldn't find member package `{}` specified in `analyze_package` configuration", package);
+ }
+ }
+ }
+
+ (opts, rustflags, rls_config.clear_env_rust_log)
+ };
+
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2179/two.rs b/src/tools/rustfmt/tests/source/issue-2179/two.rs
new file mode 100644
index 000000000..f4cc9cc48
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2179/two.rs
@@ -0,0 +1,36 @@
+// rustfmt-version: Two
+// rustfmt-error_on_line_overflow: false
+
+fn issue_2179() {
+ let (opts, rustflags, clear_env_rust_log) =
+ {
+ // We mustn't lock configuration for the whole build process
+ let rls_config = rls_config.lock().unwrap();
+
+ let opts = CargoOptions::new(&rls_config);
+ trace!("Cargo compilation options:\n{:?}", opts);
+ let rustflags = prepare_cargo_rustflags(&rls_config);
+
+ // Warn about invalid specified bin target or package depending on current mode
+ // TODO: Return client notifications along with diagnostics to inform the user
+ if !rls_config.workspace_mode {
+ let cur_pkg_targets = ws.current().unwrap().targets();
+
+ if let &Some(ref build_bin) = rls_config.build_bin.as_ref() {
+ let mut bins = cur_pkg_targets.iter().filter(|x| x.is_bin());
+ if let None = bins.find(|x| x.name() == build_bin) {
+ warn!("cargo - couldn't find binary `{}` specified in `build_bin` configuration", build_bin);
+ }
+ }
+ } else {
+ for package in &opts.package {
+ if let None = ws.members().find(|x| x.name() == package) {
+ warn!("cargo - couldn't find member package `{}` specified in `analyze_package` configuration", package);
+ }
+ }
+ }
+
+ (opts, rustflags, rls_config.clear_env_rust_log)
+ };
+
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2256.rs b/src/tools/rustfmt/tests/source/issue-2256.rs
new file mode 100644
index 000000000..a206e8db6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2256.rs
@@ -0,0 +1,12 @@
+// こんにちは
+use std::{};
+use std::borrow::Cow;
+
+/* comment 1 */ use std::{};
+/* comment 2 */ use std::{};
+
+
+
+
+
+/* comment 3 */ use std::{};
diff --git a/src/tools/rustfmt/tests/source/issue-2342.rs b/src/tools/rustfmt/tests/source/issue-2342.rs
new file mode 100644
index 000000000..f86d24a14
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2342.rs
@@ -0,0 +1,5 @@
+// rustfmt-max_width: 80
+
+struct Foo {
+ #[cfg(feature = "serde")] bytes: [[u8; 17]; 5], // Same size as signature::ED25519_PKCS8_V2_LEN
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2445.rs b/src/tools/rustfmt/tests/source/issue-2445.rs
new file mode 100644
index 000000000..84ce6e647
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2445.rs
@@ -0,0 +1,21 @@
+test!(RunPassPretty {
+ // comment
+ path: "src/test/run-pass/pretty",
+ mode: "pretty",
+ suite: "run-pass",
+ default: false,
+ host: true // should, force, , no trailing comma here
+});
+
+test!(RunPassPretty {
+ // comment
+ path: "src/test/run-pass/pretty",
+ mode: "pretty",
+ suite: "run-pass",
+ default: false,
+ host: true, // should, , preserve, the trailing comma
+});
+
+test!(Test{
+ field: i32, // comment
+});
diff --git a/src/tools/rustfmt/tests/source/issue-2446.rs b/src/tools/rustfmt/tests/source/issue-2446.rs
new file mode 100644
index 000000000..ad649d95c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2446.rs
@@ -0,0 +1,11 @@
+enum Issue2446 {
+ V {
+ f: u8, // x
+ },
+}
+
+enum Issue2446TrailingCommentsOnly {
+ V {
+ f: u8, /* */
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2479.rs b/src/tools/rustfmt/tests/source/issue-2479.rs
new file mode 100644
index 000000000..df50236d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2479.rs
@@ -0,0 +1,2 @@
+// Long attributes.
+# [ derive ( Clone , Copy , Debug , PartialEq ) ] pub enum POLARITYR { # [ doc = "Task mode: No effect on pin from OUT[n] task. Event mode: no IN[n] event generated on pin activity." ] NONE , # [ doc = "Task mode: Set pin from OUT[n] task. Event mode: Generate IN[n] event when rising edge on pin." ] LOTOHI , # [ doc = "Task mode: Clear pin from OUT[n] task. Event mode: Generate IN[n] event when falling edge on pin." ] HITOLO , # [ doc = "Task mode: Toggle pin from OUT[n]. Event mode: Generate IN[n] when any change on pin." ] TOGGLE }
diff --git a/src/tools/rustfmt/tests/source/issue-2482/a.rs b/src/tools/rustfmt/tests/source/issue-2482/a.rs
new file mode 100644
index 000000000..fbbcb52a8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2482/a.rs
@@ -0,0 +1,9 @@
+// rustfmt-reorder_modules: true
+
+// Do not reorder inline modules.
+
+mod c;
+mod a {
+ fn a() {}
+}
+mod b;
diff --git a/src/tools/rustfmt/tests/source/issue-2482/b.rs b/src/tools/rustfmt/tests/source/issue-2482/b.rs
new file mode 100644
index 000000000..40a8d9421
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2482/b.rs
@@ -0,0 +1 @@
+pub fn b() {}
diff --git a/src/tools/rustfmt/tests/source/issue-2482/c.rs b/src/tools/rustfmt/tests/source/issue-2482/c.rs
new file mode 100644
index 000000000..d93754551
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2482/c.rs
@@ -0,0 +1 @@
+pub fn c() {}
diff --git a/src/tools/rustfmt/tests/source/issue-2496.rs b/src/tools/rustfmt/tests/source/issue-2496.rs
new file mode 100644
index 000000000..0ebd4b510
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2496.rs
@@ -0,0 +1,16 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ match option {
+ None => some_function(first_reasonably_long_argument,
+ second_reasonably_long_argument),
+ }
+}
+
+fn main() {
+ match option {
+ None => {
+ some_function(first_reasonably_long_argument,
+ second_reasonably_long_argument)
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2520.rs b/src/tools/rustfmt/tests/source/issue-2520.rs
new file mode 100644
index 000000000..5a23f1043
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2520.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+//! ```rust
+//! println!( "hello, world" );
+//! ```
+
+#![deny( missing_docs )]
+
+//! ```rust
+//! println!("hello, world");
+
+#![deny( missing_docs )]
diff --git a/src/tools/rustfmt/tests/source/issue-2523.rs b/src/tools/rustfmt/tests/source/issue-2523.rs
new file mode 100644
index 000000000..491d5c38f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2523.rs
@@ -0,0 +1,18 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+// Do not unindent macro calls in comment with unformattable syntax.
+//! ```rust
+//! let x = 3 ;
+//! some_macro!(pub fn fn foo() (
+//! println!("Don't unindent me!");
+//! ));
+//! ```
+
+// Format items that appear as arguments of macro call.
+//! ```rust
+//! let x = 3 ;
+//! some_macro!(pub fn foo() {
+//! println!("Don't unindent me!");
+//! });
+//! ```
diff --git a/src/tools/rustfmt/tests/source/issue-2582.rs b/src/tools/rustfmt/tests/source/issue-2582.rs
new file mode 100644
index 000000000..bba8ce150
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2582.rs
@@ -0,0 +1 @@
+ fn main() {}
diff --git a/src/tools/rustfmt/tests/source/issue-2641.rs b/src/tools/rustfmt/tests/source/issue-2641.rs
new file mode 100644
index 000000000..c7ad60674
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2641.rs
@@ -0,0 +1,3 @@
+macro_rules! a {
+ () => {{}}
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2644.rs b/src/tools/rustfmt/tests/source/issue-2644.rs
new file mode 100644
index 000000000..fa9d16f44
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2644.rs
@@ -0,0 +1,11 @@
+// rustfmt-max_width: 80
+fn foo(e: Enum) {
+ match e {
+ Enum::Var {
+ element1,
+ element2,
+ } => {
+ return;
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2728.rs b/src/tools/rustfmt/tests/source/issue-2728.rs
new file mode 100644
index 000000000..6cb41b75b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2728.rs
@@ -0,0 +1,8 @@
+// rustfmt-wrap_comments: true
+// rustfmt-newline_style: Windows
+
+//! ```rust
+//! extern crate uom;
+//! ```
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/source/issue-2761.rs b/src/tools/rustfmt/tests/source/issue-2761.rs
new file mode 100644
index 000000000..bc3123190
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2761.rs
@@ -0,0 +1,15 @@
+const DATA: &'static [u8] = &[
+ 0x42, 0x50, 0x54, 0x44, //type
+ 0x23, 0x00, 0x00, 0x00, //size
+ 0x00, 0x00, 0x04, 0x00, //flags
+ 0xEC, 0x0C, 0x00, 0x00, //id
+ 0x00, 0x00, 0x00, 0x00, //revision
+ 0x2B, 0x00, //version
+ 0x00, 0x00, //unknown
+ 0x42, 0x50, 0x54, 0x4E, //field type
+ 0x1D, 0x00, //field size
+ 0x19, 0x00, 0x00, 0x00, //decompressed field size
+ 0x75, 0xc5, 0x21, 0x0d, 0x00, 0x00, 0x08, 0x05, 0xd1, 0x6c, //field data (compressed)
+ 0x6c, 0xdc, 0x57, 0x48, 0x3c, 0xfd, 0x5b, 0x5c, 0x02, 0xd4, //field data (compressed)
+ 0x6b, 0x32, 0xb5, 0xdc, 0xa3 //field data (compressed)
+];
diff --git a/src/tools/rustfmt/tests/source/issue-2781.rs b/src/tools/rustfmt/tests/source/issue-2781.rs
new file mode 100644
index 000000000..2c15b29b6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2781.rs
@@ -0,0 +1,11 @@
+pub // Oh, no. A line comment.
+struct Foo {}
+
+pub /* Oh, no. A block comment. */ struct Foo {}
+
+mod inner {
+pub // Oh, no. A line comment.
+struct Foo {}
+
+pub /* Oh, no. A block comment. */ struct Foo {}
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2794.rs b/src/tools/rustfmt/tests/source/issue-2794.rs
new file mode 100644
index 000000000..c3f9c0412
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2794.rs
@@ -0,0 +1,7 @@
+// rustfmt-indent_style: Block
+// rustfmt-imports_indent: Block
+// rustfmt-imports_layout: Vertical
+
+use std::{
+ env, fs, io::{Read, Write},
+};
diff --git a/src/tools/rustfmt/tests/source/issue-2835.rs b/src/tools/rustfmt/tests/source/issue-2835.rs
new file mode 100644
index 000000000..2219b0b38
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2835.rs
@@ -0,0 +1,7 @@
+// rustfmt-brace_style: AlwaysNextLine
+// rustfmt-fn_single_line: true
+
+fn lorem() -> i32
+{
+ 42
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2863.rs b/src/tools/rustfmt/tests/source/issue-2863.rs
new file mode 100644
index 000000000..1bda857be
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2863.rs
@@ -0,0 +1,25 @@
+// rustfmt-reorder_impl_items: true
+
+impl<T> IntoIterator for SafeVec<T> {
+ type F = impl Trait;
+ type IntoIter = self::IntoIter<T>;
+ type Item = T;
+ // comment on foo()
+ fn foo() {println!("hello, world");}
+ type Bar = u32;
+ fn foo1() {println!("hello, world");}
+ type FooBar = u32;
+ fn foo2() {println!("hello, world");}
+ fn foo3() {println!("hello, world");}
+ const SomeConst: i32 = 100;
+ fn foo4() {println!("hello, world");}
+ fn foo5() {println!("hello, world");}
+ // comment on FoooooBar
+ type FoooooBar = u32;
+ fn foo6() {println!("hello, world");}
+ fn foo7() {println!("hello, world");}
+ type BarFoo = u32;
+ type E = impl Trait;
+ const AnotherConst: i32 = 100;
+ fn foo8() {println!("hello, world");}
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2869.rs b/src/tools/rustfmt/tests/source/issue-2869.rs
new file mode 100644
index 000000000..d18adfb46
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2869.rs
@@ -0,0 +1,41 @@
+// rustfmt-struct_field_align_threshold: 50
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
+struct AuditLog1 {
+ creation_time: String,
+ id: String,
+ operation: String,
+ organization_id: String,
+ record_type: u32,
+ result_status: Option<String>,
+ #[serde(rename = "ClientIP")]
+ client_ip: Option<IpAddr>,
+ object_id: String,
+ actor: Option<Vec<IDType>>,
+ actor_context_id: Option<String>,
+ actor_ip_address: Option<IpAddr>,
+ azure_active_directory_event_type: Option<u8>,
+
+ #[serde(rename = "very")]
+ aaaaa: String,
+ #[serde(rename = "cool")]
+ bb: i32,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
+struct AuditLog2 {
+ creation_time: String,
+ id: String,
+ operation: String,
+ organization_id: String,
+ record_type: u32,
+ result_status: Option<String>,
+ client_ip: Option<IpAddr>,
+ object_id: String,
+ actor: Option<Vec<IDType>>,
+ actor_context_id: Option<String>,
+ actor_ip_address: Option<IpAddr>,
+ azure_active_directory_event_type: Option<u8>,
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2896.rs b/src/tools/rustfmt/tests/source/issue-2896.rs
new file mode 100644
index 000000000..f648e64b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2896.rs
@@ -0,0 +1,161 @@
+extern crate rand;
+extern crate timely;
+extern crate differential_dataflow;
+
+use rand::{Rng, SeedableRng, StdRng};
+
+use timely::dataflow::operators::*;
+
+use differential_dataflow::AsCollection;
+use differential_dataflow::operators::*;
+use differential_dataflow::input::InputSession;
+
+// mod loglikelihoodratio;
+
+fn main() {
+
+ // define a new timely dataflow computation.
+ timely::execute_from_args(std::env::args().skip(6), move |worker| {
+
+ // capture parameters of the experiment.
+ let users: usize = std::env::args().nth(1).unwrap().parse().unwrap();
+ let items: usize = std::env::args().nth(2).unwrap().parse().unwrap();
+ let scale: usize = std::env::args().nth(3).unwrap().parse().unwrap();
+ let batch: usize = std::env::args().nth(4).unwrap().parse().unwrap();
+ let noisy: bool = std::env::args().nth(5).unwrap() == "noisy";
+
+ let index = worker.index();
+ let peers = worker.peers();
+
+ let (input, probe) = worker.dataflow(|scope| {
+
+ // input of (user, item) collection.
+ let (input, occurrences) = scope.new_input();
+ let occurrences = occurrences.as_collection();
+
+ //TODO adjust code to only work with upper triangular half of cooccurrence matrix
+
+ /* Compute the cooccurrence matrix C = A'A from the binary interaction matrix A. */
+ let cooccurrences =
+ occurrences
+ .join_map(&occurrences, |_user, &item_a, &item_b| (item_a, item_b))
+ .filter(|&(item_a, item_b)| item_a != item_b)
+ .count();
+
+ /* compute the rowsums of C indicating how often we encounter individual items. */
+ let row_sums =
+ occurrences
+ .map(|(_user, item)| item)
+ .count();
+
+ // row_sums.inspect(|record| println!("[row_sums] {:?}", record));
+
+ /* Join the cooccurrence pairs with the corresponding row sums. */
+ let mut cooccurrences_with_row_sums = cooccurrences
+ .map(|((item_a, item_b), num_cooccurrences)| (item_a, (item_b, num_cooccurrences)))
+ .join_map(&row_sums, |&item_a, &(item_b, num_cooccurrences), &row_sum_a| {
+ assert!(row_sum_a > 0);
+ (item_b, (item_a, num_cooccurrences, row_sum_a))
+ })
+ .join_map(&row_sums, |&item_b, &(item_a, num_cooccurrences, row_sum_a), &row_sum_b| {
+ assert!(row_sum_a > 0);
+ assert!(row_sum_b > 0);
+ (item_a, (item_b, num_cooccurrences, row_sum_a, row_sum_b))
+ });
+
+ // cooccurrences_with_row_sums
+ // .inspect(|record| println!("[cooccurrences_with_row_sums] {:?}", record));
+
+ // //TODO compute top-k "similar items" per item
+ // /* Compute LLR scores for each item pair. */
+ // let llr_scores = cooccurrences_with_row_sums.map(
+ // |(item_a, (item_b, num_cooccurrences, row_sum_a, row_sum_b))| {
+
+ // println!(
+ // "[llr_scores] item_a={} item_b={}, num_cooccurrences={} row_sum_a={} row_sum_b={}",
+ // item_a, item_b, num_cooccurrences, row_sum_a, row_sum_b);
+
+ // let k11: isize = num_cooccurrences;
+ // let k12: isize = row_sum_a as isize - k11;
+ // let k21: isize = row_sum_b as isize - k11;
+ // let k22: isize = 10000 - k12 - k21 + k11;
+
+ // let llr_score = loglikelihoodratio::log_likelihood_ratio(k11, k12, k21, k22);
+
+ // ((item_a, item_b), llr_score)
+ // });
+
+ if noisy {
+ cooccurrences_with_row_sums =
+ cooccurrences_with_row_sums
+ .inspect(|x| println!("change: {:?}", x));
+ }
+
+ let probe =
+ cooccurrences_with_row_sums
+ .probe();
+/*
+ // produce the (item, item) collection
+ let cooccurrences = occurrences
+ .join_map(&occurrences, |_user, &item_a, &item_b| (item_a, item_b));
+ // count the occurrences of each item.
+ let counts = cooccurrences
+ .map(|(item_a,_)| item_a)
+ .count();
+ // produce ((item1, item2), count1, count2, count12) tuples
+ let cooccurrences_with_counts = cooccurrences
+ .join_map(&counts, |&item_a, &item_b, &count_item_a| (item_b, (item_a, count_item_a)))
+ .join_map(&counts, |&item_b, &(item_a, count_item_a), &count_item_b| {
+ ((item_a, item_b), count_item_a, count_item_b)
+ });
+ let probe = cooccurrences_with_counts
+ .inspect(|x| println!("change: {:?}", x))
+ .probe();
+*/
+ (input, probe)
+ });
+
+ let seed: &[_] = &[1, 2, 3, index];
+ let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions
+ let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions
+
+ let mut input = InputSession::from(input);
+
+ for count in 0 .. scale {
+ if count % peers == index {
+ let user = rng1.gen_range(0, users);
+ let item = rng1.gen_range(0, items);
+ // println!("[INITIAL INPUT] ({}, {})", user, item);
+ input.insert((user, item));
+ }
+ }
+
+ // load the initial data up!
+ while probe.less_than(input.time()) { worker.step(); }
+
+ for round in 1 .. {
+
+ for element in (round * batch) .. ((round + 1) * batch) {
+ if element % peers == index {
+ // advance the input timestamp.
+ input.advance_to(round * batch);
+ // insert a new item.
+ let user = rng1.gen_range(0, users);
+ let item = rng1.gen_range(0, items);
+ if noisy { println!("[INPUT: insert] ({}, {})", user, item); }
+ input.insert((user, item));
+ // remove an old item.
+ let user = rng2.gen_range(0, users);
+ let item = rng2.gen_range(0, items);
+ if noisy { println!("[INPUT: remove] ({}, {})", user, item); }
+ input.remove((user, item));
+ }
+ }
+
+ input.advance_to(round * batch);
+ input.flush();
+
+ while probe.less_than(input.time()) { worker.step(); }
+ }
+ }).unwrap();
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2916.rs b/src/tools/rustfmt/tests/source/issue-2916.rs
new file mode 100644
index 000000000..ccb1f8486
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2916.rs
@@ -0,0 +1,2 @@
+a_macro!(name<Param1, Param2>,
+) ;
diff --git a/src/tools/rustfmt/tests/source/issue-2917/packed_simd.rs b/src/tools/rustfmt/tests/source/issue-2917/packed_simd.rs
new file mode 100644
index 000000000..afa9e67c8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2917/packed_simd.rs
@@ -0,0 +1,63 @@
+// rustfmt-wrap_comments: true
+//! Implements `From` and `Into` for vector types.
+
+macro_rules! impl_from_vector {
+ ([$elem_ty:ident; $elem_count:expr]: $id:ident | $test_tt:tt | $source:ident) => {
+ impl From<$source> for $id {
+ #[inline]
+ fn from(source: $source) -> Self {
+ fn static_assert_same_number_of_lanes<T, U>()
+ where
+ T: crate::sealed::Simd,
+ U: crate::sealed::Simd<LanesType = T::LanesType>,
+ {
+ }
+ use llvm::simd_cast;
+ static_assert_same_number_of_lanes::<$id, $source>();
+ Simd(unsafe { simd_cast(source.0) })
+ }
+ }
+
+ // FIXME: `Into::into` is not inline, but due to
+ // the blanket impl in `std`, which is not
+ // marked `default`, we cannot override it here with
+ // specialization.
+ /*
+ impl Into<$id> for $source {
+ #[inline]
+ fn into(self) -> $id {
+ unsafe { simd_cast(self) }
+ }
+ }
+ */
+
+ test_if!{
+ $test_tt:
+ interpolate_idents! {
+ mod [$id _from_ $source] {
+ use super::*;
+ #[test]
+ fn from() {
+ assert_eq!($id::lanes(), $source::lanes());
+ let source: $source = Default::default();
+ let vec: $id = Default::default();
+
+ let e = $id::from(source);
+ assert_eq!(e, vec);
+
+ let e: $id = source.into();
+ assert_eq!(e, vec);
+ }
+ }
+ }
+ }
+ };
+}
+
+macro_rules! impl_from_vectors {
+ ([$elem_ty:ident; $elem_count:expr]: $id:ident | $test_tt:tt | $($source:ident),*) => {
+ $(
+ impl_from_vector!([$elem_ty; $elem_count]: $id | $test_tt | $source);
+ )*
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2922.rs b/src/tools/rustfmt/tests/source/issue-2922.rs
new file mode 100644
index 000000000..44fae0b64
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2922.rs
@@ -0,0 +1,9 @@
+// rustfmt-indent_style: Visual
+struct Functions {
+ RunListenServer: unsafe extern "C" fn(*mut c_void,
+ *mut c_char,
+ *mut c_char,
+ *mut c_char,
+ *mut c_void,
+ *mut c_void) -> c_int,
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2927-2.rs b/src/tools/rustfmt/tests/source/issue-2927-2.rs
new file mode 100644
index 000000000..d87761fdc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2927-2.rs
@@ -0,0 +1,7 @@
+// rustfmt-edition: 2015
+#![feature(rust_2018_preview, uniform_paths)]
+use futures::prelude::*;
+use http_03::cli::Cli;
+use hyper::{service::service_fn_ok, Body, Response, Server};
+use ::log::{error, info, log};
+use structopt::StructOpt;
diff --git a/src/tools/rustfmt/tests/source/issue-2927.rs b/src/tools/rustfmt/tests/source/issue-2927.rs
new file mode 100644
index 000000000..a7df32084
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2927.rs
@@ -0,0 +1,7 @@
+// rustfmt-edition: 2018
+#![feature(rust_2018_preview, uniform_paths)]
+use futures::prelude::*;
+use http_03::cli::Cli;
+use hyper::{service::service_fn_ok, Body, Response, Server};
+use ::log::{error, info, log};
+use structopt::StructOpt;
diff --git a/src/tools/rustfmt/tests/source/issue-2930.rs b/src/tools/rustfmt/tests/source/issue-2930.rs
new file mode 100644
index 000000000..962c3e4fe
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2930.rs
@@ -0,0 +1,5 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ let (first_variable, second_variable) = (this_is_something_with_an_extraordinarily_long_name,
+ this_variable_name_is_also_pretty_long);
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2936.rs b/src/tools/rustfmt/tests/source/issue-2936.rs
new file mode 100644
index 000000000..55b5c56e6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2936.rs
@@ -0,0 +1,19 @@
+struct AStruct {
+ A: u32,
+ B: u32,
+ C: u32,
+}
+
+impl Something for AStruct {
+ fn a_func() {
+ match a_val {
+ ContextualParseError::InvalidMediaRule(ref err) => {
+ let err: &CStr = match err.kind {
+ ParseErrorKind::Custom(StyleParseErrorKind::MediaQueryExpectedFeatureName(..)) => {
+ cstr!("PEMQExpectedFeatureName")
+ },
+ };
+ }
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2955.rs b/src/tools/rustfmt/tests/source/issue-2955.rs
new file mode 100644
index 000000000..525e070a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2955.rs
@@ -0,0 +1,6 @@
+// rustfmt-condense_wildcard_suffixes: true
+fn main() {
+ match (1, 2, 3) {
+ (_, _, _) => (),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2973.rs b/src/tools/rustfmt/tests/source/issue-2973.rs
new file mode 100644
index 000000000..5256dd7c9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2973.rs
@@ -0,0 +1,158 @@
+#[cfg(test)]
+mod test {
+ summary_test! {
+ tokenize_recipe_interpolation_eol,
+ "foo: # some comment
+ {{hello}}
+",
+ "foo: \
+ {{hello}} \
+{{ahah}}",
+ "N:#$>^{N}$<.",
+ }
+
+ summary_test! {
+ tokenize_strings,
+ r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
+ r#"N="+'+"+'#."#,
+ }
+
+ summary_test! {
+ tokenize_recipe_interpolation_eol,
+ "foo: # some comment
+ {{hello}}
+",
+ "N:#$>^{N}$<.",
+ }
+
+ summary_test! {
+ tokenize_recipe_interpolation_eof,
+ "foo: # more comments
+ {{hello}}
+# another comment
+",
+ "N:#$>^{N}$<#$.",
+ }
+
+ summary_test! {
+ tokenize_recipe_complex_interpolation_expression,
+ "foo: #lol\n {{a + b + \"z\" + blarg}}",
+ "N:#$>^{N+N+\"+N}<.",
+ }
+
+ summary_test! {
+ tokenize_recipe_multiple_interpolations,
+ "foo:,#ok\n {{a}}0{{b}}1{{c}}",
+ "N:,#$>^{N}_{N}_{N}<.",
+ }
+
+ summary_test! {
+ tokenize_junk,
+ "bob
+
+hello blah blah blah : a b c #whatever
+ ",
+ "N$$NNNN:NNN#$.",
+ }
+
+ summary_test! {
+ tokenize_empty_lines,
+ "
+# this does something
+hello:
+ asdf
+ bsdf
+
+ csdf
+
+ dsdf # whatever
+
+# yolo
+ ",
+ "$#$N:$>^_$^_$$^_$$^_$$<#$.",
+ }
+
+ summary_test! {
+ tokenize_comment_before_variable,
+ "
+#
+A='1'
+echo:
+ echo {{A}}
+ ",
+ "$#$N='$N:$>^_{N}$<.",
+ }
+
+ summary_test! {
+ tokenize_interpolation_backticks,
+ "hello:\n echo {{`echo hello` + `echo goodbye`}}",
+ "N:$>^_{`+`}<.",
+ }
+
+ summary_test! {
+ tokenize_assignment_backticks,
+ "a = `echo hello` + `echo goodbye`",
+ "N=`+`.",
+ }
+
+ summary_test! {
+ tokenize_multiple,
+ "
+hello:
+ a
+ b
+
+ c
+
+ d
+
+# hello
+bob:
+ frank
+ ",
+
+ "$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.",
+ }
+
+ summary_test! {
+ tokenize_comment,
+ "a:=#",
+ "N:=#."
+ }
+
+ summary_test! {
+ tokenize_comment_with_bang,
+ "a:=#foo!",
+ "N:=#."
+ }
+
+ summary_test! {
+ tokenize_order,
+ r"
+b: a
+ @mv a b
+
+a:
+ @touch F
+ @touch a
+
+d: c
+ @rm c
+
+c: b
+ @mv b c",
+ "$N:N$>^_$$<N:$>^_$^_$$<N:N$>^_$$<N:N$>^_<.",
+ }
+
+ summary_test! {
+ tokenize_parens,
+ r"((())) )abc(+",
+ "((())))N(+.",
+ }
+
+ summary_test! {
+ crlf_newline,
+ "#\r\n#asdf\r\n",
+ "#$#$.",
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2977/impl.rs b/src/tools/rustfmt/tests/source/issue-2977/impl.rs
new file mode 100644
index 000000000..8d7bb9414
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2977/impl.rs
@@ -0,0 +1,44 @@
+macro_rules! atomic_bits {
+ // the println macro cannot be rewritten because of the asm macro
+ ($type:ty, $ldrex:expr, $strex:expr) => {
+ impl AtomicBits for $type {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+
+ unsafe fn store_excl(self, address: usize) -> bool {
+ let status: $type;
+ println!("{}",
+ status);
+ status == 0
+ }
+ }
+ };
+
+ // the println macro should be rewritten here
+ ($type:ty) => {
+ fn some_func(self) {
+ let status: $type;
+ println!("{}", status);
+ }
+ };
+
+ // unrewritale macro in func
+ ($type:ty, $ldrex:expr) => {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2977/trait.rs b/src/tools/rustfmt/tests/source/issue-2977/trait.rs
new file mode 100644
index 000000000..ae20668cd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2977/trait.rs
@@ -0,0 +1,44 @@
+macro_rules! atomic_bits {
+ // the println macro cannot be rewritten because of the asm macro
+ ($type:ty, $ldrex:expr, $strex:expr) => {
+ trait $type {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+
+ unsafe fn store_excl(self, address: usize) -> bool {
+ let status: $type;
+ println!("{}",
+ status);
+ status == 0
+ }
+ }
+ };
+
+ // the println macro should be rewritten here
+ ($type:ty) => {
+ fn some_func(self) {
+ let status: $type;
+ println!("{}", status);
+ }
+ };
+
+ // unrewritale macro in func
+ ($type:ty, $ldrex:expr) => {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2985.rs b/src/tools/rustfmt/tests/source/issue-2985.rs
new file mode 100644
index 000000000..bde4da831
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2985.rs
@@ -0,0 +1,35 @@
+// rustfmt-indent_style: Visual
+fn foo() {
+ {
+ {
+ let extra_encoder_settings = extra_encoder_settings.iter()
+ .filter_map(|&(name, value)| {
+ value.split()
+ .next()
+ .something()
+ .something2()
+ .something3()
+ .something4()
+ });
+ let extra_encoder_settings = extra_encoder_settings.iter()
+ .filter_map(|&(name, value)| {
+ value.split()
+ .next()
+ .something()
+ .something2()
+ .something3()
+ .something4()
+ })
+ .something();
+ if let Some(subpod) = pod.subpods.iter().find(|s| {
+ !s.plaintext
+ .as_ref()
+ .map(String::as_ref)
+ .unwrap_or("")
+ .is_empty()
+ }) {
+ do_something();
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-2995.rs b/src/tools/rustfmt/tests/source/issue-2995.rs
new file mode 100644
index 000000000..accf7c3a1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-2995.rs
@@ -0,0 +1,7 @@
+fn issue_2995() {
+ // '\u{2028}' is inserted in the code below.
+
+ [0, 
1];
+ [0, 
/* */ 1];
+ 
[
0
,
1
]
;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3029.rs b/src/tools/rustfmt/tests/source/issue-3029.rs
new file mode 100644
index 000000000..a7ac5c32b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3029.rs
@@ -0,0 +1,94 @@
+fn keep_if() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ if FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_if_let() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ if let Some(e) = FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_for() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ for conv in FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_loop() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(loop {
+ FromJSValConvertible::from_jsval(cx, rval.handle(), ());
+ })
+ }
+ }
+ }
+}
+
+fn keep_while() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ while FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_while_let() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ while let Some(e) = FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_match() {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ match FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ Ok(ConversionResult::Success(v)) => v,
+ _ => unreachable!(),
+ },
+ )
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3038.rs b/src/tools/rustfmt/tests/source/issue-3038.rs
new file mode 100644
index 000000000..0fbb05ddc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3038.rs
@@ -0,0 +1,20 @@
+impl HTMLTableElement {
+ fn func() {
+ if number_of_row_elements == 0 {
+ if let Some(last_tbody) = node.rev_children()
+ .filter_map(DomRoot::downcast::<Element>)
+ .find(|n| n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")) {
+ last_tbody.upcast::<Node>().AppendChild(new_row.upcast::<Node>())
+ .expect("InsertRow failed to append first row.");
+ }
+ }
+
+ if number_of_row_elements == 0 {
+ if let Some(last_tbody) = node
+ .find(|n| n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")) {
+ last_tbody.upcast::<Node>().AppendChild(new_row.upcast::<Node>())
+ .expect("InsertRow failed to append first row.");
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3049.rs b/src/tools/rustfmt/tests/source/issue-3049.rs
new file mode 100644
index 000000000..43742683e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3049.rs
@@ -0,0 +1,45 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ something.aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .bench_function(|| {
+ let x = hello();
+ });
+
+ something.aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .bench_function(arg, || {
+ let x = hello();
+ });
+
+ something.aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .bench_function(arg,
+ || {
+ let x = hello();
+ },
+ arg);
+
+ AAAAAAAAAAA.function(|| {
+ let _ = ();
+ });
+
+ AAAAAAAAAAA.chain().function(|| {
+ let _ = ();
+ })
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3055/original.rs b/src/tools/rustfmt/tests/source/issue-3055/original.rs
new file mode 100644
index 000000000..45e58473a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3055/original.rs
@@ -0,0 +1,43 @@
+// rustfmt-wrap_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+/// Vestibulum elit nibh, rhoncus non, euismod sit amet, pretium eu, enim. Nunc commodo ultricies dui.
+///
+/// Should not format with text attribute
+/// ```text
+/// .--------------.
+/// | v
+/// Park <- Idle -> Poll -> Probe -> Download -> Install -> Reboot
+/// ^ ^ ' ' '
+/// ' ' ' ' '
+/// ' `--------' ' '
+/// `---------------' ' '
+/// `--------------------------' '
+/// `-------------------------------------'
+/// ```
+///
+/// Should not format with ignore attribute
+/// ```text
+/// .--------------.
+/// | v
+/// Park <- Idle -> Poll -> Probe -> Download -> Install -> Reboot
+/// ^ ^ ' ' '
+/// ' ' ' ' '
+/// ' `--------' ' '
+/// `---------------' ' '
+/// `--------------------------' '
+/// `-------------------------------------'
+/// ```
+///
+/// Should format with rust attribute
+/// ```rust
+/// let x =
+/// 42;
+/// ```
+///
+/// Should format with no attribute as it defaults to rust
+/// ```
+/// let x =
+/// 42;
+/// ```
+fn func() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3059.rs b/src/tools/rustfmt/tests/source/issue-3059.rs
new file mode 100644
index 000000000..49a75cd67
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3059.rs
@@ -0,0 +1,7 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 80
+
+/// Vestibulum elit nibh, rhoncus non, euismod sit amet, pretium eu, enim. Nunc commodo ultricies dui.
+/// Cras gravida rutrum massa. Donec accumsan mattis turpis. Quisque sem. Quisque elementum sapien
+/// iaculis augue. In dui sem, congue sit amet, feugiat quis, lobortis at, eros.
+fn func4() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3066.rs b/src/tools/rustfmt/tests/source/issue-3066.rs
new file mode 100644
index 000000000..4d1ece43d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3066.rs
@@ -0,0 +1,7 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ Struct { field: aaaaaaaaaaa };
+ Struct { field: aaaaaaaaaaaa, };
+ Struct { field: value,
+ field2: value2, };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3131.rs b/src/tools/rustfmt/tests/source/issue-3131.rs
new file mode 100644
index 000000000..c4cb2d8c0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3131.rs
@@ -0,0 +1,8 @@
+fn main() {
+ match 3 {
+ t if match t {
+ _ => true,
+ } => {},
+ _ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3153.rs b/src/tools/rustfmt/tests/source/issue-3153.rs
new file mode 100644
index 000000000..2836ce97c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3153.rs
@@ -0,0 +1,9 @@
+// rustfmt-wrap_comments: true
+
+/// This may panic if:
+/// - there are fewer than `max_header_bytes` bytes preceding the body
+/// - there are fewer than `max_footer_bytes` bytes following the body
+/// - the sum of the body bytes and post-body bytes is less than the sum
+/// of `min_body_and_padding_bytes` and `max_footer_bytes` (in other
+/// words, the minimum body and padding byte requirement is not met)
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3158.rs b/src/tools/rustfmt/tests/source/issue-3158.rs
new file mode 100644
index 000000000..315073db6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3158.rs
@@ -0,0 +1,74 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// Should format
+/// ```rust
+/// assert!( false );
+/// ```
+///
+/// Should format
+/// ```rust,should_panic
+/// assert!( false );
+/// ```
+///
+/// Should format
+/// ```rust,should_panic,edition2018
+/// assert!( false );
+/// ```
+///
+/// Should format
+/// ```rust , should_panic , edition2018
+/// assert!( false );
+/// ```
+///
+/// Should not format
+/// ```ignore
+/// assert!( false );
+/// ```
+///
+/// Should not format (not all are rust)
+/// ```rust,ignore
+/// assert!( false );
+/// ```
+///
+/// Should not format (rust compile_fail)
+/// ```compile_fail
+/// assert!( false );
+/// ```
+///
+/// Should not format (rust compile_fail)
+/// ```rust,compile_fail
+/// assert!( false );
+/// ```
+///
+/// Various unspecified ones that should format
+/// ```
+/// assert!( false );
+/// ```
+///
+/// ```,
+/// assert!( false );
+/// ```
+///
+/// ```,,,,,
+/// assert!( false );
+/// ```
+///
+/// ```,,, rust ,,
+/// assert!( false );
+/// ```
+///
+/// Should not format
+/// ```,,, rust , ignore,
+/// assert!( false );
+/// ```
+///
+/// Few empty ones
+/// ```
+/// ```
+///
+/// ```rust
+/// ```
+///
+/// ```ignore
+/// ```
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3194.rs b/src/tools/rustfmt/tests/source/issue-3194.rs
new file mode 100644
index 000000000..b80ce346b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3194.rs
@@ -0,0 +1,13 @@
+mod m { struct S where A: B; }
+
+mod n { struct Foo where A: B { foo: usize } }
+
+mod o { enum Bar where A: B { Bar } }
+
+mod with_comments {
+ mod m { struct S /* before where */ where A: B; /* after where */ }
+
+ mod n { struct Foo /* before where */ where A: B /* after where */ { foo: usize } }
+
+ mod o { enum Bar /* before where */ where A: B /* after where */ { Bar } }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3198.rs b/src/tools/rustfmt/tests/source/issue-3198.rs
new file mode 100644
index 000000000..48cb24a00
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3198.rs
@@ -0,0 +1,99 @@
+impl TestTrait {
+ fn foo_one_pre(/* Important comment1 */
+ self) {
+ }
+
+ fn foo_one_post(self
+ /* Important comment1 */) {
+ }
+
+ fn foo_pre(
+ /* Important comment1 */
+ self,
+ /* Important comment2 */
+ a: i32,
+ ) {
+ }
+
+ fn foo_post(
+ self
+ /* Important comment1 */,
+ a: i32
+ /* Important comment2 */,
+ ) {
+ }
+
+ fn bar_pre(
+ /* Important comment1 */
+ &mut self,
+ /* Important comment2 */
+ a: i32,
+ ) {
+ }
+
+ fn bar_post(
+ &mut self
+ /* Important comment1 */,
+ a: i32
+ /* Important comment2 */,
+ ) {
+ }
+
+ fn baz_pre(
+ /* Important comment1 */
+ self: X< 'a , 'b >,
+ /* Important comment2 */
+ a: i32,
+ ) {
+ }
+
+ fn baz_post(
+ self: X< 'a , 'b >
+ /* Important comment1 */,
+ a: i32
+ /* Important comment2 */,
+ ) {
+ }
+
+ fn baz_tree_pre(
+ /* Important comment1 */
+ self: X< 'a , 'b >,
+ /* Important comment2 */
+ a: i32,
+ /* Important comment3 */
+ b: i32,
+ ) {
+ }
+
+ fn baz_tree_post(
+ self: X< 'a , 'b >
+ /* Important comment1 */,
+ a: i32
+ /* Important comment2 */,
+ b: i32
+ /* Important comment3 */,){
+ }
+
+ fn multi_line(
+ self: X<'a, 'b>, /* Important comment1-1 */
+ /* Important comment1-2 */
+ a: i32, /* Important comment2 */
+ b: i32, /* Important comment3 */
+ ) {
+ }
+
+ fn two_line_comment(
+ self: X<'a, 'b>, /* Important comment1-1
+ Important comment1-2 */
+ a: i32, /* Important comment2 */
+ b: i32, /* Important comment3 */
+ ) {
+ }
+
+ fn no_first_line_comment(
+ self: X<'a, 'b>,
+ /* Important comment2 */a: i32,
+ /* Important comment3 */b: i32,
+ ) {
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3213/version_one.rs b/src/tools/rustfmt/tests/source/issue-3213/version_one.rs
new file mode 100644
index 000000000..f9f4cab55
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3213/version_one.rs
@@ -0,0 +1,9 @@
+// rustfmt-version: One
+
+fn foo() {
+ match 0 {
+ 0 => return AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ 1 => AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ _ => "",
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3213/version_two.rs b/src/tools/rustfmt/tests/source/issue-3213/version_two.rs
new file mode 100644
index 000000000..0f068c19d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3213/version_two.rs
@@ -0,0 +1,9 @@
+// rustfmt-version: Two
+
+fn foo() {
+ match 0 {
+ 0 => return AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ 1 => AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ _ => "",
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3217.rs b/src/tools/rustfmt/tests/source/issue-3217.rs
new file mode 100644
index 000000000..176c70200
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3217.rs
@@ -0,0 +1,8 @@
+#![feature(label_break_value)]
+
+fn main() {
+ let mut res = 0;
+ 's_39: { if res == 0i32 { println!("Hello, world!"); } }
+ 's_40: loop { println!("res = {}", res); res += 1; if res == 3i32 { break 's_40; } }
+ let toto = || { if true { 42 } else { 24 } };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3227/two.rs b/src/tools/rustfmt/tests/source/issue-3227/two.rs
new file mode 100644
index 000000000..c1572c00d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3227/two.rs
@@ -0,0 +1,13 @@
+// rustfmt-version: Two
+
+fn main() {
+ thread::spawn(|| {
+ while true {
+ println!("iteration");
+ }
+ });
+
+ thread::spawn(|| loop {
+ println!("iteration");
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3234.rs b/src/tools/rustfmt/tests/source/issue-3234.rs
new file mode 100644
index 000000000..120740a72
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3234.rs
@@ -0,0 +1,14 @@
+macro_rules! fuzz_target {
+ (|$data:ident: &[u8]| $body:block) => {};
+}
+
+fuzz_target!(|data: &[u8]| {
+
+ if let Ok(app_img) = AppImage::parse(data) {
+ if let Ok(app_img) = app_img.sign_for_secureboot(include_str!("../../test-data/signing-key")) {
+ assert!(app_img.is_signed());
+ Gbl::from_app_image(app_img).to_bytes();
+ }
+ }
+
+});
diff --git a/src/tools/rustfmt/tests/source/issue-3241.rs b/src/tools/rustfmt/tests/source/issue-3241.rs
new file mode 100644
index 000000000..090284a21
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3241.rs
@@ -0,0 +1,11 @@
+// rustfmt-edition: 2018
+
+use ::ignore;
+use ::ignore::some::more;
+use ::{foo, bar};
+use ::*;
+use ::baz::{foo, bar};
+
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3253/bar.rs b/src/tools/rustfmt/tests/source/issue-3253/bar.rs
new file mode 100644
index 000000000..eaeffd3ad
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3253/bar.rs
@@ -0,0 +1,4 @@
+// Empty
+ fn empty() {
+
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3253/foo.rs b/src/tools/rustfmt/tests/source/issue-3253/foo.rs
new file mode 100644
index 000000000..4ebe5326b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3253/foo.rs
@@ -0,0 +1,6 @@
+pub fn hello( )
+ {
+println!("Hello World!");
+
+ }
+
diff --git a/src/tools/rustfmt/tests/source/issue-3253/lib.rs b/src/tools/rustfmt/tests/source/issue-3253/lib.rs
new file mode 100644
index 000000000..3eef586bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3253/lib.rs
@@ -0,0 +1,14 @@
+#[macro_use]
+extern crate cfg_if;
+
+cfg_if! {
+ if #[cfg(target_family = "unix")] {
+ mod foo;
+ #[path = "paths/bar_foo.rs"]
+ mod bar_foo;
+ } else {
+ mod bar;
+ #[path = "paths/foo_bar.rs"]
+ mod foo_bar;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3253/paths/bar_foo.rs b/src/tools/rustfmt/tests/source/issue-3253/paths/bar_foo.rs
new file mode 100644
index 000000000..da19f9dfa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3253/paths/bar_foo.rs
@@ -0,0 +1,3 @@
+fn foo_decl_item(x: &mut i32) {
+ x = 3;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3253/paths/excluded.rs b/src/tools/rustfmt/tests/source/issue-3253/paths/excluded.rs
new file mode 100644
index 000000000..5c63eb832
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3253/paths/excluded.rs
@@ -0,0 +1,6 @@
+// This module is not imported in the cfg_if macro in lib.rs so it is ignored
+// while the foo and bar mods are formatted.
+// Check the corresponding file in tests/target/issue-3253/paths/excluded.rs
+trait CoolerTypes { fn dummy(&self) {
+}
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3253/paths/foo_bar.rs b/src/tools/rustfmt/tests/source/issue-3253/paths/foo_bar.rs
new file mode 100644
index 000000000..fbb5d92c6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3253/paths/foo_bar.rs
@@ -0,0 +1,4 @@
+
+
+fn Foo<T>() where T: Bar {
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3265.rs b/src/tools/rustfmt/tests/source/issue-3265.rs
new file mode 100644
index 000000000..e927cf2be
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3265.rs
@@ -0,0 +1,14 @@
+// rustfmt-newline_style: Windows
+#[cfg(test)]
+mod test {
+ summary_test! {
+ tokenize_recipe_interpolation_eol,
+ "foo: # some comment
+ {{hello}}
+",
+ "foo: \
+ {{hello}} \
+{{ahah}}",
+ "N:#$>^{N}$<.",
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3270/one.rs b/src/tools/rustfmt/tests/source/issue-3270/one.rs
new file mode 100644
index 000000000..3c2e27e22
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3270/one.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: One
+
+pub fn main() {
+ /* let s = String::from(
+ "
+hello
+world
+",
+ ); */
+
+ assert_eq!(s, "\nhello\nworld\n");
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3270/two.rs b/src/tools/rustfmt/tests/source/issue-3270/two.rs
new file mode 100644
index 000000000..0eb756471
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3270/two.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: Two
+
+pub fn main() {
+ /* let s = String::from(
+ "
+hello
+world
+",
+ ); */
+
+ assert_eq!(s, "\nhello\nworld\n");
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3272/v1.rs b/src/tools/rustfmt/tests/source/issue-3272/v1.rs
new file mode 100644
index 000000000..f4c1b7c99
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3272/v1.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: One
+
+fn main() {
+ assert!(HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some());
+
+ assert(
+ HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some(),
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3272/v2.rs b/src/tools/rustfmt/tests/source/issue-3272/v2.rs
new file mode 100644
index 000000000..0148368ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3272/v2.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: Two
+
+fn main() {
+ assert!(HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some());
+
+ assert(
+ HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some(),
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3278/version_one.rs b/src/tools/rustfmt/tests/source/issue-3278/version_one.rs
new file mode 100644
index 000000000..580679fba
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3278/version_one.rs
@@ -0,0 +1,8 @@
+// rustfmt-version: One
+
+pub fn parse_conditional<'a, I: 'a>(
+) -> impl Parser<Input = I, Output = Expr, PartialState = ()> + 'a
+where
+ I: Stream<Item = char>,
+{
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3278/version_two.rs b/src/tools/rustfmt/tests/source/issue-3278/version_two.rs
new file mode 100644
index 000000000..c17b1742d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3278/version_two.rs
@@ -0,0 +1,8 @@
+// rustfmt-version: Two
+
+pub fn parse_conditional<'a, I: 'a>()
+-> impl Parser<Input = I, Output = Expr, PartialState = ()> + 'a
+where
+ I: Stream<Item = char>,
+{
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3295/two.rs b/src/tools/rustfmt/tests/source/issue-3295/two.rs
new file mode 100644
index 000000000..0eaf02224
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3295/two.rs
@@ -0,0 +1,13 @@
+// rustfmt-version: Two
+pub enum TestEnum {
+ a,
+ b,
+}
+
+fn the_test(input: TestEnum) {
+ match input {
+ TestEnum::a => String::from("aaa"),
+ TestEnum::b => String::from("this is a very very very very very very very very very very very very very very very ong string"),
+
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3302.rs b/src/tools/rustfmt/tests/source/issue-3302.rs
new file mode 100644
index 000000000..c037584fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3302.rs
@@ -0,0 +1,43 @@
+// rustfmt-version: Two
+
+macro_rules! moo1 {
+ () => {
+ bar! {
+"
+"
+ }
+ };
+}
+
+macro_rules! moo2 {
+ () => {
+ bar! {
+ "
+"
+ }
+ };
+}
+
+macro_rules! moo3 {
+ () => {
+ 42
+ /*
+ bar! {
+ "
+ toto
+tata"
+ }
+ */
+ };
+}
+
+macro_rules! moo4 {
+ () => {
+ bar! {
+"
+ foo
+ bar
+baz"
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3343.rs b/src/tools/rustfmt/tests/source/issue-3343.rs
new file mode 100644
index 000000000..5670b04f5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3343.rs
@@ -0,0 +1,47 @@
+// rustfmt-inline_attribute_width: 50
+
+#[cfg(feature = "alloc")]
+use core::slice;
+
+#[cfg(feature = "alloc")]
+use total_len_is::_50__;
+
+#[cfg(feature = "alloc")]
+use total_len_is::_51___;
+
+#[cfg(feature = "alloc")]
+extern crate len_is_50_;
+
+#[cfg(feature = "alloc")]
+extern crate len_is_51__;
+
+/// this is a comment to test is_sugared_doc property
+use core::convert;
+
+#[fooooo]
+#[barrrrr]
+use total_len_is_::_51______;
+
+#[cfg(not(all(
+ feature = "std",
+ any(
+ target_os = "linux",
+ target_os = "android",
+ target_os = "netbsd",
+ target_os = "dragonfly",
+ target_os = "haiku",
+ target_os = "emscripten",
+ target_os = "solaris",
+ target_os = "cloudabi",
+ target_os = "macos",
+ target_os = "ios",
+ target_os = "freebsd",
+ target_os = "openbsd",
+ target_os = "redox",
+ target_os = "fuchsia",
+ windows,
+ all(target_arch = "wasm32", feature = "stdweb"),
+ all(target_arch = "wasm32", feature = "wasm-bindgen"),
+ )
+)))]
+use core::slice;
diff --git a/src/tools/rustfmt/tests/source/issue-3423.rs b/src/tools/rustfmt/tests/source/issue-3423.rs
new file mode 100644
index 000000000..fbe8e5c37
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3423.rs
@@ -0,0 +1,5 @@
+/* a nice comment with a trailing whitespace */
+fn foo() {}
+
+/* a nice comment with a trailing tab */
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3434/lib.rs b/src/tools/rustfmt/tests/source/issue-3434/lib.rs
new file mode 100644
index 000000000..7e396b383
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3434/lib.rs
@@ -0,0 +1,57 @@
+#![rustfmt::skip::macros(skip_macro_mod)]
+
+mod no_entry;
+
+#[rustfmt::skip::macros(html, skip_macro)]
+fn main() {
+ let macro_result1 = html! { <div>
+this should be skipped</div>
+ }
+ .to_string();
+
+ let macro_result2 = not_skip_macro! { <div>
+this should be mangled</div>
+ }
+ .to_string();
+
+ skip_macro! {
+this should be skipped
+};
+
+ foo();
+}
+
+fn foo() {
+ let macro_result1 = html! { <div>
+this should be mangled</div>
+ }
+ .to_string();
+}
+
+fn bar() {
+ let macro_result1 = skip_macro_mod! { <div>
+this should be skipped</div>
+ }
+ .to_string();
+}
+
+fn visitor_made_from_same_context() {
+ let pair = (
+ || {
+ foo!(<div>
+this should be mangled</div>
+ );
+ skip_macro_mod!(<div>
+this should be skipped</div>
+ );
+ },
+ || {
+ foo!(<div>
+this should be mangled</div>
+ );
+ skip_macro_mod!(<div>
+this should be skipped</div>
+ );
+ },
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3434/no_entry.rs b/src/tools/rustfmt/tests/source/issue-3434/no_entry.rs
new file mode 100644
index 000000000..0838829fe
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3434/no_entry.rs
@@ -0,0 +1,18 @@
+#[rustfmt::skip::macros(another_macro)]
+fn foo() {
+ another_macro!(
+This should be skipped.
+ );
+}
+
+fn bar() {
+ skip_macro_mod!(
+This should be skipped.
+ );
+}
+
+fn baz() {
+ let macro_result1 = no_skip_macro! { <div>
+this should be mangled</div>
+ }.to_string();
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3434/not_skip_macro.rs b/src/tools/rustfmt/tests/source/issue-3434/not_skip_macro.rs
new file mode 100644
index 000000000..1d7d73c52
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3434/not_skip_macro.rs
@@ -0,0 +1,8 @@
+#[this::is::not::skip::macros(ouch)]
+
+fn main() {
+ let macro_result1 = ouch! { <div>
+this should be mangled</div>
+ }
+ .to_string();
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3465.rs b/src/tools/rustfmt/tests/source/issue-3465.rs
new file mode 100644
index 000000000..0bc95ad46
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3465.rs
@@ -0,0 +1,42 @@
+fn main() {
+ ((((((((((((((((((((((((((((((((((((((((((0) + 1) + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1);
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3494/crlf.rs b/src/tools/rustfmt/tests/source/issue-3494/crlf.rs
new file mode 100644
index 000000000..9ce457c7b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3494/crlf.rs
@@ -0,0 +1,8 @@
+// rustfmt-file_lines: [{"file":"tests/source/issue-3494/crlf.rs","range":[4,5]}]
+
+pub fn main()
+{
+let world1 = "world"; println!("Hello, {}!", world1);
+let world2 = "world"; println!("Hello, {}!", world2);
+let world3 = "world"; println!("Hello, {}!", world3);
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3494/lf.rs b/src/tools/rustfmt/tests/source/issue-3494/lf.rs
new file mode 100644
index 000000000..bdbe69cef
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3494/lf.rs
@@ -0,0 +1,8 @@
+// rustfmt-file_lines: [{"file":"tests/source/issue-3494/lf.rs","range":[4,5]}]
+
+pub fn main()
+{
+let world1 = "world"; println!("Hello, {}!", world1);
+let world2 = "world"; println!("Hello, {}!", world2);
+let world3 = "world"; println!("Hello, {}!", world3);
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3508.rs b/src/tools/rustfmt/tests/source/issue-3508.rs
new file mode 100644
index 000000000..821e947c7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3508.rs
@@ -0,0 +1,29 @@
+fn foo<F>(foo2: F)
+where
+ F: Fn(
+ // this comment is deleted
+),
+{
+}
+
+fn foo_block<F>(foo2: F)
+where
+ F: Fn(
+ /* this comment is deleted */
+ ),
+{
+}
+
+fn bar(
+ bar2: impl Fn(
+ // this comment is deleted
+ ),
+) {
+}
+
+fn bar_block(
+ bar2: impl Fn(
+ /* this comment is deleted */
+ ),
+) {
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3515.rs b/src/tools/rustfmt/tests/source/issue-3515.rs
new file mode 100644
index 000000000..9f760cb94
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3515.rs
@@ -0,0 +1,6 @@
+// rustfmt-reorder_imports: false
+
+use std :: fmt :: { self , Display } ;
+use std :: collections :: HashMap ;
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3532.rs b/src/tools/rustfmt/tests/source/issue-3532.rs
new file mode 100644
index 000000000..ec0c01610
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3532.rs
@@ -0,0 +1,7 @@
+fn foo(a: T) {
+ match a {
+1 => {}
+ 0 => {}
+ // _ => panic!("doesn't format!"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3585/extern_crate.rs b/src/tools/rustfmt/tests/source/issue-3585/extern_crate.rs
new file mode 100644
index 000000000..6716983ba
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3585/extern_crate.rs
@@ -0,0 +1,12 @@
+// rustfmt-inline_attribute_width: 100
+
+#[macro_use]
+extern crate static_assertions;
+
+#[cfg(unix)]
+extern crate static_assertions;
+
+// a comment before the attribute
+#[macro_use]
+// some comment after
+extern crate static_assertions;
diff --git a/src/tools/rustfmt/tests/source/issue-3585/reorder_imports_disabled.rs b/src/tools/rustfmt/tests/source/issue-3585/reorder_imports_disabled.rs
new file mode 100644
index 000000000..45b1bb9fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3585/reorder_imports_disabled.rs
@@ -0,0 +1,12 @@
+// rustfmt-inline_attribute_width: 100
+// rustfmt-reorder_imports: false
+
+#[cfg(unix)]
+extern crate crateb;
+#[cfg(unix)]
+extern crate cratea;
+
+#[cfg(unix)]
+use crateb;
+#[cfg(unix)]
+use cratea;
diff --git a/src/tools/rustfmt/tests/source/issue-3585/reorder_imports_enabled.rs b/src/tools/rustfmt/tests/source/issue-3585/reorder_imports_enabled.rs
new file mode 100644
index 000000000..9f433e5ca
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3585/reorder_imports_enabled.rs
@@ -0,0 +1,12 @@
+// rustfmt-inline_attribute_width: 100
+// rustfmt-reorder_imports: true
+
+#[cfg(unix)]
+extern crate crateb;
+#[cfg(unix)]
+extern crate cratea;
+
+#[cfg(unix)]
+use crateb;
+#[cfg(unix)]
+use cratea;
diff --git a/src/tools/rustfmt/tests/source/issue-3585/use.rs b/src/tools/rustfmt/tests/source/issue-3585/use.rs
new file mode 100644
index 000000000..e71ba9008
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3585/use.rs
@@ -0,0 +1,7 @@
+// rustfmt-inline_attribute_width: 100
+
+#[macro_use]
+use static_assertions;
+
+#[cfg(unix)]
+use static_assertions;
diff --git a/src/tools/rustfmt/tests/source/issue-3636.rs b/src/tools/rustfmt/tests/source/issue-3636.rs
new file mode 100644
index 000000000..edfa03012
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3636.rs
@@ -0,0 +1,10 @@
+// rustfmt-file_lines: [{"file":"tests/source/issue-3636.rs","range":[4,7]},{"file":"tests/target/issue-3636.rs","range":[3,6]}]
+
+fn foo() {
+ let x =
+ 42;
+ let y =
+ 42;
+ let z = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ let z = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3639.rs b/src/tools/rustfmt/tests/source/issue-3639.rs
new file mode 100644
index 000000000..7b16b2dfd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3639.rs
@@ -0,0 +1,5 @@
+trait Foo where {}
+struct Bar where {}
+struct Bax where;
+struct Baz(String) where;
+impl<> Foo<> for Bar<> where {}
diff --git a/src/tools/rustfmt/tests/source/issue-3651.rs b/src/tools/rustfmt/tests/source/issue-3651.rs
new file mode 100644
index 000000000..c153e99d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3651.rs
@@ -0,0 +1,4 @@
+fn f() -> Box<
+ dyn FnMut() -> Thing< WithType = LongItemName, Error = LONGLONGLONGLONGLONGONGEvenLongerErrorNameLongerLonger>,
+>{
+}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue-3665/lib.rs b/src/tools/rustfmt/tests/source/issue-3665/lib.rs
new file mode 100644
index 000000000..e049fbc56
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3665/lib.rs
@@ -0,0 +1,33 @@
+#![rustfmt::skip::attributes(skip_mod_attr)]
+
+mod sub_mod;
+
+#[rustfmt::skip::attributes(other, skip_attr)]
+fn main() {
+ #[other(should,
+skip,
+ this, format)]
+ struct S {}
+
+ #[skip_attr(should, skip,
+this, format,too)]
+ fn doesnt_mater() {}
+
+ #[skip_mod_attr(should, skip,
+this, format,
+ enerywhere)]
+ fn more() {}
+
+ #[not_skip(not,
+skip, me)]
+ struct B {}
+}
+
+#[other(should, not, skip,
+this, format, here)]
+fn foo() {}
+
+#[skip_mod_attr(should, skip,
+this, format,in, master,
+ and, sub, module)]
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3665/not_skip_attribute.rs b/src/tools/rustfmt/tests/source/issue-3665/not_skip_attribute.rs
new file mode 100644
index 000000000..14985259a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3665/not_skip_attribute.rs
@@ -0,0 +1,4 @@
+#![this::is::not::skip::attribute(ouch)]
+
+#[ouch(not, skip, me)]
+fn main() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3665/sub_mod.rs b/src/tools/rustfmt/tests/source/issue-3665/sub_mod.rs
new file mode 100644
index 000000000..75fb24b4a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3665/sub_mod.rs
@@ -0,0 +1,14 @@
+#[rustfmt::skip::attributes(more_skip)]
+#[more_skip(should,
+ skip,
+this, format)]
+fn foo() {}
+
+#[skip_mod_attr(should, skip,
+this, format,in, master,
+ and, sub, module)]
+fn bar() {}
+
+#[skip_attr(should, not,
+ skip, this, attribute, here)]
+fn baz() {}
diff --git a/src/tools/rustfmt/tests/source/issue-3672.rs b/src/tools/rustfmt/tests/source/issue-3672.rs
new file mode 100644
index 000000000..82616bd42
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3672.rs
@@ -0,0 +1,4 @@
+fn main() {
+ let x = 5;;
+
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue-3675.rs b/src/tools/rustfmt/tests/source/issue-3675.rs
new file mode 100644
index 000000000..f16efb2dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3675.rs
@@ -0,0 +1,5 @@
+ fn main() {
+ println!("{}"
+ // comment
+ , 111);
+ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue-3701/one.rs b/src/tools/rustfmt/tests/source/issue-3701/one.rs
new file mode 100644
index 000000000..a7f0bd3aa
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3701/one.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: One
+
+fn build_sorted_static_get_entry_names(
+ mut entries: Vec<(u8, &'static str)>,
+) -> (impl Fn(
+ AlphabeticalTraversal,
+ Box<dyn dirents_sink::Sink<AlphabeticalTraversal>>,
+) -> BoxFuture<'static, Result<Box<dyn dirents_sink::Sealed>, Status>>
+ + Send
+ + Sync
+ + 'static) {
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3701/two.rs b/src/tools/rustfmt/tests/source/issue-3701/two.rs
new file mode 100644
index 000000000..8e15c58b8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3701/two.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: Two
+
+fn build_sorted_static_get_entry_names(
+ mut entries: Vec<(u8, &'static str)>,
+) -> (impl Fn(
+ AlphabeticalTraversal,
+ Box<dyn dirents_sink::Sink<AlphabeticalTraversal>>,
+) -> BoxFuture<'static, Result<Box<dyn dirents_sink::Sealed>, Status>>
+ + Send
+ + Sync
+ + 'static) {
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3709.rs b/src/tools/rustfmt/tests/source/issue-3709.rs
new file mode 100644
index 000000000..73c2a624e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3709.rs
@@ -0,0 +1,10 @@
+// rustfmt-edition: 2018
+
+macro_rules! token {
+ ($t:tt) => {};
+}
+
+fn main() {
+ token!(dyn);
+ token!(dyn );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3740.rs b/src/tools/rustfmt/tests/source/issue-3740.rs
new file mode 100644
index 000000000..2769a8cc9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3740.rs
@@ -0,0 +1,10 @@
+impl<T, const SIZE: usize> IntoNormalized for Vector<T, { SIZE }>
+ where
+ Vector<T, { SIZE }>: Div<Vector<T, { SIZE }>>,
+ for<'a> &'a Vector<T, { SIZE }>: IntoLength<Output = T>,
+{
+ type Output = Vector<T, { SIZE }>;
+ fn into_normalized(self) -> Self::Output {
+
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3750.rs b/src/tools/rustfmt/tests/source/issue-3750.rs
new file mode 100644
index 000000000..1189a99d2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3750.rs
@@ -0,0 +1,16 @@
+// rustfmt-imports_granularity: Crate
+
+pub mod foo {
+ pub mod bar {
+ pub struct Bar;
+ }
+
+ pub fn bar() {}
+}
+
+use foo::bar;
+use foo::bar::Bar;
+
+fn main() {
+ bar();
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3751.rs b/src/tools/rustfmt/tests/source/issue-3751.rs
new file mode 100644
index 000000000..1343f80e6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3751.rs
@@ -0,0 +1,10 @@
+// rustfmt-format_code_in_doc_comments: true
+
+//! Empty pound line
+//!
+//! ```rust
+//! #
+//! # fn main() {
+//! foo ( ) ;
+//! # }
+//! ```
diff --git a/src/tools/rustfmt/tests/source/issue-3779/ice.rs b/src/tools/rustfmt/tests/source/issue-3779/ice.rs
new file mode 100644
index 000000000..cde21412d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3779/ice.rs
@@ -0,0 +1,3 @@
+pub fn bar() {
+ 1x;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3779/lib.rs b/src/tools/rustfmt/tests/source/issue-3779/lib.rs
new file mode 100644
index 000000000..16e9d4833
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3779/lib.rs
@@ -0,0 +1,9 @@
+// rustfmt-unstable: true
+// rustfmt-config: issue-3779.toml
+
+#[path = "ice.rs"]
+mod ice;
+
+fn foo() {
+println!("abc") ;
+ }
diff --git a/src/tools/rustfmt/tests/source/issue-3786.rs b/src/tools/rustfmt/tests/source/issue-3786.rs
new file mode 100644
index 000000000..54f8211ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3786.rs
@@ -0,0 +1,12 @@
+fn main() {
+ let _ =
+r#"
+this is a very long string exceeded maximum width in this case maximum 100. (current this line width is about 115)
+"#;
+
+ let _with_newline =
+
+r#"
+this is a very long string exceeded maximum width in this case maximum 100. (current this line width is about 115)
+"#;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3787.rs b/src/tools/rustfmt/tests/source/issue-3787.rs
new file mode 100644
index 000000000..bcdc131a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3787.rs
@@ -0,0 +1,9 @@
+// rustfmt-wrap_comments: true
+
+//! URLs in items
+//! * [This is a link with a very loooooooooooooooooooooooooooooooooooooooooong URL.](https://example.com/This/is/a/link/with/a/very/loooooooooooooooooooooooooooooooooooooooooong/URL)
+//! * This is a [link](https://example.com/This/is/a/link/with/a/very/loooooooooooooooooooooooooooooooooooooooooong/URL) with a very loooooooooooooooooooooooooooooooooooooooooong URL.
+//! * there is no link here: In hac habitasse platea dictumst. Maecenas in ligula. Duis tincidunt odio sollicitudin quam. Nullam non mauris. Phasellus lacinia, velit sit amet bibendum euismod, leo diam interdum ligula, eu scelerisque sem purus in tellus.
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3840/version-one_hard-tabs.rs b/src/tools/rustfmt/tests/source/issue-3840/version-one_hard-tabs.rs
new file mode 100644
index 000000000..bf7ea7da0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3840/version-one_hard-tabs.rs
@@ -0,0 +1,15 @@
+// rustfmt-hard_tabs: true
+
+impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3840/version-one_soft-tabs.rs b/src/tools/rustfmt/tests/source/issue-3840/version-one_soft-tabs.rs
new file mode 100644
index 000000000..3fc26224d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3840/version-one_soft-tabs.rs
@@ -0,0 +1,13 @@
+impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3840/version-two_hard-tabs.rs b/src/tools/rustfmt/tests/source/issue-3840/version-two_hard-tabs.rs
new file mode 100644
index 000000000..7b505fda8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3840/version-two_hard-tabs.rs
@@ -0,0 +1,16 @@
+// rustfmt-hard_tabs: true
+// rustfmt-version: Two
+
+impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-3840/version-two_soft-tabs.rs b/src/tools/rustfmt/tests/source/issue-3840/version-two_soft-tabs.rs
new file mode 100644
index 000000000..39c8ef312
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-3840/version-two_soft-tabs.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: Two
+
+impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<Target: FromEvent<A> + FromEvent<B>, A: Widget2<Ctx = C>, B: Widget2<Ctx = C>, C: for<'a> CtxFamily<'a>> Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4018.rs b/src/tools/rustfmt/tests/source/issue-4018.rs
new file mode 100644
index 000000000..9a91dd9a3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4018.rs
@@ -0,0 +1,13 @@
+fn main() {
+ ;
+ /* extra comment */ ;
+}
+
+fn main() {
+ println!("");
+ // comment 1
+ // comment 2
+ // comment 3
+ // comment 4
+ ;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4036/one.rs b/src/tools/rustfmt/tests/source/issue-4036/one.rs
new file mode 100644
index 000000000..9f9675f51
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4036/one.rs
@@ -0,0 +1,11 @@
+// rustfmt-format_strings: true
+
+macro_rules! test {
+ () => {
+ fn from() {
+ None.expect(
+ "We asserted that `buffer.len()` is exactly `$n` so we can expect `ApInt::from_iter` to be successful.",
+ )
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4036/three.rs b/src/tools/rustfmt/tests/source/issue-4036/three.rs
new file mode 100644
index 000000000..e1865dd08
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4036/three.rs
@@ -0,0 +1,12 @@
+// rustfmt-format_strings: true
+// rustfmt-hard_tabs: true
+
+macro_rules! test {
+ () => {
+ fn from() {
+ None.expect(
+ "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
+ )
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4036/two.rs b/src/tools/rustfmt/tests/source/issue-4036/two.rs
new file mode 100644
index 000000000..fa54d2e3e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4036/two.rs
@@ -0,0 +1,11 @@
+// rustfmt-format_strings: true
+
+macro_rules! test {
+ () => {
+ fn from() {
+ None.expect(
+ "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
+ )
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4079.rs b/src/tools/rustfmt/tests/source/issue-4079.rs
new file mode 100644
index 000000000..eb1ce5ed2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4079.rs
@@ -0,0 +1,8 @@
+// rustfmt-wrap_comments: true
+
+/*!
+ * Lorem ipsum dolor sit amet, consectetur adipiscing elit. In lacinia
+ * ullamcorper lorem, non hendrerit enim convallis ut. Curabitur id sem volutpat
+ */
+
+/*! Lorem ipsum dolor sit amet, consectetur adipiscing elit. In lacinia ullamcorper lorem, non hendrerit enim convallis ut. Curabitur id sem volutpat */
diff --git a/src/tools/rustfmt/tests/source/issue-4120.rs b/src/tools/rustfmt/tests/source/issue-4120.rs
new file mode 100644
index 000000000..c9ce838c5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4120.rs
@@ -0,0 +1,85 @@
+fn main() {
+ let x = if true {
+ 1
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let x = if true {
+ 1
+ /* In if */
+ } else {
+ 0
+ /* In else */
+ };
+
+ let z = if true {
+ if true {
+ 1
+
+ // In if level 2
+ } else {
+ 2
+ }
+ } else {
+ 3
+ };
+
+ let a = if true {
+ 1
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let a = if true {
+ 1
+
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let b = if true {
+ 1
+
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let c = if true {
+ 1
+
+ // In if
+ } else {
+ 0
+ // In else
+ };
+ for i in 0..2 {
+ println!("Something");
+ // In for
+ }
+
+ for i in 0..2 {
+ println!("Something");
+ /* In for */
+ }
+
+ extern "C" {
+ fn first();
+
+ // In foreign mod
+ }
+
+ extern "C" {
+ fn first();
+
+ /* In foreign mod */
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4243.rs b/src/tools/rustfmt/tests/source/issue-4243.rs
new file mode 100644
index 000000000..d8a27f7a4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4243.rs
@@ -0,0 +1,21 @@
+fn main() {
+ type A: AA /*AA*/ + /*AB*/ AB
++ AC = AA
+/*AA*/ +
+ /*AB*/ AB+AC;
+
+ type B: BA /*BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA*/+/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/ BB
+ + BC = BA /*BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA*/ + /*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/ BB+ BC;
+
+ type C: CA // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+// CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ +
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ CB + CC = CA // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ +
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ CB+ CC;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4244.rs b/src/tools/rustfmt/tests/source/issue-4244.rs
new file mode 100644
index 000000000..34b51085e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4244.rs
@@ -0,0 +1,16 @@
+pub struct SS {}
+
+pub type A /* A Comment */ = SS;
+
+pub type B // Comment
+// B
+= SS;
+
+pub type C
+ /* Comment C */ = SS;
+
+pub trait D <T> {
+ type E /* Comment E */ = SS;
+}
+
+type F<'a: 'static, T: Ord + 'static>: Eq + PartialEq where T: 'static + Copy /* x */ = Vec<u8>;
diff --git a/src/tools/rustfmt/tests/source/issue-4245.rs b/src/tools/rustfmt/tests/source/issue-4245.rs
new file mode 100644
index 000000000..57d7e192d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4245.rs
@@ -0,0 +1,26 @@
+
+
+fn a(a: & // Comment
+ // Another comment
+ 'a File) {}
+
+fn b(b: & /* Another Comment */'a File) {}
+
+fn c(c: &'a /*Comment */ mut /*Comment */ File){}
+
+fn d(c: & // Comment
+'b // Multi Line
+// Comment
+mut // Multi Line
+// Comment
+File
+) {}
+
+fn e(c: &// Comment
+File) {}
+
+fn d(c: &// Comment
+mut // Multi Line
+// Comment
+File
+) {}
diff --git a/src/tools/rustfmt/tests/source/issue-4312.rs b/src/tools/rustfmt/tests/source/issue-4312.rs
new file mode 100644
index 000000000..b36b0efdb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4312.rs
@@ -0,0 +1,22 @@
+// issue 4312
+fn main() {
+ /* " */
+ println!("Hello, world!");
+ /* abc " */
+ println!("Hello, world!");
+ /* " abc */
+ println!("Hello, world!");
+ let y = 4;
+ let x = match 1 + y == 3 {
+ True => 3,
+ False => 4,
+ /* " unreachable */
+ };
+}
+
+// issue 4806
+enum X {
+ A,
+ B,
+ /*"*/
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4382.rs b/src/tools/rustfmt/tests/source/issue-4382.rs
new file mode 100644
index 000000000..cbf0c4ed6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4382.rs
@@ -0,0 +1,4 @@
+pub const NAME_MAX: usize = {
+ #[cfg(target_os = "linux")] { 1024 }
+ #[cfg(target_os = "freebsd")] { 255 }
+};
diff --git a/src/tools/rustfmt/tests/source/issue-4398.rs b/src/tools/rustfmt/tests/source/issue-4398.rs
new file mode 100644
index 000000000..b0095aaac
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4398.rs
@@ -0,0 +1,19 @@
+impl Struct {
+ /// Documentation for `foo`
+ #[rustfmt::skip] // comment on why use a skip here
+ pub fn foo(&self) {}
+}
+
+impl Struct {
+ /// Documentation for `foo`
+ #[rustfmt::skip] // comment on why use a skip here
+ pub fn foo(&self) {}
+}
+
+/// Documentation for `Struct`
+#[rustfmt::skip] // comment
+impl Struct {
+ /// Documentation for `foo`
+ #[rustfmt::skip] // comment on why use a skip here
+ pub fn foo(&self) {}
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4427.rs b/src/tools/rustfmt/tests/source/issue-4427.rs
new file mode 100644
index 000000000..e14e039b9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4427.rs
@@ -0,0 +1,31 @@
+const A: usize =
+ // Some constant
+ 2;
+
+const B: usize =
+/* constant */
+3;
+
+const C : usize
+ = /* foo */5;
+
+const D: usize = // baz
+/* Some constant */
+ /* ba */
+ { 3
+ // foo
+ };
+const E: usize= /* foo */5;
+const F: usize =
+{
+ 7
+ };
+const G: usize = /* foooooooooooooooooooooooooooooooooooooooooooooooooooooooo0000000000000000xx00 */ 5;
+ const H: usize = /* asdfasdf */ match G > 1 {
+ true => 1,
+ false => 3,
+ };
+
+ pub static FOO_BAR: Vec<u8> = //f
+ {
+ vec![]};
diff --git a/src/tools/rustfmt/tests/source/issue-447.rs b/src/tools/rustfmt/tests/source/issue-447.rs
new file mode 100644
index 000000000..7c542cb58
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-447.rs
@@ -0,0 +1,39 @@
+// rustfmt-normalize_comments: true
+
+fn main() {
+ if /* shouldn't be dropped
+ shouldn't be dropped */
+
+ cond /* shouldn't be dropped
+ shouldn't be dropped */
+
+ {
+ } /* shouldn't be dropped
+ shouldn't be dropped */
+
+ else /* shouldn't be dropped
+ shouldn't be dropped */
+
+ if /* shouldn't be dropped
+ shouldn't be dropped */
+
+ cond /* shouldn't be dropped
+ shouldn't be dropped */
+
+ {
+ } /* shouldn't be dropped
+ shouldn't be dropped */
+
+ else /* shouldn't be dropped
+ shouldn't be dropped */
+
+ {
+ }
+
+ if /* shouldn't be dropped
+ shouldn't be dropped */
+ let Some(x) = y/* shouldn't be dropped
+ shouldn't be dropped */
+ {
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4530.rs b/src/tools/rustfmt/tests/source/issue-4530.rs
new file mode 100644
index 000000000..9d2882abb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4530.rs
@@ -0,0 +1,4 @@
+// rustfmt-version: Two
+fn main() {
+ let [aaaaaaaaaaaaaaaaaaaaaaaaaa, bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, cccccccccccccccccccccccccc, ddddddddddddddddddddddddd] = panic!();
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4577.rs b/src/tools/rustfmt/tests/source/issue-4577.rs
new file mode 100644
index 000000000..79975dd73
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4577.rs
@@ -0,0 +1,20 @@
+fn main() {
+ let s: String = "ABAABBAA".chars()
+ .filter(|c| {
+ if *c == 'A' {
+ true
+ }
+ else {
+ false
+ }
+ })
+ .map(|c| -> char {
+ if c == 'A' {
+ '0'
+ } else {
+ '1'
+ }
+ }).collect();
+
+ println!("{}", s);
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4603.rs b/src/tools/rustfmt/tests/source/issue-4603.rs
new file mode 100644
index 000000000..ba0803e0e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4603.rs
@@ -0,0 +1,47 @@
+// Formatting when original macro snippet is used
+
+// Original issue #4603 code
+#![feature(or_patterns)]
+macro_rules! t_or_f {
+ () => {
+ (true // some comment
+ | false)
+ };
+}
+
+// Other test cases variations
+macro_rules! RULES {
+ () => {
+ (
+ xxxxxxx // COMMENT
+ | yyyyyyy
+ )
+ };
+}
+macro_rules! RULES {
+ () => {
+ (xxxxxxx // COMMENT
+ | yyyyyyy)
+ };
+}
+
+fn main() {
+ macro_rules! RULES {
+ () => {
+ (xxxxxxx // COMMENT
+ | yyyyyyy)
+ };
+ }
+}
+
+macro_rules! RULES {
+ () => {
+ (xxxxxxx /* COMMENT */ | yyyyyyy)
+ };
+}
+macro_rules! RULES {
+ () => {
+ (xxxxxxx /* COMMENT */
+ | yyyyyyy)
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4615/minimum_example.rs b/src/tools/rustfmt/tests/source/issue-4615/minimum_example.rs
new file mode 100644
index 000000000..89af5d123
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4615/minimum_example.rs
@@ -0,0 +1,4 @@
+info!(//debug
+ "{}: sending function_code={:04x} data={:04x} crc=0x{:04X} data={:02X?}",
+ self.name, function_code, data, crc, output_cmd
+);
diff --git a/src/tools/rustfmt/tests/source/issue-4646.rs b/src/tools/rustfmt/tests/source/issue-4646.rs
new file mode 100644
index 000000000..ee0f23220
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4646.rs
@@ -0,0 +1,20 @@
+trait Foo {
+ fn bar(&self)
+ // where
+ // Self: Bar
+ ;
+}
+
+trait Foo {
+ fn bar(&self)
+ // where
+ // Self: Bar
+;
+}
+
+trait Foo {
+ fn bar(&self)
+ // where
+ // Self: Bar
+ ;
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4656/format_me_please.rs b/src/tools/rustfmt/tests/source/issue-4656/format_me_please.rs
new file mode 100644
index 000000000..7de753016
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4656/format_me_please.rs
@@ -0,0 +1,2 @@
+
+pub fn hello( ) { }
diff --git a/src/tools/rustfmt/tests/source/issue-4656/lib.rs b/src/tools/rustfmt/tests/source/issue-4656/lib.rs
new file mode 100644
index 000000000..5dac91b8a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4656/lib.rs
@@ -0,0 +1,7 @@
+extern crate cfg_if;
+
+cfg_if::cfg_if! {
+ if #[cfg(target_family = "unix")] {
+ mod format_me_please;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4656/lib2.rs b/src/tools/rustfmt/tests/source/issue-4656/lib2.rs
new file mode 100644
index 000000000..b17fffc58
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4656/lib2.rs
@@ -0,0 +1,3 @@
+its_a_macro! {
+ // Contents
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4791/buggy.rs b/src/tools/rustfmt/tests/source/issue-4791/buggy.rs
new file mode 100644
index 000000000..4760022ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4791/buggy.rs
@@ -0,0 +1,14 @@
+// rustfmt-struct_field_align_threshold: 30
+// rustfmt-trailing_comma: Never
+
+struct Foo {
+ group_a: u8,
+
+ group_b: u8,
+}
+
+struct Bar {
+ group_a: u8,
+
+ group_b: u8
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4791/trailing_comma.rs b/src/tools/rustfmt/tests/source/issue-4791/trailing_comma.rs
new file mode 100644
index 000000000..c56c70fae
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4791/trailing_comma.rs
@@ -0,0 +1,14 @@
+// rustfmt-struct_field_align_threshold: 30
+// rustfmt-trailing_comma: Always
+
+struct Foo {
+ group_a: u8,
+
+ group_b: u8
+}
+
+struct Bar {
+ group_a: u8,
+
+ group_b: u8,
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4816/lib.rs b/src/tools/rustfmt/tests/source/issue-4816/lib.rs
new file mode 100644
index 000000000..43d540c4a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4816/lib.rs
@@ -0,0 +1,10 @@
+#![feature(const_generics_defaults)]
+struct Foo<const N: usize = 1, const N2: usize = 2>;
+struct Bar<const N: usize, const N2: usize = { N +
+1 }>;
+struct Lots<const N1BlahFooUwU: usize = { 10 + 28 + 1872 / 10 * 3 },const N2SecondParamOhmyyy: usize = { N1BlahFooUwU / 2 + 10 * 2 },>;
+struct NamesRHard<const N: usize = { 1 + 1 + 1 + 1 + 1 + 1 }>;
+struct FooBar<
+ const LessThan100ButClose: usize = {1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1}
+>;
+struct FooBarrrrrrrr<const N: usize = {13478234326456456444323871+ 1+ 1+ 1+ 1+ 1+ 1+ 1+ 1+ 1+ 1+ 1+ 1+ 1+1+1+1 + 1},>;
diff --git a/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct.rs b/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct.rs
new file mode 100644
index 000000000..e55e41bd1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct.rs
@@ -0,0 +1,35 @@
+
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+ c: i32,
+}
+
+fn test(x: X) {
+ let d = {
+ let e = {
+ let f = {
+ let g = {
+ let h = {
+ let i = {
+ let j = {
+ matches!(
+ x,
+ X { a: 1_000, b: 1_000, .. }
+ )
+ };
+ j
+ };
+ i
+ };
+ h
+ };
+ g
+ };
+ f
+ };
+ e
+ };
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_long_field_names.rs b/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_long_field_names.rs
new file mode 100644
index 000000000..516699fa2
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_long_field_names.rs
@@ -0,0 +1,43 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ really_really_long_field_a: i32,
+ really_really_really_long_field_b: i32,
+ really_really_really_really_long_field_c: i32,
+ really_really_really_really_really_long_field_d: i32,
+ really_really_really_really_really_really_long_field_e: i32,
+ f: i32,
+}
+
+fn test(x: X) {
+ let d = {
+ let e = {
+ let f = {
+ let g = {
+ let h = {
+ let i = {
+ let j = {
+ matches!(
+ x,
+ X {
+ really_really_long_field_a: 10,
+ really_really_really_long_field_b: 10,
+ really_really_really_really_long_field_c: 10,
+ really_really_really_really_really_long_field_d: 10,
+ really_really_really_really_really_really_long_field_e: 10, ..
+ }
+ )
+ };
+ j
+ };
+ i
+ };
+ h
+ };
+ g
+ };
+ f
+ };
+ e
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_many_fields.rs b/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_many_fields.rs
new file mode 100644
index 000000000..38fd6f02c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/deeply_nested_struct_with_many_fields.rs
@@ -0,0 +1,44 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+ c: i32,
+ d: i32,
+ e: i32,
+ f: i32,
+ g: i32,
+ h: i32,
+ i: i32,
+ j: i32,
+ k: i32,
+}
+
+fn test(x: X) {
+ let d = {
+ let e = {
+ let f = {
+ let g = {
+ let h = {
+ let i = {
+ let j = {
+ matches!(
+ x,
+ X {
+ a: 1_000, b: 1_000, c: 1_000, d: 1_000, e: 1_000, f: 1_000, g: 1_000, h: 1_000, i: 1_000, j: 1_000, ..
+ }
+ )
+ };
+ j
+ };
+ i
+ };
+ h
+ };
+ g
+ };
+ f
+ };
+ e
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4926/enum_struct_field.rs b/src/tools/rustfmt/tests/source/issue-4926/enum_struct_field.rs
new file mode 100644
index 000000000..336378537
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/enum_struct_field.rs
@@ -0,0 +1,35 @@
+// rustfmt-struct_field_align_threshold: 30
+// rustfmt-enum_discrim_align_threshold: 30
+// rustfmt-imports_layout: HorizontalVertical
+
+#[derive(Default)]
+struct InnerStructA { bbbbbbbbb: i32, cccccccc: i32 }
+
+enum SomeEnumNamedD {
+ E(InnerStructA),
+ F {
+ ggggggggggggggggggggggggg: bool,
+ h: bool,
+ }
+}
+
+impl SomeEnumNamedD {
+ fn f_variant() -> Self {
+ Self::F { ggggggggggggggggggggggggg: true, h: true }
+ }
+}
+
+fn main() {
+ let kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk = SomeEnumNamedD::f_variant();
+ let something_we_care_about = matches!(
+ kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk,
+ SomeEnumNamedD::F {
+ ggggggggggggggggggggggggg: true,
+ ..
+ }
+ );
+
+ if something_we_care_about {
+ println!("Yup it happened");
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4926/minimum_example.rs b/src/tools/rustfmt/tests/source/issue-4926/minimum_example.rs
new file mode 100644
index 000000000..2c3045dea
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/minimum_example.rs
@@ -0,0 +1,10 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X { a: i32, b: i32 }
+
+fn test(x: X) {
+ let y = matches!(x, X {
+ a: 1,
+ ..
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4926/struct_with_long_field_names.rs b/src/tools/rustfmt/tests/source/issue-4926/struct_with_long_field_names.rs
new file mode 100644
index 000000000..b8a37f071
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/struct_with_long_field_names.rs
@@ -0,0 +1,21 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ really_really_long_field_a: i32,
+ really_really_really_long_field_b: i32,
+ really_really_really_really_long_field_c: i32,
+ really_really_really_really_really_long_field_d: i32,
+ really_really_really_really_really_really_long_field_e: i32,
+ f: i32,
+}
+
+fn test(x: X) {
+ let y = matches!(x, X {
+ really_really_long_field_a: 10,
+ really_really_really_long_field_b: 10,
+ really_really_really_really_long_field_c: 10,
+ really_really_really_really_really_long_field_d: 10,
+ really_really_really_really_really_really_long_field_e: 10,
+ ..
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4926/struct_with_many_fields.rs b/src/tools/rustfmt/tests/source/issue-4926/struct_with_many_fields.rs
new file mode 100644
index 000000000..4adfd3b30
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4926/struct_with_many_fields.rs
@@ -0,0 +1,21 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+ c: i32,
+ d: i32,
+ e: i32,
+ f: i32,
+ g: i32,
+ h: i32,
+ i: i32,
+ j: i32,
+ k: i32,
+}
+
+fn test(x: X) {
+ let y = matches!(x, X {
+ a: 1_000, b: 1_000, c: 1_000, d: 1_000, e: 1_000, f: 1_000, g: 1_000, h: 1_000, i: 1_000, j: 1_000, ..
+ });
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue-4984/minimum_example.rs b/src/tools/rustfmt/tests/source/issue-4984/minimum_example.rs
new file mode 100644
index 000000000..677f87377
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4984/minimum_example.rs
@@ -0,0 +1,2 @@
+#[derive(/*Debug, */Clone)]
+struct Foo;
diff --git a/src/tools/rustfmt/tests/source/issue-4984/multi_line_derive.rs b/src/tools/rustfmt/tests/source/issue-4984/multi_line_derive.rs
new file mode 100644
index 000000000..73921dd17
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4984/multi_line_derive.rs
@@ -0,0 +1,20 @@
+#[derive(
+/* ---------- Some really important comment that just had to go inside the derive --------- */
+Debug, Clone, Eq, PartialEq,
+)]
+struct Foo {
+ a: i32,
+ b: T,
+}
+
+#[derive(
+/*
+ Some really important comment that just had to go inside the derive.
+ Also had to be put over multiple lines
+*/
+Debug, Clone, Eq, PartialEq,
+)]
+struct Bar {
+ a: i32,
+ b: T,
+}
diff --git a/src/tools/rustfmt/tests/source/issue-4984/multiple_comments_within.rs b/src/tools/rustfmt/tests/source/issue-4984/multiple_comments_within.rs
new file mode 100644
index 000000000..eb474a723
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-4984/multiple_comments_within.rs
@@ -0,0 +1,8 @@
+#[derive(
+/* ---------- Some really important comment that just had to go inside the derive --------- */
+Debug, Clone,/* Another comment */Eq, PartialEq,
+)]
+struct Foo {
+ a: i32,
+ b: T,
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5011.rs b/src/tools/rustfmt/tests/source/issue-5011.rs
new file mode 100644
index 000000000..b48292164
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5011.rs
@@ -0,0 +1,12 @@
+pub(crate) struct ASlash(
+ // hello
+ i32
+);
+
+pub(crate) struct AStar(
+ /* hello */
+ i32
+);
+
+pub(crate) struct BStar(/* hello */ i32);
+
diff --git a/src/tools/rustfmt/tests/source/issue-5023.rs b/src/tools/rustfmt/tests/source/issue-5023.rs
new file mode 100644
index 000000000..ae1c723ef
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5023.rs
@@ -0,0 +1,22 @@
+// rustfmt-wrap_comments: true
+
+/// A comment to test special unicode characters on boundaries
+/// 是,是,是,是,是,是,是,是,是,是,是,是 it should break right here this goes to the next line
+fn main() {
+ if xxx {
+ let xxx = xxx
+ .into_iter()
+ .filter(|(xxx, xxx)| {
+ if let Some(x) = Some(1) {
+ // xxxxxxxxxxxxxxxxxx, xxxxxxxxxxxx, xxxxxxxxxxxxxxxxxxxx xxx xxxxxxx, xxxxx xxx
+ // xxxxxxxxxx. xxxxxxxxxxxxxxxx,xxxxxxxxxxxxxxxxx xxx xxxxxxx
+ // 是sdfadsdfxxxxxxxxx,sdfaxxxxxx_xxxxx_masdfaonxxx,
+ if false {
+ return true;
+ }
+ }
+ false
+ })
+ .collect();
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5030.rs b/src/tools/rustfmt/tests/source/issue-5030.rs
new file mode 100644
index 000000000..08ffaac7d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5030.rs
@@ -0,0 +1,22 @@
+// rustfmt-imports_granularity: Item
+// rustfmt-group_imports: One
+
+// Confirm that attributes are duplicated to all items in the use statement
+#[cfg(feature = "foo")]
+use std::collections::{
+ HashMap,
+ HashSet,
+};
+
+// Separate the imports below from the ones above
+const A: usize = 0;
+
+// Copying attrs works with import grouping as well
+#[cfg(feature = "foo")]
+use std::collections::{
+ HashMap,
+ HashSet,
+};
+
+#[cfg(feature = "spam")]
+use qux::{bar, baz};
diff --git a/src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_with_trailing_comma.rs b/src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_with_trailing_comma.rs
new file mode 100644
index 000000000..5d171f32a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_with_trailing_comma.rs
@@ -0,0 +1,24 @@
+fn main() {
+ // 5042 deals with trailing commas, not the indentation issue of these comments
+    // When a future PR fixes the indentation issues these tests can be updated
+ let _ = std::ops::Add::add(10, 20
+ // ...
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ /* ... */
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ // ...,
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ // ...,
+ /* ...
+ */,
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_without_trailing_comma.rs b/src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_without_trailing_comma.rs
new file mode 100644
index 000000000..b8a824b34
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5042/multi-line_comment_without_trailing_comma.rs
@@ -0,0 +1,24 @@
+fn main() {
+ // 5042 deals with trailing commas, not the indentation issue of these comments
+    // When a future PR fixes the indentation issues these tests can be updated
+ let _ = std::ops::Add::add(10, 20
+ // ...
+ // ...
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ /* ... */
+ // ...
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ // ...
+ // ...
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ // ...
+ /* ...
+ */
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5042/single-line_comment_with_trailing_comma.rs b/src/tools/rustfmt/tests/source/issue-5042/single-line_comment_with_trailing_comma.rs
new file mode 100644
index 000000000..bd765b7b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5042/single-line_comment_with_trailing_comma.rs
@@ -0,0 +1,9 @@
+fn main() {
+ let _ = std::ops::Add::add(10, 20
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ /* ... */,
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5042/single-line_comment_without_trailing_comma.rs b/src/tools/rustfmt/tests/source/issue-5042/single-line_comment_without_trailing_comma.rs
new file mode 100644
index 000000000..2ed8de875
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5042/single-line_comment_without_trailing_comma.rs
@@ -0,0 +1,10 @@
+fn main() {
+ let _ = std::ops::Add::add(10, 20
+ // ...
+ );
+
+ let _ = std::ops::Add::add(10, 20
+ /* ... */
+ );
+}
+
diff --git a/src/tools/rustfmt/tests/source/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs b/src/tools/rustfmt/tests/source/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs
new file mode 100644
index 000000000..09f68cae4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs
@@ -0,0 +1,33 @@
+// rustfmt-wrap_comments: true
+
+fn main() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ // - aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc
+
+ // * aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc
+
+ /* - aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc */
+
+ /* * aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc */
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs b/src/tools/rustfmt/tests/source/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs
new file mode 100644
index 000000000..75f748000
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs
@@ -0,0 +1,19 @@
+// rustfmt-wrap_comments: true
+
+//
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+//
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+/*
+ * - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/*
+ * - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+
+/*
+ * * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/*
+ * * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
diff --git a/src/tools/rustfmt/tests/source/issue-5088/very_long_comment_wrap_comments_true.rs b/src/tools/rustfmt/tests/source/issue-5088/very_long_comment_wrap_comments_true.rs
new file mode 100644
index 000000000..00437f002
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5088/very_long_comment_wrap_comments_true.rs
@@ -0,0 +1,13 @@
+// rustfmt-wrap_comments: true
+
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+/* - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/* - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+
+/* * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/* * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
diff --git a/src/tools/rustfmt/tests/source/issue-510.rs b/src/tools/rustfmt/tests/source/issue-510.rs
new file mode 100644
index 000000000..4c60859e6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-510.rs
@@ -0,0 +1,37 @@
+impl ISizeAndMarginsComputer for AbsoluteNonReplaced {
+fn solve_inline_size_constraints(&self,
+block: &mut BlockFlow,
+input: &ISizeConstraintInput)
+-> ISizeConstraintSolution {
+let (inline_start,inline_size,margin_inline_start,margin_inline_end) =
+match (inline_startssssssxxxxxxsssssxxxxxxxxxssssssxxx,inline_startssssssxxxxxxsssssxxxxxxxxxssssssxxx) {
+(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
+let margin_start = inline_start_margin.specified_or_zero();
+let margin_end = inline_end_margin.specified_or_zero();
+// Now it is the same situation as inline-start Specified and inline-end
+// and inline-size Auto.
+//
+// Set inline-end to zero to calculate inline-size.
+let inline_size = block.get_shrink_to_fit_inline_size(available_inline_size -
+(margin_start + margin_end));
+(Au(0), inline_size, margin_start, margin_end)
+}
+};
+
+ let (inline_start, inline_size, margin_inline_start, margin_inline_end) =
+ match (inline_start, inline_end, computed_inline_size) {
+ (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
+ let margin_start = inline_start_margin.specified_or_zero();
+ let margin_end = inline_end_margin.specified_or_zero();
+ // Now it is the same situation as inline-start Specified and inline-end
+ // and inline-size Auto.
+ //
+ // Set inline-end to zero to calculate inline-size.
+ let inline_size =
+ block.get_shrink_to_fit_inline_size(available_inline_size -
+ (margin_start + margin_end));
+ (Au(0), inline_size, margin_start, margin_end)
+ }
+ };
+}
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5157/indented_itemized_markdown_blockquote.rs b/src/tools/rustfmt/tests/source/issue-5157/indented_itemized_markdown_blockquote.rs
new file mode 100644
index 000000000..5c1d79a74
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5157/indented_itemized_markdown_blockquote.rs
@@ -0,0 +1,4 @@
+// rustfmt-wrap_comments: true
+
+/// > For each sample received, the middleware internally maintains a sample_state relative to each DataReader. The sample_state can either be READ or NOT_READ.
+fn block_quote() {}
diff --git a/src/tools/rustfmt/tests/source/issue-5157/nested_itemized_markdown_blockquote.rs b/src/tools/rustfmt/tests/source/issue-5157/nested_itemized_markdown_blockquote.rs
new file mode 100644
index 000000000..cf200d04e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5157/nested_itemized_markdown_blockquote.rs
@@ -0,0 +1,10 @@
+// rustfmt-wrap_comments: true
+
+/// > For each sample received, the middleware internally maintains a sample_state relative to each DataReader. The sample_state can either be READ or NOT_READ.
+///
+/// > > For each sample received, the middleware internally maintains a sample_state relative to each DataReader. The sample_state can either be READ or NOT_READ.
+///
+/// > > > For each sample received, the middleware internally maintains a sample_state relative to each DataReader. The sample_state can either be READ or NOT_READ.
+///
+/// > > > > > > > > For each sample received, the middleware internally maintains a sample_state relative to each DataReader. The sample_state can either be READ or NOT_READ.
+fn block_quote() {}
diff --git a/src/tools/rustfmt/tests/source/issue-5157/support_itemized_markdown_blockquote.rs b/src/tools/rustfmt/tests/source/issue-5157/support_itemized_markdown_blockquote.rs
new file mode 100644
index 000000000..eb436402e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5157/support_itemized_markdown_blockquote.rs
@@ -0,0 +1,4 @@
+// rustfmt-wrap_comments: true
+
+/// > For each sample received, the middleware internally maintains a sample_state relative to each DataReader. The sample_state can either be READ or NOT_READ.
+fn block_quote() {}
diff --git a/src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_false.rs b/src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_false.rs
new file mode 100644
index 000000000..229c6e575
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_false.rs
@@ -0,0 +1,11 @@
+// rustfmt-wrap_comments: false
+
+/// no markdown header so rustfmt should wrap this comment when `format_code_in_doc_comments = true` and `wrap_comments = true`
+fn not_documented_with_markdown_header() {
+ // This is just a normal inline comment so rustfmt should wrap this comment when `wrap_comments = true`
+}
+
+/// # We're using a markdown header here so rustfmt should refuse to wrap this comment in all circumstances
+fn documented_with_markdown_header() {
+ // # We're using a markdown header in an inline comment. rustfmt should be able to wrap this comment when `wrap_comments = true`
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_true.rs b/src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_true.rs
new file mode 100644
index 000000000..c547ff35c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5238/markdown_header_wrap_comments_true.rs
@@ -0,0 +1,11 @@
+// rustfmt-wrap_comments: true
+
+/// no markdown header so rustfmt should wrap this comment when `format_code_in_doc_comments = true` and `wrap_comments = true`
+fn not_documented_with_markdown_header() {
+ // This is just a normal inline comment so rustfmt should wrap this comment when `wrap_comments = true`
+}
+
+/// # We're using a markdown header here so rustfmt should refuse to wrap this comment in all circumstances
+fn documented_with_markdown_header() {
+ // # We're using a markdown header in an inline comment. rustfmt should be able to wrap this comment when `wrap_comments = true`
+}
diff --git a/src/tools/rustfmt/tests/source/issue-5260.rs b/src/tools/rustfmt/tests/source/issue-5260.rs
new file mode 100644
index 000000000..c06068172
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5260.rs
@@ -0,0 +1,14 @@
+// rustfmt-wrap_comments: true
+
+/// [MyType](VeryLongPathToMyType::NoLineBreak::Here::Okay::ThatWouldBeNice::Thanks)
+fn documented_with_longtype() {
+ // # We're using a long type link, rustfmt should not break line
+ // on the type when `wrap_comments = true`
+}
+
+/// VeryLongPathToMyType::JustMyType::But::VeryVery::Long::NoLineBreak::Here::Okay::ThatWouldBeNice::Thanks
+fn documented_with_verylongtype() {
+ // # We're using a long type link, rustfmt should not break line
+ // on the type when `wrap_comments = true`
+}
+
diff --git a/src/tools/rustfmt/tests/source/issue-5270/merge_derives_true.rs b/src/tools/rustfmt/tests/source/issue-5270/merge_derives_true.rs
new file mode 100644
index 000000000..b31bbf095
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-5270/merge_derives_true.rs
@@ -0,0 +1,62 @@
+// rustfmt-merge_derives:true
+
+#[rustfmt::skip::attributes(derive)]
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+struct DoNotMergeDerives {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[rustfmt::skip::attributes(derive)]
+#[derive(Clone)]
+struct DoNotMergeDerivesSkipInMiddle {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+#[rustfmt::skip::attributes(derive)]
+struct DoNotMergeDerivesSkipAtEnd {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+struct MergeDerives {
+ field: String,
+}
+
+mod inner_attribute_derive_skip {
+ #![rustfmt::skip::attributes(derive)]
+
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct DoNotMergeDerives {
+ field: String,
+ }
+}
+
+#[rustfmt::skip::attributes(derive)]
+mod outer_attribute_derive_skip {
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct DoNotMergeDerives {
+ field: String,
+ }
+}
+
+mod no_derive_skip {
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct MergeDerives {
+ field: String,
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-539.rs b/src/tools/rustfmt/tests/source/issue-539.rs
new file mode 100644
index 000000000..d70682e3b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-539.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+/*
+ FIXME (#3300): Should allow items to be anonymous. Right now
+ we just use dummy names for anon items.
+ */
diff --git a/src/tools/rustfmt/tests/source/issue-683.rs b/src/tools/rustfmt/tests/source/issue-683.rs
new file mode 100644
index 000000000..fd99015ea
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-683.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+/*
+ * FIXME (#3300): Should allow items to be anonymous. Right now
+ * we just use dummy names for anon items.
+ */
diff --git a/src/tools/rustfmt/tests/source/issue-811.rs b/src/tools/rustfmt/tests/source/issue-811.rs
new file mode 100644
index 000000000..b7a89b5d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-811.rs
@@ -0,0 +1,19 @@
+trait FooTrait<T>: Sized {
+ type Bar: BarTrait<T>;
+}
+
+trait BarTrait<T>: Sized {
+ type Baz;
+ fn foo();
+}
+
+type Foo<T: FooTrait> = <<T as FooTrait<U>>::Bar as BarTrait<U>>::Baz;
+type Bar<T: BarTrait> = <T as BarTrait<U>>::Baz;
+
+fn some_func<T: FooTrait<U>, U>() {
+ <<T as FooTrait<U>>::Bar as BarTrait<U>>::foo();
+}
+
+fn some_func<T: BarTrait<U>>() {
+ <T as BarTrait<U>>::foo();
+}
diff --git a/src/tools/rustfmt/tests/source/issue-850.rs b/src/tools/rustfmt/tests/source/issue-850.rs
new file mode 100644
index 000000000..c939716a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-850.rs
@@ -0,0 +1 @@
+const unsafe fn x() {}
diff --git a/src/tools/rustfmt/tests/source/issue-855.rs b/src/tools/rustfmt/tests/source/issue-855.rs
new file mode 100644
index 000000000..8f33fa685
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-855.rs
@@ -0,0 +1,20 @@
+fn main() {
+ 'running: loop {
+ for event in event_pump.poll_iter() {
+ match event {
+ Event::Quit {..} | Event::KeyDown { keycode: Some(Keycode::Escape), .. } => break 'running,
+ }
+ }
+ }
+}
+
+fn main2() {
+ 'running: loop {
+ for event in event_pump.poll_iter() {
+ match event {
+ Event::Quit {..} |
+ Event::KeyDownXXXXXXXXXXXXX { keycode: Some(Keycode::Escape), .. } => break 'running,
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-913.rs b/src/tools/rustfmt/tests/source/issue-913.rs
new file mode 100644
index 000000000..25b9d42fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-913.rs
@@ -0,0 +1,20 @@
+mod client {
+ impl Client {
+ fn test(self) -> Result<()> {
+ let next_state = match self.state {
+ State::V5(v5::State::Command(v5::coand::State::WriteVersion(ref mut response))) => {
+ let x = reformat . meeee() ;
+ }
+ };
+
+ let next_state = match self.state {
+ State::V5(v5::State::Command(v5::comand::State::WriteVersion(ref mut response))) => {
+ // The pattern cannot be formatted in a way that the match stays
+ // within the column limit. The rewrite should therefore be
+ // skipped.
+ let x = dont . reformat . meeee();
+ }
+ };
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue-945.rs b/src/tools/rustfmt/tests/source/issue-945.rs
new file mode 100644
index 000000000..37d703c46
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-945.rs
@@ -0,0 +1,5 @@
+impl Bar { default const unsafe fn foo() { "hi" } }
+
+impl Baz { default unsafe extern "C" fn foo() { "hi" } }
+
+impl Foo for Bar { default fn foo() { "hi" } }
diff --git a/src/tools/rustfmt/tests/source/issue-977.rs b/src/tools/rustfmt/tests/source/issue-977.rs
new file mode 100644
index 000000000..fe16387b7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue-977.rs
@@ -0,0 +1,7 @@
+// rustfmt-normalize_comments: true
+
+trait NameC { /* comment */ }
+struct FooC { /* comment */ }
+enum MooC { /* comment */ }
+mod BarC { /* comment */ }
+extern { /* comment */ }
diff --git a/src/tools/rustfmt/tests/source/issue_3839.rs b/src/tools/rustfmt/tests/source/issue_3839.rs
new file mode 100644
index 000000000..3933d31ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_3839.rs
@@ -0,0 +1,8 @@
+struct Foo {
+ a: i32,
+/*
+asd
+*/
+ // foo
+b: i32,
+}
diff --git a/src/tools/rustfmt/tests/source/issue_3844.rs b/src/tools/rustfmt/tests/source/issue_3844.rs
new file mode 100644
index 000000000..15441b2b0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_3844.rs
@@ -0,0 +1,3 @@
+fn main() {
+|| {{}};
+}
diff --git a/src/tools/rustfmt/tests/source/issue_3853.rs b/src/tools/rustfmt/tests/source/issue_3853.rs
new file mode 100644
index 000000000..c41309bc7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_3853.rs
@@ -0,0 +1,52 @@
+fn by_ref_with_block_before_ident() {
+if let Some(ref /*def*/ state)= foo{
+ println!(
+ "asdfasdfasdf"); }
+}
+
+fn mut_block_before_ident() {
+if let Some(mut /*def*/ state ) =foo{
+ println!(
+ "123" ); }
+}
+
+fn ref_and_mut_blocks_before_ident() {
+if let Some(ref /*abc*/
+ mut /*def*/ state ) = foo {
+ println!(
+ "deefefefefefwea" ); }
+}
+
+fn sub_pattern() {
+ let foo @ /*foo*/
+bar(f) = 42;
+}
+
+fn no_prefix_block_before_ident() {
+if let Some(
+ /*def*/ state ) = foo {
+ println!(
+ "129387123123" ); }
+}
+
+fn issue_3853() {
+if let Some(ref /*mut*/ state) = foo {
+ }
+}
+
+fn double_slash_comment_between_lhs_and_rhs() {
+ if let Some(e) =
+ // self.foo.bar(e, tx)
+ packet.transaction.state.committed
+ {
+ // body
+ println!(
+ "a2304712836123");
+ }
+}
+
+fn block_comment_between_lhs_and_rhs() {
+if let Some(ref /*def*/ mut /*abc*/ state)= /*abc*/foo{
+ println!(
+ "asdfasdfasdf"); }
+}
diff --git a/src/tools/rustfmt/tests/source/issue_3868.rs b/src/tools/rustfmt/tests/source/issue_3868.rs
new file mode 100644
index 000000000..6c46c3c9e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_3868.rs
@@ -0,0 +1,13 @@
+fn foo() {
+ ;
+}
+
+fn bar() {
+ for _ in 0..1 {
+ ;
+ }
+}
+
+fn baz() {
+ ();
+ } \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4032.rs b/src/tools/rustfmt/tests/source/issue_4032.rs
new file mode 100644
index 000000000..11ded074c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4032.rs
@@ -0,0 +1,4 @@
+fn a1(#[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] a: u8) {}
+fn b1(#[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] bb: u8) {}
+fn a2(#[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] a: u8) {}
+fn b2(#[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] bb: u8) {}
diff --git a/src/tools/rustfmt/tests/source/issue_4057.rs b/src/tools/rustfmt/tests/source/issue_4057.rs
new file mode 100644
index 000000000..7cd80734b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4057.rs
@@ -0,0 +1,15 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// ```
+/// # #[rustversion::since(1.36)]
+/// # fn dox() {
+/// # use std::pin::Pin;
+/// # type Projection<'a> = &'a ();
+/// # type ProjectionRef<'a> = &'a ();
+/// # trait Dox {
+/// fn project_ex (self: Pin<&mut Self>) -> Projection<'_>;
+/// fn project_ref(self: Pin<&Self>) -> ProjectionRef<'_>;
+/// # }
+/// # }
+/// ```
+struct Foo;
diff --git a/src/tools/rustfmt/tests/source/issue_4086.rs b/src/tools/rustfmt/tests/source/issue_4086.rs
new file mode 100644
index 000000000..ffa6442e9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4086.rs
@@ -0,0 +1,2 @@
+#[cfg(any())]
+extern "C++" {}
diff --git a/src/tools/rustfmt/tests/source/issue_4257.rs b/src/tools/rustfmt/tests/source/issue_4257.rs
new file mode 100644
index 000000000..2b887fadb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4257.rs
@@ -0,0 +1,13 @@
+#![feature(generic_associated_types)]
+#![allow(incomplete_features)]
+
+trait Trait<T> {
+ type Type<'a> where T: 'a;
+ fn foo(x: &T) -> Self::Type<'_>;
+}
+impl<T> Trait<T> for () {
+ type Type<'a> where T: 'a = &'a T;
+ fn foo(x: &T) -> Self::Type<'_> {
+ x
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue_4322.rs b/src/tools/rustfmt/tests/source/issue_4322.rs
new file mode 100644
index 000000000..b28cc7cdd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4322.rs
@@ -0,0 +1,3 @@
+trait Bar {
+ type X<'a> where Self: 'a;
+}
diff --git a/src/tools/rustfmt/tests/source/issue_4374.rs b/src/tools/rustfmt/tests/source/issue_4374.rs
new file mode 100644
index 000000000..2a45a022e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4374.rs
@@ -0,0 +1,13 @@
+fn a<F>(_f: F) -> ()
+where
+ F: FnOnce() -> (),
+{
+}
+fn main() {
+ a(|| {
+ #[allow(irrefutable_let_patterns)]
+ while let _ = 0 {
+ break;
+ }
+ });
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4475.rs b/src/tools/rustfmt/tests/source/issue_4475.rs
new file mode 100644
index 000000000..241dc91d7
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4475.rs
@@ -0,0 +1,27 @@
+fn main() {
+ #[cfg(debug_assertions)]
+ { println!("DEBUG"); }
+}
+
+fn main() {
+ #[cfg(feature = "foo")]
+ {
+ /*
+ let foo = 0
+ */
+ }
+}
+
+fn main() {
+ #[cfg(feature = "foo")]
+ { /* let foo = 0; */ }
+}
+
+fn main() {
+ #[foo]
+ #[bar]
+ #[baz]
+ {
+ // let foo = 0;
+ }
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4528.rs b/src/tools/rustfmt/tests/source/issue_4528.rs
new file mode 100644
index 000000000..85f6d8c03
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4528.rs
@@ -0,0 +1,8 @@
+#![allow(clippy::no_effect)]
+
+extern "C" {
+ // N.B., mutability can be easily incorrect in FFI calls -- as
+ // in C, the default is mutable pointers.
+ fn ffi(c: *mut u8);
+ fn int_ffi(c: *mut i32);
+} \ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4579.rs b/src/tools/rustfmt/tests/source/issue_4579.rs
new file mode 100644
index 000000000..73f345233
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4579.rs
@@ -0,0 +1,15 @@
+// rustfmt-hard_tabs: true
+
+#[macro_export]
+macro_rules! main {
+ () => {
+ #[spirv(fragment)]
+ pub fn main_fs(
+ mut out_color: ::spirv_std::storage_class::Output<Vec4>,
+ #[spirv(descriptor_set = 1)]iChannelResolution: ::spirv_std::storage_class::UniformConstant<
+ [::spirv_std::glam::Vec3A; 4],
+ >,
+ ) {
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/issue_4584.rs b/src/tools/rustfmt/tests/source/issue_4584.rs
new file mode 100644
index 000000000..695c55905
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4584.rs
@@ -0,0 +1,19 @@
+// rustfmt-indent_style: Visual
+
+#[derive(Debug,)]
+pub enum Case {
+ Upper,
+ Lower
+}
+
+#[derive(Debug, Clone, PartialEq, Eq,)]
+pub enum Case {
+ Upper,
+ Lower
+}
+
+// NB - This formatting looks potentially off the desired state, but is
+// consistent with current behavior. Included here to provide a line wrapped
+// derive case with the changes applied to resolve issue #4584
+#[derive(Add, Sub, Mul, Div, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash, Serialize, Mul,)]
+struct Foo {}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4636.rs b/src/tools/rustfmt/tests/source/issue_4636.rs
new file mode 100644
index 000000000..ea7079f6c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4636.rs
@@ -0,0 +1,13 @@
+pub trait PrettyPrinter<'tcx>:
+ Printer<
+ 'tcx,
+ Error = fmt::Error,
+ Path = Self,
+ Region = Self,
+ Type = Self,
+ DynExistential = Self,
+ Const = Self,
+ > + fmt::Write
+ {
+ //
+  }
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4675.rs b/src/tools/rustfmt/tests/source/issue_4675.rs
new file mode 100644
index 000000000..66613eed0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4675.rs
@@ -0,0 +1,8 @@
+macro_rules! foo {
+ ($s:ident ( $p:pat )) => {
+ Foo {
+ name: Name::$s($p),
+ ..
+ }
+ };
+}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4823.rs b/src/tools/rustfmt/tests/source/issue_4823.rs
new file mode 100644
index 000000000..a008dd3d8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4823.rs
@@ -0,0 +1,5 @@
+macro_rules! m {
+() => {
+type Type;
+};
+}
diff --git a/src/tools/rustfmt/tests/source/issue_4854.rs b/src/tools/rustfmt/tests/source/issue_4854.rs
new file mode 100644
index 000000000..35d6e21af
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4854.rs
@@ -0,0 +1,113 @@
+struct Struct {
+ // Multiline comment
+ // should be formatted
+ // properly.
+}
+
+struct Struct2 {
+ // This formatting
+// Should be changed
+}
+
+struct Struct3(
+ // This
+ // is
+ // correct
+);
+
+struct Struct4(
+ // This
+// is
+// not
+// correct
+);
+
+struct Struct5 {
+ /*
+ Comment block
+ with many lines.
+ */
+}
+
+struct Struct6(
+ /*
+ Comment block
+ with many lines.
+ */
+);
+
+struct Struct7 {
+ /*
+Invalid
+format
+*/
+}
+
+struct Struct8(
+ /*
+Invalid
+format
+*/
+);
+
+struct Struct9 { /* bar */ }
+
+struct Struct10 { /* bar
+baz
+*/ }
+
+mod module {
+ struct Struct {
+ // Multiline comment
+ // should be formatted
+ // properly.
+ }
+
+ struct Struct2 {
+ // This formatting
+// Should be changed
+ }
+
+ struct Struct3(
+ // This
+ // is
+ // correct
+ );
+
+ struct Struct4(
+ // This
+ // is
+ // not
+// correct
+ );
+
+ struct Struct5 {
+ /*
+ Comment block
+ with many lines.
+ */
+ }
+
+ struct Struct6(
+ /*
+ Comment block
+ with many lines.
+ */
+ );
+
+ struct Struct7 {
+ /*
+Invalid
+format
+*/
+ }
+
+ struct Struct8(
+ /*
+Invalid
+format
+*/
+ );
+
+ struct Struct9 { /* bar */ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue_4911.rs b/src/tools/rustfmt/tests/source/issue_4911.rs
new file mode 100644
index 000000000..21ef6c6c4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4911.rs
@@ -0,0 +1,6 @@
+#![feature(generic_associated_types)]
+#![feature(min_type_alias_impl_trait)]
+
+impl SomeTrait for SomeType {
+ type SomeGAT<'a> where Self: 'a = impl SomeOtherTrait;
+}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_4943.rs b/src/tools/rustfmt/tests/source/issue_4943.rs
new file mode 100644
index 000000000..0793b7b4f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4943.rs
@@ -0,0 +1,9 @@
+#![feature(generic_associated_types)]
+
+impl SomeStruct {
+ fn process<T>(v: T) -> <Self as GAT>::R<T>
+ where Self: GAT<R<T> = T>
+ {
+ SomeStruct::do_something(v)
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/issue_4954.rs b/src/tools/rustfmt/tests/source/issue_4954.rs
new file mode 100644
index 000000000..8011c601b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4954.rs
@@ -0,0 +1,5 @@
+trait Foo {
+ type Arg<'a>;
+}
+
+struct Bar<T>(T) where for<'a> T: Foo<Arg<'a> = ()>;
diff --git a/src/tools/rustfmt/tests/source/issue_4963.rs b/src/tools/rustfmt/tests/source/issue_4963.rs
new file mode 100644
index 000000000..32e1f6cd4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_4963.rs
@@ -0,0 +1,5 @@
+mod test {
+ extern "C" {fn test();}
+}
+
+extern "C" {fn test();}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_5027.rs b/src/tools/rustfmt/tests/source/issue_5027.rs
new file mode 100644
index 000000000..67beeb23b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_5027.rs
@@ -0,0 +1,7 @@
+// rustfmt-version: Two
+
+pub type Iter<'a, D> = impl DoubleEndedIterator<Item = (SomethingSomethingSomethingLongType<D>)>+ ExactSizeIterator+ 'a;
+
+trait FOo {pub type Iter<'a, D> = impl DoubleEndedIterator<Item = (SomethingSomethingSomethingLongType<D>)>+ ExactSizeIterator+ 'a;}
+
+impl Bar {pub type Iter<'a, D> = impl DoubleEndedIterator<Item = (SomethingSomethingSomethingLongType<D>)>+ ExactSizeIterator+ 'a;}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/issue_5086.rs b/src/tools/rustfmt/tests/source/issue_5086.rs
new file mode 100644
index 000000000..1644c9d2c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/issue_5086.rs
@@ -0,0 +1,2 @@
+#[cfg(any())]
+    type Type : Bound ;
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/item-brace-style-always-next-line.rs b/src/tools/rustfmt/tests/source/item-brace-style-always-next-line.rs
new file mode 100644
index 000000000..0fb640512
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/item-brace-style-always-next-line.rs
@@ -0,0 +1,64 @@
+// rustfmt-brace_style: AlwaysNextLine
+
+mod M {
+ enum A {
+ A,
+ }
+
+ struct B {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C {}
+
+ struct D {}
+
+ enum A<T> where T: Copy {
+ A,
+ }
+
+ struct B<T> where T: Copy {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C<T> where T: Copy {}
+
+ struct D<T> where T: Copy {}
+}
+
+
+fn function()
+{
+
+}
+
+trait Trait
+{
+
+}
+
+impl<T> Trait for T
+{
+
+}
+
+trait Trait2<T>
+where
+ T: Copy + Display + Write + Read + FromStr, {}
+
+trait Trait3<T>
+where
+ T: Something
+ + SomethingElse
+ + Sync
+ + Send
+ + Display
+ + Debug
+ + Copy
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read, {}
diff --git a/src/tools/rustfmt/tests/source/item-brace-style-prefer-same-line.rs b/src/tools/rustfmt/tests/source/item-brace-style-prefer-same-line.rs
new file mode 100644
index 000000000..dff89b8b6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/item-brace-style-prefer-same-line.rs
@@ -0,0 +1,29 @@
+// rustfmt-brace_style: PreferSameLine
+
+mod M {
+ enum A
+ {
+ A,
+ }
+
+ struct B
+ {
+ b: i32,
+ }
+
+ enum C {}
+
+ struct D {}
+
+ enum A<T> where T: Copy {
+ A,
+ }
+
+ struct B<T> where T: Copy {
+ b: i32,
+ }
+
+ enum C<T> where T: Copy {}
+
+ struct D<T> where T: Copy {}
+}
diff --git a/src/tools/rustfmt/tests/source/item-brace-style-same-line-where.rs b/src/tools/rustfmt/tests/source/item-brace-style-same-line-where.rs
new file mode 100644
index 000000000..1d034089f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/item-brace-style-same-line-where.rs
@@ -0,0 +1,29 @@
+mod M {
+ enum A
+ {
+ A,
+ }
+
+ struct B
+ {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C {}
+
+ struct D {}
+
+ enum A<T> where T: Copy {
+ A,
+ }
+
+ struct B<T> where T: Copy {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C<T> where T: Copy {}
+
+ struct D<T> where T: Copy {}
+}
diff --git a/src/tools/rustfmt/tests/source/itemized-blocks/no_wrap.rs b/src/tools/rustfmt/tests/source/itemized-blocks/no_wrap.rs
new file mode 100644
index 000000000..a7b6a10a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/itemized-blocks/no_wrap.rs
@@ -0,0 +1,47 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+//! This is a list:
+//! * Outer
+//! * Outer
+//! * Inner
+//! * Inner with lots of text so that it could be reformatted something something something lots of text so that it could be reformatted something something something
+//!
+//! This example shows how to configure fern to output really nicely colored logs
+//! - when the log level is error, the whole line is red
+//! - when the log level is warn, the whole line is yellow
+//! - when the log level is info, the level name is green and the rest of the line is white
+//! - when the log level is debug, the whole line is white
+//! - when the log level is trace, the whole line is gray ("bright black")
+
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor
+fn func1() {}
+
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+/// ```
+/// let x = 42;
+/// ```
+fn func2() {}
+
+/// Look:
+///
+/// ```
+/// let x = 42;
+/// ```
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+fn func3() {}
diff --git a/src/tools/rustfmt/tests/source/itemized-blocks/rewrite_fail.rs b/src/tools/rustfmt/tests/source/itemized-blocks/rewrite_fail.rs
new file mode 100644
index 000000000..f99c2cc5f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/itemized-blocks/rewrite_fail.rs
@@ -0,0 +1,11 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 50
+
+// This example shows how to configure fern to output really nicely colored logs
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - when the log level is info, the level name is green and the rest of the line is white
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+fn func1() {}
diff --git a/src/tools/rustfmt/tests/source/itemized-blocks/urls.rs b/src/tools/rustfmt/tests/source/itemized-blocks/urls.rs
new file mode 100644
index 000000000..2eaaafbbc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/itemized-blocks/urls.rs
@@ -0,0 +1,22 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 79
+
+//! CMSIS: Cortex Microcontroller Software Interface Standard
+//!
+//! The version 5 of the standard can be found at:
+//!
+//! http://arm-software.github.io/CMSIS_5/Core/html/index.html
+//!
+//! The API reference of the standard can be found at:
+//!
+//! - example -- http://example.org -- something something something something something something
+//! - something something something something something something more -- http://example.org
+//! - http://example.org/something/something/something/something/something/something and the rest
+//! - Core function access -- http://arm-software.github.io/CMSIS_5/Core/html/group__Core__Register__gr.html
+//! - Intrinsic functions for CPU instructions -- http://arm-software.github.io/CMSIS_5/Core/html/group__intrinsic__CPU__gr.html
+//! - Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Vestibulum sem lacus, commodo vitae.
+//!
+//! The reference C implementation used as the base of this Rust port can be
+//! found at
+//!
+//! https://github.com/ARM-software/CMSIS_5/blob/5.3.0/CMSIS/Core/Include/cmsis_gcc.h
diff --git a/src/tools/rustfmt/tests/source/itemized-blocks/wrap.rs b/src/tools/rustfmt/tests/source/itemized-blocks/wrap.rs
new file mode 100644
index 000000000..955cc698b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/itemized-blocks/wrap.rs
@@ -0,0 +1,55 @@
+// rustfmt-wrap_comments: true
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-max_width: 50
+
+//! This is a list:
+//! * Outer
+//! * Outer
+//! * Inner
+//! * Inner with lots of text so that it could be reformatted something something something lots of text so that it could be reformatted something something something
+//!
+//! This example shows how to configure fern to output really nicely colored logs
+//! - when the log level is error, the whole line is red
+//! - when the log level is warn, the whole line is yellow
+//! - when the log level is info, the level name is green and the rest of the line is white
+//! - when the log level is debug, the whole line is white
+//! - when the log level is trace, the whole line is gray ("bright black")
+
+// This example shows how to configure fern to output really nicely colored logs
+// - when the log level is error, the whole line is red
+// - when the log level is warn, the whole line is yellow
+// - when the log level is info, the level name is green and the rest of the line is white
+// - when the log level is debug, the whole line is white
+// - when the log level is trace, the whole line is gray ("bright black")
+
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor
+fn func1() {}
+
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+/// ```
+/// let x = 42;
+/// ```
+fn func2() {}
+
+/// Look:
+///
+/// ```
+/// let x = 42;
+/// ```
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+fn func3() {}
diff --git a/src/tools/rustfmt/tests/source/label_break.rs b/src/tools/rustfmt/tests/source/label_break.rs
new file mode 100644
index 000000000..2c79fd35e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/label_break.rs
@@ -0,0 +1,28 @@
+// format with label break value.
+fn main() {
+
+'empty_block: {}
+
+'block: {
+ do_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_next_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_last_thing();
+}
+
+let result = 'block: {
+ if foo() {
+ // comment
+ break 'block 1;
+ }
+ if bar() { /* comment */
+ break 'block 2;
+ }
+ 3
+};
+}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/large-block.rs b/src/tools/rustfmt/tests/source/large-block.rs
new file mode 100644
index 000000000..09e9169f3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/large-block.rs
@@ -0,0 +1,5 @@
+fn issue1351() {
+ std_fmt_Arguments_new_v1_std_rt_begin_panic_fmt_sdfasfasdfasdf({
+ static __STATIC_FMTSTR: &'static [&'static str] = &[];
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/large_vec.rs b/src/tools/rustfmt/tests/source/large_vec.rs
new file mode 100644
index 000000000..34d5bf399
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/large_vec.rs
@@ -0,0 +1,29 @@
+// See #1470.
+
+impl Environment {
+ pub fn new_root() -> Rc<RefCell<Environment>> {
+ let mut env = Environment::new();
+ let builtin_functions = &[("println",
+ Function::NativeVoid(CallSign {
+ num_params: 0,
+ variadic: true,
+ param_types: vec![],
+ },
+ native_println)),
+ ("run_http_server",
+ Function::NativeVoid(CallSign {
+ num_params: 1,
+ variadic: false,
+ param_types:
+ vec![Some(ConstraintType::Function)],
+ },
+ native_run_http_server)),
+ ("len",
+ Function::NativeReturning(CallSign {
+ num_params: 1,
+ variadic: false,
+ param_types: vec![None],
+ },
+ native_len))];
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/lazy_static.rs b/src/tools/rustfmt/tests/source/lazy_static.rs
new file mode 100644
index 000000000..38fefbcbe
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/lazy_static.rs
@@ -0,0 +1,45 @@
+// Format `lazy_static!`.
+
+lazy_static! {
+static ref CONFIG_NAME_REGEX: regex::Regex =
+regex::Regex::new(r"^## `([^`]+)`").expect("Failed creating configuration pattern");
+static ref CONFIG_VALUE_REGEX: regex::Regex = regex::Regex::new(r#"^#### `"?([^`"]+)"?`"#)
+.expect("Failed creating configuration value pattern");
+}
+
+// We need to be able to format `lazy_static!` without known syntax.
+lazy_static!(
+ xxx,
+yyyy ,
+ zzzzz
+);
+
+lazy_static!{
+}
+
+// #2354
+lazy_static ! {
+pub static ref Sbase64_encode_string : :: lisp :: LispSubrRef = {
+let subr = :: remacs_sys :: Lisp_Subr {
+header : :: remacs_sys :: Lisp_Vectorlike_Header {
+size : (
+( :: remacs_sys :: PseudovecType :: PVEC_SUBR as :: libc :: ptrdiff_t ) << ::
+remacs_sys :: PSEUDOVECTOR_AREA_BITS ) , } , function : self ::
+Fbase64_encode_string as * const :: libc :: c_void , min_args : 1i16 ,
+max_args : 2i16 , symbol_name : ( b"base64-encode-string\x00" ) . as_ptr ( )
+as * const :: libc :: c_char , intspec : :: std :: ptr :: null ( ) , doc : ::
+std :: ptr :: null ( ) , lang : :: remacs_sys :: Lisp_Subr_Lang_Rust , } ;
+unsafe {
+let ptr = :: remacs_sys :: xmalloc (
+:: std :: mem :: size_of :: < :: remacs_sys :: Lisp_Subr > ( ) ) as * mut ::
+remacs_sys :: Lisp_Subr ; :: std :: ptr :: copy_nonoverlapping (
+& subr , ptr , 1 ) ; :: std :: mem :: forget ( subr ) ; :: lisp :: ExternalPtr
+:: new ( ptr ) } } ; }
+
+
+lazy_static! {
+static ref FOO: HashMap<String,
+(&'static str,
+fn(Foo) -> Result<Box<Bar>, Either<FooError, BarError>>
+),> = HashMap::new();
+}
diff --git a/src/tools/rustfmt/tests/source/let_else.rs b/src/tools/rustfmt/tests/source/let_else.rs
new file mode 100644
index 000000000..a6e816fb5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/let_else.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let Some(1) = Some(1) else { return };
+}
diff --git a/src/tools/rustfmt/tests/source/long-fn-1/version_one.rs b/src/tools/rustfmt/tests/source/long-fn-1/version_one.rs
new file mode 100644
index 000000000..d6832c2af
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/long-fn-1/version_one.rs
@@ -0,0 +1,21 @@
+// rustfmt-version: One
+// Tests that a function which is almost short enough, but not quite, gets
+// formatted correctly.
+
+impl Foo {
+ fn some_input(&mut self, input: Input, input_path: Option<PathBuf>, ) -> (Input, Option<PathBuf>) {}
+
+ fn some_inpu(&mut self, input: Input, input_path: Option<PathBuf>) -> (Input, Option<PathBuf>) {}
+}
+
+// #1843
+#[allow(non_snake_case)]
+pub extern "C" fn Java_com_exonum_binding_storage_indices_ValueSetIndexProxy_nativeContainsByHash() -> bool {
+ false
+}
+
+// #3009
+impl Something {
+ fn my_function_name_is_way_to_long_but_used_as_a_case_study_or_an_example_its_fine(
+) -> Result< (), String > {}
+}
diff --git a/src/tools/rustfmt/tests/source/long-fn-1/version_two.rs b/src/tools/rustfmt/tests/source/long-fn-1/version_two.rs
new file mode 100644
index 000000000..f402a26e8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/long-fn-1/version_two.rs
@@ -0,0 +1,21 @@
+// rustfmt-version: Two
+// Tests that a function which is almost short enough, but not quite, gets
+// formatted correctly.
+
+impl Foo {
+ fn some_input(&mut self, input: Input, input_path: Option<PathBuf>, ) -> (Input, Option<PathBuf>) {}
+
+ fn some_inpu(&mut self, input: Input, input_path: Option<PathBuf>) -> (Input, Option<PathBuf>) {}
+}
+
+// #1843
+#[allow(non_snake_case)]
+pub extern "C" fn Java_com_exonum_binding_storage_indices_ValueSetIndexProxy_nativeContainsByHash() -> bool {
+ false
+}
+
+// #3009
+impl Something {
+ fn my_function_name_is_way_to_long_but_used_as_a_case_study_or_an_example_its_fine(
+) -> Result< (), String > {}
+}
diff --git a/src/tools/rustfmt/tests/source/long-match-arms-brace-newline.rs b/src/tools/rustfmt/tests/source/long-match-arms-brace-newline.rs
new file mode 100644
index 000000000..927ada0ff
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/long-match-arms-brace-newline.rs
@@ -0,0 +1,15 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 80
+// rustfmt-control_brace_style: AlwaysNextLine
+
+fn main() {
+ match x {
+ aaaaaaaa::Bbbbb::Ccccccccccccc(_, Some(ref x)) if x ==
+ "aaaaaaaaaaa \
+ aaaaaaa \
+ aaaaaa" => {
+ Ok(())
+ }
+ _ => Err(x),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/long-use-statement-issue-3154.rs b/src/tools/rustfmt/tests/source/long-use-statement-issue-3154.rs
new file mode 100644
index 000000000..339382b5b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/long-use-statement-issue-3154.rs
@@ -0,0 +1,3 @@
+// rustfmt-reorder_imports: false
+
+pub use self :: super :: super :: super :: root::mozilla::detail::StringClassFlags as nsTStringRepr_ClassFlags ;
diff --git a/src/tools/rustfmt/tests/source/long_field_access.rs b/src/tools/rustfmt/tests/source/long_field_access.rs
new file mode 100644
index 000000000..7aa626221
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/long_field_access.rs
@@ -0,0 +1,3 @@
+fn f() {
+ block_flow.base.stacking_relative_position_of_display_port = self.base.stacking_relative_position_of_display_port;
+}
diff --git a/src/tools/rustfmt/tests/source/loop.rs b/src/tools/rustfmt/tests/source/loop.rs
new file mode 100644
index 000000000..6e92cdc6c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/loop.rs
@@ -0,0 +1,29 @@
+
+fn main() {
+ loop
+ { return some_val;}
+
+let x = loop { do_forever(); };
+
+ 'label : loop {
+ // Just comments
+ }
+
+ 'a: while loooooooooooooooooooooooooooooooooong_variable_name + another_value > some_other_value{}
+
+ while aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa > bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb {
+ }
+
+ while aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa {}
+
+ 'b: for xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx in some_iter(arg1, arg2) {
+ // do smth
+ }
+
+ while let Some(i) = x.find('s')
+ {
+ x.update();
+ continue;
+ continue 'foo;
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/macro_not_expr.rs b/src/tools/rustfmt/tests/source/macro_not_expr.rs
new file mode 100644
index 000000000..d8de4dce3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/macro_not_expr.rs
@@ -0,0 +1,7 @@
+macro_rules! test {
+ ($($t:tt)*) => {}
+}
+
+fn main() {
+ test!( a : B => c d );
+}
diff --git a/src/tools/rustfmt/tests/source/macro_rules.rs b/src/tools/rustfmt/tests/source/macro_rules.rs
new file mode 100644
index 000000000..5aaca0c83
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/macro_rules.rs
@@ -0,0 +1,301 @@
+// rustfmt-format_macro_matchers: true
+
+macro_rules! m {
+ () => ();
+ ( $ x : ident ) => ();
+ ( $ m1 : ident , $ m2 : ident , $ x : ident ) => ();
+ ( $($beginning:ident),*;$middle:ident;$($end:ident),* ) => ();
+ ( $($beginning: ident),*; $middle: ident; $($end: ident),*; $($beginning: ident),*; $middle: ident; $($end: ident),* ) => {};
+ ( $ name : ident ( $ ( $ dol : tt $ var : ident ) * ) $ ( $ body : tt ) * ) => ();
+ ( $( $ i : ident : $ ty : ty , $def : expr , $stb : expr , $ ( $ dstring : tt ) , + ) ; + $ ( ; ) *
+ $( $ i : ident : $ ty : ty , $def : expr , $stb : expr , $ ( $ dstring : tt ) , + ) ; + $ ( ; ) *
+ ) => {};
+ ( $foo: tt foo [$ attr : meta] $name: ident ) => {};
+ ( $foo: tt [$ attr: meta] $name: ident ) => {};
+ ( $foo: tt &'a [$attr : meta] $name: ident ) => {};
+ ( $foo: tt foo # [ $attr : meta] $name: ident ) => {};
+ ( $foo: tt # [ $attr : meta] $name: ident) => {};
+ ( $foo: tt &'a # [ $attr : meta] $name: ident ) => {};
+ ( $ x : tt foo bar foo bar foo bar $ y : tt => x*y*z $ z : tt , $ ( $a: tt ) , * ) => {};
+}
+
+
+macro_rules! impl_a_method {
+ ($n:ident ( $a:ident : $ta:ty ) -> $ret:ty { $body:expr }) => {
+ fn $n($a:$ta) -> $ret { $body }
+ macro_rules! $n { ($va:expr) => { $n($va) } }
+ };
+ ($n:ident ( $a:ident : $ta:ty, $b:ident : $tb:ty ) -> $ret:ty { $body:expr }) => {
+ fn $n($a:$ta, $b:$tb) -> $ret { $body }
+ macro_rules! $n { ($va:expr, $vb:expr) => { $n($va, $vb) } }
+ };
+ ($n:ident ( $a:ident : $ta:ty, $b:ident : $tb:ty, $c:ident : $tc:ty ) -> $ret:ty { $body:expr }) => {
+ fn $n($a:$ta, $b:$tb, $c:$tc) -> $ret { $body }
+ macro_rules! $n { ($va:expr, $vb:expr, $vc:expr) => { $n($va, $vb, $vc) } }
+ };
+ ($n:ident ( $a:ident : $ta:ty, $b:ident : $tb:ty, $c:ident : $tc:ty, $d:ident : $td:ty ) -> $ret:ty { $body:expr }) => {
+ fn $n($a:$ta, $b:$tb, $c:$tc, $d:$td) -> $ret { $body }
+ macro_rules! $n { ($va:expr, $vb:expr, $vc:expr, $vd:expr) => { $n($va, $vb, $vc, $vd) } }
+ };
+}
+
+macro_rules! m {
+ // a
+ ($expr :expr, $( $func : ident ) * ) => {
+ {
+ let x = $expr;
+ $func (
+ x
+ )
+ }
+ };
+
+ /* b */
+
+ () => {/* c */};
+
+ (@tag) =>
+ {
+
+ };
+
+// d
+( $item:ident ) => {
+ mod macro_item { struct $item ; }
+};
+}
+
+macro m2 {
+ // a
+ ($expr :expr, $( $func : ident ) * ) => {
+ {
+ let x = $expr;
+ $func (
+ x
+ )
+ }
+ }
+
+ /* b */
+
+ () => {/* c */}
+
+ (@tag) =>
+ {
+
+ }
+
+// d
+( $item:ident ) => {
+ mod macro_item { struct $item ; }
+}
+}
+
+// #2438, #2476
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_98_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+ );
+ }
+ }
+}
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_99_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+);
+ }
+ };
+}
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_100_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+);
+ }
+ };
+}
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_101_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+ );
+ }
+ };
+}
+
+// #2439
+macro_rules! m {
+ ($line0_xxxxxxxxxxxxxxxxx: expr, $line1_xxxxxxxxxxxxxxxxx: expr, $line2_xxxxxxxxxxxxxxxxx: expr, $line3_xxxxxxxxxxxxxxxxx: expr,) => {};
+}
+
+// #2466
+// Skip formatting `macro_rules!` that are not using `{}`.
+macro_rules! m (
+ () => ()
+);
+macro_rules! m [
+ () => ()
+];
+
+// #2470
+macro foo($type_name: ident, $docs: expr) {
+ #[allow(non_camel_case_types)]
+ #[doc=$docs]
+ #[derive(Debug, Clone, Copy)]
+ pub struct $type_name;
+}
+
+// #2534
+macro_rules! foo {
+ ($a:ident : $b:ty) => {};
+ ($a:ident $b:ident $c:ident) => {};
+}
+
+// #2538
+macro_rules! add_message_to_notes {
+ ($msg:expr) => {{
+ let mut lines = message.lines();
+ notes.push_str(&format!("\n{}: {}", level, lines.next().unwrap()));
+ for line in lines {
+ notes.push_str(&format!(
+ "\n{:indent$}{line}",
+ "",
+ indent = level.len() + 2,
+ line = line,
+ ));
+ }
+ }}
+}
+
+// #2560
+macro_rules! binary {
+ ($_self:ident,$expr:expr, $lhs:expr,$func:ident) => {
+ while $_self.matched($expr) {
+ let op = $_self.get_binary_op()?;
+
+ let rhs = Box::new($_self.$func()?);
+
+ $lhs = Spanned {
+ span: $lhs.get_span().to(rhs.get_span()),
+ value: Expression::Binary {
+ lhs: Box::new($lhs),
+ op,
+ rhs,
+ },
+ }
+ }
+ };
+}
+
+// #2558
+macro_rules! m {
+ ($x:) => {};
+ ($($foo:expr)()?) => {};
+}
+
+// #2749
+macro_rules! foo {
+ ($(x)* {}) => {};
+ ($(x)* ()) => {};
+ ($(x)* []) => {};
+}
+macro_rules! __wundergraph_expand_sqlite_mutation {
+ ( $mutation_name:ident $((context = $($context:tt)*))*{ $( $entity_name:ident( $(insert = $insert:ident,)* $(update = $update:ident,)* $(delete = $($delete:tt)+)* ), )* } ) => {};
+}
+
+// #2607
+macro_rules! bench {
+ ($ty:ident) => {
+ criterion_group!(
+ name = benches;
+ config = ::common_bench::reduced_samples();
+ targets = call, map;
+ );
+ };
+}
+
+// #2770
+macro_rules! save_regs {
+ () => {
+ asm!("push rax
+ push rcx
+ push rdx
+ push rsi
+ push rdi
+ push r8
+ push r9
+ push r10
+ push r11"
+ :::: "intel", "volatile");
+ };
+}
+
+// #2721
+macro_rules! impl_as_byte_slice_arrays {
+ ($n:expr,) => {};
+ ($n:expr, $N:ident, $($NN:ident,)*) => {
+ impl_as_byte_slice_arrays!($n - 1, $($NN,)*);
+
+ impl<T> AsByteSliceMut for [T; $n] where [T]: AsByteSliceMut {
+ fn as_byte_slice_mut(&mut self) -> &mut [u8] {
+ self[..].as_byte_slice_mut()
+ }
+
+ fn to_le(&mut self) {
+ self[..].to_le()
+ }
+ }
+ };
+ (!div $n:expr,) => {};
+ (!div $n:expr, $N:ident, $($NN:ident,)*) => {
+ impl_as_byte_slice_arrays!(!div $n / 2, $($NN,)*);
+
+ impl<T> AsByteSliceMut for [T; $n] where [T]: AsByteSliceMut {
+ fn as_byte_slice_mut(&mut self) -> &mut [u8] {
+ self[..].as_byte_slice_mut()
+ }
+
+ fn to_le(&mut self) {
+ self[..].to_le()
+ }
+ }
+ };
+}
+
+// #2919
+fn foo() {
+ {
+ macro_rules! touch_value {
+ ($func:ident, $value:expr) => {{
+ let result = API::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppendMode::paTouch);
+ let result = API::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppend);
+ let result = API::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppendM);
+ let result = APIIIIIIIII::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppendM);
+ let result = API::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppendMMMMMMMMMM);
+ debug_assert!(result == 0);
+ }};
+ }
+ }
+}
+
+// #2642
+macro_rules! template {
+ ($name: expr) => {
+ format_args!(r##"
+"http://example.com"
+
+# test
+"##, $name)
+ }
+}
+
+macro_rules! template {
+ () => {
+ format_args!(r"
+//
+
+")
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/macros.rs b/src/tools/rustfmt/tests/source/macros.rs
new file mode 100644
index 000000000..3b286579c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/macros.rs
@@ -0,0 +1,486 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_macro_matchers: true
+itemmacro!(this, is.now() .formatted(yay));
+
+itemmacro!(really, long.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbb() .is.formatted());
+
+itemmacro!{this, is.brace().formatted()}
+
+fn main() {
+ foo! ( );
+
+ foo!(,);
+
+ bar!( a , b , c );
+
+ bar!( a , b , c , );
+
+ baz!(1+2+3, quux. kaas());
+
+ quux!(AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB);
+
+ kaas!(/* comments */ a /* post macro */, b /* another */);
+
+ trailingcomma!( a , b , c , );
+ // Preserve trailing comma only when necessary.
+ ok!(file.seek(
+ SeekFrom::Start(
+ table.map(|table| fixture.offset(table)).unwrap_or(0),
+ )
+ ));
+
+ noexpr!( i am not an expression, OK? );
+
+ vec! [ a , b , c];
+
+ vec! [AAAAAA, AAAAAA, AAAAAA, AAAAAA, AAAAAA, AAAAAA, AAAAAA, AAAAAA, AAAAAA,
+ BBBBB, 5, 100-30, 1.33, b, b, b];
+
+ vec! [a /* comment */];
+
+ // Trailing spaces after a comma
+ vec![
+ a,
+ ];
+
+ vec![a; b];
+ vec!(a; b);
+ vec!{a; b};
+
+ vec![a, b; c];
+ vec![a; b, c];
+
+ vec![a; (|x| { let y = x + 1; let z = y + 1; z })(2)];
+ vec![a; xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx];
+ vec![a; unsafe {
+ x + 1
+ }];
+
+ unknown_bracket_macro__comma_should_not_be_stripped![
+ a,
+ ];
+
+ foo(makro!(1, 3));
+
+ hamkaas!{ () };
+
+ macrowithbraces! {dont, format, me}
+
+ x!(fn);
+
+ some_macro!(
+
+ );
+
+ some_macro![
+ ];
+
+ some_macro!{
+ // comment
+ };
+
+ some_macro!{
+ // comment
+ };
+
+ some_macro!(
+ // comment
+ not function like
+ );
+
+ // #1712
+ let image = gray_image!(
+ 00, 01, 02;
+ 10, 11, 12;
+ 20, 21, 22);
+
+ // #1092
+ chain!(input, a:take!(max_size), || []);
+
+ // #2727
+ foo!("bar")
+;
+}
+
+impl X {
+ empty_invoc!{}
+ empty_invoc! {}
+}
+
+fn issue_1279() {
+ println!("dsfs"); // a comment
+}
+
+fn issue_1555() {
+ let hello = &format!("HTTP/1.1 200 OK\r\nServer: {}\r\n\r\n{}",
+ "65454654654654654654654655464",
+ "4");
+}
+
+fn issue1178() {
+ macro_rules! foo {
+ (#[$attr:meta] $name:ident) => {}
+ }
+
+ foo!(#[doc = "bar"] baz);
+}
+
+fn issue1739() {
+ sql_function!(add_rss_item,
+ add_rss_item_t,
+ (a: types::Integer,
+ b: types::Timestamptz,
+ c: types::Text,
+ d: types::Text,
+ e: types::Text));
+
+ w.slice_mut(s![.., init_size[1] - extreeeeeeeeeeeeeeeeeeeeeeeem..init_size[1], ..])
+ .par_map_inplace(|el| *el = 0.);
+}
+
+fn issue_1885() {
+ let threads = people.into_iter().map(|name| {
+ chan_select! {
+ rx.recv() => {}
+ }
+ }).collect::<Vec<_>>();
+}
+
+fn issue_1917() {
+ mod x {
+ quickcheck! {
+ fn test(a: String, s: String, b: String) -> TestResult {
+ if a.find(&s).is_none() {
+
+ TestResult::from_bool(true)
+ } else {
+ TestResult::discard()
+ }
+ }
+ }
+ }
+}
+
+fn issue_1921() {
+ // Macro with tabs.
+ lazy_static! {
+ static ref ONE: u32 = 1;
+ static ref TWO: u32 = 2;
+ static ref THREE: u32 = 3;
+ static ref FOUR: u32 = {
+ let mut acc = 1;
+ acc += 1;
+ acc += 2;
+ acc
+ }
+}
+}
+
+// #1577
+fn issue1577() {
+ let json = json!({
+ "foo": "bar",
+ });
+}
+
+// #3174
+fn issue_3174() {
+ let data =
+ if let Some(debug) = error.debug_info() {
+ json!({
+ "errorKind": format!("{:?}", error.err_kind()),
+ "debugMessage": debug.message,
+ })
+ } else {
+ json!({"errorKind": format!("{:?}", error.err_kind())})
+ };
+}
+
+gfx_pipeline!(pipe {
+ vbuf: gfx::VertexBuffer<Vertex> = (),
+ out: gfx::RenderTarget<ColorFormat> = "Target0",
+});
+
+// #1919
+#[test]
+fn __bindgen_test_layout_HandleWithDtor_open0_int_close0_instantiation() {
+ assert_eq!(
+ ::std::mem::size_of::<HandleWithDtor<::std::os::raw::c_int>>(),
+ 8usize,
+ concat!(
+ "Size of template specialization: ",
+ stringify ! ( HandleWithDtor < :: std :: os :: raw :: c_int > )
+ )
+ );
+ assert_eq ! ( :: std :: mem :: align_of :: < HandleWithDtor < :: std :: os :: raw :: c_int > > ( ) , 8usize , concat ! ( "Alignment of template specialization: " , stringify ! ( HandleWithDtor < :: std :: os :: raw :: c_int > ) ) );
+}
+
+// #878
+macro_rules! try_opt {
+ ($expr:expr) => (match $expr {
+ Some(val) => val,
+
+ None => { return None; }
+ })
+}
+
+// #2214
+// macro call whose argument is an array with trailing comma.
+fn issue2214() {
+make_test!(str_searcher_ascii_haystack, "bb", "abbcbbd", [
+ Reject(0, 1),
+ Match (1, 3),
+ Reject(3, 4),
+ Match (4, 6),
+ Reject(6, 7),
+]);
+}
+
+fn special_case_macros() {
+ let p = eprint!();
+ let q = eprint!("{}", 1);
+ let r = eprint!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = eprint!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ let q = eprintln!("{}", 1);
+ let r = eprintln!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = eprintln!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ let q = format!("{}", 1);
+ let r = format!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = format!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ let q = format_args!("{}", 1);
+ let r = format_args!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = format_args!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ let q = print!("{}", 1);
+ let r = print!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = print!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ let q = println!("{}", 1);
+ let r = println!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = println!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ let q = unreachable!("{}", 1);
+ let r = unreachable!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ let s = unreachable!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ debug!("{}", 1);
+ debug!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ debug!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ error!("{}", 1);
+ error!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ error!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ info!("{}", 1);
+ info!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ info!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ panic!("{}", 1);
+ panic!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ panic!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ warn!("{}", 1);
+ warn!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+ warn!("{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ assert!();
+ assert!(result == 42);
+ assert!(result == 42, "Ahoy there, {}!", target);
+ assert!(result == 42, "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')", result, input, expected);
+ assert!(result == 42, "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ assert_eq!();
+ assert_eq!(left);
+ assert_eq!(left, right);
+ assert_eq!(left, right, "Ahoy there, {}!", target);
+ assert_eq!(left, right, "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')", result, input, expected);
+ assert_eq!(first_realllllllllllly_long_variable_that_doesnt_fit_one_one_line, second_reallllllllllly_long_variable_that_doesnt_fit_one_one_line, "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')", result, input, expected);
+ assert_eq!(left + 42, right, "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')", result, input, expected);
+ assert_eq!(left, right, "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ write!(&mut s, "Ahoy there, {}!", target);
+ write!(&mut s, "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')", result, input, expected);
+ write!(&mut s, "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+
+ writeln!(&mut s, "Ahoy there, {}!", target);
+ writeln!(&mut s, "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')", result, input, expected);
+ writeln!(&mut s, "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26);
+}
+
+// #1209
+impl Foo {
+ /// foo
+ pub fn foo(&self) -> Bar<foo!( )> {}
+}
+
+// #819
+fn macro_in_pattern_position () {
+ let x = match y {
+ foo!( ) => (),
+ bar!( a, b,
+ c) => (),
+ bar!(a
+ , b
+ , c
+ ,) => (),
+ baz!( 1 + 2 + 3, quux.kaas( )
+ ) => (),
+ quux!(AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB) => (),
+ };
+}
+
+macro foo() {
+
+
+}
+
+pub macro bar($x:ident+$y:expr; ) {
+ fn foo($x: Foo) {
+ long_function(a_long_argument_to_a_long_function_is_what_this_is(AAAAAAAAAAAAAAAAAAAAAAAAAAAA),
+ $x.bar($y));
+ }
+}
+
+macro foo() {
+ // a comment
+ fn foo() {
+ // another comment
+ bar();
+ }
+}
+
+// #2574
+macro_rules! test {
+ () => {{}}
+}
+
+macro lex_err($kind: ident $(, $body: expr)*) {
+ Err(QlError::LexError(LexError::$kind($($body,)*)))
+}
+
+// Preserve trailing comma on item-level macro with `()` or `[]`.
+methods![ get, post, delete, ];
+methods!( get, post, delete, );
+
+// #2588
+macro_rules! m {
+ () => {
+ r#"
+ test
+ "#
+ };
+}
+fn foo() {
+ f!{r#"
+ test
+ "#};
+}
+
+// #2591
+fn foo() {
+ match 0u32 {
+ 0 => (),
+ _ => unreachable!(/* obviously */),
+ }
+}
+
+fn foo() {
+ let _ = column!(/* here */);
+}
+
+// #2616
+// Preserve trailing comma when using mixed layout for macro call.
+fn foo() {
+ foo!(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1);
+ foo!(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,);
+}
+
+// #2830
+// Preserve trailing comma-less/ness inside nested macro.
+named!(
+ do_parse_gsv<GsvData>,
+ map_res!(
+ do_parse!(
+ number_of_sentences: map_res!(digit, parse_num::<u16>)
+ >> char!(',')
+ >> sentence_index: map_res!(digit, parse_num::<u16>)
+ >> char!(',')
+ >> total_number_of_sats: map_res!(digit, parse_num::<u16>)
+ >> char!(',')
+ >> sat0: opt!(complete!(parse_gsv_sat_info))
+ >> sat1: opt!(complete!(parse_gsv_sat_info))
+ >> sat2: opt!(complete!(parse_gsv_sat_info))
+ >> sat3: opt!(complete!(parse_gsv_sat_info))
+ >> (
+ number_of_sentences,
+ sentence_index,
+ total_number_of_sats,
+ sat0,
+ sat1,
+ sat2,
+ sat3
+ )
+ ),
+ construct_gsv_data
+ )
+);
+
+// #2857
+convert_args!(vec!(1, 2, 3));
+
+// #3031
+thread_local!(
+/// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new()) ;
+) ;
+
+thread_local![
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new()) ;
+
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new(0)) ;
+
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new(), xxx, yyy) ;
+
+ /// TLV Holds a set of JSTraceables that need to be rooted
+static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new(1234)) ;
+
+] ;
+
+fn issue3004() {
+ foo!(|_| { ( ) });
+ stringify!(( foo+ ));
+}
+
+// #3331
+pub fn fold_abi<V: Fold + ?Sized>(_visitor: &mut V, _i: Abi) -> Abi {
+ Abi {
+ extern_token: Token ! [ extern ](tokens_helper(_visitor, &_i.extern_token.span)),
+ name: (_i.name).map(|it| _visitor.fold_lit_str(it)),
+ }
+}
+
+// #3463
+x ! {()}
+
+// #3746
+f!(match a {
+ 4 =>
+ &[
+ (3, false), // Missing
+ (4, true) // I-frame
+ ] [..],
+});
+
+// #3583
+foo!(|x = y|);
diff --git a/src/tools/rustfmt/tests/source/markdown-comment-with-options.rs b/src/tools/rustfmt/tests/source/markdown-comment-with-options.rs
new file mode 100644
index 000000000..2c4d6a5cc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/markdown-comment-with-options.rs
@@ -0,0 +1,17 @@
+// rustfmt-wrap_comments: true
+
+// Preserve two trailing whitespaces in doc comment,
+// but trim any whitespaces in normal comment.
+
+//! hello world
+//! hello world
+
+/// hello world
+/// hello world
+/// hello world
+fn foo() {
+ // hello world
+ // hello world
+ let x = 3;
+ println!("x = {}", x);
+}
diff --git a/src/tools/rustfmt/tests/source/markdown-comment.rs b/src/tools/rustfmt/tests/source/markdown-comment.rs
new file mode 100644
index 000000000..1ec26562f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/markdown-comment.rs
@@ -0,0 +1,15 @@
+// Preserve two trailing whitespaces in doc comment,
+// but trim any whitespaces in normal comment.
+
+//! hello world
+//! hello world
+
+/// hello world
+/// hello world
+/// hello world
+fn foo() {
+ // hello world
+ // hello world
+ let x = 3;
+ println!("x = {}", x);
+}
diff --git a/src/tools/rustfmt/tests/source/match-block-trailing-comma.rs b/src/tools/rustfmt/tests/source/match-block-trailing-comma.rs
new file mode 100644
index 000000000..baa05b79c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/match-block-trailing-comma.rs
@@ -0,0 +1,22 @@
+// rustfmt-match_block_trailing_comma: true
+// Match expressions, no unwrapping of block arms or wrapping of multiline
+// expressions.
+
+fn foo() {
+ match x {
+ a => {
+ "line1";
+ "line2"
+ }
+ ThisIsA::Guard if true => {
+ "line1";
+ "line2"
+ }
+ ThisIsA::ReallyLongPattern(ThatWillForce::TheGuard, ToWrapOnto::TheFollowingLine) if true => {
+ "line1";
+ "line2"
+ }
+ b => (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/match-flattening.rs b/src/tools/rustfmt/tests/source/match-flattening.rs
new file mode 100644
index 000000000..935ece53b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/match-flattening.rs
@@ -0,0 +1,21 @@
+fn main() {
+ match option {
+ None => if condition {
+ true
+ } else {
+ false
+ },
+ }
+}
+
+fn main() {
+ match option {
+ None => {
+ if condition {
+ true
+ } else {
+ false
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/match-nowrap-trailing-comma.rs b/src/tools/rustfmt/tests/source/match-nowrap-trailing-comma.rs
new file mode 100644
index 000000000..134d2fdf9
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/match-nowrap-trailing-comma.rs
@@ -0,0 +1,15 @@
+// rustfmt-match_arm_blocks: false
+// rustfmt-match_block_trailing_comma: true
+// Match expressions, no unwrapping of block arms or wrapping of multiline
+// expressions.
+
+fn foo() {
+ match x {
+ a => {
+ "line1";
+ "line2"
+ }
+ b => (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/match-nowrap.rs b/src/tools/rustfmt/tests/source/match-nowrap.rs
new file mode 100644
index 000000000..db22cd9f0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/match-nowrap.rs
@@ -0,0 +1,12 @@
+// rustfmt-match_arm_blocks: false
+// Match expressions, no unwrapping of block arms or wrapping of multiline
+// expressions.
+
+fn foo() {
+ match x {
+ a => { foo() }
+ b =>
+ (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb),
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/match.rs b/src/tools/rustfmt/tests/source/match.rs
new file mode 100644
index 000000000..b5dc9957a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/match.rs
@@ -0,0 +1,589 @@
+// rustfmt-normalize_comments: true
+// Match expressions.
+
+fn foo() {
+ // A match expression.
+ match x {
+ // Some comment.
+ a => foo(),
+ b if 0 < 42 => foo(),
+ c => { // Another comment.
+ // Comment.
+ an_expression;
+ foo()
+ }
+ Foo(ref bar) =>
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ Pattern1 | Pattern2 | Pattern3 => false,
+ Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn |
+ Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => {
+ blah
+ }
+ Patternnnnnnnnnnnnnnnnnnn |
+ Patternnnnnnnnnnnnnnnnnnn |
+ Patternnnnnnnnnnnnnnnnnnn |
+ Patternnnnnnnnnnnnnnnnnnn => meh,
+
+ Patternnnnnnnnnnnnnnnnnnn |
+ Patternnnnnnnnnnnnnnnnnnn if looooooooooooooooooong_guard => meh,
+
+ Patternnnnnnnnnnnnnnnnnnnnnnnnn |
+ Patternnnnnnnnnnnnnnnnnnnnnnnnn if looooooooooooooooooooooooooooooooooooooooong_guard =>
+ meh,
+
+ // Test that earlier patterns can take the guard space
+ (aaaa, bbbbb, ccccccc, aaaaa, bbbbbbbb, cccccc, aaaa, bbbbbbbb, cccccc, dddddd) |
+ Patternnnnnnnnnnnnnnnnnnnnnnnnn if loooooooooooooooooooooooooooooooooooooooooong_guard => {}
+
+ _ => {}
+ ast::PathParameters::AngleBracketedParameters(ref data) if data.lifetimes.len() > 0 ||
+ data.types.len() > 0 ||
+ data.bindings.len() > 0 => {}
+ }
+
+ let whatever = match something {
+ /// DOC COMMENT!
+ Some(_) => 42,
+ // Comment on an attribute.
+ #[an_attribute]
+ // Comment after an attribute.
+ None => 0,
+ #[rustfmt::skip]
+ Blurb => { }
+ };
+}
+
+// Test that a match on an overflow line is laid out properly.
+fn main() {
+ let sub_span =
+ match xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ Some(sub_span) => Some(sub_span),
+ None => sub_span,
+ };
+}
+
+// Test that one-line bodies align.
+fn main() {
+ match r {
+ Variableeeeeeeeeeeeeeeeee => ( "variable",
+ vec!("id", "name", "qualname",
+ "value", "type", "scopeid"),
+ true,
+ true),
+ Enummmmmmmmmmmmmmmmmmmmm => ("enum",
+ vec!("id","qualname","scopeid","value"),
+ true,
+ true),
+ Variantttttttttttttttttttttttt => ("variant",
+ vec!("id",
+ "name",
+ "qualname",
+ "type",
+ "value",
+ "scopeid"),
+ true,
+ true),
+ };
+
+ match x{
+ y=>{/*Block with comment. Preserve me.*/ }
+ z=>{stmt();} }
+}
+
+fn matches() {
+ match 1 {
+ -1 => 10,
+ 1 => 1, // foo
+ 2 => 2,
+ // bar
+ 3 => 3,
+ _ => 0 // baz
+ }
+}
+
+fn match_skip() {
+ let _ = match Some(1) {
+ #[rustfmt::skip]
+ Some( n ) => n,
+ None => 1,
+ };
+}
+
+fn issue339() {
+ match a {
+ b => {}
+ c => { }
+ d => {
+ }
+ e => {
+
+
+
+ }
+ // collapsing here is safe
+ ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff => {
+ }
+ // collapsing here exceeds line length
+ ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffg => {
+ }
+ h => { // comment above block
+ }
+ i => {
+ } // comment below block
+ j => {
+ // comment inside block
+ }
+ j2 => {
+ // comments inside...
+ } // ... and after
+ // TODO uncomment when vertical whitespace is handled better
+ // k => {
+ //
+ // // comment with WS above
+ // }
+ // l => {
+ // // comment with ws below
+ //
+ // }
+ m => {
+ } n => { } o =>
+ {
+
+ }
+ p => { // Don't collapse me
+ } q => { } r =>
+ {
+
+ }
+ s => 0, // s comment
+ // t comment
+ t => 1,
+ u => 2,
+ v => {
+ } /* funky block
+ * comment */
+ // final comment
+ }
+}
+
+fn issue355() {
+ match mac {
+ a => println!("a", b),
+ b => vec!(1, 2),
+ c => vec!(3; 4),
+ d => {
+ println!("a", b)
+ }
+ e => {
+ vec!(1, 2)
+ }
+ f => {
+ vec!(3; 4)
+ }
+ h => println!("a", b), // h comment
+ i => vec!(1, 2), // i comment
+ j => vec!(3; 4), // j comment
+ // k comment
+ k => println!("a", b),
+ // l comment
+ l => vec!(1, 2),
+ // m comment
+ m => vec!(3; 4),
+ // Rewrite splits macro
+ nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => println!("a", b),
+ // Rewrite splits macro
+ oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo => vec!(1, 2),
+ // Macro support fails to recognise this macro as splittable
+ // We push the whole expr to a new line, TODO split this macro as well
+ pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp => vec!(3; 4),
+ // q, r and s: Rewrite splits match arm
+ qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq => println!("a", b),
+ rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr => vec!(1, 2),
+ ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss => vec!(3; 4),
+ // Funky bracketing styles
+ t => println!{"a", b},
+ u => vec!{1, 2},
+ v => vec!{3; 4},
+ w => println!["a", b],
+ x => vec![1, 2],
+ y =>vec![3; 4],
+ // Brackets with comments
+ tc => println!{"a", b}, // comment
+ uc => vec!{1, 2}, // comment
+ vc =>vec!{3; 4}, // comment
+ wc =>println!["a", b], // comment
+ xc => vec![1,2], // comment
+ yc => vec![3; 4], // comment
+ yd =>
+ looooooooooooooooooooooooooooooooooooooooooooooooooooooooong_func(aaaaaaaaaa,
+ bbbbbbbbbb,
+ cccccccccc,
+ dddddddddd),
+ }
+}
+
+fn issue280() {
+ {
+ match x {
+ CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch ==
+ '\n',
+ ast::ItemConst(ref typ, ref expr) => self.process_static_or_const_item(item,
+ &typ,
+ &expr),
+ }
+ }
+}
+
+fn issue383() {
+ match resolution.last_private {LastImport{..} => false, _ => true};
+}
+
+fn issue507() {
+ match 1 {
+ 1 => unsafe { std::intrinsics::abort() },
+ _ => (),
+ }
+}
+
+fn issue508() {
+ match s.type_id() {
+ Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
+ HTMLElementTypeId::HTMLCanvasElement))) => true,
+ Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
+ HTMLElementTypeId::HTMLObjectElement))) => s.has_object_data(),
+ Some(NodeTypeId::Element(_)) => false,
+ }
+}
+
+fn issue496() {{{{
+ match def {
+ def::DefConst(def_id) | def::DefAssociatedConst(def_id) =>
+ match const_eval::lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) {
+ Some(const_expr) => { x }}}}}}}
+
+fn issue494() {
+ {
+ match stmt.node {
+ hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) =>
+ result.push(
+ StmtRef::Mirror(
+ Box::new(Stmt { span: stmt.span,
+ kind: StmtKind::Expr {
+ scope: cx.tcx.region_maps.node_extent(id),
+ expr: expr.to_ref() } }))),
+ }
+ }
+}
+
+fn issue386() {
+ match foo {
+ BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
+ true,
+ BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
+ BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
+ false,
+ }
+}
+
+fn guards() {
+ match foo {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa if foooooooooooooo && barrrrrrrrrrrr => {}
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ if fooooooooooooooooooooo &&
+ (bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb || cccccccccccccccccccccccccccccccccccccccc) => {}
+ }
+}
+
+fn issue1371() {
+ Some(match type_ {
+ sfEvtClosed => Closed,
+ sfEvtResized => {
+ let e = unsafe { *event.size.as_ref() };
+
+ Resized {
+ width: e.width,
+ height: e.height,
+ }
+ }
+ sfEvtLostFocus => LostFocus,
+ sfEvtGainedFocus => GainedFocus,
+ sfEvtTextEntered => {
+ TextEntered {
+ unicode: unsafe {
+ ::std::char::from_u32((*event.text.as_ref()).unicode)
+ .expect("Invalid unicode encountered on TextEntered event")
+ },
+ }
+ }
+ sfEvtKeyPressed => {
+ let e = unsafe { event.key.as_ref() };
+
+ KeyPressed {
+ code: unsafe { ::std::mem::transmute(e.code) },
+ alt: e.alt.to_bool(),
+ ctrl: e.control.to_bool(),
+ shift: e.shift.to_bool(),
+ system: e.system.to_bool(),
+ }
+ }
+ sfEvtKeyReleased => {
+ let e = unsafe { event.key.as_ref() };
+
+ KeyReleased {
+ code: unsafe { ::std::mem::transmute(e.code) },
+ alt: e.alt.to_bool(),
+ ctrl: e.control.to_bool(),
+ shift: e.shift.to_bool(),
+ system: e.system.to_bool(),
+ }
+ }
+ })
+}
+
+fn issue1395() {
+ let bar = Some(true);
+ let foo = Some(true);
+ let mut x = false;
+ bar.and_then(|_| {
+ match foo {
+ None => None,
+ Some(b) => {
+ x = true;
+ Some(b)
+ }
+ }
+ });
+}
+
+fn issue1456() {
+ Ok(Recording {
+ artists: match reader.evaluate(".//mb:recording/mb:artist-credit/mb:name-credit")? {
+ Nodeset(nodeset) => {
+ let res: Result<Vec<ArtistRef>, ReadError> = nodeset
+ .iter()
+ .map(|node| {
+ XPathNodeReader::new(node, &context).and_then(|r| ArtistRef::from_xml(&r))
+ })
+ .collect();
+ res?
+ }
+ _ => Vec::new(),
+ },
+ })
+}
+
+fn issue1460() {
+ let _ = match foo {
+ REORDER_BUFFER_CHANGE_INTERNAL_SPEC_INSERT => "internal_spec_insert_internal_spec_insert_internal_spec_insert",
+ _ => "reorder_something",
+ };
+}
+
+fn issue525() {
+ foobar(f, "{}", match *self {
+ TaskState::Started => "started",
+ TaskState::Success => "success",
+ TaskState::Failed => "failed",
+ });
+}
+
+// #1838, #1839
+fn match_with_near_max_width() {
+ let (this_line_uses_99_characters_and_is_formatted_properly, x012345) = match some_expression {
+ _ => unimplemented!(),
+ };
+
+ let (should_be_formatted_like_the_line_above_using_100_characters, x0) = match some_expression {
+ _ => unimplemented!(),
+ };
+
+ let (should_put_the_brace_on_the_next_line_using_101_characters, x0000) = match some_expression
+ {
+ _ => unimplemented!(),
+ };
+ match m {
+ Variant::Tag | Variant::Tag2 | Variant::Tag3 | Variant::Tag4 | Variant::Tag5 | Variant::Tag6 =>
+ {}
+ }
+}
+
+fn match_with_trailing_spaces() {
+ match x {
+
+ Some(..) => 0,
+ None => 1,
+ }
+}
+
+fn issue_2099() {
+ let a = match x {
+};
+ let b = match x {
+
+ };
+
+ match x {}
+}
+
+// #2021
+impl<'tcx> Const<'tcx> {
+ pub fn from_constval<'a>() -> Const<'tcx> {
+ let val = match *cv {
+ ConstVal::Variant(_) | ConstVal::Aggregate(..) | ConstVal::Unevaluated(..) => bug!("MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)", cv),
+ };
+ }
+}
+
+// #2151
+fn issue_2151() {
+ match either {
+ x => {
+
+ }y => ()
+ }
+}
+
+// #2152
+fn issue_2152() {
+ match m {
+ "aaaaaaaaaaaaa" | "bbbbbbbbbbbbb" | "cccccccccccccccccccccccccccccccccccccccccccc" if true => {}
+ "bind" | "writev" | "readv" | "sendmsg" | "recvmsg" if android && (aarch64 || x86_64) => true,
+ }
+}
+
+// #2376
+// Preserve block around expressions with condition.
+fn issue_2376() {
+ let mut x = None;
+ match x {
+ Some(0) => {
+ for i in 1..11 {
+ x = Some(i);
+ }
+ }
+ Some(ref mut y) => {
+ while *y < 10 {
+ *y += 1;
+ }
+ }
+ None => {
+ while let None = x {
+ x = Some(10);
+ }
+ }
+ }
+}
+
+// #2621
+// Strip leading `|` in match arm patterns
+fn issue_2621() {
+ let x = Foo::A;
+ match x {
+ Foo::A => println!("No vert single condition"),
+ Foo::B | Foo::C => println!("Center vert two conditions"),
+ | Foo::D => println!("Preceding vert single condition"),
+ | Foo::E
+ | Foo::F => println!("Preceding vert over two lines"),
+ Foo::G |
+ Foo::H => println!("Trailing vert over two lines"),
+ // Comment on its own line
+ | Foo::I => println!("With comment"), // Comment after line
+ }
+}
+
+fn issue_2377() {
+ match tok {
+ Tok::Not
+ | Tok::BNot
+ | Tok::Plus
+ | Tok::Minus
+ | Tok::PlusPlus
+ | Tok::MinusMinus
+ | Tok::Void
+ | Tok::Delete if prec <= 16 => {
+ // code here...
+ }
+ Tok::TypeOf if prec <= 16 => {}
+ }
+}
+
+// #3040
+fn issue_3040() {
+ {
+ match foo {
+ DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
+ match documents.find_window(id) {
+ Some(window) => devtools::handle_wants_live_notifications(window.upcast(), to_send),
+ None => return warn!("Message sent to closed pipeline {}.", id),
+ }
+ }
+ }
+ }
+}
+
+// #3030
+fn issue_3030() {
+ match input.trim().parse::<f64>() {
+ Ok(val)
+ if !(
+ // A valid number is the same as what rust considers to be valid,
+ // except for +1., NaN, and Infinity.
+ val.is_infinite() || val
+ .is_nan() || input.ends_with(".") || input.starts_with("+")
+ )
+ => {
+ }
+ }
+}
+
+fn issue_3005() {
+ match *token {
+ Token::Dimension {
+ value, ref unit, ..
+ } if num_context.is_ok(context.parsing_mode, value) =>
+ {
+ return NoCalcLength::parse_dimension(context, value, unit)
+ .map(LengthOrPercentage::Length)
+ .map_err(|()| location.new_unexpected_token_error(token.clone()));
+ },
+ }
+}
+
+// #3774
+fn issue_3774() {
+ {
+ {
+ {
+ match foo {
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachab(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreacha!(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachabl(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachae!(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachable!(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => rrunreachable!(),
+ }
+ }
+ }
+ }
+}
+
+// #4109
+fn issue_4109() {
+ match () {
+ _ => {
+#[cfg(debug_assertions)]
+{
+println!("Foo");
+}
+}
+}
+
+match () {
+_ => {
+#[allow(unsafe_code)]
+unsafe {}
+}
+}
+}
diff --git a/src/tools/rustfmt/tests/source/match_overflow_expr.rs b/src/tools/rustfmt/tests/source/match_overflow_expr.rs
new file mode 100644
index 000000000..91275a894
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/match_overflow_expr.rs
@@ -0,0 +1,53 @@
+// rustfmt-overflow_delimited_expr: true
+
+fn main() {
+ println!(
+ "Foobar: {}",
+ match "input" {
+ "a" => "",
+ "b" => "",
+ "c" => "",
+ "d" => "",
+ "e" => "",
+ "f" => "",
+ "g" => "",
+ "h" => "",
+ "i" => "",
+ "j" => "",
+ "k" => "",
+ "l" => "",
+ "m" => "",
+ "n" => "",
+ "o" => "",
+ "p" => "",
+ "q" => "",
+ "r" => "Rust",
+ }
+ );
+}
+
+fn main() {
+ println!(
+ "Very Long Input String Which Makes It Impossible To Fit On The Same Line: {}",
+ match "input" {
+ "a" => "",
+ "b" => "",
+ "c" => "",
+ "d" => "",
+ "e" => "",
+ "f" => "",
+ "g" => "",
+ "h" => "",
+ "i" => "",
+ "j" => "",
+ "k" => "",
+ "l" => "",
+ "m" => "",
+ "n" => "",
+ "o" => "",
+ "p" => "",
+ "q" => "",
+ "r" => "Rust",
+ }
+ );
+}
diff --git a/src/tools/rustfmt/tests/source/max-line-length-in-chars.rs b/src/tools/rustfmt/tests/source/max-line-length-in-chars.rs
new file mode 100644
index 000000000..d49fbb7e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/max-line-length-in-chars.rs
@@ -0,0 +1,4 @@
+// rustfmt-max_width: 25
+
+// абвгдеёжзийклмнопрст
+fn main() {}
diff --git a/src/tools/rustfmt/tests/source/merge_imports_true_compat.rs b/src/tools/rustfmt/tests/source/merge_imports_true_compat.rs
new file mode 100644
index 000000000..bcea94351
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/merge_imports_true_compat.rs
@@ -0,0 +1,4 @@
+// rustfmt-merge_imports: true
+
+use a::b;
+use a::c;
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/mod-1.rs b/src/tools/rustfmt/tests/source/mod-1.rs
new file mode 100644
index 000000000..427a355b6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/mod-1.rs
@@ -0,0 +1,29 @@
+// Deeply indented modules.
+
+ mod foo { mod bar { mod baz {} } }
+
+mod foo {
+ mod bar {
+ mod baz {
+ fn foo() { bar() }
+ }
+ }
+
+ mod qux {
+
+ }
+}
+
+mod boxed { pub use std::boxed::{Box, HEAP}; }
+
+pub mod x {
+ pub fn freopen(filename: *const c_char,
+ mode: *const c_char,
+ mode2: *const c_char,
+ mode3: *const c_char,
+ file: *mut FILE)
+ -> *mut FILE{}
+}
+
+ mod y { // sup boooooiiii
+ }
diff --git a/src/tools/rustfmt/tests/source/mod-2.rs b/src/tools/rustfmt/tests/source/mod-2.rs
new file mode 100644
index 000000000..7202e0020
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/mod-2.rs
@@ -0,0 +1,4 @@
+// Some nested mods
+
+#[cfg(test)] mod nestedmod ;
+pub mod no_new_line_beginning;
diff --git a/src/tools/rustfmt/tests/source/mod_skip_child.rs b/src/tools/rustfmt/tests/source/mod_skip_child.rs
new file mode 100644
index 000000000..d48c4a37e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/mod_skip_child.rs
@@ -0,0 +1,2 @@
+// rustfmt-skip_children: true
+mod nested_skipped;
diff --git a/src/tools/rustfmt/tests/source/multiple.rs b/src/tools/rustfmt/tests/source/multiple.rs
new file mode 100644
index 000000000..f89f4f68d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/multiple.rs
@@ -0,0 +1,134 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-format_strings: true
+// Test of lots of random stuff.
+// FIXME split this into multiple, self-contained tests.
+
+
+#[attr1] extern crate foo;
+#[attr2] #[attr3] extern crate foo;
+#[attr1]extern crate foo;
+#[attr2]#[attr3]extern crate foo;
+
+use std::cell::*;
+use std::{any, ascii, self, borrow, boxed, char, borrow, boxed, char, borrow, borrow, boxed, char, borrow, boxed, char, borrow, boxed, char, borrow, boxed, char, borrow, boxed, char, borrow, boxed, char, borrow, boxed, char, borrow, boxed, char};
+
+mod doc;
+mod other;
+
+
+// sfdgfffffffffffffffffffffffffffffffffffffffffffffffffffffff ffffffffffffffffffffffffffffffffffffffffff
+
+ fn foo(a: isize,
+ b: u32, /* blah blah */
+ c: f64) {
+
+}
+
+fn foo()->Box<Write+'static> where 'a: 'b, for<'a> D<'b>: 'a {
+ hello!()
+}
+
+fn baz<'a: 'b /* comment on 'a */, T: SomsssssssssssssssssssssssssssssssssssssssssssssssssssssseType /* comment on T */>(a: A, b: B /* comment on b */, c: C) -> Bob {
+ #[attr1] extern crate foo;
+ #[attr2] #[attr3] extern crate foo;
+ #[attr1]extern crate foo;
+ #[attr2]#[attr3]extern crate foo;
+}
+
+#[rustfmt::skip]
+fn qux(a: dadsfa, // Comment 1
+ b: sdfasdfa, // Comment 2
+ c: dsfdsafa) // Comment 3
+{
+
+}
+
+/// Blah blah blah.
+impl Bar {
+ fn foo(&mut self, a: sdfsdfcccccccccccccccccccccccccccccccccccccccccccccccccc, // comment on a
+ b: sdfasdfsdfasfs /*closing comment*/ ) -> isize {}
+
+ /// Blah blah blah.
+ pub fn f2(self) {
+ (foo, bar)
+ }
+
+ #[an_attribute]
+ fn f3(self) -> Dog {
+ }
+}
+
+/// The `nodes` and `edges` method each return instantiations of
+/// `Cow<[T]>` to leave implementers the freedom to create
+
+/// entirely new vectors or to pass back slices into internally owned
+/// vectors.
+pub trait GraphWalk<'a, N, E> {
+ /// Returns all the nodes in this graph.
+ fn nodes(&'a self) -> Nodes<'a, N>;
+ /// Returns all of the edges in this graph.
+ fn edges(&'a self) -> Edges<'a, E>;
+ /// The source node for `edge`.
+ fn source(&'a self, edge: &E) -> N;
+ /// The target node for `edge`.
+ fn target(&'a self, edge: &E) -> N;
+}
+
+/// A Doc comment
+#[AnAttribute]
+pub struct Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f : SomeType, // Comment beside a field
+ // Comment on a field
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,}
+
+struct Bar;
+
+// With a where-clause and generics.
+pub struct Foo<'a, Y: Baz>
+ where X: Whatever
+{
+ f: SomeType, // Comment beside a field
+}
+
+fn foo(ann: &'a (PpAnn+'a)) {}
+
+fn main() {
+ for i in 0i32..4 {
+ println!("{}", i);
+ }
+
+
+ while true {
+ hello();
+ }
+
+ let rc = Cell::new(42usize,42usize, Cell::new(42usize, remaining_widthremaining_widthremaining_widthremaining_width), 42usize);
+ let rc = RefCell::new(42usize,remaining_width, remaining_width); // a comment
+ let x = "Hello!!!!!!!!! abcd abcd abcd abcd abcd abcd\n abcd abcd abcd abcd abcd abcd abcd abcd abcd \
+ abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
+ abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd";
+ let s = expand(a
+ ,
+ b); }
+
+fn deconstruct() -> (SocketAddr, Method, Headers,
+ RequestUri, HttpVersion,
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) {
+}
+
+fn deconstruct(foo: Bar) -> (SocketAddr, Method, Headers,
+ RequestUri, HttpVersion,
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) {
+}
+
+#[rustfmt::skip]
+mod a{
+fn foo(x: T) {
+ let x: T = dfasdf;
+}
+}
diff --git a/src/tools/rustfmt/tests/source/negative-impl.rs b/src/tools/rustfmt/tests/source/negative-impl.rs
new file mode 100644
index 000000000..da242d4f3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/negative-impl.rs
@@ -0,0 +1,7 @@
+impl ! Display for JoinHandle { }
+
+impl ! Box < JoinHandle > { }
+
+impl ! std :: fmt :: Display for JoinHandle < T : std :: future :: Future + std :: marker :: Send + std :: marker :: Sync > { }
+
+impl ! JoinHandle < T : std :: future :: Future < Output > + std :: marker :: Send + std :: marker :: Sync + 'static > + 'static { }
diff --git a/src/tools/rustfmt/tests/source/nested-if-else.rs b/src/tools/rustfmt/tests/source/nested-if-else.rs
new file mode 100644
index 000000000..9a54789dd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nested-if-else.rs
@@ -0,0 +1,11 @@
+fn issue1518() {
+ Some(Object {
+ field: if a {
+ a_thing
+ } else if b {
+ b_thing
+ } else {
+ c_thing
+ },
+ })
+}
diff --git a/src/tools/rustfmt/tests/source/nested_skipped/mod.rs b/src/tools/rustfmt/tests/source/nested_skipped/mod.rs
new file mode 100644
index 000000000..44b25ca87
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nested_skipped/mod.rs
@@ -0,0 +1,3 @@
+fn ugly() {
+92;
+}
diff --git a/src/tools/rustfmt/tests/source/nestedmod/mod.rs b/src/tools/rustfmt/tests/source/nestedmod/mod.rs
new file mode 100644
index 000000000..d04e49570
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/mod.rs
@@ -0,0 +1,13 @@
+
+mod mod2a;
+mod mod2b;
+
+mod mymod1 {
+ use mod2a::{Foo,Bar};
+mod mod3a;
+}
+
+#[path="mod2c.rs"]
+mod mymod2;
+
+mod submod2;
diff --git a/src/tools/rustfmt/tests/source/nestedmod/mod2a.rs b/src/tools/rustfmt/tests/source/nestedmod/mod2a.rs
new file mode 100644
index 000000000..5df457a83
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/mod2a.rs
@@ -0,0 +1,4 @@
+// This is an empty file containing only
+// comments
+
+// ...................
diff --git a/src/tools/rustfmt/tests/source/nestedmod/mod2b.rs b/src/tools/rustfmt/tests/source/nestedmod/mod2b.rs
new file mode 100644
index 000000000..f128e2da6
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/mod2b.rs
@@ -0,0 +1,3 @@
+
+#[path="mod2a.rs"]
+mod c;
diff --git a/src/tools/rustfmt/tests/source/nestedmod/mod2c.rs b/src/tools/rustfmt/tests/source/nestedmod/mod2c.rs
new file mode 100644
index 000000000..eda6b233e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/mod2c.rs
@@ -0,0 +1,3 @@
+// A standard mod
+
+fn a( ) {}
diff --git a/src/tools/rustfmt/tests/source/nestedmod/mymod1/mod3a.rs b/src/tools/rustfmt/tests/source/nestedmod/mymod1/mod3a.rs
new file mode 100644
index 000000000..f28bde5e5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/mymod1/mod3a.rs
@@ -0,0 +1,2 @@
+// Another mod
+fn a( ) { }
diff --git a/src/tools/rustfmt/tests/source/nestedmod/submod2/a.rs b/src/tools/rustfmt/tests/source/nestedmod/submod2/a.rs
new file mode 100644
index 000000000..0eaf08f0d
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/submod2/a.rs
@@ -0,0 +1,6 @@
+// Yet Another mod
+// Nested
+
+use c::a;
+
+fn foo( ) { }
diff --git a/src/tools/rustfmt/tests/source/nestedmod/submod2/mod.rs b/src/tools/rustfmt/tests/source/nestedmod/submod2/mod.rs
new file mode 100644
index 000000000..52f8be910
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/nestedmod/submod2/mod.rs
@@ -0,0 +1,5 @@
+// Another mod
+
+mod a;
+
+use a::a;
diff --git a/src/tools/rustfmt/tests/source/no_arg_with_commnet.rs b/src/tools/rustfmt/tests/source/no_arg_with_commnet.rs
new file mode 100644
index 000000000..ea4ee0f1e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/no_arg_with_commnet.rs
@@ -0,0 +1,2 @@
+fn foo( /* cooment */
+) {}
diff --git a/src/tools/rustfmt/tests/source/no_new_line_beginning.rs b/src/tools/rustfmt/tests/source/no_new_line_beginning.rs
new file mode 100644
index 000000000..f79c691f0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/no_new_line_beginning.rs
@@ -0,0 +1,2 @@
+fn main() {
+}
diff --git a/src/tools/rustfmt/tests/source/normalize_doc_attributes_should_not_imply_format_doc_comments.rs b/src/tools/rustfmt/tests/source/normalize_doc_attributes_should_not_imply_format_doc_comments.rs
new file mode 100644
index 000000000..a97705bfb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/normalize_doc_attributes_should_not_imply_format_doc_comments.rs
@@ -0,0 +1,15 @@
+// rustfmt-normalize_doc_attributes: true
+
+/// Foo
+///
+/// # Example
+/// ```
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+/// # #![cfg_attr(not(dox), no_std)]
+/// fn foo() { }
+/// ```
+///
+fn foo() {}
+
+#[doc = "Bar documents"]
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/source/normalize_multiline_doc_attribute.rs b/src/tools/rustfmt/tests/source/normalize_multiline_doc_attribute.rs
new file mode 100644
index 000000000..3564e3e7a
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/normalize_multiline_doc_attribute.rs
@@ -0,0 +1,12 @@
+// rustfmt-unstable: true
+// rustfmt-normalize_doc_attributes: true
+
+#[doc = "This comment
+is split
+on multiple lines"]
+fn foo() {}
+
+#[doc = " B1"]
+#[doc = ""]
+#[doc = " A1"]
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/source/one_line_if_v1.rs b/src/tools/rustfmt/tests/source/one_line_if_v1.rs
new file mode 100644
index 000000000..d3dcbe678
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/one_line_if_v1.rs
@@ -0,0 +1,42 @@
+// rustfmt-version: One
+
+fn plain_if(x: bool) -> u8 {
+ if x {
+ 0
+ } else {
+ 1
+ }
+}
+
+fn paren_if(x: bool) -> u8 {
+ (if x { 0 } else { 1 })
+}
+
+fn let_if(x: bool) -> u8 {
+ let x = if x {
+ foo()
+ } else {
+ bar()
+ };
+ x
+}
+
+fn return_if(x: bool) -> u8 {
+ return if x {
+ 0
+ } else {
+ 1
+ };
+}
+
+fn multi_if() {
+ use std::io;
+ if x { foo() } else { bar() }
+ if x { foo() } else { bar() }
+}
+
+fn middle_if() {
+ use std::io;
+ if x { foo() } else { bar() }
+ let x = 1;
+}
diff --git a/src/tools/rustfmt/tests/source/one_line_if_v2.rs b/src/tools/rustfmt/tests/source/one_line_if_v2.rs
new file mode 100644
index 000000000..40c834959
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/one_line_if_v2.rs
@@ -0,0 +1,42 @@
+// rustfmt-version: Two
+
+fn plain_if(x: bool) -> u8 {
+ if x {
+ 0
+ } else {
+ 1
+ }
+}
+
+fn paren_if(x: bool) -> u8 {
+ (if x { 0 } else { 1 })
+}
+
+fn let_if(x: bool) -> u8 {
+ let x = if x {
+ foo()
+ } else {
+ bar()
+ };
+ x
+}
+
+fn return_if(x: bool) -> u8 {
+ return if x {
+ 0
+ } else {
+ 1
+ };
+}
+
+fn multi_if() {
+ use std::io;
+ if x { foo() } else { bar() }
+ if x { foo() } else { bar() }
+}
+
+fn middle_if() {
+ use std::io;
+ if x { foo() } else { bar() }
+ let x = 1;
+}
diff --git a/src/tools/rustfmt/tests/source/other.rs b/src/tools/rustfmt/tests/source/other.rs
new file mode 100644
index 000000000..dfce84fcd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/other.rs
@@ -0,0 +1,5 @@
+// Part of multiple.rs
+
+fn bob() {
+ println!("hello other!");
+}
diff --git a/src/tools/rustfmt/tests/source/paren.rs b/src/tools/rustfmt/tests/source/paren.rs
new file mode 100644
index 000000000..04e5ab7a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/paren.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let x = (((1)));
+ let y = (/* comment */((2)));
+ let z = (((3)/* comment */));
+ let a = (((4/* comment */)));
+}
diff --git a/src/tools/rustfmt/tests/source/path_clarity/foo.rs b/src/tools/rustfmt/tests/source/path_clarity/foo.rs
new file mode 100644
index 000000000..cd247fabf
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/path_clarity/foo.rs
@@ -0,0 +1,2 @@
+// rustfmt-edition: 2018
+mod bar;
diff --git a/src/tools/rustfmt/tests/source/path_clarity/foo/bar.rs b/src/tools/rustfmt/tests/source/path_clarity/foo/bar.rs
new file mode 100644
index 000000000..8c1be504c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/path_clarity/foo/bar.rs
@@ -0,0 +1,3 @@
+pub fn fn_in_bar( ) {
+ println!( "foo/bar.rs" );
+}
diff --git a/src/tools/rustfmt/tests/source/paths.rs b/src/tools/rustfmt/tests/source/paths.rs
new file mode 100644
index 000000000..ebc26f146
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/paths.rs
@@ -0,0 +1,25 @@
+// rustfmt-normalize_comments: true
+
+fn main() {
+ let constellation_chan = Constellation::<layout::layout_task::LayoutTask, script::script_task::ScriptTask> ::start(
+ compositor_proxy,
+ resource_task,
+ image_cache_task,font_cache_task,
+ time_profiler_chan,
+ mem_profiler_chan,
+ devtools_chan,
+ storage_task,
+ supports_clipboard
+ );
+
+ Quux::<ParamOne, // Comment 1
+ ParamTwo, // Comment 2
+ >::some_func();
+
+ < *mut JSObject >:: relocate(entry);
+
+ let x: Foo<A >;
+ let x: Foo/*::*/<A>;
+}
+
+fn op(foo: Bar, key : &[u8], upd : Fn(Option<&memcache::Item> , Baz ) -> Result) -> MapResult {}
diff --git a/src/tools/rustfmt/tests/source/pattern-condense-wildcards.rs b/src/tools/rustfmt/tests/source/pattern-condense-wildcards.rs
new file mode 100644
index 000000000..69c3fa3cb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/pattern-condense-wildcards.rs
@@ -0,0 +1,12 @@
+// rustfmt-normalize_comments: true
+// rustfmt-condense_wildcard_suffixes: true
+
+fn main() {
+ match x {
+ Butt (_,_) => "hah",
+ Tup (_) => "nah",
+ Quad (_,_, x,_) => " also no rewrite",
+ Quad (x, _, _, _) => "condense me pls",
+ Weird (x, _, _, /* don't condense before */ _, _, _) => "pls work",
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/pattern.rs b/src/tools/rustfmt/tests/source/pattern.rs
new file mode 100644
index 000000000..f06d03cad
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/pattern.rs
@@ -0,0 +1,90 @@
+// rustfmt-normalize_comments: true
+#![feature(exclusive_range_pattern)]
+use core::u8::MAX;
+
+fn main() {
+ let z = match x {
+ "pat1" => 1,
+ ( ref x, ref mut y /*comment*/) => 2,
+ };
+
+ if let < T as Trait > :: CONST = ident {
+ do_smth();
+ }
+
+ let Some ( ref xyz /* comment! */) = opt;
+
+ if let None = opt2 { panic!("oh noes"); }
+
+ let foo@bar (f) = 42;
+ let a::foo ( ..) = 42;
+ let [ ] = 42;
+ let [a, b,c ] = 42;
+ let [ a,b,c ] = 42;
+ let [a, b, c, d,e,f, g] = 42;
+ let foo { } = 42;
+ let foo {..} = 42;
+ let foo { x, y: ref foo, .. } = 42;
+ let foo { x, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo, .. } = 42;
+ let foo { x, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo, } = 42;
+ let foo { x, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo, .. };
+ let foo { x, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo, };
+
+ match b"12" {
+ [0,
+ 1..MAX
+ ] => {}
+ _ => {}
+ }
+}
+
+impl<'a,'b> ResolveGeneratedContentFragmentMutator<'a,'b> {
+ fn mutate_fragment(&mut self, fragment: &mut Fragment) {
+ match **info {
+ GeneratedContentInfo::ContentItem(
+ ContentItem::Counter(
+ ref counter_name,
+ counter_style
+ )
+ ) => {}}}
+}
+
+fn issue_1319() {
+ if let (Event { .. }, .. ) = ev_state {}
+}
+
+fn issue_1874() {
+ if let Some(()) = x {
+y
+ }
+}
+
+fn combine_patterns() {
+ let x = match y {
+ Some(
+ Some(
+ Foo {
+ z: Bar(..),
+ a: Bar(..),
+ b: Bar(..),
+ },
+ ),
+ ) => z,
+ _ => return,
+ };
+}
+
+fn slice_patterns() {
+ match b"123" {
+ [0, ..] => {}
+ [0, foo] => {}
+ _ => {}
+ }
+}
+
+fn issue3728() {
+ let foo = |
+ (c,)
+ | c;
+ foo((1,));
+}
diff --git a/src/tools/rustfmt/tests/source/preserves_carriage_return_for_unix.rs b/src/tools/rustfmt/tests/source/preserves_carriage_return_for_unix.rs
new file mode 100644
index 000000000..e5e0b2865
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/preserves_carriage_return_for_unix.rs
@@ -0,0 +1,2 @@
+// rustfmt-newline_style: Unix
+// Foo Bar
diff --git a/src/tools/rustfmt/tests/source/preserves_carriage_return_for_windows.rs b/src/tools/rustfmt/tests/source/preserves_carriage_return_for_windows.rs
new file mode 100644
index 000000000..1085360ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/preserves_carriage_return_for_windows.rs
@@ -0,0 +1,2 @@
+// rustfmt-newline_style: Windows
+// Foo Bar
diff --git a/src/tools/rustfmt/tests/source/pub-restricted.rs b/src/tools/rustfmt/tests/source/pub-restricted.rs
new file mode 100644
index 000000000..5683acbf3
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/pub-restricted.rs
@@ -0,0 +1,51 @@
+pub( super ) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
+
+pub( crate ) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
+
+pub(in ::global:: path :: to::some_mod ) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
+
+pub( in local:: path :: to::some_mod ) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
diff --git a/src/tools/rustfmt/tests/source/remove_blank_lines.rs b/src/tools/rustfmt/tests/source/remove_blank_lines.rs
new file mode 100644
index 000000000..43733ce76
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/remove_blank_lines.rs
@@ -0,0 +1,44 @@
+fn main() {
+
+
+
+
+ let x = 1;
+
+
+ let y = 2;
+
+
+ println!("x + y = {}", x + y);
+
+
+
+}
+
+
+fn foo() {
+
+ #![attribute]
+
+ let x = 1;
+
+ // comment
+
+
+}
+// comment after item
+
+
+// comment before item
+fn bar() {
+ let x = 1;
+ // comment after statement
+
+
+ // comment before statement
+ let y = 2;
+ let z = 3;
+
+
+ println!("x + y + z = {}", x + y + z);
+}
diff --git a/src/tools/rustfmt/tests/source/reorder-impl-items.rs b/src/tools/rustfmt/tests/source/reorder-impl-items.rs
new file mode 100644
index 000000000..16efff55b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/reorder-impl-items.rs
@@ -0,0 +1,15 @@
+// rustfmt-reorder_impl_items: true
+
+// The ordering of the following impl items should be idempotent.
+impl<'a> Command<'a> {
+ pub fn send_to(&self, w: &mut io::Write) -> io::Result<()> {
+ match self {
+ &Command::Data(ref c) => c.send_to(w),
+ &Command::Vrfy(ref c) => c.send_to(w),
+ }
+ }
+
+ pub fn parse(arg: &[u8]) -> Result<Command, ParseError> {
+ nom_to_result(command(arg))
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/single-line-if-else.rs b/src/tools/rustfmt/tests/source/single-line-if-else.rs
new file mode 100644
index 000000000..bcde390d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/single-line-if-else.rs
@@ -0,0 +1,49 @@
+
+// Format if-else expressions on a single line, when possible.
+
+fn main() {
+ let a = if 1 > 2 {
+ unreachable!()
+ } else {
+ 10
+ };
+
+ let a = if x { 1 } else if y { 2 } else { 3 };
+
+ let b = if cond() {
+ 5
+ } else {
+ // Brief comment.
+ 10
+ };
+
+ let c = if cond() {
+ statement();
+
+ 5
+ } else {
+ 10
+ };
+
+ let d = if let Some(val) = turbo
+ { "cool" } else {
+ "beans" };
+
+ if cond() { statement(); } else { other_statement(); }
+
+ if true {
+ do_something()
+ }
+
+ let x = if veeeeeeeeery_loooooong_condition() { aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa } else { bbbbbbbbbb };
+
+ let x = if veeeeeeeeery_loooooong_condition() { aaaaaaaaaaaaaaaaaaaaaaaaa } else {
+ bbbbbbbbbb };
+
+ funk(if test() {
+ 1
+ } else {
+ 2
+ },
+ arg2);
+}
diff --git a/src/tools/rustfmt/tests/source/single-line-macro/v1.rs b/src/tools/rustfmt/tests/source/single-line-macro/v1.rs
new file mode 100644
index 000000000..a3aa631ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/single-line-macro/v1.rs
@@ -0,0 +1,10 @@
+// rustfmt-version: One
+
+// #2652
+// Preserve trailing comma inside macro, even if it looks an array.
+macro_rules! bar {
+ ($m:ident) => {
+ $m!([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z,]);
+ $m!([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z]);
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/single-line-macro/v2.rs b/src/tools/rustfmt/tests/source/single-line-macro/v2.rs
new file mode 100644
index 000000000..51a665f75
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/single-line-macro/v2.rs
@@ -0,0 +1,10 @@
+// rustfmt-version: Two
+
+// #2652
+// Preserve trailing comma inside macro, even if it looks an array.
+macro_rules! bar {
+ ($m:ident) => {
+ $m!([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z,]);
+ $m!([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z]);
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/soft-wrapping.rs b/src/tools/rustfmt/tests/source/soft-wrapping.rs
new file mode 100644
index 000000000..b0682d4db
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/soft-wrapping.rs
@@ -0,0 +1,15 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 80
+// Soft wrapping for comments.
+
+// #535, soft wrapping for comments
+// Compare the lowest `f32` of both inputs for greater than or equal. The
+// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is
+// ggreater than or equal `b.extract(0)`, or `0` otherwise. The upper 96 bits off
+// the result are the upper 96 bits of `a`.
+
+/// Compares the lowest `f32` of both inputs for greater than or equal. The
+/// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is
+/// greater than or equal `b.extract(0)`, or `0` otherwise. The upper 96 bits off
+/// the result are the upper 96 bits of `a`.
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/source/space-not-before-newline.rs b/src/tools/rustfmt/tests/source/space-not-before-newline.rs
new file mode 100644
index 000000000..2a1e18569
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/space-not-before-newline.rs
@@ -0,0 +1,8 @@
+struct Foo {
+ a: (),
+ // spaces ^^^ to be removed
+}
+enum Foo {
+ Bar,
+ // spaces ^^^ to be removed
+}
diff --git a/src/tools/rustfmt/tests/source/spaces-around-ranges.rs b/src/tools/rustfmt/tests/source/spaces-around-ranges.rs
new file mode 100644
index 000000000..1936b5e16
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/spaces-around-ranges.rs
@@ -0,0 +1,15 @@
+// rustfmt-spaces_around_ranges: true
+
+fn bar(v: &[u8]) {}
+
+fn foo() {
+ let a = vec![0; 20];
+ for j in 0..=20 {
+ for i in 0..3 {
+ bar(a[i..j]);
+ bar(a[i..]);
+ bar(a[..j]);
+ bar(a[..=(j + 1)]);
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/statements.rs b/src/tools/rustfmt/tests/source/statements.rs
new file mode 100644
index 000000000..c840b8ce1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/statements.rs
@@ -0,0 +1,43 @@
+// FIXME(calebcartwright) - Hopefully one day we can
+// elide these redundant semis like we do in other contexts.
+fn redundant_item_semis() {
+ impl Foo {
+ fn get(&self) -> usize {
+ 5
+ }
+ };
+
+ impl Bar {
+ fn get(&self) -> usize {
+ 5
+ }
+ } /*asdfsf*/;
+
+
+ impl Baz {
+ fn get(&self) -> usize {
+ 5
+ }
+ } /*asdfsf*/
+
+ // why would someone do this
+ ;
+
+
+ impl Qux {
+ fn get(&self) -> usize {
+ 5
+ }
+ }
+
+ // why
+ ;
+
+ impl Lorem {
+ fn get(&self) -> usize {
+ 5
+ }
+ }
+ // oh why
+ ;
+}
\ No newline at end of file
diff --git a/src/tools/rustfmt/tests/source/static.rs b/src/tools/rustfmt/tests/source/static.rs
new file mode 100644
index 000000000..970786381
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/static.rs
@@ -0,0 +1,23 @@
+const FILE_GENERIC_READ: DWORD =
+ STANDARD_RIGHTS_READ | FILE_READ_DATA |
+ FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE;
+
+static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
+ "gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
+ "nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
+ "xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
+
+static mut name: SomeType = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa;
+
+ pub static count : u8 = 10 ;
+
+pub const test: &Type = &val;
+
+impl Color {
+ pub const WHITE: u32 = 10;
+}
+
+// #1391
+pub const XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX: NTSTATUS = 0 as usize;
+
+pub const XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX: Yyyyyyyyyyyyyyyyyyyyyyyyyyyy = 1;
diff --git a/src/tools/rustfmt/tests/source/string-lit-2.rs b/src/tools/rustfmt/tests/source/string-lit-2.rs
new file mode 100644
index 000000000..6b95e25a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/string-lit-2.rs
@@ -0,0 +1,25 @@
+fn main() -> &'static str {
+ let too_many_lines = "Hello";
+ let leave_me = "sssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss\
+ s
+ jjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjj";
+}
+
+fn issue_1237() {
+ let msg = "eedadn\n\
+ drvtee\n\
+ eandsr\n\
+ raavrd\n\
+ atevrs\n\
+ tsrnev\n\
+ sdttsa\n\
+ rasrtv\n\
+ nssdts\n\
+ ntnada\n\
+ svetve\n\
+ tesnvt\n\
+ vntsnd\n\
+ vrdear\n\
+ dvrsen\n\
+ enarar";
+}
diff --git a/src/tools/rustfmt/tests/source/string-lit.rs b/src/tools/rustfmt/tests/source/string-lit.rs
new file mode 100644
index 000000000..7719e76ff
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/string-lit.rs
@@ -0,0 +1,61 @@
+// rustfmt-format_strings: true
+// Long string literals
+
+fn main() -> &'static str {
+ let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAaAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
+ let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
+ let str = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
+
+ let too_many_lines = "H\
+ e\
+ l\
+ l\
+ o";
+
+ // Make sure we don't break after an escape character.
+ let odd_length_name = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
+ let even_length_name = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
+
+ let really_long_variable_name = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
+
+ let raw_string = r#"Do
+not
+remove
+formatting"#;
+
+ filename.replace(" ", "\\" );
+
+ let xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx =
+ funktion("yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy");
+
+ let unicode = "a̐éö̲\r\n";
+ let unicode2 = "Löwe 老虎 Léopard";
+ let unicode3 = "中华Việt Nam";
+ let unicode4 = "☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃";
+
+ "stuffin'"
+}
+
+fn issue682() {
+ let a = "hello \\ o/";
+ let b = a.replace("\\ ", "\\");
+}
+
+fn issue716() {
+ println!("forall x. mult(e(), x) = x /\\
+ forall x. mult(x, x) = e()");
+}
+
+fn issue_1282() {
+ {
+ match foo {
+ Permission::AndroidPermissionAccessLocationExtraCommands => {
+ "android.permission.ACCESS_LOCATION_EXTRA_COMMANDS"
+ }
+ }
+ }
+}
+
+// #1987
+#[link_args = "-s NO_FILESYSTEM=1 -s NO_EXIT_RUNTIME=1 -s EXPORTED_RUNTIME_METHODS=[\"_malloc\"] -s NO_DYNAMIC_EXECUTION=1 -s ELIMINATE_DUPLICATE_FUNCTIONS=1 -s EVAL_CTORS=1"]
+extern "C" {}
diff --git a/src/tools/rustfmt/tests/source/string_punctuation.rs b/src/tools/rustfmt/tests/source/string_punctuation.rs
new file mode 100644
index 000000000..552c461ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/string_punctuation.rs
@@ -0,0 +1,9 @@
+// rustfmt-format_strings: true
+
+fn main() {
+ println!("ThisIsAReallyLongStringWithNoSpaces.It_should_prefer_to_break_onpunctuation:Likethisssssssssssss");
+ format!("{}__{}__{}ItShouldOnlyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyNoticeSemicolonsPeriodsColonsAndCommasAndResortToMid-CharBreaksAfterPunctuation{}{}",x,y,z,a,b);
+ println!("aaaaaaaaaaaaaaaaaaaaaaaaaaaaalhijalfhiigjapdighjapdigjapdighdapighapdighpaidhg;adopgihadoguaadbadgad,qeoihapethae8t0aet8haetadbjtaeg;ooeouthaoeutgadlgajduabgoiuadogabudogubaodugbadgadgadga;adoughaoeugbaouea");
+ println!("sentuhaesnuthaesnutheasunteahusnaethuseantuihaesntdiastnidaetnuhaideuhsenathe。WeShouldSupportNonAsciiPunctuations§ensuhatheasunteahsuneathusneathuasnuhaesnuhaesnuaethusnaetuheasnuth");
+ println!("ThisIsASampleOfCJKString.祇園精舍の鐘の声、諸行無常の響きあり。娑羅双樹の花の色、盛者必衰の理をあらはす。奢れる人も久しからず、ただ春の夜の夢のごとし。猛き者もつひにはほろびぬ、ひとへに風の前の塵に同じ。");
+}
diff --git a/src/tools/rustfmt/tests/source/struct-field-attributes.rs b/src/tools/rustfmt/tests/source/struct-field-attributes.rs
new file mode 100644
index 000000000..76d6eda88
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct-field-attributes.rs
@@ -0,0 +1,52 @@
+// #1535
+#![feature(struct_field_attributes)]
+
+struct Foo {
+ bar: u64,
+
+ #[cfg(test)]
+ qux: u64,
+}
+
+fn do_something() -> Foo {
+ Foo {
+ bar: 0,
+
+ #[cfg(test)]
+ qux: 1,
+ }
+}
+
+fn main() {
+ do_something();
+}
+
+// #1462
+struct Foo {
+ foo: usize,
+ #[cfg(feature="include-bar")]
+ bar: usize,
+}
+
+fn new_foo() -> Foo {
+ Foo {
+ foo: 0,
+ #[cfg(feature="include-bar")]
+ bar: 0,
+ }
+}
+
+// #2044
+pub enum State {
+ Closure(#[cfg_attr(feature = "serde_derive", serde(state_with = "::serialization::closure"))] GcPtr<ClosureData>),
+}
+
+struct Fields(
+ #[cfg_attr(feature = "serde_derive", serde(state_with = "::base::serialization::shared"))] Arc<Vec<InternedStr>>,
+);
+
+// #2309
+pub struct A {
+#[doc="XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"]
+pub foos:Vec<bool>
+}
diff --git a/src/tools/rustfmt/tests/source/struct_field_doc_comment.rs b/src/tools/rustfmt/tests/source/struct_field_doc_comment.rs
new file mode 100644
index 000000000..191a62100
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct_field_doc_comment.rs
@@ -0,0 +1,72 @@
+// #5215
+struct MyTuple(
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */ u32,
+ /// Doc Comments
+ // TODO note
+ u64,
+);
+
+struct MyTuple(
+ #[cfg(unix)] // some comment
+ u64,
+ #[cfg(not(unix))] /*block comment */
+ u32,
+);
+
+struct MyTuple(
+ #[cfg(unix)]
+ // some comment
+ u64,
+ #[cfg(not(unix))]
+ /*block comment */
+ u32,
+);
+
+struct MyTuple(
+ #[cfg(unix)] // some comment
+ pub u64,
+ #[cfg(not(unix))] /*block comment */
+ pub(crate) u32,
+);
+
+struct MyTuple(
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ pub u32,
+ /// Doc Comments
+ // TODO note
+ pub(crate) u64,
+);
+
+struct MyStruct {
+ #[cfg(unix)] // some comment
+ a: u64,
+ #[cfg(not(unix))] /*block comment */
+ b: u32,
+}
+
+struct MyStruct {
+ #[cfg(unix)] // some comment
+ pub a: u64,
+ #[cfg(not(unix))] /*block comment */
+ pub(crate) b: u32,
+}
+
+struct MyStruct {
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ a: u32,
+ /// Doc Comments
+ // TODO note
+ b: u64,
+}
+
+struct MyStruct {
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ pub a: u32,
+ /// Doc Comments
+ // TODO note
+ pub(crate) b: u64,
+}
diff --git a/src/tools/rustfmt/tests/source/struct_lits.rs b/src/tools/rustfmt/tests/source/struct_lits.rs
new file mode 100644
index 000000000..c5aaf7ef8
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct_lits.rs
@@ -0,0 +1,143 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo {a: x };
+
+ Foo { a: foo() /* comment*/, /* comment*/ b: bar(), ..something };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a:Bar,
+ b:f() };
+
+ Quux { x: if cond { bar(); }, y: baz() };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item
+ };
+
+ Some(Data::MethodCallData(MethodCallData {
+ span: sub_span.unwrap(),
+ scope: self.enclosing_scope(id),
+ ref_id: def_id,
+ decl_id: Some(decl_id),
+ }));
+
+ Diagram { /* o This graph demonstrates how
+ * / \ significant whitespace is
+ * o o preserved.
+ * /|\ \
+ * o o o o */
+ graph: G, }
+}
+
+fn matcher() {
+ TagTerminatedByteMatcher {
+ matcher: ByteMatcher {
+ pattern: b"<HTML",
+ mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
+ },
+ };
+}
+
+fn issue177() {
+ struct Foo<T> { memb: T }
+ let foo = Foo::<i64> { memb: 10 };
+}
+
+fn issue201() {
+ let s = S{a:0, .. b};
+}
+
+fn issue201_2() {
+ let s = S{a: S2{ .. c}, .. b};
+}
+
+fn issue278() {
+ let s = S {
+ a: 0,
+ //
+ b: 0,
+ };
+ let s1 = S {
+ a: 0,
+ // foo
+ //
+ // bar
+ b: 0,
+ };
+}
+
+fn struct_exprs() {
+ Foo
+ { a : 1, b:f( 2)};
+ Foo{a:1,b:f(2),..g(3)};
+ LoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooongStruct { ..base };
+ IntrinsicISizesContribution { content_intrinsic_sizes: IntrinsicISizes { minimum_inline_size: 0, }, };
+}
+
+fn issue123() {
+ Foo { a: b, c: d, e: f };
+
+ Foo { a: bb, c: dd, e: ff };
+
+ Foo { a: ddddddddddddddddddddd, b: cccccccccccccccccccccccccccccccccccccc };
+}
+
+fn issue491() {
+ Foo {
+ guard: None,
+ arm: 0, // Comment
+ };
+
+ Foo {
+ arm: 0, // Comment
+ };
+
+ Foo { a: aaaaaaaaaa, b: bbbbbbbb, c: cccccccccc, d: dddddddddd, /* a comment */
+ e: eeeeeeeee };
+}
+
+fn issue698() {
+ Record {
+ ffffffffffffffffffffffffffields: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ };
+ Record {
+ ffffffffffffffffffffffffffields: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ }
+}
+
+fn issue835() {
+ MyStruct {};
+ MyStruct { /* a comment */ };
+ MyStruct {
+ // Another comment
+ };
+ MyStruct {}
+}
+
+fn field_init_shorthand() {
+ MyStruct { x, y, z };
+ MyStruct { x, y, z, .. base };
+ Foo { aaaaaaaaaa, bbbbbbbb, cccccccccc, dddddddddd, /* a comment */
+ eeeeeeeee };
+ Record { ffffffffffffffffffffffffffieldsaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa };
+}
diff --git a/src/tools/rustfmt/tests/source/struct_lits_multiline.rs b/src/tools/rustfmt/tests/source/struct_lits_multiline.rs
new file mode 100644
index 000000000..256ba1bbd
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct_lits_multiline.rs
@@ -0,0 +1,81 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-struct_lit_single_line: false
+
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo {a: x };
+
+ Foo { a: foo() /* comment*/, /* comment*/ b: bar(), ..something };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: foo(), b: bar(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: foo(), b: bar(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a:Bar,
+ b:foo() };
+
+ Quux { x: if cond { bar(); }, y: baz() };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item
+ };
+
+ Some(Data::MethodCallData(MethodCallData {
+ span: sub_span.unwrap(),
+ scope: self.enclosing_scope(id),
+ ref_id: def_id,
+ decl_id: Some(decl_id),
+ }));
+
+ Diagram { /* o This graph demonstrates how
+ * / \ significant whitespace is
+ * o o preserved.
+ * /|\ \
+ * o o o o */
+ graph: G, }
+}
+
+fn matcher() {
+ TagTerminatedByteMatcher {
+ matcher: ByteMatcher {
+ pattern: b"<HTML",
+ mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
+ },
+ };
+}
+
+fn issue177() {
+ struct Foo<T> { memb: T }
+ let foo = Foo::<i64> { memb: 10 };
+}
+
+fn issue201() {
+ let s = S{a:0, .. b};
+}
+
+fn issue201_2() {
+ let s = S{a: S2{ .. c}, .. b};
+}
+
+fn issue491() {
+ Foo {
+ guard: None,
+ arm: 0, // Comment
+ };
+}
diff --git a/src/tools/rustfmt/tests/source/struct_lits_visual.rs b/src/tools/rustfmt/tests/source/struct_lits_visual.rs
new file mode 100644
index 000000000..e84652e9e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct_lits_visual.rs
@@ -0,0 +1,46 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-indent_style: Visual
+
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo {a: x };
+
+ Foo { a: foo() /* comment*/, /* comment*/ b: bar(), ..something };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a:Bar,
+ b:f() };
+
+ Quux { x: if cond { bar(); }, y: baz() };
+
+ Baz { x: yxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, z: zzzzz // test
+ };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item
+ };
+
+ Diagram { /* o This graph demonstrates how
+ * / \ significant whitespace is
+ * o o preserved.
+ * /|\ \
+ * o o o o */
+ graph: G, }
+}
diff --git a/src/tools/rustfmt/tests/source/struct_lits_visual_multiline.rs b/src/tools/rustfmt/tests/source/struct_lits_visual_multiline.rs
new file mode 100644
index 000000000..d2990f8da
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct_lits_visual_multiline.rs
@@ -0,0 +1,44 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-indent_style: Visual
+// rustfmt-struct_lit_single_line: false
+
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo {a: x };
+
+ Foo { a: foo() /* comment*/, /* comment*/ b: bar(), ..something };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: foo(), b: bar(), };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a:Bar,
+ b:foo() };
+
+ Quux { x: if cond { bar(); }, y: baz() };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item
+ };
+
+ Diagram { /* o This graph demonstrates how
+ * / \ significant whitespace is
+ * o o preserved.
+ * /|\ \
+ * o o o o */
+ graph: G, }
+}
diff --git a/src/tools/rustfmt/tests/source/struct_tuple_visual.rs b/src/tools/rustfmt/tests/source/struct_tuple_visual.rs
new file mode 100644
index 000000000..f95f3fe4f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/struct_tuple_visual.rs
@@ -0,0 +1,36 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-indent_style: Visual
+fn foo() {
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(f(), b());
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(// Comment
+ foo(), /* Comment */
+ // Comment
+ bar() /* Comment */);
+
+ Foo(Bar, f());
+
+ Quux(if cond {
+ bar();
+ },
+ baz());
+
+ Baz(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ zzzzz /* test */);
+
+ A(// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante
+ // hendrerit. Donec et mollis dolor.
+ item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ Item);
+
+ Diagram(// o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ G)
+}
diff --git a/src/tools/rustfmt/tests/source/structs.rs b/src/tools/rustfmt/tests/source/structs.rs
new file mode 100644
index 000000000..537151b27
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/structs.rs
@@ -0,0 +1,298 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+ /// A Doc comment
+#[AnAttribute]
+pub struct Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ #[AnAttribute]
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+ pub i: TypeForPublicField
+}
+
+// Destructuring
+fn foo() {
+ S { x: 5,
+ ..};
+ Struct {..} = Struct { a: 1, b: 4 };
+ Struct { a, .. } = Struct { a: 1, b: 2, c: 3};
+ TupleStruct(a,.., b) = TupleStruct(1, 2);
+ TupleStruct( ..) = TupleStruct(3, 4);
+ TupleStruct(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, .., bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb) = TupleStruct(1, 2);
+}
+
+// #1095
+struct S<T: /* comment */> {
+ t: T,
+}
+
+// #1029
+pub struct Foo {
+ #[doc(hidden)]
+ // This will NOT get deleted!
+ bar: String, // hi
+}
+
+// #1029
+struct X {
+ // `x` is an important number.
+ #[allow(unused)] // TODO: use
+ x: u32,
+}
+
+// #410
+#[allow(missing_docs)]
+pub struct Writebatch<K: Key> {
+ #[allow(dead_code)] //only used for holding the internal pointer
+ writebatch: RawWritebatch,
+ marker: PhantomData<K>,
+}
+
+struct Bar;
+
+struct NewType(Type, OtherType);
+
+struct
+NewInt <T: Copy>(pub i32, SomeType /* inline comment */, T /* sup */
+
+
+ );
+
+struct Qux<'a,
+ N: Clone + 'a,
+ E: Clone + 'a,
+ G: Labeller<'a, N, E> + GraphWalk<'a, N, E>,
+ W: Write + Copy>
+(
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, // Comment
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+ #[AnAttr]
+ // Comment
+ /// Testdoc
+ G,
+ pub W,
+);
+
+struct Tuple(/*Comment 1*/ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ /* Comment 2 */ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,);
+
+// With a where-clause and generics.
+pub struct Foo<'a, Y: Baz>
+ where X: Whatever
+{
+ f: SomeType, // Comment beside a field
+}
+
+struct Baz {
+
+ a: A, // Comment A
+ b: B, // Comment B
+ c: C, // Comment C
+
+}
+
+struct Baz {
+ a: A, // Comment A
+
+ b: B, // Comment B
+
+
+
+
+ c: C, // Comment C
+}
+
+struct Baz {
+
+ a: A,
+
+ b: B,
+ c: C,
+
+
+
+
+ d: D
+
+}
+
+struct Baz
+{
+ // Comment A
+ a: A,
+
+ // Comment B
+b: B,
+ // Comment C
+ c: C,}
+
+// Will this be a one-liner?
+struct Tuple(
+ A, //Comment
+ B
+);
+
+pub struct State<F: FnMut() -> time::Timespec> { now: F }
+
+pub struct State<F: FnMut() -> ()> { now: F }
+
+pub struct State<F: FnMut()> { now: F }
+
+struct Palette { /// A map of indices in the palette to a count of pixels in approximately that color
+ foo: i32}
+
+// Splitting a single line comment into a block previously had a misalignment
+// when the field had attributes
+struct FieldsWithAttributes {
+ // Pre Comment
+ #[rustfmt::skip] pub host:String, // Post comment BBBBBBBBBBBBBB BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBBB BBBBBBBBBBB
+ //Another pre comment
+ #[attr1]
+ #[attr2] pub id: usize // CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCC CCCCCCCCCCCC
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep: node::Handle<IdRef<'id, Node<K, V>>,
+ Type,
+ NodeType>,
+}
+
+struct Foo<T>(T);
+struct Foo<T>(T) where T: Copy, T: Eq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUU);
+struct Foo<T>(TTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTT) where T: PartialEq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTTTT) where T: PartialEq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUUUUUUU, TTTTTTTTTTTTTTTTTTT, UUUUUUUUUUUUUUUUUUU) where T: PartialEq;
+struct Foo<T>(TTTTTTTTTTTTTTTTT, // Foo
+ UUUUUUUUUUUUUUUUUUUUUUUU /* Bar */,
+ // Baz
+ TTTTTTTTTTTTTTTTTTT,
+ // Qux (FIXME #572 - doc comment)
+ UUUUUUUUUUUUUUUUUUU);
+
+mod m {
+ struct X<T> where T: Sized {
+ a: T,
+ }
+}
+
+struct Foo<T>(TTTTTTTTTTTTTTTTTTT,
+ /// Qux
+ UUUUUUUUUUUUUUUUUUU);
+
+struct Issue677 {
+ pub ptr: *const libc::c_void,
+ pub trace: fn( obj:
+ *const libc::c_void, tracer : *mut JSTracer ),
+}
+
+struct Foo {}
+struct Foo {
+ }
+struct Foo {
+ // comment
+ }
+struct Foo {
+ // trailing space ->
+
+
+ }
+struct Foo { /* comment */ }
+struct Foo( /* comment */ );
+
+struct LongStruct {
+ a: A,
+ the_quick_brown_fox_jumps_over_the_lazy_dog:AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep: node::Handle<IdRef<'id, Node<Key, Value>>,
+ Type,
+ NodeType>,
+}
+
+struct Foo<C=()>(String);
+
+// #1364
+fn foo() {
+ convex_shape.set_point(0, &Vector2f { x: 400.0, y: 100.0 });
+ convex_shape.set_point(1, &Vector2f { x: 500.0, y: 70.0 });
+ convex_shape.set_point(2, &Vector2f { x: 450.0, y: 100.0 });
+ convex_shape.set_point(3, &Vector2f { x: 580.0, y: 150.0 });
+}
+
+// Vertical alignment
+struct Foo {
+ aaaaa: u32, // a
+
+ b: u32, // b
+ cc: u32, // cc
+
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 1
+ yy: u32, // comment2
+ zzz: u32, // comment3
+
+ aaaaaa: u32, // comment4
+ bb: u32, // comment5
+ // separate
+ dd: u32, // comment7
+ c: u32, // comment6
+
+ aaaaaaa: u32, /* multi
+ * line
+ * comment
+ */
+ b: u32, // hi
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ // separate
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ // separate
+ please_do_not_push_this_comment3: u32, // comment3
+}
+
+// structs with long identifier
+struct Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong {}
+struct Looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong {}
+struct Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong {}
+struct Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong { x: i32 }
+
+// structs with visibility, do not duplicate visibility (#2110).
+pub(in self) struct Foo{}
+pub(super) struct Foo{}
+pub(crate) struct Foo{}
+pub(in self) struct Foo();
+pub(super) struct Foo();
+pub(crate) struct Foo();
+
+// #2125
+pub struct ReadinessCheckRegistry(Mutex<HashMap<Arc<String>, Box<Fn() -> ReadinessCheck + Sync + Send>>>);
+
+// #2144 unit struct with generics
+struct MyBox<T:?Sized>;
+struct MyBoxx<T, S> where T: ?Sized, S: Clone;
+
+// #2208
+struct Test {
+ /// foo
+ #[serde(default)]
+ pub join: Vec<String>,
+ #[serde(default)] pub tls: bool,
+}
+
+// #2818
+struct Paren((i32)) where i32: Trait;
+struct Parens((i32, i32)) where i32: Trait;
diff --git a/src/tools/rustfmt/tests/source/trailing-comma-never.rs b/src/tools/rustfmt/tests/source/trailing-comma-never.rs
new file mode 100644
index 000000000..c74267cd1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/trailing-comma-never.rs
@@ -0,0 +1,45 @@
+// rustfmt-trailing_comma: Never
+
+enum X {
+ A,
+ B,
+}
+
+enum Y {
+ A,
+ B
+}
+
+enum TupX {
+ A(u32),
+ B(i32, u16),
+}
+
+enum TupY {
+ A(u32),
+ B(i32, u16)
+}
+
+enum StructX {
+ A {
+ s: u16,
+ },
+ B {
+ u: u32,
+ i: i32,
+ },
+}
+
+enum StructY {
+ A {
+ s: u16,
+ },
+ B {
+ u: u32,
+ i: i32,
+ }
+}
+
+static XXX: [i8; 64] = [
+1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,1, 1, 1,
+];
diff --git a/src/tools/rustfmt/tests/source/trailing_commas.rs b/src/tools/rustfmt/tests/source/trailing_commas.rs
new file mode 100644
index 000000000..3e5fcc808
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/trailing_commas.rs
@@ -0,0 +1,47 @@
+// rustfmt-match_block_trailing_comma: true
+// rustfmt-trailing_comma: Always
+
+fn main() {
+ match foo {
+ x => {}
+ y => {
+ foo();
+ }
+ _ => x
+ }
+}
+
+fn f<S, T>(x: T, y: S) -> T where T: P, S: Q
+{
+ x
+}
+
+impl Trait for T where T: P
+{
+ fn f(x: T) -> T where T: Q + R
+ {
+ x
+ }
+}
+
+struct Pair<S, T> where T: P, S: P + Q {
+ a: T,
+ b: S
+}
+
+struct TupPair<S, T> (S, T) where T: P, S: P + Q;
+
+enum E<S, T> where S: P, T: P {
+ A {a: T},
+}
+
+type Double<T> where T: P, T: Q = Pair<T, T>;
+
+extern "C" {
+ fn f<S, T>(x: T, y: S) -> T where T: P, S: Q;
+}
+
+trait Q<S, T> where T: P, S: R
+{
+ fn f<U, V>(self, x: T, y: S, z: U) -> Self where U: P, V: P;
+}
diff --git a/src/tools/rustfmt/tests/source/trailing_comments/hard_tabs.rs b/src/tools/rustfmt/tests/source/trailing_comments/hard_tabs.rs
new file mode 100644
index 000000000..88249aa5f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/trailing_comments/hard_tabs.rs
@@ -0,0 +1,21 @@
+// rustfmt-version: Two
+// rustfmt-wrap_comments: true
+// rustfmt-hard_tabs: true
+
+impl Foo {
+ fn foo() {
+ bar(); // comment 1
+ // comment 2
+ // comment 3
+ baz();
+ }
+}
+
+fn lorem_ipsum() {
+ let f = bar(); // Donec consequat mi. Quisque vitae dolor. Integer lobortis. Maecenas id nulla. Lorem.
+ // Id turpis. Nam posuere lectus vitae nibh. Etiam tortor orci, sagittis malesuada, rhoncus quis, hendrerit eget, libero. Quisque commodo nulla at nunc. Mauris consequat, enim vitae venenatis sollicitudin, dolor orci bibendum enim, a sagittis nulla nunc quis elit. Phasellus augue. Nunc suscipit, magna tincidunt lacinia faucibus, lacus tellus ornare purus, a pulvinar lacus orci eget nibh. Maecenas sed nibh non lacus tempor faucibus. In hac habitasse platea dictumst. Vivamus a orci at nulla tristique condimentum. Donec arcu quam, dictum accumsan, convallis accumsan, cursus sit amet, ipsum. In pharetra sagittis nunc.
+ let b = baz();
+
+ let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
+ // TODO(emilio): It may make sense to make this range [.01, 10.0], to align with css-fonts-4's range of [1, 1000].
+}
diff --git a/src/tools/rustfmt/tests/source/trailing_comments/soft_tabs.rs b/src/tools/rustfmt/tests/source/trailing_comments/soft_tabs.rs
new file mode 100644
index 000000000..7845f713b
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/trailing_comments/soft_tabs.rs
@@ -0,0 +1,21 @@
+// rustfmt-version: Two
+// rustfmt-wrap_comments: true
+
+pub const IFF_MULTICAST: ::c_int = 0x0000000800; // Supports multicast
+// Multicast using broadcst. add.
+
+pub const SQ_CRETAB: u16 = 0x000e; // CREATE TABLE
+pub const SQ_DRPTAB: u16 = 0x000f; // DROP TABLE
+pub const SQ_CREIDX: u16 = 0x0010; // CREATE INDEX
+//const SQ_DRPIDX: u16 = 0x0011; // DROP INDEX
+//const SQ_GRANT: u16 = 0x0012; // GRANT
+//const SQ_REVOKE: u16 = 0x0013; // REVOKE
+
+fn foo() {
+ let f = bar(); // Donec consequat mi. Quisque vitae dolor. Integer lobortis. Maecenas id nulla. Lorem.
+ // Id turpis. Nam posuere lectus vitae nibh. Etiam tortor orci, sagittis malesuada, rhoncus quis, hendrerit eget, libero. Quisque commodo nulla at nunc. Mauris consequat, enim vitae venenatis sollicitudin, dolor orci bibendum enim, a sagittis nulla nunc quis elit. Phasellus augue. Nunc suscipit, magna tincidunt lacinia faucibus, lacus tellus ornare purus, a pulvinar lacus orci eget nibh. Maecenas sed nibh non lacus tempor faucibus. In hac habitasse platea dictumst. Vivamus a orci at nulla tristique condimentum. Donec arcu quam, dictum accumsan, convallis accumsan, cursus sit amet, ipsum. In pharetra sagittis nunc.
+ let b = baz();
+
+ let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
+ // TODO(emilio): It may make sense to make this range [.01, 10.0], to align with css-fonts-4's range of [1, 1000].
+}
diff --git a/src/tools/rustfmt/tests/source/trait.rs b/src/tools/rustfmt/tests/source/trait.rs
new file mode 100644
index 000000000..b6db9e159
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/trait.rs
@@ -0,0 +1,183 @@
+// Test traits
+
+trait Foo {
+ fn bar(x: i32 ) -> Baz< U> { Baz::new()
+ }
+
+ fn baz(a: AAAAAAAAAAAAAAAAAAAAAA,
+b: BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB)
+-> RetType;
+
+ fn foo(a: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, // Another comment
+b: BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB)
+ -> RetType ; // Some comment
+
+ fn baz(&mut self ) -> i32 ;
+
+fn increment(& mut self, x: i32 );
+
+ fn read(&mut self, x: BufReader<R> /* Used to be MemReader */)
+ where R: Read;
+}
+
+pub trait WriteMessage {
+ fn write_message (&mut self, &FrontendMessage) -> io::Result<()>;
+}
+
+trait Runnable {
+ fn handler(self: & Runnable );
+}
+
+trait TraitWithExpr {
+ fn fn_with_expr(x: [i32; 1]);
+}
+
+trait Test {
+ fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F) -> Result<T, Self::Error> where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+}
+
+trait T<> {}
+
+trait Foo { type Bar: Baz; type Inner: Foo = Box< Foo >; }
+
+trait ConstCheck<T>:Foo where T: Baz {
+ const J: i32;
+}
+
+trait Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttt<T>
+ where T: Foo {}
+
+trait Ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt<T> where T: Foo {}
+
+trait FooBar<T> : Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt where J: Bar { fn test(); }
+
+trait WhereList<T, J> where T: Foo, J: Bar {}
+
+trait X /* comment */ {}
+trait Y // comment
+{
+}
+
+// #2055
+pub trait Foo:
+// A and C
+A + C
+// and B
+ + B
+{}
+
+// #2158
+trait Foo {
+ type ItRev = <MergingUntypedTimeSeries<SliceSeries<SliceWindow>> as UntypedTimeSeries>::IterRev;
+ type IteRev = <MergingUntypedTimeSeries<SliceSeries<SliceWindow>> as UntypedTimeSeries>::IterRev;
+}
+
+// #2331
+trait MyTrait<AAAAAAAAAAAAAAAAAAAA, BBBBBBBBBBBBBBBBBBBB, CCCCCCCCCCCCCCCCCCCC, DDDDDDDDDDDDDDDDDDDD> {
+ fn foo() {}
+}
+
+// Trait aliases
+trait FooBar =
+ Foo
+ + Bar;
+trait FooBar <A, B, C>=
+ Foo
+ + Bar;
+pub trait FooBar =
+ Foo
+ + Bar;
+pub trait FooBar <A, B, C>=
+ Foo
+ + Bar;
+trait AAAAAAAAAAAAAAAAAA = BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDD;
+pub trait AAAAAAAAAAAAAAAAAA = BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDD;
+trait AAAAAAAAAAAAAAAAAAA = BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDD;
+trait AAAAAAAAAAAAAAAAAA = BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDDD;
+trait AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA<A, B, C, D, E> = FooBar;
+trait AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA<A, B, C, D, E> = FooBar;
+#[rustfmt::skip]
+trait FooBar = Foo
+ + Bar;
+
+// #2637
+auto trait Example {}
+pub auto trait PubExample {}
+pub unsafe auto trait PubUnsafeExample {}
+
+// #3006
+trait Foo<'a> {
+ type Bar< 'a >;
+}
+
+impl<'a> Foo<'a> for i32 {
+ type Bar< 'a > = i32;
+}
+
+// #3092
+pub mod test {
+ pub trait ATraitWithALooongName {}
+ pub trait ATrait
+ :ATraitWithALooongName + ATraitWithALooongName + ATraitWithALooongName + ATraitWithALooongName
+{
+}
+}
+
+// Trait aliases with where clauses.
+trait A = where for<'b> &'b Self: Send;
+
+trait B = where for<'b> &'b Self: Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCC;
+trait B = where for<'b> &'b Self: Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCCC;
+trait B = where
+ for<'b> &'b Self:
+Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCCCCCCCCCCCCCCC;
+trait B = where
+ for<'b> &'b Self:
+Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC;
+
+trait B = where
+ for<'b> &'b Self:
+Send
+ + Clone
+ + Copy
+ + SomeTrait
+ + AAAAAAAA
+ + BBBBBBB
+ + CCCCCCCCC
+ + DDDDDDD
+ + DDDDDDDD
+ + DDDDDDDDD
+ + EEEEEEE;
+
+trait A<'a, 'b, 'c> = Debug<T> + Foo where for<'b> &'b Self: Send;
+
+trait B<'a, 'b, 'c> = Debug<T> +Foo
+where for<'b> &'b Self:
+Send
+ + Clone
+ + Copy
+ + SomeTrait
+ + AAAAAAAA
+ + BBBBBBB
+ + CCCCCCCCC
+ + DDDDDDD;
+
+trait B<'a, 'b, 'c,T> = Debug<'a, T> where for<'b> &'b Self:
+Send
+ + Clone
+ + Copy
+ + SomeTrait
+ + AAAAAAAA
+ + BBBBBBB
+ + CCCCCCCCC
+ + DDDDDDD
+ + DDDDDDDD
+ + DDDDDDDDD
+ + EEEEEEE;
+
+trait Visible {
+ pub const C: i32;
+ pub type T;
+ pub fn f();
+ pub fn g() {}
+}
diff --git a/src/tools/rustfmt/tests/source/try-conversion.rs b/src/tools/rustfmt/tests/source/try-conversion.rs
new file mode 100644
index 000000000..ed83ee9e1
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/try-conversion.rs
@@ -0,0 +1,18 @@
+// rustfmt-use_try_shorthand: true
+
+fn main() {
+ let x = try!(some_expr());
+
+ let y = try!(a.very.loooooooooooooooooooooooooooooooooooooong().chain().inside().weeeeeeeeeeeeeee()).test().0.x;
+}
+
+fn test() {
+ a?
+}
+
+fn issue1291() {
+ try!(fs::create_dir_all(&gitfiledir).chain_err(|| {
+ format!("failed to create the {} submodule directory for the workarea",
+ name)
+ }));
+}
diff --git a/src/tools/rustfmt/tests/source/try_block.rs b/src/tools/rustfmt/tests/source/try_block.rs
new file mode 100644
index 000000000..2e8d61f7e
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/try_block.rs
@@ -0,0 +1,30 @@
+// rustfmt-edition: 2018
+
+fn main() -> Result<(), !> {
+ let _x: Option<_> = try {
+ 4
+ };
+
+ try {}
+}
+
+fn baz() -> Option<i32> {
+ if (1 == 1) {
+ return try {
+ 5
+ };
+ }
+
+ // test
+ let x: Option<()> = try {
+ // try blocks are great
+ };
+
+ let y: Option<i32> = try {
+ 6
+ }; // comment
+
+ let x: Option<i32> = try { baz()?; baz()?; baz()?; 7 };
+
+ return None;
+}
diff --git a/src/tools/rustfmt/tests/source/tuple.rs b/src/tools/rustfmt/tests/source/tuple.rs
new file mode 100644
index 000000000..9a0f979fb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/tuple.rs
@@ -0,0 +1,63 @@
+// Test tuple litterals
+
+fn foo() {
+ let a = (a, a, a, a, a);
+ let aaaaaaaaaaaaaaaa = (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaa, aaaaaaaaaaaaaa);
+ let aaaaaaaaaaaaaaaaaaaaaa = (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaa);
+ let a = (a,);
+
+ let b = (// This is a comment
+ b, // Comment
+ b /* Trailing comment */);
+
+ // #1063
+ foo(x.0 .0);
+}
+
+fn a() {
+ ((aaaaaaaa,
+ aaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaa),)
+}
+
+fn b() {
+ ((bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb),
+ bbbbbbbbbbbbbbbbbb)
+}
+
+fn issue550() {
+ self.visitor.visit_volume(self.level.sector_id(sector), (floor_y,
+ if is_sky_flat(ceil_tex) {from_wad_height(self.height_range.1)} else {ceil_y}));
+}
+
+fn issue775() {
+ if indent {
+ let a = mk_object(&[("a".to_string(), Boolean(true)),
+ ("b".to_string(),
+ Array(vec![mk_object(&[("c".to_string(),
+ String("\x0c\r".to_string()))]),
+ mk_object(&[("d".to_string(), String("".to_string()))])]))]);
+ }
+}
+
+fn issue1725() {
+ bench_antialiased_lines!(bench_draw_antialiased_line_segment_diagonal, (10, 10), (450, 450));
+ bench_antialiased_lines!(bench_draw_antialiased_line_segment_shallow, (10, 10), (450, 80));
+}
+
+fn issue_4355() {
+ let _ = ((1,),).0.0;
+}
+
+// https://github.com/rust-lang/rustfmt/issues/4410
+impl Drop for LockGuard {
+ fn drop(&mut self) {
+ LockMap::unlock(&self.0.0, &self.0.1);
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/tuple_v2.rs b/src/tools/rustfmt/tests/source/tuple_v2.rs
new file mode 100644
index 000000000..922303383
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/tuple_v2.rs
@@ -0,0 +1,5 @@
+// rustfmt-version: Two
+
+fn issue_4355() {
+ let _ = ((1,),).0 .0;
+}
diff --git a/src/tools/rustfmt/tests/source/type-ascription.rs b/src/tools/rustfmt/tests/source/type-ascription.rs
new file mode 100644
index 000000000..4874094cc
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/type-ascription.rs
@@ -0,0 +1,10 @@
+
+fn main() {
+ let xxxxxxxxxxx = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy : SomeTrait<AA, BB, CC>;
+
+ let xxxxxxxxxxxxxxx = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
+
+ let z = funk(yyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzz, wwwwww): AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
+
+ x : u32 - 1u32 / 10f32 : u32
+}
diff --git a/src/tools/rustfmt/tests/source/type.rs b/src/tools/rustfmt/tests/source/type.rs
new file mode 100644
index 000000000..61ef73a3c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/type.rs
@@ -0,0 +1,168 @@
+// rustfmt-normalize_comments: true
+fn types() {
+ let x: [ Vec < _ > ] = [];
+ let y: * mut [ SomeType ; konst_funk() ] = expr();
+ let z: (/*#digits*/ usize, /*exp*/ i16) = funk();
+ let z: ( usize /*#digits*/ , i16 /*exp*/ ) = funk();
+}
+
+struct F {
+ f: extern "C" fn(x: u8, ... /* comment */),
+ g: extern "C" fn(x: u8,/* comment */ ...),
+ h: extern "C" fn(x: u8, ... ),
+ i: extern "C" fn(x: u8, /* comment 4*/ y: String, // comment 3
+ z: Foo, /* comment */ .../* comment 2*/ ),
+}
+
+fn issue_1006(def_id_to_string: for<'a, 'b> unsafe fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) {}
+
+fn impl_trait_fn_1() -> impl Fn(i32) -> Option<u8> {}
+
+fn impl_trait_fn_2<E>() -> impl Future<Item=&'a i64,Error=E> {}
+
+fn issue_1234() {
+ do_parse!(name: take_while1!(is_token) >> (Header))
+}
+
+// #2510
+impl CombineTypes {
+ pub fn pop_callback(
+ &self,
+ query_id: Uuid,
+ ) -> Option<
+ (
+ ProjectId,
+ Box<FnMut(&ProjectState, serde_json::Value, bool) -> () + Sync + Send>,
+ ),
+ > {
+ self.query_callbacks()(&query_id)
+ }
+}
+
+// #2859
+pub fn do_something<'a, T: Trait1 + Trait2 + 'a>(&fooo: u32) -> impl Future<
+ Item = (
+ impl Future<Item = (
+ ), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+impl Future<Item = (), Error = SomeError > + 'a,
+ ),
+ Error = SomeError,
+ >
+ +
+ 'a {
+}
+
+pub fn do_something<'a, T: Trait1 + Trait2 + 'a>( &fooo: u32,
+) -> impl Future<
+ Item = (
+impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+ >
+ + Future<
+ Item = (
+ impl Future<Item = (), Error = SomeError> + 'a,
+impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+ >
+ + Future<
+ Item = (
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+ >
+ +
+ 'a + 'b +
+ 'c {
+}
+
+// #3051
+token![impl];
+token![ impl ];
+
+// #3060
+macro_rules! foo {
+ ($foo_api: ty) => {
+ type Target = ( $foo_api ) + 'static;
+ }
+}
+
+type Target = ( FooAPI ) + 'static;
+
+// #3137
+fn foo<T>(t: T)
+where
+ T: ( FnOnce() -> () ) + Clone,
+ U: ( FnOnce() -> () ) + 'static,
+{
+}
+
+// #3117
+fn issue3117() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ let opt: &mut Option<MyLongTypeHere> =
+ unsafe { &mut *self.future.get() };
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+// #3139
+fn issue3139() {
+ assert_eq!(
+ to_json_value(&None :: <i32>).unwrap(),
+ json!( { "test": None :: <i32> } )
+ );
+}
+
+// #3180
+fn foo(a: SomeLongComplexType, b: SomeOtherLongComplexType) -> Box<Future<Item = AnotherLongType, Error = ALongErrorType>> {
+}
+
+type MyFn = fn(a: SomeLongComplexType, b: SomeOtherLongComplexType,) -> Box<Future<Item = AnotherLongType, Error = ALongErrorType>>;
+
+// Const bound
+
+trait T: ~ const Super {}
+
+const fn not_quite_const<S: ~ const T>() -> i32 { <S as T>::CONST }
+
+struct S<T:~ const ? Sized>(std::marker::PhantomData<T>);
+
+impl ~ const T {}
+
+fn apit(_: impl ~ const T) {}
+
+fn rpit() -> impl ~ const T { S }
+
+pub struct Foo<T: Trait>(T);
+impl<T: ~ const Trait> Foo<T> {
+ fn new(t: T) -> Self {
+ Self(t)
+ }
+}
+
+// #4357
+type T = typeof(
+1);
+impl T for .. {
+}
diff --git a/src/tools/rustfmt/tests/source/type_alias.rs b/src/tools/rustfmt/tests/source/type_alias.rs
new file mode 100644
index 000000000..58c807f40
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/type_alias.rs
@@ -0,0 +1,34 @@
+// rustfmt-normalize_comments: true
+
+type PrivateTest<'a, I> = (Box<Parser<Input=I, Output=char> + 'a>, Box<Parser<Input=I, Output=char> + 'a>);
+
+pub type PublicTest<'a, I, O> = Result<Vec<MyLongType>, Box<Parser<Input=I, Output=char> + 'a>, Box<Parser<Input=I, Output=char> + 'a>>;
+
+pub type LongGenericListTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B, C> = Option<Vec<MyType>>;
+
+pub type Exactly100CharsTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B> = Vec<i32>;
+
+pub type Exactly101CharsTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B> = Vec<Test>;
+
+pub type Exactly100CharsToEqualTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B, C> = Vec<i32>;
+
+pub type GenericsFitButNotEqualTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A1, B, C> = Vec<i32>;
+
+pub type CommentTest< /* Lifetime */ 'a
+ ,
+ // Type
+ T
+ > = ();
+
+
+pub type WithWhereClause<LONGPARAMETERNAME, T> where T: Clone, LONGPARAMETERNAME: Clone + Eq + OtherTrait = Option<T>;
+
+pub type Exactly100CharstoEqualWhereTest<T, U, PARAMET> where T: Clone + Ord + Eq + SomeOtherTrait = Option<T>;
+
+pub type Exactly101CharstoEqualWhereTest<T, U, PARAMETE> where T: Clone + Ord + Eq + SomeOtherTrait = Option<T>;
+
+type RegisterPlugin = unsafe fn(pt: *const c_char, plugin: *mut c_void, data: *mut CallbackData);
+
+// #1683
+pub type Between<Lhs, Rhs> = super::operators::Between<Lhs, super::operators::And<AsExpr<Rhs, Lhs>, AsExpr<Rhs, Lhs>>>;
+pub type NotBetween<Lhs, Rhs> = super::operators::NotBetween<Lhs, super::operators::And<AsExpr<Rhs, Lhs>, AsExpr<Rhs, Lhs>>>;
diff --git a/src/tools/rustfmt/tests/source/unicode.rs b/src/tools/rustfmt/tests/source/unicode.rs
new file mode 100644
index 000000000..4c2119af5
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/unicode.rs
@@ -0,0 +1,33 @@
+// rustfmt-wrap_comments: true
+
+fn foo() {
+ let s = "this line goes to 100: ͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶ";
+ let s = 42;
+
+ // a comment of length 80, with the starting sigil: ҘҘҘҘҘҘҘҘҘҘ ҘҘҘҘҘҘҘҘҘҘҘҘҘҘ
+ let s = 42;
+}
+
+pub fn bar(config: &Config) {
+ let csv = RefCell::new(create_csv(config, "foo"));
+ {
+ let mut csv = csv.borrow_mut();
+ for (i1, i2, i3) in iproduct!(0..2, 0..3, 0..3) {
+ csv.write_field(format!("γ[{}.{}.{}]", i1, i2, i3))
+ .unwrap();
+ csv.write_field(format!("d[{}.{}.{}]", i1, i2, i3))
+ .unwrap();
+ csv.write_field(format!("i[{}.{}.{}]", i1, i2, i3))
+ .unwrap();
+ }
+ csv.write_record(None::<&[u8]>).unwrap();
+ }
+}
+
+// The NotUnicode line is below 100 wrt chars but over it wrt String::len
+fn baz() {
+ let our_error_b = result_b_from_func.or_else(|e| match e {
+ NotPresent => Err(e).chain_err(|| "env var wasn't provided"),
+ NotUnicode(_) => Err(e).chain_err(|| "env var was very very very bork文字化ã"),
+ });
+}
diff --git a/src/tools/rustfmt/tests/source/unions.rs b/src/tools/rustfmt/tests/source/unions.rs
new file mode 100644
index 000000000..53630788f
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/unions.rs
@@ -0,0 +1,195 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+ /// A Doc comment
+#[AnAttribute]
+pub union Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ #[AnAttribute]
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+ pub i: TypeForPublicField
+}
+
+// #1029
+pub union Foo {
+ #[doc(hidden)]
+ // This will NOT get deleted!
+ bar: String, // hi
+}
+
+// #1029
+union X {
+ // `x` is an important number.
+ #[allow(unused)] // TODO: use
+ x: u32,
+}
+
+// #410
+#[allow(missing_docs)]
+pub union Writebatch<K: Key> {
+ #[allow(dead_code)] //only used for holding the internal pointer
+ writebatch: RawWritebatch,
+ marker: PhantomData<K>,
+}
+
+// With a where-clause and generics.
+pub union Foo<'a, Y: Baz>
+ where X: Whatever
+{
+ f: SomeType, // Comment beside a field
+}
+
+union Baz {
+
+ a: A, // Comment A
+ b: B, // Comment B
+ c: C, // Comment C
+
+}
+
+union Baz {
+ a: A, // Comment A
+
+ b: B, // Comment B
+
+
+
+
+ c: C, // Comment C
+}
+
+union Baz {
+
+ a: A,
+
+ b: B,
+ c: C,
+
+
+
+
+ d: D
+
+}
+
+union Baz
+{
+ // Comment A
+ a: A,
+
+ // Comment B
+b: B,
+ // Comment C
+ c: C,}
+
+pub union State<F: FnMut() -> time::Timespec> { now: F }
+
+pub union State<F: FnMut() -> ()> { now: F }
+
+pub union State<F: FnMut()> { now: F }
+
+union Palette { /// A map of indices in the palette to a count of pixels in approximately that color
+ foo: i32}
+
+// Splitting a single line comment into a block previously had a misalignment
+// when the field had attributes
+union FieldsWithAttributes {
+ // Pre Comment
+ #[rustfmt::skip] pub host:String, // Post comment BBBBBBBBBBBBBB BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBBB BBBBBBBBBBB
+ //Another pre comment
+ #[attr1]
+ #[attr2] pub id: usize // CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCC CCCCCCCCCCCC
+}
+
+union Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep: node::Handle<IdRef<'id, Node<K, V>>,
+ Type,
+ NodeType>,
+}
+
+mod m {
+ union X<T> where T: Sized {
+ a: T,
+ }
+}
+
+union Issue677 {
+ pub ptr: *const libc::c_void,
+ pub trace: fn( obj:
+ *const libc::c_void, tracer : *mut JSTracer ),
+}
+
+union Foo {}
+union Foo {
+ }
+union Foo {
+ // comment
+ }
+union Foo {
+ // trailing space ->
+
+
+ }
+union Foo { /* comment */ }
+
+union LongUnion {
+ a: A,
+ the_quick_brown_fox_jumps_over_the_lazy_dog:AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+union Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep: node::Handle<IdRef<'id, Node<Key, Value>>,
+ Type,
+ NodeType>,
+}
+
+// #1364
+fn foo() {
+ convex_shape.set_point(0, &Vector2f { x: 400.0, y: 100.0 });
+ convex_shape.set_point(1, &Vector2f { x: 500.0, y: 70.0 });
+ convex_shape.set_point(2, &Vector2f { x: 450.0, y: 100.0 });
+ convex_shape.set_point(3, &Vector2f { x: 580.0, y: 150.0 });
+}
+
+// Vertical alignment
+union Foo {
+ aaaaa: u32, // a
+
+ b: u32, // b
+ cc: u32, // cc
+
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 1
+ yy: u32, // comment2
+ zzz: u32, // comment3
+
+ aaaaaa: u32, // comment4
+ bb: u32, // comment5
+ // separate
+ dd: u32, // comment7
+ c: u32, // comment6
+
+ aaaaaaa: u32, /* multi
+ * line
+ * comment
+ */
+ b: u32, // hi
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ // separate
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ // separate
+ please_do_not_push_this_comment3: u32, // comment3
+}
diff --git a/src/tools/rustfmt/tests/source/unsafe-mod.rs b/src/tools/rustfmt/tests/source/unsafe-mod.rs
new file mode 100644
index 000000000..9996b0627
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/unsafe-mod.rs
@@ -0,0 +1,7 @@
+// These are supported by rustc syntactically but not semantically.
+
+#[cfg(any())]
+unsafe mod m { }
+
+#[cfg(any())]
+unsafe extern "C++" { }
diff --git a/src/tools/rustfmt/tests/source/visibility.rs b/src/tools/rustfmt/tests/source/visibility.rs
new file mode 100644
index 000000000..1c5919ccf
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/visibility.rs
@@ -0,0 +1,8 @@
+// #2398
+pub mod outer_mod {
+ pub mod inner_mod {
+ pub ( in outer_mod ) fn outer_mod_visible_fn() {}
+ pub ( super ) fn super_mod_visible_fn() {}
+ pub ( self ) fn inner_mod_visible_fn() {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/visual-fn-type.rs b/src/tools/rustfmt/tests/source/visual-fn-type.rs
new file mode 100644
index 000000000..67dad5fa4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/visual-fn-type.rs
@@ -0,0 +1,10 @@
+// rustfmt-indent_style: Visual
+type CNodeSetAtts = unsafe extern "C" fn(node: *const RsvgNode,
+ node_impl: *const RsvgCNodeImpl,
+ handle: *const RsvgHandle,
+ pbag: *const PropertyBag)
+ ;
+type CNodeDraw = unsafe extern "C" fn(node: *const RsvgNode,
+ node_impl: *const RsvgCNodeImpl,
+ draw_ctx: *const RsvgDrawingCtx,
+ dominate: i32);
diff --git a/src/tools/rustfmt/tests/source/where-clause-rfc.rs b/src/tools/rustfmt/tests/source/where-clause-rfc.rs
new file mode 100644
index 000000000..219a9bddb
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/where-clause-rfc.rs
@@ -0,0 +1,73 @@
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) where T: FOo, U: Bar {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) where T: FOo {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape, shape: &Shape) where T: FOo, U: Bar {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape, shape: &Shape) where T: FOo {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) -> Option<String> where T: FOo, U: Bar {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) -> Option<String> where T: FOo {
+ let mut effects = HashMap::new();
+}
+
+pub trait Test {
+ fn very_long_method_name<F>(self, f: F) -> MyVeryLongReturnType where F: FnMut(Self::Item) -> bool;
+
+ fn exactly_100_chars1<F>(self, f: F) -> MyVeryLongReturnType where F: FnMut(Self::Item) -> bool;
+}
+
+fn very_long_function_name<F>(very_long_argument: F) -> MyVeryLongReturnType where F: FnMut(Self::Item) -> bool { }
+
+struct VeryLongTupleStructName<A, B, C, D, E>(LongLongTypename, LongLongTypename, i32, i32) where A: LongTrait;
+
+struct Exactly100CharsToSemicolon<A, B, C, D, E>
+ (LongLongTypename, i32, i32)
+ where A: LongTrait1234;
+
+struct AlwaysOnNextLine<LongLongTypename, LongTypename, A, B, C, D, E, F> where A: LongTrait {
+ x: i32
+}
+
+pub trait SomeTrait<T>
+ where
+ T: Something + Sync + Send + Display + Debug + Copy + Hash + Debug + Display + Write + Read + FromStr
+{
+}
+
+// #2020
+impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
+ fn elaborate_bounds<F>(&mut self, bounds: &[ty::PolyTraitRef<'tcx>], mut mk_cand: F)
+ where F: for<'b> FnMut(&mut ProbeContext<'b, 'gcx, 'tcx>, ty::PolyTraitRef<'tcx>, ty::AssociatedItem),
+ {
+ // ...
+ }
+}
+
+// #2497
+fn handle_update<'a, Tab, Conn, R, C>(executor: &Executor<PooledConnection<ConnectionManager<Conn>>>, change_set: &'a C) -> ExecutionResult
+where &'a C: Identifiable + AsChangeset<Target = Tab> + HasTable<Table = Tab>,
+ <&'a C as AsChangeset>::Changeset: QueryFragment<Conn::Backend>,
+ Tab: Table + HasTable<Table = Tab>,
+ Tab::PrimaryKey: EqAll<<&'a C as Identifiable>::Id>,
+ Tab::FromClause: QueryFragment<Conn::Backend>,
+ Tab: FindDsl<<&'a C as Identifiable>::Id>,
+ Find<Tab, <&'a C as Identifiable>::Id>: IntoUpdateTarget<Table = Tab>,
+ <Find<Tab, <&'a C as Identifiable>::Id> as IntoUpdateTarget>::WhereClause: QueryFragment<Conn::Backend>,
+ Tab::Query: FilterDsl<<Tab::PrimaryKey as EqAll<<&'a C as Identifiable>::Id>>::Output>,
+ Filter<Tab::Query, <Tab::PrimaryKey as EqAll<<&'a C as Identifiable>::Id>>::Output>: LimitDsl,
+ Limit<Filter<Tab::Query, <Tab::PrimaryKey as EqAll<<&'a C as Identifiable>::Id>>::Output>>: QueryDsl + BoxedDsl< 'a, Conn::Backend, Output = BoxedSelectStatement<'a, R::SqlType, Tab, Conn::Backend>>,
+ R: LoadingHandler<Conn, Table = Tab, SqlType = Tab::SqlType> + GraphQLType<TypeInfo = (), Context = ()>, {
+ unimplemented!()
+}
diff --git a/src/tools/rustfmt/tests/source/where-clause.rs b/src/tools/rustfmt/tests/source/where-clause.rs
new file mode 100644
index 000000000..2a9160825
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/where-clause.rs
@@ -0,0 +1,58 @@
+// rustfmt-indent_style: Visual
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) where T: FOo, U: Bar {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) where T: FOo {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape, shape: &Shape) where T: FOo, U: Bar {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape, shape: &Shape) where T: FOo {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) -> Option<String> where T: FOo, U: Bar {
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape) -> Option<String> where T: FOo {
+ let mut effects = HashMap::new();
+}
+
+pub trait Test {
+ fn very_long_method_name<F>(self, f: F) -> MyVeryLongReturnType where F: FnMut(Self::Item) -> bool;
+
+ fn exactly_100_chars1<F>(self, f: F) -> MyVeryLongReturnType where F: FnMut(Self::Item) -> bool;
+}
+
+fn very_long_function_name<F>(very_long_argument: F) -> MyVeryLongReturnType where F: FnMut(Self::Item) -> bool { }
+
+struct VeryLongTupleStructName<A, B, C, D, E>(LongLongTypename, LongLongTypename, i32, i32) where A: LongTrait;
+
+struct Exactly100CharsToSemicolon<A, B, C, D, E>
+ (LongLongTypename, i32, i32)
+ where A: LongTrait1234;
+
+struct AlwaysOnNextLine<LongLongTypename, LongTypename, A, B, C, D, E, F> where A: LongTrait {
+ x: i32
+}
+
+pub trait SomeTrait<T>
+ where
+ T: Something + Sync + Send + Display + Debug + Copy + Hash + Debug + Display + Write + Read + FromStr
+{
+}
+
+// #2020
+impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
+ fn elaborate_bounds<F>(&mut self, bounds: &[ty::PolyTraitRef<'tcx>], mut mk_cand: F)
+ where F: for<'b> FnMut(&mut ProbeContext<'b, 'gcx, 'tcx>, ty::PolyTraitRef<'tcx>, ty::AssociatedItem),
+ {
+ // ...
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/width-heuristics.rs b/src/tools/rustfmt/tests/source/width-heuristics.rs
new file mode 100644
index 000000000..a591218b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/width-heuristics.rs
@@ -0,0 +1,28 @@
+// rustfmt-max_width: 120
+
+// elems on multiple lines for max_width 100, but same line for max_width 120
+fn foo(e: Enum) {
+ match e {
+ Enum::Var {
+ elem1,
+ elem2,
+ elem3,
+ } => {
+ return;
+ }
+ }
+}
+
+// elems not on same line for either max_width 100 or 120
+fn bar(e: Enum) {
+ match e {
+ Enum::Var {
+ elem1,
+ elem2,
+ elem3,
+ elem4,
+ } => {
+ return;
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/source/wrap_comments_should_not_imply_format_doc_comments.rs b/src/tools/rustfmt/tests/source/wrap_comments_should_not_imply_format_doc_comments.rs
new file mode 100644
index 000000000..78b3ce146
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/wrap_comments_should_not_imply_format_doc_comments.rs
@@ -0,0 +1,16 @@
+// rustfmt-wrap_comments: true
+
+/// Foo
+///
+/// # Example
+/// ```
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+/// # #![cfg_attr(not(dox), no_std)]
+/// fn foo() { }
+/// ```
+///
+fn foo() {}
+
+/// A long commment for wrapping
+/// This is a long long long long long long long long long long long long long long long long long long long long sentence.
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/target/5131_crate.rs b/src/tools/rustfmt/tests/target/5131_crate.rs
new file mode 100644
index 000000000..557d66703
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/5131_crate.rs
@@ -0,0 +1,9 @@
+// rustfmt-imports_granularity: Crate
+
+use foo::{
+ a, b, b as b2,
+ b::{f, g, g as g2},
+ c,
+ d::e,
+};
+use qux::{h, h as h2, i};
diff --git a/src/tools/rustfmt/tests/target/5131_module.rs b/src/tools/rustfmt/tests/target/5131_module.rs
new file mode 100644
index 000000000..763024d6f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/5131_module.rs
@@ -0,0 +1,32 @@
+// rustfmt-imports_granularity: Module
+
+#![allow(dead_code)]
+
+mod a {
+ pub mod b {
+ pub struct Data {
+ pub a: i32,
+ }
+ }
+
+ use crate::a::b::{Data, Data as Data2};
+
+ pub fn data(a: i32) -> Data {
+ Data { a }
+ }
+
+ pub fn data2(a: i32) -> Data2 {
+ Data2 { a }
+ }
+
+ #[cfg(test)]
+ mod tests {
+ use super::*;
+
+ #[test]
+ pub fn test() {
+ data(1);
+ data2(1);
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/5131_one.rs b/src/tools/rustfmt/tests/target/5131_one.rs
new file mode 100644
index 000000000..a086dae5a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/5131_one.rs
@@ -0,0 +1,12 @@
+// rustfmt-imports_granularity: One
+
+pub use foo::{x, x as x2, y};
+use {
+ bar::{
+ a,
+ b::{self, f, g},
+ c,
+ d::{e, e as e2},
+ },
+ qux::{h, i},
+};
diff --git a/src/tools/rustfmt/tests/target/alignment_2633/block_style.rs b/src/tools/rustfmt/tests/target/alignment_2633/block_style.rs
new file mode 100644
index 000000000..f13e8a876
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/alignment_2633/block_style.rs
@@ -0,0 +1,10 @@
+// rustfmt-struct_field_align_threshold: 50
+
+fn func() {
+ Ok(ServerInformation {
+ name: unwrap_message_string(items.get(0)),
+ vendor: unwrap_message_string(items.get(1)),
+ version: unwrap_message_string(items.get(2)),
+ spec_version: unwrap_message_string(items.get(3)),
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/alignment_2633/horizontal_tactic.rs b/src/tools/rustfmt/tests/target/alignment_2633/horizontal_tactic.rs
new file mode 100644
index 000000000..a381945fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/alignment_2633/horizontal_tactic.rs
@@ -0,0 +1,13 @@
+// rustfmt-struct_field_align_threshold: 5
+
+#[derive(Fail, Debug, Clone)]
+pub enum BuildError {
+ LineTooLong { length: usize, limit: usize },
+ DisallowedByte { b: u8, pos: usize },
+ ContainsNewLine { pos: usize },
+}
+
+enum Foo {
+ A { a: usize, bbbbb: () },
+ B { a: (), bbbbb: () },
+}
diff --git a/src/tools/rustfmt/tests/target/alignment_2633/visual_style.rs b/src/tools/rustfmt/tests/target/alignment_2633/visual_style.rs
new file mode 100644
index 000000000..7d21b599a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/alignment_2633/visual_style.rs
@@ -0,0 +1,9 @@
+// rustfmt-struct_field_align_threshold: 50
+// rustfmt-indent_style: Visual
+
+fn func() {
+ Ok(ServerInformation { name: unwrap_message_string(items.get(0)),
+ vendor: unwrap_message_string(items.get(1)),
+ version: unwrap_message_string(items.get(2)),
+ spec_version: unwrap_message_string(items.get(3)), });
+}
diff --git a/src/tools/rustfmt/tests/target/array_comment.rs b/src/tools/rustfmt/tests/target/array_comment.rs
new file mode 100644
index 000000000..93e1f5f40
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/array_comment.rs
@@ -0,0 +1,18 @@
+// Issue 2842
+// The comment should not make the last line shorter
+
+static XXX: [i8; 64] = [
+ 1, // Comment
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+];
+
+static XXX: [i8; 64] = [
+ 1, // Comment
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+];
+
+static XXX: [i8; 64] = [
+ 1, // Comment
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1,
+];
diff --git a/src/tools/rustfmt/tests/target/assignment.rs b/src/tools/rustfmt/tests/target/assignment.rs
new file mode 100644
index 000000000..1a70d8481
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/assignment.rs
@@ -0,0 +1,39 @@
+// Test assignment
+
+fn main() {
+ let some_var: Type;
+
+ let mut mutable;
+
+ let variable =
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA::BBBBBBBBBBBBBBBBBBBBBB::CCCCCCCCCCCCCCCCCCCCCC::EEEEEE;
+
+ variable =
+ LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG;
+
+ let single_line_fit = DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD;
+
+ single_line_fit = 5;
+ single_lit_fit >>= 10;
+
+ // #2791
+ let x = 2;
+}
+
+fn break_meee() {
+ {
+ (
+ block_start,
+ block_size,
+ margin_block_start,
+ margin_block_end,
+ ) = match (block_start, block_end, block_size) {
+ x => 1,
+ _ => 2,
+ };
+ }
+}
+
+// #2018
+pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &'static str =
+ "Unsized tuple coercion is not stable enough for use and is subject to change";
diff --git a/src/tools/rustfmt/tests/target/associated-items.rs b/src/tools/rustfmt/tests/target/associated-items.rs
new file mode 100644
index 000000000..1b0a828d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/associated-items.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("{}", <bool as ::std::default::Default>::default());
+}
diff --git a/src/tools/rustfmt/tests/target/associated-types-bounds-wrapping.rs b/src/tools/rustfmt/tests/target/associated-types-bounds-wrapping.rs
new file mode 100644
index 000000000..8aaeee3b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/associated-types-bounds-wrapping.rs
@@ -0,0 +1,6 @@
+// Test proper wrapping of long associated type bounds
+
+pub trait HttpService {
+ type WsService: 'static
+ + Service<Request = WsCommand, Response = WsResponse, Error = ServerError>;
+}
diff --git a/src/tools/rustfmt/tests/target/associated_type_bounds.rs b/src/tools/rustfmt/tests/target/associated_type_bounds.rs
new file mode 100644
index 000000000..2dcbd65f8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/associated_type_bounds.rs
@@ -0,0 +1,13 @@
+// See #3657 - https://github.com/rust-lang/rustfmt/issues/3657
+
+#![feature(associated_type_bounds)]
+
+fn f<I: Iterator<Item: Clone>>() {}
+
+fn g<I: Iterator<Item: Clone>>() {}
+
+fn h<I: Iterator<Item: Clone>>() {}
+
+fn i<I: Iterator<Item: Clone>>() {}
+
+fn j<I: Iterator<Item: Clone + 'a>>() {}
diff --git a/src/tools/rustfmt/tests/target/associated_type_defaults.rs b/src/tools/rustfmt/tests/target/associated_type_defaults.rs
new file mode 100644
index 000000000..d0a081337
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/associated_type_defaults.rs
@@ -0,0 +1,4 @@
+#![feature(associated_type_defaults)]
+trait Foo {
+ type Bar = ();
+}
diff --git a/src/tools/rustfmt/tests/target/async_block.rs b/src/tools/rustfmt/tests/target/async_block.rs
new file mode 100644
index 000000000..137d849c9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/async_block.rs
@@ -0,0 +1,35 @@
+// rustfmt-edition: 2018
+
+fn main() {
+ let x = async { Ok(()) };
+}
+
+fn baz() {
+ // test
+ let x = async {
+ // async blocks are great
+ Ok(())
+ };
+
+ let y = async { Ok(()) }; // comment
+
+ spawn(a, async move {
+ action();
+ Ok(())
+ });
+
+ spawn(a, async move || {
+ action();
+ Ok(())
+ });
+
+ spawn(a, static async || {
+ action();
+ Ok(())
+ });
+
+ spawn(a, static async move || {
+ action();
+ Ok(())
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/async_closure.rs b/src/tools/rustfmt/tests/target/async_closure.rs
new file mode 100644
index 000000000..9364e7dcc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/async_closure.rs
@@ -0,0 +1,22 @@
+// rustfmt-edition: 2018
+
+fn main() {
+ let async_closure = async {
+ let x = 3;
+ x
+ };
+
+ let f = async /* comment */ {
+ let x = 3;
+ x
+ };
+
+ let g = async /* comment */ move {
+ let x = 3;
+ x
+ };
+
+ let f = |x| async {
+ println!("hello, world");
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/async_fn.rs b/src/tools/rustfmt/tests/target/async_fn.rs
new file mode 100644
index 000000000..ac151dddb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/async_fn.rs
@@ -0,0 +1,24 @@
+// rustfmt-edition: 2018
+
+async fn bar() -> Result<(), ()> {
+ Ok(())
+}
+
+pub async fn baz() -> Result<(), ()> {
+ Ok(())
+}
+
+async unsafe fn foo() {
+ async move { Ok(()) }
+}
+
+async unsafe fn rust() {
+ async move {
+ // comment
+ Ok(())
+ }
+}
+
+async fn await_try() {
+ something.await?;
+}
diff --git a/src/tools/rustfmt/tests/target/attrib-block-expr.rs b/src/tools/rustfmt/tests/target/attrib-block-expr.rs
new file mode 100644
index 000000000..1e9557dc0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/attrib-block-expr.rs
@@ -0,0 +1,58 @@
+fn issue_2073() {
+ let x = {
+ #![my_attr]
+ do_something()
+ };
+
+ let x = #[my_attr]
+ {
+ do_something()
+ };
+
+ let x = #[my_attr]
+ {};
+
+ {
+ #![just_an_attribute]
+ };
+
+ let z = #[attr1]
+ #[attr2]
+ {
+ body()
+ };
+
+ x = |y| {
+ #![inner]
+ };
+
+ x = |y| #[outer]
+ {};
+
+ x = |y| {
+ //! ynot
+ };
+
+ x = |y| #[outer]
+ unsafe {};
+
+ let x = unsafe {
+ #![my_attr]
+ do_something()
+ };
+
+ let x = #[my_attr]
+ unsafe {
+ do_something()
+ };
+
+ // This is a dumb but possible case
+ let x = #[my_attr]
+ unsafe {};
+
+ x = |y| #[outer]
+ #[outer2]
+ unsafe {
+ //! Comment
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/attrib-extern-crate.rs b/src/tools/rustfmt/tests/target/attrib-extern-crate.rs
new file mode 100644
index 000000000..ed64a0aeb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/attrib-extern-crate.rs
@@ -0,0 +1,17 @@
+// Attributes on extern crate.
+
+#[Attr1]
+extern crate Bar;
+#[Attr2]
+#[Attr2]
+extern crate Baz;
+extern crate Foo;
+
+fn foo() {
+ #[Attr1]
+ extern crate Bar;
+ #[Attr2]
+ #[Attr2]
+ extern crate Baz;
+ extern crate Foo;
+}
diff --git a/src/tools/rustfmt/tests/target/attrib.rs b/src/tools/rustfmt/tests/target/attrib.rs
new file mode 100644
index 000000000..7e61f68d7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/attrib.rs
@@ -0,0 +1,271 @@
+// rustfmt-wrap_comments: true
+// Test attributes and doc comments are preserved.
+#![doc(
+ html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/",
+ test(attr(deny(warnings)))
+)]
+
+//! Doc comment
+
+#![attribute]
+
+//! Crate doc comment
+
+// Comment
+
+// Comment on attribute
+#![the(attribute)]
+
+// Another comment
+
+/// Blah blah blah.
+/// Blah blah blah.
+/// Blah blah blah.
+/// Blah blah blah.
+
+/// Blah blah blah.
+impl Bar {
+ /// Blah blah blooo.
+ /// Blah blah blooo.
+ /// Blah blah blooo.
+ /// Blah blah blooo.
+ #[an_attribute]
+ #[doc = "an attribute that shouldn't be normalized to a doc comment"]
+ fn foo(&mut self) -> isize {}
+
+ /// Blah blah bing.
+ /// Blah blah bing.
+ /// Blah blah bing.
+
+ /// Blah blah bing.
+ /// Blah blah bing.
+ /// Blah blah bing.
+ pub fn f2(self) {
+ (foo, bar)
+ }
+
+ #[another_attribute]
+ fn f3(self) -> Dog {}
+
+ /// Blah blah bing.
+
+ #[attrib1]
+ /// Blah blah bing.
+ #[attrib2]
+ // Another comment that needs rewrite because it's tooooooooooooooooooooooooooooooo
+ // loooooooooooong.
+ /// Blah blah bing.
+ fn f4(self) -> Cat {}
+
+ // We want spaces around `=`
+ #[cfg(feature = "nightly")]
+ fn f5(self) -> Monkey {}
+}
+
+// #984
+struct Foo {
+ #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]
+ foo: usize,
+}
+
+// #1668
+
+/// Default path (*nix)
+#[cfg(all(
+ unix,
+ not(target_os = "macos"),
+ not(target_os = "ios"),
+ not(target_os = "android")
+))]
+fn foo() {
+ #[cfg(target_os = "freertos")]
+ match port_id {
+ 'a' | 'A' => GpioPort {
+ port_address: GPIO_A,
+ },
+ 'b' | 'B' => GpioPort {
+ port_address: GPIO_B,
+ },
+ _ => panic!(),
+ }
+
+ #[cfg_attr(not(target_os = "freertos"), allow(unused_variables))]
+ let x = 3;
+}
+
+// #1777
+#[test]
+#[should_panic(expected = "(")]
+#[should_panic(expected = /* ( */ "(")]
+#[should_panic(/* ((((( */expected /* ((((( */= /* ((((( */ "("/* ((((( */)]
+#[should_panic(
+ /* (((((((( *//*
+ (((((((((()(((((((( */
+ expected = "("
+ // ((((((((
+)]
+fn foo() {}
+
+// #1799
+fn issue_1799() {
+ #[allow(unreachable_code)] // https://github.com/rust-lang/rust/issues/43336
+ Some(Err(error));
+
+ #[allow(unreachable_code)]
+ // https://github.com/rust-lang/rust/issues/43336
+ Some(Err(error));
+}
+
+// Formatting inner attributes
+fn inner_attributes() {
+ #![this_is_an_inner_attribute(foo)]
+
+ foo();
+}
+
+impl InnerAttributes() {
+ #![this_is_an_inner_attribute(foo)]
+
+ fn foo() {}
+}
+
+mod InnerAttributes {
+ #![this_is_an_inner_attribute(foo)]
+}
+
+fn attributes_on_statements() {
+ // Local
+ #[attr(on(local))]
+ let x = 3;
+
+ // Item
+ #[attr(on(item))]
+ use foo;
+
+ // Expr
+ #[attr(on(expr))]
+ {}
+
+ // Semi
+ #[attr(on(semi))]
+ foo();
+
+ // Mac
+ #[attr(on(mac))]
+ foo!();
+}
+
+// Large derives
+#[derive(
+ Add, Sub, Mul, Div, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug, Hash, Serialize, Mul,
+)]
+
+/// Foo bar baz
+
+#[derive(
+ Add,
+ Sub,
+ Mul,
+ Div,
+ Clone,
+ Copy,
+ Eq,
+ PartialEq,
+ Ord,
+ PartialOrd,
+ Debug,
+ Hash,
+ Serialize,
+ Deserialize,
+)]
+pub struct HP(pub u8);
+
+// Long `#[doc = "..."]`
+struct A {
+ #[doc = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"]
+ b: i32,
+}
+
+// #2647
+#[cfg(
+ feature = "this_line_is_101_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+)]
+pub fn foo() {}
+
+// path attrs
+#[clippy::bar]
+#[clippy::bar(a, b, c)]
+pub fn foo() {}
+
+mod issue_2620 {
+ #[derive(Debug, StructOpt)]
+ #[structopt(about = "Display information about the character on FF Logs")]
+ pub struct Params {
+ #[structopt(help = "The server the character is on")]
+ server: String,
+ #[structopt(help = "The character's first name")]
+ first_name: String,
+ #[structopt(help = "The character's last name")]
+ last_name: String,
+ #[structopt(
+ short = "j",
+ long = "job",
+ help = "The job to look at",
+ parse(try_from_str)
+ )]
+ job: Option<Job>,
+ }
+}
+
+// #2969
+#[cfg(not(all(
+ feature = "std",
+ any(
+ target_os = "linux",
+ target_os = "android",
+ target_os = "netbsd",
+ target_os = "dragonfly",
+ target_os = "haiku",
+ target_os = "emscripten",
+ target_os = "solaris",
+ target_os = "cloudabi",
+ target_os = "macos",
+ target_os = "ios",
+ target_os = "freebsd",
+ target_os = "openbsd",
+ target_os = "redox",
+ target_os = "fuchsia",
+ windows,
+ all(target_arch = "wasm32", feature = "stdweb"),
+ all(target_arch = "wasm32", feature = "wasm-bindgen"),
+ )
+)))]
+type Os = NoSource;
+
+// #3313
+fn stmt_expr_attributes() {
+ let foo;
+ #[must_use]
+ foo = false;
+}
+
+// #3509
+fn issue3509() {
+ match MyEnum {
+ MyEnum::Option1 if cfg!(target_os = "windows") =>
+ #[cfg(target_os = "windows")]
+ {
+ 1
+ }
+ }
+ match MyEnum {
+ MyEnum::Option1 if cfg!(target_os = "windows") =>
+ {
+ #[cfg(target_os = "windows")]
+ 1
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/big-impl-block.rs b/src/tools/rustfmt/tests/target/big-impl-block.rs
new file mode 100644
index 000000000..e3728caba
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/big-impl-block.rs
@@ -0,0 +1,82 @@
+// #1357
+impl<'a, Select, From, Distinct, Where, Order, Limit, Offset, Groupby, DB> InternalBoxedDsl<'a, DB>
+ for SelectStatement<Select, From, Distinct, Where, Order, Limit, Offset, GroupBy>
+where
+ DB: Backend,
+ Select: QueryFragment<DB> + SelectableExpression<From> + 'a,
+ Distinct: QueryFragment<DB> + 'a,
+ Where: Into<Option<Box<QueryFragment<DB> + 'a>>>,
+ Order: QueryFragment<DB> + 'a,
+ Limit: QueryFragment<DB> + 'a,
+ Offset: QueryFragment<DB> + 'a,
+{
+ type Output = BoxedSelectStatement<'a, Select::SqlTypeForSelect, From, DB>;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ BoxedSelectStatement::new(
+ Box::new(self.select),
+ self.from,
+ Box::new(self.distinct),
+ self.where_clause.into(),
+ Box::new(self.order),
+ Box::new(self.limit),
+ Box::new(self.offset),
+ )
+ }
+}
+
+// #1369
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName> Foo
+ for Bar
+{
+ fn foo() {}
+}
+impl Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar
+{
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar
+{
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName> Foo
+ for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+ >
+{
+ fn foo() {}
+}
+impl Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+ >
+{
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar<
+ ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName,
+ >
+{
+ fn foo() {}
+}
+
+// #1689
+impl<M, S, F, X> SubSelectDirect<M, S, F, X>
+where
+ M: select::Selector,
+ S: event::Stream,
+ F: for<'t> FnMut(transform::Api<'t, Stream<ContentStream<S>>>) -> transform::Api<'t, X>,
+ X: event::Stream,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/big-impl-visual.rs b/src/tools/rustfmt/tests/target/big-impl-visual.rs
new file mode 100644
index 000000000..04b0a83fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/big-impl-visual.rs
@@ -0,0 +1,65 @@
+// rustfmt-indent_style: Visual
+
+// #1357
+impl<'a, Select, From, Distinct, Where, Order, Limit, Offset, Groupby, DB> InternalBoxedDsl<'a, DB>
+ for SelectStatement<Select, From, Distinct, Where, Order, Limit, Offset, GroupBy>
+ where DB: Backend,
+ Select: QueryFragment<DB> + SelectableExpression<From> + 'a,
+ Distinct: QueryFragment<DB> + 'a,
+ Where: Into<Option<Box<QueryFragment<DB> + 'a>>>,
+ Order: QueryFragment<DB> + 'a,
+ Limit: QueryFragment<DB> + 'a,
+ Offset: QueryFragment<DB> + 'a
+{
+ type Output = BoxedSelectStatement<'a, Select::SqlTypeForSelect, From, DB>;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ BoxedSelectStatement::new(Box::new(self.select),
+ self.from,
+ Box::new(self.distinct),
+ self.where_clause.into(),
+ Box::new(self.order),
+ Box::new(self.limit),
+ Box::new(self.offset))
+ }
+}
+
+// #1369
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName> Foo
+ for Bar
+{
+ fn foo() {}
+}
+impl Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar
+{
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar
+{
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName> Foo
+ for Bar<ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName>
+{
+ fn foo() {}
+}
+impl Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar<ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName>
+{
+ fn foo() {}
+}
+impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
+ for Bar<ExcessivelyLongGenericName,
+ ExcessivelyLongGenericName,
+ AnotherExcessivelyLongGenericName>
+{
+ fn foo() {}
+}
diff --git a/src/tools/rustfmt/tests/target/binary-expr.rs b/src/tools/rustfmt/tests/target/binary-expr.rs
new file mode 100644
index 000000000..93115b282
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binary-expr.rs
@@ -0,0 +1,16 @@
+// Binary expressions
+
+fn foo() {
+ // 100
+ let x = aaaaaaaaaa || bbbbbbbbbb || cccccccccc || dddddddddd && eeeeeeeeee || ffffffffff || ggg;
+ // 101
+ let x =
+ aaaaaaaaaa || bbbbbbbbbb || cccccccccc || dddddddddd && eeeeeeeeee || ffffffffff || gggg;
+ // 104
+ let x = aaaaaaaaaa
+ || bbbbbbbbbb
+ || cccccccccc
+ || dddddddddd && eeeeeeeeee
+ || ffffffffff
+ || gggggggg;
+}
diff --git a/src/tools/rustfmt/tests/target/binop-separator-back/bitwise.rs b/src/tools/rustfmt/tests/target/binop-separator-back/bitwise.rs
new file mode 100644
index 000000000..ce32c05ef
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binop-separator-back/bitwise.rs
@@ -0,0 +1,18 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ ^
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ &
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ |
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ <<
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ >>
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+}
diff --git a/src/tools/rustfmt/tests/target/binop-separator-back/comp.rs b/src/tools/rustfmt/tests/target/binop-separator-back/comp.rs
new file mode 100644
index 000000000..efd837bcf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binop-separator-back/comp.rs
@@ -0,0 +1,33 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ <
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ
+ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ <=
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ
+ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ >
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ
+ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ >=
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ
+ {
+ //
+ }
+
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ ==
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ
+ {
+ //
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/binop-separator-back/logic.rs b/src/tools/rustfmt/tests/target/binop-separator-back/logic.rs
new file mode 100644
index 000000000..5f69fd5f5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binop-separator-back/logic.rs
@@ -0,0 +1,10 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ if abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ &&
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ ||
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ
+ {
+ //
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/binop-separator-back/math.rs b/src/tools/rustfmt/tests/target/binop-separator-back/math.rs
new file mode 100644
index 000000000..7a3f27e73
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binop-separator-back/math.rs
@@ -0,0 +1,23 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ *
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ -
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ /
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ *
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ *
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ /
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ /
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ +
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+}
diff --git a/src/tools/rustfmt/tests/target/binop-separator-back/patterns.rs b/src/tools/rustfmt/tests/target/binop-separator-back/patterns.rs
new file mode 100644
index 000000000..2e5971352
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binop-separator-back/patterns.rs
@@ -0,0 +1,11 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ match val {
+ ThisIsA::ReallyLongPatternNameToHelpOverflowTheNextValueOntoTheNextLine |
+ ThisIsA::SecondValueSeparatedByAPipe |
+ ThisIsA::ThirdValueSeparatedByAPipe => {
+ //
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/binop-separator-back/range.rs b/src/tools/rustfmt/tests/target/binop-separator-back/range.rs
new file mode 100644
index 000000000..19e5a81cd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/binop-separator-back/range.rs
@@ -0,0 +1,9 @@
+// rustfmt-binop_separator: Back
+
+fn main() {
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ..
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+
+ let value = abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ..=
+ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ;
+}
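These binop-separator-back target files exercise rustfmt's `binop_separator` configuration with the `Back` variant, which keeps a broken binary operator (or a match-arm `|`) at the end of the line rather than at the start of the continuation line. As an aside (not part of this diff), a project would normally opt into this behaviour through its rustfmt.toml:

    binop_separator = "Back"    # rustfmt.toml; the default value is "Front"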
diff --git a/src/tools/rustfmt/tests/target/break-and-continue.rs b/src/tools/rustfmt/tests/target/break-and-continue.rs
new file mode 100644
index 000000000..c01d8a078
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/break-and-continue.rs
@@ -0,0 +1,23 @@
+// break and continue formatting
+
+#![feature(loop_break_value)]
+
+fn main() {
+ 'a: loop {
+ break 'a;
+ }
+
+ let mut done = false;
+ 'b: while !done {
+ done = true;
+ continue 'b;
+ }
+
+ let x = loop {
+ break 5;
+ };
+
+ let x = 'c: loop {
+ break 'c 5;
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/catch.rs b/src/tools/rustfmt/tests/target/catch.rs
new file mode 100644
index 000000000..ffe694f8e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/catch.rs
@@ -0,0 +1,22 @@
+// rustfmt-edition: 2018
+#![feature(try_blocks)]
+
+fn main() {
+ let x = try { foo()? };
+
+ let x = try /* Invisible comment */ { foo()? };
+
+ let x = try { unsafe { foo()? } };
+
+ let y = match (try { foo()? }) {
+ _ => (),
+ };
+
+ try {
+ foo()?;
+ };
+
+ try {
+ // Regular try block
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/aarch64.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/aarch64.rs
new file mode 100644
index 000000000..91c51ed89
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/aarch64.rs
@@ -0,0 +1,98 @@
+//! Aarch64 run-time features.
+
+/// Checks if `aarch64` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_aarch64_feature_detected {
+ ("neon") => {
+ // FIXME: this should be removed once we rename Aarch64 neon to asimd
+ cfg!(target_feature = "neon") || $crate::detect::check_for($crate::detect::Feature::asimd)
+ };
+ ("asimd") => {
+ cfg!(target_feature = "neon") || $crate::detect::check_for($crate::detect::Feature::asimd)
+ };
+ ("pmull") => {
+ cfg!(target_feature = "pmull") || $crate::detect::check_for($crate::detect::Feature::pmull)
+ };
+ ("fp") => {
+ cfg!(target_feature = "fp") || $crate::detect::check_for($crate::detect::Feature::fp)
+ };
+ ("fp16") => {
+ cfg!(target_feature = "fp16") || $crate::detect::check_for($crate::detect::Feature::fp16)
+ };
+ ("sve") => {
+ cfg!(target_feature = "sve") || $crate::detect::check_for($crate::detect::Feature::sve)
+ };
+ ("crc") => {
+ cfg!(target_feature = "crc") || $crate::detect::check_for($crate::detect::Feature::crc)
+ };
+ ("crypto") => {
+ cfg!(target_feature = "crypto")
+ || $crate::detect::check_for($crate::detect::Feature::crypto)
+ };
+ ("lse") => {
+ cfg!(target_feature = "lse") || $crate::detect::check_for($crate::detect::Feature::lse)
+ };
+ ("rdm") => {
+ cfg!(target_feature = "rdm") || $crate::detect::check_for($crate::detect::Feature::rdm)
+ };
+ ("rcpc") => {
+ cfg!(target_feature = "rcpc") || $crate::detect::check_for($crate::detect::Feature::rcpc)
+ };
+ ("dotprod") => {
+ cfg!(target_feature = "dotprod")
+ || $crate::detect::check_for($crate::detect::Feature::dotprod)
+ };
+ ("ras") => {
+ compile_error!("\"ras\" feature cannot be detected at run-time")
+ };
+ ("v8.1a") => {
+ compile_error!("\"v8.1a\" feature cannot be detected at run-time")
+ };
+ ("v8.2a") => {
+ compile_error!("\"v8.2a\" feature cannot be detected at run-time")
+ };
+ ("v8.3a") => {
+ compile_error!("\"v8.3a\" feature cannot be detected at run-time")
+ };
+ ($t:tt,) => {
+ is_aarch64_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown aarch64 target feature: ", $t))
+ };
+}
+
+/// ARM Aarch64 CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// ARM Advanced SIMD (ASIMD)
+ asimd,
+ /// Polynomial Multiply
+ pmull,
+ /// Floating point support
+ fp,
+ /// Half-float support.
+ fp16,
+ /// Scalable Vector Extension (SVE)
+ sve,
+ /// CRC32 (Cyclic Redundancy Check)
+ crc,
+ /// Crypto: AES + PMULL + SHA1 + SHA2
+ crypto,
+ /// Atomics (Large System Extension)
+ lse,
+ /// Rounding Double Multiply (ASIMDRDM)
+ rdm,
+ /// Release consistent Processor consistent (RcPc)
+ rcpc,
+ /// Vector Dot-Product (ASIMDDP)
+ dotprod,
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/arm.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/arm.rs
new file mode 100644
index 000000000..90c61fed8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/arm.rs
@@ -0,0 +1,47 @@
+//! Run-time feature detection on ARM Aarch32.
+
+/// Checks if `arm` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_arm_feature_detected {
+ ("neon") => {
+ cfg!(target_feature = "neon") || $crate::detect::check_for($crate::detect::Feature::neon)
+ };
+ ("pmull") => {
+ cfg!(target_feature = "pmull") || $crate::detect::check_for($crate::detect::Feature::pmull)
+ };
+ ("v7") => {
+ compile_error!("\"v7\" feature cannot be detected at run-time")
+ };
+ ("vfp2") => {
+ compile_error!("\"vfp2\" feature cannot be detected at run-time")
+ };
+ ("vfp3") => {
+ compile_error!("\"vfp3\" feature cannot be detected at run-time")
+ };
+ ("vfp4") => {
+ compile_error!("\"vfp4\" feature cannot be detected at run-time")
+ };
+ ($t:tt,) => {
+ is_arm_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown arm target feature: ", $t))
+ };
+}
+
+/// ARM CPU Feature enum. Each variant denotes a position in a bitset for a
+/// particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// ARM Advanced SIMD (NEON) - Aarch32
+ neon,
+ /// Polynomial Multiply
+ pmull,
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips.rs
new file mode 100644
index 000000000..2397a0906
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips.rs
@@ -0,0 +1,30 @@
+//! Run-time feature detection on MIPS.
+
+/// Checks if `mips` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_mips_feature_detected {
+ ("msa") => {
+ cfg!(target_feature = "msa") || $crate::detect::check_for($crate::detect::Feature::msa)
+ };
+ ($t:tt,) => {
+ is_mips_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown mips target feature: ", $t))
+ };
+}
+
+/// MIPS CPU Feature enum. Each variant denotes a position in a bitset for a
+/// particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// MIPS SIMD Architecture (MSA)
+ msa,
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips64.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips64.rs
new file mode 100644
index 000000000..d378defc5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/mips64.rs
@@ -0,0 +1,30 @@
+//! Run-time feature detection on MIPS64.
+
+/// Checks if `mips64` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_mips64_feature_detected {
+ ("msa") => {
+ cfg!(target_feature = "msa") || $crate::detect::check_for($crate::detect::Feature::msa)
+ };
+ ($t:tt,) => {
+ is_mips64_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown mips64 target feature: ", $t))
+ };
+}
+
+/// MIPS64 CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// MIPS SIMD Architecture (MSA)
+ msa,
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc.rs
new file mode 100644
index 000000000..e7a9daac6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc.rs
@@ -0,0 +1,42 @@
+//! Run-time feature detection on PowerPC.
+
+/// Checks if `powerpc` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_powerpc_feature_detected {
+ ("altivec") => {
+ cfg!(target_feature = "altivec")
+ || $crate::detect::check_for($crate::detect::Feature::altivec)
+ };
+ ("vsx") => {
+ cfg!(target_feature = "vsx") || $crate::detect::check_for($crate::detect::Feature::vsx)
+ };
+ ("power8") => {
+ cfg!(target_feature = "power8")
+ || $crate::detect::check_for($crate::detect::Feature::power8)
+ };
+ ($t:tt,) => {
+ is_powerpc_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown powerpc target feature: ", $t))
+ };
+}
+
+/// PowerPC CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// Altivec
+ altivec,
+ /// VSX
+ vsx,
+ /// Power8
+ power8,
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc64.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc64.rs
new file mode 100644
index 000000000..c10220269
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/powerpc64.rs
@@ -0,0 +1,42 @@
+//! Run-time feature detection on PowerPC64.
+
+/// Checks if `powerpc64` feature is enabled.
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_powerpc64_feature_detected {
+ ("altivec") => {
+ cfg!(target_feature = "altivec")
+ || $crate::detect::check_for($crate::detect::Feature::altivec)
+ };
+ ("vsx") => {
+ cfg!(target_feature = "vsx") || $crate::detect::check_for($crate::detect::Feature::vsx)
+ };
+ ("power8") => {
+ cfg!(target_feature = "power8")
+ || $crate::detect::check_for($crate::detect::Feature::power8)
+ };
+ ($t:tt,) => {
+ is_powerpc64_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown powerpc64 target feature: ", $t))
+ };
+}
+
+/// PowerPC64 CPU Feature enum. Each variant denotes a position in a bitset
+/// for a particular feature.
+///
+/// PLEASE: do not use this, it is an implementation detail subject to change.
+#[doc(hidden)]
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// Altivec
+ altivec,
+ /// VSX
+ vsx,
+ /// Power8
+ power8,
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/arch/x86.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/x86.rs
new file mode 100644
index 000000000..02d5eed1c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/arch/x86.rs
@@ -0,0 +1,333 @@
+//! This module implements minimal run-time feature detection for x86.
+//!
+//! The features are detected using the `detect_features` function below.
+//! This function uses the CPUID instruction to read the feature flags from the
+//! CPU and encodes them in a `usize` where each bit position represents
+//! whether a feature is available (bit is set) or unavailable (bit is cleared).
+//!
+//! The enum `Feature` is used to map bit positions to feature names, and
+//! the `__crate::detect::check_for!` macro is used to map string literals (e.g.,
+//! "avx") to these bit positions (e.g., `Feature::avx`).
+//!
+//! The run-time feature detection is performed by the
+//! `__crate::detect::check_for(Feature) -> bool` function. On its first call,
+//! this function queries the CPU for the available features and stores them
+//! in a global `AtomicUsize` variable. Subsequent queries just check
+//! whether the feature bit in this global variable is set or cleared.
+
+/// A macro to test at *runtime* whether a CPU feature is available on
+/// x86/x86-64 platforms.
+///
+/// This macro is provided in the standard library and will detect at runtime
+/// whether the specified CPU feature is detected. This does **not** resolve at
+/// compile time unless the specified feature is already enabled for the entire
+/// crate. Runtime detection currently relies mostly on the `cpuid` instruction.
+///
+/// This macro only takes one argument which is a string literal of the feature
+/// being tested for. The feature names supported are the lowercase versions of
+/// the ones defined by Intel in [their documentation][docs].
+///
+/// ## Supported arguments
+///
+/// This macro supports the same names that `#[target_feature]` supports. Unlike
+/// `#[target_feature]`, however, this macro does not support names separated
+/// with a comma. Instead testing for multiple features must be done through
+/// separate macro invocations for now.
+///
+/// Supported arguments are:
+///
+/// * `"aes"`
+/// * `"pclmulqdq"`
+/// * `"rdrand"`
+/// * `"rdseed"`
+/// * `"tsc"`
+/// * `"mmx"`
+/// * `"sse"`
+/// * `"sse2"`
+/// * `"sse3"`
+/// * `"ssse3"`
+/// * `"sse4.1"`
+/// * `"sse4.2"`
+/// * `"sse4a"`
+/// * `"sha"`
+/// * `"avx"`
+/// * `"avx2"`
+/// * `"avx512f"`
+/// * `"avx512cd"`
+/// * `"avx512er"`
+/// * `"avx512pf"`
+/// * `"avx512bw"`
+/// * `"avx512dq"`
+/// * `"avx512vl"`
+/// * `"avx512ifma"`
+/// * `"avx512vbmi"`
+/// * `"avx512vpopcntdq"`
+/// * `"f16c"`
+/// * `"fma"`
+/// * `"bmi1"`
+/// * `"bmi2"`
+/// * `"abm"`
+/// * `"lzcnt"`
+/// * `"tbm"`
+/// * `"popcnt"`
+/// * `"fxsr"`
+/// * `"xsave"`
+/// * `"xsaveopt"`
+/// * `"xsaves"`
+/// * `"xsavec"`
+/// * `"adx"`
+/// * `"rtm"`
+///
+/// [docs]: https://software.intel.com/sites/landingpage/IntrinsicsGuide
+#[macro_export]
+#[stable(feature = "simd_x86", since = "1.27.0")]
+#[allow_internal_unstable(stdsimd_internal, stdsimd)]
+macro_rules! is_x86_feature_detected {
+ ("aes") => {
+ cfg!(target_feature = "aes") || $crate::detect::check_for($crate::detect::Feature::aes)
+ };
+ ("pclmulqdq") => {
+ cfg!(target_feature = "pclmulqdq")
+ || $crate::detect::check_for($crate::detect::Feature::pclmulqdq)
+ };
+ ("rdrand") => {
+ cfg!(target_feature = "rdrand")
+ || $crate::detect::check_for($crate::detect::Feature::rdrand)
+ };
+ ("rdseed") => {
+ cfg!(target_feature = "rdseed")
+ || $crate::detect::check_for($crate::detect::Feature::rdseed)
+ };
+ ("tsc") => {
+ cfg!(target_feature = "tsc") || $crate::detect::check_for($crate::detect::Feature::tsc)
+ };
+ ("mmx") => {
+ cfg!(target_feature = "mmx") || $crate::detect::check_for($crate::detect::Feature::mmx)
+ };
+ ("sse") => {
+ cfg!(target_feature = "sse") || $crate::detect::check_for($crate::detect::Feature::sse)
+ };
+ ("sse2") => {
+ cfg!(target_feature = "sse2") || $crate::detect::check_for($crate::detect::Feature::sse2)
+ };
+ ("sse3") => {
+ cfg!(target_feature = "sse3") || $crate::detect::check_for($crate::detect::Feature::sse3)
+ };
+ ("ssse3") => {
+ cfg!(target_feature = "ssse3") || $crate::detect::check_for($crate::detect::Feature::ssse3)
+ };
+ ("sse4.1") => {
+ cfg!(target_feature = "sse4.1")
+ || $crate::detect::check_for($crate::detect::Feature::sse4_1)
+ };
+ ("sse4.2") => {
+ cfg!(target_feature = "sse4.2")
+ || $crate::detect::check_for($crate::detect::Feature::sse4_2)
+ };
+ ("sse4a") => {
+ cfg!(target_feature = "sse4a") || $crate::detect::check_for($crate::detect::Feature::sse4a)
+ };
+ ("sha") => {
+ cfg!(target_feature = "sha") || $crate::detect::check_for($crate::detect::Feature::sha)
+ };
+ ("avx") => {
+ cfg!(target_feature = "avx") || $crate::detect::check_for($crate::detect::Feature::avx)
+ };
+ ("avx2") => {
+ cfg!(target_feature = "avx2") || $crate::detect::check_for($crate::detect::Feature::avx2)
+ };
+ ("avx512f") => {
+ cfg!(target_feature = "avx512f")
+ || $crate::detect::check_for($crate::detect::Feature::avx512f)
+ };
+ ("avx512cd") => {
+ cfg!(target_feature = "avx512cd")
+ || $crate::detect::check_for($crate::detect::Feature::avx512cd)
+ };
+ ("avx512er") => {
+ cfg!(target_feature = "avx512er")
+ || $crate::detect::check_for($crate::detect::Feature::avx512er)
+ };
+ ("avx512pf") => {
+ cfg!(target_feature = "avx512pf")
+ || $crate::detect::check_for($crate::detect::Feature::avx512pf)
+ };
+ ("avx512bw") => {
+ cfg!(target_feature = "avx512bw")
+ || $crate::detect::check_for($crate::detect::Feature::avx512bw)
+ };
+ ("avx512dq") => {
+ cfg!(target_feature = "avx512dq")
+ || $crate::detect::check_for($crate::detect::Feature::avx512dq)
+ };
+ ("avx512vl") => {
+ cfg!(target_feature = "avx512vl")
+ || $crate::detect::check_for($crate::detect::Feature::avx512vl)
+ };
+ ("avx512ifma") => {
+ cfg!(target_feature = "avx512ifma")
+ || $crate::detect::check_for($crate::detect::Feature::avx512_ifma)
+ };
+ ("avx512vbmi") => {
+ cfg!(target_feature = "avx512vbmi")
+ || $crate::detect::check_for($crate::detect::Feature::avx512_vbmi)
+ };
+ ("avx512vpopcntdq") => {
+ cfg!(target_feature = "avx512vpopcntdq")
+ || $crate::detect::check_for($crate::detect::Feature::avx512_vpopcntdq)
+ };
+ ("f16c") => {
+ cfg!(target_feature = "f16c") || $crate::detect::check_for($crate::detect::Feature::f16c)
+ };
+ ("fma") => {
+ cfg!(target_feature = "fma") || $crate::detect::check_for($crate::detect::Feature::fma)
+ };
+ ("bmi1") => {
+ cfg!(target_feature = "bmi1") || $crate::detect::check_for($crate::detect::Feature::bmi)
+ };
+ ("bmi2") => {
+ cfg!(target_feature = "bmi2") || $crate::detect::check_for($crate::detect::Feature::bmi2)
+ };
+ ("abm") => {
+ cfg!(target_feature = "abm") || $crate::detect::check_for($crate::detect::Feature::abm)
+ };
+ ("lzcnt") => {
+ cfg!(target_feature = "lzcnt") || $crate::detect::check_for($crate::detect::Feature::abm)
+ };
+ ("tbm") => {
+ cfg!(target_feature = "tbm") || $crate::detect::check_for($crate::detect::Feature::tbm)
+ };
+ ("popcnt") => {
+ cfg!(target_feature = "popcnt")
+ || $crate::detect::check_for($crate::detect::Feature::popcnt)
+ };
+ ("fxsr") => {
+ cfg!(target_feature = "fxsr") || $crate::detect::check_for($crate::detect::Feature::fxsr)
+ };
+ ("xsave") => {
+ cfg!(target_feature = "xsave") || $crate::detect::check_for($crate::detect::Feature::xsave)
+ };
+ ("xsaveopt") => {
+ cfg!(target_feature = "xsaveopt")
+ || $crate::detect::check_for($crate::detect::Feature::xsaveopt)
+ };
+ ("xsaves") => {
+ cfg!(target_feature = "xsaves")
+ || $crate::detect::check_for($crate::detect::Feature::xsaves)
+ };
+ ("xsavec") => {
+ cfg!(target_feature = "xsavec")
+ || $crate::detect::check_for($crate::detect::Feature::xsavec)
+ };
+ ("cmpxchg16b") => {
+ cfg!(target_feature = "cmpxchg16b")
+ || $crate::detect::check_for($crate::detect::Feature::cmpxchg16b)
+ };
+ ("adx") => {
+ cfg!(target_feature = "adx") || $crate::detect::check_for($crate::detect::Feature::adx)
+ };
+ ("rtm") => {
+ cfg!(target_feature = "rtm") || $crate::detect::check_for($crate::detect::Feature::rtm)
+ };
+ ($t:tt,) => {
+ is_x86_feature_detected!($t);
+ };
+ ($t:tt) => {
+ compile_error!(concat!("unknown target feature: ", $t))
+ };
+}
+
+/// X86 CPU Feature enum. Each variant denotes a position in a bitset for a
+/// particular feature.
+///
+/// This is an unstable implementation detail subject to change.
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[doc(hidden)]
+#[unstable(feature = "stdsimd_internal", issue = "0")]
+pub enum Feature {
+ /// AES (Advanced Encryption Standard New Instructions AES-NI)
+ aes,
+ /// CLMUL (Carry-less Multiplication)
+ pclmulqdq,
+ /// RDRAND
+ rdrand,
+ /// RDSEED
+ rdseed,
+ /// TSC (Time Stamp Counter)
+ tsc,
+ /// MMX
+ mmx,
+ /// SSE (Streaming SIMD Extensions)
+ sse,
+ /// SSE2 (Streaming SIMD Extensions 2)
+ sse2,
+ /// SSE3 (Streaming SIMD Extensions 3)
+ sse3,
+ /// SSSE3 (Supplemental Streaming SIMD Extensions 3)
+ ssse3,
+ /// SSE4.1 (Streaming SIMD Extensions 4.1)
+ sse4_1,
+ /// SSE4.2 (Streaming SIMD Extensions 4.2)
+ sse4_2,
+ /// SSE4a (Streaming SIMD Extensions 4a)
+ sse4a,
+ /// SHA
+ sha,
+ /// AVX (Advanced Vector Extensions)
+ avx,
+ /// AVX2 (Advanced Vector Extensions 2)
+ avx2,
+ /// AVX-512 F (Foundation)
+ avx512f,
+ /// AVX-512 CD (Conflict Detection Instructions)
+ avx512cd,
+ /// AVX-512 ER (Exponential and Reciprocal Instructions)
+ avx512er,
+ /// AVX-512 PF (Prefetch Instructions)
+ avx512pf,
+ /// AVX-512 BW (Byte and Word Instructions)
+ avx512bw,
+ /// AVX-512 DQ (Doubleword and Quadword)
+ avx512dq,
+ /// AVX-512 VL (Vector Length Extensions)
+ avx512vl,
+ /// AVX-512 IFMA (Integer Fused Multiply Add)
+ avx512_ifma,
+ /// AVX-512 VBMI (Vector Byte Manipulation Instructions)
+ avx512_vbmi,
+ /// AVX-512 VPOPCNTDQ (Vector Population Count Doubleword and
+ /// Quadword)
+ avx512_vpopcntdq,
+ /// F16C (Conversions between IEEE-754 `binary16` and `binary32` formats)
+ f16c,
+ /// FMA (Fused Multiply Add)
+ fma,
+ /// BMI1 (Bit Manipulation Instructions 1)
+ bmi,
+ /// BMI2 (Bit Manipulation Instructions 2)
+ bmi2,
+ /// ABM (Advanced Bit Manipulation) on AMD / LZCNT (Leading Zero
+ /// Count) on Intel
+ abm,
+ /// TBM (Trailing Bit Manipulation)
+ tbm,
+ /// POPCNT (Population Count)
+ popcnt,
+ /// FXSR (Floating-point context fast save and restore)
+ fxsr,
+ /// XSAVE (Save Processor Extended States)
+ xsave,
+ /// XSAVEOPT (Save Processor Extended States Optimized)
+ xsaveopt,
+ /// XSAVES (Save Processor Extended States Supervisor)
+ xsaves,
+ /// XSAVEC (Save Processor Extended States Compacted)
+ xsavec,
+ /// CMPXCHG16B, a 16-byte compare-and-swap instruction
+ cmpxchg16b,
+ /// ADX, Intel ADX (Multi-Precision Add-Carry Instruction Extensions)
+ adx,
+ /// RTM, Intel (Restricted Transactional Memory)
+ rtm,
+}
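As a usage sketch (illustrative only, not part of this diff), the macro defined above guards feature-dependent code paths at run time; the function below is a hypothetical call site:

    // Hypothetical caller: pick a code path based on run-time CPU detection.
    #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
    fn sum(values: &[f32]) -> f32 {
        if is_x86_feature_detected!("avx2") {
            // an AVX2-specialised implementation would be dispatched here
            values.iter().sum()
        } else {
            values.iter().sum() // scalar fallback
        }
    }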
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/bit.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/bit.rs
new file mode 100644
index 000000000..578f0b16b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/bit.rs
@@ -0,0 +1,9 @@
+//! Bit manipulation utilities.
+
+/// Tests the `bit` of `x`.
+#[allow(dead_code)]
+#[inline]
+pub(crate) fn test(x: usize, bit: u32) -> bool {
+ debug_assert!(bit < 32, "bit index out-of-bounds");
+ x & (1 << bit) != 0
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/cache.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/cache.rs
new file mode 100644
index 000000000..92bc4b58d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/cache.rs
@@ -0,0 +1,164 @@
+//! Caches run-time feature detection so that it only needs to be computed
+//! once.
+
+#![allow(dead_code)] // not used on all platforms
+
+use crate::sync::atomic::Ordering;
+
+#[cfg(target_pointer_width = "64")]
+use crate::sync::atomic::AtomicU64;
+
+#[cfg(target_pointer_width = "32")]
+use crate::sync::atomic::AtomicU32;
+
+/// Sets the `bit` of `x`.
+#[inline]
+const fn set_bit(x: u64, bit: u32) -> u64 {
+ x | 1 << bit
+}
+
+/// Tests the `bit` of `x`.
+#[inline]
+const fn test_bit(x: u64, bit: u32) -> bool {
+ x & (1 << bit) != 0
+}
+
+/// Maximum number of features that can be cached.
+const CACHE_CAPACITY: u32 = 63;
+
+/// This type is used to initialize the cache.
+#[derive(Copy, Clone)]
+pub(crate) struct Initializer(u64);
+
+#[allow(clippy::use_self)]
+impl Default for Initializer {
+ fn default() -> Self {
+ Initializer(0)
+ }
+}
+
+impl Initializer {
+ /// Tests the `bit` of the cache.
+ #[allow(dead_code)]
+ #[inline]
+ pub(crate) fn test(self, bit: u32) -> bool {
+ // FIXME: this way of making sure that the cache is large enough is
+ // brittle.
+ debug_assert!(
+ bit < CACHE_CAPACITY,
+ "too many features, time to increase the cache size!"
+ );
+ test_bit(self.0, bit)
+ }
+
+ /// Sets the `bit` of the cache.
+ #[inline]
+ pub(crate) fn set(&mut self, bit: u32) {
+ // FIXME: this way of making sure that the cache is large enough is
+ // brittle.
+ debug_assert!(
+ bit < CACHE_CAPACITY,
+ "too many features, time to increase the cache size!"
+ );
+ let v = self.0;
+ self.0 = set_bit(v, bit);
+ }
+}
+
+/// This global variable is a cache of the features supported by the CPU.
+static CACHE: Cache = Cache::uninitialized();
+
+/// Feature cache with capacity for `CACHE_CAPACITY` features.
+///
+/// Note: the last feature bit is used to represent an
+/// uninitialized cache.
+#[cfg(target_pointer_width = "64")]
+struct Cache(AtomicU64);
+
+#[cfg(target_pointer_width = "64")]
+#[allow(clippy::use_self)]
+impl Cache {
+ /// Creates an uninitialized cache.
+ #[allow(clippy::declare_interior_mutable_const)]
+ const fn uninitialized() -> Self {
+ Cache(AtomicU64::new(u64::max_value()))
+ }
+ /// Is the cache uninitialized?
+ #[inline]
+ pub(crate) fn is_uninitialized(&self) -> bool {
+ self.0.load(Ordering::Relaxed) == u64::max_value()
+ }
+
+ /// Is the `bit` in the cache set?
+ #[inline]
+ pub(crate) fn test(&self, bit: u32) -> bool {
+ test_bit(CACHE.0.load(Ordering::Relaxed), bit)
+ }
+
+ /// Initializes the cache.
+ #[inline]
+ pub(crate) fn initialize(&self, value: Initializer) {
+ self.0.store(value.0, Ordering::Relaxed);
+ }
+}
+
+/// Feature cache with capacity for `CACHE_CAPACITY` features.
+///
+/// Note: the last feature bit is used to represent an
+/// uninitialized cache.
+#[cfg(target_pointer_width = "32")]
+struct Cache(AtomicU32, AtomicU32);
+
+#[cfg(target_pointer_width = "32")]
+impl Cache {
+ /// Creates an uninitialized cache.
+ const fn uninitialized() -> Self {
+ Cache(
+ AtomicU32::new(u32::max_value()),
+ AtomicU32::new(u32::max_value()),
+ )
+ }
+ /// Is the cache uninitialized?
+ #[inline]
+ pub(crate) fn is_uninitialized(&self) -> bool {
+ self.1.load(Ordering::Relaxed) == u32::max_value()
+ }
+
+ /// Is the `bit` in the cache set?
+ #[inline]
+ pub(crate) fn test(&self, bit: u32) -> bool {
+ if bit < 32 {
+ test_bit(CACHE.0.load(Ordering::Relaxed) as u64, bit)
+ } else {
+ test_bit(CACHE.1.load(Ordering::Relaxed) as u64, bit - 32)
+ }
+ }
+
+ /// Initializes the cache.
+ #[inline]
+ pub(crate) fn initialize(&self, value: Initializer) {
+ let lo: u32 = value.0 as u32;
+ let hi: u32 = (value.0 >> 32) as u32;
+ self.0.store(lo, Ordering::Relaxed);
+ self.1.store(hi, Ordering::Relaxed);
+ }
+}
+
+/// Tests the `bit` of the storage. If the storage has not been initialized,
+/// initializes it with the result of `f()`.
+///
+/// On its first invocation, it detects the CPU features and caches them in the
+/// `CACHE` global variable as an `AtomicU64`.
+///
+/// It uses the `Feature` variant to index into this variable as a bitset. If
+/// the bit is set, the feature is enabled, and otherwise it is disabled.
+#[inline]
+pub(crate) fn test<F>(bit: u32, f: F) -> bool
+where
+ F: FnOnce() -> Initializer,
+{
+ if CACHE.is_uninitialized() {
+ CACHE.initialize(f());
+ }
+ CACHE.test(bit)
+}
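A minimal sketch of the bit encoding this cache relies on (illustrative, not part of the diff): each detected feature occupies one bit of a 64-bit word, and the last feature bit is reserved so that an all-ones value can mark the cache as uninitialized.

    // Illustrative only: mirrors set_bit/test_bit from the cache module above.
    let mut bits: u64 = 0;
    bits |= 1 << 5;                // record that feature number 5 is available
    assert!(bits & (1 << 5) != 0); // test_bit(bits, 5) -> true
    assert!(bits & (1 << 6) == 0); // feature 6 was never set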
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/error_macros.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/error_macros.rs
new file mode 100644
index 000000000..6769757ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/error_macros.rs
@@ -0,0 +1,150 @@
+//! The `is_{target_arch}_feature_detected!` macros are only available on their
+//! architecture. These macros provide better error messages when the user
+//! attempts to call them on a different architecture.
+
+/// Prevents compilation if `is_x86_feature_detected` is used somewhere
+/// else than `x86` and `x86_64` targets.
+#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_x86_feature_detected {
+ ($t: tt) => {
+ compile_error!(
+ r#"
+ is_x86_feature_detected can only be used on x86 and x86_64 targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
+ if is_x86_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_arm_feature_detected` is used somewhere else
+/// than `ARM` targets.
+#[cfg(not(target_arch = "arm"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_arm_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+ is_arm_feature_detected can only be used on ARM targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "arm")] {
+ if is_arm_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_aarch64_feature_detected` is used somewhere else
+/// than `aarch64` targets.
+#[cfg(not(target_arch = "aarch64"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_aarch64_feature_detected {
+ ($t: tt) => {
+ compile_error!(
+ r#"
+ is_aarch64_feature_detected can only be used on AArch64 targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "aarch64")] {
+ if is_aarch64_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_powerpc_feature_detected` is used somewhere else
+/// than `PowerPC` targets.
+#[cfg(not(target_arch = "powerpc"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_powerpc_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+is_powerpc_feature_detected can only be used on PowerPC targets.
+You can prevent it from being used in other architectures by
+guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "powerpc")] {
+ if is_powerpc_feature_detected(...) { ... }
+ }
+"#
+ )
+ };
+}
+
+/// Prevents compilation if `is_powerpc64_feature_detected` is used somewhere
+/// else than `PowerPC64` targets.
+#[cfg(not(target_arch = "powerpc64"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_powerpc64_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+is_powerpc64_feature_detected can only be used on PowerPC64 targets.
+You can prevent it from being used in other architectures by
+guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "powerpc64")] {
+ if is_powerpc64_feature_detected(...) { ... }
+ }
+"#
+ )
+ };
+}
+
+/// Prevents compilation if `is_mips_feature_detected` is used somewhere else
+/// than `MIPS` targets.
+#[cfg(not(target_arch = "mips"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_mips_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+ is_mips_feature_detected can only be used on MIPS targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "mips")] {
+ if is_mips_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
+
+/// Prevents compilation if `is_mips64_feature_detected` is used somewhere else
+/// than `MIPS64` targets.
+#[cfg(not(target_arch = "mips64"))]
+#[macro_export]
+#[unstable(feature = "stdsimd", issue = "27731")]
+macro_rules! is_mips64_feature_detected {
+ ($t:tt) => {
+ compile_error!(
+ r#"
+ is_mips64_feature_detected can only be used on MIPS64 targets.
+ You can prevent it from being used in other architectures by
+ guarding it behind a cfg(target_arch) as follows:
+
+ #[cfg(target_arch = "mips64")] {
+ if is_mips64_feature_detected(...) { ... }
+ }
+ "#
+ )
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/mod.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/mod.rs
new file mode 100644
index 000000000..f446e88ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/mod.rs
@@ -0,0 +1,85 @@
+//! This module implements run-time feature detection.
+//!
+//! The `is_{arch}_feature_detected!("feature-name")` macros take the name of a
+//! feature as a string-literal, and return a boolean indicating whether the
+//! feature is enabled at run-time or not.
+//!
+//! These macros do two things:
+//! * map the string-literal into an integer stored as a `Feature` enum,
+//! * call a `os::check_for(x: Feature)` function that returns `true` if the
+//! feature is enabled.
+//!
+//! The `Feature` enums are also implemented in the `arch/{target_arch}.rs`
+//! modules.
+//!
+//! The `check_for` functions are, in general, Operating System dependent. Most
+//! architectures do not allow user-space programs to query the feature bits
+//! due to security concerns (x86 is the big exception). These functions are
+//! implemented in the `os/{target_os}.rs` modules.
+
+#[macro_use]
+mod error_macros;
+
+cfg_if! {
+ if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
+ #[path = "arch/x86.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "arm")] {
+ #[path = "arch/arm.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "aarch64")] {
+ #[path = "arch/aarch64.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "powerpc")] {
+ #[path = "arch/powerpc.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "powerpc64")] {
+ #[path = "arch/powerpc64.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "mips")] {
+ #[path = "arch/mips.rs"]
+ #[macro_use]
+ mod arch;
+ } else if #[cfg(target_arch = "mips64")] {
+ #[path = "arch/mips64.rs"]
+ #[macro_use]
+ mod arch;
+ } else {
+ // Unimplemented architecture:
+ mod arch {
+ pub enum Feature {
+ Null
+ }
+ }
+ }
+}
+pub use self::arch::Feature;
+
+mod bit;
+mod cache;
+
+cfg_if! {
+ if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
+ // On x86/x86_64 no OS specific functionality is required.
+ #[path = "os/x86.rs"]
+ mod os;
+ } else if #[cfg(all(target_os = "linux", feature = "use_std"))] {
+ #[path = "os/linux/mod.rs"]
+ mod os;
+ } else if #[cfg(target_os = "freebsd")] {
+ #[cfg(target_arch = "aarch64")]
+ #[path = "os/aarch64.rs"]
+ mod aarch64;
+ #[path = "os/freebsd/mod.rs"]
+ mod os;
+ } else {
+ #[path = "os/other.rs"]
+ mod os;
+ }
+}
+pub use self::os::check_for;
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/aarch64.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/aarch64.rs
new file mode 100644
index 000000000..9adc938a2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/aarch64.rs
@@ -0,0 +1,88 @@
+//! Run-time feature detection for Aarch64 on any OS that emulates the mrs instruction.
+//!
+//! On FreeBSD >= 12.0, Linux >= 4.11 and other operating systems, it is possible to use
+//! privileged system registers from userspace to check CPU feature support.
+//!
+//! AArch64 system registers ID_AA64ISAR0_EL1, ID_AA64PFR0_EL1, ID_AA64ISAR1_EL1
+//! have bits dedicated to features like AdvSIMD, CRC32, AES, atomics (LSE), etc.
+//! Each part of the register indicates the level of support for a certain feature, e.g.
+//! when ID_AA64ISAR0_EL1\[7:4\] is >= 1, AES is supported; when it's >= 2, PMULL is supported.
+//!
+//! For proper support of [SoCs where different cores have different capabilities](https://medium.com/@jadr2ddude/a-big-little-problem-a-tale-of-big-little-gone-wrong-e7778ce744bb),
+//! the OS has to always report only the features supported by all cores, like [FreeBSD does](https://reviews.freebsd.org/D17137#393947).
+//!
+//! References:
+//!
+//! - [Zircon implementation](https://fuchsia.googlesource.com/zircon/+/master/kernel/arch/arm64/feature.cpp)
+//! - [Linux documentation](https://www.kernel.org/doc/Documentation/arm64/cpu-feature-registers.txt)
+
+use crate::detect::{cache, Feature};
+
+/// Try to read the features from the system registers.
+///
+/// This will cause SIGILL if the current OS is not trapping the mrs instruction.
+pub(crate) fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+
+ {
+ let mut enable_feature = |f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // ID_AA64ISAR0_EL1 - Instruction Set Attribute Register 0
+ let aa64isar0: u64;
+ unsafe {
+ asm!("mrs $0, ID_AA64ISAR0_EL1" : "=r"(aa64isar0));
+ }
+
+ let aes = bits_shift(aa64isar0, 7, 4) >= 1;
+ let pmull = bits_shift(aa64isar0, 7, 4) >= 2;
+ let sha1 = bits_shift(aa64isar0, 11, 8) >= 1;
+ let sha2 = bits_shift(aa64isar0, 15, 12) >= 1;
+ enable_feature(Feature::pmull, pmull);
+ // Crypto is specified as AES + PMULL + SHA1 + SHA2 per LLVM/hosts.cpp
+ enable_feature(Feature::crypto, aes && pmull && sha1 && sha2);
+ enable_feature(Feature::lse, bits_shift(aa64isar0, 23, 20) >= 1);
+ enable_feature(Feature::crc, bits_shift(aa64isar0, 19, 16) >= 1);
+
+ // ID_AA64PFR0_EL1 - Processor Feature Register 0
+ let aa64pfr0: u64;
+ unsafe {
+ asm!("mrs $0, ID_AA64PFR0_EL1" : "=r"(aa64pfr0));
+ }
+
+ let fp = bits_shift(aa64pfr0, 19, 16) < 0xF;
+ let fphp = bits_shift(aa64pfr0, 19, 16) >= 1;
+ let asimd = bits_shift(aa64pfr0, 23, 20) < 0xF;
+ let asimdhp = bits_shift(aa64pfr0, 23, 20) >= 1;
+ enable_feature(Feature::fp, fp);
+ enable_feature(Feature::fp16, fphp);
+ // SIMD support requires float support - if half-floats are
+ // supported, it also requires half-float support:
+ enable_feature(Feature::asimd, fp && asimd && (!fphp | asimdhp));
+ // SIMD extensions require SIMD support:
+ enable_feature(Feature::rdm, asimd && bits_shift(aa64isar0, 31, 28) >= 1);
+ enable_feature(
+ Feature::dotprod,
+ asimd && bits_shift(aa64isar0, 47, 44) >= 1,
+ );
+ enable_feature(Feature::sve, asimd && bits_shift(aa64pfr0, 35, 32) >= 1);
+
+ // ID_AA64ISAR1_EL1 - Instruction Set Attribute Register 1
+ let aa64isar1: u64;
+ unsafe {
+ asm!("mrs $0, ID_AA64ISAR1_EL1" : "=r"(aa64isar1));
+ }
+
+ enable_feature(Feature::rcpc, bits_shift(aa64isar1, 23, 20) >= 1);
+ }
+
+ value
+}
+
+#[inline]
+fn bits_shift(x: u64, high: usize, low: usize) -> u64 {
+ (x >> low) & ((1 << (high - low + 1)) - 1)
+}
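As a small worked example (illustrative, not part of the diff), `bits_shift` extracts an inclusive bit range, so the AES field ID_AA64ISAR0_EL1[7:4] is read like this:

    // Illustrative only: a register value whose [7:4] field holds 2.
    let aa64isar0: u64 = 0x20;
    let aes_field = (aa64isar0 >> 4) & ((1 << (7 - 4 + 1)) - 1); // bits_shift(x, 7, 4)
    assert_eq!(aes_field, 2); // >= 1 means AES, >= 2 additionally means PMULL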
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/aarch64.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/aarch64.rs
new file mode 100644
index 000000000..97fe40f80
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/aarch64.rs
@@ -0,0 +1,28 @@
+//! Run-time feature detection for Aarch64 on FreeBSD.
+
+use super::super::aarch64::detect_features;
+use crate::detect::{cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn dump() {
+ println!("asimd: {:?}", is_aarch64_feature_detected!("asimd"));
+ println!("pmull: {:?}", is_aarch64_feature_detected!("pmull"));
+ println!("fp: {:?}", is_aarch64_feature_detected!("fp"));
+ println!("fp16: {:?}", is_aarch64_feature_detected!("fp16"));
+ println!("sve: {:?}", is_aarch64_feature_detected!("sve"));
+ println!("crc: {:?}", is_aarch64_feature_detected!("crc"));
+ println!("crypto: {:?}", is_aarch64_feature_detected!("crypto"));
+ println!("lse: {:?}", is_aarch64_feature_detected!("lse"));
+ println!("rdm: {:?}", is_aarch64_feature_detected!("rdm"));
+ println!("rcpc: {:?}", is_aarch64_feature_detected!("rcpc"));
+ println!("dotprod: {:?}", is_aarch64_feature_detected!("dotprod"));
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/arm.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/arm.rs
new file mode 100644
index 000000000..7aa040075
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/arm.rs
@@ -0,0 +1,27 @@
+//! Run-time feature detection for ARM on FreeBSD
+
+use super::auxvec;
+use crate::detect::{cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::neon, auxv.hwcap & 0x00001000 != 0);
+ enable_feature(&mut value, Feature::pmull, auxv.hwcap2 & 0x00000002 != 0);
+ return value;
+ }
+ value
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/auxvec.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/auxvec.rs
new file mode 100644
index 000000000..c595ec459
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/auxvec.rs
@@ -0,0 +1,94 @@
+//! Parses ELF auxiliary vectors.
+#![cfg_attr(any(target_arch = "arm", target_arch = "powerpc64"), allow(dead_code))]
+
+/// Key to access the CPU Hardware capabilities bitfield.
+pub(crate) const AT_HWCAP: usize = 25;
+/// Key to access the CPU Hardware capabilities 2 bitfield.
+pub(crate) const AT_HWCAP2: usize = 26;
+
+/// Cache HWCAP bitfields of the ELF Auxiliary Vector.
+///
+/// If an entry cannot be read all the bits in the bitfield are set to zero.
+/// This should be interpreted as all the features being disabled.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct AuxVec {
+ pub hwcap: usize,
+ pub hwcap2: usize,
+}
+
+/// ELF Auxiliary Vector
+///
+/// The auxiliary vector is a memory region in a running ELF program's stack
+/// composed of (key: usize, value: usize) pairs.
+///
+/// The keys used in the aux vector are platform dependent. For FreeBSD, they are
+/// defined in [sys/elf_common.h][elf_common_h]. The hardware capabilities of a given
+/// CPU can be queried with the `AT_HWCAP` and `AT_HWCAP2` keys.
+///
+/// Note that run-time feature detection is not invoked for features that can
+/// be detected at compile-time.
+///
+/// [elf_common_h]: https://svnweb.freebsd.org/base/release/12.0.0/sys/sys/elf_common.h?revision=341707
+pub(crate) fn auxv() -> Result<AuxVec, ()> {
+ if let Ok(hwcap) = archauxv(AT_HWCAP) {
+ if let Ok(hwcap2) = archauxv(AT_HWCAP2) {
+ if hwcap != 0 && hwcap2 != 0 {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ }
+ Err(())
+}
+
+/// Tries to read the `key` from the auxiliary vector.
+fn archauxv(key: usize) -> Result<usize, ()> {
+ use crate::mem;
+
+ #[derive(Copy, Clone)]
+ #[repr(C)]
+ pub struct Elf_Auxinfo {
+ pub a_type: usize,
+ pub a_un: unnamed,
+ }
+ #[derive(Copy, Clone)]
+ #[repr(C)]
+ pub union unnamed {
+ pub a_val: libc::c_long,
+ pub a_ptr: *mut libc::c_void,
+ pub a_fcn: Option<unsafe extern "C" fn() -> ()>,
+ }
+
+ let mut auxv: [Elf_Auxinfo; 27] = [Elf_Auxinfo {
+ a_type: 0,
+ a_un: unnamed { a_val: 0 },
+ }; 27];
+
+ let mut len: libc::c_uint = mem::size_of_val(&auxv) as libc::c_uint;
+
+ unsafe {
+ let mut mib = [
+ libc::CTL_KERN,
+ libc::KERN_PROC,
+ libc::KERN_PROC_AUXV,
+ libc::getpid(),
+ ];
+
+ let ret = libc::sysctl(
+ mib.as_mut_ptr(),
+ mib.len() as u32,
+ &mut auxv as *mut _ as *mut _,
+ &mut len as *mut _ as *mut _,
+ 0 as *mut libc::c_void,
+ 0,
+ );
+
+ if ret != -1 {
+ for i in 0..auxv.len() {
+ if auxv[i].a_type == key {
+ return Ok(auxv[i].a_un.a_val as usize);
+ }
+ }
+ }
+ }
+ return Ok(0);
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/mod.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/mod.rs
new file mode 100644
index 000000000..1a5338a35
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/mod.rs
@@ -0,0 +1,22 @@
+//! Run-time feature detection on FreeBSD
+
+mod auxvec;
+
+cfg_if! {
+ if #[cfg(target_arch = "aarch64")] {
+ mod aarch64;
+ pub use self::aarch64::check_for;
+ } else if #[cfg(target_arch = "arm")] {
+ mod arm;
+ pub use self::arm::check_for;
+ } else if #[cfg(target_arch = "powerpc64")] {
+ mod powerpc;
+ pub use self::powerpc::check_for;
+ } else {
+ use crate::arch::detect::Feature;
+ /// Performs run-time feature detection.
+ pub fn check_for(_x: Feature) -> bool {
+ false
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/powerpc.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/powerpc.rs
new file mode 100644
index 000000000..203e5cd7f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/freebsd/powerpc.rs
@@ -0,0 +1,27 @@
+//! Run-time feature detection for PowerPC on FreeBSD.
+
+use super::auxvec;
+use crate::detect::{cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::altivec, auxv.hwcap & 0x10000000 != 0);
+ enable_feature(&mut value, Feature::vsx, auxv.hwcap & 0x00000080 != 0);
+ enable_feature(&mut value, Feature::power8, auxv.hwcap2 & 0x80000000 != 0);
+ return value;
+ }
+ value
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/aarch64.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/aarch64.rs
new file mode 100644
index 000000000..8d874f228
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/aarch64.rs
@@ -0,0 +1,160 @@
+//! Run-time feature detection for Aarch64 on Linux.
+
+use super::{auxvec, cpuinfo};
+use crate::detect::{bit, cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from /proc/cpuinfo.
+fn detect_features() -> cache::Initializer {
+ if let Ok(auxv) = auxvec::auxv() {
+ let hwcap: AtHwcap = auxv.into();
+ return hwcap.cache();
+ }
+ if let Ok(c) = cpuinfo::CpuInfo::new() {
+ let hwcap: AtHwcap = c.into();
+ return hwcap.cache();
+ }
+ cache::Initializer::default()
+}
+
+/// These values are part of the platform-specific [asm/hwcap.h][hwcap].
+///
+/// [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
+struct AtHwcap {
+ fp: bool, // 0
+ asimd: bool, // 1
+ // evtstrm: bool, // 2
+ aes: bool, // 3
+ pmull: bool, // 4
+ sha1: bool, // 5
+ sha2: bool, // 6
+ crc32: bool, // 7
+ atomics: bool, // 8
+ fphp: bool, // 9
+ asimdhp: bool, // 10
+ // cpuid: bool, // 11
+ asimdrdm: bool, // 12
+ // jscvt: bool, // 13
+ // fcma: bool, // 14
+ lrcpc: bool, // 15
+ // dcpop: bool, // 16
+ // sha3: bool, // 17
+ // sm3: bool, // 18
+ // sm4: bool, // 19
+ asimddp: bool, // 20
+ // sha512: bool, // 21
+ sve: bool, // 22
+}
+
+impl From<auxvec::AuxVec> for AtHwcap {
+ /// Reads AtHwcap from the auxiliary vector.
+ fn from(auxv: auxvec::AuxVec) -> Self {
+ AtHwcap {
+ fp: bit::test(auxv.hwcap, 0),
+ asimd: bit::test(auxv.hwcap, 1),
+ // evtstrm: bit::test(auxv.hwcap, 2),
+ aes: bit::test(auxv.hwcap, 3),
+ pmull: bit::test(auxv.hwcap, 4),
+ sha1: bit::test(auxv.hwcap, 5),
+ sha2: bit::test(auxv.hwcap, 6),
+ crc32: bit::test(auxv.hwcap, 7),
+ atomics: bit::test(auxv.hwcap, 8),
+ fphp: bit::test(auxv.hwcap, 9),
+ asimdhp: bit::test(auxv.hwcap, 10),
+ // cpuid: bit::test(auxv.hwcap, 11),
+ asimdrdm: bit::test(auxv.hwcap, 12),
+ // jscvt: bit::test(auxv.hwcap, 13),
+ // fcma: bit::test(auxv.hwcap, 14),
+ lrcpc: bit::test(auxv.hwcap, 15),
+ // dcpop: bit::test(auxv.hwcap, 16),
+ // sha3: bit::test(auxv.hwcap, 17),
+ // sm3: bit::test(auxv.hwcap, 18),
+ // sm4: bit::test(auxv.hwcap, 19),
+ asimddp: bit::test(auxv.hwcap, 20),
+ // sha512: bit::test(auxv.hwcap, 21),
+ sve: bit::test(auxv.hwcap, 22),
+ }
+ }
+}
+
+impl From<cpuinfo::CpuInfo> for AtHwcap {
+ /// Reads AtHwcap from /proc/cpuinfo.
+ fn from(c: cpuinfo::CpuInfo) -> Self {
+ let f = &c.field("Features");
+ AtHwcap {
+ // 64-bit names. FIXME: In 32-bit compatibility mode /proc/cpuinfo will
+ // map some of the 64-bit names to some 32-bit feature names. This does not
+ // cover that yet.
+ fp: f.has("fp"),
+ asimd: f.has("asimd"),
+ // evtstrm: f.has("evtstrm"),
+ aes: f.has("aes"),
+ pmull: f.has("pmull"),
+ sha1: f.has("sha1"),
+ sha2: f.has("sha2"),
+ crc32: f.has("crc32"),
+ atomics: f.has("atomics"),
+ fphp: f.has("fphp"),
+ asimdhp: f.has("asimdhp"),
+ // cpuid: f.has("cpuid"),
+ asimdrdm: f.has("asimdrdm"),
+ // jscvt: f.has("jscvt"),
+ // fcma: f.has("fcma"),
+ lrcpc: f.has("lrcpc"),
+ // dcpop: f.has("dcpop"),
+ // sha3: f.has("sha3"),
+ // sm3: f.has("sm3"),
+ // sm4: f.has("sm4"),
+ asimddp: f.has("asimddp"),
+ // sha512: f.has("sha512"),
+ sve: f.has("sve"),
+ }
+ }
+}
+
+impl AtHwcap {
+ /// Initializes the cache from the feature bits.
+ ///
+ /// The features are enabled approximately like in LLVM host feature detection:
+ /// https://github.com/llvm-mirror/llvm/blob/master/lib/Support/Host.cpp#L1273
+ fn cache(self) -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ {
+ let mut enable_feature = |f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ enable_feature(Feature::fp, self.fp);
+ // Half-float support requires float support
+ enable_feature(Feature::fp16, self.fp && self.fphp);
+ enable_feature(Feature::pmull, self.pmull);
+ enable_feature(Feature::crc, self.crc32);
+ enable_feature(Feature::lse, self.atomics);
+ enable_feature(Feature::rcpc, self.lrcpc);
+
+ // SIMD support requires float support - if half-floats are
+ // supported, it also requires half-float support:
+ let asimd = self.fp && self.asimd && (!self.fphp | self.asimdhp);
+ enable_feature(Feature::asimd, asimd);
+ // SIMD extensions require SIMD support:
+ enable_feature(Feature::rdm, self.asimdrdm && asimd);
+ enable_feature(Feature::dotprod, self.asimddp && asimd);
+ enable_feature(Feature::sve, self.sve && asimd);
+
+ // Crypto is specified as AES + PMULL + SHA1 + SHA2 per LLVM/hosts.cpp
+ enable_feature(
+ Feature::crypto,
+ self.aes && self.pmull && self.sha1 && self.sha2,
+ );
+ }
+ value
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/arm.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/arm.rs
new file mode 100644
index 000000000..9c89500cc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/arm.rs
@@ -0,0 +1,52 @@
+//! Run-time feature detection for ARM on Linux.
+
+use super::{auxvec, cpuinfo};
+use crate::detect::{bit, cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from /proc/cpuinfo.
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // The values are part of the platform-specific [asm/hwcap.h][hwcap]
+ //
+ // [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
+ if let Ok(auxv) = auxvec::auxv() {
+ enable_feature(&mut value, Feature::neon, bit::test(auxv.hwcap, 12));
+ enable_feature(&mut value, Feature::pmull, bit::test(auxv.hwcap2, 1));
+ return value;
+ }
+
+ if let Ok(c) = cpuinfo::CpuInfo::new() {
+ enable_feature(
+ &mut value,
+ Feature::neon,
+ c.field("Features").has("neon") && !has_broken_neon(&c),
+ );
+ enable_feature(&mut value, Feature::pmull, c.field("Features").has("pmull"));
+ return value;
+ }
+ value
+}
+
+/// Is the CPU known to have a broken NEON unit?
+///
+/// See https://crbug.com/341598.
+fn has_broken_neon(cpuinfo: &cpuinfo::CpuInfo) -> bool {
+ cpuinfo.field("CPU implementer") == "0x51"
+ && cpuinfo.field("CPU architecture") == "7"
+ && cpuinfo.field("CPU variant") == "0x1"
+ && cpuinfo.field("CPU part") == "0x04d"
+ && cpuinfo.field("CPU revision") == "0"
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/auxvec.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/auxvec.rs
new file mode 100644
index 000000000..6ebae67fb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/auxvec.rs
@@ -0,0 +1,304 @@
+//! Parses ELF auxiliary vectors.
+#![cfg_attr(not(target_arch = "aarch64"), allow(dead_code))]
+
+#[cfg(feature = "std_detect_file_io")]
+use crate::{fs::File, io::Read};
+
+/// Key to access the CPU Hardware capabilities bitfield.
+pub(crate) const AT_HWCAP: usize = 16;
+/// Key to access the CPU Hardware capabilities 2 bitfield.
+#[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+pub(crate) const AT_HWCAP2: usize = 26;
+
+/// Cache HWCAP bitfields of the ELF Auxiliary Vector.
+///
+/// If an entry cannot be read all the bits in the bitfield are set to zero.
+/// This should be interpreted as all the features being disabled.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct AuxVec {
+ pub hwcap: usize,
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ pub hwcap2: usize,
+}
+
+/// ELF Auxiliary Vector
+///
+/// The auxiliary vector is a memory region in a running ELF program's stack
+/// composed of (key: usize, value: usize) pairs.
+///
+/// The keys used in the aux vector are platform dependent. For Linux, they are
+/// defined in [linux/auxvec.h][auxvec_h]. The hardware capabilities of a given
+/// CPU can be queried with the `AT_HWCAP` and `AT_HWCAP2` keys.
+///
+/// There is no perfect way of reading the auxiliary vector.
+///
+/// - If the `std_detect_dlsym_getauxval` cargo feature is enabled, this will use
+/// `getauxval` if it is linked to the binary, and otherwise proceed to a fallback implementation.
+/// When `std_detect_dlsym_getauxval` is disabled, this will assume that `getauxval` is
+/// linked to the binary - if that is not the case the behavior is undefined.
+/// - Otherwise, if the `std_detect_file_io` cargo feature is enabled, it will
+/// try to read `/proc/self/auxv`.
+/// - If that fails, this function returns an error.
+///
+/// Note that run-time feature detection is not invoked for features that can
+/// be detected at compile-time. Also note that if this function returns an
+/// error, cpuinfo still can (and will) be used to try to perform run-time
+/// feature detection on some platforms.
+///
+/// For more information about when `getauxval` is available check the great
+/// [`auxv` crate documentation][auxv_docs].
+///
+/// [auxvec_h]: https://github.com/torvalds/linux/blob/master/include/uapi/linux/auxvec.h
+/// [auxv_docs]: https://docs.rs/auxv/0.3.3/auxv/
+pub(crate) fn auxv() -> Result<AuxVec, ()> {
+ #[cfg(feature = "std_detect_dlsym_getauxval")]
+ {
+ // Try to call a dynamically-linked getauxval function.
+ if let Ok(hwcap) = getauxval(AT_HWCAP) {
+ // Targets with only AT_HWCAP:
+ #[cfg(any(target_arch = "aarch64", target_arch = "mips", target_arch = "mips64"))]
+ {
+ if hwcap != 0 {
+ return Ok(AuxVec { hwcap });
+ }
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ if let Ok(hwcap2) = getauxval(AT_HWCAP2) {
+ if hwcap != 0 && hwcap2 != 0 {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ }
+ drop(hwcap);
+ }
+ #[cfg(feature = "std_detect_file_io")]
+ {
+ // If calling getauxval fails, try to read the auxiliary vector from
+ // its file:
+ auxv_from_file("/proc/self/auxv")
+ }
+ #[cfg(not(feature = "std_detect_file_io"))]
+ {
+ Err(())
+ }
+ }
+
+ #[cfg(not(feature = "std_detect_dlsym_getauxval"))]
+ {
+ let hwcap = unsafe { ffi_getauxval(AT_HWCAP) };
+
+ // Targets with only AT_HWCAP:
+ #[cfg(any(target_arch = "aarch64", target_arch = "mips", target_arch = "mips64"))]
+ {
+ if hwcap != 0 {
+ return Ok(AuxVec { hwcap });
+ }
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ let hwcap2 = unsafe { ffi_getauxval(AT_HWCAP2) };
+ if hwcap != 0 && hwcap2 != 0 {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ }
+}
+
+/// Tries to read the `key` from the auxiliary vector by calling the
+/// dynamically-linked `getauxval` function. If the function is not linked,
+/// this function returns `Err`.
+#[cfg(feature = "std_detect_dlsym_getauxval")]
+fn getauxval(key: usize) -> Result<usize, ()> {
+ use libc;
+ pub type F = unsafe extern "C" fn(usize) -> usize;
+ unsafe {
+ let ptr = libc::dlsym(libc::RTLD_DEFAULT, "getauxval\0".as_ptr() as *const _);
+ if ptr.is_null() {
+ return Err(());
+ }
+
+ let ffi_getauxval: F = mem::transmute(ptr);
+ Ok(ffi_getauxval(key))
+ }
+}
+
+/// Tries to read the auxiliary vector from the `file`. If this fails, this
+/// function returns `Err`.
+#[cfg(feature = "std_detect_file_io")]
+fn auxv_from_file(file: &str) -> Result<AuxVec, ()> {
+ let mut file = File::open(file).map_err(|_| ())?;
+
+ // See <https://github.com/torvalds/linux/blob/v3.19/include/uapi/linux/auxvec.h>.
+ //
+ // The auxiliary vector contains at most 32 (key,value) fields: from
+ // `AT_EXECFN = 31` to `AT_NULL = 0`. That is, a buffer of
+ // 2*32 `usize` elements is enough to read the whole vector.
+ let mut buf = [0_usize; 64];
+ {
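+ // View the `usize` buffer as raw bytes so the file contents can be
+ // read directly into it.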
+ let raw: &mut [u8; 64 * mem::size_of::<usize>()] = unsafe { mem::transmute(&mut buf) };
+ file.read(raw).map_err(|_| ())?;
+ }
+ auxv_from_buf(&buf)
+}
+
+/// Tries to interpret the `buffer` as an auxiliary vector. If that fails, this
+/// function returns `Err`.
+#[cfg(feature = "std_detect_file_io")]
+fn auxv_from_buf(buf: &[usize; 64]) -> Result<AuxVec, ()> {
+ // Targets with only AT_HWCAP:
+ #[cfg(any(target_arch = "aarch64", target_arch = "mips", target_arch = "mips64"))]
+ {
+ for el in buf.chunks(2) {
+ match el[0] {
+ AT_HWCAP => return Ok(AuxVec { hwcap: el[1] }),
+ _ => (),
+ }
+ }
+ }
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ let mut hwcap = None;
+ let mut hwcap2 = None;
+ for el in buf.chunks(2) {
+ match el[0] {
+ AT_HWCAP => hwcap = Some(el[1]),
+ AT_HWCAP2 => hwcap2 = Some(el[1]),
+ _ => (),
+ }
+ }
+
+ if let (Some(hwcap), Some(hwcap2)) = (hwcap, hwcap2) {
+ return Ok(AuxVec { hwcap, hwcap2 });
+ }
+ }
+ drop(buf);
+ Err(())
+}
+
+#[cfg(test)]
+mod tests {
+ extern crate auxv as auxv_crate;
+ use super::*;
+
+ // Reads the Auxiliary Vector key from /proc/self/auxv
+ // using the auxv crate.
+ #[cfg(feature = "std_detect_file_io")]
+ fn auxv_crate_getprocfs(key: usize) -> Option<usize> {
+ use self::auxv_crate::procfs::search_procfs_auxv;
+ use self::auxv_crate::AuxvType;
+ let k = key as AuxvType;
+ match search_procfs_auxv(&[k]) {
+ Ok(v) => Some(v[&k] as usize),
+ Err(_) => None,
+ }
+ }
+
+ // Reads the Auxiliary Vector key from getauxval()
+ // using the auxv crate.
+ #[cfg(not(any(target_arch = "mips", target_arch = "mips64")))]
+ fn auxv_crate_getauxval(key: usize) -> Option<usize> {
+ use self::auxv_crate::getauxval::Getauxval;
+ use self::auxv_crate::AuxvType;
+ let q = auxv_crate::getauxval::NativeGetauxval {};
+ match q.getauxval(key as AuxvType) {
+ Ok(v) => Some(v as usize),
+ Err(_) => None,
+ }
+ }
+
+ // FIXME: on mips/mips64 getauxval returns 0, and /proc/self/auxv
+ // does not always contain the AT_HWCAP key under qemu.
+ #[cfg(not(any(target_arch = "mips", target_arch = "mips64", target_arch = "powerpc")))]
+ #[test]
+ fn auxv_crate() {
+ let v = auxv();
+ if let Some(hwcap) = auxv_crate_getauxval(AT_HWCAP) {
+ let rt_hwcap = v.expect("failed to find hwcap key").hwcap;
+ assert_eq!(rt_hwcap, hwcap);
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ if let Some(hwcap2) = auxv_crate_getauxval(AT_HWCAP2) {
+ let rt_hwcap2 = v.expect("failed to find hwcap2 key").hwcap2;
+ assert_eq!(rt_hwcap2, hwcap2);
+ }
+ }
+ }
+
+ #[test]
+ fn auxv_dump() {
+ if let Ok(auxvec) = auxv() {
+ println!("{:?}", auxvec);
+ } else {
+ println!("both getauxval() and reading /proc/self/auxv failed!");
+ }
+ }
+
+ #[cfg(feature = "std_detect_file_io")]
+ cfg_if! {
+ if #[cfg(target_arch = "arm")] {
+ #[test]
+ fn linux_rpi3() {
+ let file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/detect/test_data/linux-rpi3.auxv");
+ println!("file: {}", file);
+ let v = auxv_from_file(file).unwrap();
+ assert_eq!(v.hwcap, 4174038);
+ assert_eq!(v.hwcap2, 16);
+ }
+
+ #[test]
+ #[should_panic]
+ fn linux_macos_vb() {
+ let file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/detect/test_data/macos-virtualbox-linux-x86-4850HQ.auxv");
+ println!("file: {}", file);
+ let v = auxv_from_file(file).unwrap();
+ // this file is incomplete (contains hwcap but not hwcap2), we
+ // want to fall back to /proc/cpuinfo in this case, so
+ // reading should fail. assert_eq!(v.hwcap, 126614527);
+ // assert_eq!(v.hwcap2, 0);
+ }
+ } else if #[cfg(target_arch = "aarch64")] {
+ #[test]
+ fn linux_x64() {
+ let file = concat!(env!("CARGO_MANIFEST_DIR"), "/src/detect/test_data/linux-x64-i7-6850k.auxv");
+ println!("file: {}", file);
+ let v = auxv_from_file(file).unwrap();
+ assert_eq!(v.hwcap, 3219913727);
+ }
+ }
+ }
+
+ #[test]
+ #[cfg(feature = "std_detect_file_io")]
+ fn auxv_dump_procfs() {
+ if let Ok(auxvec) = auxv_from_file("/proc/self/auxv") {
+ println!("{:?}", auxvec);
+ } else {
+ println!("reading /proc/self/auxv failed!");
+ }
+ }
+
+ #[test]
+ fn auxv_crate_procfs() {
+ let v = auxv();
+ if let Some(hwcap) = auxv_crate_getprocfs(AT_HWCAP) {
+ assert_eq!(v.unwrap().hwcap, hwcap);
+ }
+
+ // Targets with AT_HWCAP and AT_HWCAP2:
+ #[cfg(any(target_arch = "arm", target_arch = "powerpc64"))]
+ {
+ if let Some(hwcap2) = auxv_crate_getprocfs(AT_HWCAP2) {
+ assert_eq!(v.unwrap().hwcap2, hwcap2);
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/cpuinfo.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/cpuinfo.rs
new file mode 100644
index 000000000..f76c48a4b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/cpuinfo.rs
@@ -0,0 +1,300 @@
+//! Parses /proc/cpuinfo
+#![cfg_attr(not(target_arch = "arm"), allow(dead_code))]
+
+extern crate std;
+use self::std::{fs::File, io, io::Read, prelude::v1::*};
+
+/// cpuinfo
+pub(crate) struct CpuInfo {
+ raw: String,
+}
+
+impl CpuInfo {
+ /// Reads /proc/cpuinfo into CpuInfo.
+ pub(crate) fn new() -> Result<Self, io::Error> {
+ let mut file = File::open("/proc/cpuinfo")?;
+ let mut cpui = Self { raw: String::new() };
+ file.read_to_string(&mut cpui.raw)?;
+ Ok(cpui)
+ }
+ /// Returns the value of the cpuinfo `field`.
+ pub(crate) fn field(&self, field: &str) -> CpuInfoField {
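+ // cpuinfo lines look like `flags : fpu vme ...`; return the text after
+ // the first ": " of the first line whose key starts with `field`.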
+ for l in self.raw.lines() {
+ if l.trim().starts_with(field) {
+ return CpuInfoField::new(l.split(": ").nth(1));
+ }
+ }
+ CpuInfoField(None)
+ }
+
+ /// Returns the `raw` contents of `/proc/cpuinfo`
+ #[cfg(test)]
+ fn raw(&self) -> &String {
+ &self.raw
+ }
+
+ #[cfg(test)]
+ fn from_str(other: &str) -> Result<Self, ::std::io::Error> {
+ Ok(Self {
+ raw: String::from(other),
+ })
+ }
+}
+
+/// Field of cpuinfo
+#[derive(Debug)]
+pub(crate) struct CpuInfoField<'a>(Option<&'a str>);
+
+impl<'a> PartialEq<&'a str> for CpuInfoField<'a> {
+ fn eq(&self, other: &&'a str) -> bool {
+ match self.0 {
+ None => other.is_empty(),
+ Some(f) => f == other.trim(),
+ }
+ }
+}
+
+impl<'a> CpuInfoField<'a> {
+ pub(crate) fn new<'b>(v: Option<&'b str>) -> CpuInfoField<'b> {
+ match v {
+ None => CpuInfoField::<'b>(None),
+ Some(f) => CpuInfoField::<'b>(Some(f.trim())),
+ }
+ }
+ /// Does the field exist?
+ #[cfg(test)]
+ pub(crate) fn exists(&self) -> bool {
+ self.0.is_some()
+ }
+ /// Does the field contain `other`?
+ pub(crate) fn has(&self, other: &str) -> bool {
+ match self.0 {
+ None => other.is_empty(),
+ Some(f) => {
+ let other = other.trim();
+ for v in f.split(' ') {
+ if v == other {
+ return true;
+ }
+ }
+ false
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn raw_dump() {
+ let cpuinfo = CpuInfo::new().unwrap();
+ if cpuinfo.field("vendor_id") == "GenuineIntel" {
+ assert!(cpuinfo.field("flags").exists());
+ assert!(!cpuinfo.field("vendor33_id").exists());
+ assert!(cpuinfo.field("flags").has("sse"));
+ assert!(!cpuinfo.field("flags").has("avx314"));
+ }
+ println!("{}", cpuinfo.raw());
+ }
+
+ const CORE_DUO_T6500: &str = r"processor : 0
+vendor_id : GenuineIntel
+cpu family : 6
+model : 23
+model name : Intel(R) Core(TM)2 Duo CPU T6500 @ 2.10GHz
+stepping : 10
+microcode : 0xa0b
+cpu MHz : 1600.000
+cache size : 2048 KB
+physical id : 0
+siblings : 2
+core id : 0
+cpu cores : 2
+apicid : 0
+initial apicid : 0
+fdiv_bug : no
+hlt_bug : no
+f00f_bug : no
+coma_bug : no
+fpu : yes
+fpu_exception : yes
+cpuid level : 13
+wp : yes
+flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe nx lm constant_tsc arch_perfmon pebs bts aperfmperf pni dtes64 monitor ds_cpl est tm2 ssse3 cx16 xtpr pdcm sse4_1 xsave lahf_lm dtherm
+bogomips : 4190.43
+clflush size : 64
+cache_alignment : 64
+address sizes : 36 bits physical, 48 bits virtual
+power management:
+";
+
+ #[test]
+ fn core_duo_t6500() {
+ let cpuinfo = CpuInfo::from_str(CORE_DUO_T6500).unwrap();
+ assert_eq!(cpuinfo.field("vendor_id"), "GenuineIntel");
+ assert_eq!(cpuinfo.field("cpu family"), "6");
+ assert_eq!(cpuinfo.field("model"), "23");
+ assert_eq!(
+ cpuinfo.field("model name"),
+ "Intel(R) Core(TM)2 Duo CPU T6500 @ 2.10GHz"
+ );
+ assert_eq!(
+ cpuinfo.field("flags"),
+ "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe nx lm constant_tsc arch_perfmon pebs bts aperfmperf pni dtes64 monitor ds_cpl est tm2 ssse3 cx16 xtpr pdcm sse4_1 xsave lahf_lm dtherm"
+ );
+ assert!(cpuinfo.field("flags").has("fpu"));
+ assert!(cpuinfo.field("flags").has("dtherm"));
+ assert!(cpuinfo.field("flags").has("sse2"));
+ assert!(!cpuinfo.field("flags").has("avx"));
+ }
+
+ const ARM_CORTEX_A53: &str = r"Processor : AArch64 Processor rev 3 (aarch64)
+ processor : 0
+ processor : 1
+ processor : 2
+ processor : 3
+ processor : 4
+ processor : 5
+ processor : 6
+ processor : 7
+ Features : fp asimd evtstrm aes pmull sha1 sha2 crc32
+ CPU implementer : 0x41
+ CPU architecture: AArch64
+ CPU variant : 0x0
+ CPU part : 0xd03
+ CPU revision : 3
+
+ Hardware : HiKey Development Board
+ ";
+
+ #[test]
+ fn arm_cortex_a53() {
+ let cpuinfo = CpuInfo::from_str(ARM_CORTEX_A53).unwrap();
+ assert_eq!(
+ cpuinfo.field("Processor"),
+ "AArch64 Processor rev 3 (aarch64)"
+ );
+ assert_eq!(
+ cpuinfo.field("Features"),
+ "fp asimd evtstrm aes pmull sha1 sha2 crc32"
+ );
+ assert!(cpuinfo.field("Features").has("pmull"));
+ assert!(!cpuinfo.field("Features").has("neon"));
+ assert!(cpuinfo.field("Features").has("asimd"));
+ }
+
+ const ARM_CORTEX_A57: &str = r"Processor : Cortex A57 Processor rev 1 (aarch64)
+processor : 0
+processor : 1
+processor : 2
+processor : 3
+Features : fp asimd aes pmull sha1 sha2 crc32 wp half thumb fastmult vfp edsp neon vfpv3 tlsi vfpv4 idiva idivt
+CPU implementer : 0x41
+CPU architecture: 8
+CPU variant : 0x1
+CPU part : 0xd07
+CPU revision : 1";
+
+ #[test]
+ fn arm_cortex_a57() {
+ let cpuinfo = CpuInfo::from_str(ARM_CORTEX_A57).unwrap();
+ assert_eq!(
+ cpuinfo.field("Processor"),
+ "Cortex A57 Processor rev 1 (aarch64)"
+ );
+ assert_eq!(
+ cpuinfo.field("Features"),
+ "fp asimd aes pmull sha1 sha2 crc32 wp half thumb fastmult vfp edsp neon vfpv3 tlsi vfpv4 idiva idivt"
+ );
+ assert!(cpuinfo.field("Features").has("pmull"));
+ assert!(cpuinfo.field("Features").has("neon"));
+ assert!(cpuinfo.field("Features").has("asimd"));
+ }
+
+ const POWER8E_POWERKVM: &str = r"processor : 0
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+processor : 1
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+processor : 2
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+processor : 3
+cpu : POWER8E (raw), altivec supported
+clock : 3425.000000MHz
+revision : 2.1 (pvr 004b 0201)
+
+timebase : 512000000
+platform : pSeries
+model : IBM pSeries (emulated by qemu)
+machine : CHRP IBM pSeries (emulated by qemu)";
+
+ #[test]
+ fn power8_powerkvm() {
+ let cpuinfo = CpuInfo::from_str(POWER8E_POWERKVM).unwrap();
+ assert_eq!(cpuinfo.field("cpu"), "POWER8E (raw), altivec supported");
+
+ assert!(cpuinfo.field("cpu").has("altivec"));
+ }
+
+ const POWER5P: &str = r"processor : 0
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 1
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 2
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 3
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 4
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 5
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 6
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+processor : 7
+cpu : POWER5+ (gs)
+clock : 1900.098000MHz
+revision : 2.1 (pvr 003b 0201)
+
+timebase : 237331000
+platform : pSeries
+machine : CHRP IBM,9133-55A";
+
+ #[test]
+ fn power5p() {
+ let cpuinfo = CpuInfo::from_str(POWER5P).unwrap();
+ assert_eq!(cpuinfo.field("cpu"), "POWER5+ (gs)");
+
+ assert!(!cpuinfo.field("cpu").has("altivec"));
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mips.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mips.rs
new file mode 100644
index 000000000..46a47fb7b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mips.rs
@@ -0,0 +1,31 @@
+//! Run-time feature detection for MIPS on Linux.
+
+use super::auxvec;
+use crate::detect::{bit, cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from `/proc/cpuinfo`.
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // The values are part of the platform-specific [asm/hwcap.h][hwcap]
+ //
+ // [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h
+ if let Ok(auxv) = auxvec::auxv() {
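+ // Bit 1 of HWCAP is expected to be HWCAP_MIPS_MSA (see the MIPS
+ // asm/hwcap.h in the kernel sources).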
+ enable_feature(&mut value, Feature::msa, bit::test(auxv.hwcap, 1));
+ return value;
+ }
+ // TODO: fall back via `cpuinfo`.
+ value
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mod.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mod.rs
new file mode 100644
index 000000000..e02d5e6dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/mod.rs
@@ -0,0 +1,28 @@
+//! Run-time feature detection on Linux
+
+mod auxvec;
+
+#[cfg(feature = "std_detect_file_io")]
+mod cpuinfo;
+
+cfg_if! {
+ if #[cfg(target_arch = "aarch64")] {
+ mod aarch64;
+ pub use self::aarch64::check_for;
+ } else if #[cfg(target_arch = "arm")] {
+ mod arm;
+ pub use self::arm::check_for;
+ } else if #[cfg(any(target_arch = "mips", target_arch = "mips64"))] {
+ mod mips;
+ pub use self::mips::check_for;
+ } else if #[cfg(any(target_arch = "powerpc", target_arch = "powerpc64"))] {
+ mod powerpc;
+ pub use self::powerpc::check_for;
+ } else {
+ use crate::detect::Feature;
+ /// Performs run-time feature detection.
+ pub fn check_for(_x: Feature) -> bool {
+ false
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/powerpc.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/powerpc.rs
new file mode 100644
index 000000000..dc19bc8ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/linux/powerpc.rs
@@ -0,0 +1,41 @@
+//! Run-time feature detection for PowerPC on Linux.
+
+use super::{auxvec, cpuinfo};
+use crate::detect::{cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Try to read the features from the auxiliary vector, and if that fails, try
+/// to read them from /proc/cpuinfo.
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+ let enable_feature = |value: &mut cache::Initializer, f, enable| {
+ if enable {
+ value.set(f as u32);
+ }
+ };
+
+ // The values are part of the platform-specific [asm/cputable.h][cputable]
+ //
+ // [cputable]: https://github.com/torvalds/linux/blob/master/arch/powerpc/include/uapi/asm/cputable.h
+ if let Ok(auxv) = auxvec::auxv() {
+ // Note: the PowerPC values are masks to test against (instead of the
+ // index of the bit to test, as on ARM and AArch64).
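+ // The masks below are believed to correspond to PPC_FEATURE_HAS_ALTIVEC,
+ // PPC_FEATURE_HAS_VSX, and PPC_FEATURE2_ARCH_2_07 in cputable.h.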
+ enable_feature(&mut value, Feature::altivec, auxv.hwcap & 0x10000000 != 0);
+ enable_feature(&mut value, Feature::vsx, auxv.hwcap & 0x00000080 != 0);
+ enable_feature(&mut value, Feature::power8, auxv.hwcap2 & 0x80000000 != 0);
+ return value;
+ }
+
+ // PowerPC's /proc/cpuinfo lacks a proper Feature field,
+ // but `altivec` support is indicated in the `cpu` field.
+ if let Ok(c) = cpuinfo::CpuInfo::new() {
+ enable_feature(&mut value, Feature::altivec, c.field("cpu").has("altivec"));
+ return value;
+ }
+ value
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/other.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/other.rs
new file mode 100644
index 000000000..23e399ea7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/other.rs
@@ -0,0 +1,9 @@
+//! Other operating systems
+
+use crate::detect::Feature;
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(_x: Feature) -> bool {
+ false
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/detect/os/x86.rs b/src/tools/rustfmt/tests/target/cfg_if/detect/os/x86.rs
new file mode 100644
index 000000000..2e228aa37
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/detect/os/x86.rs
@@ -0,0 +1,367 @@
+//! x86 run-time feature detection is OS independent.
+
+#[cfg(target_arch = "x86")]
+use crate::arch::x86::*;
+#[cfg(target_arch = "x86_64")]
+use crate::arch::x86_64::*;
+
+use crate::mem;
+
+use crate::detect::{bit, cache, Feature};
+
+/// Performs run-time feature detection.
+#[inline]
+pub fn check_for(x: Feature) -> bool {
+ cache::test(x as u32, detect_features)
+}
+
+/// Run-time feature detection on x86 works by using the CPUID instruction.
+///
+/// The [CPUID Wikipedia page][wiki_cpuid] contains
+/// all the information about which flags to set to query which values, and in
+/// which registers these are reported.
+///
+/// The definitive references are:
+/// - [Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2:
+/// Instruction Set Reference, A-Z][intel64_ref].
+/// - [AMD64 Architecture Programmer's Manual, Volume 3: General-Purpose and
+/// System Instructions][amd64_ref].
+///
+/// [wiki_cpuid]: https://en.wikipedia.org/wiki/CPUID
+/// [intel64_ref]: http://www.intel.de/content/dam/www/public/us/en/documents/manuals/64-ia-32-architectures-software-developer-instruction-set-reference-manual-325383.pdf
+/// [amd64_ref]: http://support.amd.com/TechDocs/24594.pdf
+#[allow(clippy::similar_names)]
+fn detect_features() -> cache::Initializer {
+ let mut value = cache::Initializer::default();
+
+ // If the x86 CPU does not support the CPUID instruction then it is too
+ // old to support any of the currently-detectable features.
+ if !has_cpuid() {
+ return value;
+ }
+
+ // Calling `__cpuid`/`__cpuid_count` from here on is safe because the CPU
+ // has `cpuid` support.
+
+ // 0. EAX = 0: Basic Information:
+ // - EAX returns the "Highest Function Parameter", that is, the maximum
+ // leaf value for subsequent calls of `cpuid` in range [0,
+ // 0x8000_0000].
+ // - The vendor ID is stored in 12 u8 ASCII chars,
+ // returned in EBX, EDX, and ECX (in that order):
+ let (max_basic_leaf, vendor_id) = unsafe {
+ let CpuidResult {
+ eax: max_basic_leaf,
+ ebx,
+ ecx,
+ edx,
+ } = __cpuid(0);
+ let vendor_id: [[u8; 4]; 3] = [
+ mem::transmute(ebx),
+ mem::transmute(edx),
+ mem::transmute(ecx),
+ ];
+ let vendor_id: [u8; 12] = mem::transmute(vendor_id);
+ (max_basic_leaf, vendor_id)
+ };
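+ // `vendor_id` now holds the 12 ASCII bytes of the vendor string,
+ // e.g. b"GenuineIntel" or b"AuthenticAMD".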
+
+ if max_basic_leaf < 1 {
+ // Earlier Intel 486, CPUID not implemented
+ return value;
+ }
+
+ // EAX = 1, ECX = 0: Queries "Processor Info and Feature Bits";
+ // Contains information about most x86 features.
+ let CpuidResult {
+ ecx: proc_info_ecx,
+ edx: proc_info_edx,
+ ..
+ } = unsafe { __cpuid(0x0000_0001_u32) };
+
+ // EAX = 7, ECX = 0: Queries "Extended Features";
+ // Contains information about bmi, bmi2, and avx2 support.
+ let (extended_features_ebx, extended_features_ecx) = if max_basic_leaf >= 7 {
+ let CpuidResult { ebx, ecx, .. } = unsafe { __cpuid(0x0000_0007_u32) };
+ (ebx, ecx)
+ } else {
+ (0, 0) // CPUID does not support "Extended Features"
+ };
+
+ // EAX = 0x8000_0000, ECX = 0: Get Highest Extended Function Supported
+ // - EAX returns the max leaf value for extended information, that is,
+ // `cpuid` calls in range [0x8000_0000; u32::MAX]:
+ let CpuidResult {
+ eax: extended_max_basic_leaf,
+ ..
+ } = unsafe { __cpuid(0x8000_0000_u32) };
+
+ // EAX = 0x8000_0001, ECX=0: Queries "Extended Processor Info and Feature
+ // Bits"
+ let extended_proc_info_ecx = if extended_max_basic_leaf >= 1 {
+ let CpuidResult { ecx, .. } = unsafe { __cpuid(0x8000_0001_u32) };
+ ecx
+ } else {
+ 0
+ };
+
+ {
+ // borrows value till the end of this scope:
+ let mut enable = |r, rb, f| {
+ if bit::test(r as usize, rb) {
+ value.set(f as u32);
+ }
+ };
+
+ enable(proc_info_ecx, 0, Feature::sse3);
+ enable(proc_info_ecx, 1, Feature::pclmulqdq);
+ enable(proc_info_ecx, 9, Feature::ssse3);
+ enable(proc_info_ecx, 13, Feature::cmpxchg16b);
+ enable(proc_info_ecx, 19, Feature::sse4_1);
+ enable(proc_info_ecx, 20, Feature::sse4_2);
+ enable(proc_info_ecx, 23, Feature::popcnt);
+ enable(proc_info_ecx, 25, Feature::aes);
+ enable(proc_info_ecx, 29, Feature::f16c);
+ enable(proc_info_ecx, 30, Feature::rdrand);
+ enable(extended_features_ebx, 18, Feature::rdseed);
+ enable(extended_features_ebx, 19, Feature::adx);
+ enable(extended_features_ebx, 11, Feature::rtm);
+ enable(proc_info_edx, 4, Feature::tsc);
+ enable(proc_info_edx, 23, Feature::mmx);
+ enable(proc_info_edx, 24, Feature::fxsr);
+ enable(proc_info_edx, 25, Feature::sse);
+ enable(proc_info_edx, 26, Feature::sse2);
+ enable(extended_features_ebx, 29, Feature::sha);
+
+ enable(extended_features_ebx, 3, Feature::bmi);
+ enable(extended_features_ebx, 8, Feature::bmi2);
+
+ // `XSAVE` and `AVX` support:
+ let cpu_xsave = bit::test(proc_info_ecx as usize, 26);
+ if cpu_xsave {
+ // 0. Here the CPU supports `XSAVE`.
+
+ // 1. Detect `OSXSAVE`, that is, whether the OS is AVX enabled and
+ // supports saving the state of the AVX/AVX2 vector registers on
+ // context-switches, see:
+ //
+ // - [intel: is avx enabled?][is_avx_enabled],
+ // - [mozilla: sse.cpp][mozilla_sse_cpp].
+ //
+ // [is_avx_enabled]: https://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled
+ // [mozilla_sse_cpp]: https://hg.mozilla.org/mozilla-central/file/64bab5cbb9b6/mozglue/build/SSE.cpp#l190
+ let cpu_osxsave = bit::test(proc_info_ecx as usize, 27);
+
+ if cpu_osxsave {
+ // 2. The OS must have signaled the CPU that it supports saving and
+ // restoring the:
+ //
+ // * SSE -> `XCR0.SSE[1]`
+ // * AVX -> `XCR0.AVX[2]`
+ // * AVX-512 -> `XCR0.AVX-512[7:5]`.
+ //
+ // by setting the corresponding bits of `XCR0` to `1`.
+ //
+ // This is safe because the CPU supports `xsave`
+ // and the OS has set `osxsave`.
+ let xcr0 = unsafe { _xgetbv(0) };
+ // Test `XCR0.SSE[1]` and `XCR0.AVX[2]` with the mask `0b110 == 6`:
+ let os_avx_support = xcr0 & 6 == 6;
+ // Test `XCR0.AVX-512[7:5]` with the mask `0b1110_0000 == 224`:
+ let os_avx512_support = xcr0 & 224 == 224;
+
+ // Only if both the OS and the CPU support saving/restoring the AVX
+ // registers do we enable `xsave` support:
+ if os_avx_support {
+ // See "13.3 ENABLING THE XSAVE FEATURE SET AND XSAVE-ENABLED
+ // FEATURES" in the "Intel® 64 and IA-32 Architectures Software
+ // Developer’s Manual, Volume 1: Basic Architecture":
+ //
+ // "Software enables the XSAVE feature set by setting
+ // CR4.OSXSAVE[bit 18] to 1 (e.g., with the MOV to CR4
+ // instruction). If this bit is 0, execution of any of XGETBV,
+ // XRSTOR, XRSTORS, XSAVE, XSAVEC, XSAVEOPT, XSAVES, and XSETBV
+ // causes an invalid-opcode exception (#UD)"
+ //
+ enable(proc_info_ecx, 26, Feature::xsave);
+
+ // For `xsaveopt`, `xsavec`, and `xsaves` we need to query:
+ // Processor Extended State Enumeration Sub-leaf (EAX = 0DH,
+ // ECX = 1):
+ if max_basic_leaf >= 0xd {
+ let CpuidResult {
+ eax: proc_extended_state1_eax,
+ ..
+ } = unsafe { __cpuid_count(0xd_u32, 1) };
+ enable(proc_extended_state1_eax, 0, Feature::xsaveopt);
+ enable(proc_extended_state1_eax, 1, Feature::xsavec);
+ enable(proc_extended_state1_eax, 3, Feature::xsaves);
+ }
+
+ // FMA (uses 256-bit wide registers):
+ enable(proc_info_ecx, 12, Feature::fma);
+
+ // And AVX/AVX2:
+ enable(proc_info_ecx, 28, Feature::avx);
+ enable(extended_features_ebx, 5, Feature::avx2);
+
+ // For AVX-512 the OS also needs to support saving/restoring
+ // the extended state; only then do we enable AVX-512 support:
+ if os_avx512_support {
+ enable(extended_features_ebx, 16, Feature::avx512f);
+ enable(extended_features_ebx, 17, Feature::avx512dq);
+ enable(extended_features_ebx, 21, Feature::avx512_ifma);
+ enable(extended_features_ebx, 26, Feature::avx512pf);
+ enable(extended_features_ebx, 27, Feature::avx512er);
+ enable(extended_features_ebx, 28, Feature::avx512cd);
+ enable(extended_features_ebx, 30, Feature::avx512bw);
+ enable(extended_features_ebx, 31, Feature::avx512vl);
+ enable(extended_features_ecx, 1, Feature::avx512_vbmi);
+ enable(extended_features_ecx, 14, Feature::avx512_vpopcntdq);
+ }
+ }
+ }
+ }
+
+ // This detects ABM on AMD CPUs and LZCNT on Intel CPUs.
+ // On Intel CPUs with popcnt, lzcnt implements the
+ // "missing part" of ABM, so we map both to the same
+ // internal feature.
+ //
+ // The `is_x86_feature_detected!("lzcnt")` macro then
+ // internally maps to Feature::abm.
+ enable(extended_proc_info_ecx, 5, Feature::abm);
+ // Hygon Dhyana originates from AMD technology and shares most of the architecture with
+ // AMD's family 17h, but has a different CPU vendor ID ("HygonGenuine") and family series
+ // number (family 18h).
+ //
+ // For CPUID feature bits, Hygon Dhyana (family 18h) shares the same definitions as AMD
+ // family 17h.
+ //
+ // Related AMD CPUID specification is https://www.amd.com/system/files/TechDocs/25481.pdf.
+ // Related Hygon kernel patch can be found on
+ // http://lkml.kernel.org/r/5ce86123a7b9dad925ac583d88d2f921040e859b.1538583282.git.puwen@hygon.cn
+ if vendor_id == *b"AuthenticAMD" || vendor_id == *b"HygonGenuine" {
+ // These features are available on AMD arch CPUs:
+ enable(extended_proc_info_ecx, 6, Feature::sse4a);
+ enable(extended_proc_info_ecx, 21, Feature::tbm);
+ }
+ }
+
+ value
+}
+
+#[cfg(test)]
+mod tests {
+ extern crate cupid;
+
+ #[test]
+ fn dump() {
+ println!("aes: {:?}", is_x86_feature_detected!("aes"));
+ println!("pclmulqdq: {:?}", is_x86_feature_detected!("pclmulqdq"));
+ println!("rdrand: {:?}", is_x86_feature_detected!("rdrand"));
+ println!("rdseed: {:?}", is_x86_feature_detected!("rdseed"));
+ println!("tsc: {:?}", is_x86_feature_detected!("tsc"));
+ println!("sse: {:?}", is_x86_feature_detected!("sse"));
+ println!("sse2: {:?}", is_x86_feature_detected!("sse2"));
+ println!("sse3: {:?}", is_x86_feature_detected!("sse3"));
+ println!("ssse3: {:?}", is_x86_feature_detected!("ssse3"));
+ println!("sse4.1: {:?}", is_x86_feature_detected!("sse4.1"));
+ println!("sse4.2: {:?}", is_x86_feature_detected!("sse4.2"));
+ println!("sse4a: {:?}", is_x86_feature_detected!("sse4a"));
+ println!("sha: {:?}", is_x86_feature_detected!("sha"));
+ println!("avx: {:?}", is_x86_feature_detected!("avx"));
+ println!("avx2: {:?}", is_x86_feature_detected!("avx2"));
+ println!("avx512f {:?}", is_x86_feature_detected!("avx512f"));
+ println!("avx512cd {:?}", is_x86_feature_detected!("avx512cd"));
+ println!("avx512er {:?}", is_x86_feature_detected!("avx512er"));
+ println!("avx512pf {:?}", is_x86_feature_detected!("avx512pf"));
+ println!("avx512bw {:?}", is_x86_feature_detected!("avx512bw"));
+ println!("avx512dq {:?}", is_x86_feature_detected!("avx512dq"));
+ println!("avx512vl {:?}", is_x86_feature_detected!("avx512vl"));
+ println!("avx512_ifma {:?}", is_x86_feature_detected!("avx512ifma"));
+ println!("avx512_vbmi {:?}", is_x86_feature_detected!("avx512vbmi"));
+ println!(
+ "avx512_vpopcntdq {:?}",
+ is_x86_feature_detected!("avx512vpopcntdq")
+ );
+ println!("fma: {:?}", is_x86_feature_detected!("fma"));
+ println!("abm: {:?}", is_x86_feature_detected!("abm"));
+ println!("bmi: {:?}", is_x86_feature_detected!("bmi1"));
+ println!("bmi2: {:?}", is_x86_feature_detected!("bmi2"));
+ println!("tbm: {:?}", is_x86_feature_detected!("tbm"));
+ println!("popcnt: {:?}", is_x86_feature_detected!("popcnt"));
+ println!("lzcnt: {:?}", is_x86_feature_detected!("lzcnt"));
+ println!("fxsr: {:?}", is_x86_feature_detected!("fxsr"));
+ println!("xsave: {:?}", is_x86_feature_detected!("xsave"));
+ println!("xsaveopt: {:?}", is_x86_feature_detected!("xsaveopt"));
+ println!("xsaves: {:?}", is_x86_feature_detected!("xsaves"));
+ println!("xsavec: {:?}", is_x86_feature_detected!("xsavec"));
+ println!("cmpxchg16b: {:?}", is_x86_feature_detected!("cmpxchg16b"));
+ println!("adx: {:?}", is_x86_feature_detected!("adx"));
+ println!("rtm: {:?}", is_x86_feature_detected!("rtm"));
+ }
+
+ #[test]
+ fn compare_with_cupid() {
+ let information = cupid::master().unwrap();
+ assert_eq!(is_x86_feature_detected!("aes"), information.aesni());
+ assert_eq!(
+ is_x86_feature_detected!("pclmulqdq"),
+ information.pclmulqdq()
+ );
+ assert_eq!(is_x86_feature_detected!("rdrand"), information.rdrand());
+ assert_eq!(is_x86_feature_detected!("rdseed"), information.rdseed());
+ assert_eq!(is_x86_feature_detected!("tsc"), information.tsc());
+ assert_eq!(is_x86_feature_detected!("sse"), information.sse());
+ assert_eq!(is_x86_feature_detected!("sse2"), information.sse2());
+ assert_eq!(is_x86_feature_detected!("sse3"), information.sse3());
+ assert_eq!(is_x86_feature_detected!("ssse3"), information.ssse3());
+ assert_eq!(is_x86_feature_detected!("sse4.1"), information.sse4_1());
+ assert_eq!(is_x86_feature_detected!("sse4.2"), information.sse4_2());
+ assert_eq!(is_x86_feature_detected!("sse4a"), information.sse4a());
+ assert_eq!(is_x86_feature_detected!("sha"), information.sha());
+ assert_eq!(is_x86_feature_detected!("avx"), information.avx());
+ assert_eq!(is_x86_feature_detected!("avx2"), information.avx2());
+ assert_eq!(is_x86_feature_detected!("avx512f"), information.avx512f());
+ assert_eq!(is_x86_feature_detected!("avx512cd"), information.avx512cd());
+ assert_eq!(is_x86_feature_detected!("avx512er"), information.avx512er());
+ assert_eq!(is_x86_feature_detected!("avx512pf"), information.avx512pf());
+ assert_eq!(is_x86_feature_detected!("avx512bw"), information.avx512bw());
+ assert_eq!(is_x86_feature_detected!("avx512dq"), information.avx512dq());
+ assert_eq!(is_x86_feature_detected!("avx512vl"), information.avx512vl());
+ assert_eq!(
+ is_x86_feature_detected!("avx512ifma"),
+ information.avx512_ifma()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("avx512vbmi"),
+ information.avx512_vbmi()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("avx512vpopcntdq"),
+ information.avx512_vpopcntdq()
+ );
+ assert_eq!(is_x86_feature_detected!("fma"), information.fma());
+ assert_eq!(is_x86_feature_detected!("bmi1"), information.bmi1());
+ assert_eq!(is_x86_feature_detected!("bmi2"), information.bmi2());
+ assert_eq!(is_x86_feature_detected!("popcnt"), information.popcnt());
+ assert_eq!(is_x86_feature_detected!("abm"), information.lzcnt());
+ assert_eq!(is_x86_feature_detected!("tbm"), information.tbm());
+ assert_eq!(is_x86_feature_detected!("lzcnt"), information.lzcnt());
+ assert_eq!(is_x86_feature_detected!("xsave"), information.xsave());
+ assert_eq!(is_x86_feature_detected!("xsaveopt"), information.xsaveopt());
+ assert_eq!(
+ is_x86_feature_detected!("xsavec"),
+ information.xsavec_and_xrstor()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("xsaves"),
+ information.xsaves_xrstors_and_ia32_xss()
+ );
+ assert_eq!(
+ is_x86_feature_detected!("cmpxchg16b"),
+ information.cmpxchg16b(),
+ );
+ assert_eq!(is_x86_feature_detected!("adx"), information.adx(),);
+ assert_eq!(is_x86_feature_detected!("rtm"), information.rtm(),);
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_if/lib.rs b/src/tools/rustfmt/tests/target/cfg_if/lib.rs
new file mode 100644
index 000000000..8b3bb304f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/lib.rs
@@ -0,0 +1,49 @@
+//! Run-time feature detection for the Rust standard library.
+//!
+//! To detect whether a feature is enabled on the system running the binary,
+//! use the appropriate macro for the target (a usage sketch follows this list):
+//!
+//! * `x86` and `x86_64`: [`is_x86_feature_detected`]
+//! * `arm`: [`is_arm_feature_detected`]
+//! * `aarch64`: [`is_aarch64_feature_detected`]
+//! * `mips`: [`is_mips_feature_detected`]
+//! * `mips64`: [`is_mips64_feature_detected`]
+//! * `powerpc`: [`is_powerpc_feature_detected`]
+//! * `powerpc64`: [`is_powerpc64_feature_detected`]
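+//!
+//! As a usage sketch (x86/x86_64 assumed; the feature name is illustrative):
+//!
+//! ```ignore
+//! if is_x86_feature_detected!("avx2") {
+//!     // take an AVX2-accelerated code path
+//! } else {
+//!     // fall back to a portable implementation
+//! }
+//! ```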
+
+#![unstable(feature = "stdsimd", issue = "27731")]
+#![feature(const_fn, staged_api, stdsimd, doc_cfg, allow_internal_unstable)]
+#![allow(clippy::shadow_reuse)]
+#![deny(clippy::missing_inline_in_public_items)]
+#![cfg_attr(target_os = "linux", feature(linkage))]
+#![cfg_attr(all(target_os = "freebsd", target_arch = "aarch64"), feature(asm))]
+#![cfg_attr(stdsimd_strict, deny(warnings))]
+#![cfg_attr(test, allow(unused_imports))]
+#![no_std]
+
+#[macro_use]
+extern crate cfg_if;
+
+cfg_if! {
+ if #[cfg(feature = "std_detect_file_io")] {
+ #[cfg_attr(test, macro_use(println))]
+ extern crate std;
+
+ #[allow(unused_imports)]
+ use std::{arch, fs, io, mem, sync};
+ } else {
+ #[cfg(test)]
+ #[macro_use(println)]
+ extern crate std;
+
+ #[allow(unused_imports)]
+ use core::{arch, mem, sync};
+ }
+}
+
+#[cfg(feature = "std_detect_dlsym_getauxval")]
+extern crate libc;
+
+#[doc(hidden)]
+#[unstable(feature = "stdsimd", issue = "27731")]
+pub mod detect;
diff --git a/src/tools/rustfmt/tests/target/cfg_if/mod.rs b/src/tools/rustfmt/tests/target/cfg_if/mod.rs
new file mode 100644
index 000000000..b630e7ff3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_if/mod.rs
@@ -0,0 +1,5 @@
+//! `std_detect`
+
+#[doc(hidden)] // unstable implementation detail
+#[unstable(feature = "stdsimd", issue = "27731")]
+pub mod detect;
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/bar.rs b/src/tools/rustfmt/tests/target/cfg_mod/bar.rs
new file mode 100644
index 000000000..20dc5b4a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/bar.rs
@@ -0,0 +1,3 @@
+fn bar() -> &str {
+ "bar"
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir2/wasm32.rs b/src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir2/wasm32.rs
new file mode 100644
index 000000000..ac437e422
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir2/wasm32.rs
@@ -0,0 +1,3 @@
+fn wasm32() -> &str {
+ "wasm32"
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir3/wasm32.rs b/src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir3/wasm32.rs
new file mode 100644
index 000000000..ac437e422
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/dir/dir1/dir3/wasm32.rs
@@ -0,0 +1,3 @@
+fn wasm32() -> &str {
+ "wasm32"
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/foo.rs b/src/tools/rustfmt/tests/target/cfg_mod/foo.rs
new file mode 100644
index 000000000..053c8e6f3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/foo.rs
@@ -0,0 +1,3 @@
+fn foo() -> &str {
+ "foo"
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/mod.rs b/src/tools/rustfmt/tests/target/cfg_mod/mod.rs
new file mode 100644
index 000000000..45ba86f11
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/mod.rs
@@ -0,0 +1,10 @@
+#[cfg_attr(feature = "foo", path = "foo.rs")]
+#[cfg_attr(not(feature = "foo"), path = "bar.rs")]
+mod sub_mod;
+
+#[cfg_attr(target_arch = "wasm32", path = "dir/dir1/dir2/wasm32.rs")]
+#[cfg_attr(not(target_arch = "wasm32"), path = "dir/dir1/dir3/wasm32.rs")]
+mod wasm32;
+
+#[some_attr(path = "somewhere.rs")]
+mod other;
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/other.rs b/src/tools/rustfmt/tests/target/cfg_mod/other.rs
new file mode 100644
index 000000000..5929b8dcf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/other.rs
@@ -0,0 +1,3 @@
+fn other() -> &str {
+ "other"
+}
diff --git a/src/tools/rustfmt/tests/target/cfg_mod/wasm32.rs b/src/tools/rustfmt/tests/target/cfg_mod/wasm32.rs
new file mode 100644
index 000000000..ac437e422
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/cfg_mod/wasm32.rs
@@ -0,0 +1,3 @@
+fn wasm32() -> &str {
+ "wasm32"
+}
diff --git a/src/tools/rustfmt/tests/target/chains-visual.rs b/src/tools/rustfmt/tests/target/chains-visual.rs
new file mode 100644
index 000000000..76ef99a4b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/chains-visual.rs
@@ -0,0 +1,158 @@
+// rustfmt-indent_style: Visual
+// Test chain formatting.
+
+fn main() {
+ // Don't put chains on a single line if it wasn't so in source.
+ let a = b.c.d.1.foo(|x| x + 1);
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc
+ .ddddddddddddddddddddddddddd();
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc
+ .ddddddddddddddddddddddddddd
+ .eeeeeeee();
+
+ // Test case where first chain element isn't a path, but is shorter than
+ // the size of a tab.
+ x().y(|| match cond() {
+ true => (),
+ false => (),
+ });
+
+ loong_func().quux(move || if true { 1 } else { 2 });
+
+ some_fuuuuuuuuunction().method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ some_fuuuuuuuuunction().method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ })
+ .method_call_b(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ fffffffffffffffffffffffffffffffffff(a, {
+ SCRIPT_TASK_ROOT.with(|root| {
+ *root.borrow_mut() = Some(&script_task);
+ });
+ });
+
+ let suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuum =
+ xxxxxxx.map(|x| x + 5)
+ .map(|x| x / 2)
+ .fold(0, |acc, x| acc + x);
+
+ aaaaaaaaaaaaaaaa.map(|x| {
+ x += 1;
+ x
+ })
+ .filter(some_mod::some_filter)
+}
+
+fn floaters() {
+ let z = Foo { field1: val1,
+ field2: val2 };
+
+ let x = Foo { field1: val1,
+ field2: val2 }.method_call()
+ .method_call();
+
+ let y = if cond { val1 } else { val2 }.method_call();
+
+ {
+ match x {
+ PushParam => {
+ // params are 1-indexed
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }].clone());
+ }
+ }
+ }
+
+ if cond {
+ some();
+ } else {
+ none();
+ }.bar()
+ .baz();
+
+ Foo { x: val }.baz(|| {
+ force();
+ multiline();
+ })
+ .quux();
+
+ Foo { y: i_am_multi_line,
+ z: ok }.baz(|| {
+ force();
+ multiline();
+ })
+ .quux();
+
+ a + match x {
+ true => "yay!",
+ false => "boo!",
+ }.bar()
+}
+
+fn is_replaced_content() -> bool {
+ constellat.send(ConstellationMsg::ViewportConstrained(self.id, constraints))
+ .unwrap();
+}
+
+fn issue587() {
+ a.b::<()>(c);
+
+ std::mem::transmute(dl.symbol::<()>("init").unwrap())
+}
+
+fn issue_1389() {
+ let names = String::from_utf8(names)?.split('|')
+ .map(str::to_owned)
+ .collect();
+}
+
+fn issue1217() -> Result<Mnemonic, Error> {
+ let random_chars: String = OsRng::new()?.gen_ascii_chars()
+ .take(self.bit_length)
+ .collect();
+
+ Ok(Mnemonic::new(&random_chars))
+}
+
+fn issue1236(options: Vec<String>) -> Result<Option<String>> {
+ let process = Command::new("dmenu").stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .spawn()
+ .chain_err(|| "failed to spawn dmenu")?;
+}
+
+fn issue1434() {
+ for _ in 0..100 {
+ let prototype_id =
+ PrototypeIdData::from_reader::<_, B>(&mut self.file_cursor).chain_err(|| {
+ format!("could not read prototype ID at offset {:#010x}",
+ current_offset)
+ })?;
+ }
+}
+
+fn issue2264() {
+ {
+ something.function()
+ .map(|| {
+ if let a_very_very_very_very_very_very_very_very_long_variable =
+ compute_this_variable()
+ {
+ println!("Hello");
+ }
+ })
+ .collect();
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/chains.rs b/src/tools/rustfmt/tests/target/chains.rs
new file mode 100644
index 000000000..292da2981
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/chains.rs
@@ -0,0 +1,306 @@
+// rustfmt-use_small_heuristics: Off
+// Test chain formatting.
+
+fn main() {
+ let a = b.c.d.1.foo(|x| x + 1);
+
+ bbbbbbbbbbbbbbbbbbb.ccccccccccccccccccccccccccccccccccccc.ddddddddddddddddddddddddddd();
+
+ bbbbbbbbbbbbbbbbbbb
+ .ccccccccccccccccccccccccccccccccccccc
+ .ddddddddddddddddddddddddddd
+ .eeeeeeee();
+
+ let f = fooooooooooooooooooooooooooooooooooooooooooooooooooo
+ .baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaar;
+
+ // Test case where first chain element isn't a path, but is shorter than
+ // the size of a tab.
+ x().y(|| match cond() {
+ true => (),
+ false => (),
+ });
+
+ loong_func().quux(move || {
+ if true {
+ 1
+ } else {
+ 2
+ }
+ });
+
+ some_fuuuuuuuuunction().method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ some_fuuuuuuuuunction()
+ .method_call_a(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ })
+ .method_call_b(aaaaa, bbbbb, |c| {
+ let x = c;
+ x
+ });
+
+ fffffffffffffffffffffffffffffffffff(a, {
+ SCRIPT_TASK_ROOT.with(|root| {
+ *root.borrow_mut() = Some(&script_task);
+ });
+ });
+
+ let suuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuum =
+ xxxxxxx.map(|x| x + 5).map(|x| x / 2).fold(0, |acc, x| acc + x);
+
+ body.fold(Body::new(), |mut body, chunk| {
+ body.extend(chunk);
+ Ok(body)
+ })
+ .and_then(move |body| {
+ let req = Request::from_parts(parts, body);
+ f(req).map_err(|_| io::Error::new(io::ErrorKind::Other, ""))
+ });
+
+ aaaaaaaaaaaaaaaa
+ .map(|x| {
+ x += 1;
+ x
+ })
+ .filter(some_mod::some_filter)
+}
+
+fn floaters() {
+ let z = Foo {
+ field1: val1,
+ field2: val2,
+ };
+
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call()
+ .method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // params are 1-indexed
+ stack.push(
+ mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone(),
+ );
+ }
+ }
+ }
+
+ if cond {
+ some();
+ } else {
+ none();
+ }
+ .bar()
+ .baz();
+
+ Foo {
+ x: val,
+ }
+ .baz(|| {
+ force();
+ multiline();
+ })
+ .quux();
+
+ Foo {
+ y: i_am_multi_line,
+ z: ok,
+ }
+ .baz(|| {
+ force();
+ multiline();
+ })
+ .quux();
+
+ a + match x {
+ true => "yay!",
+ false => "boo!",
+ }
+ .bar()
+}
+
+fn is_replaced_content() -> bool {
+ constellat.send(ConstellationMsg::ViewportConstrained(self.id, constraints)).unwrap();
+}
+
+fn issue587() {
+ a.b::<()>(c);
+
+ std::mem::transmute(dl.symbol::<()>("init").unwrap())
+}
+
+fn try_shorthand() {
+ let x = expr?;
+ let y = expr.kaas()?.test();
+ let loooooooooooooooooooooooooooooooooooooooooong =
+ does_this?.look?.good?.should_we_break?.after_the_first_question_mark?;
+ let yyyy = expr?.another?.another?.another?.another?.another?.another?.another?.another?.test();
+ let zzzz = expr?.another?.another?.another?.another?;
+ let aaa = x??????????????????????????????????????????????????????????????????????????;
+
+ let y = a
+ .very
+ .loooooooooooooooooooooooooooooooooooooong()
+ .chain()
+ .inside()
+ .weeeeeeeeeeeeeee()?
+ .test()
+ .0
+ .x;
+
+ parameterized(f, substs, def_id, Ns::Value, &[], |tcx| tcx.lookup_item_type(def_id).generics)?;
+ fooooooooooooooooooooooooooo()?
+ .bar()?
+ .baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaz()?;
+}
+
+fn issue_1004() {
+ match *self {
+ ty::ImplOrTraitItem::MethodTraitItem(ref i) => write!(f, "{:?}", i),
+ ty::ImplOrTraitItem::ConstTraitItem(ref i) => write!(f, "{:?}", i),
+ ty::ImplOrTraitItem::TypeTraitItem(ref i) => write!(f, "{:?}", i),
+ }?;
+
+ ty::tls::with(|tcx| {
+ let tap = ty::Binder(TraitAndProjections(principal, projections));
+ in_binder(f, tcx, &ty::Binder(""), Some(tap))
+ })?;
+}
+
+fn issue1392() {
+ test_method(
+ r#"
+ if foo {
+ a();
+ }
+ else {
+ b();
+ }
+ "#
+ .trim(),
+ );
+}
+
+// #2067
+impl Settings {
+ fn save(&self) -> Result<()> {
+ let mut file = File::create(&settings_path)
+ .chain_err(|| ErrorKind::WriteError(settings_path.clone()))?;
+ }
+}
+
+fn issue2126() {
+ {
+ {
+ {
+ {
+ {
+ let x = self
+ .span_from(sub_span.expect("No span found for struct arant variant"));
+ self.sspanpan_from_span(
+ sub_span.expect("No span found for struct variant"),
+ );
+ let x = self.spanpan_from_span(
+ sub_span.expect("No span found for struct variant"),
+ )?;
+ }
+ }
+ }
+ }
+ }
+}
+
+// #2200
+impl Foo {
+ pub fn from_ast(diagnostic: &::errors::Handler, attrs: &[ast::Attribute]) -> Attributes {
+ let other_attrs = attrs
+ .iter()
+ .filter_map(|attr| {
+ attr.with_desugared_doc(|attr| {
+ if attr.check_name("doc") {
+ if let Some(mi) = attr.meta() {
+ if let Some(value) = mi.value_str() {
+ doc_strings.push(DocFragment::Include(
+ line, attr.span, filename, contents,
+ ));
+ }
+ }
+ }
+ })
+ })
+ .collect();
+ }
+}
+
+// #2415
+// Avoid orphan in chain
+fn issue2415() {
+ let base_url = (|| {
+ // stuff
+
+ Ok((|| {
+ // stuff
+ Some(value.to_string())
+ })()
+ .ok_or("")?)
+ })()
+ .unwrap_or_else(|_: Box<::std::error::Error>| String::from(""));
+}
+
+impl issue_2786 {
+ fn thing(&self) {
+ foo(|a| {
+ println!("a");
+ println!("b");
+ })
+ .bar(|c| {
+ println!("a");
+ println!("b");
+ })
+ .baz(|c| {
+ println!("a");
+ println!("b");
+ })
+ }
+}
+
+fn issue_2773() {
+ let bar = Some(0);
+ bar.or_else(|| {
+ // do stuff
+ None
+ })
+ .or_else(|| {
+ // do other stuff
+ None
+ })
+ .and_then(|val| {
+ // do this stuff
+ None
+ });
+}
+
+fn issue_3034() {
+ disallowed_headers.iter().any(|header| *header == name)
+ || disallowed_header_prefixes.iter().any(|prefix| name.starts_with(prefix))
+}
diff --git a/src/tools/rustfmt/tests/target/chains_with_comment.rs b/src/tools/rustfmt/tests/target/chains_with_comment.rs
new file mode 100644
index 000000000..522d70713
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/chains_with_comment.rs
@@ -0,0 +1,137 @@
+// Chains with comment.
+
+fn main() {
+ let x = y // comment
+ .z;
+
+ foo // foo
+ // comment after parent
+ .x
+ .y
+ // comment 1
+ .bar() // comment after bar()
+ // comment 2
+ .foobar
+ // comment after
+ // comment 3
+ .baz(x, y, z);
+
+ self.rev_dep_graph
+ .iter()
+ // Remove nodes that are not dirty
+ .filter(|&(unit, _)| dirties.contains(&unit))
+ // Retain only dirty dependencies of the ones that are dirty
+ .map(|(k, deps)| {
+ (
+ k.clone(),
+ deps.iter()
+ .cloned()
+ .filter(|d| dirties.contains(&d))
+ .collect(),
+ )
+ });
+
+ let y = expr /* comment */
+ .kaas()?
+ // comment
+ .test();
+ let loooooooooooooooooooooooooooooooooooooooooong = does_this?
+ .look?
+ .good?
+ .should_we_break?
+ .after_the_first_question_mark?;
+ let zzzz = expr? // comment after parent
+ // comment 0
+ .another??? // comment 1
+ .another???? // comment 2
+ .another? // comment 3
+ .another?;
+
+ let y = a
+ .very
+ .loooooooooooooooooooooooooooooooooooooong() /* comment */
+ .chain()
+ .inside() /* comment */
+ .weeeeeeeeeeeeeee()?
+ .test()
+ .0
+ .x;
+
+ parameterized(f, substs, def_id, Ns::Value, &[], |tcx| {
+ tcx.lookup_item_type(def_id).generics
+ })?;
+ fooooooooooooooooooooooooooo()?
+ .bar()?
+ .baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaz()?;
+
+ // #2559
+ App::new("cargo-cache")
+ .version(crate_version!())
+ .bin_name("cargo")
+ .about("Manage cargo cache")
+ .author("matthiaskrgr")
+ .subcommand(
+ SubCommand::with_name("cache")
+ .version(crate_version!())
+ .bin_name("cargo-cache")
+ .about("Manage cargo cache")
+ .author("matthiaskrgr")
+ .arg(&list_dirs)
+ .arg(&remove_dir)
+ .arg(&gc_repos)
+ .arg(&info)
+ .arg(&keep_duplicate_crates)
+ .arg(&dry_run)
+ .arg(&auto_clean)
+ .arg(&auto_clean_expensive),
+ ) // subcommand
+ .arg(&list_dirs);
+}
+
+// #2177
+impl Foo {
+ fn dirty_rev_dep_graph(
+ &self,
+ dirties: &HashSet<UnitKey>,
+ ) -> HashMap<UnitKey, HashSet<UnitKey>> {
+ let dirties = self.transitive_dirty_units(dirties);
+ trace!("transitive_dirty_units: {:?}", dirties);
+
+ self.rev_dep_graph
+ .iter()
+ // Remove nodes that are not dirty
+ .filter(|&(unit, _)| dirties.contains(&unit))
+ // Retain only dirty dependencies of the ones that are dirty
+ .map(|(k, deps)| {
+ (
+ k.clone(),
+ deps.iter()
+ .cloned()
+ .filter(|d| dirties.contains(&d))
+ .collect(),
+ )
+ })
+ }
+}
+
+// #2907
+fn foo() {
+ let x = foo
+ .bar??? // comment
+ .baz;
+ let x = foo
+ .bar???
+ // comment
+ .baz;
+ let x = foo
+ .bar??? // comment
+ // comment
+ .baz;
+ let x = foo
+ .bar??????????????? // comment
+ // comment
+ // comment
+ // comment
+ // comment
+ .baz;
+}
diff --git a/src/tools/rustfmt/tests/target/closure-block-inside-macro.rs b/src/tools/rustfmt/tests/target/closure-block-inside-macro.rs
new file mode 100644
index 000000000..b3ddfb512
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/closure-block-inside-macro.rs
@@ -0,0 +1,9 @@
+// #1547
+fuzz_target!(|data: &[u8]| if let Some(first) = data.first() {
+ let index = *first as usize;
+ if index >= ENCODINGS.len() {
+ return;
+ }
+ let encoding = ENCODINGS[index];
+ dispatch_test(encoding, &data[1..]);
+});
diff --git a/src/tools/rustfmt/tests/target/closure.rs b/src/tools/rustfmt/tests/target/closure.rs
new file mode 100644
index 000000000..e8b4ff7a9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/closure.rs
@@ -0,0 +1,256 @@
+// rustfmt-normalize_comments: true
+// Closures
+
+fn main() {
+ let square = (|i: i32| i * i);
+
+ let commented = |// first
+ a, // argument
+ // second
+ b: WithType, // argument
+ // ignored
+ _| {
+ (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ )
+ };
+
+ let block_body = move |xxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ ref yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy| {
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+ };
+
+ let loooooooooooooong_name = |field| {
+ // format comments.
+ if field.node.attrs.len() > 0 {
+ field.node.attrs[0].span.lo()
+ } else {
+ field.span.lo()
+ }
+ };
+
+ let unblock_me = |trivial| closure();
+
+ let empty = |arg| {};
+
+ let simple = |arg| {
+ // comment formatting
+ foo(arg)
+ };
+
+ let test = || {
+ do_something();
+ do_something_else();
+ };
+
+ let arg_test =
+ |big_argument_name, test123| looooooooooooooooooong_function_naaaaaaaaaaaaaaaaame();
+
+ let arg_test =
+ |big_argument_name, test123| looooooooooooooooooong_function_naaaaaaaaaaaaaaaaame();
+
+ let simple_closure = move || -> () {};
+
+ let closure = |input: Ty| -> Option<String> { foo() };
+
+ let closure_with_return_type =
+ |aaaaaaaaaaaaaaaaaaaaaaarg1, aaaaaaaaaaaaaaaaaaaaaaarg2| -> Strong { "sup".to_owned() };
+
+ |arg1, arg2, _, _, arg3, arg4| {
+ let temp = arg4 + arg3;
+ arg2 * arg1 - temp
+ };
+
+ let block_body_with_comment = args.iter().map(|a| {
+ // Emitting only dep-info is possible only for final crate type, as
+ // as others may emit required metadata for dependent crate types
+ if a.starts_with("--emit") && is_final_crate_type && !self.workspace_mode {
+ "--emit=dep-info"
+ } else {
+ a
+ }
+ });
+
+ for<> || -> () {};
+ for<> || -> () {};
+ for<> || -> () {};
+
+ for<'a, 'b, 'c> |_: &'a (), _: &'b (), _: &'c ()| -> () {};
+}
+
+fn issue311() {
+ let func = |x| println!("{}", x);
+
+ (func)(0.0);
+}
+
+fn issue863() {
+ let closure = |x| match x {
+ 0 => true,
+ _ => false,
+ } == true;
+}
+
+fn issue934() {
+ let hash: &Fn(&&Block) -> u64 = &|block| -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_block(block);
+ h.finish()
+ };
+
+ let hash: &Fn(&&Block) -> u64 = &|block| -> u64 {
+ let mut h = SpanlessHash::new(cx);
+ h.hash_block(block);
+ h.finish();
+ };
+}
+
+impl<'a, 'tcx: 'a> SpanlessEq<'a, 'tcx> {
+ pub fn eq_expr(&self, left: &Expr, right: &Expr) -> bool {
+ match (&left.node, &right.node) {
+ (&ExprBinary(l_op, ref ll, ref lr), &ExprBinary(r_op, ref rl, ref rr)) => {
+ l_op.node == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
+ || swap_binop(l_op.node, ll, lr).map_or(false, |(l_op, ll, lr)| {
+ l_op == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr)
+ })
+ }
+ }
+ }
+}
+
+fn foo() {
+ lifetimes_iter___map(|lasdfasfd| {
+ let hi = if l.bounds.is_empty() {
+ l.lifetime.span.hi()
+ };
+ });
+}
+
+fn issue1405() {
+ open_raw_fd(fd, b'r').and_then(|file| {
+ Capture::new_raw(None, |_, err| unsafe { raw::pcap_fopen_offline(file, err) })
+ });
+}
+
+fn issue1466() {
+ let vertex_buffer = frame.scope(|ctx| {
+ let buffer = ctx.create_host_visible_buffer::<VertexBuffer<Vertex>>(&vertices);
+ ctx.create_device_local_buffer(buffer)
+ });
+}
+
+fn issue470() {
+ {
+ {
+ {
+ let explicit_arg_decls =
+ explicit_arguments
+ .into_iter()
+ .enumerate()
+ .map(|(index, (ty, pattern))| {
+ let lvalue = Lvalue::Arg(index as u32);
+ block = this.pattern(
+ block,
+ argument_extent,
+ hair::PatternRef::Hair(pattern),
+ &lvalue,
+ );
+ ArgDecl { ty: ty }
+ });
+ }
+ }
+ }
+}
+
+// #1509
+impl Foo {
+ pub fn bar(&self) {
+ Some(SomeType {
+ push_closure_out_to_100_chars: iter(otherwise_it_works_ok.into_iter().map(|f| Ok(f))),
+ })
+ }
+}
+
+fn issue1329() {
+ aaaaaaaaaaaaaaaa
+ .map(|x| {
+ x += 1;
+ x
+ })
+ .filter
+}
+
+fn issue325() {
+ let f =
+ || unsafe { xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx };
+}
+
+fn issue1697() {
+ Test.func_a(
+ A_VERY_LONG_CONST_VARIABLE_NAME,
+ move |arg1, arg2, arg3, arg4| arg1 + arg2 + arg3 + arg4,
+ )
+}
+
+fn issue1694() {
+ foooooo(
+ |_referencefffffffff: _, _target_reference: _, _oid: _, _target_oid: _| {
+ format!("refs/pull/{}/merge", pr_id)
+ },
+ )
+}
+
+fn issue1713() {
+ rayon::join(
+ || recurse(left, is_less, pred, limit),
+ || recurse(right, is_less, Some(pivot), limit),
+ );
+
+ rayon::join(
+ 1,
+ || recurse(left, is_less, pred, limit),
+ 2,
+ || recurse(right, is_less, Some(pivot), limit),
+ );
+}
+
+fn issue2063() {
+ |ctx: Ctx<(String, String)>| -> io::Result<Response> {
+ Ok(Response::new().with_body(ctx.params.0))
+ }
+}
+
+fn issue1524() {
+ let f = |x| x;
+ let f = |x| x;
+ let f = |x| x;
+ let f = |x| x;
+ let f = |x| x;
+}
+
+fn issue2171() {
+ foo(|| unsafe {
+ if PERIPHERALS {
+ loop {}
+ } else {
+ PERIPHERALS = true;
+ }
+ })
+}
+
+fn issue2207() {
+ a.map(|_| {
+ unsafe { a_very_very_very_very_very_very_very_long_function_name_or_anything_else() }
+ .to_string()
+ })
+}
+
+fn issue2262() {
+ result
+ .init(&mut result.slave.borrow_mut(), &mut (result.strategy)())
+ .map_err(|factory| Error {
+ factory,
+ slave: None,
+ })?;
+}
diff --git a/src/tools/rustfmt/tests/target/comment-inside-const.rs b/src/tools/rustfmt/tests/target/comment-inside-const.rs
new file mode 100644
index 000000000..f847f2c69
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment-inside-const.rs
@@ -0,0 +1,9 @@
+fn issue982() {
+ const SOME_CONSTANT: u32 =
+ // Explanation why SOME_CONSTANT needs FLAG_A to be set.
+ FLAG_A |
+ // Explanation why SOME_CONSTANT needs FLAG_B to be set.
+ FLAG_B |
+ // Explanation why SOME_CONSTANT needs FLAG_C to be set.
+ FLAG_C;
+}
diff --git a/src/tools/rustfmt/tests/target/comment-not-disappear.rs b/src/tools/rustfmt/tests/target/comment-not-disappear.rs
new file mode 100644
index 000000000..b1fa0ff6f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment-not-disappear.rs
@@ -0,0 +1,38 @@
+// All the comments here should not disappear.
+
+fn a() {
+ match x {
+ X |
+ // A comment
+ Y => {}
+ };
+}
+
+fn b() {
+ match x {
+ X =>
+ // A comment
+ {
+ y
+ }
+ }
+}
+
+fn c() {
+ a() /* ... */;
+}
+
+fn foo() -> Vec<i32> {
+ (0..11)
+ .map(|x|
+ // This comment disappears.
+ if x % 2 == 0 { x } else { x * 2 })
+ .collect()
+}
+
+fn calc_page_len(prefix_len: usize, sofar: usize) -> usize {
+ 2 // page type and flags
+ + 1 // stored depth
+ + 2 // stored count
+ + prefix_len + sofar // sum of size of all the actual items
+}
diff --git a/src/tools/rustfmt/tests/target/comment.rs b/src/tools/rustfmt/tests/target/comment.rs
new file mode 100644
index 000000000..b987c8a44
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment.rs
@@ -0,0 +1,93 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+//! Doc comment
+fn test() {
+ //! Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam
+ //! lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+
+ // comment
+ // comment2
+
+ code(); // leave this comment alone!
+ // ok?
+
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a
+ // diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+ // viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue. Nam
+ // tincidunt congue enim, ut porta lorem lacinia consectetur. Donec ut
+ // libero sed arcu vehicula ultricies a non tortor. Lorem ipsum dolor sit
+ // amet, consectetur adipiscing elit. Aenean ut gravida lorem. Ut turpis
+ // felis, pulvinar a semper sed, adipiscing id dolor.
+
+ // Very looooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment
+ // that should be split
+
+ // println!("{:?}", rewrite_comment(subslice,
+ // false,
+ // comment_width,
+ // self.block_indent,
+ // self.config)
+ // .unwrap());
+
+ funk(); // dontchangeme
+ // or me
+
+ // #1388
+ const EXCEPTION_PATHS: &'static [&'static str] = &[
+ // std crates
+ "src/libstd/sys/", // Platform-specific code for std lives here.
+ "src/bootstrap",
+ ];
+}
+
+/// test123
+fn doc_comment() {}
+
+fn chains() {
+ foo.bar(|| {
+ let x = 10;
+ // comment
+ x
+ })
+}
+
+fn issue_1086() {
+ //
+}
+
+// random comment
+
+fn main() { // Test
+}
+
+// #1643
+fn some_fn() // some comment
+{
+}
+
+fn some_fn1()
+// some comment
+{
+}
+
+fn some_fn2() // some comment
+{
+}
+
+fn some_fn3() // some comment some comment some comment some comment some comment some comment so
+{
+}
+
+fn some_fn4()
+// some comment some comment some comment some comment some comment some comment some comment
+{
+}
+
+// #1603
+pub enum Foo {
+ A, // `/** **/`
+ B, // `/*!`
+ C,
+}
diff --git a/src/tools/rustfmt/tests/target/comment2.rs b/src/tools/rustfmt/tests/target/comment2.rs
new file mode 100644
index 000000000..04f84a15c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment2.rs
@@ -0,0 +1,5 @@
+// rustfmt-wrap_comments: true
+
+/// This is a long line that angers rustfmt. Rustfmt shall deal with it swiftly
+/// and justly.
+pub mod foo {}
diff --git a/src/tools/rustfmt/tests/target/comment3.rs b/src/tools/rustfmt/tests/target/comment3.rs
new file mode 100644
index 000000000..3a810590d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment3.rs
@@ -0,0 +1,6 @@
+// rustfmt-wrap_comments: true
+
+//! This is a long line that angers rustfmt. Rustfmt shall deal with it swiftly
+//! and justly.
+
+pub mod foo {}
diff --git a/src/tools/rustfmt/tests/target/comment4.rs b/src/tools/rustfmt/tests/target/comment4.rs
new file mode 100644
index 000000000..e2ef7de97
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment4.rs
@@ -0,0 +1,51 @@
+#![allow(dead_code)] // bar
+
+//! Doc comment
+fn test() {
+ // comment
+ // comment2
+
+ code(); /* leave this comment alone!
+ * ok? */
+
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a
+ * diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+ * viverra nec consectetur ante hendrerit. Donec et mollis dolor.
+ * Praesent et diam eget libero egestas mattis sit amet vitae augue. Nam
+ * tincidunt congue enim, ut porta lorem lacinia consectetur. Donec ut
+ * libero sed arcu vehicula ultricies a non tortor. Lorem ipsum dolor sit
+ * amet, consectetur adipiscing elit. Aenean ut gravida lorem. Ut turpis
+ * felis, pulvinar a semper sed, adipiscing id dolor. */
+
+ // Very loooooooooooooooooooooooooooooooooooooooooooooooooooooooong comment that should be split
+
+ // println!("{:?}", rewrite_comment(subslice,
+ // false,
+ // comment_width,
+ // self.block_indent,
+ // self.config)
+ // .unwrap());
+
+ funk(); //dontchangeme
+ // or me
+}
+
+/// test123
+fn doc_comment() {}
+
+/*
+Regression test for issue #956
+
+(some very important text)
+*/
+
+/*
+fn debug_function() {
+ println!("hello");
+}
+// */
+
+#[link_section=".vectors"]
+#[no_mangle] // Test this attribute is preserved.
+#[cfg_attr(rustfmt, rustfmt::skip)]
+pub static ISSUE_1284: [i32; 16] = [];
diff --git a/src/tools/rustfmt/tests/target/comment5.rs b/src/tools/rustfmt/tests/target/comment5.rs
new file mode 100644
index 000000000..82d171e6f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment5.rs
@@ -0,0 +1,16 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+//@ special comment
+//@ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec adiam lectus.
+//@ Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+//@
+//@ foo
+fn test() {}
+
+//@@@ another special comment
+//@@@ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec adiam
+//@@@ lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam
+//@@@
+//@@@ foo
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/target/comment6.rs b/src/tools/rustfmt/tests/target/comment6.rs
new file mode 100644
index 000000000..565fee632
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment6.rs
@@ -0,0 +1,14 @@
+// rustfmt-wrap_comments: true
+
+// Pendant la nuit du 9 mars 1860, les nuages, se confondant avec la mer,
+// limitaient à quelques brasses la portée de la vue. Sur cette mer démontée,
+// dont les lames déferlaient en projetant des lueurs livides, un léger bâtiment
+// fuyait presque à sec de toile.
+
+pub mod foo {}
+
+// ゆく河の流れは絶えずして、しかももとの水にあらず。淀みに浮かぶうたかたは、
+// かつ消えかつ結びて、久しくとどまりたるためしなし。世の中にある人とすみかと、
+// またかくのごとし。
+
+pub mod bar {}
diff --git a/src/tools/rustfmt/tests/target/comment_crlf_newline.rs b/src/tools/rustfmt/tests/target/comment_crlf_newline.rs
new file mode 100644
index 000000000..aab9e94d9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comment_crlf_newline.rs
@@ -0,0 +1,4 @@
+// rustfmt-normalize_comments: true
+// Block comments followed by CRLF newlines should not add an extra newline at the end
+
+// Something else
diff --git a/src/tools/rustfmt/tests/target/comments-fn.rs b/src/tools/rustfmt/tests/target/comments-fn.rs
new file mode 100644
index 000000000..1f43bd93b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comments-fn.rs
@@ -0,0 +1,38 @@
+// Test comments on functions are preserved.
+
+// Comment on foo.
+fn foo<F, G>(
+ a: aaaaaaaaaaaaa, // A comment
+ b: bbbbbbbbbbbbb, // a second comment
+ c: ccccccccccccc,
+ // Newline comment
+ d: ddddddddddddd,
+ // A multi line comment
+ // between args.
+ e: eeeeeeeeeeeee, /* comment before paren */
+) -> bar
+where
+ F: Foo, // Comment after where-clause
+ G: Goo, // final comment
+{
+}
+
+fn bar<F /* comment on F */, G /* comment on G */>() {}
+
+fn baz() -> Baz /* Comment after return type */ {}
+
+fn some_fn<T>()
+where
+ T: Eq, // some comment
+{
+}
+
+fn issue458<F>(a: &str, f: F)
+// comment1
+where
+ // comment2
+ F: FnOnce(&str) -> bool,
+{
+ f(a);
+ ()
+}
diff --git a/src/tools/rustfmt/tests/target/comments-in-lists/format-doc-comments.rs b/src/tools/rustfmt/tests/target/comments-in-lists/format-doc-comments.rs
new file mode 100644
index 000000000..be4b7a8c4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comments-in-lists/format-doc-comments.rs
@@ -0,0 +1,94 @@
+// rustfmt-format_code_in_doc_comments: true
+
+// https://github.com/rust-lang/rustfmt/issues/4420
+enum Minimal {
+ Example,
+ //[thisisremoved thatsleft
+ // canbeanything
+}
+
+struct Minimal2 {
+ Example: usize,
+ //[thisisremoved thatsleft
+ // canbeanything
+}
+
+pub enum E {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E2 {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S2 {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+fn foo(
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo2(// Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn main() {
+ let v = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v2: Vec<i32> = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-false.rs b/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-false.rs
new file mode 100644
index 000000000..db4da6223
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-false.rs
@@ -0,0 +1,83 @@
+// rustfmt-normalize_comments: true
+
+// https://github.com/rust-lang/rustfmt/issues/4909
+pub enum E {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E2 {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S2 {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+fn foo(
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo2(// Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn main() {
+ let v = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v2: Vec<i32> = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ // https://github.com/rust-lang/rustfmt/issues/4430
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-not-normalized.rs b/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-not-normalized.rs
new file mode 100644
index 000000000..9b9147eb1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-not-normalized.rs
@@ -0,0 +1,142 @@
+// rustfmt-wrap_comments: true
+
+// https://github.com/rust-lang/rustfmt/issues/4909
+pub enum E {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+}
+
+pub struct S {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+}
+
+fn foo(
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo2(// Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo3(
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn main() {
+ let v = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v2: Vec<i32> = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v3 = vec![
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ ];
+
+ // https://github.com/rust-lang/rustfmt/issues/4430
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ }
+
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-true.rs b/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-true.rs
new file mode 100644
index 000000000..c1531d22a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/comments-in-lists/wrap-comments-true.rs
@@ -0,0 +1,143 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+// https://github.com/rust-lang/rustfmt/issues/4909
+pub enum E {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub enum E3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ Variant1,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ Variant2,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+}
+
+pub struct S {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S2 {
+ // This can be changed once https://github.com/rust-lang/rustfmt/issues/4854 is fixed
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+}
+
+pub struct S3 {
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ some_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ last_field: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+}
+
+fn foo(
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo2(// Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn foo3(
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ a: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+ b: usize,
+ // Expand as needed, numbers should be ascending according to the stage through the inclusion
+ // pipeline, or according to the descriptions
+) -> usize {
+ 5
+}
+
+fn main() {
+ let v = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v2: Vec<i32> = vec![
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ ];
+
+ let v3 = vec![
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ 1,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ 2,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ ];
+
+ // https://github.com/rust-lang/rustfmt/issues/4430
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage
+ // through the inclusion pipeline, or according to the descriptions
+ }
+
+ match a {
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ b => c,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ d => e,
+ // Expand as needed, numbers should be ascending according to the stage through the
+ // inclusion pipeline, or according to the descriptions
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/blank_lines_lower_bound/1.rs b/src/tools/rustfmt/tests/target/configs/blank_lines_lower_bound/1.rs
new file mode 100644
index 000000000..9706699dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/blank_lines_lower_bound/1.rs
@@ -0,0 +1,16 @@
+// rustfmt-blank_lines_lower_bound: 1
+
+fn foo() {}
+
+fn bar() {}
+
+// comment
+fn foobar() {}
+
+fn foo1() {}
+
+fn bar1() {}
+
+// comment
+
+fn foobar1() {}
diff --git a/src/tools/rustfmt/tests/target/configs/brace_style/fn_always_next_line.rs b/src/tools/rustfmt/tests/target/configs/brace_style/fn_always_next_line.rs
new file mode 100644
index 000000000..2755a2646
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/brace_style/fn_always_next_line.rs
@@ -0,0 +1,19 @@
+// rustfmt-brace_style: AlwaysNextLine
+// Function brace style
+
+fn lorem()
+{
+ // body
+}
+
+fn lorem(ipsum: usize)
+{
+ // body
+}
+
+fn lorem<T>(ipsum: T)
+where
+ T: Add + Sub + Mul + Div,
+{
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/brace_style/fn_prefer_same_line.rs b/src/tools/rustfmt/tests/target/configs/brace_style/fn_prefer_same_line.rs
new file mode 100644
index 000000000..23f98b6dd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/brace_style/fn_prefer_same_line.rs
@@ -0,0 +1,16 @@
+// rustfmt-brace_style: PreferSameLine
+// Function brace style
+
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T)
+where
+ T: Add + Sub + Mul + Div, {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/brace_style/fn_same_line_where.rs b/src/tools/rustfmt/tests/target/configs/brace_style/fn_same_line_where.rs
new file mode 100644
index 000000000..2afe59943
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/brace_style/fn_same_line_where.rs
@@ -0,0 +1,17 @@
+// rustfmt-brace_style: SameLineWhere
+// Function brace style
+
+fn lorem() {
+ // body
+}
+
+fn lorem(ipsum: usize) {
+ // body
+}
+
+fn lorem<T>(ipsum: T)
+where
+ T: Add + Sub + Mul + Div,
+{
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/brace_style/item_always_next_line.rs b/src/tools/rustfmt/tests/target/configs/brace_style/item_always_next_line.rs
new file mode 100644
index 000000000..c13018630
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/brace_style/item_always_next_line.rs
@@ -0,0 +1,25 @@
+// rustfmt-brace_style: AlwaysNextLine
+// Item brace style
+
+enum Foo {}
+
+struct Bar {}
+
+struct Lorem
+{
+ ipsum: bool,
+}
+
+struct Dolor<T>
+where
+ T: Eq,
+{
+ sit: T,
+}
+
+#[cfg(test)]
+mod tests
+{
+ #[test]
+ fn it_works() {}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/brace_style/item_prefer_same_line.rs b/src/tools/rustfmt/tests/target/configs/brace_style/item_prefer_same_line.rs
new file mode 100644
index 000000000..5143d7517
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/brace_style/item_prefer_same_line.rs
@@ -0,0 +1,18 @@
+// rustfmt-brace_style: PreferSameLine
+// Item brace style
+
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T>
+where
+ T: Eq, {
+ sit: T,
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn it_works() {}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/brace_style/item_same_line_where.rs b/src/tools/rustfmt/tests/target/configs/brace_style/item_same_line_where.rs
new file mode 100644
index 000000000..8a3b28526
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/brace_style/item_same_line_where.rs
@@ -0,0 +1,19 @@
+// rustfmt-brace_style: SameLineWhere
+// Item brace style
+
+struct Lorem {
+ ipsum: bool,
+}
+
+struct Dolor<T>
+where
+ T: Eq,
+{
+ sit: T,
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn it_works() {}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/chain_width/always.rs b/src/tools/rustfmt/tests/target/configs/chain_width/always.rs
new file mode 100644
index 000000000..b16d25251
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/chain_width/always.rs
@@ -0,0 +1,29 @@
+// rustfmt-chain_width: 1
+// setting an unachievable chain_width to always get chains
+// on separate lines
+
+struct Fluent {}
+
+impl Fluent {
+ fn blorp(&self) -> &Self {
+ self
+ }
+}
+
+fn main() {
+ let test = Fluent {};
+
+ // should be left alone
+ test.blorp();
+
+ // should be wrapped
+ test.blorp()
+ .blorp();
+ test.blorp()
+ .blorp()
+ .blorp();
+ test.blorp()
+ .blorp()
+ .blorp()
+ .blorp();
+}
diff --git a/src/tools/rustfmt/tests/target/configs/chain_width/small.rs b/src/tools/rustfmt/tests/target/configs/chain_width/small.rs
new file mode 100644
index 000000000..2f2f72777
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/chain_width/small.rs
@@ -0,0 +1,32 @@
+// rustfmt-chain_width: 40
+
+struct Fluent {}
+
+impl Fluent {
+ fn blorp(&self) -> &Self {
+ self
+ }
+}
+
+fn main() {
+ let test = Fluent {};
+
+ // should not be wrapped
+ test.blorp();
+ test.blorp().blorp();
+ test.blorp().blorp().blorp();
+ test.blorp().blorp().blorp().blorp();
+
+ // should be wrapped
+ test.blorp()
+ .blorp()
+ .blorp()
+ .blorp()
+ .blorp();
+ test.blorp()
+ .blorp()
+ .blorp()
+ .blorp()
+ .blorp()
+ .blorp();
+}
diff --git a/src/tools/rustfmt/tests/target/configs/chain_width/tiny.rs b/src/tools/rustfmt/tests/target/configs/chain_width/tiny.rs
new file mode 100644
index 000000000..960d245f8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/chain_width/tiny.rs
@@ -0,0 +1,26 @@
+// rustfmt-chain_width: 20
+
+struct Fluent {}
+
+impl Fluent {
+ fn blorp(&self) -> &Self {
+ self
+ }
+}
+
+fn main() {
+ let test = Fluent {};
+
+ // should not be wrapped
+ test.blorp();
+ test.blorp().blorp();
+
+ // should be wrapped
+ test.blorp()
+ .blorp()
+ .blorp();
+ test.blorp()
+ .blorp()
+ .blorp()
+ .blorp();
+}
diff --git a/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs b/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs
new file mode 100644
index 000000000..5ada9b1dd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/combine_control_expr/false.rs
@@ -0,0 +1,134 @@
+// rustfmt-indent_style: Block
+// rustfmt-combine_control_expr: false
+
+// Combining openings and closings. See rust-lang/fmt-rfcs#61.
+
+fn main() {
+ // Call
+ foo(bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Mac
+ foo(foo!(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // MethodCall
+ foo(x.foo::<Bar, Baz>(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Block
+ foo!({
+ foo();
+ bar();
+ });
+
+ // Closure
+ foo(|x| {
+ let y = x + 1;
+ y
+ });
+
+ // Match
+ foo(match opt {
+ Some(x) => x,
+ None => y,
+ });
+
+ // Struct
+ foo(Bar {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ });
+
+ // If
+ foo!(
+ if x {
+ foo();
+ } else {
+ bar();
+ }
+ );
+
+ // IfLet
+ foo!(
+ if let Some(..) = x {
+ foo();
+ } else {
+ bar();
+ }
+ );
+
+ // While
+ foo!(
+ while x {
+ foo();
+ bar();
+ }
+ );
+
+ // WhileLet
+ foo!(
+ while let Some(..) = x {
+ foo();
+ bar();
+ }
+ );
+
+ // ForLoop
+ foo!(
+ for x in y {
+ foo();
+ bar();
+ }
+ );
+
+ // Loop
+ foo!(
+ loop {
+ foo();
+ bar();
+ }
+ );
+
+ // Tuple
+ foo((
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // AddrOf
+ foo(&bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Box
+ foo(box Bar {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ });
+
+ // Unary
+ foo(!bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Try
+ foo(bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ )?);
+
+ // Cast
+ foo(Bar {
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ } as i64);
+}
diff --git a/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs b/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs
new file mode 100644
index 000000000..52acd2649
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/combine_control_expr/true.rs
@@ -0,0 +1,122 @@
+// rustfmt-indent_style: Block
+// rustfmt-combine_control_expr: true
+
+// Combining openings and closings. See rust-lang/fmt-rfcs#61.
+
+fn main() {
+ // Call
+ foo(bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Mac
+ foo(foo!(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // MethodCall
+ foo(x.foo::<Bar, Baz>(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Block
+ foo!({
+ foo();
+ bar();
+ });
+
+ // Closure
+ foo(|x| {
+ let y = x + 1;
+ y
+ });
+
+ // Match
+ foo(match opt {
+ Some(x) => x,
+ None => y,
+ });
+
+ // Struct
+ foo(Bar {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ });
+
+ // If
+ foo!(if x {
+ foo();
+ } else {
+ bar();
+ });
+
+ // IfLet
+ foo!(if let Some(..) = x {
+ foo();
+ } else {
+ bar();
+ });
+
+ // While
+ foo!(while x {
+ foo();
+ bar();
+ });
+
+ // WhileLet
+ foo!(while let Some(..) = x {
+ foo();
+ bar();
+ });
+
+ // ForLoop
+ foo!(for x in y {
+ foo();
+ bar();
+ });
+
+ // Loop
+ foo!(loop {
+ foo();
+ bar();
+ });
+
+ // Tuple
+ foo((
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // AddrOf
+ foo(&bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Box
+ foo(box Bar {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ });
+
+ // Unary
+ foo(!bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ));
+
+ // Try
+ foo(bar(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ )?);
+
+ // Cast
+ foo(Bar {
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ } as i64);
+}
diff --git a/src/tools/rustfmt/tests/target/configs/comment_width/above.rs b/src/tools/rustfmt/tests/target/configs/comment_width/above.rs
new file mode 100644
index 000000000..ddfecda65
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/comment_width/above.rs
@@ -0,0 +1,8 @@
+// rustfmt-comment_width: 40
+// rustfmt-wrap_comments: true
+// Comment width
+
+fn main() {
+ // Lorem ipsum dolor sit amet,
+ // consectetur adipiscing elit.
+}
diff --git a/src/tools/rustfmt/tests/target/configs/comment_width/below.rs b/src/tools/rustfmt/tests/target/configs/comment_width/below.rs
new file mode 100644
index 000000000..abbc5930c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/comment_width/below.rs
@@ -0,0 +1,7 @@
+// rustfmt-comment_width: 80
+// rustfmt-wrap_comments: true
+// Comment width
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+}
diff --git a/src/tools/rustfmt/tests/target/configs/comment_width/ignore.rs b/src/tools/rustfmt/tests/target/configs/comment_width/ignore.rs
new file mode 100644
index 000000000..c86e71c28
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/comment_width/ignore.rs
@@ -0,0 +1,7 @@
+// rustfmt-comment_width: 40
+// rustfmt-wrap_comments: false
+// Comment width
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+}
diff --git a/src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/false.rs b/src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/false.rs
new file mode 100644
index 000000000..3b967f35a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-condense_wildcard_suffixes: false
+// Condense wildcard suffixes
+
+fn main() {
+ let (lorem, ipsum, _, _) = (1, 2, 3, 4);
+}
diff --git a/src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/true.rs b/src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/true.rs
new file mode 100644
index 000000000..4f880abe8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/condense_wildcard_suffixes/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-condense_wildcard_suffixes: true
+// Condense wildcard suffixes
+
+fn main() {
+ let (lorem, ipsum, ..) = (1, 2, 3, 4);
+}
diff --git a/src/tools/rustfmt/tests/target/configs/control_brace_style/always_next_line.rs b/src/tools/rustfmt/tests/target/configs/control_brace_style/always_next_line.rs
new file mode 100644
index 000000000..7dc06f207
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/control_brace_style/always_next_line.rs
@@ -0,0 +1,18 @@
+// rustfmt-control_brace_style: AlwaysNextLine
+// Control brace style
+
+fn main() {
+ if lorem
+ {
+ println!("ipsum!");
+ }
+ else
+ {
+ println!("dolor!");
+ }
+ match magi
+ {
+ Homura => "Akemi",
+ Madoka => "Kaname",
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/control_brace_style/always_same_line.rs b/src/tools/rustfmt/tests/target/configs/control_brace_style/always_same_line.rs
new file mode 100644
index 000000000..993b6b681
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/control_brace_style/always_same_line.rs
@@ -0,0 +1,14 @@
+// rustfmt-control_brace_style: AlwaysSameLine
+// Control brace style
+
+fn main() {
+ if lorem {
+ println!("ipsum!");
+ } else {
+ println!("dolor!");
+ }
+ match magi {
+ Homura => "Akemi",
+ Madoka => "Kaname",
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/control_brace_style/closing_next_line.rs b/src/tools/rustfmt/tests/target/configs/control_brace_style/closing_next_line.rs
new file mode 100644
index 000000000..013852ee7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/control_brace_style/closing_next_line.rs
@@ -0,0 +1,15 @@
+// rustfmt-control_brace_style: ClosingNextLine
+// Control brace style
+
+fn main() {
+ if lorem {
+ println!("ipsum!");
+ }
+ else {
+ println!("dolor!");
+ }
+ match magi {
+ Homura => "Akemi",
+ Madoka => "Kaname",
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/disable_all_formatting/false.rs b/src/tools/rustfmt/tests/target/configs/disable_all_formatting/false.rs
new file mode 100644
index 000000000..1a0477ddb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/disable_all_formatting/false.rs
@@ -0,0 +1,10 @@
+// rustfmt-disable_all_formatting: false
+// Disable all formatting
+
+fn main() {
+ if lorem {
+ println!("ipsum!");
+ } else {
+ println!("dolor!");
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/disable_all_formatting/true.rs b/src/tools/rustfmt/tests/target/configs/disable_all_formatting/true.rs
new file mode 100644
index 000000000..736ccf569
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/disable_all_formatting/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-disable_all_formatting: true
+// Disable all formatting
+
+fn main() {
+ if lorem{println!("ipsum!");}else{println!("dolor!");}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100.rs b/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100.rs
new file mode 100644
index 000000000..c010a28aa
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100.rs
@@ -0,0 +1,16 @@
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-doc_comment_code_block_width: 100
+
+/// ```rust
+/// impl Test {
+/// pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+/// Self::from_bytes_manual_slice(v, 0, v.len())
+/// }
+/// }
+/// ```
+
+impl Test {
+ pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+ Self::from_bytes_manual_slice(v, 0, v.len())
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100_greater_max_width.rs b/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100_greater_max_width.rs
new file mode 100644
index 000000000..6bcb99b91
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/100_greater_max_width.rs
@@ -0,0 +1,29 @@
+// rustfmt-max_width: 50
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-doc_comment_code_block_width: 100
+
+/// ```rust
+/// impl Test {
+/// pub const fn from_bytes(
+/// v: &[u8],
+/// ) -> Result<Self, ParserError> {
+/// Self::from_bytes_manual_slice(
+/// v,
+/// 0,
+/// v.len(),
+/// )
+/// }
+/// }
+/// ```
+
+impl Test {
+ pub const fn from_bytes(
+ v: &[u8],
+ ) -> Result<Self, ParserError> {
+ Self::from_bytes_manual_slice(
+ v,
+ 0,
+ v.len(),
+ )
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/50.rs b/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/50.rs
new file mode 100644
index 000000000..e8ab6f28b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/doc_comment_code_block_width/50.rs
@@ -0,0 +1,22 @@
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-doc_comment_code_block_width: 50
+
+/// ```rust
+/// impl Test {
+/// pub const fn from_bytes(
+/// v: &[u8],
+/// ) -> Result<Self, ParserError> {
+/// Self::from_bytes_manual_slice(
+/// v,
+/// 0,
+/// v.len(),
+/// )
+/// }
+/// }
+/// ```
+
+impl Test {
+ pub const fn from_bytes(v: &[u8]) -> Result<Self, ParserError> {
+ Self::from_bytes_manual_slice(v, 0, v.len())
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/empty_item_single_line/false.rs b/src/tools/rustfmt/tests/target/configs/empty_item_single_line/false.rs
new file mode 100644
index 000000000..174fe330a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/empty_item_single_line/false.rs
@@ -0,0 +1,14 @@
+// rustfmt-empty_item_single_line: false
+// Empty impl on single line
+
+impl Lorem {
+}
+
+impl Ipsum {
+}
+
+fn lorem() {
+}
+
+fn lorem() {
+}
diff --git a/src/tools/rustfmt/tests/target/configs/empty_item_single_line/true.rs b/src/tools/rustfmt/tests/target/configs/empty_item_single_line/true.rs
new file mode 100644
index 000000000..0755485fe
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/empty_item_single_line/true.rs
@@ -0,0 +1,10 @@
+// rustfmt-empty_item_single_line: true
+// Empty impl on single line
+
+impl Lorem {}
+
+impl Ipsum {}
+
+fn lorem() {}
+
+fn lorem() {}
diff --git a/src/tools/rustfmt/tests/target/configs/enum_discrim_align_threshold/40.rs b/src/tools/rustfmt/tests/target/configs/enum_discrim_align_threshold/40.rs
new file mode 100644
index 000000000..3ed66039c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/enum_discrim_align_threshold/40.rs
@@ -0,0 +1,34 @@
+// rustfmt-enum_discrim_align_threshold: 40
+
+enum Standard {
+ A = 1,
+ Bcdef = 2,
+}
+
+enum NoDiscrims {
+ ThisIsAFairlyLongEnumVariantWithoutDiscrimLongerThan40,
+ A = 1,
+ ThisIsAnotherFairlyLongEnumVariantWithoutDiscrimLongerThan40,
+ Bcdef = 2,
+}
+
+enum TooLong {
+ ThisOneHasDiscrimAaaaaaaaaaaaaaaaaaaaaaChar40 = 10,
+ A = 1,
+ Bcdef = 2,
+}
+
+enum Borderline {
+ ThisOneHasDiscrimAaaaaaaaaaaaaaaaaaaaaa = 10,
+ A = 1,
+ Bcdef = 2,
+}
+
+// Live specimen from #1686
+enum LongWithSmallDiff {
+ SceneColorimetryEstimates = 0x73636F65,
+ SceneAppearanceEstimates = 0x73617065,
+ FocalPlaneColorimetryEstimates = 0x66706365,
+ ReflectionHardcopyOriginalColorimetry = 0x72686F63,
+ ReflectionPrintOutputColorimetry = 0x72706F63,
+}
diff --git a/src/tools/rustfmt/tests/target/configs/error_on_line_overflow/false.rs b/src/tools/rustfmt/tests/target/configs/error_on_line_overflow/false.rs
new file mode 100644
index 000000000..fa70ae783
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/error_on_line_overflow/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-error_on_line_overflow: false
+// Error on line overflow
+
+fn main() {
+ let lorem_ipsum_dolor_sit_amet_consectetur_adipiscing_elit_lorem_ipsum_dolor_sit_amet_consectetur_adipiscing_elit;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/error_on_unformatted/false.rs b/src/tools/rustfmt/tests/target/configs/error_on_unformatted/false.rs
new file mode 100644
index 000000000..6a78374e2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/error_on_unformatted/false.rs
@@ -0,0 +1,12 @@
+// rustfmt-error_on_unformatted: false
+// Error on line overflow in comments or string literals.
+
+// aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+fn main() {
+ // aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+
+ let x = " ";
+ let a = "
+
+";
+}
diff --git a/src/tools/rustfmt/tests/target/configs/fn_args_layout/compressed.rs b/src/tools/rustfmt/tests/target/configs/fn_args_layout/compressed.rs
new file mode 100644
index 000000000..f189446e2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/fn_args_layout/compressed.rs
@@ -0,0 +1,22 @@
+// rustfmt-fn_args_layout: Compressed
+// Function arguments density
+
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(
+ ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur,
+ adipiscing: Adipiscing, elit: Elit,
+ );
+
+ fn lorem(
+ ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet, consectetur: onsectetur,
+ adipiscing: Adipiscing, elit: Elit,
+ ) {
+ // body
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/fn_args_layout/tall.rs b/src/tools/rustfmt/tests/target/configs/fn_args_layout/tall.rs
new file mode 100644
index 000000000..20f308973
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/fn_args_layout/tall.rs
@@ -0,0 +1,32 @@
+// rustfmt-fn_args_layout: Tall
+// Function arguments density
+
+trait Lorem {
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet);
+
+ fn lorem(ipsum: Ipsum, dolor: Dolor, sit: Sit, amet: Amet) {
+ // body
+ }
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: onsectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ );
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: onsectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ ) {
+ // body
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/fn_args_layout/vertical.rs b/src/tools/rustfmt/tests/target/configs/fn_args_layout/vertical.rs
new file mode 100644
index 000000000..6c695a75d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/fn_args_layout/vertical.rs
@@ -0,0 +1,42 @@
+// rustfmt-fn_args_layout: Vertical
+// Function arguments density
+
+trait Lorem {
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ );
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ ) {
+ // body
+ }
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: onsectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ );
+
+ fn lorem(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ consectetur: onsectetur,
+ adipiscing: Adipiscing,
+ elit: Elit,
+ ) {
+ // body
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/fn_single_line/false.rs b/src/tools/rustfmt/tests/target/configs/fn_single_line/false.rs
new file mode 100644
index 000000000..3d092f0c0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/fn_single_line/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-fn_single_line: false
+// Single-expression function on single line
+
+fn lorem() -> usize {
+ 42
+}
+
+fn lorem() -> usize {
+ let ipsum = 42;
+ ipsum
+}
diff --git a/src/tools/rustfmt/tests/target/configs/fn_single_line/true.rs b/src/tools/rustfmt/tests/target/configs/fn_single_line/true.rs
new file mode 100644
index 000000000..10d94e02f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/fn_single_line/true.rs
@@ -0,0 +1,9 @@
+// rustfmt-fn_single_line: true
+// Single-expression function on single line
+
+fn lorem() -> usize { 42 }
+
+fn lorem() -> usize {
+ let ipsum = 42;
+ ipsum
+}
diff --git a/src/tools/rustfmt/tests/target/configs/force_explicit_abi/false.rs b/src/tools/rustfmt/tests/target/configs/force_explicit_abi/false.rs
new file mode 100644
index 000000000..3c48f8e0c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/force_explicit_abi/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-force_explicit_abi: false
+// Force explicit abi
+
+extern {
+ pub static lorem: c_int;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/force_explicit_abi/true.rs b/src/tools/rustfmt/tests/target/configs/force_explicit_abi/true.rs
new file mode 100644
index 000000000..90f5a8c4e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/force_explicit_abi/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-force_explicit_abi: true
+// Force explicit abi
+
+extern "C" {
+ pub static lorem: c_int;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/force_multiline_block/false.rs b/src/tools/rustfmt/tests/target/configs/force_multiline_block/false.rs
new file mode 100644
index 000000000..7cb4cac1d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/force_multiline_block/false.rs
@@ -0,0 +1,20 @@
+// rustfmt-force_multiline_blocks: false
+// Option forces multiline match arm and closure bodies to be wrapped in a block
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ if ipsum {
+ println!("dolor");
+ }
+ }
+ Lorem::Dolor => println!("amet"),
+ }
+}
+
+fn main() {
+ result.and_then(|maybe_value| match maybe_value {
+ None => Err("oops"),
+ Some(value) => Ok(1),
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/configs/force_multiline_block/true.rs b/src/tools/rustfmt/tests/target/configs/force_multiline_block/true.rs
new file mode 100644
index 000000000..aec50afe5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/force_multiline_block/true.rs
@@ -0,0 +1,22 @@
+// rustfmt-force_multiline_blocks: true
+// Option forces multiline match arm and closure bodies to be wrapped in a block
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ if ipsum {
+ println!("dolor");
+ }
+ }
+ Lorem::Dolor => println!("amet"),
+ }
+}
+
+fn main() {
+ result.and_then(|maybe_value| {
+ match maybe_value {
+ None => Err("oops"),
+ Some(value) => Ok(1),
+ }
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_generated_files/false.rs b/src/tools/rustfmt/tests/target/configs/format_generated_files/false.rs
new file mode 100644
index 000000000..dec1e00d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_generated_files/false.rs
@@ -0,0 +1,8 @@
+// @generated
+// rustfmt-format_generated_files: false
+
+fn main()
+{
+ println!("hello, world")
+ ;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_generated_files/true.rs b/src/tools/rustfmt/tests/target/configs/format_generated_files/true.rs
new file mode 100644
index 000000000..5fea7e8b3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_generated_files/true.rs
@@ -0,0 +1,6 @@
+// @generated
+// rustfmt-format_generated_files: true
+
+fn main() {
+ println!("hello, world");
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_macro_bodies/false.rs b/src/tools/rustfmt/tests/target/configs/format_macro_bodies/false.rs
new file mode 100644
index 000000000..ec871b25b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_macro_bodies/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-format_macro_bodies: false
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => { $a(42): $b; };
+ ($a: ident $b: ident $c: ident) => { $a=$b+$c; };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_macro_bodies/true.rs b/src/tools/rustfmt/tests/target/configs/format_macro_bodies/true.rs
new file mode 100644
index 000000000..9dc2524c3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_macro_bodies/true.rs
@@ -0,0 +1,10 @@
+// rustfmt-format_macro_bodies: true
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => {
+ $a(42): $b;
+ };
+ ($a: ident $b: ident $c: ident) => {
+ $a = $b + $c;
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_macro_matchers/false.rs b/src/tools/rustfmt/tests/target/configs/format_macro_matchers/false.rs
new file mode 100644
index 000000000..3966d21be
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_macro_matchers/false.rs
@@ -0,0 +1,10 @@
+// rustfmt-format_macro_matchers: false
+
+macro_rules! foo {
+ ($a: ident : $b: ty) => {
+ $a(42): $b;
+ };
+ ($a: ident $b: ident $c: ident) => {
+ $a = $b + $c;
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_macro_matchers/true.rs b/src/tools/rustfmt/tests/target/configs/format_macro_matchers/true.rs
new file mode 100644
index 000000000..e113af96f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_macro_matchers/true.rs
@@ -0,0 +1,10 @@
+// rustfmt-format_macro_matchers: true
+
+macro_rules! foo {
+ ($a:ident : $b:ty) => {
+ $a(42): $b;
+ };
+ ($a:ident $b:ident $c:ident) => {
+ $a = $b + $c;
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_strings/false.rs b/src/tools/rustfmt/tests/target/configs/format_strings/false.rs
new file mode 100644
index 000000000..ecca0d7d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_strings/false.rs
@@ -0,0 +1,8 @@
+// rustfmt-format_strings: false
+// rustfmt-max_width: 50
+// rustfmt-error_on_line_overflow: false
+// Force format strings
+
+fn main() {
+ let lorem = "ipsum dolor sit amet consectetur adipiscing elit lorem ipsum dolor sit";
+}
diff --git a/src/tools/rustfmt/tests/target/configs/format_strings/true.rs b/src/tools/rustfmt/tests/target/configs/format_strings/true.rs
new file mode 100644
index 000000000..fdd5ab2c9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/format_strings/true.rs
@@ -0,0 +1,9 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+// Force format strings
+
+fn main() {
+ let lorem = "ipsum dolor sit amet \
+ consectetur adipiscing elit \
+ lorem ipsum dolor sit";
+}
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/One-merge_imports.rs b/src/tools/rustfmt/tests/target/configs/group_imports/One-merge_imports.rs
new file mode 100644
index 000000000..52e0e1c5a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/One-merge_imports.rs
@@ -0,0 +1,14 @@
+// rustfmt-group_imports: One
+// rustfmt-imports_granularity: Crate
+use super::{
+ schema::{Context, Payload},
+ update::convert_publish_payload,
+};
+use crate::models::Event;
+use alloc::{alloc::Layout, vec::Vec};
+use broker::database::PooledConnection;
+use chrono::Utc;
+use core::f32;
+use juniper::{FieldError, FieldResult};
+use std::sync::Arc;
+use uuid::Uuid;
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/One-nested.rs b/src/tools/rustfmt/tests/target/configs/group_imports/One-nested.rs
new file mode 100644
index 000000000..5b6485482
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/One-nested.rs
@@ -0,0 +1,6 @@
+// rustfmt-group_imports: One
+mod test {
+ use crate::foo::bar;
+ use crate::foo::bar2;
+ use std::path;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/One-no_reorder.rs b/src/tools/rustfmt/tests/target/configs/group_imports/One-no_reorder.rs
new file mode 100644
index 000000000..015e841d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/One-no_reorder.rs
@@ -0,0 +1,12 @@
+// rustfmt-group_imports: One
+// rustfmt-reorder_imports: false
+use chrono::Utc;
+use super::update::convert_publish_payload;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use alloc::alloc::Layout;
+use std::sync::Arc;
+use broker::database::PooledConnection;
+use super::schema::{Context, Payload};
+use core::f32;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/One.rs b/src/tools/rustfmt/tests/target/configs/group_imports/One.rs
new file mode 100644
index 000000000..3094c7ae1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/One.rs
@@ -0,0 +1,11 @@
+// rustfmt-group_imports: One
+use super::schema::{Context, Payload};
+use super::update::convert_publish_payload;
+use crate::models::Event;
+use alloc::alloc::Layout;
+use broker::database::PooledConnection;
+use chrono::Utc;
+use core::f32;
+use juniper::{FieldError, FieldResult};
+use std::sync::Arc;
+use uuid::Uuid;
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-merge_imports.rs b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-merge_imports.rs
new file mode 100644
index 000000000..5e4064dd8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-merge_imports.rs
@@ -0,0 +1,16 @@
+// rustfmt-group_imports: StdExternalCrate
+// rustfmt-imports_granularity: Crate
+use alloc::{alloc::Layout, vec::Vec};
+use core::f32;
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+use chrono::Utc;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+
+use super::{
+ schema::{Context, Payload},
+ update::convert_publish_payload,
+};
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-nested.rs b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-nested.rs
new file mode 100644
index 000000000..daf23375c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-nested.rs
@@ -0,0 +1,7 @@
+// rustfmt-group_imports: StdExternalCrate
+mod test {
+ use std::path;
+
+ use crate::foo::bar;
+ use crate::foo::bar2;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-no_reorder.rs b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-no_reorder.rs
new file mode 100644
index 000000000..76d3d6ccb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-no_reorder.rs
@@ -0,0 +1,15 @@
+// rustfmt-group_imports: StdExternalCrate
+// rustfmt-reorder_imports: false
+
+use alloc::alloc::Layout;
+use std::sync::Arc;
+use core::f32;
+
+use chrono::Utc;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+use broker::database::PooledConnection;
+
+use super::update::convert_publish_payload;
+use super::schema::{Context, Payload};
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-non_consecutive.rs b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-non_consecutive.rs
new file mode 100644
index 000000000..ecc8ede02
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate-non_consecutive.rs
@@ -0,0 +1,18 @@
+// rustfmt-group_imports: StdExternalCrate
+use alloc::alloc::Layout;
+
+use chrono::Utc;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+
+use super::update::convert_publish_payload;
+
+extern crate uuid;
+
+use core::f32;
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+
+use super::schema::{Context, Payload};
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate.rs b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate.rs
new file mode 100644
index 000000000..080257968
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/group_imports/StdExternalCrate.rs
@@ -0,0 +1,13 @@
+// rustfmt-group_imports: StdExternalCrate
+use alloc::alloc::Layout;
+use core::f32;
+use std::sync::Arc;
+
+use broker::database::PooledConnection;
+use chrono::Utc;
+use juniper::{FieldError, FieldResult};
+use uuid::Uuid;
+
+use super::schema::{Context, Payload};
+use super::update::convert_publish_payload;
+use crate::models::Event;
diff --git a/src/tools/rustfmt/tests/target/configs/hard_tabs/false.rs b/src/tools/rustfmt/tests/target/configs/hard_tabs/false.rs
new file mode 100644
index 000000000..ccfb53d8c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/hard_tabs/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-hard_tabs: false
+// Hard tabs
+
+fn lorem() -> usize {
+ 42 // spaces before 42
+}
diff --git a/src/tools/rustfmt/tests/target/configs/hard_tabs/true.rs b/src/tools/rustfmt/tests/target/configs/hard_tabs/true.rs
new file mode 100644
index 000000000..3ed4e4f20
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/hard_tabs/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-hard_tabs: true
+// Hard tabs
+
+fn lorem() -> usize {
+ 42 // spaces before 42
+}
diff --git a/src/tools/rustfmt/tests/target/configs/imports_indent/block.rs b/src/tools/rustfmt/tests/target/configs/imports_indent/block.rs
new file mode 100644
index 000000000..84c3b26bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/imports_indent/block.rs
@@ -0,0 +1,7 @@
+// rustfmt-imports_indent: Block
+
+use lists::{
+ definitive_tactic, itemize_list, shape_for_tactic, struct_lit_formatting, struct_lit_shape,
+ struct_lit_tactic, write_list, DefinitiveListTactic, ListFormatting, ListItem, ListTactic,
+ SeparatorTactic,
+};
diff --git a/src/tools/rustfmt/tests/target/configs/imports_layout/horizontal_vertical.rs b/src/tools/rustfmt/tests/target/configs/imports_layout/horizontal_vertical.rs
new file mode 100644
index 000000000..4a63556d4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/imports_layout/horizontal_vertical.rs
@@ -0,0 +1,18 @@
+// rustfmt-imports_indent: Block
+// rustfmt-imports_layout: HorizontalVertical
+
+use comment::{contains_comment, recover_comment_removed, rewrite_comment, FindUncommented};
+use lists::{
+ definitive_tactic,
+ itemize_list,
+ shape_for_tactic,
+ struct_lit_formatting,
+ struct_lit_shape,
+ struct_lit_tactic,
+ write_list,
+ DefinitiveListTactic,
+ ListFormatting,
+ ListItem,
+ ListTactic,
+ SeparatorTactic,
+};
diff --git a/src/tools/rustfmt/tests/target/configs/imports_layout/merge_mixed.rs b/src/tools/rustfmt/tests/target/configs/imports_layout/merge_mixed.rs
new file mode 100644
index 000000000..bc0da92ff
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/imports_layout/merge_mixed.rs
@@ -0,0 +1,5 @@
+// rustfmt-imports_indent: Block
+// rustfmt-imports_granularity: Crate
+// rustfmt-imports_layout: Mixed
+
+use std::{fmt, io, str, str::FromStr};
diff --git a/src/tools/rustfmt/tests/target/configs/imports_layout/mixed.rs b/src/tools/rustfmt/tests/target/configs/imports_layout/mixed.rs
new file mode 100644
index 000000000..5d3349a01
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/imports_layout/mixed.rs
@@ -0,0 +1,9 @@
+// rustfmt-imports_indent: Block
+// rustfmt-imports_layout: Mixed
+
+use comment::{contains_comment, recover_comment_removed, rewrite_comment, FindUncommented};
+use lists::{
+ definitive_tactic, itemize_list, shape_for_tactic, struct_lit_formatting, struct_lit_shape,
+ struct_lit_tactic, write_list, DefinitiveListTactic, ListFormatting, ListItem, ListTactic,
+ SeparatorTactic,
+};
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_args.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_args.rs
new file mode 100644
index 000000000..80f4e1333
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_args.rs
@@ -0,0 +1,47 @@
+// rustfmt-indent_style: Block
+// Function arguments layout
+
+fn lorem() {}
+
+fn lorem(ipsum: usize) {}
+
+fn lorem(
+ ipsum: usize,
+ dolor: usize,
+ sit: usize,
+ amet: usize,
+ consectetur: usize,
+ adipiscing: usize,
+ elit: usize,
+) {
+ // body
+}
+
+// #1441
+extern "system" {
+ pub fn GetConsoleHistoryInfo(
+ console_history_info: *mut ConsoleHistoryInfo,
+ ) -> Boooooooooooooool;
+}
+
+// rustfmt should not add trailing comma for variadic function. See #1623.
+extern "C" {
+ pub fn variadic_fn(
+ first_parameter: FirstParameterType,
+ second_parameter: SecondParameterType,
+ ...
+ );
+}
+
+// #1652
+fn deconstruct(
+ foo: Bar,
+) -> (
+ SocketAddr,
+ Header,
+ Method,
+ RequestUri,
+ HttpVersion,
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_array.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_array.rs
new file mode 100644
index 000000000..5d458248c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_array.rs
@@ -0,0 +1,14 @@
+// rustfmt-indent_style: Block
+// Array layout
+
+fn main() {
+ let lorem = vec![
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit",
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs
new file mode 100644
index 000000000..19c44dc01
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs
@@ -0,0 +1,151 @@
+// rustfmt-indent_style: Block
+// Function call style
+
+fn main() {
+ lorem(
+ "lorem",
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit",
+ );
+ // #1501
+ let hyper = Arc::new(Client::with_connector(
+ HttpsConnector::new(TlsClient::new()),
+ ));
+
+ // chain
+ let x = yooooooooooooo
+ .fooooooooooooooo
+ .baaaaaaaaaaaaar(hello, world);
+
+ // #1380
+ {
+ {
+ let creds = self
+ .client
+ .client_credentials(&self.config.auth.oauth2.id, &self.config.auth.oauth2.secret)?;
+ }
+ }
+
+ // nesting macro and function call
+ try!(foo(
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ ));
+ try!(foo(try!(
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ )));
+}
+
+// #1521
+impl Foo {
+ fn map_pixel_to_coords(&self, point: &Vector2i, view: &View) -> Vector2f {
+ unsafe {
+ Vector2f::from_raw(ffi::sfRenderTexture_mapPixelToCoords(
+ self.render_texture,
+ point.raw(),
+ view.raw(),
+ ))
+ }
+ }
+}
+
+fn issue1420() {
+ given(
+ r#"
+ # Getting started
+ ...
+ "#,
+ )
+ .running(waltz)
+}
+
+// #1563
+fn query(conn: &Connection) -> Result<()> {
+ conn.query_row(
+ r#"
+ SELECT title, date
+ FROM posts,
+ WHERE DATE(date) = $1
+ "#,
+ &[],
+ |row| Post {
+ title: row.get(0),
+ date: row.get(1),
+ },
+ )?;
+
+ Ok(())
+}
+
+// #1449
+fn future_rayon_wait_1_thread() {
+ // run with only 1 worker thread; this would deadlock if we couldn't make progress
+ let mut result = None;
+ ThreadPool::new(Configuration::new().num_threads(1))
+ .unwrap()
+ .install(|| {
+ scope(|s| {
+ use std::sync::mpsc::channel;
+ let (tx, rx) = channel();
+ let a = s.spawn_future(lazy(move || Ok::<usize, ()>(rx.recv().unwrap())));
+ // ^^^^ FIXME: why is this needed?
+ let b = s.spawn_future(a.map(|v| v + 1));
+ let c = s.spawn_future(b.map(|v| v + 1));
+ s.spawn(move |_| tx.send(20).unwrap());
+ result = Some(c.rayon_wait().unwrap());
+ });
+ });
+ assert_eq!(result, Some(22));
+}
+
+// #1494
+impl Cursor {
+ fn foo() {
+ self.cur_type()
+ .num_template_args()
+ .or_else(|| {
+ let n: c_int = unsafe { clang_Cursor_getNumTemplateArguments(self.x) };
+
+ if n >= 0 {
+ Some(n as u32)
+ } else {
+ debug_assert_eq!(n, -1);
+ None
+ }
+ })
+ .or_else(|| {
+ let canonical = self.canonical();
+ if canonical != *self {
+ canonical.num_template_args()
+ } else {
+ None
+ }
+ });
+ }
+}
+
+fn issue1581() {
+ bootstrap.checks.register("PERSISTED_LOCATIONS", move || {
+ if locations2.0.inner_mut.lock().poisoned {
+ Check::new(
+ State::Error,
+ "Persisted location storage is poisoned due to a write failure",
+ )
+ } else {
+ Check::new(State::Healthy, "Persisted location storage is healthy")
+ }
+ });
+}
+
+fn issue1651() {
+ {
+ let type_list: Vec<_> =
+ try_opt!(types.iter().map(|ty| ty.rewrite(context, shape)).collect());
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_chain.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_chain.rs
new file mode 100644
index 000000000..23340a4ab
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_chain.rs
@@ -0,0 +1,12 @@
+// rustfmt-indent_style: Block
+// Chain indent
+
+fn main() {
+ let lorem = ipsum
+ .dolor()
+ .sit()
+ .amet()
+ .consectetur()
+ .adipiscing()
+ .elite();
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_generic.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_generic.rs
new file mode 100644
index 000000000..c4fcaaf65
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_generic.rs
@@ -0,0 +1,22 @@
+// rustfmt-indent_style: Block
+// Generics indent
+
+fn lorem<
+ Ipsum: Eq = usize,
+ Dolor: Eq = usize,
+ Sit: Eq = usize,
+ Amet: Eq = usize,
+ Adipiscing: Eq = usize,
+ Consectetur: Eq = usize,
+ Elit: Eq = usize,
+>(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ adipiscing: Adipiscing,
+ consectetur: Consectetur,
+ elit: Elit,
+) -> T {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_struct_lit.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_struct_lit.rs
new file mode 100644
index 000000000..656b56226
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_struct_lit.rs
@@ -0,0 +1,9 @@
+// rustfmt-indent_style: Block
+// Struct literal-style
+
+fn main() {
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_tab_spaces_call.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_tab_spaces_call.rs
new file mode 100644
index 000000000..5531e61dd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_tab_spaces_call.rs
@@ -0,0 +1,14 @@
+// rustfmt-indent_style: Block
+// rustfmt-max_width: 80
+// rustfmt-tab_spaces: 2
+
+// #1427
+fn main() {
+ exceptaions::config(move || {
+ (
+ NmiConfig {},
+ HardFaultConfig {},
+ SysTickConfig { gpio_sbsrr },
+ )
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/one.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/one.rs
new file mode 100644
index 000000000..6b9489bef
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/one.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: One
+// rustfmt-error_on_line_overflow: false
+// rustfmt-indent_style: Block
+
+// rustfmt should not add trailing comma when rewriting macro. See #1528.
+fn a() {
+ panic!("this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:");
+ foo(
+ a,
+ oooptoptoptoptptooptoptoptoptptooptoptoptoptptoptoptoptoptpt(),
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/two.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/two.rs
new file mode 100644
index 000000000..4f4292e5f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_trailing_comma_call/two.rs
@@ -0,0 +1,14 @@
+// rustfmt-version: Two
+// rustfmt-error_on_line_overflow: false
+// rustfmt-indent_style: Block
+
+// rustfmt should not add trailing comma when rewriting macro. See #1528.
+fn a() {
+ panic!(
+ "this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:"
+ );
+ foo(
+ a,
+ oooptoptoptoptptooptoptoptoptptooptoptoptoptptoptoptoptoptpt(),
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/block_where_pred.rs b/src/tools/rustfmt/tests/target/configs/indent_style/block_where_pred.rs
new file mode 100644
index 000000000..ad7e0b8f3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/block_where_pred.rs
@@ -0,0 +1,12 @@
+// rustfmt-indent_style: Block
+// Where predicate indent
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+where
+ Ipsum: Eq,
+ Dolor: Eq,
+ Sit: Eq,
+ Amet: Eq,
+{
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/default.rs b/src/tools/rustfmt/tests/target/configs/indent_style/default.rs
new file mode 100644
index 000000000..a8f0902b3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/default.rs
@@ -0,0 +1,11 @@
+// rustfmt-indent_style: Visual
+// Where style
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+ where Ipsum: Eq,
+ Dolor: Eq,
+ Sit: Eq,
+ Amet: Eq
+{
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/rfc_control.rs b/src/tools/rustfmt/tests/target/configs/indent_style/rfc_control.rs
new file mode 100644
index 000000000..6619d8b26
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/rfc_control.rs
@@ -0,0 +1,39 @@
+// rustfmt-indent_style: Block
+
+// #1618
+fn main() {
+ loop {
+ if foo {
+ if ((right_paddle_speed < 0.) && (right_paddle.position().y - paddle_size.y / 2. > 5.))
+ || ((right_paddle_speed > 0.)
+ && (right_paddle.position().y + paddle_size.y / 2. < game_height as f32 - 5.))
+ {
+ foo
+ }
+ if ai_timer.elapsed_time().as_microseconds() > ai_time.as_microseconds() {
+ if ball.position().y + ball_radius > right_paddle.position().y + paddle_size.y / 2.
+ {
+ foo
+ }
+ }
+ }
+ }
+}
+
+fn issue1656() {
+ {
+ {
+ match rewrite {
+ Some(ref body_str)
+ if (!body_str.contains('\n') && body_str.len() <= arm_shape.width)
+ || !context.config.match_arm_blocks()
+ || (extend && first_line_width(body_str) <= arm_shape.width)
+ || is_block =>
+ {
+ return None;
+ }
+ _ => {}
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/rfc_where.rs b/src/tools/rustfmt/tests/target/configs/indent_style/rfc_where.rs
new file mode 100644
index 000000000..a7b9a4f02
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/rfc_where.rs
@@ -0,0 +1,12 @@
+// rustfmt-indent_style: Block
+// Where style
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+where
+ Ipsum: Eq,
+ Dolor: Eq,
+ Sit: Eq,
+ Amet: Eq,
+{
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_args.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_args.rs
new file mode 100644
index 000000000..04c2eaee3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_args.rs
@@ -0,0 +1,40 @@
+// rustfmt-indent_style: Visual
+// Function arguments layout
+
+fn lorem() {}
+
+fn lorem(ipsum: usize) {}
+
+fn lorem(ipsum: usize,
+ dolor: usize,
+ sit: usize,
+ amet: usize,
+ consectetur: usize,
+ adipiscing: usize,
+ elit: usize) {
+ // body
+}
+
+// #1922
+extern "C" {
+ pub fn LAPACKE_csytrs_rook_work(matrix_layout: c_int,
+ uplo: c_char,
+ n: lapack_int,
+ nrhs: lapack_int,
+ a: *const lapack_complex_float,
+ lda: lapack_int,
+ ipiv: *const lapack_int,
+ b: *mut lapack_complex_float,
+ ldb: lapack_int)
+ -> lapack_int;
+
+ pub fn LAPACKE_csytrs_rook_work(matrix_layout: c_int,
+ uplo: c_char,
+ n: lapack_int,
+ nrhs: lapack_int,
+ lda: lapack_int,
+ ipiv: *const lapack_int,
+ b: *mut lapack_complex_float,
+ ldb: lapack_int)
+ -> lapack_int;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_array.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_array.rs
new file mode 100644
index 000000000..1da6ff237
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_array.rs
@@ -0,0 +1,12 @@
+// rustfmt-indent_style: Visual
+// Array layout
+
+fn main() {
+ let lorem = vec!["ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit"];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_call.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_call.rs
new file mode 100644
index 000000000..5454c44ef
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_call.rs
@@ -0,0 +1,13 @@
+// rustfmt-indent_style: Visual
+// Function call style
+
+fn main() {
+ lorem("lorem",
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit");
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_chain.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_chain.rs
new file mode 100644
index 000000000..569f3d8b8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_chain.rs
@@ -0,0 +1,11 @@
+// rustfmt-indent_style: Visual
+// Chain indent
+
+fn main() {
+ let lorem = ipsum.dolor()
+ .sit()
+ .amet()
+ .consectetur()
+ .adipiscing()
+ .elite();
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_generics.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_generics.rs
new file mode 100644
index 000000000..491075a14
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_generics.rs
@@ -0,0 +1,20 @@
+// rustfmt-indent_style: Visual
+// Generics indent
+
+fn lorem<Ipsum: Eq = usize,
+ Dolor: Eq = usize,
+ Sit: Eq = usize,
+ Amet: Eq = usize,
+ Adipiscing: Eq = usize,
+ Consectetur: Eq = usize,
+ Elit: Eq = usize>(
+ ipsum: Ipsum,
+ dolor: Dolor,
+ sit: Sit,
+ amet: Amet,
+ adipiscing: Adipiscing,
+ consectetur: Consectetur,
+ elit: Elit)
+ -> T {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_struct_lit.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_struct_lit.rs
new file mode 100644
index 000000000..ec49021d3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_struct_lit.rs
@@ -0,0 +1,7 @@
+// rustfmt-indent_style: Visual
+// Struct literal-style
+
+fn main() {
+ let lorem = Lorem { ipsum: dolor,
+ sit: amet };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_trailing_comma.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_trailing_comma.rs
new file mode 100644
index 000000000..9738d397d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_trailing_comma.rs
@@ -0,0 +1,7 @@
+// rustfmt-error_on_line_overflow: false
+// rustfmt-indent_style: Visual
+
+// rustfmt should not add trailing comma when rewriting macro. See #1528.
+fn a() {
+ panic!("this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:");
+}
diff --git a/src/tools/rustfmt/tests/target/configs/indent_style/visual_where_pred.rs b/src/tools/rustfmt/tests/target/configs/indent_style/visual_where_pred.rs
new file mode 100644
index 000000000..45799dcd5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/indent_style/visual_where_pred.rs
@@ -0,0 +1,11 @@
+// rustfmt-indent_style: Visual
+// Where predicate indent
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+ where Ipsum: Eq,
+ Dolor: Eq,
+ Sit: Eq,
+ Amet: Eq
+{
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_arm_blocks/false.rs b/src/tools/rustfmt/tests/target/configs/match_arm_blocks/false.rs
new file mode 100644
index 000000000..7a9834168
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_arm_blocks/false.rs
@@ -0,0 +1,12 @@
+// rustfmt-match_arm_blocks: false
+// Wrap match-arms
+
+fn main() {
+ match lorem {
+ true =>
+ foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(x),
+ false => {
+ println!("{}", sit)
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_arm_blocks/true.rs b/src/tools/rustfmt/tests/target/configs/match_arm_blocks/true.rs
new file mode 100644
index 000000000..eb9e34059
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_arm_blocks/true.rs
@@ -0,0 +1,13 @@
+// rustfmt-match_arm_blocks: true
+// Wrap match-arms
+
+fn main() {
+ match lorem {
+ true => {
+ foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(x)
+ }
+ false => {
+ println!("{}", sit)
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/always.rs b/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/always.rs
new file mode 100644
index 000000000..f2af81eac
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/always.rs
@@ -0,0 +1,27 @@
+// rustfmt-match_arm_leading_pipes: Always
+
+fn foo() {
+ match foo {
+ | "foo" | "bar" => {}
+ | "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ | "qux" => println!("y"),
+ | _ => {}
+ }
+}
+
+fn issue_3973() {
+ match foo {
+ | "foo" | "bar" => {}
+ | _ => {}
+ }
+}
+
+fn bar() {
+ match baz {
+ | "qux" => {}
+ | "foo" | "bar" => {}
+ | _ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/never.rs b/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/never.rs
new file mode 100644
index 000000000..345014e4b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/never.rs
@@ -0,0 +1,27 @@
+// rustfmt-match_arm_leading_pipes: Never
+
+fn foo() {
+ match foo {
+ "foo" | "bar" => {}
+ "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ "qux" => println!("y"),
+ _ => {}
+ }
+}
+
+fn issue_3973() {
+ match foo {
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
+
+fn bar() {
+ match baz {
+ "qux" => {}
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/preserve.rs b/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/preserve.rs
new file mode 100644
index 000000000..477557584
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_arm_leading_pipes/preserve.rs
@@ -0,0 +1,35 @@
+// rustfmt-match_arm_leading_pipes: Preserve
+
+fn foo() {
+ match foo {
+ | "foo" | "bar" => {}
+ | "baz"
+ | "something relatively long"
+ | "something really really really realllllllllllllly long" => println!("x"),
+ | "qux" => println!("y"),
+ _ => {}
+ }
+}
+
+fn issue_3973() {
+ match foo {
+ | "foo" | "bar" => {}
+ _ => {}
+ }
+}
+
+fn bar() {
+ match baz {
+ "qux" => {}
+ "foo" | "bar" => {}
+ _ => {}
+ }
+}
+
+fn f(x: NonAscii) -> bool {
+ match x {
+ // foo
+ | Éfgh => true,
+ _ => false,
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/false.rs b/src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/false.rs
new file mode 100644
index 000000000..70e02955f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-match_block_trailing_comma: false
+// Match block trailing comma
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ println!("ipsum");
+ }
+ Lorem::Dolor => println!("dolor"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/true.rs b/src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/true.rs
new file mode 100644
index 000000000..b78b046dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/match_block_trailing_comma/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-match_block_trailing_comma: true
+// Match block trailing comma
+
+fn main() {
+ match lorem {
+ Lorem::Ipsum => {
+ println!("ipsum");
+ },
+ Lorem::Dolor => println!("dolor"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/merge_derives/true.rs b/src/tools/rustfmt/tests/target/configs/merge_derives/true.rs
new file mode 100644
index 000000000..4d0148b1c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/merge_derives/true.rs
@@ -0,0 +1,40 @@
+// rustfmt-merge_derives: true
+// Merge multiple derives to a single one.
+
+#[bar]
+#[derive(Eq, PartialEq)]
+#[foo]
+#[derive(Debug)]
+#[foobar]
+#[derive(Copy, Clone)]
+pub enum Foo {}
+
+#[derive(Eq, PartialEq, Debug)]
+#[foobar]
+#[derive(Copy, Clone)]
+pub enum Bar {}
+
+#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+pub enum FooBar {}
+
+mod foo {
+ #[bar]
+ #[derive(Eq, PartialEq)]
+ #[foo]
+ #[derive(Debug)]
+ #[foobar]
+ #[derive(Copy, Clone)]
+ pub enum Foo {}
+}
+
+mod bar {
+ #[derive(Eq, PartialEq, Debug)]
+ #[foobar]
+ #[derive(Copy, Clone)]
+ pub enum Bar {}
+}
+
+mod foobar {
+ #[derive(Eq, PartialEq, Debug, Copy, Clone)]
+ pub enum FooBar {}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/normalize_comments/false.rs b/src/tools/rustfmt/tests/target/configs/normalize_comments/false.rs
new file mode 100644
index 000000000..488962ed9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/normalize_comments/false.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_comments: false
+// Normalize comments
+
+// Lorem ipsum:
+fn dolor() -> usize {}
+
+/* sit amet: */
+fn adipiscing() -> usize {}
+
+// #652
+////////////////////////////////////////////////////////////////////////////////
+// Basic slice extension methods
+////////////////////////////////////////////////////////////////////////////////
diff --git a/src/tools/rustfmt/tests/target/configs/normalize_comments/true.rs b/src/tools/rustfmt/tests/target/configs/normalize_comments/true.rs
new file mode 100644
index 000000000..0bdbe08ab
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/normalize_comments/true.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_comments: true
+// Normalize comments
+
+// Lorem ipsum:
+fn dolor() -> usize {}
+
+// sit amet:
+fn adipiscing() -> usize {}
+
+// #652
+////////////////////////////////////////////////////////////////////////////////
+// Basic slice extension methods
+////////////////////////////////////////////////////////////////////////////////
diff --git a/src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/false.rs b/src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/false.rs
new file mode 100644
index 000000000..f8eb64273
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/false.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_doc_attributes: false
+// Normalize doc attributes
+
+#![doc = " Example documentation"]
+
+#[doc = " Example item documentation"]
+pub enum Foo {}
+
+#[doc = " Lots of space"]
+pub enum Bar {}
+
+#[doc = "no leading space"]
+pub mod FooBar {}
diff --git a/src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/true.rs b/src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/true.rs
new file mode 100644
index 000000000..fadab985b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/normalize_doc_attributes/true.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_doc_attributes: true
+// Normalize doc attributes
+
+//! Example documentation
+
+/// Example item documentation
+pub enum Foo {}
+
+/// Lots of space
+pub enum Bar {}
+
+///no leading space
+pub mod FooBar {}
diff --git a/src/tools/rustfmt/tests/target/configs/remove_nested_parens/remove_nested_parens.rs b/src/tools/rustfmt/tests/target/configs/remove_nested_parens/remove_nested_parens.rs
new file mode 100644
index 000000000..d896042c3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/remove_nested_parens/remove_nested_parens.rs
@@ -0,0 +1,5 @@
+// rustfmt-remove_nested_parens: true
+
+fn main() {
+ (foo());
+}
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_impl_items/false.rs b/src/tools/rustfmt/tests/target/configs/reorder_impl_items/false.rs
new file mode 100644
index 000000000..beb99f0fb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_impl_items/false.rs
@@ -0,0 +1,11 @@
+// rustfmt-reorder_impl_items: false
+
+struct Dummy;
+
+impl Iterator for Dummy {
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+
+ type Item = i32;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_impl_items/true.rs b/src/tools/rustfmt/tests/target/configs/reorder_impl_items/true.rs
new file mode 100644
index 000000000..f2294412a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_impl_items/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-reorder_impl_items: true
+
+struct Dummy;
+
+impl Iterator for Dummy {
+ type Item = i32;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_imports/false.rs b/src/tools/rustfmt/tests/target/configs/reorder_imports/false.rs
new file mode 100644
index 000000000..4b85684dc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_imports/false.rs
@@ -0,0 +1,7 @@
+// rustfmt-reorder_imports: false
+// Reorder imports
+
+use lorem;
+use ipsum;
+use dolor;
+use sit;
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_imports/true.rs b/src/tools/rustfmt/tests/target/configs/reorder_imports/true.rs
new file mode 100644
index 000000000..e4ff7295f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_imports/true.rs
@@ -0,0 +1,19 @@
+// rustfmt-reorder_imports: true
+// Reorder imports
+
+use dolor;
+use ipsum;
+use lorem;
+use sit;
+
+fn foo() {
+ use A;
+ use B;
+ use C;
+
+ bar();
+
+ use D;
+ use E;
+ use F;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_modules/dolor/mod.rs b/src/tools/rustfmt/tests/target/configs/reorder_modules/dolor/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_modules/dolor/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_modules/false.rs b/src/tools/rustfmt/tests/target/configs/reorder_modules/false.rs
new file mode 100644
index 000000000..56b1aa03e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_modules/false.rs
@@ -0,0 +1,7 @@
+// rustfmt-reorder_modules: false
+// Reorder modules
+
+mod lorem;
+mod ipsum;
+mod dolor;
+mod sit;
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_modules/ipsum/mod.rs b/src/tools/rustfmt/tests/target/configs/reorder_modules/ipsum/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_modules/ipsum/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_modules/lorem/mod.rs b/src/tools/rustfmt/tests/target/configs/reorder_modules/lorem/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_modules/lorem/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_modules/sit/mod.rs b/src/tools/rustfmt/tests/target/configs/reorder_modules/sit/mod.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_modules/sit/mod.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/configs/reorder_modules/true.rs b/src/tools/rustfmt/tests/target/configs/reorder_modules/true.rs
new file mode 100644
index 000000000..18361e88b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/reorder_modules/true.rs
@@ -0,0 +1,7 @@
+// rustfmt-reorder_modules: true
+// Reorder modules
+
+mod dolor;
+mod ipsum;
+mod lorem;
+mod sit;
diff --git a/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/10.rs b/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/10.rs
new file mode 100644
index 000000000..78c4adba1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/10.rs
@@ -0,0 +1,11 @@
+// rustfmt-short_array_element_width_threshold: 10
+
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000,
+ 0xaaaaaaaaaaaaaaaa,
+ 0xbbbbbbbbbbbbbbbb,
+ 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/20.rs b/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/20.rs
new file mode 100644
index 000000000..608469065
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/20.rs
@@ -0,0 +1,8 @@
+// rustfmt-short_array_element_width_threshold: 20
+
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000, 0xaaaaaaaaaaaaaaaa, 0xbbbbbbbbbbbbbbbb, 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/greater_than_max_width.rs b/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/greater_than_max_width.rs
new file mode 100644
index 000000000..710b6fe7c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/short_array_element_width_threshold/greater_than_max_width.rs
@@ -0,0 +1,12 @@
+// rustfmt-max_width: 20
+// rustfmt-short_array_element_width_threshold: 30
+
+fn main() {
+ pub const FORMAT_TEST: [u64; 5] = [
+ 0x0000000000000000,
+ 0xaaaaaaaaaaaaaaaa,
+ 0xbbbbbbbbbbbbbbbb,
+ 0xcccccccccccccccc,
+ 0xdddddddddddddddd,
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/skip_children/foo/mod.rs b/src/tools/rustfmt/tests/target/configs/skip_children/foo/mod.rs
new file mode 100644
index 000000000..d7ff6cdb8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/skip_children/foo/mod.rs
@@ -0,0 +1,3 @@
+fn skip_formatting_this() {
+ println ! ( "Skip this" ) ;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/skip_children/true.rs b/src/tools/rustfmt/tests/target/configs/skip_children/true.rs
new file mode 100644
index 000000000..33fd782b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/skip_children/true.rs
@@ -0,0 +1,4 @@
+// rustfmt-skip_children: true
+
+mod foo;
+mod void;
diff --git a/src/tools/rustfmt/tests/target/configs/space_before_colon/true.rs b/src/tools/rustfmt/tests/target/configs/space_before_colon/true.rs
new file mode 100644
index 000000000..e2895b5d7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/space_before_colon/true.rs
@@ -0,0 +1,11 @@
+// rustfmt-space_before_colon: true
+// Space before colon
+
+fn lorem<T : Eq>(t : T) {
+ let ipsum : Dolor = sit;
+}
+
+const LOREM : Lorem = Lorem {
+ ipsum : dolor,
+ sit : amet,
+};
diff --git a/src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs b/src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs
new file mode 100644
index 000000000..72b1be480
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs
@@ -0,0 +1,34 @@
+// rustfmt-spaces_around_ranges: false
+// Spaces around ranges
+
+fn main() {
+ let lorem = 0..10;
+ let ipsum = 0..=10;
+
+ match lorem {
+ 1..5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1..=5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1...5 => foo(),
+ _ => bar,
+ }
+}
+
+fn half_open() {
+ match [5..4, 99..105, 43..44] {
+ [_, 99.., _] => {}
+ [_, ..105, _] => {}
+ _ => {}
+ };
+
+ if let ..=5 = 0 {}
+ if let ..5 = 0 {}
+ if let 5.. = 0 {}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs b/src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs
new file mode 100644
index 000000000..c56fdbb02
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs
@@ -0,0 +1,34 @@
+// rustfmt-spaces_around_ranges: true
+// Spaces around ranges
+
+fn main() {
+ let lorem = 0 .. 10;
+ let ipsum = 0 ..= 10;
+
+ match lorem {
+ 1 .. 5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1 ..= 5 => foo(),
+ _ => bar,
+ }
+
+ match lorem {
+ 1 ... 5 => foo(),
+ _ => bar,
+ }
+}
+
+fn half_open() {
+ match [5 .. 4, 99 .. 105, 43 .. 44] {
+ [_, 99 .., _] => {}
+ [_, .. 105, _] => {}
+ _ => {}
+ };
+
+ if let ..= 5 = 0 {}
+ if let .. 5 = 0 {}
+ if let 5 .. = 0 {}
+}
diff --git a/src/tools/rustfmt/tests/target/configs/struct_field_align_threshold/20.rs b/src/tools/rustfmt/tests/target/configs/struct_field_align_threshold/20.rs
new file mode 100644
index 000000000..12a523e9d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/struct_field_align_threshold/20.rs
@@ -0,0 +1,471 @@
+// rustfmt-struct_field_align_threshold: 20
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-error_on_line_overflow: false
+
+struct Foo {
+ x: u32,
+ yy: u32, // comment
+ zzz: u32,
+}
+
+pub struct Bar {
+ x: u32,
+ yy: u32,
+ zzz: u32,
+
+ xxxxxxx: u32,
+}
+
+fn main() {
+ let foo = Foo {
+ x: 0,
+ yy: 1,
+ zzz: 2,
+ };
+
+ let bar = Bar {
+ x: 0,
+ yy: 1,
+ zzz: 2,
+
+ xxxxxxx: 3,
+ };
+}
+
+/// A Doc comment
+#[AnAttribute]
+pub struct Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ #[AnAttribute]
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+ pub i: TypeForPublicField,
+}
+
+// #1029
+pub struct Foo {
+ #[doc(hidden)]
+ // This will NOT get deleted!
+ bar: String, // hi
+}
+
+// #1029
+struct X {
+ // `x` is an important number.
+ #[allow(unused)] // TODO: use
+ x: u32,
+}
+
+// #410
+#[allow(missing_docs)]
+pub struct Writebatch<K: Key> {
+ #[allow(dead_code)] // only used for holding the internal pointer
+ writebatch: RawWritebatch,
+ marker: PhantomData<K>,
+}
+
+struct Bar;
+
+struct NewType(Type, OtherType);
+
+struct NewInt<T: Copy>(
+ pub i32,
+ SomeType, // inline comment
+ T, // sup
+);
+
+struct Qux<
+ 'a,
+ N: Clone + 'a,
+ E: Clone + 'a,
+ G: Labeller<'a, N, E> + GraphWalk<'a, N, E>,
+ W: Write + Copy,
+>(
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, // Comment
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+ #[AnAttr]
+ // Comment
+ /// Testdoc
+ G,
+ pub W,
+);
+
+struct Tuple(
+ // Comment 1
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ // Comment 2
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+);
+
+// With a where-clause and generics.
+pub struct Foo<'a, Y: Baz>
+where
+ X: Whatever,
+{
+ f: SomeType, // Comment beside a field
+}
+
+struct Baz {
+ a: A, // Comment A
+ b: B, // Comment B
+ c: C, // Comment C
+}
+
+struct Baz {
+ a: A, // Comment A
+
+ b: B, // Comment B
+
+ c: C, // Comment C
+}
+
+struct Baz {
+ a: A,
+
+ b: B,
+ c: C,
+
+ d: D,
+}
+
+struct Baz {
+ // Comment A
+ a: A,
+
+ // Comment B
+ b: B,
+ // Comment C
+ c: C,
+}
+
+// Will this be a one-liner?
+struct Tuple(
+ A, // Comment
+ B,
+);
+
+pub struct State<F: FnMut() -> time::Timespec> {
+ now: F,
+}
+
+pub struct State<F: FnMut() -> ()> {
+ now: F,
+}
+
+pub struct State<F: FnMut()> {
+ now: F,
+}
+
+struct Palette {
+ /// A map of indices in the palette to a count of pixels in approximately
+ /// that color
+ foo: i32,
+}
+
+// Splitting a single line comment into a block previously had a misalignment
+// when the field had attributes
+struct FieldsWithAttributes {
+ // Pre Comment
+ #[rustfmt::skip] pub host:String, /* Post comment BBBBBBBBBBBBBB BBBBBBBBBBBBBBBB
+ * BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBBB BBBBBBBBBBB */
+ // Another pre comment
+ #[attr1]
+ #[attr2]
+ pub id: usize, /* CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCC
+ * CCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCC CCCCCCCCCCCC */
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep:
+ node::Handle<IdRef<'id, Node<K, V>>, Type, NodeType>,
+}
+
+struct Foo<T>(T);
+struct Foo<T>(T)
+where
+ T: Copy,
+ T: Eq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUU,
+);
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTT,
+)
+where
+ T: PartialEq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTTTT,
+)
+where
+ T: PartialEq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUU,
+)
+where
+ T: PartialEq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT, // Foo
+ UUUUUUUUUUUUUUUUUUUUUUUU, // Bar
+ // Baz
+ TTTTTTTTTTTTTTTTTTT,
+ // Qux (FIXME #572 - doc comment)
+ UUUUUUUUUUUUUUUUUUU,
+);
+
+mod m {
+ struct X<T>
+ where
+ T: Sized,
+ {
+ a: T,
+ }
+}
+
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTTTT,
+ /// Qux
+ UUUUUUUUUUUUUUUUUUU,
+);
+
+struct Issue677 {
+ pub ptr: *const libc::c_void,
+ pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer),
+}
+
+struct Foo {}
+struct Foo {}
+struct Foo {
+ // comment
+}
+struct Foo {
+ // trailing space ->
+}
+struct Foo {
+ // comment
+}
+struct Foo(
+ // comment
+);
+
+struct LongStruct {
+ a: A,
+ the_quick_brown_fox_jumps_over_the_lazy_dog:
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep:
+ node::Handle<IdRef<'id, Node<Key, Value>>, Type, NodeType>,
+}
+
+struct Foo<C = ()>(String);
+
+// #1364
+fn foo() {
+ convex_shape.set_point(0, &Vector2f { x: 400.0, y: 100.0 });
+ convex_shape.set_point(1, &Vector2f { x: 500.0, y: 70.0 });
+ convex_shape.set_point(2, &Vector2f { x: 450.0, y: 100.0 });
+ convex_shape.set_point(3, &Vector2f { x: 580.0, y: 150.0 });
+}
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo { a: x };
+
+ Foo {
+ a: foo(), // comment
+ // comment
+ b: bar(),
+ ..something
+ };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b() };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ a: f(),
+ b: b(),
+ };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a: Bar, b: f() };
+
+ Quux {
+ x: if cond {
+ bar();
+ },
+ y: baz(),
+ };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante
+ // hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item,
+ };
+
+ Some(Data::MethodCallData(MethodCallData {
+ span: sub_span.unwrap(),
+ scope: self.enclosing_scope(id),
+ ref_id: def_id,
+ decl_id: Some(decl_id),
+ }));
+
+ Diagram {
+ // o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ graph: G,
+ }
+}
+
+fn matcher() {
+ TagTerminatedByteMatcher {
+ matcher: ByteMatcher {
+ pattern: b"<HTML",
+ mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
+ },
+ };
+}
+
+fn issue177() {
+ struct Foo<T> {
+ memb: T,
+ }
+ let foo = Foo::<i64> { memb: 10 };
+}
+
+fn issue201() {
+ let s = S { a: 0, ..b };
+}
+
+fn issue201_2() {
+ let s = S { a: S2 { ..c }, ..b };
+}
+
+fn issue278() {
+ let s = S {
+ a: 0,
+ //
+ b: 0,
+ };
+ let s1 = S {
+ a: 0,
+ // foo
+ //
+ // bar
+ b: 0,
+ };
+}
+
+fn struct_exprs() {
+ Foo { a: 1, b: f(2) };
+ Foo {
+ a: 1,
+ b: f(2),
+ ..g(3)
+ };
+ LoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooongStruct {
+ ..base
+ };
+ IntrinsicISizesContribution {
+ content_intrinsic_sizes: IntrinsicISizes {
+ minimum_inline_size: 0,
+ },
+ };
+}
+
+fn issue123() {
+ Foo { a: b, c: d, e: f };
+
+ Foo {
+ a: bb,
+ c: dd,
+ e: ff,
+ };
+
+ Foo {
+ a: ddddddddddddddddddddd,
+ b: cccccccccccccccccccccccccccccccccccccc,
+ };
+}
+
+fn issue491() {
+ Foo {
+ guard: None,
+ arm: 0, // Comment
+ };
+
+ Foo {
+ arm: 0, // Comment
+ };
+
+ Foo {
+ a: aaaaaaaaaa,
+ b: bbbbbbbb,
+ c: cccccccccc,
+ d: dddddddddd, // a comment
+ e: eeeeeeeee,
+ };
+}
+
+fn issue698() {
+ Record {
+ ffffffffffffffffffffffffffields: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ };
+ Record {
+ ffffffffffffffffffffffffffields:
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ }
+}
+
+fn issue835() {
+ MyStruct {};
+ MyStruct { /* a comment */ };
+ MyStruct {
+ // Another comment
+ };
+ MyStruct {}
+}
+
+fn field_init_shorthand() {
+ MyStruct { x, y, z };
+ MyStruct { x, y, z, ..base };
+ Foo {
+ aaaaaaaaaa,
+ bbbbbbbb,
+ cccccccccc,
+ dddddddddd, // a comment
+ eeeeeeeee,
+ };
+ Record {
+ ffffffffffffffffffffffffffieldsaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/struct_lit_single_line/false.rs b/src/tools/rustfmt/tests/target/configs/struct_lit_single_line/false.rs
new file mode 100644
index 000000000..e2732b5a7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/struct_lit_single_line/false.rs
@@ -0,0 +1,9 @@
+// rustfmt-struct_lit_single_line: false
+// Struct literal multiline-style
+
+fn main() {
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/tab_spaces/2.rs b/src/tools/rustfmt/tests/target/configs/tab_spaces/2.rs
new file mode 100644
index 000000000..85961706e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/tab_spaces/2.rs
@@ -0,0 +1,14 @@
+// rustfmt-tab_spaces: 2
+// rustfmt-max_width: 30
+// rustfmt-indent_style: Block
+// Tab spaces
+
+fn lorem() {
+ let ipsum = dolor();
+ let sit = vec![
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit.",
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/tab_spaces/4.rs b/src/tools/rustfmt/tests/target/configs/tab_spaces/4.rs
new file mode 100644
index 000000000..524a55121
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/tab_spaces/4.rs
@@ -0,0 +1,14 @@
+// rustfmt-tab_spaces: 4
+// rustfmt-max_width: 30
+// rustfmt-indent_style: Block
+// Tab spaces
+
+fn lorem() {
+ let ipsum = dolor();
+ let sit = vec![
+ "amet",
+ "consectetur",
+ "adipiscing",
+ "elit.",
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/configs/trailing_comma/always.rs b/src/tools/rustfmt/tests/target/configs/trailing_comma/always.rs
new file mode 100644
index 000000000..951dc6809
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/trailing_comma/always.rs
@@ -0,0 +1,14 @@
+// rustfmt-trailing_comma: Always
+// Trailing comma
+
+fn main() {
+ let Lorem { ipsum, dolor, sit, } = amet;
+ let Lorem {
+ ipsum,
+ dolor,
+ sit,
+ amet,
+ consectetur,
+ adipiscing,
+ } = elit;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/trailing_comma/never.rs b/src/tools/rustfmt/tests/target/configs/trailing_comma/never.rs
new file mode 100644
index 000000000..ae0e50f96
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/trailing_comma/never.rs
@@ -0,0 +1,35 @@
+// rustfmt-trailing_comma: Never
+// Trailing comma
+
+fn main() {
+ let Lorem { ipsum, dolor, sit } = amet;
+ let Lorem {
+ ipsum,
+ dolor,
+ sit,
+ amet,
+ consectetur,
+ adipiscing
+ } = elit;
+
+ // #1544
+ if let VrMsg::ClientReply {
+ request_num: reply_req_num,
+ value,
+ ..
+ } = msg
+ {
+ let _ = safe_assert_eq!(reply_req_num, request_num, op);
+ return Ok((request_num, op, value));
+ }
+
+ // #1710
+ pub struct FileInput {
+ input: StringInput,
+ file_name: OsString
+ }
+ match len {
+ Some(len) => Ok(new(self.input, self.pos + len)),
+ None => Err(self)
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/configs/trailing_comma/vertical.rs b/src/tools/rustfmt/tests/target/configs/trailing_comma/vertical.rs
new file mode 100644
index 000000000..7283cde8d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/trailing_comma/vertical.rs
@@ -0,0 +1,14 @@
+// rustfmt-trailing_comma: Vertical
+// Trailing comma
+
+fn main() {
+ let Lorem { ipsum, dolor, sit } = amet;
+ let Lorem {
+ ipsum,
+ dolor,
+ sit,
+ amet,
+ consectetur,
+ adipiscing,
+ } = elit;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/trailing_semicolon/false.rs b/src/tools/rustfmt/tests/target/configs/trailing_semicolon/false.rs
new file mode 100644
index 000000000..9fa746e9c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/trailing_semicolon/false.rs
@@ -0,0 +1,27 @@
+// rustfmt-trailing_semicolon: false
+
+#![feature(loop_break_value)]
+
+fn main() {
+ 'a: loop {
+ break 'a
+ }
+
+ let mut done = false;
+ 'b: while !done {
+ done = true;
+ continue 'b
+ }
+
+ let x = loop {
+ break 5
+ };
+
+ let x = 'c: loop {
+ break 'c 5
+ };
+}
+
+fn foo() -> usize {
+ return 0
+}
diff --git a/src/tools/rustfmt/tests/target/configs/trailing_semicolon/true.rs b/src/tools/rustfmt/tests/target/configs/trailing_semicolon/true.rs
new file mode 100644
index 000000000..61b6843d6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/trailing_semicolon/true.rs
@@ -0,0 +1,27 @@
+// rustfmt-trailing_semicolon: true
+
+#![feature(loop_break_value)]
+
+fn main() {
+ 'a: loop {
+ break 'a;
+ }
+
+ let mut done = false;
+ 'b: while !done {
+ done = true;
+ continue 'b;
+ }
+
+ let x = loop {
+ break 5;
+ };
+
+ let x = 'c: loop {
+ break 'c 5;
+ };
+}
+
+fn foo() -> usize {
+ return 0;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/type_punctuation_density/compressed.rs b/src/tools/rustfmt/tests/target/configs/type_punctuation_density/compressed.rs
new file mode 100644
index 000000000..6571e448e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/type_punctuation_density/compressed.rs
@@ -0,0 +1,41 @@
+// rustfmt-type_punctuation_density: Compressed
+// Type punctuation density
+
+fn lorem<Ipsum: Dolor+Sit=Amet>() {
+ // body
+}
+
+struct Foo<T: Eq+Clone, U>
+where
+ U: Eq+Clone, {
+ // body
+}
+
+trait Foo<'a, T=usize>
+where
+ T: 'a+Eq+Clone,
+{
+ type Bar: Eq+Clone;
+}
+
+trait Foo: Eq+Clone {
+ // body
+}
+
+impl<T> Foo<'a> for Bar
+where
+ for<'a> T: 'a+Eq+Clone,
+{
+ // body
+}
+
+fn foo<'a, 'b, 'c>()
+where
+ 'a: 'b+'c,
+{
+ // body
+}
+
+fn Foo<T=Foo, Output=Expr<'tcx>+Foo>() {
+ let i = 6;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/type_punctuation_density/wide.rs b/src/tools/rustfmt/tests/target/configs/type_punctuation_density/wide.rs
new file mode 100644
index 000000000..01546c7b0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/type_punctuation_density/wide.rs
@@ -0,0 +1,41 @@
+// rustfmt-type_punctuation_density: Wide
+// Type punctuation density
+
+fn lorem<Ipsum: Dolor + Sit = Amet>() {
+ // body
+}
+
+struct Foo<T: Eq + Clone, U>
+where
+ U: Eq + Clone, {
+ // body
+}
+
+trait Foo<'a, T = usize>
+where
+ T: 'a + Eq + Clone,
+{
+ type Bar: Eq + Clone;
+}
+
+trait Foo: Eq + Clone {
+ // body
+}
+
+impl<T> Foo<'a> for Bar
+where
+ for<'a> T: 'a + Eq + Clone,
+{
+ // body
+}
+
+fn foo<'a, 'b, 'c>()
+where
+ 'a: 'b + 'c,
+{
+ // body
+}
+
+fn Foo<T = Foo, Output = Expr<'tcx> + Foo>() {
+ let i = 6;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/false.rs b/src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/false.rs
new file mode 100644
index 000000000..743304468
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/false.rs
@@ -0,0 +1,15 @@
+// rustfmt-use_field_init_shorthand: false
+// Use field initialization shorthand if possible.
+
+fn main() {
+ let a = Foo { x: x, y: y, z: z };
+
+ let b = Bar {
+ x: x,
+ y: y,
+ #[attr]
+ z: z,
+ #[rustfmt::skip]
+ skipped: skipped,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/true.rs b/src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/true.rs
new file mode 100644
index 000000000..8b80e8153
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_field_init_shorthand/true.rs
@@ -0,0 +1,15 @@
+// rustfmt-use_field_init_shorthand: true
+// Use field initialization shorthand if possible.
+
+fn main() {
+ let a = Foo { x, y, z };
+
+ let b = Bar {
+ x,
+ y,
+ #[attr]
+ z,
+ #[rustfmt::skip]
+ skipped: skipped,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_small_heuristics/default.rs b/src/tools/rustfmt/tests/target/configs/use_small_heuristics/default.rs
new file mode 100644
index 000000000..d67bd9aaf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_small_heuristics/default.rs
@@ -0,0 +1,26 @@
+// rustfmt-use_small_heuristics: Default
+
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit { amet: Consectetur, adipiscing: Elit },
+}
+
+fn main() {
+ lorem(
+ "lorem",
+ "ipsum",
+ "dolor",
+ "sit",
+ "amet",
+ "consectetur",
+ "adipiscing",
+ );
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+
+ let lorem = if ipsum { dolor } else { sit };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_small_heuristics/max.rs b/src/tools/rustfmt/tests/target/configs/use_small_heuristics/max.rs
new file mode 100644
index 000000000..785dfbea0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_small_heuristics/max.rs
@@ -0,0 +1,15 @@
+// rustfmt-use_small_heuristics: Max
+
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit { amet: Consectetur, adipiscing: Elit },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem { ipsum: dolor, sit: amet };
+
+ let lorem = if ipsum { dolor } else { sit };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_small_heuristics/off.rs b/src/tools/rustfmt/tests/target/configs/use_small_heuristics/off.rs
new file mode 100644
index 000000000..f76392d24
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_small_heuristics/off.rs
@@ -0,0 +1,25 @@
+// rustfmt-use_small_heuristics: Off
+
+enum Lorem {
+ Ipsum,
+ Dolor(bool),
+ Sit {
+ amet: Consectetur,
+ adipiscing: Elit,
+ },
+}
+
+fn main() {
+ lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
+
+ let lorem = Lorem {
+ ipsum: dolor,
+ sit: amet,
+ };
+
+ let lorem = if ipsum {
+ dolor
+ } else {
+ sit
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs b/src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs
new file mode 100644
index 000000000..de7f8b4a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs
@@ -0,0 +1,6 @@
+// rustfmt-use_try_shorthand: false
+// Use try! shorthand
+
+fn main() {
+ let lorem = try!(ipsum.map(|dolor| dolor.sit()));
+}
diff --git a/src/tools/rustfmt/tests/target/configs/use_try_shorthand/true.rs b/src/tools/rustfmt/tests/target/configs/use_try_shorthand/true.rs
new file mode 100644
index 000000000..d3aa03579
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/use_try_shorthand/true.rs
@@ -0,0 +1,6 @@
+// rustfmt-use_try_shorthand: true
+// Use try! shorthand
+
+fn main() {
+ let lorem = ipsum.map(|dolor| dolor.sit())?;
+}
diff --git a/src/tools/rustfmt/tests/target/configs/where_single_line/true-with-brace-style.rs b/src/tools/rustfmt/tests/target/configs/where_single_line/true-with-brace-style.rs
new file mode 100644
index 000000000..ec7f79b68
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/where_single_line/true-with-brace-style.rs
@@ -0,0 +1,22 @@
+// rustfmt-brace_style: SameLineWhere
+// rustfmt-where_single_line: true
+
+fn lorem_multi_line_clauseless<Ipsum, Dolor, Sit, Amet>(
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) -> T {
+ // body
+}
+
+fn lorem_multi_line_clauseless<Ipsum, Dolor, Sit, Amet>(
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) {
+ // body
+}
diff --git a/src/tools/rustfmt/tests/target/configs/where_single_line/true.rs b/src/tools/rustfmt/tests/target/configs/where_single_line/true.rs
new file mode 100644
index 000000000..7f816459e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/where_single_line/true.rs
@@ -0,0 +1,30 @@
+// rustfmt-where_single_line: true
+// Where style
+
+fn lorem_two_items<Ipsum, Dolor, Sit, Amet>() -> T
+where
+ Ipsum: Eq,
+ Lorem: Eq,
+{
+ // body
+}
+
+fn lorem_multi_line<Ipsum, Dolor, Sit, Amet>(
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) -> T
+where
+ Ipsum: Eq,
+{
+ // body
+}
+
+fn lorem<Ipsum, Dolor, Sit, Amet>() -> T
+where Ipsum: Eq {
+ // body
+}
+
+unsafe impl Sync for Foo where (): Send {}
diff --git a/src/tools/rustfmt/tests/target/configs/wrap_comments/false.rs b/src/tools/rustfmt/tests/target/configs/wrap_comments/false.rs
new file mode 100644
index 000000000..48ecd88ac
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/wrap_comments/false.rs
@@ -0,0 +1,8 @@
+// rustfmt-wrap_comments: false
+// rustfmt-max_width: 50
+// rustfmt-error_on_line_overflow: false
+// Wrap comments
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
+}
diff --git a/src/tools/rustfmt/tests/target/configs/wrap_comments/true.rs b/src/tools/rustfmt/tests/target/configs/wrap_comments/true.rs
new file mode 100644
index 000000000..4096fd4d8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/configs/wrap_comments/true.rs
@@ -0,0 +1,20 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 50
+// Wrap comments
+
+fn main() {
+ // Lorem ipsum dolor sit amet, consectetur
+ // adipiscing elit, sed do eiusmod tempor
+ // incididunt ut labore et dolore magna
+ // aliqua. Ut enim ad minim veniam, quis
+ // nostrud exercitation ullamco laboris nisi
+ // ut aliquip ex ea commodo consequat.
+}
+
+fn code_block() {
+ // ```rust
+ // let x = 3;
+ //
+ // println!("x = {}", x);
+ // ```
+}
diff --git a/src/tools/rustfmt/tests/target/const_generics.rs b/src/tools/rustfmt/tests/target/const_generics.rs
new file mode 100644
index 000000000..b30b7b58c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/const_generics.rs
@@ -0,0 +1,37 @@
+struct Message {
+ field2: Vec<"MessageEntity">,
+ field3: Vec<1>,
+ field4: Vec<2, 3>,
+}
+
+struct RectangularArray<T, const WIDTH: usize, const HEIGHT: usize> {
+ array: [[T; WIDTH]; HEIGHT],
+}
+
+fn main() {
+ const X: usize = 7;
+ let x: RectangularArray<i32, 2, 4>;
+ let y: RectangularArray<i32, X, { 2 * 2 }>;
+}
+
+fn foo<const X: usize>() {
+ const Y: usize = X * 2;
+ static Z: (usize, usize) = (X, X);
+
+ struct Foo([i32; X]);
+}
+
+type Foo<const N: usize> = [i32; N + 1];
+
+pub trait Foo: Bar<{ Baz::COUNT }> {
+ const ASD: usize;
+}
+
+// #4263
+fn const_generics_on_params<
+ // AAAA
+ const BBBB: usize,
+ /* CCCC */
+ const DDDD: usize,
+>() {
+}
diff --git a/src/tools/rustfmt/tests/target/control-brace-style-always-next-line.rs b/src/tools/rustfmt/tests/target/control-brace-style-always-next-line.rs
new file mode 100644
index 000000000..054a3075c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/control-brace-style-always-next-line.rs
@@ -0,0 +1,50 @@
+// rustfmt-control_brace_style: AlwaysNextLine
+
+fn main() {
+ loop
+ {
+ ();
+ ();
+ }
+
+ 'label: loop
+ // loop comment
+ {
+ ();
+ }
+
+ cond = true;
+ while cond
+ {
+ ();
+ }
+
+ 'while_label: while cond
+ {
+ // while comment
+ ();
+ }
+
+ for obj in iter
+ {
+ for sub_obj in obj
+ {
+ 'nested_while_label: while cond
+ {
+ ();
+ }
+ }
+ }
+
+ match some_var
+ {
+ // match comment
+ pattern0 => val0,
+ pattern1 => val1,
+ pattern2 | pattern3 =>
+ {
+ do_stuff();
+ val2
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/control-brace-style-always-same-line.rs b/src/tools/rustfmt/tests/target/control-brace-style-always-same-line.rs
new file mode 100644
index 000000000..cf3f82dfc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/control-brace-style-always-same-line.rs
@@ -0,0 +1,40 @@
+fn main() {
+ loop {
+ ();
+ ();
+ }
+
+ 'label: loop
+ // loop comment
+ {
+ ();
+ }
+
+ cond = true;
+ while cond {
+ ();
+ }
+
+ 'while_label: while cond {
+ // while comment
+ ();
+ }
+
+ for obj in iter {
+ for sub_obj in obj {
+ 'nested_while_label: while cond {
+ ();
+ }
+ }
+ }
+
+ match some_var {
+ // match comment
+ pattern0 => val0,
+ pattern1 => val1,
+ pattern2 | pattern3 => {
+ do_stuff();
+ val2
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/doc-attrib.rs b/src/tools/rustfmt/tests/target/doc-attrib.rs
new file mode 100644
index 000000000..36527b7cd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/doc-attrib.rs
@@ -0,0 +1,131 @@
+// rustfmt-wrap_comments: true
+// rustfmt-normalize_doc_attributes: true
+
+// Only doc = "" attributes should be normalized
+//! Example doc attribute comment
+//! Example doc attribute comment with 10 leading spaces
+#![doc(
+ html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/",
+ test(attr(deny(warnings)))
+)]
+
+// Long `#[doc = "..."]`
+struct A {
+ /// xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ b: i32,
+}
+
+/// The `nodes` and `edges` method each return instantiations of `Cow<[T]>` to
+/// leave implementers the freedom to create entirely new vectors or to pass
+/// back slices into internally owned vectors.
+struct B {
+ b: i32,
+}
+
+/// Level 1 comment
+mod tests {
+ /// Level 2 comment
+ impl A {
+ /// Level 3 comment
+ fn f() {
+ /// Level 4 comment
+ fn g() {}
+ }
+ }
+}
+
+struct C {
+ /// item doc attrib comment
+ // regular item comment
+ b: i32,
+
+ // regular item comment
+ /// item doc attrib comment
+ c: i32,
+}
+
+// non-regression test for regular attributes, from #2647
+#[cfg(
+ feature = "this_line_is_101_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+)]
+pub fn foo() {}
+
+// path attrs
+#[clippy::bar]
+#[clippy::bar(a, b, c)]
+pub fn foo() {}
+
+mod issue_2620 {
+ #[derive(Debug, StructOpt)]
+ #[structopt(about = "Display information about the character on FF Logs")]
+ pub struct Params {
+ #[structopt(help = "The server the character is on")]
+ server: String,
+ #[structopt(help = "The character's first name")]
+ first_name: String,
+ #[structopt(help = "The character's last name")]
+ last_name: String,
+ #[structopt(
+ short = "j",
+ long = "job",
+ help = "The job to look at",
+ parse(try_from_str)
+ )]
+ job: Option<Job>,
+ }
+}
+
+// non-regression test for regular attributes, from #2969
+#[cfg(not(all(
+ feature = "std",
+ any(
+ target_os = "linux",
+ target_os = "android",
+ target_os = "netbsd",
+ target_os = "dragonfly",
+ target_os = "haiku",
+ target_os = "emscripten",
+ target_os = "solaris",
+ target_os = "cloudabi",
+ target_os = "macos",
+ target_os = "ios",
+ target_os = "freebsd",
+ target_os = "openbsd",
+ target_os = "redox",
+ target_os = "fuchsia",
+ windows,
+ all(target_arch = "wasm32", feature = "stdweb"),
+ all(target_arch = "wasm32", feature = "wasm-bindgen"),
+ )
+)))]
+type Os = NoSource;
+
+// use cases from bindgen needing precise control over leading spaces
+/// <div rustbindgen accessor></div>
+#[repr(C)]
+#[derive(Debug, Default, Copy, Clone)]
+pub struct ContradictAccessors {
+ ///<foo>no leading spaces here</foo>
+ pub mBothAccessors: ::std::os::raw::c_int,
+ /// <div rustbindgen accessor="false"></div>
+ pub mNoAccessors: ::std::os::raw::c_int,
+ /// <div rustbindgen accessor="unsafe"></div>
+ pub mUnsafeAccessors: ::std::os::raw::c_int,
+ /// <div rustbindgen accessor="immutable"></div>
+ pub mImmutableAccessor: ::std::os::raw::c_int,
+}
+
+/// \brief MPI structure
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct mbedtls_mpi {
+ ///< integer sign
+ pub s: ::std::os::raw::c_int,
+ ///< total # of limbs
+ pub n: ::std::os::raw::c_ulong,
+ ///< pointer to limbs
+ pub p: *mut mbedtls_mpi_uint,
+}
diff --git a/src/tools/rustfmt/tests/target/doc-comment-with-example.rs b/src/tools/rustfmt/tests/target/doc-comment-with-example.rs
new file mode 100644
index 000000000..c5a4e779e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/doc-comment-with-example.rs
@@ -0,0 +1,11 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// Foo
+///
+/// # Example
+/// ```
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+/// # #![cfg_attr(not(dox), no_std)]
+/// fn foo() {}
+/// ```
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/doc-of-generic-item.rs b/src/tools/rustfmt/tests/target/doc-of-generic-item.rs
new file mode 100644
index 000000000..2efc5e09a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/doc-of-generic-item.rs
@@ -0,0 +1,14 @@
+// Non-doc pre-comment of Foo
+/// doc of Foo
+// Non-doc post-comment of Foo
+struct Foo<
+ // Non-doc pre-comment of 'a
+ /// doc of 'a
+ 'a,
+ // Non-doc pre-comment of T
+ /// doc of T
+ T,
+ // Non-doc pre-comment of N
+ /// doc of N
+ const N: item,
+>;
diff --git a/src/tools/rustfmt/tests/target/doc.rs b/src/tools/rustfmt/tests/target/doc.rs
new file mode 100644
index 000000000..0f9e2d21c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/doc.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+// Part of multiple.rs
+
+// sadfsdfa
+// sdffsdfasdf
diff --git a/src/tools/rustfmt/tests/target/dyn_trait.rs b/src/tools/rustfmt/tests/target/dyn_trait.rs
new file mode 100644
index 000000000..b6e2810a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/dyn_trait.rs
@@ -0,0 +1,27 @@
+#![feature(dyn_trait)]
+
+fn main() {
+ // #2506
+ // checks rustfmt doesn't remove dyn
+ trait MyTrait {
+ fn method(&self) -> u64;
+ }
+ fn f1(a: Box<dyn MyTrait>) {}
+
+ // checks if line wrap works correctly
+ trait Very_______________________Long__________________Name_______________________________Trait
+ {
+ fn method(&self) -> u64;
+ }
+
+ fn f2(
+ a: Box<
+ dyn Very_______________________Long__________________Name____________________Trait
+ + 'static,
+ >,
+ ) {
+ }
+
+ // #2582
+ let _: &dyn (::std::any::Any) = &msg;
+}
diff --git a/src/tools/rustfmt/tests/target/else-if-brace-style-always-next-line.rs b/src/tools/rustfmt/tests/target/else-if-brace-style-always-next-line.rs
new file mode 100644
index 000000000..31e12cfa0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/else-if-brace-style-always-next-line.rs
@@ -0,0 +1,53 @@
+// rustfmt-control_brace_style: AlwaysNextLine
+
+fn main() {
+ if false
+ {
+ ();
+ ();
+ }
+
+ if false
+ // lone if comment
+ {
+ ();
+ ();
+ }
+
+ let a = if 0 > 1 { unreachable!() } else { 0x0 };
+
+ if true
+ {
+ ();
+ }
+ else if false
+ {
+ ();
+ ();
+ }
+ else
+ {
+ ();
+ ();
+ ();
+ }
+
+ if true
+ // else-if-chain if comment
+ {
+ ();
+ }
+ else if false
+ // else-if-chain else-if comment
+ {
+ ();
+ ();
+ }
+ else
+ // else-if-chain else comment
+ {
+ ();
+ ();
+ ();
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/else-if-brace-style-always-same-line.rs b/src/tools/rustfmt/tests/target/else-if-brace-style-always-same-line.rs
new file mode 100644
index 000000000..07b71fd79
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/else-if-brace-style-always-same-line.rs
@@ -0,0 +1,43 @@
+fn main() {
+ if false {
+ ();
+ ();
+ }
+
+ if false
+ // lone if comment
+ {
+ ();
+ ();
+ }
+
+ let a = if 0 > 1 { unreachable!() } else { 0x0 };
+
+ if true {
+ ();
+ } else if false {
+ ();
+ ();
+ } else {
+ ();
+ ();
+ ();
+ }
+
+ if true
+ // else-if-chain if comment
+ {
+ ();
+ } else if false
+ // else-if-chain else-if comment
+ {
+ ();
+ ();
+ } else
+ // else-if-chain else comment
+ {
+ ();
+ ();
+ ();
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/else-if-brace-style-closing-next-line.rs b/src/tools/rustfmt/tests/target/else-if-brace-style-closing-next-line.rs
new file mode 100644
index 000000000..c99807dc0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/else-if-brace-style-closing-next-line.rs
@@ -0,0 +1,49 @@
+// rustfmt-control_brace_style: ClosingNextLine
+
+fn main() {
+ if false {
+ ();
+ ();
+ }
+
+ if false
+ // lone if comment
+ {
+ ();
+ ();
+ }
+
+ let a = if 0 > 1 { unreachable!() } else { 0x0 };
+
+ if true {
+ ();
+ }
+ else if false {
+ ();
+ ();
+ }
+ else {
+ ();
+ ();
+ ();
+ }
+
+ if true
+ // else-if-chain if comment
+ {
+ ();
+ }
+ else if false
+ // else-if-chain else-if comment
+ {
+ ();
+ ();
+ }
+ else
+ // else-if-chain else comment
+ {
+ ();
+ ();
+ ();
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/empty-item-single-line-false.rs b/src/tools/rustfmt/tests/target/empty-item-single-line-false.rs
new file mode 100644
index 000000000..bf7f70e7c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/empty-item-single-line-false.rs
@@ -0,0 +1,41 @@
+// rustfmt-brace_style: AlwaysNextLine
+// rustfmt-empty_item_single_line: false
+
+fn function()
+{
+}
+
+struct Struct {}
+
+enum Enum {}
+
+trait Trait
+{
+}
+
+impl<T> Trait for T
+{
+}
+
+trait Trait2<T>
+where
+ T: Copy + Display + Write + Read + FromStr,
+{
+}
+
+trait Trait3<T>
+where
+ T: Something
+ + SomethingElse
+ + Sync
+ + Send
+ + Display
+ + Debug
+ + Copy
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/empty-tuple-no-conversion-to-unit-struct.rs b/src/tools/rustfmt/tests/target/empty-tuple-no-conversion-to-unit-struct.rs
new file mode 100644
index 000000000..0b9a15e8a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/empty-tuple-no-conversion-to-unit-struct.rs
@@ -0,0 +1,12 @@
+enum TestEnum {
+ Arm1(),
+ Arm2,
+}
+
+fn foo() {
+ let test = TestEnum::Arm1;
+ match test {
+ TestEnum::Arm1() => {}
+ TestEnum::Arm2 => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/empty_file.rs b/src/tools/rustfmt/tests/target/empty_file.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/empty_file.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/enum.rs b/src/tools/rustfmt/tests/target/enum.rs
new file mode 100644
index 000000000..9a25126b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/enum.rs
@@ -0,0 +1,289 @@
+// rustfmt-wrap_comments: true
+// Enums test
+
+#[atrr]
+pub enum Test {
+ A,
+ B(u32, A /* comment */, SomeType),
+ /// Doc comment
+ C,
+}
+
+pub enum Foo<'a, Y: Baz>
+where
+ X: Whatever,
+{
+ A,
+}
+
+enum EmtpyWithComment {
+ // Some comment
+}
+
+// C-style enum
+enum Bar {
+ A = 1,
+ #[someAttr(test)]
+ B = 2, // comment
+ C,
+}
+
+enum LongVariants {
+ First(
+ LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG, // comment
+ VARIANT,
+ ),
+ // This is the second variant
+ Second,
+}
+
+enum StructLikeVariants {
+ Normal(u32, String),
+ StructLike {
+ x: i32, // Test comment
+ // Pre-comment
+ #[Attr50]
+ y: SomeType, // Aanother Comment
+ },
+ SL {
+ a: A,
+ },
+}
+
+enum X {
+ CreateWebGLPaintTask(
+ Size2D<i32>,
+ GLContextAttributes,
+ IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>,
+ ), // This is a post comment
+}
+
+pub enum EnumWithAttributes {
+ //This is a pre comment
+ // AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ TupleVar(usize, usize, usize), /* AAAA AAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAA
+ * AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA */
+ // Pre Comment
+ #[rustfmt::skip]
+ SkippedItem(String,String,), // Post-comment
+ #[another_attr]
+ #[attr2]
+ ItemStruct {
+ x: usize,
+ y: usize,
+ }, /* Comment AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA */
+ // And another
+ ForcedPreflight, /* AAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ * AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA */
+}
+
+pub enum SingleTuple {
+ // Pre Comment AAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ // AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ Match(usize, usize, String), /* Post-comment AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA */
+}
+
+pub enum SingleStruct {
+ Match { name: String, loc: usize }, // Post-comment
+}
+
+pub enum GenericEnum<I, T>
+where
+ I: Iterator<Item = T>,
+{
+ // Pre Comment
+ Left { list: I, root: T }, // Post-comment
+ Right { list: I, root: T }, // Post Comment
+}
+
+enum EmtpyWithComment {
+ // Some comment
+}
+
+enum TestFormatFails {
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+fn nested_enum_test() {
+ if true {
+ enum TestEnum {
+ One(
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ usize,
+ ), /* AAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAA
+ * AAAAAAAAAAAAAAAAAAAAAA */
+ Two, /* AAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ * AAAAAAAAAAAAAAAAAA */
+ }
+ enum TestNestedFormatFail {
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ }
+ }
+}
+
+pub struct EmtpyWithComment {
+ // FIXME: Implement this struct
+}
+
+// #1115
+pub enum Bencoding<'i> {
+ Str(&'i [u8]),
+ Int(i64),
+ List(Vec<Bencoding<'i>>),
+ /// A bencoded dict value. The first element the slice of bytes in the
+ /// source that the dict is composed of. The second is the dict, decoded
+ /// into an ordered map.
+ // TODO make Dict "structlike" AKA name the two values.
+ Dict(&'i [u8], BTreeMap<&'i [u8], Bencoding<'i>>),
+}
+
+// #1261
+pub enum CoreResourceMsg {
+ SetCookieForUrl(
+ ServoUrl,
+ #[serde(
+ deserialize_with = "::hyper_serde::deserialize",
+ serialize_with = "::hyper_serde::serialize"
+ )]
+ Cookie,
+ CookieSource,
+ ),
+}
+
+enum Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{}
+enum Looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{}
+enum Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{}
+enum Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{
+ Foo,
+}
+
+// #1046
+pub enum Entry<'a, K: 'a, V: 'a> {
+ Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
+ Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
+}
+
+// #2081
+pub enum ForegroundColor {
+ CYAN =
+ (winapi::FOREGROUND_INTENSITY | winapi::FOREGROUND_GREEN | winapi::FOREGROUND_BLUE) as u16,
+}
+
+// #2098
+pub enum E<'a> {
+ V(<std::slice::Iter<'a, Xxxxxxxxxxxxxx> as Iterator>::Item),
+}
+
+// #1809
+enum State {
+ TryRecv {
+ pos: usize,
+ lap: u8,
+ closed_count: usize,
+ },
+ Subscribe {
+ pos: usize,
+ },
+ IsReady {
+ pos: usize,
+ ready: bool,
+ },
+ Unsubscribe {
+ pos: usize,
+ lap: u8,
+ id_woken: usize,
+ },
+ FinalTryRecv {
+ pos: usize,
+ id_woken: usize,
+ },
+ TimedOut,
+ Disconnected,
+}
+
+// #2190
+#[derive(Debug, Fail)]
+enum AnError {
+ #[fail(
+ display = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ )]
+ UnexpectedSingleToken { token: syn::Token },
+}
+
+// #2193
+enum WidthOf101 {
+ #[fail(display = ".....................................................")]
+ Io(::std::io::Error),
+ #[fail(display = ".....................................................")]
+ Ioo(::std::io::Error),
+ Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(::std::io::Error),
+ Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+ ::std::io::Error,
+ ),
+}
+
+// #2389
+pub enum QlError {
+ #[fail(display = "Parsing error: {}", 0)]
+ LexError(parser::lexer::LexError),
+ #[fail(display = "Parsing error: {:?}", 0)]
+ ParseError(parser::ParseError),
+ #[fail(display = "Validation error: {:?}", 0)]
+ ValidationError(Vec<validation::Error>),
+ #[fail(display = "Execution error: {}", 0)]
+ ExecutionError(String),
+ // (from, to)
+ #[fail(display = "Translation error: from {} to {}", 0, 1)]
+ TranslationError(String, String),
+ // (kind, input, expected)
+ #[fail(
+ display = "aaaaaaaaaaaaCould not find {}: Found: {}, expected: {:?}",
+ 0, 1, 2
+ )]
+ ResolveError(&'static str, String, Option<String>),
+}
+
+// #2594
+enum Foo {}
+enum Bar {}
+
+// #3562
+enum PublishedFileVisibility {
+ Public =
+ sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityPublic,
+ FriendsOnly = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityFriendsOnly,
+ Private =
+ sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityPrivate,
+}
+
+// #3771
+//#![feature(arbitrary_enum_discriminant)]
+#[repr(u32)]
+pub enum E {
+ A {
+ a: u32,
+ } = 0x100,
+ B {
+ field1: u32,
+ field2: u8,
+ field3: m::M,
+ } = 0x300, // comment
+}
diff --git a/src/tools/rustfmt/tests/target/existential_type.rs b/src/tools/rustfmt/tests/target/existential_type.rs
new file mode 100644
index 000000000..ffc206875
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/existential_type.rs
@@ -0,0 +1,23 @@
+// Opaque type.
+
+#![feature(type_alias_impl_trait)]
+
+pub type Adder<F, T>
+where
+ T: Clone,
+ F: Copy,
+= impl Fn(T) -> T;
+
+pub type Adderrr<T> = impl Fn(T) -> T;
+
+impl Foo for Bar {
+ type E = impl Trait;
+}
+
+pub type Adder_without_impl<F, T>
+where
+ T: Clone,
+ F: Copy,
+= Fn(T) -> T;
+
+pub type Adderrr_without_impl<T> = Fn(T) -> T;
diff --git a/src/tools/rustfmt/tests/target/expr-block.rs b/src/tools/rustfmt/tests/target/expr-block.rs
new file mode 100644
index 000000000..c57700650
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/expr-block.rs
@@ -0,0 +1,305 @@
+// Test expressions with block formatting.
+
+fn arrays() {
+ [];
+ let empty = [];
+
+ let foo = [a_long_name, a_very_lng_name, a_long_name];
+
+ let foo = [
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ ];
+
+ vec![
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ a_very_lng_name,
+ ];
+
+ [
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ a_long_name,
+ a_very_lng_name,
+ a_very_lng_name,
+ ]
+}
+
+fn arrays() {
+ let x = [
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 7, 8, 9, 0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 0,
+ ];
+
+ let y = [/* comment */ 1, 2 /* post comment */, 3];
+
+ let xy = [
+ strukt {
+ test123: value_one_two_three_four,
+ turbo: coolio(),
+ },
+ /* comment */ 1,
+ ];
+
+ let a = WeightedChoice::new(&mut [
+ Weighted { weight: x, item: 0 },
+ Weighted { weight: 1, item: 1 },
+ Weighted { weight: x, item: 2 },
+ Weighted { weight: 1, item: 3 },
+ ]);
+
+ let z = [
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzz,
+ q,
+ ];
+
+ [1 + 3, 4, 5, 6, 7, 7, fncall::<Vec<_>>(3 - 1)]
+}
+
+fn function_calls() {
+ let items = itemize_list(
+ context.source_map,
+ args.iter(),
+ ")",
+ |item| item.span.lo(),
+ |item| item.span.hi(),
+ |item| {
+ item.rewrite(
+ context,
+ Shape {
+ width: remaining_width,
+ ..nested_shape
+ },
+ )
+ },
+ span.lo(),
+ span.hi(),
+ );
+
+ itemize_list(
+ context.source_map,
+ args.iter(),
+ ")",
+ |item| item.span.lo(),
+ |item| item.span.hi(),
+ |item| {
+ item.rewrite(
+ context,
+ Shape {
+ width: remaining_width,
+ ..nested_shape
+ },
+ )
+ },
+ span.lo(),
+ span.hi(),
+ )
+}
+
+fn macros() {
+ baz!(
+ do_not, add, trailing, commas, inside, of, function, like, macros, even, if_they, are, long
+ );
+
+ baz!(one_item_macro_which_is_also_loooooooooooooooooooooooooooooooooooooooooooooooong);
+
+ let _ = match option {
+ None => baz!(
+ function,
+ like,
+ macro_as,
+ expression,
+ which,
+ is,
+ loooooooooooooooong
+ ),
+ Some(p) => baz!(one_item_macro_as_expression_which_is_also_loooooooooooooooong),
+ };
+}
+
+fn issue_1450() {
+ if selfstate
+ .compare_exchandsfasdsdfgsdgsdfgsdfgsdfgsdfgsdfgfsfdsage_weak(
+ STATE_PARKED,
+ STATE_UNPARKED,
+ Release,
+ Relaxed,
+ Release,
+ Relaxed,
+ )
+ .is_ok()
+ {
+ return;
+ }
+}
+
+fn foo() {
+ if real_total <= limit
+ && !pre_line_comments
+ && !items.into_iter().any(|item| item.as_ref().is_multiline())
+ {
+ DefinitiveListTactic::Horizontal
+ }
+}
+
+fn combine_block() {
+ foo(Bar {
+ x: value,
+ y: value2,
+ });
+
+ foo((Bar {
+ x: value,
+ y: value2,
+ },));
+
+ foo((
+ 1,
+ 2,
+ 3,
+ Bar {
+ x: value,
+ y: value2,
+ },
+ ));
+
+ foo((1, 2, 3, |x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ }));
+
+ let opt = Some(Struct(
+ long_argument_one,
+ long_argument_two,
+ long_argggggggg,
+ ));
+
+ do_thing(|param| {
+ action();
+ foo(param)
+ });
+
+ do_thing(x, |param| {
+ action();
+ foo(param)
+ });
+
+ do_thing(
+ x,
+ (1, 2, 3, |param| {
+ action();
+ foo(param)
+ }),
+ );
+
+ Ok(some_function(
+ lllllllllong_argument_one,
+ lllllllllong_argument_two,
+ lllllllllllllllllllllllllllllong_argument_three,
+ ));
+
+ foo(
+ thing,
+ bar(
+ param2,
+ pparam1param1param1param1param1param1param1param1param1param1aram1,
+ param3,
+ ),
+ );
+
+ foo.map_or(|| {
+ Ok(SomeStruct {
+ f1: 0,
+ f2: 0,
+ f3: 0,
+ })
+ });
+
+ match opt {
+ Some(x) => somefunc(anotherfunc(
+ long_argument_one,
+ long_argument_two,
+ long_argument_three,
+ )),
+ Some(x) => |x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ },
+ Some(x) => (1, 2, |x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ }),
+ Some(x) => SomeStruct {
+ f1: long_argument_one,
+ f2: long_argument_two,
+ f3: long_argument_three,
+ },
+ None => Ok(SomeStruct {
+ f1: long_argument_one,
+ f2: long_argument_two,
+ f3: long_argument_three,
+ }),
+ };
+
+ match x {
+ y => func(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx),
+ _ => func(
+ x,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzz,
+ ),
+ }
+}
+
+fn issue_1862() {
+ foo(
+ /* bar = */ None,
+ something_something,
+ /* baz = */ None,
+ /* This comment waaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaay too long to be kept on the same line */
+ None,
+ /* com */
+ this_last_arg_is_tooooooooooooooooooooooooooooooooo_long_to_be_kept_with_the_pre_comment,
+ )
+}
+
+fn issue_3025() {
+ foo(
+ // This describes the argument below.
+ /* bar = */ None,
+ // This describes the argument below.
+ something_something,
+ // This describes the argument below. */
+ None,
+ // This describes the argument below.
+ /* This comment waaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaay too long to be kept on the same line */
+ None,
+ // This describes the argument below.
+ /* com */
+ this_last_arg_is_tooooooooooooooooooooooooooooooooo_long_to_be_kept_with_the_pre_comment,
+ )
+}
+
+fn issue_1878() {
+ let channel: &str = seq
+ .next_element()?
+ .ok_or_else(|| de::Error::invalid_length(2, &self))?;
+}
diff --git a/src/tools/rustfmt/tests/target/expr-overflow-delimited.rs b/src/tools/rustfmt/tests/target/expr-overflow-delimited.rs
new file mode 100644
index 000000000..b00e81fcd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/expr-overflow-delimited.rs
@@ -0,0 +1,120 @@
+// rustfmt-overflow_delimited_expr: true
+
+fn combine_blocklike() {
+ do_thing(|param| {
+ action();
+ foo(param)
+ });
+
+ do_thing(x, |param| {
+ action();
+ foo(param)
+ });
+
+ do_thing(
+ x,
+ // I'll be discussing the `action` with your para(m)legal counsel
+ |param| {
+ action();
+ foo(param)
+ },
+ );
+
+ do_thing(Bar {
+ x: value,
+ y: value2,
+ });
+
+ do_thing(x, Bar {
+ x: value,
+ y: value2,
+ });
+
+ do_thing(
+ x,
+ // Let me tell you about that one time at the `Bar`
+ Bar {
+ x: value,
+ y: value2,
+ },
+ );
+
+ do_thing(&[
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ]);
+
+ do_thing(x, &[
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ]);
+
+ do_thing(
+ x,
+ // Just admit it; my list is longer than can be folded on to one line
+ &[
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(vec![
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ]);
+
+ do_thing(x, vec![
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ]);
+
+ do_thing(
+ x,
+ // Just admit it; my list is longer than can be folded on to one line
+ vec![
+ value_with_longer_name,
+ value2_with_longer_name,
+ value3_with_longer_name,
+ value4_with_longer_name,
+ ],
+ );
+
+ do_thing(
+ x,
+ (1, 2, 3, |param| {
+ action();
+ foo(param)
+ }),
+ );
+}
+
+fn combine_struct_sample() {
+ let identity = verify(&ctx, VerifyLogin {
+ type_: LoginType::Username,
+ username: args.username.clone(),
+ password: Some(args.password.clone()),
+ domain: None,
+ })?;
+}
+
+fn combine_macro_sample() {
+ rocket::ignite()
+ .mount("/", routes![
+ http::auth::login,
+ http::auth::logout,
+ http::cors::options,
+ http::action::dance,
+ http::action::sleep,
+ ])
+ .launch();
+}
diff --git a/src/tools/rustfmt/tests/target/expr.rs b/src/tools/rustfmt/tests/target/expr.rs
new file mode 100644
index 000000000..84df802bc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/expr.rs
@@ -0,0 +1,671 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// Test expressions
+
+fn foo() -> bool {
+ let boxed: Box<i32> = box 5;
+ let referenced = &5;
+
+ let very_long_variable_name = (a + first + simple + test);
+ let very_long_variable_name =
+ (a + first + simple + test + AAAAAAAAAAAAA + BBBBBBBBBBBBBBBBB + b + c);
+
+ let is_internalxxxx =
+ self.source_map.span_to_filename(s) == self.source_map.span_to_filename(m.inner);
+
+ let some_val = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa * bbbb
+ / (bbbbbb - function_call(x, *very_long_pointer, y))
+ + 1000;
+
+ some_ridiculously_loooooooooooooooooooooong_function(
+ 10000 * 30000000000 + 40000 / 1002200000000 - 50000 * sqrt(-1),
+ trivial_value,
+ );
+ (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + a
+ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + aaaaa);
+
+ {
+ for _ in 0..10 {}
+ }
+
+ {
+ {
+ {
+ {}
+ }
+ }
+ }
+
+ if 1 + 2 > 0 {
+ let result = 5;
+ result
+ } else {
+ 4
+ };
+
+ if let Some(x) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa {
+ // Nothing
+ }
+
+ if let Some(x) =
+ (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+ {}
+
+ if let (
+ some_very_large,
+ tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple,
+ ) = 1 + 2 + 3
+ {}
+
+ if let (
+ some_very_large,
+ tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple,
+ ) = 1111 + 2222
+ {}
+
+ if let (
+ some_very_large,
+ tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple,
+ ) = 1 + 2 + 3
+ {}
+
+ if let ast::ItemKind::Trait(_, unsafety, ref generics, ref type_param_bounds, ref trait_items) =
+ item.node
+ {
+ // nothing
+ }
+
+ let test = if true { 5 } else { 3 };
+
+ if cond() {
+ something();
+ } else if different_cond() {
+ something_else();
+ } else {
+ // Check subformatting
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ }
+
+ // #2884
+ let _ = [0; {
+ struct Foo;
+ impl Foo {
+ const fn get(&self) -> usize {
+ 5
+ }
+ };
+ Foo.get()
+ }];
+}
+
+fn bar() {
+ let range =
+ (111111111 + 333333333333333333 + 1111 + 400000000000000000)..(2222 + 2333333333333333);
+
+ let another_range = 5..some_func(a, b /* comment */);
+
+ for _ in 1.. {
+ call_forever();
+ }
+
+ syntactically_correct(
+ loop {
+ sup('?');
+ },
+ if cond { 0 } else { 1 },
+ );
+
+ let third = ..10;
+ let infi_range = ..;
+ let foo = 1..;
+ let bar = 5;
+ let nonsense = (10..0)..(0..10);
+
+ loop {
+ if true {
+ break;
+ }
+ }
+
+ let x = (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa && aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ a,
+ );
+}
+
+fn baz() {
+ unsafe /* {}{}{}{{{{}} */ {
+ let foo = 1u32;
+ }
+
+ unsafe /* very looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+ * comment */ {
+ }
+
+ unsafe /* So this is a very long comment.
+ * Multi-line, too.
+ * Will it still format correctly? */ {
+ }
+
+ unsafe {
+ // Regular unsafe block
+ }
+
+ unsafe { foo() }
+
+ unsafe {
+ foo();
+ }
+
+ // #2289
+ let identifier_0 = unsafe { this_is_58_chars_long_and_line_is_93_chars_long_xxxxxxxxxx };
+ let identifier_1 = unsafe { this_is_59_chars_long_and_line_is_94_chars_long_xxxxxxxxxxx };
+ let identifier_2 = unsafe { this_is_65_chars_long_and_line_is_100_chars_long_xxxxxxxxxxxxxxxx };
+ let identifier_3 =
+ unsafe { this_is_66_chars_long_and_line_is_101_chars_long_xxxxxxxxxxxxxxxxx };
+}
+
+// Test some empty blocks.
+fn qux() {
+ {}
+ // FIXME this one could be done better.
+ { /* a block with a comment */ }
+ {}
+ {
+ // A block with a comment.
+ }
+}
+
+fn issue227() {
+ {
+ let handler =
+ box DocumentProgressHandler::new(addr, DocumentProgressTask::DOMContentLoaded);
+ }
+}
+
+fn issue184(source: &str) {
+ for c in source.chars() {
+ if index < 'a' {
+ continue;
+ }
+ }
+}
+
+fn arrays() {
+ let x = [
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 7, 8, 9, 0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 0,
+ ];
+
+ let y = [/* comment */ 1, 2 /* post comment */, 3];
+
+ let xy = [
+ strukt {
+ test123: value_one_two_three_four,
+ turbo: coolio(),
+ },
+ // comment
+ 1,
+ ];
+
+ let a = WeightedChoice::new(&mut [
+ Weighted {
+ weightweight: x,
+ item: 0,
+ },
+ Weighted {
+ weightweight: 1,
+ item: 1,
+ },
+ Weighted {
+ weightweight: x,
+ item: 2,
+ },
+ Weighted {
+ weightweight: 1,
+ item: 3,
+ },
+ ]);
+
+ let z = [
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzzz,
+ q,
+ ];
+
+ [1 + 3, 4, 5, 6, 7, 7, fncall::<Vec<_>>(3 - 1)]
+}
+
+fn returns() {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ && return;
+
+ return aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa;
+}
+
+fn addrof() {
+ &mut (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb);
+ &(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb);
+
+ // raw reference operator
+ &raw const a;
+ &raw mut b;
+}
+
+fn casts() {
+ fn unpack(packed: u32) -> [u16; 2] {
+ [(packed >> 16) as u16, (packed >> 0) as u16]
+ }
+
+ let some_trait_xxx = xxxxxxxxxxx + xxxxxxxxxxxxx as SomeTraitXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX;
+ let slightly_longer_trait =
+ yyyyyyyyy + yyyyyyyyyyy as SomeTraitYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY;
+}
+
+fn indices() {
+ let x = (aaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccc)
+ [x + y + z];
+ let y = (aaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccc)
+ [xxxxx + yyyyy + zzzzz];
+ let z = xxxxxxxxxx
+ .x()
+ .y()
+ .zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz()[aaaaa];
+ let z = xxxxxxxxxx
+ .x()
+ .y()
+ .zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz()
+ [aaaaa];
+}
+
+fn repeats() {
+ let x = [aaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccc;
+ x + y + z];
+ let y = [aaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccc;
+ xxxxx + yyyyy + zzzzz];
+}
+
+fn blocks() {
+ if 1 + 1 == 2 {
+ println!("yay arithmetix!");
+ };
+}
+
+fn issue767() {
+ if false {
+ if false {
+ } else {
+ // A let binding here seems necessary to trigger it.
+ let _ = ();
+ }
+ } else if let false = false {
+ }
+}
+
+fn ranges() {
+ let x = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa..bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+ let y =
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa..=bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+ let z = ..=x;
+
+ // #1766
+ let x = [0. ..10.0];
+ let x = [0. ..=10.0];
+
+ a..=b
+
+ // the expr below won't compile because inclusive ranges need a defined end
+ // let a = 0 ..= ;
+}
+
+fn if_else() {
+ let exact = diff / (if size == 0 { 1 } else { size });
+
+ let cx = tp1.x + any * radius * if anticlockwise { 1.0 } else { -1.0 };
+}
+
+fn complex_if_else() {
+ if let Some(x) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ } else if let Some(x) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ ha();
+ } else if xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + xxxxxxxx {
+ yo();
+ } else if let Some(x) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ {
+ ha();
+ } else if xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + xxxxxxxxx
+ {
+ yo();
+ }
+}
+
+fn issue1106() {
+ {
+ if let hir::ItemEnum(ref enum_def, ref generics) =
+ self.ast_map.expect_item(enum_node_id).node
+ {}
+ }
+
+ for entry in WalkDir::new(path)
+ .into_iter()
+ .filter_entry(|entry| exclusions.filter_entry(entry))
+ {}
+}
+
+fn issue1570() {
+ a_very_long_function_name({ some_func(1, { 1 }) })
+}
+
+fn issue1714() {
+ v = &mut { v }[mid..];
+ let (left, right) = { v }.split_at_mut(mid);
+}
+
+// Multi-lined index should be put on the next line if it fits in one line.
+fn issue1749() {
+ {
+ {
+ {
+ if self.shape[(r as f32 + self.x_offset) as usize]
+ [(c as f32 + self.y_offset) as usize]
+ != 0
+ {
+ // hello
+ }
+ }
+ }
+ }
+}
+
+// #1172
+fn newlines_between_list_like_expr() {
+ foo(
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
+ );
+
+ vec![
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
+ ];
+
+ match x {
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ | yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+ | zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz => foo(a, b, c),
+ _ => bar(),
+ };
+}
+
+fn issue2178() {
+ Ok(result
+ .iter()
+ .map(|item| ls_util::rls_to_location(item))
+ .collect())
+}
+
+// #2493
+impl Foo {
+ fn bar(&self) {
+ {
+ let x = match () {
+ () => {
+ let i;
+ i == self
+ .install_config
+ .storage
+ .experimental_compressed_block_size as usize
+ }
+ };
+ }
+ }
+}
+
+fn dots() {
+ .. .. ..; // (.. (.. (..)))
+ ..= ..= ..;
+ (..).. ..; // ((..) .. (..))
+}
+
+// #2676
+// A function call with a large single argument.
+fn foo() {
+ let my_var = Mutex::new(
+ RpcClientType::connect(server_iddd).chain_err(|| "Unable to create RPC client")?,
+ );
+}
+
+// #2704
+// Method call with prefix and suffix.
+fn issue2704() {
+ // We should not combine the callee with a multi-lined method call.
+ let requires = requires.set(
+ &requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total(),
+ );
+ let requires = requires.set(
+ box requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total(),
+ );
+ let requires = requires.set(
+ requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total() as u32,
+ );
+ let requires = requires.set(
+ requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total()?,
+ );
+ let requires = requires.set(
+ !requires0
+ .concat(&requires1)
+ .concat(&requires2)
+ .distinct_total(),
+ );
+ // We should combine a small callee with an argument.
+ bar(vec![22]
+ .into_iter()
+ .map(|x| x * 2)
+ .filter(|_| true)
+ .collect());
+ // But we should not combine a long callee with an argument.
+ barrrr(
+ vec![22]
+ .into_iter()
+ .map(|x| x * 2)
+ .filter(|_| true)
+ .collect(),
+ );
+}
+
+// #2782
+fn issue2782() {
+ {
+ let f = {
+ let f = {
+ {
+ match f {
+ F(f, _) => loop {
+ let f = {
+ match f {
+ F(f, _) => match f {
+ F(f, _) => loop {
+ let f = {
+ let f = {
+ match f {
+ '-' => F(f, ()),
+ }
+ };
+ };
+ },
+ },
+ }
+ };
+ },
+ }
+ }
+ };
+ };
+ }
+}
+
+fn issue_2802() {
+ function_to_fill_this_line(some_arg, some_arg, some_arg)
+ * a_very_specific_length(specific_length_arg)
+ * very_specific_length(Foo {
+ a: some_much_much_longer_value,
+ })
+ * some_value
+}
+
+fn issue_3003() {
+ let mut path: PathBuf = [
+ env!("CARGO_MANIFEST_DIR"),
+ "tests",
+ "support",
+ "dejavu-fonts-ttf-2.37",
+ "ttf",
+ ]
+ .iter()
+ .collect();
+}
+
+fn issue3226() {
+ {
+ {
+ {
+ return Err(
+ ErrorKind::ManagementInterfaceError("Server exited unexpectedly").into(),
+ );
+ }
+ }
+ }
+ {
+ {
+ {
+ break Err(
+ ErrorKind::ManagementInterfaceError("Server exited unexpectedlyy").into(),
+ );
+ }
+ }
+ }
+}
+
+// #3457
+fn issue3457() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ println!("Test");
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+// #3498
+static REPRO: &[usize] = &[
+ #[cfg(feature = "zero")]
+ 0,
+];
+
+fn overflow_with_attr() {
+ foo(
+ #[cfg(feature = "zero")]
+ 0,
+ );
+ foobar(
+ #[cfg(feature = "zero")]
+ 0,
+ );
+ foobar(
+ x,
+ y,
+ #[cfg(feature = "zero")]
+ {},
+ );
+}
+
+// https://github.com/rust-lang/rustfmt/issues/3765
+fn foo() {
+ async {
+ // Do
+ // some
+ // work
+ }
+ .await;
+
+ async {
+ // Do
+ // some
+ // work
+ }
+ .await;
+}
+
+fn underscore() {
+ _ = 1;
+ _;
+ [_, a, _] = [1, 2, 3];
+ (a, _) = (8, 9);
+ TupleStruct(_, a) = TupleStruct(2, 2);
+
+ let _: usize = foo(_, _);
+}
diff --git a/src/tools/rustfmt/tests/target/extern.rs b/src/tools/rustfmt/tests/target/extern.rs
new file mode 100644
index 000000000..d1741360c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/extern.rs
@@ -0,0 +1,97 @@
+// rustfmt-normalize_comments: true
+
+extern crate foo;
+extern crate foo as bar;
+
+extern crate chrono;
+extern crate dotenv;
+extern crate futures;
+
+extern crate bar;
+extern crate foo;
+
+// #2315
+extern crate proc_macro;
+extern crate proc_macro2;
+
+// #3128
+extern crate serde; // 1.0.78
+extern crate serde_derive; // 1.0.78
+extern crate serde_json; // 1.0.27
+
+extern "C" {
+ fn c_func(x: *mut *mut libc::c_void);
+
+ fn c_func(
+ x: XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX,
+ y: YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY,
+ );
+
+ #[test123]
+ fn foo() -> uint64_t;
+
+ pub fn bar();
+}
+
+extern "C" {
+ fn DMR_GetDevice(
+ pHDev: *mut HDEV,
+ searchMode: DeviceSearchMode,
+ pSearchString: *const c_char,
+ devNr: c_uint,
+ wildcard: c_char,
+ ) -> TDMR_ERROR;
+
+ fn quux() -> (); // Post comment
+
+ pub type Foo;
+
+ type Bar;
+}
+
+extern "Rust" {
+ static ext: u32;
+ // Some comment.
+ pub static mut var: SomeType;
+}
+
+extern "C" {
+ fn syscall(
+ number: libc::c_long, // comment 1
+ // comm 2
+ ... // sup?
+ ) -> libc::c_long;
+
+ fn foo(x: *const c_char, ...) -> libc::c_long;
+}
+
+extern "C" {
+ pub fn freopen(
+ filename: *const c_char,
+ mode: *const c_char,
+ mode2: *const c_char,
+ mode3: *const c_char,
+ file: *mut FILE,
+ ) -> *mut FILE;
+
+ const fn foo() -> *mut Bar;
+ unsafe fn foo() -> *mut Bar;
+
+ pub(super) const fn foo() -> *mut Bar;
+ pub(crate) unsafe fn foo() -> *mut Bar;
+}
+
+extern "C" {}
+
+macro_rules! x {
+ ($tt:tt) => {};
+}
+
+extern "macros" {
+ x!(ident);
+ x!(#);
+ x![ident];
+ x![#];
+ x! {ident}
+ x! {#}
+}
diff --git a/src/tools/rustfmt/tests/target/extern_not_explicit.rs b/src/tools/rustfmt/tests/target/extern_not_explicit.rs
new file mode 100644
index 000000000..b55b64d05
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/extern_not_explicit.rs
@@ -0,0 +1,18 @@
+// rustfmt-force_explicit_abi: false
+
+extern {
+ fn some_fn() -> ();
+}
+
+extern fn sup() {}
+
+type funky_func = extern fn(
+ unsafe extern "rust-call" fn(
+ *const JSJitInfo,
+ *mut JSContext,
+ HandleObject,
+ *mut libc::c_void,
+ u32,
+ *mut JSVal,
+ ) -> u8,
+);
diff --git a/src/tools/rustfmt/tests/target/file-lines-1.rs b/src/tools/rustfmt/tests/target/file-lines-1.rs
new file mode 100644
index 000000000..13820ec29
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-1.rs
@@ -0,0 +1,30 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-1.rs","range":[4,8]}]
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call()
+ .method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-2.rs b/src/tools/rustfmt/tests/target/file-lines-2.rs
new file mode 100644
index 000000000..bc25698c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-2.rs
@@ -0,0 +1,24 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-2.rs","range":[10,15]}]
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call().method_call();
+
+ let y = if cond { val1 } else { val2 }.method_call();
+
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-3.rs b/src/tools/rustfmt/tests/target/file-lines-3.rs
new file mode 100644
index 000000000..77d6fb263
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-3.rs
@@ -0,0 +1,25 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-3.rs","range":[4,8]},{"file":"tests/source/file-lines-3.rs","range":[10,15]}]
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call()
+ .method_call();
+
+ let y = if cond { val1 } else { val2 }.method_call();
+
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-4.rs b/src/tools/rustfmt/tests/target/file-lines-4.rs
new file mode 100644
index 000000000..83928bf6f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-4.rs
@@ -0,0 +1,30 @@
+// rustfmt-file_lines: []
+// (Test that nothing is formatted if an empty array is specified.)
+
+fn floaters() {
+ let x = Foo {
+ field1: val1,
+ field2: val2,
+ }
+ .method_call().method_call();
+
+ let y = if cond {
+ val1
+ } else {
+ val2
+ }
+ .method_call();
+ // aaaaaaaaaaaaa
+ {
+ match x {
+ PushParam => {
+ // comment
+ stack.push(mparams[match cur.to_digit(10) {
+ Some(d) => d as usize - 1,
+ None => return Err("bad param number".to_owned()),
+ }]
+ .clone());
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-5.rs b/src/tools/rustfmt/tests/target/file-lines-5.rs
new file mode 100644
index 000000000..3966dc063
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-5.rs
@@ -0,0 +1,17 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-5.rs","range":[3,5]}]
+
+struct A {
+ t: i64,
+}
+
+mod foo {
+ fn bar() {
+ // test
+ let i = 12;
+ // test
+ }
+ // aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ fn baz() {
+ let j = 15;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-6.rs b/src/tools/rustfmt/tests/target/file-lines-6.rs
new file mode 100644
index 000000000..8a092df86
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-6.rs
@@ -0,0 +1,18 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-6.rs","range":[9,10]}]
+
+struct A {
+ t: i64,
+}
+
+mod foo {
+ fn bar() {
+ // test
+ let i = 12;
+ // test
+ }
+
+ fn baz() {
+///
+ let j = 15;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-7.rs b/src/tools/rustfmt/tests/target/file-lines-7.rs
new file mode 100644
index 000000000..62d913d88
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-7.rs
@@ -0,0 +1,21 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-7.rs","range":[8,15]}]
+
+struct A {
+ t: i64,
+}
+
+mod foo {
+ fn bar() {
+ // test
+ let i = 12;
+ // test
+ }
+
+ fn baz() {
+
+
+
+ ///
+ let j = 15;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/file-lines-item.rs b/src/tools/rustfmt/tests/target/file-lines-item.rs
new file mode 100644
index 000000000..8d39eb609
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/file-lines-item.rs
@@ -0,0 +1,21 @@
+// rustfmt-file_lines: [{"file":"tests/source/file-lines-item.rs","range":[6,8]}]
+
+use foo::{c, b, a};
+use bar;
+
+fn foo() {
+ bar();
+}
+
+impl Drop for Context {
+ fn drop(&mut self) {
+ }
+}
+
+impl Bar for Baz {
+ fn foo() {
+ bar(
+ baz, // Who knows?
+ )
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/fn-args-with-last-line-comment.rs b/src/tools/rustfmt/tests/target/fn-args-with-last-line-comment.rs
new file mode 100644
index 000000000..27e0e0965
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-args-with-last-line-comment.rs
@@ -0,0 +1,24 @@
+// #1587
+pub trait X {
+ fn a(&self) -> &'static str;
+ fn bcd(
+ &self,
+ c: &str, // comment on this arg
+ d: u16, // comment on this arg
+ e: &Vec<String>, // comment on this arg
+ ) -> Box<Q>;
+}
+
+// #1595
+fn foo(
+ arg1: LongTypeName,
+ arg2: LongTypeName,
+ arg3: LongTypeName,
+ arg4: LongTypeName,
+ arg5: LongTypeName,
+ arg6: LongTypeName,
+ arg7: LongTypeName,
+ //arg8: LongTypeName,
+) {
+ // do stuff
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom-2.rs b/src/tools/rustfmt/tests/target/fn-custom-2.rs
new file mode 100644
index 000000000..0e723396c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom-2.rs
@@ -0,0 +1,77 @@
+// Test different indents.
+
+fn foo(
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) {
+ foo();
+}
+
+fn bar<
+ 'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ TTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW,
+>(
+ a: Aaaaaaaaaaaaaaa,
+) {
+ bar();
+}
+
+fn baz()
+where
+ X: TTTTTTTT,
+{
+ baz();
+}
+
+fn qux()
+where
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+{
+ baz();
+}
+
+impl Foo {
+ fn foo(
+ self,
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+ ) {
+ foo();
+ }
+
+ fn bar<
+ 'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ TTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW,
+ >(
+ a: Aaaaaaaaaaaaaaa,
+ ) {
+ bar();
+ }
+
+ fn baz()
+ where
+ X: TTTTTTTT,
+ {
+ baz();
+ }
+}
+
+struct Foo<
+ TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUU,
+ VVVVVVVVVVVVVVVVVVVVVVVVVVV,
+ WWWWWWWWWWWWWWWWWWWWWWWW,
+> {
+ foo: Foo,
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom-3.rs b/src/tools/rustfmt/tests/target/fn-custom-3.rs
new file mode 100644
index 000000000..bfafe4536
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom-3.rs
@@ -0,0 +1,71 @@
+// Test different indents.
+
+fn foo(
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) {
+ foo();
+}
+
+fn bar<
+ 'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ TTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW,
+>(
+ a: Aaaaaaaaaaaaaaa,
+) {
+ bar();
+}
+
+fn qux()
+where
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+{
+ baz();
+}
+
+fn qux()
+where
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+{
+ baz();
+}
+
+impl Foo {
+ fn foo(
+ self,
+ a: Aaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbb,
+ c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+ ) {
+ foo();
+ }
+
+ fn bar<
+ 'a: 'bbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ TTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUU: WWWWWWWWWWWWWWWWWWWWWWWW,
+ >(
+ a: Aaaaaaaaaaaaaaa,
+ ) {
+ bar();
+ }
+}
+
+struct Foo<
+ TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUU,
+ VVVVVVVVVVVVVVVVVVVVVVVVVVV,
+ WWWWWWWWWWWWWWWWWWWWWWWW,
+> {
+ foo: Foo,
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom-4.rs b/src/tools/rustfmt/tests/target/fn-custom-4.rs
new file mode 100644
index 000000000..5de16e251
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom-4.rs
@@ -0,0 +1,26 @@
+// Test different indents.
+
+fn qux()
+where
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+{
+ baz();
+}
+
+fn qux()
+where
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+{
+ baz();
+}
+
+fn qux(a: Aaaaaaaaaaaaaaaaa)
+where
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+ X: TTTTTTTTTTTTTTTTTTTTTTTTTTTT,
+{
+ baz();
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom-6.rs b/src/tools/rustfmt/tests/target/fn-custom-6.rs
new file mode 100644
index 000000000..e891f4d58
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom-6.rs
@@ -0,0 +1,71 @@
+// rustfmt-brace_style: PreferSameLine
+// Test different indents.
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) -> String {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb)
+where
+ T: UUUUUUUUUUU, {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) where
+ T: UUUUUUUUUUU, {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String
+where
+ T: UUUUUUUUUUU, {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) -> String
+where
+ T: UUUUUUUUUUU, {
+ bar();
+}
+
+trait Test {
+ fn foo(a: u8) {}
+
+ fn bar(a: u8) -> String {}
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom-7.rs b/src/tools/rustfmt/tests/target/fn-custom-7.rs
new file mode 100644
index 000000000..2c20ac5a7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom-7.rs
@@ -0,0 +1,36 @@
+// rustfmt-normalize_comments: true
+// rustfmt-fn_args_layout: Vertical
+// rustfmt-brace_style: AlwaysNextLine
+
+// Case with only one variable.
+fn foo(a: u8) -> u8
+{
+ bar()
+}
+
+// Case with 2 variables and some pre-comments.
+fn foo(
+ a: u8, // Comment 1
+ b: u8, // Comment 2
+) -> u8
+{
+ bar()
+}
+
+// Case with 2 variables and some post-comments.
+fn foo(
+ // Comment 1
+ a: u8,
+ // Comment 2
+ b: u8,
+) -> u8
+{
+ bar()
+}
+
+trait Test
+{
+ fn foo(a: u8) {}
+
+ fn bar(a: u8) -> String {}
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom-8.rs b/src/tools/rustfmt/tests/target/fn-custom-8.rs
new file mode 100644
index 000000000..29af3fca7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom-8.rs
@@ -0,0 +1,77 @@
+// rustfmt-brace_style: PreferSameLine
+// Test different indents.
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) -> String {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb)
+where
+ T: UUUUUUUUUUU, {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) where
+ T: UUUUUUUUUUU, {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String
+where
+ T: UUUUUUUUUUU, {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) -> String
+where
+ T: UUUUUUUUUUU, {
+ bar();
+}
+
+trait Test {
+ fn foo(a: u8) {}
+
+ fn bar(a: u8) -> String {}
+
+ fn bar(a: u8) -> String
+ where
+ Foo: foooo,
+ Bar: barrr, {
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/fn-custom.rs b/src/tools/rustfmt/tests/target/fn-custom.rs
new file mode 100644
index 000000000..2eb2a973d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-custom.rs
@@ -0,0 +1,19 @@
+// rustfmt-fn_args_layout: Compressed
+// Test some of the ways function signatures can be customised.
+
+// Test compressed layout of args.
+fn foo(
+ a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc, d: Ddddddddddddddddddddddddd,
+ e: Eeeeeeeeeeeeeeeeeee,
+) {
+ foo();
+}
+
+impl Foo {
+ fn foo(
+ self, a: Aaaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbbbb, c: Ccccccccccccccccc,
+ d: Ddddddddddddddddddddddddd, e: Eeeeeeeeeeeeeeeeeee,
+ ) {
+ foo();
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/fn-param-attributes.rs b/src/tools/rustfmt/tests/target/fn-param-attributes.rs
new file mode 100644
index 000000000..829575518
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-param-attributes.rs
@@ -0,0 +1,64 @@
+// https://github.com/rust-lang/rustfmt/issues/3623
+
+fn foo(#[cfg(something)] x: i32, y: i32) -> i32 {
+ x + y
+}
+
+fn foo_b(#[cfg(something)] x: i32, y: i32) -> i32 {
+ x + y
+}
+
+fn add(
+ #[cfg(something)]
+ #[deny(C)]
+ x: i32,
+ y: i32,
+) -> i32 {
+ x + y
+}
+
+struct NamedSelfRefStruct {}
+impl NamedSelfRefStruct {
+ fn foo(#[cfg(something)] self: &Self) {}
+}
+
+struct MutStruct {}
+impl MutStruct {
+ fn foo(#[cfg(foo)] &mut self, #[deny(C)] b: i32) {}
+}
+
+fn main() {
+ let c = |#[allow(C)] a: u32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)]
+ c: i32| {};
+ let _ = c(1, 2);
+}
+
+pub fn bar(
+ /// bar
+ #[test]
+ a: u32,
+ /// Bar
+ #[must_use]
+ /// Baz
+ #[no_mangle]
+ b: i32,
+) {
+}
+
+fn abc(
+ #[foo]
+ #[bar]
+ param: u32,
+) {
+ // ...
+}
+
+fn really_really_really_loooooooooooooooooooong(
+ #[cfg(some_even_longer_config_feature_that_keeps_going_and_going_and_going_forever_and_ever_and_ever_on_and_on)]
+ b: i32,
+) {
+ // ...
+}
diff --git a/src/tools/rustfmt/tests/target/fn-simple.rs b/src/tools/rustfmt/tests/target/fn-simple.rs
new file mode 100644
index 000000000..e72526936
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-simple.rs
@@ -0,0 +1,120 @@
+// rustfmt-normalize_comments: true
+
+fn simple(
+ // pre-comment on a function!?
+ i: i32, // yes, it's possible!
+ response: NoWay, // hose
+) {
+ fn op(
+ x: Typ,
+ key: &[u8],
+ upd: Box<
+ Fn(
+ Option<&memcache::Item>,
+ ) -> (memcache::Status, Result<memcache::Item, Option<String>>),
+ >,
+ ) -> MapResult {
+ }
+
+ "cool"
+}
+
+fn weird_comment(
+ // /*/ double level */ comment
+ x: Hello, // /*/* triple, even */*/
+ // Does this work?
+ y: World,
+) {
+ simple(/* does this preserve comments now? */ 42, NoWay)
+}
+
+fn generic<T>(arg: T) -> &SomeType
+where
+ T: Fn(
+ // First arg
+ A,
+ // Second argument
+ B,
+ C,
+ D,
+ // pre comment
+ E, // last comment
+ ) -> &SomeType,
+{
+ arg(a, b, c, d, e)
+}
+
+fn foo() -> ! {}
+
+pub fn http_fetch_async(
+ listener: Box<AsyncCORSResponseListener + Send>,
+ script_chan: Box<ScriptChan + Send>,
+) {
+}
+
+fn some_func<T: Box<Trait + Bound>>(val: T) {}
+
+fn zzzzzzzzzzzzzzzzzzzz<Type, NodeType>(
+ selff: Type,
+ mut handle: node::Handle<IdRef<'id, Node<K, V>>, Type, NodeType>,
+) -> SearchStack<'a, K, V, Type, NodeType> {
+}
+
+unsafe fn generic_call(
+ cx: *mut JSContext,
+ argc: libc::c_uint,
+ vp: *mut JSVal,
+ is_lenient: bool,
+ call: unsafe extern "C" fn(
+ *const JSJitInfo,
+ *mut JSContext,
+ HandleObject,
+ *mut libc::c_void,
+ u32,
+ *mut JSVal,
+ ) -> u8,
+) {
+ let f: fn(_, _) -> _ = panic!();
+}
+
+pub fn start_export_thread<C: CryptoSchemee + 'static>(
+ database: &Database,
+ crypto_scheme: &C,
+ block_size: usize,
+ source_path: &Path,
+) -> BonzoResult<mpsc::Consumer<'static, FileInstruction>> {
+}
+
+pub fn waltz(cwd: &Path) -> CliAssert {
+ {
+ {
+ formatted_comment =
+ rewrite_comment(comment, block_style, width, offset, formatting_fig);
+ }
+ }
+}
+
+// #2003
+mod foo {
+ fn __bindgen_test_layout_i_open0_c_open1_char_a_open2_char_close2_close1_close0_instantiation()
+ {
+ foo();
+ }
+}
+
+// #2082
+pub(crate) fn init() {}
+
+pub(crate) fn init() {}
+
+// #2630
+fn make_map<T, F: (Fn(&T) -> String)>(records: &Vec<T>, key_fn: F) -> HashMap<String, usize> {}
+
+// #2956
+fn bar(
+ beans: Asdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdf,
+ spam: bool,
+ eggs: bool,
+) -> bool {
+ unimplemented!();
+}
diff --git a/src/tools/rustfmt/tests/target/fn-single-line/version_one.rs b/src/tools/rustfmt/tests/target/fn-single-line/version_one.rs
new file mode 100644
index 000000000..013b2cd72
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-single-line/version_one.rs
@@ -0,0 +1,71 @@
+// rustfmt-fn_single_line: true
+// rustfmt-version: One
+// Test single-line functions.
+
+fn foo_expr() { 1 }
+
+fn foo_stmt() { foo(); }
+
+fn foo_decl_local() { let z = 5; }
+
+fn foo_decl_item(x: &mut i32) { x = 3; }
+
+fn empty() {}
+
+fn foo_return() -> String { "yay" }
+
+fn foo_where() -> T
+where
+ T: Sync,
+{
+ let x = 2;
+}
+
+fn fooblock() {
+ {
+ "inner-block"
+ }
+}
+
+fn fooblock2(x: i32) {
+ let z = match x {
+ _ => 2,
+ };
+}
+
+fn comment() {
+ // this is a test comment
+ 1
+}
+
+fn comment2() {
+ // multi-line comment
+ let z = 2;
+ 1
+}
+
+fn only_comment() {
+ // Keep this here
+}
+
+fn aaaaaaaaaaaaaaaaa_looooooooooooooooooooooong_name() {
+ let z = "aaaaaaawwwwwwwwwwwwwwwwwwwwwwwwwwww";
+}
+
+fn lots_of_space() { 1 }
+
+fn mac() -> Vec<i32> { vec![] }
+
+trait CoolTypes {
+ fn dummy(&self) {}
+}
+
+trait CoolerTypes {
+ fn dummy(&self) {}
+}
+
+fn Foo<T>()
+where
+ T: Bar,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/fn-single-line/version_two.rs b/src/tools/rustfmt/tests/target/fn-single-line/version_two.rs
new file mode 100644
index 000000000..b8053d4c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-single-line/version_two.rs
@@ -0,0 +1,67 @@
+// rustfmt-fn_single_line: true
+// rustfmt-version: Two
+// Test single-line functions.
+
+fn foo_expr() { 1 }
+
+fn foo_stmt() { foo(); }
+
+fn foo_decl_local() { let z = 5; }
+
+fn foo_decl_item(x: &mut i32) { x = 3; }
+
+fn empty() {}
+
+fn foo_return() -> String { "yay" }
+
+fn foo_where() -> T
+where
+ T: Sync,
+{
+ let x = 2;
+}
+
+fn fooblock() { { "inner-block" } }
+
+fn fooblock2(x: i32) {
+ let z = match x {
+ _ => 2,
+ };
+}
+
+fn comment() {
+ // this is a test comment
+ 1
+}
+
+fn comment2() {
+ // multi-line comment
+ let z = 2;
+ 1
+}
+
+fn only_comment() {
+ // Keep this here
+}
+
+fn aaaaaaaaaaaaaaaaa_looooooooooooooooooooooong_name() {
+ let z = "aaaaaaawwwwwwwwwwwwwwwwwwwwwwwwwwww";
+}
+
+fn lots_of_space() { 1 }
+
+fn mac() -> Vec<i32> { vec![] }
+
+trait CoolTypes {
+ fn dummy(&self) {}
+}
+
+trait CoolerTypes {
+ fn dummy(&self) {}
+}
+
+fn Foo<T>()
+where
+ T: Bar,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/fn-ty.rs b/src/tools/rustfmt/tests/target/fn-ty.rs
new file mode 100644
index 000000000..7d48f3b32
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn-ty.rs
@@ -0,0 +1,14 @@
+fn f(
+ xxxxxxxxxxxxxxxxxx: fn(a, b, b) -> a,
+ xxxxxxxxxxxxxxxxxx: fn() -> a,
+ xxxxxxxxxxxxxxxxxx: fn(a, b, b),
+ xxxxxxxxxxxxxxxxxx: fn(),
+ xxxxxxxxxxxxxxxxxx: fn(a, b, b) -> !,
+ xxxxxxxxxxxxxxxxxx: fn() -> !,
+) where
+ F1: Fn(a, b, b) -> a,
+ F2: Fn(a, b, b),
+ F3: Fn(),
+ F4: Fn() -> u32,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/fn.rs b/src/tools/rustfmt/tests/target/fn.rs
new file mode 100644
index 000000000..0ad775ee1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn.rs
@@ -0,0 +1,120 @@
+// Tests different fns
+
+fn foo(a: AAAA, b: BBB, c: CCC) -> RetType {}
+
+fn foo(a: AAAA, b: BBB /* some, weird, inline comment */, c: CCC) -> RetType
+where
+ T: Blah,
+{
+}
+
+fn foo(a: AAA /* (comment) */)
+where
+ T: Blah,
+{
+}
+
+fn foo(
+ a: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ b: BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+) -> RetType
+where
+ T: Blah,
+{
+}
+
+fn foo<U, T>(
+ a: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ b: BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+) -> RetType
+where
+ T: Blah,
+ U: dsfasdfasdfasd,
+{
+}
+
+fn foo<U: Fn(A) -> B /* paren inside generics */>() {}
+
+impl Foo {
+ fn with_no_errors<T, F>(&mut self, f: F) -> T
+ where
+ F: FnOnce(&mut Resolver) -> T,
+ {
+ }
+
+ fn foo(mut self, mut bar: u32) {}
+
+ fn bar(self, mut bazz: u32) {}
+}
+
+pub fn render<
+ 'a,
+ N: Clone + 'a,
+ E: Clone + 'a,
+ G: Labeller<'a, N, E> + GraphWalk<'a, N, E>,
+ W: Write,
+>(
+ g: &'a G,
+ w: &mut W,
+) -> io::Result<()> {
+ render_opts(g, w, &[])
+}
+
+const fn foo() {
+ x;
+}
+
+pub const fn foo() {
+ x;
+}
+
+impl Foo {
+ const fn foo() {
+ x;
+ }
+}
+
+fn homura<T: Deref<Target = i32>>(_: T) {}
+
+fn issue377() -> (Box<CompositorProxy + Send>, Box<CompositorReceiver>) {}
+
+fn main() {
+ let _ = function(move || 5);
+ let _ = move || 42;
+ let _ = || unsafe { abort() };
+}
+
+// With inner attributes.
+fn inner() {
+ #![inline]
+ x
+}
+
+#[cfg_attr(rustfmt, rustfmt::skip)]
+fn foo(a: i32) -> i32 {
+ // comment
+ if a > 0 { 1 } else { 2 }
+}
+
+fn ______________________baz(
+ a: i32,
+) -> *mut ::std::option::Option<
+ extern "C" fn(arg1: i32, _____________________a: i32, arg3: i32) -> (),
+> {
+}
+
+pub fn check_path<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ path: &hir::Path,
+ id: ast::NodeId,
+ cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option<Depecation>),
+) {
+}
+
+pub fn check_path<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ path: &hir::Path,
+ id: ast::NodeId,
+ cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option<Deprecation>),
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/fn_args_indent-block.rs b/src/tools/rustfmt/tests/target/fn_args_indent-block.rs
new file mode 100644
index 000000000..f5232a488
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn_args_indent-block.rs
@@ -0,0 +1,143 @@
+// rustfmt-normalize_comments: true
+
+fn foo() {
+ foo();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) {
+ bar();
+}
+
+fn foo(a: Aaaaaaaaaaaaaa, b: Bbbbbbbbbbbbbb) -> String {
+ foo();
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) -> String {
+ bar();
+}
+
+fn foo(a: u8 /* Comment 1 */, b: u8 /* Comment 2 */) -> u8 {
+ bar()
+}
+
+fn foo(
+ a: u8, // Comment 1
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee, // Comment 2
+) -> u8 {
+ bar()
+}
+
+fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+) -> String
+where
+ X: Fooooo,
+ Y: Baaar,
+{
+ bar();
+}
+
+fn foo() -> T {
+ foo();
+}
+
+fn foo() -> T
+where
+ X: Foooo,
+ Y: Baaar,
+{
+ foo();
+}
+
+fn foo()
+where
+ X: Foooo,
+{
+}
+
+fn foo()
+where
+ X: Foooo,
+ Y: Baaar,
+{
+}
+
+fn foo() -> (
+ Loooooooooooooooooooooong,
+ Reeeeeeeeeeeeeeeeeeeeeeeeturn,
+ iiiiiiiiis,
+ Looooooooooooooooong,
+) {
+ foo();
+}
+
+fn foo<g: G>() {
+ foo();
+}
+
+fn foo<
+ L: Loooooooooooooooooooooong,
+ G: Geeeeeeeeeeeneric,
+ I: iiiiiiiiis,
+ L: Looooooooooooooooong,
+>() {
+ foo();
+}
+
+fn foo<L: Loooooooooooooooooooong, G: Geeeeeeeeeeneric, I: iiiiiiiiis, L: Loooooooooooooooong>() {
+ foo();
+}
+
+trait Test {
+ fn foo(a: u8) {}
+
+ fn bar(
+ a: Aaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+ e: Eeeeeeeeeeeeeee,
+ ) -> String {
+ }
+}
+
+fn foo<L: Loooooooooooooooooooong, G: Geeeeeeeeeeneric, I: iiiiiiiiis, L: Loooooooooooooooong>(
+ a: Aaaaaaaaaaaaaaaaaaaa,
+ b: Bbbbbbbbbbbbbbbbb,
+ c: Cccccccccccccccccc,
+ d: Dddddddddddddddd,
+) {
+ foo();
+}
+
+fn foo() -> (
+ Looooooooooooooooooooooooooong,
+ Reeeeeeeeeeeeeeeeeeeeeeeeeeeeeturn,
+ iiiiiiiiiiiiiis,
+ Loooooooooooooooooooooong,
+) {
+ foo();
+}
diff --git a/src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs b/src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs
new file mode 100644
index 000000000..da0ac981d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs
@@ -0,0 +1,39 @@
+// rustfmt-fn_args_layout: Vertical
+
+// Empty list should stay on one line.
+fn do_bar() -> u8 {
+ bar()
+}
+
+// A single argument should stay on the same line.
+fn do_bar(a: u8) -> u8 {
+ bar()
+}
+
+// Multiple arguments should each get their own line.
+fn do_bar(
+ a: u8,
+ mut b: u8,
+ c: &u8,
+ d: &mut u8,
+ closure: &Fn(i32) -> i32,
+) -> i32 {
+ // This feature should not affect closures.
+ let bar = |x: i32, y: i32| -> i32 { x + y };
+ bar(a, b)
+}
+
+// If the first argument doesn't fit on the same line with the function name,
+// the whole list should probably be pushed to the next line with hanging
+// indent. That's not what happens though, so check current behaviour instead.
+// In any case, it should maintain single argument per line.
+fn do_this_that_and_the_other_thing(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa: u8,
+ b: u8,
+ c: u8,
+ d: u8,
+) {
+ this();
+ that();
+ the_other_thing();
+}
diff --git a/src/tools/rustfmt/tests/target/fn_once.rs b/src/tools/rustfmt/tests/target/fn_once.rs
new file mode 100644
index 000000000..42b8f98e7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/fn_once.rs
@@ -0,0 +1,8 @@
+struct Add(usize);
+
+impl FnOnce<(usize,)> for Add {
+ type Output = Add;
+ extern "rust-call" fn call_once(self, to: (usize,)) -> Add {
+ Add(self.0 + to.0)
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/format_strings/issue-202.rs b/src/tools/rustfmt/tests/target/format_strings/issue-202.rs
new file mode 100644
index 000000000..2a2c24140
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/format_strings/issue-202.rs
@@ -0,0 +1,25 @@
+// rustfmt-format_strings: true
+
+#[test]
+fn compile_empty_program() {
+ let result = get_result();
+ let expected = "; ModuleID = \'foo\'
+
+; Function Attrs: nounwind
+declare void @llvm.memset.p0i8.i32(i8* nocapture, i8, i32, i32, i1) #0
+
+declare i32 @write(i32, i8*, i32)
+
+declare i32 @putchar(i32)
+
+declare i32 @getchar()
+
+define i32 @main() {
+entry:
+ ret i32 0
+}
+
+attributes #0 = { nounwind }
+";
+ assert_eq!(result, CString::new(expected).unwrap());
+}
diff --git a/src/tools/rustfmt/tests/target/format_strings/issue-2833.rs b/src/tools/rustfmt/tests/target/format_strings/issue-2833.rs
new file mode 100644
index 000000000..704835325
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/format_strings/issue-2833.rs
@@ -0,0 +1,15 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 80
+
+fn test1() {
+ let expected = "\
+but Doctor Watson has to have it taken out for him and dusted,
+";
+}
+
+fn test2() {
+ let expected = "\
+[Omitted long matching line]
+but Doctor Watson has to have it taken out for him and dusted,
+";
+}
diff --git a/src/tools/rustfmt/tests/target/format_strings/issue-3263.rs b/src/tools/rustfmt/tests/target/format_strings/issue-3263.rs
new file mode 100644
index 000000000..72f7e9cc6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/format_strings/issue-3263.rs
@@ -0,0 +1,26 @@
+// rustfmt-format_strings: true
+// rustfmt-newline_style: Windows
+
+#[test]
+fn compile_empty_program() {
+ let result = get_result();
+ let expected = "; ModuleID = \'foo\'
+
+; Function Attrs: nounwind
+declare void @llvm.memset.p0i8.i32(i8* nocapture, i8, i32, i32, i1) #0
+
+declare i32 @write(i32, i8*, i32)
+
+declare i32 @putchar(i32)
+
+declare i32 @getchar()
+
+define i32 @main() {
+entry:
+ ret i32 0
+}
+
+attributes #0 = { nounwind }
+";
+ assert_eq!(result, CString::new(expected).unwrap());
+}
diff --git a/src/tools/rustfmt/tests/target/format_strings/issue-687.rs b/src/tools/rustfmt/tests/target/format_strings/issue-687.rs
new file mode 100644
index 000000000..21d292f9e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/format_strings/issue-687.rs
@@ -0,0 +1,10 @@
+// rustfmt-format_strings: true
+
+fn foo() -> &'static str {
+ let sql = "ATTACH DATABASE ':memory:' AS my_attached;
+ BEGIN;
+ CREATE TABLE my_attached.foo(x INTEGER);
+ INSERT INTO my_attached.foo VALUES(42);
+ END;";
+ sql
+}
diff --git a/src/tools/rustfmt/tests/target/format_strings/issue564.rs b/src/tools/rustfmt/tests/target/format_strings/issue564.rs
new file mode 100644
index 000000000..d9ef077c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/format_strings/issue564.rs
@@ -0,0 +1,7 @@
+// rustfmt-format_strings: true
+
+const USAGE: &'static str = "
+Usage: codegen project <name> <digits> <len> <codes> <prizes> <step> <shift>
+ codegen regenerate <name>
+ codegen verify <name> <code>
+";
diff --git a/src/tools/rustfmt/tests/target/hard-tabs.rs b/src/tools/rustfmt/tests/target/hard-tabs.rs
new file mode 100644
index 000000000..aca7e09c0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/hard-tabs.rs
@@ -0,0 +1,98 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-hard_tabs: true
+
+fn main() {
+ let x = Bar;
+
+ let y = Foo { a: x };
+
+ Foo {
+ a: foo(), // comment
+ // comment
+ b: bar(),
+ ..something
+ };
+
+ fn foo(a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32, a: i32) {
+ }
+
+ let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
+
+ if let (
+ some_very_large,
+ tuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuple,
+ ) = 1 + 2 + 3
+ {}
+
+ if cond() {
+ something();
+ } else if different_cond() {
+ something_else();
+ } else {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ }
+
+ unsafe /* very looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+ * comment */ {
+ }
+
+ unsafe /* So this is a very long comment.
+ * Multi-line, too.
+ * Will it still format correctly? */ {
+ }
+
+ let chain = funktion_kall()
+ .go_to_next_line_with_tab()
+ .go_to_next_line_with_tab()
+ .go_to_next_line_with_tab();
+
+ let z = [
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzzz,
+ q,
+ ];
+
+ fn generic<T>(arg: T) -> &SomeType
+ where
+ T: Fn(
+ // First arg
+ A,
+ // Second argument
+ B,
+ C,
+ D,
+ // pre comment
+ E, // last comment
+ ) -> &SomeType,
+ {
+ arg(a, b, c, d, e)
+ }
+
+ loong_func().quux(move || if true { 1 } else { 2 });
+
+ fffffffffffffffffffffffffffffffffff(a, {
+ SCRIPT_TASK_ROOT.with(|root| {
+ *root.borrow_mut() = Some(&script_task);
+ });
+ });
+ a.b.c.d();
+
+ x().y(|| match cond() {
+ true => (),
+ false => (),
+ });
+}
+
+// #2296
+impl Foo {
+ // a comment
+ // on multiple lines
+ fn foo() {
+ // another comment
+ // on multiple lines
+ let x = true;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/hello.rs b/src/tools/rustfmt/tests/target/hello.rs
new file mode 100644
index 000000000..d9f90b0b5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/hello.rs
@@ -0,0 +1,8 @@
+// rustfmt-config: small_tabs.toml
+// rustfmt-target: hello.rs
+
+// Smoke test - hello world.
+
+fn main() {
+ println!("Hello world!");
+}
diff --git a/src/tools/rustfmt/tests/target/hex_literal_lower.rs b/src/tools/rustfmt/tests/target/hex_literal_lower.rs
new file mode 100644
index 000000000..5c27fded1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/hex_literal_lower.rs
@@ -0,0 +1,5 @@
+// rustfmt-hex_literal_case: Lower
+fn main() {
+ let h1 = 0xcafe_5ea7;
+ let h2 = 0xcafe_f00du32;
+}
diff --git a/src/tools/rustfmt/tests/target/hex_literal_preserve.rs b/src/tools/rustfmt/tests/target/hex_literal_preserve.rs
new file mode 100644
index 000000000..e8774d0bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/hex_literal_preserve.rs
@@ -0,0 +1,5 @@
+// rustfmt-hex_literal_case: Preserve
+fn main() {
+ let h1 = 0xcAfE_5Ea7;
+ let h2 = 0xCaFe_F00du32;
+}
diff --git a/src/tools/rustfmt/tests/target/hex_literal_upper.rs b/src/tools/rustfmt/tests/target/hex_literal_upper.rs
new file mode 100644
index 000000000..48bb93d2c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/hex_literal_upper.rs
@@ -0,0 +1,5 @@
+// rustfmt-hex_literal_case: Upper
+fn main() {
+ let h1 = 0xCAFE_5EA7;
+ let h2 = 0xCAFE_F00Du32;
+}
diff --git a/src/tools/rustfmt/tests/target/if_while_or_patterns.rs b/src/tools/rustfmt/tests/target/if_while_or_patterns.rs
new file mode 100644
index 000000000..61a357afc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/if_while_or_patterns.rs
@@ -0,0 +1,38 @@
+#![feature(if_while_or_patterns)]
+
+fn main() {
+ if let 0 | 1 = 0 {
+ println!("hello, world");
+ };
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbbbbbb | cccccccccccccccc | d_100 = 0 {
+ println!("hello, world");
+ }
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbb | ccccccccccccccccccccc | d_101 = 0
+ {
+ println!("hello, world");
+ }
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaaaa | bbbbbbbbbbbbbbbbbbbbbbb | ccccccccccccccccccccc | d_103 =
+ 0
+ {
+ println!("hello, world");
+ }
+
+ if let aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ | bbbbbbbbbbbbbbbbbbbbbbb
+ | ccccccccccccccccccccc
+ | d_105 = 0
+ {
+ println!("hello, world");
+ }
+
+ while let xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx
+ | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx | xxx = foo_bar(
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ cccccccccccccccccccccccccccccccccccccccc,
+ ) {
+ println!("hello, world");
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/immovable_generators.rs b/src/tools/rustfmt/tests/target/immovable_generators.rs
new file mode 100644
index 000000000..0bf7a2d91
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/immovable_generators.rs
@@ -0,0 +1,7 @@
+#![feature(generators)]
+
+unsafe fn foo() {
+ let mut ga = static || {
+ yield 1;
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/impl.rs b/src/tools/rustfmt/tests/target/impl.rs
new file mode 100644
index 000000000..f37fbcf1f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/impl.rs
@@ -0,0 +1,43 @@
+// Test impls
+
+impl<T> JSTraceable for SmallVec<[T; 1]> {}
+
+impl<K, V, NodeRef: Deref<Target = Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
+ // Keep this.
+}
+
+impl<V> Test<V>
+where
+ V: Clone, // This comment is NOT removed by formatting!
+{
+ pub fn new(value: V) -> Self {
+ Test {
+ cloned_value: value.clone(),
+ value,
+ }
+ }
+}
+
+impl X<T> /* comment */ {}
+impl Y<T> // comment
+{
+}
+
+impl<T> Foo for T
+// comment1
+where
+ // comment2
+ // blah
+ T: Clone,
+{
+}
+
+// #1823
+default impl Trait for X {}
+default unsafe impl Trait for Y {}
+pub default unsafe impl Trait for Z {}
+
+// #2212
+impl ConstWithDefault {
+ default const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
+}
diff --git a/src/tools/rustfmt/tests/target/impls.rs b/src/tools/rustfmt/tests/target/impls.rs
new file mode 100644
index 000000000..99e02990e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/impls.rs
@@ -0,0 +1,252 @@
+// rustfmt-normalize_comments: true
+impl Foo for Bar {
+ fn foo() {
+ "hi"
+ }
+}
+
+pub impl Foo for Bar {
+ // Associated Constants
+ const Baz: i32 = 16;
+ // Associated Types
+ type FooBar = usize;
+ // Comment 1
+ fn foo() {
+ "hi"
+ }
+ // Comment 2
+ fn foo() {
+ "hi"
+ }
+ // Comment 3
+}
+
+#[inherent]
+impl Visible for Bar {
+ pub const C: i32;
+ pub type T;
+ pub fn f();
+ pub fn g() {}
+}
+
+pub unsafe impl<'a, 'b, X, Y: Foo<Bar>> !Foo<'a, X> for Bar<'b, Y>
+where
+ X: Foo<'a, Z>,
+{
+ fn foo() {
+ "hi"
+ }
+}
+
+impl<'a, 'b, X, Y: Foo<Bar>> Foo<'a, X> for Bar<'b, Y>
+where
+ X: Fooooooooooooooooooooooooooooo<'a, Z>,
+{
+ fn foo() {
+ "hi"
+ }
+}
+
+impl<'a, 'b, X, Y: Foo<Bar>> Foo<'a, X> for Bar<'b, Y>
+where
+ X: Foooooooooooooooooooooooooooo<'a, Z>,
+{
+ fn foo() {
+ "hi"
+ }
+}
+
+impl<T> Foo for Bar<T> where T: Baz {}
+
+impl<T> Foo for Bar<T>
+where
+ T: Baz,
+{
+ // Comment
+}
+
+impl Foo {
+ fn foo() {}
+}
+
+impl Boo {
+ // BOO
+ fn boo() {}
+ // FOO
+}
+
+mod a {
+ impl Foo {
+ // Hello!
+ fn foo() {}
+ }
+}
+
+mod b {
+ mod a {
+ impl Foo {
+ fn foo() {}
+ }
+ }
+}
+
+impl Foo {
+ add_fun!();
+}
+
+impl Blah {
+ fn boop() {}
+ add_fun!();
+}
+
+impl X {
+ fn do_parse(mut self: X) {}
+}
+
+impl Y5000 {
+ fn bar(self: X<'a, 'b>, y: Y) {}
+
+ fn bad(&self, (x, y): CoorT) {}
+
+ fn turbo_bad(self: X<'a, 'b>, (x, y): CoorT) {}
+}
+
+pub impl<T> Foo for Bar<T>
+where
+ T: Foo,
+{
+ fn foo() {
+ "hi"
+ }
+}
+
+pub impl<T, Z> Foo for Bar<T, Z>
+where
+ T: Foo,
+ Z: Baz,
+{
+}
+
+mod m {
+ impl<T> PartialEq for S<T>
+ where
+ T: PartialEq,
+ {
+ fn eq(&self, other: &Self) {
+ true
+ }
+ }
+
+ impl<T> PartialEq for S<T> where T: PartialEq {}
+}
+
+impl<BorrowType, K, V, NodeType, HandleType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+}
+
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+}
+
+mod x {
+ impl<A, B, C, D> Foo
+ where
+ A: 'static,
+ B: 'static,
+ C: 'static,
+ D: 'static,
+ {
+ }
+}
+
+impl<ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNodeFoo>
+ Issue1249<ConcreteThreadSafeLayoutNode>
+{
+ // Creates a new flow constructor.
+ fn foo() {}
+}
+
+// #1600
+impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
+ fn drop() {}
+}
+
+// #1168
+pub trait Number:
+ Copy
+ + Eq
+ + Not<Output = Self>
+ + Shl<u8, Output = Self>
+ + Shr<u8, Output = Self>
+ + BitAnd<Self, Output = Self>
+ + BitOr<Self, Output = Self>
+ + BitAndAssign
+ + BitOrAssign
+{
+ // test
+ fn zero() -> Self;
+}
+
+// #1642
+pub trait SomeTrait:
+ Clone
+ + Eq
+ + PartialEq
+ + Ord
+ + PartialOrd
+ + Default
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read
+ + FromStr
+{
+ // comment
+}
+
+// #1995
+impl Foo {
+ fn f(
+ S {
+ aaaaaaaaaa: aaaaaaaaaa,
+ bbbbbbbbbb: bbbbbbbbbb,
+ cccccccccc: cccccccccc,
+ }: S,
+ ) -> u32 {
+ 1
+ }
+}
+
+// #2491
+impl<'a, 'b, 'c> SomeThing<Something>
+ for (
+ &'a mut SomethingLong,
+ &'b mut SomethingLong,
+ &'c mut SomethingLong,
+ )
+{
+ fn foo() {}
+}
+
+// #2746
+impl<'seq1, 'seq2, 'body, 'scope, Channel>
+ Adc12<
+ Dual,
+ MasterRunningDma<'seq1, 'body, 'scope, Channel>,
+ SlaveRunningDma<'seq2, 'body, 'scope>,
+ >
+where
+ Channel: DmaChannel,
+{
+}
+
+// #4084
+impl const std::default::Default for Struct {
+ #[inline]
+ fn default() -> Self {
+ Self { f: 12.5 }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/imports/import-fencepost-length.rs b/src/tools/rustfmt/tests/target/imports/import-fencepost-length.rs
new file mode 100644
index 000000000..fd09d50d7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/import-fencepost-length.rs
@@ -0,0 +1,7 @@
+use aaaaaaaaaaaaaaa::bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb;
+use aaaaaaaaaaaaaaa::{
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, ccccccccccccccccccccccccccccccc, dddddddd,
+};
+use aaaaaaaaaaaaaaa::{
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, ccccccccccccccccccccccccccccccc, ddddddddd,
+};
diff --git a/src/tools/rustfmt/tests/target/imports/imports-impl-only-use.rs b/src/tools/rustfmt/tests/target/imports/imports-impl-only-use.rs
new file mode 100644
index 000000000..d290d8d91
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports-impl-only-use.rs
@@ -0,0 +1,4 @@
+#![feature(underscore_imports)]
+
+use attr;
+use std::iter::Iterator as _;
diff --git a/src/tools/rustfmt/tests/target/imports/imports-reorder-lines-and-items.rs b/src/tools/rustfmt/tests/target/imports/imports-reorder-lines-and-items.rs
new file mode 100644
index 000000000..98a5afe43
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports-reorder-lines-and-items.rs
@@ -0,0 +1,7 @@
+use std::cmp::{a, b, c, d};
+use std::ddd::aaa;
+use std::ddd::{a, b, c as g, d as p};
+/// This comment should stay with `use std::str;`
+use std::str;
+// This comment should stay with `use std::ddd:bbb;`
+use std::ddd::bbb;
diff --git a/src/tools/rustfmt/tests/target/imports/imports-reorder-lines.rs b/src/tools/rustfmt/tests/target/imports/imports-reorder-lines.rs
new file mode 100644
index 000000000..5b85503b5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports-reorder-lines.rs
@@ -0,0 +1,31 @@
+use std::cmp::{a, b, c, d};
+use std::cmp::{b, e, f, g};
+use std::ddd::aaa;
+use std::str;
+// This comment should stay with `use std::ddd;`
+use std::ddd;
+use std::ddd::bbb;
+
+mod test {}
+
+use aaa;
+use aaa::bbb;
+use aaa::*;
+
+mod test {}
+// If item names are equal, order by rename
+
+use test::{a as bb, b};
+use test::{a as aa, c};
+
+mod test {}
+// If item names are equal, order by rename - no rename comes before a rename
+
+use test::{a as bb, b};
+use test::{a, c};
+
+mod test {}
+// `self` always comes first
+
+use test::{self as bb, b};
+use test::{a as aa, c};
diff --git a/src/tools/rustfmt/tests/target/imports/imports-reorder.rs b/src/tools/rustfmt/tests/target/imports/imports-reorder.rs
new file mode 100644
index 000000000..84e97c022
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports-reorder.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+
+use path::{self /* self */, /* A */ A, B /* B */, C};
+
+use {aa, ab, ac, b, Z};
diff --git a/src/tools/rustfmt/tests/target/imports/imports.rs b/src/tools/rustfmt/tests/target/imports/imports.rs
new file mode 100644
index 000000000..87584d89f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports.rs
@@ -0,0 +1,129 @@
+// rustfmt-normalize_comments: true
+
+// Imports.
+
+// Long import.
+use exceedingly::loooooooooooooooooooooooooooooooooooooooooooooooooooooooong::import::path::{
+ ItemA, ItemB,
+};
+use exceedingly::looooooooooooooooooooooooooooooooooooooooooooooooooooooooooong::import::path::{
+ ItemA, ItemB,
+};
+use rustc_ast::ast::{ItemDefaultImpl, ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic};
+
+use list::{
+ // Another item
+ AnotherItem, // Another Comment
+ // Last Item
+ LastItem,
+ // Some item
+ SomeItem, // Comment
+};
+
+use test::{/* A */ self /* B */, Other /* C */};
+
+pub use rustc_ast::ast::{Expr, ExprAssign, ExprCall, ExprMethodCall, ExprPath, Expr_};
+use rustc_ast::{self};
+use Foo::{Bar, Baz};
+use {Bar /* comment */, /* Pre-comment! */ Foo};
+
+use std::io;
+use std::io::{self};
+
+mod Foo {
+ pub use rustc_ast::ast::{
+ ItemDefaultImpl, ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic,
+ };
+
+ mod Foo2 {
+ pub use rustc_ast::ast::{
+ self, ItemDefaultImpl, ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic,
+ };
+ }
+}
+
+fn test() {
+ use Baz::*;
+ use Qux;
+}
+
+// Simple imports
+use bar::quux as kaas;
+use foo;
+use foo::bar::baz;
+
+// With aliases.
+use foo::qux as bar;
+use foo::{self as bar};
+use foo::{self as bar, baz};
+use foo::{baz, qux as bar};
+
+// With absolute paths
+use foo;
+use foo::Bar;
+use foo::{Bar, Baz};
+use Foo;
+use {Bar, Baz};
+
+// Root globs
+use *;
+use *;
+
+// spaces used to cause glob imports to disappear (#1356)
+use super::*;
+use foo::issue_1356::*;
+
+// We shouldn't remove imports which have attributes attached (#1858)
+#[cfg(unix)]
+use self::unix::{};
+
+// nested imports
+use foo::{
+ a, b,
+ bar::{
+ baz,
+ foo::{a, b, cxxxxxxxxxxxxx, yyyyyyyyyyyyyy, zzzzzzzzzzzzzzzz},
+ qux, xxxxxxxxxxx, yyyyyyyyyyyyy, zzzzzzzzzzzzzzzz,
+ },
+ boo, c,
+};
+
+use fooo::{
+ baar::foobar::{
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
+ },
+ bar,
+ bar::*,
+ x, y, z,
+};
+
+use exonum::{
+ api::{Api, ApiError},
+ blockchain::{self, BlockProof, Blockchain, Transaction, TransactionSet},
+ crypto::{Hash, PublicKey},
+ helpers::Height,
+ node::TransactionSend,
+ storage::{ListProof, MapProof},
+};
+
+// nested imports with a single sub-tree.
+use a::b::c::d;
+use a::b::c::*;
+use a::b::c::{xxx, yyy, zzz};
+
+// #2645
+/// This line is not affected.
+// This line is deleted.
+use c;
+
+// #2670
+#[macro_use]
+use imports_with_attr;
+
+// #2888
+use std::f64::consts::{E, PI, SQRT_2};
+
+// #3273
+#[rustfmt::skip]
+use std::fmt::{self, {Display, Formatter}};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_2021_edition.rs b/src/tools/rustfmt/tests/target/imports/imports_2021_edition.rs
new file mode 100644
index 000000000..34dcc866a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_2021_edition.rs
@@ -0,0 +1,3 @@
+// rustfmt-edition: 2021
+
+use ::happy::new::year;
diff --git a/src/tools/rustfmt/tests/target/imports/imports_block_indent.rs b/src/tools/rustfmt/tests/target/imports/imports_block_indent.rs
new file mode 100644
index 000000000..8c90f7ce2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_block_indent.rs
@@ -0,0 +1,4 @@
+// #2569
+use apns2::request::notification::{
+ Notificatio, NotificationBuilder, Priority, SilentNotificationBuilder,
+};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_granularity_crate.rs b/src/tools/rustfmt/tests/target/imports/imports_granularity_crate.rs
new file mode 100644
index 000000000..36e01558f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_granularity_crate.rs
@@ -0,0 +1,59 @@
+// rustfmt-imports_granularity: Crate
+
+use a::{a, b, c, d, e, f, g};
+
+#[doc(hidden)]
+use a::b;
+use a::{c, d};
+
+#[doc(hidden)]
+use a::b;
+use a::{c, d, e};
+
+use foo::{a, b, c};
+pub use foo::{bar, foobar};
+
+use a::b::c::{d, xxx, yyy, zzz, *};
+
+// https://github.com/rust-lang/rustfmt/issues/3808
+use d::{self};
+use e::{self as foo};
+use f::{self, b};
+use g::{self, a, b};
+use h::a;
+use i::a::{self};
+use j::a::{self};
+
+use k::{a, b, c, d};
+use l::{a, b, c, d};
+
+use b::q::{self /* After b::q::self */};
+use b::r; // After b::r
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::t::{/* Before b::t::self */ self};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::{
+ c, d, e,
+ u::{a, b},
+};
+use b::{
+ f::g,
+ h::{i, j}, /* After b::h group */
+};
+use b::{
+ /* Before b::l group */ l::{self, m, n::o, p::*},
+ q,
+};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_granularity_default-with-dups.rs b/src/tools/rustfmt/tests/target/imports/imports_granularity_default-with-dups.rs
new file mode 100644
index 000000000..5da6d588e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_granularity_default-with-dups.rs
@@ -0,0 +1,6 @@
+use crate::lexer;
+use crate::lexer;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::{self};
+use crate::lexer::{self, tokens::TokenData};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs b/src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs
new file mode 100644
index 000000000..ed4df544d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups-StdExternalCrate-no-reorder.rs
@@ -0,0 +1,7 @@
+// rustfmt-imports_granularity: Item
+// rustfmt-reorder_imports: false
+// rustfmt-group_imports: StdExternalCrate
+
+use crate::lexer;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::{self};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups.rs b/src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups.rs
new file mode 100644
index 000000000..00df37f93
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_granularity_item-with-dups.rs
@@ -0,0 +1,5 @@
+// rustfmt-imports_granularity: Item
+
+use crate::lexer;
+use crate::lexer::tokens::TokenData;
+use crate::lexer::{self};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_granularity_item.rs b/src/tools/rustfmt/tests/target/imports/imports_granularity_item.rs
new file mode 100644
index 000000000..d2f5496fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_granularity_item.rs
@@ -0,0 +1,45 @@
+// rustfmt-imports_granularity: Item
+
+use a::b;
+use a::c;
+use a::d;
+use a::f::g;
+use a::h::i;
+use a::h::j;
+use a::l::m;
+use a::l::n::o;
+use a::l::p::*;
+use a::l::{self};
+use a::q::{self};
+
+use b::c;
+use b::d;
+use b::e;
+use b::q::{self /* After b::q::self */};
+use b::r; // After b::r
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::t::{/* Before b::t::self */ self};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::u::a;
+use b::u::b;
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::{
+ f::g,
+ h::{i, j}, /* After b::h group */
+};
+use b::{
+ /* Before b::l group */ l::{self, m, n::o, p::*},
+ q,
+};
diff --git a/src/tools/rustfmt/tests/target/imports/imports_granularity_module.rs b/src/tools/rustfmt/tests/target/imports/imports_granularity_module.rs
new file mode 100644
index 000000000..14f341016
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports/imports_granularity_module.rs
@@ -0,0 +1,55 @@
+// rustfmt-imports_granularity: Module
+
+use a::b::c;
+use a::d::e;
+use a::f;
+use a::g::{h, i};
+use a::j::k::{self, l};
+use a::j::{self, m};
+use a::n::o::p;
+use a::n::q;
+pub use a::r::s;
+pub use a::t;
+use b::c::d;
+use b::{self};
+
+use foo::e;
+#[cfg(test)]
+use foo::{a::b, c::d};
+
+use bar::{
+ // comment
+ a::b,
+ // more comment
+ c::d,
+ e::f,
+};
+
+use b::q::{self /* After b::q::self */};
+use b::r; // After b::r
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::t::{/* Before b::t::self */ self};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::u::{a, b};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::{c, d, e};
+use b::{
+ f::g,
+ h::{i, j}, /* After b::h group */
+};
+use b::{
+ /* Before b::l group */ l::{self, m, n::o, p::*},
+ q,
+};
diff --git a/src/tools/rustfmt/tests/target/imports_granularity_one.rs b/src/tools/rustfmt/tests/target/imports_granularity_one.rs
new file mode 100644
index 000000000..da4c6678d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports_granularity_one.rs
@@ -0,0 +1,109 @@
+// rustfmt-imports_granularity: One
+
+use {
+ a::{
+ aa::*,
+ ab,
+ ac::{aca, acb},
+ },
+ b,
+};
+
+use {
+ a::{self as x, aa, ab},
+ b::ba,
+};
+
+use a::{
+ aa::{aaa, *},
+ ab::aba as x,
+};
+
+#[cfg(test)]
+use a::{ab, ac::aca};
+#[cfg(test)]
+use b::{
+ ba, bb,
+ bc::bca::{bcaa, bcab},
+};
+use {
+ a::{aa, ad::ada},
+ b,
+};
+
+pub use {
+ a::{aa, ae},
+ b::{bb, bc::bca},
+};
+use {
+ a::{ab, ac, ad},
+ b::ba,
+};
+
+use {
+ a::{
+ aa::{aaa, *},
+ ab,
+ ac::{aca, acb},
+ },
+ b::{
+ ba,
+ bb::{self, bba},
+ },
+};
+
+use {
+ crate::{a, b::ba},
+ c::ca,
+};
+
+use {
+ super::{a, b::ba},
+ c::ca,
+};
+
+use {
+ super::b,
+ crate::a,
+ c::{self, ca},
+};
+
+use a::{
+ // some comment
+ aa::{aaa, aab},
+ ab,
+ // another comment
+ ac::aca,
+};
+use {a::ad::ada, b as x};
+
+use b::q::{self /* After b::q::self */};
+use b::r; // After b::r
+use b::s::{
+ a,
+ b, // After b::s::b
+};
+use b::t::{/* Before b::t::self */ self};
+use b::t::{
+ // Before b::t::a
+ a,
+ b,
+};
+use b::v::{
+ // Before b::v::a
+ a,
+ // Before b::v::b
+ b,
+};
+use b::{
+ c, d, e,
+ u::{a, b},
+};
+use b::{
+ f::g,
+ h::{i, j}, /* After b::h group */
+};
+use b::{
+ /* Before b::l group */ l::{self, m, n::o, p::*},
+ q,
+};
diff --git a/src/tools/rustfmt/tests/target/imports_raw_identifiers/version_One.rs b/src/tools/rustfmt/tests/target/imports_raw_identifiers/version_One.rs
new file mode 100644
index 000000000..bc4b5b135
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports_raw_identifiers/version_One.rs
@@ -0,0 +1,5 @@
+// rustfmt-version:One
+
+use websocket::client::ClientBuilder;
+use websocket::r#async::futures::Stream;
+use websocket::result::WebSocketError;
diff --git a/src/tools/rustfmt/tests/target/imports_raw_identifiers/version_Two.rs b/src/tools/rustfmt/tests/target/imports_raw_identifiers/version_Two.rs
new file mode 100644
index 000000000..22bfe9312
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/imports_raw_identifiers/version_Two.rs
@@ -0,0 +1,5 @@
+// rustfmt-version:Two
+
+use websocket::r#async::futures::Stream;
+use websocket::client::ClientBuilder;
+use websocket::result::WebSocketError;
diff --git a/src/tools/rustfmt/tests/target/indented-impl.rs b/src/tools/rustfmt/tests/target/indented-impl.rs
new file mode 100644
index 000000000..eff579ddd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/indented-impl.rs
@@ -0,0 +1,13 @@
+// rustfmt-brace_style: AlwaysNextLine
+mod x
+{
+ struct X(i8);
+
+ impl Y for X
+ {
+ fn y(self) -> ()
+ {
+ println!("ok");
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/inner-module-path/b.rs b/src/tools/rustfmt/tests/target/inner-module-path/b.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/inner-module-path/b.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/inner-module-path/c/d.rs b/src/tools/rustfmt/tests/target/inner-module-path/c/d.rs
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/inner-module-path/c/d.rs
@@ -0,0 +1 @@
+
diff --git a/src/tools/rustfmt/tests/target/inner-module-path/lib.rs b/src/tools/rustfmt/tests/target/inner-module-path/lib.rs
new file mode 100644
index 000000000..60d246dd5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/inner-module-path/lib.rs
@@ -0,0 +1,8 @@
+#[path = "."]
+mod a {
+ mod b;
+}
+
+mod c {
+ mod d;
+}
diff --git a/src/tools/rustfmt/tests/target/invalid-rust-code-in-doc-comment.rs b/src/tools/rustfmt/tests/target/invalid-rust-code-in-doc-comment.rs
new file mode 100644
index 000000000..f8479d4e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/invalid-rust-code-in-doc-comment.rs
@@ -0,0 +1,18 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// ```rust
+/// if (true) { … }
+/// ```
+fn a() {}
+
+/// ```rust
+/// if foo() {
+/// …
+/// }
+/// ```
+fn a() {}
+
+/// ```rust
+/// k1 == k2 ⇒ hash(k1) == hash(k2)
+/// ```
+pub struct a;
diff --git a/src/tools/rustfmt/tests/target/issue-1021.rs b/src/tools/rustfmt/tests/target/issue-1021.rs
new file mode 100644
index 000000000..ba1029d4e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1021.rs
@@ -0,0 +1,22 @@
+// rustfmt-normalize_comments: true
+fn main() {
+ match x {
+ S(true, .., true) => (),
+ S(true, ..) => (),
+ S(.., true) => (),
+ S(..) => (),
+ S(_) => (),
+ S(/* .. */ ..) => (),
+ S(/* .. */ .., true) => (),
+ }
+
+ match y {
+ (true, .., true) => (),
+ (true, ..) => (),
+ (.., true) => (),
+ (..) => (),
+ (_,) => (),
+ (/* .. */ ..) => (),
+ (/* .. */ .., true) => (),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1049.rs b/src/tools/rustfmt/tests/target/issue-1049.rs
new file mode 100644
index 000000000..c788519ca
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1049.rs
@@ -0,0 +1,29 @@
+// Test overlong function signature
+pub unsafe fn reborrow_mut(
+ &mut X: Abcde,
+) -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
+}
+
+pub fn merge(
+ mut X: Abcdef,
+) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+}
+
+impl Handle {
+ pub fn merge(
+ a: Abcd,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ }
+}
+
+// Long function without return type that should not be reformatted.
+fn veeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {}
+
+fn veeeeeeeeeeeeeeeeeeeeeery_long_name(a: FirstTypeeeeeeeeee, b: SecondTypeeeeeeeeeeeeeeeeeeeeeee) {
+}
+
+fn veeeeeeeeeeeeeeeeeeeeeeery_long_name(
+ a: FirstTypeeeeeeeeee,
+ b: SecondTypeeeeeeeeeeeeeeeeeeeeeee,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1055.rs b/src/tools/rustfmt/tests/target/issue-1055.rs
new file mode 100644
index 000000000..ee143e792
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1055.rs
@@ -0,0 +1,3 @@
+fn issue_1055() {
+ let foo = (|| {})();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1096.rs b/src/tools/rustfmt/tests/target/issue-1096.rs
new file mode 100644
index 000000000..de78e7364
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1096.rs
@@ -0,0 +1,71 @@
+struct StructA<T> /* comment 1 */ {
+ t: T,
+}
+
+struct StructB<T> /* comment 2 */;
+
+struct StructC /* comment 3 */;
+
+struct StructD /* comment 4 */ {
+ t: usize,
+}
+
+struct StructE<T>
+/* comment 5 */
+where
+ T: Clone,
+{
+ t: usize,
+}
+
+struct StructF
+/* comment 6 */
+where
+ T: Clone,
+{
+ t: usize,
+}
+
+struct StructG<T>
+/* comment 7 */
+// why a line comment??
+{
+ t: T,
+}
+
+struct StructH<T>
+/* comment 8 */
+// why a line comment??
+where
+ T: Clone,
+{
+ t: T,
+}
+
+enum EnumA<T> /* comment 8 */ {
+ Field(T),
+}
+
+enum EnumB /* comment 9 */ {
+ Field,
+}
+
+// Issue 2781
+struct StructX1<T>
+// where
+// T: Clone
+{
+ inner: String,
+}
+
+struct StructX2<
+ T,
+ U: Iterator<Item = String>,
+ V: Iterator<Item = String>,
+ W: Iterator<Item = String>,
+>
+// where
+// T: Clone
+{
+ inner: String,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1111.rs b/src/tools/rustfmt/tests/target/issue-1111.rs
new file mode 100644
index 000000000..2e1a89ad7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1111.rs
@@ -0,0 +1 @@
+use bar;
diff --git a/src/tools/rustfmt/tests/target/issue-1113.rs b/src/tools/rustfmt/tests/target/issue-1113.rs
new file mode 100644
index 000000000..1245bcd05
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1113.rs
@@ -0,0 +1,33 @@
+pub fn foo() -> fmt::Result
+//pub fn writeStringToken
+{
+ panic!()
+}
+
+pub fn foo() -> fmt::Result // pub fn writeStringToken
+{
+ panic!()
+}
+
+pub fn foo() -> fmt::Result /* pub fn writeStringToken */ {
+ panic!()
+}
+
+pub fn foo() -> fmt::Result
+/* pub fn writeStringToken */ {
+ panic!()
+}
+
+pub fn foo() -> fmt::Result
+/* pub fn writeStringToken */
+{
+ panic!()
+}
+
+pub fn foo() -> fmt::Result /*
+ *
+ *
+ */
+{
+ panic!()
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1120.rs b/src/tools/rustfmt/tests/target/issue-1120.rs
new file mode 100644
index 000000000..f44597e7d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1120.rs
@@ -0,0 +1,11 @@
+// rustfmt-reorder_imports: true
+
+// Ensure that a use at the start of an inline module is correctly formatted.
+mod foo {
+ use bar;
+}
+
+// Ensure that an indented `use` gets the correct indentation.
+mod foo {
+ use bar;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1124.rs b/src/tools/rustfmt/tests/target/issue-1124.rs
new file mode 100644
index 000000000..f0fc485a3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1124.rs
@@ -0,0 +1,21 @@
+// rustfmt-reorder_imports: true
+
+use a;
+use b;
+use c;
+use d;
+// The previous line has a space after the `use a;`
+
+mod a {
+ use a;
+ use b;
+ use c;
+ use d;
+}
+
+use z;
+
+use y;
+
+use a;
+use x;
diff --git a/src/tools/rustfmt/tests/target/issue-1127.rs b/src/tools/rustfmt/tests/target/issue-1127.rs
new file mode 100644
index 000000000..fb09036d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1127.rs
@@ -0,0 +1,25 @@
+// rustfmt-max_width: 120
+// rustfmt-match_arm_blocks: false
+// rustfmt-match_block_trailing_comma: true
+
+fn a_very_very_very_very_very_very_very_very_very_very_very_long_function_name() -> i32 {
+ 42
+}
+
+enum TestEnum {
+ AVeryVeryLongEnumName,
+ AnotherVeryLongEnumName,
+ TheLastVeryLongEnumName,
+}
+
+fn main() {
+ let var = TestEnum::AVeryVeryLongEnumName;
+ let num = match var {
+ TestEnum::AVeryVeryLongEnumName =>
+ a_very_very_very_very_very_very_very_very_very_very_very_long_function_name(),
+ TestEnum::AnotherVeryLongEnumName =>
+ a_very_very_very_very_very_very_very_very_very_very_very_long_function_name(),
+ TestEnum::TheLastVeryLongEnumName =>
+ a_very_very_very_very_very_very_very_very_very_very_very_long_function_name(),
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1158.rs b/src/tools/rustfmt/tests/target/issue-1158.rs
new file mode 100644
index 000000000..2abfa5a29
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1158.rs
@@ -0,0 +1,3 @@
+trait T {
+ itemmacro!(this, is.now().formatted(yay));
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1177.rs b/src/tools/rustfmt/tests/target/issue-1177.rs
new file mode 100644
index 000000000..dcda39728
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1177.rs
@@ -0,0 +1,7 @@
+// rustfmt-normalize_comments: true
+fn main() {
+ // Line Comment
+ // Block Comment
+
+ let d = 5;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1192.rs b/src/tools/rustfmt/tests/target/issue-1192.rs
new file mode 100644
index 000000000..432fe8cce
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1192.rs
@@ -0,0 +1,3 @@
+fn main() {
+ assert!(true);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1210/a.rs b/src/tools/rustfmt/tests/target/issue-1210/a.rs
new file mode 100644
index 000000000..94c1b44e5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1210/a.rs
@@ -0,0 +1,16 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+impl Foo {
+ fn cxx(&self, target: &str) -> &Path {
+ match self.cxx.get(target) {
+ Some(p) => p.path(),
+ None => panic!(
+ "\n\ntarget `{}` is not \
+ configured as a host,
+ only as a target\n\n",
+ target
+ ),
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1210/b.rs b/src/tools/rustfmt/tests/target/issue-1210/b.rs
new file mode 100644
index 000000000..a7b1e3bcd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1210/b.rs
@@ -0,0 +1,16 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+impl Foo {
+ fn cxx(&self, target: &str) -> &Path {
+ match self.cxx.get(target) {
+ Some(p) => p.path(),
+ None => panic!(
+ "\ntarget `{}`: is not, \
+ configured as a host,
+ only as a target\n\n",
+ target
+ ),
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1210/c.rs b/src/tools/rustfmt/tests/target/issue-1210/c.rs
new file mode 100644
index 000000000..183d79f92
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1210/c.rs
@@ -0,0 +1,7 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+const foo: String =
+ "trailing_spaces!!
+ keep them! Amet neque. Praesent \
+ rhoncus eros non velit.";
diff --git a/src/tools/rustfmt/tests/target/issue-1210/d.rs b/src/tools/rustfmt/tests/target/issue-1210/d.rs
new file mode 100644
index 000000000..9279e6fc9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1210/d.rs
@@ -0,0 +1,4 @@
+// rustfmt-wrap_comments: true
+
+// trailing_spaces_in_comment!!
+// remove those from above
diff --git a/src/tools/rustfmt/tests/target/issue-1210/e.rs b/src/tools/rustfmt/tests/target/issue-1210/e.rs
new file mode 100644
index 000000000..55f80c6c3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1210/e.rs
@@ -0,0 +1,11 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 50
+
+// explicit line breaks should be kept in order to preserve the layout
+
+const foo: String =
+ "Suspendisse vel augue at felis tincidunt \
+ sollicitudin. Fusce arcu.
+ Duis et odio et leo
+ sollicitudin consequat. Aliquam \
+ lobortis. Phasellus condimentum.";
diff --git a/src/tools/rustfmt/tests/target/issue-1211.rs b/src/tools/rustfmt/tests/target/issue-1211.rs
new file mode 100644
index 000000000..de4c5c87e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1211.rs
@@ -0,0 +1,13 @@
+fn main() {
+ for iface in &ifaces {
+ match iface.addr {
+ get_if_addrs::IfAddr::V4(ref addr) => match addr.broadcast {
+ Some(ip) => {
+ sock.send_to(&buf, (ip, 8765)).expect("foobar");
+ }
+ _ => (),
+ },
+ _ => (),
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1214.rs b/src/tools/rustfmt/tests/target/issue-1214.rs
new file mode 100644
index 000000000..c622abb3a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1214.rs
@@ -0,0 +1,8 @@
+/*!
+# Example
+
+```
+ // Here goes some example
+```
+ */
+struct Item;
diff --git a/src/tools/rustfmt/tests/target/issue-1216.rs b/src/tools/rustfmt/tests/target/issue-1216.rs
new file mode 100644
index 000000000..d727c158a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1216.rs
@@ -0,0 +1,5 @@
+// rustfmt-normalize_comments: true
+enum E {
+ A, //* I am not a block comment (caused panic)
+ B,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1239.rs b/src/tools/rustfmt/tests/target/issue-1239.rs
new file mode 100644
index 000000000..e950200b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1239.rs
@@ -0,0 +1,11 @@
+fn foo() {
+ let with_alignment = if condition__uses_alignment_for_first_if__0
+ || condition__uses_alignment_for_first_if__1
+ || condition__uses_alignment_for_first_if__2
+ {
+ } else if condition__no_alignment_for_later_else__0
+ || condition__no_alignment_for_later_else__1
+ || condition__no_alignment_for_later_else__2
+ {
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1247.rs b/src/tools/rustfmt/tests/target/issue-1247.rs
new file mode 100644
index 000000000..16c63e0f5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1247.rs
@@ -0,0 +1,8 @@
+// rustfmt-max_width: 80
+
+fn foo() {
+ polyfill::slice::fill(
+ &mut self.pending[padding_pos..(self.algorithm.block_len - 8)],
+ 0,
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1255.rs b/src/tools/rustfmt/tests/target/issue-1255.rs
new file mode 100644
index 000000000..2d4633844
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1255.rs
@@ -0,0 +1,10 @@
+// Test for issue #1255
+// Default annotation incorrectly removed on associated types
+#![feature(specialization)]
+
+trait Trait {
+ type Type;
+}
+impl<T> Trait for T {
+ default type Type = u64; // 'default' should not be removed
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1278.rs b/src/tools/rustfmt/tests/target/issue-1278.rs
new file mode 100644
index 000000000..e25376561
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1278.rs
@@ -0,0 +1,9 @@
+// rustfmt-indent_style = "block"
+
+#![feature(pub_restricted)]
+
+mod inner_mode {
+ pub(super) fn func_name(abc: i32) -> i32 {
+ abc
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1350.rs b/src/tools/rustfmt/tests/target/issue-1350.rs
new file mode 100644
index 000000000..2cf65509c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1350.rs
@@ -0,0 +1,14 @@
+// rustfmt-max_width: 120
+// rustfmt-comment_width: 110
+
+impl Struct {
+ fn fun() {
+ let result = match <R::RequestResult as serde::Deserialize>::deserialize(&json) {
+ Ok(v) => v,
+ Err(e) => match <R::ErrorResult as serde::Deserialize>::deserialize(&json) {
+ Ok(v) => return Err(Error::with_json(v)),
+ Err(e2) => return Err(Error::with_json(e)),
+ },
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1366.rs b/src/tools/rustfmt/tests/target/issue-1366.rs
new file mode 100644
index 000000000..eee147baa
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1366.rs
@@ -0,0 +1,13 @@
+fn main() {
+ fn f() -> Option<i32> {
+ Some("fffffffsssssssssddddssssfffffddddff")
+ .map(|s| s)
+ .map(|s| s.to_string())
+ .map(|res| match Some(res) {
+ Some(ref s) if s == "" => 41,
+ Some(_) => 42,
+ _ => 43,
+ })
+ }
+ println!("{:?}", f())
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1397.rs b/src/tools/rustfmt/tests/target/issue-1397.rs
new file mode 100644
index 000000000..86b7a7841
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1397.rs
@@ -0,0 +1,25 @@
+pub enum TransactionState {
+ Committed(i64),
+}
+
+pub enum Packet {
+ Transaction { state: TransactionState },
+}
+
+fn baz(p: Packet) {
+ loop {
+ loop {
+ loop {
+ loop {
+ if let Packet::Transaction {
+ state: TransactionState::Committed(ts, ..),
+ ..
+ } = p
+ {
+ unreachable!()
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1468.rs b/src/tools/rustfmt/tests/target/issue-1468.rs
new file mode 100644
index 000000000..4c14a0f74
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1468.rs
@@ -0,0 +1,29 @@
+fn issue1468() {
+ euc_jp_decoder_functions!({
+ let trail_minus_offset = byte.wrapping_sub(0xA1);
+ // Fast-track Hiragana (60% according to Lunde)
+ // and Katakana (10% according to Lunde).
+ if jis0208_lead_minus_offset == 0x03 && trail_minus_offset < 0x53 {
+ // Hiragana
+ handle.write_upper_bmp(0x3041 + trail_minus_offset as u16)
+ } else if jis0208_lead_minus_offset == 0x04 && trail_minus_offset < 0x56 {
+ // Katakana
+ handle.write_upper_bmp(0x30A1 + trail_minus_offset as u16)
+ } else if trail_minus_offset > (0xFE - 0xA1) {
+ if byte < 0x80 {
+ return (
+ DecoderResult::Malformed(1, 0),
+ unread_handle_trail.unread(),
+ handle.written(),
+ );
+ }
+ return (
+ DecoderResult::Malformed(2, 0),
+ unread_handle_trail.consumed(),
+ handle.written(),
+ );
+ } else {
+ unreachable!();
+ }
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1598.rs b/src/tools/rustfmt/tests/target/issue-1598.rs
new file mode 100644
index 000000000..c7e02c961
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1598.rs
@@ -0,0 +1,6 @@
+fn main() {
+ //foo
+ /*
+ */
+ format!("hello");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1624.rs b/src/tools/rustfmt/tests/target/issue-1624.rs
new file mode 100644
index 000000000..477fc2735
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1624.rs
@@ -0,0 +1,6 @@
+// #1624
+pub unsafe fn some_long_function_name(
+ arg1: Type1,
+ arg2: Type2,
+) -> (SomeLongTypeName, AnotherLongTypeName, AnotherLongTypeName) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1681.rs b/src/tools/rustfmt/tests/target/issue-1681.rs
new file mode 100644
index 000000000..902765302
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1681.rs
@@ -0,0 +1,21 @@
+// rustfmt-max_width: 80
+
+// We would like to surround closure body with block when overflowing the last
+// argument of function call if the last argument has condition and without
+// block it may go multi lines.
+fn foo() {
+ refmut_map_result(self.cache.borrow_mut(), |cache| {
+ match cache.entry(cache_key) {
+ Occupied(entry) => Ok(entry.into_mut()),
+ Vacant(entry) => {
+ let statement = {
+ let sql = try!(entry.key().sql(source));
+ prepare_fn(&sql)
+ };
+
+ Ok(entry.insert(try!(statement)))
+ }
+ }
+ })
+ .map(MaybeCached::Cached)
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1693.rs b/src/tools/rustfmt/tests/target/issue-1693.rs
new file mode 100644
index 000000000..85421a123
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1693.rs
@@ -0,0 +1,10 @@
+fn issue1693() {
+ let pixel_data = vec![
+ (
+ f16::from_f32(0.82),
+ f16::from_f32(1.78),
+ f16::from_f32(0.21)
+ );
+ 256 * 256
+ ];
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1703.rs b/src/tools/rustfmt/tests/target/issue-1703.rs
new file mode 100644
index 000000000..4079ef4cf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1703.rs
@@ -0,0 +1,9 @@
+// rustfmt should not remove doc comments or comments inside attributes.
+
+/**
+This function has a block doc comment.
+ */
+fn test_function() {}
+
+#[foo /* do not remove this! */]
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-1800.rs b/src/tools/rustfmt/tests/target/issue-1800.rs
new file mode 100644
index 000000000..06c5cfd05
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1800.rs
@@ -0,0 +1,3 @@
+#![doc(html_root_url = "http://example.com")]
+#[cfg(feature = "foo")]
+fn a() {}
diff --git a/src/tools/rustfmt/tests/target/issue-1802.rs b/src/tools/rustfmt/tests/target/issue-1802.rs
new file mode 100644
index 000000000..ef7ee8910
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1802.rs
@@ -0,0 +1,9 @@
+// rustfmt-tab_spaces: 2
+// rustfmt-max_width: 30
+
+enum F {
+ X {
+ a: dddddddddddddd,
+ b: eeeeeeeeeeeeeee,
+ },
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1824.rs b/src/tools/rustfmt/tests/target/issue-1824.rs
new file mode 100644
index 000000000..1c4c2db46
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1824.rs
@@ -0,0 +1,5 @@
+pub trait Ingredient
+where
+ Self: Send,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/issue-1914.rs b/src/tools/rustfmt/tests/target/issue-1914.rs
new file mode 100644
index 000000000..d2d532af1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-1914.rs
@@ -0,0 +1,7 @@
+// rustfmt-max_width: 80
+
+extern "C" {
+ #[link_name = "_ZN7MyClass26example_check_no_collisionE"]
+ pub static mut MyClass_example_check_no_collision:
+ *const ::std::os::raw::c_int;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2025.rs b/src/tools/rustfmt/tests/target/issue-2025.rs
new file mode 100644
index 000000000..38bf369be
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2025.rs
@@ -0,0 +1,4 @@
+// See if rustfmt removes empty lines on top of the file.
+pub fn foo() {
+ println!("hello, world");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2103.rs b/src/tools/rustfmt/tests/target/issue-2103.rs
new file mode 100644
index 000000000..5a043d54b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2103.rs
@@ -0,0 +1,14 @@
+struct X
+where
+ i32: Sized,
+{
+ x: i32,
+}
+
+struct X
+// with comment
+where
+ i32: Sized,
+{
+ x: i32,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2111.rs b/src/tools/rustfmt/tests/target/issue-2111.rs
new file mode 100644
index 000000000..42c1862e8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2111.rs
@@ -0,0 +1,26 @@
+// An import with single line comments.
+use super::{
+ DelayChoice,
+ Destinations,
+ Holding,
+ LodaModel,
+ MethodDescription,
+ ModelBehaviour,
+ ModelEdges,
+ ModelProperties,
+ ModelRequestGraph,
+ ModelSelector,
+ RequestDescription,
+ StringMap,
+ Switch,
+ // ModelMetaData,
+ // Generated,
+ // SecondsString,
+ // DateString,
+ // ModelConfiguration,
+ // ModelRequests,
+ // RestResponse,
+ // RestResponseCode,
+ // UniformHolding
+ SCHEMA_VERSIONS,
+};
diff --git a/src/tools/rustfmt/tests/target/issue-2123.rs b/src/tools/rustfmt/tests/target/issue-2123.rs
new file mode 100644
index 000000000..5e9917b40
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2123.rs
@@ -0,0 +1,6 @@
+// rustfmt-wrap_comments: true
+
+//hello
+//world
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2164.rs b/src/tools/rustfmt/tests/target/issue-2164.rs
new file mode 100644
index 000000000..dbf92107c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2164.rs
@@ -0,0 +1,135 @@
+// A stress test against code generated by bindgen.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct emacs_env_25 {
+ pub size: isize,
+ pub private_members: *mut emacs_env_private,
+ pub make_global_ref: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, any_reference: emacs_value) -> emacs_value,
+ >,
+ pub free_global_ref: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, global_reference: emacs_value),
+ >,
+ pub non_local_exit_check:
+ ::std::option::Option<unsafe extern "C" fn(env: *mut emacs_env) -> emacs_funcall_exit>,
+ pub non_local_exit_clear: ::std::option::Option<unsafe extern "C" fn(env: *mut emacs_env)>,
+ pub non_local_exit_get: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ non_local_exit_symbol_out: *mut emacs_value,
+ non_local_exit_data_out: *mut emacs_value,
+ ) -> emacs_funcall_exit,
+ >,
+ pub non_local_exit_signal: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ non_local_exit_symbol: emacs_value,
+ non_local_exit_data: emacs_value,
+ ),
+ >,
+ pub non_local_exit_throw: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, tag: emacs_value, value: emacs_value),
+ >,
+ pub make_function: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ min_arity: isize,
+ max_arity: isize,
+ function: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ nargs: isize,
+ args: *mut emacs_value,
+ arg1: *mut ::libc::c_void,
+ ) -> emacs_value,
+ >,
+ documentation: *const ::libc::c_char,
+ data: *mut ::libc::c_void,
+ ) -> emacs_value,
+ >,
+ pub funcall: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ function: emacs_value,
+ nargs: isize,
+ args: *mut emacs_value,
+ ) -> emacs_value,
+ >,
+ pub intern: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ symbol_name: *const ::libc::c_char,
+ ) -> emacs_value,
+ >,
+ pub type_of: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, value: emacs_value) -> emacs_value,
+ >,
+ pub is_not_nil: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, value: emacs_value) -> bool,
+ >,
+ pub eq: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, a: emacs_value, b: emacs_value) -> bool,
+ >,
+ pub extract_integer: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, value: emacs_value) -> intmax_t,
+ >,
+ pub make_integer: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, value: intmax_t) -> emacs_value,
+ >,
+ pub extract_float:
+ ::std::option::Option<unsafe extern "C" fn(env: *mut emacs_env, value: emacs_value) -> f64>,
+ pub make_float:
+ ::std::option::Option<unsafe extern "C" fn(env: *mut emacs_env, value: f64) -> emacs_value>,
+ pub copy_string_contents: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ value: emacs_value,
+ buffer: *mut ::libc::c_char,
+ size_inout: *mut isize,
+ ) -> bool,
+ >,
+ pub make_string: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ contents: *const ::libc::c_char,
+ length: isize,
+ ) -> emacs_value,
+ >,
+ pub make_user_ptr: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ fin: ::std::option::Option<unsafe extern "C" fn(arg1: *mut ::libc::c_void)>,
+ ptr: *mut ::libc::c_void,
+ ) -> emacs_value,
+ >,
+ pub get_user_ptr: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, uptr: emacs_value) -> *mut ::libc::c_void,
+ >,
+ pub set_user_ptr: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, uptr: emacs_value, ptr: *mut ::libc::c_void),
+ >,
+ pub get_user_finalizer: ::std::option::Option<
+ unsafe extern "C" fn(
+ arg1: *mut ::libc::c_void,
+ env: *mut emacs_env,
+ uptr: emacs_value,
+ ) -> ::std::option::Option<
+ unsafe extern "C" fn(arg1: *mut ::libc::c_void, env: *mut emacs_env, uptr: emacs_value),
+ >,
+ >,
+ pub set_user_finalizer: ::std::option::Option<
+ unsafe extern "C" fn(
+ env: *mut emacs_env,
+ uptr: emacs_value,
+ fin: ::std::option::Option<unsafe extern "C" fn(arg1: *mut ::libc::c_void)>,
+ ),
+ >,
+ pub vec_get: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, vec: emacs_value, i: isize) -> emacs_value,
+ >,
+ pub vec_set: ::std::option::Option<
+ unsafe extern "C" fn(env: *mut emacs_env, vec: emacs_value, i: isize, val: emacs_value),
+ >,
+ pub vec_size:
+ ::std::option::Option<unsafe extern "C" fn(env: *mut emacs_env, vec: emacs_value) -> isize>,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2179/one.rs b/src/tools/rustfmt/tests/target/issue-2179/one.rs
new file mode 100644
index 000000000..3f98acc8d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2179/one.rs
@@ -0,0 +1,37 @@
+// rustfmt-version: One
+// rustfmt-error_on_line_overflow: false
+
+fn issue_2179() {
+ let (opts, rustflags, clear_env_rust_log) = {
+ // We mustn't lock configuration for the whole build process
+ let rls_config = rls_config.lock().unwrap();
+
+ let opts = CargoOptions::new(&rls_config);
+ trace!("Cargo compilation options:\n{:?}", opts);
+ let rustflags = prepare_cargo_rustflags(&rls_config);
+
+ // Warn about invalid specified bin target or package depending on current mode
+ // TODO: Return client notifications along with diagnostics to inform the user
+ if !rls_config.workspace_mode {
+ let cur_pkg_targets = ws.current().unwrap().targets();
+
+ if let &Some(ref build_bin) = rls_config.build_bin.as_ref() {
+ let mut bins = cur_pkg_targets.iter().filter(|x| x.is_bin());
+ if let None = bins.find(|x| x.name() == build_bin) {
+ warn!(
+ "cargo - couldn't find binary `{}` specified in `build_bin` configuration",
+ build_bin
+ );
+ }
+ }
+ } else {
+ for package in &opts.package {
+ if let None = ws.members().find(|x| x.name() == package) {
+ warn!("cargo - couldn't find member package `{}` specified in `analyze_package` configuration", package);
+ }
+ }
+ }
+
+ (opts, rustflags, rls_config.clear_env_rust_log)
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2179/two.rs b/src/tools/rustfmt/tests/target/issue-2179/two.rs
new file mode 100644
index 000000000..96531509e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2179/two.rs
@@ -0,0 +1,40 @@
+// rustfmt-version: Two
+// rustfmt-error_on_line_overflow: false
+
+fn issue_2179() {
+ let (opts, rustflags, clear_env_rust_log) = {
+ // We mustn't lock configuration for the whole build process
+ let rls_config = rls_config.lock().unwrap();
+
+ let opts = CargoOptions::new(&rls_config);
+ trace!("Cargo compilation options:\n{:?}", opts);
+ let rustflags = prepare_cargo_rustflags(&rls_config);
+
+ // Warn about invalid specified bin target or package depending on current mode
+ // TODO: Return client notifications along with diagnostics to inform the user
+ if !rls_config.workspace_mode {
+ let cur_pkg_targets = ws.current().unwrap().targets();
+
+ if let &Some(ref build_bin) = rls_config.build_bin.as_ref() {
+ let mut bins = cur_pkg_targets.iter().filter(|x| x.is_bin());
+ if let None = bins.find(|x| x.name() == build_bin) {
+ warn!(
+ "cargo - couldn't find binary `{}` specified in `build_bin` configuration",
+ build_bin
+ );
+ }
+ }
+ } else {
+ for package in &opts.package {
+ if let None = ws.members().find(|x| x.name() == package) {
+ warn!(
+ "cargo - couldn't find member package `{}` specified in `analyze_package` configuration",
+ package
+ );
+ }
+ }
+ }
+
+ (opts, rustflags, rls_config.clear_env_rust_log)
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2197.rs b/src/tools/rustfmt/tests/target/issue-2197.rs
new file mode 100644
index 000000000..d42c08e19
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2197.rs
@@ -0,0 +1,17 @@
+// rustfmt-max_width: 79
+// rustfmt-wrap_comments: true
+
+/// ```rust
+/// unsafe fn sum_sse2(x: i32x4) -> i32 {
+/// let x = vendor::_mm_add_epi32(
+/// x,
+/// vendor::_mm_srli_si128(x.into(), 8).into(),
+/// );
+/// let x = vendor::_mm_add_epi32(
+/// x,
+/// vendor::_mm_srli_si128(x.into(), 4).into(),
+/// );
+/// vendor::_mm_cvtsi128_si32(x)
+/// }
+/// ```
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2256.rs b/src/tools/rustfmt/tests/target/issue-2256.rs
new file mode 100644
index 000000000..0a59c3083
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2256.rs
@@ -0,0 +1,7 @@
+// こんにちは
+use std::borrow::Cow;
+
+/* comment 1 */
+/* comment 2 */
+
+/* comment 3 */
diff --git a/src/tools/rustfmt/tests/target/issue-2324.rs b/src/tools/rustfmt/tests/target/issue-2324.rs
new file mode 100644
index 000000000..9211b24d8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2324.rs
@@ -0,0 +1,7 @@
+// nested function calls with cast.
+fn main() {
+ self.ptr
+ .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
+ self.ptr
+ .set(intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize) as *mut u8);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2329.rs b/src/tools/rustfmt/tests/target/issue-2329.rs
new file mode 100644
index 000000000..e36e9546b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2329.rs
@@ -0,0 +1,30 @@
+// Comments with characters which must be represented by multibyte.
+
+// フー
+use foo;
+// バー
+use bar;
+
+impl MyStruct {
+ // コメント
+ fn f1() {} // こんにちは
+ fn f2() {} // ありがとう
+ // コメント
+}
+
+trait MyTrait {
+ // コメント
+ fn f1() {} // こんにちは
+ fn f2() {} // ありがとう
+ // コメント
+}
+
+fn main() {
+ // コメント
+ let x = 1; // X
+ println!(
+ "x = {}", // xの値
+ x, // X
+ );
+ // コメント
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2342.rs b/src/tools/rustfmt/tests/target/issue-2342.rs
new file mode 100644
index 000000000..f9c26857e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2342.rs
@@ -0,0 +1,6 @@
+// rustfmt-max_width: 80
+
+struct Foo {
+ #[cfg(feature = "serde")]
+ bytes: [[u8; 17]; 5], // Same size as signature::ED25519_PKCS8_V2_LEN
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2346.rs b/src/tools/rustfmt/tests/target/issue-2346.rs
new file mode 100644
index 000000000..07817221a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2346.rs
@@ -0,0 +1,4 @@
+// rustfmt-normalize_comments: true
+// the following empty comment should not have any trailing space added.
+//
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2401.rs b/src/tools/rustfmt/tests/target/issue-2401.rs
new file mode 100644
index 000000000..ec8f27b73
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2401.rs
@@ -0,0 +1,7 @@
+// rustfmt-hard_tabs = true
+// rustfmt-normalize_comments = true
+
+/// ```
+/// println!("Hello, World!");
+/// ```
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2445.rs b/src/tools/rustfmt/tests/target/issue-2445.rs
new file mode 100644
index 000000000..1bc7752fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2445.rs
@@ -0,0 +1,21 @@
+test!(RunPassPretty {
+ // comment
+ path: "src/test/run-pass/pretty",
+ mode: "pretty",
+ suite: "run-pass",
+ default: false,
+ host: true // should, force, , no trailing comma here
+});
+
+test!(RunPassPretty {
+ // comment
+ path: "src/test/run-pass/pretty",
+ mode: "pretty",
+ suite: "run-pass",
+ default: false,
+ host: true, // should, , preserve, the trailing comma
+});
+
+test!(Test {
+ field: i32, // comment
+});
diff --git a/src/tools/rustfmt/tests/target/issue-2446.rs b/src/tools/rustfmt/tests/target/issue-2446.rs
new file mode 100644
index 000000000..be62e9c9c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2446.rs
@@ -0,0 +1,9 @@
+enum Issue2446 {
+ V {
+ f: u8, // x
+ },
+}
+
+enum Issue2446TrailingCommentsOnly {
+ V { f: u8 /* */ },
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2479.rs b/src/tools/rustfmt/tests/target/issue-2479.rs
new file mode 100644
index 000000000..3683ab220
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2479.rs
@@ -0,0 +1,12 @@
+// Long attributes.
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum POLARITYR {
+ #[doc = "Task mode: No effect on pin from OUT[n] task. Event mode: no IN[n] event generated on pin activity."]
+ NONE,
+ #[doc = "Task mode: Set pin from OUT[n] task. Event mode: Generate IN[n] event when rising edge on pin."]
+ LOTOHI,
+ #[doc = "Task mode: Clear pin from OUT[n] task. Event mode: Generate IN[n] event when falling edge on pin."]
+ HITOLO,
+ #[doc = "Task mode: Toggle pin from OUT[n]. Event mode: Generate IN[n] when any change on pin."]
+ TOGGLE,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2482/a.rs b/src/tools/rustfmt/tests/target/issue-2482/a.rs
new file mode 100644
index 000000000..fbbcb52a8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2482/a.rs
@@ -0,0 +1,9 @@
+// rustfmt-reorder_modules: true
+
+// Do not reorder inline modules.
+
+mod c;
+mod a {
+ fn a() {}
+}
+mod b;
diff --git a/src/tools/rustfmt/tests/target/issue-2482/b.rs b/src/tools/rustfmt/tests/target/issue-2482/b.rs
new file mode 100644
index 000000000..40a8d9421
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2482/b.rs
@@ -0,0 +1 @@
+pub fn b() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2482/c.rs b/src/tools/rustfmt/tests/target/issue-2482/c.rs
new file mode 100644
index 000000000..d93754551
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2482/c.rs
@@ -0,0 +1 @@
+pub fn c() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2496.rs b/src/tools/rustfmt/tests/target/issue-2496.rs
new file mode 100644
index 000000000..60c4f55dd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2496.rs
@@ -0,0 +1,14 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ match option {
+ None => some_function(first_reasonably_long_argument,
+ second_reasonably_long_argument),
+ }
+}
+
+fn main() {
+ match option {
+ None => some_function(first_reasonably_long_argument,
+ second_reasonably_long_argument),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2520.rs b/src/tools/rustfmt/tests/target/issue-2520.rs
new file mode 100644
index 000000000..7c134d397
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2520.rs
@@ -0,0 +1,13 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+//! ```rust
+//! println!("hello, world");
+//! ```
+
+#![deny(missing_docs)]
+
+//! ```rust
+//! println!("hello, world");
+
+#![deny(missing_docs)]
diff --git a/src/tools/rustfmt/tests/target/issue-2523.rs b/src/tools/rustfmt/tests/target/issue-2523.rs
new file mode 100644
index 000000000..612f93249
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2523.rs
@@ -0,0 +1,20 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+// Do not unindent macro calls in comment with unformattable syntax.
+//! ```rust
+//! let x = 3 ;
+//! some_macro!(pub fn fn foo() (
+//! println!("Don't unindent me!");
+//! ));
+//! ```
+
+// Format items that appear as arguments of macro call.
+//! ```rust
+//! let x = 3;
+//! some_macro!(
+//! pub fn foo() {
+//! println!("Don't unindent me!");
+//! }
+//! );
+//! ```
diff --git a/src/tools/rustfmt/tests/target/issue-2526.rs b/src/tools/rustfmt/tests/target/issue-2526.rs
new file mode 100644
index 000000000..7dd58aba3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2526.rs
@@ -0,0 +1,8 @@
+// Test that rustfmt will not warn about comments exceeding max width around lifetime.
+// See #2526.
+
+// comment comment comment comment comment comment comment comment comment comment comment comment comment
+fn foo() -> F<'a> {
+ bar()
+}
+// comment comment comment comment comment comment comment comment comment comment comment comment comment
diff --git a/src/tools/rustfmt/tests/target/issue-2551.rs b/src/tools/rustfmt/tests/target/issue-2551.rs
new file mode 100644
index 000000000..d7b0d625b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2551.rs
@@ -0,0 +1,3 @@
+mcro!(func(A {
+ a: 12345667800111111111111,
+}));
diff --git a/src/tools/rustfmt/tests/target/issue-2554.rs b/src/tools/rustfmt/tests/target/issue-2554.rs
new file mode 100644
index 000000000..d5f0563a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2554.rs
@@ -0,0 +1,13 @@
+// #2554
+// Do not add the beginning vert to the first match arm's pattern.
+
+fn main() {
+ match foo(|_| {
+ bar(|_| {
+ //
+ })
+ }) {
+ Ok(()) => (),
+ Err(_) => (),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2582.rs b/src/tools/rustfmt/tests/target/issue-2582.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2582.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2641.rs b/src/tools/rustfmt/tests/target/issue-2641.rs
new file mode 100644
index 000000000..fbf5326c3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2641.rs
@@ -0,0 +1,3 @@
+macro_rules! a {
+ () => {{}};
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2644.rs b/src/tools/rustfmt/tests/target/issue-2644.rs
new file mode 100644
index 000000000..a87e4c0b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2644.rs
@@ -0,0 +1,8 @@
+// rustfmt-max_width: 80
+fn foo(e: Enum) {
+ match e {
+ Enum::Var { element1, element2 } => {
+ return;
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo.rs b/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo.rs
new file mode 100644
index 000000000..5340816d6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo.rs
@@ -0,0 +1,4 @@
+// rustfmt-config: skip_children.toml
+mod bar;
+
+mod baz {}
diff --git a/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo/bar.rs b/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo/bar.rs
new file mode 100644
index 000000000..9ceacd59d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/foo/bar.rs
@@ -0,0 +1 @@
+fn dummy() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/lib.rs b/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/lib.rs
new file mode 100644
index 000000000..82425de56
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2673-nonmodrs-mods/lib.rs
@@ -0,0 +1,6 @@
+#![feature(non_modrs_mods)]
+
+// Test that submodules in non-mod.rs files work. This is just an idempotence
+// test since we just want to verify that rustfmt doesn't fail.
+
+mod foo;
diff --git a/src/tools/rustfmt/tests/target/issue-2728.rs b/src/tools/rustfmt/tests/target/issue-2728.rs
new file mode 100644
index 000000000..6cb41b75b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2728.rs
@@ -0,0 +1,8 @@
+// rustfmt-wrap_comments: true
+// rustfmt-newline_style: Windows
+
+//! ```rust
+//! extern crate uom;
+//! ```
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2759.rs b/src/tools/rustfmt/tests/target/issue-2759.rs
new file mode 100644
index 000000000..b7176ec66
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2759.rs
@@ -0,0 +1,65 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 89
+
+// Code block in doc comments that will exceed max width.
+/// ```rust
+/// extern crate actix_web;
+/// use actix_web::{actix, server, App, HttpResponse};
+///
+/// fn main() {
+/// // Run actix system, this method actually starts all async processes
+/// actix::System::run(|| {
+/// server::new(|| App::new().resource("/", |r| r.h(|_| HttpResponse::Ok())))
+/// .bind("127.0.0.1:0")
+/// .expect("Can not bind to 127.0.0.1:0")
+/// .start();
+/// # actix::Arbiter::system().do_send(actix::msgs::SystemExit(0));
+/// });
+/// }
+/// ```
+fn foo() {}
+
+// Code block in doc comments without the closing '```'.
+/// ```rust
+/// # extern crate actix_web;
+/// use actix_web::{App, HttpResponse, http};
+///
+/// fn main() {
+/// let app = App::new()
+/// .resource(
+/// "/", |r| r.method(http::Method::GET).f(|r| HttpResponse::Ok()))
+/// .finish();
+/// }
+fn bar() {}
+
+// `#` with indent.
+/// ```rust
+/// # use std::thread;
+/// # extern crate actix_web;
+/// use actix_web::{server, App, HttpResponse};
+///
+/// struct State1;
+///
+/// struct State2;
+///
+/// fn main() {
+/// # thread::spawn(|| {
+/// server::new(|| {
+/// vec![
+/// App::with_state(State1)
+/// .prefix("/app1")
+/// .resource("/", |r| r.f(|r| HttpResponse::Ok()))
+/// .boxed(),
+/// App::with_state(State2)
+/// .prefix("/app2")
+/// .resource("/", |r| r.f(|r| HttpResponse::Ok()))
+/// .boxed(),
+/// ]
+/// })
+/// .bind("127.0.0.1:8080")
+/// .unwrap()
+/// .run()
+/// # });
+/// }
+/// ```
+fn foobar() {}
diff --git a/src/tools/rustfmt/tests/target/issue-2761.rs b/src/tools/rustfmt/tests/target/issue-2761.rs
new file mode 100644
index 000000000..ae4086617
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2761.rs
@@ -0,0 +1,15 @@
+const DATA: &'static [u8] = &[
+ 0x42, 0x50, 0x54, 0x44, //type
+ 0x23, 0x00, 0x00, 0x00, //size
+ 0x00, 0x00, 0x04, 0x00, //flags
+ 0xEC, 0x0C, 0x00, 0x00, //id
+ 0x00, 0x00, 0x00, 0x00, //revision
+ 0x2B, 0x00, //version
+ 0x00, 0x00, //unknown
+ 0x42, 0x50, 0x54, 0x4E, //field type
+ 0x1D, 0x00, //field size
+ 0x19, 0x00, 0x00, 0x00, //decompressed field size
+ 0x75, 0xc5, 0x21, 0x0d, 0x00, 0x00, 0x08, 0x05, 0xd1, 0x6c, //field data (compressed)
+ 0x6c, 0xdc, 0x57, 0x48, 0x3c, 0xfd, 0x5b, 0x5c, 0x02, 0xd4, //field data (compressed)
+ 0x6b, 0x32, 0xb5, 0xdc, 0xa3, //field data (compressed)
+];
diff --git a/src/tools/rustfmt/tests/target/issue-2781.rs b/src/tools/rustfmt/tests/target/issue-2781.rs
new file mode 100644
index 000000000..f144d716b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2781.rs
@@ -0,0 +1,11 @@
+pub // Oh, no. A line comment.
+struct Foo {}
+
+pub /* Oh, no. A block comment. */ struct Foo {}
+
+mod inner {
+ pub // Oh, no. A line comment.
+ struct Foo {}
+
+ pub /* Oh, no. A block comment. */ struct Foo {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2794.rs b/src/tools/rustfmt/tests/target/issue-2794.rs
new file mode 100644
index 000000000..951c0af20
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2794.rs
@@ -0,0 +1,12 @@
+// rustfmt-indent_style: Block
+// rustfmt-imports_indent: Block
+// rustfmt-imports_layout: Vertical
+
+use std::{
+ env,
+ fs,
+ io::{
+ Read,
+ Write,
+ },
+};
diff --git a/src/tools/rustfmt/tests/target/issue-2810.rs b/src/tools/rustfmt/tests/target/issue-2810.rs
new file mode 100644
index 000000000..34140c7a1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2810.rs
@@ -0,0 +1,14 @@
+// rustfmt-newline_style: Windows
+
+#[macro_export]
+macro_rules! hmmm___ffi_error {
+ ($result:ident) => {
+ pub struct $result {
+ success: bool,
+ }
+
+ impl $result {
+ pub fn foo(self) {}
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2835.rs b/src/tools/rustfmt/tests/target/issue-2835.rs
new file mode 100644
index 000000000..21e8ce411
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2835.rs
@@ -0,0 +1,4 @@
+// rustfmt-brace_style: AlwaysNextLine
+// rustfmt-fn_single_line: true
+
+fn lorem() -> i32 { 42 }
diff --git a/src/tools/rustfmt/tests/target/issue-2863.rs b/src/tools/rustfmt/tests/target/issue-2863.rs
new file mode 100644
index 000000000..35a80f7a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2863.rs
@@ -0,0 +1,54 @@
+// rustfmt-reorder_impl_items: true
+
+impl<T> IntoIterator for SafeVec<T> {
+ type Bar = u32;
+ type BarFoo = u32;
+ type FooBar = u32;
+ // comment on FoooooBar
+ type FoooooBar = u32;
+ type IntoIter = self::IntoIter<T>;
+ type Item = T;
+
+ type E = impl Trait;
+ type F = impl Trait;
+
+ const AnotherConst: i32 = 100;
+ const SomeConst: i32 = 100;
+
+ // comment on foo()
+ fn foo() {
+ println!("hello, world");
+ }
+
+ fn foo1() {
+ println!("hello, world");
+ }
+
+ fn foo2() {
+ println!("hello, world");
+ }
+
+ fn foo3() {
+ println!("hello, world");
+ }
+
+ fn foo4() {
+ println!("hello, world");
+ }
+
+ fn foo5() {
+ println!("hello, world");
+ }
+
+ fn foo6() {
+ println!("hello, world");
+ }
+
+ fn foo7() {
+ println!("hello, world");
+ }
+
+ fn foo8() {
+ println!("hello, world");
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2869.rs b/src/tools/rustfmt/tests/target/issue-2869.rs
new file mode 100644
index 000000000..6a68c2d95
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2869.rs
@@ -0,0 +1,41 @@
+// rustfmt-struct_field_align_threshold: 50
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
+struct AuditLog1 {
+ creation_time: String,
+ id: String,
+ operation: String,
+ organization_id: String,
+ record_type: u32,
+ result_status: Option<String>,
+ #[serde(rename = "ClientIP")]
+ client_ip: Option<IpAddr>,
+ object_id: String,
+ actor: Option<Vec<IDType>>,
+ actor_context_id: Option<String>,
+ actor_ip_address: Option<IpAddr>,
+ azure_active_directory_event_type: Option<u8>,
+
+ #[serde(rename = "very")]
+ aaaaa: String,
+ #[serde(rename = "cool")]
+ bb: i32,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
+struct AuditLog2 {
+ creation_time: String,
+ id: String,
+ operation: String,
+ organization_id: String,
+ record_type: u32,
+ result_status: Option<String>,
+ client_ip: Option<IpAddr>,
+ object_id: String,
+ actor: Option<Vec<IDType>>,
+ actor_context_id: Option<String>,
+ actor_ip_address: Option<IpAddr>,
+ azure_active_directory_event_type: Option<u8>,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2896.rs b/src/tools/rustfmt/tests/target/issue-2896.rs
new file mode 100644
index 000000000..6fb6b12ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2896.rs
@@ -0,0 +1,165 @@
+extern crate differential_dataflow;
+extern crate rand;
+extern crate timely;
+
+use rand::{Rng, SeedableRng, StdRng};
+
+use timely::dataflow::operators::*;
+
+use differential_dataflow::input::InputSession;
+use differential_dataflow::operators::*;
+use differential_dataflow::AsCollection;
+
+// mod loglikelihoodratio;
+
+fn main() {
+ // define a new timely dataflow computation.
+ timely::execute_from_args(std::env::args().skip(6), move |worker| {
+ // capture parameters of the experiment.
+ let users: usize = std::env::args().nth(1).unwrap().parse().unwrap();
+ let items: usize = std::env::args().nth(2).unwrap().parse().unwrap();
+ let scale: usize = std::env::args().nth(3).unwrap().parse().unwrap();
+ let batch: usize = std::env::args().nth(4).unwrap().parse().unwrap();
+ let noisy: bool = std::env::args().nth(5).unwrap() == "noisy";
+
+ let index = worker.index();
+ let peers = worker.peers();
+
+ let (input, probe) = worker.dataflow(|scope| {
+ // input of (user, item) collection.
+ let (input, occurrences) = scope.new_input();
+ let occurrences = occurrences.as_collection();
+
+ //TODO adjust code to only work with upper triangular half of cooccurrence matrix
+
+ /* Compute the cooccurrence matrix C = A'A from the binary interaction matrix A. */
+ let cooccurrences = occurrences
+ .join_map(&occurrences, |_user, &item_a, &item_b| (item_a, item_b))
+ .filter(|&(item_a, item_b)| item_a != item_b)
+ .count();
+
+ /* compute the rowsums of C indicating how often we encounter individual items. */
+ let row_sums = occurrences.map(|(_user, item)| item).count();
+
+ // row_sums.inspect(|record| println!("[row_sums] {:?}", record));
+
+ /* Join the cooccurrence pairs with the corresponding row sums. */
+ let mut cooccurrences_with_row_sums = cooccurrences
+ .map(|((item_a, item_b), num_cooccurrences)| (item_a, (item_b, num_cooccurrences)))
+ .join_map(
+ &row_sums,
+ |&item_a, &(item_b, num_cooccurrences), &row_sum_a| {
+ assert!(row_sum_a > 0);
+ (item_b, (item_a, num_cooccurrences, row_sum_a))
+ },
+ )
+ .join_map(
+ &row_sums,
+ |&item_b, &(item_a, num_cooccurrences, row_sum_a), &row_sum_b| {
+ assert!(row_sum_a > 0);
+ assert!(row_sum_b > 0);
+ (item_a, (item_b, num_cooccurrences, row_sum_a, row_sum_b))
+ },
+ );
+
+ // cooccurrences_with_row_sums
+ // .inspect(|record| println!("[cooccurrences_with_row_sums] {:?}", record));
+
+ // //TODO compute top-k "similar items" per item
+ // /* Compute LLR scores for each item pair. */
+ // let llr_scores = cooccurrences_with_row_sums.map(
+ // |(item_a, (item_b, num_cooccurrences, row_sum_a, row_sum_b))| {
+
+ // println!(
+ // "[llr_scores] item_a={} item_b={}, num_cooccurrences={} row_sum_a={} row_sum_b={}",
+ // item_a, item_b, num_cooccurrences, row_sum_a, row_sum_b);
+
+ // let k11: isize = num_cooccurrences;
+ // let k12: isize = row_sum_a as isize - k11;
+ // let k21: isize = row_sum_b as isize - k11;
+ // let k22: isize = 10000 - k12 - k21 + k11;
+
+ // let llr_score = loglikelihoodratio::log_likelihood_ratio(k11, k12, k21, k22);
+
+ // ((item_a, item_b), llr_score)
+ // });
+
+ if noisy {
+ cooccurrences_with_row_sums =
+ cooccurrences_with_row_sums.inspect(|x| println!("change: {:?}", x));
+ }
+
+ let probe = cooccurrences_with_row_sums.probe();
+ /*
+ // produce the (item, item) collection
+ let cooccurrences = occurrences
+ .join_map(&occurrences, |_user, &item_a, &item_b| (item_a, item_b));
+ // count the occurrences of each item.
+ let counts = cooccurrences
+ .map(|(item_a,_)| item_a)
+ .count();
+ // produce ((item1, item2), count1, count2, count12) tuples
+ let cooccurrences_with_counts = cooccurrences
+ .join_map(&counts, |&item_a, &item_b, &count_item_a| (item_b, (item_a, count_item_a)))
+ .join_map(&counts, |&item_b, &(item_a, count_item_a), &count_item_b| {
+ ((item_a, item_b), count_item_a, count_item_b)
+ });
+ let probe = cooccurrences_with_counts
+ .inspect(|x| println!("change: {:?}", x))
+ .probe();
+ */
+ (input, probe)
+ });
+
+ let seed: &[_] = &[1, 2, 3, index];
+ let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions
+ let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions
+
+ let mut input = InputSession::from(input);
+
+ for count in 0..scale {
+ if count % peers == index {
+ let user = rng1.gen_range(0, users);
+ let item = rng1.gen_range(0, items);
+ // println!("[INITIAL INPUT] ({}, {})", user, item);
+ input.insert((user, item));
+ }
+ }
+
+ // load the initial data up!
+ while probe.less_than(input.time()) {
+ worker.step();
+ }
+
+ for round in 1.. {
+ for element in (round * batch)..((round + 1) * batch) {
+ if element % peers == index {
+ // advance the input timestamp.
+ input.advance_to(round * batch);
+ // insert a new item.
+ let user = rng1.gen_range(0, users);
+ let item = rng1.gen_range(0, items);
+ if noisy {
+ println!("[INPUT: insert] ({}, {})", user, item);
+ }
+ input.insert((user, item));
+ // remove an old item.
+ let user = rng2.gen_range(0, users);
+ let item = rng2.gen_range(0, items);
+ if noisy {
+ println!("[INPUT: remove] ({}, {})", user, item);
+ }
+ input.remove((user, item));
+ }
+ }
+
+ input.advance_to(round * batch);
+ input.flush();
+
+ while probe.less_than(input.time()) {
+ worker.step();
+ }
+ }
+ })
+ .unwrap();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2916.rs b/src/tools/rustfmt/tests/target/issue-2916.rs
new file mode 100644
index 000000000..fb07cc806
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2916.rs
@@ -0,0 +1,2 @@
+a_macro!(name<Param1, Param2>,
+);
diff --git a/src/tools/rustfmt/tests/target/issue-2917/minimal.rs b/src/tools/rustfmt/tests/target/issue-2917/minimal.rs
new file mode 100644
index 000000000..e81e1e6a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2917/minimal.rs
@@ -0,0 +1,8 @@
+macro_rules! foo {
+ () => {
+ // comment
+ /*
+
+ */
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2917/packed_simd.rs b/src/tools/rustfmt/tests/target/issue-2917/packed_simd.rs
new file mode 100644
index 000000000..274614f83
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2917/packed_simd.rs
@@ -0,0 +1,63 @@
+// rustfmt-wrap_comments: true
+//! Implements `From` and `Into` for vector types.
+
+macro_rules! impl_from_vector {
+ ([$elem_ty:ident; $elem_count:expr]: $id:ident | $test_tt:tt | $source:ident) => {
+ impl From<$source> for $id {
+ #[inline]
+ fn from(source: $source) -> Self {
+ fn static_assert_same_number_of_lanes<T, U>()
+ where
+ T: crate::sealed::Simd,
+ U: crate::sealed::Simd<LanesType = T::LanesType>,
+ {
+ }
+ use llvm::simd_cast;
+ static_assert_same_number_of_lanes::<$id, $source>();
+ Simd(unsafe { simd_cast(source.0) })
+ }
+ }
+
+ // FIXME: `Into::into` is not inline, but due to
+ // the blanket impl in `std`, which is not
+ // marked `default`, we cannot override it here with
+ // specialization.
+ /*
+ impl Into<$id> for $source {
+ #[inline]
+ fn into(self) -> $id {
+ unsafe { simd_cast(self) }
+ }
+ }
+ */
+
+ test_if! {
+ $test_tt:
+ interpolate_idents! {
+ mod [$id _from_ $source] {
+ use super::*;
+ #[test]
+ fn from() {
+ assert_eq!($id::lanes(), $source::lanes());
+ let source: $source = Default::default();
+ let vec: $id = Default::default();
+
+ let e = $id::from(source);
+ assert_eq!(e, vec);
+
+ let e: $id = source.into();
+ assert_eq!(e, vec);
+ }
+ }
+ }
+ }
+ };
+}
+
+macro_rules! impl_from_vectors {
+ ([$elem_ty:ident; $elem_count:expr]: $id:ident | $test_tt:tt | $($source:ident),*) => {
+ $(
+ impl_from_vector!([$elem_ty; $elem_count]: $id | $test_tt | $source);
+ )*
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2922.rs b/src/tools/rustfmt/tests/target/issue-2922.rs
new file mode 100644
index 000000000..501f78c78
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2922.rs
@@ -0,0 +1,10 @@
+// rustfmt-indent_style: Visual
+struct Functions {
+ RunListenServer: unsafe extern "C" fn(*mut c_void,
+ *mut c_char,
+ *mut c_char,
+ *mut c_char,
+ *mut c_void,
+ *mut c_void)
+ -> c_int,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2927-2.rs b/src/tools/rustfmt/tests/target/issue-2927-2.rs
new file mode 100644
index 000000000..e895783ba
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2927-2.rs
@@ -0,0 +1,7 @@
+// rustfmt-edition: 2015
+#![feature(rust_2018_preview, uniform_paths)]
+use futures::prelude::*;
+use http_03::cli::Cli;
+use hyper::{service::service_fn_ok, Body, Response, Server};
+use log::{error, info, log};
+use structopt::StructOpt;
diff --git a/src/tools/rustfmt/tests/target/issue-2927.rs b/src/tools/rustfmt/tests/target/issue-2927.rs
new file mode 100644
index 000000000..3267be28d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2927.rs
@@ -0,0 +1,7 @@
+// rustfmt-edition: 2018
+#![feature(rust_2018_preview, uniform_paths)]
+use ::log::{error, info, log};
+use futures::prelude::*;
+use http_03::cli::Cli;
+use hyper::{service::service_fn_ok, Body, Response, Server};
+use structopt::StructOpt;
diff --git a/src/tools/rustfmt/tests/target/issue-2930.rs b/src/tools/rustfmt/tests/target/issue-2930.rs
new file mode 100644
index 000000000..41e763a7c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2930.rs
@@ -0,0 +1,5 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ let (first_variable, second_variable) = (this_is_something_with_an_extraordinarily_long_name,
+ this_variable_name_is_also_pretty_long);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2936.rs b/src/tools/rustfmt/tests/target/issue-2936.rs
new file mode 100644
index 000000000..1d6eb6d60
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2936.rs
@@ -0,0 +1,21 @@
+struct AStruct {
+ A: u32,
+ B: u32,
+ C: u32,
+}
+
+impl Something for AStruct {
+ fn a_func() {
+ match a_val {
+ ContextualParseError::InvalidMediaRule(ref err) => {
+ let err: &CStr = match err.kind {
+ ParseErrorKind::Custom(StyleParseErrorKind::MediaQueryExpectedFeatureName(
+ ..,
+ )) => {
+ cstr!("PEMQExpectedFeatureName")
+ }
+ };
+ }
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2941.rs b/src/tools/rustfmt/tests/target/issue-2941.rs
new file mode 100644
index 000000000..3c6c702c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2941.rs
@@ -0,0 +1,5 @@
+// rustfmt-wrap_comments: true
+
+//! ```
+//! \
+//! ```
diff --git a/src/tools/rustfmt/tests/target/issue-2955.rs b/src/tools/rustfmt/tests/target/issue-2955.rs
new file mode 100644
index 000000000..799cd36e2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2955.rs
@@ -0,0 +1,6 @@
+// rustfmt-condense_wildcard_suffixes: true
+fn main() {
+ match (1, 2, 3) {
+ (..) => (),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2973.rs b/src/tools/rustfmt/tests/target/issue-2973.rs
new file mode 100644
index 000000000..86574bd86
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2973.rs
@@ -0,0 +1,158 @@
+#[cfg(test)]
+mod test {
+ summary_test! {
+ tokenize_recipe_interpolation_eol,
+ "foo: # some comment
+ {{hello}}
+",
+ "foo: \
+ {{hello}} \
+ {{ahah}}",
+ "N:#$>^{N}$<.",
+ }
+
+ summary_test! {
+ tokenize_strings,
+ r#"a = "'a'" + '"b"' + "'c'" + '"d"'#echo hello"#,
+ r#"N="+'+"+'#."#,
+ }
+
+ summary_test! {
+ tokenize_recipe_interpolation_eol,
+ "foo: # some comment
+ {{hello}}
+",
+ "N:#$>^{N}$<.",
+ }
+
+ summary_test! {
+ tokenize_recipe_interpolation_eof,
+ "foo: # more comments
+ {{hello}}
+# another comment
+",
+ "N:#$>^{N}$<#$.",
+ }
+
+ summary_test! {
+ tokenize_recipe_complex_interpolation_expression,
+ "foo: #lol\n {{a + b + \"z\" + blarg}}",
+ "N:#$>^{N+N+\"+N}<.",
+ }
+
+ summary_test! {
+ tokenize_recipe_multiple_interpolations,
+ "foo:,#ok\n {{a}}0{{b}}1{{c}}",
+ "N:,#$>^{N}_{N}_{N}<.",
+ }
+
+ summary_test! {
+ tokenize_junk,
+ "bob
+
+hello blah blah blah : a b c #whatever
+ ",
+ "N$$NNNN:NNN#$.",
+ }
+
+ summary_test! {
+ tokenize_empty_lines,
+ "
+# this does something
+hello:
+ asdf
+ bsdf
+
+ csdf
+
+ dsdf # whatever
+
+# yolo
+ ",
+ "$#$N:$>^_$^_$$^_$$^_$$<#$.",
+ }
+
+ summary_test! {
+ tokenize_comment_before_variable,
+ "
+#
+A='1'
+echo:
+ echo {{A}}
+ ",
+ "$#$N='$N:$>^_{N}$<.",
+ }
+
+ summary_test! {
+ tokenize_interpolation_backticks,
+ "hello:\n echo {{`echo hello` + `echo goodbye`}}",
+ "N:$>^_{`+`}<.",
+ }
+
+ summary_test! {
+ tokenize_assignment_backticks,
+ "a = `echo hello` + `echo goodbye`",
+ "N=`+`.",
+ }
+
+ summary_test! {
+ tokenize_multiple,
+ "
+hello:
+ a
+ b
+
+ c
+
+ d
+
+# hello
+bob:
+ frank
+ ",
+
+ "$N:$>^_$^_$$^_$$^_$$<#$N:$>^_$<.",
+ }
+
+ summary_test! {
+ tokenize_comment,
+ "a:=#",
+ "N:=#."
+ }
+
+ summary_test! {
+ tokenize_comment_with_bang,
+ "a:=#foo!",
+ "N:=#."
+ }
+
+ summary_test! {
+ tokenize_order,
+ r"
+b: a
+ @mv a b
+
+a:
+ @touch F
+ @touch a
+
+d: c
+ @rm c
+
+c: b
+ @mv b c",
+ "$N:N$>^_$$<N:$>^_$^_$$<N:N$>^_$$<N:N$>^_<.",
+ }
+
+ summary_test! {
+ tokenize_parens,
+ r"((())) )abc(+",
+ "((())))N(+.",
+ }
+
+ summary_test! {
+ crlf_newline,
+ "#\r\n#asdf\r\n",
+ "#$#$.",
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2976.rs b/src/tools/rustfmt/tests/target/issue-2976.rs
new file mode 100644
index 000000000..51c94a84b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2976.rs
@@ -0,0 +1,3 @@
+fn a(_ /*comment*/: u8 /* toto */) {}
+fn b(/*comment*/ _: u8 /* tata */) {}
+fn c(_: /*comment*/ u8) {}
diff --git a/src/tools/rustfmt/tests/target/issue-2977/block.rs b/src/tools/rustfmt/tests/target/issue-2977/block.rs
new file mode 100644
index 000000000..d376e370c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2977/block.rs
@@ -0,0 +1,11 @@
+macro_rules! atomic_bits {
+ ($ldrex:expr) => {
+ execute(|| {
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ })
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2977/impl.rs b/src/tools/rustfmt/tests/target/issue-2977/impl.rs
new file mode 100644
index 000000000..8d7bb9414
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2977/impl.rs
@@ -0,0 +1,44 @@
+macro_rules! atomic_bits {
+ // the println macro cannot be rewritten because of the asm macro
+ ($type:ty, $ldrex:expr, $strex:expr) => {
+ impl AtomicBits for $type {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+
+ unsafe fn store_excl(self, address: usize) -> bool {
+ let status: $type;
+ println!("{}",
+ status);
+ status == 0
+ }
+ }
+ };
+
+ // the println macro should be rewritten here
+ ($type:ty) => {
+ fn some_func(self) {
+ let status: $type;
+ println!("{}", status);
+ }
+ };
+
+ // unrewritable macro in func
+ ($type:ty, $ldrex:expr) => {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2977/item.rs b/src/tools/rustfmt/tests/target/issue-2977/item.rs
new file mode 100644
index 000000000..857065ca9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2977/item.rs
@@ -0,0 +1,11 @@
+macro_rules! atomic_bits {
+ ($ldrex:expr) => {
+ some_macro!(pub fn foo() {
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ })
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2977/trait.rs b/src/tools/rustfmt/tests/target/issue-2977/trait.rs
new file mode 100644
index 000000000..ae20668cd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2977/trait.rs
@@ -0,0 +1,44 @@
+macro_rules! atomic_bits {
+ // the println macro cannot be rewritten because of the asm macro
+ ($type:ty, $ldrex:expr, $strex:expr) => {
+ trait $type {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+
+ unsafe fn store_excl(self, address: usize) -> bool {
+ let status: $type;
+ println!("{}",
+ status);
+ status == 0
+ }
+ }
+ };
+
+ // the println macro should be rewritten here
+ ($type:ty) => {
+ fn some_func(self) {
+ let status: $type;
+ println!("{}", status);
+ }
+ };
+
+ // unrewritable macro in func
+ ($type:ty, $ldrex:expr) => {
+ unsafe fn load_excl(address: usize) -> Self {
+ let raw: $type;
+ asm!($ldrex
+ : "=r"(raw)
+ : "r"(address)
+ :
+ : "volatile");
+ raw
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2985.rs b/src/tools/rustfmt/tests/target/issue-2985.rs
new file mode 100644
index 000000000..faad85923
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2985.rs
@@ -0,0 +1,36 @@
+// rustfmt-indent_style: Visual
+fn foo() {
+ {
+ {
+ let extra_encoder_settings = extra_encoder_settings.iter()
+ .filter_map(|&(name, value)| {
+ value.split()
+ .next()
+ .something()
+ .something2()
+ .something3()
+ .something4()
+ });
+ let extra_encoder_settings = extra_encoder_settings.iter()
+ .filter_map(|&(name, value)| {
+ value.split()
+ .next()
+ .something()
+ .something2()
+ .something3()
+ .something4()
+ })
+ .something();
+ if let Some(subpod) = pod.subpods.iter().find(|s| {
+ !s.plaintext
+ .as_ref()
+ .map(String::as_ref)
+ .unwrap_or("")
+ .is_empty()
+ })
+ {
+ do_something();
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-2995.rs b/src/tools/rustfmt/tests/target/issue-2995.rs
new file mode 100644
index 000000000..890da8def
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-2995.rs
@@ -0,0 +1,7 @@
+fn issue_2995() {
+ // '\u{2028}' is inserted in the code below.
+
+ [0, 1];
+ [0, /* */ 1];
+ [0, 1];
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3029.rs b/src/tools/rustfmt/tests/target/issue-3029.rs
new file mode 100644
index 000000000..a7ac5c32b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3029.rs
@@ -0,0 +1,94 @@
+fn keep_if() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ if FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_if_let() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ if let Some(e) = FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_for() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ for conv in FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_loop() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(loop {
+ FromJSValConvertible::from_jsval(cx, rval.handle(), ());
+ })
+ }
+ }
+ }
+}
+
+fn keep_while() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ while FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_while_let() {
+ {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ while let Some(e) = FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ unimplemented!();
+ },
+ )
+ }
+ }
+ }
+}
+
+fn keep_match() {
+ {
+ {
+ EvaluateJSReply::NumberValue(
+ match FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
+ Ok(ConversionResult::Success(v)) => v,
+ _ => unreachable!(),
+ },
+ )
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3032.rs b/src/tools/rustfmt/tests/target/issue-3032.rs
new file mode 100644
index 000000000..3533a81fb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3032.rs
@@ -0,0 +1,36 @@
+pub fn get_array_index_from_id(_cx: *mut JSContext, id: HandleId) -> Option<u32> {
+ let raw_id = id.into();
+ unsafe {
+ if RUST_JSID_IS_INT(raw_id) {
+ return Some(RUST_JSID_TO_INT(raw_id) as u32);
+ }
+ None
+ }
+ // If `id` is length atom, `-1`, otherwise:
+ /*return if JSID_IS_ATOM(id) {
+ let atom = JSID_TO_ATOM(id);
+ //let s = *GetAtomChars(id);
+ if s > 'a' && s < 'z' {
+ return -1;
+ }
+
+ let i = 0;
+ let str = AtomToLinearString(JSID_TO_ATOM(id));
+ return if StringIsArray(str, &mut i) != 0 { i } else { -1 }
+ } else {
+ IdToInt32(cx, id);
+ }*/
+}
+
+impl Foo {
+ fn bar() -> usize {
+ 42
+ /* a block comment */
+ }
+
+ fn baz() -> usize {
+ 42
+ // this is a line
+ /* a block comment */
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3038.rs b/src/tools/rustfmt/tests/target/issue-3038.rs
new file mode 100644
index 000000000..3c398b825
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3038.rs
@@ -0,0 +1,29 @@
+impl HTMLTableElement {
+ fn func() {
+ if number_of_row_elements == 0 {
+ if let Some(last_tbody) = node
+ .rev_children()
+ .filter_map(DomRoot::downcast::<Element>)
+ .find(|n| {
+ n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")
+ })
+ {
+ last_tbody
+ .upcast::<Node>()
+ .AppendChild(new_row.upcast::<Node>())
+ .expect("InsertRow failed to append first row.");
+ }
+ }
+
+ if number_of_row_elements == 0 {
+ if let Some(last_tbody) = node.find(|n| {
+ n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")
+ }) {
+ last_tbody
+ .upcast::<Node>()
+ .AppendChild(new_row.upcast::<Node>())
+ .expect("InsertRow failed to append first row.");
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3043.rs b/src/tools/rustfmt/tests/target/issue-3043.rs
new file mode 100644
index 000000000..b54e244a4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3043.rs
@@ -0,0 +1,5 @@
+// rustfmt-edition: 2018
+
+use ::std::vec::Vec;
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3049.rs b/src/tools/rustfmt/tests/target/issue-3049.rs
new file mode 100644
index 000000000..fad154354
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3049.rs
@@ -0,0 +1,45 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ something.aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .bench_function(|| {
+ let x = hello();
+ });
+
+ something.aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .bench_function(arg, || {
+ let x = hello();
+ });
+
+ something.aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .aaaaaaaaaaaa()
+ .bench_function(arg,
+ || {
+ let x = hello();
+ },
+ arg);
+
+ AAAAAAAAAAA.function(|| {
+ let _ = ();
+ });
+
+ AAAAAAAAAAA.chain().function(|| {
+ let _ = ();
+ })
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3055/backtick.rs b/src/tools/rustfmt/tests/target/issue-3055/backtick.rs
new file mode 100644
index 000000000..f5bae8d3d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3055/backtick.rs
@@ -0,0 +1,10 @@
+// rustfmt-wrap_comments: true
+
+/// Simple block
+///
+/// ```text
+/// `
+/// ```
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3055/empty-code-block.rs b/src/tools/rustfmt/tests/target/issue-3055/empty-code-block.rs
new file mode 100644
index 000000000..566f7ef9b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3055/empty-code-block.rs
@@ -0,0 +1,18 @@
+// rustfmt-wrap_comments: true
+
+/// Simple block
+///
+/// ```
+/// ```
+///
+/// ```no_run
+/// ```
+///
+/// ```should_panic
+/// ```
+///
+/// ```compile_fail
+/// ```
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3055/original.rs b/src/tools/rustfmt/tests/target/issue-3055/original.rs
new file mode 100644
index 000000000..2df6adbb5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3055/original.rs
@@ -0,0 +1,42 @@
+// rustfmt-wrap_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+/// Vestibulum elit nibh, rhoncus non, euismod sit amet, pretium eu, enim. Nunc
+/// commodo ultricies dui.
+///
+/// Should not format with text attribute
+/// ```text
+/// .--------------.
+/// | v
+/// Park <- Idle -> Poll -> Probe -> Download -> Install -> Reboot
+/// ^ ^ ' ' '
+/// ' ' ' ' '
+/// ' `--------' ' '
+/// `---------------' ' '
+/// `--------------------------' '
+/// `-------------------------------------'
+/// ```
+///
+/// Should not format with ignore attribute
+/// ```text
+/// .--------------.
+/// | v
+/// Park <- Idle -> Poll -> Probe -> Download -> Install -> Reboot
+/// ^ ^ ' ' '
+/// ' ' ' ' '
+/// ' `--------' ' '
+/// `---------------' ' '
+/// `--------------------------' '
+/// `-------------------------------------'
+/// ```
+///
+/// Should format with rust attribute
+/// ```rust
+/// let x = 42;
+/// ```
+///
+/// Should format with no attribute as it defaults to rust
+/// ```
+/// let x = 42;
+/// ```
+fn func() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3059.rs b/src/tools/rustfmt/tests/target/issue-3059.rs
new file mode 100644
index 000000000..f750c1287
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3059.rs
@@ -0,0 +1,8 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 80
+
+/// Vestibulum elit nibh, rhoncus non, euismod sit amet, pretium eu, enim. Nunc
+/// commodo ultricies dui. Cras gravida rutrum massa. Donec accumsan mattis
+/// turpis. Quisque sem. Quisque elementum sapien iaculis augue. In dui sem,
+/// congue sit amet, feugiat quis, lobortis at, eros.
+fn func4() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3066.rs b/src/tools/rustfmt/tests/target/issue-3066.rs
new file mode 100644
index 000000000..d4dccc97e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3066.rs
@@ -0,0 +1,7 @@
+// rustfmt-indent_style: Visual
+fn main() {
+ Struct { field: aaaaaaaaaaa };
+ Struct { field: aaaaaaaaaaaa };
+ Struct { field: value,
+ field2: value2 };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3105.rs b/src/tools/rustfmt/tests/target/issue-3105.rs
new file mode 100644
index 000000000..4f1123805
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3105.rs
@@ -0,0 +1,48 @@
+// rustfmt-wrap_comments: true
+
+/// Although the indentation of the skipped method is off, it shouldn't be
+/// changed.
+///
+/// ```
+/// pub unsafe fn _mm256_shufflehi_epi16(a: __m256i, imm8: i32) -> __m256i {
+/// let imm8 = (imm8 & 0xFF) as u8;
+/// let a = a.as_i16x16();
+/// macro_rules! shuffle_done {
+/// ($x01:expr, $x23:expr, $x45:expr, $x67:expr) => {
+/// #[cfg_attr(rustfmt, rustfmt_skip)]
+/// simd_shuffle16(a, a, [
+/// 0, 1, 2, 3, 4+$x01, 4+$x23, 4+$x45, 4+$x67,
+/// 8, 9, 10, 11, 12+$x01, 12+$x23, 12+$x45, 12+$x67
+/// ]);
+/// };
+/// }
+/// }
+/// ```
+pub unsafe fn _mm256_shufflehi_epi16(a: __m256i, imm8: i32) -> __m256i {
+ let imm8 = (imm8 & 0xFF) as u8;
+ let a = a.as_i16x16();
+ macro_rules! shuffle_done {
+ ($x01:expr, $x23:expr, $x45:expr, $x67:expr) => {
+ #[cfg_attr(rustfmt, rustfmt_skip)]
+ simd_shuffle16(a, a, [
+ 0, 1, 2, 3, 4+$x01, 4+$x23, 4+$x45, 4+$x67,
+ 8, 9, 10, 11, 12+$x01, 12+$x23, 12+$x45, 12+$x67
+ ]);
+ };
+ }
+}
+
+/// The skipped method shouldn't right-shift
+pub unsafe fn _mm256_shufflehi_epi32(a: __m256i, imm8: i32) -> __m256i {
+ let imm8 = (imm8 & 0xFF) as u8;
+ let a = a.as_i16x16();
+ macro_rules! shuffle_done {
+ ($x01:expr, $x23:expr, $x45:expr, $x67:expr) => {
+ #[cfg_attr(rustfmt, rustfmt_skip)]
+ simd_shuffle32(a, a, [
+ 0, 1, 2, 3, 4+$x01, 4+$x23, 4+$x45, 4+$x67,
+ 8, 9, 10, 11, 12+$x01, 12+$x23, 12+$x45, 12+$x67
+ ]);
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3118.rs b/src/tools/rustfmt/tests/target/issue-3118.rs
new file mode 100644
index 000000000..ce73a5c78
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3118.rs
@@ -0,0 +1,11 @@
+use {
+ crate::foo::bar,
+ bytes::{Buf, BufMut},
+ std::io,
+};
+
+mod foo {
+ pub mod bar {}
+}
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3124.rs b/src/tools/rustfmt/tests/target/issue-3124.rs
new file mode 100644
index 000000000..1083050d8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3124.rs
@@ -0,0 +1,14 @@
+pub fn fail1() {
+ // Some comment.
+ /**///
+}
+
+pub fn fail2() {
+ // Some comment.
+ /**/
+}
+
+pub fn fail3() {
+ // Some comment.
+ //
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3131.rs b/src/tools/rustfmt/tests/target/issue-3131.rs
new file mode 100644
index 000000000..c7304dd55
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3131.rs
@@ -0,0 +1,8 @@
+fn main() {
+ match 3 {
+ t if match t {
+ _ => true,
+ } => {}
+ _ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3132.rs b/src/tools/rustfmt/tests/target/issue-3132.rs
new file mode 100644
index 000000000..4dffe0ab8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3132.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: Two
+
+fn test() {
+ /*
+ a
+ */
+ let x = 42;
+ /*
+ aaa
+ "line 1
+ line 2
+ line 3"
+ */
+ let x = 42;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3153.rs b/src/tools/rustfmt/tests/target/issue-3153.rs
new file mode 100644
index 000000000..39e569c0d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3153.rs
@@ -0,0 +1,9 @@
+// rustfmt-wrap_comments: true
+
+/// This may panic if:
+/// - there are fewer than `max_header_bytes` bytes preceding the body
+/// - there are fewer than `max_footer_bytes` bytes following the body
+/// - the sum of the body bytes and post-body bytes is less than the sum of
+/// `min_body_and_padding_bytes` and `max_footer_bytes` (in other words, the
+/// minimum body and padding byte requirement is not met)
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3158.rs b/src/tools/rustfmt/tests/target/issue-3158.rs
new file mode 100644
index 000000000..4bbbdc1d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3158.rs
@@ -0,0 +1,74 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// Should format
+/// ```rust
+/// assert!(false);
+/// ```
+///
+/// Should format
+/// ```rust,should_panic
+/// assert!(false);
+/// ```
+///
+/// Should format
+/// ```rust,should_panic,edition2018
+/// assert!(false);
+/// ```
+///
+/// Should format
+/// ```rust , should_panic , edition2018
+/// assert!(false);
+/// ```
+///
+/// Should not format
+/// ```ignore
+/// assert!( false );
+/// ```
+///
+/// Should not format (not all are rust)
+/// ```rust,ignore
+/// assert!( false );
+/// ```
+///
+/// Should not format (rust compile_fail)
+/// ```compile_fail
+/// assert!( false );
+/// ```
+///
+/// Should not format (rust compile_fail)
+/// ```rust,compile_fail
+/// assert!( false );
+/// ```
+///
+/// Various unspecified ones that should format
+/// ```
+/// assert!(false);
+/// ```
+///
+/// ```,
+/// assert!(false);
+/// ```
+///
+/// ```,,,,,
+/// assert!(false);
+/// ```
+///
+/// ```,,, rust ,,
+/// assert!(false);
+/// ```
+///
+/// Should not format
+/// ```,,, rust , ignore,
+/// assert!( false );
+/// ```
+///
+/// Few empty ones
+/// ```
+/// ```
+///
+/// ```rust
+/// ```
+///
+/// ```ignore
+/// ```
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3182.rs b/src/tools/rustfmt/tests/target/issue-3182.rs
new file mode 100644
index 000000000..d8de84438
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3182.rs
@@ -0,0 +1,10 @@
+// rustfmt-max_width: 79
+// rustfmt-wrap_comments: true
+
+/// ```rust
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd)not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+///
+/// // Est lectus hendrerit lorem, eget dignissim orci nisl sit amet massa. Etiam volutpat lobortis eros.
+/// let x = 42;
+/// ```
+fn func() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3184.rs b/src/tools/rustfmt/tests/target/issue-3184.rs
new file mode 100644
index 000000000..f8d9b169f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3184.rs
@@ -0,0 +1,5 @@
+/*/
+struct Error{
+ message: String,
+}
+*/
diff --git a/src/tools/rustfmt/tests/target/issue-3194.rs b/src/tools/rustfmt/tests/target/issue-3194.rs
new file mode 100644
index 000000000..a9614913e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3194.rs
@@ -0,0 +1,52 @@
+mod m {
+ struct S
+ where
+ A: B;
+}
+
+mod n {
+ struct Foo
+ where
+ A: B,
+ {
+ foo: usize,
+ }
+}
+
+mod o {
+ enum Bar
+ where
+ A: B,
+ {
+ Bar,
+ }
+}
+
+mod with_comments {
+ mod m {
+ struct S
+ /* before where */
+ where
+ A: B; /* after where */
+ }
+
+ mod n {
+ struct Foo
+ /* before where */
+ where
+ A: B, /* after where */
+ {
+ foo: usize,
+ }
+ }
+
+ mod o {
+ enum Bar
+ /* before where */
+ where
+ A: B, /* after where */
+ {
+ Bar,
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3198.rs b/src/tools/rustfmt/tests/target/issue-3198.rs
new file mode 100644
index 000000000..9291f181d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3198.rs
@@ -0,0 +1,67 @@
+impl TestTrait {
+ fn foo_one_pre(/* Important comment1 */ self) {}
+
+ fn foo_one_post(self /* Important comment1 */) {}
+
+ fn foo_pre(/* Important comment1 */ self, /* Important comment2 */ a: i32) {}
+
+ fn foo_post(self /* Important comment1 */, a: i32 /* Important comment2 */) {}
+
+ fn bar_pre(/* Important comment1 */ &mut self, /* Important comment2 */ a: i32) {}
+
+ fn bar_post(&mut self /* Important comment1 */, a: i32 /* Important comment2 */) {}
+
+ fn baz_pre(
+ /* Important comment1 */
+ self: X<'a, 'b>,
+ /* Important comment2 */
+ a: i32,
+ ) {
+ }
+
+ fn baz_post(
+ self: X<'a, 'b>, /* Important comment1 */
+ a: i32, /* Important comment2 */
+ ) {
+ }
+
+ fn baz_tree_pre(
+ /* Important comment1 */
+ self: X<'a, 'b>,
+ /* Important comment2 */
+ a: i32,
+ /* Important comment3 */
+ b: i32,
+ ) {
+ }
+
+ fn baz_tree_post(
+ self: X<'a, 'b>, /* Important comment1 */
+ a: i32, /* Important comment2 */
+ b: i32, /* Important comment3 */
+ ) {
+ }
+
+ fn multi_line(
+ self: X<'a, 'b>, /* Important comment1-1 */
+ /* Important comment1-2 */
+ a: i32, /* Important comment2 */
+ b: i32, /* Important comment3 */
+ ) {
+ }
+
+ fn two_line_comment(
+ self: X<'a, 'b>, /* Important comment1-1
+ Important comment1-2 */
+ a: i32, /* Important comment2 */
+ b: i32, /* Important comment3 */
+ ) {
+ }
+
+ fn no_first_line_comment(
+ self: X<'a, 'b>,
+ /* Important comment2 */ a: i32,
+ /* Important comment3 */ b: i32,
+ ) {
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3213/version_one.rs b/src/tools/rustfmt/tests/target/issue-3213/version_one.rs
new file mode 100644
index 000000000..307903b12
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3213/version_one.rs
@@ -0,0 +1,13 @@
+// rustfmt-version: One
+
+fn foo() {
+ match 0 {
+ 0 => {
+ return AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ }
+ 1 => {
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ }
+ _ => "",
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3213/version_two.rs b/src/tools/rustfmt/tests/target/issue-3213/version_two.rs
new file mode 100644
index 000000000..de93d04ba
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3213/version_two.rs
@@ -0,0 +1,13 @@
+// rustfmt-version: Two
+
+fn foo() {
+ match 0 {
+ 0 => {
+ return AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
+ }
+ 1 => {
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ }
+ _ => "",
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3217.rs b/src/tools/rustfmt/tests/target/issue-3217.rs
new file mode 100644
index 000000000..5121320a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3217.rs
@@ -0,0 +1,24 @@
+#![feature(label_break_value)]
+
+fn main() {
+ let mut res = 0;
+ 's_39: {
+ if res == 0i32 {
+ println!("Hello, world!");
+ }
+ }
+ 's_40: loop {
+ println!("res = {}", res);
+ res += 1;
+ if res == 3i32 {
+ break 's_40;
+ }
+ }
+ let toto = || {
+ if true {
+ 42
+ } else {
+ 24
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3224.rs b/src/tools/rustfmt/tests/target/issue-3224.rs
new file mode 100644
index 000000000..6476d2117
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3224.rs
@@ -0,0 +1,11 @@
+// rustfmt-wrap_comments: true
+
+//! Test:
+//! * aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+//! * [`examples/simple`] – Demonstrates use of the [`init`] API with plain
+//! structs.
+//! * [`examples/simple_flatbuffer`] – Demonstrates use of the [`init`] API with
+//! FlatBuffers.
+//! * [`examples/gravity`] – Demonstrates use of the [`RLBot::set_game_state`]
+//! API
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3227/one.rs b/src/tools/rustfmt/tests/target/issue-3227/one.rs
new file mode 100644
index 000000000..fcc833100
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3227/one.rs
@@ -0,0 +1,13 @@
+// rustfmt-version: One
+
+fn main() {
+ thread::spawn(|| {
+ while true {
+ println!("iteration");
+ }
+ });
+
+ thread::spawn(|| loop {
+ println!("iteration");
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3227/two.rs b/src/tools/rustfmt/tests/target/issue-3227/two.rs
new file mode 100644
index 000000000..374ab5430
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3227/two.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: Two
+
+fn main() {
+ thread::spawn(|| {
+ while true {
+ println!("iteration");
+ }
+ });
+
+ thread::spawn(|| {
+ loop {
+ println!("iteration");
+ }
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3234.rs b/src/tools/rustfmt/tests/target/issue-3234.rs
new file mode 100644
index 000000000..c7d9d42bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3234.rs
@@ -0,0 +1,14 @@
+macro_rules! fuzz_target {
+ (|$data:ident: &[u8]| $body:block) => {};
+}
+
+fuzz_target!(|data: &[u8]| {
+ if let Ok(app_img) = AppImage::parse(data) {
+ if let Ok(app_img) =
+ app_img.sign_for_secureboot(include_str!("../../test-data/signing-key"))
+ {
+ assert!(app_img.is_signed());
+ Gbl::from_app_image(app_img).to_bytes();
+ }
+ }
+});
diff --git a/src/tools/rustfmt/tests/target/issue-3241.rs b/src/tools/rustfmt/tests/target/issue-3241.rs
new file mode 100644
index 000000000..60b452abd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3241.rs
@@ -0,0 +1,11 @@
+// rustfmt-edition: 2018
+
+use ::baz::{bar, foo};
+use ::ignore;
+use ::ignore::some::more;
+use ::*;
+use ::{bar, foo};
+
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3253/bar.rs b/src/tools/rustfmt/tests/target/issue-3253/bar.rs
new file mode 100644
index 000000000..6c6ab945b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3253/bar.rs
@@ -0,0 +1,2 @@
+// Empty
+fn empty() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3253/foo.rs b/src/tools/rustfmt/tests/target/issue-3253/foo.rs
new file mode 100644
index 000000000..1a42a1015
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3253/foo.rs
@@ -0,0 +1,3 @@
+pub fn hello() {
+ println!("Hello World!");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3253/lib.rs b/src/tools/rustfmt/tests/target/issue-3253/lib.rs
new file mode 100644
index 000000000..3eef586bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3253/lib.rs
@@ -0,0 +1,14 @@
+#[macro_use]
+extern crate cfg_if;
+
+cfg_if! {
+ if #[cfg(target_family = "unix")] {
+ mod foo;
+ #[path = "paths/bar_foo.rs"]
+ mod bar_foo;
+ } else {
+ mod bar;
+ #[path = "paths/foo_bar.rs"]
+ mod foo_bar;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3253/paths/bar_foo.rs b/src/tools/rustfmt/tests/target/issue-3253/paths/bar_foo.rs
new file mode 100644
index 000000000..f7e1de29a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3253/paths/bar_foo.rs
@@ -0,0 +1,3 @@
+fn foo_decl_item(x: &mut i32) {
+ x = 3;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3253/paths/excluded.rs b/src/tools/rustfmt/tests/target/issue-3253/paths/excluded.rs
new file mode 100644
index 000000000..9ab88414d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3253/paths/excluded.rs
@@ -0,0 +1,17 @@
+// This module is not imported in the cfg_if macro in lib.rs so it is ignored
+// while the foo and bar mods are formatted.
+// Check the corresponding file in tests/source/issue-3253/paths/excluded.rs
+
+
+
+
+ fn Foo<T>() where T: Bar {
+}
+
+
+
+trait CoolerTypes { fn dummy(&self) {
+}
+}
+
+
diff --git a/src/tools/rustfmt/tests/target/issue-3253/paths/foo_bar.rs b/src/tools/rustfmt/tests/target/issue-3253/paths/foo_bar.rs
new file mode 100644
index 000000000..f52ac11b7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3253/paths/foo_bar.rs
@@ -0,0 +1,5 @@
+fn Foo<T>()
+where
+ T: Bar,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3265.rs b/src/tools/rustfmt/tests/target/issue-3265.rs
new file mode 100644
index 000000000..7db1dbd8b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3265.rs
@@ -0,0 +1,14 @@
+// rustfmt-newline_style: Windows
+#[cfg(test)]
+mod test {
+ summary_test! {
+ tokenize_recipe_interpolation_eol,
+ "foo: # some comment
+ {{hello}}
+",
+ "foo: \
+ {{hello}} \
+ {{ahah}}",
+ "N:#$>^{N}$<.",
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3270/one.rs b/src/tools/rustfmt/tests/target/issue-3270/one.rs
new file mode 100644
index 000000000..78de94732
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3270/one.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: One
+
+pub fn main() {
+ /* let s = String::from(
+ "
+ hello
+ world
+ ",
+ ); */
+
+ assert_eq!(s, "\nhello\nworld\n");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3270/two.rs b/src/tools/rustfmt/tests/target/issue-3270/two.rs
new file mode 100644
index 000000000..e48b59213
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3270/two.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: Two
+
+pub fn main() {
+ /* let s = String::from(
+ "
+hello
+world
+",
+ ); */
+
+ assert_eq!(s, "\nhello\nworld\n");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3270/wrap.rs b/src/tools/rustfmt/tests/target/issue-3270/wrap.rs
new file mode 100644
index 000000000..7435c5f08
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3270/wrap.rs
@@ -0,0 +1,13 @@
+// rustfmt-wrap_comments: true
+// rustfmt-version: Two
+
+// check that a line below max_width does not get over the limit when wrapping
+// it in a block comment
+fn func() {
+ let x = 42;
+ /*
+ let something = "one line line line line line line line line line line line line line
+ two lines
+ three lines";
+ */
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3272/v1.rs b/src/tools/rustfmt/tests/target/issue-3272/v1.rs
new file mode 100644
index 000000000..aab201027
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3272/v1.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: One
+
+fn main() {
+ assert!(HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some());
+
+ assert(
+ HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some(),
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3272/v2.rs b/src/tools/rustfmt/tests/target/issue-3272/v2.rs
new file mode 100644
index 000000000..a42a2fccd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3272/v2.rs
@@ -0,0 +1,17 @@
+// rustfmt-version: Two
+
+fn main() {
+ assert!(
+ HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some()
+ );
+
+ assert(
+ HAYSTACK
+ .par_iter()
+ .find_any(|&&x| x[0] % 1000 == 999)
+ .is_some(),
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3278/version_one.rs b/src/tools/rustfmt/tests/target/issue-3278/version_one.rs
new file mode 100644
index 000000000..580679fba
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3278/version_one.rs
@@ -0,0 +1,8 @@
+// rustfmt-version: One
+
+pub fn parse_conditional<'a, I: 'a>(
+) -> impl Parser<Input = I, Output = Expr, PartialState = ()> + 'a
+where
+ I: Stream<Item = char>,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3278/version_two.rs b/src/tools/rustfmt/tests/target/issue-3278/version_two.rs
new file mode 100644
index 000000000..c17b1742d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3278/version_two.rs
@@ -0,0 +1,8 @@
+// rustfmt-version: Two
+
+pub fn parse_conditional<'a, I: 'a>()
+-> impl Parser<Input = I, Output = Expr, PartialState = ()> + 'a
+where
+ I: Stream<Item = char>,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3295/two.rs b/src/tools/rustfmt/tests/target/issue-3295/two.rs
new file mode 100644
index 000000000..3e669a0bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3295/two.rs
@@ -0,0 +1,14 @@
+// rustfmt-version: Two
+pub enum TestEnum {
+ a,
+ b,
+}
+
+fn the_test(input: TestEnum) {
+ match input {
+ TestEnum::a => String::from("aaa"),
+ TestEnum::b => String::from(
+ "this is a very very very very very very very very very very very very very very very ong string",
+ ),
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3302.rs b/src/tools/rustfmt/tests/target/issue-3302.rs
new file mode 100644
index 000000000..146cb9838
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3302.rs
@@ -0,0 +1,43 @@
+// rustfmt-version: Two
+
+macro_rules! moo1 {
+ () => {
+ bar! {
+ "
+"
+ }
+ };
+}
+
+macro_rules! moo2 {
+ () => {
+ bar! {
+ "
+"
+ }
+ };
+}
+
+macro_rules! moo3 {
+ () => {
+ 42
+ /*
+ bar! {
+ "
+ toto
+tata"
+ }
+ */
+ };
+}
+
+macro_rules! moo4 {
+ () => {
+ bar! {
+ "
+ foo
+ bar
+baz"
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3304.rs b/src/tools/rustfmt/tests/target/issue-3304.rs
new file mode 100644
index 000000000..cc1910ce2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3304.rs
@@ -0,0 +1,42 @@
+// rustfmt-error_on_line_overflow: true
+
+#[rustfmt::skip] use one::two::three::four::five::six::seven::eight::night::ten::eleven::twelve::thirteen::fourteen::fiveteen;
+#[rustfmt::skip]
+
+use one::two::three::four::five::six::seven::eight::night::ten::eleven::twelve::thirteen::fourteen::fiveteen;
+
+macro_rules! test_macro {
+ ($($id:ident),*) => {};
+}
+
+macro_rules! test_macro2 {
+ ($($id:ident),*) => {
+ 1
+ };
+}
+
+fn main() {
+ #[rustfmt::skip] test_macro! { one, two, three, four, five, six, seven, eight, night, ten, eleven, twelve, thirteen, fourteen, fiveteen };
+ #[rustfmt::skip]
+
+ test_macro! { one, two, three, four, five, six, seven, eight, night, ten, eleven, twelve, thirteen, fourteen, fiveteen };
+}
+
+fn test_local() {
+ #[rustfmt::skip] let x = test_macro! { one, two, three, four, five, six, seven, eight, night, ten, eleven, twelve, thirteen, fourteen, fiveteen };
+ #[rustfmt::skip]
+
+ let x = test_macro! { one, two, three, four, five, six, seven, eight, night, ten, eleven, twelve, thirteen, fourteen, fiveteen };
+}
+
+fn test_expr(_: [u32]) -> u32 {
+ #[rustfmt::skip] test_expr([9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999]);
+ #[rustfmt::skip]
+
+ test_expr([9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999, 9999999999999])
+}
+
+#[rustfmt::skip] mod test { use one::two::three::four::five::six::seven::eight::night::ten::eleven::twelve::thirteen::fourteen::fiveteen; }
+#[rustfmt::skip]
+
+mod test { use one::two::three::four::five::six::seven::eight::night::ten::eleven::twelve::thirteen::fourteen::fiveteen; }
diff --git a/src/tools/rustfmt/tests/target/issue-3314.rs b/src/tools/rustfmt/tests/target/issue-3314.rs
new file mode 100644
index 000000000..1cd32afb0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3314.rs
@@ -0,0 +1,5 @@
+/*code
+/*code*/
+if true {
+ println!("1");
+}*/
diff --git a/src/tools/rustfmt/tests/target/issue-3343.rs b/src/tools/rustfmt/tests/target/issue-3343.rs
new file mode 100644
index 000000000..d0497758e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3343.rs
@@ -0,0 +1,44 @@
+// rustfmt-inline_attribute_width: 50
+
+#[cfg(feature = "alloc")] use core::slice;
+
+#[cfg(feature = "alloc")] use total_len_is::_50__;
+
+#[cfg(feature = "alloc")]
+use total_len_is::_51___;
+
+#[cfg(feature = "alloc")] extern crate len_is_50_;
+
+#[cfg(feature = "alloc")]
+extern crate len_is_51__;
+
+/// this is a comment to test is_sugared_doc property
+use core::convert;
+
+#[fooooo]
+#[barrrrr]
+use total_len_is_::_51______;
+
+#[cfg(not(all(
+ feature = "std",
+ any(
+ target_os = "linux",
+ target_os = "android",
+ target_os = "netbsd",
+ target_os = "dragonfly",
+ target_os = "haiku",
+ target_os = "emscripten",
+ target_os = "solaris",
+ target_os = "cloudabi",
+ target_os = "macos",
+ target_os = "ios",
+ target_os = "freebsd",
+ target_os = "openbsd",
+ target_os = "redox",
+ target_os = "fuchsia",
+ windows,
+ all(target_arch = "wasm32", feature = "stdweb"),
+ all(target_arch = "wasm32", feature = "wasm-bindgen"),
+ )
+)))]
+use core::slice;
diff --git a/src/tools/rustfmt/tests/target/issue-3423.rs b/src/tools/rustfmt/tests/target/issue-3423.rs
new file mode 100644
index 000000000..cd6025177
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3423.rs
@@ -0,0 +1,5 @@
+/* a nice comment with a trailing whitespace */
+fn foo() {}
+
+/* a nice comment with a trailing tab */
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3434/lib.rs b/src/tools/rustfmt/tests/target/issue-3434/lib.rs
new file mode 100644
index 000000000..2fd7aea21
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3434/lib.rs
@@ -0,0 +1,57 @@
+#![rustfmt::skip::macros(skip_macro_mod)]
+
+mod no_entry;
+
+#[rustfmt::skip::macros(html, skip_macro)]
+fn main() {
+ let macro_result1 = html! { <div>
+this should be skipped</div>
+ }
+ .to_string();
+
+ let macro_result2 = not_skip_macro! { <div>
+ this should be mangled</div>
+ }
+ .to_string();
+
+ skip_macro! {
+this should be skipped
+};
+
+ foo();
+}
+
+fn foo() {
+ let macro_result1 = html! { <div>
+ this should be mangled</div>
+ }
+ .to_string();
+}
+
+fn bar() {
+ let macro_result1 = skip_macro_mod! { <div>
+this should be skipped</div>
+ }
+ .to_string();
+}
+
+fn visitor_made_from_same_context() {
+ let pair = (
+ || {
+ foo!(<div>
+ this should be mangled</div>
+ );
+ skip_macro_mod!(<div>
+this should be skipped</div>
+ );
+ },
+ || {
+ foo!(<div>
+ this should be mangled</div>
+ );
+ skip_macro_mod!(<div>
+this should be skipped</div>
+ );
+ },
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3434/no_entry.rs b/src/tools/rustfmt/tests/target/issue-3434/no_entry.rs
new file mode 100644
index 000000000..a2ecf2c2f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3434/no_entry.rs
@@ -0,0 +1,19 @@
+#[rustfmt::skip::macros(another_macro)]
+fn foo() {
+ another_macro!(
+This should be skipped.
+ );
+}
+
+fn bar() {
+ skip_macro_mod!(
+This should be skipped.
+ );
+}
+
+fn baz() {
+ let macro_result1 = no_skip_macro! { <div>
+ this should be mangled</div>
+ }
+ .to_string();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3434/not_skip_macro.rs b/src/tools/rustfmt/tests/target/issue-3434/not_skip_macro.rs
new file mode 100644
index 000000000..c90d09744
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3434/not_skip_macro.rs
@@ -0,0 +1,8 @@
+#[this::is::not::skip::macros(ouch)]
+
+fn main() {
+ let macro_result1 = ouch! { <div>
+ this should be mangled</div>
+ }
+ .to_string();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3442.rs b/src/tools/rustfmt/tests/target/issue-3442.rs
new file mode 100644
index 000000000..3664c50ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3442.rs
@@ -0,0 +1,10 @@
+// rustfmt-file_lines: [{"file":"tests/target/issue-3442.rs","range":[5,5]},{"file":"tests/target/issue-3442.rs","range":[8,8]}]
+
+extern crate alpha; // comment 1
+extern crate beta; // comment 2
+#[allow(aaa)] // comment 3
+#[macro_use]
+extern crate gamma;
+#[allow(bbb)] // comment 4
+#[macro_use]
+extern crate lazy_static;
diff --git a/src/tools/rustfmt/tests/target/issue-3465.rs b/src/tools/rustfmt/tests/target/issue-3465.rs
new file mode 100644
index 000000000..9e2680f0f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3465.rs
@@ -0,0 +1,42 @@
+fn main() {
+ ((((((((((((((((((((((((((((((((((((((((((0) + 1) + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1)
+ + 1);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3494/crlf.rs b/src/tools/rustfmt/tests/target/issue-3494/crlf.rs
new file mode 100644
index 000000000..cae615a06
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3494/crlf.rs
@@ -0,0 +1,8 @@
+// rustfmt-file_lines: [{"file":"tests/source/issue-3494/crlf.rs","range":[4,5]}]
+
+pub fn main() {
+ let world1 = "world";
+ println!("Hello, {}!", world1);
+let world2 = "world"; println!("Hello, {}!", world2);
+let world3 = "world"; println!("Hello, {}!", world3);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3494/lf.rs b/src/tools/rustfmt/tests/target/issue-3494/lf.rs
new file mode 100644
index 000000000..60aafe19a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3494/lf.rs
@@ -0,0 +1,8 @@
+// rustfmt-file_lines: [{"file":"tests/source/issue-3494/lf.rs","range":[4,5]}]
+
+pub fn main() {
+ let world1 = "world";
+ println!("Hello, {}!", world1);
+let world2 = "world"; println!("Hello, {}!", world2);
+let world3 = "world"; println!("Hello, {}!", world3);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3499.rs b/src/tools/rustfmt/tests/target/issue-3499.rs
new file mode 100644
index 000000000..88fd7f7e1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3499.rs
@@ -0,0 +1 @@
+test![];
diff --git a/src/tools/rustfmt/tests/target/issue-3508.rs b/src/tools/rustfmt/tests/target/issue-3508.rs
new file mode 100644
index 000000000..5f4e15658
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3508.rs
@@ -0,0 +1,22 @@
+fn foo<F>(foo2: F)
+where
+ F: Fn(
+ // this comment is deleted
+ ),
+{
+}
+
+fn foo_block<F>(foo2: F)
+where
+ F: Fn(/* this comment is deleted */),
+{
+}
+
+fn bar(
+ bar2: impl Fn(
+ // this comment is deleted
+ ),
+) {
+}
+
+fn bar_block(bar2: impl Fn(/* this comment is deleted */)) {}
diff --git a/src/tools/rustfmt/tests/target/issue-3515.rs b/src/tools/rustfmt/tests/target/issue-3515.rs
new file mode 100644
index 000000000..b59d03c6c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3515.rs
@@ -0,0 +1,6 @@
+// rustfmt-reorder_imports: false
+
+use std::fmt::{self, Display};
+use std::collections::HashMap;
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3532.rs b/src/tools/rustfmt/tests/target/issue-3532.rs
new file mode 100644
index 000000000..f41902620
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3532.rs
@@ -0,0 +1,6 @@
+fn foo(a: T) {
+ match a {
+ 1 => {}
+ 0 => {} // _ => panic!("doesn't format!"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3539.rs b/src/tools/rustfmt/tests/target/issue-3539.rs
new file mode 100644
index 000000000..aa2fa72ec
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3539.rs
@@ -0,0 +1,8 @@
+use std::io::Error;
+
+fn main() {
+ let _read_num: fn() -> Result<(i32), Error> = || -> Result<(i32), Error> {
+ let a = 1;
+ Ok(a)
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3554.rs b/src/tools/rustfmt/tests/target/issue-3554.rs
new file mode 100644
index 000000000..4ece90403
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3554.rs
@@ -0,0 +1,4 @@
+#![feature(const_generics)]
+
+pub struct S<const N: usize>;
+impl S<{ 0 }> {}
diff --git a/src/tools/rustfmt/tests/target/issue-3567.rs b/src/tools/rustfmt/tests/target/issue-3567.rs
new file mode 100644
index 000000000..3cf08628d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3567.rs
@@ -0,0 +1,3 @@
+fn check() {
+ vec![vec!(0; 10); 10];
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3568.rs b/src/tools/rustfmt/tests/target/issue-3568.rs
new file mode 100644
index 000000000..a146f3df2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3568.rs
@@ -0,0 +1 @@
+use a::b::{self};
diff --git a/src/tools/rustfmt/tests/target/issue-3585/extern_crate.rs b/src/tools/rustfmt/tests/target/issue-3585/extern_crate.rs
new file mode 100644
index 000000000..dc7c9e024
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3585/extern_crate.rs
@@ -0,0 +1,10 @@
+// rustfmt-inline_attribute_width: 100
+
+#[macro_use] extern crate static_assertions;
+
+#[cfg(unix)] extern crate static_assertions;
+
+// a comment before the attribute
+#[macro_use]
+// some comment after
+extern crate static_assertions;
diff --git a/src/tools/rustfmt/tests/target/issue-3585/reorder_imports_disabled.rs b/src/tools/rustfmt/tests/target/issue-3585/reorder_imports_disabled.rs
new file mode 100644
index 000000000..f9637729b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3585/reorder_imports_disabled.rs
@@ -0,0 +1,8 @@
+// rustfmt-inline_attribute_width: 100
+// rustfmt-reorder_imports: false
+
+#[cfg(unix)] extern crate crateb;
+#[cfg(unix)] extern crate cratea;
+
+#[cfg(unix)] use crateb;
+#[cfg(unix)] use cratea;
diff --git a/src/tools/rustfmt/tests/target/issue-3585/reorder_imports_enabled.rs b/src/tools/rustfmt/tests/target/issue-3585/reorder_imports_enabled.rs
new file mode 100644
index 000000000..d040d0ed3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3585/reorder_imports_enabled.rs
@@ -0,0 +1,8 @@
+// rustfmt-inline_attribute_width: 100
+// rustfmt-reorder_imports: true
+
+#[cfg(unix)] extern crate cratea;
+#[cfg(unix)] extern crate crateb;
+
+#[cfg(unix)] use cratea;
+#[cfg(unix)] use crateb;
diff --git a/src/tools/rustfmt/tests/target/issue-3585/use.rs b/src/tools/rustfmt/tests/target/issue-3585/use.rs
new file mode 100644
index 000000000..c76a9eaac
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3585/use.rs
@@ -0,0 +1,5 @@
+// rustfmt-inline_attribute_width: 100
+
+#[macro_use] use static_assertions;
+
+#[cfg(unix)] use static_assertions;
diff --git a/src/tools/rustfmt/tests/target/issue-3595.rs b/src/tools/rustfmt/tests/target/issue-3595.rs
new file mode 100644
index 000000000..3e06538a4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3595.rs
@@ -0,0 +1,4 @@
+struct ReqMsg();
+struct RespMsg();
+
+pub type TestType = fn() -> (ReqMsg, fn(RespMsg) -> ());
diff --git a/src/tools/rustfmt/tests/target/issue-3601.rs b/src/tools/rustfmt/tests/target/issue-3601.rs
new file mode 100644
index 000000000..c86ca24e7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3601.rs
@@ -0,0 +1,11 @@
+#![feature(const_generics)]
+
+trait A {
+ fn foo(&self);
+}
+
+pub struct B<const N: usize>([usize; N]);
+
+impl<const N: usize> A for B<{ N }> {
+ fn foo(&self) {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3614/version_one.rs b/src/tools/rustfmt/tests/target/issue-3614/version_one.rs
new file mode 100644
index 000000000..8ab283047
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3614/version_one.rs
@@ -0,0 +1,15 @@
+// rustfmt-version: One
+
+fn main() {
+ let toto = || {
+ if true {
+ 42
+ } else {
+ 24
+ }
+ };
+
+ {
+ T
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3614/version_two.rs b/src/tools/rustfmt/tests/target/issue-3614/version_two.rs
new file mode 100644
index 000000000..5d6f8e7a3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3614/version_two.rs
@@ -0,0 +1,8 @@
+// rustfmt-version: Two
+
+fn main() {
+ let toto = || {
+ if true { 42 } else { 24 }
+ };
+ { T }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3636.rs b/src/tools/rustfmt/tests/target/issue-3636.rs
new file mode 100644
index 000000000..d467ed738
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3636.rs
@@ -0,0 +1,8 @@
+// rustfmt-file_lines: [{"file":"tests/source/issue-3636.rs","range":[4,7]},{"file":"tests/target/issue-3636.rs","range":[3,6]}]
+
+fn foo() {
+ let x = 42;
+ let y = 42;
+ let z = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ let z = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3639.rs b/src/tools/rustfmt/tests/target/issue-3639.rs
new file mode 100644
index 000000000..e8fddce2d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3639.rs
@@ -0,0 +1,5 @@
+trait Foo {}
+struct Bar {}
+struct Bax;
+struct Baz(String);
+impl Foo for Bar {}
diff --git a/src/tools/rustfmt/tests/target/issue-3645.rs b/src/tools/rustfmt/tests/target/issue-3645.rs
new file mode 100644
index 000000000..14bf96e63
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3645.rs
@@ -0,0 +1,3 @@
+mod x {
+ use super::self as x;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3651.rs b/src/tools/rustfmt/tests/target/issue-3651.rs
new file mode 100644
index 000000000..4a95a1712
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3651.rs
@@ -0,0 +1,7 @@
+fn f() -> Box<
+ dyn FnMut() -> Thing<
+ WithType = LongItemName,
+ Error = LONGLONGLONGLONGLONGONGEvenLongerErrorNameLongerLonger,
+ >,
+> {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3665/lib.rs b/src/tools/rustfmt/tests/target/issue-3665/lib.rs
new file mode 100644
index 000000000..c313f3203
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3665/lib.rs
@@ -0,0 +1,31 @@
+#![rustfmt::skip::attributes(skip_mod_attr)]
+
+mod sub_mod;
+
+#[rustfmt::skip::attributes(other, skip_attr)]
+fn main() {
+ #[other(should,
+skip,
+ this, format)]
+ struct S {}
+
+ #[skip_attr(should, skip,
+this, format,too)]
+ fn doesnt_mater() {}
+
+ #[skip_mod_attr(should, skip,
+this, format,
+ enerywhere)]
+ fn more() {}
+
+ #[not_skip(not, skip, me)]
+ struct B {}
+}
+
+#[other(should, not, skip, this, format, here)]
+fn foo() {}
+
+#[skip_mod_attr(should, skip,
+this, format,in, master,
+ and, sub, module)]
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3665/not_skip_attribute.rs b/src/tools/rustfmt/tests/target/issue-3665/not_skip_attribute.rs
new file mode 100644
index 000000000..a4e8b9487
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3665/not_skip_attribute.rs
@@ -0,0 +1,4 @@
+#![this::is::not::skip::attribute(ouch)]
+
+#[ouch(not, skip, me)]
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3665/sub_mod.rs b/src/tools/rustfmt/tests/target/issue-3665/sub_mod.rs
new file mode 100644
index 000000000..30a2b0fd9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3665/sub_mod.rs
@@ -0,0 +1,13 @@
+#[rustfmt::skip::attributes(more_skip)]
+#[more_skip(should,
+ skip,
+this, format)]
+fn foo() {}
+
+#[skip_mod_attr(should, skip,
+this, format,in, master,
+ and, sub, module)]
+fn bar() {}
+
+#[skip_attr(should, not, skip, this, attribute, here)]
+fn baz() {}
diff --git a/src/tools/rustfmt/tests/target/issue-3672.rs b/src/tools/rustfmt/tests/target/issue-3672.rs
new file mode 100644
index 000000000..8cc3d3fd2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3672.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let x = 5;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3675.rs b/src/tools/rustfmt/tests/target/issue-3675.rs
new file mode 100644
index 000000000..62d986e77
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3675.rs
@@ -0,0 +1,6 @@
+fn main() {
+ println!(
+ "{}", // comment
+ 111
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3701/one.rs b/src/tools/rustfmt/tests/target/issue-3701/one.rs
new file mode 100644
index 000000000..9d1ef9eed
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3701/one.rs
@@ -0,0 +1,12 @@
+// rustfmt-version: One
+
+fn build_sorted_static_get_entry_names(
+ mut entries: Vec<(u8, &'static str)>,
+) -> (impl Fn(
+ AlphabeticalTraversal,
+ Box<dyn dirents_sink::Sink<AlphabeticalTraversal>>,
+) -> BoxFuture<'static, Result<Box<dyn dirents_sink::Sealed>, Status>>
+ + Send
+ + Sync
+ + 'static) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3701/two.rs b/src/tools/rustfmt/tests/target/issue-3701/two.rs
new file mode 100644
index 000000000..62ffc9d82
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3701/two.rs
@@ -0,0 +1,14 @@
+// rustfmt-version: Two
+
+fn build_sorted_static_get_entry_names(
+ mut entries: Vec<(u8, &'static str)>,
+) -> (
+ impl Fn(
+ AlphabeticalTraversal,
+ Box<dyn dirents_sink::Sink<AlphabeticalTraversal>>,
+ ) -> BoxFuture<'static, Result<Box<dyn dirents_sink::Sealed>, Status>>
+ + Send
+ + Sync
+ + 'static
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3709.rs b/src/tools/rustfmt/tests/target/issue-3709.rs
new file mode 100644
index 000000000..0f3eae048
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3709.rs
@@ -0,0 +1,10 @@
+// rustfmt-edition: 2018
+
+macro_rules! token {
+ ($t:tt) => {};
+}
+
+fn main() {
+ token!(dyn);
+ token!(dyn);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3711.rs b/src/tools/rustfmt/tests/target/issue-3711.rs
new file mode 100644
index 000000000..62d986e77
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3711.rs
@@ -0,0 +1,6 @@
+fn main() {
+ println!(
+ "{}", // comment
+ 111
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3717.rs b/src/tools/rustfmt/tests/target/issue-3717.rs
new file mode 100644
index 000000000..b769cd3ec
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3717.rs
@@ -0,0 +1,7 @@
+fn main() {
+ {
+ #[rustfmt::skip]
+ let _ =
+ [1];
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3718.rs b/src/tools/rustfmt/tests/target/issue-3718.rs
new file mode 100644
index 000000000..8ad21ffc7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3718.rs
@@ -0,0 +1,7 @@
+fn main() {
+ let x: &[i32] = &[2, 2];
+ match x {
+ [_a, _] => println!("Wrong username or password"),
+ _ => println!("Logged in"),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3740.rs b/src/tools/rustfmt/tests/target/issue-3740.rs
new file mode 100644
index 000000000..995a6bee3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3740.rs
@@ -0,0 +1,8 @@
+impl<T, const SIZE: usize> IntoNormalized for Vector<T, { SIZE }>
+where
+ Vector<T, { SIZE }>: Div<Vector<T, { SIZE }>>,
+ for<'a> &'a Vector<T, { SIZE }>: IntoLength<Output = T>,
+{
+ type Output = Vector<T, { SIZE }>;
+ fn into_normalized(self) -> Self::Output {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3741.rs b/src/tools/rustfmt/tests/target/issue-3741.rs
new file mode 100644
index 000000000..34d22dc91
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3741.rs
@@ -0,0 +1,5 @@
+pub enum PublishedFileVisibility {
+ Public = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityPublic as i32,
+ FriendsOnly = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityFriendsOnly as i32,
+ Private = sys::ERemoteStoragePublishedFileVisibility_k_ERemoteStoragePublishedFileVisibilityPrivate as i32,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3750.rs b/src/tools/rustfmt/tests/target/issue-3750.rs
new file mode 100644
index 000000000..6875f8d38
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3750.rs
@@ -0,0 +1,15 @@
+// rustfmt-imports_granularity: Crate
+
+pub mod foo {
+ pub mod bar {
+ pub struct Bar;
+ }
+
+ pub fn bar() {}
+}
+
+use foo::{bar, bar::Bar};
+
+fn main() {
+ bar();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3751.rs b/src/tools/rustfmt/tests/target/issue-3751.rs
new file mode 100644
index 000000000..e5a03956e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3751.rs
@@ -0,0 +1,10 @@
+// rustfmt-format_code_in_doc_comments: true
+
+//! Empty pound line
+//!
+//! ```rust
+//! #
+//! # fn main() {
+//! foo();
+//! # }
+//! ```
diff --git a/src/tools/rustfmt/tests/target/issue-3759.rs b/src/tools/rustfmt/tests/target/issue-3759.rs
new file mode 100644
index 000000000..b53f5391a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3759.rs
@@ -0,0 +1,27 @@
+fn main() {
+ let Test {
+ #[cfg(feature = "test")]
+ x,
+ } = Test {
+ #[cfg(feature = "test")]
+ x: 1,
+ };
+
+ let Test {
+ #[cfg(feature = "test")]
+ // comment
+ x,
+ } = Test {
+ #[cfg(feature = "test")]
+ x: 1,
+ };
+
+ let Test {
+ // comment
+ #[cfg(feature = "test")]
+ x,
+ } = Test {
+ #[cfg(feature = "test")]
+ x: 1,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3779/ice.rs b/src/tools/rustfmt/tests/target/issue-3779/ice.rs
new file mode 100644
index 000000000..cde21412d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3779/ice.rs
@@ -0,0 +1,3 @@
+pub fn bar() {
+ 1x;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3779/lib.rs b/src/tools/rustfmt/tests/target/issue-3779/lib.rs
new file mode 100644
index 000000000..a5673a4db
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3779/lib.rs
@@ -0,0 +1,9 @@
+// rustfmt-unstable: true
+// rustfmt-config: issue-3779.toml
+
+#[path = "ice.rs"]
+mod ice;
+
+fn foo() {
+ println!("abc");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3786.rs b/src/tools/rustfmt/tests/target/issue-3786.rs
new file mode 100644
index 000000000..d90cba15d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3786.rs
@@ -0,0 +1,9 @@
+fn main() {
+ let _ = r#"
+this is a very long string exceeded maximum width in this case maximum 100. (current this line width is about 115)
+"#;
+
+ let _with_newline = r#"
+this is a very long string exceeded maximum width in this case maximum 100. (current this line width is about 115)
+"#;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3787.rs b/src/tools/rustfmt/tests/target/issue-3787.rs
new file mode 100644
index 000000000..32cf7e3d7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3787.rs
@@ -0,0 +1,13 @@
+// rustfmt-wrap_comments: true
+
+//! URLs in items
+//! * [This is a link with a very loooooooooooooooooooooooooooooooooooooooooong URL.](https://example.com/This/is/a/link/with/a/very/loooooooooooooooooooooooooooooooooooooooooong/URL)
+//! * This is a [link](https://example.com/This/is/a/link/with/a/very/loooooooooooooooooooooooooooooooooooooooooong/URL)
+//! with a very loooooooooooooooooooooooooooooooooooooooooong URL.
+//! * there is no link here: In hac habitasse platea dictumst. Maecenas in
+//! ligula. Duis tincidunt odio sollicitudin quam. Nullam non mauris.
+//! Phasellus lacinia, velit sit amet bibendum euismod, leo diam interdum
+//! ligula, eu scelerisque sem purus in tellus.
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3815.rs b/src/tools/rustfmt/tests/target/issue-3815.rs
new file mode 100644
index 000000000..eff27e2de
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3815.rs
@@ -0,0 +1,4 @@
+pub type Type = impl Deref<Target = i8>;
+
+pub type Type =
+ impl VeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeryLongType<Target = i8>;
diff --git a/src/tools/rustfmt/tests/target/issue-3840/version-one_hard-tabs.rs b/src/tools/rustfmt/tests/target/issue-3840/version-one_hard-tabs.rs
new file mode 100644
index 000000000..4aa905ce9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3840/version-one_hard-tabs.rs
@@ -0,0 +1,25 @@
+// rustfmt-hard_tabs: true
+
+impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+ > Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+ > Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3840/version-one_soft-tabs.rs b/src/tools/rustfmt/tests/target/issue-3840/version-one_soft-tabs.rs
new file mode 100644
index 000000000..099e68018
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3840/version-one_soft-tabs.rs
@@ -0,0 +1,23 @@
+impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+ > Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+ > Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3840/version-two_hard-tabs.rs b/src/tools/rustfmt/tests/target/issue-3840/version-two_hard-tabs.rs
new file mode 100644
index 000000000..084db3d14
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3840/version-two_hard-tabs.rs
@@ -0,0 +1,26 @@
+// rustfmt-hard_tabs: true
+// rustfmt-version: Two
+
+impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+> Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+ > Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3840/version-two_soft-tabs.rs b/src/tools/rustfmt/tests/target/issue-3840/version-two_soft-tabs.rs
new file mode 100644
index 000000000..bc59b0baa
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3840/version-two_soft-tabs.rs
@@ -0,0 +1,25 @@
+// rustfmt-version: Two
+
+impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+> Widget2 for WidgetEventLifter<Target, A, B>
+{
+ type Ctx = C;
+ type Event = Vec<Target>;
+}
+
+mod foo {
+ impl<
+ Target: FromEvent<A> + FromEvent<B>,
+ A: Widget2<Ctx = C>,
+ B: Widget2<Ctx = C>,
+ C: for<'a> CtxFamily<'a>,
+ > Widget2 for WidgetEventLifter<Target, A, B>
+ {
+ type Ctx = C;
+ type Event = Vec<Target>;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3845.rs b/src/tools/rustfmt/tests/target/issue-3845.rs
new file mode 100644
index 000000000..877c05b86
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3845.rs
@@ -0,0 +1,8 @@
+fn main() {
+ || {
+ #[allow(deprecated)]
+ {
+ u8::max_value()
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3882.rs b/src/tools/rustfmt/tests/target/issue-3882.rs
new file mode 100644
index 000000000..5eb442af9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3882.rs
@@ -0,0 +1,4 @@
+// rustfmt-version: Two
+fn bar(_t: T, // bar
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-3974.rs b/src/tools/rustfmt/tests/target/issue-3974.rs
new file mode 100644
index 000000000..a9f992ebd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-3974.rs
@@ -0,0 +1,10 @@
+fn emulate_foreign_item() {
+ match link_name {
+ // A comment here will duplicate the attribute
+ #[rustfmt::skip]
+ | "pthread_mutexattr_init"
+ | "pthread_mutexattr_settype"
+ | "pthread_mutex_init"
+ => {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4018.rs b/src/tools/rustfmt/tests/target/issue-4018.rs
new file mode 100644
index 000000000..cef3be061
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4018.rs
@@ -0,0 +1,11 @@
+fn main() {
+ /* extra comment */
+}
+
+fn main() {
+ println!("");
+ // comment 1
+ // comment 2
+ // comment 3
+ // comment 4
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4020.rs b/src/tools/rustfmt/tests/target/issue-4020.rs
new file mode 100644
index 000000000..f29ecec02
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4020.rs
@@ -0,0 +1,9 @@
+// rustfmt-wrap_comments: true
+
+/** foobar */
+const foo1: u32 = 0;
+
+/**
+ * foobar
+ */
+const foo2: u32 = 0;
diff --git a/src/tools/rustfmt/tests/target/issue-4029.rs b/src/tools/rustfmt/tests/target/issue-4029.rs
new file mode 100644
index 000000000..314d01805
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4029.rs
@@ -0,0 +1,7 @@
+// issue #4029
+#[derive(Debug, Clone, Default Hash)]
+struct S;
+
+// issue #3898
+#[derive(Debug, Clone, Default,, Hash)]
+struct T;
diff --git a/src/tools/rustfmt/tests/target/issue-4036/one.rs b/src/tools/rustfmt/tests/target/issue-4036/one.rs
new file mode 100644
index 000000000..54e490b7f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4036/one.rs
@@ -0,0 +1,12 @@
+// rustfmt-format_strings: true
+
+macro_rules! test {
+ () => {
+ fn from() {
+ None.expect(
+ "We asserted that `buffer.len()` is exactly `$n` so we can expect \
+ `ApInt::from_iter` to be successful.",
+ )
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4036/three.rs b/src/tools/rustfmt/tests/target/issue-4036/three.rs
new file mode 100644
index 000000000..394dc8633
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4036/three.rs
@@ -0,0 +1,17 @@
+// rustfmt-format_strings: true
+// rustfmt-hard_tabs: true
+
+macro_rules! test {
+ () => {
+ fn from() {
+ None.expect(
+ "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor \
+ incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis \
+ nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. \
+ Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu \
+ fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in \
+ culpa qui officia deserunt mollit anim id est laborum.",
+ )
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4036/two.rs b/src/tools/rustfmt/tests/target/issue-4036/two.rs
new file mode 100644
index 000000000..01cafa76b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4036/two.rs
@@ -0,0 +1,16 @@
+// rustfmt-format_strings: true
+
+macro_rules! test {
+ () => {
+ fn from() {
+ None.expect(
+ "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor \
+ incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis \
+ nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. \
+ Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu \
+ fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in \
+ culpa qui officia deserunt mollit anim id est laborum.",
+ )
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4068.rs b/src/tools/rustfmt/tests/target/issue-4068.rs
new file mode 100644
index 000000000..cd8a1f276
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4068.rs
@@ -0,0 +1,3 @@
+fn main() {
+ extern "C" fn packet_records_options_impl_layout_length_encoding_option_len_multiplier();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4079.rs b/src/tools/rustfmt/tests/target/issue-4079.rs
new file mode 100644
index 000000000..1871c5b8a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4079.rs
@@ -0,0 +1,11 @@
+// rustfmt-wrap_comments: true
+
+/*!
+ * Lorem ipsum dolor sit amet, consectetur adipiscing elit. In lacinia
+ * ullamcorper lorem, non hendrerit enim convallis ut. Curabitur id sem
+ * volutpat
+ */
+
+/*! Lorem ipsum dolor sit amet, consectetur adipiscing elit. In lacinia
+ * ullamcorper lorem, non hendrerit enim convallis ut. Curabitur id sem
+ * volutpat */
diff --git a/src/tools/rustfmt/tests/target/issue-4115.rs b/src/tools/rustfmt/tests/target/issue-4115.rs
new file mode 100644
index 000000000..0dd7bdbd0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4115.rs
@@ -0,0 +1,8 @@
+#[derive(
+ A,
+ B,
+ C,
+ D,
+ // E,
+)]
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-4120.rs b/src/tools/rustfmt/tests/target/issue-4120.rs
new file mode 100644
index 000000000..a7d461dcf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4120.rs
@@ -0,0 +1,85 @@
+fn main() {
+ let x = if true {
+ 1
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let x = if true {
+ 1
+ /* In if */
+ } else {
+ 0
+ /* In else */
+ };
+
+ let z = if true {
+ if true {
+ 1
+
+ // In if level 2
+ } else {
+ 2
+ }
+ } else {
+ 3
+ };
+
+ let a = if true {
+ 1
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let a = if true {
+ 1
+
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let b = if true {
+ 1
+
+ // In if
+ } else {
+ 0
+ // In else
+ };
+
+ let c = if true {
+ 1
+
+ // In if
+ } else {
+ 0
+ // In else
+ };
+ for i in 0..2 {
+ println!("Something");
+ // In for
+ }
+
+ for i in 0..2 {
+ println!("Something");
+ /* In for */
+ }
+
+ extern "C" {
+ fn first();
+
+ // In foreign mod
+ }
+
+ extern "C" {
+ fn first();
+
+ /* In foreign mod */
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4152.rs b/src/tools/rustfmt/tests/target/issue-4152.rs
new file mode 100644
index 000000000..80f9ff5e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4152.rs
@@ -0,0 +1,18 @@
+// rustfmt-hard_tabs: true
+
+macro_rules! bit {
+ ($bool:expr) => {
+ if $bool {
+ 1;
+ 1
+ } else {
+ 0;
+ 0
+ }
+ };
+}
+macro_rules! add_one {
+ ($vec:expr) => {{
+ $vec.push(1);
+ }};
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4159.rs b/src/tools/rustfmt/tests/target/issue-4159.rs
new file mode 100644
index 000000000..2f8cf20da
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4159.rs
@@ -0,0 +1,18 @@
+extern "C" {
+ type A: Ord;
+
+ type A<'a>
+ where
+ 'a: 'static;
+
+ type A<T: Ord>
+ where
+ T: 'static;
+
+ type A = u8;
+
+ type A<'a: 'static, T: Ord + 'static>: Eq + PartialEq
+ where
+ T: 'static + Copy,
+ = Vec<u8>;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4243.rs b/src/tools/rustfmt/tests/target/issue-4243.rs
new file mode 100644
index 000000000..67fa1d2a3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4243.rs
@@ -0,0 +1,28 @@
+fn main() {
+ type A: AA /*AA*/ + /*AB*/ AB + AC = AA
+ /*AA*/
+ +
+ /*AB*/
+ AB
+ + AC;
+
+ type B: BA /*BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA*/
+ + /*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/ BB
+ + BC = BA /*BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA*/
+ + /*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/ BB
+ + BC;
+
+ type C: CA // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ +
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ CB
+ + CC = CA // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ // CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ +
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ // CBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ CB
+ + CC;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4244.rs b/src/tools/rustfmt/tests/target/issue-4244.rs
new file mode 100644
index 000000000..8958ba99e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4244.rs
@@ -0,0 +1,20 @@
+pub struct SS {}
+
+pub type A /* A Comment */ = SS;
+
+pub type B // Comment
+ // B
+ = SS;
+
+pub type C
+ /* Comment C */
+ = SS;
+
+pub trait D<T> {
+ type E /* Comment E */ = SS;
+}
+
+type F<'a: 'static, T: Ord + 'static>: Eq + PartialEq
+where
+ T: 'static + Copy, /* x */
+= Vec<u8>;
diff --git a/src/tools/rustfmt/tests/target/issue-4245.rs b/src/tools/rustfmt/tests/target/issue-4245.rs
new file mode 100644
index 000000000..e3d40eb42
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4245.rs
@@ -0,0 +1,34 @@
+fn a(
+ a: & // Comment
+ // Another comment
+ 'a File,
+) {
+}
+
+fn b(b: & /* Another Comment */ 'a File) {}
+
+fn c(c: &'a /*Comment */ mut /*Comment */ File) {}
+
+fn d(
+ c: & // Comment
+ 'b // Multi Line
+ // Comment
+ mut // Multi Line
+ // Comment
+ File,
+) {
+}
+
+fn e(
+ c: & // Comment
+ File,
+) {
+}
+
+fn d(
+ c: & // Comment
+ mut // Multi Line
+ // Comment
+ File,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4310.rs b/src/tools/rustfmt/tests/target/issue-4310.rs
new file mode 100644
index 000000000..6cf494fc5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4310.rs
@@ -0,0 +1,9 @@
+#![feature(const_generics)]
+
+fn foo<
+ const N: [u8; {
+ struct Inner<'a>(&'a ());
+ 3
+ }],
+>() {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4312.rs b/src/tools/rustfmt/tests/target/issue-4312.rs
new file mode 100644
index 000000000..b36b0efdb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4312.rs
@@ -0,0 +1,22 @@
+// issue 4312
+fn main() {
+ /* " */
+ println!("Hello, world!");
+ /* abc " */
+ println!("Hello, world!");
+ /* " abc */
+ println!("Hello, world!");
+ let y = 4;
+ let x = match 1 + y == 3 {
+ True => 3,
+ False => 4,
+ /* " unreachable */
+ };
+}
+
+// issue 4806
+enum X {
+ A,
+ B,
+ /*"*/
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4313.rs b/src/tools/rustfmt/tests/target/issue-4313.rs
new file mode 100644
index 000000000..c390ee6ba
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4313.rs
@@ -0,0 +1,5 @@
+extern "C" {
+ fn f() {
+ fn g() {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4382.rs b/src/tools/rustfmt/tests/target/issue-4382.rs
new file mode 100644
index 000000000..740fa9bfe
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4382.rs
@@ -0,0 +1,10 @@
+pub const NAME_MAX: usize = {
+ #[cfg(target_os = "linux")]
+ {
+ 1024
+ }
+ #[cfg(target_os = "freebsd")]
+ {
+ 255
+ }
+};
diff --git a/src/tools/rustfmt/tests/target/issue-4398.rs b/src/tools/rustfmt/tests/target/issue-4398.rs
new file mode 100644
index 000000000..2ca894528
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4398.rs
@@ -0,0 +1,19 @@
+impl Struct {
+ /// Documentation for `foo`
+ #[rustfmt::skip] // comment on why use a skip here
+ pub fn foo(&self) {}
+}
+
+impl Struct {
+ /// Documentation for `foo`
+ #[rustfmt::skip] // comment on why use a skip here
+ pub fn foo(&self) {}
+}
+
+/// Documentation for `Struct`
+#[rustfmt::skip] // comment
+impl Struct {
+ /// Documentation for `foo`
+ #[rustfmt::skip] // comment on why use a skip here
+ pub fn foo(&self) {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4427.rs b/src/tools/rustfmt/tests/target/issue-4427.rs
new file mode 100644
index 000000000..c8a37ead8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4427.rs
@@ -0,0 +1,30 @@
+const A: usize =
+ // Some constant
+ 2;
+
+const B: usize =
+ /* constant */
+ 3;
+
+const C: usize = /* foo */ 5;
+
+const D: usize = // baz
+ /* Some constant */
+ /* ba */
+ {
+ 3
+ // foo
+ };
+const E: usize = /* foo */ 5;
+const F: usize = { 7 };
+const G: usize =
+ /* foooooooooooooooooooooooooooooooooooooooooooooooooooooooo0000000000000000xx00 */
+ 5;
+const H: usize = /* asdfasdf */
+ match G > 1 {
+ true => 1,
+ false => 3,
+ };
+
+pub static FOO_BAR: Vec<u8> = //f
+ { vec![] };
diff --git a/src/tools/rustfmt/tests/target/issue-447.rs b/src/tools/rustfmt/tests/target/issue-447.rs
new file mode 100644
index 000000000..d41cdb65c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-447.rs
@@ -0,0 +1,40 @@
+// rustfmt-normalize_comments: true
+
+fn main() {
+ if
+ // shouldn't be dropped
+ // shouldn't be dropped
+ cond
+ // shouldn't be dropped
+ // shouldn't be dropped
+ {
+ }
+ // shouldn't be dropped
+ // shouldn't be dropped
+ else
+ // shouldn't be dropped
+ // shouldn't be dropped
+ if
+ // shouldn't be dropped
+ // shouldn't be dropped
+ cond
+ // shouldn't be dropped
+ // shouldn't be dropped
+ {
+ }
+ // shouldn't be dropped
+ // shouldn't be dropped
+ else
+ // shouldn't be dropped
+ // shouldn't be dropped
+ {
+ }
+
+ if
+ // shouldn't be dropped
+ // shouldn't be dropped
+ let Some(x) = y
+ // shouldn't be dropped
+ // shouldn't be dropped
+ {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4530.rs b/src/tools/rustfmt/tests/target/issue-4530.rs
new file mode 100644
index 000000000..296dc559a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4530.rs
@@ -0,0 +1,9 @@
+// rustfmt-version: Two
+fn main() {
+ let [
+ aaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ cccccccccccccccccccccccccc,
+ ddddddddddddddddddddddddd,
+ ] = panic!();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4577.rs b/src/tools/rustfmt/tests/target/issue-4577.rs
new file mode 100644
index 000000000..1bd9eb6b8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4577.rs
@@ -0,0 +1,15 @@
+fn main() {
+ let s: String = "ABAABBAA"
+ .chars()
+ .filter(|c| if *c == 'A' { true } else { false })
+ .map(|c| -> char {
+ if c == 'A' {
+ '0'
+ } else {
+ '1'
+ }
+ })
+ .collect();
+
+ println!("{}", s);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4603.rs b/src/tools/rustfmt/tests/target/issue-4603.rs
new file mode 100644
index 000000000..e8c368a24
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4603.rs
@@ -0,0 +1,47 @@
+// Formatting when original macro snippet is used
+
+// Original issue #4603 code
+#![feature(or_patterns)]
+macro_rules! t_or_f {
+ () => {
+ (true // some comment
+ | false)
+ };
+}
+
+// Other test case variations
+macro_rules! RULES {
+ () => {
+ (
+ xxxxxxx // COMMENT
+ | yyyyyyy
+ )
+ };
+}
+macro_rules! RULES {
+ () => {
+ (xxxxxxx // COMMENT
+ | yyyyyyy)
+ };
+}
+
+fn main() {
+ macro_rules! RULES {
+ () => {
+ (xxxxxxx // COMMENT
+ | yyyyyyy)
+ };
+ }
+}
+
+macro_rules! RULES {
+ () => {
+ (xxxxxxx /* COMMENT */ | yyyyyyy)
+ };
+}
+macro_rules! RULES {
+ () => {
+ (xxxxxxx /* COMMENT */
+ | yyyyyyy)
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4615/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-4615/minimum_example.rs
new file mode 100644
index 000000000..223b89b81
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4615/minimum_example.rs
@@ -0,0 +1,5 @@
+info!(
+ //debug
+ "{}: sending function_code={:04x} data={:04x} crc=0x{:04X} data={:02X?}",
+ self.name, function_code, data, crc, output_cmd
+);
diff --git a/src/tools/rustfmt/tests/target/issue-4646.rs b/src/tools/rustfmt/tests/target/issue-4646.rs
new file mode 100644
index 000000000..4e149399f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4646.rs
@@ -0,0 +1,20 @@
+trait Foo {
+ fn bar(&self)
+ // where
+ // Self: Bar
+ ;
+}
+
+trait Foo {
+ fn bar(&self)
+ // where
+ // Self: Bar
+ ;
+}
+
+trait Foo {
+ fn bar(&self)
+ // where
+ // Self: Bar
+ ;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4656/format_me_please.rs b/src/tools/rustfmt/tests/target/issue-4656/format_me_please.rs
new file mode 100644
index 000000000..421e195a2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4656/format_me_please.rs
@@ -0,0 +1 @@
+pub fn hello() {}
diff --git a/src/tools/rustfmt/tests/target/issue-4656/lib.rs b/src/tools/rustfmt/tests/target/issue-4656/lib.rs
new file mode 100644
index 000000000..5dac91b8a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4656/lib.rs
@@ -0,0 +1,7 @@
+extern crate cfg_if;
+
+cfg_if::cfg_if! {
+ if #[cfg(target_family = "unix")] {
+ mod format_me_please;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4656/lib2.rs b/src/tools/rustfmt/tests/target/issue-4656/lib2.rs
new file mode 100644
index 000000000..b17fffc58
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4656/lib2.rs
@@ -0,0 +1,3 @@
+its_a_macro! {
+ // Contents
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4791/buggy.rs b/src/tools/rustfmt/tests/target/issue-4791/buggy.rs
new file mode 100644
index 000000000..fff58be99
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4791/buggy.rs
@@ -0,0 +1,14 @@
+// rustfmt-struct_field_align_threshold: 30
+// rustfmt-trailing_comma: Never
+
+struct Foo {
+ group_a: u8,
+
+ group_b: u8
+}
+
+struct Bar {
+ group_a: u8,
+
+ group_b: u8
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4791/issue_4928.rs b/src/tools/rustfmt/tests/target/issue-4791/issue_4928.rs
new file mode 100644
index 000000000..588656b53
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4791/issue_4928.rs
@@ -0,0 +1,70 @@
+// rustfmt-brace_style: SameLineWhere
+// rustfmt-comment_width: 100
+// rustfmt-edition: 2018
+// rustfmt-fn_args_layout: Compressed
+// rustfmt-hard_tabs: false
+// rustfmt-match_block_trailing_comma: true
+// rustfmt-max_width: 100
+// rustfmt-merge_derives: false
+// rustfmt-newline_style: Unix
+// rustfmt-normalize_doc_attributes: true
+// rustfmt-overflow_delimited_expr: true
+// rustfmt-reorder_imports: false
+// rustfmt-reorder_modules: true
+// rustfmt-struct_field_align_threshold: 20
+// rustfmt-tab_spaces: 4
+// rustfmt-trailing_comma: Never
+// rustfmt-use_small_heuristics: Max
+// rustfmt-use_try_shorthand: true
+// rustfmt-wrap_comments: true
+
+/// Lorem ipsum dolor sit amet.
+#[repr(C)]
+#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
+pub struct BufferAttr {
+ /* NOTE: Blah blah blah blah blah. */
+ /// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
+ /// ut labore et dolore magna aliqua. Morbi quis commodo odio aenean sed adipiscing. Nunc
+ /// congue nisi vitae suscipit tellus mauris a. Consectetur adipiscing elit pellentesque
+ /// habitant morbi tristique senectus.
+ pub foo: u32,
+
+ /// Elit eget gravida cum sociis natoque penatibus et magnis dis. Consequat semper viverra nam
+ /// libero. Accumsan in nisl nisi scelerisque eu. Pellentesque id nibh tortor id aliquet. Sed
+ /// velit dignissim sodales ut. Facilisis sed odio morbi quis commodo odio aenean sed. Et
+ /// ultrices neque ornare aenean euismod elementum. Condimentum lacinia quis vel eros donec ac
+ /// odio tempor.
+ ///
+ /// Lacinia at quis risus sed vulputate odio ut enim. Etiam erat velit scelerisque in dictum.
+ /// Nibh tellus molestie nunc non blandit massa enim nec. Nascetur ridiculus mus mauris vitae.
+ pub bar: u32,
+
+ /// Mi proin sed libero enim sed faucibus turpis. Amet consectetur adipiscing elit duis
+ /// tristique sollicitudin nibh sit amet. Congue quisque egestas diam in arcu cursus euismod
+ /// quis viverra. Cum sociis natoque penatibus et magnis dis parturient montes. Enim sit amet
+ /// venenatis urna cursus eget nunc scelerisque viverra. Cras semper auctor neque vitae tempus
+ /// quam pellentesque. Tortor posuere ac ut consequat semper viverra nam libero justo. Vitae
+ /// auctor eu augue ut lectus arcu bibendum at. Faucibus vitae aliquet nec ullamcorper sit amet
+ /// risus nullam. Maecenas accumsan lacus vel facilisis volutpat. Arcu non odio euismod
+ /// lacinia.
+ ///
+ /// [`FooBar::beep()`]: crate::foobar::FooBar::beep
+ /// [`FooBar::boop()`]: crate::foobar::FooBar::boop
+ /// [`foobar::BazBaq::BEEP_BOOP`]: crate::foobar::BazBaq::BEEP_BOOP
+ pub baz: u32,
+
+ /// Eu consequat ac felis donec et odio pellentesque diam. Ut eu sem integer vitae justo eget.
+ /// Consequat ac felis donec et odio pellentesque diam volutpat.
+ pub baq: u32,
+
+ /// Amet consectetur adipiscing elit pellentesque habitant. Ut morbi tincidunt augue interdum
+ /// velit euismod in pellentesque. Imperdiet sed euismod nisi porta lorem. Nec tincidunt
+ /// praesent semper feugiat. Facilisis leo vel fringilla est. Egestas diam in arcu cursus
+ /// euismod quis viverra. Sagittis eu volutpat odio facilisis mauris sit amet. Posuere morbi
+ /// leo urna molestie at.
+ ///
+ /// Pretium aenean pharetra magna ac. Nisl condimentum id venenatis a condimentum vitae. Semper
+ /// quis lectus nulla at volutpat diam ut venenatis tellus. Egestas tellus rutrum tellus
+ /// pellentesque eu tincidunt tortor aliquam.
+ pub foobar: u32
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4791/no_trailing_comma.rs b/src/tools/rustfmt/tests/target/issue-4791/no_trailing_comma.rs
new file mode 100644
index 000000000..4a3716396
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4791/no_trailing_comma.rs
@@ -0,0 +1,8 @@
+// rustfmt-struct_field_align_threshold: 0
+// rustfmt-trailing_comma: Never
+
+pub struct Baz {
+ group_a: u8,
+
+ group_b: u8
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4791/trailing_comma.rs b/src/tools/rustfmt/tests/target/issue-4791/trailing_comma.rs
new file mode 100644
index 000000000..29a224b3f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4791/trailing_comma.rs
@@ -0,0 +1,14 @@
+// rustfmt-struct_field_align_threshold: 30
+// rustfmt-trailing_comma: Always
+
+struct Foo {
+ group_a: u8,
+
+ group_b: u8,
+}
+
+struct Bar {
+ group_a: u8,
+
+ group_b: u8,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4816/lib.rs b/src/tools/rustfmt/tests/target/issue-4816/lib.rs
new file mode 100644
index 000000000..246e775e1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4816/lib.rs
@@ -0,0 +1,35 @@
+#![feature(const_generics_defaults)]
+struct Foo<const N: usize = 1, const N2: usize = 2>;
+struct Bar<const N: usize, const N2: usize = { N + 1 }>;
+struct Lots<
+ const N1BlahFooUwU: usize = { 10 + 28 + 1872 / 10 * 3 },
+ const N2SecondParamOhmyyy: usize = { N1BlahFooUwU / 2 + 10 * 2 },
+>;
+struct NamesRHard<const N: usize = { 1 + 1 + 1 + 1 + 1 + 1 }>;
+struct FooBar<
+ const LessThan100ButClose: usize = {
+ 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1
+ },
+>;
+struct FooBarrrrrrrr<
+ const N: usize = {
+ 13478234326456456444323871
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ + 1
+ },
+>;
diff --git a/src/tools/rustfmt/tests/target/issue-4908-2.rs b/src/tools/rustfmt/tests/target/issue-4908-2.rs
new file mode 100644
index 000000000..023b323cb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4908-2.rs
@@ -0,0 +1,20 @@
+#![feature(more_qualified_paths)]
+
+fn main() {
+ // destructure through a qualified path
+ let <Foo as A>::Assoc { br } = StructStruct { br: 2 };
+}
+
+struct StructStruct {
+ br: i8,
+}
+
+struct Foo;
+
+trait A {
+ type Assoc;
+}
+
+impl A for Foo {
+ type Assoc = StructStruct;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4908.rs b/src/tools/rustfmt/tests/target/issue-4908.rs
new file mode 100644
index 000000000..ac5357abe
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4908.rs
@@ -0,0 +1,34 @@
+#![feature(more_qualified_paths)]
+
+mod foo_bar {
+ pub enum Example {
+ Example1 {},
+ Example2 {},
+ }
+}
+
+fn main() {
+ foo!(crate::foo_bar::Example, Example1);
+
+ let i1 = foo_bar::Example::Example1 {};
+
+ assert_eq!(i1.foo_example(), 1);
+
+ let i2 = foo_bar::Example::Example2 {};
+
+ assert_eq!(i2.foo_example(), 2);
+}
+
+#[macro_export]
+macro_rules! foo {
+ ($struct:path, $variant:ident) => {
+ impl $struct {
+ pub fn foo_example(&self) -> i32 {
+ match self {
+ <$struct>::$variant { .. } => 1,
+ _ => 2,
+ }
+ }
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct.rs b/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct.rs
new file mode 100644
index 000000000..072cf2f66
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct.rs
@@ -0,0 +1,38 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+ c: i32,
+}
+
+fn test(x: X) {
+ let d = {
+ let e = {
+ let f = {
+ let g = {
+ let h = {
+ let i = {
+ let j = {
+ matches!(
+ x,
+ X {
+ a: 1_000,
+ b: 1_000,
+ ..
+ }
+ )
+ };
+ j
+ };
+ i
+ };
+ h
+ };
+ g
+ };
+ f
+ };
+ e
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_long_field_names.rs b/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_long_field_names.rs
new file mode 100644
index 000000000..c7bc7f729
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_long_field_names.rs
@@ -0,0 +1,44 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ really_really_long_field_a: i32,
+ really_really_really_long_field_b: i32,
+ really_really_really_really_long_field_c: i32,
+ really_really_really_really_really_long_field_d: i32,
+ really_really_really_really_really_really_long_field_e: i32,
+ f: i32,
+}
+
+fn test(x: X) {
+ let d = {
+ let e = {
+ let f = {
+ let g = {
+ let h = {
+ let i = {
+ let j = {
+ matches!(
+ x,
+ X {
+ really_really_long_field_a: 10,
+ really_really_really_long_field_b: 10,
+ really_really_really_really_long_field_c: 10,
+ really_really_really_really_really_long_field_d: 10,
+ really_really_really_really_really_really_long_field_e: 10,
+ ..
+ }
+ )
+ };
+ j
+ };
+ i
+ };
+ h
+ };
+ g
+ };
+ f
+ };
+ e
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_many_fields.rs b/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_many_fields.rs
new file mode 100644
index 000000000..697931625
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/deeply_nested_struct_with_many_fields.rs
@@ -0,0 +1,54 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+ c: i32,
+ d: i32,
+ e: i32,
+ f: i32,
+ g: i32,
+ h: i32,
+ i: i32,
+ j: i32,
+ k: i32,
+}
+
+fn test(x: X) {
+ let d = {
+ let e = {
+ let f = {
+ let g = {
+ let h = {
+ let i = {
+ let j = {
+ matches!(
+ x,
+ X {
+ a: 1_000,
+ b: 1_000,
+ c: 1_000,
+ d: 1_000,
+ e: 1_000,
+ f: 1_000,
+ g: 1_000,
+ h: 1_000,
+ i: 1_000,
+ j: 1_000,
+ ..
+ }
+ )
+ };
+ j
+ };
+ i
+ };
+ h
+ };
+ g
+ };
+ f
+ };
+ e
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/enum_struct_field.rs b/src/tools/rustfmt/tests/target/issue-4926/enum_struct_field.rs
new file mode 100644
index 000000000..2471df846
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/enum_struct_field.rs
@@ -0,0 +1,41 @@
+// rustfmt-struct_field_align_threshold: 30
+// rustfmt-enum_discrim_align_threshold: 30
+// rustfmt-imports_layout: HorizontalVertical
+
+#[derive(Default)]
+struct InnerStructA {
+ bbbbbbbbb: i32,
+ cccccccc: i32,
+}
+
+enum SomeEnumNamedD {
+ E(InnerStructA),
+ F {
+ ggggggggggggggggggggggggg: bool,
+ h: bool,
+ },
+}
+
+impl SomeEnumNamedD {
+ fn f_variant() -> Self {
+ Self::F {
+ ggggggggggggggggggggggggg: true,
+ h: true,
+ }
+ }
+}
+
+fn main() {
+ let kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk = SomeEnumNamedD::f_variant();
+ let something_we_care_about = matches!(
+ kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk,
+ SomeEnumNamedD::F {
+ ggggggggggggggggggggggggg: true,
+ ..
+ }
+ );
+
+ if something_we_care_about {
+ println!("Yup it happened");
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-4926/minimum_example.rs
new file mode 100644
index 000000000..06e184274
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/minimum_example.rs
@@ -0,0 +1,10 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+}
+
+fn test(x: X) {
+ let y = matches!(x, X { a: 1, .. });
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/struct_with_long_field_names.rs b/src/tools/rustfmt/tests/target/issue-4926/struct_with_long_field_names.rs
new file mode 100644
index 000000000..ac4674ab5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/struct_with_long_field_names.rs
@@ -0,0 +1,24 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ really_really_long_field_a: i32,
+ really_really_really_long_field_b: i32,
+ really_really_really_really_long_field_c: i32,
+ really_really_really_really_really_long_field_d: i32,
+ really_really_really_really_really_really_long_field_e: i32,
+ f: i32,
+}
+
+fn test(x: X) {
+ let y = matches!(
+ x,
+ X {
+ really_really_long_field_a: 10,
+ really_really_really_long_field_b: 10,
+ really_really_really_really_long_field_c: 10,
+ really_really_really_really_really_long_field_d: 10,
+ really_really_really_really_really_really_long_field_e: 10,
+ ..
+ }
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4926/struct_with_many_fields.rs b/src/tools/rustfmt/tests/target/issue-4926/struct_with_many_fields.rs
new file mode 100644
index 000000000..96dfe14bf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4926/struct_with_many_fields.rs
@@ -0,0 +1,34 @@
+// rustfmt-struct_field_align_threshold: 30
+
+struct X {
+ a: i32,
+ b: i32,
+ c: i32,
+ d: i32,
+ e: i32,
+ f: i32,
+ g: i32,
+ h: i32,
+ i: i32,
+ j: i32,
+ k: i32,
+}
+
+fn test(x: X) {
+ let y = matches!(
+ x,
+ X {
+ a: 1_000,
+ b: 1_000,
+ c: 1_000,
+ d: 1_000,
+ e: 1_000,
+ f: 1_000,
+ g: 1_000,
+ h: 1_000,
+ i: 1_000,
+ j: 1_000,
+ ..
+ }
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4984/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-4984/minimum_example.rs
new file mode 100644
index 000000000..f0599c5d6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4984/minimum_example.rs
@@ -0,0 +1,2 @@
+#[derive(/*Debug, */ Clone)]
+struct Foo;
diff --git a/src/tools/rustfmt/tests/target/issue-4984/multi_line_derive.rs b/src/tools/rustfmt/tests/target/issue-4984/multi_line_derive.rs
new file mode 100644
index 000000000..5fbd9784a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4984/multi_line_derive.rs
@@ -0,0 +1,26 @@
+#[derive(
+ /* ---------- Some really important comment that just had to go inside the derive --------- */
+ Debug,
+ Clone,
+ Eq,
+ PartialEq,
+)]
+struct Foo {
+ a: i32,
+ b: T,
+}
+
+#[derive(
+ /*
+ Some really important comment that just had to go inside the derive.
+ Also had to be put over multiple lines
+ */
+ Debug,
+ Clone,
+ Eq,
+ PartialEq,
+)]
+struct Bar {
+ a: i32,
+ b: T,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4984/multiple_comments_within.rs b/src/tools/rustfmt/tests/target/issue-4984/multiple_comments_within.rs
new file mode 100644
index 000000000..d2924f0d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4984/multiple_comments_within.rs
@@ -0,0 +1,11 @@
+#[derive(
+ /* ---------- Some really important comment that just had to go inside the derive --------- */
+ Debug,
+ Clone,
+ /* Another comment */ Eq,
+ PartialEq,
+)]
+struct Foo {
+ a: i32,
+ b: T,
+}
diff --git a/src/tools/rustfmt/tests/target/issue-4984/should_not_change.rs b/src/tools/rustfmt/tests/target/issue-4984/should_not_change.rs
new file mode 100644
index 000000000..e46ee5110
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-4984/should_not_change.rs
@@ -0,0 +1,5 @@
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct Foo;
+
+#[derive(Clone)]
+struct Bar;
diff --git a/src/tools/rustfmt/tests/target/issue-5005/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-5005/minimum_example.rs
new file mode 100644
index 000000000..11cc645fa
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5005/minimum_example.rs
@@ -0,0 +1,9 @@
+#![feature(more_qualified_paths)]
+macro_rules! show {
+ ($ty:ty, $ex:expr) => {
+ match $ex {
+ <$ty>::A(_val) => println!("got a"), // formatting should not remove <$ty>::
+ <$ty>::B => println!("got b"),
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5009/1_minimum_example.rs b/src/tools/rustfmt/tests/target/issue-5009/1_minimum_example.rs
new file mode 100644
index 000000000..55836f4bf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5009/1_minimum_example.rs
@@ -0,0 +1,4 @@
+fn main() {
+ // the "in" inside the pattern produced invalid syntax
+ for variable_in_here /* ... */ in 0..1 {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5009/2_many_in_connectors_in_pattern.rs b/src/tools/rustfmt/tests/target/issue-5009/2_many_in_connectors_in_pattern.rs
new file mode 100644
index 000000000..d83590c68
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5009/2_many_in_connectors_in_pattern.rs
@@ -0,0 +1,3 @@
+fn main() {
+ for in_in_in_in_in_in_in_in /* ... */ in 0..1 {}
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5009/3_nested_for_loop_with_connector_in_pattern.rs b/src/tools/rustfmt/tests/target/issue-5009/3_nested_for_loop_with_connector_in_pattern.rs
new file mode 100644
index 000000000..9c8007239
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5009/3_nested_for_loop_with_connector_in_pattern.rs
@@ -0,0 +1,5 @@
+fn main() {
+ for variable_in_x /* ... */ in 0..1 {
+ for variable_in_y /* ... */ in 0..1 {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5009/4_nested_for_loop_with_if_elseif_else.rs b/src/tools/rustfmt/tests/target/issue-5009/4_nested_for_loop_with_if_elseif_else.rs
new file mode 100644
index 000000000..a716d0d30
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5009/4_nested_for_loop_with_if_elseif_else.rs
@@ -0,0 +1,13 @@
+fn main() {
+ for variable_in_x /* ... */ in 0..1 {
+ for variable_in_y /* ... */ in 0..1 {
+ if false {
+
+ } else if false {
+
+ } else {
+
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5009/5_nested_for_loop_with_connector_in_if_elseif_else.rs b/src/tools/rustfmt/tests/target/issue-5009/5_nested_for_loop_with_connector_in_if_elseif_else.rs
new file mode 100644
index 000000000..41ea46d4c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5009/5_nested_for_loop_with_connector_in_if_elseif_else.rs
@@ -0,0 +1,15 @@
+fn main() {
+ let in_ = false;
+
+ for variable_in_x /* ... */ in 0..1 {
+ for variable_in_y /* ... */ in 0..1 {
+ if in_ {
+
+ } else if in_ {
+
+ } else {
+
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5009/6_deeply_nested_for_loop_with_connector_in_pattern.rs b/src/tools/rustfmt/tests/target/issue-5009/6_deeply_nested_for_loop_with_connector_in_pattern.rs
new file mode 100644
index 000000000..789e54f7e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5009/6_deeply_nested_for_loop_with_connector_in_pattern.rs
@@ -0,0 +1,32 @@
+fn main() {
+ for variable_in_a /* ... */ in 0..1 {
+ for variable_in_b /* ... */ in 0..1 {
+ for variable_in_c /* ... */ in 0..1 {
+ for variable_in_d /* ... */ in 0..1 {
+ for variable_in_e /* ... */ in 0..1 {
+ for variable_in_f /* ... */ in 0..1 {
+ for variable_in_g /* ... */ in 0..1 {
+ for variable_in_h /* ... */ in 0..1 {
+ for variable_in_i /* ... */ in 0..1 {
+ for variable_in_j /* ... */ in 0..1 {
+ for variable_in_k /* ... */ in 0..1 {
+ for variable_in_l /* ... */ in 0..1 {
+ for variable_in_m /* ... */ in 0..1 {
+ for variable_in_n /* ... */ in 0..1 {
+ for variable_in_o /* ... */ in 0..1 {
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5011.rs b/src/tools/rustfmt/tests/target/issue-5011.rs
new file mode 100644
index 000000000..9ad4a1929
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5011.rs
@@ -0,0 +1,8 @@
+pub(crate) struct ASlash(
+ // hello
+ i32,
+);
+
+pub(crate) struct AStar(/* hello */ i32);
+
+pub(crate) struct BStar(/* hello */ i32);
diff --git a/src/tools/rustfmt/tests/target/issue-5012/trailing_comma_always.rs b/src/tools/rustfmt/tests/target/issue-5012/trailing_comma_always.rs
new file mode 100644
index 000000000..ff9c40fbb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5012/trailing_comma_always.rs
@@ -0,0 +1,8 @@
+// rustfmt-trailing_comma: Always
+
+pub struct Matrix<T, const R: usize, const C: usize,>
+where
+ [T; R * C]:,
+{
+ contents: [T; R * C],
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5012/trailing_comma_never.rs b/src/tools/rustfmt/tests/target/issue-5012/trailing_comma_never.rs
new file mode 100644
index 000000000..2fac8eae5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5012/trailing_comma_never.rs
@@ -0,0 +1,8 @@
+// rustfmt-trailing_comma: Never
+
+pub struct Matrix<T, const R: usize, const C: usize>
+where
+ [T; R * C]:
+{
+ contents: [T; R * C]
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5023.rs b/src/tools/rustfmt/tests/target/issue-5023.rs
new file mode 100644
index 000000000..4e84c7d98
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5023.rs
@@ -0,0 +1,23 @@
+// rustfmt-wrap_comments: true
+
+/// A comment to test special unicode characters on boundaries
+/// 是,是,是,是,是,是,是,是,是,是,是,是 it should break right here
+/// this goes to the next line
+fn main() {
+ if xxx {
+ let xxx = xxx
+ .into_iter()
+ .filter(|(xxx, xxx)| {
+ if let Some(x) = Some(1) {
+ // xxxxxxxxxxxxxxxxxx, xxxxxxxxxxxx, xxxxxxxxxxxxxxxxxxxx xxx xxxxxxx, xxxxx xxx
+ // xxxxxxxxxx. xxxxxxxxxxxxxxxx,xxxxxxxxxxxxxxxxx xxx xxxxxxx
+ // 是sdfadsdfxxxxxxxxx,sdfaxxxxxx_xxxxx_masdfaonxxx,
+ if false {
+ return true;
+ }
+ }
+ false
+ })
+ .collect();
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5030.rs b/src/tools/rustfmt/tests/target/issue-5030.rs
new file mode 100644
index 000000000..8ac3888bd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5030.rs
@@ -0,0 +1,21 @@
+// rustfmt-imports_granularity: Item
+// rustfmt-group_imports: One
+
+// Confirm that attributes are duplicated to all items in the use statement
+#[cfg(feature = "foo")]
+use std::collections::HashMap;
+#[cfg(feature = "foo")]
+use std::collections::HashSet;
+
+// Separate the imports below from the ones above
+const A: usize = 0;
+
+// Copying attrs works with import grouping as well
+#[cfg(feature = "spam")]
+use qux::bar;
+#[cfg(feature = "spam")]
+use qux::baz;
+#[cfg(feature = "foo")]
+use std::collections::HashMap;
+#[cfg(feature = "foo")]
+use std::collections::HashSet;
diff --git a/src/tools/rustfmt/tests/target/issue-5033/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-5033/minimum_example.rs
new file mode 100644
index 000000000..0e7df41de
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5033/minimum_example.rs
@@ -0,0 +1,8 @@
+// leading comment
+
+#![rustfmt::skip]
+fn main() {
+ println!("main"); // commented
+}
+
+// post comment
diff --git a/src/tools/rustfmt/tests/target/issue-5033/nested_modules.rs b/src/tools/rustfmt/tests/target/issue-5033/nested_modules.rs
new file mode 100644
index 000000000..7a11133b6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5033/nested_modules.rs
@@ -0,0 +1,11 @@
+#![rustfmt::skip]
+
+mod a {
+ mod b {
+
+ }
+
+ // trailing comment b
+}
+
+// trailing comment a
diff --git a/src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_with_trailing_comma.rs b/src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_with_trailing_comma.rs
new file mode 100644
index 000000000..1ae1212b4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_with_trailing_comma.rs
@@ -0,0 +1,24 @@
+fn main() {
+ // 5042 deals with trailing commas, not the indentation issue of these comments
+ // When a future PR fixes the indentation issues these tests can be updated
+ let _ = std::ops::Add::add(
+ 10, 20, // ...
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(
+ 10, 20, /* ... */
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(
+ 10, 20, // ...,
+ // ...,
+ );
+
+ let _ = std::ops::Add::add(
+ 10, 20, // ...,
+ /* ...
+ */
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_without_trailing_comma.rs b/src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_without_trailing_comma.rs
new file mode 100644
index 000000000..30d174664
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5042/multi-line_comment_without_trailing_comma.rs
@@ -0,0 +1,24 @@
+fn main() {
+ // 5042 deals with trailing commas, not the indentation issue of these comments
+ // When a future PR fixes the indentation issues these tests can be updated
+ let _ = std::ops::Add::add(
+ 10, 20, // ...
+ // ...
+ );
+
+ let _ = std::ops::Add::add(
+ 10, 20, /* ... */
+ // ...
+ );
+
+ let _ = std::ops::Add::add(
+ 10, 20, // ...
+ // ...
+ );
+
+ let _ = std::ops::Add::add(
+ 10, 20, // ...
+ /* ...
+ */
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5042/single-line_comment_with_trailing_comma.rs b/src/tools/rustfmt/tests/target/issue-5042/single-line_comment_with_trailing_comma.rs
new file mode 100644
index 000000000..87b651dd2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5042/single-line_comment_with_trailing_comma.rs
@@ -0,0 +1,7 @@
+fn main() {
+ let _ = std::ops::Add::add(
+ 10, 20, // ...,
+ );
+
+ let _ = std::ops::Add::add(10, 20 /* ... */);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5042/single-line_comment_without_trailing_comma.rs b/src/tools/rustfmt/tests/target/issue-5042/single-line_comment_without_trailing_comma.rs
new file mode 100644
index 000000000..116df86a4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5042/single-line_comment_without_trailing_comma.rs
@@ -0,0 +1,7 @@
+fn main() {
+ let _ = std::ops::Add::add(
+ 10, 20, // ...
+ );
+
+ let _ = std::ops::Add::add(10, 20 /* ... */);
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_always_struct_lit_width_0.rs b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_always_struct_lit_width_0.rs
new file mode 100644
index 000000000..c7122c676
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_always_struct_lit_width_0.rs
@@ -0,0 +1,10 @@
+// rustfmt-trailing_comma: Always
+// rustfmt-struct_lit_single_line: false
+// rustfmt-struct_lit_width: 0
+
+fn main() {
+ let Foo {
+ a,
+ ..
+ } = b;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_never_struct_lit_width_0.rs b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_never_struct_lit_width_0.rs
new file mode 100644
index 000000000..68e89c417
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_trailing_comma_never_struct_lit_width_0.rs
@@ -0,0 +1,10 @@
+// rustfmt-trailing_comma: Never
+// rustfmt-struct_lit_single_line: false
+// rustfmt-struct_lit_width: 0
+
+fn main() {
+ let Foo {
+ a,
+ ..
+ } = b;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_always.rs b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_always.rs
new file mode 100644
index 000000000..3368f0703
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_always.rs
@@ -0,0 +1,10 @@
+// rustfmt-trailing_comma: Always
+// rustfmt-struct_lit_single_line: false
+
+// There is an issue with how this is formatted.
+// formatting should look like ./multi_line_struct_trailing_comma_always_struct_lit_width_0.rs
+fn main() {
+ let Foo {
+ a, ..
+ } = b;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_never.rs b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_never.rs
new file mode 100644
index 000000000..cf63c4c98
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5066/multi_line_struct_with_trailing_comma_never.rs
@@ -0,0 +1,10 @@
+// rustfmt-trailing_comma: Never
+// rustfmt-struct_lit_single_line: false
+
+// There is an issue with how this is formatted.
+// formatting should look like ./multi_line_struct_trailing_comma_never_struct_lit_width_0.rs
+fn main() {
+ let Foo {
+ a, ..
+ } = b;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_always.rs b/src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_always.rs
new file mode 100644
index 000000000..e20bcec93
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_always.rs
@@ -0,0 +1,5 @@
+// rustfmt-trailing_comma: Always
+
+fn main() {
+ let Foo { a, .. } = b;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_never.rs b/src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_never.rs
new file mode 100644
index 000000000..8b95bb137
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5066/with_trailing_comma_never.rs
@@ -0,0 +1,5 @@
+// rustfmt-trailing_comma: Never
+
+fn main() {
+ let Foo { a, .. } = b;
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_false.rs
new file mode 100644
index 000000000..f4801de01
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_false.rs
@@ -0,0 +1,33 @@
+// rustfmt-wrap_comments: false
+
+fn main() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ // - aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc
+
+ // * aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc
+
+ /* - aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc */
+
+ /* * aaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa aaaaaaaaa bbbbbbbbbb bbbbbbbbb bbbbbbbbb ccc cccccccccc ccccccc cccccccc */
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs
new file mode 100644
index 000000000..b289c9f85
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/deeply_nested_long_comment_wrap_comments_true.rs
@@ -0,0 +1,49 @@
+// rustfmt-wrap_comments: true
+
+fn main() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ // - aaaa aaaaaaaaa aaaaaaaaa
+ // aaaaaaaaa aaaaaaaaa
+ // bbbbbbbbbb bbbbbbbbb
+ // bbbbbbbbb ccc cccccccccc
+ // ccccccc cccccccc
+
+ // * aaaa aaaaaaaaa aaaaaaaaa
+ // aaaaaaaaa aaaaaaaaa
+ // bbbbbbbbbb bbbbbbbbb
+ // bbbbbbbbb ccc cccccccccc
+ // ccccccc cccccccc
+
+ /* - aaaa aaaaaaaaa aaaaaaaaa
+ * aaaaaaaaa aaaaaaaaa
+ * bbbbbbbbbb bbbbbbbbb
+ * bbbbbbbbb ccc cccccccccc
+ * ccccccc cccccccc */
+
+ /* * aaaa aaaaaaaaa aaaaaaaaa
+ * aaaaaaaaa aaaaaaaaa
+ * bbbbbbbbbb bbbbbbbbb
+ * bbbbbbbbb ccc cccccccccc
+ * ccccccc cccccccc */
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_false.rs
new file mode 100644
index 000000000..60beed1b0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_false.rs
@@ -0,0 +1,17 @@
+// rustfmt-wrap_comments: false
+
+// - some itemized block 1
+// - some itemized block 2
+
+// * some itemized block 3
+// * some itemized block 4
+
+/*
+ * - some itemized block 5
+ * - some itemized block 6
+ */
+
+/*
+ * * some itemized block 7
+ * * some itemized block 8
+ */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_true.rs
new file mode 100644
index 000000000..84fba4b7c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/multi_line_itemized_block_wrap_comments_true.rs
@@ -0,0 +1,17 @@
+// rustfmt-wrap_comments: true
+
+// - some itemized block 1
+// - some itemized block 2
+
+// * some itemized block 3
+// * some itemized block 4
+
+/*
+ * - some itemized block 5
+ * - some itemized block 6
+ */
+
+/*
+ * * some itemized block 7
+ * * some itemized block 8
+ */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_false.rs
new file mode 100644
index 000000000..d1bf44f6c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_false.rs
@@ -0,0 +1,37 @@
+// rustfmt-wrap_comments: false
+
+// Some text
+// - some itemized block 1
+// - some itemized block 2
+// Some more text
+// - some itemized block 3
+// - some itemized block 4
+// Even more text
+
+// Some text
+// * some itemized block 5
+// * some itemized block 6
+// Some more text
+// * some itemized block 7
+// * some itemized block 8
+// Even more text
+
+/*
+ * Some text
+ * - some itemized block 9
+ * - some itemized block 10
+ * Some more text
+ * - some itemized block 11
+ * - some itemized block 12
+ * Even more text
+ */
+
+/*
+ * Some text
+ * * some itemized block 13
+ * * some itemized block 14
+ * Some more text
+ * * some itemized block 15
+ * * some itemized block 16
+ * Even more text
+ */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_true.rs
new file mode 100644
index 000000000..f767491f9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/multi_line_text_with_itemized_block_wrap_comments_true.rs
@@ -0,0 +1,37 @@
+// rustfmt-wrap_comments: true
+
+// Some text
+// - some itemized block 1
+// - some itemized block 2
+// Some more text
+// - some itemized block 3
+// - some itemized block 4
+// Even more text
+
+// Some text
+// * some itemized block 5
+// * some itemized block 6
+// Some more text
+// * some itemized block 7
+// * some itemized block 8
+// Even more text
+
+/*
+ * Some text
+ * - some itemized block 9
+ * - some itemized block 10
+ * Some more text
+ * - some itemized block 11
+ * - some itemized block 12
+ * Even more text
+ */
+
+/*
+ * Some text
+ * * some itemized block 13
+ * * some itemized block 14
+ * Some more text
+ * * some itemized block 15
+ * * some itemized block 16
+ * Even more text
+ */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_false.rs
new file mode 100644
index 000000000..2cd85c787
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_false.rs
@@ -0,0 +1,9 @@
+// rustfmt-wrap_comments: false
+
+// - some itemized block 1
+
+// * some itemized block 2
+
+/* - some itemized block 3 */
+
+/* * some itemized block 4 */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_true.rs
new file mode 100644
index 000000000..e9f343d75
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/single_line_itemized_block_wrap_comments_true.rs
@@ -0,0 +1,9 @@
+// rustfmt-wrap_comments: true
+
+// - some itemized block 1
+
+// * some itemized block 2
+
+/* - some itemized block 3 */
+
+/* * some itemized block 4 */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_false.rs
new file mode 100644
index 000000000..97bb7733d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_false.rs
@@ -0,0 +1,19 @@
+// rustfmt-wrap_comments: false
+
+//
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+//
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+/*
+ * - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/*
+ * - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+
+/*
+ * * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/*
+ * * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
diff --git a/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs
new file mode 100644
index 000000000..c8af8383e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_very_long_itemized_block_wrap_comments_true.rs
@@ -0,0 +1,27 @@
+// rustfmt-wrap_comments: true
+
+//
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+
+//
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+
+/*
+ * - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
+/*
+ * - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
+
+/*
+ * * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
+/*
+ * * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_false.rs
new file mode 100644
index 000000000..75cc42c0e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_false.rs
@@ -0,0 +1,17 @@
+// rustfmt-wrap_comments: false
+
+//
+// - some itemized block 1
+// - some itemized block 2
+
+//
+// * some itemized block 3
+// * some itemized block 4
+
+/*
+ * - some itemized block 5
+ * - some itemized block 6 */
+
+/*
+ * * some itemized block 7
+ * * some itemized block 8 */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_true.rs
new file mode 100644
index 000000000..ef2c8f90c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/start_with_empty_comment_wrap_comments_true.rs
@@ -0,0 +1,17 @@
+// rustfmt-wrap_comments: true
+
+//
+// - some itemized block 1
+// - some itemized block 2
+
+//
+// * some itemized block 3
+// * some itemized block 4
+
+/*
+ * - some itemized block 5
+ * - some itemized block 6 */
+
+/*
+ * * some itemized block 7
+ * * some itemized block 8 */
diff --git a/src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_false.rs
new file mode 100644
index 000000000..c826cc5d4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_false.rs
@@ -0,0 +1,13 @@
+// rustfmt-wrap_comments: false
+
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
+
+/* - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/* - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+
+/* * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
+/* * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.*/
diff --git a/src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_true.rs
new file mode 100644
index 000000000..7f764dbd8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5088/very_long_comment_wrap_comments_true.rs
@@ -0,0 +1,21 @@
+// rustfmt-wrap_comments: true
+
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+// - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+// * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+// tempor incididunt ut labore et dolore magna aliqua.
+
+/* - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
+/* - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
+
+/* * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
+/* * Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
+ * tempor incididunt ut labore et dolore magna aliqua. */
diff --git a/src/tools/rustfmt/tests/target/issue-5095.rs b/src/tools/rustfmt/tests/target/issue-5095.rs
new file mode 100644
index 000000000..6981a6580
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5095.rs
@@ -0,0 +1,27 @@
+// rustfmt-wrap_comments: true
+
+pub mod a_long_name {
+ pub mod b_long_name {
+ pub mod c_long_name {
+ pub mod d_long_name {
+ pub mod e_long_name {
+ pub struct Bananas;
+ impl Bananas {
+ pub fn fantastic() {}
+ }
+
+ pub mod f_long_name {
+ pub struct Apples;
+ }
+ }
+ }
+ }
+ }
+}
+
+/// Check out [my other struct] ([`Bananas`]) and [the method it has].
+///
+/// [my other struct]: a_long_name::b_long_name::c_long_name::d_long_name::e_long_name::f_long_name::Apples
+/// [`Bananas`]: a_long_name::b_long_name::c_long_name::d_long_name::e_long_name::Bananas::fantastic()
+/// [the method it has]: a_long_name::b_long_name::c_long_name::d_long_name::e_long_name::Bananas::fantastic()
+pub struct A;
diff --git a/src/tools/rustfmt/tests/target/issue-510.rs b/src/tools/rustfmt/tests/target/issue-510.rs
new file mode 100644
index 000000000..a166b6849
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-510.rs
@@ -0,0 +1,41 @@
+impl ISizeAndMarginsComputer for AbsoluteNonReplaced {
+ fn solve_inline_size_constraints(
+ &self,
+ block: &mut BlockFlow,
+ input: &ISizeConstraintInput,
+ ) -> ISizeConstraintSolution {
+ let (inline_start, inline_size, margin_inline_start, margin_inline_end) = match (
+ inline_startssssssxxxxxxsssssxxxxxxxxxssssssxxx,
+ inline_startssssssxxxxxxsssssxxxxxxxxxssssssxxx,
+ ) {
+ (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
+ let margin_start = inline_start_margin.specified_or_zero();
+ let margin_end = inline_end_margin.specified_or_zero();
+ // Now it is the same situation as inline-start Specified and inline-end
+ // and inline-size Auto.
+ //
+ // Set inline-end to zero to calculate inline-size.
+ let inline_size = block.get_shrink_to_fit_inline_size(
+ available_inline_size - (margin_start + margin_end),
+ );
+ (Au(0), inline_size, margin_start, margin_end)
+ }
+ };
+
+ let (inline_start, inline_size, margin_inline_start, margin_inline_end) =
+ match (inline_start, inline_end, computed_inline_size) {
+ (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
+ let margin_start = inline_start_margin.specified_or_zero();
+ let margin_end = inline_end_margin.specified_or_zero();
+ // Now it is the same situation as inline-start Specified and inline-end
+ // and inline-size Auto.
+ //
+ // Set inline-end to zero to calculate inline-size.
+ let inline_size = block.get_shrink_to_fit_inline_size(
+ available_inline_size - (margin_start + margin_end),
+ );
+ (Au(0), inline_size, margin_start, margin_end)
+ }
+ };
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5125/attributes_in_formal_fuction_parameter.rs b/src/tools/rustfmt/tests/target/issue-5125/attributes_in_formal_fuction_parameter.rs
new file mode 100644
index 000000000..5d1679328
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5125/attributes_in_formal_fuction_parameter.rs
@@ -0,0 +1,6 @@
+fn foo(
+ #[unused] a: <u16 as intercom::type_system::ExternType<
+ intercom::type_system::AutomationTypeSystem,
+ >>::ForeignType,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5125/long_parameter_in_different_positions.rs b/src/tools/rustfmt/tests/target/issue-5125/long_parameter_in_different_positions.rs
new file mode 100644
index 000000000..cab20381c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5125/long_parameter_in_different_positions.rs
@@ -0,0 +1,24 @@
+fn middle(
+ a: usize,
+ b: <u16 as intercom::type_system::ExternType<
+ intercom::type_system::AutomationTypeSystem,
+ >>::ForeignType,
+ c: bool,
+) {
+}
+
+fn last(
+ a: usize,
+ b: <u16 as intercom::type_system::ExternType<
+ intercom::type_system::AutomationTypeSystem,
+ >>::ForeignType,
+) {
+}
+
+fn first(
+ a: <u16 as intercom::type_system::ExternType<
+ intercom::type_system::AutomationTypeSystem,
+ >>::ForeignType,
+ b: usize,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5125/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-5125/minimum_example.rs
new file mode 100644
index 000000000..8003e6696
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5125/minimum_example.rs
@@ -0,0 +1,6 @@
+fn foo(
+ a: <u16 as intercom::type_system::ExternType<
+ intercom::type_system::AutomationTypeSystem,
+ >>::ForeignType,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5125/with_leading_and_inline_comments.rs b/src/tools/rustfmt/tests/target/issue-5125/with_leading_and_inline_comments.rs
new file mode 100644
index 000000000..2340b2f34
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5125/with_leading_and_inline_comments.rs
@@ -0,0 +1,7 @@
+fn foo(
+ // Pre Comment
+ a: <u16 as intercom::type_system::ExternType<
+ intercom::type_system::AutomationTypeSystem,
+ >>::ForeignType, // Inline comment
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5151/minimum_example.rs b/src/tools/rustfmt/tests/target/issue-5151/minimum_example.rs
new file mode 100644
index 000000000..2ed3d936e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5151/minimum_example.rs
@@ -0,0 +1,16 @@
+#![feature(more_qualified_paths)]
+
+struct Struct {}
+
+trait Trait {
+ type Type;
+}
+
+impl Trait for Struct {
+ type Type = Self;
+}
+
+fn main() {
+ // keep the qualified path details
+ let _ = <Struct as Trait>::Type {};
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5157/indented_itemized_markdown_blockquote.rs b/src/tools/rustfmt/tests/target/issue-5157/indented_itemized_markdown_blockquote.rs
new file mode 100644
index 000000000..e47677f20
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5157/indented_itemized_markdown_blockquote.rs
@@ -0,0 +1,6 @@
+// rustfmt-wrap_comments: true
+
+/// > For each sample received, the middleware internally maintains a
+/// > sample_state relative to each DataReader. The sample_state can
+/// > either be READ or NOT_READ.
+fn block_quote() {}
diff --git a/src/tools/rustfmt/tests/target/issue-5157/nested_itemized_markdown_blockquote.rs b/src/tools/rustfmt/tests/target/issue-5157/nested_itemized_markdown_blockquote.rs
new file mode 100644
index 000000000..079510442
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5157/nested_itemized_markdown_blockquote.rs
@@ -0,0 +1,18 @@
+// rustfmt-wrap_comments: true
+
+/// > For each sample received, the middleware internally maintains a
+/// > sample_state relative to each DataReader. The sample_state can either be
+/// > READ or NOT_READ.
+///
+/// > > For each sample received, the middleware internally maintains a
+/// > > sample_state relative to each DataReader. The sample_state can either be
+/// > > READ or NOT_READ.
+///
+/// > > > For each sample received, the middleware internally maintains a
+/// > > > sample_state relative to each DataReader. The sample_state can either
+/// > > > be READ or NOT_READ.
+///
+/// > > > > > > > > For each sample received, the middleware internally
+/// > > > > > > > > maintains a sample_state relative to each DataReader. The
+/// > > > > > > > > sample_state can either be READ or NOT_READ.
+fn block_quote() {}
diff --git a/src/tools/rustfmt/tests/target/issue-5157/support_itemized_markdown_blockquote.rs b/src/tools/rustfmt/tests/target/issue-5157/support_itemized_markdown_blockquote.rs
new file mode 100644
index 000000000..029ee37d2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5157/support_itemized_markdown_blockquote.rs
@@ -0,0 +1,6 @@
+// rustfmt-wrap_comments: true
+
+/// > For each sample received, the middleware internally maintains a
+/// > sample_state relative to each DataReader. The sample_state can either be
+/// > READ or NOT_READ.
+fn block_quote() {}
diff --git a/src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_false.rs b/src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_false.rs
new file mode 100644
index 000000000..229c6e575
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_false.rs
@@ -0,0 +1,11 @@
+// rustfmt-wrap_comments: false
+
+/// no markdown header so rustfmt should wrap this comment when `format_code_in_doc_comments = true` and `wrap_comments = true`
+fn not_documented_with_markdown_header() {
+ // This is just a normal inline comment so rustfmt should wrap this comment when `wrap_comments = true`
+}
+
+/// # We're using a markdown header here so rustfmt should refuse to wrap this comment in all circumstances
+fn documented_with_markdown_header() {
+ // # We're using a markdown header in an inline comment. rustfmt should be able to wrap this comment when `wrap_comments = true`
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_true.rs b/src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_true.rs
new file mode 100644
index 000000000..87dae58ec
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5238/markdown_header_wrap_comments_true.rs
@@ -0,0 +1,14 @@
+// rustfmt-wrap_comments: true
+
+/// no markdown header so rustfmt should wrap this comment when
+/// `format_code_in_doc_comments = true` and `wrap_comments = true`
+fn not_documented_with_markdown_header() {
+ // This is just a normal inline comment so rustfmt should wrap this comment
+ // when `wrap_comments = true`
+}
+
+/// # We're using a markdown header here so rustfmt should refuse to wrap this comment in all circumstances
+fn documented_with_markdown_header() {
+ // # We're using a markdown header in an inline comment. rustfmt should be
+ // able to wrap this comment when `wrap_comments = true`
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5260.rs b/src/tools/rustfmt/tests/target/issue-5260.rs
new file mode 100644
index 000000000..171f6fa51
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5260.rs
@@ -0,0 +1,13 @@
+// rustfmt-wrap_comments: true
+
+/// [MyType](VeryLongPathToMyType::NoLineBreak::Here::Okay::ThatWouldBeNice::Thanks)
+fn documented_with_longtype() {
+ // # We're using a long type link, rustfmt should not break line
+ // on the type when `wrap_comments = true`
+}
+
+/// VeryLongPathToMyType::JustMyType::But::VeryVery::Long::NoLineBreak::Here::Okay::ThatWouldBeNice::Thanks
+fn documented_with_verylongtype() {
+ // # We're using a long type link, rustfmt should not break line
+ // on the type when `wrap_comments = true`
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5270/merge_derives_false.rs b/src/tools/rustfmt/tests/target/issue-5270/merge_derives_false.rs
new file mode 100644
index 000000000..3b6f7e669
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5270/merge_derives_false.rs
@@ -0,0 +1,62 @@
+// rustfmt-merge_derives:false
+
+#[rustfmt::skip::attributes(derive)]
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+struct DoNotMergeDerives {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[rustfmt::skip::attributes(derive)]
+#[derive(Clone)]
+struct DoNotMergeDerivesSkipInMiddle {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+#[rustfmt::skip::attributes(derive)]
+struct DoNotMergeDerivesSkipAtEnd {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+struct MergeDerives {
+ field: String,
+}
+
+mod inner_attribute_derive_skip {
+ #![rustfmt::skip::attributes(derive)]
+
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct DoNotMergeDerives {
+ field: String,
+ }
+}
+
+#[rustfmt::skip::attributes(derive)]
+mod outer_attribute_derive_skip {
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct DoNotMergeDerives {
+ field: String,
+ }
+}
+
+mod no_derive_skip {
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct MergeDerives {
+ field: String,
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-5270/merge_derives_true.rs b/src/tools/rustfmt/tests/target/issue-5270/merge_derives_true.rs
new file mode 100644
index 000000000..5f488b454
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-5270/merge_derives_true.rs
@@ -0,0 +1,60 @@
+// rustfmt-merge_derives:true
+
+#[rustfmt::skip::attributes(derive)]
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+struct DoNotMergeDerives {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[rustfmt::skip::attributes(derive)]
+#[derive(Clone)]
+struct DoNotMergeDerivesSkipInMiddle {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField)]
+#[derive(Clone)]
+#[rustfmt::skip::attributes(derive)]
+struct DoNotMergeDerivesSkipAtEnd {
+ field: String,
+}
+
+#[allow(dead_code)]
+#[derive(StructField, Clone)]
+struct MergeDerives {
+ field: String,
+}
+
+mod inner_attribute_derive_skip {
+ #![rustfmt::skip::attributes(derive)]
+
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct DoNotMergeDerives {
+ field: String,
+ }
+}
+
+#[rustfmt::skip::attributes(derive)]
+mod outer_attribute_derive_skip {
+ #[allow(dead_code)]
+ #[derive(StructField)]
+ #[derive(Clone)]
+ struct DoNotMergeDerives {
+ field: String,
+ }
+}
+
+mod no_derive_skip {
+ #[allow(dead_code)]
+ #[derive(StructField, Clone)]
+ struct MergeDerives {
+ field: String,
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-539.rs b/src/tools/rustfmt/tests/target/issue-539.rs
new file mode 100644
index 000000000..adeb33555
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-539.rs
@@ -0,0 +1,3 @@
+// rustfmt-normalize_comments: true
+// FIXME (#3300): Should allow items to be anonymous. Right now
+// we just use dummy names for anon items.
diff --git a/src/tools/rustfmt/tests/target/issue-64.rs b/src/tools/rustfmt/tests/target/issue-64.rs
new file mode 100644
index 000000000..c06606302
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-64.rs
@@ -0,0 +1,7 @@
+// Regression test for issue 64
+
+pub fn header_name<T: Header>() -> &'static str {
+ let name = <T as Header>::header_name();
+ let func = <T as Header>::header_name;
+ name
+}
diff --git a/src/tools/rustfmt/tests/target/issue-683.rs b/src/tools/rustfmt/tests/target/issue-683.rs
new file mode 100644
index 000000000..adeb33555
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-683.rs
@@ -0,0 +1,3 @@
+// rustfmt-normalize_comments: true
+// FIXME (#3300): Should allow items to be anonymous. Right now
+// we just use dummy names for anon items.
diff --git a/src/tools/rustfmt/tests/target/issue-691.rs b/src/tools/rustfmt/tests/target/issue-691.rs
new file mode 100644
index 000000000..7473d070e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-691.rs
@@ -0,0 +1,9 @@
+// rustfmt-normalize_comments: true
+
+//! `std` or `core` and simply link to this library. In case the target
+//! platform has no hardware
+//! support for some operation, software implementations provided by this
+//! library will be used automagically.
+// TODO: provide instructions to override default libm link and how to link to
+// this library.
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/issue-770.rs b/src/tools/rustfmt/tests/target/issue-770.rs
new file mode 100644
index 000000000..5fbedd7b7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-770.rs
@@ -0,0 +1,10 @@
+fn main() {
+ if false {
+ if false {
+ } else {
+ // A let binding here seems necessary to trigger it.
+ let _ = ();
+ }
+ } else if let false = false {
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-811.rs b/src/tools/rustfmt/tests/target/issue-811.rs
new file mode 100644
index 000000000..b7a89b5d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-811.rs
@@ -0,0 +1,19 @@
+trait FooTrait<T>: Sized {
+ type Bar: BarTrait<T>;
+}
+
+trait BarTrait<T>: Sized {
+ type Baz;
+ fn foo();
+}
+
+type Foo<T: FooTrait> = <<T as FooTrait<U>>::Bar as BarTrait<U>>::Baz;
+type Bar<T: BarTrait> = <T as BarTrait<U>>::Baz;
+
+fn some_func<T: FooTrait<U>, U>() {
+ <<T as FooTrait<U>>::Bar as BarTrait<U>>::foo();
+}
+
+fn some_func<T: BarTrait<U>>() {
+ <T as BarTrait<U>>::foo();
+}
diff --git a/src/tools/rustfmt/tests/target/issue-831.rs b/src/tools/rustfmt/tests/target/issue-831.rs
new file mode 100644
index 000000000..1d6327c21
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-831.rs
@@ -0,0 +1,9 @@
+fn main() {
+ let y = a.iter().any(|x| {
+ println!("a");
+ }) || b.iter().any(|x| {
+ println!("b");
+ }) || c.iter().any(|x| {
+ println!("c");
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/issue-850.rs b/src/tools/rustfmt/tests/target/issue-850.rs
new file mode 100644
index 000000000..c939716a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-850.rs
@@ -0,0 +1 @@
+const unsafe fn x() {}
diff --git a/src/tools/rustfmt/tests/target/issue-855.rs b/src/tools/rustfmt/tests/target/issue-855.rs
new file mode 100644
index 000000000..0430cc629
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-855.rs
@@ -0,0 +1,27 @@
+fn main() {
+ 'running: loop {
+ for event in event_pump.poll_iter() {
+ match event {
+ Event::Quit { .. }
+ | Event::KeyDown {
+ keycode: Some(Keycode::Escape),
+ ..
+ } => break 'running,
+ }
+ }
+ }
+}
+
+fn main2() {
+ 'running: loop {
+ for event in event_pump.poll_iter() {
+ match event {
+ Event::Quit { .. }
+ | Event::KeyDownXXXXXXXXXXXXX {
+ keycode: Some(Keycode::Escape),
+ ..
+ } => break 'running,
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-913.rs b/src/tools/rustfmt/tests/target/issue-913.rs
new file mode 100644
index 000000000..a2b5800a7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-913.rs
@@ -0,0 +1,22 @@
+mod client {
+ impl Client {
+ fn test(self) -> Result<()> {
+ let next_state = match self.state {
+ State::V5(v5::State::Command(v5::coand::State::WriteVersion(ref mut response))) => {
+ let x = reformat.meeee();
+ }
+ };
+
+ let next_state = match self.state {
+ State::V5(v5::State::Command(v5::comand::State::WriteVersion(
+ ref mut response,
+ ))) => {
+ // The pattern cannot be formatted in a way that the match stays
+ // within the column limit. The rewrite should therefore be
+ // skipped.
+ let x = dont.reformat.meeee();
+ }
+ };
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-945.rs b/src/tools/rustfmt/tests/target/issue-945.rs
new file mode 100644
index 000000000..d46c69a4f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-945.rs
@@ -0,0 +1,17 @@
+impl Bar {
+ default const unsafe fn foo() {
+ "hi"
+ }
+}
+
+impl Baz {
+ default unsafe extern "C" fn foo() {
+ "hi"
+ }
+}
+
+impl Foo for Bar {
+ default fn foo() {
+ "hi"
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue-977.rs b/src/tools/rustfmt/tests/target/issue-977.rs
new file mode 100644
index 000000000..3784a3874
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue-977.rs
@@ -0,0 +1,16 @@
+// rustfmt-normalize_comments: true
+
+trait NameC {
+ // comment
+}
+struct FooC {
+ // comment
+}
+enum MooC {
+ // comment
+}
+mod BarC { // comment
+}
+extern "C" {
+ // comment
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3839.rs b/src/tools/rustfmt/tests/target/issue_3839.rs
new file mode 100644
index 000000000..b7bdf4c75
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3839.rs
@@ -0,0 +1,8 @@
+struct Foo {
+ a: i32,
+ /*
+ asd
+ */
+ // foo
+ b: i32,
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3844.rs b/src/tools/rustfmt/tests/target/issue_3844.rs
new file mode 100644
index 000000000..81d208346
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3844.rs
@@ -0,0 +1,3 @@
+fn main() {
+ || {};
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3853.rs b/src/tools/rustfmt/tests/target/issue_3853.rs
new file mode 100644
index 000000000..eae59eff9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3853.rs
@@ -0,0 +1,47 @@
+fn by_ref_with_block_before_ident() {
+ if let Some(ref /*def*/ state) = foo {
+ println!("asdfasdfasdf");
+ }
+}
+
+fn mut_block_before_ident() {
+ if let Some(mut /*def*/ state) = foo {
+ println!("123");
+ }
+}
+
+fn ref_and_mut_blocks_before_ident() {
+ if let Some(ref /*abc*/ mut /*def*/ state) = foo {
+ println!("deefefefefefwea");
+ }
+}
+
+fn sub_pattern() {
+ let foo @ /*foo*/ bar(f) = 42;
+}
+
+fn no_prefix_block_before_ident() {
+ if let Some(/*def*/ state) = foo {
+ println!("129387123123");
+ }
+}
+
+fn issue_3853() {
+ if let Some(ref /*mut*/ state) = foo {}
+}
+
+fn double_slash_comment_between_lhs_and_rhs() {
+ if let Some(e) =
+ // self.foo.bar(e, tx)
+ packet.transaction.state.committed
+ {
+ // body
+ println!("a2304712836123");
+ }
+}
+
+fn block_comment_between_lhs_and_rhs() {
+ if let Some(ref /*def*/ mut /*abc*/ state) = /*abc*/ foo {
+ println!("asdfasdfasdf");
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3854.rs b/src/tools/rustfmt/tests/target/issue_3854.rs
new file mode 100644
index 000000000..3051335c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3854.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("{:?}", -1. ..1.);
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3868.rs b/src/tools/rustfmt/tests/target/issue_3868.rs
new file mode 100644
index 000000000..067241359
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3868.rs
@@ -0,0 +1,9 @@
+fn foo() {}
+
+fn bar() {
+ for _ in 0..1 {}
+}
+
+fn baz() {
+ ();
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3934.rs b/src/tools/rustfmt/tests/target/issue_3934.rs
new file mode 100644
index 000000000..68f9fd0ae
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3934.rs
@@ -0,0 +1,8 @@
+mod repro {
+ pub fn push() -> Result<(), ()> {
+ self.api.map_api_result(|api| {
+ #[allow(deprecated)]
+ match api.apply_extrinsic_before_version_4_with_context()? {}
+ })
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_3937.rs b/src/tools/rustfmt/tests/target/issue_3937.rs
new file mode 100644
index 000000000..806731085
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_3937.rs
@@ -0,0 +1,13 @@
+// rustfmt-format_code_in_doc_comments:true
+
+struct Foo {
+ // a: i32,
+ //
+ // b: i32,
+}
+
+struct Foo {
+ a: i32,
+ //
+ // b: i32,
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4031.rs b/src/tools/rustfmt/tests/target/issue_4031.rs
new file mode 100644
index 000000000..065d5395c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4031.rs
@@ -0,0 +1,21 @@
+fn foo() {
+ with_woff2_glyf_table("tests/fonts/woff2/SFNT-TTF-Composite.woff2", |glyf| {
+ let actual = glyf
+ .records
+ .iter()
+ .map(|glyph| match glyph {
+ GlyfRecord::Parsed(
+ found @ Glyph {
+ data: GlyphData::Composite { .. },
+ ..
+ },
+ ) => Some(found),
+ _ => None,
+ })
+ .find(|candidate| candidate.is_some())
+ .unwrap()
+ .unwrap();
+
+ assert_eq!(*actual, expected)
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4032.rs b/src/tools/rustfmt/tests/target/issue_4032.rs
new file mode 100644
index 000000000..2e7e624ca
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4032.rs
@@ -0,0 +1,18 @@
+fn a1(
+ #[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa]
+ a: u8,
+) {
+}
+fn b1(
+ #[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa]
+ bb: u8,
+) {
+}
+fn a2(
+ #[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] a: u8,
+) {
+}
+fn b2(
+ #[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] bb: u8,
+) {
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4049.rs b/src/tools/rustfmt/tests/target/issue_4049.rs
new file mode 100644
index 000000000..fe025a0f6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4049.rs
@@ -0,0 +1,26 @@
+// rustfmt-max_width: 110
+// rustfmt-use_small_heuristics: Max
+// rustfmt-hard_tabs: true
+// rustfmt-use_field_init_shorthand: true
+// rustfmt-overflow_delimited_expr: true
+
+// https://github.com/rust-lang/rustfmt/issues/4049
+fn foo() {
+ {
+ {
+ if let Some(MpcEv::PlayDrum(pitch, vel)) =
+ // self.mpc.handle_input(e, /*btn_ctrl_down,*/ tx_launch_to_daw, state_view)
+ self.mpc.handle_input(e, &mut MyBorrowedState { tx_launch_to_daw, state_view })
+ {
+ println!("bar");
+ }
+
+ if let Some(e) =
+ // self.note_input.handle_input(e, /*btn_ctrl_down,*/ tx_launch_to_daw, state_view)
+ self.note_input.handle_input(e, &mut MyBorrowedState { tx_launch_to_daw, state_view })
+ {
+ println!("baz");
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4057.rs b/src/tools/rustfmt/tests/target/issue_4057.rs
new file mode 100644
index 000000000..467e67bca
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4057.rs
@@ -0,0 +1,15 @@
+// rustfmt-format_code_in_doc_comments: true
+
+/// ```
+/// # #[rustversion::since(1.36)]
+/// # fn dox() {
+/// # use std::pin::Pin;
+/// # type Projection<'a> = &'a ();
+/// # type ProjectionRef<'a> = &'a ();
+/// # trait Dox {
+/// fn project_ex(self: Pin<&mut Self>) -> Projection<'_>;
+/// fn project_ref(self: Pin<&Self>) -> ProjectionRef<'_>;
+/// # }
+/// # }
+/// ```
+struct Foo;
diff --git a/src/tools/rustfmt/tests/target/issue_4086.rs b/src/tools/rustfmt/tests/target/issue_4086.rs
new file mode 100644
index 000000000..959d3b3d4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4086.rs
@@ -0,0 +1,2 @@
+#[cfg(any())]
+extern "C++" {}
diff --git a/src/tools/rustfmt/tests/target/issue_4110.rs b/src/tools/rustfmt/tests/target/issue_4110.rs
new file mode 100644
index 000000000..4a58c3946
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4110.rs
@@ -0,0 +1,55 @@
+fn bindings() {
+ let err = match (place_desc, explanation) {
+ (
+ Some(ref name),
+ BorrowExplanation::MustBeValidFor {
+ category:
+ category @ (ConstraintCategory::Return
+ | ConstraintCategory::CallArgument
+ | ConstraintCategory::OpaqueType),
+ from_closure: false,
+ ref region_name,
+ span,
+ ..
+ },
+ ) if borrow_spans.for_generator() | borrow_spans.for_closure() => self
+ .report_escaping_closure_capture(
+ borrow_spans,
+ borrow_span,
+ region_name,
+ category,
+ span,
+ &format!("`{}`", name),
+ ),
+ (
+ ref name,
+ BorrowExplanation::MustBeValidFor {
+ category: ConstraintCategory::Assignment,
+ from_closure: false,
+ region_name:
+ RegionName {
+ source: RegionNameSource::AnonRegionFromUpvar(upvar_span, ref upvar_name),
+ ..
+ },
+ span,
+ ..
+ },
+ ) => self.report_escaping_data(borrow_span, name, upvar_span, upvar_name, span),
+ (Some(name), explanation) => self.report_local_value_does_not_live_long_enough(
+ location,
+ &name,
+ &borrow,
+ drop_span,
+ borrow_spans,
+ explanation,
+ ),
+ (None, explanation) => self.report_temporary_value_does_not_live_long_enough(
+ location,
+ &borrow,
+ drop_span,
+ borrow_spans,
+ proper_span,
+ explanation,
+ ),
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4257.rs b/src/tools/rustfmt/tests/target/issue_4257.rs
new file mode 100644
index 000000000..1ebaaf2b6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4257.rs
@@ -0,0 +1,18 @@
+#![feature(generic_associated_types)]
+#![allow(incomplete_features)]
+
+trait Trait<T> {
+ type Type<'a>
+ where
+ T: 'a;
+ fn foo(x: &T) -> Self::Type<'_>;
+}
+impl<T> Trait<T> for () {
+ type Type<'a>
+ where
+ T: 'a,
+ = &'a T;
+ fn foo(x: &T) -> Self::Type<'_> {
+ x
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4322.rs b/src/tools/rustfmt/tests/target/issue_4322.rs
new file mode 100644
index 000000000..0ec054711
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4322.rs
@@ -0,0 +1,5 @@
+trait Bar {
+ type X<'a>
+ where
+ Self: 'a;
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4374.rs b/src/tools/rustfmt/tests/target/issue_4374.rs
new file mode 100644
index 000000000..f5bf657bb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4374.rs
@@ -0,0 +1,13 @@
+fn a<F>(_f: F) -> ()
+where
+ F: FnOnce() -> (),
+{
+}
+fn main() {
+ a(|| {
+ #[allow(irrefutable_let_patterns)]
+ while let _ = 0 {
+ break;
+ }
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4467.rs b/src/tools/rustfmt/tests/target/issue_4467.rs
new file mode 100644
index 000000000..f5ee96c4c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4467.rs
@@ -0,0 +1,6 @@
+pub fn main() {
+ #[cfg(feature = "std")]
+ {
+ // Comment
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4475.rs b/src/tools/rustfmt/tests/target/issue_4475.rs
new file mode 100644
index 000000000..ea6726c5a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4475.rs
@@ -0,0 +1,29 @@
+fn main() {
+ #[cfg(debug_assertions)]
+ {
+ println!("DEBUG");
+ }
+}
+
+fn main() {
+ #[cfg(feature = "foo")]
+ {
+ /*
+ let foo = 0
+ */
+ }
+}
+
+fn main() {
+ #[cfg(feature = "foo")]
+ { /* let foo = 0; */ }
+}
+
+fn main() {
+ #[foo]
+ #[bar]
+ #[baz]
+ {
+ // let foo = 0;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4522.rs b/src/tools/rustfmt/tests/target/issue_4522.rs
new file mode 100644
index 000000000..5ca70e1c0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4522.rs
@@ -0,0 +1,6 @@
+fn main() {
+ #[cfg(feature = "foo")]
+ {
+ // let foo = 0;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4528.rs b/src/tools/rustfmt/tests/target/issue_4528.rs
new file mode 100644
index 000000000..7828804b0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4528.rs
@@ -0,0 +1,8 @@
+#![allow(clippy::no_effect)]
+
+extern "C" {
+ // N.B., mutability can be easily incorrect in FFI calls -- as
+ // in C, the default is mutable pointers.
+ fn ffi(c: *mut u8);
+ fn int_ffi(c: *mut i32);
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4545.rs b/src/tools/rustfmt/tests/target/issue_4545.rs
new file mode 100644
index 000000000..f87c81036
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4545.rs
@@ -0,0 +1,5 @@
+#[derive(Debug, Foo<T>)]
+enum Bar {}
+
+#[derive(Debug, , Default)]
+struct Struct(i32);
diff --git a/src/tools/rustfmt/tests/target/issue_4573.rs b/src/tools/rustfmt/tests/target/issue_4573.rs
new file mode 100644
index 000000000..82cfe4f53
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4573.rs
@@ -0,0 +1,245 @@
+// rustmft-version:Two
+// rustmft-use_small_heuristics:Max
+// rustmft-merge_derives:false
+// These are the same rustfmt configuration options that are used
+// in the compiler as of ce39461ca75a and 8eb7c58dbb7b
+// These are commits in https://github.com/rust-lang/rust
+
+#![no_std] // inner attribute comment
+// inner attribute comment
+#![no_implicit_prelude]
+// post inner attribute comment
+
+#[cfg(not(miri))] // inline comment
+#[no_link]
+extern crate foo;
+
+// before attributes
+#[no_link]
+// between attributes
+#[cfg(not(miri))] // inline comment
+extern crate foo as bar;
+
+#[cfg(not(miri))] // inline comment
+// between attribute and use
+use foo;
+
+#[cfg(not(miri))] // inline comment
+use foo;
+
+/* pre attribute */
+#[cfg(not(miri))]
+use foo::bar;
+
+#[cfg(not(miri))] // inline comment
+use foo::bar as FooBar;
+
+#[cfg(not(miri))] // inline comment
+#[allow(unused)]
+#[deprecated(
+ since = "5.2", // inline inner comment
+ note = "FOO was rarely used. Users should instead use BAR"
+)]
+#[allow(unused)]
+static FOO: i32 = 42;
+
+#[used]
+#[export_name = "FOO"]
+#[cfg(not(miri))] // inline comment
+#[deprecated(
+ since = "5.2",
+ note = "FOO was rarely used. Users should instead use BAR"
+)]
+static FOO: i32 = 42;
+
+#[cfg(not(miri))] // inline comment
+#[export_name = "FOO"]
+static BAR: &'static str = "bar";
+
+#[cfg(not(miri))] // inline comment
+const BAR: i32 = 42;
+
+#[cfg(not(miri))] // inline comment
+#[no_mangle]
+#[link_section = ".example_section"]
+fn foo(bar: usize) {
+ #[cfg(not(miri))] // inline comment
+ println!("hello world!");
+}
+
+#[cfg(not(miri))] // inline comment
+mod foo {}
+
+#[cfg(not(miri))] // inline comment
+extern "C" {
+ fn my_c_function(x: i32) -> bool;
+}
+
+#[cfg(not(miri))] // inline comment
+#[link(name = "CoreFoundation", kind = "framework")]
+extern "C" {
+
+ #[link_name = "actual_symbol_name"] // inline comment
+ // between attribute and function
+ fn my_c_function(x: i32) -> bool;
+}
+
+#[cfg(not(miri))] // inline comment
+pub extern "C" fn callable_from_c(x: i32) -> bool {
+ x % 3 == 0
+}
+
+#[cfg(not(miri))] // inline comment
+/* between attribute block comment */
+#[no_mangle]
+/* between attribute and type */
+type Foo = Bar<u8>;
+
+#[no_mangle]
+#[cfg(not(miri))] // inline comment
+#[non_exhaustive] // inline comment
+enum Foo {
+ Bar,
+ Baz,
+}
+
+#[no_mangle]
+#[cfg(not(miri))] /* inline comment */
+struct Foo<A> {
+ x: A,
+}
+
+#[cfg(not(miri))] // inline comment
+union Foo<A, B> {
+ x: A,
+ y: B,
+}
+
+#[cfg(not(miri))] // inline comment
+trait Foo {}
+
+#[cfg(not(miri))] // inline comment
+trait Foo = Bar + Quux;
+
+#[cfg(not(miri))] // inline comment
+impl Foo {}
+
+#[cfg(not(miri))] // inline comment
+macro_rules! bar {
+ (3) => {};
+}
+
+mod nested {
+ #[cfg(not(miri))] // inline comment
+ // between attribute and use
+ use foo;
+
+ #[cfg(not(miri))] // inline comment
+ use foo;
+
+ #[cfg(not(miri))] // inline comment
+ use foo::bar;
+
+ #[cfg(not(miri))] // inline comment
+ use foo::bar as FooBar;
+
+ #[cfg(not(miri))] // inline comment
+ static FOO: i32 = 42;
+
+ #[cfg(not(miri))] // inline comment
+ static FOO: i32 = 42;
+
+ #[cfg(not(miri))] // inline comment
+ static FOO: &'static str = "bar";
+
+ #[cfg(not(miri))] // inline comment
+ const FOO: i32 = 42;
+
+ #[cfg(not(miri))] // inline comment
+ fn foo(bar: usize) {
+ #[cfg(not(miri))] // inline comment
+ println!("hello world!");
+ }
+
+ #[cfg(not(miri))] // inline comment
+ mod foo {}
+
+ #[cfg(not(miri))] // inline comment
+ mod foo {}
+
+ #[cfg(not(miri))] // inline comment
+ extern "C" {
+ fn my_c_function(x: i32) -> bool;
+ }
+
+ #[cfg(not(miri))] // inline comment
+ #[link(name = "CoreFoundation", kind = "framework")]
+ extern "C" {
+
+ #[link_name = "actual_symbol_name"] // inline comment
+ // between attribute and function
+ fn my_c_function(x: i32) -> bool;
+ }
+
+ #[cfg(not(miri))] // inline comment
+ pub extern "C" fn callable_from_c(x: i32) -> bool {
+ x % 3 == 0
+ }
+
+ #[cfg(not(miri))] // inline comment
+ type Foo = Bar<u8>;
+
+ #[cfg(not(miri))] // inline comment
+ #[non_exhaustive] // inline comment
+ enum Foo {
+ // comment
+ #[attribute_1]
+ #[attribute_2] // comment
+ // comment!
+ Bar,
+ /* comment */
+ #[attribute_1]
+ #[attribute_2] /* comment */
+ #[attribute_3]
+ #[attribute_4]
+ /* comment! */
+ Baz,
+ }
+
+ #[cfg(not(miri))] // inline comment
+ struct Foo<A> {
+ x: A,
+ }
+
+ #[cfg(not(miri))] // inline comment
+ union Foo<A, B> {
+ #[attribute_1]
+ #[attribute_2] /* comment */
+ #[attribute_3]
+ #[attribute_4] // comment
+ x: A,
+ y: B,
+ }
+
+ #[cfg(not(miri))] // inline comment
+ #[allow(missing_docs)]
+ trait Foo {
+ #[must_use] /* comment
+ * that wraps to
+ * the next line */
+ fn bar() {}
+ }
+
+ #[allow(missing_docs)]
+ #[cfg(not(miri))] // inline comment
+ trait Foo = Bar + Quux;
+
+ #[allow(missing_docs)]
+ #[cfg(not(miri))] // inline comment
+ impl Foo {}
+
+ #[cfg(not(miri))] // inline comment
+ macro_rules! bar {
+ (3) => {};
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4579.rs b/src/tools/rustfmt/tests/target/issue_4579.rs
new file mode 100644
index 000000000..7b0a5f3a6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4579.rs
@@ -0,0 +1,16 @@
+// rustfmt-hard_tabs: true
+
+#[macro_export]
+macro_rules! main {
+ () => {
+ #[spirv(fragment)]
+ pub fn main_fs(
+ mut out_color: ::spirv_std::storage_class::Output<Vec4>,
+ #[spirv(descriptor_set = 1)]
+ iChannelResolution: ::spirv_std::storage_class::UniformConstant<
+ [::spirv_std::glam::Vec3A; 4],
+ >,
+ ) {
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4584.rs b/src/tools/rustfmt/tests/target/issue_4584.rs
new file mode 100644
index 000000000..20255bead
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4584.rs
@@ -0,0 +1,32 @@
+// rustfmt-indent_style: Visual
+
+#[derive(Debug)]
+pub enum Case {
+ Upper,
+ Lower,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Case {
+ Upper,
+ Lower,
+}
+
+// NB - This formatting may look off from the desired state, but it is
+// consistent with current behavior. Included here to provide a line-wrapped
+// derive case with the changes applied to resolve issue #4584
+#[derive(Add,
+ Sub,
+ Mul,
+ Div,
+ Clone,
+ Copy,
+ Eq,
+ PartialEq,
+ Ord,
+ PartialOrd,
+ Debug,
+ Hash,
+ Serialize,
+ Mul)]
+struct Foo {}
diff --git a/src/tools/rustfmt/tests/target/issue_4636.rs b/src/tools/rustfmt/tests/target/issue_4636.rs
new file mode 100644
index 000000000..a6465e29a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4636.rs
@@ -0,0 +1,13 @@
+pub trait PrettyPrinter<'tcx>:
+ Printer<
+ 'tcx,
+ Error = fmt::Error,
+ Path = Self,
+ Region = Self,
+ Type = Self,
+ DynExistential = Self,
+ Const = Self,
+ > + fmt::Write
+{
+ //
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4675.rs b/src/tools/rustfmt/tests/target/issue_4675.rs
new file mode 100644
index 000000000..a65f86832
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4675.rs
@@ -0,0 +1,8 @@
+macro_rules! foo {
+ ($s:ident ( $p:pat )) => {
+ Foo {
+ name: Name::$s($p),
+ ..
+ }
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4823.rs b/src/tools/rustfmt/tests/target/issue_4823.rs
new file mode 100644
index 000000000..de17467c0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4823.rs
@@ -0,0 +1,5 @@
+macro_rules! m {
+ () => {
+ type Type;
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4850.rs b/src/tools/rustfmt/tests/target/issue_4850.rs
new file mode 100644
index 000000000..7d4da9022
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4850.rs
@@ -0,0 +1,4 @@
+impl ThisIsALongStructNameToPushTheWhereToWrapLolololol where
+ [(); this_is_a_long_const_function_name()]:
+{
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4854.rs b/src/tools/rustfmt/tests/target/issue_4854.rs
new file mode 100644
index 000000000..a81c5a517
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4854.rs
@@ -0,0 +1,115 @@
+struct Struct {
+ // Multiline comment
+ // should be formatted
+ // properly.
+}
+
+struct Struct2 {
+ // This formatting
+ // Should be changed
+}
+
+struct Struct3(
+ // This
+ // is
+ // correct
+);
+
+struct Struct4(
+ // This
+ // is
+ // not
+ // correct
+);
+
+struct Struct5 {
+ /*
+ Comment block
+ with many lines.
+ */
+}
+
+struct Struct6(
+ /*
+ Comment block
+ with many lines.
+ */
+);
+
+struct Struct7 {
+ /*
+ Invalid
+ format
+ */
+}
+
+struct Struct8(
+ /*
+ Invalid
+ format
+ */
+);
+
+struct Struct9 {/* bar */}
+
+struct Struct10 {
+ /* bar
+ baz
+ */
+}
+
+mod module {
+ struct Struct {
+ // Multiline comment
+ // should be formatted
+ // properly.
+ }
+
+ struct Struct2 {
+ // This formatting
+ // Should be changed
+ }
+
+ struct Struct3(
+ // This
+ // is
+ // correct
+ );
+
+ struct Struct4(
+ // This
+ // is
+ // not
+ // correct
+ );
+
+ struct Struct5 {
+ /*
+ Comment block
+ with many lines.
+ */
+ }
+
+ struct Struct6(
+ /*
+ Comment block
+ with many lines.
+ */
+ );
+
+ struct Struct7 {
+ /*
+ Invalid
+ format
+ */
+ }
+
+ struct Struct8(
+ /*
+ Invalid
+ format
+ */
+ );
+
+ struct Struct9 {/* bar */}
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4868.rs b/src/tools/rustfmt/tests/target/issue_4868.rs
new file mode 100644
index 000000000..763a82c32
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4868.rs
@@ -0,0 +1,17 @@
+enum NonAscii {
+ Abcd,
+ Éfgh,
+}
+
+use NonAscii::*;
+
+fn f(x: NonAscii) -> bool {
+ match x {
+ Éfgh => true,
+ _ => false,
+ }
+}
+
+fn main() {
+ dbg!(f(Abcd));
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4911.rs b/src/tools/rustfmt/tests/target/issue_4911.rs
new file mode 100644
index 000000000..890a62267
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4911.rs
@@ -0,0 +1,9 @@
+#![feature(generic_associated_types)]
+#![feature(min_type_alias_impl_trait)]
+
+impl SomeTrait for SomeType {
+ type SomeGAT<'a>
+ where
+ Self: 'a,
+ = impl SomeOtherTrait;
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4936.rs b/src/tools/rustfmt/tests/target/issue_4936.rs
new file mode 100644
index 000000000..c19e505fd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4936.rs
@@ -0,0 +1,10 @@
+#[discard_params_doc]
+trait Trait {
+ fn foo(
+ &self,
+ /// some docs
+ bar: String,
+ /// another docs
+ baz: i32,
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4943.rs b/src/tools/rustfmt/tests/target/issue_4943.rs
new file mode 100644
index 000000000..318f7ebed
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4943.rs
@@ -0,0 +1,10 @@
+#![feature(generic_associated_types)]
+
+impl SomeStruct {
+ fn process<T>(v: T) -> <Self as GAT>::R<T>
+ where
+ Self: GAT<R<T> = T>,
+ {
+ SomeStruct::do_something(v)
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/issue_4954.rs b/src/tools/rustfmt/tests/target/issue_4954.rs
new file mode 100644
index 000000000..aa5e79bef
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4954.rs
@@ -0,0 +1,7 @@
+trait Foo {
+ type Arg<'a>;
+}
+
+struct Bar<T>(T)
+where
+ for<'a> T: Foo<Arg<'a> = ()>;
diff --git a/src/tools/rustfmt/tests/target/issue_4963.rs b/src/tools/rustfmt/tests/target/issue_4963.rs
new file mode 100644
index 000000000..0c3c13579
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_4963.rs
@@ -0,0 +1,9 @@
+mod test {
+ extern "C" {
+ fn test();
+ }
+}
+
+extern "C" {
+ fn test();
+}
diff --git a/src/tools/rustfmt/tests/target/issue_5027.rs b/src/tools/rustfmt/tests/target/issue_5027.rs
new file mode 100644
index 000000000..26d771720
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_5027.rs
@@ -0,0 +1,17 @@
+// rustfmt-version: Two
+
+pub type Iter<'a, D> = impl DoubleEndedIterator<Item = (SomethingSomethingSomethingLongType<D>)>
+ + ExactSizeIterator
+ + 'a;
+
+trait Foo {
+ pub type Iter<'a, D> = impl DoubleEndedIterator<Item = (SomethingSomethingSomethingLongType<D>)>
+ + ExactSizeIterator
+ + 'a;
+}
+
+impl Bar {
+ type Iter<'a, D> = impl DoubleEndedIterator<Item = (SomethingSomethingSomethingLongType<D>)>
+ + ExactSizeIterator
+ + 'a;
+}
diff --git a/src/tools/rustfmt/tests/target/issue_5086.rs b/src/tools/rustfmt/tests/target/issue_5086.rs
new file mode 100644
index 000000000..7a0be06f7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_5086.rs
@@ -0,0 +1,2 @@
+#[cfg(any())]
+type Type: Bound;
diff --git a/src/tools/rustfmt/tests/target/issue_5273.rs b/src/tools/rustfmt/tests/target/issue_5273.rs
new file mode 100644
index 000000000..3bb9048a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_5273.rs
@@ -0,0 +1,3 @@
+struct Example<const N: usize = { 1048576 }> {
+ //
+}
diff --git a/src/tools/rustfmt/tests/target/issue_5399.rs b/src/tools/rustfmt/tests/target/issue_5399.rs
new file mode 100644
index 000000000..17364c389
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/issue_5399.rs
@@ -0,0 +1,48 @@
+// rustfmt-max_width: 140
+
+impl NotificationRepository {
+ fn set_status_changed(
+ &self,
+ repo_tx_conn: &RepoTxConn,
+ rid: &RoutableId,
+ changed_at: NaiveDateTime,
+ ) -> NukeResult<Option<NotificationStatus>> {
+ repo_tx_conn.run(move |conn| {
+ let res = diesel::update(client_notification::table)
+ .filter(
+ client_notification::routable_id.eq(DieselRoutableId(rid.clone())).and(
+ client_notification::changed_at
+ .lt(changed_at)
+ .or(client_notification::changed_at.is_null()),
+ ),
+ )
+ .set(client_notification::changed_at.eq(changed_at))
+ .returning((
+ client_notification::id,
+ client_notification::changed_at,
+ client_notification::polled_at,
+ client_notification::notified_at,
+ ))
+ .get_result::<(Uuid, Option<NaiveDateTime>, Option<NaiveDateTime>, Option<NaiveDateTime>)>(conn)
+ .optional()?;
+
+ match res {
+ Some(row) => {
+ let client_id = client_contract::table
+ .inner_join(client_notification::table)
+ .filter(client_notification::id.eq(row.0))
+ .select(client_contract::client_id)
+ .get_result::<Uuid>(conn)?;
+
+ Ok(Some(NotificationStatus {
+ client_id: client_id.into(),
+ changed_at: row.1,
+ polled_at: row.2,
+ notified_at: row.3,
+ }))
+ }
+ None => Ok(None),
+ }
+ })
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/item-brace-style-always-next-line.rs b/src/tools/rustfmt/tests/target/item-brace-style-always-next-line.rs
new file mode 100644
index 000000000..4935fac04
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/item-brace-style-always-next-line.rs
@@ -0,0 +1,71 @@
+// rustfmt-brace_style: AlwaysNextLine
+
+mod M
+{
+ enum A
+ {
+ A,
+ }
+
+ struct B
+ {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C {}
+
+ struct D {}
+
+ enum A<T>
+ where
+ T: Copy,
+ {
+ A,
+ }
+
+ struct B<T>
+ where
+ T: Copy,
+ {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C<T>
+ where
+ T: Copy, {}
+
+ struct D<T>
+ where
+ T: Copy, {}
+}
+
+fn function() {}
+
+trait Trait {}
+
+impl<T> Trait for T {}
+
+trait Trait2<T>
+where
+ T: Copy + Display + Write + Read + FromStr,
+{
+}
+
+trait Trait3<T>
+where
+ T: Something
+ + SomethingElse
+ + Sync
+ + Send
+ + Display
+ + Debug
+ + Copy
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read,
+{
+}
diff --git a/src/tools/rustfmt/tests/target/item-brace-style-prefer-same-line.rs b/src/tools/rustfmt/tests/target/item-brace-style-prefer-same-line.rs
new file mode 100644
index 000000000..ef8dc028c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/item-brace-style-prefer-same-line.rs
@@ -0,0 +1,35 @@
+// rustfmt-brace_style: PreferSameLine
+
+mod M {
+ enum A {
+ A,
+ }
+
+ struct B {
+ b: i32,
+ }
+
+ enum C {}
+
+ struct D {}
+
+ enum A<T>
+ where
+ T: Copy, {
+ A,
+ }
+
+ struct B<T>
+ where
+ T: Copy, {
+ b: i32,
+ }
+
+ enum C<T>
+ where
+ T: Copy, {}
+
+ struct D<T>
+ where
+ T: Copy, {}
+}
diff --git a/src/tools/rustfmt/tests/target/item-brace-style-same-line-where.rs b/src/tools/rustfmt/tests/target/item-brace-style-same-line-where.rs
new file mode 100644
index 000000000..fabe5822c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/item-brace-style-same-line-where.rs
@@ -0,0 +1,37 @@
+mod M {
+ enum A {
+ A,
+ }
+
+ struct B {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C {}
+
+ struct D {}
+
+ enum A<T>
+ where
+ T: Copy,
+ {
+ A,
+ }
+
+ struct B<T>
+ where
+ T: Copy,
+ {
+ b: i32,
+ }
+
+ // For empty enums and structs, the brace remains on the same line.
+ enum C<T>
+ where
+ T: Copy, {}
+
+ struct D<T>
+ where
+ T: Copy, {}
+}
diff --git a/src/tools/rustfmt/tests/target/itemized-blocks/no_wrap.rs b/src/tools/rustfmt/tests/target/itemized-blocks/no_wrap.rs
new file mode 100644
index 000000000..de8856382
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/itemized-blocks/no_wrap.rs
@@ -0,0 +1,47 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_code_in_doc_comments: true
+
+//! This is a list:
+//! * Outer
+//! * Outer
+//! * Inner
+//! * Inner with lots of text so that it could be reformatted something something something lots of text so that it could be reformatted something something something
+//!
+//! This example shows how to configure fern to output really nicely colored logs
+//! - when the log level is error, the whole line is red
+//! - when the log level is warn, the whole line is yellow
+//! - when the log level is info, the level name is green and the rest of the line is white
+//! - when the log level is debug, the whole line is white
+//! - when the log level is trace, the whole line is gray ("bright black")
+
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor
+fn func1() {}
+
+/// All the parameters ***except for `from_theater`*** should be inserted as sent by the remote
+/// theater, i.e., as passed to [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+/// ```
+/// let x = 42;
+/// ```
+fn func2() {}
+
+/// Look:
+///
+/// ```
+/// let x = 42;
+/// ```
+/// * `from` is the sending (remote) [`ActorId`], as reported by the remote theater by theater-specific means
+/// * `to` is the receiving (local) [`ActorId`], as requested by the remote theater
+/// * `tag` is a tag that identifies the message type
+/// * `msg` is the (serialized) message
+fn func3() {}
diff --git a/src/tools/rustfmt/tests/target/itemized-blocks/rewrite_fail.rs b/src/tools/rustfmt/tests/target/itemized-blocks/rewrite_fail.rs
new file mode 100644
index 000000000..a118ef6fa
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/itemized-blocks/rewrite_fail.rs
@@ -0,0 +1,14 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 50
+
+// This example shows how to configure fern to
+// output really nicely colored logs
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - when the log level is info, the level
+// name is green and the rest of the line is
+// white
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+// - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+fn func1() {}
diff --git a/src/tools/rustfmt/tests/target/itemized-blocks/urls.rs b/src/tools/rustfmt/tests/target/itemized-blocks/urls.rs
new file mode 100644
index 000000000..bc46ea47e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/itemized-blocks/urls.rs
@@ -0,0 +1,25 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 79
+
+//! CMSIS: Cortex Microcontroller Software Interface Standard
+//!
+//! The version 5 of the standard can be found at:
+//!
+//! http://arm-software.github.io/CMSIS_5/Core/html/index.html
+//!
+//! The API reference of the standard can be found at:
+//!
+//! - example -- http://example.org -- something something something something
+//! something something
+//! - something something something something something something more -- http://example.org
+//! - http://example.org/something/something/something/something/something/something
+//! and the rest
+//! - Core function access -- http://arm-software.github.io/CMSIS_5/Core/html/group__Core__Register__gr.html
+//! - Intrinsic functions for CPU instructions -- http://arm-software.github.io/CMSIS_5/Core/html/group__intrinsic__CPU__gr.html
+//! - Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Vestibulum sem
+//! lacus, commodo vitae.
+//!
+//! The reference C implementation used as the base of this Rust port can be
+//! found at
+//!
+//! https://github.com/ARM-software/CMSIS_5/blob/5.3.0/CMSIS/Core/Include/cmsis_gcc.h
diff --git a/src/tools/rustfmt/tests/target/itemized-blocks/wrap.rs b/src/tools/rustfmt/tests/target/itemized-blocks/wrap.rs
new file mode 100644
index 000000000..a4907303c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/itemized-blocks/wrap.rs
@@ -0,0 +1,89 @@
+// rustfmt-wrap_comments: true
+// rustfmt-format_code_in_doc_comments: true
+// rustfmt-max_width: 50
+
+//! This is a list:
+//! * Outer
+//! * Outer
+//! * Inner
+//! * Inner with lots of text so that it could
+//! be reformatted something something
+//! something lots of text so that it could be
+//! reformatted something something something
+//!
+//! This example shows how to configure fern to
+//! output really nicely colored logs
+//! - when the log level is error, the whole line
+//! is red
+//! - when the log level is warn, the whole line
+//! is yellow
+//! - when the log level is info, the level name
+//! is green and the rest of the line is white
+//! - when the log level is debug, the whole line
+//! is white
+//! - when the log level is trace, the whole line
+//! is gray ("bright black")
+
+// This example shows how to configure fern to
+// output really nicely colored logs
+// - when the log level is error, the whole line
+// is red
+// - when the log level is warn, the whole line
+// is yellow
+// - when the log level is info, the level
+// name is green and the rest of the line is
+// white
+// - when the log level is debug, the whole line
+// is white
+// - when the log level is trace, the whole line
+// is gray ("bright black")
+
+/// All the parameters ***except for
+/// `from_theater`*** should be inserted as sent
+/// by the remote theater, i.e., as passed to
+/// [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`],
+/// as reported by the remote theater by
+/// theater-specific means
+/// * `to` is the receiving (local) [`ActorId`],
+/// as requested by the remote theater
+/// * `tag` is a tag that identifies the message
+/// type
+/// * `msg` is the (serialized) message
+/// All the parameters ***except for
+/// `from_theater`*** should be inserted as sent
+/// by the remote theater, i.e., as passed to
+/// [`Theater::send`] on the remote actor
+fn func1() {}
+
+/// All the parameters ***except for
+/// `from_theater`*** should be inserted as sent
+/// by the remote theater, i.e., as passed to
+/// [`Theater::send`] on the remote actor:
+/// * `from` is the sending (remote) [`ActorId`],
+/// as reported by the remote theater by
+/// theater-specific means
+/// * `to` is the receiving (local) [`ActorId`],
+/// as requested by the remote theater
+/// * `tag` is a tag that identifies the message
+/// type
+/// * `msg` is the (serialized) message
+/// ```
+/// let x = 42;
+/// ```
+fn func2() {}
+
+/// Look:
+///
+/// ```
+/// let x = 42;
+/// ```
+/// * `from` is the sending (remote) [`ActorId`],
+/// as reported by the remote theater by
+/// theater-specific means
+/// * `to` is the receiving (local) [`ActorId`],
+/// as requested by the remote theater
+/// * `tag` is a tag that identifies the message
+/// type
+/// * `msg` is the (serialized) message
+fn func3() {}
diff --git a/src/tools/rustfmt/tests/target/label_break.rs b/src/tools/rustfmt/tests/target/label_break.rs
new file mode 100644
index 000000000..728d78137
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/label_break.rs
@@ -0,0 +1,28 @@
+// format with label break value.
+fn main() {
+ 'empty_block: {}
+
+ 'block: {
+ do_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_next_thing();
+ if condition_not_met() {
+ break 'block;
+ }
+ do_last_thing();
+ }
+
+ let result = 'block: {
+ if foo() {
+ // comment
+ break 'block 1;
+ }
+ if bar() {
+ /* comment */
+ break 'block 2;
+ }
+ 3
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/large-block.rs b/src/tools/rustfmt/tests/target/large-block.rs
new file mode 100644
index 000000000..09e9169f3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/large-block.rs
@@ -0,0 +1,5 @@
+fn issue1351() {
+ std_fmt_Arguments_new_v1_std_rt_begin_panic_fmt_sdfasfasdfasdf({
+ static __STATIC_FMTSTR: &'static [&'static str] = &[];
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/large_vec.rs b/src/tools/rustfmt/tests/target/large_vec.rs
new file mode 100644
index 000000000..95d1fc43c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/large_vec.rs
@@ -0,0 +1,42 @@
+// See #1470.
+
+impl Environment {
+ pub fn new_root() -> Rc<RefCell<Environment>> {
+ let mut env = Environment::new();
+ let builtin_functions = &[
+ (
+ "println",
+ Function::NativeVoid(
+ CallSign {
+ num_params: 0,
+ variadic: true,
+ param_types: vec![],
+ },
+ native_println,
+ ),
+ ),
+ (
+ "run_http_server",
+ Function::NativeVoid(
+ CallSign {
+ num_params: 1,
+ variadic: false,
+ param_types: vec![Some(ConstraintType::Function)],
+ },
+ native_run_http_server,
+ ),
+ ),
+ (
+ "len",
+ Function::NativeReturning(
+ CallSign {
+ num_params: 1,
+ variadic: false,
+ param_types: vec![None],
+ },
+ native_len,
+ ),
+ ),
+ ];
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/lazy_static.rs b/src/tools/rustfmt/tests/target/lazy_static.rs
new file mode 100644
index 000000000..3625e0a5f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/lazy_static.rs
@@ -0,0 +1,49 @@
+// Format `lazy_static!`.
+
+lazy_static! {
+ static ref CONFIG_NAME_REGEX: regex::Regex =
+ regex::Regex::new(r"^## `([^`]+)`").expect("Failed creating configuration pattern");
+ static ref CONFIG_VALUE_REGEX: regex::Regex = regex::Regex::new(r#"^#### `"?([^`"]+)"?`"#)
+ .expect("Failed creating configuration value pattern");
+}
+
+// We need to be able to format `lazy_static!` without known syntax.
+lazy_static!(xxx, yyyy, zzzzz);
+
+lazy_static! {}
+
+// #2354
+lazy_static! {
+ pub static ref Sbase64_encode_string: ::lisp::LispSubrRef = {
+ let subr = ::remacs_sys::Lisp_Subr {
+ header: ::remacs_sys::Lisp_Vectorlike_Header {
+ size: ((::remacs_sys::PseudovecType::PVEC_SUBR as ::libc::ptrdiff_t)
+ << ::remacs_sys::PSEUDOVECTOR_AREA_BITS),
+ },
+ function: self::Fbase64_encode_string as *const ::libc::c_void,
+ min_args: 1i16,
+ max_args: 2i16,
+ symbol_name: (b"base64-encode-string\x00").as_ptr() as *const ::libc::c_char,
+ intspec: ::std::ptr::null(),
+ doc: ::std::ptr::null(),
+ lang: ::remacs_sys::Lisp_Subr_Lang_Rust,
+ };
+ unsafe {
+ let ptr = ::remacs_sys::xmalloc(::std::mem::size_of::<::remacs_sys::Lisp_Subr>())
+ as *mut ::remacs_sys::Lisp_Subr;
+ ::std::ptr::copy_nonoverlapping(&subr, ptr, 1);
+ ::std::mem::forget(subr);
+ ::lisp::ExternalPtr::new(ptr)
+ }
+ };
+}
+
+lazy_static! {
+ static ref FOO: HashMap<
+ String,
+ (
+ &'static str,
+ fn(Foo) -> Result<Box<Bar>, Either<FooError, BarError>>
+ ),
+ > = HashMap::new();
+}
diff --git a/src/tools/rustfmt/tests/target/let_else.rs b/src/tools/rustfmt/tests/target/let_else.rs
new file mode 100644
index 000000000..a6e816fb5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/let_else.rs
@@ -0,0 +1,3 @@
+fn main() {
+ let Some(1) = Some(1) else { return };
+}
diff --git a/src/tools/rustfmt/tests/target/long-fn-1/version_one.rs b/src/tools/rustfmt/tests/target/long-fn-1/version_one.rs
new file mode 100644
index 000000000..05f69953c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/long-fn-1/version_one.rs
@@ -0,0 +1,29 @@
+// rustfmt-version: One
+// Tests that a function which is almost short enough, but not quite, gets
+// formatted correctly.
+
+impl Foo {
+ fn some_input(
+ &mut self,
+ input: Input,
+ input_path: Option<PathBuf>,
+ ) -> (Input, Option<PathBuf>) {
+ }
+
+ fn some_inpu(&mut self, input: Input, input_path: Option<PathBuf>) -> (Input, Option<PathBuf>) {
+ }
+}
+
+// #1843
+#[allow(non_snake_case)]
+pub extern "C" fn Java_com_exonum_binding_storage_indices_ValueSetIndexProxy_nativeContainsByHash(
+) -> bool {
+ false
+}
+
+// #3009
+impl Something {
+ fn my_function_name_is_way_to_long_but_used_as_a_case_study_or_an_example_its_fine(
+ ) -> Result<(), String> {
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/long-fn-1/version_two.rs b/src/tools/rustfmt/tests/target/long-fn-1/version_two.rs
new file mode 100644
index 000000000..32794bccd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/long-fn-1/version_two.rs
@@ -0,0 +1,29 @@
+// rustfmt-version: Two
+// Tests that a function which is almost short enough, but not quite, gets
+// formatted correctly.
+
+impl Foo {
+ fn some_input(
+ &mut self,
+ input: Input,
+ input_path: Option<PathBuf>,
+ ) -> (Input, Option<PathBuf>) {
+ }
+
+ fn some_inpu(&mut self, input: Input, input_path: Option<PathBuf>) -> (Input, Option<PathBuf>) {
+ }
+}
+
+// #1843
+#[allow(non_snake_case)]
+pub extern "C" fn Java_com_exonum_binding_storage_indices_ValueSetIndexProxy_nativeContainsByHash()
+-> bool {
+ false
+}
+
+// #3009
+impl Something {
+ fn my_function_name_is_way_to_long_but_used_as_a_case_study_or_an_example_its_fine()
+ -> Result<(), String> {
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/long-match-arms-brace-newline.rs b/src/tools/rustfmt/tests/target/long-match-arms-brace-newline.rs
new file mode 100644
index 000000000..aeb384e72
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/long-match-arms-brace-newline.rs
@@ -0,0 +1,15 @@
+// rustfmt-format_strings: true
+// rustfmt-max_width: 80
+// rustfmt-control_brace_style: AlwaysNextLine
+
+fn main() {
+ match x
+ {
+ aaaaaaaa::Bbbbb::Ccccccccccccc(_, Some(ref x))
+ if x == "aaaaaaaaaaa aaaaaaa aaaaaa" =>
+ {
+ Ok(())
+ }
+ _ => Err(x),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/long-use-statement-issue-3154.rs b/src/tools/rustfmt/tests/target/long-use-statement-issue-3154.rs
new file mode 100644
index 000000000..877241e3b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/long-use-statement-issue-3154.rs
@@ -0,0 +1,3 @@
+// rustfmt-reorder_imports: false
+
+pub use self::super::super::super::root::mozilla::detail::StringClassFlags as nsTStringRepr_ClassFlags;
diff --git a/src/tools/rustfmt/tests/target/long_field_access.rs b/src/tools/rustfmt/tests/target/long_field_access.rs
new file mode 100644
index 000000000..349d2c2f6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/long_field_access.rs
@@ -0,0 +1,4 @@
+fn f() {
+ block_flow.base.stacking_relative_position_of_display_port =
+ self.base.stacking_relative_position_of_display_port;
+}
diff --git a/src/tools/rustfmt/tests/target/loop.rs b/src/tools/rustfmt/tests/target/loop.rs
new file mode 100644
index 000000000..f669e7e2c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/loop.rs
@@ -0,0 +1,34 @@
+fn main() {
+ loop {
+ return some_val;
+ }
+
+ let x = loop {
+ do_forever();
+ };
+
+ 'label: loop {
+ // Just comments
+ }
+
+ 'a: while loooooooooooooooooooooooooooooooooong_variable_name + another_value > some_other_value
+ {
+ }
+
+ while aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa > bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb {
+ }
+
+ while aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa {
+ }
+
+ 'b: for xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx in some_iter(arg1, arg2)
+ {
+ // do smth
+ }
+
+ while let Some(i) = x.find('s') {
+ x.update();
+ continue;
+ continue 'foo;
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/macro_not_expr.rs b/src/tools/rustfmt/tests/target/macro_not_expr.rs
new file mode 100644
index 000000000..45f85ff2c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/macro_not_expr.rs
@@ -0,0 +1,7 @@
+macro_rules! test {
+ ($($t:tt)*) => {};
+}
+
+fn main() {
+ test!( a : B => c d );
+}
diff --git a/src/tools/rustfmt/tests/target/macro_rules.rs b/src/tools/rustfmt/tests/target/macro_rules.rs
new file mode 100644
index 000000000..97444aef4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/macro_rules.rs
@@ -0,0 +1,360 @@
+// rustfmt-format_macro_matchers: true
+
+macro_rules! m {
+ () => {};
+ ($x:ident) => {};
+ ($m1:ident, $m2:ident, $x:ident) => {};
+ ($($beginning:ident),*; $middle:ident; $($end:ident),*) => {};
+ (
+ $($beginning:ident),*;
+ $middle:ident;
+ $($end:ident),*;
+ $($beginning:ident),*;
+ $middle:ident;
+ $($end:ident),*
+ ) => {};
+ ($name:ident($($dol:tt $var:ident)*) $($body:tt)*) => {};
+ (
+ $($i:ident : $ty:ty, $def:expr, $stb:expr, $($dstring:tt),+);+ $(;)*
+ $($i:ident : $ty:ty, $def:expr, $stb:expr, $($dstring:tt),+);+ $(;)*
+ ) => {};
+ ($foo:tt foo[$attr:meta] $name:ident) => {};
+ ($foo:tt[$attr:meta] $name:ident) => {};
+ ($foo:tt &'a[$attr:meta] $name:ident) => {};
+ ($foo:tt foo #[$attr:meta] $name:ident) => {};
+ ($foo:tt #[$attr:meta] $name:ident) => {};
+ ($foo:tt &'a #[$attr:meta] $name:ident) => {};
+ ($x:tt foo bar foo bar foo bar $y:tt => x * y * z $z:tt, $($a:tt),*) => {};
+}
+
+macro_rules! impl_a_method {
+ ($n:ident($a:ident : $ta:ty) -> $ret:ty { $body:expr }) => {
+ fn $n($a: $ta) -> $ret {
+ $body
+ }
+ macro_rules! $n {
+ ($va: expr) => {
+ $n($va)
+ };
+ }
+ };
+ ($n:ident($a:ident : $ta:ty, $b:ident : $tb:ty) -> $ret:ty { $body:expr }) => {
+ fn $n($a: $ta, $b: $tb) -> $ret {
+ $body
+ }
+ macro_rules! $n {
+ ($va: expr,$vb: expr) => {
+ $n($va, $vb)
+ };
+ }
+ };
+ (
+ $n:ident($a:ident : $ta:ty, $b:ident : $tb:ty, $c:ident : $tc:ty) -> $ret:ty { $body:expr }
+ ) => {
+ fn $n($a: $ta, $b: $tb, $c: $tc) -> $ret {
+ $body
+ }
+ macro_rules! $n {
+ ($va: expr,$vb: expr,$vc: expr) => {
+ $n($va, $vb, $vc)
+ };
+ }
+ };
+ (
+ $n:ident($a:ident : $ta:ty, $b:ident : $tb:ty, $c:ident : $tc:ty, $d:ident : $td:ty) ->
+ $ret:ty { $body:expr }
+ ) => {
+ fn $n($a: $ta, $b: $tb, $c: $tc, $d: $td) -> $ret {
+ $body
+ }
+ macro_rules! $n {
+ ($va: expr,$vb: expr,$vc: expr,$vd: expr) => {
+ $n($va, $vb, $vc, $vd)
+ };
+ }
+ };
+}
+
+macro_rules! m {
+ // a
+ ($expr:expr, $($func:ident)*) => {{
+ let x = $expr;
+ $func(x)
+ }};
+
+ /* b */
+ () => {
+ /* c */
+ };
+
+ (@tag) => {};
+
+ // d
+ ($item:ident) => {
+ mod macro_item {
+ struct $item;
+ }
+ };
+}
+
+macro m2 {
+ // a
+ ($expr:expr, $($func:ident)*) => {{
+ let x = $expr;
+ $func(x)
+ }}
+
+ /* b */
+ () => {
+ /* c */
+ }
+
+ (@tag) => {}
+
+ // d
+ ($item:ident) => {
+ mod macro_item {
+ struct $item;
+ }
+ }
+}
+
+// #2438, #2476
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_98_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx();
+ }
+ };
+}
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_99_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+ );
+ }
+ };
+}
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_100_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+ );
+ }
+ };
+}
+macro_rules! m {
+ () => {
+ fn foo() {
+ this_line_is_101_characters_long_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx(
+ );
+ }
+ };
+}
+
+// #2439
+macro_rules! m {
+ (
+ $line0_xxxxxxxxxxxxxxxxx:expr,
+ $line1_xxxxxxxxxxxxxxxxx:expr,
+ $line2_xxxxxxxxxxxxxxxxx:expr,
+ $line3_xxxxxxxxxxxxxxxxx:expr,
+ ) => {};
+}
+
+// #2466
+// Skip formatting `macro_rules!` that are not using `{}`.
+macro_rules! m (
+ () => ()
+);
+macro_rules! m [
+ () => ()
+];
+
+// #2470
+macro foo($type_name:ident, $docs:expr) {
+ #[allow(non_camel_case_types)]
+ #[doc=$docs]
+ #[derive(Debug, Clone, Copy)]
+ pub struct $type_name;
+}
+
+// #2534
+macro_rules! foo {
+ ($a:ident : $b:ty) => {};
+ ($a:ident $b:ident $c:ident) => {};
+}
+
+// #2538
+macro_rules! add_message_to_notes {
+ ($msg:expr) => {{
+ let mut lines = message.lines();
+ notes.push_str(&format!("\n{}: {}", level, lines.next().unwrap()));
+ for line in lines {
+ notes.push_str(&format!(
+ "\n{:indent$}{line}",
+ "",
+ indent = level.len() + 2,
+ line = line,
+ ));
+ }
+ }};
+}
+
+// #2560
+macro_rules! binary {
+ ($_self:ident, $expr:expr, $lhs:expr, $func:ident) => {
+ while $_self.matched($expr) {
+ let op = $_self.get_binary_op()?;
+
+ let rhs = Box::new($_self.$func()?);
+
+ $lhs = Spanned {
+ span: $lhs.get_span().to(rhs.get_span()),
+ value: Expression::Binary {
+ lhs: Box::new($lhs),
+ op,
+ rhs,
+ },
+ }
+ }
+ };
+}
+
+// #2558
+macro_rules! m {
+ ($x:) => {};
+ ($($foo:expr)()?) => {};
+}
+
+// #2749
+macro_rules! foo {
+ ($(x)* {}) => {};
+ ($(x)*()) => {};
+ ($(x)*[]) => {};
+}
+macro_rules! __wundergraph_expand_sqlite_mutation {
+ (
+ $mutation_name:ident $((context = $($context:tt)*))* {
+ $(
+ $entity_name:ident(
+ $(insert = $insert:ident,)*
+ $(update = $update:ident,)*
+ $(delete = $($delete:tt)+)*
+ ),
+ )*
+ }
+ ) => {};
+}
+
+// #2607
+macro_rules! bench {
+ ($ty:ident) => {
+ criterion_group!(
+ name = benches;
+ config = ::common_bench::reduced_samples();
+ targets = call, map;
+ );
+ };
+}
+
+// #2770
+macro_rules! save_regs {
+ () => {
+ asm!("push rax
+ push rcx
+ push rdx
+ push rsi
+ push rdi
+ push r8
+ push r9
+ push r10
+ push r11"
+ :::: "intel", "volatile");
+ };
+}
+
+// #2721
+macro_rules! impl_as_byte_slice_arrays {
+ ($n:expr,) => {};
+ ($n:expr, $N:ident, $($NN:ident,)*) => {
+ impl_as_byte_slice_arrays!($n - 1, $($NN,)*);
+
+ impl<T> AsByteSliceMut for [T; $n] where [T]: AsByteSliceMut {
+ fn as_byte_slice_mut(&mut self) -> &mut [u8] {
+ self[..].as_byte_slice_mut()
+ }
+
+ fn to_le(&mut self) {
+ self[..].to_le()
+ }
+ }
+ };
+ (!div $n:expr,) => {};
+ (!div $n:expr, $N:ident, $($NN:ident,)*) => {
+ impl_as_byte_slice_arrays!(!div $n / 2, $($NN,)*);
+
+ impl<T> AsByteSliceMut for [T; $n] where [T]: AsByteSliceMut {
+ fn as_byte_slice_mut(&mut self) -> &mut [u8] {
+ self[..].as_byte_slice_mut()
+ }
+
+ fn to_le(&mut self) {
+ self[..].to_le()
+ }
+ }
+ };
+}
+
+// #2919
+fn foo() {
+ {
+ macro_rules! touch_value {
+ ($func:ident, $value:expr) => {{
+ let result = API::get_cached().$func(
+ self,
+ key.as_ptr(),
+ $value,
+ ffi::VSPropAppendMode::paTouch,
+ );
+ let result = API::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppend);
+ let result =
+ API::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppendM);
+ let result =
+ APIIIIIIIII::get_cached().$func(self, key.as_ptr(), $value, ffi::VSPropAppendM);
+ let result = API::get_cached().$func(
+ self,
+ key.as_ptr(),
+ $value,
+ ffi::VSPropAppendMMMMMMMMMM,
+ );
+ debug_assert!(result == 0);
+ }};
+ }
+ }
+}
+
+// #2642
+macro_rules! template {
+ ($name:expr) => {
+ format_args!(
+ r##"
+"http://example.com"
+
+# test
+"##,
+ $name
+ )
+ };
+}
+
+macro_rules! template {
+ () => {
+ format_args!(
+ r"
+//
+
+"
+ )
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/macro_rules_semi.rs b/src/tools/rustfmt/tests/target/macro_rules_semi.rs
new file mode 100644
index 000000000..84e12d16e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/macro_rules_semi.rs
@@ -0,0 +1,22 @@
+macro_rules! expr {
+ (no_semi) => {
+ return true
+ };
+ (semi) => {
+ return true;
+ };
+}
+
+fn foo() -> bool {
+ match true {
+ true => expr!(no_semi),
+ false if false => {
+ expr!(semi)
+ }
+ false => {
+ expr!(semi);
+ }
+ }
+}
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/macros.rs b/src/tools/rustfmt/tests/target/macros.rs
new file mode 100644
index 000000000..e930b5037
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/macros.rs
@@ -0,0 +1,1062 @@
+// rustfmt-normalize_comments: true
+// rustfmt-format_macro_matchers: true
+itemmacro!(this, is.now().formatted(yay));
+
+itemmacro!(
+ really,
+ long.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbb()
+ .is
+ .formatted()
+);
+
+itemmacro! {this, is.brace().formatted()}
+
+fn main() {
+ foo!();
+
+ foo!(,);
+
+ bar!(a, b, c);
+
+ bar!(a, b, c,);
+
+ baz!(1 + 2 + 3, quux.kaas());
+
+ quux!(
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ );
+
+ kaas!(
+ // comments
+ a, // post macro
+ b // another
+ );
+
+ trailingcomma!(a, b, c,);
+ // Preserve trailing comma only when necessary.
+ ok!(file.seek(SeekFrom::Start(
+ table.map(|table| fixture.offset(table)).unwrap_or(0),
+ )));
+
+ noexpr!( i am not an expression, OK? );
+
+ vec![a, b, c];
+
+ vec![
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ AAAAAA,
+ BBBBB,
+ 5,
+ 100 - 30,
+ 1.33,
+ b,
+ b,
+ b,
+ ];
+
+ vec![a /* comment */];
+
+ // Trailing spaces after a comma
+ vec![a];
+
+ vec![a; b];
+ vec![a; b];
+ vec![a; b];
+
+ vec![a, b; c];
+ vec![a; b, c];
+
+ vec![
+ a;
+ (|x| {
+ let y = x + 1;
+ let z = y + 1;
+ z
+ })(2)
+ ];
+ vec![
+ a;
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+ ];
+ vec![a; unsafe { x + 1 }];
+
+ unknown_bracket_macro__comma_should_not_be_stripped![a,];
+
+ foo(makro!(1, 3));
+
+ hamkaas! { () };
+
+ macrowithbraces! {dont, format, me}
+
+ x!(fn);
+
+ some_macro!();
+
+ some_macro![];
+
+ some_macro! {
+ // comment
+ };
+
+ some_macro! {
+ // comment
+ };
+
+ some_macro!(
+ // comment
+ not function like
+ );
+
+ // #1712
+ let image = gray_image!(
+ 00, 01, 02;
+ 10, 11, 12;
+ 20, 21, 22);
+
+ // #1092
+ chain!(input, a: take!(max_size), || []);
+
+ // #2727
+ foo!("bar");
+}
+
+impl X {
+ empty_invoc! {}
+ empty_invoc! {}
+}
+
+fn issue_1279() {
+ println!("dsfs"); // a comment
+}
+
+fn issue_1555() {
+ let hello = &format!(
+ "HTTP/1.1 200 OK\r\nServer: {}\r\n\r\n{}",
+ "65454654654654654654654655464", "4"
+ );
+}
+
+fn issue1178() {
+ macro_rules! foo {
+ (#[$attr:meta] $name:ident) => {};
+ }
+
+ foo!(
+ #[doc = "bar"]
+ baz
+ );
+}
+
+fn issue1739() {
+ sql_function!(
+ add_rss_item,
+ add_rss_item_t,
+ (
+ a: types::Integer,
+ b: types::Timestamptz,
+ c: types::Text,
+ d: types::Text,
+ e: types::Text
+ )
+ );
+
+ w.slice_mut(s![
+ ..,
+ init_size[1] - extreeeeeeeeeeeeeeeeeeeeeeeem..init_size[1],
+ ..
+ ])
+ .par_map_inplace(|el| *el = 0.);
+}
+
+fn issue_1885() {
+ let threads = people
+ .into_iter()
+ .map(|name| {
+ chan_select! {
+ rx.recv() => {}
+ }
+ })
+ .collect::<Vec<_>>();
+}
+
+fn issue_1917() {
+ mod x {
+ quickcheck! {
+ fn test(a: String, s: String, b: String) -> TestResult {
+ if a.find(&s).is_none() {
+
+ TestResult::from_bool(true)
+ } else {
+ TestResult::discard()
+ }
+ }
+ }
+ }
+}
+
+fn issue_1921() {
+ // Macro with tabs.
+ lazy_static! {
+ static ref ONE: u32 = 1;
+ static ref TWO: u32 = 2;
+ static ref THREE: u32 = 3;
+ static ref FOUR: u32 = {
+ let mut acc = 1;
+ acc += 1;
+ acc += 2;
+ acc
+ };
+ }
+}
+
+// #1577
+fn issue1577() {
+ let json = json!({
+ "foo": "bar",
+ });
+}
+
+// #3174
+fn issue_3174() {
+ let data = if let Some(debug) = error.debug_info() {
+ json!({
+ "errorKind": format!("{:?}", error.err_kind()),
+ "debugMessage": debug.message,
+ })
+ } else {
+ json!({ "errorKind": format!("{:?}", error.err_kind()) })
+ };
+}
+
+gfx_pipeline!(pipe {
+ vbuf: gfx::VertexBuffer<Vertex> = (),
+ out: gfx::RenderTarget<ColorFormat> = "Target0",
+});
+
+// #1919
+#[test]
+fn __bindgen_test_layout_HandleWithDtor_open0_int_close0_instantiation() {
+ assert_eq!(
+ ::std::mem::size_of::<HandleWithDtor<::std::os::raw::c_int>>(),
+ 8usize,
+ concat!(
+ "Size of template specialization: ",
+ stringify!(HandleWithDtor<::std::os::raw::c_int>)
+ )
+ );
+ assert_eq!(
+ ::std::mem::align_of::<HandleWithDtor<::std::os::raw::c_int>>(),
+ 8usize,
+ concat!(
+ "Alignment of template specialization: ",
+ stringify!(HandleWithDtor<::std::os::raw::c_int>)
+ )
+ );
+}
+
+// #878
+macro_rules! try_opt {
+ ($expr:expr) => {
+ match $expr {
+ Some(val) => val,
+
+ None => {
+ return None;
+ }
+ }
+ };
+}
+
+// #2214
+// macro call whose argument is an array with trailing comma.
+fn issue2214() {
+ make_test!(
+ str_searcher_ascii_haystack,
+ "bb",
+ "abbcbbd",
+ [
+ Reject(0, 1),
+ Match(1, 3),
+ Reject(3, 4),
+ Match(4, 6),
+ Reject(6, 7),
+ ]
+ );
+}
+
+fn special_case_macros() {
+ let p = eprint!();
+ let q = eprint!("{}", 1);
+ let r = eprint!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = eprint!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ let q = eprintln!("{}", 1);
+ let r = eprintln!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = eprintln!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ let q = format!("{}", 1);
+ let r = format!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = format!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ let q = format_args!("{}", 1);
+ let r = format_args!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = format_args!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ let q = print!("{}", 1);
+ let r = print!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = print!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ let q = println!("{}", 1);
+ let r = println!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = println!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ let q = unreachable!("{}", 1);
+ let r = unreachable!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ let s = unreachable!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ debug!("{}", 1);
+ debug!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ debug!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ error!("{}", 1);
+ error!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ error!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ info!("{}", 1);
+ info!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ info!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ panic!("{}", 1);
+ panic!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ panic!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ warn!("{}", 1);
+ warn!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+ );
+ warn!(
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ assert!();
+ assert!(result == 42);
+ assert!(result == 42, "Ahoy there, {}!", target);
+ assert!(
+ result == 42,
+ "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')",
+ result,
+ input,
+ expected
+ );
+ assert!(
+ result == 42,
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ assert_eq!();
+ assert_eq!(left);
+ assert_eq!(left, right);
+ assert_eq!(left, right, "Ahoy there, {}!", target);
+ assert_eq!(
+ left, right,
+ "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')",
+ result, input, expected
+ );
+ assert_eq!(
+ first_realllllllllllly_long_variable_that_doesnt_fit_one_one_line,
+ second_reallllllllllly_long_variable_that_doesnt_fit_one_one_line,
+ "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')",
+ result,
+ input,
+ expected
+ );
+ assert_eq!(
+ left + 42,
+ right,
+ "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')",
+ result,
+ input,
+ expected
+ );
+ assert_eq!(
+ left,
+ right,
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ write!(&mut s, "Ahoy there, {}!", target);
+ write!(
+ &mut s,
+ "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')",
+ result, input, expected
+ );
+ write!(
+ &mut s,
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+
+ writeln!(&mut s, "Ahoy there, {}!", target);
+ writeln!(
+ &mut s,
+ "Arr! While plunderin' the hold, we got '{}' when given '{}' (we expected '{}')",
+ result, input, expected
+ );
+ writeln!(
+ &mut s,
+ "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}",
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26
+ );
+}
+
+// #1209
+impl Foo {
+ /// foo
+ pub fn foo(&self) -> Bar<foo!()> {}
+}
+
+// #819
+fn macro_in_pattern_position() {
+ let x = match y {
+ foo!() => (),
+ bar!(a, b, c) => (),
+ bar!(a, b, c,) => (),
+ baz!(1 + 2 + 3, quux.kaas()) => (),
+ quux!(
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+ ) => (),
+ };
+}
+
+macro foo() {}
+
+pub macro bar($x:ident + $y:expr;) {
+ fn foo($x: Foo) {
+ long_function(
+ a_long_argument_to_a_long_function_is_what_this_is(AAAAAAAAAAAAAAAAAAAAAAAAAAAA),
+ $x.bar($y),
+ );
+ }
+}
+
+macro foo() {
+ // a comment
+ fn foo() {
+ // another comment
+ bar();
+ }
+}
+
+// #2574
+macro_rules! test {
+ () => {{}};
+}
+
+macro lex_err($kind: ident $(, $body: expr)*) {
+ Err(QlError::LexError(LexError::$kind($($body,)*)))
+}
+
+// Preserve trailing comma on item-level macro with `()` or `[]`.
+methods![get, post, delete,];
+methods!(get, post, delete,);
+
+// #2588
+macro_rules! m {
+ () => {
+ r#"
+ test
+ "#
+ };
+}
+fn foo() {
+ f! {r#"
+ test
+ "#};
+}
+
+// #2591
+fn foo() {
+ match 0u32 {
+ 0 => (),
+ _ => unreachable!(/* obviously */),
+ }
+}
+
+fn foo() {
+ let _ = column!(/* here */);
+}
+
+// #2616
+// Preserve trailing comma when using mixed layout for macro call.
+fn foo() {
+ foo!(
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ );
+ foo!(
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ );
+}
+
+// #2830
+// Preserve trailing comma-less/ness inside nested macro.
+named!(
+ do_parse_gsv<GsvData>,
+ map_res!(
+ do_parse!(
+ number_of_sentences: map_res!(digit, parse_num::<u16>)
+ >> char!(',')
+ >> sentence_index: map_res!(digit, parse_num::<u16>)
+ >> char!(',')
+ >> total_number_of_sats: map_res!(digit, parse_num::<u16>)
+ >> char!(',')
+ >> sat0: opt!(complete!(parse_gsv_sat_info))
+ >> sat1: opt!(complete!(parse_gsv_sat_info))
+ >> sat2: opt!(complete!(parse_gsv_sat_info))
+ >> sat3: opt!(complete!(parse_gsv_sat_info))
+ >> (
+ number_of_sentences,
+ sentence_index,
+ total_number_of_sats,
+ sat0,
+ sat1,
+ sat2,
+ sat3
+ )
+ ),
+ construct_gsv_data
+ )
+);
+
+// #2857
+convert_args!(vec!(1, 2, 3));
+
+// #3031
+thread_local!(
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> = RefCell::new(RootedTraceableSet::new());
+);
+
+thread_local![
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> = RefCell::new(RootedTraceableSet::new());
+
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new(0));
+
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new(), xxx, yyy);
+
+ /// TLV Holds a set of JSTraceables that need to be rooted
+ static ROOTED_TRACEABLES: RefCell<RootedTraceableSet> =
+ RefCell::new(RootedTraceableSet::new(1234));
+];
+
+fn issue3004() {
+ foo!(|_| { () });
+ stringify!((foo+));
+}
+
+// #3331
+pub fn fold_abi<V: Fold + ?Sized>(_visitor: &mut V, _i: Abi) -> Abi {
+ Abi {
+ extern_token: Token![extern](tokens_helper(_visitor, &_i.extern_token.span)),
+ name: (_i.name).map(|it| _visitor.fold_lit_str(it)),
+ }
+}
+
+// #3463
+x! {()}
+
+// #3746
+f!(match a {
+ 4 => &[
+ (3, false), // Missing
+ (4, true) // I-frame
+ ][..],
+});
+
+// #3583
+foo!(|x = y|);
diff --git a/src/tools/rustfmt/tests/target/markdown-comment-with-options.rs b/src/tools/rustfmt/tests/target/markdown-comment-with-options.rs
new file mode 100644
index 000000000..ede2bc0d0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/markdown-comment-with-options.rs
@@ -0,0 +1,17 @@
+// rustfmt-wrap_comments: true
+
+// Preserve two trailing whitespaces in doc comment,
+// but trim any whitespaces in normal comment.
+
+//! hello world
+//! hello world
+
+/// hello world
+/// hello world
+/// hello world
+fn foo() {
+ // hello world
+ // hello world
+ let x = 3;
+ println!("x = {}", x);
+}
diff --git a/src/tools/rustfmt/tests/target/markdown-comment.rs b/src/tools/rustfmt/tests/target/markdown-comment.rs
new file mode 100644
index 000000000..71a9921d2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/markdown-comment.rs
@@ -0,0 +1,15 @@
+// Preserve two trailing whitespaces in doc comment,
+// but trim any whitespaces in normal comment.
+
+//! hello world
+//! hello world
+
+/// hello world
+/// hello world
+/// hello world
+fn foo() {
+ // hello world
+ // hello world
+ let x = 3;
+ println!("x = {}", x);
+}
diff --git a/src/tools/rustfmt/tests/target/match-block-trailing-comma.rs b/src/tools/rustfmt/tests/target/match-block-trailing-comma.rs
new file mode 100644
index 000000000..5ab433a2e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/match-block-trailing-comma.rs
@@ -0,0 +1,26 @@
+// rustfmt-match_block_trailing_comma: true
+// Match expressions, no unwrapping of block arms or wrapping of multiline
+// expressions.
+
+fn foo() {
+ match x {
+ a => {
+ "line1";
+ "line2"
+ },
+ ThisIsA::Guard if true => {
+ "line1";
+ "line2"
+ },
+ ThisIsA::ReallyLongPattern(ThatWillForce::TheGuard, ToWrapOnto::TheFollowingLine)
+ if true =>
+ {
+ "line1";
+ "line2"
+ },
+ b => (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/match-flattening.rs b/src/tools/rustfmt/tests/target/match-flattening.rs
new file mode 100644
index 000000000..f246952a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/match-flattening.rs
@@ -0,0 +1,23 @@
+fn main() {
+ match option {
+ None => {
+ if condition {
+ true
+ } else {
+ false
+ }
+ }
+ }
+}
+
+fn main() {
+ match option {
+ None => {
+ if condition {
+ true
+ } else {
+ false
+ }
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/match-nowrap-trailing-comma.rs b/src/tools/rustfmt/tests/target/match-nowrap-trailing-comma.rs
new file mode 100644
index 000000000..19ef21448
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/match-nowrap-trailing-comma.rs
@@ -0,0 +1,17 @@
+// rustfmt-match_arm_blocks: false
+// rustfmt-match_block_trailing_comma: true
+// Match expressions, no unwrapping of block arms or wrapping of multiline
+// expressions.
+
+fn foo() {
+ match x {
+ a => {
+ "line1";
+ "line2"
+ },
+ b => (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/match-nowrap.rs b/src/tools/rustfmt/tests/target/match-nowrap.rs
new file mode 100644
index 000000000..9e674b1e2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/match-nowrap.rs
@@ -0,0 +1,13 @@
+// rustfmt-match_arm_blocks: false
+// Match expressions, no unwrapping of block arms or wrapping of multiline
+// expressions.
+
+fn foo() {
+ match x {
+ a => foo(),
+ b => (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ),
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/match.rs b/src/tools/rustfmt/tests/target/match.rs
new file mode 100644
index 000000000..1bf3fb758
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/match.rs
@@ -0,0 +1,629 @@
+// rustfmt-normalize_comments: true
+// Match expressions.
+
+fn foo() {
+ // A match expression.
+ match x {
+ // Some comment.
+ a => foo(),
+ b if 0 < 42 => foo(),
+ c => {
+ // Another comment.
+ // Comment.
+ an_expression;
+ foo()
+ }
+ Foo(ref bar) => {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ }
+ Pattern1 | Pattern2 | Pattern3 => false,
+ Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
+ | Paternnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => blah,
+ Patternnnnnnnnnnnnnnnnnnn
+ | Patternnnnnnnnnnnnnnnnnnn
+ | Patternnnnnnnnnnnnnnnnnnn
+ | Patternnnnnnnnnnnnnnnnnnn => meh,
+
+ Patternnnnnnnnnnnnnnnnnnn | Patternnnnnnnnnnnnnnnnnnn if looooooooooooooooooong_guard => {
+ meh
+ }
+
+ Patternnnnnnnnnnnnnnnnnnnnnnnnn | Patternnnnnnnnnnnnnnnnnnnnnnnnn
+ if looooooooooooooooooooooooooooooooooooooooong_guard =>
+ {
+ meh
+ }
+
+ // Test that earlier patterns can take the guard space
+ (aaaa, bbbbb, ccccccc, aaaaa, bbbbbbbb, cccccc, aaaa, bbbbbbbb, cccccc, dddddd)
+ | Patternnnnnnnnnnnnnnnnnnnnnnnnn
+ if loooooooooooooooooooooooooooooooooooooooooong_guard => {}
+
+ _ => {}
+ ast::PathParameters::AngleBracketedParameters(ref data)
+ if data.lifetimes.len() > 0 || data.types.len() > 0 || data.bindings.len() > 0 => {}
+ }
+
+ let whatever = match something {
+ /// DOC COMMENT!
+ Some(_) => 42,
+ // Comment on an attribute.
+ #[an_attribute]
+ // Comment after an attribute.
+ None => 0,
+ #[rustfmt::skip]
+ Blurb => { }
+ };
+}
+
+// Test that a match on an overflow line is laid out properly.
+fn main() {
+ let sub_span =
+ match xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx {
+ Some(sub_span) => Some(sub_span),
+ None => sub_span,
+ };
+}
+
+// Test that one-line bodies align.
+fn main() {
+ match r {
+ Variableeeeeeeeeeeeeeeeee => (
+ "variable",
+ vec!["id", "name", "qualname", "value", "type", "scopeid"],
+ true,
+ true,
+ ),
+ Enummmmmmmmmmmmmmmmmmmmm => (
+ "enum",
+ vec!["id", "qualname", "scopeid", "value"],
+ true,
+ true,
+ ),
+ Variantttttttttttttttttttttttt => (
+ "variant",
+ vec!["id", "name", "qualname", "type", "value", "scopeid"],
+ true,
+ true,
+ ),
+ };
+
+ match x {
+ y => { /*Block with comment. Preserve me.*/ }
+ z => {
+ stmt();
+ }
+ }
+}
+
+fn matches() {
+ match 1 {
+ -1 => 10,
+ 1 => 1, // foo
+ 2 => 2,
+ // bar
+ 3 => 3,
+ _ => 0, // baz
+ }
+}
+
+fn match_skip() {
+ let _ = match Some(1) {
+ #[rustfmt::skip]
+ Some( n ) => n,
+ None => 1,
+ };
+}
+
+fn issue339() {
+ match a {
+ b => {}
+ c => {}
+ d => {}
+ e => {}
+ // collapsing here is safe
+ ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff => {}
+ // collapsing here exceeds line length
+ ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffg => {
+ }
+ h => { // comment above block
+ }
+ i => {} // comment below block
+ j => {
+ // comment inside block
+ }
+ j2 => {
+ // comments inside...
+ } // ... and after
+ // TODO uncomment when vertical whitespace is handled better
+ // k => {
+ //
+ // // comment with WS above
+ // }
+ // l => {
+ // // comment with ws below
+ //
+ // }
+ m => {}
+ n => {}
+ o => {}
+ p => { // Don't collapse me
+ }
+ q => {}
+ r => {}
+ s => 0, // s comment
+ // t comment
+ t => 1,
+ u => 2,
+ v => {} /* funky block
+ * comment */
+ /* final comment */
+ }
+}
+
+fn issue355() {
+ match mac {
+ a => println!("a", b),
+ b => vec![1, 2],
+ c => vec![3; 4],
+ d => {
+ println!("a", b)
+ }
+ e => {
+ vec![1, 2]
+ }
+ f => {
+ vec![3; 4]
+ }
+ h => println!("a", b), // h comment
+ i => vec![1, 2], // i comment
+ j => vec![3; 4], // j comment
+ // k comment
+ k => println!("a", b),
+ // l comment
+ l => vec![1, 2],
+ // m comment
+ m => vec![3; 4],
+ // Rewrite splits macro
+ nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn => {
+ println!("a", b)
+ }
+ // Rewrite splits macro
+ oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo => {
+ vec![1, 2]
+ }
+ // Macro support fails to recognise this macro as splittable
+ // We push the whole expr to a new line, TODO split this macro as well
+ pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp => {
+ vec![3; 4]
+ }
+ // q, r and s: Rewrite splits match arm
+ qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq => {
+ println!("a", b)
+ }
+ rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr => {
+ vec![1, 2]
+ }
+ ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss => {
+ vec![3; 4]
+ }
+ // Funky bracketing styles
+ t => println! {"a", b},
+ u => vec![1, 2],
+ v => vec![3; 4],
+ w => println!["a", b],
+ x => vec![1, 2],
+ y => vec![3; 4],
+ // Brackets with comments
+ tc => println! {"a", b}, // comment
+ uc => vec![1, 2], // comment
+ vc => vec![3; 4], // comment
+ wc => println!["a", b], // comment
+ xc => vec![1, 2], // comment
+ yc => vec![3; 4], // comment
+ yd => looooooooooooooooooooooooooooooooooooooooooooooooooooooooong_func(
+ aaaaaaaaaa, bbbbbbbbbb, cccccccccc, dddddddddd,
+ ),
+ }
+}
+
+fn issue280() {
+ {
+ match x {
+ CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => {
+ ch == '\n'
+ }
+ ast::ItemConst(ref typ, ref expr) => {
+ self.process_static_or_const_item(item, &typ, &expr)
+ }
+ }
+ }
+}
+
+fn issue383() {
+ match resolution.last_private {
+ LastImport { .. } => false,
+ _ => true,
+ };
+}
+
+fn issue507() {
+ match 1 {
+ 1 => unsafe { std::intrinsics::abort() },
+ _ => (),
+ }
+}
+
+fn issue508() {
+ match s.type_id() {
+ Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
+ HTMLElementTypeId::HTMLCanvasElement,
+ ))) => true,
+ Some(NodeTypeId::Element(ElementTypeId::HTMLElement(
+ HTMLElementTypeId::HTMLObjectElement,
+ ))) => s.has_object_data(),
+ Some(NodeTypeId::Element(_)) => false,
+ }
+}
+
+fn issue496() {
+ {
+ {
+ {
+ match def {
+ def::DefConst(def_id) | def::DefAssociatedConst(def_id) => {
+ match const_eval::lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) {
+ Some(const_expr) => x,
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+fn issue494() {
+ {
+ match stmt.node {
+ hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) => {
+ result.push(StmtRef::Mirror(Box::new(Stmt {
+ span: stmt.span,
+ kind: StmtKind::Expr {
+ scope: cx.tcx.region_maps.node_extent(id),
+ expr: expr.to_ref(),
+ },
+ })))
+ }
+ }
+ }
+}
+
+fn issue386() {
+ match foo {
+ BiEq | BiLt | BiLe | BiNe | BiGt | BiGe => true,
+ BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem | BiBitXor | BiBitAnd | BiBitOr
+ | BiShl | BiShr => false,
+ }
+}
+
+fn guards() {
+ match foo {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ if foooooooooooooo && barrrrrrrrrrrr => {}
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ if foooooooooooooo && barrrrrrrrrrrr => {}
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ if fooooooooooooooooooooo
+ && (bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+ || cccccccccccccccccccccccccccccccccccccccc) => {}
+ }
+}
+
+fn issue1371() {
+ Some(match type_ {
+ sfEvtClosed => Closed,
+ sfEvtResized => {
+ let e = unsafe { *event.size.as_ref() };
+
+ Resized {
+ width: e.width,
+ height: e.height,
+ }
+ }
+ sfEvtLostFocus => LostFocus,
+ sfEvtGainedFocus => GainedFocus,
+ sfEvtTextEntered => TextEntered {
+ unicode: unsafe {
+ ::std::char::from_u32((*event.text.as_ref()).unicode)
+ .expect("Invalid unicode encountered on TextEntered event")
+ },
+ },
+ sfEvtKeyPressed => {
+ let e = unsafe { event.key.as_ref() };
+
+ KeyPressed {
+ code: unsafe { ::std::mem::transmute(e.code) },
+ alt: e.alt.to_bool(),
+ ctrl: e.control.to_bool(),
+ shift: e.shift.to_bool(),
+ system: e.system.to_bool(),
+ }
+ }
+ sfEvtKeyReleased => {
+ let e = unsafe { event.key.as_ref() };
+
+ KeyReleased {
+ code: unsafe { ::std::mem::transmute(e.code) },
+ alt: e.alt.to_bool(),
+ ctrl: e.control.to_bool(),
+ shift: e.shift.to_bool(),
+ system: e.system.to_bool(),
+ }
+ }
+ })
+}
+
+fn issue1395() {
+ let bar = Some(true);
+ let foo = Some(true);
+ let mut x = false;
+ bar.and_then(|_| match foo {
+ None => None,
+ Some(b) => {
+ x = true;
+ Some(b)
+ }
+ });
+}
+
+fn issue1456() {
+ Ok(Recording {
+ artists: match reader.evaluate(".//mb:recording/mb:artist-credit/mb:name-credit")? {
+ Nodeset(nodeset) => {
+ let res: Result<Vec<ArtistRef>, ReadError> = nodeset
+ .iter()
+ .map(|node| {
+ XPathNodeReader::new(node, &context).and_then(|r| ArtistRef::from_xml(&r))
+ })
+ .collect();
+ res?
+ }
+ _ => Vec::new(),
+ },
+ })
+}
+
+fn issue1460() {
+ let _ = match foo {
+ REORDER_BUFFER_CHANGE_INTERNAL_SPEC_INSERT => {
+ "internal_spec_insert_internal_spec_insert_internal_spec_insert"
+ }
+ _ => "reorder_something",
+ };
+}
+
+fn issue525() {
+ foobar(
+ f,
+ "{}",
+ match *self {
+ TaskState::Started => "started",
+ TaskState::Success => "success",
+ TaskState::Failed => "failed",
+ },
+ );
+}
+
+// #1838, #1839
+fn match_with_near_max_width() {
+ let (this_line_uses_99_characters_and_is_formatted_properly, x012345) = match some_expression {
+ _ => unimplemented!(),
+ };
+
+ let (should_be_formatted_like_the_line_above_using_100_characters, x0) = match some_expression {
+ _ => unimplemented!(),
+ };
+
+ let (should_put_the_brace_on_the_next_line_using_101_characters, x0000) = match some_expression
+ {
+ _ => unimplemented!(),
+ };
+ match m {
+ Variant::Tag
+ | Variant::Tag2
+ | Variant::Tag3
+ | Variant::Tag4
+ | Variant::Tag5
+ | Variant::Tag6 => {}
+ }
+}
+
+fn match_with_trailing_spaces() {
+ match x {
+ Some(..) => 0,
+ None => 1,
+ }
+}
+
+fn issue_2099() {
+ let a = match x {};
+ let b = match x {};
+
+ match x {}
+}
+
+// #2021
+impl<'tcx> Const<'tcx> {
+ pub fn from_constval<'a>() -> Const<'tcx> {
+ let val = match *cv {
+ ConstVal::Variant(_) | ConstVal::Aggregate(..) | ConstVal::Unevaluated(..) => bug!(
+ "MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)",
+ cv
+ ),
+ };
+ }
+}
+
+// #2151
+fn issue_2151() {
+ match either {
+ x => {}
+ y => (),
+ }
+}
+
+// #2152
+fn issue_2152() {
+ match m {
+ "aaaaaaaaaaaaa" | "bbbbbbbbbbbbb" | "cccccccccccccccccccccccccccccccccccccccccccc"
+ if true => {}
+ "bind" | "writev" | "readv" | "sendmsg" | "recvmsg" if android && (aarch64 || x86_64) => {
+ true
+ }
+ }
+}
+
+// #2376
+// Preserve block around expressions with condition.
+fn issue_2376() {
+ let mut x = None;
+ match x {
+ Some(0) => {
+ for i in 1..11 {
+ x = Some(i);
+ }
+ }
+ Some(ref mut y) => {
+ while *y < 10 {
+ *y += 1;
+ }
+ }
+ None => {
+ while let None = x {
+ x = Some(10);
+ }
+ }
+ }
+}
+
+// #2621
+// Strip leading `|` in match arm patterns
+fn issue_2621() {
+ let x = Foo::A;
+ match x {
+ Foo::A => println!("No vert single condition"),
+ Foo::B | Foo::C => println!("Center vert two conditions"),
+ Foo::D => println!("Preceding vert single condition"),
+ Foo::E | Foo::F => println!("Preceding vert over two lines"),
+ Foo::G | Foo::H => println!("Trailing vert over two lines"),
+ // Comment on its own line
+ Foo::I => println!("With comment"), // Comment after line
+ }
+}
+
+fn issue_2377() {
+ match tok {
+ Tok::Not
+ | Tok::BNot
+ | Tok::Plus
+ | Tok::Minus
+ | Tok::PlusPlus
+ | Tok::MinusMinus
+ | Tok::Void
+ | Tok::Delete
+ if prec <= 16 =>
+ {
+ // code here...
+ }
+ Tok::TypeOf if prec <= 16 => {}
+ }
+}
+
+// #3040
+fn issue_3040() {
+ {
+ match foo {
+ DevtoolScriptControlMsg::WantsLiveNotifications(id, to_send) => {
+ match documents.find_window(id) {
+ Some(window) => {
+ devtools::handle_wants_live_notifications(window.upcast(), to_send)
+ }
+ None => return warn!("Message sent to closed pipeline {}.", id),
+ }
+ }
+ }
+ }
+}
+
+// #3030
+fn issue_3030() {
+ match input.trim().parse::<f64>() {
+ Ok(val)
+ if !(
+ // A valid number is the same as what rust considers to be valid,
+ // except for +1., NaN, and Infinity.
+ val.is_infinite() || val.is_nan() || input.ends_with(".") || input.starts_with("+")
+ ) => {}
+ }
+}
+
+fn issue_3005() {
+ match *token {
+ Token::Dimension {
+ value, ref unit, ..
+ } if num_context.is_ok(context.parsing_mode, value) => {
+ return NoCalcLength::parse_dimension(context, value, unit)
+ .map(LengthOrPercentage::Length)
+ .map_err(|()| location.new_unexpected_token_error(token.clone()));
+ }
+ }
+}
+
+// #3774
+fn issue_3774() {
+ {
+ {
+ {
+ match foo {
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreachab(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => unreacha!(),
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
+ unreachabl()
+ }
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
+ unreachae!()
+ }
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
+ unreachable()
+ }
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
+ unreachable!()
+ }
+ Lam(_, _, _) | Pi(_, _, _) | Let(_, _, _, _) | Embed(_) | Var(_) => {
+ rrunreachable!()
+ }
+ }
+ }
+ }
+ }
+}
+
+// #4109
+fn issue_4109() {
+ match () {
+ _ => {
+ #[cfg(debug_assertions)]
+ {
+ println!("Foo");
+ }
+ }
+ }
+
+ match () {
+ _ => {
+ #[allow(unsafe_code)]
+ unsafe {}
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/match_overflow_expr.rs b/src/tools/rustfmt/tests/target/match_overflow_expr.rs
new file mode 100644
index 000000000..b817879d1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/match_overflow_expr.rs
@@ -0,0 +1,50 @@
+// rustfmt-overflow_delimited_expr: true
+
+fn main() {
+ println!("Foobar: {}", match "input" {
+ "a" => "",
+ "b" => "",
+ "c" => "",
+ "d" => "",
+ "e" => "",
+ "f" => "",
+ "g" => "",
+ "h" => "",
+ "i" => "",
+ "j" => "",
+ "k" => "",
+ "l" => "",
+ "m" => "",
+ "n" => "",
+ "o" => "",
+ "p" => "",
+ "q" => "",
+ "r" => "Rust",
+ });
+}
+
+fn main() {
+ println!(
+ "Very Long Input String Which Makes It Impossible To Fit On The Same Line: {}",
+ match "input" {
+ "a" => "",
+ "b" => "",
+ "c" => "",
+ "d" => "",
+ "e" => "",
+ "f" => "",
+ "g" => "",
+ "h" => "",
+ "i" => "",
+ "j" => "",
+ "k" => "",
+ "l" => "",
+ "m" => "",
+ "n" => "",
+ "o" => "",
+ "p" => "",
+ "q" => "",
+ "r" => "Rust",
+ }
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/max-line-length-in-chars.rs b/src/tools/rustfmt/tests/target/max-line-length-in-chars.rs
new file mode 100644
index 000000000..d49fbb7e3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/max-line-length-in-chars.rs
@@ -0,0 +1,4 @@
+// rustfmt-max_width: 25
+
+// абвгдеёжзийклмнопрст
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/merge_imports_true_compat.rs b/src/tools/rustfmt/tests/target/merge_imports_true_compat.rs
new file mode 100644
index 000000000..46cd0a3b8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/merge_imports_true_compat.rs
@@ -0,0 +1,3 @@
+// rustfmt-merge_imports: true
+
+use a::{b, c};
diff --git a/src/tools/rustfmt/tests/target/mod-1.rs b/src/tools/rustfmt/tests/target/mod-1.rs
new file mode 100644
index 000000000..4118d123d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/mod-1.rs
@@ -0,0 +1,37 @@
+// Deeply indented modules.
+
+mod foo {
+ mod bar {
+ mod baz {}
+ }
+}
+
+mod foo {
+ mod bar {
+ mod baz {
+ fn foo() {
+ bar()
+ }
+ }
+ }
+
+ mod qux {}
+}
+
+mod boxed {
+ pub use std::boxed::{Box, HEAP};
+}
+
+pub mod x {
+ pub fn freopen(
+ filename: *const c_char,
+ mode: *const c_char,
+ mode2: *const c_char,
+ mode3: *const c_char,
+ file: *mut FILE,
+ ) -> *mut FILE {
+ }
+}
+
+mod y { // sup boooooiiii
+}
diff --git a/src/tools/rustfmt/tests/target/mod-2.rs b/src/tools/rustfmt/tests/target/mod-2.rs
new file mode 100644
index 000000000..1a093bd52
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/mod-2.rs
@@ -0,0 +1,5 @@
+// Some nested mods
+
+#[cfg(test)]
+mod nestedmod;
+pub mod no_new_line_beginning;
diff --git a/src/tools/rustfmt/tests/target/mod_skip_child.rs b/src/tools/rustfmt/tests/target/mod_skip_child.rs
new file mode 100644
index 000000000..d48c4a37e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/mod_skip_child.rs
@@ -0,0 +1,2 @@
+// rustfmt-skip_children: true
+mod nested_skipped;
diff --git a/src/tools/rustfmt/tests/target/mulit-file.rs b/src/tools/rustfmt/tests/target/mulit-file.rs
new file mode 100644
index 000000000..1f829b36f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/mulit-file.rs
@@ -0,0 +1,10 @@
+// Tests that where a single file is referred to in multiple places, we don't
+// crash.
+
+#[cfg(all(foo))]
+#[path = "closure.rs"]
+pub mod imp;
+
+#[cfg(all(bar))]
+#[path = "closure.rs"]
+pub mod imp;
diff --git a/src/tools/rustfmt/tests/target/multiline_string_in_macro_def.rs b/src/tools/rustfmt/tests/target/multiline_string_in_macro_def.rs
new file mode 100644
index 000000000..dafc738f8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/multiline_string_in_macro_def.rs
@@ -0,0 +1,14 @@
+macro_rules! assert_approx_eq {
+ ($a:expr, $b:expr, $eps:expr) => {{
+ let (a, b) = (&$a, &$b);
+ assert!(
+ (*a - *b).abs() < $eps,
+ "assertion failed: `(left !== right)` \
+ (left: `{:?}`, right: `{:?}`, expect diff: `{:?}`, real diff: `{:?}`)",
+ *a,
+ *b,
+ $eps,
+ (*a - *b).abs()
+ );
+ }};
+}
diff --git a/src/tools/rustfmt/tests/target/multiple.rs b/src/tools/rustfmt/tests/target/multiple.rs
new file mode 100644
index 000000000..ee6ef220c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/multiple.rs
@@ -0,0 +1,180 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-format_strings: true
+// Test of lots of random stuff.
+// FIXME split this into multiple, self-contained tests.
+
+#[attr1]
+extern crate foo;
+#[attr2]
+#[attr3]
+extern crate foo;
+#[attr1]
+extern crate foo;
+#[attr2]
+#[attr3]
+extern crate foo;
+
+use std::cell::*;
+use std::{
+ self, any, ascii, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow, borrow,
+ borrow, borrow, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, boxed, char,
+ char, char, char, char, char, char, char, char, char,
+};
+
+mod doc;
+mod other;
+
+// sfdgfffffffffffffffffffffffffffffffffffffffffffffffffffffff
+// ffffffffffffffffffffffffffffffffffffffffff
+
+fn foo(a: isize, b: u32 /* blah blah */, c: f64) {}
+
+fn foo() -> Box<Write + 'static>
+where
+ 'a: 'b,
+ for<'a> D<'b>: 'a,
+{
+ hello!()
+}
+
+fn baz<
+ 'a: 'b, // comment on 'a
+ T: SomsssssssssssssssssssssssssssssssssssssssssssssssssssssseType, // comment on T
+>(
+ a: A,
+ b: B, // comment on b
+ c: C,
+) -> Bob {
+ #[attr1]
+ extern crate foo;
+ #[attr2]
+ #[attr3]
+ extern crate foo;
+ #[attr1]
+ extern crate foo;
+ #[attr2]
+ #[attr3]
+ extern crate foo;
+}
+
+#[rustfmt::skip]
+fn qux(a: dadsfa, // Comment 1
+ b: sdfasdfa, // Comment 2
+ c: dsfdsafa) // Comment 3
+{
+
+}
+
+/// Blah blah blah.
+impl Bar {
+ fn foo(
+ &mut self,
+ a: sdfsdfcccccccccccccccccccccccccccccccccccccccccccccccccc, // comment on a
+ b: sdfasdfsdfasfs, // closing comment
+ ) -> isize {
+ }
+
+ /// Blah blah blah.
+ pub fn f2(self) {
+ (foo, bar)
+ }
+
+ #[an_attribute]
+ fn f3(self) -> Dog {}
+}
+
+/// The `nodes` and `edges` method each return instantiations of
+/// `Cow<[T]>` to leave implementers the freedom to create
+
+/// entirely new vectors or to pass back slices into internally owned
+/// vectors.
+pub trait GraphWalk<'a, N, E> {
+ /// Returns all the nodes in this graph.
+ fn nodes(&'a self) -> Nodes<'a, N>;
+ /// Returns all of the edges in this graph.
+ fn edges(&'a self) -> Edges<'a, E>;
+ /// The source node for `edge`.
+ fn source(&'a self, edge: &E) -> N;
+ /// The target node for `edge`.
+ fn target(&'a self, edge: &E) -> N;
+}
+
+/// A Doc comment
+#[AnAttribute]
+pub struct Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+}
+
+struct Bar;
+
+// With a where-clause and generics.
+pub struct Foo<'a, Y: Baz>
+where
+ X: Whatever,
+{
+ f: SomeType, // Comment beside a field
+}
+
+fn foo(ann: &'a (PpAnn + 'a)) {}
+
+fn main() {
+ for i in 0i32..4 {
+ println!("{}", i);
+ }
+
+ while true {
+ hello();
+ }
+
+ let rc = Cell::new(
+ 42usize,
+ 42usize,
+ Cell::new(
+ 42usize,
+ remaining_widthremaining_widthremaining_widthremaining_width,
+ ),
+ 42usize,
+ );
+ let rc = RefCell::new(42usize, remaining_width, remaining_width); // a comment
+ let x = "Hello!!!!!!!!! abcd abcd abcd abcd abcd abcd\n abcd abcd abcd abcd abcd abcd abcd \
+ abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
+ abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd abcd \
+ abcd abcd";
+ let s = expand(a, b);
+}
+
+fn deconstruct() -> (
+ SocketAddr,
+ Method,
+ Headers,
+ RequestUri,
+ HttpVersion,
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+) {
+}
+
+fn deconstruct(
+ foo: Bar,
+) -> (
+ SocketAddr,
+ Method,
+ Headers,
+ RequestUri,
+ HttpVersion,
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+) {
+}
+
+#[rustfmt::skip]
+mod a{
+fn foo(x: T) {
+ let x: T = dfasdf;
+}
+}
diff --git a/src/tools/rustfmt/tests/target/negative-impl.rs b/src/tools/rustfmt/tests/target/negative-impl.rs
new file mode 100644
index 000000000..16ce7e26a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/negative-impl.rs
@@ -0,0 +1,14 @@
+impl !Display for JoinHandle {}
+
+impl !Box<JoinHandle> {}
+
+impl !std::fmt::Display
+ for JoinHandle<T: std::future::Future + std::marker::Send + std::marker::Sync>
+{
+}
+
+impl
+ !JoinHandle<T: std::future::Future<Output> + std::marker::Send + std::marker::Sync + 'static>
+ + 'static
+{
+}
diff --git a/src/tools/rustfmt/tests/target/nested-if-else.rs b/src/tools/rustfmt/tests/target/nested-if-else.rs
new file mode 100644
index 000000000..9a54789dd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nested-if-else.rs
@@ -0,0 +1,11 @@
+fn issue1518() {
+ Some(Object {
+ field: if a {
+ a_thing
+ } else if b {
+ b_thing
+ } else {
+ c_thing
+ },
+ })
+}
diff --git a/src/tools/rustfmt/tests/target/nested-visual-block.rs b/src/tools/rustfmt/tests/target/nested-visual-block.rs
new file mode 100644
index 000000000..fe7190d0a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nested-visual-block.rs
@@ -0,0 +1,60 @@
+fn main() {
+ // #1078
+ let items = itemize_list(
+ context.source_map,
+ field_iter,
+ "}",
+ |item| match *item {
+ StructLitField::Regular(ref field) => field.span.lo(),
+ StructLitField::Base(ref expr) => {
+ let last_field_hi = fields.last().map_or(span.lo(), |field| field.span.hi());
+ let snippet = context.snippet(mk_sp(last_field_hi, expr.span.lo()));
+ let pos = snippet.find_uncommented("..").unwrap();
+ last_field_hi + BytePos(pos as u32)
+ }
+ },
+ |item| match *item {
+ StructLitField::Regular(ref field) => field.span.hi(),
+ StructLitField::Base(ref expr) => expr.span.hi(),
+ },
+ |item| {
+ match *item {
+ StructLitField::Regular(ref field) => rewrite_field(
+ inner_context,
+ &field,
+ &Constraints::new(v_budget.checked_sub(1).unwrap_or(0), indent),
+ ),
+ StructLitField::Base(ref expr) => {
+ // 2 = ..
+ expr.rewrite(
+ inner_context,
+ &Constraints::new(try_opt!(v_budget.checked_sub(2)), indent + 2),
+ )
+ .map(|s| format!("..{}", s))
+ }
+ }
+ },
+ context.source_map.span_after(span, "{"),
+ span.hi(),
+ );
+
+ // #1580
+ self.0.pool.execute(move || {
+ let _timer = segments.0.rotate_timer.time();
+ if let Err(e) = segments.rotate_async(wal) {
+ error!("error compacting segment storage WAL", unsafe { error: e.display() });
+ }
+ });
+
+ // #1581
+ bootstrap.checks.register("PERSISTED_LOCATIONS", move || {
+ if locations2.0.inner_mut.lock().poisoned {
+ Check::new(
+ State::Error,
+ "Persisted location storage is poisoned due to a write failure",
+ )
+ } else {
+ Check::new(State::Healthy, "Persisted location storage is healthy")
+ }
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/nested_skipped/mod.rs b/src/tools/rustfmt/tests/target/nested_skipped/mod.rs
new file mode 100644
index 000000000..0ab6f081e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nested_skipped/mod.rs
@@ -0,0 +1,3 @@
+fn ugly() {
+ 92;
+}
diff --git a/src/tools/rustfmt/tests/target/nestedmod/mod.rs b/src/tools/rustfmt/tests/target/nestedmod/mod.rs
new file mode 100644
index 000000000..1df462931
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/mod.rs
@@ -0,0 +1,12 @@
+mod mod2a;
+mod mod2b;
+
+mod mymod1 {
+ use mod2a::{Bar, Foo};
+ mod mod3a;
+}
+
+#[path = "mod2c.rs"]
+mod mymod2;
+
+mod submod2;
diff --git a/src/tools/rustfmt/tests/target/nestedmod/mod2a.rs b/src/tools/rustfmt/tests/target/nestedmod/mod2a.rs
new file mode 100644
index 000000000..5df457a83
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/mod2a.rs
@@ -0,0 +1,4 @@
+// This is an empty file containing only
+// comments
+
+// ...................
diff --git a/src/tools/rustfmt/tests/target/nestedmod/mod2b.rs b/src/tools/rustfmt/tests/target/nestedmod/mod2b.rs
new file mode 100644
index 000000000..9b6ea844e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/mod2b.rs
@@ -0,0 +1,2 @@
+#[path = "mod2a.rs"]
+mod c;
diff --git a/src/tools/rustfmt/tests/target/nestedmod/mod2c.rs b/src/tools/rustfmt/tests/target/nestedmod/mod2c.rs
new file mode 100644
index 000000000..7db4572e7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/mod2c.rs
@@ -0,0 +1,3 @@
+// A standard mod
+
+fn a() {}
diff --git a/src/tools/rustfmt/tests/target/nestedmod/mymod1/mod3a.rs b/src/tools/rustfmt/tests/target/nestedmod/mymod1/mod3a.rs
new file mode 100644
index 000000000..ae09d8dda
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/mymod1/mod3a.rs
@@ -0,0 +1,2 @@
+// Another mod
+fn a() {}
diff --git a/src/tools/rustfmt/tests/target/nestedmod/submod2/a.rs b/src/tools/rustfmt/tests/target/nestedmod/submod2/a.rs
new file mode 100644
index 000000000..120b17145
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/submod2/a.rs
@@ -0,0 +1,6 @@
+// Yet Another mod
+// Nested
+
+use c::a;
+
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/nestedmod/submod2/mod.rs b/src/tools/rustfmt/tests/target/nestedmod/submod2/mod.rs
new file mode 100644
index 000000000..52f8be910
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/nestedmod/submod2/mod.rs
@@ -0,0 +1,5 @@
+// Another mod
+
+mod a;
+
+use a::a;
diff --git a/src/tools/rustfmt/tests/target/no_arg_with_commnet.rs b/src/tools/rustfmt/tests/target/no_arg_with_commnet.rs
new file mode 100644
index 000000000..69f61b60f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/no_arg_with_commnet.rs
@@ -0,0 +1 @@
+fn foo(/* cooment */) {}
diff --git a/src/tools/rustfmt/tests/target/no_new_line_beginning.rs b/src/tools/rustfmt/tests/target/no_new_line_beginning.rs
new file mode 100644
index 000000000..f328e4d9d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/no_new_line_beginning.rs
@@ -0,0 +1 @@
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/normalize_doc_attributes_should_not_imply_format_doc_comments.rs b/src/tools/rustfmt/tests/target/normalize_doc_attributes_should_not_imply_format_doc_comments.rs
new file mode 100644
index 000000000..562d9565e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/normalize_doc_attributes_should_not_imply_format_doc_comments.rs
@@ -0,0 +1,15 @@
+// rustfmt-normalize_doc_attributes: true
+
+/// Foo
+///
+/// # Example
+/// ```
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+/// # #![cfg_attr(not(dox), no_std)]
+/// fn foo() { }
+/// ```
+///
+fn foo() {}
+
+///Bar documents
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/target/normalize_multiline_doc_attribute.rs b/src/tools/rustfmt/tests/target/normalize_multiline_doc_attribute.rs
new file mode 100644
index 000000000..890c9bb20
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/normalize_multiline_doc_attribute.rs
@@ -0,0 +1,12 @@
+// rustfmt-unstable: true
+// rustfmt-normalize_doc_attributes: true
+
+///This comment
+///is split
+///on multiple lines
+fn foo() {}
+
+/// B1
+///
+/// A1
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/target/obsolete_in_place.rs b/src/tools/rustfmt/tests/target/obsolete_in_place.rs
new file mode 100644
index 000000000..3f364c1ae
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/obsolete_in_place.rs
@@ -0,0 +1,9 @@
+// #2953
+
+macro_rules! demo {
+ ($a:ident <- $b:expr) => {};
+}
+
+fn main() {
+ demo!(i <- 0);
+}
diff --git a/src/tools/rustfmt/tests/target/one_line_if_v1.rs b/src/tools/rustfmt/tests/target/one_line_if_v1.rs
new file mode 100644
index 000000000..b3c6c4cbe
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/one_line_if_v1.rs
@@ -0,0 +1,46 @@
+// rustfmt-version: One
+
+fn plain_if(x: bool) -> u8 {
+ if x {
+ 0
+ } else {
+ 1
+ }
+}
+
+fn paren_if(x: bool) -> u8 {
+ (if x { 0 } else { 1 })
+}
+
+fn let_if(x: bool) -> u8 {
+ let x = if x { foo() } else { bar() };
+ x
+}
+
+fn return_if(x: bool) -> u8 {
+ return if x { 0 } else { 1 };
+}
+
+fn multi_if() {
+ use std::io;
+ if x {
+ foo()
+ } else {
+ bar()
+ }
+ if x {
+ foo()
+ } else {
+ bar()
+ }
+}
+
+fn middle_if() {
+ use std::io;
+ if x {
+ foo()
+ } else {
+ bar()
+ }
+ let x = 1;
+}
diff --git a/src/tools/rustfmt/tests/target/one_line_if_v2.rs b/src/tools/rustfmt/tests/target/one_line_if_v2.rs
new file mode 100644
index 000000000..81ca4c8b8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/one_line_if_v2.rs
@@ -0,0 +1,38 @@
+// rustfmt-version: Two
+
+fn plain_if(x: bool) -> u8 {
+ if x { 0 } else { 1 }
+}
+
+fn paren_if(x: bool) -> u8 {
+ (if x { 0 } else { 1 })
+}
+
+fn let_if(x: bool) -> u8 {
+ let x = if x { foo() } else { bar() };
+ x
+}
+
+fn return_if(x: bool) -> u8 {
+ return if x { 0 } else { 1 };
+}
+
+fn multi_if() {
+ use std::io;
+ if x {
+ foo()
+ } else {
+ bar()
+ }
+ if x { foo() } else { bar() }
+}
+
+fn middle_if() {
+ use std::io;
+ if x {
+ foo()
+ } else {
+ bar()
+ }
+ let x = 1;
+}
diff --git a/src/tools/rustfmt/tests/target/other.rs b/src/tools/rustfmt/tests/target/other.rs
new file mode 100644
index 000000000..dfce84fcd
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/other.rs
@@ -0,0 +1,5 @@
+// Part of multiple.rs
+
+fn bob() {
+ println!("hello other!");
+}
diff --git a/src/tools/rustfmt/tests/target/paren.rs b/src/tools/rustfmt/tests/target/paren.rs
new file mode 100644
index 000000000..f7714d85d
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/paren.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let x = (1);
+ let y = (/* comment */(2));
+ let z = ((3)/* comment */);
+ let a = (4/* comment */);
+}
diff --git a/src/tools/rustfmt/tests/target/path_clarity/foo.rs b/src/tools/rustfmt/tests/target/path_clarity/foo.rs
new file mode 100644
index 000000000..cd247fabf
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/path_clarity/foo.rs
@@ -0,0 +1,2 @@
+// rustfmt-edition: 2018
+mod bar;
diff --git a/src/tools/rustfmt/tests/target/path_clarity/foo/bar.rs b/src/tools/rustfmt/tests/target/path_clarity/foo/bar.rs
new file mode 100644
index 000000000..b18a7d349
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/path_clarity/foo/bar.rs
@@ -0,0 +1,3 @@
+pub fn fn_in_bar() {
+ println!("foo/bar.rs");
+}
diff --git a/src/tools/rustfmt/tests/target/paths.rs b/src/tools/rustfmt/tests/target/paths.rs
new file mode 100644
index 000000000..0d2ba797e
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/paths.rs
@@ -0,0 +1,28 @@
+// rustfmt-normalize_comments: true
+
+fn main() {
+ let constellation_chan =
+ Constellation::<layout::layout_task::LayoutTask, script::script_task::ScriptTask>::start(
+ compositor_proxy,
+ resource_task,
+ image_cache_task,
+ font_cache_task,
+ time_profiler_chan,
+ mem_profiler_chan,
+ devtools_chan,
+ storage_task,
+ supports_clipboard,
+ );
+
+ Quux::<
+ ParamOne, // Comment 1
+ ParamTwo, // Comment 2
+ >::some_func();
+
+ <*mut JSObject>::relocate(entry);
+
+ let x: Foo<A>;
+ let x: Foo/*::*/<A>;
+}
+
+fn op(foo: Bar, key: &[u8], upd: Fn(Option<&memcache::Item>, Baz) -> Result) -> MapResult {}
diff --git a/src/tools/rustfmt/tests/target/pattern-condense-wildcards.rs b/src/tools/rustfmt/tests/target/pattern-condense-wildcards.rs
new file mode 100644
index 000000000..a85a16004
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/pattern-condense-wildcards.rs
@@ -0,0 +1,12 @@
+// rustfmt-normalize_comments: true
+// rustfmt-condense_wildcard_suffixes: true
+
+fn main() {
+ match x {
+ Butt(..) => "hah",
+ Tup(_) => "nah",
+ Quad(_, _, x, _) => " also no rewrite",
+ Quad(x, ..) => "condense me pls",
+ Weird(x, _, _, /* don't condense before */ ..) => "pls work",
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/pattern.rs b/src/tools/rustfmt/tests/target/pattern.rs
new file mode 100644
index 000000000..576018ac6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/pattern.rs
@@ -0,0 +1,98 @@
+// rustfmt-normalize_comments: true
+#![feature(exclusive_range_pattern)]
+use core::u8::MAX;
+
+fn main() {
+ let z = match x {
+ "pat1" => 1,
+ (ref x, ref mut y /* comment */) => 2,
+ };
+
+ if let <T as Trait>::CONST = ident {
+ do_smth();
+ }
+
+ let Some(ref xyz /* comment! */) = opt;
+
+ if let None = opt2 {
+ panic!("oh noes");
+ }
+
+ let foo @ bar(f) = 42;
+ let a::foo(..) = 42;
+ let [] = 42;
+ let [a, b, c] = 42;
+ let [a, b, c] = 42;
+ let [a, b, c, d, e, f, g] = 42;
+ let foo {} = 42;
+ let foo { .. } = 42;
+ let foo { x, y: ref foo, .. } = 42;
+ let foo {
+ x,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo,
+ ..
+ } = 42;
+ let foo {
+ x,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo,
+ } = 42;
+ let foo {
+ x,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo,
+ ..
+ };
+ let foo {
+ x,
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: ref foo,
+ };
+
+ match b"12" {
+ [0, 1..MAX] => {}
+ _ => {}
+ }
+}
+
+impl<'a, 'b> ResolveGeneratedContentFragmentMutator<'a, 'b> {
+ fn mutate_fragment(&mut self, fragment: &mut Fragment) {
+ match **info {
+ GeneratedContentInfo::ContentItem(ContentItem::Counter(
+ ref counter_name,
+ counter_style,
+ )) => {}
+ }
+ }
+}
+
+fn issue_1319() {
+ if let (Event { .. }, ..) = ev_state {}
+}
+
+fn issue_1874() {
+ if let Some(()) = x {
+ y
+ }
+}
+
+fn combine_patterns() {
+ let x = match y {
+ Some(Some(Foo {
+ z: Bar(..),
+ a: Bar(..),
+ b: Bar(..),
+ })) => z,
+ _ => return,
+ };
+}
+
+fn slice_patterns() {
+ match b"123" {
+ [0, ..] => {}
+ [0, foo] => {}
+ _ => {}
+ }
+}
+
+fn issue3728() {
+ let foo = |(c,)| c;
+ foo((1,));
+}
diff --git a/src/tools/rustfmt/tests/target/preserves_carriage_return_for_unix.rs b/src/tools/rustfmt/tests/target/preserves_carriage_return_for_unix.rs
new file mode 100644
index 000000000..e5e0b2865
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/preserves_carriage_return_for_unix.rs
@@ -0,0 +1,2 @@
+// rustfmt-newline_style: Unix
+// Foo Bar
diff --git a/src/tools/rustfmt/tests/target/preserves_carriage_return_for_windows.rs b/src/tools/rustfmt/tests/target/preserves_carriage_return_for_windows.rs
new file mode 100644
index 000000000..1085360ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/preserves_carriage_return_for_windows.rs
@@ -0,0 +1,2 @@
+// rustfmt-newline_style: Windows
+// Foo Bar
diff --git a/src/tools/rustfmt/tests/target/pub-restricted.rs b/src/tools/rustfmt/tests/target/pub-restricted.rs
new file mode 100644
index 000000000..0e178ef10
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/pub-restricted.rs
@@ -0,0 +1,51 @@
+pub(super) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
+
+pub(crate) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
+
+pub(in global::path::to::some_mod) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
+
+pub(in local::path::to::some_mod) enum WriteState<D> {
+ WriteId {
+ id: U64Writer,
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteSize {
+ size: U64Writer,
+ payload: Option<Writer<D>>,
+ },
+ WriteData(Writer<D>),
+}
diff --git a/src/tools/rustfmt/tests/target/raw_identifiers.rs b/src/tools/rustfmt/tests/target/raw_identifiers.rs
new file mode 100644
index 000000000..6ab0fdf05
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/raw_identifiers.rs
@@ -0,0 +1,66 @@
+#![feature(custom_attribute)]
+#![feature(raw_identifiers)]
+#![feature(extern_types)]
+#![allow(invalid_type_param_default)]
+#![allow(unused_attributes)]
+
+use r#foo as r#alias_foo;
+
+// https://github.com/rust-lang/rustfmt/issues/3837
+pub(crate) static r#break: &'static str = "foo";
+
+fn main() {
+ #[r#attr]
+ r#foo::r#bar();
+
+ let r#local = r#Struct { r#field: () };
+ r#local.r#field = 1;
+ r#foo.r#barr();
+ let r#async = r#foo(r#local);
+ r#macro!();
+
+ if let r#sub_pat @ r#Foo(_) = r#Foo(3) {}
+
+ match r#async {
+ r#Foo | r#Bar => r#foo(),
+ }
+}
+
+fn r#bar<'a, r#T>(r#x: &'a r#T) {}
+
+mod r#foo {
+ pub fn r#bar() {}
+}
+
+enum r#Foo {
+ r#Bar {},
+}
+
+struct r#Struct {
+ r#field: r#FieldType,
+}
+
+trait r#Trait {
+ type r#Type;
+}
+
+impl r#Trait for r#Impl {
+ type r#Type = r#u32;
+ fn r#xxx(r#fjio: r#u32) {}
+}
+
+extern "C" {
+ type r#ccc;
+ static r#static_val: u32;
+}
+
+macro_rules! r#macro {
+ () => {};
+}
+
+macro_rules! foo {
+ ($x:expr) => {
+ let r#catch = $x + 1;
+ println!("{}", r#catch);
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/remove_blank_lines.rs b/src/tools/rustfmt/tests/target/remove_blank_lines.rs
new file mode 100644
index 000000000..de74c81ef
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/remove_blank_lines.rs
@@ -0,0 +1,28 @@
+fn main() {
+ let x = 1;
+
+ let y = 2;
+
+ println!("x + y = {}", x + y);
+}
+
+fn foo() {
+ #![attribute]
+
+ let x = 1;
+
+ // comment
+}
+// comment after item
+
+// comment before item
+fn bar() {
+ let x = 1;
+ // comment after statement
+
+ // comment before statement
+ let y = 2;
+ let z = 3;
+
+ println!("x + y + z = {}", x + y + z);
+}
diff --git a/src/tools/rustfmt/tests/target/reorder-impl-items.rs b/src/tools/rustfmt/tests/target/reorder-impl-items.rs
new file mode 100644
index 000000000..16efff55b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/reorder-impl-items.rs
@@ -0,0 +1,15 @@
+// rustfmt-reorder_impl_items: true
+
+// The ordering of the following impl items should be idempotent.
+impl<'a> Command<'a> {
+ pub fn send_to(&self, w: &mut io::Write) -> io::Result<()> {
+ match self {
+ &Command::Data(ref c) => c.send_to(w),
+ &Command::Vrfy(ref c) => c.send_to(w),
+ }
+ }
+
+ pub fn parse(arg: &[u8]) -> Result<Command, ParseError> {
+ nom_to_result(command(arg))
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/should_not_format_string_when_format_strings_is_not_set.rs b/src/tools/rustfmt/tests/target/should_not_format_string_when_format_strings_is_not_set.rs
new file mode 100644
index 000000000..efb755d4a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/should_not_format_string_when_format_strings_is_not_set.rs
@@ -0,0 +1,16 @@
+// format_strings is false by default.
+
+println!(
+ "DirEntry {{ \
+ binary_name: {:<64}, \
+ context_id: {:>2}, \
+ file_size: {:>6}, \
+ offset: 0x {:>08X}, \
+ actual_crc: 0x{:>08X} \
+ }}",
+ dir_entry.binary_name,
+ dir_entry.context_id,
+ dir_entry.file_size,
+ dir_entry.offset,
+ dir_entry.actual_crc
+);
diff --git a/src/tools/rustfmt/tests/target/single-line-if-else.rs b/src/tools/rustfmt/tests/target/single-line-if-else.rs
new file mode 100644
index 000000000..98fd793cb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/single-line-if-else.rs
@@ -0,0 +1,58 @@
+// Format if-else expressions on a single line, when possible.
+
+fn main() {
+ let a = if 1 > 2 { unreachable!() } else { 10 };
+
+ let a = if x {
+ 1
+ } else if y {
+ 2
+ } else {
+ 3
+ };
+
+ let b = if cond() {
+ 5
+ } else {
+ // Brief comment.
+ 10
+ };
+
+ let c = if cond() {
+ statement();
+
+ 5
+ } else {
+ 10
+ };
+
+ let d = if let Some(val) = turbo {
+ "cool"
+ } else {
+ "beans"
+ };
+
+ if cond() {
+ statement();
+ } else {
+ other_statement();
+ }
+
+ if true {
+ do_something()
+ }
+
+ let x = if veeeeeeeeery_loooooong_condition() {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ } else {
+ bbbbbbbbbb
+ };
+
+ let x = if veeeeeeeeery_loooooong_condition() {
+ aaaaaaaaaaaaaaaaaaaaaaaaa
+ } else {
+ bbbbbbbbbb
+ };
+
+ funk(if test() { 1 } else { 2 }, arg2);
+}
diff --git a/src/tools/rustfmt/tests/target/single-line-macro/v1.rs b/src/tools/rustfmt/tests/target/single-line-macro/v1.rs
new file mode 100644
index 000000000..a3aa631ed
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/single-line-macro/v1.rs
@@ -0,0 +1,10 @@
+// rustfmt-version: One
+
+// #2652
+// Preserve trailing comma inside macro, even if it looks an array.
+macro_rules! bar {
+ ($m:ident) => {
+ $m!([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z,]);
+ $m!([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z]);
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/single-line-macro/v2.rs b/src/tools/rustfmt/tests/target/single-line-macro/v2.rs
new file mode 100644
index 000000000..9c6bcf33a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/single-line-macro/v2.rs
@@ -0,0 +1,14 @@
+// rustfmt-version: Two
+
+// #2652
+// Preserve trailing comma inside macro, even if it looks an array.
+macro_rules! bar {
+ ($m:ident) => {
+ $m!([
+ a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z,
+ ]);
+ $m!([
+ a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z
+ ]);
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/skip.rs b/src/tools/rustfmt/tests/target/skip.rs
new file mode 100644
index 000000000..6c9737a33
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/skip.rs
@@ -0,0 +1,87 @@
+// Test the skip attribute works
+
+#[rustfmt::skip]
+fn foo() { badly; formatted; stuff
+; }
+
+#[rustfmt::skip]
+trait Foo
+{
+fn foo(
+);
+}
+
+impl LateLintPass for UsedUnderscoreBinding {
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ fn check_expr() { // comment
+ }
+}
+
+fn issue1346() {
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ Box::new(self.inner.call(req).then(move |result| {
+ match result {
+ Ok(resp) => Box::new(future::done(Ok(resp))),
+ Err(e) => {
+ try_error!(clo_stderr, "{}", e);
+ Box::new(future::err(e))
+ }
+ }
+ }))
+}
+
+fn skip_on_statements() {
+ // Outside block
+ #[rustfmt::skip]
+ {
+ foo; bar;
+ // junk
+ }
+
+ {
+ // Inside block
+ #![rustfmt::skip]
+ foo; bar;
+ // junk
+ }
+
+ // Semi
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ foo(
+ 1, 2, 3, 4,
+ 1, 2,
+ 1, 2, 3,
+ );
+
+ // Local
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ let x = foo( a, b , c);
+
+ // Item
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ use foobar;
+
+ // Mac
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ vec![
+ 1, 2, 3, 4,
+ 1, 2, 3, 4,
+ 1, 2, 3, 4,
+ 1, 2, 3,
+ 1,
+ 1, 2,
+ 1,
+ ];
+
+ // Expr
+ #[cfg_attr(rustfmt, rustfmt::skip)]
+ foo( a, b , c)
+}
+
+// Check that the skip attribute applies to other attributes.
+#[rustfmt::skip]
+#[cfg
+( a , b
+)]
+fn
+main() {}
diff --git a/src/tools/rustfmt/tests/target/skip/foo.rs b/src/tools/rustfmt/tests/target/skip/foo.rs
new file mode 100644
index 000000000..776658f8f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/skip/foo.rs
@@ -0,0 +1,5 @@
+#![rustfmt::skip]
+
+fn
+foo()
+{}
diff --git a/src/tools/rustfmt/tests/target/skip/main.rs b/src/tools/rustfmt/tests/target/skip/main.rs
new file mode 100644
index 000000000..2d33bef92
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/skip/main.rs
@@ -0,0 +1,5 @@
+mod foo;
+
+fn main() {
+ println!("Hello, world!");
+}
diff --git a/src/tools/rustfmt/tests/target/skip/preserve_trailing_comment.rs b/src/tools/rustfmt/tests/target/skip/preserve_trailing_comment.rs
new file mode 100644
index 000000000..f85de3325
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/skip/preserve_trailing_comment.rs
@@ -0,0 +1,7 @@
+#![rustfmt::skip]
+
+fn main() {
+ println!("Hello, world!");
+}
+
+// Trailing Comment
diff --git a/src/tools/rustfmt/tests/target/skip_mod.rs b/src/tools/rustfmt/tests/target/skip_mod.rs
new file mode 100644
index 000000000..d770ab349
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/skip_mod.rs
@@ -0,0 +1,3 @@
+#![rustfmt::skip]
+use a :: b
+;
diff --git a/src/tools/rustfmt/tests/target/soft-wrapping.rs b/src/tools/rustfmt/tests/target/soft-wrapping.rs
new file mode 100644
index 000000000..5b4c6d9e8
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/soft-wrapping.rs
@@ -0,0 +1,15 @@
+// rustfmt-wrap_comments: true
+// rustfmt-max_width: 80
+// Soft wrapping for comments.
+
+// #535, soft wrapping for comments
+// Compare the lowest `f32` of both inputs for greater than or equal. The
+// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is
+// ggreater than or equal `b.extract(0)`, or `0` otherwise. The upper 96 bits
+// off the result are the upper 96 bits of `a`.
+
+/// Compares the lowest `f32` of both inputs for greater than or equal. The
+/// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is
+/// greater than or equal `b.extract(0)`, or `0` otherwise. The upper 96 bits
+/// off the result are the upper 96 bits of `a`.
+fn foo() {}
diff --git a/src/tools/rustfmt/tests/target/space-not-before-newline.rs b/src/tools/rustfmt/tests/target/space-not-before-newline.rs
new file mode 100644
index 000000000..9d75b726a
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/space-not-before-newline.rs
@@ -0,0 +1,8 @@
+struct Foo {
+ a: (),
+ // spaces ^^^ to be removed
+}
+enum Foo {
+ Bar,
+ // spaces ^^^ to be removed
+}
diff --git a/src/tools/rustfmt/tests/target/spaces-around-ranges.rs b/src/tools/rustfmt/tests/target/spaces-around-ranges.rs
new file mode 100644
index 000000000..b53e5b58b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/spaces-around-ranges.rs
@@ -0,0 +1,15 @@
+// rustfmt-spaces_around_ranges: true
+
+fn bar(v: &[u8]) {}
+
+fn foo() {
+ let a = vec![0; 20];
+ for j in 0 ..= 20 {
+ for i in 0 .. 3 {
+ bar(a[i .. j]);
+ bar(a[i ..]);
+ bar(a[.. j]);
+ bar(a[..= (j + 1)]);
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/statements.rs b/src/tools/rustfmt/tests/target/statements.rs
new file mode 100644
index 000000000..c1e7dc464
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/statements.rs
@@ -0,0 +1,42 @@
+// FIXME(calebcartwright) - Hopefully one day we can
+// elide these redundant semis like we do in other contexts.
+fn redundant_item_semis() {
+ impl Foo {
+ fn get(&self) -> usize {
+ 5
+ }
+ };
+
+ impl Bar {
+ fn get(&self) -> usize {
+ 5
+ }
+ } /*asdfsf*/
+ ;
+
+ impl Baz {
+ fn get(&self) -> usize {
+ 5
+ }
+ } /*asdfsf*/
+
+ // why would someone do this
+ ;
+
+ impl Qux {
+ fn get(&self) -> usize {
+ 5
+ }
+ }
+
+ // why
+ ;
+
+ impl Lorem {
+ fn get(&self) -> usize {
+ 5
+ }
+ }
+ // oh why
+ ;
+}
diff --git a/src/tools/rustfmt/tests/target/static.rs b/src/tools/rustfmt/tests/target/static.rs
new file mode 100644
index 000000000..5daccf3e7
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/static.rs
@@ -0,0 +1,27 @@
+const FILE_GENERIC_READ: DWORD =
+ STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE;
+
+static boolnames: &'static [&'static str] = &[
+ "bw", "am", "xsb", "xhp", "xenl", "eo", "gn", "hc", "km", "hs", "in", "db", "da", "mir",
+ "msgr", "os", "eslok", "xt", "hz", "ul", "xon", "nxon", "mc5i", "chts", "nrrmc", "npc",
+ "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy", "xvpa", "sam", "cpix", "lpix", "OTbs",
+ "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr",
+];
+
+static mut name: SomeType =
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa;
+
+pub static count: u8 = 10;
+
+pub const test: &Type = &val;
+
+impl Color {
+ pub const WHITE: u32 = 10;
+}
+
+// #1391
+pub const XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX: NTSTATUS =
+ 0 as usize;
+
+pub const XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX:
+ Yyyyyyyyyyyyyyyyyyyyyyyyyyyy = 1;
diff --git a/src/tools/rustfmt/tests/target/string-lit-2.rs b/src/tools/rustfmt/tests/target/string-lit-2.rs
new file mode 100644
index 000000000..6b95e25a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/string-lit-2.rs
@@ -0,0 +1,25 @@
+fn main() -> &'static str {
+ let too_many_lines = "Hello";
+ let leave_me = "sssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss\
+ s
+ jjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjj";
+}
+
+fn issue_1237() {
+ let msg = "eedadn\n\
+ drvtee\n\
+ eandsr\n\
+ raavrd\n\
+ atevrs\n\
+ tsrnev\n\
+ sdttsa\n\
+ rasrtv\n\
+ nssdts\n\
+ ntnada\n\
+ svetve\n\
+ tesnvt\n\
+ vntsnd\n\
+ vrdear\n\
+ dvrsen\n\
+ enarar";
+}
diff --git a/src/tools/rustfmt/tests/target/string-lit-custom.rs b/src/tools/rustfmt/tests/target/string-lit-custom.rs
new file mode 100644
index 000000000..89639b8eb
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/string-lit-custom.rs
@@ -0,0 +1,20 @@
+fn main() {
+ let expected = "; ModuleID = \'foo\'
+
+; Function Attrs: nounwind
+declare void @llvm.memset.p0i8.i32(i8* nocapture, i8, i32, i32, i1) #0
+
+declare i32 @write(i32, i8*, i32)
+
+declare i32 @putchar(i32)
+
+declare i32 @getchar()
+
+define i32 @main() {
+entry:
+ ret i32 0
+}
+
+attributes #0 = { nounwind }
+";
+}
diff --git a/src/tools/rustfmt/tests/target/string-lit.rs b/src/tools/rustfmt/tests/target/string-lit.rs
new file mode 100644
index 000000000..2d3306107
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/string-lit.rs
@@ -0,0 +1,63 @@
+// rustfmt-format_strings: true
+// Long string literals
+
+fn main() -> &'static str {
+ let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAaAA \
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
+ let str = "AAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAa";
+ let str = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
+
+ let too_many_lines = "Hello";
+
+ // Make sure we don't break after an escape character.
+ let odd_length_name =
+ "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
+ let even_length_name =
+ "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
+
+ let really_long_variable_name = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
+
+ let raw_string = r#"Do
+not
+remove
+formatting"#;
+
+ filename.replace(" ", "\\");
+
+ let xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx =
+ funktion("yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy");
+
+ let unicode = "a̐éö̲\r\n";
+ let unicode2 = "Löwe 老虎 Léopard";
+ let unicode3 = "中华Việt Nam";
+ let unicode4 = "☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃☃";
+
+ "stuffin'"
+}
+
+fn issue682() {
+ let a = "hello \\ o/";
+ let b = a.replace("\\ ", "\\");
+}
+
+fn issue716() {
+ println!(
+ "forall x. mult(e(), x) = x /\\
+ forall x. mult(x, x) = e()"
+ );
+}
+
+fn issue_1282() {
+ {
+ match foo {
+ Permission::AndroidPermissionAccessLocationExtraCommands => {
+ "android.permission.ACCESS_LOCATION_EXTRA_COMMANDS"
+ }
+ }
+ }
+}
+
+// #1987
+#[link_args = "-s NO_FILESYSTEM=1 -s NO_EXIT_RUNTIME=1 -s EXPORTED_RUNTIME_METHODS=[\"_malloc\"] \
+ -s NO_DYNAMIC_EXECUTION=1 -s ELIMINATE_DUPLICATE_FUNCTIONS=1 -s EVAL_CTORS=1"]
+extern "C" {}
diff --git a/src/tools/rustfmt/tests/target/string_punctuation.rs b/src/tools/rustfmt/tests/target/string_punctuation.rs
new file mode 100644
index 000000000..0b8ec1b7f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/string_punctuation.rs
@@ -0,0 +1,24 @@
+// rustfmt-format_strings: true
+
+fn main() {
+ println!(
+ "ThisIsAReallyLongStringWithNoSpaces.It_should_prefer_to_break_onpunctuation:\
+ Likethisssssssssssss"
+ );
+ format!("{}__{}__{}ItShouldOnlyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyNoticeSemicolonsPeriodsColonsAndCommasAndResortToMid-CharBreaksAfterPunctuation{}{}",x,y,z,a,b);
+ println!(
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaalhijalfhiigjapdighjapdigjapdighdapighapdighpaidhg;\
+ adopgihadoguaadbadgad,qeoihapethae8t0aet8haetadbjtaeg;\
+ ooeouthaoeutgadlgajduabgoiuadogabudogubaodugbadgadgadga;adoughaoeugbaouea"
+ );
+ println!(
+ "sentuhaesnuthaesnutheasunteahusnaethuseantuihaesntdiastnidaetnuhaideuhsenathe。\
+ WeShouldSupportNonAsciiPunctuations§\
+ ensuhatheasunteahsuneathusneathuasnuhaesnuhaesnuaethusnaetuheasnuth"
+ );
+ println!(
+ "ThisIsASampleOfCJKString.祇園精舍の鐘の声、諸行無常の響きあり。娑羅双樹の花の色、\
+ 盛者必衰の理をあらはす。奢れる人も久しからず、ただ春の夜の夢のごとし。\
+ 猛き者もつひにはほろびぬ、ひとへに風の前の塵に同じ。"
+ );
+}
diff --git a/src/tools/rustfmt/tests/target/struct-field-attributes.rs b/src/tools/rustfmt/tests/target/struct-field-attributes.rs
new file mode 100644
index 000000000..0f461b98b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct-field-attributes.rs
@@ -0,0 +1,62 @@
+// #1535
+#![feature(struct_field_attributes)]
+
+struct Foo {
+ bar: u64,
+
+ #[cfg(test)]
+ qux: u64,
+}
+
+fn do_something() -> Foo {
+ Foo {
+ bar: 0,
+
+ #[cfg(test)]
+ qux: 1,
+ }
+}
+
+fn main() {
+ do_something();
+}
+
+// #1462
+struct Foo {
+ foo: usize,
+ #[cfg(feature = "include-bar")]
+ bar: usize,
+}
+
+fn new_foo() -> Foo {
+ Foo {
+ foo: 0,
+ #[cfg(feature = "include-bar")]
+ bar: 0,
+ }
+}
+
+// #2044
+pub enum State {
+ Closure(
+ #[cfg_attr(
+ feature = "serde_derive",
+ serde(state_with = "::serialization::closure")
+ )]
+ GcPtr<ClosureData>,
+ ),
+}
+
+struct Fields(
+ #[cfg_attr(
+ feature = "serde_derive",
+ serde(state_with = "::base::serialization::shared")
+ )]
+ Arc<Vec<InternedStr>>,
+);
+
+// #2309
+pub struct A {
+ #[doc = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"]
+ pub foos: Vec<bool>,
+}
diff --git a/src/tools/rustfmt/tests/target/struct_field_doc_comment.rs b/src/tools/rustfmt/tests/target/struct_field_doc_comment.rs
new file mode 100644
index 000000000..ebb01a668
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct_field_doc_comment.rs
@@ -0,0 +1,69 @@
+// #5215
+struct MyTuple(
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ u32,
+ /// Doc Comments
+ // TODO note
+ u64,
+);
+
+struct MyTuple(
+ #[cfg(unix)] // some comment
+ u64,
+ #[cfg(not(unix))] /*block comment */ u32,
+);
+
+struct MyTuple(
+ #[cfg(unix)]
+ // some comment
+ u64,
+ #[cfg(not(unix))]
+ /*block comment */
+ u32,
+);
+
+struct MyTuple(
+ #[cfg(unix)] // some comment
+ pub u64,
+ #[cfg(not(unix))] /*block comment */ pub(crate) u32,
+);
+
+struct MyTuple(
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ pub u32,
+ /// Doc Comments
+ // TODO note
+ pub(crate) u64,
+);
+
+struct MyStruct {
+ #[cfg(unix)] // some comment
+ a: u64,
+ #[cfg(not(unix))] /*block comment */ b: u32,
+}
+
+struct MyStruct {
+ #[cfg(unix)] // some comment
+ pub a: u64,
+ #[cfg(not(unix))] /*block comment */ pub(crate) b: u32,
+}
+
+struct MyStruct {
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ a: u32,
+ /// Doc Comments
+ // TODO note
+ b: u64,
+}
+
+struct MyStruct {
+ /// Doc Comments
+ /* TODO note to add more to Doc Comments */
+ pub a: u32,
+ /// Doc Comments
+ // TODO note
+ pub(crate) b: u64,
+}
diff --git a/src/tools/rustfmt/tests/target/struct_lits.rs b/src/tools/rustfmt/tests/target/struct_lits.rs
new file mode 100644
index 000000000..d3bc364c3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct_lits.rs
@@ -0,0 +1,190 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo { a: x };
+
+ Foo {
+ a: foo(), // comment
+ // comment
+ b: bar(),
+ ..something
+ };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b() };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ a: f(),
+ b: b(),
+ };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo { a: Bar, b: f() };
+
+ Quux {
+ x: if cond {
+ bar();
+ },
+ y: baz(),
+ };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit.
+ // Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item,
+ };
+
+ Some(Data::MethodCallData(MethodCallData {
+ span: sub_span.unwrap(),
+ scope: self.enclosing_scope(id),
+ ref_id: def_id,
+ decl_id: Some(decl_id),
+ }));
+
+ Diagram {
+ // o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ graph: G,
+ }
+}
+
+fn matcher() {
+ TagTerminatedByteMatcher {
+ matcher: ByteMatcher {
+ pattern: b"<HTML",
+ mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
+ },
+ };
+}
+
+fn issue177() {
+ struct Foo<T> {
+ memb: T,
+ }
+ let foo = Foo::<i64> { memb: 10 };
+}
+
+fn issue201() {
+ let s = S { a: 0, ..b };
+}
+
+fn issue201_2() {
+ let s = S { a: S2 { ..c }, ..b };
+}
+
+fn issue278() {
+ let s = S {
+ a: 0,
+ //
+ b: 0,
+ };
+ let s1 = S {
+ a: 0,
+ // foo
+ //
+ // bar
+ b: 0,
+ };
+}
+
+fn struct_exprs() {
+ Foo { a: 1, b: f(2) };
+ Foo {
+ a: 1,
+ b: f(2),
+ ..g(3)
+ };
+ LoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooongStruct {
+ ..base
+ };
+ IntrinsicISizesContribution {
+ content_intrinsic_sizes: IntrinsicISizes {
+ minimum_inline_size: 0,
+ },
+ };
+}
+
+fn issue123() {
+ Foo { a: b, c: d, e: f };
+
+ Foo {
+ a: bb,
+ c: dd,
+ e: ff,
+ };
+
+ Foo {
+ a: ddddddddddddddddddddd,
+ b: cccccccccccccccccccccccccccccccccccccc,
+ };
+}
+
+fn issue491() {
+ Foo {
+ guard: None,
+ arm: 0, // Comment
+ };
+
+ Foo {
+ arm: 0, // Comment
+ };
+
+ Foo {
+ a: aaaaaaaaaa,
+ b: bbbbbbbb,
+ c: cccccccccc,
+ d: dddddddddd, // a comment
+ e: eeeeeeeee,
+ };
+}
+
+fn issue698() {
+ Record {
+ ffffffffffffffffffffffffffields: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ };
+ Record {
+ ffffffffffffffffffffffffffields:
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ }
+}
+
+fn issue835() {
+ MyStruct {};
+ MyStruct { /* a comment */ };
+ MyStruct {
+ // Another comment
+ };
+ MyStruct {}
+}
+
+fn field_init_shorthand() {
+ MyStruct { x, y, z };
+ MyStruct { x, y, z, ..base };
+ Foo {
+ aaaaaaaaaa,
+ bbbbbbbb,
+ cccccccccc,
+ dddddddddd, // a comment
+ eeeeeeeee,
+ };
+ Record {
+ ffffffffffffffffffffffffffieldsaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/struct_lits_multiline.rs b/src/tools/rustfmt/tests/target/struct_lits_multiline.rs
new file mode 100644
index 000000000..b29aafd05
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct_lits_multiline.rs
@@ -0,0 +1,117 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-struct_lit_single_line: false
+
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo {
+ a: x,
+ };
+
+ Foo {
+ a: foo(), // comment
+ // comment
+ b: bar(),
+ ..something
+ };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ a: foo(),
+ b: bar(),
+ };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ a: foo(),
+ b: bar(),
+ };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo {
+ // Comment
+ a: foo(), // Comment
+ // Comment
+ b: bar(), // Comment
+ };
+
+ Foo {
+ a: Bar,
+ b: foo(),
+ };
+
+ Quux {
+ x: if cond {
+ bar();
+ },
+ y: baz(),
+ };
+
+ A {
+ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit.
+ // Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item,
+ };
+
+ Some(Data::MethodCallData(MethodCallData {
+ span: sub_span.unwrap(),
+ scope: self.enclosing_scope(id),
+ ref_id: def_id,
+ decl_id: Some(decl_id),
+ }));
+
+ Diagram {
+ // o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ graph: G,
+ }
+}
+
+fn matcher() {
+ TagTerminatedByteMatcher {
+ matcher: ByteMatcher {
+ pattern: b"<HTML",
+ mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
+ },
+ };
+}
+
+fn issue177() {
+ struct Foo<T> {
+ memb: T,
+ }
+ let foo = Foo::<i64> {
+ memb: 10,
+ };
+}
+
+fn issue201() {
+ let s = S {
+ a: 0,
+ ..b
+ };
+}
+
+fn issue201_2() {
+ let s = S {
+ a: S2 {
+ ..c
+ },
+ ..b
+ };
+}
+
+fn issue491() {
+ Foo {
+ guard: None,
+ arm: 0, // Comment
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/struct_lits_visual.rs b/src/tools/rustfmt/tests/target/struct_lits_visual.rs
new file mode 100644
index 000000000..a9627fb90
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct_lits_visual.rs
@@ -0,0 +1,49 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-indent_style: Visual
+
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo { a: x };
+
+ Foo { a: foo(), // comment
+ // comment
+ b: bar(),
+ ..something };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: f(), b: b() };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { // Comment
+ a: foo(), /* Comment */
+ // Comment
+ b: bar() /* Comment */ };
+
+ Foo { a: Bar, b: f() };
+
+ Quux { x: if cond {
+ bar();
+ },
+ y: baz() };
+
+ Baz { x: yxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ z: zzzzz /* test */ };
+
+ A { // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante
+ // hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item };
+
+ Diagram { // o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ graph: G }
+}
diff --git a/src/tools/rustfmt/tests/target/struct_lits_visual_multiline.rs b/src/tools/rustfmt/tests/target/struct_lits_visual_multiline.rs
new file mode 100644
index 000000000..3f43ef0c9
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct_lits_visual_multiline.rs
@@ -0,0 +1,49 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-indent_style: Visual
+// rustfmt-struct_lit_single_line: false
+
+// Struct literal expressions.
+
+fn main() {
+ let x = Bar;
+
+ // Comment
+ let y = Foo { a: x };
+
+ Foo { a: foo(), // comment
+ // comment
+ b: bar(),
+ ..something };
+
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { a: foo(),
+ b: bar() };
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo { // Comment
+ a: foo(), /* Comment */
+ // Comment
+ b: bar() /* Comment */ };
+
+ Foo { a: Bar,
+ b: foo() };
+
+ Quux { x: if cond {
+ bar();
+ },
+ y: baz() };
+
+ A { // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante
+ // hendrerit. Donec et mollis dolor.
+ first: item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ second: Item };
+
+ Diagram { // o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ graph: G }
+}
diff --git a/src/tools/rustfmt/tests/target/struct_tuple_visual.rs b/src/tools/rustfmt/tests/target/struct_tuple_visual.rs
new file mode 100644
index 000000000..f95f3fe4f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/struct_tuple_visual.rs
@@ -0,0 +1,36 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+// rustfmt-indent_style: Visual
+fn foo() {
+ Fooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(f(), b());
+
+ Foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo(// Comment
+ foo(), /* Comment */
+ // Comment
+ bar() /* Comment */);
+
+ Foo(Bar, f());
+
+ Quux(if cond {
+ bar();
+ },
+ baz());
+
+ Baz(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ zzzzz /* test */);
+
+ A(// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit
+ // amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante
+ // hendrerit. Donec et mollis dolor.
+ item(),
+ // Praesent et diam eget libero egestas mattis sit amet vitae augue.
+ // Nam tincidunt congue enim, ut porta lorem lacinia consectetur.
+ Item);
+
+ Diagram(// o This graph demonstrates how
+ // / \ significant whitespace is
+ // o o preserved.
+ // /|\ \
+ // o o o o
+ G)
+}
diff --git a/src/tools/rustfmt/tests/target/structs.rs b/src/tools/rustfmt/tests/target/structs.rs
new file mode 100644
index 000000000..4948e37a5
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/structs.rs
@@ -0,0 +1,358 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+/// A Doc comment
+#[AnAttribute]
+pub struct Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ #[AnAttribute]
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+ pub i: TypeForPublicField,
+}
+
+// Destructuring
+fn foo() {
+ S { x: 5, .. };
+ Struct { .. } = Struct { a: 1, b: 4 };
+ Struct { a, .. } = Struct { a: 1, b: 2, c: 3 };
+ TupleStruct(a, .., b) = TupleStruct(1, 2);
+ TupleStruct(..) = TupleStruct(3, 4);
+ TupleStruct(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ ..,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ) = TupleStruct(1, 2);
+}
+
+// #1095
+struct S<T /* comment */> {
+ t: T,
+}
+
+// #1029
+pub struct Foo {
+ #[doc(hidden)]
+ // This will NOT get deleted!
+ bar: String, // hi
+}
+
+// #1029
+struct X {
+ // `x` is an important number.
+ #[allow(unused)] // TODO: use
+ x: u32,
+}
+
+// #410
+#[allow(missing_docs)]
+pub struct Writebatch<K: Key> {
+ #[allow(dead_code)] // only used for holding the internal pointer
+ writebatch: RawWritebatch,
+ marker: PhantomData<K>,
+}
+
+struct Bar;
+
+struct NewType(Type, OtherType);
+
+struct NewInt<T: Copy>(
+ pub i32,
+ SomeType, // inline comment
+ T, // sup
+);
+
+struct Qux<
+ 'a,
+ N: Clone + 'a,
+ E: Clone + 'a,
+ G: Labeller<'a, N, E> + GraphWalk<'a, N, E>,
+ W: Write + Copy,
+>(
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, // Comment
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+ #[AnAttr]
+ // Comment
+ /// Testdoc
+ G,
+ pub W,
+);
+
+struct Tuple(
+ // Comment 1
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+ // Comment 2
+ BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+);
+
+// With a where-clause and generics.
+pub struct Foo<'a, Y: Baz>
+where
+ X: Whatever,
+{
+ f: SomeType, // Comment beside a field
+}
+
+struct Baz {
+ a: A, // Comment A
+ b: B, // Comment B
+ c: C, // Comment C
+}
+
+struct Baz {
+ a: A, // Comment A
+
+ b: B, // Comment B
+
+ c: C, // Comment C
+}
+
+struct Baz {
+ a: A,
+
+ b: B,
+ c: C,
+
+ d: D,
+}
+
+struct Baz {
+ // Comment A
+ a: A,
+
+ // Comment B
+ b: B,
+ // Comment C
+ c: C,
+}
+
+// Will this be a one-liner?
+struct Tuple(
+ A, // Comment
+ B,
+);
+
+pub struct State<F: FnMut() -> time::Timespec> {
+ now: F,
+}
+
+pub struct State<F: FnMut() -> ()> {
+ now: F,
+}
+
+pub struct State<F: FnMut()> {
+ now: F,
+}
+
+struct Palette {
+ /// A map of indices in the palette to a count of pixels in approximately
+ /// that color
+ foo: i32,
+}
+
+// Splitting a single line comment into a block previously had a misalignment
+// when the field had attributes
+struct FieldsWithAttributes {
+ // Pre Comment
+ #[rustfmt::skip] pub host:String, /* Post comment BBBBBBBBBBBBBB BBBBBBBBBBBBBBBB
+ * BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBBB BBBBBBBBBBB */
+ // Another pre comment
+ #[attr1]
+ #[attr2]
+ pub id: usize, /* CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCC
+ * CCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCC CCCCCCCCCCCC */
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep:
+ node::Handle<IdRef<'id, Node<K, V>>, Type, NodeType>,
+}
+
+struct Foo<T>(T);
+struct Foo<T>(T)
+where
+ T: Copy,
+ T: Eq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUU,
+);
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTT,
+)
+where
+ T: PartialEq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTTTT,
+)
+where
+ T: PartialEq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUUUUUUU,
+ TTTTTTTTTTTTTTTTTTT,
+ UUUUUUUUUUUUUUUUUUU,
+)
+where
+ T: PartialEq;
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTT, // Foo
+ UUUUUUUUUUUUUUUUUUUUUUUU, // Bar
+ // Baz
+ TTTTTTTTTTTTTTTTTTT,
+ // Qux (FIXME #572 - doc comment)
+ UUUUUUUUUUUUUUUUUUU,
+);
+
+mod m {
+ struct X<T>
+ where
+ T: Sized,
+ {
+ a: T,
+ }
+}
+
+struct Foo<T>(
+ TTTTTTTTTTTTTTTTTTT,
+ /// Qux
+ UUUUUUUUUUUUUUUUUUU,
+);
+
+struct Issue677 {
+ pub ptr: *const libc::c_void,
+ pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer),
+}
+
+struct Foo {}
+struct Foo {}
+struct Foo {
+ // comment
+}
+struct Foo {
+ // trailing space ->
+}
+struct Foo {
+ // comment
+}
+struct Foo(
+ // comment
+);
+
+struct LongStruct {
+ a: A,
+ the_quick_brown_fox_jumps_over_the_lazy_dog:
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+struct Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep:
+ node::Handle<IdRef<'id, Node<Key, Value>>, Type, NodeType>,
+}
+
+struct Foo<C = ()>(String);
+
+// #1364
+fn foo() {
+ convex_shape.set_point(0, &Vector2f { x: 400.0, y: 100.0 });
+ convex_shape.set_point(1, &Vector2f { x: 500.0, y: 70.0 });
+ convex_shape.set_point(2, &Vector2f { x: 450.0, y: 100.0 });
+ convex_shape.set_point(3, &Vector2f { x: 580.0, y: 150.0 });
+}
+
+// Vertical alignment
+struct Foo {
+ aaaaa: u32, // a
+
+ b: u32, // b
+ cc: u32, // cc
+
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 1
+ yy: u32, // comment2
+ zzz: u32, // comment3
+
+ aaaaaa: u32, // comment4
+ bb: u32, // comment5
+ // separate
+ dd: u32, // comment7
+ c: u32, // comment6
+
+ aaaaaaa: u32, /* multi
+ * line
+ * comment
+ */
+ b: u32, // hi
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ // separate
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ // separate
+ please_do_not_push_this_comment3: u32, // comment3
+}
+
+// structs with long identifier
+struct Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{}
+struct Looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{}
+struct Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{}
+struct Loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong
+{
+ x: i32,
+}
+
+// structs with visibility, do not duplicate visibility (#2110).
+pub(self) struct Foo {}
+pub(super) struct Foo {}
+pub(crate) struct Foo {}
+pub(self) struct Foo();
+pub(super) struct Foo();
+pub(crate) struct Foo();
+
+// #2125
+pub struct ReadinessCheckRegistry(
+ Mutex<HashMap<Arc<String>, Box<Fn() -> ReadinessCheck + Sync + Send>>>,
+);
+
+// #2144 unit struct with generics
+struct MyBox<T: ?Sized>;
+struct MyBoxx<T, S>
+where
+ T: ?Sized,
+ S: Clone;
+
+// #2208
+struct Test {
+ /// foo
+ #[serde(default)]
+ pub join: Vec<String>,
+ #[serde(default)]
+ pub tls: bool,
+}
+
+// #2818
+struct Paren((i32))
+where
+ i32: Trait;
+struct Parens((i32, i32))
+where
+ i32: Trait;
diff --git a/src/tools/rustfmt/tests/target/trailing-comma-never.rs b/src/tools/rustfmt/tests/target/trailing-comma-never.rs
new file mode 100644
index 000000000..ea199f5ff
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/trailing-comma-never.rs
@@ -0,0 +1,35 @@
+// rustfmt-trailing_comma: Never
+
+enum X {
+ A,
+ B
+}
+
+enum Y {
+ A,
+ B
+}
+
+enum TupX {
+ A(u32),
+ B(i32, u16)
+}
+
+enum TupY {
+ A(u32),
+ B(i32, u16)
+}
+
+enum StructX {
+ A { s: u16 },
+ B { u: u32, i: i32 }
+}
+
+enum StructY {
+ A { s: u16 },
+ B { u: u32, i: i32 }
+}
+
+static XXX: [i8; 64] = [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+];
diff --git a/src/tools/rustfmt/tests/target/trailing_commas.rs b/src/tools/rustfmt/tests/target/trailing_commas.rs
new file mode 100644
index 000000000..06f0a13b1
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/trailing_commas.rs
@@ -0,0 +1,78 @@
+// rustfmt-match_block_trailing_comma: true
+// rustfmt-trailing_comma: Always
+
+fn main() {
+ match foo {
+ x => {},
+ y => {
+ foo();
+ },
+ _ => x,
+ }
+}
+
+fn f<S, T,>(x: T, y: S,) -> T
+where
+ T: P,
+ S: Q,
+{
+ x
+}
+
+impl Trait for T
+where
+ T: P,
+{
+ fn f(x: T,) -> T
+ where
+ T: Q + R,
+ {
+ x
+ }
+}
+
+struct Pair<S, T,>
+where
+ T: P,
+ S: P + Q,
+{
+ a: T,
+ b: S,
+}
+
+struct TupPair<S, T,>(S, T,)
+where
+ T: P,
+ S: P + Q;
+
+enum E<S, T,>
+where
+ S: P,
+ T: P,
+{
+ A { a: T, },
+}
+
+type Double<T,>
+where
+ T: P,
+ T: Q,
+= Pair<T, T,>;
+
+extern "C" {
+ fn f<S, T,>(x: T, y: S,) -> T
+ where
+ T: P,
+ S: Q;
+}
+
+trait Q<S, T,>
+where
+ T: P,
+ S: R,
+{
+ fn f<U, V,>(self, x: T, y: S, z: U,) -> Self
+ where
+ U: P,
+ V: P;
+}
diff --git a/src/tools/rustfmt/tests/target/trailing_comments/hard_tabs.rs b/src/tools/rustfmt/tests/target/trailing_comments/hard_tabs.rs
new file mode 100644
index 000000000..35e72f1af
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/trailing_comments/hard_tabs.rs
@@ -0,0 +1,30 @@
+// rustfmt-version: Two
+// rustfmt-wrap_comments: true
+// rustfmt-hard_tabs: true
+
+impl Foo {
+ fn foo() {
+ bar(); // comment 1
+ // comment 2
+ // comment 3
+ baz();
+ }
+}
+
+fn lorem_ipsum() {
+ let f = bar(); // Donec consequat mi. Quisque vitae dolor. Integer lobortis. Maecenas id nulla. Lorem.
+ // Id turpis. Nam posuere lectus vitae nibh. Etiam tortor orci, sagittis
+ // malesuada, rhoncus quis, hendrerit eget, libero. Quisque commodo nulla at
+ // nunc. Mauris consequat, enim vitae venenatis sollicitudin, dolor orci
+ // bibendum enim, a sagittis nulla nunc quis elit. Phasellus augue. Nunc
+ // suscipit, magna tincidunt lacinia faucibus, lacus tellus ornare purus, a
+ // pulvinar lacus orci eget nibh. Maecenas sed nibh non lacus tempor faucibus.
+ // In hac habitasse platea dictumst. Vivamus a orci at nulla tristique
+ // condimentum. Donec arcu quam, dictum accumsan, convallis accumsan, cursus sit
+ // amet, ipsum. In pharetra sagittis nunc.
+ let b = baz();
+
+ let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
+ // TODO(emilio): It may make sense to make this range [.01, 10.0], to align
+ // with css-fonts-4's range of [1, 1000].
+}
diff --git a/src/tools/rustfmt/tests/target/trailing_comments/soft_tabs.rs b/src/tools/rustfmt/tests/target/trailing_comments/soft_tabs.rs
new file mode 100644
index 000000000..eba943042
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/trailing_comments/soft_tabs.rs
@@ -0,0 +1,30 @@
+// rustfmt-version: Two
+// rustfmt-wrap_comments: true
+
+pub const IFF_MULTICAST: ::c_int = 0x0000000800; // Supports multicast
+// Multicast using broadcst. add.
+
+pub const SQ_CRETAB: u16 = 0x000e; // CREATE TABLE
+pub const SQ_DRPTAB: u16 = 0x000f; // DROP TABLE
+pub const SQ_CREIDX: u16 = 0x0010; // CREATE INDEX
+//const SQ_DRPIDX: u16 = 0x0011; // DROP INDEX
+//const SQ_GRANT: u16 = 0x0012; // GRANT
+//const SQ_REVOKE: u16 = 0x0013; // REVOKE
+
+fn foo() {
+ let f = bar(); // Donec consequat mi. Quisque vitae dolor. Integer lobortis. Maecenas id nulla. Lorem.
+ // Id turpis. Nam posuere lectus vitae nibh. Etiam tortor orci, sagittis
+ // malesuada, rhoncus quis, hendrerit eget, libero. Quisque commodo nulla at
+ // nunc. Mauris consequat, enim vitae venenatis sollicitudin, dolor orci
+ // bibendum enim, a sagittis nulla nunc quis elit. Phasellus augue. Nunc
+ // suscipit, magna tincidunt lacinia faucibus, lacus tellus ornare purus, a
+ // pulvinar lacus orci eget nibh. Maecenas sed nibh non lacus tempor faucibus.
+ // In hac habitasse platea dictumst. Vivamus a orci at nulla tristique
+ // condimentum. Donec arcu quam, dictum accumsan, convallis accumsan, cursus sit
+ // amet, ipsum. In pharetra sagittis nunc.
+ let b = baz();
+
+ let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0]
+ // TODO(emilio): It may make sense to make this range [.01, 10.0], to align
+ // with css-fonts-4's range of [1, 1000].
+}
diff --git a/src/tools/rustfmt/tests/target/trait.rs b/src/tools/rustfmt/tests/target/trait.rs
new file mode 100644
index 000000000..7f067991b
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/trait.rs
@@ -0,0 +1,220 @@
+// Test traits
+
+trait Foo {
+ fn bar(x: i32) -> Baz<U> {
+ Baz::new()
+ }
+
+ fn baz(a: AAAAAAAAAAAAAAAAAAAAAA, b: BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB) -> RetType;
+
+ fn foo(
+ a: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA, // Another comment
+ b: BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB,
+ ) -> RetType; // Some comment
+
+ fn baz(&mut self) -> i32;
+
+ fn increment(&mut self, x: i32);
+
+ fn read(&mut self, x: BufReader<R> /* Used to be MemReader */)
+ where
+ R: Read;
+}
+
+pub trait WriteMessage {
+ fn write_message(&mut self, &FrontendMessage) -> io::Result<()>;
+}
+
+trait Runnable {
+ fn handler(self: &Runnable);
+}
+
+trait TraitWithExpr {
+ fn fn_with_expr(x: [i32; 1]);
+}
+
+trait Test {
+ fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F) -> Result<T, Self::Error>
+ where
+ F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+}
+
+trait T {}
+
+trait Foo {
+ type Bar: Baz;
+ type Inner: Foo = Box<Foo>;
+}
+
+trait ConstCheck<T>: Foo
+where
+ T: Baz,
+{
+ const J: i32;
+}
+
+trait Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttt<T>
+where
+ T: Foo,
+{
+}
+
+trait Ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt<T>
+where
+ T: Foo,
+{
+}
+
+trait FooBar<T>: Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt
+where
+ J: Bar,
+{
+ fn test();
+}
+
+trait WhereList<T, J>
+where
+ T: Foo,
+ J: Bar,
+{
+}
+
+trait X /* comment */ {}
+trait Y // comment
+{
+}
+
+// #2055
+pub trait Foo:
+// A and C
+A + C
+// and B
+ + B
+{}
+
+// #2158
+trait Foo {
+ type ItRev = <MergingUntypedTimeSeries<SliceSeries<SliceWindow>> as UntypedTimeSeries>::IterRev;
+ type IteRev =
+ <MergingUntypedTimeSeries<SliceSeries<SliceWindow>> as UntypedTimeSeries>::IterRev;
+}
+
+// #2331
+trait MyTrait<
+ AAAAAAAAAAAAAAAAAAAA,
+ BBBBBBBBBBBBBBBBBBBB,
+ CCCCCCCCCCCCCCCCCCCC,
+ DDDDDDDDDDDDDDDDDDDD,
+>
+{
+ fn foo() {}
+}
+
+// Trait aliases
+trait FooBar = Foo + Bar;
+trait FooBar<A, B, C> = Foo + Bar;
+pub trait FooBar = Foo + Bar;
+pub trait FooBar<A, B, C> = Foo + Bar;
+trait AAAAAAAAAAAAAAAAAA = BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDD;
+pub trait AAAAAAAAAAAAAAAAAA =
+ BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDD;
+trait AAAAAAAAAAAAAAAAAAA =
+ BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDD;
+trait AAAAAAAAAAAAAAAAAA =
+ BBBBBBBBBBBBBBBBBBB + CCCCCCCCCCCCCCCCCCCCCCCCCCCCC + DDDDDDDDDDDDDDDDDDD;
+trait AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA<A, B, C, D, E> =
+ FooBar;
+trait AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA<
+ A,
+ B,
+ C,
+ D,
+ E,
+> = FooBar;
+#[rustfmt::skip]
+trait FooBar = Foo
+ + Bar;
+
+// #2637
+auto trait Example {}
+pub auto trait PubExample {}
+pub unsafe auto trait PubUnsafeExample {}
+
+// #3006
+trait Foo<'a> {
+ type Bar<'a>;
+}
+
+impl<'a> Foo<'a> for i32 {
+ type Bar<'a> = i32;
+}
+
+// #3092
+pub mod test {
+ pub trait ATraitWithALooongName {}
+ pub trait ATrait:
+ ATraitWithALooongName
+ + ATraitWithALooongName
+ + ATraitWithALooongName
+ + ATraitWithALooongName
+ {
+ }
+}
+
+// Trait aliases with where clauses.
+trait A = where for<'b> &'b Self: Send;
+
+trait B = where for<'b> &'b Self: Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCC;
+trait B =
+ where for<'b> &'b Self: Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCCC;
+trait B = where
+ for<'b> &'b Self:
+ Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCCCCCCCCCCCCCCCC;
+trait B = where
+ for<'b> &'b Self: Send
+ + Clone
+ + Copy
+ + SomeTrait
+ + AAAAAAAA
+ + BBBBBBB
+ + CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC;
+
+trait B = where
+ for<'b> &'b Self: Send
+ + Clone
+ + Copy
+ + SomeTrait
+ + AAAAAAAA
+ + BBBBBBB
+ + CCCCCCCCC
+ + DDDDDDD
+ + DDDDDDDD
+ + DDDDDDDDD
+ + EEEEEEE;
+
+trait A<'a, 'b, 'c> = Debug<T> + Foo where for<'b> &'b Self: Send;
+
+trait B<'a, 'b, 'c> = Debug<T> + Foo
+where
+ for<'b> &'b Self: Send + Clone + Copy + SomeTrait + AAAAAAAA + BBBBBBB + CCCCCCCCC + DDDDDDD;
+
+trait B<'a, 'b, 'c, T> = Debug<'a, T>
+where
+ for<'b> &'b Self: Send
+ + Clone
+ + Copy
+ + SomeTrait
+ + AAAAAAAA
+ + BBBBBBB
+ + CCCCCCCCC
+ + DDDDDDD
+ + DDDDDDDD
+ + DDDDDDDDD
+ + EEEEEEE;
+
+trait Visible {
+ pub const C: i32;
+ pub type T;
+ pub fn f();
+ pub fn g() {}
+}
diff --git a/src/tools/rustfmt/tests/target/try-conversion.rs b/src/tools/rustfmt/tests/target/try-conversion.rs
new file mode 100644
index 000000000..04992a0a0
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/try-conversion.rs
@@ -0,0 +1,28 @@
+// rustfmt-use_try_shorthand: true
+
+fn main() {
+ let x = some_expr()?;
+
+ let y = a
+ .very
+ .loooooooooooooooooooooooooooooooooooooong()
+ .chain()
+ .inside()
+ .weeeeeeeeeeeeeee()?
+ .test()
+ .0
+ .x;
+}
+
+fn test() {
+ a?
+}
+
+fn issue1291() {
+ fs::create_dir_all(&gitfiledir).chain_err(|| {
+ format!(
+ "failed to create the {} submodule directory for the workarea",
+ name
+ )
+ })?;
+}
diff --git a/src/tools/rustfmt/tests/target/try_block.rs b/src/tools/rustfmt/tests/target/try_block.rs
new file mode 100644
index 000000000..19a3f3e14
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/try_block.rs
@@ -0,0 +1,29 @@
+// rustfmt-edition: 2018
+
+fn main() -> Result<(), !> {
+ let _x: Option<_> = try { 4 };
+
+ try {}
+}
+
+fn baz() -> Option<i32> {
+ if (1 == 1) {
+ return try { 5 };
+ }
+
+ // test
+ let x: Option<()> = try {
+ // try blocks are great
+ };
+
+ let y: Option<i32> = try { 6 }; // comment
+
+ let x: Option<i32> = try {
+ baz()?;
+ baz()?;
+ baz()?;
+ 7
+ };
+
+ return None;
+}
diff --git a/src/tools/rustfmt/tests/target/tuple.rs b/src/tools/rustfmt/tests/target/tuple.rs
new file mode 100644
index 000000000..68bb2f3bc
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/tuple.rs
@@ -0,0 +1,100 @@
+// Test tuple litterals
+
+fn foo() {
+ let a = (a, a, a, a, a);
+ let aaaaaaaaaaaaaaaa = (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaa,
+ );
+ let aaaaaaaaaaaaaaaaaaaaaa = (
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaa,
+ );
+ let a = (a,);
+
+ let b = (
+ // This is a comment
+ b, // Comment
+ b, /* Trailing comment */
+ );
+
+ // #1063
+ foo(x.0 .0);
+}
+
+fn a() {
+ ((
+ aaaaaaaa,
+ aaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaa,
+ ),)
+}
+
+fn b() {
+ (
+ (
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,
+ ),
+ bbbbbbbbbbbbbbbbbb,
+ )
+}
+
+fn issue550() {
+ self.visitor.visit_volume(
+ self.level.sector_id(sector),
+ (
+ floor_y,
+ if is_sky_flat(ceil_tex) {
+ from_wad_height(self.height_range.1)
+ } else {
+ ceil_y
+ },
+ ),
+ );
+}
+
+fn issue775() {
+ if indent {
+ let a = mk_object(&[
+ ("a".to_string(), Boolean(true)),
+ (
+ "b".to_string(),
+ Array(vec![
+ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+ mk_object(&[("d".to_string(), String("".to_string()))]),
+ ]),
+ ),
+ ]);
+ }
+}
+
+fn issue1725() {
+ bench_antialiased_lines!(
+ bench_draw_antialiased_line_segment_diagonal,
+ (10, 10),
+ (450, 450)
+ );
+ bench_antialiased_lines!(
+ bench_draw_antialiased_line_segment_shallow,
+ (10, 10),
+ (450, 80)
+ );
+}
+
+fn issue_4355() {
+ let _ = ((1,),).0 .0;
+}
+
+// https://github.com/rust-lang/rustfmt/issues/4410
+impl Drop for LockGuard {
+ fn drop(&mut self) {
+ LockMap::unlock(&self.0 .0, &self.0 .1);
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/tuple_v2.rs b/src/tools/rustfmt/tests/target/tuple_v2.rs
new file mode 100644
index 000000000..ba653291c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/tuple_v2.rs
@@ -0,0 +1,5 @@
+// rustfmt-version: Two
+
+fn issue_4355() {
+ let _ = ((1,),).0.0;
+}
diff --git a/src/tools/rustfmt/tests/target/type-ascription.rs b/src/tools/rustfmt/tests/target/type-ascription.rs
new file mode 100644
index 000000000..a2f082ba4
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/type-ascription.rs
@@ -0,0 +1,12 @@
+fn main() {
+ let xxxxxxxxxxx =
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: SomeTrait<AA, BB, CC>;
+
+ let xxxxxxxxxxxxxxx =
+ yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
+
+ let z = funk(yyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzz, wwwwww):
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA;
+
+ x: u32 - 1u32 / 10f32: u32
+}
diff --git a/src/tools/rustfmt/tests/target/type.rs b/src/tools/rustfmt/tests/target/type.rs
new file mode 100644
index 000000000..38cf909c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/type.rs
@@ -0,0 +1,175 @@
+// rustfmt-normalize_comments: true
+fn types() {
+ let x: [Vec<_>] = [];
+ let y: *mut [SomeType; konst_funk()] = expr();
+ let z: (/* #digits */ usize, /* exp */ i16) = funk();
+ let z: (usize /* #digits */, i16 /* exp */) = funk();
+}
+
+struct F {
+ f: extern "C" fn(x: u8, ... /* comment */),
+ g: extern "C" fn(x: u8, /* comment */ ...),
+ h: extern "C" fn(x: u8, ...),
+ i: extern "C" fn(
+ x: u8,
+ // comment 4
+ y: String, // comment 3
+ z: Foo,
+ // comment
+ ... // comment 2
+ ),
+}
+
+fn issue_1006(def_id_to_string: for<'a, 'b> unsafe fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) {}
+
+fn impl_trait_fn_1() -> impl Fn(i32) -> Option<u8> {}
+
+fn impl_trait_fn_2<E>() -> impl Future<Item = &'a i64, Error = E> {}
+
+fn issue_1234() {
+ do_parse!(name: take_while1!(is_token) >> (Header))
+}
+
+// #2510
+impl CombineTypes {
+ pub fn pop_callback(
+ &self,
+ query_id: Uuid,
+ ) -> Option<(
+ ProjectId,
+ Box<FnMut(&ProjectState, serde_json::Value, bool) -> () + Sync + Send>,
+ )> {
+ self.query_callbacks()(&query_id)
+ }
+}
+
+// #2859
+pub fn do_something<'a, T: Trait1 + Trait2 + 'a>(
+ &fooo: u32,
+) -> impl Future<
+ Item = (
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+> + 'a {
+}
+
+pub fn do_something<'a, T: Trait1 + Trait2 + 'a>(
+ &fooo: u32,
+) -> impl Future<
+ Item = (
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+> + Future<
+ Item = (
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+> + Future<
+ Item = (
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ impl Future<Item = (), Error = SomeError> + 'a,
+ ),
+ Error = SomeError,
+> + 'a + 'b + 'c {
+}
+
+// #3051
+token![impl];
+token![impl];
+
+// #3060
+macro_rules! foo {
+ ($foo_api: ty) => {
+ type Target = ($foo_api) + 'static;
+ };
+}
+
+type Target = (FooAPI) + 'static;
+
+// #3137
+fn foo<T>(t: T)
+where
+ T: (FnOnce() -> ()) + Clone,
+ U: (FnOnce() -> ()) + 'static,
+{
+}
+
+// #3117
+fn issue3117() {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ {
+ let opt: &mut Option<MyLongTypeHere> =
+ unsafe { &mut *self.future.get() };
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+// #3139
+fn issue3139() {
+ assert_eq!(
+ to_json_value(&None::<i32>).unwrap(),
+ json!({ "test": None::<i32> })
+ );
+}
+
+// #3180
+fn foo(
+ a: SomeLongComplexType,
+ b: SomeOtherLongComplexType,
+) -> Box<Future<Item = AnotherLongType, Error = ALongErrorType>> {
+}
+
+type MyFn = fn(
+ a: SomeLongComplexType,
+ b: SomeOtherLongComplexType,
+) -> Box<Future<Item = AnotherLongType, Error = ALongErrorType>>;
+
+// Const bound
+
+trait T: ~const Super {}
+
+const fn not_quite_const<S: ~const T>() -> i32 {
+ <S as T>::CONST
+}
+
+struct S<T: ~const ?Sized>(std::marker::PhantomData<T>);
+
+impl ~const T {}
+
+fn apit(_: impl ~const T) {}
+
+fn rpit() -> impl ~const T {
+ S
+}
+
+pub struct Foo<T: Trait>(T);
+impl<T: ~const Trait> Foo<T> {
+ fn new(t: T) -> Self {
+ Self(t)
+ }
+}
+
+// #4357
+type T = typeof(1);
+impl T for .. {}
diff --git a/src/tools/rustfmt/tests/target/type_alias.rs b/src/tools/rustfmt/tests/target/type_alias.rs
new file mode 100644
index 000000000..862f9ecbe
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/type_alias.rs
@@ -0,0 +1,76 @@
+// rustfmt-normalize_comments: true
+
+type PrivateTest<'a, I> = (
+ Box<Parser<Input = I, Output = char> + 'a>,
+ Box<Parser<Input = I, Output = char> + 'a>,
+);
+
+pub type PublicTest<'a, I, O> = Result<
+ Vec<MyLongType>,
+ Box<Parser<Input = I, Output = char> + 'a>,
+ Box<Parser<Input = I, Output = char> + 'a>,
+>;
+
+pub type LongGenericListTest<
+ 'a,
+ 'b,
+ 'c,
+ 'd,
+ LONGPARAMETERNAME,
+ LONGPARAMETERNAME,
+ LONGPARAMETERNAME,
+ A,
+ B,
+ C,
+> = Option<Vec<MyType>>;
+
+pub type Exactly100CharsTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B> = Vec<i32>;
+
+pub type Exactly101CharsTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B> =
+ Vec<Test>;
+
+pub type Exactly100CharsToEqualTest<'a, 'b, 'c, 'd, LONGPARAMETERNAME, LONGPARAMETERNAME, A, B, C> =
+ Vec<i32>;
+
+pub type GenericsFitButNotEqualTest<
+ 'a,
+ 'b,
+ 'c,
+ 'd,
+ LONGPARAMETERNAME,
+ LONGPARAMETERNAME,
+ A1,
+ B,
+ C,
+> = Vec<i32>;
+
+pub type CommentTest<
+ // Lifetime
+ 'a,
+ // Type
+ T,
+> = ();
+
+pub type WithWhereClause<LONGPARAMETERNAME, T>
+where
+ T: Clone,
+ LONGPARAMETERNAME: Clone + Eq + OtherTrait,
+= Option<T>;
+
+pub type Exactly100CharstoEqualWhereTest<T, U, PARAMET>
+where
+ T: Clone + Ord + Eq + SomeOtherTrait,
+= Option<T>;
+
+pub type Exactly101CharstoEqualWhereTest<T, U, PARAMETE>
+where
+ T: Clone + Ord + Eq + SomeOtherTrait,
+= Option<T>;
+
+type RegisterPlugin = unsafe fn(pt: *const c_char, plugin: *mut c_void, data: *mut CallbackData);
+
+// #1683
+pub type Between<Lhs, Rhs> =
+ super::operators::Between<Lhs, super::operators::And<AsExpr<Rhs, Lhs>, AsExpr<Rhs, Lhs>>>;
+pub type NotBetween<Lhs, Rhs> =
+ super::operators::NotBetween<Lhs, super::operators::And<AsExpr<Rhs, Lhs>, AsExpr<Rhs, Lhs>>>;
diff --git a/src/tools/rustfmt/tests/target/unicode.rs b/src/tools/rustfmt/tests/target/unicode.rs
new file mode 100644
index 000000000..34a4f4634
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/unicode.rs
@@ -0,0 +1,30 @@
+// rustfmt-wrap_comments: true
+
+fn foo() {
+ let s = "this line goes to 100: ͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶͶ";
+ let s = 42;
+
+ // a comment of length 80, with the starting sigil: ҘҘҘҘҘҘҘҘҘҘ ҘҘҘҘҘҘҘҘҘҘҘҘҘҘ
+ let s = 42;
+}
+
+pub fn bar(config: &Config) {
+ let csv = RefCell::new(create_csv(config, "foo"));
+ {
+ let mut csv = csv.borrow_mut();
+ for (i1, i2, i3) in iproduct!(0..2, 0..3, 0..3) {
+ csv.write_field(format!("γ[{}.{}.{}]", i1, i2, i3)).unwrap();
+ csv.write_field(format!("d[{}.{}.{}]", i1, i2, i3)).unwrap();
+ csv.write_field(format!("i[{}.{}.{}]", i1, i2, i3)).unwrap();
+ }
+ csv.write_record(None::<&[u8]>).unwrap();
+ }
+}
+
+// The NotUnicode line is below 100 wrt chars but over it wrt String::len
+fn baz() {
+ let our_error_b = result_b_from_func.or_else(|e| match e {
+ NotPresent => Err(e).chain_err(|| "env var wasn't provided"),
+ NotUnicode(_) => Err(e).chain_err(|| "env var was very very very bork文字化ã"),
+ });
+}
diff --git a/src/tools/rustfmt/tests/target/unindent_if_else_cond_comment.rs b/src/tools/rustfmt/tests/target/unindent_if_else_cond_comment.rs
new file mode 100644
index 000000000..98621b1ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/unindent_if_else_cond_comment.rs
@@ -0,0 +1,27 @@
+// Comments on else block. See #1575.
+
+fn example() {
+ // `if` comment
+ if x {
+ foo();
+ // `else if` comment
+ } else if y {
+ foo();
+ // Comment on `else if`.
+ // Comment on `else if`.
+ } else if z {
+ bar();
+ /*
+ * Multi line comment on `else if`
+ */
+ } else if xx {
+ bar();
+ /* Single line comment on `else if` */
+ } else if yy {
+ foo();
+ // `else` comment
+ } else {
+ foo();
+ // Comment at the end of `else` block
+ };
+}
diff --git a/src/tools/rustfmt/tests/target/unions.rs b/src/tools/rustfmt/tests/target/unions.rs
new file mode 100644
index 000000000..8ed16b269
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/unions.rs
@@ -0,0 +1,198 @@
+// rustfmt-normalize_comments: true
+// rustfmt-wrap_comments: true
+
+/// A Doc comment
+#[AnAttribute]
+pub union Foo {
+ #[rustfmt::skip]
+ f : SomeType, // Comment beside a field
+ f: SomeType, // Comment beside a field
+ // Comment on a field
+ #[AnAttribute]
+ g: SomeOtherType,
+ /// A doc comment on a field
+ h: AThirdType,
+ pub i: TypeForPublicField,
+}
+
+// #1029
+pub union Foo {
+ #[doc(hidden)]
+ // This will NOT get deleted!
+ bar: String, // hi
+}
+
+// #1029
+union X {
+ // `x` is an important number.
+ #[allow(unused)] // TODO: use
+ x: u32,
+}
+
+// #410
+#[allow(missing_docs)]
+pub union Writebatch<K: Key> {
+ #[allow(dead_code)] // only used for holding the internal pointer
+ writebatch: RawWritebatch,
+ marker: PhantomData<K>,
+}
+
+// With a where-clause and generics.
+pub union Foo<'a, Y: Baz>
+where
+ X: Whatever,
+{
+ f: SomeType, // Comment beside a field
+}
+
+union Baz {
+ a: A, // Comment A
+ b: B, // Comment B
+ c: C, // Comment C
+}
+
+union Baz {
+ a: A, // Comment A
+
+ b: B, // Comment B
+
+ c: C, // Comment C
+}
+
+union Baz {
+ a: A,
+
+ b: B,
+ c: C,
+
+ d: D,
+}
+
+union Baz {
+ // Comment A
+ a: A,
+
+ // Comment B
+ b: B,
+ // Comment C
+ c: C,
+}
+
+pub union State<F: FnMut() -> time::Timespec> {
+ now: F,
+}
+
+pub union State<F: FnMut() -> ()> {
+ now: F,
+}
+
+pub union State<F: FnMut()> {
+ now: F,
+}
+
+union Palette {
+ /// A map of indices in the palette to a count of pixels in approximately
+ /// that color
+ foo: i32,
+}
+
+// Splitting a single line comment into a block previously had a misalignment
+// when the field had attributes
+union FieldsWithAttributes {
+ // Pre Comment
+ #[rustfmt::skip] pub host:String, /* Post comment BBBBBBBBBBBBBB BBBBBBBBBBBBBBBB
+ * BBBBBBBBBBBBBBBB BBBBBBBBBBBBBBBBB BBBBBBBBBBB */
+ // Another pre comment
+ #[attr1]
+ #[attr2]
+ pub id: usize, /* CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCCCC
+ * CCCCCCCCCCCCCCCCCC CCCCCCCCCCCCCC CCCCCCCCCCCC */
+}
+
+union Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep:
+ node::Handle<IdRef<'id, Node<K, V>>, Type, NodeType>,
+}
+
+mod m {
+ union X<T>
+ where
+ T: Sized,
+ {
+ a: T,
+ }
+}
+
+union Issue677 {
+ pub ptr: *const libc::c_void,
+ pub trace: fn(obj: *const libc::c_void, tracer: *mut JSTracer),
+}
+
+union Foo {}
+union Foo {}
+union Foo {
+ // comment
+}
+union Foo {
+ // trailing space ->
+}
+union Foo {
+ // comment
+}
+
+union LongUnion {
+ a: A,
+ the_quick_brown_fox_jumps_over_the_lazy_dog:
+ AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA,
+}
+
+union Deep {
+ deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeep:
+ node::Handle<IdRef<'id, Node<Key, Value>>, Type, NodeType>,
+}
+
+// #1364
+fn foo() {
+ convex_shape.set_point(0, &Vector2f { x: 400.0, y: 100.0 });
+ convex_shape.set_point(1, &Vector2f { x: 500.0, y: 70.0 });
+ convex_shape.set_point(2, &Vector2f { x: 450.0, y: 100.0 });
+ convex_shape.set_point(3, &Vector2f { x: 580.0, y: 150.0 });
+}
+
+// Vertical alignment
+union Foo {
+ aaaaa: u32, // a
+
+ b: u32, // b
+ cc: u32, // cc
+
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 1
+ yy: u32, // comment2
+ zzz: u32, // comment3
+
+ aaaaaa: u32, // comment4
+ bb: u32, // comment5
+ // separate
+ dd: u32, // comment7
+ c: u32, // comment6
+
+ aaaaaaa: u32, /* multi
+ * line
+ * comment
+ */
+ b: u32, // hi
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ // separate
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ please_do_not_push_this_comment3: u32, // comment3
+
+ do_not_push_this_comment1: u32, // comment1
+ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: u32, // 2
+ // separate
+ please_do_not_push_this_comment3: u32, // comment3
+}
diff --git a/src/tools/rustfmt/tests/target/unsafe-mod.rs b/src/tools/rustfmt/tests/target/unsafe-mod.rs
new file mode 100644
index 000000000..05ba2f54f
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/unsafe-mod.rs
@@ -0,0 +1,7 @@
+// These are supported by rustc syntactically but not semantically.
+
+#[cfg(any())]
+unsafe mod m {}
+
+#[cfg(any())]
+unsafe extern "C++" {}
diff --git a/src/tools/rustfmt/tests/target/visibility.rs b/src/tools/rustfmt/tests/target/visibility.rs
new file mode 100644
index 000000000..ca078422c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/visibility.rs
@@ -0,0 +1,8 @@
+// #2398
+pub mod outer_mod {
+ pub mod inner_mod {
+ pub(in outer_mod) fn outer_mod_visible_fn() {}
+ pub(super) fn super_mod_visible_fn() {}
+ pub(self) fn inner_mod_visible_fn() {}
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/visual-fn-type.rs b/src/tools/rustfmt/tests/target/visual-fn-type.rs
new file mode 100644
index 000000000..052acde02
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/visual-fn-type.rs
@@ -0,0 +1,9 @@
+// rustfmt-indent_style: Visual
+type CNodeSetAtts = unsafe extern "C" fn(node: *const RsvgNode,
+ node_impl: *const RsvgCNodeImpl,
+ handle: *const RsvgHandle,
+ pbag: *const PropertyBag);
+type CNodeDraw = unsafe extern "C" fn(node: *const RsvgNode,
+ node_impl: *const RsvgCNodeImpl,
+ draw_ctx: *const RsvgDrawingCtx,
+ dominate: i32);
diff --git a/src/tools/rustfmt/tests/target/where-clause-rfc.rs b/src/tools/rustfmt/tests/target/where-clause-rfc.rs
new file mode 100644
index 000000000..9c43e91d3
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/where-clause-rfc.rs
@@ -0,0 +1,156 @@
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape)
+where
+ T: FOo,
+ U: Bar,
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape)
+where
+ T: FOo,
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(
+ node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape,
+ shape: &Shape,
+) where
+ T: FOo,
+ U: Bar,
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(
+ node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape,
+ shape: &Shape,
+) where
+ T: FOo,
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(
+ node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape,
+) -> Option<String>
+where
+ T: FOo,
+ U: Bar,
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(
+ node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape,
+) -> Option<String>
+where
+ T: FOo,
+{
+ let mut effects = HashMap::new();
+}
+
+pub trait Test {
+ fn very_long_method_name<F>(self, f: F) -> MyVeryLongReturnType
+ where
+ F: FnMut(Self::Item) -> bool;
+
+ fn exactly_100_chars1<F>(self, f: F) -> MyVeryLongReturnType
+ where
+ F: FnMut(Self::Item) -> bool;
+}
+
+fn very_long_function_name<F>(very_long_argument: F) -> MyVeryLongReturnType
+where
+ F: FnMut(Self::Item) -> bool,
+{
+}
+
+struct VeryLongTupleStructName<A, B, C, D, E>(LongLongTypename, LongLongTypename, i32, i32)
+where
+ A: LongTrait;
+
+struct Exactly100CharsToSemicolon<A, B, C, D, E>(LongLongTypename, i32, i32)
+where
+ A: LongTrait1234;
+
+struct AlwaysOnNextLine<LongLongTypename, LongTypename, A, B, C, D, E, F>
+where
+ A: LongTrait,
+{
+ x: i32,
+}
+
+pub trait SomeTrait<T>
+where
+ T: Something
+ + Sync
+ + Send
+ + Display
+ + Debug
+ + Copy
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read
+ + FromStr,
+{
+}
+
+// #2020
+impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
+ fn elaborate_bounds<F>(&mut self, bounds: &[ty::PolyTraitRef<'tcx>], mut mk_cand: F)
+ where
+ F: for<'b> FnMut(
+ &mut ProbeContext<'b, 'gcx, 'tcx>,
+ ty::PolyTraitRef<'tcx>,
+ ty::AssociatedItem,
+ ),
+ {
+ // ...
+ }
+}
+
+// #2497
+fn handle_update<'a, Tab, Conn, R, C>(
+ executor: &Executor<PooledConnection<ConnectionManager<Conn>>>,
+ change_set: &'a C,
+) -> ExecutionResult
+where
+ &'a C: Identifiable + AsChangeset<Target = Tab> + HasTable<Table = Tab>,
+ <&'a C as AsChangeset>::Changeset: QueryFragment<Conn::Backend>,
+ Tab: Table + HasTable<Table = Tab>,
+ Tab::PrimaryKey: EqAll<<&'a C as Identifiable>::Id>,
+ Tab::FromClause: QueryFragment<Conn::Backend>,
+ Tab: FindDsl<<&'a C as Identifiable>::Id>,
+ Find<Tab, <&'a C as Identifiable>::Id>: IntoUpdateTarget<Table = Tab>,
+ <Find<Tab, <&'a C as Identifiable>::Id> as IntoUpdateTarget>::WhereClause:
+ QueryFragment<Conn::Backend>,
+ Tab::Query: FilterDsl<<Tab::PrimaryKey as EqAll<<&'a C as Identifiable>::Id>>::Output>,
+ Filter<Tab::Query, <Tab::PrimaryKey as EqAll<<&'a C as Identifiable>::Id>>::Output>: LimitDsl,
+ Limit<Filter<Tab::Query, <Tab::PrimaryKey as EqAll<<&'a C as Identifiable>::Id>>::Output>>:
+ QueryDsl
+ + BoxedDsl<
+ 'a,
+ Conn::Backend,
+ Output = BoxedSelectStatement<'a, R::SqlType, Tab, Conn::Backend>,
+ >,
+ R: LoadingHandler<Conn, Table = Tab, SqlType = Tab::SqlType>
+ + GraphQLType<TypeInfo = (), Context = ()>,
+{
+ unimplemented!()
+}
diff --git a/src/tools/rustfmt/tests/target/where-clause.rs b/src/tools/rustfmt/tests/target/where-clause.rs
new file mode 100644
index 000000000..eb2f8d5e6
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/where-clause.rs
@@ -0,0 +1,107 @@
+// rustfmt-indent_style: Visual
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape)
+ where T: FOo,
+ U: Bar
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode, rule: &Rule, args: &[Arg], shape: &Shape)
+ where T: FOo
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape,
+ shape: &Shape)
+ where T: FOo,
+ U: Bar
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape,
+ shape: &Shape)
+ where T: FOo
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape)
+ -> Option<String>
+ where T: FOo,
+ U: Bar
+{
+ let mut effects = HashMap::new();
+}
+
+fn reflow_list_node_with_rule(node: &CompoundNode,
+ rule: &Rule,
+ args: &[Arg],
+ shape: &Shape)
+ -> Option<String>
+ where T: FOo
+{
+ let mut effects = HashMap::new();
+}
+
+pub trait Test {
+ fn very_long_method_name<F>(self, f: F) -> MyVeryLongReturnType
+ where F: FnMut(Self::Item) -> bool;
+
+ fn exactly_100_chars1<F>(self, f: F) -> MyVeryLongReturnType
+ where F: FnMut(Self::Item) -> bool;
+}
+
+fn very_long_function_name<F>(very_long_argument: F) -> MyVeryLongReturnType
+ where F: FnMut(Self::Item) -> bool
+{
+}
+
+struct VeryLongTupleStructName<A, B, C, D, E>(LongLongTypename, LongLongTypename, i32, i32)
+ where A: LongTrait;
+
+struct Exactly100CharsToSemicolon<A, B, C, D, E>(LongLongTypename, i32, i32) where A: LongTrait1234;
+
+struct AlwaysOnNextLine<LongLongTypename, LongTypename, A, B, C, D, E, F>
+ where A: LongTrait
+{
+ x: i32,
+}
+
+pub trait SomeTrait<T>
+ where T: Something
+ + Sync
+ + Send
+ + Display
+ + Debug
+ + Copy
+ + Hash
+ + Debug
+ + Display
+ + Write
+ + Read
+ + FromStr
+{
+}
+
+// #2020
+impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
+ fn elaborate_bounds<F>(&mut self, bounds: &[ty::PolyTraitRef<'tcx>], mut mk_cand: F)
+ where F: for<'b> FnMut(&mut ProbeContext<'b, 'gcx, 'tcx>,
+ ty::PolyTraitRef<'tcx>,
+ ty::AssociatedItem)
+ {
+ // ...
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/width-heuristics.rs b/src/tools/rustfmt/tests/target/width-heuristics.rs
new file mode 100644
index 000000000..e177a2152
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/width-heuristics.rs
@@ -0,0 +1,24 @@
+// rustfmt-max_width: 120
+
+// elems on multiple lines for max_width 100, but same line for max_width 120
+fn foo(e: Enum) {
+ match e {
+ Enum::Var { elem1, elem2, elem3 } => {
+ return;
+ }
+ }
+}
+
+// elems not on same line for either max_width 100 or 120
+fn bar(e: Enum) {
+ match e {
+ Enum::Var {
+ elem1,
+ elem2,
+ elem3,
+ elem4,
+ } => {
+ return;
+ }
+ }
+}
diff --git a/src/tools/rustfmt/tests/target/wrap_comments_should_not_imply_format_doc_comments.rs b/src/tools/rustfmt/tests/target/wrap_comments_should_not_imply_format_doc_comments.rs
new file mode 100644
index 000000000..d61d4d7c2
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/wrap_comments_should_not_imply_format_doc_comments.rs
@@ -0,0 +1,16 @@
+// rustfmt-wrap_comments: true
+
+/// Foo
+///
+/// # Example
+/// ```
+/// # #![cfg_attr(not(dox), feature(cfg_target_feature, target_feature, stdsimd))]
+/// # #![cfg_attr(not(dox), no_std)]
+/// fn foo() { }
+/// ```
+fn foo() {}
+
+/// A long commment for wrapping
+/// This is a long long long long long long long long long long long long long
+/// long long long long long long long sentence.
+fn bar() {}
diff --git a/src/tools/rustfmt/tests/writemode/source/fn-single-line.rs b/src/tools/rustfmt/tests/writemode/source/fn-single-line.rs
new file mode 100644
index 000000000..ab1e13e17
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/source/fn-single-line.rs
@@ -0,0 +1,80 @@
+// rustfmt-fn_single_line: true
+// rustfmt-emit_mode: checkstyle
+// Test single-line functions.
+
+fn foo_expr() {
+ 1
+}
+
+fn foo_stmt() {
+ foo();
+}
+
+fn foo_decl_local() {
+ let z = 5;
+ }
+
+fn foo_decl_item(x: &mut i32) {
+ x = 3;
+}
+
+ fn empty() {
+
+}
+
+fn foo_return() -> String {
+ "yay"
+}
+
+fn foo_where() -> T where T: Sync {
+ let x = 2;
+}
+
+fn fooblock() {
+ {
+ "inner-block"
+ }
+}
+
+fn fooblock2(x: i32) {
+ let z = match x {
+ _ => 2,
+ };
+}
+
+fn comment() {
+ // this is a test comment
+ 1
+}
+
+fn comment2() {
+ // multi-line comment
+ let z = 2;
+ 1
+}
+
+fn only_comment() {
+ // Keep this here
+}
+
+fn aaaaaaaaaaaaaaaaa_looooooooooooooooooooooong_name() {
+ let z = "aaaaaaawwwwwwwwwwwwwwwwwwwwwwwwwwww";
+}
+
+fn lots_of_space () {
+ 1
+}
+
+fn mac() -> Vec<i32> { vec![] }
+
+trait CoolTypes {
+ fn dummy(&self) {
+ }
+}
+
+trait CoolerTypes { fn dummy(&self) {
+}
+}
+
+fn Foo<T>() where T: Bar {
+}
diff --git a/src/tools/rustfmt/tests/writemode/source/json.rs b/src/tools/rustfmt/tests/writemode/source/json.rs
new file mode 100644
index 000000000..89dcf6941
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/source/json.rs
@@ -0,0 +1,80 @@
+// rustfmt-fn_single_line: true
+// rustfmt-emit_mode: json
+// Test single-line functions.
+
+fn foo_expr() {
+ 1
+}
+
+fn foo_stmt() {
+ foo();
+}
+
+fn foo_decl_local() {
+ let z = 5;
+ }
+
+fn foo_decl_item(x: &mut i32) {
+ x = 3;
+}
+
+ fn empty() {
+
+}
+
+fn foo_return() -> String {
+ "yay"
+}
+
+fn foo_where() -> T where T: Sync {
+ let x = 2;
+}
+
+fn fooblock() {
+ {
+ "inner-block"
+ }
+}
+
+fn fooblock2(x: i32) {
+ let z = match x {
+ _ => 2,
+ };
+}
+
+fn comment() {
+ // this is a test comment
+ 1
+}
+
+fn comment2() {
+ // multi-line comment
+ let z = 2;
+ 1
+}
+
+fn only_comment() {
+ // Keep this here
+}
+
+fn aaaaaaaaaaaaaaaaa_looooooooooooooooooooooong_name() {
+ let z = "aaaaaaawwwwwwwwwwwwwwwwwwwwwwwwwwww";
+}
+
+fn lots_of_space () {
+ 1
+}
+
+fn mac() -> Vec<i32> { vec![] }
+
+trait CoolTypes {
+ fn dummy(&self) {
+ }
+}
+
+trait CoolerTypes { fn dummy(&self) {
+}
+}
+
+fn Foo<T>() where T: Bar {
+}
diff --git a/src/tools/rustfmt/tests/writemode/source/modified.rs b/src/tools/rustfmt/tests/writemode/source/modified.rs
new file mode 100644
index 000000000..948beb348
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/source/modified.rs
@@ -0,0 +1,14 @@
+// rustfmt-write_mode: modified
+// Test "modified" output
+
+fn
+blah
+()
+{ }
+
+
+#[cfg
+( a , b
+)]
+fn
+main() {}
diff --git a/src/tools/rustfmt/tests/writemode/source/stdin.rs b/src/tools/rustfmt/tests/writemode/source/stdin.rs
new file mode 100644
index 000000000..06f8a0c28
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/source/stdin.rs
@@ -0,0 +1,6 @@
+
+fn
+ some( )
+{
+}
+fn main () {}
diff --git a/src/tools/rustfmt/tests/writemode/target/checkstyle.xml b/src/tools/rustfmt/tests/writemode/target/checkstyle.xml
new file mode 100644
index 000000000..05bc3a252
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/target/checkstyle.xml
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="utf-8"?>
+<checkstyle version="4.3"><file name="tests/writemode/source/fn-single-line.rs"><error line="5" severity="warning" message="Should be `fn foo_expr() { 1 }`" /><error line="7" severity="warning" message="Should be `fn foo_stmt() { foo(); }`" /><error line="9" severity="warning" message="Should be `fn foo_decl_local() { let z = 5; }`" /><error line="11" severity="warning" message="Should be `fn foo_decl_item(x: &amp;mut i32) { x = 3; }`" /><error line="13" severity="warning" message="Should be `fn empty() {}`" /><error line="15" severity="warning" message="Should be `fn foo_return() -&gt; String { &quot;yay&quot; }`" /><error line="17" severity="warning" message="Should be `fn foo_where() -&gt; T`" /><error line="18" severity="warning" message="Should be `where`" /><error line="19" severity="warning" message="Should be ` T: Sync,`" /><error line="20" severity="warning" message="Should be `{`" /><error line="55" severity="warning" message="Should be `fn lots_of_space() { 1 }`" /><error line="60" severity="warning" message="Should be ` fn dummy(&amp;self) {}`" /><error line="63" severity="warning" message="Should be `trait CoolerTypes {`" /><error line="64" severity="warning" message="Should be ` fn dummy(&amp;self) {}`" /><error line="67" severity="warning" message="Should be `fn Foo&lt;T&gt;()`" /><error line="68" severity="warning" message="Should be `where`" /><error line="69" severity="warning" message="Should be ` T: Bar,`" /><error line="70" severity="warning" message="Should be `{`" /></file></checkstyle>
diff --git a/src/tools/rustfmt/tests/writemode/target/modified.txt b/src/tools/rustfmt/tests/writemode/target/modified.txt
new file mode 100644
index 000000000..5c0539a66
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/target/modified.txt
@@ -0,0 +1,5 @@
+4 4 1
+fn blah() {}
+10 5 2
+#[cfg(a, b)]
+fn main() {}
diff --git a/src/tools/rustfmt/tests/writemode/target/output.json b/src/tools/rustfmt/tests/writemode/target/output.json
new file mode 100644
index 000000000..d8b5467ee
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/target/output.json
@@ -0,0 +1 @@
+[{"name":"tests/writemode/source/json.rs","mismatches":[{"original_begin_line":5,"original_end_line":7,"expected_begin_line":5,"expected_end_line":5,"original":"fn foo_expr() {\n 1\n}\n","expected":"fn foo_expr() { 1 }\n"},{"original_begin_line":9,"original_end_line":11,"expected_begin_line":7,"expected_end_line":7,"original":"fn foo_stmt() {\n foo();\n}\n","expected":"fn foo_stmt() { foo(); }\n"},{"original_begin_line":13,"original_end_line":15,"expected_begin_line":9,"expected_end_line":9,"original":"fn foo_decl_local() {\n let z = 5;\n }\n","expected":"fn foo_decl_local() { let z = 5; }\n"},{"original_begin_line":17,"original_end_line":19,"expected_begin_line":11,"expected_end_line":11,"original":"fn foo_decl_item(x: &mut i32) {\n x = 3;\n}\n","expected":"fn foo_decl_item(x: &mut i32) { x = 3; }\n"},{"original_begin_line":21,"original_end_line":21,"expected_begin_line":13,"expected_end_line":13,"original":" fn empty() {\n","expected":"fn empty() {}\n"},{"original_begin_line":23,"original_end_line":23,"expected_begin_line":15,"expected_end_line":15,"original":"}\n","expected":"fn foo_return() -> String { \"yay\" }\n"},{"original_begin_line":25,"original_end_line":29,"expected_begin_line":17,"expected_end_line":20,"original":"fn foo_return() -> String {\n \"yay\"\n}\n\nfn foo_where() -> T where T: Sync {\n","expected":"fn foo_where() -> T\nwhere\n T: Sync,\n{\n"},{"original_begin_line":64,"original_end_line":66,"expected_begin_line":55,"expected_end_line":55,"original":"fn lots_of_space () {\n 1 \n}\n","expected":"fn lots_of_space() { 1 }\n"},{"original_begin_line":71,"original_end_line":72,"expected_begin_line":60,"expected_end_line":60,"original":" fn dummy(&self) {\n }\n","expected":" fn dummy(&self) {}\n"},{"original_begin_line":75,"original_end_line":75,"expected_begin_line":63,"expected_end_line":64,"original":"trait CoolerTypes { fn dummy(&self) { \n","expected":"trait CoolerTypes {\n fn dummy(&self) {}\n"},{"original_begin_line":77,"original_end_line":77,"expected_begin_line":66,"expected_end_line":66,"original":"}\n","expected":""},{"original_begin_line":79,"original_end_line":79,"expected_begin_line":67,"expected_end_line":70,"original":"fn Foo<T>() where T: Bar {\n","expected":"fn Foo<T>()\nwhere\n T: Bar,\n{\n"}]}]
diff --git a/src/tools/rustfmt/tests/writemode/target/stdin.json b/src/tools/rustfmt/tests/writemode/target/stdin.json
new file mode 100644
index 000000000..dbf2c4863
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/target/stdin.json
@@ -0,0 +1 @@
+[{"name":"<stdin>","mismatches":[{"original_begin_line":1,"original_end_line":6,"expected_begin_line":1,"expected_end_line":2,"original":"\nfn\n some( )\n{\n}\nfn main () {}\n","expected":"fn some() {}\nfn main() {}\n"}]}]
diff --git a/src/tools/rustfmt/tests/writemode/target/stdin.xml b/src/tools/rustfmt/tests/writemode/target/stdin.xml
new file mode 100644
index 000000000..a7301bbc5
--- /dev/null
+++ b/src/tools/rustfmt/tests/writemode/target/stdin.xml
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="utf-8"?>
+<checkstyle version="4.3"><file name="<stdin>"><error line="1" severity="warning" message="Should be `fn some() {}`" /><error line="2" severity="warning" message="Should be `fn main() {}`" /></file></checkstyle>
diff --git a/src/tools/rustfmt/triagebot.toml b/src/tools/rustfmt/triagebot.toml
new file mode 100644
index 000000000..fa0824ac5
--- /dev/null
+++ b/src/tools/rustfmt/triagebot.toml
@@ -0,0 +1 @@
+[assign]
diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml
new file mode 100644
index 000000000..96ab42b47
--- /dev/null
+++ b/src/tools/tidy/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "tidy"
+version = "0.1.0"
+edition = "2021"
+autobins = false
+
+[dependencies]
+cargo_metadata = "0.14"
+regex = "1"
+lazy_static = "1"
+walkdir = "2"
+crossbeam-utils = "0.8.0"
+
+[[bin]]
+name = "rust-tidy"
+path = "src/main.rs"
diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs
new file mode 100644
index 000000000..9615c4db6
--- /dev/null
+++ b/src/tools/tidy/src/bins.rs
@@ -0,0 +1,151 @@
+//! Tidy check to ensure that there are no binaries checked into the source tree
+//! by accident.
+//!
+//! In the past we've accidentally checked in test binaries and such which add a
+//! huge amount of bloat to the Git history, so it's good to just ensure we
+//! don't do that again.
+
+pub use os_impl::*;
+
+// All files are executable on Windows, so just check on Unix.
+#[cfg(windows)]
+mod os_impl {
+ use std::path::Path;
+
+ pub fn check_filesystem_support(_sources: &[&Path], _output: &Path) -> bool {
+ return false;
+ }
+
+ pub fn check(_path: &Path, _bad: &mut bool) {}
+}
+
+#[cfg(unix)]
+mod os_impl {
+ use std::fs;
+ use std::os::unix::prelude::*;
+ use std::path::Path;
+ use std::process::{Command, Stdio};
+
+ enum FilesystemSupport {
+ Supported,
+ Unsupported,
+ ReadOnlyFs,
+ }
+
+ use FilesystemSupport::*;
+
+ fn is_executable(path: &Path) -> std::io::Result<bool> {
+ Ok(path.metadata()?.mode() & 0o111 != 0)
+ }
+
+ pub fn check_filesystem_support(sources: &[&Path], output: &Path) -> bool {
+ // We want to avoid false positives on filesystems that do not support the
+ // executable bit. This occurs on some versions of Windows' Linux subsystem,
+ // for example.
+ //
+ // We try to create the temporary file first in the src directory, which is
+ // the preferred location as it's most likely to be on the same filesystem,
+ // and then in the output (`build`) directory if that fails. Sometimes we
+ // see the source directory mounted as read-only which means we can't
+ // readily create a file there to test.
+ //
+ // See #36706 and #74753 for context.
+
+ fn check_dir(dir: &Path) -> FilesystemSupport {
+ let path = dir.join("tidy-test-file");
+ match fs::File::create(&path) {
+ Ok(file) => {
+ let exec = is_executable(&path).unwrap_or(false);
+ std::mem::drop(file);
+ std::fs::remove_file(&path).expect("Deleted temp file");
+ // If the file is executable, then we assume that this
+ // filesystem does not track executability, so skip this check.
+ return if exec { Unsupported } else { Supported };
+ }
+ Err(e) => {
+ // If the directory is read-only or we otherwise don't have rights,
+ // just don't run this check.
+ //
+ // 30 is the "Read-only filesystem" code at least in one CI
+ // environment.
+ if e.raw_os_error() == Some(30) {
+ eprintln!("tidy: Skipping binary file check, read-only filesystem");
+ return ReadOnlyFs;
+ }
+
+ panic!("unable to create temporary file `{:?}`: {:?}", path, e);
+ }
+ };
+ }
+
+ for &source_dir in sources {
+ match check_dir(source_dir) {
+ Unsupported => return false,
+ ReadOnlyFs => {
+ return match check_dir(output) {
+ Supported => true,
+ _ => false,
+ };
+ }
+ _ => {}
+ }
+ }
+
+ return true;
+ }
+
+ #[cfg(unix)]
+ pub fn check(path: &Path, bad: &mut bool) {
+ const ALLOWED: &[&str] = &["configure"];
+
+ crate::walk_no_read(
+ path,
+ &mut |path| {
+ crate::filter_dirs(path)
+ || path.ends_with("src/etc")
+ // This is a list of directories that we almost certainly
+ // don't need to walk. A future PR will likely want to
+ // remove these in favor of crate::walk_no_read using git
+ // ls-files to discover the paths we should check, which
+ // would naturally ignore all of these directories. It's
+ // also likely faster than walking the directory tree
+ // directly (since git is just reading from a couple files
+ // to produce the results).
+ || path.ends_with("target")
+ || path.ends_with("build")
+ || path.ends_with(".git")
+ },
+ &mut |entry| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap().to_string_lossy();
+ let extensions = [".py", ".sh"];
+ if extensions.iter().any(|e| filename.ends_with(e)) {
+ return;
+ }
+
+ if t!(is_executable(&file), file) {
+ let rel_path = file.strip_prefix(path).unwrap();
+ let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/");
+
+ if ALLOWED.contains(&git_friendly_path.as_str()) {
+ return;
+ }
+
+ let output = Command::new("git")
+ .arg("ls-files")
+ .arg(&git_friendly_path)
+ .current_dir(path)
+ .stderr(Stdio::null())
+ .output()
+ .unwrap_or_else(|e| {
+ panic!("could not run git ls-files: {e}");
+ });
+ let path_bytes = rel_path.as_os_str().as_bytes();
+ if output.status.success() && output.stdout.starts_with(path_bytes) {
+ tidy_error!(bad, "binary checked into source: {}", file.display());
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/src/tools/tidy/src/debug_artifacts.rs b/src/tools/tidy/src/debug_artifacts.rs
new file mode 100644
index 000000000..ab87230f8
--- /dev/null
+++ b/src/tools/tidy/src/debug_artifacts.rs
@@ -0,0 +1,23 @@
+//! Tidy check to prevent creation of unnecessary debug artifacts while running tests.
+
+use std::path::{Path, PathBuf};
+
+const GRAPHVIZ_POSTFLOW_MSG: &str = "`borrowck_graphviz_postflow` attribute in test";
+
+pub fn check(path: &Path, bad: &mut bool) {
+ let test_dir: PathBuf = path.join("test");
+
+ super::walk(&test_dir, &mut super::filter_dirs, &mut |entry, contents| {
+ let filename = entry.path();
+ let is_rust = filename.extension().map_or(false, |ext| ext == "rs");
+ if !is_rust {
+ return;
+ }
+
+ for (i, line) in contents.lines().enumerate() {
+ if line.contains("borrowck_graphviz_postflow") {
+ tidy_error!(bad, "{}:{}: {}", filename.display(), i + 1, GRAPHVIZ_POSTFLOW_MSG);
+ }
+ }
+ });
+}
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
new file mode 100644
index 000000000..333f85f6d
--- /dev/null
+++ b/src/tools/tidy/src/deps.rs
@@ -0,0 +1,621 @@
+//! Checks the licenses of third-party dependencies.
+
+use cargo_metadata::{Metadata, Package, PackageId, Resolve};
+use std::collections::{BTreeSet, HashSet};
+use std::path::Path;
+
+/// These are licenses that are allowed for all crates, including the runtime,
+/// rustc, tools, etc.
+const LICENSES: &[&str] = &[
+ "MIT/Apache-2.0",
+ "MIT / Apache-2.0",
+ "Apache-2.0/MIT",
+ "Apache-2.0 / MIT",
+ "MIT OR Apache-2.0",
+ "Apache-2.0 OR MIT",
+ "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT", // wasi license
+ "MIT",
+ "ISC",
+ "Unlicense/MIT",
+ "Unlicense OR MIT",
+ "0BSD OR MIT OR Apache-2.0", // adler license
+ "Zlib OR Apache-2.0 OR MIT", // tinyvec
+ "MIT OR Zlib OR Apache-2.0", // miniz_oxide
+];
+
+/// These are exceptions to Rust's permissive licensing policy, and
+/// should be considered bugs. Exceptions are only allowed in Rust
+/// tooling. It is _crucial_ that no exception crates be dependencies
+/// of the Rust runtime (std/test).
+const EXCEPTIONS: &[(&str, &str)] = &[
+ ("mdbook", "MPL-2.0"), // mdbook
+ ("openssl", "Apache-2.0"), // cargo, mdbook
+ ("colored", "MPL-2.0"), // rustfmt
+ ("ordslice", "Apache-2.0"), // rls
+ ("ryu", "Apache-2.0 OR BSL-1.0"), // rls/cargo/... (because of serde)
+ ("bytesize", "Apache-2.0"), // cargo
+ ("im-rc", "MPL-2.0+"), // cargo
+ ("sized-chunks", "MPL-2.0+"), // cargo via im-rc
+ ("bitmaps", "MPL-2.0+"), // cargo via im-rc
+ ("instant", "BSD-3-Clause"), // rustc_driver/tracing-subscriber/parking_lot
+ ("snap", "BSD-3-Clause"), // rustc
+ ("fluent-langneg", "Apache-2.0"), // rustc (fluent translations)
+ ("self_cell", "Apache-2.0"), // rustc (fluent translations)
+ // FIXME: this dependency violates the documentation comment above:
+ ("fortanix-sgx-abi", "MPL-2.0"), // libstd but only for `sgx` target
+ ("dunce", "CC0-1.0"), // cargo (dev dependency)
+ ("similar", "Apache-2.0"), // cargo (dev dependency)
+ ("normalize-line-endings", "Apache-2.0"), // cargo (dev dependency)
+];
+
+const EXCEPTIONS_CRANELIFT: &[(&str, &str)] = &[
+ ("cranelift-bforest", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-codegen", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-codegen-meta", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-codegen-shared", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-entity", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-frontend", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-isle", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-jit", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-module", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-native", "Apache-2.0 WITH LLVM-exception"),
+ ("cranelift-object", "Apache-2.0 WITH LLVM-exception"),
+ ("mach", "BSD-2-Clause"),
+ ("regalloc2", "Apache-2.0 WITH LLVM-exception"),
+ ("target-lexicon", "Apache-2.0 WITH LLVM-exception"),
+];
+
+const EXCEPTIONS_BOOTSTRAP: &[(&str, &str)] = &[
+ ("ryu", "Apache-2.0 OR BSL-1.0"), // through serde
+];
+
+/// These are the root crates that are part of the runtime. The licenses for
+/// these and all their dependencies *must not* be in the exception list.
+const RUNTIME_CRATES: &[&str] = &["std", "core", "alloc", "test", "panic_abort", "panic_unwind"];
+
+/// Crates whose dependencies must be explicitly permitted.
+const RESTRICTED_DEPENDENCY_CRATES: &[&str] = &["rustc_driver", "rustc_codegen_llvm"];
+
+/// Crates rustc is allowed to depend on. Avoid adding to the list if possible.
+///
+/// This list is here to provide a speed-bump to adding a new dependency to
+/// rustc. Please check with the compiler team before adding an entry.
+const PERMITTED_DEPENDENCIES: &[&str] = &[
+ "addr2line",
+ "adler",
+ "ahash",
+ "aho-corasick",
+ "annotate-snippets",
+ "ansi_term",
+ "arrayvec",
+ "atty",
+ "autocfg",
+ "bitflags",
+ "block-buffer",
+ "block-padding",
+ "byte-tools",
+ "byteorder",
+ "cc",
+ "cfg-if",
+ "chalk-derive",
+ "chalk-engine",
+ "chalk-ir",
+ "chalk-solve",
+ "chrono",
+ "compiler_builtins",
+ "cpufeatures",
+ "crc32fast",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+ "crypto-common",
+ "cstr",
+ "datafrog",
+ "difference",
+ "digest",
+ "dlmalloc",
+ "either",
+ "ena",
+ "env_logger",
+ "expect-test",
+ "fake-simd",
+ "fallible-iterator", // dependency of `thorin`
+ "filetime",
+ "fixedbitset",
+ "flate2",
+ "fluent-bundle",
+ "fluent-langneg",
+ "fluent-syntax",
+ "fortanix-sgx-abi",
+ "generic-array",
+ "getopts",
+ "getrandom",
+ "gimli",
+ "gsgdt",
+ "hashbrown",
+ "hermit-abi",
+ "humantime",
+ "if_chain",
+ "indexmap",
+ "instant",
+ "intl-memoizer",
+ "intl_pluralrules",
+ "itertools",
+ "itoa",
+ "jobserver",
+ "lazy_static",
+ "libc",
+ "libloading",
+ "libz-sys",
+ "lock_api",
+ "log",
+ "matchers",
+ "md-5",
+ "measureme",
+ "memchr",
+ "memmap2",
+ "memoffset",
+ "miniz_oxide",
+ "num-integer",
+ "num-traits",
+ "num_cpus",
+ "object",
+ "odht",
+ "once_cell",
+ "opaque-debug",
+ "parking_lot",
+ "parking_lot_core",
+ "pathdiff",
+ "perf-event-open-sys",
+ "petgraph",
+ "pin-project-lite",
+ "pkg-config",
+ "polonius-engine",
+ "ppv-lite86",
+ "proc-macro-hack",
+ "proc-macro2",
+ "psm",
+ "punycode",
+ "quick-error",
+ "quote",
+ "rand",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+ "rand_xorshift",
+ "rand_xoshiro",
+ "redox_syscall",
+ "regex",
+ "regex-automata",
+ "regex-syntax",
+ "remove_dir_all",
+ "rls-data",
+ "rls-span",
+ "rustc-demangle",
+ "rustc-hash",
+ "rustc-rayon",
+ "rustc-rayon-core",
+ "rustc_version",
+ "ryu",
+ "scoped-tls",
+ "scopeguard",
+ "self_cell",
+ "semver",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "sha-1",
+ "sha2",
+ "sharded-slab",
+ "smallvec",
+ "snap",
+ "stable_deref_trait",
+ "stacker",
+ "syn",
+ "synstructure",
+ "tempfile",
+ "termcolor",
+ "termize",
+ "thiserror",
+ "thiserror-impl",
+ "thorin-dwp",
+ "thread_local",
+ "time",
+ "tinystr",
+ "tinyvec",
+ "tracing",
+ "tracing-attributes",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+ "tracing-tree",
+ "type-map",
+ "typenum",
+ "unic-char-property",
+ "unic-char-range",
+ "unic-common",
+ "unic-emoji-char",
+ "unic-langid",
+ "unic-langid-impl",
+ "unic-langid-macros",
+ "unic-langid-macros-impl",
+ "unic-ucd-version",
+ "unicode-normalization",
+ "unicode-script",
+ "unicode-security",
+ "unicode-width",
+ "unicode-xid",
+ "vcpkg",
+ "version_check",
+ "wasi",
+ "winapi",
+ "winapi-i686-pc-windows-gnu",
+ "winapi-util",
+ "winapi-x86_64-pc-windows-gnu",
+ // this is a false-positive: it's only used by rustfmt, but because it's enabled through a
+ // feature, tidy thinks it's used by rustc as well.
+ "yansi-term",
+];
+
+const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
+ "ahash",
+ "anyhow",
+ "ar",
+ "autocfg",
+ "bitflags",
+ "byteorder",
+ "cfg-if",
+ "cranelift-bforest",
+ "cranelift-codegen",
+ "cranelift-codegen-meta",
+ "cranelift-codegen-shared",
+ "cranelift-entity",
+ "cranelift-frontend",
+ "cranelift-isle",
+ "cranelift-jit",
+ "cranelift-module",
+ "cranelift-native",
+ "cranelift-object",
+ "crc32fast",
+ "fxhash",
+ "getrandom",
+ "gimli",
+ "hashbrown",
+ "indexmap",
+ "libc",
+ "libloading",
+ "log",
+ "mach",
+ "memchr",
+ "object",
+ "once_cell",
+ "regalloc2",
+ "region",
+ "slice-group-by",
+ "smallvec",
+ "target-lexicon",
+ "version_check",
+ "wasi",
+ "winapi",
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+];
+
+const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[
+ // Crates in this list take quite a long time to build, so don't allow two versions of them
+ // to accidentally sneak into our dependency graph, in order to ensure we keep our CI times
+ // under control.
+ "cargo",
+];
+
+/// Dependency checks.
+///
+/// `root` is the path to the directory with the root `Cargo.toml` (for the workspace). `cargo` is
+/// the path to the cargo executable.
+pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
+ let mut cmd = cargo_metadata::MetadataCommand::new();
+ cmd.cargo_path(cargo)
+ .manifest_path(root.join("Cargo.toml"))
+ .features(cargo_metadata::CargoOpt::AllFeatures);
+ let metadata = t!(cmd.exec());
+ let runtime_ids = compute_runtime_crates(&metadata);
+ check_exceptions(&metadata, EXCEPTIONS, runtime_ids, bad);
+ check_dependencies(
+ &metadata,
+ "main workspace",
+ PERMITTED_DEPENDENCIES,
+ RESTRICTED_DEPENDENCY_CRATES,
+ bad,
+ );
+ check_crate_duplicate(&metadata, FORBIDDEN_TO_HAVE_DUPLICATES, bad);
+ check_rustfix(&metadata, bad);
+
+ // Check rustc_codegen_cranelift independently as it has its own workspace.
+ let mut cmd = cargo_metadata::MetadataCommand::new();
+ cmd.cargo_path(cargo)
+ .manifest_path(root.join("compiler/rustc_codegen_cranelift/Cargo.toml"))
+ .features(cargo_metadata::CargoOpt::AllFeatures);
+ let metadata = t!(cmd.exec());
+ let runtime_ids = HashSet::new();
+ check_exceptions(&metadata, EXCEPTIONS_CRANELIFT, runtime_ids, bad);
+ check_dependencies(
+ &metadata,
+ "cranelift",
+ PERMITTED_CRANELIFT_DEPENDENCIES,
+ &["rustc_codegen_cranelift"],
+ bad,
+ );
+ check_crate_duplicate(&metadata, &[], bad);
+
+ let mut cmd = cargo_metadata::MetadataCommand::new();
+ cmd.cargo_path(cargo)
+ .manifest_path(root.join("src/bootstrap/Cargo.toml"))
+ .features(cargo_metadata::CargoOpt::AllFeatures);
+ let metadata = t!(cmd.exec());
+ let runtime_ids = HashSet::new();
+ check_exceptions(&metadata, EXCEPTIONS_BOOTSTRAP, runtime_ids, bad);
+}
+
+/// Check that all licenses are in the valid list in `LICENSES`.
+///
+/// Packages listed in `EXCEPTIONS` are allowed for tools.
+fn check_exceptions(
+ metadata: &Metadata,
+ exceptions: &[(&str, &str)],
+ runtime_ids: HashSet<&PackageId>,
+ bad: &mut bool,
+) {
+ // Validate the EXCEPTIONS list hasn't changed.
+ for (name, license) in exceptions {
+ // Check that the package actually exists.
+ if !metadata.packages.iter().any(|p| p.name == *name) {
+ tidy_error!(
+ bad,
+ "could not find exception package `{}`\n\
+ Remove from EXCEPTIONS list if it is no longer used.",
+ name
+ );
+ }
+ // Check that the license hasn't changed.
+ for pkg in metadata.packages.iter().filter(|p| p.name == *name) {
+ match &pkg.license {
+ None => {
+ tidy_error!(
+ bad,
+ "dependency exception `{}` does not declare a license expression",
+ pkg.id
+ );
+ }
+ Some(pkg_license) => {
+ if pkg_license.as_str() != *license {
+ println!("dependency exception `{name}` license has changed");
+ println!(" previously `{license}` now `{pkg_license}`");
+ println!(" update EXCEPTIONS for the new license");
+ *bad = true;
+ }
+ }
+ }
+ }
+ }
+
+ let exception_names: Vec<_> = exceptions.iter().map(|(name, _license)| *name).collect();
+
+ // Check if any package does not have a valid license.
+ for pkg in &metadata.packages {
+ if pkg.source.is_none() {
+ // No need to check local packages.
+ continue;
+ }
+ if !runtime_ids.contains(&pkg.id) && exception_names.contains(&pkg.name.as_str()) {
+ continue;
+ }
+ let license = match &pkg.license {
+ Some(license) => license,
+ None => {
+ tidy_error!(bad, "dependency `{}` does not define a license expression", pkg.id);
+ continue;
+ }
+ };
+ if !LICENSES.contains(&license.as_str()) {
+ if pkg.name == "fortanix-sgx-abi" {
+ // This is a specific exception because SGX is considered
+ // "third party". See
+ // https://github.com/rust-lang/rust/issues/62620 for more. In
+ // general, these should never be added.
+ continue;
+ }
+ tidy_error!(bad, "invalid license `{}` in `{}`", license, pkg.id);
+ }
+ }
+}
+
+/// Checks the dependencies of `RESTRICTED_DEPENDENCY_CRATES` at the given path. Changes `bad` to
+/// `true` if a check failed.
+///
+/// Specifically, this checks that the dependencies are on the `PERMITTED_DEPENDENCIES`.
+fn check_dependencies(
+ metadata: &Metadata,
+ descr: &str,
+ permitted_dependencies: &[&'static str],
+ restricted_dependency_crates: &[&'static str],
+ bad: &mut bool,
+) {
+ // Check that the PERMITTED_DEPENDENCIES does not have unused entries.
+ for name in permitted_dependencies {
+ if !metadata.packages.iter().any(|p| p.name == *name) {
+ tidy_error!(
+ bad,
+ "could not find allowed package `{}`\n\
+ Remove from PERMITTED_DEPENDENCIES list if it is no longer used.",
+ name
+ );
+ }
+ }
+ // Get the list in a convenient form.
+ let permitted_dependencies: HashSet<_> = permitted_dependencies.iter().cloned().collect();
+
+ // Check dependencies.
+ let mut visited = BTreeSet::new();
+ let mut unapproved = BTreeSet::new();
+ for &krate in restricted_dependency_crates.iter() {
+ let pkg = pkg_from_name(metadata, krate);
+ let mut bad =
+ check_crate_dependencies(&permitted_dependencies, metadata, &mut visited, pkg);
+ unapproved.append(&mut bad);
+ }
+
+ if !unapproved.is_empty() {
+ tidy_error!(bad, "Dependencies for {} not explicitly permitted:", descr);
+ for dep in unapproved {
+ println!("* {dep}");
+ }
+ }
+}
+
+/// Checks the dependencies of the given crate from the given cargo metadata to see if they are on
+/// the list of permitted dependencies. Returns a list of disallowed dependencies.
+fn check_crate_dependencies<'a>(
+ permitted_dependencies: &'a HashSet<&'static str>,
+ metadata: &'a Metadata,
+ visited: &mut BTreeSet<&'a PackageId>,
+ krate: &'a Package,
+) -> BTreeSet<&'a PackageId> {
+ // This will contain bad deps.
+ let mut unapproved = BTreeSet::new();
+
+ // Check if we have already visited this crate.
+ if visited.contains(&krate.id) {
+ return unapproved;
+ }
+
+ visited.insert(&krate.id);
+
+ // If this path is in-tree, we don't require it to be explicitly permitted.
+ if krate.source.is_some() {
+ // If this dependency is not on `PERMITTED_DEPENDENCIES`, add to bad set.
+ if !permitted_dependencies.contains(krate.name.as_str()) {
+ unapproved.insert(&krate.id);
+ }
+ }
+
+ // Do a DFS in the crate graph.
+ let to_check = deps_of(metadata, &krate.id);
+
+ for dep in to_check {
+ let mut bad = check_crate_dependencies(permitted_dependencies, metadata, visited, dep);
+ unapproved.append(&mut bad);
+ }
+
+ unapproved
+}
+
+/// Prevents multiple versions of some expensive crates.
+fn check_crate_duplicate(
+ metadata: &Metadata,
+ forbidden_to_have_duplicates: &[&str],
+ bad: &mut bool,
+) {
+ for &name in forbidden_to_have_duplicates {
+ let matches: Vec<_> = metadata.packages.iter().filter(|pkg| pkg.name == name).collect();
+ match matches.len() {
+ 0 => {
+ tidy_error!(
+ bad,
+ "crate `{}` is missing, update `check_crate_duplicate` \
+ if it is no longer used",
+ name
+ );
+ }
+ 1 => {}
+ _ => {
+ tidy_error!(
+ bad,
+ "crate `{}` is duplicated in `Cargo.lock`, \
+ it is too expensive to build multiple times, \
+ so make sure only one version appears across all dependencies",
+ name
+ );
+ for pkg in matches {
+ println!(" * {}", pkg.id);
+ }
+ }
+ }
+ }
+}
+
+/// Returns a list of dependencies for the given package.
+fn deps_of<'a>(metadata: &'a Metadata, pkg_id: &'a PackageId) -> Vec<&'a Package> {
+ let resolve = metadata.resolve.as_ref().unwrap();
+ let node = resolve
+ .nodes
+ .iter()
+ .find(|n| &n.id == pkg_id)
+ .unwrap_or_else(|| panic!("could not find `{pkg_id}` in resolve"));
+ node.deps
+ .iter()
+ .map(|dep| {
+ metadata.packages.iter().find(|pkg| pkg.id == dep.pkg).unwrap_or_else(|| {
+ panic!("could not find dep `{}` for pkg `{}` in resolve", dep.pkg, pkg_id)
+ })
+ })
+ .collect()
+}
+
+/// Finds a package with the given name.
+fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package {
+ let mut i = metadata.packages.iter().filter(|p| p.name == name);
+ let result =
+ i.next().unwrap_or_else(|| panic!("could not find package `{name}` in package list"));
+ assert!(i.next().is_none(), "more than one package found for `{name}`");
+ result
+}
+
+/// Finds all the packages that are in the rust runtime.
+fn compute_runtime_crates<'a>(metadata: &'a Metadata) -> HashSet<&'a PackageId> {
+ let resolve = metadata.resolve.as_ref().unwrap();
+ let mut result = HashSet::new();
+ for name in RUNTIME_CRATES {
+ let id = &pkg_from_name(metadata, name).id;
+ normal_deps_of_r(resolve, id, &mut result);
+ }
+ result
+}
+
+/// Recursively find all normal dependencies.
+fn normal_deps_of_r<'a>(
+ resolve: &'a Resolve,
+ pkg_id: &'a PackageId,
+ result: &mut HashSet<&'a PackageId>,
+) {
+ if !result.insert(pkg_id) {
+ return;
+ }
+ let node = resolve
+ .nodes
+ .iter()
+ .find(|n| &n.id == pkg_id)
+ .unwrap_or_else(|| panic!("could not find `{pkg_id}` in resolve"));
+ for dep in &node.deps {
+ normal_deps_of_r(resolve, &dep.pkg, result);
+ }
+}
+
+fn check_rustfix(metadata: &Metadata, bad: &mut bool) {
+ let cargo = pkg_from_name(metadata, "cargo");
+ let compiletest = pkg_from_name(metadata, "compiletest");
+ let cargo_deps = deps_of(metadata, &cargo.id);
+ let compiletest_deps = deps_of(metadata, &compiletest.id);
+ let cargo_rustfix = cargo_deps.iter().find(|p| p.name == "rustfix").unwrap();
+ let compiletest_rustfix = compiletest_deps.iter().find(|p| p.name == "rustfix").unwrap();
+ if cargo_rustfix.version != compiletest_rustfix.version {
+ tidy_error!(
+ bad,
+ "cargo's rustfix version {} does not match compiletest's rustfix version {}\n\
+ rustfix should be kept in sync, update the cargo side first, and then update \
+ compiletest along with cargo.",
+ cargo_rustfix.version,
+ compiletest_rustfix.version
+ );
+ }
+}
diff --git a/src/tools/tidy/src/edition.rs b/src/tools/tidy/src/edition.rs
new file mode 100644
index 000000000..b0abee459
--- /dev/null
+++ b/src/tools/tidy/src/edition.rs
@@ -0,0 +1,31 @@
+//! Tidy check to ensure that the `edition` of every in-tree crate is '2021'.
+
+use std::path::Path;
+
+fn is_edition_2021(mut line: &str) -> bool {
+ line = line.trim();
+ line == "edition = \"2021\""
+}
+
+pub fn check(path: &Path, bad: &mut bool) {
+ super::walk(
+ path,
+ &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
+ &mut |entry, contents| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap();
+ if filename != "Cargo.toml" {
+ return;
+ }
+
+ let is_2021 = contents.lines().any(is_edition_2021);
+ if !is_2021 {
+ tidy_error!(
+ bad,
+ "{} doesn't have `edition = \"2021\"` on a separate line",
+ file.display()
+ );
+ }
+ },
+ );
+}
diff --git a/src/tools/tidy/src/error_codes_check.rs b/src/tools/tidy/src/error_codes_check.rs
new file mode 100644
index 000000000..f0054a1c1
--- /dev/null
+++ b/src/tools/tidy/src/error_codes_check.rs
@@ -0,0 +1,319 @@
+//! Checks that all error codes have at least one test to prevent having error
+//! codes that are silently not thrown by the compiler anymore.
+
+use std::collections::{HashMap, HashSet};
+use std::ffi::OsStr;
+use std::fs::read_to_string;
+use std::path::Path;
+
+use regex::Regex;
+
+// A few of those error codes can't be tested but all the others can and *should* be tested!
+const EXEMPTED_FROM_TEST: &[&str] = &[
+ "E0313", "E0377", "E0461", "E0462", "E0465", "E0476", "E0490", "E0514", "E0519", "E0523",
+ "E0554", "E0640", "E0717", "E0729", "E0789",
+];
+
+// Some error codes don't have any tests apparently...
+const IGNORE_EXPLANATION_CHECK: &[&str] = &["E0464", "E0570", "E0601", "E0602", "E0729"];
+
+// If the file path contains any of these, we don't want to try to extract error codes from it.
+//
+// We need to declare each path in the windows version (with backslash).
+const PATHS_TO_IGNORE_FOR_EXTRACTION: &[&str] =
+ &["src/test/", "src\\test\\", "src/doc/", "src\\doc\\", "src/tools/", "src\\tools\\"];
+
+#[derive(Default, Debug)]
+struct ErrorCodeStatus {
+ has_test: bool,
+ has_explanation: bool,
+ is_used: bool,
+}
+
+fn check_error_code_explanation(
+ f: &str,
+ error_codes: &mut HashMap<String, ErrorCodeStatus>,
+ err_code: String,
+) -> bool {
+ let mut invalid_compile_fail_format = false;
+ let mut found_error_code = false;
+
+ for line in f.lines() {
+ let s = line.trim();
+ if s.starts_with("```") {
+ if s.contains("compile_fail") && s.contains('E') {
+ if !found_error_code {
+ error_codes.get_mut(&err_code).map(|x| x.has_test = true);
+ found_error_code = true;
+ }
+ } else if s.contains("compile-fail") {
+ invalid_compile_fail_format = true;
+ }
+ } else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
+ if !found_error_code {
+ error_codes.get_mut(&err_code).map(|x| x.has_test = true);
+ found_error_code = true;
+ }
+ }
+ }
+ invalid_compile_fail_format
+}
+
+fn check_if_error_code_is_test_in_explanation(f: &str, err_code: &str) -> bool {
+ let mut ignore_found = false;
+
+ for line in f.lines() {
+ let s = line.trim();
+ if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
+ return true;
+ }
+ if s.starts_with("```") {
+ if s.contains("compile_fail") && s.contains(err_code) {
+ return true;
+ } else if s.contains("ignore") {
+ // It's very likely that we can't actually make it fail compilation...
+ ignore_found = true;
+ }
+ }
+ }
+ ignore_found
+}
+
+macro_rules! some_or_continue {
+ ($e:expr) => {
+ match $e {
+ Some(e) => e,
+ None => continue,
+ }
+ };
+}
+
+fn extract_error_codes(
+ f: &str,
+ error_codes: &mut HashMap<String, ErrorCodeStatus>,
+ path: &Path,
+ errors: &mut Vec<String>,
+) {
+ let mut reached_no_explanation = false;
+
+ for line in f.lines() {
+ let s = line.trim();
+ if !reached_no_explanation && s.starts_with('E') && s.contains("include_str!(\"") {
+ let err_code = s
+ .split_once(':')
+ .expect(
+ format!(
+ "Expected a line with the format `E0xxx: include_str!(\"..\")`, but got {} \
+ without a `:` delimiter",
+ s,
+ )
+ .as_str(),
+ )
+ .0
+ .to_owned();
+ error_codes.entry(err_code.clone()).or_default().has_explanation = true;
+
+ // Now we extract the tests from the markdown file!
+ let md_file_name = match s.split_once("include_str!(\"") {
+ None => continue,
+ Some((_, md)) => match md.split_once("\")") {
+ None => continue,
+ Some((file_name, _)) => file_name,
+ },
+ };
+ let path = some_or_continue!(path.parent())
+ .join(md_file_name)
+ .canonicalize()
+ .expect("failed to canonicalize error explanation file path");
+ match read_to_string(&path) {
+ Ok(content) => {
+ let has_test = check_if_error_code_is_test_in_explanation(&content, &err_code);
+ if !has_test && !IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
+ errors.push(format!(
+ "`{}` doesn't use its own error code in compile_fail example",
+ path.display(),
+ ));
+ } else if has_test && IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
+ errors.push(format!(
+ "`{}` has a compile_fail example with its own error code, it shouldn't \
+ be listed in IGNORE_EXPLANATION_CHECK!",
+ path.display(),
+ ));
+ }
+ if check_error_code_explanation(&content, error_codes, err_code) {
+ errors.push(format!(
+ "`{}` uses invalid tag `compile-fail` instead of `compile_fail`",
+ path.display(),
+ ));
+ }
+ }
+ Err(e) => {
+ eprintln!("Couldn't read `{}`: {}", path.display(), e);
+ }
+ }
+ } else if reached_no_explanation && s.starts_with('E') {
+ let err_code = match s.split_once(',') {
+ None => s,
+ Some((err_code, _)) => err_code,
+ }
+ .to_string();
+ if !error_codes.contains_key(&err_code) {
+ // this check should *never* fail!
+ error_codes.insert(err_code, ErrorCodeStatus::default());
+ }
+ } else if s == ";" {
+ reached_no_explanation = true;
+ }
+ }
+}
+
+fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, ErrorCodeStatus>) {
+ for line in f.lines() {
+ let s = line.trim();
+ if s.starts_with("error[E") || s.starts_with("warning[E") {
+ let err_code = match s.split_once(']') {
+ None => continue,
+ Some((err_code, _)) => match err_code.split_once('[') {
+ None => continue,
+ Some((_, err_code)) => err_code,
+ },
+ };
+ error_codes.entry(err_code.to_owned()).or_default().has_test = true;
+ }
+ }
+}
+
+fn extract_error_codes_from_source(
+ f: &str,
+ error_codes: &mut HashMap<String, ErrorCodeStatus>,
+ regex: &Regex,
+) {
+ for line in f.lines() {
+ if line.trim_start().starts_with("//") {
+ continue;
+ }
+ for cap in regex.captures_iter(line) {
+ if let Some(error_code) = cap.get(1) {
+ error_codes.entry(error_code.as_str().to_owned()).or_default().is_used = true;
+ }
+ }
+ }
+}
+
+pub fn check(paths: &[&Path], bad: &mut bool) {
+ let mut errors = Vec::new();
+ let mut found_explanations = 0;
+ let mut found_tests = 0;
+ let mut error_codes: HashMap<String, ErrorCodeStatus> = HashMap::new();
+ let mut explanations: HashSet<String> = HashSet::new();
+ // We want error codes which match the following cases:
+ //
+ // * foo(a, E0111, a)
+ // * foo(a, E0111)
+ // * foo(E0111, a)
+ // * #[error = "E0111"]
+ let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
+
+ println!("Checking which error codes lack tests...");
+
+ for path in paths {
+ super::walk(path, &mut |path| super::filter_dirs(path), &mut |entry, contents| {
+ let file_name = entry.file_name();
+ let entry_path = entry.path();
+
+ if file_name == "error_codes.rs" {
+ extract_error_codes(contents, &mut error_codes, entry.path(), &mut errors);
+ found_explanations += 1;
+ } else if entry_path.extension() == Some(OsStr::new("stderr")) {
+ extract_error_codes_from_tests(contents, &mut error_codes);
+ found_tests += 1;
+ } else if entry_path.extension() == Some(OsStr::new("rs")) {
+ let path = entry.path().to_string_lossy();
+ if PATHS_TO_IGNORE_FOR_EXTRACTION.iter().all(|c| !path.contains(c)) {
+ extract_error_codes_from_source(contents, &mut error_codes, &regex);
+ }
+ } else if entry_path
+ .parent()
+ .and_then(|p| p.file_name())
+ .map(|p| p == "error_codes")
+ .unwrap_or(false)
+ && entry_path.extension() == Some(OsStr::new("md"))
+ {
+ explanations.insert(file_name.to_str().unwrap().replace(".md", ""));
+ }
+ });
+ }
+ if found_explanations == 0 {
+ eprintln!("No error code explanation was tested!");
+ *bad = true;
+ }
+ if found_tests == 0 {
+ eprintln!("No error code was found in compilation errors!");
+ *bad = true;
+ }
+ if explanations.is_empty() {
+ eprintln!("No error code explanation was found!");
+ *bad = true;
+ }
+ if errors.is_empty() {
+ println!("Found {} error codes", error_codes.len());
+
+ for (err_code, error_status) in &error_codes {
+ if !error_status.has_test && !EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
+ errors.push(format!("Error code {err_code} needs to have at least one UI test!"));
+ } else if error_status.has_test && EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
+ errors.push(format!(
+ "Error code {} has a UI test, it shouldn't be listed in EXEMPTED_FROM_TEST!",
+ err_code
+ ));
+ }
+ if !error_status.is_used && !error_status.has_explanation {
+ errors.push(format!(
+ "Error code {} isn't used and doesn't have an error explanation, so it should be \
+ commented out in the error_codes.rs file",
+ err_code
+ ));
+ }
+ }
+ }
+ if errors.is_empty() {
+ // Checking if local constants need to be cleaned.
+ for err_code in EXEMPTED_FROM_TEST {
+ match error_codes.get(err_code.to_owned()) {
+ Some(status) => {
+ if status.has_test {
+ errors.push(format!(
+ "{} error code has a test and therefore should be \
+ removed from the `EXEMPTED_FROM_TEST` constant",
+ err_code
+ ));
+ }
+ }
+ None => errors.push(format!(
+ "{} error code isn't used anymore and therefore should be removed \
+ from the `EXEMPTED_FROM_TEST` constant",
+ err_code
+ )),
+ }
+ }
+ }
+ if errors.is_empty() {
+ for explanation in explanations {
+ if !error_codes.contains_key(&explanation) {
+ errors.push(format!(
+ "{} error code explanation should be listed in `error_codes.rs`",
+ explanation
+ ));
+ }
+ }
+ }
+ errors.sort();
+ for err in &errors {
+ eprintln!("{err}");
+ }
+ println!("Found {} error(s) in error codes", errors.len());
+ if !errors.is_empty() {
+ *bad = true;
+ }
+ println!("Done!");
+}
diff --git a/src/tools/tidy/src/errors.rs b/src/tools/tidy/src/errors.rs
new file mode 100644
index 000000000..dbcc9341a
--- /dev/null
+++ b/src/tools/tidy/src/errors.rs
@@ -0,0 +1,76 @@
+//! Tidy check to verify the validity of long error diagnostic codes.
+//!
+//! This ensures that error codes are used at most once and also prints out some
+//! statistics about the error codes.
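+//!
+//! For example, if `E0123` were registered twice in `error_codes.rs`, this check
+//! would report `duplicate error code: 123` along with both offending locations.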
+
+use std::collections::HashMap;
+use std::path::Path;
+
+pub fn check(path: &Path, bad: &mut bool) {
+ let mut map: HashMap<_, Vec<_>> = HashMap::new();
+ super::walk(
+ path,
+ &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
+ &mut |entry, contents| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap().to_string_lossy();
+ if filename != "error_codes.rs" {
+ return;
+ }
+
+ // In the `register_long_diagnostics!` macro, entries look like this:
+ //
+ // ```
+ // EXXXX: r##"
+ // <Long diagnostic message>
+ // "##,
+ // ```
+ //
+ // and these long messages often have error codes themselves inside
+ // them, but we don't want to report duplicates in these cases. This
+ // variable keeps track of whether we're currently inside one of these
+ // long diagnostic messages.
+ let mut inside_long_diag = false;
+ for (num, line) in contents.lines().enumerate() {
+ if inside_long_diag {
+ inside_long_diag = !line.contains("\"##");
+ continue;
+ }
+
+ let mut search = line;
+ while let Some(i) = search.find('E') {
+ search = &search[i + 1..];
+ let code = if search.len() > 4 { search[..4].parse::<u32>() } else { continue };
+ let code = match code {
+ Ok(n) => n,
+ Err(..) => continue,
+ };
+ map.entry(code).or_default().push((file.to_owned(), num + 1, line.to_owned()));
+ break;
+ }
+
+ inside_long_diag = line.contains("r##\"");
+ }
+ },
+ );
+
+ let mut max = 0;
+ for (&code, entries) in map.iter() {
+ if code > max {
+ max = code;
+ }
+ if entries.len() == 1 {
+ continue;
+ }
+
+ tidy_error!(bad, "duplicate error code: {}", code);
+ for &(ref file, line_num, ref line) in entries.iter() {
+ tidy_error!(bad, "{}:{}: {}", file.display(), line_num, line);
+ }
+ }
+
+ if !*bad {
+ println!("* {} error codes", map.len());
+ println!("* highest error code: E{:04}", max);
+ }
+}
diff --git a/src/tools/tidy/src/extdeps.rs b/src/tools/tidy/src/extdeps.rs
new file mode 100644
index 000000000..aad57cacb
--- /dev/null
+++ b/src/tools/tidy/src/extdeps.rs
@@ -0,0 +1,33 @@
+//! Check for external package sources. Allow only vendorable packages.
+
+use std::fs;
+use std::path::Path;
+
+/// List of allowed sources for packages.
+const ALLOWED_SOURCES: &[&str] = &["\"registry+https://github.com/rust-lang/crates.io-index\""];
+
+/// Checks for external package sources. `root` is the path to the directory that contains the
+/// workspace `Cargo.toml`.
+pub fn check(root: &Path, bad: &mut bool) {
+ // `Cargo.lock` of rust.
+ let path = root.join("Cargo.lock");
+
+ // Open and read the whole file.
+ let cargo_lock = t!(fs::read_to_string(&path));
+
+ // Process each line.
+ for line in cargo_lock.lines() {
+ // Consider only source entries.
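+ // e.g. `source = "registry+https://github.com/rust-lang/crates.io-index"`.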
+ if !line.starts_with("source = ") {
+ continue;
+ }
+
+ // Extract source value.
+ let source = line.split_once('=').unwrap().1.trim();
+
+ // Ensure source is allowed.
+ if !ALLOWED_SOURCES.contains(&&*source) {
+ tidy_error!(bad, "invalid source: {}", source);
+ }
+ }
+}
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
new file mode 100644
index 000000000..2f22c081a
--- /dev/null
+++ b/src/tools/tidy/src/features.rs
@@ -0,0 +1,550 @@
+//! Tidy check to ensure that unstable features are all in order.
+//!
+//! This check will ensure properties like:
+//!
+//! * All stability attributes look reasonably well formed.
+//! * The set of library features is disjoint from the set of language features.
+//! * Library features have at most one stability level.
+//! * Library features have at most one `since` value.
+//! * All unstable lang features have tests to ensure they are actually unstable.
+//! * Language features in a group are sorted by feature name.
+
+use std::collections::HashMap;
+use std::fmt;
+use std::fs;
+use std::num::NonZeroU32;
+use std::path::Path;
+
+use regex::Regex;
+
+#[cfg(test)]
+mod tests;
+
+mod version;
+use version::Version;
+
+const FEATURE_GROUP_START_PREFIX: &str = "// feature-group-start";
+const FEATURE_GROUP_END_PREFIX: &str = "// feature-group-end";
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum Status {
+ Stable,
+ Removed,
+ Unstable,
+}
+
+impl fmt::Display for Status {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let as_str = match *self {
+ Status::Stable => "stable",
+ Status::Unstable => "unstable",
+ Status::Removed => "removed",
+ };
+ fmt::Display::fmt(as_str, f)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct Feature {
+ pub level: Status,
+ pub since: Option<Version>,
+ pub has_gate_test: bool,
+ pub tracking_issue: Option<NonZeroU32>,
+}
+impl Feature {
+ fn tracking_issue_display(&self) -> impl fmt::Display {
+ match self.tracking_issue {
+ None => "none".to_string(),
+ Some(x) => x.to_string(),
+ }
+ }
+}
+
+pub type Features = HashMap<String, Feature>;
+
+pub struct CollectedFeatures {
+ pub lib: Features,
+ pub lang: Features,
+}
+
+// Currently only used for unstable book generation
+pub fn collect_lib_features(base_src_path: &Path) -> Features {
+ let mut lib_features = Features::new();
+
+ map_lib_features(base_src_path, &mut |res, _, _| {
+ if let Ok((name, feature)) = res {
+ lib_features.insert(name.to_owned(), feature);
+ }
+ });
+ lib_features
+}
+
+pub fn check(
+ src_path: &Path,
+ compiler_path: &Path,
+ lib_path: &Path,
+ bad: &mut bool,
+ verbose: bool,
+) -> CollectedFeatures {
+ let mut features = collect_lang_features(compiler_path, bad);
+ assert!(!features.is_empty());
+
+ let lib_features = get_and_check_lib_features(lib_path, bad, &features);
+ assert!(!lib_features.is_empty());
+
+ super::walk_many(
+ &[
+ &src_path.join("test/ui"),
+ &src_path.join("test/ui-fulldeps"),
+ &src_path.join("test/rustdoc-ui"),
+ &src_path.join("test/rustdoc"),
+ ],
+ &mut |path| super::filter_dirs(path),
+ &mut |entry, contents| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap().to_string_lossy();
+ if !filename.ends_with(".rs")
+ || filename == "features.rs"
+ || filename == "diagnostic_list.rs"
+ {
+ return;
+ }
+
+ let filen_underscore = filename.replace('-', "_").replace(".rs", "");
+ let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);
+
+ for (i, line) in contents.lines().enumerate() {
+ let mut err = |msg: &str| {
+ tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg);
+ };
+
+ let gate_test_str = "gate-test-";
+
+ let feature_name = match line.find(gate_test_str) {
+ // NB: the `splitn` always succeeds, even if the delimiter is not present.
+ Some(i) => line[i + gate_test_str.len()..].splitn(2, ' ').next().unwrap(),
+ None => continue,
+ };
+ match features.get_mut(feature_name) {
+ Some(f) => {
+ if filename_is_gate_test {
+ err(&format!(
+ "The file is already marked as gate test \
+ through its name, no need for a \
+ 'gate-test-{}' comment",
+ feature_name
+ ));
+ }
+ f.has_gate_test = true;
+ }
+ None => {
+ err(&format!(
+ "gate-test test found referencing a nonexistent feature '{}'",
+ feature_name
+ ));
+ }
+ }
+ }
+ },
+ );
+
+ // Only check the number of lang features.
+ // Obligatory testing for library features is dumb.
+ let gate_untested = features
+ .iter()
+ .filter(|&(_, f)| f.level == Status::Unstable)
+ .filter(|&(_, f)| !f.has_gate_test)
+ .collect::<Vec<_>>();
+
+ for &(name, _) in gate_untested.iter() {
+ println!("Expected a gate test for the feature '{name}'.");
+ println!(
+ "Hint: create a failing test file named 'feature-gate-{}.rs'\
+ \n in the 'ui' test suite, with its failures due to\
+ \n missing usage of `#![feature({})]`.",
+ name, name
+ );
+ println!(
+ "Hint: If you already have such a test and don't want to rename it,\
+ \n you can also add a // gate-test-{} line to the test file.",
+ name
+ );
+ }
+
+ if !gate_untested.is_empty() {
+ tidy_error!(bad, "Found {} features without a gate test.", gate_untested.len());
+ }
+
+ if *bad {
+ return CollectedFeatures { lib: lib_features, lang: features };
+ }
+
+ if verbose {
+ let mut lines = Vec::new();
+ lines.extend(format_features(&features, "lang"));
+ lines.extend(format_features(&lib_features, "lib"));
+
+ lines.sort();
+ for line in lines {
+ println!("* {line}");
+ }
+ } else {
+ println!("* {} features", features.len());
+ }
+
+ CollectedFeatures { lib: lib_features, lang: features }
+}
+
+fn format_features<'a>(
+ features: &'a Features,
+ family: &'a str,
+) -> impl Iterator<Item = String> + 'a {
+ features.iter().map(move |(name, feature)| {
+ format!(
+ "{:<32} {:<8} {:<12} {:<8}",
+ name,
+ family,
+ feature.level,
+ feature.since.map_or("None".to_owned(), |since| since.to_string())
+ )
+ })
+}
+
+fn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {
+ lazy_static::lazy_static! {
+ static ref ISSUE: Regex = Regex::new(r#"issue\s*=\s*"([^"]*)""#).unwrap();
+ static ref FEATURE: Regex = Regex::new(r#"feature\s*=\s*"([^"]*)""#).unwrap();
+ static ref SINCE: Regex = Regex::new(r#"since\s*=\s*"([^"]*)""#).unwrap();
+ }
+
+ let r = match attr {
+ "issue" => &*ISSUE,
+ "feature" => &*FEATURE,
+ "since" => &*SINCE,
+ _ => unimplemented!("{attr} not handled"),
+ };
+
+ r.captures(line).and_then(|c| c.get(1)).map(|m| m.as_str())
+}
+
+fn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool {
+ let prefix = "feature_gate_";
+ if filen_underscore.starts_with(prefix) {
+ for (n, f) in features.iter_mut() {
+ // Equivalent to filen_underscore == format!("feature_gate_{n}")
+ if &filen_underscore[prefix.len()..] == n {
+ f.has_gate_test = true;
+ return true;
+ }
+ }
+ }
+ false
+}
+
+pub fn collect_lang_features(base_compiler_path: &Path, bad: &mut bool) -> Features {
+ let mut all = collect_lang_features_in(base_compiler_path, "active.rs", bad);
+ all.extend(collect_lang_features_in(base_compiler_path, "accepted.rs", bad));
+ all.extend(collect_lang_features_in(base_compiler_path, "removed.rs", bad));
+ all
+}
+
+fn collect_lang_features_in(base: &Path, file: &str, bad: &mut bool) -> Features {
+ let path = base.join("rustc_feature").join("src").join(file);
+ let contents = t!(fs::read_to_string(&path));
+
+ // We allow rustc-internal features to omit a tracking issue.
+ // To make tidy accept omitting a tracking issue, group the list of features
+ // without one between `// no-tracking-issue-start` and `// no-tracking-issue-end`.
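+ // For example (illustrative entry):
+ //
+ //     // no-tracking-issue-start
+ //     (active, some_internal_feature, "1.0.0", None, None),
+ //     // no-tracking-issue-end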
+ let mut next_feature_omits_tracking_issue = false;
+
+ let mut in_feature_group = false;
+ let mut prev_names = vec![];
+
+ contents
+ .lines()
+ .zip(1..)
+ .filter_map(|(line, line_number)| {
+ let line = line.trim();
+
+ // Within -start and -end, the tracking issue can be omitted.
+ match line {
+ "// no-tracking-issue-start" => {
+ next_feature_omits_tracking_issue = true;
+ return None;
+ }
+ "// no-tracking-issue-end" => {
+ next_feature_omits_tracking_issue = false;
+ return None;
+ }
+ _ => {}
+ }
+
+ if line.starts_with(FEATURE_GROUP_START_PREFIX) {
+ if in_feature_group {
+ tidy_error!(
+ bad,
+ "{}:{}: \
+ new feature group is started without ending the previous one",
+ path.display(),
+ line_number,
+ );
+ }
+
+ in_feature_group = true;
+ prev_names = vec![];
+ return None;
+ } else if line.starts_with(FEATURE_GROUP_END_PREFIX) {
+ in_feature_group = false;
+ prev_names = vec![];
+ return None;
+ }
+
+ let mut parts = line.split(',');
+ let level = match parts.next().map(|l| l.trim().trim_start_matches('(')) {
+ Some("active") => Status::Unstable,
+ Some("incomplete") => Status::Unstable,
+ Some("removed") => Status::Removed,
+ Some("accepted") => Status::Stable,
+ _ => return None,
+ };
+ let name = parts.next().unwrap().trim();
+
+ let since_str = parts.next().unwrap().trim().trim_matches('"');
+ let since = match since_str.parse() {
+ Ok(since) => Some(since),
+ Err(err) => {
+ tidy_error!(
+ bad,
+ "{}:{}: failed to parse since: {} ({:?})",
+ path.display(),
+ line_number,
+ since_str,
+ err,
+ );
+ None
+ }
+ };
+ if in_feature_group {
+ if prev_names.last() > Some(&name) {
+ // This assumes the user adds the feature name at the end of the list, as we're
+ // not looking ahead.
+ let correct_index = match prev_names.binary_search(&name) {
+ Ok(_) => {
+ // This only occurs when the feature name has already been declared.
+ tidy_error!(
+ bad,
+ "{}:{}: duplicate feature {}",
+ path.display(),
+ line_number,
+ name,
+ );
+ // skip any additional checks for this line
+ return None;
+ }
+ Err(index) => index,
+ };
+
+ let correct_placement = if correct_index == 0 {
+ "at the beginning of the feature group".to_owned()
+ } else if correct_index == prev_names.len() {
+ // I don't believe this is reachable given the above assumption, but it
+ // doesn't hurt to be safe.
+ "at the end of the feature group".to_owned()
+ } else {
+ format!(
+ "between {} and {}",
+ prev_names[correct_index - 1],
+ prev_names[correct_index],
+ )
+ };
+
+ tidy_error!(
+ bad,
+ "{}:{}: feature {} is not sorted by feature name (should be {})",
+ path.display(),
+ line_number,
+ name,
+ correct_placement,
+ );
+ }
+ prev_names.push(name);
+ }
+
+ let issue_str = parts.next().unwrap().trim();
+ let tracking_issue = if issue_str.starts_with("None") {
+ if level == Status::Unstable && !next_feature_omits_tracking_issue {
+ tidy_error!(
+ bad,
+ "{}:{}: no tracking issue for feature {}",
+ path.display(),
+ line_number,
+ name,
+ );
+ }
+ None
+ } else {
+ let s = issue_str.split('(').nth(1).unwrap().split(')').next().unwrap();
+ Some(s.parse().unwrap())
+ };
+ Some((name.to_owned(), Feature { level, since, has_gate_test: false, tracking_issue }))
+ })
+ .collect()
+}
+
+fn get_and_check_lib_features(
+ base_src_path: &Path,
+ bad: &mut bool,
+ lang_features: &Features,
+) -> Features {
+ let mut lib_features = Features::new();
+ map_lib_features(base_src_path, &mut |res, file, line| match res {
+ Ok((name, f)) => {
+ let mut check_features = |f: &Feature, list: &Features, display: &str| {
+ if let Some(ref s) = list.get(name) {
+ if f.tracking_issue != s.tracking_issue && f.level != Status::Stable {
+ tidy_error!(
+ bad,
+ "{}:{}: `issue` \"{}\" mismatches the {} `issue` of \"{}\"",
+ file.display(),
+ line,
+ f.tracking_issue_display(),
+ display,
+ s.tracking_issue_display(),
+ );
+ }
+ }
+ };
+ check_features(&f, &lang_features, "corresponding lang feature");
+ check_features(&f, &lib_features, "previous");
+ lib_features.insert(name.to_owned(), f);
+ }
+ Err(msg) => {
+ tidy_error!(bad, "{}:{}: {}", file.display(), line, msg);
+ }
+ });
+ lib_features
+}
+
+fn map_lib_features(
+ base_src_path: &Path,
+ mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize),
+) {
+ super::walk(
+ base_src_path,
+ &mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
+ &mut |entry, contents| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap().to_string_lossy();
+ if !filename.ends_with(".rs")
+ || filename == "features.rs"
+ || filename == "diagnostic_list.rs"
+ || filename == "error_codes.rs"
+ {
+ return;
+ }
+
+ // This is an early exit -- all the attributes we're concerned with must contain this:
+ // * rustc_const_unstable(
+ // * unstable(
+ // * stable(
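+ // e.g. `#[stable(feature = "rust1", since = "1.0.0")]`, or (illustrative)
+ // `#[unstable(feature = "some_feature", issue = "none")]`.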
+ if !contents.contains("stable(") {
+ return;
+ }
+
+ let handle_issue_none = |s| match s {
+ "none" => None,
+ issue => {
+ let n = issue.parse().expect("issue number is not a valid integer");
+ assert_ne!(n, 0, "\"none\" should be used when there is no issue, not \"0\"");
+ NonZeroU32::new(n)
+ }
+ };
+ let mut becoming_feature: Option<(&str, Feature)> = None;
+ let mut iter_lines = contents.lines().enumerate().peekable();
+ while let Some((i, line)) = iter_lines.next() {
+ macro_rules! err {
+ ($msg:expr) => {{
+ mf(Err($msg), file, i + 1);
+ continue;
+ }};
+ }
+
+ lazy_static::lazy_static! {
+ static ref COMMENT_LINE: Regex = Regex::new(r"^\s*//").unwrap();
+ }
+ // exclude commented out lines
+ if COMMENT_LINE.is_match(line) {
+ continue;
+ }
+
+ if let Some((ref name, ref mut f)) = becoming_feature {
+ if f.tracking_issue.is_none() {
+ f.tracking_issue = find_attr_val(line, "issue").and_then(handle_issue_none);
+ }
+ if line.ends_with(']') {
+ mf(Ok((name, f.clone())), file, i + 1);
+ } else if !line.ends_with(',') && !line.ends_with('\\') && !line.ends_with('"')
+ {
+ // We need to bail here because we might have missed the
+ // end of a stability attribute above because the ']'
+ // might not have been at the end of the line.
+ // We could then get into the very unfortunate situation that
+ // we continue parsing the file assuming the current stability
+ // attribute has not ended, and ignoring possible feature
+ // attributes in the process.
+ err!("malformed stability attribute");
+ } else {
+ continue;
+ }
+ }
+ becoming_feature = None;
+ if line.contains("rustc_const_unstable(") {
+ // `const fn` features are handled specially.
+ let feature_name = match find_attr_val(line, "feature") {
+ Some(name) => name,
+ None => err!("malformed stability attribute: missing `feature` key"),
+ };
+ let feature = Feature {
+ level: Status::Unstable,
+ since: None,
+ has_gate_test: false,
+ tracking_issue: find_attr_val(line, "issue").and_then(handle_issue_none),
+ };
+ mf(Ok((feature_name, feature)), file, i + 1);
+ continue;
+ }
+ let level = if line.contains("[unstable(") {
+ Status::Unstable
+ } else if line.contains("[stable(") {
+ Status::Stable
+ } else {
+ continue;
+ };
+ let feature_name = match find_attr_val(line, "feature")
+ .or_else(|| iter_lines.peek().and_then(|next| find_attr_val(next.1, "feature")))
+ {
+ Some(name) => name,
+ None => err!("malformed stability attribute: missing `feature` key"),
+ };
+ let since = match find_attr_val(line, "since").map(|x| x.parse()) {
+ Some(Ok(since)) => Some(since),
+ Some(Err(_err)) => {
+ err!("malformed stability attribute: can't parse `since` key");
+ }
+ None if level == Status::Stable => {
+ err!("malformed stability attribute: missing the `since` key");
+ }
+ None => None,
+ };
+ let tracking_issue = find_attr_val(line, "issue").and_then(handle_issue_none);
+
+ let feature = Feature { level, since, has_gate_test: false, tracking_issue };
+ if line.contains(']') {
+ mf(Ok((feature_name, feature)), file, i + 1);
+ } else {
+ becoming_feature = Some((feature_name, feature));
+ }
+ }
+ },
+ );
+}
diff --git a/src/tools/tidy/src/features/tests.rs b/src/tools/tidy/src/features/tests.rs
new file mode 100644
index 000000000..994523ac1
--- /dev/null
+++ b/src/tools/tidy/src/features/tests.rs
@@ -0,0 +1,9 @@
+use super::*;
+
+#[test]
+fn test_find_attr_val() {
+ let s = r#"#[unstable(feature = "tidy_test_never_used_anywhere_else", issue = "58402")]"#;
+ assert_eq!(find_attr_val(s, "feature"), Some("tidy_test_never_used_anywhere_else"));
+ assert_eq!(find_attr_val(s, "issue"), Some("58402"));
+ assert_eq!(find_attr_val(s, "since"), None);
+}
diff --git a/src/tools/tidy/src/features/version.rs b/src/tools/tidy/src/features/version.rs
new file mode 100644
index 000000000..620be2f98
--- /dev/null
+++ b/src/tools/tidy/src/features/version.rs
@@ -0,0 +1,48 @@
+use std::fmt;
+use std::num::ParseIntError;
+use std::str::FromStr;
+
+#[cfg(test)]
+mod tests;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Version {
+ parts: [u32; 3],
+}
+
+impl fmt::Display for Version {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad(&format!("{}.{}.{}", self.parts[0], self.parts[1], self.parts[2]))
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum ParseVersionError {
+ ParseIntError(ParseIntError),
+ WrongNumberOfParts,
+}
+
+impl From<ParseIntError> for ParseVersionError {
+ fn from(err: ParseIntError) -> Self {
+ ParseVersionError::ParseIntError(err)
+ }
+}
+
+impl FromStr for Version {
+ type Err = ParseVersionError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let mut iter = s.split('.').map(|part| Ok(part.parse()?));
+
+ let mut part = || iter.next().unwrap_or(Err(ParseVersionError::WrongNumberOfParts));
+
+ let parts = [part()?, part()?, part()?];
+
+ if iter.next().is_some() {
+ // Ensure we don't have more than 3 parts.
+ return Err(ParseVersionError::WrongNumberOfParts);
+ }
+
+ Ok(Self { parts })
+ }
+}
diff --git a/src/tools/tidy/src/features/version/tests.rs b/src/tools/tidy/src/features/version/tests.rs
new file mode 100644
index 000000000..31224fdf1
--- /dev/null
+++ b/src/tools/tidy/src/features/version/tests.rs
@@ -0,0 +1,38 @@
+use super::*;
+
+#[test]
+fn test_try_from_invalid_version() {
+ assert!("".parse::<Version>().is_err());
+ assert!("hello".parse::<Version>().is_err());
+ assert!("1.32.hi".parse::<Version>().is_err());
+ assert!("1.32..1".parse::<Version>().is_err());
+ assert!("1.32".parse::<Version>().is_err());
+ assert!("1.32.0.1".parse::<Version>().is_err());
+}
+
+#[test]
+fn test_try_from_single() {
+ assert_eq!("1.32.0".parse(), Ok(Version { parts: [1, 32, 0] }));
+ assert_eq!("1.0.0".parse(), Ok(Version { parts: [1, 0, 0] }));
+}
+
+#[test]
+fn test_compare() {
+ let v_1_0_0 = "1.0.0".parse::<Version>().unwrap();
+ let v_1_32_0 = "1.32.0".parse::<Version>().unwrap();
+ let v_1_32_1 = "1.32.1".parse::<Version>().unwrap();
+ assert!(v_1_0_0 < v_1_32_1);
+ assert!(v_1_0_0 < v_1_32_0);
+ assert!(v_1_32_0 < v_1_32_1);
+}
+
+#[test]
+fn test_to_string() {
+ let v_1_0_0 = "1.0.0".parse::<Version>().unwrap();
+ let v_1_32_1 = "1.32.1".parse::<Version>().unwrap();
+
+ assert_eq!(v_1_0_0.to_string(), "1.0.0");
+ assert_eq!(v_1_32_1.to_string(), "1.32.1");
+ assert_eq!(format!("{:<8}", v_1_32_1), "1.32.1 ");
+ assert_eq!(format!("{:>8}", v_1_32_1), " 1.32.1");
+}
diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs
new file mode 100644
index 000000000..09848462a
--- /dev/null
+++ b/src/tools/tidy/src/lib.rs
@@ -0,0 +1,110 @@
+//! Library used by tidy and other tools.
+//!
+//! This library contains the tidy lints and exposes them
+//! to be used by tools.
+
+use std::fs::File;
+use std::io::Read;
+use walkdir::{DirEntry, WalkDir};
+
+use std::path::Path;
+
+macro_rules! t {
+ ($e:expr, $p:expr) => {
+ match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed on {} with {}", stringify!($e), ($p).display(), e),
+ }
+ };
+
+ ($e:expr) => {
+ match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed with {}", stringify!($e), e),
+ }
+ };
+}
+
+macro_rules! tidy_error {
+ ($bad:expr, $fmt:expr) => ({
+ *$bad = true;
+ eprintln!("tidy error: {}", $fmt);
+ });
+ ($bad:expr, $fmt:expr, $($arg:tt)*) => ({
+ *$bad = true;
+ eprint!("tidy error: ");
+ eprintln!($fmt, $($arg)*);
+ });
+}
+
+pub mod bins;
+pub mod debug_artifacts;
+pub mod deps;
+pub mod edition;
+pub mod error_codes_check;
+pub mod errors;
+pub mod extdeps;
+pub mod features;
+pub mod pal;
+pub mod primitive_docs;
+pub mod style;
+pub mod target_specific_tests;
+pub mod ui_tests;
+pub mod unit_tests;
+pub mod unstable_book;
+
+fn filter_dirs(path: &Path) -> bool {
+ let skip = [
+ "tidy-test-file",
+ "compiler/rustc_codegen_cranelift",
+ "compiler/rustc_codegen_gcc",
+ "src/llvm-project",
+ "library/backtrace",
+ "library/portable-simd",
+ "library/stdarch",
+ "src/tools/cargo",
+ "src/tools/clippy",
+ "src/tools/miri",
+ "src/tools/rls",
+ "src/tools/rust-analyzer",
+ "src/tools/rust-installer",
+ "src/tools/rustfmt",
+ "src/doc/book",
+ // Filter RLS output directories
+ "target/rls",
+ ];
+ skip.iter().any(|p| path.ends_with(p))
+}
+
+fn walk_many(
+ paths: &[&Path],
+ skip: &mut dyn FnMut(&Path) -> bool,
+ f: &mut dyn FnMut(&DirEntry, &str),
+) {
+ for path in paths {
+ walk(path, skip, f);
+ }
+}
+
+fn walk(path: &Path, skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&DirEntry, &str)) {
+ let mut contents = String::new();
+ walk_no_read(path, skip, &mut |entry| {
+ contents.clear();
+ if t!(File::open(entry.path()), entry.path()).read_to_string(&mut contents).is_err() {
+ contents.clear();
+ }
+ f(&entry, &contents);
+ });
+}
+
+fn walk_no_read(path: &Path, skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&DirEntry)) {
+ let walker = WalkDir::new(path).into_iter().filter_entry(|e| !skip(e.path()));
+ for entry in walker {
+ if let Ok(entry) = entry {
+ if entry.file_type().is_dir() {
+ continue;
+ }
+ f(&entry);
+ }
+ }
+}
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
new file mode 100644
index 000000000..aa8d8b4f6
--- /dev/null
+++ b/src/tools/tidy/src/main.rs
@@ -0,0 +1,112 @@
+//! Tidy checks source code in this repository.
+//!
+//! This program runs all of the various tidy checks for style, cleanliness,
+//! etc. This is run by default on `./x.py test` and as part of the auto
+//! builders. The tidy checks can be executed with `./x.py test tidy`.
+
+use tidy::*;
+
+use crossbeam_utils::thread::{scope, ScopedJoinHandle};
+use std::collections::VecDeque;
+use std::env;
+use std::num::NonZeroUsize;
+use std::path::PathBuf;
+use std::process;
+use std::str::FromStr;
+use std::sync::atomic::{AtomicBool, Ordering};
+
+fn main() {
+ let root_path: PathBuf = env::args_os().nth(1).expect("need path to root of repo").into();
+ let cargo: PathBuf = env::args_os().nth(2).expect("need path to cargo").into();
+ let output_directory: PathBuf =
+ env::args_os().nth(3).expect("need path to output directory").into();
+ let concurrency: NonZeroUsize =
+ FromStr::from_str(&env::args().nth(4).expect("need concurrency"))
+ .expect("concurrency must be a number");
+
+ let src_path = root_path.join("src");
+ let library_path = root_path.join("library");
+ let compiler_path = root_path.join("compiler");
+
+ let args: Vec<String> = env::args().skip(1).collect();
+
+ let verbose = args.iter().any(|s| *s == "--verbose");
+
+ let bad = std::sync::Arc::new(AtomicBool::new(false));
+
+ scope(|s| {
+ let mut handles: VecDeque<ScopedJoinHandle<'_, ()>> =
+ VecDeque::with_capacity(concurrency.get());
+
+ macro_rules! check {
+ ($p:ident $(, $args:expr)* ) => {
+ while handles.len() >= concurrency.get() {
+ handles.pop_front().unwrap().join().unwrap();
+ }
+
+ let handle = s.spawn(|_| {
+ let mut flag = false;
+ $p::check($($args),* , &mut flag);
+ if (flag) {
+ bad.store(true, Ordering::Relaxed);
+ }
+ });
+ handles.push_back(handle);
+ }
+ }
+
+ check!(target_specific_tests, &src_path);
+
+ // Checks that are done on the cargo workspace.
+ check!(deps, &root_path, &cargo);
+ check!(extdeps, &root_path);
+
+ // Checks over tests.
+ check!(debug_artifacts, &src_path);
+ check!(ui_tests, &src_path);
+
+ // Checks that only make sense for the compiler.
+ check!(errors, &compiler_path);
+ check!(error_codes_check, &[&src_path, &compiler_path]);
+
+ // Checks that only make sense for the std libs.
+ check!(pal, &library_path);
+ check!(primitive_docs, &library_path);
+
+ // Checks that need to be done for both the compiler and std libraries.
+ check!(unit_tests, &src_path);
+ check!(unit_tests, &compiler_path);
+ check!(unit_tests, &library_path);
+
+ if bins::check_filesystem_support(&[&root_path], &output_directory) {
+ check!(bins, &root_path);
+ }
+
+ check!(style, &src_path);
+ check!(style, &compiler_path);
+ check!(style, &library_path);
+
+ check!(edition, &src_path);
+ check!(edition, &compiler_path);
+ check!(edition, &library_path);
+
+ let collected = {
+ while handles.len() >= concurrency.get() {
+ handles.pop_front().unwrap().join().unwrap();
+ }
+ let mut flag = false;
+ let r = features::check(&src_path, &compiler_path, &library_path, &mut flag, verbose);
+ if flag {
+ bad.store(true, Ordering::Relaxed);
+ }
+ r
+ };
+ check!(unstable_book, &src_path, collected);
+ })
+ .unwrap();
+
+ if bad.load(Ordering::Relaxed) {
+ eprintln!("some tidy checks failed");
+ process::exit(1);
+ }
+}
diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs
new file mode 100644
index 000000000..c5414233f
--- /dev/null
+++ b/src/tools/tidy/src/pal.rs
@@ -0,0 +1,209 @@
+//! Tidy check to enforce rules about platform-specific code in std.
+//!
+//! This is intended to maintain existing standards of code
+//! organization in hopes that the standard library will continue to
+//! be refactored to isolate platform-specific bits, making porting
+//! easier; where "standard library" roughly means "all the
+//! dependencies of the std and test crates".
+//!
+//! This generally means placing restrictions on where `cfg(unix)`,
+//! `cfg(windows)`, `cfg(target_os)` and `cfg(target_env)` may appear,
+//! the basic objective being to isolate platform-specific code to the
+//! platform-specific `std::sys` modules, and to the allocation,
+//! unwinding, and libc crates.
+//!
+//! Following are the basic rules, though there are currently
+//! exceptions:
+//!
+//! - core may not have platform-specific code.
+//! - libpanic_abort may have platform-specific code.
+//! - libpanic_unwind may have platform-specific code.
+//! - libunwind may have platform-specific code.
+//! - other crates in the std facade may not.
+//! - std may have platform-specific code in the following places:
+//! - `sys/`
+//! - `os/`
+//!
+//! `std/sys_common` should _not_ contain platform-specific code.
+//! Finally, because std contains tests with platform-specific
+//! `ignore` attributes, once the parser encounters `mod tests`,
+//! platform-specific cfgs are allowed. Not sure yet how to deal with
+//! this in the long term.
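+//!
+//! For example, a bare `#[cfg(target_os = "linux")]` in a crate that is not on
+//! the exception list below is normally reported by this check.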
+
+use std::iter::Iterator;
+use std::path::Path;
+
+// Paths that may contain platform-specific code.
+const EXCEPTION_PATHS: &[&str] = &[
+ "library/panic_abort",
+ "library/panic_unwind",
+ "library/unwind",
+ "library/rtstartup", // Not sure what to do about this. magic stuff for mingw
+ "library/term", // Not sure how to make this crate portable, but test crate needs it.
+ "library/test", // Probably should defer to unstable `std::sys` APIs.
+ // The `VaList` implementation must have platform specific code.
+ // The Windows implementation of a `va_list` is always a character
+ // pointer regardless of the target architecture. As a result,
+ // we must use `#[cfg(windows)]` to conditionally compile the
+ // correct `VaList` structure for windows.
+ "library/core/src/ffi/mod.rs",
+ "library/std/src/sys/", // Platform-specific code for std lives here.
+ "library/std/src/os", // Platform-specific public interfaces
+ // Temporary `std` exceptions
+ // FIXME: platform-specific code should be moved to `sys`
+ "library/std/src/io/copy.rs",
+ "library/std/src/io/stdio.rs",
+ "library/std/src/f32.rs",
+ "library/std/src/f64.rs",
+ "library/std/src/path.rs",
+ "library/std/src/sys_common", // Should only contain abstractions over platforms
+ "library/std/src/net/test.rs", // Utility helpers for tests
+ "library/std/src/panic.rs", // fuchsia-specific panic backtrace handling
+];
+
+pub fn check(path: &Path, bad: &mut bool) {
+ // Sanity check that the complex parsing here works.
+ let mut saw_target_arch = false;
+ let mut saw_cfg_bang = false;
+ super::walk(path, &mut super::filter_dirs, &mut |entry, contents| {
+ let file = entry.path();
+ let filestr = file.to_string_lossy().replace("\\", "/");
+ if !filestr.ends_with(".rs") {
+ return;
+ }
+
+ let is_exception_path = EXCEPTION_PATHS.iter().any(|s| filestr.contains(&**s));
+ if is_exception_path {
+ return;
+ }
+
+ // exclude tests and benchmarks as some platforms do not support all tests
+ if filestr.contains("tests") || filestr.contains("benches") {
+ return;
+ }
+
+ check_cfgs(contents, &file, bad, &mut saw_target_arch, &mut saw_cfg_bang);
+ });
+
+ assert!(saw_target_arch);
+ assert!(saw_cfg_bang);
+}
+
+fn check_cfgs(
+ contents: &str,
+ file: &Path,
+ bad: &mut bool,
+ saw_target_arch: &mut bool,
+ saw_cfg_bang: &mut bool,
+) {
+ // Pull out all `cfg(...)` and `cfg!(...)` strings.
+ let cfgs = parse_cfgs(contents);
+
+ let mut line_numbers: Option<Vec<usize>> = None;
+ let mut err = |idx: usize, cfg: &str| {
+ if line_numbers.is_none() {
+ line_numbers = Some(contents.match_indices('\n').map(|(i, _)| i).collect());
+ }
+ let line_numbers = line_numbers.as_ref().expect("");
+ let line = match line_numbers.binary_search(&idx) {
+ Ok(_) => unreachable!(),
+ Err(i) => i + 1,
+ };
+ tidy_error!(bad, "{}:{}: platform-specific cfg: {}", file.display(), line, cfg);
+ };
+
+ for (idx, cfg) in cfgs {
+ // Sanity check that the parsing here works.
+ if !*saw_target_arch && cfg.contains("target_arch") {
+ *saw_target_arch = true
+ }
+ if !*saw_cfg_bang && cfg.contains("cfg!") {
+ *saw_cfg_bang = true
+ }
+
+ let contains_platform_specific_cfg = cfg.contains("target_os")
+ || cfg.contains("target_env")
+ || cfg.contains("target_abi")
+ || cfg.contains("target_vendor")
+ || cfg.contains("unix")
+ || cfg.contains("windows");
+
+ if !contains_platform_specific_cfg {
+ continue;
+ }
+
+ let preceded_by_doc_comment = {
+ let pre_contents = &contents[..idx];
+ let pre_newline = pre_contents.rfind('\n');
+ let pre_doc_comment = pre_contents.rfind("///");
+ match (pre_newline, pre_doc_comment) {
+ (Some(n), Some(c)) => n < c,
+ (None, Some(_)) => true,
+ (_, None) => false,
+ }
+ };
+
+ if preceded_by_doc_comment {
+ continue;
+ }
+
+ // exclude tests as some platforms do not support all tests
+ if cfg.contains("test") {
+ continue;
+ }
+
+ err(idx, cfg);
+ }
+}
+
+fn parse_cfgs(contents: &str) -> Vec<(usize, &str)> {
+ let candidate_cfgs = contents.match_indices("cfg");
+ let candidate_cfg_idxs = candidate_cfgs.map(|(i, _)| i);
+ // This is pulling out the indexes of all "cfg" strings
+ // that appear to be tokens followed by a parenthesis.
+ let cfgs = candidate_cfg_idxs.filter(|i| {
+ let pre_idx = i.saturating_sub(1);
+ let succeeds_non_ident = !contents
+ .as_bytes()
+ .get(pre_idx)
+ .cloned()
+ .map(char::from)
+ .map(char::is_alphanumeric)
+ .unwrap_or(false);
+ let contents_after = &contents[*i..];
+ let first_paren = contents_after.find('(');
+ let paren_idx = first_paren.map(|ip| i + ip);
+ let precedes_whitespace_and_paren = paren_idx
+ .map(|ip| {
+ let maybe_space = &contents[*i + "cfg".len()..ip];
+ maybe_space.chars().all(|c| char::is_whitespace(c) || c == '!')
+ })
+ .unwrap_or(false);
+
+ succeeds_non_ident && precedes_whitespace_and_paren
+ });
+
+ cfgs.flat_map(|i| {
+ let mut depth = 0;
+ let contents_from = &contents[i..];
+ for (j, byte) in contents_from.bytes().enumerate() {
+ match byte {
+ b'(' => {
+ depth += 1;
+ }
+ b')' => {
+ depth -= 1;
+ if depth == 0 {
+ return Some((i, &contents_from[..=j]));
+ }
+ }
+ _ => {}
+ }
+ }
+
+ // if the parentheses are unbalanced just ignore this cfg -- it'll be caught when attempting
+ // to run the compiler, and there's no real reason to lint it separately here
+ None
+ })
+ .collect()
+}
diff --git a/src/tools/tidy/src/primitive_docs.rs b/src/tools/tidy/src/primitive_docs.rs
new file mode 100644
index 000000000..f3200e0af
--- /dev/null
+++ b/src/tools/tidy/src/primitive_docs.rs
@@ -0,0 +1,17 @@
+//! Tidy check to make sure `library/{std,core}/src/primitive_docs.rs` have identical contents. They are
+//! different files so that relative links work properly without having to have `CARGO_PKG_NAME`
+//! set, but conceptually they should always be the same.
+
+use std::path::Path;
+
+pub fn check(library_path: &Path, bad: &mut bool) {
+ let std_name = "std/src/primitive_docs.rs";
+ let core_name = "core/src/primitive_docs.rs";
+ let std_contents = std::fs::read_to_string(library_path.join(std_name))
+ .unwrap_or_else(|e| panic!("failed to read library/{std_name}: {e}"));
+ let core_contents = std::fs::read_to_string(library_path.join(core_name))
+ .unwrap_or_else(|e| panic!("failed to read library/{core_name}: {e}"));
+ if std_contents != core_contents {
+ tidy_error!(bad, "library/{core_name} and library/{std_name} have different contents");
+ }
+}
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
new file mode 100644
index 000000000..3cf44a2d7
--- /dev/null
+++ b/src/tools/tidy/src/style.rs
@@ -0,0 +1,423 @@
+//! Tidy check to enforce various stylistic guidelines on the Rust codebase.
+//!
+//! Example checks are:
+//!
+//! * No lines over 100 characters (in non-Rust files).
+//! * No files with over 3000 lines (in non-Rust files).
+//! * No tabs.
+//! * No trailing whitespace.
+//! * No CR characters.
+//! * No `TODO` or `XXX` directives.
+//! * No unexplained ` ```ignore ` or ` ```rust,ignore ` doc tests.
+//!
+//! Note that some of these rules are excluded from Rust files because we enforce rustfmt. It is
+//! preferable to be formatted rather than tidy-clean.
+//!
+//! A number of these checks can be opted-out of with various directives of the form:
+//! `// ignore-tidy-CHECK-NAME`.
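+//! For example, adding `// ignore-tidy-linelength` to a file suppresses the
+//! line-length check for that file.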
+
+use regex::Regex;
+use std::path::Path;
+
+/// Error code markdown is restricted to 80 columns because they can be
+/// displayed on the console with `rustc --explain`.
+const ERROR_CODE_COLS: usize = 80;
+const COLS: usize = 100;
+
+const LINES: usize = 3000;
+
+const UNEXPLAINED_IGNORE_DOCTEST_INFO: &str = r#"unexplained "```ignore" doctest; try one:
+
+* make the test actually pass, by adding necessary imports and declarations, or
+* use "```text", if the code is not Rust code, or
+* use "```compile_fail,Ennnn", if the code is expected to fail at compile time, or
+* use "```should_panic", if the code is expected to fail at run time, or
+* use "```no_run", if the code should type-check but not necessarily be linkable/runnable, or
+* explain it like "```ignore (cannot-test-this-because-xxxx)", if the annotation cannot be avoided.
+
+"#;
+
+const LLVM_UNREACHABLE_INFO: &str = r"\
+C++ code used llvm_unreachable, which triggers undefined behavior
+when executed when assertions are disabled.
+Use llvm::report_fatal_error for increased robustness.";
+
+const ANNOTATIONS_TO_IGNORE: &[&str] = &[
+ "// @!has",
+ "// @has",
+ "// @matches",
+ "// CHECK",
+ "// EMIT_MIR",
+ "// compile-flags",
+ "// error-pattern",
+ "// gdb",
+ "// lldb",
+ "// cdb",
+ "// normalize-stderr-test",
+];
+
+// Intentionally written in decimal rather than hex
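+// (e.g. 3405691582 is 0xCAFEBABE and 3735927486 is 0xDEADBABE)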
+const PROBLEMATIC_CONSTS: &[u32] = &[
+ 184594741, 2880289470, 2881141438, 2965027518, 2976579765, 3203381950, 3405691582, 3405697037,
+ 3735927486, 4027431614, 4276992702,
+];
+
+/// Parser states for `line_is_url`.
+#[derive(Clone, Copy, PartialEq)]
+#[allow(non_camel_case_types)]
+enum LIUState {
+ EXP_COMMENT_START,
+ EXP_LINK_LABEL_OR_URL,
+ EXP_URL,
+ EXP_END,
+}
+
+/// Returns `true` if `line` appears to be a line comment containing a URL,
+/// possibly with a Markdown link label in front, and nothing else.
+/// The Markdown link label, if present, may not contain whitespace.
+/// Lines of this form are allowed to be overlength, because Markdown
+/// offers no way to split a line in the middle of a URL, and the lengths
+/// of URLs to external references are beyond our control.
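+///
+/// For example, `// [rust-lang]: https://www.rust-lang.org` and
+/// `// https://github.com/rust-lang/rust` are both accepted as URL lines.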
+fn line_is_url(is_error_code: bool, columns: usize, line: &str) -> bool {
+ // more basic check for markdown, to avoid complexity in implementing two state machines
+ if is_error_code {
+ return line.starts_with('[') && line.contains("]:") && line.contains("http");
+ }
+
+ use self::LIUState::*;
+ let mut state: LIUState = EXP_COMMENT_START;
+ let is_url = |w: &str| w.starts_with("http://") || w.starts_with("https://");
+
+ for tok in line.split_whitespace() {
+ match (state, tok) {
+ (EXP_COMMENT_START, "//") | (EXP_COMMENT_START, "///") | (EXP_COMMENT_START, "//!") => {
+ state = EXP_LINK_LABEL_OR_URL
+ }
+
+ (EXP_LINK_LABEL_OR_URL, w)
+ if w.len() >= 4 && w.starts_with('[') && w.ends_with("]:") =>
+ {
+ state = EXP_URL
+ }
+
+ (EXP_LINK_LABEL_OR_URL, w) if is_url(w) => state = EXP_END,
+
+ (EXP_URL, w) if is_url(w) || w.starts_with("../") => state = EXP_END,
+
+ (_, w) if w.len() > columns && is_url(w) => state = EXP_END,
+
+ (_, _) => {}
+ }
+ }
+
+ state == EXP_END
+}
+
+/// Returns `true` if `line` can be ignored. This is the case when it contains
+/// an annotation that is explicitly ignored.
+fn should_ignore(line: &str) -> bool {
+ // Matches test annotations like `//~ ERROR text`.
+ // This mirrors the regex in src/tools/compiletest/src/runtest.rs, please
+ // update both if either are changed.
+ let re = Regex::new("\\s*//(\\[.*\\])?~.*").unwrap();
+ re.is_match(line) || ANNOTATIONS_TO_IGNORE.iter().any(|a| line.contains(a))
+}
+
+/// Returns `true` if `line` is allowed to be longer than the normal limit.
+fn long_line_is_ok(extension: &str, is_error_code: bool, max_columns: usize, line: &str) -> bool {
+ if extension != "md" || is_error_code {
+ if line_is_url(is_error_code, max_columns, line) || should_ignore(line) {
+ return true;
+ }
+ } else if extension == "md" {
+ // non-error code markdown is allowed to be any length
+ return true;
+ }
+
+ false
+}
+
+enum Directive {
+ /// By default, tidy always warns against style issues.
+ Deny,
+
+ /// `Ignore(false)` means that an `ignore-tidy-*` directive
+ /// has been provided, but is unnecessary. `Ignore(true)`
+ /// means that it is necessary (i.e. a warning would be
+ /// produced if `ignore-tidy-*` was not present).
+ Ignore(bool),
+}
+
+fn contains_ignore_directive(can_contain: bool, contents: &str, check: &str) -> Directive {
+ if !can_contain {
+ return Directive::Deny;
+ }
+ // Update `can_contain` when changing this
+ if contents.contains(&format!("// ignore-tidy-{check}"))
+ || contents.contains(&format!("# ignore-tidy-{check}"))
+ || contents.contains(&format!("/* ignore-tidy-{check} */"))
+ {
+ Directive::Ignore(false)
+ } else {
+ Directive::Deny
+ }
+}
+
+macro_rules! suppressible_tidy_err {
+ ($err:ident, $skip:ident, $msg:expr) => {
+ if let Directive::Deny = $skip {
+ $err($msg);
+ } else {
+ $skip = Directive::Ignore(true);
+ }
+ };
+}
+
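+/// Returns `true` if `full_path` has an ancestor directory named `folder_to_find`
+/// whose parent directory is named `parent_folder_to_find`, e.g.
+/// `is_in(file, "src", "librustdoc")` for files under a `src/librustdoc` directory.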
+pub fn is_in(full_path: &Path, parent_folder_to_find: &str, folder_to_find: &str) -> bool {
+ if let Some(parent) = full_path.parent() {
+ if parent.file_name().map_or_else(
+ || false,
+ |f| {
+ f.to_string_lossy() == folder_to_find
+ && parent
+ .parent()
+ .and_then(|f| f.file_name())
+ .map_or_else(|| false, |f| f == parent_folder_to_find)
+ },
+ ) {
+ true
+ } else {
+ is_in(parent, parent_folder_to_find, folder_to_find)
+ }
+ } else {
+ false
+ }
+}
+
+fn skip_markdown_path(path: &Path) -> bool {
+ // These aren't ready for tidy.
+ const SKIP_MD: &[&str] = &[
+ "src/doc/edition-guide",
+ "src/doc/embedded-book",
+ "src/doc/nomicon",
+ "src/doc/reference",
+ "src/doc/rust-by-example",
+ "src/doc/rustc-dev-guide",
+ ];
+ SKIP_MD.iter().any(|p| path.ends_with(p))
+}
+
+fn is_unexplained_ignore(extension: &str, line: &str) -> bool {
+ if !line.ends_with("```ignore") && !line.ends_with("```rust,ignore") {
+ return false;
+ }
+ if extension == "md" && line.trim().starts_with("//") {
+ // Markdown examples may include doc comments with ignore inside a
+ // code block.
+ return false;
+ }
+ true
+}
+
+pub fn check(path: &Path, bad: &mut bool) {
+ fn skip(path: &Path) -> bool {
+ super::filter_dirs(path) || skip_markdown_path(path)
+ }
+ let problematic_consts_strings: Vec<String> = (PROBLEMATIC_CONSTS.iter().map(u32::to_string))
+ .chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:x}", v)))
+ .chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:X}", v)))
+ .collect();
+ super::walk(path, &mut skip, &mut |entry, contents| {
+ let file = entry.path();
+ let filename = file.file_name().unwrap().to_string_lossy();
+ let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md", ".css"];
+ if extensions.iter().all(|e| !filename.ends_with(e)) || filename.starts_with(".#") {
+ return;
+ }
+
+ let is_style_file = filename.ends_with(".css");
+ let under_rustfmt = filename.ends_with(".rs") &&
+ // This list should ideally be sourced from rustfmt.toml but we don't want to add a toml
+ // parser to tidy.
+ !file.ancestors().any(|a| {
+ a.ends_with("src/test") ||
+ a.ends_with("src/doc/book")
+ });
+
+ if is_style_file && !is_in(file, "src", "librustdoc") {
+ // We only check CSS files in rustdoc.
+ return;
+ }
+
+ if contents.is_empty() {
+ tidy_error!(bad, "{}: empty file", file.display());
+ }
+
+ let extension = file.extension().unwrap().to_string_lossy();
+ let is_error_code = extension == "md" && is_in(file, "src", "error_codes");
+
+ let max_columns = if is_error_code { ERROR_CODE_COLS } else { COLS };
+
+ let can_contain = contents.contains("// ignore-tidy-")
+ || contents.contains("# ignore-tidy-")
+ || contents.contains("/* ignore-tidy-");
+ // Enable testing ICEs that require specific (untidy)
+ // file formats easily, e.g. `issue-1234-ignore-tidy.rs`
+ if filename.contains("ignore-tidy") {
+ return;
+ }
+ let mut skip_cr = contains_ignore_directive(can_contain, &contents, "cr");
+ let mut skip_undocumented_unsafe =
+ contains_ignore_directive(can_contain, &contents, "undocumented-unsafe");
+ let mut skip_tab = contains_ignore_directive(can_contain, &contents, "tab");
+ let mut skip_line_length = contains_ignore_directive(can_contain, &contents, "linelength");
+ let mut skip_file_length = contains_ignore_directive(can_contain, &contents, "filelength");
+ let mut skip_end_whitespace =
+ contains_ignore_directive(can_contain, &contents, "end-whitespace");
+ let mut skip_trailing_newlines =
+ contains_ignore_directive(can_contain, &contents, "trailing-newlines");
+ let mut skip_leading_newlines =
+ contains_ignore_directive(can_contain, &contents, "leading-newlines");
+ let mut skip_copyright = contains_ignore_directive(can_contain, &contents, "copyright");
+ let mut leading_new_lines = false;
+ let mut trailing_new_lines = 0;
+ let mut lines = 0;
+ let mut last_safety_comment = false;
+ for (i, line) in contents.split('\n').enumerate() {
+ let mut err = |msg: &str| {
+ tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg);
+ };
+ if !under_rustfmt
+ && line.chars().count() > max_columns
+ && !long_line_is_ok(&extension, is_error_code, max_columns, line)
+ {
+ suppressible_tidy_err!(
+ err,
+ skip_line_length,
+ &format!("line longer than {max_columns} chars")
+ );
+ }
+ if !is_style_file && line.contains('\t') {
+ suppressible_tidy_err!(err, skip_tab, "tab character");
+ }
+ if line.ends_with(' ') || line.ends_with('\t') {
+ suppressible_tidy_err!(err, skip_end_whitespace, "trailing whitespace");
+ }
+ if is_style_file && line.starts_with(' ') {
+ err("CSS files use tabs for indent");
+ }
+ if line.contains('\r') {
+ suppressible_tidy_err!(err, skip_cr, "CR character");
+ }
+ if filename != "style.rs" {
+ if line.contains("TODO") {
+ err("TODO is deprecated; use FIXME")
+ }
+ if line.contains("//") && line.contains(" XXX") {
+ err("XXX is deprecated; use FIXME")
+ }
+ for s in problematic_consts_strings.iter() {
+ if line.contains(s) {
+ err("Don't use magic numbers that spell things (consider 0x12345678)");
+ }
+ }
+ }
+ let is_test = || file.components().any(|c| c.as_os_str() == "tests");
+ // for now we just check libcore
+ if line.contains("unsafe {") && !line.trim().starts_with("//") && !last_safety_comment {
+ if file.components().any(|c| c.as_os_str() == "core") && !is_test() {
+ suppressible_tidy_err!(err, skip_undocumented_unsafe, "undocumented unsafe");
+ }
+ }
+ if line.contains("// SAFETY:") {
+ last_safety_comment = true;
+ } else if line.trim().starts_with("//") || line.trim().is_empty() {
+ // keep previous value
+ } else {
+ last_safety_comment = false;
+ }
+ if (line.starts_with("// Copyright")
+ || line.starts_with("# Copyright")
+ || line.starts_with("Copyright"))
+ && (line.contains("Rust Developers") || line.contains("Rust Project Developers"))
+ {
+ suppressible_tidy_err!(
+ err,
+ skip_copyright,
+ "copyright notices attributed to the Rust Project Developers are deprecated"
+ );
+ }
+ if is_unexplained_ignore(&extension, line) {
+ err(UNEXPLAINED_IGNORE_DOCTEST_INFO);
+ }
+ if filename.ends_with(".cpp") && line.contains("llvm_unreachable") {
+ err(LLVM_UNREACHABLE_INFO);
+ }
+ if line.is_empty() {
+ if i == 0 {
+ leading_new_lines = true;
+ }
+ trailing_new_lines += 1;
+ } else {
+ trailing_new_lines = 0;
+ }
+
+ if !line.trim().starts_with("//") {
+ lines += 1;
+ }
+ }
+ if leading_new_lines {
+ let mut err = |_| {
+ tidy_error!(bad, "{}: leading newline", file.display());
+ };
+ suppressible_tidy_err!(err, skip_leading_newlines, "missing leading newline");
+ }
+ let mut err = |msg: &str| {
+ tidy_error!(bad, "{}: {}", file.display(), msg);
+ };
+ match trailing_new_lines {
+ 0 => suppressible_tidy_err!(err, skip_trailing_newlines, "missing trailing newline"),
+ 1 => {}
+ n => suppressible_tidy_err!(
+ err,
+ skip_trailing_newlines,
+ &format!("too many trailing newlines ({n})")
+ ),
+ };
+ if lines > LINES {
+ let mut err = |_| {
+ tidy_error!(
+ bad,
+ "{}: too many lines ({}) (add `// \
+ ignore-tidy-filelength` to the file to suppress this error)",
+ file.display(),
+ lines
+ );
+ };
+ suppressible_tidy_err!(err, skip_file_length, "");
+ }
+
+ if let Directive::Ignore(false) = skip_cr {
+ tidy_error!(bad, "{}: ignoring CR characters unnecessarily", file.display());
+ }
+ if let Directive::Ignore(false) = skip_tab {
+ tidy_error!(bad, "{}: ignoring tab characters unnecessarily", file.display());
+ }
+ if let Directive::Ignore(false) = skip_end_whitespace {
+ tidy_error!(bad, "{}: ignoring trailing whitespace unnecessarily", file.display());
+ }
+ if let Directive::Ignore(false) = skip_trailing_newlines {
+ tidy_error!(bad, "{}: ignoring trailing newlines unnecessarily", file.display());
+ }
+ if let Directive::Ignore(false) = skip_leading_newlines {
+ tidy_error!(bad, "{}: ignoring leading newlines unnecessarily", file.display());
+ }
+ if let Directive::Ignore(false) = skip_copyright {
+ tidy_error!(bad, "{}: ignoring copyright unnecessarily", file.display());
+ }
+ // We deliberately do not warn about these being unnecessary,
+ // that would just lead to annoying churn.
+ let _unused = skip_line_length;
+ let _unused = skip_file_length;
+ })
+}
diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs
new file mode 100644
index 000000000..723684bfa
--- /dev/null
+++ b/src/tools/tidy/src/target_specific_tests.rs
@@ -0,0 +1,96 @@
+//! Tidy check to ensure that all target-specific tests (those that require a `--target` flag)
+//! also require the prerequisite LLVM components to run.
+
+use std::collections::BTreeMap;
+use std::path::Path;
+
+const COMMENT: &str = "//";
+const LLVM_COMPONENTS_HEADER: &str = "needs-llvm-components:";
+const COMPILE_FLAGS_HEADER: &str = "compile-flags:";
+
+/// Iterate through compiletest headers in a test contents.
+///
+/// Adjusted from compiletest/src/header.rs.
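+///
+/// For example, `//[aarch64] compile-flags: --target aarch64-unknown-linux-gnu` is
+/// passed to `it` as `(Some("aarch64"), "compile-flags: --target aarch64-unknown-linux-gnu")`.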
+fn iter_header<'a>(contents: &'a str, it: &mut dyn FnMut(Option<&'a str>, &'a str)) {
+ for ln in contents.lines() {
+ let ln = ln.trim();
+ if ln.starts_with(COMMENT) && ln[COMMENT.len()..].trim_start().starts_with('[') {
+ if let Some(close_brace) = ln.find(']') {
+ let open_brace = ln.find('[').unwrap();
+ let lncfg = &ln[open_brace + 1..close_brace];
+ it(Some(lncfg), ln[(close_brace + 1)..].trim_start());
+ } else {
+ panic!("malformed condition directive: expected `//[foo]`, found `{ln}`")
+ }
+ } else if ln.starts_with(COMMENT) {
+ it(None, ln[COMMENT.len()..].trim_start());
+ }
+ }
+}
+
+#[derive(Default, Debug)]
+struct RevisionInfo<'a> {
+ target_arch: Option<&'a str>,
+ llvm_components: Option<Vec<&'a str>>,
+}
+
+pub fn check(path: &Path, bad: &mut bool) {
+ let tests = path.join("test");
+ super::walk(
+ &tests,
+ &mut |path| path.extension().map(|p| p == "rs") == Some(false),
+ &mut |entry, content| {
+ let file = entry.path().display();
+ let mut header_map = BTreeMap::new();
+ iter_header(content, &mut |cfg, directive| {
+ if let Some(value) = directive.strip_prefix(LLVM_COMPONENTS_HEADER) {
+ let info = header_map.entry(cfg).or_insert(RevisionInfo::default());
+ let comp_vec = info.llvm_components.get_or_insert(Vec::new());
+ for component in value.split(' ') {
+ let component = component.trim();
+ if !component.is_empty() {
+ comp_vec.push(component);
+ }
+ }
+ } else if directive.starts_with(COMPILE_FLAGS_HEADER) {
+ let compile_flags = &directive[COMPILE_FLAGS_HEADER.len()..];
+ if let Some((_, v)) = compile_flags.split_once("--target") {
+ if let Some((arch, _)) =
+ v.trim_start_matches(|c| c == ' ' || c == '=').split_once("-")
+ {
+ let info = header_map.entry(cfg).or_insert(RevisionInfo::default());
+ info.target_arch.replace(arch);
+ } else {
+ eprintln!("{file}: seems to have a malformed --target value");
+ *bad = true;
+ }
+ }
+ }
+ });
+ for (rev, RevisionInfo { target_arch, llvm_components }) in &header_map {
+ let rev = rev.unwrap_or("[unspecified]");
+ match (target_arch, llvm_components) {
+ (None, None) => {}
+ (Some(_), None) => {
+ eprintln!(
+ "{}: revision {} should specify `{}` as it has `--target` set",
+ file, rev, LLVM_COMPONENTS_HEADER
+ );
+ *bad = true;
+ }
+ (None, Some(_)) => {
+ eprintln!(
+ "{}: revision {} should not specify `{}` as it doesn't need `--target`",
+ file, rev, LLVM_COMPONENTS_HEADER
+ );
+ *bad = true;
+ }
+ (Some(_), Some(_)) => {
+ // FIXME: check specified components against the target architectures we
+ // gathered.
+ }
+ }
+ }
+ },
+ );
+}
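
A minimal sketch of how the `iter_header` helper above splits revision-specific directives, assuming the function is in scope; the test contents and the `rev1` revision name are invented for illustration.

```rust
fn main() {
    let contents = "\
//[rev1] compile-flags: --target aarch64-unknown-none
// needs-llvm-components: aarch64
";
    iter_header(contents, &mut |cfg, directive| {
        // First line:  cfg == Some("rev1"), directive == "compile-flags: --target aarch64-unknown-none"
        // Second line: cfg == None,         directive == "needs-llvm-components: aarch64"
        println!("{cfg:?}: {directive}");
    });
}
```
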
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
new file mode 100644
index 000000000..8ec5c3324
--- /dev/null
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -0,0 +1,82 @@
+//! Tidy check to ensure the following in UI test directories:
+//! - the number of entries in each directory must be less than `ENTRY_LIMIT`
+//! - there are no stray `.stderr` files
+
+use std::fs;
+use std::path::Path;
+
+const ENTRY_LIMIT: usize = 1000;
+// FIXME: The following limits should be reduced eventually.
+const ROOT_ENTRY_LIMIT: usize = 968;
+const ISSUES_ENTRY_LIMIT: usize = 2147;
+
+fn check_entries(path: &Path, bad: &mut bool) {
+ let dirs = walkdir::WalkDir::new(&path.join("test/ui"))
+ .into_iter()
+ .filter_entry(|e| e.file_type().is_dir());
+ for dir in dirs {
+ if let Ok(dir) = dir {
+ let dir_path = dir.path();
+
+ // Use special values for these dirs.
+ let is_root = path.join("test/ui") == dir_path;
+ let is_issues_dir = path.join("test/ui/issues") == dir_path;
+ let limit = if is_root {
+ ROOT_ENTRY_LIMIT
+ } else if is_issues_dir {
+ ISSUES_ENTRY_LIMIT
+ } else {
+ ENTRY_LIMIT
+ };
+
+ let count = std::fs::read_dir(dir_path).unwrap().count();
+ if count > limit {
+ tidy_error!(
+ bad,
+ "following path contains more than {} entries, \
+ you should move the test to some relevant subdirectory (current: {}): {}",
+ limit,
+ count,
+ dir_path.display()
+ );
+ }
+ }
+ }
+}
+
+pub fn check(path: &Path, bad: &mut bool) {
+ check_entries(&path, bad);
+ for path in &[&path.join("test/ui"), &path.join("test/ui-fulldeps")] {
+ super::walk_no_read(path, &mut |_| false, &mut |entry| {
+ let file_path = entry.path();
+ if let Some(ext) = file_path.extension() {
+ if ext == "stderr" || ext == "stdout" {
+ // Test output filenames have one of the formats:
+ // ```
+ // $testname.stderr
+ // $testname.$mode.stderr
+ // $testname.$revision.stderr
+ // $testname.$revision.$mode.stderr
+ // ```
+ //
+ // For now, just make sure that there is a corresponding
+ // `$testname.rs` file.
+ //
+ // NB: We do not use file_stem() as some file names have multiple `.`s and we
+ // must strip all of them.
+ let testname =
+ file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0;
+ if !file_path.with_file_name(testname).with_extension("rs").exists() {
+ tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path);
+ }
+
+ if let Ok(metadata) = fs::metadata(file_path) {
+ if metadata.len() == 0 {
+ tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path);
+ }
+ }
+ }
+ }
+ });
+ }
+}
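
A minimal sketch of the stem extraction used by the stray-file check above; the file name is invented.

```rust
fn main() {
    // `split_once('.')` strips every suffix after the first dot, unlike
    // `Path::file_stem`, which would only remove the last one.
    let file_name = "issue-12345.full.stderr";
    let testname = file_name.split_once('.').unwrap().0;
    assert_eq!(testname, "issue-12345");
    // The check then requires an `issue-12345.rs` file next to the output file.
}
```
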
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
new file mode 100644
index 000000000..f675b7865
--- /dev/null
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -0,0 +1,66 @@
+//! Tidy check to ensure `#[test]` and `#[bench]` are not used directly inside `core`.
+//!
+//! `#![no_core]` libraries cannot be tested directly due to duplicating lang
+//! items. All tests and benchmarks must be written externally in `core/{tests,benches}`.
+//!
+//! Outside of core, tests and benchmarks should be outlined into separate files
+//! named `tests.rs` or `benches.rs`, or into directories named `tests` or `benches`
+//! that are unconfigured during the normal build.
+
+use std::path::Path;
+
+pub fn check(root_path: &Path, bad: &mut bool) {
+ let core = &root_path.join("core");
+ let core_tests = &core.join("tests");
+ let core_benches = &core.join("benches");
+ let is_core = |path: &Path| {
+ path.starts_with(core) && !(path.starts_with(core_tests) || path.starts_with(core_benches))
+ };
+
+ let mut skip = |path: &Path| {
+ let file_name = path.file_name().unwrap_or_default();
+ if path.is_dir() {
+ super::filter_dirs(path)
+ || path.ends_with("src/test")
+ || path.ends_with("src/doc")
+ || (file_name == "tests" || file_name == "benches") && !is_core(path)
+ } else {
+ let extension = path.extension().unwrap_or_default();
+ extension != "rs"
+ || (file_name == "tests.rs" || file_name == "benches.rs") && !is_core(path)
+ // Library unit tests with non-standard file names
+ || path.ends_with("src/thread/local/dynamic_tests.rs")
+ || path.ends_with("src/sync/mpsc/sync_tests.rs")
+ }
+ };
+
+ super::walk(root_path, &mut skip, &mut |entry, contents| {
+ let path = entry.path();
+ let is_core = path.starts_with(core);
+ for (i, line) in contents.lines().enumerate() {
+ let line = line.trim();
+ let is_test = || line.contains("#[test]") && !line.contains("`#[test]");
+ let is_bench = || line.contains("#[bench]") && !line.contains("`#[bench]");
+ if !line.starts_with("//") && (is_test() || is_bench()) {
+ let explanation = if is_core {
+ "core unit tests and benchmarks must be placed into \
+ `core/tests` or `core/benches`"
+ } else {
+ "unit tests and benchmarks must be placed into \
+ separate files or directories named \
+ `tests.rs`, `benches.rs`, `tests` or `benches`"
+ };
+ let name = if is_test() { "test" } else { "bench" };
+ tidy_error!(
+ bad,
+ "`{}:{}` contains `#[{}]`; {}",
+ path.display(),
+ i + 1,
+ name,
+ explanation,
+ );
+ return;
+ }
+ }
+ });
+}
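
A minimal sketch of the line test applied above, with invented sample lines: a line counts as a stray test attribute unless it is a comment or the attribute is quoted in backticks (as in a doc comment).

```rust
fn main() {
    let flagged = |line: &str| {
        let line = line.trim();
        let is_test = line.contains("#[test]") && !line.contains("`#[test]");
        !line.starts_with("//") && is_test
    };
    assert!(flagged("    #[test]"));
    assert!(!flagged("// #[test] example inside a comment"));
    assert!(!flagged("/// Use `#[test]` to mark unit tests."));
}
```
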
diff --git a/src/tools/tidy/src/unstable_book.rs b/src/tools/tidy/src/unstable_book.rs
new file mode 100644
index 000000000..7dfb6224d
--- /dev/null
+++ b/src/tools/tidy/src/unstable_book.rs
@@ -0,0 +1,132 @@
+use crate::features::{CollectedFeatures, Features, Status};
+use std::collections::BTreeSet;
+use std::fs;
+use std::path::{Path, PathBuf};
+
+pub const PATH_STR: &str = "doc/unstable-book";
+
+pub const COMPILER_FLAGS_DIR: &str = "src/compiler-flags";
+
+pub const LANG_FEATURES_DIR: &str = "src/language-features";
+
+pub const LIB_FEATURES_DIR: &str = "src/library-features";
+
+/// Builds the path to the Unstable Book source directory from the Rust 'src' directory.
+pub fn unstable_book_path(base_src_path: &Path) -> PathBuf {
+ base_src_path.join(PATH_STR)
+}
+
+/// Builds the path to the directory where the language features are documented within the
+/// Unstable Book source directory.
+pub fn unstable_book_lang_features_path(base_src_path: &Path) -> PathBuf {
+ unstable_book_path(base_src_path).join(LANG_FEATURES_DIR)
+}
+
+/// Builds the path to the directory where the library features are documented within the
+/// Unstable Book source directory.
+pub fn unstable_book_lib_features_path(base_src_path: &Path) -> PathBuf {
+ unstable_book_path(base_src_path).join(LIB_FEATURES_DIR)
+}
+
+/// Tests whether `DirEntry` is a file.
+fn dir_entry_is_file(dir_entry: &fs::DirEntry) -> bool {
+ dir_entry.file_type().expect("could not determine file type of directory entry").is_file()
+}
+
+/// Retrieves names of all unstable features.
+pub fn collect_unstable_feature_names(features: &Features) -> BTreeSet<String> {
+ features
+ .iter()
+ .filter(|&(_, ref f)| f.level == Status::Unstable)
+ .map(|(name, _)| name.replace('_', "-"))
+ .collect()
+}
+
+pub fn collect_unstable_book_section_file_names(dir: &Path) -> BTreeSet<String> {
+ fs::read_dir(dir)
+ .expect("could not read directory")
+ .map(|entry| entry.expect("could not read directory entry"))
+ .filter(dir_entry_is_file)
+ .map(|entry| entry.path())
+ .filter(|path| path.extension().map(|e| e.to_str().unwrap()) == Some("md"))
+ .map(|path| path.file_stem().unwrap().to_str().unwrap().into())
+ .collect()
+}
+
+/// Retrieves file names of all language feature sections in the Unstable Book with:
+///
+/// * hyphens replaced by underscores,
+/// * the markdown suffix ('.md') removed.
+fn collect_unstable_book_lang_features_section_file_names(
+ base_src_path: &Path,
+) -> BTreeSet<String> {
+ collect_unstable_book_section_file_names(&unstable_book_lang_features_path(base_src_path))
+}
+
+/// Retrieves file names of all library feature sections in the Unstable Book with:
+///
+/// * hyphens replaced by underscores,
+/// * the markdown suffix ('.md') removed.
+fn collect_unstable_book_lib_features_section_file_names(base_src_path: &Path) -> BTreeSet<String> {
+ collect_unstable_book_section_file_names(&unstable_book_lib_features_path(base_src_path))
+}
+
+pub fn check(path: &Path, features: CollectedFeatures, bad: &mut bool) {
+ let lang_features = features.lang;
+ let lib_features = features
+ .lib
+ .into_iter()
+ .filter(|&(ref name, _)| !lang_features.contains_key(name))
+ .collect::<Features>();
+
+ // Library features
+ let unstable_lib_feature_names = collect_unstable_feature_names(&lib_features);
+ let unstable_book_lib_features_section_file_names =
+ collect_unstable_book_lib_features_section_file_names(path);
+
+ // Language features
+ let unstable_lang_feature_names = collect_unstable_feature_names(&lang_features);
+ let unstable_book_lang_features_section_file_names =
+ collect_unstable_book_lang_features_section_file_names(path);
+
+ // Check for Unstable Book sections that don't have a corresponding unstable feature
+ for feature_name in &unstable_book_lib_features_section_file_names - &unstable_lib_feature_names
+ {
+ if !unstable_lang_feature_names.contains(&feature_name) {
+ tidy_error!(
+ bad,
+ "The Unstable Book has a 'library feature' section '{}' which doesn't \
+ correspond to an unstable library feature",
+ feature_name
+ );
+ }
+ }
+
+ // Check for Unstable Book sections that don't have a corresponding unstable feature.
+ for feature_name in
+ &unstable_book_lang_features_section_file_names - &unstable_lang_feature_names
+ {
+ tidy_error!(
+ bad,
+ "The Unstable Book has a 'language feature' section '{}' which doesn't \
+ correspond to an unstable language feature",
+ feature_name
+ )
+ }
+
+ // List unstable features that don't have Unstable Book sections.
+ // Remove the comment marker if you want the list printed.
+ /*
+ println!("Lib features without unstable book sections:");
+ for feature_name in &unstable_lang_feature_names -
+ &unstable_book_lang_features_section_file_names {
+ println!(" * {} {:?}", feature_name, lib_features[&feature_name].tracking_issue);
+ }
+
+ println!("Lang features without unstable book sections:");
+ for feature_name in &unstable_lib_feature_names-
+ &unstable_book_lib_features_section_file_names {
+ println!(" * {} {:?}", feature_name, lang_features[&feature_name].tracking_issue);
+ }
+ // */
+}
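
A minimal sketch of the set difference that drives the checks above; the feature and section names are invented. Feature names have underscores replaced by hyphens so they can be compared with the book's file names.

```rust
use std::collections::BTreeSet;

fn main() {
    let unstable_features: BTreeSet<String> =
        ["async-closure", "box-patterns"].iter().map(|s| s.to_string()).collect();
    let book_sections: BTreeSet<String> =
        ["box-patterns", "stale-feature"].iter().map(|s| s.to_string()).collect();

    // Book sections without a matching unstable feature are tidy errors.
    let stale = &book_sections - &unstable_features;
    assert_eq!(stale.into_iter().collect::<Vec<_>>(), vec!["stale-feature".to_string()]);
}
```
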
diff --git a/src/tools/tier-check/Cargo.toml b/src/tools/tier-check/Cargo.toml
new file mode 100644
index 000000000..3f08165a3
--- /dev/null
+++ b/src/tools/tier-check/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "tier-check"
+version = "0.1.0"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+
+[dependencies]
diff --git a/src/tools/tier-check/src/main.rs b/src/tools/tier-check/src/main.rs
new file mode 100644
index 000000000..a41e2d6e3
--- /dev/null
+++ b/src/tools/tier-check/src/main.rs
@@ -0,0 +1,50 @@
+//! This is a script for validating the platform support page in the rustc book.
+//!
+//! The script takes two arguments: the first is the path to the Platform Support
+//! source page, and the second is the path to `rustc`.
+
+use std::collections::HashSet;
+
+fn main() {
+ let mut args = std::env::args().skip(1);
+ let src = args.next().expect("expected source file as first argument");
+ let filename = std::path::Path::new(&src).file_name().unwrap().to_str().unwrap();
+ let rustc = args.next().expect("expected rustc as second argument");
+ let output = std::process::Command::new(rustc)
+ .arg("--print=target-list")
+ .output()
+ .expect("rustc should run");
+ if !output.status.success() {
+ eprintln!("rustc failed to run");
+ std::process::exit(0);
+ }
+ let stdout = std::str::from_utf8(&output.stdout).expect("utf8");
+ let target_list: HashSet<_> = stdout.lines().collect();
+
+ let doc_targets_md = std::fs::read_to_string(&src).expect("failed to read input source");
+ let doc_targets: HashSet<_> = doc_targets_md
+ .lines()
+ .filter(|line| line.starts_with(&['`', '['][..]) && line.contains('|'))
+ .map(|line| line.split('`').skip(1).next().expect("expected target code span"))
+ .collect();
+
+ let missing: Vec<_> = target_list.difference(&doc_targets).collect();
+ let extra: Vec<_> = doc_targets.difference(&target_list).collect();
+ for target in &missing {
+ eprintln!(
+ "error: target `{}` is missing from {}\n\
+ If this is a new target, please add it to {}.",
+ target, filename, src
+ );
+ }
+ for target in &extra {
+ eprintln!(
+ "error: target `{}` is in {}, but does not appear in the rustc target list\n\
+ If the target has been removed, please edit {} and remove the target.",
+ target, filename, src
+ );
+ }
+ if !missing.is_empty() || !extra.is_empty() {
+ std::process::exit(1);
+ }
+}
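
A minimal sketch of the row parsing above; the platform-support table row is invented.

```rust
fn main() {
    let line = "`aarch64-unknown-linux-gnu` | ✓ | ✓ | ARM64 Linux";
    // Rows of interest start with a backtick (or '[') and contain a '|'.
    assert!(line.starts_with(&['`', '['][..]) && line.contains('|'));
    // The first backtick-delimited span is the target triple.
    let target = line.split('`').nth(1).expect("expected target code span");
    assert_eq!(target, "aarch64-unknown-linux-gnu");
}
```
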
diff --git a/src/tools/unicode-table-generator/Cargo.toml b/src/tools/unicode-table-generator/Cargo.toml
new file mode 100644
index 000000000..ef01877c0
--- /dev/null
+++ b/src/tools/unicode-table-generator/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "unicode-bdd"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+ucd-parse = "0.1.3"
diff --git a/src/tools/unicode-table-generator/src/case_mapping.rs b/src/tools/unicode-table-generator/src/case_mapping.rs
new file mode 100644
index 000000000..992aac1f8
--- /dev/null
+++ b/src/tools/unicode-table-generator/src/case_mapping.rs
@@ -0,0 +1,70 @@
+use crate::{fmt_list, UnicodeData};
+use std::fmt;
+
+pub(crate) fn generate_case_mapping(data: &UnicodeData) -> String {
+ let mut file = String::new();
+
+ file.push_str(HEADER.trim_start());
+
+ let decl_type = "&[(char, [char; 3])]";
+
+ file.push_str(&format!(
+ "static LOWERCASE_TABLE: {} = &[{}];",
+ decl_type,
+ fmt_list(data.to_lower.iter().map(to_mapping))
+ ));
+ file.push_str("\n\n");
+ file.push_str(&format!(
+ "static UPPERCASE_TABLE: {} = &[{}];",
+ decl_type,
+ fmt_list(data.to_upper.iter().map(to_mapping))
+ ));
+ file
+}
+
+fn to_mapping((key, (a, b, c)): (&u32, &(u32, u32, u32))) -> (CharEscape, [CharEscape; 3]) {
+ (
+ CharEscape(std::char::from_u32(*key).unwrap()),
+ [
+ CharEscape(std::char::from_u32(*a).unwrap()),
+ CharEscape(std::char::from_u32(*b).unwrap()),
+ CharEscape(std::char::from_u32(*c).unwrap()),
+ ],
+ )
+}
+
+struct CharEscape(char);
+
+impl fmt::Debug for CharEscape {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "'{}'", self.0.escape_default())
+ }
+}
+
+static HEADER: &str = r"
+pub fn to_lower(c: char) -> [char; 3] {
+ if c.is_ascii() {
+ [(c as u8).to_ascii_lowercase() as char, '\0', '\0']
+ } else {
+ match bsearch_case_table(c, LOWERCASE_TABLE) {
+ None => [c, '\0', '\0'],
+ Some(index) => LOWERCASE_TABLE[index].1,
+ }
+ }
+}
+
+pub fn to_upper(c: char) -> [char; 3] {
+ if c.is_ascii() {
+ [(c as u8).to_ascii_uppercase() as char, '\0', '\0']
+ } else {
+ match bsearch_case_table(c, UPPERCASE_TABLE) {
+ None => [c, '\0', '\0'],
+ Some(index) => UPPERCASE_TABLE[index].1,
+ }
+ }
+}
+
+fn bsearch_case_table(c: char, table: &[(char, [char; 3])]) -> Option<usize> {
+ table.binary_search_by(|&(key, _)| key.cmp(&c)).ok()
+}
+";
diff --git a/src/tools/unicode-table-generator/src/main.rs b/src/tools/unicode-table-generator/src/main.rs
new file mode 100644
index 000000000..4720ee702
--- /dev/null
+++ b/src/tools/unicode-table-generator/src/main.rs
@@ -0,0 +1,439 @@
+//! This implements the core logic of the compression scheme used to compactly
+//! encode Unicode properties.
+//!
+//! We have two primary goals with the encoding: we want to be compact, because
+//! these tables often end up in ~every Rust program (especially the
+//! grapheme_extend table, used for str debugging), including those for embedded
+//! targets (where space is important). We also want to be relatively fast,
+//! though this is more of a nice to have rather than a key design constraint.
+//! It is expected that libraries/applications which are performance-sensitive
+//! to Unicode property lookups are extremely rare, and those that care may find
+//! the tradeoff of the raw bitsets worth it. For most applications, a
+//! relatively fast but much smaller (and as such less cache-impacting, etc.)
+//! data set is likely preferable.
+//!
+//! We have two separate encoding schemes: a skiplist-like approach, and a
+//! compressed bitset. The datasets we consider mostly use the skiplist (it's
+//! smaller) but the lowercase and uppercase sets are sufficiently sparse for
+//! the bitset to be worthwhile -- for those sets the bitset is a 2x size win.
+//! Since the bitset is also faster, this seems an obvious choice. (As a
+//! historical note, the bitset was also the prior implementation, so its
+//! relative complexity had already been paid).
+//!
+//! ## The bitset
+//!
+//! The primary idea is that we 'flatten' the Unicode ranges into an enormous
+//! bitset. To represent any arbitrary codepoint in a raw bitset, we would need
+//! over 17 kilobytes of data per character set -- way too much for our
+//! purposes.
+//!
+//! First, the raw bitset (one bit for every valid `char`, from 0 to 0x10FFFF,
+//! not skipping the small 'gap') is chunked into 64-bit words (u64) and
+//! deduplicated. On random data, this would be useless; on our data, this is
+//! incredibly beneficial -- our data sets have (far) less than 256 unique
+//! words.
+//!
+//! This gives us an array that maps `u8 -> word`; the current algorithm does
+//! not handle the case of more than 256 unique words, but we are relatively far
+//! from coming that close.
+//!
+//! With that scheme, we now have a single byte for every 64 codepoints.
+//!
+//! We further chunk these by some constant N (between 1 and 64 per group,
+//! dynamically chosen for smallest size), and again deduplicate and store in an
+//! array (u8 -> [u8; N]).
+//!
+//! The bytes of this array map into the words from the bitset above, but we
+//! apply another trick here: some of these words are similar enough that they
+//! can be represented by some function of another word. The particular
+//! functions chosen are rotation, inversion, and shifting (right).
+//!
+//! ## The skiplist
+//!
+//! The skip list arose out of the desire for an even smaller encoding than the
+//! bitset -- and was the answer to the question "what is the smallest
+//! representation we can imagine?". However, it is not necessarily the
+//! smallest, and if you have a better proposal, please do suggest it!
+//!
+//! This is a relatively straightforward encoding. First, we break up all the
+//! ranges in the input data into offsets from each other, essentially a gap
+//! encoding. In practice, most gaps are small -- less than u8::MAX -- so we
+//! store those directly. We make use of the larger gaps (which are nicely
+//! interspersed already) throughout the dataset to index this data set.
+//!
+//! In particular, each run of small gaps (terminating in a large gap) is
+//! indexed in a separate dataset. That data set stores an index into the
+//! primary offset list and a prefix sum of that offset list. These are packed
+//! into a single u32 (11 bits for the offset, 21 bits for the prefix sum).
+//!
+//! Lookup proceeds via a binary search in the index and then a straightforward
+//! linear scan (adding up the offsets) until we reach the needle, and then the
+//! index of that offset is utilized as the answer to whether we're in the set
+//! or not.
+
+use std::collections::{BTreeMap, HashMap};
+use std::ops::Range;
+use ucd_parse::Codepoints;
+
+mod case_mapping;
+mod raw_emitter;
+mod skiplist;
+mod unicode_download;
+
+use raw_emitter::{emit_codepoints, RawEmitter};
+
+static PROPERTIES: &[&str] = &[
+ "Alphabetic",
+ "Lowercase",
+ "Uppercase",
+ "Cased",
+ "Case_Ignorable",
+ "Grapheme_Extend",
+ "White_Space",
+ "Cc",
+ "N",
+];
+
+struct UnicodeData {
+ ranges: Vec<(&'static str, Vec<Range<u32>>)>,
+ to_upper: BTreeMap<u32, (u32, u32, u32)>,
+ to_lower: BTreeMap<u32, (u32, u32, u32)>,
+}
+
+fn to_mapping(origin: u32, codepoints: Vec<ucd_parse::Codepoint>) -> Option<(u32, u32, u32)> {
+ let mut a = None;
+ let mut b = None;
+ let mut c = None;
+
+ for codepoint in codepoints {
+ if origin == codepoint.value() {
+ return None;
+ }
+
+ if a.is_none() {
+ a = Some(codepoint.value());
+ } else if b.is_none() {
+ b = Some(codepoint.value());
+ } else if c.is_none() {
+ c = Some(codepoint.value());
+ } else {
+ panic!("more than 3 mapped codepoints")
+ }
+ }
+
+ Some((a.unwrap(), b.unwrap_or(0), c.unwrap_or(0)))
+}
+
+static UNICODE_DIRECTORY: &str = "unicode-downloads";
+
+fn load_data() -> UnicodeData {
+ unicode_download::fetch_latest();
+
+ let mut properties = HashMap::new();
+ for row in ucd_parse::parse::<_, ucd_parse::CoreProperty>(&UNICODE_DIRECTORY).unwrap() {
+ if let Some(name) = PROPERTIES.iter().find(|prop| **prop == row.property.as_str()) {
+ properties.entry(*name).or_insert_with(Vec::new).push(row.codepoints);
+ }
+ }
+ for row in ucd_parse::parse::<_, ucd_parse::Property>(&UNICODE_DIRECTORY).unwrap() {
+ if let Some(name) = PROPERTIES.iter().find(|prop| **prop == row.property.as_str()) {
+ properties.entry(*name).or_insert_with(Vec::new).push(row.codepoints);
+ }
+ }
+
+ let mut to_lower = BTreeMap::new();
+ let mut to_upper = BTreeMap::new();
+ for row in ucd_parse::UnicodeDataExpander::new(
+ ucd_parse::parse::<_, ucd_parse::UnicodeData>(&UNICODE_DIRECTORY).unwrap(),
+ ) {
+ let general_category = if ["Nd", "Nl", "No"].contains(&row.general_category.as_str()) {
+ "N"
+ } else {
+ row.general_category.as_str()
+ };
+ if let Some(name) = PROPERTIES.iter().find(|prop| **prop == general_category) {
+ properties
+ .entry(*name)
+ .or_insert_with(Vec::new)
+ .push(Codepoints::Single(row.codepoint));
+ }
+
+ if let Some(mapped) = row.simple_lowercase_mapping {
+ if mapped != row.codepoint {
+ to_lower.insert(row.codepoint.value(), (mapped.value(), 0, 0));
+ }
+ }
+ if let Some(mapped) = row.simple_uppercase_mapping {
+ if mapped != row.codepoint {
+ to_upper.insert(row.codepoint.value(), (mapped.value(), 0, 0));
+ }
+ }
+ }
+
+ for row in ucd_parse::parse::<_, ucd_parse::SpecialCaseMapping>(&UNICODE_DIRECTORY).unwrap() {
+ if !row.conditions.is_empty() {
+ // Skip conditional case mappings
+ continue;
+ }
+
+ let key = row.codepoint.value();
+ if let Some(lower) = to_mapping(key, row.lowercase) {
+ to_lower.insert(key, lower);
+ }
+ if let Some(upper) = to_mapping(key, row.uppercase) {
+ to_upper.insert(key, upper);
+ }
+ }
+
+ let mut properties: HashMap<&'static str, Vec<Range<u32>>> = properties
+ .into_iter()
+ .map(|(k, v)| {
+ (
+ k,
+ v.into_iter()
+ .flat_map(|codepoints| match codepoints {
+ Codepoints::Single(c) => c
+ .scalar()
+ .map(|ch| (ch as u32..ch as u32 + 1))
+ .into_iter()
+ .collect::<Vec<_>>(),
+ Codepoints::Range(c) => c
+ .into_iter()
+ .flat_map(|c| c.scalar().map(|ch| (ch as u32..ch as u32 + 1)))
+ .collect::<Vec<_>>(),
+ })
+ .collect::<Vec<Range<u32>>>(),
+ )
+ })
+ .collect();
+
+ for ranges in properties.values_mut() {
+ merge_ranges(ranges);
+ }
+
+ let mut properties = properties.into_iter().collect::<Vec<_>>();
+ properties.sort_by_key(|p| p.0);
+ UnicodeData { ranges: properties, to_lower, to_upper }
+}
+
+fn main() {
+ let write_location = std::env::args().nth(1).unwrap_or_else(|| {
+ eprintln!("Must provide path to write unicode tables to");
+ eprintln!(
+ "e.g. {} library/core/unicode/unicode_data.rs",
+ std::env::args().next().unwrap_or_default()
+ );
+ std::process::exit(1);
+ });
+
+ // Optional test path, which is a Rust source file testing that the unicode
+ // property lookups are correct.
+ let test_path = std::env::args().nth(2);
+
+ let unicode_data = load_data();
+ let ranges_by_property = &unicode_data.ranges;
+
+ if let Some(path) = test_path {
+ std::fs::write(&path, generate_tests(&write_location, &ranges_by_property)).unwrap();
+ }
+
+ let mut total_bytes = 0;
+ let mut modules = Vec::new();
+ for (property, ranges) in ranges_by_property {
+ let datapoints = ranges.iter().map(|r| r.end - r.start).sum::<u32>();
+ let mut emitter = RawEmitter::new();
+ emit_codepoints(&mut emitter, &ranges);
+
+ modules.push((property.to_lowercase().to_string(), emitter.file));
+ println!(
+ "{:15}: {} bytes, {} codepoints in {} ranges ({} - {}) using {}",
+ property,
+ emitter.bytes_used,
+ datapoints,
+ ranges.len(),
+ ranges.first().unwrap().start,
+ ranges.last().unwrap().end,
+ emitter.desc,
+ );
+ total_bytes += emitter.bytes_used;
+ }
+
+ let mut table_file = String::new();
+
+ table_file.push_str(
+ "///! This file is generated by src/tools/unicode-table-generator; do not edit manually!\n",
+ );
+
+ // Include the range search function
+ table_file.push('\n');
+ table_file.push_str(include_str!("range_search.rs"));
+ table_file.push('\n');
+
+ table_file.push_str(&version());
+
+ table_file.push('\n');
+
+ modules.push((String::from("conversions"), case_mapping::generate_case_mapping(&unicode_data)));
+
+ for (name, contents) in modules {
+ table_file.push_str("#[rustfmt::skip]\n");
+ table_file.push_str(&format!("pub mod {name} {{\n"));
+ for line in contents.lines() {
+ if !line.trim().is_empty() {
+ table_file.push_str(" ");
+ table_file.push_str(&line);
+ }
+ table_file.push('\n');
+ }
+ table_file.push_str("}\n\n");
+ }
+
+ std::fs::write(&write_location, format!("{}\n", table_file.trim_end())).unwrap();
+
+ println!("Total table sizes: {total_bytes} bytes");
+}
+
+fn version() -> String {
+ let mut out = String::new();
+ out.push_str("pub const UNICODE_VERSION: (u8, u8, u8) = ");
+
+ let readme =
+ std::fs::read_to_string(std::path::Path::new(UNICODE_DIRECTORY).join("ReadMe.txt"))
+ .unwrap();
+
+ let prefix = "for Version ";
+ let start = readme.find(prefix).unwrap() + prefix.len();
+ let end = readme.find(" of the Unicode Standard.").unwrap();
+ let version =
+ readme[start..end].split('.').map(|v| v.parse::<u32>().expect(&v)).collect::<Vec<_>>();
+ let [major, minor, micro] = [version[0], version[1], version[2]];
+
+ out.push_str(&format!("({major}, {minor}, {micro});\n"));
+ out
+}
+
+fn fmt_list<V: std::fmt::Debug>(values: impl IntoIterator<Item = V>) -> String {
+ let pieces = values.into_iter().map(|b| format!("{:?}, ", b)).collect::<Vec<_>>();
+ let mut out = String::new();
+ let mut line = String::from("\n ");
+ for piece in pieces {
+ if line.len() + piece.len() < 98 {
+ line.push_str(&piece);
+ } else {
+ out.push_str(line.trim_end());
+ out.push('\n');
+ line = format!(" {piece}");
+ }
+ }
+ out.push_str(line.trim_end());
+ out.push('\n');
+ out
+}
+
+fn generate_tests(data_path: &str, ranges: &[(&str, Vec<Range<u32>>)]) -> String {
+ let mut s = String::new();
+ s.push_str("#![allow(incomplete_features, unused)]\n");
+ s.push_str("#![feature(const_generics)]\n\n");
+ s.push_str("\n#[allow(unused)]\nuse std::hint;\n");
+ s.push_str(&format!("#[path = \"{data_path}\"]\n"));
+ s.push_str("mod unicode_data;\n\n");
+
+ s.push_str("\nfn main() {\n");
+
+ for (property, ranges) in ranges {
+ s.push_str(&format!(r#" println!("Testing {}");"#, property));
+ s.push('\n');
+ s.push_str(&format!(" {}_true();\n", property.to_lowercase()));
+ s.push_str(&format!(" {}_false();\n", property.to_lowercase()));
+ let mut is_true = Vec::new();
+ let mut is_false = Vec::new();
+ for ch_num in 0..(std::char::MAX as u32) {
+ if std::char::from_u32(ch_num).is_none() {
+ continue;
+ }
+ if ranges.iter().any(|r| r.contains(&ch_num)) {
+ is_true.push(ch_num);
+ } else {
+ is_false.push(ch_num);
+ }
+ }
+
+ s.push_str(&format!(" fn {}_true() {{\n", property.to_lowercase()));
+ generate_asserts(&mut s, property, &is_true, true);
+ s.push_str(" }\n\n");
+ s.push_str(&format!(" fn {}_false() {{\n", property.to_lowercase()));
+ generate_asserts(&mut s, property, &is_false, false);
+ s.push_str(" }\n\n");
+ }
+
+ s.push_str("}");
+ s
+}
+
+fn generate_asserts(s: &mut String, property: &str, points: &[u32], truthy: bool) {
+ for range in ranges_from_set(points) {
+ if range.end == range.start + 1 {
+ s.push_str(&format!(
+ " assert!({}unicode_data::{}::lookup({:?}), \"{}\");\n",
+ if truthy { "" } else { "!" },
+ property.to_lowercase(),
+ std::char::from_u32(range.start).unwrap(),
+ range.start,
+ ));
+ } else {
+ s.push_str(&format!(" for chn in {:?}u32 {{\n", range));
+ s.push_str(&format!(
+ " assert!({}unicode_data::{}::lookup(std::char::from_u32(chn).unwrap()), \"{{:?}}\", chn);\n",
+ if truthy { "" } else { "!" },
+ property.to_lowercase(),
+ ));
+ s.push_str(" }\n");
+ }
+ }
+}
+
+fn ranges_from_set(set: &[u32]) -> Vec<Range<u32>> {
+ let mut ranges = set.iter().map(|e| (*e)..(*e + 1)).collect::<Vec<Range<u32>>>();
+ merge_ranges(&mut ranges);
+ ranges
+}
+
+fn merge_ranges(ranges: &mut Vec<Range<u32>>) {
+ loop {
+ let mut new_ranges = Vec::new();
+ let mut idx_iter = 0..(ranges.len() - 1);
+ let mut should_insert_last = true;
+ while let Some(idx) = idx_iter.next() {
+ let cur = ranges[idx].clone();
+ let next = ranges[idx + 1].clone();
+ if cur.end == next.start {
+ if idx_iter.next().is_none() {
+ // We're merging the last element
+ should_insert_last = false;
+ }
+ new_ranges.push(cur.start..next.end);
+ } else {
+ // We're *not* merging the last element
+ should_insert_last = true;
+ new_ranges.push(cur);
+ }
+ }
+ if should_insert_last {
+ new_ranges.push(ranges.last().unwrap().clone());
+ }
+ if new_ranges.len() == ranges.len() {
+ *ranges = new_ranges;
+ break;
+ } else {
+ *ranges = new_ranges;
+ }
+ }
+
+ let mut last_end = None;
+ for range in ranges {
+ if let Some(last) = last_end {
+ assert!(range.start > last, "{:?}", range);
+ }
+ last_end = Some(range.end);
+ }
+}
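
A worked example of `merge_ranges` above on invented input, assuming the function is in scope: adjacent ranges are coalesced and gaps are preserved.

```rust
fn main() {
    let mut ranges = vec![0u32..3, 3..5, 7..9, 9..12];
    merge_ranges(&mut ranges);
    assert_eq!(ranges, vec![0..5, 7..12]);
}
```
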
diff --git a/src/tools/unicode-table-generator/src/range_search.rs b/src/tools/unicode-table-generator/src/range_search.rs
new file mode 100644
index 000000000..39b47ce70
--- /dev/null
+++ b/src/tools/unicode-table-generator/src/range_search.rs
@@ -0,0 +1,93 @@
+#[inline(always)]
+fn bitset_search<
+ const N: usize,
+ const CHUNK_SIZE: usize,
+ const N1: usize,
+ const CANONICAL: usize,
+ const CANONICALIZED: usize,
+>(
+ needle: u32,
+ chunk_idx_map: &[u8; N],
+ bitset_chunk_idx: &[[u8; CHUNK_SIZE]; N1],
+ bitset_canonical: &[u64; CANONICAL],
+ bitset_canonicalized: &[(u8, u8); CANONICALIZED],
+) -> bool {
+ let bucket_idx = (needle / 64) as usize;
+ let chunk_map_idx = bucket_idx / CHUNK_SIZE;
+ let chunk_piece = bucket_idx % CHUNK_SIZE;
+ let chunk_idx = if let Some(&v) = chunk_idx_map.get(chunk_map_idx) {
+ v
+ } else {
+ return false;
+ };
+ let idx = bitset_chunk_idx[chunk_idx as usize][chunk_piece] as usize;
+ let word = if let Some(word) = bitset_canonical.get(idx) {
+ *word
+ } else {
+ let (real_idx, mapping) = bitset_canonicalized[idx - bitset_canonical.len()];
+ let mut word = bitset_canonical[real_idx as usize];
+ let should_invert = mapping & (1 << 6) != 0;
+ if should_invert {
+ word = !word;
+ }
+ // Lower 6 bits
+ let quantity = mapping & ((1 << 6) - 1);
+ if mapping & (1 << 7) != 0 {
+ // shift
+ word >>= quantity as u64;
+ } else {
+ word = word.rotate_left(quantity as u32);
+ }
+ word
+ };
+ (word & (1 << (needle % 64) as u64)) != 0
+}
+
+fn decode_prefix_sum(short_offset_run_header: u32) -> u32 {
+ short_offset_run_header & ((1 << 21) - 1)
+}
+
+fn decode_length(short_offset_run_header: u32) -> usize {
+ (short_offset_run_header >> 21) as usize
+}
+
+#[inline(always)]
+fn skip_search<const SOR: usize, const OFFSETS: usize>(
+ needle: u32,
+ short_offset_runs: &[u32; SOR],
+ offsets: &[u8; OFFSETS],
+) -> bool {
+ // Note that this *cannot* be past the end of the array, as the last
+ // element is greater than std::char::MAX (the largest possible needle).
+ //
+ // So, we cannot have found it (i.e. Ok(idx) + 1 != length) and the correct
+ // location cannot be past it, so Err(idx) != length either.
+ //
+ // This means that we can avoid bounds checking for the accesses below, too.
+ let last_idx =
+ match short_offset_runs.binary_search_by_key(&(needle << 11), |header| header << 11) {
+ Ok(idx) => idx + 1,
+ Err(idx) => idx,
+ };
+
+ let mut offset_idx = decode_length(short_offset_runs[last_idx]);
+ let length = if let Some(next) = short_offset_runs.get(last_idx + 1) {
+ decode_length(*next) - offset_idx
+ } else {
+ offsets.len() - offset_idx
+ };
+ let prev =
+ last_idx.checked_sub(1).map(|prev| decode_prefix_sum(short_offset_runs[prev])).unwrap_or(0);
+
+ let total = needle - prev;
+ let mut prefix_sum = 0;
+ for _ in 0..(length - 1) {
+ let offset = offsets[offset_idx];
+ prefix_sum += offset as u32;
+ if prefix_sum > total {
+ break;
+ }
+ offset_idx += 1;
+ }
+ offset_idx % 2 == 1
+}
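
A worked example of the 32-bit header layout decoded above; the values are invented. The top 11 bits hold a start index into `OFFSETS`, the low 21 bits a running prefix sum of the offsets up to that run.

```rust
fn main() {
    let start_idx: u32 = 3;
    let prefix_sum: u32 = 70_000;
    let header = (start_idx << 21) | prefix_sum;
    assert_eq!((header >> 21) as usize, 3); // decode_length
    assert_eq!(header & ((1 << 21) - 1), 70_000); // decode_prefix_sum
}
```
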
diff --git a/src/tools/unicode-table-generator/src/raw_emitter.rs b/src/tools/unicode-table-generator/src/raw_emitter.rs
new file mode 100644
index 000000000..ab8eaee95
--- /dev/null
+++ b/src/tools/unicode-table-generator/src/raw_emitter.rs
@@ -0,0 +1,394 @@
+use crate::fmt_list;
+use std::collections::{BTreeMap, BTreeSet, HashMap};
+use std::convert::TryFrom;
+use std::fmt::{self, Write};
+use std::ops::Range;
+
+#[derive(Clone)]
+pub struct RawEmitter {
+ pub file: String,
+ pub desc: String,
+ pub bytes_used: usize,
+}
+
+impl RawEmitter {
+ pub fn new() -> RawEmitter {
+ RawEmitter { file: String::new(), bytes_used: 0, desc: String::new() }
+ }
+
+ fn blank_line(&mut self) {
+ if self.file.is_empty() || self.file.ends_with("\n\n") {
+ return;
+ }
+ writeln!(&mut self.file).unwrap();
+ }
+
+ fn emit_bitset(&mut self, ranges: &[Range<u32>]) -> Result<(), String> {
+ let last_code_point = ranges.last().unwrap().end;
+ // bitset for every bit in the codepoint range
+ //
+ // + 2 to ensure an all zero word to use for padding
+ let mut buckets = vec![0u64; (last_code_point as usize / 64) + 2];
+ for range in ranges {
+ for codepoint in range.clone() {
+ let bucket = codepoint as usize / 64;
+ let bit = codepoint as u64 % 64;
+ buckets[bucket] |= 1 << bit;
+ }
+ }
+
+ let mut words = buckets;
+ // Ensure that there's a zero word in the dataset, used for padding and
+ // such.
+ words.push(0);
+ let unique_words =
+ words.iter().cloned().collect::<BTreeSet<_>>().into_iter().collect::<Vec<_>>();
+ if unique_words.len() > u8::MAX as usize {
+ return Err(format!("cannot pack {} into 8 bits", unique_words.len()));
+ }
+ // needed for the chunk mapping to work
+ assert_eq!(unique_words[0], 0, "has a zero word");
+ let canonicalized = Canonicalized::canonicalize(&unique_words);
+
+ let word_indices = canonicalized.unique_mapping.clone();
+ let compressed_words = words.iter().map(|w| word_indices[w]).collect::<Vec<u8>>();
+
+ let mut best = None;
+ for length in 1..=64 {
+ let mut temp = self.clone();
+ temp.emit_chunk_map(word_indices[&0], &compressed_words, length);
+ if let Some((_, size)) = best {
+ if temp.bytes_used < size {
+ best = Some((length, temp.bytes_used));
+ }
+ } else {
+ best = Some((length, temp.bytes_used));
+ }
+ }
+ self.emit_chunk_map(word_indices[&0], &compressed_words, best.unwrap().0);
+
+ struct Bits(u64);
+ impl fmt::Debug for Bits {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "0b{:064b}", self.0)
+ }
+ }
+
+ writeln!(
+ &mut self.file,
+ "static BITSET_CANONICAL: [u64; {}] = [{}];",
+ canonicalized.canonical_words.len(),
+ fmt_list(canonicalized.canonical_words.iter().map(|v| Bits(*v))),
+ )
+ .unwrap();
+ self.bytes_used += 8 * canonicalized.canonical_words.len();
+ writeln!(
+ &mut self.file,
+ "static BITSET_MAPPING: [(u8, u8); {}] = [{}];",
+ canonicalized.canonicalized_words.len(),
+ fmt_list(&canonicalized.canonicalized_words),
+ )
+ .unwrap();
+ // 8 bit index into shifted words, 7 bits for shift + optional flip
+ // We only need it for the words that we removed by applying a shift and
+ // flip to them.
+ self.bytes_used += 2 * canonicalized.canonicalized_words.len();
+
+ self.blank_line();
+
+ writeln!(&mut self.file, "pub fn lookup(c: char) -> bool {{").unwrap();
+ writeln!(&mut self.file, " super::bitset_search(",).unwrap();
+ writeln!(&mut self.file, " c as u32,").unwrap();
+ writeln!(&mut self.file, " &BITSET_CHUNKS_MAP,").unwrap();
+ writeln!(&mut self.file, " &BITSET_INDEX_CHUNKS,").unwrap();
+ writeln!(&mut self.file, " &BITSET_CANONICAL,").unwrap();
+ writeln!(&mut self.file, " &BITSET_MAPPING,").unwrap();
+ writeln!(&mut self.file, " )").unwrap();
+ writeln!(&mut self.file, "}}").unwrap();
+
+ Ok(())
+ }
+
+ fn emit_chunk_map(&mut self, zero_at: u8, compressed_words: &[u8], chunk_length: usize) {
+ let mut compressed_words = compressed_words.to_vec();
+ for _ in 0..(chunk_length - (compressed_words.len() % chunk_length)) {
+ // pad out bitset index with zero words so we have all chunks of
+ // chunk_length
+ compressed_words.push(zero_at);
+ }
+
+ let mut chunks = BTreeSet::new();
+ for chunk in compressed_words.chunks(chunk_length) {
+ chunks.insert(chunk);
+ }
+ let chunk_map = chunks
+ .clone()
+ .into_iter()
+ .enumerate()
+ .map(|(idx, chunk)| (chunk, idx))
+ .collect::<HashMap<_, _>>();
+ let mut chunk_indices = Vec::new();
+ for chunk in compressed_words.chunks(chunk_length) {
+ chunk_indices.push(chunk_map[chunk]);
+ }
+
+ writeln!(
+ &mut self.file,
+ "static BITSET_CHUNKS_MAP: [u8; {}] = [{}];",
+ chunk_indices.len(),
+ fmt_list(&chunk_indices),
+ )
+ .unwrap();
+ self.bytes_used += chunk_indices.len();
+ writeln!(
+ &mut self.file,
+ "static BITSET_INDEX_CHUNKS: [[u8; {}]; {}] = [{}];",
+ chunk_length,
+ chunks.len(),
+ fmt_list(chunks.iter()),
+ )
+ .unwrap();
+ self.bytes_used += chunk_length * chunks.len();
+ }
+}
+
+pub fn emit_codepoints(emitter: &mut RawEmitter, ranges: &[Range<u32>]) {
+ emitter.blank_line();
+
+ let mut bitset = emitter.clone();
+ let bitset_ok = bitset.emit_bitset(&ranges).is_ok();
+
+ let mut skiplist = emitter.clone();
+ skiplist.emit_skiplist(&ranges);
+
+ if bitset_ok && bitset.bytes_used <= skiplist.bytes_used {
+ *emitter = bitset;
+ emitter.desc = String::from("bitset");
+ } else {
+ *emitter = skiplist;
+ emitter.desc = String::from("skiplist");
+ }
+}
+
+struct Canonicalized {
+ canonical_words: Vec<u64>,
+ canonicalized_words: Vec<(u8, u8)>,
+
+ /// Maps an input unique word to the associated index (u8) which is into
+ /// canonical_words or canonicalized_words (in order).
+ unique_mapping: HashMap<u64, u8>,
+}
+
+impl Canonicalized {
+ fn canonicalize(unique_words: &[u64]) -> Self {
+ #[derive(Copy, Clone, Debug)]
+ enum Mapping {
+ Rotate(u32),
+ Invert,
+ RotateAndInvert(u32),
+ ShiftRight(u32),
+ }
+
+ // key is the word being mapped to
+ let mut mappings: BTreeMap<u64, Vec<(u64, Mapping)>> = BTreeMap::new();
+ for &a in unique_words {
+ 'b: for &b in unique_words {
+ // skip self
+ if a == b {
+ continue;
+ }
+
+ // All possible distinct rotations
+ for rotation in 1..64 {
+ if a.rotate_right(rotation) == b {
+ mappings.entry(b).or_default().push((a, Mapping::Rotate(rotation)));
+ // We're not interested in further mappings between a and b
+ continue 'b;
+ }
+ }
+
+ if (!a) == b {
+ mappings.entry(b).or_default().push((a, Mapping::Invert));
+ // We're not interested in further mappings between a and b
+ continue 'b;
+ }
+
+ // All possible distinct rotations, inverted
+ for rotation in 1..64 {
+ if (!a.rotate_right(rotation)) == b {
+ mappings
+ .entry(b)
+ .or_default()
+ .push((a, Mapping::RotateAndInvert(rotation)));
+ // We're not interested in further mappings between a and b
+ continue 'b;
+ }
+ }
+
+ // All possible shifts
+ for shift_by in 1..64 {
+ if a == (b >> shift_by) {
+ mappings
+ .entry(b)
+ .or_default()
+ .push((a, Mapping::ShiftRight(shift_by as u32)));
+ // We're not interested in further mappings between a and b
+ continue 'b;
+ }
+ }
+ }
+ }
+ // These are the bitset words which will be represented "raw" (as a u64)
+ let mut canonical_words = Vec::new();
+ // These are mapped words, which will be represented by an index into
+ // the canonical_words and a Mapping; u16 when encoded.
+ let mut canonicalized_words = Vec::new();
+ let mut unique_mapping = HashMap::new();
+
+ #[derive(Debug, PartialEq, Eq)]
+ enum UniqueMapping {
+ Canonical(usize),
+ Canonicalized(usize),
+ }
+
+ // Map 0 first, so that it is the first canonical word.
+ // This is realistically not inefficient because 0 is not mapped to by
+ // anything else (a shift pattern could do it, but would be wasteful).
+ //
+ // However, 0s are quite common in the overall dataset, and it is quite
+ // wasteful to have to go through a mapping function to determine that
+ // we have a zero.
+ //
+ // FIXME: Experiment with choosing most common words in overall data set
+ // for canonical when possible.
+ while let Some((&to, _)) = mappings
+ .iter()
+ .find(|(&to, _)| to == 0)
+ .or_else(|| mappings.iter().max_by_key(|m| m.1.len()))
+ {
+ // Get the mapping with the most entries. Currently, no mapping can
+ // only exist transitively (i.e., there is no A, B, C such that A
+ // does not map to C and but A maps to B maps to C), so this is
+ // guaranteed to be acceptable.
+ //
+ // In the future, we may need a more sophisticated algorithm to
+ // identify which keys to prefer as canonical.
+ let mapped_from = mappings.remove(&to).unwrap();
+ for (from, how) in &mapped_from {
+ // Remove the entries which mapped to this one.
+ // Noting that it should be associated with the Nth canonical word.
+ //
+ // We do not assert that this is present, because there may be
+ // no mappings to the `from` word; that's fine.
+ mappings.remove(from);
+ assert_eq!(
+ unique_mapping
+ .insert(*from, UniqueMapping::Canonicalized(canonicalized_words.len())),
+ None
+ );
+ canonicalized_words.push((canonical_words.len(), *how));
+
+ // Remove the now-canonicalized word from other mappings,
+ // to ensure that we deprioritize them in the next iteration of
+ // the while loop.
+ for mapped in mappings.values_mut() {
+ let mut i = 0;
+ while i != mapped.len() {
+ if mapped[i].0 == *from {
+ mapped.remove(i);
+ } else {
+ i += 1;
+ }
+ }
+ }
+ }
+ assert!(
+ unique_mapping
+ .insert(to, UniqueMapping::Canonical(canonical_words.len()))
+ .is_none()
+ );
+ canonical_words.push(to);
+
+ // Remove the now-canonical word from other mappings, to ensure that
+ // we deprioritize them in the next iteration of the while loop.
+ for mapped in mappings.values_mut() {
+ let mut i = 0;
+ while i != mapped.len() {
+ if mapped[i].0 == to {
+ mapped.remove(i);
+ } else {
+ i += 1;
+ }
+ }
+ }
+ }
+
+ // Any words which we couldn't shrink, just stick into the canonical
+ // words.
+ //
+ // FIXME: work harder -- there are more possibilities for mapping
+ // functions (e.g., multiplication, shifting instead of rotation, etc.)
+ // We'll probably always have some slack though so this loop will still
+ // be needed.
+ for &w in unique_words {
+ if !unique_mapping.contains_key(&w) {
+ assert!(
+ unique_mapping
+ .insert(w, UniqueMapping::Canonical(canonical_words.len()))
+ .is_none()
+ );
+ canonical_words.push(w);
+ }
+ }
+ assert_eq!(canonicalized_words.len() + canonical_words.len(), unique_words.len());
+ assert_eq!(unique_mapping.len(), unique_words.len());
+
+ let unique_mapping = unique_mapping
+ .into_iter()
+ .map(|(key, value)| {
+ (
+ key,
+ match value {
+ UniqueMapping::Canonicalized(idx) => {
+ u8::try_from(canonical_words.len() + idx).unwrap()
+ }
+ UniqueMapping::Canonical(idx) => u8::try_from(idx).unwrap(),
+ },
+ )
+ })
+ .collect::<HashMap<_, _>>();
+
+ let mut distinct_indices = BTreeSet::new();
+ for &w in unique_words {
+ let idx = unique_mapping.get(&w).unwrap();
+ assert!(distinct_indices.insert(idx));
+ }
+
+ const LOWER_6: u32 = (1 << 6) - 1;
+
+ let canonicalized_words = canonicalized_words
+ .into_iter()
+ .map(|v| {
+ (
+ u8::try_from(v.0).unwrap(),
+ match v.1 {
+ Mapping::RotateAndInvert(amount) => {
+ assert_eq!(amount, amount & LOWER_6);
+ 1 << 6 | (amount as u8)
+ }
+ Mapping::Rotate(amount) => {
+ assert_eq!(amount, amount & LOWER_6);
+ amount as u8
+ }
+ Mapping::Invert => 1 << 6,
+ Mapping::ShiftRight(shift_by) => {
+ assert_eq!(shift_by, shift_by & LOWER_6);
+ 1 << 7 | (shift_by as u8)
+ }
+ },
+ )
+ })
+ .collect::<Vec<(u8, u8)>>();
+ Canonicalized { unique_mapping, canonical_words, canonicalized_words }
+ }
+}
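
An invented illustration of the canonicalization above: `b` is the bitwise inverse of `a`, so only `a` needs to be stored as a raw `u64`; `b` becomes an (index, mapping) byte pair with the invert bit set, in the layout consumed by `bitset_search` in range_search.rs.

```rust
fn main() {
    let a: u64 = 0x00FF_00FF_00FF_00FF;
    let b: u64 = !a;
    let mapping: u8 = 1 << 6; // Mapping::Invert
    let mut word = a;
    if mapping & (1 << 6) != 0 {
        word = !word;
    }
    assert_eq!(word, b); // two bytes of mapping data instead of eight raw bytes
}
```
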
diff --git a/src/tools/unicode-table-generator/src/skiplist.rs b/src/tools/unicode-table-generator/src/skiplist.rs
new file mode 100644
index 000000000..6e439968c
--- /dev/null
+++ b/src/tools/unicode-table-generator/src/skiplist.rs
@@ -0,0 +1,98 @@
+use crate::fmt_list;
+use crate::raw_emitter::RawEmitter;
+use std::convert::TryInto;
+use std::fmt::Write as _;
+use std::ops::Range;
+
+/// This will get packed into a single u32 before inserting into the data set.
+#[derive(Debug, PartialEq)]
+struct ShortOffsetRunHeader {
+ /// Note, we only allow for 21 bits here.
+ prefix_sum: u32,
+
+ /// Note, we actually only allow for 11 bits here. This should be enough --
+ /// our largest sets are around ~1400 offsets long.
+ start_idx: u16,
+}
+
+impl ShortOffsetRunHeader {
+ fn pack(&self) -> u32 {
+ assert!(self.start_idx < (1 << 11));
+ assert!(self.prefix_sum < (1 << 21));
+
+ (self.start_idx as u32) << 21 | self.prefix_sum
+ }
+}
+
+impl RawEmitter {
+ pub fn emit_skiplist(&mut self, ranges: &[Range<u32>]) {
+ let mut offsets = Vec::<u32>::new();
+ let points = ranges.iter().flat_map(|r| vec![r.start, r.end]).collect::<Vec<u32>>();
+ let mut offset = 0;
+ for pt in points {
+ let delta = pt - offset;
+ offsets.push(delta);
+ offset = pt;
+ }
+ // Guaranteed to terminate, as it's impossible to subtract a value this
+ // large from a valid char.
+ offsets.push(std::char::MAX as u32 + 1);
+ let mut coded_offsets: Vec<u8> = Vec::new();
+ let mut short_offset_runs: Vec<ShortOffsetRunHeader> = vec![];
+ let mut iter = offsets.iter().cloned();
+ let mut prefix_sum = 0;
+ loop {
+ let mut any_elements = false;
+ let mut inserted = false;
+ let start = coded_offsets.len();
+ for offset in iter.by_ref() {
+ any_elements = true;
+ prefix_sum += offset;
+ if let Ok(offset) = offset.try_into() {
+ coded_offsets.push(offset);
+ } else {
+ short_offset_runs.push(ShortOffsetRunHeader {
+ start_idx: start.try_into().unwrap(),
+ prefix_sum,
+ });
+ // This is just needed to maintain indices even/odd
+ // correctly.
+ coded_offsets.push(0);
+ inserted = true;
+ break;
+ }
+ }
+ if !any_elements {
+ break;
+ }
+ // We always append the huge char::MAX offset to the end which
+ // should never be able to fit into the u8 offsets.
+ assert!(inserted);
+ }
+
+ writeln!(
+ &mut self.file,
+ "static SHORT_OFFSET_RUNS: [u32; {}] = [{}];",
+ short_offset_runs.len(),
+ fmt_list(short_offset_runs.iter().map(|v| v.pack()))
+ )
+ .unwrap();
+ self.bytes_used += 4 * short_offset_runs.len();
+ writeln!(
+ &mut self.file,
+ "static OFFSETS: [u8; {}] = [{}];",
+ coded_offsets.len(),
+ fmt_list(&coded_offsets)
+ )
+ .unwrap();
+ self.bytes_used += coded_offsets.len();
+
+ writeln!(&mut self.file, "pub fn lookup(c: char) -> bool {{").unwrap();
+ writeln!(&mut self.file, " super::skip_search(",).unwrap();
+ writeln!(&mut self.file, " c as u32,").unwrap();
+ writeln!(&mut self.file, " &SHORT_OFFSET_RUNS,").unwrap();
+ writeln!(&mut self.file, " &OFFSETS,").unwrap();
+ writeln!(&mut self.file, " )").unwrap();
+ writeln!(&mut self.file, "}}").unwrap();
+ }
+}
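
A worked example of the gap encoding above on invented ranges: each range boundary becomes a delta from the previous boundary, and membership is later recovered from the parity of the offset index (see `skip_search`).

```rust
fn main() {
    let ranges = [10u32..12, 20..25];
    let mut offsets = Vec::new();
    let mut prev = 0;
    for pt in ranges.iter().flat_map(|r| [r.start, r.end]) {
        offsets.push(pt - prev);
        prev = pt;
    }
    assert_eq!(offsets, vec![10, 2, 8, 5]);
}
```
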
diff --git a/src/tools/unicode-table-generator/src/unicode_download.rs b/src/tools/unicode-table-generator/src/unicode_download.rs
new file mode 100644
index 000000000..9b2e0a258
--- /dev/null
+++ b/src/tools/unicode-table-generator/src/unicode_download.rs
@@ -0,0 +1,46 @@
+use crate::UNICODE_DIRECTORY;
+use std::path::Path;
+use std::process::Command;
+
+static URL_PREFIX: &str = "https://www.unicode.org/Public/UCD/latest/ucd/";
+
+static README: &str = "ReadMe.txt";
+
+static RESOURCES: &[&str] =
+ &["DerivedCoreProperties.txt", "PropList.txt", "UnicodeData.txt", "SpecialCasing.txt"];
+
+pub fn fetch_latest() {
+ let directory = Path::new(UNICODE_DIRECTORY);
+ if directory.exists() {
+ eprintln!(
+ "Not refetching unicode data, already exists, please delete {directory:?} to regenerate",
+ );
+ return;
+ }
+ if let Err(e) = std::fs::create_dir_all(directory) {
+ panic!("Failed to create {UNICODE_DIRECTORY:?}: {e}");
+ }
+ let output = Command::new("curl").arg(URL_PREFIX.to_owned() + README).output().unwrap();
+ if !output.status.success() {
+ panic!(
+ "Failed to run curl to fetch readme: stderr: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ let current = std::fs::read_to_string(directory.join(README)).unwrap_or_default();
+ if current.as_bytes() != &output.stdout[..] {
+ std::fs::write(directory.join(README), output.stdout).unwrap();
+ }
+
+ for resource in RESOURCES {
+ let output = Command::new("curl").arg(URL_PREFIX.to_owned() + resource).output().unwrap();
+ if !output.status.success() {
+ panic!(
+ "Failed to run curl to fetch {}: stderr: {}",
+ resource,
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ std::fs::write(directory.join(resource), output.stdout).unwrap();
+ }
+}
diff --git a/src/tools/unstable-book-gen/Cargo.toml b/src/tools/unstable-book-gen/Cargo.toml
new file mode 100644
index 000000000..73e5a91be
--- /dev/null
+++ b/src/tools/unstable-book-gen/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "unstable-book-gen"
+version = "0.1.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+
+[dependencies]
+tidy = { path = "../tidy" }
+
+# not actually needed but required for now to unify the feature selection of
+# `num-traits` between this and `rustbook`
+num-traits = "0.2"
diff --git a/src/tools/unstable-book-gen/src/SUMMARY.md b/src/tools/unstable-book-gen/src/SUMMARY.md
new file mode 100644
index 000000000..933c928e2
--- /dev/null
+++ b/src/tools/unstable-book-gen/src/SUMMARY.md
@@ -0,0 +1,8 @@
+[The Unstable Book](the-unstable-book.md)
+
+- [Compiler flags](compiler-flags.md)
+{compiler_flags}
+- [Language features](language-features.md)
+{language_features}
+- [Library Features](library-features.md)
+{library_features}
diff --git a/src/tools/unstable-book-gen/src/main.rs b/src/tools/unstable-book-gen/src/main.rs
new file mode 100644
index 000000000..5a0477b4b
--- /dev/null
+++ b/src/tools/unstable-book-gen/src/main.rs
@@ -0,0 +1,130 @@
+//! Auto-generate stub docs for the unstable book
+
+use std::collections::BTreeSet;
+use std::env;
+use std::fs::{self, File};
+use std::io::Write;
+use std::path::Path;
+use tidy::features::{collect_lang_features, collect_lib_features, Features};
+use tidy::unstable_book::{
+ collect_unstable_book_section_file_names, collect_unstable_feature_names, LANG_FEATURES_DIR,
+ LIB_FEATURES_DIR, PATH_STR,
+};
+
+/// A helper macro to `unwrap` a result except also print out details like:
+///
+/// * The file/line of the panic
+/// * The expression that failed
+/// * The error itself
+macro_rules! t {
+ ($e:expr) => {
+ match $e {
+ Ok(e) => e,
+ Err(e) => panic!("{} failed with {}", stringify!($e), e),
+ }
+ };
+}
+
+fn generate_stub_issue(path: &Path, name: &str, issue: u32) {
+ let mut file = t!(File::create(path));
+ t!(write!(file, include_str!("stub-issue.md"), name = name, issue = issue));
+}
+
+fn generate_stub_no_issue(path: &Path, name: &str) {
+ let mut file = t!(File::create(path));
+ t!(write!(file, include_str!("stub-no-issue.md"), name = name));
+}
+
+fn set_to_summary_str(set: &BTreeSet<String>, dir: &str) -> String {
+ set.iter()
+ .map(|ref n| format!(" - [{}]({}/{}.md)", n.replace('-', "_"), dir, n))
+ .fold("".to_owned(), |s, a| s + &a + "\n")
+}
+
+fn generate_summary(path: &Path, lang_features: &Features, lib_features: &Features) {
+ let compiler_flags = collect_unstable_book_section_file_names(&path.join("src/compiler-flags"));
+
+ let compiler_flags_str = set_to_summary_str(&compiler_flags, "compiler-flags");
+
+ let unstable_lang_features = collect_unstable_feature_names(&lang_features);
+ let unstable_lib_features = collect_unstable_feature_names(&lib_features);
+
+ let lang_features_str = set_to_summary_str(&unstable_lang_features, "language-features");
+ let lib_features_str = set_to_summary_str(&unstable_lib_features, "library-features");
+
+ let mut file = t!(File::create(&path.join("src/SUMMARY.md")));
+ t!(file.write_fmt(format_args!(
+ include_str!("SUMMARY.md"),
+ compiler_flags = compiler_flags_str,
+ language_features = lang_features_str,
+ library_features = lib_features_str
+ )));
+}
+
+fn generate_unstable_book_files(src: &Path, out: &Path, features: &Features) {
+ let unstable_features = collect_unstable_feature_names(features);
+ let unstable_section_file_names = collect_unstable_book_section_file_names(src);
+ t!(fs::create_dir_all(&out));
+ for feature_name in &unstable_features - &unstable_section_file_names {
+ let feature_name_underscore = feature_name.replace('-', "_");
+ let file_name = format!("{feature_name}.md");
+ let out_file_path = out.join(&file_name);
+ let feature = &features[&feature_name_underscore];
+
+ if let Some(issue) = feature.tracking_issue {
+ generate_stub_issue(&out_file_path, &feature_name_underscore, issue.get());
+ } else {
+ generate_stub_no_issue(&out_file_path, &feature_name_underscore);
+ }
+ }
+}
+
+fn copy_recursive(from: &Path, to: &Path) {
+ for entry in t!(fs::read_dir(from)) {
+ let e = t!(entry);
+ let t = t!(e.metadata());
+ let dest = &to.join(e.file_name());
+ if t.is_file() {
+ t!(fs::copy(&e.path(), dest));
+ } else if t.is_dir() {
+ t!(fs::create_dir_all(dest));
+ copy_recursive(&e.path(), dest);
+ }
+ }
+}
+
+fn main() {
+ let library_path_str = env::args_os().nth(1).expect("library/ path required");
+ let compiler_path_str = env::args_os().nth(2).expect("compiler/ path required");
+ let src_path_str = env::args_os().nth(3).expect("src/ path required");
+ let dest_path_str = env::args_os().nth(4).expect("destination path required");
+ let library_path = Path::new(&library_path_str);
+ let compiler_path = Path::new(&compiler_path_str);
+ let src_path = Path::new(&src_path_str);
+ let dest_path = Path::new(&dest_path_str);
+
+ let lang_features = collect_lang_features(compiler_path, &mut false);
+ let lib_features = collect_lib_features(library_path)
+ .into_iter()
+ .filter(|&(ref name, _)| !lang_features.contains_key(name))
+ .collect();
+
+ let doc_src_path = src_path.join(PATH_STR);
+
+ t!(fs::create_dir_all(&dest_path));
+
+ generate_unstable_book_files(
+ &doc_src_path.join(LANG_FEATURES_DIR),
+ &dest_path.join(LANG_FEATURES_DIR),
+ &lang_features,
+ );
+ generate_unstable_book_files(
+ &doc_src_path.join(LIB_FEATURES_DIR),
+ &dest_path.join(LIB_FEATURES_DIR),
+ &lib_features,
+ );
+
+ copy_recursive(&doc_src_path, &dest_path);
+
+ generate_summary(&dest_path, &lang_features, &lib_features);
+}
diff --git a/src/tools/unstable-book-gen/src/stub-issue.md b/src/tools/unstable-book-gen/src/stub-issue.md
new file mode 100644
index 000000000..8698fb727
--- /dev/null
+++ b/src/tools/unstable-book-gen/src/stub-issue.md
@@ -0,0 +1,7 @@
+# `{name}`
+
+The tracking issue for this feature is: [#{issue}]
+
+[#{issue}]: https://github.com/rust-lang/rust/issues/{issue}
+
+------------------------
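
A minimal sketch of how `generate_stub_issue` fills the template above, assuming the sketch lives next to `stub-issue.md`; the feature name and issue number are invented.

```rust
fn main() {
    let stub = format!(include_str!("stub-issue.md"), name = "my_feature", issue = 12345);
    // Yields "# `my_feature`" followed by a link to tracking issue #12345.
    println!("{stub}");
}
```
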
diff --git a/src/tools/unstable-book-gen/src/stub-no-issue.md b/src/tools/unstable-book-gen/src/stub-no-issue.md
new file mode 100644
index 000000000..3da140633
--- /dev/null
+++ b/src/tools/unstable-book-gen/src/stub-no-issue.md
@@ -0,0 +1,5 @@
+# `{name}`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler and not intended for general use.
+
+------------------------
diff --git a/src/tools/x/Cargo.lock b/src/tools/x/Cargo.lock
new file mode 100644
index 000000000..723d6cb25
--- /dev/null
+++ b/src/tools/x/Cargo.lock
@@ -0,0 +1,5 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "x"
+version = "0.1.0"
diff --git a/src/tools/x/Cargo.toml b/src/tools/x/Cargo.toml
new file mode 100644
index 000000000..315027279
--- /dev/null
+++ b/src/tools/x/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "x"
+version = "0.1.0"
+description = "Run x.py slightly more conveniently"
+edition = "2021"
+publish = false
diff --git a/src/tools/x/README.md b/src/tools/x/README.md
new file mode 100644
index 000000000..80bf02e8a
--- /dev/null
+++ b/src/tools/x/README.md
@@ -0,0 +1,10 @@
+# x
+
+`x` invokes `x.py` from any subdirectory.
+
+To install, run the following commands:
+
+```
+$ cd rust/src/tools/x/
+$ cargo install --path .
+```
diff --git a/src/tools/x/src/main.rs b/src/tools/x/src/main.rs
new file mode 100644
index 000000000..9187c3551
--- /dev/null
+++ b/src/tools/x/src/main.rs
@@ -0,0 +1,96 @@
+//! Run `x.py` from any subdirectory of a rust compiler checkout.
+//!
+//! We prefer `exec`, to avoid adding an extra process in the process tree.
+//! However, since `exec` isn't available on Windows, we indirect through
+//! `exec_or_status`, which will call `exec` on unix and `status` on Windows.
+//!
+//! We use `python`, `python3`, or `python2` as the python interpreter to run
+//! `x.py`, in that order of preference.
+
+use std::{
+ env, io,
+ process::{self, Command, ExitStatus},
+};
+
+const PYTHON: &str = "python";
+const PYTHON2: &str = "python2";
+const PYTHON3: &str = "python3";
+
+fn python() -> &'static str {
+ let val = match env::var_os("PATH") {
+ Some(val) => val,
+ None => return PYTHON,
+ };
+
+ let mut python2 = false;
+ let mut python3 = false;
+
+ for dir in env::split_paths(&val) {
+ // `python` should always take precedence over python2 / python3 if it exists
+ if dir.join(PYTHON).exists() {
+ return PYTHON;
+ }
+
+ python2 |= dir.join(PYTHON2).exists();
+ python3 |= dir.join(PYTHON3).exists();
+ }
+
+ // try 3 before 2
+ if python3 {
+ PYTHON3
+ } else if python2 {
+ PYTHON2
+ } else {
+ // Python was not found on path, so exit
+ eprintln!("Unable to find python in your PATH. Please check it is installed.");
+ process::exit(1);
+ }
+}
+
+#[cfg(unix)]
+fn exec_or_status(command: &mut Command) -> io::Result<ExitStatus> {
+ use std::os::unix::process::CommandExt;
+ Err(command.exec())
+}
+
+#[cfg(not(unix))]
+fn exec_or_status(command: &mut Command) -> io::Result<ExitStatus> {
+ command.status()
+}
+
+fn main() {
+ let current = match env::current_dir() {
+ Ok(dir) => dir,
+ Err(err) => {
+ eprintln!("Failed to get current directory: {err}");
+ process::exit(1);
+ }
+ };
+
+ for dir in current.ancestors() {
+ let candidate = dir.join("x.py");
+
+ if candidate.exists() {
+ let mut python = Command::new(python());
+
+ python.arg(&candidate).args(env::args().skip(1)).current_dir(dir);
+
+ let result = exec_or_status(&mut python);
+
+ match result {
+ Err(error) => {
+ eprintln!("Failed to invoke `{}`: {}", candidate.display(), error);
+ }
+ Ok(status) => {
+ process::exit(status.code().unwrap_or(1));
+ }
+ }
+ }
+ }
+
+ eprintln!(
+ "x.py not found. Please run inside of a checkout of `https://github.com/rust-lang/rust`."
+ );
+
+ process::exit(1);
+}